diff --git a/.bazelrc b/.bazelrc new file mode 100644 index 0000000..7c3f144 --- /dev/null +++ b/.bazelrc @@ -0,0 +1,52 @@ +# Copyright 2021 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================== +# +# TFLM Bazel configuration file. + +# Use the following C++ standard +build --cxxopt -std=c++17 + +# When building with the address sanitizer +# E.g., bazel build --config asan +build:asan --repo_env CC=clang +build:asan --strip=never +build:asan --copt -fsanitize=address +build:asan --copt -DADDRESS_SANITIZER +build:asan --copt -g +build:asan --copt -O3 +build:asan --copt -fno-omit-frame-pointer +build:asan --linkopt -fsanitize=address + +# When building with the memory sanitizer +# E.g., bazel build --config msan +build:msan --repo_env CC=clang +build:msan --strip=never +build:msan --copt -fsanitize=memory +build:msan --copt -DADDRESS_SANITIZER +build:msan --copt -g +build:msan --copt -O3 +build:msan --copt -fno-omit-frame-pointer +build:msan --linkopt -fsanitize=memory + +# When building with the undefined behavior sanitizer +# E.g., bazel build --config ubsan +build:ubsan --repo_env CC=clang +build:ubsan --strip=never +build:ubsan --copt -fsanitize=undefined +build:ubsan --copt -g +build:ubsan --copt -O3 +build:ubsan --copt -fno-omit-frame-pointer +build:ubsan --linkopt -fsanitize=undefined +build:ubsan --linkopt -lubsan diff --git a/.clang-format 
b/.clang-format new file mode 100644 index 0000000..e06cf47 --- /dev/null +++ b/.clang-format @@ -0,0 +1,4 @@ +# Run manually to reformat a file: +# clang-format -i --style=file +BasedOnStyle: Google +DerivePointerAlignment: false diff --git a/.editorconfig b/.editorconfig new file mode 100644 index 0000000..e91fc03 --- /dev/null +++ b/.editorconfig @@ -0,0 +1,29 @@ +# Define the project's code formatting styles +# +# https://editorconfig.org +# +# EditorConfig is a file format for defining coding styles. EditorConfig is +# natively supported by VisualStudio, GitHub, Neovim, etc. + +[*] +# Unix-style newlines and a newline ending in every file +end_of_line = lf +insert_final_newline = true + +[*.{cc,h}] +# https://google.github.io/styleguide/cppguide.html +indent_style = space +indent_size = 2 + +[*.py] +# https://google.github.io/styleguide/pyguide.html but 2-space indent +indent_style = space +indent_size = 2 + +[WORKSPACE,BUILD,*.bzl] +# https://bazel.build/build/style-guide +indent_style = space +indent_size = 4 + +[Makefile] +indent_style = tab diff --git a/.github/ci-error-template.md b/.github/ci-error-template.md new file mode 100644 index 0000000..8e21be3 --- /dev/null +++ b/.github/ci-error-template.md @@ -0,0 +1,7 @@ +--- +title: Failed CI Test +labels: bug +--- +There was a failed test in the CI pipeline for [PR {{ env.PR_NUM }}]({{ env.PR_LINK }}). Please see comments in the PR for more details. + +This issue has been automatically generated for notification purposes. 
diff --git a/.github/mergify.yml b/.github/mergify.yml new file mode 100644 index 0000000..79b9fc9 --- /dev/null +++ b/.github/mergify.yml @@ -0,0 +1,29 @@ +queue_rules: + - name: default + checks_timeout: 2 h + conditions: + - base=main + - label=ci:ready_to_merge + + +pull_request_rules: + - name: push to default merge queue + conditions: + - base=main + - label=ci:ready_to_merge + actions: + queue: + name: default + require_branch_protection: true + method: squash + commit_message_template: | + {{ title }} (#{{ number }}) + {{ body_raw }} + + - name: remove ci:ready_to_merge label + conditions: + - merged + actions: + label: + remove: + - ci:ready_to_merge diff --git a/.github/scheduled-error-template.md b/.github/scheduled-error-template.md new file mode 100644 index 0000000..d0f4321 --- /dev/null +++ b/.github/scheduled-error-template.md @@ -0,0 +1,7 @@ +--- +title: Scheduled workflow failed +labels: bug +--- +{{ env.WORKFLOW }} run number {{ env.RUN_NUMBER }} failed. Please examine the run itself for more details. + +This issue has been automatically generated for notification purposes. diff --git a/.github/stale.yml b/.github/stale.yml new file mode 100644 index 0000000..b210e3b --- /dev/null +++ b/.github/stale.yml @@ -0,0 +1,15 @@ +# Number of days of inactivity before an Issue or Pull Request becomes stale +daysUntilStale: 30 +# Number of days of inactivity before a stale Issue or Pull Request is closed +daysUntilClose: 15 +# Comment to post when marking as stale. Set to `false` to disable +markComment: > + This issue has been automatically marked as stale because it has no + recent activity. It will be closed if no further activity occurs. Thank you. +# Comment to post when removing the stale label. Set to `false` to disable +unmarkComment: false +closeComment: > + Closing as stale. Please reopen if you'd like to work on this further. 
+limitPerRun: 30 +# Limit to only `issues` or `pulls` +only: issues diff --git a/.github/workflows/check_tflite_files.yml b/.github/workflows/check_tflite_files.yml new file mode 100644 index 0000000..761d763 --- /dev/null +++ b/.github/workflows/check_tflite_files.yml @@ -0,0 +1,40 @@ +name: Check TfLite Files + +on: + workflow_call: + inputs: + trigger-sha: + required: true + type: string + pr-number: + required: true + type: string + pr-body: + required: true + type: string + secrets: + tflm-bot-token: + required: true + +jobs: + check_tflite_files: + runs-on: ubuntu-latest + name: Check PR Modifies TfLite Files + steps: + - uses: actions/checkout@v2 + with: + ref: ${{ inputs.trigger-sha }} + + - name: Check Files + if: ${{ !contains(inputs.pr-body, 'NO_CHECK_TFLITE_FILES=') }} + run: | + URL="https://api.github.com/repos/${{ github.repository }}/pulls/${{ inputs.pr-number }}/files" + PR_FILES=$(curl -s -X GET -H "Authorization: Bearer ${{ secrets.tflm-bot-token }}" $URL | jq -r '.[] | .filename') + rm -rf tmp_pull_request_files.txt + echo "${PR_FILES}" >> tmp_pull_request_files.txt + rm -rf .git + echo ${{ secrets.tflm-bot-token }} | docker login ghcr.io -u tflm-bot --password-stdin + docker run --rm -v `pwd`:/tflite-micro -w /tflite-micro ghcr.io/tflm-bot/tflm-ci:latest python3 ci/check_tflite_files.py tmp_pull_request_files.txt + TFLITE_FILE_TEST_STATUS=$? + rm -f tmp_pull_request_files.txt + exit ${TFLITE_FILE_TEST_STATUS} diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml new file mode 100644 index 0000000..8728675 --- /dev/null +++ b/.github/workflows/ci.yml @@ -0,0 +1,309 @@ +# YAML schema for GitHub Actions: +# https://help.github.com/en/actions/automating-your-workflow-with-github-actions/workflow-syntax-for-github-actions +# +# Helpful YAML parser to clarify YAML syntax: +# https://yaml-online-parser.appspot.com/ +# +# +# This file contains jobs that are run prior to merging a pull request. +# +# This file can not be run stand-alone. 
It is called from tests_entry.yml as part of +# the ci automation or from run-ci.yml for scheduled or dispatch triggering. + +name: CI + +on: + workflow_call: + inputs: + trigger-sha: + required: true + type: string + +jobs: + bazel_tests: + runs-on: ubuntu-latest + + name: Bazel (presubmit) + steps: + - uses: actions/setup-python@v4 + with: + python-version: '3.10' + - uses: actions/checkout@v3 + with: + ref: ${{ inputs.trigger-sha }} + - name: Install dependencies + run: | + sudo ci/install_bazelisk.sh + pip3 install Pillow + pip3 install Wave + pip3 install numpy + - name: Test + run: | + tensorflow/lite/micro/tools/ci_build/test_bazel.sh + + bazel_tests_tflite_tools: + runs-on: ubuntu-latest + + name: Bazel TFLite Tools (presubmit) + steps: + - uses: actions/setup-python@v4 + with: + python-version: '3.10' + - uses: actions/checkout@v3 + with: + ref: ${{ inputs.trigger-sha }} + - name: Install dependencies + run: | + sudo ci/install_bazelisk.sh + pip3 install Pillow + pip3 install Wave + pip3 install numpy + - name: Test + run: | + tensorflow/lite/micro/tools/ci_build/test_bazel_tflite_tools.sh + + bazel_msan: + runs-on: ubuntu-latest + + name: Bazel msan (presubmit) + steps: + - uses: actions/setup-python@v4 + with: + python-version: '3.10' + - uses: actions/checkout@v3 + with: + ref: ${{ inputs.trigger-sha }} + - name: Install dependencies + run: | + sudo ci/install_bazelisk.sh + pip3 install Pillow + pip3 install Wave + pip3 install numpy + - name: Test + run: | + tensorflow/lite/micro/tools/ci_build/test_bazel_msan.sh + + bazel_asan: + runs-on: ubuntu-latest + + name: Bazel asan (presubmit) + steps: + - uses: actions/setup-python@v4 + with: + python-version: '3.10' + - uses: actions/checkout@v3 + with: + ref: ${{ inputs.trigger-sha }} + - name: Install dependencies + run: | + sudo ci/install_bazelisk.sh + pip3 install Pillow + pip3 install Wave + pip3 install numpy + - name: Test + run: | + tensorflow/lite/micro/tools/ci_build/test_bazel_asan.sh + + 
cortex_m_bluepill_release: + runs-on: ubuntu-latest + + name: Cortex-M Bluepill Release (presubmit) + steps: + - uses: actions/setup-python@v4 + with: + python-version: '3.10' + - uses: actions/checkout@v3 + with: + ref: ${{ inputs.trigger-sha }} + - name: Install dependencies + run: | + pip3 install Pillow + pip3 install Wave + pip3 install numpy + - name: Test + run: | + cd ../ + tflite-micro/tensorflow/lite/micro/tools/ci_build/test_bluepill_release.sh tflite-micro/ + + cortex_m_bluepill_no_release: + runs-on: ubuntu-latest + + name: Cortex-M Bluepill No Release (presubmit) + steps: + - uses: actions/setup-python@v4 + with: + python-version: '3.10' + - uses: actions/checkout@v3 + with: + ref: ${{ inputs.trigger-sha }} + - name: Install dependencies + run: | + pip3 install Pillow + pip3 install Wave + pip3 install numpy + - name: Test + run: | + cd ../ + tflite-micro/tensorflow/lite/micro/tools/ci_build/test_bluepill_no_release.sh tflite-micro/ + + cortex_m_bluepill_renode: + runs-on: ubuntu-latest + + name: Cortex-M Bluepill Renode (presubmit) + steps: + - uses: actions/setup-python@v4 + with: + python-version: '3.10' + - uses: actions/checkout@v3 + with: + ref: ${{ inputs.trigger-sha }} + - name: Install dependencies + run: | + pip3 install Pillow + pip3 install Wave + pip3 install numpy + - name: Test + run: | + cd ../ + tflite-micro/tensorflow/lite/micro/tools/ci_build/test_bluepill_renode.sh tflite-micro/ + + cortex_m_qemu: + runs-on: ubuntu-latest + + name: Cortex-M QEMU Unit Tests (presubmit) + steps: + - uses: actions/setup-python@v4 + with: + python-version: '3.10' + - uses: actions/checkout@v3 + with: + ref: ${{ inputs.trigger-sha }} + - name: Test + uses: docker://ghcr.io/tflm-bot/tflm-ci:latest + with: + args: /bin/sh -c tensorflow/lite/micro/tools/ci_build/test_cortex_m_qemu.sh tflite-micro/ + + check_code_style: + runs-on: ubuntu-latest + + name: Code Style (presubmit) + steps: + - uses: actions/setup-python@v4 + with: + python-version: '3.10' + - 
uses: actions/checkout@v3 + with: + ref: ${{ inputs.trigger-sha }} + - name: Check + uses: docker://ghcr.io/tflm-bot/tflm-ci:latest + with: + args: /bin/sh -c "git config --global --add safe.directory /github/workspace && tensorflow/lite/micro/tools/ci_build/test_code_style.sh" + + project_generation: + runs-on: ubuntu-latest + + name: Project Generation (presubmit) + steps: + - uses: actions/setup-python@v4 + with: + python-version: '3.10' + - uses: actions/checkout@v3 + with: + ref: ${{ inputs.trigger-sha }} + - name: Install dependencies + run: | + pip3 install Pillow + pip3 install Wave + pip3 install numpy + - name: Test + run: | + cd ../ + tflite-micro/tensorflow/lite/micro/tools/ci_build/test_project_generation.sh tflite-micro/ + + x86_release: + runs-on: ubuntu-latest + + name: Makefile x86 Release (presubmit) + steps: + - uses: actions/setup-python@v4 + with: + python-version: '3.10' + - uses: actions/checkout@v3 + with: + ref: ${{ inputs.trigger-sha }} + - name: Install dependencies + run: | + pip3 install Pillow + pip3 install Wave + pip3 install numpy + - name: Test + run: | + tensorflow/lite/micro/tools/ci_build/test_makefile.sh + cd ../ + tflite-micro/tensorflow/lite/micro/tools/ci_build/test_x86_release.sh tflite-micro/ + + x86_default: + runs-on: ubuntu-latest + + name: Makefile x86 Default (presubmit) + steps: + - uses: actions/setup-python@v4 + with: + python-version: '3.10' + - uses: actions/checkout@v3 + with: + ref: ${{ inputs.trigger-sha }} + - name: Install dependencies + run: | + pip3 install Pillow + pip3 install Wave + pip3 install numpy + - name: Test + run: | + tensorflow/lite/micro/tools/ci_build/test_makefile.sh + cd ../ + tflite-micro/tensorflow/lite/micro/tools/ci_build/test_x86_default.sh tflite-micro/ + + x86_out_of_tree: + runs-on: ubuntu-latest + + name: Makefile x86 Out Of Tree (presubmit) + steps: + - uses: actions/setup-python@v4 + with: + python-version: '3.10' + - uses: actions/checkout@v3 + with: + ref: ${{ 
inputs.trigger-sha }} + - name: Install dependencies + run: | + pip3 install Pillow + pip3 install Wave + pip3 install numpy + - name: Test + run: | + tensorflow/lite/micro/tools/ci_build/test_makefile.sh + cd ../ + tflite-micro/tensorflow/lite/micro/tools/ci_build/test_x86_out_of_tree.sh tflite-micro/ + + x86_no_tflite_static_memory: + runs-on: ubuntu-latest + + name: Makefile x86 No TFLite Static Memory (presubmit) + steps: + - uses: actions/setup-python@v4 + with: + python-version: '3.10' + - uses: actions/checkout@v3 + with: + ref: ${{ inputs.trigger-sha }} + - name: Install dependencies + run: | + pip3 install Pillow + pip3 install Wave + pip3 install numpy + - name: Test + run: | + tensorflow/lite/micro/tools/ci_build/test_makefile.sh + cd ../ + tflite-micro/tensorflow/lite/micro/tools/ci_build/test_x86_no_tflite_static_memory.sh tflite-micro/ diff --git a/.github/workflows/cortex_m.yml b/.github/workflows/cortex_m.yml new file mode 100644 index 0000000..26fe77d --- /dev/null +++ b/.github/workflows/cortex_m.yml @@ -0,0 +1,73 @@ +# YAML schema for GitHub Actions: +# https://help.github.com/en/actions/automating-your-workflow-with-github-actions/workflow-syntax-for-github-actions +# +# Helpful YAML parser to clarify YAML syntax: +# https://yaml-online-parser.appspot.com/ + +name: Cortex-M + +# https://docs.github.com/en/actions/reference/workflow-syntax-for-github-actions#onschedule +on: + schedule: + - cron: '0 4 * * *' + + # Allow manually triggering of the workflow. 
+ workflow_dispatch: {} + +jobs: + cortex_m_generic: + runs-on: ubuntu-latest + + if: | + github.event_name == 'workflow_dispatch' || + (github.event_name == 'schedule' && github.repository == 'tensorflow/tflite-micro') + + name: Cortex-M Generic + steps: + - uses: actions/setup-python@v4 + with: + python-version: '3.10' + - uses: actions/checkout@v2 + - name: Install dependencies + run: | + pip3 install Pillow + pip3 install Wave + pip3 install numpy + - name: Test + run: | + tensorflow/lite/micro/tools/ci_build/test_cortex_m_generic.sh + + cortex_m_corstone_300: + runs-on: ubuntu-latest + + if: | + github.event_name == 'workflow_dispatch' || + (github.event_name == 'schedule' && github.repository == 'tensorflow/tflite-micro') + + name: Cortex-M Corstone 300 (FVP) + steps: + - uses: actions/setup-python@v4 + with: + python-version: '3.10' + - uses: actions/checkout@v2 + - name: Install dependencies + run: | + pip3 install Pillow + pip3 install Wave + pip3 install numpy + - name: Test + run: | + tensorflow/lite/micro/tools/ci_build/test_cortex_m_corstone_300.sh + + issue-on-error: + needs: [cortex_m_generic, cortex_m_corstone_300] + if: ${{ always() && contains(needs.*.result, 'failure') }} + uses: ./.github/workflows/issue_on_error.yml + with: + repo: ${{ github.repository }} + workflow: ${{ github.workflow }} + run_id: ${{ github.run_id }} + run_number: ${{ github.run_number }} + flag_label: ci:bot_issue + secrets: + token: ${{ secrets.GITHUB_TOKEN }} diff --git a/.github/workflows/cortex_m_virtual_hardware.yml b/.github/workflows/cortex_m_virtual_hardware.yml new file mode 100644 index 0000000..5e2e909 --- /dev/null +++ b/.github/workflows/cortex_m_virtual_hardware.yml @@ -0,0 +1,80 @@ +# YAML schema for GitHub Actions: +# https://help.github.com/en/actions/automating-your-workflow-with-github-actions/workflow-syntax-for-github-actions +# +# Helpful YAML parser to clarify YAML syntax: +# https://yaml-online-parser.appspot.com/ + +name: Cortex-M on Arm Virtual 
Hardware + +# https://docs.github.com/en/actions/reference/workflow-syntax-for-github-actions#onschedule +on: + schedule: + - cron: '0 4 * * *' + + # Allow manually triggering of the workflow. + workflow_dispatch: {} + +env: + AWS_DEFAULT_REGION: eu-west-1 + AWS_S3_BUCKET_NAME: tensorflow-ci-1 + AWS_IAM_PROFILE: Proj-s3-orta-vht-role + AWS_SECURITY_GROUP_ID: sg-03afe5ec007b4bcb0 + AWS_SUBNET_ID: subnet-025b7baebd743a68b +jobs: + cortex_m_generic: + runs-on: ubuntu-latest + permissions: + id-token: write + contents: read + if: | + github.event_name == 'workflow_dispatch' || + (github.event_name == 'schedule' && github.repository == 'tensorflow/tflite-micro') + name: Cortex-M Generic + steps: + - uses: actions/checkout@v2 + - name: Set up Python 3.10 + uses: actions/setup-python@v2 + with: + python-version: '3.10' + - name: Install AVH Client for Python + run: | + pip install git+https://github.com/ARM-software/avhclient.git@v0.1 + - name: Configure AWS Credentials + uses: aws-actions/configure-aws-credentials@v1 + with: + role-to-assume: arn:aws:iam::720528183931:role/Proj-vht-assume-role + aws-region: eu-west-1 + - name: Execute test suite on Arm Virtual Hardware at AWS + run: | + avhclient -b aws execute --specfile ./tensorflow/lite/micro/tools/github/arm_virtual_hardware/cortex_m_generic_avh.yml + - name: Fetch results from Arm Virtual Hardware + run: | + cat ./tensorflow/lite/micro/tools/github/arm_virtual_hardware/cortex_m_generic.log + + + cortex_m_corstone_300: + runs-on: ubuntu-latest + if: | + github.event_name == 'workflow_dispatch' || + (github.event_name == 'schedule' && github.repository == 'tensorflow/tflite-micro') + name: Cortex-M Corstone 300 (FVP) + steps: + - uses: actions/checkout@v2 + - name: Set up Python 3.10 + uses: actions/setup-python@v2 + with: + python-version: '3.10' + - name: Configure AWS Credentials + uses: aws-actions/configure-aws-credentials@v1 + with: + role-to-assume: arn:aws:iam::720528183931:role/Proj-vht-assume-role + 
aws-region: eu-west-1 + - name: Install AVH Client for Python + run: | + pip install git+https://github.com/ARM-software/avhclient.git@v0.1.1 + - name: Execute test suite on Arm Virtual Hardware at AWS + run: | + avhclient -b aws execute --specfile ./tensorflow/lite/micro/tools/github/arm_virtual_hardware/cortex_m_corstone_300_avh.yml + - name: Fetch results from Arm Virtual Hardware + run: | + cat ./tensorflow/lite/micro/tools/github/arm_virtual_hardware/corstone300.log diff --git a/.github/workflows/generate_integration_tests.yml b/.github/workflows/generate_integration_tests.yml new file mode 100644 index 0000000..91c8f18 --- /dev/null +++ b/.github/workflows/generate_integration_tests.yml @@ -0,0 +1,51 @@ +# YAML schema for GitHub Actions: +# https://help.github.com/en/actions/automating-your-workflow-with-github-actions/workflow-syntax-for-github-actions +# +# Helpful YAML parser to clarify YAML syntax: +# https://yaml-online-parser.appspot.com/ + +name: Generate Integration Tests + +# https://docs.github.com/en/actions/reference/workflow-syntax-for-github-actions#onschedule +on: + schedule: + - cron: '0 4 * * *' + + # Allow manually triggering of the workflow. 
+ workflow_dispatch: {} + +jobs: + generate_integration_tests: + runs-on: ubuntu-latest + + if: | + github.event_name == 'workflow_dispatch' || + (github.event_name == 'schedule' && github.repository == 'tensorflow/tflite-micro') + name: Generate Integration Tests + steps: + - uses: actions/setup-python@v4 + with: + python-version: '3.10' + - uses: actions/checkout@v2 + - name: Install dependencies + run: | + pip3 install Pillow + pip3 install Wave + pip3 install numpy + - name: Test + run: | + tensorflow/lite/micro/tools/ci_build/test_generate_integration_tests.sh + + issue-on-error: + needs: [generate_integration_tests] + if: ${{ always() && contains(needs.*.result, 'failure') }} + uses: ./.github/workflows/issue_on_error.yml + with: + repo: ${{ github.repository }} + workflow: ${{ github.workflow }} + run_id: ${{ github.run_id }} + run_number: ${{ github.run_number }} + flag_label: ci:bot_issue + secrets: + token: ${{ secrets.GITHUB_TOKEN }} + diff --git a/.github/workflows/hexagon.yml b/.github/workflows/hexagon.yml new file mode 100644 index 0000000..ed83a16 --- /dev/null +++ b/.github/workflows/hexagon.yml @@ -0,0 +1,38 @@ +# YAML schema for GitHub Actions: +# https://help.github.com/en/actions/automating-your-workflow-with-github-actions/workflow-syntax-for-github-actions +# +# Helpful YAML parser to clarify YAML syntax: +# https://yaml-online-parser.appspot.com/ +# +# This file can not be run stand-alone. It is called from tests_entry.yml as part of +# the ci automation or from run_hexagon.yml for scheduled or dispatch triggering. 
+ +name: Hexagon + +on: + workflow_call: + inputs: + trigger-sha: + required: true + type: string + secrets: + tflm-bot-token: + required: true + +jobs: + hexagon_build: + runs-on: ubuntu-latest + + name: Hexagon Build Test (presubmit) + steps: + - uses: actions/checkout@v2 + with: + ref: ${{ inputs.trigger-sha }} + - run: | + rm -rf .git + echo ${{ secrets.tflm-bot-token }} | docker login ghcr.io -u tflm-bot --password-stdin + docker run --rm -v `pwd`:/opt/tflite-micro ghcr.io/tflm-bot/hexagon:0.4 \ + /bin/bash -c \ + "cd /opt && tflite-micro/tensorflow/lite/micro/tools/ci_build/test_hexagon.sh tflite-micro/" + + diff --git a/.github/workflows/issue_on_error.yml b/.github/workflows/issue_on_error.yml new file mode 100644 index 0000000..103ef6f --- /dev/null +++ b/.github/workflows/issue_on_error.yml @@ -0,0 +1,61 @@ +name: Issue On Error + +# This workflow can be called from other workflows in order to generate +# an issue on failure. +# +# With only the required inputs issue will have the title of the failing workflow +# and a link to it. +# +# If the optional pr_number and link are provided the title and link will be for +# the PR. +# +# Either type of issue will be tagged (by default with ci:bot_issue). +# If an issue already exists, a comment will be added with a timestamp of the +# new failure and a link. 
+ +on: + workflow_call: + inputs: + repo: + type: string + required: true + workflow: + type: string + required: true + run_id: + type: string + required: true + run_number: + type: string + required: true + flag_label: + type: string + default: 'ci:bot_issue' + pr_number: + type: string + pr_link: + type: string + + secrets: + token: + required: true + +jobs: + error-trap: + runs-on: ubuntu-latest + name: Error Trap + steps: + - uses: actions/checkout@v3 + - name: Run Reporting Script + env: + REPO: ${{ inputs.repo }} + WORKFLOW: ${{ inputs.workflow }} + RUN_ID: ${{ inputs.run_id }} + RUN_NUMBER: ${{ inputs.run_number }} + GITHUB_TOKEN: ${{ secrets.token }} + FLAG_LABEL: ${{ inputs.flag_label }} + PR_NUMBER: ${{ inputs.pr_number }} + PR_LINK: ${{ inputs.pr_link }} + run: | + python3 ci/issue_on_error.py + diff --git a/.github/workflows/log_binary_size_pr.yml b/.github/workflows/log_binary_size_pr.yml new file mode 100644 index 0000000..12b3527 --- /dev/null +++ b/.github/workflows/log_binary_size_pr.yml @@ -0,0 +1,63 @@ +name: Binary Size Log PR + +# Submits a PR with all the size profiling logs from $LOG_BRANCH +# This is intended to be used with the logs stored by log_binary_size.yml +# +# Points that can be confusing: +# * git checkout --track <remote>/<branch> creates a local tracking branch from a remote +# branch reference and changes to that branch. +# * git checkout <branch> -- <paths> checks out files from <branch> into the current branch. +# It does not switch branches. 
+# +# The provided token must be able to submit a PR + +on: + workflow_dispatch: {} + +# uncomment this section to run on schedule + schedule: + # 6am PT 15 and 30th of month +# - cron: '0 13 15,30 * *' + # 6am PT every sunday + - cron: '0 13 * * 0' + +# uncomment this section to run on a tag +# pull_request: +# types: [labeled] +# branches: +# - main + +env: + LOG_BRANCH: profiling-logs + +jobs: + binary-size-log: + runs-on: ubuntu-latest + if: | + github.event_name == 'workflow_dispatch' || + (github.event_name == 'pull_request' && contains(github.event.pull_request.labels.*.name, 'ci:test')) || + github.event_name == 'schedule' + steps: + - uses: actions/checkout@v3 + with: + fetch-depth: 0 + token: ${{ secrets.TFLM_BOT_REPO_TOKEN }} + - name: prepare files for logging + run: | + git checkout --track origin/$LOG_BRANCH + git checkout main + git checkout $LOG_BRANCH -- data/continuous_builds/size_profiling/ + - name: Create Logs PR Request + id: create-pr + uses: peter-evans/create-pull-request@052fc72b4198ba9fbc81b818c6e1859f747d49a8 + with: + branch: binary_size_profiling_update + delete-branch: true + token: ${{ secrets.TFLM_BOT_REPO_TOKEN }} + title: Automated binary size log update + commit-message: Automated binary size log update + committer: TFLM-bot + author: TFLM-bot + body: "BUG=automated binary size log update" + labels: ci:run + reviewers: advaitjain \ No newline at end of file diff --git a/.github/workflows/riscv.yml b/.github/workflows/riscv.yml new file mode 100644 index 0000000..7364500 --- /dev/null +++ b/.github/workflows/riscv.yml @@ -0,0 +1,42 @@ +name: RISC-V + +# https://docs.github.com/en/actions/reference/workflow-syntax-for-github-actions#onschedule +on: + schedule: + # 10am UTC is 3am or 4am PT depending on daylight savings. + - cron: '0 10 * * *' + + # Allow manually triggering of the workflow. 
+ workflow_dispatch: {} + +jobs: + riscv_daily: + runs-on: ubuntu-latest + + if: | + github.event_name == 'workflow_dispatch' || + (github.event_name == 'schedule' && github.repository == 'tensorflow/tflite-micro') + + name: RISC-V Continuous Builds + steps: + - uses: actions/setup-python@v4 + with: + python-version: '3.10' + - uses: actions/checkout@v3 + - name: Test + uses: docker://ghcr.io/tflm-bot/tflm-ci:latest + with: + args: /bin/sh -c tensorflow/lite/micro/tools/ci_build/test_riscv.sh + + issue-on-error: + needs: [riscv_daily] + if: ${{ always() && contains(needs.*.result, 'failure') }} + uses: ./.github/workflows/issue_on_error.yml + with: + repo: ${{ github.repository }} + workflow: ${{ github.workflow }} + run_id: ${{ github.run_id }} + run_number: ${{ github.run_number }} + flag_label: ci:bot_issue + secrets: + token: ${{ secrets.GITHUB_TOKEN }} diff --git a/.github/workflows/riscv_postmerge.yml b/.github/workflows/riscv_postmerge.yml new file mode 100644 index 0000000..7d54c2a --- /dev/null +++ b/.github/workflows/riscv_postmerge.yml @@ -0,0 +1,37 @@ +# YAML schema for GitHub Actions: +# https://help.github.com/en/actions/automating-your-workflow-with-github-actions/workflow-syntax-for-github-actions +# +# Helpful YAML parser to clarify YAML syntax: +# https://yaml-online-parser.appspot.com/ +# +# This file can not be run stand-alone. It is called from tests_post.yml as part of +# the ci automation. 
+ +name: RISC-V Postmerge + +on: + workflow_call: + inputs: + trigger-sha: + required: true + type: string + secrets: + tflm-bot-token: + required: true + +jobs: + riscv_postmerge: + runs-on: ubuntu-latest + + name: RISC-V Tests (postmerge) + steps: + - uses: actions/setup-python@v4 + with: + python-version: '3.10' + - uses: actions/checkout@v3 + with: + ref: ${{ inputs.trigger-sha }} + - name: Test + uses: docker://ghcr.io/tflm-bot/tflm-ci:latest + with: + args: /bin/sh -c tensorflow/lite/micro/tools/ci_build/test_riscv.sh diff --git a/.github/workflows/run_ci.yml b/.github/workflows/run_ci.yml new file mode 100644 index 0000000..64ec514 --- /dev/null +++ b/.github/workflows/run_ci.yml @@ -0,0 +1,33 @@ +name: Run-CI + +# This is the entry point for ci.yml for scheduled and workflow_dispatch events. + +on: + schedule: + # 10am UTC is 3am or 4am PT depending on daylight savings. + - cron: '0 10 * * *' + + # Allow manually triggering of the workflow. + workflow_dispatch: {} + +jobs: + call-ci: + uses: ./.github/workflows/ci.yml + if: | + github.event_name == 'workflow_dispatch' || + (github.event_name == 'schedule' && github.repository == 'tensorflow/tflite-micro') + with: + trigger-sha: ${{ github.sha }} + + issue-on-error: + needs: [call-ci] + if: ${{ always() && contains(needs.*.result, 'failure') }} + uses: ./.github/workflows/issue_on_error.yml + with: + repo: ${{ github.repository }} + workflow: ${{ github.workflow }} + run_id: ${{ github.run_id }} + run_number: ${{ github.run_number }} + flag_label: ci:bot_issue + secrets: + token: ${{ secrets.GITHUB_TOKEN }} \ No newline at end of file diff --git a/.github/workflows/run_hexagon.yml b/.github/workflows/run_hexagon.yml new file mode 100644 index 0000000..49d4186 --- /dev/null +++ b/.github/workflows/run_hexagon.yml @@ -0,0 +1,35 @@ +name: Run-Hexagon + +# This is the entry point for hexagon.yml for scheduled and workflow_dispatch events. 
+ +on: + schedule: + # 10am UTC is 3am or 4am PT depending on daylight savings. + - cron: '0 10 * * *' + + # Allow manually triggering of the workflow. + workflow_dispatch: {} + +jobs: + call-hexagon: + uses: ./.github/workflows/hexagon.yml + if: | + github.event_name == 'workflow_dispatch' || + (github.event_name == 'schedule' && github.repository == 'tensorflow/tflite-micro') + with: + trigger-sha: ${{ github.sha }} + secrets: + tflm-bot-token: ${{ secrets.TFLM_BOT_PACKAGE_READ_TOKEN }} + + issue-on-error: + needs: [call-hexagon] + if: ${{ always() && contains(needs.*.result, 'failure') }} + uses: ./.github/workflows/issue_on_error.yml + with: + repo: ${{ github.repository }} + workflow: ${{ github.workflow }} + run_id: ${{ github.run_id }} + run_number: ${{ github.run_number }} + flag_label: ci:bot_issue + secrets: + token: ${{ secrets.GITHUB_TOKEN }} \ No newline at end of file diff --git a/.github/workflows/run_xtensa.yml b/.github/workflows/run_xtensa.yml new file mode 100644 index 0000000..cbd6dec --- /dev/null +++ b/.github/workflows/run_xtensa.yml @@ -0,0 +1,42 @@ +name: Run-Xtensa + +# This is the entry point for xtensa.yml for scheduled and workflow_dispatch events. + + +on: + schedule: + # 10am UTC is 3am or 4am PT depending on daylight savings. + - cron: '0 10 * * *' + + # Allow manually triggering of the workflow. 
+ workflow_dispatch: {} + +jobs: + call-xtensa-presubmit: + uses: ./.github/workflows/xtensa_presubmit.yml + if: github.repository == 'tensorflow/tflite-micro' + with: + trigger-sha: ${{ github.sha }} + secrets: + tflm-bot-token: ${{ secrets.TFLM_BOT_PACKAGE_READ_TOKEN }} + + call-xtensa-postmerge: + uses: ./.github/workflows/xtensa_postmerge.yml + if: github.repository == 'tensorflow/tflite-micro' + with: + trigger-sha: ${{ github.sha }} + secrets: + tflm-bot-token: ${{ secrets.TFLM_BOT_PACKAGE_READ_TOKEN }} + + issue-on-error: + needs: [call-xtensa-presubmit,call-xtensa-postmerge] + if: ${{ always() && contains(needs.*.result, 'failure') }} + uses: ./.github/workflows/issue_on_error.yml + with: + repo: ${{ github.repository }} + workflow: ${{ github.workflow }} + run_id: ${{ github.run_id }} + run_number: ${{ github.run_number }} + flag_label: ci:bot_issue + secrets: + token: ${{ secrets.GITHUB_TOKEN }} \ No newline at end of file diff --git a/.github/workflows/stale_handler.yml b/.github/workflows/stale_handler.yml new file mode 100644 index 0000000..d6c3fa7 --- /dev/null +++ b/.github/workflows/stale_handler.yml @@ -0,0 +1,29 @@ +name: 'Stale Handler' +on: + schedule: + - cron: '0 10 * * *' + + workflow_dispatch: {} + +jobs: + stale: + runs-on: ubuntu-latest + steps: + - uses: actions/stale@v7 + with: + stale-issue-message: > + "This issue is being marked as stale due to inactivity. Remove label or + comment to prevent closure in 5 days." + stale-pr-message: > + "This PR is being marked as stale due to inactivity. Remove label or + comment to prevent closure in 5 days." + close-issue-message: > + "This issue is being closed because it has been marked as + stale for 5 days with no further activity." + close-pr-message: > + "This PR is being closed because it has been marked as + stale for 5 days with no further activity." 
+ days-before-issue-stale: 25 + days-before-issue-close: 5 + days-before-pr-stale: 40 + days-before-pr-close: 5 diff --git a/.github/workflows/sync.yml b/.github/workflows/sync.yml new file mode 100644 index 0000000..d505a9d --- /dev/null +++ b/.github/workflows/sync.yml @@ -0,0 +1,66 @@ +# YAML schema for GitHub Actions: +# https://help.github.com/en/actions/automating-your-workflow-with-github-actions/workflow-syntax-for-github-actions +# +# Helpful YAML parser to clarify YAML syntax: +# https://yaml-online-parser.appspot.com/ +# + +name: Sync from Upstream TF + +on: + schedule: + # 2pm UTC is 7am or 8am PT depending on daylight savings. + - cron: '0 14 * * *' + + # Allow manually triggering of the workflow. + workflow_dispatch: {} + +jobs: + sync: + runs-on: ubuntu-latest + + if: | + github.event_name == 'workflow_dispatch' || + (github.event_name == 'schedule' && github.repository == 'tensorflow/tflite-micro') + + steps: + - uses: actions/setup-python@v4 + with: + python-version: '3.10' + - uses: actions/checkout@v2 + with: + token: ${{ secrets.TFLM_BOT_REPO_TOKEN }} + + - name: Install dependencies for sync + run: | + sudo ./ci/install_bazelisk.sh + pip3 install numpy + + - name: Sync the code + run: | + ./ci/sync_from_upstream_tf.sh + git config --local user.name "TFLM-bot" + git config --local user.email "tflm-github-bot@google.com" + git add * + + if [[ $(git status --porcelain | wc -l) == 0 ]]; then + echo "no changes" + else + git commit -m "Sync from upstream TF." 
+ fi + + - name: Create Pull Request + id: create-pr + uses: peter-evans/create-pull-request@052fc72b4198ba9fbc81b818c6e1859f747d49a8 + with: + branch: sync-from-upstream-tf + delete-branch: true + token: ${{ secrets.TFLM_BOT_REPO_TOKEN }} + title: Automated sync from github.com/tensorflow/tensorflow + commit-message: Automated sync from github.com/tensorflow/tensorflow + committer: TFLM-bot + author: TFLM-bot + body: "BUG=automated sync from upstream\nNO_CHECK_TFLITE_FILES=automated sync from upstream" + labels: bot:sync-tf, ci:run + reviewers: advaitjain + diff --git a/.github/workflows/tests_entry.yml b/.github/workflows/tests_entry.yml new file mode 100644 index 0000000..07cf617 --- /dev/null +++ b/.github/workflows/tests_entry.yml @@ -0,0 +1,136 @@ +# tests_entry.yml +# Entry point to the test suites +# +# - If neither ci:run, ci:run_full or ci:ready_to_merge labels are on PR, fail and exit. +# - If the PR comment body doesn't contain 'BUG=' fail and exit. +# - If ci:run or ci:run_full label is on PR, remove label and run the test scripts. +# - If ci:ready_to_merge is on the PR and the pull_request_target type is synchronize not triggered +# by the mergify[bot] user, remove label and fail job. +# - If ci:ready_to_merge label is on PR and pull_request_target type is labeled, run the test scripts. +# +# The end result is labeling ci:run or ci:ready_to_merge will run the test scripts, +# If Mergify merges to the PR, the test scripts will run. If anyone else tries to add +# a commit to the PR or merge , the script will fail and ci:ready_to_merge will be +# removed. +# +# This script runs the test scripts directly. Scheduled or manual runs use +# run_.yml as the entry point. 
+ +name: Tests Entry Point +on: + pull_request_target: + types: + - synchronize + - labeled + +jobs: + no-labels: + runs-on: ubuntu-latest + steps: + - name: fail-without-labels + if: ${{ !(contains(github.event.pull_request.labels.*.name, 'ci:run') || + contains(github.event.pull_request.labels.*.name, 'ci:ready_to_merge') || + contains(github.event.pull_request.labels.*.name, 'ci:run_full')) }} + run: exit 1 + + ci-ready-to-merge: + runs-on: ubuntu-latest + needs: no-labels + steps: + - name: remove-ready-to-merge + if: ${{ (contains(github.event.pull_request.labels.*.name, 'ci:ready_to_merge') && + (github.event.action == 'synchronize') && + !(github.event.sender.login == 'mergify[bot]')) }} + uses: actions/github-script@v5 + with: + github-token: ${{ secrets.TFLM_BOT_REPO_TOKEN }} + script: | + github.rest.issues.removeLabel({ + issue_number: context.issue.number, + owner: context.repo.owner, + repo: context.repo.repo, + name: 'ci:ready_to_merge' + }) + - name: fail-on-bad-synch + if: ${{ (github.event.action == 'synchronize') && !(github.event.sender.login == 'mergify[bot]') }} + run: exit 1 + + ci-run: + runs-on: ubuntu-latest + needs: ci-ready-to-merge + steps: + - name: remove-cirun + if: ${{ contains(github.event.pull_request.labels.*.name, 'ci:run') }} + uses: actions/github-script@v5 + with: + github-token: ${{ secrets.TFLM_BOT_REPO_TOKEN }} + script: | + github.rest.issues.removeLabel({ + issue_number: context.issue.number, + owner: context.repo.owner, + repo: context.repo.repo, + name: 'ci:run' + }) + continue-on-error: true + + ci-run-full: + runs-on: ubuntu-latest + needs: ci-run + steps: + - name: remove-cirun-full + if: ${{ contains(github.event.pull_request.labels.*.name, 'ci:run_full') }} + uses: actions/github-script@v5 + with: + github-token: ${{ secrets.TFLM_BOT_REPO_TOKEN }} + script: | + github.rest.issues.removeLabel({ + issue_number: context.issue.number, + owner: context.repo.owner, + repo: context.repo.repo, + name: 'ci:run_full' + }) + 
continue-on-error: true + + pr-has-bug: + runs-on: ubuntu-latest + needs: ci-run-full + name: PR has Bug + steps: + - name: Check for BUG= + if: ${{ !contains(github.event.pull_request.body, 'BUG=') }} + run: | + echo "PR description requires a BUG= line with issue number." + echo "See https://testing.googleblog.com/2017/09/code-health-providing-context-with.html for additional context" + exit 1 + + call-ci: + needs: ci-run + uses: ./.github/workflows/ci.yml + with: + trigger-sha: ${{ github.event.pull_request.head.sha }} + + call-hexagon: + needs: ci-run + uses: ./.github/workflows/hexagon.yml + with: + trigger-sha: ${{ github.event.pull_request.head.sha }} + secrets: + tflm-bot-token: ${{ secrets.TFLM_BOT_PACKAGE_READ_TOKEN }} + + call-xtensa-presubmit: + needs: ci-run + uses: ./.github/workflows/xtensa_presubmit.yml + with: + trigger-sha: ${{ github.event.pull_request.head.sha }} + secrets: + tflm-bot-token: ${{ secrets.TFLM_BOT_PACKAGE_READ_TOKEN }} + + call-check-tflite-files: + needs: ci-run + uses: ./.github/workflows/check_tflite_files.yml + with: + trigger-sha: ${{ github.event.pull_request.head.sha }} + pr-number: ${{ github.event.pull_request.number }} + pr-body: ${{ github.event.pull_request.body }} + secrets: + tflm-bot-token: ${{ secrets.TFLM_BOT_PACKAGE_READ_TOKEN }} diff --git a/.github/workflows/tests_post.yml b/.github/workflows/tests_post.yml new file mode 100644 index 0000000..4919d1d --- /dev/null +++ b/.github/workflows/tests_post.yml @@ -0,0 +1,65 @@ +name: Post Tests +# This is for tests that run on a PR post merge. This to optimize CI test suite time +# for exceptionally long running test processes. Errors will still generate an +# issue against the PR/commit so will be caught. +# These tests also run on the ci:run_full label. 
+ +on: + pull_request_target: + types: + - closed + - labeled + +jobs: + riscv_postmerge: + if: ${{ github.event.pull_request.merged == true || + contains(github.event.pull_request.labels.*.name, 'ci:run_full') }} + uses: ./.github/workflows/riscv_postmerge.yml + with: + trigger-sha: ${{ github.event.pull_request.head.sha }} + secrets: + tflm-bot-token: ${{ secrets.TFLM_BOT_PACKAGE_READ_TOKEN }} + + xtensa_postmerge: + if: ${{ github.event.pull_request.merged == true || + contains(github.event.pull_request.labels.*.name, 'ci:run_full') }} + uses: ./.github/workflows/xtensa_postmerge.yml + with: + trigger-sha: ${{ github.event.pull_request.head.sha }} + secrets: + tflm-bot-token: ${{ secrets.TFLM_BOT_PACKAGE_READ_TOKEN }} + + issue_on_error: + needs: [xtensa_postmerge] + if: ${{ always() && contains(needs.*.result, 'failure') && + !contains(github.event.pull_request.labels.*.name, 'ci:run_full') }} + uses: ./.github/workflows/issue_on_error.yml + with: + repo: ${{ github.repository}} + workflow: ${{ github.workflow }} + run_number: ${{ github.run_number }} + run_id: ${{ github.run_id }} + flag_label: ci:bot_issue + pr_number: ${{ github.event.number }} + pr_link: ${{ github.event.pull_request._links.html.href }} + secrets: + token: ${{ secrets.GITHUB_TOKEN }} + + ci_run_full: + needs: [issue_on_error] + runs-on: ubuntu-latest + steps: + - name: remove-cirun-full + if: ${{ contains(github.event.pull_request.labels.*.name, 'ci:run_full') }} + uses: actions/github-script@v5 + with: + github-token: ${{ secrets.TFLM_BOT_REPO_TOKEN }} + script: | + github.rest.issues.removeLabel({ + issue_number: context.issue.number, + owner: context.repo.owner, + repo: context.repo.repo, + name: 'ci:run_full' + }) + continue-on-error: true + diff --git a/.github/workflows/xtensa_postmerge.yml b/.github/workflows/xtensa_postmerge.yml new file mode 100644 index 0000000..1bb4373 --- /dev/null +++ b/.github/workflows/xtensa_postmerge.yml @@ -0,0 +1,66 @@ +# YAML schema for GitHub Actions: 
+# https://help.github.com/en/actions/automating-your-workflow-with-github-actions/workflow-syntax-for-github-actions +# +# Helpful YAML parser to clarify YAML syntax: +# https://yaml-online-parser.appspot.com/ +# +# This file can not be run stand-alone. It is called from tests_post.yml as part of +# the ci automation or from run_xtensa.yml for scheduled or manual dispatch. + +name: Xtensa Postmerge + +on: + workflow_call: + inputs: + trigger-sha: + required: true + type: string + secrets: + tflm-bot-token: + required: true + +jobs: + f1_unit_tests: + runs-on: ubuntu-latest + + name: Fusion F1 Unit Tests (postmerge) + steps: + - uses: actions/checkout@v2 + with: + ref: ${{ inputs.trigger-sha }} + - run: | + rm -rf .git + echo ${{ secrets.tflm-bot-token }} | docker login ghcr.io -u tflm-bot --password-stdin + docker run --env XTENSA_TOOLS_VERSION=RI-2020.4-linux --rm -v `pwd`:/opt/tflite-micro ghcr.io/tflm-bot/xtensa:0.6 \ + /bin/bash -c \ + "cd /opt && tflite-micro/tensorflow/lite/micro/tools/ci_build/test_xtensa_fusion_f1.sh EXTERNAL tflite-micro/" + + vision_p6_unit_tests: + runs-on: ubuntu-latest + + name: Vision P6 Unit Tests (postmerge) + steps: + - uses: actions/checkout@v2 + with: + ref: ${{ inputs.trigger-sha }} + - run: | + rm -rf .git + echo ${{ secrets.tflm-bot-token }} | docker login ghcr.io -u tflm-bot --password-stdin + docker run --env XTENSA_TOOLS_VERSION=RI-2020.4-linux --rm -v `pwd`:/opt/tflite-micro ghcr.io/tflm-bot/xtensa:0.6 \ + /bin/bash -c \ + "cd /opt && tflite-micro/tensorflow/lite/micro/tools/ci_build/test_xtensa_vision_p6.sh RUN_TESTS tflite-micro/" + + hifimini_unit_tests: + runs-on: ubuntu-latest + + name: Hifimini Unit Tests (postmerge) + steps: + - uses: actions/checkout@v2 + with: + ref: ${{ inputs.trigger-sha }} + - run: | + rm -rf .git + echo ${{ secrets.tflm-bot-token }} | docker login ghcr.io -u tflm-bot --password-stdin + docker run --env XTENSA_TOOLS_VERSION=RI-2019.2-linux --rm -v `pwd`:/opt/tflite-micro 
ghcr.io/tflm-bot/xtensa:0.6 \ + /bin/bash -c \ + "cd /opt && tflite-micro/tensorflow/lite/micro/tools/ci_build/test_xtensa_hifimini.sh tflite-micro/" \ No newline at end of file diff --git a/.github/workflows/xtensa_presubmit.yml b/.github/workflows/xtensa_presubmit.yml new file mode 100644 index 0000000..80bad15 --- /dev/null +++ b/.github/workflows/xtensa_presubmit.yml @@ -0,0 +1,67 @@ +# YAML schema for GitHub Actions: +# https://help.github.com/en/actions/automating-your-workflow-with-github-actions/workflow-syntax-for-github-actions +# +# Helpful YAML parser to clarify YAML syntax: +# https://yaml-online-parser.appspot.com/ +# +# This file can not be run stand-alone. It is called from tests_entry.yml as part of +# the ci automation or from run_xtensa.yml for scheduled or manual dispatch. + +name: Xtensa Presubmit + +on: + workflow_call: + inputs: + trigger-sha: + required: true + type: string + secrets: + tflm-bot-token: + required: true + +jobs: + + vision_p6_presubmit: + runs-on: ubuntu-latest + + name: Vision P6 Build (presubmit) + steps: + - uses: actions/checkout@v2 + with: + ref: ${{ inputs.trigger-sha }} + - run: | + rm -rf .git + echo ${{ secrets.tflm-bot-token }} | docker login ghcr.io -u tflm-bot --password-stdin + docker run --env XTENSA_TOOLS_VERSION=RI-2020.4-linux --rm -v `pwd`:/opt/tflite-micro ghcr.io/tflm-bot/xtensa:0.6 \ + /bin/bash -c \ + "cd /opt && tflite-micro/tensorflow/lite/micro/tools/ci_build/test_xtensa_vision_p6.sh RUN_NO_TESTS tflite-micro/" + + hifi5_unit_tests: + runs-on: ubuntu-latest + + name: Hifi5 Unit Tests (presubmit) + steps: + - uses: actions/checkout@v2 + with: + ref: ${{ inputs.trigger-sha }} + - run: | + rm -rf .git + echo ${{ secrets.tflm-bot-token }} | docker login ghcr.io -u tflm-bot --password-stdin + docker run --env XTENSA_TOOLS_VERSION=RI-2022.9-linux --rm -v `pwd`:/opt/tflite-micro ghcr.io/tflm-bot/xtensa:0.6 \ + /bin/bash -c \ + "cd /opt && tflite-micro/tensorflow/lite/micro/tools/ci_build/test_xtensa_hifi5.sh 
tflite-micro/" + + hifi_3z_unit_tests: + runs-on: ubuntu-latest + + name: Hifi3z Unit Tests (presubmit) + steps: + - uses: actions/checkout@v2 + with: + ref: ${{ inputs.trigger-sha }} + - run: | + rm -rf .git + echo ${{ secrets.tflm-bot-token }} | docker login ghcr.io -u tflm-bot --password-stdin + docker run --env XTENSA_TOOLS_VERSION=RI-2020.4-linux --rm -v `pwd`:/opt/tflite-micro ghcr.io/tflm-bot/xtensa:0.6 \ + /bin/bash -c \ + "cd /opt && tflite-micro/tensorflow/lite/micro/tools/ci_build/test_xtensa_hifi3z.sh EXTERNAL tflite-micro/" \ No newline at end of file diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..1d1c4de --- /dev/null +++ b/.gitignore @@ -0,0 +1,15 @@ +/bazel-* +/compile_commands.json +*.swp +.vscode/ +*audio_frontend* +*google* +*__pycache__* +venv +gen + +# Ignore the directory in which `clangd` stores its local index. +/.cache/ + +# Ignore the `external` symlink added by `bazel-compile-commands-extractor` +/external diff --git a/AUTHORS b/AUTHORS new file mode 100644 index 0000000..f3a6dae --- /dev/null +++ b/AUTHORS @@ -0,0 +1,11 @@ +# This is the list of Tensorflow's significant contributors. +# +# This does not necessarily list everyone who has contributed code, +# especially since many employees of one corporation may be contributing. +# To see the full list of contributors, see the revision history in +# source control. + +Google LLC +Yuan Tang +Arm Ltd + diff --git a/BUILD b/BUILD new file mode 100644 index 0000000..2b539af --- /dev/null +++ b/BUILD @@ -0,0 +1,9 @@ +load("@hedron_compile_commands//:refresh_compile_commands.bzl", "refresh_compile_commands") + +# `bazel run` this target to generate compile_commands.json, which can be used +# by various tools like editors and LSPs to provide features like intelligent +# navigation and autocompletion based on the source graph and compiler commands. 
+refresh_compile_commands( + name = "refresh_compile_commands", + targets = ["//..."], +) diff --git a/CODEOWNERS b/CODEOWNERS new file mode 100644 index 0000000..6c8b497 --- /dev/null +++ b/CODEOWNERS @@ -0,0 +1,4 @@ +* @tensorflow/micro + +/.github/ @advaitjain @rockyrhodes @rascani +/ci/ @advaitjain @rockyrhodes @rascani diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md new file mode 100644 index 0000000..a442570 --- /dev/null +++ b/CONTRIBUTING.md @@ -0,0 +1,302 @@ + + + + * [How to Contribute](#how-to-contribute) + * [Contributor License Agreement](#contributor-license-agreement) + * [Community Guidelines](#community-guidelines) + * [Code Contribution Guidelines](#code-contribution-guidelines) + * [General Pull Request Guidelines](#general-pull-request-guidelines) + * [Guidelines for Specific Contribution Categories](#guidelines-for-specific-contribution-categories) + * [Bug Fixes](#bug-fixes) + * [Reference Kernel Implementations](#reference-kernel-implementations) + * [Optimized Kernel Implementations](#optimized-kernel-implementations) + * [New Target / Platform / IDE / Examples](#new-target--platform--ide--examples) + * [Development Workflow Notes](#development-workflow-notes) + * [Initial Setup](#initial-setup) + * [Before submitting your PR](#before-submitting-your-pr) + * [During the PR review](#during-the-pr-review) + * [Reviewer notes](#reviewer-notes) + * [Python notes](#python-notes) + * [Continuous Integration System](#continuous-integration-system) + + + + + +# How to Contribute + +We'd love to accept your patches and contributions to this project. There are +just a few small guidelines you need to follow. + +## Contributor License Agreement + +Contributions to this project must be accompanied by a Contributor License +Agreement (CLA). You (or your employer) retain the copyright to your +contribution; this simply gives us permission to use and redistribute your +contributions as part of the project. 
Head over to + to see your current agreements on file or +to sign a new one. + +You generally only need to submit a CLA once, so if you've already submitted one +(even if it was for a different project), you probably don't need to do it +again. + +## Community Guidelines + +This project follows +[Google's Open Source Community Guidelines](https://opensource.google/conduct/). + +# Code Contribution Guidelines + +We provide some general guidelines with the goal of enabling community +contributions while still maintaining code health, maintainability, and +consistency in style. + +Please note that while these guidelines may seem onerous to some developers, +they are derived from Google's software engineering best practices. + +Before we describe project-specific guidelines, we recommend that external +contributors read these tips from the Google Testing Blog: + +* [Code Health: Providing Context with Commit Messages and Bug Reports](https://testing.googleblog.com/2017/09/code-health-providing-context-with.html) +* [Code Health: Understanding Code In Review](https://testing.googleblog.com/2018/05/code-health-understanding-code-in-review.html) +* [Code Health: Too Many Comments on Your Code Reviews?](https://testing.googleblog.com/2017/06/code-health-too-many-comments-on-your.html) +* [Code Health: To Comment or Not to Comment?](https://testing.googleblog.com/2017/07/code-health-to-comment-or-not-to-comment.html) + +We also recommend that contributors take a look at the +[Tensorflow Contributing Guidelines](https://github.com/tensorflow/tensorflow/blob/master/CONTRIBUTING.md). + +## General Pull Request Guidelines + +We strongly recommend that contributors: + +1. Initiate a conversation with the TFLM team via a + [TF Lite Micro Github issue](https://github.com/tensorflow/tensorflow/issues/new?labels=comp%3Amicro&template=70-tflite-micro-issue.md) + as early as possible. 
+ + * This enables us to give guidance on how to proceed, prevent duplicated + effort and also point to alternatives as well as context if we are not + able to accept a particular contribution at a given time. + + * Ideally, you should make an issue ***before*** starting to work on a + pull request and provide context on both what you want to contribute and + why. + +1. Once step 1. is complete and it is determined that a PR from an external + contributor is the way to go, please follow these guidelines from + [Google's Engineering Practices documentation](https://google.github.io/eng-practices/): + + * [Send Small Pull Requests](https://google.github.io/eng-practices/review/developer/small-cls.html) + + * If a pull request is doing more than one thing, the reviewer will + request that it be broken up into two or more PRs. + + * [Write Good Pull Request Descriptions](https://google.github.io/eng-practices/review/developer/cl-descriptions.html) + + * We require that all PR descriptions link to the GitHub issue + created in step 1 via the text `BUG=#nn` on a line by itself [^1]. This + is enforced by CI. + + [^1]: This despite GitHub having additional forms of + [linked references](https://docs.github.com/en/get-started/writing-on-github/working-with-advanced-formatting/autolinked-references-and-urls). + +1. Unit tests are critical to a healthy codebase. PRs without tests should be + the exception rather than the norm. And contributions to improve, simplify, + or make the unit tests more exhaustive are welcome! Please refer to + [this guideline](https://google.github.io/eng-practices/review/developer/small-cls.html#test_code) + on how test code and writing small PRs should be reconciled. + +## Guidelines for Specific Contribution Categories + +We provide some additional guidelines for different categories of contributions. 
+ +### Bug Fixes + +Pull requests that fix bugs are always welcome and often uncontroversial, unless +there is a conflict between different requirements from the platform, or if +fixing a bug needs a bigger architectural change. + +1. Create a [Github issue](https://github.com/tensorflow/tflite-micro/issues/new/choose) + to determine the scope of the bug fix. +1. Send a PR (if that is determined to be the best path forward). +1. Bugfix PRs should be accompanied by a test case that fails prior to the fix + and passes with the fix. This validates that the fix works as expected, and + helps prevent future regressions. + +### Reference Kernel Implementations + +Pull requests that port reference kernels from TF Lite Mobile to TF Lite Micro +are welcome once we have context from the contributor on why the additional +kernel is needed. + +Please see the [reference kernel porting guide](tensorflow/lite/micro/docs/porting_reference_ops.md) +for more details of that process. + +### Optimized Kernel Implementations +Please see the [optimized kernel implementations guide](tensorflow/lite/micro/docs/optimized_kernel_implementations.md). + +### New Target / Platform / IDE / Examples + +Please see the [new platform support guide](tensorflow/lite/micro/docs/new_platform_support.md) +for documentation on how to add TFLM support for your particular platform. + + +# Development Workflow Notes + +## Initial Setup + +Below are some tips that might be useful and improve the development experience. + +* Add the [Refined GitHub](https://github.com/sindresorhus/refined-github) + plugin to make the github experience even better. + +* Code search the [TfLite Micro codebase](https://sourcegraph.com/github.com/tensorflow/tflite-micro@main) + on Sourcegraph. And optionally install the [plugin that enables GitHub integration](https://docs.sourcegraph.com/integration/github#github-integration-with-sourcegraph). 
+ +* Install [bazel](ci/install_bazelisk.sh) and [buildifier](ci/install_buildifier.sh). + +* Install the latest clang and clang-format. For example, [here](ci/Dockerfile.micro) + is the what we do for the TFLM continuous integration Docker container. + +* Get a copy of [cpplint](https://github.com/google/styleguide/tree/gh-pages/cpplint) + or install it: + +* Install Pillow and Wave. For example, [here](ci/Dockerfile.micro) is what we + do for the TFLM continuous integration Docker container. + + ``` + pip install cpplint + ``` + +* [yapf](https://github.com/google/yapf/) should be used for formatting Python + code. For example: + + ``` + pip install yapf + yapf log_parser.py -i --style='{based_on_style: pep8, indent_width: 2}' + ``` + +* Add a git hook to check for code style etc. prior to creating a pull request: + ``` + cp tensorflow/lite/micro/tools/dev_setup/pre-push.tflm .git/hooks/pre-push + ``` + +## Before submitting your PR + +1. Run in-place clang-format on all the files that are modified in your git + tree with + + ``` + clang-format -i -style=google `git ls-files -m | grep "\.cc"` + clang-format -i -style=google `git ls-files -m | grep "\.h"` + ``` + +1. Make sure your code is lint-free. + + ``` + cpplint `git ls-files -m` + ``` + +1. Run all the tests for x86, and any other platform that you are modifying. + + ``` + tensorflow/lite/micro/tools/ci_build/test_x86_default.sh + ``` + + Please check the READMEs in the optimized kernel directories for specific + instructions. + +1. Sometimes, bugs are caught by the sanitizers that can go unnoticed + via the Makefile. 
To run a test with the different sanitizers, use the + following commands (replace `micro_interpreter_test` with the target that you + want to test: + + ``` + CC=clang bazel run --config=asan tensorflow/lite/micro:micro_interpreter_test + CC=clang bazel run --config=msan tensorflow/lite/micro:micro_interpreter_test + CC=clang bazel run --config=ubsan tensorflow/lite/micro:micro_interpreter_test + ``` + +## During the PR review + +1. Do not change the git version history. + + * Always merge upstream/main (***do not rebase***) and no force-pushes + please. + + * Having an extra merge commit is ok as the github review tool handles + that gracefully. + + Assuming that you forked tensorflow and added a remote called upstream with: + + ``` + git remote add upstream https://github.com/tensorflow/tflite-micro.git + ``` + + Fetch the latest changes from upstream and merge into your local branch. + + ``` + git fetch upstream + git merge upstream/main + ``` + + In case of a merge conflict, resolve via: + + ``` + git mergetool + + # Use your favorite diff tools (e.g. meld) to resolve the conflicts. + + git add + + git commit + ``` + +1. If a force push seems to be the only path forward, please stop and let your + PR reviewer know ***before*** force pushing. We will attempt to do the merge + for you. This will also help us better understand in what conditions a + force-push may be unavoidable. + +## Reviewer notes + +* [GIthub CLI](https://cli.github.com) can be useful to quickly checkout a PR + to test locally. + + `gh pr checkout ` + +* Google engineers on the Tensorflow team will have the permissions to push + edits to most PRs. This can be useful to make some small fixes as a result + of errors due to internal checks that are not easily reproducible via + github. + + One example of this is + [this comment](https://github.com/tensorflow/tensorflow/pull/38634#issuecomment-683190474). 
+ + And a sketch of the steps: + + ``` + git remote add git@github.com:/tflite-micro.git + git fetch + + git checkout -b / + + # make changes and commit to local branch + + # push changes to remove branch + + git push + + # remove the temp remote to clean up your git environment. + + git remote rm + ``` + +## Python notes + +* [TFLM Python guide](docs/python.md) + +# Continuous Integration System + * Some [additional documentation](docs/continuous_integration.md) on the TFLM CI. diff --git a/LICENSE b/LICENSE new file mode 100644 index 0000000..d645695 --- /dev/null +++ b/LICENSE @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. 
+ + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." 
+ + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. 
+ + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. 
+ + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/README.md b/README.md new file mode 100644 index 0000000..e997acf --- /dev/null +++ b/README.md @@ -0,0 +1,111 @@ + + * [TensorFlow Lite for Microcontrollers](#tensorflow-lite-for-microcontrollers) + * [Build Status](#build-status) + * [Official Builds](#official-builds) + * [Community Supported TFLM Examples](#community-supported-tflm-examples) + * [Community Supported Kernels and Unit Tests](#community-supported-kernels-and-unit-tests) + * [Contributing](#contributing) + * [Getting Help](#getting-help) + * [Additional Documentation](#additional-documentation) + * [RFCs](#rfcs) + + + + + +# TensorFlow Lite for Microcontrollers + +TensorFlow Lite for Microcontrollers is a port of TensorFlow Lite designed to +run machine learning models on DSPs, microcontrollers and other devices with +limited memory. 
+ +Additional Links: + * [Tensorflow github repository](https://github.com/tensorflow/tensorflow/) + * [TFLM at tensorflow.org](https://www.tensorflow.org/lite/microcontrollers) + +# Build Status + + * [GitHub Status](https://www.githubstatus.com/) + +## Official Builds + +Build Type | Status | +----------- | --------------| +CI (Linux) | [![CI](https://github.com/tensorflow/tflite-micro/actions/workflows/run_ci.yml/badge.svg)](https://github.com/tensorflow/tflite-micro/actions/workflows/run_ci.yml) | +Code Sync | [![Sync from Upstream TF](https://github.com/tensorflow/tflite-micro/actions/workflows/sync.yml/badge.svg)](https://github.com/tensorflow/tflite-micro/actions/workflows/sync.yml) | + + +## Community Supported TFLM Examples +This table captures platforms that TFLM has been ported to. Please see +[New Platform Support](tensorflow/lite/micro/docs/new_platform_support.md) for +additional documentation. + +Platform | Status | +----------- | --------------| +Arduino | [![Arduino](https://github.com/tensorflow/tflite-micro-arduino-examples/actions/workflows/ci.yml/badge.svg)](https://github.com/tensorflow/tflite-micro-arduino-examples/actions/workflows/ci.yml) [![Antmicro](https://github.com/antmicro/tensorflow-arduino-examples/actions/workflows/test_examples.yml/badge.svg)](https://github.com/antmicro/tensorflow-arduino-examples/actions/workflows/test_examples.yml) | +[Coral Dev Board Micro](https://coral.ai/products/dev-board-micro) | [TFLM + EdgeTPU Examples for Coral Dev Board Micro](https://github.com/google-coral/coralmicro) | +Espressif Systems Dev Boards | [![ESP Dev Boards](https://github.com/espressif/tflite-micro-esp-examples/actions/workflows/ci.yml/badge.svg)](https://github.com/espressif/tflite-micro-esp-examples/actions/workflows/ci.yml) | +Renesas Boards | [TFLM Examples for Renesas Boards](https://github.com/renesas/tflite-micro-renesas) | +Silicon Labs Dev Kits | [TFLM Examples for Silicon Labs Dev 
Kits](https://github.com/SiliconLabs/tflite-micro-efr32-examples) +Sparkfun Edge | [![Sparkfun Edge](https://github.com/advaitjain/tflite-micro-sparkfun-edge-examples/actions/workflows/ci.yml/badge.svg?event=schedule)](https://github.com/advaitjain/tflite-micro-sparkfun-edge-examples/actions/workflows/ci.yml) +Texas Instruments Dev Boards | [![Texas Instruments Dev Boards](https://github.com/TexasInstruments/tensorflow-lite-micro-examples/actions/workflows/ci.yml/badge.svg?event=status)](https://github.com/TexasInstruments/tensorflow-lite-micro-examples/actions/workflows/ci.yml) + + +## Community Supported Kernels and Unit Tests +This is a list of targets that have optimized kernel implementations and/or run +the TFLM unit tests using software emulation or instruction set simulators. + +Build Type | Status | +----------- | --------------| +Cortex-M | [![Cortex-M](https://github.com/tensorflow/tflite-micro/actions/workflows/cortex_m.yml/badge.svg)](https://github.com/tensorflow/tflite-micro/actions/workflows/cortex_m.yml) | +Hexagon | [![Hexagon](https://github.com/tensorflow/tflite-micro/actions/workflows/run_hexagon.yml/badge.svg)](https://github.com/tensorflow/tflite-micro/actions/workflows/run_hexagon.yml) | +RISC-V | [![RISC-V](https://github.com/tensorflow/tflite-micro/actions/workflows/riscv.yml/badge.svg)](https://github.com/tensorflow/tflite-micro/actions/workflows/riscv.yml) | +Xtensa | [![Xtensa](https://github.com/tensorflow/tflite-micro/actions/workflows/run_xtensa.yml/badge.svg)](https://github.com/tensorflow/tflite-micro/actions/workflows/run_xtensa.yml) | +Generate Integration Test | [![Generate Integration Test](https://github.com/tensorflow/tflite-micro/actions/workflows/generate_integration_tests.yml/badge.svg)](https://github.com/tensorflow/tflite-micro/actions/workflows/generate_integration_tests.yml) | + + +# Contributing +See our [contribution documentation](CONTRIBUTING.md). 
+ +# Getting Help + +A [Github issue](https://github.com/tensorflow/tflite-micro/issues/new/choose) +should be the primary method of getting in touch with the TensorFlow Lite Micro +(TFLM) team. + +The following resources may also be useful: + +1. SIG Micro [email group](https://groups.google.com/a/tensorflow.org/g/micro) + and + [monthly meetings](http://doc/1YHq9rmhrOUdcZnrEnVCWvd87s2wQbq4z17HbeRl-DBc). + +1. SIG Micro [gitter chat room](https://gitter.im/tensorflow/sig-micro). + +1. For questions that are not specific to TFLM, please consult the broader TensorFlow project, e.g.: + * Create a topic on the [TensorFlow Discourse forum](https://discuss.tensorflow.org) + * Send an email to the [TensorFlow Lite mailing list](https://groups.google.com/a/tensorflow.org/g/tflite) + * Create a [TensorFlow issue](https://github.com/tensorflow/tensorflow/issues/new/choose) + * Create a [Model Optimization Toolkit](https://github.com/tensorflow/model-optimization) issue + +# Additional Documentation + + * [Continuous Integration](docs/continuous_integration.md) + * [Benchmarks](tensorflow/lite/micro/benchmarks/README.md) + * [Profiling](tensorflow/lite/micro/docs/profiling.md) + * [Memory Management](tensorflow/lite/micro/docs/memory_management.md) + * [Logging](tensorflow/lite/micro/docs/logging.md) + * [Porting Reference Kernels from TfLite to TFLM](tensorflow/lite/micro/docs/porting_reference_ops.md) + * [Optimized Kernel Implementations](tensorflow/lite/micro/docs/optimized_kernel_implementations.md) + * [New Platform Support](tensorflow/lite/micro/docs/new_platform_support.md) + * Platform/IP support + * [Arm IP support](tensorflow/lite/micro/docs/arm.md) + * [Software Emulation with Renode](tensorflow/lite/micro/docs/renode.md) + * [Software Emulation with QEMU](tensorflow/lite/micro/docs/qemu.md) + * [Python Dev Guide](docs/python.md) + * [Automatically Generated Files](docs/automatically_generated_files.md) + * [Python Interpreter 
Guide](python/tflite_micro/README.md) + +# RFCs + +1. [Pre-allocated tensors](tensorflow/lite/micro/docs/rfc/001_preallocated_tensors.md) +1. [TensorFlow Lite for Microcontrollers Port of 16x8 Quantized Operators](tensorflow/lite/micro/docs/rfc/002_16x8_quantization_port.md) diff --git a/SECURITY.md b/SECURITY.md new file mode 100644 index 0000000..44e85ac --- /dev/null +++ b/SECURITY.md @@ -0,0 +1 @@ +Please refer to: https://github.com/tensorflow/tensorflow/blob/master/SECURITY.md diff --git a/WORKSPACE b/WORKSPACE new file mode 100644 index 0000000..f881df9 --- /dev/null +++ b/WORKSPACE @@ -0,0 +1,80 @@ +# Copyright 2021 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# ============================================================================== + +workspace(name = "tflite_micro") + +load("@bazel_tools//tools/build_defs/repo:http.bzl", "http_archive") + +# compile_commands.json generator +http_archive( + name = "hedron_compile_commands", + url = "https://github.com/hedronvision/bazel-compile-commands-extractor/archive/1266d6a25314d165ca78d0061d3399e909b7920e.tar.gz", + strip_prefix = "bazel-compile-commands-extractor-1266d6a25314d165ca78d0061d3399e909b7920e", + sha256 = "bacabfe758676fdc19e4bea7c4a3ac99c7e7378d259a9f1054d341c6a6b44ff6", +) +load("@hedron_compile_commands//:workspace_setup.bzl", "hedron_compile_commands_setup") +hedron_compile_commands_setup() + +http_archive( + name = "rules_python", + sha256 = "497ca47374f48c8b067d786b512ac10a276211810f4a580178ee9b9ad139323a", + strip_prefix = "rules_python-0.16.1", + url = "https://github.com/bazelbuild/rules_python/archive/refs/tags/0.16.1.tar.gz", +) + +load("@rules_python//python:pip.bzl", "pip_parse") +pip_parse( + name = "tflm_pip_deps", + requirements_lock = "//third_party:python_requirements.txt", +) + +load("@tflm_pip_deps//:requirements.bzl", "install_deps", "requirement") +install_deps() + +load("//tensorflow:workspace.bzl", "workspace") +workspace() + +http_archive( + name = "pybind11_bazel", + strip_prefix = "pybind11_bazel-faf56fb3df11287f26dbc66fdedf60a2fc2c6631", + urls = ["https://github.com/pybind/pybind11_bazel/archive/faf56fb3df11287f26dbc66fdedf60a2fc2c6631.zip"], + sha256 = "a185aa68c93b9f62c80fcb3aadc3c83c763854750dc3f38be1dadcb7be223837", +) + +http_archive( + name = "pybind11", + build_file = "@pybind11_bazel//:pybind11.BUILD", + strip_prefix = "pybind11-2.10.0", + urls = ["https://github.com/pybind/pybind11/archive/refs/tags/v2.10.0.tar.gz"], + sha256 = "eacf582fa8f696227988d08cfc46121770823839fe9e301a20fbce67e7cd70ec", +) + +load("@pybind11_bazel//:python_configure.bzl", "python_configure") +python_configure(name = "local_config_python", 
python_version = "3") + +load("//python:py_pkg_cc_deps.bzl", "py_pkg_cc_deps") + +py_pkg_cc_deps( + name = "numpy_cc_deps", + includes = ["numpy/core/include"], + pkg = requirement("numpy"), +) + +py_pkg_cc_deps( + name = "tensorflow_cc_deps", + includes = ["tensorflow/include"], + libs = ["tensorflow/libtensorflow_framework.so.2"], + pkg = requirement("tensorflow-cpu"), +) diff --git a/ci/Dockerfile.hexagon b/ci/Dockerfile.hexagon new file mode 100644 index 0000000..9e2ad54 --- /dev/null +++ b/ci/Dockerfile.hexagon @@ -0,0 +1,47 @@ +FROM ubuntu:bionic +ARG DEBIAN_FRONTEND=noninteractive + +RUN \ + apt update && \ + apt install -y \ + automake \ + build-essential \ + curl \ + git \ + unzip \ + wget \ + python3 \ + python \ + python3-pip + +WORKDIR /opt/hexagon + +COPY ./qualcomm_hexagon_sdk_3_5_1_linux.zip . + +RUN \ + pip3 install --upgrade pip setuptools wheel + +RUN \ + pip3 install Pillow + +RUN \ + pip3 install numpy + +RUN unzip qualcomm_hexagon_sdk_3_5_1_linux.zip && \ + rm qualcomm_hexagon_sdk_3_5_1_linux.zip && \ + cd qualcomm_hexagon_sdk_3_5_1_linux && \ + chmod +x qualcomm_hexagon_sdk_3_5_1_eval.bin && \ + ./qualcomm_hexagon_sdk_3_5_1_eval.bin -i silent -DDOWNLOAD_ANDROID=false -DDOWNLOAD_FULL_ANDROID=false -DDOWNLOAD_ECLIPSE=false && \ + cd /opt/hexagon && rm -rf qualcomm_hexagon_sdk_3_5_1_linux + +COPY ./hexagon_tflm_core.a /root/Qualcomm/ + +ENV HEXAGON_TFLM_CORE=/root/Qualcomm/hexagon_tflm_core.a +ENV HEXAGON_CPU_VER=v66 +ENV HEXAGON_SDK_ROOT=/root/Qualcomm/Hexagon_SDK/3.5.1 +ENV HEXAGON_ROOT=${HEXAGON_SDK_ROOT}/tools/HEXAGON_Tools/ +ENV HEXAGON_TOOL_VER=8.3.07 +ENV PATH=${HEXAGON_ROOT}/${HEXAGON_TOOL_VER}/Tools/bin:${PATH} + + +CMD hexagon-clang++ diff --git a/ci/Dockerfile.micro b/ci/Dockerfile.micro new file mode 100644 index 0000000..88d9cae --- /dev/null +++ b/ci/Dockerfile.micro @@ -0,0 +1,77 @@ +# This docker container can be used to run all the TFLM CI checks. +# +# It is only used as part of the GitHub workflows to test for code-style. 
But +# the container is available and ready for use to run all the checks locally, +# in case that is useful for debugging. See all the versions at +# https://github.com/users/TFLM-bot/packages/container/tflm-ci/versions +# +# docker pull ghcr.io/tflm-bot/tflm-ci: +# +# Build you own container with: +# docker build -f ci/Dockerfile.micro -t tflm-ci . +# +# Use a prebuilt Python image instead of base Ubuntu to speed up the build process, +# since it has all the build dependencies we need for Micro and downloads much faster +# than the install process. + +# Using a multistage build so that the build tools required for stage 1 don't make the +# CI container unnecessarily large. +FROM python:3.10-bullseye AS qemu_builder +RUN apt-get update +RUN apt-get install -y ninja-build wget xz-utils +COPY ci/install_qemu.sh ./ +# Installs all built files into /qemu_install rather than /usr/local default. +RUN ./install_qemu.sh /tmp/qemu_install + +# This stage is the final CI container. +FROM python:3.10-bullseye as tflm-ci + +RUN apt-get update + +RUN apt-get install -y zip xxd sudo + +RUN apt install -y lsb-release wget software-properties-common gnupg +RUN wget https://apt.llvm.org/llvm.sh +RUN chmod +x llvm.sh +RUN ./llvm.sh 16 +RUN ln -s /usr/bin/clang-16 /usr/bin/clang +RUN ln -s /usr/bin/clang++-16 /usr/bin/clang++ + +RUN apt-get install clang-format-16 +RUN ln -s /usr/bin/clang-format-16 /usr/bin/clang-format + +# Needed when using the Dockerfile locally. +RUN git config --global --add safe.directory /opt/tflm + +# Needed when the docker container is used with GitHub actions. +RUN git config --global --add safe.directory /github/workspace + +# Install yapf to check for Python formatting as part of the TFLM continuous +# integration. 
+RUN pip install yapf==0.32.0 + +# Pillow was added first for the C array generation as a result of the following +# PRs: +# https://github.com/tensorflow/tflite-micro/pull/337 +# https://github.com/tensorflow/tflite-micro/pull/410 +RUN pip install Pillow +RUN pip install Wave + +# necessary bits for create_size_log scripts +RUN pip install pandas +RUN pip install matplotlib +RUN pip install six + +# Install Renode test dependencies +RUN pip install pyyaml requests psutil robotframework==4.0.1 + +# Install QEMU from build container qemu_builder into tflm-ci container. +# We're using a two stage build to keep the CI container smaller. +WORKDIR /usr/local +# Merge built files into /usr/local so that the path is already setup. +COPY --from=qemu_builder /tmp/qemu_install/. . + +WORKDIR / +COPY ci/*.sh /install/ +RUN /install/install_bazelisk.sh +RUN /install/install_buildifier.sh diff --git a/ci/check_tflite_files.py b/ci/check_tflite_files.py new file mode 100644 index 0000000..7e2ab48 --- /dev/null +++ b/ci/check_tflite_files.py @@ -0,0 +1,34 @@ +# Copyright 2021 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# ============================================================================== + +import argparse +import sys + +if __name__=="__main__": + parser = argparse.ArgumentParser() + parser.add_argument("pr_files", help="File with list of files modified by the Pull Request", default="") + args = parser.parse_args() + + tflite_files = set(line.strip() for line in open("ci/tflite_files.txt")) + pr_files = set(line.strip() for line in open(args.pr_files)) + + tflite_files_in_pr = tflite_files.intersection(pr_files) + + if len(tflite_files_in_pr) != 0: + print("The following files should be modified in the upstream Tensorflow repo:") + print("\n".join(tflite_files_in_pr)) + sys.exit(1) + else: + print("No TfLite files are modified in the PR. We can proceed.") diff --git a/ci/flatbuffers_for_tf_sync/BUILD b/ci/flatbuffers_for_tf_sync/BUILD new file mode 100644 index 0000000..82bab3f --- /dev/null +++ b/ci/flatbuffers_for_tf_sync/BUILD @@ -0,0 +1 @@ +# This empty BUILD file is required to make Bazel treat this directory as a package. diff --git a/ci/flatbuffers_for_tf_sync/BUILD.system b/ci/flatbuffers_for_tf_sync/BUILD.system new file mode 100644 index 0000000..8fe4d7a --- /dev/null +++ b/ci/flatbuffers_for_tf_sync/BUILD.system @@ -0,0 +1,43 @@ +licenses(["notice"]) # Apache 2.0 + +filegroup( + name = "LICENSE.txt", + visibility = ["//visibility:public"], +) + +# Public flatc library to compile flatbuffer files at runtime. +cc_library( + name = "flatbuffers", + linkopts = ["-lflatbuffers"], + visibility = ["//visibility:public"], +) + +# Public flatc compiler library. +cc_library( + name = "flatc_library", + linkopts = ["-lflatbuffers"], + visibility = ["//visibility:public"], +) + +genrule( + name = "lnflatc", + outs = ["flatc.bin"], + cmd = "ln -s $$(which flatc) $@", +) + +# Public flatc compiler. 
+sh_binary( + name = "flatc", + srcs = ["flatc.bin"], + visibility = ["//visibility:public"], +) + +cc_library( + name = "runtime_cc", + visibility = ["//visibility:public"], +) + +py_library( + name = "runtime_py", + visibility = ["//visibility:public"], +) diff --git a/ci/flatbuffers_for_tf_sync/build_defs.bzl b/ci/flatbuffers_for_tf_sync/build_defs.bzl new file mode 100644 index 0000000..94516de --- /dev/null +++ b/ci/flatbuffers_for_tf_sync/build_defs.bzl @@ -0,0 +1,639 @@ +"""BUILD rules for generating flatbuffer files.""" + +load("@build_bazel_rules_android//android:rules.bzl", "android_library") + +flatc_path = "@flatbuffers//:flatc" +zip_files = "//tensorflow/lite/tools:zip_files" + +DEFAULT_INCLUDE_PATHS = [ + "./", + "$(GENDIR)", + "$(BINDIR)", +] + +DEFAULT_FLATC_ARGS = [ + "--no-union-value-namespacing", + "--gen-object-api", +] + +def flatbuffer_library_public( + name, + srcs, + outs, + language_flag, + out_prefix = "", + includes = [], + include_paths = [], + compatible_with = [], + flatc_args = DEFAULT_FLATC_ARGS, + reflection_name = "", + reflection_visibility = None, + output_to_bindir = False): + """Generates code files for reading/writing the given flatbuffers in the requested language using the public compiler. + + Outs: + filegroup(name): all generated source files. + Fileset([reflection_name]): (Optional) all generated reflection binaries. + + Args: + name: Rule name. + srcs: Source .fbs files. Sent in order to the compiler. + outs: Output files from flatc. + language_flag: Target language flag. One of [-c, -j, -js]. + out_prefix: Prepend this path to the front of all generated files except on + single source targets. Usually is a directory name. + includes: Optional, list of filegroups of schemas that the srcs depend on. + include_paths: Optional, list of paths the includes files can be found in. + compatible_with: Optional, passed to genrule for environments this rule + can be built for. 
+ flatc_args: Optional, list of additional arguments to pass to flatc. + reflection_name: Optional, if set this will generate the flatbuffer + reflection binaries for the schemas. + reflection_visibility: The visibility of the generated reflection Fileset. + output_to_bindir: Passed to genrule for output to bin directory. + """ + include_paths_cmd = ["-I %s" % (s) for s in include_paths] + + # '$(@D)' when given a single source target will give the appropriate + # directory. Appending 'out_prefix' is only necessary when given a build + # target with multiple sources. + output_directory = ( + ("-o $(@D)/%s" % (out_prefix)) if len(srcs) > 1 else ("-o $(@D)") + ) + genrule_cmd = " ".join([ + "for f in $(SRCS); do", + "$(location %s)" % (flatc_path), + " ".join(flatc_args), + " ".join(include_paths_cmd), + language_flag, + output_directory, + "$$f;", + "done", + ]) + native.genrule( + name = name, + srcs = srcs, + outs = outs, + output_to_bindir = output_to_bindir, + compatible_with = compatible_with, + tools = includes + [flatc_path], + cmd = genrule_cmd, + message = "Generating flatbuffer files for %s:" % (name), + ) + if reflection_name: + reflection_genrule_cmd = " ".join([ + "for f in $(SRCS); do", + "$(location %s)" % (flatc_path), + "-b --schema", + " ".join(flatc_args), + " ".join(include_paths_cmd), + language_flag, + output_directory, + "$$f;", + "done", + ]) + reflection_outs = [ + (out_prefix + "%s.bfbs") % (s.replace(".fbs", "").split("/")[-1]) + for s in srcs + ] + native.genrule( + name = "%s_srcs" % reflection_name, + srcs = srcs, + outs = reflection_outs, + output_to_bindir = output_to_bindir, + compatible_with = compatible_with, + tools = includes + [flatc_path], + cmd = reflection_genrule_cmd, + message = "Generating flatbuffer reflection binary for %s:" % (name), + ) + # TODO(b/114456773): Make bazel rules proper and supported by flatbuffer + # Have to comment this since FilesetEntry is not supported in bazel + # skylark. 
+ # native.Fileset( + # name = reflection_name, + # out = "%s_out" % reflection_name, + # entries = [ + # native.FilesetEntry(files = reflection_outs), + # ], + # visibility = reflection_visibility, + # compatible_with = compatible_with, + # ) + +def flatbuffer_cc_library( + name, + srcs, + srcs_filegroup_name = "", + out_prefix = "", + includes = [], + include_paths = [], + compatible_with = [], + flatc_args = DEFAULT_FLATC_ARGS, + visibility = None, + srcs_filegroup_visibility = None, + gen_reflections = False): + '''A cc_library with the generated reader/writers for the given flatbuffer definitions. + + Outs: + filegroup([name]_srcs): all generated .h files. + filegroup(srcs_filegroup_name if specified, or [name]_includes if not): + Other flatbuffer_cc_library's can pass this in for their `includes` + parameter, if they depend on the schemas in this library. + Fileset([name]_reflection): (Optional) all generated reflection binaries. + cc_library([name]): library with sources and flatbuffers deps. + + Remarks: + ** Because the genrule used to call flatc does not have any trivial way of + computing the output list of files transitively generated by includes and + --gen-includes (the default) being defined for flatc, the --gen-includes + flag will not work as expected. The way around this is to add a dependency + to the flatbuffer_cc_library defined alongside the flatc included Fileset. + For example you might define: + + flatbuffer_cc_library( + name = "my_fbs", + srcs = [ "schemas/foo.fbs" ], + includes = [ "//third_party/bazz:bazz_fbs_includes" ], + ) + + In which foo.fbs includes a few files from the Fileset defined at + //third_party/bazz:bazz_fbs_includes. 
When compiling the library that + includes foo_generated.h, and therefore has my_fbs as a dependency, it + will fail to find any of the bazz *_generated.h files unless you also + add bazz's flatbuffer_cc_library to your own dependency list, e.g.: + + cc_library( + name = "my_lib", + deps = [ + ":my_fbs", + "//third_party/bazz:bazz_fbs" + ], + ) + + Happy dependent Flatbuffering! + + Args: + name: Rule name. + srcs: Source .fbs files. Sent in order to the compiler. + srcs_filegroup_name: Name of the output filegroup that holds srcs. Pass this + filegroup into the `includes` parameter of any other + flatbuffer_cc_library that depends on this one's schemas. + out_prefix: Prepend this path to the front of all generated files. Usually + is a directory name. + includes: Optional, list of filegroups of schemas that the srcs depend on. + ** SEE REMARKS BELOW ** + include_paths: Optional, list of paths the includes files can be found in. + compatible_with: Optional, passed to genrule for environments this rule + can be built for + flatc_args: Optional list of additional arguments to pass to flatc + (e.g. --gen-mutable). + visibility: The visibility of the generated cc_library. By default, use the + default visibility of the project. + srcs_filegroup_visibility: The visibility of the generated srcs filegroup. + By default, use the value of the visibility parameter above. + gen_reflections: Optional, if true this will generate the flatbuffer + reflection binaries for the schemas. 
+ ''' + output_headers = [ + (out_prefix + "%s_generated.h") % (s.replace(".fbs", "").split("/")[-1]) + for s in srcs + ] + reflection_name = "%s_reflection" % name if gen_reflections else "" + + flatbuffer_library_public( + name = "%s_srcs" % (name), + srcs = srcs, + outs = output_headers, + language_flag = "-c", + out_prefix = out_prefix, + includes = includes, + include_paths = include_paths, + compatible_with = compatible_with, + flatc_args = flatc_args, + reflection_name = reflection_name, + reflection_visibility = visibility, + ) + native.cc_library( + name = name, + hdrs = output_headers, + srcs = output_headers, + features = [ + "-parse_headers", + ], + deps = [ + "@flatbuffers//:runtime_cc", + ], + includes = ["."], + linkstatic = 1, + visibility = visibility, + compatible_with = compatible_with, + ) + + # A filegroup for the `srcs`. That is, all the schema files for this + # Flatbuffer set. + native.filegroup( + name = srcs_filegroup_name if srcs_filegroup_name else "%s_includes" % (name), + srcs = srcs, + visibility = srcs_filegroup_visibility if srcs_filegroup_visibility != None else visibility, + compatible_with = compatible_with, + ) + +# Custom provider to track dependencies transitively. 
+FlatbufferInfo = provider( + fields = { + "transitive_srcs": "flatbuffer schema definitions.", + }, +) + +def _flatbuffer_schemas_aspect_impl(target, ctx): + _ignore = [target] + transitive_srcs = depset() + if hasattr(ctx.rule.attr, "deps"): + for dep in ctx.rule.attr.deps: + if FlatbufferInfo in dep: + transitive_srcs = depset(dep[FlatbufferInfo].transitive_srcs, transitive = [transitive_srcs]) + if hasattr(ctx.rule.attr, "srcs"): + for src in ctx.rule.attr.srcs: + if FlatbufferInfo in src: + transitive_srcs = depset(src[FlatbufferInfo].transitive_srcs, transitive = [transitive_srcs]) + for f in src.files: + if f.extension == "fbs": + transitive_srcs = depset([f], transitive = [transitive_srcs]) + return [FlatbufferInfo(transitive_srcs = transitive_srcs)] + +# An aspect that runs over all dependencies and transitively collects +# flatbuffer schema files. +_flatbuffer_schemas_aspect = aspect( + attr_aspects = [ + "deps", + "srcs", + ], + implementation = _flatbuffer_schemas_aspect_impl, +) + +# Rule to invoke the flatbuffer compiler. +def _gen_flatbuffer_srcs_impl(ctx): + outputs = ctx.attr.outputs + include_paths = ctx.attr.include_paths + if ctx.attr.no_includes: + no_includes_statement = ["--no-includes"] + else: + no_includes_statement = [] + + # Need to generate all files in a directory. 
+ if not outputs: + outputs = [ctx.actions.declare_directory("{}_all".format(ctx.attr.name))] + output_directory = outputs[0].path + else: + outputs = [ctx.actions.declare_file(output) for output in outputs] + output_directory = outputs[0].dirname + + deps = depset(ctx.files.srcs + ctx.files.deps, transitive = [ + dep[FlatbufferInfo].transitive_srcs + for dep in ctx.attr.deps + if FlatbufferInfo in dep + ]) + + include_paths_cmd_line = [] + for s in include_paths: + include_paths_cmd_line.extend(["-I", s]) + + for src in ctx.files.srcs: + ctx.actions.run( + inputs = deps, + outputs = outputs, + executable = ctx.executable._flatc, + arguments = [ + ctx.attr.language_flag, + "-o", + output_directory, + # Allow for absolute imports and referencing of generated files. + "-I", + "./", + "-I", + ctx.genfiles_dir.path, + "-I", + ctx.bin_dir.path, + ] + no_includes_statement + + include_paths_cmd_line + [ + "--no-union-value-namespacing", + "--gen-object-api", + src.path, + ], + progress_message = "Generating flatbuffer files for {}:".format(src), + use_default_shell_env = True, + ) + return [ + DefaultInfo(files = depset(outputs)), + ] + +_gen_flatbuffer_srcs = rule( + _gen_flatbuffer_srcs_impl, + attrs = { + "srcs": attr.label_list( + allow_files = [".fbs"], + mandatory = True, + ), + "outputs": attr.string_list( + default = [], + mandatory = False, + ), + "deps": attr.label_list( + default = [], + mandatory = False, + aspects = [_flatbuffer_schemas_aspect], + ), + "include_paths": attr.string_list( + default = [], + mandatory = False, + ), + "language_flag": attr.string( + mandatory = True, + ), + "no_includes": attr.bool( + default = False, + mandatory = False, + ), + "_flatc": attr.label( + default = Label("@flatbuffers//:flatc"), + executable = True, + cfg = "exec", + ), + }, + output_to_genfiles = True, +) + +def flatbuffer_py_strip_prefix_srcs(name, srcs = [], strip_prefix = ""): + """Strips path prefix. + + Args: + name: Rule name. 
The rule calls the flatbuffer compiler for all schema files and merges the
+ include_paths: Optional, list of paths the includes files can be found in. + """ + all_srcs = "{}_srcs".format(name) + _gen_flatbuffer_srcs( + name = all_srcs, + srcs = srcs, + language_flag = "--python", + deps = deps, + include_paths = include_paths, + ) + all_srcs_no_include = "{}_srcs_no_include".format(name) + _gen_flatbuffer_srcs( + name = all_srcs_no_include, + srcs = srcs, + language_flag = "--python", + deps = deps, + no_includes = True, + include_paths = include_paths, + ) + concat_py_srcs = "{}_generated".format(name) + _concat_flatbuffer_py_srcs( + name = concat_py_srcs, + deps = [ + ":{}".format(all_srcs_no_include), + ], + ) + native.py_library( + name = name, + srcs = [ + ":{}".format(concat_py_srcs), + ], + srcs_version = "PY3", + deps = deps + [ + "@flatbuffers//:runtime_py", + ], + ) + +def flatbuffer_java_library( + name, + srcs, + custom_package = "", + package_prefix = "", + include_paths = DEFAULT_INCLUDE_PATHS, + flatc_args = DEFAULT_FLATC_ARGS, + visibility = None): + """A java library with the generated reader/writers for the given flatbuffer definitions. + + Args: + name: Rule name. (required) + srcs: List of source .fbs files including all includes. (required) + custom_package: Package name of generated Java files. If not specified + namespace in the schema files will be used. (optional) + package_prefix: like custom_package, but prefixes to the existing + namespace. (optional) + include_paths: List of paths that includes files can be found in. (optional) + flatc_args: List of additional arguments to pass to flatc. (optional) + visibility: Visibility setting for the java_library rule. 
(optional) + """ + out_srcjar = "java_%s_all.srcjar" % name + flatbuffer_java_srcjar( + name = "%s_srcjar" % name, + srcs = srcs, + out = out_srcjar, + custom_package = custom_package, + flatc_args = flatc_args, + include_paths = include_paths, + package_prefix = package_prefix, + ) + + native.filegroup( + name = "%s.srcjar" % name, + srcs = [out_srcjar], + ) + + native.java_library( + name = name, + srcs = [out_srcjar], + javacopts = ["-source 7 -target 7"], + deps = [ + "@flatbuffers//:runtime_java", + ], + visibility = visibility, + ) + +def flatbuffer_java_srcjar( + name, + srcs, + out, + custom_package = "", + package_prefix = "", + include_paths = DEFAULT_INCLUDE_PATHS, + flatc_args = DEFAULT_FLATC_ARGS): + """Generate flatbuffer Java source files. + + Args: + name: Rule name. (required) + srcs: List of source .fbs files including all includes. (required) + out: Output file name. (required) + custom_package: Package name of generated Java files. If not specified + namespace in the schema files will be used. (optional) + package_prefix: like custom_package, but prefixes to the existing + namespace. (optional) + include_paths: List of paths that includes files can be found in. (optional) + flatc_args: List of additional arguments to pass to flatc. 
(optional) + """ + command_fmt = """set -e + tmpdir=$(@D) + schemas=$$tmpdir/schemas + java_root=$$tmpdir/java + rm -rf $$schemas + rm -rf $$java_root + mkdir -p $$schemas + mkdir -p $$java_root + + for src in $(SRCS); do + dest=$$schemas/$$src + rm -rf $$(dirname $$dest) + mkdir -p $$(dirname $$dest) + if [ -z "{custom_package}" ] && [ -z "{package_prefix}" ]; then + cp -f $$src $$dest + else + if [ -z "{package_prefix}" ]; then + sed -e "s/namespace\\s.*/namespace {custom_package};/" $$src > $$dest + else + sed -e "s/namespace \\([^;]\\+\\);/namespace {package_prefix}.\\1;/" $$src > $$dest + fi + fi + done + + flatc_arg_I="-I $$tmpdir/schemas" + for include_path in {include_paths}; do + flatc_arg_I="$$flatc_arg_I -I $$schemas/$$include_path" + done + + flatc_additional_args= + for arg in {flatc_args}; do + flatc_additional_args="$$flatc_additional_args $$arg" + done + + for src in $(SRCS); do + $(location {flatc_path}) $$flatc_arg_I --java $$flatc_additional_args -o $$java_root $$schemas/$$src + done + + $(location {zip_files}) -export_zip_path=$@ -file_directory=$$java_root + """ + genrule_cmd = command_fmt.format( + package_name = native.package_name(), + custom_package = custom_package, + package_prefix = package_prefix, + flatc_path = flatc_path, + zip_files = zip_files, + include_paths = " ".join(include_paths), + flatc_args = " ".join(flatc_args), + ) + + native.genrule( + name = name, + srcs = srcs, + outs = [out], + tools = [flatc_path, zip_files], + cmd = genrule_cmd, + ) + +def flatbuffer_android_library( + name, + srcs, + custom_package = "", + package_prefix = "", + include_paths = DEFAULT_INCLUDE_PATHS, + flatc_args = DEFAULT_FLATC_ARGS, + visibility = None): + """An android_library with the generated reader/writers for the given flatbuffer definitions. + + Args: + name: Rule name. (required) + srcs: List of source .fbs files including all includes. (required) + custom_package: Package name of generated Java files. 
If not specified + namespace in the schema files will be used. (optional) + package_prefix: like custom_package, but prefixes to the existing + namespace. (optional) + include_paths: List of paths that includes files can be found in. (optional) + flatc_args: List of additional arguments to pass to flatc. (optional) + visibility: Visibility setting for the android_library rule. (optional) + """ + out_srcjar = "android_%s_all.srcjar" % name + flatbuffer_java_srcjar( + name = "%s_srcjar" % name, + srcs = srcs, + out = out_srcjar, + custom_package = custom_package, + flatc_args = flatc_args, + include_paths = include_paths, + package_prefix = package_prefix, + ) + + native.filegroup( + name = "%s.srcjar" % name, + srcs = [out_srcjar], + ) + + # To support org.checkerframework.dataflow.qual.Pure. + checkerframework_annotations = [ + "@org_checkerframework_qual", + ] if "--java-checkerframework" in flatc_args else [] + + android_library( + name = name, + srcs = [out_srcjar], + javacopts = ["-source 7 -target 7"], + visibility = visibility, + deps = [ + "@flatbuffers//:runtime_android", + ] + checkerframework_annotations, + ) diff --git a/ci/flatbuffers_for_tf_sync/flatbuffers.BUILD b/ci/flatbuffers_for_tf_sync/flatbuffers.BUILD new file mode 100644 index 0000000..108c0cd --- /dev/null +++ b/ci/flatbuffers_for_tf_sync/flatbuffers.BUILD @@ -0,0 +1,156 @@ +load("@build_bazel_rules_android//android:rules.bzl", "android_library") +load(":build_defs.bzl", "flatbuffer_py_strip_prefix_srcs") + +package(default_visibility = ["//visibility:public"]) + +licenses(["notice"]) # Apache 2.0 + +exports_files(["LICENSE.txt"]) + +licenses(["notice"]) + +config_setting( + name = "freebsd", + values = {"cpu": "freebsd"}, +) + +config_setting( + name = "windows", + values = {"cpu": "x64_windows"}, +) + +load("@rules_cc//cc:defs.bzl", "cc_binary", "cc_library") + +# Public flatc library to compile flatbuffer files at runtime. 
+cc_library( + name = "flatbuffers", + hdrs = ["//:public_headers"], + linkstatic = 1, + strip_include_prefix = "/include", + visibility = ["//visibility:public"], + deps = ["//src:flatbuffers"], +) + +# Public C++ headers for the Flatbuffers library. +filegroup( + name = "public_headers", + srcs = [ + "include/flatbuffers/base.h", + "include/flatbuffers/code_generators.h", + "include/flatbuffers/flatbuffers.h", + "include/flatbuffers/flexbuffers.h", + "include/flatbuffers/hash.h", + "include/flatbuffers/idl.h", + "include/flatbuffers/minireflect.h", + "include/flatbuffers/reflection.h", + "include/flatbuffers/reflection_generated.h", + "include/flatbuffers/registry.h", + "include/flatbuffers/stl_emulation.h", + "include/flatbuffers/util.h", + ], + visibility = ["//:__subpackages__"], +) + +# Public flatc compiler library. +cc_library( + name = "flatc_library", + linkstatic = 1, + visibility = ["//visibility:public"], + deps = [ + "@flatbuffers//src:flatc_library", + ], +) + +# Public flatc compiler. +cc_binary( + name = "flatc", + linkopts = select({ + ":freebsd": [ + "-lm", + ], + ":windows": [], + "//conditions:default": [ + "-lm", + "-ldl", + ], + }), + visibility = ["//visibility:public"], + deps = [ + "@flatbuffers//src:flatc", + ], +) + +filegroup( + name = "flatc_headers", + srcs = [ + "include/flatbuffers/flatc.h", + ], + visibility = ["//:__subpackages__"], +) + +# Library used by flatbuffer_cc_library rules. 
+cc_library( + name = "runtime_cc", + hdrs = [ + "include/flatbuffers/base.h", + "include/flatbuffers/flatbuffers.h", + "include/flatbuffers/flexbuffers.h", + "include/flatbuffers/stl_emulation.h", + "include/flatbuffers/util.h", + ], + linkstatic = 1, + strip_include_prefix = "/include", + visibility = ["//visibility:public"], +) + +flatbuffer_py_strip_prefix_srcs( + name = "flatbuffer_py_strip_prefix", + srcs = [ + "python/flatbuffers/__init__.py", + "python/flatbuffers/builder.py", + "python/flatbuffers/compat.py", + "python/flatbuffers/encode.py", + "python/flatbuffers/number_types.py", + "python/flatbuffers/packer.py", + "python/flatbuffers/table.py", + "python/flatbuffers/util.py", + ], + strip_prefix = "python/flatbuffers/", +) + +filegroup( + name = "runtime_py_srcs", + srcs = [ + "__init__.py", + "builder.py", + "compat.py", + "encode.py", + "number_types.py", + "packer.py", + "table.py", + "util.py", + ], +) + +py_library( + name = "runtime_py", + srcs = [":runtime_py_srcs"], + visibility = ["//visibility:public"], +) + +filegroup( + name = "runtime_java_srcs", + srcs = glob(["java/com/google/flatbuffers/**/*.java"]), +) + +java_library( + name = "runtime_java", + srcs = [":runtime_java_srcs"], + visibility = ["//visibility:public"], +) + +android_library( + name = "runtime_android", + srcs = [":runtime_java_srcs"], + visibility = ["//visibility:public"], +) diff --git a/ci/flatbuffers_for_tf_sync/workspace.bzl b/ci/flatbuffers_for_tf_sync/workspace.bzl new file mode 100644 index 0000000..59c1fd9 --- /dev/null +++ b/ci/flatbuffers_for_tf_sync/workspace.bzl @@ -0,0 +1,16 @@ +"""Loads the Flatbuffers library, used by TF Lite.""" + +load("//third_party:repo.bzl", "tf_http_archive", "tf_mirror_urls") + +def repo(): + tf_http_archive( + name = "flatbuffers", + strip_prefix = "flatbuffers-1.12.0", + sha256 = "62f2223fb9181d1d6338451375628975775f7522185266cd5296571ac152bc45", + urls = 
tf_mirror_urls("https://github.com/google/flatbuffers/archive/v1.12.0.tar.gz"), + build_file = "//third_party/flatbuffers:flatbuffers.BUILD", + system_build_file = "//third_party/flatbuffers:BUILD.system", + link_files = { + "//third_party/flatbuffers:build_defs.bzl": "build_defs.bzl", + }, + ) diff --git a/ci/install_bazelisk.sh b/ci/install_bazelisk.sh new file mode 100755 index 0000000..d2f8a13 --- /dev/null +++ b/ci/install_bazelisk.sh @@ -0,0 +1,22 @@ +#!/usr/bin/env bash +# Copyright 2021 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================== + +set -e +wget https://github.com/bazelbuild/bazelisk/releases/download/v1.16.0/bazelisk-linux-amd64 +mv bazelisk-linux-amd64 bazel +chmod +x bazel +sudo mv bazel /usr/local/bin + diff --git a/ci/install_buildifier.sh b/ci/install_buildifier.sh new file mode 100755 index 0000000..56172ec --- /dev/null +++ b/ci/install_buildifier.sh @@ -0,0 +1,28 @@ +#!/usr/bin/env bash +# Copyright 2021 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================== + +set -e +# Download buildifier. +wget https://github.com/bazelbuild/buildtools/releases/download/4.2.3/buildifier-linux-amd64 +mv buildifier-linux-amd64 buildifier +chmod +x buildifier +sudo mv buildifier /usr/local/bin/. + +# Download buildozer. +wget https://github.com/bazelbuild/buildtools/releases/download/4.2.3/buildozer-linux-amd64 +mv buildozer-linux-amd64 buildozer +chmod +x buildozer +sudo mv buildozer /usr/local/bin/. diff --git a/ci/install_qemu.sh b/ci/install_qemu.sh new file mode 100755 index 0000000..5259978 --- /dev/null +++ b/ci/install_qemu.sh @@ -0,0 +1,31 @@ +#!/bin/bash +# Copyright 2023 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================== +# Parameters: +# ${1} Optional. Path to install QEMU. 
+# Looks through REPO for open issues with FLAG_LABEL. If none, +# creates a new issue. If there is an open issue with FLAG_LABEL, +# looks for WORKFLOW in body. If none, creates an issue. If an +# issue for WORKFLOW exists, adds a comment.
+ + +from datetime import datetime +import os +import json +import subprocess + +REPO_NAME = os.environ['REPO'] +WORKFLOW = os.environ['WORKFLOW'] +FLAG_LABEL = os.environ['FLAG_LABEL'] +RUN_NUMBER = os.environ['RUN_NUMBER'] +RUN_ID = os.environ['RUN_ID'] +# optional variables +PR_NUMBER = os.getenv('PR_NUMBER') +PR_LINK = os.getenv('PR_LINK') + +def get_tagged_issues(flag_label, workflow): + issues = subprocess.check_output(["gh", "issue", "list", + "--state", "open", + "--label", flag_label, + "--json", "title,number,body"], + encoding="utf-8") + issues = json.loads(issues) + tagged_issues =[] + for issue in issues: + if workflow in issue["body"]: + tagged_issues.append(issue) + return(tagged_issues) + +def create_issue(flag_label, workflow, run_number, run_id, repo_name, pr_number, pr_link): + + run_link = f"http://github.com/{repo_name}/actions/runs/{run_id}" + body_string = "" + title_string = f"{workflow} CI Run Failed" + if pr_number: + body_string = f"PR {pr_number} ({pr_link}) had a CI failure: \n" + title_string = f"PR #{pr_number} CI Run Failed" + body_string += f"{workflow} [run number {run_number}]({run_link}) failed. \n\n" + body_string += "This issue has been automatically generated for " + body_string += "notification purposes." 
+ + new_issue = subprocess.check_output(["gh", "issue", "create", + "--title", title_string, + "--body", body_string, + "--label", flag_label], + encoding="utf-8") + return(new_issue) + + +def add_comment(issue_number, run_number, run_id, repo_name): + dt_string = datetime.now().strftime("%d/%m/%Y %H:%M:%S") + run_link = f"http://github.com/{repo_name}/actions/runs/{run_id}" + msg_string = f"Error reoccurred: {dt_string}\n" + msg_string += f"[Run number: {run_number}]({run_link})\n" + subprocess.run(["gh", "issue", "comment", issue_number, + "--body", msg_string]) + return() + +if __name__ == "__main__": + tagged_issues = get_tagged_issues(FLAG_LABEL, WORKFLOW) + + # The logic catches the case where an issue exists for the workflow + # but we are testing against a PR and want a created issue to link to the PR. + # Otherwise, we just add a comment to the existing issue. + if not tagged_issues: + create_issue(FLAG_LABEL, WORKFLOW, RUN_NUMBER, RUN_ID, REPO_NAME, PR_NUMBER, PR_LINK) + else: + for issue in tagged_issues: + if PR_NUMBER: + if PR_LINK in issue["body"]: + add_comment(str(issue["number"]), RUN_NUMBER, RUN_ID, REPO_NAME) + else: + create_issue(FLAG_LABEL, WORKFLOW, RUN_NUMBER, RUN_ID, REPO_NAME, + PR_NUMBER, PR_LINK) + else: + add_comment(str(issue["number"]), RUN_NUMBER, RUN_ID, REPO_NAME) + for issue in tagged_issues: + print(issue["number"]) + + + + + diff --git a/ci/sync_from_upstream_tf.sh b/ci/sync_from_upstream_tf.sh new file mode 100755 index 0000000..094df65 --- /dev/null +++ b/ci/sync_from_upstream_tf.sh @@ -0,0 +1,75 @@ +#!/usr/bin/env bash +# Copyright 2021 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
using `xxd -i input_file > data_file.cc`
`gen/<target>/genfiles/`.
docker ps --all
 docker rm <container_id>
As the queue processes each PR, it creates a temporary branch and merges in the results of the previous passing PRs by merging main.
+ +```sh +# The cloned tflite-micro folder needs to be renamed to tflite_micro +mv tflite-micro tflite_micro +# To set up a specific Python version, make sure `python` is pointed to the +# desired version. For example, call `python3.11 -m venv tflite_micro/venv`. +python -m venv tflite_micro/venv +echo "export PYTHONPATH=\${PYTHONPATH}:${PWD}" >> tflite_micro/venv/bin/activate +cd tflite_micro +source venv/bin/activate +pip install --upgrade pip +pip install -r third_party/python_requirements.txt + +# (Optional) +pip install ipython +``` + +Run some tests and binaries: +```sh +python tensorflow/lite/tools/flatbuffer_utils_test.py +python tensorflow/lite/tools/visualize.py tensorflow/lite/micro/models/person_detect.tflite tensorflow/lite/micro/models/person_detect.tflite.html +``` + diff --git a/python/BUILD b/python/BUILD new file mode 100644 index 0000000..e69de29 diff --git a/python/py_pkg_cc_deps.bzl b/python/py_pkg_cc_deps.bzl new file mode 100644 index 0000000..d6f2121 --- /dev/null +++ b/python/py_pkg_cc_deps.bzl @@ -0,0 +1,167 @@ +"""Repository rule for creating an external repository `name` with C-language +dependencies from the Python package `pkg`, published by rules_python. Set +`pkg` using the `requirement` function from rules_python. + +The top-level Bazel package in the created repository provides two targets for +use as dependencies in C-language targets elsewhere. + + * `:cc_headers`--for including headers + * `:cc_library`--for including headers and linking against a library + +The mandatory `includes` attribute should be set to a list of +include dirs to be added to the compile command line. + +The optional `libs` attribute should be set to a list of libraries to link with +the binary target. + +Specify all paths relative to the parent directory in which the package is +extracted (e.g., site-packages/). Thus paths will begin with the package's +Python namespace or module name. 
Note this name may differ from the Python +distribution package name---e.g., the distribution package `tensorflow-gpu` +distributes the Python namespace package `tensorflow`. To see what paths are +available, it might help to examine the directory tree in the external +repository created for the package by rules_python. The external repository is +created in the bazel cache; in the example below, in a subdirectory +`external/tflm_pip_deps_numpy`. + +For example, to use the headers from NumPy: + +1. Add Python dependencies (numpy is named in python_requirements.txt), via the +usual method, to an external repository named `tflm_pip_deps` via @rules_python +in the WORKSPACE: +``` + load("@rules_python//python:pip.bzl", "pip_parse") + pip_parse( + name = "tflm_pip_deps", + requirements_lock = "@//third_party:python_requirements.txt", + ) + load("@tflm_pip_deps//:requirements.bzl", "install_deps") + install_deps() +``` + +2. Use the repository rule `py_pkg_cc_deps` in the WORKSPACE to create an +external repository with a target `@numpy_cc_deps//:cc_headers`, passing the +`:pkg` target from @tflm_pip_deps, obtained via requirement(), and an +`includes` path based on an examination of the package and the desired #include +paths in the C code: +``` + load("@tflm_pip_deps//:requirements.bzl", "requirement") + load("@//python:py_pkg_cc_deps.bzl", "py_pkg_cc_deps") + py_pkg_cc_deps( + name = "numpy_cc_deps", + pkg = requirement("numpy"), + includes = ["numpy/core/include"], + ) +``` + +3. Use the cc_library target `@numpy_cc_deps//:cc_headers` in a BUILD file as +a dependency to a rule that needs the headers, e.g., the cc_library()-based +pybind_library(): +``` + pybind_library( + name = "your_extension_lib", + srcs = [...], + deps = ["@numpy_cc_deps//:cc_headers", ...], + ) +``` + +See the test target //python/tests:cc_dep_link_test elsewhere for an example +which links against a library shipped in a Python package. 
ctx.path() does not work on non-file labels.
+ result = a + for x in b.split("/"): + result = result.get_child(x) + + return result + +def _make_build_file(basedir, include_paths, libs): + template = """\ +package( + default_visibility = ["//visibility:public"], +) + +cc_library( + name = "cc_headers", + hdrs = glob(%s, allow_empty=False, exclude_directories=1), + includes = %s, +) + +cc_library( + name = "cc_library", + srcs = %s, + deps = [":cc_headers"], +) +""" + hdrs = [(_join_paths(basedir, inc) + "/**") for inc in include_paths] + includes = [_join_paths(basedir, inc) for inc in include_paths] + srcs = [_join_paths(basedir, lib) for lib in libs] + + return template % (hdrs, includes, srcs) + +def _py_pkg_cc_deps(ctx): + # Create a repository with the directory tree: + # repository/ + # |- _site --> @specific_rules_python_pkg/site-packages + # \_ BUILD + # + # When debugging, it might help to examine the tree and BUILD file of this + # repository, created in the bazel cache. + + # Symlink to the rules_python repository of pkg + srcdir = _join_paths(_rules_python_path(ctx, ctx.attr.pkg), "site-packages") + destdir = "_site" + ctx.symlink(srcdir, destdir) + + # Write a BUILD file publishing targets + ctx.file( + "BUILD", + content = _make_build_file(destdir, ctx.attr.includes, ctx.attr.libs), + executable = False, + ) + +py_pkg_cc_deps = repository_rule( + implementation = _py_pkg_cc_deps, + local = True, + attrs = { + "pkg": attr.label( + doc = "Python package target via rules_python's requirement()", + mandatory = True, + ), + "includes": attr.string_list( + doc = "list of include dirs", + mandatory = True, + allow_empty = False, + ), + "libs": attr.string_list( + doc = "list of libraries against which to link", + mandatory = False, + ), + }, +) diff --git a/python/tests/BUILD b/python/tests/BUILD new file mode 100644 index 0000000..8e82b2f --- /dev/null +++ b/python/tests/BUILD @@ -0,0 +1,10 @@ +cc_test( + name = "cc_deps_link_test", + size = "small", + srcs = [ + "cc_deps_link_test.cc", + ], + deps = 
[ + "@tensorflow_cc_deps//:cc_library", + ], +) diff --git a/python/tests/cc_deps_link_test.cc b/python/tests/cc_deps_link_test.cc new file mode 100644 index 0000000..f68444b --- /dev/null +++ b/python/tests/cc_deps_link_test.cc @@ -0,0 +1,12 @@ +// A simple program to test the py_pkg_cc_deps repository rule by building and +// linking against the Tensorflow library shipping in the Tensorflow Python +// package. + +#include + +int main(int argc, char* argv[]) { + const char* ptr = "test"; + const size_t n = 4; + tensorflow::PrintMemory(ptr, n); + return 0; +} diff --git a/python/tflite_micro/BUILD b/python/tflite_micro/BUILD new file mode 100644 index 0000000..24efc9f --- /dev/null +++ b/python/tflite_micro/BUILD @@ -0,0 +1,97 @@ +load("@pybind11_bazel//:build_defs.bzl", "pybind_extension") +load("@tflm_pip_deps//:requirements.bzl", "requirement") +load( + "//tensorflow/lite/micro:build_def.bzl", + "micro_copts", +) +load( + "//tensorflow:extra_rules.bzl", + "tflm_python_op_resolver_friends", +) + +package( + features = ["-layering_check"], + licenses = ["notice"], +) + +package_group( + name = "op_resolver_friends", + packages = tflm_python_op_resolver_friends(), +) + +cc_library( + name = "python_ops_resolver", + srcs = [ + "python_ops_resolver.cc", + ], + hdrs = [ + "python_ops_resolver.h", + ], + copts = micro_copts(), + visibility = [ + ":op_resolver_friends", + "//tensorflow/lite/micro/integration_tests:__subpackages__", + "//tensorflow/lite/micro/python/interpreter/src:__subpackages__", + ], + deps = [ + "//tensorflow/lite/micro:micro_compatibility", + "//tensorflow/lite/micro:op_resolvers", + "//tensorflow/lite/micro/kernels:micro_ops", + ], +) + +pybind_extension( + name = "_runtime", + # target = _runtime.so because pybind_extension() appends suffix + srcs = [ + "_runtime.cc", + "interpreter_wrapper.cc", + "interpreter_wrapper.h", + "numpy_utils.cc", + "numpy_utils.h", + "pybind11_lib.h", + "python_utils.cc", + "python_utils.h", + "shared_library.h", + ], 
+ deps = [ + ":python_ops_resolver", + "//tensorflow/lite/micro:micro_framework", + "//tensorflow/lite/micro:op_resolvers", + "//tensorflow/lite/micro:recording_allocators", + "@numpy_cc_deps//:cc_headers", + ], +) + +py_library( + name = "runtime", + srcs = [ + "runtime.py", + ], + data = [ + ":_runtime.so", + ], + srcs_version = "PY3", + visibility = ["//visibility:public"], + deps = [ + requirement("numpy"), + "//tensorflow/lite/tools:flatbuffer_utils", + ], +) + +py_test( + name = "runtime_test", + srcs = ["runtime_test.py"], + python_version = "PY3", + tags = [ + "noasan", + "nomsan", # Python doesn't like these symbols in _runtime.so + "noubsan", + ], + deps = [ + requirement("numpy"), + requirement("tensorflow-cpu"), + ":runtime", + "//tensorflow/lite/micro/testing:generate_test_models_lib", + ], +) diff --git a/python/tflite_micro/README.md b/python/tflite_micro/README.md new file mode 100644 index 0000000..cf5f638 --- /dev/null +++ b/python/tflite_micro/README.md @@ -0,0 +1,158 @@ +# TFLM Python Interpreter + +The TFLM interpreter can be invoked from Python by using the Python interpreter +wrapper in this directory. + +## Usage + +There are two ways to import the Python wrapper, either by using Bazel/Blaze, or +in near future by installing a PyPi package. + +### Bazel + +#### Build + +The only package that needs to be included in the `BUILD` file is +`//python/tflite_micro:runtime`. It contains all +the correct dependencies to build the Python interpreter. + +### PyPi + +Work in progress. + +### Examples + +Depending on the workflow, the package import path may be slightly different. + +A simple end-to-end example is the test +`python/tflite_micro/runtime_test.py:testCompareWithTFLite()`. +It shows how to compare inference results between TFLite and TFLM. + +A basic usage of the TFLM Python interpreter looks like the following. The input +to the Python interpreter should be a converted TFLite flatbuffer in either +bytearray format or file format. 
+ +``` +# For the Bazel workflow +from tflite_micro.python.tflite_micro import runtime + + +# If model is a bytearray +tflm_interpreter = runtime.Interpreter.from_bytes(model_data) +# If model is a file +tflm_interpreter = runtime.Interpreter.from_file(model_filepath) + +# Run inference on TFLM using an ndarray `data_x` +tflm_interpreter.set_input(data_x, 0) +tflm_interpreter.invoke() +tflm_output = tflm_interpreter.get_output(0) +``` + +Input and output tensor details can also be queried using the Python API: + +``` +print(tflm_interpreter.get_input_details(0)) +print(tflm_interpreter.get_output_details(0)) +``` + +## Technical Details + +The Python interpreter uses [pybind11](https://github.com/pybind/pybind11) to +expose an evolving set of C++ APIs. The Bazel build leverages the +[pybind11_bazel extension](https://github.com/pybind/pybind11_bazel). + +The most updated Python APIs can be found in +`python/tflite_micro/runtime.py`. + +## Custom Ops + +The Python interpreter works with models with +[custom ops](https://www.tensorflow.org/lite/guide/ops_custom) but special steps +need to be taken to make sure that it can retrieve the right implementation. +This is currently compatible with the Bazel workflow only. + +1. Implement the custom op in C++ + +Assuming that the custom is already implemented according to the linked guide, + +``` +// custom_op.cc +TfLiteRegistration *Register_YOUR_CUSTOM_OP() { + // Do custom op stuff +} + +// custom_op.h +TfLiteRegistration *Register_YOUR_CUSTOM_OP(); +``` + +2. Implement a custom op Registerer + +A Registerer of the following signature is required to wrap the custom op and +add it to TFLM's ops resolver. 
For example, + +``` +#include "custom_op.h" +#include "tensorflow/lite/micro/all_ops_resolver.h" + +namespace tflite { + +extern "C" bool SomeCustomRegisterer(tflite::PythonOpsResolver* resolver) { + TfLiteStatus status = resolver->AddCustom("CustomOp", tflite::Register_YOUR_CUSTOM_OP()); + if (status != kTfLiteOk) { + return false; + } + return true; +} +``` + +3. Include the implementation of custom op and registerer in the caller's build + +For the Bazel workflow, it's recommended to create a package that includes the +custom op's and the registerer's implementation, because it needs to be included +in the target that calls the Python interpreter with custom ops. + +4. Pass the registerer into the Python interpreter during instantiation + +For example, + +``` +interpreter = runtime.Interpreter.from_file( + model_path=model_path, + custom_op_registerers=['SomeCustomRegisterer']) +``` + +The interpreter will then perform a dynamic lookup for the symbol called +`SomeCustomRegisterer()` and call it. This ensures that the custom op is +properly included in TFLM's op resolver. This approach is very similar to +TFLite's custom op support. + +## Print Allocations + +The Python interpreter can also be used to print memory arena allocations. This +is very helpful to figure out actual memory arena usage. 
+ +For example, + +``` +tflm_interpreter.print_allocations() +``` + +will print + +``` +[RecordingMicroAllocator] Arena allocation total 10016 bytes +[RecordingMicroAllocator] Arena allocation head 7744 bytes +[RecordingMicroAllocator] Arena allocation tail 2272 bytes +[RecordingMicroAllocator] 'TfLiteEvalTensor data' used 312 bytes with alignment overhead (requested 312 bytes for 13 allocations) +[RecordingMicroAllocator] 'Persistent TfLiteTensor data' used 224 bytes with alignment overhead (requested 224 bytes for 2 tensors) +[RecordingMicroAllocator] 'Persistent TfLiteTensor quantization data' used 64 bytes with alignment overhead (requested 64 bytes for 4 allocations) +[RecordingMicroAllocator] 'Persistent buffer data' used 640 bytes with alignment overhead (requested 608 bytes for 10 allocations) +[RecordingMicroAllocator] 'NodeAndRegistration struct' used 440 bytes with alignment overhead (requested 440 bytes for 5 NodeAndRegistration structs) +``` + +10016 bytes is the actual memory arena size. + +During instantiation via the class methods `runtime.Interpreter.from_file` +or `runtime.Interpreter.from_bytes`, if `arena_size` is not explicitly +specified, the interpreter will default to a heuristic which is 10x the model +size. This can be adjusted manually if desired. diff --git a/python/tflite_micro/_runtime.cc b/python/tflite_micro/_runtime.cc new file mode 100644 index 0000000..824b3b4 --- /dev/null +++ b/python/tflite_micro/_runtime.cc @@ -0,0 +1,63 @@ +/* Copyright 2022 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#include +#include + +#include "python/tflite_micro/interpreter_wrapper.h" +#include "python/tflite_micro/pybind11_lib.h" + +namespace py = pybind11; +using tflite::InterpreterWrapper; + +PYBIND11_MODULE(_runtime, m) { + m.doc() = "TFLite Micro Runtime Extension"; + + py::class_(m, "InterpreterWrapper") + .def(py::init([](const py::bytes& data, + const std::vector& registerers_by_name, + size_t arena_size, int num_resource_variables) { + return std::unique_ptr( + new InterpreterWrapper(data.ptr(), registerers_by_name, arena_size, + num_resource_variables)); + })) + .def("PrintAllocations", &InterpreterWrapper::PrintAllocations) + .def("Invoke", &InterpreterWrapper::Invoke) + .def("Reset", &InterpreterWrapper::Reset) + .def( + "SetInputTensor", + [](InterpreterWrapper& self, py::handle& x, size_t index) { + self.SetInputTensor(x.ptr(), index); + }, + py::arg("x"), py::arg("index")) + .def( + "GetOutputTensor", + [](InterpreterWrapper& self, size_t index) { + return tflite::PyoOrThrow(self.GetOutputTensor(index)); + }, + py::arg("index")) + .def( + "GetInputTensorDetails", + [](InterpreterWrapper& self, size_t index) { + return tflite::PyoOrThrow(self.GetInputTensorDetails(index)); + }, + py::arg("index")) + .def( + "GetOutputTensorDetails", + [](InterpreterWrapper& self, size_t index) { + return tflite::PyoOrThrow(self.GetOutputTensorDetails(index)); + }, + py::arg("index")); +} diff --git a/python/tflite_micro/interpreter_wrapper.cc b/python/tflite_micro/interpreter_wrapper.cc new file mode 100644 index 0000000..41c4f7a --- /dev/null +++ b/python/tflite_micro/interpreter_wrapper.cc @@ -0,0 +1,369 @@ +/* Copyright 2022 The TensorFlow Authors. All Rights Reserved. 
+ +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#include "python/tflite_micro/interpreter_wrapper.h" + +// Disallow Numpy 1.7 deprecated symbols. +#define NPY_NO_DEPRECATED_API NPY_1_7_API_VERSION +// See https://numpy.org/doc/1.16/reference/c-api.array.html#importing-the-api +#define NO_IMPORT_ARRAY +#define PY_ARRAY_UNIQUE_SYMBOL tflite_micro_python_interpreter_array_api +#include +#include + +#include "python/tflite_micro/numpy_utils.h" +#include "python/tflite_micro/pybind11_lib.h" +#include "python/tflite_micro/python_ops_resolver.h" +#include "python/tflite_micro/python_utils.h" +#include "python/tflite_micro/shared_library.h" +#include "tensorflow/lite/c/common.h" +#include "tensorflow/lite/micro/micro_interpreter.h" +#include "tensorflow/lite/micro/recording_micro_allocator.h" + +namespace tflite { +namespace { +// This function looks up the registerer symbol based on the string name +// `registerer_name`. A registerer in this case is a function that calls the +// `AddCustom` API of `PythonOpsResolver` for custom ops that need to be +// registered with the interpreter. +bool AddCustomOpRegistererByName(const char* registerer_name, + tflite::PythonOpsResolver* resolver) { + // Registerer functions take a pointer to a PythonOpsResolver as an input + // parameter and return a bool indicating success.
+ typedef bool (*RegistererFunctionType)(tflite::PythonOpsResolver*); + + // Look for the Registerer function by name. + RegistererFunctionType registerer = reinterpret_cast( + SharedLibrary::GetSymbol(registerer_name)); + + // Fail in an informative way if the function was not found. + if (registerer == nullptr) { + MicroPrintf("Looking up symbol '%s' failed with error '%s'.", + registerer_name, SharedLibrary::GetError()); + return false; + } + + // Call the registerer with the resolver. + if (!registerer(resolver)) { + MicroPrintf( + "%s failed to register op. Check that total number of " + "ops doesn't exceed the maximum allowed by PythonOpsResolver.", + registerer_name); + return false; + } + + return true; +} + +PyObject* PyArrayFromFloatVector(const float* data, npy_intp size) { + void* pydata = malloc(size * sizeof(float)); + memcpy(pydata, data, size * sizeof(float)); + PyObject* obj = PyArray_SimpleNewFromData(1, &size, NPY_FLOAT32, pydata); + PyArray_ENABLEFLAGS(reinterpret_cast(obj), NPY_ARRAY_OWNDATA); + return obj; +} + +PyObject* PyArrayFromIntVector(const int* data, npy_intp size) { + void* pydata = malloc(size * sizeof(int)); + memcpy(pydata, data, size * sizeof(int)); + PyObject* obj = PyArray_SimpleNewFromData(1, &size, NPY_INT32, pydata); + PyArray_ENABLEFLAGS(reinterpret_cast(obj), NPY_ARRAY_OWNDATA); + return obj; +} + +// Check if the tensor is valid for TFLM +bool CheckTensor(const TfLiteTensor* tensor) { + if (tensor == nullptr) { + PyErr_SetString(PyExc_IndexError, + "Tensor is out of bound, please check tensor index."); + return false; + } + + if (tensor->type == kTfLiteString || tensor->type == kTfLiteResource || + tensor->type == kTfLiteVariant) { + PyErr_SetString(PyExc_ValueError, + "TFLM doesn't support strings, resource variables, or " + "variants as outputs."); + return false; + } + + if (tensor->sparsity != nullptr) { + PyErr_SetString(PyExc_ValueError, "TFLM doesn't support sparse tensors"); + return false; + } + + int py_type_num 
= TfLiteTypeToPyArrayType(tensor->type); + if (py_type_num == NPY_NOTYPE) { + PyErr_SetString(PyExc_ValueError, "Unknown tensor type."); + return false; + } + + if (tensor->bytes == 0 && tensor->data.data != nullptr) { + PyErr_SetString(PyExc_ValueError, "Invalid tensor size of 0."); + return false; + } + + if (tensor->bytes > 0 && tensor->data.data == nullptr) { + PyErr_SetString(PyExc_ValueError, "Null tensor pointer."); + return false; + } + return true; +} + +PyObject* GetTensorSize(const TfLiteTensor* tensor) { + PyObject* np_array = + PyArrayFromIntVector(tensor->dims->data, tensor->dims->size); + + return PyArray_Return(reinterpret_cast(np_array)); +} + +PyObject* GetTensorType(const TfLiteTensor* tensor) { + int code = TfLiteTypeToPyArrayType(tensor->type); + return PyArray_TypeObjectFromType(code); +} + +// Create a python dictionary object that contains the general (can be +// channel-wise quantized) affine quantization information about the tensor. +PyObject* GetTensorQuantizationParameters(const TfLiteTensor* tensor) { + const TfLiteQuantization quantization = tensor->quantization; + float* scales_data = nullptr; + int32_t* zero_points_data = nullptr; + int32_t scales_size = 0; + int32_t zero_points_size = 0; + int32_t quantized_dimension = 0; + if (quantization.type == kTfLiteAffineQuantization) { + const TfLiteAffineQuantization* q_params = + reinterpret_cast(quantization.params); + if (q_params->scale) { + scales_data = q_params->scale->data; + scales_size = q_params->scale->size; + } + if (q_params->zero_point) { + zero_points_data = q_params->zero_point->data; + zero_points_size = q_params->zero_point->size; + } + quantized_dimension = q_params->quantized_dimension; + } + PyObject* scales_array = PyArrayFromFloatVector(scales_data, scales_size); + PyObject* zero_points_array = + PyArrayFromIntVector(zero_points_data, zero_points_size); + + PyObject* result = PyDict_New(); + PyDict_SetItemString(result, "scales", scales_array); +
PyDict_SetItemString(result, "zero_points", zero_points_array); + PyDict_SetItemString(result, "quantized_dimension", + PyLong_FromLong(quantized_dimension)); + return result; +} + +PyObject* GetTensorDetails(const TfLiteTensor* tensor) { + if (!CheckTensor(tensor)) { + return nullptr; + } + + PyObject* tensor_type = GetTensorType(tensor); + PyObject* tensor_size = GetTensorSize(tensor); + PyObject* tensor_quantization_parameters = + GetTensorQuantizationParameters(tensor); + + PyObject* result = PyDict_New(); + PyDict_SetItemString(result, "dtype", tensor_type); + PyDict_SetItemString(result, "shape", tensor_size); + PyDict_SetItemString(result, "quantization_parameters", + tensor_quantization_parameters); + + return result; +} + +} // namespace + +InterpreterWrapper::~InterpreterWrapper() { + // We don't use a unique_ptr for the interpreter because we need to call its + // destructor before we call Py_DECREF(model_). This ensures that the model + // is still in scope when MicroGraph:FreeSubgraphs() is called. Otherwise, + // a segmentation fault could occur. + if (interpreter_ != nullptr) { + delete interpreter_; + } + + // Undo any references incremented + Py_DECREF(model_); +} + +InterpreterWrapper::InterpreterWrapper( + PyObject* model_data, const std::vector& registerers_by_name, + size_t arena_size, int num_resource_variables) { + interpreter_ = nullptr; + + // `model_data` is used as a raw pointer beyond the scope of this + // constructor, so we need to increment the reference count so that Python + // doesn't destroy it during the lifetime of this interpreter. 
+ Py_INCREF(model_data); + + // Get the input array contained in `model_data` as a byte array + char* buf = nullptr; + Py_ssize_t length; + if (ConvertFromPyString(model_data, &buf, &length) == -1 || buf == nullptr) { + ThrowValueError( + "TFLM cannot convert model data from Python object to char *"); + } + + const Model* model = GetModel(buf); + model_ = model_data; + memory_arena_ = std::unique_ptr(new uint8_t[arena_size]); + allocator_ = RecordingMicroAllocator::Create(memory_arena_.get(), arena_size); + MicroResourceVariables* resource_variables_ = nullptr; + if (num_resource_variables > 0) + resource_variables_ = + MicroResourceVariables::Create(allocator_, num_resource_variables); + + for (const std::string& registerer : registerers_by_name) { + if (!AddCustomOpRegistererByName(registerer.c_str(), + &python_ops_resolver_)) { + ThrowRuntimeError( + ("TFLM could not register custom op via " + registerer).c_str()); + } + } + + interpreter_ = new MicroInterpreter(model, python_ops_resolver_, allocator_, + resource_variables_); + + TfLiteStatus status = interpreter_->AllocateTensors(); + if (status != kTfLiteOk) { + ThrowRuntimeError("TFLM failed to allocate tensors"); + } + + // This must be called before using any PyArray_* APIs. It essentially sets + // up the lookup table that maps PyArray_* macros to the correct APIs. + ImportNumpy(); +} + +void InterpreterWrapper::PrintAllocations() { allocator_->PrintAllocations(); } + +int InterpreterWrapper::Invoke() { + TfLiteStatus status = interpreter_->Invoke(); + if (status == kTfLiteError) { + ThrowRuntimeError("Interpreter invocation failed."); + } + return status; +} + +int InterpreterWrapper::Reset() { return interpreter_->Reset(); } + +// 1. Check that tensor and input array are safe to access +// 2. Verify that input array metadata matches tensor metadata +// 3. 
Copy input buffer into target input tensor +void InterpreterWrapper::SetInputTensor(PyObject* data, size_t index) { + std::unique_ptr array_safe(PyArray_FromAny( + /*op=*/data, + /*dtype=*/nullptr, + /*min_depth=*/0, + /*max_depth=*/0, + /*requirements=*/NPY_ARRAY_CARRAY, + /*context=*/nullptr)); + if (!array_safe) { + ThrowValueError("TFLM cannot convert input to PyArray"); + } + + PyArrayObject* array = reinterpret_cast(array_safe.get()); + + TfLiteTensor* tensor = interpreter_->input(index); + if (!CheckTensor(tensor)) { + throw pybind11::error_already_set(); + } + + if (TfLiteTypeFromPyArray(array) != tensor->type) { + std::string err_str = + "Cannot set tensor: Got value of type " + + std::string(TfLiteTypeGetName(TfLiteTypeFromPyArray(array))) + + " but expected type " + TfLiteTypeGetName(tensor->type) + + " for input " + std::to_string(index); + ThrowValueError(err_str.c_str()); + } + + if (PyArray_NDIM(array) != tensor->dims->size) { + std::string err_str = "Cannot set tensor: Dimension mismatch. Got " + + std::to_string(PyArray_NDIM(array)) + + " but expected " + + std::to_string(tensor->dims->size) + " for input " + + std::to_string(index); + ThrowValueError(err_str.c_str()); + } + + for (int j = 0; j < PyArray_NDIM(array); j++) { + if (tensor->dims->data[j] != PyArray_SHAPE(array)[j]) { + std::string err_str = + "Cannot set tensor: Dimension mismatch. 
Got " + + std::to_string(PyArray_SHAPE(array)[j]) + " but expected " + + std::to_string(tensor->dims->data[j]) + " for dimension " + + std::to_string(j) + " of input " + std::to_string(index); + ThrowValueError(err_str.c_str()); + } + } + + if (tensor->data.data == nullptr && tensor->bytes) { + ThrowValueError("Cannot set tensor: Tensor is non-empty but has nullptr."); + } + + size_t size = PyArray_NBYTES(array); + if (size != tensor->bytes) { + std::string err_str = "numpy array had " + std::to_string(size) + + " bytes but expected " + + std::to_string(tensor->bytes) + " bytes."; + ThrowValueError(err_str.c_str()); + } + + memcpy(tensor->data.data, PyArray_DATA(array), size); +} + +// 1. Check that output tensor is supported and safe to access +// 2. Allocate a buffer and copy output tensor data into it +// 3. Set PyArray metadata and transfer ownership to caller +PyObject* InterpreterWrapper::GetOutputTensor(size_t index) const { + const TfLiteTensor* tensor = interpreter_->output(index); + if (!CheckTensor(tensor)) { + return nullptr; + } + // Allocate a new buffer with output data to be returned to Python. New memory + // is allocated here to prevent hard to debug issues in Python, like data + // potentially changing under the hood, which imposes an implicit requirement + // that the user needs to be aware of. 
+ void* data = malloc(tensor->bytes); + memcpy(data, tensor->data.data, tensor->bytes); + + PyObject* np_array; + std::vector dims(tensor->dims->data, + tensor->dims->data + tensor->dims->size); + int py_type_num = TfLiteTypeToPyArrayType(tensor->type); + np_array = + PyArray_SimpleNewFromData(dims.size(), dims.data(), py_type_num, data); + + // Transfer ownership to Python so that Python will take care of + // releasing this buffer + PyArray_ENABLEFLAGS(reinterpret_cast(np_array), + NPY_ARRAY_OWNDATA); + + return PyArray_Return(reinterpret_cast(np_array)); +} + +PyObject* InterpreterWrapper::GetInputTensorDetails(size_t index) const { + return GetTensorDetails(interpreter_->input(index)); +} + +PyObject* InterpreterWrapper::GetOutputTensorDetails(size_t index) const { + return GetTensorDetails(interpreter_->output(index)); +} + +} // namespace tflite diff --git a/python/tflite_micro/interpreter_wrapper.h b/python/tflite_micro/interpreter_wrapper.h new file mode 100644 index 0000000..1ead5af --- /dev/null +++ b/python/tflite_micro/interpreter_wrapper.h @@ -0,0 +1,51 @@ +/* Copyright 2022 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License.
+==============================================================================*/ +#ifndef TENSORFLOW_LITE_MICRO_TOOLS_PYTHON_INTERPRETER_WRAPPER_H_ +#define TENSORFLOW_LITE_MICRO_TOOLS_PYTHON_INTERPRETER_WRAPPER_H_ + +#include + +#include "python/tflite_micro/python_ops_resolver.h" +#include "tensorflow/lite/micro/micro_interpreter.h" +#include "tensorflow/lite/micro/recording_micro_allocator.h" + +namespace tflite { + +class InterpreterWrapper { + public: + InterpreterWrapper(PyObject* model_data, + const std::vector& registerers_by_name, + size_t arena_size, int num_resource_variables); + ~InterpreterWrapper(); + + void PrintAllocations(); + int Invoke(); + int Reset(); + void SetInputTensor(PyObject* data, size_t index); + PyObject* GetOutputTensor(size_t index) const; + PyObject* GetInputTensorDetails(size_t index) const; + PyObject* GetOutputTensorDetails(size_t index) const; + + private: + tflite::RecordingMicroAllocator* allocator_; + const PyObject* model_; + std::unique_ptr memory_arena_; + tflite::PythonOpsResolver python_ops_resolver_; + tflite::MicroInterpreter* interpreter_; +}; + +} // namespace tflite + +#endif // TENSORFLOW_LITE_MICRO_TOOLS_PYTHON_INTERPRETER_WRAPPER_H_ diff --git a/python/tflite_micro/numpy_utils.cc b/python/tflite_micro/numpy_utils.cc new file mode 100644 index 0000000..4a4aad8 --- /dev/null +++ b/python/tflite_micro/numpy_utils.cc @@ -0,0 +1,124 @@ +/* Copyright 2022 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#include "python/tflite_micro/numpy_utils.h" + +// Disallow Numpy 1.7 deprecated symbols. +#define NPY_NO_DEPRECATED_API NPY_1_7_API_VERSION +// Since we are calling `import_array()` here, define PY_ARRAY_UNIQUE_SYMBOL +// here and NO_IMPORT_ARRAY everywhere else arrayobject.h is included +// See https://numpy.org/doc/1.16/reference/c-api.array.html#importing-the-api +#define PY_ARRAY_UNIQUE_SYMBOL tflite_micro_python_interpreter_array_api +#include + +#include "tensorflow/lite/c/c_api_types.h" + +namespace tflite { + +void* ImportNumpy() { + // import_array() is actually a macro that returns NULL (in Python3), hence + // this wrapper function with a return type of void*. + import_array(); + return nullptr; +} + +int TfLiteTypeToPyArrayType(TfLiteType tf_lite_type) { + switch (tf_lite_type) { + case kTfLiteFloat32: + return NPY_FLOAT32; + case kTfLiteFloat16: + return NPY_FLOAT16; + case kTfLiteFloat64: + return NPY_FLOAT64; + case kTfLiteInt32: + return NPY_INT32; + case kTfLiteUInt32: + return NPY_UINT32; + case kTfLiteUInt16: + return NPY_UINT16; + case kTfLiteInt16: + return NPY_INT16; + case kTfLiteUInt8: + return NPY_UINT8; + case kTfLiteInt4: + // TODO(b/246806634): NPY_INT4 currently doesn't exist + return NPY_BYTE; + case kTfLiteInt8: + return NPY_INT8; + case kTfLiteInt64: + return NPY_INT64; + case kTfLiteUInt64: + return NPY_UINT64; + case kTfLiteString: + return NPY_STRING; + case kTfLiteBool: + return NPY_BOOL; + case kTfLiteComplex64: + return NPY_COMPLEX64; + case kTfLiteComplex128: + return NPY_COMPLEX128; + case kTfLiteResource: + case kTfLiteVariant: + return NPY_OBJECT; + case kTfLiteNoType: + return NPY_NOTYPE; + // Avoid default so compiler errors created when new types are made. 
+ } + return NPY_NOTYPE; +} + +TfLiteType TfLiteTypeFromPyType(int py_type) { + switch (py_type) { + case NPY_FLOAT32: + return kTfLiteFloat32; + case NPY_FLOAT16: + return kTfLiteFloat16; + case NPY_FLOAT64: + return kTfLiteFloat64; + case NPY_INT32: + return kTfLiteInt32; + case NPY_UINT32: + return kTfLiteUInt32; + case NPY_INT16: + return kTfLiteInt16; + case NPY_UINT8: + return kTfLiteUInt8; + case NPY_INT8: + return kTfLiteInt8; + case NPY_INT64: + return kTfLiteInt64; + case NPY_UINT64: + return kTfLiteUInt64; + case NPY_BOOL: + return kTfLiteBool; + case NPY_OBJECT: + case NPY_STRING: + case NPY_UNICODE: + return kTfLiteString; + case NPY_COMPLEX64: + return kTfLiteComplex64; + case NPY_COMPLEX128: + return kTfLiteComplex128; + // Avoid default so compiler errors created when new types are made. + } + return kTfLiteNoType; +} + +TfLiteType TfLiteTypeFromPyArray(const PyArrayObject* array) { + int pyarray_type = PyArray_TYPE(array); + return TfLiteTypeFromPyType(pyarray_type); +} + +} // namespace tflite diff --git a/python/tflite_micro/numpy_utils.h b/python/tflite_micro/numpy_utils.h new file mode 100644 index 0000000..d9e0576 --- /dev/null +++ b/python/tflite_micro/numpy_utils.h @@ -0,0 +1,33 @@ +/* Copyright 2022 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/ +#ifndef TENSORFLOW_LITE_MICRO_TOOLS_PYTHON_INTERPRETER_NUMPY_UTILS_H_ +#define TENSORFLOW_LITE_MICRO_TOOLS_PYTHON_INTERPRETER_NUMPY_UTILS_H_ + +// Disallow Numpy 1.7 deprecated symbols. +#define NPY_NO_DEPRECATED_API NPY_1_7_API_VERSION +#include + +#include "tensorflow/lite/c/c_api_types.h" + +namespace tflite { + +void* ImportNumpy(); +int TfLiteTypeToPyArrayType(TfLiteType tf_lite_type); +TfLiteType TfLiteTypeFromPyType(int py_type); +TfLiteType TfLiteTypeFromPyArray(const PyArrayObject* array); + +} // namespace tflite + +#endif diff --git a/python/tflite_micro/pybind11_lib.h b/python/tflite_micro/pybind11_lib.h new file mode 100644 index 0000000..c0e46ae --- /dev/null +++ b/python/tflite_micro/pybind11_lib.h @@ -0,0 +1,64 @@ +/* Copyright 2019 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#include +#include + +#ifndef TENSORFLOW_LITE_MICRO_PYTHON_INTERPRETER_SRC_PYBIND11_LIB_H_ +#define TENSORFLOW_LITE_MICRO_PYTHON_INTERPRETER_SRC_PYBIND11_LIB_H_ + +namespace py = pybind11; + +namespace tflite { + +// Convert PyObject* to py::object with no error handling. + +inline py::object Pyo(PyObject* ptr) { + return py::reinterpret_steal(ptr); +} + +// Raise an exception if the PyErrOccurred flag is set or else return the Python +// object. 
+ +inline py::object PyoOrThrow(PyObject* ptr) { + if (PyErr_Occurred() || ptr == nullptr) { + throw py::error_already_set(); + } + return Pyo(ptr); +} + +[[noreturn]] inline void ThrowTypeError(const char* error_message) { + PyErr_SetString(PyExc_TypeError, error_message); + throw pybind11::error_already_set(); +} + +[[noreturn]] inline void ThrowValueError(const char* error_message) { + PyErr_SetString(PyExc_ValueError, error_message); + throw pybind11::error_already_set(); +} + +[[noreturn]] inline void ThrowIndexError(const char* error_message) { + PyErr_SetString(PyExc_IndexError, error_message); + throw pybind11::error_already_set(); +} + +[[noreturn]] inline void ThrowRuntimeError(const char* error_message) { + PyErr_SetString(PyExc_RuntimeError, error_message); + throw pybind11::error_already_set(); +} + +} // namespace tflite + +#endif // TENSORFLOW_LITE_MICRO_PYTHON_INTERPRETER_SRC_PYBIND11_LIB_H_ \ No newline at end of file diff --git a/python/tflite_micro/python_ops_resolver.cc b/python/tflite_micro/python_ops_resolver.cc new file mode 100644 index 0000000..37e864b --- /dev/null +++ b/python/tflite_micro/python_ops_resolver.cc @@ -0,0 +1,124 @@ +/* Copyright 2022 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/ + +#include "python/tflite_micro/python_ops_resolver.h" + +#include "tensorflow/lite/micro/kernels/micro_ops.h" + +namespace tflite { + +PythonOpsResolver::PythonOpsResolver() { + // Please keep this list of Builtin Operators in alphabetical order. + AddAbs(); + AddAdd(); + AddAddN(); + AddArgMax(); + AddArgMin(); + AddAssignVariable(); + AddAveragePool2D(); + AddBatchToSpaceNd(); + AddBroadcastArgs(); + AddBroadcastTo(); + AddCallOnce(); + AddCast(); + AddCeil(); + AddCircularBuffer(); + AddConcatenation(); + AddConv2D(); + AddCos(); + AddCumSum(); + AddDepthToSpace(); + AddDepthwiseConv2D(); + AddDequantize(); + AddDetectionPostprocess(); + AddDiv(); + AddElu(); + AddEqual(); + AddEthosU(); + AddExp(); + AddExpandDims(); + AddFill(); + AddFloor(); + AddFloorDiv(); + AddFloorMod(); + AddFullyConnected(); + AddGather(); + AddGatherNd(); + AddGreater(); + AddGreaterEqual(); + AddHardSwish(); + AddIf(); + AddL2Normalization(); + AddL2Pool2D(); + AddLeakyRelu(); + AddLess(); + AddLessEqual(); + AddLog(); + AddLogicalAnd(); + AddLogicalNot(); + AddLogicalOr(); + AddLogistic(); + AddLogSoftmax(); + AddMaxPool2D(); + AddMaximum(); + AddMean(); + AddMinimum(); + AddMirrorPad(); + AddMul(); + AddNeg(); + AddNotEqual(); + AddPack(); + AddPad(); + AddPadV2(); + AddPrelu(); + AddQuantize(); + AddReadVariable(); + AddReduceMax(); + AddRelu(); + AddRelu6(); + AddReshape(); + AddResizeBilinear(); + AddResizeNearestNeighbor(); + AddRound(); + AddRsqrt(); + AddSelectV2(); + AddShape(); + AddSin(); + AddSlice(); + AddSoftmax(); + AddSpaceToBatchNd(); + AddSpaceToDepth(); + AddSplit(); + AddSplitV(); + AddSqrt(); + AddSquare(); + AddSquaredDifference(); + AddSqueeze(); + AddStridedSlice(); + AddSub(); + AddSum(); + AddSvdf(); + AddTanh(); + AddTranspose(); + AddTransposeConv(); + AddUnidirectionalSequenceLSTM(); + AddUnpack(); + AddVarHandle(); + AddWhile(); + AddWindow(); + AddZerosLike(); +} + +} // 
namespace tflite diff --git a/python/tflite_micro/python_ops_resolver.h b/python/tflite_micro/python_ops_resolver.h new file mode 100644 index 0000000..ae0a756 --- /dev/null +++ b/python/tflite_micro/python_ops_resolver.h @@ -0,0 +1,36 @@ +/* Copyright 2018 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ +#ifndef TENSORFLOW_LITE_MICRO_PYTHON_OPS_RESOLVER_H_ +#define TENSORFLOW_LITE_MICRO_PYTHON_OPS_RESOLVER_H_ + +#include "tensorflow/lite/micro/compatibility.h" +#include "tensorflow/lite/micro/micro_mutable_op_resolver.h" + +namespace tflite { + +// PythonOpsResolver is used to register all the Ops for the TFLM Python +// interpreter. This is ok since code size is not a concern from Python and +// the goal is to be able to run any model supported by TFLM in a flexible way +class PythonOpsResolver : public MicroMutableOpResolver<200> { + public: + PythonOpsResolver(); + + private: + TF_LITE_REMOVE_VIRTUAL_DELETE +}; + +} // namespace tflite + +#endif // TENSORFLOW_LITE_MICRO_PYTHON_OPS_RESOLVER_H_ diff --git a/python/tflite_micro/python_utils.cc b/python/tflite_micro/python_utils.cc new file mode 100644 index 0000000..0fc9e47 --- /dev/null +++ b/python/tflite_micro/python_utils.cc @@ -0,0 +1,43 @@ +/* Copyright 2022 The TensorFlow Authors. All Rights Reserved. 
+ +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#include "python/tflite_micro/python_utils.h" + +#include + +namespace tflite { + +int ConvertFromPyString(PyObject* obj, char** data, Py_ssize_t* length) { +#if PY_MAJOR_VERSION >= 3 + if (PyUnicode_Check(obj)) { + // const_cast<> is for CPython 3.7 finally adding const to the API. + *data = const_cast(PyUnicode_AsUTF8AndSize(obj, length)); + return *data == nullptr ? -1 : 0; + } + return PyBytes_AsStringAndSize(obj, data, length); +#else + return PyString_AsStringAndSize(obj, data, length); +#endif +} + +PyObject* ConvertToPyString(const char* data, size_t length) { +#if PY_MAJOR_VERSION >= 3 + return PyBytes_FromStringAndSize(data, length); +#else + return PyString_FromStringAndSize(data, length); +#endif +} + +} // namespace tflite diff --git a/python/tflite_micro/python_utils.h b/python/tflite_micro/python_utils.h new file mode 100644 index 0000000..9532eaf --- /dev/null +++ b/python/tflite_micro/python_utils.h @@ -0,0 +1,31 @@ +/* Copyright 2022 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. 
+You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ +#ifndef TENSORFLOW_LITE_MICRO_TOOLS_PYTHON_INTERPRETER_PYTHON_UTILS_H_ +#define TENSORFLOW_LITE_MICRO_TOOLS_PYTHON_INTERPRETER_PYTHON_UTILS_H_ + +#include + +namespace tflite { + +struct PyDecrefDeleter { + void operator()(PyObject* p) const { Py_DECREF(p); } +}; + +int ConvertFromPyString(PyObject* obj, char** data, Py_ssize_t* length); +PyObject* ConvertToPyString(const char* data, size_t length); + +} // namespace tflite + +#endif diff --git a/python/tflite_micro/runtime.py b/python/tflite_micro/runtime.py new file mode 100644 index 0000000..06a62b0 --- /dev/null +++ b/python/tflite_micro/runtime.py @@ -0,0 +1,212 @@ +# Copyright 2022 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# ============================================================================== +"""Python package for TFLM Python Interpreter""" + +import os + +from tflite_micro.python.tflite_micro import _runtime +from tflite_micro.tensorflow.lite.tools import flatbuffer_utils + + +class Interpreter(object): + + def __init__(self, model_data, custom_op_registerers, arena_size): + if model_data is None: + raise ValueError("Model must not be None") + + if not isinstance(custom_op_registerers, list) or not all( + isinstance(s, str) for s in custom_op_registerers): + raise ValueError("Custom ops registerers must be a list of strings") + + # This is a heuristic to ensure that the arena is sufficiently sized. + if arena_size is None: + arena_size = len(model_data) * 10 + + # Some models make use of resource variables ops, get the count here + num_resource_variables = flatbuffer_utils.count_resource_variables( + model_data) + print("Number of resource variables the model uses = ", + num_resource_variables) + + self._interpreter = _runtime.InterpreterWrapper(model_data, + custom_op_registerers, + arena_size, + num_resource_variables) + + @classmethod + def from_file(self, model_path, custom_op_registerers=[], arena_size=None): + """Instantiates a TFLM interpreter from a model .tflite filepath. + + Args: + model_path: Filepath to the .tflite model + custom_op_registerers: List of strings, each of which is the name of a + custom OP registerer + arena_size: Tensor arena size in bytes. If unused, tensor arena size will + default to 10 times the model size. 
+ + Returns: + An Interpreter instance + """ + if model_path is None or not os.path.isfile(model_path): + raise ValueError("Invalid model file path") + + with open(model_path, "rb") as f: + model_data = f.read() + + return Interpreter(model_data, custom_op_registerers, arena_size) + + @classmethod + def from_bytes(self, model_data, custom_op_registerers=[], arena_size=None): + """Instantiates a TFLM interpreter from a model in byte array. + + Args: + model_data: Model in byte array format + custom_op_registerers: List of strings, each of which is the name of a + custom OP registerer + arena_size: Tensor arena size in bytes. If unused, tensor arena size will + default to 10 times the model size. + + Returns: + An Interpreter instance + """ + + return Interpreter(model_data, custom_op_registerers, arena_size) + + def print_allocations(self): + """Invoke the RecordingMicroAllocator to print the arena usage. + + This should be called after `invoke()`. + + Returns: + This method does not return anything, but It dumps the arena + usage to stderr. + """ + self._interpreter.PrintAllocations() + + def invoke(self): + """Invoke the TFLM interpreter to run an inference. + + This should be called after `set_input()`. + + Returns: + Status code of the C++ invoke function. A RuntimeError will be raised as + well upon any error. + """ + return self._interpreter.Invoke() + + def reset(self): + """Reset the model state to be what you would expect when the interpreter is first + + created. i.e. after Init and Prepare is called for the very first time. + + This should be called after invoke stateful model like LSTM. + + Returns: + Status code of the C++ invoke function. A RuntimeError will be raised as + well upon any error. + """ + return self._interpreter.Reset() + + def set_input(self, input_data, index): + """Set input data into input tensor. + + This should be called before `invoke()`. + + Args: + input_data: Input data in numpy array format. 
The numpy array format is + chosen to be consistent with TFLite interpreter. + index: An integer between 0 and the number of input tensors (exclusive) + consistent with the order defined in the list of inputs in the .tflite + model + """ + if input_data is None: + raise ValueError("Input data must not be None") + if index is None or index < 0: + raise ValueError("Index must be a non-negative integer") + + self._interpreter.SetInputTensor(input_data, index) + + def get_output(self, index): + """Get data from output tensor. + + The output data correspond to the most recent `invoke()`. + + Args: + index: An integer between 0 and the number of output tensors (exclusive) + consistent with the order defined in the list of outputs in the .tflite + model + + Returns: + Output data in numpy array format. The numpy array format is chosen to + be consistent with TFLite interpreter. + """ + if index is None or index < 0: + raise ValueError("Index must be a non-negative integer") + + return self._interpreter.GetOutputTensor(index) + + def get_input_details(self, index): + """Get input tensor information + + Args: + index (int): An integer between 0 and the number of output tensors + (exclusive) consistent with the order defined in the list of outputs + in the .tflite model + + Returns: + A dictionary from input index to tensor details where each item is a + dictionary with details about an input tensor. Each dictionary contains + the following fields that describe the tensor: + + `shape`: The shape of the tensor. + + `dtype`: The numpy data type (such as `np.int32` or `np.uint8`). + + `quantization_parameters`: A dictionary of parameters used to quantize + the tensor: + ~ `scales`: List of scales (one if per-tensor quantization). + ~ `zero_points`: List of zero_points (one if per-tensor quantization). + ~ `quantized_dimension`: Specifies the dimension of per-axis + quantization, in the case of multiple scales/zero_points. 
+ + """ + if index is None or index < 0: + raise ValueError("Index must be a non-negative integer") + + return self._interpreter.GetInputTensorDetails(index) + + def get_output_details(self, index): + """Get output tensor information + + Args: + index (int): An integer between 0 and the number of output tensors + (exclusive) consistent with the order defined in the list of outputs + in the .tflite model + + Returns: + A dictionary from input index to tensor details where each item is a + dictionary with details about an input tensor. Each dictionary contains + the following fields that describe the tensor: + + `shape`: The shape of the tensor. + + `dtype`: The numpy data type (such as `np.int32` or `np.uint8`). + + `quantization_parameters`: A dictionary of parameters used to quantize + the tensor: + ~ `scales`: List of scales (one if per-tensor quantization). + ~ `zero_points`: List of zero_points (one if per-tensor quantization). + ~ `quantized_dimension`: Specifies the dimension of per-axis + quantization, in the case of multiple scales/zero_points. + + """ + if index is None or index < 0: + raise ValueError("Index must be a non-negative integer") + + return self._interpreter.GetOutputTensorDetails(index) diff --git a/python/tflite_micro/runtime_test.py b/python/tflite_micro/runtime_test.py new file mode 100644 index 0000000..6a127fc --- /dev/null +++ b/python/tflite_micro/runtime_test.py @@ -0,0 +1,269 @@ +# Copyright 2022 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================== +"""Basic Python test for the TFLM interpreter""" + +# Steps to debug with gdb: +# 1. bazel build python/tflite_micro:runtime_test +# 2. gdb python +# 3. (gdb) run bazel-out/k8-fastbuild/bin/python/tflite_micro/runtime_test + +import gc +import weakref +import numpy as np +import tensorflow as tf + +from tensorflow.python.framework import test_util +from tensorflow.python.platform import test +from tflite_micro.tensorflow.lite.micro.testing import generate_test_models +from tflite_micro.python.tflite_micro import runtime + + +class ConvModelTests(test_util.TensorFlowTestCase): + filename = "/tmp/interpreter_test_conv_model.tflite" + input_shape = (1, 16, 16, 1) + output_shape = (1, 10) + + def testInitErrorHandling(self): + with self.assertRaisesWithPredicateMatch(ValueError, + "Invalid model file path"): + runtime.Interpreter.from_file("wrong.tflite") + + def testInput(self): + model_data = generate_test_models.generate_conv_model(False) + tflm_interpreter = runtime.Interpreter.from_bytes(model_data) + + data_x = np.random.randint(-127, 127, self.input_shape, dtype=np.int8) + tflm_interpreter.set_input(data_x, 0) + + # Test input tensor details + input_details = tflm_interpreter.get_input_details(0) + self.assertAllEqual(input_details["shape"], self.input_shape) + # Single channel int8 quantization + self.assertEqual(input_details["dtype"], np.int8) + self.assertEqual(len(input_details["quantization_parameters"]["scales"]), + 1) + self.assertEqual( + input_details["quantization_parameters"]["quantized_dimension"], 0) + # TODO(b/247808903): check only the types here to make sure that all arrays are properly set up. 
+ self.assertEqual(input_details["quantization_parameters"]["scales"].dtype, + np.float32) + self.assertEqual( + input_details["quantization_parameters"]["zero_points"].dtype, + np.int32) + + def testInputErrorHandling(self): + model_data = generate_test_models.generate_conv_model(True, self.filename) + tflm_interpreter = runtime.Interpreter.from_bytes(model_data) + + data_x = np.random.randint(-127, 127, self.input_shape, dtype=np.int8) + # Try to access out of bound data + with self.assertRaisesWithPredicateMatch(IndexError, + "Tensor is out of bound"): + tflm_interpreter.set_input(data_x, 1) + # Pass data with wrong dimension + with self.assertRaisesWithPredicateMatch(ValueError, + "Dimension mismatch."): + reshaped_data = data_x.reshape((1, 16, 16, 1, 1)) + tflm_interpreter.set_input(reshaped_data, 0) + # Pass data with wrong dimension in one axis + with self.assertRaisesWithPredicateMatch(ValueError, + "Dimension mismatch."): + reshaped_data = data_x.reshape((1, 2, 128, 1)) + tflm_interpreter.set_input(reshaped_data, 0) + # Pass data with wrong type + with self.assertRaisesWithPredicateMatch(ValueError, "Got value of type"): + float_data = data_x.astype(np.float32) + tflm_interpreter.set_input(float_data, 0) + # Reach wrong details + with self.assertRaisesWithPredicateMatch(IndexError, + "Tensor is out of bound"): + tflm_interpreter.get_input_details(1) + + def testOutput(self): + model_data = generate_test_models.generate_conv_model(True, self.filename) + tflm_interpreter = runtime.Interpreter.from_bytes(model_data) + + # Initial output values are all 0 + output = tflm_interpreter.get_output(0) + init_output = np.zeros(self.output_shape) + self.assertAllEqual(output, init_output) + + # Test the output tensor details + output_details = tflm_interpreter.get_output_details(0) + self.assertAllEqual(output_details["shape"], self.output_shape) + # Single channel int8 quantization + self.assertEqual(output_details["dtype"], np.int8) + 
self.assertEqual(len(output_details["quantization_parameters"]["scales"]), + 1) + self.assertEqual( + output_details["quantization_parameters"]["quantized_dimension"], 0) + # TODO(b/247808903): check only the types here to make sure that all arrays are properly set up. + self.assertEqual(output_details["quantization_parameters"]["scales"].dtype, + np.float32) + self.assertEqual( + output_details["quantization_parameters"]["zero_points"].dtype, + np.int32) + + def testOutputErrorHandling(self): + model_data = generate_test_models.generate_conv_model(True, self.filename) + tflm_interpreter = runtime.Interpreter.from_bytes(model_data) + # Try to access out of bound data + with self.assertRaisesWithPredicateMatch(IndexError, + "Tensor is out of bound"): + tflm_interpreter.get_output(1) + with self.assertRaisesWithPredicateMatch(IndexError, + "Tensor is out of bound"): + tflm_interpreter.get_output_details(1) + + def testCompareWithTFLite(self): + model_data = generate_test_models.generate_conv_model(True, self.filename) + + # TFLM interpreter + tflm_interpreter = runtime.Interpreter.from_bytes(model_data) + + # TFLite interpreter + tflite_interpreter = tf.lite.Interpreter( + model_content=model_data, + experimental_op_resolver_type=\ + tf.lite.experimental.OpResolverType.BUILTIN_REF) + tflite_interpreter.allocate_tensors() + tflite_output_details = tflite_interpreter.get_output_details()[0] + tflite_input_details = tflite_interpreter.get_input_details()[0] + + num_steps = 100 + for i in range(0, num_steps): + # Create random input + data_x = np.random.randint(-127, 127, self.input_shape, dtype=np.int8) + + # Run inference on TFLite + tflite_interpreter.set_tensor(tflite_input_details["index"], data_x) + tflite_interpreter.invoke() + tflite_output = tflite_interpreter.get_tensor( + tflite_output_details["index"]) + + # Run inference on TFLM + tflm_interpreter.set_input(data_x, 0) + tflm_interpreter.invoke() + tflm_output = tflm_interpreter.get_output(0) + + # Check that 
TFLM output has correct metadata + self.assertDTypeEqual(tflm_output, np.int8) + self.assertEqual(tflm_output.shape, self.output_shape) + self.assertAllEqual(tflite_output, tflm_output) + + def _helperModelFromFileAndBufferEqual(self): + model_data = generate_test_models.generate_conv_model(True, self.filename) + + file_interpreter = runtime.Interpreter.from_file(self.filename) + bytes_interpreter = runtime.Interpreter.from_bytes(model_data) + + num_steps = 100 + for i in range(0, num_steps): + data_x = np.random.randint(-127, 127, self.input_shape, dtype=np.int8) + + file_interpreter.set_input(data_x, 0) + file_interpreter.invoke() + file_output = file_interpreter.get_output(0) + + bytes_interpreter.set_input(data_x, 0) + bytes_interpreter.invoke() + bytes_output = bytes_interpreter.get_output(0) + + self.assertDTypeEqual(file_output, np.int8) + self.assertEqual(file_output.shape, self.output_shape) + self.assertDTypeEqual(bytes_output, np.int8) + self.assertEqual(bytes_output.shape, self.output_shape) + # Same interpreter and model, should expect all equal + self.assertAllEqual(file_output, bytes_output) + + def testModelFromFileAndBufferEqual(self): + self._helperModelFromFileAndBufferEqual() + + def testMultipleInterpreters(self): + model_data = generate_test_models.generate_conv_model(False) + + interpreters = [ + runtime.Interpreter.from_bytes(model_data) for i in range(10) + ] + + num_steps = 100 + for i in range(0, num_steps): + data_x = np.random.randint(-127, 127, self.input_shape, dtype=np.int8) + + prev_output = None + for interpreter in interpreters: + interpreter.set_input(data_x, 0) + interpreter.invoke() + output = interpreter.get_output(0) + if prev_output is None: + prev_output = output + + self.assertDTypeEqual(output, np.int8) + self.assertEqual(output.shape, self.output_shape) + self.assertAllEqual(output, prev_output) + + def _helperNoop(self): + pass + + def _helperOutputTensorMemoryLeak(self): + interpreter = 
runtime.Interpreter.from_file(self.filename) + int_ref = weakref.finalize(interpreter, self._helperNoop) + some_output = interpreter.get_output(0) + output_ref = weakref.finalize(some_output, self._helperNoop) + return (int_ref, output_ref) + + def testOutputTensorMemoryLeak(self): + generate_test_models.generate_conv_model(True, self.filename) + + int_ref, output_ref = self._helperOutputTensorMemoryLeak() + # Output obtained in the helper function should be out of scope now, perform + # garbage collection and check that the weakref is dead. If it's still + # alive, it means that the output's reference count isn't 0 by garbage + # collection. Since it's already out of scope, this means a memory leak. + # + # An example of how this could be true is if there's an additional + # reference increment (e.g. `Py_INCREF` or `py::cast`` instead of + # `py::reinterpret_steal``) somewhere in the C++ code. + gc.collect() + self.assertFalse(int_ref.alive) + self.assertFalse(output_ref.alive) + + # TODO(b/240162715): Add a test case to register a custom OP + + def testMalformedCustomOps(self): + model_data = generate_test_models.generate_conv_model(False) + custom_op_registerers = [("wrong", "format")] + with self.assertRaisesWithPredicateMatch(ValueError, + "must be a list of strings"): + interpreter = runtime.Interpreter.from_bytes(model_data, + custom_op_registerers) + + custom_op_registerers = "WrongFormat" + with self.assertRaisesWithPredicateMatch(ValueError, + "must be a list of strings"): + interpreter = runtime.Interpreter.from_bytes(model_data, + custom_op_registerers) + + def testNonExistentCustomOps(self): + model_data = generate_test_models.generate_conv_model(False) + custom_op_registerers = ["SomeRandomOp"] + with self.assertRaisesWithPredicateMatch( + RuntimeError, "TFLM could not register custom op via SomeRandomOp"): + interpreter = runtime.Interpreter.from_bytes(model_data, + custom_op_registerers) + + +if __name__ == "__main__": + test.main() diff --git 
a/python/tflite_micro/shared_library.h b/python/tflite_micro/shared_library.h new file mode 100644 index 0000000..bc57c88 --- /dev/null +++ b/python/tflite_micro/shared_library.h @@ -0,0 +1,40 @@ +/* Copyright 2022 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +// This file is forked from TFLite's implementation in +// //depot/google3/third_party/tensorflow/lite/shared_library.h and contains a +// subset of it that's required by the TFLM interpreter. The Windows' ifdef is +// removed because TFLM doesn't support Windows. 
+ +#ifndef TENSORFLOW_LITE_MICRO_TOOLS_PYTHON_INTERPRETER_SHARED_LIBRARY_H_ +#define TENSORFLOW_LITE_MICRO_TOOLS_PYTHON_INTERPRETER_SHARED_LIBRARY_H_ + +#include + +namespace tflite { + +// SharedLibrary provides a uniform set of APIs across different platforms to +// handle dynamic library operations +class SharedLibrary { + public: + static inline void* GetSymbol(const char* symbol) { + return dlsym(RTLD_DEFAULT, symbol); + } + static inline const char* GetError() { return dlerror(); } +}; + +} // namespace tflite + +#endif // TENSORFLOW_LITE_MICRO_TOOLS_PYTHON_INTERPRETER_SHARED_LIBRARY_H_ diff --git a/python/tflite_micro/signal/BUILD b/python/tflite_micro/signal/BUILD new file mode 100644 index 0000000..c7cbb40 --- /dev/null +++ b/python/tflite_micro/signal/BUILD @@ -0,0 +1,57 @@ +load("//python/tflite_micro/signal:tflm_signal.bzl", "py_tflm_signal_library") +load("//tensorflow:extra_rules.bzl", "tflm_signal_friends") +load("@tflm_pip_deps//:requirements.bzl", "requirement") + +package( + licenses = ["notice"], +) + +package_group( + name = "signal_friends", + packages = tflm_signal_friends(), +) + +cc_library( + name = "ops_lib", + visibility = [":signal_friends"], + deps = [ + ":window_op_cc", + ], +) + +py_library( + name = "ops", + srcs = [ + "__init__.py", + "ops/__init__.py", + ], + srcs_version = "PY3", + deps = [ + ":window_op", + ], +) + +py_tflm_signal_library( + name = "window_op", + srcs = ["ops/window_op.py"], + cc_op_defs = ["//signal/tensorflow_core/ops:window_op"], + cc_op_kernels = [ + "//signal/tensorflow_core/kernels:window_kernel", + ], +) + +py_test( + name = "window_op_test", + srcs = ["ops/window_op_test.py"], + data = [ + "//python/tflite_micro/signal/ops/testdata:window_test1.txt", + ], + python_version = "PY3", + srcs_version = "PY3", + deps = [ + ":window_op", + "//python/tflite_micro/signal/utils:util", + requirement("numpy"), + requirement("tensorflow-cpu"), + ], +) diff --git a/python/tflite_micro/signal/__init__.py 
b/python/tflite_micro/signal/__init__.py new file mode 100644 index 0000000..a0cbc61 --- /dev/null +++ b/python/tflite_micro/signal/__init__.py @@ -0,0 +1 @@ +# Empty file required by setuptools.find_packages to recognize this as a package diff --git a/python/tflite_micro/signal/ops/__init__.py b/python/tflite_micro/signal/ops/__init__.py new file mode 100644 index 0000000..b7e12b3 --- /dev/null +++ b/python/tflite_micro/signal/ops/__init__.py @@ -0,0 +1 @@ +# Empty file required by setuptools.find_packages to recognize this as a package diff --git a/python/tflite_micro/signal/ops/testdata/BUILD b/python/tflite_micro/signal/ops/testdata/BUILD new file mode 100644 index 0000000..fc565e9 --- /dev/null +++ b/python/tflite_micro/signal/ops/testdata/BUILD @@ -0,0 +1,11 @@ +# Description: +# Test data for the signal library + +package( + default_visibility = ["//signal:__subpackages__"], + licenses = ["notice"], +) + +exports_files([ + "window_test1.txt", +]) diff --git a/python/tflite_micro/signal/ops/testdata/window_test1.txt b/python/tflite_micro/signal/ops/testdata/window_test1.txt new file mode 100644 index 0000000..47af9f4 --- /dev/null +++ b/python/tflite_micro/signal/ops/testdata/window_test1.txt @@ -0,0 +1,367 @@ +hann int16 12 +1 1 0 -2 -1 2 2 -1 -3 0 3 3 -2 -2 1 3 1 -3 -2 2 2 -1 -1 1 2 1 -2 0 1 0 -2 -2 2 3 -2 -3 1 4 1 -5 -4 1 3 -1 -3 -1 3 3 -2 -3 1 2 -1 -3 -1 2 2 0 -1 0 0 -1 -1 -1 2 2 -1 -1 0 1 0 -2 -1 2 2 -1 -2 -1 1 2 -1 -2 1 2 0 -2 1 2 -1 -5 -1 17 32 31 18 7 5 9 10 8 11 19 26 23 12 0 -11 -25 -34 -31 -19 -14 -22 -29 -11 24 42 27 -1 -11 2 14 10 4 11 26 32 22 6 -11 -22 -30 -31 -30 -26 -26 -27 -33 -45 -59 -63 -58 -50 -48 -48 -40 -26 -16 -17 -23 -21 -11 0 9 18 28 37 44 55 65 66 56 40 22 4 -13 -28 -33 -29 -25 -28 -35 -40 -38 -31 -30 -36 -40 -32 -13 7 18 20 28 45 59 61 54 50 57 72 89 101 97 80 65 65 65 50 29 31 50 53 25 -3 -3 10 8 -13 -23 -15 -10 -18 -22 -18 -22 -39 -45 -27 -7 -11 -16 5 26 1 -50 -59 -15 5 -38 -81 -54 0 -15 -86 -108 -46 -3 -50 -113 -85 -4 7 -57 -77 
-6 53 10 -60 -37 44 56 -12 -33 41 99 52 -18 6 77 62 -28 -55 20 73 20 -38 12 95 68 -51 -94 -17 43 -7 -63 -7 88 78 -19 -47 34 84 11 -85 -73 14 47 8 -2 58 99 55 -15 -24 9 1 -52 -70 -20 34 30 -11 -27 -8 -3 -40 -81 -80 -38 4 18 21 35 53 48 16 -13 -16 -5 -4 -6 5 29 39 24 11 20 33 17 -27 -57 -55 -41 -36 -36 -24 -5 -4 -26 -53 -67 -69 -77 -79 -56 -8 40 63 66 70 83 88 72 51 53 77 91 84 73 85 108 115 96 80 78 75 53 28 23 34 38 33 32 35 25 -3 -31 -48 -63 -86 -105 -106 -92 -85 -90 -100 -115 -138 -161 -170 -168 -168 -179 -178 -157 -127 -110 -107 -102 +0 0 0 -1 -1 0 0 -1 -1 0 0 0 -1 -1 0 0 0 -1 -1 0 0 -1 -1 0 0 0 -1 0 0 0 -1 -1 0 0 -1 -1 0 0 0 -1 -1 0 0 -1 -1 -1 0 0 -1 -1 0 0 -1 -1 -1 0 0 0 -1 0 0 -1 -1 -1 0 0 -1 -1 0 0 0 -1 -1 0 0 -1 -1 -1 0 0 -1 -1 0 0 0 -1 0 0 -1 -3 -1 7 14 13 8 3 2 4 4 3 5 9 13 12 6 0 -7 -14 -20 -18 -12 -9 -14 -18 -7 14 26 17 -1 -8 1 9 6 2 7 18 22 15 4 -8 -17 -23 -24 -23 -20 -20 -21 -26 -36 -47 -51 -47 -41 -40 -40 -34 -22 -14 -15 -20 -18 -10 0 7 15 24 32 39 49 58 59 51 36 20 3 -13 -27 -31 -28 -24 -27 -34 -39 -37 -30 -29 -35 -39 -32 -13 6 17 19 27 44 58 60 53 49 56 71 88 100 96 79 64 64 64 49 29 31 49 52 24 -3 -3 9 7 -13 -23 -15 -10 -18 -22 -18 -22 -39 -45 -27 -7 -11 -16 4 25 0 -49 -57 -15 4 -36 -77 -51 0 -14 -80 -100 -43 -3 -46 -103 -77 -4 6 -51 -68 -6 46 8 -52 -32 37 47 -11 -28 33 81 42 -15 4 61 49 -22 -43 15 55 15 -29 8 70 49 -37 -68 -13 30 -5 -44 -5 59 51 -13 -31 21 53 6 -53 -45 8 28 4 -2 33 56 30 -9 -14 4 0 -28 -36 -11 16 14 -6 -13 -4 -2 -18 -36 -35 -17 1 7 8 13 20 18 5 -5 -6 -2 -2 -3 1 9 12 7 3 5 9 4 -8 -15 -15 -11 -9 -9 -6 -2 -1 -6 -11 -14 -14 -15 -15 -10 -2 6 9 9 10 11 11 9 6 6 8 9 8 7 7 9 9 7 6 5 5 3 1 1 1 1 1 1 1 0 -1 -1 -2 -2 -2 -3 -2 -2 -2 -2 -2 -2 -2 -2 -1 -1 -1 -1 -1 -1 -1 -1 -1 0 +66 56 40 22 4 -13 -28 -33 -29 -25 -28 -35 -40 -38 -31 -30 -36 -40 -32 -13 7 18 20 28 45 59 61 54 50 57 72 89 101 97 80 65 65 65 50 29 31 50 53 25 -3 -3 10 8 -13 -23 -15 -10 -18 -22 -18 -22 -39 -45 -27 -7 -11 -16 5 26 1 -50 -59 -15 5 -38 -81 -54 0 -15 -86 -108 -46 -3 
-50 -113 -85 -4 7 -57 -77 -6 53 10 -60 -37 44 56 -12 -33 41 99 52 -18 6 77 62 -28 -55 20 73 20 -38 12 95 68 -51 -94 -17 43 -7 -63 -7 88 78 -19 -47 34 84 11 -85 -73 14 47 8 -2 58 99 55 -15 -24 9 1 -52 -70 -20 34 30 -11 -27 -8 -3 -40 -81 -80 -38 4 18 21 35 53 48 16 -13 -16 -5 -4 -6 5 29 39 24 11 20 33 17 -27 -57 -55 -41 -36 -36 -24 -5 -4 -26 -53 -67 -69 -77 -79 -56 -8 40 63 66 70 83 88 72 51 53 77 91 84 73 85 108 115 96 80 78 75 53 28 23 34 38 33 32 35 25 -3 -31 -48 -63 -86 -105 -106 -92 -85 -90 -100 -115 -138 -161 -170 -168 -168 -179 -178 -157 -127 -110 -107 -102 -88 -78 -78 -82 -75 -61 -44 -28 -17 -8 2 12 21 22 18 19 28 39 43 44 47 47 40 27 16 12 13 14 10 -4 -29 -48 -48 -31 -19 -17 -19 -12 -5 -12 -27 -31 -21 -4 5 -1 -9 -10 -2 14 32 43 52 60 70 73 66 54 52 69 98 125 142 146 151 164 182 195 203 207 218 238 263 293 315 327 325 320 314 307 292 275 268 270 269 251 226 203 181 157 130 112 103 93 72 46 21 1 -23 -49 -80 -116 -156 -196 -225 -245 -264 -292 -333 -375 -409 -433 -456 -486 -516 -536 -548 -550 -544 -539 -537 -539 -534 -516 -483 -447 -419 -392 -355 -306 -257 -217 -178 -129 -69 -10 43 96 155 220 278 324 357 378 391 401 407 411 411 410 401 384 359 335 313 287 257 229 206 185 +0 0 0 0 0 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 0 0 0 0 1 2 2 2 2 3 4 5 6 6 5 4 5 5 4 2 3 5 5 2 -1 -1 1 1 -2 -4 -3 -2 -3 -4 -4 -4 -8 -9 -6 -2 -3 -4 1 5 0 -13 -15 -4 1 -11 -23 -16 0 -5 -27 -34 -15 -1 -17 -39 -30 -2 2 -22 -30 -3 20 4 -25 -16 18 24 -6 -15 18 45 24 -9 2 38 31 -15 -29 10 39 10 -21 6 53 39 -30 -56 -11 26 -5 -40 -5 55 50 -13 -31 22 56 7 -59 -51 9 33 5 -2 42 73 40 -12 -19 6 0 -41 -55 -16 27 24 -9 -23 -7 -3 -34 -68 -68 -33 3 15 18 30 46 42 14 -12 -15 -5 -4 -6 4 26 36 22 10 18 31 16 -26 -55 -53 -40 -35 -35 -24 -5 -4 -26 -52 -66 -68 -76 -78 -56 -8 39 62 65 69 82 87 71 50 52 76 90 83 73 85 107 114 95 79 77 74 52 27 22 33 37 32 31 34 24 -3 -31 -47 -62 -84 -103 -103 -89 -82 -87 -96 -110 -132 -153 -161 -158 -158 -167 -166 -146 -117 -101 -98 -93 -80 -71 -70 -73 -67 -54 -39 -25 -15 -7 1 
10 17 18 14 15 22 31 34 35 37 36 31 20 12 9 9 10 7 -3 -21 -35 -35 -22 -14 -12 -13 -9 -4 -8 -18 -20 -14 -3 3 -1 -6 -6 -2 8 18 24 29 33 38 39 34 28 26 34 48 61 68 68 70 74 81 86 87 88 91 97 105 115 121 124 120 116 112 107 99 91 87 86 83 76 67 59 51 43 35 29 26 23 17 10 4 0 -5 -11 -17 -23 -30 -37 -41 -43 -44 -47 -52 -56 -59 -60 -61 -62 -64 -63 -62 -60 -56 -53 -51 -48 -45 -42 -37 -32 -29 -25 -22 -18 -14 -11 -9 -6 -3 -1 1 2 4 5 6 6 6 6 5 5 4 3 3 2 2 1 1 0 0 0 0 0 0 0 +-4 -6 5 29 39 24 11 20 33 17 -27 -57 -55 -41 -36 -36 -24 -5 -4 -26 -53 -67 -69 -77 -79 -56 -8 40 63 66 70 83 88 72 51 53 77 91 84 73 85 108 115 96 80 78 75 53 28 23 34 38 33 32 35 25 -3 -31 -48 -63 -86 -105 -106 -92 -85 -90 -100 -115 -138 -161 -170 -168 -168 -179 -178 -157 -127 -110 -107 -102 -88 -78 -78 -82 -75 -61 -44 -28 -17 -8 2 12 21 22 18 19 28 39 43 44 47 47 40 27 16 12 13 14 10 -4 -29 -48 -48 -31 -19 -17 -19 -12 -5 -12 -27 -31 -21 -4 5 -1 -9 -10 -2 14 32 43 52 60 70 73 66 54 52 69 98 125 142 146 151 164 182 195 203 207 218 238 263 293 315 327 325 320 314 307 292 275 268 270 269 251 226 203 181 157 130 112 103 93 72 46 21 1 -23 -49 -80 -116 -156 -196 -225 -245 -264 -292 -333 -375 -409 -433 -456 -486 -516 -536 -548 -550 -544 -539 -537 -539 -534 -516 -483 -447 -419 -392 -355 -306 -257 -217 -178 -129 -69 -10 43 96 155 220 278 324 357 378 391 401 407 411 411 410 401 384 359 335 313 287 257 229 206 185 163 140 123 109 98 89 87 83 75 64 62 67 63 51 44 55 73 82 79 85 102 116 116 112 116 124 128 123 118 107 91 75 74 74 53 7 -40 -69 -87 -113 -144 -165 -178 -201 -238 -273 -302 -333 -375 -411 -418 -402 -392 -401 -409 -401 -387 -383 -384 -366 -327 -284 -251 -216 -171 -122 -82 -42 8 66 110 133 152 184 226 259 283 304 320 323 310 294 290 294 288 262 225 187 158 130 99 61 22 -11 -34 -46 -58 -81 -115 -149 -171 -179 -180 -185 -189 -179 -155 -135 -130 -127 -107 -68 -31 -14 -6 16 53 87 108 123 150 185 214 226 226 222 224 228 232 233 230 229 235 246 244 224 188 155 124 88 44 5 -20 -36 -59 -96 -139 -180 -215 -251 -290 
-328 -357 -371 -379 -381 -383 -385 -382 -374 +0 -1 0 0 0 0 0 0 0 0 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -2 -2 -3 -3 -3 -3 -1 1 3 3 3 4 5 4 3 4 6 7 7 6 8 11 12 10 9 9 9 7 3 3 5 5 5 5 6 4 -1 -6 -10 -13 -18 -23 -24 -22 -21 -22 -25 -30 -37 -44 -48 -48 -49 -54 -55 -50 -41 -36 -36 -35 -31 -28 -29 -31 -29 -24 -18 -12 -7 -4 0 5 9 9 8 8 13 18 20 21 23 24 20 14 8 6 7 7 5 -3 -17 -29 -29 -19 -12 -11 -12 -8 -4 -8 -18 -21 -15 -3 3 -1 -7 -8 -2 10 23 31 38 45 53 55 50 42 40 54 78 100 114 119 124 135 151 163 171 176 186 205 228 255 276 288 288 285 281 276 264 250 245 248 248 233 210 190 170 148 123 106 98 89 69 44 20 0 -23 -48 -79 -114 -154 -193 -222 -242 -262 -290 -331 -373 -407 -432 -455 -485 -515 -536 -548 -550 -544 -539 -537 -539 -534 -516 -483 -447 -418 -391 -354 -305 -256 -216 -177 -128 -69 -10 42 94 151 214 270 314 345 365 376 385 389 392 390 388 378 360 336 312 290 265 236 209 187 167 147 125 109 96 86 78 75 71 64 54 52 56 52 42 36 45 59 66 63 67 80 90 90 86 88 93 96 91 87 78 65 53 52 51 36 4 -28 -47 -58 -75 -94 -107 -114 -127 -148 -168 -183 -200 -222 -240 -241 -228 -220 -221 -223 -215 -205 -199 -197 -185 -163 -139 -121 -103 -80 -56 -37 -19 3 28 45 54 61 72 87 98 105 110 114 112 105 98 94 93 89 79 67 54 44 35 26 16 5 -3 -9 -11 -14 -18 -25 -32 -35 -36 -35 -35 -34 -31 -26 -22 -21 -19 -16 -10 -5 -2 -1 1 5 9 11 12 13 16 18 18 17 15 15 14 13 13 12 11 10 10 9 8 6 4 3 2 1 0 -1 -1 -1 -2 -2 -2 -2 -2 -2 -2 -2 -1 -1 -1 -1 -1 -1 0 +292 275 268 270 269 251 226 203 181 157 130 112 103 93 72 46 21 1 -23 -49 -80 -116 -156 -196 -225 -245 -264 -292 -333 -375 -409 -433 -456 -486 -516 -536 -548 -550 -544 -539 -537 -539 -534 -516 -483 -447 -419 -392 -355 -306 -257 -217 -178 -129 -69 -10 43 96 155 220 278 324 357 378 391 401 407 411 411 410 401 384 359 335 313 287 257 229 206 185 163 140 123 109 98 89 87 83 75 64 62 67 63 51 44 55 73 82 79 85 102 116 116 112 116 124 128 123 118 107 91 75 74 74 53 7 -40 -69 -87 -113 -144 -165 -178 -201 -238 -273 -302 -333 -375 -411 -418 -402 -392 -401 -409 -401 -387 
-383 -384 -366 -327 -284 -251 -216 -171 -122 -82 -42 8 66 110 133 152 184 226 259 283 304 320 323 310 294 290 294 288 262 225 187 158 130 99 61 22 -11 -34 -46 -58 -81 -115 -149 -171 -179 -180 -185 -189 -179 -155 -135 -130 -127 -107 -68 -31 -14 -6 16 53 87 108 123 150 185 214 226 226 222 224 228 232 233 230 229 235 246 244 224 188 155 124 88 44 5 -20 -36 -59 -96 -139 -180 -215 -251 -290 -328 -357 -371 -379 -381 -383 -385 -382 -374 -361 -345 -326 -304 -283 -263 -239 -195 -134 -75 -34 -2 41 96 140 153 156 180 235 294 328 347 368 396 417 422 422 426 424 415 408 409 407 392 367 335 288 221 148 94 57 15 -42 -101 -156 -210 -256 -270 -263 -289 -364 -428 -424 -390 -405 -458 -456 -375 -321 -360 -405 -345 -228 -190 -232 -205 -58 65 42 -19 55 224 295 223 193 313 440 409 295 294 407 446 345 265 324 414 380 269 243 303 301 190 100 123 158 87 -34 -68 -18 -18 -111 -190 -182 -159 -207 -284 -291 -237 -225 -282 -322 -291 -242 -238 -261 -248 -203 -180 -191 -191 -156 -122 -113 -109 -81 -44 -32 -37 -29 1 28 35 29 31 43 61 88 118 135 129 115 119 139 152 139 111 92 85 80 62 37 25 31 43 39 13 -13 -18 -5 6 +0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 -1 -2 -3 -4 -5 -7 -9 -10 -12 -14 -17 -20 -24 -27 -30 -33 -37 -41 -44 -47 -49 -51 -53 -56 -58 -58 -57 -55 -54 -53 -50 -45 -39 -34 -29 -22 -12 -2 7 18 30 44 58 69 79 86 92 97 101 105 107 110 110 108 104 99 95 89 82 74 68 63 56 49 44 40 37 34 34 33 30 26 26 29 27 22 20 25 34 39 38 42 51 59 60 59 62 67 70 68 66 61 52 44 44 44 32 4 -26 -44 -56 -74 -95 -110 -120 -137 -164 -190 -213 -237 -269 -298 -306 -297 -292 -302 -310 -307 -299 -298 -302 -290 -261 -229 -204 -177 -141 -101 -69 -36 6 56 94 114 131 160 198 228 251 271 287 291 281 267 265 270 266 243 209 175 148 122 93 57 20 -11 -33 -45 -57 -79 -112 -146 -168 -176 -177 -182 -187 -177 -154 -134 -129 -127 -107 -68 -31 -14 -6 15 52 86 107 123 150 184 213 225 225 221 223 227 230 231 228 227 232 243 240 220 184 152 121 85 42 4 -20 -35 -57 -93 -134 -172 -205 -238 -274 -309 -335 -346 -352 -353 -353 -353 -349 -340 
-326 -310 -291 -270 -250 -231 -209 -170 -116 -65 -29 -2 34 80 115 125 127 145 188 234 259 272 286 305 318 319 317 317 312 303 295 293 288 275 255 230 195 148 98 61 37 9 -27 -64 -97 -129 -155 -162 -156 -169 -210 -243 -237 -215 -221 -246 -241 -195 -165 -182 -201 -169 -110 -90 -108 -94 -27 28 18 -9 22 91 118 88 74 118 163 148 105 102 139 149 112 84 101 126 113 78 68 83 81 49 25 30 38 20 -8 -16 -4 -4 -23 -38 -35 -30 -37 -49 -49 -39 -35 -43 -47 -41 -33 -31 -32 -30 -23 -20 -20 -19 -15 -11 -10 -9 -7 -4 -3 -3 -2 0 1 1 1 1 1 2 2 3 3 3 2 2 2 2 2 1 1 0 0 0 0 0 0 0 0 0 -1 -1 -1 0 +310 294 290 294 288 262 225 187 158 130 99 61 22 -11 -34 -46 -58 -81 -115 -149 -171 -179 -180 -185 -189 -179 -155 -135 -130 -127 -107 -68 -31 -14 -6 16 53 87 108 123 150 185 214 226 226 222 224 228 232 233 230 229 235 246 244 224 188 155 124 88 44 5 -20 -36 -59 -96 -139 -180 -215 -251 -290 -328 -357 -371 -379 -381 -383 -385 -382 -374 -361 -345 -326 -304 -283 -263 -239 -195 -134 -75 -34 -2 41 96 140 153 156 180 235 294 328 347 368 396 417 422 422 426 424 415 408 409 407 392 367 335 288 221 148 94 57 15 -42 -101 -156 -210 -256 -270 -263 -289 -364 -428 -424 -390 -405 -458 -456 -375 -321 -360 -405 -345 -228 -190 -232 -205 -58 65 42 -19 55 224 295 223 193 313 440 409 295 294 407 446 345 265 324 414 380 269 243 303 301 190 100 123 158 87 -34 -68 -18 -18 -111 -190 -182 -159 -207 -284 -291 -237 -225 -282 -322 -291 -242 -238 -261 -248 -203 -180 -191 -191 -156 -122 -113 -109 -81 -44 -32 -37 -29 1 28 35 29 31 43 61 88 118 135 129 115 119 139 152 139 111 92 85 80 62 37 25 31 43 39 13 -13 -18 -5 6 6 0 -3 -3 -8 -16 -26 -35 -43 -46 -47 -53 -61 -62 -51 -41 -47 -63 -68 -53 -40 -44 -57 -62 -59 -61 -67 -70 -67 -68 -72 -58 -30 -13 -22 -40 -33 -7 9 3 -4 8 31 46 47 47 48 44 38 37 39 40 42 62 90 109 107 98 103 116 119 113 112 123 137 147 150 145 127 100 77 65 51 30 4 -14 -28 -45 -72 -92 -102 -114 -137 -160 -169 -161 -154 -158 -168 -172 -167 -160 -157 -157 -154 -148 -133 -119 -113 -120 -131 -138 -132 -118 -102 -89 -77 -63 
-48 -40 -51 -67 -76 -66 -44 -20 5 35 61 74 74 80 107 142 164 165 161 164 172 179 182 185 194 201 202 197 182 157 127 101 90 89 91 92 99 115 120 94 34 -32 -78 -101 -110 -110 -83 -26 42 86 93 68 +0 0 0 0 0 0 0 0 0 0 0 0 0 -1 -1 -1 -1 -2 -3 -4 -5 -6 -6 -7 -7 -8 -7 -7 -7 -7 -7 -5 -2 -1 -1 1 4 7 9 11 14 18 22 25 26 27 28 30 32 33 34 35 37 40 42 39 34 29 24 17 9 1 -5 -9 -14 -24 -35 -47 -57 -68 -81 -93 -104 -111 -116 -119 -123 -126 -128 -128 -127 -124 -119 -114 -108 -102 -95 -79 -55 -32 -15 -1 18 43 63 71 73 86 114 145 165 177 191 208 223 229 232 238 240 238 237 241 243 237 224 207 180 140 95 61 37 9 -29 -69 -108 -146 -180 -192 -189 -210 -266 -316 -316 -293 -307 -351 -352 -292 -252 -285 -323 -278 -185 -155 -191 -170 -49 54 35 -17 47 193 255 194 169 276 390 365 264 265 369 406 315 243 299 384 354 251 228 285 285 180 95 117 151 83 -33 -66 -18 -18 -109 -187 -179 -157 -204 -281 -288 -235 -224 -281 -321 -290 -242 -238 -261 -248 -203 -180 -191 -191 -156 -122 -113 -109 -81 -44 -32 -37 -29 0 27 34 28 30 42 60 86 115 132 126 112 115 134 146 133 106 88 81 76 58 34 23 29 40 36 12 -12 -17 -5 5 5 0 -3 -3 -8 -15 -23 -31 -38 -40 -40 -45 -52 -52 -43 -34 -39 -52 -55 -43 -32 -35 -45 -48 -46 -47 -51 -53 -50 -50 -53 -42 -22 -10 -16 -28 -23 -5 5 1 -3 5 19 28 29 28 29 26 22 21 22 22 23 34 48 58 56 50 52 58 59 55 53 58 63 67 67 63 55 42 32 26 20 11 1 -6 -11 -17 -26 -33 -35 -39 -45 -52 -53 -50 -46 -46 -48 -48 -46 -42 -41 -40 -38 -35 -31 -27 -25 -26 -27 -28 -26 -22 -19 -16 -13 -11 -8 -6 -8 -10 -11 -9 -6 -3 0 3 6 7 6 7 9 11 12 11 10 10 10 10 9 9 8 8 7 7 6 4 3 2 2 1 1 1 1 1 1 0 0 -1 -1 -1 -1 -1 -1 -1 0 0 0 0 +407 446 345 265 324 414 380 269 243 303 301 190 100 123 158 87 -34 -68 -18 -18 -111 -190 -182 -159 -207 -284 -291 -237 -225 -282 -322 -291 -242 -238 -261 -248 -203 -180 -191 -191 -156 -122 -113 -109 -81 -44 -32 -37 -29 1 28 35 29 31 43 61 88 118 135 129 115 119 139 152 139 111 92 85 80 62 37 25 31 43 39 13 -13 -18 -5 6 6 0 -3 -3 -8 -16 -26 -35 -43 -46 -47 -53 -61 -62 -51 -41 -47 -63 -68 -53 
-40 -44 -57 -62 -59 -61 -67 -70 -67 -68 -72 -58 -30 -13 -22 -40 -33 -7 9 3 -4 8 31 46 47 47 48 44 38 37 39 40 42 62 90 109 107 98 103 116 119 113 112 123 137 147 150 145 127 100 77 65 51 30 4 -14 -28 -45 -72 -92 -102 -114 -137 -160 -169 -161 -154 -158 -168 -172 -167 -160 -157 -157 -154 -148 -133 -119 -113 -120 -131 -138 -132 -118 -102 -89 -77 -63 -48 -40 -51 -67 -76 -66 -44 -20 5 35 61 74 74 80 107 142 164 165 161 164 172 179 182 185 194 201 202 197 182 157 127 101 90 89 91 92 99 115 120 94 34 -32 -78 -101 -110 -110 -83 -26 42 86 93 68 22 -36 -99 -139 -130 -73 13 98 165 206 223 205 151 71 1 -28 -7 42 101 175 269 354 375 311 197 85 -1 -67 -111 -110 -55 27 97 138 149 126 55 -59 -178 -261 -295 -294 -267 -215 -145 -77 -39 -46 -92 -167 -251 -324 -368 -373 -344 -284 -203 -116 -46 -12 -17 -39 -67 -97 -129 -151 -143 -103 -39 32 92 132 142 126 95 59 14 -43 -97 -129 -129 -106 -73 -38 -12 -8 -33 -79 -124 -158 -181 -198 -205 -184 -130 -57 11 58 80 77 53 13 -29 -56 -58 -35 5 57 118 182 229 242 216 168 116 77 54 47 61 92 135 174 198 197 174 138 99 56 12 -20 -25 -2 36 75 119 156 171 143 88 43 25 17 4 -4 22 79 135 158 151 136 121 105 76 45 22 17 33 62 88 97 +0 0 0 0 0 0 1 0 1 1 2 1 0 1 2 1 -1 -2 -1 -1 -3 -6 -6 -6 -8 -12 -13 -11 -12 -15 -19 -18 -16 -17 -19 -19 -17 -16 -17 -18 -16 -13 -13 -13 -10 -6 -5 -5 -5 0 4 5 4 5 7 10 16 22 26 26 24 25 30 34 32 26 22 21 20 16 10 7 9 12 11 4 -5 -6 -2 2 2 0 -2 -2 -4 -7 -11 -15 -18 -20 -21 -23 -27 -28 -24 -20 -23 -31 -34 -27 -21 -23 -30 -33 -32 -34 -37 -40 -38 -40 -42 -35 -18 -8 -14 -25 -21 -5 5 1 -3 5 20 31 32 32 33 31 27 26 28 29 31 46 68 83 82 76 80 91 94 90 90 100 112 121 125 121 107 85 65 56 44 26 3 -13 -25 -41 -65 -83 -93 -104 -126 -148 -157 -150 -144 -148 -158 -163 -159 -153 -150 -151 -148 -143 -129 -116 -110 -117 -128 -136 -130 -117 -101 -88 -77 -63 -48 -40 -51 -67 -76 -66 -44 -20 4 34 60 74 74 79 106 141 163 164 160 163 171 177 180 183 192 198 199 194 178 154 124 98 87 86 88 88 95 110 114 89 32 -31 -74 -95 -103 -103 -78 -25 38 78 84 61 19 
-33 -89 -124 -115 -65 11 84 142 176 189 173 126 59 0 -24 -6 34 81 139 212 277 291 239 150 64 -1 -50 -82 -81 -40 19 68 96 103 86 37 -40 -119 -172 -193 -190 -170 -136 -90 -48 -24 -28 -55 -98 -145 -184 -206 -206 -187 -153 -108 -61 -24 -7 -9 -20 -33 -46 -60 -69 -65 -46 -17 13 38 54 57 49 36 22 5 -16 -35 -46 -45 -36 -24 -13 -4 -3 -10 -23 -36 -44 -49 -52 -53 -46 -32 -14 2 12 17 16 10 2 -6 -11 -11 -7 0 9 18 27 32 33 28 21 14 9 6 5 6 9 12 15 16 15 13 9 6 3 0 -2 -2 -1 1 3 4 5 5 4 2 1 0 0 0 -1 0 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 +-102 -114 -137 -160 -169 -161 -154 -158 -168 -172 -167 -160 -157 -157 -154 -148 -133 -119 -113 -120 -131 -138 -132 -118 -102 -89 -77 -63 -48 -40 -51 -67 -76 -66 -44 -20 5 35 61 74 74 80 107 142 164 165 161 164 172 179 182 185 194 201 202 197 182 157 127 101 90 89 91 92 99 115 120 94 34 -32 -78 -101 -110 -110 -83 -26 42 86 93 68 22 -36 -99 -139 -130 -73 13 98 165 206 223 205 151 71 1 -28 -7 42 101 175 269 354 375 311 197 85 -1 -67 -111 -110 -55 27 97 138 149 126 55 -59 -178 -261 -295 -294 -267 -215 -145 -77 -39 -46 -92 -167 -251 -324 -368 -373 -344 -284 -203 -116 -46 -12 -17 -39 -67 -97 -129 -151 -143 -103 -39 32 92 132 142 126 95 59 14 -43 -97 -129 -129 -106 -73 -38 -12 -8 -33 -79 -124 -158 -181 -198 -205 -184 -130 -57 11 58 80 77 53 13 -29 -56 -58 -35 5 57 118 182 229 242 216 168 116 77 54 47 61 92 135 174 198 197 174 138 99 56 12 -20 -25 -2 36 75 119 156 171 143 88 43 25 17 4 -4 22 79 135 158 151 136 121 105 76 45 22 17 33 62 88 97 90 69 40 4 -33 -67 -97 -128 -156 -166 -149 -118 -101 -112 -147 -185 -214 -232 -242 -248 -253 -246 -226 -196 -165 -150 -150 -155 -152 -141 -124 -110 -95 -78 -53 -34 -27 -33 -35 -25 -11 -8 -11 -10 0 13 21 27 42 64 79 80 72 66 69 82 96 105 107 105 108 116 124 126 122 117 116 116 111 101 94 95 97 84 53 20 7 12 12 0 -14 -19 -22 -44 -81 -100 -85 -55 -39 -43 -42 -24 -6 -8 -27 -42 -38 -22 -5 2 0 -3 1 16 28 34 28 19 10 -3 -16 -21 -8 16 35 46 53 66 79 82 75 72 79 90 96 95 97 102 102 90 65 38 11 -14 -35 -50 -57 -60 -67 -77 -85 -94 
-106 -118 -128 -132 -130 -125 -115 -94 -60 -26 -2 7 8 12 22 31 42 58 +0 -1 -1 -1 -1 -1 -1 -1 -1 -1 -2 -2 -2 -2 -2 -3 -3 -3 -3 -3 -4 -4 -5 -4 -4 -4 -4 -3 -3 -3 -3 -5 -5 -5 -4 -2 0 2 5 6 7 8 11 15 19 20 20 21 23 25 27 28 31 33 34 35 33 29 24 20 18 19 20 21 23 27 29 24 8 -9 -22 -29 -32 -33 -26 -9 13 28 31 23 7 -13 -37 -52 -50 -29 5 39 67 86 94 88 66 31 0 -14 -4 20 49 86 135 181 194 164 105 46 -1 -38 -63 -64 -33 15 57 83 91 78 34 -38 -115 -170 -195 -196 -180 -147 -100 -54 -28 -33 -66 -121 -184 -239 -274 -281 -261 -218 -157 -91 -37 -10 -14 -32 -55 -80 -107 -125 -120 -87 -33 27 78 113 123 109 83 52 12 -39 -88 -117 -117 -97 -67 -35 -12 -8 -31 -74 -117 -150 -172 -189 -196 -177 -125 -55 10 56 77 75 51 12 -29 -56 -58 -35 4 56 117 180 227 240 215 167 115 76 53 46 60 92 135 173 197 196 173 137 98 55 11 -20 -25 -2 35 74 117 153 168 140 86 42 24 16 3 -4 21 75 129 150 143 128 114 98 71 41 20 15 30 56 80 87 81 61 35 3 -30 -59 -85 -111 -135 -143 -127 -100 -85 -94 -122 -153 -175 -188 -195 -198 -201 -193 -176 -152 -127 -114 -113 -116 -113 -103 -90 -79 -68 -55 -37 -24 -19 -23 -24 -17 -8 -6 -7 -7 0 7 12 16 24 37 45 45 40 36 37 43 50 54 54 52 53 56 59 59 56 53 52 51 48 43 39 38 39 33 20 7 2 4 4 0 -5 -7 -8 -15 -26 -31 -26 -16 -12 -12 -12 -7 -2 -2 -7 -10 -9 -5 -2 0 0 -1 0 2 4 5 4 3 1 -1 -3 -3 -2 2 4 5 5 7 8 8 6 6 6 7 7 6 6 6 6 5 3 1 0 -1 -2 -2 -2 -2 -2 -2 -2 -2 -2 -2 -2 -2 -2 -2 -1 -1 -1 -1 -1 0 0 0 0 0 0 0 +-129 -106 -73 -38 -12 -8 -33 -79 -124 -158 -181 -198 -205 -184 -130 -57 11 58 80 77 53 13 -29 -56 -58 -35 5 57 118 182 229 242 216 168 116 77 54 47 61 92 135 174 198 197 174 138 99 56 12 -20 -25 -2 36 75 119 156 171 143 88 43 25 17 4 -4 22 79 135 158 151 136 121 105 76 45 22 17 33 62 88 97 90 69 40 4 -33 -67 -97 -128 -156 -166 -149 -118 -101 -112 -147 -185 -214 -232 -242 -248 -253 -246 -226 -196 -165 -150 -150 -155 -152 -141 -124 -110 -95 -78 -53 -34 -27 -33 -35 -25 -11 -8 -11 -10 0 13 21 27 42 64 79 80 72 66 69 82 96 105 107 105 108 116 124 126 122 117 116 116 111 101 94 95 97 84 53 
20 7 12 12 0 -14 -19 -22 -44 -81 -100 -85 -55 -39 -43 -42 -24 -6 -8 -27 -42 -38 -22 -5 2 0 -3 1 16 28 34 28 19 10 -3 -16 -21 -8 16 35 46 53 66 79 82 75 72 79 90 96 95 97 102 102 90 65 38 11 -14 -35 -50 -57 -60 -67 -77 -85 -94 -106 -118 -128 -132 -130 -125 -115 -94 -60 -26 -2 7 8 12 22 31 42 58 86 122 154 174 184 190 192 187 178 174 178 183 180 175 171 171 162 141 112 85 53 9 -40 -80 -103 -117 -136 -162 -187 -203 -220 -244 -271 -292 -295 -286 -270 -257 -256 -266 -273 -263 -237 -212 -196 -186 -166 -133 -97 -63 -31 -2 22 39 58 79 98 109 113 119 126 130 131 136 149 163 164 148 122 97 83 78 80 80 82 92 110 122 117 96 67 35 6 -21 -38 -42 -36 -31 -34 -42 -55 -72 -93 -125 -155 -169 -158 -132 -112 -104 -94 -75 -52 -37 -27 -15 4 25 45 69 98 127 150 169 186 205 223 239 248 245 228 205 188 178 170 160 148 133 112 88 68 51 29 0 -23 -31 -29 -30 -42 -57 -71 -90 -123 -163 -195 -217 -234 -250 -265 -275 -282 -290 -291 -286 -276 -271 -268 -262 -251 -241 +0 -1 -1 -1 -1 -1 -1 -1 -1 -1 -2 -2 -2 -3 -2 -1 0 1 1 1 1 0 -1 -2 -3 -2 0 2 5 9 12 14 13 11 8 5 4 3 5 8 13 17 21 22 20 16 12 7 1 -3 -4 -1 5 12 20 27 31 27 17 8 5 3 0 -1 5 19 33 40 39 36 33 29 22 13 6 5 10 20 29 33 31 24 14 1 -13 -26 -39 -52 -64 -70 -64 -52 -45 -51 -68 -86 -102 -112 -119 -124 -128 -126 -118 -104 -89 -82 -83 -87 -87 -81 -73 -65 -57 -48 -33 -22 -17 -21 -23 -17 -8 -6 -8 -7 0 9 14 19 30 46 57 59 53 49 52 62 74 81 83 83 86 93 100 102 100 96 96 97 93 85 80 81 84 73 46 17 6 10 10 0 -13 -18 -21 -41 -75 -93 -80 -52 -37 -41 -40 -23 -6 -8 -26 -41 -37 -22 -5 1 0 -3 0 15 27 33 27 18 9 -3 -16 -21 -8 15 34 45 52 65 78 82 75 71 78 89 95 94 96 101 101 89 64 37 10 -14 -35 -50 -57 -59 -66 -76 -83 -92 -103 -115 -124 -127 -125 -120 -110 -90 -57 -25 -2 6 7 11 20 28 38 52 77 109 137 154 162 166 167 162 153 148 151 154 151 145 141 140 132 114 89 67 41 7 -32 -62 -79 -89 -103 -121 -138 -149 -160 -175 -193 -206 -206 -197 -184 -173 -171 -176 -178 -170 -151 -134 -122 -114 -101 -80 -58 -37 -18 -2 12 21 31 42 51 56 57 59 62 63 62 64 69 74 73 65 52 
41 34 31 32 31 31 34 40 44 41 33 22 11 1 -7 -12 -13 -11 -10 -10 -12 -15 -19 -24 -32 -38 -40 -37 -30 -25 -22 -20 -15 -10 -7 -5 -3 0 4 6 10 14 17 19 21 22 24 25 25 25 23 21 18 15 14 12 11 10 8 6 4 3 2 1 0 -1 -2 -1 -1 -2 -2 -2 -2 -3 -3 -3 -3 -3 -3 -3 -2 -2 -2 -1 -1 -1 -1 -1 -1 -1 0 +-14 -19 -22 -44 -81 -100 -85 -55 -39 -43 -42 -24 -6 -8 -27 -42 -38 -22 -5 2 0 -3 1 16 28 34 28 19 10 -3 -16 -21 -8 16 35 46 53 66 79 82 75 72 79 90 96 95 97 102 102 90 65 38 11 -14 -35 -50 -57 -60 -67 -77 -85 -94 -106 -118 -128 -132 -130 -125 -115 -94 -60 -26 -2 7 8 12 22 31 42 58 86 122 154 174 184 190 192 187 178 174 178 183 180 175 171 171 162 141 112 85 53 9 -40 -80 -103 -117 -136 -162 -187 -203 -220 -244 -271 -292 -295 -286 -270 -257 -256 -266 -273 -263 -237 -212 -196 -186 -166 -133 -97 -63 -31 -2 22 39 58 79 98 109 113 119 126 130 131 136 149 163 164 148 122 97 83 78 80 80 82 92 110 122 117 96 67 35 6 -21 -38 -42 -36 -31 -34 -42 -55 -72 -93 -125 -155 -169 -158 -132 -112 -104 -94 -75 -52 -37 -27 -15 4 25 45 69 98 127 150 169 186 205 223 239 248 245 228 205 188 178 170 160 148 133 112 88 68 51 29 0 -23 -31 -29 -30 -42 -57 -71 -90 -123 -163 -195 -217 -234 -250 -265 -275 -282 -290 -291 -286 -276 -271 -268 -262 -251 -241 -238 -230 -208 -171 -133 -97 -62 -24 10 37 58 80 102 119 134 149 171 198 224 244 250 248 242 231 220 205 188 168 145 123 109 101 94 77 53 27 3 -21 -44 -63 -71 -72 -74 -78 -83 -87 -96 -114 -139 -162 -178 -186 -186 -178 -164 -147 -133 -119 -102 -80 -57 -40 -26 -6 26 65 100 119 125 130 135 134 129 127 131 134 137 141 152 167 173 171 165 161 156 154 161 178 192 192 183 177 175 169 155 138 125 113 97 83 80 80 69 43 9 -18 -28 -31 -33 -42 -57 -72 -79 -83 -87 -95 -108 -119 -123 -117 -105 -97 -98 -106 -109 -103 -93 -87 -85 -84 -85 -92 -100 -104 -103 -102 -106 -110 -114 -119 -125 -128 -119 -104 -92 -86 -82 -69 -51 -37 -31 -27 -21 -7 14 37 51 50 41 36 +0 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 0 0 -1 0 0 1 1 1 0 0 -1 -1 -2 -1 1 2 3 4 5 7 7 7 7 8 10 11 11 12 13 14 12 9 
5 1 -3 -7 -9 -11 -12 -14 -16 -18 -21 -24 -27 -31 -32 -33 -32 -31 -26 -17 -8 -1 2 2 3 7 10 14 19 30 43 56 64 69 73 75 75 73 72 75 79 79 78 78 79 76 67 54 42 26 4 -21 -43 -56 -64 -75 -91 -106 -117 -129 -144 -162 -177 -181 -178 -170 -164 -165 -174 -180 -176 -160 -145 -135 -130 -117 -95 -70 -46 -23 -2 16 29 43 60 75 84 88 94 100 104 106 110 122 134 136 124 103 82 71 67 69 69 71 81 97 108 105 86 60 31 5 -20 -36 -39 -34 -30 -32 -40 -53 -69 -89 -120 -149 -163 -153 -128 -109 -102 -92 -74 -52 -37 -27 -15 3 24 44 68 97 126 149 168 185 204 222 238 247 245 228 204 187 177 169 159 147 132 111 87 67 50 28 0 -23 -31 -29 -30 -42 -56 -70 -88 -120 -158 -188 -209 -225 -239 -252 -261 -267 -273 -273 -267 -257 -251 -247 -240 -229 -219 -215 -207 -186 -152 -118 -86 -55 -21 8 31 49 67 85 99 110 122 139 160 179 194 197 194 188 178 168 155 141 125 106 89 78 72 66 54 36 18 2 -15 -30 -42 -47 -47 -48 -49 -52 -54 -59 -69 -82 -95 -103 -106 -104 -99 -90 -79 -71 -62 -53 -41 -29 -20 -13 -3 12 29 44 52 54 55 56 54 51 50 50 50 50 51 54 58 59 57 53 51 48 46 47 51 54 53 49 46 44 42 37 32 28 25 20 17 16 15 13 7 1 -4 -5 -5 -6 -7 -9 -10 -11 -11 -11 -12 -13 -13 -13 -12 -10 -9 -9 -9 -9 -8 -7 -6 -6 -5 -5 -5 -5 -5 -5 -4 -4 -4 -4 -4 -3 -3 -3 -2 -2 -2 -1 -1 -1 -1 -1 -1 -1 -1 0 0 0 0 0 0 +67 35 6 -21 -38 -42 -36 -31 -34 -42 -55 -72 -93 -125 -155 -169 -158 -132 -112 -104 -94 -75 -52 -37 -27 -15 4 25 45 69 98 127 150 169 186 205 223 239 248 245 228 205 188 178 170 160 148 133 112 88 68 51 29 0 -23 -31 -29 -30 -42 -57 -71 -90 -123 -163 -195 -217 -234 -250 -265 -275 -282 -290 -291 -286 -276 -271 -268 -262 -251 -241 -238 -230 -208 -171 -133 -97 -62 -24 10 37 58 80 102 119 134 149 171 198 224 244 250 248 242 231 220 205 188 168 145 123 109 101 94 77 53 27 3 -21 -44 -63 -71 -72 -74 -78 -83 -87 -96 -114 -139 -162 -178 -186 -186 -178 -164 -147 -133 -119 -102 -80 -57 -40 -26 -6 26 65 100 119 125 130 135 134 129 127 131 134 137 141 152 167 173 171 165 161 156 154 161 178 192 192 183 177 175 169 155 138 125 113 97 83 80 80 
69 43 9 -18 -28 -31 -33 -42 -57 -72 -79 -83 -87 -95 -108 -119 -123 -117 -105 -97 -98 -106 -109 -103 -93 -87 -85 -84 -85 -92 -100 -104 -103 -102 -106 -110 -114 -119 -125 -128 -119 -104 -92 -86 -82 -69 -51 -37 -31 -27 -21 -7 14 37 51 50 41 36 37 32 8 -21 -37 -33 -29 -42 -65 -83 -89 -98 -124 -158 -184 -197 -205 -212 -213 -205 -191 -177 -166 -149 -133 -116 -96 -69 -39 -12 9 31 61 95 127 153 172 180 180 175 173 179 193 211 220 219 216 218 231 247 257 261 260 260 263 275 291 300 291 268 244 230 221 206 185 165 153 148 134 109 77 52 39 31 15 -2 -13 -19 -26 -44 -61 -67 -67 -77 -95 -111 -121 -133 -152 -171 -176 -174 -177 -190 -208 -220 -227 -228 -226 -227 -242 -264 -277 -268 -243 -223 -208 -191 -161 -128 -97 -78 -63 -46 -24 5 33 59 79 94 102 104 102 101 98 86 59 30 7 -8 -24 -43 -58 -72 -96 -134 -169 -186 -188 -193 -208 -223 -230 -227 -222 -210 -193 -180 -180 -190 -190 -177 -160 -143 -120 -81 -37 -8 2 11 +0 0 0 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -2 -3 -3 -3 -3 -3 -3 -3 -3 -2 -2 -1 -1 0 1 2 3 5 7 9 11 13 15 17 20 21 22 22 21 20 19 19 19 18 17 15 12 10 7 4 0 -4 -6 -6 -6 -9 -12 -15 -20 -28 -38 -46 -53 -59 -64 -70 -75 -79 -83 -85 -86 -85 -85 -86 -86 -84 -83 -84 -83 -76 -64 -51 -38 -25 -10 4 15 24 34 44 53 61 69 80 95 109 121 125 126 125 121 117 111 103 93 82 70 63 59 56 46 32 16 1 -14 -29 -42 -47 -48 -50 -54 -58 -61 -68 -81 -100 -118 -131 -138 -139 -134 -125 -113 -103 -93 -80 -64 -46 -33 -22 -5 21 53 83 99 105 110 115 115 111 110 114 118 121 125 136 150 156 155 151 148 144 142 150 166 180 181 173 168 166 161 148 132 120 109 94 80 78 78 67 42 8 -18 -28 -31 -33 -42 -57 -72 -79 -83 -87 -95 -108 -119 -123 -117 -105 -97 -98 -106 -109 -103 -93 -87 -85 -84 -85 -92 -100 -103 -102 -101 -105 -108 -112 -117 -122 -125 -116 -101 -89 -83 -79 -66 -49 -36 -30 -26 -20 -7 12 34 46 45 37 32 33 28 7 -19 -33 -29 -26 -37 -57 -72 -76 -83 -105 -132 -153 -162 -168 -172 -172 -164 -152 -139 -130 -115 -102 -88 -73 -52 -29 -9 6 22 43 66 88 105 117 121 119 115 112 115 122 132 136 134 130 130 136 143 147 147 145 
143 142 147 153 155 148 135 121 112 106 97 85 75 68 65 58 46 32 21 15 12 5 -1 -5 -7 -10 -16 -21 -23 -22 -25 -30 -34 -37 -39 -44 -48 -48 -46 -46 -48 -51 -52 -52 -51 -49 -48 -50 -52 -53 -50 -44 -39 -35 -31 -25 -20 -14 -11 -9 -6 -3 0 3 6 8 9 9 9 8 8 7 6 3 1 0 -1 -2 -3 -3 -4 -4 -5 -6 -6 -6 -5 -5 -5 -5 -4 -4 -3 -3 -2 -2 -2 -2 -1 -1 -1 -1 -1 -1 -1 0 0 +173 171 165 161 156 154 161 178 192 192 183 177 175 169 155 138 125 113 97 83 80 80 69 43 9 -18 -28 -31 -33 -42 -57 -72 -79 -83 -87 -95 -108 -119 -123 -117 -105 -97 -98 -106 -109 -103 -93 -87 -85 -84 -85 -92 -100 -104 -103 -102 -106 -110 -114 -119 -125 -128 -119 -104 -92 -86 -82 -69 -51 -37 -31 -27 -21 -7 14 37 51 50 41 36 37 32 8 -21 -37 -33 -29 -42 -65 -83 -89 -98 -124 -158 -184 -197 -205 -212 -213 -205 -191 -177 -166 -149 -133 -116 -96 -69 -39 -12 9 31 61 95 127 153 172 180 180 175 173 179 193 211 220 219 216 218 231 247 257 261 260 260 263 275 291 300 291 268 244 230 221 206 185 165 153 148 134 109 77 52 39 31 15 -2 -13 -19 -26 -44 -61 -67 -67 -77 -95 -111 -121 -133 -152 -171 -176 -174 -177 -190 -208 -220 -227 -228 -226 -227 -242 -264 -277 -268 -243 -223 -208 -191 -161 -128 -97 -78 -63 -46 -24 5 33 59 79 94 102 104 102 101 98 86 59 30 7 -8 -24 -43 -58 -72 -96 -134 -169 -186 -188 -193 -208 -223 -230 -227 -222 -210 -193 -180 -180 -190 -190 -177 -160 -143 -120 -81 -37 -8 2 11 30 51 65 72 86 114 149 179 201 218 231 239 240 237 243 260 283 302 308 304 296 286 269 242 208 179 159 147 134 114 90 69 52 34 9 -18 -37 -45 -50 -59 -71 -76 -71 -59 -46 -39 -40 -45 -47 -41 -28 -12 5 24 40 51 58 67 80 95 113 129 137 131 118 110 115 123 120 110 101 99 95 87 78 67 53 28 -5 -34 -55 -71 -91 -118 -145 -165 -180 -196 -220 -249 -275 -293 -310 -327 -342 -344 -332 -311 -285 -264 -250 -235 -211 -175 -131 -88 -46 -4 33 60 79 98 126 153 170 173 170 166 162 155 146 132 111 91 74 59 42 29 27 29 24 3 -18 -25 -18 -12 -13 -17 -19 -21 -25 -30 -35 -41 -47 -49 -44 -34 -29 -26 -19 -12 -12 -20 -24 -13 4 18 19 17 +0 0 0 0 0 0 0 0 0 1 1 1 1 1 2 2 2 2 2 1 2 2 
2 1 0 -1 -2 -2 -2 -3 -4 -5 -6 -6 -7 -8 -9 -11 -11 -11 -11 -10 -11 -12 -13 -13 -12 -12 -12 -13 -13 -15 -17 -18 -18 -19 -20 -21 -23 -25 -27 -28 -27 -24 -22 -21 -21 -18 -14 -10 -9 -8 -7 -3 4 11 16 16 13 12 12 11 2 -8 -15 -13 -12 -17 -27 -35 -38 -43 -55 -71 -85 -92 -97 -102 -105 -102 -97 -91 -87 -79 -72 -64 -53 -39 -23 -7 5 18 36 57 77 94 108 114 115 113 113 119 129 143 151 152 151 154 165 178 187 192 193 195 199 210 224 233 228 211 194 184 178 168 152 136 127 124 113 92 65 44 33 27 13 -2 -12 -17 -24 -40 -56 -62 -62 -71 -88 -104 -113 -125 -143 -162 -167 -166 -169 -182 -200 -212 -220 -221 -220 -222 -237 -259 -272 -264 -240 -221 -206 -190 -160 -128 -97 -78 -63 -46 -24 4 32 58 78 94 102 103 101 100 97 85 58 29 6 -8 -24 -43 -58 -72 -95 -133 -167 -183 -185 -189 -203 -217 -223 -220 -214 -202 -185 -172 -172 -180 -180 -167 -150 -134 -112 -75 -35 -8 1 9 27 45 58 63 75 100 130 155 173 186 196 202 201 197 201 213 230 244 247 242 234 224 209 186 159 135 119 109 98 83 65 49 36 23 6 -13 -26 -31 -34 -39 -47 -49 -46 -38 -29 -24 -25 -27 -28 -24 -17 -7 2 13 21 27 30 34 40 47 56 62 65 61 54 50 51 54 52 46 42 40 38 34 30 25 19 10 -2 -12 -19 -24 -30 -38 -46 -51 -54 -57 -63 -69 -75 -77 -80 -82 -83 -81 -76 -70 -62 -56 -51 -47 -41 -33 -24 -16 -8 -1 5 8 11 13 16 19 20 20 19 17 16 15 13 11 9 7 5 4 2 1 1 1 1 0 -1 -2 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 0 0 0 0 +-61 -67 -67 -77 -95 -111 -121 -133 -152 -171 -176 -174 -177 -190 -208 -220 -227 -228 -226 -227 -242 -264 -277 -268 -243 -223 -208 -191 -161 -128 -97 -78 -63 -46 -24 5 33 59 79 94 102 104 102 101 98 86 59 30 7 -8 -24 -43 -58 -72 -96 -134 -169 -186 -188 -193 -208 -223 -230 -227 -222 -210 -193 -180 -180 -190 -190 -177 -160 -143 -120 -81 -37 -8 2 11 30 51 65 72 86 114 149 179 201 218 231 239 240 237 243 260 283 302 308 304 296 286 269 242 208 179 159 147 134 114 90 69 52 34 9 -18 -37 -45 -50 -59 -71 -76 -71 -59 -46 -39 -40 -45 -47 -41 -28 -12 5 24 40 51 58 67 80 95 113 129 137 131 118 110 115 123 120 110 101 99 95 
87 78 67 53 28 -5 -34 -55 -71 -91 -118 -145 -165 -180 -196 -220 -249 -275 -293 -310 -327 -342 -344 -332 -311 -285 -264 -250 -235 -211 -175 -131 -88 -46 -4 33 60 79 98 126 153 170 173 170 166 162 155 146 132 111 91 74 59 42 29 27 29 24 3 -18 -25 -18 -12 -13 -17 -19 -21 -25 -30 -35 -41 -47 -49 -44 -34 -29 -26 -19 -12 -12 -20 -24 -13 4 18 19 17 18 25 29 29 28 39 58 74 77 78 88 109 124 126 123 129 142 154 156 147 137 126 115 103 89 72 59 52 51 49 41 27 10 -10 -29 -44 -52 -57 -62 -62 -56 -50 -52 -61 -66 -61 -52 -45 -34 -14 5 6 -8 -16 -6 14 29 39 52 74 96 109 110 112 117 123 121 112 106 109 117 116 92 55 17 -11 -36 -66 -97 -131 -165 -199 -225 -244 -259 -276 -289 -290 -283 -275 -273 -268 -253 -226 -194 -166 -144 -127 -110 -90 -67 -45 -25 -6 6 11 12 15 24 28 23 19 29 51 71 79 77 78 80 78 63 44 27 12 -5 -23 -33 -33 -27 -28 -41 -55 -61 -58 -56 -58 -61 -56 -44 -29 -14 -5 -7 -12 -10 1 12 20 32 54 77 89 95 114 151 182 182 165 152 164 +0 -1 -1 -1 -1 -1 -1 -1 -1 -1 -2 -2 -2 -3 -3 -4 -4 -5 -5 -6 -7 -8 -9 -10 -9 -9 -9 -9 -8 -7 -6 -5 -5 -4 -2 0 2 4 7 8 9 10 10 11 11 10 7 3 0 -2 -4 -7 -10 -12 -17 -24 -32 -36 -37 -40 -44 -49 -52 -52 -53 -51 -49 -47 -48 -52 -53 -51 -47 -43 -37 -26 -12 -3 0 3 10 18 23 26 32 44 58 72 82 91 98 103 105 106 110 120 133 145 150 150 149 146 139 127 111 97 87 82 75 65 52 40 31 20 5 -12 -24 -29 -33 -39 -47 -51 -48 -41 -32 -28 -29 -32 -34 -30 -21 -9 3 18 30 38 44 52 62 75 90 103 110 106 96 91 95 103 101 93 86 85 82 75 68 59 47 24 -5 -31 -50 -65 -84 -109 -135 -154 -168 -184 -207 -235 -261 -279 -296 -314 -329 -332 -321 -302 -277 -258 -245 -231 -208 -173 -130 -87 -46 -4 32 59 78 97 125 152 169 172 169 165 161 155 146 131 110 90 73 58 41 28 26 28 23 2 -18 -25 -18 -12 -13 -17 -19 -21 -25 -30 -34 -40 -46 -48 -43 -33 -28 -25 -18 -12 -12 -19 -23 -13 3 16 17 15 16 22 25 25 24 34 50 64 66 66 74 92 104 105 101 106 115 124 125 117 108 98 89 79 68 54 44 38 37 35 29 19 7 -8 -21 -31 -36 -39 -42 -41 -37 -33 -34 -39 -41 -38 -32 -27 -21 -9 2 3 -5 -9 -4 7 15 20 26 37 47 53 52 52 
54 56 54 49 45 46 48 47 37 21 6 -5 -14 -25 -35 -46 -57 -67 -74 -78 -81 -85 -87 -85 -81 -77 -74 -71 -65 -57 -47 -40 -33 -29 -24 -19 -14 -9 -5 -2 1 1 1 2 3 4 3 2 3 6 8 9 8 8 8 7 5 3 2 0 -1 -2 -3 -3 -2 -2 -3 -3 -3 -3 -3 -3 -3 -2 -2 -1 -1 -1 -1 -1 -1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +-55 -71 -91 -118 -145 -165 -180 -196 -220 -249 -275 -293 -310 -327 -342 -344 -332 -311 -285 -264 -250 -235 -211 -175 -131 -88 -46 -4 33 60 79 98 126 153 170 173 170 166 162 155 146 132 111 91 74 59 42 29 27 29 24 3 -18 -25 -18 -12 -13 -17 -19 -21 -25 -30 -35 -41 -47 -49 -44 -34 -29 -26 -19 -12 -12 -20 -24 -13 4 18 19 17 18 25 29 29 28 39 58 74 77 78 88 109 124 126 123 129 142 154 156 147 137 126 115 103 89 72 59 52 51 49 41 27 10 -10 -29 -44 -52 -57 -62 -62 -56 -50 -52 -61 -66 -61 -52 -45 -34 -14 5 6 -8 -16 -6 14 29 39 52 74 96 109 110 112 117 123 121 112 106 109 117 116 92 55 17 -11 -36 -66 -97 -131 -165 -199 -225 -244 -259 -276 -289 -290 -283 -275 -273 -268 -253 -226 -194 -166 -144 -127 -110 -90 -67 -45 -25 -6 6 11 12 15 24 28 23 19 29 51 71 79 77 78 80 78 63 44 27 12 -5 -23 -33 -33 -27 -28 -41 -55 -61 -58 -56 -58 -61 -56 -44 -29 -14 -5 -7 -12 -10 1 12 20 32 54 77 89 95 114 151 182 182 165 152 164 188 208 215 205 188 174 170 165 150 119 90 70 61 53 41 29 19 12 3 -7 -20 -31 -38 -40 -40 -38 -29 -12 4 5 -13 -38 -60 -77 -88 -96 -99 -102 -106 -113 -117 -120 -120 -117 -112 -107 -103 -89 -63 -37 -25 -24 -22 -9 4 6 1 6 25 44 49 42 37 38 39 34 30 32 41 45 43 40 36 31 24 10 -9 -31 -50 -60 -60 -53 -46 -40 -40 -44 -49 -50 -42 -27 -10 11 31 52 65 72 79 93 113 131 138 132 116 100 94 98 98 85 64 48 45 48 50 53 59 63 58 50 49 59 70 69 59 54 56 59 50 33 19 17 17 13 -2 -13 -13 0 7 1 -12 -22 -29 -37 -41 -41 -42 -54 -80 -107 -123 -130 -138 -147 -152 -155 -159 -167 -177 -190 -200 -204 +0 -1 -1 -1 -1 -1 -1 -1 -1 -2 -2 -3 -3 -4 -5 -6 -6 -6 -6 -7 -7 -7 -7 -6 -5 -4 -2 -1 1 3 4 5 8 10 12 13 13 13 14 14 14 13 11 10 8 7 5 3 3 4 3 0 -3 -5 -4 -3 -3 -4 -4 -5 -6 -7 -8 -10 -12 -12 -11 -9 -8 -8 -6 -4 -4 -6 -8 -5 1 5 6 5 6 8 
10 10 10 15 22 29 31 32 37 47 54 56 56 59 67 73 76 72 69 64 59 54 47 39 32 29 28 28 23 15 5 -7 -18 -28 -33 -37 -40 -41 -37 -34 -36 -42 -46 -43 -37 -32 -25 -11 3 4 -6 -13 -5 10 22 30 40 58 76 87 89 91 96 101 100 94 89 92 100 99 79 47 14 -10 -32 -59 -88 -119 -150 -182 -207 -225 -240 -257 -270 -272 -267 -260 -259 -255 -242 -217 -187 -160 -140 -124 -107 -88 -66 -45 -25 -6 5 10 11 14 23 27 22 18 28 50 70 78 76 77 79 78 63 43 26 11 -5 -23 -33 -33 -27 -28 -41 -55 -61 -58 -56 -58 -60 -55 -44 -29 -14 -5 -7 -12 -10 0 11 19 30 51 72 83 88 106 140 168 167 151 138 148 169 186 191 181 165 152 148 143 129 101 76 59 51 44 33 23 15 9 2 -6 -16 -25 -30 -31 -31 -29 -22 -9 2 3 -10 -28 -43 -55 -62 -67 -68 -69 -71 -75 -77 -78 -77 -74 -70 -66 -63 -54 -38 -22 -15 -14 -13 -5 2 3 0 3 12 22 24 20 17 17 18 15 13 14 17 19 17 16 14 12 9 3 -4 -12 -18 -21 -21 -18 -16 -13 -13 -14 -15 -15 -12 -8 -3 2 7 12 15 16 18 20 24 27 27 25 22 18 16 16 16 13 9 7 6 6 6 6 7 7 6 5 5 5 6 6 4 4 4 4 3 2 1 0 0 0 -1 -1 -1 0 0 0 -1 -1 -1 -1 -1 -1 -1 -1 -1 -2 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 0 +-165 -199 -225 -244 -259 -276 -289 -290 -283 -275 -273 -268 -253 -226 -194 -166 -144 -127 -110 -90 -67 -45 -25 -6 6 11 12 15 24 28 23 19 29 51 71 79 77 78 80 78 63 44 27 12 -5 -23 -33 -33 -27 -28 -41 -55 -61 -58 -56 -58 -61 -56 -44 -29 -14 -5 -7 -12 -10 1 12 20 32 54 77 89 95 114 151 182 182 165 152 164 188 208 215 205 188 174 170 165 150 119 90 70 61 53 41 29 19 12 3 -7 -20 -31 -38 -40 -40 -38 -29 -12 4 5 -13 -38 -60 -77 -88 -96 -99 -102 -106 -113 -117 -120 -120 -117 -112 -107 -103 -89 -63 -37 -25 -24 -22 -9 4 6 1 6 25 44 49 42 37 38 39 34 30 32 41 45 43 40 36 31 24 10 -9 -31 -50 -60 -60 -53 -46 -40 -40 -44 -49 -50 -42 -27 -10 11 31 52 65 72 79 93 113 131 138 132 116 100 94 98 98 85 64 48 45 48 50 53 59 63 58 50 49 59 70 69 59 54 56 59 50 33 19 17 17 13 -2 -13 -13 0 7 1 -12 -22 -29 -37 -41 -41 -42 -54 -80 -107 -123 -130 -138 -147 -152 -155 -159 -167 -177 -190 -200 -204 -191 -163 -126 -92 -66 -53 -54 -60 -56 -37 -21 -25 -48 -71 
-87 -99 -104 -96 -79 -68 -67 -62 -34 6 34 51 72 105 131 141 145 157 167 160 139 118 105 86 60 36 18 -2 -35 -75 -95 -86 -67 -57 -61 -61 -51 -34 -30 -35 -34 -13 14 24 12 -2 -10 -16 -31 -43 -34 -4 18 18 3 -1 11 25 30 35 45 64 86 101 105 97 78 55 39 27 19 18 24 33 38 38 40 52 71 87 84 61 31 21 46 85 98 72 32 5 -9 -24 -50 -70 -75 -77 -86 -92 -85 -71 -68 -78 -83 -72 -51 -32 -15 8 42 75 101 118 136 156 171 172 154 122 92 66 38 -1 -39 -58 -65 -72 -91 -116 -132 -130 -115 -96 -83 -82 -92 -96 -84 -61 -43 -37 -36 -36 -35 -36 -35 -30 +0 -1 -1 -1 -1 -1 -1 -1 -2 -2 -2 -3 -3 -3 -3 -3 -3 -3 -3 -3 -2 -2 -1 -1 0 0 0 0 1 1 1 1 1 3 5 5 6 6 7 7 6 4 2 1 -1 -3 -5 -5 -4 -5 -7 -9 -10 -10 -10 -11 -12 -11 -9 -6 -3 -2 -2 -3 -3 0 2 5 8 14 21 25 27 33 46 56 58 53 50 56 65 74 78 76 71 67 67 66 61 49 38 30 26 23 18 13 8 5 1 -4 -11 -16 -20 -22 -22 -21 -16 -7 2 2 -8 -23 -36 -47 -54 -60 -63 -65 -69 -74 -78 -80 -81 -80 -78 -75 -73 -64 -46 -27 -19 -18 -17 -7 3 4 0 4 19 34 39 33 29 30 32 28 25 26 34 38 36 34 31 27 21 8 -8 -28 -45 -55 -55 -49 -43 -37 -37 -41 -46 -47 -40 -26 -10 10 29 49 62 69 76 90 109 127 134 129 113 98 92 96 96 84 63 47 44 47 49 52 58 62 57 49 48 59 70 68 58 53 55 58 49 32 18 16 16 12 -2 -13 -13 0 6 0 -12 -22 -29 -36 -40 -40 -41 -52 -77 -103 -117 -124 -131 -139 -143 -145 -148 -155 -163 -174 -183 -185 -173 -147 -113 -82 -59 -47 -48 -53 -49 -32 -18 -22 -41 -60 -73 -82 -85 -78 -64 -55 -53 -49 -27 4 25 38 54 78 96 102 104 112 118 112 96 81 71 57 39 23 11 -2 -23 -48 -59 -53 -41 -35 -36 -36 -30 -20 -17 -20 -19 -7 7 12 6 -2 -5 -8 -15 -21 -16 -2 8 7 1 -1 4 10 12 13 17 24 31 36 37 33 26 18 12 8 5 5 7 9 10 10 10 13 18 21 20 14 7 4 9 17 19 14 6 0 -2 -5 -9 -12 -12 -12 -13 -13 -12 -10 -9 -10 -10 -8 -6 -4 -2 0 3 5 7 8 9 9 10 9 8 6 4 2 1 -1 -2 -2 -2 -2 -3 -3 -3 -3 -2 -2 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 0 +-60 -53 -46 -40 -40 -44 -49 -50 -42 -27 -10 11 31 52 65 72 79 93 113 131 138 132 116 100 94 98 98 85 64 48 45 48 50 53 59 63 58 50 49 59 70 69 59 54 56 59 50 33 19 17 17 13 -2 -13 -13 0 7 1 
-12 -22 -29 -37 -41 -41 -42 -54 -80 -107 -123 -130 -138 -147 -152 -155 -159 -167 -177 -190 -200 -204 -191 -163 -126 -92 -66 -53 -54 -60 -56 -37 -21 -25 -48 -71 -87 -99 -104 -96 -79 -68 -67 -62 -34 6 34 51 72 105 131 141 145 157 167 160 139 118 105 86 60 36 18 -2 -35 -75 -95 -86 -67 -57 -61 -61 -51 -34 -30 -35 -34 -13 14 24 12 -2 -10 -16 -31 -43 -34 -4 18 18 3 -1 11 25 30 35 45 64 86 101 105 97 78 55 39 27 19 18 24 33 38 38 40 52 71 87 84 61 31 21 46 85 98 72 32 5 -9 -24 -50 -70 -75 -77 -86 -92 -85 -71 -68 -78 -83 -72 -51 -32 -15 8 42 75 101 118 136 156 171 172 154 122 92 66 38 -1 -39 -58 -65 -72 -91 -116 -132 -130 -115 -96 -83 -82 -92 -96 -84 -61 -43 -37 -36 -36 -35 -36 -35 -30 -27 -29 -26 -10 10 27 35 37 44 54 68 74 72 68 80 104 121 119 107 109 123 120 91 54 36 34 27 16 13 12 -10 -53 -92 -104 -104 -112 -123 -121 -107 -103 -111 -115 -106 -95 -95 -92 -70 -32 -2 14 27 53 87 118 138 150 154 155 160 168 168 156 141 136 137 127 109 105 123 145 143 121 97 88 82 70 54 41 35 28 20 6 -20 -48 -67 -65 -53 -52 -67 -92 -118 -134 -134 -117 -101 -111 -142 -167 -167 -156 -163 -185 -202 -205 -205 -219 -231 -221 -195 -178 -180 -183 -167 -135 -109 -101 -99 -95 -88 -81 -74 -64 -53 -46 -43 -37 -23 -3 15 27 31 29 24 29 50 76 87 79 65 55 50 37 12 -13 -32 -44 -50 -52 -50 -47 -50 -55 -63 -72 -78 -72 -51 -24 -7 3 +0 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 0 0 0 0 1 1 1 2 3 3 3 3 3 3 3 4 3 3 2 2 2 3 3 4 4 4 4 4 5 6 7 6 6 6 7 6 4 2 2 2 2 -1 -3 -3 0 1 0 -3 -5 -7 -8 -10 -10 -10 -14 -20 -28 -33 -36 -39 -42 -45 -47 -49 -53 -57 -63 -67 -70 -67 -59 -46 -35 -26 -21 -22 -25 -23 -16 -9 -11 -22 -32 -40 -46 -50 -47 -39 -34 -34 -32 -18 3 18 27 39 58 74 80 84 92 99 96 85 73 65 54 38 23 11 -2 -24 -52 -66 -60 -48 -41 -44 -45 -38 -26 -23 -27 -26 -10 10 18 9 -2 -8 -13 -26 -36 -28 -4 15 15 2 -1 9 21 26 30 39 56 76 90 94 87 70 50 35 24 17 16 22 30 35 35 37 49 67 83 80 58 29 20 44 82 95 70 31 4 -9 -24 -50 -70 -75 -77 -86 -92 -85 -71 -68 -78 -83 -72 -51 -32 -15 7 41 74 100 117 135 155 170 171 152 121 91 65 37 -1 -39 -57 
-64 -71 -89 -113 -128 -126 -111 -93 -80 -79 -88 -91 -80 -58 -41 -35 -34 -34 -33 -33 -32 -28 -25 -27 -24 -9 8 23 30 32 37 46 57 62 60 56 66 85 98 96 85 86 97 94 70 41 27 25 20 11 9 8 -8 -38 -66 -74 -73 -78 -84 -82 -72 -68 -73 -74 -68 -60 -59 -57 -43 -20 -2 8 15 30 48 65 74 80 81 80 81 84 83 76 67 64 63 58 48 46 53 61 59 49 39 34 31 26 20 14 12 9 6 2 -7 -16 -21 -20 -16 -16 -19 -26 -32 -36 -35 -30 -25 -27 -33 -38 -37 -33 -34 -37 -39 -38 -37 -38 -39 -36 -31 -27 -26 -26 -23 -18 -14 -12 -12 -11 -10 -8 -7 -6 -5 -4 -4 -3 -2 -1 0 1 1 1 1 1 1 2 2 2 1 1 1 0 0 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 0 +78 55 39 27 19 18 24 33 38 38 40 52 71 87 84 61 31 21 46 85 98 72 32 5 -9 -24 -50 -70 -75 -77 -86 -92 -85 -71 -68 -78 -83 -72 -51 -32 -15 8 42 75 101 118 136 156 171 172 154 122 92 66 38 -1 -39 -58 -65 -72 -91 -116 -132 -130 -115 -96 -83 -82 -92 -96 -84 -61 -43 -37 -36 -36 -35 -36 -35 -30 -27 -29 -26 -10 10 27 35 37 44 54 68 74 72 68 80 104 121 119 107 109 123 120 91 54 36 34 27 16 13 12 -10 -53 -92 -104 -104 -112 -123 -121 -107 -103 -111 -115 -106 -95 -95 -92 -70 -32 -2 14 27 53 87 118 138 150 154 155 160 168 168 156 141 136 137 127 109 105 123 145 143 121 97 88 82 70 54 41 35 28 20 6 -20 -48 -67 -65 -53 -52 -67 -92 -118 -134 -134 -117 -101 -111 -142 -167 -167 -156 -163 -185 -202 -205 -205 -219 -231 -221 -195 -178 -180 -183 -167 -135 -109 -101 -99 -95 -88 -81 -74 -64 -53 -46 -43 -37 -23 -3 15 27 31 29 24 29 50 76 87 79 65 55 50 37 12 -13 -32 -44 -50 -52 -50 -47 -50 -55 -63 -72 -78 -72 -51 -24 -7 3 22 60 104 147 183 218 251 275 290 305 323 334 333 330 330 327 306 267 235 227 232 217 171 122 96 94 94 78 50 25 8 -7 -29 -58 -94 -123 -139 -142 -150 -165 -179 -174 -157 -153 -168 -183 -172 -145 -133 -143 -156 -150 -127 -109 -109 -124 -142 -149 -140 -119 -102 -98 -103 -98 -77 -53 -37 -29 -14 7 22 20 3 -21 -42 -56 -66 -78 -99 -126 -145 -155 -161 -174 -184 -182 -170 -160 -151 -128 -80 -21 26 62 102 149 182 188 182 187 211 241 260 262 250 232 215 202 188 160 130 115 126 147 165 
174 177 173 153 125 106 110 119 117 100 82 74 69 58 40 19 -2 -19 -28 -26 -14 -6 -9 -28 -51 -68 -71 -71 -73 -77 -76 -77 -89 -109 -117 -105 -82 -68 -59 -39 -3 29 38 33 35 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 1 2 2 0 0 -1 -1 -3 -4 -4 -5 -5 -6 -6 -5 -5 -6 -7 -7 -5 -3 -2 0 4 8 11 14 17 20 23 24 22 18 14 10 6 -1 -8 -12 -13 -15 -20 -26 -30 -30 -28 -24 -21 -21 -25 -26 -24 -18 -13 -12 -11 -12 -12 -12 -12 -11 -10 -11 -10 -4 3 10 13 14 18 22 28 32 31 30 36 48 57 57 52 54 61 61 47 28 19 18 14 8 7 6 -6 -32 -55 -63 -64 -70 -78 -77 -69 -68 -74 -77 -72 -65 -66 -64 -50 -23 -2 10 19 39 64 88 104 114 118 120 125 132 133 125 114 110 112 105 90 88 103 123 122 104 84 76 71 61 47 36 31 25 18 5 -19 -45 -62 -61 -50 -49 -63 -87 -112 -128 -128 -113 -98 -107 -138 -162 -163 -153 -160 -182 -199 -202 -202 -217 -229 -219 -194 -177 -179 -183 -167 -135 -109 -101 -99 -95 -88 -81 -74 -64 -53 -46 -43 -37 -23 -3 14 26 30 28 23 28 49 74 85 77 63 53 48 35 11 -13 -31 -43 -48 -50 -48 -45 -48 -52 -59 -68 -73 -67 -47 -22 -7 2 19 53 92 130 161 191 219 238 249 261 274 282 279 275 273 268 249 216 188 180 183 170 133 94 73 71 70 58 36 18 5 -6 -21 -41 -66 -85 -95 -96 -100 -109 -117 -112 -100 -97 -105 -113 -105 -87 -79 -84 -90 -85 -71 -61 -60 -67 -75 -78 -72 -60 -51 -48 -50 -47 -36 -25 -17 -13 -7 2 9 8 1 -9 -17 -22 -25 -29 -36 -44 -50 -52 -53 -56 -58 -56 -51 -47 -43 -36 -22 -6 6 15 24 35 41 41 39 39 42 47 49 48 44 39 35 32 29 23 18 15 16 18 20 20 19 18 15 12 9 9 10 9 7 5 5 4 3 2 1 -1 -1 -2 -2 -1 -1 -1 -1 -2 -2 -2 -2 -2 -2 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 0 0 0 0 +20 6 -20 -48 -67 -65 -53 -52 -67 -92 -118 -134 -134 -117 -101 -111 -142 -167 -167 -156 -163 -185 -202 -205 -205 -219 -231 -221 -195 -178 -180 -183 -167 -135 -109 -101 -99 -95 -88 -81 -74 -64 -53 -46 -43 -37 -23 -3 15 27 31 29 24 29 50 76 87 79 65 55 50 37 12 -13 -32 -44 -50 -52 -50 -47 -50 -55 -63 -72 -78 -72 -51 -24 -7 3 22 60 104 147 183 218 251 275 290 305 323 334 333 330 330 327 306 267 235 227 232 217 171 122 96 94 94 78 50 25 8 -7 -29 -58 -94 -123 
-139 -142 -150 -165 -179 -174 -157 -153 -168 -183 -172 -145 -133 -143 -156 -150 -127 -109 -109 -124 -142 -149 -140 -119 -102 -98 -103 -98 -77 -53 -37 -29 -14 7 22 20 3 -21 -42 -56 -66 -78 -99 -126 -145 -155 -161 -174 -184 -182 -170 -160 -151 -128 -80 -21 26 62 102 149 182 188 182 187 211 241 260 262 250 232 215 202 188 160 130 115 126 147 165 174 177 173 153 125 106 110 119 117 100 82 74 69 58 40 19 -2 -19 -28 -26 -14 -6 -9 -28 -51 -68 -71 -71 -73 -77 -76 -77 -89 -109 -117 -105 -82 -68 -59 -39 -3 29 38 33 35 53 74 85 82 68 45 15 -16 -44 -60 -59 -40 -22 -33 -75 -114 -111 -68 -25 -21 -53 -79 -75 -51 -30 -19 -12 -8 -15 -39 -70 -88 -85 -70 -58 -54 -47 -34 -20 -14 -17 -16 -7 4 0 -18 -37 -41 -28 -14 -15 -30 -46 -50 -52 -67 -96 -116 -107 -78 -57 -57 -64 -50 -17 6 5 -11 -12 11 41 60 73 86 99 105 102 104 112 123 134 151 182 214 226 217 201 193 193 190 179 166 148 128 116 115 108 77 26 -16 -37 -57 -101 -153 -183 -182 -184 -213 -266 -312 -338 -347 -355 -376 -409 -444 -466 -470 -467 -466 -462 -446 -420 -402 -402 -400 -379 -332 -273 -212 -155 -100 -41 17 68 109 141 177 228 297 376 446 495 517 527 540 558 570 561 533 496 455 419 406 425 459 465 417 330 246 +0 0 -1 -1 -1 -1 -1 -1 -1 -1 -1 -2 -2 -2 -2 -2 -3 -4 -4 -4 -5 -6 -7 -7 -8 -9 -10 -11 -10 -10 -11 -11 -11 -10 -8 -8 -8 -9 -8 -8 -8 -7 -6 -6 -6 -5 -3 -1 2 3 4 4 3 4 8 13 16 15 12 11 10 7 2 -3 -8 -11 -13 -14 -14 -13 -14 -16 -19 -22 -24 -23 -17 -8 -3 1 7 21 37 54 69 84 99 110 118 127 137 144 146 148 150 151 144 128 114 112 116 111 88 64 51 51 51 43 28 14 4 -5 -18 -36 -58 -77 -88 -91 -97 -108 -118 -116 -106 -105 -116 -128 -121 -103 -96 -104 -114 -111 -95 -82 -83 -95 -110 -116 -110 -95 -82 -79 -84 -80 -64 -44 -31 -25 -12 5 18 17 2 -19 -37 -50 -59 -70 -89 -114 -132 -142 -148 -161 -171 -169 -159 -150 -142 -121 -76 -20 24 59 97 143 175 182 176 182 206 235 255 257 246 228 212 200 186 158 129 114 125 146 164 173 176 172 152 125 106 109 118 116 99 81 73 68 57 39 18 -2 -19 -28 -26 -14 -6 -9 -28 -50 -67 -69 -69 -71 -75 -73 -74 -85 -104 -111 
-100 -78 -64 -56 -37 -3 26 34 30 31 47 66 75 72 60 39 13 -14 -38 -52 -51 -34 -19 -28 -63 -94 -91 -56 -21 -17 -42 -62 -59 -40 -23 -15 -10 -6 -12 -29 -51 -64 -61 -50 -41 -38 -32 -23 -14 -10 -12 -11 -5 2 0 -12 -23 -25 -17 -9 -9 -17 -26 -28 -29 -36 -51 -61 -55 -40 -29 -28 -31 -24 -8 2 2 -5 -6 4 17 24 29 33 38 39 37 37 39 42 45 50 59 68 70 66 59 56 54 52 48 43 37 31 28 27 24 17 5 -4 -8 -12 -20 -29 -33 -32 -31 -35 -42 -47 -49 -48 -48 -48 -51 -53 -53 -51 -48 -46 -44 -40 -36 -33 -31 -29 -26 -22 -17 -12 -9 -5 -2 0 2 3 4 5 6 7 8 9 9 8 7 6 6 5 4 3 2 1 1 1 0 0 0 0 0 0 +-145 -155 -161 -174 -184 -182 -170 -160 -151 -128 -80 -21 26 62 102 149 182 188 182 187 211 241 260 262 250 232 215 202 188 160 130 115 126 147 165 174 177 173 153 125 106 110 119 117 100 82 74 69 58 40 19 -2 -19 -28 -26 -14 -6 -9 -28 -51 -68 -71 -71 -73 -77 -76 -77 -89 -109 -117 -105 -82 -68 -59 -39 -3 29 38 33 35 53 74 85 82 68 45 15 -16 -44 -60 -59 -40 -22 -33 -75 -114 -111 -68 -25 -21 -53 -79 -75 -51 -30 -19 -12 -8 -15 -39 -70 -88 -85 -70 -58 -54 -47 -34 -20 -14 -17 -16 -7 4 0 -18 -37 -41 -28 -14 -15 -30 -46 -50 -52 -67 -96 -116 -107 -78 -57 -57 -64 -50 -17 6 5 -11 -12 11 41 60 73 86 99 105 102 104 112 123 134 151 182 214 226 217 201 193 193 190 179 166 148 128 116 115 108 77 26 -16 -37 -57 -101 -153 -183 -182 -184 -213 -266 -312 -338 -347 -355 -376 -409 -444 -466 -470 -467 -466 -462 -446 -420 -402 -402 -400 -379 -332 -273 -212 -155 -100 -41 17 68 109 141 177 228 297 376 446 495 517 527 540 558 570 561 533 496 455 419 406 425 459 465 417 330 246 195 169 146 121 111 141 207 281 326 339 332 325 330 369 466 631 838 1042 1189 1237 1166 997 786 588 427 307 245 245 255 179 -43 -381 -758 -1120 -1444 -1697 -1824 -1795 -1648 -1472 -1353 -1346 -1473 -1705 -1974 -2197 -2324 -2327 -2201 -1968 -1694 -1471 -1367 -1394 -1520 -1694 -1859 -1944 -1888 -1672 -1323 -887 -423 -2 317 505 601 676 790 965 1194 1466 1772 2092 2397 2650 2809 2828 2681 2413 2118 1889 1759 1707 1700 1719 1737 1724 1660 1552 1407 1216 959 655 374 196 
167 277 455 604 637 539 357 162 4 -96 -128 -86 51 326 748 1243 1625 1710 1457 981 468 49 -204 -264 -154 22 121 54 -181 -540 -977 -1445 -1868 -2171 -2312 -2282 -2082 -1757 -1433 -1275 -1366 -1649 -1992 -2306 -2560 -2727 -2781 -2736 -2668 -2629 -2607 -2566 -2533 -2567 -2663 -2720 -2642 -2415 -2090 -1675 -1151 -543 48 544 +0 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 0 0 1 2 3 3 3 4 5 6 8 8 9 9 9 9 9 8 7 6 8 9 11 13 14 14 13 11 10 11 12 13 11 10 9 9 8 5 2 -1 -4 -5 -5 -3 -2 -2 -6 -11 -15 -16 -16 -17 -19 -19 -20 -23 -29 -32 -30 -24 -20 -18 -12 -1 9 12 11 11 18 26 30 30 25 17 5 -7 -19 -26 -26 -18 -10 -15 -35 -53 -53 -33 -13 -11 -27 -41 -39 -27 -17 -11 -7 -5 -9 -23 -41 -52 -51 -43 -36 -34 -30 -22 -13 -10 -12 -11 -5 2 0 -13 -26 -30 -21 -11 -11 -23 -35 -38 -40 -52 -75 -91 -84 -62 -46 -46 -52 -41 -14 4 4 -10 -11 9 35 51 63 75 86 92 90 92 100 110 121 137 166 196 208 201 187 180 181 179 169 157 141 122 111 110 104 74 25 -16 -37 -56 -100 -151 -181 -180 -182 -211 -264 -310 -337 -346 -354 -375 -409 -444 -466 -470 -467 -466 -462 -446 -420 -402 -402 -400 -378 -331 -272 -211 -154 -100 -41 16 67 107 138 173 223 290 366 433 479 499 507 518 534 543 533 504 468 427 392 378 394 424 427 381 300 223 175 151 130 107 97 123 180 243 280 290 282 274 276 307 385 518 683 843 955 986 922 781 611 453 326 232 184 182 188 130 -32 -274 -538 -787 -1004 -1168 -1242 -1209 -1098 -969 -881 -866 -937 -1072 -1226 -1347 -1407 -1391 -1299 -1146 -973 -834 -764 -769 -826 -907 -981 -1010 -967 -843 -657 -434 -204 -1 147 230 269 298 342 410 499 601 712 825 928 1005 1044 1030 956 842 723 631 575 545 530 524 517 501 470 429 379 319 245 162 90 46 38 61 98 126 129 106 67 29 0 -17 -22 -14 7 48 107 171 215 217 178 114 52 5 -21 -26 -15 1 10 4 -14 -39 -67 -93 -113 -123 -122 -113 -96 -76 -57 -47 -47 -52 -57 -60 -60 -58 -53 -46 -40 -35 -30 -25 -21 -18 -15 -12 -10 -7 -5 -3 -1 -1 0 0 +134 151 182 214 226 217 201 193 193 190 179 166 148 128 116 115 108 77 26 -16 -37 -57 -101 -153 -183 -182 -184 -213 -266 -312 -338 -347 -355 -376 -409 
-444 -466 -470 -467 -466 -462 -446 -420 -402 -402 -400 -379 -332 -273 -212 -155 -100 -41 17 68 109 141 177 228 297 376 446 495 517 527 540 558 570 561 533 496 455 419 406 425 459 465 417 330 246 195 169 146 121 111 141 207 281 326 339 332 325 330 369 466 631 838 1042 1189 1237 1166 997 786 588 427 307 245 245 255 179 -43 -381 -758 -1120 -1444 -1697 -1824 -1795 -1648 -1472 -1353 -1346 -1473 -1705 -1974 -2197 -2324 -2327 -2201 -1968 -1694 -1471 -1367 -1394 -1520 -1694 -1859 -1944 -1888 -1672 -1323 -887 -423 -2 317 505 601 676 790 965 1194 1466 1772 2092 2397 2650 2809 2828 2681 2413 2118 1889 1759 1707 1700 1719 1737 1724 1660 1552 1407 1216 959 655 374 196 167 277 455 604 637 539 357 162 4 -96 -128 -86 51 326 748 1243 1625 1710 1457 981 468 49 -204 -264 -154 22 121 54 -181 -540 -977 -1445 -1868 -2171 -2312 -2282 -2082 -1757 -1433 -1275 -1366 -1649 -1992 -2306 -2560 -2727 -2781 -2736 -2668 -2629 -2607 -2566 -2533 -2567 -2663 -2720 -2642 -2415 -2090 -1675 -1151 -543 48 544 953 1334 1694 2006 2272 2539 2817 3058 3229 3361 3491 3569 3490 3224 2845 2439 2031 1625 1270 1006 790 533 239 -1 -149 -285 -486 -708 -859 -941 -1026 -1089 -991 -665 -241 100 327 542 774 928 942 896 894 930 936 940 1027 1183 1253 1121 839 523 188 -206 -598 -824 -800 -613 -380 -94 307 761 1095 1216 1210 1193 1179 1136 1121 1235 1459 1636 1649 1528 1342 1055 590 0 -538 -921 -1220 -1548 -1883 -2139 -2328 -2575 -2953 -3408 -3864 -4327 -4846 -5401 -5910 -6317 -6593 -6674 -6489 -6060 -5506 -4908 -4215 -3362 -2416 -1519 -698 174 1197 2299 3321 4217 5060 5889 6609 7082 7236 7085 6671 6054 5322 4576 3872 3233 2651 2093 1494 819 103 -574 -1195 -1810 -2432 -2948 -3221 -3197 -2943 -2551 -2117 -1754 -1532 -1386 -1146 -721 -203 249 607 962 1346 1673 1884 2046 2236 2395 2384 2186 1915 1665 1404 1071 693 334 +0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 0 -1 -1 -2 -4 -6 -7 -8 -8 -10 -14 -17 -20 -21 -23 -26 -30 -34 -38 -40 -42 -44 -46 -46 -46 -46 -48 -49 -49 -45 -38 -31 -24 -16 -7 2 11 19 25 33 44 60 78 96 109 118 124 130 
138 145 147 143 137 128 121 120 129 143 148 136 110 84 68 60 53 44 42 54 81 113 133 141 141 140 145 165 212 293 395 500 580 613 587 510 408 310 228 166 135 136 144 102 -26 -225 -454 -678 -885 -1054 -1146 -1142 -1061 -959 -891 -897 -992 -1161 -1358 -1528 -1632 -1651 -1578 -1424 -1238 -1085 -1018 -1048 -1152 -1296 -1434 -1513 -1481 -1323 -1055 -713 -343 -2 260 417 500 567 667 821 1022 1263 1536 1825 2103 2339 2493 2524 2406 2176 1920 1721 1610 1570 1571 1596 1619 1614 1560 1464 1332 1156 914 627 359 188 161 268 442 588 622 527 350 159 3 -95 -127 -86 50 323 743 1237 1619 1705 1454 979 467 48 -204 -264 -154 21 120 53 -181 -539 -975 -1441 -1860 -2159 -2297 -2264 -2063 -1738 -1415 -1257 -1344 -1619 -1951 -2253 -2495 -2650 -2695 -2644 -2571 -2526 -2496 -2449 -2409 -2432 -2513 -2557 -2474 -2252 -1941 -1549 -1060 -498 43 493 859 1197 1512 1780 2005 2228 2457 2651 2782 2877 2970 3016 2929 2687 2354 2004 1656 1315 1020 801 624 417 185 -1 -114 -216 -366 -528 -634 -688 -743 -781 -703 -467 -168 68 222 364 515 610 613 576 568 584 580 576 621 707 739 652 481 296 105 -114 -325 -442 -423 -319 -195 -48 152 371 526 574 562 544 529 501 485 525 609 671 663 603 519 400 219 0 -192 -322 -417 -518 -616 -684 -727 -786 -880 -991 -1096 -1197 -1307 -1418 -1511 -1572 -1597 -1571 -1485 -1347 -1189 -1027 -856 -662 -461 -280 -125 29 199 369 514 629 727 813 877 902 885 830 749 650 545 447 361 286 223 167 113 58 6 -37 -72 -103 -129 -146 -148 -137 -117 -94 -72 -55 -44 -36 -27 -16 -4 4 8 12 15 15 15 13 12 10 8 5 3 2 1 0 0 0 +2118 1889 1759 1707 1700 1719 1737 1724 1660 1552 1407 1216 959 655 374 196 167 277 455 604 637 539 357 162 4 -96 -128 -86 51 326 748 1243 1625 1710 1457 981 468 49 -204 -264 -154 22 121 54 -181 -540 -977 -1445 -1868 -2171 -2312 -2282 -2082 -1757 -1433 -1275 -1366 -1649 -1992 -2306 -2560 -2727 -2781 -2736 -2668 -2629 -2607 -2566 -2533 -2567 -2663 -2720 -2642 -2415 -2090 -1675 -1151 -543 48 544 953 1334 1694 2006 2272 2539 2817 3058 3229 3361 3491 3569 3490 3224 2845 2439 2031 1625 
1270 1006 790 533 239 -1 -149 -285 -486 -708 -859 -941 -1026 -1089 -991 -665 -241 100 327 542 774 928 942 896 894 930 936 940 1027 1183 1253 1121 839 523 188 -206 -598 -824 -800 -613 -380 -94 307 761 1095 1216 1210 1193 1179 1136 1121 1235 1459 1636 1649 1528 1342 1055 590 0 -538 -921 -1220 -1548 -1883 -2139 -2328 -2575 -2953 -3408 -3864 -4327 -4846 -5401 -5910 -6317 -6593 -6674 -6489 -6060 -5506 -4908 -4215 -3362 -2416 -1519 -698 174 1197 2299 3321 4217 5060 5889 6609 7082 7236 7085 6671 6054 5322 4576 3872 3233 2651 2093 1494 819 103 -574 -1195 -1810 -2432 -2948 -3221 -3197 -2943 -2551 -2117 -1754 -1532 -1386 -1146 -721 -203 249 607 962 1346 1673 1884 2046 2236 2395 2384 2186 1915 1665 1404 1071 693 334 -10 -367 -682 -860 -900 -919 -982 -995 -829 -479 -44 421 919 1401 1755 1951 2107 2347 2617 2755 2703 2557 2397 2173 1805 1331 858 409 -88 -652 -1176 -1581 -1928 -2332 -2782 -3169 -3481 -3858 -4405 -5023 -5532 -5928 -6391 -7018 -7640 -7981 -7938 -7626 -7174 -6584 -5809 -4873 -3855 -2809 -1748 -674 441 1651 2959 4239 5342 6279 7189 8094 8780 9006 8795 8395 7957 7408 6650 5771 4959 4273 3604 2855 2048 1238 407 -489 -1415 -2272 -2992 -3557 -3927 -4034 -3872 -3552 -3205 -2866 -2476 -2006 -1533 -1153 -860 -544 -123 364 811 1160 1435 1658 1798 1817 1729 1577 1358 1028 597 155 -222 -550 -899 -1263 -1540 -1648 -1627 -1584 -1560 -1485 -1283 -949 -533 -39 563 1250 1914 2453 2869 3230 3546 3754 3821 3796 3724 3562 3262 2888 2542 2192 1692 1002 297 -243 -672 -1167 -1743 -2250 -2666 -3195 -3985 -4867 -5573 -6127 -6826 -7773 -8674 -9163 -9212 -9063 +0 0 0 1 2 3 4 5 7 8 9 9 9 7 4 2 2 5 9 14 16 15 11 5 0 -4 -6 -4 2 17 42 74 103 115 104 74 37 4 -19 -25 -16 2 12 6 -22 -67 -125 -192 -259 -313 -345 -354 -335 -293 -247 -228 -252 -315 -392 -468 -536 -589 -618 -626 -628 -637 -649 -656 -665 -692 -737 -771 -769 -720 -638 -524 -368 -178 16 185 332 475 617 745 861 983 1112 1230 1324 1404 1486 1546 1539 1447 1299 1133 959 780 620 499 398 272 124 -1 -80 -155 -268 -396 -487 -541 -598 -643 -593 
-403 -148 62 205 344 497 604 620 596 601 632 643 653 721 839 897 810 612 385 139 -155 -454 -631 -617 -477 -298 -75 244 611 886 991 994 987 982 953 947 1050 1249 1409 1429 1333 1177 931 523 0 -483 -831 -1107 -1411 -1725 -1969 -2152 -2391 -2754 -3191 -3632 -4083 -4591 -5135 -5639 -6048 -6333 -6430 -6271 -5873 -5351 -4783 -4118 -3292 -2371 -1494 -688 171 1183 2277 3294 4188 5031 5863 6586 7062 7221 7076 6666 6051 5320 4576 3872 3232 2649 2091 1492 817 102 -573 -1190 -1800 -2416 -2925 -3191 -3162 -2905 -2514 -2082 -1722 -1500 -1354 -1117 -701 -197 240 584 923 1288 1596 1791 1938 2109 2251 2232 2038 1778 1538 1291 980 631 302 -10 -330 -609 -764 -795 -807 -857 -863 -715 -411 -38 355 771 1167 1452 1603 1718 1900 2102 2196 2137 2005 1864 1675 1380 1008 644 304 -65 -477 -851 -1133 -1368 -1638 -1934 -2180 -2369 -2597 -2933 -3307 -3601 -3814 -4064 -4409 -4742 -4891 -4805 -4558 -4232 -3833 -3336 -2762 -2155 -1548 -950 -361 232 857 1514 2136 2650 3065 3454 3823 4079 4113 3948 3703 3448 3154 2779 2367 1995 1686 1395 1083 761 450 145 -171 -484 -760 -979 -1137 -1227 -1231 -1154 -1033 -909 -793 -668 -527 -392 -287 -209 -129 -29 80 175 242 291 326 342 334 308 271 226 165 92 23 -32 -77 -120 -161 -189 -194 -183 -171 -160 -146 -120 -85 -45 -4 42 89 129 156 172 182 186 185 175 162 147 130 109 89 71 56 39 21 5 -5 -10 -16 -20 -22 -22 -22 -23 -22 -20 -17 -14 -10 -7 -5 -3 0 +-1220 -1548 -1883 -2139 -2328 -2575 -2953 -3408 -3864 -4327 -4846 -5401 -5910 -6317 -6593 -6674 -6489 -6060 -5506 -4908 -4215 -3362 -2416 -1519 -698 174 1197 2299 3321 4217 5060 5889 6609 7082 7236 7085 6671 6054 5322 4576 3872 3233 2651 2093 1494 819 103 -574 -1195 -1810 -2432 -2948 -3221 -3197 -2943 -2551 -2117 -1754 -1532 -1386 -1146 -721 -203 249 607 962 1346 1673 1884 2046 2236 2395 2384 2186 1915 1665 1404 1071 693 334 -10 -367 -682 -860 -900 -919 -982 -995 -829 -479 -44 421 919 1401 1755 1951 2107 2347 2617 2755 2703 2557 2397 2173 1805 1331 858 409 -88 -652 -1176 -1581 -1928 -2332 -2782 -3169 -3481 -3858 -4405 
-5023 -5532 -5928 -6391 -7018 -7640 -7981 -7938 -7626 -7174 -6584 -5809 -4873 -3855 -2809 -1748 -674 441 1651 2959 4239 5342 6279 7189 8094 8780 9006 8795 8395 7957 7408 6650 5771 4959 4273 3604 2855 2048 1238 407 -489 -1415 -2272 -2992 -3557 -3927 -4034 -3872 -3552 -3205 -2866 -2476 -2006 -1533 -1153 -860 -544 -123 364 811 1160 1435 1658 1798 1817 1729 1577 1358 1028 597 155 -222 -550 -899 -1263 -1540 -1648 -1627 -1584 -1560 -1485 -1283 -949 -533 -39 563 1250 1914 2453 2869 3230 3546 3754 3821 3796 3724 3562 3262 2888 2542 2192 1692 1002 297 -243 -672 -1167 -1743 -2250 -2666 -3195 -3985 -4867 -5573 -6127 -6826 -7773 -8674 -9163 -9212 -9063 -8831 -8388 -7623 -6647 -5644 -4664 -3618 -2438 -1109 368 1928 3425 4744 5967 7262 8567 9534 9897 9808 9637 9502 9175 8470 7553 6730 6052 5287 4268 3085 1898 718 -526 -1778 -2887 -3812 -4634 -5351 -5785 -5777 -5441 -5069 -4840 -4683 -4439 -4047 -3534 -2915 -2178 -1358 -532 266 1055 1813 2438 2845 3052 3149 3189 3155 3035 2873 2718 2563 2353 2046 1637 1141 601 82 -341 -636 -830 -935 -905 -673 -257 202 544 727 857 1041 1277 1484 1634 1802 2058 2370 2623 2725 2661 2473 2224 1944 1592 1083 401 -315 -906 -1399 -2021 -2912 -3929 -4823 -5596 -6527 -7777 -9105 -10087 -10561 -10722 -10792 -10732 -10347 -9569 -8541 -7439 -6329 -5161 -3842 -2302 -564 1241 2994 4716 6514 8327 9852 10820 11302 11607 11882 11944 11597 10956 10314 9760 9087 8093 6841 5541 4272 2952 1504 -33 -1556 -2966 -4169 -5116 -5863 -6519 -7082 -7388 -7323 -7006 -6673 -6380 -5953 -5267 -4444 -3680 +0 -1 -1 -2 -3 -6 -8 -12 -17 -25 -34 -44 -57 -71 -86 -98 -108 -114 -116 -114 -109 -96 -75 -52 -26 6 51 105 163 222 285 353 421 478 517 536 532 509 471 426 379 331 284 235 175 100 13 -77 -166 -261 -363 -457 -518 -533 -508 -455 -391 -335 -302 -282 -240 -156 -46 56 142 232 334 427 494 551 618 678 693 651 583 519 448 350 231 114 -4 -131 -249 -320 -342 -356 -388 -401 -341 -201 -19 182 405 629 801 906 995 1127 1277 1366 1362 1308 1245 1146 966 723 472 228 -50 -375 -685 -933 -1153 -1412 
-1705 -1967 -2187 -2453 -2834 -3270 -3642 -3947 -4302 -4776 -5255 -5548 -5574 -5409 -5141 -4763 -4244 -3595 -2870 -2111 -1325 -516 340 1284 2320 3352 4258 5043 5820 6602 7215 7453 7330 7046 6725 6302 5693 4973 4300 3728 3163 2520 1818 1105 365 -442 -1284 -2071 -2740 -3274 -3630 -3746 -3611 -3326 -3013 -2705 -2346 -1908 -1463 -1104 -826 -525 -119 352 788 1130 1401 1623 1764 1786 1703 1556 1342 1018 592 153 -221 -548 -896 -1260 -1537 -1646 -1626 -1584 -1560 -1485 -1283 -949 -533 -39 562 1247 1908 2444 2856 3211 3521 3723 3784 3753 3675 3509 3207 2833 2488 2141 1648 973 287 -235 -648 -1121 -1669 -2147 -2535 -3027 -3761 -4575 -5218 -5713 -6338 -7185 -7982 -8392 -8396 -8218 -7967 -7528 -6805 -5901 -4983 -4094 -3157 -2115 -956 315 1640 2894 3981 4973 6010 7040 7776 8012 7878 7681 7513 7194 6588 5825 5146 4586 3971 3177 2275 1386 519 -377 -1262 -2028 -2650 -3188 -3641 -3894 -3847 -3582 -3300 -3114 -2978 -2789 -2512 -2166 -1765 -1302 -802 -310 152 597 1013 1343 1545 1634 1661 1656 1614 1529 1425 1327 1231 1111 950 747 512 265 35 -146 -266 -341 -377 -358 -261 -98 75 198 259 299 355 426 485 522 562 627 705 762 772 736 666 583 496 396 262 94 -73 -202 -302 -423 -591 -774 -919 -1032 -1164 -1341 -1517 -1621 -1635 -1600 -1552 -1483 -1375 -1220 -1045 -872 -711 -555 -394 -226 -53 109 252 376 493 595 666 689 678 654 626 589 532 468 407 357 306 250 193 142 99 61 28 -1 -23 -39 -47 -49 -48 -45 -40 -33 -26 -19 -14 -8 -5 -3 -2 0 +-1415 -2272 -2992 -3557 -3927 -4034 -3872 -3552 -3205 -2866 -2476 -2006 -1533 -1153 -860 -544 -123 364 811 1160 1435 1658 1798 1817 1729 1577 1358 1028 597 155 -222 -550 -899 -1263 -1540 -1648 -1627 -1584 -1560 -1485 -1283 -949 -533 -39 563 1250 1914 2453 2869 3230 3546 3754 3821 3796 3724 3562 3262 2888 2542 2192 1692 1002 297 -243 -672 -1167 -1743 -2250 -2666 -3195 -3985 -4867 -5573 -6127 -6826 -7773 -8674 -9163 -9212 -9063 -8831 -8388 -7623 -6647 -5644 -4664 -3618 -2438 -1109 368 1928 3425 4744 5967 7262 8567 9534 9897 9808 9637 9502 9175 8470 7553 6730 6052 
5287 4268 3085 1898 718 -526 -1778 -2887 -3812 -4634 -5351 -5785 -5777 -5441 -5069 -4840 -4683 -4439 -4047 -3534 -2915 -2178 -1358 -532 266 1055 1813 2438 2845 3052 3149 3189 3155 3035 2873 2718 2563 2353 2046 1637 1141 601 82 -341 -636 -830 -935 -905 -673 -257 202 544 727 857 1041 1277 1484 1634 1802 2058 2370 2623 2725 2661 2473 2224 1944 1592 1083 401 -315 -906 -1399 -2021 -2912 -3929 -4823 -5596 -6527 -7777 -9105 -10087 -10561 -10722 -10792 -10732 -10347 -9569 -8541 -7439 -6329 -5161 -3842 -2302 -564 1241 2994 4716 6514 8327 9852 10820 11302 11607 11882 11944 11597 10956 10314 9760 9087 8093 6841 5541 4272 2952 1504 -33 -1556 -2966 -4169 -5116 -5863 -6519 -7082 -7388 -7323 -7006 -6673 -6380 -5953 -5267 -4444 -3680 -2964 -2129 -1119 -88 814 1618 2420 3194 3812 4219 4446 4493 4315 3978 3653 3427 3195 2843 2450 2177 2000 1730 1284 804 441 157 -159 -456 -585 -523 -394 -259 -31 319 642 792 849 994 1212 1289 1124 889 793 812 759 572 345 110 -252 -821 -1507 -2226 -3089 -4249 -5601 -6814 -7687 -8346 -8970 -9485 -9666 -9484 -9162 -8889 -8620 -8201 -7549 -6671 -5568 -4275 -2870 -1374 315 2237 4159 5732 6867 7819 8822 9773 10394 10634 10734 10919 11144 11212 11011 10594 10046 9357 8444 7251 5824 4284 2755 1322 24 -1160 -2303 -3461 -4581 -5541 -6326 -7083 -7881 -8532 -8781 -8657 -8430 -8196 -7734 -6894 -5921 -5147 -4520 -3701 -2603 -1506 -599 290 1338 2374 3107 3557 3962 4354 4539 4490 4456 4577 4681 4601 4490 4532 4574 4308 3744 3219 2892 2549 1984 1357 957 796 656 436 244 155 67 -118 -368 -609 -841 -1088 -1311 -1477 -1630 -1817 -2012 -2201 +0 -1 -2 -3 -5 -8 -11 -13 -15 -17 -17 -17 -15 -13 -12 -8 -3 6 17 26 36 46 55 61 63 62 58 47 29 8 -13 -34 -58 -86 -111 -125 -130 -134 -139 -139 -126 -98 -58 -5 65 152 243 325 396 464 528 581 613 632 641 634 601 549 500 444 354 216 65 -56 -159 -283 -434 -576 -700 -862 -1103 -1380 -1621 -1825 -2082 -2428 -2773 -2996 -3079 -3098 -3084 -2992 -2777 -2472 -2142 -1806 -1429 -981 -455 153 820 1484 2092 2679 3317 3980 4503 4755 4789 4780 4788 
4695 4400 3984 3603 3287 2913 2385 1748 1089 417 -311 -1063 -1748 -2336 -2876 -3362 -3678 -3717 -3542 -3337 -3223 -3153 -3021 -2784 -2457 -2047 -1545 -974 -385 194 778 1349 1831 2155 2333 2428 2480 2474 2399 2290 2183 2074 1919 1681 1354 951 504 69 -291 -545 -716 -811 -790 -591 -227 179 485 652 773 943 1163 1359 1503 1665 1910 2209 2455 2561 2510 2342 2114 1854 1523 1040 386 -305 -878 -1360 -1970 -2845 -3847 -4733 -5504 -6432 -7677 -9003 -9991 -10476 -10649 -10732 -10685 -10312 -9544 -8525 -7430 -6325 -5159 -3842 -2302 -564 1240 2992 4712 6506 8310 9825 10783 11252 11541 11800 11847 11486 10832 10180 9617 8936 7940 6697 5412 4162 2868 1457 -32 -1500 -2849 -3991 -4882 -5574 -6176 -6683 -6945 -6857 -6533 -6196 -5898 -5478 -4824 -4051 -3337 -2674 -1911 -999 -79 718 1420 2111 2769 3285 3612 3782 3797 3621 3315 3023 2816 2606 2301 1967 1735 1581 1356 998 620 337 118 -120 -340 -432 -383 -286 -186 -22 223 446 544 577 669 806 848 731 571 504 510 471 350 208 65 -149 -478 -866 -1262 -1727 -2342 -3043 -3649 -4056 -4337 -4591 -4780 -4796 -4631 -4403 -4200 -4005 -3747 -3390 -2943 -2413 -1821 -1200 -564 126 883 1610 2174 2553 2848 3146 3411 3552 3554 3508 3489 3479 3418 3279 3080 2847 2588 2275 1903 1488 1065 667 311 5 -258 -498 -725 -930 -1091 -1205 -1306 -1405 -1471 -1463 -1391 -1305 -1223 -1113 -953 -787 -656 -553 -434 -293 -162 -62 28 124 210 261 283 299 311 306 286 267 258 246 226 206 193 180 157 126 99 81 65 46 28 17 13 9 5 2 1 0 -1 -3 -3 -3 -3 -3 -2 -2 -1 -1 0 +1041 1277 1484 1634 1802 2058 2370 2623 2725 2661 2473 2224 1944 1592 1083 401 -315 -906 -1399 -2021 -2912 -3929 -4823 -5596 -6527 -7777 -9105 -10087 -10561 -10722 -10792 -10732 -10347 -9569 -8541 -7439 -6329 -5161 -3842 -2302 -564 1241 2994 4716 6514 8327 9852 10820 11302 11607 11882 11944 11597 10956 10314 9760 9087 8093 6841 5541 4272 2952 1504 -33 -1556 -2966 -4169 -5116 -5863 -6519 -7082 -7388 -7323 -7006 -6673 -6380 -5953 -5267 -4444 -3680 -2964 -2129 -1119 -88 814 1618 2420 3194 3812 4219 4446 4493 4315 3978 
3653 3427 3195 2843 2450 2177 2000 1730 1284 804 441 157 -159 -456 -585 -523 -394 -259 -31 319 642 792 849 994 1212 1289 1124 889 793 812 759 572 345 110 -252 -821 -1507 -2226 -3089 -4249 -5601 -6814 -7687 -8346 -8970 -9485 -9666 -9484 -9162 -8889 -8620 -8201 -7549 -6671 -5568 -4275 -2870 -1374 315 2237 4159 5732 6867 7819 8822 9773 10394 10634 10734 10919 11144 11212 11011 10594 10046 9357 8444 7251 5824 4284 2755 1322 24 -1160 -2303 -3461 -4581 -5541 -6326 -7083 -7881 -8532 -8781 -8657 -8430 -8196 -7734 -6894 -5921 -5147 -4520 -3701 -2603 -1506 -599 290 1338 2374 3107 3557 3962 4354 4539 4490 4456 4577 4681 4601 4490 4532 4574 4308 3744 3219 2892 2549 1984 1357 957 796 656 436 244 155 67 -118 -368 -609 -841 -1088 -1311 -1477 -1630 -1817 -2012 -2201 -2500 -3051 -3813 -4564 -5141 -5587 -6008 -6394 -6622 -6618 -6450 -6267 -6174 -6144 -6018 -5654 -5102 -4542 -4049 -3441 -2508 -1313 -156 762 1530 2330 3171 3925 4546 5116 5722 6370 7016 7616 8108 8428 8582 8614 8506 8179 7639 7033 6488 5940 5234 4368 3498 2722 1964 1090 92 -944 -1948 -2885 -3698 -4319 -4755 -5069 -5297 -5407 -5389 -5313 -5250 -5180 -5016 -4696 -4243 -3708 -3119 -2491 -1849 -1248 -743 -357 -72 155 355 528 659 755 848 950 1047 1117 1167 1212 1253 1281 1293 1313 1361 1437 1513 1575 1645 1752 1867 1903 1785 1553 1321 1150 973 685 289 -106 -406 -611 -783 -959 -1136 -1294 -1421 -1508 -1553 -1569 -1576 -1581 -1594 -1624 -1655 -1646 -1569 -1455 -1349 -1251 -1114 -935 -764 -656 -597 -550 -514 -515 -550 -566 -535 -480 -434 -404 -383 -373 -371 -347 -251 -71 150 356 530 694 888 1118 1343 1513 1628 1744 1911 2116 2295 2424 +0 0 0 1 2 4 6 8 11 14 16 17 18 17 14 5 -6 -18 -30 -47 -75 -112 -150 -189 -240 -308 -390 -463 -521 -566 -609 -645 -660 -648 -611 -564 -506 -435 -341 -215 -56 127 321 529 763 1018 1255 1437 1561 1669 1772 1848 1862 1824 1777 1739 1674 1541 1346 1124 893 637 334 -8 -367 -719 -1038 -1308 -1539 -1758 -1959 -2095 -2130 -2087 -2035 -1993 -1903 -1722 -1486 -1258 -1035 -760 -408 -33 308 626 955 1285 1563 
1763 1893 1947 1903 1786 1668 1592 1509 1365 1196 1079 1007 885 667 424 236 85 -88 -255 -332 -301 -230 -153 -19 193 393 491 533 631 779 838 739 591 533 552 521 397 242 78 -181 -594 -1101 -1642 -2300 -3192 -4245 -5211 -5929 -6492 -7035 -7501 -7705 -7618 -7418 -7251 -7084 -6788 -6293 -5600 -4707 -3638 -2458 -1185 273 1951 3650 5060 6095 6979 7917 8816 9424 9691 9829 10047 10300 10409 10266 9918 9442 8829 7998 6893 5556 4100 2646 1273 23 -1125 -2238 -3373 -4475 -5425 -6208 -6966 -7766 -8422 -8683 -8575 -8363 -8140 -7691 -6864 -5901 -5134 -4512 -3697 -2602 -1506 -599 290 1338 2373 3105 3554 3957 4345 4526 4474 4436 4551 4649 4563 4447 4481 4514 4244 3681 3158 2831 2489 1933 1318 927 769 631 418 233 147 63 -112 -348 -573 -788 -1015 -1218 -1366 -1500 -1664 -1834 -1996 -2256 -2739 -3404 -4052 -4539 -4904 -5243 -5545 -5707 -5667 -5488 -5297 -5183 -5121 -4981 -4647 -4162 -3678 -3253 -2743 -1984 -1030 -122 587 1169 1765 2382 2921 3352 3737 4139 4564 4975 5347 5635 5796 5839 5798 5663 5383 4972 4524 4124 3731 3248 2676 2117 1626 1158 634 52 -535 -1089 -1590 -2009 -2313 -2509 -2634 -2711 -2725 -2674 -2595 -2523 -2448 -2331 -2146 -1905 -1636 -1352 -1061 -773 -512 -299 -141 -28 58 131 192 235 263 289 317 342 356 364 369 373 372 366 363 366 377 386 391 398 412 427 422 385 324 268 226 185 126 51 -19 -68 -99 -122 -144 -164 -179 -189 -193 -190 -184 -177 -170 -164 -159 -155 -146 -133 -117 -103 -90 -76 -60 -46 -37 -32 -28 -24 -23 -22 -21 -19 -15 -13 -11 -9 -8 -7 -6 -4 -1 1 3 4 4 4 4 4 4 3 2 1 1 0 0 +10394 10634 10734 10919 11144 11212 11011 10594 10046 9357 8444 7251 5824 4284 2755 1322 24 -1160 -2303 -3461 -4581 -5541 -6326 -7083 -7881 -8532 -8781 -8657 -8430 -8196 -7734 -6894 -5921 -5147 -4520 -3701 -2603 -1506 -599 290 1338 2374 3107 3557 3962 4354 4539 4490 4456 4577 4681 4601 4490 4532 4574 4308 3744 3219 2892 2549 1984 1357 957 796 656 436 244 155 67 -118 -368 -609 -841 -1088 -1311 -1477 -1630 -1817 -2012 -2201 -2500 -3051 -3813 -4564 -5141 -5587 -6008 -6394 -6622 -6618 -6450 
-6267 -6174 -6144 -6018 -5654 -5102 -4542 -4049 -3441 -2508 -1313 -156 762 1530 2330 3171 3925 4546 5116 5722 6370 7016 7616 8108 8428 8582 8614 8506 8179 7639 7033 6488 5940 5234 4368 3498 2722 1964 1090 92 -944 -1948 -2885 -3698 -4319 -4755 -5069 -5297 -5407 -5389 -5313 -5250 -5180 -5016 -4696 -4243 -3708 -3119 -2491 -1849 -1248 -743 -357 -72 155 355 528 659 755 848 950 1047 1117 1167 1212 1253 1281 1293 1313 1361 1437 1513 1575 1645 1752 1867 1903 1785 1553 1321 1150 973 685 289 -106 -406 -611 -783 -959 -1136 -1294 -1421 -1508 -1553 -1569 -1576 -1581 -1594 -1624 -1655 -1646 -1569 -1455 -1349 -1251 -1114 -935 -764 -656 -597 -550 -514 -515 -550 -566 -535 -480 -434 -404 -383 -373 -371 -347 -251 -71 150 356 530 694 888 1118 1343 1513 1628 1744 1911 2116 2295 2424 2526 2606 2624 2534 2356 2149 1949 1740 1482 1176 849 537 259 8 -241 -489 -702 -830 -879 -905 -953 -999 -983 -897 -795 -736 -727 -738 -748 -755 -760 -759 -757 -750 -726 -667 -586 -519 -482 -446 -371 -256 -137 -34 68 186 312 404 436 394 279 97 -117 -321 -480 -599 -680 -706 -654 -551 -453 -396 -363 -316 -252 -190 -125 -23 133 300 429 504 556 600 619 587 517 438 368 291 189 47 -139 -368 -628 -896 -1149 -1371 -1548 -1651 -1660 -1595 -1500 -1398 -1273 -1118 -971 -873 -797 -667 -445 -165 125 423 742 1053 1311 1524 1731 1923 2044 2093 2146 2253 2341 2314 2191 2071 1969 1790 1488 1148 869 623 332 16 -226 -379 -537 -753 -961 -1086 -1146 -1202 -1248 -1241 -1196 -1155 -1106 -990 -816 -688 -663 -677 -631 -515 -384 -259 -130 5 122 229 354 464 487 407 319 305 332 299 +0 2 5 7 13 21 29 36 44 52 57 58 55 48 35 19 0 -22 -49 -81 -118 -157 -197 -239 -289 -338 -376 -398 -416 -433 -437 -415 -378 -349 -324 -281 -208 -127 -54 27 130 243 333 399 464 532 578 596 615 658 698 712 721 754 788 767 690 612 569 517 415 292 212 182 154 105 60 39 17 -32 -102 -173 -245 -325 -400 -462 -521 -594 -673 -753 -873 -1089 -1389 -1698 -1951 -2164 -2372 -2573 -2717 -2767 -2747 -2716 -2724 -2759 -2749 -2627 -2411 -2183 -1978 -1708 -1264 -672 -82 402 
819 1265 1747 2193 2575 2937 3330 3757 4193 4609 4968 5230 5390 5476 5471 5323 5028 4682 4367 4041 3599 3036 2456 1930 1407 788 67 -697 -1451 -2168 -2803 -3303 -3668 -3943 -4154 -4276 -4296 -4268 -4251 -4226 -4123 -3887 -3537 -3113 -2637 -2120 -1584 -1076 -645 -312 -64 136 315 471 591 681 768 865 958 1027 1078 1125 1168 1199 1215 1238 1289 1366 1443 1507 1579 1687 1804 1843 1734 1513 1290 1125 954 673 284 -105 -402 -606 -777 -953 -1130 -1289 -1417 -1504 -1550 -1568 -1575 -1581 -1594 -1624 -1655 -1646 -1569 -1454 -1348 -1249 -1112 -932 -761 -653 -593 -546 -510 -510 -543 -558 -527 -471 -425 -395 -374 -363 -360 -336 -242 -69 143 339 503 657 837 1050 1257 1410 1511 1612 1758 1937 2091 2197 2278 2338 2342 2249 2079 1886 1700 1508 1277 1006 722 453 217 6 -200 -402 -573 -672 -707 -722 -754 -784 -765 -692 -608 -558 -547 -550 -552 -552 -550 -544 -537 -527 -505 -459 -399 -350 -321 -294 -242 -165 -88 -22 42 113 188 241 257 229 160 54 -66 -177 -261 -321 -359 -367 -335 -278 -225 -194 -175 -150 -118 -87 -57 -11 57 127 179 206 223 236 239 222 192 159 131 101 64 15 -46 -118 -197 -274 -343 -399 -439 -457 -448 -419 -384 -348 -309 -264 -223 -194 -173 -140 -91 -33 23 77 132 181 218 244 267 286 293 289 285 287 286 271 246 222 201 175 138 101 73 49 25 1 -16 -25 -33 -43 -51 -54 -53 -52 -50 -46 -41 -36 -32 -26 -19 -15 -13 -12 -10 -7 -5 -3 -2 0 0 1 1 1 0 0 0 0 0 0 +848 950 1047 1117 1167 1212 1253 1281 1293 1313 1361 1437 1513 1575 1645 1752 1867 1903 1785 1553 1321 1150 973 685 289 -106 -406 -611 -783 -959 -1136 -1294 -1421 -1508 -1553 -1569 -1576 -1581 -1594 -1624 -1655 -1646 -1569 -1455 -1349 -1251 -1114 -935 -764 -656 -597 -550 -514 -515 -550 -566 -535 -480 -434 -404 -383 -373 -371 -347 -251 -71 150 356 530 694 888 1118 1343 1513 1628 1744 1911 2116 2295 2424 2526 2606 2624 2534 2356 2149 1949 1740 1482 1176 849 537 259 8 -241 -489 -702 -830 -879 -905 -953 -999 -983 -897 -795 -736 -727 -738 -748 -755 -760 -759 -757 -750 -726 -667 -586 -519 -482 -446 -371 -256 -137 -34 68 186 312 404 
436 394 279 97 -117 -321 -480 -599 -680 -706 -654 -551 -453 -396 -363 -316 -252 -190 -125 -23 133 300 429 504 556 600 619 587 517 438 368 291 189 47 -139 -368 -628 -896 -1149 -1371 -1548 -1651 -1660 -1595 -1500 -1398 -1273 -1118 -971 -873 -797 -667 -445 -165 125 423 742 1053 1311 1524 1731 1923 2044 2093 2146 2253 2341 2314 2191 2071 1969 1790 1488 1148 869 623 332 16 -226 -379 -537 -753 -961 -1086 -1146 -1202 -1248 -1241 -1196 -1155 -1106 -990 -816 -688 -663 -677 -631 -515 -384 -259 -130 5 122 229 354 464 487 407 319 305 332 299 175 30 -82 -195 -380 -636 -889 -1076 -1197 -1286 -1350 -1365 -1318 -1214 -1072 -900 -706 -500 -291 -87 111 299 477 661 870 1084 1248 1319 1317 1286 1228 1105 911 703 532 377 194 1 -131 -183 -215 -282 -362 -419 -461 -528 -604 -631 -600 -573 -585 -590 -511 -356 -192 -56 95 296 521 717 868 987 1082 1144 1188 1242 1303 1328 1299 1250 1216 1177 1059 837 570 336 162 7 -162 -327 -461 -571 -703 -880 -1072 -1239 -1368 -1476 -1573 -1636 -1628 -1528 -1355 -1151 -961 -795 -631 -437 -218 -21 136 288 489 725 918 1009 1017 1013 1014 970 838 651 474 335 207 60 -98 -248 -391 -540 -689 -823 -928 -1001 -1029 -1004 -940 -867 -782 -660 -489 -314 -182 -84 27 163 293 399 513 651 779 830 806 766 749 732 678 597 531 503 506 515 514 496 465 429 386 308 176 15 +0 0 0 0 1 2 3 4 5 7 9 11 14 17 21 25 30 35 37 36 33 32 30 23 10 -5 -18 -29 -39 -51 -65 -78 -91 -102 -112 -119 -126 -134 -142 -152 -163 -169 -169 -164 -159 -154 -142 -125 -106 -95 -90 -86 -83 -86 -95 -101 -99 -92 -86 -82 -81 -81 -83 -80 -60 -18 37 90 139 187 245 316 390 450 496 544 610 691 767 828 881 929 955 942 893 832 769 700 607 491 361 232 114 3 -111 -228 -332 -399 -430 -449 -481 -512 -511 -474 -426 -400 -401 -413 -424 -434 -443 -448 -453 -454 -445 -414 -369 -330 -311 -291 -245 -171 -93 -24 46 129 219 286 312 285 203 71 -88 -242 -364 -459 -525 -550 -513 -436 -362 -319 -294 -258 -208 -158 -105 -20 112 255 367 434 482 523 543 518 458 390 330 262 171 42 -128 -339 -581 -832 -1072 -1284 -1456 -1558 -1573 -1517 
-1432 -1339 -1223 -1078 -939 -846 -775 -650 -435 -162 122 415 731 1039 1296 1509 1717 1909 2032 2083 2138 2246 2336 2311 2189 2069 1968 1790 1488 1147 868 622 331 15 -226 -378 -535 -749 -955 -1078 -1136 -1189 -1232 -1223 -1177 -1134 -1083 -968 -796 -669 -643 -655 -608 -495 -368 -248 -124 4 115 215 331 432 452 376 293 279 302 271 157 26 -74 -174 -336 -559 -776 -934 -1032 -1102 -1149 -1154 -1107 -1012 -888 -740 -576 -405 -234 -70 87 234 371 509 665 821 937 981 971 939 888 791 646 493 369 259 132 0 -88 -121 -140 -182 -231 -264 -287 -324 -366 -378 -354 -334 -336 -335 -286 -197 -105 -30 50 153 266 361 430 481 519 540 551 567 585 585 562 532 508 482 426 330 220 127 60 2 -58 -115 -158 -191 -230 -282 -335 -378 -408 -430 -446 -453 -439 -402 -347 -287 -233 -188 -145 -98 -48 -5 27 56 93 133 163 173 169 162 156 144 120 89 62 42 25 7 -12 -27 -41 -53 -65 -73 -79 -80 -78 -72 -64 -56 -47 -38 -26 -16 -9 -4 1 5 9 12 14 16 18 17 15 12 10 9 7 5 4 3 2 2 1 1 0 0 0 0 0 0 +189 47 -139 -368 -628 -896 -1149 -1371 -1548 -1651 -1660 -1595 -1500 -1398 -1273 -1118 -971 -873 -797 -667 -445 -165 125 423 742 1053 1311 1524 1731 1923 2044 2093 2146 2253 2341 2314 2191 2071 1969 1790 1488 1148 869 623 332 16 -226 -379 -537 -753 -961 -1086 -1146 -1202 -1248 -1241 -1196 -1155 -1106 -990 -816 -688 -663 -677 -631 -515 -384 -259 -130 5 122 229 354 464 487 407 319 305 332 299 175 30 -82 -195 -380 -636 -889 -1076 -1197 -1286 -1350 -1365 -1318 -1214 -1072 -900 -706 -500 -291 -87 111 299 477 661 870 1084 1248 1319 1317 1286 1228 1105 911 703 532 377 194 1 -131 -183 -215 -282 -362 -419 -461 -528 -604 -631 -600 -573 -585 -590 -511 -356 -192 -56 95 296 521 717 868 987 1082 1144 1188 1242 1303 1328 1299 1250 1216 1177 1059 837 570 336 162 7 -162 -327 -461 -571 -703 -880 -1072 -1239 -1368 -1476 -1573 -1636 -1628 -1528 -1355 -1151 -961 -795 -631 -437 -218 -21 136 288 489 725 918 1009 1017 1013 1014 970 838 651 474 335 207 60 -98 -248 -391 -540 -689 -823 -928 -1001 -1029 -1004 -940 -867 -782 -660 -489 -314 -182 -84 
27 163 293 399 513 651 779 830 806 766 749 732 678 597 531 503 506 515 514 496 465 429 386 308 176 15 -118 -193 -232 -276 -341 -404 -443 -455 -444 -414 -363 -282 -167 -39 61 107 148 253 417 538 535 466 468 576 680 664 561 491 488 454 314 128 6 -48 -124 -260 -392 -461 -523 -652 -821 -929 -953 -980 -1071 -1155 -1124 -971 -790 -645 -505 -329 -135 26 131 202 269 355 477 612 702 707 654 599 547 444 279 117 23 -20 -72 -137 -163 -146 -149 -228 -344 -405 -379 -309 -250 -207 -166 -123 -97 -101 -130 -167 -186 -170 -112 -31 46 113 181 251 308 318 275 206 161 173 222 264 271 261 261 260 217 115 -11 -99 -130 -133 -157 -212 -269 -290 -272 -259 -285 -339 -360 -311 -220 -138 -72 26 171 311 378 381 400 485 601 678 701 718 765 821 827 767 666 553 434 310 195 98 18 -67 -165 -275 -407 -567 -736 -877 -969 -1028 -1078 -1100 +0 0 -1 -1 -1 -2 -4 -5 -7 -10 -12 -13 -15 -16 -17 -17 -17 -17 -17 -16 -12 -5 3 14 27 41 56 69 85 101 115 125 136 152 167 175 174 174 174 166 145 117 93 69 38 1 -29 -51 -75 -109 -144 -169 -185 -201 -216 -222 -221 -220 -218 -201 -171 -149 -148 -155 -149 -125 -96 -67 -35 1 33 64 102 138 148 127 101 99 110 102 61 10 -30 -73 -145 -247 -351 -433 -491 -538 -575 -592 -582 -546 -490 -419 -334 -241 -143 -44 55 153 247 348 465 588 687 737 746 738 714 651 544 425 326 233 121 0 -85 -120 -142 -188 -244 -286 -318 -367 -425 -448 -430 -415 -428 -436 -381 -268 -146 -43 73 230 408 566 691 792 875 933 976 1027 1086 1114 1097 1063 1041 1014 918 730 500 296 143 6 -146 -295 -419 -521 -644 -810 -991 -1151 -1276 -1382 -1479 -1544 -1543 -1453 -1293 -1102 -923 -766 -610 -424 -212 -21 132 281 479 712 904 995 1005 1003 1005 963 833 648 472 334 206 59 -98 -248 -391 -540 -689 -823 -928 -1001 -1028 -1003 -938 -865 -779 -657 -486 -312 -181 -84 26 160 288 391 502 635 759 806 781 740 721 703 649 569 504 476 477 484 481 462 431 396 355 282 160 13 -107 -174 -208 -246 -302 -355 -387 -395 -383 -355 -309 -239 -141 -33 50 87 120 204 334 428 423 365 364 444 519 503 421 365 359 331 227 91 4 -34 -87 -179 -267 
-311 -349 -430 -535 -598 -606 -616 -665 -708 -681 -581 -466 -376 -290 -187 -76 14 71 108 141 184 244 308 348 345 314 282 254 202 125 51 9 -9 -31 -57 -66 -58 -58 -87 -128 -148 -136 -108 -86 -70 -55 -40 -31 -31 -39 -49 -53 -48 -31 -9 11 28 43 59 70 70 59 43 32 34 42 48 48 44 43 41 33 17 -2 -14 -18 -17 -20 -25 -31 -32 -28 -26 -27 -31 -31 -25 -17 -10 -5 1 10 17 19 18 18 20 23 24 23 22 21 21 19 16 12 9 6 4 2 0 0 -1 -1 -2 -2 -2 -2 -2 -1 -1 -1 0 +-461 -571 -703 -880 -1072 -1239 -1368 -1476 -1573 -1636 -1628 -1528 -1355 -1151 -961 -795 -631 -437 -218 -21 136 288 489 725 918 1009 1017 1013 1014 970 838 651 474 335 207 60 -98 -248 -391 -540 -689 -823 -928 -1001 -1029 -1004 -940 -867 -782 -660 -489 -314 -182 -84 27 163 293 399 513 651 779 830 806 766 749 732 678 597 531 503 506 515 514 496 465 429 386 308 176 15 -118 -193 -232 -276 -341 -404 -443 -455 -444 -414 -363 -282 -167 -39 61 107 148 253 417 538 535 466 468 576 680 664 561 491 488 454 314 128 6 -48 -124 -260 -392 -461 -523 -652 -821 -929 -953 -980 -1071 -1155 -1124 -971 -790 -645 -505 -329 -135 26 131 202 269 355 477 612 702 707 654 599 547 444 279 117 23 -20 -72 -137 -163 -146 -149 -228 -344 -405 -379 -309 -250 -207 -166 -123 -97 -101 -130 -167 -186 -170 -112 -31 46 113 181 251 308 318 275 206 161 173 222 264 271 261 261 260 217 115 -11 -99 -130 -133 -157 -212 -269 -290 -272 -259 -285 -339 -360 -311 -220 -138 -72 26 171 311 378 381 400 485 601 678 701 718 765 821 827 767 666 553 434 310 195 98 18 -67 -165 -275 -407 -567 -736 -877 -969 -1028 -1078 -1100 -1064 -964 -852 -778 -748 -724 -676 -612 -550 -478 -375 -248 -133 -64 -25 31 124 221 281 308 352 441 539 583 568 535 522 531 535 513 480 457 459 477 478 437 361 285 231 201 188 200 247 307 342 340 326 321 316 288 241 198 170 147 110 59 2 -52 -91 -112 -137 -198 -303 -417 -496 -528 -546 -583 -632 -676 -714 -766 -833 -880 -870 -802 -708 -618 -536 -459 -386 -316 -235 -129 -9 100 187 264 351 442 523 579 602 590 555 534 554 605 646 659 662 677 679 621 492 335 206 119 65 36 35 
49 49 21 -26 -72 -113 -164 -221 -255 -254 -231 -215 -218 -230 -238 -244 -252 -267 -278 -264 -215 -150 -98 -72 -61 -55 -62 -99 -151 -183 -169 -120 -69 -51 -82 -155 -231 -262 -240 -203 -181 -157 -88 29 145 207 221 219 231 +0 -1 -1 -1 -2 -3 -4 -6 -7 -10 -12 -13 -13 -13 -13 -12 -11 -9 -5 -1 3 8 15 24 33 39 43 46 50 51 47 39 30 22 14 4 -8 -21 -35 -51 -68 -85 -100 -113 -121 -123 -120 -116 -109 -95 -73 -49 -30 -14 4 29 54 75 100 132 162 179 179 175 176 177 168 152 139 135 139 145 149 147 141 133 123 100 58 5 -42 -69 -85 -103 -130 -157 -175 -184 -183 -174 -155 -123 -74 -18 27 49 69 121 203 266 269 238 243 303 364 360 309 274 276 260 182 75 3 -30 -76 -162 -247 -294 -337 -425 -541 -619 -642 -667 -737 -803 -790 -689 -567 -467 -369 -243 -101 19 99 154 207 276 374 483 559 567 529 488 449 367 232 98 19 -18 -62 -119 -142 -128 -131 -202 -306 -362 -341 -279 -227 -189 -153 -114 -90 -94 -122 -157 -175 -161 -107 -30 43 108 173 241 297 308 267 200 157 169 217 259 267 257 258 257 215 114 -11 -99 -130 -133 -157 -212 -269 -290 -272 -259 -285 -339 -360 -311 -220 -138 -72 25 170 309 375 377 396 479 593 668 689 704 748 801 805 745 645 534 418 297 186 93 17 -64 -156 -259 -382 -529 -684 -811 -892 -942 -983 -998 -960 -866 -761 -691 -661 -636 -590 -531 -474 -410 -320 -210 -112 -54 -21 25 101 178 225 245 278 345 419 449 434 405 392 395 394 374 347 327 325 334 332 300 245 191 153 132 122 128 157 192 212 208 197 191 186 167 138 112 95 81 59 31 1 -28 -47 -57 -68 -97 -146 -197 -231 -242 -246 -258 -274 -288 -299 -315 -336 -348 -337 -305 -264 -226 -192 -161 -132 -106 -77 -42 -3 30 55 76 99 122 140 151 153 146 134 125 126 134 139 137 134 133 129 114 87 57 34 19 10 5 5 6 6 2 -4 -9 -13 -18 -23 -25 -24 -21 -19 -18 -18 -18 -17 -17 -17 -16 -14 -11 -7 -5 -3 -3 -2 -2 -3 -4 -5 -4 -3 -2 -1 -2 -2 -3 -3 -2 -2 -1 -1 -1 0 0 0 0 0 0 +-250 -207 -166 -123 -97 -101 -130 -167 -186 -170 -112 -31 46 113 181 251 308 318 275 206 161 173 222 264 271 261 261 260 217 115 -11 -99 -130 -133 -157 -212 -269 -290 -272 -259 -285 -339 
-360 -311 -220 -138 -72 26 171 311 378 381 400 485 601 678 701 718 765 821 827 767 666 553 434 310 195 98 18 -67 -165 -275 -407 -567 -736 -877 -969 -1028 -1078 -1100 -1064 -964 -852 -778 -748 -724 -676 -612 -550 -478 -375 -248 -133 -64 -25 31 124 221 281 308 352 441 539 583 568 535 522 531 535 513 480 457 459 477 478 437 361 285 231 201 188 200 247 307 342 340 326 321 316 288 241 198 170 147 110 59 2 -52 -91 -112 -137 -198 -303 -417 -496 -528 -546 -583 -632 -676 -714 -766 -833 -880 -870 -802 -708 -618 -536 -459 -386 -316 -235 -129 -9 100 187 264 351 442 523 579 602 590 555 534 554 605 646 659 662 677 679 621 492 335 206 119 65 36 35 49 49 21 -26 -72 -113 -164 -221 -255 -254 -231 -215 -218 -230 -238 -244 -252 -267 -278 -264 -215 -150 -98 -72 -61 -55 -62 -99 -151 -183 -169 -120 -69 -51 -82 -155 -231 -262 -240 -203 -181 -157 -88 29 145 207 221 219 231 261 303 352 403 450 498 541 558 529 465 411 395 412 425 420 397 364 315 234 109 -42 -178 -267 -320 -372 -438 -494 -518 -524 -550 -601 -652 -679 -688 -687 -666 -612 -535 -453 -376 -305 -251 -230 -237 -237 -195 -100 35 192 337 440 490 510 524 532 522 495 474 469 470 451 398 309 195 72 -46 -160 -271 -364 -410 -402 -369 -349 -360 -375 -369 -336 -296 -260 -215 -133 -2 147 267 332 348 336 290 208 124 98 149 228 277 284 275 263 220 126 14 -64 -77 -42 9 53 81 82 37 -67 -213 -354 -439 -443 -377 -268 -141 -28 49 84 92 88 81 80 111 198 325 429 461 423 356 290 218 123 24 -40 -46 -15 20 32 21 -2 -39 -107 -196 -259 -252 -183 -109 -72 -55 -20 32 61 43 -2 -23 5 68 142 214 +0 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 0 1 2 3 5 5 5 4 4 4 6 8 9 10 11 11 10 6 -1 -6 -9 -9 -12 -17 -22 -25 -25 -25 -28 -35 -39 -35 -26 -17 -10 3 23 44 56 58 64 80 103 120 129 136 150 166 173 165 147 126 102 75 48 25 4 -19 -46 -78 -119 -169 -225 -274 -310 -337 -361 -376 -372 -344 -311 -290 -284 -281 -267 -247 -226 -200 -160 -108 -59 -29 -12 14 58 106 137 152 177 225 280 307 304 290 287 296 303 294 279 269 274 288 292 271 226 181 148 130 123 133 166 208 235 236 228 227 226 
208 176 146 126 110 83 45 1 -41 -72 -89 -110 -160 -246 -341 -408 -437 -456 -490 -535 -576 -612 -661 -723 -768 -764 -709 -629 -552 -482 -415 -351 -288 -216 -119 -9 92 174 247 329 417 495 550 574 564 533 514 535 586 627 642 646 662 666 610 484 330 203 117 64 35 34 48 48 20 -26 -72 -113 -164 -221 -255 -254 -231 -215 -218 -230 -238 -244 -252 -266 -277 -263 -214 -149 -97 -72 -61 -55 -61 -97 -148 -179 -165 -117 -67 -50 -79 -149 -221 -250 -228 -192 -171 -147 -83 26 134 190 202 199 209 235 271 314 357 397 437 472 483 455 398 349 333 345 354 347 326 296 255 187 86 -34 -140 -208 -247 -285 -332 -372 -386 -387 -402 -435 -468 -482 -484 -478 -459 -417 -361 -302 -248 -199 -162 -147 -149 -148 -120 -61 20 113 196 252 277 285 288 288 279 261 246 239 236 223 194 148 92 33 -22 -72 -120 -158 -175 -169 -152 -141 -143 -146 -140 -125 -108 -93 -76 -46 -1 48 85 103 106 100 84 58 34 26 39 58 68 68 64 60 48 27 2 -13 -16 -8 1 9 13 13 5 -11 -32 -51 -61 -59 -49 -33 -17 -4 5 8 9 8 7 6 8 14 23 29 29 25 20 15 10 5 1 -2 -2 -1 0 0 0 -1 -1 -3 -4 -4 -4 -3 -2 -1 -1 -1 0 0 0 -1 -1 0 0 0 0 +-386 -316 -235 -129 -9 100 187 264 351 442 523 579 602 590 555 534 554 605 646 659 662 677 679 621 492 335 206 119 65 36 35 49 49 21 -26 -72 -113 -164 -221 -255 -254 -231 -215 -218 -230 -238 -244 -252 -267 -278 -264 -215 -150 -98 -72 -61 -55 -62 -99 -151 -183 -169 -120 -69 -51 -82 -155 -231 -262 -240 -203 -181 -157 -88 29 145 207 221 219 231 261 303 352 403 450 498 541 558 529 465 411 395 412 425 420 397 364 315 234 109 -42 -178 -267 -320 -372 -438 -494 -518 -524 -550 -601 -652 -679 -688 -687 -666 -612 -535 -453 -376 -305 -251 -230 -237 -237 -195 -100 35 192 337 440 490 510 524 532 522 495 474 469 470 451 398 309 195 72 -46 -160 -271 -364 -410 -402 -369 -349 -360 -375 -369 -336 -296 -260 -215 -133 -2 147 267 332 348 336 290 208 124 98 149 228 277 284 275 263 220 126 14 -64 -77 -42 9 53 81 82 37 -67 -213 -354 -439 -443 -377 -268 -141 -28 49 84 92 88 81 80 111 198 325 429 461 423 356 290 218 123 24 -40 -46 -15 20 32 21 
-2 -39 -107 -196 -259 -252 -183 -109 -72 -55 -20 32 61 43 -2 -23 5 68 142 214 287 346 363 312 208 101 41 39 59 69 63 55 43 1 -93 -225 -365 -490 -589 -651 -667 -643 -602 -577 -593 -655 -747 -841 -893 -873 -773 -622 -461 -321 -210 -129 -73 -32 0 20 26 45 119 255 399 482 495 496 522 546 519 458 436 485 553 572 535 486 448 403 332 245 165 104 62 43 38 12 -58 -163 -263 -333 -371 -389 -377 -333 -275 -237 -226 -219 -198 -168 -137 -102 -41 56 190 343 468 517 490 459 488 555 578 527 477 500 570 599 563 522 517 495 396 260 171 149 140 127 159 233 269 228 207 266 234 -161 -900 -1531 -1573 -975 -106 658 1164 1304 876 -170 -1404 -2104 -1848 -854 312 1212 1655 1508 693 -584 -1773 -2326 -2086 -1315 -397 343 627 269 -639 -1639 -2164 -1945 -1136 -128 703 1084 875 169 -664 -1135 -962 -226 +0 -1 -1 -1 -1 0 0 0 1 2 3 4 5 6 7 7 9 11 13 15 16 19 21 20 18 13 8 5 3 1 1 2 3 1 -2 -6 -10 -14 -20 -24 -25 -24 -24 -25 -27 -30 -32 -34 -37 -40 -40 -34 -25 -17 -13 -11 -11 -12 -20 -31 -39 -37 -27 -16 -13 -20 -39 -60 -69 -65 -57 -52 -46 -27 8 45 66 72 73 78 91 108 128 149 170 192 213 224 216 194 174 171 181 190 191 184 171 151 114 54 -22 -92 -139 -169 -200 -238 -273 -290 -297 -316 -350 -385 -406 -417 -421 -414 -385 -341 -292 -245 -201 -168 -155 -162 -163 -136 -71 24 137 243 321 361 379 393 403 399 381 368 367 371 359 319 250 159 59 -39 -134 -228 -308 -349 -345 -319 -303 -315 -330 -326 -299 -265 -234 -194 -121 -2 134 245 306 323 313 271 195 117 92 141 217 265 272 264 254 213 122 13 -63 -76 -42 8 52 79 81 36 -67 -212 -353 -438 -442 -376 -268 -141 -28 48 83 92 88 80 79 110 197 324 427 459 421 354 288 216 121 23 -40 -46 -15 19 31 20 -2 -38 -104 -190 -250 -243 -176 -104 -69 -53 -19 30 57 40 -2 -22 4 62 129 194 258 310 324 276 183 88 35 33 50 59 53 46 36 0 -77 -185 -298 -397 -474 -519 -528 -505 -469 -446 -454 -497 -562 -627 -659 -638 -560 -446 -327 -226 -146 -89 -50 -22 0 13 16 28 75 160 247 295 299 296 307 317 298 259 243 267 300 306 282 252 229 203 164 119 79 49 28 19 17 5 -26 -70 -110 -137 -150 -154 
-146 -127 -103 -87 -81 -77 -68 -57 -45 -33 -13 17 56 99 132 143 132 120 124 138 139 124 109 111 123 125 114 102 98 91 70 44 28 23 21 18 22 32 35 29 25 31 26 -18 -93 -150 -147 -87 -9 52 88 93 59 -11 -85 -119 -98 -43 14 51 65 55 23 -19 -51 -60 -49 -28 -8 5 9 3 -8 -16 -18 -14 -7 -1 2 2 1 0 -1 -1 -1 0 +-133 -2 147 267 332 348 336 290 208 124 98 149 228 277 284 275 263 220 126 14 -64 -77 -42 9 53 81 82 37 -67 -213 -354 -439 -443 -377 -268 -141 -28 49 84 92 88 81 80 111 198 325 429 461 423 356 290 218 123 24 -40 -46 -15 20 32 21 -2 -39 -107 -196 -259 -252 -183 -109 -72 -55 -20 32 61 43 -2 -23 5 68 142 214 287 346 363 312 208 101 41 39 59 69 63 55 43 1 -93 -225 -365 -490 -589 -651 -667 -643 -602 -577 -593 -655 -747 -841 -893 -873 -773 -622 -461 -321 -210 -129 -73 -32 0 20 26 45 119 255 399 482 495 496 522 546 519 458 436 485 553 572 535 486 448 403 332 245 165 104 62 43 38 12 -58 -163 -263 -333 -371 -389 -377 -333 -275 -237 -226 -219 -198 -168 -137 -102 -41 56 190 343 468 517 490 459 488 555 578 527 477 500 570 599 563 522 517 495 396 260 171 149 140 127 159 233 269 228 207 266 234 -161 -900 -1531 -1573 -975 -106 658 1164 1304 876 -170 -1404 -2104 -1848 -854 312 1212 1655 1508 693 -584 -1773 -2326 -2086 -1315 -397 343 627 269 -639 -1639 -2164 -1945 -1136 -128 703 1084 875 169 -664 -1135 -962 -226 717 1474 1786 1584 986 265 -249 -343 -14 546 1064 1305 1162 696 100 -365 -496 -258 178 527 575 304 -147 -599 -899 -932 -671 -247 101 189 34 -219 -423 -491 -372 -72 292 534 543 348 69 -164 -246 -138 104 366 555 614 493 204 -79 -102 178 483 530 341 111 -153 -575 -1002 -952 -227 695 1102 831 276 -264 -845 -1383 -1344 -328 1221 2285 2276 1463 404 -651 -1587 -2011 -1481 -160 1112 1548 1060 114 -864 -1677 -2125 -1938 -1077 78 977 1344 1258 881 301 -325 -667 -454 170 715 812 556 294 142 -91 -547 -1017 -1178 -998 -773 -757 -901 -1023 -1091 -1210 -1328 -1143 -390 769 1734 1923 1323 526 156 286 540 738 1150 1893 2348 1694 53 -1363 -1576 -940 -483 -454 -217 379 365 -1049 -3031 -3803 -2702 
-886 200 389 408 476 113 -759 -1304 -725 698 1914 2276 2035 1740 1572 +0 -1 0 0 0 0 0 0 0 0 0 1 2 3 3 4 4 4 2 0 -2 -3 -2 0 1 3 3 1 -4 -12 -20 -27 -29 -26 -20 -11 -3 4 7 8 8 8 8 12 23 39 54 61 58 51 43 33 19 3 -7 -9 -3 3 6 4 -1 -9 -24 -45 -61 -62 -46 -28 -19 -15 -6 9 17 12 -1 -8 1 22 47 73 100 123 132 116 78 39 16 15 24 28 26 23 18 0 -43 -105 -173 -236 -288 -323 -337 -330 -313 -305 -318 -356 -412 -470 -507 -502 -450 -367 -276 -195 -129 -81 -46 -21 0 13 17 29 80 173 274 335 347 351 374 394 379 337 324 364 419 437 412 378 351 318 264 196 133 84 50 35 31 10 -50 -139 -226 -287 -322 -340 -331 -294 -245 -212 -203 -198 -180 -154 -126 -94 -38 51 177 321 439 487 464 436 465 531 555 507 460 484 553 583 549 511 507 486 390 256 169 147 138 126 158 231 268 227 206 265 233 -161 -900 -1531 -1573 -975 -106 657 1162 1301 873 -170 -1398 -2093 -1836 -848 309 1198 1633 1485 681 -574 -1736 -2273 -2033 -1278 -385 331 604 258 -612 -1564 -2058 -1843 -1072 -121 658 1010 812 156 -611 -1040 -877 -205 646 1322 1594 1406 870 232 -218 -298 -13 467 905 1103 975 580 82 -300 -405 -209 142 420 454 238 -115 -462 -688 -707 -505 -184 74 138 24 -157 -301 -345 -259 -50 198 359 361 229 44 -106 -157 -87 64 224 335 366 290 118 -46 -58 99 266 287 182 58 -80 -295 -505 -473 -111 333 520 386 126 -119 -373 -600 -573 -138 500 919 898 566 153 -243 -579 -718 -518 -55 371 506 338 35 -264 -500 -618 -550 -298 21 256 343 312 213 70 -75 -149 -98 35 145 159 105 54 25 -16 -92 -164 -183 -149 -112 -105 -120 -131 -134 -142 -150 -123 -40 75 161 170 111 41 11 20 36 47 69 106 123 83 2 -59 -63 -35 -17 -15 -7 9 8 -23 -57 -64 -40 -12 2 3 3 3 0 -4 -5 -2 1 2 1 0 0 0 +-198 -168 -137 -102 -41 56 190 343 468 517 490 459 488 555 578 527 477 500 570 599 563 522 517 495 396 260 171 149 140 127 159 233 269 228 207 266 234 -161 -900 -1531 -1573 -975 -106 658 1164 1304 876 -170 -1404 -2104 -1848 -854 312 1212 1655 1508 693 -584 -1773 -2326 -2086 -1315 -397 343 627 269 -639 -1639 -2164 -1945 -1136 -128 703 1084 875 169 -664 -1135 -962 -226 
717 1474 1786 1584 986 265 -249 -343 -14 546 1064 1305 1162 696 100 -365 -496 -258 178 527 575 304 -147 -599 -899 -932 -671 -247 101 189 34 -219 -423 -491 -372 -72 292 534 543 348 69 -164 -246 -138 104 366 555 614 493 204 -79 -102 178 483 530 341 111 -153 -575 -1002 -952 -227 695 1102 831 276 -264 -845 -1383 -1344 -328 1221 2285 2276 1463 404 -651 -1587 -2011 -1481 -160 1112 1548 1060 114 -864 -1677 -2125 -1938 -1077 78 977 1344 1258 881 301 -325 -667 -454 170 715 812 556 294 142 -91 -547 -1017 -1178 -998 -773 -757 -901 -1023 -1091 -1210 -1328 -1143 -390 769 1734 1923 1323 526 156 286 540 738 1150 1893 2348 1694 53 -1363 -1576 -940 -483 -454 -217 379 365 -1049 -3031 -3803 -2702 -886 200 389 408 476 113 -759 -1304 -725 698 1914 2276 2035 1740 1572 1386 1108 843 688 649 693 765 707 345 -293 -893 -1140 -1056 -945 -981 -979 -707 -331 -260 -552 -726 -393 141 236 -161 -402 -19 625 889 763 815 1305 1813 1839 1404 857 378 -55 -377 -443 -257 23 250 270 -131 -938 -1641 -1645 -1017 -477 -466 -638 -499 -185 -142 -305 -189 236 377 -79 -604 -522 105 683 939 1050 1088 846 304 -193 -385 -398 -393 -250 132 469 389 -59 -500 -758 -945 -1046 -850 -357 70 172 140 283 546 666 635 676 822 835 607 375 390 576 661 532 314 171 153 182 95 -212 -626 -897 -952 -1016 -1257 -1469 -1352 -1022 -940 -1238 -1477 -1240 -734 -489 -622 -727 -486 -61 239 405 640 986 1260 1319 1209 1057 955 972 1111 1238 1160 868 602 552 573 343 -168 -550 -408 146 555 431 -85 -585 -884 -1067 -1148 -961 -497 -127 -259 -830 -1358 -1485 +0 -1 -1 -1 -1 0 0 1 2 2 3 3 4 6 7 7 7 9 11 13 14 14 16 16 14 10 7 6 6 6 8 13 17 15 14 20 18 -14 -80 -143 -154 -100 -12 73 136 159 111 -23 -195 -303 -276 -133 50 201 285 268 127 -112 -349 -472 -437 -284 -89 78 147 65 -159 -419 -568 -525 -315 -37 204 322 266 52 -213 -372 -322 -78 250 525 650 588 374 102 -99 -139 -6 228 453 565 512 312 45 -170 -235 -124 86 261 289 155 -77 -317 -482 -507 -370 -139 57 108 19 -130 -253 -298 -228 -45 183 339 349 226 45 -110 -166 -94 71 254 389 435 353 147 -58 -76 
132 362 401 260 85 -120 -451 -793 -759 -183 562 898 682 228 -221 -710 -1169 -1144 -281 1052 1981 1985 1284 356 -578 -1417 -1805 -1337 -146 1013 1417 975 105 -803 -1564 -1990 -1822 -1017 73 928 1282 1204 846 289 -315 -647 -442 165 698 794 545 289 139 -90 -541 -1008 -1169 -992 -769 -754 -898 -1021 -1089 -1209 -1328 -1143 -390 769 1734 1922 1322 525 155 285 538 735 1144 1882 2331 1680 52 -1348 -1556 -927 -475 -446 -213 370 355 -1020 -2938 -3675 -2604 -851 191 371 387 450 106 -714 -1221 -676 648 1769 2094 1863 1585 1425 1250 994 752 610 572 608 667 613 297 -251 -760 -964 -887 -788 -812 -805 -577 -268 -209 -441 -575 -309 109 182 -124 -305 -15 465 655 557 589 935 1285 1291 975 589 257 -38 -251 -292 -168 14 158 169 -82 -575 -994 -984 -600 -278 -268 -362 -279 -102 -78 -164 -100 122 192 -40 -300 -255 50 322 436 479 488 373 131 -83 -161 -164 -159 -99 51 177 144 -22 -179 -265 -323 -350 -278 -115 21 52 41 82 154 184 171 177 210 207 147 88 89 127 142 111 63 33 29 33 16 -37 -105 -145 -148 -152 -181 -203 -180 -131 -115 -146 -166 -134 -76 -48 -59 -65 -41 -5 18 28 43 62 75 74 63 52 43 41 43 45 39 26 17 14 13 7 -4 -10 -6 1 6 4 -1 -4 -5 -5 -4 -3 -1 -1 -1 -1 -1 0 +-160 1112 1548 1060 114 -864 -1677 -2125 -1938 -1077 78 977 1344 1258 881 301 -325 -667 -454 170 715 812 556 294 142 -91 -547 -1017 -1178 -998 -773 -757 -901 -1023 -1091 -1210 -1328 -1143 -390 769 1734 1923 1323 526 156 286 540 738 1150 1893 2348 1694 53 -1363 -1576 -940 -483 -454 -217 379 365 -1049 -3031 -3803 -2702 -886 200 389 408 476 113 -759 -1304 -725 698 1914 2276 2035 1740 1572 1386 1108 843 688 649 693 765 707 345 -293 -893 -1140 -1056 -945 -981 -979 -707 -331 -260 -552 -726 -393 141 236 -161 -402 -19 625 889 763 815 1305 1813 1839 1404 857 378 -55 -377 -443 -257 23 250 270 -131 -938 -1641 -1645 -1017 -477 -466 -638 -499 -185 -142 -305 -189 236 377 -79 -604 -522 105 683 939 1050 1088 846 304 -193 -385 -398 -393 -250 132 469 389 -59 -500 -758 -945 -1046 -850 -357 70 172 140 283 546 666 635 676 822 835 607 375 390 576 
661 532 314 171 153 182 95 -212 -626 -897 -952 -1016 -1257 -1469 -1352 -1022 -940 -1238 -1477 -1240 -734 -489 -622 -727 -486 -61 239 405 640 986 1260 1319 1209 1057 955 972 1111 1238 1160 868 602 552 573 343 -168 -550 -408 146 555 431 -85 -585 -884 -1067 -1148 -961 -497 -127 -259 -830 -1358 -1485 -1283 -994 -709 -417 -191 -122 -129 -42 165 379 568 802 1041 1111 987 893 991 1112 1053 965 1158 1528 1553 997 335 173 455 623 491 505 944 1296 845 -277 -1145 -1204 -917 -992 -1353 -1341 -789 -322 -483 -973 -1107 -751 -405 -468 -759 -828 -467 178 763 928 535 -143 -614 -647 -419 -86 457 1144 1363 559 -828 -1600 -1167 -268 3 -460 -867 -764 -532 -634 -904 -931 -772 -831 -1164 -1316 -937 -253 210 202 -79 -284 -202 135 527 726 622 354 205 356 693 906 785 451 226 260 381 395 404 629 943 904 381 -90 89 763 1180 962 556 530 808 909 739 708 1046 1412 1373 993 685 634 640 491 262 173 338 688 1026 1107 801 277 -78 -52 167 246 136 55 88 31 -252 -535 -509 -276 -312 -844 -1501 -1731 -1478 -1219 -1372 -1852 -2297 -2550 -2742 -2977 -3143 -3121 +0 0 0 0 0 -2 -5 -8 -9 -7 0 7 12 14 11 4 -6 -13 -10 3 18 22 17 9 5 -4 -24 -47 -59 -53 -44 -46 -58 -70 -79 -92 -107 -97 -35 71 169 197 142 59 18 34 68 98 158 272 350 262 8 -227 -272 -168 -90 -87 -43 76 76 -227 -674 -870 -636 -215 49 99 107 128 31 -216 -380 -216 212 597 727 665 581 537 483 395 307 255 246 268 302 284 141 -123 -381 -495 -466 -425 -449 -455 -334 -160 -127 -274 -366 -202 73 124 -87 -219 -11 349 503 438 474 769 1083 1113 860 531 237 -35 -243 -289 -170 15 168 183 -91 -652 -1153 -1167 -729 -346 -341 -471 -372 -139 -108 -234 -146 183 295 -63 -482 -420 85 557 771 869 906 710 256 -165 -330 -344 -341 -219 115 414 345 -53 -449 -684 -857 -954 -779 -329 64 159 130 264 513 628 601 642 784 799 582 361 376 558 642 518 306 167 150 178 93 -210 -619 -889 -945 -1010 -1250 -1463 -1348 -1020 -939 -1237 -1476 -1240 -734 -489 -622 -727 -486 -61 238 404 638 982 1254 1311 1200 1048 945 961 1096 1219 1140 851 589 539 558 333 -163 -532 -394 140 531 411 -81 -555 
-835 -1003 -1075 -897 -462 -118 -239 -761 -1238 -1347 -1158 -893 -633 -371 -169 -108 -113 -37 142 324 483 677 873 926 816 733 808 900 845 769 915 1198 1207 768 256 131 341 463 362 368 682 928 599 -195 -796 -829 -624 -668 -901 -883 -514 -208 -308 -612 -688 -461 -246 -280 -448 -482 -269 100 426 511 290 -77 -324 -337 -215 -44 226 558 654 264 -385 -731 -524 -119 1 -196 -363 -314 -215 -251 -351 -354 -288 -303 -416 -460 -321 -85 68 64 -25 -87 -61 39 149 200 167 92 52 88 167 213 179 100 48 54 77 77 76 115 168 155 63 -15 13 113 169 132 73 67 98 106 82 76 107 138 128 88 57 50 48 35 17 11 20 38 54 54 36 11 -4 -2 5 7 3 1 2 0 -5 -9 -8 -4 -4 -9 -13 -12 -9 -6 -5 -5 -5 -4 -3 -2 -1 0 +-945 -1046 -850 -357 70 172 140 283 546 666 635 676 822 835 607 375 390 576 661 532 314 171 153 182 95 -212 -626 -897 -952 -1016 -1257 -1469 -1352 -1022 -940 -1238 -1477 -1240 -734 -489 -622 -727 -486 -61 239 405 640 986 1260 1319 1209 1057 955 972 1111 1238 1160 868 602 552 573 343 -168 -550 -408 146 555 431 -85 -585 -884 -1067 -1148 -961 -497 -127 -259 -830 -1358 -1485 -1283 -994 -709 -417 -191 -122 -129 -42 165 379 568 802 1041 1111 987 893 991 1112 1053 965 1158 1528 1553 997 335 173 455 623 491 505 944 1296 845 -277 -1145 -1204 -917 -992 -1353 -1341 -789 -322 -483 -973 -1107 -751 -405 -468 -759 -828 -467 178 763 928 535 -143 -614 -647 -419 -86 457 1144 1363 559 -828 -1600 -1167 -268 3 -460 -867 -764 -532 -634 -904 -931 -772 -831 -1164 -1316 -937 -253 210 202 -79 -284 -202 135 527 726 622 354 205 356 693 906 785 451 226 260 381 395 404 629 943 904 381 -90 89 763 1180 962 556 530 808 909 739 708 1046 1412 1373 993 685 634 640 491 262 173 338 688 1026 1107 801 277 -78 -52 167 246 136 55 88 31 -252 -535 -509 -276 -312 -844 -1501 -1731 -1478 -1219 -1372 -1852 -2297 -2550 -2742 -2977 -3143 -3121 -3041 -3184 -3666 -4262 -4595 -4449 -3942 -3403 -3104 -3056 -3049 -2845 -2347 -1572 -592 462 1391 2047 2501 3015 3756 4585 5191 5436 5492 5624 5896 6145 6187 6002 5705 5398 5108 4811 4488 4136 3740 3284 2776 
2289 1943 1805 1759 1526 896 -6 -783 -1124 -1100 -1041 -1181 -1499 -1846 -2116 -2246 -2191 -1988 -1763 -1635 -1614 -1658 -1740 -1791 -1691 -1453 -1360 -1689 -2316 -2784 -2897 -3049 -3801 -5196 -6628 -7371 -7157 -6314 -5539 -5513 -6480 -8002 -9152 -9120 -7710 -5406 -3049 -1443 -962 -1281 -1481 -637 1471 4153 6314 7269 7220 7021 7435 8553 9832 10655 10768 10255 9335 8316 7553 7136 6627 5402 3361 1179 -264 -646 -383 -263 -969 -2718 -5028 -6918 -7557 -6853 -5362 -3781 -2664 -2404 -3010 -3809 -3802 -2599 -823 605 1424 2005 2616 3157 3571 4041 4636 5091 5151 4913 4583 4139 3486 2813 2449 2406 2377 2157 1787 1252 365 -809 -1688 -1637 -694 377 747 153 -1061 +0 -1 -1 -1 0 0 0 0 2 3 4 5 7 9 7 5 6 10 13 12 8 4 4 6 3 -9 -27 -42 -47 -54 -71 -89 -87 -70 -68 -94 -118 -105 -66 -46 -61 -75 -53 -7 28 49 81 130 174 189 180 163 153 161 191 220 213 165 118 111 119 74 -38 -126 -97 35 138 110 -23 -158 -245 -303 -334 -287 -152 -40 -83 -272 -454 -508 -448 -355 -259 -156 -73 -48 -51 -17 67 158 241 347 459 498 450 414 468 534 514 478 583 781 806 526 179 93 250 348 278 289 549 764 505 -168 -702 -748 -577 -631 -871 -873 -520 -215 -326 -663 -762 -522 -285 -332 -544 -599 -342 131 567 697 405 -110 -474 -504 -329 -69 364 918 1103 455 -681 -1325 -973 -225 2 -392 -743 -659 -462 -554 -794 -822 -686 -742 -1045 -1188 -850 -231 192 185 -74 -264 -189 126 495 685 589 336 195 340 665 872 758 437 219 253 372 386 396 618 929 892 376 -90 88 757 1173 957 554 528 806 907 738 707 1045 1412 1373 992 684 633 639 490 261 172 336 684 1018 1098 793 273 -77 -52 164 241 133 53 85 30 -245 -517 -491 -266 -299 -806 -1427 -1640 -1395 -1146 -1285 -1727 -2133 -2358 -2524 -2727 -2865 -2830 -2744 -2858 -3273 -3784 -4057 -3905 -3440 -2952 -2676 -2617 -2595 -2405 -1970 -1311 -490 379 1134 1657 2008 2403 2970 3595 4037 4192 4199 4261 4429 4574 4563 4384 4126 3867 3622 3378 3119 2844 2544 2210 1848 1506 1264 1161 1118 958 556 -4 -474 -672 -649 -606 -679 -850 -1032 -1166 -1221 -1174 -1049 -916 -837 -814 -823 -850 -861 -799 -676 
-622 -759 -1022 -1207 -1234 -1275 -1560 -2091 -2617 -2855 -2716 -2348 -2018 -1967 -2263 -2736 -3059 -2982 -2464 -1689 -930 -430 -280 -364 -410 -172 386 1061 1570 1760 1699 1606 1651 1845 2057 2161 2118 1952 1720 1482 1301 1188 1064 836 501 169 -37 -86 -49 -33 -114 -306 -541 -710 -740 -640 -476 -319 -213 -182 -216 -258 -243 -157 -47 31 70 92 111 124 130 136 143 144 132 113 96 77 57 41 31 27 22 17 12 7 1 -3 -5 -4 -1 0 0 0 0 +-937 -253 210 202 -79 -284 -202 135 527 726 622 354 205 356 693 906 785 451 226 260 381 395 404 629 943 904 381 -90 89 763 1180 962 556 530 808 909 739 708 1046 1412 1373 993 685 634 640 491 262 173 338 688 1026 1107 801 277 -78 -52 167 246 136 55 88 31 -252 -535 -509 -276 -312 -844 -1501 -1731 -1478 -1219 -1372 -1852 -2297 -2550 -2742 -2977 -3143 -3121 -3041 -3184 -3666 -4262 -4595 -4449 -3942 -3403 -3104 -3056 -3049 -2845 -2347 -1572 -592 462 1391 2047 2501 3015 3756 4585 5191 5436 5492 5624 5896 6145 6187 6002 5705 5398 5108 4811 4488 4136 3740 3284 2776 2289 1943 1805 1759 1526 896 -6 -783 -1124 -1100 -1041 -1181 -1499 -1846 -2116 -2246 -2191 -1988 -1763 -1635 -1614 -1658 -1740 -1791 -1691 -1453 -1360 -1689 -2316 -2784 -2897 -3049 -3801 -5196 -6628 -7371 -7157 -6314 -5539 -5513 -6480 -8002 -9152 -9120 -7710 -5406 -3049 -1443 -962 -1281 -1481 -637 1471 4153 6314 7269 7220 7021 7435 8553 9832 10655 10768 10255 9335 8316 7553 7136 6627 5402 3361 1179 -264 -646 -383 -263 -969 -2718 -5028 -6918 -7557 -6853 -5362 -3781 -2664 -2404 -3010 -3809 -3802 -2599 -823 605 1424 2005 2616 3157 3571 4041 4636 5091 5151 4913 4583 4139 3486 2813 2449 2406 2377 2157 1787 1252 365 -809 -1688 -1637 -694 377 747 153 -1061 -2378 -3567 -4795 -6287 -7837 -8771 -8547 -7413 -6393 -6557 -8286 -11010 -13449 -14215 -12600 -9121 -5300 -2739 -2193 -3196 -4314 -3925 -1294 2703 6142 7627 7415 6915 7257 8527 10115 11391 11990 11841 11158 10255 9302 8308 7309 6338 5257 3952 2699 1910 1332 10 -2501 -5226 -6659 -6501 -5985 -6475 -8141 -10002 -10734 -9436 -6271 -2792 -1110 -2098 
-4511 -6115 -5498 -2650 1557 5704 8014 7438 4951 3052 3449 5517 7325 7662 6720 5325 4247 3990 4504 4972 4484 3090 1752 1182 1155 1140 1022 939 832 666 770 1408 2135 2120 1148 -44 -539 -48 943 1416 181 -3294 -7917 -11197 -11136 -8135 -4873 -4196 -7060 -12089 -16635 -18361 -16475 -12078 -7417 -4523 -4150 -5567 -7060 -6835 -4119 183 4073 6079 6454 6654 7751 9528 11029 11684 11752 11853 12280 12749 12651 11564 9581 7229 5125 3678 2974 2689 2080 318 -2789 -6309 -8655 -8770 -7038 -5058 -4458 -5761 -8138 -10106 -10500 -8964 -5928 -2366 458 1508 +0 -1 0 0 -1 -1 -1 0 2 4 4 2 1 3 8 13 13 8 4 6 9 11 12 21 34 35 16 -5 4 40 66 57 35 35 57 68 58 59 92 131 134 101 73 71 75 60 33 22 46 98 153 171 128 46 -14 -10 30 46 26 11 18 6 -56 -123 -120 -67 -78 -216 -394 -467 -409 -346 -399 -552 -701 -797 -877 -974 -1051 -1067 -1062 -1136 -1336 -1585 -1744 -1723 -1557 -1370 -1274 -1278 -1299 -1233 -1036 -706 -271 214 657 983 1221 1495 1892 2346 2696 2867 2940 3055 3248 3434 3505 3446 3320 3183 3052 2911 2750 2566 2349 2087 1785 1489 1278 1201 1183 1038 616 -5 -550 -798 -789 -754 -863 -1106 -1375 -1590 -1703 -1676 -1534 -1372 -1283 -1277 -1322 -1398 -1450 -1380 -1195 -1126 -1408 -1944 -2354 -2465 -2611 -3276 -4506 -5784 -6470 -6319 -5605 -4944 -4948 -5846 -7256 -8341 -8352 -7095 -4997 -2831 -1346 -901 -1205 -1398 -604 1398 3962 6044 6981 6955 6784 7204 8310 9579 10407 10541 10062 9180 8194 7455 7055 6563 5358 3338 1172 -263 -644 -382 -263 -968 -2717 -5026 -6917 -7557 -6853 -5361 -3780 -2663 -2402 -3005 -3799 -3790 -2588 -819 600 1412 1985 2586 3116 3518 3973 4548 4984 5031 4787 4453 4010 3368 2709 2352 2303 2267 2050 1692 1181 343 -758 -1574 -1520 -642 346 684 139 -963 -2146 -3202 -4280 -5581 -6919 -7699 -7458 -6429 -5510 -5615 -7050 -9306 -11289 -11849 -10429 -7496 -4324 -2218 -1762 -2548 -3412 -3078 -1007 2084 4696 5779 5570 5147 5352 6228 7317 8162 8503 8314 7755 7052 6329 5592 4866 4171 3421 2542 1715 1199 826 6 -1514 -3124 -3928 -3784 -3437 -3670 -4550 -5512 -5831 -5053 -3309 -1451 -569 
-1058 -2238 -2986 -2642 -1252 723 2605 3598 3281 2145 1299 1441 2262 2947 3024 2602 2020 1579 1453 1606 1735 1532 1032 572 377 360 347 304 273 235 184 207 369 545 527 278 -11 -124 -11 203 296 36 -649 -1508 -2064 -1985 -1403 -812 -675 -1093 -1804 -2393 -2538 -2189 -1540 -908 -531 -467 -599 -724 -670 -385 16 343 485 488 475 524 607 662 658 619 584 563 544 500 423 322 224 145 94 68 56 39 5 -41 -82 -98 -84 -57 -35 -26 -26 -28 -28 -21 -11 -5 -2 0 0 +-8002 -9152 -9120 -7710 -5406 -3049 -1443 -962 -1281 -1481 -637 1471 4153 6314 7269 7220 7021 7435 8553 9832 10655 10768 10255 9335 8316 7553 7136 6627 5402 3361 1179 -264 -646 -383 -263 -969 -2718 -5028 -6918 -7557 -6853 -5362 -3781 -2664 -2404 -3010 -3809 -3802 -2599 -823 605 1424 2005 2616 3157 3571 4041 4636 5091 5151 4913 4583 4139 3486 2813 2449 2406 2377 2157 1787 1252 365 -809 -1688 -1637 -694 377 747 153 -1061 -2378 -3567 -4795 -6287 -7837 -8771 -8547 -7413 -6393 -6557 -8286 -11010 -13449 -14215 -12600 -9121 -5300 -2739 -2193 -3196 -4314 -3925 -1294 2703 6142 7627 7415 6915 7257 8527 10115 11391 11990 11841 11158 10255 9302 8308 7309 6338 5257 3952 2699 1910 1332 10 -2501 -5226 -6659 -6501 -5985 -6475 -8141 -10002 -10734 -9436 -6271 -2792 -1110 -2098 -4511 -6115 -5498 -2650 1557 5704 8014 7438 4951 3052 3449 5517 7325 7662 6720 5325 4247 3990 4504 4972 4484 3090 1752 1182 1155 1140 1022 939 832 666 770 1408 2135 2120 1148 -44 -539 -48 943 1416 181 -3294 -7917 -11197 -11136 -8135 -4873 -4196 -7060 -12089 -16635 -18361 -16475 -12078 -7417 -4523 -4150 -5567 -7060 -6835 -4119 183 4073 6079 6454 6654 7751 9528 11029 11684 11752 11853 12280 12749 12651 11564 9581 7229 5125 3678 2974 2689 2080 318 -2789 -6309 -8655 -8770 -7038 -5058 -4458 -5761 -8138 -10106 -10500 -8964 -5928 -2366 458 1508 679 -933 -1727 -748 1743 4691 7028 8121 7886 6762 5491 4658 4362 4363 4440 4447 4166 3391 2226 1111 415 167 199 378 573 563 192 -401 -829 -707 138 1550 2944 3509 2851 1466 384 260 969 1977 2725 2530 613 -3208 -7805 -11112 -11563 -9323 
-6310 -5045 -7117 -12066 -17495 -20536 -19701 -15650 -10444 -6236 -4369 -4920 -6573 -7157 -5094 -676 4193 7654 9297 10029 10897 12175 13391 14020 14092 14192 14771 15494 15405 13856 11113 8003 5233 3225 2265 2225 2138 648 -2704 -6777 -9681 -10283 -8878 -6733 -5280 -5447 -7138 -9181 -10089 -9168 -6841 -4018 -1457 247 657 -48 -807 -437 1265 3440 5178 6202 6584 6308 5457 4519 4024 3966 3914 3640 3283 2895 2242 1206 87 -686 -939 -709 -90 633 885 275 -849 -1587 -1274 42 1801 3268 3733 2900 1360 358 731 2112 3407 3857 3352 1916 -585 -4006 -7520 -9936 -10454 -9208 -7350 -6612 -8326 -12365 -16983 -19905 -19746 -16577 +0 -3 -5 -6 -7 -6 -4 -4 -6 -9 -5 11 39 70 94 105 116 139 179 228 273 304 317 314 304 298 304 304 266 177 66 -16 -42 -26 -19 -74 -217 -424 -614 -705 -671 -550 -407 -300 -282 -369 -486 -505 -360 -119 90 220 322 435 544 636 744 882 1001 1045 1027 989 919 797 662 593 598 607 566 481 346 103 -236 -503 -500 -217 120 244 51 -363 -831 -1273 -1747 -2338 -2974 -3397 -3375 -2983 -2623 -2741 -3529 -4772 -5934 -6383 -5756 -4238 -2504 -1317 -1071 -1586 -2174 -2009 -673 1426 3288 4143 4085 3864 4112 4896 5887 6718 7165 7166 6837 6364 5843 5281 4701 4125 3460 2631 1816 1299 916 6 -1757 -3707 -4772 -4703 -4372 -4776 -6061 -7514 -8135 -7216 -4837 -2172 -871 -1660 -3596 -4912 -4452 -2162 1279 4720 6679 6243 4184 2596 2953 4754 6352 6685 5898 4700 3770 3561 4042 4485 4065 2816 1604 1087 1067 1058 952 879 782 628 729 1338 2037 2029 1102 -43 -521 -47 916 1379 176 -3225 -7769 -11012 -10973 -8030 -4819 -4157 -7004 -12007 -16542 -18281 -16419 -12046 -7403 -4518 -4147 -5565 -7059 -6835 -4119 182 4071 6074 6446 6641 7730 9495 10980 11618 11671 11757 12163 12605 12487 11394 9421 7093 5017 3592 2897 2612 2015 307 -2687 -6060 -8286 -8368 -6691 -4792 -4207 -5416 -7620 -9423 -9749 -8286 -5455 -2167 417 1367 612 -838 -1542 -664 1538 4117 6132 7042 6796 5789 4671 3937 3661 3636 3674 3654 3398 2745 1787 885 328 130 154 291 438 426 144 -299 -612 -517 99 1110 2087 2463 1981 1008 261 175 645 1301 
1773 1627 389 -2016 -4844 -6810 -6999 -5572 -3722 -2937 -4087 -6838 -9777 -11316 -10702 -8380 -5511 -3240 -2236 -2480 -3261 -3495 -2448 -320 1948 3496 4174 4424 4722 5183 5597 5750 5669 5602 5719 5878 5727 5047 3963 2794 1788 1077 740 711 667 197 -806 -1971 -2745 -2845 -2393 -1768 -1350 -1356 -1729 -2161 -2308 -2037 -1477 -841 -296 48 125 -9 -144 -76 210 552 801 925 946 871 724 575 492 464 439 391 336 283 209 106 7 -55 -72 -51 -7 40 53 15 -45 -79 -59 1 71 119 125 89 38 9 16 44 64 64 49 24 -7 -39 -61 -68 -59 -41 -26 -18 -17 -16 -13 -10 -5 0 +4484 3090 1752 1182 1155 1140 1022 939 832 666 770 1408 2135 2120 1148 -44 -539 -48 943 1416 181 -3294 -7917 -11197 -11136 -8135 -4873 -4196 -7060 -12089 -16635 -18361 -16475 -12078 -7417 -4523 -4150 -5567 -7060 -6835 -4119 183 4073 6079 6454 6654 7751 9528 11029 11684 11752 11853 12280 12749 12651 11564 9581 7229 5125 3678 2974 2689 2080 318 -2789 -6309 -8655 -8770 -7038 -5058 -4458 -5761 -8138 -10106 -10500 -8964 -5928 -2366 458 1508 679 -933 -1727 -748 1743 4691 7028 8121 7886 6762 5491 4658 4362 4363 4440 4447 4166 3391 2226 1111 415 167 199 378 573 563 192 -401 -829 -707 138 1550 2944 3509 2851 1466 384 260 969 1977 2725 2530 613 -3208 -7805 -11112 -11563 -9323 -6310 -5045 -7117 -12066 -17495 -20536 -19701 -15650 -10444 -6236 -4369 -4920 -6573 -7157 -5094 -676 4193 7654 9297 10029 10897 12175 13391 14020 14092 14192 14771 15494 15405 13856 11113 8003 5233 3225 2265 2225 2138 648 -2704 -6777 -9681 -10283 -8878 -6733 -5280 -5447 -7138 -9181 -10089 -9168 -6841 -4018 -1457 247 657 -48 -807 -437 1265 3440 5178 6202 6584 6308 5457 4519 4024 3966 3914 3640 3283 2895 2242 1206 87 -686 -939 -709 -90 633 885 275 -849 -1587 -1274 42 1801 3268 3733 2900 1360 358 731 2112 3407 3857 3352 1916 -585 -4006 -7520 -9936 -10454 -9208 -7350 -6612 -8326 -12365 -16983 -19905 -19746 -16577 -11620 -6694 -3596 -3187 -4585 -5515 -3924 452 5977 10451 12654 12940 12645 12934 14019 15317 16237 16641 16583 15880 14229 11694 8795 6060 3695 1747 367 -401 
-1020 -2406 -5178 -8880 -12038 -13161 -11912 -9383 -7256 -6634 -7505 -9006 -9999 -9515 -7124 -3228 952 3898 4597 3183 1050 190 1906 5729 9642 11574 10832 8211 5068 2591 1587 2231 3703 4457 3379 698 -2330 -4497 -5311 -4909 -3792 -2655 -2042 -1982 -2103 -2143 -2123 -1960 -1240 348 2533 4623 6035 6473 5876 4602 3625 3975 5677 7479 7905 6510 3983 1251 -1242 -3519 -5739 -7958 -9935 -11118 -11174 -10691 -11071 -13305 -16824 -19752 -20340 -18116 -13898 -9176 -5528 -4017 -4461 -5191 -4015 68 5903 10976 13340 13047 11933 12013 13865 16368 17918 17842 16660 15181 13647 11835 9645 7304 5048 2946 1056 -465 -1627 -2868 -4830 -7713 -10803 -12743 -12486 -10283 -7748 -6642 -7408 -8855 -9318 -8014 -5335 -2230 430 2099 2709 2703 2684 2912 3303 3829 4613 5531 +0 0 0 0 1 2 2 3 3 3 5 11 20 23 14 -1 -9 -1 19 32 4 -94 -246 -378 -408 -322 -209 -193 -349 -638 -939 -1103 -1050 -817 -531 -343 -332 -469 -626 -638 -404 18 437 682 756 813 987 1265 1524 1680 1753 1834 1972 2122 2180 2060 1766 1376 1008 746 622 580 462 72 -657 -1528 -2154 -2242 -1848 -1364 -1234 -1633 -2367 -3011 -3202 -2800 -1895 -774 153 515 237 -333 -630 -279 661 1816 2774 3267 3234 2826 2337 2018 1924 1958 2028 2066 1968 1629 1086 551 209 85 103 199 306 305 105 -225 -470 -406 80 914 1759 2123 1747 909 241 165 623 1286 1793 1684 412 -2183 -5368 -7724 -8119 -6613 -4522 -3650 -5199 -8900 -13024 -15428 -14930 -11967 -8055 -4851 -3427 -3891 -5240 -5749 -4124 -552 3445 6334 7749 8417 9210 10358 11465 12082 12220 12383 12964 13678 13674 12367 9973 7219 4744 2939 2074 2047 1976 601 -2522 -6346 -9100 -9704 -8410 -6401 -5038 -5215 -6856 -8845 -9750 -8884 -6648 -3915 -1424 241 644 -48 -796 -432 1250 3407 5136 6159 6547 6280 5438 4506 4016 3961 3911 3638 3282 2895 2242 1205 86 -686 -938 -708 -90 630 881 273 -844 -1575 -1262 41 1777 3220 3671 2845 1331 349 712 2052 3301 3727 3229 1840 -561 -3823 -7150 -9413 -9865 -8655 -6882 -6165 -7731 -11430 -15628 -18229 -17997 -15032 -10483 -6008 -3210 -2830 -4048 -4841 -3424 391 5151 8948 10766 10937 
10613 10780 11602 12587 13244 13472 13319 12658 11251 9170 6841 4673 2825 1323 275 -299 -753 -1758 -3746 -6363 -8538 -9241 -8280 -6454 -4938 -4466 -4997 -5928 -6509 -6122 -4530 -2028 590 2388 2782 1902 619 110 1094 3246 5388 6377 5884 4396 2673 1346 812 1124 1837 2176 1623 329 -1083 -2055 -2385 -2166 -1644 -1131 -854 -813 -847 -847 -823 -744 -462 126 903 1613 2062 2163 1920 1470 1131 1212 1690 2174 2240 1800 1073 328 -318 -876 -1390 -1873 -2273 -2471 -2412 -2237 -2247 -2619 -3204 -3641 -3626 -3123 -2315 -1475 -856 -600 -642 -718 -534 8 722 1286 1498 1401 1223 1176 1293 1450 1509 1424 1260 1085 922 754 579 411 266 145 48 -20 -65 -106 -163 -240 -306 -327 -290 -216 -146 -111 -109 -115 -105 -77 -43 -16 2 9 9 7 5 3 2 1 1 0 +5233 3225 2265 2225 2138 648 -2704 -6777 -9681 -10283 -8878 -6733 -5280 -5447 -7138 -9181 -10089 -9168 -6841 -4018 -1457 247 657 -48 -807 -437 1265 3440 5178 6202 6584 6308 5457 4519 4024 3966 3914 3640 3283 2895 2242 1206 87 -686 -939 -709 -90 633 885 275 -849 -1587 -1274 42 1801 3268 3733 2900 1360 358 731 2112 3407 3857 3352 1916 -585 -4006 -7520 -9936 -10454 -9208 -7350 -6612 -8326 -12365 -16983 -19905 -19746 -16577 -11620 -6694 -3596 -3187 -4585 -5515 -3924 452 5977 10451 12654 12940 12645 12934 14019 15317 16237 16641 16583 15880 14229 11694 8795 6060 3695 1747 367 -401 -1020 -2406 -5178 -8880 -12038 -13161 -11912 -9383 -7256 -6634 -7505 -9006 -9999 -9515 -7124 -3228 952 3898 4597 3183 1050 190 1906 5729 9642 11574 10832 8211 5068 2591 1587 2231 3703 4457 3379 698 -2330 -4497 -5311 -4909 -3792 -2655 -2042 -1982 -2103 -2143 -2123 -1960 -1240 348 2533 4623 6035 6473 5876 4602 3625 3975 5677 7479 7905 6510 3983 1251 -1242 -3519 -5739 -7958 -9935 -11118 -11174 -10691 -11071 -13305 -16824 -19752 -20340 -18116 -13898 -9176 -5528 -4017 -4461 -5191 -4015 68 5903 10976 13340 13047 11933 12013 13865 16368 17918 17842 16660 15181 13647 11835 9645 7304 5048 2946 1056 -465 -1627 -2868 -4830 -7713 -10803 -12743 -12486 -10283 -7748 -6642 -7408 -8855 -9318 
-8014 -5335 -2230 430 2099 2709 2703 2684 2912 3303 3829 4613 5531 6076 5849 4883 3350 1306 -876 -2165 -1697 -26 1026 107 -2389 -4826 -5776 -4812 -2447 134 1596 1382 290 -231 535 2133 3747 4972 5715 5917 5747 5722 6161 6667 6540 5697 4819 4551 4825 4969 4293 2574 274 -1712 -2891 -3837 -5716 -8876 -12145 -13865 -13510 -12134 -11361 -12207 -14608 -17535 -19380 -18730 -15282 -10216 -5513 -2738 -2128 -2473 -1865 991 5713 10436 13334 14082 13875 14078 15021 16007 16228 15528 14356 13178 11976 10348 8041 5290 2566 148 -1890 -3392 -4190 -4610 -5579 -7785 -10692 -12661 -12313 -9823 -6842 -5169 -5365 -6447 -6801 -5486 -2848 -43 2033 3321 4299 5278 6086 6343 5893 4988 4129 3812 4277 5293 6058 5505 3062 -607 -3732 -4651 -3181 -825 406 -483 -2894 -5254 -6135 -5118 -2949 -951 -71 -304 -900 -1040 -317 1169 2901 4178 4604 4497 4624 5345 6185 6400 5857 5145 4926 5317 5853 5748 4348 1715 -1081 -2645 -2600 -2177 -3212 -6474 -10987 -14779 -16218 -14990 -12352 +0 0 1 1 2 1 -8 -24 -43 -58 -61 -55 -51 -62 -93 -135 -168 -173 -144 -94 -38 6 20 -2 -30 -18 54 157 255 327 371 378 347 305 287 300 312 306 290 269 219 123 9 -78 -111 -87 -12 84 122 39 -127 -246 -205 6 310 582 688 552 267 72 152 455 756 882 788 464 -146 -1024 -1974 -2679 -2892 -2610 -2138 -1970 -2539 -3862 -5428 -6508 -6600 -5666 -4057 -2388 -1310 -1186 -1740 -2136 -1550 181 2451 4368 5387 5607 5578 5807 6403 7116 7670 7995 8097 7877 7170 5984 4569 3197 1978 948 202 -225 -578 -1382 -3014 -5238 -7195 -7966 -7300 -5824 -4559 -4218 -4829 -5862 -6582 -6335 -4796 -2197 654 2709 3227 2257 752 137 1392 4225 7177 8694 8208 6278 3908 2015 1244 1764 2951 3579 2735 569 -1915 -3722 -4427 -4121 -3206 -2259 -1749 -1709 -1824 -1870 -1864 -1731 -1101 310 2273 4170 5472 5899 5381 4234 3350 3690 5293 7002 7430 6142 3772 1189 -1185 -3369 -5513 -7667 -9601 -10774 -10858 -10417 -10815 -13026 -16508 -19425 -20043 -17882 -13742 -9089 -5484 -3990 -4436 -5169 -4002 67 5891 10962 13330 13040 11930 12013 13865 16364 17909 17828 16639 15151 13610 11794 9602 
7262 5013 2922 1045 -460 -1606 -2826 -4750 -7569 -10577 -12448 -12166 -9992 -7508 -6419 -7137 -8505 -8920 -7647 -5072 -2113 405 1972 2536 2520 2492 2691 3039 3506 4204 5015 5481 5249 4358 2973 1152 -769 -1890 -1472 -23 878 91 -2020 -4051 -4815 -3983 -2011 109 1292 1110 231 -183 419 1659 2889 3801 4330 4444 4277 4220 4500 4822 4686 4040 3383 3163 3318 3381 2889 1713 180 -1115 -1860 -2440 -3591 -5509 -7443 -8392 -8075 -7158 -6613 -7010 -8278 -9800 -10679 -10175 -8182 -5390 -2865 -1402 -1073 -1227 -911 476 2698 4848 6090 6322 6121 6100 6395 6690 6656 6247 5667 5102 4543 3847 2928 1886 895 50 -632 -1109 -1340 -1440 -1702 -2319 -3109 -3589 -3406 -2648 -1796 -1322 -1335 -1562 -1601 -1255 -633 -10 425 673 845 1005 1121 1130 1015 830 663 590 637 761 837 731 390 -75 -438 -523 -342 -85 39 -46 -257 -443 -490 -388 -211 -65 -5 -19 -51 -55 -16 53 123 165 168 151 143 151 158 148 122 96 81 77 75 64 41 13 -8 -15 -12 -8 -9 -13 -14 -11 -8 -4 0 +6035 6473 5876 4602 3625 3975 5677 7479 7905 6510 3983 1251 -1242 -3519 -5739 -7958 -9935 -11118 -11174 -10691 -11071 -13305 -16824 -19752 -20340 -18116 -13898 -9176 -5528 -4017 -4461 -5191 -4015 68 5903 10976 13340 13047 11933 12013 13865 16368 17918 17842 16660 15181 13647 11835 9645 7304 5048 2946 1056 -465 -1627 -2868 -4830 -7713 -10803 -12743 -12486 -10283 -7748 -6642 -7408 -8855 -9318 -8014 -5335 -2230 430 2099 2709 2703 2684 2912 3303 3829 4613 5531 6076 5849 4883 3350 1306 -876 -2165 -1697 -26 1026 107 -2389 -4826 -5776 -4812 -2447 134 1596 1382 290 -231 535 2133 3747 4972 5715 5917 5747 5722 6161 6667 6540 5697 4819 4551 4825 4969 4293 2574 274 -1712 -2891 -3837 -5716 -8876 -12145 -13865 -13510 -12134 -11361 -12207 -14608 -17535 -19380 -18730 -15282 -10216 -5513 -2738 -2128 -2473 -1865 991 5713 10436 13334 14082 13875 14078 15021 16007 16228 15528 14356 13178 11976 10348 8041 5290 2566 148 -1890 -3392 -4190 -4610 -5579 -7785 -10692 -12661 -12313 -9823 -6842 -5169 -5365 -6447 -6801 -5486 -2848 -43 2033 3321 4299 5278 6086 6343 5893 
4988 4129 3812 4277 5293 6058 5505 3062 -607 -3732 -4651 -3181 -825 406 -483 -2894 -5254 -6135 -5118 -2949 -951 -71 -304 -900 -1040 -317 1169 2901 4178 4604 4497 4624 5345 6185 6400 5857 5145 4926 5317 5853 5748 4348 1715 -1081 -2645 -2600 -2177 -3212 -6474 -10987 -14779 -16218 -14990 -12352 -10517 -11211 -14306 -17766 -19128 -17197 -12590 -7059 -2502 -102 292 184 1439 4804 9282 13065 15043 15487 15472 15831 16573 17038 16545 15037 13186 11786 10954 9947 7878 4580 823 -2276 -4014 -4486 -4443 -4862 -6424 -9073 -11921 -13672 -13428 -11339 -8551 -6434 -5664 -5852 -5937 -4955 -2649 392 3124 4730 5149 5026 5093 5542 5989 5910 5079 3681 2204 1238 1086 1340 994 -700 -3360 -5611 -6245 -5230 -3603 -2511 -2434 -3132 -3952 -4177 -3348 -1594 409 1905 2625 2890 3242 4058 5392 6963 8274 8925 8945 8771 8737 8681 8177 7135 5994 5231 4852 4406 3439 1810 -341 -2666 -4632 -5710 -5834 -5730 -6587 -9145 -12890 -16168 -17255 -15641 -12505 -9985 -9700 -11643 -14247 -15500 -14200 -10529 -5720 -1293 1645 2775 2674 2568 3682 6402 9875 12566 13463 12878 12014 11757 11914 11696 10772 9645 8969 8674 8007 6360 3906 1389 -537 -1670 -2154 -2295 -2527 -3328 -4922 -6953 -8549 -8822 -7515 -5322 -3522 -3077 -3874 -4831 +0 1 2 3 4 7 15 25 34 36 27 10 -12 -40 -75 -117 -165 -210 -235 -248 -284 -377 -522 -666 -745 -717 -594 -422 -273 -212 -252 -312 -256 4 422 830 1064 1098 1057 1120 1357 1678 1924 2003 1952 1856 1739 1571 1332 1050 753 455 169 -78 -281 -512 -891 -1469 -2126 -2586 -2613 -2220 -1722 -1520 -1744 -2145 -2319 -2049 -1401 -602 118 594 787 805 818 909 1055 1251 1541 1890 2121 2086 1778 1245 495 -340 -855 -683 -11 428 45 -1036 -2130 -2594 -2199 -1137 63 766 674 143 -117 273 1108 1976 2662 3104 3260 3211 3242 3537 3880 3857 3404 2916 2788 2994 3121 2729 1655 178 -1127 -1925 -2583 -3890 -6105 -8442 -9736 -9582 -8695 -8219 -8917 -10775 -13053 -14559 -14194 -11686 -7879 -4289 -2148 -1683 -1972 -1499 802 4659 8576 11035 11737 11646 11898 12780 13705 13985 13465 12526 11566 10572 9185 7177 4747 2314 
134 -1723 -3107 -3856 -4262 -5180 -7259 -10011 -11901 -11619 -9305 -6505 -4932 -5136 -6193 -6552 -5302 -2760 -42 1980 3243 4208 5178 5984 6250 5816 4931 4089 3781 4247 5263 6031 5486 3053 -606 -3728 -4648 -3180 -825 406 -483 -2894 -5252 -6131 -5112 -2944 -949 -71 -303 -895 -1033 -315 1157 2868 4123 4536 4422 4537 5232 6041 6235 5691 4985 4760 5122 5621 5502 4148 1630 -1024 -2496 -2444 -2039 -2995 -6011 -10156 -13600 -14852 -13662 -11201 -9488 -10062 -12770 -15771 -16887 -15094 -10986 -6122 -2157 -88 248 155 1207 4004 7682 10736 12270 12537 12427 12619 13105 13360 12869 11597 10082 8931 8228 7404 5810 3345 595 -1631 -2847 -3150 -3089 -3344 -4372 -6107 -7937 -8999 -8741 -7295 -5437 -4042 -3516 -3587 -3594 -2962 -1563 228 1793 2680 2877 2769 2766 2967 3159 3070 2599 1854 1093 604 521 633 461 -320 -1509 -2476 -2707 -2227 -1506 -1030 -980 -1237 -1531 -1585 -1245 -581 145 665 897 965 1059 1296 1683 2123 2464 2595 2535 2426 2354 2278 2090 1775 1451 1231 1109 978 742 378 -70 -525 -883 -1053 -1040 -988 -1097 -1470 -1996 -2412 -2482 -2162 -1661 -1273 -1187 -1365 -1601 -1666 -1457 -1031 -534 -115 138 221 202 183 249 407 593 708 709 635 551 502 471 428 362 299 254 222 185 133 73 23 -8 -22 -25 -22 -21 -23 -28 -31 -30 -24 -15 -7 -3 -2 -1 0 +148 -1890 -3392 -4190 -4610 -5579 -7785 -10692 -12661 -12313 -9823 -6842 -5169 -5365 -6447 -6801 -5486 -2848 -43 2033 3321 4299 5278 6086 6343 5893 4988 4129 3812 4277 5293 6058 5505 3062 -607 -3732 -4651 -3181 -825 406 -483 -2894 -5254 -6135 -5118 -2949 -951 -71 -304 -900 -1040 -317 1169 2901 4178 4604 4497 4624 5345 6185 6400 5857 5145 4926 5317 5853 5748 4348 1715 -1081 -2645 -2600 -2177 -3212 -6474 -10987 -14779 -16218 -14990 -12352 -10517 -11211 -14306 -17766 -19128 -17197 -12590 -7059 -2502 -102 292 184 1439 4804 9282 13065 15043 15487 15472 15831 16573 17038 16545 15037 13186 11786 10954 9947 7878 4580 823 -2276 -4014 -4486 -4443 -4862 -6424 -9073 -11921 -13672 -13428 -11339 -8551 -6434 -5664 -5852 -5937 -4955 -2649 392 3124 4730 5149 
5026 5093 5542 5989 5910 5079 3681 2204 1238 1086 1340 994 -700 -3360 -5611 -6245 -5230 -3603 -2511 -2434 -3132 -3952 -4177 -3348 -1594 409 1905 2625 2890 3242 4058 5392 6963 8274 8925 8945 8771 8737 8681 8177 7135 5994 5231 4852 4406 3439 1810 -341 -2666 -4632 -5710 -5834 -5730 -6587 -9145 -12890 -16168 -17255 -15641 -12505 -9985 -9700 -11643 -14247 -15500 -14200 -10529 -5720 -1293 1645 2775 2674 2568 3682 6402 9875 12566 13463 12878 12014 11757 11914 11696 10772 9645 8969 8674 8007 6360 3906 1389 -537 -1670 -2154 -2295 -2527 -3328 -4922 -6953 -8549 -8822 -7515 -5322 -3522 -3077 -3874 -4831 -4840 -3621 -1766 -92 926 1303 1382 1540 1869 2075 1816 1163 576 400 583 933 1343 1558 1076 -270 -1738 -2189 -1331 -59 570 395 -49 -330 -331 74 970 2040 2711 2864 2991 3519 4282 4877 5190 5311 5200 4837 4492 4425 4382 3813 2655 1494 866 719 683 482 -92 -1221 -2691 -3779 -3935 -3433 -3005 -2968 -3213 -3909 -5526 -7922 -9953 -10436 -9313 -7561 -6229 -5935 -6894 -8801 -10597 -10959 -9380 -6571 -3718 -1607 -486 -271 -458 -118 1480 4096 6674 8300 8876 8911 8918 9134 9576 10110 10550 10756 10645 10133 9150 7768 6192 4633 3257 2223 1556 908 -308 -2338 -4743 -6731 -7767 -7791 -7117 -6359 -6244 -7033 -8075 -8319 -7363 -5774 -4328 -3252 -2389 -1673 -1060 -255 990 2348 3111 2983 2432 2172 2578 3612 4891 5675 5201 3475 1523 544 799 1598 2176 2238 1769 862 -105 -576 -415 -124 -193 -451 -406 -2 325 323 187 144 125 90 295 952 +0 -1 -2 -4 -6 -11 -21 -37 -56 -70 -68 -56 -50 -61 -84 -100 -92 -54 -1 47 85 121 163 205 232 233 213 189 187 225 298 363 350 207 -44 -283 -372 -268 -74 37 -48 -297 -565 -689 -600 -361 -122 -10 -43 -130 -156 -50 187 483 720 820 828 880 1051 1254 1339 1264 1143 1126 1251 1417 1429 1111 450 -292 -732 -737 -634 -957 -1975 -3431 -4724 -5302 -5011 -4222 -3672 -3999 -5212 -6606 -7258 -6659 -4971 -2841 -1027 -43 124 79 634 2156 4239 6069 7106 7441 7554 7853 8351 8718 8595 7933 7059 6402 6035 5558 4464 2629 479 -1343 -2399 -2716 -2723 -3018 -4036 -5769 -7669 -8899 -8839 -7550 
-5756 -4378 -3896 -4068 -4169 -3515 -1899 283 2281 3488 3832 3775 3859 4237 4618 4596 3982 2910 1756 994 879 1093 816 -580 -2801 -4710 -5279 -4450 -3085 -2165 -2111 -2733 -3469 -3688 -2973 -1423 367 1718 2380 2633 2968 3734 4983 6464 7714 8356 8407 8276 8276 8252 7801 6830 5756 5039 4688 4269 3341 1763 -334 -2611 -4545 -5616 -5749 -5656 -6514 -9058 -12787 -16058 -17159 -15573 -12463 -9959 -9682 -11629 -14237 -15493 -14197 -10529 -5720 -1293 1644 2772 2670 2562 3672 6380 9831 12495 13370 12774 11899 11624 11759 11524 10593 9463 8780 8472 7801 6179 3784 1342 -518 -1604 -2062 -2190 -2403 -3153 -4645 -6536 -8005 -8226 -6978 -4920 -3241 -2818 -3531 -4381 -4367 -3250 -1577 -82 817 1143 1205 1335 1610 1776 1545 982 483 333 482 766 1095 1261 864 -216 -1375 -1717 -1036 -46 435 299 -37 -246 -245 54 701 1461 1922 2010 2078 2420 2913 3282 3455 3495 3384 3111 2855 2779 2719 2336 1606 892 510 418 392 273 -52 -673 -1462 -2024 -2077 -1784 -1538 -1496 -1594 -1909 -2656 -3743 -4625 -4768 -4182 -3336 -2700 -2528 -2882 -3610 -4264 -4327 -3633 -2494 -1383 -586 -174 -95 -157 -40 483 1309 2083 2530 2643 2591 2527 2526 2581 2653 2696 2675 2578 2384 2093 1725 1336 969 660 437 296 167 -55 -403 -790 -1082 -1203 -1163 -1024 -879 -830 -897 -988 -975 -827 -621 -444 -319 -223 -149 -90 -21 74 167 210 190 146 122 135 178 224 242 205 127 51 16 22 40 50 46 33 14 -2 -8 -5 -2 -2 -4 -3 -1 1 0 0 0 0 0 0 0 +2625 2890 3242 4058 5392 6963 8274 8925 8945 8771 8737 8681 8177 7135 5994 5231 4852 4406 3439 1810 -341 -2666 -4632 -5710 -5834 -5730 -6587 -9145 -12890 -16168 -17255 -15641 -12505 -9985 -9700 -11643 -14247 -15500 -14200 -10529 -5720 -1293 1645 2775 2674 2568 3682 6402 9875 12566 13463 12878 12014 11757 11914 11696 10772 9645 8969 8674 8007 6360 3906 1389 -537 -1670 -2154 -2295 -2527 -3328 -4922 -6953 -8549 -8822 -7515 -5322 -3522 -3077 -3874 -4831 -4840 -3621 -1766 -92 926 1303 1382 1540 1869 2075 1816 1163 576 400 583 933 1343 1558 1076 -270 -1738 -2189 -1331 -59 570 395 -49 -330 -331 74 970 2040 
2711 2864 2991 3519 4282 4877 5190 5311 5200 4837 4492 4425 4382 3813 2655 1494 866 719 683 482 -92 -1221 -2691 -3779 -3935 -3433 -3005 -2968 -3213 -3909 -5526 -7922 -9953 -10436 -9313 -7561 -6229 -5935 -6894 -8801 -10597 -10959 -9380 -6571 -3718 -1607 -486 -271 -458 -118 1480 4096 6674 8300 8876 8911 8918 9134 9576 10110 10550 10756 10645 10133 9150 7768 6192 4633 3257 2223 1556 908 -308 -2338 -4743 -6731 -7767 -7791 -7117 -6359 -6244 -7033 -8075 -8319 -7363 -5774 -4328 -3252 -2389 -1673 -1060 -255 990 2348 3111 2983 2432 2172 2578 3612 4891 5675 5201 3475 1523 544 799 1598 2176 2238 1769 862 -105 -576 -415 -124 -193 -451 -406 -2 325 323 187 144 125 90 295 952 1772 2174 1908 1298 907 1116 1946 3027 3744 3660 2925 2141 1769 1804 1991 2152 2141 1734 804 -477 -1838 -3260 -4901 -6649 -8011 -8538 -8329 -8004 -8301 -9600 -11623 -13418 -13856 -12440 -9726 -6896 -4899 -4003 -3903 -3935 -3226 -1093 2333 5902 8196 8660 8063 7789 8705 10586 12494 13585 13609 12805 11528 10045 8609 7457 6610 5749 4452 2620 568 -1305 -2894 -4364 -5790 -6960 -7553 -7557 -7396 -7611 -8338 -9152 -9417 -8797 -7426 -5739 -4220 -3250 -2928 -2942 -2732 -1898 -482 1176 2728 3911 4540 4617 4455 4465 4777 5150 5326 5307 5202 4968 4445 3631 2753 2077 1714 1606 1559 1291 591 -482 -1543 -2110 -1922 -1128 -242 165 -138 -810 -1197 -854 155 1408 2469 3039 3026 2652 2426 2769 3581 4312 4529 4290 3913 3533 3010 2233 1306 406 -376 -1022 -1525 -2000 -2800 -4316 -6544 -8842 -10273 -10289 -9209 -8050 -7843 -8934 -10774 -12277 -12471 -11049 -8512 -5873 -4105 -3594 -3929 -4133 +0 0 1 2 6 13 22 30 39 49 59 69 77 80 77 76 80 82 72 41 -9 -76 -144 -193 -214 -227 -282 -420 -636 -853 -974 -940 -797 -676 -694 -882 -1138 -1306 -1259 -982 -560 -133 176 311 313 314 469 850 1364 1806 2008 1993 1929 1957 2053 2084 1985 1836 1764 1759 1675 1372 867 317 -127 -405 -536 -587 -664 -897 -1362 -1971 -2486 -2628 -2292 -1662 -1126 -1006 -1295 -1652 -1690 -1292 -644 -35 351 504 545 619 766 867 773 503 254 179 266 433 634 748 525 -134 -876 
-1121 -692 -32 305 214 -28 -185 -188 42 564 1203 1620 1733 1832 2183 2689 3100 3338 3456 3422 3220 3023 3010 3013 2650 1864 1059 620 520 498 355 -69 -918 -2040 -2890 -3035 -2671 -2357 -2348 -2562 -3140 -4474 -6462 -8180 -8638 -7763 -6347 -5265 -5050 -5903 -7585 -9190 -9563 -8233 -5801 -3301 -1435 -437 -245 -416 -108 1355 3769 6168 7706 8275 8343 8382 8618 9071 9611 10065 10296 10223 9761 8841 7527 6016 4514 3181 2176 1526 892 -304 -2308 -4690 -6667 -7705 -7738 -7078 -6332 -6223 -7015 -8060 -8309 -7358 -5772 -4327 -3252 -2389 -1673 -1060 -255 988 2343 3102 2972 2421 2159 2560 3582 4844 5611 5133 3424 1497 533 782 1560 2120 2174 1714 832 -102 -554 -398 -119 -184 -428 -384 -2 304 301 173 133 115 82 268 863 1598 1951 1703 1152 800 979 1697 2624 3226 3133 2488 1809 1484 1503 1647 1768 1746 1403 645 -381 -1454 -2557 -3813 -5128 -6126 -6471 -6257 -5959 -6123 -7013 -8408 -9615 -9828 -8735 -6761 -4743 -3334 -2695 -2599 -2591 -2100 -704 1483 3707 5086 5306 4879 4655 5134 6161 7174 7697 7605 7055 6262 5378 4542 3874 3382 2896 2208 1279 272 -617 -1345 -1994 -2600 -3071 -3274 -3218 -3092 -3122 -3355 -3613 -3647 -3338 -2762 -2091 -1506 -1135 -1001 -984 -894 -607 -151 358 812 1137 1286 1277 1200 1171 1221 1281 1289 1249 1190 1103 959 759 558 408 326 296 277 222 98 -78 -239 -315 -277 -156 -33 21 -17 -95 -135 -92 15 137 230 269 254 211 183 198 242 274 272 241 206 174 138 95 51 14 -13 -32 -44 -52 -65 -91 -124 -147 -151 -134 -104 -77 -64 -62 -61 -54 -43 -30 -17 -8 -4 -2 -1 0 +-458 -118 1480 4096 6674 8300 8876 8911 8918 9134 9576 10110 10550 10756 10645 10133 9150 7768 6192 4633 3257 2223 1556 908 -308 -2338 -4743 -6731 -7767 -7791 -7117 -6359 -6244 -7033 -8075 -8319 -7363 -5774 -4328 -3252 -2389 -1673 -1060 -255 990 2348 3111 2983 2432 2172 2578 3612 4891 5675 5201 3475 1523 544 799 1598 2176 2238 1769 862 -105 -576 -415 -124 -193 -451 -406 -2 325 323 187 144 125 90 295 952 1772 2174 1908 1298 907 1116 1946 3027 3744 3660 2925 2141 1769 1804 1991 2152 2141 1734 804 -477 -1838 -3260 
-4901 -6649 -8011 -8538 -8329 -8004 -8301 -9600 -11623 -13418 -13856 -12440 -9726 -6896 -4899 -4003 -3903 -3935 -3226 -1093 2333 5902 8196 8660 8063 7789 8705 10586 12494 13585 13609 12805 11528 10045 8609 7457 6610 5749 4452 2620 568 -1305 -2894 -4364 -5790 -6960 -7553 -7557 -7396 -7611 -8338 -9152 -9417 -8797 -7426 -5739 -4220 -3250 -2928 -2942 -2732 -1898 -482 1176 2728 3911 4540 4617 4455 4465 4777 5150 5326 5307 5202 4968 4445 3631 2753 2077 1714 1606 1559 1291 591 -482 -1543 -2110 -1922 -1128 -242 165 -138 -810 -1197 -854 155 1408 2469 3039 3026 2652 2426 2769 3581 4312 4529 4290 3913 3533 3010 2233 1306 406 -376 -1022 -1525 -2000 -2800 -4316 -6544 -8842 -10273 -10289 -9209 -8050 -7843 -8934 -10774 -12277 -12471 -11049 -8512 -5873 -4105 -3594 -3929 -4133 -3260 -1037 1886 4370 5603 5682 5449 5786 6967 8602 10032 10797 10821 10320 9612 8953 8477 8176 7907 7437 6615 5514 4337 3173 1904 438 -1021 -2104 -2715 -3246 -4216 -5751 -7461 -8766 -9274 -8934 -8025 -7099 -6720 -7035 -7545 -7436 -6217 -4111 -1838 -95 829 1113 1259 1808 2982 4493 5744 6287 6115 5577 5067 4840 4972 5309 5499 5158 4188 2897 1775 1169 1112 1385 1633 1538 993 197 -492 -818 -778 -543 -295 -165 -209 -366 -484 -416 -151 211 554 805 912 856 690 528 476 568 753 917 901 612 135 -302 -528 -581 -621 -757 -1032 -1540 -2403 -3551 -4564 -4909 -4402 -3471 -2937 -3478 -5141 -7264 -8838 -9099 -8022 -6367 -5217 -5251 -6274 -7377 -7600 -6580 -4650 -2536 -959 -343 -575 -899 -291 1744 4658 7225 8546 8678 8386 8406 8982 9932 10904 11575 11740 11336 10452 9313 8215 7391 6804 6133 5036 3473 1728 113 -1279 -2516 -3634 -4576 -5337 -6053 +0 -1 0 3 8 16 23 30 39 51 65 81 100 120 137 148 151 146 130 107 83 62 48 30 -12 -93 -203 -309 -384 -411 -402 -382 -398 -476 -578 -630 -588 -487 -384 -304 -234 -172 -114 -29 116 287 396 396 336 312 384 559 785 944 896 619 280 103 157 324 455 483 393 197 -25 -140 -104 -32 -51 -122 -113 -1 94 96 57 44 39 29 98 325 618 775 695 482 344 432 768 1217 1535 1529 1245 927 780 809 909 999 1011 
833 392 -237 -927 -1669 -2547 -3508 -4290 -4638 -4590 -4473 -4704 -5513 -6765 -7915 -8282 -7529 -5961 -4280 -3078 -2545 -2511 -2562 -2124 -728 1570 4015 5636 6019 5661 5524 6237 7657 9126 10019 10130 9619 8736 7680 6639 5800 5183 4546 3548 2104 459 -1065 -2379 -3612 -4826 -5842 -6384 -6430 -6333 -6560 -7231 -7986 -8266 -7767 -6593 -5123 -3788 -2932 -2655 -2682 -2502 -1747 -446 1091 2543 3661 4267 4356 4220 4244 4557 4929 5115 5112 5026 4813 4319 3537 2689 2033 1681 1579 1536 1274 584 -478 -1531 -2096 -1912 -1124 -242 164 -138 -810 -1197 -854 154 1408 2469 3038 3024 2650 2423 2763 3571 4297 4509 4265 3886 3504 2981 2207 1289 400 -370 -1003 -1493 -1954 -2729 -4194 -6342 -8545 -9897 -9883 -8816 -7681 -7457 -8463 -10167 -11540 -11677 -10302 -7904 -5429 -3778 -3292 -3581 -3748 -2941 -931 1683 3879 4946 4987 4754 5017 6004 7365 8535 9125 9082 8601 7955 7357 6914 6619 6351 5928 5230 4323 3373 2447 1455 331 -767 -1567 -2003 -2372 -3050 -4121 -5292 -6156 -6447 -6145 -5461 -4779 -4474 -4631 -4911 -4784 -3953 -2583 -1141 -59 501 665 742 1052 1712 2545 3209 3464 3321 2985 2673 2514 2544 2675 2728 2518 2012 1368 824 533 499 611 707 654 415 80 -198 -323 -302 -207 -110 -61 -75 -128 -166 -140 -50 67 172 245 271 248 195 146 128 149 192 228 218 144 30 -68 -114 -122 -126 -149 -197 -284 -429 -613 -760 -789 -682 -518 -423 -481 -683 -926 -1082 -1067 -901 -684 -535 -515 -586 -654 -641 -526 -352 -182 -65 -22 -35 -51 -16 86 213 308 338 317 282 260 254 254 252 243 220 188 153 120 92 70 54 41 28 15 5 0 -3 -4 -3 -3 -2 0 +-2928 -2942 -2732 -1898 -482 1176 2728 3911 4540 4617 4455 4465 4777 5150 5326 5307 5202 4968 4445 3631 2753 2077 1714 1606 1559 1291 591 -482 -1543 -2110 -1922 -1128 -242 165 -138 -810 -1197 -854 155 1408 2469 3039 3026 2652 2426 2769 3581 4312 4529 4290 3913 3533 3010 2233 1306 406 -376 -1022 -1525 -2000 -2800 -4316 -6544 -8842 -10273 -10289 -9209 -8050 -7843 -8934 -10774 -12277 -12471 -11049 -8512 -5873 -4105 -3594 -3929 -4133 -3260 -1037 1886 4370 5603 5682 5449 5786 6967 
8602 10032 10797 10821 10320 9612 8953 8477 8176 7907 7437 6615 5514 4337 3173 1904 438 -1021 -2104 -2715 -3246 -4216 -5751 -7461 -8766 -9274 -8934 -8025 -7099 -6720 -7035 -7545 -7436 -6217 -4111 -1838 -95 829 1113 1259 1808 2982 4493 5744 6287 6115 5577 5067 4840 4972 5309 5499 5158 4188 2897 1775 1169 1112 1385 1633 1538 993 197 -492 -818 -778 -543 -295 -165 -209 -366 -484 -416 -151 211 554 805 912 856 690 528 476 568 753 917 901 612 135 -302 -528 -581 -621 -757 -1032 -1540 -2403 -3551 -4564 -4909 -4402 -3471 -2937 -3478 -5141 -7264 -8838 -9099 -8022 -6367 -5217 -5251 -6274 -7377 -7600 -6580 -4650 -2536 -959 -343 -575 -899 -291 1744 4658 7225 8546 8678 8386 8406 8982 9932 10904 11575 11740 11336 10452 9313 8215 7391 6804 6133 5036 3473 1728 113 -1279 -2516 -3634 -4576 -5337 -6053 -6926 -8009 -9058 -9641 -9491 -8801 -8153 -8020 -8352 -8627 -8288 -7143 -5456 -3765 -2598 -2149 -2142 -2010 -1297 58 1767 3367 4422 4738 4557 4466 4927 5843 6668 6957 6692 6149 5604 5250 5190 5337 5367 4941 4005 2825 1750 1001 626 493 300 -239 -1117 -1993 -2526 -2679 -2650 -2596 -2541 -2492 -2467 -2391 -2095 -1502 -748 -60 426 712 889 1082 1400 1833 2251 2563 2789 2886 2568 1556 54 -1187 -1500 -981 -458 -788 -2213 -4305 -6315 -7567 -7729 -6975 -5949 -5457 -6009 -7469 -9055 -9715 -8798 -6587 -4220 -2891 -2919 -3510 -3465 -2140 167 2644 4616 5859 6470 6697 6901 7520 8786 10386 11518 11493 10406 9118 8505 8673 8927 8490 7188 5486 3980 2940 2245 1582 672 -621 -2205 -3795 -5060 -5832 -6192 -6391 -6720 -7393 -8438 -9586 -10355 -10360 -9623 -8526 -7476 -6700 -6316 -6383 -6692 -6617 -5478 -3218 -592 1403 2274 2236 1915 1998 2984 4869 6973 8278 8225 7183 6125 5858 +0 -1 -2 -2 -1 2 7 13 19 25 30 35 45 57 68 77 86 93 93 84 70 58 53 54 57 51 25 -23 -77 -112 -109 -68 -16 11 -10 -62 -96 -72 13 131 241 311 325 297 284 338 456 572 625 616 583 546 483 371 225 72 -70 -195 -301 -406 -586 -932 -1454 -2023 -2418 -2492 -2292 -2058 -2059 -2408 -2981 -3480 -3627 -3291 -2596 -1834 -1312 -1175 -1314 -1413 -1139 
-370 686 1624 2125 2200 2151 2327 2857 3595 4271 4678 4773 4633 4390 4159 4004 3928 3860 3689 3333 2821 2253 1674 1019 237 -563 -1176 -1539 -1864 -2454 -3393 -4460 -5306 -5684 -5545 -5042 -4514 -4324 -4579 -4967 -4951 -4185 -2798 -1265 -67 582 789 902 1307 2178 3313 4275 4722 4634 4264 3907 3764 3898 4198 4383 4143 3390 2363 1458 967 926 1162 1380 1308 850 169 -427 -714 -683 -480 -262 -148 -188 -331 -439 -380 -139 194 512 747 850 801 648 498 450 539 718 877 865 589 130 -293 -514 -567 -607 -742 -1013 -1515 -2368 -3506 -4513 -4863 -4367 -3448 -2921 -3463 -5124 -7245 -8821 -9088 -8017 -6364 -5216 -5251 -6274 -7376 -7597 -6576 -4645 -2532 -957 -342 -573 -894 -290 1729 4613 7143 8435 8550 8246 8247 8793 9701 10624 11247 11375 10954 10069 8944 7864 7051 6468 5809 4751 3264 1617 105 -1188 -2326 -3344 -4191 -4865 -5489 -6248 -7188 -8085 -8559 -8379 -7725 -7114 -6955 -7198 -7387 -7052 -6038 -4580 -3139 -2151 -1766 -1748 -1628 -1042 46 1397 2640 3439 3654 3484 3384 3701 4349 4917 5081 4840 4406 3974 3686 3607 3670 3651 3325 2666 1859 1139 643 397 309 186 -147 -677 -1192 -1490 -1560 -1522 -1472 -1421 -1374 -1341 -1281 -1106 -781 -383 -31 211 347 427 511 650 837 1010 1130 1208 1228 1073 638 21 -469 -581 -373 -171 -288 -790 -1503 -2159 -2530 -2527 -2230 -1858 -1665 -1790 -2172 -2567 -2688 -2372 -1729 -1079 -720 -707 -827 -793 -476 36 553 936 1152 1232 1234 1229 1296 1462 1668 1782 1714 1496 1259 1129 1105 1091 994 807 589 408 287 209 140 56 -50 -167 -272 -343 -372 -372 -361 -355 -365 -388 -410 -410 -380 -325 -265 -212 -172 -147 -135 -126 -110 -81 -42 -7 13 18 15 10 8 10 13 13 10 6 3 1 0 +-484 -416 -151 211 554 805 912 856 690 528 476 568 753 917 901 612 135 -302 -528 -581 -621 -757 -1032 -1540 -2403 -3551 -4564 -4909 -4402 -3471 -2937 -3478 -5141 -7264 -8838 -9099 -8022 -6367 -5217 -5251 -6274 -7377 -7600 -6580 -4650 -2536 -959 -343 -575 -899 -291 1744 4658 7225 8546 8678 8386 8406 8982 9932 10904 11575 11740 11336 10452 9313 8215 7391 6804 6133 5036 3473 1728 113 -1279 -2516 
-3634 -4576 -5337 -6053 -6926 -8009 -9058 -9641 -9491 -8801 -8153 -8020 -8352 -8627 -8288 -7143 -5456 -3765 -2598 -2149 -2142 -2010 -1297 58 1767 3367 4422 4738 4557 4466 4927 5843 6668 6957 6692 6149 5604 5250 5190 5337 5367 4941 4005 2825 1750 1001 626 493 300 -239 -1117 -1993 -2526 -2679 -2650 -2596 -2541 -2492 -2467 -2391 -2095 -1502 -748 -60 426 712 889 1082 1400 1833 2251 2563 2789 2886 2568 1556 54 -1187 -1500 -981 -458 -788 -2213 -4305 -6315 -7567 -7729 -6975 -5949 -5457 -6009 -7469 -9055 -9715 -8798 -6587 -4220 -2891 -2919 -3510 -3465 -2140 167 2644 4616 5859 6470 6697 6901 7520 8786 10386 11518 11493 10406 9118 8505 8673 8927 8490 7188 5486 3980 2940 2245 1582 672 -621 -2205 -3795 -5060 -5832 -6192 -6391 -6720 -7393 -8438 -9586 -10355 -10360 -9623 -8526 -7476 -6700 -6316 -6383 -6692 -6617 -5478 -3218 -592 1403 2274 2236 1915 1998 2984 4869 6973 8278 8225 7183 6125 5858 6537 7687 8494 8243 6819 4885 3434 3010 3322 3636 3390 2478 1154 -142 -978 -1195 -1046 -976 -1227 -1689 -2109 -2317 -2253 -1941 -1510 -1159 -983 -867 -640 -275 111 411 606 725 816 950 1148 1285 1156 690 38 -585 -1085 -1485 -1819 -2136 -2552 -3204 -4104 -5076 -5865 -6285 -6312 -6099 -5918 -5984 -6269 -6507 -6387 -5784 -4827 -3831 -3131 -2886 -2928 -2829 -2202 -1002 434 1608 2188 2197 2032 2217 3029 4263 5383 5946 5892 5498 5148 5141 5543 6114 6384 6003 5030 3906 3102 2816 2907 3004 2690 1779 491 -644 -1209 -1205 -979 -909 -1188 -1817 -2596 -3152 -3157 -2636 -2016 -1761 -1973 -2364 -2576 -2449 -2040 -1525 -1108 -928 -966 -1071 -1099 -1013 -818 -524 -192 18 -17 -200 -258 -22 410 801 1029 1148 1260 1410 1629 1949 2343 2690 2884 2945 2994 3123 3319 3505 3610 3593 3458 3269 3132 3099 3098 2991 2696 2240 1721 1231 818 481 169 -195 +0 -1 -1 0 0 1 2 2 3 2 3 4 7 10 11 8 2 -6 -12 -14 -16 -22 -32 -52 -89 -141 -195 -226 -218 -184 -166 -209 -328 -492 -633 -689 -641 -537 -463 -490 -615 -757 -817 -739 -545 -311 -123 -46 -80 -130 -44 269 748 1202 1473 1546 1545 1600 1767 2015 2281 2498 2608 2593 2459 2255 
2043 1889 1785 1653 1393 984 502 33 -391 -786 -1162 -1496 -1784 -2069 -2419 -2857 -3300 -3585 -3601 -3408 -3219 -3227 -3426 -3606 -3529 -3096 -2407 -1691 -1187 -999 -1012 -966 -634 28 890 1722 2297 2499 2439 2425 2714 3265 3778 3994 3894 3626 3349 3177 3180 3312 3371 3141 2576 1838 1151 666 421 335 206 -167 -785 -1414 -1811 -1938 -1936 -1915 -1892 -1873 -1870 -1829 -1616 -1169 -587 -48 339 571 719 882 1150 1517 1876 2151 2357 2455 2198 1340 46 -1036 -1317 -867 -407 -704 -1987 -3884 -5727 -6897 -7078 -6419 -5499 -5067 -5603 -6994 -8512 -9168 -8335 -6263 -4027 -2768 -2804 -3382 -3349 -2074 162 2576 4508 5735 6348 6585 6799 7422 8687 10287 11425 11414 10347 9077 8475 8649 8909 8479 7182 5483 3979 2940 2245 1581 671 -621 -2203 -3788 -5047 -5813 -6165 -6356 -6675 -7334 -8358 -9479 -10222 -10209 -9464 -8366 -7320 -6545 -6155 -6203 -6485 -6395 -5278 -3091 -567 1338 2161 2118 1807 1878 2793 4539 6474 7651 7568 6577 5582 5311 5897 6898 7581 7317 6019 4287 2996 2610 2862 3113 2884 2094 968 -119 -810 -983 -854 -791 -986 -1347 -1668 -1817 -1753 -1497 -1155 -879 -739 -646 -473 -201 80 294 429 509 567 653 781 864 769 454 24 -377 -690 -933 -1129 -1309 -1545 -1915 -2421 -2955 -3368 -3562 -3528 -3361 -3215 -3204 -3308 -3381 -3269 -2915 -2395 -1871 -1505 -1364 -1361 -1293 -989 -443 188 684 914 901 817 875 1172 1617 2001 2165 2101 1919 1759 1718 1812 1953 1993 1830 1498 1135 879 778 783 788 687 442 118 -152 -277 -268 -212 -191 -242 -358 -495 -581 -563 -455 -336 -283 -306 -353 -371 -339 -271 -195 -136 -109 -109 -116 -113 -100 -77 -47 -17 1 -2 -15 -18 -2 24 45 54 56 57 60 64 71 78 83 81 75 69 65 62 58 52 46 38 31 25 21 17 13 9 6 3 1 0 0 0 0 +-6315 -7567 -7729 -6975 -5949 -5457 -6009 -7469 -9055 -9715 -8798 -6587 -4220 -2891 -2919 -3510 -3465 -2140 167 2644 4616 5859 6470 6697 6901 7520 8786 10386 11518 11493 10406 9118 8505 8673 8927 8490 7188 5486 3980 2940 2245 1582 672 -621 -2205 -3795 -5060 -5832 -6192 -6391 -6720 -7393 -8438 -9586 -10355 -10360 -9623 -8526 -7476 -6700 -6316 -6383 
-6692 -6617 -5478 -3218 -592 1403 2274 2236 1915 1998 2984 4869 6973 8278 8225 7183 6125 5858 6537 7687 8494 8243 6819 4885 3434 3010 3322 3636 3390 2478 1154 -142 -978 -1195 -1046 -976 -1227 -1689 -2109 -2317 -2253 -1941 -1510 -1159 -983 -867 -640 -275 111 411 606 725 816 950 1148 1285 1156 690 38 -585 -1085 -1485 -1819 -2136 -2552 -3204 -4104 -5076 -5865 -6285 -6312 -6099 -5918 -5984 -6269 -6507 -6387 -5784 -4827 -3831 -3131 -2886 -2928 -2829 -2202 -1002 434 1608 2188 2197 2032 2217 3029 4263 5383 5946 5892 5498 5148 5141 5543 6114 6384 6003 5030 3906 3102 2816 2907 3004 2690 1779 491 -644 -1209 -1205 -979 -909 -1188 -1817 -2596 -3152 -3157 -2636 -2016 -1761 -1973 -2364 -2576 -2449 -2040 -1525 -1108 -928 -966 -1071 -1099 -1013 -818 -524 -192 18 -17 -200 -258 -22 410 801 1029 1148 1260 1410 1629 1949 2343 2690 2884 2945 2994 3123 3319 3505 3610 3593 3458 3269 3132 3099 3098 2991 2696 2240 1721 1231 818 481 169 -195 -651 -1169 -1687 -2151 -2531 -2809 -3002 -3169 -3387 -3664 -3911 -4012 -3934 -3780 -3716 -3828 -4019 -4065 -3825 -3395 -3042 -2962 -3106 -3255 -3231 -3001 -2626 -2195 -1823 -1624 -1613 -1646 -1531 -1216 -826 -527 -391 -386 -417 -341 -20 547 1156 1542 1615 1541 1549 1743 2083 2459 2752 2860 2771 2618 2572 2675 2804 2815 2667 2393 2049 1710 1478 1420 1466 1478 1366 1134 827 493 218 95 141 255 321 315 278 216 91 -93 -255 -297 -185 33 252 339 208 -106 -459 -696 -752 -664 -503 -343 -252 -282 -431 -625 -761 -750 -565 -276 -24 67 -18 -160 -204 -66 209 497 673 675 548 417 416 626 1023 1471 1772 1787 1556 1267 1119 1166 1330 1504 1613 1585 1367 999 623 348 139 -120 -451 -763 -980 -1160 -1393 -1686 -1974 -2247 -2546 -2835 -2976 -2875 -2626 -2443 -2462 -2644 -2832 -2868 -2667 -2276 -1889 -1712 -1783 -1923 -1904 -1643 +0 -2 -4 -6 -8 -11 -17 -26 -40 -55 -61 -54 -41 -33 -38 -52 -58 -41 3 61 118 165 200 225 252 297 375 476 568 606 586 547 541 586 638 642 573 462 352 274 219 162 72 -70 -259 -465 -645 -775 -856 -920 -1003 -1145 -1356 -1597 -1785 -1847 -1774 -1624 -1472 
-1360 -1322 -1378 -1487 -1514 -1290 -780 -148 358 596 602 529 566 867 1450 2126 2584 2628 2348 2047 2002 2282 2741 3094 3064 2587 1891 1355 1211 1362 1519 1443 1073 509 -64 -447 -556 -495 -469 -600 -838 -1063 -1186 -1171 -1025 -809 -630 -542 -485 -363 -158 64 242 362 438 500 589 721 816 743 449 25 -390 -731 -1011 -1252 -1485 -1792 -2273 -2941 -3672 -4285 -4636 -4699 -4582 -4485 -4576 -4835 -5062 -5009 -4574 -3848 -3078 -2535 -2355 -2407 -2342 -1836 -842 366 1368 1873 1893 1762 1934 2658 3763 4778 5307 5287 4959 4667 4685 5076 5625 5900 5573 4689 3657 2915 2657 2753 2855 2566 1702 471 -621 -1169 -1168 -952 -886 -1161 -1779 -2548 -3100 -3111 -2602 -1994 -1745 -1958 -2348 -2562 -2439 -2034 -1521 -1106 -927 -966 -1071 -1099 -1013 -818 -524 -192 17 -17 -200 -258 -22 408 796 1021 1138 1248 1394 1607 1920 2304 2639 2823 2876 2917 3034 3216 3386 3477 3450 3310 3118 2977 2935 2923 2811 2524 2088 1597 1137 752 440 154 -177 -588 -1050 -1506 -1910 -2235 -2466 -2620 -2749 -2919 -3138 -3328 -3392 -3303 -3151 -3076 -3146 -3279 -3291 -3073 -2707 -2406 -2323 -2416 -2511 -2471 -2275 -1973 -1634 -1345 -1187 -1167 -1180 -1086 -854 -575 -363 -267 -260 -278 -225 -14 351 734 968 1002 944 937 1041 1228 1431 1580 1620 1548 1442 1397 1432 1479 1462 1364 1205 1016 834 710 670 681 675 613 500 358 209 91 38 56 100 124 119 103 78 32 -33 -88 -100 -61 10 78 103 61 -31 -131 -193 -203 -175 -129 -86 -62 -67 -99 -139 -165 -157 -115 -55 -5 12 -4 -28 -34 -11 32 74 96 93 72 53 50 73 114 158 181 174 145 112 94 93 100 107 109 100 82 56 32 17 6 -6 -18 -28 -34 -36 -40 -44 -46 -48 -48 -48 -44 -38 -30 -24 -20 -19 -16 -13 -10 -7 -4 -3 -2 -1 -1 0 +5148 5141 5543 6114 6384 6003 5030 3906 3102 2816 2907 3004 2690 1779 491 -644 -1209 -1205 -979 -909 -1188 -1817 -2596 -3152 -3157 -2636 -2016 -1761 -1973 -2364 -2576 -2449 -2040 -1525 -1108 -928 -966 -1071 -1099 -1013 -818 -524 -192 18 -17 -200 -258 -22 410 801 1029 1148 1260 1410 1629 1949 2343 2690 2884 2945 2994 3123 3319 3505 3610 3593 3458 3269 3132 3099 3098 
2991 2696 2240 1721 1231 818 481 169 -195 -651 -1169 -1687 -2151 -2531 -2809 -3002 -3169 -3387 -3664 -3911 -4012 -3934 -3780 -3716 -3828 -4019 -4065 -3825 -3395 -3042 -2962 -3106 -3255 -3231 -3001 -2626 -2195 -1823 -1624 -1613 -1646 -1531 -1216 -826 -527 -391 -386 -417 -341 -20 547 1156 1542 1615 1541 1549 1743 2083 2459 2752 2860 2771 2618 2572 2675 2804 2815 2667 2393 2049 1710 1478 1420 1466 1478 1366 1134 827 493 218 95 141 255 321 315 278 216 91 -93 -255 -297 -185 33 252 339 208 -106 -459 -696 -752 -664 -503 -343 -252 -282 -431 -625 -761 -750 -565 -276 -24 67 -18 -160 -204 -66 209 497 673 675 548 417 416 626 1023 1471 1772 1787 1556 1267 1119 1166 1330 1504 1613 1585 1367 999 623 348 139 -120 -451 -763 -980 -1160 -1393 -1686 -1974 -2247 -2546 -2835 -2976 -2875 -2626 -2443 -2462 -2644 -2832 -2868 -2667 -2276 -1889 -1712 -1783 -1923 -1904 -1643 -1218 -760 -399 -260 -394 -665 -797 -602 -150 315 603 663 529 273 12 -110 -26 189 407 530 506 312 13 -216 -188 105 458 631 548 322 125 65 159 358 571 709 732 672 587 514 474 507 668 970 1338 1651 1840 1917 1940 1954 2002 2153 2442 2770 2930 2799 2487 2263 2299 2511 2661 2573 2257 1836 1429 1085 813 606 449 288 64 -247 -584 -838 -941 -934 -931 -1014 -1169 -1305 -1333 -1240 -1073 -906 -786 -716 -665 -589 -436 -188 104 320 362 246 114 121 310 589 802 822 618 268 -66 -253 -272 -217 -234 -420 -791 -1253 -1645 -1838 -1833 -1756 -1725 -1752 -1791 -1826 -1867 -1877 -1770 -1525 -1265 -1130 -1134 -1137 -985 -669 -325 -130 -146 -277 -353 -295 -166 -94 -133 -240 -365 -503 -645 -733 -732 -695 -733 -898 -1126 -1288 -1280 -1109 -896 -811 -929 -1143 -1239 -1091 -760 -428 -238 -211 +0 1 2 4 7 11 13 13 13 15 19 24 25 19 6 -10 -21 -23 -21 -22 -31 -52 -81 -107 -116 -105 -87 -81 -98 -125 -146 -148 -130 -104 -80 -71 -78 -91 -98 -95 -81 -54 -21 2 -2 -25 -33 -3 56 115 153 177 202 234 280 347 431 512 567 597 626 674 737 801 849 870 860 835 821 835 856 847 783 667 524 384 261 157 56 -67 -228 -417 -615 -800 -961 -1088 -1186 -1276 -1390 -1532 -1666 
-1739 -1736 -1698 -1698 -1779 -1899 -1954 -1868 -1685 -1533 -1516 -1614 -1718 -1730 -1631 -1447 -1227 -1034 -933 -939 -971 -916 -736 -507 -328 -246 -246 -269 -222 -14 364 778 1049 1110 1071 1087 1236 1492 1778 2010 2109 2062 1966 1949 2045 2162 2189 2091 1892 1633 1373 1196 1158 1204 1223 1138 951 698 419 186 81 122 222 281 278 246 192 81 -84 -232 -271 -170 30 232 314 193 -100 -432 -657 -713 -632 -480 -329 -243 -272 -417 -606 -740 -731 -552 -271 -24 65 -18 -158 -202 -66 207 493 669 672 546 415 415 625 1022 1470 1771 1787 1556 1266 1118 1165 1328 1501 1608 1579 1360 993 618 345 137 -119 -446 -752 -964 -1139 -1364 -1647 -1924 -2184 -2468 -2740 -2868 -2762 -2514 -2331 -2341 -2505 -2673 -2696 -2498 -2123 -1754 -1583 -1641 -1762 -1736 -1490 -1099 -683 -357 -231 -348 -584 -696 -523 -130 269 513 560 444 227 9 -91 -22 153 326 422 400 244 10 -167 -144 79 344 469 404 235 90 46 112 251 396 487 498 452 390 338 308 326 424 609 830 1011 1113 1145 1144 1137 1149 1219 1364 1526 1591 1498 1312 1175 1176 1265 1320 1256 1084 867 663 495 365 267 194 122 26 -102 -235 -331 -365 -355 -347 -370 -417 -456 -456 -415 -351 -290 -246 -219 -199 -172 -124 -53 28 83 92 61 27 28 70 130 173 171 125 52 -13 -47 -49 -38 -39 -68 -123 -187 -237 -254 -244 -224 -211 -206 -202 -197 -192 -184 -166 -136 -107 -91 -86 -82 -67 -43 -20 -8 -8 -14 -17 -13 -7 -4 -5 -8 -11 -13 -15 -16 -14 -12 -11 -12 -13 -13 -11 -8 -6 -4 -4 -4 -3 -2 -1 -1 -1 0 +-255 -297 -185 33 252 339 208 -106 -459 -696 -752 -664 -503 -343 -252 -282 -431 -625 -761 -750 -565 -276 -24 67 -18 -160 -204 -66 209 497 673 675 548 417 416 626 1023 1471 1772 1787 1556 1267 1119 1166 1330 1504 1613 1585 1367 999 623 348 139 -120 -451 -763 -980 -1160 -1393 -1686 -1974 -2247 -2546 -2835 -2976 -2875 -2626 -2443 -2462 -2644 -2832 -2868 -2667 -2276 -1889 -1712 -1783 -1923 -1904 -1643 -1218 -760 -399 -260 -394 -665 -797 -602 -150 315 603 663 529 273 12 -110 -26 189 407 530 506 312 13 -216 -188 105 458 631 548 322 125 65 159 358 571 709 732 672 587 514 474 507 668 
970 1338 1651 1840 1917 1940 1954 2002 2153 2442 2770 2930 2799 2487 2263 2299 2511 2661 2573 2257 1836 1429 1085 813 606 449 288 64 -247 -584 -838 -941 -934 -931 -1014 -1169 -1305 -1333 -1240 -1073 -906 -786 -716 -665 -589 -436 -188 104 320 362 246 114 121 310 589 802 822 618 268 -66 -253 -272 -217 -234 -420 -791 -1253 -1645 -1838 -1833 -1756 -1725 -1752 -1791 -1826 -1867 -1877 -1770 -1525 -1265 -1130 -1134 -1137 -985 -669 -325 -130 -146 -277 -353 -295 -166 -94 -133 -240 -365 -503 -645 -733 -732 -695 -733 -898 -1126 -1288 -1280 -1109 -896 -811 -929 -1143 -1239 -1091 -760 -428 -238 -211 -256 -236 -63 245 583 863 1083 1280 1440 1510 1500 1522 1684 1953 2170 2187 2009 1788 1704 1816 2033 2209 2258 2180 2018 1830 1711 1734 1869 1960 1856 1563 1240 1061 1071 1181 1235 1112 796 398 78 -58 -10 143 275 261 52 -235 -400 -357 -232 -229 -394 -598 -687 -639 -531 -450 -435 -486 -564 -614 -607 -547 -458 -384 -384 -507 -701 -836 -838 -769 -761 -876 -1079 -1306 -1500 -1595 -1548 -1425 -1352 -1393 -1476 -1496 -1421 -1286 -1101 -869 -643 -485 -391 -297 -165 -17 137 312 487 582 551 454 402 431 507 585 632 593 411 109 -212 -459 -606 -680 -718 -782 -945 -1213 -1484 -1626 -1609 -1517 -1452 -1457 -1527 -1622 -1679 -1636 -1488 -1296 -1131 -1016 -956 -944 -937 -843 -612 -315 -84 29 94 198 371 585 809 1002 1114 1132 1121 1170 1299 1470 1641 1791 1890 1909 1880 1893 2007 2187 2374 2539 +0 -1 -1 0 0 0 0 -1 -3 -4 -6 -6 -5 -4 -4 -5 -8 -12 -16 -18 -15 -8 -1 2 -1 -7 -9 -4 10 26 37 40 34 28 29 47 81 123 157 166 152 129 120 130 155 183 205 210 188 143 92 53 22 -20 -78 -136 -181 -221 -275 -343 -414 -485 -566 -649 -701 -697 -654 -625 -647 -713 -784 -813 -776 -678 -577 -535 -570 -629 -637 -562 -426 -272 -146 -97 -150 -258 -315 -243 -62 131 256 287 233 122 5 -52 -13 90 198 262 254 159 6 -114 -101 57 252 352 310 184 72 38 95 216 349 440 459 427 377 334 311 337 449 660 920 1147 1291 1359 1390 1413 1462 1587 1817 2080 2220 2140 1918 1760 1802 1985 2121 2066 1827 1497 1174 897 677 508 379 245 54 -213 -507 
-732 -826 -825 -827 -906 -1050 -1178 -1209 -1131 -983 -834 -727 -665 -621 -552 -410 -178 98 304 345 235 109 116 299 570 779 800 603 262 -65 -249 -269 -215 -232 -417 -785 -1245 -1636 -1830 -1827 -1752 -1722 -1750 -1790 -1826 -1867 -1877 -1770 -1525 -1265 -1130 -1133 -1135 -983 -667 -324 -130 -146 -275 -350 -292 -164 -93 -131 -236 -358 -492 -629 -713 -710 -672 -707 -863 -1078 -1229 -1217 -1051 -846 -763 -870 -1066 -1151 -1009 -700 -392 -217 -192 -231 -212 -57 217 514 757 944 1110 1241 1292 1276 1286 1413 1627 1795 1797 1638 1447 1368 1447 1607 1732 1756 1681 1543 1386 1285 1290 1378 1431 1342 1119 879 744 744 812 840 748 529 261 50 -38 -7 89 170 159 31 -141 -236 -208 -134 -130 -221 -330 -374 -343 -281 -234 -223 -245 -280 -300 -292 -259 -213 -176 -173 -224 -304 -356 -351 -316 -307 -346 -418 -496 -558 -581 -553 -498 -463 -466 -483 -479 -444 -393 -328 -253 -183 -135 -106 -78 -43 -5 33 73 111 129 118 94 81 84 96 107 112 102 68 17 -33 -69 -88 -94 -96 -100 -116 -143 -167 -175 -165 -149 -136 -130 -129 -130 -128 -118 -101 -83 -68 -58 -51 -47 -44 -37 -25 -12 -3 0 2 5 8 12 15 16 16 14 12 11 10 10 9 7 6 5 3 2 1 1 0 0 +-1333 -1240 -1073 -906 -786 -716 -665 -589 -436 -188 104 320 362 246 114 121 310 589 802 822 618 268 -66 -253 -272 -217 -234 -420 -791 -1253 -1645 -1838 -1833 -1756 -1725 -1752 -1791 -1826 -1867 -1877 -1770 -1525 -1265 -1130 -1134 -1137 -985 -669 -325 -130 -146 -277 -353 -295 -166 -94 -133 -240 -365 -503 -645 -733 -732 -695 -733 -898 -1126 -1288 -1280 -1109 -896 -811 -929 -1143 -1239 -1091 -760 -428 -238 -211 -256 -236 -63 245 583 863 1083 1280 1440 1510 1500 1522 1684 1953 2170 2187 2009 1788 1704 1816 2033 2209 2258 2180 2018 1830 1711 1734 1869 1960 1856 1563 1240 1061 1071 1181 1235 1112 796 398 78 -58 -10 143 275 261 52 -235 -400 -357 -232 -229 -394 -598 -687 -639 -531 -450 -435 -486 -564 -614 -607 -547 -458 -384 -384 -507 -701 -836 -838 -769 -761 -876 -1079 -1306 -1500 -1595 -1548 -1425 -1352 -1393 -1476 -1496 -1421 -1286 -1101 -869 -643 -485 -391 -297 -165 
-17 137 312 487 582 551 454 402 431 507 585 632 593 411 109 -212 -459 -606 -680 -718 -782 -945 -1213 -1484 -1626 -1609 -1517 -1452 -1457 -1527 -1622 -1679 -1636 -1488 -1296 -1131 -1016 -956 -944 -937 -843 -612 -315 -84 29 94 198 371 585 809 1002 1114 1132 1121 1170 1299 1470 1641 1791 1890 1909 1880 1893 2007 2187 2374 2539 2671 2741 2737 2726 2796 2932 3013 2967 2851 2770 2736 2668 2507 2266 2002 1757 1551 1368 1156 860 472 47 -344 -663 -915 -1125 -1326 -1549 -1808 -2078 -2322 -2518 -2658 -2731 -2731 -2676 -2598 -2512 -2410 -2285 -2144 -1983 -1780 -1526 -1264 -1051 -918 -838 -758 -642 -482 -307 -164 -90 -98 -161 -241 -300 -317 -282 -215 -162 -177 -263 -354 -366 -266 -108 31 116 162 198 242 320 465 672 869 972 964 911 878 871 835 714 494 216 -47 -236 -362 -500 -734 -1092 -1520 -1910 -2148 -2201 -2134 -2090 -2174 -2365 -2526 -2524 -2352 -2126 -1983 -1957 -1960 -1887 -1698 -1423 -1105 -773 -461 -203 16 274 625 1026 1381 1637 1829 2027 2269 2539 2774 2916 2964 2990 3049 3102 3068 2930 2759 2605 2454 2311 2230 2223 2208 2104 1932 1765 1618 1482 1385 1344 1284 1142 1010 1018 1109 1057 791 543 585 928 1302 1326 661 -709 +0 -1 -1 -1 -1 -2 -2 -3 -2 -2 0 2 3 2 1 1 5 11 16 19 15 7 -3 -9 -10 -9 -10 -20 -40 -67 -93 -111 -117 -119 -124 -133 -143 -154 -166 -176 -174 -157 -136 -127 -133 -140 -126 -89 -45 -19 -22 -43 -57 -50 -29 -17 -25 -46 -72 -103 -135 -159 -163 -159 -173 -218 -281 -330 -336 -299 -248 -230 -271 -341 -378 -341 -243 -140 -80 -73 -90 -85 -23 91 221 334 427 515 590 631 638 659 742 876 991 1016 949 859 832 900 1024 1130 1173 1150 1080 994 942 969 1059 1125 1080 921 741 642 656 732 775 706 512 259 51 -39 -7 97 189 181 36 -167 -287 -259 -170 -169 -294 -450 -521 -489 -410 -351 -342 -385 -450 -494 -492 -447 -377 -318 -321 -426 -593 -712 -718 -663 -660 -765 -948 -1153 -1332 -1424 -1390 -1286 -1226 -1270 -1352 -1377 -1314 -1195 -1027 -814 -605 -458 -371 -283 -158 -17 131 300 470 563 535 442 392 421 497 575 622 585 406 107 -211 -456 -603 -678 -716 -780 -944 -1212 -1483 
-1626 -1609 -1517 -1452 -1457 -1527 -1621 -1677 -1633 -1485 -1292 -1127 -1011 -950 -937 -929 -834 -605 -311 -83 28 92 193 361 568 783 968 1073 1087 1073 1116 1234 1392 1548 1683 1769 1779 1745 1749 1846 2002 2163 2302 2409 2459 2442 2419 2468 2573 2629 2572 2457 2371 2327 2255 2104 1888 1656 1443 1265 1107 928 685 373 36 -268 -512 -700 -853 -997 -1154 -1334 -1518 -1680 -1805 -1886 -1918 -1899 -1841 -1768 -1691 -1605 -1504 -1396 -1276 -1132 -959 -785 -645 -556 -501 -448 -374 -277 -174 -92 -50 -54 -87 -128 -156 -163 -143 -107 -80 -86 -125 -165 -168 -120 -48 13 49 67 81 97 126 180 254 323 354 343 318 300 291 272 228 154 65 -14 -69 -103 -139 -198 -287 -389 -476 -521 -519 -489 -465 -470 -495 -513 -497 -448 -392 -354 -338 -327 -304 -263 -213 -159 -107 -62 -26 1 32 70 110 141 160 170 179 191 202 209 208 200 190 183 174 161 144 126 111 97 84 75 68 62 53 44 37 30 24 20 17 14 10 8 6 6 4 2 1 1 1 0 0 0 0 +-1352 -1393 -1476 -1496 -1421 -1286 -1101 -869 -643 -485 -391 -297 -165 -17 137 312 487 582 551 454 402 431 507 585 632 593 411 109 -212 -459 -606 -680 -718 -782 -945 -1213 -1484 -1626 -1609 -1517 -1452 -1457 -1527 -1622 -1679 -1636 -1488 -1296 -1131 -1016 -956 -944 -937 -843 -612 -315 -84 29 94 198 371 585 809 1002 1114 1132 1121 1170 1299 1470 1641 1791 1890 1909 1880 1893 2007 2187 2374 2539 2671 2741 2737 2726 2796 2932 3013 2967 2851 2770 2736 2668 2507 2266 2002 1757 1551 1368 1156 860 472 47 -344 -663 -915 -1125 -1326 -1549 -1808 -2078 -2322 -2518 -2658 -2731 -2731 -2676 -2598 -2512 -2410 -2285 -2144 -1983 -1780 -1526 -1264 -1051 -918 -838 -758 -642 -482 -307 -164 -90 -98 -161 -241 -300 -317 -282 -215 -162 -177 -263 -354 -366 -266 -108 31 116 162 198 242 320 465 672 869 972 964 911 878 871 835 714 494 216 -47 -236 -362 -500 -734 -1092 -1520 -1910 -2148 -2201 -2134 -2090 -2174 -2365 -2526 -2524 -2352 -2126 -1983 -1957 -1960 -1887 -1698 -1423 -1105 -773 -461 -203 16 274 625 1026 1381 1637 1829 2027 2269 2539 2774 2916 2964 2990 3049 3102 3068 2930 2759 2605 2454 2311 
2230 2223 2208 2104 1932 1765 1618 1482 1385 1344 1284 1142 1010 1018 1109 1057 791 543 585 928 1302 1326 661 -709 -2164 -2682 -1797 -256 607 252 -655 -1236 -1404 -1616 -1998 -2174 -1958 -1720 -1953 -2659 -3310 -3287 -2334 -832 313 274 -968 -2496 -3209 -2701 -1425 -167 548 693 629 705 868 723 128 -436 -350 312 800 607 92 -10 517 1224 1596 1503 1088 536 46 -207 -252 -323 -521 -621 -401 -93 -228 -970 -1782 -1959 -1375 -556 -45 102 130 96 -186 -782 -1451 -1925 -2195 -2408 -2572 -2552 -2341 -2115 -2011 -1969 -1875 -1729 -1568 -1346 -992 -563 -210 -5 105 184 274 469 888 1489 2014 2203 2032 1704 1434 1325 1390 1628 1990 2378 2676 2806 2772 2648 2500 2331 2097 1780 1411 1041 729 572 685 1052 1487 1770 1822 1673 1346 913 601 674 1120 1587 1769 1706 1648 1681 1662 1482 1168 741 131 -692 -1573 -2278 -2651 -2644 -2304 -1833 -1541 -1609 -1926 -2291 -2666 -3102 -3441 -3371 -2820 -2107 -1543 -1046 -374 400 885 850 477 53 +0 -1 -1 -2 -2 -3 -3 -3 -3 -3 -3 -3 -2 -1 1 4 8 10 11 10 10 12 15 19 23 23 17 5 -11 -25 -35 -41 -46 -53 -68 -92 -119 -137 -143 -142 -143 -150 -165 -183 -197 -201 -190 -173 -157 -147 -143 -147 -151 -141 -106 -57 -16 5 18 40 77 126 179 229 262 274 278 299 340 396 453 507 549 568 573 591 641 714 793 867 932 977 996 1013 1060 1135 1189 1193 1169 1157 1164 1156 1105 1017 914 816 732 657 564 426 237 24 -179 -350 -490 -612 -731 -866 -1025 -1194 -1352 -1486 -1589 -1653 -1674 -1661 -1632 -1597 -1551 -1488 -1412 -1321 -1199 -1039 -870 -731 -645 -595 -544 -465 -353 -227 -123 -68 -75 -124 -186 -234 -249 -223 -172 -131 -144 -215 -291 -303 -222 -91 26 98 138 170 209 279 408 593 771 867 865 821 796 793 764 656 456 200 -44 -221 -341 -472 -696 -1039 -1451 -1829 -2064 -2121 -2063 -2026 -2113 -2305 -2468 -2472 -2308 -2091 -1954 -1932 -1938 -1870 -1685 -1414 -1099 -770 -460 -203 15 273 624 1025 1380 1637 1829 2026 2267 2537 2770 2910 2956 2979 3035 3084 3047 2906 2732 2575 2422 2277 2192 2181 2161 2055 1882 1715 1567 1432 1334 1290 1229 1089 960 964 1046 993 740 506 543 857 1198 
1214 602 -643 -1953 -2407 -1604 -228 535 221 -572 -1072 -1210 -1384 -1700 -1838 -1644 -1434 -1617 -2186 -2700 -2662 -1875 -664 247 214 -753 -1926 -2454 -2047 -1071 -125 404 506 455 505 615 507 88 -300 -239 210 532 399 59 -7 328 768 990 921 658 320 27 -121 -145 -184 -292 -343 -218 -50 -121 -504 -912 -988 -683 -272 -22 48 60 43 -84 -345 -629 -820 -918 -988 -1035 -1008 -907 -803 -748 -718 -669 -604 -536 -450 -325 -180 -66 -2 31 53 77 129 239 390 514 548 492 401 328 294 299 340 403 467 509 517 494 456 416 374 324 265 202 143 96 72 83 123 166 190 186 163 125 80 50 53 84 113 119 108 98 94 87 73 53 31 5 -26 -53 -71 -76 -68 -54 -39 -29 -27 -29 -30 -30 -30 -28 -24 -16 -10 -6 -3 -1 0 0 0 0 0 +878 871 835 714 494 216 -47 -236 -362 -500 -734 -1092 -1520 -1910 -2148 -2201 -2134 -2090 -2174 -2365 -2526 -2524 -2352 -2126 -1983 -1957 -1960 -1887 -1698 -1423 -1105 -773 -461 -203 16 274 625 1026 1381 1637 1829 2027 2269 2539 2774 2916 2964 2990 3049 3102 3068 2930 2759 2605 2454 2311 2230 2223 2208 2104 1932 1765 1618 1482 1385 1344 1284 1142 1010 1018 1109 1057 791 543 585 928 1302 1326 661 -709 -2164 -2682 -1797 -256 607 252 -655 -1236 -1404 -1616 -1998 -2174 -1958 -1720 -1953 -2659 -3310 -3287 -2334 -832 313 274 -968 -2496 -3209 -2701 -1425 -167 548 693 629 705 868 723 128 -436 -350 312 800 607 92 -10 517 1224 1596 1503 1088 536 46 -207 -252 -323 -521 -621 -401 -93 -228 -970 -1782 -1959 -1375 -556 -45 102 130 96 -186 -782 -1451 -1925 -2195 -2408 -2572 -2552 -2341 -2115 -2011 -1969 -1875 -1729 -1568 -1346 -992 -563 -210 -5 105 184 274 469 888 1489 2014 2203 2032 1704 1434 1325 1390 1628 1990 2378 2676 2806 2772 2648 2500 2331 2097 1780 1411 1041 729 572 685 1052 1487 1770 1822 1673 1346 913 601 674 1120 1587 1769 1706 1648 1681 1662 1482 1168 741 131 -692 -1573 -2278 -2651 -2644 -2304 -1833 -1541 -1609 -1926 -2291 -2666 -3102 -3441 -3371 -2820 -2107 -1543 -1046 -374 400 885 850 477 53 -345 -752 -1057 -1104 -950 -792 -625 -239 352 767 714 447 474 865 1148 964 531 253 100 -282 -920 
-1339 -1166 -639 -298 -324 -464 -501 -494 -529 -509 -349 -207 -266 -424 -383 -39 420 798 1057 1185 1070 686 257 33 -38 -237 -650 -1021 -1055 -774 -460 -334 -398 -550 -722 -870 -928 -840 -649 -520 -637 -1043 -1568 -1945 -2026 -1900 -1750 -1636 -1432 -999 -381 187 473 443 315 391 859 1663 2553 3237 3535 3475 3287 3259 3490 3777 3793 3405 2801 2265 1928 1748 1691 1773 1964 2128 2124 1920 1571 1142 697 328 93 -69 -307 -684 -1073 -1285 -1296 -1284 -1443 -1768 -2103 -2331 -2436 -2383 -2067 -1515 -1010 -847 -955 -938 -571 -141 -109 -503 -832 -650 -55 448 513 260 49 79 246 360 386 428 494 422 132 -144 -29 515 1031 1034 542 4 -272 -433 -772 -1272 -1574 -1395 -849 -343 -200 -377 -576 +0 0 0 0 0 0 -1 -1 -2 -3 -6 -9 -15 -22 -28 -33 -36 -40 -46 -55 -65 -72 -73 -72 -73 -78 -84 -87 -84 -76 -63 -47 -30 -14 1 20 49 86 122 152 179 207 243 285 325 356 377 397 421 446 457 453 443 433 422 411 411 423 434 426 404 380 359 339 325 325 319 291 265 274 306 299 230 161 178 289 416 433 220 -243 -756 -957 -655 -96 230 97 -259 -498 -576 -676 -851 -943 -864 -773 -893 -1236 -1564 -1580 -1140 -413 157 140 -503 -1317 -1719 -1468 -786 -94 310 397 366 415 518 437 78 -271 -220 198 514 395 60 -7 347 832 1097 1044 763 380 32 -150 -185 -239 -388 -467 -304 -72 -176 -755 -1398 -1550 -1097 -447 -37 83 106 79 -156 -657 -1227 -1638 -1880 -2076 -2231 -2227 -2055 -1868 -1786 -1758 -1683 -1560 -1422 -1227 -909 -519 -195 -5 97 172 257 442 841 1415 1921 2108 1951 1641 1385 1283 1350 1586 1943 2328 2625 2759 2731 2613 2471 2308 2080 1767 1403 1036 726 570 683 1050 1485 1769 1821 1673 1346 912 600 673 1118 1583 1764 1700 1640 1671 1650 1470 1156 732 129 -682 -1547 -2236 -2596 -2583 -2245 -1782 -1494 -1555 -1856 -2201 -2553 -2960 -3272 -3194 -2661 -1981 -1445 -976 -348 369 814 778 434 48 -312 -675 -944 -981 -839 -696 -546 -208 303 656 607 377 397 720 950 792 433 204 80 -225 -728 -1051 -907 -493 -228 -246 -349 -373 -365 -387 -369 -251 -147 -187 -295 -264 -27 282 531 695 771 688 436 161 20 -24 -144 -389 -603 -615 -445 
-261 -187 -220 -299 -387 -460 -483 -430 -328 -258 -312 -502 -741 -904 -926 -854 -773 -709 -610 -418 -157 75 186 171 119 145 312 593 891 1106 1181 1135 1050 1017 1064 1124 1102 965 774 610 506 446 420 429 462 486 471 414 328 231 137 62 17 -13 -53 -114 -173 -199 -194 -185 -200 -235 -269 -286 -286 -268 -223 -156 -99 -79 -85 -80 -46 -11 -8 -35 -54 -40 -4 23 25 11 2 3 9 12 11 12 12 9 2 -3 -1 7 13 11 5 0 -2 -3 -4 -5 -5 -3 -2 -1 -1 -1 0 +-1568 -1346 -992 -563 -210 -5 105 184 274 469 888 1489 2014 2203 2032 1704 1434 1325 1390 1628 1990 2378 2676 2806 2772 2648 2500 2331 2097 1780 1411 1041 729 572 685 1052 1487 1770 1822 1673 1346 913 601 674 1120 1587 1769 1706 1648 1681 1662 1482 1168 741 131 -692 -1573 -2278 -2651 -2644 -2304 -1833 -1541 -1609 -1926 -2291 -2666 -3102 -3441 -3371 -2820 -2107 -1543 -1046 -374 400 885 850 477 53 -345 -752 -1057 -1104 -950 -792 -625 -239 352 767 714 447 474 865 1148 964 531 253 100 -282 -920 -1339 -1166 -639 -298 -324 -464 -501 -494 -529 -509 -349 -207 -266 -424 -383 -39 420 798 1057 1185 1070 686 257 33 -38 -237 -650 -1021 -1055 -774 -460 -334 -398 -550 -722 -870 -928 -840 -649 -520 -637 -1043 -1568 -1945 -2026 -1900 -1750 -1636 -1432 -999 -381 187 473 443 315 391 859 1663 2553 3237 3535 3475 3287 3259 3490 3777 3793 3405 2801 2265 1928 1748 1691 1773 1964 2128 2124 1920 1571 1142 697 328 93 -69 -307 -684 -1073 -1285 -1296 -1284 -1443 -1768 -2103 -2331 -2436 -2383 -2067 -1515 -1010 -847 -955 -938 -571 -141 -109 -503 -832 -650 -55 448 513 260 49 79 246 360 386 428 494 422 132 -144 -29 515 1031 1034 542 4 -272 -433 -772 -1272 -1574 -1395 -849 -343 -200 -377 -576 -585 -515 -620 -911 -1082 -861 -339 125 303 262 201 225 288 263 26 -426 -921 -1265 -1446 -1590 -1697 -1606 -1321 -1178 -1494 -2110 -2510 -2436 -2162 -2113 -2386 -2745 -2961 -2969 -2789 -2459 -2070 -1737 -1488 -1244 -945 -628 -362 -133 158 602 1173 1727 2133 2450 2896 3573 4258 4607 4523 4252 4070 4041 4113 4266 4452 4498 4277 3905 3623 3482 3306 2976 2569 2188 1798 1338 889 581 401 
236 66 -42 -95 -232 -578 -1096 -1631 -2015 -2170 -2152 -2143 -2268 -2434 -2447 -2292 -2179 -2241 -2301 -2082 -1594 -1128 -913 -891 -887 -833 -787 -794 -821 -775 -538 -38 607 1093 1178 903 544 335 316 417 578 740 836 834 761 639 448 196 -30 -137 -130 -97 -75 -35 38 115 149 102 -56 -278 -394 -244 97 309 182 -131 -311 -274 -221 -333 -540 -691 -755 -773 -724 -581 -470 -586 -951 -1407 -1815 -2151 -2383 -2411 -2255 +0 -1 -1 -1 -1 -1 0 0 1 2 6 11 19 24 26 24 23 24 29 37 51 67 82 94 101 104 106 106 103 93 79 62 46 38 49 79 118 149 161 156 131 93 64 75 131 194 225 226 227 241 247 229 187 123 22 -124 -290 -434 -522 -537 -483 -396 -343 -369 -454 -555 -664 -793 -904 -909 -781 -598 -449 -312 -115 124 282 277 159 18 -121 -269 -386 -411 -361 -307 -247 -97 144 320 304 193 209 388 524 447 250 121 48 -140 -464 -686 -606 -338 -160 -177 -256 -280 -280 -304 -297 -206 -124 -161 -260 -238 -25 267 513 687 779 712 461 174 22 -27 -167 -461 -732 -764 -566 -340 -249 -299 -417 -553 -671 -722 -659 -514 -415 -512 -845 -1279 -1599 -1677 -1584 -1469 -1383 -1219 -856 -329 162 412 388 278 347 766 1492 2303 2935 3221 3182 3024 3012 3240 3521 3551 3200 2643 2145 1832 1667 1618 1702 1892 2056 2058 1865 1530 1115 682 321 91 -68 -304 -677 -1063 -1275 -1288 -1277 -1437 -1762 -2098 -2327 -2434 -2382 -2066 -1515 -1010 -847 -955 -938 -571 -141 -109 -502 -830 -648 -55 444 508 257 48 77 242 354 378 419 482 411 128 -140 -29 496 990 989 517 3 -258 -409 -726 -1191 -1468 -1296 -785 -316 -184 -344 -523 -528 -463 -554 -809 -956 -756 -296 108 261 224 171 190 241 219 21 -351 -752 -1025 -1162 -1268 -1342 -1260 -1028 -909 -1143 -1599 -1886 -1814 -1595 -1544 -1727 -1967 -2101 -2085 -1939 -1692 -1409 -1170 -991 -819 -616 -404 -231 -84 98 368 709 1032 1258 1425 1662 2024 2379 2538 2456 2276 2147 2099 2104 2149 2208 2196 2054 1844 1683 1590 1484 1312 1113 931 751 548 357 229 155 89 24 -16 -34 -81 -198 -367 -534 -644 -678 -657 -639 -660 -690 -677 -618 -572 -573 -573 -505 -376 -259 -203 -193 -186 -169 -155 -152 -152 -139 -93 
-7 97 169 175 129 75 44 40 51 67 83 89 85 74 59 39 16 -3 -11 -10 -7 -5 -3 2 6 7 4 -3 -11 -15 -9 3 8 4 -4 -7 -6 -4 -5 -7 -8 -8 -7 -5 -4 -3 -3 -3 -3 -3 -2 -2 -1 0 +3237 3535 3475 3287 3259 3490 3777 3793 3405 2801 2265 1928 1748 1691 1773 1964 2128 2124 1920 1571 1142 697 328 93 -69 -307 -684 -1073 -1285 -1296 -1284 -1443 -1768 -2103 -2331 -2436 -2383 -2067 -1515 -1010 -847 -955 -938 -571 -141 -109 -503 -832 -650 -55 448 513 260 49 79 246 360 386 428 494 422 132 -144 -29 515 1031 1034 542 4 -272 -433 -772 -1272 -1574 -1395 -849 -343 -200 -377 -576 -585 -515 -620 -911 -1082 -861 -339 125 303 262 201 225 288 263 26 -426 -921 -1265 -1446 -1590 -1697 -1606 -1321 -1178 -1494 -2110 -2510 -2436 -2162 -2113 -2386 -2745 -2961 -2969 -2789 -2459 -2070 -1737 -1488 -1244 -945 -628 -362 -133 158 602 1173 1727 2133 2450 2896 3573 4258 4607 4523 4252 4070 4041 4113 4266 4452 4498 4277 3905 3623 3482 3306 2976 2569 2188 1798 1338 889 581 401 236 66 -42 -95 -232 -578 -1096 -1631 -2015 -2170 -2152 -2143 -2268 -2434 -2447 -2292 -2179 -2241 -2301 -2082 -1594 -1128 -913 -891 -887 -833 -787 -794 -821 -775 -538 -38 607 1093 1178 903 544 335 316 417 578 740 836 834 761 639 448 196 -30 -137 -130 -97 -75 -35 38 115 149 102 -56 -278 -394 -244 97 309 182 -131 -311 -274 -221 -333 -540 -691 -755 -773 -724 -581 -470 -586 -951 -1407 -1815 -2151 -2383 -2411 -2255 -2154 -2323 -2686 -2984 -3120 -3243 -3462 -3658 -3632 -3373 -3057 -2850 -2765 -2680 -2431 -1968 -1440 -1061 -819 -421 357 1338 2127 2604 3037 3647 4282 4642 4716 4795 5091 5516 5840 5966 5958 5915 5861 5750 5527 5194 4831 4520 4218 3793 3167 2403 1630 929 339 -117 -463 -767 -1074 -1374 -1687 -2110 -2685 -3273 -3693 -3964 -4243 -4516 -4525 -4118 -3540 -3228 -3364 -3721 -3939 -3812 -3343 -2672 -2003 -1480 -1072 -613 -25 586 1083 1473 1839 2193 2476 2687 2893 3124 3353 3577 3809 3959 3868 3517 3126 2908 2820 2656 2355 2065 1866 1606 1107 459 -55 -291 -386 -533 -748 -924 -1033 -1159 -1371 -1645 -1946 -2272 -2577 -2747 -2708 -2547 -2425 -2415 
-2475 -2591 -2800 -3068 -3254 -3257 -3153 -3107 -3187 -3350 -3570 -3857 -4159 -4341 -4325 -4178 -4018 -3905 -3849 -3875 -3986 -4094 -4090 -3963 -3766 -3489 -3048 -2402 -1619 -820 -94 501 967 1406 1976 2733 3567 4350 5072 5772 +0 0 1 2 3 6 10 12 14 15 15 15 16 18 22 28 35 39 40 36 29 19 10 3 -3 -13 -30 -50 -64 -69 -73 -87 -113 -143 -167 -185 -191 -175 -135 -95 -83 -98 -101 -65 -17 -14 -65 -111 -90 -8 66 79 41 8 13 43 66 73 84 100 88 28 -32 -7 121 249 257 138 1 -74 -120 -219 -370 -469 -426 -266 -110 -66 -127 -197 -205 -184 -226 -339 -411 -334 -134 50 124 109 85 97 127 118 11 -198 -436 -608 -707 -789 -856 -822 -687 -622 -800 -1147 -1384 -1362 -1226 -1214 -1389 -1620 -1770 -1797 -1710 -1527 -1301 -1105 -958 -810 -623 -419 -244 -91 108 418 823 1224 1528 1772 2115 2635 3169 3460 3427 3251 3138 3143 3225 3373 3548 3612 3462 3185 2977 2881 2755 2497 2171 1861 1539 1153 770 506 351 208 58 -38 -86 -210 -525 -999 -1494 -1855 -2006 -1999 -1999 -2124 -2288 -2309 -2172 -2072 -2139 -2203 -2000 -1536 -1090 -885 -866 -865 -814 -771 -780 -808 -764 -532 -38 601 1084 1169 897 541 333 315 416 577 739 835 833 761 639 447 195 -30 -137 -130 -97 -75 -35 37 114 147 101 -56 -275 -389 -240 95 302 177 -128 -303 -266 -214 -321 -519 -662 -721 -735 -686 -549 -442 -549 -887 -1307 -1678 -1980 -2183 -2198 -2045 -1944 -2085 -2398 -2649 -2755 -2847 -3021 -3173 -3131 -2888 -2601 -2409 -2321 -2234 -2012 -1618 -1175 -859 -658 -336 282 1049 1654 2008 2322 2763 3216 3455 3478 3502 3682 3952 4141 4189 4141 4068 3987 3870 3679 3418 3144 2907 2681 2382 1965 1472 986 555 199 -69 -266 -435 -601 -758 -917 -1130 -1417 -1701 -1890 -1998 -2105 -2206 -2175 -1946 -1645 -1475 -1511 -1642 -1707 -1624 -1398 -1096 -806 -585 -416 -233 -10 213 386 514 628 732 809 858 903 952 998 1040 1079 1095 1042 923 799 723 682 625 538 458 402 336 224 90 -11 -54 -69 -92 -125 -149 -160 -173 -198 -228 -259 -290 -316 -322 -305 -274 -249 -237 -231 -230 -236 -245 -247 -233 -214 -198 -192 -189 -189 -191 -191 -186 -172 -154 -136 -122 -110 -100 
-93 -86 -77 -66 -56 -46 -35 -23 -14 -6 -1 2 3 3 3 3 2 2 1 0 +-578 -1096 -1631 -2015 -2170 -2152 -2143 -2268 -2434 -2447 -2292 -2179 -2241 -2301 -2082 -1594 -1128 -913 -891 -887 -833 -787 -794 -821 -775 -538 -38 607 1093 1178 903 544 335 316 417 578 740 836 834 761 639 448 196 -30 -137 -130 -97 -75 -35 38 115 149 102 -56 -278 -394 -244 97 309 182 -131 -311 -274 -221 -333 -540 -691 -755 -773 -724 -581 -470 -586 -951 -1407 -1815 -2151 -2383 -2411 -2255 -2154 -2323 -2686 -2984 -3120 -3243 -3462 -3658 -3632 -3373 -3057 -2850 -2765 -2680 -2431 -1968 -1440 -1061 -819 -421 357 1338 2127 2604 3037 3647 4282 4642 4716 4795 5091 5516 5840 5966 5958 5915 5861 5750 5527 5194 4831 4520 4218 3793 3167 2403 1630 929 339 -117 -463 -767 -1074 -1374 -1687 -2110 -2685 -3273 -3693 -3964 -4243 -4516 -4525 -4118 -3540 -3228 -3364 -3721 -3939 -3812 -3343 -2672 -2003 -1480 -1072 -613 -25 586 1083 1473 1839 2193 2476 2687 2893 3124 3353 3577 3809 3959 3868 3517 3126 2908 2820 2656 2355 2065 1866 1606 1107 459 -55 -291 -386 -533 -748 -924 -1033 -1159 -1371 -1645 -1946 -2272 -2577 -2747 -2708 -2547 -2425 -2415 -2475 -2591 -2800 -3068 -3254 -3257 -3153 -3107 -3187 -3350 -3570 -3857 -4159 -4341 -4325 -4178 -4018 -3905 -3849 -3875 -3986 -4094 -4090 -3963 -3766 -3489 -3048 -2402 -1619 -820 -94 501 967 1406 1976 2733 3567 4350 5072 5772 6408 6915 7325 7710 7999 8003 7706 7373 7272 7360 7330 6986 6412 5823 5301 4733 3971 3039 2149 1487 978 356 -503 -1382 -2019 -2479 -3089 -3963 -4795 -5219 -5221 -5071 -4983 -5011 -5166 -5424 -5616 -5548 -5242 -4920 -4650 -4221 -3462 -2523 -1674 -971 -318 235 561 784 1259 2094 2937 3435 3704 4133 4796 5339 5461 5296 5199 5312 5456 5370 4990 4510 4239 4285 4367 4001 3012 1775 837 340 -10 -476 -997 -1411 -1759 -2191 -2677 -3027 -3173 -3272 -3480 -3762 -3953 -3968 -3847 -3685 -3558 -3488 -3427 -3290 -3087 -2954 -2994 -3097 -3024 -2686 -2281 -2097 -2214 -2459 -2630 -2713 -2834 -3066 -3300 -3381 -3313 -3283 -3445 -3721 -3893 -3858 -3726 -3619 -3513 -3304 -2996 -2674 
-2328 -1800 -990 -55 707 1152 1433 1832 2490 3334 4210 5053 5880 6673 7324 7734 7923 8019 8123 8226 8238 8102 7844 7577 7401 7311 7162 6768 6022 4948 3673 2404 1346 569 -106 -961 -2080 -3241 +0 -1 -1 -2 -3 -5 -6 -8 -11 -14 -16 -18 -22 -26 -27 -24 -19 -18 -19 -21 -22 -23 -25 -28 -29 -22 -2 27 53 62 50 32 21 21 29 43 59 70 73 70 62 45 21 -4 -17 -16 -13 -10 -5 5 17 23 16 -10 -48 -71 -45 18 60 36 -28 -68 -61 -51 -79 -131 -172 -193 -203 -196 -161 -134 -171 -284 -430 -567 -688 -780 -806 -771 -753 -829 -979 -1110 -1184 -1256 -1367 -1472 -1490 -1410 -1302 -1236 -1220 -1204 -1111 -915 -681 -510 -400 -209 179 684 1105 1373 1626 1981 2359 2594 2672 2753 2963 3253 3490 3610 3651 3670 3681 3655 3555 3380 3179 3009 2839 2580 2178 1670 1144 658 242 -85 -339 -566 -800 -1033 -1279 -1614 -2071 -2546 -2896 -3135 -3383 -3628 -3664 -3359 -2910 -2672 -2804 -3124 -3330 -3244 -2863 -2303 -1737 -1292 -941 -542 -23 523 971 1328 1667 1998 2267 2472 2674 2900 3126 3349 3580 3735 3664 3343 2982 2783 2708 2558 2275 2000 1813 1564 1081 449 -54 -287 -381 -527 -740 -916 -1025 -1152 -1364 -1638 -1940 -2266 -2572 -2744 -2707 -2546 -2425 -2415 -2475 -2591 -2799 -3066 -3251 -3251 -3145 -3097 -3173 -3332 -3546 -3826 -4120 -4293 -4270 -4117 -3952 -3832 -3769 -3786 -3884 -3979 -3964 -3830 -3629 -3352 -2918 -2292 -1540 -777 -89 470 905 1310 1834 2526 3282 3983 4622 5233 5780 6205 6538 6844 7061 7024 6723 6393 6267 6301 6236 5904 5381 4853 4387 3889 3239 2460 1726 1185 773 279 -392 -1066 -1544 -1879 -2321 -2950 -3537 -3813 -3777 -3634 -3535 -3519 -3591 -3731 -3822 -3735 -3490 -3239 -3027 -2716 -2201 -1585 -1039 -596 -193 140 330 456 722 1186 1641 1892 2012 2212 2530 2773 2794 2668 2579 2593 2621 2536 2318 2060 1903 1890 1892 1703 1258 728 336 134 -4 -181 -371 -514 -628 -765 -915 -1012 -1038 -1046 -1087 -1148 -1178 -1154 -1091 -1020 -959 -916 -876 -819 -748 -696 -685 -689 -653 -562 -463 -413 -422 -454 -469 -468 -472 -493 -511 -505 -477 -454 -458 -475 -477 -453 -419 -389 -361 -324 -280 -237 -197 -144 -75 -4 
47 73 86 103 131 164 193 215 232 244 246 239 224 205 188 172 154 134 114 98 83 69 57 46 33 21 12 6 2 0 -1 -1 -1 0 +1839 2193 2476 2687 2893 3124 3353 3577 3809 3959 3868 3517 3126 2908 2820 2656 2355 2065 1866 1606 1107 459 -55 -291 -386 -533 -748 -924 -1033 -1159 -1371 -1645 -1946 -2272 -2577 -2747 -2708 -2547 -2425 -2415 -2475 -2591 -2800 -3068 -3254 -3257 -3153 -3107 -3187 -3350 -3570 -3857 -4159 -4341 -4325 -4178 -4018 -3905 -3849 -3875 -3986 -4094 -4090 -3963 -3766 -3489 -3048 -2402 -1619 -820 -94 501 967 1406 1976 2733 3567 4350 5072 5772 6408 6915 7325 7710 7999 8003 7706 7373 7272 7360 7330 6986 6412 5823 5301 4733 3971 3039 2149 1487 978 356 -503 -1382 -2019 -2479 -3089 -3963 -4795 -5219 -5221 -5071 -4983 -5011 -5166 -5424 -5616 -5548 -5242 -4920 -4650 -4221 -3462 -2523 -1674 -971 -318 235 561 784 1259 2094 2937 3435 3704 4133 4796 5339 5461 5296 5199 5312 5456 5370 4990 4510 4239 4285 4367 4001 3012 1775 837 340 -10 -476 -997 -1411 -1759 -2191 -2677 -3027 -3173 -3272 -3480 -3762 -3953 -3968 -3847 -3685 -3558 -3488 -3427 -3290 -3087 -2954 -2994 -3097 -3024 -2686 -2281 -2097 -2214 -2459 -2630 -2713 -2834 -3066 -3300 -3381 -3313 -3283 -3445 -3721 -3893 -3858 -3726 -3619 -3513 -3304 -2996 -2674 -2328 -1800 -990 -55 707 1152 1433 1832 2490 3334 4210 5053 5880 6673 7324 7734 7923 8019 8123 8226 8238 8102 7844 7577 7401 7311 7162 6768 6022 4948 3673 2404 1346 569 -106 -961 -2080 -3241 -4167 -4813 -5319 -5784 -6201 -6603 -7074 -7571 -7890 -7861 -7514 -6955 -6232 -5436 -4784 -4410 -4121 -3587 -2761 -1928 -1282 -674 130 1132 2181 3216 4204 4970 5348 5490 5767 6287 6756 6903 6863 6926 7108 7168 6965 6555 5948 5061 3968 2969 2286 1824 1381 926 476 -142 -1156 -2406 -3356 -3661 -3580 -3662 -4149 -4815 -5308 -5462 -5295 -4935 -4615 -4575 -4805 -4992 -4817 -4322 -3817 -3464 -3086 -2463 -1696 -1154 -1081 -1346 -1590 -1559 -1288 -1021 -1010 -1330 -1842 -2317 -2619 -2780 -2892 -2979 -3043 -3183 -3534 -4044 -4418 -4405 -4073 -3688 -3364 -2961 -2381 -1761 -1257 -766 -56 863 
1696 2250 2694 3324 4201 5128 5944 6642 7250 7728 8046 8247 8358 8337 8175 7952 7735 7463 7045 6529 6041 5585 4995 4128 3050 1972 1040 244 -515 -1304 -2096 -2815 -3441 -4041 -4712 -5474 -6247 -6922 -7388 -7537 -7338 -6939 -6586 -6376 -6156 -5776 -5290 -4820 -4283 -3463 -2350 -1201 -199 750 1763 2736 +0 0 1 1 3 6 9 12 16 22 26 28 29 32 36 38 39 38 39 37 28 12 -2 -10 -15 -22 -32 -43 -51 -62 -78 -99 -125 -154 -185 -208 -217 -215 -215 -226 -243 -266 -301 -345 -382 -399 -402 -413 -441 -482 -533 -598 -669 -723 -746 -745 -741 -744 -758 -787 -834 -884 -909 -907 -887 -845 -759 -614 -425 -222 -27 142 281 418 602 853 1139 1422 1695 1972 2237 2466 2668 2866 3034 3098 3042 2966 2982 3076 3120 3027 2828 2614 2421 2198 1875 1460 1049 737 492 182 -262 -730 -1081 -1347 -1703 -2215 -2718 -2997 -3039 -2992 -2979 -3033 -3166 -3367 -3528 -3528 -3373 -3203 -3061 -2811 -2331 -1717 -1152 -675 -224 166 401 567 919 1544 2186 2580 2806 3160 3698 4152 4282 4187 4144 4266 4417 4380 4100 3732 3533 3596 3691 3404 2578 1529 725 296 -9 -421 -886 -1260 -1579 -1977 -2428 -2759 -2906 -3011 -3217 -3493 -3686 -3716 -3616 -3478 -3371 -3316 -3270 -3150 -2965 -2846 -2894 -3001 -2939 -2618 -2229 -2053 -2173 -2419 -2592 -2678 -2803 -3037 -3274 -3358 -3295 -3269 -3434 -3712 -3886 -3854 -3724 -3618 -3513 -3304 -2996 -2674 -2327 -1799 -989 -55 705 1148 1426 1821 2472 3307 4169 4996 5803 6575 7202 7588 7756 7833 7914 7993 7982 7829 7556 7277 7084 6975 6808 6411 5682 4650 3438 2241 1249 525 -98 -881 -1896 -2939 -3760 -4320 -4748 -5135 -5475 -5796 -6173 -6566 -6800 -6731 -6394 -5879 -5231 -4531 -3960 -3625 -3362 -2904 -2218 -1537 -1014 -529 101 873 1667 2437 3158 3699 3944 4010 4171 4504 4791 4846 4770 4763 4836 4824 4637 4314 3871 3255 2522 1865 1418 1117 835 553 280 -83 -664 -1364 -1876 -2018 -1945 -1961 -2189 -2502 -2717 -2753 -2627 -2410 -2218 -2162 -2233 -2281 -2163 -1907 -1655 -1475 -1290 -1011 -683 -456 -419 -511 -592 -568 -460 -357 -346 -445 -603 -741 -818 -848 -862 -867 -863 -881 -953 -1062 -1130 -1096 
-987 -868 -770 -658 -514 -369 -256 -151 -11 159 302 387 448 533 650 764 854 917 962 984 984 966 938 895 838 778 721 661 593 521 457 399 337 263 183 111 54 12 -24 -56 -83 -104 -116 -126 -134 -141 -145 -146 -139 -126 -108 -90 -74 -61 -50 -40 -30 -22 -15 -10 -5 -2 -1 0 0 0 +-2677 -3027 -3173 -3272 -3480 -3762 -3953 -3968 -3847 -3685 -3558 -3488 -3427 -3290 -3087 -2954 -2994 -3097 -3024 -2686 -2281 -2097 -2214 -2459 -2630 -2713 -2834 -3066 -3300 -3381 -3313 -3283 -3445 -3721 -3893 -3858 -3726 -3619 -3513 -3304 -2996 -2674 -2328 -1800 -990 -55 707 1152 1433 1832 2490 3334 4210 5053 5880 6673 7324 7734 7923 8019 8123 8226 8238 8102 7844 7577 7401 7311 7162 6768 6022 4948 3673 2404 1346 569 -106 -961 -2080 -3241 -4167 -4813 -5319 -5784 -6201 -6603 -7074 -7571 -7890 -7861 -7514 -6955 -6232 -5436 -4784 -4410 -4121 -3587 -2761 -1928 -1282 -674 130 1132 2181 3216 4204 4970 5348 5490 5767 6287 6756 6903 6863 6926 7108 7168 6965 6555 5948 5061 3968 2969 2286 1824 1381 926 476 -142 -1156 -2406 -3356 -3661 -3580 -3662 -4149 -4815 -5308 -5462 -5295 -4935 -4615 -4575 -4805 -4992 -4817 -4322 -3817 -3464 -3086 -2463 -1696 -1154 -1081 -1346 -1590 -1559 -1288 -1021 -1010 -1330 -1842 -2317 -2619 -2780 -2892 -2979 -3043 -3183 -3534 -4044 -4418 -4405 -4073 -3688 -3364 -2961 -2381 -1761 -1257 -766 -56 863 1696 2250 2694 3324 4201 5128 5944 6642 7250 7728 8046 8247 8358 8337 8175 7952 7735 7463 7045 6529 6041 5585 4995 4128 3050 1972 1040 244 -515 -1304 -2096 -2815 -3441 -4041 -4712 -5474 -6247 -6922 -7388 -7537 -7338 -6939 -6586 -6376 -6156 -5776 -5290 -4820 -4283 -3463 -2350 -1201 -199 750 1763 2736 3515 4173 4886 5596 6077 6349 6705 7263 7721 7762 7494 7250 7085 6756 6157 5455 4799 4127 3388 2685 2046 1290 322 -622 -1287 -1826 -2614 -3631 -4399 -4618 -4574 -4690 -4951 -5053 -4964 -4964 -5153 -5251 -5007 -4562 -4201 -3947 -3598 -3079 -2534 -2085 -1695 -1317 -1017 -860 -806 -783 -788 -821 -793 -625 -418 -402 -686 -1144 -1599 -2011 -2444 -2867 -3151 -3258 -3349 -3603 -3981 -4245 -4218 -3970 
-3706 -3543 -3462 -3381 -3208 -2830 -2166 -1279 -352 504 1349 2246 3093 3747 4247 4801 5499 6195 6747 7220 7743 8263 8569 8575 8419 8236 8003 7634 7149 6621 6051 5400 4729 4132 3558 2812 1817 737 -232 -1096 -1992 -2925 -3750 -4417 -5063 -5774 -6391 -6687 -6685 -6638 -6712 -6806 -6759 -6538 -6159 -5564 -4723 -3787 -2964 -2230 -1380 -365 595 1322 1927 2614 3343 3934 4386 4891 5500 6026 6342 6572 6836 6985 6804 6361 5920 5548 5048 4325 3579 3013 2523 1875 1054 250 +0 -1 -2 -3 -5 -8 -11 -14 -17 -21 -25 -29 -33 -37 -40 -44 -50 -59 -64 -63 -59 -60 -69 -83 -97 -108 -122 -141 -163 -179 -187 -198 -220 -252 -279 -292 -298 -305 -312 -309 -294 -275 -251 -203 -117 -7 90 153 198 263 371 516 676 841 1013 1189 1350 1472 1559 1626 1699 1775 1830 1853 1846 1835 1841 1868 1879 1824 1665 1402 1068 716 410 177 -34 -315 -696 -1108 -1455 -1717 -1938 -2151 -2353 -2557 -2793 -3047 -3237 -3286 -3200 -3014 -2750 -2441 -2186 -2049 -1947 -1724 -1349 -957 -647 -345 67 597 1167 1746 2316 2777 3030 3152 3356 3708 4037 4177 4205 4298 4465 4557 4480 4266 3914 3369 2670 2020 1572 1267 969 656 341 -103 -845 -1775 -2499 -2751 -2713 -2801 -3200 -3746 -4163 -4320 -4221 -3964 -3737 -3732 -3949 -4132 -4015 -3628 -3227 -2948 -2643 -2123 -1471 -1007 -949 -1189 -1412 -1392 -1156 -922 -916 -1213 -1687 -2133 -2421 -2582 -2697 -2790 -2861 -3004 -3348 -3845 -4216 -4217 -3912 -3553 -3251 -2870 -2314 -1716 -1228 -750 -55 848 1671 2220 2663 3292 4167 5092 5910 6612 7225 7707 8030 8236 8351 8332 8173 7952 7735 7461 7041 6524 6033 5574 4981 4113 3036 1960 1032 242 -511 -1290 -2069 -2774 -3384 -3966 -4614 -5348 -6087 -6726 -7159 -7284 -7070 -6665 -6305 -6084 -5853 -5472 -4992 -4531 -4011 -3229 -2182 -1111 -184 686 1606 2480 3170 3745 4361 4967 5364 5572 5850 6298 6654 6645 6376 6127 5946 5631 5095 4482 3914 3341 2721 2140 1617 1011 250 -480 -985 -1384 -1964 -2703 -3245 -3374 -3309 -3361 -3512 -3548 -3451 -3414 -3507 -3535 -3334 -3003 -2735 -2540 -2288 -1935 -1573 -1278 -1026 -788 -600 -501 -463 -444 -441 -453 -431 
-335 -221 -209 -352 -577 -794 -982 -1175 -1355 -1464 -1489 -1504 -1590 -1726 -1808 -1763 -1629 -1492 -1399 -1341 -1283 -1193 -1031 -773 -447 -121 168 440 717 965 1142 1264 1395 1558 1713 1818 1894 1979 2055 2075 2018 1925 1829 1727 1597 1450 1302 1152 995 842 712 592 451 281 109 -34 -152 -265 -373 -459 -518 -569 -621 -656 -655 -624 -589 -566 -544 -512 -468 -417 -355 -284 -214 -157 -110 -64 -16 23 48 64 81 94 100 101 102 103 100 92 85 76 66 54 43 33 24 17 11 6 3 1 0 0 0 +-1010 -1330 -1842 -2317 -2619 -2780 -2892 -2979 -3043 -3183 -3534 -4044 -4418 -4405 -4073 -3688 -3364 -2961 -2381 -1761 -1257 -766 -56 863 1696 2250 2694 3324 4201 5128 5944 6642 7250 7728 8046 8247 8358 8337 8175 7952 7735 7463 7045 6529 6041 5585 4995 4128 3050 1972 1040 244 -515 -1304 -2096 -2815 -3441 -4041 -4712 -5474 -6247 -6922 -7388 -7537 -7338 -6939 -6586 -6376 -6156 -5776 -5290 -4820 -4283 -3463 -2350 -1201 -199 750 1763 2736 3515 4173 4886 5596 6077 6349 6705 7263 7721 7762 7494 7250 7085 6756 6157 5455 4799 4127 3388 2685 2046 1290 322 -622 -1287 -1826 -2614 -3631 -4399 -4618 -4574 -4690 -4951 -5053 -4964 -4964 -5153 -5251 -5007 -4562 -4201 -3947 -3598 -3079 -2534 -2085 -1695 -1317 -1017 -860 -806 -783 -788 -821 -793 -625 -418 -402 -686 -1144 -1599 -2011 -2444 -2867 -3151 -3258 -3349 -3603 -3981 -4245 -4218 -3970 -3706 -3543 -3462 -3381 -3208 -2830 -2166 -1279 -352 504 1349 2246 3093 3747 4247 4801 5499 6195 6747 7220 7743 8263 8569 8575 8419 8236 8003 7634 7149 6621 6051 5400 4729 4132 3558 2812 1817 737 -232 -1096 -1992 -2925 -3750 -4417 -5063 -5774 -6391 -6687 -6685 -6638 -6712 -6806 -6759 -6538 -6159 -5564 -4723 -3787 -2964 -2230 -1380 -365 595 1322 1927 2614 3343 3934 4386 4891 5500 6026 6342 6572 6836 6985 6804 6361 5920 5548 5048 4325 3579 3013 2523 1875 1054 250 -447 -1135 -1886 -2615 -3244 -3820 -4387 -4847 -5068 -5072 -4997 -4922 -4834 -4731 -4649 -4570 -4401 -4109 -3768 -3439 -3070 -2598 -2086 -1665 -1352 -1015 -570 -123 122 90 -114 -314 -406 -394 -351 -386 -603 -1031 -1573 
-2072 -2455 -2788 -3172 -3608 -4011 -4320 -4533 -4643 -4597 -4404 -4187 -4057 -3950 -3682 -3181 -2558 -1930 -1287 -570 182 889 1602 2447 3421 4323 4990 5473 5941 6479 7056 7617 8119 8475 8599 8548 8500 8552 8573 8348 7809 7112 6448 5848 5167 4253 3113 1942 952 178 -536 -1350 -2280 -3232 -4135 -4971 -5702 -6269 -6635 -6809 -6803 -6659 -6481 -6365 -6251 -5952 -5378 -4680 -4069 -3582 -3079 -2453 -1692 -797 228 1253 2058 2576 3013 3599 4309 4921 5314 5569 5802 6025 6215 6380 6466 6311 5826 5146 4510 4001 3512 2950 2350 1770 1175 517 -162 -771 -1310 -1870 -2499 -3149 -3733 -4192 -4477 -4549 -4456 -4342 -4329 -4411 -4501 -4538 -4488 -4265 -3781 -3109 -2485 +0 -1 -1 -2 -4 -6 -8 -11 -14 -18 -25 -33 -43 -50 -53 -55 -56 -56 -50 -41 -33 -22 -2 29 62 88 115 152 207 270 335 398 461 522 575 624 667 702 724 741 757 765 756 733 707 683 636 548 421 283 155 37 -83 -218 -362 -502 -635 -770 -928 -1111 -1308 -1494 -1642 -1725 -1728 -1681 -1639 -1630 -1616 -1557 -1464 -1367 -1246 -1032 -717 -376 -64 245 589 935 1227 1488 1779 2080 2305 2458 2646 2922 3166 3244 3190 3141 3125 3033 2812 2534 2267 1982 1654 1332 1030 660 167 -329 -690 -992 -1441 -2030 -2493 -2652 -2663 -2767 -2959 -3059 -3042 -3081 -3237 -3339 -3222 -2970 -2766 -2628 -2422 -2096 -1743 -1450 -1191 -935 -729 -623 -589 -578 -587 -617 -601 -478 -323 -313 -538 -905 -1275 -1616 -1979 -2339 -2590 -2697 -2792 -3025 -3365 -3612 -3612 -3422 -3214 -3092 -3039 -2985 -2848 -2526 -1944 -1154 -320 459 1235 2066 2858 3478 3959 4495 5168 5845 6391 6863 7387 7909 8230 8260 8135 7980 7776 7438 6983 6481 5937 5310 4659 4078 3518 2785 1802 731 -231 -1092 -1986 -2918 -3743 -4412 -5060 -5772 -6390 -6687 -6685 -6637 -6709 -6802 -6751 -6526 -6143 -5545 -4703 -3766 -2944 -2213 -1367 -361 587 1302 1895 2564 3272 3842 4273 4752 5329 5822 6109 6312 6543 6664 6468 6025 5586 5214 4726 4032 3322 2784 2321 1717 960 226 -404 -1019 -1684 -2322 -2864 -3353 -3828 -4204 -4368 -4343 -4252 -4161 -4058 -3944 -3848 -3756 -3590 -3327 -3027 -2742 -2428 -2038 -1623 
-1285 -1034 -770 -429 -92 89 65 -83 -225 -288 -277 -244 -266 -411 -694 -1048 -1364 -1598 -1794 -2017 -2267 -2490 -2648 -2744 -2775 -2712 -2564 -2405 -2299 -2208 -2029 -1728 -1370 -1019 -669 -292 91 441 782 1175 1616 2008 2279 2457 2620 2807 3004 3183 3330 3409 3394 3309 3224 3179 3122 2977 2726 2430 2155 1911 1651 1328 949 578 276 50 -149 -364 -599 -827 -1029 -1204 -1342 -1435 -1475 -1470 -1424 -1351 -1276 -1213 -1153 -1061 -927 -780 -654 -555 -460 -353 -234 -106 29 153 241 289 323 369 421 458 470 469 463 455 444 431 412 379 328 271 222 183 150 116 86 59 36 14 -5 -18 -28 -36 -42 -47 -49 -48 -43 -37 -31 -25 -20 -16 -13 -9 -6 -4 -2 -1 0 +-352 504 1349 2246 3093 3747 4247 4801 5499 6195 6747 7220 7743 8263 8569 8575 8419 8236 8003 7634 7149 6621 6051 5400 4729 4132 3558 2812 1817 737 -232 -1096 -1992 -2925 -3750 -4417 -5063 -5774 -6391 -6687 -6685 -6638 -6712 -6806 -6759 -6538 -6159 -5564 -4723 -3787 -2964 -2230 -1380 -365 595 1322 1927 2614 3343 3934 4386 4891 5500 6026 6342 6572 6836 6985 6804 6361 5920 5548 5048 4325 3579 3013 2523 1875 1054 250 -447 -1135 -1886 -2615 -3244 -3820 -4387 -4847 -5068 -5072 -4997 -4922 -4834 -4731 -4649 -4570 -4401 -4109 -3768 -3439 -3070 -2598 -2086 -1665 -1352 -1015 -570 -123 122 90 -114 -314 -406 -394 -351 -386 -603 -1031 -1573 -2072 -2455 -2788 -3172 -3608 -4011 -4320 -4533 -4643 -4597 -4404 -4187 -4057 -3950 -3682 -3181 -2558 -1930 -1287 -570 182 889 1602 2447 3421 4323 4990 5473 5941 6479 7056 7617 8119 8475 8599 8548 8500 8552 8573 8348 7809 7112 6448 5848 5167 4253 3113 1942 952 178 -536 -1350 -2280 -3232 -4135 -4971 -5702 -6269 -6635 -6809 -6803 -6659 -6481 -6365 -6251 -5952 -5378 -4680 -4069 -3582 -3079 -2453 -1692 -797 228 1253 2058 2576 3013 3599 4309 4921 5314 5569 5802 6025 6215 6380 6466 6311 5826 5146 4510 4001 3512 2950 2350 1770 1175 517 -162 -771 -1310 -1870 -2499 -3149 -3733 -4192 -4477 -4549 -4456 -4342 -4329 -4411 -4501 -4538 -4488 -4265 -3781 -3109 -2485 -2078 -1806 -1462 -984 -500 -133 113 244 186 -64 -296 -253 
36 221 -9 -631 -1394 -2113 -2741 -3229 -3521 -3688 -3941 -4389 -4886 -5184 -5222 -5152 -5117 -5115 -5074 -4943 -4666 -4149 -3365 -2451 -1613 -913 -225 583 1490 2374 3192 4014 4905 5825 6702 7516 8277 8922 9327 9450 9422 9423 9483 9464 9251 8881 8438 7908 7208 6365 5515 4740 3939 2972 1824 620 -523 -1547 -2424 -3163 -3865 -4667 -5575 -6415 -6996 -7305 -7461 -7561 -7621 -7635 -7587 -7386 -6901 -6115 -5201 -4366 -3667 -3011 -2310 -1555 -758 99 1005 1880 2637 3270 3846 4420 4979 5448 5743 5831 5768 5674 5627 5591 5475 5242 4935 4574 4103 3459 2678 1890 1197 597 25 -585 -1233 -1866 -2421 -2872 -3237 -3557 -3835 -4027 -4088 -4027 -3907 -3782 -3660 -3509 -3293 -3007 -2675 -2314 -1937 -1558 -1198 -862 -526 -172 144 319 307 191 121 147 166 39 -259 -629 -972 -1312 -1751 -2348 -3034 -3659 +0 0 0 1 3 7 11 16 24 34 46 58 73 92 110 125 139 154 168 177 183 187 187 181 173 163 152 129 89 38 -14 -66 -127 -198 -269 -335 -405 -487 -567 -624 -655 -681 -722 -765 -793 -800 -785 -739 -653 -545 -443 -346 -222 -61 102 235 355 497 657 798 917 1055 1221 1378 1492 1591 1700 1785 1785 1714 1637 1572 1467 1288 1091 940 806 612 352 85 -157 -405 -687 -973 -1231 -1480 -1732 -1951 -2079 -2120 -2128 -2133 -2133 -2125 -2124 -2124 -2080 -1975 -1840 -1707 -1547 -1330 -1084 -879 -724 -552 -315 -69 69 51 -67 -186 -243 -239 -216 -240 -379 -656 -1012 -1349 -1616 -1857 -2136 -2455 -2759 -3003 -3183 -3293 -3294 -3186 -3059 -2993 -2941 -2766 -2411 -1956 -1489 -1002 -447 143 708 1286 1981 2790 3552 4129 4561 4986 5476 6003 6521 6997 7349 7503 7502 7503 7591 7652 7492 7044 6448 5876 5355 4754 3931 2890 1810 891 167 -506 -1279 -2168 -3084 -3959 -4775 -5494 -6058 -6430 -6617 -6629 -6505 -6345 -6246 -6148 -5865 -5309 -4628 -4031 -3554 -3058 -2440 -1685 -795 227 1250 2055 2574 3011 3598 4309 4921 5312 5566 5797 6017 6202 6362 6443 6283 5793 5110 4473 3962 3472 2911 2315 1740 1152 506 -159 -752 -1273 -1813 -2415 -3034 -3586 -4013 -4272 -4325 -4222 -4098 -4070 -4130 -4197 -4214 -4149 -3925 -3463 -2834 -2254 -1875 
-1621 -1305 -874 -442 -117 98 211 160 -55 -252 -214 30 184 -8 -519 -1138 -1711 -2202 -2574 -2785 -2893 -3066 -3385 -3737 -3929 -3923 -3836 -3775 -3737 -3671 -3542 -3310 -2914 -2339 -1686 -1098 -615 -150 383 969 1527 2029 2521 3044 3569 4056 4491 4882 5192 5355 5354 5265 5192 5151 5067 4880 4613 4317 3984 3575 3107 2649 2239 1830 1357 818 273 -227 -659 -1014 -1298 -1556 -1843 -2159 -2434 -2602 -2661 -2662 -2640 -2605 -2552 -2481 -2361 -2155 -1865 -1550 -1270 -1040 -833 -623 -409 -194 24 243 442 603 726 830 924 1010 1072 1093 1074 1027 977 936 898 847 781 709 632 544 440 327 221 134 64 2 -58 -115 -166 -204 -230 -245 -255 -260 -257 -246 -228 -207 -187 -168 -150 -131 -111 -91 -72 -55 -40 -28 -19 -10 -3 2 4 3 1 0 1 0 0 -1 -2 -2 -2 -2 -2 -1 0 +7112 6448 5848 5167 4253 3113 1942 952 178 -536 -1350 -2280 -3232 -4135 -4971 -5702 -6269 -6635 -6809 -6803 -6659 -6481 -6365 -6251 -5952 -5378 -4680 -4069 -3582 -3079 -2453 -1692 -797 228 1253 2058 2576 3013 3599 4309 4921 5314 5569 5802 6025 6215 6380 6466 6311 5826 5146 4510 4001 3512 2950 2350 1770 1175 517 -162 -771 -1310 -1870 -2499 -3149 -3733 -4192 -4477 -4549 -4456 -4342 -4329 -4411 -4501 -4538 -4488 -4265 -3781 -3109 -2485 -2078 -1806 -1462 -984 -500 -133 113 244 186 -64 -296 -253 36 221 -9 -631 -1394 -2113 -2741 -3229 -3521 -3688 -3941 -4389 -4886 -5184 -5222 -5152 -5117 -5115 -5074 -4943 -4666 -4149 -3365 -2451 -1613 -913 -225 583 1490 2374 3192 4014 4905 5825 6702 7516 8277 8922 9327 9450 9422 9423 9483 9464 9251 8881 8438 7908 7208 6365 5515 4740 3939 2972 1824 620 -523 -1547 -2424 -3163 -3865 -4667 -5575 -6415 -6996 -7305 -7461 -7561 -7621 -7635 -7587 -7386 -6901 -6115 -5201 -4366 -3667 -3011 -2310 -1555 -758 99 1005 1880 2637 3270 3846 4420 4979 5448 5743 5831 5768 5674 5627 5591 5475 5242 4935 4574 4103 3459 2678 1890 1197 597 25 -585 -1233 -1866 -2421 -2872 -3237 -3557 -3835 -4027 -4088 -4027 -3907 -3782 -3660 -3509 -3293 -3007 -2675 -2314 -1937 -1558 -1198 -862 -526 -172 144 319 307 191 121 147 166 39 -259 -629 
-972 -1312 -1751 -2348 -3034 -3659 -4112 -4415 -4686 -5017 -5368 -5604 -5647 -5567 -5497 -5471 -5380 -5094 -4594 -3952 -3247 -2516 -1773 -1035 -265 606 1598 2645 3654 4615 5559 6472 7275 7927 8473 8979 9436 9777 9958 9986 9881 9658 9340 8962 8530 7997 7311 6483 5577 4649 3687 2630 1455 246 -858 -1772 -2571 -3400 -4318 -5228 -5994 -6576 -7020 -7365 -7616 -7783 -7873 -7858 -7687 -7362 -6934 -6423 -5799 -5074 -4344 -3669 -2983 -2182 -1280 -381 476 1347 2255 3109 3837 4467 5042 5518 5831 6040 6251 6438 6465 6294 6051 5836 5572 5131 4535 3904 3270 2577 1861 1240 712 112 -652 -1399 -1891 -2149 -2423 -2831 -3220 -3406 -3415 -3396 -3389 -3307 -3135 -2954 -2811 -2644 -2411 -2175 -1984 -1775 -1458 -1083 -805 -692 -651 -568 -436 -310 -229 -223 -359 -680 -1115 -1527 -1875 -2223 -2623 -3020 -3362 -3704 -4139 -4656 -5134 -5483 -5708 -5845 -5894 -5850 -5749 -5603 -5334 -4850 -4180 -3464 -2805 -2188 -1540 -827 -43 830 +0 1 2 3 5 6 5 3 0 -4 -10 -19 -31 -47 -65 -84 -105 -125 -143 -158 -171 -184 -198 -211 -218 -213 -200 -187 -177 -163 -139 -102 -51 15 89 155 205 253 318 401 481 544 598 651 706 760 813 858 872 837 767 698 642 584 508 418 326 223 101 -33 -162 -283 -416 -572 -742 -905 -1043 -1145 -1194 -1202 -1202 -1228 -1283 -1341 -1384 -1402 -1364 -1237 -1040 -850 -726 -645 -533 -366 -190 -52 44 98 76 -27 -127 -110 15 99 -5 -294 -659 -1016 -1339 -1602 -1775 -1888 -2048 -2316 -2616 -2817 -2878 -2880 -2900 -2938 -2954 -2916 -2789 -2512 -2063 -1522 -1014 -581 -145 379 980 1580 2148 2731 3373 4048 4705 5330 5930 6454 6813 6969 7013 7078 7186 7236 7134 6907 6616 6253 5745 5112 4464 3866 3236 2459 1520 520 -443 -1317 -2076 -2726 -3352 -4073 -4894 -5664 -6211 -6521 -6696 -6821 -6911 -6959 -6948 -6797 -6379 -5678 -4850 -4088 -3447 -2842 -2189 -1479 -724 94 965 1811 2548 3168 3737 4306 4863 5333 5635 5734 5683 5600 5563 5537 5430 5206 4907 4553 4088 3449 2672 1887 1196 596 24 -585 -1233 -1866 -2420 -2870 -3234 -3551 -3825 -4014 -4071 -4005 -3881 -3752 -3626 -3470 -3251 -2963 -2631 -2271 -1897 
-1522 -1168 -838 -510 -167 138 306 293 182 115 139 156 36 -243 -587 -903 -1213 -1612 -2151 -2766 -3318 -3710 -3963 -4183 -4454 -4739 -4919 -4928 -4828 -4738 -4685 -4578 -4306 -3856 -3294 -2688 -2068 -1447 -838 -213 483 1263 2074 2842 3559 4250 4904 5465 5900 6249 6558 6825 7005 7062 7011 6867 6642 6355 6032 5679 5263 4758 4170 3545 2920 2288 1611 880 147 -507 -1032 -1477 -1927 -2414 -2881 -3257 -3521 -3704 -3827 -3898 -3922 -3906 -3837 -3694 -3478 -3222 -2934 -2604 -2239 -1883 -1563 -1247 -895 -515 -151 184 511 838 1132 1368 1559 1723 1844 1906 1930 1951 1963 1925 1830 1715 1614 1501 1346 1159 971 791 606 425 275 153 23 -133 -276 -361 -397 -432 -488 -537 -548 -529 -507 -488 -457 -417 -377 -344 -310 -271 -234 -204 -174 -136 -96 -68 -56 -50 -41 -30 -20 -14 -13 -19 -34 -52 -66 -75 -82 -89 -94 -96 -95 -96 -98 -97 -92 -84 -76 -67 -56 -47 -39 -30 -22 -15 -10 -6 -3 -2 -1 -1 0 +-7621 -7635 -7587 -7386 -6901 -6115 -5201 -4366 -3667 -3011 -2310 -1555 -758 99 1005 1880 2637 3270 3846 4420 4979 5448 5743 5831 5768 5674 5627 5591 5475 5242 4935 4574 4103 3459 2678 1890 1197 597 25 -585 -1233 -1866 -2421 -2872 -3237 -3557 -3835 -4027 -4088 -4027 -3907 -3782 -3660 -3509 -3293 -3007 -2675 -2314 -1937 -1558 -1198 -862 -526 -172 144 319 307 191 121 147 166 39 -259 -629 -972 -1312 -1751 -2348 -3034 -3659 -4112 -4415 -4686 -5017 -5368 -5604 -5647 -5567 -5497 -5471 -5380 -5094 -4594 -3952 -3247 -2516 -1773 -1035 -265 606 1598 2645 3654 4615 5559 6472 7275 7927 8473 8979 9436 9777 9958 9986 9881 9658 9340 8962 8530 7997 7311 6483 5577 4649 3687 2630 1455 246 -858 -1772 -2571 -3400 -4318 -5228 -5994 -6576 -7020 -7365 -7616 -7783 -7873 -7858 -7687 -7362 -6934 -6423 -5799 -5074 -4344 -3669 -2983 -2182 -1280 -381 476 1347 2255 3109 3837 4467 5042 5518 5831 6040 6251 6438 6465 6294 6051 5836 5572 5131 4535 3904 3270 2577 1861 1240 712 112 -652 -1399 -1891 -2149 -2423 -2831 -3220 -3406 -3415 -3396 -3389 -3307 -3135 -2954 -2811 -2644 -2411 -2175 -1984 -1775 -1458 -1083 -805 -692 -651 -568 -436 
-310 -229 -223 -359 -680 -1115 -1527 -1875 -2223 -2623 -3020 -3362 -3704 -4139 -4656 -5134 -5483 -5708 -5845 -5894 -5850 -5749 -5603 -5334 -4850 -4180 -3464 -2805 -2188 -1540 -827 -43 830 1801 2803 3733 4561 5374 6265 7208 8070 8720 9134 9374 9542 9703 9843 9876 9732 9425 9031 8581 8021 7308 6497 5674 4819 3817 2649 1469 450 -441 -1395 -2500 -3605 -4504 -5178 -5792 -6453 -7092 -7583 -7896 -8094 -8205 -8187 -8001 -7661 -7215 -6715 -6213 -5704 -5098 -4304 -3365 -2446 -1645 -863 59 1113 2121 2948 3643 4322 4985 5519 5872 6102 6300 6494 6661 6756 6717 6480 6046 5515 4997 4503 3974 3377 2754 2150 1544 899 232 -399 -966 -1492 -1996 -2467 -2881 -3229 -3489 -3628 -3630 -3547 -3470 -3448 -3450 -3408 -3280 -3077 -2828 -2549 -2261 -1980 -1717 -1469 -1233 -1029 -875 -772 -717 -723 -814 -969 -1109 -1183 -1234 -1371 -1665 -2079 -2530 -2963 -3355 -3687 -3955 -4190 -4450 -4727 -4936 -5009 -4975 -4915 -4857 -4736 -4475 -4069 -3561 -2989 -2358 -1669 -924 -131 701 1547 2382 3196 3996 4782 5526 6198 6788 7304 7755 8132 8418 8615 8748 8834 8851 8743 8473 8067 +0 -2 -4 -6 -9 -12 -14 -15 -17 -17 -16 -13 -8 1 13 27 43 61 80 102 127 154 178 196 211 224 240 256 270 276 278 274 261 233 191 143 95 50 2 -55 -121 -192 -261 -323 -380 -436 -489 -535 -565 -580 -583 -586 -588 -585 -568 -536 -494 -441 -382 -317 -251 -187 -117 -40 33 77 76 48 31 39 45 11 -76 -188 -297 -410 -560 -768 -1015 -1251 -1436 -1575 -1707 -1866 -2037 -2170 -2230 -2240 -2255 -2287 -2291 -2208 -2027 -1775 -1484 -1169 -838 -498 -130 300 805 1353 1898 2434 2976 3515 4008 4429 4801 5155 5492 5766 5951 6043 6055 5993 5867 5697 5487 5205 4812 4316 3753 3163 2535 1828 1021 174 -615 -1282 -1879 -2508 -3215 -3928 -4543 -5029 -5415 -5729 -5973 -6155 -6276 -6312 -6224 -6005 -5699 -5316 -4834 -4259 -3672 -3122 -2555 -1881 -1110 -333 417 1189 2001 2775 3443 4029 4571 5028 5339 5557 5777 5977 6027 5892 5687 5506 5278 4877 4326 3737 3140 2482 1798 1201 691 109 -637 -1370 -1856 -2114 -2388 -2795 -3184 -3374 -3388 -3373 -3370 -3293 -3125 -2947 
-2806 -2641 -2410 -2174 -1984 -1775 -1458 -1083 -805 -692 -651 -567 -435 -309 -228 -222 -357 -675 -1105 -1510 -1851 -2191 -2580 -2964 -3292 -3619 -4033 -4525 -4975 -5299 -5499 -5614 -5643 -5582 -5466 -5308 -5034 -4559 -3914 -3230 -2605 -2023 -1418 -758 -40 752 1624 2515 3331 4048 4744 5498 6289 6998 7515 7820 7975 8065 8144 8204 8173 7997 7687 7311 6892 6393 5778 5094 4413 3716 2918 2007 1103 334 -326 -1020 -1809 -2584 -3195 -3636 -4026 -4439 -4826 -5105 -5257 -5328 -5341 -5267 -5087 -4813 -4478 -4115 -3761 -3410 -3008 -2506 -1933 -1387 -920 -476 32 595 1119 1531 1864 2177 2473 2694 2821 2882 2926 2966 2990 2980 2910 2759 2527 2262 2010 1777 1538 1281 1024 783 550 313 79 -134 -316 -477 -624 -753 -859 -939 -989 -1004 -979 -931 -887 -858 -836 -803 -751 -684 -611 -534 -459 -390 -327 -271 -220 -178 -146 -125 -111 -108 -118 -134 -148 -151 -151 -161 -187 -224 -260 -291 -313 -327 -334 -335 -337 -339 -334 -320 -299 -278 -257 -234 -206 -174 -141 -110 -80 -52 -27 -4 16 32 44 53 58 61 62 59 54 49 43 35 28 23 17 10 6 4 2 0 +5042 5518 5831 6040 6251 6438 6465 6294 6051 5836 5572 5131 4535 3904 3270 2577 1861 1240 712 112 -652 -1399 -1891 -2149 -2423 -2831 -3220 -3406 -3415 -3396 -3389 -3307 -3135 -2954 -2811 -2644 -2411 -2175 -1984 -1775 -1458 -1083 -805 -692 -651 -568 -436 -310 -229 -223 -359 -680 -1115 -1527 -1875 -2223 -2623 -3020 -3362 -3704 -4139 -4656 -5134 -5483 -5708 -5845 -5894 -5850 -5749 -5603 -5334 -4850 -4180 -3464 -2805 -2188 -1540 -827 -43 830 1801 2803 3733 4561 5374 6265 7208 8070 8720 9134 9374 9542 9703 9843 9876 9732 9425 9031 8581 8021 7308 6497 5674 4819 3817 2649 1469 450 -441 -1395 -2500 -3605 -4504 -5178 -5792 -6453 -7092 -7583 -7896 -8094 -8205 -8187 -8001 -7661 -7215 -6715 -6213 -5704 -5098 -4304 -3365 -2446 -1645 -863 59 1113 2121 2948 3643 4322 4985 5519 5872 6102 6300 6494 6661 6756 6717 6480 6046 5515 4997 4503 3974 3377 2754 2150 1544 899 232 -399 -966 -1492 -1996 -2467 -2881 -3229 -3489 -3628 -3630 -3547 -3470 -3448 -3450 -3408 -3280 -3077 -2828 
-2549 -2261 -1980 -1717 -1469 -1233 -1029 -875 -772 -717 -723 -814 -969 -1109 -1183 -1234 -1371 -1665 -2079 -2530 -2963 -3355 -3687 -3955 -4190 -4450 -4727 -4936 -5009 -4975 -4915 -4857 -4736 -4475 -4069 -3561 -2989 -2358 -1669 -924 -131 701 1547 2382 3196 3996 4782 5526 6198 6788 7304 7755 8132 8418 8615 8748 8834 8851 8743 8473 8067 7594 7096 6550 5912 5178 4389 3586 2769 1917 1031 137 -741 -1605 -2456 -3271 -3998 -4602 -5093 -5499 -5841 -6116 -6321 -6464 -6536 -6503 -6343 -6080 -5780 -5480 -5137 -4655 -3989 -3201 -2433 -1777 -1189 -537 242 1057 1754 2294 2789 3365 4000 4543 4874 5015 5070 5099 5079 4959 4732 4450 4165 3868 3495 3000 2428 1877 1387 914 403 -125 -598 -988 -1336 -1684 -2008 -2252 -2391 -2459 -2524 -2635 -2790 -2920 -2956 -2887 -2757 -2602 -2419 -2206 -2004 -1840 -1680 -1474 -1256 -1114 -1069 -1022 -902 -778 -787 -937 -1072 -1077 -1031 -1119 -1426 -1837 -2174 -2379 -2545 -2799 -3165 -3541 -3792 -3874 -3855 -3845 -3898 -3968 -3970 -3866 -3700 -3530 -3336 -3022 -2530 -1931 -1357 -859 -350 279 1012 1742 2398 3004 3604 4186 4726 5254 5816 6393 6880 7207 7393 7519 7632 7712 7711 7590 7344 6996 6594 6187 5780 5329 4792 4160 3452 2690 1888 1070 279 -463 -1169 -1860 -2519 -3090 -3542 +0 1 2 4 7 12 17 21 26 32 38 41 43 43 42 37 30 23 14 2 -17 -40 -59 -73 -89 -112 -138 -157 -169 -180 -192 -199 -200 -200 -202 -201 -193 -184 -176 -166 -143 -112 -87 -78 -77 -70 -56 -42 -32 -33 -54 -106 -180 -255 -324 -397 -484 -576 -662 -752 -866 -1005 -1141 -1255 -1344 -1416 -1467 -1496 -1509 -1511 -1476 -1375 -1216 -1032 -856 -684 -493 -271 -15 283 628 999 1359 1695 2038 2425 2845 3246 3576 3817 3991 4135 4280 4419 4511 4521 4452 4339 4189 3979 3682 3324 2947 2542 2043 1438 809 251 -250 -802 -1456 -2127 -2692 -3134 -3550 -4005 -4456 -4821 -5080 -5269 -5401 -5451 -5386 -5213 -4963 -4668 -4363 -4046 -3653 -3114 -2459 -1805 -1225 -649 44 851 1635 2293 2856 3417 3973 4432 4753 4977 5177 5374 5551 5670 5677 5513 5176 4752 4333 3929 3487 2981 2444 1919 1385 810 210 -364 -885 -1373 
-1845 -2291 -2687 -3024 -3280 -3424 -3439 -3373 -3311 -3301 -3314 -3284 -3170 -2982 -2748 -2484 -2209 -1939 -1685 -1445 -1215 -1016 -866 -765 -712 -719 -810 -965 -1106 -1180 -1232 -1370 -1664 -2078 -2530 -2963 -3355 -3687 -3954 -4187 -4445 -4718 -4923 -4992 -4954 -4888 -4824 -4698 -4433 -4024 -3515 -2946 -2319 -1638 -905 -128 683 1503 2308 3088 3849 4592 5289 5913 6453 6918 7317 7643 7881 8032 8122 8165 8144 8006 7722 7314 6850 6368 5846 5248 4571 3852 3128 2401 1652 882 116 -627 -1348 -2048 -2708 -3286 -3754 -4124 -4417 -4656 -4837 -4957 -5028 -5041 -4973 -4807 -4568 -4303 -4042 -3753 -3368 -2859 -2271 -1709 -1236 -818 -366 162 703 1154 1493 1794 2139 2512 2819 2986 3035 3030 3007 2956 2847 2681 2486 2295 2101 1871 1582 1261 960 698 453 196 -61 -283 -460 -611 -757 -886 -976 -1019 -1028 -1036 -1061 -1102 -1131 -1122 -1074 -1005 -929 -845 -755 -670 -602 -537 -461 -383 -332 -311 -290 -250 -210 -207 -240 -267 -261 -243 -256 -317 -397 -455 -483 -501 -534 -584 -632 -654 -646 -620 -596 -582 -571 -549 -514 -472 -432 -391 -340 -272 -199 -133 -81 -32 23 80 131 171 203 229 251 266 277 286 293 293 285 270 253 236 218 197 176 154 131 109 90 74 59 45 33 23 15 8 3 0 -1 -2 -2 -2 -1 0 +232 -399 -966 -1492 -1996 -2467 -2881 -3229 -3489 -3628 -3630 -3547 -3470 -3448 -3450 -3408 -3280 -3077 -2828 -2549 -2261 -1980 -1717 -1469 -1233 -1029 -875 -772 -717 -723 -814 -969 -1109 -1183 -1234 -1371 -1665 -2079 -2530 -2963 -3355 -3687 -3955 -4190 -4450 -4727 -4936 -5009 -4975 -4915 -4857 -4736 -4475 -4069 -3561 -2989 -2358 -1669 -924 -131 701 1547 2382 3196 3996 4782 5526 6198 6788 7304 7755 8132 8418 8615 8748 8834 8851 8743 8473 8067 7594 7096 6550 5912 5178 4389 3586 2769 1917 1031 137 -741 -1605 -2456 -3271 -3998 -4602 -5093 -5499 -5841 -6116 -6321 -6464 -6536 -6503 -6343 -6080 -5780 -5480 -5137 -4655 -3989 -3201 -2433 -1777 -1189 -537 242 1057 1754 2294 2789 3365 4000 4543 4874 5015 5070 5099 5079 4959 4732 4450 4165 3868 3495 3000 2428 1877 1387 914 403 -125 -598 -988 -1336 -1684 -2008 
-2252 -2391 -2459 -2524 -2635 -2790 -2920 -2956 -2887 -2757 -2602 -2419 -2206 -2004 -1840 -1680 -1474 -1256 -1114 -1069 -1022 -902 -778 -787 -937 -1072 -1077 -1031 -1119 -1426 -1837 -2174 -2379 -2545 -2799 -3165 -3541 -3792 -3874 -3855 -3845 -3898 -3968 -3970 -3866 -3700 -3530 -3336 -3022 -2530 -1931 -1357 -859 -350 279 1012 1742 2398 3004 3604 4186 4726 5254 5816 6393 6880 7207 7393 7519 7632 7712 7711 7590 7344 6996 6594 6187 5780 5329 4792 4160 3452 2690 1888 1070 279 -463 -1169 -1860 -2519 -3090 -3542 -3917 -4278 -4643 -4959 -5172 -5284 -5347 -5378 -5351 -5226 -5011 -4740 -4437 -4096 -3694 -3212 -2673 -2147 -1696 -1304 -889 -382 182 707 1153 1563 1983 2383 2711 2962 3160 3291 3311 3249 3196 3213 3237 3164 2969 2716 2463 2192 1874 1521 1183 902 687 513 329 88 -199 -463 -650 -793 -971 -1198 -1398 -1497 -1510 -1508 -1525 -1542 -1538 -1530 -1547 -1598 -1673 -1757 -1828 -1864 -1865 -1857 -1858 -1847 -1816 -1791 -1823 -1920 -2023 -2085 -2121 -2184 -2291 -2398 -2459 -2485 -2512 -2551 -2581 -2592 -2595 -2598 -2573 -2488 -2346 -2192 -2060 -1928 -1749 -1505 -1217 -924 -649 -377 -79 271 672 1094 1507 1901 2290 2672 3021 3324 3612 3913 4219 4495 4733 4951 5154 5287 5307 5242 5174 5145 5104 4959 4671 4302 3942 3632 3321 2935 2454 1944 1486 1093 695 232 -274 -739 -1104 -1398 -1682 -1974 -2220 -2380 -2483 -2603 -2771 -2933 -3022 -3016 -2964 -2912 -2877 -2830 -2729 -2552 +0 -1 -1 -2 -3 -5 -8 -12 -16 -21 -25 -29 -34 -39 -45 -50 -55 -58 -60 -60 -58 -57 -54 -50 -46 -41 -38 -36 -36 -39 -46 -59 -71 -81 -89 -104 -133 -176 -225 -277 -329 -379 -425 -471 -522 -579 -630 -666 -688 -707 -725 -734 -719 -678 -614 -533 -435 -318 -182 -27 146 333 529 731 940 1158 1374 1584 1781 1968 2145 2304 2447 2565 2667 2758 2828 2858 2831 2757 2651 2531 2385 2198 1964 1699 1415 1114 786 430 58 -322 -709 -1103 -1495 -1858 -2175 -2448 -2686 -2898 -3082 -3235 -3359 -3449 -3482 -3446 -3351 -3231 -3106 -2950 -2710 -2353 -1914 -1473 -1089 -738 -338 153 679 1141 1509 1856 2264 2721 3124 3387 3521 3595 3653 3674 
3622 3490 3312 3128 2931 2672 2313 1888 1471 1096 728 323 -102 -488 -812 -1106 -1404 -1686 -1904 -2035 -2106 -2176 -2286 -2435 -2563 -2610 -2563 -2461 -2336 -2183 -2001 -1827 -1686 -1546 -1363 -1167 -1039 -1001 -961 -852 -737 -749 -894 -1027 -1035 -994 -1082 -1382 -1785 -2119 -2324 -2492 -2747 -3113 -3490 -3743 -3831 -3819 -3815 -3872 -3946 -3953 -3853 -3691 -3524 -3332 -3020 -2529 -1931 -1357 -859 -350 278 1011 1739 2393 2995 3591 4167 4699 5218 5769 6332 6802 7113 7284 7394 7488 7550 7532 7395 7136 6779 6371 5960 5551 5101 4572 3954 3269 2538 1774 1001 260 -430 -1081 -1712 -2307 -2817 -3212 -3534 -3840 -4145 -4403 -4566 -4638 -4666 -4664 -4612 -4475 -4264 -4007 -3725 -3414 -3058 -2640 -2181 -1739 -1363 -1040 -703 -300 141 545 881 1184 1489 1773 1999 2163 2285 2358 2348 2281 2221 2209 2202 2129 1976 1787 1603 1410 1191 955 734 552 415 306 194 51 -115 -263 -364 -437 -528 -642 -738 -778 -773 -760 -757 -753 -739 -723 -719 -730 -752 -776 -793 -794 -780 -762 -748 -730 -704 -680 -678 -700 -722 -728 -725 -730 -749 -767 -768 -758 -749 -742 -732 -717 -700 -682 -658 -619 -569 -516 -472 -429 -378 -315 -247 -182 -124 -70 -15 46 111 175 233 283 329 369 401 423 441 458 473 482 485 484 480 468 447 418 391 368 345 315 280 242 207 179 152 125 97 71 50 33 19 5 -7 -16 -21 -24 -25 -26 -25 -23 -21 -18 -16 -13 -11 -9 -6 -4 -3 -2 -1 0 +-2206 -2004 -1840 -1680 -1474 -1256 -1114 -1069 -1022 -902 -778 -787 -937 -1072 -1077 -1031 -1119 -1426 -1837 -2174 -2379 -2545 -2799 -3165 -3541 -3792 -3874 -3855 -3845 -3898 -3968 -3970 -3866 -3700 -3530 -3336 -3022 -2530 -1931 -1357 -859 -350 279 1012 1742 2398 3004 3604 4186 4726 5254 5816 6393 6880 7207 7393 7519 7632 7712 7711 7590 7344 6996 6594 6187 5780 5329 4792 4160 3452 2690 1888 1070 279 -463 -1169 -1860 -2519 -3090 -3542 -3917 -4278 -4643 -4959 -5172 -5284 -5347 -5378 -5351 -5226 -5011 -4740 -4437 -4096 -3694 -3212 -2673 -2147 -1696 -1304 -889 -382 182 707 1153 1563 1983 2383 2711 2962 3160 3291 3311 3249 3196 3213 3237 3164 2969 2716 2463 
2192 1874 1521 1183 902 687 513 329 88 -199 -463 -650 -793 -971 -1198 -1398 -1497 -1510 -1508 -1525 -1542 -1538 -1530 -1547 -1598 -1673 -1757 -1828 -1864 -1865 -1857 -1858 -1847 -1816 -1791 -1823 -1920 -2023 -2085 -2121 -2184 -2291 -2398 -2459 -2485 -2512 -2551 -2581 -2592 -2595 -2598 -2573 -2488 -2346 -2192 -2060 -1928 -1749 -1505 -1217 -924 -649 -377 -79 271 672 1094 1507 1901 2290 2672 3021 3324 3612 3913 4219 4495 4733 4951 5154 5287 5307 5242 5174 5145 5104 4959 4671 4302 3942 3632 3321 2935 2454 1944 1486 1093 695 232 -274 -739 -1104 -1398 -1682 -1974 -2220 -2380 -2483 -2603 -2771 -2933 -3022 -3016 -2964 -2912 -2877 -2830 -2729 -2552 -2326 -2117 -1963 -1820 -1611 -1317 -1014 -764 -542 -277 38 326 540 715 904 1111 1293 1427 1524 1590 1621 1631 1639 1643 1624 1584 1551 1522 1446 1296 1120 991 913 829 686 500 314 147 -18 -200 -413 -633 -819 -936 -1018 -1139 -1318 -1474 -1524 -1496 -1517 -1651 -1831 -1948 -1979 -1977 -1986 -2001 -2010 -2036 -2092 -2152 -2180 -2165 -2129 -2086 -2035 -1981 -1922 -1841 -1712 -1544 -1379 -1255 -1164 -1077 -979 -879 -788 -706 -626 -543 -442 -313 -168 -41 64 172 314 476 619 729 845 1002 1193 1370 1511 1632 1771 1934 2090 2205 2270 2323 2403 2522 2649 2736 2764 2756 2750 2758 2759 2719 2636 2524 2416 2322 2223 2084 1888 1658 1444 1272 1121 948 732 482 219 -35 -260 -444 -599 -762 -950 -1135 -1276 -1369 -1452 -1558 -1675 -1755 -1768 -1736 -1694 -1674 -1673 -1664 -1618 -1535 -1446 -1382 -1320 -1206 -1012 -776 -573 -431 -318 -188 +0 -1 -1 -2 -2 -3 -3 -4 -5 -6 -6 -7 -9 -13 -14 -16 -19 -27 -39 -51 -61 -73 -87 -107 -130 -150 -166 -177 -190 -206 -224 -239 -247 -251 -253 -253 -242 -214 -172 -127 -85 -36 29 113 204 293 382 478 578 679 783 900 1027 1145 1242 1317 1385 1453 1517 1564 1588 1584 1554 1508 1456 1399 1325 1224 1091 930 744 535 311 83 -142 -366 -595 -824 -1033 -1211 -1368 -1526 -1692 -1844 -1963 -2047 -2111 -2164 -2195 -2185 -2134 -2055 -1958 -1839 -1688 -1493 -1263 -1032 -829 -647 -448 -196 94 373 617 849 1092 1331 1536 1700 1839 1941 
1978 1966 1958 1994 2033 2011 1909 1767 1621 1459 1261 1034 813 626 482 363 235 63 -146 -342 -484 -596 -736 -917 -1079 -1165 -1185 -1193 -1216 -1239 -1246 -1248 -1272 -1323 -1395 -1475 -1546 -1586 -1597 -1601 -1612 -1612 -1594 -1582 -1619 -1714 -1816 -1881 -1924 -1991 -2099 -2207 -2273 -2308 -2343 -2389 -2426 -2446 -2459 -2470 -2455 -2382 -2254 -2112 -1991 -1869 -1700 -1467 -1189 -905 -637 -371 -78 267 664 1083 1494 1888 2277 2660 3010 3315 3604 3908 4215 4492 4731 4951 5154 5285 5304 5238 5167 5134 5090 4942 4650 4277 3915 3602 3289 2902 2422 1915 1461 1072 680 226 -267 -719 -1070 -1351 -1621 -1896 -2126 -2271 -2361 -2466 -2615 -2757 -2830 -2813 -2752 -2692 -2648 -2592 -2488 -2314 -2099 -1900 -1753 -1616 -1423 -1156 -885 -663 -468 -238 32 275 453 595 748 912 1054 1155 1224 1267 1281 1278 1274 1267 1241 1200 1165 1132 1066 946 810 710 647 582 476 343 213 98 -12 -132 -269 -408 -521 -588 -632 -698 -798 -881 -899 -871 -872 -936 -1024 -1074 -1076 -1059 -1048 -1040 -1029 -1026 -1038 -1051 -1048 -1023 -990 -953 -914 -874 -833 -784 -716 -634 -555 -496 -451 -409 -365 -321 -282 -247 -214 -182 -145 -101 -53 -13 19 50 89 131 166 191 215 249 288 322 345 362 382 404 424 433 432 428 428 434 441 439 427 411 395 381 366 346 322 295 271 249 227 204 176 146 121 101 84 67 49 30 13 -2 -14 -22 -28 -33 -38 -42 -43 -43 -42 -40 -39 -37 -34 -29 -25 -22 -19 -16 -14 -11 -9 -7 -5 -4 -2 -1 -1 -1 -1 0 +-2121 -2184 -2291 -2398 -2459 -2485 -2512 -2551 -2581 -2592 -2595 -2598 -2573 -2488 -2346 -2192 -2060 -1928 -1749 -1505 -1217 -924 -649 -377 -79 271 672 1094 1507 1901 2290 2672 3021 3324 3612 3913 4219 4495 4733 4951 5154 5287 5307 5242 5174 5145 5104 4959 4671 4302 3942 3632 3321 2935 2454 1944 1486 1093 695 232 -274 -739 -1104 -1398 -1682 -1974 -2220 -2380 -2483 -2603 -2771 -2933 -3022 -3016 -2964 -2912 -2877 -2830 -2729 -2552 -2326 -2117 -1963 -1820 -1611 -1317 -1014 -764 -542 -277 38 326 540 715 904 1111 1293 1427 1524 1590 1621 1631 1639 1643 1624 1584 1551 1522 1446 1296 1120 991 913 829 
686 500 314 147 -18 -200 -413 -633 -819 -936 -1018 -1139 -1318 -1474 -1524 -1496 -1517 -1651 -1831 -1948 -1979 -1977 -1986 -2001 -2010 -2036 -2092 -2152 -2180 -2165 -2129 -2086 -2035 -1981 -1922 -1841 -1712 -1544 -1379 -1255 -1164 -1077 -979 -879 -788 -706 -626 -543 -442 -313 -168 -41 64 172 314 476 619 729 845 1002 1193 1370 1511 1632 1771 1934 2090 2205 2270 2323 2403 2522 2649 2736 2764 2756 2750 2758 2759 2719 2636 2524 2416 2322 2223 2084 1888 1658 1444 1272 1121 948 732 482 219 -35 -260 -444 -599 -762 -950 -1135 -1276 -1369 -1452 -1558 -1675 -1755 -1768 -1736 -1694 -1674 -1673 -1664 -1618 -1535 -1446 -1382 -1320 -1206 -1012 -776 -573 -431 -318 -188 -41 98 219 343 488 643 782 905 1015 1110 1177 1201 1179 1130 1086 1082 1127 1190 1210 1148 1021 878 744 597 408 180 -40 -211 -341 -484 -689 -949 -1202 -1389 -1516 -1644 -1809 -1972 -2063 -2066 -2047 -2071 -2135 -2181 -2164 -2102 -2035 -1972 -1885 -1753 -1590 -1431 -1295 -1169 -1024 -845 -644 -452 -280 -114 57 226 373 498 617 739 855 946 1009 1045 1067 1076 1074 1060 1039 1023 1017 1004 960 876 778 693 626 570 527 498 473 435 389 352 323 286 237 211 248 333 403 426 432 473 553 637 691 716 728 746 786 855 934 975 950 893 860 858 831 742 617 525 474 420 326 218 134 59 -56 -218 -369 -455 -485 -509 -559 -627 -684 -713 -714 -710 -725 -769 -820 -836 -798 -709 -593 -471 -356 -253 -150 -32 95 201 274 341 438 560 667 734 781 842 921 1003 +0 -1 -2 -2 -4 -5 -7 -9 -12 -15 -18 -21 -25 -28 -31 -33 -35 -37 -37 -35 -32 -27 -21 -13 -3 10 28 50 74 100 129 160 192 224 258 296 336 378 419 461 504 542 570 588 606 629 650 658 645 618 588 562 533 488 422 346 273 208 136 47 -58 -160 -246 -320 -396 -479 -553 -609 -652 -702 -767 -832 -879 -899 -904 -910 -920 -926 -913 -873 -813 -756 -716 -677 -612 -510 -401 -308 -223 -116 16 141 238 321 412 516 610 685 744 788 816 834 851 866 869 860 854 850 819 744 651 584 545 501 420 310 197 93 -12 -131 -272 -422 -552 -637 -701 -792 -926 -1046 -1093 -1083 -1109 -1218 -1363 -1464 -1500 -1512 -1532 -1557 
-1577 -1611 -1668 -1729 -1765 -1766 -1750 -1727 -1697 -1663 -1625 -1567 -1466 -1331 -1196 -1096 -1022 -951 -870 -785 -708 -637 -568 -495 -405 -289 -156 -39 59 161 295 449 586 693 806 959 1145 1319 1460 1581 1720 1884 2041 2158 2227 2284 2367 2489 2619 2709 2741 2737 2734 2745 2749 2711 2630 2520 2414 2320 2222 2084 1888 1657 1443 1271 1119 946 730 480 218 -35 -259 -441 -594 -754 -938 -1119 -1255 -1344 -1422 -1522 -1633 -1706 -1714 -1678 -1632 -1608 -1602 -1588 -1539 -1455 -1365 -1299 -1236 -1125 -940 -718 -528 -395 -290 -171 -37 87 195 304 430 564 682 784 874 950 1001 1015 989 941 898 889 919 963 971 915 807 688 578 460 311 136 -31 -158 -252 -354 -499 -681 -853 -976 -1054 -1131 -1231 -1328 -1374 -1360 -1333 -1333 -1358 -1371 -1343 -1289 -1232 -1179 -1112 -1021 -914 -811 -724 -645 -557 -453 -340 -235 -144 -58 28 110 179 235 286 337 383 417 437 444 445 441 432 418 402 388 378 365 342 305 265 231 204 182 164 151 140 126 110 97 87 75 60 52 60 78 92 94 93 98 112 125 131 131 129 128 130 137 144 145 136 123 114 109 101 86 69 56 48 41 30 19 11 4 -5 -16 -25 -29 -30 -29 -30 -31 -32 -31 -29 -27 -25 -24 -24 -22 -19 -15 -12 -8 -6 -4 -2 -1 0 1 1 1 1 1 1 0 0 0 0 0 +-626 -543 -442 -313 -168 -41 64 172 314 476 619 729 845 1002 1193 1370 1511 1632 1771 1934 2090 2205 2270 2323 2403 2522 2649 2736 2764 2756 2750 2758 2759 2719 2636 2524 2416 2322 2223 2084 1888 1658 1444 1272 1121 948 732 482 219 -35 -260 -444 -599 -762 -950 -1135 -1276 -1369 -1452 -1558 -1675 -1755 -1768 -1736 -1694 -1674 -1673 -1664 -1618 -1535 -1446 -1382 -1320 -1206 -1012 -776 -573 -431 -318 -188 -41 98 219 343 488 643 782 905 1015 1110 1177 1201 1179 1130 1086 1082 1127 1190 1210 1148 1021 878 744 597 408 180 -40 -211 -341 -484 -689 -949 -1202 -1389 -1516 -1644 -1809 -1972 -2063 -2066 -2047 -2071 -2135 -2181 -2164 -2102 -2035 -1972 -1885 -1753 -1590 -1431 -1295 -1169 -1024 -845 -644 -452 -280 -114 57 226 373 498 617 739 855 946 1009 1045 1067 1076 1074 1060 1039 1023 1017 1004 960 876 778 693 626 570 527 498 473 
435 389 352 323 286 237 211 248 333 403 426 432 473 553 637 691 716 728 746 786 855 934 975 950 893 860 858 831 742 617 525 474 420 326 218 134 59 -56 -218 -369 -455 -485 -509 -559 -627 -684 -713 -714 -710 -725 -769 -820 -836 -798 -709 -593 -471 -356 -253 -150 -32 95 201 274 341 438 560 667 734 781 842 921 1003 1064 1095 1092 1064 1016 949 848 697 515 323 131 -73 -299 -538 -769 -982 -1183 -1391 -1619 -1846 -2031 -2157 -2255 -2363 -2470 -2513 -2458 -2351 -2258 -2185 -2087 -1942 -1776 -1608 -1421 -1201 -968 -743 -511 -255 1 223 422 639 872 1060 1178 1269 1388 1514 1586 1609 1650 1748 1854 1891 1846 1757 1663 1563 1433 1258 1034 791 574 405 241 29 -230 -467 -633 -754 -896 -1061 -1191 -1251 -1273 -1298 -1315 -1281 -1179 -1058 -971 -918 -852 -728 -550 -349 -145 63 282 506 725 944 1164 1379 1555 1664 1702 1696 1666 1632 1601 1585 1573 1537 1458 1338 1194 1048 910 781 647 493 321 164 67 29 -3 -72 -164 -229 -252 -259 -272 -274 -247 -207 -189 -189 -165 -91 12 115 212 307 385 427 437 447 468 475 456 439 455 486 466 363 220 112 57 9 -86 -225 -371 -502 -625 -745 -843 -912 +0 -1 -1 -1 -1 -1 0 0 1 2 4 5 8 11 15 20 25 30 37 44 53 62 70 78 88 99 113 125 136 145 155 165 175 183 188 191 192 195 197 194 184 170 155 142 131 115 93 64 30 -6 -39 -69 -97 -127 -164 -203 -236 -261 -286 -317 -351 -379 -393 -398 -399 -406 -417 -426 -425 -414 -400 -392 -384 -360 -309 -243 -184 -141 -107 -65 -15 34 79 127 185 248 308 364 416 463 501 520 520 507 496 502 532 571 590 569 514 449 386 314 218 97 -23 -118 -194 -278 -402 -560 -719 -841 -929 -1021 -1137 -1254 -1328 -1345 -1348 -1379 -1438 -1484 -1489 -1462 -1429 -1399 -1351 -1269 -1162 -1056 -964 -879 -776 -647 -497 -352 -220 -91 45 181 301 406 507 611 712 794 852 889 913 927 931 924 911 903 902 896 861 790 705 631 573 524 487 462 441 407 365 332 305 271 226 201 238 320 389 412 419 460 540 623 678 704 717 736 777 846 926 968 944 889 857 855 829 741 616 524 473 420 326 217 133 58 -56 -218 -369 -454 -483 -507 -556 -622 -678 -705 -705 -700 -713 -755 -803 
-817 -778 -689 -575 -456 -343 -243 -144 -31 90 190 258 320 410 522 619 678 718 771 839 909 959 982 974 944 896 832 739 604 443 276 111 -62 -251 -449 -637 -807 -965 -1127 -1301 -1472 -1607 -1692 -1755 -1823 -1889 -1905 -1847 -1751 -1666 -1597 -1510 -1392 -1260 -1130 -988 -826 -659 -501 -341 -168 0 143 268 401 541 649 712 758 818 881 910 911 922 963 1007 1012 973 912 850 787 710 614 496 373 266 184 108 12 -100 -199 -265 -310 -361 -419 -462 -475 -474 -473 -470 -448 -403 -354 -318 -294 -267 -222 -164 -102 -42 17 76 132 185 234 281 324 355 369 367 354 337 321 304 292 280 264 242 214 184 156 130 107 85 62 39 19 7 3 -1 -8 -16 -21 -22 -21 -21 -20 -17 -14 -12 -11 -9 -5 0 4 8 11 12 13 12 11 10 9 8 7 6 6 5 3 1 0 0 0 -1 -1 -1 -1 -1 -1 -1 0 +778 693 626 570 527 498 473 435 389 352 323 286 237 211 248 333 403 426 432 473 553 637 691 716 728 746 786 855 934 975 950 893 860 858 831 742 617 525 474 420 326 218 134 59 -56 -218 -369 -455 -485 -509 -559 -627 -684 -713 -714 -710 -725 -769 -820 -836 -798 -709 -593 -471 -356 -253 -150 -32 95 201 274 341 438 560 667 734 781 842 921 1003 1064 1095 1092 1064 1016 949 848 697 515 323 131 -73 -299 -538 -769 -982 -1183 -1391 -1619 -1846 -2031 -2157 -2255 -2363 -2470 -2513 -2458 -2351 -2258 -2185 -2087 -1942 -1776 -1608 -1421 -1201 -968 -743 -511 -255 1 223 422 639 872 1060 1178 1269 1388 1514 1586 1609 1650 1748 1854 1891 1846 1757 1663 1563 1433 1258 1034 791 574 405 241 29 -230 -467 -633 -754 -896 -1061 -1191 -1251 -1273 -1298 -1315 -1281 -1179 -1058 -971 -918 -852 -728 -550 -349 -145 63 282 506 725 944 1164 1379 1555 1664 1702 1696 1666 1632 1601 1585 1573 1537 1458 1338 1194 1048 910 781 647 493 321 164 67 29 -3 -72 -164 -229 -252 -259 -272 -274 -247 -207 -189 -189 -165 -91 12 115 212 307 385 427 437 447 468 475 456 439 455 486 466 363 220 112 57 9 -86 -225 -371 -502 -625 -745 -843 -912 -989 -1132 -1350 -1582 -1756 -1852 -1895 -1919 -1941 -1959 -1960 -1925 -1843 -1737 -1631 -1520 -1377 -1192 -995 -798 -593 -374 -173 -26 80 203 376 579 769 
921 1033 1114 1174 1222 1242 1212 1132 1033 929 799 618 415 245 114 -42 -274 -556 -811 -997 -1154 -1341 -1562 -1754 -1867 -1910 -1937 -1972 -1989 -1948 -1860 -1760 -1658 -1513 -1293 -1000 -676 -344 4 384 794 1198 1554 1844 2100 2371 2662 2928 3130 3271 3365 3398 3361 3278 3180 3049 2850 2597 2349 2124 1871 1555 1213 885 557 202 -146 -439 -709 -1013 -1312 -1494 -1544 -1585 -1706 -1832 -1844 -1755 -1671 -1623 -1524 -1332 -1108 -911 -711 -465 -199 23 194 347 486 602 733 919 1126 1280 1367 1443 1530 1578 1547 1459 1344 1192 1005 837 727 625 460 251 78 -49 -205 -431 -665 -829 -938 -1053 -1185 -1289 -1351 -1395 -1451 -1527 -1620 -1708 -1751 -1729 -1675 -1642 -1617 +0 0 0 0 0 0 1 1 1 1 2 2 2 2 3 4 6 8 9 10 14 18 21 24 26 29 33 39 46 51 53 53 54 58 59 56 49 44 42 39 31 22 14 6 -7 -27 -48 -61 -68 -74 -84 -98 -110 -119 -124 -127 -134 -147 -162 -170 -167 -154 -132 -108 -84 -62 -38 -9 24 54 75 96 127 166 203 229 249 275 307 342 371 390 397 395 385 367 334 280 211 135 55 -32 -132 -242 -352 -457 -559 -669 -791 -916 -1024 -1104 -1172 -1247 -1323 -1366 -1355 -1314 -1280 -1255 -1215 -1146 -1062 -974 -871 -746 -609 -473 -329 -166 0 148 284 434 599 736 827 900 994 1095 1158 1186 1228 1313 1404 1445 1423 1366 1304 1235 1142 1010 837 645 471 335 200 24 -195 -398 -542 -650 -777 -926 -1046 -1105 -1131 -1159 -1181 -1156 -1070 -965 -890 -845 -788 -676 -513 -327 -137 59 267 481 691 903 1117 1328 1502 1612 1653 1652 1627 1597 1570 1558 1549 1517 1441 1325 1184 1040 904 777 644 491 320 163 66 28 -3 -72 -164 -229 -252 -259 -272 -274 -247 -207 -189 -188 -164 -91 11 113 209 302 378 418 427 436 456 461 441 424 438 466 446 346 209 106 53 8 -81 -210 -345 -465 -576 -683 -769 -827 -893 -1016 -1205 -1405 -1551 -1626 -1654 -1665 -1673 -1678 -1668 -1628 -1547 -1448 -1350 -1250 -1124 -966 -800 -637 -469 -294 -135 -21 61 153 282 430 567 672 747 798 832 858 863 833 770 695 618 525 402 266 155 71 -27 -168 -337 -485 -589 -672 -771 -886 -981 -1029 -1038 -1038 -1041 -1034 -997 -938 -874 -810 -727 -611 -465 
-309 -155 1 166 338 500 637 741 829 918 1009 1088 1140 1166 1174 1161 1123 1071 1016 952 869 773 683 602 517 419 318 226 138 48 -35 -101 -158 -219 -275 -304 -304 -302 -315 -327 -318 -293 -269 -252 -228 -192 -154 -121 -91 -57 -24 2 20 35 47 56 64 77 89 96 97 97 97 94 87 76 66 54 42 33 26 21 14 7 1 -2 -5 -9 -12 -13 -13 -12 -12 -11 -10 -8 -7 -6 -5 -4 -3 -2 -1 -1 0 +-1179 -1058 -971 -918 -852 -728 -550 -349 -145 63 282 506 725 944 1164 1379 1555 1664 1702 1696 1666 1632 1601 1585 1573 1537 1458 1338 1194 1048 910 781 647 493 321 164 67 29 -3 -72 -164 -229 -252 -259 -272 -274 -247 -207 -189 -189 -165 -91 12 115 212 307 385 427 437 447 468 475 456 439 455 486 466 363 220 112 57 9 -86 -225 -371 -502 -625 -745 -843 -912 -989 -1132 -1350 -1582 -1756 -1852 -1895 -1919 -1941 -1959 -1960 -1925 -1843 -1737 -1631 -1520 -1377 -1192 -995 -798 -593 -374 -173 -26 80 203 376 579 769 921 1033 1114 1174 1222 1242 1212 1132 1033 929 799 618 415 245 114 -42 -274 -556 -811 -997 -1154 -1341 -1562 -1754 -1867 -1910 -1937 -1972 -1989 -1948 -1860 -1760 -1658 -1513 -1293 -1000 -676 -344 4 384 794 1198 1554 1844 2100 2371 2662 2928 3130 3271 3365 3398 3361 3278 3180 3049 2850 2597 2349 2124 1871 1555 1213 885 557 202 -146 -439 -709 -1013 -1312 -1494 -1544 -1585 -1706 -1832 -1844 -1755 -1671 -1623 -1524 -1332 -1108 -911 -711 -465 -199 23 194 347 486 602 733 919 1126 1280 1367 1443 1530 1578 1547 1459 1344 1192 1005 837 727 625 460 251 78 -49 -205 -431 -665 -829 -938 -1053 -1185 -1289 -1351 -1395 -1451 -1527 -1620 -1708 -1751 -1729 -1675 -1642 -1617 -1542 -1403 -1263 -1154 -1029 -848 -661 -547 -484 -380 -215 -73 -14 4 19 -15 -147 -306 -371 -357 -420 -662 -992 -1268 -1476 -1702 -1969 -2186 -2306 -2400 -2550 -2704 -2737 -2603 -2402 -2231 -2075 -1838 -1468 -998 -475 88 680 1256 1782 2284 2817 3392 3940 4400 4781 5124 5431 5656 5780 5827 5808 5668 5366 4949 4516 4099 3620 3015 2326 1653 1042 457 -133 -705 -1239 -1756 -2266 -2718 -3072 -3364 -3685 -4035 -4311 -4419 -4381 -4273 -4106 -3814 -3372 -2844 
-2314 -1791 -1244 -682 -144 353 828 1290 1685 1976 2195 2423 2667 2853 2920 2912 2917 2954 2964 2909 2823 2734 2589 2316 1947 1602 1332 1057 676 217 -201 -520 -777 -992 -1122 -1168 -1222 -1359 -1496 -1498 -1382 -1331 -1452 -1627 -1681 -1613 -1560 -1567 -1536 -1404 -1266 -1225 -1223 -1128 -947 -828 -837 -865 -799 -700 -705 -823 -935 -992 -1101 -1356 -1669 -1879 -1964 -2070 -2310 -2651 -2987 -3303 +0 -1 -1 -1 -2 -2 -2 -2 -1 0 1 4 6 10 15 20 25 31 35 39 42 46 49 53 57 60 62 61 58 55 51 46 41 33 22 12 5 2 -1 -7 -17 -24 -28 -30 -32 -34 -32 -28 -27 -28 -25 -15 1 19 36 54 70 81 85 90 97 102 101 100 107 117 115 92 57 30 15 2 -26 -68 -114 -157 -200 -244 -282 -312 -346 -404 -492 -589 -667 -718 -749 -773 -797 -819 -835 -835 -814 -780 -746 -707 -651 -573 -486 -396 -299 -192 -90 -14 42 110 207 323 435 528 601 657 701 739 761 752 711 656 597 520 406 276 164 77 -29 -191 -391 -576 -715 -835 -980 -1153 -1306 -1403 -1448 -1482 -1521 -1548 -1528 -1471 -1403 -1332 -1225 -1055 -822 -560 -287 3 324 675 1025 1339 1599 1832 2080 2350 2599 2793 2935 3035 3081 3063 3001 2926 2818 2646 2421 2199 1996 1765 1472 1153 844 533 194 -141 -425 -688 -985 -1279 -1460 -1512 -1556 -1678 -1806 -1821 -1736 -1656 -1610 -1514 -1325 -1104 -908 -710 -465 -199 22 193 346 486 602 732 918 1125 1278 1364 1439 1524 1571 1538 1449 1333 1180 993 826 716 614 451 245 76 -48 -200 -418 -643 -799 -901 -1009 -1131 -1226 -1280 -1317 -1364 -1430 -1511 -1586 -1619 -1591 -1534 -1497 -1467 -1392 -1260 -1128 -1025 -909 -745 -577 -475 -418 -326 -183 -62 -12 3 15 -13 -120 -248 -298 -285 -333 -520 -772 -978 -1129 -1290 -1480 -1628 -1701 -1754 -1845 -1938 -1942 -1828 -1670 -1535 -1412 -1238 -978 -657 -310 56 432 788 1105 1399 1704 2027 2323 2560 2745 2903 3035 3116 3139 3119 3064 2944 2745 2493 2240 2001 1739 1424 1080 755 467 201 -58 -301 -518 -721 -912 -1073 -1190 -1277 -1371 -1470 -1538 -1543 -1498 -1429 -1343 -1219 -1053 -868 -690 -521 -353 -189 -39 92 211 320 408 465 502 538 575 596 592 573 555 544 528 501 470 439 400 345 279 
221 176 134 82 25 -23 -56 -80 -98 -105 -104 -103 -109 -114 -108 -94 -85 -88 -92 -89 -80 -72 -67 -61 -52 -43 -38 -35 -29 -22 -18 -16 -15 -12 -10 -8 -8 -8 -7 -7 -6 -6 -6 -4 -3 -2 -2 -1 0 +3398 3361 3278 3180 3049 2850 2597 2349 2124 1871 1555 1213 885 557 202 -146 -439 -709 -1013 -1312 -1494 -1544 -1585 -1706 -1832 -1844 -1755 -1671 -1623 -1524 -1332 -1108 -911 -711 -465 -199 23 194 347 486 602 733 919 1126 1280 1367 1443 1530 1578 1547 1459 1344 1192 1005 837 727 625 460 251 78 -49 -205 -431 -665 -829 -938 -1053 -1185 -1289 -1351 -1395 -1451 -1527 -1620 -1708 -1751 -1729 -1675 -1642 -1617 -1542 -1403 -1263 -1154 -1029 -848 -661 -547 -484 -380 -215 -73 -14 4 19 -15 -147 -306 -371 -357 -420 -662 -992 -1268 -1476 -1702 -1969 -2186 -2306 -2400 -2550 -2704 -2737 -2603 -2402 -2231 -2075 -1838 -1468 -998 -475 88 680 1256 1782 2284 2817 3392 3940 4400 4781 5124 5431 5656 5780 5827 5808 5668 5366 4949 4516 4099 3620 3015 2326 1653 1042 457 -133 -705 -1239 -1756 -2266 -2718 -3072 -3364 -3685 -4035 -4311 -4419 -4381 -4273 -4106 -3814 -3372 -2844 -2314 -1791 -1244 -682 -144 353 828 1290 1685 1976 2195 2423 2667 2853 2920 2912 2917 2954 2964 2909 2823 2734 2589 2316 1947 1602 1332 1057 676 217 -201 -520 -777 -992 -1122 -1168 -1222 -1359 -1496 -1498 -1382 -1331 -1452 -1627 -1681 -1613 -1560 -1567 -1536 -1404 -1266 -1225 -1223 -1128 -947 -828 -837 -865 -799 -700 -705 -823 -935 -992 -1101 -1356 -1669 -1879 -1964 -2070 -2310 -2651 -2987 -3303 -3649 -4022 -4342 -4567 -4729 -4853 -4882 -4739 -4427 -4020 -3551 -2990 -2323 -1608 -893 -127 777 1807 2825 3718 4507 5283 6074 6799 7382 7825 8172 8433 8577 8588 8488 8303 8019 7596 7000 6228 5320 4347 3380 2442 1484 454 -617 -1615 -2432 -3080 -3655 -4230 -4791 -5276 -5656 -5937 -6103 -6129 -6021 -5825 -5556 -5180 -4665 -4046 -3385 -2697 -1954 -1166 -395 331 1046 1775 2465 3023 3420 3696 3887 3984 3989 3940 3849 3685 3431 3141 2882 2637 2320 1889 1413 994 661 372 84 -208 -482 -721 -909 -1028 -1069 -1036 -953 -846 -733 -612 -461 -260 -37 135 
191 144 71 26 -8 -78 -187 -272 -304 -334 -433 -597 -744 -834 -924 -1085 -1294 -1466 -1572 -1671 -1817 -1987 -2141 -2299 -2523 -2799 -3042 -3202 -3343 -3581 -3938 -4336 -4688 -4983 -5263 -5538 -5748 -5821 -5737 -5530 -5236 -4839 -4287 -3553 -2673 -1712 -720 292 1328 2387 3440 4425 5265 5950 6556 7168 +0 0 1 2 3 5 6 8 9 10 10 9 8 6 2 -3 -8 -14 -22 -31 -39 -44 -50 -58 -68 -73 -75 -77 -81 -81 -76 -67 -59 -49 -34 -16 1 16 30 45 58 75 98 126 150 167 183 203 218 222 217 208 191 167 144 129 115 87 49 15 -11 -45 -96 -153 -196 -228 -262 -303 -339 -365 -386 -412 -445 -483 -521 -547 -553 -548 -549 -553 -539 -501 -461 -430 -391 -329 -261 -221 -199 -159 -92 -32 -7 1 8 -7 -70 -148 -182 -178 -212 -339 -516 -669 -791 -925 -1085 -1222 -1307 -1379 -1485 -1595 -1636 -1576 -1472 -1385 -1304 -1169 -945 -650 -313 58 457 854 1225 1587 1977 2405 2823 3182 3492 3779 4042 4248 4380 4455 4479 4408 4207 3913 3599 3292 2930 2459 1911 1368 868 383 -113 -600 -1061 -1514 -1966 -2372 -2697 -2970 -3272 -3602 -3869 -3987 -3973 -3895 -3761 -3510 -3117 -2641 -2158 -1677 -1170 -644 -137 335 789 1234 1618 1903 2121 2347 2591 2779 2852 2850 2862 2904 2920 2871 2791 2707 2568 2300 1936 1594 1327 1054 674 216 -201 -520 -777 -992 -1122 -1168 -1222 -1359 -1495 -1496 -1379 -1327 -1446 -1618 -1670 -1601 -1546 -1550 -1517 -1384 -1245 -1202 -1198 -1102 -923 -805 -812 -836 -770 -673 -675 -786 -889 -940 -1039 -1275 -1563 -1752 -1824 -1914 -2126 -2428 -2723 -2995 -3292 -3610 -3876 -4055 -4175 -4260 -4260 -4110 -3816 -3442 -3022 -2528 -1950 -1341 -740 -105 633 1462 2269 2963 3564 4142 4724 5243 5644 5929 6138 6277 6325 6273 6140 5949 5687 5333 4865 4283 3619 2925 2250 1607 965 292 -393 -1015 -1510 -1888 -2213 -2529 -2826 -3071 -3248 -3365 -3411 -3378 -3271 -3119 -2932 -2692 -2388 -2039 -1680 -1317 -939 -551 -184 151 469 783 1068 1287 1429 1515 1563 1572 1544 1494 1431 1342 1223 1096 985 881 758 603 441 303 196 108 23 -58 -130 -190 -233 -256 -259 -244 -219 -188 -159 -129 -94 -52 -8 24 34 24 11 4 -2 -12 -27 -38 -41 -43 
-53 -70 -84 -90 -95 -107 -121 -130 -133 -134 -138 -143 -145 -147 -152 -158 -161 -158 -154 -153 -156 -159 -158 -155 -150 -142 -134 -123 -108 -92 -77 -63 -49 -34 -22 -12 -5 1 4 6 6 5 3 2 1 0 +-4381 -4273 -4106 -3814 -3372 -2844 -2314 -1791 -1244 -682 -144 353 828 1290 1685 1976 2195 2423 2667 2853 2920 2912 2917 2954 2964 2909 2823 2734 2589 2316 1947 1602 1332 1057 676 217 -201 -520 -777 -992 -1122 -1168 -1222 -1359 -1496 -1498 -1382 -1331 -1452 -1627 -1681 -1613 -1560 -1567 -1536 -1404 -1266 -1225 -1223 -1128 -947 -828 -837 -865 -799 -700 -705 -823 -935 -992 -1101 -1356 -1669 -1879 -1964 -2070 -2310 -2651 -2987 -3303 -3649 -4022 -4342 -4567 -4729 -4853 -4882 -4739 -4427 -4020 -3551 -2990 -2323 -1608 -893 -127 777 1807 2825 3718 4507 5283 6074 6799 7382 7825 8172 8433 8577 8588 8488 8303 8019 7596 7000 6228 5320 4347 3380 2442 1484 454 -617 -1615 -2432 -3080 -3655 -4230 -4791 -5276 -5656 -5937 -6103 -6129 -6021 -5825 -5556 -5180 -4665 -4046 -3385 -2697 -1954 -1166 -395 331 1046 1775 2465 3023 3420 3696 3887 3984 3989 3940 3849 3685 3431 3141 2882 2637 2320 1889 1413 994 661 372 84 -208 -482 -721 -909 -1028 -1069 -1036 -953 -846 -733 -612 -461 -260 -37 135 191 144 71 26 -8 -78 -187 -272 -304 -334 -433 -597 -744 -834 -924 -1085 -1294 -1466 -1572 -1671 -1817 -1987 -2141 -2299 -2523 -2799 -3042 -3202 -3343 -3581 -3938 -4336 -4688 -4983 -5263 -5538 -5748 -5821 -5737 -5530 -5236 -4839 -4287 -3553 -2673 -1712 -720 292 1328 2387 3440 4425 5265 5950 6556 7168 7762 8222 8480 8595 8656 8652 8488 8121 7619 7064 6427 5613 4608 3511 2449 1461 496 -498 -1498 -2409 -3134 -3649 -4023 -4350 -4643 -4824 -4809 -4630 -4406 -4231 -4076 -3832 -3440 -2942 -2421 -1899 -1330 -696 -46 552 1097 1652 2235 2781 3184 3405 3489 3506 3461 3308 3022 2637 2223 1824 1437 1047 669 346 112 -32 -127 -209 -296 -364 -377 -343 -316 -350 -426 -468 -420 -317 -222 -143 -33 125 292 432 560 694 786 755 591 364 140 -86 -352 -651 -942 -1186 -1386 -1553 -1694 -1814 -1899 -1929 -1909 -1891 -1928 -2005 -2040 -2009 
-1995 -2116 -2377 -2647 -2802 -2846 -2900 -3067 -3354 -3704 -4069 -4439 -4816 -5171 -5452 -5615 -5645 -5548 -5310 -4885 -4232 -3372 -2392 -1391 -403 617 1707 2831 3883 4763 5469 6071 6632 7131 7489 7662 7689 7643 7550 7370 7054 6613 6099 5525 4849 4029 3095 2147 1269 469 -305 -1088 -1850 -2498 -2960 -3234 -3383 -3474 +0 -2 -3 -3 -5 -6 -7 -7 -6 -4 -1 2 7 14 21 28 36 45 55 66 74 82 90 99 108 115 120 125 127 122 109 96 84 71 48 16 -17 -44 -69 -93 -110 -120 -132 -153 -176 -184 -177 -177 -201 -234 -251 -250 -251 -261 -265 -251 -234 -234 -241 -229 -199 -179 -186 -198 -189 -170 -176 -211 -246 -268 -305 -385 -486 -560 -599 -647 -739 -867 -999 -1129 -1274 -1435 -1582 -1699 -1795 -1880 -1928 -1907 -1816 -1681 -1512 -1296 -1025 -722 -408 -60 367 868 1379 1844 2271 2703 3155 3587 3952 4250 4502 4712 4860 4931 4940 4897 4792 4597 4289 3865 3341 2763 2174 1589 976 302 -416 -1099 -1673 -2141 -2567 -3001 -3434 -3817 -4132 -4379 -4543 -4605 -4563 -4455 -4286 -4030 -3659 -3200 -2699 -2167 -1582 -952 -325 273 871 1489 2083 2572 2928 3185 3370 3476 3501 3478 3416 3289 3079 2833 2613 2403 2124 1738 1306 922 616 348 78 -197 -457 -686 -868 -985 -1027 -999 -921 -820 -713 -597 -451 -255 -37 132 188 142 70 25 -8 -78 -186 -271 -303 -334 -433 -597 -744 -834 -924 -1085 -1294 -1466 -1572 -1670 -1815 -1984 -2136 -2292 -2512 -2784 -3022 -3177 -3312 -3541 -3888 -4273 -4611 -4890 -5153 -5410 -5601 -5657 -5560 -5344 -5045 -4648 -4104 -3390 -2542 -1622 -680 274 1243 2225 3193 4090 4844 5448 5974 6499 7002 7378 7569 7629 7641 7593 7406 7042 6566 6048 5468 4744 3867 2926 2026 1200 404 -404 -1204 -1921 -2479 -2862 -3130 -3355 -3551 -3656 -3613 -3447 -3250 -3091 -2949 -2746 -2440 -2066 -1683 -1307 -905 -469 -31 363 714 1062 1420 1746 1976 2086 2111 2095 2041 1925 1735 1494 1242 1005 780 560 352 179 57 -17 -64 -103 -143 -172 -176 -157 -142 -155 -185 -200 -176 -131 -90 -57 -13 47 108 157 199 242 268 252 193 116 43 -27 -105 -190 -268 -329 -374 -408 -434 -452 -460 -454 -437 -421 -417 -420 -414 -396 -380 -391 
-424 -457 -467 -458 -449 -458 -483 -512 -541 -566 -590 -606 -613 -604 -579 -544 -496 -433 -357 -270 -182 -100 -28 39 102 159 204 234 251 259 262 261 252 237 217 195 175 154 132 109 89 71 54 38 24 14 7 2 -2 -3 -4 -4 -3 -2 -1 0 +2882 2637 2320 1889 1413 994 661 372 84 -208 -482 -721 -909 -1028 -1069 -1036 -953 -846 -733 -612 -461 -260 -37 135 191 144 71 26 -8 -78 -187 -272 -304 -334 -433 -597 -744 -834 -924 -1085 -1294 -1466 -1572 -1671 -1817 -1987 -2141 -2299 -2523 -2799 -3042 -3202 -3343 -3581 -3938 -4336 -4688 -4983 -5263 -5538 -5748 -5821 -5737 -5530 -5236 -4839 -4287 -3553 -2673 -1712 -720 292 1328 2387 3440 4425 5265 5950 6556 7168 7762 8222 8480 8595 8656 8652 8488 8121 7619 7064 6427 5613 4608 3511 2449 1461 496 -498 -1498 -2409 -3134 -3649 -4023 -4350 -4643 -4824 -4809 -4630 -4406 -4231 -4076 -3832 -3440 -2942 -2421 -1899 -1330 -696 -46 552 1097 1652 2235 2781 3184 3405 3489 3506 3461 3308 3022 2637 2223 1824 1437 1047 669 346 112 -32 -127 -209 -296 -364 -377 -343 -316 -350 -426 -468 -420 -317 -222 -143 -33 125 292 432 560 694 786 755 591 364 140 -86 -352 -651 -942 -1186 -1386 -1553 -1694 -1814 -1899 -1929 -1909 -1891 -1928 -2005 -2040 -2009 -1995 -2116 -2377 -2647 -2802 -2846 -2900 -3067 -3354 -3704 -4069 -4439 -4816 -5171 -5452 -5615 -5645 -5548 -5310 -4885 -4232 -3372 -2392 -1391 -403 617 1707 2831 3883 4763 5469 6071 6632 7131 7489 7662 7689 7643 7550 7370 7054 6613 6099 5525 4849 4029 3095 2147 1269 469 -305 -1088 -1850 -2498 -2960 -3234 -3383 -3474 -3513 -3449 -3249 -2946 -2596 -2227 -1829 -1414 -1021 -668 -322 76 528 982 1374 1684 1943 2190 2439 2653 2744 2636 2342 1967 1617 1308 982 596 178 -212 -534 -758 -856 -816 -674 -502 -332 -142 96 374 639 857 1012 1107 1156 1189 1231 1267 1245 1151 1025 923 871 859 880 910 881 712 390 14 -315 -604 -960 -1448 -1992 -2439 -2717 -2867 -2963 -3014 -2980 -2848 -2672 -2502 -2333 -2121 -1865 -1640 -1546 -1611 -1766 -1911 -2006 -2091 -2242 -2490 -2822 -3210 -3637 -4095 -4550 -4953 -5275 -5498 -5603 -5549 -5306 -4874 
-4284 -3567 -2745 -1830 -838 219 1327 2449 3522 4477 5274 5909 6411 6824 7160 7386 7427 7253 6934 6591 6269 5885 5335 4623 3853 3113 2390 1628 825 53 -636 -1255 -1848 -2403 -2846 -3095 -3144 -3069 -2963 -2844 -2641 -2277 -1757 -1199 -728 -368 -34 354 779 1150 1404 1571 1737 1944 2152 2295 2354 2365 2350 2267 2053 1704 +0 0 1 1 1 1 1 1 0 -2 -4 -6 -9 -12 -14 -16 -16 -16 -16 -15 -12 -8 -2 4 6 5 3 1 -1 -5 -11 -17 -20 -23 -31 -46 -60 -71 -82 -102 -127 -151 -169 -188 -213 -244 -273 -306 -349 -403 -454 -496 -538 -597 -679 -773 -865 -949 -1036 -1124 -1203 -1257 -1275 -1266 -1233 -1172 -1067 -909 -702 -462 -200 82 386 710 1048 1381 1682 1945 2191 2450 2709 2932 3088 3195 3284 3350 3350 3267 3124 2952 2736 2432 2032 1576 1118 678 234 -240 -732 -1196 -1580 -1868 -2091 -2296 -2486 -2621 -2650 -2588 -2497 -2430 -2373 -2261 -2056 -1781 -1484 -1179 -836 -443 -30 359 722 1099 1504 1892 2189 2366 2449 2486 2479 2392 2207 1944 1654 1370 1088 800 515 269 87 -26 -102 -168 -240 -297 -310 -284 -264 -294 -361 -399 -360 -274 -193 -125 -29 110 259 385 502 626 712 688 541 334 129 -80 -329 -610 -886 -1120 -1313 -1477 -1617 -1737 -1824 -1859 -1845 -1833 -1874 -1954 -1993 -1967 -1958 -2081 -2343 -2613 -2771 -2819 -2877 -3047 -3336 -3688 -4056 -4428 -4807 -5165 -5449 -5613 -5644 -5548 -5310 -4884 -4230 -3370 -2390 -1389 -402 614 1699 2815 3856 4724 5416 6002 6546 7026 7364 7517 7527 7465 7356 7161 6835 6390 5875 5306 4641 3844 2942 2033 1197 440 -286 -1015 -1718 -2309 -2724 -2962 -3084 -3151 -3170 -3096 -2900 -2616 -2292 -1955 -1596 -1227 -880 -572 -274 64 443 818 1137 1383 1584 1772 1959 2114 2169 2067 1821 1517 1236 991 737 443 131 -155 -387 -544 -608 -573 -469 -346 -226 -96 63 246 415 551 643 695 717 728 745 757 734 669 588 523 486 473 478 487 464 369 199 7 -157 -295 -462 -685 -926 -1115 -1220 -1265 -1285 -1284 -1246 -1169 -1076 -988 -904 -805 -694 -598 -552 -563 -604 -639 -656 -669 -701 -760 -841 -934 -1031 -1133 -1227 -1300 -1349 -1368 -1357 -1306 -1214 -1083 -925 -747 -557 -361 -160 40 
236 422 586 719 816 881 921 942 950 941 908 849 778 708 642 576 497 409 324 248 180 116 55 3 -39 -71 -98 -119 -131 -133 -125 -113 -100 -89 -75 -59 -41 -26 -14 -7 -1 4 8 10 11 10 9 8 7 6 4 2 1 1 0 0 +786 755 591 364 140 -86 -352 -651 -942 -1186 -1386 -1553 -1694 -1814 -1899 -1929 -1909 -1891 -1928 -2005 -2040 -2009 -1995 -2116 -2377 -2647 -2802 -2846 -2900 -3067 -3354 -3704 -4069 -4439 -4816 -5171 -5452 -5615 -5645 -5548 -5310 -4885 -4232 -3372 -2392 -1391 -403 617 1707 2831 3883 4763 5469 6071 6632 7131 7489 7662 7689 7643 7550 7370 7054 6613 6099 5525 4849 4029 3095 2147 1269 469 -305 -1088 -1850 -2498 -2960 -3234 -3383 -3474 -3513 -3449 -3249 -2946 -2596 -2227 -1829 -1414 -1021 -668 -322 76 528 982 1374 1684 1943 2190 2439 2653 2744 2636 2342 1967 1617 1308 982 596 178 -212 -534 -758 -856 -816 -674 -502 -332 -142 96 374 639 857 1012 1107 1156 1189 1231 1267 1245 1151 1025 923 871 859 880 910 881 712 390 14 -315 -604 -960 -1448 -1992 -2439 -2717 -2867 -2963 -3014 -2980 -2848 -2672 -2502 -2333 -2121 -1865 -1640 -1546 -1611 -1766 -1911 -2006 -2091 -2242 -2490 -2822 -3210 -3637 -4095 -4550 -4953 -5275 -5498 -5603 -5549 -5306 -4874 -4284 -3567 -2745 -1830 -838 219 1327 2449 3522 4477 5274 5909 6411 6824 7160 7386 7427 7253 6934 6591 6269 5885 5335 4623 3853 3113 2390 1628 825 53 -636 -1255 -1848 -2403 -2846 -3095 -3144 -3069 -2963 -2844 -2641 -2277 -1757 -1199 -728 -368 -34 354 779 1150 1404 1571 1737 1944 2152 2295 2354 2365 2350 2267 2053 1704 1284 855 417 -75 -617 -1121 -1461 -1575 -1512 -1375 -1218 -997 -642 -150 376 824 1163 1437 1676 1852 1919 1874 1754 1582 1361 1106 853 642 499 436 451 498 501 410 235 11 -261 -604 -1018 -1441 -1806 -2087 -2307 -2489 -2627 -2683 -2621 -2434 -2159 -1856 -1565 -1279 -1000 -776 -679 -731 -890 -1102 -1371 -1742 -2241 -2846 -3508 -4184 -4829 -5388 -5810 -6079 -6227 -6284 -6238 -6031 -5622 -5041 -4348 -3568 -2665 -1621 -508 582 1631 2707 3840 4916 5769 6356 6790 7197 7554 7705 7546 7132 6629 6153 5693 5163 4518 3794 3058 2347 1659 
984 327 -304 -908 -1482 -2000 -2412 -2667 -2761 -2757 -2737 -2720 -2613 -2293 -1746 -1121 -613 -285 -11 363 861 1341 1660 1809 1915 2072 2245 2323 2278 2191 2120 1996 1688 1209 737 418 195 -118 -581 -1021 -1232 -1191 -1071 -1002 -926 -682 -215 339 795 1115 1420 1793 2143 2273 2117 1828 1592 1420 1162 +0 0 0 0 0 -1 -1 -3 -5 -7 -10 -13 -17 -21 -25 -29 -32 -36 -41 -47 -53 -57 -62 -72 -88 -105 -120 -131 -144 -162 -190 -223 -260 -301 -345 -392 -436 -473 -501 -518 -520 -501 -455 -379 -281 -171 -52 81 235 407 579 737 878 1010 1143 1270 1380 1459 1513 1550 1579 1590 1567 1512 1435 1338 1206 1029 812 578 351 132 -89 -325 -565 -781 -946 -1058 -1131 -1188 -1227 -1231 -1184 -1096 -985 -863 -723 -569 -419 -280 -138 32 232 440 627 782 917 1052 1190 1316 1382 1348 1216 1037 865 710 541 333 100 -122 -311 -448 -512 -494 -414 -312 -209 -91 61 243 420 570 681 753 795 826 864 898 892 832 748 680 648 645 666 695 679 553 305 11 -252 -486 -778 -1182 -1637 -2019 -2265 -2407 -2505 -2565 -2552 -2455 -2318 -2184 -2048 -1873 -1656 -1464 -1388 -1454 -1602 -1742 -1838 -1925 -2073 -2312 -2632 -3006 -3419 -3865 -4311 -4709 -5033 -5264 -5382 -5346 -5128 -4723 -4163 -3476 -2682 -1792 -823 215 1307 2417 3482 4434 5231 5868 6375 6794 7135 7366 7412 7244 6928 6587 6267 5885 5335 4621 3851 3110 2387 1624 822 52 -634 -1248 -1836 -2384 -2819 -3061 -3104 -3025 -2914 -2791 -2586 -2225 -1712 -1166 -706 -356 -33 339 745 1097 1334 1488 1639 1827 2014 2139 2185 2186 2162 2076 1871 1545 1158 767 372 -67 -545 -984 -1275 -1366 -1304 -1178 -1037 -843 -539 -126 311 677 948 1163 1346 1476 1517 1469 1364 1220 1040 838 640 477 368 318 326 356 355 287 163 7 -178 -407 -678 -949 -1176 -1343 -1467 -1564 -1631 -1645 -1587 -1455 -1274 -1081 -899 -725 -559 -428 -369 -392 -470 -573 -702 -878 -1112 -1390 -1686 -1977 -2244 -2462 -2609 -2682 -2699 -2676 -2608 -2474 -2262 -1991 -1684 -1354 -991 -591 -182 203 557 904 1255 1571 1801 1938 2022 2092 2141 2131 2033 1871 1694 1530 1378 1215 1033 842 659 491 336 193 62 -57 -162 -256 
-334 -388 -413 -412 -397 -379 -362 -334 -281 -205 -126 -66 -30 -2 33 76 112 132 136 136 140 143 139 128 115 104 91 72 47 26 14 6 -4 -15 -24 -26 -23 -18 -15 -12 -8 -3 2 5 6 6 6 5 4 2 1 0 0 0 +-1766 -1911 -2006 -2091 -2242 -2490 -2822 -3210 -3637 -4095 -4550 -4953 -5275 -5498 -5603 -5549 -5306 -4874 -4284 -3567 -2745 -1830 -838 219 1327 2449 3522 4477 5274 5909 6411 6824 7160 7386 7427 7253 6934 6591 6269 5885 5335 4623 3853 3113 2390 1628 825 53 -636 -1255 -1848 -2403 -2846 -3095 -3144 -3069 -2963 -2844 -2641 -2277 -1757 -1199 -728 -368 -34 354 779 1150 1404 1571 1737 1944 2152 2295 2354 2365 2350 2267 2053 1704 1284 855 417 -75 -617 -1121 -1461 -1575 -1512 -1375 -1218 -997 -642 -150 376 824 1163 1437 1676 1852 1919 1874 1754 1582 1361 1106 853 642 499 436 451 498 501 410 235 11 -261 -604 -1018 -1441 -1806 -2087 -2307 -2489 -2627 -2683 -2621 -2434 -2159 -1856 -1565 -1279 -1000 -776 -679 -731 -890 -1102 -1371 -1742 -2241 -2846 -3508 -4184 -4829 -5388 -5810 -6079 -6227 -6284 -6238 -6031 -5622 -5041 -4348 -3568 -2665 -1621 -508 582 1631 2707 3840 4916 5769 6356 6790 7197 7554 7705 7546 7132 6629 6153 5693 5163 4518 3794 3058 2347 1659 984 327 -304 -908 -1482 -2000 -2412 -2667 -2761 -2757 -2737 -2720 -2613 -2293 -1746 -1121 -613 -285 -11 363 861 1341 1660 1809 1915 2072 2245 2323 2278 2191 2120 1996 1688 1209 737 418 195 -118 -581 -1021 -1232 -1191 -1071 -1002 -926 -682 -215 339 795 1115 1420 1793 2143 2273 2117 1828 1592 1420 1162 751 317 44 -48 -96 -196 -290 -291 -248 -312 -551 -863 -1117 -1297 -1510 -1812 -2120 -2311 -2358 -2336 -2297 -2198 -1958 -1574 -1123 -684 -270 118 442 619 582 340 -49 -553 -1194 -2022 -3008 -4021 -4916 -5640 -6249 -6778 -7162 -7284 -7112 -6722 -6214 -5620 -4926 -4139 -3284 -2369 -1373 -302 797 1870 2905 3900 4820 5603 6219 6692 7052 7276 7299 7107 6770 6371 5913 5319 4540 3644 2775 2013 1315 598 -143 -806 -1289 -1603 -1849 -2097 -2306 -2376 -2274 -2088 -1913 -1737 -1460 -1035 -552 -134 195 527 932 1362 1712 1938 2089 2229 2359 2425 2370 2186 
1923 1648 1388 1104 744 308 -132 -510 -826 -1113 -1359 -1509 -1526 -1438 -1283 -1054 -720 -285 188 634 1029 1377 1672 1896 2039 2095 2046 1881 1630 1361 1119 878 592 282 48 -34 0 23 -47 -191 -332 -435 -533 -679 -888 -1132 -1364 -1549 -1672 -1742 -1769 -1742 -1637 -1437 -1154 -812 +0 -1 -1 -2 -3 -5 -8 -11 -16 -23 -32 -40 -51 -62 -73 -82 -89 -92 -90 -83 -71 -52 -26 7 48 96 150 205 260 311 361 409 456 499 531 548 553 555 555 548 522 474 413 349 280 199 105 7 -88 -181 -276 -372 -458 -516 -542 -547 -547 -542 -520 -462 -368 -259 -162 -85 -9 85 193 293 368 423 480 551 625 683 717 738 751 741 686 582 448 304 151 -28 -235 -435 -577 -634 -621 -575 -519 -433 -284 -68 171 382 549 690 818 918 966 958 911 834 728 600 470 358 282 250 262 293 299 248 144 6 -164 -384 -655 -938 -1189 -1390 -1553 -1694 -1807 -1865 -1841 -1727 -1548 -1343 -1144 -944 -745 -583 -515 -559 -687 -858 -1076 -1378 -1787 -2286 -2840 -3413 -3969 -4460 -4843 -5103 -5264 -5347 -5341 -5198 -4876 -4399 -3817 -3150 -2366 -1447 -456 525 1478 2467 3516 4523 5332 5901 6330 6738 7100 7270 7148 6780 6324 5890 5467 4973 4365 3676 2971 2286 1620 963 320 -299 -895 -1463 -1978 -2390 -2646 -2743 -2742 -2725 -2711 -2606 -2289 -1744 -1121 -613 -285 -11 363 860 1340 1658 1806 1911 2066 2237 2312 2265 2176 2102 1976 1669 1193 726 411 191 -116 -568 -995 -1198 -1155 -1035 -966 -890 -653 -206 322 753 1052 1334 1678 1998 2110 1956 1682 1457 1294 1053 677 284 39 -43 -85 -173 -254 -253 -214 -268 -469 -730 -938 -1082 -1250 -1490 -1730 -1871 -1894 -1863 -1817 -1724 -1523 -1214 -859 -519 -203 87 325 452 421 243 -35 -389 -830 -1391 -2047 -2707 -3273 -3713 -4068 -4361 -4554 -4576 -4414 -4120 -3761 -3359 -2906 -2410 -1886 -1343 -768 -167 432 1001 1532 2026 2466 2823 3085 3267 3388 3437 3391 3246 3039 2810 2562 2264 1897 1494 1116 794 509 226 -54 -294 -460 -560 -632 -701 -754 -760 -711 -637 -570 -506 -414 -287 -149 -36 49 131 225 320 391 430 450 466 478 477 451 402 342 284 231 177 115 45 -19 -71 -110 -142 -167 -177 -172 -155 -132 -104 -68 
-26 15 50 77 98 113 120 122 118 107 92 74 58 44 32 19 8 1 -1 0 0 -1 -4 -5 -6 -6 -7 -8 -8 -8 -7 -6 -5 -4 -3 -2 -1 -1 0 +1631 2707 3840 4916 5769 6356 6790 7197 7554 7705 7546 7132 6629 6153 5693 5163 4518 3794 3058 2347 1659 984 327 -304 -908 -1482 -2000 -2412 -2667 -2761 -2757 -2737 -2720 -2613 -2293 -1746 -1121 -613 -285 -11 363 861 1341 1660 1809 1915 2072 2245 2323 2278 2191 2120 1996 1688 1209 737 418 195 -118 -581 -1021 -1232 -1191 -1071 -1002 -926 -682 -215 339 795 1115 1420 1793 2143 2273 2117 1828 1592 1420 1162 751 317 44 -48 -96 -196 -290 -291 -248 -312 -551 -863 -1117 -1297 -1510 -1812 -2120 -2311 -2358 -2336 -2297 -2198 -1958 -1574 -1123 -684 -270 118 442 619 582 340 -49 -553 -1194 -2022 -3008 -4021 -4916 -5640 -6249 -6778 -7162 -7284 -7112 -6722 -6214 -5620 -4926 -4139 -3284 -2369 -1373 -302 797 1870 2905 3900 4820 5603 6219 6692 7052 7276 7299 7107 6770 6371 5913 5319 4540 3644 2775 2013 1315 598 -143 -806 -1289 -1603 -1849 -2097 -2306 -2376 -2274 -2088 -1913 -1737 -1460 -1035 -552 -134 195 527 932 1362 1712 1938 2089 2229 2359 2425 2370 2186 1923 1648 1388 1104 744 308 -132 -510 -826 -1113 -1359 -1509 -1526 -1438 -1283 -1054 -720 -285 188 634 1029 1377 1672 1896 2039 2095 2046 1881 1630 1361 1119 878 592 282 48 -34 0 23 -47 -191 -332 -435 -533 -679 -888 -1132 -1364 -1549 -1672 -1742 -1769 -1742 -1637 -1437 -1154 -812 -437 -74 201 313 242 46 -235 -645 -1291 -2233 -3371 -4491 -5407 -6089 -6642 -7145 -7533 -7630 -7324 -6685 -5891 -5044 -4111 -3025 -1835 -673 393 1428 2495 3511 4302 4806 5164 5570 6042 6391 6436 6223 5970 5834 5739 5455 4857 4041 3215 2474 1733 866 -108 -1024 -1747 -2284 -2732 -3102 -3300 -3240 -2965 -2621 -2295 -1933 -1428 -768 -96 424 761 1047 1421 1864 2206 2328 2301 2306 2423 2540 2493 2253 1951 1710 1517 1271 928 554 232 -37 -317 -627 -885 -984 -925 -806 -693 -531 -218 234 679 992 1197 1409 1660 1846 1873 1772 1655 1565 1445 1245 1015 841 735 626 461 286 164 81 -62 -323 -659 -970 -1220 -1459 -1738 -2021 -2222 -2308 -2324 -2334 -2329 
-2245 -2043 -1765 -1491 -1250 -1024 -809 -670 -676 -830 -1082 -1434 -1971 -2743 -3641 -4467 -5132 -5730 -6362 -6932 -7167 -6886 -6188 -5337 -4480 -3527 -2307 -818 714 2040 3116 4104 5142 +0 0 1 3 7 12 18 24 33 43 51 57 63 69 73 75 75 71 64 54 42 27 10 -11 -34 -59 -86 -111 -132 -146 -156 -165 -174 -177 -165 -133 -90 -52 -26 -2 35 88 144 186 211 234 264 298 321 327 326 328 320 281 208 131 77 37 -24 -118 -214 -266 -265 -246 -236 -225 -170 -55 88 214 308 402 521 638 693 661 584 520 474 397 262 113 16 -18 -37 -76 -115 -118 -102 -131 -235 -374 -493 -583 -690 -842 -1002 -1111 -1152 -1159 -1158 -1125 -1018 -831 -602 -372 -149 65 250 355 338 200 -30 -335 -732 -1255 -1890 -2557 -3163 -3671 -4114 -4513 -4821 -4957 -4892 -4673 -4364 -3986 -3530 -2995 -2399 -1748 -1023 -227 603 1429 2240 3033 3779 4430 4957 5375 5709 5934 5998 5882 5642 5347 4997 4525 3887 3140 2406 1756 1154 527 -127 -720 -1157 -1447 -1677 -1912 -2112 -2187 -2102 -1939 -1784 -1627 -1373 -977 -523 -128 186 504 895 1312 1654 1877 2029 2171 2304 2374 2325 2149 1894 1626 1372 1093 738 305 -132 -508 -824 -1111 -1357 -1508 -1525 -1438 -1283 -1054 -720 -285 187 633 1027 1374 1667 1889 2030 2083 2032 1865 1614 1345 1104 865 582 276 46 -34 0 22 -46 -185 -320 -418 -511 -648 -845 -1073 -1288 -1456 -1566 -1625 -1643 -1611 -1507 -1316 -1052 -737 -395 -67 179 277 213 40 -206 -560 -1113 -1912 -2869 -3796 -4539 -5076 -5498 -5872 -6145 -6178 -5883 -5329 -4659 -3956 -3198 -2334 -1404 -511 295 1062 1840 2564 3112 3443 3662 3910 4199 4395 4379 4188 3974 3839 3735 3509 3087 2538 1995 1516 1048 517 -64 -596 -1004 -1295 -1527 -1710 -1793 -1735 -1565 -1362 -1175 -975 -709 -375 -47 200 353 478 637 822 955 991 961 945 974 1002 965 854 725 622 541 443 317 185 75 -12 -99 -192 -264 -287 -263 -223 -187 -140 -56 58 164 233 273 313 358 386 379 348 315 288 257 214 169 135 113 93 66 39 21 10 -8 -38 -75 -105 -126 -143 -163 -180 -188 -185 -176 -167 -158 -144 -123 -100 -79 -62 -47 -35 -27 -25 -28 -34 -41 -51 -64 -77 -84 -86 -84 -83 -78 -69 -56 
-43 -30 -20 -13 -7 -2 0 1 1 1 0 +-1849 -2097 -2306 -2376 -2274 -2088 -1913 -1737 -1460 -1035 -552 -134 195 527 932 1362 1712 1938 2089 2229 2359 2425 2370 2186 1923 1648 1388 1104 744 308 -132 -510 -826 -1113 -1359 -1509 -1526 -1438 -1283 -1054 -720 -285 188 634 1029 1377 1672 1896 2039 2095 2046 1881 1630 1361 1119 878 592 282 48 -34 0 23 -47 -191 -332 -435 -533 -679 -888 -1132 -1364 -1549 -1672 -1742 -1769 -1742 -1637 -1437 -1154 -812 -437 -74 201 313 242 46 -235 -645 -1291 -2233 -3371 -4491 -5407 -6089 -6642 -7145 -7533 -7630 -7324 -6685 -5891 -5044 -4111 -3025 -1835 -673 393 1428 2495 3511 4302 4806 5164 5570 6042 6391 6436 6223 5970 5834 5739 5455 4857 4041 3215 2474 1733 866 -108 -1024 -1747 -2284 -2732 -3102 -3300 -3240 -2965 -2621 -2295 -1933 -1428 -768 -96 424 761 1047 1421 1864 2206 2328 2301 2306 2423 2540 2493 2253 1951 1710 1517 1271 928 554 232 -37 -317 -627 -885 -984 -925 -806 -693 -531 -218 234 679 992 1197 1409 1660 1846 1873 1772 1655 1565 1445 1245 1015 841 735 626 461 286 164 81 -62 -323 -659 -970 -1220 -1459 -1738 -2021 -2222 -2308 -2324 -2334 -2329 -2245 -2043 -1765 -1491 -1250 -1024 -809 -670 -676 -830 -1082 -1434 -1971 -2743 -3641 -4467 -5132 -5730 -6362 -6932 -7167 -6886 -6188 -5337 -4480 -3527 -2307 -818 714 2040 3116 4104 5142 6117 6751 6911 6788 6672 6600 6303 5582 4623 3853 3458 3155 2551 1629 781 319 61 -457 -1413 -2461 -3134 -3397 -3661 -4232 -4900 -5175 -4876 -4325 -3905 -3564 -2902 -1721 -332 791 1548 2271 3235 4236 4808 4774 4460 4289 4277 4069 3456 2680 2149 1929 1694 1196 593 248 248 301 141 -151 -285 -159 22 46 -43 -13 241 583 812 911 1031 1291 1619 1853 1926 1930 1967 1997 1892 1607 1254 965 735 441 27 -410 -735 -935 -1109 -1325 -1533 -1657 -1704 -1773 -1932 -2132 -2276 -2320 -2331 -2393 -2502 -2539 -2396 -2100 -1792 -1559 -1333 -988 -527 -122 58 22 -87 -187 -363 -742 -1315 -1927 -2459 -2946 -3490 -4078 -4540 -4734 -4691 -4566 -4425 -4168 -3657 -2906 -2082 -1320 -595 203 1105 2005 2763 3339 3800 4213 4562 4772 4810 4735 4625 
4485 4255 3908 3512 3157 2829 2430 1904 1315 754 226 -329 -937 -1542 -2090 -2567 -2967 -3242 -3341 +0 -1 -2 -2 -3 -5 -6 -6 -7 -6 -4 -2 1 5 12 19 28 36 43 51 60 68 73 73 70 65 59 50 36 16 -8 -31 -53 -76 -98 -115 -122 -122 -114 -99 -71 -30 20 71 120 168 213 251 281 301 305 291 261 226 192 156 109 53 9 -7 0 4 -11 -44 -79 -106 -133 -174 -234 -306 -378 -440 -487 -519 -540 -544 -524 -470 -386 -278 -153 -27 73 116 91 17 -93 -260 -530 -934 -1436 -1947 -2386 -2734 -3034 -3320 -3559 -3666 -3577 -3317 -2969 -2582 -2136 -1596 -983 -366 216 798 1413 2016 2503 2834 3086 3371 3702 3966 4042 3956 3840 3797 3777 3631 3269 2749 2211 1719 1216 614 -78 -741 -1277 -1685 -2034 -2331 -2501 -2478 -2287 -2039 -1800 -1529 -1139 -617 -78 345 625 866 1184 1564 1864 1980 1970 1987 2101 2216 2188 1988 1731 1526 1361 1146 841 504 212 -35 -294 -583 -826 -922 -870 -761 -657 -505 -208 224 652 955 1156 1365 1612 1798 1829 1734 1623 1539 1423 1228 1003 832 729 621 458 284 163 80 -62 -323 -659 -970 -1220 -1459 -1738 -2021 -2221 -2307 -2322 -2330 -2323 -2238 -2035 -1756 -1481 -1240 -1015 -800 -662 -667 -817 -1062 -1404 -1926 -2673 -3538 -4329 -4960 -5521 -6111 -6636 -6839 -6547 -5862 -5037 -4211 -3303 -2151 -760 659 1877 2853 3740 4662 5518 6058 6168 6025 5890 5792 5499 4840 3984 3298 2942 2666 2141 1357 646 262 49 -370 -1135 -1962 -2479 -2664 -2848 -3264 -3747 -3922 -3663 -3220 -2881 -2604 -2100 -1234 -236 555 1075 1561 2201 2851 3201 3142 2902 2759 2719 2556 2144 1642 1300 1152 999 696 340 140 138 165 76 -81 -151 -83 11 23 -22 -7 115 275 377 416 462 569 701 788 805 791 791 788 732 609 466 351 262 153 9 -138 -241 -299 -347 -405 -457 -482 -483 -491 -521 -560 -582 -578 -565 -564 -573 -565 -518 -440 -364 -307 -254 -183 -94 -22 9 3 -14 -28 -53 -103 -175 -246 -301 -346 -392 -439 -466 -464 -438 -405 -373 -333 -277 -208 -141 -85 -36 11 58 98 126 142 150 154 153 147 136 121 107 94 79 64 51 40 31 23 15 8 4 0 -2 -3 -4 -3 -2 -2 -1 0 +928 554 232 -37 -317 -627 -885 -984 -925 -806 -693 -531 -218 234 679 992 1197 
1409 1660 1846 1873 1772 1655 1565 1445 1245 1015 841 735 626 461 286 164 81 -62 -323 -659 -970 -1220 -1459 -1738 -2021 -2222 -2308 -2324 -2334 -2329 -2245 -2043 -1765 -1491 -1250 -1024 -809 -670 -676 -830 -1082 -1434 -1971 -2743 -3641 -4467 -5132 -5730 -6362 -6932 -7167 -6886 -6188 -5337 -4480 -3527 -2307 -818 714 2040 3116 4104 5142 6117 6751 6911 6788 6672 6600 6303 5582 4623 3853 3458 3155 2551 1629 781 319 61 -457 -1413 -2461 -3134 -3397 -3661 -4232 -4900 -5175 -4876 -4325 -3905 -3564 -2902 -1721 -332 791 1548 2271 3235 4236 4808 4774 4460 4289 4277 4069 3456 2680 2149 1929 1694 1196 593 248 248 301 141 -151 -285 -159 22 46 -43 -13 241 583 812 911 1031 1291 1619 1853 1926 1930 1967 1997 1892 1607 1254 965 735 441 27 -410 -735 -935 -1109 -1325 -1533 -1657 -1704 -1773 -1932 -2132 -2276 -2320 -2331 -2393 -2502 -2539 -2396 -2100 -1792 -1559 -1333 -988 -527 -122 58 22 -87 -187 -363 -742 -1315 -1927 -2459 -2946 -3490 -4078 -4540 -4734 -4691 -4566 -4425 -4168 -3657 -2906 -2082 -1320 -595 203 1105 2005 2763 3339 3800 4213 4562 4772 4810 4735 4625 4485 4255 3908 3512 3157 2829 2430 1904 1315 754 226 -329 -937 -1542 -2090 -2567 -2967 -3242 -3341 -3277 -3102 -2826 -2409 -1828 -1156 -507 68 621 1211 1812 2299 2563 2626 2614 2609 2545 2298 1865 1401 1063 856 670 466 337 377 542 693 751 750 763 808 843 835 789 730 680 649 641 661 733 879 1090 1314 1486 1595 1687 1791 1865 1823 1622 1323 1004 680 290 -199 -732 -1213 -1600 -1926 -2230 -2503 -2705 -2830 -2902 -2931 -2895 -2791 -2673 -2594 -2556 -2493 -2362 -2184 -2005 -1835 -1644 -1422 -1195 -990 -806 -659 -624 -794 -1184 -1680 -2099 -2329 -2386 -2360 -2294 -2141 -1822 -1356 -868 -474 -155 221 727 1272 1707 2000 2258 2541 2732 2660 2322 1923 1653 1481 1241 891 608 586 803 1035 1119 1099 1117 1191 1180 967 611 255 -67 -463 -1001 -1575 -2004 -2239 -2404 -2599 -2734 -2621 -2198 -1610 -1039 -511 71 747 1393 1853 2107 2281 2467 2613 2609 2459 2293 2202 2113 1895 1549 1240 1093 1045 946 764 625 644 +0 0 0 -1 -1 -2 -3 -4 -5 -5 -5 -5 
-3 2 8 14 19 26 34 42 48 50 51 52 52 49 43 38 36 33 25 17 10 5 -5 -25 -53 -82 -109 -137 -171 -208 -239 -260 -273 -286 -297 -299 -283 -254 -223 -194 -165 -135 -116 -121 -153 -207 -283 -400 -574 -786 -993 -1174 -1349 -1541 -1725 -1832 -1808 -1668 -1477 -1270 -1026 -688 -250 222 651 1018 1371 1757 2135 2408 2517 2523 2531 2555 2488 2245 1896 1610 1472 1367 1125 731 356 148 28 -220 -690 -1221 -1580 -1739 -1903 -2233 -2624 -2812 -2687 -2417 -2213 -2047 -1690 -1016 -199 478 948 1409 2032 2693 3093 3107 2935 2855 2878 2768 2376 1862 1508 1368 1213 865 433 182 184 226 106 -116 -220 -124 17 36 -35 -11 195 475 667 753 859 1083 1368 1576 1649 1663 1705 1742 1660 1418 1113 861 659 397 24 -374 -674 -861 -1026 -1231 -1430 -1552 -1602 -1674 -1831 -2027 -2172 -2221 -2239 -2306 -2418 -2461 -2329 -2047 -1751 -1527 -1308 -972 -520 -121 57 21 -87 -186 -361 -739 -1311 -1922 -2455 -2943 -3488 -4077 -4539 -4734 -4691 -4565 -4423 -4165 -3653 -2901 -2077 -1316 -593 201 1097 1988 2736 3301 3750 4151 4486 4682 4709 4625 4506 4357 4123 3776 3383 3032 2708 2318 1810 1245 711 212 -309 -874 -1432 -1932 -2363 -2718 -2955 -3030 -2957 -2784 -2523 -2139 -1614 -1015 -443 58 535 1036 1541 1943 2151 2188 2163 2144 2075 1860 1498 1116 840 671 521 359 257 285 407 515 553 547 551 578 597 586 548 502 462 436 426 435 477 565 692 825 922 977 1021 1070 1100 1061 931 749 561 374 157 -107 -387 -631 -819 -971 -1107 -1223 -1300 -1337 -1349 -1339 -1300 -1232 -1159 -1105 -1069 -1023 -951 -863 -777 -697 -612 -518 -427 -346 -276 -221 -204 -254 -370 -513 -626 -678 -677 -653 -619 -562 -466 -338 -211 -112 -36 49 156 266 346 393 429 468 486 458 386 308 255 220 178 123 80 74 98 121 125 118 114 116 110 85 51 20 -6 -34 -68 -101 -121 -127 -127 -129 -126 -112 -87 -59 -36 -16 2 19 32 38 39 37 36 33 29 23 18 15 11 8 5 3 2 1 0 0 0 0 +27 -410 -735 -935 -1109 -1325 -1533 -1657 -1704 -1773 -1932 -2132 -2276 -2320 -2331 -2393 -2502 -2539 -2396 -2100 -1792 -1559 -1333 -988 -527 -122 58 22 -87 -187 -363 -742 -1315 -1927 -2459 -2946 
-3490 -4078 -4540 -4734 -4691 -4566 -4425 -4168 -3657 -2906 -2082 -1320 -595 203 1105 2005 2763 3339 3800 4213 4562 4772 4810 4735 4625 4485 4255 3908 3512 3157 2829 2430 1904 1315 754 226 -329 -937 -1542 -2090 -2567 -2967 -3242 -3341 -3277 -3102 -2826 -2409 -1828 -1156 -507 68 621 1211 1812 2299 2563 2626 2614 2609 2545 2298 1865 1401 1063 856 670 466 337 377 542 693 751 750 763 808 843 835 789 730 680 649 641 661 733 879 1090 1314 1486 1595 1687 1791 1865 1823 1622 1323 1004 680 290 -199 -732 -1213 -1600 -1926 -2230 -2503 -2705 -2830 -2902 -2931 -2895 -2791 -2673 -2594 -2556 -2493 -2362 -2184 -2005 -1835 -1644 -1422 -1195 -990 -806 -659 -624 -794 -1184 -1680 -2099 -2329 -2386 -2360 -2294 -2141 -1822 -1356 -868 -474 -155 221 727 1272 1707 2000 2258 2541 2732 2660 2322 1923 1653 1481 1241 891 608 586 803 1035 1119 1099 1117 1191 1180 967 611 255 -67 -463 -1001 -1575 -2004 -2239 -2404 -2599 -2734 -2621 -2198 -1610 -1039 -511 71 747 1393 1853 2107 2281 2467 2613 2609 2459 2293 2202 2113 1895 1549 1240 1093 1045 946 764 625 644 771 856 846 835 922 1066 1129 1053 940 923 1001 1046 963 819 745 754 719 539 293 157 185 254 221 95 -4 -26 -63 -225 -495 -767 -1001 -1260 -1608 -1999 -2321 -2536 -2699 -2871 -3019 -3054 -2958 -2798 -2646 -2496 -2285 -2001 -1688 -1387 -1099 -804 -526 -301 -130 27 191 329 403 430 475 568 663 681 602 489 408 353 273 145 10 -78 -115 -144 -182 -192 -144 -72 -42 -76 -121 -113 -65 -41 -81 -147 -177 -149 -93 -43 7 93 225 369 475 521 518 488 437 362 260 135 4 -108 -185 -225 -238 -224 -158 -20 183 405 608 800 1015 1249 1443 1527 1499 1423 1351 1264 1097 843 568 347 192 61 -59 -127 -108 -30 43 89 143 233 334 397 408 411 450 528 591 595 551 529 583 697 787 791 750 736 768 771 678 524 392 304 192 -4 -241 -432 +0 -1 -1 -1 -2 -3 -5 -6 -8 -10 -14 -18 -22 -27 -31 -36 -42 -48 -51 -49 -46 -45 -42 -34 -20 -5 2 1 -5 -10 -21 -45 -84 -131 -176 -223 -279 -344 -403 -442 -460 -469 -476 -469 -429 -356 -266 -176 -83 29 164 310 443 555 654 750 840 908 946 960 967 967 945 
893 826 764 703 621 499 354 208 64 -96 -280 -471 -653 -821 -970 -1084 -1142 -1145 -1107 -1030 -896 -694 -448 -201 27 254 506 771 996 1130 1179 1194 1212 1202 1104 910 695 535 438 348 245 180 204 298 387 425 430 444 476 503 505 483 453 427 412 412 430 482 585 733 894 1021 1108 1184 1270 1336 1318 1184 975 747 510 219 -153 -565 -944 -1255 -1524 -1778 -2011 -2190 -2309 -2385 -2426 -2413 -2343 -2260 -2208 -2189 -2149 -2049 -1906 -1760 -1620 -1460 -1270 -1073 -894 -731 -601 -572 -731 -1095 -1560 -1958 -2181 -2243 -2227 -2174 -2036 -1739 -1299 -834 -457 -150 214 706 1239 1667 1958 2215 2498 2691 2625 2295 1904 1639 1470 1234 887 605 584 801 1033 1118 1098 1116 1191 1180 966 610 254 -67 -463 -999 -1570 -1996 -2227 -2388 -2579 -2708 -2592 -2170 -1587 -1022 -502 69 729 1357 1800 2041 2204 2376 2509 2497 2346 2179 2085 1993 1781 1450 1156 1014 965 870 699 569 583 695 768 755 741 813 935 985 913 810 790 851 884 808 682 616 619 586 436 235 125 146 199 171 73 -4 -20 -48 -168 -366 -561 -725 -903 -1141 -1404 -1614 -1745 -1837 -1933 -2010 -2011 -1926 -1800 -1683 -1568 -1419 -1227 -1022 -829 -649 -468 -303 -171 -73 14 103 176 212 223 243 286 328 332 289 231 189 161 122 63 4 -34 -49 -60 -74 -76 -56 -28 -16 -28 -44 -40 -23 -14 -27 -47 -56 -46 -28 -13 1 25 60 96 121 129 125 114 99 80 56 28 0 -22 -36 -42 -43 -39 -27 -4 28 60 87 110 134 159 176 178 168 152 138 123 102 74 47 27 14 4 -4 -9 -7 -2 2 4 6 9 13 14 13 12 12 13 13 12 10 8 8 9 8 7 6 5 4 3 2 1 0 0 0 -1 -1 0 +-806 -659 -624 -794 -1184 -1680 -2099 -2329 -2386 -2360 -2294 -2141 -1822 -1356 -868 -474 -155 221 727 1272 1707 2000 2258 2541 2732 2660 2322 1923 1653 1481 1241 891 608 586 803 1035 1119 1099 1117 1191 1180 967 611 255 -67 -463 -1001 -1575 -2004 -2239 -2404 -2599 -2734 -2621 -2198 -1610 -1039 -511 71 747 1393 1853 2107 2281 2467 2613 2609 2459 2293 2202 2113 1895 1549 1240 1093 1045 946 764 625 644 771 856 846 835 922 1066 1129 1053 940 923 1001 1046 963 819 745 754 719 539 293 157 185 254 221 95 -4 -26 -63 -225 -495 -767 
-1001 -1260 -1608 -1999 -2321 -2536 -2699 -2871 -3019 -3054 -2958 -2798 -2646 -2496 -2285 -2001 -1688 -1387 -1099 -804 -526 -301 -130 27 191 329 403 430 475 568 663 681 602 489 408 353 273 145 10 -78 -115 -144 -182 -192 -144 -72 -42 -76 -121 -113 -65 -41 -81 -147 -177 -149 -93 -43 7 93 225 369 475 521 518 488 437 362 260 135 4 -108 -185 -225 -238 -224 -158 -20 183 405 608 800 1015 1249 1443 1527 1499 1423 1351 1264 1097 843 568 347 192 61 -59 -127 -108 -30 43 89 143 233 334 397 408 411 450 528 591 595 551 529 583 697 787 791 750 736 768 771 678 524 392 304 192 -4 -241 -432 -564 -711 -927 -1172 -1355 -1461 -1551 -1689 -1849 -1945 -1933 -1849 -1765 -1705 -1629 -1475 -1241 -992 -799 -665 -527 -336 -123 44 144 229 344 458 508 488 467 507 573 583 507 405 350 346 331 266 163 74 23 -13 -84 -210 -349 -431 -432 -392 -372 -374 -350 -270 -167 -81 -1 101 211 274 263 223 207 193 114 -58 -282 -489 -651 -792 -936 -1081 -1181 -1190 -1100 -940 -738 -505 -253 -3 223 438 659 871 1022 1089 1105 1112 1113 1084 1030 975 932 900 881 875 866 831 774 711 646 569 484 413 352 281 210 168 153 121 59 22 58 145 245 376 548 679 642 440 234 173 250 378 515 672 808 824 649 306 -123 -527 -797 -879 -807 -675 -559 -512 -592 -839 -1181 -1479 -1627 -1628 -1505 -1237 -831 -414 -129 -15 6 2 -17 -28 23 158 332 500 667 826 +0 -1 -1 -1 -2 -4 -6 -8 -11 -14 -16 -18 -18 -16 -12 -7 -3 4 15 29 43 56 70 85 100 105 99 88 81 78 69 53 38 39 57 78 89 92 98 111 115 99 65 28 -8 -57 -128 -210 -277 -322 -359 -403 -440 -437 -379 -287 -192 -98 13 151 291 399 468 521 580 632 649 628 601 593 584 537 450 369 333 326 302 249 208 220 269 305 308 310 349 412 445 423 385 385 426 453 424 367 340 350 339 258 143 77 93 129 114 50 -3 -15 -35 -126 -281 -441 -583 -744 -962 -1210 -1423 -1574 -1696 -1826 -1943 -1988 -1947 -1863 -1782 -1699 -1572 -1391 -1186 -984 -788 -582 -385 -223 -97 20 144 251 310 334 372 449 528 546 487 398 335 292 227 121 8 -67 -99 -125 -158 -168 -127 -64 -38 -68 -109 -102 -59 -38 -75 -136 -164 -139 -87 -41 6 87 213 
350 453 498 497 470 422 350 252 131 3 -106 -182 -222 -235 -222 -157 -20 181 402 604 796 1011 1245 1440 1525 1497 1422 1350 1264 1097 842 567 346 191 60 -59 -127 -108 -30 42 88 141 230 329 391 401 403 440 515 575 578 533 511 561 669 753 754 713 697 724 724 634 488 363 280 176 -4 -220 -392 -509 -639 -828 -1041 -1197 -1283 -1354 -1465 -1594 -1666 -1645 -1563 -1482 -1422 -1349 -1213 -1013 -804 -642 -531 -417 -264 -96 33 110 173 258 340 374 356 337 363 406 409 352 278 238 232 220 175 106 47 14 -9 -53 -129 -212 -258 -255 -229 -214 -212 -196 -149 -91 -44 -1 52 107 138 130 108 99 91 52 -27 -127 -216 -283 -338 -392 -444 -476 -470 -426 -357 -275 -184 -91 -2 76 146 215 278 319 332 329 323 315 299 277 255 238 223 213 205 198 184 167 148 131 111 92 76 62 48 34 26 23 18 8 3 7 18 29 44 61 72 65 43 21 15 21 30 38 48 54 52 38 17 -7 -26 -37 -38 -32 -25 -19 -16 -17 -22 -28 -32 -31 -28 -23 -17 -10 -4 -2 -1 0 0 -1 -1 0 0 0 0 0 0 +-65 -41 -81 -147 -177 -149 -93 -43 7 93 225 369 475 521 518 488 437 362 260 135 4 -108 -185 -225 -238 -224 -158 -20 183 405 608 800 1015 1249 1443 1527 1499 1423 1351 1264 1097 843 568 347 192 61 -59 -127 -108 -30 43 89 143 233 334 397 408 411 450 528 591 595 551 529 583 697 787 791 750 736 768 771 678 524 392 304 192 -4 -241 -432 -564 -711 -927 -1172 -1355 -1461 -1551 -1689 -1849 -1945 -1933 -1849 -1765 -1705 -1629 -1475 -1241 -992 -799 -665 -527 -336 -123 44 144 229 344 458 508 488 467 507 573 583 507 405 350 346 331 266 163 74 23 -13 -84 -210 -349 -431 -432 -392 -372 -374 -350 -270 -167 -81 -1 101 211 274 263 223 207 193 114 -58 -282 -489 -651 -792 -936 -1081 -1181 -1190 -1100 -940 -738 -505 -253 -3 223 438 659 871 1022 1089 1105 1112 1113 1084 1030 975 932 900 881 875 866 831 774 711 646 569 484 413 352 281 210 168 153 121 59 22 58 145 245 376 548 679 642 440 234 173 250 378 515 672 808 824 649 306 -123 -527 -797 -879 -807 -675 -559 -512 -592 -839 -1181 -1479 -1627 -1628 -1505 -1237 -831 -414 -129 -15 6 2 -17 -28 23 158 332 500 667 826 921 905 818 729 646 
550 444 357 282 179 45 -88 -220 -406 -664 -928 -1118 -1213 -1233 -1189 -1092 -972 -847 -708 -558 -457 -458 -519 -547 -509 -453 -418 -371 -277 -174 -123 -125 -137 -147 -182 -237 -259 -198 -52 136 323 481 601 699 781 829 815 754 702 698 711 684 612 541 502 468 406 323 260 234 216 174 118 83 81 76 12 -111 -229 -284 -287 -302 -356 -412 -410 -346 -262 -196 -140 -55 84 259 421 527 582 627 682 716 683 587 486 425 387 318 183 0 -183 -327 -434 -531 -635 -722 -753 -722 -662 -605 -542 -446 -321 -199 -101 -16 62 113 113 79 56 73 126 190 255 309 348 378 406 429 434 417 403 418 453 465 416 312 194 100 34 -34 -104 -149 -139 -86 -36 -11 10 54 97 104 72 35 8 -32 -111 -215 -304 +0 -1 -1 -1 -1 -1 -1 -1 0 0 1 2 4 5 6 7 7 6 5 3 0 -4 -6 -8 -9 -9 -7 -1 9 21 34 48 64 84 103 115 119 119 119 117 107 86 61 38 22 7 -8 -17 -15 -5 6 13 22 38 57 70 75 78 88 107 123 128 122 121 137 168 195 202 196 198 212 218 197 156 119 94 61 -2 -81 -148 -197 -254 -338 -436 -515 -566 -613 -680 -759 -813 -824 -802 -779 -766 -745 -686 -587 -477 -391 -330 -266 -172 -64 23 77 124 189 255 287 280 271 299 342 352 310 251 219 219 212 173 107 49 15 -9 -58 -146 -246 -306 -310 -284 -272 -276 -261 -203 -127 -62 -1 78 165 216 209 179 167 157 93 -49 -236 -411 -551 -674 -802 -932 -1025 -1039 -966 -830 -656 -451 -228 -3 202 399 603 801 944 1011 1030 1041 1046 1022 975 926 889 861 846 842 836 805 752 692 631 557 474 406 346 277 207 166 151 120 58 21 57 144 244 375 547 678 641 440 234 172 249 377 514 670 805 821 646 304 -123 -523 -790 -870 -797 -666 -550 -503 -580 -820 -1151 -1438 -1577 -1574 -1450 -1189 -796 -395 -123 -15 5 1 -16 -27 21 146 305 457 607 748 830 812 730 647 570 482 387 309 243 153 38 -75 -185 -339 -550 -763 -912 -983 -991 -948 -864 -763 -659 -547 -427 -347 -345 -387 -404 -372 -328 -300 -264 -195 -121 -85 -86 -93 -98 -120 -155 -167 -126 -33 84 197 291 359 412 454 476 461 421 386 379 380 360 317 276 252 232 198 155 122 108 98 78 52 35 34 31 4 -45 -91 -110 -109 -113 -130 -147 -144 -119 -88 -65 -45 -18 25 77 122 149 
160 168 178 183 169 142 114 97 85 68 38 0 -37 -63 -80 -95 -110 -121 -121 -112 -99 -87 -75 -60 -41 -25 -12 -2 6 11 11 7 4 6 10 14 18 20 22 22 22 22 21 19 17 16 16 15 12 8 4 2 0 -1 -2 -3 -2 -1 -1 -1 0 0 0 0 0 0 0 -1 -1 -1 0 +223 438 659 871 1022 1089 1105 1112 1113 1084 1030 975 932 900 881 875 866 831 774 711 646 569 484 413 352 281 210 168 153 121 59 22 58 145 245 376 548 679 642 440 234 173 250 378 515 672 808 824 649 306 -123 -527 -797 -879 -807 -675 -559 -512 -592 -839 -1181 -1479 -1627 -1628 -1505 -1237 -831 -414 -129 -15 6 2 -17 -28 23 158 332 500 667 826 921 905 818 729 646 550 444 357 282 179 45 -88 -220 -406 -664 -928 -1118 -1213 -1233 -1189 -1092 -972 -847 -708 -558 -457 -458 -519 -547 -509 -453 -418 -371 -277 -174 -123 -125 -137 -147 -182 -237 -259 -198 -52 136 323 481 601 699 781 829 815 754 702 698 711 684 612 541 502 468 406 323 260 234 216 174 118 83 81 76 12 -111 -229 -284 -287 -302 -356 -412 -410 -346 -262 -196 -140 -55 84 259 421 527 582 627 682 716 683 587 486 425 387 318 183 0 -183 -327 -434 -531 -635 -722 -753 -722 -662 -605 -542 -446 -321 -199 -101 -16 62 113 113 79 56 73 126 190 255 309 348 378 406 429 434 417 403 418 453 465 416 312 194 100 34 -34 -104 -149 -139 -86 -36 -11 10 54 97 104 72 35 8 -32 -111 -215 -304 -364 -420 -491 -573 -638 -669 -677 -691 -725 -751 -725 -635 -521 -428 -373 -332 -282 -224 -163 -89 -12 48 82 118 185 279 355 396 419 450 492 524 539 547 552 551 537 508 478 453 430 395 345 303 297 323 339 306 222 118 16 -77 -157 -212 -243 -274 -310 -326 -304 -254 -209 -181 -165 -157 -172 -218 -274 -330 -385 -437 -466 -465 -453 -463 -498 -526 -511 -448 -356 -263 -182 -107 -19 90 184 207 151 93 113 201 268 258 221 243 337 417 406 317 230 215 278 382 480 532 514 429 304 166 34 -88 -181 -210 -158 -51 45 84 69 41 34 48 53 41 30 47 95 139 140 88 11 -65 -129 -190 -247 -280 -270 -226 -174 -139 -126 -120 -113 -105 -110 -127 -152 -164 -157 -129 -86 -34 11 33 25 17 41 89 126 121 94 85 +0 0 0 0 1 2 2 3 4 6 7 7 8 10 11 12 14 15 16 16 16 16 15 13 
12 11 8 7 7 6 3 1 3 9 17 28 43 57 56 41 22 17 26 42 60 82 102 109 89 44 -19 -82 -129 -147 -140 -121 -104 -98 -117 -171 -248 -320 -362 -373 -355 -300 -207 -106 -34 -5 1 0 -5 -9 7 49 106 163 222 282 321 322 297 271 245 212 175 143 115 74 19 -39 -98 -183 -304 -432 -529 -583 -603 -590 -551 -498 -441 -374 -299 -249 -253 -291 -310 -293 -264 -247 -222 -168 -107 -77 -79 -88 -95 -119 -156 -173 -134 -36 93 224 337 426 500 564 605 601 561 527 528 543 527 476 424 396 373 326 261 212 192 178 145 99 70 68 65 10 -97 -200 -250 -254 -269 -318 -370 -370 -314 -239 -180 -129 -51 77 241 394 495 549 593 648 683 653 563 468 410 375 308 178 0 -180 -321 -427 -524 -627 -714 -746 -717 -658 -602 -540 -445 -321 -199 -101 -16 61 112 113 79 55 72 125 189 254 308 346 376 403 426 430 413 398 412 446 457 408 305 189 97 33 -33 -101 -144 -134 -83 -35 -11 9 50 91 97 67 32 7 -30 -102 -196 -276 -329 -377 -439 -509 -564 -588 -591 -600 -625 -644 -617 -537 -438 -357 -309 -273 -231 -182 -131 -71 -10 37 63 91 141 211 266 294 309 328 355 375 382 384 383 378 365 341 318 298 279 254 219 190 184 197 205 182 130 68 9 -44 -88 -117 -133 -147 -164 -170 -156 -128 -104 -89 -80 -75 -80 -100 -124 -146 -167 -187 -195 -191 -183 -183 -193 -200 -191 -164 -127 -92 -63 -36 -7 28 57 63 44 27 32 55 72 67 56 60 81 98 92 70 49 44 56 75 91 98 91 73 50 26 5 -14 -27 -30 -21 -7 5 9 7 4 3 4 4 3 2 3 7 9 9 5 0 -4 -7 -10 -12 -12 -11 -9 -6 -5 -4 -4 -3 -3 -3 -3 -3 -3 -2 -2 -1 -1 0 0 0 0 0 0 0 0 0 0 +-346 -262 -196 -140 -55 84 259 421 527 582 627 682 716 683 587 486 425 387 318 183 0 -183 -327 -434 -531 -635 -722 -753 -722 -662 -605 -542 -446 -321 -199 -101 -16 62 113 113 79 56 73 126 190 255 309 348 378 406 429 434 417 403 418 453 465 416 312 194 100 34 -34 -104 -149 -139 -86 -36 -11 10 54 97 104 72 35 8 -32 -111 -215 -304 -364 -420 -491 -573 -638 -669 -677 -691 -725 -751 -725 -635 -521 -428 -373 -332 -282 -224 -163 -89 -12 48 82 118 185 279 355 396 419 450 492 524 539 547 552 551 537 508 478 453 430 395 345 303 297 323 339 306 222 118 16 
-77 -157 -212 -243 -274 -310 -326 -304 -254 -209 -181 -165 -157 -172 -218 -274 -330 -385 -437 -466 -465 -453 -463 -498 -526 -511 -448 -356 -263 -182 -107 -19 90 184 207 151 93 113 201 268 258 221 243 337 417 406 317 230 215 278 382 480 532 514 429 304 166 34 -88 -181 -210 -158 -51 45 84 69 41 34 48 53 41 30 47 95 139 140 88 11 -65 -129 -190 -247 -280 -270 -226 -174 -139 -126 -120 -113 -105 -110 -127 -152 -164 -157 -129 -86 -34 11 33 25 17 41 89 126 121 94 85 113 152 167 148 107 61 20 -11 -34 -49 -55 -41 -11 24 51 70 87 92 69 25 -17 -29 -21 -16 -34 -67 -97 -107 -91 -62 -44 -48 -67 -81 -77 -63 -51 -50 -75 -127 -194 -250 -290 -327 -370 -406 -405 -358 -288 -229 -200 -193 -185 -157 -99 -25 44 92 112 115 118 132 155 172 169 162 178 227 296 355 398 435 479 515 523 491 430 355 288 229 171 105 31 -42 -111 -174 -226 -264 -290 -311 -322 -313 -280 -242 -227 -238 -258 -260 -245 -232 -236 -246 -237 -194 -122 -43 24 73 104 123 142 180 237 291 321 326 332 350 366 367 353 338 318 291 258 234 222 212 198 173 135 83 41 26 32 27 -3 -35 -35 -4 35 68 106 151 184 181 140 75 -2 -82 -147 -182 -196 -217 -271 -349 -419 -466 -499 -533 +0 -1 -1 -1 -1 0 0 1 2 3 4 5 6 7 7 7 7 7 6 4 0 -6 -11 -15 -20 -26 -31 -35 -36 -35 -35 -33 -29 -22 -15 -8 -2 5 10 10 7 5 7 14 22 31 39 46 52 58 63 67 66 67 72 80 85 79 61 39 20 7 -8 -24 -36 -34 -22 -10 -3 2 14 27 30 21 10 2 -11 -37 -72 -104 -128 -150 -179 -214 -243 -260 -268 -279 -298 -314 -309 -276 -230 -193 -171 -155 -134 -108 -80 -45 -7 24 42 62 99 151 195 221 237 258 286 309 322 331 338 341 337 322 307 294 283 262 232 206 204 224 238 217 159 85 11 -57 -117 -160 -185 -210 -240 -254 -239 -201 -167 -146 -134 -129 -142 -181 -229 -277 -326 -372 -399 -401 -393 -404 -438 -465 -454 -400 -320 -238 -166 -98 -18 82 170 192 140 87 106 189 253 245 210 232 323 401 392 307 223 209 271 373 470 523 506 423 300 164 33 -88 -180 -210 -158 -51 44 83 68 40 33 48 53 40 29 46 94 138 139 87 10 -65 -129 -189 -245 -277 -267 -223 -172 -137 -124 -118 -111 -103 -107 -123 -147 -158 -151 
-124 -82 -33 10 31 23 15 38 82 115 110 85 77 101 136 149 131 94 53 17 -10 -30 -42 -47 -35 -10 20 42 57 70 74 55 19 -14 -23 -17 -13 -26 -51 -73 -80 -68 -46 -32 -35 -48 -57 -54 -44 -35 -34 -50 -84 -127 -161 -185 -206 -230 -249 -246 -214 -170 -134 -115 -110 -104 -87 -54 -14 23 47 57 57 58 64 74 81 78 73 79 100 128 151 166 178 192 203 202 186 159 129 102 79 58 35 10 -14 -35 -54 -68 -77 -83 -87 -87 -83 -72 -61 -55 -57 -60 -58 -53 -49 -48 -49 -46 -36 -22 -8 3 11 16 18 20 24 31 37 39 38 37 37 37 35 32 29 26 23 19 16 15 13 11 9 7 4 1 1 1 0 -1 -2 -1 -1 0 1 1 2 2 2 1 0 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 0 +-182 -107 -19 90 184 207 151 93 113 201 268 258 221 243 337 417 406 317 230 215 278 382 480 532 514 429 304 166 34 -88 -181 -210 -158 -51 45 84 69 41 34 48 53 41 30 47 95 139 140 88 11 -65 -129 -190 -247 -280 -270 -226 -174 -139 -126 -120 -113 -105 -110 -127 -152 -164 -157 -129 -86 -34 11 33 25 17 41 89 126 121 94 85 113 152 167 148 107 61 20 -11 -34 -49 -55 -41 -11 24 51 70 87 92 69 25 -17 -29 -21 -16 -34 -67 -97 -107 -91 -62 -44 -48 -67 -81 -77 -63 -51 -50 -75 -127 -194 -250 -290 -327 -370 -406 -405 -358 -288 -229 -200 -193 -185 -157 -99 -25 44 92 112 115 118 132 155 172 169 162 178 227 296 355 398 435 479 515 523 491 430 355 288 229 171 105 31 -42 -111 -174 -226 -264 -290 -311 -322 -313 -280 -242 -227 -238 -258 -260 -245 -232 -236 -246 -237 -194 -122 -43 24 73 104 123 142 180 237 291 321 326 332 350 366 367 353 338 318 291 258 234 222 212 198 173 135 83 41 26 32 27 -3 -35 -35 -4 35 68 106 151 184 181 140 75 -2 -82 -147 -182 -196 -217 -271 -349 -419 -466 -499 -533 -558 -551 -504 -444 -394 -360 -325 -292 -273 -274 -270 -241 -198 -168 -157 -143 -116 -89 -79 -90 -103 -102 -89 -69 -45 -15 29 83 140 192 225 240 236 220 195 162 123 84 64 67 75 56 8 -40 -66 -81 -111 -152 -171 -145 -94 -51 -24 7 58 127 192 236 256 271 303 352 382 365 321 295 309 333 323 280 241 236 250 251 219 176 149 158 194 237 275 310 337 348 332 293 241 184 123 63 13 -26 -57 -104 -173 -254 -321 -360 -379 -391 
-396 -379 -329 -264 -215 -198 -201 -206 -204 -203 -207 -211 -190 -136 -62 5 42 54 60 73 91 107 127 145 142 114 80 70 75 52 -19 -103 -153 -165 -180 -224 -283 -321 -324 -300 -265 -231 -199 -167 -133 -103 -83 -76 -78 -76 -67 -57 -57 -67 -72 -64 -55 -80 -165 -286 +0 -1 -1 0 0 0 0 0 0 1 1 2 2 2 4 6 6 5 4 4 7 10 14 17 18 16 12 7 1 -5 -11 -13 -11 -4 3 6 5 3 3 4 5 4 3 5 11 17 17 11 1 -10 -20 -30 -40 -47 -47 -41 -33 -27 -25 -25 -24 -23 -25 -30 -36 -40 -40 -33 -23 -10 3 9 7 5 12 27 40 39 31 29 39 54 60 55 40 23 7 -5 -14 -21 -24 -18 -5 10 23 32 41 44 33 12 -9 -15 -11 -9 -19 -37 -54 -60 -52 -36 -26 -29 -41 -50 -48 -40 -33 -32 -49 -83 -128 -167 -196 -223 -255 -283 -285 -254 -207 -166 -147 -143 -138 -118 -76 -20 33 71 87 90 94 106 125 140 138 134 148 190 250 302 340 374 415 449 459 433 381 316 258 206 155 95 28 -39 -103 -162 -211 -248 -273 -294 -306 -298 -268 -232 -219 -230 -250 -252 -239 -227 -231 -241 -233 -191 -121 -43 23 72 103 122 141 179 236 290 320 325 331 349 365 367 353 337 317 290 257 233 221 211 197 172 134 82 40 25 31 26 -3 -35 -35 -4 34 66 102 145 177 173 134 71 -2 -78 -139 -172 -184 -203 -252 -323 -386 -427 -455 -484 -504 -495 -450 -395 -348 -316 -284 -254 -236 -235 -230 -204 -167 -141 -130 -118 -95 -73 -64 -72 -82 -80 -70 -54 -35 -12 21 61 103 140 162 171 167 154 135 111 83 56 42 44 48 36 5 -26 -41 -50 -68 -91 -101 -85 -54 -29 -14 3 31 67 101 122 131 136 150 171 183 172 149 134 138 146 139 119 100 96 100 99 84 66 55 57 69 82 93 103 110 111 103 89 71 53 34 17 3 -7 -15 -26 -42 -60 -74 -80 -82 -82 -81 -75 -63 -49 -39 -35 -34 -34 -32 -31 -30 -30 -26 -18 -8 0 4 5 6 7 8 9 10 11 10 8 5 4 4 2 -2 -6 -8 -8 -8 -9 -10 -10 -10 -8 -7 -5 -4 -3 -2 -2 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 0 +171 105 31 -42 -111 -174 -226 -264 -290 -311 -322 -313 -280 -242 -227 -238 -258 -260 -245 -232 -236 -246 -237 -194 -122 -43 24 73 104 123 142 180 237 291 321 326 332 350 366 367 353 338 318 291 258 234 222 212 198 173 135 83 41 26 32 27 -3 -35 -35 -4 35 68 106 151 184 181 140 75 -2 -82 -147 
-182 -196 -217 -271 -349 -419 -466 -499 -533 -558 -551 -504 -444 -394 -360 -325 -292 -273 -274 -270 -241 -198 -168 -157 -143 -116 -89 -79 -90 -103 -102 -89 -69 -45 -15 29 83 140 192 225 240 236 220 195 162 123 84 64 67 75 56 8 -40 -66 -81 -111 -152 -171 -145 -94 -51 -24 7 58 127 192 236 256 271 303 352 382 365 321 295 309 333 323 280 241 236 250 251 219 176 149 158 194 237 275 310 337 348 332 293 241 184 123 63 13 -26 -57 -104 -173 -254 -321 -360 -379 -391 -396 -379 -329 -264 -215 -198 -201 -206 -204 -203 -207 -211 -190 -136 -62 5 42 54 60 73 91 107 127 145 142 114 80 70 75 52 -19 -103 -153 -165 -180 -224 -283 -321 -324 -300 -265 -231 -199 -167 -133 -103 -83 -76 -78 -76 -67 -57 -57 -67 -72 -64 -55 -80 -165 -286 -386 -428 -430 -431 -437 -418 -364 -298 -252 -213 -160 -90 -32 3 36 92 177 268 348 417 474 519 542 542 530 523 527 540 550 561 579 590 572 521 453 377 280 159 44 -29 -70 -119 -188 -245 -263 -263 -271 -279 -252 -199 -151 -116 -65 13 74 88 83 118 188 231 205 147 117 125 136 111 58 10 1 34 94 144 160 155 158 178 199 204 194 185 187 197 210 226 239 237 220 188 147 95 38 -16 -63 -115 -173 -223 -248 -251 -257 -286 -333 -383 -425 -458 -483 -505 -516 -510 -475 -418 -356 -303 -257 -205 -140 -69 -12 23 43 60 78 88 76 47 14 -11 -39 -86 -152 -213 -254 -281 -314 -351 -377 -382 -385 -396 -399 -369 -319 -288 -299 -313 -279 -188 -89 -27 6 52 137 241 332 394 434 460 473 474 466 455 +0 0 0 -1 -1 -1 -1 -1 -2 -2 -3 -3 -3 -3 -3 -4 -5 -5 -6 -6 -7 -7 -8 -7 -5 -2 1 3 5 6 8 10 15 19 22 24 26 29 32 34 34 34 34 32 30 28 28 28 27 24 20 12 6 4 5 4 -1 -7 -7 -1 7 14 23 34 43 43 34 19 -1 -23 -41 -52 -57 -65 -83 -109 -134 -153 -167 -183 -195 -197 -184 -166 -150 -140 -129 -118 -112 -115 -115 -105 -88 -76 -72 -67 -55 -43 -39 -45 -52 -53 -47 -37 -25 -9 15 46 79 110 130 141 141 133 119 100 77 53 41 43 49 37 5 -28 -46 -57 -78 -108 -123 -105 -69 -38 -18 5 43 97 148 183 200 214 241 282 309 297 263 244 257 279 273 238 206 203 216 219 192 155 132 141 174 213 249 282 308 320 306 272 224 172 115 59 12 
-25 -55 -100 -167 -245 -311 -349 -369 -381 -387 -372 -323 -260 -212 -196 -199 -205 -203 -202 -206 -211 -190 -136 -62 4 41 53 59 73 91 106 126 144 141 113 79 69 74 51 -19 -103 -152 -164 -178 -221 -279 -315 -318 -294 -259 -225 -193 -162 -129 -99 -80 -73 -75 -72 -64 -54 -54 -63 -67 -60 -51 -74 -151 -260 -349 -385 -384 -383 -386 -367 -318 -259 -218 -183 -137 -77 -27 2 29 75 144 216 279 332 374 406 421 418 405 396 395 401 405 409 418 422 405 365 314 259 190 107 29 -20 -46 -77 -120 -154 -164 -162 -165 -167 -149 -116 -87 -66 -37 7 40 47 43 61 96 116 101 71 56 59 63 50 26 4 0 14 39 59 64 61 61 67 73 74 69 64 63 65 68 72 74 72 65 54 41 26 10 -5 -17 -29 -42 -53 -57 -56 -56 -60 -68 -76 -81 -85 -87 -88 -86 -82 -74 -63 -52 -42 -35 -27 -18 -9 -2 2 4 5 7 7 6 3 1 -1 -3 -6 -10 -13 -14 -14 -15 -15 -15 -14 -13 -13 -12 -10 -8 -7 -6 -6 -5 -3 -1 -1 0 0 0 1 1 1 0 0 0 0 0 0 +275 310 337 348 332 293 241 184 123 63 13 -26 -57 -104 -173 -254 -321 -360 -379 -391 -396 -379 -329 -264 -215 -198 -201 -206 -204 -203 -207 -211 -190 -136 -62 5 42 54 60 73 91 107 127 145 142 114 80 70 75 52 -19 -103 -153 -165 -180 -224 -283 -321 -324 -300 -265 -231 -199 -167 -133 -103 -83 -76 -78 -76 -67 -57 -57 -67 -72 -64 -55 -80 -165 -286 -386 -428 -430 -431 -437 -418 -364 -298 -252 -213 -160 -90 -32 3 36 92 177 268 348 417 474 519 542 542 530 523 527 540 550 561 579 590 572 521 453 377 280 159 44 -29 -70 -119 -188 -245 -263 -263 -271 -279 -252 -199 -151 -116 -65 13 74 88 83 118 188 231 205 147 117 125 136 111 58 10 1 34 94 144 160 155 158 178 199 204 194 185 187 197 210 226 239 237 220 188 147 95 38 -16 -63 -115 -173 -223 -248 -251 -257 -286 -333 -383 -425 -458 -483 -505 -516 -510 -475 -418 -356 -303 -257 -205 -140 -69 -12 23 43 60 78 88 76 47 14 -11 -39 -86 -152 -213 -254 -281 -314 -351 -377 -382 -385 -396 -399 -369 -319 -288 -299 -313 -279 -188 -89 -27 6 52 137 241 332 394 434 460 473 474 466 455 447 448 449 445 429 410 391 363 316 259 212 195 185 152 85 15 -16 -9 -2 -26 -70 -102 -103 -88 -84 -93 -101 -102 -95 
-82 -62 -37 -20 -22 -36 -51 -60 -66 -66 -58 -50 -45 -37 -13 27 59 66 55 55 80 115 129 124 128 171 240 292 297 273 253 255 259 240 203 181 188 198 177 118 42 -23 -70 -103 -111 -99 -85 -104 -165 -234 -276 -283 -279 -279 -281 -286 -301 -321 -320 -294 -267 -277 -307 -309 -263 -204 -165 -142 -112 -84 -94 -142 -188 -207 -205 -209 -218 -224 -236 -267 -304 -318 -287 -235 -192 -160 -119 -53 27 96 138 169 212 285 377 449 477 480 494 538 580 577 521 450 412 411 409 371 301 224 158 106 66 35 0 -52 -117 -169 -198 -223 -264 -311 -328 -317 -309 -327 -343 -327 -287 -253 -233 +0 0 0 0 0 0 0 0 0 0 0 -1 -1 -2 -3 -4 -6 -7 -8 -10 -11 -11 -11 -9 -8 -8 -9 -10 -11 -11 -12 -13 -13 -10 -5 0 3 4 5 6 8 10 13 16 16 13 10 9 10 7 -3 -16 -25 -28 -32 -40 -53 -62 -64 -61 -56 -50 -45 -39 -32 -25 -21 -20 -21 -21 -19 -17 -17 -20 -22 -20 -18 -27 -56 -98 -135 -153 -157 -161 -166 -162 -144 -120 -104 -90 -69 -40 -15 1 16 42 83 128 169 206 238 265 281 285 283 284 290 301 311 322 336 348 341 315 277 233 175 101 28 -19 -47 -80 -127 -167 -181 -183 -191 -198 -181 -144 -111 -86 -49 9 56 67 64 91 147 182 163 118 94 101 111 91 48 8 0 28 80 124 138 135 138 157 176 182 174 166 169 179 192 207 220 220 205 176 138 89 35 -16 -61 -111 -167 -215 -240 -244 -250 -279 -326 -375 -418 -451 -476 -499 -511 -506 -472 -416 -355 -302 -257 -205 -140 -69 -12 22 42 60 78 87 75 46 13 -11 -39 -86 -152 -212 -253 -279 -312 -348 -373 -377 -379 -389 -391 -361 -311 -280 -290 -303 -269 -181 -86 -26 5 49 129 226 310 367 402 425 435 434 424 412 403 402 400 395 378 359 341 314 272 221 180 164 155 126 70 12 -14 -8 -2 -21 -56 -80 -81 -68 -65 -71 -76 -76 -71 -60 -45 -27 -15 -16 -26 -36 -41 -45 -44 -39 -33 -29 -24 -9 16 36 39 32 32 46 66 73 69 70 92 128 154 154 139 127 126 126 115 95 84 85 88 78 51 17 -10 -29 -42 -44 -39 -33 -39 -61 -84 -97 -97 -94 -92 -90 -90 -92 -96 -94 -84 -74 -75 -81 -79 -66 -50 -39 -33 -25 -19 -20 -29 -37 -40 -38 -38 -38 -38 -38 -42 -46 -46 -40 -32 -25 -20 -14 -6 2 9 13 15 18 24 30 33 34 32 31 32 32 30 25 20 17 16 14 12 9 6 4 
2 1 0 0 -1 -2 -2 -2 -2 -2 -2 -2 -2 -1 -1 -1 -1 -1 -1 0 +187 197 210 226 239 237 220 188 147 95 38 -16 -63 -115 -173 -223 -248 -251 -257 -286 -333 -383 -425 -458 -483 -505 -516 -510 -475 -418 -356 -303 -257 -205 -140 -69 -12 23 43 60 78 88 76 47 14 -11 -39 -86 -152 -213 -254 -281 -314 -351 -377 -382 -385 -396 -399 -369 -319 -288 -299 -313 -279 -188 -89 -27 6 52 137 241 332 394 434 460 473 474 466 455 447 448 449 445 429 410 391 363 316 259 212 195 185 152 85 15 -16 -9 -2 -26 -70 -102 -103 -88 -84 -93 -101 -102 -95 -82 -62 -37 -20 -22 -36 -51 -60 -66 -66 -58 -50 -45 -37 -13 27 59 66 55 55 80 115 129 124 128 171 240 292 297 273 253 255 259 240 203 181 188 198 177 118 42 -23 -70 -103 -111 -99 -85 -104 -165 -234 -276 -283 -279 -279 -281 -286 -301 -321 -320 -294 -267 -277 -307 -309 -263 -204 -165 -142 -112 -84 -94 -142 -188 -207 -205 -209 -218 -224 -236 -267 -304 -318 -287 -235 -192 -160 -119 -53 27 96 138 169 212 285 377 449 477 480 494 538 580 577 521 450 412 411 409 371 301 224 158 106 66 35 0 -52 -117 -169 -198 -223 -264 -311 -328 -317 -309 -327 -343 -327 -287 -253 -233 -201 -142 -70 -14 24 54 83 98 99 99 107 114 98 65 33 24 40 64 84 96 108 134 163 185 190 192 204 228 255 281 309 338 349 326 283 253 249 251 238 209 179 151 115 69 23 -19 -64 -124 -191 -251 -297 -329 -354 -377 -400 -420 -429 -423 -404 -383 -370 -368 -381 -401 -404 -378 -331 -298 -290 -278 -235 -176 -139 -140 -153 -148 -140 -158 -193 -213 -203 -175 -154 -147 -135 -108 -71 -26 28 89 145 182 204 234 287 355 413 441 446 457 481 486 444 365 294 255 231 196 146 106 81 59 21 -40 -100 -137 -145 -134 -130 -150 -169 -159 -127 -103 -109 -123 -113 -86 -72 -77 -74 -45 -9 13 40 96 169 216 219 211 225 256 284 310 341 363 352 307 260 231 211 176 131 100 91 88 75 48 16 -7 +0 0 0 0 0 0 0 0 0 0 0 -1 -1 -2 -3 -4 -5 -5 -6 -7 -9 -11 -14 -16 -18 -20 -23 -24 -24 -23 -21 -19 -17 -14 -11 -6 -1 1 3 5 7 9 8 5 1 -2 -5 -12 -22 -31 -38 -44 -51 -59 -65 -69 -71 -76 -79 -75 -67 -63 -67 -72 -66 -46 -23 -7 1 14 37 68 96 117 132 143 151 
154 155 155 156 159 163 165 162 158 154 146 129 108 90 84 81 68 38 6 -8 -5 -1 -13 -36 -53 -54 -47 -45 -51 -56 -58 -54 -48 -37 -22 -12 -14 -23 -32 -38 -42 -43 -38 -33 -30 -25 -9 18 41 46 39 39 57 84 95 92 96 129 183 225 231 214 200 203 208 194 165 148 155 165 148 99 35 -20 -61 -90 -97 -87 -76 -93 -148 -211 -249 -257 -255 -256 -259 -265 -280 -300 -300 -277 -252 -263 -292 -295 -252 -196 -159 -138 -109 -82 -92 -139 -185 -204 -202 -206 -216 -222 -234 -265 -302 -317 -286 -235 -192 -160 -119 -53 26 95 138 169 211 284 376 448 476 478 492 535 576 573 516 445 407 405 403 364 295 219 154 103 64 33 0 -51 -113 -162 -189 -213 -251 -294 -309 -297 -289 -304 -318 -301 -263 -231 -212 -182 -128 -63 -13 21 47 72 84 85 84 91 96 82 54 27 19 32 51 67 76 85 105 126 142 145 145 153 169 188 205 223 242 247 228 196 173 169 168 158 137 116 97 73 43 14 -12 -39 -75 -113 -147 -171 -187 -198 -208 -218 -225 -227 -220 -207 -193 -184 -180 -184 -190 -188 -173 -149 -132 -126 -119 -99 -73 -56 -56 -60 -57 -53 -58 -69 -75 -70 -59 -51 -47 -43 -33 -22 -8 7 24 39 47 52 58 69 83 94 97 96 95 97 95 84 67 52 43 38 31 22 15 11 8 2 -6 -13 -17 -17 -15 -14 -15 -16 -15 -11 -9 -9 -9 -8 -6 -5 -5 -4 -3 -1 0 1 3 5 6 6 5 5 5 5 5 4 4 3 2 2 1 1 0 0 0 0 0 0 0 0 0 +-283 -279 -279 -281 -286 -301 -321 -320 -294 -267 -277 -307 -309 -263 -204 -165 -142 -112 -84 -94 -142 -188 -207 -205 -209 -218 -224 -236 -267 -304 -318 -287 -235 -192 -160 -119 -53 27 96 138 169 212 285 377 449 477 480 494 538 580 577 521 450 412 411 409 371 301 224 158 106 66 35 0 -52 -117 -169 -198 -223 -264 -311 -328 -317 -309 -327 -343 -327 -287 -253 -233 -201 -142 -70 -14 24 54 83 98 99 99 107 114 98 65 33 24 40 64 84 96 108 134 163 185 190 192 204 228 255 281 309 338 349 326 283 253 249 251 238 209 179 151 115 69 23 -19 -64 -124 -191 -251 -297 -329 -354 -377 -400 -420 -429 -423 -404 -383 -370 -368 -381 -401 -404 -378 -331 -298 -290 -278 -235 -176 -139 -140 -153 -148 -140 -158 -193 -213 -203 -175 -154 -147 -135 -108 -71 -26 28 89 145 182 204 234 287 355 413 
441 446 457 481 486 444 365 294 255 231 196 146 106 81 59 21 -40 -100 -137 -145 -134 -130 -150 -169 -159 -127 -103 -109 -123 -113 -86 -72 -77 -74 -45 -9 13 40 96 169 216 219 211 225 256 284 310 341 363 352 307 260 231 211 176 131 100 91 88 75 48 16 -7 -9 8 27 23 -8 -43 -60 -57 -50 -58 -83 -114 -147 -173 -190 -198 -205 -224 -258 -302 -344 -363 -356 -340 -338 -353 -367 -363 -340 -300 -244 -177 -118 -81 -58 -17 51 116 145 145 144 163 186 200 204 198 179 146 110 91 91 106 121 117 84 30 -17 -38 -45 -67 -108 -137 -129 -103 -97 -125 -157 -163 -142 -112 -86 -69 -60 -60 -62 -57 -40 -15 8 19 21 16 15 22 33 37 32 22 13 -4 -26 -40 -27 2 18 2 -23 -22 15 62 89 96 105 123 142 159 174 190 204 211 219 235 251 252 231 207 200 211 223 221 202 170 143 123 113 98 63 11 -35 -58 -59 -52 -43 -27 -18 -33 -66 -91 -82 -59 -58 -88 -114 -105 -72 -54 -75 -111 -134 -140 -149 -174 -199 -207 -194 -175 -166 -170 -180 -185 +0 -1 -1 -1 -1 -1 -1 -2 -2 -2 -2 -3 -3 -3 -3 -3 -3 -3 -2 -3 -4 -6 -7 -7 -8 -9 -10 -11 -14 -17 -18 -18 -15 -13 -12 -10 -5 2 8 12 16 21 30 42 52 58 61 65 74 83 86 80 72 68 70 72 68 57 44 32 22 14 7 0 -13 -29 -43 -51 -59 -72 -87 -93 -93 -93 -100 -108 -105 -94 -85 -80 -71 -51 -26 -6 9 20 32 39 40 41 45 49 43 29 15 11 18 30 41 47 54 68 84 97 101 104 112 127 144 161 179 199 208 197 173 157 156 159 153 136 117 100 77 46 15 -14 -45 -88 -137 -182 -217 -243 -264 -284 -304 -322 -331 -330 -317 -303 -295 -296 -309 -328 -332 -313 -276 -251 -246 -237 -202 -152 -121 -123 -135 -131 -125 -142 -174 -193 -185 -160 -142 -136 -125 -101 -67 -25 26 83 137 173 194 224 275 341 399 427 433 445 469 475 435 358 289 251 228 194 144 105 80 58 20 -40 -100 -137 -145 -134 -130 -150 -169 -159 -127 -103 -109 -123 -113 -86 -72 -77 -74 -45 -9 12 39 94 166 211 214 206 219 248 275 299 328 348 336 292 247 218 199 165 122 93 84 81 69 43 14 -7 -9 7 24 20 -8 -38 -53 -50 -44 -50 -71 -97 -124 -145 -158 -163 -168 -182 -208 -241 -273 -285 -277 -263 -259 -268 -276 -271 -251 -220 -177 -127 -84 -57 -41 -12 34 78 96 95 93 104 118 
125 126 121 108 87 64 52 52 60 67 64 45 16 -9 -20 -24 -34 -54 -67 -62 -49 -46 -58 -71 -72 -62 -48 -36 -29 -25 -24 -25 -22 -15 -6 2 6 7 5 4 7 10 11 9 6 3 -2 -8 -11 -7 0 4 0 -6 -5 3 12 18 18 19 22 25 27 28 30 31 31 31 32 33 32 28 24 22 22 22 21 18 15 12 9 8 7 4 0 -3 -4 -4 -3 -2 -2 -1 -2 -3 -3 -3 -2 -2 -2 -3 -2 -2 -1 -1 -2 -2 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 0 +-203 -175 -154 -147 -135 -108 -71 -26 28 89 145 182 204 234 287 355 413 441 446 457 481 486 444 365 294 255 231 196 146 106 81 59 21 -40 -100 -137 -145 -134 -130 -150 -169 -159 -127 -103 -109 -123 -113 -86 -72 -77 -74 -45 -9 13 40 96 169 216 219 211 225 256 284 310 341 363 352 307 260 231 211 176 131 100 91 88 75 48 16 -7 -9 8 27 23 -8 -43 -60 -57 -50 -58 -83 -114 -147 -173 -190 -198 -205 -224 -258 -302 -344 -363 -356 -340 -338 -353 -367 -363 -340 -300 -244 -177 -118 -81 -58 -17 51 116 145 145 144 163 186 200 204 198 179 146 110 91 91 106 121 117 84 30 -17 -38 -45 -67 -108 -137 -129 -103 -97 -125 -157 -163 -142 -112 -86 -69 -60 -60 -62 -57 -40 -15 8 19 21 16 15 22 33 37 32 22 13 -4 -26 -40 -27 2 18 2 -23 -22 15 62 89 96 105 123 142 159 174 190 204 211 219 235 251 252 231 207 200 211 223 221 202 170 143 123 113 98 63 11 -35 -58 -59 -52 -43 -27 -18 -33 -66 -91 -82 -59 -58 -88 -114 -105 -72 -54 -75 -111 -134 -140 -149 -174 -199 -207 -194 -175 -166 -170 -180 -185 -183 -174 -166 -158 -149 -135 -112 -85 -57 -35 -16 1 19 41 62 81 101 120 131 131 123 121 126 129 124 109 88 59 22 -19 -58 -96 -129 -147 -137 -116 -105 -111 -120 -117 -106 -97 -94 -92 -95 -102 -98 -73 -40 -27 -43 -61 -57 -40 -32 -31 -17 14 40 45 40 53 85 117 140 154 167 172 172 175 183 183 165 140 132 148 166 169 161 160 174 187 180 147 107 80 74 77 74 68 64 66 57 26 -22 -64 -92 -109 -115 -113 -110 -114 -123 -126 -120 -116 -121 -125 -111 -87 -74 -70 -48 -1 44 54 37 19 21 31 45 64 87 103 99 82 69 66 73 85 90 80 56 42 51 65 53 21 1 16 45 63 71 78 74 50 18 6 14 7 -30 -69 -86 -104 -154 -221 -257 -243 -223 -236 +0 -1 -1 -1 -1 -1 -1 -1 0 0 0 1 1 2 3 5 6 8 9 10 12 13 
13 12 10 10 9 8 7 5 4 3 1 -3 -8 -11 -12 -12 -12 -14 -17 -17 -14 -12 -13 -16 -15 -12 -10 -12 -12 -7 -2 2 6 17 31 41 43 42 47 55 63 70 80 87 87 78 68 62 58 49 38 29 27 27 23 15 5 -3 -4 2 9 8 -4 -17 -24 -23 -21 -25 -36 -50 -65 -78 -87 -92 -97 -108 -126 -150 -174 -186 -185 -180 -181 -192 -203 -203 -193 -173 -143 -105 -71 -50 -36 -11 32 73 93 94 94 108 125 136 140 137 125 103 78 65 66 78 90 87 63 22 -14 -30 -36 -53 -87 -111 -105 -85 -80 -104 -131 -137 -121 -96 -74 -60 -53 -53 -55 -51 -36 -14 7 17 19 14 13 20 30 34 29 20 12 -4 -25 -39 -26 1 17 1 -23 -22 14 60 86 93 103 120 139 156 172 188 202 209 217 233 250 251 230 206 199 210 222 221 202 169 142 122 112 97 62 10 -35 -58 -59 -52 -43 -27 -18 -33 -65 -90 -81 -58 -57 -86 -111 -102 -70 -52 -72 -106 -128 -133 -141 -164 -187 -194 -181 -162 -153 -156 -165 -168 -166 -157 -149 -141 -132 -119 -98 -74 -50 -30 -14 0 15 34 51 66 82 97 105 104 97 94 98 99 94 82 66 43 16 -14 -42 -69 -92 -104 -96 -80 -72 -75 -80 -78 -69 -63 -60 -58 -59 -63 -60 -44 -24 -16 -25 -35 -32 -23 -18 -17 -9 7 20 22 19 25 40 55 65 70 74 75 74 74 76 75 66 55 51 56 61 61 57 55 59 62 58 46 33 24 22 22 20 18 17 17 14 6 -6 -16 -22 -25 -25 -24 -23 -23 -24 -24 -22 -20 -21 -21 -18 -13 -11 -10 -7 -1 5 6 4 2 2 3 4 5 7 8 7 5 4 4 4 4 4 3 2 1 2 2 1 0 0 0 1 1 1 1 1 0 0 0 0 0 -1 -1 -1 -1 -1 -1 -1 -1 -1 0 +21 16 15 22 33 37 32 22 13 -4 -26 -40 -27 2 18 2 -23 -22 15 62 89 96 105 123 142 159 174 190 204 211 219 235 251 252 231 207 200 211 223 221 202 170 143 123 113 98 63 11 -35 -58 -59 -52 -43 -27 -18 -33 -66 -91 -82 -59 -58 -88 -114 -105 -72 -54 -75 -111 -134 -140 -149 -174 -199 -207 -194 -175 -166 -170 -180 -185 -183 -174 -166 -158 -149 -135 -112 -85 -57 -35 -16 1 19 41 62 81 101 120 131 131 123 121 126 129 124 109 88 59 22 -19 -58 -96 -129 -147 -137 -116 -105 -111 -120 -117 -106 -97 -94 -92 -95 -102 -98 -73 -40 -27 -43 -61 -57 -40 -32 -31 -17 14 40 45 40 53 85 117 140 154 167 172 172 175 183 183 165 140 132 148 166 169 161 160 174 187 180 147 107 80 74 77 74 68 64 66 57 26 
-22 -64 -92 -109 -115 -113 -110 -114 -123 -126 -120 -116 -121 -125 -111 -87 -74 -70 -48 -1 44 54 37 19 21 31 45 64 87 103 99 82 69 66 73 85 90 80 56 42 51 65 53 21 1 16 45 63 71 78 74 50 18 6 14 7 -30 -69 -86 -104 -154 -221 -257 -243 -223 -236 -270 -285 -276 -277 -301 -323 -308 -265 -233 -234 -255 -267 -251 -215 -172 -130 -98 -79 -71 -60 -34 0 31 60 98 146 188 208 211 224 250 268 263 246 242 258 270 265 241 214 194 180 172 169 166 165 166 165 151 127 107 106 118 115 90 60 48 56 58 34 -10 -51 -74 -79 -72 -53 -29 -12 -17 -37 -44 -27 2 20 26 33 44 44 29 13 13 19 13 -7 -31 -46 -56 -63 -65 -70 -81 -95 -99 -93 -100 -126 -153 -157 -139 -125 -122 -120 -109 -94 -83 -73 -64 -65 -76 -86 -83 -72 -61 -52 -28 16 64 99 115 117 108 93 85 88 94 83 47 3 -24 -31 -31 -46 -77 -111 -132 -136 -125 -107 -93 -92 -104 -118 -117 -101 -89 -91 -104 -110 -105 -94 -82 -73 -61 -50 -39 -32 -33 -41 -39 -13 +0 0 0 0 0 0 0 0 0 -1 -1 -1 -1 0 0 0 -1 -1 0 1 2 2 3 4 5 6 7 8 10 11 12 14 15 17 16 15 15 17 19 20 19 17 15 13 13 11 8 1 -5 -9 -9 -9 -7 -5 -4 -6 -13 -18 -17 -12 -13 -19 -26 -25 -17 -14 -19 -29 -36 -38 -42 -50 -58 -62 -60 -55 -54 -56 -61 -64 -64 -63 -61 -59 -57 -53 -45 -35 -24 -15 -7 0 8 18 28 37 47 57 63 64 61 61 65 68 66 59 48 32 12 -11 -34 -57 -78 -89 -84 -72 -66 -71 -78 -77 -70 -65 -64 -63 -66 -71 -69 -52 -29 -20 -32 -45 -43 -31 -25 -24 -14 10 31 35 31 42 68 95 115 127 139 144 145 148 156 157 143 122 115 130 147 150 144 144 157 170 164 135 98 74 68 72 69 64 60 62 54 24 -22 -62 -89 -106 -112 -111 -108 -112 -121 -124 -119 -115 -120 -124 -111 -87 -74 -70 -48 -1 43 53 36 18 20 31 45 63 86 102 98 81 68 65 72 84 89 79 55 41 50 64 52 20 0 15 43 61 68 75 71 48 17 5 13 6 -29 -65 -81 -97 -143 -205 -237 -223 -204 -214 -244 -256 -247 -246 -266 -284 -269 -230 -201 -201 -217 -226 -211 -180 -143 -107 -80 -64 -58 -48 -27 0 24 46 74 110 141 154 155 163 180 192 186 172 168 177 183 178 160 140 126 115 109 106 103 101 100 98 89 73 61 60 65 63 48 32 25 29 29 17 -5 -25 -36 -38 -34 -25 -14 -6 -8 -16 -19 -12 0 7 10 
12 16 16 10 4 4 6 4 -3 -10 -15 -17 -19 -19 -20 -22 -25 -26 -24 -25 -30 -36 -35 -30 -27 -25 -24 -21 -18 -15 -13 -11 -11 -12 -13 -12 -10 -9 -7 -4 1 7 10 11 11 10 8 7 7 7 5 3 0 -2 -2 -2 -3 -4 -5 -6 -5 -5 -4 -3 -3 -3 -3 -3 -2 -2 -2 -2 -2 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 0 +174 187 180 147 107 80 74 77 74 68 64 66 57 26 -22 -64 -92 -109 -115 -113 -110 -114 -123 -126 -120 -116 -121 -125 -111 -87 -74 -70 -48 -1 44 54 37 19 21 31 45 64 87 103 99 82 69 66 73 85 90 80 56 42 51 65 53 21 1 16 45 63 71 78 74 50 18 6 14 7 -30 -69 -86 -104 -154 -221 -257 -243 -223 -236 -270 -285 -276 -277 -301 -323 -308 -265 -233 -234 -255 -267 -251 -215 -172 -130 -98 -79 -71 -60 -34 0 31 60 98 146 188 208 211 224 250 268 263 246 242 258 270 265 241 214 194 180 172 169 166 165 166 165 151 127 107 106 118 115 90 60 48 56 58 34 -10 -51 -74 -79 -72 -53 -29 -12 -17 -37 -44 -27 2 20 26 33 44 44 29 13 13 19 13 -7 -31 -46 -56 -63 -65 -70 -81 -95 -99 -93 -100 -126 -153 -157 -139 -125 -122 -120 -109 -94 -83 -73 -64 -65 -76 -86 -83 -72 -61 -52 -28 16 64 99 115 117 108 93 85 88 94 83 47 3 -24 -31 -31 -46 -77 -111 -132 -136 -125 -107 -93 -92 -104 -118 -117 -101 -89 -91 -104 -110 -105 -94 -82 -73 -61 -50 -39 -32 -33 -41 -39 -13 18 27 6 -14 -2 30 47 41 39 58 82 100 124 173 226 243 224 211 225 244 236 211 210 237 256 240 206 185 179 165 131 94 68 51 30 3 -16 -21 -28 -55 -92 -110 -100 -87 -95 -124 -143 -140 -128 -120 -116 -102 -84 -73 -68 -67 -71 -82 -85 -60 -21 -12 -38 -64 -52 -20 -12 -39 -70 -68 -34 6 33 43 48 71 120 171 182 157 142 169 203 189 132 89 84 82 47 7 -1 13 -7 -76 -150 -187 -192 -209 -242 -269 -268 -249 -236 -237 -248 -255 -259 -264 -277 -290 -288 -267 -240 -226 -228 -223 -189 -135 -88 -58 -30 23 89 137 141 124 126 162 213 252 266 271 273 277 285 286 271 241 217 216 227 221 199 191 218 254 246 179 90 31 13 14 8 -10 -32 -51 -67 -86 -110 -142 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 -1 -1 -2 -3 -3 -3 -3 -4 -4 -5 -5 -5 -6 -6 -6 -5 -5 -5 -4 -1 3 4 2 1 1 2 4 6 9 11 11 10 8 8 10 12 13 12 8 6 8 11 9 3 0 3 9 13 15 17 
17 12 4 1 3 1 -9 -20 -26 -31 -47 -70 -83 -80 -75 -81 -95 -102 -101 -103 -115 -126 -122 -107 -96 -98 -109 -116 -111 -97 -79 -61 -47 -38 -35 -30 -18 0 16 31 52 79 103 116 119 128 145 158 157 148 148 160 169 168 155 139 127 119 115 114 114 114 116 117 108 91 78 78 87 86 68 45 37 43 45 26 -8 -41 -60 -65 -60 -44 -25 -11 -15 -32 -38 -24 1 17 22 29 39 39 26 11 11 17 11 -7 -29 -43 -53 -59 -62 -67 -77 -91 -95 -90 -97 -122 -148 -153 -136 -122 -120 -118 -107 -93 -82 -73 -64 -65 -76 -86 -83 -72 -61 -52 -28 15 63 98 114 117 108 92 84 87 93 82 46 2 -24 -31 -31 -46 -77 -110 -131 -135 -123 -105 -92 -90 -102 -115 -114 -98 -86 -88 -100 -105 -100 -90 -78 -69 -58 -47 -37 -30 -31 -38 -36 -12 16 24 5 -13 -2 26 41 35 33 49 69 84 104 144 187 199 182 170 180 194 186 165 163 182 195 181 154 137 132 120 94 67 48 35 20 2 -11 -15 -19 -37 -60 -71 -64 -55 -59 -76 -87 -84 -76 -70 -67 -58 -47 -41 -37 -36 -38 -43 -44 -31 -11 -6 -19 -31 -25 -10 -6 -18 -31 -29 -15 2 13 16 18 26 44 62 64 54 48 56 66 60 41 27 25 23 13 1 -1 3 -2 -19 -37 -45 -44 -47 -53 -57 -55 -49 -45 -44 -45 -44 -44 -43 -43 -44 -42 -37 -32 -29 -28 -27 -22 -15 -10 -6 -3 2 7 10 10 8 8 10 12 14 14 13 12 11 11 10 9 7 6 5 5 4 3 3 3 3 2 1 0 0 0 0 0 -1 -1 -1 -1 -1 -1 0 +13 19 13 -7 -31 -46 -56 -63 -65 -70 -81 -95 -99 -93 -100 -126 -153 -157 -139 -125 -122 -120 -109 -94 -83 -73 -64 -65 -76 -86 -83 -72 -61 -52 -28 16 64 99 115 117 108 93 85 88 94 83 47 3 -24 -31 -31 -46 -77 -111 -132 -136 -125 -107 -93 -92 -104 -118 -117 -101 -89 -91 -104 -110 -105 -94 -82 -73 -61 -50 -39 -32 -33 -41 -39 -13 18 27 6 -14 -2 30 47 41 39 58 82 100 124 173 226 243 224 211 225 244 236 211 210 237 256 240 206 185 179 165 131 94 68 51 30 3 -16 -21 -28 -55 -92 -110 -100 -87 -95 -124 -143 -140 -128 -120 -116 -102 -84 -73 -68 -67 -71 -82 -85 -60 -21 -12 -38 -64 -52 -20 -12 -39 -70 -68 -34 6 33 43 48 71 120 171 182 157 142 169 203 189 132 89 84 82 47 7 -1 13 -7 -76 -150 -187 -192 -209 -242 -269 -268 -249 -236 -237 -248 -255 -259 -264 -277 -290 -288 -267 -240 -226 -228 
-223 -189 -135 -88 -58 -30 23 89 137 141 124 126 162 213 252 266 271 273 277 285 286 271 241 217 216 227 221 199 191 218 254 246 179 90 31 13 14 8 -10 -32 -51 -67 -86 -110 -142 -168 -173 -149 -119 -112 -130 -143 -118 -60 -3 27 32 43 69 99 113 113 119 149 199 241 250 227 202 195 195 173 123 79 71 96 108 77 22 -20 -32 -33 -45 -64 -85 -110 -150 -199 -228 -220 -198 -198 -225 -249 -249 -233 -226 -235 -246 -247 -243 -236 -218 -169 -98 -38 -21 -42 -65 -53 -5 49 72 60 36 29 51 94 127 130 101 77 86 123 149 138 108 85 69 47 25 34 65 65 -8 -113 -170 -141 -70 -25 -47 -114 -182 -199 -140 -31 60 81 44 -3 -23 -12 23 73 117 130 104 63 44 55 80 94 85 61 41 46 61 53 15 -24 -30 -13 -14 -52 -95 -112 -99 -73 -53 -49 -64 -88 -94 -75 -54 -51 -51 -27 13 29 8 -16 -15 -3 -3 -6 1 10 1 -11 7 54 93 98 92 +0 0 0 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -2 -2 -2 -3 -3 -3 -3 -4 -4 -4 -4 -4 -3 -3 -3 -4 -5 -5 -5 -4 -4 -3 1 5 8 10 10 10 9 9 9 11 10 5 0 -4 -5 -5 -8 -13 -19 -23 -25 -24 -21 -19 -19 -22 -26 -26 -24 -21 -23 -26 -29 -28 -26 -23 -21 -18 -15 -12 -10 -11 -14 -14 -5 6 9 2 -6 -1 11 18 16 15 24 34 43 54 77 103 112 105 101 109 121 118 107 109 125 137 130 113 103 101 94 76 55 40 30 18 1 -11 -14 -19 -36 -61 -74 -68 -60 -66 -87 -101 -100 -92 -87 -85 -76 -63 -55 -52 -52 -55 -64 -67 -48 -17 -10 -31 -53 -43 -17 -11 -33 -60 -58 -30 5 28 37 42 62 106 152 163 141 128 154 185 173 122 82 78 76 44 6 -1 12 -7 -73 -145 -181 -186 -203 -236 -263 -262 -244 -232 -234 -245 -252 -257 -262 -275 -289 -287 -266 -240 -226 -228 -223 -189 -135 -88 -58 -30 22 88 136 140 123 125 161 212 250 264 268 270 273 281 281 266 236 212 210 221 214 192 184 210 243 235 170 85 29 12 13 7 -10 -30 -48 -62 -79 -101 -129 -152 -156 -133 -106 -99 -115 -125 -103 -52 -3 22 27 36 57 81 92 92 96 119 158 190 196 176 155 149 147 129 91 58 51 69 77 54 15 -14 -23 -23 -31 -43 -56 -72 -97 -127 -144 -137 -122 -120 -135 -147 -145 -134 -129 -132 -136 -135 -131 -125 -114 -87 -50 -19 -11 -21 -31 -25 -3 21 31 26 15 12 20 37 50 50 38 28 31 43 52 47 36 27 22 14 7 10 18 
18 -3 -31 -45 -37 -18 -7 -12 -27 -41 -43 -30 -7 11 15 8 -1 -4 -2 3 11 17 18 14 8 5 6 9 10 9 6 4 4 5 4 1 -2 -3 -1 -1 -4 -6 -6 -5 -4 -3 -2 -3 -3 -3 -3 -2 -2 -2 -1 0 0 0 -1 -1 -1 -1 -1 0 0 0 -1 0 0 0 0 0 +142 169 203 189 132 89 84 82 47 7 -1 13 -7 -76 -150 -187 -192 -209 -242 -269 -268 -249 -236 -237 -248 -255 -259 -264 -277 -290 -288 -267 -240 -226 -228 -223 -189 -135 -88 -58 -30 23 89 137 141 124 126 162 213 252 266 271 273 277 285 286 271 241 217 216 227 221 199 191 218 254 246 179 90 31 13 14 8 -10 -32 -51 -67 -86 -110 -142 -168 -173 -149 -119 -112 -130 -143 -118 -60 -3 27 32 43 69 99 113 113 119 149 199 241 250 227 202 195 195 173 123 79 71 96 108 77 22 -20 -32 -33 -45 -64 -85 -110 -150 -199 -228 -220 -198 -198 -225 -249 -249 -233 -226 -235 -246 -247 -243 -236 -218 -169 -98 -38 -21 -42 -65 -53 -5 49 72 60 36 29 51 94 127 130 101 77 86 123 149 138 108 85 69 47 25 34 65 65 -8 -113 -170 -141 -70 -25 -47 -114 -182 -199 -140 -31 60 81 44 -3 -23 -12 23 73 117 130 104 63 44 55 80 94 85 61 41 46 61 53 15 -24 -30 -13 -14 -52 -95 -112 -99 -73 -53 -49 -64 -88 -94 -75 -54 -51 -51 -27 13 29 8 -16 -15 -3 -3 -6 1 10 1 -11 7 54 93 98 92 97 101 86 65 69 92 96 68 36 28 34 35 38 54 67 51 8 -14 10 58 80 60 26 17 42 70 62 19 -23 -23 6 25 16 4 18 40 34 -1 -26 -9 33 55 35 -9 -54 -92 -120 -135 -127 -99 -74 -83 -127 -171 -169 -116 -59 -50 -85 -119 -115 -90 -85 -109 -127 -112 -75 -51 -47 -37 -4 33 35 4 -19 9 78 141 167 162 145 123 105 110 139 159 133 69 30 57 118 142 107 60 56 105 169 206 202 166 122 89 74 61 31 -16 -60 -97 -142 -197 -231 -219 -179 -157 -169 -175 -140 -81 -44 -51 -76 -88 -84 -80 -84 -102 -127 -143 -140 -126 -127 -158 -189 -186 -137 -69 -23 -18 -38 -35 20 102 133 76 0 0 76 126 78 5 29 155 258 236 137 92 148 242 287 271 +0 0 0 0 0 0 0 0 0 0 -1 0 -1 -1 -2 -3 -4 -4 -6 -7 -7 -8 -8 -8 -10 -11 -12 -13 -14 -16 -17 -17 -16 -16 -17 -17 -16 -12 -8 -6 -3 2 9 15 16 15 16 21 29 36 39 41 43 46 49 50 49 45 42 43 47 47 44 43 51 61 61 45 23 8 3 3 2 -3 -10 -16 -22 -29 -37 -49 -59 -62 -55 
-45 -43 -51 -57 -48 -25 -2 11 13 18 30 45 52 53 57 72 98 121 127 117 106 104 105 95 68 44 40 55 63 46 13 -13 -20 -21 -29 -42 -56 -73 -100 -134 -156 -152 -138 -140 -160 -179 -181 -171 -167 -175 -185 -188 -186 -183 -170 -133 -78 -31 -17 -35 -54 -44 -5 40 60 50 30 24 43 81 110 114 89 68 76 110 134 125 98 77 63 43 23 31 60 61 -8 -108 -162 -135 -68 -25 -46 -111 -177 -194 -137 -31 58 79 43 -3 -23 -12 22 72 116 129 103 62 43 54 79 93 84 60 41 46 60 52 14 -24 -30 -13 -14 -52 -95 -112 -99 -73 -53 -49 -64 -87 -93 -74 -53 -50 -50 -27 12 27 7 -16 -15 -3 -3 -6 0 9 0 -11 6 49 85 89 83 87 90 76 57 60 80 83 58 31 23 28 29 31 45 55 41 6 -12 8 46 63 47 20 13 32 53 46 14 -17 -17 4 17 11 2 12 27 23 -1 -18 -6 21 35 22 -6 -34 -57 -73 -81 -75 -58 -43 -48 -71 -95 -92 -63 -32 -26 -44 -60 -58 -44 -41 -52 -60 -52 -34 -23 -21 -16 -2 13 14 1 -8 3 29 51 59 56 49 41 34 35 43 48 39 20 8 15 31 37 27 14 13 24 38 45 43 34 24 17 14 11 5 -3 -10 -16 -22 -30 -34 -31 -24 -21 -21 -21 -16 -9 -5 -5 -8 -8 -8 -7 -7 -8 -9 -10 -9 -8 -7 -8 -9 -8 -6 -3 -1 -1 -2 -1 0 2 2 1 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 +138 108 85 69 47 25 34 65 65 -8 -113 -170 -141 -70 -25 -47 -114 -182 -199 -140 -31 60 81 44 -3 -23 -12 23 73 117 130 104 63 44 55 80 94 85 61 41 46 61 53 15 -24 -30 -13 -14 -52 -95 -112 -99 -73 -53 -49 -64 -88 -94 -75 -54 -51 -51 -27 13 29 8 -16 -15 -3 -3 -6 1 10 1 -11 7 54 93 98 92 97 101 86 65 69 92 96 68 36 28 34 35 38 54 67 51 8 -14 10 58 80 60 26 17 42 70 62 19 -23 -23 6 25 16 4 18 40 34 -1 -26 -9 33 55 35 -9 -54 -92 -120 -135 -127 -99 -74 -83 -127 -171 -169 -116 -59 -50 -85 -119 -115 -90 -85 -109 -127 -112 -75 -51 -47 -37 -4 33 35 4 -19 9 78 141 167 162 145 123 105 110 139 159 133 69 30 57 118 142 107 60 56 105 169 206 202 166 122 89 74 61 31 -16 -60 -97 -142 -197 -231 -219 -179 -157 -169 -175 -140 -81 -44 -51 -76 -88 -84 -80 -84 -102 -127 -143 -140 -126 -127 -158 -189 -186 -137 -69 -23 -18 -38 -35 20 102 133 76 0 0 76 126 78 5 29 155 258 236 137 92 148 242 287 271 238 225 231 236 228 205 173 139 113 95 75 
43 4 -20 -15 9 30 32 22 5 -21 -55 -67 -37 10 11 -57 -148 -193 -181 -164 -191 -247 -290 -305 -311 -329 -350 -350 -325 -298 -282 -265 -228 -182 -157 -159 -148 -84 -1 31 -6 -40 -1 85 122 81 23 17 49 71 81 114 160 168 133 120 172 230 206 111 55 93 156 148 79 47 82 113 79 22 19 63 68 -3 -79 -79 -24 14 11 19 61 89 53 -11 -21 40 94 77 12 -34 -43 -52 -71 -59 -1 53 43 -9 -35 -3 44 63 72 96 117 98 50 29 39 28 -33 -91 -80 -24 -8 -60 -109 -85 -16 19 0 -8 37 101 126 106 80 66 54 28 9 9 20 24 20 19 21 17 4 -9 -9 6 34 62 72 46 -11 -65 -78 +0 0 0 0 0 0 0 0 0 -1 -1 -2 -2 -1 -1 -1 -2 -4 -5 -4 -1 1 2 1 -1 -1 -1 1 3 6 7 6 4 2 3 6 7 7 5 3 4 6 5 1 -3 -4 -2 -2 -8 -14 -17 -16 -12 -9 -9 -12 -17 -18 -15 -11 -11 -12 -6 2 6 1 -4 -4 -1 -1 -2 0 2 0 -4 2 17 30 32 31 33 36 31 24 26 35 37 27 14 11 14 15 16 24 30 23 3 -7 4 28 40 30 13 8 22 38 34 10 -14 -14 3 14 9 2 11 24 21 -1 -17 -6 21 36 23 -7 -38 -64 -85 -96 -92 -72 -55 -62 -95 -129 -129 -89 -46 -39 -67 -95 -92 -73 -69 -89 -105 -93 -63 -43 -40 -32 -4 28 30 3 -17 7 69 125 149 146 131 112 96 101 128 147 124 64 28 53 111 134 102 57 53 101 163 199 196 161 119 87 72 59 30 -16 -60 -97 -141 -196 -230 -219 -179 -157 -169 -175 -140 -81 -44 -51 -76 -88 -84 -80 -84 -102 -127 -143 -140 -126 -127 -157 -188 -184 -136 -68 -23 -18 -38 -35 19 99 128 73 0 0 72 120 74 4 27 145 241 220 127 85 136 221 261 245 214 201 206 209 201 179 150 120 97 81 63 36 3 -17 -13 7 24 25 17 3 -17 -44 -53 -29 7 8 -43 -111 -143 -133 -119 -137 -176 -204 -212 -214 -224 -236 -234 -214 -194 -182 -169 -144 -113 -97 -97 -89 -50 -1 17 -4 -23 -1 46 65 42 11 8 24 35 39 54 75 78 60 53 75 99 87 46 22 37 61 57 29 17 29 40 27 7 6 20 21 -1 -25 -24 -7 3 3 5 16 22 13 -3 -5 9 20 16 2 -7 -9 -10 -14 -11 -1 8 6 -2 -6 -1 6 8 9 11 13 11 5 2 3 2 -3 -8 -7 -2 -1 -5 -7 -6 -1 1 0 -1 1 3 4 3 2 1 1 0 0 0 0 0 0 0 0 0 0 -1 -1 0 0 0 0 0 -1 -1 0 +145 123 105 110 139 159 133 69 30 57 118 142 107 60 56 105 169 206 202 166 122 89 74 61 31 -16 -60 -97 -142 -197 -231 -219 -179 -157 -169 -175 -140 -81 -44 -51 -76 -88 -84 
-80 -84 -102 -127 -143 -140 -126 -127 -158 -189 -186 -137 -69 -23 -18 -38 -35 20 102 133 76 0 0 76 126 78 5 29 155 258 236 137 92 148 242 287 271 238 225 231 236 228 205 173 139 113 95 75 43 4 -20 -15 9 30 32 22 5 -21 -55 -67 -37 10 11 -57 -148 -193 -181 -164 -191 -247 -290 -305 -311 -329 -350 -350 -325 -298 -282 -265 -228 -182 -157 -159 -148 -84 -1 31 -6 -40 -1 85 122 81 23 17 49 71 81 114 160 168 133 120 172 230 206 111 55 93 156 148 79 47 82 113 79 22 19 63 68 -3 -79 -79 -24 14 11 19 61 89 53 -11 -21 40 94 77 12 -34 -43 -52 -71 -59 -1 53 43 -9 -35 -3 44 63 72 96 117 98 50 29 39 28 -33 -91 -80 -24 -8 -60 -109 -85 -16 19 0 -8 37 101 126 106 80 66 54 28 9 9 20 24 20 19 21 17 4 -9 -9 6 34 62 72 46 -11 -65 -78 -48 -8 -5 -42 -83 -86 -59 -48 -76 -115 -120 -91 -66 -76 -100 -108 -101 -112 -145 -162 -142 -105 -90 -99 -96 -59 -11 10 -6 -31 -27 14 62 85 80 77 104 146 159 133 113 132 160 138 71 33 71 129 129 69 25 32 48 24 -17 -19 10 18 -11 -34 -16 11 -6 -58 -85 -65 -36 -45 -85 -111 -98 -63 -39 -31 -23 2 29 29 -4 -38 -34 11 60 80 84 91 93 71 36 19 30 44 34 10 -5 9 58 130 185 171 81 -10 -19 47 88 37 -63 -105 -54 14 12 -60 -125 -132 -102 -90 -118 -158 -176 -172 -170 -163 -137 -101 -94 -128 -160 -146 -110 -115 -164 -188 -137 -56 -24 -43 -43 11 74 99 96 113 152 167 143 120 137 169 161 94 14 -17 4 38 50 44 45 57 59 45 +0 0 0 0 0 0 0 0 0 0 0 1 1 0 0 1 2 3 4 3 3 2 2 2 1 -1 -3 -5 -8 -11 -14 -14 -12 -11 -13 -14 -12 -7 -4 -5 -8 -10 -10 -9 -10 -13 -17 -19 -20 -19 -19 -25 -31 -31 -24 -13 -5 -4 -8 -8 4 22 29 17 0 0 18 32 20 1 8 43 75 70 41 28 47 79 95 92 83 80 84 87 86 79 68 55 46 39 31 18 1 -9 -7 4 14 15 10 2 -11 -29 -35 -20 5 5 -32 -83 -110 -104 -96 -113 -148 -176 -187 -194 -207 -223 -226 -212 -197 -188 -179 -156 -126 -110 -112 -105 -61 -1 22 -5 -30 -1 64 93 62 17 13 38 56 65 92 130 138 110 100 144 194 175 95 47 80 136 129 69 41 73 101 71 19 17 57 62 -3 -74 -74 -23 13 10 17 57 84 50 -11 -21 38 91 74 11 -34 -43 -52 -70 -59 -1 52 42 -9 -35 -3 43 62 71 95 116 97 49 28 39 28 -33 -91 -80 
-24 -8 -60 -109 -85 -16 18 0 -8 36 99 124 104 78 64 52 27 8 8 19 23 19 18 20 16 3 -9 -9 5 31 57 66 42 -11 -60 -71 -44 -8 -5 -38 -74 -76 -52 -42 -66 -99 -103 -77 -56 -64 -83 -89 -83 -91 -117 -130 -113 -83 -71 -77 -74 -45 -9 7 -5 -23 -20 10 43 59 55 52 70 98 105 87 73 84 101 86 44 20 42 77 76 40 14 18 26 13 -10 -11 5 9 -6 -18 -8 5 -3 -28 -40 -30 -17 -20 -37 -48 -41 -26 -16 -13 -9 0 10 10 -2 -14 -12 3 19 25 26 27 27 20 10 5 8 11 8 2 -2 2 13 28 39 35 16 -2 -4 8 15 6 -11 -17 -9 2 1 -9 -17 -17 -13 -11 -14 -17 -19 -17 -16 -15 -12 -9 -8 -10 -11 -10 -7 -7 -9 -10 -7 -3 -1 -2 -2 0 2 2 2 2 2 2 2 1 1 1 1 0 0 -1 0 0 0 0 0 0 0 0 +22 19 63 68 -3 -79 -79 -24 14 11 19 61 89 53 -11 -21 40 94 77 12 -34 -43 -52 -71 -59 -1 53 43 -9 -35 -3 44 63 72 96 117 98 50 29 39 28 -33 -91 -80 -24 -8 -60 -109 -85 -16 19 0 -8 37 101 126 106 80 66 54 28 9 9 20 24 20 19 21 17 4 -9 -9 6 34 62 72 46 -11 -65 -78 -48 -8 -5 -42 -83 -86 -59 -48 -76 -115 -120 -91 -66 -76 -100 -108 -101 -112 -145 -162 -142 -105 -90 -99 -96 -59 -11 10 -6 -31 -27 14 62 85 80 77 104 146 159 133 113 132 160 138 71 33 71 129 129 69 25 32 48 24 -17 -19 10 18 -11 -34 -16 11 -6 -58 -85 -65 -36 -45 -85 -111 -98 -63 -39 -31 -23 2 29 29 -4 -38 -34 11 60 80 84 91 93 71 36 19 30 44 34 10 -5 9 58 130 185 171 81 -10 -19 47 88 37 -63 -105 -54 14 12 -60 -125 -132 -102 -90 -118 -158 -176 -172 -170 -163 -137 -101 -94 -128 -160 -146 -110 -115 -164 -188 -137 -56 -24 -43 -43 11 74 99 96 113 152 167 143 120 137 169 161 94 14 -17 4 38 50 44 45 57 59 45 34 51 81 94 84 73 66 40 -17 -70 -75 -48 -42 -84 -128 -131 -95 -68 -69 -77 -62 -30 3 23 36 53 72 87 90 77 55 30 5 -7 1 23 33 17 -14 -34 -38 -37 -28 -5 23 21 -18 -45 -10 59 83 26 -44 -50 3 42 30 4 11 39 40 -2 -43 -42 -8 21 30 31 44 61 68 63 57 53 45 31 20 18 22 20 16 18 26 36 47 58 64 46 2 -37 -40 -7 26 30 -2 -42 -64 -62 -51 -56 -77 -96 -92 -64 -44 -44 -52 -48 -30 -12 2 10 5 -13 -30 -28 -13 -7 -26 -51 -62 -56 -36 -3 36 61 62 46 42 54 63 61 64 82 89 62 23 4 12 12 -17 -50 -59 -51 -61 -101 -136 -125 -71 -21 
-8 -34 -61 -64 -45 -31 -41 -58 -57 -33 +0 0 0 0 -1 -1 -1 -1 0 0 0 0 0 0 -1 -1 0 1 1 0 -1 -2 -2 -3 -3 -1 2 1 -1 -2 -1 2 4 4 6 8 7 4 2 3 2 -4 -10 -9 -3 -1 -8 -15 -12 -3 2 0 -2 6 17 22 19 15 12 10 5 1 1 4 5 4 4 5 4 1 -3 -3 1 10 18 22 14 -4 -22 -27 -17 -3 -2 -16 -32 -34 -24 -20 -32 -49 -52 -40 -30 -35 -46 -51 -48 -54 -71 -81 -72 -54 -47 -53 -52 -33 -7 5 -4 -18 -16 8 37 51 49 47 65 92 102 86 74 87 107 93 48 22 49 91 92 49 18 23 35 18 -13 -15 7 14 -9 -27 -13 8 -5 -48 -70 -54 -31 -38 -72 -95 -84 -55 -34 -28 -21 1 25 25 -4 -35 -31 10 54 73 77 84 86 66 33 17 28 41 32 9 -5 8 56 125 179 166 79 -10 -19 46 86 36 -63 -105 -54 13 11 -60 -125 -132 -102 -90 -118 -158 -176 -172 -170 -163 -137 -101 -94 -128 -160 -146 -110 -115 -163 -187 -136 -56 -24 -43 -43 10 72 96 93 109 147 161 137 115 131 161 153 89 13 -16 3 35 46 40 41 52 53 40 30 45 72 83 74 64 57 34 -15 -60 -64 -41 -36 -71 -106 -108 -78 -56 -56 -62 -50 -24 2 17 27 40 54 64 66 56 39 21 3 -5 0 15 22 11 -10 -23 -25 -24 -18 -4 14 12 -11 -27 -6 34 47 14 -25 -28 1 22 15 2 5 19 19 -1 -21 -20 -4 9 13 13 19 25 28 25 22 20 17 11 7 6 7 6 5 6 8 11 14 17 19 13 0 -11 -11 -2 6 7 -1 -10 -15 -14 -12 -12 -16 -19 -18 -12 -8 -8 -9 -8 -5 -2 0 1 0 -2 -4 -4 -2 -1 -3 -5 -6 -5 -4 -1 2 4 4 2 2 3 3 3 2 3 3 2 0 0 0 0 -1 -2 -2 -1 -1 -2 -2 -2 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 0 +-34 11 60 80 84 91 93 71 36 19 30 44 34 10 -5 9 58 130 185 171 81 -10 -19 47 88 37 -63 -105 -54 14 12 -60 -125 -132 -102 -90 -118 -158 -176 -172 -170 -163 -137 -101 -94 -128 -160 -146 -110 -115 -164 -188 -137 -56 -24 -43 -43 11 74 99 96 113 152 167 143 120 137 169 161 94 14 -17 4 38 50 44 45 57 59 45 34 51 81 94 84 73 66 40 -17 -70 -75 -48 -42 -84 -128 -131 -95 -68 -69 -77 -62 -30 3 23 36 53 72 87 90 77 55 30 5 -7 1 23 33 17 -14 -34 -38 -37 -28 -5 23 21 -18 -45 -10 59 83 26 -44 -50 3 42 30 4 11 39 40 -2 -43 -42 -8 21 30 31 44 61 68 63 57 53 45 31 20 18 22 20 16 18 26 36 47 58 64 46 2 -37 -40 -7 26 30 -2 -42 -64 -62 -51 -56 -77 -96 -92 -64 -44 -44 -52 -48 -30 -12 2 10 5 -13 -30 -28 
-13 -7 -26 -51 -62 -56 -36 -3 36 61 62 46 42 54 63 61 64 82 89 62 23 4 12 12 -17 -50 -59 -51 -61 -101 -136 -125 -71 -21 -8 -34 -61 -64 -45 -31 -41 -58 -57 -33 -6 3 5 21 51 68 60 53 72 106 123 112 96 91 87 70 51 55 77 81 45 -8 -40 -44 -49 -75 -101 -104 -82 -67 -84 -109 -102 -61 -28 -42 -88 -114 -97 -68 -76 -113 -127 -87 -19 20 14 -6 -6 21 48 49 38 42 76 107 94 38 -6 8 57 70 19 -44 -58 -22 13 8 -19 -31 -17 -5 -12 -31 -35 -11 23 29 -5 -49 -56 -22 18 29 25 43 86 111 94 52 38 68 105 113 95 85 101 133 145 125 90 66 49 30 10 9 30 48 34 -3 -28 -31 -34 -55 -74 -67 -44 -39 -58 -75 -66 -44 -38 -45 -42 -7 46 87 101 92 77 69 71 83 99 114 117 94 56 28 19 19 6 -25 -51 -52 -35 -19 -23 -47 -74 -90 -91 -86 -89 -97 -101 -97 -96 -106 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 -1 0 0 2 3 3 2 -1 -1 1 3 1 -3 -5 -3 0 0 -4 -8 -9 -8 -7 -10 -14 -16 -17 -17 -17 -15 -12 -12 -16 -21 -20 -16 -17 -25 -30 -23 -10 -5 -8 -8 2 14 20 20 24 33 38 33 29 34 43 42 25 3 -5 1 11 15 13 14 18 19 15 11 18 29 34 31 28 26 16 -7 -30 -32 -21 -19 -38 -59 -61 -45 -33 -34 -39 -32 -16 1 12 19 28 39 48 50 44 32 17 2 -5 0 14 20 10 -10 -23 -26 -25 -19 -4 15 14 -13 -32 -8 42 60 19 -33 -38 2 32 23 3 8 30 31 -2 -35 -35 -7 17 25 26 37 51 58 54 49 46 39 27 17 16 19 18 14 16 23 33 43 53 59 43 1 -35 -38 -7 24 28 -2 -41 -62 -61 -50 -55 -76 -94 -91 -63 -44 -44 -52 -48 -30 -12 1 9 4 -13 -30 -28 -13 -7 -26 -51 -62 -56 -36 -3 35 60 61 45 41 53 62 60 63 81 87 61 22 3 11 11 -17 -49 -58 -50 -59 -98 -131 -120 -68 -20 -8 -32 -58 -60 -42 -29 -38 -54 -52 -30 -6 2 4 18 45 59 52 45 62 90 104 94 80 75 72 57 41 44 61 64 35 -7 -32 -34 -38 -57 -76 -78 -61 -49 -61 -79 -73 -43 -20 -29 -60 -77 -65 -45 -50 -73 -81 -55 -12 12 8 -4 -4 12 27 27 21 23 41 57 49 19 -4 4 28 34 9 -21 -27 -11 5 3 -9 -14 -8 -3 -5 -13 -14 -5 8 10 -2 -18 -20 -8 5 9 7 13 25 32 26 14 10 17 26 28 23 20 23 29 31 26 18 12 9 5 1 1 4 7 5 -1 -5 -5 -5 -8 -10 -8 -5 -5 -6 -8 -7 -4 -4 -4 -4 -1 3 5 6 5 4 3 3 3 3 4 3 2 1 0 0 0 0 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 0 +16 18 26 36 47 58 64 46 2 
-37 -40 -7 26 30 -2 -42 -64 -62 -51 -56 -77 -96 -92 -64 -44 -44 -52 -48 -30 -12 2 10 5 -13 -30 -28 -13 -7 -26 -51 -62 -56 -36 -3 36 61 62 46 42 54 63 61 64 82 89 62 23 4 12 12 -17 -50 -59 -51 -61 -101 -136 -125 -71 -21 -8 -34 -61 -64 -45 -31 -41 -58 -57 -33 -6 3 5 21 51 68 60 53 72 106 123 112 96 91 87 70 51 55 77 81 45 -8 -40 -44 -49 -75 -101 -104 -82 -67 -84 -109 -102 -61 -28 -42 -88 -114 -97 -68 -76 -113 -127 -87 -19 20 14 -6 -6 21 48 49 38 42 76 107 94 38 -6 8 57 70 19 -44 -58 -22 13 8 -19 -31 -17 -5 -12 -31 -35 -11 23 29 -5 -49 -56 -22 18 29 25 43 86 111 94 52 38 68 105 113 95 85 101 133 145 125 90 66 49 30 10 9 30 48 34 -3 -28 -31 -34 -55 -74 -67 -44 -39 -58 -75 -66 -44 -38 -45 -42 -7 46 87 101 92 77 69 71 83 99 114 117 94 56 28 19 19 6 -25 -51 -52 -35 -19 -23 -47 -74 -90 -91 -86 -89 -97 -101 -97 -96 -106 -122 -127 -124 -123 -116 -91 -56 -48 -76 -112 -116 -89 -67 -71 -88 -95 -94 -97 -107 -110 -98 -72 -40 -14 -5 -8 -5 19 56 82 91 88 83 88 105 139 168 157 101 48 50 99 134 120 81 65 70 65 37 20 31 53 58 49 55 73 69 31 -4 1 33 51 32 1 -9 -7 -13 -25 -27 -18 -22 -49 -72 -64 -35 -16 -15 -12 13 43 57 58 61 67 65 60 71 93 87 32 -39 -63 -26 18 18 -15 -32 -14 5 -17 -70 -102 -79 -23 19 23 0 -18 -24 -28 -34 -47 -60 -78 -100 -120 -128 -128 -134 -150 -171 -181 -170 -147 -122 -102 -81 -56 -40 -44 -60 -56 -18 31 50 34 25 54 100 114 90 67 73 90 80 37 0 -7 8 19 10 -8 -11 14 58 94 104 97 86 78 +0 0 0 0 0 0 0 0 0 -1 -1 -1 0 0 -1 -1 -2 -2 -2 -2 -2 -3 -3 -3 -2 -2 -3 -3 -2 -1 0 0 0 -1 -3 -3 -2 -1 -3 -5 -7 -6 -4 -1 4 7 7 6 5 7 9 9 10 13 15 11 4 0 2 2 -4 -11 -14 -12 -15 -25 -34 -32 -19 -6 -3 -10 -18 -20 -14 -10 -14 -19 -20 -12 -3 1 1 7 19 26 23 21 29 44 52 48 42 40 39 32 24 26 37 40 22 -5 -21 -24 -27 -41 -56 -59 -47 -39 -49 -65 -61 -37 -18 -27 -56 -73 -63 -45 -51 -76 -86 -60 -14 13 9 -5 -5 15 35 36 28 31 57 81 72 29 -5 6 45 56 15 -36 -48 -19 10 6 -17 -27 -15 -5 -11 -28 -31 -10 20 25 -5 -45 -51 -21 16 26 23 39 80 103 88 49 35 64 100 108 91 81 97 128 140 121 87 64 48 29 9 8 29 47 33 -3 
-28 -31 -34 -55 -74 -67 -44 -39 -58 -75 -66 -44 -38 -45 -42 -7 45 86 100 91 76 68 70 82 97 112 115 92 54 27 18 18 5 -25 -50 -50 -34 -19 -22 -45 -70 -85 -86 -81 -83 -90 -93 -89 -88 -97 -111 -114 -111 -110 -103 -80 -49 -42 -66 -96 -99 -76 -57 -60 -73 -79 -77 -79 -86 -88 -78 -57 -32 -11 -4 -7 -4 14 41 59 65 63 58 61 72 95 114 105 67 31 32 63 85 75 50 39 42 38 21 11 17 30 32 27 29 39 36 16 -3 0 16 24 15 0 -5 -4 -6 -12 -12 -8 -10 -21 -29 -26 -14 -7 -6 -5 4 15 19 19 19 21 20 18 21 27 24 8 -11 -17 -7 4 4 -4 -8 -4 1 -4 -15 -21 -16 -5 3 3 0 -3 -4 -5 -5 -7 -8 -10 -13 -15 -15 -14 -14 -15 -16 -17 -15 -12 -10 -8 -6 -4 -3 -3 -4 -3 -1 1 1 1 0 1 2 2 2 1 1 1 1 0 0 -1 0 0 0 -1 -1 0 0 0 0 0 0 0 +-56 -22 18 29 25 43 86 111 94 52 38 68 105 113 95 85 101 133 145 125 90 66 49 30 10 9 30 48 34 -3 -28 -31 -34 -55 -74 -67 -44 -39 -58 -75 -66 -44 -38 -45 -42 -7 46 87 101 92 77 69 71 83 99 114 117 94 56 28 19 19 6 -25 -51 -52 -35 -19 -23 -47 -74 -90 -91 -86 -89 -97 -101 -97 -96 -106 -122 -127 -124 -123 -116 -91 -56 -48 -76 -112 -116 -89 -67 -71 -88 -95 -94 -97 -107 -110 -98 -72 -40 -14 -5 -8 -5 19 56 82 91 88 83 88 105 139 168 157 101 48 50 99 134 120 81 65 70 65 37 20 31 53 58 49 55 73 69 31 -4 1 33 51 32 1 -9 -7 -13 -25 -27 -18 -22 -49 -72 -64 -35 -16 -15 -12 13 43 57 58 61 67 65 60 71 93 87 32 -39 -63 -26 18 18 -15 -32 -14 5 -17 -70 -102 -79 -23 19 23 0 -18 -24 -28 -34 -47 -60 -78 -100 -120 -128 -128 -134 -150 -171 -181 -170 -147 -122 -102 -81 -56 -40 -44 -60 -56 -18 31 50 34 25 54 100 114 90 67 73 90 80 37 0 -7 8 19 10 -8 -11 14 58 94 104 97 86 78 66 60 78 116 139 114 49 -18 -53 -47 -10 29 37 4 -46 -66 -45 -8 17 31 44 49 39 32 52 77 63 -1 -72 -95 -77 -62 -68 -74 -61 -44 -43 -50 -34 7 37 36 25 32 54 60 42 24 24 20 -13 -57 -73 -55 -42 -55 -76 -75 -58 -52 -59 -56 -42 -38 -52 -64 -54 -38 -38 -36 -12 31 50 31 15 41 102 146 143 116 99 105 121 135 150 161 149 100 34 1 18 47 46 13 -15 -8 16 22 2 -28 -46 -47 -34 -18 -21 -46 -63 -43 0 15 -15 -54 -64 -54 -58 -75 -73 -39 -3 10 14 31 47 38 10 -8 7 37 
62 80 97 113 122 127 125 106 62 13 -21 -32 -36 -50 -68 -88 -118 -152 -174 -165 -128 -92 -79 -86 -94 -94 -92 -101 -114 -113 -83 -41 +0 -1 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 2 3 2 2 1 1 1 0 0 1 2 1 -1 -2 -2 -3 -4 -6 -6 -4 -4 -6 -7 -7 -5 -5 -6 -5 -1 5 11 13 13 11 10 11 13 17 20 21 17 11 5 3 4 1 -6 -13 -13 -9 -5 -7 -13 -21 -26 -27 -26 -28 -31 -33 -32 -33 -37 -43 -46 -46 -46 -45 -36 -23 -20 -32 -47 -50 -39 -30 -32 -41 -45 -45 -47 -53 -55 -50 -37 -21 -8 -3 -5 -3 10 31 47 52 51 49 53 64 86 105 99 64 31 32 65 90 81 55 45 49 46 26 14 22 39 43 36 41 55 53 24 -4 0 26 40 25 0 -8 -6 -11 -21 -23 -16 -19 -43 -63 -56 -31 -15 -14 -11 11 38 51 52 55 61 60 55 66 87 81 30 -37 -60 -25 17 17 -15 -31 -14 4 -17 -69 -100 -78 -23 18 22 0 -18 -24 -28 -34 -47 -60 -78 -100 -120 -128 -128 -134 -150 -171 -181 -170 -147 -122 -102 -81 -56 -40 -44 -60 -56 -18 30 49 33 24 52 97 111 87 65 70 86 77 35 0 -7 7 17 9 -8 -11 13 53 86 95 88 78 70 59 53 69 102 122 100 42 -16 -46 -41 -9 24 31 3 -39 -55 -37 -7 13 24 34 38 30 24 39 58 47 -1 -54 -70 -56 -45 -49 -52 -43 -31 -30 -34 -23 4 24 23 15 20 33 36 25 14 14 11 -8 -33 -41 -31 -23 -30 -41 -39 -30 -27 -30 -28 -21 -18 -25 -30 -25 -17 -17 -16 -6 12 20 12 5 15 37 53 51 40 33 35 39 43 46 49 44 29 9 0 4 12 11 3 -4 -2 3 4 0 -6 -10 -10 -7 -4 -4 -8 -11 -7 0 2 -3 -8 -9 -7 -8 -9 -9 -5 -1 0 1 2 3 3 0 -1 0 2 3 4 5 5 5 5 4 3 2 0 -1 -1 -1 -2 -2 -2 -2 -2 -2 -2 -2 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 0 +57 58 61 67 65 60 71 93 87 32 -39 -63 -26 18 18 -15 -32 -14 5 -17 -70 -102 -79 -23 19 23 0 -18 -24 -28 -34 -47 -60 -78 -100 -120 -128 -128 -134 -150 -171 -181 -170 -147 -122 -102 -81 -56 -40 -44 -60 -56 -18 31 50 34 25 54 100 114 90 67 73 90 80 37 0 -7 8 19 10 -8 -11 14 58 94 104 97 86 78 66 60 78 116 139 114 49 -18 -53 -47 -10 29 37 4 -46 -66 -45 -8 17 31 44 49 39 32 52 77 63 -1 -72 -95 -77 -62 -68 -74 -61 -44 -43 -50 -34 7 37 36 25 32 54 60 42 24 24 20 -13 -57 -73 -55 -42 -55 -76 -75 -58 -52 -59 -56 -42 -38 -52 -64 -54 -38 -38 -36 -12 31 50 31 15 41 102 146 143 116 99 105 121 135 150 
161 149 100 34 1 18 47 46 13 -15 -8 16 22 2 -28 -46 -47 -34 -18 -21 -46 -63 -43 0 15 -15 -54 -64 -54 -58 -75 -73 -39 -3 10 14 31 47 38 10 -8 7 37 62 80 97 113 122 127 125 106 62 13 -21 -32 -36 -50 -68 -88 -118 -152 -174 -165 -128 -92 -79 -86 -94 -94 -92 -101 -114 -113 -83 -41 -16 -24 -56 -78 -76 -59 -42 -34 -32 -25 -5 32 74 114 139 153 161 168 179 191 193 168 120 69 41 35 39 35 21 -3 -24 -34 -25 -16 -34 -73 -100 -94 -68 -56 -63 -64 -48 -29 -26 -35 -40 -37 -31 -25 -17 -8 -8 -13 -13 -5 5 7 2 -7 -16 -16 6 45 75 71 46 34 43 48 28 2 -1 13 19 1 -20 -30 -39 -59 -71 -55 -28 -25 -50 -63 -43 -23 -51 -120 -170 -162 -120 -89 -79 -65 -43 -35 -55 -81 -76 -35 24 65 78 70 63 77 105 124 119 103 101 119 141 150 144 132 118 110 109 116 128 142 164 188 202 189 154 116 96 86 71 39 -7 -46 -71 -84 -91 -104 -129 -170 -217 -248 -250 -237 -241 -267 -283 -263 -226 -216 -244 -271 -257 -217 -199 -212 -221 -194 +0 0 0 0 0 0 0 0 0 0 -1 -1 -1 0 0 -1 -1 -1 0 -1 -2 -3 -3 -1 0 0 0 -1 -2 -2 -2 -3 -4 -6 -8 -10 -11 -11 -12 -14 -17 -19 -19 -17 -15 -13 -11 -8 -6 -7 -9 -9 -3 5 8 6 4 10 19 23 18 14 16 20 18 8 0 -2 2 5 2 -3 -4 4 17 29 33 31 28 26 23 21 28 43 52 44 19 -8 -22 -20 -5 12 16 1 -22 -31 -22 -4 8 15 22 25 20 16 27 41 34 -1 -41 -55 -45 -37 -41 -45 -38 -28 -28 -32 -22 4 24 23 16 21 37 41 29 17 17 14 -10 -43 -55 -42 -32 -43 -59 -59 -46 -42 -48 -45 -35 -31 -43 -53 -46 -32 -33 -31 -11 26 43 27 13 36 90 130 128 104 89 95 110 124 138 149 138 93 31 0 17 44 43 12 -15 -8 15 21 1 -28 -45 -47 -34 -18 -21 -46 -63 -43 0 14 -15 -54 -64 -54 -58 -75 -73 -39 -3 10 14 30 46 37 9 -8 6 36 61 79 96 112 120 125 123 104 60 12 -21 -32 -36 -49 -66 -86 -114 -146 -167 -158 -122 -88 -75 -81 -89 -88 -86 -94 -105 -104 -76 -38 -15 -22 -50 -70 -68 -52 -37 -30 -28 -22 -5 27 62 95 115 125 131 136 143 152 152 131 93 53 31 26 29 26 15 -3 -18 -25 -18 -12 -24 -51 -69 -64 -46 -37 -42 -42 -31 -19 -17 -22 -25 -23 -19 -15 -10 -5 -5 -8 -8 -3 2 3 1 -4 -8 -8 2 21 34 32 20 14 18 20 11 0 -1 5 7 0 -8 -11 -14 -21 -25 -19 -10 -8 -16 -20 -13 -7 -15 
-34 -46 -43 -31 -23 -20 -16 -10 -8 -12 -17 -16 -7 4 11 13 12 10 12 16 18 17 14 13 15 17 17 16 14 12 10 10 10 10 11 12 13 13 12 9 6 5 4 3 1 -1 -2 -3 -3 -3 -3 -3 -4 -5 -5 -4 -4 -3 -3 -3 -2 -2 -1 -1 -1 -1 -1 -1 -1 -1 0 +99 105 121 135 150 161 149 100 34 1 18 47 46 13 -15 -8 16 22 2 -28 -46 -47 -34 -18 -21 -46 -63 -43 0 15 -15 -54 -64 -54 -58 -75 -73 -39 -3 10 14 31 47 38 10 -8 7 37 62 80 97 113 122 127 125 106 62 13 -21 -32 -36 -50 -68 -88 -118 -152 -174 -165 -128 -92 -79 -86 -94 -94 -92 -101 -114 -113 -83 -41 -16 -24 -56 -78 -76 -59 -42 -34 -32 -25 -5 32 74 114 139 153 161 168 179 191 193 168 120 69 41 35 39 35 21 -3 -24 -34 -25 -16 -34 -73 -100 -94 -68 -56 -63 -64 -48 -29 -26 -35 -40 -37 -31 -25 -17 -8 -8 -13 -13 -5 5 7 2 -7 -16 -16 6 45 75 71 46 34 43 48 28 2 -1 13 19 1 -20 -30 -39 -59 -71 -55 -28 -25 -50 -63 -43 -23 -51 -120 -170 -162 -120 -89 -79 -65 -43 -35 -55 -81 -76 -35 24 65 78 70 63 77 105 124 119 103 101 119 141 150 144 132 118 110 109 116 128 142 164 188 202 189 154 116 96 86 71 39 -7 -46 -71 -84 -91 -104 -129 -170 -217 -248 -250 -237 -241 -267 -283 -263 -226 -216 -244 -271 -257 -217 -199 -212 -221 -194 -147 -113 -92 -60 -2 68 133 187 229 256 274 299 337 372 391 401 419 440 439 403 355 315 274 218 156 118 108 96 57 1 -42 -66 -94 -138 -185 -212 -220 -233 -257 -286 -299 -298 -300 -319 -349 -363 -351 -316 -280 -256 -241 -220 -176 -111 -37 30 85 125 148 159 175 212 262 306 330 347 378 420 448 441 405 356 316 293 287 283 257 199 127 67 28 -10 -63 -129 -189 -235 -259 -267 -279 -312 -362 -400 -403 -379 -362 -369 -385 -380 -358 -348 -358 -362 -329 -266 -219 -211 -211 -169 -84 -11 2 -28 -34 18 91 121 108 97 127 181 214 218 218 232 254 264 262 264 282 305 314 305 290 279 270 250 214 176 158 158 153 127 90 69 69 65 35 -8 -39 -55 -81 -127 -173 -194 -191 -182 -181 -191 -200 -208 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 -1 -1 0 0 0 -1 -2 -2 -2 -1 -1 -2 -3 -2 0 0 -1 -4 -5 -4 -5 -6 -6 -4 -1 0 1 3 5 4 1 -1 0 4 8 11 14 17 19 21 21 18 11 2 -5 -7 -8 -11 -16 -21 -28 -37 -44 -43 -34 -25 
-22 -25 -28 -28 -29 -32 -37 -37 -28 -15 -6 -9 -21 -30 -29 -23 -17 -14 -14 -11 -3 13 32 51 63 71 76 80 87 94 97 85 62 36 21 19 21 19 11 -2 -14 -21 -15 -10 -21 -46 -63 -60 -44 -37 -42 -43 -33 -20 -18 -25 -29 -27 -23 -19 -13 -6 -6 -10 -10 -4 3 5 1 -6 -13 -13 4 36 61 58 38 28 36 40 23 1 -1 11 16 0 -18 -27 -36 -54 -65 -51 -26 -24 -47 -59 -41 -22 -48 -114 -162 -155 -115 -86 -76 -63 -42 -34 -54 -79 -75 -35 23 63 76 69 62 76 104 123 118 102 100 118 140 149 143 131 117 110 109 115 127 141 163 187 201 188 153 115 95 85 70 38 -7 -46 -70 -83 -90 -102 -126 -166 -211 -240 -241 -228 -231 -255 -270 -250 -214 -204 -229 -253 -239 -201 -184 -195 -202 -176 -133 -102 -83 -54 -2 59 116 162 197 219 233 252 282 310 323 329 341 356 352 321 280 247 213 168 119 89 81 71 42 0 -31 -48 -67 -97 -129 -146 -150 -157 -172 -189 -195 -192 -191 -201 -217 -223 -213 -189 -166 -149 -139 -125 -99 -62 -21 16 44 64 75 80 86 103 125 144 153 158 169 185 194 187 169 146 127 115 111 107 95 72 45 23 9 -4 -21 -42 -60 -72 -78 -78 -80 -87 -98 -105 -104 -95 -88 -87 -89 -85 -78 -73 -73 -72 -63 -50 -40 -37 -36 -28 -14 -2 0 -4 -5 2 11 14 12 10 13 17 19 19 18 18 19 18 17 16 16 17 16 15 13 11 10 9 7 5 4 4 3 2 1 1 1 0 0 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 0 +-71 -55 -28 -25 -50 -63 -43 -23 -51 -120 -170 -162 -120 -89 -79 -65 -43 -35 -55 -81 -76 -35 24 65 78 70 63 77 105 124 119 103 101 119 141 150 144 132 118 110 109 116 128 142 164 188 202 189 154 116 96 86 71 39 -7 -46 -71 -84 -91 -104 -129 -170 -217 -248 -250 -237 -241 -267 -283 -263 -226 -216 -244 -271 -257 -217 -199 -212 -221 -194 -147 -113 -92 -60 -2 68 133 187 229 256 274 299 337 372 391 401 419 440 439 403 355 315 274 218 156 118 108 96 57 1 -42 -66 -94 -138 -185 -212 -220 -233 -257 -286 -299 -298 -300 -319 -349 -363 -351 -316 -280 -256 -241 -220 -176 -111 -37 30 85 125 148 159 175 212 262 306 330 347 378 420 448 441 405 356 316 293 287 283 257 199 127 67 28 -10 -63 -129 -189 -235 -259 -267 -279 -312 -362 -400 -403 -379 -362 -369 -385 -380 -358 -348 -358 -362 -329 
-266 -219 -211 -211 -169 -84 -11 2 -28 -34 18 91 121 108 97 127 181 214 218 218 232 254 264 262 264 282 305 314 305 290 279 270 250 214 176 158 158 153 127 90 69 69 65 35 -8 -39 -55 -81 -127 -173 -194 -191 -182 -181 -191 -200 -208 -209 -200 -182 -163 -146 -126 -100 -67 -37 -14 -1 2 -8 -22 -17 16 65 99 105 101 108 126 122 80 33 31 84 144 160 124 77 43 21 -3 -22 -26 -32 -62 -116 -162 -172 -147 -118 -113 -139 -178 -209 -215 -203 -183 -154 -113 -68 -41 -35 -35 -29 -24 -27 -16 27 87 120 106 78 66 70 68 63 75 105 131 138 131 130 138 146 155 164 163 138 97 61 45 47 50 44 31 23 21 17 -2 -33 -59 -72 -80 -91 -106 -117 -127 -141 -154 -159 -151 -137 -110 -72 -42 -44 -68 -79 -59 -31 -16 9 58 110 118 87 74 96 116 103 88 120 184 221 216 201 197 181 136 93 88 107 98 51 7 -3 6 -1 -25 -46 -47 -38 -34 -41 -55 -65 -63 -55 -50 -60 -72 -68 -41 -8 5 -3 -13 +0 -1 -1 -1 -1 -1 -1 -1 -1 -1 -2 -2 -2 -1 -2 -1 -1 -1 -2 -2 -2 -1 0 2 2 2 2 3 5 6 6 6 6 8 10 11 11 11 10 10 10 11 13 15 19 22 25 25 21 16 14 13 11 6 -2 -9 -14 -16 -18 -22 -27 -37 -49 -57 -59 -58 -60 -69 -75 -71 -63 -62 -71 -81 -79 -68 -64 -70 -74 -67 -52 -41 -34 -23 -1 26 52 75 93 107 116 129 148 167 178 186 197 211 214 199 178 161 142 115 83 64 59 53 32 0 -25 -39 -57 -84 -114 -132 -139 -149 -166 -187 -197 -199 -202 -218 -241 -253 -247 -225 -201 -186 -177 -163 -132 -84 -29 22 65 97 116 125 139 170 212 249 271 287 315 352 378 375 346 306 274 255 251 249 228 177 113 60 25 -10 -58 -119 -175 -219 -242 -250 -263 -295 -343 -381 -385 -363 -348 -356 -373 -369 -348 -340 -350 -355 -323 -262 -216 -209 -209 -168 -84 -11 1 -28 -34 17 90 120 107 96 126 181 214 217 217 231 253 263 261 263 280 303 311 302 287 275 266 246 210 172 154 154 149 123 87 66 66 62 33 -8 -38 -53 -77 -120 -162 -181 -178 -169 -167 -175 -183 -189 -189 -180 -163 -145 -129 -111 -88 -59 -32 -12 -1 1 -7 -19 -15 13 53 80 84 80 85 98 94 61 25 23 63 107 118 90 55 30 14 -3 -16 -18 -22 -42 -78 -107 -112 -95 -76 -71 -87 -110 -127 -129 -120 -107 -89 -65 -39 -23 -20 -19 -16 -13 -14 -9 13 42 
57 50 36 30 31 29 27 31 43 53 55 51 50 52 54 56 58 56 47 32 19 14 14 15 13 9 6 5 4 -1 -9 -15 -18 -19 -21 -24 -26 -27 -29 -31 -31 -28 -25 -19 -12 -7 -7 -11 -12 -9 -5 -3 1 6 12 12 8 7 8 10 8 7 9 13 14 13 12 11 9 6 4 3 4 3 1 0 -1 0 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 0 -1 0 +28 -10 -63 -129 -189 -235 -259 -267 -279 -312 -362 -400 -403 -379 -362 -369 -385 -380 -358 -348 -358 -362 -329 -266 -219 -211 -211 -169 -84 -11 2 -28 -34 18 91 121 108 97 127 181 214 218 218 232 254 264 262 264 282 305 314 305 290 279 270 250 214 176 158 158 153 127 90 69 69 65 35 -8 -39 -55 -81 -127 -173 -194 -191 -182 -181 -191 -200 -208 -209 -200 -182 -163 -146 -126 -100 -67 -37 -14 -1 2 -8 -22 -17 16 65 99 105 101 108 126 122 80 33 31 84 144 160 124 77 43 21 -3 -22 -26 -32 -62 -116 -162 -172 -147 -118 -113 -139 -178 -209 -215 -203 -183 -154 -113 -68 -41 -35 -35 -29 -24 -27 -16 27 87 120 106 78 66 70 68 63 75 105 131 138 131 130 138 146 155 164 163 138 97 61 45 47 50 44 31 23 21 17 -2 -33 -59 -72 -80 -91 -106 -117 -127 -141 -154 -159 -151 -137 -110 -72 -42 -44 -68 -79 -59 -31 -16 9 58 110 118 87 74 96 116 103 88 120 184 221 216 201 197 181 136 93 88 107 98 51 7 -3 6 -1 -25 -46 -47 -38 -34 -41 -55 -65 -63 -55 -50 -60 -72 -68 -41 -8 5 -3 -13 -9 11 30 28 -2 -42 -66 -58 -26 6 20 17 1 -30 -64 -82 -62 -19 16 20 9 7 17 20 10 -2 -13 -34 -66 -90 -92 -94 -117 -155 -174 -159 -139 -142 -159 -172 -171 -161 -152 -137 -118 -104 -95 -82 -64 -52 -45 -31 -7 12 10 -8 -16 -8 9 24 42 70 105 133 141 132 119 111 109 100 82 67 71 94 109 99 74 49 25 -10 -39 -46 -33 -27 -38 -44 -26 -6 -18 -56 -76 -58 -38 -49 -79 -78 -32 13 16 -9 -12 30 89 124 125 116 125 160 198 201 163 116 100 123 149 147 127 115 118 105 65 21 7 15 15 -3 -9 7 17 -6 -42 -41 0 34 17 -26 -39 -11 14 5 -16 -9 23 47 42 24 11 -1 -21 -42 -58 -82 -123 -168 -183 -163 -125 -101 -99 -115 +0 -1 -1 -1 -1 -1 -1 -1 -2 -2 -3 -4 -4 -5 -5 -6 -7 -8 -8 -9 -10 -11 -11 -9 -9 -9 -10 -8 -5 -1 0 -2 -3 1 6 9 8 8 11 16 20 22 23 26 29 32 33 35 38 43 46 47 46 46 46 
44 39 33 31 32 32 27 19 15 16 15 8 -3 -11 -15 -23 -36 -51 -58 -59 -57 -58 -63 -67 -72 -73 -72 -67 -61 -56 -49 -40 -27 -16 -6 -1 0 -4 -10 -8 7 30 47 51 50 54 64 63 42 17 16 46 80 90 71 44 25 12 -2 -14 -17 -21 -40 -75 -106 -114 -98 -80 -77 -96 -124 -147 -153 -146 -133 -113 -84 -51 -31 -27 -27 -23 -19 -22 -13 21 69 97 86 64 54 58 57 53 63 89 112 119 114 114 121 129 138 147 147 125 88 55 41 43 46 41 29 21 19 16 -2 -32 -57 -70 -78 -88 -103 -114 -124 -138 -151 -157 -149 -135 -109 -72 -42 -44 -68 -79 -59 -31 -16 8 57 109 117 86 74 96 115 102 87 119 183 220 215 200 195 179 134 92 87 105 96 50 6 -3 5 -1 -25 -45 -46 -37 -33 -40 -53 -62 -60 -52 -47 -57 -68 -64 -38 -8 4 -3 -12 -9 9 26 24 -2 -37 -58 -51 -23 5 17 14 0 -26 -53 -68 -51 -16 12 15 7 5 13 15 7 -2 -10 -26 -49 -66 -67 -68 -83 -109 -121 -110 -95 -96 -106 -114 -112 -104 -97 -87 -74 -64 -58 -50 -38 -31 -26 -18 -4 6 5 -5 -9 -5 4 12 20 34 50 62 65 60 53 48 47 42 34 27 28 37 42 37 27 17 8 -4 -14 -16 -11 -9 -12 -14 -8 -2 -6 -16 -21 -16 -10 -13 -20 -19 -8 2 3 -2 -3 5 16 22 22 19 20 25 30 29 23 16 13 15 18 17 14 12 12 10 6 1 0 1 1 -1 -1 0 1 -1 -3 -3 0 1 0 -1 -2 -1 0 0 -1 -1 0 0 0 0 0 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 0 +138 97 61 45 47 50 44 31 23 21 17 -2 -33 -59 -72 -80 -91 -106 -117 -127 -141 -154 -159 -151 -137 -110 -72 -42 -44 -68 -79 -59 -31 -16 9 58 110 118 87 74 96 116 103 88 120 184 221 216 201 197 181 136 93 88 107 98 51 7 -3 6 -1 -25 -46 -47 -38 -34 -41 -55 -65 -63 -55 -50 -60 -72 -68 -41 -8 5 -3 -13 -9 11 30 28 -2 -42 -66 -58 -26 6 20 17 1 -30 -64 -82 -62 -19 16 20 9 7 17 20 10 -2 -13 -34 -66 -90 -92 -94 -117 -155 -174 -159 -139 -142 -159 -172 -171 -161 -152 -137 -118 -104 -95 -82 -64 -52 -45 -31 -7 12 10 -8 -16 -8 9 24 42 70 105 133 141 132 119 111 109 100 82 67 71 94 109 99 74 49 25 -10 -39 -46 -33 -27 -38 -44 -26 -6 -18 -56 -76 -58 -38 -49 -79 -78 -32 13 16 -9 -12 30 89 124 125 116 125 160 198 201 163 116 100 123 149 147 127 115 118 105 65 21 7 15 15 -3 -9 7 17 -6 -42 -41 0 34 17 -26 -39 -11 14 5 -16 -9 23 47 
42 24 11 -1 -21 -42 -58 -82 -123 -168 -183 -163 -125 -101 -99 -115 -138 -157 -158 -149 -148 -163 -172 -154 -109 -68 -49 -43 -45 -48 -54 -52 -32 -8 1 -8 -10 13 58 98 116 127 145 170 179 158 125 104 96 83 56 38 46 71 80 61 33 14 0 -22 -49 -59 -46 -25 -21 -36 -56 -63 -56 -45 -43 -56 -83 -117 -158 -193 -204 -189 -157 -130 -111 -83 -41 -5 10 16 35 66 89 95 104 132 170 199 223 251 278 285 268 247 241 247 250 244 231 208 174 133 96 74 68 68 54 12 -51 -109 -134 -140 -157 -196 -229 -229 -197 -158 -138 -140 -151 -153 -135 -103 -80 -77 -83 -78 -64 -67 -88 -100 -78 -36 1 23 42 64 74 64 58 68 79 64 41 43 75 91 66 25 9 14 7 -19 -29 -4 22 4 -47 -80 -78 -72 -94 -125 -128 -106 -91 -103 -121 -119 +0 0 0 0 0 0 0 0 0 0 0 -1 -1 -1 -1 -2 -2 -2 -3 -3 -4 -5 -5 -6 -6 -5 -4 -2 -3 -4 -5 -4 -2 -2 0 4 8 9 7 6 9 11 11 9 14 22 28 28 27 28 26 21 14 14 18 17 9 1 -1 1 -1 -6 -11 -11 -9 -9 -11 -15 -18 -17 -16 -15 -18 -22 -21 -13 -3 1 -2 -5 -4 3 10 10 -1 -17 -27 -24 -11 2 8 7 0 -14 -30 -39 -30 -10 7 9 4 3 8 10 5 -2 -8 -20 -38 -52 -54 -56 -70 -94 -107 -99 -88 -91 -103 -112 -113 -108 -103 -94 -82 -73 -67 -59 -46 -38 -33 -23 -6 9 7 -7 -13 -7 7 18 33 56 85 108 115 109 99 93 92 85 70 57 61 82 95 87 65 43 22 -10 -36 -42 -31 -25 -36 -41 -25 -6 -17 -53 -72 -56 -37 -47 -76 -76 -31 12 15 -9 -12 29 87 121 123 114 123 158 196 199 162 115 99 122 148 146 126 114 117 105 65 20 6 14 14 -3 -9 6 16 -6 -42 -41 0 33 16 -26 -39 -11 13 4 -16 -9 22 45 40 23 10 -1 -20 -40 -55 -78 -116 -157 -170 -151 -116 -93 -91 -105 -125 -141 -142 -133 -131 -144 -151 -134 -94 -59 -42 -37 -38 -41 -45 -43 -27 -7 0 -7 -8 10 45 75 88 96 108 126 132 115 90 74 68 58 38 26 31 47 53 40 21 9 0 -14 -31 -37 -28 -15 -13 -21 -33 -36 -32 -25 -24 -30 -44 -61 -81 -98 -102 -93 -76 -62 -52 -38 -19 -3 4 6 14 27 35 37 40 50 63 72 79 87 95 95 87 78 75 75 74 70 65 57 46 34 24 18 16 16 12 2 -12 -23 -28 -28 -30 -37 -41 -40 -33 -26 -22 -21 -22 -22 -18 -14 -10 -10 -10 -9 -7 -7 -9 -9 -7 -3 0 1 2 4 4 3 3 3 3 2 1 1 2 2 1 0 0 0 0 -1 -1 -1 0 0 -1 -1 -1 -1 -1 -1 -1 -1 -1 
-1 -1 0 +-39 -46 -33 -27 -38 -44 -26 -6 -18 -56 -76 -58 -38 -49 -79 -78 -32 13 16 -9 -12 30 89 124 125 116 125 160 198 201 163 116 100 123 149 147 127 115 118 105 65 21 7 15 15 -3 -9 7 17 -6 -42 -41 0 34 17 -26 -39 -11 14 5 -16 -9 23 47 42 24 11 -1 -21 -42 -58 -82 -123 -168 -183 -163 -125 -101 -99 -115 -138 -157 -158 -149 -148 -163 -172 -154 -109 -68 -49 -43 -45 -48 -54 -52 -32 -8 1 -8 -10 13 58 98 116 127 145 170 179 158 125 104 96 83 56 38 46 71 80 61 33 14 0 -22 -49 -59 -46 -25 -21 -36 -56 -63 -56 -45 -43 -56 -83 -117 -158 -193 -204 -189 -157 -130 -111 -83 -41 -5 10 16 35 66 89 95 104 132 170 199 223 251 278 285 268 247 241 247 250 244 231 208 174 133 96 74 68 68 54 12 -51 -109 -134 -140 -157 -196 -229 -229 -197 -158 -138 -140 -151 -153 -135 -103 -80 -77 -83 -78 -64 -67 -88 -100 -78 -36 1 23 42 64 74 64 58 68 79 64 41 43 75 91 66 25 9 14 7 -19 -29 -4 22 4 -47 -80 -78 -72 -94 -125 -128 -106 -91 -103 -121 -119 -98 -83 -77 -74 -65 -61 -63 -58 -34 -2 11 -6 -36 -51 -43 -24 -5 12 31 45 41 21 -2 -16 -20 -24 -35 -55 -83 -103 -97 -69 -45 -43 -42 -17 25 48 30 5 13 46 60 46 35 59 104 131 126 120 132 154 168 168 173 193 218 231 226 204 176 153 142 145 152 147 125 95 79 77 77 60 24 -6 -13 3 23 29 19 -1 -15 -15 -5 -3 -19 -47 -74 -100 -128 -158 -175 -173 -169 -182 -216 -243 -244 -225 -197 -173 -153 -135 -121 -109 -98 -90 -90 -102 -119 -124 -105 -65 -23 -5 -11 -24 -20 5 37 68 96 115 117 99 76 70 85 100 97 85 81 83 79 54 13 -30 -61 -81 -86 -78 -66 -60 -70 -94 -128 -152 -152 -136 -127 -128 -115 -74 -32 -24 -49 -58 -25 16 18 -9 +0 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -2 -2 -1 0 0 -1 -1 0 2 4 4 4 5 7 9 10 9 6 6 8 10 11 10 9 10 9 6 2 0 1 1 -1 -2 0 2 -1 -7 -7 0 5 2 -5 -8 -3 2 1 -4 -2 5 10 9 5 2 -1 -6 -12 -17 -24 -36 -51 -56 -51 -40 -34 -34 -40 -49 -57 -58 -56 -57 -64 -68 -62 -45 -29 -21 -19 -20 -22 -25 -25 -16 -4 0 -4 -6 6 30 51 62 68 79 95 101 90 72 61 57 50 34 23 28 45 51 39 21 9 0 -15 -34 -42 -33 -18 -16 -27 -41 -47 -42 -34 -33 -43 -65 -92 -124 -153 -163 -152 -128 -107 -92 -69 
-35 -5 8 13 29 56 77 82 91 116 150 177 200 226 252 259 245 227 222 229 233 228 217 196 164 126 91 70 65 65 52 11 -50 -107 -131 -138 -155 -193 -226 -227 -195 -157 -137 -140 -151 -153 -135 -103 -80 -77 -83 -78 -64 -67 -88 -100 -78 -36 0 22 41 63 73 63 57 67 78 63 40 42 73 89 64 24 8 13 6 -19 -28 -4 21 3 -45 -76 -74 -68 -89 -117 -119 -98 -84 -95 -111 -108 -89 -75 -69 -66 -58 -54 -55 -51 -30 -2 9 -6 -31 -43 -36 -20 -5 9 24 35 32 16 -2 -13 -16 -19 -27 -41 -62 -76 -71 -50 -32 -31 -30 -12 17 32 19 3 8 29 38 28 21 36 62 78 74 69 75 87 93 92 93 103 115 120 115 102 87 74 68 68 70 67 56 41 34 32 32 24 9 -3 -6 1 8 10 6 -1 -6 -6 -2 -1 -6 -15 -23 -30 -37 -44 -48 -46 -44 -46 -53 -58 -56 -50 -43 -37 -32 -27 -24 -21 -18 -16 -15 -17 -19 -19 -16 -9 -4 -1 -2 -3 -3 0 3 6 8 10 9 7 5 5 5 6 5 4 4 4 3 2 0 -2 -3 -3 -3 -2 -2 -2 -2 -2 -2 -2 -2 -2 -2 -1 -1 -1 -1 -1 -1 -1 -1 0 0 0 +278 285 268 247 241 247 250 244 231 208 174 133 96 74 68 68 54 12 -51 -109 -134 -140 -157 -196 -229 -229 -197 -158 -138 -140 -151 -153 -135 -103 -80 -77 -83 -78 -64 -67 -88 -100 -78 -36 1 23 42 64 74 64 58 68 79 64 41 43 75 91 66 25 9 14 7 -19 -29 -4 22 4 -47 -80 -78 -72 -94 -125 -128 -106 -91 -103 -121 -119 -98 -83 -77 -74 -65 -61 -63 -58 -34 -2 11 -6 -36 -51 -43 -24 -5 12 31 45 41 21 -2 -16 -20 -24 -35 -55 -83 -103 -97 -69 -45 -43 -42 -17 25 48 30 5 13 46 60 46 35 59 104 131 126 120 132 154 168 168 173 193 218 231 226 204 176 153 142 145 152 147 125 95 79 77 77 60 24 -6 -13 3 23 29 19 -1 -15 -15 -5 -3 -19 -47 -74 -100 -128 -158 -175 -173 -169 -182 -216 -243 -244 -225 -197 -173 -153 -135 -121 -109 -98 -90 -90 -102 -119 -124 -105 -65 -23 -5 -11 -24 -20 5 37 68 96 115 117 99 76 70 85 100 97 85 81 83 79 54 13 -30 -61 -81 -86 -78 -66 -60 -70 -94 -128 -152 -152 -136 -127 -128 -115 -74 -32 -24 -49 -58 -25 16 18 -9 -14 23 63 61 29 16 38 68 78 77 94 140 194 236 255 266 282 303 317 310 281 249 225 204 180 161 164 179 176 140 90 59 43 19 -22 -63 -86 -99 -112 -122 -123 -129 -155 -192 -207 -193 -186 -206 -240 -254 -243 -235 
-229 -197 -130 -64 -39 -43 -31 15 70 101 106 103 101 97 94 102 120 134 134 126 120 115 105 89 70 55 52 63 75 73 46 11 -7 5 29 31 -5 -61 -100 -107 -106 -132 -174 -198 -177 -129 -100 -100 -108 -105 -90 -79 -76 -78 -74 -63 -42 -26 -32 -60 -90 -98 -92 -100 -131 -161 -161 -132 -98 -88 -96 -109 -111 -94 -64 -27 4 23 33 40 47 52 49 41 30 27 34 52 77 98 108 111 116 130 143 147 146 160 198 236 241 214 194 202 219 206 168 140 +0 0 0 0 0 0 0 0 1 1 1 1 0 0 0 0 0 0 -2 -3 -4 -4 -5 -7 -9 -10 -9 -8 -7 -8 -9 -10 -9 -7 -6 -6 -7 -7 -6 -7 -9 -11 -9 -5 0 2 5 8 10 9 8 10 12 10 7 7 13 17 12 5 1 3 1 -5 -7 -1 5 1 -13 -22 -22 -21 -28 -38 -40 -34 -30 -34 -41 -41 -35 -30 -29 -28 -25 -24 -25 -24 -14 -1 4 -3 -16 -23 -20 -12 -3 5 15 22 20 10 -2 -9 -11 -14 -20 -31 -48 -60 -57 -41 -27 -27 -26 -11 15 30 19 3 8 30 40 31 24 41 73 92 90 86 96 113 125 126 131 147 168 179 177 161 140 122 114 118 124 121 104 79 66 65 65 51 20 -6 -12 2 20 25 17 -1 -14 -14 -5 -3 -18 -44 -69 -94 -121 -150 -166 -165 -162 -175 -208 -235 -236 -219 -192 -169 -150 -133 -119 -108 -97 -89 -89 -102 -119 -124 -105 -65 -23 -5 -11 -24 -20 4 36 68 96 114 116 98 75 69 84 99 96 84 80 82 78 53 12 -30 -60 -80 -85 -77 -65 -59 -68 -91 -124 -146 -146 -130 -121 -122 -109 -70 -30 -23 -46 -54 -24 14 16 -9 -13 20 56 54 25 14 33 58 67 65 79 118 162 196 211 218 230 245 254 247 222 195 175 157 137 122 123 133 129 102 65 42 30 13 -16 -44 -59 -67 -75 -81 -81 -83 -99 -121 -129 -119 -113 -124 -142 -148 -140 -134 -128 -109 -71 -35 -21 -23 -16 7 34 49 50 48 46 44 42 44 52 57 56 51 48 45 40 33 26 20 18 21 25 24 15 3 -3 1 8 9 -2 -17 -27 -29 -28 -33 -43 -47 -41 -29 -22 -21 -22 -21 -18 -15 -14 -14 -13 -11 -7 -4 -5 -9 -12 -13 -12 -12 -15 -18 -17 -13 -10 -8 -9 -9 -9 -7 -5 -2 0 1 1 1 2 2 1 1 1 0 0 1 1 2 2 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 +-15 -15 -5 -3 -19 -47 -74 -100 -128 -158 -175 -173 -169 -182 -216 -243 -244 -225 -197 -173 -153 -135 -121 -109 -98 -90 -90 -102 -119 -124 -105 -65 -23 -5 -11 -24 -20 5 37 68 96 115 117 99 76 70 85 100 97 85 81 83 79 54 13 -30 
-61 -81 -86 -78 -66 -60 -70 -94 -128 -152 -152 -136 -127 -128 -115 -74 -32 -24 -49 -58 -25 16 18 -9 -14 23 63 61 29 16 38 68 78 77 94 140 194 236 255 266 282 303 317 310 281 249 225 204 180 161 164 179 176 140 90 59 43 19 -22 -63 -86 -99 -112 -122 -123 -129 -155 -192 -207 -193 -186 -206 -240 -254 -243 -235 -229 -197 -130 -64 -39 -43 -31 15 70 101 106 103 101 97 94 102 120 134 134 126 120 115 105 89 70 55 52 63 75 73 46 11 -7 5 29 31 -5 -61 -100 -107 -106 -132 -174 -198 -177 -129 -100 -100 -108 -105 -90 -79 -76 -78 -74 -63 -42 -26 -32 -60 -90 -98 -92 -100 -131 -161 -161 -132 -98 -88 -96 -109 -111 -94 -64 -27 4 23 33 40 47 52 49 41 30 27 34 52 77 98 108 111 116 130 143 147 146 160 198 236 241 214 194 202 219 206 168 140 144 157 145 112 85 75 72 68 59 50 29 -2 -29 -33 -23 -13 -15 -34 -68 -113 -149 -158 -141 -122 -115 -113 -105 -90 -79 -83 -98 -107 -102 -84 -68 -65 -64 -48 -14 15 22 16 15 27 39 44 48 52 51 46 53 73 87 78 63 65 72 46 -15 -64 -65 -40 -42 -77 -107 -107 -96 -114 -160 -207 -236 -246 -249 -251 -251 -255 -256 -250 -237 -224 -210 -186 -154 -134 -129 -130 -110 -65 -12 28 51 66 83 107 137 167 191 211 230 247 258 251 234 218 208 208 218 239 259 260 235 209 207 223 223 189 144 121 128 142 141 124 105 87 67 35 -6 -52 -88 -106 -112 -119 -132 -141 -129 -111 -109 -132 -159 -167 -162 -164 -181 -202 -204 -183 -157 -144 -144 -134 -110 -85 -82 -100 -113 -93 -54 -24 -12 1 +0 -1 -1 -1 -1 -1 -1 -1 -1 -1 -2 -2 -2 -3 -3 -4 -5 -5 -5 -5 -4 -4 -4 -4 -4 -4 -4 -5 -6 -7 -6 -4 -2 -1 -1 -2 -2 0 3 6 9 11 12 11 8 8 10 13 13 12 12 12 12 8 2 -6 -12 -16 -17 -16 -14 -13 -16 -22 -31 -37 -38 -35 -34 -35 -32 -21 -10 -8 -15 -19 -8 5 6 -4 -5 8 22 22 11 6 15 27 31 32 40 60 85 105 116 123 133 145 154 153 141 127 116 107 96 87 90 100 99 80 52 34 25 11 -14 -40 -55 -63 -73 -80 -81 -86 -105 -131 -143 -135 -131 -147 -172 -184 -178 -174 -171 -148 -99 -49 -31 -34 -25 11 55 81 85 84 82 80 78 85 101 114 114 108 104 100 92 78 62 49 46 56 68 66 42 10 -7 4 27 29 -5 -58 -95 -102 -102 -127 -168 -191 -172 -126 
-98 -98 -106 -103 -89 -78 -75 -77 -74 -63 -42 -26 -32 -60 -90 -98 -92 -100 -131 -161 -161 -132 -98 -88 -96 -109 -111 -94 -64 -27 3 22 32 39 46 51 48 40 29 26 33 50 75 95 104 107 111 124 136 140 138 151 186 221 225 199 180 186 201 188 153 126 129 140 129 99 75 65 62 58 50 42 24 -2 -25 -28 -20 -11 -13 -28 -55 -91 -118 -124 -110 -95 -88 -86 -79 -67 -59 -61 -71 -77 -73 -59 -48 -45 -44 -33 -10 9 14 10 9 16 24 26 29 31 30 26 30 41 48 42 34 34 37 23 -8 -33 -33 -20 -21 -37 -50 -49 -44 -51 -70 -89 -99 -101 -101 -100 -98 -97 -96 -92 -85 -79 -72 -63 -51 -43 -41 -40 -33 -19 -4 7 13 17 21 26 33 39 43 46 49 51 52 49 44 40 37 35 36 38 40 38 33 28 27 28 27 22 16 12 13 13 13 10 8 6 5 2 -1 -4 -6 -6 -6 -6 -7 -7 -6 -5 -4 -5 -5 -5 -4 -4 -4 -4 -3 -3 -2 -2 -2 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 0 +75 73 46 11 -7 5 29 31 -5 -61 -100 -107 -106 -132 -174 -198 -177 -129 -100 -100 -108 -105 -90 -79 -76 -78 -74 -63 -42 -26 -32 -60 -90 -98 -92 -100 -131 -161 -161 -132 -98 -88 -96 -109 -111 -94 -64 -27 4 23 33 40 47 52 49 41 30 27 34 52 77 98 108 111 116 130 143 147 146 160 198 236 241 214 194 202 219 206 168 140 144 157 145 112 85 75 72 68 59 50 29 -2 -29 -33 -23 -13 -15 -34 -68 -113 -149 -158 -141 -122 -115 -113 -105 -90 -79 -83 -98 -107 -102 -84 -68 -65 -64 -48 -14 15 22 16 15 27 39 44 48 52 51 46 53 73 87 78 63 65 72 46 -15 -64 -65 -40 -42 -77 -107 -107 -96 -114 -160 -207 -236 -246 -249 -251 -251 -255 -256 -250 -237 -224 -210 -186 -154 -134 -129 -130 -110 -65 -12 28 51 66 83 107 137 167 191 211 230 247 258 251 234 218 208 208 218 239 259 260 235 209 207 223 223 189 144 121 128 142 141 124 105 87 67 35 -6 -52 -88 -106 -112 -119 -132 -141 -129 -111 -109 -132 -159 -167 -162 -164 -181 -202 -204 -183 -157 -144 -144 -134 -110 -85 -82 -100 -113 -93 -54 -24 -12 1 27 52 56 50 52 57 52 32 16 17 26 27 28 42 67 81 75 65 64 59 42 21 17 23 14 -20 -50 -51 -30 -13 -15 -26 -30 -28 -32 -41 -47 -49 -53 -63 -73 -73 -70 -71 -88 -109 -123 -124 -106 -73 -40 -20 -15 -16 -8 16 40 55 66 83 105 115 114 111 122 138 149 149 
144 138 117 81 48 40 61 88 97 89 82 85 86 66 23 -21 -41 -32 -14 -12 -35 -64 -73 -57 -41 -45 -70 -85 -65 -18 20 20 -8 -30 -28 -9 3 0 -10 -13 -7 4 10 0 -23 -45 -53 -43 -27 -5 20 40 42 30 26 47 84 106 103 95 97 103 103 96 92 89 78 58 44 38 24 -9 -42 -49 -28 -8 -8 -19 -18 0 14 11 -3 -12 -11 -2 7 12 9 1 -7 -12 +0 0 0 0 -1 0 0 0 -1 -1 -1 -1 -2 -2 -3 -3 -3 -3 -3 -3 -3 -3 -3 -3 -3 -4 -4 -3 -3 -2 -2 -4 -6 -7 -7 -8 -11 -14 -15 -13 -10 -10 -11 -13 -14 -12 -9 -4 0 3 4 6 7 8 8 7 5 5 6 10 16 21 23 25 27 31 35 37 38 43 54 66 70 63 59 63 69 67 56 47 50 56 52 41 32 29 28 27 24 20 12 -1 -13 -15 -11 -7 -8 -17 -34 -57 -76 -81 -74 -65 -62 -62 -58 -51 -45 -48 -58 -64 -61 -51 -42 -41 -41 -31 -10 9 14 10 10 18 26 30 33 36 36 33 38 53 64 58 47 49 55 35 -12 -51 -52 -33 -35 -63 -88 -89 -81 -96 -136 -177 -203 -213 -216 -220 -221 -226 -228 -224 -213 -203 -191 -170 -142 -124 -120 -121 -103 -61 -12 26 48 62 79 102 131 160 184 204 223 240 252 245 229 214 204 205 215 236 256 258 233 208 206 222 222 188 143 120 127 142 141 123 104 86 66 34 -6 -52 -88 -106 -112 -119 -131 -140 -128 -110 -108 -130 -156 -164 -158 -160 -176 -196 -197 -176 -151 -138 -137 -127 -104 -80 -77 -94 -105 -86 -50 -22 -11 0 24 46 49 44 45 50 45 27 13 14 22 22 23 35 55 66 61 52 51 47 33 16 13 17 10 -16 -38 -38 -23 -10 -11 -19 -22 -20 -23 -29 -32 -33 -36 -42 -48 -47 -45 -45 -55 -67 -75 -75 -63 -43 -23 -12 -9 -9 -5 8 21 28 33 41 52 56 54 52 56 63 66 65 62 58 48 33 19 15 23 33 36 32 29 29 29 22 7 -7 -13 -10 -5 -4 -10 -18 -20 -15 -11 -12 -17 -21 -15 -4 4 4 -2 -6 -6 -2 0 0 -2 -3 -2 0 1 0 -4 -6 -7 -6 -4 -1 2 3 3 2 2 3 6 7 6 6 5 5 5 4 4 3 3 2 1 1 0 -1 -1 -2 -1 -1 -1 -1 -1 0 0 0 -1 -1 -1 -1 0 0 0 0 -1 0 +-210 -186 -154 -134 -129 -130 -110 -65 -12 28 51 66 83 107 137 167 191 211 230 247 258 251 234 218 208 208 218 239 259 260 235 209 207 223 223 189 144 121 128 142 141 124 105 87 67 35 -6 -52 -88 -106 -112 -119 -132 -141 -129 -111 -109 -132 -159 -167 -162 -164 -181 -202 -204 -183 -157 -144 -144 -134 -110 -85 -82 -100 -113 -93 -54 -24 -12 1 
27 52 56 50 52 57 52 32 16 17 26 27 28 42 67 81 75 65 64 59 42 21 17 23 14 -20 -50 -51 -30 -13 -15 -26 -30 -28 -32 -41 -47 -49 -53 -63 -73 -73 -70 -71 -88 -109 -123 -124 -106 -73 -40 -20 -15 -16 -8 16 40 55 66 83 105 115 114 111 122 138 149 149 144 138 117 81 48 40 61 88 97 89 82 85 86 66 23 -21 -41 -32 -14 -12 -35 -64 -73 -57 -41 -45 -70 -85 -65 -18 20 20 -8 -30 -28 -9 3 0 -10 -13 -7 4 10 0 -23 -45 -53 -43 -27 -5 20 40 42 30 26 47 84 106 103 95 97 103 103 96 92 89 78 58 44 38 24 -9 -42 -49 -28 -8 -8 -19 -18 0 14 11 -3 -12 -11 -2 7 12 9 1 -7 -12 -19 -36 -55 -66 -63 -56 -53 -51 -46 -39 -40 -52 -78 -108 -139 -156 -150 -127 -105 -99 -98 -85 -73 -79 -94 -86 -40 9 17 -16 -46 -35 7 37 33 14 6 15 29 35 32 31 34 36 31 23 20 26 28 18 0 -11 -10 -8 -17 -33 -42 -35 -17 2 11 5 -6 -8 6 26 37 35 31 31 30 19 -5 -35 -58 -55 -25 15 31 10 -22 -30 -5 20 16 -10 -26 -14 13 31 27 5 -23 -36 -35 -35 -49 -66 -59 -29 -8 -21 -49 -58 -44 -35 -40 -43 -26 -1 13 19 28 38 38 32 40 67 89 87 74 79 105 124 119 108 115 132 142 140 136 135 121 87 45 18 5 -3 0 23 56 71 51 13 -9 -3 6 2 -12 -18 -2 26 42 33 8 -11 -14 -14 -22 -37 +0 -1 -1 -1 -1 -1 -1 -1 -1 0 0 0 0 1 1 2 3 3 4 5 6 7 7 7 7 8 9 10 12 13 13 12 13 15 15 14 11 10 11 13 13 12 11 9 7 4 -1 -7 -13 -16 -17 -19 -22 -24 -23 -20 -21 -26 -32 -34 -34 -36 -41 -47 -49 -45 -40 -37 -38 -37 -31 -25 -24 -30 -35 -30 -18 -8 -5 0 9 18 20 18 19 22 20 12 6 7 11 11 12 18 30 37 35 31 31 29 21 10 8 12 7 -11 -28 -29 -17 -8 -9 -16 -18 -17 -20 -26 -30 -32 -35 -42 -49 -49 -48 -49 -61 -76 -87 -88 -76 -53 -30 -15 -12 -13 -7 12 30 42 51 65 83 92 92 90 100 114 124 125 121 117 100 69 41 34 53 77 86 79 73 76 77 60 21 -20 -38 -30 -14 -12 -33 -61 -70 -55 -40 -44 -68 -82 -63 -18 19 19 -8 -30 -28 -9 2 0 -10 -13 -7 3 9 0 -23 -45 -53 -43 -27 -5 19 40 42 29 25 46 83 105 102 94 96 102 102 95 91 88 76 57 43 37 23 -9 -41 -48 -28 -8 -8 -19 -18 0 13 10 -3 -12 -11 -2 6 11 8 0 -7 -11 -18 -33 -50 -59 -56 -50 -47 -45 -40 -34 -35 -44 -66 -91 -116 -129 -123 -103 -85 -79 -78 -67 -57 -61 -72 -66 -31 
6 12 -12 -34 -26 4 25 22 9 4 10 19 23 20 19 21 22 19 14 12 15 16 10 0 -7 -6 -5 -10 -18 -23 -19 -9 1 5 2 -3 -4 2 11 16 15 13 13 12 7 -3 -14 -23 -21 -10 5 11 3 -8 -11 -2 6 4 -4 -8 -5 3 8 7 1 -6 -9 -9 -9 -12 -15 -13 -7 -2 -5 -10 -11 -8 -7 -7 -7 -5 -1 1 2 3 4 4 3 4 7 9 8 6 7 8 9 9 7 7 8 8 7 7 6 5 3 1 0 0 -1 0 0 1 1 0 0 -1 -1 0 0 -1 -1 -1 0 0 0 0 -1 -1 -1 -1 0 +86 66 23 -21 -41 -32 -14 -12 -35 -64 -73 -57 -41 -45 -70 -85 -65 -18 20 20 -8 -30 -28 -9 3 0 -10 -13 -7 4 10 0 -23 -45 -53 -43 -27 -5 20 40 42 30 26 47 84 106 103 95 97 103 103 96 92 89 78 58 44 38 24 -9 -42 -49 -28 -8 -8 -19 -18 0 14 11 -3 -12 -11 -2 7 12 9 1 -7 -12 -19 -36 -55 -66 -63 -56 -53 -51 -46 -39 -40 -52 -78 -108 -139 -156 -150 -127 -105 -99 -98 -85 -73 -79 -94 -86 -40 9 17 -16 -46 -35 7 37 33 14 6 15 29 35 32 31 34 36 31 23 20 26 28 18 0 -11 -10 -8 -17 -33 -42 -35 -17 2 11 5 -6 -8 6 26 37 35 31 31 30 19 -5 -35 -58 -55 -25 15 31 10 -22 -30 -5 20 16 -10 -26 -14 13 31 27 5 -23 -36 -35 -35 -49 -66 -59 -29 -8 -21 -49 -58 -44 -35 -40 -43 -26 -1 13 19 28 38 38 32 40 67 89 87 74 79 105 124 119 108 115 132 142 140 136 135 121 87 45 18 5 -3 0 23 56 71 51 13 -9 -3 6 2 -12 -18 -2 26 42 33 8 -11 -14 -14 -22 -37 -48 -48 -47 -44 -29 2 26 19 -10 -32 -19 20 43 27 -22 -70 -79 -47 -11 -13 -47 -77 -77 -67 -73 -93 -107 -110 -121 -141 -149 -129 -89 -59 -55 -71 -86 -78 -42 -1 13 -4 -16 7 52 78 73 56 52 62 71 73 72 66 57 49 35 13 -10 -8 24 55 62 55 69 96 106 87 75 97 136 155 144 133 135 142 136 120 111 105 97 79 51 24 1 -16 -29 -38 -38 -25 -14 -23 -53 -81 -91 -96 -110 -123 -116 -85 -60 -57 -69 -77 -81 -85 -92 -99 -109 -118 -112 -79 -33 -7 -11 -25 -24 -3 27 56 82 97 98 97 107 130 143 127 101 82 73 56 27 2 -4 -10 -36 -82 -118 -133 -137 -149 -159 -152 -135 -141 -183 -231 -240 -206 -168 -158 -166 -166 -153 -141 -142 -145 -129 -85 +0 0 0 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -2 -2 -1 0 0 -1 -1 -1 -1 0 0 -1 -1 -1 0 0 0 -2 -4 -4 -4 -3 -1 1 3 4 3 2 5 9 12 13 12 13 14 15 14 14 14 13 10 8 7 4 -2 -9 -11 -7 -2 -2 -5 -5 0 3 2 -1 -4 -4 -1 2 
3 2 0 -3 -5 -7 -13 -21 -25 -24 -22 -21 -21 -19 -17 -18 -23 -35 -49 -64 -73 -71 -62 -52 -50 -50 -44 -38 -42 -51 -47 -23 5 9 -10 -27 -21 4 22 20 8 3 9 18 22 21 20 22 24 21 15 14 18 20 13 0 -9 -8 -7 -13 -26 -33 -28 -14 1 8 4 -5 -7 4 21 30 29 26 26 25 16 -5 -31 -51 -49 -23 13 27 9 -20 -28 -5 18 14 -10 -25 -14 12 29 25 4 -22 -35 -34 -34 -48 -64 -58 -29 -8 -21 -49 -58 -44 -35 -40 -43 -26 -1 12 18 27 37 37 31 39 66 88 87 74 78 104 123 118 107 114 131 141 139 135 133 119 86 44 17 4 -3 0 22 54 68 49 12 -9 -3 5 1 -12 -18 -2 24 39 30 7 -11 -13 -13 -21 -34 -44 -44 -42 -40 -26 1 22 16 -9 -28 -17 16 36 22 -19 -58 -65 -39 -9 -11 -38 -61 -60 -52 -56 -71 -81 -82 -90 -103 -108 -93 -64 -42 -39 -49 -59 -53 -28 -1 8 -3 -11 4 32 47 44 33 30 36 40 41 40 36 30 26 18 6 -6 -5 11 26 29 25 32 43 47 38 32 41 56 63 57 52 52 53 50 43 39 36 33 26 16 7 0 -5 -9 -12 -11 -7 -4 -7 -14 -21 -23 -23 -26 -28 -26 -18 -13 -12 -14 -15 -15 -15 -16 -16 -17 -18 -17 -11 -5 -1 -2 -3 -3 -1 2 5 7 8 8 7 8 9 9 8 6 4 3 2 1 0 -1 -1 -2 -3 -4 -4 -4 -4 -3 -3 -2 -2 -3 -3 -2 -2 -1 -1 -1 -1 -1 -1 -1 -1 -1 0 +-22 -30 -5 20 16 -10 -26 -14 13 31 27 5 -23 -36 -35 -35 -49 -66 -59 -29 -8 -21 -49 -58 -44 -35 -40 -43 -26 -1 13 19 28 38 38 32 40 67 89 87 74 79 105 124 119 108 115 132 142 140 136 135 121 87 45 18 5 -3 0 23 56 71 51 13 -9 -3 6 2 -12 -18 -2 26 42 33 8 -11 -14 -14 -22 -37 -48 -48 -47 -44 -29 2 26 19 -10 -32 -19 20 43 27 -22 -70 -79 -47 -11 -13 -47 -77 -77 -67 -73 -93 -107 -110 -121 -141 -149 -129 -89 -59 -55 -71 -86 -78 -42 -1 13 -4 -16 7 52 78 73 56 52 62 71 73 72 66 57 49 35 13 -10 -8 24 55 62 55 69 96 106 87 75 97 136 155 144 133 135 142 136 120 111 105 97 79 51 24 1 -16 -29 -38 -38 -25 -14 -23 -53 -81 -91 -96 -110 -123 -116 -85 -60 -57 -69 -77 -81 -85 -92 -99 -109 -118 -112 -79 -33 -7 -11 -25 -24 -3 27 56 82 97 98 97 107 130 143 127 101 82 73 56 27 2 -4 -10 -36 -82 -118 -133 -137 -149 -159 -152 -135 -141 -183 -231 -240 -206 -168 -158 -166 -166 -153 -141 -142 -145 -129 -85 -34 3 19 34 54 74 82 78 79 95 110 107 89 78 
87 105 114 108 96 78 62 54 64 71 47 -5 -49 -60 -52 -57 -80 -99 -101 -105 -127 -151 -152 -129 -109 -107 -118 -123 -122 -119 -114 -101 -76 -40 10 67 116 133 131 133 156 186 199 204 223 252 262 247 235 250 274 272 247 229 233 234 205 161 140 136 119 71 21 -1 -3 -9 -35 -71 -108 -141 -176 -210 -250 -284 -295 -270 -220 -176 -163 -175 -190 -193 -181 -155 -120 -91 -77 -74 -68 -50 -19 23 65 90 88 76 81 110 142 151 139 125 116 111 107 113 125 128 118 97 71 48 34 33 42 47 37 12 -16 -39 -54 -63 -76 -99 -122 -130 -120 -114 -126 -141 -127 -89 -59 -57 -63 -48 -13 20 39 56 73 80 81 92 122 146 +0 -1 -1 0 0 -1 -1 -1 0 0 0 0 -1 -1 -1 -1 -1 -2 -2 -1 -1 -1 -2 -2 -2 -2 -2 -2 -2 -1 0 1 1 2 2 2 3 5 7 8 7 8 11 13 13 13 14 17 19 20 20 20 19 14 7 3 0 -1 0 4 11 15 11 2 -3 -1 1 0 -4 -5 -1 7 12 9 2 -4 -5 -5 -8 -13 -17 -18 -18 -17 -12 0 10 7 -5 -14 -9 8 18 12 -11 -33 -38 -23 -6 -7 -24 -40 -41 -36 -40 -51 -59 -62 -69 -81 -87 -77 -54 -36 -34 -45 -55 -50 -28 -1 8 -3 -11 4 35 54 51 39 37 44 51 53 53 49 43 37 26 10 -8 -7 19 44 50 44 56 79 88 73 63 82 116 133 124 116 118 125 120 107 99 94 87 71 46 22 0 -15 -28 -36 -36 -24 -14 -22 -51 -78 -88 -93 -107 -120 -113 -83 -59 -56 -68 -76 -80 -84 -91 -99 -109 -118 -112 -79 -33 -7 -11 -25 -24 -3 26 56 82 96 97 96 106 129 142 126 100 81 72 55 26 1 -4 -10 -36 -81 -116 -130 -134 -145 -155 -147 -131 -136 -176 -221 -229 -196 -159 -149 -156 -155 -143 -131 -131 -133 -118 -78 -31 2 16 30 47 64 71 67 68 81 93 90 74 65 72 86 92 87 77 62 49 42 49 54 35 -4 -37 -45 -39 -42 -58 -71 -72 -74 -89 -104 -104 -87 -73 -71 -77 -80 -78 -75 -71 -62 -46 -24 5 38 66 75 73 73 84 99 104 105 114 126 129 120 112 118 127 124 110 101 100 99 85 66 56 53 46 26 7 -1 -2 -4 -12 -24 -36 -46 -55 -65 -75 -83 -84 -75 -60 -47 -42 -44 -47 -46 -42 -35 -26 -20 -16 -15 -13 -10 -4 3 10 14 13 11 11 15 18 19 17 14 13 11 10 11 11 11 9 7 5 3 2 2 2 2 1 0 -1 -2 -3 -3 -3 -4 -4 -4 -3 -3 -3 -3 -2 -2 -1 -1 -1 -1 -1 0 0 0 0 0 0 0 0 0 +97 79 51 24 1 -16 -29 -38 -38 -25 -14 -23 -53 -81 -91 -96 -110 -123 -116 -85 -60 -57 
-69 -77 -81 -85 -92 -99 -109 -118 -112 -79 -33 -7 -11 -25 -24 -3 27 56 82 97 98 97 107 130 143 127 101 82 73 56 27 2 -4 -10 -36 -82 -118 -133 -137 -149 -159 -152 -135 -141 -183 -231 -240 -206 -168 -158 -166 -166 -153 -141 -142 -145 -129 -85 -34 3 19 34 54 74 82 78 79 95 110 107 89 78 87 105 114 108 96 78 62 54 64 71 47 -5 -49 -60 -52 -57 -80 -99 -101 -105 -127 -151 -152 -129 -109 -107 -118 -123 -122 -119 -114 -101 -76 -40 10 67 116 133 131 133 156 186 199 204 223 252 262 247 235 250 274 272 247 229 233 234 205 161 140 136 119 71 21 -1 -3 -9 -35 -71 -108 -141 -176 -210 -250 -284 -295 -270 -220 -176 -163 -175 -190 -193 -181 -155 -120 -91 -77 -74 -68 -50 -19 23 65 90 88 76 81 110 142 151 139 125 116 111 107 113 125 128 118 97 71 48 34 33 42 47 37 12 -16 -39 -54 -63 -76 -99 -122 -130 -120 -114 -126 -141 -127 -89 -59 -57 -63 -48 -13 20 39 56 73 80 81 92 122 146 137 102 77 81 100 110 101 89 82 87 91 74 34 -2 -7 14 28 16 -6 -12 -2 7 6 -10 -32 -60 -78 -74 -59 -54 -68 -78 -60 -23 6 4 -25 -63 -83 -72 -39 -12 -8 -15 -14 -1 13 28 53 80 83 51 20 26 61 84 63 15 -23 -30 -17 -7 -15 -36 -55 -55 -37 -4 33 54 33 -27 -80 -88 -52 -20 -27 -56 -69 -56 -41 -46 -70 -100 -123 -130 -121 -107 -110 -133 -161 -183 -191 -190 -184 -183 -200 -228 -225 -172 -96 -54 -63 -86 -78 -33 19 56 78 99 123 147 155 136 100 72 73 93 108 109 101 89 71 54 53 70 96 115 127 134 133 124 110 99 78 38 -8 -32 -24 4 28 46 60 70 64 39 -1 -43 -74 -71 -34 12 21 -22 -77 -92 -60 -22 -23 -56 +0 0 0 0 0 -1 -1 -1 -1 -1 -1 -1 -1 -1 -2 -2 -2 -3 -3 -2 -2 -2 -3 -3 -3 -4 -4 -5 -6 -7 -7 -5 -3 -1 -1 -2 -2 -1 2 5 8 9 10 10 12 15 18 16 13 11 10 8 4 0 -1 -2 -7 -16 -24 -27 -29 -33 -36 -35 -32 -35 -46 -60 -63 -56 -47 -45 -49 -50 -47 -45 -46 -48 -44 -30 -12 1 6 12 20 28 32 31 32 39 46 46 39 35 39 48 53 51 46 38 31 27 33 37 25 -3 -28 -34 -30 -33 -47 -59 -61 -64 -78 -94 -96 -83 -71 -70 -78 -82 -83 -81 -79 -71 -54 -29 7 48 84 98 97 99 118 142 153 158 174 199 208 198 190 203 225 225 205 192 196 199 175 138 121 118 104 62 18 -1 -3 -9 -32 -65 
-99 -130 -163 -195 -234 -266 -278 -255 -209 -168 -156 -168 -183 -186 -175 -151 -117 -89 -76 -73 -67 -50 -19 22 64 89 87 75 80 109 141 150 138 124 115 110 106 113 125 127 117 96 70 47 33 32 41 46 36 11 -16 -39 -54 -63 -75 -98 -120 -127 -117 -111 -123 -137 -123 -86 -57 -55 -60 -46 -13 18 36 52 67 73 74 84 111 132 123 91 68 71 88 96 88 77 70 74 77 62 28 -2 -6 11 22 12 -5 -10 -2 5 4 -8 -25 -46 -59 -56 -44 -40 -50 -56 -43 -17 4 2 -18 -43 -56 -48 -26 -8 -6 -10 -9 -1 7 16 31 46 47 28 11 14 33 44 33 7 -12 -16 -9 -4 -8 -18 -26 -26 -17 -2 14 22 13 -12 -33 -35 -21 -8 -11 -21 -25 -20 -15 -16 -23 -32 -39 -40 -37 -32 -32 -37 -44 -49 -49 -48 -45 -44 -46 -51 -49 -36 -20 -11 -12 -16 -14 -6 3 8 12 14 17 20 20 17 12 8 8 9 11 10 9 7 5 4 4 5 6 7 7 7 7 6 5 4 3 1 -1 -1 -1 0 0 0 1 1 0 0 -1 -1 -1 -1 -1 0 0 -1 -1 -1 -1 -1 -1 0 +-35 -71 -108 -141 -176 -210 -250 -284 -295 -270 -220 -176 -163 -175 -190 -193 -181 -155 -120 -91 -77 -74 -68 -50 -19 23 65 90 88 76 81 110 142 151 139 125 116 111 107 113 125 128 118 97 71 48 34 33 42 47 37 12 -16 -39 -54 -63 -76 -99 -122 -130 -120 -114 -126 -141 -127 -89 -59 -57 -63 -48 -13 20 39 56 73 80 81 92 122 146 137 102 77 81 100 110 101 89 82 87 91 74 34 -2 -7 14 28 16 -6 -12 -2 7 6 -10 -32 -60 -78 -74 -59 -54 -68 -78 -60 -23 6 4 -25 -63 -83 -72 -39 -12 -8 -15 -14 -1 13 28 53 80 83 51 20 26 61 84 63 15 -23 -30 -17 -7 -15 -36 -55 -55 -37 -4 33 54 33 -27 -80 -88 -52 -20 -27 -56 -69 -56 -41 -46 -70 -100 -123 -130 -121 -107 -110 -133 -161 -183 -191 -190 -184 -183 -200 -228 -225 -172 -96 -54 -63 -86 -78 -33 19 56 78 99 123 147 155 136 100 72 73 93 108 109 101 89 71 54 53 70 96 115 127 134 133 124 110 99 78 38 -8 -32 -24 4 28 46 60 70 64 39 -1 -43 -74 -71 -34 12 21 -22 -77 -92 -60 -22 -23 -56 -84 -83 -63 -47 -43 -32 -1 37 60 58 45 42 51 65 65 57 50 52 55 55 50 42 40 47 65 78 67 30 -3 -2 27 45 33 14 15 28 33 36 52 70 61 29 17 43 66 47 2 -15 8 28 14 -2 9 33 26 -12 -33 -11 29 38 5 -43 -69 -54 -20 -2 -13 -31 -18 26 62 58 24 -11 -23 -21 -18 -22 -32 -42 -45 -41 -33 -37 
-66 -110 -151 -173 -170 -152 -138 -143 -163 -172 -154 -119 -84 -48 -3 39 58 52 47 70 109 130 113 82 70 95 134 155 146 128 121 129 142 145 134 109 83 65 64 72 67 42 11 -11 -18 -20 -29 -48 -75 -111 -143 -155 -145 -126 -121 -131 -146 -151 -135 -108 -86 -82 -94 -99 -77 -41 -20 -26 -47 -55 -38 -6 25 37 37 38 +0 -1 -1 -1 -1 -1 -1 -1 -2 -2 -2 -2 -2 -2 -3 -3 -4 -3 -3 -3 -2 -3 -3 -2 -1 0 2 4 4 4 4 6 9 10 9 9 9 9 9 10 12 13 12 10 8 5 4 4 5 6 5 1 -3 -7 -10 -12 -15 -19 -25 -27 -26 -25 -28 -33 -30 -22 -15 -15 -17 -13 -4 5 11 16 22 24 25 30 40 49 47 36 28 30 37 42 39 35 33 36 38 32 14 -1 -4 6 13 7 -3 -6 -2 3 3 -6 -18 -33 -43 -42 -34 -32 -40 -47 -36 -14 3 2 -16 -41 -54 -47 -26 -8 -6 -11 -10 -1 9 19 37 57 60 37 14 19 46 64 48 11 -19 -24 -14 -6 -13 -30 -46 -46 -31 -4 27 45 28 -24 -70 -77 -46 -18 -24 -50 -62 -51 -38 -42 -65 -93 -114 -121 -113 -101 -104 -126 -153 -174 -183 -182 -177 -177 -194 -221 -219 -168 -94 -53 -62 -85 -77 -33 18 55 77 98 122 146 154 135 99 71 72 92 107 109 101 88 70 53 52 69 95 114 126 133 132 123 108 97 76 37 -8 -32 -24 3 27 44 58 67 61 37 -1 -42 -71 -68 -33 11 19 -21 -72 -86 -56 -21 -21 -51 -76 -75 -57 -42 -38 -29 -1 32 51 49 38 35 42 54 53 46 40 42 44 43 39 32 31 36 49 59 50 22 -3 -2 19 32 23 9 10 19 22 24 34 46 39 18 10 27 40 28 1 -9 4 16 8 -2 5 18 14 -7 -18 -6 14 19 2 -21 -34 -26 -10 -1 -6 -14 -8 11 25 23 9 -5 -9 -8 -7 -9 -12 -15 -16 -14 -11 -12 -21 -34 -45 -51 -49 -43 -38 -38 -42 -43 -38 -29 -20 -11 -1 8 11 10 8 12 19 22 18 13 10 14 19 21 19 16 14 15 15 15 13 10 7 5 5 5 5 3 0 -1 -2 -2 -2 -3 -4 -5 -6 -6 -5 -4 -4 -4 -4 -4 -3 -2 -2 -2 -2 -1 -1 -1 -1 -1 -1 -1 -1 -1 0 0 0 0 +-41 -46 -70 -100 -123 -130 -121 -107 -110 -133 -161 -183 -191 -190 -184 -183 -200 -228 -225 -172 -96 -54 -63 -86 -78 -33 19 56 78 99 123 147 155 136 100 72 73 93 108 109 101 89 71 54 53 70 96 115 127 134 133 124 110 99 78 38 -8 -32 -24 4 28 46 60 70 64 39 -1 -43 -74 -71 -34 12 21 -22 -77 -92 -60 -22 -23 -56 -84 -83 -63 -47 -43 -32 -1 37 60 58 45 42 51 65 65 57 50 52 55 55 50 42 40 47 65 78 
67 30 -3 -2 27 45 33 14 15 28 33 36 52 70 61 29 17 43 66 47 2 -15 8 28 14 -2 9 33 26 -12 -33 -11 29 38 5 -43 -69 -54 -20 -2 -13 -31 -18 26 62 58 24 -11 -23 -21 -18 -22 -32 -42 -45 -41 -33 -37 -66 -110 -151 -173 -170 -152 -138 -143 -163 -172 -154 -119 -84 -48 -3 39 58 52 47 70 109 130 113 82 70 95 134 155 146 128 121 129 142 145 134 109 83 65 64 72 67 42 11 -11 -18 -20 -29 -48 -75 -111 -143 -155 -145 -126 -121 -131 -146 -151 -135 -108 -86 -82 -94 -99 -77 -41 -20 -26 -47 -55 -38 -6 25 37 37 38 45 47 38 30 42 69 81 69 64 90 128 132 100 73 79 97 92 65 39 23 8 -1 5 9 -14 -66 -103 -96 -67 -62 -81 -91 -79 -69 -89 -129 -146 -119 -60 -13 -12 -46 -66 -38 15 39 16 -10 13 71 109 100 71 56 53 45 32 27 33 29 3 -32 -59 -64 -48 -17 8 10 -15 -46 -63 -66 -66 -68 -68 -64 -59 -60 -64 -70 -74 -65 -40 -11 5 -4 -26 -42 -39 -14 19 44 49 41 40 51 64 67 62 60 65 67 61 63 81 107 122 120 107 95 83 69 53 41 39 49 59 62 54 36 15 -5 -19 -28 -39 -58 -86 -119 -153 -182 -201 -198 -175 -151 -149 -177 -217 -240 -223 -163 -89 -43 -40 -47 -25 36 92 106 84 64 74 116 165 198 207 195 190 209 247 272 +0 -1 -1 -1 -1 -1 -1 -1 -1 -1 -2 -2 -2 -3 -3 -3 -4 -5 -5 -4 -3 -2 -2 -3 -3 -2 0 2 3 5 6 8 9 9 7 5 5 7 9 10 9 9 7 6 6 8 12 15 17 19 19 19 17 16 13 6 -2 -7 -5 0 5 9 13 16 15 9 -1 -11 -20 -20 -10 3 6 -7 -24 -29 -20 -8 -8 -20 -30 -30 -23 -18 -17 -13 -1 14 24 24 19 18 22 29 29 26 23 24 26 27 25 21 20 24 34 42 36 16 -2 -2 15 26 19 8 9 17 20 22 33 45 40 19 11 29 45 32 1 -11 5 20 10 -2 6 24 19 -10 -26 -9 22 30 3 -35 -56 -45 -17 -2 -11 -27 -16 22 53 49 20 -10 -21 -19 -16 -20 -29 -38 -41 -38 -31 -35 -62 -103 -141 -162 -160 -144 -131 -136 -156 -165 -148 -115 -82 -47 -3 38 56 50 46 68 107 128 111 81 69 94 133 154 145 127 120 128 141 144 133 109 83 64 63 71 66 41 10 -11 -18 -20 -29 -48 -75 -110 -142 -153 -143 -124 -119 -128 -143 -147 -131 -105 -83 -79 -90 -95 -74 -39 -19 -25 -45 -52 -36 -6 23 33 33 34 40 42 33 26 37 60 70 59 55 77 108 111 83 60 65 79 75 52 31 18 6 -1 3 6 -11 -51 -78 -72 -50 -46 -59 -66 -57 -49 -62 -89 -100 
-81 -40 -9 -8 -30 -42 -24 9 23 9 -6 7 41 62 56 39 30 28 24 16 14 16 14 1 -16 -29 -31 -23 -8 3 4 -7 -20 -27 -28 -27 -27 -27 -25 -22 -22 -23 -25 -26 -22 -14 -4 1 -2 -8 -13 -12 -4 5 11 12 10 9 12 14 14 13 12 13 13 11 11 14 18 20 19 16 14 11 9 7 5 4 5 6 6 5 3 1 -1 -2 -3 -3 -5 -6 -8 -10 -11 -11 -10 -9 -7 -6 -7 -8 -8 -7 -5 -3 -1 -1 -1 -1 0 1 1 0 0 0 0 0 0 0 0 0 0 0 0 +-45 -41 -33 -37 -66 -110 -151 -173 -170 -152 -138 -143 -163 -172 -154 -119 -84 -48 -3 39 58 52 47 70 109 130 113 82 70 95 134 155 146 128 121 129 142 145 134 109 83 65 64 72 67 42 11 -11 -18 -20 -29 -48 -75 -111 -143 -155 -145 -126 -121 -131 -146 -151 -135 -108 -86 -82 -94 -99 -77 -41 -20 -26 -47 -55 -38 -6 25 37 37 38 45 47 38 30 42 69 81 69 64 90 128 132 100 73 79 97 92 65 39 23 8 -1 5 9 -14 -66 -103 -96 -67 -62 -81 -91 -79 -69 -89 -129 -146 -119 -60 -13 -12 -46 -66 -38 15 39 16 -10 13 71 109 100 71 56 53 45 32 27 33 29 3 -32 -59 -64 -48 -17 8 10 -15 -46 -63 -66 -66 -68 -68 -64 -59 -60 -64 -70 -74 -65 -40 -11 5 -4 -26 -42 -39 -14 19 44 49 41 40 51 64 67 62 60 65 67 61 63 81 107 122 120 107 95 83 69 53 41 39 49 59 62 54 36 15 -5 -19 -28 -39 -58 -86 -119 -153 -182 -201 -198 -175 -151 -149 -177 -217 -240 -223 -163 -89 -43 -40 -47 -25 36 92 106 84 64 74 116 165 198 207 195 190 209 247 272 267 238 207 191 193 210 228 219 177 125 93 83 70 43 12 -10 -33 -63 -90 -92 -76 -59 -64 -78 -86 -80 -70 -73 -93 -110 -104 -71 -48 -61 -97 -115 -97 -58 -25 -2 10 4 -29 -68 -74 -27 37 69 48 12 6 37 67 59 31 10 12 13 -13 -60 -102 -123 -130 -136 -149 -169 -186 -191 -180 -159 -145 -132 -112 -81 -56 -49 -56 -54 -36 -7 20 42 51 47 38 36 47 58 64 68 70 67 61 64 78 83 60 25 14 41 79 96 87 76 74 82 87 93 103 115 119 115 108 110 118 122 108 77 46 29 18 -2 -35 -61 -67 -70 -99 -144 -174 -163 -133 -119 -129 -143 -144 -136 -137 -148 -150 -137 -121 -115 -117 -112 -90 -57 -32 -27 -37 -43 -31 -7 12 20 32 61 95 101 67 28 +0 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -2 -2 -2 -2 -2 -2 -1 -1 0 1 1 1 2 3 5 4 3 3 5 7 9 9 8 8 9 11 12 11 10 8 6 6 8 7 5 1 -2 -3 -3 
-5 -8 -13 -19 -25 -28 -27 -24 -24 -27 -31 -33 -30 -25 -21 -20 -24 -26 -21 -12 -6 -8 -14 -17 -12 -2 7 12 12 12 15 16 13 11 15 26 31 27 26 37 54 57 44 32 36 45 43 31 19 11 4 -1 2 4 -8 -36 -57 -54 -38 -36 -48 -54 -48 -42 -55 -81 -92 -76 -39 -9 -8 -31 -45 -26 10 27 11 -8 9 51 79 73 52 42 40 34 24 21 25 22 2 -26 -48 -53 -40 -15 6 8 -13 -40 -54 -57 -58 -60 -60 -57 -53 -54 -58 -64 -68 -60 -37 -11 4 -4 -25 -40 -37 -14 17 41 46 39 38 49 61 64 60 58 63 65 59 61 79 105 120 118 106 94 82 68 52 40 38 48 58 61 53 36 15 -5 -19 -28 -39 -58 -86 -119 -153 -181 -200 -197 -174 -150 -148 -175 -214 -236 -219 -160 -87 -42 -39 -46 -25 34 88 101 79 60 69 109 154 184 192 180 174 191 225 246 240 213 184 169 170 184 198 189 152 107 79 70 58 35 9 -9 -27 -52 -73 -74 -61 -47 -50 -61 -66 -61 -53 -55 -69 -81 -76 -51 -35 -43 -68 -80 -67 -40 -17 -2 6 2 -19 -43 -46 -17 22 41 28 6 3 20 37 32 16 5 6 6 -7 -31 -51 -61 -63 -65 -70 -78 -84 -85 -79 -68 -61 -55 -46 -32 -22 -19 -21 -20 -13 -3 6 14 16 15 11 10 13 16 18 18 18 17 15 15 18 19 13 5 3 8 16 18 16 14 13 14 14 14 15 17 17 15 14 14 14 14 12 8 4 2 1 -1 -3 -5 -6 -6 -7 -10 -11 -10 -8 -6 -6 -7 -6 -5 -5 -5 -5 -4 -3 -3 -3 -2 -2 -1 -1 -1 -1 -1 -1 -1 0 0 0 0 0 0 0 0 +-74 -65 -40 -11 5 -4 -26 -42 -39 -14 19 44 49 41 40 51 64 67 62 60 65 67 61 63 81 107 122 120 107 95 83 69 53 41 39 49 59 62 54 36 15 -5 -19 -28 -39 -58 -86 -119 -153 -182 -201 -198 -175 -151 -149 -177 -217 -240 -223 -163 -89 -43 -40 -47 -25 36 92 106 84 64 74 116 165 198 207 195 190 209 247 272 267 238 207 191 193 210 228 219 177 125 93 83 70 43 12 -10 -33 -63 -90 -92 -76 -59 -64 -78 -86 -80 -70 -73 -93 -110 -104 -71 -48 -61 -97 -115 -97 -58 -25 -2 10 4 -29 -68 -74 -27 37 69 48 12 6 37 67 59 31 10 12 13 -13 -60 -102 -123 -130 -136 -149 -169 -186 -191 -180 -159 -145 -132 -112 -81 -56 -49 -56 -54 -36 -7 20 42 51 47 38 36 47 58 64 68 70 67 61 64 78 83 60 25 14 41 79 96 87 76 74 82 87 93 103 115 119 115 108 110 118 122 108 77 46 29 18 -2 -35 -61 -67 -70 -99 -144 -174 -163 -133 -119 -129 -143 -144 
-136 -137 -148 -150 -137 -121 -115 -117 -112 -90 -57 -32 -27 -37 -43 -31 -7 12 20 32 61 95 101 67 28 22 53 91 110 114 122 136 143 133 122 124 139 152 142 110 68 33 13 11 25 45 51 29 -17 -59 -70 -49 -18 -3 -9 -16 -15 -13 -27 -56 -83 -86 -66 -46 -42 -51 -58 -55 -52 -49 -45 -35 -31 -37 -44 -46 -39 -33 -33 -47 -74 -111 -138 -135 -112 -97 -111 -133 -133 -111 -97 -103 -107 -95 -66 -29 10 48 65 58 58 90 138 164 162 164 192 209 180 124 89 93 108 110 108 109 101 78 62 75 105 121 117 107 91 62 37 46 77 76 23 -17 10 61 49 -27 -74 -35 21 7 -61 -92 -61 -32 -50 -76 -62 -33 -42 -83 -95 -59 -25 -36 -71 -86 -73 -62 -68 -67 -43 -8 7 1 -5 8 36 55 43 15 9 37 68 62 32 21 49 75 56 5 -31 -35 -39 -68 -88 +0 -1 -1 -1 0 -1 -1 -1 -1 -1 0 0 0 0 0 0 1 1 1 1 1 1 1 2 2 4 5 5 5 5 4 4 3 2 2 3 4 5 4 3 1 -1 -3 -4 -5 -8 -11 -16 -22 -27 -30 -31 -29 -26 -26 -32 -40 -46 -44 -34 -19 -10 -9 -11 -6 8 22 27 22 17 20 32 47 58 63 60 60 68 82 92 93 84 75 71 73 81 90 88 72 52 39 35 30 19 5 -5 -16 -31 -44 -46 -39 -31 -34 -42 -47 -44 -39 -41 -53 -64 -61 -42 -29 -37 -60 -72 -61 -37 -17 -2 6 2 -20 -47 -51 -19 25 48 34 8 4 27 49 44 23 7 9 10 -11 -48 -82 -99 -106 -111 -123 -140 -156 -161 -153 -136 -125 -114 -98 -71 -50 -44 -50 -49 -33 -7 18 38 46 43 35 33 43 54 60 64 66 63 58 61 74 79 57 24 13 39 77 93 85 74 72 80 86 92 102 114 118 114 107 109 117 121 107 76 45 29 18 -2 -35 -61 -67 -70 -99 -144 -174 -163 -133 -119 -128 -142 -143 -135 -135 -146 -147 -134 -118 -112 -114 -109 -87 -55 -31 -26 -36 -41 -30 -7 11 18 29 56 87 92 61 25 19 47 81 97 100 107 118 124 114 104 105 117 127 118 91 55 26 10 8 19 35 39 22 -14 -46 -54 -37 -14 -3 -7 -12 -11 -10 -19 -39 -58 -59 -45 -31 -28 -34 -38 -35 -33 -31 -28 -22 -19 -22 -26 -27 -23 -19 -19 -26 -40 -59 -72 -70 -57 -49 -55 -64 -63 -52 -45 -47 -48 -42 -29 -13 4 19 25 22 22 33 50 58 56 56 64 68 57 38 27 27 31 31 29 29 26 19 15 18 24 27 25 23 19 12 7 8 14 13 3 -3 1 9 7 -4 -11 -5 2 0 -8 -11 -7 -4 -5 -8 -6 -3 -4 -7 -7 -4 -2 -3 -5 -5 -4 -3 -3 -3 -2 -1 0 0 -1 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 -1 
-1 -1 -1 0 +20 42 51 47 38 36 47 58 64 68 70 67 61 64 78 83 60 25 14 41 79 96 87 76 74 82 87 93 103 115 119 115 108 110 118 122 108 77 46 29 18 -2 -35 -61 -67 -70 -99 -144 -174 -163 -133 -119 -129 -143 -144 -136 -137 -148 -150 -137 -121 -115 -117 -112 -90 -57 -32 -27 -37 -43 -31 -7 12 20 32 61 95 101 67 28 22 53 91 110 114 122 136 143 133 122 124 139 152 142 110 68 33 13 11 25 45 51 29 -17 -59 -70 -49 -18 -3 -9 -16 -15 -13 -27 -56 -83 -86 -66 -46 -42 -51 -58 -55 -52 -49 -45 -35 -31 -37 -44 -46 -39 -33 -33 -47 -74 -111 -138 -135 -112 -97 -111 -133 -133 -111 -97 -103 -107 -95 -66 -29 10 48 65 58 58 90 138 164 162 164 192 209 180 124 89 93 108 110 108 109 101 78 62 75 105 121 117 107 91 62 37 46 77 76 23 -17 10 61 49 -27 -74 -35 21 7 -61 -92 -61 -32 -50 -76 -62 -33 -42 -83 -95 -59 -25 -36 -71 -86 -73 -62 -68 -67 -43 -8 7 1 -5 8 36 55 43 15 9 37 68 62 32 21 49 75 56 5 -31 -35 -39 -68 -88 -63 0 42 23 -40 -98 -123 -121 -112 -100 -81 -55 -42 -53 -77 -82 -58 -25 -12 -19 -12 20 59 64 35 3 -7 2 12 11 10 12 21 28 24 7 -10 -5 27 60 65 46 32 39 53 54 42 32 31 27 22 26 40 49 31 6 -3 8 15 6 1 17 42 43 19 -1 9 33 43 34 19 11 10 6 3 -4 -20 -41 -54 -52 -43 -41 -50 -57 -56 -54 -58 -72 -82 -86 -81 -77 -77 -73 -59 -46 -44 -53 -55 -46 -35 -38 -43 -44 -48 -63 -75 -54 -5 31 27 10 23 57 65 40 22 53 104 124 105 90 104 124 121 98 77 68 65 61 56 39 10 -22 -35 -24 0 12 4 -20 -49 -74 -88 -94 -95 -94 -95 -100 -107 -108 -97 -78 -62 -55 -45 -25 1 21 35 50 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 0 0 0 0 2 2 2 2 2 3 3 4 5 6 6 6 6 7 8 9 8 6 4 2 1 -1 -4 -7 -8 -9 -13 -20 -25 -24 -20 -19 -21 -24 -25 -25 -26 -29 -30 -28 -26 -25 -26 -26 -22 -14 -8 -7 -10 -12 -9 -2 3 5 9 19 30 33 22 9 7 18 33 40 43 47 53 57 54 50 52 60 67 63 50 31 15 6 5 12 22 26 15 -9 -32 -39 -28 -11 -2 -6 -10 -9 -8 -17 -35 -52 -55 -42 -30 -28 -34 -39 -38 -36 -34 -32 -25 -22 -27 -32 -34 -29 -25 -25 -36 -57 -86 -108 -106 -89 -78 -90 -108 -109 -92 -81 -86 -90 -81 -57 -25 8 41 56 50 51 79 123 147 146 148 174 191 165 114 82 86 101 103 101 103 96 74 
59 72 101 116 113 103 88 60 36 45 75 74 22 -17 9 60 48 -27 -74 -35 20 6 -61 -92 -61 -32 -50 -76 -62 -33 -42 -83 -95 -59 -25 -36 -71 -86 -73 -62 -68 -67 -43 -8 6 0 -5 7 34 53 41 14 8 35 64 58 30 19 46 70 52 4 -29 -33 -36 -62 -80 -57 0 37 20 -36 -87 -108 -105 -97 -86 -69 -47 -36 -45 -64 -68 -48 -21 -10 -16 -10 15 45 49 26 2 -6 1 8 8 7 8 14 19 16 4 -7 -4 17 39 42 29 20 24 32 33 25 19 18 15 12 14 22 27 16 3 -2 4 7 3 0 8 20 20 8 -1 4 14 18 14 7 4 4 2 1 -2 -8 -15 -20 -19 -15 -14 -17 -19 -18 -17 -18 -21 -24 -24 -22 -21 -20 -19 -15 -11 -11 -12 -12 -10 -8 -8 -9 -9 -9 -11 -13 -9 -1 4 3 1 3 7 7 4 2 5 10 12 9 7 8 9 9 7 5 4 3 3 2 1 0 -1 -2 -1 0 0 0 -1 -2 -2 -2 -2 -2 -2 -2 -1 -1 -1 -1 -1 -1 -1 -1 -1 0 0 0 0 +164 192 209 180 124 89 93 108 110 108 109 101 78 62 75 105 121 117 107 91 62 37 46 77 76 23 -17 10 61 49 -27 -74 -35 21 7 -61 -92 -61 -32 -50 -76 -62 -33 -42 -83 -95 -59 -25 -36 -71 -86 -73 -62 -68 -67 -43 -8 7 1 -5 8 36 55 43 15 9 37 68 62 32 21 49 75 56 5 -31 -35 -39 -68 -88 -63 0 42 23 -40 -98 -123 -121 -112 -100 -81 -55 -42 -53 -77 -82 -58 -25 -12 -19 -12 20 59 64 35 3 -7 2 12 11 10 12 21 28 24 7 -10 -5 27 60 65 46 32 39 53 54 42 32 31 27 22 26 40 49 31 6 -3 8 15 6 1 17 42 43 19 -1 9 33 43 34 19 11 10 6 3 -4 -20 -41 -54 -52 -43 -41 -50 -57 -56 -54 -58 -72 -82 -86 -81 -77 -77 -73 -59 -46 -44 -53 -55 -46 -35 -38 -43 -44 -48 -63 -75 -54 -5 31 27 10 23 57 65 40 22 53 104 124 105 90 104 124 121 98 77 68 65 61 56 39 10 -22 -35 -24 0 12 4 -20 -49 -74 -88 -94 -95 -94 -95 -100 -107 -108 -97 -78 -62 -55 -45 -25 1 21 35 50 68 87 105 124 142 140 112 74 49 51 65 70 60 44 38 41 40 29 20 25 30 11 -28 -47 -21 21 21 -25 -63 -48 -7 -4 -53 -99 -95 -61 -46 -59 -70 -53 -28 -20 -25 -15 11 28 22 8 21 64 113 143 144 129 115 110 107 98 84 76 78 81 72 53 44 47 46 29 5 -5 -1 -4 -20 -40 -41 -27 -17 -18 -22 -23 -21 -12 5 20 18 0 -15 -12 -2 -3 -11 -12 -2 -3 -26 -68 -111 -143 -157 -153 -141 -141 -160 -188 -211 -216 -204 -176 -144 -130 -140 -156 -155 -143 -138 -143 -139 -112 -81 -60 -53 -43 -30 -21 -17 
-16 -24 -37 -34 5 62 96 98 102 125 151 151 140 154 191 208 194 174 177 193 188 166 162 186 206 204 188 177 161 119 71 56 86 119 110 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 2 2 2 2 1 1 1 2 2 0 -1 0 3 2 -2 -5 -3 1 0 -5 -8 -6 -3 -5 -8 -7 -4 -5 -10 -12 -8 -4 -5 -11 -13 -12 -10 -12 -12 -8 -2 1 0 -2 1 7 12 9 3 2 9 17 16 8 5 13 21 16 1 -10 -12 -13 -23 -31 -22 0 15 8 -16 -38 -49 -49 -46 -42 -35 -24 -19 -24 -36 -39 -28 -13 -6 -10 -7 10 30 33 18 1 -4 1 6 6 5 7 12 16 14 4 -7 -4 17 39 42 30 21 26 36 37 29 22 22 19 16 19 29 36 23 4 -3 6 11 4 0 13 34 35 15 -1 7 27 36 28 16 9 8 5 2 -4 -18 -37 -49 -47 -39 -38 -46 -53 -52 -51 -55 -68 -78 -82 -77 -74 -74 -70 -57 -45 -43 -52 -54 -45 -35 -38 -43 -44 -48 -63 -75 -54 -5 30 26 9 22 56 64 39 21 52 103 124 105 89 103 123 120 97 76 67 64 60 55 38 9 -22 -35 -24 0 11 3 -20 -48 -72 -86 -91 -92 -91 -91 -96 -102 -103 -92 -74 -59 -52 -42 -24 0 19 31 45 61 78 93 110 125 122 97 64 42 43 55 59 50 36 31 33 32 23 16 19 23 8 -22 -37 -17 15 15 -19 -47 -36 -6 -3 -38 -70 -67 -42 -32 -40 -47 -35 -19 -13 -16 -10 6 17 13 4 12 37 64 81 80 71 62 58 56 50 42 38 38 39 34 25 20 21 20 12 2 -3 -1 -2 -9 -16 -16 -11 -7 -7 -8 -9 -8 -5 1 6 5 0 -5 -4 -1 -1 -3 -4 -1 -1 -7 -17 -26 -32 -34 -33 -29 -28 -31 -35 -38 -38 -34 -29 -23 -20 -21 -22 -21 -19 -17 -17 -16 -13 -9 -6 -5 -4 -3 -2 -2 -2 -2 -3 -3 0 3 4 4 4 4 5 5 4 4 4 4 4 3 2 2 2 1 1 1 1 1 0 0 0 0 0 0 0 0 0 +-43 -41 -50 -57 -56 -54 -58 -72 -82 -86 -81 -77 -77 -73 -59 -46 -44 -53 -55 -46 -35 -38 -43 -44 -48 -63 -75 -54 -5 31 27 10 23 57 65 40 22 53 104 124 105 90 104 124 121 98 77 68 65 61 56 39 10 -22 -35 -24 0 12 4 -20 -49 -74 -88 -94 -95 -94 -95 -100 -107 -108 -97 -78 -62 -55 -45 -25 1 21 35 50 68 87 105 124 142 140 112 74 49 51 65 70 60 44 38 41 40 29 20 25 30 11 -28 -47 -21 21 21 -25 -63 -48 -7 -4 -53 -99 -95 -61 -46 -59 -70 -53 -28 -20 -25 -15 11 28 22 8 21 64 113 143 144 129 115 110 107 98 84 76 78 81 72 53 44 47 46 29 5 -5 -1 -4 -20 -40 -41 -27 -17 -18 -22 -23 -21 -12 5 20 18 0 -15 -12 -2 -3 -11 -12 -2 -3 -26 -68 -111 -143 
-157 -153 -141 -141 -160 -188 -211 -216 -204 -176 -144 -130 -140 -156 -155 -143 -138 -143 -139 -112 -81 -60 -53 -43 -30 -21 -17 -16 -24 -37 -34 5 62 96 98 102 125 151 151 140 154 191 208 194 174 177 193 188 166 162 186 206 204 188 177 161 119 71 56 86 119 110 60 16 7 17 17 1 -19 -33 -42 -49 -51 -47 -33 -14 2 3 -12 -33 -48 -60 -84 -110 -120 -113 -116 -142 -171 -170 -147 -140 -156 -161 -130 -87 -81 -113 -142 -137 -107 -76 -59 -57 -63 -68 -50 -5 42 61 49 33 40 67 96 115 130 149 161 159 141 120 109 103 84 62 57 69 70 39 -9 -35 -26 0 14 12 2 -14 -45 -86 -121 -140 -139 -123 -107 -109 -136 -167 -174 -163 -152 -153 -146 -117 -81 -55 -39 -19 3 12 13 21 45 68 85 101 125 147 159 161 146 111 67 48 71 104 106 72 55 75 116 128 108 80 65 56 46 35 24 13 2 -5 -7 -11 -21 -26 -22 -15 -17 -25 -20 4 37 62 64 55 48 52 60 55 29 5 2 16 27 22 18 26 40 33 4 -26 +0 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -2 -2 -1 -2 -2 -2 -2 -3 -4 -3 -1 1 1 0 1 3 4 3 1 4 9 11 10 9 11 13 14 11 9 9 8 8 8 6 1 -4 -7 -5 0 2 0 -5 -11 -16 -20 -22 -23 -23 -24 -26 -29 -30 -27 -23 -19 -17 -14 -8 0 6 11 17 23 31 38 46 53 54 44 29 20 21 27 30 26 19 17 19 18 13 9 12 15 5 -15 -25 -12 11 11 -14 -36 -28 -5 -3 -32 -60 -59 -38 -29 -38 -46 -35 -19 -14 -17 -11 7 19 15 5 15 46 82 105 107 96 87 84 82 76 65 60 62 65 58 43 36 38 38 24 4 -5 -1 -4 -18 -35 -36 -24 -16 -17 -20 -21 -20 -11 4 18 16 0 -14 -12 -2 -3 -11 -12 -2 -3 -25 -66 -108 -139 -153 -150 -138 -139 -157 -185 -208 -214 -202 -175 -143 -130 -140 -156 -155 -143 -138 -143 -139 -112 -81 -60 -53 -43 -30 -21 -17 -16 -24 -37 -34 4 61 95 97 100 123 148 148 137 150 186 202 188 168 171 185 180 158 154 176 195 192 176 165 150 110 65 51 78 108 99 54 14 6 15 15 0 -17 -29 -37 -42 -44 -40 -28 -12 1 2 -10 -27 -39 -48 -67 -87 -94 -88 -89 -108 -129 -127 -109 -103 -113 -116 -93 -62 -57 -78 -97 -93 -72 -51 -39 -37 -41 -43 -32 -4 25 36 28 19 22 37 53 63 70 79 84 82 72 60 54 50 40 29 26 31 31 17 -4 -15 -11 0 5 4 0 -6 -17 -32 -44 -49 -48 -42 -35 -35 -43 -51 -52 -48 -44 -43 -40 -31 -21 
-14 -10 -5 0 2 2 4 9 13 16 18 22 25 26 25 22 16 9 6 9 13 12 8 6 8 11 12 10 7 5 4 3 2 1 0 0 -1 -1 -1 -1 -2 -1 -1 -1 -1 -1 0 0 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +-21 -12 5 20 18 0 -15 -12 -2 -3 -11 -12 -2 -3 -26 -68 -111 -143 -157 -153 -141 -141 -160 -188 -211 -216 -204 -176 -144 -130 -140 -156 -155 -143 -138 -143 -139 -112 -81 -60 -53 -43 -30 -21 -17 -16 -24 -37 -34 5 62 96 98 102 125 151 151 140 154 191 208 194 174 177 193 188 166 162 186 206 204 188 177 161 119 71 56 86 119 110 60 16 7 17 17 1 -19 -33 -42 -49 -51 -47 -33 -14 2 3 -12 -33 -48 -60 -84 -110 -120 -113 -116 -142 -171 -170 -147 -140 -156 -161 -130 -87 -81 -113 -142 -137 -107 -76 -59 -57 -63 -68 -50 -5 42 61 49 33 40 67 96 115 130 149 161 159 141 120 109 103 84 62 57 69 70 39 -9 -35 -26 0 14 12 2 -14 -45 -86 -121 -140 -139 -123 -107 -109 -136 -167 -174 -163 -152 -153 -146 -117 -81 -55 -39 -19 3 12 13 21 45 68 85 101 125 147 159 161 146 111 67 48 71 104 106 72 55 75 116 128 108 80 65 56 46 35 24 13 2 -5 -7 -11 -21 -26 -22 -15 -17 -25 -20 4 37 62 64 55 48 52 60 55 29 5 2 16 27 22 18 26 40 33 4 -26 -35 -25 -16 -14 -21 -33 -54 -80 -106 -125 -137 -134 -118 -94 -77 -71 -62 -49 -39 -43 -55 -54 -33 -10 0 4 16 39 63 77 87 92 89 80 82 98 108 92 60 42 54 71 63 39 27 40 56 49 26 6 -6 -17 -39 -56 -60 -65 -84 -114 -125 -109 -86 -84 -98 -104 -89 -65 -52 -52 -58 -70 -93 -123 -137 -125 -94 -67 -53 -51 -52 -42 -16 16 37 42 47 66 90 98 86 78 87 98 83 47 11 -4 -6 -9 -21 -38 -50 -50 -32 -6 3 -13 -37 -38 -16 -2 -10 -28 -23 9 47 63 60 53 50 48 42 42 54 69 70 64 71 107 149 161 135 103 105 140 168 165 144 133 132 126 103 72 42 19 1 -14 -33 -66 -107 -129 -122 -107 -119 -151 -161 -131 -97 -101 -144 -180 +0 -1 0 0 0 0 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -2 -3 -4 -4 -4 -4 -5 -7 -8 -9 -9 -9 -8 -7 -8 -10 -10 -10 -10 -11 -12 -10 -8 -6 -6 -5 -4 -3 -2 -2 -4 -5 -5 0 9 14 15 16 21 26 27 26 30 38 43 41 38 40 45 45 41 41 48 55 56 53 51 47 36 22 17 28 39 37 20 5 2 6 6 0 -8 -14 -18 -21 -22 -21 -15 -7 0 1 -6 -16 -24 -30 -43 -57 -63 -60 -63 -78 -95 
-96 -84 -81 -91 -95 -78 -53 -50 -71 -90 -88 -69 -50 -39 -38 -43 -47 -35 -4 29 43 35 23 29 49 71 86 98 113 124 123 110 94 86 82 68 50 46 57 58 32 -8 -30 -23 0 12 10 1 -13 -40 -77 -109 -127 -127 -113 -98 -101 -126 -156 -163 -153 -143 -145 -139 -112 -78 -53 -38 -19 2 11 12 20 43 66 83 99 123 145 157 159 144 110 66 47 70 103 105 71 54 74 115 128 108 79 64 55 45 34 23 12 1 -5 -7 -11 -21 -26 -22 -15 -17 -25 -20 3 36 60 62 53 46 49 57 52 27 4 1 15 25 20 16 24 36 30 3 -24 -32 -23 -15 -13 -19 -29 -48 -70 -92 -108 -117 -114 -100 -79 -64 -59 -51 -40 -32 -35 -44 -43 -26 -8 0 3 12 29 46 56 62 65 63 56 56 67 73 61 39 27 35 45 40 24 16 24 33 29 15 3 -4 -10 -22 -31 -33 -35 -45 -60 -64 -55 -43 -42 -48 -50 -42 -30 -24 -23 -26 -30 -39 -51 -56 -50 -37 -26 -20 -19 -19 -15 -6 5 12 13 14 20 26 28 24 21 23 25 21 11 2 -1 -2 -2 -5 -8 -11 -10 -7 -2 0 -3 -7 -7 -3 -1 -2 -4 -4 1 5 7 6 5 5 4 3 3 4 5 5 4 4 6 8 9 7 5 4 5 6 6 4 4 3 3 2 1 0 0 0 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 0 +-139 -123 -107 -109 -136 -167 -174 -163 -152 -153 -146 -117 -81 -55 -39 -19 3 12 13 21 45 68 85 101 125 147 159 161 146 111 67 48 71 104 106 72 55 75 116 128 108 80 65 56 46 35 24 13 2 -5 -7 -11 -21 -26 -22 -15 -17 -25 -20 4 37 62 64 55 48 52 60 55 29 5 2 16 27 22 18 26 40 33 4 -26 -35 -25 -16 -14 -21 -33 -54 -80 -106 -125 -137 -134 -118 -94 -77 -71 -62 -49 -39 -43 -55 -54 -33 -10 0 4 16 39 63 77 87 92 89 80 82 98 108 92 60 42 54 71 63 39 27 40 56 49 26 6 -6 -17 -39 -56 -60 -65 -84 -114 -125 -109 -86 -84 -98 -104 -89 -65 -52 -52 -58 -70 -93 -123 -137 -125 -94 -67 -53 -51 -52 -42 -16 16 37 42 47 66 90 98 86 78 87 98 83 47 11 -4 -6 -9 -21 -38 -50 -50 -32 -6 3 -13 -37 -38 -16 -2 -10 -28 -23 9 47 63 60 53 50 48 42 42 54 69 70 64 71 107 149 161 135 103 105 140 168 165 144 133 132 126 103 72 42 19 1 -14 -33 -66 -107 -129 -122 -107 -119 -151 -161 -131 -97 -101 -144 -180 -182 -172 -178 -194 -198 -185 -179 -185 -176 -139 -93 -66 -54 -39 -12 14 32 57 90 119 131 133 148 167 169 141 107 95 99 96 81 67 71 84 86 75 65 63 55 15 
-49 -107 -133 -132 -136 -156 -178 -187 -184 -185 -189 -180 -150 -113 -93 -98 -107 -91 -42 13 47 59 80 136 207 251 249 219 199 198 210 228 247 262 262 248 227 207 187 169 154 142 128 107 78 36 -13 -66 -108 -131 -138 -144 -162 -202 -255 -298 -315 -311 -298 -284 -264 -244 -233 -225 -205 -167 -134 -121 -109 -71 -16 17 17 12 28 56 81 100 119 129 119 107 110 123 125 116 116 125 118 91 73 79 93 99 100 94 59 -11 -73 -80 -52 -49 -71 -59 12 70 42 -40 -80 -42 18 44 38 43 64 73 58 35 24 28 +0 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 0 0 0 0 1 1 2 3 4 5 6 7 7 5 3 2 4 7 7 5 4 6 10 11 10 8 6 6 5 4 3 1 0 -1 -2 -2 -4 -5 -4 -3 -4 -5 -4 0 7 13 14 12 11 12 14 14 7 1 0 4 7 6 5 8 12 10 1 -9 -13 -9 -6 -6 -8 -13 -22 -33 -44 -53 -59 -59 -53 -43 -36 -33 -30 -24 -20 -22 -28 -28 -18 -6 0 2 8 21 35 44 50 54 53 48 50 60 67 58 38 27 35 47 42 26 18 27 39 34 18 4 -5 -13 -30 -43 -46 -50 -65 -89 -99 -87 -69 -68 -80 -85 -74 -54 -44 -44 -50 -60 -80 -107 -119 -110 -83 -60 -48 -46 -47 -38 -15 14 33 38 43 61 83 91 80 73 82 93 79 44 10 -4 -6 -9 -21 -38 -49 -49 -32 -6 2 -13 -37 -38 -16 -2 -10 -28 -23 8 46 62 59 52 49 48 42 41 53 68 69 63 70 106 148 160 134 102 104 138 165 162 141 130 129 123 100 69 40 18 0 -14 -32 -63 -102 -123 -116 -101 -112 -141 -150 -122 -90 -93 -132 -164 -165 -155 -159 -173 -175 -163 -157 -161 -152 -120 -80 -56 -46 -33 -10 11 26 46 72 94 103 104 115 128 129 106 80 70 73 70 58 48 50 58 59 51 44 42 36 9 -32 -69 -85 -83 -85 -96 -108 -112 -109 -108 -109 -102 -84 -63 -51 -53 -57 -48 -22 6 23 28 38 64 96 114 111 96 86 84 87 93 99 103 101 94 84 75 66 59 52 47 41 34 24 10 -4 -20 -31 -37 -38 -38 -42 -51 -62 -71 -73 -70 -65 -60 -54 -49 -45 -42 -37 -29 -23 -20 -17 -11 -3 2 2 1 3 6 9 10 12 12 11 9 9 9 9 8 7 7 7 5 3 3 4 4 3 3 1 -1 -3 -3 -2 -2 -2 -1 0 0 0 -1 -1 -1 0 0 0 0 0 0 0 0 0 0 +-16 16 37 42 47 66 90 98 86 78 87 98 83 47 11 -4 -6 -9 -21 -38 -50 -50 -32 -6 3 -13 -37 -38 -16 -2 -10 -28 -23 9 47 63 60 53 50 48 42 42 54 69 70 64 71 107 149 161 135 103 105 140 168 165 144 133 132 126 103 72 42 
19 1 -14 -33 -66 -107 -129 -122 -107 -119 -151 -161 -131 -97 -101 -144 -180 -182 -172 -178 -194 -198 -185 -179 -185 -176 -139 -93 -66 -54 -39 -12 14 32 57 90 119 131 133 148 167 169 141 107 95 99 96 81 67 71 84 86 75 65 63 55 15 -49 -107 -133 -132 -136 -156 -178 -187 -184 -185 -189 -180 -150 -113 -93 -98 -107 -91 -42 13 47 59 80 136 207 251 249 219 199 198 210 228 247 262 262 248 227 207 187 169 154 142 128 107 78 36 -13 -66 -108 -131 -138 -144 -162 -202 -255 -298 -315 -311 -298 -284 -264 -244 -233 -225 -205 -167 -134 -121 -109 -71 -16 17 17 12 28 56 81 100 119 129 119 107 110 123 125 116 116 125 118 91 73 79 93 99 100 94 59 -11 -73 -80 -52 -49 -71 -59 12 70 42 -40 -80 -42 18 44 38 43 64 73 58 35 24 28 40 50 47 31 14 9 3 -23 -55 -61 -36 -7 -4 -21 -29 -20 -9 -12 -26 -42 -54 -54 -47 -37 -31 -28 -27 -29 -47 -73 -89 -79 -50 -22 -1 16 36 51 47 23 -4 -9 13 32 10 -48 -92 -80 -23 22 21 -15 -34 -12 42 90 101 72 33 17 30 57 82 95 91 63 23 -5 -5 13 25 20 0 -29 -61 -85 -90 -83 -84 -98 -103 -85 -58 -41 -38 -48 -71 -99 -114 -94 -52 -24 -21 -26 -23 -16 -16 -14 1 18 27 36 52 62 50 29 26 48 63 51 25 16 27 40 51 65 87 106 115 118 116 111 113 129 151 160 146 124 102 82 61 45 33 14 -22 -62 -97 -126 -154 -177 -176 -154 -134 -133 -141 -143 -142 -147 -147 -127 -90 -65 -67 -78 -64 -19 30 52 50 42 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 -1 -1 -1 -1 -1 -2 -2 -1 -1 0 -1 -2 -2 -1 -1 -1 -2 -2 0 3 4 4 4 4 4 4 4 5 7 8 7 9 14 20 23 20 15 16 23 28 29 26 25 25 25 21 15 9 4 0 -4 -9 -17 -29 -35 -34 -31 -35 -45 -50 -41 -31 -34 -49 -62 -64 -62 -65 -73 -76 -72 -71 -75 -73 -59 -40 -29 -24 -18 -6 6 15 27 43 59 66 68 76 88 90 76 58 53 56 55 47 39 42 50 52 46 40 40 35 9 -33 -72 -90 -90 -94 -109 -125 -133 -132 -134 -139 -133 -112 -85 -71 -75 -83 -71 -33 10 37 47 64 110 170 207 207 183 168 168 179 196 214 228 229 218 201 184 167 152 139 129 117 98 72 33 -13 -62 -102 -124 -131 -137 -155 -194 -245 -288 -305 -302 -290 -277 -258 -239 -229 -222 -202 -165 -133 -120 -109 -71 -16 16 16 11 27 55 80 99 118 129 119 106 109 122 
124 115 115 124 117 90 72 78 92 97 98 92 58 -11 -72 -79 -51 -48 -69 -58 11 67 40 -39 -77 -40 16 41 35 40 59 67 53 32 21 25 36 44 41 27 12 7 2 -20 -48 -53 -31 -6 -4 -18 -25 -17 -8 -10 -21 -34 -43 -43 -37 -29 -24 -22 -21 -22 -35 -54 -65 -57 -36 -16 -1 11 24 34 31 15 -3 -6 8 20 6 -30 -56 -48 -14 12 12 -9 -20 -7 22 48 53 37 16 8 14 27 39 44 42 28 10 -3 -3 5 10 8 0 -12 -24 -33 -34 -31 -30 -35 -36 -29 -19 -14 -12 -15 -22 -29 -33 -27 -15 -7 -6 -7 -6 -4 -4 -4 0 3 5 7 9 11 8 4 4 7 9 7 3 2 3 5 6 7 9 11 11 11 10 9 9 10 11 11 9 7 6 4 3 2 1 0 -1 -3 -4 -4 -5 -5 -5 -4 -3 -3 -3 -2 -2 -2 -2 -1 -1 -1 -1 -1 -1 -1 0 0 0 0 +154 142 128 107 78 36 -13 -66 -108 -131 -138 -144 -162 -202 -255 -298 -315 -311 -298 -284 -264 -244 -233 -225 -205 -167 -134 -121 -109 -71 -16 17 17 12 28 56 81 100 119 129 119 107 110 123 125 116 116 125 118 91 73 79 93 99 100 94 59 -11 -73 -80 -52 -49 -71 -59 12 70 42 -40 -80 -42 18 44 38 43 64 73 58 35 24 28 40 50 47 31 14 9 3 -23 -55 -61 -36 -7 -4 -21 -29 -20 -9 -12 -26 -42 -54 -54 -47 -37 -31 -28 -27 -29 -47 -73 -89 -79 -50 -22 -1 16 36 51 47 23 -4 -9 13 32 10 -48 -92 -80 -23 22 21 -15 -34 -12 42 90 101 72 33 17 30 57 82 95 91 63 23 -5 -5 13 25 20 0 -29 -61 -85 -90 -83 -84 -98 -103 -85 -58 -41 -38 -48 -71 -99 -114 -94 -52 -24 -21 -26 -23 -16 -16 -14 1 18 27 36 52 62 50 29 26 48 63 51 25 16 27 40 51 65 87 106 115 118 116 111 113 129 151 160 146 124 102 82 61 45 33 14 -22 -62 -97 -126 -154 -177 -176 -154 -134 -133 -141 -143 -142 -147 -147 -127 -90 -65 -67 -78 -64 -19 30 52 50 42 44 50 54 57 69 81 82 76 75 84 90 79 64 67 88 104 101 88 75 61 41 23 19 28 32 24 3 -18 -38 -51 -52 -42 -36 -38 -47 -52 -55 -59 -70 -88 -105 -108 -80 -30 8 2 -34 -60 -50 -22 -6 -5 2 28 63 80 71 47 33 33 37 31 18 13 23 36 34 15 -4 -18 -25 -34 -41 -50 -62 -75 -73 -51 -26 -15 -17 -17 -6 19 47 62 46 2 -36 -37 -7 11 -9 -47 -67 -54 -27 -7 -4 -25 -60 -84 -70 -22 26 45 43 41 47 55 66 77 80 70 62 78 111 119 84 33 13 31 61 79 80 76 71 60 46 26 0 -36 -69 -91 -97 -95 -87 -69 -44 -23 -14 -6 7 17 4 -38 
-83 -95 -62 -11 17 6 -25 -45 -40 -22 -6 -4 -19 -42 +0 0 0 0 0 0 -1 -1 -1 -1 -1 -2 -2 -3 -4 -5 -6 -6 -7 -7 -7 -7 -8 -8 -8 -7 -6 -6 -6 -4 -1 1 1 0 2 4 6 8 10 12 11 10 11 13 14 14 14 16 16 13 10 12 14 16 17 16 10 -3 -15 -17 -11 -11 -16 -14 2 16 10 -11 -21 -12 4 12 11 12 19 22 18 11 8 9 13 17 17 11 5 3 1 -10 -23 -26 -16 -4 -2 -10 -14 -10 -5 -6 -13 -21 -28 -28 -25 -20 -17 -16 -15 -17 -27 -42 -52 -47 -30 -14 -1 9 22 32 30 14 -3 -6 8 21 6 -34 -65 -57 -17 15 15 -12 -26 -10 31 68 77 56 25 13 23 45 66 77 74 52 19 -5 -5 11 21 17 0 -26 -54 -76 -80 -75 -76 -89 -94 -78 -54 -38 -36 -45 -67 -93 -108 -89 -50 -23 -21 -25 -23 -16 -16 -14 0 17 26 35 51 60 49 28 25 47 62 50 24 15 26 39 50 64 86 105 114 118 116 110 112 128 150 159 145 123 101 81 60 44 32 13 -22 -62 -96 -124 -151 -173 -172 -150 -130 -129 -136 -138 -136 -141 -140 -121 -85 -62 -63 -73 -60 -18 27 47 45 38 39 44 48 50 60 71 71 65 64 71 76 66 53 55 72 85 82 71 60 48 32 18 14 21 24 18 2 -14 -29 -38 -38 -31 -26 -27 -33 -36 -38 -40 -47 -58 -69 -70 -51 -19 4 1 -21 -36 -30 -13 -4 -3 1 15 34 42 37 24 16 16 18 15 8 6 10 16 15 6 -2 -8 -11 -14 -17 -20 -25 -29 -28 -19 -10 -6 -6 -6 -2 6 14 18 13 0 -11 -11 -2 2 -3 -12 -17 -13 -7 -2 -1 -6 -13 -17 -14 -5 4 7 7 6 7 8 9 10 10 8 7 9 12 12 8 3 1 2 5 6 6 5 4 3 2 1 0 -2 -4 -4 -4 -4 -3 -3 -2 -1 -1 -1 0 0 0 -1 -1 -1 -1 -1 0 0 -1 -1 -1 -1 -1 -1 -1 0 +-103 -85 -58 -41 -38 -48 -71 -99 -114 -94 -52 -24 -21 -26 -23 -16 -16 -14 1 18 27 36 52 62 50 29 26 48 63 51 25 16 27 40 51 65 87 106 115 118 116 111 113 129 151 160 146 124 102 82 61 45 33 14 -22 -62 -97 -126 -154 -177 -176 -154 -134 -133 -141 -143 -142 -147 -147 -127 -90 -65 -67 -78 -64 -19 30 52 50 42 44 50 54 57 69 81 82 76 75 84 90 79 64 67 88 104 101 88 75 61 41 23 19 28 32 24 3 -18 -38 -51 -52 -42 -36 -38 -47 -52 -55 -59 -70 -88 -105 -108 -80 -30 8 2 -34 -60 -50 -22 -6 -5 2 28 63 80 71 47 33 33 37 31 18 13 23 36 34 15 -4 -18 -25 -34 -41 -50 -62 -75 -73 -51 -26 -15 -17 -17 -6 19 47 62 46 2 -36 -37 -7 11 -9 -47 -67 -54 -27 -7 -4 -25 -60 -84 -70 
-22 26 45 43 41 47 55 66 77 80 70 62 78 111 119 84 33 13 31 61 79 80 76 71 60 46 26 0 -36 -69 -91 -97 -95 -87 -69 -44 -23 -14 -6 7 17 4 -38 -83 -95 -62 -11 17 6 -25 -45 -40 -22 -6 -4 -19 -42 -57 -52 -24 13 39 38 9 -27 -44 -30 -8 -2 -17 -32 -29 -11 9 21 22 18 11 21 45 68 66 44 24 31 53 67 62 47 38 48 66 73 60 41 42 61 73 57 31 25 38 42 19 -13 -28 -27 -25 -21 -12 -3 -21 -70 -119 -130 -102 -62 -36 -40 -63 -95 -117 -122 -120 -122 -128 -123 -101 -75 -65 -67 -58 -25 11 29 33 45 68 82 70 42 30 49 77 83 66 45 47 71 96 105 94 78 70 74 79 77 69 60 57 53 33 -3 -40 -60 -63 -57 -53 -65 -91 -121 -139 -135 -122 -110 -103 -92 -79 -71 -76 -80 -67 -37 -13 -11 -22 -24 -10 5 12 15 27 50 70 77 71 65 76 105 132 130 99 70 75 110 131 108 63 38 45 61 58 37 19 3 -10 -17 +0 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 0 0 0 1 1 2 1 1 1 2 3 2 1 0 1 2 3 4 6 8 10 11 11 11 12 14 17 19 18 16 14 11 9 6 5 2 -4 -12 -18 -24 -31 -36 -37 -34 -30 -31 -34 -35 -36 -38 -39 -35 -25 -19 -20 -24 -20 -6 9 16 16 14 15 17 19 21 26 31 32 30 30 35 38 34 28 30 40 48 47 42 36 30 20 11 9 14 17 13 1 -11 -22 -30 -31 -25 -22 -23 -29 -33 -35 -38 -46 -58 -70 -72 -54 -21 5 1 -24 -43 -36 -16 -5 -4 1 21 47 61 54 36 25 26 29 24 14 10 18 29 28 12 -4 -16 -22 -30 -36 -44 -55 -67 -65 -46 -24 -14 -16 -16 -6 17 43 57 42 1 -34 -35 -7 10 -9 -45 -65 -53 -27 -7 -4 -25 -59 -83 -69 -22 25 44 42 40 46 54 65 76 79 69 61 77 110 118 83 33 13 30 60 78 79 75 70 59 45 25 0 -36 -69 -90 -96 -94 -86 -68 -44 -23 -14 -6 6 16 3 -37 -80 -91 -59 -11 16 5 -24 -42 -38 -21 -6 -4 -18 -39 -52 -47 -22 11 34 33 7 -24 -38 -26 -7 -2 -15 -27 -25 -10 7 17 17 14 8 16 35 52 50 33 18 23 39 48 44 33 26 33 45 50 40 27 27 40 47 36 19 15 23 25 11 -8 -17 -16 -15 -12 -7 -2 -12 -38 -63 -68 -53 -32 -18 -20 -31 -45 -55 -56 -54 -54 -56 -53 -43 -31 -27 -27 -23 -10 4 10 11 15 23 27 22 13 9 14 22 24 18 12 12 18 24 26 22 18 16 16 17 16 13 11 10 9 5 -1 -7 -10 -10 -9 -8 -9 -13 -16 -18 -16 -14 -12 -11 -10 -8 -7 -7 -7 -6 -3 -1 -1 -2 -2 -1 0 0 0 1 1 2 2 2 1 1 2 2 2 1 0 0 1 1 0 0 
0 0 0 0 0 0 0 -1 0 +-17 -17 -6 19 47 62 46 2 -36 -37 -7 11 -9 -47 -67 -54 -27 -7 -4 -25 -60 -84 -70 -22 26 45 43 41 47 55 66 77 80 70 62 78 111 119 84 33 13 31 61 79 80 76 71 60 46 26 0 -36 -69 -91 -97 -95 -87 -69 -44 -23 -14 -6 7 17 4 -38 -83 -95 -62 -11 17 6 -25 -45 -40 -22 -6 -4 -19 -42 -57 -52 -24 13 39 38 9 -27 -44 -30 -8 -2 -17 -32 -29 -11 9 21 22 18 11 21 45 68 66 44 24 31 53 67 62 47 38 48 66 73 60 41 42 61 73 57 31 25 38 42 19 -13 -28 -27 -25 -21 -12 -3 -21 -70 -119 -130 -102 -62 -36 -40 -63 -95 -117 -122 -120 -122 -128 -123 -101 -75 -65 -67 -58 -25 11 29 33 45 68 82 70 42 30 49 77 83 66 45 47 71 96 105 94 78 70 74 79 77 69 60 57 53 33 -3 -40 -60 -63 -57 -53 -65 -91 -121 -139 -135 -122 -110 -103 -92 -79 -71 -76 -80 -67 -37 -13 -11 -22 -24 -10 5 12 15 27 50 70 77 71 65 76 105 132 130 99 70 75 110 131 108 63 38 45 61 58 37 19 3 -10 -17 -17 -25 -54 -87 -85 -44 -1 0 -36 -70 -83 -90 -109 -130 -133 -121 -118 -140 -170 -183 -168 -149 -146 -154 -144 -106 -57 -29 -28 -39 -36 -16 21 65 99 110 109 117 144 166 163 144 142 171 204 211 195 184 193 210 219 212 198 185 170 153 131 105 82 61 41 11 -23 -49 -64 -79 -102 -126 -134 -121 -99 -83 -79 -87 -95 -100 -93 -80 -71 -74 -90 -104 -101 -82 -60 -56 -67 -70 -56 -34 -22 -23 -25 -30 -41 -51 -35 7 46 50 29 16 24 26 0 -37 -46 -16 18 23 7 1 23 46 48 37 38 58 75 66 40 25 33 45 49 52 62 70 56 20 -10 -21 -23 -39 -72 -103 -110 -92 -72 -69 -84 -94 -81 -56 -42 -52 -79 -101 -110 -106 -92 -70 -45 -25 -14 -10 -11 -12 -8 2 +0 -1 -1 0 0 0 0 0 -1 -1 -1 0 -1 -1 -1 -1 -1 -1 -1 -1 -2 -3 -3 -1 0 1 1 1 2 2 3 4 5 4 4 5 8 10 7 3 1 3 6 8 9 9 9 7 6 3 0 -6 -12 -16 -17 -17 -17 -14 -9 -5 -3 -2 1 3 0 -10 -21 -25 -17 -3 4 1 -8 -14 -13 -7 -2 -2 -7 -15 -20 -19 -9 4 14 14 3 -11 -19 -13 -4 -1 -8 -15 -14 -6 4 10 10 8 5 10 23 35 35 23 13 17 30 38 36 27 22 29 40 45 37 26 27 39 48 37 20 17 26 29 13 -10 -21 -20 -19 -16 -9 -3 -16 -54 -92 -102 -80 -50 -29 -33 -52 -78 -97 -101 -101 -103 -109 -105 -87 -65 -57 -59 -51 -23 9 25 29 40 61 74 64 38 27 45 71 77 62 42 44 67 
91 100 90 75 67 71 76 75 67 58 55 52 32 -3 -40 -60 -63 -57 -53 -65 -91 -121 -139 -135 -122 -110 -103 -92 -79 -71 -76 -80 -67 -37 -13 -11 -22 -24 -10 4 11 14 26 49 68 75 69 63 74 102 127 125 95 67 71 104 124 102 59 35 42 56 53 34 17 2 -10 -16 -16 -23 -49 -78 -76 -39 -1 0 -32 -60 -71 -77 -92 -109 -111 -100 -97 -114 -137 -146 -133 -117 -114 -119 -111 -81 -43 -22 -21 -29 -27 -12 14 45 68 75 74 78 95 109 106 92 90 107 126 129 118 109 113 122 125 120 110 101 92 81 69 54 41 30 20 5 -12 -24 -30 -37 -46 -56 -59 -52 -42 -35 -32 -35 -37 -38 -35 -30 -26 -26 -31 -35 -34 -27 -19 -18 -20 -21 -16 -10 -6 -7 -7 -8 -10 -13 -9 1 9 10 5 3 4 4 0 -7 -8 -3 2 3 1 0 3 5 5 4 4 6 7 6 3 2 2 3 3 3 4 4 3 1 -1 -2 -2 -2 -3 -4 -4 -3 -3 -2 -2 -2 -2 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 0 +68 82 70 42 30 49 77 83 66 45 47 71 96 105 94 78 70 74 79 77 69 60 57 53 33 -3 -40 -60 -63 -57 -53 -65 -91 -121 -139 -135 -122 -110 -103 -92 -79 -71 -76 -80 -67 -37 -13 -11 -22 -24 -10 5 12 15 27 50 70 77 71 65 76 105 132 130 99 70 75 110 131 108 63 38 45 61 58 37 19 3 -10 -17 -17 -25 -54 -87 -85 -44 -1 0 -36 -70 -83 -90 -109 -130 -133 -121 -118 -140 -170 -183 -168 -149 -146 -154 -144 -106 -57 -29 -28 -39 -36 -16 21 65 99 110 109 117 144 166 163 144 142 171 204 211 195 184 193 210 219 212 198 185 170 153 131 105 82 61 41 11 -23 -49 -64 -79 -102 -126 -134 -121 -99 -83 -79 -87 -95 -100 -93 -80 -71 -74 -90 -104 -101 -82 -60 -56 -67 -70 -56 -34 -22 -23 -25 -30 -41 -51 -35 7 46 50 29 16 24 26 0 -37 -46 -16 18 23 7 1 23 46 48 37 38 58 75 66 40 25 33 45 49 52 62 70 56 20 -10 -21 -23 -39 -72 -103 -110 -92 -72 -69 -84 -94 -81 -56 -42 -52 -79 -101 -110 -106 -92 -70 -45 -25 -14 -10 -11 -12 -8 2 12 8 -9 -26 -15 21 50 37 -13 -61 -71 -46 -18 -8 -6 7 31 48 48 38 29 29 35 50 69 78 71 54 42 47 63 81 100 113 114 99 82 72 70 64 49 29 12 4 10 17 19 10 -2 -11 -15 -24 -31 -22 6 31 33 19 14 36 63 70 53 33 30 42 51 43 22 5 4 16 24 28 33 45 50 34 5 -7 6 30 44 49 60 77 83 67 38 7 -22 -49 -67 -71 -70 -66 -63 -60 -66 -95 -130 -146 
-143 -144 -169 -201 -206 -181 -162 -176 -205 -220 -204 -182 -174 -187 -203 -203 -184 -149 -110 -74 -43 -9 29 62 82 91 101 111 114 105 100 116 149 176 187 194 210 219 201 157 126 124 123 93 45 15 26 49 52 35 17 2 -12 -23 -25 -24 -37 -66 +0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 1 1 -1 -2 -3 -4 -4 -3 -4 -6 -9 -10 -11 -10 -10 -10 -9 -8 -8 -9 -9 -8 -5 -2 -2 -4 -4 -2 0 1 2 4 8 12 14 13 13 15 22 29 29 23 16 18 28 34 29 17 10 13 18 17 11 6 0 -4 -6 -6 -9 -20 -33 -33 -18 -1 0 -15 -30 -36 -40 -49 -59 -61 -57 -56 -68 -84 -91 -85 -77 -76 -82 -78 -58 -32 -17 -16 -23 -21 -10 12 39 60 68 68 74 92 108 107 95 95 116 140 146 136 130 138 151 159 156 147 138 128 116 101 81 64 48 32 8 -19 -40 -53 -66 -86 -106 -114 -103 -85 -72 -69 -76 -84 -89 -83 -72 -64 -67 -82 -95 -93 -76 -56 -52 -63 -66 -53 -33 -21 -22 -24 -29 -40 -50 -34 6 44 48 28 15 23 25 0 -37 -46 -16 17 22 6 0 22 45 47 36 37 57 74 66 40 24 32 44 48 51 61 69 55 19 -10 -21 -23 -39 -72 -102 -109 -91 -71 -68 -82 -92 -79 -55 -41 -50 -76 -97 -105 -101 -87 -66 -43 -24 -13 -10 -11 -11 -8 1 10 7 -9 -24 -14 18 43 32 -12 -53 -61 -39 -16 -7 -5 5 25 38 38 30 22 22 27 38 52 59 53 40 30 34 45 58 70 79 79 68 55 48 46 42 31 18 7 2 6 10 11 5 -2 -7 -9 -14 -18 -13 3 16 17 9 7 18 31 34 25 15 13 19 22 18 9 2 1 6 9 11 12 17 18 12 1 -3 2 10 14 15 18 23 24 19 10 1 -6 -13 -18 -18 -17 -16 -15 -14 -15 -20 -27 -29 -28 -27 -31 -35 -35 -30 -26 -27 -30 -31 -28 -24 -22 -22 -23 -22 -19 -15 -11 -7 -4 -1 2 4 5 5 6 6 6 5 4 4 5 6 6 6 5 5 4 3 2 2 1 1 0 0 0 0 0 0 0 0 -1 -1 -1 -1 -1 0 +-90 -104 -101 -82 -60 -56 -67 -70 -56 -34 -22 -23 -25 -30 -41 -51 -35 7 46 50 29 16 24 26 0 -37 -46 -16 18 23 7 1 23 46 48 37 38 58 75 66 40 25 33 45 49 52 62 70 56 20 -10 -21 -23 -39 -72 -103 -110 -92 -72 -69 -84 -94 -81 -56 -42 -52 -79 -101 -110 -106 -92 -70 -45 -25 -14 -10 -11 -12 -8 2 12 8 -9 -26 -15 21 50 37 -13 -61 -71 -46 -18 -8 -6 7 31 48 48 38 29 29 35 50 69 78 71 54 42 47 63 81 100 113 114 99 82 72 70 64 49 29 12 4 10 17 19 10 -2 -11 -15 -24 -31 -22 6 31 33 19 14 36 63 70 
53 33 30 42 51 43 22 5 4 16 24 28 33 45 50 34 5 -7 6 30 44 49 60 77 83 67 38 7 -22 -49 -67 -71 -70 -66 -63 -60 -66 -95 -130 -146 -143 -144 -169 -201 -206 -181 -162 -176 -205 -220 -204 -182 -174 -187 -203 -203 -184 -149 -110 -74 -43 -9 29 62 82 91 101 111 114 105 100 116 149 176 187 194 210 219 201 157 126 124 123 93 45 15 26 49 52 35 17 2 -12 -23 -25 -24 -37 -66 -82 -53 4 52 64 49 33 22 13 8 17 44 71 77 68 64 78 87 67 25 -11 -23 -20 -20 -26 -27 -22 -22 -31 -36 -23 0 11 7 -6 -20 -35 -45 -50 -54 -65 -79 -78 -49 -18 -13 -47 -90 -111 -107 -101 -108 -120 -118 -87 -42 -8 1 -8 -6 19 49 60 55 63 86 100 90 75 80 101 109 94 75 74 84 81 60 28 -5 -35 -49 -46 -26 -5 8 16 18 9 -7 -14 -1 20 29 21 8 -1 -11 -27 -44 -51 -39 -16 2 6 7 18 41 52 32 -13 -46 -46 -26 -19 -30 -31 -10 12 2 -26 -30 6 41 29 -26 -69 -65 -38 -22 -30 -43 -39 -23 -8 -9 -27 -50 -59 -42 -10 14 18 12 11 20 33 52 76 98 103 86 61 47 41 30 13 -2 -4 8 +0 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 0 0 1 0 0 0 0 0 -2 -2 -1 0 1 0 0 1 3 3 2 3 4 6 6 3 2 3 5 5 6 7 9 7 2 -2 -4 -4 -7 -13 -19 -21 -18 -15 -14 -18 -21 -18 -13 -10 -13 -20 -26 -29 -29 -26 -20 -14 -8 -5 -4 -4 -4 -3 0 4 2 -4 -10 -6 8 19 14 -6 -26 -31 -20 -8 -4 -3 3 14 23 23 18 14 14 18 26 36 42 39 30 23 26 36 47 59 68 69 61 51 45 45 41 32 19 8 2 6 11 13 7 -2 -8 -11 -18 -24 -17 4 23 25 14 10 28 50 56 42 26 24 34 42 36 18 4 3 13 20 24 28 39 44 30 4 -7 5 27 40 45 55 71 77 62 35 6 -21 -47 -64 -68 -68 -64 -61 -59 -65 -93 -127 -143 -141 -142 -167 -199 -204 -180 -161 -175 -204 -220 -204 -182 -174 -187 -203 -203 -184 -149 -110 -74 -43 -9 28 61 81 90 100 110 113 104 99 114 147 173 183 190 205 213 195 152 122 119 118 89 43 14 24 46 49 32 15 1 -12 -22 -24 -22 -34 -60 -74 -48 3 46 56 43 28 19 11 6 14 37 59 64 56 52 63 70 53 19 -9 -19 -16 -16 -20 -21 -17 -17 -23 -27 -17 0 7 4 -5 -14 -24 -31 -34 -36 -43 -51 -50 -31 -12 -8 -29 -54 -66 -63 -58 -62 -68 -66 -48 -23 -5 0 -5 -4 9 23 28 25 29 39 44 39 32 34 42 44 37 29 28 31 30 21 9 -2 -12 -17 -16 -9 -2 2 4 5 2 -2 -4 -1 5 7 5 1 -1 -3 
-6 -10 -11 -8 -4 0 1 1 2 6 8 4 -2 -7 -7 -4 -3 -4 -4 -2 1 0 -3 -3 0 3 2 -2 -5 -5 -3 -2 -2 -3 -2 -1 -1 -1 -1 -2 -2 -2 -1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 -1 -1 0 +6 30 44 49 60 77 83 67 38 7 -22 -49 -67 -71 -70 -66 -63 -60 -66 -95 -130 -146 -143 -144 -169 -201 -206 -181 -162 -176 -205 -220 -204 -182 -174 -187 -203 -203 -184 -149 -110 -74 -43 -9 29 62 82 91 101 111 114 105 100 116 149 176 187 194 210 219 201 157 126 124 123 93 45 15 26 49 52 35 17 2 -12 -23 -25 -24 -37 -66 -82 -53 4 52 64 49 33 22 13 8 17 44 71 77 68 64 78 87 67 25 -11 -23 -20 -20 -26 -27 -22 -22 -31 -36 -23 0 11 7 -6 -20 -35 -45 -50 -54 -65 -79 -78 -49 -18 -13 -47 -90 -111 -107 -101 -108 -120 -118 -87 -42 -8 1 -8 -6 19 49 60 55 63 86 100 90 75 80 101 109 94 75 74 84 81 60 28 -5 -35 -49 -46 -26 -5 8 16 18 9 -7 -14 -1 20 29 21 8 -1 -11 -27 -44 -51 -39 -16 2 6 7 18 41 52 32 -13 -46 -46 -26 -19 -30 -31 -10 12 2 -26 -30 6 41 29 -26 -69 -65 -38 -22 -30 -43 -39 -23 -8 -9 -27 -50 -59 -42 -10 14 18 12 11 20 33 52 76 98 103 86 61 47 41 30 13 -2 -4 8 19 25 19 3 -14 -24 -25 -17 -12 -14 -21 -18 -8 -1 -14 -44 -66 -70 -69 -78 -95 -91 -59 -23 -9 -14 -13 2 13 2 -21 -33 -27 -15 -13 -18 -26 -27 -22 -17 -20 -31 -45 -48 -37 -18 4 25 40 42 28 7 -8 -9 5 22 29 26 17 15 22 31 35 37 33 23 8 -1 0 4 -5 -29 -49 -47 -29 -13 -20 -37 -41 -19 12 21 0 -35 -52 -39 -11 13 25 25 11 -3 0 30 74 99 93 69 55 53 52 38 19 -1 -12 -14 -5 8 13 3 -22 -46 -51 -28 8 35 45 53 71 90 94 78 62 58 64 63 56 51 46 37 25 18 15 1 -25 -43 -39 -26 -26 -36 -31 -7 7 -4 -24 -21 7 39 57 56 43 28 28 50 72 62 20 -19 -24 -2 +0 0 0 0 0 0 0 0 0 0 -1 -1 -1 -1 -1 -1 -2 -2 -2 -3 -4 -5 -5 -5 -7 -8 -9 -9 -8 -10 -12 -14 -13 -13 -13 -15 -17 -18 -17 -14 -11 -8 -5 -2 3 7 10 12 13 15 17 16 16 19 25 31 34 36 41 44 42 33 27 28 28 22 11 3 6 13 14 9 4 0 -4 -8 -8 -8 -13 -23 -29 -19 1 19 24 18 13 8 5 3 7 19 31 34 31 29 36 41 32 12 -6 -12 -11 -11 -14 -15 -13 -13 -18 -21 -14 0 6 4 -4 -13 -22 -29 -33 -36 -43 -53 -53 -34 -13 -10 -34 -64 -80 -78 -74 -80 -90 -89 -66 -33 -7 0 -7 -5 15 39 48 
44 51 71 83 75 63 68 86 93 81 65 64 74 71 53 25 -5 -32 -45 -43 -24 -5 7 14 16 8 -7 -14 -1 19 27 20 7 -1 -11 -27 -43 -50 -39 -16 1 5 6 17 40 51 31 -13 -46 -46 -26 -19 -30 -31 -10 11 2 -26 -30 5 40 28 -26 -69 -65 -38 -22 -30 -43 -39 -23 -8 -9 -27 -50 -58 -42 -10 13 17 11 10 19 31 49 72 92 97 80 57 43 38 27 11 -2 -4 7 17 22 16 2 -13 -22 -22 -15 -11 -12 -18 -16 -7 -1 -12 -37 -54 -57 -56 -63 -76 -72 -46 -18 -7 -11 -10 1 9 1 -16 -24 -20 -11 -10 -13 -18 -19 -15 -12 -14 -20 -29 -31 -23 -12 2 14 23 24 16 3 -5 -5 2 11 15 13 8 7 10 15 16 17 15 10 3 -1 0 1 -3 -12 -20 -19 -12 -5 -8 -14 -15 -7 4 7 0 -12 -17 -12 -4 3 7 6 2 -1 0 7 17 23 21 15 11 11 10 7 3 -1 -3 -3 -1 1 2 0 -4 -7 -7 -4 0 4 5 5 7 8 8 6 5 4 4 4 3 3 2 2 1 0 0 0 -1 -2 -2 -1 -1 -1 -1 -1 0 -1 -1 -1 0 0 0 0 0 0 0 0 0 0 0 -1 -1 0 +-35 -49 -46 -26 -5 8 16 18 9 -7 -14 -1 20 29 21 8 -1 -11 -27 -44 -51 -39 -16 2 6 7 18 41 52 32 -13 -46 -46 -26 -19 -30 -31 -10 12 2 -26 -30 6 41 29 -26 -69 -65 -38 -22 -30 -43 -39 -23 -8 -9 -27 -50 -59 -42 -10 14 18 12 11 20 33 52 76 98 103 86 61 47 41 30 13 -2 -4 8 19 25 19 3 -14 -24 -25 -17 -12 -14 -21 -18 -8 -1 -14 -44 -66 -70 -69 -78 -95 -91 -59 -23 -9 -14 -13 2 13 2 -21 -33 -27 -15 -13 -18 -26 -27 -22 -17 -20 -31 -45 -48 -37 -18 4 25 40 42 28 7 -8 -9 5 22 29 26 17 15 22 31 35 37 33 23 8 -1 0 4 -5 -29 -49 -47 -29 -13 -20 -37 -41 -19 12 21 0 -35 -52 -39 -11 13 25 25 11 -3 0 30 74 99 93 69 55 53 52 38 19 -1 -12 -14 -5 8 13 3 -22 -46 -51 -28 8 35 45 53 71 90 94 78 62 58 64 63 56 51 46 37 25 18 15 1 -25 -43 -39 -26 -26 -36 -31 -7 7 -4 -24 -21 7 39 57 56 43 28 28 50 72 62 20 -19 -24 -2 8 3 3 17 23 8 -11 -12 -3 -21 -71 -115 -115 -85 -65 -73 -88 -97 -103 -111 -110 -87 -56 -49 -69 -80 -59 -21 0 -1 -5 0 1 -12 -28 -29 -20 -19 -33 -47 -49 -38 -25 -21 -25 -31 -25 2 41 64 55 28 6 2 8 13 12 7 6 13 24 32 27 18 17 24 20 13 22 55 89 91 60 33 37 62 82 77 55 36 43 70 90 82 56 44 58 72 53 15 -6 6 24 13 -21 -45 -44 -35 -45 -70 -86 -76 -60 -55 -59 -46 -7 31 36 9 -10 10 55 76 54 15 4 36 80 99 86 64 59 67 61 29 -10 
-30 -28 -29 -47 -68 -67 -46 -27 -26 -35 -36 -32 -37 -60 -86 -95 -84 -69 -65 -69 -69 -57 -46 -53 -79 -101 -103 -88 -74 -71 -72 -73 -77 +0 -1 -1 -1 -1 0 0 0 0 -1 -1 -1 0 0 0 0 -1 -1 -1 -2 -2 -2 -1 0 0 0 0 1 2 1 -1 -3 -3 -2 -2 -3 -3 -1 1 0 -3 -4 0 4 3 -4 -9 -9 -6 -4 -5 -7 -7 -4 -2 -2 -5 -10 -12 -9 -3 3 3 2 2 4 8 13 19 26 28 24 17 13 12 9 4 -1 -2 2 6 8 6 1 -6 -10 -10 -7 -5 -6 -9 -8 -4 -1 -7 -21 -32 -34 -34 -39 -48 -47 -31 -13 -5 -8 -8 1 7 1 -13 -20 -17 -10 -8 -12 -17 -18 -15 -12 -14 -21 -31 -33 -26 -13 2 17 28 30 20 5 -6 -7 3 16 22 20 13 11 17 24 28 30 27 19 6 -1 0 3 -5 -25 -43 -42 -26 -12 -18 -34 -37 -18 10 19 0 -33 -49 -37 -11 12 23 23 10 -3 0 28 71 95 89 66 53 51 50 37 18 -1 -12 -14 -5 7 12 2 -22 -46 -51 -28 7 34 44 52 70 90 94 77 61 57 63 62 55 50 45 36 24 17 14 0 -25 -43 -39 -26 -26 -36 -31 -7 6 -4 -24 -21 6 37 54 53 40 26 26 46 66 57 18 -18 -22 -2 7 2 2 15 20 7 -10 -11 -3 -18 -61 -98 -97 -71 -54 -60 -72 -79 -83 -89 -87 -69 -44 -38 -53 -61 -45 -16 0 -1 -4 0 0 -9 -20 -20 -14 -13 -22 -31 -32 -25 -16 -14 -16 -19 -16 1 24 37 31 15 3 1 4 6 6 3 3 6 11 15 12 8 7 10 8 5 9 23 37 37 24 13 14 23 30 28 19 12 14 23 29 26 17 13 17 20 15 4 -2 1 6 3 -6 -11 -11 -8 -10 -15 -18 -15 -12 -11 -11 -8 -2 4 5 1 -2 1 7 9 6 1 0 3 8 9 8 5 4 5 4 2 -1 -2 -2 -2 -3 -4 -4 -2 -2 -1 -2 -2 -1 -1 -2 -2 -2 -2 -2 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 0 +12 21 0 -35 -52 -39 -11 13 25 25 11 -3 0 30 74 99 93 69 55 53 52 38 19 -1 -12 -14 -5 8 13 3 -22 -46 -51 -28 8 35 45 53 71 90 94 78 62 58 64 63 56 51 46 37 25 18 15 1 -25 -43 -39 -26 -26 -36 -31 -7 7 -4 -24 -21 7 39 57 56 43 28 28 50 72 62 20 -19 -24 -2 8 3 3 17 23 8 -11 -12 -3 -21 -71 -115 -115 -85 -65 -73 -88 -97 -103 -111 -110 -87 -56 -49 -69 -80 -59 -21 0 -1 -5 0 1 -12 -28 -29 -20 -19 -33 -47 -49 -38 -25 -21 -25 -31 -25 2 41 64 55 28 6 2 8 13 12 7 6 13 24 32 27 18 17 24 20 13 22 55 89 91 60 33 37 62 82 77 55 36 43 70 90 82 56 44 58 72 53 15 -6 6 24 13 -21 -45 -44 -35 -45 -70 -86 -76 -60 -55 -59 -46 -7 31 36 9 -10 10 55 76 54 15 4 36 80 99 86 64 
59 67 61 29 -10 -30 -28 -29 -47 -68 -67 -46 -27 -26 -35 -36 -32 -37 -60 -86 -95 -84 -69 -65 -69 -69 -57 -46 -53 -79 -101 -103 -88 -74 -71 -72 -73 -77 -85 -93 -86 -58 -22 2 -1 -22 -40 -45 -38 -30 -17 1 18 27 35 48 61 52 23 4 30 89 131 130 105 91 91 84 64 51 63 82 89 78 69 73 79 77 67 61 64 69 72 68 62 54 53 56 56 47 31 16 4 -14 -45 -78 -97 -95 -87 -91 -104 -108 -91 -65 -57 -71 -88 -86 -66 -48 -43 -50 -53 -49 -48 -56 -72 -90 -99 -91 -64 -30 -9 -5 -9 -6 1 6 15 36 67 86 89 92 112 138 147 139 133 141 146 129 103 89 97 115 129 134 127 110 93 89 93 91 70 45 26 8 -28 -72 -95 -82 -57 -52 -80 -118 -136 -127 -108 -97 -97 -105 -107 -110 -123 -141 -147 -127 -96 -77 -81 -89 -88 -78 -60 -36 -11 -1 -17 -42 -48 -29 -1 21 34 44 49 48 +0 0 0 -1 -1 -1 -1 0 0 0 0 -1 0 0 0 1 1 1 1 1 1 1 0 -1 -1 -1 -1 0 0 0 -2 -3 -4 -2 0 2 3 4 6 8 9 7 6 6 7 7 7 6 6 5 3 2 2 0 -5 -8 -8 -5 -6 -8 -7 -2 1 -1 -6 -6 1 9 14 15 11 7 8 14 21 19 6 -7 -9 -1 2 1 1 6 8 3 -5 -5 -2 -9 -31 -50 -51 -39 -30 -34 -42 -47 -51 -56 -56 -45 -30 -26 -37 -44 -33 -12 0 -1 -3 0 0 -8 -18 -18 -13 -13 -22 -31 -33 -26 -17 -15 -18 -22 -18 1 29 46 40 20 4 1 6 9 9 5 4 10 19 25 21 14 13 19 16 10 18 46 76 78 52 28 32 54 72 68 49 32 38 63 82 75 51 40 54 67 49 14 -6 5 22 12 -21 -44 -43 -34 -44 -69 -85 -75 -59 -55 -59 -46 -7 30 35 8 -10 9 54 75 53 14 3 35 79 99 86 63 58 66 60 28 -10 -30 -28 -29 -47 -68 -67 -46 -27 -26 -35 -36 -32 -37 -59 -84 -93 -82 -67 -63 -67 -66 -55 -44 -51 -75 -95 -97 -82 -69 -66 -66 -67 -70 -77 -84 -77 -52 -20 1 -1 -20 -35 -39 -33 -26 -15 0 14 22 28 38 48 41 18 3 23 68 100 98 78 67 67 61 46 36 44 57 61 53 46 49 52 50 43 39 40 43 44 41 37 32 31 32 32 26 17 8 2 -8 -24 -41 -50 -48 -44 -45 -50 -52 -43 -30 -26 -32 -39 -37 -28 -20 -18 -20 -21 -19 -18 -21 -26 -32 -34 -31 -21 -10 -3 -2 -3 -2 0 1 4 9 17 21 21 21 25 30 31 29 26 27 27 23 18 15 16 18 19 19 18 15 12 11 11 10 7 4 2 0 -3 -7 -9 -7 -5 -4 -6 -8 -9 -8 -6 -5 -5 -5 -5 -5 -5 -5 -5 -4 -3 -2 -2 -2 -2 -2 -1 -1 -1 -1 -1 -1 -1 -1 -1 0 0 0 0 0 +43 70 90 82 56 44 58 72 53 15 -6 6 24 
13 -21 -45 -44 -35 -45 -70 -86 -76 -60 -55 -59 -46 -7 31 36 9 -10 10 55 76 54 15 4 36 80 99 86 64 59 67 61 29 -10 -30 -28 -29 -47 -68 -67 -46 -27 -26 -35 -36 -32 -37 -60 -86 -95 -84 -69 -65 -69 -69 -57 -46 -53 -79 -101 -103 -88 -74 -71 -72 -73 -77 -85 -93 -86 -58 -22 2 -1 -22 -40 -45 -38 -30 -17 1 18 27 35 48 61 52 23 4 30 89 131 130 105 91 91 84 64 51 63 82 89 78 69 73 79 77 67 61 64 69 72 68 62 54 53 56 56 47 31 16 4 -14 -45 -78 -97 -95 -87 -91 -104 -108 -91 -65 -57 -71 -88 -86 -66 -48 -43 -50 -53 -49 -48 -56 -72 -90 -99 -91 -64 -30 -9 -5 -9 -6 1 6 15 36 67 86 89 92 112 138 147 139 133 141 146 129 103 89 97 115 129 134 127 110 93 89 93 91 70 45 26 8 -28 -72 -95 -82 -57 -52 -80 -118 -136 -127 -108 -97 -97 -105 -107 -110 -123 -141 -147 -127 -96 -77 -81 -89 -88 -78 -60 -36 -11 -1 -17 -42 -48 -29 -1 21 34 44 49 48 46 51 57 48 30 22 37 60 60 38 20 20 25 23 14 9 8 -5 -29 -41 -24 12 38 41 35 36 52 74 98 110 106 88 73 78 95 105 98 81 72 70 64 46 19 -7 -20 -20 -16 -24 -44 -70 -93 -111 -125 -135 -135 -127 -121 -126 -140 -148 -142 -132 -132 -142 -141 -118 -88 -77 -88 -81 -31 36 65 46 17 23 52 69 63 63 85 104 97 71 58 73 99 117 122 117 103 90 95 116 138 132 106 89 90 92 72 33 1 -14 -22 -37 -53 -58 -52 -59 -94 -142 -167 -158 -137 -132 -143 -144 -124 -98 -83 -81 -85 -97 -117 -130 -117 -81 -45 -33 -43 -53 -56 -47 -22 12 39 41 25 7 -4 -11 -18 -15 5 26 37 51 89 147 189 192 161 126 111 126 166 217 251 254 +0 0 0 0 0 0 0 0 0 0 -1 0 0 0 -1 -1 -1 -1 -1 -2 -3 -3 -2 -2 -3 -2 -1 1 1 0 -1 0 3 5 3 1 0 3 7 9 8 6 6 7 7 3 -2 -4 -4 -5 -8 -11 -11 -8 -5 -5 -7 -7 -7 -8 -13 -19 -22 -20 -17 -16 -18 -18 -15 -13 -15 -23 -30 -31 -27 -24 -23 -24 -25 -27 -30 -34 -32 -22 -9 0 -1 -9 -17 -19 -17 -14 -8 0 8 12 16 23 29 25 11 2 15 46 70 70 57 50 51 48 37 30 37 49 54 48 43 46 50 50 44 40 43 46 49 47 43 38 37 40 40 34 23 12 3 -11 -35 -61 -77 -76 -70 -74 -85 -89 -75 -54 -48 -60 -75 -74 -57 -42 -38 -44 -47 -44 -43 -50 -65 -82 -90 -83 -59 -28 -9 -5 -9 -6 0 5 14 34 63 82 85 88 108 133 142 135 129 138 143 126 101 
87 95 113 127 133 126 109 92 88 92 90 69 44 25 8 -28 -72 -95 -82 -57 -52 -80 -118 -136 -127 -108 -97 -97 -104 -106 -109 -121 -139 -144 -125 -94 -75 -79 -87 -85 -75 -58 -35 -11 -1 -17 -40 -45 -28 -1 19 31 40 44 43 41 45 50 42 26 19 32 52 51 32 17 16 20 19 11 7 6 -5 -24 -33 -19 9 29 31 26 27 39 55 72 80 76 63 51 54 66 72 66 54 47 46 41 29 12 -5 -13 -13 -10 -15 -26 -41 -54 -63 -70 -75 -74 -68 -64 -66 -72 -75 -71 -65 -64 -68 -66 -54 -40 -34 -39 -35 -13 14 26 18 6 8 19 25 22 21 29 34 31 22 18 22 29 34 34 32 27 23 24 28 33 31 24 19 19 19 14 6 0 -3 -4 -7 -9 -10 -9 -9 -14 -20 -23 -21 -17 -16 -17 -16 -13 -10 -8 -8 -8 -8 -9 -10 -8 -6 -3 -2 -3 -3 -3 -3 -1 0 1 1 0 0 -1 -1 -1 -1 0 0 0 0 0 1 1 0 0 0 0 0 0 0 0 0 +-99 -91 -64 -30 -9 -5 -9 -6 1 6 15 36 67 86 89 92 112 138 147 139 133 141 146 129 103 89 97 115 129 134 127 110 93 89 93 91 70 45 26 8 -28 -72 -95 -82 -57 -52 -80 -118 -136 -127 -108 -97 -97 -105 -107 -110 -123 -141 -147 -127 -96 -77 -81 -89 -88 -78 -60 -36 -11 -1 -17 -42 -48 -29 -1 21 34 44 49 48 46 51 57 48 30 22 37 60 60 38 20 20 25 23 14 9 8 -5 -29 -41 -24 12 38 41 35 36 52 74 98 110 106 88 73 78 95 105 98 81 72 70 64 46 19 -7 -20 -20 -16 -24 -44 -70 -93 -111 -125 -135 -135 -127 -121 -126 -140 -148 -142 -132 -132 -142 -141 -118 -88 -77 -88 -81 -31 36 65 46 17 23 52 69 63 63 85 104 97 71 58 73 99 117 122 117 103 90 95 116 138 132 106 89 90 92 72 33 1 -14 -22 -37 -53 -58 -52 -59 -94 -142 -167 -158 -137 -132 -143 -144 -124 -98 -83 -81 -85 -97 -117 -130 -117 -81 -45 -33 -43 -53 -56 -47 -22 12 39 41 25 7 -4 -11 -18 -15 5 26 37 51 89 147 189 192 161 126 111 126 166 217 251 254 226 189 159 142 131 118 93 46 -11 -47 -44 -14 3 -11 -50 -83 -87 -64 -34 -17 -22 -39 -45 -32 -23 -38 -72 -101 -103 -88 -80 -86 -100 -108 -109 -99 -80 -58 -45 -40 -29 -3 27 39 32 20 23 38 55 67 68 55 33 14 14 23 21 2 -13 -7 6 2 -25 -51 -58 -50 -45 -47 -46 -44 -44 -51 -61 -76 -89 -94 -86 -71 -66 -76 -92 -100 -99 -100 -97 -88 -73 -65 -64 -57 -32 -2 7 -14 -42 -42 -9 27 31 1 -38 -63 -69 -61 -37 -9 18 37 60 
92 127 145 144 138 138 143 147 153 157 147 124 103 96 102 110 116 118 112 97 88 100 112 97 56 24 33 59 62 32 -2 -17 -21 -33 -50 -57 -55 -57 -80 -118 -143 -130 -84 -48 -53 -88 -108 -96 -75 -72 -84 +0 -1 -1 -1 -1 -1 -1 -1 0 0 0 0 0 0 1 1 1 2 3 3 3 3 4 4 3 3 4 5 6 7 7 6 5 6 6 6 5 3 2 0 -3 -8 -11 -10 -7 -7 -11 -16 -19 -19 -17 -16 -16 -18 -19 -20 -23 -27 -29 -26 -21 -17 -18 -21 -21 -19 -15 -10 -3 -1 -5 -12 -14 -9 -1 6 10 14 16 16 16 18 20 17 11 8 14 24 24 15 8 8 11 10 6 4 3 -3 -15 -21 -13 6 19 21 18 19 28 41 55 63 61 51 43 47 58 65 61 51 46 45 42 30 12 -5 -14 -14 -12 -18 -32 -51 -68 -82 -94 -102 -103 -98 -94 -99 -110 -118 -114 -107 -107 -116 -116 -98 -74 -65 -75 -69 -27 31 56 40 14 20 46 61 56 56 77 94 88 65 53 67 92 109 114 110 97 85 90 111 132 127 102 86 87 89 70 32 0 -14 -22 -37 -53 -58 -52 -59 -94 -142 -167 -158 -137 -132 -143 -144 -124 -98 -83 -81 -85 -97 -117 -130 -117 -81 -45 -33 -43 -53 -56 -47 -22 11 38 40 24 6 -4 -11 -18 -15 4 24 35 48 84 139 178 180 150 117 103 116 152 198 228 230 203 169 141 126 115 103 81 39 -10 -41 -38 -12 2 -10 -42 -69 -71 -52 -28 -14 -18 -31 -36 -25 -18 -29 -55 -76 -76 -65 -58 -62 -71 -76 -76 -69 -55 -40 -30 -27 -19 -2 17 24 19 12 13 22 32 38 39 31 18 7 7 12 11 1 -7 -4 2 0 -13 -25 -27 -23 -21 -21 -20 -19 -19 -21 -25 -31 -35 -36 -32 -26 -24 -27 -32 -34 -33 -32 -31 -27 -22 -19 -19 -16 -9 -1 1 -4 -11 -10 -3 5 6 0 -8 -13 -14 -12 -7 -2 2 5 9 13 18 20 19 17 16 16 16 16 16 14 11 9 8 8 8 8 7 7 5 4 5 5 4 2 0 1 1 1 0 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 0 +85 104 97 71 58 73 99 117 122 117 103 90 95 116 138 132 106 89 90 92 72 33 1 -14 -22 -37 -53 -58 -52 -59 -94 -142 -167 -158 -137 -132 -143 -144 -124 -98 -83 -81 -85 -97 -117 -130 -117 -81 -45 -33 -43 -53 -56 -47 -22 12 39 41 25 7 -4 -11 -18 -15 5 26 37 51 89 147 189 192 161 126 111 126 166 217 251 254 226 189 159 142 131 118 93 46 -11 -47 -44 -14 3 -11 -50 -83 -87 -64 -34 -17 -22 -39 -45 -32 -23 -38 -72 -101 -103 -88 -80 -86 -100 -108 -109 -99 -80 -58 -45 -40 -29 -3 27 39 
32 20 23 38 55 67 68 55 33 14 14 23 21 2 -13 -7 6 2 -25 -51 -58 -50 -45 -47 -46 -44 -44 -51 -61 -76 -89 -94 -86 -71 -66 -76 -92 -100 -99 -100 -97 -88 -73 -65 -64 -57 -32 -2 7 -14 -42 -42 -9 27 31 1 -38 -63 -69 -61 -37 -9 18 37 60 92 127 145 144 138 138 143 147 153 157 147 124 103 96 102 110 116 118 112 97 88 100 112 97 56 24 33 59 62 32 -2 -17 -21 -33 -50 -57 -55 -57 -80 -118 -143 -130 -84 -48 -53 -88 -108 -96 -75 -72 -84 -90 -90 -91 -84 -54 -7 23 24 25 56 98 119 110 97 99 112 134 164 191 189 155 117 105 107 97 73 59 59 43 0 -47 -67 -69 -75 -93 -102 -97 -98 -118 -148 -164 -160 -155 -158 -158 -149 -145 -158 -188 -212 -209 -184 -149 -119 -107 -103 -93 -68 -36 -8 15 40 73 105 125 128 117 98 78 76 93 116 121 112 108 115 118 101 70 48 52 69 90 100 97 87 75 74 79 83 82 80 78 66 33 -8 -40 -54 -62 -71 -82 -85 -77 -71 -74 -81 -75 -57 -47 -69 -110 -132 -111 -73 -61 -79 -94 -83 -68 -74 -88 -85 -65 -57 -69 -73 -52 -15 12 24 30 32 30 27 31 36 33 21 20 36 65 87 90 73 53 50 71 91 94 84 86 101 106 87 72 88 123 133 105 66 +0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 2 1 0 0 -1 -1 -2 -3 -3 -3 -4 -6 -9 -11 -11 -10 -10 -12 -13 -11 -10 -9 -9 -10 -11 -14 -16 -15 -11 -7 -5 -7 -9 -9 -8 -4 2 7 7 4 1 -1 -3 -4 -4 1 6 9 13 23 39 52 54 46 37 33 39 53 70 83 86 78 67 57 52 49 45 36 18 -5 -20 -19 -7 1 -5 -23 -39 -42 -31 -17 -9 -12 -20 -24 -17 -13 -21 -40 -57 -59 -51 -47 -51 -60 -66 -67 -62 -51 -37 -29 -27 -20 -2 18 26 22 13 16 26 39 48 49 40 24 10 10 17 16 1 -11 -6 4 1 -21 -42 -48 -42 -38 -40 -39 -38 -38 -44 -53 -67 -79 -83 -77 -64 -60 -69 -84 -92 -91 -93 -90 -82 -69 -61 -61 -54 -31 -2 6 -14 -41 -41 -9 26 30 0 -38 -62 -68 -60 -37 -9 17 36 59 91 126 144 143 137 137 142 146 152 156 147 124 102 95 101 109 115 117 111 96 87 99 111 96 55 23 32 58 60 31 -2 -17 -21 -32 -49 -55 -53 -55 -77 -113 -136 -123 -79 -45 -50 -82 -100 -89 -69 -66 -77 -82 -81 -82 -75 -48 -7 20 20 21 47 83 100 92 80 81 92 109 132 153 150 122 91 81 82 74 55 44 43 31 0 -34 -49 -49 -53 -65 -71 -67 -66 -79 -98 -107 -103 -99 -100 -99 -92 -88 
-95 -111 -124 -121 -105 -84 -66 -59 -56 -50 -36 -19 -5 7 19 35 49 58 58 52 43 33 32 38 47 48 44 41 43 43 36 24 16 17 23 29 31 30 26 22 21 22 22 22 20 19 16 7 -2 -10 -12 -14 -15 -17 -17 -15 -14 -14 -14 -13 -10 -8 -11 -16 -19 -15 -10 -8 -10 -11 -9 -7 -8 -9 -8 -6 -5 -6 -6 -4 -1 0 1 1 1 1 1 1 1 1 0 0 0 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +-92 -100 -99 -100 -97 -88 -73 -65 -64 -57 -32 -2 7 -14 -42 -42 -9 27 31 1 -38 -63 -69 -61 -37 -9 18 37 60 92 127 145 144 138 138 143 147 153 157 147 124 103 96 102 110 116 118 112 97 88 100 112 97 56 24 33 59 62 32 -2 -17 -21 -33 -50 -57 -55 -57 -80 -118 -143 -130 -84 -48 -53 -88 -108 -96 -75 -72 -84 -90 -90 -91 -84 -54 -7 23 24 25 56 98 119 110 97 99 112 134 164 191 189 155 117 105 107 97 73 59 59 43 0 -47 -67 -69 -75 -93 -102 -97 -98 -118 -148 -164 -160 -155 -158 -158 -149 -145 -158 -188 -212 -209 -184 -149 -119 -107 -103 -93 -68 -36 -8 15 40 73 105 125 128 117 98 78 76 93 116 121 112 108 115 118 101 70 48 52 69 90 100 97 87 75 74 79 83 82 80 78 66 33 -8 -40 -54 -62 -71 -82 -85 -77 -71 -74 -81 -75 -57 -47 -69 -110 -132 -111 -73 -61 -79 -94 -83 -68 -74 -88 -85 -65 -57 -69 -73 -52 -15 12 24 30 32 30 27 31 36 33 21 20 36 65 87 90 73 53 50 71 91 94 84 86 101 106 87 72 88 123 133 105 66 43 42 44 43 40 44 65 104 137 141 122 121 149 160 120 53 17 24 26 -6 -53 -82 -100 -128 -169 -204 -232 -265 -299 -317 -318 -313 -305 -285 -263 -264 -284 -286 -255 -222 -221 -236 -218 -165 -124 -116 -104 -53 20 67 80 90 117 150 170 184 212 246 263 258 250 252 259 253 232 200 168 140 125 127 139 144 130 116 112 116 107 82 57 50 49 33 3 -19 -25 -29 -41 -56 -57 -51 -58 -77 -93 -99 -108 -128 -133 -99 -47 -17 -24 -33 -9 28 45 43 54 87 110 95 61 51 74 104 113 102 86 73 65 62 53 25 -14 -36 -25 5 22 25 24 26 18 0 -14 -14 -13 -32 -60 -78 -78 -74 -82 -98 -116 -129 -137 -134 -131 -138 -154 -161 -144 -113 -85 -78 -80 -78 -65 -48 -41 -47 +0 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 0 -1 -1 -1 -1 0 0 0 -1 -2 -3 -3 -2 -1 0 1 2 4 7 8 9 9 9 10 11 12 13 13 12 10 10 11 12 14 15 14 
13 12 14 17 15 9 4 5 10 11 6 -1 -4 -5 -8 -12 -14 -14 -15 -21 -31 -39 -36 -24 -14 -16 -27 -34 -31 -25 -25 -29 -32 -33 -34 -32 -21 -3 9 9 10 23 41 51 48 43 45 52 63 78 93 93 78 59 54 56 51 39 32 32 24 0 -28 -40 -42 -46 -57 -64 -61 -63 -76 -97 -108 -107 -105 -108 -109 -104 -102 -113 -135 -154 -153 -136 -111 -90 -82 -79 -72 -53 -29 -7 11 32 59 85 102 105 97 82 65 64 79 99 104 97 94 101 104 90 62 43 47 62 82 92 89 80 69 69 74 78 77 76 74 63 31 -8 -39 -53 -61 -70 -81 -84 -76 -70 -73 -80 -75 -57 -47 -69 -110 -132 -111 -73 -61 -79 -94 -83 -68 -74 -88 -85 -65 -57 -69 -73 -52 -15 11 23 29 31 29 26 30 35 32 20 19 35 63 84 87 70 51 48 67 86 89 79 81 94 99 81 66 81 113 121 95 59 38 37 39 38 35 38 56 90 118 120 103 102 125 133 99 43 13 19 20 -5 -42 -65 -78 -99 -130 -155 -175 -198 -221 -232 -231 -225 -217 -201 -183 -182 -194 -193 -170 -147 -144 -152 -139 -104 -77 -72 -63 -32 11 38 45 50 65 82 92 98 111 127 134 130 124 123 124 119 107 91 75 61 54 54 58 59 52 45 43 44 39 29 20 17 16 11 0 -7 -8 -9 -13 -17 -17 -15 -16 -21 -24 -25 -27 -31 -31 -22 -11 -4 -5 -7 -2 5 8 7 8 13 17 14 8 7 9 13 13 11 9 7 6 6 4 2 -2 -3 -2 0 1 1 1 1 0 0 -1 -1 -1 -2 -3 -3 -3 -2 -2 -3 -3 -3 -3 -2 -2 -2 -2 -2 -1 -1 -1 -1 -1 -1 -1 -1 -1 0 +52 69 90 100 97 87 75 74 79 83 82 80 78 66 33 -8 -40 -54 -62 -71 -82 -85 -77 -71 -74 -81 -75 -57 -47 -69 -110 -132 -111 -73 -61 -79 -94 -83 -68 -74 -88 -85 -65 -57 -69 -73 -52 -15 12 24 30 32 30 27 31 36 33 21 20 36 65 87 90 73 53 50 71 91 94 84 86 101 106 87 72 88 123 133 105 66 43 42 44 43 40 44 65 104 137 141 122 121 149 160 120 53 17 24 26 -6 -53 -82 -100 -128 -169 -204 -232 -265 -299 -317 -318 -313 -305 -285 -263 -264 -284 -286 -255 -222 -221 -236 -218 -165 -124 -116 -104 -53 20 67 80 90 117 150 170 184 212 246 263 258 250 252 259 253 232 200 168 140 125 127 139 144 130 116 112 116 107 82 57 50 49 33 3 -19 -25 -29 -41 -56 -57 -51 -58 -77 -93 -99 -108 -128 -133 -99 -47 -17 -24 -33 -9 28 45 43 54 87 110 95 61 51 74 104 113 102 86 73 65 62 53 25 -14 -36 -25 5 22 25 24 26 18 0 
-14 -14 -13 -32 -60 -78 -78 -74 -82 -98 -116 -129 -137 -134 -131 -138 -154 -161 -144 -113 -85 -78 -80 -78 -65 -48 -41 -47 -65 -87 -104 -108 -97 -73 -45 -30 -33 -41 -37 -19 2 17 33 56 78 84 75 64 70 88 98 89 74 65 70 78 80 72 65 59 54 46 38 35 40 44 43 40 45 49 46 30 7 -9 -16 -18 -18 -24 -38 -57 -75 -86 -88 -84 -76 -65 -57 -55 -52 -34 -4 28 50 62 58 38 8 -7 2 20 19 -1 -11 9 46 58 30 -8 -21 -6 3 -16 -44 -42 -4 37 35 -9 -55 -67 -49 -31 -30 -39 -38 -24 -10 -6 -4 10 37 63 69 59 57 76 108 124 116 100 100 105 96 63 27 8 8 9 3 -5 -14 -25 -43 -57 -56 -35 -9 8 6 -13 -41 -60 -58 -41 -20 -5 1 -5 -25 -55 -77 -77 -67 -67 -76 -74 -50 -21 -8 -12 -14 -12 -17 -37 -53 -51 -31 -3 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 -1 -1 -2 -2 -2 -3 -3 -3 -3 -3 -4 -4 -3 -3 -4 -7 -8 -8 -5 -5 -6 -8 -7 -7 -7 -9 -9 -7 -7 -9 -9 -7 -2 1 3 4 4 4 4 5 6 6 3 3 7 13 18 19 16 12 12 17 23 24 22 23 28 30 25 21 27 39 43 35 22 15 14 16 15 15 17 25 41 56 58 51 52 65 71 54 24 8 11 12 -3 -27 -42 -52 -68 -91 -111 -128 -149 -170 -183 -186 -185 -183 -173 -162 -164 -179 -182 -165 -145 -146 -158 -147 -113 -86 -81 -74 -38 14 48 58 66 87 112 128 140 163 191 206 204 199 202 209 206 190 165 140 117 105 108 119 124 112 101 98 102 94 73 51 45 44 30 2 -18 -24 -27 -39 -53 -54 -49 -55 -74 -89 -95 -104 -124 -129 -96 -46 -17 -24 -33 -9 27 44 42 53 86 109 94 60 50 73 103 112 101 85 72 64 62 53 24 -14 -36 -25 4 21 24 23 25 17 0 -14 -14 -13 -32 -60 -77 -77 -73 -80 -96 -113 -125 -132 -129 -126 -132 -147 -153 -136 -107 -80 -73 -75 -73 -60 -44 -38 -43 -59 -79 -93 -96 -86 -65 -40 -27 -29 -36 -32 -17 1 14 27 46 63 68 60 51 55 69 76 68 56 49 52 58 59 52 47 42 38 32 26 24 27 29 28 26 29 31 29 18 4 -6 -10 -11 -11 -14 -22 -33 -42 -48 -48 -45 -41 -34 -30 -28 -26 -17 -2 13 23 28 26 16 3 -3 0 8 7 -1 -5 3 17 21 10 -3 -8 -3 0 -6 -14 -13 -2 10 9 -3 -15 -18 -13 -8 -8 -10 -9 -6 -3 -2 -1 1 7 11 12 10 9 12 16 18 16 13 13 13 11 7 3 0 0 0 0 -1 -2 -2 -4 -5 -4 -3 -1 0 0 -1 -2 -3 -3 -2 -1 -1 0 -1 -1 -2 -2 -2 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 0 +49 33 3 -19 
-25 -29 -41 -56 -57 -51 -58 -77 -93 -99 -108 -128 -133 -99 -47 -17 -24 -33 -9 28 45 43 54 87 110 95 61 51 74 104 113 102 86 73 65 62 53 25 -14 -36 -25 5 22 25 24 26 18 0 -14 -14 -13 -32 -60 -78 -78 -74 -82 -98 -116 -129 -137 -134 -131 -138 -154 -161 -144 -113 -85 -78 -80 -78 -65 -48 -41 -47 -65 -87 -104 -108 -97 -73 -45 -30 -33 -41 -37 -19 2 17 33 56 78 84 75 64 70 88 98 89 74 65 70 78 80 72 65 59 54 46 38 35 40 44 43 40 45 49 46 30 7 -9 -16 -18 -18 -24 -38 -57 -75 -86 -88 -84 -76 -65 -57 -55 -52 -34 -4 28 50 62 58 38 8 -7 2 20 19 -1 -11 9 46 58 30 -8 -21 -6 3 -16 -44 -42 -4 37 35 -9 -55 -67 -49 -31 -30 -39 -38 -24 -10 -6 -4 10 37 63 69 59 57 76 108 124 116 100 100 105 96 63 27 8 8 9 3 -5 -14 -25 -43 -57 -56 -35 -9 8 6 -13 -41 -60 -58 -41 -20 -5 1 -5 -25 -55 -77 -77 -67 -67 -76 -74 -50 -21 -8 -12 -14 -12 -17 -37 -53 -51 -31 -3 23 30 6 -35 -56 -37 1 11 -15 -41 -31 -4 8 -3 -9 4 25 34 31 24 18 13 17 31 52 64 62 52 41 35 27 19 20 31 51 67 68 56 37 20 8 6 10 22 36 44 43 37 27 17 5 -9 -21 -26 -28 -36 -47 -52 -45 -32 -31 -41 -53 -49 -24 6 28 32 23 9 1 4 17 30 36 41 54 78 99 104 96 92 103 116 115 106 103 121 136 121 78 40 40 66 69 29 -25 -41 -16 -2 -47 -126 -174 -163 -130 -115 -121 -128 -132 -150 -175 -181 -161 -147 -161 -192 -209 -204 -189 -172 -144 -110 -88 -86 -85 -68 -39 -11 21 60 93 100 85 84 114 154 164 139 121 134 163 165 132 96 94 122 144 136 116 106 108 102 81 57 37 12 -22 -52 -65 -65 +0 0 0 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -2 -2 -2 -3 -2 -1 -1 -1 -1 -1 0 1 1 2 3 5 5 3 3 4 7 8 7 6 6 5 5 5 2 -2 -5 -3 0 2 3 3 3 2 0 -3 -3 -3 -6 -12 -15 -16 -16 -18 -22 -26 -30 -33 -33 -33 -36 -41 -44 -40 -33 -25 -24 -25 -25 -21 -16 -14 -17 -23 -32 -38 -41 -37 -29 -18 -13 -14 -18 -16 -9 0 7 15 26 36 40 36 31 35 45 50 46 39 35 38 43 45 41 37 34 32 27 23 21 25 27 27 26 29 32 30 20 4 -7 -12 -13 -13 -18 -28 -43 -56 -65 -67 -65 -59 -51 -45 -44 -42 -28 -4 22 41 51 48 31 6 -6 1 17 16 -1 -10 7 40 51 26 -8 -20 -6 2 -15 -41 -39 -4 34 32 -9 -53 -64 -47 -30 -29 -38 -37 -24 -10 -6 -4 9 36 61 67 58 56 
75 107 123 115 99 99 104 95 62 26 7 7 9 3 -5 -14 -25 -43 -57 -56 -35 -9 7 5 -13 -41 -60 -58 -41 -20 -5 0 -5 -25 -54 -75 -75 -65 -65 -73 -71 -48 -20 -8 -12 -14 -12 -16 -35 -49 -47 -29 -3 20 26 5 -32 -50 -33 0 9 -13 -36 -27 -4 6 -3 -8 3 20 27 24 19 14 10 13 23 39 48 46 38 30 25 19 13 14 21 35 46 46 37 24 13 5 3 6 13 22 26 26 22 15 9 2 -6 -12 -15 -16 -20 -25 -28 -24 -17 -16 -21 -26 -24 -12 2 12 14 9 3 0 1 6 11 13 15 20 28 35 36 32 30 33 37 35 32 30 35 38 33 21 10 10 16 16 6 -6 -10 -4 -1 -10 -25 -34 -31 -24 -20 -21 -21 -21 -23 -26 -26 -22 -19 -20 -23 -24 -22 -20 -17 -14 -10 -8 -7 -7 -5 -3 -1 1 3 4 4 3 3 4 5 5 4 3 3 3 3 2 1 1 1 1 1 0 0 0 0 0 0 0 0 -1 -1 -1 0 +-21 -6 3 -16 -44 -42 -4 37 35 -9 -55 -67 -49 -31 -30 -39 -38 -24 -10 -6 -4 10 37 63 69 59 57 76 108 124 116 100 100 105 96 63 27 8 8 9 3 -5 -14 -25 -43 -57 -56 -35 -9 8 6 -13 -41 -60 -58 -41 -20 -5 1 -5 -25 -55 -77 -77 -67 -67 -76 -74 -50 -21 -8 -12 -14 -12 -17 -37 -53 -51 -31 -3 23 30 6 -35 -56 -37 1 11 -15 -41 -31 -4 8 -3 -9 4 25 34 31 24 18 13 17 31 52 64 62 52 41 35 27 19 20 31 51 67 68 56 37 20 8 6 10 22 36 44 43 37 27 17 5 -9 -21 -26 -28 -36 -47 -52 -45 -32 -31 -41 -53 -49 -24 6 28 32 23 9 1 4 17 30 36 41 54 78 99 104 96 92 103 116 115 106 103 121 136 121 78 40 40 66 69 29 -25 -41 -16 -2 -47 -126 -174 -163 -130 -115 -121 -128 -132 -150 -175 -181 -161 -147 -161 -192 -209 -204 -189 -172 -144 -110 -88 -86 -85 -68 -39 -11 21 60 93 100 85 84 114 154 164 139 121 134 163 165 132 96 94 122 144 136 116 106 108 102 81 57 37 12 -22 -52 -65 -65 -65 -70 -75 -84 -100 -114 -103 -64 -26 -19 -35 -40 -21 3 5 -6 -6 10 28 31 23 16 5 -19 -49 -60 -40 1 35 49 54 52 35 5 -16 -8 18 34 31 26 40 53 37 -9 -47 -56 -45 -37 -39 -35 -22 -14 -14 -11 6 28 39 36 21 3 -22 -52 -77 -91 -99 -103 -100 -82 -54 -38 -47 -79 -109 -120 -106 -82 -56 -25 11 40 39 16 -2 7 32 38 9 -35 -66 -66 -37 2 24 4 -36 -49 -5 58 82 57 30 36 59 74 87 121 164 178 158 131 115 97 59 28 30 56 58 24 -12 -13 12 30 23 4 -8 -9 -15 -30 -42 -41 -32 -28 -30 -28 -19 -14 -35 -73 -90 
-69 -34 -23 -43 -54 -32 12 49 63 58 40 19 3 -1 2 8 14 18 21 17 13 +0 -1 0 -1 -1 -1 -1 0 0 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 0 1 2 2 2 2 3 5 6 6 6 6 7 6 4 2 0 0 0 0 -1 -2 -3 -6 -7 -8 -5 -2 1 0 -3 -7 -10 -10 -8 -4 -1 0 -2 -6 -12 -18 -18 -16 -17 -19 -19 -14 -6 -3 -4 -5 -4 -6 -12 -17 -17 -11 -2 8 10 2 -14 -22 -15 0 4 -7 -18 -14 -2 3 -2 -5 1 11 16 15 11 9 6 8 16 27 34 34 29 23 20 15 11 11 18 31 41 42 35 23 13 5 3 6 14 24 30 30 26 19 12 3 -7 -16 -20 -22 -28 -37 -41 -36 -26 -25 -33 -43 -40 -20 4 23 26 19 7 0 3 14 26 31 36 47 69 88 93 87 83 94 106 106 98 96 113 127 114 73 38 38 63 66 27 -25 -40 -16 -2 -46 -124 -171 -161 -129 -114 -120 -127 -131 -149 -175 -181 -161 -147 -161 -192 -209 -204 -189 -172 -144 -110 -88 -86 -85 -68 -39 -11 20 59 92 99 84 83 112 151 161 136 118 130 158 160 127 92 90 117 137 129 110 100 101 95 75 53 34 11 -21 -48 -60 -59 -59 -63 -67 -75 -89 -101 -90 -56 -23 -17 -30 -34 -18 2 4 -5 -5 8 22 24 18 12 3 -15 -38 -46 -31 0 25 35 39 37 24 3 -12 -6 12 22 20 17 26 34 23 -6 -30 -35 -28 -23 -24 -21 -13 -8 -8 -7 3 14 20 18 10 1 -11 -26 -37 -43 -46 -48 -45 -37 -24 -17 -20 -33 -44 -48 -42 -32 -21 -10 3 13 13 5 -1 2 9 11 2 -11 -19 -19 -10 0 6 0 -9 -12 -2 12 17 11 6 7 11 13 15 20 27 28 24 19 16 13 7 3 3 6 6 2 -2 -2 1 2 1 0 -1 -1 -2 -2 -3 -3 -2 -2 -2 -2 -1 -1 -2 -3 -3 -2 -1 -1 -1 -1 -1 0 0 0 0 0 0 0 -1 0 0 0 0 0 0 0 +96 92 103 116 115 106 103 121 136 121 78 40 40 66 69 29 -25 -41 -16 -2 -47 -126 -174 -163 -130 -115 -121 -128 -132 -150 -175 -181 -161 -147 -161 -192 -209 -204 -189 -172 -144 -110 -88 -86 -85 -68 -39 -11 21 60 93 100 85 84 114 154 164 139 121 134 163 165 132 96 94 122 144 136 116 106 108 102 81 57 37 12 -22 -52 -65 -65 -65 -70 -75 -84 -100 -114 -103 -64 -26 -19 -35 -40 -21 3 5 -6 -6 10 28 31 23 16 5 -19 -49 -60 -40 1 35 49 54 52 35 5 -16 -8 18 34 31 26 40 53 37 -9 -47 -56 -45 -37 -39 -35 -22 -14 -14 -11 6 28 39 36 21 3 -22 -52 -77 -91 -99 -103 -100 -82 -54 -38 -47 -79 -109 -120 -106 -82 -56 -25 11 40 39 16 -2 7 32 38 9 -35 -66 -66 -37 2 24 4 -36 -49 -5 
58 82 57 30 36 59 74 87 121 164 178 158 131 115 97 59 28 30 56 58 24 -12 -13 12 30 23 4 -8 -9 -15 -30 -42 -41 -32 -28 -30 -28 -19 -14 -35 -73 -90 -69 -34 -23 -43 -54 -32 12 49 63 58 40 19 3 -1 2 8 14 18 21 17 13 10 3 -17 -39 -36 -2 35 48 38 28 32 33 29 23 27 28 14 -10 -28 -30 -19 -9 -4 3 17 32 39 36 25 20 25 31 26 13 1 0 9 17 18 8 -9 -26 -40 -47 -47 -43 -41 -44 -46 -42 -33 -32 -42 -57 -59 -40 -14 5 2 -18 -39 -43 -34 -27 -36 -48 -40 -16 9 16 16 22 33 43 51 56 52 31 11 9 22 23 -4 -40 -65 -79 -102 -130 -147 -150 -149 -148 -140 -126 -117 -118 -110 -85 -54 -32 -20 -4 22 43 58 74 93 105 97 79 77 105 144 165 154 123 104 115 148 181 201 208 214 224 234 238 227 208 195 196 191 152 77 10 -16 -12 -20 -49 -76 -82 -82 -94 -114 -124 -125 -133 -149 -154 -143 -136 -144 -158 -160 -153 -145 -141 -136 -133 -130 -119 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 -1 -1 -1 -1 -2 -4 -6 -6 -5 -5 -6 -6 -7 -8 -10 -11 -11 -10 -12 -15 -17 -18 -17 -17 -15 -12 -10 -10 -10 -9 -5 -2 2 8 13 15 13 13 19 27 30 26 23 27 34 35 29 21 22 29 35 34 30 28 29 28 23 16 11 3 -8 -17 -22 -23 -23 -25 -28 -32 -38 -45 -41 -26 -11 -8 -15 -18 -10 1 2 -3 -3 4 13 15 11 8 2 -11 -27 -33 -23 0 19 28 31 30 20 3 -10 -5 11 21 19 16 26 35 24 -7 -33 -39 -32 -27 -28 -26 -17 -11 -11 -9 4 21 30 28 16 2 -18 -42 -63 -75 -82 -86 -84 -69 -46 -33 -41 -69 -95 -105 -94 -73 -50 -23 9 36 35 14 -2 6 29 35 8 -33 -63 -63 -36 1 22 3 -35 -48 -5 56 79 55 29 35 57 72 85 119 162 176 156 130 114 96 58 27 29 55 57 23 -12 -13 12 29 22 3 -8 -9 -15 -30 -42 -41 -32 -28 -30 -28 -19 -14 -35 -72 -89 -68 -34 -23 -42 -53 -31 11 46 60 55 37 17 2 -1 1 7 12 16 19 15 11 9 2 -16 -35 -32 -2 30 41 32 23 27 27 24 19 22 23 11 -9 -23 -24 -16 -8 -4 2 12 24 29 26 18 14 18 22 18 9 0 0 6 11 11 5 -6 -17 -26 -30 -30 -27 -25 -27 -28 -25 -19 -19 -24 -32 -33 -22 -8 2 1 -10 -20 -21 -17 -13 -17 -22 -18 -8 3 6 6 9 13 16 19 21 19 11 3 3 7 7 -2 -13 -21 -25 -31 -38 -42 -42 -41 -39 -36 -32 -29 -28 -26 -19 -12 -7 -5 -1 4 7 10 12 15 16 15 11 11 14 19 21 18 14 11 12 15 17 18 18 18 17 17 17 15 13 
11 11 10 7 3 0 -1 -1 -1 -2 -3 -3 -2 -2 -3 -3 -2 -2 -2 -2 -2 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 0 +39 16 -2 7 32 38 9 -35 -66 -66 -37 2 24 4 -36 -49 -5 58 82 57 30 36 59 74 87 121 164 178 158 131 115 97 59 28 30 56 58 24 -12 -13 12 30 23 4 -8 -9 -15 -30 -42 -41 -32 -28 -30 -28 -19 -14 -35 -73 -90 -69 -34 -23 -43 -54 -32 12 49 63 58 40 19 3 -1 2 8 14 18 21 17 13 10 3 -17 -39 -36 -2 35 48 38 28 32 33 29 23 27 28 14 -10 -28 -30 -19 -9 -4 3 17 32 39 36 25 20 25 31 26 13 1 0 9 17 18 8 -9 -26 -40 -47 -47 -43 -41 -44 -46 -42 -33 -32 -42 -57 -59 -40 -14 5 2 -18 -39 -43 -34 -27 -36 -48 -40 -16 9 16 16 22 33 43 51 56 52 31 11 9 22 23 -4 -40 -65 -79 -102 -130 -147 -150 -149 -148 -140 -126 -117 -118 -110 -85 -54 -32 -20 -4 22 43 58 74 93 105 97 79 77 105 144 165 154 123 104 115 148 181 201 208 214 224 234 238 227 208 195 196 191 152 77 10 -16 -12 -20 -49 -76 -82 -82 -94 -114 -124 -125 -133 -149 -154 -143 -136 -144 -158 -160 -153 -145 -141 -136 -133 -130 -119 -92 -59 -48 -48 -31 11 51 61 52 54 73 77 53 26 15 17 10 -2 0 15 17 -4 -26 -23 -1 16 10 -11 -30 -39 -38 -35 -36 -45 -58 -64 -50 -21 -5 -17 -52 -80 -80 -57 -28 -4 15 25 16 -9 -27 -19 7 26 33 42 53 52 29 4 -5 -5 -12 -20 -8 18 21 -12 -42 -33 -6 -14 -55 -76 -51 -9 -3 -34 -60 -54 -29 -11 -16 -34 -35 3 60 78 32 -28 -23 47 99 80 34 40 102 150 138 91 56 49 54 62 69 71 65 62 60 48 30 36 89 152 166 125 80 77 100 100 59 6 -20 -10 12 23 20 23 44 63 49 -3 -56 -78 -78 -83 -84 -63 -28 -21 -48 -66 -44 -10 -9 -49 -86 -96 -97 -108 -119 -117 -104 -93 -79 -42 5 25 -8 -67 +0 0 -1 0 0 0 0 -1 -1 -1 -1 0 0 0 -1 -1 -1 1 1 1 0 1 1 2 3 4 7 8 7 6 6 5 3 1 2 4 4 2 -2 -2 1 3 2 0 -1 -2 -2 -4 -6 -6 -5 -5 -5 -5 -4 -3 -7 -14 -18 -14 -8 -5 -10 -13 -8 2 12 16 15 10 5 0 -1 0 2 4 5 6 5 4 3 1 -7 -15 -14 -1 13 19 15 11 13 14 12 10 12 13 6 -5 -14 -15 -10 -5 -3 1 9 17 21 20 14 11 14 18 15 7 0 0 5 10 11 5 -6 -18 -27 -32 -33 -30 -29 -32 -33 -31 -25 -24 -32 -43 -45 -31 -11 3 1 -15 -32 -35 -28 -23 -30 -40 -34 -14 7 13 13 18 28 37 44 49 46 27 9 8 19 20 -4 -37 -61 -74 -96 -122 
-139 -142 -142 -141 -134 -121 -113 -114 -107 -83 -53 -32 -20 -4 21 42 57 73 91 104 96 78 76 104 143 164 153 122 103 114 147 181 201 207 213 223 233 237 226 207 194 194 189 150 76 9 -16 -12 -20 -49 -75 -81 -80 -92 -111 -120 -121 -128 -143 -147 -136 -129 -136 -149 -150 -143 -135 -131 -126 -122 -119 -108 -83 -53 -43 -43 -28 9 44 52 44 46 62 65 44 21 12 13 8 -2 0 11 13 -4 -21 -18 -1 12 7 -9 -23 -29 -28 -26 -26 -32 -41 -45 -35 -15 -4 -12 -34 -52 -51 -36 -18 -3 9 14 9 -6 -16 -11 3 14 17 22 27 27 14 2 -3 -3 -6 -10 -4 8 9 -6 -19 -15 -3 -6 -23 -31 -20 -4 -2 -13 -22 -19 -10 -4 -6 -11 -11 0 17 22 9 -8 -7 12 25 19 8 9 23 33 29 19 11 9 10 11 12 12 10 9 9 7 4 4 11 19 20 14 8 8 10 9 5 0 -2 -1 0 1 1 1 2 3 2 -1 -3 -4 -4 -4 -3 -2 -1 -1 -2 -2 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 0 0 -1 0 +22 23 -4 -40 -65 -79 -102 -130 -147 -150 -149 -148 -140 -126 -117 -118 -110 -85 -54 -32 -20 -4 22 43 58 74 93 105 97 79 77 105 144 165 154 123 104 115 148 181 201 208 214 224 234 238 227 208 195 196 191 152 77 10 -16 -12 -20 -49 -76 -82 -82 -94 -114 -124 -125 -133 -149 -154 -143 -136 -144 -158 -160 -153 -145 -141 -136 -133 -130 -119 -92 -59 -48 -48 -31 11 51 61 52 54 73 77 53 26 15 17 10 -2 0 15 17 -4 -26 -23 -1 16 10 -11 -30 -39 -38 -35 -36 -45 -58 -64 -50 -21 -5 -17 -52 -80 -80 -57 -28 -4 15 25 16 -9 -27 -19 7 26 33 42 53 52 29 4 -5 -5 -12 -20 -8 18 21 -12 -42 -33 -6 -14 -55 -76 -51 -9 -3 -34 -60 -54 -29 -11 -16 -34 -35 3 60 78 32 -28 -23 47 99 80 34 40 102 150 138 91 56 49 54 62 69 71 65 62 60 48 30 36 89 152 166 125 80 77 100 100 59 6 -20 -10 12 23 20 23 44 63 49 -3 -56 -78 -78 -83 -84 -63 -28 -21 -48 -66 -44 -10 -9 -49 -86 -96 -97 -108 -119 -117 -104 -93 -79 -42 5 25 -8 -67 -97 -78 -39 -15 -12 -2 20 28 1 -33 -38 -10 12 6 -12 -19 -21 -36 -54 -51 -31 -23 -35 -44 -31 -5 5 -7 -22 -32 -44 -60 -71 -65 -49 -35 -30 -32 -33 -24 2 43 80 92 82 64 55 63 79 96 105 103 101 109 126 132 109 66 32 29 46 56 49 37 25 7 -18 -36 -26 3 18 -4 -52 -90 -97 -75 -48 -36 -40 -52 -53 -40 -20 -6 -8 -17 -16 3 21 18 -12 
-30 -12 30 49 24 -14 -17 17 45 41 23 26 44 45 21 -1 9 40 48 14 -35 -49 -2 67 106 93 66 62 74 57 2 -55 -72 -58 -53 -78 -108 -114 -89 -46 -5 14 6 -20 -35 -32 -22 -17 -6 33 96 145 164 159 145 117 63 3 -25 -16 -9 -39 -84 -93 -39 31 58 28 -22 +0 0 -1 -1 -1 -1 -1 -1 -1 -1 -2 -2 -2 -2 -2 -2 -2 -2 -2 -1 -1 -1 0 1 2 2 3 4 4 4 4 6 9 11 11 9 8 9 13 16 19 21 22 25 27 29 28 27 26 28 28 23 12 1 -3 -3 -4 -10 -15 -17 -18 -21 -26 -29 -30 -33 -38 -40 -38 -37 -40 -45 -47 -46 -45 -45 -44 -44 -44 -41 -33 -22 -18 -18 -12 4 20 24 21 22 31 33 23 11 6 7 4 -1 0 7 8 -3 -14 -13 -1 8 5 -7 -17 -23 -23 -21 -22 -28 -36 -40 -32 -14 -4 -12 -35 -54 -54 -39 -20 -3 10 17 11 -7 -20 -15 5 19 25 32 40 40 22 3 -4 -5 -10 -17 -7 14 17 -11 -36 -29 -6 -13 -48 -67 -45 -8 -3 -31 -54 -49 -27 -11 -15 -32 -33 2 55 73 30 -27 -22 44 94 76 32 38 98 145 134 88 54 47 52 60 67 70 64 61 59 47 29 35 88 151 165 124 79 76 99 100 59 5 -20 -10 11 22 19 22 43 62 48 -3 -56 -78 -77 -82 -83 -62 -28 -21 -47 -65 -43 -10 -9 -48 -83 -92 -93 -103 -113 -110 -98 -87 -74 -39 4 22 -8 -61 -88 -71 -35 -14 -11 -2 17 24 0 -29 -33 -9 10 5 -10 -16 -18 -30 -44 -41 -25 -19 -28 -34 -24 -4 3 -6 -17 -24 -32 -43 -51 -46 -35 -25 -21 -22 -22 -16 1 27 50 57 50 39 33 37 46 55 60 58 56 60 68 70 57 34 16 14 22 27 23 17 11 3 -9 -16 -12 1 7 -2 -21 -36 -38 -29 -18 -14 -15 -19 -19 -14 -7 -2 -3 -6 -5 0 5 4 -4 -8 -4 7 11 5 -4 -4 3 9 8 4 4 8 8 3 -1 1 6 7 2 -5 -7 -1 8 12 10 7 6 7 5 0 -5 -6 -5 -4 -6 -7 -7 -6 -3 -1 0 0 -1 -2 -2 -1 -1 -1 0 2 2 2 2 1 1 0 0 -1 -1 -1 -1 -1 -1 -1 0 0 0 0 +-29 -11 -16 -34 -35 3 60 78 32 -28 -23 47 99 80 34 40 102 150 138 91 56 49 54 62 69 71 65 62 60 48 30 36 89 152 166 125 80 77 100 100 59 6 -20 -10 12 23 20 23 44 63 49 -3 -56 -78 -78 -83 -84 -63 -28 -21 -48 -66 -44 -10 -9 -49 -86 -96 -97 -108 -119 -117 -104 -93 -79 -42 5 25 -8 -67 -97 -78 -39 -15 -12 -2 20 28 1 -33 -38 -10 12 6 -12 -19 -21 -36 -54 -51 -31 -23 -35 -44 -31 -5 5 -7 -22 -32 -44 -60 -71 -65 -49 -35 -30 -32 -33 -24 2 43 80 92 82 64 55 63 79 96 105 103 101 109 126 132 109 66 
32 29 46 56 49 37 25 7 -18 -36 -26 3 18 -4 -52 -90 -97 -75 -48 -36 -40 -52 -53 -40 -20 -6 -8 -17 -16 3 21 18 -12 -30 -12 30 49 24 -14 -17 17 45 41 23 26 44 45 21 -1 9 40 48 14 -35 -49 -2 67 106 93 66 62 74 57 2 -55 -72 -58 -53 -78 -108 -114 -89 -46 -5 14 6 -20 -35 -32 -22 -17 -6 33 96 145 164 159 145 117 63 3 -25 -16 -9 -39 -84 -93 -39 31 58 28 -22 -55 -75 -105 -143 -170 -178 -181 -187 -174 -127 -77 -71 -110 -131 -89 -19 9 -13 -25 14 69 82 54 43 79 126 145 137 136 142 128 81 35 25 50 81 96 98 93 85 72 56 47 45 42 28 4 -6 2 12 -1 -29 -49 -42 -29 -36 -70 -106 -119 -107 -88 -81 -89 -94 -74 -32 3 -8 -48 -81 -82 -74 -75 -75 -49 -6 27 32 34 55 88 109 109 99 90 83 77 75 75 79 81 81 86 93 93 76 39 -1 -28 -31 -18 -3 2 0 -3 -10 -26 -43 -41 -23 -20 -60 -129 -171 -153 -93 -51 -61 -107 -141 -136 -102 -69 -56 -62 -73 -80 -80 -71 -48 -17 4 1 -22 -37 -35 -26 -28 -32 -13 24 53 51 31 27 46 68 65 36 14 19 42 46 23 5 23 66 90 80 63 +0 -1 -1 -1 -1 0 0 0 0 -1 -1 0 0 0 0 0 1 2 2 2 1 1 1 2 2 2 2 2 2 2 1 2 5 10 11 9 6 6 8 9 5 0 -3 -2 1 2 2 3 6 9 7 -1 -9 -13 -14 -15 -16 -12 -6 -5 -11 -15 -10 -3 -3 -12 -22 -25 -26 -30 -33 -34 -31 -28 -25 -14 1 8 -3 -23 -34 -28 -15 -6 -5 -1 7 11 0 -14 -17 -5 5 2 -6 -9 -10 -18 -27 -26 -16 -12 -19 -24 -17 -3 2 -4 -13 -19 -26 -36 -43 -40 -31 -22 -19 -21 -22 -16 1 28 53 62 56 44 38 44 56 69 76 75 75 81 95 100 84 51 25 22 36 44 39 30 20 5 -16 -31 -22 2 15 -4 -46 -79 -86 -67 -43 -33 -36 -47 -49 -37 -19 -6 -8 -16 -15 2 19 16 -12 -29 -12 28 47 23 -14 -17 16 43 40 22 25 43 44 20 -1 8 39 47 13 -35 -49 -2 66 105 92 65 61 74 57 1 -55 -72 -58 -53 -78 -108 -114 -89 -46 -5 13 5 -20 -35 -32 -22 -17 -6 32 93 140 158 153 139 112 60 2 -24 -16 -9 -37 -79 -87 -37 28 53 25 -20 -50 -68 -94 -127 -151 -157 -158 -163 -150 -109 -66 -61 -93 -110 -74 -16 7 -11 -21 11 54 64 42 33 60 95 108 101 100 103 92 58 24 17 34 55 65 65 61 55 46 36 29 28 26 17 2 -4 1 6 -1 -17 -28 -24 -16 -20 -37 -56 -61 -54 -44 -40 -43 -45 -35 -15 1 -4 -21 -35 -35 -31 -31 -30 -19 -3 10 11 12 19 30 36 35 31 28 25 22 
21 21 21 21 21 21 23 22 17 8 -1 -7 -7 -4 -1 0 0 -1 -2 -5 -7 -7 -4 -3 -9 -18 -22 -19 -11 -6 -7 -11 -14 -13 -10 -6 -5 -5 -6 -6 -6 -5 -3 -1 0 0 -1 -2 -2 -1 -1 -1 -1 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +-53 -40 -20 -6 -8 -17 -16 3 21 18 -12 -30 -12 30 49 24 -14 -17 17 45 41 23 26 44 45 21 -1 9 40 48 14 -35 -49 -2 67 106 93 66 62 74 57 2 -55 -72 -58 -53 -78 -108 -114 -89 -46 -5 14 6 -20 -35 -32 -22 -17 -6 33 96 145 164 159 145 117 63 3 -25 -16 -9 -39 -84 -93 -39 31 58 28 -22 -55 -75 -105 -143 -170 -178 -181 -187 -174 -127 -77 -71 -110 -131 -89 -19 9 -13 -25 14 69 82 54 43 79 126 145 137 136 142 128 81 35 25 50 81 96 98 93 85 72 56 47 45 42 28 4 -6 2 12 -1 -29 -49 -42 -29 -36 -70 -106 -119 -107 -88 -81 -89 -94 -74 -32 3 -8 -48 -81 -82 -74 -75 -75 -49 -6 27 32 34 55 88 109 109 99 90 83 77 75 75 79 81 81 86 93 93 76 39 -1 -28 -31 -18 -3 2 0 -3 -10 -26 -43 -41 -23 -20 -60 -129 -171 -153 -93 -51 -61 -107 -141 -136 -102 -69 -56 -62 -73 -80 -80 -71 -48 -17 4 1 -22 -37 -35 -26 -28 -32 -13 24 53 51 31 27 46 68 65 36 14 19 42 46 23 5 23 66 90 80 63 70 79 56 5 -15 24 96 139 131 93 59 47 48 39 10 -22 -34 -27 -29 -55 -72 -55 -35 -57 -116 -137 -84 -20 -17 -67 -91 -54 0 11 -14 -22 10 51 68 67 71 85 91 80 59 46 44 49 57 70 89 111 128 139 147 151 145 131 119 105 78 42 27 47 84 94 69 39 25 11 -23 -56 -52 -19 -9 -50 -104 -118 -91 -73 -100 -151 -181 -177 -157 -151 -162 -176 -183 -178 -167 -158 -149 -131 -108 -89 -82 -78 -58 -18 18 23 -8 -46 -61 -44 -15 0 -2 -7 4 26 45 46 44 51 62 58 36 20 33 64 76 51 18 8 18 25 16 10 26 52 63 51 33 18 5 -6 -9 -2 0 -15 -36 -33 1 39 51 40 29 36 53 68 80 94 103 104 98 93 92 85 +0 -1 -1 -1 -1 -1 -1 0 0 0 -1 -1 -1 0 0 0 -1 -1 0 1 1 0 0 1 1 0 -1 0 1 2 0 -3 -4 -1 4 8 7 5 5 6 5 0 -6 -9 -7 -7 -10 -15 -16 -13 -7 -1 2 0 -4 -7 -6 -5 -4 -2 6 20 32 37 37 35 29 16 0 -7 -5 -3 -12 -26 -29 -13 9 18 9 -8 -20 -27 -39 -54 -65 -69 -72 -76 -72 -54 -33 -31 -49 -59 -41 -9 4 -7 -13 6 34 41 28 22 42 68 79 76 77 81 74 47 20 15 30 50 60 62 59 55 47 37 31 30 28 19 2 -5 1 8 -1 -22 
-37 -32 -22 -28 -54 -83 -94 -85 -71 -66 -73 -77 -61 -27 2 -7 -41 -69 -71 -64 -66 -66 -44 -6 23 28 30 49 79 99 99 91 83 77 71 70 70 74 76 77 82 89 89 73 37 -1 -28 -31 -18 -3 1 0 -3 -10 -26 -43 -41 -23 -20 -60 -129 -171 -153 -93 -51 -61 -107 -141 -136 -102 -69 -56 -62 -73 -80 -80 -71 -48 -17 3 0 -22 -37 -35 -26 -28 -32 -13 23 51 49 29 26 44 65 62 34 13 17 39 43 21 4 21 60 82 72 57 63 70 49 4 -14 21 83 120 112 79 50 39 40 32 8 -19 -28 -22 -24 -44 -57 -44 -28 -44 -89 -104 -64 -15 -13 -49 -66 -39 0 7 -10 -16 6 34 45 44 46 54 57 50 36 28 26 29 33 40 51 62 71 76 79 80 76 68 60 52 38 20 12 22 39 42 30 17 10 4 -10 -23 -21 -8 -4 -19 -39 -43 -33 -26 -35 -51 -60 -57 -50 -47 -49 -52 -52 -50 -46 -42 -39 -33 -27 -21 -19 -18 -13 -4 3 4 -2 -9 -11 -8 -3 0 -1 -2 0 3 5 5 5 5 6 6 3 1 3 5 6 4 1 0 1 1 0 0 1 2 2 2 1 0 0 -1 -1 -1 0 -1 -1 -1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +88 109 109 99 90 83 77 75 75 79 81 81 86 93 93 76 39 -1 -28 -31 -18 -3 2 0 -3 -10 -26 -43 -41 -23 -20 -60 -129 -171 -153 -93 -51 -61 -107 -141 -136 -102 -69 -56 -62 -73 -80 -80 -71 -48 -17 4 1 -22 -37 -35 -26 -28 -32 -13 24 53 51 31 27 46 68 65 36 14 19 42 46 23 5 23 66 90 80 63 70 79 56 5 -15 24 96 139 131 93 59 47 48 39 10 -22 -34 -27 -29 -55 -72 -55 -35 -57 -116 -137 -84 -20 -17 -67 -91 -54 0 11 -14 -22 10 51 68 67 71 85 91 80 59 46 44 49 57 70 89 111 128 139 147 151 145 131 119 105 78 42 27 47 84 94 69 39 25 11 -23 -56 -52 -19 -9 -50 -104 -118 -91 -73 -100 -151 -181 -177 -157 -151 -162 -176 -183 -178 -167 -158 -149 -131 -108 -89 -82 -78 -58 -18 18 23 -8 -46 -61 -44 -15 0 -2 -7 4 26 45 46 44 51 62 58 36 20 33 64 76 51 18 8 18 25 16 10 26 52 63 51 33 18 5 -6 -9 -2 0 -15 -36 -33 1 39 51 40 29 36 53 68 80 94 103 104 98 93 92 85 66 44 32 34 43 53 55 47 39 46 70 85 61 4 -39 -37 -9 0 -18 -34 -25 -14 -33 -75 -103 -104 -87 -64 -40 -24 -35 -75 -110 -110 -92 -93 -124 -147 -133 -99 -83 -92 -106 -100 -85 -77 -72 -59 -37 -19 -11 -10 -6 2 16 39 74 105 118 104 80 64 67 78 84 73 44 6 -21 -21 -4 3 -19 -53 -72 -67 -60 -71 -90 -93 -80 
-73 -88 -104 -91 -52 -22 -26 -51 -64 -50 -26 -12 -3 16 48 79 102 128 155 166 149 126 128 164 196 197 176 158 154 157 160 159 153 134 109 93 88 77 55 33 26 19 -4 -35 -50 -51 -63 -89 -103 -89 -83 -119 -178 -202 -171 -129 -126 -147 -139 -92 -52 -54 -69 -39 37 102 103 62 36 56 93 107 102 102 112 115 111 119 142 +0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 0 -1 -1 -1 -1 -1 0 0 -1 -1 -2 -2 -3 -2 -2 -4 -9 -12 -11 -8 -5 -6 -10 -14 -14 -11 -8 -7 -8 -9 -11 -11 -10 -7 -3 0 0 -4 -7 -7 -5 -6 -7 -3 5 11 11 7 6 11 16 16 9 3 5 11 13 6 1 7 21 29 26 21 24 28 20 1 -6 9 37 55 53 38 25 20 21 17 4 -11 -17 -13 -15 -28 -37 -29 -19 -31 -63 -75 -47 -12 -10 -39 -53 -32 0 6 -9 -14 6 32 43 43 46 56 61 54 40 31 30 34 40 50 65 81 95 104 111 115 111 101 93 83 62 33 21 38 69 77 57 32 21 9 -20 -49 -46 -17 -8 -45 -93 -106 -82 -66 -91 -138 -166 -163 -146 -141 -152 -165 -173 -168 -159 -151 -143 -126 -104 -86 -80 -76 -57 -18 17 22 -8 -46 -61 -44 -15 0 -2 -7 3 25 44 45 43 50 61 57 35 20 33 63 75 50 17 7 17 24 15 9 25 51 62 50 32 17 4 -6 -9 -2 0 -15 -35 -32 0 37 48 38 27 34 50 63 74 87 95 96 90 85 83 77 59 39 28 30 37 46 47 40 33 39 59 71 51 3 -33 -31 -8 0 -15 -28 -20 -11 -26 -58 -79 -79 -66 -48 -30 -18 -26 -54 -79 -78 -64 -64 -85 -99 -89 -66 -55 -60 -68 -63 -53 -48 -44 -36 -22 -12 -7 -6 -4 1 8 20 39 54 60 52 39 31 32 36 39 33 19 2 -10 -9 -2 1 -8 -21 -28 -26 -23 -26 -33 -33 -28 -25 -29 -34 -29 -16 -7 -8 -15 -18 -14 -7 -4 -1 3 11 18 22 27 32 33 29 23 23 29 33 32 28 24 22 22 22 21 19 16 12 10 9 7 5 3 2 1 -1 -3 -4 -4 -5 -6 -6 -5 -5 -6 -8 -8 -7 -5 -4 -5 -4 -3 -2 -2 -2 -1 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 +-100 -151 -181 -177 -157 -151 -162 -176 -183 -178 -167 -158 -149 -131 -108 -89 -82 -78 -58 -18 18 23 -8 -46 -61 -44 -15 0 -2 -7 4 26 45 46 44 51 62 58 36 20 33 64 76 51 18 8 18 25 16 10 26 52 63 51 33 18 5 -6 -9 -2 0 -15 -36 -33 1 39 51 40 29 36 53 68 80 94 103 104 98 93 92 85 66 44 32 34 43 53 55 47 39 46 70 85 61 4 -39 -37 -9 0 -18 -34 -25 -14 -33 -75 -103 -104 -87 -64 -40 -24 -35 -75 -110 -110 -92 -93 -124 -147 -133 -99 
-83 -92 -106 -100 -85 -77 -72 -59 -37 -19 -11 -10 -6 2 16 39 74 105 118 104 80 64 67 78 84 73 44 6 -21 -21 -4 3 -19 -53 -72 -67 -60 -71 -90 -93 -80 -73 -88 -104 -91 -52 -22 -26 -51 -64 -50 -26 -12 -3 16 48 79 102 128 155 166 149 126 128 164 196 197 176 158 154 157 160 159 153 134 109 93 88 77 55 33 26 19 -4 -35 -50 -51 -63 -89 -103 -89 -83 -119 -178 -202 -171 -129 -126 -147 -139 -92 -52 -54 -69 -39 37 102 103 62 36 56 93 107 102 102 112 115 111 119 142 144 107 59 45 60 59 29 0 4 17 6 -21 -28 -19 -34 -83 -124 -113 -71 -57 -91 -132 -136 -106 -89 -99 -117 -116 -104 -106 -129 -146 -142 -124 -113 -110 -106 -99 -95 -87 -67 -39 -36 -77 -129 -139 -104 -59 -40 -32 -9 22 36 39 58 93 104 65 12 8 51 81 57 10 4 51 96 97 69 66 92 113 107 99 128 181 204 175 126 105 117 122 91 45 14 8 9 -4 -30 -65 -106 -141 -153 -130 -90 -54 -36 -30 -36 -50 -67 -73 -65 -56 -51 -41 -14 20 38 42 45 54 54 39 24 19 14 -3 -14 14 87 159 181 154 127 127 143 146 119 87 74 82 85 68 42 26 13 -18 -63 -95 -102 -105 -130 -173 -211 -230 -241 -245 -240 -228 -221 -225 -225 -214 -202 -200 -203 -193 -173 -149 +0 -1 -1 -1 -1 -1 -1 -1 -1 -1 -2 -2 -2 -2 -2 -2 -2 -2 -2 -1 0 0 -1 -2 -3 -2 -1 0 -1 -1 0 1 2 3 3 3 4 4 3 1 3 6 8 5 2 0 2 3 2 1 3 8 10 8 5 3 0 -2 -2 -1 0 -4 -8 -8 0 9 12 10 7 9 14 19 23 27 31 32 31 30 30 29 23 15 11 12 16 20 21 18 15 19 29 36 26 1 -18 -18 -5 0 -9 -17 -13 -8 -18 -40 -56 -57 -48 -36 -23 -14 -21 -45 -66 -67 -57 -58 -78 -94 -86 -65 -55 -62 -72 -69 -59 -54 -51 -42 -27 -14 -9 -8 -5 1 12 29 57 81 92 82 63 51 54 63 69 60 36 5 -18 -18 -4 2 -17 -47 -64 -60 -54 -64 -81 -84 -73 -67 -81 -96 -85 -49 -21 -25 -48 -61 -48 -25 -12 -3 15 46 76 98 124 151 162 145 123 125 161 193 194 174 156 152 156 159 158 152 133 108 92 87 76 55 33 25 18 -4 -35 -50 -51 -63 -89 -103 -89 -83 -118 -177 -200 -169 -127 -124 -144 -136 -90 -51 -53 -67 -38 35 97 98 58 34 52 87 100 95 94 103 105 101 108 128 129 96 52 39 52 51 25 0 3 14 5 -18 -24 -16 -29 -69 -102 -92 -58 -46 -72 -104 -106 -82 -69 -76 -88 -87 -77 -78 -94 -105 -101 -88 -79 
-76 -73 -67 -64 -58 -44 -26 -23 -49 -81 -86 -63 -36 -24 -19 -6 12 20 21 31 49 54 33 6 4 25 39 27 4 1 23 43 42 29 28 38 46 43 39 49 68 75 63 44 36 39 40 29 14 4 2 2 -2 -9 -18 -29 -38 -40 -33 -22 -13 -9 -7 -8 -11 -14 -15 -13 -11 -10 -8 -3 3 5 6 6 7 7 4 2 2 1 -1 -2 1 8 14 15 12 9 9 9 9 7 4 3 4 3 2 1 0 0 -1 -2 -3 -3 -3 -3 -3 -4 -3 -3 -3 -2 -2 -2 -1 -1 -1 -1 -1 -1 -1 -1 0 +-80 -73 -88 -104 -91 -52 -22 -26 -51 -64 -50 -26 -12 -3 16 48 79 102 128 155 166 149 126 128 164 196 197 176 158 154 157 160 159 153 134 109 93 88 77 55 33 26 19 -4 -35 -50 -51 -63 -89 -103 -89 -83 -119 -178 -202 -171 -129 -126 -147 -139 -92 -52 -54 -69 -39 37 102 103 62 36 56 93 107 102 102 112 115 111 119 142 144 107 59 45 60 59 29 0 4 17 6 -21 -28 -19 -34 -83 -124 -113 -71 -57 -91 -132 -136 -106 -89 -99 -117 -116 -104 -106 -129 -146 -142 -124 -113 -110 -106 -99 -95 -87 -67 -39 -36 -77 -129 -139 -104 -59 -40 -32 -9 22 36 39 58 93 104 65 12 8 51 81 57 10 4 51 96 97 69 66 92 113 107 99 128 181 204 175 126 105 117 122 91 45 14 8 9 -4 -30 -65 -106 -141 -153 -130 -90 -54 -36 -30 -36 -50 -67 -73 -65 -56 -51 -41 -14 20 38 42 45 54 54 39 24 19 14 -3 -14 14 87 159 181 154 127 127 143 146 119 87 74 82 85 68 42 26 13 -18 -63 -95 -102 -105 -130 -173 -211 -230 -241 -245 -240 -228 -221 -225 -225 -214 -202 -200 -203 -193 -173 -149 -115 -59 9 57 70 68 81 111 140 156 160 162 169 177 179 161 130 112 124 147 149 128 113 115 116 104 96 110 124 102 49 14 25 53 51 11 -33 -42 -28 -20 -32 -49 -49 -28 -9 -10 -23 -34 -37 -41 -55 -73 -86 -94 -102 -103 -80 -38 -1 3 -24 -49 -48 -26 -10 -21 -50 -69 -54 -17 15 17 -5 -31 -45 -43 -32 -23 -19 -21 -27 -24 -14 3 22 35 40 40 42 50 61 69 72 76 84 97 107 108 94 73 62 64 66 45 6 -23 -30 -23 -19 -28 -41 -56 -60 -40 6 49 61 43 25 26 32 22 10 17 39 46 27 11 14 17 -9 -58 -86 -75 -48 -41 -55 -65 -60 -62 -90 -127 -143 -127 -105 -95 -88 -63 -23 13 30 40 55 68 59 30 6 4 15 19 +0 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 0 0 1 1 2 3 4 4 3 4 6 7 8 8 7 8 8 9 10 10 9 8 7 7 6 5 3 2 2 -1 -5 -7 -7 -9 -13 
-15 -14 -13 -20 -30 -35 -31 -24 -24 -29 -29 -20 -12 -12 -16 -10 8 25 26 16 9 15 26 31 30 31 34 36 36 39 48 50 38 21 16 22 22 11 0 1 7 2 -10 -13 -9 -16 -39 -59 -55 -35 -29 -46 -68 -71 -56 -48 -54 -65 -65 -59 -61 -76 -87 -85 -76 -70 -69 -67 -63 -62 -57 -45 -26 -25 -53 -89 -97 -74 -42 -29 -24 -7 16 26 29 43 71 80 50 9 6 40 65 46 8 3 42 80 81 58 56 78 97 92 86 112 159 181 156 113 94 106 111 83 41 12 7 8 -4 -29 -62 -101 -135 -146 -125 -87 -53 -35 -30 -35 -49 -66 -72 -64 -56 -51 -41 -14 19 37 41 44 53 53 38 23 18 13 -3 -14 14 87 158 180 153 126 126 142 145 118 86 73 81 84 67 41 25 12 -18 -62 -93 -100 -103 -126 -168 -204 -221 -231 -234 -229 -216 -209 -212 -211 -200 -188 -185 -187 -177 -158 -136 -104 -53 8 50 61 59 70 96 120 133 136 136 141 147 148 132 106 90 99 117 117 100 87 88 88 78 72 81 91 74 35 10 17 37 35 7 -23 -29 -19 -14 -21 -32 -32 -18 -6 -7 -14 -21 -22 -24 -32 -42 -49 -52 -56 -56 -43 -20 -1 1 -12 -24 -24 -13 -5 -10 -23 -31 -24 -8 6 6 -3 -13 -18 -17 -12 -9 -7 -8 -10 -9 -5 0 6 10 11 11 11 13 16 18 18 18 20 22 24 23 20 15 12 12 12 8 1 -4 -5 -4 -3 -5 -6 -8 -8 -6 0 5 6 4 2 2 2 1 0 1 2 3 1 0 0 0 -1 -3 -4 -4 -2 -2 -2 -3 -2 -2 -3 -3 -3 -3 -2 -2 -1 -1 -1 0 0 0 0 0 0 0 0 0 0 0 +117 122 91 45 14 8 9 -4 -30 -65 -106 -141 -153 -130 -90 -54 -36 -30 -36 -50 -67 -73 -65 -56 -51 -41 -14 20 38 42 45 54 54 39 24 19 14 -3 -14 14 87 159 181 154 127 127 143 146 119 87 74 82 85 68 42 26 13 -18 -63 -95 -102 -105 -130 -173 -211 -230 -241 -245 -240 -228 -221 -225 -225 -214 -202 -200 -203 -193 -173 -149 -115 -59 9 57 70 68 81 111 140 156 160 162 169 177 179 161 130 112 124 147 149 128 113 115 116 104 96 110 124 102 49 14 25 53 51 11 -33 -42 -28 -20 -32 -49 -49 -28 -9 -10 -23 -34 -37 -41 -55 -73 -86 -94 -102 -103 -80 -38 -1 3 -24 -49 -48 -26 -10 -21 -50 -69 -54 -17 15 17 -5 -31 -45 -43 -32 -23 -19 -21 -27 -24 -14 3 22 35 40 40 42 50 61 69 72 76 84 97 107 108 94 73 62 64 66 45 6 -23 -30 -23 -19 -28 -41 -56 -60 -40 6 49 61 43 25 26 32 22 10 17 39 46 27 11 14 17 -9 -58 -86 -75 -48 -41 -55 -65 
-60 -62 -90 -127 -143 -127 -105 -95 -88 -63 -23 13 30 40 55 68 59 30 6 4 15 19 11 2 1 2 -3 -16 -30 -41 -45 -36 -15 17 47 64 65 56 47 43 43 47 53 56 56 51 50 55 65 69 64 63 68 70 54 23 -4 -7 15 37 39 13 -23 -44 -37 -23 -16 -15 -8 11 26 31 30 34 39 42 45 54 56 37 3 -17 -20 -25 -49 -84 -100 -96 -90 -98 -117 -143 -169 -181 -170 -146 -136 -150 -160 -136 -91 -61 -59 -71 -73 -67 -57 -34 7 51 75 78 72 73 80 88 103 123 137 130 104 79 72 78 82 69 46 25 19 21 19 10 6 5 -5 -31 -58 -61 -41 -21 -20 -30 -34 -30 -35 -53 -79 -103 -114 -108 -90 -81 -100 -135 -141 -95 -29 -2 -25 -53 -41 -1 22 12 2 26 71 101 95 74 66 76 82 71 57 62 90 122 141 152 159 159 +0 0 0 0 0 0 0 -1 -1 -1 -1 -2 -2 -2 -2 -1 -1 -1 -1 -2 -2 -3 -3 -2 -2 -2 -1 0 1 2 2 3 3 2 1 1 1 -1 -2 1 8 16 19 17 14 15 18 19 16 12 11 12 13 11 7 4 2 -4 -13 -20 -22 -23 -29 -40 -50 -56 -60 -63 -63 -62 -62 -64 -66 -64 -62 -63 -65 -64 -58 -51 -41 -22 3 21 26 26 31 44 57 65 68 70 74 79 81 74 61 53 60 72 75 65 58 60 62 56 52 61 70 58 28 8 14 32 31 6 -21 -27 -19 -14 -22 -33 -33 -20 -7 -7 -17 -25 -27 -30 -41 -54 -65 -71 -78 -79 -62 -30 -1 2 -20 -40 -39 -22 -9 -18 -42 -58 -46 -15 12 14 -5 -28 -40 -38 -29 -21 -18 -19 -25 -22 -13 2 20 32 37 37 39 47 57 65 68 72 80 93 103 104 91 71 60 62 64 44 5 -23 -30 -23 -19 -28 -41 -56 -60 -40 5 48 60 42 24 26 32 21 9 16 38 45 26 10 13 16 -9 -58 -86 -75 -48 -41 -55 -64 -59 -61 -88 -124 -139 -123 -102 -92 -85 -61 -22 12 28 37 51 63 54 27 5 3 13 17 9 1 0 1 -3 -15 -27 -36 -39 -31 -13 14 39 53 53 46 38 34 34 37 41 43 43 39 38 41 48 51 47 46 49 50 38 16 -3 -5 10 24 25 8 -15 -29 -24 -15 -10 -10 -5 6 15 18 17 19 21 23 24 28 29 19 1 -9 -10 -13 -24 -40 -47 -44 -41 -44 -51 -61 -71 -75 -69 -58 -53 -57 -60 -50 -33 -22 -21 -24 -24 -22 -18 -11 2 14 21 21 19 19 20 21 24 28 31 28 22 16 14 15 15 12 8 4 3 3 2 1 0 0 -1 -4 -8 -8 -5 -3 -3 -3 -4 -3 -3 -5 -6 -8 -8 -7 -6 -5 -6 -7 -7 -5 -2 -1 -1 -2 -2 -1 0 0 0 0 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 +-27 -24 -14 3 22 35 40 40 42 50 61 69 72 76 84 97 107 108 94 73 62 64 66 45 6 -23 -30 
-23 -19 -28 -41 -56 -60 -40 6 49 61 43 25 26 32 22 10 17 39 46 27 11 14 17 -9 -58 -86 -75 -48 -41 -55 -65 -60 -62 -90 -127 -143 -127 -105 -95 -88 -63 -23 13 30 40 55 68 59 30 6 4 15 19 11 2 1 2 -3 -16 -30 -41 -45 -36 -15 17 47 64 65 56 47 43 43 47 53 56 56 51 50 55 65 69 64 63 68 70 54 23 -4 -7 15 37 39 13 -23 -44 -37 -23 -16 -15 -8 11 26 31 30 34 39 42 45 54 56 37 3 -17 -20 -25 -49 -84 -100 -96 -90 -98 -117 -143 -169 -181 -170 -146 -136 -150 -160 -136 -91 -61 -59 -71 -73 -67 -57 -34 7 51 75 78 72 73 80 88 103 123 137 130 104 79 72 78 82 69 46 25 19 21 19 10 6 5 -5 -31 -58 -61 -41 -21 -20 -30 -34 -30 -35 -53 -79 -103 -114 -108 -90 -81 -100 -135 -141 -95 -29 -2 -25 -53 -41 -1 22 12 2 26 71 101 95 74 66 76 82 71 57 62 90 122 141 152 159 159 147 126 108 91 71 61 71 96 104 74 32 19 38 56 53 44 52 67 62 38 15 9 6 -11 -38 -51 -41 -18 -7 -28 -66 -96 -102 -90 -84 -89 -95 -96 -90 -83 -70 -48 -29 -27 -36 -33 -11 19 47 65 67 38 -8 -43 -45 -35 -47 -82 -108 -100 -71 -61 -81 -108 -115 -92 -56 -31 -29 -46 -60 -53 -33 -26 -38 -51 -45 -27 -20 -29 -42 -51 -68 -101 -129 -130 -104 -75 -58 -47 -30 -12 -3 1 12 38 65 84 96 114 136 156 159 152 155 171 189 189 175 164 170 186 201 214 229 238 228 200 174 162 151 127 88 52 17 -22 -63 -89 -95 -100 -114 -133 -133 -109 -79 -64 -65 -77 -89 -102 -111 -103 -81 -55 -51 -75 -109 -127 -120 -99 -79 -62 -51 -56 -76 -91 -82 -57 -38 -24 +0 -1 -1 0 0 0 0 0 0 0 0 0 0 0 1 1 1 2 1 1 1 1 2 1 0 -1 -2 -2 -1 -2 -3 -4 -4 -3 0 3 4 3 2 2 3 2 1 1 4 5 3 1 1 2 -2 -9 -14 -13 -9 -8 -11 -13 -12 -13 -19 -28 -32 -30 -25 -24 -22 -17 -7 3 8 11 15 20 17 9 1 1 5 6 3 0 0 0 -2 -7 -12 -17 -19 -16 -7 7 20 28 29 26 22 20 20 23 26 28 29 26 26 29 35 38 36 36 39 41 32 13 -3 -5 9 23 25 8 -16 -30 -25 -16 -12 -11 -6 7 18 22 21 25 29 31 34 41 43 28 2 -14 -16 -21 -40 -69 -83 -80 -76 -83 -99 -122 -145 -156 -148 -128 -120 -133 -143 -122 -82 -56 -54 -65 -67 -62 -53 -32 6 47 70 73 68 69 76 84 98 118 132 125 101 76 70 76 80 67 45 24 18 20 18 9 5 4 -5 -31 -58 -61 -41 -21 -20 -30 -34 -30 -35 -53 
-79 -103 -114 -108 -90 -81 -100 -134 -140 -94 -29 -2 -25 -53 -41 -1 21 11 1 25 68 97 90 70 62 71 77 66 53 57 83 112 129 139 144 144 132 113 96 80 62 53 61 83 89 63 27 16 31 46 43 36 42 54 49 30 11 7 4 -9 -30 -39 -31 -14 -6 -21 -48 -69 -73 -64 -59 -62 -65 -65 -60 -55 -46 -31 -19 -17 -23 -21 -7 11 27 37 38 21 -5 -24 -25 -19 -25 -43 -56 -51 -36 -30 -39 -52 -54 -43 -26 -14 -13 -20 -26 -22 -14 -11 -15 -20 -17 -10 -8 -11 -15 -18 -23 -33 -41 -40 -31 -22 -17 -14 -9 -4 -1 0 2 8 14 18 20 23 27 30 30 28 27 29 31 30 27 24 24 25 26 27 28 27 25 21 17 15 14 11 7 4 1 -2 -5 -6 -6 -6 -7 -7 -7 -5 -4 -3 -3 -3 -3 -3 -3 -3 -2 -1 -1 -1 -2 -2 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 0 +-59 -71 -73 -67 -57 -34 7 51 75 78 72 73 80 88 103 123 137 130 104 79 72 78 82 69 46 25 19 21 19 10 6 5 -5 -31 -58 -61 -41 -21 -20 -30 -34 -30 -35 -53 -79 -103 -114 -108 -90 -81 -100 -135 -141 -95 -29 -2 -25 -53 -41 -1 22 12 2 26 71 101 95 74 66 76 82 71 57 62 90 122 141 152 159 159 147 126 108 91 71 61 71 96 104 74 32 19 38 56 53 44 52 67 62 38 15 9 6 -11 -38 -51 -41 -18 -7 -28 -66 -96 -102 -90 -84 -89 -95 -96 -90 -83 -70 -48 -29 -27 -36 -33 -11 19 47 65 67 38 -8 -43 -45 -35 -47 -82 -108 -100 -71 -61 -81 -108 -115 -92 -56 -31 -29 -46 -60 -53 -33 -26 -38 -51 -45 -27 -20 -29 -42 -51 -68 -101 -129 -130 -104 -75 -58 -47 -30 -12 -3 1 12 38 65 84 96 114 136 156 159 152 155 171 189 189 175 164 170 186 201 214 229 238 228 200 174 162 151 127 88 52 17 -22 -63 -89 -95 -100 -114 -133 -133 -109 -79 -64 -65 -77 -89 -102 -111 -103 -81 -55 -51 -75 -109 -127 -120 -99 -79 -62 -51 -56 -76 -91 -82 -57 -38 -24 6 57 102 112 99 96 111 116 94 62 55 71 77 47 -1 -37 -40 -32 -31 -48 -68 -79 -87 -106 -131 -138 -115 -85 -79 -95 -108 -101 -91 -100 -125 -143 -144 -133 -117 -94 -65 -49 -49 -52 -37 -13 -1 -8 -14 2 39 69 72 53 33 32 48 61 62 57 58 65 68 63 62 67 73 70 58 49 42 42 46 52 48 25 -7 -31 -38 -30 -17 -8 -16 -41 -72 -86 -73 -43 -15 2 10 11 12 22 49 85 119 144 165 177 168 143 118 105 99 88 77 77 81 70 43 18 14 25 32 25 14 8 2 -3 -1 10 18 
10 -6 -11 -2 7 2 -11 -22 -33 -51 -77 -99 -108 -108 -108 -111 -118 -132 -153 -175 -186 -179 -153 -111 -63 -22 1 15 39 77 108 113 93 73 69 77 81 +0 -1 -1 -1 -1 -1 0 0 0 0 0 0 0 0 1 1 2 2 2 1 1 2 2 2 1 0 0 0 0 0 0 0 -1 -3 -5 -5 -4 -2 -2 -3 -4 -4 -4 -6 -10 -13 -15 -15 -13 -12 -15 -21 -23 -16 -5 -1 -5 -11 -9 -1 4 2 0 5 16 24 23 18 17 20 22 20 16 18 27 38 45 49 53 54 51 44 39 33 26 23 28 38 42 30 13 8 16 25 24 20 24 32 30 18 7 4 3 -6 -21 -28 -23 -11 -4 -17 -39 -57 -61 -55 -52 -56 -60 -62 -58 -55 -47 -32 -20 -19 -25 -23 -8 13 33 47 48 28 -6 -33 -35 -27 -37 -64 -85 -80 -57 -49 -66 -89 -95 -77 -47 -27 -25 -40 -52 -46 -29 -23 -34 -46 -40 -25 -18 -27 -39 -47 -63 -93 -120 -121 -97 -71 -55 -45 -29 -12 -3 0 11 36 62 81 93 111 132 152 156 149 152 168 186 187 173 162 169 185 200 213 228 237 227 199 173 162 151 126 87 51 16 -22 -63 -89 -95 -100 -114 -132 -132 -108 -78 -64 -64 -76 -88 -100 -109 -101 -79 -54 -50 -73 -105 -122 -115 -94 -75 -59 -48 -53 -71 -85 -76 -53 -35 -22 5 51 91 99 87 84 96 100 81 53 46 60 64 39 -1 -31 -33 -26 -25 -39 -54 -62 -68 -82 -101 -105 -87 -64 -59 -70 -79 -73 -65 -71 -87 -99 -98 -90 -78 -62 -43 -32 -32 -33 -23 -8 -1 -5 -9 1 22 39 40 29 17 17 25 31 31 28 28 31 32 29 28 30 32 30 25 20 17 17 18 20 18 9 -3 -12 -14 -11 -6 -3 -6 -14 -23 -27 -22 -13 -5 0 2 2 3 5 11 20 27 31 35 37 34 28 22 19 17 15 12 12 12 10 6 2 1 3 3 2 1 0 0 -1 -1 0 1 0 -1 -1 -1 0 0 -1 -2 -2 -3 -4 -4 -4 -4 -4 -4 -4 -4 -4 -4 -4 -3 -2 -2 -1 -1 0 0 0 0 0 0 0 0 0 0 0 +-42 -51 -68 -101 -129 -130 -104 -75 -58 -47 -30 -12 -3 1 12 38 65 84 96 114 136 156 159 152 155 171 189 189 175 164 170 186 201 214 229 238 228 200 174 162 151 127 88 52 17 -22 -63 -89 -95 -100 -114 -133 -133 -109 -79 -64 -65 -77 -89 -102 -111 -103 -81 -55 -51 -75 -109 -127 -120 -99 -79 -62 -51 -56 -76 -91 -82 -57 -38 -24 6 57 102 112 99 96 111 116 94 62 55 71 77 47 -1 -37 -40 -32 -31 -48 -68 -79 -87 -106 -131 -138 -115 -85 -79 -95 -108 -101 -91 -100 -125 -143 -144 -133 -117 -94 -65 -49 -49 -52 -37 -13 -1 -8 -14 2 39 69 72 53 33 32 48 
61 62 57 58 65 68 63 62 67 73 70 58 49 42 42 46 52 48 25 -7 -31 -38 -30 -17 -8 -16 -41 -72 -86 -73 -43 -15 2 10 11 12 22 49 85 119 144 165 177 168 143 118 105 99 88 77 77 81 70 43 18 14 25 32 25 14 8 2 -3 -1 10 18 10 -6 -11 -2 7 2 -11 -22 -33 -51 -77 -99 -108 -108 -108 -111 -118 -132 -153 -175 -186 -179 -153 -111 -63 -22 1 15 39 77 108 113 93 73 69 77 81 73 66 76 99 119 122 107 81 52 28 14 5 -8 -32 -56 -64 -54 -50 -65 -93 -116 -135 -159 -175 -162 -123 -84 -75 -91 -100 -83 -49 -16 5 15 15 15 12 -2 -35 -72 -84 -56 -1 53 86 98 98 90 87 95 111 125 131 132 128 120 114 115 126 128 113 88 70 61 53 38 22 12 4 -8 -24 -30 -23 -12 -11 -30 -54 -68 -69 -65 -68 -76 -77 -68 -53 -47 -47 -43 -26 3 31 40 34 29 35 46 52 54 71 97 107 81 36 12 27 55 61 40 14 0 1 11 21 26 19 4 -9 -12 -9 -10 -23 -38 -50 -56 -60 -66 -68 -68 -74 -95 -123 -135 -122 -103 -106 -138 -179 -202 -195 -171 -148 -145 -165 -184 -176 -143 -115 -109 -109 -92 -64 -50 -49 -26 28 82 102 95 106 +0 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 0 0 0 1 1 2 2 3 4 4 5 5 6 8 8 8 8 9 11 12 14 16 18 18 16 15 15 14 13 9 5 1 -3 -9 -12 -14 -15 -18 -21 -22 -19 -14 -12 -12 -15 -18 -21 -24 -23 -18 -13 -13 -19 -28 -33 -32 -27 -22 -18 -15 -17 -24 -29 -27 -19 -13 -9 2 20 37 41 37 37 43 46 38 25 23 30 33 21 -1 -18 -19 -16 -16 -24 -35 -41 -46 -56 -71 -75 -64 -48 -45 -55 -63 -60 -55 -61 -77 -89 -91 -85 -76 -62 -43 -33 -33 -36 -26 -10 -1 -6 -11 1 28 50 53 39 25 24 37 47 48 45 46 52 55 51 50 55 60 58 49 41 35 36 39 45 42 22 -7 -28 -35 -28 -16 -8 -15 -38 -67 -80 -69 -41 -15 1 9 10 11 21 47 81 114 139 160 172 164 139 115 103 97 86 76 76 80 69 42 17 13 24 31 24 13 7 1 -3 -1 9 17 9 -6 -11 -2 6 1 -11 -22 -33 -51 -77 -98 -107 -107 -106 -109 -116 -129 -149 -170 -180 -173 -147 -107 -61 -21 0 14 36 72 100 104 85 67 63 70 73 65 59 67 87 105 107 93 70 44 23 11 4 -7 -27 -47 -53 -45 -41 -53 -75 -92 -106 -124 -135 -124 -94 -64 -56 -68 -74 -61 -36 -12 3 10 10 10 8 -2 -24 -47 -55 -36 -1 32 52 59 58 53 50 54 62 69 72 71 68 63 59 58 63 63 55 42 33 28 24 17 9 5 1 -4 -10 
-13 -10 -5 -5 -12 -20 -25 -25 -23 -23 -25 -25 -22 -17 -14 -14 -13 -8 0 8 10 8 7 8 10 11 11 14 19 21 15 6 2 4 9 9 6 2 0 0 1 2 3 2 0 -1 -2 -1 -1 -3 -4 -4 -5 -5 -5 -5 -5 -5 -6 -7 -7 -6 -5 -4 -5 -6 -6 -5 -4 -4 -3 -3 -3 -3 -2 -2 -1 -1 -1 -1 -1 -1 -1 0 0 0 0 0 +-17 -8 -16 -41 -72 -86 -73 -43 -15 2 10 11 12 22 49 85 119 144 165 177 168 143 118 105 99 88 77 77 81 70 43 18 14 25 32 25 14 8 2 -3 -1 10 18 10 -6 -11 -2 7 2 -11 -22 -33 -51 -77 -99 -108 -108 -108 -111 -118 -132 -153 -175 -186 -179 -153 -111 -63 -22 1 15 39 77 108 113 93 73 69 77 81 73 66 76 99 119 122 107 81 52 28 14 5 -8 -32 -56 -64 -54 -50 -65 -93 -116 -135 -159 -175 -162 -123 -84 -75 -91 -100 -83 -49 -16 5 15 15 15 12 -2 -35 -72 -84 -56 -1 53 86 98 98 90 87 95 111 125 131 132 128 120 114 115 126 128 113 88 70 61 53 38 22 12 4 -8 -24 -30 -23 -12 -11 -30 -54 -68 -69 -65 -68 -76 -77 -68 -53 -47 -47 -43 -26 3 31 40 34 29 35 46 52 54 71 97 107 81 36 12 27 55 61 40 14 0 1 11 21 26 19 4 -9 -12 -9 -10 -23 -38 -50 -56 -60 -66 -68 -68 -74 -95 -123 -135 -122 -103 -106 -138 -179 -202 -195 -171 -148 -145 -165 -184 -176 -143 -115 -109 -109 -92 -64 -50 -49 -26 28 82 102 95 106 139 159 145 115 108 130 155 156 140 125 119 119 120 122 120 109 89 75 71 77 80 75 68 67 69 67 55 37 18 0 -12 -13 -1 8 6 -1 -2 4 9 11 12 21 29 31 30 39 50 57 50 35 18 -6 -30 -44 -36 -14 0 -3 -17 -30 -40 -44 -45 -40 -37 -44 -53 -49 -32 -15 -11 -21 -36 -52 -69 -78 -73 -56 -43 -40 -40 -36 -34 -43 -59 -69 -68 -66 -76 -93 -106 -112 -113 -110 -102 -87 -75 -67 -58 -45 -21 7 25 26 15 16 44 84 105 98 85 89 100 98 77 55 45 45 44 37 31 22 4 -25 -52 -63 -59 -47 -36 -29 -22 -14 -9 -10 -16 -21 -28 -36 -36 -24 -9 -12 -32 -38 -10 32 52 38 21 27 50 65 66 58 57 65 75 77 66 51 +0 -1 -1 -1 -1 -1 -1 -1 -1 0 0 0 0 0 0 1 1 2 3 4 4 4 3 3 3 3 3 3 3 3 2 1 0 1 2 1 1 0 0 -1 -1 1 1 1 -1 -2 -1 0 0 -2 -4 -6 -9 -13 -18 -20 -20 -21 -22 -24 -28 -34 -39 -43 -43 -38 -28 -17 -6 0 4 11 22 32 34 29 23 22 25 27 25 23 27 36 45 47 42 32 21 11 5 2 -4 -15 -26 -30 -26 -25 -32 -47 -59 -70 -83 -93 
-87 -67 -47 -42 -52 -58 -49 -29 -10 3 9 9 9 7 -2 -23 -48 -56 -38 -1 36 59 68 69 64 62 69 81 93 98 100 97 92 88 90 99 102 90 71 57 50 43 31 18 10 3 -7 -21 -27 -21 -11 -10 -27 -49 -62 -63 -59 -62 -70 -71 -63 -50 -44 -45 -41 -25 2 29 38 32 27 33 44 50 52 69 94 104 79 35 11 26 54 60 39 13 0 0 10 20 25 18 3 -9 -12 -9 -10 -23 -38 -50 -56 -60 -66 -68 -68 -74 -95 -123 -134 -121 -102 -105 -136 -176 -198 -191 -167 -144 -141 -160 -178 -170 -137 -110 -104 -104 -87 -61 -47 -46 -25 25 75 93 86 96 125 142 129 102 95 114 135 135 120 107 101 100 100 101 99 89 72 60 57 61 63 58 52 51 52 50 41 27 13 0 -9 -10 -1 5 4 -1 -2 2 5 7 7 13 18 19 18 23 30 34 29 20 10 -4 -17 -25 -20 -8 0 -2 -9 -16 -20 -22 -22 -19 -18 -21 -24 -22 -14 -7 -5 -9 -15 -21 -27 -30 -28 -21 -16 -14 -14 -13 -12 -14 -19 -22 -21 -20 -22 -26 -29 -30 -29 -28 -25 -21 -18 -15 -13 -10 -5 1 4 4 2 2 7 13 16 14 12 12 13 12 9 6 5 4 4 3 2 1 0 -2 -4 -5 -4 -3 -3 -2 -2 -1 -1 -1 -1 -1 -1 -2 -2 -1 -1 -1 -1 -1 -1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +-65 -68 -76 -77 -68 -53 -47 -47 -43 -26 3 31 40 34 29 35 46 52 54 71 97 107 81 36 12 27 55 61 40 14 0 1 11 21 26 19 4 -9 -12 -9 -10 -23 -38 -50 -56 -60 -66 -68 -68 -74 -95 -123 -135 -122 -103 -106 -138 -179 -202 -195 -171 -148 -145 -165 -184 -176 -143 -115 -109 -109 -92 -64 -50 -49 -26 28 82 102 95 106 139 159 145 115 108 130 155 156 140 125 119 119 120 122 120 109 89 75 71 77 80 75 68 67 69 67 55 37 18 0 -12 -13 -1 8 6 -1 -2 4 9 11 12 21 29 31 30 39 50 57 50 35 18 -6 -30 -44 -36 -14 0 -3 -17 -30 -40 -44 -45 -40 -37 -44 -53 -49 -32 -15 -11 -21 -36 -52 -69 -78 -73 -56 -43 -40 -40 -36 -34 -43 -59 -69 -68 -66 -76 -93 -106 -112 -113 -110 -102 -87 -75 -67 -58 -45 -21 7 25 26 15 16 44 84 105 98 85 89 100 98 77 55 45 45 44 37 31 22 4 -25 -52 -63 -59 -47 -36 -29 -22 -14 -9 -10 -16 -21 -28 -36 -36 -24 -9 -12 -32 -38 -10 32 52 38 21 27 50 65 66 58 57 65 75 77 66 51 51 71 91 88 63 41 40 44 39 20 8 10 23 27 10 -18 -25 0 31 23 -30 -68 -47 17 55 25 -47 -91 -52 50 131 110 3 -78 -50 43 79 21 -37 -13 51 47 -24 -46 
58 191 186 13 -146 -88 157 323 196 -128 -301 -118 239 373 122 -252 -359 -110 186 177 -126 -381 -291 48 256 94 -261 -427 -256 18 70 -145 -379 -394 -207 -26 -21 -172 -317 -299 -115 83 119 -15 -147 -119 49 194 206 132 82 106 165 199 190 154 139 166 209 207 132 30 -11 42 128 149 79 -23 -68 -37 8 5 -32 -45 -18 -3 -46 -120 -166 -171 -159 -141 -102 -68 -93 -180 -234 -172 -21 92 97 43 16 45 93 116 100 58 20 19 68 124 140 114 98 134 179 163 78 7 32 127 182 127 20 -25 +0 -1 -1 -1 -1 -1 -1 -1 -1 -1 0 0 0 0 0 0 0 0 1 1 2 3 2 1 0 1 2 2 1 0 0 0 0 1 1 1 0 -1 -2 -1 -1 -3 -5 -6 -7 -8 -9 -10 -10 -11 -15 -20 -22 -21 -18 -19 -26 -35 -40 -40 -36 -32 -33 -38 -44 -43 -36 -30 -29 -30 -26 -19 -15 -15 -8 8 26 33 31 36 48 56 52 42 40 50 61 62 57 52 50 51 52 54 54 50 42 36 34 38 40 38 35 35 36 36 30 20 10 0 -7 -8 -1 4 3 -1 -2 2 5 7 7 13 19 21 20 27 35 40 35 25 13 -5 -23 -34 -28 -11 0 -3 -14 -24 -32 -36 -37 -33 -31 -37 -45 -42 -28 -13 -10 -19 -32 -46 -61 -69 -65 -50 -39 -37 -37 -33 -32 -40 -55 -65 -64 -62 -72 -88 -101 -107 -108 -106 -98 -84 -73 -65 -57 -44 -21 6 24 25 14 15 43 83 104 97 84 88 99 97 76 54 44 44 43 37 31 21 3 -25 -52 -63 -59 -47 -36 -29 -22 -14 -9 -10 -16 -21 -28 -36 -36 -24 -9 -12 -32 -37 -10 30 49 36 19 25 47 61 61 54 52 60 69 70 60 46 46 63 81 78 55 35 34 38 33 17 6 8 19 22 8 -15 -21 0 24 18 -24 -54 -37 13 42 18 -36 -68 -39 36 94 78 2 -55 -35 29 53 14 -25 -9 33 30 -16 -29 35 117 112 7 -87 -52 90 183 109 -71 -164 -64 126 193 62 -127 -179 -54 89 83 -59 -175 -131 21 110 40 -110 -176 -103 7 27 -56 -141 -144 -74 -10 -8 -58 -104 -96 -36 25 35 -5 -42 -33 13 50 52 32 19 24 37 44 41 32 28 32 39 38 23 5 -2 6 19 22 11 -4 -10 -5 0 0 -4 -5 -2 -1 -5 -11 -14 -14 -13 -11 -7 -5 -6 -11 -13 -9 -1 3 3 1 0 1 2 2 2 1 0 0 0 1 1 1 0 0 1 0 0 0 0 0 0 0 0 0 +-40 -36 -34 -43 -59 -69 -68 -66 -76 -93 -106 -112 -113 -110 -102 -87 -75 -67 -58 -45 -21 7 25 26 15 16 44 84 105 98 85 89 100 98 77 55 45 45 44 37 31 22 4 -25 -52 -63 -59 -47 -36 -29 -22 -14 -9 -10 -16 -21 -28 -36 -36 -24 -9 -12 -32 -38 -10 32 52 
38 21 27 50 65 66 58 57 65 75 77 66 51 51 71 91 88 63 41 40 44 39 20 8 10 23 27 10 -18 -25 0 31 23 -30 -68 -47 17 55 25 -47 -91 -52 50 131 110 3 -78 -50 43 79 21 -37 -13 51 47 -24 -46 58 191 186 13 -146 -88 157 323 196 -128 -301 -118 239 373 122 -252 -359 -110 186 177 -126 -381 -291 48 256 94 -261 -427 -256 18 70 -145 -379 -394 -207 -26 -21 -172 -317 -299 -115 83 119 -15 -147 -119 49 194 206 132 82 106 165 199 190 154 139 166 209 207 132 30 -11 42 128 149 79 -23 -68 -37 8 5 -32 -45 -18 -3 -46 -120 -166 -171 -159 -141 -102 -68 -93 -180 -234 -172 -21 92 97 43 16 45 93 116 100 58 20 19 68 124 140 114 98 134 179 163 78 7 32 127 182 127 20 -25 26 96 90 8 -65 -62 5 58 27 -76 -162 -155 -78 -38 -98 -175 -147 -14 73 11 -124 -175 -111 -42 -68 -152 -197 -167 -107 -77 -90 -117 -111 -46 44 87 39 -37 -57 -2 53 42 -16 -45 0 92 164 170 116 61 47 54 35 1 14 88 137 83 -14 -11 118 229 177 7 -113 -93 -10 15 -50 -141 -177 -139 -67 -32 -76 -162 -193 -121 -17 -2 -99 -196 -170 -39 53 6 -123 -183 -98 50 117 51 -61 -95 -16 109 190 187 124 62 70 178 326 383 286 118 56 174 343 362 186 -13 -38 113 244 200 32 -68 -8 103 89 -78 -247 -262 -136 -19 -19 -111 -194 -222 -230 -255 -268 -226 -134 -58 -63 -156 -257 -270 -161 -16 30 -54 -147 -105 59 177 146 50 43 137 209 180 125 133 182 180 131 120 +0 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -2 -2 -2 -2 -2 -2 -2 -2 -1 0 0 0 0 0 1 3 5 5 4 5 6 6 5 4 3 3 3 3 3 2 0 -3 -7 -8 -8 -7 -5 -5 -4 -3 -2 -2 -3 -4 -6 -7 -8 -5 -2 -3 -8 -9 -3 7 12 9 5 7 13 18 19 17 17 20 23 25 22 17 17 25 33 32 23 15 15 17 15 8 3 4 10 12 4 -9 -12 0 15 11 -16 -35 -25 8 29 13 -26 -51 -30 28 76 64 1 -48 -31 26 49 13 -24 -9 33 31 -17 -32 39 132 130 9 -105 -64 114 238 145 -97 -229 -91 184 290 95 -200 -287 -89 150 144 -104 -316 -243 40 216 79 -224 -368 -222 15 61 -129 -337 -352 -186 -24 -20 -157 -291 -276 -107 77 110 -15 -139 -113 46 184 196 126 78 102 159 192 184 150 135 162 205 203 130 29 -11 41 126 147 78 -23 -68 -37 7 4 -32 -45 -18 -3 -46 -120 -166 -171 -159 -141 -102 -68 -93 -179 -233 -171 -21 90 
95 42 15 44 91 113 97 56 19 18 65 119 134 108 93 126 168 153 73 6 29 117 167 116 18 -23 23 86 80 7 -58 -55 4 50 23 -66 -138 -132 -66 -32 -82 -144 -120 -12 58 8 -99 -138 -87 -33 -52 -116 -148 -125 -79 -57 -66 -84 -79 -33 30 59 26 -25 -38 -2 34 27 -11 -29 0 56 99 101 68 35 26 30 19 0 7 47 72 43 -8 -6 58 111 85 3 -53 -43 -5 6 -22 -61 -74 -58 -27 -13 -30 -62 -72 -45 -7 -1 -34 -66 -56 -13 16 1 -37 -54 -28 13 31 13 -16 -24 -4 25 43 41 26 12 14 35 62 70 50 20 9 27 53 53 26 -2 -6 14 29 23 3 -8 -1 10 8 -7 -21 -21 -11 -2 -2 -8 -12 -13 -13 -13 -13 -10 -6 -3 -3 -5 -8 -7 -4 -1 0 -1 -3 -2 0 1 1 0 0 0 0 0 0 0 0 0 0 0 +-21 -172 -317 -299 -115 83 119 -15 -147 -119 49 194 206 132 82 106 165 199 190 154 139 166 209 207 132 30 -11 42 128 149 79 -23 -68 -37 8 5 -32 -45 -18 -3 -46 -120 -166 -171 -159 -141 -102 -68 -93 -180 -234 -172 -21 92 97 43 16 45 93 116 100 58 20 19 68 124 140 114 98 134 179 163 78 7 32 127 182 127 20 -25 26 96 90 8 -65 -62 5 58 27 -76 -162 -155 -78 -38 -98 -175 -147 -14 73 11 -124 -175 -111 -42 -68 -152 -197 -167 -107 -77 -90 -117 -111 -46 44 87 39 -37 -57 -2 53 42 -16 -45 0 92 164 170 116 61 47 54 35 1 14 88 137 83 -14 -11 118 229 177 7 -113 -93 -10 15 -50 -141 -177 -139 -67 -32 -76 -162 -193 -121 -17 -2 -99 -196 -170 -39 53 6 -123 -183 -98 50 117 51 -61 -95 -16 109 190 187 124 62 70 178 326 383 286 118 56 174 343 362 186 -13 -38 113 244 200 32 -68 -8 103 89 -78 -247 -262 -136 -19 -19 -111 -194 -222 -230 -255 -268 -226 -134 -58 -63 -156 -257 -270 -161 -16 30 -54 -147 -105 59 177 146 50 43 137 209 180 125 133 182 180 131 120 175 209 152 71 72 142 163 83 -19 -53 -42 -64 -122 -141 -91 -48 -85 -169 -203 -163 -115 -110 -130 -126 -111 -119 -127 -72 51 132 85 -30 -60 58 201 207 74 -40 1 148 244 189 42 -54 -17 116 232 226 89 -87 -175 -119 14 89 18 -145 -254 -202 -32 87 39 -117 -198 -96 89 157 33 -147 -182 -34 144 176 40 -125 -176 -98 13 52 2 -78 -111 -75 -23 -20 -60 -58 35 148 137 -26 -194 -181 14 200 205 50 -98 -130 -71 -3 39 55 32 -44 -133 -150 -55 77 127 54 -54 -84 
-13 74 108 97 98 109 66 -55 -160 -117 85 280 269 44 -173 -152 97 323 302 79 -94 -54 112 191 84 -101 -188 -109 42 106 2 -193 -305 -224 -15 136 100 -80 -248 -270 -130 84 229 200 27 -135 -128 50 +0 -1 -1 -1 -1 0 0 -1 -1 -1 0 1 1 1 1 1 2 3 3 3 3 4 6 6 4 1 -1 1 6 7 4 -2 -5 -3 0 0 -3 -4 -2 -1 -5 -13 -18 -20 -19 -18 -13 -10 -13 -26 -35 -27 -4 15 16 7 2 8 18 23 20 12 4 4 16 30 34 29 25 36 49 46 22 2 9 39 58 41 6 -9 9 34 32 2 -25 -25 1 23 11 -32 -69 -68 -35 -18 -45 -82 -70 -7 35 5 -63 -90 -58 -23 -37 -83 -109 -94 -61 -45 -53 -70 -67 -28 26 53 24 -24 -37 -2 34 27 -11 -31 0 63 115 120 83 44 34 39 26 0 10 67 105 64 -11 -9 94 183 143 5 -93 -77 -9 12 -43 -120 -152 -120 -59 -28 -67 -144 -172 -109 -16 -2 -90 -179 -156 -36 48 5 -115 -172 -93 47 110 48 -59 -91 -16 105 183 181 120 60 68 174 319 376 281 116 55 172 340 359 184 -13 -38 112 243 199 31 -68 -8 103 89 -78 -247 -262 -136 -19 -19 -111 -194 -221 -229 -253 -266 -224 -133 -58 -62 -154 -252 -264 -157 -16 29 -53 -142 -101 56 168 138 47 40 128 195 167 116 122 167 164 119 108 157 187 135 63 63 124 142 71 -17 -46 -36 -55 -103 -118 -76 -40 -70 -137 -164 -130 -91 -87 -102 -98 -85 -91 -96 -54 37 96 61 -22 -43 40 139 142 50 -27 0 97 158 121 26 -34 -11 71 140 135 52 -51 -101 -68 7 49 9 -78 -135 -105 -17 43 19 -58 -96 -46 41 71 14 -65 -79 -15 60 72 16 -50 -69 -38 4 18 0 -28 -38 -26 -8 -7 -19 -18 10 43 38 -8 -53 -48 3 49 49 11 -23 -29 -16 -1 7 10 6 -9 -24 -26 -10 12 19 8 -8 -12 -2 9 13 11 11 11 6 -6 -15 -11 7 22 20 3 -12 -10 5 18 15 3 -5 -3 4 6 2 -4 -6 -3 0 2 0 -4 -5 -3 -1 1 0 -1 -2 -2 -1 0 0 0 0 -1 -1 0 +-99 -196 -170 -39 53 6 -123 -183 -98 50 117 51 -61 -95 -16 109 190 187 124 62 70 178 326 383 286 118 56 174 343 362 186 -13 -38 113 244 200 32 -68 -8 103 89 -78 -247 -262 -136 -19 -19 -111 -194 -222 -230 -255 -268 -226 -134 -58 -63 -156 -257 -270 -161 -16 30 -54 -147 -105 59 177 146 50 43 137 209 180 125 133 182 180 131 120 175 209 152 71 72 142 163 83 -19 -53 -42 -64 -122 -141 -91 -48 -85 -169 -203 -163 -115 -110 -130 -126 -111 -119 -127 
-72 51 132 85 -30 -60 58 201 207 74 -40 1 148 244 189 42 -54 -17 116 232 226 89 -87 -175 -119 14 89 18 -145 -254 -202 -32 87 39 -117 -198 -96 89 157 33 -147 -182 -34 144 176 40 -125 -176 -98 13 52 2 -78 -111 -75 -23 -20 -60 -58 35 148 137 -26 -194 -181 14 200 205 50 -98 -130 -71 -3 39 55 32 -44 -133 -150 -55 77 127 54 -54 -84 -13 74 108 97 98 109 66 -55 -160 -117 85 280 269 44 -173 -152 97 323 302 79 -94 -54 112 191 84 -101 -188 -109 42 106 2 -193 -305 -224 -15 136 100 -80 -248 -270 -130 84 229 200 27 -135 -128 50 240 283 170 19 -57 -40 22 71 72 14 -74 -129 -108 -32 23 -3 -81 -123 -78 1 13 -70 -152 -142 -58 -17 -87 -195 -226 -159 -87 -94 -150 -160 -96 -18 20 19 7 -18 -68 -101 -60 54 138 110 1 -73 -42 54 123 117 66 35 68 142 183 142 61 44 133 242 244 120 -3 4 124 217 176 32 -83 -66 56 152 118 -23 -147 -160 -97 -56 -73 -89 -52 -8 -35 -120 -156 -58 114 219 182 76 21 50 85 63 16 21 89 136 90 -29 -122 -112 -12 85 88 -16 -147 -200 -139 -37 5 -37 -101 -127 -131 -150 -181 -178 -143 -122 -144 -170 -142 -63 2 8 -25 -37 1 64 107 103 58 8 2 64 161 219 186 93 28 55 148 218 200 117 42 35 91 153 150 62 -51 -90 -17 84 93 -3 +0 -1 -1 -1 0 0 -1 -1 -1 0 0 0 -1 -2 -1 1 3 3 2 1 1 5 10 12 10 4 2 7 16 19 10 -1 -3 7 17 15 2 -6 -1 9 8 -8 -27 -30 -16 -3 -3 -15 -27 -32 -35 -40 -44 -38 -24 -11 -12 -30 -51 -55 -34 -4 6 -13 -35 -26 14 45 38 13 11 38 60 53 38 41 58 58 43 41 61 74 55 26 27 54 64 33 -8 -23 -18 -28 -54 -64 -42 -23 -41 -82 -100 -81 -58 -57 -68 -67 -60 -65 -70 -41 28 75 49 -18 -36 35 123 128 46 -26 0 96 160 125 28 -37 -12 80 162 160 63 -63 -128 -88 10 66 13 -111 -196 -158 -26 68 31 -94 -161 -79 73 129 27 -124 -154 -29 123 151 34 -110 -155 -87 11 46 1 -71 -101 -69 -22 -19 -56 -54 32 138 128 -25 -184 -173 13 191 196 48 -95 -126 -69 -3 38 53 31 -44 -132 -149 -55 76 125 53 -54 -84 -13 73 107 96 97 108 65 -55 -160 -117 84 279 268 43 -173 -152 96 321 299 78 -94 -54 110 188 82 -100 -185 -107 40 102 1 -187 -294 -216 -15 129 95 -76 -235 -254 -122 78 212 184 24 -124 -117 45 216 253 151 16 -51 
-36 19 61 62 11 -63 -110 -91 -27 19 -3 -67 -100 -63 0 10 -55 -119 -110 -45 -13 -66 -146 -167 -117 -63 -68 -107 -113 -67 -13 13 12 4 -12 -45 -65 -39 33 85 67 0 -44 -25 31 70 66 36 19 36 76 96 73 31 22 65 118 117 56 -2 1 55 95 76 13 -35 -28 22 60 45 -9 -55 -59 -35 -20 -25 -30 -17 -3 -11 -37 -47 -17 32 60 49 19 5 12 20 14 3 4 19 28 18 -6 -24 -21 -3 14 14 -3 -23 -30 -20 -6 0 -5 -13 -15 -15 -17 -19 -18 -14 -11 -13 -14 -11 -5 0 0 -2 -3 0 3 4 4 2 0 0 1 4 5 4 1 0 0 2 2 2 1 0 0 0 0 0 0 -1 -1 -1 0 0 0 +-111 -75 -23 -20 -60 -58 35 148 137 -26 -194 -181 14 200 205 50 -98 -130 -71 -3 39 55 32 -44 -133 -150 -55 77 127 54 -54 -84 -13 74 108 97 98 109 66 -55 -160 -117 85 280 269 44 -173 -152 97 323 302 79 -94 -54 112 191 84 -101 -188 -109 42 106 2 -193 -305 -224 -15 136 100 -80 -248 -270 -130 84 229 200 27 -135 -128 50 240 283 170 19 -57 -40 22 71 72 14 -74 -129 -108 -32 23 -3 -81 -123 -78 1 13 -70 -152 -142 -58 -17 -87 -195 -226 -159 -87 -94 -150 -160 -96 -18 20 19 7 -18 -68 -101 -60 54 138 110 1 -73 -42 54 123 117 66 35 68 142 183 142 61 44 133 242 244 120 -3 4 124 217 176 32 -83 -66 56 152 118 -23 -147 -160 -97 -56 -73 -89 -52 -8 -35 -120 -156 -58 114 219 182 76 21 50 85 63 16 21 89 136 90 -29 -122 -112 -12 85 88 -16 -147 -200 -139 -37 5 -37 -101 -127 -131 -150 -181 -178 -143 -122 -144 -170 -142 -63 2 8 -25 -37 1 64 107 103 58 8 2 64 161 219 186 93 28 55 148 218 200 117 42 35 91 153 150 62 -51 -90 -17 84 93 -3 -91 -79 -8 13 -45 -98 -74 -15 -14 -88 -159 -154 -89 -44 -61 -109 -127 -88 -26 2 -23 -62 -64 -25 17 38 43 47 40 14 -9 4 48 85 84 54 18 -7 -8 28 87 117 81 14 -9 40 107 117 62 1 -20 -10 5 11 19 24 18 -4 -24 -30 -20 -10 -13 -29 -44 -42 -28 -27 -55 -85 -81 -39 -8 -28 -81 -113 -100 -68 -50 -43 -24 6 21 20 27 60 104 128 123 95 55 22 20 60 114 133 99 40 6 21 72 115 109 52 -9 -20 25 70 73 50 50 76 75 21 -34 -28 29 49 -31 -166 -253 -229 -140 -90 -132 -215 -241 -187 -123 -135 -211 -263 -241 -190 -177 -202 -193 -121 -39 -27 -90 -149 -126 -29 54 51 -18 -63 -21 78 150 143 81 36 50 112 
168 168 114 53 +0 -1 -1 -1 -1 -1 0 0 0 -1 -2 -2 0 2 2 0 -2 -3 -2 -1 0 1 0 -2 -5 -6 -3 3 6 2 -4 -6 -1 5 7 7 7 9 5 -6 -16 -12 9 31 31 5 -23 -21 13 46 45 12 -16 -9 19 34 15 -20 -37 -23 8 22 0 -45 -72 -55 -4 34 26 -22 -69 -77 -38 25 69 62 8 -45 -43 17 83 100 61 7 -22 -16 8 28 29 5 -32 -56 -48 -15 10 -2 -39 -60 -39 0 6 -36 -79 -75 -32 -10 -48 -109 -129 -92 -51 -56 -90 -97 -59 -12 12 12 4 -12 -45 -68 -41 36 94 76 0 -52 -31 39 89 86 49 26 51 108 141 110 47 34 106 194 197 97 -3 3 103 182 148 27 -72 -57 48 132 103 -21 -131 -143 -88 -51 -67 -82 -48 -8 -33 -112 -146 -55 107 206 172 72 20 47 81 60 15 20 86 132 87 -29 -120 -111 -12 83 87 -16 -146 -199 -139 -37 4 -37 -101 -127 -131 -150 -181 -178 -143 -122 -144 -170 -142 -63 1 7 -25 -37 0 63 105 101 57 7 1 62 157 213 181 90 27 53 142 209 191 111 39 33 85 143 140 57 -48 -84 -16 76 84 -3 -83 -71 -8 11 -40 -87 -65 -14 -13 -76 -136 -131 -75 -37 -51 -90 -104 -72 -21 1 -19 -49 -50 -20 12 28 32 34 29 10 -7 2 34 59 58 37 12 -5 -6 18 56 75 51 8 -6 24 64 69 36 0 -12 -6 2 6 10 12 9 -3 -13 -16 -10 -5 -7 -14 -21 -20 -13 -12 -24 -37 -34 -16 -4 -12 -32 -43 -38 -25 -18 -16 -9 2 6 6 8 18 30 37 34 26 14 5 5 14 27 31 22 8 1 4 14 22 20 9 -2 -4 4 11 11 7 7 10 9 2 -5 -4 3 5 -4 -17 -24 -21 -12 -8 -10 -16 -17 -12 -8 -8 -12 -13 -12 -9 -8 -8 -7 -4 -2 -1 -3 -4 -3 -1 0 0 -1 -1 -1 0 0 0 0 0 0 0 0 0 0 0 +-73 -89 -52 -8 -35 -120 -156 -58 114 219 182 76 21 50 85 63 16 21 89 136 90 -29 -122 -112 -12 85 88 -16 -147 -200 -139 -37 5 -37 -101 -127 -131 -150 -181 -178 -143 -122 -144 -170 -142 -63 2 8 -25 -37 1 64 107 103 58 8 2 64 161 219 186 93 28 55 148 218 200 117 42 35 91 153 150 62 -51 -90 -17 84 93 -3 -91 -79 -8 13 -45 -98 -74 -15 -14 -88 -159 -154 -89 -44 -61 -109 -127 -88 -26 2 -23 -62 -64 -25 17 38 43 47 40 14 -9 4 48 85 84 54 18 -7 -8 28 87 117 81 14 -9 40 107 117 62 1 -20 -10 5 11 19 24 18 -4 -24 -30 -20 -10 -13 -29 -44 -42 -28 -27 -55 -85 -81 -39 -8 -28 -81 -113 -100 -68 -50 -43 -24 6 21 20 27 60 104 128 123 95 55 22 20 60 114 133 99 40 6 21 72 115 109 
52 -9 -20 25 70 73 50 50 76 75 21 -34 -28 29 49 -31 -166 -253 -229 -140 -90 -132 -215 -241 -187 -123 -135 -211 -263 -241 -190 -177 -202 -193 -121 -39 -27 -90 -149 -126 -29 54 51 -18 -63 -21 78 150 143 81 36 50 112 168 168 114 53 42 94 159 183 156 108 73 65 83 116 137 122 82 39 16 11 18 33 41 18 -33 -72 -57 -2 46 52 25 -7 -26 -18 27 82 93 36 -38 -57 -13 28 18 -15 -11 38 79 62 4 -42 -40 -2 29 20 -22 -65 -82 -70 -60 -67 -83 -81 -55 -39 -60 -101 -105 -42 52 99 51 -49 -108 -65 49 139 140 73 23 43 115 165 147 84 37 35 59 69 59 61 81 88 46 -33 -95 -100 -75 -63 -81 -95 -93 -94 -124 -169 -184 -144 -77 -35 -46 -92 -131 -130 -94 -51 -31 -38 -47 -46 -25 6 34 36 -2 -51 -69 -38 17 59 82 105 132 142 124 101 100 119 142 165 188 203 189 152 123 117 123 120 107 97 82 55 29 34 71 100 84 44 33 63 87 60 3 -34 -22 4 +0 -1 -1 -1 -1 -1 -1 -1 0 1 1 0 0 0 1 0 0 0 1 3 2 -1 -4 -4 -1 3 3 -1 -8 -11 -8 -3 0 -3 -8 -10 -11 -13 -17 -17 -14 -13 -16 -20 -17 -8 0 1 -4 -6 0 9 17 17 9 1 0 12 31 44 38 20 6 12 34 52 49 29 11 9 25 43 43 18 -16 -29 -6 27 31 -2 -32 -29 -3 4 -18 -38 -30 -7 -6 -37 -68 -67 -40 -20 -28 -51 -60 -43 -13 0 -12 -32 -34 -14 9 20 23 26 22 8 -6 2 28 51 51 33 11 -5 -6 18 57 77 54 9 -7 27 75 82 44 0 -15 -8 3 8 14 18 13 -4 -19 -24 -16 -9 -11 -24 -37 -35 -24 -23 -47 -73 -70 -34 -7 -25 -72 -100 -89 -61 -45 -39 -22 5 19 18 24 55 96 119 115 89 52 20 19 57 109 128 95 38 5 20 70 112 106 51 -9 -20 24 69 72 49 49 75 74 20 -34 -28 28 48 -31 -166 -253 -229 -140 -90 -132 -215 -241 -187 -123 -135 -210 -261 -239 -188 -175 -200 -190 -119 -39 -27 -88 -145 -123 -29 52 48 -18 -61 -20 73 141 134 75 33 46 103 154 153 103 48 37 84 141 162 137 94 63 56 71 99 116 103 68 32 13 9 14 26 32 14 -27 -57 -45 -2 35 39 18 -6 -20 -14 19 58 65 25 -27 -40 -9 18 11 -10 -8 24 50 38 2 -26 -25 -2 17 11 -13 -37 -46 -39 -33 -36 -44 -43 -29 -20 -30 -50 -51 -20 24 45 22 -22 -47 -28 20 57 56 28 8 16 42 60 52 29 12 11 19 22 18 18 24 25 13 -10 -26 -27 -20 -16 -20 -23 -22 -21 -27 -36 -38 -29 -15 -7 -9 -16 -22 -21 -15 -8 -5 -6 -7 -6 
-4 0 3 3 -1 -5 -7 -4 1 4 6 7 8 9 7 5 5 5 6 7 7 7 6 4 3 2 2 2 2 1 1 0 0 0 0 0 0 0 0 0 0 0 0 -1 -1 0 +-24 6 21 20 27 60 104 128 123 95 55 22 20 60 114 133 99 40 6 21 72 115 109 52 -9 -20 25 70 73 50 50 76 75 21 -34 -28 29 49 -31 -166 -253 -229 -140 -90 -132 -215 -241 -187 -123 -135 -211 -263 -241 -190 -177 -202 -193 -121 -39 -27 -90 -149 -126 -29 54 51 -18 -63 -21 78 150 143 81 36 50 112 168 168 114 53 42 94 159 183 156 108 73 65 83 116 137 122 82 39 16 11 18 33 41 18 -33 -72 -57 -2 46 52 25 -7 -26 -18 27 82 93 36 -38 -57 -13 28 18 -15 -11 38 79 62 4 -42 -40 -2 29 20 -22 -65 -82 -70 -60 -67 -83 -81 -55 -39 -60 -101 -105 -42 52 99 51 -49 -108 -65 49 139 140 73 23 43 115 165 147 84 37 35 59 69 59 61 81 88 46 -33 -95 -100 -75 -63 -81 -95 -93 -94 -124 -169 -184 -144 -77 -35 -46 -92 -131 -130 -94 -51 -31 -38 -47 -46 -25 6 34 36 -2 -51 -69 -38 17 59 82 105 132 142 124 101 100 119 142 165 188 203 189 152 123 117 123 120 107 97 82 55 29 34 71 100 84 44 33 63 87 60 3 -34 -22 4 -5 -64 -138 -178 -167 -134 -127 -163 -208 -216 -181 -145 -146 -171 -177 -144 -101 -97 -141 -183 -170 -107 -54 -58 -92 -91 -32 34 50 21 10 49 111 151 151 132 118 124 150 171 151 81 14 7 55 87 56 -12 -47 -23 22 42 30 14 14 26 28 7 -17 -24 -8 10 2 -28 -56 -65 -62 -69 -84 -96 -93 -70 -38 -18 -28 -53 -56 -9 58 88 56 5 -5 48 115 136 89 10 -51 -61 -24 18 22 -36 -130 -204 -206 -136 -49 -13 -55 -133 -175 -151 -97 -78 -117 -171 -182 -139 -75 -36 -35 -47 -40 2 62 103 101 67 53 91 169 242 288 304 294 259 216 207 241 275 250 169 101 94 129 147 117 51 -10 -35 -22 11 35 33 11 -29 -80 -129 -148 -139 -134 -167 -213 -228 -211 -210 -248 -291 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 0 0 0 1 3 3 1 -1 -1 1 3 3 2 2 4 4 1 -3 -3 2 4 -3 -16 -25 -24 -16 -11 -16 -27 -31 -25 -17 -20 -32 -41 -39 -32 -31 -37 -36 -24 -8 -6 -19 -33 -28 -7 12 12 -5 -17 -6 21 41 40 23 10 15 34 53 54 38 18 14 33 57 68 59 41 28 26 34 48 58 52 36 17 7 5 8 15 20 8 -17 -37 -30 -2 24 28 13 -4 -15 -11 15 48 55 21 -24 -36 -9 17 11 -10 -8 25 53 42 2 -30 -29 -2 20 14 
-17 -48 -62 -53 -46 -52 -65 -64 -44 -31 -48 -82 -86 -35 42 81 42 -42 -92 -56 41 119 121 63 20 37 102 147 131 75 33 31 54 63 54 56 75 82 43 -32 -90 -96 -72 -61 -78 -92 -90 -92 -121 -165 -180 -141 -76 -35 -46 -91 -130 -129 -94 -51 -31 -38 -47 -46 -25 5 33 35 -2 -51 -69 -38 16 58 81 104 131 141 123 100 99 118 140 163 185 200 185 149 120 114 119 116 103 93 78 52 27 32 67 94 79 41 30 58 80 55 2 -32 -21 3 -5 -58 -124 -159 -148 -118 -111 -142 -180 -185 -155 -123 -123 -143 -147 -119 -83 -79 -114 -146 -135 -84 -43 -45 -71 -69 -25 25 36 15 7 35 78 106 104 90 80 83 99 112 98 52 8 4 34 53 33 -8 -28 -14 12 23 16 7 7 13 14 3 -9 -13 -4 4 0 -14 -27 -30 -28 -31 -37 -41 -39 -29 -16 -8 -11 -21 -21 -4 20 30 19 1 -2 15 35 41 26 2 -15 -17 -7 4 5 -9 -32 -49 -48 -31 -11 -3 -12 -27 -34 -28 -18 -14 -20 -28 -29 -21 -11 -5 -5 -6 -5 0 6 11 10 6 4 8 14 19 21 21 19 16 12 11 12 13 11 7 3 3 4 4 3 1 -1 -1 -1 0 0 0 0 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 0 +37 35 59 69 59 61 81 88 46 -33 -95 -100 -75 -63 -81 -95 -93 -94 -124 -169 -184 -144 -77 -35 -46 -92 -131 -130 -94 -51 -31 -38 -47 -46 -25 6 34 36 -2 -51 -69 -38 17 59 82 105 132 142 124 101 100 119 142 165 188 203 189 152 123 117 123 120 107 97 82 55 29 34 71 100 84 44 33 63 87 60 3 -34 -22 4 -5 -64 -138 -178 -167 -134 -127 -163 -208 -216 -181 -145 -146 -171 -177 -144 -101 -97 -141 -183 -170 -107 -54 -58 -92 -91 -32 34 50 21 10 49 111 151 151 132 118 124 150 171 151 81 14 7 55 87 56 -12 -47 -23 22 42 30 14 14 26 28 7 -17 -24 -8 10 2 -28 -56 -65 -62 -69 -84 -96 -93 -70 -38 -18 -28 -53 -56 -9 58 88 56 5 -5 48 115 136 89 10 -51 -61 -24 18 22 -36 -130 -204 -206 -136 -49 -13 -55 -133 -175 -151 -97 -78 -117 -171 -182 -139 -75 -36 -35 -47 -40 2 62 103 101 67 53 91 169 242 288 304 294 259 216 207 241 275 250 169 101 94 129 147 117 51 -10 -35 -22 11 35 33 11 -29 -80 -129 -148 -139 -134 -167 -213 -228 -211 -210 -248 -291 -300 -289 -290 -296 -263 -191 -130 -113 -113 -93 -57 -28 -1 39 83 96 86 97 152 204 200 147 113 131 176 201 187 163 157 175 196 199 180 165 
177 209 234 227 196 167 143 111 66 23 2 3 3 -15 -50 -91 -123 -134 -126 -117 -124 -153 -175 -167 -144 -148 -186 -217 -198 -149 -120 -130 -135 -98 -41 -15 -37 -72 -81 -57 -9 49 86 70 23 6 57 129 142 90 45 65 115 125 88 48 55 94 121 105 58 23 43 104 143 105 28 3 63 135 139 77 29 34 46 12 -60 -118 -135 -130 -125 -125 -128 -134 -135 -130 -129 -142 -156 -151 -125 -107 -108 -107 -91 -71 -74 -99 -117 -106 -76 -49 -36 -41 -59 -71 -58 -25 -4 -7 -6 25 77 100 76 38 36 73 105 90 46 23 50 106 141 125 +0 0 0 0 0 0 0 0 0 -1 -1 -1 -1 -1 -2 -2 -2 -2 -3 -4 -5 -5 -3 -2 -2 -4 -6 -6 -5 -3 -2 -3 -3 -4 -2 0 2 3 -1 -5 -7 -4 1 6 9 12 16 18 17 14 14 18 22 27 32 36 34 28 24 23 25 25 23 22 19 13 7 8 18 26 23 12 9 18 26 18 0 -12 -8 1 -2 -23 -51 -67 -64 -52 -51 -66 -86 -91 -78 -63 -65 -77 -81 -67 -48 -47 -69 -91 -86 -55 -29 -31 -50 -50 -18 19 28 12 5 28 66 91 92 81 74 78 96 111 99 53 9 4 37 60 39 -9 -34 -17 16 30 22 10 10 19 21 5 -14 -19 -7 8 1 -23 -47 -54 -52 -58 -71 -82 -80 -61 -33 -16 -25 -47 -50 -9 52 79 50 4 -5 44 106 126 82 9 -48 -58 -23 17 20 -35 -125 -197 -200 -132 -48 -13 -54 -131 -172 -149 -96 -77 -116 -170 -181 -139 -75 -36 -35 -47 -40 1 61 102 100 67 53 90 168 241 287 303 293 258 215 205 239 272 247 167 99 92 126 144 114 49 -10 -35 -22 10 33 31 10 -28 -77 -123 -140 -131 -126 -156 -198 -211 -195 -193 -227 -264 -271 -260 -259 -263 -233 -168 -114 -98 -98 -80 -49 -24 -1 32 68 78 70 78 122 162 158 115 87 101 134 152 140 121 115 127 141 142 127 115 123 143 159 152 130 109 93 71 41 14 1 1 1 -9 -30 -53 -71 -76 -71 -65 -68 -82 -93 -87 -74 -75 -93 -106 -96 -71 -56 -60 -61 -44 -18 -7 -16 -30 -33 -23 -4 18 31 25 8 2 19 43 46 28 14 19 34 36 24 13 14 24 30 26 14 5 9 23 30 21 5 0 11 24 24 13 4 5 7 1 -9 -17 -18 -17 -16 -15 -15 -15 -14 -13 -13 -13 -14 -13 -10 -8 -8 -7 -6 -5 -4 -5 -6 -5 -4 -2 -2 -2 -2 -2 -2 -1 -1 -1 -1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +56 5 -5 48 115 136 89 10 -51 -61 -24 18 22 -36 -130 -204 -206 -136 -49 -13 -55 -133 -175 -151 -97 -78 -117 -171 -182 -139 -75 -36 -35 -47 -40 2 62 103 101 
67 53 91 169 242 288 304 294 259 216 207 241 275 250 169 101 94 129 147 117 51 -10 -35 -22 11 35 33 11 -29 -80 -129 -148 -139 -134 -167 -213 -228 -211 -210 -248 -291 -300 -289 -290 -296 -263 -191 -130 -113 -113 -93 -57 -28 -1 39 83 96 86 97 152 204 200 147 113 131 176 201 187 163 157 175 196 199 180 165 177 209 234 227 196 167 143 111 66 23 2 3 3 -15 -50 -91 -123 -134 -126 -117 -124 -153 -175 -167 -144 -148 -186 -217 -198 -149 -120 -130 -135 -98 -41 -15 -37 -72 -81 -57 -9 49 86 70 23 6 57 129 142 90 45 65 115 125 88 48 55 94 121 105 58 23 43 104 143 105 28 3 63 135 139 77 29 34 46 12 -60 -118 -135 -130 -125 -125 -128 -134 -135 -130 -129 -142 -156 -151 -125 -107 -108 -107 -91 -71 -74 -99 -117 -106 -76 -49 -36 -41 -59 -71 -58 -25 -4 -7 -6 25 77 100 76 38 36 73 105 90 46 23 50 106 141 125 66 12 6 47 100 123 108 80 67 69 69 64 57 49 35 16 13 31 38 6 -49 -71 -40 2 9 -4 5 35 41 7 -30 -24 17 49 50 35 14 -9 -27 -22 -3 -12 -64 -125 -139 -102 -55 -42 -62 -88 -102 -110 -104 -81 -41 -9 -13 -48 -89 -96 -66 -27 -12 -24 -35 -26 -4 7 10 12 17 13 6 14 44 71 73 56 56 75 86 63 29 17 38 61 55 27 1 -9 -14 -25 -44 -63 -69 -53 -24 2 4 -13 -27 -12 28 58 53 22 -8 -14 5 35 61 68 59 51 54 68 82 94 107 117 109 83 58 51 56 54 38 25 24 26 17 0 -14 -17 -11 -9 -11 -17 -28 -43 -60 -72 -68 -52 -41 -49 -63 -54 -23 -7 -34 -76 -81 -43 -8 -17 -46 -57 -42 -28 +0 0 -1 0 0 0 0 0 -1 -1 -1 0 0 -1 -2 -3 -4 -3 -2 -1 -2 -4 -6 -6 -4 -4 -5 -8 -9 -8 -5 -3 -3 -4 -3 0 4 8 8 6 5 9 18 27 33 37 37 34 29 29 35 42 40 28 17 16 23 27 23 10 -3 -8 -5 2 8 7 2 -8 -21 -35 -41 -40 -39 -50 -65 -72 -68 -69 -83 -100 -105 -104 -106 -111 -100 -74 -52 -46 -47 -39 -25 -13 -1 17 37 44 40 46 74 101 100 75 58 69 94 109 103 91 88 100 114 117 107 99 108 129 146 144 126 108 94 73 44 15 1 2 2 -11 -36 -66 -90 -99 -94 -88 -94 -117 -135 -130 -113 -118 -149 -175 -161 -122 -99 -108 -113 -83 -35 -13 -32 -63 -71 -50 -8 43 76 62 20 5 51 117 130 82 41 60 107 117 82 45 52 89 115 100 55 22 41 100 138 102 27 2 61 132 136 76 28 33 45 11 -60 -118 -135 
-130 -125 -125 -128 -134 -135 -130 -129 -142 -156 -151 -125 -107 -108 -107 -91 -71 -74 -99 -116 -105 -76 -49 -36 -41 -58 -70 -57 -25 -4 -7 -6 24 73 95 72 35 33 68 98 83 42 21 46 97 128 113 59 10 5 41 88 107 94 69 57 59 58 54 47 40 28 13 10 25 30 4 -39 -56 -32 1 6 -4 3 26 30 5 -22 -18 12 34 34 24 9 -7 -18 -15 -2 -8 -41 -79 -87 -63 -34 -26 -37 -52 -59 -63 -59 -45 -23 -5 -7 -25 -46 -49 -33 -14 -6 -12 -17 -12 -2 3 4 5 7 5 2 5 17 26 27 20 19 26 29 21 9 5 11 18 16 7 0 -3 -4 -7 -12 -16 -17 -13 -6 0 0 -3 -6 -3 5 10 9 3 -2 -3 0 5 8 9 7 6 6 7 9 10 10 11 10 7 4 4 4 3 2 1 1 1 0 0 -1 -1 -1 -1 -1 -1 -1 -2 -2 -2 -2 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 0 +57 129 142 90 45 65 115 125 88 48 55 94 121 105 58 23 43 104 143 105 28 3 63 135 139 77 29 34 46 12 -60 -118 -135 -130 -125 -125 -128 -134 -135 -130 -129 -142 -156 -151 -125 -107 -108 -107 -91 -71 -74 -99 -117 -106 -76 -49 -36 -41 -59 -71 -58 -25 -4 -7 -6 25 77 100 76 38 36 73 105 90 46 23 50 106 141 125 66 12 6 47 100 123 108 80 67 69 69 64 57 49 35 16 13 31 38 6 -49 -71 -40 2 9 -4 5 35 41 7 -30 -24 17 49 50 35 14 -9 -27 -22 -3 -12 -64 -125 -139 -102 -55 -42 -62 -88 -102 -110 -104 -81 -41 -9 -13 -48 -89 -96 -66 -27 -12 -24 -35 -26 -4 7 10 12 17 13 6 14 44 71 73 56 56 75 86 63 29 17 38 61 55 27 1 -9 -14 -25 -44 -63 -69 -53 -24 2 4 -13 -27 -12 28 58 53 22 -8 -14 5 35 61 68 59 51 54 68 82 94 107 117 109 83 58 51 56 54 38 25 24 26 17 0 -14 -17 -11 -9 -11 -17 -28 -43 -60 -72 -68 -52 -41 -49 -63 -54 -23 -7 -34 -76 -81 -43 -8 -17 -46 -57 -42 -28 -24 -17 -6 -5 -20 -25 -7 15 17 7 10 29 32 4 -34 -45 -26 -7 -8 -28 -34 -11 34 68 69 51 54 90 133 146 133 127 148 173 172 144 110 89 84 87 89 85 71 49 21 -7 -32 -41 -31 -16 -13 -29 -47 -53 -46 -49 -84 -150 -218 -253 -247 -224 -216 -235 -253 -249 -227 -208 -200 -192 -175 -153 -130 -91 -38 7 23 9 -4 6 34 59 71 68 58 48 44 44 43 44 58 85 109 117 117 130 152 156 123 70 35 34 50 53 32 5 -5 5 18 22 15 16 26 31 25 20 31 58 86 99 99 86 62 36 19 7 -11 -30 -34 -21 -16 -39 -75 -89 -74 -56 -46 -33 
-6 13 7 -11 -3 31 58 53 29 14 11 15 22 31 30 8 -25 -34 -8 28 34 4 +0 0 0 0 0 0 0 0 0 0 0 0 1 1 0 0 0 1 3 2 0 0 1 4 5 3 1 1 2 0 -4 -8 -9 -9 -9 -10 -11 -12 -12 -13 -13 -15 -17 -17 -15 -14 -14 -15 -13 -11 -12 -16 -19 -18 -14 -9 -7 -8 -12 -15 -13 -6 -1 -2 -2 6 19 25 19 10 9 20 30 26 14 7 15 34 47 42 23 4 2 17 37 47 42 32 27 28 29 27 25 21 15 7 6 14 18 2 -25 -37 -21 1 4 -3 2 19 23 4 -18 -15 10 29 30 21 8 -6 -18 -15 -2 -8 -44 -86 -96 -71 -39 -30 -45 -64 -75 -82 -78 -61 -32 -7 -11 -38 -70 -76 -53 -22 -10 -20 -29 -22 -4 5 8 10 14 11 5 12 38 62 64 49 50 67 77 57 26 15 35 56 51 25 0 -9 -14 -24 -42 -61 -67 -52 -24 1 3 -13 -27 -12 27 57 52 21 -8 -14 4 34 60 67 58 50 53 67 81 93 106 117 109 82 57 50 55 53 37 24 23 25 16 0 -14 -17 -11 -9 -11 -17 -28 -43 -59 -70 -66 -51 -40 -48 -61 -52 -22 -7 -33 -72 -76 -41 -8 -16 -43 -53 -39 -26 -22 -16 -6 -5 -18 -22 -7 13 14 5 8 24 26 3 -29 -37 -22 -6 -7 -23 -27 -9 26 52 52 38 40 66 98 106 96 91 104 121 119 99 74 59 55 57 57 54 45 30 13 -5 -20 -25 -19 -10 -8 -17 -27 -30 -25 -27 -45 -78 -112 -128 -123 -110 -104 -112 -118 -114 -102 -92 -87 -82 -74 -63 -53 -36 -15 2 8 3 -2 2 11 19 23 21 18 14 13 12 12 12 15 22 27 29 28 30 34 34 26 14 7 6 9 9 5 0 -1 0 2 3 2 2 3 3 3 2 3 6 8 9 9 7 5 2 1 0 -1 -2 -3 -2 -1 -2 -4 -4 -3 -3 -2 -2 -1 0 0 -1 -1 0 0 0 0 0 0 0 0 0 0 0 -1 -1 -1 0 0 0 +86 63 29 17 38 61 55 27 1 -9 -14 -25 -44 -63 -69 -53 -24 2 4 -13 -27 -12 28 58 53 22 -8 -14 5 35 61 68 59 51 54 68 82 94 107 117 109 83 58 51 56 54 38 25 24 26 17 0 -14 -17 -11 -9 -11 -17 -28 -43 -60 -72 -68 -52 -41 -49 -63 -54 -23 -7 -34 -76 -81 -43 -8 -17 -46 -57 -42 -28 -24 -17 -6 -5 -20 -25 -7 15 17 7 10 29 32 4 -34 -45 -26 -7 -8 -28 -34 -11 34 68 69 51 54 90 133 146 133 127 148 173 172 144 110 89 84 87 89 85 71 49 21 -7 -32 -41 -31 -16 -13 -29 -47 -53 -46 -49 -84 -150 -218 -253 -247 -224 -216 -235 -253 -249 -227 -208 -200 -192 -175 -153 -130 -91 -38 7 23 9 -4 6 34 59 71 68 58 48 44 44 43 44 58 85 109 117 117 130 152 156 123 70 35 34 50 53 32 5 -5 5 18 22 15 16 26 31 25 20 31 
58 86 99 99 86 62 36 19 7 -11 -30 -34 -21 -16 -39 -75 -89 -74 -56 -46 -33 -6 13 7 -11 -3 31 58 53 29 14 11 15 22 31 30 8 -25 -34 -8 28 34 4 -31 -45 -46 -56 -82 -107 -114 -115 -122 -134 -135 -122 -116 -130 -148 -140 -103 -62 -34 -17 5 30 48 60 75 97 113 115 113 117 122 121 113 106 106 104 91 74 70 86 108 120 109 85 66 58 48 25 7 13 37 47 22 -20 -43 -43 -43 -60 -79 -87 -89 -95 -105 -109 -105 -103 -103 -95 -76 -55 -41 -27 -7 13 23 29 39 53 53 44 42 67 102 114 92 59 44 52 63 63 55 52 55 53 40 27 23 19 -2 -39 -69 -70 -55 -53 -69 -79 -69 -48 -45 -63 -87 -101 -101 -88 -66 -42 -27 -30 -47 -66 -77 -83 -81 -74 -69 -65 -57 -38 -14 2 4 -2 -3 5 15 26 36 49 60 65 58 41 25 22 32 43 42 36 33 37 38 23 -7 -37 -40 -4 43 59 34 5 +0 0 0 0 0 0 0 0 0 -1 -1 -1 -1 -1 -1 -1 -1 0 0 -1 -1 -1 0 1 1 0 -1 -1 0 1 3 4 3 3 3 5 6 7 9 10 10 8 6 5 6 6 4 3 3 3 2 0 -3 -3 -2 -2 -3 -4 -6 -9 -13 -16 -16 -12 -10 -12 -16 -14 -7 -2 -10 -22 -24 -13 -3 -6 -15 -19 -15 -10 -9 -7 -3 -2 -8 -10 -3 6 6 2 4 12 14 1 -16 -21 -13 -4 -4 -14 -18 -6 17 35 36 27 29 50 75 83 77 74 88 104 105 89 69 56 54 56 58 56 47 33 14 -5 -23 -30 -23 -12 -10 -22 -35 -40 -35 -38 -65 -117 -171 -201 -197 -180 -175 -192 -208 -207 -190 -175 -170 -164 -150 -132 -113 -80 -34 6 20 8 -4 5 30 53 65 62 53 44 41 41 40 41 54 80 103 112 112 125 146 151 119 68 34 33 49 52 31 4 -5 4 17 21 14 15 25 30 24 19 30 57 85 99 99 85 61 35 18 6 -11 -30 -34 -21 -16 -39 -75 -89 -74 -56 -46 -33 -6 12 6 -11 -3 29 55 50 27 13 10 14 20 29 28 7 -24 -32 -8 25 30 3 -28 -41 -42 -50 -73 -94 -100 -100 -106 -115 -115 -104 -98 -109 -123 -116 -85 -51 -28 -14 3 23 37 46 57 73 84 85 83 85 88 86 80 74 73 71 61 49 46 56 70 77 69 53 40 35 29 14 4 7 21 26 12 -12 -24 -24 -23 -32 -41 -44 -45 -47 -51 -52 -49 -48 -47 -42 -33 -24 -18 -12 -3 5 8 11 14 19 18 15 14 22 33 36 28 17 13 15 17 17 14 13 14 13 9 6 5 4 -1 -9 -14 -14 -11 -10 -13 -14 -12 -8 -7 -10 -13 -14 -14 -12 -9 -5 -4 -4 -5 -7 -8 -8 -7 -6 -6 -5 -4 -3 -1 0 0 -1 -1 0 0 0 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 -1 -1 -1 -1 0 0 0 0 +34 59 71 68 
58 48 44 44 43 44 58 85 109 117 117 130 152 156 123 70 35 34 50 53 32 5 -5 5 18 22 15 16 26 31 25 20 31 58 86 99 99 86 62 36 19 7 -11 -30 -34 -21 -16 -39 -75 -89 -74 -56 -46 -33 -6 13 7 -11 -3 31 58 53 29 14 11 15 22 31 30 8 -25 -34 -8 28 34 4 -31 -45 -46 -56 -82 -107 -114 -115 -122 -134 -135 -122 -116 -130 -148 -140 -103 -62 -34 -17 5 30 48 60 75 97 113 115 113 117 122 121 113 106 106 104 91 74 70 86 108 120 109 85 66 58 48 25 7 13 37 47 22 -20 -43 -43 -43 -60 -79 -87 -89 -95 -105 -109 -105 -103 -103 -95 -76 -55 -41 -27 -7 13 23 29 39 53 53 44 42 67 102 114 92 59 44 52 63 63 55 52 55 53 40 27 23 19 -2 -39 -69 -70 -55 -53 -69 -79 -69 -48 -45 -63 -87 -101 -101 -88 -66 -42 -27 -30 -47 -66 -77 -83 -81 -74 -69 -65 -57 -38 -14 2 4 -2 -3 5 15 26 36 49 60 65 58 41 25 22 32 43 42 36 33 37 38 23 -7 -37 -40 -4 43 59 34 5 12 45 53 23 -4 9 41 40 -3 -42 -39 -8 13 16 17 24 28 26 26 34 40 38 39 52 69 78 71 55 40 19 -4 -23 -29 -21 -9 1 2 -7 -30 -57 -77 -79 -62 -38 -21 -17 -20 -30 -45 -63 -69 -53 -27 -7 0 5 10 -5 -37 -67 -71 -54 -43 -51 -62 -61 -53 -48 -40 -17 21 54 69 74 74 68 56 56 84 132 167 168 150 139 149 158 148 119 95 93 105 106 78 31 -2 -10 -11 -35 -78 -113 -123 -127 -149 -183 -199 -182 -155 -155 -179 -202 -207 -197 -181 -161 -138 -122 -125 -142 -148 -137 -117 -104 -103 -98 -83 -60 -39 -16 6 18 16 17 44 97 147 174 174 158 139 127 138 169 205 215 199 179 168 160 145 134 137 139 116 74 45 53 75 80 67 62 +0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 2 2 2 1 0 0 1 1 1 0 -1 0 0 1 0 0 1 2 1 1 2 4 7 9 9 8 6 4 2 0 -2 -4 -5 -4 -3 -7 -13 -15 -13 -10 -9 -7 -2 2 1 -3 -1 7 13 12 7 3 2 4 6 8 8 2 -8 -11 -3 9 11 1 -11 -17 -17 -21 -32 -42 -46 -47 -51 -57 -58 -53 -52 -59 -68 -66 -49 -30 -17 -9 2 15 24 31 40 52 62 64 64 67 71 71 67 64 64 64 57 47 45 55 71 79 73 57 45 40 33 17 5 9 27 34 16 -16 -33 -33 -34 -47 -62 -69 -71 -77 -86 -89 -87 -86 -86 -80 -65 -47 -36 -24 -7 11 20 25 34 47 47 39 38 61 93 104 85 54 41 48 59 59 52 49 52 50 38 26 22 18 -2 -39 -68 -69 -54 -53 -68 -78 -69 -48 -45 -63 -87 -101 -101 -88 
-66 -42 -27 -30 -47 -66 -77 -83 -81 -74 -69 -65 -57 -38 -14 1 3 -2 -3 4 14 25 35 48 58 63 56 39 24 21 30 41 40 34 31 35 35 21 -7 -35 -38 -4 39 54 30 4 10 40 47 20 -4 7 35 34 -3 -36 -34 -7 10 13 14 19 22 21 20 27 31 29 30 40 52 59 53 40 29 13 -3 -17 -21 -15 -7 0 1 -5 -20 -38 -51 -51 -40 -24 -14 -11 -13 -18 -27 -37 -40 -31 -16 -4 0 2 5 -3 -19 -34 -36 -27 -21 -25 -29 -28 -24 -22 -18 -8 8 22 27 29 28 25 20 20 29 46 57 56 49 44 46 48 44 34 26 25 28 27 19 7 -1 -3 -3 -8 -17 -24 -25 -25 -29 -34 -36 -32 -26 -25 -28 -31 -30 -28 -25 -21 -17 -15 -15 -16 -16 -14 -11 -10 -9 -8 -7 -5 -3 -2 0 1 0 0 2 4 5 6 5 4 3 3 3 3 3 3 2 2 1 1 1 0 0 0 0 0 0 0 0 0 0 0 +42 67 102 114 92 59 44 52 63 63 55 52 55 53 40 27 23 19 -2 -39 -69 -70 -55 -53 -69 -79 -69 -48 -45 -63 -87 -101 -101 -88 -66 -42 -27 -30 -47 -66 -77 -83 -81 -74 -69 -65 -57 -38 -14 2 4 -2 -3 5 15 26 36 49 60 65 58 41 25 22 32 43 42 36 33 37 38 23 -7 -37 -40 -4 43 59 34 5 12 45 53 23 -4 9 41 40 -3 -42 -39 -8 13 16 17 24 28 26 26 34 40 38 39 52 69 78 71 55 40 19 -4 -23 -29 -21 -9 1 2 -7 -30 -57 -77 -79 -62 -38 -21 -17 -20 -30 -45 -63 -69 -53 -27 -7 0 5 10 -5 -37 -67 -71 -54 -43 -51 -62 -61 -53 -48 -40 -17 21 54 69 74 74 68 56 56 84 132 167 168 150 139 149 158 148 119 95 93 105 106 78 31 -2 -10 -11 -35 -78 -113 -123 -127 -149 -183 -199 -182 -155 -155 -179 -202 -207 -197 -181 -161 -138 -122 -125 -142 -148 -137 -117 -104 -103 -98 -83 -60 -39 -16 6 18 16 17 44 97 147 174 174 158 139 127 138 169 205 215 199 179 168 160 145 134 137 139 116 74 45 53 75 80 67 62 76 86 71 41 23 30 51 66 59 28 -6 -28 -30 -29 -38 -59 -69 -59 -42 -42 -62 -88 -96 -85 -69 -60 -67 -88 -112 -130 -135 -129 -115 -103 -95 -97 -105 -106 -88 -65 -54 -60 -69 -59 -29 3 28 41 41 26 4 -4 9 34 47 38 20 3 -9 -23 -32 -32 -24 -15 -10 -4 1 0 -14 -26 -27 -22 -23 -33 -37 -29 -18 -17 -25 -31 -26 -14 6 32 57 67 58 52 75 118 155 162 148 136 134 139 141 136 120 91 57 34 27 33 43 47 37 9 -26 -46 -44 -32 -33 -51 -71 -81 -84 -94 -104 -99 -84 -79 -89 -99 -94 -85 -91 -106 -96 -50 10 44 53 
60 75 79 68 60 73 91 88 58 30 23 31 33 20 1 -7 -2 7 13 14 16 17 16 7 4 21 47 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 -1 -1 -2 -2 -2 -2 -3 -4 -3 -3 -3 -4 -5 -7 -7 -6 -5 -4 -3 -3 -5 -7 -8 -9 -9 -9 -9 -8 -8 -6 -2 0 0 -1 -1 0 2 4 6 9 11 13 12 8 5 5 7 10 10 9 8 9 10 6 -3 -12 -13 -2 13 19 11 1 4 16 19 8 -2 3 16 16 -2 -18 -17 -4 5 7 7 11 13 12 12 16 20 19 20 27 36 42 39 30 22 10 -3 -14 -18 -13 -6 0 1 -5 -20 -38 -51 -53 -42 -26 -15 -12 -15 -22 -33 -46 -51 -40 -21 -6 0 3 7 -4 -30 -53 -57 -44 -35 -42 -51 -51 -45 -41 -34 -15 17 46 59 64 64 60 49 49 75 119 151 153 137 127 137 146 137 111 89 87 99 100 74 29 -2 -10 -11 -34 -76 -111 -121 -125 -147 -180 -197 -180 -154 -154 -178 -201 -206 -197 -181 -161 -138 -122 -125 -142 -148 -137 -117 -104 -103 -98 -83 -60 -39 -16 5 17 15 16 43 95 145 171 171 155 136 124 134 164 198 207 191 171 160 152 137 126 129 130 108 68 41 48 69 73 61 56 68 77 63 36 20 26 44 57 50 23 -6 -24 -26 -25 -32 -49 -57 -48 -34 -34 -50 -70 -75 -66 -53 -46 -51 -66 -83 -95 -98 -93 -82 -73 -67 -67 -72 -72 -59 -43 -36 -39 -44 -38 -18 1 16 24 24 15 2 -3 5 18 25 20 10 1 -5 -12 -16 -16 -12 -8 -5 -2 0 0 -7 -12 -12 -10 -10 -14 -15 -12 -7 -7 -9 -11 -9 -5 1 10 17 20 17 15 21 32 41 42 37 33 32 32 32 30 25 19 11 6 5 6 7 8 6 1 -5 -7 -7 -5 -5 -7 -9 -10 -10 -11 -11 -10 -8 -8 -8 -8 -8 -7 -7 -7 -6 -3 0 2 2 2 2 2 2 1 2 2 2 1 0 0 0 0 0 0 -1 -1 0 0 0 0 0 0 0 0 0 0 +167 168 150 139 149 158 148 119 95 93 105 106 78 31 -2 -10 -11 -35 -78 -113 -123 -127 -149 -183 -199 -182 -155 -155 -179 -202 -207 -197 -181 -161 -138 -122 -125 -142 -148 -137 -117 -104 -103 -98 -83 -60 -39 -16 6 18 16 17 44 97 147 174 174 158 139 127 138 169 205 215 199 179 168 160 145 134 137 139 116 74 45 53 75 80 67 62 76 86 71 41 23 30 51 66 59 28 -6 -28 -30 -29 -38 -59 -69 -59 -42 -42 -62 -88 -96 -85 -69 -60 -67 -88 -112 -130 -135 -129 -115 -103 -95 -97 -105 -106 -88 -65 -54 -60 -69 -59 -29 3 28 41 41 26 4 -4 9 34 47 38 20 3 -9 -23 -32 -32 -24 -15 -10 -4 1 0 -14 -26 -27 -22 -23 -33 -37 -29 -18 -17 -25 -31 -26 -14 6 32 57 67 
58 52 75 118 155 162 148 136 134 139 141 136 120 91 57 34 27 33 43 47 37 9 -26 -46 -44 -32 -33 -51 -71 -81 -84 -94 -104 -99 -84 -79 -89 -99 -94 -85 -91 -106 -96 -50 10 44 53 60 75 79 68 60 73 91 88 58 30 23 31 33 20 1 -7 -2 7 13 14 16 17 16 7 4 21 47 57 36 13 16 37 41 12 -29 -48 -36 -9 15 24 24 21 21 22 18 7 3 9 19 18 6 -17 -48 -77 -85 -62 -25 -2 0 0 5 -1 -35 -76 -96 -85 -66 -63 -73 -80 -75 -63 -53 -57 -77 -105 -119 -112 -98 -91 -88 -78 -56 -34 -18 -6 13 37 53 51 40 31 32 38 37 29 28 47 73 93 100 106 112 106 84 51 28 22 26 30 29 23 15 8 6 4 -3 -16 -29 -41 -57 -81 -95 -89 -73 -67 -87 -112 -122 -117 -114 -119 -121 -106 -87 -86 -109 -133 -123 -75 -13 28 35 22 9 10 24 48 77 100 112 108 102 108 138 177 198 187 157 144 160 185 189 168 146 139 137 125 99 71 51 36 30 30 24 -1 -44 -77 -83 -71 -58 -55 -62 -85 -124 -163 -171 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 -1 -1 -1 -1 -2 -3 -4 -4 -5 -7 -8 -8 -7 -8 -9 -11 -12 -12 -12 -11 -10 -10 -10 -12 -14 -13 -12 -11 -12 -12 -10 -8 -5 -3 0 2 2 2 7 16 25 31 32 30 27 25 28 36 45 49 46 43 41 40 38 36 37 39 33 22 13 16 23 26 22 21 26 30 25 15 8 11 20 26 24 11 -3 -13 -14 -14 -18 -28 -33 -29 -21 -21 -32 -46 -50 -45 -37 -33 -37 -50 -64 -75 -79 -77 -69 -63 -59 -61 -66 -68 -57 -43 -36 -40 -47 -41 -20 2 19 29 29 18 2 -3 6 25 35 29 15 2 -8 -19 -26 -26 -20 -13 -9 -4 0 0 -12 -23 -24 -19 -20 -29 -33 -26 -16 -16 -23 -28 -24 -13 5 29 52 62 54 48 70 111 146 154 141 130 128 133 136 131 116 88 55 33 26 32 42 46 36 8 -26 -46 -44 -32 -33 -51 -71 -81 -84 -94 -104 -99 -84 -79 -89 -99 -94 -85 -91 -106 -96 -50 9 43 52 59 74 77 66 58 71 88 85 56 29 22 29 31 19 0 -7 -2 6 12 13 14 15 14 6 3 19 42 51 32 11 14 32 35 10 -26 -42 -31 -8 12 20 20 17 17 17 14 5 2 7 14 14 4 -13 -37 -58 -64 -46 -19 -2 0 0 3 -1 -25 -52 -65 -57 -44 -42 -47 -51 -48 -40 -33 -35 -47 -62 -70 -65 -56 -51 -49 -43 -30 -18 -10 -4 6 18 25 24 18 14 14 17 16 12 11 19 29 37 39 41 42 39 30 18 9 7 8 9 9 7 4 2 1 1 -1 -5 -8 -11 -15 -20 -23 -21 -17 -15 -19 -23 -25 -23 -22 -22 -21 -18 -14 -14 -17 -20 -17 -10 -2 3 4 2 
0 1 2 4 6 8 8 8 7 7 8 10 11 9 7 6 6 7 6 5 4 3 3 2 2 1 0 0 0 0 0 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 0 +-26 -14 6 32 57 67 58 52 75 118 155 162 148 136 134 139 141 136 120 91 57 34 27 33 43 47 37 9 -26 -46 -44 -32 -33 -51 -71 -81 -84 -94 -104 -99 -84 -79 -89 -99 -94 -85 -91 -106 -96 -50 10 44 53 60 75 79 68 60 73 91 88 58 30 23 31 33 20 1 -7 -2 7 13 14 16 17 16 7 4 21 47 57 36 13 16 37 41 12 -29 -48 -36 -9 15 24 24 21 21 22 18 7 3 9 19 18 6 -17 -48 -77 -85 -62 -25 -2 0 0 5 -1 -35 -76 -96 -85 -66 -63 -73 -80 -75 -63 -53 -57 -77 -105 -119 -112 -98 -91 -88 -78 -56 -34 -18 -6 13 37 53 51 40 31 32 38 37 29 28 47 73 93 100 106 112 106 84 51 28 22 26 30 29 23 15 8 6 4 -3 -16 -29 -41 -57 -81 -95 -89 -73 -67 -87 -112 -122 -117 -114 -119 -121 -106 -87 -86 -109 -133 -123 -75 -13 28 35 22 9 10 24 48 77 100 112 108 102 108 138 177 198 187 157 144 160 185 189 168 146 139 137 125 99 71 51 36 30 30 24 -1 -44 -77 -83 -71 -58 -55 -62 -85 -124 -163 -171 -139 -91 -62 -66 -85 -98 -92 -78 -67 -69 -78 -85 -75 -48 -14 20 41 50 52 57 68 72 56 21 -9 -11 17 55 82 87 80 73 72 74 77 81 88 93 89 72 55 46 51 62 69 75 79 71 46 11 -20 -44 -60 -68 -64 -53 -51 -62 -74 -72 -67 -73 -90 -103 -99 -89 -84 -77 -62 -41 -31 -31 -22 0 23 25 12 7 20 40 48 44 41 47 54 54 44 31 22 5 -24 -63 -93 -107 -108 -106 -94 -62 -24 -4 -14 -28 -20 2 3 -17 -32 -16 8 6 -16 -21 9 45 48 22 4 13 31 28 2 -28 -41 -41 -42 -55 -75 -83 -66 -26 8 12 -16 -50 -56 -26 23 61 72 58 41 37 49 62 61 47 37 45 63 74 66 45 20 -1 -14 -22 -29 -38 -53 -68 +0 -1 0 0 0 0 0 0 0 0 1 1 1 1 1 2 2 2 2 2 1 0 0 1 1 1 1 0 -2 -3 -3 -2 -3 -4 -6 -7 -7 -8 -10 -10 -9 -9 -10 -12 -12 -11 -12 -15 -14 -8 1 6 8 9 12 14 12 11 14 18 18 12 6 5 7 7 4 0 -2 -1 1 3 4 4 5 4 2 1 7 16 19 12 4 5 14 15 4 -12 -20 -16 -4 6 10 10 9 9 10 8 3 1 4 9 9 3 -10 -27 -43 -48 -36 -15 -2 0 0 3 -1 -22 -48 -62 -55 -43 -42 -49 -54 -52 -44 -37 -41 -55 -76 -87 -82 -73 -68 -67 -60 -43 -27 -15 -5 10 29 42 41 32 25 26 31 31 24 23 40 62 80 87 93 98 94 74 45 25 19 23 27 26 21 13 7 5 3 -3 -16 -28 -40 -55 
-78 -92 -87 -71 -66 -85 -110 -120 -115 -113 -118 -120 -105 -87 -86 -109 -133 -123 -75 -13 27 34 21 8 9 24 48 76 99 111 107 101 107 137 176 196 185 155 142 158 182 186 165 143 136 133 121 96 68 49 34 28 28 22 -1 -42 -73 -79 -67 -55 -52 -58 -79 -114 -149 -156 -126 -82 -56 -59 -76 -87 -81 -68 -58 -60 -67 -72 -63 -41 -12 16 33 40 41 45 53 56 43 16 -7 -9 12 40 60 63 57 52 51 51 53 55 59 62 59 47 35 29 32 38 42 45 47 42 27 6 -12 -25 -34 -38 -35 -29 -27 -33 -38 -37 -34 -36 -44 -49 -46 -41 -38 -34 -27 -18 -13 -13 -9 0 8 9 4 2 7 13 16 14 13 15 16 16 13 9 6 1 -7 -17 -24 -27 -27 -25 -22 -14 -6 -1 -3 -6 -4 0 0 -3 -6 -3 1 0 -3 -3 1 5 5 2 0 1 3 2 0 -3 -4 -4 -4 -4 -6 -6 -4 -2 0 0 -1 -3 -3 -1 0 1 2 1 0 0 0 1 0 0 0 0 0 0 0 0 0 -1 -1 -1 -1 -1 -1 0 +22 26 30 29 23 15 8 6 4 -3 -16 -29 -41 -57 -81 -95 -89 -73 -67 -87 -112 -122 -117 -114 -119 -121 -106 -87 -86 -109 -133 -123 -75 -13 28 35 22 9 10 24 48 77 100 112 108 102 108 138 177 198 187 157 144 160 185 189 168 146 139 137 125 99 71 51 36 30 30 24 -1 -44 -77 -83 -71 -58 -55 -62 -85 -124 -163 -171 -139 -91 -62 -66 -85 -98 -92 -78 -67 -69 -78 -85 -75 -48 -14 20 41 50 52 57 68 72 56 21 -9 -11 17 55 82 87 80 73 72 74 77 81 88 93 89 72 55 46 51 62 69 75 79 71 46 11 -20 -44 -60 -68 -64 -53 -51 -62 -74 -72 -67 -73 -90 -103 -99 -89 -84 -77 -62 -41 -31 -31 -22 0 23 25 12 7 20 40 48 44 41 47 54 54 44 31 22 5 -24 -63 -93 -107 -108 -106 -94 -62 -24 -4 -14 -28 -20 2 3 -17 -32 -16 8 6 -16 -21 9 45 48 22 4 13 31 28 2 -28 -41 -41 -42 -55 -75 -83 -66 -26 8 12 -16 -50 -56 -26 23 61 72 58 41 37 49 62 61 47 37 45 63 74 66 45 20 -1 -14 -22 -29 -38 -53 -68 -75 -72 -58 -43 -28 -15 -6 0 -5 -14 -21 -21 -19 -18 -9 9 30 36 26 12 6 5 3 5 21 39 35 19 23 62 115 146 143 123 112 114 124 125 105 61 27 31 61 77 57 16 -17 -35 -48 -51 -51 -60 -83 -100 -93 -72 -67 -77 -67 -33 -8 -19 -48 -60 -56 -67 -96 -114 -100 -69 -47 -44 -52 -61 -60 -41 -18 -11 -17 -17 6 29 28 7 1 23 47 47 28 11 16 30 35 31 30 47 74 82 55 13 -7 9 27 17 -13 -28 -17 7 26 27 12 -14 -46 -70 -82 -80 -65 
-43 -29 -42 -70 -74 -26 54 105 96 46 6 4 39 78 92 72 46 43 60 57 14 -50 -86 -71 -31 -3 -5 -28 -46 -45 -25 -2 7 4 -3 -5 -4 -5 -9 -13 -18 -24 -21 +0 0 0 0 0 0 0 0 0 -1 -1 -1 -1 -1 -2 -2 -2 -2 -2 -3 -3 -4 -4 -4 -5 -5 -5 -4 -5 -6 -8 -8 -5 -1 2 2 1 0 0 2 4 7 10 12 12 12 13 18 24 28 27 24 23 26 31 33 30 27 27 27 26 21 15 11 8 7 7 6 -1 -12 -22 -24 -21 -18 -17 -20 -28 -41 -55 -59 -49 -33 -23 -25 -33 -38 -37 -32 -28 -29 -34 -37 -34 -22 -7 9 19 24 25 28 34 36 29 11 -5 -6 9 30 46 49 46 43 43 44 47 50 55 59 57 46 36 30 34 42 47 52 55 50 32 7 -15 -33 -45 -52 -49 -41 -40 -49 -59 -57 -54 -59 -73 -85 -82 -74 -71 -65 -53 -35 -27 -27 -20 0 20 22 10 6 17 36 43 40 37 43 49 50 41 29 20 4 -23 -60 -89 -103 -104 -103 -91 -61 -24 -4 -14 -28 -20 1 2 -17 -32 -16 7 5 -16 -21 8 44 47 21 3 12 30 28 2 -28 -41 -41 -42 -55 -75 -83 -66 -26 7 11 -16 -50 -56 -26 22 59 70 56 39 35 47 59 58 45 35 42 59 70 62 42 18 -1 -13 -21 -27 -35 -49 -62 -68 -65 -52 -39 -25 -14 -6 0 -5 -12 -18 -18 -16 -16 -8 7 24 29 20 9 4 3 2 3 16 29 26 14 16 45 83 104 101 86 77 78 84 84 69 40 17 19 38 48 35 9 -11 -21 -29 -30 -30 -34 -47 -56 -51 -39 -36 -41 -35 -17 -4 -10 -24 -29 -27 -31 -44 -51 -44 -30 -20 -19 -21 -25 -24 -16 -7 -5 -7 -6 2 9 9 2 0 7 13 13 7 3 4 7 8 7 7 11 16 18 11 2 -2 1 5 3 -3 -5 -3 1 4 4 1 -2 -7 -9 -11 -10 -8 -5 -3 -5 -7 -7 -3 4 7 6 3 0 0 2 4 4 3 1 1 2 1 0 -2 -3 -2 -1 -1 -1 -1 -1 -1 -1 -1 0 0 -1 -1 -1 -1 -1 -1 -1 -1 0 +48 44 41 47 54 54 44 31 22 5 -24 -63 -93 -107 -108 -106 -94 -62 -24 -4 -14 -28 -20 2 3 -17 -32 -16 8 6 -16 -21 9 45 48 22 4 13 31 28 2 -28 -41 -41 -42 -55 -75 -83 -66 -26 8 12 -16 -50 -56 -26 23 61 72 58 41 37 49 62 61 47 37 45 63 74 66 45 20 -1 -14 -22 -29 -38 -53 -68 -75 -72 -58 -43 -28 -15 -6 0 -5 -14 -21 -21 -19 -18 -9 9 30 36 26 12 6 5 3 5 21 39 35 19 23 62 115 146 143 123 112 114 124 125 105 61 27 31 61 77 57 16 -17 -35 -48 -51 -51 -60 -83 -100 -93 -72 -67 -77 -67 -33 -8 -19 -48 -60 -56 -67 -96 -114 -100 -69 -47 -44 -52 -61 -60 -41 -18 -11 -17 -17 6 29 28 7 1 23 47 47 28 11 16 30 35 31 30 47 
74 82 55 13 -7 9 27 17 -13 -28 -17 7 26 27 12 -14 -46 -70 -82 -80 -65 -43 -29 -42 -70 -74 -26 54 105 96 46 6 4 39 78 92 72 46 43 60 57 14 -50 -86 -71 -31 -3 -5 -28 -46 -45 -25 -2 7 4 -3 -5 -4 -5 -9 -13 -18 -24 -21 -1 34 58 52 24 10 29 60 71 60 58 82 113 121 104 93 96 95 72 48 50 69 75 55 30 18 12 -2 -24 -40 -45 -44 -46 -50 -60 -79 -98 -113 -127 -143 -149 -128 -89 -58 -51 -58 -63 -63 -71 -84 -94 -97 -100 -97 -81 -54 -30 -12 4 16 16 4 3 29 63 77 73 75 92 104 96 83 85 94 88 68 53 55 64 71 81 92 85 56 22 2 -11 -39 -81 -113 -123 -124 -132 -141 -144 -146 -162 -183 -182 -153 -120 -117 -136 -140 -108 -59 -30 -32 -39 -22 16 48 59 65 76 98 126 160 206 256 288 294 290 289 291 283 266 250 237 217 180 140 113 100 80 36 -19 -56 -57 -47 -61 -104 -147 -170 -178 -193 -210 -212 -198 -194 -219 -248 -248 -219 -195 -196 -209 -211 -200 -185 +0 0 0 0 0 0 0 0 0 0 -1 -1 -1 -2 -2 -2 -2 -2 -1 -1 -1 -1 -1 0 0 -1 -2 -1 0 0 -1 -2 0 3 3 1 0 1 2 2 0 -3 -5 -5 -5 -7 -10 -12 -10 -4 1 1 -3 -9 -10 -5 4 11 14 11 8 7 10 14 14 11 9 11 16 19 18 12 5 -1 -5 -7 -10 -13 -18 -24 -27 -26 -22 -16 -11 -6 -3 0 -3 -6 -9 -10 -9 -9 -5 4 14 17 12 5 3 2 1 2 11 21 19 10 13 35 66 86 85 74 68 70 77 79 67 39 17 20 41 52 39 11 -12 -25 -35 -37 -38 -45 -62 -76 -71 -56 -52 -60 -53 -27 -7 -16 -39 -49 -47 -56 -81 -96 -85 -59 -41 -38 -46 -54 -53 -37 -16 -10 -16 -16 5 26 25 6 0 21 43 44 26 10 15 28 33 29 28 45 71 79 53 12 -7 8 26 16 -13 -28 -17 6 25 26 11 -14 -46 -70 -82 -80 -65 -43 -29 -42 -70 -74 -26 53 104 95 45 5 3 38 77 91 71 45 42 59 56 13 -49 -85 -70 -31 -3 -5 -27 -45 -44 -24 -2 6 3 -3 -5 -4 -5 -9 -12 -17 -22 -20 -1 30 51 46 21 8 25 52 61 51 49 69 94 100 86 76 78 76 57 38 39 54 58 42 22 13 9 -2 -18 -30 -33 -32 -33 -36 -42 -55 -67 -77 -85 -95 -97 -83 -57 -37 -32 -36 -39 -38 -42 -49 -54 -55 -56 -54 -45 -29 -16 -7 2 8 7 1 1 13 29 35 32 33 39 44 40 34 34 37 34 25 19 20 22 24 27 30 27 17 6 0 -4 -12 -23 -32 -34 -33 -34 -36 -35 -35 -38 -41 -40 -33 -25 -24 -26 -26 -20 -11 -5 -6 -7 -4 2 6 7 8 9 11 14 17 21 25 26 26 24 23 22 20 17 15 
14 12 9 6 5 4 3 1 -1 -2 -2 -2 -2 -3 -3 -3 -3 -3 -3 -3 -2 -2 -2 -2 -1 -1 -1 -1 -1 -1 -1 0 +6 29 28 7 1 23 47 47 28 11 16 30 35 31 30 47 74 82 55 13 -7 9 27 17 -13 -28 -17 7 26 27 12 -14 -46 -70 -82 -80 -65 -43 -29 -42 -70 -74 -26 54 105 96 46 6 4 39 78 92 72 46 43 60 57 14 -50 -86 -71 -31 -3 -5 -28 -46 -45 -25 -2 7 4 -3 -5 -4 -5 -9 -13 -18 -24 -21 -1 34 58 52 24 10 29 60 71 60 58 82 113 121 104 93 96 95 72 48 50 69 75 55 30 18 12 -2 -24 -40 -45 -44 -46 -50 -60 -79 -98 -113 -127 -143 -149 -128 -89 -58 -51 -58 -63 -63 -71 -84 -94 -97 -100 -97 -81 -54 -30 -12 4 16 16 4 3 29 63 77 73 75 92 104 96 83 85 94 88 68 53 55 64 71 81 92 85 56 22 2 -11 -39 -81 -113 -123 -124 -132 -141 -144 -146 -162 -183 -182 -153 -120 -117 -136 -140 -108 -59 -30 -32 -39 -22 16 48 59 65 76 98 126 160 206 256 288 294 290 289 291 283 266 250 237 217 180 140 113 100 80 36 -19 -56 -57 -47 -61 -104 -147 -170 -178 -193 -210 -212 -198 -194 -219 -248 -248 -219 -195 -196 -209 -211 -200 -185 -171 -149 -127 -110 -105 -97 -78 -49 -27 -24 -29 -24 4 45 80 97 105 108 111 110 114 135 169 199 210 205 201 200 201 199 190 174 156 142 132 126 116 96 67 28 -12 -41 -49 -48 -56 -72 -90 -99 -104 -110 -111 -106 -98 -95 -91 -83 -79 -83 -88 -74 -47 -26 -27 -33 -29 -10 8 19 29 44 54 57 63 80 104 122 124 122 124 128 119 92 56 33 30 32 23 2 -21 -36 -48 -60 -66 -67 -73 -94 -118 -130 -126 -120 -123 -122 -111 -97 -94 -101 -101 -88 -77 -76 -71 -49 -17 -4 -15 -22 6 67 118 129 106 83 87 110 127 122 104 97 112 126 118 86 57 56 74 78 54 22 6 2 -8 -29 -47 -56 -64 -84 -106 -117 -118 -123 -140 -159 -171 -179 -185 -184 -168 -149 -137 -123 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 0 -1 0 0 0 -1 -2 -1 0 1 1 0 -1 -3 -5 -6 -7 -6 -4 -3 -4 -7 -8 -3 6 12 11 5 0 0 5 11 14 11 7 7 10 10 2 -10 -18 -15 -7 -1 -2 -7 -12 -12 -7 -1 1 1 -1 -2 -2 -2 -3 -5 -6 -9 -8 -1 12 21 19 9 3 11 24 29 25 24 35 49 54 47 43 45 45 35 23 25 35 38 29 16 9 6 -2 -14 -23 -27 -26 -28 -31 -37 -50 -62 -72 -82 -94 -99 -86 -60 -40 -36 -41 -45 -45 -51 -61 -69 -72 -75 -73 -62 -42 -24 
-10 3 12 12 3 2 23 51 63 60 62 77 88 82 71 73 82 77 60 47 49 57 64 73 83 77 51 20 1 -11 -37 -77 -107 -117 -118 -126 -135 -139 -141 -157 -178 -177 -150 -118 -115 -134 -138 -107 -59 -30 -32 -39 -22 15 47 58 64 75 97 125 159 205 256 288 293 289 288 290 282 265 249 235 215 178 138 111 98 78 35 -19 -55 -56 -46 -60 -102 -143 -165 -172 -186 -202 -203 -189 -184 -207 -234 -233 -205 -182 -182 -193 -194 -183 -168 -155 -134 -114 -98 -93 -86 -69 -43 -24 -21 -25 -21 3 37 66 79 85 87 89 87 90 105 131 153 160 155 150 148 148 145 137 124 110 99 91 86 78 64 44 18 -8 -27 -32 -31 -35 -45 -55 -60 -62 -65 -64 -61 -55 -53 -50 -45 -42 -44 -46 -38 -24 -13 -13 -16 -14 -5 3 8 12 18 22 23 25 31 40 46 46 44 44 44 40 30 18 10 9 9 6 0 -6 -10 -13 -16 -17 -17 -18 -23 -27 -29 -28 -26 -25 -25 -22 -18 -17 -18 -17 -15 -12 -12 -11 -7 -3 -1 -2 -3 0 7 12 12 9 7 7 8 9 8 7 6 6 7 6 4 2 2 2 2 1 0 0 0 -1 -1 -1 -1 -1 -2 -2 -2 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 0 +81 92 85 56 22 2 -11 -39 -81 -113 -123 -124 -132 -141 -144 -146 -162 -183 -182 -153 -120 -117 -136 -140 -108 -59 -30 -32 -39 -22 16 48 59 65 76 98 126 160 206 256 288 294 290 289 291 283 266 250 237 217 180 140 113 100 80 36 -19 -56 -57 -47 -61 -104 -147 -170 -178 -193 -210 -212 -198 -194 -219 -248 -248 -219 -195 -196 -209 -211 -200 -185 -171 -149 -127 -110 -105 -97 -78 -49 -27 -24 -29 -24 4 45 80 97 105 108 111 110 114 135 169 199 210 205 201 200 201 199 190 174 156 142 132 126 116 96 67 28 -12 -41 -49 -48 -56 -72 -90 -99 -104 -110 -111 -106 -98 -95 -91 -83 -79 -83 -88 -74 -47 -26 -27 -33 -29 -10 8 19 29 44 54 57 63 80 104 122 124 122 124 128 119 92 56 33 30 32 23 2 -21 -36 -48 -60 -66 -67 -73 -94 -118 -130 -126 -120 -123 -122 -111 -97 -94 -101 -101 -88 -77 -76 -71 -49 -17 -4 -15 -22 6 67 118 129 106 83 87 110 127 122 104 97 112 126 118 86 57 56 74 78 54 22 6 2 -8 -29 -47 -56 -64 -84 -106 -117 -118 -123 -140 -159 -171 -179 -185 -184 -168 -149 -137 -123 -86 -28 26 51 53 59 81 110 131 144 154 162 161 153 143 140 146 145 124 89 57 47 56 61 44 12 -12 -21 
-21 -28 -41 -55 -68 -77 -78 -70 -63 -71 -94 -118 -133 -144 -154 -165 -161 -142 -121 -108 -101 -88 -65 -41 -24 -19 -13 7 42 81 107 117 118 119 118 112 103 102 116 129 127 108 94 96 109 117 112 98 79 64 51 41 37 31 14 -7 -20 -14 1 3 -16 -37 -41 -26 -17 -28 -51 -62 -50 -30 -21 -35 -57 -65 -51 -25 -10 -15 -39 -60 -70 -63 -50 -35 -14 11 34 45 42 37 39 44 40 22 9 15 35 46 35 17 16 37 51 27 -33 -85 -91 -59 -38 -52 -81 -84 -61 -47 -67 -99 -109 -97 -83 -83 -82 -73 -62 -49 -33 -10 6 12 14 26 35 26 +0 0 0 0 0 0 -1 -1 -1 -1 -1 -1 -2 -2 -2 -3 -3 -4 -4 -4 -4 -4 -5 -5 -4 -3 -2 -2 -2 -2 0 2 3 4 5 7 10 13 18 23 28 30 31 32 34 34 33 33 32 31 26 21 18 16 13 6 -4 -11 -12 -10 -13 -23 -33 -39 -42 -47 -53 -55 -52 -53 -61 -71 -73 -66 -60 -62 -67 -69 -67 -64 -60 -54 -47 -41 -40 -38 -31 -20 -12 -11 -13 -11 1 20 36 45 49 51 54 54 57 69 87 104 112 111 110 111 113 114 110 102 93 85 80 78 72 61 43 18 -8 -28 -33 -33 -39 -51 -64 -71 -75 -80 -82 -79 -73 -72 -69 -64 -61 -65 -70 -59 -38 -21 -22 -27 -24 -9 6 15 24 37 46 49 54 69 91 107 110 108 111 115 107 83 51 30 27 29 21 1 -20 -34 -46 -58 -63 -65 -71 -91 -115 -126 -123 -117 -121 -120 -109 -96 -93 -100 -100 -88 -77 -76 -71 -49 -17 -4 -15 -22 5 66 117 129 106 82 86 109 126 121 103 96 111 125 117 85 56 55 73 76 53 21 5 1 -8 -29 -46 -55 -62 -81 -102 -112 -113 -117 -133 -150 -161 -167 -172 -171 -155 -137 -125 -112 -78 -26 23 45 46 51 70 95 112 123 131 136 135 127 118 115 119 117 99 70 45 36 43 47 33 9 -10 -16 -16 -21 -30 -40 -49 -55 -55 -49 -43 -48 -63 -78 -87 -93 -98 -104 -100 -88 -74 -65 -60 -52 -38 -24 -14 -11 -8 3 22 42 54 58 58 58 56 52 47 46 52 56 55 45 39 39 43 46 43 37 29 23 18 14 12 10 4 -3 -7 -5 0 0 -5 -11 -12 -7 -5 -7 -13 -15 -12 -7 -5 -8 -12 -13 -10 -5 -2 -3 -7 -10 -11 -10 -8 -5 -2 1 4 5 4 3 3 4 3 1 0 1 2 3 2 1 0 2 2 1 -2 -4 -4 -3 -2 -2 -3 -3 -2 -1 -2 -2 -2 -2 -1 -1 -1 -1 -1 -1 -1 -1 0 0 0 0 0 0 +119 92 56 33 30 32 23 2 -21 -36 -48 -60 -66 -67 -73 -94 -118 -130 -126 -120 -123 -122 -111 -97 -94 -101 -101 -88 -77 -76 -71 -49 -17 -4 -15 -22 6 
67 118 129 106 83 87 110 127 122 104 97 112 126 118 86 57 56 74 78 54 22 6 2 -8 -29 -47 -56 -64 -84 -106 -117 -118 -123 -140 -159 -171 -179 -185 -184 -168 -149 -137 -123 -86 -28 26 51 53 59 81 110 131 144 154 162 161 153 143 140 146 145 124 89 57 47 56 61 44 12 -12 -21 -21 -28 -41 -55 -68 -77 -78 -70 -63 -71 -94 -118 -133 -144 -154 -165 -161 -142 -121 -108 -101 -88 -65 -41 -24 -19 -13 7 42 81 107 117 118 119 118 112 103 102 116 129 127 108 94 96 109 117 112 98 79 64 51 41 37 31 14 -7 -20 -14 1 3 -16 -37 -41 -26 -17 -28 -51 -62 -50 -30 -21 -35 -57 -65 -51 -25 -10 -15 -39 -60 -70 -63 -50 -35 -14 11 34 45 42 37 39 44 40 22 9 15 35 46 35 17 16 37 51 27 -33 -85 -91 -59 -38 -52 -81 -84 -61 -47 -67 -99 -109 -97 -83 -83 -82 -73 -62 -49 -33 -10 6 12 14 26 35 26 4 -5 11 36 48 38 22 21 38 58 62 42 13 2 10 23 25 16 8 1 -12 -28 -27 -2 31 49 42 20 1 -8 -4 3 12 18 21 19 8 -7 -18 -16 -4 5 9 16 33 53 58 48 43 58 83 90 70 48 43 54 61 58 62 77 87 71 41 24 38 63 69 47 16 0 -2 -3 -15 -38 -57 -67 -68 -76 -96 -122 -138 -132 -114 -96 -84 -70 -49 -27 -12 -11 -11 3 34 68 89 94 91 92 94 88 74 55 35 13 -10 -28 -38 -40 -41 -50 -66 -81 -83 -78 -73 -76 -84 -90 -89 -84 -83 -90 -102 -112 -117 -124 -141 -158 -158 -139 -119 -120 -128 -117 -83 -48 -36 -33 -15 18 42 48 58 83 110 118 112 118 139 159 160 154 149 148 151 164 182 190 171 141 +0 0 0 0 0 0 0 0 -1 -1 -1 -1 -1 -1 -1 -2 -2 -3 -3 -3 -4 -4 -4 -4 -4 -4 -5 -5 -4 -5 -5 -3 -2 -1 -2 -2 0 5 10 12 10 8 9 12 14 14 13 12 15 18 17 13 9 9 12 13 9 4 1 0 -2 -7 -11 -13 -16 -21 -27 -30 -31 -34 -39 -46 -50 -54 -57 -58 -54 -49 -46 -43 -31 -10 9 18 20 22 31 44 53 60 65 70 71 68 65 65 68 69 60 44 28 24 29 32 23 6 -7 -12 -12 -17 -24 -33 -41 -47 -48 -44 -40 -46 -61 -77 -88 -96 -104 -113 -111 -99 -85 -77 -73 -64 -48 -31 -18 -15 -10 5 32 63 83 92 94 95 95 91 84 84 96 108 107 91 80 82 94 102 98 86 70 57 45 36 33 28 12 -7 -19 -13 0 2 -16 -35 -39 -25 -17 -27 -49 -60 -49 -30 -21 -35 -56 -64 -51 -25 -10 -15 -39 -60 -70 -63 -50 -35 -14 10 33 44 41 36 38 44 40 21 8 14 34 45 
34 16 15 36 50 26 -33 -85 -90 -59 -38 -52 -80 -83 -60 -46 -65 -96 -106 -94 -80 -80 -78 -70 -59 -47 -31 -10 5 11 12 23 31 23 3 -5 9 31 42 33 19 18 32 49 52 35 10 1 8 18 20 12 6 0 -10 -22 -22 -2 23 37 31 14 0 -6 -3 2 8 12 14 13 5 -5 -12 -11 -3 3 5 10 20 32 35 28 25 33 47 50 39 26 23 28 32 30 31 38 43 34 19 11 17 28 30 20 6 0 -1 -2 -7 -16 -23 -26 -26 -28 -35 -43 -48 -45 -38 -31 -27 -22 -15 -8 -4 -4 -3 0 8 16 21 22 20 20 20 18 15 10 6 2 -2 -5 -7 -7 -7 -8 -10 -12 -12 -10 -9 -9 -10 -10 -10 -9 -8 -8 -9 -9 -9 -9 -10 -11 -10 -8 -7 -6 -6 -5 -4 -2 -2 -2 -1 0 0 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 +37 31 14 -7 -20 -14 1 3 -16 -37 -41 -26 -17 -28 -51 -62 -50 -30 -21 -35 -57 -65 -51 -25 -10 -15 -39 -60 -70 -63 -50 -35 -14 11 34 45 42 37 39 44 40 22 9 15 35 46 35 17 16 37 51 27 -33 -85 -91 -59 -38 -52 -81 -84 -61 -47 -67 -99 -109 -97 -83 -83 -82 -73 -62 -49 -33 -10 6 12 14 26 35 26 4 -5 11 36 48 38 22 21 38 58 62 42 13 2 10 23 25 16 8 1 -12 -28 -27 -2 31 49 42 20 1 -8 -4 3 12 18 21 19 8 -7 -18 -16 -4 5 9 16 33 53 58 48 43 58 83 90 70 48 43 54 61 58 62 77 87 71 41 24 38 63 69 47 16 0 -2 -3 -15 -38 -57 -67 -68 -76 -96 -122 -138 -132 -114 -96 -84 -70 -49 -27 -12 -11 -11 3 34 68 89 94 91 92 94 88 74 55 35 13 -10 -28 -38 -40 -41 -50 -66 -81 -83 -78 -73 -76 -84 -90 -89 -84 -83 -90 -102 -112 -117 -124 -141 -158 -158 -139 -119 -120 -128 -117 -83 -48 -36 -33 -15 18 42 48 58 83 110 118 112 118 139 159 160 154 149 148 151 164 182 190 171 141 117 105 91 72 62 61 52 18 -21 -40 -46 -58 -80 -96 -98 -97 -95 -80 -44 -12 -6 -10 -1 31 58 73 92 121 145 137 110 95 97 103 99 89 84 91 100 108 108 98 83 70 60 42 13 -17 -32 -37 -47 -72 -102 -122 -131 -141 -165 -205 -238 -245 -228 -213 -226 -241 -222 -171 -133 -137 -153 -139 -93 -61 -65 -76 -60 -28 -8 -2 12 46 81 99 108 125 147 153 147 145 155 163 153 137 129 130 120 92 60 40 28 15 -12 -41 -65 -81 -93 -106 -124 -141 -151 -152 -157 -178 -206 -213 -187 -148 -128 -131 -136 -134 -130 -129 -121 -97 -70 -58 -52 -28 22 65 75 61 57 84 129 165 179 179 179 197 236 
279 306 304 279 242 208 184 176 182 187 174 137 91 58 50 46 21 -29 -76 +0 0 0 -1 -1 -1 0 0 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -2 -2 -2 -1 -1 -1 -2 -3 -4 -4 -3 -3 -1 0 2 3 3 3 3 4 3 2 0 1 4 5 4 2 2 5 7 4 -6 -15 -16 -11 -8 -10 -16 -18 -13 -11 -15 -23 -26 -24 -21 -22 -22 -20 -18 -14 -10 -3 1 3 4 8 11 8 1 -2 4 13 18 14 8 8 15 24 26 18 5 0 4 10 11 7 3 0 -7 -15 -15 -2 16 26 23 11 0 -5 -3 1 7 10 12 11 5 -5 -12 -11 -3 3 6 10 22 36 40 34 30 41 60 66 52 36 32 41 47 45 48 60 69 57 33 19 31 52 57 39 13 0 -2 -3 -14 -34 -51 -60 -61 -68 -87 -111 -126 -121 -105 -89 -78 -65 -46 -26 -12 -11 -11 2 32 65 85 90 87 89 91 85 72 53 34 12 -10 -28 -38 -40 -41 -50 -66 -81 -83 -78 -73 -76 -84 -90 -89 -84 -83 -90 -102 -112 -117 -124 -141 -158 -158 -139 -119 -120 -127 -116 -82 -48 -36 -33 -15 17 40 46 56 80 105 113 107 112 132 150 150 144 139 137 140 151 167 173 155 127 105 94 81 63 54 53 45 15 -19 -35 -40 -50 -68 -81 -82 -80 -78 -65 -36 -10 -5 -8 -1 23 44 55 69 90 106 100 79 68 68 72 68 61 57 61 66 71 70 63 52 43 37 25 7 -11 -19 -22 -27 -41 -58 -68 -72 -76 -88 -107 -122 -124 -114 -105 -109 -114 -104 -79 -60 -61 -67 -60 -39 -26 -27 -31 -24 -11 -3 -1 4 16 27 33 35 39 45 46 43 42 43 45 41 35 32 32 29 21 13 8 6 3 -3 -9 -13 -15 -17 -19 -21 -23 -24 -23 -23 -25 -28 -28 -23 -18 -15 -15 -14 -14 -13 -12 -11 -8 -6 -5 -4 -2 1 3 3 3 2 3 5 6 6 5 5 5 5 5 5 5 4 3 2 1 1 1 1 0 0 0 0 0 0 0 -1 0 +-138 -132 -114 -96 -84 -70 -49 -27 -12 -11 -11 3 34 68 89 94 91 92 94 88 74 55 35 13 -10 -28 -38 -40 -41 -50 -66 -81 -83 -78 -73 -76 -84 -90 -89 -84 -83 -90 -102 -112 -117 -124 -141 -158 -158 -139 -119 -120 -128 -117 -83 -48 -36 -33 -15 18 42 48 58 83 110 118 112 118 139 159 160 154 149 148 151 164 182 190 171 141 117 105 91 72 62 61 52 18 -21 -40 -46 -58 -80 -96 -98 -97 -95 -80 -44 -12 -6 -10 -1 31 58 73 92 121 145 137 110 95 97 103 99 89 84 91 100 108 108 98 83 70 60 42 13 -17 -32 -37 -47 -72 -102 -122 -131 -141 -165 -205 -238 -245 -228 -213 -226 -241 -222 -171 -133 -137 -153 -139 -93 -61 -65 -76 -60 -28 -8 -2 12 46 
81 99 108 125 147 153 147 145 155 163 153 137 129 130 120 92 60 40 28 15 -12 -41 -65 -81 -93 -106 -124 -141 -151 -152 -157 -178 -206 -213 -187 -148 -128 -131 -136 -134 -130 -129 -121 -97 -70 -58 -52 -28 22 65 75 61 57 84 129 165 179 179 179 197 236 279 306 304 279 242 208 184 176 182 187 174 137 91 58 50 46 21 -29 -76 -88 -70 -60 -76 -101 -115 -119 -128 -138 -135 -122 -123 -142 -163 -169 -176 -189 -204 -203 -195 -190 -187 -166 -129 -99 -82 -59 -17 25 39 27 23 51 98 138 157 165 182 208 225 214 177 140 122 117 104 76 54 60 87 109 103 76 53 53 69 75 54 12 -25 -42 -40 -34 -35 -45 -72 -115 -159 -179 -177 -162 -155 -151 -137 -109 -71 -37 -24 -38 -64 -72 -42 4 29 20 1 1 12 13 0 -8 6 18 9 -18 -30 -12 15 14 -20 -61 -77 -66 -45 -40 -57 -85 -103 -98 -77 -55 -48 -49 -48 -37 -25 -19 -19 -20 -20 -15 1 30 64 90 105 110 119 139 167 187 190 182 181 203 232 236 203 158 133 134 140 136 130 132 136 119 79 42 29 37 42 32 22 25 34 23 -14 -66 -106 +0 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 0 0 0 1 1 1 1 1 2 1 1 1 0 -1 -2 -2 -2 -3 -3 -4 -5 -6 -6 -6 -6 -7 -8 -8 -8 -9 -10 -11 -13 -14 -16 -18 -21 -22 -20 -18 -19 -21 -20 -15 -9 -7 -7 -3 3 8 10 12 18 25 28 27 30 36 42 44 43 43 44 46 51 58 62 57 48 40 37 33 26 23 23 20 7 -9 -17 -20 -26 -36 -44 -45 -46 -45 -39 -22 -6 -4 -6 -1 16 31 39 50 67 82 78 64 56 57 62 60 55 52 57 64 70 71 65 55 47 41 29 9 -13 -23 -27 -35 -54 -76 -92 -100 -108 -128 -160 -187 -194 -182 -172 -183 -197 -183 -142 -111 -115 -130 -119 -80 -53 -57 -67 -53 -25 -8 -2 10 41 73 90 98 115 135 142 137 135 145 153 144 130 123 124 115 88 57 38 27 14 -12 -41 -64 -80 -92 -105 -123 -140 -150 -151 -157 -178 -206 -213 -187 -148 -128 -131 -136 -134 -130 -129 -121 -97 -70 -58 -52 -28 21 64 74 60 56 83 127 162 176 175 175 192 229 271 296 293 268 232 199 175 167 172 176 163 128 84 53 46 42 19 -27 -69 -80 -63 -54 -68 -90 -101 -104 -111 -119 -116 -104 -104 -120 -136 -140 -145 -155 -166 -164 -156 -151 -147 -130 -100 -76 -63 -45 -13 18 28 19 16 36 68 95 107 112 122 138 148 139 113 89 76 72 63 45 32 35 50 62 58 
42 29 28 36 39 28 6 -13 -21 -20 -17 -17 -21 -33 -52 -71 -78 -76 -68 -64 -61 -55 -43 -27 -14 -9 -14 -23 -25 -15 1 9 6 0 0 3 3 0 -3 1 4 2 -5 -8 -3 3 3 -5 -13 -16 -13 -9 -8 -10 -15 -17 -16 -12 -8 -7 -7 -7 -5 -3 -3 -3 -3 -2 -2 0 2 5 6 7 7 7 8 9 9 9 8 7 8 8 7 6 4 3 3 2 2 2 1 1 1 0 0 0 0 0 0 0 0 0 0 -1 -1 0 +81 99 108 125 147 153 147 145 155 163 153 137 129 130 120 92 60 40 28 15 -12 -41 -65 -81 -93 -106 -124 -141 -151 -152 -157 -178 -206 -213 -187 -148 -128 -131 -136 -134 -130 -129 -121 -97 -70 -58 -52 -28 22 65 75 61 57 84 129 165 179 179 179 197 236 279 306 304 279 242 208 184 176 182 187 174 137 91 58 50 46 21 -29 -76 -88 -70 -60 -76 -101 -115 -119 -128 -138 -135 -122 -123 -142 -163 -169 -176 -189 -204 -203 -195 -190 -187 -166 -129 -99 -82 -59 -17 25 39 27 23 51 98 138 157 165 182 208 225 214 177 140 122 117 104 76 54 60 87 109 103 76 53 53 69 75 54 12 -25 -42 -40 -34 -35 -45 -72 -115 -159 -179 -177 -162 -155 -151 -137 -109 -71 -37 -24 -38 -64 -72 -42 4 29 20 1 1 12 13 0 -8 6 18 9 -18 -30 -12 15 14 -20 -61 -77 -66 -45 -40 -57 -85 -103 -98 -77 -55 -48 -49 -48 -37 -25 -19 -19 -20 -20 -15 1 30 64 90 105 110 119 139 167 187 190 182 181 203 232 236 203 158 133 134 140 136 130 132 136 119 79 42 29 37 42 32 22 25 34 23 -14 -66 -106 -129 -143 -152 -158 -161 -166 -175 -184 -189 -187 -172 -147 -127 -121 -127 -121 -90 -51 -38 -51 -61 -43 -11 -3 -25 -45 -36 -9 1 -18 -38 -30 -2 17 11 -3 -3 10 15 4 -4 7 31 40 18 -12 -24 -19 -25 -55 -88 -93 -69 -44 -40 -47 -40 -24 -20 -38 -55 -46 -19 5 12 14 20 32 48 66 82 89 85 88 105 123 122 103 83 72 67 65 72 87 90 70 39 22 25 34 43 53 57 44 14 -2 13 41 51 33 14 18 34 44 38 29 27 32 44 50 46 39 42 63 78 66 28 -5 -15 -15 -32 -57 -63 -44 -26 -39 -70 -89 -76 -55 -49 -58 -62 -55 -50 -69 -101 -118 -108 -88 -83 -90 -84 -57 -38 -47 -73 -75 -48 -20 -22 -41 -50 -36 -17 -10 -13 -21 -32 -34 +0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 0 0 0 0 -1 -2 -3 -3 -4 -5 -6 -7 -8 -9 -9 -11 -14 -15 -14 -12 -11 -12 -13 -13 -13 -14 -13 -11 -9 -8 -7 -4 3 9 11 9 9 13 22 29 32 34 35 
39 49 60 67 69 65 58 51 47 46 49 51 49 39 27 17 15 14 6 -10 -26 -31 -25 -22 -29 -39 -45 -47 -52 -57 -57 -52 -54 -63 -74 -78 -82 -90 -99 -100 -97 -96 -96 -87 -69 -54 -45 -33 -10 14 22 15 13 30 59 84 97 103 115 133 146 140 117 94 83 80 72 53 38 42 62 79 75 56 39 40 52 57 42 9 -20 -34 -33 -28 -29 -37 -60 -96 -134 -152 -151 -139 -134 -131 -120 -96 -63 -33 -22 -35 -58 -66 -39 3 26 18 0 0 11 12 0 -8 5 17 8 -18 -29 -12 14 13 -20 -60 -76 -65 -45 -40 -57 -85 -103 -98 -77 -55 -48 -49 -48 -37 -25 -19 -19 -20 -20 -15 0 29 63 89 104 109 118 138 166 185 188 180 178 200 228 232 199 154 129 130 136 131 125 127 130 113 75 39 27 34 39 29 20 23 31 21 -13 -61 -97 -117 -129 -136 -141 -143 -146 -153 -160 -163 -161 -147 -125 -107 -101 -106 -100 -74 -42 -31 -41 -49 -34 -9 -3 -20 -35 -28 -7 0 -14 -28 -22 -2 11 7 -3 -3 6 9 2 -3 4 19 25 11 -8 -15 -12 -15 -33 -51 -53 -39 -25 -22 -26 -22 -13 -11 -20 -28 -23 -10 2 5 6 8 14 20 28 34 36 34 34 40 46 45 37 29 25 22 21 23 27 28 21 11 6 7 9 11 13 14 10 3 -1 2 9 11 6 2 3 6 8 6 4 4 5 6 7 6 5 5 8 9 7 3 -1 -2 -2 -3 -6 -6 -4 -2 -3 -5 -6 -5 -4 -3 -3 -3 -3 -2 -3 -4 -4 -4 -3 -2 -2 -2 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 0 +-72 -42 4 29 20 1 1 12 13 0 -8 6 18 9 -18 -30 -12 15 14 -20 -61 -77 -66 -45 -40 -57 -85 -103 -98 -77 -55 -48 -49 -48 -37 -25 -19 -19 -20 -20 -15 1 30 64 90 105 110 119 139 167 187 190 182 181 203 232 236 203 158 133 134 140 136 130 132 136 119 79 42 29 37 42 32 22 25 34 23 -14 -66 -106 -129 -143 -152 -158 -161 -166 -175 -184 -189 -187 -172 -147 -127 -121 -127 -121 -90 -51 -38 -51 -61 -43 -11 -3 -25 -45 -36 -9 1 -18 -38 -30 -2 17 11 -3 -3 10 15 4 -4 7 31 40 18 -12 -24 -19 -25 -55 -88 -93 -69 -44 -40 -47 -40 -24 -20 -38 -55 -46 -19 5 12 14 20 32 48 66 82 89 85 88 105 123 122 103 83 72 67 65 72 87 90 70 39 22 25 34 43 53 57 44 14 -2 13 41 51 33 14 18 34 44 38 29 27 32 44 50 46 39 42 63 78 66 28 -5 -15 -15 -32 -57 -63 -44 -26 -39 -70 -89 -76 -55 -49 -58 -62 -55 -50 -69 -101 -118 -108 -88 -83 -90 -84 -57 -38 -47 -73 -75 -48 -20 -22 
-41 -50 -36 -17 -10 -13 -21 -32 -34 -16 18 45 43 22 16 34 55 64 65 76 91 93 83 79 91 100 83 44 4 -10 2 24 38 35 16 -10 -28 -28 -19 -10 -14 -26 -35 -39 -34 -15 6 17 0 -38 -69 -68 -37 3 29 30 5 -31 -48 -29 9 25 -11 -74 -117 -115 -74 -33 -27 -58 -98 -111 -88 -54 -36 -37 -32 -9 22 53 79 95 95 85 84 103 126 138 138 141 144 142 147 171 192 180 127 78 66 77 72 37 2 -13 -18 -31 -49 -51 -35 -11 6 3 -16 -32 -22 6 13 -31 -92 -115 -86 -52 -64 -107 -127 -99 -55 -46 -82 -124 -134 -105 -63 -50 -73 -104 -108 -74 -28 -4 -11 -21 -16 3 19 17 15 28 52 63 38 -7 -41 -38 -10 9 1 -23 -32 -14 20 45 41 10 -28 -57 -66 -61 -62 +0 -1 0 0 0 0 0 0 0 0 -1 0 0 0 -1 -1 -1 0 0 -1 -2 -3 -3 -2 -2 -3 -4 -5 -5 -5 -4 -3 -4 -4 -3 -2 -2 -2 -2 -2 -2 0 3 7 10 12 14 15 19 24 27 29 29 30 34 41 43 38 31 26 28 30 30 29 31 32 29 20 11 7 10 11 9 6 7 10 7 -5 -23 -37 -46 -52 -56 -59 -62 -65 -70 -75 -78 -79 -74 -64 -57 -55 -59 -57 -43 -25 -19 -26 -31 -23 -6 -2 -14 -25 -20 -6 0 -11 -23 -18 -2 10 6 -2 -2 6 9 2 -3 4 20 27 12 -9 -17 -14 -18 -40 -65 -69 -52 -34 -31 -36 -31 -19 -16 -31 -44 -37 -16 4 9 11 16 26 40 56 70 76 73 76 92 108 108 91 74 64 60 59 65 80 83 64 36 20 23 32 40 50 54 42 13 -2 12 39 49 32 13 17 33 43 37 28 26 31 43 49 45 38 41 62 77 65 27 -5 -15 -15 -32 -57 -63 -44 -26 -39 -70 -89 -76 -55 -49 -58 -62 -55 -50 -68 -100 -116 -106 -86 -81 -88 -82 -56 -37 -46 -70 -72 -46 -19 -21 -39 -47 -34 -16 -10 -12 -20 -30 -31 -15 16 40 38 19 14 29 47 55 55 64 76 78 69 65 74 81 67 35 3 -8 1 18 29 26 12 -8 -21 -21 -14 -8 -11 -19 -25 -28 -24 -11 4 11 0 -25 -45 -44 -24 1 17 18 2 -19 -28 -17 5 13 -7 -41 -63 -61 -39 -17 -14 -29 -48 -54 -42 -26 -17 -17 -15 -4 9 22 32 38 37 32 31 38 45 49 48 48 48 46 46 53 58 53 36 22 18 20 18 9 0 -4 -5 -8 -11 -12 -8 -3 1 0 -3 -6 -4 0 2 -5 -14 -17 -12 -7 -9 -14 -15 -12 -6 -5 -9 -12 -12 -9 -6 -4 -6 -8 -7 -5 -2 -1 -1 -1 -1 0 0 0 0 0 1 1 0 -1 -1 -1 -1 0 0 -1 -1 -1 0 0 0 0 -1 -1 -1 -1 0 +67 65 72 87 90 70 39 22 25 34 43 53 57 44 14 -2 13 41 51 33 14 18 34 44 38 29 27 32 44 50 46 39 42 63 78 66 28 -5 
-15 -15 -32 -57 -63 -44 -26 -39 -70 -89 -76 -55 -49 -58 -62 -55 -50 -69 -101 -118 -108 -88 -83 -90 -84 -57 -38 -47 -73 -75 -48 -20 -22 -41 -50 -36 -17 -10 -13 -21 -32 -34 -16 18 45 43 22 16 34 55 64 65 76 91 93 83 79 91 100 83 44 4 -10 2 24 38 35 16 -10 -28 -28 -19 -10 -14 -26 -35 -39 -34 -15 6 17 0 -38 -69 -68 -37 3 29 30 5 -31 -48 -29 9 25 -11 -74 -117 -115 -74 -33 -27 -58 -98 -111 -88 -54 -36 -37 -32 -9 22 53 79 95 95 85 84 103 126 138 138 141 144 142 147 171 192 180 127 78 66 77 72 37 2 -13 -18 -31 -49 -51 -35 -11 6 3 -16 -32 -22 6 13 -31 -92 -115 -86 -52 -64 -107 -127 -99 -55 -46 -82 -124 -134 -105 -63 -50 -73 -104 -108 -74 -28 -4 -11 -21 -16 3 19 17 15 28 52 63 38 -7 -41 -38 -10 9 1 -23 -32 -14 20 45 41 10 -28 -57 -66 -61 -62 -77 -97 -107 -102 -87 -70 -52 -39 -42 -56 -71 -61 -32 -7 -4 -20 -34 -36 -31 -21 -18 -20 -22 -16 8 30 31 15 9 31 63 86 105 132 163 170 148 132 154 206 235 212 159 128 139 168 178 157 114 74 49 40 40 42 46 48 44 28 6 -8 -7 0 -6 -34 -72 -100 -101 -77 -59 -76 -125 -165 -159 -120 -89 -86 -92 -72 -21 33 56 41 6 -27 -34 -15 6 5 -12 -17 8 41 49 28 3 0 25 58 79 82 77 76 72 54 28 27 56 78 59 15 -10 -4 -2 -16 -23 -10 -10 -47 -83 -66 -11 -1 -66 -145 -162 -118 -74 -66 -74 -75 -64 -48 -30 -15 -12 -15 -15 -6 0 1 10 32 46 36 25 33 54 55 19 -38 -81 -92 -68 -27 -4 -21 -65 -89 -66 -23 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 -1 0 0 1 0 0 0 1 1 1 1 1 1 2 2 2 2 2 4 5 4 2 -1 -2 -2 -4 -6 -7 -5 -4 -5 -9 -12 -11 -8 -8 -9 -10 -10 -9 -13 -19 -23 -22 -18 -18 -20 -19 -14 -9 -12 -19 -20 -13 -6 -7 -12 -15 -11 -6 -4 -5 -7 -11 -12 -6 6 16 15 8 6 13 22 26 27 32 39 41 37 36 42 47 39 21 1 -6 1 12 20 18 8 -6 -16 -16 -11 -6 -9 -16 -22 -24 -22 -10 3 10 0 -26 -46 -46 -26 2 20 21 3 -23 -35 -22 6 18 -9 -57 -90 -89 -58 -26 -22 -47 -79 -90 -72 -45 -30 -31 -27 -8 18 45 68 82 82 74 74 91 112 123 124 127 131 130 135 158 178 167 118 73 62 72 68 35 1 -13 -18 -30 -48 -50 -35 -11 5 2 -16 -32 -22 5 12 -31 -92 -115 -86 -52 -64 -107 -127 -99 -55 -46 -82 -124 -134 -105 -63 -50 -73 -104 -108 -74 -28 -4 
-11 -21 -16 2 18 16 14 27 50 61 36 -7 -40 -37 -10 8 0 -22 -31 -14 18 42 38 9 -26 -53 -61 -56 -57 -70 -88 -96 -91 -77 -62 -46 -34 -37 -48 -61 -52 -27 -6 -4 -17 -28 -30 -25 -17 -15 -16 -18 -13 6 22 23 11 6 22 45 61 74 92 113 116 100 88 102 135 152 136 101 80 86 102 107 93 67 43 28 22 22 23 24 25 23 14 3 -5 -4 0 -3 -17 -34 -46 -46 -34 -26 -33 -53 -68 -64 -48 -35 -33 -35 -27 -8 11 19 13 1 -9 -11 -5 1 1 -4 -5 2 10 12 6 0 0 5 12 17 17 15 14 13 9 4 4 9 12 9 2 -2 -1 -1 -3 -3 -2 -2 -6 -9 -7 -2 -1 -6 -12 -13 -9 -6 -5 -5 -5 -4 -3 -2 -1 -1 -1 -1 -1 0 0 0 0 0 0 0 0 0 0 0 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 0 +141 144 142 147 171 192 180 127 78 66 77 72 37 2 -13 -18 -31 -49 -51 -35 -11 6 3 -16 -32 -22 6 13 -31 -92 -115 -86 -52 -64 -107 -127 -99 -55 -46 -82 -124 -134 -105 -63 -50 -73 -104 -108 -74 -28 -4 -11 -21 -16 3 19 17 15 28 52 63 38 -7 -41 -38 -10 9 1 -23 -32 -14 20 45 41 10 -28 -57 -66 -61 -62 -77 -97 -107 -102 -87 -70 -52 -39 -42 -56 -71 -61 -32 -7 -4 -20 -34 -36 -31 -21 -18 -20 -22 -16 8 30 31 15 9 31 63 86 105 132 163 170 148 132 154 206 235 212 159 128 139 168 178 157 114 74 49 40 40 42 46 48 44 28 6 -8 -7 0 -6 -34 -72 -100 -101 -77 -59 -76 -125 -165 -159 -120 -89 -86 -92 -72 -21 33 56 41 6 -27 -34 -15 6 5 -12 -17 8 41 49 28 3 0 25 58 79 82 77 76 72 54 28 27 56 78 59 15 -10 -4 -2 -16 -23 -10 -10 -47 -83 -66 -11 -1 -66 -145 -162 -118 -74 -66 -74 -75 -64 -48 -30 -15 -12 -15 -15 -6 0 1 10 32 46 36 25 33 54 55 19 -38 -81 -92 -68 -27 -4 -21 -65 -89 -66 -23 -10 -44 -91 -110 -91 -59 -39 -40 -53 -66 -67 -63 -62 -66 -68 -68 -65 -54 -32 -1 18 12 -7 -8 21 61 79 62 31 13 22 50 83 101 96 77 62 68 91 105 92 65 52 73 112 130 112 78 61 73 91 95 80 62 56 57 56 51 46 35 11 -15 -19 0 15 2 -20 -8 33 57 28 -24 -42 -13 16 6 -36 -73 -78 -55 -20 8 16 2 -11 -5 19 32 24 11 13 29 48 61 70 68 46 9 -23 -36 -37 -49 -79 -104 -98 -63 -33 -39 -76 -109 -110 -85 -64 -57 -56 -56 -59 -67 -69 -61 -47 -33 -21 -1 25 39 26 -11 -54 -75 -57 -8 36 41 16 -4 5 20 7 -33 -59 -51 -22 -8 -26 -64 -96 -103 -84 -60 -64 -98 
-133 -132 -100 -78 -100 -155 -199 -215 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 -1 -1 -1 -1 -2 -1 -1 0 0 -1 -2 -1 0 0 -2 -5 -7 -6 -4 -5 -8 -10 -8 -5 -5 -8 -13 -14 -12 -8 -6 -9 -14 -15 -11 -5 -1 -2 -4 -3 0 3 3 2 5 10 13 8 -2 -10 -9 -3 2 0 -7 -9 -4 5 13 12 3 -9 -19 -22 -21 -22 -27 -35 -39 -38 -34 -28 -21 -16 -18 -24 -31 -27 -15 -4 -2 -10 -17 -18 -16 -11 -10 -11 -12 -9 4 16 17 8 5 17 36 50 62 79 99 105 92 83 99 134 154 141 107 87 95 116 124 111 81 53 35 29 29 31 34 36 33 21 4 -7 -6 0 -5 -28 -60 -83 -85 -65 -50 -65 -108 -143 -138 -105 -79 -76 -82 -65 -19 29 50 37 5 -25 -32 -14 5 4 -12 -17 7 38 46 26 2 0 24 56 76 79 75 74 70 53 27 26 55 77 58 14 -10 -4 -2 -16 -23 -10 -10 -47 -83 -66 -11 -1 -66 -145 -162 -118 -74 -66 -74 -75 -64 -48 -30 -15 -12 -15 -15 -6 0 0 9 31 44 34 24 31 51 52 18 -36 -77 -87 -64 -26 -4 -20 -60 -82 -61 -21 -10 -40 -82 -98 -81 -52 -35 -35 -46 -57 -58 -54 -53 -56 -57 -56 -54 -44 -26 -1 14 9 -6 -7 16 46 59 46 22 9 15 35 58 70 66 52 42 45 60 69 59 41 33 45 69 79 67 46 35 42 52 53 44 34 30 30 29 26 23 17 5 -8 -10 0 6 0 -9 -4 14 24 11 -10 -17 -6 6 2 -14 -27 -28 -20 -7 2 5 0 -4 -2 5 9 6 3 3 7 12 15 16 16 10 1 -5 -8 -8 -10 -16 -20 -18 -11 -6 -7 -12 -17 -16 -12 -9 -8 -7 -7 -7 -8 -8 -6 -5 -3 -2 -1 1 2 1 -1 -4 -5 -4 -1 1 1 0 -1 0 0 0 -1 -2 -2 -1 -1 -1 -1 -2 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 0 +56 41 6 -27 -34 -15 6 5 -12 -17 8 41 49 28 3 0 25 58 79 82 77 76 72 54 28 27 56 78 59 15 -10 -4 -2 -16 -23 -10 -10 -47 -83 -66 -11 -1 -66 -145 -162 -118 -74 -66 -74 -75 -64 -48 -30 -15 -12 -15 -15 -6 0 1 10 32 46 36 25 33 54 55 19 -38 -81 -92 -68 -27 -4 -21 -65 -89 -66 -23 -10 -44 -91 -110 -91 -59 -39 -40 -53 -66 -67 -63 -62 -66 -68 -68 -65 -54 -32 -1 18 12 -7 -8 21 61 79 62 31 13 22 50 83 101 96 77 62 68 91 105 92 65 52 73 112 130 112 78 61 73 91 95 80 62 56 57 56 51 46 35 11 -15 -19 0 15 2 -20 -8 33 57 28 -24 -42 -13 16 6 -36 -73 -78 -55 -20 8 16 2 -11 -5 19 32 24 11 13 29 48 61 70 68 46 9 -23 -36 -37 -49 -79 -104 -98 -63 -33 -39 -76 -109 -110 -85 -64 -57 -56 -56 -59 -67 -69 -61 
-47 -33 -21 -1 25 39 26 -11 -54 -75 -57 -8 36 41 16 -4 5 20 7 -33 -59 -51 -22 -8 -26 -64 -96 -103 -84 -60 -64 -98 -133 -132 -100 -78 -100 -155 -199 -215 -207 -190 -165 -135 -115 -122 -141 -141 -104 -60 -40 -53 -63 -43 7 67 113 133 132 135 167 219 246 216 167 167 238 317 328 262 177 127 127 157 193 213 203 171 148 162 197 227 228 203 163 128 115 133 167 191 189 171 160 164 170 160 129 90 62 59 72 78 63 41 33 41 48 34 -7 -46 -42 21 105 147 126 84 86 140 185 172 112 57 38 37 32 11 -17 -59 -112 -161 -191 -209 -244 -304 -370 -403 -388 -351 -327 -330 -346 -357 -350 -334 -311 -287 -269 -265 -268 -265 -245 -217 -199 -202 -220 -236 -235 -218 -192 -173 -164 -155 -139 -115 -89 -55 -13 36 72 80 66 48 45 60 88 112 113 94 76 83 113 140 143 122 96 78 75 84 91 77 43 5 -18 -22 -16 -7 -2 -6 -12 -9 +0 0 0 -1 -1 -1 0 0 -1 -1 0 0 0 0 0 0 0 1 1 1 1 2 2 1 1 1 2 3 2 0 -1 -1 -1 -2 -2 -1 -1 -4 -8 -7 -2 -1 -8 -17 -19 -15 -10 -9 -11 -11 -10 -8 -5 -3 -3 -3 -3 -2 0 0 2 6 10 8 5 7 13 14 4 -11 -23 -27 -20 -9 -2 -7 -21 -30 -23 -8 -4 -16 -34 -41 -35 -23 -16 -17 -22 -28 -29 -28 -28 -30 -32 -32 -31 -26 -16 -1 9 6 -4 -5 11 33 43 34 17 7 12 29 49 61 58 47 38 43 58 68 60 43 35 49 77 90 78 55 43 52 66 70 59 46 42 43 43 39 36 27 8 -13 -16 0 12 1 -17 -7 27 48 23 -21 -37 -12 14 5 -32 -66 -71 -50 -19 7 14 1 -11 -5 17 29 22 10 12 27 45 58 67 65 44 8 -23 -36 -37 -48 -78 -103 -97 -63 -33 -39 -76 -109 -110 -85 -64 -57 -56 -56 -59 -67 -69 -61 -47 -33 -21 -1 24 38 25 -11 -54 -75 -57 -8 35 40 15 -4 4 19 6 -33 -58 -50 -22 -8 -26 -62 -92 -99 -80 -57 -61 -93 -125 -124 -93 -73 -93 -142 -182 -195 -187 -171 -148 -120 -102 -108 -124 -123 -90 -52 -35 -45 -53 -36 5 55 92 107 106 107 132 171 191 166 127 126 178 235 241 191 128 91 90 110 134 146 138 115 98 106 128 146 144 127 101 78 69 79 98 111 108 96 89 90 92 85 68 46 31 29 35 38 30 19 15 18 21 14 -4 -20 -18 8 42 58 48 31 31 50 65 60 38 19 12 11 9 3 -6 -18 -32 -45 -52 -55 -63 -76 -90 -95 -89 -78 -71 -70 -71 -71 -67 -62 -56 -50 -45 -43 -42 -40 -36 -30 -27 -26 -27 -28 -27 -24 
-20 -17 -16 -14 -12 -10 -7 -4 -1 2 4 4 3 2 2 2 3 4 3 2 2 2 2 2 2 2 1 1 0 0 0 0 0 0 -1 -1 -1 -1 -1 -1 -1 0 +-20 8 16 2 -11 -5 19 32 24 11 13 29 48 61 70 68 46 9 -23 -36 -37 -49 -79 -104 -98 -63 -33 -39 -76 -109 -110 -85 -64 -57 -56 -56 -59 -67 -69 -61 -47 -33 -21 -1 25 39 26 -11 -54 -75 -57 -8 36 41 16 -4 5 20 7 -33 -59 -51 -22 -8 -26 -64 -96 -103 -84 -60 -64 -98 -133 -132 -100 -78 -100 -155 -199 -215 -207 -190 -165 -135 -115 -122 -141 -141 -104 -60 -40 -53 -63 -43 7 67 113 133 132 135 167 219 246 216 167 167 238 317 328 262 177 127 127 157 193 213 203 171 148 162 197 227 228 203 163 128 115 133 167 191 189 171 160 164 170 160 129 90 62 59 72 78 63 41 33 41 48 34 -7 -46 -42 21 105 147 126 84 86 140 185 172 112 57 38 37 32 11 -17 -59 -112 -161 -191 -209 -244 -304 -370 -403 -388 -351 -327 -330 -346 -357 -350 -334 -311 -287 -269 -265 -268 -265 -245 -217 -199 -202 -220 -236 -235 -218 -192 -173 -164 -155 -139 -115 -89 -55 -13 36 72 80 66 48 45 60 88 112 113 94 76 83 113 140 143 122 96 78 75 84 91 77 43 5 -18 -22 -16 -7 -2 -6 -12 -9 22 68 99 98 76 63 69 82 90 97 101 99 92 84 89 105 120 123 99 58 23 19 38 50 36 5 -16 -18 -13 -12 -15 -21 -36 -57 -66 -55 -32 -24 -41 -72 -92 -92 -78 -58 -43 -43 -58 -79 -87 -74 -47 -25 -17 -9 14 46 68 69 63 67 80 85 75 64 75 99 111 102 88 86 88 87 81 84 103 125 131 120 104 94 97 105 108 98 86 89 107 123 117 96 84 85 88 84 77 79 90 103 106 97 78 59 43 31 17 -4 -22 -32 -36 -52 -77 -97 -94 -78 -66 -74 -93 -105 -102 -90 -79 -80 -88 -91 -76 -50 -25 -5 14 23 3 -43 -80 -74 -38 -17 -38 -78 -102 -101 -99 -113 -135 -148 -139 -126 -131 -153 -170 -166 -143 -121 -111 -103 -92 -88 -93 -97 -88 -74 +0 0 0 0 -1 -1 0 0 0 0 0 0 0 0 0 0 0 0 -1 -1 -1 -2 -3 -4 -4 -3 -2 -2 -4 -6 -7 -6 -5 -4 -5 -5 -5 -6 -7 -6 -5 -4 -3 -1 2 4 3 -2 -8 -11 -9 -2 5 6 2 -1 0 3 1 -7 -13 -12 -5 -2 -7 -16 -24 -27 -23 -17 -18 -28 -39 -40 -31 -25 -32 -51 -67 -74 -73 -68 -61 -51 -44 -48 -56 -57 -43 -26 -18 -23 -28 -20 3 31 53 63 64 66 84 112 127 113 89 90 131 177 185 150 103 74 75 95 118 132 
127 108 95 105 129 151 153 138 112 88 80 94 119 138 138 126 119 123 128 122 99 70 48 46 57 62 51 33 27 33 40 28 -6 -40 -36 18 91 128 110 74 76 124 166 155 101 51 34 34 29 10 -16 -56 -106 -152 -181 -199 -233 -292 -356 -389 -375 -341 -318 -322 -338 -350 -344 -329 -307 -284 -266 -263 -266 -264 -244 -217 -199 -202 -220 -236 -235 -218 -192 -173 -164 -155 -139 -115 -89 -55 -13 35 71 79 65 47 44 59 86 110 111 92 74 81 110 136 138 117 92 74 71 80 86 72 40 4 -17 -21 -15 -7 -2 -6 -11 -9 19 61 88 86 67 55 60 71 77 83 85 83 77 70 73 86 97 99 79 46 18 14 29 38 27 3 -13 -14 -10 -9 -11 -16 -26 -41 -46 -38 -22 -17 -28 -48 -60 -60 -50 -37 -27 -27 -36 -48 -52 -44 -27 -15 -10 -5 7 24 35 35 32 33 39 41 36 30 34 45 49 44 38 36 36 35 32 33 39 47 48 43 37 32 33 35 35 31 26 27 31 35 33 26 22 22 22 20 18 18 20 22 22 20 15 11 8 5 3 -1 -4 -6 -6 -8 -12 -14 -13 -10 -9 -9 -11 -12 -11 -9 -8 -8 -8 -8 -6 -4 -2 -1 0 1 0 -3 -4 -4 -2 -1 -2 -3 -3 -3 -3 -3 -3 -3 -3 -2 -2 -2 -2 -2 -1 -1 -1 -1 -1 -1 -1 -1 -1 0 +112 57 38 37 32 11 -17 -59 -112 -161 -191 -209 -244 -304 -370 -403 -388 -351 -327 -330 -346 -357 -350 -334 -311 -287 -269 -265 -268 -265 -245 -217 -199 -202 -220 -236 -235 -218 -192 -173 -164 -155 -139 -115 -89 -55 -13 36 72 80 66 48 45 60 88 112 113 94 76 83 113 140 143 122 96 78 75 84 91 77 43 5 -18 -22 -16 -7 -2 -6 -12 -9 22 68 99 98 76 63 69 82 90 97 101 99 92 84 89 105 120 123 99 58 23 19 38 50 36 5 -16 -18 -13 -12 -15 -21 -36 -57 -66 -55 -32 -24 -41 -72 -92 -92 -78 -58 -43 -43 -58 -79 -87 -74 -47 -25 -17 -9 14 46 68 69 63 67 80 85 75 64 75 99 111 102 88 86 88 87 81 84 103 125 131 120 104 94 97 105 108 98 86 89 107 123 117 96 84 85 88 84 77 79 90 103 106 97 78 59 43 31 17 -4 -22 -32 -36 -52 -77 -97 -94 -78 -66 -74 -93 -105 -102 -90 -79 -80 -88 -91 -76 -50 -25 -5 14 23 3 -43 -80 -74 -38 -17 -38 -78 -102 -101 -99 -113 -135 -148 -139 -126 -131 -153 -170 -166 -143 -121 -111 -103 -92 -88 -93 -97 -88 -74 -66 -69 -76 -88 -112 -142 -155 -137 -114 -112 -135 -150 -139 -119 -111 -114 -112 -96 -75 -56 
-43 -21 10 44 68 74 73 72 75 77 81 99 134 164 170 154 144 153 170 167 141 120 122 140 149 137 116 100 94 96 103 106 92 56 21 15 37 57 51 29 17 20 23 17 19 28 24 -6 -34 -22 21 54 51 32 33 50 55 27 -8 -20 1 29 35 19 11 38 83 111 111 104 105 106 100 105 126 143 134 103 69 42 27 34 59 69 40 4 -1 13 0 -43 -74 -74 -67 -65 -45 -13 -14 -56 -86 -71 -49 -53 -51 -18 -1 -41 -90 -85 -52 -55 -78 -67 -43 -64 -101 -78 -19 -29 -104 -126 -61 -16 -27 -1 83 92 -18 -91 -13 58 -48 -228 -265 -181 -157 -206 -201 -163 -220 -321 +0 0 0 0 0 0 -1 -1 -1 -1 -2 -2 -3 -4 -5 -6 -7 -7 -7 -8 -9 -11 -11 -12 -12 -12 -12 -13 -14 -14 -14 -14 -13 -14 -16 -18 -19 -19 -18 -17 -17 -16 -15 -13 -11 -7 -2 4 9 11 9 7 7 9 15 19 20 17 14 16 23 30 31 27 22 18 18 21 23 20 11 1 -6 -7 -5 -3 -1 -2 -5 -4 7 24 36 36 28 24 27 32 36 40 43 42 40 37 40 48 56 59 48 28 11 9 19 26 19 2 -9 -11 -8 -7 -9 -13 -22 -35 -41 -35 -21 -16 -27 -47 -61 -62 -53 -40 -30 -30 -41 -57 -63 -54 -35 -19 -13 -7 10 35 52 53 49 52 63 68 60 52 61 81 92 85 74 73 75 74 70 73 90 110 116 107 93 84 87 95 98 90 79 82 99 115 109 90 79 80 83 80 73 76 86 99 102 94 76 57 42 30 16 -4 -22 -32 -36 -52 -77 -97 -94 -78 -66 -74 -93 -105 -102 -90 -79 -80 -88 -91 -76 -50 -25 -5 13 22 2 -43 -80 -74 -38 -17 -38 -77 -100 -99 -97 -110 -131 -144 -134 -122 -126 -146 -162 -158 -135 -114 -104 -97 -86 -82 -86 -89 -81 -68 -60 -62 -68 -79 -99 -125 -136 -119 -99 -96 -115 -127 -117 -100 -92 -94 -92 -78 -61 -45 -35 -17 7 33 51 56 54 53 55 56 58 70 95 115 118 105 97 102 113 109 91 77 77 87 92 83 70 59 55 55 59 60 51 30 11 8 19 29 26 14 8 9 11 8 8 12 10 -3 -15 -10 8 22 20 12 12 18 20 9 -3 -7 0 9 11 6 3 11 24 32 31 28 28 27 25 26 30 33 30 22 14 8 5 6 11 12 7 0 -1 2 0 -7 -11 -11 -9 -9 -6 -2 -2 -7 -9 -7 -5 -5 -5 -2 -1 -3 -7 -6 -4 -4 -5 -4 -2 -3 -4 -3 -1 -1 -3 -4 -2 -1 -1 -1 1 1 -1 -1 -1 0 -1 -2 -1 -1 -1 -1 -1 -1 -1 0 +97 105 108 98 86 89 107 123 117 96 84 85 88 84 77 79 90 103 106 97 78 59 43 31 17 -4 -22 -32 -36 -52 -77 -97 -94 -78 -66 -74 -93 -105 -102 -90 -79 -80 -88 -91 -76 -50 -25 
-5 14 23 3 -43 -80 -74 -38 -17 -38 -78 -102 -101 -99 -113 -135 -148 -139 -126 -131 -153 -170 -166 -143 -121 -111 -103 -92 -88 -93 -97 -88 -74 -66 -69 -76 -88 -112 -142 -155 -137 -114 -112 -135 -150 -139 -119 -111 -114 -112 -96 -75 -56 -43 -21 10 44 68 74 73 72 75 77 81 99 134 164 170 154 144 153 170 167 141 120 122 140 149 137 116 100 94 96 103 106 92 56 21 15 37 57 51 29 17 20 23 17 19 28 24 -6 -34 -22 21 54 51 32 33 50 55 27 -8 -20 1 29 35 19 11 38 83 111 111 104 105 106 100 105 126 143 134 103 69 42 27 34 59 69 40 4 -1 13 0 -43 -74 -74 -67 -65 -45 -13 -14 -56 -86 -71 -49 -53 -51 -18 -1 -41 -90 -85 -52 -55 -78 -67 -43 -64 -101 -78 -19 -29 -104 -126 -61 -16 -27 -1 83 92 -18 -91 -13 58 -48 -228 -265 -181 -157 -206 -201 -163 -220 -321 -285 -151 -136 -217 -142 76 116 -92 -198 -20 116 -34 -191 -78 50 -112 -292 -108 159 23 -282 -179 162 96 -213 5 517 180 -835 -632 1322 2260 -501 -4922 -5689 -864 5103 6434 2759 -785 349 5242 8661 6383 -395 -5562 -3684 4077 10313 8727 662 -6824 -8723 -6075 -2639 -311 831 267 -2470 -5583 -5781 -2047 2740 4707 2725 -1238 -4507 -5605 -4439 -1892 503 1269 100 -1712 -2374 -1464 -571 -1570 -4108 -5500 -3823 -588 891 -180 -894 1269 4438 4256 -330 -5042 -4995 -341 4089 4399 1583 -377 784 3706 5630 5329 3577 1688 503 459 1609 3238 4146 3891 3305 3252 3264 2208 146 -1408 -1358 -372 169 -79 -350 -111 323 448 180 -307 -811 -999 -558 292 767 325 -733 -1677 -2154 -2313 -2377 -2401 -2389 -2363 -2317 -2307 -2496 -2892 -3062 -2438 -988 599 1507 1488 964 566 610 885 893 415 -176 -317 83 596 908 1156 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 2 2 1 1 1 1 0 -1 -1 -2 -2 -3 -5 -6 -6 -6 -5 -6 -8 -9 -10 -9 -8 -9 -10 -11 -9 -7 -4 -1 1 3 0 -7 -13 -13 -7 -4 -8 -15 -21 -21 -21 -25 -30 -34 -33 -31 -33 -40 -45 -45 -40 -35 -33 -31 -29 -28 -30 -32 -30 -26 -24 -25 -28 -33 -43 -55 -62 -56 -47 -47 -58 -66 -62 -54 -51 -53 -53 -47 -37 -28 -22 -11 5 23 36 40 40 40 42 44 47 58 80 99 104 95 90 97 109 108 92 79 82 95 102 95 81 70 67 69 75 78 68 42 15 11 28 44 39 22 13 16 18 13 15 23 
20 -6 -29 -19 17 46 44 27 28 44 48 24 -8 -19 0 26 32 17 10 35 77 103 104 98 99 100 95 100 121 137 129 99 67 40 26 33 57 67 39 3 -1 12 0 -43 -74 -74 -67 -65 -45 -13 -14 -56 -86 -71 -49 -53 -51 -18 -1 -41 -90 -85 -52 -55 -78 -67 -43 -64 -100 -77 -19 -29 -102 -124 -60 -16 -27 -1 79 88 -18 -87 -13 54 -46 -215 -249 -169 -146 -191 -185 -150 -201 -292 -258 -136 -122 -193 -126 66 101 -80 -171 -18 98 -29 -161 -66 41 -93 -239 -88 127 18 -223 -141 126 74 -163 3 388 133 -616 -462 956 1619 -356 -3456 -3955 -595 3472 4330 1836 -517 227 3372 5506 4009 -246 -3409 -2230 2436 6083 5079 380 -3867 -4875 -3348 -1434 -167 438 138 -1264 -2814 -2868 -1000 1316 2223 1266 -566 -2024 -2473 -1924 -806 210 520 40 -676 -920 -556 -213 -572 -1466 -1921 -1307 -197 291 -58 -280 386 1321 1237 -94 -1395 -1347 -90 1045 1094 383 -89 179 823 1215 1114 725 332 95 84 286 558 690 625 511 485 469 305 19 -180 -167 -44 18 -9 -36 -11 30 39 15 -25 -62 -72 -38 18 46 18 -39 -83 -99 -99 -95 -88 -81 -74 -66 -60 -58 -61 -58 -41 -15 7 16 14 7 3 3 3 3 1 -1 -1 0 0 0 0 +1 29 35 19 11 38 83 111 111 104 105 106 100 105 126 143 134 103 69 42 27 34 59 69 40 4 -1 13 0 -43 -74 -74 -67 -65 -45 -13 -14 -56 -86 -71 -49 -53 -51 -18 -1 -41 -90 -85 -52 -55 -78 -67 -43 -64 -101 -78 -19 -29 -104 -126 -61 -16 -27 -1 83 92 -18 -91 -13 58 -48 -228 -265 -181 -157 -206 -201 -163 -220 -321 -285 -151 -136 -217 -142 76 116 -92 -198 -20 116 -34 -191 -78 50 -112 -292 -108 159 23 -282 -179 162 96 -213 5 517 180 -835 -632 1322 2260 -501 -4922 -5689 -864 5103 6434 2759 -785 349 5242 8661 6383 -395 -5562 -3684 4077 10313 8727 662 -6824 -8723 -6075 -2639 -311 831 267 -2470 -5583 -5781 -2047 2740 4707 2725 -1238 -4507 -5605 -4439 -1892 503 1269 100 -1712 -2374 -1464 -571 -1570 -4108 -5500 -3823 -588 891 -180 -894 1269 4438 4256 -330 -5042 -4995 -341 4089 4399 1583 -377 784 3706 5630 5329 3577 1688 503 459 1609 3238 4146 3891 3305 3252 3264 2208 146 -1408 -1358 -372 169 -79 -350 -111 323 448 180 -307 -811 -999 -558 292 767 325 -733 -1677 -2154 -2313 
-2377 -2401 -2389 -2363 -2317 -2307 -2496 -2892 -3062 -2438 -988 599 1507 1488 964 566 610 885 893 415 -176 -317 83 596 908 1156 1517 1780 1655 1308 1207 1475 1746 1673 1295 804 261 -279 -609 -608 -519 -724 -1255 -1758 -1927 -1805 -1606 -1440 -1292 -1168 -1105 -1087 -1049 -981 -940 -975 -1083 -1228 -1350 -1376 -1248 -990 -720 -552 -500 -466 -343 -140 46 145 210 326 493 638 752 924 1222 1578 1870 2069 2237 2396 2473 2382 2140 1851 1593 1375 1176 1019 972 1036 1069 903 565 300 279 349 213 -156 -451 -448 -328 -421 -721 -890 -742 -526 -590 -929 -1218 -1256 -1188 -1266 -1518 -1725 -1673 -1354 -947 -658 -555 -542 -479 -361 -338 -523 -801 -894 -648 -222 69 101 46 131 336 440 332 176 200 359 374 107 -201 -179 230 672 775 546 298 247 274 163 -59 -148 10 197 133 -142 -295 -120 223 449 492 504 551 528 380 253 299 444 481 359 244 267 344 324 188 58 24 61 101 98 33 -72 -134 -57 +0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 2 2 1 1 0 0 0 1 2 1 0 -1 0 0 -3 -5 -5 -5 -5 -4 -1 -2 -5 -8 -7 -5 -6 -6 -3 -1 -6 -12 -12 -8 -8 -12 -11 -7 -11 -18 -14 -4 -6 -21 -26 -13 -4 -6 -1 19 22 -5 -24 -4 15 -14 -65 -78 -54 -48 -65 -65 -54 -74 -110 -100 -54 -50 -81 -54 29 45 -38 -82 -9 49 -15 -85 -36 22 -53 -138 -52 77 11 -143 -92 84 50 -115 2 284 100 -474 -363 769 1333 -300 -2979 -3487 -537 3205 4090 1774 -511 229 3489 5829 4343 -272 -3866 -2587 2891 7389 6313 483 -5034 -6494 -4564 -2000 -238 640 207 -1937 -4415 -4609 -1645 2218 3839 2239 -1025 -3757 -4705 -3752 -1610 430 1093 86 -1494 -2084 -1293 -507 -1402 -3687 -4962 -3467 -536 815 -166 -827 1178 4137 3984 -311 -4758 -4732 -325 3901 4211 1520 -364 757 3591 5470 5192 3494 1652 493 451 1585 3196 4099 3853 3278 3229 3245 2198 145 -1405 -1356 -372 168 -79 -350 -111 323 447 179 -307 -811 -998 -557 291 763 323 -728 -1664 -2134 -2288 -2347 -2366 -2350 -2319 -2269 -2254 -2433 -2811 -2968 -2356 -952 575 1442 1419 916 536 575 831 836 386 -164 -294 76 545 827 1048 1368 1597 1477 1161 1065 1294 1523 1450 1116 688 222 -236 -512 -507 -430 -595 -1024 -1424 -1548 -1439 -1270 
-1130 -1005 -901 -845 -824 -789 -731 -694 -713 -784 -880 -958 -967 -868 -681 -490 -372 -333 -307 -224 -91 29 91 130 199 298 381 443 537 701 894 1045 1140 1215 1282 1304 1237 1095 932 790 671 565 481 451 473 479 398 244 127 116 143 85 -62 -175 -170 -122 -154 -258 -311 -254 -176 -193 -297 -381 -383 -354 -369 -431 -478 -451 -356 -243 -164 -135 -128 -110 -81 -73 -110 -163 -176 -124 -41 12 17 7 21 52 65 47 24 26 45 45 12 -23 -20 23 65 72 48 25 19 20 11 -4 -10 0 11 7 -8 -14 -6 8 16 16 15 15 13 8 5 5 7 7 4 2 2 2 2 1 0 0 0 0 0 0 -1 -1 0 +-3823 -588 891 -180 -894 1269 4438 4256 -330 -5042 -4995 -341 4089 4399 1583 -377 784 3706 5630 5329 3577 1688 503 459 1609 3238 4146 3891 3305 3252 3264 2208 146 -1408 -1358 -372 169 -79 -350 -111 323 448 180 -307 -811 -999 -558 292 767 325 -733 -1677 -2154 -2313 -2377 -2401 -2389 -2363 -2317 -2307 -2496 -2892 -3062 -2438 -988 599 1507 1488 964 566 610 885 893 415 -176 -317 83 596 908 1156 1517 1780 1655 1308 1207 1475 1746 1673 1295 804 261 -279 -609 -608 -519 -724 -1255 -1758 -1927 -1805 -1606 -1440 -1292 -1168 -1105 -1087 -1049 -981 -940 -975 -1083 -1228 -1350 -1376 -1248 -990 -720 -552 -500 -466 -343 -140 46 145 210 326 493 638 752 924 1222 1578 1870 2069 2237 2396 2473 2382 2140 1851 1593 1375 1176 1019 972 1036 1069 903 565 300 279 349 213 -156 -451 -448 -328 -421 -721 -890 -742 -526 -590 -929 -1218 -1256 -1188 -1266 -1518 -1725 -1673 -1354 -947 -658 -555 -542 -479 -361 -338 -523 -801 -894 -648 -222 69 101 46 131 336 440 332 176 200 359 374 107 -201 -179 230 672 775 546 298 247 274 163 -59 -148 10 197 133 -142 -295 -120 223 449 492 504 551 528 380 253 299 444 481 359 244 267 344 324 188 58 24 61 101 98 33 -72 -134 -57 139 298 297 184 93 73 66 47 86 220 348 325 148 -52 -155 -175 -170 -153 -118 -94 -123 -182 -204 -182 -173 -206 -230 -206 -178 -219 -306 -340 -280 -197 -181 -230 -261 -209 -92 23 75 56 33 87 219 320 291 161 54 27 9 -85 -228 -321 -329 -312 -337 -410 -497 -580 -660 -730 -762 -711 -566 -397 -320 -377 -456 -399 -211 -69 -96 
-185 -140 50 207 206 118 87 133 175 186 209 271 346 397 411 375 283 177 131 157 189 162 97 61 90 180 305 414 444 379 286 240 235 208 132 58 28 34 55 74 80 61 28 12 9 -22 -86 -121 -82 -17 3 -15 -7 43 86 94 99 122 130 111 128 227 354 415 384 324 288 266 242 231 229 193 107 32 32 86 107 52 -36 -103 -146 -173 -190 -212 -285 -415 -514 +0 -1 0 -1 -2 2 11 14 -2 -29 -35 -3 38 49 20 -6 13 69 118 123 91 47 15 15 58 128 177 178 162 171 184 132 9 -96 -98 -29 13 -7 -32 -11 31 45 19 -35 -96 -123 -72 38 105 46 -110 -260 -347 -386 -410 -428 -441 -450 -456 -469 -523 -625 -681 -558 -233 145 374 380 253 152 168 250 259 123 -54 -99 26 194 303 395 529 634 602 486 457 571 689 673 531 336 111 -121 -269 -273 -238 -337 -593 -845 -941 -896 -810 -737 -672 -617 -592 -591 -579 -549 -533 -560 -631 -725 -807 -833 -765 -615 -453 -351 -322 -304 -226 -94 30 98 144 226 346 452 538 668 892 1163 1391 1554 1695 1832 1907 1852 1678 1463 1269 1104 952 831 798 857 891 757 477 255 238 300 184 -137 -396 -396 -292 -376 -648 -803 -673 -480 -541 -855 -1126 -1167 -1108 -1186 -1427 -1628 -1585 -1288 -904 -630 -534 -523 -463 -350 -329 -510 -783 -876 -636 -219 67 99 45 129 333 436 330 175 199 358 373 106 -201 -179 229 672 775 545 297 246 273 162 -59 -148 9 195 132 -141 -293 -119 220 442 483 494 539 515 370 245 289 429 463 344 233 254 327 306 177 54 22 56 93 90 30 -66 -123 -52 125 267 265 163 82 64 57 40 74 188 296 274 124 -44 -129 -144 -139 -124 -95 -75 -98 -143 -159 -141 -133 -157 -173 -154 -132 -160 -222 -244 -199 -139 -126 -159 -178 -141 -62 15 48 36 20 54 135 196 176 96 31 15 5 -49 -128 -177 -179 -168 -178 -214 -255 -293 -328 -357 -367 -336 -263 -182 -144 -167 -198 -170 -89 -29 -39 -74 -55 18 76 75 42 30 45 58 60 66 84 105 118 119 106 78 47 34 40 47 39 22 13 19 38 63 83 87 72 52 42 40 34 21 8 4 4 7 9 10 7 3 1 0 -3 -9 -12 -8 -2 0 -2 -1 2 5 5 5 6 6 5 5 8 12 13 11 9 7 6 5 4 3 2 1 0 0 0 0 0 -1 -1 -1 -1 -1 -1 -1 -1 0 +-742 -526 -590 -929 -1218 -1256 -1188 -1266 -1518 -1725 -1673 -1354 -947 -658 -555 -542 -479 -361 
-338 -523 -801 -894 -648 -222 69 101 46 131 336 440 332 176 200 359 374 107 -201 -179 230 672 775 546 298 247 274 163 -59 -148 10 197 133 -142 -295 -120 223 449 492 504 551 528 380 253 299 444 481 359 244 267 344 324 188 58 24 61 101 98 33 -72 -134 -57 139 298 297 184 93 73 66 47 86 220 348 325 148 -52 -155 -175 -170 -153 -118 -94 -123 -182 -204 -182 -173 -206 -230 -206 -178 -219 -306 -340 -280 -197 -181 -230 -261 -209 -92 23 75 56 33 87 219 320 291 161 54 27 9 -85 -228 -321 -329 -312 -337 -410 -497 -580 -660 -730 -762 -711 -566 -397 -320 -377 -456 -399 -211 -69 -96 -185 -140 50 207 206 118 87 133 175 186 209 271 346 397 411 375 283 177 131 157 189 162 97 61 90 180 305 414 444 379 286 240 235 208 132 58 28 34 55 74 80 61 28 12 9 -22 -86 -121 -82 -17 3 -15 -7 43 86 94 99 122 130 111 128 227 354 415 384 324 288 266 242 231 229 193 107 32 32 86 107 52 -36 -103 -146 -173 -190 -212 -285 -415 -514 -472 -297 -130 -91 -142 -157 -115 -114 -215 -334 -357 -318 -352 -479 -540 -397 -133 31 12 -55 -22 73 82 -32 -125 -69 74 132 68 30 144 323 355 187 24 74 279 376 234 15 -44 78 203 218 189 200 206 103 -88 -245 -300 -306 -337 -367 -327 -215 -114 -82 -87 -48 74 227 314 276 151 38 13 55 89 70 27 5 5 -4 -41 -93 -138 -159 -144 -92 -27 -4 -57 -141 -162 -85 25 59 -16 -122 -163 -129 -95 -111 -148 -134 -59 19 56 81 157 277 360 327 204 87 47 67 74 20 -79 -164 -182 -112 16 146 230 246 209 161 149 202 290 353 362 350 353 344 265 126 31 56 156 207 154 51 -22 -49 -63 -95 -146 -211 -277 -323 -332 -323 -330 -346 -329 -258 -180 -147 -144 -95 33 165 201 123 25 -11 +0 -1 -1 -1 -2 -3 -4 -5 -7 -10 -12 -11 -10 -8 -8 -8 -8 -7 -8 -13 -21 -26 -21 -8 2 3 1 6 16 23 18 10 12 24 26 8 -17 -16 20 62 75 55 32 27 32 19 -8 -20 1 28 19 -22 -48 -20 38 80 90 95 108 107 79 54 66 101 113 86 60 68 90 87 52 16 6 18 30 30 10 -24 -45 -20 48 106 108 68 35 28 26 18 35 91 148 140 65 -24 -71 -82 -81 -74 -58 -47 -62 -94 -106 -97 -93 -112 -127 -116 -101 -126 -179 -201 -168 -120 -111 -143 -164 -133 -60 14 49 37 22 59 150 222 204 
114 38 19 6 -63 -170 -242 -250 -239 -260 -319 -390 -459 -527 -587 -617 -580 -466 -329 -267 -317 -386 -340 -181 -60 -84 -162 -123 44 183 183 105 78 120 159 170 192 250 321 370 384 352 267 167 124 149 180 155 93 58 87 174 297 404 434 371 281 236 231 205 130 57 27 33 54 73 79 60 27 11 8 -22 -86 -121 -82 -17 2 -15 -7 42 85 93 98 121 128 109 126 224 348 408 376 317 281 259 235 223 221 185 102 30 30 81 101 49 -34 -97 -137 -161 -176 -196 -261 -379 -467 -426 -267 -117 -81 -126 -138 -101 -99 -186 -286 -304 -269 -296 -400 -447 -327 -109 25 9 -44 -18 57 63 -25 -96 -53 55 98 50 21 104 231 251 131 16 50 189 253 155 9 -29 50 129 136 117 122 124 61 -52 -143 -173 -174 -189 -203 -178 -116 -61 -43 -45 -25 36 110 150 130 70 17 5 24 38 29 11 2 2 -2 -16 -36 -52 -58 -52 -33 -10 -2 -19 -46 -51 -26 7 17 -5 -34 -44 -34 -25 -28 -36 -32 -14 4 12 16 31 54 68 60 36 14 7 10 11 2 -12 -23 -25 -15 1 17 25 26 21 15 13 17 24 28 27 25 23 21 15 7 1 2 7 8 6 1 -1 -2 -2 -3 -4 -5 -6 -6 -5 -5 -4 -4 -3 -2 -2 -1 -1 -1 0 0 0 0 0 0 +133 175 186 209 271 346 397 411 375 283 177 131 157 189 162 97 61 90 180 305 414 444 379 286 240 235 208 132 58 28 34 55 74 80 61 28 12 9 -22 -86 -121 -82 -17 3 -15 -7 43 86 94 99 122 130 111 128 227 354 415 384 324 288 266 242 231 229 193 107 32 32 86 107 52 -36 -103 -146 -173 -190 -212 -285 -415 -514 -472 -297 -130 -91 -142 -157 -115 -114 -215 -334 -357 -318 -352 -479 -540 -397 -133 31 12 -55 -22 73 82 -32 -125 -69 74 132 68 30 144 323 355 187 24 74 279 376 234 15 -44 78 203 218 189 200 206 103 -88 -245 -300 -306 -337 -367 -327 -215 -114 -82 -87 -48 74 227 314 276 151 38 13 55 89 70 27 5 5 -4 -41 -93 -138 -159 -144 -92 -27 -4 -57 -141 -162 -85 25 59 -16 -122 -163 -129 -95 -111 -148 -134 -59 19 56 81 157 277 360 327 204 87 47 67 74 20 -79 -164 -182 -112 16 146 230 246 209 161 149 202 290 353 362 350 353 344 265 126 31 56 156 207 154 51 -22 -49 -63 -95 -146 -211 -277 -323 -332 -323 -330 -346 -329 -258 -180 -147 -144 -95 33 165 201 123 25 -11 7 10 -34 -100 -163 -226 -270 -239 -110 
37 83 15 -62 -48 29 74 51 13 4 -3 -49 -113 -139 -104 -57 -38 -40 -35 -27 -46 -95 -132 -116 -72 -48 -61 -72 -64 -71 -130 -212 -253 -236 -217 -232 -247 -201 -96 8 56 49 36 70 163 259 261 139 -7 -36 66 177 192 143 127 160 176 141 97 85 91 101 128 171 171 106 30 28 83 114 115 165 296 414 400 251 87 25 84 197 256 188 15 -135 -149 -68 -25 -85 -145 -87 51 116 41 -80 -118 -64 1 21 12 7 3 -6 -9 -9 -44 -129 -210 -192 -60 72 70 -65 -178 -151 -55 -66 -234 -409 -418 -285 -188 -231 -323 -303 -143 41 126 99 29 -6 27 109 165 130 28 -41 -20 28 10 -68 -92 6 153 222 168 61 -16 -27 16 86 137 +0 0 0 0 0 0 1 1 1 1 1 1 1 2 2 1 1 1 3 7 10 12 11 9 8 9 8 6 2 1 1 3 4 5 4 2 0 0 -2 -9 -12 -9 -2 0 -2 -1 5 11 12 14 18 20 17 21 39 63 76 73 63 58 55 52 51 52 45 25 7 8 22 28 14 -11 -30 -44 -53 -60 -68 -94 -139 -176 -165 -106 -48 -34 -54 -61 -46 -46 -89 -140 -153 -138 -156 -216 -247 -185 -63 14 5 -28 -12 37 42 -17 -67 -38 40 73 38 17 83 190 212 113 14 45 175 239 150 9 -29 51 136 148 129 139 144 73 -64 -178 -220 -226 -251 -276 -248 -165 -88 -64 -69 -38 58 182 254 225 124 31 10 46 75 59 23 4 4 -4 -36 -83 -123 -142 -130 -83 -25 -4 -53 -130 -150 -79 23 55 -16 -116 -155 -123 -91 -107 -143 -130 -58 18 54 78 153 271 353 321 201 85 46 66 73 19 -79 -164 -182 -112 15 145 229 245 208 161 149 201 289 352 361 349 352 342 263 125 30 55 154 204 152 50 -22 -49 -62 -93 -143 -206 -269 -313 -320 -311 -316 -331 -313 -245 -170 -139 -135 -89 30 152 184 112 22 -10 6 8 -31 -89 -144 -199 -236 -208 -95 31 70 12 -53 -41 24 60 41 10 3 -3 -39 -89 -109 -81 -44 -29 -31 -27 -20 -34 -69 -95 -83 -51 -34 -42 -49 -44 -48 -86 -138 -163 -151 -137 -144 -152 -122 -58 4 32 28 20 39 89 140 139 73 -4 -19 33 87 93 68 59 74 80 63 42 36 38 42 52 68 67 41 11 10 30 40 40 56 98 135 127 78 26 7 24 55 70 50 3 -35 -38 -17 -6 -20 -33 -19 10 23 8 -16 -22 -12 0 3 1 1 0 -1 -2 -2 -6 -16 -25 -22 -7 7 6 -7 -16 -13 -5 -5 -17 -28 -27 -18 -11 -13 -16 -14 -7 1 4 3 0 -1 0 2 3 2 0 -1 -1 0 0 -1 -1 0 0 0 0 0 -1 -1 0 0 0 +-27 -4 -57 -141 -162 -85 25 59 -16 -122 -163 
-129 -95 -111 -148 -134 -59 19 56 81 157 277 360 327 204 87 47 67 74 20 -79 -164 -182 -112 16 146 230 246 209 161 149 202 290 353 362 350 353 344 265 126 31 56 156 207 154 51 -22 -49 -63 -95 -146 -211 -277 -323 -332 -323 -330 -346 -329 -258 -180 -147 -144 -95 33 165 201 123 25 -11 7 10 -34 -100 -163 -226 -270 -239 -110 37 83 15 -62 -48 29 74 51 13 4 -3 -49 -113 -139 -104 -57 -38 -40 -35 -27 -46 -95 -132 -116 -72 -48 -61 -72 -64 -71 -130 -212 -253 -236 -217 -232 -247 -201 -96 8 56 49 36 70 163 259 261 139 -7 -36 66 177 192 143 127 160 176 141 97 85 91 101 128 171 171 106 30 28 83 114 115 165 296 414 400 251 87 25 84 197 256 188 15 -135 -149 -68 -25 -85 -145 -87 51 116 41 -80 -118 -64 1 21 12 7 3 -6 -9 -9 -44 -129 -210 -192 -60 72 70 -65 -178 -151 -55 -66 -234 -409 -418 -285 -188 -231 -323 -303 -143 41 126 99 29 -6 27 109 165 130 28 -41 -20 28 10 -68 -92 6 153 222 168 61 -16 -27 16 86 137 132 82 46 66 100 73 -26 -124 -153 -121 -82 -72 -92 -120 -120 -53 79 206 221 83 -113 -222 -169 -15 99 88 -1 -45 26 141 166 61 -62 -62 78 222 233 119 5 14 159 346 463 454 349 233 183 211 247 204 68 -68 -90 3 78 12 -161 -276 -237 -134 -122 -217 -296 -258 -159 -113 -146 -184 -160 -89 -23 6 -5 -31 -34 6 56 55 0 -33 23 135 198 157 47 -46 -75 -44 -8 -19 -102 -203 -239 -190 -122 -104 -116 -80 31 147 175 94 -33 -117 -122 -81 -59 -86 -125 -121 -68 -17 -8 -37 -69 -89 -90 -75 -62 -83 -141 -181 -151 -80 -48 -80 -113 -78 3 51 27 -29 -54 -30 20 61 67 48 48 104 193 241 204 113 52 56 91 93 39 -43 -115 -145 -117 -38 61 122 100 12 -63 -55 +0 -1 -1 -1 -1 -1 0 0 -1 -1 -2 -2 -1 -2 -2 -2 -1 0 1 1 4 7 11 11 7 3 2 3 3 1 -5 -10 -12 -8 1 11 18 20 18 15 14 20 31 39 42 42 44 45 36 18 4 8 25 34 26 9 -5 -10 -13 -20 -31 -46 -62 -74 -79 -79 -83 -89 -87 -70 -50 -42 -42 -29 10 51 64 40 8 -4 2 3 -13 -38 -62 -88 -107 -97 -46 15 35 6 -28 -22 13 34 24 6 1 -2 -25 -58 -73 -55 -31 -21 -23 -20 -16 -27 -56 -78 -70 -44 -30 -38 -46 -41 -46 -85 -140 -169 -159 -148 -160 -172 -142 -69 5 40 35 26 52 122 196 199 107 -6 -29 52 141 
154 115 103 131 145 117 81 71 77 86 110 148 149 93 26 24 74 102 103 149 269 379 368 232 80 23 78 185 241 178 14 -129 -143 -66 -25 -83 -141 -85 49 113 40 -79 -117 -64 0 20 11 6 2 -6 -9 -9 -44 -129 -210 -192 -60 71 70 -65 -178 -151 -55 -66 -234 -408 -417 -284 -187 -230 -321 -301 -142 40 124 97 28 -6 26 106 160 125 27 -40 -20 26 9 -65 -88 5 143 207 156 56 -15 -25 14 78 124 119 73 41 58 88 64 -23 -108 -132 -104 -70 -61 -78 -101 -100 -44 64 166 177 66 -90 -175 -132 -12 75 66 -1 -34 19 102 120 43 -44 -44 54 152 158 80 3 9 103 222 294 285 216 142 110 126 145 118 39 -39 -51 1 42 6 -85 -144 -122 -68 -61 -106 -143 -122 -74 -52 -66 -82 -70 -38 -10 2 -3 -13 -14 2 20 20 0 -12 7 45 64 50 14 -15 -23 -13 -3 -6 -28 -54 -62 -48 -30 -25 -27 -18 6 30 35 18 -7 -22 -22 -14 -10 -14 -20 -19 -10 -3 -2 -5 -9 -11 -11 -9 -7 -9 -14 -17 -13 -7 -4 -6 -8 -5 0 2 1 -2 -3 -2 0 2 2 1 1 2 4 5 3 1 0 0 1 0 0 -1 -1 -1 -1 -1 0 0 0 0 -1 0 +165 296 414 400 251 87 25 84 197 256 188 15 -135 -149 -68 -25 -85 -145 -87 51 116 41 -80 -118 -64 1 21 12 7 3 -6 -9 -9 -44 -129 -210 -192 -60 72 70 -65 -178 -151 -55 -66 -234 -409 -418 -285 -188 -231 -323 -303 -143 41 126 99 29 -6 27 109 165 130 28 -41 -20 28 10 -68 -92 6 153 222 168 61 -16 -27 16 86 137 132 82 46 66 100 73 -26 -124 -153 -121 -82 -72 -92 -120 -120 -53 79 206 221 83 -113 -222 -169 -15 99 88 -1 -45 26 141 166 61 -62 -62 78 222 233 119 5 14 159 346 463 454 349 233 183 211 247 204 68 -68 -90 3 78 12 -161 -276 -237 -134 -122 -217 -296 -258 -159 -113 -146 -184 -160 -89 -23 6 -5 -31 -34 6 56 55 0 -33 23 135 198 157 47 -46 -75 -44 -8 -19 -102 -203 -239 -190 -122 -104 -116 -80 31 147 175 94 -33 -117 -122 -81 -59 -86 -125 -121 -68 -17 -8 -37 -69 -89 -90 -75 -62 -83 -141 -181 -151 -80 -48 -80 -113 -78 3 51 27 -29 -54 -30 20 61 67 48 48 104 193 241 204 113 52 56 91 93 39 -43 -115 -145 -117 -38 61 122 100 12 -63 -55 39 143 186 163 128 125 146 163 163 163 171 173 149 107 74 62 42 -11 -74 -79 0 100 126 63 -9 -13 44 107 131 111 47 -48 -138 -175 -160 -145 -162 -173 -131 
-42 15 -21 -128 -217 -207 -107 -10 5 -36 -22 89 188 122 -98 -267 -214 -15 97 -5 -193 -251 -122 52 107 33 -49 -25 97 219 259 213 136 88 81 90 78 35 -19 -64 -100 -138 -157 -126 -52 -4 -26 -85 -102 -58 -6 12 12 23 22 -28 -101 -108 -23 72 80 -9 -116 -169 -164 -143 -128 -113 -82 -33 11 31 33 40 70 106 117 103 82 65 44 13 -9 -7 10 27 47 83 122 138 122 93 59 11 -41 -42 46 177 236 163 18 -73 -58 -3 -2 -71 -144 -158 -115 -57 -23 -10 0 22 48 57 35 -2 -31 -22 29 +0 0 0 0 0 0 0 0 0 1 1 0 -2 -2 -1 -1 -2 -3 -2 1 2 1 -3 -4 -3 0 0 0 0 0 -1 -1 -1 -3 -10 -16 -16 -6 6 6 -7 -19 -17 -7 -8 -29 -53 -56 -40 -28 -35 -50 -49 -24 7 22 18 5 -2 5 22 35 28 6 -10 -5 6 2 -18 -25 1 43 64 50 18 -5 -9 5 28 46 46 29 16 24 37 28 -11 -50 -63 -51 -35 -32 -41 -54 -55 -25 37 98 107 41 -57 -114 -88 -8 53 47 -1 -26 14 80 96 35 -38 -38 47 137 146 75 3 9 104 230 311 308 240 161 128 149 176 147 49 -51 -67 2 59 9 -125 -215 -186 -106 -98 -175 -240 -211 -131 -94 -122 -155 -136 -76 -20 5 -5 -28 -30 5 49 49 0 -30 20 123 181 144 43 -43 -70 -42 -8 -18 -97 -193 -229 -182 -118 -101 -113 -78 30 143 170 92 -33 -116 -121 -80 -59 -86 -124 -121 -68 -17 -8 -37 -69 -89 -90 -75 -62 -83 -141 -181 -151 -80 -48 -80 -113 -78 2 50 26 -29 -54 -30 19 60 65 47 46 101 188 234 197 109 50 53 87 88 37 -41 -109 -137 -110 -36 56 112 92 10 -58 -50 35 128 166 144 113 109 127 141 140 139 145 146 125 89 61 50 34 -9 -60 -63 0 78 98 48 -7 -10 33 79 96 81 33 -35 -98 -123 -112 -100 -111 -117 -88 -28 9 -14 -82 -137 -129 -66 -7 2 -22 -13 51 106 68 -55 -146 -115 -8 50 -3 -98 -125 -60 24 50 15 -23 -12 42 94 110 89 55 35 31 34 29 13 -7 -23 -35 -48 -53 -42 -17 -2 -8 -26 -30 -17 -2 3 3 5 5 -7 -24 -25 -6 15 16 -2 -23 -33 -31 -26 -23 -19 -14 -6 1 4 4 5 8 12 13 11 8 6 4 1 -1 -1 0 2 3 5 7 8 6 4 2 0 -2 -2 1 5 7 4 0 -2 -2 -1 -1 -2 -2 -2 -2 -1 -1 -1 0 0 0 0 0 -1 -1 -1 0 +23 135 198 157 47 -46 -75 -44 -8 -19 -102 -203 -239 -190 -122 -104 -116 -80 31 147 175 94 -33 -117 -122 -81 -59 -86 -125 -121 -68 -17 -8 -37 -69 -89 -90 -75 -62 -83 -141 -181 -151 -80 -48 -80 
-113 -78 3 51 27 -29 -54 -30 20 61 67 48 48 104 193 241 204 113 52 56 91 93 39 -43 -115 -145 -117 -38 61 122 100 12 -63 -55 39 143 186 163 128 125 146 163 163 163 171 173 149 107 74 62 42 -11 -74 -79 0 100 126 63 -9 -13 44 107 131 111 47 -48 -138 -175 -160 -145 -162 -173 -131 -42 15 -21 -128 -217 -207 -107 -10 5 -36 -22 89 188 122 -98 -267 -214 -15 97 -5 -193 -251 -122 52 107 33 -49 -25 97 219 259 213 136 88 81 90 78 35 -19 -64 -100 -138 -157 -126 -52 -4 -26 -85 -102 -58 -6 12 12 23 22 -28 -101 -108 -23 72 80 -9 -116 -169 -164 -143 -128 -113 -82 -33 11 31 33 40 70 106 117 103 82 65 44 13 -9 -7 10 27 47 83 122 138 122 93 59 11 -41 -42 46 177 236 163 18 -73 -58 -3 -2 -71 -144 -158 -115 -57 -23 -10 0 22 48 57 35 -2 -31 -22 29 101 140 92 -39 -162 -171 -60 55 65 -10 -47 35 175 252 210 100 10 -7 27 51 17 -63 -122 -113 -61 -30 -36 -36 6 68 103 86 37 -18 -57 -69 -62 -45 -23 14 59 73 25 -66 -139 -169 -171 -159 -118 -39 40 77 91 144 240 292 226 91 15 67 174 205 116 -9 -64 -34 1 -24 -75 -80 -29 -3 -68 -182 -232 -164 -54 -15 -72 -138 -135 -75 -45 -80 -137 -153 -132 -120 -138 -142 -93 -15 15 -27 -86 -89 -31 20 19 -12 -17 25 69 65 13 -46 -77 -69 -38 -2 33 62 81 92 101 113 130 139 124 86 42 12 4 18 47 84 98 61 -16 -86 -107 -86 -63 -48 -26 15 49 38 -13 -55 -50 -7 41 65 65 50 27 0 -17 -20 -8 12 28 33 27 31 59 101 125 105 +0 0 0 0 0 -1 -1 -1 -1 -1 -1 -2 -3 -3 -2 -2 -2 -2 0 3 4 2 -2 -4 -5 -4 -3 -4 -7 -7 -4 -2 -1 -3 -5 -7 -8 -7 -6 -8 -14 -19 -17 -9 -6 -10 -15 -11 0 7 4 -5 -9 -5 3 10 12 9 9 21 40 52 45 25 12 13 22 23 10 -12 -32 -42 -35 -12 18 38 31 3 -22 -19 13 51 67 60 48 48 57 65 66 68 72 74 65 48 33 28 19 -6 -37 -40 0 51 65 33 -5 -8 24 59 74 63 27 -29 -83 -106 -99 -90 -102 -110 -85 -28 9 -14 -87 -148 -143 -75 -8 3 -26 -16 65 138 90 -74 -203 -164 -12 75 -4 -153 -201 -98 42 87 27 -41 -21 81 185 220 182 117 76 70 78 68 31 -17 -58 -91 -126 -144 -116 -48 -4 -25 -80 -96 -55 -6 11 11 21 21 -27 -98 -105 -23 69 77 -9 -114 -166 -162 -141 -127 -112 -82 -33 10 30 32 39 69 105 116 102 81 64 44 
13 -9 -7 9 26 46 82 121 137 121 92 58 10 -41 -42 45 174 231 159 17 -72 -57 -3 -2 -69 -139 -152 -110 -55 -22 -10 0 20 44 52 32 -2 -29 -21 26 91 125 82 -35 -144 -151 -53 47 56 -9 -40 29 146 210 173 82 8 -6 21 40 13 -50 -95 -88 -47 -23 -28 -27 4 49 74 61 26 -13 -40 -48 -43 -31 -16 9 38 46 15 -42 -87 -104 -104 -96 -70 -23 22 43 50 79 130 156 119 47 7 33 86 100 55 -5 -30 -16 0 -11 -33 -35 -13 -2 -28 -72 -90 -63 -21 -6 -26 -49 -47 -26 -15 -26 -43 -47 -40 -35 -40 -40 -26 -4 3 -7 -21 -21 -8 4 4 -3 -4 4 13 11 2 -8 -13 -12 -6 -1 4 8 10 11 12 13 14 14 12 8 3 1 0 1 3 6 6 3 -1 -5 -6 -5 -3 -3 -2 0 1 1 -1 -2 -2 -1 0 1 0 0 0 0 -1 -1 -1 0 0 0 0 0 0 0 0 0 +-138 -157 -126 -52 -4 -26 -85 -102 -58 -6 12 12 23 22 -28 -101 -108 -23 72 80 -9 -116 -169 -164 -143 -128 -113 -82 -33 11 31 33 40 70 106 117 103 82 65 44 13 -9 -7 10 27 47 83 122 138 122 93 59 11 -41 -42 46 177 236 163 18 -73 -58 -3 -2 -71 -144 -158 -115 -57 -23 -10 0 22 48 57 35 -2 -31 -22 29 101 140 92 -39 -162 -171 -60 55 65 -10 -47 35 175 252 210 100 10 -7 27 51 17 -63 -122 -113 -61 -30 -36 -36 6 68 103 86 37 -18 -57 -69 -62 -45 -23 14 59 73 25 -66 -139 -169 -171 -159 -118 -39 40 77 91 144 240 292 226 91 15 67 174 205 116 -9 -64 -34 1 -24 -75 -80 -29 -3 -68 -182 -232 -164 -54 -15 -72 -138 -135 -75 -45 -80 -137 -153 -132 -120 -138 -142 -93 -15 15 -27 -86 -89 -31 20 19 -12 -17 25 69 65 13 -46 -77 -69 -38 -2 33 62 81 92 101 113 130 139 124 86 42 12 4 18 47 84 98 61 -16 -86 -107 -86 -63 -48 -26 15 49 38 -13 -55 -50 -7 41 65 65 50 27 0 -17 -20 -8 12 28 33 27 31 59 101 125 105 43 -33 -80 -71 -7 63 84 38 -22 -25 36 89 66 -18 -85 -84 -36 -3 -13 -44 -59 -53 -49 -67 -95 -110 -99 -82 -81 -98 -113 -109 -98 -97 -109 -120 -119 -92 -41 12 39 29 6 2 21 44 59 75 87 80 51 41 75 122 122 56 -12 -15 49 113 115 62 17 29 89 145 150 115 96 129 194 233 211 146 88 62 62 61 56 51 54 46 10 -54 -115 -148 -153 -146 -131 -103 -66 -47 -67 -114 -156 -173 -176 -178 -175 -154 -120 -93 -90 -95 -73 -18 35 47 23 9 20 33 22 18 72 173 231 185 86 34 61 100 83 31 5 
26 51 52 50 69 93 86 41 -2 -13 8 33 43 24 -20 -62 -67 -32 -1 -18 -80 -133 -140 -112 -74 -47 -42 -49 -52 -22 33 64 34 -33 -66 +0 -1 -1 -1 -1 -1 -1 -1 -1 -1 0 0 0 0 -1 -2 -2 -1 1 1 -1 -4 -6 -6 -6 -6 -5 -4 -2 0 1 1 2 4 7 8 8 6 5 4 1 -1 -1 1 3 5 10 16 19 17 13 9 1 -7 -8 8 32 44 32 3 -16 -13 -1 -1 -17 -35 -40 -30 -15 -7 -3 0 6 14 17 10 -1 -11 -8 9 35 49 33 -15 -62 -67 -24 22 26 -5 -21 15 77 113 95 46 4 -4 13 25 8 -33 -64 -60 -33 -17 -20 -21 3 39 59 50 22 -11 -35 -43 -39 -29 -15 9 38 48 16 -45 -96 -118 -121 -113 -85 -29 29 56 67 108 181 223 174 70 11 52 138 164 93 -8 -53 -29 0 -21 -64 -69 -25 -3 -59 -159 -204 -145 -48 -14 -65 -125 -123 -69 -42 -74 -127 -143 -124 -113 -130 -134 -89 -15 14 -26 -83 -86 -30 19 18 -12 -17 24 67 63 12 -46 -77 -69 -38 -2 32 61 80 91 100 112 129 138 123 86 42 11 3 17 46 83 97 60 -16 -86 -107 -86 -63 -48 -26 14 48 37 -13 -54 -49 -7 39 62 62 48 25 0 -17 -19 -8 11 26 30 25 28 54 92 113 95 38 -30 -72 -64 -7 55 73 32 -19 -22 30 75 55 -16 -71 -70 -30 -3 -11 -36 -47 -42 -39 -52 -73 -84 -75 -62 -60 -72 -82 -79 -70 -69 -76 -83 -81 -62 -28 7 25 18 3 1 13 26 35 44 51 46 29 23 41 67 66 29 -7 -8 25 56 57 30 8 13 41 66 67 50 41 54 81 95 84 57 34 23 23 22 19 17 18 15 3 -18 -36 -46 -46 -43 -38 -29 -18 -13 -18 -29 -38 -41 -41 -40 -38 -33 -25 -19 -18 -18 -14 -4 5 7 3 1 2 4 2 2 8 20 25 19 8 3 5 8 6 2 0 1 3 3 3 3 4 4 1 -1 -1 0 1 1 0 -1 -2 -2 -1 -1 -1 -2 -2 -2 -1 -1 -1 -1 -1 -1 -1 0 0 0 -1 0 \ No newline at end of file diff --git a/python/tflite_micro/signal/ops/window_op.py b/python/tflite_micro/signal/ops/window_op.py new file mode 100644 index 0000000..b3a2361 --- /dev/null +++ b/python/tflite_micro/signal/ops/window_op.py @@ -0,0 +1,85 @@ +# Copyright 2022 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================== +"""Use window op in python.""" + +import numpy as np +import tensorflow as tf +from tflite_micro.python.tflite_micro.signal.utils import util + +gen_window_op = util.load_custom_op('window') + + +def hann_window_weights(window_length, shift, dtype=np.int16): + arg = np.pi * 2 / window_length + index = np.arange(window_length) + weights = (0.5 - (0.5 * np.cos(arg * (index + 0.5)))) + if dtype == np.int16: + weights = np.round(weights * (2**shift)) + return weights.astype(dtype=dtype) + + +# We can calculate the result of sqrt(0.5-cos(x)/2) without sqrt as sin(x/2). +def square_root_hann_window_weights(window_length, shift, dtype=np.int16): + arg_half = np.pi / window_length + index = np.arange(window_length) + weights = np.sin(arg_half * (index + 0.5)) + if dtype == np.int16: + weights = np.round(weights * (2**shift)) + return weights.astype(dtype=dtype) + + +# In the so-called weighted overlap add (WOLA) method, a second window would +# be applied after the inverse FFT and prior to the final overlap-add to +# generate the output signal. This second window is known as a synthesis +# window and it is commonly chosen to be the same as the first window (which +# is applied before the FFT and is known as an analysis window). The pair +# of windows need to be normalized such that they together meet the constant +# WOLA (CWOLA) constraint. So if a signal goes through this procedure, it can +# be reconstructed with little distortion. 
For the square-root Hann window +# implemented above, the normalizing constant is given by +# sqrt((window_length / (2 * window_step)). +def square_root_hann_cwola_window_weights(window_length, + window_step, + shift, + dtype=np.int16): + arg_half = np.pi / window_length + norm = np.sqrt(window_length / (2.0 * window_step)) + index = np.arange(window_length) + weights = np.sin(arg_half * (index + 0.5)) / norm + if dtype == np.int16: + weights = np.round(weights * (2**shift)) + return weights.astype(dtype=dtype) + + +def _window_wrapper(window_fn, default_name): + """Wrapper around gen_window_op.window*.""" + + def _window(input_tensor, weight_tensor, shift, name=default_name): + with tf.name_scope(name) as name: + input_tensor = tf.convert_to_tensor(input_tensor, dtype=np.int16) + input_dim_list = input_tensor.shape.as_list() + weight_tensor = tf.convert_to_tensor(weight_tensor) + weight_dim_list = weight_tensor.shape.as_list() + if input_dim_list[-1] != weight_dim_list[0]: + raise ValueError("Innermost input dimension must match weights size") + return window_fn(input_tensor, weight_tensor, shift=shift, name=name) + + return _window + + +# TODO(b/286250473): change back name to "window" after name clash resolved +window = _window_wrapper(gen_window_op.signal_window, "signal_window") + +tf.no_gradient("signal_window") diff --git a/python/tflite_micro/signal/ops/window_op_test.py b/python/tflite_micro/signal/ops/window_op_test.py new file mode 100644 index 0000000..b9fc5bd --- /dev/null +++ b/python/tflite_micro/signal/ops/window_op_test.py @@ -0,0 +1,256 @@ +# Copyright 2021 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================== +"""Tests for window ops.""" + +import os + +import numpy as np +import tensorflow as tf + +from tensorflow.python.platform import resource_loader +from tflite_micro.python.tflite_micro.signal.ops import window_op +from tflite_micro.python.tflite_micro.signal.utils import util + + +class WindowOpTest(tf.test.TestCase): + + _PREFIX_PATH = resource_loader.get_path_to_datafile('') + + def GetResource(self, filepath): + full_path = os.path.join(self._PREFIX_PATH, filepath) + with open(full_path, 'rt') as f: + file_text = f.read() + return file_text + + def testWeights(self): + expected_weights_hann_length_400_shift_12 = [ + 0, 1, 2, 3, 5, 8, 11, 14, 18, 23, 28, 33, 39, 46, 53, 60, 68, 77, 86, + 95, 105, 116, 127, 138, 150, 162, 175, 188, 202, 216, 231, 246, 261, + 277, 293, 310, 327, 345, 363, 382, 401, 420, 440, 460, 480, 501, 522, + 544, 566, 589, 611, 634, 658, 682, 706, 730, 755, 780, 806, 831, 857, + 884, 910, 937, 964, 992, 1019, 1047, 1075, 1104, 1133, 1161, 1191, + 1220, 1249, 1279, 1309, 1339, 1369, 1400, 1430, 1461, 1492, 1523, 1554, + 1586, 1617, 1648, 1680, 1712, 1744, 1775, 1807, 1839, 1871, 1903, 1935, + 1968, 2000, 2032, 2064, 2096, 2128, 2161, 2193, 2225, 2257, 2289, 2321, + 2352, 2384, 2416, 2448, 2479, 2510, 2542, 2573, 2604, 2635, 2666, 2696, + 2727, 2757, 2787, 2817, 2847, 2876, 2905, 2935, 2963, 2992, 3021, 3049, + 3077, 3104, 3132, 3159, 3186, 3212, 3239, 3265, 3290, 3316, 3341, 3366, + 3390, 3414, 3438, 3462, 3485, 3507, 3530, 3552, 3574, 3595, 3616, 3636, + 
3656, 3676, 3695, 3714, 3733, 3751, 3769, 3786, 3803, 3819, 3835, 3850, + 3865, 3880, 3894, 3908, 3921, 3934, 3946, 3958, 3969, 3980, 3991, 4001, + 4010, 4019, 4028, 4036, 4043, 4050, 4057, 4063, 4068, 4073, 4078, 4082, + 4085, 4088, 4091, 4093, 4094, 4095, 4096, 4096, 4095, 4094, 4093, 4091, + 4088, 4085, 4082, 4078, 4073, 4068, 4063, 4057, 4050, 4043, 4036, 4028, + 4019, 4010, 4001, 3991, 3980, 3969, 3958, 3946, 3934, 3921, 3908, 3894, + 3880, 3865, 3850, 3835, 3819, 3803, 3786, 3769, 3751, 3733, 3714, 3695, + 3676, 3656, 3636, 3616, 3595, 3574, 3552, 3530, 3507, 3485, 3462, 3438, + 3414, 3390, 3366, 3341, 3316, 3290, 3265, 3239, 3212, 3186, 3159, 3132, + 3104, 3077, 3049, 3021, 2992, 2963, 2935, 2905, 2876, 2847, 2817, 2787, + 2757, 2727, 2696, 2666, 2635, 2604, 2573, 2542, 2510, 2479, 2448, 2416, + 2384, 2352, 2321, 2289, 2257, 2225, 2193, 2161, 2128, 2096, 2064, 2032, + 2000, 1968, 1935, 1903, 1871, 1839, 1807, 1775, 1744, 1712, 1680, 1648, + 1617, 1586, 1554, 1523, 1492, 1461, 1430, 1400, 1369, 1339, 1309, 1279, + 1249, 1220, 1191, 1161, 1133, 1104, 1075, 1047, 1019, 992, 964, 937, + 910, 884, 857, 831, 806, 780, 755, 730, 706, 682, 658, 634, 611, 589, + 566, 544, 522, 501, 480, 460, 440, 420, 401, 382, 363, 345, 327, 310, + 293, 277, 261, 246, 231, 216, 202, 188, 175, 162, 150, 138, 127, 116, + 105, 95, 86, 77, 68, 60, 53, 46, 39, 33, 28, 23, 18, 14, 11, 8, 5, 3, + 2, 1, 0 + ] + weights = window_op.hann_window_weights(400, 12) + self.assertAllEqual(weights, expected_weights_hann_length_400_shift_12) + expected_weights_squart_root_hann_cwola_length_256_shift_12 = [ + 25, 75, 126, 176, 226, 276, 326, 376, 426, 476, 526, 576, 626, 675, + 725, 774, 824, 873, 922, 971, 1020, 1068, 1117, 1165, 1213, 1261, 1309, + 1356, 1404, 1451, 1498, 1544, 1591, 1637, 1683, 1729, 1774, 1819, 1864, + 1909, 1953, 1997, 2041, 2084, 2127, 2170, 2213, 2255, 2296, 2338, 2379, + 2420, 2460, 2500, 2540, 2579, 2618, 2656, 2694, 2732, 2769, 2806, 2843, + 2878, 2914, 2949, 2984, 3018, 
3052, 3085, 3118, 3150, 3182, 3214, 3244, + 3275, 3305, 3334, 3363, 3392, 3420, 3447, 3474, 3500, 3526, 3551, 3576, + 3600, 3624, 3647, 3670, 3692, 3713, 3734, 3755, 3775, 3794, 3812, 3831, + 3848, 3865, 3881, 3897, 3912, 3927, 3941, 3954, 3967, 3979, 3991, 4002, + 4012, 4022, 4031, 4040, 4048, 4055, 4062, 4068, 4074, 4079, 4083, 4087, + 4090, 4092, 4094, 4095, 4096, 4096, 4095, 4094, 4092, 4090, 4087, 4083, + 4079, 4074, 4068, 4062, 4055, 4048, 4040, 4031, 4022, 4012, 4002, 3991, + 3979, 3967, 3954, 3941, 3927, 3912, 3897, 3881, 3865, 3848, 3831, 3812, + 3794, 3775, 3755, 3734, 3713, 3692, 3670, 3647, 3624, 3600, 3576, 3551, + 3526, 3500, 3474, 3447, 3420, 3392, 3363, 3334, 3305, 3275, 3244, 3214, + 3182, 3150, 3118, 3085, 3052, 3018, 2984, 2949, 2914, 2878, 2843, 2806, + 2769, 2732, 2694, 2656, 2618, 2579, 2540, 2500, 2460, 2420, 2379, 2338, + 2296, 2255, 2213, 2170, 2127, 2084, 2041, 1997, 1953, 1909, 1864, 1819, + 1774, 1729, 1683, 1637, 1591, 1544, 1498, 1451, 1404, 1356, 1309, 1261, + 1213, 1165, 1117, 1068, 1020, 971, 922, 873, 824, 774, 725, 675, 626, + 576, 526, 476, 426, 376, 326, 276, 226, 176, 126, 75, 25 + ] + weights = window_op.square_root_hann_cwola_window_weights(256, 128, 12) + self.assertAllEqual( + weights, expected_weights_squart_root_hann_cwola_length_256_shift_12) + + def SingleWindowTest(self, filename): + lines = self.GetResource(filename).splitlines() + args = lines[0].split() + window_type = args[0] + dtype = args[1] + shift = int(args[2]) + func = tf.function(window_op.window) + input_size = len(lines[1].split()) + self.assertEqual(dtype, 'int16') + self.assertEqual(window_type, 'hann') + weights = window_op.hann_window_weights(input_size, shift) + concrete_function = func.get_concrete_function( + tf.TensorSpec(input_size, dtype=tf.int16), + tf.TensorSpec(input_size, dtype=tf.int16), + shift=shift) + # TODO(b/286252893): make test more robust (vs scipy) + interpreter = util.get_tflm_interpreter(concrete_function, func) + # Skip line 0, 
which contains the configuration params. + # Read lines in pairs + i = 1 + while i < len(lines): + in_frame = np.array([int(j) for j in lines[i].split()], dtype='int16') + out_frame_exp = [int(j) for j in lines[i + 1].split()] + # TFLite + interpreter.set_input(in_frame, 0) + interpreter.set_input(weights, 1) + interpreter.invoke() + out_frame = interpreter.get_output(0) + self.assertAllEqual(out_frame_exp, out_frame) + # TF + out_frame = self.evaluate( + window_op.window(in_frame, weights, shift=shift)) + self.assertAllEqual(out_frame_exp, out_frame) + i += 2 + + def RunMultiDimWindow(self, shift, dtype, in_frames, weights, + out_frames_exp): + func = tf.function(window_op.window) + # TFLite + concrete_function = func.get_concrete_function( + tf.TensorSpec(np.shape(in_frames), dtype=dtype), + tf.TensorSpec(np.shape(weights), dtype=dtype), + shift=shift) + interpreter = util.get_tflm_interpreter(concrete_function, func) + interpreter.set_input(in_frames, 0) + interpreter.set_input(weights, 1) + interpreter.invoke() + out_frame = interpreter.get_output(0) + self.assertAllEqual(out_frames_exp, out_frame) + # TF + out_frame = self.evaluate(window_op.window(in_frames, weights, + shift=shift)) + self.assertAllEqual(out_frames_exp, out_frame) + + def MultiDimWindowTest(self, filename): + lines = self.GetResource(filename).splitlines() + args = lines[0].split() + window_type = args[0] + dtype = args[1] + shift = int(args[2]) + input_size = len(lines[1].split()) + self.assertEqual(dtype, 'int16') + self.assertEqual(window_type, 'hann') + weights = window_op.hann_window_weights(input_size, shift) + + # Since the input starts at line 1, we must add 1. To avoid overflowing, + # instead subtract 7. + num_lines_multiple_of_eight = int(len(lines) - len(lines) % 8) - 7 + # Skip line 0, which contains the configuration params. 
+ # Read lines in pairs + in_frames = np.array([[int(j) for j in lines[i].split()] + for i in range(1, num_lines_multiple_of_eight, 2)], + dtype='int16') + out_frames_exp = [[int(j) for j in lines[i + 1].split()] + for i in range(1, num_lines_multiple_of_eight, 2)] + self.RunMultiDimWindow(shift, dtype, in_frames, weights, out_frames_exp) + + # Expand outer dims to [4, x, input_size] to test >1 outer dim. + in_frames_multiple_outer_dims = np.reshape(in_frames, [4, -1, input_size]) + out_frames_exp_multiple_outer_dims = np.reshape(out_frames_exp, + [4, -1, input_size]) + self.RunMultiDimWindow(shift, dtype, in_frames_multiple_outer_dims, + weights, out_frames_exp_multiple_outer_dims) + + def testSingleFrame(self): + frame_in = [ + 165, 296, 414, 400, 251, 87, 25, 84, 197, 256, 188, 15, -135, -149, + -68, -25, -85, -145, -87, 51, 116, 41, -80, -118, -64, 1, 21, 12, 7, 3, + -6, -9, -9, -44, -129, -210, -192, -60, 72, 70, -65, -178, -151, -55, + -66, -234, -409, -418, -285, -188, -231, -323, -303, -143, 41, 126, 99, + 29, -6, 27, 109, 165, 130, 28, -41, -20, 28, 10, -68, -92, 6, 153, 222, + 168, 61, -16, -27, 16, 86, 137, 132, 82, 46, 66, 100, 73, -26, -124, + -153, -121, -82, -72, -92, -120, -120, -53, 79, 206, 221, 83, -113, + -222, -169, -15, 99, 88, -1, -45, 26, 141, 166, 61, -62, -62, 78, 222, + 233, 119, 5, 14, 159, 346, 463, 454, 349, 233, 183, 211, 247, 204, 68, + -68, -90, 3, 78, 12, -161, -276, -237, -134, -122, -217, -296, -258, + -159, -113, -146, -184, -160, -89, -23, 6, -5, -31, -34, 6, 56, 55, 0, + -33, 23, 135, 198, 157, 47, -46, -75, -44, -8, -19, -102, -203, -239, + -190, -122, -104, -116, -80, 31, 147, 175, 94, -33, -117, -122, -81, + -59, -86, -125, -121, -68, -17, -8, -37, -69, -89, -90, -75, -62, -83, + -141, -181, -151, -80, -48, -80, -113, -78, 3, 51, 27, -29, -54, -30, + 20, 61, 67, 48, 48, 104, 193, 241, 204, 113, 52, 56, 91, 93, 39, -43, + -115, -145, -117, -38, 61, 122, 100, 12, -63, -55, 39, 143, 186, 163, + 128, 125, 146, 163, 163, 163, 
171, 173, 149, 107, 74, 62, 42, -11, -74, + -79, 0, 100, 126, 63, -9, -13, 44, 107, 131, 111, 47, -48, -138, -175, + -160, -145, -162, -173, -131, -42, 15, -21, -128, -217, -207, -107, + -10, 5, -36, -22, 89, 188, 122, -98, -267, -214, -15, 97, -5, -193, + -251, -122, 52, 107, 33, -49, -25, 97, 219, 259, 213, 136, 88, 81, 90, + 78, 35, -19, -64, -100, -138, -157, -126, -52, -4, -26, -85, -102, -58, + -6, 12, 12, 23, 22, -28, -101, -108, -23, 72, 80, -9, -116, -169, -164, + -143, -128, -113, -82, -33, 11, 31, 33, 40, 70, 106, 117, 103, 82, 65, + 44, 13, -9, -7, 10, 27, 47, 83, 122, 138, 122, 93, 59, 11, -41, -42, + 46, 177, 236, 163, 18, -73, -58, -3, -2, -71, -144, -158, -115, -57, + -23, -10, 0, 22, 48, 57, 35, -2, -31, -22, 29 + ] + + exp_out = [ + 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 0, -2, -2, -1, -1, -2, -3, -2, 1, 2, + 1, -3, -4, -3, 0, 0, 0, 0, 0, -1, -1, -1, -3, -10, -16, -16, -6, 6, 6, + -7, -19, -17, -7, -8, -29, -53, -56, -40, -28, -35, -50, -49, -24, 7, + 22, 18, 5, -2, 5, 22, 35, 28, 6, -10, -5, 6, 2, -18, -25, 1, 43, 64, + 50, 18, -5, -9, 5, 28, 46, 46, 29, 16, 24, 37, 28, -11, -50, -63, -51, + -35, -32, -41, -54, -55, -25, 37, 98, 107, 41, -57, -114, -88, -8, 53, + 47, -1, -26, 14, 80, 96, 35, -38, -38, 47, 137, 146, 75, 3, 9, 104, + 230, 311, 308, 240, 161, 128, 149, 176, 147, 49, -51, -67, 2, 59, 9, + -125, -215, -186, -106, -98, -175, -240, -211, -131, -94, -122, -155, + -136, -76, -20, 5, -5, -28, -30, 5, 49, 49, 0, -30, 20, 123, 181, 144, + 43, -43, -70, -42, -8, -18, -97, -193, -229, -182, -118, -101, -113, + -78, 30, 143, 170, 92, -33, -116, -121, -80, -59, -86, -124, -121, -68, + -17, -8, -37, -69, -89, -90, -75, -62, -83, -141, -181, -151, -80, -48, + -80, -113, -78, 2, 50, 26, -29, -54, -30, 19, 60, 65, 47, 46, 101, 188, + 234, 197, 109, 50, 53, 87, 88, 37, -41, -109, -137, -110, -36, 56, 112, + 92, 10, -58, -50, 35, 128, 166, 144, 113, 109, 127, 141, 140, 139, 145, + 146, 125, 89, 61, 50, 34, -9, -60, -63, 0, 78, 98, 48, -7, -10, 33, 79, + 96, 
81, 33, -35, -98, -123, -112, -100, -111, -117, -88, -28, 9, -14, + -82, -137, -129, -66, -7, 2, -22, -13, 51, 106, 68, -55, -146, -115, + -8, 50, -3, -98, -125, -60, 24, 50, 15, -23, -12, 42, 94, 110, 89, 55, + 35, 31, 34, 29, 13, -7, -23, -35, -48, -53, -42, -17, -2, -8, -26, -30, + -17, -2, 3, 3, 5, 5, -7, -24, -25, -6, 15, 16, -2, -23, -33, -31, -26, + -23, -19, -14, -6, 1, 4, 4, 5, 8, 12, 13, 11, 8, 6, 4, 1, -1, -1, 0, 2, + 3, 5, 7, 8, 6, 4, 2, 0, -2, -2, 1, 5, 7, 4, 0, -2, -2, -1, -1, -2, -2, + -2, -2, -1, -1, -1, 0, 0, 0, 0, 0, -1, -1, -1, 0 + ] + weights = window_op.hann_window_weights(len(frame_in), 12) + frame_out = window_op.window(frame_in, weights, shift=12) + self.assertAllEqual(exp_out, frame_out) + + def testWindow(self): + self.SingleWindowTest('testdata/window_test1.txt') + + def testWindowLargeOuterDimension(self): + self.MultiDimWindowTest('testdata/window_test1.txt') + + +if __name__ == '__main__': + tf.test.main() diff --git a/python/tflite_micro/signal/tflm_signal.bzl b/python/tflite_micro/signal/tflm_signal.bzl new file mode 100644 index 0000000..ff86a7b --- /dev/null +++ b/python/tflite_micro/signal/tflm_signal.bzl @@ -0,0 +1,88 @@ +"""Build rule for wrapping a custom TF OP from .cc to python.""" + +# TODO(b/286890280): refactor to be more generic build target for any custom OP +def py_tflm_signal_library( + name, + srcs = [], + deps = [], + visibility = None, + cc_op_defs = [], + cc_op_kernels = []): + """Creates build rules for signal ops as shared libraries. + + Defines three targets: + + Python library that exposes all ops defined in `cc_op_defs` and `py_srcs`. + _cc + C++ library that registers any c++ ops in `cc_op_defs`, and includes the + kernels from `cc_op_kernels`. + ops/_.so + Shared library exposing the _cc library. + Args: + name: The name for the python library target build by this rule. + srcs: Python source files for the Python library. + deps: Dependencies for the Python library. 
+ visibility: Visibility for the Python library. + cc_op_defs: A list of c++ src files containing REGISTER_OP definitions. + cc_op_kernels: A list of c++ targets containing kernels that are used + by the Python library. + """ + binary_path = "ops" + if srcs: + binary_path_end_pos = srcs[0].rfind("/") + binary_path = srcs[0][0:binary_path_end_pos] + binary_name = binary_path + "/_" + cc_op_kernels[0][1:] + ".so" + if cc_op_defs: + binary_name = "ops/_" + name + ".so" + library_name = name + "_cc" + native.cc_library( + name = library_name, + srcs = cc_op_defs, + copts = select({ + "//conditions:default": ["-pthread"], + }), + alwayslink = 1, + deps = + cc_op_kernels + + ["@tensorflow_cc_deps//:cc_library"] + + select({"//conditions:default": []}), + ) + + native.cc_binary( + name = binary_name, + copts = select({ + "//conditions:default": ["-pthread"], + }), + linkshared = 1, + linkopts = [], + deps = [ + ":" + library_name, + "@tensorflow_cc_deps//:cc_library", + ] + select({"//conditions:default": []}), + ) + + native.py_library( + name = name, + srcs = srcs, + srcs_version = "PY2AND3", + visibility = visibility, + data = [":" + binary_name], + deps = deps, + ) + +# A rule to build a TensorFlow OpKernel. +def tflm_signal_kernel_library( + name, + srcs = [], + hdrs = [], + deps = [], + copts = [], + alwayslink = 1): + native.cc_library( + name = name, + srcs = srcs, + hdrs = hdrs, + deps = deps, + copts = copts, + alwayslink = alwayslink, + ) diff --git a/python/tflite_micro/signal/utils/BUILD b/python/tflite_micro/signal/utils/BUILD new file mode 100644 index 0000000..12db349 --- /dev/null +++ b/python/tflite_micro/signal/utils/BUILD @@ -0,0 +1,18 @@ +# Signal python utilities. 
+load("@tflm_pip_deps//:requirements.bzl", "requirement") + +package( + default_visibility = [ + "//python/tflite_micro/signal:__subpackages__", + ], + licenses = ["notice"], +) + +py_library( + name = "util", + srcs = ["util.py"], + deps = [ + "//python/tflite_micro:runtime", + requirement("tensorflow-cpu"), + ], +) diff --git a/python/tflite_micro/signal/utils/util.py b/python/tflite_micro/signal/utils/util.py new file mode 100644 index 0000000..5a457b9 --- /dev/null +++ b/python/tflite_micro/signal/utils/util.py @@ -0,0 +1,42 @@ +# Copyright 2023 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================== +"""Python utility functions.""" +import tensorflow as tf +from tensorflow.python.framework import load_library +from tensorflow.python.platform import resource_loader +from tflite_micro.python.tflite_micro import runtime + + +# TODO(b/286889497): find better name and place for this function. +def get_tflm_interpreter(concrete_function, trackable_obj): + """Initialize a TFLite interpreter with a concerte function. 
+ + Args: + concrete_function: A concrete function + + Returns: + TFLite interpreter object + """ + converter = tf.lite.TFLiteConverter.from_concrete_functions( + [concrete_function], trackable_obj) + converter.allow_custom_ops = True + tflite_model = converter.convert() + + return runtime.Interpreter.from_bytes(tflite_model, arena_size=500000) + + +def load_custom_op(name): + return load_library.load_op_library( + resource_loader.get_path_to_datafile('../ops/_' + name + '_op.so')) diff --git a/signal/micro/kernels/BUILD b/signal/micro/kernels/BUILD new file mode 100644 index 0000000..c0813a6 --- /dev/null +++ b/signal/micro/kernels/BUILD @@ -0,0 +1,56 @@ +load( + "//tensorflow/lite/micro:build_def.bzl", + "micro_copts", +) + +package( + licenses = ["notice"], +) + +cc_library( + name = "register_signal_ops", + srcs = [ + "window.cc", + ], + copts = micro_copts(), + visibility = [ + "//tensorflow/lite/micro", + ], + deps = [ + "//signal/src:window", + "//tensorflow/lite/kernels:kernel_util", + "//tensorflow/lite/kernels/internal:tensor", + "//tensorflow/lite/micro:flatbuffer_utils", + "//tensorflow/lite/micro:memory_helpers", + "//tensorflow/lite/micro:micro_common", + "//tensorflow/lite/micro:micro_context", + "//tensorflow/lite/micro:micro_utils", + "//tensorflow/lite/micro/kernels:kernel_util", + ], +) + +cc_library( + name = "window_flexbuffers_generated_data", + srcs = [ + "window_flexbuffers_generated_data.cc", + ], + hdrs = [ + "window_flexbuffers_generated_data.h", + ], +) + +cc_test( + name = "window_test", + srcs = [ + "window_test.cc", + ], + deps = [ + ":register_signal_ops", + ":window_flexbuffers_generated_data", + "//tensorflow/lite/c:common", + "//tensorflow/lite/micro:op_resolvers", + "//tensorflow/lite/micro:test_helpers", + "//tensorflow/lite/micro/kernels:kernel_runner", + "//tensorflow/lite/micro/testing:micro_test", + ], +) diff --git a/signal/micro/kernels/window.cc b/signal/micro/kernels/window.cc new file mode 100644 index 0000000..e850898 
--- /dev/null +++ b/signal/micro/kernels/window.cc @@ -0,0 +1,122 @@ +/* Copyright 2022 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#include "signal/src/window.h" + +#include + +#include "tensorflow/lite/kernels/internal/tensor_ctypes.h" +#include "tensorflow/lite/kernels/kernel_util.h" +#include "tensorflow/lite/micro/flatbuffer_utils.h" +#include "tensorflow/lite/micro/kernels/kernel_util.h" +#include "tensorflow/lite/micro/memory_helpers.h" +#include "tensorflow/lite/micro/micro_utils.h" + +namespace tflite { +namespace { + +constexpr int kInputTensor = 0; +constexpr int kWeightsTensor = 1; +constexpr int kOutputTensor = 0; + +// Indices into the init flexbuffer's vector. +// The parameter's name is in the comment that follows. +// Elements in the vectors are ordered alphabetically by parameter name. 
+constexpr int kShiftIndex = 0; // 'shift' + +struct TFLMSignalWindowParams { + int32_t shift; + int32_t input_size; +}; + +void* Init(TfLiteContext* context, const char* buffer, size_t length) { + const uint8_t* buffer_t = reinterpret_cast(buffer); + + auto* params = + static_cast(context->AllocatePersistentBuffer( + context, sizeof(TFLMSignalWindowParams))); + + tflite::FlexbufferWrapper fbw(buffer_t, length); + params->shift = fbw.ElementAsInt32(kShiftIndex); + return params; +} + +TfLiteStatus Prepare(TfLiteContext* context, TfLiteNode* node) { + TF_LITE_ENSURE_EQ(context, NumInputs(node), 2); + TF_LITE_ENSURE_EQ(context, NumOutputs(node), 1); + + MicroContext* micro_context = GetMicroContext(context); + + TfLiteTensor* input = + micro_context->AllocateTempInputTensor(node, kInputTensor); + TF_LITE_ENSURE(context, input != nullptr); + TfLiteTensor* weights = + micro_context->AllocateTempInputTensor(node, kWeightsTensor); + TF_LITE_ENSURE(context, weights != nullptr); + TfLiteTensor* output = + micro_context->AllocateTempOutputTensor(node, kOutputTensor); + TF_LITE_ENSURE(context, output != nullptr); + + TF_LITE_ENSURE(context, NumDimensions(input) >= 1); + TF_LITE_ENSURE_EQ(context, NumDimensions(weights), 1); + TF_LITE_ENSURE_EQ(context, NumDimensions(input), NumDimensions(output)); + + TF_LITE_ENSURE_TYPES_EQ(context, input->type, kTfLiteInt16); + TF_LITE_ENSURE_TYPES_EQ(context, weights->type, kTfLiteInt16); + TF_LITE_ENSURE_TYPES_EQ(context, output->type, kTfLiteInt16); + + auto* params = reinterpret_cast(node->user_data); + RuntimeShape input_shape = GetTensorShape(input); + params->input_size = input_shape.FlatSize(); + + micro_context->DeallocateTempTfLiteTensor(input); + micro_context->DeallocateTempTfLiteTensor(weights); + micro_context->DeallocateTempTfLiteTensor(output); + return kTfLiteOk; +} + +TfLiteStatus Eval(TfLiteContext* context, TfLiteNode* node) { + auto* params = reinterpret_cast(node->user_data); + + const TfLiteEvalTensor* input = + 
tflite::micro::GetEvalInput(context, node, kInputTensor); + const TfLiteEvalTensor* weights = + tflite::micro::GetEvalInput(context, node, kWeightsTensor); + TfLiteEvalTensor* output = + tflite::micro::GetEvalOutput(context, node, kOutputTensor); + + const int16_t* input_data = tflite::micro::GetTensorData(input); + const int16_t* weight_data = tflite::micro::GetTensorData(weights); + int16_t* output_data = tflite::micro::GetTensorData(output); + int weight_size = weights->dims->data[0]; + + for (int i = 0; i < params->input_size; i += weight_size) { + ::tflm_signal::ApplyWindow(&input_data[i], weight_data, weight_size, + params->shift, &output_data[i]); + } + return kTfLiteOk; +} +} // namespace + +// TODO(b/286250473): remove namespace once de-duped libraries +namespace tflm_signal { + +TFLMRegistration* Register_WINDOW() { + static TFLMRegistration r = tflite::micro::RegisterOp(Init, Prepare, Eval); + return &r; +} + +} // namespace tflm_signal +} // namespace tflite diff --git a/signal/micro/kernels/window_flexbuffers_generated_data.cc b/signal/micro/kernels/window_flexbuffers_generated_data.cc new file mode 100644 index 0000000..ab2bbc5 --- /dev/null +++ b/signal/micro/kernels/window_flexbuffers_generated_data.cc @@ -0,0 +1,29 @@ +/* Copyright 2021 The TensorFlow Authors. All Rights Reserved. +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +// This file is generated. 
See: +// tensorflow/lite/micro/kernels/test_data_generation/README.md + +#include "signal/micro/kernels/window_flexbuffers_generated_data.h" + +const int g_gen_data_size_window_shift_12 = 16; +const unsigned char g_gen_data_window_shift_12[] = { + 0x73, 0x68, 0x69, 0x66, 0x74, 0x00, 0x01, 0x07, + 0x01, 0x01, 0x01, 0x0c, 0x04, 0x02, 0x24, 0x01, +}; +const int g_gen_data_size_window_shift_8 = 16; +const unsigned char g_gen_data_window_shift_8[] = { + 0x73, 0x68, 0x69, 0x66, 0x74, 0x00, 0x01, 0x07, + 0x01, 0x01, 0x01, 0x08, 0x04, 0x02, 0x24, 0x01, +}; diff --git a/signal/micro/kernels/window_flexbuffers_generated_data.h b/signal/micro/kernels/window_flexbuffers_generated_data.h new file mode 100644 index 0000000..cd26fc0 --- /dev/null +++ b/signal/micro/kernels/window_flexbuffers_generated_data.h @@ -0,0 +1,25 @@ +/* Copyright 2020 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/ + +#ifndef SIGNAL_MICRO_KERNELS_TEST_DATA_GENERATION_GENERATE_WINDOW_FLEXBUFFERS_DATA_H_ +#define SIGNAL_MICRO_KERNELS_TEST_DATA_GENERATION_GENERATE_WINDOW_FLEXBUFFERS_DATA_H_ + +extern const int g_gen_data_size_window_shift_12; +extern const unsigned char g_gen_data_window_shift_12[]; + +extern const int g_gen_data_size_window_shift_8; +extern const unsigned char g_gen_data_window_shift_8[]; + +#endif // SIGNAL_MICRO_KERNELS_TEST_DATA_GENERATION_GENERATE_WINDOW_FLEXBUFFERS_DATA_H_ diff --git a/signal/micro/kernels/window_test.cc b/signal/micro/kernels/window_test.cc new file mode 100644 index 0000000..c78f471 --- /dev/null +++ b/signal/micro/kernels/window_test.cc @@ -0,0 +1,162 @@ +/* Copyright 2019 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/
+
+#include <cstdint>
+
+#include "signal/micro/kernels/window_flexbuffers_generated_data.h"
+#include "tensorflow/lite/micro/kernels/kernel_runner.h"
+#include "tensorflow/lite/micro/test_helpers.h"
+#include "tensorflow/lite/micro/testing/micro_test.h"
+
+namespace tflite {
+namespace testing {
+
+namespace {
+
+TfLiteStatus TestWindow(int* input1_dims_data, const int16_t* input1_data,
+                        int* input2_dims_data, const int16_t* input2_data,
+                        int* output_dims_data, const int16_t* golden,
+                        const unsigned char* flexbuffers_data,
+                        const unsigned int flexbuffers_data_size,
+                        int16_t* output_data) {
+  TfLiteIntArray* input1_dims = IntArrayFromInts(input1_dims_data);
+  TfLiteIntArray* input2_dims = IntArrayFromInts(input2_dims_data);
+  TfLiteIntArray* output_dims = IntArrayFromInts(output_dims_data);
+
+  constexpr int kInputsSize = 1;
+  constexpr int kOutputsSize = 2;
+  constexpr int kTensorsSize = kInputsSize + kOutputsSize;
+  TfLiteTensor tensors[kTensorsSize] = {
+      CreateTensor(input1_data, input1_dims),
+      CreateTensor(input2_data, input2_dims),
+      CreateTensor(output_data, output_dims),
+  };
+
+  int inputs_array_data[] = {2, 0, 1};
+  TfLiteIntArray* inputs_array = IntArrayFromInts(inputs_array_data);
+  int outputs_array_data[] = {1, 2};
+  TfLiteIntArray* outputs_array = IntArrayFromInts(outputs_array_data);
+
+  const int output_len = ElementCount(*output_dims);
+
+  TFLMRegistration* registration = tflite::tflm_signal::Register_WINDOW();
+  micro::KernelRunner runner(*registration, tensors, kTensorsSize, inputs_array,
+                             outputs_array,
+                             /*builtin_data=*/nullptr);
+
+  // TfLite uses a char* for the raw bytes whereas flexbuffers use an unsigned
+  // char*. This small discrepancy results in compiler warnings unless we
+  // reinterpret_cast right before passing in the flexbuffer bytes to the
+  // KernelRunner.
+  TF_LITE_ENSURE_STATUS(runner.InitAndPrepare(
+      reinterpret_cast<const char*>(flexbuffers_data), flexbuffers_data_size));
+
+  TF_LITE_ENSURE_STATUS(runner.Invoke());
+
+  for (int i = 0; i < output_len; ++i) {
+    TF_LITE_MICRO_EXPECT_EQ(golden[i], output_data[i]);
+  }
+
+  return kTfLiteOk;
+}
+
+}  // namespace
+}  // namespace testing
+}  // namespace tflite
+
+TF_LITE_MICRO_TESTS_BEGIN
+
+TF_LITE_MICRO_TEST(WindowTestLength16Shift12) {
+  int input1_shape[] = {1, 16};
+  int input2_shape[] = {1, 16};
+  int output_shape[] = {1, 16};
+  const int16_t input1[] = {0x000, 0x100, 0x200, 0x300, 0x400, 0x500,
+                            0x600, 0x700, 0x800, 0x900, 0xA00, 0xB00,
+                            0xC00, 0xD00, 0xE00, 0xF00};
+  const int16_t input2[] = {0xF00, 0xE00, 0xD00, 0xC00, 0xB00, 0xA00,
+                            0x900, 0x800, 0x700, 0x600, 0x500, 0x400,
+                            0x300, 0x200, 0x100, 0x000};
+  const int16_t golden[] = {0,   224, 416, 576, 704, 800, 864, 896,
+                            896, 864, 800, 704, 576, 416, 224, 0};
+
+  int16_t output[16];
+
+  TF_LITE_MICRO_EXPECT_EQ(
+      kTfLiteOk,
+      tflite::testing::TestWindow(
+          input1_shape, input1, input2_shape, input2, output_shape, golden,
+          g_gen_data_window_shift_12, g_gen_data_size_window_shift_12, output));
+}
+
+TF_LITE_MICRO_TEST(WindowTestLength32Shift8) {
+  int input1_shape[] = {1, 32};
+  int input2_shape[] = {1, 32};
+  int output_shape[] = {1, 32};
+  const int16_t input1[] = {1221, 1920, 9531, 2795, 1826, 371,  8446, 850,
+                            3129, 8218, 4199, 8358, 205,  5268, 3263, 2849,
+                            8398, 1381, 6305, 668,  8867, 4651, 9121, 6141,
+                            1961, 3750, 8418, 8085, 3308, 1788, 1608, 4761};
+  const int16_t input2[] = {1323, 764,  9100, 4220, 1745, 9311, 178,  9442,
+                            5676, 1817, 5433, 5837, 7635, 4539, 6548, 9690,
+                            6097, 4275, 1523, 3694, 7506, 2797, 5153, 172,
+                            2172, 4540, 6643, 7845, 1719, 7564, 1700, 5227};
+  const int16_t golden[] = {6310,  5730,  32767, 32767, 12446, 13493, 5872,
+                            31350, 32767, 32767, 32767, 32767, 6113,  32767,
+                            32767, 32767, 32767, 23061, 32767, 9639,  32767,
+                            32767, 32767, 4125,  16637, 32767, 32767, 32767,
+                            22212, 32767, 10678, 32767};
+
+  int16_t output[32];
+
+  TF_LITE_MICRO_EXPECT_EQ(
+      kTfLiteOk,
+      tflite::testing::TestWindow(
+          input1_shape, input1, input2_shape, input2, output_shape, golden,
+          g_gen_data_window_shift_8, g_gen_data_size_window_shift_8, output));
+}
+
+TF_LITE_MICRO_TEST(WindowTestLength16Shift12OuterDims4) {
+  const int kOuterDim = 2;
+  int input1_shape[] = {3, kOuterDim, kOuterDim, 16};
+  int input2_shape[] = {1, 16};
+  int output_shape[] = {3, kOuterDim, kOuterDim, 16};
+  const int16_t input1[] = {
+      0x000, 0x100, 0x200, 0x300, 0x400, 0x500, 0x600, 0x700, 0x800, 0x900,
+      0xA00, 0xB00, 0xC00, 0xD00, 0xE00, 0xF00, 0x000, 0x100, 0x200, 0x300,
+      0x400, 0x500, 0x600, 0x700, 0x800, 0x900, 0xA00, 0xB00, 0xC00, 0xD00,
+      0xE00, 0xF00, 0x000, 0x100, 0x200, 0x300, 0x400, 0x500, 0x600, 0x700,
+      0x800, 0x900, 0xA00, 0xB00, 0xC00, 0xD00, 0xE00, 0xF00, 0x000, 0x100,
+      0x200, 0x300, 0x400, 0x500, 0x600, 0x700, 0x800, 0x900, 0xA00, 0xB00,
+      0xC00, 0xD00, 0xE00, 0xF00};
+  const int16_t input2[] = {0xF00, 0xE00, 0xD00, 0xC00, 0xB00, 0xA00,
+                            0x900, 0x800, 0x700, 0x600, 0x500, 0x400,
+                            0x300, 0x200, 0x100, 0x000};
+  const int16_t golden[] = {
+      0,   224, 416, 576, 704, 800, 864, 896, 896, 864, 800, 704, 576,
+      416, 224, 0,   0,   224, 416, 576, 704, 800, 864, 896, 896, 864,
+      800, 704, 576, 416, 224, 0,   0,   224, 416, 576, 704, 800, 864,
+      896, 896, 864, 800, 704, 576, 416, 224, 0,   0,   224, 416, 576,
+      704, 800, 864, 896, 896, 864, 800, 704, 576, 416, 224, 0};
+
+  int16_t output[kOuterDim * kOuterDim * 16];
+
+  TF_LITE_MICRO_EXPECT_EQ(
+      kTfLiteOk,
+      tflite::testing::TestWindow(
+          input1_shape, input1, input2_shape, input2, output_shape, golden,
+          g_gen_data_window_shift_12, g_gen_data_size_window_shift_12, output));
+}
+
+TF_LITE_MICRO_TESTS_END
diff --git a/signal/src/BUILD b/signal/src/BUILD
new file mode 100644
index 0000000..b41a953
--- /dev/null
+++ b/signal/src/BUILD
@@ -0,0 +1,10 @@
+package(
+    default_visibility = ["//signal:__subpackages__"],
+    licenses = ["notice"],
+)
+
+cc_library(
+    name = "window",
+    srcs = ["window.cc"],
+    hdrs = ["window.h"],
+)
diff --git a/signal/src/window.cc b/signal/src/window.cc
new file mode 100644
index 0000000..c61282b
--- /dev/null
+++ b/signal/src/window.cc
@@ -0,0 +1,36 @@
+/* Copyright 2022 The TensorFlow Authors. All Rights Reserved.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+==============================================================================*/
+
+#include "signal/src/window.h"
+
+#include <stdint.h>
+
+// TODO(b/286250473): remove namespace once de-duped libraries
+namespace tflm_signal {
+
+void ApplyWindow(const int16_t* input, const int16_t* window, int size,
+                 int shift, int16_t* output) {
+  for (int i = 0; i < size; ++i) {
+    int32_t raw = (static_cast<int32_t>(input[i]) * window[i]) >> shift;
+    if (raw < INT16_MIN) {
+      output[i] = INT16_MIN;
+    } else if (raw > INT16_MAX) {
+      output[i] = INT16_MAX;
+    } else {
+      output[i] = static_cast<int16_t>(raw);
+    }
+  }
+}
+}  // namespace tflm_signal
diff --git a/signal/src/window.h b/signal/src/window.h
new file mode 100644
index 0000000..88e8d11
--- /dev/null
+++ b/signal/src/window.h
@@ -0,0 +1,31 @@
+/* Copyright 2022 The TensorFlow Authors. All Rights Reserved.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+==============================================================================*/
+
+#ifndef SIGNAL_SRC_WINDOW_H_
+#define SIGNAL_SRC_WINDOW_H_
+
+#include <stdint.h>
+
+namespace tflm_signal {
+
+// Applies a window function to an input signal
+//
+// * `input` and `window` must both be of size `size` elements and are
+//   multiplied element-by-element.
+// * `shift` is a right shift to apply before writing the result to `output`.
+void ApplyWindow(const int16_t* input, const int16_t* window, int size,
+                 int shift, int16_t* output);
+}  // namespace tflm_signal
+#endif  // SIGNAL_SRC_WINDOW_H_
diff --git a/signal/tensorflow_core/kernels/BUILD b/signal/tensorflow_core/kernels/BUILD
new file mode 100644
index 0000000..e6c2e42
--- /dev/null
+++ b/signal/tensorflow_core/kernels/BUILD
@@ -0,0 +1,15 @@
+load("//python/tflite_micro/signal:tflm_signal.bzl", "tflm_signal_kernel_library")
+
+package(
+    default_visibility = ["//python/tflite_micro/signal:__subpackages__"],
+    licenses = ["notice"],
+)
+
+tflm_signal_kernel_library(
+    name = "window_kernel",
+    srcs = ["window_kernel.cc"],
+    deps = [
+        "//signal/src:window",
+        "@tensorflow_cc_deps//:cc_library",
+    ],
+)
diff --git a/signal/tensorflow_core/kernels/window_kernel.cc b/signal/tensorflow_core/kernels/window_kernel.cc
new file mode 100644
index 0000000..c0024d2
--- /dev/null
+++ b/signal/tensorflow_core/kernels/window_kernel.cc
@@ -0,0 +1,57 @@
+/* Copyright 2022 The TensorFlow Authors. All Rights Reserved.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+==============================================================================*/
+
+#include "signal/src/window.h"
+#include "tensorflow/core/framework/op_kernel.h"
+
+namespace tensorflow {
+namespace signal {
+
+class WindowOp : public tensorflow::OpKernel {
+ public:
+  explicit WindowOp(tensorflow::OpKernelConstruction* context)
+      : tensorflow::OpKernel(context) {
+    OP_REQUIRES_OK(context, context->GetAttr("shift", &shift_));
+  }
+
+  void Compute(tensorflow::OpKernelContext* context) override {
+    const tensorflow::Tensor& input_tensor = context->input(0);
+    const int16_t* input = input_tensor.flat<int16_t>().data();
+    const tensorflow::Tensor& weight_tensor = context->input(1);
+    const int16_t* weights = weight_tensor.flat<int16_t>().data();
+    int weight_size = weight_tensor.flat<int16_t>().size();
+    int outer_dims =
+        input_tensor.flat_inner_dims<int16_t>().dimensions().at(0);
+
+    tensorflow::Tensor* output_tensor = nullptr;
+    OP_REQUIRES_OK(context, context->allocate_output(0, input_tensor.shape(),
+                                                     &output_tensor));
+    int16_t* output = output_tensor->flat<int16_t>().data();
+    for (int i = 0; i < outer_dims; i++) {
+      tflm_signal::ApplyWindow(&input[i * weight_size], weights, weight_size,
+                               shift_, &output[i * weight_size]);
+    }
+  }
+
+ private:
+  int shift_;
+};
+
+// TODO(b/286250473): change back name to "Window" after name clash resolved
+REGISTER_KERNEL_BUILDER(Name("SignalWindow").Device(tensorflow::DEVICE_CPU),
+                        WindowOp);
+
+}  // namespace signal
+}  // namespace tensorflow
\ No newline at end of file
diff --git a/signal/tensorflow_core/ops/BUILD b/signal/tensorflow_core/ops/BUILD
new file mode 100644
index 0000000..3dbd65f --- /dev/null +++ b/signal/tensorflow_core/ops/BUILD @@ -0,0 +1,14 @@ +load("//python/tflite_micro/signal:tflm_signal.bzl", "tflm_signal_kernel_library") + +package( + default_visibility = ["//python/tflite_micro/signal:__subpackages__"], + licenses = ["notice"], +) + +tflm_signal_kernel_library( + name = "window_op", + srcs = ["window_op.cc"], + deps = [ + "@tensorflow_cc_deps//:cc_library", + ], +) diff --git a/signal/tensorflow_core/ops/window_op.cc b/signal/tensorflow_core/ops/window_op.cc new file mode 100644 index 0000000..24e51b2 --- /dev/null +++ b/signal/tensorflow_core/ops/window_op.cc @@ -0,0 +1,58 @@ +/* Copyright 2022 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/ + +#include "tensorflow/core/framework/op.h" +#include "tensorflow/core/framework/shape_inference.h" + +using tensorflow::shape_inference::InferenceContext; +using tensorflow::shape_inference::ShapeHandle; + +namespace tensorflow { +namespace signal { + +Status WindowShape(InferenceContext* c) { + ShapeHandle out; + TF_RETURN_IF_ERROR(c->WithRank(c->input(1), 1, &out)); + TF_RETURN_IF_ERROR(c->WithRankAtLeast(c->input(0), 1, &out)); + + shape_inference::DimensionHandle dim_in; + dim_in = c->Dim(c->input(0), -1); + + TF_RETURN_IF_ERROR(c->WithValue(c->Dim(c->input(1), 0), + InferenceContext::Value(dim_in), &dim_in)); + c->set_output(0, out); + return OkStatus(); +} + +// TODO(b/286250473): change back name to "Window" after name clash resolved +REGISTER_OP("SignalWindow") + .Attr("shift: int") + .Input("input: int16") + .Input("weights: int16") + .Output("output: int16") + .SetShapeFn([](InferenceContext* c) { return WindowShape(c); }) + .Doc(R"doc( +Apply a window to an input signal with a right shift to each element + +input: An N-D time domain input signal +weights: Constant 1-D window weights. Size must match innermost input dimension. +output: An N-D time domain output signal. Size must match input. 
+ +shift: An amount of right shifts to perform on each element before writing +to the output +)doc"); + +} // namespace signal +} // namespace tensorflow \ No newline at end of file diff --git a/tensorflow/BUILD b/tensorflow/BUILD new file mode 100644 index 0000000..5b01f6e --- /dev/null +++ b/tensorflow/BUILD @@ -0,0 +1 @@ +licenses(["notice"]) diff --git a/tensorflow/extra_rules.bzl b/tensorflow/extra_rules.bzl new file mode 100644 index 0000000..9e20eda --- /dev/null +++ b/tensorflow/extra_rules.bzl @@ -0,0 +1,30 @@ +def tflm_kernel_friends(): + return [] + +def tflm_audio_frontend_friends(): + return [] + +def tflm_application_friends(): + return [] + +def tflm_signal_friends(): + return [] + +def tflm_python_op_resolver_friends(): + return [] + +def xtensa_fusion_f1_config(): + """Config setting for all Fusion F1 based cores.""" + return "//tensorflow/lite/micro/kernels:xtensa_fusion_f1_default" + +def xtensa_hifi_3z_config(): + """Config setting for all HiFi 3z based cores.""" + return "//tensorflow/lite/micro/kernels:xtensa_hifi_3z_default" + +def xtensa_hifi_5_config(): + """Config setting for all HiFi 5 based cores.""" + return "//tensorflow/lite/micro/kernels:xtensa_hifi_5_default" + +def xtensa_vision_p6_config(): + """Config setting for all Vision P6 based cores.""" + return "//tensorflow/lite/micro/kernels:xtensa_vision_p6_default" diff --git a/tensorflow/lite/BUILD b/tensorflow/lite/BUILD new file mode 100644 index 0000000..57cce63 --- /dev/null +++ b/tensorflow/lite/BUILD @@ -0,0 +1,33 @@ +package( + default_visibility = ["//visibility:public"], + licenses = ["notice"], +) + +cc_library( + name = "array", + srcs = ["array.cc"], + hdrs = ["array.h"], + deps = [ + "//tensorflow/lite/core/c:common", + ], +) + +cc_library( + name = "type_to_tflitetype", + hdrs = [ + "portable_type_to_tflitetype.h", + ], + deps = ["//tensorflow/lite/c:common"], +) + +cc_library( + name = "kernel_api", + hdrs = [ + "builtin_op_data.h", + "builtin_ops.h", + 
"context_util.h", + ], + deps = [ + "//tensorflow/lite/c:common", + ], +) diff --git a/tensorflow/lite/array.cc b/tensorflow/lite/array.cc new file mode 100644 index 0000000..1b1ff2e --- /dev/null +++ b/tensorflow/lite/array.cc @@ -0,0 +1,33 @@ +/* Copyright 2023 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#include "tensorflow/lite/array.h" + +namespace tflite { +namespace array_internal { + +void TfLiteArrayDeleter::operator()(TfLiteIntArray* a) { + if (a) { + TfLiteIntArrayFree(a); + } +} +void TfLiteArrayDeleter::operator()(TfLiteFloatArray* a) { + if (a) { + TfLiteFloatArrayFree(a); + } +} + +} // namespace array_internal +} // namespace tflite diff --git a/tensorflow/lite/array.h b/tensorflow/lite/array.h new file mode 100644 index 0000000..5a60784 --- /dev/null +++ b/tensorflow/lite/array.h @@ -0,0 +1,123 @@ +/* Copyright 2023 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+See the License for the specific language governing permissions and
+limitations under the License.
+==============================================================================*/
+#ifndef TENSORFLOW_LITE_ARRAY_H_
+#define TENSORFLOW_LITE_ARRAY_H_
+
+#include <cstring>
+#include <initializer_list>
+#include <memory>
+#include <type_traits>
+#include <vector>
+
+#include "tensorflow/lite/core/c/common.h"
+
+namespace tflite {
+
+/// TfLite*Array helpers
+
+namespace array_internal {
+
+// Function object used as a deleter for unique_ptr holding TFLite*Array
+// objects.
+struct TfLiteArrayDeleter {
+  void operator()(TfLiteIntArray* a);
+  void operator()(TfLiteFloatArray* a);
+};
+
+// Maps T to the corresponding TfLiteArray type.
+template <class T>
+struct TfLiteArrayInfo;
+
+template <>
+struct TfLiteArrayInfo<int> {
+  using Type = TfLiteIntArray;
+};
+
+template <>
+struct TfLiteArrayInfo<float> {
+  using Type = TfLiteFloatArray;
+};
+
+}  // namespace array_internal
+
+template <class T>
+using TfLiteArrayUniquePtr =
+    std::unique_ptr<typename array_internal::TfLiteArrayInfo<T>::Type,
+                    array_internal::TfLiteArrayDeleter>;
+
+// `unique_ptr` wrapper for `TfLiteIntArray`s.
+using IntArrayUniquePtr = TfLiteArrayUniquePtr<int>;
+
+// `unique_ptr` wrapper for `TfLiteFloatArray`s.
+using FloatArrayUniquePtr = TfLiteArrayUniquePtr<float>;
+
+// Allocates a TfLiteArray of given size using malloc.
+//
+// This builds an int array by default as this is the overwhelming part of the
+// use cases.
+template <class T = int>
+TfLiteArrayUniquePtr<T> BuildTfLiteArray(int size);
+
+// Allocates a TfLiteIntArray of given size using malloc.
+template <>
+inline IntArrayUniquePtr BuildTfLiteArray<int>(const int size) {
+  return IntArrayUniquePtr(TfLiteIntArrayCreate(size));
+}
+
+// Allocates a TfLiteFloatArray of given size using malloc.
+template <>
+inline FloatArrayUniquePtr BuildTfLiteArray<float>(const int size) {
+  return FloatArrayUniquePtr(TfLiteFloatArrayCreate(size));
+}
+
+// Allocates a TFLiteArray of given size and initializes it.
+//
+// `values` is expected to hold `size` elements.
+template <class T>
+TfLiteArrayUniquePtr<T> BuildTfLiteArray(const int size,
+                                         const T* const values) {
+  auto array = BuildTfLiteArray<T>(size);
+  if (array) {
+    memcpy(array->data, values, size * sizeof(T));
+  }
+  return array;
+}
+
+// Allocates a TFLiteArray and initializes it with the given values.
+template <class T>
+TfLiteArrayUniquePtr<T> BuildTfLiteArray(const std::vector<T>& values) {
+  return BuildTfLiteArray(static_cast<int>(values.size()), values.data());
+}
+
+// Allocates a TFLiteArray and initializes it with the given values.
+template <class T>
+TfLiteArrayUniquePtr<T> BuildTfLiteArray(
+    const std::initializer_list<T>& values) {
+  return BuildTfLiteArray(static_cast<int>(values.size()), values.begin());
+}
+
+// Allocates a TFLiteArray and initializes it with the given array.
+inline IntArrayUniquePtr BuildTfLiteArray(const TfLiteIntArray& other) {
+  return BuildTfLiteArray(other.size, other.data);
+}
+
+// Allocates a TFLiteArray and initializes it with the given array.
+inline FloatArrayUniquePtr BuildTfLiteArray(const TfLiteFloatArray& other) {
+  return BuildTfLiteArray(other.size, other.data);
+}
+
+}  // namespace tflite
+
+#endif  // TENSORFLOW_LITE_ARRAY_H_
diff --git a/tensorflow/lite/build_def.bzl b/tensorflow/lite/build_def.bzl
new file mode 100644
index 0000000..e8fc49c
--- /dev/null
+++ b/tensorflow/lite/build_def.bzl
@@ -0,0 +1,8 @@
+def tflite_copts():
+    """Defines common compile time flags for TFLite libraries."""
+    copts = [
+        "-DFARMHASH_NO_CXX_STRING",
+        "-Wno-sign-compare",
+        "-fno-exceptions",  # Exceptions are unused in TFLite.
+    ]
+    return copts
diff --git a/tensorflow/lite/builtin_op_data.h b/tensorflow/lite/builtin_op_data.h
new file mode 100644
index 0000000..161801c
--- /dev/null
+++ b/tensorflow/lite/builtin_op_data.h
@@ -0,0 +1,22 @@
+/* Copyright 2017 The TensorFlow Authors. All Rights Reserved.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ +// Compatibility shim for new location of interface definitions. + +#ifndef TENSORFLOW_LITE_BUILTIN_OP_DATA_H_ +#define TENSORFLOW_LITE_BUILTIN_OP_DATA_H_ + +#include "tensorflow/lite/core/c/builtin_op_data.h" + +#endif // TENSORFLOW_LITE_BUILTIN_OP_DATA_H_ diff --git a/tensorflow/lite/builtin_ops.h b/tensorflow/lite/builtin_ops.h new file mode 100644 index 0000000..f9871ad --- /dev/null +++ b/tensorflow/lite/builtin_ops.h @@ -0,0 +1,197 @@ +/* Copyright 2018 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#ifndef TENSORFLOW_LITE_BUILTIN_OPS_H_ +#define TENSORFLOW_LITE_BUILTIN_OPS_H_ + +// DO NOT EDIT MANUALLY: This file is automatically generated by +// `schema/builtin_ops_header/generator.cc`. + +#ifdef __cplusplus +extern "C" { +#endif // __cplusplus + +// The enum for builtin operators. 
+// Note: CUSTOM, DELEGATE, and PLACEHOLDER_FOR_GREATER_OP_CODES are 3 special +// ops which are not real built-in ops. +typedef enum { + kTfLiteBuiltinAdd = 0, + kTfLiteBuiltinAveragePool2d = 1, + kTfLiteBuiltinConcatenation = 2, + kTfLiteBuiltinConv2d = 3, + kTfLiteBuiltinDepthwiseConv2d = 4, + kTfLiteBuiltinDepthToSpace = 5, + kTfLiteBuiltinDequantize = 6, + kTfLiteBuiltinEmbeddingLookup = 7, + kTfLiteBuiltinFloor = 8, + kTfLiteBuiltinFullyConnected = 9, + kTfLiteBuiltinHashtableLookup = 10, + kTfLiteBuiltinL2Normalization = 11, + kTfLiteBuiltinL2Pool2d = 12, + kTfLiteBuiltinLocalResponseNormalization = 13, + kTfLiteBuiltinLogistic = 14, + kTfLiteBuiltinLshProjection = 15, + kTfLiteBuiltinLstm = 16, + kTfLiteBuiltinMaxPool2d = 17, + kTfLiteBuiltinMul = 18, + kTfLiteBuiltinRelu = 19, + kTfLiteBuiltinReluN1To1 = 20, + kTfLiteBuiltinRelu6 = 21, + kTfLiteBuiltinReshape = 22, + kTfLiteBuiltinResizeBilinear = 23, + kTfLiteBuiltinRnn = 24, + kTfLiteBuiltinSoftmax = 25, + kTfLiteBuiltinSpaceToDepth = 26, + kTfLiteBuiltinSvdf = 27, + kTfLiteBuiltinTanh = 28, + kTfLiteBuiltinConcatEmbeddings = 29, + kTfLiteBuiltinSkipGram = 30, + kTfLiteBuiltinCall = 31, + kTfLiteBuiltinCustom = 32, + kTfLiteBuiltinEmbeddingLookupSparse = 33, + kTfLiteBuiltinPad = 34, + kTfLiteBuiltinUnidirectionalSequenceRnn = 35, + kTfLiteBuiltinGather = 36, + kTfLiteBuiltinBatchToSpaceNd = 37, + kTfLiteBuiltinSpaceToBatchNd = 38, + kTfLiteBuiltinTranspose = 39, + kTfLiteBuiltinMean = 40, + kTfLiteBuiltinSub = 41, + kTfLiteBuiltinDiv = 42, + kTfLiteBuiltinSqueeze = 43, + kTfLiteBuiltinUnidirectionalSequenceLstm = 44, + kTfLiteBuiltinStridedSlice = 45, + kTfLiteBuiltinBidirectionalSequenceRnn = 46, + kTfLiteBuiltinExp = 47, + kTfLiteBuiltinTopkV2 = 48, + kTfLiteBuiltinSplit = 49, + kTfLiteBuiltinLogSoftmax = 50, + kTfLiteBuiltinDelegate = 51, + kTfLiteBuiltinBidirectionalSequenceLstm = 52, + kTfLiteBuiltinCast = 53, + kTfLiteBuiltinPrelu = 54, + kTfLiteBuiltinMaximum = 55, + kTfLiteBuiltinArgMax = 56, + 
kTfLiteBuiltinMinimum = 57, + kTfLiteBuiltinLess = 58, + kTfLiteBuiltinNeg = 59, + kTfLiteBuiltinPadv2 = 60, + kTfLiteBuiltinGreater = 61, + kTfLiteBuiltinGreaterEqual = 62, + kTfLiteBuiltinLessEqual = 63, + kTfLiteBuiltinSelect = 64, + kTfLiteBuiltinSlice = 65, + kTfLiteBuiltinSin = 66, + kTfLiteBuiltinTransposeConv = 67, + kTfLiteBuiltinSparseToDense = 68, + kTfLiteBuiltinTile = 69, + kTfLiteBuiltinExpandDims = 70, + kTfLiteBuiltinEqual = 71, + kTfLiteBuiltinNotEqual = 72, + kTfLiteBuiltinLog = 73, + kTfLiteBuiltinSum = 74, + kTfLiteBuiltinSqrt = 75, + kTfLiteBuiltinRsqrt = 76, + kTfLiteBuiltinShape = 77, + kTfLiteBuiltinPow = 78, + kTfLiteBuiltinArgMin = 79, + kTfLiteBuiltinFakeQuant = 80, + kTfLiteBuiltinReduceProd = 81, + kTfLiteBuiltinReduceMax = 82, + kTfLiteBuiltinPack = 83, + kTfLiteBuiltinLogicalOr = 84, + kTfLiteBuiltinOneHot = 85, + kTfLiteBuiltinLogicalAnd = 86, + kTfLiteBuiltinLogicalNot = 87, + kTfLiteBuiltinUnpack = 88, + kTfLiteBuiltinReduceMin = 89, + kTfLiteBuiltinFloorDiv = 90, + kTfLiteBuiltinReduceAny = 91, + kTfLiteBuiltinSquare = 92, + kTfLiteBuiltinZerosLike = 93, + kTfLiteBuiltinFill = 94, + kTfLiteBuiltinFloorMod = 95, + kTfLiteBuiltinRange = 96, + kTfLiteBuiltinResizeNearestNeighbor = 97, + kTfLiteBuiltinLeakyRelu = 98, + kTfLiteBuiltinSquaredDifference = 99, + kTfLiteBuiltinMirrorPad = 100, + kTfLiteBuiltinAbs = 101, + kTfLiteBuiltinSplitV = 102, + kTfLiteBuiltinUnique = 103, + kTfLiteBuiltinCeil = 104, + kTfLiteBuiltinReverseV2 = 105, + kTfLiteBuiltinAddN = 106, + kTfLiteBuiltinGatherNd = 107, + kTfLiteBuiltinCos = 108, + kTfLiteBuiltinWhere = 109, + kTfLiteBuiltinRank = 110, + kTfLiteBuiltinElu = 111, + kTfLiteBuiltinReverseSequence = 112, + kTfLiteBuiltinMatrixDiag = 113, + kTfLiteBuiltinQuantize = 114, + kTfLiteBuiltinMatrixSetDiag = 115, + kTfLiteBuiltinRound = 116, + kTfLiteBuiltinHardSwish = 117, + kTfLiteBuiltinIf = 118, + kTfLiteBuiltinWhile = 119, + kTfLiteBuiltinNonMaxSuppressionV4 = 120, + kTfLiteBuiltinNonMaxSuppressionV5 = 
121, + kTfLiteBuiltinScatterNd = 122, + kTfLiteBuiltinSelectV2 = 123, + kTfLiteBuiltinDensify = 124, + kTfLiteBuiltinSegmentSum = 125, + kTfLiteBuiltinBatchMatmul = 126, + kTfLiteBuiltinPlaceholderForGreaterOpCodes = 127, + kTfLiteBuiltinCumsum = 128, + kTfLiteBuiltinCallOnce = 129, + kTfLiteBuiltinBroadcastTo = 130, + kTfLiteBuiltinRfft2d = 131, + kTfLiteBuiltinConv3d = 132, + kTfLiteBuiltinImag = 133, + kTfLiteBuiltinReal = 134, + kTfLiteBuiltinComplexAbs = 135, + kTfLiteBuiltinHashtable = 136, + kTfLiteBuiltinHashtableFind = 137, + kTfLiteBuiltinHashtableImport = 138, + kTfLiteBuiltinHashtableSize = 139, + kTfLiteBuiltinReduceAll = 140, + kTfLiteBuiltinConv3dTranspose = 141, + kTfLiteBuiltinVarHandle = 142, + kTfLiteBuiltinReadVariable = 143, + kTfLiteBuiltinAssignVariable = 144, + kTfLiteBuiltinBroadcastArgs = 145, + kTfLiteBuiltinRandomStandardNormal = 146, + kTfLiteBuiltinBucketize = 147, + kTfLiteBuiltinRandomUniform = 148, + kTfLiteBuiltinMultinomial = 149, + kTfLiteBuiltinGelu = 150, + kTfLiteBuiltinDynamicUpdateSlice = 151, + kTfLiteBuiltinRelu0To1 = 152, + kTfLiteBuiltinUnsortedSegmentProd = 153, + kTfLiteBuiltinUnsortedSegmentMax = 154, + kTfLiteBuiltinUnsortedSegmentSum = 155, + kTfLiteBuiltinAtan2 = 156, + kTfLiteBuiltinUnsortedSegmentMin = 157, + kTfLiteBuiltinSign = 158, + kTfLiteBuiltinBitcast = 159, + kTfLiteBuiltinBitwiseXor = 160, + kTfLiteBuiltinRightShift = 161, +} TfLiteBuiltinOperator; + +#ifdef __cplusplus +} // extern "C" +#endif // __cplusplus +#endif // TENSORFLOW_LITE_BUILTIN_OPS_H_ diff --git a/tensorflow/lite/c/BUILD b/tensorflow/lite/c/BUILD new file mode 100644 index 0000000..d4d02fd --- /dev/null +++ b/tensorflow/lite/c/BUILD @@ -0,0 +1,32 @@ +load( + "//tensorflow/lite:build_def.bzl", + "tflite_copts", +) + +package( + default_visibility = ["//visibility:public"], + licenses = ["notice"], +) + +cc_library( + name = "common", + hdrs = [ + "builtin_op_data.h", + "common.h", + ], + copts = tflite_copts(), + deps = [ + ":c_api_types", 
+ "//tensorflow/lite/core/c:c_api_types", + "//tensorflow/lite/core/c:common", + ], +) + +cc_library( + name = "c_api_types", + hdrs = ["c_api_types.h"], + copts = tflite_copts(), + deps = [ + "//tensorflow/lite/core/c:c_api_types", + ], +) diff --git a/tensorflow/lite/c/builtin_op_data.h b/tensorflow/lite/c/builtin_op_data.h new file mode 100644 index 0000000..7628e5a --- /dev/null +++ b/tensorflow/lite/c/builtin_op_data.h @@ -0,0 +1,20 @@ +/* Copyright 2020 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ +#ifndef TENSORFLOW_LITE_C_BUILTIN_OP_DATA_H_ +#define TENSORFLOW_LITE_C_BUILTIN_OP_DATA_H_ + +#include "tensorflow/lite/core/c/builtin_op_data.h" + +#endif // TENSORFLOW_LITE_C_BUILTIN_OP_DATA_H_ diff --git a/tensorflow/lite/c/c_api_types.h b/tensorflow/lite/c/c_api_types.h new file mode 100644 index 0000000..cdbf1fd --- /dev/null +++ b/tensorflow/lite/c/c_api_types.h @@ -0,0 +1,20 @@ +/* Copyright 2022 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. 
+You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ +#ifndef TENSORFLOW_LITE_C_C_API_TYPES_H_ +#define TENSORFLOW_LITE_C_C_API_TYPES_H_ + +#include "tensorflow/lite/core/c/c_api_types.h" + +#endif // TENSORFLOW_LITE_C_C_API_TYPES_H_ diff --git a/tensorflow/lite/c/common.h b/tensorflow/lite/c/common.h new file mode 100644 index 0000000..e3e8001 --- /dev/null +++ b/tensorflow/lite/c/common.h @@ -0,0 +1,41 @@ +/* Copyright 2019 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +// This file defines common C types and APIs for implementing operations, +// delegates and other constructs in TensorFlow Lite. The actual operations and +// delegates can be defined using C++, but the interface between the interpreter +// and the operations are C. 
+// +// Summary of abstractions +// TF_LITE_ENSURE - Self-sufficient error checking +// TfLiteStatus - Status reporting +// TfLiteIntArray - stores tensor shapes (dims), +// TfLiteContext - allows an op to access the tensors +// TfLiteTensor - tensor (a multidimensional array) +// TfLiteNode - a single node or operation +// TfLiteRegistration - the implementation of a conceptual operation. +// TfLiteDelegate - allows delegation of nodes to alternative backends. +// +// Some abstractions in this file are created and managed by Interpreter. +// +// NOTE: The order of values in these structs are "semi-ABI stable". New values +// should be added only to the end of structs and never reordered. + +#ifndef TENSORFLOW_LITE_C_COMMON_H_ +#define TENSORFLOW_LITE_C_COMMON_H_ + +#include "tensorflow/lite/core/c/common.h" + +#endif // TENSORFLOW_LITE_C_COMMON_H_ diff --git a/tensorflow/lite/context_util.h b/tensorflow/lite/context_util.h new file mode 100644 index 0000000..cbbe9f1 --- /dev/null +++ b/tensorflow/lite/context_util.h @@ -0,0 +1,54 @@ +/* Copyright 2017 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ +/// \file +/// +/// This provides a few C++ helpers that are useful for manipulating C +/// structures in C++. 
+#ifndef TENSORFLOW_LITE_CONTEXT_UTIL_H_ +#define TENSORFLOW_LITE_CONTEXT_UTIL_H_ + +#include + +#include "tensorflow/lite/core/c/common.h" + +namespace tflite { + +/// Provides a range iterable wrapper for TfLiteIntArray* (C lists) that TfLite +/// C api uses. +// Can't use the google array_view, since we can't depend on even +// absl for embedded device reasons. +class TfLiteIntArrayView { + public: + /// Construct a view of a TfLiteIntArray*. Note, `int_array` should be + /// non-null and this view does not take ownership of it. + explicit TfLiteIntArrayView(const TfLiteIntArray* int_array) + : int_array_(int_array) {} + + TfLiteIntArrayView(const TfLiteIntArrayView&) = default; + TfLiteIntArrayView& operator=(const TfLiteIntArrayView& rhs) = default; + + typedef const int* const_iterator; + const_iterator begin() const { return int_array_->data; } + const_iterator end() const { return &int_array_->data[int_array_->size]; } + size_t size() const { return end() - begin(); } + int operator[](size_t pos) const { return int_array_->data[pos]; } + + private: + const TfLiteIntArray* int_array_; +}; + +} // namespace tflite + +#endif // TENSORFLOW_LITE_CONTEXT_UTIL_H_ diff --git a/tensorflow/lite/core/BUILD b/tensorflow/lite/core/BUILD new file mode 100644 index 0000000..2d753cc --- /dev/null +++ b/tensorflow/lite/core/BUILD @@ -0,0 +1,7 @@ +cc_library( + name = "macros", + hdrs = ["macros.h"], + visibility = [ + "//tensorflow/lite:__subpackages__", + ], +) diff --git a/tensorflow/lite/core/api/BUILD b/tensorflow/lite/core/api/BUILD new file mode 100644 index 0000000..ce6cd1a --- /dev/null +++ b/tensorflow/lite/core/api/BUILD @@ -0,0 +1,66 @@ +load("//tensorflow/lite:build_def.bzl", "tflite_copts") +load("//tensorflow/lite/micro:build_def.bzl", "micro_copts") + +package( + default_visibility = ["//visibility:private"], + licenses = ["notice"], +) + +cc_library( + name = "api", + srcs = [ + "flatbuffer_conversions.cc", + "tensor_utils.cc", + ], + hdrs = [ + 
"error_reporter.h", + "flatbuffer_conversions.h", + "op_resolver.h", + "tensor_utils.h", + ], + copts = tflite_copts() + micro_copts(), + visibility = ["//visibility:public"], + deps = [ + ":error_reporter", + ":op_resolver", + "//tensorflow/lite/c:common", + "//tensorflow/lite/kernels/internal:compatibility", + "//tensorflow/lite/schema:schema_fbs", + "//tensorflow/lite/schema:schema_utils", + "@flatbuffers//:runtime_cc", + ], +) + +# We define separate targets for "op_resolver" and "error_reporter", +# even though those headers are also exported by the "api" target, +# so that targets which only want to depend on these small abstract base +# class modules can express more fine-grained dependencies without +# pulling in tensor_utils and flatbuffer_conversions. + +cc_library( + name = "op_resolver", + srcs = ["op_resolver.cc"], + hdrs = ["op_resolver.h"], + copts = tflite_copts() + micro_copts(), + visibility = [ + "//visibility:public", + ], + deps = [ + ":error_reporter", + "//tensorflow/lite/c:common", + "//tensorflow/lite/schema:schema_fbs", + "//tensorflow/lite/schema:schema_utils", + "@flatbuffers//:runtime_cc", + ], +) + +cc_library( + name = "error_reporter", + srcs = ["error_reporter.cc"], + hdrs = ["error_reporter.h"], + copts = tflite_copts() + micro_copts(), + visibility = [ + "//visibility:public", + ], + deps = [], +) diff --git a/tensorflow/lite/core/api/error_reporter.cc b/tensorflow/lite/core/api/error_reporter.cc new file mode 100644 index 0000000..7070eaa --- /dev/null +++ b/tensorflow/lite/core/api/error_reporter.cc @@ -0,0 +1,38 @@ +/* Copyright 2017 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. 
+You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ +#include "tensorflow/lite/core/api/error_reporter.h" +#include + +namespace tflite { + +int ErrorReporter::Report(const char* format, ...) { + va_list args; + va_start(args, format); + int code = Report(format, args); + va_end(args); + return code; +} + +// TODO(aselle): Make the name of ReportError on context the same, so +// we can use the ensure functions w/o a context and w/ a reporter. +int ErrorReporter::ReportError(void*, const char* format, ...) { + va_list args; + va_start(args, format); + int code = Report(format, args); + va_end(args); + return code; +} + +} // namespace tflite diff --git a/tensorflow/lite/core/api/error_reporter.h b/tensorflow/lite/core/api/error_reporter.h new file mode 100644 index 0000000..99ab8cf --- /dev/null +++ b/tensorflow/lite/core/api/error_reporter.h @@ -0,0 +1,72 @@ +/* Copyright 2017 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/ +#ifndef TENSORFLOW_LITE_CORE_API_ERROR_REPORTER_H_ +#define TENSORFLOW_LITE_CORE_API_ERROR_REPORTER_H_ + +#include + +namespace tflite { + +/// A functor that reports error to supporting system. Invoked similar to +/// printf. +/// +/// Usage: +/// ErrorReporter foo; +/// foo.Report("test %d", 5); +/// or +/// va_list args; +/// foo.Report("test %d", args); // where args is va_list +/// +/// Subclass ErrorReporter to provide another reporting destination. +/// For example, if you have a GUI program, you might redirect to a buffer +/// that drives a GUI error log box. +class ErrorReporter { + public: + virtual ~ErrorReporter() = default; + /// Converts `args` to character equivalents according to `format` string, + /// constructs the error string and report it. + /// Returns number of characters written or zero on success, and negative + /// number on error. + virtual int Report(const char* format, va_list args) = 0; + + /// Converts arguments to character equivalents according to `format` string, + /// constructs the error string and report it. + /// Returns number of characters written or zero on success, and negative + /// number on error. + int Report(const char* format, ...); + + /// Equivalent to `Report` above. The additional `void*` parameter is unused. + /// This method is for compatibility with macros that takes `TfLiteContext`, + /// like TF_LITE_ENSURE and related macros. + int ReportError(void*, const char* format, ...); +}; + +} // namespace tflite + +// You should not make bare calls to the error reporter, instead use the +// TF_LITE_REPORT_ERROR macro, since this allows message strings to be +// stripped when the binary size has to be optimized. If you are looking to +// reduce binary size, define TF_LITE_STRIP_ERROR_STRINGS when compiling and +// every call will be stubbed out, taking no memory. 
+#ifndef TF_LITE_STRIP_ERROR_STRINGS +#define TF_LITE_REPORT_ERROR(reporter, ...) \ + do { \ + static_cast(reporter)->Report(__VA_ARGS__); \ + } while (false) +#else // TF_LITE_STRIP_ERROR_STRINGS +#define TF_LITE_REPORT_ERROR(reporter, ...) +#endif // TF_LITE_STRIP_ERROR_STRINGS + +#endif // TENSORFLOW_LITE_CORE_API_ERROR_REPORTER_H_ diff --git a/tensorflow/lite/core/api/flatbuffer_conversions.cc b/tensorflow/lite/core/api/flatbuffer_conversions.cc new file mode 100644 index 0000000..9f955df --- /dev/null +++ b/tensorflow/lite/core/api/flatbuffer_conversions.cc @@ -0,0 +1,2515 @@ +/* Copyright 2021 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#include "tensorflow/lite/core/api/flatbuffer_conversions.h" + +#include +#include +#include + +#include "flatbuffers/flatbuffers.h" // from @flatbuffers +#include "tensorflow/lite/core/api/error_reporter.h" +#include "tensorflow/lite/core/c/builtin_op_data.h" +#include "tensorflow/lite/core/c/common.h" +#include "tensorflow/lite/kernels/internal/compatibility.h" +#include "tensorflow/lite/schema/schema_generated.h" + +namespace tflite { + +namespace { + +// Utility class for safely allocating POD data. This is useful for avoiding +// leaks in cases where op params are allocated but fail to propagate to the +// parsed op data (e.g., when model parameters are invalid). 
+class SafeBuiltinDataAllocator { + public: + class BuiltinDataDeleter { + public: + explicit BuiltinDataDeleter(BuiltinDataAllocator* allocator) + : allocator_(allocator) {} + + void operator()(void* data) { allocator_->Deallocate(data); } + + private: + BuiltinDataAllocator* allocator_; + }; + + template + using BuiltinDataPtr = std::unique_ptr; + + explicit SafeBuiltinDataAllocator(BuiltinDataAllocator* allocator) + : allocator_(allocator) {} + + template + BuiltinDataPtr Allocate() { + return BuiltinDataPtr(allocator_->AllocatePOD(), + BuiltinDataDeleter(allocator_)); + } + + private: + BuiltinDataAllocator* allocator_; +}; + +// All the Parse functions take some pointers as params and this function has +// the common DCHECKs to catch if any of those are nullptr. +void CheckParsePointerParams(const Operator* op, ErrorReporter* error_reporter, + BuiltinDataAllocator* allocator, + void** builtin_data) { + TFLITE_DCHECK(op != nullptr); + TFLITE_DCHECK(error_reporter != nullptr); + TFLITE_DCHECK(allocator != nullptr); + TFLITE_DCHECK(builtin_data != nullptr); +} + +// Copies the contents from the flatbuffer int vector `flatbuffer` into the +// int array `buffer`. `flat_vector` and `buffer` represent the same +// configuration operation for a given operation. 
+TfLiteStatus FlatBufferIntVectorToArray( + int max_size_of_buffer, const flatbuffers::Vector* flat_vector, + int* buffer, ErrorReporter* error_reporter, const char* op_name) { + if (!flat_vector) { + TF_LITE_REPORT_ERROR(error_reporter, + "Input array not provided for operation '%s'.\n", + op_name); + return kTfLiteError; + } else { + size_t num_dimensions = flat_vector->size(); + if (num_dimensions > max_size_of_buffer / sizeof(int)) { + TF_LITE_REPORT_ERROR( + error_reporter, + "Found too many dimensions in the input array of operation '%s'.\n", + op_name); + return kTfLiteError; + } else { + for (size_t i = 0; i < num_dimensions; ++i) { + buffer[i] = flat_vector->Get(i); + } + } + } + return kTfLiteOk; +} + +// Converts the flatbuffer activation to what is used at runtime. +TfLiteFusedActivation ConvertActivation(ActivationFunctionType activation) { + switch (activation) { + case ActivationFunctionType_NONE: + return kTfLiteActNone; + case ActivationFunctionType_RELU: + return kTfLiteActRelu; + case ActivationFunctionType_RELU_N1_TO_1: + return kTfLiteActReluN1To1; + case ActivationFunctionType_RELU6: + return kTfLiteActRelu6; + case ActivationFunctionType_TANH: + return kTfLiteActTanh; + case ActivationFunctionType_SIGN_BIT: + return kTfLiteActSignBit; + } + return kTfLiteActNone; +} + +// Converts the flatbuffer padding enum to what is used at runtime. +TfLitePadding ConvertPadding(Padding padding) { + switch (padding) { + case Padding_SAME: + return kTfLitePaddingSame; + case Padding_VALID: + return kTfLitePaddingValid; + } + return kTfLitePaddingUnknown; +} + +// Converts the flatbuffer mirror padding enum to what is used at runtime. 
+TfLiteMirrorPaddingMode ConvertMirrorPadding(MirrorPadMode padding) { + switch (padding) { + case MirrorPadMode_REFLECT: + return kTfLiteMirrorPaddingReflect; + case MirrorPadMode_SYMMETRIC: + return kTfLiteMirrorPaddingSymmetric; + } + return kTfLiteMirrorPaddingUnknown; +} + +#ifndef TF_LITE_STATIC_MEMORY +TfLiteStatus ParseOpDataTfLite(const Operator* op, BuiltinOperator op_type, + ErrorReporter* error_reporter, + BuiltinDataAllocator* allocator, + void** builtin_data) { + auto parseLSHProjectionType = [](LSHProjectionType type) { + switch (type) { + case LSHProjectionType_SPARSE: + return kTfLiteLshProjectionSparse; + case LSHProjectionType_DENSE: + return kTfLiteLshProjectionDense; + default: + return kTfLiteLshProjectionUnknown; + } + }; + auto parseCombinerType = [](CombinerType type) { + switch (type) { + case CombinerType_MEAN: + return kTfLiteCombinerTypeMean; + case CombinerType_SQRTN: + return kTfLiteCombinerTypeSqrtn; + case CombinerType_SUM: + default: + return kTfLiteCombinerTypeSum; + } + }; + + SafeBuiltinDataAllocator safe_allocator(allocator); + *builtin_data = nullptr; + switch (op_type) { + case BuiltinOperator_ABS: { + return ParseAbs(op, error_reporter, allocator, builtin_data); + } + + case BuiltinOperator_ADD: { + return ParseAdd(op, error_reporter, allocator, builtin_data); + } + + case BuiltinOperator_ADD_N: { + return ParseAddN(op, error_reporter, allocator, builtin_data); + } + + case BuiltinOperator_ARG_MAX: { + return ParseArgMax(op, error_reporter, allocator, builtin_data); + } + + case BuiltinOperator_ARG_MIN: { + return ParseArgMin(op, error_reporter, allocator, builtin_data); + } + + case BuiltinOperator_ASSIGN_VARIABLE: { + return ParseAssignVariable(op, error_reporter, allocator, builtin_data); + } + + case BuiltinOperator_AVERAGE_POOL_2D: { + return ParsePool(op, error_reporter, allocator, builtin_data); + } + + case BuiltinOperator_BATCH_MATMUL: { + return ParseBatchMatMul(op, error_reporter, allocator, builtin_data); + } + + 
case BuiltinOperator_BATCH_TO_SPACE_ND: { + return ParseBatchToSpaceNd(op, error_reporter, allocator, builtin_data); + } + + case BuiltinOperator_BROADCAST_ARGS: { + return ParseBroadcastArgs(op, error_reporter, allocator, builtin_data); + } + + case BuiltinOperator_BROADCAST_TO: { + return ParseBroadcastTo(op, error_reporter, allocator, builtin_data); + } + + case BuiltinOperator_CALL_ONCE: { + return ParseCallOnce(op, error_reporter, allocator, builtin_data); + } + + case BuiltinOperator_CEIL: { + return ParseCeil(op, error_reporter, allocator, builtin_data); + } + + case BuiltinOperator_CONCATENATION: { + return ParseConcatenation(op, error_reporter, allocator, builtin_data); + } + + case BuiltinOperator_CONV_2D: { + return ParseConv2D(op, error_reporter, allocator, builtin_data); + } + + case BuiltinOperator_CUMSUM: { + return ParseCumsum(op, error_reporter, allocator, builtin_data); + } + + case BuiltinOperator_DEPTH_TO_SPACE: { + return ParseDepthToSpace(op, error_reporter, allocator, builtin_data); + } + + case BuiltinOperator_DEPTHWISE_CONV_2D: { + return ParseDepthwiseConv2D(op, error_reporter, allocator, builtin_data); + } + + case BuiltinOperator_DEQUANTIZE: { + return ParseDequantize(op, error_reporter, allocator, builtin_data); + } + + case BuiltinOperator_DIV: { + return ParseDiv(op, error_reporter, allocator, builtin_data); + } + + case BuiltinOperator_ELU: { + return ParseElu(op, error_reporter, allocator, builtin_data); + } + + case BuiltinOperator_EMBEDDING_LOOKUP: { + return ParseEmbeddingLookup(op, error_reporter, allocator, builtin_data); + } + + case BuiltinOperator_EXP: { + return ParseExp(op, error_reporter, allocator, builtin_data); + } + + case BuiltinOperator_EXPAND_DIMS: { + return ParseExpandDims(op, error_reporter, allocator, builtin_data); + } + + case BuiltinOperator_FILL: { + return ParseFill(op, error_reporter, allocator, builtin_data); + } + + case BuiltinOperator_FLOOR: { + return ParseFloor(op, error_reporter, allocator, 
builtin_data); + } + + case BuiltinOperator_FLOOR_DIV: { + return ParseFloorDiv(op, error_reporter, allocator, builtin_data); + } + + case BuiltinOperator_FLOOR_MOD: { + return ParseFloorMod(op, error_reporter, allocator, builtin_data); + } + + case BuiltinOperator_FULLY_CONNECTED: { + return ParseFullyConnected(op, error_reporter, allocator, builtin_data); + } + + case BuiltinOperator_GATHER_ND: { + return ParseGatherNd(op, error_reporter, allocator, builtin_data); + } + + case BuiltinOperator_GREATER: { + return ParseGreater(op, error_reporter, allocator, builtin_data); + } + + case BuiltinOperator_GREATER_EQUAL: { + return ParseGreaterEqual(op, error_reporter, allocator, builtin_data); + } + + case BuiltinOperator_HARD_SWISH: { + return ParseHardSwish(op, error_reporter, allocator, builtin_data); + } + + case BuiltinOperator_L2_NORMALIZATION: { + return ParseL2Normalization(op, error_reporter, allocator, builtin_data); + } + + case BuiltinOperator_L2_POOL_2D: { + return ParsePool(op, error_reporter, allocator, builtin_data); + } + + case BuiltinOperator_LEAKY_RELU: { + return ParseLeakyRelu(op, error_reporter, allocator, builtin_data); + } + + case BuiltinOperator_LESS: { + return ParseLess(op, error_reporter, allocator, builtin_data); + } + + case BuiltinOperator_LESS_EQUAL: { + return ParseLessEqual(op, error_reporter, allocator, builtin_data); + } + + case BuiltinOperator_LOG: { + return ParseLog(op, error_reporter, allocator, builtin_data); + } + + case BuiltinOperator_LOGICAL_AND: { + return ParseLogicalAnd(op, error_reporter, allocator, builtin_data); + } + + case BuiltinOperator_LOGICAL_NOT: { + return ParseLogicalNot(op, error_reporter, allocator, builtin_data); + } + + case BuiltinOperator_LOGICAL_OR: { + return ParseLogicalOr(op, error_reporter, allocator, builtin_data); + } + + case BuiltinOperator_LOGISTIC: { + return ParseLogistic(op, error_reporter, allocator, builtin_data); + } + + case BuiltinOperator_LOG_SOFTMAX: { + return ParseLogSoftmax(op, 
error_reporter, allocator, builtin_data); + } + + case BuiltinOperator_LSTM: { + return ParseLSTM(op, error_reporter, allocator, builtin_data); + } + + case BuiltinOperator_MAXIMUM: { + return ParseMaximum(op, error_reporter, allocator, builtin_data); + } + + case BuiltinOperator_MAX_POOL_2D: { + return ParsePool(op, error_reporter, allocator, builtin_data); + } + + case BuiltinOperator_MIRROR_PAD: { + return ParseMirrorPad(op, error_reporter, allocator, builtin_data); + } + + case BuiltinOperator_MEAN: { + return ParseReducer(op, error_reporter, allocator, builtin_data); + } + + case BuiltinOperator_MINIMUM: { + return ParseMinimum(op, error_reporter, allocator, builtin_data); + } + + case BuiltinOperator_MUL: { + return ParseMul(op, error_reporter, allocator, builtin_data); + } + + case BuiltinOperator_NEG: { + return ParseNeg(op, error_reporter, allocator, builtin_data); + } + + case BuiltinOperator_NOT_EQUAL: { + return ParseNotEqual(op, error_reporter, allocator, builtin_data); + } + + case BuiltinOperator_PACK: { + return ParsePack(op, error_reporter, allocator, builtin_data); + } + + case BuiltinOperator_PAD: { + return ParsePad(op, error_reporter, allocator, builtin_data); + } + + case BuiltinOperator_PADV2: { + return ParsePadV2(op, error_reporter, allocator, builtin_data); + } + + case BuiltinOperator_POW: { + return ParsePow(op, error_reporter, allocator, builtin_data); + } + + case BuiltinOperator_PRELU: { + return ParsePrelu(op, error_reporter, allocator, builtin_data); + } + + case BuiltinOperator_QUANTIZE: { + return ParseQuantize(op, error_reporter, allocator, builtin_data); + } + + case BuiltinOperator_READ_VARIABLE: { + return ParseReadVariable(op, error_reporter, allocator, builtin_data); + } + + case BuiltinOperator_REDUCE_ANY: { + return ParseReducer(op, error_reporter, allocator, builtin_data); + } + + case BuiltinOperator_REDUCE_ALL: { + return ParseReducer(op, error_reporter, allocator, builtin_data); + } + + case BuiltinOperator_REDUCE_MAX: 
{ + return ParseReducer(op, error_reporter, allocator, builtin_data); + } + + case BuiltinOperator_REDUCE_MIN: { + return ParseReducer(op, error_reporter, allocator, builtin_data); + } + + case BuiltinOperator_REDUCE_PROD: { + return ParseReducer(op, error_reporter, allocator, builtin_data); + } + + case BuiltinOperator_RELU: { + return ParseRelu(op, error_reporter, allocator, builtin_data); + } + + case BuiltinOperator_RELU6: { + return ParseRelu6(op, error_reporter, allocator, builtin_data); + } + + case BuiltinOperator_RESHAPE: { + return ParseReshape(op, error_reporter, allocator, builtin_data); + } + + case BuiltinOperator_RESIZE_BILINEAR: { + return ParseResizeBilinear(op, error_reporter, allocator, builtin_data); + } + + case BuiltinOperator_RESIZE_NEAREST_NEIGHBOR: { + return ParseResizeNearestNeighbor(op, error_reporter, allocator, + builtin_data); + } + + case BuiltinOperator_ROUND: { + return ParseRound(op, error_reporter, allocator, builtin_data); + } + + case BuiltinOperator_RSQRT: { + return ParseRsqrt(op, error_reporter, allocator, builtin_data); + } + + case BuiltinOperator_SELECT_V2: { + return ParseSelectV2(op, error_reporter, allocator, builtin_data); + } + + case BuiltinOperator_SHAPE: { + return ParseShape(op, error_reporter, allocator, builtin_data); + } + + case BuiltinOperator_SIN: { + return ParseSin(op, error_reporter, allocator, builtin_data); + } + + case BuiltinOperator_SOFTMAX: { + return ParseSoftmax(op, error_reporter, allocator, builtin_data); + } + + case BuiltinOperator_SPACE_TO_BATCH_ND: { + return ParseSpaceToBatchNd(op, error_reporter, allocator, builtin_data); + } + + case BuiltinOperator_SPACE_TO_DEPTH: { + return ParseSpaceToDepth(op, error_reporter, allocator, builtin_data); + } + + case BuiltinOperator_SPLIT: { + return ParseSplit(op, error_reporter, allocator, builtin_data); + } + + case BuiltinOperator_SPLIT_V: { + return ParseSplitV(op, error_reporter, allocator, builtin_data); + } + + case BuiltinOperator_SQRT: { + 
return ParseSqrt(op, error_reporter, allocator, builtin_data); + } + + case BuiltinOperator_SQUARE: { + return ParseSquare(op, error_reporter, allocator, builtin_data); + } + + case BuiltinOperator_SQUARED_DIFFERENCE: { + return ParseSquaredDifference(op, error_reporter, allocator, + builtin_data); + } + + case BuiltinOperator_SQUEEZE: { + return ParseSqueeze(op, error_reporter, allocator, builtin_data); + } + + case BuiltinOperator_STRIDED_SLICE: { + return ParseStridedSlice(op, error_reporter, allocator, builtin_data); + } + + case BuiltinOperator_SUB: { + return ParseSub(op, error_reporter, allocator, builtin_data); + } + + case BuiltinOperator_SUM: { + return ParseReducer(op, error_reporter, allocator, builtin_data); + } + + case BuiltinOperator_SVDF: { + return ParseSvdf(op, error_reporter, allocator, builtin_data); + } + + case BuiltinOperator_TANH: { + return ParseTanh(op, error_reporter, allocator, builtin_data); + } + + case BuiltinOperator_TRANSPOSE_CONV: { + return ParseTransposeConv(op, error_reporter, allocator, builtin_data); + } + + case BuiltinOperator_UNPACK: { + return ParseUnpack(op, error_reporter, allocator, builtin_data); + } + + case BuiltinOperator_VAR_HANDLE: { + return ParseVarHandle(op, error_reporter, allocator, builtin_data); + } + + case BuiltinOperator_ZEROS_LIKE: { + return ParseZerosLike(op, error_reporter, allocator, builtin_data); + } + + case BuiltinOperator_BITWISE_XOR: { + return ParseBitwiseXor(op, error_reporter, allocator, builtin_data); + } + + case BuiltinOperator_RIGHT_SHIFT: { + return ParseRightShift(op, error_reporter, allocator, builtin_data); + } + + case BuiltinOperator_CAST: { + return ParseCast(op, error_reporter, allocator, builtin_data); + } + case BuiltinOperator_LSH_PROJECTION: { + auto params = safe_allocator.Allocate(); + TF_LITE_ENSURE(error_reporter, params != nullptr); + if (const auto* lshParams = + op->builtin_options_as_LSHProjectionOptions()) { + params->type = 
parseLSHProjectionType(lshParams->type()); + } + *builtin_data = params.release(); + return kTfLiteOk; + } + case BuiltinOperator_UNIDIRECTIONAL_SEQUENCE_RNN: { + auto params = safe_allocator.Allocate(); + TF_LITE_ENSURE(error_reporter, params != nullptr); + if (const auto* sequence_rnn_params = + op->builtin_options_as_SequenceRNNOptions()) { + params->activation = + ConvertActivation(sequence_rnn_params->fused_activation_function()); + params->time_major = sequence_rnn_params->time_major(); + params->asymmetric_quantize_inputs = + sequence_rnn_params->asymmetric_quantize_inputs(); + } + *builtin_data = params.release(); + return kTfLiteOk; + } + case BuiltinOperator_BIDIRECTIONAL_SEQUENCE_RNN: { + auto params = + safe_allocator.Allocate(); + TF_LITE_ENSURE(error_reporter, params != nullptr); + if (const auto* bidi_sequence_rnn_params = + op->builtin_options_as_BidirectionalSequenceRNNOptions()) { + params->activation = ConvertActivation( + bidi_sequence_rnn_params->fused_activation_function()); + params->time_major = bidi_sequence_rnn_params->time_major(); + params->merge_outputs = bidi_sequence_rnn_params->merge_outputs(); + params->asymmetric_quantize_inputs = + bidi_sequence_rnn_params->asymmetric_quantize_inputs(); + } + *builtin_data = params.release(); + return kTfLiteOk; + } + case BuiltinOperator_RNN: { + auto params = safe_allocator.Allocate(); + TF_LITE_ENSURE(error_reporter, params != nullptr); + if (const auto* rnn_params = op->builtin_options_as_RNNOptions()) { + params->activation = + ConvertActivation(rnn_params->fused_activation_function()); + params->asymmetric_quantize_inputs = + rnn_params->asymmetric_quantize_inputs(); + } + *builtin_data = params.release(); + return kTfLiteOk; + } + case BuiltinOperator_EMBEDDING_LOOKUP_SPARSE: { + auto params = + safe_allocator.Allocate(); + TF_LITE_ENSURE(error_reporter, params != nullptr); + if (const auto* embedding_params = + op->builtin_options_as_EmbeddingLookupSparseOptions()) { + params->combiner = 
parseCombinerType(embedding_params->combiner()); + } + *builtin_data = params.release(); + return kTfLiteOk; + } + + case BuiltinOperator_HASHTABLE_LOOKUP: + // no-op. + return kTfLiteOk; + + case BuiltinOperator_LOCAL_RESPONSE_NORMALIZATION: { + auto params = safe_allocator.Allocate(); + TF_LITE_ENSURE(error_reporter, params != nullptr); + if (const auto* schema_params = + op->builtin_options_as_LocalResponseNormalizationOptions()) { + params->radius = schema_params->radius(); + params->bias = schema_params->bias(); + params->alpha = schema_params->alpha(); + params->beta = schema_params->beta(); + } + *builtin_data = params.release(); + return kTfLiteOk; + } + case BuiltinOperator_UNIDIRECTIONAL_SEQUENCE_LSTM: { + return ParseUnidirectionalSequenceLSTM(op, error_reporter, allocator, + builtin_data); + } + case BuiltinOperator_BIDIRECTIONAL_SEQUENCE_LSTM: { + auto params = + safe_allocator.Allocate(); + TF_LITE_ENSURE(error_reporter, params != nullptr); + if (const auto* bidi_lstm_params = + op->builtin_options_as_BidirectionalSequenceLSTMOptions()) { + params->activation = + ConvertActivation(bidi_lstm_params->fused_activation_function()); + params->cell_clip = bidi_lstm_params->cell_clip(); + params->proj_clip = bidi_lstm_params->proj_clip(); + params->merge_outputs = bidi_lstm_params->merge_outputs(); + params->time_major = bidi_lstm_params->time_major(); + params->asymmetric_quantize_inputs = + bidi_lstm_params->asymmetric_quantize_inputs(); + } + *builtin_data = params.release(); + return kTfLiteOk; + } + case BuiltinOperator_SKIP_GRAM: { + auto params = safe_allocator.Allocate(); + TF_LITE_ENSURE(error_reporter, params != nullptr); + if (const auto* skip_gram_params = + op->builtin_options_as_SkipGramOptions()) { + params->ngram_size = skip_gram_params->ngram_size(); + params->max_skip_size = skip_gram_params->max_skip_size(); + params->include_all_ngrams = skip_gram_params->include_all_ngrams(); + } + *builtin_data = params.release(); + return kTfLiteOk; + 
} + + case BuiltinOperator_GATHER: { + return ParseGather(op, error_reporter, allocator, builtin_data); + } + case BuiltinOperator_SPARSE_TO_DENSE: { + auto params = safe_allocator.Allocate(); + TF_LITE_ENSURE(error_reporter, params != nullptr); + if (const auto* sparse_to_dense_params = + op->builtin_options_as_SparseToDenseOptions()) { + params->validate_indices = sparse_to_dense_params->validate_indices(); + } + *builtin_data = params.release(); + return kTfLiteOk; + } + case BuiltinOperator_DELEGATE: { + TF_LITE_REPORT_ERROR(error_reporter, + "DELEGATE op shouldn't exist in model."); + return kTfLiteError; + } + case BuiltinOperator_FAKE_QUANT: { + auto params = safe_allocator.Allocate(); + TF_LITE_ENSURE(error_reporter, params != nullptr); + if (const auto* schema_params = + op->builtin_options_as_FakeQuantOptions()) { + params->min = schema_params->min(); + params->max = schema_params->max(); + params->num_bits = schema_params->num_bits(); + params->narrow_range = schema_params->narrow_range(); + } + *builtin_data = params.release(); + return kTfLiteOk; + } + case BuiltinOperator_ONE_HOT: { + auto params = safe_allocator.Allocate(); + TF_LITE_ENSURE(error_reporter, params != nullptr); + if (const auto* schema_params = op->builtin_options_as_OneHotOptions()) { + params->axis = schema_params->axis(); + } + *builtin_data = params.release(); + return kTfLiteOk; + } + case BuiltinOperator_UNIQUE: { + auto params = safe_allocator.Allocate(); + TF_LITE_ENSURE(error_reporter, params != nullptr); + const auto* unique_params = op->builtin_options_as_UniqueOptions(); + if (unique_params != nullptr) { + params->index_out_type = + unique_params->idx_out_type() == tflite::TensorType_INT64 + ? 
TfLiteType::kTfLiteInt64 + : TfLiteType::kTfLiteInt32; + } + *builtin_data = params.release(); + return kTfLiteOk; + } + case BuiltinOperator_REVERSE_SEQUENCE: { + auto params = safe_allocator.Allocate(); + TF_LITE_ENSURE(error_reporter, params != nullptr); + if (const auto* reverse_seq_params = + op->builtin_options_as_ReverseSequenceOptions()) { + params->seq_dim = reverse_seq_params->seq_dim(); + params->batch_dim = reverse_seq_params->batch_dim(); + } + *builtin_data = params.release(); + return kTfLiteOk; + } + case BuiltinOperator_IF: { + auto params = safe_allocator.Allocate(); + TF_LITE_ENSURE(error_reporter, params != nullptr); + if (const auto* if_params = op->builtin_options_as_IfOptions()) { + params->then_subgraph_index = if_params->then_subgraph_index(); + params->else_subgraph_index = if_params->else_subgraph_index(); + } + *builtin_data = params.release(); + return kTfLiteOk; + } + case BuiltinOperator_WHILE: { + auto params = safe_allocator.Allocate(); + TF_LITE_ENSURE(error_reporter, params != nullptr); + if (const auto* while_params = op->builtin_options_as_WhileOptions()) { + params->cond_subgraph_index = while_params->cond_subgraph_index(); + params->body_subgraph_index = while_params->body_subgraph_index(); + } + *builtin_data = params.release(); + return kTfLiteOk; + } + case BuiltinOperator_CONV_3D: + case BuiltinOperator_CONV_3D_TRANSPOSE: { + auto params = safe_allocator.Allocate(); + TF_LITE_ENSURE(error_reporter, params != nullptr); + if (const auto* conv3d_params = op->builtin_options_as_Conv3DOptions()) { + params->padding = ConvertPadding(conv3d_params->padding()); + params->activation = + ConvertActivation(conv3d_params->fused_activation_function()); + params->stride_depth = conv3d_params->stride_d(); + params->stride_height = conv3d_params->stride_h(); + params->stride_width = conv3d_params->stride_w(); + params->dilation_depth_factor = conv3d_params->dilation_d_factor(); + params->dilation_height_factor = 
conv3d_params->dilation_h_factor(); + params->dilation_width_factor = conv3d_params->dilation_w_factor(); + } + *builtin_data = params.release(); + return kTfLiteOk; + } + case BuiltinOperator_HASHTABLE: { + auto params = safe_allocator.Allocate(); + TF_LITE_ENSURE(error_reporter, params != nullptr); + if (const auto* hashtable_params = + op->builtin_options_as_HashtableOptions()) { + params->table_id = hashtable_params->table_id(); + TF_LITE_ENSURE_STATUS(ConvertTensorType( + hashtable_params->key_dtype(), ¶ms->key_dtype, error_reporter)); + TF_LITE_ENSURE_STATUS(ConvertTensorType(hashtable_params->value_dtype(), + ¶ms->value_dtype, + error_reporter)); + } + *builtin_data = params.release(); + return kTfLiteOk; + } + case BuiltinOperator_MULTINOMIAL: { + auto params = safe_allocator.Allocate(); + TF_LITE_ENSURE(error_reporter, params != nullptr); + if (const auto* multinomial_params = + op->builtin_options_as_RandomOptions()) { + params->seed = multinomial_params->seed(); + params->seed2 = multinomial_params->seed2(); + } + *builtin_data = params.release(); + return kTfLiteOk; + } + case BuiltinOperator_RANDOM_STANDARD_NORMAL: { + auto params = safe_allocator.Allocate(); + TF_LITE_ENSURE(error_reporter, params != nullptr); + if (const auto* random_std_normal_params = + op->builtin_options_as_RandomOptions()) { + params->seed = random_std_normal_params->seed(); + params->seed2 = random_std_normal_params->seed2(); + } + *builtin_data = params.release(); + return kTfLiteOk; + } + case BuiltinOperator_BUCKETIZE: { + auto params = safe_allocator.Allocate(); + TF_LITE_ENSURE(error_reporter, params != nullptr); + if (const auto* bucketize_params = + op->builtin_options_as_BucketizeOptions()) { + const flatbuffers::Vector* boundaries = + bucketize_params->boundaries(); + if (boundaries == nullptr) { + TF_LITE_REPORT_ERROR( + error_reporter, + "boundaries array not provided for operation 'bucketize'.\n"); + return kTfLiteError; + } + params->num_boundaries = 
boundaries->size(); + if (boundaries->data() == nullptr) { + TF_LITE_REPORT_ERROR(error_reporter, + "boundaries.data() returned nullptr for " + "operation 'bucketize'.\n"); + return kTfLiteError; + } + params->boundaries = boundaries->data(); + } + *builtin_data = params.release(); + return kTfLiteOk; + } + case BuiltinOperator_RANDOM_UNIFORM: { + auto params = safe_allocator.Allocate(); + TF_LITE_ENSURE(error_reporter, params != nullptr); + if (const auto* random_uniform_params = + op->builtin_options_as_RandomOptions()) { + params->seed = random_uniform_params->seed(); + params->seed2 = random_uniform_params->seed2(); + } + *builtin_data = params.release(); + return kTfLiteOk; + } + case BuiltinOperator_GELU: { + auto params = safe_allocator.Allocate(); + TF_LITE_ENSURE(error_reporter, params != nullptr); + if (const auto* gelu_params = op->builtin_options_as_GeluOptions()) { + params->approximate = gelu_params->approximate(); + } + *builtin_data = params.release(); + return kTfLiteOk; + } + + // Below are the ops with no builtin_data structure. + // TODO(aselle): Implement call in BuiltinOptions, but nullptrs are + // ok for now, since there is no call implementation either. 
+ case BuiltinOperator_CALL: + case BuiltinOperator_COMPLEX_ABS: + case BuiltinOperator_CONCAT_EMBEDDINGS: + case BuiltinOperator_COS: + case BuiltinOperator_CUSTOM: + case BuiltinOperator_DENSIFY: + case BuiltinOperator_DYNAMIC_UPDATE_SLICE: + case BuiltinOperator_EQUAL: + case BuiltinOperator_HASHTABLE_FIND: + case BuiltinOperator_HASHTABLE_IMPORT: + case BuiltinOperator_HASHTABLE_SIZE: + case BuiltinOperator_IMAG: + case BuiltinOperator_MATRIX_DIAG: + case BuiltinOperator_MATRIX_SET_DIAG: + case BuiltinOperator_NON_MAX_SUPPRESSION_V4: + case BuiltinOperator_NON_MAX_SUPPRESSION_V5: + case BuiltinOperator_RELU_N1_TO_1: + case BuiltinOperator_RELU_0_TO_1: + case BuiltinOperator_SCATTER_ND: + case BuiltinOperator_SELECT: + case BuiltinOperator_SLICE: + case BuiltinOperator_TILE: + case BuiltinOperator_TOPK_V2: + case BuiltinOperator_TRANSPOSE: + case BuiltinOperator_RANGE: + case BuiltinOperator_RANK: + case BuiltinOperator_REAL: + case BuiltinOperator_RFFT2D: + case BuiltinOperator_SEGMENT_SUM: + case BuiltinOperator_REVERSE_V2: + case BuiltinOperator_UNSORTED_SEGMENT_MAX: + case BuiltinOperator_UNSORTED_SEGMENT_MIN: + case BuiltinOperator_UNSORTED_SEGMENT_PROD: + case BuiltinOperator_UNSORTED_SEGMENT_SUM: + case BuiltinOperator_ATAN2: + case BuiltinOperator_SIGN: + case BuiltinOperator_BITCAST: + case BuiltinOperator_WHERE: + return kTfLiteOk; + case BuiltinOperator_PLACEHOLDER_FOR_GREATER_OP_CODES: + return kTfLiteError; + } + return kTfLiteError; +} // NOLINT[readability/fn_size] +#endif // !defined(TF_LITE_STATIC_MEMORY) +} // namespace + +TfLiteStatus ConvertTensorType(TensorType tensor_type, TfLiteType* type, + ErrorReporter* error_reporter) { + switch (tensor_type) { + case TensorType_FLOAT16: + *type = kTfLiteFloat16; + return kTfLiteOk; + case TensorType_FLOAT32: + *type = kTfLiteFloat32; + return kTfLiteOk; + case TensorType_FLOAT64: + *type = kTfLiteFloat64; + return kTfLiteOk; + case TensorType_INT16: + *type = kTfLiteInt16; + return kTfLiteOk; + case 
TensorType_UINT16: + *type = kTfLiteUInt16; + return kTfLiteOk; + case TensorType_INT32: + *type = kTfLiteInt32; + return kTfLiteOk; + case TensorType_UINT32: + *type = kTfLiteUInt32; + return kTfLiteOk; + case TensorType_UINT8: + *type = kTfLiteUInt8; + return kTfLiteOk; + case TensorType_INT8: + *type = kTfLiteInt8; + return kTfLiteOk; + case TensorType_INT64: + *type = kTfLiteInt64; + return kTfLiteOk; + case TensorType_UINT64: + *type = kTfLiteUInt64; + return kTfLiteOk; + case TensorType_STRING: + *type = kTfLiteString; + return kTfLiteOk; + case TensorType_BOOL: + *type = kTfLiteBool; + return kTfLiteOk; + case TensorType_COMPLEX64: + *type = kTfLiteComplex64; + return kTfLiteOk; + case TensorType_COMPLEX128: + *type = kTfLiteComplex128; + return kTfLiteOk; + case TensorType_RESOURCE: + *type = kTfLiteResource; + return kTfLiteOk; + case TensorType_VARIANT: + *type = kTfLiteVariant; + return kTfLiteOk; + case TensorType_INT4: + *type = kTfLiteInt4; + return kTfLiteOk; + default: + *type = kTfLiteNoType; + TF_LITE_REPORT_ERROR(error_reporter, + "Unsupported data type %d in tensor\n", tensor_type); + return kTfLiteError; + } +} + +// We have this parse function instead of directly returning kTfLiteOk from the +// switch-case in ParseOpData because this function is used as part of the +// selective registration for the OpResolver implementation in micro. 
+TfLiteStatus ParseAbs(const Operator*, ErrorReporter*, BuiltinDataAllocator*, + void**) { + return kTfLiteOk; +} + +TfLiteStatus ParseAdd(const Operator* op, ErrorReporter* error_reporter, + BuiltinDataAllocator* allocator, void** builtin_data) { + CheckParsePointerParams(op, error_reporter, allocator, builtin_data); + + SafeBuiltinDataAllocator safe_allocator(allocator); + std::unique_ptr + params = safe_allocator.Allocate(); + TF_LITE_ENSURE(error_reporter, params != nullptr); + + const AddOptions* schema_params = op->builtin_options_as_AddOptions(); + + if (schema_params != nullptr) { + params->activation = + ConvertActivation(schema_params->fused_activation_function()); + params->pot_scale_int16 = schema_params->pot_scale_int16(); + } else { + // TODO(b/157480169): We should either return kTfLiteError or fill in some + // reasonable defaults in the params struct. We are not doing so until we + // better undertand the ramifications of changing the legacy behavior. + } + + *builtin_data = params.release(); + return kTfLiteOk; +} + +TfLiteStatus ParseAddN(const Operator* op, ErrorReporter* error_reporter, + BuiltinDataAllocator* allocator, void** builtin_data) { + return kTfLiteOk; +} + +TfLiteStatus ParseArgMax(const Operator* op, ErrorReporter* error_reporter, + BuiltinDataAllocator* allocator, void** builtin_data) { + CheckParsePointerParams(op, error_reporter, allocator, builtin_data); + + SafeBuiltinDataAllocator safe_allocator(allocator); + std::unique_ptr + params = safe_allocator.Allocate(); + TF_LITE_ENSURE(error_reporter, params != nullptr); + + const ArgMaxOptions* schema_params = op->builtin_options_as_ArgMaxOptions(); + + if (schema_params != nullptr) { + TF_LITE_ENSURE_STATUS(ConvertTensorType( + schema_params->output_type(), ¶ms->output_type, error_reporter)); + } else { + // TODO(b/157480169): We should either return kTfLiteError or fill in some + // reasonable defaults in the params struct. 
We are not doing so until we + // better undertand the ramifications of changing the legacy behavior. + } + + *builtin_data = params.release(); + return kTfLiteOk; +} + +TfLiteStatus ParseArgMin(const Operator* op, ErrorReporter* error_reporter, + BuiltinDataAllocator* allocator, void** builtin_data) { + CheckParsePointerParams(op, error_reporter, allocator, builtin_data); + + SafeBuiltinDataAllocator safe_allocator(allocator); + std::unique_ptr + params = safe_allocator.Allocate(); + TF_LITE_ENSURE(error_reporter, params != nullptr); + + const ArgMinOptions* schema_params = op->builtin_options_as_ArgMinOptions(); + + if (schema_params != nullptr) { + TF_LITE_ENSURE_STATUS(ConvertTensorType( + schema_params->output_type(), ¶ms->output_type, error_reporter)); + } else { + // TODO(b/157480169): We should either return kTfLiteError or fill in some + // reasonable defaults in the params struct. We are not doing so until we + // better undertand the ramifications of changing the legacy behavior. + } + + *builtin_data = params.release(); + return kTfLiteOk; +} + +// We have this parse function instead of directly returning kTfLiteOk from the +// switch-case in ParseOpData because this function is used as part of the +// selective registration for the OpResolver implementation in micro. +TfLiteStatus ParseAssignVariable(const Operator*, ErrorReporter*, + BuiltinDataAllocator*, void**) { + return kTfLiteOk; +} + +// We have this parse function instead of directly returning kTfLiteOk from the +// switch-case in ParseOpData because this function is used as part of the +// selective registration for the OpResolver implementation in micro. 
+TfLiteStatus ParseBatchMatMul(const Operator* op, ErrorReporter* error_reporter, + BuiltinDataAllocator* allocator, + void** builtin_data) { + CheckParsePointerParams(op, error_reporter, allocator, builtin_data); + + SafeBuiltinDataAllocator safe_allocator(allocator); + auto params = safe_allocator.Allocate(); + TF_LITE_ENSURE(error_reporter, params != nullptr); + if (const auto* bmm_params = op->builtin_options_as_BatchMatMulOptions()) { + params->adj_x = bmm_params->adj_x(); + params->adj_y = bmm_params->adj_y(); + params->asymmetric_quantize_inputs = + bmm_params->asymmetric_quantize_inputs(); + } + *builtin_data = params.release(); + return kTfLiteOk; +} + +// We have this parse function instead of directly returning kTfLiteOk from the +// switch-case in ParseOpData because this function is used as part of the +// selective registration for the OpResolver implementation in micro. +TfLiteStatus ParseBatchToSpaceNd(const Operator*, ErrorReporter*, + BuiltinDataAllocator*, void**) { + return kTfLiteOk; +} + +// We have this parse function instead of directly returning kTfLiteOk from the +// switch-case in ParseOpData because this function is used as part of the +// selective registration for the OpResolver implementation in micro. +TfLiteStatus ParseBroadcastArgs(const Operator*, ErrorReporter*, + BuiltinDataAllocator*, void**) { + return kTfLiteOk; +} + +// We have this parse function instead of directly returning kTfLiteOk from the +// switch-case in ParseOpData because this function is used as part of the +// selective registration for the OpResolver implementation in micro. 
+TfLiteStatus ParseBroadcastTo(const Operator*, ErrorReporter*, + BuiltinDataAllocator*, void**) { + return kTfLiteOk; +} + +TfLiteStatus ParseCallOnce(const Operator* op, ErrorReporter* error_reporter, + BuiltinDataAllocator* allocator, + void** builtin_data) { + CheckParsePointerParams(op, error_reporter, allocator, builtin_data); + + SafeBuiltinDataAllocator safe_allocator(allocator); + std::unique_ptr + params = safe_allocator.Allocate(); + TF_LITE_ENSURE(error_reporter, params != nullptr); + + const CallOnceOptions* schema_params = + op->builtin_options_as_CallOnceOptions(); + + if (schema_params != nullptr) { + params->init_subgraph_index = schema_params->init_subgraph_index(); + + } else { + // TODO(b/157480169): We should either return kTfLiteError or fill in some + // reasonable defaults in the params struct. We are not doing so until we + // better undertand the ramifications of changing the legacy behavior. + } + + *builtin_data = params.release(); + return kTfLiteOk; +} + +// We have this parse function instead of directly returning kTfLiteOk from the +// switch-case in ParseOpData because this function is used as part of the +// selective registration for the OpResolver implementation in micro. 
+TfLiteStatus ParseCast(const Operator* op, ErrorReporter* error_reporter, + BuiltinDataAllocator* allocator, void** builtin_data) { + CheckParsePointerParams(op, error_reporter, allocator, builtin_data); + + SafeBuiltinDataAllocator safe_allocator(allocator); + auto params = safe_allocator.Allocate(); + TF_LITE_ENSURE(error_reporter, params != nullptr); + if (const auto* schema_params = op->builtin_options_as_CastOptions()) { + TF_LITE_ENSURE_STATUS(ConvertTensorType( + schema_params->in_data_type(), ¶ms->in_data_type, error_reporter)); + TF_LITE_ENSURE_STATUS(ConvertTensorType(schema_params->out_data_type(), + ¶ms->out_data_type, + error_reporter)); + } + *builtin_data = params.release(); + return kTfLiteOk; +} + +// We have this parse function instead of directly returning kTfLiteOk from the +// switch-case in ParseOpData because this function is used as part of the +// selective registration for the OpResolver implementation in micro. +TfLiteStatus ParseCeil(const Operator*, ErrorReporter*, BuiltinDataAllocator*, + void**) { + return kTfLiteOk; +} + +TfLiteStatus ParseConcatenation(const Operator* op, + ErrorReporter* error_reporter, + BuiltinDataAllocator* allocator, + void** builtin_data) { + CheckParsePointerParams(op, error_reporter, allocator, builtin_data); + + SafeBuiltinDataAllocator safe_allocator(allocator); + std::unique_ptr + params = safe_allocator.Allocate(); + TF_LITE_ENSURE(error_reporter, params != nullptr); + + const ConcatenationOptions* schema_params = + op->builtin_options_as_ConcatenationOptions(); + + if (schema_params != nullptr) { + params->activation = + ConvertActivation(schema_params->fused_activation_function()); + params->axis = schema_params->axis(); + } else { + // TODO(b/157480169): We should either return kTfLiteError or fill in some + // reasonable defaults in the params struct. We are not doing so until we + // better undertand the ramifications of changing the legacy behavior. 
+ } + + *builtin_data = params.release(); + return kTfLiteOk; +} + +TfLiteStatus ParseConv2D(const Operator* op, ErrorReporter* error_reporter, + BuiltinDataAllocator* allocator, void** builtin_data) { + CheckParsePointerParams(op, error_reporter, allocator, builtin_data); + + SafeBuiltinDataAllocator safe_allocator(allocator); + std::unique_ptr + params = safe_allocator.Allocate(); + TF_LITE_ENSURE(error_reporter, params != nullptr); + + const Conv2DOptions* schema_params = op->builtin_options_as_Conv2DOptions(); + + if (schema_params != nullptr) { + params->padding = ConvertPadding(schema_params->padding()); + params->stride_width = schema_params->stride_w(); + params->stride_height = schema_params->stride_h(); + params->activation = + ConvertActivation(schema_params->fused_activation_function()); + + params->dilation_width_factor = schema_params->dilation_w_factor(); + params->dilation_height_factor = schema_params->dilation_h_factor(); + } else { + // TODO(b/157480169): We should either return kTfLiteError or fill in some + // reasonable defaults in the params struct. We are not doing so until we + // better undertand the ramifications of changing the legacy behavior. + } + + *builtin_data = params.release(); + return kTfLiteOk; +} + +// We have this parse function instead of directly returning kTfLiteOk from the +// switch-case in ParseOpData because this function is used as part of the +// selective registration for the OpResolver implementation in micro. 
+TfLiteStatus ParseCumsum(const Operator* op, ErrorReporter* error_reporter, + BuiltinDataAllocator* allocator, void** builtin_data) { + CheckParsePointerParams(op, error_reporter, allocator, builtin_data); + + SafeBuiltinDataAllocator safe_allocator(allocator); + auto params = safe_allocator.Allocate(); + TF_LITE_ENSURE(error_reporter, params != nullptr); + if (const auto* cumsum_params = op->builtin_options_as_CumsumOptions()) { + params->exclusive = cumsum_params->exclusive(); + params->reverse = cumsum_params->reverse(); + } + *builtin_data = params.release(); + return kTfLiteOk; +} + +// We have this parse function instead of directly returning kTfLiteOk from the +// switch-case in ParseOpData because this function is used as part of the +// selective registration for the OpResolver implementation in micro. +TfLiteStatus ParseCos(const Operator*, ErrorReporter*, BuiltinDataAllocator*, + void**) { + return kTfLiteOk; +} + +TfLiteStatus ParseDepthToSpace(const Operator* op, + ErrorReporter* error_reporter, + BuiltinDataAllocator* allocator, + void** builtin_data) { + CheckParsePointerParams(op, error_reporter, allocator, builtin_data); + + SafeBuiltinDataAllocator safe_allocator(allocator); + std::unique_ptr + params = safe_allocator.Allocate(); + TF_LITE_ENSURE(error_reporter, params != nullptr); + + const auto* schema_params = op->builtin_options_as_DepthToSpaceOptions(); + if (schema_params != nullptr) { + params->block_size = schema_params->block_size(); + } else { + // TODO(b/157480169): We should either return kTfLiteError or fill in some + // reasonable defaults in the params struct. We are not doing so until we + // better undertand the ramifications of changing the legacy behavior. 
+ } + + *builtin_data = params.release(); + return kTfLiteOk; +} + +TfLiteStatus ParseDepthwiseConv2D(const Operator* op, + ErrorReporter* error_reporter, + BuiltinDataAllocator* allocator, + void** builtin_data) { + CheckParsePointerParams(op, error_reporter, allocator, builtin_data); + + SafeBuiltinDataAllocator safe_allocator(allocator); + + std::unique_ptr + params = safe_allocator.Allocate(); + TF_LITE_ENSURE(error_reporter, params != nullptr); + + const DepthwiseConv2DOptions* schema_params = + op->builtin_options_as_DepthwiseConv2DOptions(); + + if (schema_params != nullptr) { + params->padding = ConvertPadding(schema_params->padding()); + params->stride_width = schema_params->stride_w(); + params->stride_height = schema_params->stride_h(); + params->depth_multiplier = schema_params->depth_multiplier(); + params->activation = + ConvertActivation(schema_params->fused_activation_function()); + + params->dilation_width_factor = schema_params->dilation_w_factor(); + params->dilation_height_factor = schema_params->dilation_h_factor(); + } else { + // TODO(b/157480169): We should either return kTfLiteError or fill in some + // reasonable defaults in the params struct. We are not doing so until we + // better undertand the ramifications of changing the legacy behavior. + } + + *builtin_data = params.release(); + return kTfLiteOk; +} + +// We have this parse function instead of directly returning kTfLiteOk from the +// switch-case in ParseOpData because this function is used as part of the +// selective registration for the OpResolver implementation in micro. 
+TfLiteStatus ParseDequantize(const Operator*, ErrorReporter*, + BuiltinDataAllocator*, void**) { + return kTfLiteOk; +} + +TfLiteStatus ParseDiv(const Operator* op, ErrorReporter* error_reporter, + BuiltinDataAllocator* allocator, void** builtin_data) { + CheckParsePointerParams(op, error_reporter, allocator, builtin_data); + + SafeBuiltinDataAllocator safe_allocator(allocator); + auto params = safe_allocator.Allocate(); + TF_LITE_ENSURE(error_reporter, params != nullptr); + if (const auto* schema_params = op->builtin_options_as_DivOptions()) { + params->activation = + ConvertActivation(schema_params->fused_activation_function()); + } + *builtin_data = params.release(); + return kTfLiteOk; +} + +// We have this parse function instead of directly returning kTfLiteOk from the +// switch-case in ParseOpData because this function is used as part of the +// selective registration for the OpResolver implementation in micro. +TfLiteStatus ParseElu(const Operator*, ErrorReporter*, BuiltinDataAllocator*, + void**) { + return kTfLiteOk; +} + +// We have this parse function instead of directly returning kTfLiteOk from the +// switch-case in ParseOpData because this function is used as part of the +// selective registration for the OpResolver implementation in micro. +TfLiteStatus ParseEmbeddingLookup(const Operator*, ErrorReporter*, + BuiltinDataAllocator*, void**) { + return kTfLiteOk; +} + +// We have this parse function instead of directly returning kTfLiteOk from the +// switch-case in ParseOpData because this function is used as part of the +// selective registration for the OpResolver implementation in micro. +TfLiteStatus ParseEqual(const Operator*, ErrorReporter*, BuiltinDataAllocator*, + void**) { + return kTfLiteOk; +} + +// We have this parse function instead of directly returning kTfLiteOk from the +// switch-case in ParseOpData because this function is used as part of the +// selective registration for the OpResolver implementation in micro. 
+TfLiteStatus ParseExp(const Operator*, ErrorReporter*, BuiltinDataAllocator*, + void**) { + return kTfLiteOk; +} + +// We have this parse function instead of directly returning kTfLiteOk from the +// switch-case in ParseOpData because this function is used as part of the +// selective registration for the OpResolver implementation in micro. +TfLiteStatus ParseExpandDims(const Operator*, ErrorReporter*, + BuiltinDataAllocator*, void**) { + return kTfLiteOk; +} + +// We have this parse function instead of directly returning kTfLiteOk from the +// switch-case in ParseOpData because this function is used as part of the +// selective registration for the OpResolver implementation in micro. +TfLiteStatus ParseFill(const Operator*, ErrorReporter*, BuiltinDataAllocator*, + void**) { + return kTfLiteOk; +} + +// We have this parse function instead of directly returning kTfLiteOk from the +// switch-case in ParseOpData because this function is used as part of the +// selective registration for the OpResolver implementation in micro. +TfLiteStatus ParseFloor(const Operator*, ErrorReporter*, BuiltinDataAllocator*, + void**) { + return kTfLiteOk; +} + +// We have this parse function instead of directly returning kTfLiteOk from the +// switch-case in ParseOpData because this function is used as part of the +// selective registration for the OpResolver implementation in micro. +TfLiteStatus ParseFloorDiv(const Operator*, ErrorReporter*, + BuiltinDataAllocator*, void**) { + return kTfLiteOk; +} + +// We have this parse function instead of directly returning kTfLiteOk from the +// switch-case in ParseOpData because this function is used as part of the +// selective registration for the OpResolver implementation in micro. 
+TfLiteStatus ParseFloorMod(const Operator*, ErrorReporter*, + BuiltinDataAllocator*, void**) { + return kTfLiteOk; +} + +TfLiteStatus ParseFullyConnected(const Operator* op, + ErrorReporter* error_reporter, + BuiltinDataAllocator* allocator, + void** builtin_data) { + CheckParsePointerParams(op, error_reporter, allocator, builtin_data); + + SafeBuiltinDataAllocator safe_allocator(allocator); + + std::unique_ptr + params = safe_allocator.Allocate(); + TF_LITE_ENSURE(error_reporter, params != nullptr); + + const FullyConnectedOptions* schema_params = + op->builtin_options_as_FullyConnectedOptions(); + + if (schema_params != nullptr) { + params->activation = + ConvertActivation(schema_params->fused_activation_function()); + params->keep_num_dims = schema_params->keep_num_dims(); + params->asymmetric_quantize_inputs = + schema_params->asymmetric_quantize_inputs(); + + switch (schema_params->weights_format()) { + case FullyConnectedOptionsWeightsFormat_DEFAULT: + params->weights_format = kTfLiteFullyConnectedWeightsFormatDefault; + break; + case FullyConnectedOptionsWeightsFormat_SHUFFLED4x16INT8: + params->weights_format = + kTfLiteFullyConnectedWeightsFormatShuffled4x16Int8; + break; + default: + TF_LITE_REPORT_ERROR(error_reporter, + "Unhandled fully-connected weights format."); + return kTfLiteError; + } + } else { + // TODO(b/157480169): We should either return kTfLiteError or fill in some + // reasonable defaults in the params struct. We are not doing so until we + // better undertand the ramifications of changing the legacy behavior. + } + + *builtin_data = params.release(); + return kTfLiteOk; +} + +// We have this parse function instead of directly returning kTfLiteOk from the +// switch-case in ParseOpData because this function is used as part of the +// selective registration for the OpResolver implementation in micro. 
+TfLiteStatus ParseGather(const Operator* op, ErrorReporter* error_reporter, + BuiltinDataAllocator* allocator, void** builtin_data) { + CheckParsePointerParams(op, error_reporter, allocator, builtin_data); + + SafeBuiltinDataAllocator safe_allocator(allocator); + auto params = safe_allocator.Allocate(); + TF_LITE_ENSURE(error_reporter, params != nullptr); + params->axis = 0; + params->batch_dims = 0; + if (const auto* gather_params = op->builtin_options_as_GatherOptions()) { + params->axis = gather_params->axis(); + params->batch_dims = gather_params->batch_dims(); + } + + *builtin_data = params.release(); + return kTfLiteOk; +} + +// We have this parse function instead of directly returning kTfLiteOk from the +// switch-case in ParseOpData because this function is used as part of the +// selective registration for the OpResolver implementation in micro. +TfLiteStatus ParseGatherNd(const Operator*, ErrorReporter*, + BuiltinDataAllocator*, void**) { + return kTfLiteOk; +} + +// We have this parse function instead of directly returning kTfLiteOk from the +// switch-case in ParseOpData because this function is used as part of the +// selective registration for the OpResolver implementation in micro. +TfLiteStatus ParseGreater(const Operator*, ErrorReporter*, + BuiltinDataAllocator*, void**) { + return kTfLiteOk; +} + +// We have this parse function instead of directly returning kTfLiteOk from the +// switch-case in ParseOpData because this function is used as part of the +// selective registration for the OpResolver implementation in micro. +TfLiteStatus ParseGreaterEqual(const Operator*, ErrorReporter*, + BuiltinDataAllocator*, void**) { + return kTfLiteOk; +} + +// We have this parse function instead of directly returning kTfLiteOk from the +// switch-case in ParseOpData because this function is used as part of the +// selective registration for the OpResolver implementation in micro. 
+TfLiteStatus ParseHardSwish(const Operator*, ErrorReporter*, + BuiltinDataAllocator*, void**) { + return kTfLiteOk; +} + +TfLiteStatus ParseIf(const Operator* op, ErrorReporter* error_reporter, + BuiltinDataAllocator* allocator, void** builtin_data) { + CheckParsePointerParams(op, error_reporter, allocator, builtin_data); + + SafeBuiltinDataAllocator safe_allocator(allocator); + std::unique_ptr + params = safe_allocator.Allocate(); + TF_LITE_ENSURE(error_reporter, params != nullptr); + + const IfOptions* schema_params = op->builtin_options_as_IfOptions(); + + if (schema_params != nullptr) { + params->then_subgraph_index = schema_params->then_subgraph_index(); + params->else_subgraph_index = schema_params->else_subgraph_index(); + } else { + // TODO(b/157480169): We should either return kTfLiteError or fill in some + // reasonable defaults in the params struct. We are not doing so until we + // better undertand the ramifications of changing the legacy behavior. + } + + *builtin_data = params.release(); + return kTfLiteOk; +} + +TfLiteStatus ParseL2Normalization(const Operator* op, + ErrorReporter* error_reporter, + BuiltinDataAllocator* allocator, + void** builtin_data) { + CheckParsePointerParams(op, error_reporter, allocator, builtin_data); + + SafeBuiltinDataAllocator safe_allocator(allocator); + std::unique_ptr + params = safe_allocator.Allocate(); + TF_LITE_ENSURE(error_reporter, params != nullptr); + + const L2NormOptions* schema_params = op->builtin_options_as_L2NormOptions(); + + if (schema_params != nullptr) { + params->activation = + ConvertActivation(schema_params->fused_activation_function()); + } else { + // TODO(b/157480169): We should either return kTfLiteError or fill in some + // reasonable defaults in the params struct. We are not doing so until we + // better undertand the ramifications of changing the legacy behavior. 
+ } + + *builtin_data = params.release(); + return kTfLiteOk; +} + +TfLiteStatus ParseLeakyRelu(const Operator* op, ErrorReporter* error_reporter, + BuiltinDataAllocator* allocator, + void** builtin_data) { + CheckParsePointerParams(op, error_reporter, allocator, builtin_data); + + SafeBuiltinDataAllocator safe_allocator(allocator); + auto params = safe_allocator.Allocate(); + TF_LITE_ENSURE(error_reporter, params != nullptr); + if (const auto* leaky_relu_params = + op->builtin_options_as_LeakyReluOptions()) { + params->alpha = leaky_relu_params->alpha(); + } + *builtin_data = params.release(); + return kTfLiteOk; +} + +// We have this parse function instead of directly returning kTfLiteOk from the +// switch-case in ParseOpData because this function is used as part of the +// selective registration for the OpResolver implementation in micro. +TfLiteStatus ParseLess(const Operator*, ErrorReporter*, BuiltinDataAllocator*, + void**) { + return kTfLiteOk; +} + +// We have this parse function instead of directly returning kTfLiteOk from the +// switch-case in ParseOpData because this function is used as part of the +// selective registration for the OpResolver implementation in micro. +TfLiteStatus ParseLessEqual(const Operator*, ErrorReporter*, + BuiltinDataAllocator*, void**) { + return kTfLiteOk; +} + +// We have this parse function instead of directly returning kTfLiteOk from the +// switch-case in ParseOpData because this function is used as part of the +// selective registration for the OpResolver implementation in micro. +TfLiteStatus ParseLog(const Operator*, ErrorReporter*, BuiltinDataAllocator*, + void**) { + return kTfLiteOk; +} + +// We have this parse function instead of directly returning kTfLiteOk from the +// switch-case in ParseOpData because this function is used as part of the +// selective registration for the OpResolver implementation in micro. 
+TfLiteStatus ParseLogicalAnd(const Operator*, ErrorReporter*, + BuiltinDataAllocator*, void**) { + return kTfLiteOk; +} + +// We have this parse function instead of directly returning kTfLiteOk from the +// switch-case in ParseOpData because this function is used as part of the +// selective registration for the OpResolver implementation in micro. +TfLiteStatus ParseLogicalNot(const Operator*, ErrorReporter*, + BuiltinDataAllocator*, void**) { + return kTfLiteOk; +} + +// We have this parse function instead of directly returning kTfLiteOk from the +// switch-case in ParseOpData because this function is used as part of the +// selective registration for the OpResolver implementation in micro. +TfLiteStatus ParseLogicalOr(const Operator*, ErrorReporter*, + BuiltinDataAllocator*, void**) { + return kTfLiteOk; +} + +// We have this parse function instead of directly returning kTfLiteOk from the +// switch-case in ParseOpData because this function is used as part of the +// selective registration for the OpResolver implementation in micro. +TfLiteStatus ParseLogistic(const Operator*, ErrorReporter*, + BuiltinDataAllocator*, void**) { + return kTfLiteOk; +} + +// We have this parse function instead of directly returning kTfLiteOk from the +// switch-case in ParseOpData because this function is used as part of the +// selective registration for the OpResolver implementation in micro. 
+TfLiteStatus ParseLogSoftmax(const Operator*, ErrorReporter*, + BuiltinDataAllocator*, void**) { + return kTfLiteOk; +} + +TfLiteStatus ParseLSTM(const Operator* op, ErrorReporter* error_reporter, + BuiltinDataAllocator* allocator, void** builtin_data) { + CheckParsePointerParams(op, error_reporter, allocator, builtin_data); + + SafeBuiltinDataAllocator safe_allocator(allocator); + auto params = safe_allocator.Allocate(); + TF_LITE_ENSURE(error_reporter, params != nullptr); + if (const auto* lstm_params = op->builtin_options_as_LSTMOptions()) { + params->activation = + ConvertActivation(lstm_params->fused_activation_function()); + params->cell_clip = lstm_params->cell_clip(); + params->proj_clip = lstm_params->proj_clip(); + switch (lstm_params->kernel_type()) { + case LSTMKernelType_FULL: + params->kernel_type = kTfLiteLSTMFullKernel; + break; + case LSTMKernelType_BASIC: + params->kernel_type = kTfLiteLSTMBasicKernel; + break; + default: + TF_LITE_REPORT_ERROR(error_reporter, "Unhandled LSTM kernel type: %d", + lstm_params->kernel_type()); + return kTfLiteError; + } + params->asymmetric_quantize_inputs = + lstm_params->asymmetric_quantize_inputs(); + } else { + TF_LITE_REPORT_ERROR(error_reporter, "No valid LSTM builtin options exist"); + return kTfLiteError; + } + *builtin_data = params.release(); + return kTfLiteOk; +} + +// We have this parse function instead of directly returning kTfLiteOk from the +// switch-case in ParseOpData because this function is used as part of the +// selective registration for the OpResolver implementation in micro. +TfLiteStatus ParseMaximum(const Operator*, ErrorReporter*, + BuiltinDataAllocator*, void**) { + return kTfLiteOk; +} + +// We have this parse function instead of directly returning kTfLiteOk from the +// switch-case in ParseOpData because this function is used as part of the +// selective registration for the OpResolver implementation in micro. 
+TfLiteStatus ParseMinimum(const Operator*, ErrorReporter*, + BuiltinDataAllocator*, void**) { + return kTfLiteOk; +} + +TfLiteStatus ParseMirrorPad(const Operator* op, ErrorReporter* error_reporter, + BuiltinDataAllocator* allocator, + void** builtin_data) { + CheckParsePointerParams(op, error_reporter, allocator, builtin_data); + + SafeBuiltinDataAllocator safe_allocator(allocator); + std::unique_ptr + params = safe_allocator.Allocate(); + TF_LITE_ENSURE(error_reporter, params != nullptr); + + const MirrorPadOptions* schema_params = + op->builtin_options_as_MirrorPadOptions(); + + if (schema_params != nullptr) { + params->mode = ConvertMirrorPadding(schema_params->mode()); + } else { + // TODO(b/157480169): We should either return kTfLiteError or fill in some + // reasonable defaults in the params struct. We are not doing so until we + // better undertand the ramifications of changing the legacy behavior. + } + + *builtin_data = params.release(); + return kTfLiteOk; +} + +TfLiteStatus ParseMul(const Operator* op, ErrorReporter* error_reporter, + BuiltinDataAllocator* allocator, void** builtin_data) { + CheckParsePointerParams(op, error_reporter, allocator, builtin_data); + + SafeBuiltinDataAllocator safe_allocator(allocator); + std::unique_ptr + params = safe_allocator.Allocate(); + TF_LITE_ENSURE(error_reporter, params != nullptr); + + const MulOptions* schema_params = op->builtin_options_as_MulOptions(); + + if (schema_params != nullptr) { + params->activation = + ConvertActivation(schema_params->fused_activation_function()); + } else { + // TODO(b/157480169): We should either return kTfLiteError or fill in some + // reasonable defaults in the params struct. We are not doing so until we + // better undertand the ramifications of changing the legacy behavior. 
+ } + + *builtin_data = params.release(); + return kTfLiteOk; +} + +// We have this parse function instead of directly returning kTfLiteOk from the +// switch-case in ParseOpData because this function is used as part of the +// selective registration for the OpResolver implementation in micro. +TfLiteStatus ParseNeg(const Operator*, ErrorReporter*, BuiltinDataAllocator*, + void**) { + return kTfLiteOk; +} + +// We have this parse function instead of directly returning kTfLiteOk from the +// switch-case in ParseOpData because this function is used as part of the +// selective registration for the OpResolver implementation in micro. +TfLiteStatus ParseNotEqual(const Operator*, ErrorReporter*, + BuiltinDataAllocator*, void**) { + return kTfLiteOk; +} + +TfLiteStatus ParsePack(const Operator* op, ErrorReporter* error_reporter, + BuiltinDataAllocator* allocator, void** builtin_data) { + CheckParsePointerParams(op, error_reporter, allocator, builtin_data); + + SafeBuiltinDataAllocator safe_allocator(allocator); + std::unique_ptr + params = safe_allocator.Allocate(); + TF_LITE_ENSURE(error_reporter, params != nullptr); + + const PackOptions* schema_params = op->builtin_options_as_PackOptions(); + + if (schema_params != nullptr) { + params->values_count = schema_params->values_count(); + params->axis = schema_params->axis(); + } else { + // TODO(b/157480169): We should either return kTfLiteError or fill in some + // reasonable defaults in the params struct. We are not doing so until we + // better undertand the ramifications of changing the legacy behavior. + } + + *builtin_data = params.release(); + return kTfLiteOk; +} + +// We have this parse function instead of directly returning kTfLiteOk from the +// switch-case in ParseOpData because this function is used as part of the +// selective registration for the OpResolver implementation in micro. 
+TfLiteStatus ParsePad(const Operator*, ErrorReporter*, BuiltinDataAllocator*, + void**) { + return kTfLiteOk; +} + +// We have this parse function instead of directly returning kTfLiteOk from the +// switch-case in ParseOpData because this function is used as part of the +// selective registration for the OpResolver implementation in micro. +TfLiteStatus ParsePadV2(const Operator*, ErrorReporter*, BuiltinDataAllocator*, + void**) { + return kTfLiteOk; +} + +TfLiteStatus ParsePool(const Operator* op, ErrorReporter* error_reporter, + BuiltinDataAllocator* allocator, void** builtin_data) { + CheckParsePointerParams(op, error_reporter, allocator, builtin_data); + + SafeBuiltinDataAllocator safe_allocator(allocator); + std::unique_ptr + params = safe_allocator.Allocate(); + TF_LITE_ENSURE(error_reporter, params != nullptr); + + const Pool2DOptions* schema_params = op->builtin_options_as_Pool2DOptions(); + + if (schema_params != nullptr) { + params->padding = ConvertPadding(schema_params->padding()); + params->stride_width = schema_params->stride_w(); + params->stride_height = schema_params->stride_h(); + params->filter_width = schema_params->filter_width(); + params->filter_height = schema_params->filter_height(); + params->activation = + ConvertActivation(schema_params->fused_activation_function()); + } else { + // TODO(b/157480169): We should either return kTfLiteError or fill in some + // reasonable defaults in the params struct. We are not doing so until we + // better undertand the ramifications of changing the legacy behavior. + } + + *builtin_data = params.release(); + return kTfLiteOk; +} + +// We have this parse function instead of directly returning kTfLiteOk from the +// switch-case in ParseOpData because this function is used as part of the +// selective registration for the OpResolver implementation in micro. 
+TfLiteStatus ParsePow(const Operator*, ErrorReporter*, BuiltinDataAllocator*, + void**) { + return kTfLiteOk; +} + +// We have this parse function instead of directly returning kTfLiteOk from the +// switch-case in ParseOpData because this function is used as part of the +// selective registration for the OpResolver implementation in micro. +TfLiteStatus ParsePrelu(const Operator*, ErrorReporter*, BuiltinDataAllocator*, + void**) { + return kTfLiteOk; +} + +// We have this parse function instead of directly returning kTfLiteOk from the +// switch-case in ParseOpData because this function is used as part of the +// selective registration for the OpResolver implementation in micro. +TfLiteStatus ParseQuantize(const Operator*, ErrorReporter*, + BuiltinDataAllocator*, void**) { + return kTfLiteOk; +} + +// We have this parse function instead of directly returning kTfLiteOk from the +// switch-case in ParseOpData because this function is used as part of the +// selective registration for the OpResolver implementation in micro. +TfLiteStatus ParseReadVariable(const Operator*, ErrorReporter*, + BuiltinDataAllocator*, void**) { + return kTfLiteOk; +} + +TfLiteStatus ParseReducer(const Operator* op, ErrorReporter* error_reporter, + BuiltinDataAllocator* allocator, + void** builtin_data) { + CheckParsePointerParams(op, error_reporter, allocator, builtin_data); + + SafeBuiltinDataAllocator safe_allocator(allocator); + + std::unique_ptr + params = safe_allocator.Allocate(); + TF_LITE_ENSURE(error_reporter, params != nullptr); + + const ReducerOptions* schema_params = op->builtin_options_as_ReducerOptions(); + + if (schema_params != nullptr) { + params->keep_dims = schema_params->keep_dims(); + } else { + // TODO(b/157480169): We should either return kTfLiteError or fill in some + // reasonable defaults in the params struct. We are not doing so until we + // better undertand the ramifications of changing the legacy behavior. 
+ } + + *builtin_data = params.release(); + return kTfLiteOk; +} + +// We have this parse function instead of directly returning kTfLiteOk from the +// switch-case in ParseOpData because this function is used as part of the +// selective registration for the OpResolver implementation in micro. +TfLiteStatus ParseRelu(const Operator*, ErrorReporter*, BuiltinDataAllocator*, + void**) { + return kTfLiteOk; +} + +// We have this parse function instead of directly returning kTfLiteOk from the +// switch-case in ParseOpData because this function is used as part of the +// selective registration for the OpResolver implementation in micro. +TfLiteStatus ParseRelu6(const Operator*, ErrorReporter*, BuiltinDataAllocator*, + void**) { + return kTfLiteOk; +} + +TfLiteStatus ParseReshape(const Operator* op, ErrorReporter* error_reporter, + BuiltinDataAllocator* allocator, + void** builtin_data) { + CheckParsePointerParams(op, error_reporter, allocator, builtin_data); + + SafeBuiltinDataAllocator safe_allocator(allocator); + + std::unique_ptr + params = safe_allocator.Allocate(); + TF_LITE_ENSURE(error_reporter, params != nullptr); + + const ReshapeOptions* schema_params = op->builtin_options_as_ReshapeOptions(); + + if (schema_params != nullptr) { + const flatbuffers::Vector* new_shape = schema_params->new_shape(); + if (new_shape != nullptr) { + TF_LITE_ENSURE_STATUS( + FlatBufferIntVectorToArray(sizeof(params->shape), new_shape, + params->shape, error_reporter, "reshape")); + params->num_dimensions = new_shape->size(); + } else { + // TODO(b/157480169) TODO(b/147203660): We should either return + // kTfLiteError or fill in some reasonable defaults in the params struct. + // We are not doing so until we better undertand the ramifications of + // changing the legacy behavior. + } + } else { + // TODO(b/157480169): We should either return kTfLiteError or fill in some + // reasonable defaults in the params struct. 
We are not doing so until we + // better undertand the ramifications of changing the legacy behavior. + } + + *builtin_data = params.release(); + return kTfLiteOk; +} + +TfLiteStatus ParseResizeBilinear(const Operator* op, + ErrorReporter* error_reporter, + BuiltinDataAllocator* allocator, + void** builtin_data) { + CheckParsePointerParams(op, error_reporter, allocator, builtin_data); + + SafeBuiltinDataAllocator safe_allocator(allocator); + std::unique_ptr + params = safe_allocator.Allocate(); + TF_LITE_ENSURE(error_reporter, params != nullptr); + + const ResizeBilinearOptions* schema_params = + op->builtin_options_as_ResizeBilinearOptions(); + + if (schema_params != nullptr) { + params->align_corners = schema_params->align_corners(); + params->half_pixel_centers = schema_params->half_pixel_centers(); + } else { + params->align_corners = false; + params->half_pixel_centers = false; + } + + *builtin_data = params.release(); + return kTfLiteOk; +} + +TfLiteStatus ParseResizeNearestNeighbor(const Operator* op, + ErrorReporter* error_reporter, + BuiltinDataAllocator* allocator, + void** builtin_data) { + CheckParsePointerParams(op, error_reporter, allocator, builtin_data); + + SafeBuiltinDataAllocator safe_allocator(allocator); + std::unique_ptr + params = safe_allocator.Allocate(); + TF_LITE_ENSURE(error_reporter, params != nullptr); + + const ResizeNearestNeighborOptions* schema_params = + op->builtin_options_as_ResizeNearestNeighborOptions(); + + if (schema_params != nullptr) { + params->align_corners = schema_params->align_corners(); + params->half_pixel_centers = schema_params->half_pixel_centers(); + } else { + params->align_corners = false; + params->half_pixel_centers = false; + } + + *builtin_data = params.release(); + return kTfLiteOk; +} + +// We have this parse function instead of directly returning kTfLiteOk from the +// switch-case in ParseOpData because this function is used as part of the +// selective registration for the OpResolver implementation in 
micro. +TfLiteStatus ParseRound(const Operator*, ErrorReporter*, BuiltinDataAllocator*, + void**) { + return kTfLiteOk; +} + +// We have this parse function instead of directly returning kTfLiteOk from the +// switch-case in ParseOpData because this function is used as part of the +// selective registration for the OpResolver implementation in micro. +TfLiteStatus ParseRsqrt(const Operator*, ErrorReporter*, BuiltinDataAllocator*, + void**) { + return kTfLiteOk; +} + +// We have this parse function instead of directly returning kTfLiteOk from the +// switch-case in ParseOpData because this function is used as part of the +// selective registration for the OpResolver implementation in micro. +TfLiteStatus ParseSelectV2(const Operator*, ErrorReporter*, + BuiltinDataAllocator*, void**) { + return kTfLiteOk; +} + +TfLiteStatus ParseShape(const Operator* op, ErrorReporter* error_reporter, + BuiltinDataAllocator* allocator, void** builtin_data) { + SafeBuiltinDataAllocator safe_allocator(allocator); + std::unique_ptr + params = safe_allocator.Allocate(); + TF_LITE_ENSURE(error_reporter, params != nullptr); + + const ShapeOptions* schema_params = op->builtin_options_as_ShapeOptions(); + + if (schema_params != nullptr) { + TF_LITE_ENSURE_STATUS(ConvertTensorType(schema_params->out_type(), + ¶ms->out_type, error_reporter)); + } else { + // TODO(b/157480169): We should either return kTfLiteError or fill in some + // reasonable defaults in the params struct. We are not doing so until we + // better undertand the ramifications of changing the legacy behavior. + } + + *builtin_data = params.release(); + return kTfLiteOk; +} + +// We have this parse function instead of directly returning kTfLiteOk from the +// switch-case in ParseOpData because this function is used as part of the +// selective registration for the OpResolver implementation in micro. 
+TfLiteStatus ParseSin(const Operator*, ErrorReporter*, BuiltinDataAllocator*, + void**) { + return kTfLiteOk; +} + +// We have this parse function instead of directly returning kTfLiteOk from the +// switch-case in ParseOpData because this function is used as part of the +// selective registration for the OpResolver implementation in micro. +TfLiteStatus ParseSlice(const Operator*, ErrorReporter*, BuiltinDataAllocator*, + void**) { + return kTfLiteOk; +} + +TfLiteStatus ParseSoftmax(const Operator* op, ErrorReporter* error_reporter, + BuiltinDataAllocator* allocator, + void** builtin_data) { + CheckParsePointerParams(op, error_reporter, allocator, builtin_data); + + SafeBuiltinDataAllocator safe_allocator(allocator); + std::unique_ptr + params = safe_allocator.Allocate(); + TF_LITE_ENSURE(error_reporter, params != nullptr); + + const SoftmaxOptions* schema_params = op->builtin_options_as_SoftmaxOptions(); + + if (schema_params != nullptr) { + params->beta = schema_params->beta(); + } else { + // TODO(b/157480169): We should either return kTfLiteError or fill in some + // reasonable defaults in the params struct. We are not doing so until we + // better undertand the ramifications of changing the legacy behavior. + } + + *builtin_data = params.release(); + return kTfLiteOk; +} + +// We have this parse function instead of directly returning kTfLiteOk from the +// switch-case in ParseOpData because this function is used as part of the +// selective registration for the OpResolver implementation in micro. 
+TfLiteStatus ParseSpaceToBatchNd(const Operator*, ErrorReporter*, + BuiltinDataAllocator*, void**) { + return kTfLiteOk; +} + +TfLiteStatus ParseSpaceToDepth(const Operator* op, + ErrorReporter* error_reporter, + BuiltinDataAllocator* allocator, + void** builtin_data) { + CheckParsePointerParams(op, error_reporter, allocator, builtin_data); + + SafeBuiltinDataAllocator safe_allocator(allocator); + std::unique_ptr + params = safe_allocator.Allocate(); + TF_LITE_ENSURE(error_reporter, params != nullptr); + + const auto* schema_params = op->builtin_options_as_SpaceToDepthOptions(); + if (schema_params != nullptr) { + params->block_size = schema_params->block_size(); + } else { + // TODO(b/157480169): We should either return kTfLiteError or fill in some + // reasonable defaults in the params struct. We are not doing so until we + // better undertand the ramifications of changing the legacy behavior. + } + + *builtin_data = params.release(); + return kTfLiteOk; +} + +TfLiteStatus ParseSplit(const Operator* op, ErrorReporter* error_reporter, + BuiltinDataAllocator* allocator, void** builtin_data) { + CheckParsePointerParams(op, error_reporter, allocator, builtin_data); + + SafeBuiltinDataAllocator safe_allocator(allocator); + std::unique_ptr + params = safe_allocator.Allocate(); + TF_LITE_ENSURE(error_reporter, params != nullptr); + + const SplitOptions* schema_params = op->builtin_options_as_SplitOptions(); + + if (schema_params != nullptr) { + params->num_splits = schema_params->num_splits(); + } else { + // TODO(b/157480169): We should either return kTfLiteError or fill in some + // reasonable defaults in the params struct. We are not doing so until we + // better undertand the ramifications of changing the legacy behavior. 
+ } + + *builtin_data = params.release(); + return kTfLiteOk; +} + +TfLiteStatus ParseSplitV(const Operator* op, ErrorReporter* error_reporter, + BuiltinDataAllocator* allocator, void** builtin_data) { + CheckParsePointerParams(op, error_reporter, allocator, builtin_data); + SafeBuiltinDataAllocator safe_allocator(allocator); + + std::unique_ptr + params = safe_allocator.Allocate(); + TF_LITE_ENSURE(error_reporter, params != nullptr); + + const SplitVOptions* schema_params = op->builtin_options_as_SplitVOptions(); + + if (schema_params != nullptr) { + params->num_splits = schema_params->num_splits(); + } else { + // TODO(b/157480169): We should either return kTfLiteError or fill in some + // reasonable defaults in the params struct. We are not doing so until we + // better undertand the ramifications of changing the legacy behavior. + } + + *builtin_data = params.release(); + return kTfLiteOk; +} + +TfLiteStatus ParseUnidirectionalSequenceLSTM(const Operator* op, + ErrorReporter* error_reporter, + BuiltinDataAllocator* allocator, + void** builtin_data) { + CheckParsePointerParams(op, error_reporter, allocator, builtin_data); + SafeBuiltinDataAllocator safe_allocator(allocator); + auto params = + safe_allocator.Allocate(); + TF_LITE_ENSURE(error_reporter, params != nullptr); + if (const auto* seq_lstm_params = + op->builtin_options_as_UnidirectionalSequenceLSTMOptions()) { + params->activation = + ConvertActivation(seq_lstm_params->fused_activation_function()); + params->cell_clip = seq_lstm_params->cell_clip(); + params->proj_clip = seq_lstm_params->proj_clip(); + params->time_major = seq_lstm_params->time_major(); + params->asymmetric_quantize_inputs = + seq_lstm_params->asymmetric_quantize_inputs(); + params->diagonal_recurrent_tensors = + seq_lstm_params->diagonal_recurrent_tensors(); + } + *builtin_data = params.release(); + return kTfLiteOk; +} + +TfLiteStatus ParseSqueeze(const Operator* op, ErrorReporter* error_reporter, + BuiltinDataAllocator* allocator, + 
void** builtin_data) { + CheckParsePointerParams(op, error_reporter, allocator, builtin_data); + SafeBuiltinDataAllocator safe_allocator(allocator); + + std::unique_ptr + params = safe_allocator.Allocate(); + TF_LITE_ENSURE(error_reporter, params != nullptr); + + const SqueezeOptions* schema_params = op->builtin_options_as_SqueezeOptions(); + + if (schema_params != nullptr) { + const auto* squeeze_dims = schema_params->squeeze_dims(); + if (squeeze_dims != nullptr) { + TF_LITE_ENSURE_STATUS(FlatBufferIntVectorToArray( + sizeof(params->squeeze_dims), squeeze_dims, params->squeeze_dims, + error_reporter, "squeeze")); + params->num_squeeze_dims = squeeze_dims->size(); + } else { + params->num_squeeze_dims = 0; + } + } else { + // TODO(b/157480169): We should either return kTfLiteError or fill in some + // reasonable defaults in the params struct. We are not doing so until we + // better undertand the ramifications of changing the legacy behavior. + } + + *builtin_data = params.release(); + return kTfLiteOk; +} + +// We have this parse function instead of directly returning kTfLiteOk from the +// switch-case in ParseOpData because this function is used as part of the +// selective registration for the OpResolver implementation in micro. +TfLiteStatus ParseSqrt(const Operator*, ErrorReporter*, BuiltinDataAllocator*, + void**) { + return kTfLiteOk; +} + +// We have this parse function instead of directly returning kTfLiteOk from the +// switch-case in ParseOpData because this function is used as part of the +// selective registration for the OpResolver implementation in micro. +TfLiteStatus ParseSquare(const Operator*, ErrorReporter*, BuiltinDataAllocator*, + void**) { + return kTfLiteOk; +} + +// We have this parse function instead of directly returning kTfLiteOk from the +// switch-case in ParseOpData because this function is used as part of the +// selective registration for the OpResolver implementation in micro. 
+TfLiteStatus ParseSquaredDifference(const Operator*, ErrorReporter*, + BuiltinDataAllocator*, void**) { + return kTfLiteOk; +} + +TfLiteStatus ParseStridedSlice(const Operator* op, + ErrorReporter* error_reporter, + BuiltinDataAllocator* allocator, + void** builtin_data) { + CheckParsePointerParams(op, error_reporter, allocator, builtin_data); + + SafeBuiltinDataAllocator safe_allocator(allocator); + std::unique_ptr + params = safe_allocator.Allocate(); + TF_LITE_ENSURE(error_reporter, params != nullptr); + + const StridedSliceOptions* schema_params = + op->builtin_options_as_StridedSliceOptions(); + + if (schema_params != nullptr) { + params->begin_mask = schema_params->begin_mask(); + params->end_mask = schema_params->end_mask(); + params->ellipsis_mask = schema_params->ellipsis_mask(); + params->new_axis_mask = schema_params->new_axis_mask(); + params->shrink_axis_mask = schema_params->shrink_axis_mask(); + params->offset = schema_params->offset(); + } else { + // TODO(b/157480169): We should either return kTfLiteError or fill in some + // reasonable defaults in the params struct. We are not doing so until we + // better undertand the ramifications of changing the legacy behavior. 
+ } + + *builtin_data = params.release(); + return kTfLiteOk; +} + +TfLiteStatus ParseSub(const Operator* op, ErrorReporter* error_reporter, + BuiltinDataAllocator* allocator, void** builtin_data) { + CheckParsePointerParams(op, error_reporter, allocator, builtin_data); + + SafeBuiltinDataAllocator safe_allocator(allocator); + std::unique_ptr + params = safe_allocator.Allocate(); + TF_LITE_ENSURE(error_reporter, params != nullptr); + + const SubOptions* schema_params = op->builtin_options_as_SubOptions(); + + if (schema_params != nullptr) { + params->activation = + ConvertActivation(schema_params->fused_activation_function()); + params->pot_scale_int16 = schema_params->pot_scale_int16(); + } else { + // TODO(b/157480169): We should either return kTfLiteError or fill in some + // reasonable defaults in the params struct. We are not doing so until we + // better undertand the ramifications of changing the legacy behavior. + } + + *builtin_data = params.release(); + return kTfLiteOk; +} + +TfLiteStatus ParseSvdf(const Operator* op, ErrorReporter* error_reporter, + BuiltinDataAllocator* allocator, void** builtin_data) { + CheckParsePointerParams(op, error_reporter, allocator, builtin_data); + + SafeBuiltinDataAllocator safe_allocator(allocator); + std::unique_ptr + params = safe_allocator.Allocate(); + TF_LITE_ENSURE(error_reporter, params != nullptr); + + const SVDFOptions* schema_params = op->builtin_options_as_SVDFOptions(); + if (schema_params != nullptr) { + params->rank = schema_params->rank(); + params->activation = + ConvertActivation(schema_params->fused_activation_function()); + params->asymmetric_quantize_inputs = + schema_params->asymmetric_quantize_inputs(); + } else { + // TODO(b/157480169): We should either return kTfLiteError or fill in some + // reasonable defaults in the params struct. We are not doing so until we + // better undertand the ramifications of changing the legacy behavior. 
+ } + + *builtin_data = params.release(); + return kTfLiteOk; +} + +// We have this parse function instead of directly returning kTfLiteOk from the +// switch-case in ParseOpData because this function is used as part of the +// selective registration for the OpResolver implementation in micro. +TfLiteStatus ParseTanh(const Operator*, ErrorReporter*, BuiltinDataAllocator*, + void**) { + return kTfLiteOk; +} +// +// We have this parse function instead of directly returning kTfLiteOk from the +// switch-case in ParseOpData because this function is used as part of the +// selective registration for the OpResolver implementation in micro. +TfLiteStatus ParseTranspose(const Operator*, ErrorReporter*, + BuiltinDataAllocator*, void**) { + return kTfLiteOk; +} + +TfLiteStatus ParseTransposeConv(const Operator* op, + ErrorReporter* error_reporter, + BuiltinDataAllocator* allocator, + void** builtin_data) { + CheckParsePointerParams(op, error_reporter, allocator, builtin_data); + + SafeBuiltinDataAllocator safe_allocator(allocator); + std::unique_ptr + params = safe_allocator.Allocate(); + TF_LITE_ENSURE(error_reporter, params != nullptr); + const TransposeConvOptions* transpose_conv_params = + op->builtin_options_as_TransposeConvOptions(); + if (transpose_conv_params != nullptr) { + params->padding = ConvertPadding(transpose_conv_params->padding()); + params->stride_width = transpose_conv_params->stride_w(); + params->stride_height = transpose_conv_params->stride_h(); + + params->activation = + ConvertActivation(transpose_conv_params->fused_activation_function()); + } else { + // TODO(b/157480169): We should either return kTfLiteError or fill in some + // reasonable defaults in the params struct. We are not doing so until we + // better undertand the ramifications of changing the legacy behavior. 
+ } + *builtin_data = params.release(); + return kTfLiteOk; +} + +TfLiteStatus ParseUnpack(const Operator* op, ErrorReporter* error_reporter, + BuiltinDataAllocator* allocator, void** builtin_data) { + CheckParsePointerParams(op, error_reporter, allocator, builtin_data); + + SafeBuiltinDataAllocator safe_allocator(allocator); + std::unique_ptr + params = safe_allocator.Allocate(); + TF_LITE_ENSURE(error_reporter, params != nullptr); + + const UnpackOptions* schema_params = op->builtin_options_as_UnpackOptions(); + + if (schema_params != nullptr) { + params->num = schema_params->num(); + params->axis = schema_params->axis(); + } else { + // TODO(b/157480169): We should either return kTfLiteError or fill in some + // reasonable defaults in the params struct. We are not doing so until we + // better undertand the ramifications of changing the legacy behavior. + } + + *builtin_data = params.release(); + return kTfLiteOk; +} + +TfLiteStatus ParseVarHandle(const Operator* op, ErrorReporter* error_reporter, + BuiltinDataAllocator* allocator, + void** builtin_data) { + CheckParsePointerParams(op, error_reporter, allocator, builtin_data); + + SafeBuiltinDataAllocator safe_allocator(allocator); + std::unique_ptr + params = safe_allocator.Allocate(); + TF_LITE_ENSURE(error_reporter, params != nullptr); + + const VarHandleOptions* schema_params = + op->builtin_options_as_VarHandleOptions(); + + if (schema_params != nullptr) { + if (schema_params->container()) { + params->container = schema_params->container()->c_str(); + } + if (schema_params->shared_name()) { + params->shared_name = schema_params->shared_name()->c_str(); + } + } else { + // TODO(b/157480169): We should either return kTfLiteError or fill in some + // reasonable defaults in the params struct. We are not doing so until we + // better undertand the ramifications of changing the legacy behavior. 
+ } + + *builtin_data = params.release(); + return kTfLiteOk; +} + +TfLiteStatus ParseWhile(const Operator* op, ErrorReporter* error_reporter, + BuiltinDataAllocator* allocator, void** builtin_data) { + CheckParsePointerParams(op, error_reporter, allocator, builtin_data); + + SafeBuiltinDataAllocator safe_allocator(allocator); + std::unique_ptr + params = safe_allocator.Allocate(); + TF_LITE_ENSURE(error_reporter, params != nullptr); + + const WhileOptions* schema_params = op->builtin_options_as_WhileOptions(); + + if (schema_params != nullptr) { + params->cond_subgraph_index = schema_params->cond_subgraph_index(); + params->body_subgraph_index = schema_params->body_subgraph_index(); + } else { + // TODO(b/157480169): We should either return kTfLiteError or fill in some + // reasonable defaults in the params struct. We are not doing so until we + // better understand the ramifications of changing the legacy behavior. + } + + *builtin_data = params.release(); + return kTfLiteOk; +} + +// We have this parse function instead of directly returning kTfLiteOk from the +// switch-case in ParseOpData because this function is used as part of the +// selective registration for the OpResolver implementation in micro. +TfLiteStatus ParseZerosLike(const Operator*, ErrorReporter*, + BuiltinDataAllocator*, void**) { + return kTfLiteOk; +} + +// We have this parse function instead of directly returning kTfLiteOk from the +// switch-case in ParseOpData because this function is used as part of the +// selective registration for the OpResolver implementation in micro. +TfLiteStatus ParseBitwiseXor(const Operator*, ErrorReporter*, + BuiltinDataAllocator*, void**) { + return kTfLiteOk; +} + +// We have this parse function instead of directly returning kTfLiteOk from the +// switch-case in ParseOpData because this function is used as part of the +// selective registration for the OpResolver implementation in micro. 
+TfLiteStatus ParseRightShift(const Operator*, ErrorReporter*, + BuiltinDataAllocator*, void**) { + return kTfLiteOk; +} + +TfLiteStatus ParseOpData(const Operator* op, BuiltinOperator op_type, + ErrorReporter* error_reporter, + BuiltinDataAllocator* allocator, void** builtin_data) { +// TODO(b/145762662): It would be preferable to have the build graph for TF Lite +// Micro not have the ParseOpData function at all. This would require splitting +// the current file into two separate files, one of which defines the +// ParseOpData function and the other that defines the operator specific parse +// functions (e.g. ParseAdd). +// +// Such a split was attempted but was not worth the effort at the time because +// of the following reasons: +// * We could either duplicate the functions and the SafeBuiltinDataAllocator +// class in the anonymous namespace of this file, or attempt to make a common +// library with these helper functions and class. +// * Making a common library with a separate build target was not feasible as +// it introduced circular dependencies due to the ErrorReporter and a common +// .cc and .h within the same api build target that also cause circular +// dependencies due to the BuiltinDataAllocator class. +// * If all the builtin operators were to have their own parse functions, or we +// were ok with some amount of code duplication, then this split of the .cc +// files would be a lot more feasible. +#ifdef TF_LITE_STATIC_MEMORY + TF_LITE_REPORT_ERROR( + error_reporter, + "ParseOpData is unsupported on TfLiteMicro, please use the operator " + "specific parse functions (e.g. 
ParseAdd etc.).\n"); + return kTfLiteError; +#else + return ParseOpDataTfLite(op, op_type, error_reporter, allocator, + builtin_data); +#endif +} + +} // namespace tflite diff --git a/tensorflow/lite/core/api/flatbuffer_conversions.h b/tensorflow/lite/core/api/flatbuffer_conversions.h new file mode 100644 index 0000000..9ffe397 --- /dev/null +++ b/tensorflow/lite/core/api/flatbuffer_conversions.h @@ -0,0 +1,425 @@ +/* Copyright 2021 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ +#ifndef TENSORFLOW_LITE_CORE_API_FLATBUFFER_CONVERSIONS_H_ +#define TENSORFLOW_LITE_CORE_API_FLATBUFFER_CONVERSIONS_H_ + +// These functions transform codes and data structures that are defined in the +// flatbuffer serialization format into in-memory values that are used by the +// runtime API and interpreter. + +#include +#include +#include + +#include "tensorflow/lite/core/api/error_reporter.h" +#include "tensorflow/lite/core/c/common.h" +#include "tensorflow/lite/schema/schema_generated.h" + +namespace tflite { + +// Interface class for builtin data allocations. +class BuiltinDataAllocator { + public: + virtual void* Allocate(size_t size, size_t alignment_hint) = 0; + virtual void Deallocate(void* data) = 0; + + // Allocate a structure, but make sure it is a POD structure that doesn't + // require constructors to run. 
The reason we do this, is that Interpreter's C + // extension part will take ownership so destructors will not be run during + // deallocation. + template + T* AllocatePOD() { + // TODO(b/154346074): Change this to is_trivially_destructible when all + // platform targets support that properly. + static_assert(std::is_pod::value, "Builtin data structure must be POD."); + void* allocated_memory = this->Allocate(sizeof(T), alignof(T)); + return new (allocated_memory) T(); + } + + virtual ~BuiltinDataAllocator() {} +}; + +// Parse the appropriate data out of the op. +// +// This handles builtin data explicitly as there are flatbuffer schemas. +// If it returns kTfLiteOk, it passes the data out with `builtin_data`. The +// calling function has to pass in an allocator object, and this allocator +// will be called to reserve space for the output data. If the calling +// function's allocator reserves memory on the heap, then it's the calling +// function's responsibility to free it. +// If it returns kTfLiteError, `builtin_data` will be `nullptr`. +TfLiteStatus ParseOpData(const Operator* op, BuiltinOperator op_type, + ErrorReporter* error_reporter, + BuiltinDataAllocator* allocator, void** builtin_data); + +// Converts the tensor data type used in the flat buffer to the representation +// used by the runtime. 
+TfLiteStatus ConvertTensorType(TensorType tensor_type, TfLiteType* type, + ErrorReporter* error_reporter); + +TfLiteStatus ParseAbs(const Operator* op, ErrorReporter* error_reporter, + BuiltinDataAllocator* allocator, void** builtin_data); + +TfLiteStatus ParseAdd(const Operator* op, ErrorReporter* error_reporter, + BuiltinDataAllocator* allocator, void** builtin_data); + +TfLiteStatus ParseAddN(const Operator* op, ErrorReporter* error_reporter, + BuiltinDataAllocator* allocator, void** builtin_data); + +TfLiteStatus ParseArgMax(const Operator* op, ErrorReporter* error_reporter, + BuiltinDataAllocator* allocator, void** builtin_data); + +TfLiteStatus ParseArgMin(const Operator* op, ErrorReporter* error_reporter, + BuiltinDataAllocator* allocator, void** builtin_data); + +TfLiteStatus ParseAssignVariable(const Operator* op, + ErrorReporter* error_reporter, + BuiltinDataAllocator* allocator, + void** builtin_data); + +TfLiteStatus ParseBatchMatMul(const Operator* op, ErrorReporter* error_reporter, + BuiltinDataAllocator* allocator, + void** builtin_data); + +TfLiteStatus ParseBatchToSpaceNd(const Operator* op, + ErrorReporter* error_reporter, + BuiltinDataAllocator* allocator, + void** builtin_data); + +TfLiteStatus ParseBroadcastArgs(const Operator* op, + ErrorReporter* error_reporter, + BuiltinDataAllocator* allocator, + void** builtin_data); + +TfLiteStatus ParseBroadcastTo(const Operator* op, ErrorReporter* error_reporter, + BuiltinDataAllocator* allocator, + void** builtin_data); + +TfLiteStatus ParseCallOnce(const Operator* op, ErrorReporter* error_reporter, + BuiltinDataAllocator* allocator, + void** builtin_data); + +TfLiteStatus ParseCeil(const Operator* op, ErrorReporter* error_reporter, + BuiltinDataAllocator* allocator, void** builtin_data); + +TfLiteStatus ParseCast(const Operator* op, ErrorReporter* error_reporter, + BuiltinDataAllocator* allocator, void** builtin_data); + +TfLiteStatus ParseConcatenation(const Operator* op, + ErrorReporter* 
error_reporter, + BuiltinDataAllocator* allocator, + void** builtin_data); + +TfLiteStatus ParseConv2D(const Operator* op, ErrorReporter* error_reporter, + BuiltinDataAllocator* allocator, void** builtin_data); + +TfLiteStatus ParseCos(const Operator* op, ErrorReporter* error_reporter, + BuiltinDataAllocator* allocator, void** builtin_data); + +TfLiteStatus ParseCumsum(const Operator* op, ErrorReporter* error_reporter, + BuiltinDataAllocator* allocator, void** builtin_data); + +TfLiteStatus ParseDepthToSpace(const Operator* op, + ErrorReporter* error_reporter, + BuiltinDataAllocator* allocator, + void** builtin_data); + +TfLiteStatus ParseDepthwiseConv2D(const Operator* op, + ErrorReporter* error_reporter, + BuiltinDataAllocator* allocator, + void** builtin_data); + +TfLiteStatus ParseDequantize(const Operator* op, ErrorReporter* error_reporter, + BuiltinDataAllocator* allocator, + void** builtin_data); + +TfLiteStatus ParseDiv(const Operator* op, ErrorReporter* error_reporter, + BuiltinDataAllocator* allocator, void** builtin_data); + +TfLiteStatus ParseElu(const Operator* op, ErrorReporter* error_reporter, + BuiltinDataAllocator* allocator, void** builtin_data); + +TfLiteStatus ParseEmbeddingLookup(const Operator* op, + ErrorReporter* error_reporter, + BuiltinDataAllocator* allocator, + void** builtin_data); + +TfLiteStatus ParseEqual(const Operator* op, ErrorReporter* error_reporter, + BuiltinDataAllocator* allocator, void** builtin_data); + +TfLiteStatus ParseExp(const Operator* op, ErrorReporter* error_reporter, + BuiltinDataAllocator* allocator, void** builtin_data); + +TfLiteStatus ParseExpandDims(const Operator* op, ErrorReporter* error_reporter, + BuiltinDataAllocator* allocator, + void** builtin_data); + +TfLiteStatus ParseFill(const Operator* op, ErrorReporter* error_reporter, + BuiltinDataAllocator* allocator, void** builtin_data); + +TfLiteStatus ParseFloor(const Operator* op, ErrorReporter* error_reporter, + BuiltinDataAllocator* allocator, void** 
builtin_data); + +TfLiteStatus ParseFloorDiv(const Operator* op, ErrorReporter* error_reporter, + BuiltinDataAllocator* allocator, + void** builtin_data); + +TfLiteStatus ParseFloorMod(const Operator* op, ErrorReporter* error_reporter, + BuiltinDataAllocator* allocator, + void** builtin_data); + +TfLiteStatus ParseFullyConnected(const Operator* op, + ErrorReporter* error_reporter, + BuiltinDataAllocator* allocator, + void** builtin_data); + +TfLiteStatus ParseGather(const Operator* op, ErrorReporter* error_reporter, + BuiltinDataAllocator* allocator, void** builtin_data); + +TfLiteStatus ParseGatherNd(const Operator* op, ErrorReporter* error_reporter, + BuiltinDataAllocator* allocator, + void** builtin_data); + +TfLiteStatus ParseGreater(const Operator* op, ErrorReporter* error_reporter, + BuiltinDataAllocator* allocator, void** builtin_data); + +TfLiteStatus ParseGreaterEqual(const Operator* op, + ErrorReporter* error_reporter, + BuiltinDataAllocator* allocator, + void** builtin_data); + +TfLiteStatus ParseHardSwish(const Operator* op, ErrorReporter* error_reporter, + BuiltinDataAllocator* allocator, + void** builtin_data); + +TfLiteStatus ParseIf(const Operator* op, ErrorReporter* error_reporter, + BuiltinDataAllocator* allocator, void** builtin_data); + +TfLiteStatus ParseL2Normalization(const Operator* op, + ErrorReporter* error_reporter, + BuiltinDataAllocator* allocator, + void** builtin_data); + +TfLiteStatus ParseLeakyRelu(const Operator* op, ErrorReporter* error_reporter, + BuiltinDataAllocator* allocator, + void** builtin_data); + +TfLiteStatus ParseLess(const Operator* op, ErrorReporter* error_reporter, + BuiltinDataAllocator* allocator, void** builtin_data); + +TfLiteStatus ParseLessEqual(const Operator* op, ErrorReporter* error_reporter, + BuiltinDataAllocator* allocator, + void** builtin_data); + +TfLiteStatus ParseLog(const Operator* op, ErrorReporter* error_reporter, + BuiltinDataAllocator* allocator, void** builtin_data); + +TfLiteStatus 
ParseLogicalAnd(const Operator* op, ErrorReporter* error_reporter, + BuiltinDataAllocator* allocator, + void** builtin_data); + +TfLiteStatus ParseLogicalNot(const Operator* op, ErrorReporter* error_reporter, + BuiltinDataAllocator* allocator, + void** builtin_data); + +TfLiteStatus ParseLogicalOr(const Operator* op, ErrorReporter* error_reporter, + BuiltinDataAllocator* allocator, + void** builtin_data); + +TfLiteStatus ParseLogistic(const Operator* op, ErrorReporter* error_reporter, + BuiltinDataAllocator* allocator, + void** builtin_data); + +TfLiteStatus ParseLogSoftmax(const Operator* op, ErrorReporter* error_reporter, + BuiltinDataAllocator* allocator, + void** builtin_data); + +TfLiteStatus ParseLSTM(const Operator* op, ErrorReporter* error_reporter, + BuiltinDataAllocator* allocator, void** builtin_data); + +TfLiteStatus ParseMaximum(const Operator* op, ErrorReporter* error_reporter, + BuiltinDataAllocator* allocator, void** builtin_data); + +TfLiteStatus ParseMinimum(const Operator* op, ErrorReporter* error_reporter, + BuiltinDataAllocator* allocator, void** builtin_data); + +TfLiteStatus ParseMirrorPad(const Operator* op, ErrorReporter* error_reporter, + BuiltinDataAllocator* allocator, + void** builtin_data); + +TfLiteStatus ParseMul(const Operator* op, ErrorReporter* error_reporter, + BuiltinDataAllocator* allocator, void** builtin_data); + +TfLiteStatus ParseNeg(const Operator* op, ErrorReporter* error_reporter, + BuiltinDataAllocator* allocator, void** builtin_data); + +TfLiteStatus ParseNotEqual(const Operator* op, ErrorReporter* error_reporter, + BuiltinDataAllocator* allocator, + void** builtin_data); + +TfLiteStatus ParsePack(const Operator* op, ErrorReporter* error_reporter, + BuiltinDataAllocator* allocator, void** builtin_data); + +TfLiteStatus ParsePad(const Operator* op, ErrorReporter* error_reporter, + BuiltinDataAllocator* allocator, void** builtin_data); + +TfLiteStatus ParsePadV2(const Operator* op, ErrorReporter* error_reporter, + 
BuiltinDataAllocator* allocator, void** builtin_data); + +TfLiteStatus ParsePool(const Operator* op, ErrorReporter* error_reporter, + BuiltinDataAllocator* allocator, void** builtin_data); + +TfLiteStatus ParsePow(const Operator* op, ErrorReporter* error_reporter, + BuiltinDataAllocator* allocator, void** builtin_data); + +TfLiteStatus ParsePrelu(const Operator* op, ErrorReporter* error_reporter, + BuiltinDataAllocator* allocator, void** builtin_data); + +TfLiteStatus ParseQuantize(const Operator* op, ErrorReporter* error_reporter, + BuiltinDataAllocator* allocator, + void** builtin_data); + +TfLiteStatus ParseReadVariable(const Operator* op, + ErrorReporter* error_reporter, + BuiltinDataAllocator* allocator, + void** builtin_data); + +TfLiteStatus ParseReducer(const Operator* op, ErrorReporter* error_reporter, + BuiltinDataAllocator* allocator, void** builtin_data); + +TfLiteStatus ParseRelu(const Operator* op, ErrorReporter* error_reporter, + BuiltinDataAllocator* allocator, void** builtin_data); + +TfLiteStatus ParseRelu6(const Operator* op, ErrorReporter* error_reporter, + BuiltinDataAllocator* allocator, void** builtin_data); + +TfLiteStatus ParseReshape(const Operator* op, ErrorReporter* error_reporter, + BuiltinDataAllocator* allocator, void** builtin_data); + +TfLiteStatus ParseResizeBilinear(const Operator* op, + ErrorReporter* error_reporter, + BuiltinDataAllocator* allocator, + void** builtin_data); + +TfLiteStatus ParseResizeNearestNeighbor(const Operator* op, + ErrorReporter* error_reporter, + BuiltinDataAllocator* allocator, + void** builtin_data); + +TfLiteStatus ParseRound(const Operator* op, ErrorReporter* error_reporter, + BuiltinDataAllocator* allocator, void** builtin_data); + +TfLiteStatus ParseRsqrt(const Operator* op, ErrorReporter* error_reporter, + BuiltinDataAllocator* allocator, void** builtin_data); + +TfLiteStatus ParseSelectV2(const Operator* op, ErrorReporter* error_reporter, + BuiltinDataAllocator* allocator, + void** builtin_data); 
+ +TfLiteStatus ParseShape(const Operator* op, ErrorReporter* error_reporter, + BuiltinDataAllocator* allocator, void** builtin_data); + +TfLiteStatus ParseSin(const Operator* op, ErrorReporter* error_reporter, + BuiltinDataAllocator* allocator, void** builtin_data); + +TfLiteStatus ParseSlice(const Operator* op, ErrorReporter* error_reporter, + BuiltinDataAllocator* allocator, void** builtin_data); + +TfLiteStatus ParseSoftmax(const Operator* op, ErrorReporter* error_reporter, + BuiltinDataAllocator* allocator, void** builtin_data); + +TfLiteStatus ParseSpaceToBatchNd(const Operator* op, + ErrorReporter* error_reporter, + BuiltinDataAllocator* allocator, + void** builtin_data); + +TfLiteStatus ParseSpaceToDepth(const Operator* op, + ErrorReporter* error_reporter, + BuiltinDataAllocator* allocator, + void** builtin_data); + +TfLiteStatus ParseSplit(const Operator* op, ErrorReporter* error_reporter, + BuiltinDataAllocator* allocator, void** builtin_data); + +TfLiteStatus ParseSplitV(const Operator* op, ErrorReporter* error_reporter, + BuiltinDataAllocator* allocator, void** builtin_data); + +TfLiteStatus ParseSqueeze(const Operator* op, ErrorReporter* error_reporter, + BuiltinDataAllocator* allocator, void** builtin_data); + +TfLiteStatus ParseSqrt(const Operator* op, ErrorReporter* error_reporter, + BuiltinDataAllocator* allocator, void** builtin_data); + +TfLiteStatus ParseSquare(const Operator* op, ErrorReporter* error_reporter, + BuiltinDataAllocator* allocator, void** builtin_data); + +TfLiteStatus ParseSquaredDifference(const Operator* op, + ErrorReporter* error_reporter, + BuiltinDataAllocator* allocator, + void** builtin_data); + +TfLiteStatus ParseStridedSlice(const Operator* op, + ErrorReporter* error_reporter, + BuiltinDataAllocator* allocator, + void** builtin_data); + +TfLiteStatus ParseSub(const Operator* op, ErrorReporter* error_reporter, + BuiltinDataAllocator* allocator, void** builtin_data); + +TfLiteStatus ParseSvdf(const Operator* op, 
ErrorReporter* error_reporter, + BuiltinDataAllocator* allocator, void** builtin_data); + +TfLiteStatus ParseTanh(const Operator* op, ErrorReporter* error_reporter, + BuiltinDataAllocator* allocator, void** builtin_data); + +TfLiteStatus ParseTranspose(const Operator* op, ErrorReporter* error_reporter, + BuiltinDataAllocator* allocator, + void** builtin_data); + +TfLiteStatus ParseTransposeConv(const Operator* op, + ErrorReporter* error_reporter, + BuiltinDataAllocator* allocator, + void** builtin_data); + +TfLiteStatus ParseUnpack(const Operator* op, ErrorReporter* error_reporter, + BuiltinDataAllocator* allocator, void** builtin_data); + +TfLiteStatus ParseUnidirectionalSequenceLSTM(const Operator* op, + ErrorReporter* error_reporter, + BuiltinDataAllocator* allocator, + void** builtin_data); + +TfLiteStatus ParseVarHandle(const Operator* op, ErrorReporter* error_reporter, + BuiltinDataAllocator* allocator, + void** builtin_data); + +TfLiteStatus ParseWhile(const Operator* op, ErrorReporter* error_reporter, + BuiltinDataAllocator* allocator, void** builtin_data); + +TfLiteStatus ParseZerosLike(const Operator* op, ErrorReporter* error_reporter, + BuiltinDataAllocator* allocator, + void** builtin_data); + +TfLiteStatus ParseBitwiseXor(const Operator* op, ErrorReporter* error_reporter, + BuiltinDataAllocator* allocator, + void** builtin_data); + +TfLiteStatus ParseRightShift(const Operator* op, ErrorReporter* error_reporter, + BuiltinDataAllocator* allocator, + void** builtin_data); + +} // namespace tflite + +#endif // TENSORFLOW_LITE_CORE_API_FLATBUFFER_CONVERSIONS_H_ diff --git a/tensorflow/lite/core/api/op_resolver.cc b/tensorflow/lite/core/api/op_resolver.cc new file mode 100644 index 0000000..ce5ae4f --- /dev/null +++ b/tensorflow/lite/core/api/op_resolver.cc @@ -0,0 +1,68 @@ +/* Copyright 2018 The TensorFlow Authors. All Rights Reserved. 
+ +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#include "tensorflow/lite/core/api/op_resolver.h" + +#include "flatbuffers/flatbuffers.h" // from @flatbuffers +#include "tensorflow/lite/core/api/error_reporter.h" +#include "tensorflow/lite/core/c/common.h" +#include "tensorflow/lite/schema/schema_utils.h" + +namespace tflite { + +TfLiteStatus GetRegistrationFromOpCode( + const OperatorCode* opcode, const OpResolver& op_resolver, + ErrorReporter* error_reporter, const TfLiteRegistration** registration) { + TfLiteStatus status = kTfLiteOk; + *registration = nullptr; + auto builtin_code = GetBuiltinCode(opcode); + int version = opcode->version(); + + if (builtin_code > BuiltinOperator_MAX) { + TF_LITE_REPORT_ERROR( + error_reporter, + "Op builtin_code out of range: %d. Are you using old TFLite binary " + "with newer model?", + builtin_code); + status = kTfLiteError; + } else if (builtin_code != BuiltinOperator_CUSTOM) { + *registration = op_resolver.FindOp(builtin_code, version); + if (*registration == nullptr) { + TF_LITE_REPORT_ERROR( + error_reporter, + "Didn't find op for builtin opcode '%s' version '%d'. " + "An older version of this builtin might be supported. 
" + "Are you using an old TFLite binary with a newer model?\n", + EnumNameBuiltinOperator(builtin_code), version); + status = kTfLiteError; + } + } else if (!opcode->custom_code()) { + TF_LITE_REPORT_ERROR( + error_reporter, + "Operator with CUSTOM builtin_code has no custom_code.\n"); + status = kTfLiteError; + } else { + const char* name = opcode->custom_code()->c_str(); + *registration = op_resolver.FindOp(name, version); + if (*registration == nullptr) { + // Do not report error for unresolved custom op, we do the final check + // while preparing ops. + status = kTfLiteError; + } + } + return status; +} + +} // namespace tflite diff --git a/tensorflow/lite/core/api/op_resolver.h b/tensorflow/lite/core/api/op_resolver.h new file mode 100644 index 0000000..e8a4e32 --- /dev/null +++ b/tensorflow/lite/core/api/op_resolver.h @@ -0,0 +1,136 @@ +/* Copyright 2018 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ +#ifndef TENSORFLOW_LITE_CORE_API_OP_RESOLVER_H_ +#define TENSORFLOW_LITE_CORE_API_OP_RESOLVER_H_ + +#include +#include +#include + +#include "tensorflow/lite/core/api/error_reporter.h" +#include "tensorflow/lite/core/c/common.h" +#include "tensorflow/lite/schema/schema_generated.h" + +namespace tflite { + +/// Abstract interface that returns TfLiteRegistrations given op codes or custom +/// op names. 
This is the mechanism that ops being referenced in the flatbuffer +/// model are mapped to executable function pointers (TfLiteRegistrations). +/// +/// The lifetime of the TfLiteRegistration object whose address is +/// returned by FindOp must exceed the lifetime of any InterpreterBuilder or +/// Interpreter created with this OpResolver. +/// Likewise the lifetime of the TfLiteRegistrationExternal object referenced +/// from the TfLiteRegistration object, if any, must exceed the lifetime of +/// any InterpreterBuilder or Interpreter created with this OpResolver. +class OpResolver { + public: + /// Finds the op registration for a builtin operator by enum code. + virtual const TfLiteRegistration* FindOp(tflite::BuiltinOperator op, + int version) const = 0; + /// Finds the op registration of a custom operator by op name. + virtual const TfLiteRegistration* FindOp(const char* op, + int version) const = 0; + + // Represents a sequence of delegates. + using TfLiteDelegatePtrVector = + std::vector>; + + // Returns optional delegates for resolving and handling ops in the flatbuffer + // model. This may be used in addition to the standard TfLiteRegistration + // lookup for graph resolution. + // WARNING: This API is deprecated, GetDelegateCreators is preferred. + virtual TfLiteDelegatePtrVector GetDelegates(int num_threads) const { + return {}; + } + + // Represents a function that creates a TfLite delegate instance. + using TfLiteDelegateCreator = + std::function( + TfLiteContext* /*context*/)>; + + // Represents a sequence of delegate creator functions. + using TfLiteDelegateCreators = std::vector; + + // Returns a vector of delegate creators to create optional delegates for + // resolving and handling ops in the flatbuffer model. This may be used in + // addition to the standard TfLiteRegistration lookup for graph resolution. 
+ // + // Note that this method is not used (will not be called) if you are using + // TF Lite in Google Play Services; the GetOpaqueDelegateCreators method + // (see below) is used for that case. + virtual TfLiteDelegateCreators GetDelegateCreators() const { return {}; } + + // TODO(b/202712825): it would be nice if we could avoid the need for separate + // "opaque" types & methods for use only with TF Lite in Google Play Services. + + // Represents an opaque delegate instance. + // WARNING: Experimental interface, subject to change. + using TfLiteOpaqueDelegatePtr = + std::unique_ptr; + + // Represents a function that creates an opaque delegate instance. + // WARNING: Experimental interface, subject to change. + using TfLiteOpaqueDelegateCreator = + std::function; + + // Represents a sequence of opaque delegate creator functions. + // WARNING: Experimental interface, subject to change. + using TfLiteOpaqueDelegateCreators = std::vector; + + // Returns a vector of opaque delegate creators to create optional opaque + // delegates for resolving and handling ops in the flatbuffer model. This may + // be used in addition to the standard TfLiteRegistration lookup for graph + // resolution. + // + // Note that this method will be called only if you are using TF Lite in + // Google Play Services; if you are using regular TF Lite, GetDelegateCreators + // (see above) is used instead. + // + // WARNING: Experimental interface, subject to change. + virtual TfLiteOpaqueDelegateCreators GetOpaqueDelegateCreators() const { + return {}; + } + + virtual ~OpResolver() {} + + private: + /// Returns true if this OpResolver may contain any "user defined" ops. + /// By "user defined" ops, we mean any op definitions other than those + /// contained in tflite::ops::builtin::BuiltinOpResolver. + /// + /// If this method returns true, it doesn't necessarily mean that the + /// OpResolver contains a user-defined op, just that the absence of + /// user-defined ops can't be guaranteed. 
+ /// + /// Note that "user-defined" ops are not the same as "custom" ops; + /// BuiltinOpResolver may support certain "custom" ops, in addition to + /// "builtin" ops, and may not support all of the "builtin" op enum values. + virtual bool MayContainUserDefinedOps() const { return true; } + + friend class OpResolverInternal; +}; + +// Handles the logic for converting between an OperatorCode structure extracted +// from a flatbuffer and information about a registered operator +// implementation. +TfLiteStatus GetRegistrationFromOpCode(const OperatorCode* opcode, + const OpResolver& op_resolver, + ErrorReporter* error_reporter, + const TfLiteRegistration** registration); + +} // namespace tflite + +#endif // TENSORFLOW_LITE_CORE_API_OP_RESOLVER_H_ diff --git a/tensorflow/lite/core/api/tensor_utils.cc b/tensorflow/lite/core/api/tensor_utils.cc new file mode 100644 index 0000000..18a643c --- /dev/null +++ b/tensorflow/lite/core/api/tensor_utils.cc @@ -0,0 +1,50 @@ +/* Copyright 2019 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/ + +#include "tensorflow/lite/core/api/tensor_utils.h" + +#include + +#include "tensorflow/lite/core/c/common.h" + +namespace tflite { + +TfLiteStatus ResetVariableTensor(TfLiteTensor* tensor) { + if (!tensor->is_variable) { + return kTfLiteOk; + } + // TODO(b/115961645): Implement - If a variable tensor has a buffer, reset it + // to the value of the buffer. + int value = 0; + if (tensor->type == kTfLiteInt8) { + value = tensor->params.zero_point; + } + // TODO(b/139446230): Provide a platform header to better handle these + // specific scenarios. +#if __ANDROID__ || defined(__x86_64__) || defined(__i386__) || \ + defined(__i386) || defined(__x86__) || defined(__X86__) || \ + defined(_X86_) || defined(_M_IX86) || defined(_M_X64) + memset(tensor->data.raw, value, tensor->bytes); +#else + char* raw_ptr = tensor->data.raw; + for (size_t i = 0; i < tensor->bytes; ++i) { + *raw_ptr = value; + raw_ptr++; + } +#endif + return kTfLiteOk; +} + +} // namespace tflite diff --git a/tensorflow/lite/core/api/tensor_utils.h b/tensorflow/lite/core/api/tensor_utils.h new file mode 100644 index 0000000..440da8a --- /dev/null +++ b/tensorflow/lite/core/api/tensor_utils.h @@ -0,0 +1,28 @@ +/* Copyright 2019 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/ + +#ifndef TENSORFLOW_LITE_CORE_API_TENSOR_UTILS_H_ +#define TENSORFLOW_LITE_CORE_API_TENSOR_UTILS_H_ + +#include "tensorflow/lite/core/c/common.h" + +namespace tflite { + +// Resets a variable tensor to the default value. +TfLiteStatus ResetVariableTensor(TfLiteTensor* tensor); + +} // namespace tflite + +#endif // TENSORFLOW_LITE_CORE_API_TENSOR_UTILS_H_ diff --git a/tensorflow/lite/core/c/BUILD b/tensorflow/lite/core/c/BUILD new file mode 100644 index 0000000..411a933 --- /dev/null +++ b/tensorflow/lite/core/c/BUILD @@ -0,0 +1,28 @@ +load( + "//tensorflow/lite:build_def.bzl", + "tflite_copts", +) + +package( + default_visibility = ["//visibility:public"], + licenses = ["notice"], +) + +cc_library( + name = "common", + srcs = ["common.cc"], + hdrs = [ + "builtin_op_data.h", + "common.h", + ], + copts = tflite_copts(), + deps = [ + ":c_api_types", + ], +) + +cc_library( + name = "c_api_types", + hdrs = ["c_api_types.h"], + copts = tflite_copts(), +) diff --git a/tensorflow/lite/core/c/builtin_op_data.h b/tensorflow/lite/core/c/builtin_op_data.h new file mode 100644 index 0000000..e9c6eb3 --- /dev/null +++ b/tensorflow/lite/core/c/builtin_op_data.h @@ -0,0 +1,542 @@ +/* Copyright 2017 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/ +/// WARNING: Users of TensorFlow Lite should not include this file directly, +/// but should instead include +/// "third_party/tensorflow/lite/c/builtin_op_data.h". +/// Only the TensorFlow Lite implementation itself should include this +/// file directly. +#ifndef TENSORFLOW_LITE_CORE_C_BUILTIN_OP_DATA_H_ +#define TENSORFLOW_LITE_CORE_C_BUILTIN_OP_DATA_H_ + +#include +#include + +#include "tensorflow/lite/core/c/common.h" + +#ifdef __cplusplus +extern "C" { +#endif // __cplusplus + +// TfLiteReshapeParams can't have dynamic data so we fix the maximum possible +// number of dimensions. +#define TFLITE_RESHAPE_PARAMS_MAX_DIMENSION_COUNT 8 + +// TODO(aselle): Consider using "if this then that" for testing. + +// Useful placeholder to put in otherwise empty structs to avoid size warnings. +typedef struct { + char dummy; +} EmptyStructPlaceholder; + +// IMPORTANT: All new members of structs must be added at the end to ensure +// backwards compatibility. + +// Possible padding types (for convolutions) +typedef enum { + kTfLitePaddingUnknown = 0, + kTfLitePaddingSame, + kTfLitePaddingValid, +} TfLitePadding; + +typedef enum { + kTfLiteMirrorPaddingUnknown = 0, + kTfLiteMirrorPaddingReflect, + kTfLiteMirrorPaddingSymmetric, +} TfLiteMirrorPaddingMode; + +// TODO(b/130259536): We should move this out of builtin_op_data. +typedef struct { + int width; + int height; + int width_offset; + int height_offset; +} TfLitePaddingValues; + +typedef struct { + TfLiteMirrorPaddingMode mode; +} TfLiteMirrorPaddingParams; + +// Possible fused activation functions. +typedef enum { + kTfLiteActNone = 0, + kTfLiteActRelu, + kTfLiteActReluN1To1, // min(max(-1, x), 1) + kTfLiteActRelu6, // min(max(0, x), 6) + kTfLiteActTanh, + kTfLiteActSignBit, + kTfLiteActSigmoid, +} TfLiteFusedActivation; + +typedef struct { + // Parameters for CONV_2D version 1. 
+ TfLitePadding padding; + int stride_width; + int stride_height; + TfLiteFusedActivation activation; + + // Parameters for CONV_2D version 2. + // Note: Version 2 supports dilation values not equal to 1. + int dilation_width_factor; + int dilation_height_factor; +} TfLiteConvParams; + +typedef struct { + TfLitePadding padding; + int stride_width; + int stride_height; + int stride_depth; + int dilation_width_factor; + int dilation_height_factor; + int dilation_depth_factor; + TfLiteFusedActivation activation; +} TfLiteConv3DParams; + +typedef TfLiteConv3DParams TfLiteConv3DTransposeParams; + +typedef struct { + TfLitePadding padding; + int stride_width; + int stride_height; + int filter_width; + int filter_height; + TfLiteFusedActivation activation; + struct { + TfLitePaddingValues padding; + } computed; +} TfLitePoolParams; + +typedef struct { + // Parameters for DepthwiseConv version 1 or above. + TfLitePadding padding; + int stride_width; + int stride_height; + // `depth_multiplier` is redundant. It's used by CPU kernels in + // TensorFlow 2.0 or below, but ignored in versions above. + // + // The information can be deduced from the shape of input and the shape of + // weights. Since the TFLiteConverter toolchain doesn't support partially + // specified shapes, relying on `depth_multiplier` stops us from supporting + // graphs with dynamic shape tensors. + // + // Note: Some of the delegates (e.g. NNAPI, GPU) are still relying on this + // field. + int depth_multiplier; + TfLiteFusedActivation activation; + // Parameters for DepthwiseConv version 2 or above. + int dilation_width_factor; + int dilation_height_factor; +} TfLiteDepthwiseConvParams; + +typedef struct { + int rank; + TfLiteFusedActivation activation; + + // Parameter for SVDF version 4. + bool asymmetric_quantize_inputs; +} TfLiteSVDFParams; + +typedef struct { + TfLiteFusedActivation activation; + + // Parameter for RNN version 3. 
+ bool asymmetric_quantize_inputs; +} TfLiteRNNParams; + +typedef struct { + bool time_major; + TfLiteFusedActivation activation; + + // Parameter for Sequence RNN version 3. + bool asymmetric_quantize_inputs; +} TfLiteSequenceRNNParams; + +typedef struct { + bool time_major; + TfLiteFusedActivation activation; + bool merge_outputs; + + // Parameter for Bidirectional RNN version 3. + bool asymmetric_quantize_inputs; +} TfLiteBidirectionalSequenceRNNParams; + +typedef enum { + kTfLiteFullyConnectedWeightsFormatDefault = 0, + kTfLiteFullyConnectedWeightsFormatShuffled4x16Int8 = 1, +} TfLiteFullyConnectedWeightsFormat; + +typedef struct { + // Parameters for FullyConnected version 1 or above. + TfLiteFusedActivation activation; + + // Parameters for FullyConnected version 2 or above. + TfLiteFullyConnectedWeightsFormat weights_format; + + // Parameters for FullyConnected version 5 or above. + // If set to true, then the number of dimensions in the input and the output + // tensors are the same. Furthermore, all but the last dimension of the input + // and output shapes will be equal. + bool keep_num_dims; + + // Parameters for FullyConnected version 7 or above. + // If set to true and the weights are quantized, then non constant inputs + // are quantized at evaluation time with asymmetric quantization. + bool asymmetric_quantize_inputs; +} TfLiteFullyConnectedParams; + +typedef enum { + kTfLiteLshProjectionUnknown = 0, + kTfLiteLshProjectionSparse = 1, + kTfLiteLshProjectionDense = 2, +} TfLiteLSHProjectionType; + +typedef struct { + TfLiteLSHProjectionType type; +} TfLiteLSHProjectionParams; + +typedef struct { + float beta; +} TfLiteSoftmaxParams; + +typedef struct { + int axis; + TfLiteFusedActivation activation; +} TfLiteConcatenationParams; + +typedef struct { + TfLiteFusedActivation activation; + // Parameter added for the version 4. 
+ bool pot_scale_int16; +} TfLiteAddParams; + +typedef struct { + EmptyStructPlaceholder placeholder; +} TfLiteSpaceToBatchNDParams; + +typedef struct { + EmptyStructPlaceholder placeholder; +} TfLiteBatchToSpaceNDParams; + +typedef struct { + bool adj_x; + bool adj_y; + // Parameters for BatchMatMul version 4 or above. + // If set to true and the weights are quantized, then non constant inputs + // are quantized at evaluation time with asymmetric quantization. + bool asymmetric_quantize_inputs; +} TfLiteBatchMatMulParams; + +typedef struct { + TfLiteFusedActivation activation; +} TfLiteMulParams; + +typedef struct { + TfLiteFusedActivation activation; + // Parameter added for the version 5. + bool pot_scale_int16; +} TfLiteSubParams; + +typedef struct { + TfLiteFusedActivation activation; +} TfLiteDivParams; + +typedef struct { + TfLiteFusedActivation activation; +} TfLiteL2NormParams; + +typedef struct { + int radius; + float bias; + float alpha; + float beta; +} TfLiteLocalResponseNormParams; + +typedef enum { + kTfLiteLSTMFullKernel = 0, + kTfLiteLSTMBasicKernel +} TfLiteLSTMKernelType; + +typedef struct { + // Parameters for LSTM version 1. + TfLiteFusedActivation activation; + float cell_clip; + float proj_clip; + + // Parameters for LSTM version 2. + // kTfLiteLSTMBasicKernel is only supported in version 2 or above. + TfLiteLSTMKernelType kernel_type; + + // Parameters for LSTM version 4. + bool asymmetric_quantize_inputs; +} TfLiteLSTMParams; + +typedef struct { + // Parameters needed for the underlying LSTM. + TfLiteFusedActivation activation; + float cell_clip; + float proj_clip; + + // If set to true then the first dimension is time, otherwise batch. + bool time_major; + + // Parameter for unidirectional sequence RNN version 3. + bool asymmetric_quantize_inputs; + + // Parameter for unidirectional sequence RNN version 4. 
+ bool diagonal_recurrent_tensors; +} TfLiteUnidirectionalSequenceLSTMParams; + +typedef struct { + // Parameters supported by version 1: + // Parameters inherited for the LSTM kernel. + TfLiteFusedActivation activation; + float cell_clip; + float proj_clip; + + // If true, store the outputs of both directions in the first output. + bool merge_outputs; + + // Parameters supported by version 2: + // If set to true then the first dimension is time, otherwise batch. + bool time_major; + + // Parameters supported by version 3: + // If set to true, then hybrid ops use asymmetric quantization for inputs. + bool asymmetric_quantize_inputs; +} TfLiteBidirectionalSequenceLSTMParams; + +typedef struct { + bool align_corners; + // half_pixel_centers assumes pixels are of half the actual dimensions, and + // yields more accurate resizes. Corresponds to the same argument for the + // original TensorFlow op in TF2.0. + bool half_pixel_centers; +} TfLiteResizeBilinearParams; + +typedef struct { + bool align_corners; + bool half_pixel_centers; +} TfLiteResizeNearestNeighborParams; + +typedef struct { + EmptyStructPlaceholder placeholder; +} TfLitePadParams; + +typedef struct { + EmptyStructPlaceholder placeholder; +} TfLitePadV2Params; + +typedef struct { + // These fields are only used in old models for backward compatibility. + // In the current implementation, we use the 2nd input of the op as the shape, + // and these fields are unused. 
+ int shape[TFLITE_RESHAPE_PARAMS_MAX_DIMENSION_COUNT]; + int num_dimensions; +} TfLiteReshapeParams; + +typedef struct { + int ngram_size; + int max_skip_size; + bool include_all_ngrams; +} TfLiteSkipGramParams; + +typedef struct { + int block_size; +} TfLiteSpaceToDepthParams; + +typedef struct { + int block_size; +} TfLiteDepthToSpaceParams; + +typedef struct { + TfLiteType in_data_type; + TfLiteType out_data_type; +} TfLiteCastParams; + +typedef enum { + kTfLiteCombinerTypeSum = 0, + kTfLiteCombinerTypeMean = 1, + kTfLiteCombinerTypeSqrtn = 2, +} TfLiteCombinerType; + +typedef struct { + TfLiteCombinerType combiner; +} TfLiteEmbeddingLookupSparseParams; + +typedef struct { + int axis; + int batch_dims; +} TfLiteGatherParams; + +typedef struct { + EmptyStructPlaceholder placeholder; +} TfLiteTransposeParams; + +typedef struct { + bool keep_dims; +} TfLiteReducerParams; + +typedef struct { + int num_splits; +} TfLiteSplitParams; + +typedef struct { + int num_splits; +} TfLiteSplitVParams; + +typedef struct { + // TODO(ahentz): We can't have dynamic data in this struct, at least not yet. + // For now we will fix the maximum possible number of dimensions. + int squeeze_dims[8]; + int num_squeeze_dims; +} TfLiteSqueezeParams; + +typedef struct { + int begin_mask; + int end_mask; + int ellipsis_mask; + int new_axis_mask; + int shrink_axis_mask; + + // Parameters supported by version 8: + // If true, then the end tensor is an offset of the begin tensor. 
+ bool offset; +} TfLiteStridedSliceParams; + +typedef struct { + TfLiteType output_type; +} TfLiteArgMaxParams; + +typedef struct { + TfLiteType output_type; +} TfLiteArgMinParams; + +typedef struct { + // Parameters supported by version 1: + TfLitePadding padding; + int stride_width; + int stride_height; + + // Parameters supported by version 4: + TfLiteFusedActivation activation; +} TfLiteTransposeConvParams; + +typedef struct { + bool validate_indices; +} TfLiteSparseToDenseParams; + +typedef struct { + TfLiteType out_type; +} TfLiteShapeParams; + +typedef struct { + EmptyStructPlaceholder placeholder; +} TfLiteRankParams; + +typedef struct { + // Parameters supported by version 1: + float min; + float max; + int num_bits; + + // Parameters supported by version 2: + bool narrow_range; +} TfLiteFakeQuantParams; + +typedef struct { + int values_count; + int axis; +} TfLitePackParams; + +typedef struct { + int axis; +} TfLiteOneHotParams; + +typedef struct { + int num; + int axis; +} TfLiteUnpackParams; + +typedef struct { + float alpha; +} TfLiteLeakyReluParams; + +typedef struct { + TfLiteType index_out_type; +} TfLiteUniqueParams; + +typedef struct { + int seq_dim; + int batch_dim; +} TfLiteReverseSequenceParams; + +typedef struct { + EmptyStructPlaceholder placeholder; +} TfLiteMatrixDiagParams; + +typedef struct { + EmptyStructPlaceholder placeholder; +} TfLiteMatrixSetDiagParams; + +typedef struct { + int then_subgraph_index; + int else_subgraph_index; +} TfLiteIfParams; + +typedef struct { + int cond_subgraph_index; + int body_subgraph_index; +} TfLiteWhileParams; + +typedef struct { + bool exclusive; + bool reverse; +} TfLiteCumsumParams; + +typedef struct { + int init_subgraph_index; +} TfLiteCallOnceParams; + +typedef struct { + int table_id; + TfLiteType key_dtype; + TfLiteType value_dtype; +} TfLiteHashtableParams; + +typedef struct { + const char* container; + const char* shared_name; +} TfLiteVarHandleParams; + +typedef struct { + int seed; + int 
seed2; +} TfLiteRandomParams; + +typedef struct { + int num_boundaries; + // This points to the memory stored in the model (flatbuffer), + // and is not owned. + const float* boundaries; +} TfLiteBucketizeParams; + +typedef struct { + bool approximate; +} TfLiteGeluParams; + +#ifdef __cplusplus +} // extern "C" +#endif // __cplusplus + +#endif // TENSORFLOW_LITE_CORE_C_BUILTIN_OP_DATA_H_ diff --git a/tensorflow/lite/core/c/c_api_types.h b/tensorflow/lite/core/c/c_api_types.h new file mode 100644 index 0000000..670ec1e --- /dev/null +++ b/tensorflow/lite/core/c/c_api_types.h @@ -0,0 +1,169 @@ +/* Copyright 2020 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +// This file declares types used by the pure C inference API defined in c_api.h, +// some of which are also used in the C++ and C kernel and interpreter APIs. + +/// WARNING: Users of TensorFlow Lite should not include this file directly, +/// but should instead include +/// "third_party/tensorflow/lite/c/c_api_types.h". +/// Only the TensorFlow Lite implementation itself should include this +/// file directly. 
+// IWYU pragma: private, include "third_party/tensorflow/lite/c/c_api_types.h" + +#ifndef TENSORFLOW_LITE_CORE_C_C_API_TYPES_H_ +#define TENSORFLOW_LITE_CORE_C_C_API_TYPES_H_ + +#include + +#ifdef __cplusplus +extern "C" { +#endif + +// Define TFL_CAPI_EXPORT macro to export a function properly with a shared +// library. +#ifdef SWIG +#define TFL_CAPI_EXPORT +#elif defined(TFL_STATIC_LIBRARY_BUILD) +#define TFL_CAPI_EXPORT +#else // not definded TFL_STATIC_LIBRARY_BUILD +#if defined(_WIN32) +#ifdef TFL_COMPILE_LIBRARY +#define TFL_CAPI_EXPORT __declspec(dllexport) +#else +#define TFL_CAPI_EXPORT __declspec(dllimport) +#endif // TFL_COMPILE_LIBRARY +#else +#define TFL_CAPI_EXPORT __attribute__((visibility("default"))) +#endif // _WIN32 +#endif // SWIG + +// Note that new error status values may be added in future in order to +// indicate more fine-grained internal states, therefore, applications should +// not rely on status values being members of the enum. +typedef enum TfLiteStatus { + kTfLiteOk = 0, + + // Generally referring to an error in the runtime (i.e. interpreter) + kTfLiteError = 1, + + // Generally referring to an error from a TfLiteDelegate itself. + kTfLiteDelegateError = 2, + + // Generally referring to an error in applying a delegate due to + // incompatibility between runtime and delegate, e.g., this error is returned + // when trying to apply a TF Lite delegate onto a model graph that's already + // immutable. + kTfLiteApplicationError = 3, + + // Generally referring to serialized delegate data not being found. + // See tflite::delegates::Serialization. + kTfLiteDelegateDataNotFound = 4, + + // Generally referring to data-writing issues in delegate serialization. + // See tflite::delegates::Serialization. + kTfLiteDelegateDataWriteError = 5, + + // Generally referring to data-reading issues in delegate serialization. + // See tflite::delegates::Serialization. 
+ kTfLiteDelegateDataReadError = 6, + + // Generally referring to issues when the TF Lite model has ops that cannot be + // resolved at runtime. This could happen when the specific op is not + // registered or built with the TF Lite framework. + kTfLiteUnresolvedOps = 7, + + // Generally referring to invocation cancelled by the user. + // See `interpreter::Cancel`. + // TODO(b/194915839): Implement `interpreter::Cancel`. + // TODO(b/250636993): Cancellation triggered by `SetCancellationFunction` + // should also return this status code. + kTfLiteCancelled = 8, +} TfLiteStatus; + +// Types supported by tensor +typedef enum { + kTfLiteNoType = 0, + kTfLiteFloat32 = 1, + kTfLiteInt32 = 2, + kTfLiteUInt8 = 3, + kTfLiteInt64 = 4, + kTfLiteString = 5, + kTfLiteBool = 6, + kTfLiteInt16 = 7, + kTfLiteComplex64 = 8, + kTfLiteInt8 = 9, + kTfLiteFloat16 = 10, + kTfLiteFloat64 = 11, + kTfLiteComplex128 = 12, + kTfLiteUInt64 = 13, + kTfLiteResource = 14, + kTfLiteVariant = 15, + kTfLiteUInt32 = 16, + kTfLiteUInt16 = 17, + kTfLiteInt4 = 18, +} TfLiteType; + +// Legacy. Will be deprecated in favor of TfLiteAffineQuantization. +// If per-layer quantization is specified this field will still be populated in +// addition to TfLiteAffineQuantization. +// Parameters for asymmetric quantization. Quantized values can be converted +// back to float using: +// real_value = scale * (quantized_value - zero_point) +typedef struct TfLiteQuantizationParams { + float scale; + int32_t zero_point; +} TfLiteQuantizationParams; + +// -------------------------------------------------------------------------- +// Opaque types used by c_api.h, c_api_opaque.h and common.h. 
+ +// TfLiteOpaqueContext is an opaque version of TfLiteContext; +typedef struct TfLiteOpaqueContext TfLiteOpaqueContext; + +// TfLiteOpaqueNode is an opaque version of TfLiteNode; +typedef struct TfLiteOpaqueNode TfLiteOpaqueNode; + +// TfLiteOpaqueTensor is an opaque version of TfLiteTensor; +typedef struct TfLiteOpaqueTensor TfLiteOpaqueTensor; + +// TfLiteDelegate: allows delegation of nodes to alternative backends. +// Forward declaration of concrete type declared in common.h. +typedef struct TfLiteDelegate TfLiteDelegate; + +// TfLiteOpaqueDelegateStruct: unconditionally opaque version of +// TfLiteDelegate; allows delegation of nodes to alternative backends. +// +// This is an abstract type that is intended to have the same +// role as TfLiteDelegate, but without exposing the implementation +// details of how delegates are implemented. +// WARNING: This is an experimental type and subject to change. +typedef struct TfLiteOpaqueDelegateStruct TfLiteOpaqueDelegateStruct; + +// TfLiteOpaqueDelegate: conditionally opaque version of +// TfLiteDelegate; allows delegation of nodes to alternative backends. +// For TF Lite in Play Services, this is an opaque type, +// but for regular TF Lite, this is just a typedef for TfLiteDelegate. +// WARNING: This is an experimental type and subject to change. +#if TFLITE_WITH_STABLE_ABI || TFLITE_USE_OPAQUE_DELEGATE +typedef TfLiteOpaqueDelegateStruct TfLiteOpaqueDelegate; +#else +typedef TfLiteDelegate TfLiteOpaqueDelegate; +#endif + +#ifdef __cplusplus +} // extern C +#endif +#endif // TENSORFLOW_LITE_CORE_C_C_API_TYPES_H_ diff --git a/tensorflow/lite/core/c/common.cc b/tensorflow/lite/core/c/common.cc new file mode 100644 index 0000000..3f52332 --- /dev/null +++ b/tensorflow/lite/core/c/common.cc @@ -0,0 +1,412 @@ +/* Copyright 2019 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. 
+You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#include "tensorflow/lite/core/c/common.h" + +#ifndef TF_LITE_STATIC_MEMORY +#include +#endif // TF_LITE_STATIC_MEMORY + +#include +#include +#include + +#include "tensorflow/lite/core/c/c_api_types.h" +#ifdef TF_LITE_TENSORFLOW_PROFILER +#include "tensorflow/lite/tensorflow_profiler_logger.h" +#endif + +namespace { + +template +size_t TfLiteVarArrayGetSizeInBytes(const int size) { + constexpr size_t data_size = sizeof(std::declval().data[0]); + size_t computed_size = sizeof(T) + data_size * size; +#if defined(_MSC_VER) + // Context for why this is needed is in http://b/189926408#comment21 + computed_size -= data_size; +#endif + return computed_size; +} + +template +int TfLiteVarArrayEqualsArray(const T* const a, const int b_size, + const U* const b_data) { + static_assert(std::is_samedata[0]), const U&>::value, + "TfLiteVarArrayEqualsArray can only compare same type arrays"); + if (a == nullptr) { + return b_size == 0; + } + if (a->size != b_size) { + return 0; + } + return !memcmp(a->data, b_data, a->size * sizeof(a->data[0])); +} + +template +int TfLiteVarArrayEqual(const T* const a, const T* const b) { + // This goes first because null arrays must compare equal. 
+ if (a == b) { + return 1; + } + if (a == nullptr || b == nullptr) { + return 0; + } + return TfLiteVarArrayEqualsArray(a, b->size, b->data); +} + +#ifndef TF_LITE_STATIC_MEMORY + +template +T* TfLiteVarArrayCreate(const int size) { + const size_t alloc_size = TfLiteVarArrayGetSizeInBytes(size); + if (alloc_size <= 0) { + return nullptr; + } + T* ret = (T*)malloc(alloc_size); + if (!ret) { + return nullptr; + } + ret->size = size; + return ret; +} + +template +T* TfLiteVarArrayCopy(const T* const src) { + if (!src) { + return nullptr; + } + T* const ret = TfLiteVarArrayCreate(src->size); + if (ret) { + memcpy(ret->data, src->data, src->size * sizeof(src->data[0])); + } + return ret; +} + +#endif // TF_LITE_STATIC_MEMORY + +template +void TfLiteVarArrayFree(T* a) { + free(a); +} + +} // namespace + +extern "C" { + +size_t TfLiteIntArrayGetSizeInBytes(int size) { + return TfLiteVarArrayGetSizeInBytes(size); +} + +int TfLiteIntArrayEqual(const TfLiteIntArray* a, const TfLiteIntArray* b) { + return TfLiteVarArrayEqual(a, b); +} + +int TfLiteIntArrayEqualsArray(const TfLiteIntArray* a, int b_size, + const int b_data[]) { + return TfLiteVarArrayEqualsArray(a, b_size, b_data); +} + +#ifndef TF_LITE_STATIC_MEMORY + +TfLiteIntArray* TfLiteIntArrayCreate(int size) { + return TfLiteVarArrayCreate(size); +} + +TfLiteIntArray* TfLiteIntArrayCopy(const TfLiteIntArray* src) { + return TfLiteVarArrayCopy(src); +} + +void TfLiteIntArrayFree(TfLiteIntArray* a) { TfLiteVarArrayFree(a); } + +#endif // TF_LITE_STATIC_MEMORY + +int TfLiteFloatArrayGetSizeInBytes(int size) { + return TfLiteVarArrayGetSizeInBytes(size); +} + +#ifndef TF_LITE_STATIC_MEMORY + +TfLiteFloatArray* TfLiteFloatArrayCreate(int size) { + return TfLiteVarArrayCreate(size); +} + +TfLiteFloatArray* TfLiteFloatArrayCopy(const TfLiteFloatArray* src) { + return TfLiteVarArrayCopy(src); +} + +void TfLiteFloatArrayFree(TfLiteFloatArray* a) { TfLiteVarArrayFree(a); } + +void TfLiteTensorDataFree(TfLiteTensor* t) { + if 
(t->allocation_type == kTfLiteVariantObject && t->data.data) { + delete static_cast(t->data.data); + } else if (t->allocation_type == kTfLiteDynamic || + t->allocation_type == kTfLitePersistentRo) { + if (t->data.raw) { +#ifdef TF_LITE_TENSORFLOW_PROFILER + tflite::PauseHeapMonitoring(/*pause=*/true); + tflite::OnTfLiteTensorDealloc(t); +#endif + free(t->data.raw); +#ifdef TF_LITE_TENSORFLOW_PROFILER + tflite::PauseHeapMonitoring(/*pause=*/false); +#endif + } + } + t->data.raw = nullptr; +} + +void TfLiteQuantizationFree(TfLiteQuantization* quantization) { + if (quantization->type == kTfLiteAffineQuantization) { + TfLiteAffineQuantization* q_params = + (TfLiteAffineQuantization*)(quantization->params); + if (q_params->scale) { + TfLiteFloatArrayFree(q_params->scale); + q_params->scale = nullptr; + } + if (q_params->zero_point) { + TfLiteIntArrayFree(q_params->zero_point); + q_params->zero_point = nullptr; + } + free(q_params); + } + quantization->params = nullptr; + quantization->type = kTfLiteNoQuantization; +} + +void TfLiteSparsityFree(TfLiteSparsity* sparsity) { + if (sparsity == nullptr) { + return; + } + + if (sparsity->traversal_order) { + TfLiteIntArrayFree(sparsity->traversal_order); + sparsity->traversal_order = nullptr; + } + + if (sparsity->block_map) { + TfLiteIntArrayFree(sparsity->block_map); + sparsity->block_map = nullptr; + } + + if (sparsity->dim_metadata) { + int i = 0; + for (; i < sparsity->dim_metadata_size; i++) { + TfLiteDimensionMetadata metadata = sparsity->dim_metadata[i]; + if (metadata.format == kTfLiteDimSparseCSR) { + TfLiteIntArrayFree(metadata.array_segments); + metadata.array_segments = nullptr; + TfLiteIntArrayFree(metadata.array_indices); + metadata.array_indices = nullptr; + } + } + free(sparsity->dim_metadata); + sparsity->dim_metadata = nullptr; + } + + free(sparsity); +} + +void TfLiteTensorFree(TfLiteTensor* t) { + TfLiteTensorDataFree(t); + if (t->dims) TfLiteIntArrayFree(t->dims); + t->dims = nullptr; + + if 
(t->dims_signature) { + TfLiteIntArrayFree((TfLiteIntArray*)t->dims_signature); + } + t->dims_signature = nullptr; + + TfLiteQuantizationFree(&t->quantization); + TfLiteSparsityFree(t->sparsity); + t->sparsity = nullptr; +} + +void TfLiteTensorReset(TfLiteType type, const char* name, TfLiteIntArray* dims, + TfLiteQuantizationParams quantization, char* buffer, + size_t size, TfLiteAllocationType allocation_type, + const void* allocation, bool is_variable, + TfLiteTensor* tensor) { + TfLiteTensorFree(tensor); + tensor->type = type; + tensor->name = name; + tensor->dims = dims; + tensor->params = quantization; + tensor->data.raw = buffer; + tensor->bytes = size; + tensor->allocation_type = allocation_type; + tensor->allocation = allocation; + tensor->is_variable = is_variable; + + tensor->quantization.type = kTfLiteNoQuantization; + tensor->quantization.params = nullptr; +} + +TfLiteStatus TfLiteTensorCopy(const TfLiteTensor* src, TfLiteTensor* dst) { + if (!src || !dst) return kTfLiteOk; + if (src->bytes != dst->bytes) return kTfLiteError; + if (src == dst) return kTfLiteOk; + dst->type = src->type; + if (dst->dims) TfLiteIntArrayFree(dst->dims); + dst->dims = TfLiteIntArrayCopy(src->dims); + if (src->allocation_type == kTfLiteVariantObject) { + if (dst->allocation_type != kTfLiteVariantObject) return kTfLiteError; + auto* dst_vd = static_cast(dst->data.data); + auto* src_vd = static_cast(src->data.data); + // Implicitly casted via return from `CloneTo`. Don't need static cast here. 
+ dst->data.data = src_vd->CloneTo(dst_vd); + } else { + memcpy(dst->data.raw, src->data.raw, src->bytes); + } + dst->buffer_handle = src->buffer_handle; + dst->data_is_stale = src->data_is_stale; + dst->delegate = src->delegate; + + return kTfLiteOk; +} + +TfLiteStatus TfLiteTensorResizeMaybeCopy(size_t num_bytes, TfLiteTensor* tensor, + bool preserve_data) { + if (tensor->allocation_type != kTfLiteDynamic && + tensor->allocation_type != kTfLitePersistentRo) { + return kTfLiteOk; + } +#ifdef TF_LITE_TENSORFLOW_PROFILER + tflite::PauseHeapMonitoring(/*pause=*/true); +#endif + size_t alloc_bytes = num_bytes; + // TODO(b/145340303): Tensor data should be aligned. +#ifdef TFLITE_KERNEL_USE_XNNPACK + alloc_bytes += 16; // XNNPACK_EXTRA_BYTES = 16 +#endif + if (!tensor->data.data) { + tensor->data.data = (char*)malloc(alloc_bytes); +#ifdef TF_LITE_TENSORFLOW_PROFILER + tflite::OnTfLiteTensorAlloc(tensor, alloc_bytes); +#endif + } else if (num_bytes > tensor->bytes) { +#ifdef TF_LITE_TENSORFLOW_PROFILER + tflite::OnTfLiteTensorDealloc(tensor); +#endif + if (preserve_data) { + tensor->data.data = (char*)realloc(tensor->data.data, alloc_bytes); + } else { + // Calling free and malloc can be more efficient as it avoids needlessly + // copying the data when it is not required. + free(tensor->data.data); + tensor->data.data = (char*)malloc(alloc_bytes); + } +#ifdef TF_LITE_TENSORFLOW_PROFILER + tflite::OnTfLiteTensorAlloc(tensor, alloc_bytes); +#endif + } +#ifdef TF_LITE_TENSORFLOW_PROFILER + tflite::PauseHeapMonitoring(/*pause=*/false); +#endif + tensor->bytes = num_bytes; + if (tensor->data.data == nullptr && num_bytes != 0) { + // We are done allocating but tensor is pointing to null and a valid size + // was requested, so we error. 
+ return kTfLiteError; + } + return kTfLiteOk; +} + +TfLiteStatus TfLiteTensorRealloc(size_t num_bytes, TfLiteTensor* tensor) { + return TfLiteTensorResizeMaybeCopy(num_bytes, tensor, true); +} +#endif // TF_LITE_STATIC_MEMORY + +const char* TfLiteTypeGetName(TfLiteType type) { + switch (type) { + case kTfLiteNoType: + return "NOTYPE"; + case kTfLiteFloat32: + return "FLOAT32"; + case kTfLiteUInt16: + return "UINT16"; + case kTfLiteInt16: + return "INT16"; + case kTfLiteInt32: + return "INT32"; + case kTfLiteUInt32: + return "UINT32"; + case kTfLiteUInt8: + return "UINT8"; + case kTfLiteInt8: + return "INT8"; + case kTfLiteInt64: + return "INT64"; + case kTfLiteUInt64: + return "UINT64"; + case kTfLiteBool: + return "BOOL"; + case kTfLiteComplex64: + return "COMPLEX64"; + case kTfLiteComplex128: + return "COMPLEX128"; + case kTfLiteString: + return "STRING"; + case kTfLiteFloat16: + return "FLOAT16"; + case kTfLiteFloat64: + return "FLOAT64"; + case kTfLiteResource: + return "RESOURCE"; + case kTfLiteVariant: + return "VARIANT"; + case kTfLiteInt4: + return "INT4"; + } + return "Unknown type"; +} + +TfLiteDelegate TfLiteDelegateCreate() { return TfLiteDelegate{}; } + +TfLiteOpaqueDelegate* TfLiteOpaqueDelegateCreate( + const TfLiteOpaqueDelegateBuilder* opaque_delegate_builder) { + if (!opaque_delegate_builder) return nullptr; + + TfLiteDelegate* result = new TfLiteDelegate{}; + result->opaque_delegate_builder = new TfLiteOpaqueDelegateBuilder{}; + *(result->opaque_delegate_builder) = *opaque_delegate_builder; + + return reinterpret_cast(result); +} + +void TfLiteOpaqueDelegateDelete(TfLiteOpaqueDelegate* opaque_delegate) { + if (!opaque_delegate) return; + + const TfLiteDelegate* tflite_delegate = + reinterpret_cast(opaque_delegate); + delete tflite_delegate->opaque_delegate_builder; + delete tflite_delegate; +} + +void* TfLiteOpaqueDelegateGetData(const TfLiteOpaqueDelegate* delegate) { + if (!delegate) return nullptr; + + // The following cast is safe only 
because this code is part of the + // TF Lite runtime implementation. Apps using TF Lite should not rely on + // 'TfLiteOpaqueDelegate' and 'TfLiteDelegate' being equivalent. + const auto* tflite_delegate = + reinterpret_cast(delegate); + + if (!tflite_delegate->opaque_delegate_builder) return tflite_delegate->data_; + + return tflite_delegate->opaque_delegate_builder->data; +} + +} // extern "C" diff --git a/tensorflow/lite/core/c/common.h b/tensorflow/lite/core/c/common.h new file mode 100644 index 0000000..9b9b4c6 --- /dev/null +++ b/tensorflow/lite/core/c/common.h @@ -0,0 +1,1358 @@ +/* Copyright 2019 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +// This file defines common C types and APIs for implementing operations, +// delegates and other constructs in TensorFlow Lite. The actual operations and +// delegates can be defined using C++, but the interface between the interpreter +// and the operations are C. +// +// Summary of abstractions +// TF_LITE_ENSURE - Self-sufficient error checking +// TfLiteStatus - Status reporting +// TfLiteIntArray - stores tensor shapes (dims), +// TfLiteContext - allows an op to access the tensors +// TfLiteTensor - tensor (a multidimensional array) +// TfLiteNode - a single node or operation +// TfLiteRegistration - the implementation of a conceptual operation. 
+// TfLiteDelegate - allows delegation of nodes to alternative backends. +// +// Some abstractions in this file are created and managed by Interpreter. +// +// NOTE: The order of values in these structs are "semi-ABI stable". New values +// should be added only to the end of structs and never reordered. + +/// WARNING: Users of TensorFlow Lite should not include this file directly, +/// but should instead include +/// "third_party/tensorflow/lite/c/common.h". +/// Only the TensorFlow Lite implementation itself should include this +/// file directly. +// IWYU pragma: private, include "third_party/tensorflow/lite/c/common.h" + +#ifndef TENSORFLOW_LITE_CORE_C_COMMON_H_ +#define TENSORFLOW_LITE_CORE_C_COMMON_H_ + +#include <stdarg.h> +#include <stdbool.h> +#include <stddef.h> +#include <stdint.h> + +#include "tensorflow/lite/core/c/c_api_types.h" // IWYU pragma: export + +#ifdef __cplusplus +extern "C" { +#endif // __cplusplus + +// The list of external context types known to TF Lite. This list exists solely +// to avoid conflicts and to ensure ops can share the external contexts they +// need. Access to the external contexts is controlled by one of the +// corresponding support files. +typedef enum TfLiteExternalContextType { + kTfLiteEigenContext = 0, // include eigen_support.h to use. + kTfLiteGemmLowpContext = 1, // include gemm_support.h to use. + kTfLiteEdgeTpuContext = 2, // Placeholder for Edge TPU support. + kTfLiteCpuBackendContext = 3, // include cpu_backend_context.h to use. + kTfLiteMaxExternalContexts = 4 +} TfLiteExternalContextType; + +// Forward declare so dependent structs and methods can reference these types +// prior to the struct definitions. +struct TfLiteContext; +struct TfLiteDelegate; +struct TfLiteRegistration; +struct TfLiteOpaqueDelegateBuilder; + +// An external context is a collection of information unrelated to the TF Lite +// framework, but useful to a subset of the ops.
TF Lite knows very little +// about the actual contexts, but it keeps a list of them, and is able to +// refresh them if configurations like the number of recommended threads +// change. +typedef struct TfLiteExternalContext { + TfLiteExternalContextType type; + TfLiteStatus (*Refresh)(struct TfLiteContext* context); +} TfLiteExternalContext; + +#define kTfLiteOptionalTensor (-1) + +// Fixed size list of integers. Used for dimensions and inputs/outputs tensor +// indices +typedef struct TfLiteIntArray { + int size; + +#if defined(_MSC_VER) + // Context for why this is needed is in http://b/189926408#comment21 + int data[1]; +#elif (!defined(__clang__) && defined(__GNUC__) && __GNUC__ == 6 && \ + __GNUC_MINOR__ >= 1) || \ + defined(HEXAGON) || \ + (defined(__clang__) && __clang_major__ == 7 && __clang_minor__ == 1) + // gcc 6.1+ have a bug where flexible members aren't properly handled + // https://github.com/google/re2/commit/b94b7cd42e9f02673cd748c1ac1d16db4052514c + int data[0]; +#else + int data[]; +#endif +} TfLiteIntArray; + +// Given the size (number of elements) in a TfLiteIntArray, calculate its size +// in bytes. +size_t TfLiteIntArrayGetSizeInBytes(int size); + +#ifndef TF_LITE_STATIC_MEMORY +// Create a array of a given `size` (uninitialized entries). +// This returns a pointer, that you must free using TfLiteIntArrayFree(). +TfLiteIntArray* TfLiteIntArrayCreate(int size); +#endif + +// Check if two intarrays are equal. Returns 1 if they are equal, 0 otherwise. +int TfLiteIntArrayEqual(const TfLiteIntArray* a, const TfLiteIntArray* b); + +// Check if an intarray equals an array. Returns 1 if equals, 0 otherwise. +int TfLiteIntArrayEqualsArray(const TfLiteIntArray* a, int b_size, + const int b_data[]); + +#ifndef TF_LITE_STATIC_MEMORY +// Create a copy of an array passed as `src`. +// You are expected to free memory with TfLiteIntArrayFree +TfLiteIntArray* TfLiteIntArrayCopy(const TfLiteIntArray* src); + +// Free memory of array `a`. 
+void TfLiteIntArrayFree(TfLiteIntArray* a); +#endif // TF_LITE_STATIC_MEMORY + +// Fixed size list of floats. Used for per-channel quantization. +typedef struct TfLiteFloatArray { + int size; +#if defined(_MSC_VER) + // Context for why this is needed is in http://b/189926408#comment21 + float data[1]; +#elif (!defined(__clang__) && defined(__GNUC__) && __GNUC__ == 6 && \ + __GNUC_MINOR__ >= 1) || \ + defined(HEXAGON) || \ + (defined(__clang__) && __clang_major__ == 7 && __clang_minor__ == 1) + // gcc 6.1+ have a bug where flexible members aren't properly handled + // https://github.com/google/re2/commit/b94b7cd42e9f02673cd748c1ac1d16db4052514c + float data[0]; +#else + float data[]; +#endif +} TfLiteFloatArray; + +// Given the size (number of elements) in a TfLiteFloatArray, calculate its size +// in bytes. +int TfLiteFloatArrayGetSizeInBytes(int size); + +#ifndef TF_LITE_STATIC_MEMORY +// Create a array of a given `size` (uninitialized entries). +// This returns a pointer, that you must free using TfLiteFloatArrayFree(). +TfLiteFloatArray* TfLiteFloatArrayCreate(int size); + +// Create a copy of an array passed as `src`. +// You are expected to free memory with TfLiteFloatArrayFree. +TfLiteFloatArray* TfLiteFloatArrayCopy(const TfLiteFloatArray* src); + +// Free memory of array `a`. +void TfLiteFloatArrayFree(TfLiteFloatArray* a); +#endif // TF_LITE_STATIC_MEMORY + +// Since we must not depend on any libraries, define a minimal subset of +// error macros while avoiding names that have pre-conceived meanings like +// assert and check. + +// Try to make all reporting calls through TF_LITE_KERNEL_LOG rather than +// calling the context->ReportError function directly, so that message strings +// can be stripped out if the binary size needs to be severely optimized. +#ifndef TF_LITE_STRIP_ERROR_STRINGS +#define TF_LITE_KERNEL_LOG(context, ...) 
\ + do { \ + (context)->ReportError((context), __VA_ARGS__); \ + } while (false) + +#define TF_LITE_MAYBE_KERNEL_LOG(context, ...) \ + do { \ + if ((context) != nullptr) { \ + (context)->ReportError((context), __VA_ARGS__); \ + } \ + } while (false) +#else // TF_LITE_STRIP_ERROR_STRINGS +#define ARGS_UNUSED(...) (void)sizeof(#__VA_ARGS__) +#define TF_LITE_KERNEL_LOG(context, ...) ARGS_UNUSED(__VA_ARGS__) +#define TF_LITE_MAYBE_KERNEL_LOG(context, ...) ARGS_UNUSED(__VA_ARGS__) +#endif // TF_LITE_STRIP_ERROR_STRINGS + +// Check whether value is true, and if not return kTfLiteError from +// the current function (and report the error string msg). +#define TF_LITE_ENSURE_MSG(context, value, msg) \ + do { \ + if (!(value)) { \ + TF_LITE_KERNEL_LOG((context), __FILE__ " " msg); \ + return kTfLiteError; \ + } \ + } while (0) + +// Check whether the value `a` is true, and if not return kTfLiteError from +// the current function, while also reporting the location of the error. +#define TF_LITE_ENSURE(context, a) \ + do { \ + if (!(a)) { \ + TF_LITE_KERNEL_LOG((context), "%s:%d %s was not true.", __FILE__, \ + __LINE__, #a); \ + return kTfLiteError; \ + } \ + } while (0) + +#define TF_LITE_ENSURE_STATUS(a) \ + do { \ + const TfLiteStatus s = (a); \ + if (s != kTfLiteOk) { \ + return s; \ + } \ + } while (0) + +// Check whether the value `a == b` is true, and if not return kTfLiteError from +// the current function, while also reporting the location of the error. +// `a` and `b` may be evaluated more than once, so no side effects or +// extremely expensive computations should be done. +// NOTE: Use TF_LITE_ENSURE_TYPES_EQ if comparing TfLiteTypes. 
+#define TF_LITE_ENSURE_EQ(context, a, b) \ + do { \ + if ((a) != (b)) { \ + TF_LITE_KERNEL_LOG((context), "%s:%d %s != %s (%d != %d)", __FILE__, \ + __LINE__, #a, #b, (a), (b)); \ + return kTfLiteError; \ + } \ + } while (0) + +#define TF_LITE_ENSURE_TYPES_EQ(context, a, b) \ + do { \ + if ((a) != (b)) { \ + TF_LITE_KERNEL_LOG((context), "%s:%d %s != %s (%s != %s)", __FILE__, \ + __LINE__, #a, #b, TfLiteTypeGetName(a), \ + TfLiteTypeGetName(b)); \ + return kTfLiteError; \ + } \ + } while (0) + +#define TF_LITE_ENSURE_NEAR(context, a, b, epsilon) \ + do { \ + auto delta = ((a) > (b)) ? ((a) - (b)) : ((b) - (a)); \ + if (delta > epsilon) { \ + TF_LITE_KERNEL_LOG((context), "%s:%d %s not near %s (%f != %f)", \ + __FILE__, __LINE__, #a, #b, static_cast<double>(a), \ + static_cast<double>(b)); \ + return kTfLiteError; \ + } \ + } while (0) + +#define TF_LITE_ENSURE_OK(context, status) \ + do { \ + const TfLiteStatus s = (status); \ + if ((s) != kTfLiteOk) { \ + return s; \ + } \ + } while (0) + +// Single-precision complex data type compatible with the C99 definition. +typedef struct TfLiteComplex64 { + float re, im; // real and imaginary parts, respectively. +} TfLiteComplex64; + +// Double-precision complex data type compatible with the C99 definition. +typedef struct TfLiteComplex128 { + double re, im; // real and imaginary parts, respectively. +} TfLiteComplex128; + +// Half precision data type compatible with the C99 definition. +typedef struct TfLiteFloat16 { + uint16_t data; +} TfLiteFloat16; + +// Return the name of a given type, for error reporting purposes. +const char* TfLiteTypeGetName(TfLiteType type); + +// SupportedQuantizationTypes. +typedef enum TfLiteQuantizationType { + // No quantization. + kTfLiteNoQuantization = 0, + // Affine quantization (with support for per-channel quantization). + // Corresponds to TfLiteAffineQuantization. + kTfLiteAffineQuantization = 1, +} TfLiteQuantizationType; + +// Structure specifying the quantization used by the tensor, if-any.
+typedef struct TfLiteQuantization { + // The type of quantization held by params. + TfLiteQuantizationType type; + // Holds an optional reference to a quantization param structure. The actual + // type depends on the value of the `type` field (see the comment there for + // the values and corresponding types). + void* params; +} TfLiteQuantization; + +// Parameters for asymmetric quantization across a dimension (i.e per output +// channel quantization). +// quantized_dimension specifies which dimension the scales and zero_points +// correspond to. +// For a particular value in quantized_dimension, quantized values can be +// converted back to float using: +// real_value = scale * (quantized_value - zero_point) +typedef struct TfLiteAffineQuantization { + TfLiteFloatArray* scale; + TfLiteIntArray* zero_point; + int32_t quantized_dimension; +} TfLiteAffineQuantization; + +/* A union of pointers that points to memory for a given tensor. */ +typedef union TfLitePtrUnion { + /* Do not access these members directly, if possible, use + * GetTensorData<TYPE>(tensor) instead, otherwise only access .data, as other + * members are deprecated. */ + int32_t* i32; + uint32_t* u32; + int64_t* i64; + uint64_t* u64; + float* f; + TfLiteFloat16* f16; + double* f64; + char* raw; + const char* raw_const; + uint8_t* uint8; + bool* b; + int16_t* i16; + uint16_t* ui16; + TfLiteComplex64* c64; + TfLiteComplex128* c128; + int8_t* int8; + /* Only use this member. */ + void* data; +} TfLitePtrUnion; + +// Memory allocation strategies. +// * kTfLiteMmapRo: Read-only memory-mapped data, or data externally allocated. +// * kTfLiteArenaRw: Arena allocated with no guarantees about persistence, +// and available during eval. +// * kTfLiteArenaRwPersistent: Arena allocated but persistent across eval, and +// only available during eval. +// * kTfLiteDynamic: Allocated during eval, or for string tensors. +// * kTfLitePersistentRo: Allocated and populated during prepare.
This is +// useful for tensors that can be computed during prepare and treated +// as constant inputs for downstream ops (also in prepare). +// * kTfLiteCustom: Custom memory allocation provided by the user. See +// TfLiteCustomAllocation below. +// * kTfLiteVariantObject: Allocation is an arbitrary type-erased C++ object. +// Allocation and deallocation are done through `new` and `delete`. +typedef enum TfLiteAllocationType { + kTfLiteMemNone = 0, + kTfLiteMmapRo, + kTfLiteArenaRw, + kTfLiteArenaRwPersistent, + kTfLiteDynamic, + kTfLitePersistentRo, + kTfLiteCustom, + kTfLiteVariantObject, +} TfLiteAllocationType; + +// The delegates should use zero or positive integers to represent handles. +// -1 is reserved from unallocated status. +typedef int TfLiteBufferHandle; +enum { + kTfLiteNullBufferHandle = -1, +}; + +// Storage format of each dimension in a sparse tensor. +typedef enum TfLiteDimensionType { + kTfLiteDimDense = 0, + kTfLiteDimSparseCSR, +} TfLiteDimensionType; + +// Metadata to encode each dimension in a sparse tensor. +typedef struct TfLiteDimensionMetadata { + TfLiteDimensionType format; + int dense_size; + TfLiteIntArray* array_segments; + TfLiteIntArray* array_indices; +} TfLiteDimensionMetadata; + +// Parameters used to encode a sparse tensor. For detailed explanation of each +// field please refer to lite/schema/schema.fbs. +typedef struct TfLiteSparsity { + TfLiteIntArray* traversal_order; + TfLiteIntArray* block_map; + TfLiteDimensionMetadata* dim_metadata; + int dim_metadata_size; +} TfLiteSparsity; + +// Defines a custom memory allocation not owned by the runtime. +// `data` should be aligned to kDefaultTensorAlignment defined in +// lite/util.h. (Currently 64 bytes) +// NOTE: See Interpreter.SetCustomAllocationForTensor for details on usage. +typedef struct TfLiteCustomAllocation { + void* data; + size_t bytes; +} TfLiteCustomAllocation; + +// The flags used in `Interpreter::SetCustomAllocationForTensor`. 
+// Note that this is a bitmask, so the values should be 1, 2, 4, 8, ...etc. +typedef enum TfLiteCustomAllocationFlags { + kTfLiteCustomAllocationFlagsNone = 0, + // Skips checking whether allocation.data points to an aligned buffer as + // expected by the TFLite runtime. + // NOTE: Setting this flag can cause crashes when calling Invoke(). + // Use with caution. + kTfLiteCustomAllocationFlagsSkipAlignCheck = 1, +} TfLiteCustomAllocationFlags; + +// A tensor in the interpreter system which is a wrapper around a buffer of +// data including a dimensionality (or NULL if not currently defined). +#ifndef TF_LITE_STATIC_MEMORY +typedef struct TfLiteTensor { + // The data type specification for data stored in `data`. This affects + // what member of `data` union should be used. + TfLiteType type; + // A union of data pointers. The appropriate type should be used for a typed + // tensor based on `type`. + TfLitePtrUnion data; + // A pointer to a structure representing the dimensionality interpretation + // that the buffer should have. NOTE: the product of elements of `dims` + // and the element datatype size should be equal to `bytes` below. + TfLiteIntArray* dims; + // Quantization information. + TfLiteQuantizationParams params; + // How memory is mapped + // kTfLiteMmapRo: Memory mapped read only. + // i.e. weights + // kTfLiteArenaRw: Arena allocated read write memory + // (i.e. temporaries, outputs). + TfLiteAllocationType allocation_type; + // The number of bytes required to store the data of this Tensor. I.e. + // (bytes of each element) * dims[0] * ... * dims[n-1]. For example, if + // type is kTfLiteFloat32 and dims = {3, 2} then + // bytes = sizeof(float) * 3 * 2 = 4 * 3 * 2 = 24. + size_t bytes; + + // An opaque pointer to a tflite::MMapAllocation + const void* allocation; + + // Null-terminated name of this tensor. + const char* name; + + // The delegate which knows how to handle `buffer_handle`. 
+ // WARNING: This is an experimental interface that is subject to change. + struct TfLiteDelegate* delegate; + + // An integer buffer handle that can be handled by `delegate`. + // The value is valid only when delegate is not null. + // WARNING: This is an experimental interface that is subject to change. + TfLiteBufferHandle buffer_handle; + + // If the delegate uses its own buffer (e.g. GPU memory), the delegate is + // responsible to set data_is_stale to true. + // `delegate->CopyFromBufferHandle` can be called to copy the data from + // delegate buffer. + // WARNING: This is an // experimental interface that is subject to change. + bool data_is_stale; + + // True if the tensor is a variable. + bool is_variable; + + // Quantization information. Replaces params field above. + TfLiteQuantization quantization; + + // Parameters used to encode a sparse tensor. + // This is optional. The field is NULL if a tensor is dense. + // WARNING: This is an experimental interface that is subject to change. + TfLiteSparsity* sparsity; + + // Optional. Encodes shapes with unknown dimensions with -1. This field is + // only populated when unknown dimensions exist in a read-write tensor (i.e. + // an input or output tensor). (e.g. `dims` contains [1, 1, 1, 3] and + // `dims_signature` contains [1, -1, -1, 3]). If no unknown dimensions exist + // then `dims_signature` is either null, or set to an empty array. Note that + // this field only exists when TF_LITE_STATIC_MEMORY is not defined. + const TfLiteIntArray* dims_signature; +} TfLiteTensor; + +// A structure representing an instance of a node. +// This structure only exhibits the inputs, outputs, user defined data and some +// node properties (like statefulness), not other features like the type. +typedef struct TfLiteNode { + // Inputs to this node expressed as indices into the simulator's tensors. + TfLiteIntArray* inputs; + + // Outputs to this node expressed as indices into the simulator's tensors. 
+ TfLiteIntArray* outputs; + + // intermediate tensors to this node expressed as indices into the simulator's + // tensors. + TfLiteIntArray* intermediates; + + // Temporary tensors uses during the computations. This usually contains no + // tensors, but ops are allowed to change that if they need scratch space of + // any sort. + TfLiteIntArray* temporaries; + + // Opaque data provided by the node implementer through `Registration.init`. + void* user_data; + + // Opaque data provided to the node if the node is a builtin. This is usually + // a structure defined in builtin_op_data.h + void* builtin_data; + + // Custom initial data. This is the opaque data provided in the flatbuffer. + // WARNING: This is an experimental interface that is subject to change. + const void* custom_initial_data; + int custom_initial_data_size; + + // The pointer to the delegate. This is non-null only when the node is + // created by calling `interpreter.ModifyGraphWithDelegate`. + // WARNING: This is an experimental interface that is subject to change. + struct TfLiteDelegate* delegate; + + // Whether this op might have side effect (e.g. stateful op). + bool might_have_side_effect; +} TfLiteNode; +#else // defined(TF_LITE_STATIC_MEMORY)? +// NOTE: This flag is opt-in only at compile time. +// +// Specific reduced TfLiteTensor struct for TF Micro runtime. This struct +// contains only the minimum fields required to initialize and prepare a micro +// inference graph. The fields in this struct have been ordered from +// largest-to-smallest for optimal struct sizeof. +// +// This struct does not use: +// - allocation +// - buffer_handle +// - data_is_stale +// - delegate +// - dims_signature +// - name +// - sparsity +typedef struct TfLiteTensor { + // TODO(b/155784997): Consider consolidating these quantization fields: + // Quantization information. Replaces params field above. + TfLiteQuantization quantization; + + // Quantization information. 
+ TfLiteQuantizationParams params; + + // A union of data pointers. The appropriate type should be used for a typed + // tensor based on `type`. + TfLitePtrUnion data; + + // A pointer to a structure representing the dimensionality interpretation + // that the buffer should have. NOTE: the product of elements of `dims` + // and the element datatype size should be equal to `bytes` below. + TfLiteIntArray* dims; + + // The number of bytes required to store the data of this Tensor. I.e. + // (bytes of each element) * dims[0] * ... * dims[n-1]. For example, if + // type is kTfLiteFloat32 and dims = {3, 2} then + // bytes = sizeof(float) * 3 * 2 = 4 * 3 * 2 = 24. + size_t bytes; + + // The data type specification for data stored in `data`. This affects + // what member of `data` union should be used. + TfLiteType type; + + // How memory is mapped + // kTfLiteMmapRo: Memory mapped read only. + // i.e. weights + // kTfLiteArenaRw: Arena allocated read write memory + // (i.e. temporaries, outputs). + TfLiteAllocationType allocation_type; + + // True if the tensor is a variable. + bool is_variable; +} TfLiteTensor; + +// Specific reduced TfLiteNode struct for TF Micro runtime. This struct contains +// only the minimum fields required to represent a node. +// +// This struct does not use: +// - delegate +// - intermediates +// - temporaries +typedef struct TfLiteNode { + // Inputs to this node expressed as indices into the simulator's tensors. + TfLiteIntArray* inputs; + + // Outputs to this node expressed as indices into the simulator's tensors. + TfLiteIntArray* outputs; + + // intermediate tensors to this node expressed as indices into the simulator's + // tensors. + TfLiteIntArray* intermediates; + + // Opaque data provided by the node implementer through `Registration.init`. + void* user_data; + + // Opaque data provided to the node if the node is a builtin. This is usually + // a structure defined in builtin_op_data.h + void* builtin_data; + + // Custom initial data. 
This is the opaque data provided in the flatbuffer. + // WARNING: This is an experimental interface that is subject to change. + const void* custom_initial_data; + int custom_initial_data_size; +} TfLiteNode; +#endif // TF_LITE_STATIC_MEMORY + +// Light-weight tensor struct for TF Micro runtime. Provides the minimal amount +// of information required for a kernel to run during TfLiteRegistration::Eval. +// TODO(b/160955687): Move this field into TF_LITE_STATIC_MEMORY when TFLM +// builds with this flag by default internally. +typedef struct TfLiteEvalTensor { + // A union of data pointers. The appropriate type should be used for a typed + // tensor based on `type`. + TfLitePtrUnion data; + + // A pointer to a structure representing the dimensionality interpretation + // that the buffer should have. + TfLiteIntArray* dims; + + // The data type specification for data stored in `data`. This affects + // what member of `data` union should be used. + TfLiteType type; +} TfLiteEvalTensor; + +#ifndef TF_LITE_STATIC_MEMORY +// Free data memory of tensor `t`. +void TfLiteTensorDataFree(TfLiteTensor* t); + +// Free quantization data. +void TfLiteQuantizationFree(TfLiteQuantization* quantization); + +// Free sparsity parameters. +void TfLiteSparsityFree(TfLiteSparsity* sparsity); + +// Free memory of tensor `t`. +void TfLiteTensorFree(TfLiteTensor* t); + +// Set all of a tensor's fields (and free any previously allocated data). +void TfLiteTensorReset(TfLiteType type, const char* name, TfLiteIntArray* dims, + TfLiteQuantizationParams quantization, char* buffer, + size_t size, TfLiteAllocationType allocation_type, + const void* allocation, bool is_variable, + TfLiteTensor* tensor); + +// Copies the contents of 'src' in 'dst'. +// Function does nothing if either 'src' or 'dst' is passed as nullptr and +// return kTfLiteOk. +// Returns kTfLiteError if 'src' and 'dst' doesn't have matching data size. 
+// Note function copies contents, so it won't create new data pointer +// or change allocation type. +// All Tensor related properties will be copied from 'src' to 'dst' like +// quantization, sparsity, ... +TfLiteStatus TfLiteTensorCopy(const TfLiteTensor* src, TfLiteTensor* dst); + +// Change the size of the memory block owned by `tensor` to `num_bytes`. +// Tensors with allocation types other than `kTfLiteDynamic` will be ignored and +// a kTfLiteOk will be returned. +// `tensor`'s internal data buffer will be assigned a pointer +// which can safely be passed to free or realloc if `num_bytes` is zero. +// If `preserve_data` is true, tensor data will be unchanged in the range from +// the start of the region up to the minimum of the old and new sizes. In the +// case of NULL tensor, or an error allocating new memory, returns +// `kTfLiteError`. +TfLiteStatus TfLiteTensorResizeMaybeCopy(size_t num_bytes, TfLiteTensor* tensor, + bool preserve_data); + +// Change the size of the memory block owned by `tensor` to `num_bytes`. +// Tensors with allocation types other than kTfLiteDynamic will be ignored and +// a kTfLiteOk will be returned. +// `tensor`'s internal data buffer will be assigned a pointer +// which can safely be passed to free or realloc if `num_bytes` is zero. +// Tensor data will be unchanged in the range from the start of the region up to +// the minimum of the old and new sizes. In the case +// of NULL tensor, or an error allocating new memory, returns `kTfLiteError`. +TfLiteStatus TfLiteTensorRealloc(size_t num_bytes, TfLiteTensor* tensor); +#endif // TF_LITE_STATIC_MEMORY + +// WARNING: This is an experimental interface that is subject to change. +// +// Currently, TfLiteDelegateParams has to be allocated in a way that it's +// trivially destructable. It will be stored as `builtin_data` field in +// `TfLiteNode` of the delegate node. +// +// See also the `CreateDelegateParams` function in `interpreter.cc` details. 
+typedef struct TfLiteDelegateParams { + struct TfLiteDelegate* delegate; + TfLiteIntArray* nodes_to_replace; + TfLiteIntArray* input_tensors; + TfLiteIntArray* output_tensors; +} TfLiteDelegateParams; + +// WARNING: This is an experimental interface that is subject to change. +// +// Currently, TfLiteOpaqueDelegateParams has to be allocated in a way that it's +// trivially destructable. It will be stored as `builtin_data` field in +// `TfLiteNode` of the delegate node. +// +// See also the `CreateOpaqueDelegateParams` function in `subgraph.cc` +// details. +typedef struct TfLiteOpaqueDelegateParams { + TfLiteOpaqueDelegate* delegate; + void* delegate_data; + TfLiteIntArray* nodes_to_replace; + TfLiteIntArray* input_tensors; + TfLiteIntArray* output_tensors; +} TfLiteOpaqueDelegateParams; + +typedef struct TfLiteContext { + // Number of tensors in the context. + size_t tensors_size; + + // The execution plan contains a list of the node indices in execution + // order. execution_plan->size is the current number of nodes. And, + // execution_plan->data[0] is the first node that needs to be run. + // TfLiteDelegates can traverse the current execution plan by iterating + // through each member of this array and using GetNodeAndRegistration() to + // access details about a node. i.e. + // + // TfLiteIntArray* execution_plan; + // TF_LITE_ENSURE_STATUS(context->GetExecutionPlan(context, &execution_plan)); + // for (int exec_index = 0; exec_index < execution_plan->size; exec_index++) { + // int node_index = execution_plan->data[exec_index]; + // TfLiteNode* node; + // TfLiteRegistration* reg; + // context->GetNodeAndRegistration(context, node_index, &node, &reg); + // } + // Note: the memory pointed by '`*execution_plan` is OWNED by TfLite runtime. + // Future calls to GetExecutionPlan invalidates earlier outputs. The following + // code snippet shows the issue of such an invocation pattern. After calling + // CheckNode, subsequent access to `plan_1st` is undefined.
+ // + // void CheckNode(const TfLiteNode* node) { + // ... + // TfLiteIntArray* plan_2nd; + // TF_LITE_ENSURE_STATUS(context->GetExecutionPlan(context, &plan_2nd)); + // ... + // } + // + // TfLiteIntArray* plan_1st; + // TF_LITE_ENSURE_STATUS(context->GetExecutionPlan(context, &plan_1st)); + // for (int exec_index = 0; exec_index < plan_1st->size; exec_index++) { + // int node_index = plan_1st->data[exec_index]; + // TfLiteNode* node; + // TfLiteRegistration* reg; + // context->GetNodeAndRegistration(context, node_index, &node, &reg); + // CheckNode(node); + // } + // + // WARNING: This is an experimental interface that is subject to change. + TfLiteStatus (*GetExecutionPlan)(struct TfLiteContext* context, + TfLiteIntArray** execution_plan); + + // An array of tensors in the interpreter context (of length `tensors_size`) + TfLiteTensor* tensors; + + // opaque full context ptr (an opaque c++ data structure) + void* impl_; + + // Request memory pointer be resized. Updates dimensions on the tensor. + // NOTE: ResizeTensor takes ownership of newSize. + TfLiteStatus (*ResizeTensor)(struct TfLiteContext*, TfLiteTensor* tensor, + TfLiteIntArray* new_size); + // Request that an error be reported with format string msg. + void (*ReportError)(struct TfLiteContext*, const char* msg, ...); + + // Add `tensors_to_add` tensors, preserving pre-existing Tensor entries. If + // non-null, the value pointed to by `first_new_tensor_index` will be set to + // the index of the first new tensor. + TfLiteStatus (*AddTensors)(struct TfLiteContext*, int tensors_to_add, + int* first_new_tensor_index); + + // Get a Tensor node by node_index. + // WARNING: This is an experimental interface that is subject to change. + TfLiteStatus (*GetNodeAndRegistration)( + struct TfLiteContext*, int node_index, TfLiteNode** node, + struct TfLiteRegistration** registration); + + // Replace ops with one or more stub delegate operations. This function + // does not take ownership of `nodes_to_replace`.
+ TfLiteStatus (*ReplaceNodeSubsetsWithDelegateKernels)( + struct TfLiteContext*, struct TfLiteRegistration registration, + const TfLiteIntArray* nodes_to_replace, struct TfLiteDelegate* delegate); + + // Number of threads that are recommended to subsystems like gemmlowp and + // eigen. + int recommended_num_threads; + + // Access external contexts by type. + // WARNING: This is an experimental interface that is subject to change. + TfLiteExternalContext* (*GetExternalContext)(struct TfLiteContext*, + TfLiteExternalContextType); + // Set the value of a external context. Does not take ownership of the + // pointer. + // WARNING: This is an experimental interface that is subject to change. + void (*SetExternalContext)(struct TfLiteContext*, TfLiteExternalContextType, + TfLiteExternalContext*); + + // Flag for allowing float16 precision for FP32 calculation. + // default: false. + // WARNING: This is an experimental API and subject to change. + bool allow_fp32_relax_to_fp16; + + // Pointer to the op-level profiler, if set; nullptr otherwise. + void* profiler; + + // Allocate persistent buffer which has the same life time as the interpreter. + // Returns nullptr on failure. + // The memory is allocated from heap for TFL, and from tail in TFLM. + // This method is only available in Init or Prepare stage. + // WARNING: This is an experimental interface that is subject to change. + void* (*AllocatePersistentBuffer)(struct TfLiteContext* ctx, size_t bytes); + + // Allocate a buffer which will be deallocated right after invoke phase. + // The memory is allocated from heap in TFL, and from volatile arena in TFLM. + // This method is only available in invoke stage. + // NOTE: If possible use RequestScratchBufferInArena method to avoid memory + // allocation during inference time. + // WARNING: This is an experimental interface that is subject to change. 
+ TfLiteStatus (*AllocateBufferForEval)(struct TfLiteContext* ctx, size_t bytes, + void** ptr); + + // Request a scratch buffer in the arena through static memory planning. + // This method is only available in Prepare stage and the buffer is allocated + // by the interpreter between Prepare and Eval stage. In Eval stage, + // GetScratchBuffer API can be used to fetch the address. + // WARNING: This is an experimental interface that is subject to change. + TfLiteStatus (*RequestScratchBufferInArena)(struct TfLiteContext* ctx, + size_t bytes, int* buffer_idx); + + // Get the scratch buffer pointer. + // This method is only available in Eval stage. + // WARNING: This is an experimental interface that is subject to change. + void* (*GetScratchBuffer)(struct TfLiteContext* ctx, int buffer_idx); + + // Resize the memory pointer of the `tensor`. This method behaves the same as + // `ResizeTensor`, except that it makes a copy of the shape array internally + // so the shape array could be deallocated right afterwards. + // WARNING: This is an experimental interface that is subject to change. + TfLiteStatus (*ResizeTensorExplicit)(struct TfLiteContext* ctx, + TfLiteTensor* tensor, int dims, + const int* shape); + + // This method provides a preview of post-delegation partitioning. Each + // TfLiteDelegateParams in the referenced array corresponds to one instance of + // the delegate kernel. + // Example usage: + // + // TfLiteIntArray* nodes_to_replace = ...; + // TfLiteDelegateParams* params_array; + // int num_partitions = 0; + // TF_LITE_ENSURE_STATUS(context->PreviewDelegatePartitioning( + // context, delegate, nodes_to_replace, &params_array, &num_partitions)); + // for (int idx = 0; idx < num_partitions; idx++) { + // const auto& partition_params = params_array[idx]; + // ... + // } + // + // NOTE: The context owns the memory referenced by partition_params_array.
It + // will be cleared with another call to PreviewDelegateParitioning, or after + // TfLiteDelegateParams::Prepare returns. + // + // WARNING: This is an experimental interface that is subject to change. + TfLiteStatus (*PreviewDelegatePartitioning)( + struct TfLiteContext* context, const TfLiteIntArray* nodes_to_replace, + TfLiteDelegateParams** partition_params_array, int* num_partitions); + + // Returns a TfLiteTensor struct for a given index. + // WARNING: This is an experimental interface that is subject to change. + // WARNING: This method may not be available on all platforms. + TfLiteTensor* (*GetTensor)(const struct TfLiteContext* context, + int tensor_idx); + + // Returns a TfLiteEvalTensor struct for a given index. + // WARNING: This is an experimental interface that is subject to change. + // WARNING: This method may not be available on all platforms. + TfLiteEvalTensor* (*GetEvalTensor)(const struct TfLiteContext* context, + int tensor_idx); + + // Retrieves named metadata buffer from the TFLite model. + // Returns kTfLiteOk if metadata is successfully obtained from the flatbuffer + // Model: that is, there exists a `metadata` entry with given `name` string. + // (see TFLite's schema.fbs). + // The corresponding `buffer` information is populated in `ptr` & `bytes`. + // The data from `ptr` is valid for the lifetime of the Interpreter. + // + // WARNING: This is an experimental interface that is subject to change. + TfLiteStatus (*GetModelMetadata)(const struct TfLiteContext* context, + const char* name, const char** ptr, + size_t* bytes); + + // Retrieves the corresponding TfLiteContext of a subgraph that the given + // subgraph_index points to and switches to the delegate context for that + // subgraph. If an invalid subgraph index is given, returns kTfLiteError. + // NOTE: This function is expected to be paired with ReleaseSubgraphContext() + // once the delegate preparation is done and/or the delegate context functions + // are no longer needed. 
+ // + // WARNING: This is an experimental interface that is subject to change. + TfLiteStatus (*AcquireSubgraphContext)( + struct TfLiteContext* context, int subgraph_index, + struct TfLiteContext** acquired_context); + // Releases the subgraph context by switching back to the TFLite kernel + // context for the subgraph that the given subgraph_index points to. + // NOTE: This function is expected to be used after AcquireSubgraphContext() + // once the delegate preparation is done and/or the delegate context functions + // are no longer needed. + // + // WARNING: This is an experimental interface that is subject to change. + TfLiteStatus (*ReleaseSubgraphContext)(struct TfLiteContext* context, + int subgraph_index); +} TfLiteContext; + +// `TfLiteRegistrationExternal` is an external version of `TfLiteRegistration` +// for C API which doesn't use internal types (such as `TfLiteContext`) but only +// uses stable API types (such as `TfLiteOpaqueContext`). The purpose of each +// field is the exactly the same as with `TfLiteRegistration`. +typedef struct TfLiteRegistrationExternal TfLiteRegistrationExternal; + +typedef struct TfLiteRegistration { + // Initializes the op from serialized data. + // Called only *once* for the lifetime of the op, so any one-time allocations + // should be made here (unless they depend on tensor sizes). + // + // If a built-in op: + // `buffer` is the op's params data (TfLiteLSTMParams*). + // `length` is zero. + // If custom op: + // `buffer` is the op's `custom_options`. + // `length` is the size of the buffer. + // + // Returns a type-punned (i.e. void*) opaque data (e.g. a primitive pointer + // or an instance of a struct). + // + // The returned pointer will be stored with the node in the `user_data` field, + // accessible within prepare and invoke functions below. + // NOTE: if the data is already in the desired format, simply implement this + // function to return `nullptr` and implement the free function to be a no-op. 
+ void* (*init)(TfLiteContext* context, const char* buffer, size_t length); + + // The pointer `buffer` is the data previously returned by an init invocation. + void (*free)(TfLiteContext* context, void* buffer); + + // prepare is called when the inputs this node depends on have been resized. + // context->ResizeTensor() can be called to request output tensors to be + // resized. + // Can be called multiple times for the lifetime of the op. + // + // Returns kTfLiteOk on success. + TfLiteStatus (*prepare)(TfLiteContext* context, TfLiteNode* node); + + // Execute the node (should read node->inputs and output to node->outputs). + // Returns kTfLiteOk on success. + TfLiteStatus (*invoke)(TfLiteContext* context, TfLiteNode* node); + + // profiling_string is called during summarization of profiling information + // in order to group executions together. Providing a value here will cause a + // given op to appear multiple times is the profiling report. This is + // particularly useful for custom ops that can perform significantly + // different calculations depending on their `user-data`. + const char* (*profiling_string)(const TfLiteContext* context, + const TfLiteNode* node); + + // Builtin codes. If this kernel refers to a builtin this is the code + // of the builtin. This is so we can do marshaling to other frameworks like + // NN API. + // Note: It is the responsibility of the registration binder to set this + // properly. + int32_t builtin_code; + + // Custom op name. If the op is a builtin, this will be null. + // Note: It is the responsibility of the registration binder to set this + // properly. + // WARNING: This is an experimental interface that is subject to change. + const char* custom_name; + + // The version of the op. + // Note: It is the responsibility of the registration binder to set this + // properly. + int version; + + // The external version of `TfLiteRegistration`. 
Since we can't use internal + // types (such as `TfLiteContext`) for C API to maintain ABI stability. + // C API user will provide `TfLiteRegistrationExternal` to implement custom + // ops. We keep it inside of `TfLiteRegistration` and use it to route + // callbacks properly. + TfLiteRegistrationExternal* registration_external; + + // Retrieves asynchronous kernel. + // + // If the `async_kernel` field is nullptr, it means the operation described by + // this TfLiteRegistration object does not support asynchronous execution. + // Otherwise, the function that the field points to should only be called for + // delegate kernel nodes, i.e. `node` should be a delegate kernel node created + // by applying a delegate. + // If the function returns nullptr, that means that the underlying delegate + // does not support asynchronous execution for this `node`. + struct TfLiteAsyncKernel* (*async_kernel)(TfLiteContext* context, + TfLiteNode* node); +} TfLiteRegistration; + +/// \private +// Old version of `TfLiteRegistration` to maintain binary backward +// compatibility. +// The legacy registration type must be a POD struct type whose field types must +// be a prefix of the field types in TfLiteRegistration, and offset of the first +// field in TfLiteRegistration that is not present in the legacy registration +// type must be greater than or equal to the size of the legacy registration +// type. +// WARNING: This structure is deprecated / not an official part of the +// API. It should be only used for binary backward compatibility. 
+typedef struct TfLiteRegistration_V2 { + void* (*init)(TfLiteContext* context, const char* buffer, size_t length); + void (*free)(TfLiteContext* context, void* buffer); + TfLiteStatus (*prepare)(TfLiteContext* context, TfLiteNode* node); + TfLiteStatus (*invoke)(TfLiteContext* context, TfLiteNode* node); + const char* (*profiling_string)(const TfLiteContext* context, + const TfLiteNode* node); + int32_t builtin_code; + const char* custom_name; + int version; + TfLiteRegistrationExternal* registration_external; +} TfLiteRegistration_V2; + +/// \private +// Old version of `TfLiteRegistration` to maintain binary backward +// compatibility. +// The legacy registration type must be a POD struct type whose field types must +// be a prefix of the field types in TfLiteRegistration, and offset of the first +// field in TfLiteRegistration that is not present in the legacy registration +// type must be greater than or equal to the size of the legacy registration +// type. +// WARNING: This structure is deprecated / not an official part of the +// API. It should be only used for binary backward compatibility. +typedef struct TfLiteRegistration_V1 { + void* (*init)(TfLiteContext* context, const char* buffer, size_t length); + void (*free)(TfLiteContext* context, void* buffer); + TfLiteStatus (*prepare)(TfLiteContext* context, TfLiteNode* node); + TfLiteStatus (*invoke)(TfLiteContext* context, TfLiteNode* node); + const char* (*profiling_string)(const TfLiteContext* context, + const TfLiteNode* node); + int32_t builtin_code; + const char* custom_name; + int version; +} TfLiteRegistration_V1; + +// The flags used in `TfLiteDelegate`. Note that this is a bitmask, so the +// values should be 1, 2, 4, 8, ...etc. +typedef enum TfLiteDelegateFlags { + kTfLiteDelegateFlagsNone = 0, + // The flag is set if the delegate can handle dynamic sized tensors. + // For example, the output shape of a `Resize` op with non-constant shape + // can only be inferred when the op is invoked. 
+ // In this case, the Delegate is responsible for calling + // `SetTensorToDynamic` to mark the tensor as a dynamic tensor, and calling + // `ResizeTensor` when invoking the op. + // + // If the delegate isn't capable to handle dynamic tensors, this flag need + // to be set to false. + kTfLiteDelegateFlagsAllowDynamicTensors = 1, + + // This flag can be used by delegates (that allow dynamic tensors) to ensure + // applicable tensor shapes are automatically propagated in the case of tensor + // resizing. + // This means that non-dynamic (allocation_type != kTfLiteDynamic) I/O tensors + // of a delegate kernel will have correct shapes before its Prepare() method + // is called. The runtime leverages TFLite builtin ops in the original + // execution plan to propagate shapes. + // + // A few points to note: + // 1. This requires kTfLiteDelegateFlagsAllowDynamicTensors. If that flag is + // false, this one is redundant since the delegate kernels are re-initialized + // every time tensors are resized. + // 2. Enabling this flag adds some overhead to AllocateTensors(), since extra + // work is required to prepare the original execution plan. + // 3. This flag requires that the original execution plan only have ops with + // valid registrations (and not 'dummy' custom ops like with Flex). + // WARNING: This feature is experimental and subject to change. + kTfLiteDelegateFlagsRequirePropagatedShapes = 2, + + // This flag can be used by delegates to request per-operator profiling. If a + // node is a delegate node, this flag will be checked before profiling. If + // set, then the node will not be profiled. The delegate will then add per + // operator information using Profiler::EventType::OPERATOR_INVOKE_EVENT and + // the results will appear in the operator-wise Profiling section and not in + // the Delegate internal section. + kTfLiteDelegateFlagsPerOperatorProfiling = 4 +} TfLiteDelegateFlags; + +// WARNING: This is an experimental interface that is subject to change. 
+typedef struct TfLiteDelegate { + // Data that delegate needs to identify itself. This data is owned by the + // delegate. The delegate is owned in the user code, so the delegate is + // responsible for deallocating this when it is destroyed. + void* data_; + + // Invoked by ModifyGraphWithDelegate. This prepare is called, giving the + // delegate a view of the current graph through TfLiteContext*. It typically + // will look at the nodes and call ReplaceNodeSubsetsWithDelegateKernels() + // to ask the TensorFlow lite runtime to create macro-nodes to represent + // delegated subgraphs of the original graph. + TfLiteStatus (*Prepare)(TfLiteContext* context, + struct TfLiteDelegate* delegate); + + // Copy the data from delegate buffer handle into raw memory of the given + // 'tensor'. Note that the delegate is allowed to allocate the raw bytes as + // long as it follows the rules for kTfLiteDynamic tensors, in which case this + // cannot be null. + TfLiteStatus (*CopyFromBufferHandle)(TfLiteContext* context, + struct TfLiteDelegate* delegate, + TfLiteBufferHandle buffer_handle, + TfLiteTensor* tensor); + + // Copy the data from raw memory of the given 'tensor' to delegate buffer + // handle. This can be null if the delegate doesn't use its own buffer. + TfLiteStatus (*CopyToBufferHandle)(TfLiteContext* context, + struct TfLiteDelegate* delegate, + TfLiteBufferHandle buffer_handle, + TfLiteTensor* tensor); + + // Free the Delegate Buffer Handle. Note: This only frees the handle, but + // this doesn't release the underlying resource (e.g. textures). The + // resources are either owned by application layer or the delegate. + // This can be null if the delegate doesn't use its own buffer. + void (*FreeBufferHandle)(TfLiteContext* context, + struct TfLiteDelegate* delegate, + TfLiteBufferHandle* handle); + + // Bitmask flags. See the comments in `TfLiteDelegateFlags`. + int64_t flags; + + // The opaque delegate builder associated with this object. 
If set then the + // TF Lite runtime will give precedence to this field. E.g. instead of + // invoking 'Prepare' via the function pointer inside the 'TfLiteDelegate' + // object, the runtime will first check if the corresponding function + // pointer inside 'opaque_delegate_builder' is set and if so invoke that. + // + // If this field is non-null, then the 'Prepare' field (of the + // 'TfLiteDelegate') should be null. + struct TfLiteOpaqueDelegateBuilder* opaque_delegate_builder; +} TfLiteDelegate; + +// Build a 'null' delegate, with all the fields properly set to their default +// values. +TfLiteDelegate TfLiteDelegateCreate(void); + +// `TfLiteOpaqueDelegateBuilder` is used for constructing +// `TfLiteOpaqueDelegate`, see `TfLiteOpaqueDelegateCreate` below. Note: +// This struct is not ABI stable. +// +// For forward source compatibility `TfLiteOpaqueDelegateBuilder` objects should +// be brace-initialized, so that all fields (including any that might be added +// in the future) get zero-initialized. The purpose of each field is exactly +// the same as with `TfLiteDelegate`. +// +// WARNING: This is an experimental interface that is subject to change. +typedef struct TfLiteOpaqueDelegateBuilder { + // Data that delegate needs to identify itself. This data is owned by the + // delegate. The delegate is owned in the user code, so the delegate is + // responsible for deallocating this when it is destroyed. + void* data; + // Invoked by ModifyGraphWithDelegate. This prepare is called, giving the + // delegate a view of the current graph through TfLiteContext*. It typically + // will look at the nodes and call ReplaceNodeSubsetsWithDelegateKernels() + // to ask the TensorFlow lite runtime to create macro-nodes to represent + // delegated subgraphs of the original graph. 
+ TfLiteStatus (*Prepare)(TfLiteOpaqueContext* context, // NOLINT + TfLiteOpaqueDelegate* delegate, void* data); + // Copies the data from delegate buffer handle into raw memory of the given + // 'tensor'. Note that the delegate is allowed to allocate the raw bytes as + // long as it follows the rules for kTfLiteDynamic tensors, in which case this + // cannot be null. + TfLiteStatus (*CopyFromBufferHandle)( // NOLINT + TfLiteOpaqueContext* context, TfLiteOpaqueDelegate* delegate, void* data, + TfLiteBufferHandle buffer_handle, TfLiteOpaqueTensor* tensor); + // Copies the data from raw memory of the given 'tensor' to delegate buffer + // handle. This can be null if the delegate doesn't use its own buffer. + TfLiteStatus (*CopyToBufferHandle)( // NOLINT + TfLiteOpaqueContext* context, TfLiteOpaqueDelegate* delegate, void* data, + TfLiteBufferHandle buffer_handle, TfLiteOpaqueTensor* tensor); + // Frees the Delegate Buffer Handle. Note: This only frees the handle, but + // this doesn't release the underlying resource (e.g. textures). The + // resources are either owned by application layer or the delegate. + // This can be null if the delegate doesn't use its own buffer. + void (*FreeBufferHandle)(TfLiteOpaqueContext* context, // NOLINT + TfLiteOpaqueDelegate* delegate, void* data, + TfLiteBufferHandle* handle); + // Bitmask flags. See the comments in `TfLiteDelegateFlags`. + int64_t flags; +} TfLiteOpaqueDelegateBuilder; + +// Creates an opaque delegate and returns its address. The opaque delegate will +// behave according to the provided 'opaque_delegate_builder'. The lifetime of +// the objects pointed to by any of the fields within the +// 'opaque_delegate_builder' must outlive the returned +// 'TfLiteOpaqueDelegate' and any 'TfLiteInterpreter', +// 'TfLiteInterpreterOptions', 'tflite::Interpreter', or +// 'tflite::InterpreterBuilder' that the delegate is added to. The returned +// address should be passed to 'TfLiteOpaqueDelegateDelete' for deletion. 
If
+// 'opaque_delegate_builder' is a null pointer, then a null pointer will be
+// returned.
+TfLiteOpaqueDelegate* TfLiteOpaqueDelegateCreate(
+    const TfLiteOpaqueDelegateBuilder* opaque_delegate_builder);
+
+// Deletes the provided opaque 'delegate'. This function has no effect if the
+// 'delegate' is a null pointer.
+void TfLiteOpaqueDelegateDelete(TfLiteOpaqueDelegate* delegate);
+
+// Returns a pointer to the data associated with the provided opaque 'delegate'.
+//
+// A null pointer will be returned when:
+// - The 'delegate' is null.
+// - The 'data' field of the 'TfLiteOpaqueDelegateBuilder' used to construct the
+//   'delegate' was null.
+// - Or in case of any other error.
+// - The 'delegate' has been constructed via a 'TfLiteOpaqueDelegateBuilder',
+//   but the 'data' field of the 'TfLiteOpaqueDelegateBuilder' is null.
+//
+// The data_ field of 'delegate' will be returned if the
+// 'opaque_delegate_builder' field is null.
+void* TfLiteOpaqueDelegateGetData(const TfLiteOpaqueDelegate* delegate);
+
+#ifdef __cplusplus
+}  // extern "C"
+
+#include <utility>
+
+// --- TFLITE VARIANT TENSORS ----
+// Programming languages usually define "variant" as a type that can hold an
+// unbounded set of types. See std::any
+// (https://en.cppreference.com/w/cpp/utility/any) for a related standard
+// library construct. In tensorflow, variant tensors have a data member which is
+// an Object that is destructible and copy constructible.
+// Variant tensors are commonly used to represent non trivial data
+// semantics that don't fit into simple primitives, such as lists of tensors and
+// datasets. Additionally, they can facilitate containers for optimizing
+// memory movement of tensor data.
+//
+// The following set of classes define the variant tensor member for tflite.
+// They implement a type-erased container intended to be used behind the
+// `data.data : void*` member of `TfLiteTensor`s.
Runtime functions interact with
+// the variant member at the level of a `VariantData`, whereas kernels
+// operate with the full knowledge of the un-erased type. The `VariantData`
+// class provides abstract methods for destroying and copying `VariantData`.
+// Invoking these methods will dispatch to the erased type opaquely.
+// The contents of any object of type derived from `AbstractVariant` can be
+// written to `TfLiteTensor::data::data : void*` from kernels. If the runtime
+// were to copy such a tensor through `TfLiteTensorCopy`, the destination data
+// member will contain the result of invoking the erased type's copy
+// constructor. Similarly, for the runtime releasing tensors from memory, the
+// erased type's destructor will be invoked. There are a few caveats to consider
+// to use these safely, which we discuss below.
+//
+// EXAMPLE: READING VARIANT TENSORS
+// ```
+// // retrieve input with `type == kTfLiteVariant`
+// TfLiteTensor* input = ...
+// // must first static cast to `VariantData`, more on this below.
+// VariantData* vd_input = static_cast<VariantData*>(t->data.data);
+// CustomType* typed_input =
+//     static_cast<CustomType*>(vd_input);
+// // do custom work on `typed_input`...
+// ```
+//
+// EXAMPLE: WRITING VARIANT TENSORS
+// ```
+// TfLiteTensor* output = ...
+// // construct a new variant object behind the target tensor
+// TfLiteVariantRealloc<CustomType>(output, args...);
+// // again must static cast to `VariantData*` before writing to `void*`.
+// output->data.data = static_cast<VariantData*>(typed_output);
+// ```
+//
+// WHY STATIC CAST TO `VariantData*`
+// The Standard defines a `reinterpret_cast` from a derived type to its
+// parents as undefined behavior when the parent is a non-standard layout.
+// https://en.cppreference.com/w/cpp/language/reinterpret_cast (see bullet 5).
+// Due to the `VariantData` having virtual members it is indeed non-standard
+// layout, and any type derived from `VariantData` fails to be
+// "transparently-replaceable". I.e.
implicit cast from derived to base in this
+// case may adjust the pointer and by definition `reinterpret_cast` will not
+// adjust the pointer.
+// Thus, dereferencing a pointer of type `VariantData` which addresses
+// the first byte of an object of said derived type is UB unless it was first
+// implicitly or statically casted to a `VariantData`. Writing the object of
+// derived type directly to `void*` which is dereferenced as a `VariantData` is
+// then UB, and so the intermediate cast through `VariantData` must be enforced.
+// A good example of this issue is elucidated in the bottom code snippet
+// here: https://en.cppreference.com/w/cpp/utility/launder.
+class VariantData {
+ public:
+  // All variant objects must be able to be destroyed and copied.
+  virtual ~VariantData() = default;
+  // A "virtual copy-constructor". Often the destination tensor of a variant
+  // copy may have been previously allocated in a prior call to inference. We
+  // allow the copy to target the destination's buffer (`maybe_alloc`),
+  // for potential reuse and optimizations. `maybe_alloc` must be of the same
+  // underlying derived type. References to whatever object is at
+  // `maybe_alloc` may be invalidated.
+  virtual VariantData* CloneTo(VariantData* maybe_alloc) const = 0;
+};
+
+// Concrete implementations extend `AbstractVariantData` with CRTP.
+template <class ErasedDerived>
+class AbstractVariantData : public VariantData {
+ public:
+  VariantData* CloneTo(VariantData* maybe_alloc) const override {
+    if (maybe_alloc != nullptr) {
+      // If the output is still allocated, then its object may still be
+      // in its lifetime and the destructor must be called before re-using the
+      // buffer.
+      // This may actually have a non-negligible effect on performance if the
+      // destructor is complex. A future iteration may
+      // introduce copy or move assignment semantics, allowing for the
+      // underlying implementation to optimize for this case.
+      auto* derived = static_cast<ErasedDerived*>(maybe_alloc);
+      derived->~ErasedDerived();
+      return new (derived)
+          ErasedDerived(static_cast<const ErasedDerived&>(*this));
+    }
+    return new ErasedDerived(static_cast<const ErasedDerived&>(*this));
+  }
+
+ protected:
+  AbstractVariantData() = default;
+  AbstractVariantData(const AbstractVariantData&) = default;
+  AbstractVariantData(AbstractVariantData&&) = delete;
+};
+
+// Analogous to `TfLiteTensorRealloc` for allocation of tensors whose
+// data member points to an arbitrary C++ object. `VariantType` refers
+// to the erased type of said object and `VariantArgs` refers to
+// a list of argument types with which to construct a new `VariantType`.
+// `VariantArgs` must match a constructor of `VariantType`.
+template <class VariantType, class... VariantArgs>
+TfLiteStatus TfLiteTensorVariantRealloc(TfLiteTensor* t,
+                                        VariantArgs&&... args) {
+  if (t->type != kTfLiteVariant) return kTfLiteError;
+  VariantType* new_vd;
+  if (t->data.raw != nullptr) {
+    auto* target_vd = static_cast<VariantData*>(t->data.data);
+    target_vd->~VariantData();
+    // As above, we assume if `t` is already allocated then it was allocated
+    // with the same `VariantType` as templated.
+    new_vd = new (t->data.raw) VariantType(std::forward<VariantArgs>(args)...);
+  } else {
+    new_vd = new VariantType(std::forward<VariantArgs>(args)...);
+  }
+  t->data.data = static_cast<VariantData*>(new_vd);
+  t->allocation_type = kTfLiteVariantObject;
+  return kTfLiteOk;
+}
+
+#endif  // __cplusplus
+#endif  // TENSORFLOW_LITE_CORE_C_COMMON_H_
diff --git a/tensorflow/lite/core/macros.h b/tensorflow/lite/core/macros.h
new file mode 100644
index 0000000..d329ded
--- /dev/null
+++ b/tensorflow/lite/core/macros.h
@@ -0,0 +1,78 @@
+/* Copyright 2020 The TensorFlow Authors. All Rights Reserved.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ +// This provides utility macros and functions that are inherently platform +// specific or shared across runtime & converter. +#ifndef TENSORFLOW_LITE_CORE_MACROS_H_ +#define TENSORFLOW_LITE_CORE_MACROS_H_ + +#ifdef __has_builtin +#define TFLITE_HAS_BUILTIN(x) __has_builtin(x) +#else +#define TFLITE_HAS_BUILTIN(x) 0 +#endif + +#if (!defined(__NVCC__)) && (TFLITE_HAS_BUILTIN(__builtin_expect) || \ + (defined(__GNUC__) && __GNUC__ >= 3)) +#define TFLITE_EXPECT_FALSE(cond) __builtin_expect(cond, false) +#define TFLITE_EXPECT_TRUE(cond) __builtin_expect(!!(cond), true) +#else +#define TFLITE_EXPECT_FALSE(cond) (cond) +#define TFLITE_EXPECT_TRUE(cond) (cond) +#endif + +#ifdef _WIN32 +#define TFLITE_NOINLINE __declspec(noinline) +#else +#ifdef __has_attribute +#if __has_attribute(noinline) +#define TFLITE_NOINLINE __attribute__((noinline)) +#else +#define TFLITE_NOINLINE +#endif // __has_attribute(noinline) +#else +#define TFLITE_NOINLINE +#endif // __has_attribute +#endif // _WIN32 + +// Normally we'd use ABSL_HAVE_ATTRIBUTE_WEAK and ABSL_ATTRIBUTE_WEAK, but +// we avoid the absl dependency for binary size reasons. 
+#ifdef __has_attribute +#define TFLITE_HAS_ATTRIBUTE(x) __has_attribute(x) +#else +#define TFLITE_HAS_ATTRIBUTE(x) 0 +#endif + +#if (TFLITE_HAS_ATTRIBUTE(weak) || \ + (defined(__GNUC__) && !defined(__clang__))) && \ + !(defined(__llvm__) && defined(_WIN32)) && !defined(__MINGW32__) +#undef TFLITE_ATTRIBUTE_WEAK +#define TFLITE_ATTRIBUTE_WEAK __attribute__((weak)) +#define TFLITE_HAS_ATTRIBUTE_WEAK 1 +#else +#define TFLITE_ATTRIBUTE_WEAK +#define TFLITE_HAS_ATTRIBUTE_WEAK 0 +#endif + +#ifndef TF_LITE_STATIC_MEMORY +// maximum size of a valid flatbuffer +inline constexpr unsigned int flatbuffer_size_max = 2147483648; +// If none zero then the buffer is stored outside of the flatbuffers, string +inline constexpr char tflite_metadata_buffer_location[] = "buffer_location"; +// field for minimum runtime version, string +inline constexpr char tflite_metadata_min_runtime_version[] = + "min_runtime_version"; +#endif + +#endif // TENSORFLOW_LITE_CORE_MACROS_H_ diff --git a/tensorflow/lite/experimental/microfrontend/README.md b/tensorflow/lite/experimental/microfrontend/README.md new file mode 100644 index 0000000..a9ea13e --- /dev/null +++ b/tensorflow/lite/experimental/microfrontend/README.md @@ -0,0 +1,6 @@ +This directory contains the subset of functionality that is needed to run the +micro_speech example with TFLM. 
+ +The source of truth for the experimental microfrontend in TfLite is at: +https://github.com/tensorflow/tflite-micro/blob/main/tensorflow/lite/experimental/microfrontend + diff --git a/tensorflow/lite/experimental/microfrontend/lib/BUILD b/tensorflow/lite/experimental/microfrontend/lib/BUILD new file mode 100644 index 0000000..d42b16d --- /dev/null +++ b/tensorflow/lite/experimental/microfrontend/lib/BUILD @@ -0,0 +1,213 @@ +# Library for generating feature vectors from audio data +package( + default_visibility = ["//visibility:public"], + licenses = ["notice"], +) + +cc_library( + name = "bits", + hdrs = ["bits.h"], +) + +cc_library( + name = "kiss_fft_int16", + srcs = [ + "kiss_fft_int16.cc", + ], + hdrs = [ + "kiss_fft_common.h", + "kiss_fft_int16.h", + ], + deps = [ + "@kissfft//:kiss_fftr", + ], +) + +cc_library( + name = "fft", + srcs = [ + "fft.cc", + "fft_util.cc", + ], + hdrs = [ + "fft.h", + "fft_util.h", + ], + deps = [ + ":kiss_fft_int16", + ], +) + +cc_library( + name = "filterbank", + srcs = [ + "filterbank.c", + "filterbank_util.c", + ], + hdrs = [ + "filterbank.h", + "filterbank_util.h", + ], + deps = [ + ":bits", + ":fft", + ], +) + +cc_library( + name = "frontend", + srcs = [ + "frontend.c", + "frontend_util.c", + ], + hdrs = [ + "frontend.h", + "frontend_util.h", + ], + deps = [ + ":bits", + ":fft", + ":filterbank", + ":log_scale", + ":noise_reduction", + ":pcan_gain_control", + ":window", + ], +) + +cc_library( + name = "log_scale", + srcs = [ + "log_lut.c", + "log_scale.c", + "log_scale_util.c", + ], + hdrs = [ + "log_lut.h", + "log_scale.h", + "log_scale_util.h", + ], + deps = [ + ":bits", + ], +) + +cc_library( + name = "noise_reduction", + srcs = [ + "noise_reduction.c", + "noise_reduction_util.c", + ], + hdrs = [ + "noise_reduction.h", + "noise_reduction_util.h", + ], +) + +cc_library( + name = "pcan_gain_control", + srcs = [ + "pcan_gain_control.c", + "pcan_gain_control_util.c", + ], + hdrs = [ + "pcan_gain_control.h", + 
"pcan_gain_control_util.h", + ], + deps = [ + ":bits", + ], +) + +cc_library( + name = "window", + srcs = [ + "window.c", + "window_util.c", + ], + hdrs = [ + "window.h", + "window_util.h", + ], +) + +cc_test( + name = "fft_test", + srcs = ["fft_test.cc"], + deps = [ + ":fft", + "//tensorflow/lite/micro/testing:micro_test", + ], +) + +cc_test( + name = "filterbank_test", + srcs = ["filterbank_test.cc"], + # Setting copts for experimental code to [], but this code should be fixed + # to build with the default copts (micro_copts()) + copts = [], + deps = [ + ":filterbank", + "//tensorflow/lite/micro/testing:micro_test", + ], +) + +cc_test( + name = "frontend_test", + srcs = ["frontend_test.cc"], + # Setting copts for experimental code to [], but this code should be fixed + # to build with the default copts (micro_copts()) + copts = [], + deps = [ + ":frontend", + "//tensorflow/lite/micro/testing:micro_test", + ], +) + +cc_test( + name = "log_scale_test", + srcs = ["log_scale_test.cc"], + # Setting copts for experimental code to [], but this code should be fixed + # to build with the default copts (micro_copts()) + copts = [], + deps = [ + ":log_scale", + "//tensorflow/lite/micro/testing:micro_test", + ], +) + +cc_test( + name = "noise_reduction_test", + srcs = ["noise_reduction_test.cc"], + # Setting copts for experimental code to [], but this code should be fixed + # to build with the default copts (micro_copts()) + copts = [], + deps = [ + ":noise_reduction", + "//tensorflow/lite/micro/testing:micro_test", + ], +) + +cc_test( + name = "pcan_gain_control_test", + srcs = ["pcan_gain_control_test.cc"], + # Setting copts for experimental code to [], but this code should be fixed + # to build with the default copts (micro_copts()) + copts = [], + deps = [ + ":pcan_gain_control", + "//tensorflow/lite/micro/testing:micro_test", + ], +) + +cc_test( + name = "window_test", + srcs = ["window_test.cc"], + # Setting copts for experimental code to [], but this code should be 
fixed + # to build with the default copts (micro_copts()) + copts = [], + deps = [ + ":window", + "//tensorflow/lite/micro/testing:micro_test", + ], +) diff --git a/tensorflow/lite/experimental/microfrontend/lib/README.md b/tensorflow/lite/experimental/microfrontend/lib/README.md new file mode 100644 index 0000000..ba5e82c --- /dev/null +++ b/tensorflow/lite/experimental/microfrontend/lib/README.md @@ -0,0 +1,65 @@ +# Audio "frontend" library for feature generation + +A feature generation library (also called frontend) that receives raw audio +input, and produces filter banks (a vector of values). + +The raw audio input is expected to be 16-bit PCM features, with a configurable +sample rate. More specifically the audio signal goes through a pre-emphasis +filter (optionally); then gets sliced into (potentially overlapping) frames and +a window function is applied to each frame; afterwards, we do a Fourier +transform on each frame (or more specifically a Short-Time Fourier Transform) +and calculate the power spectrum; and subsequently compute the filter banks. + +By default the library is configured with a set of defaults to perform the +different processing tasks. This takes place with the frontend_util.c function: + +```c++ +void FrontendFillConfigWithDefaults(struct FrontendConfig* config) +``` + +A single invocation looks like: + +```c++ +struct FrontendConfig frontend_config; +FrontendFillConfigWithDefaults(&frontend_config); +int sample_rate = 16000; +FrontendPopulateState(&frontend_config, &frontend_state, sample_rate); +int16_t* audio_data = ; // PCM audio samples at 16KHz. +size_t audio_size = ; // Number of audio samples. +size_t num_samples_read; // How many samples were processed. +struct FrontendOutput output = + FrontendProcessSamples( + &frontend_state, audio_data, audio_size, &num_samples_read); +for (i = 0; i < output.size; ++i) { + printf("%d ", output.values[i]); // Print the feature vector. 
An example of how to use the frontend is provided in frontend_main.cc and its binary frontend_main. This example expects a path to a file containing `int16` PCM features at a sample rate of 16KHz, and upon execution will print out the coefficients according to the frontend default configuration.
+You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ +#ifndef TENSORFLOW_LITE_EXPERIMENTAL_MICROFRONTEND_LIB_BITS_H_ +#define TENSORFLOW_LITE_EXPERIMENTAL_MICROFRONTEND_LIB_BITS_H_ + +#ifdef __cplusplus +#include + +extern "C" { +#endif + +static inline int CountLeadingZeros32Slow(uint64_t n) { + int zeroes = 28; + if (n >> 16) zeroes -= 16, n >>= 16; + if (n >> 8) zeroes -= 8, n >>= 8; + if (n >> 4) zeroes -= 4, n >>= 4; + return "\4\3\2\2\1\1\1\1\0\0\0\0\0\0\0"[n] + zeroes; +} + +static inline int CountLeadingZeros32(uint32_t n) { +#if defined(_MSC_VER) + unsigned long result = 0; // NOLINT(runtime/int) + if (_BitScanReverse(&result, n)) { + return 31 - result; + } + return 32; +#elif defined(__GNUC__) + + // Handle 0 as a special case because __builtin_clz(0) is undefined. + if (n == 0) { + return 32; + } + return __builtin_clz(n); +#else + return CountLeadingZeros32Slow(n); +#endif +} + +static inline int MostSignificantBit32(uint32_t n) { + return 32 - CountLeadingZeros32(n); +} + +static inline int CountLeadingZeros64Slow(uint64_t n) { + int zeroes = 60; + if (n >> 32) zeroes -= 32, n >>= 32; + if (n >> 16) zeroes -= 16, n >>= 16; + if (n >> 8) zeroes -= 8, n >>= 8; + if (n >> 4) zeroes -= 4, n >>= 4; + return "\4\3\2\2\1\1\1\1\0\0\0\0\0\0\0"[n] + zeroes; +} + +static inline int CountLeadingZeros64(uint64_t n) { +#if defined(_MSC_VER) && defined(_M_X64) + // MSVC does not have __builtin_clzll. Use _BitScanReverse64. 
+ unsigned long result = 0; // NOLINT(runtime/int) + if (_BitScanReverse64(&result, n)) { + return 63 - result; + } + return 64; +#elif defined(_MSC_VER) + // MSVC does not have __builtin_clzll. Compose two calls to _BitScanReverse + unsigned long result = 0; // NOLINT(runtime/int) + if ((n >> 32) && _BitScanReverse(&result, n >> 32)) { + return 31 - result; + } + if (_BitScanReverse(&result, n)) { + return 63 - result; + } + return 64; +#elif defined(__GNUC__) + + // Handle 0 as a special case because __builtin_clzll(0) is undefined. + if (n == 0) { + return 64; + } + return __builtin_clzll(n); +#else + return CountLeadingZeros64Slow(n); +#endif +} + +static inline int MostSignificantBit64(uint64_t n) { + return 64 - CountLeadingZeros64(n); +} + +#ifdef __cplusplus +} // extern "C" +#endif + +#endif // TENSORFLOW_LITE_EXPERIMENTAL_MICROFRONTEND_LIB_BITS_H_ diff --git a/tensorflow/lite/experimental/microfrontend/lib/fft.cc b/tensorflow/lite/experimental/microfrontend/lib/fft.cc new file mode 100644 index 0000000..bcdd9cc --- /dev/null +++ b/tensorflow/lite/experimental/microfrontend/lib/fft.cc @@ -0,0 +1,52 @@ +/* Copyright 2018 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/ +#include "tensorflow/lite/experimental/microfrontend/lib/fft.h" + +#include + +#include "tensorflow/lite/experimental/microfrontend/lib/kiss_fft_int16.h" + +void FftCompute(struct FftState* state, const int16_t* input, + int input_scale_shift) { + const size_t input_size = state->input_size; + const size_t fft_size = state->fft_size; + + int16_t* fft_input = state->input; + // First, scale the input by the given shift. + size_t i; + for (i = 0; i < input_size; ++i) { + fft_input[i] = static_cast(static_cast(input[i]) + << input_scale_shift); + } + // Zero out whatever else remains in the top part of the input. + for (; i < fft_size; ++i) { + fft_input[i] = 0; + } + + // Apply the FFT. + kissfft_fixed16::kiss_fftr( + reinterpret_cast(state->scratch), + state->input, + reinterpret_cast(state->output)); +} + +void FftInit(struct FftState* state) { + // All the initialization is done in FftPopulateState() +} + +void FftReset(struct FftState* state) { + memset(state->input, 0, state->fft_size * sizeof(*state->input)); + memset(state->output, 0, (state->fft_size / 2 + 1) * sizeof(*state->output)); +} diff --git a/tensorflow/lite/experimental/microfrontend/lib/fft.h b/tensorflow/lite/experimental/microfrontend/lib/fft.h new file mode 100644 index 0000000..aaffa69 --- /dev/null +++ b/tensorflow/lite/experimental/microfrontend/lib/fft.h @@ -0,0 +1,50 @@ +/* Copyright 2018 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ +#ifndef TENSORFLOW_LITE_EXPERIMENTAL_MICROFRONTEND_LIB_FFT_H_ +#define TENSORFLOW_LITE_EXPERIMENTAL_MICROFRONTEND_LIB_FFT_H_ + +#include +#include + +#ifdef __cplusplus +extern "C" { +#endif + +struct complex_int16_t { + int16_t real; + int16_t imag; +}; + +struct FftState { + int16_t* input; + struct complex_int16_t* output; + size_t fft_size; + size_t input_size; + void* scratch; + size_t scratch_size; +}; + +void FftCompute(struct FftState* state, const int16_t* input, + int input_scale_shift); + +void FftInit(struct FftState* state); + +void FftReset(struct FftState* state); + +#ifdef __cplusplus +} // extern "C" +#endif + +#endif // TENSORFLOW_LITE_EXPERIMENTAL_MICROFRONTEND_LIB_FFT_H_ diff --git a/tensorflow/lite/experimental/microfrontend/lib/fft_io.c b/tensorflow/lite/experimental/microfrontend/lib/fft_io.c new file mode 100644 index 0000000..820221c --- /dev/null +++ b/tensorflow/lite/experimental/microfrontend/lib/fft_io.c @@ -0,0 +1,33 @@ +/* Copyright 2018 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/ +#include "tensorflow/lite/experimental/microfrontend/lib/fft_io.h" + +void FftWriteMemmapPreamble(FILE* fp, const struct FftState* state) { + fprintf(fp, "static int16_t fft_input[%zu];\n", state->fft_size); + fprintf(fp, "static struct complex_int16_t fft_output[%zu];\n", + state->fft_size / 2 + 1); + fprintf(fp, "static char fft_scratch[%zu];\n", state->scratch_size); + fprintf(fp, "\n"); +} + +void FftWriteMemmap(FILE* fp, const struct FftState* state, + const char* variable) { + fprintf(fp, "%s->input = fft_input;\n", variable); + fprintf(fp, "%s->output = fft_output;\n", variable); + fprintf(fp, "%s->fft_size = %zu;\n", variable, state->fft_size); + fprintf(fp, "%s->input_size = %zu;\n", variable, state->input_size); + fprintf(fp, "%s->scratch = fft_scratch;\n", variable); + fprintf(fp, "%s->scratch_size = %zu;\n", variable, state->scratch_size); +} diff --git a/tensorflow/lite/experimental/microfrontend/lib/fft_io.h b/tensorflow/lite/experimental/microfrontend/lib/fft_io.h new file mode 100644 index 0000000..7a59af6 --- /dev/null +++ b/tensorflow/lite/experimental/microfrontend/lib/fft_io.h @@ -0,0 +1,34 @@ +/* Copyright 2018 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/ +#ifndef TENSORFLOW_LITE_EXPERIMENTAL_MICROFRONTEND_LIB_FFT_IO_H_ +#define TENSORFLOW_LITE_EXPERIMENTAL_MICROFRONTEND_LIB_FFT_IO_H_ + +#include + +#include "tensorflow/lite/experimental/microfrontend/lib/fft.h" + +#ifdef __cplusplus +extern "C" { +#endif + +void FftWriteMemmapPreamble(FILE* fp, const struct FftState* state); +void FftWriteMemmap(FILE* fp, const struct FftState* state, + const char* variable); + +#ifdef __cplusplus +} // extern "C" +#endif + +#endif // TENSORFLOW_LITE_EXPERIMENTAL_MICROFRONTEND_LIB_FFT_IO_H_ diff --git a/tensorflow/lite/experimental/microfrontend/lib/fft_test.cc b/tensorflow/lite/experimental/microfrontend/lib/fft_test.cc new file mode 100644 index 0000000..cfca64c --- /dev/null +++ b/tensorflow/lite/experimental/microfrontend/lib/fft_test.cc @@ -0,0 +1,54 @@ +/* Copyright 2018 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/ +#include "tensorflow/lite/experimental/microfrontend/lib/fft.h" + +#include "tensorflow/lite/experimental/microfrontend/lib/fft_util.h" +#include "tensorflow/lite/micro/testing/micro_test.h" + +namespace { + +const int16_t kFakeWindow[] = { + 0, 1151, 0, -5944, 0, 13311, 0, -21448, 0, 28327, 0, -32256, 0, 32255, + 0, -28328, 0, 21447, 0, -13312, 0, 5943, 0, -1152, 0}; +const int kScaleShift = 0; + +} // namespace + +TF_LITE_MICRO_TESTS_BEGIN + +TF_LITE_MICRO_TEST(FftTest_CheckOutputValues) { + struct FftState state; + TF_LITE_MICRO_EXPECT( + FftPopulateState(&state, sizeof(kFakeWindow) / sizeof(kFakeWindow[0]))); + + FftInit(&state); + FftCompute(&state, kFakeWindow, kScaleShift); + + const struct complex_int16_t expected[] = { + {0, 0}, {-10, 9}, {-20, 0}, {-9, -10}, {0, 25}, {-119, 119}, + {-887, 0}, {3000, 3000}, {0, -6401}, {-3000, 3000}, {886, 0}, {118, 119}, + {0, 25}, {9, -10}, {19, 0}, {9, 9}, {0, 0}}; + TF_LITE_MICRO_EXPECT_EQ(state.fft_size / 2 + 1, + sizeof(expected) / sizeof(expected[0])); + unsigned int i; + for (i = 0; i <= state.fft_size / 2; ++i) { + TF_LITE_MICRO_EXPECT_EQ(state.output[i].real, expected[i].real); + TF_LITE_MICRO_EXPECT_EQ(state.output[i].imag, expected[i].imag); + } + + FftFreeStateContents(&state); +} + +TF_LITE_MICRO_TESTS_END diff --git a/tensorflow/lite/experimental/microfrontend/lib/fft_util.cc b/tensorflow/lite/experimental/microfrontend/lib/fft_util.cc new file mode 100644 index 0000000..ed3dc8f --- /dev/null +++ b/tensorflow/lite/experimental/microfrontend/lib/fft_util.cc @@ -0,0 +1,70 @@ +/* Copyright 2018 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. 
+You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ +#include "tensorflow/lite/experimental/microfrontend/lib/fft_util.h" + +#include + +#include "tensorflow/lite/experimental/microfrontend/lib/kiss_fft_int16.h" + +int FftPopulateState(struct FftState* state, size_t input_size) { + state->input_size = input_size; + state->fft_size = 1; + while (state->fft_size < state->input_size) { + state->fft_size <<= 1; + } + + state->input = reinterpret_cast( + malloc(state->fft_size * sizeof(*state->input))); + if (state->input == nullptr) { + fprintf(stderr, "Failed to alloc fft input buffer\n"); + return 0; + } + + state->output = reinterpret_cast( + malloc((state->fft_size / 2 + 1) * sizeof(*state->output) * 2)); + if (state->output == nullptr) { + fprintf(stderr, "Failed to alloc fft output buffer\n"); + return 0; + } + + // Ask kissfft how much memory it wants. 
+ size_t scratch_size = 0; + kissfft_fixed16::kiss_fftr_cfg kfft_cfg = kissfft_fixed16::kiss_fftr_alloc( + state->fft_size, 0, nullptr, &scratch_size); + if (kfft_cfg != nullptr) { + fprintf(stderr, "Kiss memory sizing failed.\n"); + return 0; + } + state->scratch = malloc(scratch_size); + if (state->scratch == nullptr) { + fprintf(stderr, "Failed to alloc fft scratch buffer\n"); + return 0; + } + state->scratch_size = scratch_size; + // Let kissfft configure the scratch space we just allocated + kfft_cfg = kissfft_fixed16::kiss_fftr_alloc(state->fft_size, 0, + state->scratch, &scratch_size); + if (kfft_cfg != state->scratch) { + fprintf(stderr, "Kiss memory preallocation strategy failed.\n"); + return 0; + } + return 1; +} + +void FftFreeStateContents(struct FftState* state) { + free(state->input); + free(state->output); + free(state->scratch); +} diff --git a/tensorflow/lite/experimental/microfrontend/lib/fft_util.h b/tensorflow/lite/experimental/microfrontend/lib/fft_util.h new file mode 100644 index 0000000..6a47130 --- /dev/null +++ b/tensorflow/lite/experimental/microfrontend/lib/fft_util.h @@ -0,0 +1,34 @@ +/* Copyright 2018 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/ +#ifndef TENSORFLOW_LITE_EXPERIMENTAL_MICROFRONTEND_LIB_FFT_UTIL_H_ +#define TENSORFLOW_LITE_EXPERIMENTAL_MICROFRONTEND_LIB_FFT_UTIL_H_ + +#include "tensorflow/lite/experimental/microfrontend/lib/fft.h" + +#ifdef __cplusplus +extern "C" { +#endif + +// Prepares and FFT for the given input size. +int FftPopulateState(struct FftState* state, size_t input_size); + +// Frees any allocated buffers. +void FftFreeStateContents(struct FftState* state); + +#ifdef __cplusplus +} // extern "C" +#endif + +#endif // TENSORFLOW_LITE_EXPERIMENTAL_MICROFRONTEND_LIB_FFT_UTIL_H_ diff --git a/tensorflow/lite/experimental/microfrontend/lib/filterbank.c b/tensorflow/lite/experimental/microfrontend/lib/filterbank.c new file mode 100644 index 0000000..80f8738 --- /dev/null +++ b/tensorflow/lite/experimental/microfrontend/lib/filterbank.c @@ -0,0 +1,134 @@ +/* Copyright 2018 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/ +#include "tensorflow/lite/experimental/microfrontend/lib/filterbank.h" + +#include + +#include "tensorflow/lite/experimental/microfrontend/lib/bits.h" + +void FilterbankConvertFftComplexToEnergy(struct FilterbankState* state, + struct complex_int16_t* fft_output, + int32_t* energy) { + const int end_index = state->end_index; + int i; + energy += state->start_index; + fft_output += state->start_index; + for (i = state->start_index; i < end_index; ++i) { + const int32_t real = fft_output->real; + const int32_t imag = fft_output->imag; + fft_output++; + const uint32_t mag_squared = (real * real) + (imag * imag); + *energy++ = mag_squared; + } +} + +void FilterbankAccumulateChannels(struct FilterbankState* state, + const int32_t* energy) { + uint64_t* work = state->work; + uint64_t weight_accumulator = 0; + uint64_t unweight_accumulator = 0; + + const int16_t* channel_frequency_starts = state->channel_frequency_starts; + const int16_t* channel_weight_starts = state->channel_weight_starts; + const int16_t* channel_widths = state->channel_widths; + + int num_channels_plus_1 = state->num_channels + 1; + int i; + for (i = 0; i < num_channels_plus_1; ++i) { + const int32_t* magnitudes = energy + *channel_frequency_starts++; + const int16_t* weights = state->weights + *channel_weight_starts; + const int16_t* unweights = state->unweights + *channel_weight_starts++; + const int width = *channel_widths++; + int j; + for (j = 0; j < width; ++j) { + weight_accumulator += *weights++ * ((uint64_t)*magnitudes); + unweight_accumulator += *unweights++ * ((uint64_t)*magnitudes); + ++magnitudes; + } + *work++ = weight_accumulator; + weight_accumulator = unweight_accumulator; + unweight_accumulator = 0; + } +} + +static uint16_t Sqrt32(uint32_t num) { + if (num == 0) { + return 0; + } + uint32_t res = 0; + int max_bit_number = 32 - MostSignificantBit32(num); + max_bit_number |= 1; + uint32_t bit = 1U << 
(31 - max_bit_number); + int iterations = (31 - max_bit_number) / 2 + 1; + while (iterations--) { + if (num >= res + bit) { + num -= res + bit; + res = (res >> 1U) + bit; + } else { + res >>= 1U; + } + bit >>= 2U; + } + // Do rounding - if we have the bits. + if (num > res && res != 0xFFFF) { + ++res; + } + return res; +} + +static uint32_t Sqrt64(uint64_t num) { + // Take a shortcut and just use 32 bit operations if the upper word is all + // clear. This will cause a slight off by one issue for numbers close to 2^32, + // but it probably isn't going to matter (and gives us a big performance win). + if ((num >> 32) == 0) { + return Sqrt32((uint32_t)num); + } + uint64_t res = 0; + int max_bit_number = 64 - MostSignificantBit64(num); + max_bit_number |= 1; + uint64_t bit = 1ULL << (63 - max_bit_number); + int iterations = (63 - max_bit_number) / 2 + 1; + while (iterations--) { + if (num >= res + bit) { + num -= res + bit; + res = (res >> 1U) + bit; + } else { + res >>= 1U; + } + bit >>= 2U; + } + // Do rounding - if we have the bits. + if (num > res && res != 0xFFFFFFFFLL) { + ++res; + } + return res; +} + +uint32_t* FilterbankSqrt(struct FilterbankState* state, int scale_down_shift) { + const int num_channels = state->num_channels; + const uint64_t* work = state->work + 1; + // Reuse the work buffer since we're fine clobbering it at this point to hold + // the output. 
+ uint32_t* output = (uint32_t*)state->work; + int i; + for (i = 0; i < num_channels; ++i) { + *output++ = Sqrt64(*work++) >> scale_down_shift; + } + return (uint32_t*)state->work; +} + +void FilterbankReset(struct FilterbankState* state) { + memset(state->work, 0, (state->num_channels + 1) * sizeof(*state->work)); +} diff --git a/tensorflow/lite/experimental/microfrontend/lib/filterbank.h b/tensorflow/lite/experimental/microfrontend/lib/filterbank.h new file mode 100644 index 0000000..1e6d388 --- /dev/null +++ b/tensorflow/lite/experimental/microfrontend/lib/filterbank.h @@ -0,0 +1,63 @@ +/* Copyright 2018 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ +#ifndef TENSORFLOW_LITE_EXPERIMENTAL_MICROFRONTEND_LIB_FILTERBANK_H_ +#define TENSORFLOW_LITE_EXPERIMENTAL_MICROFRONTEND_LIB_FILTERBANK_H_ + +#include +#include + +#include "tensorflow/lite/experimental/microfrontend/lib/fft.h" + +#define kFilterbankBits 12 + +#ifdef __cplusplus +extern "C" { +#endif + +struct FilterbankState { + int num_channels; + int start_index; + int end_index; + int16_t* channel_frequency_starts; + int16_t* channel_weight_starts; + int16_t* channel_widths; + int16_t* weights; + int16_t* unweights; + uint64_t* work; +}; + +// Converts the relevant complex values of an FFT output into energy (the +// square magnitude). 
+void FilterbankConvertFftComplexToEnergy(struct FilterbankState* state, + struct complex_int16_t* fft_output, + int32_t* energy); + +// Computes the mel-scale filterbank on the given energy array. Output is cached +// internally - to fetch it, you need to call FilterbankSqrt. +void FilterbankAccumulateChannels(struct FilterbankState* state, + const int32_t* energy); + +// Applies an integer square root to the 64 bit intermediate values of the +// filterbank, and returns a pointer to them. Memory will be invalidated the +// next time FilterbankAccumulateChannels is called. +uint32_t* FilterbankSqrt(struct FilterbankState* state, int scale_down_shift); + +void FilterbankReset(struct FilterbankState* state); + +#ifdef __cplusplus +} // extern "C" +#endif + +#endif // TENSORFLOW_LITE_EXPERIMENTAL_MICROFRONTEND_LIB_FILTERBANK_H_ diff --git a/tensorflow/lite/experimental/microfrontend/lib/filterbank_io.c b/tensorflow/lite/experimental/microfrontend/lib/filterbank_io.c new file mode 100644 index 0000000..6ce4c7c --- /dev/null +++ b/tensorflow/lite/experimental/microfrontend/lib/filterbank_io.c @@ -0,0 +1,67 @@ +/* Copyright 2018 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/ +#include "tensorflow/lite/experimental/microfrontend/lib/filterbank_io.h" + +static void PrintArray(FILE* fp, const char* name, const int16_t* values, + size_t size) { + fprintf(fp, "static int16_t filterbank_%s[] = {", name); + int i; + for (i = 0; i < size; ++i) { + fprintf(fp, "%d", values[i]); + if (i < size - 1) { + fprintf(fp, ", "); + } + } + fprintf(fp, "};\n"); +} + +void FilterbankWriteMemmapPreamble(FILE* fp, + const struct FilterbankState* state) { + const int num_channels_plus_1 = state->num_channels + 1; + + PrintArray(fp, "channel_frequency_starts", state->channel_frequency_starts, + num_channels_plus_1); + PrintArray(fp, "channel_weight_starts", state->channel_weight_starts, + num_channels_plus_1); + PrintArray(fp, "channel_widths", state->channel_widths, num_channels_plus_1); + int num_weights = 0; + int i; + for (i = 0; i < num_channels_plus_1; ++i) { + num_weights += state->channel_widths[i]; + } + PrintArray(fp, "weights", state->weights, num_weights); + PrintArray(fp, "unweights", state->unweights, num_weights); + + fprintf(fp, "static uint64_t filterbank_work[%d];\n", num_channels_plus_1); + fprintf(fp, "\n"); +} + +void FilterbankWriteMemmap(FILE* fp, const struct FilterbankState* state, + const char* variable) { + fprintf(fp, "%s->num_channels = %d;\n", variable, state->num_channels); + fprintf(fp, "%s->start_index = %d;\n", variable, state->start_index); + fprintf(fp, "%s->end_index = %d;\n", variable, state->end_index); + + fprintf( + fp, + "%s->channel_frequency_starts = filterbank_channel_frequency_starts;\n", + variable); + fprintf(fp, "%s->channel_weight_starts = filterbank_channel_weight_starts;\n", + variable); + fprintf(fp, "%s->channel_widths = filterbank_channel_widths;\n", variable); + fprintf(fp, "%s->weights = filterbank_weights;\n", variable); + fprintf(fp, "%s->unweights = filterbank_unweights;\n", variable); + fprintf(fp, "%s->work = 
filterbank_work;\n", variable); +} diff --git a/tensorflow/lite/experimental/microfrontend/lib/filterbank_io.h b/tensorflow/lite/experimental/microfrontend/lib/filterbank_io.h new file mode 100644 index 0000000..5fc9684 --- /dev/null +++ b/tensorflow/lite/experimental/microfrontend/lib/filterbank_io.h @@ -0,0 +1,35 @@ +/* Copyright 2018 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ +#ifndef TENSORFLOW_LITE_EXPERIMENTAL_MICROFRONTEND_LIB_FILTERBANK_IO_H_ +#define TENSORFLOW_LITE_EXPERIMENTAL_MICROFRONTEND_LIB_FILTERBANK_IO_H_ + +#include + +#include "tensorflow/lite/experimental/microfrontend/lib/filterbank.h" + +#ifdef __cplusplus +extern "C" { +#endif + +void FilterbankWriteMemmapPreamble(FILE* fp, + const struct FilterbankState* state); +void FilterbankWriteMemmap(FILE* fp, const struct FilterbankState* state, + const char* variable); + +#ifdef __cplusplus +} // extern "C" +#endif + +#endif // TENSORFLOW_LITE_EXPERIMENTAL_MICROFRONTEND_LIB_FILTERBANK_IO_H_ diff --git a/tensorflow/lite/experimental/microfrontend/lib/filterbank_test.cc b/tensorflow/lite/experimental/microfrontend/lib/filterbank_test.cc new file mode 100644 index 0000000..cb5d3d8 --- /dev/null +++ b/tensorflow/lite/experimental/microfrontend/lib/filterbank_test.cc @@ -0,0 +1,219 @@ +/* Copyright 2018 The TensorFlow Authors. All Rights Reserved. 
+ +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ +#include "tensorflow/lite/experimental/microfrontend/lib/filterbank.h" + +#include + +#include "tensorflow/lite/experimental/microfrontend/lib/filterbank_util.h" +#include "tensorflow/lite/micro/testing/micro_test.h" + +namespace { + +const int kSampleRate = 1000; +const int kSpectrumSize = 17; +const int kStartIndex = 1; +const int kEndIndex = 15; +const int32_t kEnergy[] = {-1, 181, 400, 181, 625, 28322, + 786769, 18000000, 40972801, 18000000, 784996, 28085, + 625, 181, 361, -1, -1}; +const uint64_t kWork[] = {1835887, 61162970173, 258694800000}; +const int kScaleShift = 0; + +// Test filterbank generation using scaled-down defaults. 
+class FilterbankTestConfig { + public: + FilterbankTestConfig() { + config_.num_channels = 2; + config_.lower_band_limit = 8.0; + config_.upper_band_limit = 450.0; + } + + struct FilterbankConfig config_; +}; + +} // namespace + +TF_LITE_MICRO_TESTS_BEGIN + +TF_LITE_MICRO_TEST(FilterbankTest_CheckStartIndex) { + FilterbankTestConfig config; + struct FilterbankState state; + TF_LITE_MICRO_EXPECT(FilterbankPopulateState(&config.config_, &state, + kSampleRate, kSpectrumSize)); + + TF_LITE_MICRO_EXPECT_EQ(state.start_index, kStartIndex); + + FilterbankFreeStateContents(&state); +} + +TF_LITE_MICRO_TEST(FilterbankTest_CheckEndIndex) { + FilterbankTestConfig config; + struct FilterbankState state; + TF_LITE_MICRO_EXPECT(FilterbankPopulateState(&config.config_, &state, + kSampleRate, kSpectrumSize)); + + TF_LITE_MICRO_EXPECT_EQ(state.end_index, kEndIndex); + + FilterbankFreeStateContents(&state); +} + +TF_LITE_MICRO_TEST(FilterbankTest_CheckChannelFrequencyStarts) { + FilterbankTestConfig config; + struct FilterbankState state; + TF_LITE_MICRO_EXPECT(FilterbankPopulateState(&config.config_, &state, + kSampleRate, kSpectrumSize)); + + const int16_t expected[] = {0, 4, 8}; + TF_LITE_MICRO_EXPECT_EQ(state.num_channels + 1, + sizeof(expected) / sizeof(expected[0])); + int i; + for (i = 0; i <= state.num_channels; ++i) { + TF_LITE_MICRO_EXPECT_EQ(state.channel_frequency_starts[i], expected[i]); + } + + FilterbankFreeStateContents(&state); +} + +TF_LITE_MICRO_TEST(FilterbankTest_CheckChannelWeightStarts) { + FilterbankTestConfig config; + struct FilterbankState state; + TF_LITE_MICRO_EXPECT(FilterbankPopulateState(&config.config_, &state, + kSampleRate, kSpectrumSize)); + + const int16_t expected[] = {0, 8, 16}; + TF_LITE_MICRO_EXPECT_EQ(state.num_channels + 1, + sizeof(expected) / sizeof(expected[0])); + int i; + for (i = 0; i <= state.num_channels; ++i) { + TF_LITE_MICRO_EXPECT_EQ(state.channel_weight_starts[i], expected[i]); + } + + FilterbankFreeStateContents(&state); +} + 
// channel_widths are padded up to the channel block size (here all 8).
TF_LITE_MICRO_TEST(FilterbankTest_CheckChannelWidths) {
  FilterbankTestConfig config;
  struct FilterbankState state;
  TF_LITE_MICRO_EXPECT(FilterbankPopulateState(&config.config_, &state,
                                               kSampleRate, kSpectrumSize));

  const int16_t expected[] = {8, 8, 8};
  TF_LITE_MICRO_EXPECT_EQ(state.num_channels + 1,
                          sizeof(expected) / sizeof(expected[0]));
  int i;
  for (i = 0; i <= state.num_channels; ++i) {
    TF_LITE_MICRO_EXPECT_EQ(state.channel_widths[i], expected[i]);
  }

  FilterbankFreeStateContents(&state);
}

// Golden fixed-point weights for the whole (padded) weight table. The table
// length equals the last channel's weight start plus its width.
TF_LITE_MICRO_TEST(FilterbankTest_CheckWeights) {
  FilterbankTestConfig config;
  struct FilterbankState state;
  TF_LITE_MICRO_EXPECT(FilterbankPopulateState(&config.config_, &state,
                                               kSampleRate, kSpectrumSize));

  const int16_t expected[] = {0, 3277, 2217, 1200, 222,  0,    0,   0,
                              0, 3376, 2468, 1591, 744,  0,    0,   0,
                              0, 4020, 3226, 2456, 1708, 983,  277, 0};
  TF_LITE_MICRO_EXPECT_EQ(state.channel_weight_starts[state.num_channels] +
                              state.channel_widths[state.num_channels],
                          sizeof(expected) / sizeof(expected[0]));
  int i;
  for (i = 0; i < sizeof(expected) / sizeof(expected[0]); ++i) {
    TF_LITE_MICRO_EXPECT_EQ(state.weights[i], expected[i]);
  }

  FilterbankFreeStateContents(&state);
}

// Golden unweights: the complementary (1 - w) values of the table above.
TF_LITE_MICRO_TEST(FilterbankTest_CheckUnweights) {
  FilterbankTestConfig config;
  struct FilterbankState state;
  TF_LITE_MICRO_EXPECT(FilterbankPopulateState(&config.config_, &state,
                                               kSampleRate, kSpectrumSize));

  const int16_t expected[] = {0, 819, 1879, 2896, 3874, 0,    0,    0,
                              0, 720, 1628, 2505, 3352, 0,    0,    0,
                              0, 76,  870,  1640, 2388, 3113, 3819, 0};
  TF_LITE_MICRO_EXPECT_EQ(state.channel_weight_starts[state.num_channels] +
                              state.channel_widths[state.num_channels],
                          sizeof(expected) / sizeof(expected[0]));
  int i;
  for (i = 0; i < sizeof(expected) / sizeof(expected[0]); ++i) {
    TF_LITE_MICRO_EXPECT_EQ(state.unweights[i], expected[i]);
  }

  FilterbankFreeStateContents(&state);
}
+TF_LITE_MICRO_TEST(FilterbankTest_CheckConvertFftComplexToEnergy) { + struct FilterbankState state; + state.start_index = kStartIndex; + state.end_index = kEndIndex; + + struct complex_int16_t fake_fft[] = { + {0, 0}, {-10, 9}, {-20, 0}, {-9, -10}, {0, 25}, {-119, 119}, + {-887, 0}, {3000, 3000}, {0, -6401}, {-3000, 3000}, {886, 0}, {118, 119}, + {0, 25}, {9, -10}, {19, 0}, {9, 9}, {0, 0}}; + int32_t* energy = reinterpret_cast(fake_fft); + FilterbankConvertFftComplexToEnergy(&state, fake_fft, energy); + + int i; + for (i = state.start_index; i < state.end_index; ++i) { + TF_LITE_MICRO_EXPECT_EQ(energy[i], kEnergy[i]); + } +} + +TF_LITE_MICRO_TEST(FilterbankTest_CheckAccumulateChannels) { + FilterbankTestConfig config; + struct FilterbankState state; + TF_LITE_MICRO_EXPECT(FilterbankPopulateState(&config.config_, &state, + kSampleRate, kSpectrumSize)); + + FilterbankAccumulateChannels(&state, kEnergy); + + TF_LITE_MICRO_EXPECT_EQ(state.num_channels + 1, + sizeof(kWork) / sizeof(kWork[0])); + int i; + for (i = 0; i <= state.num_channels; ++i) { + TF_LITE_MICRO_EXPECT_EQ(state.work[i], kWork[i]); + } + + FilterbankFreeStateContents(&state); +} + +TF_LITE_MICRO_TEST(FilterbankTest_CheckSqrt) { + FilterbankTestConfig config; + struct FilterbankState state; + TF_LITE_MICRO_EXPECT(FilterbankPopulateState(&config.config_, &state, + kSampleRate, kSpectrumSize)); + std::memcpy(state.work, kWork, sizeof(kWork)); + + uint32_t* scaled_filterbank = FilterbankSqrt(&state, kScaleShift); + + const uint32_t expected[] = {247311, 508620}; + TF_LITE_MICRO_EXPECT_EQ(state.num_channels, + sizeof(expected) / sizeof(expected[0])); + int i; + for (i = 0; i < state.num_channels; ++i) { + TF_LITE_MICRO_EXPECT_EQ(scaled_filterbank[i], expected[i]); + } + + FilterbankFreeStateContents(&state); +} + +TF_LITE_MICRO_TESTS_END diff --git a/tensorflow/lite/experimental/microfrontend/lib/filterbank_util.c b/tensorflow/lite/experimental/microfrontend/lib/filterbank_util.c new file mode 100644 index 
/* Copyright 2018 The TensorFlow Authors. All Rights Reserved.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==============================================================================*/
#include "tensorflow/lite/experimental/microfrontend/lib/filterbank_util.h"

/* NOTE(review): the system-include targets were garbled in this copy of the
   file; reconstructed from usage (assert, log1p/floor, fprintf, malloc/free).
   Confirm against the original source. */
#include <assert.h>
#include <math.h>
#include <stdio.h>
#include <stdlib.h>

/* Byte alignment required for the first weight of each channel, and the
   granularity (in weights) that every channel's width is padded up to, so the
   accumulation code can work in fixed-size blocks. */
#define kFilterbankIndexAlignment 4
#define kFilterbankChannelBlockSize 4

/* Fills the config with defaults: 32 channels spanning 125 Hz - 7.5 kHz. */
void FilterbankFillConfigWithDefaults(struct FilterbankConfig* config) {
  config->num_channels = 32;
  config->lower_band_limit = 125.0f;
  config->upper_band_limit = 7500.0f;
  config->output_scale_shift = 7;
}

/* Converts a frequency in Hz to the mel scale. */
static float FreqToMel(float freq) { return 1127.0 * log1p(freq / 700.0); }

/* Computes num_channels mel-spaced center frequencies (in mels), strictly
   above lower_frequency_limit and ending at upper_frequency_limit. */
static void CalculateCenterFrequencies(const int num_channels,
                                       const float lower_frequency_limit,
                                       const float upper_frequency_limit,
                                       float* center_frequencies) {
  assert(lower_frequency_limit >= 0.0f);
  assert(upper_frequency_limit > lower_frequency_limit);

  const float mel_low = FreqToMel(lower_frequency_limit);
  const float mel_hi = FreqToMel(upper_frequency_limit);
  const float mel_span = mel_hi - mel_low;
  const float mel_spacing = mel_span / ((float)num_channels);
  int i;
  for (i = 0; i < num_channels; ++i) {
    center_frequencies[i] = mel_low + (mel_spacing * (i + 1));
  }
}

/* Rounds a weight in [0, 1] to Q(kFilterbankBits) fixed point; *unweight gets
   the complementary value (1 - float_weight) in the same format. */
static void QuantizeFilterbankWeights(const float float_weight, int16_t* weight,
                                      int16_t* unweight) {
  *weight = floor(float_weight * (1 << kFilterbankBits) + 0.5);
  *unweight = floor((1.0 - float_weight) * (1 << kFilterbankBits) + 0.5);
}

/* Allocates and fills every buffer in `state` that is derived from `config`.
   Returns 1 on success, 0 on failure (with a message printed to stderr). */
int FilterbankPopulateState(const struct FilterbankConfig* config,
                            struct FilterbankState* state, int sample_rate,
                            int spectrum_size) {
  state->num_channels = config->num_channels;
  const int num_channels_plus_1 = config->num_channels + 1;

  // How should we align things to index counts given the byte alignment?
  const int index_alignment =
      (kFilterbankIndexAlignment < sizeof(int16_t)
           ? 1
           : kFilterbankIndexAlignment / sizeof(int16_t));

  state->channel_frequency_starts =
      malloc(num_channels_plus_1 * sizeof(*state->channel_frequency_starts));
  state->channel_weight_starts =
      malloc(num_channels_plus_1 * sizeof(*state->channel_weight_starts));
  state->channel_widths =
      malloc(num_channels_plus_1 * sizeof(*state->channel_widths));
  state->work = malloc(num_channels_plus_1 * sizeof(*state->work));

  // Scratch buffers, freed before every return path below.
  float* center_mel_freqs =
      malloc(num_channels_plus_1 * sizeof(*center_mel_freqs));
  int16_t* actual_channel_starts =
      malloc(num_channels_plus_1 * sizeof(*actual_channel_starts));
  int16_t* actual_channel_widths =
      malloc(num_channels_plus_1 * sizeof(*actual_channel_widths));

  if (state->channel_frequency_starts == NULL ||
      state->channel_weight_starts == NULL || state->channel_widths == NULL ||
      center_mel_freqs == NULL || actual_channel_starts == NULL ||
      actual_channel_widths == NULL) {
    free(center_mel_freqs);
    free(actual_channel_starts);
    free(actual_channel_widths);
    fprintf(stderr, "Failed to allocate channel buffers\n");
    return 0;
  }

  CalculateCenterFrequencies(num_channels_plus_1, config->lower_band_limit,
                             config->upper_band_limit, center_mel_freqs);

  // Always exclude DC.
  const float hz_per_sbin = 0.5 * sample_rate / ((float)spectrum_size - 1);
  // Rounds the lower band limit to the nearest bin and skips bin 0 (DC).
  state->start_index = 1.5 + config->lower_band_limit / hz_per_sbin;
  state->end_index = 0;  // Initialized to zero here, but actually set below.

  // For each channel, we need to figure out what frequencies belong to it, and
  // how much padding we need to add so that we can efficiently multiply the
  // weights and unweights for accumulation. To simplify the multiplication
  // logic, all channels will have some multiplication to do (even if there are
  // no frequencies that accumulate to that channel) - they will be directed to
  // a set of zero weights.
  int chan_freq_index_start = state->start_index;
  int weight_index_start = 0;
  int needs_zeros = 0;

  int chan;
  for (chan = 0; chan < num_channels_plus_1; ++chan) {
    // Keep jumping frequencies until we overshoot the bound on this channel.
    int freq_index = chan_freq_index_start;
    while (FreqToMel((freq_index)*hz_per_sbin) <= center_mel_freqs[chan]) {
      ++freq_index;
    }

    const int width = freq_index - chan_freq_index_start;
    actual_channel_starts[chan] = chan_freq_index_start;
    actual_channel_widths[chan] = width;

    if (width == 0) {
      // This channel doesn't actually get anything from the frequencies, it's
      // always zero. We need then to insert some 'zero' weights into the
      // output, and just redirect this channel to do a single multiplication at
      // this point. For simplicity, the zeros are placed at the beginning of
      // the weights arrays, so we have to go and update all the other
      // weight_starts to reflect this shift (but only once).
      state->channel_frequency_starts[chan] = 0;
      state->channel_weight_starts[chan] = 0;
      state->channel_widths[chan] = kFilterbankChannelBlockSize;
      if (!needs_zeros) {
        needs_zeros = 1;
        int j;
        for (j = 0; j < chan; ++j) {
          state->channel_weight_starts[j] += kFilterbankChannelBlockSize;
        }
        weight_index_start += kFilterbankChannelBlockSize;
      }
    } else {
      // How far back do we need to go to ensure that we have the proper
      // alignment?
      const int aligned_start =
          (chan_freq_index_start / index_alignment) * index_alignment;
      const int aligned_width = (chan_freq_index_start - aligned_start + width);
      const int padded_width =
          (((aligned_width - 1) / kFilterbankChannelBlockSize) + 1) *
          kFilterbankChannelBlockSize;

      state->channel_frequency_starts[chan] = aligned_start;
      state->channel_weight_starts[chan] = weight_index_start;
      state->channel_widths[chan] = padded_width;
      weight_index_start += padded_width;
    }
    chan_freq_index_start = freq_index;
  }

  // Allocate the two arrays to store the weights - weight_index_start contains
  // the index of what would be the next set of weights that we would need to
  // add, so that's how many weights we need to allocate.
  state->weights = calloc(weight_index_start, sizeof(*state->weights));
  state->unweights = calloc(weight_index_start, sizeof(*state->unweights));

  // If the alloc failed, we also need to nuke the arrays.
  if (state->weights == NULL || state->unweights == NULL) {
    free(center_mel_freqs);
    free(actual_channel_starts);
    free(actual_channel_widths);
    fprintf(stderr, "Failed to allocate weights or unweights\n");
    return 0;
  }

  // Next pass, compute all the weights. Since everything has been memset to
  // zero, we only need to fill in the weights that correspond to some frequency
  // for a channel.
  const float mel_low = FreqToMel(config->lower_band_limit);
  for (chan = 0; chan < num_channels_plus_1; ++chan) {
    int frequency = actual_channel_starts[chan];
    const int num_frequencies = actual_channel_widths[chan];
    const int frequency_offset =
        frequency - state->channel_frequency_starts[chan];
    const int weight_start = state->channel_weight_starts[chan];
    const float denom_val = (chan == 0) ? mel_low : center_mel_freqs[chan - 1];

    int j;
    for (j = 0; j < num_frequencies; ++j, ++frequency) {
      // Linear interpolation in mel space between this channel's center and
      // the previous one's (or the lower band limit for channel 0).
      const float weight =
          (center_mel_freqs[chan] - FreqToMel(frequency * hz_per_sbin)) /
          (center_mel_freqs[chan] - denom_val);

      // Make the float into an integer for the weights (and unweights).
      const int weight_index = weight_start + frequency_offset + j;
      QuantizeFilterbankWeights(weight, state->weights + weight_index,
                                state->unweights + weight_index);
    }
    if (frequency > state->end_index) {
      state->end_index = frequency;
    }
  }

  free(center_mel_freqs);
  free(actual_channel_starts);
  free(actual_channel_widths);
  if (state->end_index >= spectrum_size) {
    fprintf(stderr, "Filterbank end_index is above spectrum size.\n");
    return 0;
  }
  return 1;
}

/* Releases the buffers allocated by FilterbankPopulateState. */
void FilterbankFreeStateContents(struct FilterbankState* state) {
  free(state->channel_frequency_starts);
  free(state->channel_weight_starts);
  free(state->channel_widths);
  free(state->weights);
  free(state->unweights);
  free(state->work);
}
/* Copyright 2018 The TensorFlow Authors. All Rights Reserved.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==============================================================================*/
#ifndef TENSORFLOW_LITE_EXPERIMENTAL_MICROFRONTEND_LIB_FILTERBANK_UTIL_H_
#define TENSORFLOW_LITE_EXPERIMENTAL_MICROFRONTEND_LIB_FILTERBANK_UTIL_H_

#include "tensorflow/lite/experimental/microfrontend/lib/filterbank.h"

#ifdef __cplusplus
extern "C" {
#endif

struct FilterbankConfig {
  // Number of frequency channel buckets for the filterbank.
  int num_channels;
  // Maximum frequency (Hz) to include.
  float upper_band_limit;
  // Minimum frequency (Hz) to include.
  float lower_band_limit;
  // Unused by this module (defaulted to 7 by
  // FilterbankFillConfigWithDefaults but never read here).
  int output_scale_shift;
};

// Fills the config with "sane" default values.
void FilterbankFillConfigWithDefaults(struct FilterbankConfig* config);

// Allocates and fills all filterbank buffers derived from the config.
// Returns 1 on success, 0 on failure (an error is printed to stderr).
int FilterbankPopulateState(const struct FilterbankConfig* config,
                            struct FilterbankState* state, int sample_rate,
                            int spectrum_size);

// Frees any allocated buffers.
void FilterbankFreeStateContents(struct FilterbankState* state);

#ifdef __cplusplus
}  // extern "C"
#endif

#endif  // TENSORFLOW_LITE_EXPERIMENTAL_MICROFRONTEND_LIB_FILTERBANK_UTIL_H_
/* Copyright 2018 The TensorFlow Authors. All Rights Reserved.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==============================================================================*/
#include "tensorflow/lite/experimental/microfrontend/lib/frontend.h"

#include "tensorflow/lite/experimental/microfrontend/lib/bits.h"

// Runs one pipeline pass: window -> FFT -> filterbank energy/sqrt -> noise
// reduction -> (optional) PCAN gain control -> log scaling. Consumes up to
// num_samples input samples (the count actually consumed is returned through
// num_samples_read) and returns the feature vector for the completed window,
// or a {NULL, 0} output if a full window has not yet been accumulated.
struct FrontendOutput FrontendProcessSamples(struct FrontendState* state,
                                             const int16_t* samples,
                                             size_t num_samples,
                                             size_t* num_samples_read) {
  struct FrontendOutput output;
  output.values = NULL;
  output.size = 0;

  // Try to apply the window - if it fails, return and wait for more data.
  if (!WindowProcessSamples(&state->window, samples, num_samples,
                            num_samples_read)) {
    return output;
  }

  // Apply the FFT to the window's output (and scale it so that the fixed point
  // FFT can have as much resolution as possible).
  int input_shift =
      15 - MostSignificantBit32(state->window.max_abs_output_value);
  FftCompute(&state->fft, state->window.output, input_shift);

  // We can re-use the fft's output buffer to hold the energy.
  int32_t* energy = (int32_t*)state->fft.output;

  FilterbankConvertFftComplexToEnergy(&state->filterbank, state->fft.output,
                                      energy);

  FilterbankAccumulateChannels(&state->filterbank, energy);
  uint32_t* scaled_filterbank = FilterbankSqrt(&state->filterbank, input_shift);

  // Apply noise reduction.
  NoiseReductionApply(&state->noise_reduction, scaled_filterbank);

  if (state->pcan_gain_control.enable_pcan) {
    PcanGainControlApply(&state->pcan_gain_control, scaled_filterbank);
  }

  // Apply the log and scale.
  int correction_bits =
      MostSignificantBit32(state->fft.fft_size) - 1 - (kFilterbankBits / 2);
  uint16_t* logged_filterbank =
      LogScaleApply(&state->log_scale, scaled_filterbank,
                    state->filterbank.num_channels, correction_bits);

  output.size = state->filterbank.num_channels;
  output.values = logged_filterbank;
  return output;
}

// Resets the stateful pipeline stages between utterances.
// NOTE(review): pcan_gain_control and log_scale are not reset here -
// presumably they carry no per-utterance state; confirm against their impls.
void FrontendReset(struct FrontendState* state) {
  WindowReset(&state->window);
  FftReset(&state->fft);
  FilterbankReset(&state->filterbank);
  NoiseReductionReset(&state->noise_reduction);
}
/* Copyright 2018 The TensorFlow Authors. All Rights Reserved.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==============================================================================*/
#ifndef TENSORFLOW_LITE_EXPERIMENTAL_MICROFRONTEND_LIB_FRONTEND_H_
#define TENSORFLOW_LITE_EXPERIMENTAL_MICROFRONTEND_LIB_FRONTEND_H_

// NOTE(review): the two system-include targets were garbled in this copy;
// reconstructed for int16_t and size_t - confirm against the original source.
#include <stdint.h>
#include <stddef.h>

#include "tensorflow/lite/experimental/microfrontend/lib/fft.h"
#include "tensorflow/lite/experimental/microfrontend/lib/filterbank.h"
#include "tensorflow/lite/experimental/microfrontend/lib/log_scale.h"
#include "tensorflow/lite/experimental/microfrontend/lib/noise_reduction.h"
#include "tensorflow/lite/experimental/microfrontend/lib/pcan_gain_control.h"
#include "tensorflow/lite/experimental/microfrontend/lib/window.h"

#ifdef __cplusplus
extern "C" {
#endif

// Aggregate state for the whole feature-generation pipeline; one sub-state
// per stage, in processing order.
struct FrontendState {
  struct WindowState window;
  struct FftState fft;
  struct FilterbankState filterbank;
  struct NoiseReductionState noise_reduction;
  struct PcanGainControlState pcan_gain_control;
  struct LogScaleState log_scale;
};

// One feature vector: `size` values, or {NULL, 0} when no window was ready.
struct FrontendOutput {
  const uint16_t* values;
  size_t size;
};

// Main entry point to processing frontend samples. Updates num_samples_read to
// contain the number of samples that have been consumed from the input array.
// Returns a struct containing the generated output. If not enough samples were
// added to generate a feature vector, the returned size will be 0 and the
// values pointer will be NULL. Note that the output pointer will be invalidated
// as soon as FrontendProcessSamples is called again, so copy the contents
// elsewhere if you need to use them later.
struct FrontendOutput FrontendProcessSamples(struct FrontendState* state,
                                             const int16_t* samples,
                                             size_t num_samples,
                                             size_t* num_samples_read);

void FrontendReset(struct FrontendState* state);

#ifdef __cplusplus
}  // extern "C"
#endif

#endif  // TENSORFLOW_LITE_EXPERIMENTAL_MICROFRONTEND_LIB_FRONTEND_H_
/* Copyright 2018 The TensorFlow Authors. All Rights Reserved.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==============================================================================*/
#include "tensorflow/lite/experimental/microfrontend/lib/frontend_io.h"

// NOTE(review): system-include target garbled in this copy; stdio.h is
// required for FILE/fopen/fprintf below.
#include <stdio.h>

#include "tensorflow/lite/experimental/microfrontend/lib/fft_io.h"
#include "tensorflow/lite/experimental/microfrontend/lib/filterbank_io.h"
#include "tensorflow/lite/experimental/microfrontend/lib/log_scale_io.h"
#include "tensorflow/lite/experimental/microfrontend/lib/noise_reduction_io.h"
#include "tensorflow/lite/experimental/microfrontend/lib/window_io.h"

// Writes a C header/source pair that bakes `state` into static data exposed
// through GetFrontendStateMemmap(). Returns 1 on success, 0 on failure.
int WriteFrontendStateMemmap(const char* header, const char* source,
                             const struct FrontendState* state) {
  // Write a header that just has our init function.
  FILE* fp = fopen(header, "w");
  if (!fp) {
    fprintf(stderr, "Failed to open header '%s' for write\n", header);
    return 0;
  }
  fprintf(fp, "#ifndef FRONTEND_STATE_MEMMAP_H_\n");
  fprintf(fp, "#define FRONTEND_STATE_MEMMAP_H_\n");
  fprintf(fp, "\n");
  fprintf(fp, "#include \"frontend.h\"\n");
  fprintf(fp, "\n");
  fprintf(fp, "struct FrontendState* GetFrontendStateMemmap();\n");
  fprintf(fp, "\n");
  fprintf(fp, "#endif  // FRONTEND_STATE_MEMMAP_H_\n");
  fclose(fp);

  // Write out the source file that actually has everything in it.
  fp = fopen(source, "w");
  if (!fp) {
    fprintf(stderr, "Failed to open source '%s' for write\n", source);
    return 0;
  }
  fprintf(fp, "#include \"%s\"\n", header);
  fprintf(fp, "\n");
  WindowWriteMemmapPreamble(fp, &state->window);
  FftWriteMemmapPreamble(fp, &state->fft);
  FilterbankWriteMemmapPreamble(fp, &state->filterbank);
  NoiseReductionWriteMemmapPreamble(fp, &state->noise_reduction);
  fprintf(fp, "static struct FrontendState state;\n");
  fprintf(fp, "struct FrontendState* GetFrontendStateMemmap() {\n");
  WindowWriteMemmap(fp, &state->window, "  (&state.window)");
  FftWriteMemmap(fp, &state->fft, "  (&state.fft)");
  FilterbankWriteMemmap(fp, &state->filterbank, "  (&state.filterbank)");
  NoiseReductionWriteMemmap(fp, &state->noise_reduction,
                            "  (&state.noise_reduction)");
  LogScaleWriteMemmap(fp, &state->log_scale, "  (&state.log_scale)");
  fprintf(fp, "  FftInit(&state.fft);\n");
  fprintf(fp, "  FrontendReset(&state);\n");
  fprintf(fp, "  return &state;\n");
  fprintf(fp, "}\n");
  fclose(fp);
  return 1;
}
/* Copyright 2018 The TensorFlow Authors. All Rights Reserved.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==============================================================================*/
#ifndef TENSORFLOW_LITE_EXPERIMENTAL_MICROFRONTEND_LIB_FRONTEND_IO_H_
#define TENSORFLOW_LITE_EXPERIMENTAL_MICROFRONTEND_LIB_FRONTEND_IO_H_

#include "tensorflow/lite/experimental/microfrontend/lib/frontend.h"

#ifdef __cplusplus
extern "C" {
#endif

// Serializes `state` as generated C code: a header/source pair written to the
// given paths. Returns 1 on success, 0 on failure.
int WriteFrontendStateMemmap(const char* header, const char* source,
                             const struct FrontendState* state);

#ifdef __cplusplus
}  // extern "C"
#endif

#endif  // TENSORFLOW_LITE_EXPERIMENTAL_MICROFRONTEND_LIB_FRONTEND_IO_H_
/* Copyright 2018 The TensorFlow Authors. All Rights Reserved.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
+==============================================================================*/ +#include + +#include "tensorflow/lite/experimental/microfrontend/lib/frontend.h" +#include "tensorflow/lite/experimental/microfrontend/lib/frontend_util.h" + +int main(int argc, char** argv) { + struct FrontendConfig frontend_config; + FrontendFillConfigWithDefaults(&frontend_config); + + char* filename = argv[1]; + int sample_rate = 16000; + + struct FrontendState frontend_state; + if (!FrontendPopulateState(&frontend_config, &frontend_state, sample_rate)) { + fprintf(stderr, "Failed to populate frontend state\n"); + FrontendFreeStateContents(&frontend_state); + return 1; + } + + FILE* fp = fopen(filename, "r"); + if (fp == NULL) { + fprintf(stderr, "Failed to open %s for read\n", filename); + return 1; + } + fseek(fp, 0L, SEEK_END); + size_t audio_file_size = ftell(fp) / sizeof(int16_t); + fseek(fp, 0L, SEEK_SET); + int16_t* audio_data = malloc(audio_file_size * sizeof(int16_t)); + int16_t* original_audio_data = audio_data; + if (audio_file_size != + fread(audio_data, sizeof(int16_t), audio_file_size, fp)) { + fprintf(stderr, "Failed to read in all audio data\n"); + fclose(fp); + return 1; + } + + while (audio_file_size > 0) { + size_t num_samples_read; + struct FrontendOutput output = FrontendProcessSamples( + &frontend_state, audio_data, audio_file_size, &num_samples_read); + audio_data += num_samples_read; + audio_file_size -= num_samples_read; + + if (output.values != NULL) { + int i; + for (i = 0; i < output.size; ++i) { + printf("%d ", output.values[i]); + } + printf("\n"); + } + } + + FrontendFreeStateContents(&frontend_state); + free(original_audio_data); + fclose(fp); + return 0; +} diff --git a/tensorflow/lite/experimental/microfrontend/lib/frontend_memmap_generator.c b/tensorflow/lite/experimental/microfrontend/lib/frontend_memmap_generator.c new file mode 100644 index 0000000..548028c --- /dev/null +++ 
/* Copyright 2018 The TensorFlow Authors. All Rights Reserved.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==============================================================================*/
// NOTE(review): system-include target garbled in this copy; stdio.h is
// required for fprintf below.
#include <stdio.h>

#include "tensorflow/lite/experimental/microfrontend/lib/frontend.h"
#include "tensorflow/lite/experimental/microfrontend/lib/frontend_io.h"
#include "tensorflow/lite/experimental/microfrontend/lib/frontend_util.h"

// Generates a header/source pair (paths given as argv[1]/argv[2]) that
// memory-maps a default-configured 16 kHz frontend state.
int main(int argc, char** argv) {
  if (argc != 3) {
    fprintf(stderr,
            "%s requires exactly two parameters - the names of the header and "
            "source files to save\n",
            argv[0]);
    return 1;
  }
  struct FrontendConfig frontend_config;
  FrontendFillConfigWithDefaults(&frontend_config);

  int sample_rate = 16000;
  struct FrontendState frontend_state;
  if (!FrontendPopulateState(&frontend_config, &frontend_state, sample_rate)) {
    fprintf(stderr, "Failed to populate frontend state\n");
    FrontendFreeStateContents(&frontend_state);
    return 1;
  }

  if (!WriteFrontendStateMemmap(argv[1], argv[2], &frontend_state)) {
    fprintf(stderr, "Failed to write memmap\n");
    FrontendFreeStateContents(&frontend_state);
    return 1;
  }

  FrontendFreeStateContents(&frontend_state);
  return 0;
}
0000000..e9c89b5 --- /dev/null +++ b/tensorflow/lite/experimental/microfrontend/lib/frontend_memmap_main.c @@ -0,0 +1,60 @@ +/* Copyright 2018 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ +#include + +#include "memmap.h" +#include "tensorflow/lite/experimental/microfrontend/lib/frontend.h" + +int main(int argc, char** argv) { + struct FrontendState* frontend_state = GetFrontendStateMemmap(); + + char* filename = argv[1]; + FILE* fp = fopen(filename, "r"); + if (fp == NULL) { + fprintf(stderr, "Failed to open %s for read\n", filename); + return 1; + } + fseek(fp, 0L, SEEK_END); + size_t audio_file_size = ftell(fp) / sizeof(int16_t); + fseek(fp, 0L, SEEK_SET); + int16_t* audio_data = malloc(audio_file_size * sizeof(int16_t)); + int16_t* original_audio_data = audio_data; + if (audio_file_size != + fread(audio_data, sizeof(int16_t), audio_file_size, fp)) { + fprintf(stderr, "Failed to read in all audio data\n"); + fclose(fp); + return 1; + } + + while (audio_file_size > 0) { + size_t num_samples_read; + struct FrontendOutput output = FrontendProcessSamples( + frontend_state, audio_data, audio_file_size, &num_samples_read); + audio_data += num_samples_read; + audio_file_size -= num_samples_read; + + if (output.values != NULL) { + int i; + for (i = 0; i < output.size; ++i) { + printf("%d ", output.values[i]); + } + printf("\n"); + } + } + + free(original_audio_data); 
+ fclose(fp); + return 0; +} diff --git a/tensorflow/lite/experimental/microfrontend/lib/frontend_test.cc b/tensorflow/lite/experimental/microfrontend/lib/frontend_test.cc new file mode 100644 index 0000000..9c981de --- /dev/null +++ b/tensorflow/lite/experimental/microfrontend/lib/frontend_test.cc @@ -0,0 +1,131 @@ +/* Copyright 2018 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ +#include "tensorflow/lite/experimental/microfrontend/lib/frontend.h" + +#include "tensorflow/lite/experimental/microfrontend/lib/frontend_util.h" +#include "tensorflow/lite/micro/testing/micro_test.h" + +namespace { + +const int kSampleRate = 1000; +const int kWindowSamples = 25; +const int kStepSamples = 10; +const int16_t kFakeAudioData[] = { + 0, 32767, 0, -32768, 0, 32767, 0, -32768, 0, 32767, 0, -32768, + 0, 32767, 0, -32768, 0, 32767, 0, -32768, 0, 32767, 0, -32768, + 0, 32767, 0, -32768, 0, 32767, 0, -32768, 0, 32767, 0, -32768}; + +// Test end-to-end frontend behaviors. 
+class FrontendTestConfig { + public: + FrontendTestConfig() { + config_.window.size_ms = 25; + config_.window.step_size_ms = 10; + config_.noise_reduction.smoothing_bits = 10; + config_.filterbank.num_channels = 2; + config_.filterbank.lower_band_limit = 8.0; + config_.filterbank.upper_band_limit = 450.0; + config_.noise_reduction.smoothing_bits = 10; + config_.noise_reduction.even_smoothing = 0.025; + config_.noise_reduction.odd_smoothing = 0.06; + config_.noise_reduction.min_signal_remaining = 0.05; + config_.pcan_gain_control.enable_pcan = true; + config_.pcan_gain_control.strength = 0.95; + config_.pcan_gain_control.offset = 80.0; + config_.pcan_gain_control.gain_bits = 21; + config_.log_scale.enable_log = true; + config_.log_scale.scale_shift = 6; + } + + struct FrontendConfig config_; +}; + +} // namespace + +TF_LITE_MICRO_TESTS_BEGIN + +TF_LITE_MICRO_TEST(FrontendTest_CheckOutputValues) { + FrontendTestConfig config; + struct FrontendState state; + TF_LITE_MICRO_EXPECT( + FrontendPopulateState(&config.config_, &state, kSampleRate)); + size_t num_samples_read; + + struct FrontendOutput output = FrontendProcessSamples( + &state, kFakeAudioData, + sizeof(kFakeAudioData) / sizeof(kFakeAudioData[0]), &num_samples_read); + + const uint16_t expected[] = {479, 425}; + TF_LITE_MICRO_EXPECT_EQ(output.size, sizeof(expected) / sizeof(expected[0])); + int i; + for (i = 0; i < output.size; ++i) { + TF_LITE_MICRO_EXPECT_EQ(output.values[i], expected[i]); + } + + FrontendFreeStateContents(&state); +} + +TF_LITE_MICRO_TEST(FrontendTest_CheckConsecutiveWindow) { + FrontendTestConfig config; + struct FrontendState state; + TF_LITE_MICRO_EXPECT( + FrontendPopulateState(&config.config_, &state, kSampleRate)); + size_t num_samples_read; + + FrontendProcessSamples(&state, kFakeAudioData, + sizeof(kFakeAudioData) / sizeof(kFakeAudioData[0]), + &num_samples_read); + struct FrontendOutput output = FrontendProcessSamples( + &state, kFakeAudioData + kWindowSamples, + 
// After two calls have consumed a full window plus one step, fewer than
// kStepSamples samples remain, so the frontend cannot produce another frame:
// it must report an empty output (size 0, NULL values) rather than stale data.
TF_LITE_MICRO_TEST(FrontendTest_CheckNotEnoughSamples) {
  FrontendTestConfig config;
  struct FrontendState state;
  TF_LITE_MICRO_EXPECT(
      FrontendPopulateState(&config.config_, &state, kSampleRate));
  size_t num_samples_read;

  // Consume the first full window, then one step's worth of samples.
  FrontendProcessSamples(&state, kFakeAudioData,
                         sizeof(kFakeAudioData) / sizeof(kFakeAudioData[0]),
                         &num_samples_read);
  FrontendProcessSamples(
      &state, kFakeAudioData + kWindowSamples,
      sizeof(kFakeAudioData) / sizeof(kFakeAudioData[0]) - kWindowSamples,
      &num_samples_read);
  struct FrontendOutput output = FrontendProcessSamples(
      &state, kFakeAudioData + kWindowSamples + kStepSamples,
      sizeof(kFakeAudioData) / sizeof(kFakeAudioData[0]) - kWindowSamples -
          kStepSamples,
      &num_samples_read);

  TF_LITE_MICRO_EXPECT_EQ(output.size, 0);
  TF_LITE_MICRO_EXPECT(output.values == nullptr);

  FrontendFreeStateContents(&state);
}
#include "tensorflow/lite/experimental/microfrontend/lib/frontend_util.h"

#include <stdio.h>
#include <string.h>

#include "tensorflow/lite/experimental/microfrontend/lib/bits.h"

// Fills each sub-module's section of the config with that module's defaults.
void FrontendFillConfigWithDefaults(struct FrontendConfig* config) {
  WindowFillConfigWithDefaults(&config->window);
  FilterbankFillConfigWithDefaults(&config->filterbank);
  NoiseReductionFillConfigWithDefaults(&config->noise_reduction);
  PcanGainControlFillConfigWithDefaults(&config->pcan_gain_control);
  LogScaleFillConfigWithDefaults(&config->log_scale);
}

// Initializes all sub-module state (allocating their buffers) for the given
// sample rate. Returns 1 on success, 0 on failure; failures are logged to
// stderr. NOTE(review): on a mid-way failure, buffers populated by earlier
// sub-modules are not released here. Because the state is zeroed first, it
// looks safe for callers to invoke FrontendFreeStateContents() afterwards —
// confirm callers do so.
int FrontendPopulateState(const struct FrontendConfig* config,
                          struct FrontendState* state, int sample_rate) {
  // Zero everything up front so partially-populated state is well-defined.
  memset(state, 0, sizeof(*state));

  if (!WindowPopulateState(&config->window, &state->window, sample_rate)) {
    fprintf(stderr, "Failed to populate window state\n");
    return 0;
  }

  if (!FftPopulateState(&state->fft, state->window.size)) {
    fprintf(stderr, "Failed to populate fft state\n");
    return 0;
  }
  FftInit(&state->fft);

  // fft_size / 2 + 1 is the number of spectral bins handed to the filterbank.
  if (!FilterbankPopulateState(&config->filterbank, &state->filterbank,
                               sample_rate, state->fft.fft_size / 2 + 1)) {
    fprintf(stderr, "Failed to populate filterbank state\n");
    return 0;
  }

  if (!NoiseReductionPopulateState(&config->noise_reduction,
                                   &state->noise_reduction,
                                   state->filterbank.num_channels)) {
    fprintf(stderr, "Failed to populate noise reduction state\n");
    return 0;
  }

  // Scaling correction passed to PCAN, derived from the FFT size and
  // kFilterbankBits (see pcan_gain_control_util for the exact contract).
  int input_correction_bits =
      MostSignificantBit32(state->fft.fft_size) - 1 - (kFilterbankBits / 2);
  if (!PcanGainControlPopulateState(
          &config->pcan_gain_control, &state->pcan_gain_control,
          state->noise_reduction.estimate, state->filterbank.num_channels,
          state->noise_reduction.smoothing_bits, input_correction_bits)) {
    fprintf(stderr, "Failed to populate pcan gain control state\n");
    return 0;
  }

  if (!LogScalePopulateState(&config->log_scale, &state->log_scale)) {
    fprintf(stderr, "Failed to populate log scale state\n");
    return 0;
  }

  FrontendReset(state);

  // All good, return a true value.
  return 1;
}

// Frees the buffers owned by each sub-module's state.
void FrontendFreeStateContents(struct FrontendState* state) {
  WindowFreeStateContents(&state->window);
  FftFreeStateContents(&state->fft);
  FilterbankFreeStateContents(&state->filterbank);
  NoiseReductionFreeStateContents(&state->noise_reduction);
  PcanGainControlFreeStateContents(&state->pcan_gain_control);
}
+==============================================================================*/ +#ifndef TENSORFLOW_LITE_EXPERIMENTAL_MICROFRONTEND_LIB_FRONTEND_UTIL_H_ +#define TENSORFLOW_LITE_EXPERIMENTAL_MICROFRONTEND_LIB_FRONTEND_UTIL_H_ + +#include "tensorflow/lite/experimental/microfrontend/lib/fft_util.h" +#include "tensorflow/lite/experimental/microfrontend/lib/filterbank_util.h" +#include "tensorflow/lite/experimental/microfrontend/lib/frontend.h" +#include "tensorflow/lite/experimental/microfrontend/lib/log_scale_util.h" +#include "tensorflow/lite/experimental/microfrontend/lib/noise_reduction_util.h" +#include "tensorflow/lite/experimental/microfrontend/lib/pcan_gain_control_util.h" +#include "tensorflow/lite/experimental/microfrontend/lib/window_util.h" + +#ifdef __cplusplus +extern "C" { +#endif + +struct FrontendConfig { + struct WindowConfig window; + struct FilterbankConfig filterbank; + struct NoiseReductionConfig noise_reduction; + struct PcanGainControlConfig pcan_gain_control; + struct LogScaleConfig log_scale; +}; + +// Fills the frontendConfig with "sane" defaults. +void FrontendFillConfigWithDefaults(struct FrontendConfig* config); + +// Allocates any buffers. +int FrontendPopulateState(const struct FrontendConfig* config, + struct FrontendState* state, int sample_rate); + +// Frees any allocated buffers. +void FrontendFreeStateContents(struct FrontendState* state); + +#ifdef __cplusplus +} // extern "C" +#endif + +#endif // TENSORFLOW_LITE_EXPERIMENTAL_MICROFRONTEND_LIB_FRONTEND_UTIL_H_ diff --git a/tensorflow/lite/experimental/microfrontend/lib/kiss_fft_common.h b/tensorflow/lite/experimental/microfrontend/lib/kiss_fft_common.h new file mode 100644 index 0000000..33556da --- /dev/null +++ b/tensorflow/lite/experimental/microfrontend/lib/kiss_fft_common.h @@ -0,0 +1,48 @@ +/* Copyright 2019 The TensorFlow Authors. All Rights Reserved. 
+ +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#ifndef TENSORFLOW_LITE_EXPERIMENTAL_MICROFRONTEND_LIB_KISS_FFT_COMMON_H_ +#define TENSORFLOW_LITE_EXPERIMENTAL_MICROFRONTEND_LIB_KISS_FFT_COMMON_H_ + +// This header file should be included in all variants of kiss_fft_$type.{h,cc} +// so that their sub-included source files do not mistakenly wrap libc header +// files within their kissfft_$type namespaces. +// E.g, This header avoids kissfft_int16.h containing: +// namespace kiss_fft_int16 { +// #include "kiss_fft.h" +// } +// where kiss_fft_.h contains: +// #include +// +// TRICK: By including the following header files here, their preprocessor +// header guards prevent them being re-defined inside of the kiss_fft_$type +// namespaces declared within the kiss_fft_$type.{h,cc} sources. +// Note that the original kiss_fft*.h files are untouched since they +// may be used in libraries that include them directly. 
+ +#include +#include +#include +#include +#include + +#ifdef FIXED_POINT +#include +#endif + +#ifdef USE_SIMD +#include +#endif +#endif // TENSORFLOW_LITE_EXPERIMENTAL_MICROFRONTEND_LIB_KISS_FFT_COMMON_H_ diff --git a/tensorflow/lite/experimental/microfrontend/lib/kiss_fft_int16.cc b/tensorflow/lite/experimental/microfrontend/lib/kiss_fft_int16.cc new file mode 100644 index 0000000..f1e781b --- /dev/null +++ b/tensorflow/lite/experimental/microfrontend/lib/kiss_fft_int16.cc @@ -0,0 +1,10 @@ +#include + +#include "tensorflow/lite/experimental/microfrontend/lib/kiss_fft_common.h" + +#define FIXED_POINT 16 +namespace kissfft_fixed16 { +#include "kiss_fft.c" +#include "tools/kiss_fftr.c" +} // namespace kissfft_fixed16 +#undef FIXED_POINT diff --git a/tensorflow/lite/experimental/microfrontend/lib/kiss_fft_int16.h b/tensorflow/lite/experimental/microfrontend/lib/kiss_fft_int16.h new file mode 100644 index 0000000..beee99a --- /dev/null +++ b/tensorflow/lite/experimental/microfrontend/lib/kiss_fft_int16.h @@ -0,0 +1,33 @@ +/* Copyright 2019 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#ifndef TENSORFLOW_LITE_EXPERIMENTAL_MICROFRONTEND_LIB_KISS_FFT_INT16_H_ +#define TENSORFLOW_LITE_EXPERIMENTAL_MICROFRONTEND_LIB_KISS_FFT_INT16_H_ + +#include "tensorflow/lite/experimental/microfrontend/lib/kiss_fft_common.h" + +// Wrap 16-bit kiss fft in its own namespace. 
Enables us to link an application
// with different kiss fft resolutions (16/32 bit integer, float, double)
// without getting a linker error.
#include "tensorflow/lite/experimental/microfrontend/lib/log_lut.h"
// Lookup table used by log_scale.c: per-segment correction terms for the
// piecewise-linear log2 approximation over kLogSegments segments. Two
// trailing zeros pad the table so interpolation may read one entry past the
// final segment.
const uint16_t kLogLut[]
#ifndef _MSC_VER
    __attribute__((aligned(4)))
#endif  // _MSC_VER
    = {0,    224,  442,  654,  861,  1063, 1259, 1450, 1636, 1817, 1992, 2163,
       2329, 2490, 2646, 2797, 2944, 3087, 3224, 3358, 3487, 3611, 3732, 3848,
       3960, 4068, 4172, 4272, 4368, 4460, 4549, 4633, 4714, 4791, 4864, 4934,
       5001, 5063, 5123, 5178, 5231, 5280, 5326, 5368, 5408, 5444, 5477, 5507,
       5533, 5557, 5578, 5595, 5610, 5622, 5631, 5637, 5640, 5641, 5638, 5633,
       5626, 5615, 5602, 5586, 5568, 5547, 5524, 5498, 5470, 5439, 5406, 5370,
       5332, 5291, 5249, 5203, 5156, 5106, 5054, 5000, 4944, 4885, 4825, 4762,
       4697, 4630, 4561, 4490, 4416, 4341, 4264, 4184, 4103, 4020, 3935, 3848,
       3759, 3668, 3575, 3481, 3384, 3286, 3186, 3084, 2981, 2875, 2768, 2659,
       2549, 2437, 2323, 2207, 2090, 1971, 1851, 1729, 1605, 1480, 1353, 1224,
       1094, 963,  830,  695,  559,  421,  282,  142,  0,    0};
#ifndef TENSORFLOW_LITE_EXPERIMENTAL_MICROFRONTEND_LIB_LOG_LUT_H_
#define TENSORFLOW_LITE_EXPERIMENTAL_MICROFRONTEND_LIB_LOG_LUT_H_

// Restored garbled include: uint16_t below requires <stdint.h>.
#include <stdint.h>

#ifdef __cplusplus
extern "C" {
#endif

// Number of segments in the log lookup table. The table will be kLogSegments+1
// in length (with some padding).
#define kLogSegments 128
#define kLogSegmentsLog2 7

// Scale used by lookup table.
#define kLogScale 65536
#define kLogScaleLog2 16
#define kLogCoeff 45426

extern const uint16_t kLogLut[];

#ifdef __cplusplus
}  // extern "C"
#endif

#endif  // TENSORFLOW_LITE_EXPERIMENTAL_MICROFRONTEND_LIB_LOG_LUT_H_
// Computes the fractional part of log2(x) in kLogScale fixed point.
// |log2x| must be the integer part of log2(x), i.e. the index of x's most
// significant set bit. Interpolates linearly between entries of kLogLut.
static uint32_t Log2FractionPart(const uint32_t x, const uint32_t log2x) {
  // Part 1: normalize the remainder x - 2^log2x to kLogScaleLog2 fractional
  // bits. The 1LL avoids overflow in the shift; the result fits in int32
  // because x < 2^(log2x + 1).
  int32_t frac = x - (1LL << log2x);
  if (log2x < kLogScaleLog2) {
    frac <<= kLogScaleLog2 - log2x;
  } else {
    frac >>= log2x - kLogScaleLog2;
  }
  // Part 2: piecewise-linear correction looked up from kLogLut.
  const uint32_t base_seg = frac >> (kLogScaleLog2 - kLogSegmentsLog2);
  const uint32_t seg_unit =
      (((uint32_t)1) << kLogScaleLog2) >> kLogSegmentsLog2;

  const int32_t c0 = kLogLut[base_seg];
  const int32_t c1 = kLogLut[base_seg + 1];
  const int32_t seg_base = seg_unit * base_seg;
  const int32_t rel_pos = ((c1 - c0) * (frac - seg_base)) >> kLogScaleLog2;
  return frac + c0 + rel_pos;
}

// Natural logarithm of x (x must be > 1), returned in fixed point scaled by
// 2^scale_shift. Computes log2(x) as integer part + interpolated fraction,
// then converts to ln via kLogCoeff (45426 == round(ln(2) * kLogScale)),
// rounding at each rescale.
static uint32_t Log(const uint32_t x, const uint32_t scale_shift) {
  const uint32_t integer = MostSignificantBit32(x) - 1;
  const uint32_t fraction = Log2FractionPart(x, integer);
  const uint32_t log2 = (integer << kLogScaleLog2) + fraction;
  const uint32_t round = kLogScale / 2;
  const uint32_t loge = (((uint64_t)kLogCoeff) * log2 + round) >> kLogScaleLog2;
  // Finally scale to our output scale
  const uint32_t loge_scaled = ((loge << scale_shift) + round) >> kLogScaleLog2;
  return loge_scaled;
}
#ifndef TENSORFLOW_LITE_EXPERIMENTAL_MICROFRONTEND_LIB_LOG_SCALE_H_
#define TENSORFLOW_LITE_EXPERIMENTAL_MICROFRONTEND_LIB_LOG_SCALE_H_

// Restored garbled includes: uint16_t/uint32_t require <stdint.h>;
// <stdlib.h> matches the sibling microfrontend headers — TODO confirm
// against the upstream header.
#include <stdint.h>
#include <stdlib.h>

#ifdef __cplusplus
extern "C" {
#endif

struct LogScaleState {
  int enable_log;   // non-zero to apply the log; zero to pass values through
  int scale_shift;  // output is scaled by 2^scale_shift
};

// Applies a fixed point logarithm to the signal and converts it to 16 bit. Note
// that the signal array will be modified.
uint16_t* LogScaleApply(struct LogScaleState* state, uint32_t* signal,
                        int signal_size, int correction_bits);

#ifdef __cplusplus
}  // extern "C"
#endif

#endif  // TENSORFLOW_LITE_EXPERIMENTAL_MICROFRONTEND_LIB_LOG_SCALE_H_
#include "tensorflow/lite/experimental/microfrontend/lib/log_scale_io.h"

// Writes C assignment statements to |fp| that recreate |state|'s fields on a
// pointer variable named |variable| (used when generating memmapped frontend
// source code).
void LogScaleWriteMemmap(FILE* fp, const struct LogScaleState* state,
                         const char* variable) {
  fprintf(fp, "%s->enable_log = %d;\n", variable, state->enable_log);
  fprintf(fp, "%s->scale_shift = %d;\n", variable, state->scale_shift);
}
#ifndef TENSORFLOW_LITE_EXPERIMENTAL_MICROFRONTEND_LIB_LOG_SCALE_IO_H_
#define TENSORFLOW_LITE_EXPERIMENTAL_MICROFRONTEND_LIB_LOG_SCALE_IO_H_

// Restored garbled include: FILE below requires <stdio.h>.
#include <stdio.h>

#include "tensorflow/lite/experimental/microfrontend/lib/log_scale.h"

#ifdef __cplusplus
extern "C" {
#endif

// Writes C statements to |fp| that recreate |state| on a variable named
// |variable|.
void LogScaleWriteMemmap(FILE* fp, const struct LogScaleState* state,
                         const char* variable);

#ifdef __cplusplus
}  // extern "C"
#endif

#endif  // TENSORFLOW_LITE_EXPERIMENTAL_MICROFRONTEND_LIB_LOG_SCALE_IO_H_
#include "tensorflow/lite/experimental/microfrontend/lib/log_scale.h"

#include "tensorflow/lite/experimental/microfrontend/lib/log_scale_util.h"
#include "tensorflow/lite/micro/testing/micro_test.h"

namespace {

const int kScaleShift = 6;
// Negative correction: inputs are halved (>> 1) before the log is applied.
const int kCorrectionBits = -1;

}  // namespace

TF_LITE_MICRO_TESTS_BEGIN

// With logging enabled, each input is corrected, log-scaled and narrowed to
// uint16_t; the expected values pin the fixed-point log implementation.
TF_LITE_MICRO_TEST(LogScaleTest_CheckOutputValues) {
  struct LogScaleState state;
  state.enable_log = true;
  state.scale_shift = kScaleShift;

  uint32_t fake_signal[] = {3578, 1533};
  uint16_t* output = LogScaleApply(&state, fake_signal,
                                   sizeof(fake_signal) / sizeof(fake_signal[0]),
                                   kCorrectionBits);

  const uint16_t expected[] = {479, 425};
  int i;
  for (i = 0; i < sizeof(expected) / sizeof(expected[0]); ++i) {
    TF_LITE_MICRO_EXPECT_EQ(output[i], expected[i]);
  }
}

// With logging disabled, values pass through unchanged except for saturation
// to the uint16_t maximum (85964 clamps to 65535; 45998 fits and is kept).
TF_LITE_MICRO_TEST(LogScaleTest_CheckOutputValuesNoLog) {
  struct LogScaleState state;
  state.enable_log = false;
  state.scale_shift = kScaleShift;

  uint32_t fake_signal[] = {85964, 45998};
  uint16_t* output = LogScaleApply(&state, fake_signal,
                                   sizeof(fake_signal) / sizeof(fake_signal[0]),
                                   kCorrectionBits);

  const uint16_t expected[] = {65535, 45998};
  int i;
  for (i = 0; i < sizeof(expected) / sizeof(expected[0]); ++i) {
    TF_LITE_MICRO_EXPECT_EQ(output[i], expected[i]);
  }
}

TF_LITE_MICRO_TESTS_END
#include "tensorflow/lite/experimental/microfrontend/lib/log_scale_util.h"

// Populates the config with the library's default log-scaling parameters:
// logging enabled, outputs scaled by 2^6.
void LogScaleFillConfigWithDefaults(struct LogScaleConfig* config) {
  config->enable_log = 1;
  config->scale_shift = 6;
}

// Copies the config into the runtime state. Allocates nothing and always
// returns 1 (success); the int return exists to match the other modules'
// Populate signatures.
int LogScalePopulateState(const struct LogScaleConfig* config,
                          struct LogScaleState* state) {
  state->enable_log = config->enable_log;
  state->scale_shift = config->scale_shift;
  return 1;
}
#ifndef TENSORFLOW_LITE_EXPERIMENTAL_MICROFRONTEND_LIB_LOG_SCALE_UTIL_H_
#define TENSORFLOW_LITE_EXPERIMENTAL_MICROFRONTEND_LIB_LOG_SCALE_UTIL_H_

// Restored garbled includes; <stdint.h>/<stdlib.h> match the sibling
// microfrontend headers — TODO confirm against the upstream header.
#include <stdint.h>
#include <stdlib.h>

#include "tensorflow/lite/experimental/microfrontend/lib/log_scale.h"

#ifdef __cplusplus
extern "C" {
#endif

struct LogScaleConfig {
  // set to false (0) to disable this module
  int enable_log;
  // scale results by 2^(scale_shift)
  int scale_shift;
};

// Populates the LogScaleConfig with "sane" default values.
void LogScaleFillConfigWithDefaults(struct LogScaleConfig* config);

// Allocates any buffers.
int LogScalePopulateState(const struct LogScaleConfig* config,
                          struct LogScaleState* state);

#ifdef __cplusplus
}  // extern "C"
#endif

#endif  // TENSORFLOW_LITE_EXPERIMENTAL_MICROFRONTEND_LIB_LOG_SCALE_UTIL_H_
#include "tensorflow/lite/experimental/microfrontend/lib/noise_reduction.h"

// Restored garbled include: memset below requires <string.h>.
#include <string.h>

// Applies noise suppression in place to each of state->num_channels entries
// of |signal|. Maintains an exponentially smoothed per-channel noise
// estimate (in kNoiseReductionBits fixed point) and subtracts it from the
// signal, flooring the result at the min_signal_remaining fraction of the
// input so channels never collapse to zero.
void NoiseReductionApply(struct NoiseReductionState* state, uint32_t* signal) {
  int i;
  for (i = 0; i < state->num_channels; ++i) {
    // Even and odd channels may use different smoothing coefficients.
    const uint32_t smoothing =
        ((i & 1) == 0) ? state->even_smoothing : state->odd_smoothing;
    const uint32_t one_minus_smoothing = (1 << kNoiseReductionBits) - smoothing;

    // Update the estimate of the noise.
    const uint32_t signal_scaled_up = signal[i] << state->smoothing_bits;
    uint32_t estimate =
        (((uint64_t)signal_scaled_up * smoothing) +
         ((uint64_t)state->estimate[i] * one_minus_smoothing)) >>
        kNoiseReductionBits;
    state->estimate[i] = estimate;

    // Make sure that we can't get a negative value for the signal - estimate.
    if (estimate > signal_scaled_up) {
      estimate = signal_scaled_up;
    }

    const uint32_t floor =
        ((uint64_t)signal[i] * state->min_signal_remaining) >>
        kNoiseReductionBits;
    const uint32_t subtracted =
        (signal_scaled_up - estimate) >> state->smoothing_bits;
    const uint32_t output = subtracted > floor ? subtracted : floor;
    signal[i] = output;
  }
}

// Clears the per-channel noise estimates.
void NoiseReductionReset(struct NoiseReductionState* state) {
  memset(state->estimate, 0, sizeof(*state->estimate) * state->num_channels);
}
+You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ +#ifndef TENSORFLOW_LITE_EXPERIMENTAL_MICROFRONTEND_LIB_NOISE_REDUCTION_H_ +#define TENSORFLOW_LITE_EXPERIMENTAL_MICROFRONTEND_LIB_NOISE_REDUCTION_H_ + +#define kNoiseReductionBits 14 + +#include +#include + +#ifdef __cplusplus +extern "C" { +#endif + +struct NoiseReductionState { + int smoothing_bits; + uint16_t even_smoothing; + uint16_t odd_smoothing; + uint16_t min_signal_remaining; + int num_channels; + uint32_t* estimate; +}; + +// Removes stationary noise from each channel of the signal using a low pass +// filter. +void NoiseReductionApply(struct NoiseReductionState* state, uint32_t* signal); + +void NoiseReductionReset(struct NoiseReductionState* state); + +#ifdef __cplusplus +} // extern "C" +#endif + +#endif // TENSORFLOW_LITE_EXPERIMENTAL_MICROFRONTEND_LIB_NOISE_REDUCTION_H_ diff --git a/tensorflow/lite/experimental/microfrontend/lib/noise_reduction_io.c b/tensorflow/lite/experimental/microfrontend/lib/noise_reduction_io.c new file mode 100644 index 0000000..19c32b3 --- /dev/null +++ b/tensorflow/lite/experimental/microfrontend/lib/noise_reduction_io.c @@ -0,0 +1,34 @@ +/* Copyright 2018 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. 
+You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ +#include "tensorflow/lite/experimental/microfrontend/lib/noise_reduction_io.h" + +void NoiseReductionWriteMemmapPreamble( + FILE* fp, const struct NoiseReductionState* state) { + fprintf(fp, "static uint32_t noise_reduction_estimate[%zu];\n", + state->num_channels); + fprintf(fp, "\n"); +} + +void NoiseReductionWriteMemmap(FILE* fp, + const struct NoiseReductionState* state, + const char* variable) { + fprintf(fp, "%s->even_smoothing = %d;\n", variable, state->even_smoothing); + fprintf(fp, "%s->odd_smoothing = %d;\n", variable, state->odd_smoothing); + fprintf(fp, "%s->min_signal_remaining = %d;\n", variable, + state->min_signal_remaining); + fprintf(fp, "%s->num_channels = %d;\n", variable, state->num_channels); + + fprintf(fp, "%s->estimate = noise_reduction_estimate;\n", variable); +} diff --git a/tensorflow/lite/experimental/microfrontend/lib/noise_reduction_io.h b/tensorflow/lite/experimental/microfrontend/lib/noise_reduction_io.h new file mode 100644 index 0000000..ded5211 --- /dev/null +++ b/tensorflow/lite/experimental/microfrontend/lib/noise_reduction_io.h @@ -0,0 +1,36 @@ +/* Copyright 2018 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. 
+You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ +#ifndef TENSORFLOW_LITE_EXPERIMENTAL_MICROFRONTEND_LIB_NOISE_REDUCTION_IO_H_ +#define TENSORFLOW_LITE_EXPERIMENTAL_MICROFRONTEND_LIB_NOISE_REDUCTION_IO_H_ + +#include + +#include "tensorflow/lite/experimental/microfrontend/lib/noise_reduction.h" + +#ifdef __cplusplus +extern "C" { +#endif + +void NoiseReductionWriteMemmapPreamble(FILE* fp, + const struct NoiseReductionState* state); +void NoiseReductionWriteMemmap(FILE* fp, + const struct NoiseReductionState* state, + const char* variable); + +#ifdef __cplusplus +} // extern "C" +#endif + +#endif // TENSORFLOW_LITE_EXPERIMENTAL_MICROFRONTEND_LIB_NOISE_REDUCTION_IO_H_ diff --git a/tensorflow/lite/experimental/microfrontend/lib/noise_reduction_test.cc b/tensorflow/lite/experimental/microfrontend/lib/noise_reduction_test.cc new file mode 100644 index 0000000..027f688 --- /dev/null +++ b/tensorflow/lite/experimental/microfrontend/lib/noise_reduction_test.cc @@ -0,0 +1,81 @@ +/* Copyright 2018 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ +#include "tensorflow/lite/experimental/microfrontend/lib/noise_reduction.h" + +#include "tensorflow/lite/experimental/microfrontend/lib/noise_reduction_util.h" +#include "tensorflow/lite/micro/testing/micro_test.h" + +namespace { + +const int kNumChannels = 2; + +// Test noise reduction using default config values. +class NoiseReductionTestConfig { + public: + NoiseReductionTestConfig() { + config_.smoothing_bits = 10; + config_.even_smoothing = 0.025; + config_.odd_smoothing = 0.06; + config_.min_signal_remaining = 0.05; + } + + struct NoiseReductionConfig config_; +}; + +} // namespace + +TF_LITE_MICRO_TESTS_BEGIN + +TF_LITE_MICRO_TEST(NoiseReductionTest_TestNoiseReductionEstimate) { + NoiseReductionTestConfig config; + struct NoiseReductionState state; + TF_LITE_MICRO_EXPECT( + NoiseReductionPopulateState(&config.config_, &state, kNumChannels)); + + uint32_t signal[] = {247311, 508620}; + NoiseReductionApply(&state, signal); + + const uint32_t expected[] = {6321887, 31248341}; + TF_LITE_MICRO_EXPECT_EQ(state.num_channels, + sizeof(expected) / sizeof(expected[0])); + int i; + for (i = 0; i < state.num_channels; ++i) { + TF_LITE_MICRO_EXPECT_EQ(state.estimate[i], expected[i]); + } + + NoiseReductionFreeStateContents(&state); +} + +TF_LITE_MICRO_TEST(NoiseReductionTest_TestNoiseReduction) { + NoiseReductionTestConfig config; + struct NoiseReductionState state; + TF_LITE_MICRO_EXPECT( + NoiseReductionPopulateState(&config.config_, &state, kNumChannels)); + + uint32_t signal[] = {247311, 508620}; + NoiseReductionApply(&state, signal); + + const uint32_t expected[] = {241137, 478104}; + TF_LITE_MICRO_EXPECT_EQ(state.num_channels, + sizeof(expected) / sizeof(expected[0])); + int i; + for (i = 0; i < state.num_channels; ++i) { + TF_LITE_MICRO_EXPECT_EQ(signal[i], expected[i]); + } + + 
NoiseReductionFreeStateContents(&state); +} + +TF_LITE_MICRO_TESTS_END diff --git a/tensorflow/lite/experimental/microfrontend/lib/noise_reduction_util.c b/tensorflow/lite/experimental/microfrontend/lib/noise_reduction_util.c new file mode 100644 index 0000000..a6c9234 --- /dev/null +++ b/tensorflow/lite/experimental/microfrontend/lib/noise_reduction_util.c @@ -0,0 +1,45 @@ +/* Copyright 2018 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ +#include "tensorflow/lite/experimental/microfrontend/lib/noise_reduction_util.h" + +#include + +void NoiseReductionFillConfigWithDefaults(struct NoiseReductionConfig* config) { + config->smoothing_bits = 10; + config->even_smoothing = 0.025; + config->odd_smoothing = 0.06; + config->min_signal_remaining = 0.05; +} + +int NoiseReductionPopulateState(const struct NoiseReductionConfig* config, + struct NoiseReductionState* state, + int num_channels) { + state->smoothing_bits = config->smoothing_bits; + state->odd_smoothing = config->odd_smoothing * (1 << kNoiseReductionBits); + state->even_smoothing = config->even_smoothing * (1 << kNoiseReductionBits); + state->min_signal_remaining = + config->min_signal_remaining * (1 << kNoiseReductionBits); + state->num_channels = num_channels; + state->estimate = calloc(state->num_channels, sizeof(*state->estimate)); + if (state->estimate == NULL) { + fprintf(stderr, "Failed to alloc estimate 
buffer\n"); + return 0; + } + return 1; +} + +void NoiseReductionFreeStateContents(struct NoiseReductionState* state) { + free(state->estimate); +} diff --git a/tensorflow/lite/experimental/microfrontend/lib/noise_reduction_util.h b/tensorflow/lite/experimental/microfrontend/lib/noise_reduction_util.h new file mode 100644 index 0000000..fa55539 --- /dev/null +++ b/tensorflow/lite/experimental/microfrontend/lib/noise_reduction_util.h @@ -0,0 +1,50 @@ +/* Copyright 2018 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ +#ifndef TENSORFLOW_LITE_EXPERIMENTAL_MICROFRONTEND_LIB_NOISE_REDUCTION_UTIL_H_ +#define TENSORFLOW_LITE_EXPERIMENTAL_MICROFRONTEND_LIB_NOISE_REDUCTION_UTIL_H_ + +#include "tensorflow/lite/experimental/microfrontend/lib/noise_reduction.h" + +#ifdef __cplusplus +extern "C" { +#endif + +struct NoiseReductionConfig { + // scale the signal up by 2^(smoothing_bits) before reduction + int smoothing_bits; + // smoothing coefficient for even-numbered channels + float even_smoothing; + // smoothing coefficient for odd-numbered channels + float odd_smoothing; + // fraction of signal to preserve (1.0 disables this module) + float min_signal_remaining; +}; + +// Populates the NoiseReductionConfig with "sane" default values. +void NoiseReductionFillConfigWithDefaults(struct NoiseReductionConfig* config); + +// Allocates any buffers. 
+int NoiseReductionPopulateState(const struct NoiseReductionConfig* config, + struct NoiseReductionState* state, + int num_channels); + +// Frees any allocated buffers. +void NoiseReductionFreeStateContents(struct NoiseReductionState* state); + +#ifdef __cplusplus +} // extern "C" +#endif + +#endif // TENSORFLOW_LITE_EXPERIMENTAL_MICROFRONTEND_LIB_NOISE_REDUCTION_UTIL_H_ diff --git a/tensorflow/lite/experimental/microfrontend/lib/pcan_gain_control.c b/tensorflow/lite/experimental/microfrontend/lib/pcan_gain_control.c new file mode 100644 index 0000000..22d5876 --- /dev/null +++ b/tensorflow/lite/experimental/microfrontend/lib/pcan_gain_control.c @@ -0,0 +1,56 @@ +/* Copyright 2018 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ +#include "tensorflow/lite/experimental/microfrontend/lib/pcan_gain_control.h" + +#include "tensorflow/lite/experimental/microfrontend/lib/bits.h" + +int16_t WideDynamicFunction(const uint32_t x, const int16_t* lut) { + if (x <= 2) { + return lut[x]; + } + + const int16_t interval = MostSignificantBit32(x); + lut += 4 * interval - 6; + + const int16_t frac = + ((interval < 11) ? 
(x << (11 - interval)) : (x >> (interval - 11))) & + 0x3FF; + + int32_t result = ((int32_t)lut[2] * frac) >> 5; + result += (int32_t)((uint32_t)lut[1] << 5); + result *= frac; + result = (result + (1 << 14)) >> 15; + result += lut[0]; + return (int16_t)result; +} + +uint32_t PcanShrink(const uint32_t x) { + if (x < (2 << kPcanSnrBits)) { + return (x * x) >> (2 + 2 * kPcanSnrBits - kPcanOutputBits); + } else { + return (x >> (kPcanSnrBits - kPcanOutputBits)) - (1 << kPcanOutputBits); + } +} + +void PcanGainControlApply(struct PcanGainControlState* state, + uint32_t* signal) { + int i; + for (i = 0; i < state->num_channels; ++i) { + const uint32_t gain = + WideDynamicFunction(state->noise_estimate[i], state->gain_lut); + const uint32_t snr = ((uint64_t)signal[i] * gain) >> state->snr_shift; + signal[i] = PcanShrink(snr); + } +} diff --git a/tensorflow/lite/experimental/microfrontend/lib/pcan_gain_control.h b/tensorflow/lite/experimental/microfrontend/lib/pcan_gain_control.h new file mode 100644 index 0000000..3f6222b --- /dev/null +++ b/tensorflow/lite/experimental/microfrontend/lib/pcan_gain_control.h @@ -0,0 +1,47 @@ +/* Copyright 2018 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/ +#ifndef TENSORFLOW_LITE_EXPERIMENTAL_MICROFRONTEND_LIB_PCAN_GAIN_CONTROL_H_ +#define TENSORFLOW_LITE_EXPERIMENTAL_MICROFRONTEND_LIB_PCAN_GAIN_CONTROL_H_ + +#include +#include + +#define kPcanSnrBits 12 +#define kPcanOutputBits 6 + +#ifdef __cplusplus +extern "C" { +#endif + +// Details at https://research.google/pubs/pub45911.pdf +struct PcanGainControlState { + int enable_pcan; + uint32_t* noise_estimate; + int num_channels; + int16_t* gain_lut; + int32_t snr_shift; +}; + +int16_t WideDynamicFunction(const uint32_t x, const int16_t* lut); + +uint32_t PcanShrink(const uint32_t x); + +void PcanGainControlApply(struct PcanGainControlState* state, uint32_t* signal); + +#ifdef __cplusplus +} // extern "C" +#endif + +#endif // TENSORFLOW_LITE_EXPERIMENTAL_MICROFRONTEND_LIB_PCAN_GAIN_CONTROL_H_ diff --git a/tensorflow/lite/experimental/microfrontend/lib/pcan_gain_control_test.cc b/tensorflow/lite/experimental/microfrontend/lib/pcan_gain_control_test.cc new file mode 100644 index 0000000..f6ecd71 --- /dev/null +++ b/tensorflow/lite/experimental/microfrontend/lib/pcan_gain_control_test.cc @@ -0,0 +1,65 @@ +/* Copyright 2018 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/ +#include "tensorflow/lite/experimental/microfrontend/lib/pcan_gain_control.h" + +#include "tensorflow/lite/experimental/microfrontend/lib/pcan_gain_control_util.h" +#include "tensorflow/lite/micro/testing/micro_test.h" + +namespace { + +const int kNumChannels = 2; +const int kSmoothingBits = 10; +const int kCorrectionBits = -1; + +// Test pcan auto gain control using default config values. +class PcanGainControlTestConfig { + public: + PcanGainControlTestConfig() { + config_.enable_pcan = 1; + config_.strength = 0.95; + config_.offset = 80.0; + config_.gain_bits = 21; + } + + struct PcanGainControlConfig config_; +}; + +} // namespace + +TF_LITE_MICRO_TESTS_BEGIN + +TF_LITE_MICRO_TEST(PcanGainControlTest_TestPcanGainControl) { + uint32_t estimate[] = {6321887, 31248341}; + PcanGainControlTestConfig config; + struct PcanGainControlState state; + TF_LITE_MICRO_EXPECT(PcanGainControlPopulateState( + &config.config_, &state, estimate, kNumChannels, kSmoothingBits, + kCorrectionBits)); + + uint32_t signal[] = {241137, 478104}; + PcanGainControlApply(&state, signal); + + const uint32_t expected[] = {3578, 1533}; + TF_LITE_MICRO_EXPECT_EQ(state.num_channels, + sizeof(expected) / sizeof(expected[0])); + int i; + for (i = 0; i < state.num_channels; ++i) { + TF_LITE_MICRO_EXPECT_EQ(signal[i], expected[i]); + } + + PcanGainControlFreeStateContents(&state); +} + +TF_LITE_MICRO_TESTS_END diff --git a/tensorflow/lite/experimental/microfrontend/lib/pcan_gain_control_util.c b/tensorflow/lite/experimental/microfrontend/lib/pcan_gain_control_util.c new file mode 100644 index 0000000..e850d43 --- /dev/null +++ b/tensorflow/lite/experimental/microfrontend/lib/pcan_gain_control_util.c @@ -0,0 +1,92 @@ +/* Copyright 2018 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. 
+You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ +#include "tensorflow/lite/experimental/microfrontend/lib/pcan_gain_control_util.h" + +#include +#include + +#define kint16max 0x00007FFF + +void PcanGainControlFillConfigWithDefaults( + struct PcanGainControlConfig* config) { + config->enable_pcan = 0; + config->strength = 0.95; + config->offset = 80.0; + config->gain_bits = 21; +} + +int16_t PcanGainLookupFunction(const struct PcanGainControlConfig* config, + int32_t input_bits, uint32_t x) { + const float x_as_float = ((float)x) / ((uint32_t)1 << input_bits); + const float gain_as_float = + ((uint32_t)1 << config->gain_bits) * + powf(x_as_float + config->offset, -config->strength); + + if (gain_as_float > kint16max) { + return kint16max; + } + return (int16_t)(gain_as_float + 0.5f); +} + +int PcanGainControlPopulateState(const struct PcanGainControlConfig* config, + struct PcanGainControlState* state, + uint32_t* noise_estimate, + const int num_channels, + const uint16_t smoothing_bits, + const int32_t input_correction_bits) { + state->enable_pcan = config->enable_pcan; + if (!state->enable_pcan) { + return 1; + } + state->noise_estimate = noise_estimate; + state->num_channels = num_channels; + state->gain_lut = malloc(kWideDynamicFunctionLUTSize * sizeof(int16_t)); + if (state->gain_lut == NULL) { + fprintf(stderr, "Failed to allocate gain LUT\n"); + return 0; + } + state->snr_shift = config->gain_bits - input_correction_bits - kPcanSnrBits; + + const int32_t input_bits = smoothing_bits - input_correction_bits; + state->gain_lut[0] = 
PcanGainLookupFunction(config, input_bits, 0); + state->gain_lut[1] = PcanGainLookupFunction(config, input_bits, 1); + state->gain_lut -= 6; + int interval; + for (interval = 2; interval <= kWideDynamicFunctionBits; ++interval) { + const uint32_t x0 = (uint32_t)1 << (interval - 1); + const uint32_t x1 = x0 + (x0 >> 1); + const uint32_t x2 = + (interval == kWideDynamicFunctionBits) ? x0 + (x0 - 1) : 2 * x0; + + const int16_t y0 = PcanGainLookupFunction(config, input_bits, x0); + const int16_t y1 = PcanGainLookupFunction(config, input_bits, x1); + const int16_t y2 = PcanGainLookupFunction(config, input_bits, x2); + + const int32_t diff1 = (int32_t)y1 - y0; + const int32_t diff2 = (int32_t)y2 - y0; + const int32_t a1 = 4 * diff1 - diff2; + const int32_t a2 = diff2 - a1; + + state->gain_lut[4 * interval] = y0; + state->gain_lut[4 * interval + 1] = (int16_t)a1; + state->gain_lut[4 * interval + 2] = (int16_t)a2; + } + state->gain_lut += 6; + return 1; +} + +void PcanGainControlFreeStateContents(struct PcanGainControlState* state) { + free(state->gain_lut); +} diff --git a/tensorflow/lite/experimental/microfrontend/lib/pcan_gain_control_util.h b/tensorflow/lite/experimental/microfrontend/lib/pcan_gain_control_util.h new file mode 100644 index 0000000..d4bfaa2 --- /dev/null +++ b/tensorflow/lite/experimental/microfrontend/lib/pcan_gain_control_util.h @@ -0,0 +1,57 @@ +/* Copyright 2018 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/ +#ifndef TENSORFLOW_LITE_EXPERIMENTAL_MICROFRONTEND_LIB_PCAN_GAIN_CONTROL_UTIL_H_ +#define TENSORFLOW_LITE_EXPERIMENTAL_MICROFRONTEND_LIB_PCAN_GAIN_CONTROL_UTIL_H_ + +#include "tensorflow/lite/experimental/microfrontend/lib/pcan_gain_control.h" + +#define kWideDynamicFunctionBits 32 +#define kWideDynamicFunctionLUTSize (4 * kWideDynamicFunctionBits - 3) + +#ifdef __cplusplus +extern "C" { +#endif + +struct PcanGainControlConfig { + // set to false (0) to disable this module + int enable_pcan; + // gain normalization exponent (0.0 disables, 1.0 full strength) + float strength; + // positive value added in the normalization denominator + float offset; + // number of fractional bits in the gain + int gain_bits; +}; + +void PcanGainControlFillConfigWithDefaults( + struct PcanGainControlConfig* config); + +int16_t PcanGainLookupFunction(const struct PcanGainControlConfig* config, + int32_t input_bits, uint32_t x); + +int PcanGainControlPopulateState(const struct PcanGainControlConfig* config, + struct PcanGainControlState* state, + uint32_t* noise_estimate, + const int num_channels, + const uint16_t smoothing_bits, + const int32_t input_correction_bits); + +void PcanGainControlFreeStateContents(struct PcanGainControlState* state); + +#ifdef __cplusplus +} // extern "C" +#endif + +#endif // TENSORFLOW_LITE_EXPERIMENTAL_MICROFRONTEND_LIB_PCAN_GAIN_CONTROL_UTIL_H_ diff --git a/tensorflow/lite/experimental/microfrontend/lib/window.c b/tensorflow/lite/experimental/microfrontend/lib/window.c new file mode 100644 index 0000000..10da676 --- /dev/null +++ b/tensorflow/lite/experimental/microfrontend/lib/window.c @@ -0,0 +1,70 @@ +/* Copyright 2018 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. 
+You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ +#include "tensorflow/lite/experimental/microfrontend/lib/window.h" + +#include + +int WindowProcessSamples(struct WindowState* state, const int16_t* samples, + size_t num_samples, size_t* num_samples_read) { + const int size = state->size; + + // Copy samples from the samples buffer over to our local input. + size_t max_samples_to_copy = state->size - state->input_used; + if (max_samples_to_copy > num_samples) { + max_samples_to_copy = num_samples; + } + memcpy(state->input + state->input_used, samples, + max_samples_to_copy * sizeof(*samples)); + *num_samples_read = max_samples_to_copy; + state->input_used += max_samples_to_copy; + + if (state->input_used < state->size) { + // We don't have enough samples to compute a window. + return 0; + } + + // Apply the window to the input. + const int16_t* coefficients = state->coefficients; + const int16_t* input = state->input; + int16_t* output = state->output; + int i; + int16_t max_abs_output_value = 0; + for (i = 0; i < size; ++i) { + int16_t new_value = + (((int32_t)*input++) * *coefficients++) >> kFrontendWindowBits; + *output++ = new_value; + if (new_value < 0) { + new_value = -new_value; + } + if (new_value > max_abs_output_value) { + max_abs_output_value = new_value; + } + } + // Shuffle the input down by the step size, and update how much we have used. 
+ memmove(state->input, state->input + state->step, + sizeof(*state->input) * (state->size - state->step)); + state->input_used -= state->step; + state->max_abs_output_value = max_abs_output_value; + + // Indicate that the output buffer is valid for the next stage. + return 1; +} + +void WindowReset(struct WindowState* state) { + memset(state->input, 0, state->size * sizeof(*state->input)); + memset(state->output, 0, state->size * sizeof(*state->output)); + state->input_used = 0; + state->max_abs_output_value = 0; +} diff --git a/tensorflow/lite/experimental/microfrontend/lib/window.h b/tensorflow/lite/experimental/microfrontend/lib/window.h new file mode 100644 index 0000000..bad8151 --- /dev/null +++ b/tensorflow/lite/experimental/microfrontend/lib/window.h @@ -0,0 +1,49 @@ +/* Copyright 2018 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ +#ifndef TENSORFLOW_LITE_EXPERIMENTAL_MICROFRONTEND_LIB_WINDOW_H_ +#define TENSORFLOW_LITE_EXPERIMENTAL_MICROFRONTEND_LIB_WINDOW_H_ + +#include +#include + +#define kFrontendWindowBits 12 + +#ifdef __cplusplus +extern "C" { +#endif + +struct WindowState { + size_t size; + int16_t* coefficients; + size_t step; + + int16_t* input; + size_t input_used; + int16_t* output; + int16_t max_abs_output_value; +}; + +// Applies a window to the samples coming in, stepping forward at the given +// rate. 
+int WindowProcessSamples(struct WindowState* state, const int16_t* samples, + size_t num_samples, size_t* num_samples_read); + +void WindowReset(struct WindowState* state); + +#ifdef __cplusplus +} // extern "C" +#endif + +#endif // TENSORFLOW_LITE_EXPERIMENTAL_MICROFRONTEND_LIB_WINDOW_H_ diff --git a/tensorflow/lite/experimental/microfrontend/lib/window_io.c b/tensorflow/lite/experimental/microfrontend/lib/window_io.c new file mode 100644 index 0000000..d12cac2 --- /dev/null +++ b/tensorflow/lite/experimental/microfrontend/lib/window_io.c @@ -0,0 +1,43 @@ +/* Copyright 2018 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/ +#include "tensorflow/lite/experimental/microfrontend/lib/window_io.h" + +void WindowWriteMemmapPreamble(FILE* fp, const struct WindowState* state) { + fprintf(fp, "static int16_t window_coefficients[] = {\n"); + int i; + for (i = 0; i < state->size; ++i) { + fprintf(fp, "%d", state->coefficients[i]); + if (i < state->size - 1) { + fprintf(fp, ", "); + } + } + fprintf(fp, "};\n"); + fprintf(fp, "static int16_t window_input[%zu];\n", state->size); + fprintf(fp, "static int16_t window_output[%zu];\n", state->size); + fprintf(fp, "\n"); +} + +void WindowWriteMemmap(FILE* fp, const struct WindowState* state, + const char* variable) { + fprintf(fp, "%s->size = %zu;\n", variable, state->size); + fprintf(fp, "%s->coefficients = window_coefficients;\n", variable); + fprintf(fp, "%s->step = %zu;\n", variable, state->step); + + fprintf(fp, "%s->input = window_input;\n", variable); + fprintf(fp, "%s->input_used = %zu;\n", variable, state->input_used); + fprintf(fp, "%s->output = window_output;\n", variable); + fprintf(fp, "%s->max_abs_output_value = %d;\n", variable, + state->max_abs_output_value); +} diff --git a/tensorflow/lite/experimental/microfrontend/lib/window_io.h b/tensorflow/lite/experimental/microfrontend/lib/window_io.h new file mode 100644 index 0000000..a76b2dc --- /dev/null +++ b/tensorflow/lite/experimental/microfrontend/lib/window_io.h @@ -0,0 +1,34 @@ +/* Copyright 2018 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ +#ifndef TENSORFLOW_LITE_EXPERIMENTAL_MICROFRONTEND_LIB_WINDOW_IO_H_ +#define TENSORFLOW_LITE_EXPERIMENTAL_MICROFRONTEND_LIB_WINDOW_IO_H_ + +#include + +#include "tensorflow/lite/experimental/microfrontend/lib/window.h" + +#ifdef __cplusplus +extern "C" { +#endif + +void WindowWriteMemmapPreamble(FILE* fp, const struct WindowState* state); +void WindowWriteMemmap(FILE* fp, const struct WindowState* state, + const char* variable); + +#ifdef __cplusplus +} // extern "C" +#endif + +#endif // TENSORFLOW_LITE_EXPERIMENTAL_MICROFRONTEND_LIB_WINDOW_IO_H_ diff --git a/tensorflow/lite/experimental/microfrontend/lib/window_test.cc b/tensorflow/lite/experimental/microfrontend/lib/window_test.cc new file mode 100644 index 0000000..8ed7694 --- /dev/null +++ b/tensorflow/lite/experimental/microfrontend/lib/window_test.cc @@ -0,0 +1,177 @@ +/* Copyright 2018 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/ +#include "tensorflow/lite/experimental/microfrontend/lib/window.h" + +#include "tensorflow/lite/experimental/microfrontend/lib/window_util.h" +#include "tensorflow/lite/micro/testing/micro_test.h" + +namespace { + +const int kSampleRate = 1000; +const int kWindowSamples = 25; +const int kStepSamples = 10; +const int16_t kFakeAudioData[] = { + 0, 32767, 0, -32768, 0, 32767, 0, -32768, 0, 32767, 0, -32768, + 0, 32767, 0, -32768, 0, 32767, 0, -32768, 0, 32767, 0, -32768, + 0, 32767, 0, -32768, 0, 32767, 0, -32768, 0, 32767, 0, -32768}; + +// Test window function behaviors using default config values. +class WindowTestConfig { + public: + WindowTestConfig() { + config_.size_ms = 25; + config_.step_size_ms = 10; + } + + struct WindowConfig config_; +}; + +} // namespace + +TF_LITE_MICRO_TESTS_BEGIN + +TF_LITE_MICRO_TEST(WindowState_CheckCoefficients) { + WindowTestConfig config; + struct WindowState state; + TF_LITE_MICRO_EXPECT( + WindowPopulateState(&config.config_, &state, kSampleRate)); + + const int16_t expected[] = {16, 144, 391, 743, 1176, 1664, 2177, + 2681, 3145, 3541, 3843, 4032, 4096, 4032, + 3843, 3541, 3145, 2681, 2177, 1664, 1176, + 743, 391, 144, 16}; + TF_LITE_MICRO_EXPECT_EQ(state.size, sizeof(expected) / sizeof(expected[0])); + int i; + for (i = 0; i < state.size; ++i) { + TF_LITE_MICRO_EXPECT_EQ(state.coefficients[i], expected[i]); + } + + WindowFreeStateContents(&state); +} + +TF_LITE_MICRO_TEST(WindowState_CheckResidualInput) { + WindowTestConfig config; + struct WindowState state; + TF_LITE_MICRO_EXPECT( + WindowPopulateState(&config.config_, &state, kSampleRate)); + size_t num_samples_read; + + TF_LITE_MICRO_EXPECT(WindowProcessSamples( + &state, kFakeAudioData, + sizeof(kFakeAudioData) / sizeof(kFakeAudioData[0]), &num_samples_read)); + + int i; + for (i = kStepSamples; i < kWindowSamples; ++i) { + TF_LITE_MICRO_EXPECT_EQ(state.input[i - kStepSamples], 
kFakeAudioData[i]); + } + + WindowFreeStateContents(&state); +} + +TF_LITE_MICRO_TEST(WindowState_CheckOutputValues) { + WindowTestConfig config; + struct WindowState state; + TF_LITE_MICRO_EXPECT( + WindowPopulateState(&config.config_, &state, kSampleRate)); + size_t num_samples_read; + + TF_LITE_MICRO_EXPECT(WindowProcessSamples( + &state, kFakeAudioData, + sizeof(kFakeAudioData) / sizeof(kFakeAudioData[0]), &num_samples_read)); + + const int16_t expected[] = { + 0, 1151, 0, -5944, 0, 13311, 0, -21448, 0, 28327, 0, -32256, 0, 32255, + 0, -28328, 0, 21447, 0, -13312, 0, 5943, 0, -1152, 0}; + TF_LITE_MICRO_EXPECT_EQ(state.size, sizeof(expected) / sizeof(expected[0])); + int i; + for (i = 0; i < state.size; ++i) { + TF_LITE_MICRO_EXPECT_EQ(state.output[i], expected[i]); + } + + WindowFreeStateContents(&state); +} + +TF_LITE_MICRO_TEST(WindowState_CheckMaxAbsValue) { + WindowTestConfig config; + struct WindowState state; + TF_LITE_MICRO_EXPECT( + WindowPopulateState(&config.config_, &state, kSampleRate)); + size_t num_samples_read; + + TF_LITE_MICRO_EXPECT(WindowProcessSamples( + &state, kFakeAudioData, + sizeof(kFakeAudioData) / sizeof(kFakeAudioData[0]), &num_samples_read)); + + TF_LITE_MICRO_EXPECT_EQ(state.max_abs_output_value, 32256); + + WindowFreeStateContents(&state); +} + +TF_LITE_MICRO_TEST(WindowState_CheckConsecutiveWindow) { + WindowTestConfig config; + struct WindowState state; + TF_LITE_MICRO_EXPECT( + WindowPopulateState(&config.config_, &state, kSampleRate)); + size_t num_samples_read; + + TF_LITE_MICRO_EXPECT(WindowProcessSamples( + &state, kFakeAudioData, + sizeof(kFakeAudioData) / sizeof(kFakeAudioData[0]), &num_samples_read)); + TF_LITE_MICRO_EXPECT(WindowProcessSamples( + &state, kFakeAudioData + kWindowSamples, + sizeof(kFakeAudioData) / sizeof(kFakeAudioData[0]) - kWindowSamples, + &num_samples_read)); + + const int16_t expected[] = { + 0, -1152, 0, 5943, 0, -13312, 0, 21447, 0, -28328, 0, 32255, 0, -32256, + 0, 28327, 0, -21448, 0, 13311, 0, 
-5944, 0, 1151, 0}; + TF_LITE_MICRO_EXPECT_EQ(state.size, sizeof(expected) / sizeof(expected[0])); + int i; + for (i = 0; i < state.size; ++i) { + TF_LITE_MICRO_EXPECT_EQ(state.output[i], expected[i]); + } + + WindowFreeStateContents(&state); +} + +TF_LITE_MICRO_TEST(WindowState_CheckNotEnoughSamples) { + WindowTestConfig config; + struct WindowState state; + TF_LITE_MICRO_EXPECT( + WindowPopulateState(&config.config_, &state, kSampleRate)); + size_t num_samples_read; + + TF_LITE_MICRO_EXPECT(WindowProcessSamples( + &state, kFakeAudioData, + sizeof(kFakeAudioData) / sizeof(kFakeAudioData[0]), &num_samples_read)); + TF_LITE_MICRO_EXPECT(WindowProcessSamples( + &state, kFakeAudioData + kWindowSamples, + sizeof(kFakeAudioData) / sizeof(kFakeAudioData[0]) - kWindowSamples, + &num_samples_read)); + TF_LITE_MICRO_EXPECT_EQ( + false, WindowProcessSamples( + &state, kFakeAudioData + kWindowSamples + kStepSamples, + sizeof(kFakeAudioData) / sizeof(kFakeAudioData[0]) - + kWindowSamples - kStepSamples, + &num_samples_read)); + + TF_LITE_MICRO_EXPECT_EQ( + state.input_used, + sizeof(kFakeAudioData) / sizeof(kFakeAudioData[0]) - 2 * kStepSamples); + + WindowFreeStateContents(&state); +} + +TF_LITE_MICRO_TESTS_END diff --git a/tensorflow/lite/experimental/microfrontend/lib/window_util.c b/tensorflow/lite/experimental/microfrontend/lib/window_util.c new file mode 100644 index 0000000..eee6e7b --- /dev/null +++ b/tensorflow/lite/experimental/microfrontend/lib/window_util.c @@ -0,0 +1,73 @@ +/* Copyright 2018 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ +#include "tensorflow/lite/experimental/microfrontend/lib/window_util.h" + +#include +#include +#include +#include + +// Some platforms don't have M_PI +#ifndef M_PI +#define M_PI 3.14159265358979323846 +#endif + +void WindowFillConfigWithDefaults(struct WindowConfig* config) { + config->size_ms = 25; + config->step_size_ms = 10; +} + +int WindowPopulateState(const struct WindowConfig* config, + struct WindowState* state, int sample_rate) { + state->size = config->size_ms * sample_rate / 1000; + state->step = config->step_size_ms * sample_rate / 1000; + + state->coefficients = malloc(state->size * sizeof(*state->coefficients)); + if (state->coefficients == NULL) { + fprintf(stderr, "Failed to allocate window coefficients\n"); + return 0; + } + + // Populate the window values. + const float arg = M_PI * 2.0 / ((float)state->size); + int i; + for (i = 0; i < state->size; ++i) { + float float_value = 0.5 - (0.5 * cos(arg * (i + 0.5))); + // Scale it to fixed point and round it. 
+ state->coefficients[i] = + floor(float_value * (1 << kFrontendWindowBits) + 0.5); + } + + state->input_used = 0; + state->input = malloc(state->size * sizeof(*state->input)); + if (state->input == NULL) { + fprintf(stderr, "Failed to allocate window input\n"); + return 0; + } + + state->output = malloc(state->size * sizeof(*state->output)); + if (state->output == NULL) { + fprintf(stderr, "Failed to allocate window output\n"); + return 0; + } + + return 1; +} + +void WindowFreeStateContents(struct WindowState* state) { + free(state->coefficients); + free(state->input); + free(state->output); +} diff --git a/tensorflow/lite/experimental/microfrontend/lib/window_util.h b/tensorflow/lite/experimental/microfrontend/lib/window_util.h new file mode 100644 index 0000000..68e4de9 --- /dev/null +++ b/tensorflow/lite/experimental/microfrontend/lib/window_util.h @@ -0,0 +1,45 @@ +/* Copyright 2018 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
==============================================================================*/
#ifndef TENSORFLOW_LITE_EXPERIMENTAL_MICROFRONTEND_LIB_WINDOW_UTIL_H_
#define TENSORFLOW_LITE_EXPERIMENTAL_MICROFRONTEND_LIB_WINDOW_UTIL_H_

// NOTE(review): stripped include target restored; <stdlib.h> supplies size_t
// used in struct WindowConfig below.
#include <stdlib.h>

#include "tensorflow/lite/experimental/microfrontend/lib/window.h"

#ifdef __cplusplus
extern "C" {
#endif

struct WindowConfig {
  // length of window frame in milliseconds
  size_t size_ms;
  // length of step for next frame in milliseconds
  size_t step_size_ms;
};

// Populates the WindowConfig with "sane" default values.
void WindowFillConfigWithDefaults(struct WindowConfig* config);

// Allocates any buffers.
int WindowPopulateState(const struct WindowConfig* config,
                        struct WindowState* state, int sample_rate);

// Frees any allocated buffers.
void WindowFreeStateContents(struct WindowState* state);

#ifdef __cplusplus
}  // extern "C"
#endif

#endif  // TENSORFLOW_LITE_EXPERIMENTAL_MICROFRONTEND_LIB_WINDOW_UTIL_H_

# ---- tensorflow/lite/kernels/BUILD ----
load("//tensorflow/lite:build_def.bzl", "tflite_copts")
load("//tensorflow/lite/micro:build_def.bzl", "micro_copts")

package(
    default_visibility = [
        "//visibility:public",
    ],
    licenses = ["notice"],
)

cc_library(
    name = "op_macros",
    hdrs = [
        "op_macros.h",
    ],
    copts = tflite_copts(),
    deps = ["//tensorflow/lite/micro:debug_log"],
)

cc_library(
    name = "kernel_util",
    srcs = [
        "kernel_util.cc",
    ],
    hdrs = [
        "kernel_util.h",
    ],
    copts = tflite_copts() + micro_copts(),
    deps = [
        "//tensorflow/lite:array",
        "//tensorflow/lite:kernel_api",
        "//tensorflow/lite/c:common",
        "//tensorflow/lite/kernels/internal:cppmath",
        "//tensorflow/lite/kernels/internal:quantization_util",
    ],
)

cc_library(
    name = "padding",
    srcs = [],
    hdrs = ["padding.h"],
    copts = tflite_copts(),
    deps = [
        "//tensorflow/lite/c:common",
        "//tensorflow/lite/kernels/internal:types",
    ],
)

# ---- tensorflow/lite/kernels/internal/BUILD ----
load("//tensorflow/lite:build_def.bzl", "tflite_copts")
load("//tensorflow/lite/micro:build_def.bzl", "micro_copts")

package(
    default_visibility = [
        "//visibility:public",
    ],
    licenses = ["notice"],
)

cc_library(
    name = "common",
    srcs = ["common.cc"],
    hdrs = [
        "common.h",
        "optimized/neon_check.h",
    ],
    copts = tflite_copts(),
    deps = [
        ":cppmath",
        ":types",
        "//tensorflow/lite/core:macros",
        "@gemmlowp//:fixedpoint",
    ],
)

cc_library(
    name = "compatibility",
    hdrs = ["compatibility.h"],
    copts = tflite_copts(),
    deps = [
        "//tensorflow/lite/kernels:op_macros",
    ],
)

cc_library(
    name = "cppmath",
    srcs = [],
    hdrs = [
        "cppmath.h",
        "max.h",
        "min.h",
    ],
    copts = tflite_copts(),
)

cc_library(
    name = "quantization_util",
    srcs = ["quantization_util.cc"],
    hdrs = ["quantization_util.h"],
    copts = tflite_copts() + micro_copts(),
    deps = [
        ":compatibility",
        ":cppmath",
        ":types",
    ],
)

cc_library(
    name = "reference",
    srcs = ["tensor_ctypes.cc"],
    hdrs = [
        "portable_tensor.h",
        "tensor_ctypes.h",
    ],
    copts = tflite_copts(),
    deps = [
        ":types",
        "//tensorflow/lite/c:common",
        "//tensorflow/lite/core:macros",
    ],
)

cc_library(
    name = "reference_base",
    srcs = glob([
        "reference/*.cc",
    ]),
    hdrs = glob([
        "reference/*.h",
        "reference/integer_ops/*.h",
    ]),
    copts = tflite_copts(),
    # We are disabling parse_headers for this header-only target so that the
    # external and internal builds are consistent. The primary issue here is
    # that parse_headers is not supported with bazel and the TFLM team would
    # really like to have all build errors be reproducible from the OSS build
    # as well.
    #
    # See b/175817116 for more details.
    features = ["-parse_headers"],
    deps = [
        ":common",
        ":compatibility",
        ":cppmath",
        ":quantization_util",
        ":strided_slice_logic",
        ":tensor",
        ":types",
        "//tensorflow/lite/c:common",
        "//tensorflow/lite/core:macros",
        "//tensorflow/lite/kernels:kernel_util",
        "//tensorflow/lite/kernels:op_macros",
        "@gemmlowp//:fixedpoint",
        "@ruy//ruy/profiler:instrumentation",
    ],
)

cc_library(
    name = "strided_slice_logic",
    srcs = [],
    hdrs = [
        "strided_slice_logic.h",
    ],
    copts = tflite_copts(),
    deps = [
        ":compatibility",
        ":types",
    ],
)

cc_library(
    name = "tensor",
    hdrs = [
        "portable_tensor.h",
        "tensor_ctypes.h",
    ],
    copts = tflite_copts(),
    deps = [
        ":types",
        "//tensorflow/lite/c:common",
    ],
)

cc_library(
    name = "types",
    hdrs = [
        "runtime_shape.h",
        "types.h",
    ],
    copts = tflite_copts(),
    deps = [
        ":compatibility",
    ],
)

cc_library(
    name = "tensor_utils_no_eigen",
    srcs = [
        "portable_tensor_utils.cc",
        "reference/portable_tensor_utils.cc",
        "tensor_utils.cc",
    ],
    hdrs = [
        "portable_tensor_utils.h",
        "reference/portable_tensor_utils.h",
        "reference/portable_tensor_utils_impl.h",
    ],
    copts = tflite_copts(),
    deps = [
        ":common",
        ":compatibility",
        ":cppmath",
        "//tensorflow/lite/c:common",
        "//tensorflow/lite/core:macros",
        "//tensorflow/lite/kernels:op_macros",
        "@gemmlowp",
    ],
)

# ---- tensorflow/lite/kernels/internal/common.cc ----
/* Copyright 2023 The TensorFlow Authors. All Rights Reserved.
+ +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#include "tensorflow/lite/kernels/internal/common.h" + +namespace tflite { + +int32_t MultiplyByQuantizedMultiplier(int32_t x, int32_t quantized_multiplier, + int shift) { + using gemmlowp::RoundingDivideByPOT; + using gemmlowp::SaturatingRoundingDoublingHighMul; + int left_shift = shift > 0 ? shift : 0; + int right_shift = shift > 0 ? 0 : -shift; + return RoundingDivideByPOT(SaturatingRoundingDoublingHighMul( + x * (1 << left_shift), quantized_multiplier), + right_shift); +} + +int32_t MultiplyByQuantizedMultiplier(int64_t x, int32_t quantized_multiplier, + int shift) { + // Inputs: + // - quantized_multiplier has fixed point at bit 31 + // - shift is -31 to +7 (negative for right shift) + // + // Assumptions: The following input ranges are assumed + // - quantize_scale>=0 (the usual range is (1<<30) to (1>>31)-1) + // - scaling is chosen so final scaled result fits in int32_t + // - input x is in the range -(1<<47) <= x < (1<<47) + assert(quantized_multiplier >= 0); + assert(shift >= -31 && shift < 8); + assert(x >= -(static_cast(1) << 47) && + x < (static_cast(1) << 47)); + + int32_t reduced_multiplier = (quantized_multiplier < 0x7FFF0000) + ? 
((quantized_multiplier + (1 << 15)) >> 16) + : 0x7FFF; + int total_shift = 15 - shift; + x = (x * (int64_t)reduced_multiplier) + ((int64_t)1 << (total_shift - 1)); + int32_t result = x >> total_shift; + return result; +} + +} // namespace tflite diff --git a/tensorflow/lite/kernels/internal/common.h b/tensorflow/lite/kernels/internal/common.h new file mode 100644 index 0000000..05184df --- /dev/null +++ b/tensorflow/lite/kernels/internal/common.h @@ -0,0 +1,1243 @@ +/* Copyright 2017 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/ +#ifndef TENSORFLOW_LITE_KERNELS_INTERNAL_COMMON_H_ +#define TENSORFLOW_LITE_KERNELS_INTERNAL_COMMON_H_ + +#include +#ifndef ALLOW_SLOW_GENERIC_DEPTHWISECONV_FALLBACK +#ifdef GEMMLOWP_ALLOW_SLOW_SCALAR_FALLBACK +#define ALLOW_SLOW_GENERIC_DEPTHWISECONV_FALLBACK +#endif +#endif + +#include +#include + +#include "fixedpoint/fixedpoint.h" +#include "tensorflow/lite/core/macros.h" +#include "tensorflow/lite/kernels/internal/cppmath.h" +#include "tensorflow/lite/kernels/internal/optimized/neon_check.h" +#include "tensorflow/lite/kernels/internal/types.h" + +namespace tflite { + +constexpr int kReverseShift = -1; + +inline void GetActivationMinMax(FusedActivationFunctionType ac, + float* output_activation_min, + float* output_activation_max) { + switch (ac) { + case FusedActivationFunctionType::kNone: + *output_activation_min = std::numeric_limits::lowest(); + *output_activation_max = std::numeric_limits::max(); + break; + case FusedActivationFunctionType::kRelu: + *output_activation_min = 0.f; + *output_activation_max = std::numeric_limits::max(); + break; + case FusedActivationFunctionType::kRelu1: + *output_activation_min = -1.f; + *output_activation_max = 1.f; + break; + case FusedActivationFunctionType::kRelu6: + *output_activation_min = 0.f; + *output_activation_max = 6.f; + break; + } +} + +template +inline T ActivationFunctionWithMinMax(T x, T output_activation_min, + T output_activation_max) { + using std::max; + using std::min; + return min(max(x, output_activation_min), output_activation_max); +} + +// Legacy function, left for compatibility only. 
+template +float ActivationFunction(float x) { + float output_activation_min, output_activation_max; + GetActivationMinMax(Ac, &output_activation_min, &output_activation_max); + return ActivationFunctionWithMinMax(x, output_activation_min, + output_activation_max); +} + +inline void BiasAndClamp(float clamp_min, float clamp_max, int bias_size, + const float* bias_data, int array_size, + float* array_data) { + if (bias_size == 0) return; + // Note: see b/132215220: in May 2019 we thought it would be OK to replace + // this with the Eigen one-liner: + // return (array.colwise() + bias).cwiseMin(clamp_max).cwiseMin(clamp_max). + // This turned out to severely regress performance: +4ms (i.e. 8%) on + // MobileNet v2 / 1.0 / 224. So we keep custom NEON code for now. + TFLITE_DCHECK_EQ((array_size % bias_size), 0); +#ifdef USE_NEON + float* array_ptr = array_data; + float* array_end_ptr = array_ptr + array_size; + const auto clamp_min_vec = vdupq_n_f32(clamp_min); + const auto clamp_max_vec = vdupq_n_f32(clamp_max); + for (; array_ptr != array_end_ptr; array_ptr += bias_size) { + int i = 0; + for (; i <= bias_size - 16; i += 16) { + auto b0 = vld1q_f32(bias_data + i); + auto b1 = vld1q_f32(bias_data + i + 4); + auto b2 = vld1q_f32(bias_data + i + 8); + auto b3 = vld1q_f32(bias_data + i + 12); + auto a0 = vld1q_f32(array_ptr + i); + auto a1 = vld1q_f32(array_ptr + i + 4); + auto a2 = vld1q_f32(array_ptr + i + 8); + auto a3 = vld1q_f32(array_ptr + i + 12); + auto x0 = vaddq_f32(a0, b0); + auto x1 = vaddq_f32(a1, b1); + auto x2 = vaddq_f32(a2, b2); + auto x3 = vaddq_f32(a3, b3); + x0 = vmaxq_f32(clamp_min_vec, x0); + x1 = vmaxq_f32(clamp_min_vec, x1); + x2 = vmaxq_f32(clamp_min_vec, x2); + x3 = vmaxq_f32(clamp_min_vec, x3); + x0 = vminq_f32(clamp_max_vec, x0); + x1 = vminq_f32(clamp_max_vec, x1); + x2 = vminq_f32(clamp_max_vec, x2); + x3 = vminq_f32(clamp_max_vec, x3); + vst1q_f32(array_ptr + i, x0); + vst1q_f32(array_ptr + i + 4, x1); + vst1q_f32(array_ptr + i + 8, x2); + 
vst1q_f32(array_ptr + i + 12, x3); + } + for (; i <= bias_size - 4; i += 4) { + auto b = vld1q_f32(bias_data + i); + auto a = vld1q_f32(array_ptr + i); + auto x = vaddq_f32(a, b); + x = vmaxq_f32(clamp_min_vec, x); + x = vminq_f32(clamp_max_vec, x); + vst1q_f32(array_ptr + i, x); + } + for (; i < bias_size; i++) { + array_ptr[i] = ActivationFunctionWithMinMax(array_ptr[i] + bias_data[i], + clamp_min, clamp_max); + } + } +#else // not NEON + for (int array_offset = 0; array_offset < array_size; + array_offset += bias_size) { + for (int i = 0; i < bias_size; i++) { + array_data[array_offset + i] = ActivationFunctionWithMinMax( + array_data[array_offset + i] + bias_data[i], clamp_min, clamp_max); + } + } +#endif +} + +// Single-rounding MultiplyByQuantizedMultiplier +#if TFLITE_SINGLE_ROUNDING +inline int32_t MultiplyByQuantizedMultiplier(int32_t x, + int32_t quantized_multiplier, + int shift) { + TFLITE_DCHECK(quantized_multiplier >= 0); + TFLITE_DCHECK(shift >= -31 && shift <= 30); + + const int64_t total_shift = 31 - shift; + const int64_t round = static_cast(1) << (total_shift - 1); + int64_t result = x * static_cast(quantized_multiplier) + round; + result = result >> total_shift; + + TFLITE_DCHECK(result >= std::numeric_limits::min() && + result <= std::numeric_limits::max()); + return static_cast(result); +} + +inline int32_t MultiplyByQuantizedMultiplierSmallerThanOneExp( + int32_t x, int32_t quantized_multiplier, int shift) { + TFLITE_DCHECK_LE(shift, 0); + return MultiplyByQuantizedMultiplier(x, quantized_multiplier, shift); +} + +inline int32_t MultiplyByQuantizedMultiplierGreaterThanOne( + int32_t x, int32_t quantized_multiplier, int shift) { + TFLITE_DCHECK_GE(shift, 0); + return MultiplyByQuantizedMultiplier(x, quantized_multiplier, shift); +} + +inline int32_t MultiplyByQuantizedMultiplier(int64_t x, + int32_t quantized_multiplier, + int shift) { + // Inputs: + // - quantized_multiplier has fixed point at bit 31 + // - shift is -31 to +7 (negative for 
right shift) + // + // Assumptions: The following input ranges are assumed + // - quantize_scale>=0 (the usual range is (1<<30) to (1>>31)-1) + // - scaling is chosen so final scaled result fits in int32_t + // - input x is in the range -(1<<47) <= x < (1<<47) + TFLITE_DCHECK(quantized_multiplier >= 0); + TFLITE_DCHECK(shift >= -31 && shift < 8); + TFLITE_DCHECK(x >= -(static_cast(1) << 47) && + x < (static_cast(1) << 47)); + + const int32_t reduced_multiplier = + (quantized_multiplier < 0x7FFF0000) + ? ((quantized_multiplier + (1 << 15)) >> 16) + : 0x7FFF; + const int64_t total_shift = 15 - shift; + const int64_t round = static_cast(1) << (total_shift - 1); + int64_t result = x * static_cast(reduced_multiplier) + round; + result = result >> total_shift; + + TFLITE_DCHECK(result >= std::numeric_limits::min() && + result <= std::numeric_limits::max()); + return static_cast(result); +} + +#ifdef USE_NEON +inline int32x4x4_t MultiplyByQuantizedMultiplier4Rows( + int32x4x4_t input_val, int32_t quantized_multiplier, int shift) { + TFLITE_DCHECK(quantized_multiplier >= 0); + + const int right_shift = std::min(-1, shift); + const int left_shift = shift - right_shift; + + const int32x4_t multiplier_dup = vdupq_n_s32(quantized_multiplier); + const int32x4_t left_shift_dup = vdupq_n_s32(left_shift); + const int32x4_t right_shift_dup = vdupq_n_s32(right_shift); + + int32x4x4_t result; + result.val[0] = vrshlq_s32( + vqdmulhq_s32(vshlq_s32(input_val.val[0], left_shift_dup), multiplier_dup), + right_shift_dup); + + result.val[1] = vrshlq_s32( + vqdmulhq_s32(vshlq_s32(input_val.val[1], left_shift_dup), multiplier_dup), + right_shift_dup); + + result.val[2] = vrshlq_s32( + vqdmulhq_s32(vshlq_s32(input_val.val[2], left_shift_dup), multiplier_dup), + right_shift_dup); + + result.val[3] = vrshlq_s32( + vqdmulhq_s32(vshlq_s32(input_val.val[3], left_shift_dup), multiplier_dup), + right_shift_dup); + + return result; +} +#endif // USE_NEON +// Double-rounding 
MultiplyByQuantizedMultiplier +#else +inline int32_t MultiplyByQuantizedMultiplierSmallerThanOneExp( + int32_t x, int32_t quantized_multiplier, int left_shift) { + using gemmlowp::RoundingDivideByPOT; + using gemmlowp::SaturatingRoundingDoublingHighMul; + return RoundingDivideByPOT( + SaturatingRoundingDoublingHighMul(x, quantized_multiplier), -left_shift); +} + +inline int32_t MultiplyByQuantizedMultiplierGreaterThanOne( + int32_t x, int32_t quantized_multiplier, int left_shift) { + using gemmlowp::SaturatingRoundingDoublingHighMul; + return SaturatingRoundingDoublingHighMul(x * (1 << left_shift), + quantized_multiplier); +} + +TFLITE_NOINLINE int32_t MultiplyByQuantizedMultiplier( + int32_t x, int32_t quantized_multiplier, int shift); + +TFLITE_NOINLINE int32_t MultiplyByQuantizedMultiplier( + int64_t x, int32_t quantized_multiplier, int shift); + +#ifdef USE_NEON +// Round uses ARM's rounding shift right. +inline int32x4x4_t MultiplyByQuantizedMultiplier4Rows( + int32x4x4_t input_val, int32_t quantized_multiplier, int shift) { + const int left_shift = std::max(shift, 0); + const int right_shift = std::min(shift, 0); + int32x4x4_t result; + + int32x4_t multiplier_dup = vdupq_n_s32(quantized_multiplier); + int32x4_t left_shift_dup = vdupq_n_s32(left_shift); + int32x4_t right_shift_dup = vdupq_n_s32(right_shift); + + result.val[0] = + vrshlq_s32(vqrdmulhq_s32(vshlq_s32(input_val.val[0], left_shift_dup), + multiplier_dup), + right_shift_dup); + + result.val[1] = + vrshlq_s32(vqrdmulhq_s32(vshlq_s32(input_val.val[1], left_shift_dup), + multiplier_dup), + right_shift_dup); + + result.val[2] = + vrshlq_s32(vqrdmulhq_s32(vshlq_s32(input_val.val[2], left_shift_dup), + multiplier_dup), + right_shift_dup); + + result.val[3] = + vrshlq_s32(vqrdmulhq_s32(vshlq_s32(input_val.val[3], left_shift_dup), + multiplier_dup), + right_shift_dup); + + return result; +} +#endif // USE_NEON +#endif // TFLITE_SINGLE_ROUNDING + +template +int CountLeadingZeros(T integer_input) { + 
static_assert(std::is_unsigned::value, + "Only unsigned integer types handled."); + if (integer_input == 0) { + return std::numeric_limits::digits; + } +#if defined(__GNUC__) + if (std::is_same::value) { + return __builtin_clz(integer_input); + } else if (std::is_same::value) { + return __builtin_clzll(integer_input); + } +#endif + const T one_in_leading_positive = static_cast(1) + << (std::numeric_limits::digits - 1); + int leading_zeros = 0; + while (integer_input < one_in_leading_positive) { + integer_input <<= 1; + ++leading_zeros; + } + return leading_zeros; +} + +template +inline int CountLeadingSignBits(T integer_input) { + static_assert(std::is_signed::value, "Only signed integer types handled."); +#if defined(__GNUC__) && !defined(__clang__) + return integer_input ? __builtin_clrsb(integer_input) + : std::numeric_limits::digits; +#else + using U = typename std::make_unsigned::type; + return integer_input >= 0 + ? CountLeadingZeros(static_cast(integer_input)) - 1 + : integer_input != std::numeric_limits::min() + ? CountLeadingZeros(2 * static_cast(-integer_input) - 1) + : 0; +#endif +} + +// Use "count leading zeros" helper functions to do a fast Floor(log_2(x)). +template +inline Integer FloorLog2(Integer n) { + static_assert(std::is_integral::value, ""); + static_assert(std::is_signed::value, ""); + static_assert(sizeof(Integer) == 4 || sizeof(Integer) == 8, ""); + TFLITE_CHECK_GT(n, 0); + if (sizeof(Integer) == 4) { + return 30 - CountLeadingSignBits(n); + } else { + return 62 - CountLeadingSignBits(n); + } +} + +namespace detail { + +// LUTPopulate takes an optional type-erased transform_params to allow passing +// extra parameters to the transform function pointer. 
const void* is used +// instead of std::function to be compatible with TFLite Micro +template +inline typename std::enable_if::value, + FloatT>::type +LUTTransform(Func transform, const void* /*transform_params*/, FloatT value) { + static_assert(std::is_floating_point::value, + "FloatT must be a floating-point type."); + return transform(value); +} + +template +inline typename std::enable_if< + std::is_same::value, FloatT>::type +LUTTransform(Func transform, const void* transform_params, FloatT value) { + static_assert(std::is_floating_point::value, + "FloatT must be a floating-point type."); + return transform(value, transform_params); +} + +// Use the same LUT generation code for both uint8_t and int8_t. Int8_t indexes +// will be directly casted to uint8_t, the int8 LUT will thus be ordered as [0, +// 1, ..., 127, -128, ..., -2, -1] instead of [-128, -127, ..., -1, 0, 1, ..., +// 126, 127]. +template +inline void LUTPopulateInt8(float input_scale, int32_t input_zero_point, + float output_scale, int32_t output_zero_point, + Func transform, const void* transform_params, + T* lut) { + static_assert( + std::is_same::value || std::is_same::value, + "T must be an uint8 or int8 type."); + uint8_t* lut_uint8 = reinterpret_cast(lut); + const float inverse_scale = 1 / output_scale; + int32_t maxval = std::numeric_limits::max(); + int32_t minval = std::numeric_limits::min(); + for (int32_t val = minval; val <= maxval; ++val) { + const float dequantized = input_scale * (val - input_zero_point); + const float transformed = + LUTTransform(transform, transform_params, dequantized); + const float rescaled = TfLiteRound(transformed * inverse_scale); + const int32_t quantized = + static_cast(rescaled + output_zero_point); + lut_uint8[static_cast(static_cast(val))] = static_cast( + static_cast(std::max(std::min(maxval, quantized), minval))); + } +} + +// Keep floating-point type configurable for backward compatibility. float +// should be used for FloatT by default. 
+template +inline void LUTPopulateInt16(FloatT input_scale, int32_t input_zero_point, + FloatT output_scale, int32_t output_zero_point, + Func transform, const void* transform_params, + int16_t* lut) { + static_assert(std::is_floating_point::value, + "FloatT must be a floating-point type."); + const FloatT input_min = + input_scale * (std::numeric_limits::min() - input_zero_point); + const FloatT input_max = + input_scale * (std::numeric_limits::max() - input_zero_point); + const FloatT output_min = + output_scale * (std::numeric_limits::min() - output_zero_point); + const FloatT output_max = + output_scale * (std::numeric_limits::max() - output_zero_point); + + const int nb_steps = 512; + const FloatT step = (input_max - input_min) / nb_steps; + const FloatT half_step = step / 2; + const FloatT output_scaling_inv = + static_cast(std::numeric_limits::max() - + std::numeric_limits::min() + 1) / + (output_max - output_min); + const FloatT table_min = + static_cast(std::numeric_limits::min()); + const FloatT table_max = + static_cast(std::numeric_limits::max()); + + for (int i = 0; i < nb_steps; i++) { + const FloatT val = + LUTTransform(transform, transform_params, input_min + i * step); + const FloatT val_midpoint = LUTTransform( + transform, transform_params, input_min + i * step + half_step); + const FloatT val_next = LUTTransform(transform, transform_params, + input_min + (i + 1) * step); + + const FloatT sample_val = TfLiteRound(val * output_scaling_inv); + const FloatT midpoint_interp_val = + TfLiteRound((val_next * output_scaling_inv + + TfLiteRound(val * output_scaling_inv)) / + 2); + const FloatT midpoint_val = TfLiteRound(val_midpoint * output_scaling_inv); + const FloatT midpoint_err = midpoint_interp_val - midpoint_val; + const FloatT bias = TfLiteRound(midpoint_err / 2); + + lut[i] = static_cast(std::min( + std::max(sample_val - bias, table_min), table_max)); + } + + lut[nb_steps] = static_cast(std::min( + std::max(TfLiteRound(LUTTransform( + transform, 
transform_params, input_max) * + output_scaling_inv), + table_min), + table_max)); +} + +} // namespace detail + +template +inline typename std::enable_if::value || + std::is_same::value, + void>::type +LUTPopulate(float input_scale, int32_t input_zero_point, float output_scale, + int32_t output_zero_point, float (*transform)(float), T* lut) { + detail::LUTPopulateInt8(input_scale, input_zero_point, output_scale, + output_zero_point, transform, nullptr, lut); +} + +template +inline typename std::enable_if::value || + std::is_same::value, + void>::type +LUTPopulate(float input_scale, int32_t input_zero_point, float output_scale, + int32_t output_zero_point, float (*transform)(float, const void*), + const void* transform_params, T* lut) { + detail::LUTPopulateInt8(input_scale, input_zero_point, output_scale, + output_zero_point, transform, transform_params, lut); +} + +template +inline typename std::enable_if::value, void>::type +LUTPopulate(float input_scale, int32_t input_zero_point, float output_scale, + int32_t output_zero_point, float (*transform)(float), T* lut) { + detail::LUTPopulateInt16(input_scale, input_zero_point, output_scale, + output_zero_point, transform, nullptr, lut); +} + +template +inline typename std::enable_if::value, void>::type +LUTPopulate(float input_scale, int32_t input_zero_point, float output_scale, + int32_t output_zero_point, float (*transform)(float, const void*), + const void* transform_params, T* lut) { + detail::LUTPopulateInt16(input_scale, input_zero_point, output_scale, + output_zero_point, transform, + transform_params, lut); +} + +// Deprecated, avoid usage and prefer the float version. Kept for +// backward-compatiblity. 
+template +inline typename std::enable_if::value, void>::type +LUTPopulate(double input_scale, int32_t input_zero_point, double output_scale, + int32_t output_zero_point, double (*transform)(double), T* lut) { + detail::LUTPopulateInt16(input_scale, input_zero_point, output_scale, + output_zero_point, transform, nullptr, lut); +} + +// The size of the LUT depends on the type of input. For uint8 and int8 inputs a +// simple 256 entries LUT is used. For int16 inputs the high 9 bits are used for +// indexing and the 7 remaining bits are used for interpolation. We thus use a +// 513-entries LUT for int16 cases, 512 for the 9-bit indexing and 1 extra entry +// to interpolate the last value. +template +constexpr int LUTSize() { + static_assert(std::is_same::value || + std::is_same::value || + std::is_same::value, + "Only LUTs with uint8, int8 or int16 inputs are supported."); + // As per c++11: constexpr methods cannot have more than one return statement. + return (std::is_same::value || std::is_same::value) + ? 256 + : 513; +} + +// int16_t -> int16_t table lookup with interpolation +// LUT must have 513 values +inline int16_t LUTLookup(int16_t value, const int16_t* lut) { + // 512 base values, lut[513] is only used to calculate the slope + const uint16_t index = static_cast(256 + (value >> 7)); + assert(index < 512 && "LUT index out of range."); + const int16_t offset = value & 0x7f; + + // Base and slope are Q0.x + const int16_t base = lut[index]; + const int16_t slope = lut[index + 1] - lut[index]; + + // Q0.x * Q0.7 = Q0.(x + 7) + // Round and convert from Q0.(x + 7) to Q0.x + const int delta = (slope * offset + 64) >> 7; + + // Q0.15 + Q0.15 + return static_cast(base + delta); +} + +// int8_t -> int8_t table lookup without interpolation +// LUT must have 256 values +// LUTPopulate has ordered the LUT so that indexing it with an +// int8_t is just done by casting it to an uint8_t. 
+inline int8_t LUTLookup(int8_t value, const int8_t* lut) { + return lut[static_cast(value)]; +} + +// uint8_t -> uint8_t table lookup without interpolation +// LUT must have 256 values +inline uint8_t LUTLookup(uint8_t value, const uint8_t* lut) { + return lut[value]; +} + +// Table of sigmoid(i/24) at 0.16 format - 256 elements. + +// We use combined sigmoid and tanh look-up table, since +// tanh(x) = 2*sigmoid(2*x) -1. +// Both functions are symmetric, so the LUT table is only needed +// for the absolute value of the input. +static const uint16_t sigmoid_table_uint16[256] = { + 32768, 33451, 34133, 34813, 35493, 36169, 36843, 37513, 38180, 38841, 39498, + 40149, 40794, 41432, 42064, 42688, 43304, 43912, 44511, 45102, 45683, 46255, + 46817, 47369, 47911, 48443, 48964, 49475, 49975, 50464, 50942, 51409, 51865, + 52311, 52745, 53169, 53581, 53983, 54374, 54755, 55125, 55485, 55834, 56174, + 56503, 56823, 57133, 57433, 57724, 58007, 58280, 58544, 58800, 59048, 59288, + 59519, 59743, 59959, 60168, 60370, 60565, 60753, 60935, 61110, 61279, 61441, + 61599, 61750, 61896, 62036, 62172, 62302, 62428, 62549, 62666, 62778, 62886, + 62990, 63090, 63186, 63279, 63368, 63454, 63536, 63615, 63691, 63765, 63835, + 63903, 63968, 64030, 64090, 64148, 64204, 64257, 64308, 64357, 64405, 64450, + 64494, 64536, 64576, 64614, 64652, 64687, 64721, 64754, 64786, 64816, 64845, + 64873, 64900, 64926, 64950, 64974, 64997, 65019, 65039, 65060, 65079, 65097, + 65115, 65132, 65149, 65164, 65179, 65194, 65208, 65221, 65234, 65246, 65258, + 65269, 65280, 65291, 65301, 65310, 65319, 65328, 65337, 65345, 65352, 65360, + 65367, 65374, 65381, 65387, 65393, 65399, 65404, 65410, 65415, 65420, 65425, + 65429, 65433, 65438, 65442, 65445, 65449, 65453, 65456, 65459, 65462, 65465, + 65468, 65471, 65474, 65476, 65479, 65481, 65483, 65485, 65488, 65489, 65491, + 65493, 65495, 65497, 65498, 65500, 65501, 65503, 65504, 65505, 65507, 65508, + 65509, 65510, 65511, 65512, 65513, 65514, 65515, 65516, 65517, 
65517, 65518, + 65519, 65520, 65520, 65521, 65522, 65522, 65523, 65523, 65524, 65524, 65525, + 65525, 65526, 65526, 65526, 65527, 65527, 65528, 65528, 65528, 65529, 65529, + 65529, 65529, 65530, 65530, 65530, 65530, 65531, 65531, 65531, 65531, 65531, + 65532, 65532, 65532, 65532, 65532, 65532, 65533, 65533, 65533, 65533, 65533, + 65533, 65533, 65533, 65534, 65534, 65534, 65534, 65534, 65534, 65534, 65534, + 65534, 65534, 65535}; + +// TODO(b/77858996): Add these to gemmlowp. +template +IntegerType SaturatingAddNonGemmlowp(IntegerType a, IntegerType b) { + static_assert(std::is_same::value, "unimplemented"); + return a; +} + +template <> +inline std::int32_t SaturatingAddNonGemmlowp(std::int32_t a, std::int32_t b) { + std::int64_t a64 = a; + std::int64_t b64 = b; + std::int64_t sum = a64 + b64; + return static_cast(std::min( + static_cast(std::numeric_limits::max()), + std::max( + static_cast(std::numeric_limits::min()), + sum))); +} + +template +gemmlowp::FixedPoint SaturatingAddNonGemmlowp( + gemmlowp::FixedPoint a, + gemmlowp::FixedPoint b) { + return gemmlowp::FixedPoint::FromRaw( + SaturatingAddNonGemmlowp(a.raw(), b.raw())); +} + +template +IntegerType SaturatingSub(IntegerType a, IntegerType b) { + static_assert(std::is_same::value, "unimplemented"); + return a; +} + +template <> +inline std::int16_t SaturatingSub(std::int16_t a, std::int16_t b) { + std::int32_t a32 = a; + std::int32_t b32 = b; + std::int32_t diff = a32 - b32; + return static_cast( + std::min(static_cast(32767), + std::max(static_cast(-32768), diff))); +} + +template <> +inline std::int32_t SaturatingSub(std::int32_t a, std::int32_t b) { + std::int64_t a64 = a; + std::int64_t b64 = b; + std::int64_t diff = a64 - b64; + return static_cast(std::min( + static_cast(std::numeric_limits::max()), + std::max( + static_cast(std::numeric_limits::min()), + diff))); +} + +template +gemmlowp::FixedPoint SaturatingSub( + gemmlowp::FixedPoint a, + gemmlowp::FixedPoint b) { + return 
gemmlowp::FixedPoint::FromRaw( + SaturatingSub(a.raw(), b.raw())); +} +// End section to be moved to gemmlowp. + +template +IntegerType SaturatingRoundingMultiplyByPOTParam(IntegerType x, int exponent) { + if (exponent == 0) { + return x; + } + using ScalarIntegerType = + typename gemmlowp::FixedPointRawTypeTraits::ScalarRawType; + const IntegerType min = + gemmlowp::Dup(std::numeric_limits::min()); + const IntegerType max = + gemmlowp::Dup(std::numeric_limits::max()); + const int ScalarIntegerTypeBits = 8 * sizeof(ScalarIntegerType); + + const std::int32_t threshold = + ((1 << (ScalarIntegerTypeBits - 1 - exponent)) - 1); + const IntegerType positive_mask = + gemmlowp::MaskIfGreaterThan(x, gemmlowp::Dup(threshold)); + const IntegerType negative_mask = + gemmlowp::MaskIfLessThan(x, gemmlowp::Dup(-threshold)); + + IntegerType result = gemmlowp::ShiftLeft(x, exponent); + result = gemmlowp::SelectUsingMask(positive_mask, max, result); + result = gemmlowp::SelectUsingMask(negative_mask, min, result); + return result; +} + +// If we want to leave IntegerBits fixed, then multiplication +// by a power of two has to be saturating/rounding, not exact anymore. +template +gemmlowp::FixedPoint +SaturatingRoundingMultiplyByPOTParam( + gemmlowp::FixedPoint a, int exponent) { + return gemmlowp::FixedPoint::FromRaw( + SaturatingRoundingMultiplyByPOTParam(a.raw(), exponent)); +} + +// Convert int32_t multiplier to int16_t with rounding. 
+inline void DownScaleInt32ToInt16Multiplier(int32_t multiplier_int32_t, + int16_t* multiplier_int16_t) { + TFLITE_DCHECK_GE(multiplier_int32_t, 0); + static constexpr int32_t kRoundingOffset = 1 << 15; + if (multiplier_int32_t >= + std::numeric_limits::max() - kRoundingOffset) { + *multiplier_int16_t = std::numeric_limits::max(); + return; + } + const int32_t result = (multiplier_int32_t + kRoundingOffset) >> 16; + TFLITE_DCHECK_LE(result << 16, multiplier_int32_t + kRoundingOffset); + TFLITE_DCHECK_GT(result << 16, multiplier_int32_t - kRoundingOffset); + *multiplier_int16_t = result; + TFLITE_DCHECK_EQ(*multiplier_int16_t, result); +} + +// Minimum output bits to accommodate log of maximum input range. It actually +// does not matter if one considers, say, [-64,64] or [-64,64). +// +// For example, run this through Octave: +// [0:127; ... +// ceil(log(abs( log(2.^(0:127))+1 ))/log(2)); ... +// ceil(log(abs( log(2.^(0:127))+1 ))/log(2))] +constexpr int min_log_x_output_bits(int input_bits) { + return input_bits > 90 ? 7 + : input_bits > 44 ? 6 + : input_bits > 21 ? 5 + : input_bits > 10 ? 4 + : input_bits > 4 ? 3 + : input_bits > 1 ? 2 + : 1; +} + +// Although currently the name of this function says that it cannot handle +// values less than 1, in practice it can handle as low as 1/x_max, where +// x_max is the largest representable input. In other words, the output range +// is symmetric. +template +inline gemmlowp::FixedPoint +log_x_for_x_greater_than_or_equal_to_1_impl( + gemmlowp::FixedPoint input_val) { + // assert(__builtin_clz(0u) >= std::numeric_limits::digits - 1); + // assert(__builtin_clz(0u) <= std::numeric_limits::digits); + using FixedPoint0 = gemmlowp::FixedPoint; + // The reason for accumulating the result with an extra bit of headroom is + // that z_pow_2_adj * log_2 might be saturated, and adding num_scaled * + // recip_denom will otherwise introduce an error. 
+ static constexpr int kAccumIntegerBits = OutputIntegerBits + 1; + using FixedPointAccum = gemmlowp::FixedPoint; + + const FixedPoint0 log_2 = GEMMLOWP_CHECKED_FIXEDPOINT_CONSTANT( + FixedPoint0, 1488522236, std::log(2.0)); + const FixedPoint0 sqrt_sqrt_half = GEMMLOWP_CHECKED_FIXEDPOINT_CONSTANT( + FixedPoint0, 1805811301, std::sqrt(std::sqrt(0.5))); + const FixedPoint0 sqrt_half = GEMMLOWP_CHECKED_FIXEDPOINT_CONSTANT( + FixedPoint0, 1518500250, std::sqrt(0.5)); + const FixedPoint0 one_quarter = + GEMMLOWP_CHECKED_FIXEDPOINT_CONSTANT(FixedPoint0, 536870912, 1.0 / 4.0); + + const FixedPoint0 alpha_n = GEMMLOWP_CHECKED_FIXEDPOINT_CONSTANT( + FixedPoint0, 117049297, 11.0 / 240.0 * std::sqrt(std::sqrt(2.0))); + const FixedPoint0 alpha_d = GEMMLOWP_CHECKED_FIXEDPOINT_CONSTANT( + FixedPoint0, 127690142, 1.0 / 20.0 * std::sqrt(std::sqrt(2.0))); + const FixedPoint0 alpha_i = GEMMLOWP_CHECKED_FIXEDPOINT_CONSTANT( + FixedPoint0, 1057819769, + 2.0 / std::sqrt(std::sqrt(2.0)) - std::sqrt(std::sqrt(2.0))); + const FixedPoint0 alpha_f = GEMMLOWP_CHECKED_FIXEDPOINT_CONSTANT( + FixedPoint0, 638450708, 1.0 / 4.0 * std::sqrt(std::sqrt(2.0))); + + const FixedPointAccum shifted_quarter = + gemmlowp::Rescale(one_quarter); + + // Reinterpret the input value as Q0.31, because we will figure out the + // required shift "ourselves" instead of using, say, Rescale. 
+ FixedPoint0 z_a = FixedPoint0::FromRaw(input_val.raw()); + // z_a_pow_2 = input_integer_bits - z_a_headroom; + int z_a_headroom_plus_1 = CountLeadingZeros(static_cast(z_a.raw())); + FixedPoint0 r_a_tmp = + SaturatingRoundingMultiplyByPOTParam(z_a, (z_a_headroom_plus_1 - 1)); + const int32_t r_a_raw = + SaturatingRoundingMultiplyByPOTParam((r_a_tmp * sqrt_half).raw(), 1); + // z_pow_2_adj = max(z_pow_2_a - 0.75, z_pow_2_b - 0.25); + // z_pow_2_adj = max(InputIntegerBits - z_a_headroom_plus_1 + 0.25, + // InputIntegerBits - z_b_headroom - 0.25); + const FixedPointAccum z_a_pow_2_adj = SaturatingAddNonGemmlowp( + FixedPointAccum::FromRaw(SaturatingRoundingMultiplyByPOTParam( + static_cast(InputIntegerBits - z_a_headroom_plus_1), + 31 - kAccumIntegerBits)), + shifted_quarter); + + // z_b is treated like z_a, but premultiplying by sqrt(0.5). + FixedPoint0 z_b = z_a * sqrt_half; + int z_b_headroom = CountLeadingZeros(static_cast(z_b.raw())) - 1; + const int32_t r_b_raw = + SaturatingRoundingMultiplyByPOTParam(z_a.raw(), z_b_headroom); + const FixedPointAccum z_b_pow_2_adj = SaturatingSub( + FixedPointAccum::FromRaw(SaturatingRoundingMultiplyByPOTParam( + static_cast(InputIntegerBits - z_b_headroom), + 31 - kAccumIntegerBits)), + shifted_quarter); + + const FixedPoint0 r = FixedPoint0::FromRaw(std::min(r_a_raw, r_b_raw)); + const FixedPointAccum z_pow_2_adj = FixedPointAccum::FromRaw( + std::max(z_a_pow_2_adj.raw(), z_b_pow_2_adj.raw())); + + const FixedPoint0 p = gemmlowp::RoundingHalfSum(r, sqrt_sqrt_half); + FixedPoint0 q = r - sqrt_sqrt_half; + q = q + q; + + const FixedPoint0 common_sq = q * q; + const FixedPoint0 num = q * r + q * common_sq * alpha_n; + const FixedPoint0 denom_minus_one_0 = + p * (alpha_i + q + alpha_d * common_sq) + alpha_f * q; + const FixedPoint0 recip_denom = + one_over_one_plus_x_for_x_in_0_1(denom_minus_one_0); + + const FixedPointAccum num_scaled = gemmlowp::Rescale(num); + return gemmlowp::Rescale(z_pow_2_adj * log_2 + + num_scaled * 
recip_denom); +} + +template +inline gemmlowp::FixedPoint +log_x_for_x_greater_than_or_equal_to_1( + gemmlowp::FixedPoint input_val) { + static_assert( + OutputIntegerBits >= min_log_x_output_bits(InputIntegerBits), + "Output integer bits must be sufficient to accommodate logs of inputs."); + return log_x_for_x_greater_than_or_equal_to_1_impl( + input_val); +} + +inline int32_t GetReciprocal(int32_t x, int x_integer_digits, + int* num_bits_over_unit) { + int headroom_plus_one = CountLeadingZeros(static_cast(x)); + // This is the number of bits to the left of the binary point above 1.0. + // Consider x=1.25. In that case shifted_scale=0.8 and + // no later adjustment will be needed. + *num_bits_over_unit = x_integer_digits - headroom_plus_one; + const int32_t shifted_sum_minus_one = + static_cast((static_cast(x) << headroom_plus_one) - + (static_cast(1) << 31)); + + gemmlowp::FixedPoint shifted_scale = + gemmlowp::one_over_one_plus_x_for_x_in_0_1( + gemmlowp::FixedPoint::FromRaw(shifted_sum_minus_one)); + return shifted_scale.raw(); +} + +inline void GetInvSqrtQuantizedMultiplierExp(int32_t input, int reverse_shift, + int32_t* output_inv_sqrt, + int* output_shift) { + TFLITE_DCHECK_GE(input, 0); + if (input <= 1) { + // Handle the input value 1 separately to avoid overflow in that case + // in the general computation below (b/143972021). Also handle 0 as if it + // were a 1. 0 is an invalid input here (divide by zero) and 1 is a valid + // but rare/unrealistic input value. We can expect both to occur in some + // incompletely trained models, but probably not in fully trained models. 
+ *output_inv_sqrt = std::numeric_limits::max(); + *output_shift = 0; + return; + } + TFLITE_DCHECK_GT(input, 1); + *output_shift = 11; + while (input >= (1 << 29)) { + input /= 4; + ++*output_shift; + } + const unsigned max_left_shift_bits = + CountLeadingZeros(static_cast(input)) - 1; + const unsigned max_left_shift_bit_pairs = max_left_shift_bits / 2; + const unsigned left_shift_bit_pairs = max_left_shift_bit_pairs - 1; + *output_shift -= left_shift_bit_pairs; + input <<= 2 * left_shift_bit_pairs; + TFLITE_DCHECK_GE(input, (1 << 27)); + TFLITE_DCHECK_LT(input, (1 << 29)); + using gemmlowp::FixedPoint; + using gemmlowp::Rescale; + using gemmlowp::SaturatingRoundingMultiplyByPOT; + // Using 3 integer bits gives us enough room for the internal arithmetic in + // this Newton-Raphson iteration. + using F3 = FixedPoint; + using F0 = FixedPoint; + const F3 fixedpoint_input = F3::FromRaw(input >> 1); + const F3 fixedpoint_half_input = + SaturatingRoundingMultiplyByPOT<-1>(fixedpoint_input); + const F3 fixedpoint_half_three = + GEMMLOWP_CHECKED_FIXEDPOINT_CONSTANT(F3, (1 << 28) + (1 << 27), 1.5); + // Newton-Raphson iteration + // Naive unoptimized starting guess: x = 1 + F3 x = F3::One(); + // Naive unoptimized number of iterations: 5 + for (int i = 0; i < 5; i++) { + const F3 x3 = Rescale<3>(x * x * x); + x = Rescale<3>(fixedpoint_half_three * x - fixedpoint_half_input * x3); + } + const F0 fixedpoint_half_sqrt_2 = + GEMMLOWP_CHECKED_FIXEDPOINT_CONSTANT(F0, 1518500250, std::sqrt(2.) / 2.); + x = x * fixedpoint_half_sqrt_2; + *output_inv_sqrt = x.raw(); + if (*output_shift < 0) { + *output_inv_sqrt <<= -*output_shift; + *output_shift = 0; + } + // Convert right shift (right is positive) to left shift. + *output_shift *= reverse_shift; +} + +// DO NOT USE THIS STRUCT FOR NEW FUNCTIONALITY BEYOND IMPLEMENTING +// BROADCASTING. +// +// NdArrayDesc describes the shape and memory layout of an N-dimensional +// rectangular array of numbers. 
+// +// NdArrayDesc is basically identical to Dims defined in types.h. +// However, as Dims is to be deprecated, this class exists as an adaptor +// to enable simple unoptimized implementations of element-wise broadcasting +// operations. +template +struct NdArrayDesc { + // The "extent" of each dimension. Indices along dimension d must be in the + // half-open interval [0, extents[d]). + int extents[N]; + + // The number of *elements* (not bytes) between consecutive indices of each + // dimension. + int strides[N]; +}; + +// DO NOT USE THIS FUNCTION FOR NEW FUNCTIONALITY BEYOND IMPLEMENTING +// BROADCASTING. +// +// Same as Offset(), except takes as NdArrayDesc instead of Dims. +inline int SubscriptToIndex(const NdArrayDesc<4>& desc, int i0, int i1, int i2, + int i3) { + TFLITE_DCHECK(i0 >= 0 && i0 < desc.extents[0]); + TFLITE_DCHECK(i1 >= 0 && i1 < desc.extents[1]); + TFLITE_DCHECK(i2 >= 0 && i2 < desc.extents[2]); + TFLITE_DCHECK(i3 >= 0 && i3 < desc.extents[3]); + return i0 * desc.strides[0] + i1 * desc.strides[1] + i2 * desc.strides[2] + + i3 * desc.strides[3]; +} + +inline int SubscriptToIndex(const NdArrayDesc<5>& desc, int indexes[5]) { + return indexes[0] * desc.strides[0] + indexes[1] * desc.strides[1] + + indexes[2] * desc.strides[2] + indexes[3] * desc.strides[3] + + indexes[4] * desc.strides[4]; +} + +inline int SubscriptToIndex(const NdArrayDesc<8>& desc, int indexes[8]) { + return indexes[0] * desc.strides[0] + indexes[1] * desc.strides[1] + + indexes[2] * desc.strides[2] + indexes[3] * desc.strides[3] + + indexes[4] * desc.strides[4] + indexes[5] * desc.strides[5] + + indexes[6] * desc.strides[6] + indexes[7] * desc.strides[7]; +} + +// Given the dimensions of the operands for an element-wise binary broadcast, +// adjusts them so that they can be directly iterated over with simple loops. +// Returns the adjusted dims as instances of NdArrayDesc in 'desc0_out' and +// 'desc1_out'. 'desc0_out' and 'desc1_out' cannot be nullptr. 
+// +// This function assumes that the two input shapes are compatible up to +// broadcasting and the shorter one has already been prepended with 1s to be the +// same length. E.g., if shape0 is (1, 16, 16, 64) and shape1 is (1, 64), +// shape1 must already have been prepended to be (1, 1, 1, 64). Recall that +// Dims refer to shapes in reverse order. In this case, input0_dims will be +// (64, 16, 16, 1) and input1_dims will be (64, 1, 1, 1). +// +// When two shapes are compatible up to broadcasting, for each dimension d, +// the input extents are either equal, or one of them is 1. +// +// This function performs the following for each dimension d: +// - If the extents are equal, then do nothing since the loop that walks over +// both of the input arrays is correct. +// - Otherwise, one (and only one) of the extents must be 1. Say extent0 is 1 +// and extent1 is e1. Then set extent0 to e1 and stride0 *to 0*. This allows +// array0 to be referenced *at any index* in dimension d and still access the +// same slice. +template +inline void NdArrayDescsForElementwiseBroadcast(const Dims& input0_dims, + const Dims& input1_dims, + NdArrayDesc* desc0_out, + NdArrayDesc* desc1_out) { + TFLITE_DCHECK(desc0_out != nullptr); + TFLITE_DCHECK(desc1_out != nullptr); + + // Copy dims to desc. + for (int i = 0; i < N; ++i) { + desc0_out->extents[i] = input0_dims.sizes[i]; + desc0_out->strides[i] = input0_dims.strides[i]; + desc1_out->extents[i] = input1_dims.sizes[i]; + desc1_out->strides[i] = input1_dims.strides[i]; + } + + // Walk over each dimension. If the extents are equal do nothing. + // Otherwise, set the desc with extent 1 to have extent equal to the other and + // stride 0. 
+ for (int i = 0; i < N; ++i) { + const int extent0 = ArraySize(input0_dims, i); + const int extent1 = ArraySize(input1_dims, i); + if (extent0 != extent1) { + if (extent0 == 1) { + desc0_out->strides[i] = 0; + desc0_out->extents[i] = extent1; + } else { + TFLITE_DCHECK_EQ(extent1, 1); + desc1_out->strides[i] = 0; + desc1_out->extents[i] = extent0; + } + } + } +} + +// Copies dims to desc, calculating strides. +template +TFLITE_NOINLINE void CopyDimsToDesc(const RuntimeShape& input_shape, + NdArrayDesc* desc_out) { + int desc_stride = 1; + for (int i = N - 1; i >= 0; --i) { + desc_out->extents[i] = input_shape.Dims(i); + desc_out->strides[i] = desc_stride; + desc_stride *= input_shape.Dims(i); + } +} + +template +inline void NdArrayDescsForElementwiseBroadcast( + const RuntimeShape& input0_shape, const RuntimeShape& input1_shape, + NdArrayDesc* desc0_out, NdArrayDesc* desc1_out) { + TFLITE_DCHECK(desc0_out != nullptr); + TFLITE_DCHECK(desc1_out != nullptr); + + auto extended_input0_shape = RuntimeShape::ExtendedShape(N, input0_shape); + auto extended_input1_shape = RuntimeShape::ExtendedShape(N, input1_shape); + + // Copy dims to desc, calculating strides. + CopyDimsToDesc(extended_input0_shape, desc0_out); + CopyDimsToDesc(extended_input1_shape, desc1_out); + + // Walk over each dimension. If the extents are equal do nothing. + // Otherwise, set the desc with extent 1 to have extent equal to the other and + // stride 0. 
+ for (int i = 0; i < N; ++i) { + const int extent0 = extended_input0_shape.Dims(i); + const int extent1 = extended_input1_shape.Dims(i); + if (extent0 != extent1) { + if (extent0 == 1) { + desc0_out->strides[i] = 0; + desc0_out->extents[i] = extent1; + } else { + TFLITE_DCHECK_EQ(extent1, 1); + desc1_out->strides[i] = 0; + desc1_out->extents[i] = extent0; + } + } + } +} + +template +inline void NdArrayDescsForElementwiseBroadcast( + const RuntimeShape& input0_shape, const RuntimeShape& input1_shape, + const RuntimeShape& input2_shape, NdArrayDesc* desc0_out, + NdArrayDesc* desc1_out, NdArrayDesc* desc2_out) { + TFLITE_DCHECK(desc0_out != nullptr); + TFLITE_DCHECK(desc1_out != nullptr); + TFLITE_DCHECK(desc2_out != nullptr); + + auto extended_input0_shape = RuntimeShape::ExtendedShape(N, input0_shape); + auto extended_input1_shape = RuntimeShape::ExtendedShape(N, input1_shape); + auto extended_input2_shape = RuntimeShape::ExtendedShape(N, input2_shape); + + // Copy dims to desc, calculating strides. + CopyDimsToDesc(extended_input0_shape, desc0_out); + CopyDimsToDesc(extended_input1_shape, desc1_out); + CopyDimsToDesc(extended_input2_shape, desc2_out); + + // Walk over each dimension. If the extents are equal do nothing. + // Otherwise, set the desc with extent 1 to have extent equal to the other and + // stride 0. 
+ for (int i = 0; i < N; ++i) { + const int extent0 = extended_input0_shape.Dims(i); + const int extent1 = extended_input1_shape.Dims(i); + const int extent2 = extended_input2_shape.Dims(i); + + int extent = extent0; + if (extent1 != 1) extent = extent1; + if (extent2 != 1) extent = extent2; + + TFLITE_DCHECK(extent0 == 1 || extent0 == extent); + TFLITE_DCHECK(extent1 == 1 || extent1 == extent); + TFLITE_DCHECK(extent2 == 1 || extent2 == extent); + + if (!(extent0 == extent1 && extent1 == extent2)) { + if (extent0 == 1) { + desc0_out->strides[i] = 0; + desc0_out->extents[i] = extent; + } + if (extent1 == 1) { + desc1_out->strides[i] = 0; + desc1_out->extents[i] = extent; + } + if (extent2 == 1) { + desc2_out->strides[i] = 0; + desc2_out->extents[i] = extent; + } + } + } +} + +// Detailed implementation of NDOpsHelper, the indexes must be a zero array. +// This implementation is equivalent to N nested loops. Ex, if N=4, it can be +// re-writen as: +// for (int b = 0; b < output.extents[0]; ++b) { +// for (int y = 0; y < output.extents[1]; ++y) { +// for (int x = 0; x < output.extents[2]; ++x) { +// for (int c = 0; c < output.extents[3]; ++c) { +// calc({b,y,x,c}); +// } +// } +// } +// } +template +typename std::enable_if::type NDOpsHelperImpl( + const NdArrayDesc& output, const Calc& calc, int indexes[N]) { + for (indexes[DIM] = 0; indexes[DIM] < output.extents[DIM]; ++indexes[DIM]) { + NDOpsHelperImpl(output, calc, indexes); + } +} + +template +typename std::enable_if::type NDOpsHelperImpl( + const NdArrayDesc& output, const Calc& calc, int indexes[N]) { + for (indexes[DIM] = 0; indexes[DIM] < output.extents[DIM]; ++indexes[DIM]) { + calc(indexes); + } +} + +// Execute the calc function in the innermost iteration based on the shape of +// the output. The calc function should take a single argument of type int[N]. 
+template +inline void NDOpsHelper(const NdArrayDesc& output, const Calc& calc) { + int indexes[N] = {0}; + NDOpsHelperImpl(output, calc, indexes); +} +// Copied from gemmlowp::RoundDown when we dropped direct dependency on +// gemmlowp. +// +// Returns the runtime argument rounded down to the nearest multiple of +// the fixed Modulus. +template +Integer RoundDown(Integer i) { + return i - (i % Modulus); +} + +// Copied from gemmlowp::RoundUp when we dropped direct dependency on +// gemmlowp. +// +// Returns the runtime argument rounded up to the nearest multiple of +// the fixed Modulus. +template +Integer RoundUp(Integer i) { + return RoundDown(i + Modulus - 1); +} + +// Copied from gemmlowp::CeilQuotient when we dropped direct dependency on +// gemmlowp. +// +// Returns the quotient a / b rounded up ('ceil') to the nearest integer. +template +Integer CeilQuotient(Integer a, Integer b) { + return (a + b - 1) / b; +} + +// This function is a copy of gemmlowp::HowManyThreads, copied when we dropped +// the direct dependency of internal/optimized/ on gemmlowp. +// +// It computes a reasonable number of threads to use for a GEMM of shape +// (rows, cols, depth). +// +// TODO(b/131910176): get rid of this function by switching each call site +// to its own more sensible logic for its own workload. +template +inline int LegacyHowManyThreads(int max_num_threads, int rows, int cols, + int depth) { + // Early-exit in the default case where multi-threading is disabled. + if (max_num_threads == 1) { + return 1; + } + + // Ensure that each thread has KernelRows rows to process, if at all possible. + int thread_count = std::min(max_num_threads, rows / KernelRows); + + // Limit the number of threads according to the overall size of the problem. + if (thread_count > 1) { + // Empirically determined value. 
+ static constexpr std::uint64_t min_cubic_size_per_thread = 64 * 1024; + + // We can only multiply two out of three sizes without risking overflow + const std::uint64_t cubic_size = + std::uint64_t(rows) * std::uint64_t(cols) * std::uint64_t(depth); + + thread_count = std::min( + thread_count, static_cast(cubic_size / min_cubic_size_per_thread)); + } + + if (thread_count < 1) { + thread_count = 1; + } + + assert(thread_count > 0 && thread_count <= max_num_threads); + return thread_count; +} + +template +void optimized_ops_preload_l1_stream(const T* ptr) { +#ifdef __GNUC__ + // builtin offered by GCC-compatible compilers including clang + __builtin_prefetch(ptr, /* 0 means read */ 0, /* 0 means no locality */ 0); +#else + (void)ptr; +#endif +} + +template +void optimized_ops_preload_l1_keep(const T* ptr) { +#ifdef __GNUC__ + // builtin offered by GCC-compatible compilers including clang + __builtin_prefetch(ptr, /* 0 means read */ 0, /* 3 means high locality */ 3); +#else + (void)ptr; +#endif +} + +template +void optimized_ops_prefetch_write_l1_keep(const T* ptr) { +#ifdef __GNUC__ + // builtin offered by GCC-compatible compilers including clang + __builtin_prefetch(ptr, /* 1 means write */ 1, /* 3 means high locality */ 3); +#else + (void)ptr; +#endif +} + +} // namespace tflite + +#endif // TENSORFLOW_LITE_KERNELS_INTERNAL_COMMON_H_ diff --git a/tensorflow/lite/kernels/internal/compatibility.h b/tensorflow/lite/kernels/internal/compatibility.h new file mode 100644 index 0000000..7ba66ed --- /dev/null +++ b/tensorflow/lite/kernels/internal/compatibility.h @@ -0,0 +1,122 @@ +/* Copyright 2017 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. 
+You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ +#ifndef TENSORFLOW_LITE_KERNELS_INTERNAL_COMPATIBILITY_H_ +#define TENSORFLOW_LITE_KERNELS_INTERNAL_COMPATIBILITY_H_ + +#include + +#include "tensorflow/lite/kernels/op_macros.h" + +#ifndef TFLITE_DCHECK +#define TFLITE_DCHECK(condition) (condition) ? (void)0 : TFLITE_ASSERT_FALSE +#endif + +#ifndef TFLITE_DCHECK_EQ +#define TFLITE_DCHECK_EQ(x, y) ((x) == (y)) ? (void)0 : TFLITE_ASSERT_FALSE +#endif + +#ifndef TFLITE_DCHECK_NE +#define TFLITE_DCHECK_NE(x, y) ((x) != (y)) ? (void)0 : TFLITE_ASSERT_FALSE +#endif + +#ifndef TFLITE_DCHECK_GE +#define TFLITE_DCHECK_GE(x, y) ((x) >= (y)) ? (void)0 : TFLITE_ASSERT_FALSE +#endif + +#ifndef TFLITE_DCHECK_GT +#define TFLITE_DCHECK_GT(x, y) ((x) > (y)) ? (void)0 : TFLITE_ASSERT_FALSE +#endif + +#ifndef TFLITE_DCHECK_LE +#define TFLITE_DCHECK_LE(x, y) ((x) <= (y)) ? (void)0 : TFLITE_ASSERT_FALSE +#endif + +#ifndef TFLITE_DCHECK_LT +#define TFLITE_DCHECK_LT(x, y) ((x) < (y)) ? (void)0 : TFLITE_ASSERT_FALSE +#endif + +// TODO(ahentz): Clean up: We should stick to the DCHECK versions. +#ifndef TFLITE_CHECK +#define TFLITE_CHECK(condition) (condition) ? (void)0 : TFLITE_ABORT +#endif + +#ifndef TFLITE_CHECK_EQ +#define TFLITE_CHECK_EQ(x, y) ((x) == (y)) ? (void)0 : TFLITE_ABORT +#endif + +#ifndef TFLITE_CHECK_NE +#define TFLITE_CHECK_NE(x, y) ((x) != (y)) ? (void)0 : TFLITE_ABORT +#endif + +#ifndef TFLITE_CHECK_GE +#define TFLITE_CHECK_GE(x, y) ((x) >= (y)) ? 
(void)0 : TFLITE_ABORT +#endif + +#ifndef TFLITE_CHECK_GT +#define TFLITE_CHECK_GT(x, y) ((x) > (y)) ? (void)0 : TFLITE_ABORT +#endif + +#ifndef TFLITE_CHECK_LE +#define TFLITE_CHECK_LE(x, y) ((x) <= (y)) ? (void)0 : TFLITE_ABORT +#endif + +#ifndef TFLITE_CHECK_LT +#define TFLITE_CHECK_LT(x, y) ((x) < (y)) ? (void)0 : TFLITE_ABORT +#endif + +#ifndef TF_LITE_STATIC_MEMORY +// TODO(b/162019032): Consider removing these type-aliases. +using int8 = std::int8_t; +using uint8 = std::uint8_t; +using int16 = std::int16_t; +using uint16 = std::uint16_t; +using int32 = std::int32_t; +using uint32 = std::uint32_t; +#endif // !defined(TF_LITE_STATIC_MEMORY) + +// Allow for cross-compiler usage of function signatures - currently used for +// specifying named RUY profiler regions in templated methods. +#if defined(_MSC_VER) +#define TFLITE_PRETTY_FUNCTION __FUNCSIG__ +#elif defined(__GNUC__) +#define TFLITE_PRETTY_FUNCTION __PRETTY_FUNCTION__ +#else +#define TFLITE_PRETTY_FUNCTION __func__ +#endif + +// TFLITE_DEPRECATED() +// +// Duplicated from absl/base/macros.h to avoid pulling in that library. +// Marks a deprecated class, struct, enum, function, method and variable +// declarations. The macro argument is used as a custom diagnostic message (e.g. +// suggestion of a better alternative). +// +// Example: +// +// class TFLITE_DEPRECATED("Use Bar instead") Foo {...}; +// TFLITE_DEPRECATED("Use Baz instead") void Bar() {...} +// +// Every usage of a deprecated entity will trigger a warning when compiled with +// clang's `-Wdeprecated-declarations` option. This option is turned off by +// default, but the warnings will be reported by clang-tidy. 
+#if defined(__clang__) && __cplusplus >= 201103L +#define TFLITE_DEPRECATED(message) __attribute__((deprecated(message))) +#endif + +#ifndef TFLITE_DEPRECATED +#define TFLITE_DEPRECATED(message) +#endif + +#endif // TENSORFLOW_LITE_KERNELS_INTERNAL_COMPATIBILITY_H_ diff --git a/tensorflow/lite/kernels/internal/cppmath.h b/tensorflow/lite/kernels/internal/cppmath.h new file mode 100644 index 0000000..c97cc31 --- /dev/null +++ b/tensorflow/lite/kernels/internal/cppmath.h @@ -0,0 +1,40 @@ +/* Copyright 2020 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/ +#ifndef TENSORFLOW_LITE_KERNELS_INTERNAL_CPPMATH_H_ +#define TENSORFLOW_LITE_KERNELS_INTERNAL_CPPMATH_H_ + +#include + +namespace tflite { + +#if defined(TF_LITE_USE_GLOBAL_CMATH_FUNCTIONS) || \ + (defined(__ANDROID__) && !defined(__NDK_MAJOR__)) || defined(__ZEPHYR__) +#define TF_LITE_GLOBAL_STD_PREFIX +#else +#define TF_LITE_GLOBAL_STD_PREFIX std +#endif + +#define DECLARE_STD_GLOBAL_SWITCH1(tf_name, std_name) \ + template \ + inline T tf_name(const T x) { \ + return TF_LITE_GLOBAL_STD_PREFIX::std_name(x); \ + } + +DECLARE_STD_GLOBAL_SWITCH1(TfLiteRound, round); +DECLARE_STD_GLOBAL_SWITCH1(TfLiteExpm1, expm1); + +} // namespace tflite + +#endif // TENSORFLOW_LITE_KERNELS_INTERNAL_CPPMATH_H_ diff --git a/tensorflow/lite/kernels/internal/max.h b/tensorflow/lite/kernels/internal/max.h new file mode 100644 index 0000000..c181002 --- /dev/null +++ b/tensorflow/lite/kernels/internal/max.h @@ -0,0 +1,35 @@ +/* Copyright 2020 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/ +#ifndef TENSORFLOW_LITE_KERNELS_INTERNAL_MAX_H_ +#define TENSORFLOW_LITE_KERNELS_INTERNAL_MAX_H_ + +#include + +namespace tflite { + +#if defined(TF_LITE_USE_GLOBAL_MAX) || defined(__ZEPHYR__) +inline float TfLiteMax(const float& x, const float& y) { + return std::max(x, y); +} +#else +template +inline T TfLiteMax(const T& x, const T& y) { + return std::fmax(x, y); +} +#endif + +} // namespace tflite + +#endif // TENSORFLOW_LITE_KERNELS_INTERNAL_MAX_H_ diff --git a/tensorflow/lite/kernels/internal/min.h b/tensorflow/lite/kernels/internal/min.h new file mode 100644 index 0000000..62035dc --- /dev/null +++ b/tensorflow/lite/kernels/internal/min.h @@ -0,0 +1,35 @@ +/* Copyright 2020 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/ +#ifndef TENSORFLOW_LITE_KERNELS_INTERNAL_MIN_H_ +#define TENSORFLOW_LITE_KERNELS_INTERNAL_MIN_H_ + +#include + +namespace tflite { + +#if defined(TF_LITE_USE_GLOBAL_MIN) || defined(__ZEPHYR__) +inline float TfLiteMin(const float& x, const float& y) { + return std::min(x, y); +} +#else +template +inline T TfLiteMin(const T& x, const T& y) { + return std::fmin(x, y); +} +#endif + +} // namespace tflite + +#endif // TENSORFLOW_LITE_KERNELS_INTERNAL_MIN_H_ diff --git a/tensorflow/lite/kernels/internal/optimized/neon_check.h b/tensorflow/lite/kernels/internal/optimized/neon_check.h new file mode 100644 index 0000000..7df1129 --- /dev/null +++ b/tensorflow/lite/kernels/internal/optimized/neon_check.h @@ -0,0 +1,20 @@ +/* Copyright 2019 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ +#ifndef TENSORFLOW_LITE_KERNELS_INTERNAL_OPTIMIZED_NEON_CHECK_H_ +#define TENSORFLOW_LITE_KERNELS_INTERNAL_OPTIMIZED_NEON_CHECK_H_ + +// TFLM does not need to utilize any Neon optimizations. 
+ +#endif // TENSORFLOW_LITE_KERNELS_INTERNAL_OPTIMIZED_NEON_CHECK_H_ diff --git a/tensorflow/lite/kernels/internal/portable_tensor.h b/tensorflow/lite/kernels/internal/portable_tensor.h new file mode 100644 index 0000000..1eee621 --- /dev/null +++ b/tensorflow/lite/kernels/internal/portable_tensor.h @@ -0,0 +1,118 @@ +/* Copyright 2017 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ +#ifndef TENSORFLOW_LITE_KERNELS_INTERNAL_PORTABLE_TENSOR_H_ +#define TENSORFLOW_LITE_KERNELS_INTERNAL_PORTABLE_TENSOR_H_ + +#include + +#include "tensorflow/lite/core/c/common.h" +#include "tensorflow/lite/kernels/internal/tensor_ctypes.h" +#include "tensorflow/lite/kernels/internal/types.h" + +namespace tflite { + +// A list of tensors in a format that can be used by kernels like split and +// concatenation. +template +class VectorOfTensors { + public: + // Build with the tensors in 'tensor_list'. 
+ VectorOfTensors(const TfLiteContext& context, + const TfLiteIntArray& tensor_list) { + int num_tensors = tensor_list.size; + + all_data_.reserve(num_tensors); + all_shape_.reserve(num_tensors); + all_shape_ptr_.reserve(num_tensors); + + for (int i = 0; i < num_tensors; ++i) { + TfLiteTensor* t = &context.tensors[tensor_list.data[i]]; + all_data_.push_back(GetTensorData(t)); + all_shape_.push_back(GetTensorShape(t)); + } + + // Taking the pointer from inside a std::vector is only OK if the vector is + // never modified, so we populate all_shape in the previous loop and then we + // are free to grab iterators here. + for (int i = 0; i < num_tensors; ++i) { + all_shape_ptr_.push_back(&all_shape_[i]); + } + } + // Return a pointer to the data pointers of all tensors in the list. For + // example: + // float* const* f = v.data(); + // f[0][1] is the second element of the first tensor. + T* const* data() const { return all_data_.data(); } + + // Return a pointer the shape pointers of all tensors in the list. For + // example: + // const RuntimeShape* const* d = v.dims(); + // dims[1] are the dimensions of the second tensor in the list. + const RuntimeShape* const* shapes() const { return all_shape_ptr_.data(); } + + private: + std::vector all_data_; + std::vector all_shape_; + std::vector all_shape_ptr_; +}; + +// A list of quantized tensors in a format that can be used by kernels like +// split and concatenation. +class VectorOfQuantizedTensors : public VectorOfTensors { + public: + // Build with the tensors in 'tensor_list'. 
+ VectorOfQuantizedTensors(const TfLiteContext& context, + const TfLiteIntArray& tensor_list) + : VectorOfTensors(context, tensor_list) { + for (int i = 0; i < tensor_list.size; ++i) { + TfLiteTensor* t = &context.tensors[tensor_list.data[i]]; + zero_point_.push_back(t->params.zero_point); + scale_.push_back(t->params.scale); + } + } + + const float* scale() const { return scale_.data(); } + const int32_t* zero_point() const { return zero_point_.data(); } + + private: + std::vector zero_point_; + std::vector scale_; +}; + +// Writes randomly accessed values from `input` sequentially into `output`. +template +class SequentialTensorWriter { + public: + SequentialTensorWriter(const TfLiteTensor* input, TfLiteTensor* output) { + input_data_ = GetTensorData(input); + output_ptr_ = GetTensorData(output); + } + SequentialTensorWriter(const T* input_data, T* output_data) + : input_data_(input_data), output_ptr_(output_data) {} + + void Write(int position) { *output_ptr_++ = input_data_[position]; } + void WriteN(int position, int len) { + memcpy(output_ptr_, &input_data_[position], sizeof(T) * len); + output_ptr_ += len; + } + + private: + const T* input_data_; + T* output_ptr_; +}; + +} // namespace tflite + +#endif // TENSORFLOW_LITE_KERNELS_INTERNAL_PORTABLE_TENSOR_H_ diff --git a/tensorflow/lite/kernels/internal/portable_tensor_utils.cc b/tensorflow/lite/kernels/internal/portable_tensor_utils.cc new file mode 100644 index 0000000..024043d --- /dev/null +++ b/tensorflow/lite/kernels/internal/portable_tensor_utils.cc @@ -0,0 +1,92 @@ +/* Copyright 2017 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. 
+You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ +#ifndef TENSORFLOW_LITE_KERNELS_INTERNAL_TENSOR_UTILS_H_ +#define TENSORFLOW_LITE_KERNELS_INTERNAL_TENSOR_UTILS_H_ + +#include "tensorflow/lite/kernels/internal/portable_tensor_utils.h" + +#include +#include +#include + +#include "tensorflow/lite/core/c/builtin_op_data.h" + +#if defined(_MSC_VER) +#define __restrict__ __restrict +#endif + +namespace tflite { + +// Not all backends support CpuBackendContext usage, so forward declare to avoid +// pulling in its implementation. Use of CpuBackendContext in method +// implementations is purely optional. +class CpuBackendContext; + +namespace tensor_utils { + +// Apply Rectified Linear to elements of a vector. 
+void ApplyReluToVector(const float* __restrict__ vector, int v_size, + float* __restrict__ result) { + for (int v = 0; v < v_size; v++) { + result[v] = std::max(0.0f, vector[v]); + } +} + +// Apply Rectified Linear 1 (cap to [-1;1]) to elements of a vector +void ApplyRelu1ToVector(const float* __restrict__ vector, int v_size, + float* __restrict__ result) { + for (int v = 0; v < v_size; v++) { + result[v] = std::max(-1.0f, std::min(vector[v], 1.0f)); + } +} + +// Apply Rectified Linear 6 (cap to [0;6]) to elements of a vector +void ApplyRelu6ToVector(const float* __restrict__ vector, int v_size, + float* __restrict__ result) { + for (int v = 0; v < v_size; v++) { + result[v] = std::max(0.0f, std::min(vector[v], 6.0f)); + } +} + +// Apply signbit to elements of a vector +void ApplySignbitToVector(const float* __restrict__ vector, int v_size, + float* __restrict__ result) { + for (int v = 0; v < v_size; v++) { + result[v] = std::signbit(vector[v]); + } +} + +void UnpackDenseInt4IntoInt8(const int8_t* src_buffer, int num_elements, + int8_t* dst_buffer) { + for (int i = 0; i < num_elements / 2; i++) { + int8_t byte = src_buffer[i]; + // Shift left first so that sign is properly extended when shifted right + int8_t lower = static_cast(byte << 4) >> 4; + int8_t higher = byte >> 4; + dst_buffer[2 * i] = lower; + dst_buffer[2 * i + 1] = higher; + } + + // If the buffer size is odd, extract the final lower nibble. + if (num_elements % 2 != 0) { + dst_buffer[num_elements - 1] = + static_cast(src_buffer[num_elements / 2] << 4) >> 4; + } +} + +} // namespace tensor_utils +} // namespace tflite + +#endif // TENSORFLOW_LITE_KERNELS_INTERNAL_TENSOR_UTILS_H_ diff --git a/tensorflow/lite/kernels/internal/portable_tensor_utils.h b/tensorflow/lite/kernels/internal/portable_tensor_utils.h new file mode 100644 index 0000000..c28892c --- /dev/null +++ b/tensorflow/lite/kernels/internal/portable_tensor_utils.h @@ -0,0 +1,623 @@ +/* Copyright 2021 The TensorFlow Authors. 
All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#ifndef TENSORFLOW_LITE_KERNELS_INTERNAL_PORTABLE_TENSOR_UTILS_H_ +#define TENSORFLOW_LITE_KERNELS_INTERNAL_PORTABLE_TENSOR_UTILS_H_ + +#include +#include +#include + +#include "tensorflow/lite/core/c/builtin_op_data.h" +#include "tensorflow/lite/core/c/common.h" + +#if defined(_MSC_VER) +#define __restrict__ __restrict +#endif + +namespace tflite { + +// Not all backends support CpuBackendContext usage, so forward declare to avoid +// pulling in its implementation. Use of CpuBackendContext in method +// implementations is purely optional. +class CpuBackendContext; + +namespace tensor_utils { + +// Multiplies a matrix with a scalar and reduce the result on each row to a +// scalar. +// Parameters: +// - matrix: matrix of size n_row * n_col +// - scalar: the scalar that is multiplied to each element in the matrix +// - n_row: the row count of the matrix +// - n_col: the column count of the matrix +// - output: the 32bit output +// Note: We do not need saturation because the int8 * int8 is safe from overflow +// in (2^31-1) / (2^14) = 131072, which is bigger than the n_row. Non-zero +// initial output value is not exceptionally large. +void MatrixScalarMultiplyAccumulate(const int8_t* matrix, int32_t scalar, + int32_t n_row, int32_t n_col, + int32_t* output); + +// Add another vector for each batch in the batch vector. 
+template +void VectorBatchVectorAdd(const T* vector, int v_size, int n_batch, + T* batch_vector) { + for (int b = 0; b < n_batch; b++) { + for (int i = 0; i < v_size; ++i) { + batch_vector[i] += vector[i]; + } + batch_vector += v_size; + } +} + +// Cwise product of two vectors. +template +inline void VectorVectorCwiseProduct(const T* vector1, const T* vector2, + int v_size, T* result) { + for (int v = 0; v < v_size; v++) { + *result++ = *vector1++ * *vector2++; + } +} + +// Cwise product of a vector and a batch-vector. +template +inline void VectorBatchVectorCwiseProduct(const T* vector, int v_size, + const T* batch_vector, int n_batch, + T* result) { + for (int b = 0; b < n_batch; b++) { + VectorVectorCwiseProduct(vector, batch_vector, v_size, result); + // Update the pointers. + result += v_size; + batch_vector += v_size; + } +} + +// Cwise product and accumulate of two vectors. Since it's a MAC operation, the +// assumption here is that result array is initialized to valid values. +template +inline void VectorVectorCwiseProductAccumulate(const T* __restrict__ vector1, + const T* __restrict__ vector2, + int v_size, + T* __restrict__ result) { + for (int v = 0; v < v_size; v++) { + *result++ += *vector1++ * *vector2++; + } +} + +// Cwise product and accumulate of a vector and a batch-vector. Since it's a MAC +// operation, the assumption here is that result array is initialized to valid +// values. +template +inline void VectorBatchVectorCwiseProductAccumulate(const T* vector, int v_size, + const T* batch_vector, + int n_batch, T* result) { + for (int b = 0; b < n_batch; b++) { + VectorVectorCwiseProductAccumulate(vector, batch_vector, v_size, result); + // Update the pointers. + result += v_size; + batch_vector += v_size; + } +} + +// Batch vector initialization with another vector. 
+template +void VectorBatchVectorAssign(const T* vector, int v_size, int n_batch, + T* batch_vector) { + for (int b = 0; b < n_batch; b++) { + std::copy_n(vector, v_size, batch_vector + b * v_size); + } +} + +// Checks if all entries of vector are zero for float. +bool IsZeroVector(const float* vector, int v_size); + +// Checks if all entries of vector are zero for int8. +bool IsZeroVector(const int8_t* vector, int v_size); + +// Quantizes a buffer of floating point values using a symmetric quantization +// (i.e. linear quantization without an offset) to 8-bit signed integers. +// It also outputs the range (min, max) of the floating point buffer, and the +// scaling factor used to quantize the values. +void SymmetricQuantizeFloats(const float* values, const int size, + int8_t* quantized_values, float* min_value, + float* max_value, float* scaling_factor); + +// Quantizes a buffer of floating point values using a symmetric quantization +// (i.e. linear quantization without an offset) to 8-bit signed integers. +// It uses the range (min, max) provided to the function to calculate the +// appropriate scaling factor to quantize the values. +void SymmetricQuantizeFloats(const float* values, const int size, + int8_t* quantized_values, float min_value, + float max_value, float* scaling_factor); + +void AsymmetricQuantizeFloats(const float* values, const int size, + int8_t* quantized_values, float* scaling_factor, + int32_t* offset); + +// Helper function to quantize floats. +// float_data_ptr input float vectors +// n_batch number of input vectors +// n_data size of a single input vector +// quantized_data_ptr (out) vector with quantized data +// scaling_factors (out) scaling factors (one per vector) +// zero_points (out) zero points (one per vector) +// do_asymmetric controls if the quantization should be asymmetric. 
+inline void BatchQuantizeFloats(const float* float_data_ptr, int n_batch, + int n_data, int8_t* quantized_data_ptr, + float* scaling_factors, int32_t* zero_points, + bool do_asymmetric) { + for (int b = 0; b < n_batch; ++b) { + const int offset = b * n_data; + if (do_asymmetric) { + tensor_utils::AsymmetricQuantizeFloats( + float_data_ptr + offset, n_data, quantized_data_ptr + offset, + &scaling_factors[b], &zero_points[b]); + } else { + float unused_min, unused_max; + tensor_utils::SymmetricQuantizeFloats( + float_data_ptr + offset, n_data, quantized_data_ptr + offset, + &unused_min, &unused_max, &scaling_factors[b]); + } + } +} + +// Multiplies a matrix by a "batched" vector (i.e. a matrix with a batch +// dimension composed by input vectors independent from each other). The result +// of the multiplication is accumulated to the passed result buffer. +// More specifically, for a matrix M of shape [n, i] and a batched-vector +// of shape [i, batch] it will first compute the product of shape [n, batch]. +// This product will be accumulated to the result buffer. +void MatrixBatchVectorMultiplyAccumulate(const float* matrix, int m_rows, + int m_cols, const float* vector, + int n_batch, float* result); + +// Same as the function above, but the matrix is a sparse tensor with block +// pattern 1x4. +// This function assumes that m_cols is a multiple of the block size (4 in this +// case) so that there's no incomplete block. +void SparseMatrixBatchVectorMultiplyAccumulate1x4( + const float* __restrict__ matrix, const int32_t* __restrict__ segments, + const int32_t* __restrict__ indices, int m_rows, int m_cols, + const float* __restrict__ vector, int n_batch, float* __restrict__ result); + +// Same as the function above, but the matrix is stored in block compressed +// sparse row format with block pattern 1x16 which consists of two arrays: +// 1. A matrix array stores non-zero blocks of the matrix in row major. +// 2. 
A ledger array stores nrows groups, one group per row. Each group starts +// with an integer representing the number of non-zero blocks for the +// corresponding row and follows with column indexes of the first element +// of each non-zero block. +// This function assumes that +// 1. m_cols is a multiple of 16 so that all blocks are full blocks. +// 2. m_cols < 254 * 16 so that block index can be represented by uint8. +void SparseMatrixBatchVectorMultiplyAccumulate( + const float* __restrict__ matrix, const uint8_t* __restrict__ ledger, + int m_rows, int m_cols, const float* __restrict__ vector, int n_batch, + float* __restrict__ result); + +// Same as the function above, but for values quantized using symmetric +// quantization (e.g. by calling SymmetricQuantizeFloats). +// The passed scaling factors is a buffer of the quantization scaling factors +// that will be used to dequentize the products into the final result buffer. +// These scaling factors are the multiplication of the matrix scaling factor +// by the vector's scaling factor, one per batch (i.e. this allows quantizing +// each batch in the batch-vector matrix independently). +void MatrixBatchVectorMultiplyAccumulate( + const int8_t* __restrict__ matrix, const int m_rows, const int m_cols, + const int8_t* __restrict__ vectors, + const float* __restrict__ scaling_factors, int n_batch, + float* __restrict__ result); + +// Same as the function above except that vector values +// are quantized with asymmetric quantization per-batch and the matrix +// is quantized per row. +void MatrixBatchVectorMultiplyAccumulate( + const int8_t* __restrict__ matrix, const int m_rows, const int m_cols, + const int8_t* __restrict__ vectors, + const float* __restrict__ scaling_factors, int n_batch, + float* __restrict__ result, const float* __restrict__ per_channel_scale, + const int32_t* __restrict__ input_offset); + +// Same as the function above, but the matrix is a sparse tensor with block +// pattern 1x16. 
+// This function assumes that m_cols is a multiple of the block size (16 in this +// case) so that there's no incomplete block. Also, it assumes all offsets of +// input, output and filter are zero. +void SparseMatrixBatchVectorMultiplyAccumulate1x16( + const int8_t* __restrict__ matrix, const int32_t* __restrict__ segments, + const int32_t* __restrict__ indices, int m_rows, int m_cols, + const int8_t* __restrict__ vector, const int32_t* __restrict__ bias_vector, + int n_batch, const int32_t input_offset, const int32_t output_multiplier, + const int32_t output_shift, const int32_t output_offset, + const int32_t output_activation_min, const int32_t output_activation_max, + int8_t* __restrict__ result); + +// Same as the function above, but the matrix is stored in block compressed +// sparse row format with block pattern 1x16 which consists of two arrays: +// 1. A matrix array stores non-zero blocks of the matrix in row major. +// 2. A ledger array stores nrows groups, one group per row. Each group starts +// with an integer representing the number of non-zero blocks for the +// corresponding row followed by column index of the first element of +// each non-zero block. +// This function assumes that +// 1. m_cols is a multiple of 16 so that all blocks are full blocks. +// 2. m_cols < 254 * 16 so that block index can be represented by uint8. +void SparseMatrixBatchVectorMultiplyAccumulate( + const int8_t* __restrict__ matrix, const uint8_t* __restrict__ ledger, + const int m_rows, const int m_cols, const int8_t* __restrict__ vectors, + const float* __restrict__ scaling_factors, int n_batch, + float* __restrict__ result); + +// Same as the above 8, 8, 8 integer matmul except for the presence of zero +// point and non-accumulative. +// TODO(b/148688698): remove this function by folding zero point calculation in +// prepare() function. 
+void MatrixBatchVectorMultiply(const int8_t* input, int32_t input_zeropoint, + const int8_t* input_to_gate_weights, + int32_t input_to_gate_effective_scale_a, + int32_t input_to_gate_effective_scale_b, + int32_t n_batch, int32_t n_input, int32_t n_cell, + int8_t* gate_output, int8_t gate_output_zp); + +// Same as above but has 16 bit and 8 bit input and 8 bit output. +// Used in projection when hidden is 16bit. +void MatrixBatchVectorMultiply(const int16_t* hidden, + const int8_t* hidden_to_output_weights, + int32_t proj_effective_scale_a, + int32_t proj_effective_scale_b, + const int32_t* gate_bias, int32_t n_batch, + int32_t n_hidden, int32_t n_output, + int32_t output_zp, int8_t* proj_output); + +// Apply Layer Normalization (https://arxiv.org/abs/1607.06450) to a Quantized +// vector. +// Parameters: +// - input: batch vector of size n_batch * n_input; 16 bit. +// - layer_norm_weights: the quantized layer normalization weights. +// - bias: the bias for the layer normalization. +// - layer_norm_scale_a: multiplier for scale factor. +// - layer_norm_scale_b: shift for scale factor. +// - variance_limit: the guard to make sure the inverse does not overflow. +// - n_batch: the number of batches. +// - n_input: the size for input and output. +// - output: the 16 bit output +void ApplyLayerNorm(const int16_t* input, const int16_t* layer_norm_weights, + const int32_t* bias, int32_t layer_norm_scale_a, + int32_t layer_norm_scale_b, int32_t variance_limit, + int n_batch, int n_input, int16_t* output); + +// Same as above but the internal calculation is done in float. +void ApplyLayerNormFloat(const int16_t* input, + const int16_t* layer_norm_weights, + int32_t layer_norm_scale_a, int32_t layer_norm_scale_b, + const int32_t* bias, int n_batch, int n_input, + int16_t* output); + +// Apply Sigmoid to a quantized vector. +// Parameters: +// - input: batch vector of size n_batch * n_input; 16 bit. +// - n_batch: the number of batches. 
+// - n_input: the size for input and output. +// - output: the 16 bit output +// The input is in Q3.12 format and the output is in Q0.15 format. +void ApplySigmoid(const int16_t* input, int32_t n_batch, int32_t n_input, + int16_t* output); + +// Same as above but the internal calcualtion is float. +void ApplySigmoidFloat(const int16_t* input, int32_t n_batch, int32_t n_input, + int16_t* output); + +// Apply Tanh to a quantized vector. +// Parameters: +// - integer_bits: the integer bits of the input. +// Currently supports 0, 1, 2, 3, 4, 5, 6. +// - input: batch vector of size n_batch * n_input; 16 bit. +// - n_batch: the number of batches. +// - n_input: the size for input and output. +// - output: the 16 bit output +// The input is in Qm.15-m format and the output is in Q0.15 format. +void ApplyTanh(int32_t intger_bits, const int16_t* input, int32_t n_batch, + int32_t n_input, int16_t* output); + +// Apply Tanh to a quantized vector. Tbe internal calculation is in float. +// - Input has 2^(integer_bits) as scale. +// - Output has Q0.15 as scale. +void ApplyTanhFloat(const int16_t* input, int32_t n_batch, int32_t n_input, + int32_t integer_bits, int16_t* output); + +// Element-wise multiplication of two quantized vectors. +// Parameters: +// - input_1: batch vector of size n_batch * n_input; 16 bit. +// - input_2: batch vector of size n_batch * n_input; 16 bit. +// - n_batch: the number of batches. +// - n_input: the size for input and output. +// - shift: the shift needed to produce the output. +// - output: the 16 bit output of size n_batch * n_input. +// Output does not need to be initialized. +void CwiseMul(const int16_t* input_1, const int16_t* input_2, int n_batch, + int n_input, int shift, int16_t* output); + +// Element-wise multiplication of two quantized vectors. +// Parameters: +// - input_1: batch vector of size n_batch * n_input; 16 bit. +// - input_2: batch vector of size n_batch * n_input; 16 bit. +// - n_batch: the number of batches. 
+// - n_input: the size for input and output. +// - shift: the shift needed to produce the output. +// - output: the 8 bit output of size n_batch * n_input. +// Output does not need to be initialized. +void CwiseMul(const int16_t* input_1, const int16_t* input_2, int n_batch, + int n_input, int shift, int8_t* output); + +// Element-wise multiplication of two quantized vectors with rescaling. +// Parameters: +// - input_1: batch vector of size n_batch * n_input; 16 bit. +// - input_2: batch vector of size n_batch * n_input; 16 bit. +// - multiplier: the multiplier part of scale. +// - shift: the shift part of scale. +// - n_batch: the number of batches. +// - n_input: the size for input and output. +// - output: the 8 bit output of size n_batch * n_input. +// - output_zp: the zero point of output. +// Output does not need to be initialized. +// Multiplier ("m") and shift ("s") are connected to scale ("s") with s = m * +// 2^(s - 31). +void CwiseMul(const int16_t* input_1, const int16_t* input_2, + int32_t multiplier, int32_t shift, int32_t n_batch, + int32_t n_input, int32_t output_zp, int8_t* output); + +// Element-wise saturating addition of two quantized vectors without rescaling. +// Parameters: +// - input_1: batch vector of size n_batch * n_input; 16 bit. +// - input_2: batch vector of size n_batch * n_input; 16 bit. +// - n_batch: the number of batches. +// - n_input: the size for input and output. +// - output: the 8 bit output of size n_batch * n_input. +// Output does not need to be initialized. +void CwiseAdd(const int16_t* input_1, const int16_t* input_2, int n_batch, + int n_input, int16_t* output); + +// Element-wise in-place clipping of a vector. Overloaded for float, int16_t, +// int8_t. Parameters: +// - vector: vector of size v_size. +// - v_size: the size of the vector. +// - clipping_value: the value used for clipping. 
+void CwiseClipping(float* vector, const int v_size, const float clipping_value); +void CwiseClipping(int16_t* vector, const int v_size, + const int16_t clipping_value); +void CwiseClipping(int8_t* vector, const int v_size, + const int8_t clipping_value); + +// Dot product of two vectors. +float VectorVectorDotProduct(const float* vector1, const float* vector2, + int v_size); + +// Dot product of two batch vectors of size n_batch * v_size: +// vector1 = [x_1_1, x_1_2, ..., x_1_vsize, +// x_2_1, x_2_2, ..., x_2_vsize, +// ... +// x_nbatch_1,..., x_nbatch_vsize] +// vector2 = [y_1_1, y_1_2, ..., y_1_vsize, +// y_2_1, y_2_2, ..., y_2_vsize, +// ... +// y_nbatch_1,..., y_nbatch_vsize] +// Then result will be a vector of n_batch size starting from 'result': +// [x_1_1 * y_1_1 + x_1_2 * y_1_2 + ... + x_1_vsize * y_1_vsize, +// x_2_1 * y_2_1 + x_2_2 * y_2_2 + ... + x_2_vsize * y_2_vsize, +// ... +// x_nbatch_1 * y_nbatch_1 + ... + x_nbatch_vsize * y_nbatch_vsize] +template +inline void BatchVectorBatchVectorDotProduct(const T* vector1, const T* vector2, + int v_size, int n_batch, + T* result) { + for (int b = 0; b < n_batch; b++) { + result[b] = VectorVectorDotProduct(vector1, vector2, v_size); + vector1 += v_size; + vector2 += v_size; + } +} + +// Same as above but input is 16bit and output is 32bit. +void BatchVectorBatchVectorDotProduct(const int16_t* vector1, + const int16_t* vector2, int v_size, + int n_batch, int32_t* result); + +// Same as above, but inputs are 16bit integer and output is 16bit integer. +void VectorBatchVectorCwiseProductAccumulate(const int16_t* vector, int v_size, + const int16_t* batch_vector, + int n_batch, int32_t multiplier, + int shift, int16_t* result); + +// Compute "1.0f - elements of vector" (used in CIFG). +void Sub1Vector(const float* vector, int v_size, float* result); + +// Compute "1.0f - elements of vector" (used in CIFG) for int16 input. +// "vector" has range [0, 32767] because it is the output of sigmoid function. 
+void Sub1Vector(const int16_t* vector, int v_size, int16_t* result); + +// Reduce-sum on a float input vector: +// input_vector: float pointer to input vector. +// output_vector: float pointer to vector. +// output_size: output vector size. +// reduction_size: number of consecutive elements from input vector which are +// added to get one element of output. +void ReductionSumVector(const float* input_vector, float* output_vector, + int output_size, int reduction_size); + +// Same as above but input/output is 32 bit integer. +void ReductionSumVector(const int32_t* input_vector, int32_t* output_vector, + int output_size, int reduction_size); + +// Same as above but input is 8 bit integer. +void ReductionSumVector(const int8_t* input_vector, int32_t* output_vector, + int output_size, int reduction_size); + +// Multiply all elements of vector with a scalar. +void VectorScalarMultiply(const int8_t* vector, int v_size, float scale, + float* result); + +// Layer norm for each batch. +void MeanStddevNormalization(const float* input_vector, float* output_vector, + int v_size, int n_batch); + +// Saturate Add with rescale on both inputs. +void TwoGateSaturatingAdd(const int8_t* input, int8_t input_zp, + const int8_t* recurrent, int8_t recurrent_zp, + int32_t input_effective_scale_a, + int32_t input_effective_scale_b, + int32_t recurrent_effective_scale_a, + int32_t recurrent_effective_scale_b, int32_t n_batch, + int32_t n_cell, int16_t* output); + +// Same as the function above, but provide a scratch buffer for the +// int8 x int8 -> int32 and a CpuBackendContext for the accumulator +// computation. 
+void MatrixBatchVectorMultiplyAccumulate( + const int8_t* __restrict__ matrix, const int m_rows, const int m_cols, + const int8_t* __restrict__ vectors, + const float* __restrict__ scaling_factors, int n_batch, + int32_t* __restrict__ scratch, float* __restrict__ result, + CpuBackendContext* __restrict__ context); + +// Same as the function above except that can make use of cached row sums. +void MatrixBatchVectorMultiplyAccumulate( + const int8_t* __restrict__ matrix, const int m_rows, const int m_cols, + const int8_t* __restrict__ vectors, const float* scaling_factors, + int n_batch, float* __restrict__ result, const float* per_channel_scale, + const int32_t* input_offset, int32_t* scratch, int32_t* row_sums, + bool* compute_row_sums, CpuBackendContext* context); + +// Same as the function above, but provides separate scaling factor for the +// matrix and the vectors. The scaling factors are multiplied in the +// scaling_factor_scratch buffer. +inline void MatrixBatchVectorMultiplyAccumulate( + const int8_t* __restrict__ matrix, const int m_rows, const int m_cols, + const int8_t* __restrict__ vectors, const float matrix_scaling_factor, + const float* vector_scaling_factors, int n_batch, + float* __restrict__ result, const float* per_channel_scale, + const int32_t* input_offset, int32_t* scratch, int32_t* row_sums, + bool* compute_row_sums, float* scaling_factor_scratch, + CpuBackendContext* context) { + for (int b = 0; b < n_batch; ++b) { + scaling_factor_scratch[b] = + vector_scaling_factors[b] * matrix_scaling_factor; + } + MatrixBatchVectorMultiplyAccumulate(matrix, m_rows, m_cols, vectors, + scaling_factor_scratch, n_batch, result, + per_channel_scale, input_offset, scratch, + row_sums, compute_row_sums, context); +} + +// Multiplies a matrix by a "batched" vector (i.e. a matrix with a batch +// dimension composed by input vectors independent from each other). The result +// of the multiplication is accumulated to the passed result buffer. 
+// More specifically, for a matrix M of shape [n, i] and a batched-vector +// of shape [i, batch] it will first compute the product of shape [n, batch]. +// This product will be accumulated to the result buffer, +// Parameters: +// - input: batch vector of size n_batch * n_input +// - bias: vector of size b_input +// - input_to_gate_weights: matrix of size n_input * n_output +// - multiplier: scalar +// - shift: scalar +// - n_batch: the batch size +// - n_input: the input size +// - n_output: the output size +// - output_zp: the zero point of the output. +// - scratch: batch vector of size n_batch * n_output +// - output: the 16 bit output +// Notes: +// - this is used for gate matmul: for non-cifg it is for input, forget, +// cell, output gates; for cifg, it is for forget, cell, output gates. +// - multiplier and shift combined gives the scale. +// - assumes input zero point is 0. +// - scratch is created for optimization purpose only. +// TODO(b/152066492): this can be removed if some future optimization +// work makes it unnecessary. +void MatrixBatchVectorMultiplyAccumulate( + const int8_t* input, const int32_t* bias, + const int8_t* input_to_gate_weights, int32_t multiplier, int32_t shift, + int32_t n_batch, int32_t n_input, int32_t n_output, int32_t output_zp, + int32_t* scratch, int16_t* output, CpuBackendContext* context); + +// Multiplies a matrix by a "batched" vector (i.e. a matrix with a batch +// dimension composed by input vectors independent from each other). The result +// of the multiplication is accumulated to the passed result buffer. +// More specifically, for a matrix M of shape [n, i] and a batched-vector +// of shape [i, batch] it will first compute the product of shape [n, batch]. 
+// This product will be accumulated to the result buffer, +// Parameters: +// - input: batch vector of size n_batch * n_input +// - bias: vector of size b_input +// - input_to_gate_weights: matrix of size n_input * n_output +// - multiplier: scalar +// - shift: scalar +// - n_batch: the batch size +// - n_input: the input size +// - n_output: the output size +// - output_zp: the zero point of the output. +// - scratch: batch vector of size n_batch * n_output +// - output: the 8 bit output +// Notes: +// - this is used for projection matmul. +// - multiplier and shift combined gives the scale. +// - assumes input zero point is 0. +// - scratch is created for optimization purpose only. +// TODO(b/152066492): this can be removed if some future optimization +// work makes it unnecessary. +void MatrixBatchVectorMultiplyAccumulate( + const int8_t* input, const int32_t* bias, + const int8_t* input_to_gate_weights, int32_t multiplier, int32_t shift, + int32_t n_batch, int32_t n_input, int32_t n_output, int32_t output_zp, + int32_t* scratch, int8_t* output, CpuBackendContext* context); + +// Apply Rectified Linear to elements of a vector. +void ApplyReluToVector(const float* __restrict__ vector, int v_size, + float* __restrict__ result); + +// Apply Rectified Linear 1 (cap to [-1;1]) to elements of a vector +void ApplyRelu1ToVector(const float* __restrict__ vector, int v_size, + float* __restrict__ result); + +// Apply Rectified Linear 6 (cap to [0;6]) to elements of a vector +void ApplyRelu6ToVector(const float* __restrict__ vector, int v_size, + float* __restrict__ result); + +// Apply signbit to elements of a vector +void ApplySignbitToVector(const float* __restrict__ vector, int v_size, + float* __restrict__ result); + +// Unpack or inflate `src_buffer` by taking each element and splitting it as +// two elements into `dst_buffer`. +// Parameters: +// src_buffer : Densely packed buffer containing int4 values +// num_elements : Number of elements stored in the buffer. 
Note that this can +// be smaller than the size of `src_buffer` by 1 if it's odd, +// in which case the last nibble in `src_buffer` is ignored. +// This should be equal to the size of `dst_buffer`. +// dst_buffer : Buffer to unpack into. Should be allocated by the caller. +// Size should be at least `num_elements`. +// Notes: +// For example, given `src_buffer = {0x12, 0x34};`, calling this function +// will return `dst_buffer = {0x02, 0x01, 0x04, 0x03}`. +void UnpackDenseInt4IntoInt8(const int8_t* src_buffer, int num_elements, + int8_t* dst_buffer); + +} // namespace tensor_utils + +} // namespace tflite + +#endif // TENSORFLOW_LITE_KERNELS_INTERNAL_PORTABLE_TENSOR_UTILS_H_ diff --git a/tensorflow/lite/kernels/internal/quantization_util.cc b/tensorflow/lite/kernels/internal/quantization_util.cc new file mode 100644 index 0000000..62045d6 --- /dev/null +++ b/tensorflow/lite/kernels/internal/quantization_util.cc @@ -0,0 +1,416 @@ +/* Copyright 2017 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#include "tensorflow/lite/kernels/internal/quantization_util.h" + +#include +#include +#include + +#include "tensorflow/lite/kernels/internal/compatibility.h" +#include "tensorflow/lite/kernels/internal/cppmath.h" + +namespace tflite { + +namespace { +// These constants are used to manipulate the binary representation of doubles. 
+// Double-precision binary64 floating point format is: +// Bit | 63 | 62-52 | 51-0 | +// | Sign | Exponent | Fraction | +// To avoid 64-bit integers as much as possible, I break this into high and +// low 32-bit chunks. High is: +// Bit | 31 | 30-20 | 19-0 | +// | Sign | Exponent | High Fraction | +// Low is: +// Bit | 31-0 | +// | Low Fraction | +// We then access the components through logical bit-wise operations to +// extract the parts needed, with the positions and masks derived from the +// layout shown above. +constexpr uint64_t kSignMask = 0x8000000000000000LL; +constexpr uint64_t kExponentMask = 0x7ff0000000000000LL; +constexpr int32_t kExponentShift = 52; +constexpr int32_t kExponentBias = 1023; +constexpr uint32_t kExponentIsBadNum = 0x7ff; +constexpr uint64_t kFractionMask = 0x000fffffffc00000LL; +constexpr uint32_t kFractionShift = 22; +constexpr uint32_t kFractionRoundingMask = 0x003fffff; +constexpr uint32_t kFractionRoundingThreshold = 0x00200000; +} // namespace + +void QuantizeMultiplier(double double_multiplier, int32_t* quantized_multiplier, + int* shift) { +#if TFLITE_SINGLE_ROUNDING + // Single-rounding MultiplyByQuantizedMultiplier only supports positive + // multipliers. + // TFLITE_DCHECK(double_multiplier >= 0); +#endif + if (double_multiplier == 0.) { + *quantized_multiplier = 0; + *shift = 0; + return; + } +#ifdef TFLITE_EMULATE_FLOAT + // If we're trying to avoid the use of floating-point instructions (for + // example on microcontrollers) then use an alternative implementation + // that only requires integer and bitwise operations. To enable this, you + // need to set the define during the build process for your platform. 
+ int64_t q_fixed = IntegerFrExp(double_multiplier, shift); +#else // TFLITE_EMULATE_FLOAT + const double q = std::frexp(double_multiplier, shift); + auto q_fixed = static_cast(TfLiteRound(q * (1LL << 31))); +#endif // TFLITE_EMULATE_FLOAT + TFLITE_CHECK(q_fixed <= (1LL << 31)); + if (q_fixed == (1LL << 31)) { + q_fixed /= 2; + ++*shift; + } + TFLITE_CHECK_LE(q_fixed, std::numeric_limits::max()); + // A shift amount smaller than -31 would cause all bits to be shifted out + // and thus all results would be zero. We implement that instead with + // q_fixed==0, so as to avoid hitting issues with right-shift + // operations with shift amounts greater than 31. Note that this happens + // roughly when abs(double_multiplier) < 2^-31 and the present handling means + // that we're effectively flushing tiny double_multiplier's to zero. + // We could conceivably handle values in the range (roughly) [32, 63] + // as 'denormals' i.e. (shift==0, q_fixed < 2^30). In that point of view + // the present handling is just doing 'flush denormals to zero'. We could + // reconsider and actually generate nonzero denormals if a need arises. + if (*shift < -31) { + *shift = 0; + q_fixed = 0; + } +#if TFLITE_SINGLE_ROUNDING + // Single-rounding MultiplyByQuantizedMultiplier doesn't support a shift > 30, + // saturate it. 
+ if (*shift > 30) { + *shift = 30; + q_fixed = (1LL << 31) - 1; + } +#endif + *quantized_multiplier = static_cast(q_fixed); +} + +void QuantizeMultiplierGreaterThanOne(double double_multiplier, + int32_t* quantized_multiplier, + int* left_shift) { + TFLITE_CHECK_GT(double_multiplier, 1.); + QuantizeMultiplier(double_multiplier, quantized_multiplier, left_shift); + TFLITE_CHECK_GE(*left_shift, 0); +} + +void QuantizeMultiplierSmallerThanOneExp(double double_multiplier, + int32_t* quantized_multiplier, + int* left_shift) { + TFLITE_CHECK_LT(double_multiplier, 1.); + TFLITE_CHECK_GT(double_multiplier, 0.); + int shift; + QuantizeMultiplier(double_multiplier, quantized_multiplier, &shift); + TFLITE_CHECK_LE(shift, 0); + *left_shift = shift; +} + +int64_t IntegerFrExp(double input, int* shift) { + // Make sure our assumptions about the double layout hold. + TFLITE_CHECK_EQ(8, sizeof(double)); + + // We want to access the bits of the input double value directly, which is + // tricky to do safely, so use a union to handle the casting. + union { + double double_value; + uint64_t double_as_uint; + } cast_union; + cast_union.double_value = input; + const uint64_t u = cast_union.double_as_uint; + + // If the bitfield is all zeros apart from the sign bit, this is a normalized + // zero value, so return standard values for this special case. + if ((u & ~kSignMask) == 0) { + *shift = 0; + return 0; + } + + // Deal with NaNs and Infs, which are always indicated with a fixed pattern in + // the exponent, and distinguished by whether the fractions are zero or + // non-zero. + const uint32_t exponent_part = ((u & kExponentMask) >> kExponentShift); + if (exponent_part == kExponentIsBadNum) { + *shift = std::numeric_limits::max(); + if (u & kFractionMask) { + // NaN, so just return zero (with the exponent set to INT_MAX). + return 0; + } else { + // Infinity, so return +/- INT_MAX. 
+ if (u & kSignMask) { + return std::numeric_limits::min(); + } else { + return std::numeric_limits::max(); + } + } + } + + // The shift is fairly easy to extract from the high bits of the double value, + // just by masking it out and applying a bias. The std::frexp() implementation + // always returns values between 0.5 and 1.0 though, whereas the exponent + // assumes 1.0 to 2.0 is the standard range, so I add on one to match that + // interface. + *shift = (exponent_part - kExponentBias) + 1; + + // There's an implicit high bit in the double format definition, so make sure + // we include that at the top, and then reconstruct the rest of the fractional + // value from the remaining fragments. + int64_t fraction = 0x40000000 + ((u & kFractionMask) >> kFractionShift); + + // We're cutting off some bits at the bottom, so to exactly match the standard + // frexp implementation here we'll apply rounding by adding one to the least + // significant bit of the result if the discarded portion is over half of the + // maximum. + if ((u & kFractionRoundingMask) > kFractionRoundingThreshold) { + fraction += 1; + } + // Negate the fraction if the sign bit was set. + if (u & kSignMask) { + fraction *= -1; + } + + return fraction; +} + +double DoubleFromFractionAndShift(int64_t fraction, int shift) { + union { + double double_value; + uint64_t double_as_uint; + } result; + + // Detect NaNs and infinities. + if (shift == std::numeric_limits::max()) { + if (fraction == 0) { + return std::numeric_limits::quiet_NaN(); + } else if (fraction > 0) { + return std::numeric_limits::infinity(); + } else { + return -std::numeric_limits::infinity(); + } + } + + // Return a normalized zero for a zero fraction. + if (fraction == 0) { + result.double_as_uint = 0; + return result.double_value; + } + + bool is_negative = (fraction < 0); + int64_t encoded_fraction = is_negative ? 
-fraction : fraction; + int64_t encoded_shift = (shift - 1); + while (encoded_fraction < 0x40000000) { + encoded_fraction *= 2; + encoded_shift -= 1; + } + while (encoded_fraction > 0x80000000) { + encoded_fraction /= 2; + encoded_shift += 1; + } + encoded_fraction -= 0x40000000; + if (encoded_shift < -1022) { + encoded_shift = -1023; + } else if (encoded_shift > 1022) { + encoded_shift = 1023; + } + encoded_shift += kExponentBias; + uint64_t encoded_sign = is_negative ? kSignMask : 0; + result.double_as_uint = encoded_sign | (encoded_shift << kExponentShift) | + (encoded_fraction << kFractionShift); + return result.double_value; +} + +double IntegerDoubleMultiply(double a, double b) { + int a_shift; + const int64_t a_fraction = IntegerFrExp(a, &a_shift); + int b_shift; + const int64_t b_fraction = IntegerFrExp(b, &b_shift); + // Detect NaNs and infinities. + if (a_shift == std::numeric_limits::max() || + (b_shift == std::numeric_limits::max())) { + return std::numeric_limits::quiet_NaN(); + } + const int result_shift = a_shift + b_shift + 1; + const int64_t result_fraction = (a_fraction * b_fraction) >> 32; + return DoubleFromFractionAndShift(result_fraction, result_shift); +} + +int IntegerDoubleCompare(double a, double b) { + int a_shift; + const int64_t a_fraction = IntegerFrExp(a, &a_shift); + int b_shift; + const int64_t b_fraction = IntegerFrExp(b, &b_shift); + + // Detect NaNs and infinities. 
+ if (a_shift == std::numeric_limits::max() || + (b_shift == std::numeric_limits::max())) { + return 1; + } + + if ((a_fraction == 0) && (b_fraction < 0)) { + return 1; + } else if ((a_fraction < 0) && (b_fraction == 0)) { + return -1; + } else if (a_shift < b_shift) { + return -1; + } else if (a_shift > b_shift) { + return 1; + } else if (a_fraction < b_fraction) { + return -1; + } else if (a_fraction > b_fraction) { + return 1; + } else { + return 0; + } +} + +void PreprocessSoftmaxScaling(double beta, double input_scale, + int input_integer_bits, + int32_t* quantized_multiplier, int* left_shift) { + // If the overall multiplier (input and beta) is large, then exp() of an + // input difference of 1 scaled by this will be large. In other words, we + // can cap the multiplier and know that, when it is used, the output will be + // (round to) zero wherever the input is not at the maximum value. + + // If the overall scale is less than one, and input_integer_bits=0, then the + // result is double equivalent of Q0.31 (actually with more precision). Thus + // this generates a Q(input_integer_bits).(31-input_integer_bits) + // representation. 
+#if TFLITE_SINGLE_ROUNDING + const double max_real_multiplier = (1LL << 30) - 1.0; +#else + const double max_real_multiplier = (1LL << 31) - 1.0; +#endif + +#ifdef TFLITE_EMULATE_FLOAT + const double input_beta = IntegerDoubleMultiply(beta, input_scale); + int shift; + int64_t fraction = IntegerFrExp(input_beta, &shift); + shift += (31 - input_integer_bits); + double input_beta_real_multiplier = + DoubleFromFractionAndShift(fraction, shift); + if (IntegerDoubleCompare(input_beta_real_multiplier, max_real_multiplier) > + 0) { + input_beta_real_multiplier = max_real_multiplier; + } +#else // TFLITE_EMULATE_FLOAT + const double input_beta_real_multiplier = + std::min(beta * input_scale * (1 << (31 - input_integer_bits)), + max_real_multiplier); +#endif // TFLITE_EMULATE_FLOAT + + QuantizeMultiplierGreaterThanOne(input_beta_real_multiplier, + quantized_multiplier, left_shift); +} + +void PreprocessLogSoftmaxScalingExp(double beta, double input_scale, + int input_integer_bits, + int32_t* quantized_multiplier, + int* left_shift, + int32_t* reverse_scaling_divisor, + int* reverse_scaling_left_shift) { + PreprocessSoftmaxScaling(beta, input_scale, input_integer_bits, + quantized_multiplier, left_shift); + + // Also calculate what amounts to the inverse scaling factor for the input. 
+ const double real_reverse_scaling_divisor = + (1 << (31 - *left_shift)) / static_cast(*quantized_multiplier); + tflite::QuantizeMultiplierSmallerThanOneExp(real_reverse_scaling_divisor, + reverse_scaling_divisor, + reverse_scaling_left_shift); +} + +int CalculateInputRadius(int input_integer_bits, int input_left_shift, + int total_signed_bits) { +#ifdef TFLITE_EMULATE_FLOAT + int64_t result = (1 << input_integer_bits) - 1; + result <<= (total_signed_bits - input_integer_bits); + result >>= input_left_shift; + return result; +#else // TFLITE_EMULATE_FLOAT + const double max_input_rescaled = + 1.0 * ((1 << input_integer_bits) - 1) * + (1LL << (total_signed_bits - input_integer_bits)) / + (1LL << input_left_shift); + // Tighten bound using floor. Suppose that we could use the exact value. + // After scaling the difference, the result would be at the maximum. Thus we + // must ensure that our value has lower magnitude. + return static_cast(std::floor(max_input_rescaled)); +#endif // TFLITE_EMULATE_FLOAT +} + +void NudgeQuantizationRange(const float min, const float max, + const int quant_min, const int quant_max, + float* nudged_min, float* nudged_max, + float* nudged_scale) { + // This code originates from tensorflow/core/kernels/fake_quant_ops_functor.h. 
+ const float quant_min_float = static_cast(quant_min); + const float quant_max_float = static_cast(quant_max); + *nudged_scale = (max - min) / (quant_max_float - quant_min_float); + const float zero_point_from_min = quant_min_float - min / *nudged_scale; + uint16_t nudged_zero_point; + if (zero_point_from_min < quant_min_float) { + nudged_zero_point = static_cast(quant_min); + } else if (zero_point_from_min > quant_max_float) { + nudged_zero_point = static_cast(quant_max); + } else { + nudged_zero_point = static_cast(TfLiteRound(zero_point_from_min)); + } + *nudged_min = (quant_min_float - nudged_zero_point) * (*nudged_scale); + *nudged_max = (quant_max_float - nudged_zero_point) * (*nudged_scale); +} + +void FakeQuantizeArray(const float nudged_scale, const float nudged_min, + const float nudged_max, const float* input_data, + float* output_data, const float size) { + // This code originates from tensorflow/core/kernels/fake_quant_ops_functor.h. + const float inv_nudged_scale = 1.0f / nudged_scale; + + for (int i = 0; i < size; i++) { + const float src_val = input_data[i]; + const float clamped = std::min(nudged_max, std::max(nudged_min, src_val)); + const float clamped_shifted = clamped - nudged_min; + const float dst_val = + TfLiteRound(clamped_shifted * inv_nudged_scale) * nudged_scale + + nudged_min; + output_data[i] = dst_val; + } +} + +bool CheckedLog2(const float x, int* log2_result) { + // Using TfLiteRound instead of std::round and std::log instead of + // std::log2 to work around these functions being missing in a toolchain + // used in some TensorFlow tests as of May 2018. 
+ const float x_log2 = std::log(x) * (1.0f / std::log(2.0f)); + const float x_log2_rounded = TfLiteRound(x_log2); + const float x_log2_fracpart = x_log2 - x_log2_rounded; + + *log2_result = static_cast(x_log2_rounded); + return std::abs(x_log2_fracpart) < 1e-3f; +} + +void QuantizeMultiplierArray(const double* effective_scales, size_t size, + int32_t* effective_scale_significand, + int* effective_shift) { + for (size_t i = 0; i < size; ++i) { + QuantizeMultiplier(effective_scales[i], &effective_scale_significand[i], + &effective_shift[i]); + } +} + +} // namespace tflite diff --git a/tensorflow/lite/kernels/internal/quantization_util.h b/tensorflow/lite/kernels/internal/quantization_util.h new file mode 100644 index 0000000..0ee914b --- /dev/null +++ b/tensorflow/lite/kernels/internal/quantization_util.h @@ -0,0 +1,292 @@ +/* Copyright 2017 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ +#ifndef TENSORFLOW_LITE_KERNELS_INTERNAL_QUANTIZATION_UTIL_H_ +#define TENSORFLOW_LITE_KERNELS_INTERNAL_QUANTIZATION_UTIL_H_ + +#include +#include +#include + +#include "tensorflow/lite/kernels/internal/compatibility.h" +#include "tensorflow/lite/kernels/internal/cppmath.h" +#include "tensorflow/lite/kernels/internal/types.h" + +namespace tflite { + +// Given the min and max values of a float array, return +// reasonable quantization parameters to use for this array. 
+template +QuantizationParams ChooseQuantizationParams(double rmin, double rmax, + bool narrow_range) { + const T qmin = std::numeric_limits::min() + (narrow_range ? 1 : 0); + const T qmax = std::numeric_limits::max(); + const double qmin_double = qmin; + const double qmax_double = qmax; + // 0 should always be a representable value. Let's assume that the initial + // min,max range contains 0. + TFLITE_CHECK_LE(rmin, 0.); + TFLITE_CHECK_GE(rmax, 0.); + if (rmin == rmax) { + // Special case where the min,max range is a point. Should be {0}. + TFLITE_CHECK_EQ(rmin, 0.); + TFLITE_CHECK_EQ(rmax, 0.); + QuantizationParams quantization_params; + quantization_params.zero_point = 0; + quantization_params.scale = 0.; + return quantization_params; + } + + // General case. + // + // First determine the scale. + const double scale = (rmax - rmin) / (qmax_double - qmin_double); + + // Zero-point computation. + // First the initial floating-point computation. The zero-point can be + // determined from solving an affine equation for any known pair + // (real value, corresponding quantized value). + // We know two such pairs: (rmin, qmin) and (rmax, qmax). + // The arithmetic error on the zero point computed from either pair + // will be roughly machine_epsilon * (sum of absolute values of terms) + // so we want to use the variant that adds the smaller terms. + const double zero_point_from_min = qmin_double - rmin / scale; + const double zero_point_from_max = qmax_double - rmax / scale; + const double zero_point_from_min_error = + std::abs(qmin_double) + std::abs(rmin / scale); + const double zero_point_from_max_error = + std::abs(qmax_double) + std::abs(rmax / scale); + + const double zero_point_double = + zero_point_from_min_error < zero_point_from_max_error + ? 
zero_point_from_min + : zero_point_from_max; + + // Now we need to nudge the zero point to be an integer + // (our zero points are integer, and this is motivated by the requirement + // to be able to represent the real value "0" exactly as a quantized value, + // which is required in multiple places, for example in Im2col with SAME + // padding). + T nudged_zero_point = 0; + if (zero_point_double < qmin_double) { + nudged_zero_point = qmin; + } else if (zero_point_double > qmax_double) { + nudged_zero_point = qmax; + } else { + nudged_zero_point = static_cast(round(zero_point_double)); + } + // The zero point should always be in the range of quantized value, + // [qmin, qmax]. + TFLITE_CHECK_GE(nudged_zero_point, qmin); + TFLITE_CHECK_LE(nudged_zero_point, qmax); + + // Finally, store the result nudged quantization params. + QuantizationParams quantization_params; + quantization_params.zero_point = nudged_zero_point; + quantization_params.scale = scale; + return quantization_params; +} + +template +QuantizationParams ChooseQuantizationParams(double rmin, double rmax) { + return ChooseQuantizationParams(rmin, rmax, false); +} + +// Converts a floating-point number to an integer. For all inputs x where +// static_cast(x) is legal according to the C++ standard, the result +// is identical to that cast (i.e. the result is x with its fractional part +// truncated whenever that is representable as IntOut). +// +// static_cast would cause undefined behavior for the following cases, which +// have well-defined behavior for this function: +// +// 1. If x is NaN, the result is zero. +// +// 2. If the truncated form of x is above the representable range of IntOut, +// the result is std::numeric_limits::max(). +// +// 3. If the truncated form of x is below the representable range of IntOut, +// the result is std::numeric_limits::min(). +// +// Note that cases #2 and #3 cover infinities as well as finite numbers. 
+// +// The range of FloatIn must include the range of IntOut, otherwise +// the results are undefined. +// TODO(sfeuz): Replace by absl::SafeCast once available. +template +IntOut SafeCast(FloatIn x) { + static_assert(!std::numeric_limits::is_integer, + "FloatIn is integer"); + static_assert(std::numeric_limits::is_integer, + "IntOut is not integer"); + static_assert(std::numeric_limits::radix == 2, "IntOut is base 2"); + + // Special case NaN, for which the logic below doesn't work. + if (std::isnan(x)) { + return 0; + } + + // Negative values all clip to zero for unsigned results. + if (!std::numeric_limits::is_signed && x < 0) { + return 0; + } + + // Handle infinities. + if (std::isinf(x)) { + return x < 0 ? std::numeric_limits::min() + : std::numeric_limits::max(); + } + + // Set exp such that x == f * 2^exp for some f with |f| in [0.5, 1.0), + // unless x is zero in which case exp == 0. Note that this implies that the + // magnitude of x is strictly less than 2^exp. + int exp = 0; + std::frexp(x, &exp); + + // Let N be the number of non-sign bits in the representation of IntOut. If + // the magnitude of x is strictly less than 2^N, the truncated version of x + // is representable as IntOut. The only representable integer for which this + // is not the case is kMin for signed types (i.e. -2^N), but that is covered + // by the fall-through below. + if (exp <= std::numeric_limits::digits) { + return x; + } + + // Handle numbers with magnitude >= 2^N. + return x < 0 ? std::numeric_limits::min() + : std::numeric_limits::max(); +} + +// Decompose a double multiplier into a Q0.31 int32 representation of its +// significand, and shift representation of NEGATIVE its exponent --- +// this is intended as a RIGHT-shift. +// +// Restricted to the case where the multiplier < 1 (and non-negative). 
+void QuantizeMultiplierSmallerThanOneExp(double double_multiplier, + int32_t* quantized_multiplier, + int* left_shift); + +// Decompose a double multiplier into a Q0.31 int32 representation of its +// significand, and shift representation of its exponent. +// +// Restricted to the case where the multiplier > 1. +void QuantizeMultiplierGreaterThanOne(double double_multiplier, + int32_t* quantized_multiplier, + int* left_shift); + +// Decompose a double multiplier into a Q0.31 int32 representation of its +// significand, and shift representation of its exponent. +// +// Handles an arbitrary positive multiplier. The 'shift' output-value is +// basically the 'floating-point exponent' of the multiplier: +// Negative for a right-shift (when the multiplier is <1), positive for a +// left-shift (when the multiplier is >1) +void QuantizeMultiplier(double double_multiplier, int32_t* quantized_multiplier, + int* shift); + +// Splits a double input value into a returned fraction, and a shift value from +// the exponent, using only bitwise and integer operations to support +// microcontrollers and other environments without floating-point support. +// +// This is designed to be a replacement for how std::frexp() is used within the +// QuantizeMultiplier() function, and so has a different signature than the +// standard version, returning a 64-bit integer rather than a double. This +// result has a maximum value of 1<<31, with the fraction expressed as a +// proportion of that maximum. +// +// std::frexp() returns NaNs and infinities unmodified, but since we're +// returning integers that can't represent those values, instead we return +// a shift of std::numeric_limits::max() for all bad numbers, with an int64 +// result of 0 for NaNs, std:numeric_limits::max() for +INFINITY, and +// std::numeric_limits::min() for -INFINITY. 
Denormalized inputs will +// result in return values that end up truncating some bits at the end, +// reflecting the loss of precision inherent in denormalization. +int64_t IntegerFrExp(double input, int* shift); + +// Converts an integer fraction in the format produced by IntegerFrExp (where +// 0x40000000 is 1.0) and an exponent shift (between -1022 and +1022) into an +// IEEE binary64 double format result. The implementation uses only integer and +// bitwise operators, so no floating point hardware support or emulation is +// needed. This is here so quantized operations can run non-time-critical +// preparation calculations on microcontrollers and other platforms without +// float support. +double DoubleFromFractionAndShift(int64_t fraction, int shift); + +// Performs a multiplication of two numbers in double format, using only integer +// and bitwise instructions. This is aimed at supporting housekeeping functions +// for quantized operations on microcontrollers without floating-point hardware. +double IntegerDoubleMultiply(double a, double b); + +// Returns -1 if a is less than b, 0 if a and b are equal, and +1 if a is +// greater than b. It is implemented using only integer and logical instructions +// so that it can be easily run on microcontrollers for quantized operations. +int IntegerDoubleCompare(double a, double b); + +// This first creates a multiplier in a double equivalent of +// Q(input_integer_bits).(31-input_integer_bits) representation, with extra +// precision in the double's fractional bits. It then splits the result into +// significand and exponent. +void PreprocessSoftmaxScaling(double beta, double input_scale, + int input_integer_bits, + int32_t* quantized_multiplier, int* left_shift); +// Like PreprocessSoftmaxScaling, but inverse scaling factors also calculated. 
+void PreprocessLogSoftmaxScalingExp(double beta, double input_scale, + int input_integer_bits, + int32_t* quantized_multiplier, + int* left_shift, + int32_t* reverse_scaling_divisor, + int* reverse_scaling_left_shift); +// Calculate the largest input that will result in a within-bounds intermediate +// result within MultiplyByQuantizedMultiplierGreaterThanOne. In other words, +// it must not overflow before we reduce the value by multiplication by the +// input multiplier. The negative radius is used as the minimum difference in +// Softmax. +int CalculateInputRadius(int input_integer_bits, int input_left_shift, + int total_signed_bits = 31); + +// Nudges a min/max quantization range to ensure zero is zero. +// Gymnastics with nudged zero point is to ensure that real zero maps to +// an integer, which is required for e.g. zero-padding in convolutional layers. +// Outputs nudged_min, nudged_max, nudged_scale. +void NudgeQuantizationRange(const float min, const float max, + const int quant_min, const int quant_max, + float* nudged_min, float* nudged_max, + float* nudged_scale); + +// Fake quantizes (quantizes and dequantizes) input_data using the scale, +// nudged_min, and nudged_max from NudgeQuantizationRange. This matches the code +// in TensorFlow's FakeQuantizeWithMinMaxVarsFunctor. +void FakeQuantizeArray(const float nudged_scale, const float nudged_min, + const float nudged_max, const float* input_data, + float* output_data, const float size); + +// If x is approximately a power of two (with any positive or negative +// exponent), stores that exponent (i.e. log2(x)) in *log2_result, otherwise +// returns false. +bool CheckedLog2(const float x, int* log2_result); + +// Decomposes an array of double multipliers into a Q0.31 int32 representation +// of its significand, and shift representation of its exponent. +// +// Handles an arbitrary multiplier. 
The 'shift' output-value is +// basically the 'floating-point exponent' of the multiplier: +// Negative for a right-shift (when the multiplier is <1), positive for a +// left-shift (when the multiplier is >1) +void QuantizeMultiplierArray(const double* effective_scales, size_t size, + int32_t* effective_scale_significand, + int* effective_shift); + +} // namespace tflite + +#endif // TENSORFLOW_LITE_KERNELS_INTERNAL_QUANTIZATION_UTIL_H_ diff --git a/tensorflow/lite/kernels/internal/reference/add.h b/tensorflow/lite/kernels/internal/reference/add.h new file mode 100644 index 0000000..b89a57b --- /dev/null +++ b/tensorflow/lite/kernels/internal/reference/add.h @@ -0,0 +1,502 @@ +/* Copyright 2019 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/ +#ifndef TENSORFLOW_LITE_KERNELS_INTERNAL_REFERENCE_ADD_H_ +#define TENSORFLOW_LITE_KERNELS_INTERNAL_REFERENCE_ADD_H_ + +#include +#include + +#include "fixedpoint/fixedpoint.h" +#include "tensorflow/lite/kernels/internal/common.h" + +namespace tflite { + +namespace reference_ops { + +template +inline void Add(const ArithmeticParams& params, + const RuntimeShape& input1_shape, const T* input1_data, + const RuntimeShape& input2_shape, const T* input2_data, + const RuntimeShape& output_shape, T* output_data) { + T activation_min, activation_max; + GetActivationParams(params, &activation_min, &activation_max); + + const int flat_size = + MatchingElementsSize(input1_shape, input2_shape, output_shape); + for (int i = 0; i < flat_size; ++i) { + output_data[i] = ActivationFunctionWithMinMax( + input1_data[i] + input2_data[i], activation_min, activation_max); + } +} + +// Element-wise add that can often be used for inner loop of broadcast add as +// well as the non-broadcast add. + +// This function is used for 8-bit as well as for 16-bit, but the accumulator +// is 32-bit for both cases. The overflow does not happen due to the +// choice of the shift (20 or 15, accordingly - see add.cc for more comments). 
+template +inline void AddElementwise(int size, const ArithmeticParams& params, + const T* input1_data, const T* input2_data, + T* output_data) { + TFLITE_DCHECK_GT(params.input1_offset, -std::numeric_limits::max()); + TFLITE_DCHECK_GT(params.input2_offset, -std::numeric_limits::max()); + TFLITE_DCHECK_LT(params.input1_offset, std::numeric_limits::max()); + TFLITE_DCHECK_LT(params.input2_offset, std::numeric_limits::max()); + + for (int i = 0; i < size; ++i) { + const int32_t input1_val = params.input1_offset + input1_data[i]; + const int32_t input2_val = params.input2_offset + input2_data[i]; + const int32_t shifted_input1_val = input1_val * (1 << params.left_shift); + const int32_t shifted_input2_val = input2_val * (1 << params.left_shift); + const int32_t scaled_input1_val = + MultiplyByQuantizedMultiplierSmallerThanOneExp( + shifted_input1_val, params.input1_multiplier, params.input1_shift); + const int32_t scaled_input2_val = + MultiplyByQuantizedMultiplierSmallerThanOneExp( + shifted_input2_val, params.input2_multiplier, params.input2_shift); + const int32_t raw_sum = scaled_input1_val + scaled_input2_val; + const int32_t raw_output = + MultiplyByQuantizedMultiplierSmallerThanOneExp( + raw_sum, params.output_multiplier, params.output_shift) + + params.output_offset; + const int32_t clamped_output = + std::min(params.quantized_activation_max, + std::max(params.quantized_activation_min, raw_output)); + output_data[i] = static_cast(clamped_output); + } +} + +// Scalar-broadcast add that can be used for inner loop of more general +// broadcast add, so that, for example, scalar-broadcast with batch will still +// be fast. 
+inline void AddScalarBroadcast(int size, const ArithmeticParams& params, + uint8_t input1_data, const uint8_t* input2_data, + uint8_t* output_data) { + TFLITE_DCHECK_GT(params.input1_offset, -256); + TFLITE_DCHECK_GT(params.input2_offset, -256); + TFLITE_DCHECK_LT(params.input1_offset, 256); + TFLITE_DCHECK_LT(params.input2_offset, 256); + + const int32_t input1_val = params.input1_offset + input1_data; + const int32_t shifted_input1_val = input1_val * (1 << params.left_shift); + const int32_t scaled_input1_val = + MultiplyByQuantizedMultiplierSmallerThanOneExp( + shifted_input1_val, params.input1_multiplier, params.input1_shift); + for (int i = 0; i < size; ++i) { + const int32_t input2_val = params.input2_offset + input2_data[i]; + const int32_t shifted_input2_val = input2_val * (1 << params.left_shift); + const int32_t scaled_input2_val = + MultiplyByQuantizedMultiplierSmallerThanOneExp( + shifted_input2_val, params.input2_multiplier, params.input2_shift); + const int32_t raw_sum = scaled_input1_val + scaled_input2_val; + const int32_t raw_output = + MultiplyByQuantizedMultiplierSmallerThanOneExp( + raw_sum, params.output_multiplier, params.output_shift) + + params.output_offset; + const int32_t clamped_output = + std::min(params.quantized_activation_max, + std::max(params.quantized_activation_min, raw_output)); + output_data[i] = static_cast(clamped_output); + } +} + +inline void Add(const ArithmeticParams& params, + const RuntimeShape& input1_shape, const uint8_t* input1_data, + const RuntimeShape& input2_shape, const uint8_t* input2_data, + const RuntimeShape& output_shape, uint8_t* output_data) { + TFLITE_DCHECK_LE(params.quantized_activation_min, + params.quantized_activation_max); + const int flat_size = + MatchingElementsSize(input1_shape, input2_shape, output_shape); + + TFLITE_DCHECK_GT(params.input1_offset, -256); + TFLITE_DCHECK_GT(params.input2_offset, -256); + TFLITE_DCHECK_LT(params.input1_offset, 256); + TFLITE_DCHECK_LT(params.input2_offset, 
256); + AddElementwise(flat_size, params, input1_data, input2_data, output_data); +} + +inline void AddGeneralParamScale(const ArithmeticParams& params, + const RuntimeShape& input1_shape, + const int16_t* input1_data, + const RuntimeShape& input2_shape, + const int16_t* input2_data, + const RuntimeShape& output_shape, + int16_t* output_data) { + TFLITE_DCHECK_LE(params.quantized_activation_min, + params.quantized_activation_max); + const int flat_size = + MatchingElementsSize(input1_shape, input2_shape, output_shape); + + int max_value = std::numeric_limits::max(); + + TFLITE_DCHECK_GT(params.input1_offset, -max_value); + TFLITE_DCHECK_GT(params.input2_offset, -max_value); + TFLITE_DCHECK_LT(params.input1_offset, max_value); + TFLITE_DCHECK_LT(params.input2_offset, max_value); + AddElementwise(flat_size, params, input1_data, input2_data, output_data); +} + +inline void Add(const ArithmeticParams& params, + const RuntimeShape& input1_shape, const int16_t* input1_data, + const RuntimeShape& input2_shape, const int16_t* input2_data, + const RuntimeShape& output_shape, int16_t* output_data, + bool pot_scale = true) { + if (!pot_scale) { + AddGeneralParamScale(params, input1_shape, input1_data, input2_shape, + input2_data, output_shape, output_data); + return; + } + + TFLITE_DCHECK_LE(params.quantized_activation_min, + params.quantized_activation_max); + + const int input1_shift = params.input1_shift; + const int flat_size = + MatchingElementsSize(input1_shape, input2_shape, output_shape); + const int16_t output_activation_min = params.quantized_activation_min; + const int16_t output_activation_max = params.quantized_activation_max; + + TFLITE_DCHECK(input1_shift == 0 || params.input2_shift == 0); + TFLITE_DCHECK_LE(input1_shift, 0); + TFLITE_DCHECK_LE(params.input2_shift, 0); + const int16_t* not_shift_input = + input1_shift == 0 ? input1_data : input2_data; + const int16_t* shift_input = input1_shift == 0 ? 
input2_data : input1_data; + const int input_right_shift = + input1_shift == 0 ? -params.input2_shift : -input1_shift; + + for (int i = 0; i < flat_size; i++) { + // F0 uses 0 integer bits, range [-1, 1]. + using F0 = gemmlowp::FixedPoint; + + F0 input_ready_scaled = F0::FromRaw(not_shift_input[i]); + F0 scaled_input = F0::FromRaw( + gemmlowp::RoundingDivideByPOT(shift_input[i], input_right_shift)); + F0 result = gemmlowp::SaturatingAdd(scaled_input, input_ready_scaled); + const int16_t raw_output = result.raw(); + const int16_t clamped_output = std::min( + output_activation_max, std::max(output_activation_min, raw_output)); + output_data[i] = clamped_output; + } +} + +template +inline typename std::enable_if::value || dummy, void>::type +BroadcastAdd6DSlow(const ArithmeticParams& params, + const RuntimeShape& input1_shape, const T* input1_data, + const RuntimeShape& input2_shape, const T* input2_data, + const RuntimeShape& output_shape, T* output_data) { + NdArrayDesc<6> desc1; + NdArrayDesc<6> desc2; + NdArrayDescsForElementwiseBroadcast(input1_shape, input2_shape, &desc1, + &desc2); + const RuntimeShape extended_output_shape = + RuntimeShape::ExtendedShape(6, output_shape); + + T activation_min, activation_max; + GetActivationParams(params, &activation_min, &activation_max); + + // In Tensorflow, the dimensions are canonically named (batch_number, row, + // col, channel), with extents (batches, height, width, depth), with the + // trailing dimension changing most rapidly (channels has the smallest stride, + // typically 1 element). + // + // In generated C code, we store arrays with the dimensions reversed. The + // first dimension has smallest stride. + // + // We name our variables by their Tensorflow convention, but generate C code + // nesting loops such that the innermost loop has the smallest stride for the + // best cache behavior. 
+ size_t input1_offset_a = 0; + size_t input2_offset_a = 0; + size_t output_offset_a = 0; + for (int a = 0; a < extended_output_shape.Dims(0); ++a) { + size_t input1_offset_d = input1_offset_a; + size_t input2_offset_d = input2_offset_a; + size_t output_offset_d = output_offset_a; + for (int d = 0; d < extended_output_shape.Dims(1); ++d) { + size_t input1_offset_b = input1_offset_d; + size_t input2_offset_b = input2_offset_d; + size_t output_offset_b = output_offset_d; + for (int b = 0; b < extended_output_shape.Dims(2); ++b) { + size_t input1_offset_y = input1_offset_b; + size_t input2_offset_y = input2_offset_b; + size_t output_offset_y = output_offset_b; + for (int y = 0; y < extended_output_shape.Dims(3); ++y) { + size_t input1_offset_x = input1_offset_y; + size_t input2_offset_x = input2_offset_y; + size_t output_offset_x = output_offset_y; + for (int x = 0; x < extended_output_shape.Dims(4); ++x) { + size_t input1_offset_c = input1_offset_x; + size_t input2_offset_c = input2_offset_x; + size_t output_offset_c = output_offset_x; + for (int c = 0; c < extended_output_shape.Dims(5); ++c) { + output_data[output_offset_c] = ActivationFunctionWithMinMax( + input1_data[input1_offset_c] + input2_data[input2_offset_c], + activation_min, activation_max); + input1_offset_c += desc1.strides[5]; + input2_offset_c += desc2.strides[5]; + ++output_offset_c; + } + input1_offset_x += desc1.strides[4]; + input2_offset_x += desc2.strides[4]; + output_offset_x += extended_output_shape.Dims(5); + } + input1_offset_y += desc1.strides[3]; + input2_offset_y += desc2.strides[3]; + output_offset_y += + extended_output_shape.Dims(4) * extended_output_shape.Dims(5); + } + input1_offset_b += desc1.strides[2]; + input2_offset_b += desc2.strides[2]; + output_offset_b += extended_output_shape.Dims(3) * + extended_output_shape.Dims(4) * + extended_output_shape.Dims(5); + } + input1_offset_d += desc1.strides[1]; + input2_offset_d += desc2.strides[1]; + output_offset_d += + 
extended_output_shape.Dims(2) * extended_output_shape.Dims(3) * + extended_output_shape.Dims(4) * extended_output_shape.Dims(5); + } + input1_offset_a += desc1.strides[0]; + input2_offset_a += desc2.strides[0]; + output_offset_a += + extended_output_shape.Dims(1) * extended_output_shape.Dims(2) * + extended_output_shape.Dims(3) * extended_output_shape.Dims(4) * + extended_output_shape.Dims(5); + } +} + +// This function is used for 8-bit as well as for 16-bit, but the accumulator +// is 32-bit for both cases. The overflow does not happen due to the +// choice of the shift (20 or 15, accordingly - see add.cc for more comments). +template +inline typename std::enable_if::value, void>::type +BroadcastAdd6DSlow(const ArithmeticParams& params, + const RuntimeShape& input1_shape, const T* input1_data, + const RuntimeShape& input2_shape, const T* input2_data, + const RuntimeShape& output_shape, T* output_data) { + NdArrayDesc<6> desc1; + NdArrayDesc<6> desc2; + NdArrayDescsForElementwiseBroadcast(input1_shape, input2_shape, &desc1, + &desc2); + const RuntimeShape extended_output_shape = + RuntimeShape::ExtendedShape(6, output_shape); + + // In Tensorflow, the dimensions are canonically named (batch_number, row, + // col, channel), with extents (batches, height, width, depth), with the + // trailing dimension changing most rapidly (channels has the smallest stride, + // typically 1 element). + // + // In generated C code, we store arrays with the dimensions reversed. The + // first dimension has smallest stride. + // + // We name our variables by their Tensorflow convention, but generate C code + // nesting loops such that the innermost loop has the smallest stride for the + // best cache behavior. 
+ size_t input1_offset_a = 0; + size_t input2_offset_a = 0; + size_t output_offset_a = 0; + for (int a = 0; a < extended_output_shape.Dims(0); ++a) { + size_t input1_offset_d = input1_offset_a; + size_t input2_offset_d = input2_offset_a; + size_t output_offset_d = output_offset_a; + for (int d = 0; d < extended_output_shape.Dims(1); ++d) { + size_t input1_offset_b = input1_offset_d; + size_t input2_offset_b = input2_offset_d; + size_t output_offset_b = output_offset_d; + for (int b = 0; b < extended_output_shape.Dims(2); ++b) { + size_t input1_offset_y = input1_offset_b; + size_t input2_offset_y = input2_offset_b; + size_t output_offset_y = output_offset_b; + for (int y = 0; y < extended_output_shape.Dims(3); ++y) { + size_t input1_offset_x = input1_offset_y; + size_t input2_offset_x = input2_offset_y; + size_t output_offset_x = output_offset_y; + for (int x = 0; x < extended_output_shape.Dims(4); ++x) { + size_t input1_offset_c = input1_offset_x; + size_t input2_offset_c = input2_offset_x; + size_t output_offset_c = output_offset_x; + for (int c = 0; c < extended_output_shape.Dims(5); ++c) { + const int32_t input1_val = + params.input1_offset + input1_data[input1_offset_c]; + const int32_t input2_val = + params.input2_offset + input2_data[input2_offset_c]; + const int32_t shifted_input1_val = + input1_val * (1 << params.left_shift); + const int32_t shifted_input2_val = + input2_val * (1 << params.left_shift); + const int32_t scaled_input1_val = + MultiplyByQuantizedMultiplierSmallerThanOneExp( + shifted_input1_val, params.input1_multiplier, + params.input1_shift); + const int32_t scaled_input2_val = + MultiplyByQuantizedMultiplierSmallerThanOneExp( + shifted_input2_val, params.input2_multiplier, + params.input2_shift); + const int32_t raw_sum = scaled_input1_val + scaled_input2_val; + const int32_t raw_output = + MultiplyByQuantizedMultiplierSmallerThanOneExp( + raw_sum, params.output_multiplier, params.output_shift) + + params.output_offset; + const int32_t 
clamped_output = std::min( + params.quantized_activation_max, + std::max(params.quantized_activation_min, raw_output)); + output_data[output_offset_c] = static_cast(clamped_output); + input1_offset_c += desc1.strides[5]; + input2_offset_c += desc2.strides[5]; + ++output_offset_c; + } + input1_offset_x += desc1.strides[4]; + input2_offset_x += desc2.strides[4]; + output_offset_x += extended_output_shape.Dims(5); + } + input1_offset_y += desc1.strides[3]; + input2_offset_y += desc2.strides[3]; + output_offset_y += + extended_output_shape.Dims(4) * extended_output_shape.Dims(5); + } + input1_offset_b += desc1.strides[2]; + input2_offset_b += desc2.strides[2]; + output_offset_b += extended_output_shape.Dims(3) * + extended_output_shape.Dims(4) * + extended_output_shape.Dims(5); + } + input1_offset_d += desc1.strides[1]; + input2_offset_d += desc2.strides[1]; + output_offset_d += + extended_output_shape.Dims(2) * extended_output_shape.Dims(3) * + extended_output_shape.Dims(4) * extended_output_shape.Dims(5); + } + input1_offset_a += desc1.strides[0]; + input2_offset_a += desc2.strides[0]; + output_offset_a += + extended_output_shape.Dims(1) * extended_output_shape.Dims(2) * + extended_output_shape.Dims(3) * extended_output_shape.Dims(4) * + extended_output_shape.Dims(5); + } +} + +template +inline void BroadcastAdd4DSlow( + const ArithmeticParams& params, const RuntimeShape& input1_shape, + const T* input1_data, const RuntimeShape& input2_shape, + const T* input2_data, const RuntimeShape& output_shape, T* output_data) { + return BroadcastAdd6DSlow(params, input1_shape, input1_data, input2_shape, + input2_data, output_shape, output_data); +} + +inline void BroadcastAddFivefold(const ArithmeticParams& unswitched_params, + const RuntimeShape& unswitched_input1_shape, + const uint8_t* unswitched_input1_data, + const RuntimeShape& unswitched_input2_shape, + const uint8_t* unswitched_input2_data, + const RuntimeShape& output_shape, + uint8_t* output_data) { + ArithmeticParams 
switched_params = unswitched_params; + switched_params.input1_offset = unswitched_params.input2_offset; + switched_params.input1_multiplier = unswitched_params.input2_multiplier; + switched_params.input1_shift = unswitched_params.input2_shift; + switched_params.input2_offset = unswitched_params.input1_offset; + switched_params.input2_multiplier = unswitched_params.input1_multiplier; + switched_params.input2_shift = unswitched_params.input1_shift; + + const bool use_unswitched = + unswitched_params.broadcast_category == + tflite::BroadcastableOpCategory::kFirstInputBroadcastsFast; + + const ArithmeticParams& params = + use_unswitched ? unswitched_params : switched_params; + const uint8_t* input1_data = + use_unswitched ? unswitched_input1_data : unswitched_input2_data; + const uint8_t* input2_data = + use_unswitched ? unswitched_input2_data : unswitched_input1_data; + + // Fivefold nested loops. The second input resets its position for each + // iteration of the second loop. The first input resets its position at the + // beginning of the fourth loop. The innermost loop is an elementwise add of + // sections of the arrays. + uint8_t* output_data_ptr = output_data; + const uint8_t* input1_data_ptr = input1_data; + const uint8_t* input2_data_reset = input2_data; + // In the fivefold pattern, y0, y2 and y4 are not broadcast, and so shared + // between input shapes. y3 for input 1 is always broadcast, and so the + // dimension there is 1, whereas optionally y1 might be broadcast for input 2. + // Put another way, + // input1.shape.FlatSize = y0 * y1 * y2 * y4, + // input2.shape.FlatSize = y0 * y2 * y3 * y4. + int y0 = params.broadcast_shape[0]; + int y1 = params.broadcast_shape[1]; + int y2 = params.broadcast_shape[2]; + int y3 = params.broadcast_shape[3]; + int y4 = params.broadcast_shape[4]; + if (y4 > 1) { + // General fivefold pattern, with y4 > 1 so there is a non-broadcast inner + // dimension. 
+ for (int i0 = 0; i0 < y0; ++i0) { + const uint8_t* input2_data_ptr; + for (int i1 = 0; i1 < y1; ++i1) { + input2_data_ptr = input2_data_reset; + for (int i2 = 0; i2 < y2; ++i2) { + for (int i3 = 0; i3 < y3; ++i3) { + AddElementwise(y4, params, input1_data_ptr, input2_data_ptr, + output_data_ptr); + input2_data_ptr += y4; + output_data_ptr += y4; + } + // We have broadcast y4 of input1 data y3 times, and now move on. + input1_data_ptr += y4; + } + } + // We have broadcast y2*y3*y4 of input2 data y1 times, and now move on. + input2_data_reset = input2_data_ptr; + } + } else { + // Special case of y4 == 1, in which the innermost loop is a single element + // and can be combined with the next (y3) as an inner broadcast. + // + // Note that this handles the case of pure scalar broadcast when + // y0 == y1 == y2 == 1. With low overhead it handles cases such as scalar + // broadcast with batch (as y2 > 1). + // + // NOTE The process is the same as the above general case except simplified + // for y4 == 1 and the loop over y3 is contained within the + // AddScalarBroadcast function. + for (int i0 = 0; i0 < y0; ++i0) { + const uint8_t* input2_data_ptr; + for (int i1 = 0; i1 < y1; ++i1) { + input2_data_ptr = input2_data_reset; + for (int i2 = 0; i2 < y2; ++i2) { + AddScalarBroadcast(y3, params, *input1_data_ptr, input2_data_ptr, + output_data_ptr); + input2_data_ptr += y3; + output_data_ptr += y3; + input1_data_ptr += 1; + } + } + input2_data_reset = input2_data_ptr; + } + } +} + +} // namespace reference_ops +} // namespace tflite + +#endif // TENSORFLOW_LITE_KERNELS_INTERNAL_REFERENCE_ADD_H_ diff --git a/tensorflow/lite/kernels/internal/reference/add_n.h b/tensorflow/lite/kernels/internal/reference/add_n.h new file mode 100644 index 0000000..b6b5882 --- /dev/null +++ b/tensorflow/lite/kernels/internal/reference/add_n.h @@ -0,0 +1,86 @@ +/* Copyright 2020 The TensorFlow Authors. All Rights Reserved. 
+ +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ +#ifndef TENSORFLOW_LITE_KERNELS_INTERNAL_REFERENCE_ADD_N_H_ +#define TENSORFLOW_LITE_KERNELS_INTERNAL_REFERENCE_ADD_N_H_ + +#include +#include + +#include "tensorflow/lite/kernels/internal/common.h" + +namespace tflite { +namespace reference_ops { + +// T is expected to be either float or int. +template +inline void AddN(const RuntimeShape& input_shape, const size_t num_inputs, + const T* const* input_data, T* output_data) { + // All inputs and output should have the same shape, this is checked during + // Prepare stage. + const size_t size = input_shape.FlatSize(); + for (size_t i = 0; i < size; ++i) { + T x = 0; + for (size_t j = 0; j < num_inputs; ++j) { + x += input_data[j][i]; + } + output_data[i] = x; + } +} + +inline void AddN(const ArithmeticParams& params, + const RuntimeShape& input_shape, const size_t num_inputs, + const int8_t* const* input_data, int8_t* output_data) { + TFLITE_DCHECK_LE(params.quantized_activation_min, + params.quantized_activation_max); + // Input offset is negative input zero point. Activation tensors are + // asymmetric quantized so they span the full int8 range. + // All inputs should have same zero-point and scale, this is checked during + // Prepare stage. 
+ TFLITE_DCHECK_GE(-params.input1_offset, std::numeric_limits::min()); + TFLITE_DCHECK_LE(-params.input1_offset, std::numeric_limits::max()); + + // All inputs and output should have the same shape, this is checked during + // Prepare stage. + const size_t size = input_shape.FlatSize(); + for (size_t i = 0; i < size; ++i) { + // accumulate in scaled_x before clamping to avoid overflow + const int32_t x = params.input1_offset; // x = 0 + const int32_t shifted_x = x * (1 << params.left_shift); + int32_t scaled_x = MultiplyByQuantizedMultiplierSmallerThanOneExp( + shifted_x, params.input1_multiplier, params.input1_shift); + + for (size_t j = 0; j < num_inputs; ++j) { + const int32_t y = params.input1_offset + input_data[j][i]; + const int32_t shifted_y = y * (1 << params.left_shift); + int32_t scaled_y = MultiplyByQuantizedMultiplierSmallerThanOneExp( + shifted_y, params.input1_multiplier, params.input1_shift); + scaled_x += scaled_y; + } + + const int32_t raw_output = + MultiplyByQuantizedMultiplierSmallerThanOneExp( + scaled_x, params.output_multiplier, params.output_shift) + + params.output_offset; + const int32_t clamped_output = + std::min(params.quantized_activation_max, + std::max(params.quantized_activation_min, raw_output)); + output_data[i] = static_cast(clamped_output); + } +} + +} // namespace reference_ops +} // namespace tflite + +#endif // TENSORFLOW_LITE_KERNELS_INTERNAL_REFERENCE_ADD_N_H_ diff --git a/tensorflow/lite/kernels/internal/reference/arg_min_max.h b/tensorflow/lite/kernels/internal/reference/arg_min_max.h new file mode 100644 index 0000000..8154fbf --- /dev/null +++ b/tensorflow/lite/kernels/internal/reference/arg_min_max.h @@ -0,0 +1,88 @@ +/* Copyright 2019 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. 
+You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ +#ifndef TENSORFLOW_LITE_KERNELS_INTERNAL_REFERENCE_ARG_MIN_MAX_H_ +#define TENSORFLOW_LITE_KERNELS_INTERNAL_REFERENCE_ARG_MIN_MAX_H_ + +#include + +#include "tensorflow/lite/kernels/internal/types.h" + +namespace tflite { + +namespace reference_ops { + +template +std::function GetComparefunction(bool is_arg_max) { + if (is_arg_max) { + return std::greater(); + } else { + return std::less(); + } +} + +template +void ArgMinMax(const RuntimeShape& input1_shape, const T1* input1_data, + const T3* input2_data, const RuntimeShape& output_shape, + T2* output_data, const Cmp& cmp) { + TFLITE_DCHECK_GT(input1_shape.DimensionsCount(), 0); + TFLITE_DCHECK_EQ(input1_shape.DimensionsCount() - 1, + output_shape.DimensionsCount()); + int axis = input2_data[0]; + if (axis < 0) { + axis += input1_shape.DimensionsCount(); + } + const int axis_size = input1_shape.Dims(axis); + + int outer_size = 1; + for (int i = 0; i < axis; ++i) { + TFLITE_DCHECK_EQ(input1_shape.Dims(i), output_shape.Dims(i)); + outer_size *= input1_shape.Dims(i); + } + + int inner_size = 1; + const int dims_count = input1_shape.DimensionsCount(); + for (int i = axis + 1; i < dims_count; ++i) { + TFLITE_DCHECK_EQ(input1_shape.Dims(i), output_shape.Dims(i - 1)); + inner_size *= input1_shape.Dims(i); + } + for (int outer = 0; outer < outer_size; ++outer) { + for (int inner = 0; inner < inner_size; ++inner) { + auto min_max_value = input1_data[outer * axis_size * inner_size + inner]; + T2 min_max_index = 0; + for (int i = 1; i < axis_size; 
++i) { + const auto& curr_value = + input1_data[(outer * axis_size + i) * inner_size + inner]; + if (cmp(curr_value, min_max_value)) { + min_max_value = curr_value; + min_max_index = static_cast(i); + } + } + output_data[outer * inner_size + inner] = min_max_index; + } + } +} + +template +void ArgMinMax(const RuntimeShape& input1_shape, const T1* input1_data, + const T3* input2_data, const RuntimeShape& output_shape, + T2* output_data, const bool is_arg_max) { + ArgMinMax(input1_shape, input1_data, input2_data, output_shape, output_data, + GetComparefunction(is_arg_max)); +} + +} // namespace reference_ops +} // namespace tflite + +#endif // TENSORFLOW_LITE_KERNELS_INTERNAL_REFERENCE_ARG_MIN_MAX_H_ diff --git a/tensorflow/lite/kernels/internal/reference/batch_matmul.h b/tensorflow/lite/kernels/internal/reference/batch_matmul.h new file mode 100644 index 0000000..767ad6a --- /dev/null +++ b/tensorflow/lite/kernels/internal/reference/batch_matmul.h @@ -0,0 +1,275 @@ +/* Copyright 2020 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/ +#ifndef TENSORFLOW_LITE_KERNELS_INTERNAL_REFERENCE_BATCH_MATMUL_H_ +#define TENSORFLOW_LITE_KERNELS_INTERNAL_REFERENCE_BATCH_MATMUL_H_ + +#include +#include + +#include "tensorflow/lite/kernels/internal/common.h" +#include "tensorflow/lite/kernels/internal/compatibility.h" +#include "tensorflow/lite/kernels/internal/portable_tensor_utils.h" +#include "tensorflow/lite/kernels/internal/types.h" + +namespace tflite { +namespace reference_ops { +namespace batch_matmul { + +// Determine which dimension is the broadcast dimension. +inline int broadcast_dim(int lhs_dim, int rhs_dim) { + if (lhs_dim == rhs_dim) return lhs_dim; + if (lhs_dim == 1) return rhs_dim; + TFLITE_DCHECK_EQ(rhs_dim, 1); + return lhs_dim; +} + +// Compute the "extent" for iterating on this dimension. +// If we are broadcasting, then don't advance (i.e return 0). +inline int extent(const RuntimeShape& shape, int x) { + if (shape.Dims(x) == 1) { + return 0; + } + int prod = 1; + for (int i = x + 1; i < shape.DimensionsCount(); ++i) { + prod *= shape.Dims(i); + } + return prod; +} + +} // namespace batch_matmul + +template +inline void BatchMatMul(const RuntimeShape& lhs_shape, const Ta* lhs_data, + const RuntimeShape& rhs_shape, const Tb* rhs_data, + const RuntimeShape& output_shape, Tout* output_data) { + const RuntimeShape extended_lhs_shape = + RuntimeShape::ExtendedShape(5, lhs_shape); + const RuntimeShape extended_rhs_shape = + RuntimeShape::ExtendedShape(5, rhs_shape); + + const int batch_dim0 = batch_matmul::broadcast_dim( + extended_lhs_shape.Dims(0), extended_rhs_shape.Dims(0)); + const int batch_dim1 = batch_matmul::broadcast_dim( + extended_lhs_shape.Dims(1), extended_rhs_shape.Dims(1)); + const int batch_dim2 = batch_matmul::broadcast_dim( + extended_lhs_shape.Dims(2), extended_rhs_shape.Dims(2)); + + const int lhs_ext0 = batch_matmul::extent(extended_lhs_shape, 0); + const int lhs_ext1 = 
batch_matmul::extent(extended_lhs_shape, 1); + const int lhs_ext2 = batch_matmul::extent(extended_lhs_shape, 2); + const int rhs_ext0 = batch_matmul::extent(extended_rhs_shape, 0); + const int rhs_ext1 = batch_matmul::extent(extended_rhs_shape, 1); + const int rhs_ext2 = batch_matmul::extent(extended_rhs_shape, 2); + + // Set params for each matrix multiply. + const int lhs_rows = extended_lhs_shape.Dims(3); + const int rhs_cols = extended_rhs_shape.Dims(4); + const int accum_depth = extended_lhs_shape.Dims(4); + + for (int b0 = 0; b0 < batch_dim0; ++b0) { + const Ta* lhs_ptr0 = lhs_data + (b0 * lhs_ext0); + const Tb* rhs_ptr0 = rhs_data + (b0 * rhs_ext0); + for (int b1 = 0; b1 < batch_dim1; ++b1) { + const Ta* lhs_ptr1 = lhs_ptr0 + b1 * lhs_ext1; + const Tb* rhs_ptr1 = rhs_ptr0 + b1 * rhs_ext1; + for (int b2 = 0; b2 < batch_dim2; ++b2) { + const Ta* lhs_ptr2 = lhs_ptr1 + b2 * lhs_ext2; + const Tb* rhs_ptr2 = rhs_ptr1 + b2 * rhs_ext2; + Tout* out_ptr = output_data + ((b0 * batch_dim1 * batch_dim2) + + b1 * batch_dim2 + b2) * + lhs_rows * rhs_cols; + for (int j = 0; j < rhs_cols; ++j) { + for (int i = 0; i < lhs_rows; ++i) { + Tout total = 0; + for (int k = 0; k < accum_depth; ++k) { + total += static_cast(lhs_ptr2[accum_depth * i + k]) * + static_cast(rhs_ptr2[j * accum_depth + k]); + } + int idx = lhs_rows * j + i; + out_ptr[idx] = total; + } + } + } + } + } +} + +inline void BatchMatMul(const RuntimeShape& lhs_shape, const int8_t* lhs_data, + const RuntimeShape& rhs_shape, const int8_t* rhs_data, + const float* scaling_factors, + const int32_t* input_offset, int32_t* row_sums, + const RuntimeShape& output_shape, float* output_data, + bool* compute_row_sums) { + const RuntimeShape extended_lhs_shape = + RuntimeShape::ExtendedShape(5, lhs_shape); + const RuntimeShape extended_rhs_shape = + RuntimeShape::ExtendedShape(5, rhs_shape); + + const int batch_dim0 = batch_matmul::broadcast_dim( + extended_lhs_shape.Dims(0), extended_rhs_shape.Dims(0)); + const int 
batch_dim1 = batch_matmul::broadcast_dim( + extended_lhs_shape.Dims(1), extended_rhs_shape.Dims(1)); + const int batch_dim2 = batch_matmul::broadcast_dim( + extended_lhs_shape.Dims(2), extended_rhs_shape.Dims(2)); + + const int lhs_ext0 = batch_matmul::extent(extended_lhs_shape, 0); + const int lhs_ext1 = batch_matmul::extent(extended_lhs_shape, 1); + const int lhs_ext2 = batch_matmul::extent(extended_lhs_shape, 2); + const int rhs_ext0 = batch_matmul::extent(extended_rhs_shape, 0); + const int rhs_ext1 = batch_matmul::extent(extended_rhs_shape, 1); + const int rhs_ext2 = batch_matmul::extent(extended_rhs_shape, 2); + + // Set params for each matrix multiply. + const int lhs_rows = extended_lhs_shape.Dims(3); + const int rhs_cols = extended_rhs_shape.Dims(4); + const int accum_depth = extended_lhs_shape.Dims(4); + + const int ioff_ext0 = rhs_ext0 == 0 ? 0 : rhs_cols; + const int ioff_ext1 = rhs_ext1 == 0 ? 0 : rhs_cols; + const int ioff_ext2 = rhs_ext2 == 0 ? 0 : rhs_cols; + const int woff_ext0 = lhs_ext0 == 0 ? 0 : lhs_rows; + const int woff_ext1 = lhs_ext1 == 0 ? 0 : lhs_rows; + const int woff_ext2 = lhs_ext2 == 0 ? 
0 : lhs_rows; + + if (!compute_row_sums || *compute_row_sums) { + int num_weights_matrices = 1; + for (int i = 1; i < extended_lhs_shape.DimensionsCount() - 2; ++i) { + num_weights_matrices *= extended_lhs_shape.Dims(i); + } + tensor_utils::ReductionSumVector( + lhs_data, row_sums, num_weights_matrices * lhs_rows, accum_depth); + if (compute_row_sums) { + *compute_row_sums = false; + } + } + + for (int b0 = 0; b0 < batch_dim0; ++b0) { + const int8_t* lhs_ptr0 = lhs_data + (b0 * lhs_ext0); + const int8_t* rhs_ptr0 = rhs_data + (b0 * rhs_ext0); + const int32_t* ioff_ptr0 = input_offset + (b0 * ioff_ext0); + const float* scale_ptr0 = scaling_factors + (b0 * ioff_ext0); + const int32_t* woff_ptr0 = row_sums + (b0 * woff_ext0); + for (int b1 = 0; b1 < batch_dim1; ++b1) { + const int8_t* lhs_ptr1 = lhs_ptr0 + b1 * lhs_ext1; + const int8_t* rhs_ptr1 = rhs_ptr0 + b1 * rhs_ext1; + const int32_t* ioff_ptr1 = ioff_ptr0 + (b1 * ioff_ext1); + const float* scale_ptr1 = scale_ptr0 + (b1 * ioff_ext1); + const int32_t* woff_ptr1 = woff_ptr0 + (b1 * woff_ext1); + for (int b2 = 0; b2 < batch_dim2; ++b2) { + const int8_t* lhs_ptr2 = lhs_ptr1 + b2 * lhs_ext2; + const int8_t* rhs_ptr2 = rhs_ptr1 + b2 * rhs_ext2; + const int32_t* ioff_ptr2 = ioff_ptr1 + (b2 * ioff_ext2); + const float* scale_ptr2 = scale_ptr1 + (b2 * ioff_ext2); + const int32_t* woff_ptr2 = woff_ptr1 + (b2 * woff_ext2); + float* out_ptr = output_data + ((b0 * batch_dim1 * batch_dim2) + + b1 * batch_dim2 + b2) * + lhs_rows * rhs_cols; + for (int j = 0; j < rhs_cols; ++j) { + const float batch_scaling_factor = scale_ptr2[j]; + const float batch_offset = static_cast(ioff_ptr2[j]); + for (int i = 0; i < lhs_rows; ++i) { + int32_t total = 0; + for (int k = 0; k < accum_depth; ++k) { + total += + lhs_ptr2[accum_depth * i + k] * rhs_ptr2[j * accum_depth + k]; + } + int32_t row_sum = woff_ptr2[i]; + total -= row_sum * batch_offset; + int idx = lhs_rows * j + i; + out_ptr[idx] += batch_scaling_factor * total; + } + } + } + } + } 
+} + +template +inline void BatchMatMul(const FullyConnectedParams& params, + const RuntimeShape& lhs_shape, const T* lhs_data, + const RuntimeShape& rhs_shape, const T* rhs_data, + const RuntimeShape& output_shape, T* output_data) { + const RuntimeShape extended_lhs_shape = + RuntimeShape::ExtendedShape(5, lhs_shape); + const RuntimeShape extended_rhs_shape = + RuntimeShape::ExtendedShape(5, rhs_shape); + + const int batch_dim0 = batch_matmul::broadcast_dim( + extended_lhs_shape.Dims(0), extended_rhs_shape.Dims(0)); + const int batch_dim1 = batch_matmul::broadcast_dim( + extended_lhs_shape.Dims(1), extended_rhs_shape.Dims(1)); + const int batch_dim2 = batch_matmul::broadcast_dim( + extended_lhs_shape.Dims(2), extended_rhs_shape.Dims(2)); + + const int lhs_ext0 = batch_matmul::extent(extended_lhs_shape, 0); + const int lhs_ext1 = batch_matmul::extent(extended_lhs_shape, 1); + const int lhs_ext2 = batch_matmul::extent(extended_lhs_shape, 2); + const int rhs_ext0 = batch_matmul::extent(extended_rhs_shape, 0); + const int rhs_ext1 = batch_matmul::extent(extended_rhs_shape, 1); + const int rhs_ext2 = batch_matmul::extent(extended_rhs_shape, 2); + + // Set params for each matrix multiply. 
+ const int lhs_rows = extended_lhs_shape.Dims(3); + const int rhs_cols = extended_rhs_shape.Dims(4); + const int accum_depth = extended_lhs_shape.Dims(4); + + const int32_t input_offset = params.input_offset; + const int32_t filter_offset = params.weights_offset; + const int32_t output_offset = params.output_offset; + const int32_t output_multiplier = params.output_multiplier; + const int output_shift = params.output_shift; + const int32_t output_activation_min = params.quantized_activation_min; + const int32_t output_activation_max = params.quantized_activation_max; + TFLITE_DCHECK_LE(output_activation_min, output_activation_max); + + for (int b0 = 0; b0 < batch_dim0; ++b0) { + const T* lhs_ptr0 = lhs_data + (b0 * lhs_ext0); + const T* rhs_ptr0 = rhs_data + (b0 * rhs_ext0); + for (int b1 = 0; b1 < batch_dim1; ++b1) { + const T* lhs_ptr1 = lhs_ptr0 + b1 * lhs_ext1; + const T* rhs_ptr1 = rhs_ptr0 + b1 * rhs_ext1; + for (int b2 = 0; b2 < batch_dim2; ++b2) { + const T* lhs_ptr2 = lhs_ptr1 + b2 * lhs_ext2; + const T* rhs_ptr2 = rhs_ptr1 + b2 * rhs_ext2; + T* out_ptr = output_data + + ((b0 * batch_dim1 * batch_dim2) + b1 * batch_dim2 + b2) * + lhs_rows * rhs_cols; + + for (int j = 0; j < rhs_cols; ++j) { + for (int i = 0; i < lhs_rows; ++i) { + AccumT total = 0; + for (int k = 0; k < accum_depth; ++k) { + AccumT lhs_val = lhs_ptr2[accum_depth * i + k]; + AccumT rhs_val = rhs_ptr2[accum_depth * j + k]; + total += (lhs_val + filter_offset) * (rhs_val + input_offset); + } + int32_t total_scaled = MultiplyByQuantizedMultiplier( + total, output_multiplier, output_shift); + total_scaled += output_offset; + total_scaled = std::max(total_scaled, output_activation_min); + total_scaled = std::min(total_scaled, output_activation_max); + const int idx = lhs_rows * j + i; + out_ptr[idx] = static_cast(total_scaled); + } + } + } + } + } +} + +} // namespace reference_ops +} // namespace tflite + +#endif // TENSORFLOW_LITE_KERNELS_INTERNAL_REFERENCE_BATCH_MATMUL_H_ diff --git 
a/tensorflow/lite/kernels/internal/reference/batch_to_space_nd.h b/tensorflow/lite/kernels/internal/reference/batch_to_space_nd.h new file mode 100644 index 0000000..cda46a2 --- /dev/null +++ b/tensorflow/lite/kernels/internal/reference/batch_to_space_nd.h @@ -0,0 +1,101 @@ +/* Copyright 2020 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ +#ifndef TENSORFLOW_LITE_KERNELS_INTERNAL_REFERENCE_BATCH_TO_SPACE_ND_H_ +#define TENSORFLOW_LITE_KERNELS_INTERNAL_REFERENCE_BATCH_TO_SPACE_ND_H_ + +#include + +#include "ruy/profiler/instrumentation.h" // from @ruy +#include "tensorflow/lite/kernels/internal/types.h" + +namespace tflite { +namespace reference_ops { + +// TODO(b/135760455): Move this method anonymous namespace in a cc file. 
+inline RuntimeShape ExtendShapeBatchToSpace(const RuntimeShape& shape) { + if (shape.DimensionsCount() == 4) { + return shape; + } + RuntimeShape new_shape(4, 1); + new_shape.SetDim(0, shape.Dims(0)); + new_shape.SetDim(1, shape.Dims(1)); + new_shape.SetDim(3, shape.Dims(2)); + return new_shape; +} + +template +inline void BatchToSpaceND(const RuntimeShape& unextended_input1_shape, + const T* input1_data, + const RuntimeShape& unextended_input2_shape, + const int32_t* block_shape_data, + const RuntimeShape& unextended_input3_shape, + const int32_t* crops_data, + const RuntimeShape& unextended_output_shape, + T* output_data) { + ruy::profiler::ScopeLabel label("BatchToSpaceND"); + TFLITE_DCHECK_GE(unextended_input1_shape.DimensionsCount(), 3); + TFLITE_DCHECK_LE(unextended_input1_shape.DimensionsCount(), 4); + TFLITE_DCHECK_EQ(unextended_input1_shape.DimensionsCount(), + unextended_output_shape.DimensionsCount()); + + const RuntimeShape input1_shape = + ExtendShapeBatchToSpace(unextended_input1_shape); + const RuntimeShape output_shape = + ExtendShapeBatchToSpace(unextended_output_shape); + + const int output_width = output_shape.Dims(2); + const int output_height = output_shape.Dims(1); + const int output_batch_size = output_shape.Dims(0); + + const int depth = input1_shape.Dims(3); + const int input_width = input1_shape.Dims(2); + const int input_height = input1_shape.Dims(1); + const int input_batch_size = input1_shape.Dims(0); + + const int block_shape_height = block_shape_data[0]; + const int block_shape_width = + unextended_input1_shape.DimensionsCount() == 4 ? block_shape_data[1] : 1; + const int crops_top = crops_data[0]; + const int crops_left = + unextended_input1_shape.DimensionsCount() == 4 ? 
crops_data[2] : 0; + for (int in_batch = 0; in_batch < input_batch_size; ++in_batch) { + const int out_batch = in_batch % output_batch_size; + const int spatial_offset = in_batch / output_batch_size; + for (int in_h = 0; in_h < input_height; ++in_h) { + const int out_h = in_h * block_shape_height + + spatial_offset / block_shape_width - crops_top; + if (out_h < 0 || out_h >= output_height) { + continue; + } + for (int in_w = 0; in_w < input_width; ++in_w) { + const int out_w = in_w * block_shape_width + + spatial_offset % block_shape_width - crops_left; + + if (out_w < 0 || out_w >= output_width) { + continue; + } + T* out = output_data + Offset(output_shape, out_batch, out_h, out_w, 0); + const T* in = + input1_data + Offset(input1_shape, in_batch, in_h, in_w, 0); + memcpy(out, in, depth * sizeof(T)); + } + } + } +} + +} // namespace reference_ops +} // namespace tflite + +#endif // TENSORFLOW_LITE_KERNELS_INTERNAL_REFERENCE_BATCH_TO_SPACE_ND_H_ diff --git a/tensorflow/lite/kernels/internal/reference/binary_function.h b/tensorflow/lite/kernels/internal/reference/binary_function.h new file mode 100644 index 0000000..0b124af --- /dev/null +++ b/tensorflow/lite/kernels/internal/reference/binary_function.h @@ -0,0 +1,91 @@ +/* Copyright 2019 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/ +#ifndef TENSORFLOW_LITE_KERNELS_INTERNAL_REFERENCE_BINARY_FUNCTION_H_ +#define TENSORFLOW_LITE_KERNELS_INTERNAL_REFERENCE_BINARY_FUNCTION_H_ + +#include "tensorflow/lite/kernels/internal/common.h" +#include "tensorflow/lite/kernels/internal/compatibility.h" +#include "tensorflow/lite/kernels/internal/types.h" + +namespace tflite { + +namespace reference_ops { + +// Also appears to duplicate MinimumMaximum. +// +// R: Result type. T1: Input 1 type. T2: Input 2 type. +template +inline void BroadcastBinaryFunction4DSlow( + const RuntimeShape& unextended_input1_shape, const T1* input1_data, + const RuntimeShape& unextended_input2_shape, const T2* input2_data, + const RuntimeShape& unextended_output_shape, R* output_data, + R (*func)(T1, T2)) { + TFLITE_DCHECK_LE(unextended_input1_shape.DimensionsCount(), 4); + TFLITE_DCHECK_LE(unextended_input2_shape.DimensionsCount(), 4); + TFLITE_DCHECK_LE(unextended_output_shape.DimensionsCount(), 4); + const RuntimeShape output_shape = + RuntimeShape::ExtendedShape(4, unextended_output_shape); + + NdArrayDesc<4> desc1; + NdArrayDesc<4> desc2; + NdArrayDescsForElementwiseBroadcast(unextended_input1_shape, + unextended_input2_shape, &desc1, &desc2); + + const int* dims_data = + reinterpret_cast(output_shape.DimsDataUpTo5D()); + for (int b = 0; b < output_shape.Dims(0); ++b) { + int out_idx_b = b * dims_data[1]; + int in_idx1_b = desc1.strides[0] * b; + int in_idx2_b = desc2.strides[0] * b; + for (int y = 0; y < output_shape.Dims(1); ++y) { + int out_idx_y = (out_idx_b + y) * dims_data[2]; + int in_idx1_y = in_idx1_b + desc1.strides[1] * y; + int in_idx2_y = in_idx2_b + desc2.strides[1] * y; + for (int x = 0; x < output_shape.Dims(2); ++x) { + int out_idx_x = (out_idx_y + x) * dims_data[3]; + int in1_idx = in_idx1_y + desc1.strides[2] * x; + int in2_idx = in_idx2_y + desc2.strides[2] * x; + for (int c = 0; c < output_shape.Dims(3); ++c) { + auto out_idx 
= out_idx_x + c; + auto in1_val = input1_data[in1_idx]; + auto in2_val = input2_data[in2_idx]; + output_data[out_idx] = func(in1_val, in2_val); + in1_idx += desc1.strides[3]; + in2_idx += desc2.strides[3]; + } + } + } + } +} + +// R: Result type. T1: Input 1 type. T2: Input 2 type. +template +inline void BinaryFunction(const RuntimeShape& input1_shape, + const T1* input1_data, + const RuntimeShape& input2_shape, + const T2* input2_data, + const RuntimeShape& output_shape, R* output_data, + R (*func)(T1, T2)) { + const int flat_size = + MatchingFlatSize(input1_shape, input2_shape, output_shape); + for (int i = 0; i < flat_size; ++i) { + output_data[i] = func(input1_data[i], input2_data[i]); + } +} + +} // namespace reference_ops +} // namespace tflite + +#endif // TENSORFLOW_LITE_KERNELS_INTERNAL_REFERENCE_BINARY_FUNCTION_H_ diff --git a/tensorflow/lite/kernels/internal/reference/broadcast_args.h b/tensorflow/lite/kernels/internal/reference/broadcast_args.h new file mode 100644 index 0000000..d93c316 --- /dev/null +++ b/tensorflow/lite/kernels/internal/reference/broadcast_args.h @@ -0,0 +1,56 @@ +/* Copyright 2021 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/ +#ifndef TENSORFLOW_LITE_KERNELS_INTERNAL_REFERENCE_BROADCAST_ARGS_H_ +#define TENSORFLOW_LITE_KERNELS_INTERNAL_REFERENCE_BROADCAST_ARGS_H_ + +#include "tensorflow/lite/kernels/internal/compatibility.h" +#include "tensorflow/lite/kernels/internal/types.h" + +namespace tflite { +namespace reference_ops { + +template +void BroadcastArgs(const RuntimeShape& input1_shape, const T* input1_data, + const RuntimeShape& input2_shape, const T* input2_data, + const RuntimeShape& output_shape, T* output_data) { + // Gets data at the backward index i of the shape tensor. Returns 1 if the + // index is out of range. + auto get_shape_data = [](const RuntimeShape& shape, const T* data, + int backward_idx) -> T { + int forward_idx = shape.FlatSize() - 1 - backward_idx; + if (forward_idx < 0) return 1; + return data[forward_idx]; + }; + + int output_num_elements = output_shape.FlatSize(); + for (int i = 0; i < output_num_elements; ++i) { + int backward_i = output_num_elements - 1 - i; + int shape1_i = get_shape_data(input1_shape, input1_data, i); + int shape2_i = get_shape_data(input2_shape, input2_data, i); + if (shape1_i == 1) { + output_data[backward_i] = shape2_i; + } else if (shape2_i == 1) { + output_data[backward_i] = shape1_i; + } else { + TFLITE_CHECK_EQ(shape1_i, shape2_i); + output_data[backward_i] = shape1_i; + } + } +} + +} // namespace reference_ops +} // namespace tflite + +#endif // TENSORFLOW_LITE_KERNELS_INTERNAL_REFERENCE_BROADCAST_ARGS_H_ diff --git a/tensorflow/lite/kernels/internal/reference/broadcast_to.h b/tensorflow/lite/kernels/internal/reference/broadcast_to.h new file mode 100644 index 0000000..f106b2b --- /dev/null +++ b/tensorflow/lite/kernels/internal/reference/broadcast_to.h @@ -0,0 +1,97 @@ +/* Copyright 2020 The TensorFlow Authors. All Rights Reserved. 
+ +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ +#ifndef TENSORFLOW_LITE_KERNELS_INTERNAL_REFERENCE_BROADCAST_TO_H_ +#define TENSORFLOW_LITE_KERNELS_INTERNAL_REFERENCE_BROADCAST_TO_H_ + +#include "tensorflow/lite/kernels/internal/common.h" +#include "tensorflow/lite/kernels/kernel_util.h" + +namespace tflite { +namespace reference_ops { +template +void BroadcastImpl(const NdArrayDesc& input_desc, const char* input_data, + const NdArrayDesc& output_desc, char* output_data, + int indexes[N], int dim, const int last_broadcasting_dim, + const int type_size) { + // Copy data from input to output. + if (dim == last_broadcasting_dim) { + int copy_size = output_desc.strides[dim] * type_size; + const char* data_src = + input_data + SubscriptToIndex(input_desc, indexes) * type_size; + char* data_dst = + output_data + SubscriptToIndex(output_desc, indexes) * type_size; + for (int i = 0; i < output_desc.extents[dim]; ++i, data_dst += copy_size) { + memcpy(data_dst, data_src, copy_size); + } + return; + } + + // Recursive call to find the next broadcasting. + for (indexes[dim] = 0; indexes[dim] < input_desc.extents[dim]; + ++indexes[dim]) { + BroadcastImpl(input_desc, input_data, output_desc, output_data, indexes, + dim + 1, last_broadcasting_dim, type_size); + } + + // Duplicate data in output tensor. 
+ indexes[dim] = 0; + if (input_desc.extents[dim] != output_desc.extents[dim]) { + int copy_size = output_desc.strides[dim] * type_size; + char* data_src = + output_data + SubscriptToIndex(output_desc, indexes) * type_size; + char* data_dst = data_src + copy_size; + for (int i = 1; i < output_desc.extents[dim]; ++i, data_dst += copy_size) { + memcpy(data_dst, data_src, copy_size); + } + } +} + +template +inline void BroadcastTo(const RuntimeShape& unextended_input_shape, + const char* input_data, + const RuntimeShape& unextended_output_shape, + char* output_data, TfLiteType data_type) { + NdArrayDesc input_desc; + NdArrayDesc output_desc; + CopyDimsToDesc(RuntimeShape::ExtendedShape(N, unextended_input_shape), + &input_desc); + CopyDimsToDesc(RuntimeShape::ExtendedShape(N, unextended_output_shape), + &output_desc); + + // Get the last dimension has broadcasting. At this dimension, the data is + // copied from input tensor to output tensor. + int last_broadcast_dim = -1; + for (int i = N - 1; i >= 0; --i) { + if (input_desc.extents[i] != output_desc.extents[i]) { + last_broadcast_dim = i; + break; + } + } + + // If non-broadcasting, just copy data from input to output tensor. + if (last_broadcast_dim == -1) { + memcpy(output_data, input_data, + unextended_input_shape.FlatSize() * TfLiteTypeGetSize(data_type)); + return; + } + + // Broadcasting using memcpy. + int indexes[N] = {0}; + BroadcastImpl(input_desc, input_data, output_desc, output_data, indexes, 0, + last_broadcast_dim, TfLiteTypeGetSize(data_type)); +} +} // namespace reference_ops +} // namespace tflite +#endif // TENSORFLOW_LITE_KERNELS_INTERNAL_REFERENCE_BROADCAST_TO_H_ diff --git a/tensorflow/lite/kernels/internal/reference/ceil.h b/tensorflow/lite/kernels/internal/reference/ceil.h new file mode 100644 index 0000000..66d1dc3 --- /dev/null +++ b/tensorflow/lite/kernels/internal/reference/ceil.h @@ -0,0 +1,37 @@ +/* Copyright 2018 The TensorFlow Authors. All Rights Reserved. 
+ +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ +#ifndef TENSORFLOW_LITE_KERNELS_INTERNAL_REFERENCE_CEIL_H_ +#define TENSORFLOW_LITE_KERNELS_INTERNAL_REFERENCE_CEIL_H_ + +#include + +#include "tensorflow/lite/kernels/internal/types.h" + +namespace tflite { + +namespace reference_ops { + +inline void Ceil(const RuntimeShape& input_shape, const float* input_data, + const RuntimeShape& output_shape, float* output_data) { + const int flat_size = MatchingFlatSize(input_shape, output_shape); + + for (int i = 0; i < flat_size; ++i) { + output_data[i] = std::ceil(input_data[i]); + } +} + +} // namespace reference_ops +} // namespace tflite +#endif // TENSORFLOW_LITE_KERNELS_INTERNAL_REFERENCE_CEIL_H_ diff --git a/tensorflow/lite/kernels/internal/reference/comparisons.cc b/tensorflow/lite/kernels/internal/reference/comparisons.cc new file mode 100644 index 0000000..86b4a6a --- /dev/null +++ b/tensorflow/lite/kernels/internal/reference/comparisons.cc @@ -0,0 +1,37 @@ +/* Copyright 2023 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. 
+You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#include "tensorflow/lite/kernels/internal/reference/comparisons.h" + +namespace tflite { +namespace reference_ops { + +BroadcastComparison4DSlowCommon BroadcastComparison4DSlowPreprocess( + const RuntimeShape& unextended_input1_shape, + const RuntimeShape& unextended_input2_shape, + const RuntimeShape& unextended_output_shape) { + TFLITE_DCHECK_LE(unextended_input1_shape.DimensionsCount(), 4); + TFLITE_DCHECK_LE(unextended_input2_shape.DimensionsCount(), 4); + TFLITE_DCHECK_LE(unextended_output_shape.DimensionsCount(), 4); + NdArrayDesc<4> desc1; + NdArrayDesc<4> desc2; + NdArrayDescsForElementwiseBroadcast(unextended_input1_shape, + unextended_input2_shape, &desc1, &desc2); + return {RuntimeShape::ExtendedShape(4, unextended_output_shape), desc1, + desc2}; +} + +} // namespace reference_ops +} // namespace tflite diff --git a/tensorflow/lite/kernels/internal/reference/comparisons.h b/tensorflow/lite/kernels/internal/reference/comparisons.h new file mode 100644 index 0000000..3558319 --- /dev/null +++ b/tensorflow/lite/kernels/internal/reference/comparisons.h @@ -0,0 +1,271 @@ +/* Copyright 2019 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. 
+You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ +#ifndef TENSORFLOW_LITE_KERNELS_INTERNAL_REFERENCE_COMPARISONS_H_ +#define TENSORFLOW_LITE_KERNELS_INTERNAL_REFERENCE_COMPARISONS_H_ + +#include "tensorflow/lite/core/c/common.h" +#include "tensorflow/lite/kernels/internal/common.h" +#include "tensorflow/lite/kernels/internal/types.h" + +namespace tflite { + +namespace reference_ops { + +template +inline bool EqualFn(T lhs, T rhs) { + return lhs == rhs; +} + +template +inline bool NotEqualFn(T lhs, T rhs) { + return lhs != rhs; +} + +template +inline bool GreaterFn(T lhs, T rhs) { + return lhs > rhs; +} +template +inline bool GreaterEqualFn(T lhs, T rhs) { + return lhs >= rhs; +} +template +inline bool LessFn(T lhs, T rhs) { + return lhs < rhs; +} +template +inline bool LessEqualFn(T lhs, T rhs) { + return lhs <= rhs; +} + +template +using ComparisonFn = bool (*)(T, T); + +template F> +inline void ComparisonImpl( + const ComparisonParams& op_params, const RuntimeShape& input1_shape, + const T* input1_data, const RuntimeShape& input2_shape, + const T* input2_data, const RuntimeShape& output_shape, bool* output_data) { + const int64_t flatsize = + MatchingFlatSize(input1_shape, input2_shape, output_shape); + for (int64_t i = 0; i < flatsize; ++i) { + output_data[i] = F(input1_data[i], input2_data[i]); + } +} + +template F> +inline void Comparison(const ComparisonParams& op_params, + const RuntimeShape& input1_shape, + const float* input1_data, + const RuntimeShape& input2_shape, + const float* input2_data, + const RuntimeShape& output_shape, 
bool* output_data) { + ComparisonImpl(op_params, input1_shape, input1_data, input2_shape, + input2_data, output_shape, output_data); +} + +template F> +inline void ComparisonWithScaling( + const ComparisonParams& op_params, const RuntimeShape& input1_shape, + const T* input1_data, const RuntimeShape& input2_shape, + const T* input2_data, const RuntimeShape& output_shape, bool* output_data) { + int left_shift = op_params.left_shift; + int32_t input1_offset = op_params.input1_offset; + int32_t input1_multiplier = op_params.input1_multiplier; + int input1_shift = op_params.input1_shift; + int32_t input2_offset = op_params.input2_offset; + int32_t input2_multiplier = op_params.input2_multiplier; + int input2_shift = op_params.input2_shift; + + const int64_t flatsize = + MatchingFlatSize(input1_shape, input2_shape, output_shape); + for (int64_t i = 0; i < flatsize; ++i) { + const int32_t input1_val = input1_offset + input1_data[i]; + const int32_t input2_val = input2_offset + input2_data[i]; + const int32_t shifted_input1_val = input1_val * (1 << left_shift); + const int32_t shifted_input2_val = input2_val * (1 << left_shift); + const int32_t scaled_input1_val = + MultiplyByQuantizedMultiplierSmallerThanOneExp( + shifted_input1_val, input1_multiplier, input1_shift); + const int32_t scaled_input2_val = + MultiplyByQuantizedMultiplierSmallerThanOneExp( + shifted_input2_val, input2_multiplier, input2_shift); + output_data[i] = F(scaled_input1_val, scaled_input2_val); + } +} + +struct BroadcastComparison4DSlowCommon { + const RuntimeShape output_shape; + NdArrayDesc<4> desc1; + NdArrayDesc<4> desc2; +}; + +TFLITE_NOINLINE +BroadcastComparison4DSlowCommon BroadcastComparison4DSlowPreprocess( + const RuntimeShape& unextended_input1_shape, + const RuntimeShape& unextended_input2_shape, + const RuntimeShape& unextended_output_shape); + +template F> +inline void BroadcastComparison4DSlowImpl( + const ComparisonParams& op_params, + const RuntimeShape& unextended_input1_shape, 
const T* input1_data, + const RuntimeShape& unextended_input2_shape, const T* input2_data, + const RuntimeShape& unextended_output_shape, bool* output_data) { + const BroadcastComparison4DSlowCommon dims = + BroadcastComparison4DSlowPreprocess(unextended_input1_shape, + unextended_input2_shape, + unextended_output_shape); + + for (int b = 0; b < dims.output_shape.Dims(0); ++b) { + for (int y = 0; y < dims.output_shape.Dims(1); ++y) { + for (int x = 0; x < dims.output_shape.Dims(2); ++x) { + for (int c = 0; c < dims.output_shape.Dims(3); ++c) { + output_data[Offset(dims.output_shape, b, y, x, c)] = + F(input1_data[SubscriptToIndex(dims.desc1, b, y, x, c)], + input2_data[SubscriptToIndex(dims.desc2, b, y, x, c)]); + } + } + } + } +} + +template F> +inline void BroadcastComparison4DSlow(const ComparisonParams& op_params, + const RuntimeShape& input1_shape, + const float* input1_data, + const RuntimeShape& input2_shape, + const float* input2_data, + const RuntimeShape& output_shape, + bool* output_data) { + BroadcastComparison4DSlowImpl(op_params, input1_shape, input1_data, + input2_shape, input2_data, + output_shape, output_data); +} + +template F> +inline void BroadcastComparison4DSlowWithScaling( + const ComparisonParams& op_params, + const RuntimeShape& unextended_input1_shape, const T* input1_data, + const RuntimeShape& unextended_input2_shape, const T* input2_data, + const RuntimeShape& unextended_output_shape, bool* output_data) { + const BroadcastComparison4DSlowCommon dims = + BroadcastComparison4DSlowPreprocess(unextended_input1_shape, + unextended_input2_shape, + unextended_output_shape); + + int left_shift = op_params.left_shift; + int32_t input1_offset = op_params.input1_offset; + int32_t input1_multiplier = op_params.input1_multiplier; + int input1_shift = op_params.input1_shift; + int32_t input2_offset = op_params.input2_offset; + int32_t input2_multiplier = op_params.input2_multiplier; + int input2_shift = op_params.input2_shift; + + for (int b = 0; b < 
dims.output_shape.Dims(0); ++b) { + for (int y = 0; y < dims.output_shape.Dims(1); ++y) { + for (int x = 0; x < dims.output_shape.Dims(2); ++x) { + for (int c = 0; c < dims.output_shape.Dims(3); ++c) { + const int32_t input1_val = + input1_offset + + input1_data[SubscriptToIndex(dims.desc1, b, y, x, c)]; + const int32_t input2_val = + input2_offset + + input2_data[SubscriptToIndex(dims.desc2, b, y, x, c)]; + const int32_t shifted_input1_val = input1_val * (1 << left_shift); + const int32_t shifted_input2_val = input2_val * (1 << left_shift); + const int32_t scaled_input1_val = + MultiplyByQuantizedMultiplierSmallerThanOneExp( + shifted_input1_val, input1_multiplier, input1_shift); + const int32_t scaled_input2_val = + MultiplyByQuantizedMultiplierSmallerThanOneExp( + shifted_input2_val, input2_multiplier, input2_shift); + output_data[Offset(dims.output_shape, b, y, x, c)] = + F(scaled_input1_val, scaled_input2_val); + } + } + } + } +} + +#define TFLITE_COMPARISON_OP(name) \ + inline void name(const ComparisonParams& op_params, \ + const RuntimeShape& input1_shape, const float* input1_data, \ + const RuntimeShape& input2_shape, const float* input2_data, \ + const RuntimeShape& output_shape, bool* output_data) { \ + Comparison(op_params, input1_shape, input1_data, input2_shape, \ + input2_data, output_shape, output_data); \ + } \ + template \ + inline void name##NoScaling( \ + const ComparisonParams& op_params, const RuntimeShape& input1_shape, \ + const T* input1_data, const RuntimeShape& input2_shape, \ + const T* input2_data, const RuntimeShape& output_shape, \ + bool* output_data) { \ + ComparisonImpl(op_params, input1_shape, input1_data, \ + input2_shape, input2_data, output_shape, \ + output_data); \ + } \ + template \ + inline void name##WithScaling( \ + const ComparisonParams& op_params, const RuntimeShape& input1_shape, \ + const T* input1_data, const RuntimeShape& input2_shape, \ + const T* input2_data, const RuntimeShape& output_shape, \ + bool* 
output_data) { \ + ComparisonWithScaling(op_params, input1_shape, input1_data, \ + input2_shape, input2_data, \ + output_shape, output_data); \ + } \ + template \ + inline void Broadcast4DSlow##name##NoScaling( \ + const ComparisonParams& op_params, const RuntimeShape& input1_shape, \ + const T* input1_data, const RuntimeShape& input2_shape, \ + const T* input2_data, const RuntimeShape& output_shape, \ + bool* output_data) { \ + BroadcastComparison4DSlowImpl( \ + op_params, input1_shape, input1_data, input2_shape, input2_data, \ + output_shape, output_data); \ + } \ + inline void Broadcast4DSlow##name( \ + const ComparisonParams& op_params, const RuntimeShape& input1_shape, \ + const float* input1_data, const RuntimeShape& input2_shape, \ + const float* input2_data, const RuntimeShape& output_shape, \ + bool* output_data) { \ + BroadcastComparison4DSlow(op_params, input1_shape, input1_data, \ + input2_shape, input2_data, \ + output_shape, output_data); \ + } \ + template \ + inline void Broadcast4DSlow##name##WithScaling( \ + const ComparisonParams& op_params, const RuntimeShape& input1_shape, \ + const T* input1_data, const RuntimeShape& input2_shape, \ + const T* input2_data, const RuntimeShape& output_shape, \ + bool* output_data) { \ + BroadcastComparison4DSlowWithScaling( \ + op_params, input1_shape, input1_data, input2_shape, input2_data, \ + output_shape, output_data); \ + } +TFLITE_COMPARISON_OP(Equal); +TFLITE_COMPARISON_OP(NotEqual); +TFLITE_COMPARISON_OP(Greater); +TFLITE_COMPARISON_OP(GreaterEqual); +TFLITE_COMPARISON_OP(Less); +TFLITE_COMPARISON_OP(LessEqual); +#undef TFLITE_COMPARISON_OP + +} // namespace reference_ops +} // namespace tflite + +#endif // TENSORFLOW_LITE_KERNELS_INTERNAL_REFERENCE_COMPARISONS_H_ diff --git a/tensorflow/lite/kernels/internal/reference/concatenation.h b/tensorflow/lite/kernels/internal/reference/concatenation.h new file mode 100644 index 0000000..9d2ecbe --- /dev/null +++ 
b/tensorflow/lite/kernels/internal/reference/concatenation.h @@ -0,0 +1,141 @@ +/* Copyright 2019 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#ifndef TENSORFLOW_LITE_KERNELS_INTERNAL_REFERENCE_CONCATENATION_H_ +#define TENSORFLOW_LITE_KERNELS_INTERNAL_REFERENCE_CONCATENATION_H_ + +#include + +#include "tensorflow/lite/kernels/internal/common.h" +#include "tensorflow/lite/kernels/internal/compatibility.h" +#include "tensorflow/lite/kernels/internal/cppmath.h" +#include "tensorflow/lite/kernels/internal/types.h" + +namespace tflite { +namespace reference_ops { + +template +inline void Concatenation(const ConcatenationParams& params, + const RuntimeShape* const* input_shapes, + const Scalar* const* input_data, + const RuntimeShape& output_shape, + Scalar* output_data) { + int axis = params.axis; + int inputs_count = params.inputs_count; + const int concat_dimensions = output_shape.DimensionsCount(); + TFLITE_DCHECK_LT(axis, concat_dimensions); + + int64_t concat_size = 0; + for (int i = 0; i < inputs_count; i++) { + TFLITE_DCHECK_EQ(input_shapes[i]->DimensionsCount(), concat_dimensions); + for (int j = 0; j < concat_dimensions; j++) { + if (j != axis) { + MatchingDim(*input_shapes[i], j, output_shape, j); + } + } + concat_size += input_shapes[i]->Dims(axis); + } + TFLITE_DCHECK_EQ(concat_size, output_shape.Dims(axis)); + int64_t outer_size = 1; + for (int i = 
0; i < axis; ++i) { + outer_size *= output_shape.Dims(i); + } + // For all input arrays, + // FlatSize() = outer_size * Dims(axis) * base_inner_size; + int64_t base_inner_size = 1; + for (int i = axis + 1; i < concat_dimensions; ++i) { + base_inner_size *= output_shape.Dims(i); + } + + Scalar* output_ptr = output_data; + for (int k = 0; k < outer_size; k++) { + for (int i = 0; i < inputs_count; ++i) { + const int copy_size = input_shapes[i]->Dims(axis) * base_inner_size; + const Scalar* input_ptr = input_data[i] + k * copy_size; + memcpy(output_ptr, input_ptr, copy_size * sizeof(Scalar)); + output_ptr += copy_size; + } + } +} + +// TODO(b/174275780): The quantized implementation of concatentation isn't fully +// quantized as it takes scale as a floating point value. This should be fixed +// when optimizng this routine further. +inline void ConcatenationWithScaling(const ConcatenationParams& params, + const RuntimeShape* const* input_shapes, + const uint8_t* const* input_data, + const RuntimeShape& output_shape, + uint8_t* output_data) { + int axis = params.axis; + const int32_t* input_zeropoint = params.input_zeropoint; + const float* input_scale = params.input_scale; + int inputs_count = params.inputs_count; + const int32_t output_zeropoint = params.output_zeropoint; + const float output_scale = params.output_scale; + + const int concat_dimensions = output_shape.DimensionsCount(); + TFLITE_DCHECK_LT(axis, concat_dimensions); + + int64_t concat_size = 0; + for (int i = 0; i < inputs_count; i++) { + TFLITE_DCHECK_EQ(input_shapes[i]->DimensionsCount(), concat_dimensions); + for (int j = 0; j < concat_dimensions; j++) { + if (j != axis) { + MatchingDim(*input_shapes[i], j, output_shape, j); + } + } + concat_size += input_shapes[i]->Dims(axis); + } + TFLITE_DCHECK_EQ(concat_size, output_shape.Dims(axis)); + int64_t outer_size = 1; + for (int i = 0; i < axis; ++i) { + outer_size *= output_shape.Dims(i); + } + // For all input arrays, + // FlatSize() = outer_size * 
Dims(axis) * base_inner_size; + int64_t base_inner_size = 1; + for (int i = axis + 1; i < concat_dimensions; ++i) { + base_inner_size *= output_shape.Dims(i); + } + + const float inverse_output_scale = 1.f / output_scale; + uint8_t* output_ptr = output_data; + for (int k = 0; k < outer_size; k++) { + for (int i = 0; i < inputs_count; ++i) { + const int copy_size = input_shapes[i]->Dims(axis) * base_inner_size; + const uint8_t* input_ptr = input_data[i] + k * copy_size; + if (input_zeropoint[i] == output_zeropoint && + input_scale[i] == output_scale) { + memcpy(output_ptr, input_ptr, copy_size); + } else { + const float scale = input_scale[i] * inverse_output_scale; + const float bias = -input_zeropoint[i] * scale; + for (int j = 0; j < copy_size; ++j) { + const int32_t value = static_cast(tflite::TfLiteRound( + input_ptr[j] * scale + bias)) + + output_zeropoint; + output_ptr[j] = static_cast( + std::max(std::min(255, value), 0)); + } + } + output_ptr += copy_size; + } + } +} + +} // namespace reference_ops +} // namespace tflite + +#endif // TENSORFLOW_LITE_KERNELS_INTERNAL_REFERENCE_CONCATENATION_H_ diff --git a/tensorflow/lite/kernels/internal/reference/conv.h b/tensorflow/lite/kernels/internal/reference/conv.h new file mode 100644 index 0000000..3c9f9fc --- /dev/null +++ b/tensorflow/lite/kernels/internal/reference/conv.h @@ -0,0 +1,289 @@ +/* Copyright 2019 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/ +#ifndef TENSORFLOW_LITE_KERNELS_INTERNAL_REFERENCE_CONV_H_ +#define TENSORFLOW_LITE_KERNELS_INTERNAL_REFERENCE_CONV_H_ + +#include + +#include "tensorflow/lite/kernels/internal/common.h" +#include "tensorflow/lite/kernels/internal/types.h" + +namespace tflite { + +namespace reference_ops { + +inline void Conv(const ConvParams& params, const RuntimeShape& input_shape, + const float* input_data, const RuntimeShape& filter_shape, + const float* filter_data, const RuntimeShape& bias_shape, + const float* bias_data, const RuntimeShape& output_shape, + float* output_data, const RuntimeShape& im2col_shape, + float* im2col_data) { + const int stride_width = params.stride_width; + const int stride_height = params.stride_height; + const int dilation_width_factor = params.dilation_width_factor; + const int dilation_height_factor = params.dilation_height_factor; + const int pad_width = params.padding_values.width; + const int pad_height = params.padding_values.height; + const float output_activation_min = params.float_activation_min; + const float output_activation_max = params.float_activation_max; + TFLITE_DCHECK_EQ(input_shape.DimensionsCount(), 4); + TFLITE_DCHECK_EQ(filter_shape.DimensionsCount(), 4); + TFLITE_DCHECK_EQ(output_shape.DimensionsCount(), 4); + + (void)im2col_data; // only used in optimized code. + (void)im2col_shape; // only used in optimized code. 
+ const int batches = MatchingDim(input_shape, 0, output_shape, 0); + const int input_depth = input_shape.Dims(3); + const int output_depth = MatchingDim(filter_shape, 0, output_shape, 3); + if (bias_data) { + TFLITE_DCHECK_EQ(bias_shape.FlatSize(), output_depth); + } + const int input_height = input_shape.Dims(1); + const int input_width = input_shape.Dims(2); + const int filter_height = filter_shape.Dims(1); + const int filter_width = filter_shape.Dims(2); + const int filter_input_depth = filter_shape.Dims(3); + const int groups = input_depth / filter_input_depth; + TFLITE_DCHECK_NE(groups, 0); + TFLITE_DCHECK_EQ(input_depth % filter_input_depth, 0); + const int filters_per_group = output_depth / groups; + TFLITE_DCHECK_NE(filters_per_group, 0); + const int output_height = output_shape.Dims(1); + const int output_width = output_shape.Dims(2); + + for (int batch = 0; batch < batches; ++batch) { + for (int out_y = 0; out_y < output_height; ++out_y) { + const int in_y_origin = (out_y * stride_height) - pad_height; + for (int out_x = 0; out_x < output_width; ++out_x) { + const int in_x_origin = (out_x * stride_width) - pad_width; + for (int out_channel = 0; out_channel < output_depth; ++out_channel) { + auto group = out_channel / filters_per_group; + float total = 0.f; + for (int filter_y = 0; filter_y < filter_height; ++filter_y) { + const int in_y = in_y_origin + dilation_height_factor * filter_y; + for (int filter_x = 0; filter_x < filter_width; ++filter_x) { + const int in_x = in_x_origin + dilation_width_factor * filter_x; + + // Zero padding by omitting the areas outside the image. 
+ const bool is_point_inside_image = + (in_x >= 0) && (in_x < input_width) && (in_y >= 0) && + (in_y < input_height); + + if (!is_point_inside_image) { + continue; + } + for (int in_channel = 0; in_channel < filter_input_depth; + ++in_channel) { + float input_value = + input_data[Offset(input_shape, batch, in_y, in_x, + in_channel + group * filter_input_depth)]; + float filter_value = filter_data[Offset( + filter_shape, out_channel, filter_y, filter_x, in_channel)]; + total += (input_value * filter_value); + } + } + } + float bias_value = 0.0f; + if (bias_data) { + bias_value = bias_data[out_channel]; + } + output_data[Offset(output_shape, batch, out_y, out_x, out_channel)] = + ActivationFunctionWithMinMax(total + bias_value, + output_activation_min, + output_activation_max); + } + } + } + } +} + +inline void Conv(const ConvParams& params, const RuntimeShape& input_shape, + const uint8_t* input_data, const RuntimeShape& filter_shape, + const uint8_t* filter_data, const RuntimeShape& bias_shape, + const int32_t* bias_data, const RuntimeShape& output_shape, + uint8_t* output_data, const RuntimeShape& im2col_shape, + uint8_t* im2col_data, void* cpu_backend_context) { + (void)cpu_backend_context; // only used in optimized code. + (void)im2col_data; // only used in optimized code. + (void)im2col_shape; // only used in optimized code. 
+ const int stride_width = params.stride_width; + const int stride_height = params.stride_height; + const int dilation_width_factor = params.dilation_width_factor; + const int dilation_height_factor = params.dilation_height_factor; + const int pad_width = params.padding_values.width; + const int pad_height = params.padding_values.height; + const int32_t input_offset = params.input_offset; + const int32_t filter_offset = params.weights_offset; + const int32_t output_offset = params.output_offset; + const int32_t output_multiplier = params.output_multiplier; + const int output_shift = params.output_shift; + const int32_t output_activation_min = params.quantized_activation_min; + const int32_t output_activation_max = params.quantized_activation_max; + TFLITE_DCHECK_LE(output_activation_min, output_activation_max); + + TFLITE_DCHECK_EQ(input_shape.DimensionsCount(), 4); + TFLITE_DCHECK_EQ(filter_shape.DimensionsCount(), 4); + TFLITE_DCHECK_EQ(output_shape.DimensionsCount(), 4); + const int batches = MatchingDim(input_shape, 0, output_shape, 0); + const int input_depth = input_shape.Dims(3); + const int output_depth = MatchingDim(filter_shape, 0, output_shape, 3); + if (bias_data) { + TFLITE_DCHECK_EQ(bias_shape.FlatSize(), output_depth); + } + const int input_height = input_shape.Dims(1); + const int input_width = input_shape.Dims(2); + const int filter_height = filter_shape.Dims(1); + const int filter_width = filter_shape.Dims(2); + const int filter_input_depth = filter_shape.Dims(3); + const int groups = input_depth / filter_input_depth; + TFLITE_DCHECK_EQ(input_depth % filter_input_depth, 0); + const int filters_per_group = output_depth / groups; + const int output_height = output_shape.Dims(1); + const int output_width = output_shape.Dims(2); + for (int batch = 0; batch < batches; ++batch) { + for (int out_y = 0; out_y < output_height; ++out_y) { + const int in_y_origin = (out_y * stride_height) - pad_height; + for (int out_x = 0; out_x < output_width; ++out_x) { + 
const int in_x_origin = (out_x * stride_width) - pad_width; + for (int out_channel = 0; out_channel < output_depth; ++out_channel) { + auto group = out_channel / filters_per_group; + int32_t acc = 0; + for (int filter_y = 0; filter_y < filter_height; ++filter_y) { + const int in_y = in_y_origin + dilation_height_factor * filter_y; + for (int filter_x = 0; filter_x < filter_width; ++filter_x) { + const int in_x = in_x_origin + dilation_width_factor * filter_x; + + // Zero padding by omitting the areas outside the image. + const bool is_point_inside_image = + (in_x >= 0) && (in_x < input_width) && (in_y >= 0) && + (in_y < input_height); + + if (!is_point_inside_image) { + continue; + } + + for (int in_channel = 0; in_channel < filter_input_depth; + ++in_channel) { + int32_t input_val = + input_data[Offset(input_shape, batch, in_y, in_x, + in_channel + group * filter_input_depth)]; + int32_t filter_val = filter_data[Offset( + filter_shape, out_channel, filter_y, filter_x, in_channel)]; + acc += + (filter_val + filter_offset) * (input_val + input_offset); + } + } + } + if (bias_data) { + acc += bias_data[out_channel]; + } + acc = MultiplyByQuantizedMultiplier(acc, output_multiplier, + output_shift); + acc += output_offset; + acc = std::max(acc, output_activation_min); + acc = std::min(acc, output_activation_max); + output_data[Offset(output_shape, batch, out_y, out_x, out_channel)] = + static_cast(acc); + } + } + } + } +} + +inline void HybridConvPerChannel( + const ConvParams& params, float* scaling_factors_ptr, + const RuntimeShape& input_shape, const int8_t* input_data, + const RuntimeShape& filter_shape, const int8_t* filter_data, + const RuntimeShape& bias_shape, const float* bias_data, + const RuntimeShape& output_shape, float* output_data, + const RuntimeShape& im2col_shape, int8_t* im2col_data, + const float* per_channel_scale, int32_t* input_offset) { + (void)im2col_data; // only used in optimized code. + (void)im2col_shape; // only used in optimized code. 
+ const int stride_width = params.stride_width; + const int stride_height = params.stride_height; + const int dilation_width_factor = params.dilation_width_factor; + const int dilation_height_factor = params.dilation_height_factor; + const int pad_width = params.padding_values.width; + const int pad_height = params.padding_values.height; + const float output_activation_min = params.float_activation_min; + const float output_activation_max = params.float_activation_max; + TFLITE_DCHECK_EQ(input_shape.DimensionsCount(), 4); + TFLITE_DCHECK_EQ(filter_shape.DimensionsCount(), 4); + TFLITE_DCHECK_EQ(output_shape.DimensionsCount(), 4); + const int batches = MatchingDim(input_shape, 0, output_shape, 0); + const int input_depth = input_shape.Dims(3); + const int output_depth = MatchingDim(filter_shape, 0, output_shape, 3); + if (bias_data) { + TFLITE_DCHECK_EQ(bias_shape.FlatSize(), output_depth); + } + const int input_height = input_shape.Dims(1); + const int input_width = input_shape.Dims(2); + const int filter_height = filter_shape.Dims(1); + const int filter_width = filter_shape.Dims(2); + const int filter_input_depth = filter_shape.Dims(3); + const int groups = input_depth / filter_input_depth; + TFLITE_DCHECK_EQ(input_depth % filter_input_depth, 0); + const int filters_per_group = output_depth / groups; + const int output_height = output_shape.Dims(1); + const int output_width = output_shape.Dims(2); + for (int batch = 0; batch < batches; ++batch) { + for (int out_y = 0; out_y < output_height; ++out_y) { + for (int out_x = 0; out_x < output_width; ++out_x) { + for (int out_channel = 0; out_channel < output_depth; ++out_channel) { + auto group = out_channel / filters_per_group; + const int in_x_origin = (out_x * stride_width) - pad_width; + const int in_y_origin = (out_y * stride_height) - pad_height; + int32_t acc = 0; + for (int filter_y = 0; filter_y < filter_height; ++filter_y) { + for (int filter_x = 0; filter_x < filter_width; ++filter_x) { + for (int in_channel 
= 0; in_channel < filter_input_depth; + ++in_channel) { + const int in_x = in_x_origin + dilation_width_factor * filter_x; + const int in_y = + in_y_origin + dilation_height_factor * filter_y; + // If the location is outside the bounds of the input image, + // use zero as a default value. + if ((in_x >= 0) && (in_x < input_width) && (in_y >= 0) && + (in_y < input_height)) { + int32_t input_val = input_data[Offset( + input_shape, batch, in_y, in_x, + in_channel + group * filter_input_depth)]; + int32_t filter_val = + filter_data[Offset(filter_shape, out_channel, filter_y, + filter_x, in_channel)]; + acc += filter_val * (input_val - input_offset[batch]); + } + } + } + } + float acc_float = + acc * per_channel_scale[out_channel] * scaling_factors_ptr[batch]; + if (bias_data) { + acc_float += bias_data[out_channel]; + } + output_data[Offset(output_shape, batch, out_y, out_x, out_channel)] = + ActivationFunctionWithMinMax(acc_float, output_activation_min, + output_activation_max); + } + } + } + } +} + +} // namespace reference_ops +} // namespace tflite + +#endif // TENSORFLOW_LITE_KERNELS_INTERNAL_REFERENCE_CONV_H_ diff --git a/tensorflow/lite/kernels/internal/reference/cumsum.h b/tensorflow/lite/kernels/internal/reference/cumsum.h new file mode 100644 index 0000000..7cbc87c --- /dev/null +++ b/tensorflow/lite/kernels/internal/reference/cumsum.h @@ -0,0 +1,175 @@ +/* Copyright 2021 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/ +#ifndef TENSORFLOW_LITE_KERNELS_INTERNAL_REFERENCE_CUMSUM_H_ +#define TENSORFLOW_LITE_KERNELS_INTERNAL_REFERENCE_CUMSUM_H_ + +#include +#include +#include + +#include "tensorflow/lite/kernels/internal/common.h" +#include "tensorflow/lite/kernels/internal/compatibility.h" + +namespace tflite { +namespace reference_ops { + +template +inline void CumSum(const T* input_data, const RuntimeShape& shape, int32_t axis, + bool exclusive, bool reverse, T* output_data) { + const int32_t rank = shape.DimensionsCount(); + TFLITE_DCHECK_GE(rank, 1); + TFLITE_DCHECK_GE(axis, 0); + TFLITE_DCHECK_LT(axis, rank); + + size_t inner = 1; + size_t outer = 1; + size_t depth = 1; + for (int32_t i = 0; i < rank; i++) { + if (i < axis) + inner *= shape.Dims(i); + else if (i > axis) + outer *= shape.Dims(i); + else + depth = shape.Dims(i); + } + + for (size_t outer_index = 0; outer_index < outer; outer_index++) { + size_t outer_index_adj; + if (reverse) + outer_index_adj = (outer - 1) - outer_index; + else + outer_index_adj = outer_index; + for (size_t inner_index = 0; inner_index < inner; inner_index++) { + T accumulator = 0; + size_t inner_index_adj; + if (reverse) + inner_index_adj = (inner - 1) - inner_index; + else + inner_index_adj = inner_index; + for (size_t depth_index = 0; depth_index < depth; depth_index++) { + size_t depth_index_adj; + if (reverse) + depth_index_adj = (depth - 1) - depth_index; + else + depth_index_adj = depth_index; + + size_t index = outer_index_adj; + index += inner_index_adj * depth * outer; + index += depth_index_adj * outer; + + if (exclusive) { + output_data[index] = accumulator; + accumulator += input_data[index]; + } else { + accumulator += input_data[index]; + output_data[index] = accumulator; + } + } + } + } +} + +// +// Quantized INT8 CUMSUM +// +inline void CumSum(const ArithmeticParams& params, const int8_t* input_data, + const RuntimeShape& shape, int32_t axis, bool 
exclusive, + bool reverse, int8_t* output_data) { + TFLITE_DCHECK_LE(params.quantized_activation_min, + params.quantized_activation_max); + // Input offset is negative input zero point. Activation tensors are + // asymmetric quantized so they span the full int8 range. + // All inputs should have same zero-point and scale, this is checked during + // Prepare stage. + TFLITE_DCHECK_GE(-params.input1_offset, std::numeric_limits::min()); + TFLITE_DCHECK_LE(-params.input1_offset, std::numeric_limits::max()); + + const int32_t rank = shape.DimensionsCount(); + TFLITE_DCHECK_GE(rank, 1); + TFLITE_DCHECK_GE(axis, 0); + TFLITE_DCHECK_LT(axis, rank); + + size_t inner = 1; + size_t outer = 1; + size_t depth = 1; + for (int32_t i = 0; i < rank; i++) { + if (i < axis) + inner *= shape.Dims(i); + else if (i > axis) + outer *= shape.Dims(i); + else + depth = shape.Dims(i); + } + + for (size_t outer_index = 0; outer_index < outer; outer_index++) { + size_t outer_index_adj; + if (reverse) + outer_index_adj = (outer - 1) - outer_index; + else + outer_index_adj = outer_index; + for (size_t inner_index = 0; inner_index < inner; inner_index++) { + int32_t accumulator = params.input1_offset; // accumulator = 0 + accumulator *= (1 << params.left_shift); + accumulator = MultiplyByQuantizedMultiplierSmallerThanOneExp( + accumulator, params.input1_multiplier, params.input1_shift); + + size_t inner_index_adj; + if (reverse) + inner_index_adj = (inner - 1) - inner_index; + else + inner_index_adj = inner_index; + + for (size_t depth_index = 0; depth_index < depth; depth_index++) { + size_t depth_index_adj; + if (reverse) + depth_index_adj = (depth - 1) - depth_index; + else + depth_index_adj = depth_index; + + size_t index = outer_index_adj; + index += inner_index_adj * depth * outer; + index += depth_index_adj * outer; + + const int32_t y = params.input1_offset + input_data[index]; + const int32_t shifted_y = y * (1 << params.left_shift); + const int32_t scaled_y = 
MultiplyByQuantizedMultiplierSmallerThanOneExp( + shifted_y, params.input1_multiplier, params.input1_shift); + + int32_t scaled_output; + if (exclusive) { + scaled_output = accumulator; + accumulator += scaled_y; + } else { + accumulator += scaled_y; + scaled_output = accumulator; + } + + const int32_t raw_output = + MultiplyByQuantizedMultiplierSmallerThanOneExp( + scaled_output, params.output_multiplier, params.output_shift) + + params.output_offset; + const int32_t clamped_output = + std::min(params.quantized_activation_max, + std::max(params.quantized_activation_min, raw_output)); + output_data[index] = static_cast(clamped_output); + } + } + } +} + +} // namespace reference_ops +} // namespace tflite + +#endif // TENSORFLOW_LITE_KERNELS_INTERNAL_REFERENCE_CUMSUM_H_ diff --git a/tensorflow/lite/kernels/internal/reference/depth_to_space.h b/tensorflow/lite/kernels/internal/reference/depth_to_space.h new file mode 100644 index 0000000..23cff28 --- /dev/null +++ b/tensorflow/lite/kernels/internal/reference/depth_to_space.h @@ -0,0 +1,79 @@ +/* Copyright 2020 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/ +#ifndef TENSORFLOW_LITE_KERNELS_INTERNAL_REFERENCE_DEPTH_TO_SPACE_H_ +#define TENSORFLOW_LITE_KERNELS_INTERNAL_REFERENCE_DEPTH_TO_SPACE_H_ + +#include "tensorflow/lite/kernels/internal/types.h" + +namespace tflite { +namespace reference_ops { + +template +inline void DepthToSpace(const tflite::DepthToSpaceParams& op_params, + const RuntimeShape& unextended_input_shape, + const T* input_data, + const RuntimeShape& unextended_output_shape, + T* output_data) { + TFLITE_DCHECK_LE(unextended_input_shape.DimensionsCount(), 4); + TFLITE_DCHECK_LE(unextended_output_shape.DimensionsCount(), 4); + const RuntimeShape input_shape = + RuntimeShape::ExtendedShape(4, unextended_input_shape); + const RuntimeShape output_shape = + RuntimeShape::ExtendedShape(4, unextended_output_shape); + + const int input_depth = input_shape.Dims(3); + const int input_width = input_shape.Dims(2); + const int input_height = input_shape.Dims(1); + const int input_batch = input_shape.Dims(0); + + const int output_depth = output_shape.Dims(3); + const int output_width = output_shape.Dims(2); + const int output_height = output_shape.Dims(1); + const int output_batch = output_shape.Dims(0); + + const int32_t block_size = op_params.block_size; + + TFLITE_DCHECK_EQ(input_width * block_size, output_width); + TFLITE_DCHECK_EQ(input_height * block_size, output_height); + TFLITE_DCHECK_EQ(input_depth, output_depth * block_size * block_size); + TFLITE_DCHECK_EQ(input_batch, output_batch); + + for (int out_b = 0; out_b < output_batch; ++out_b) { + for (int out_h = 0; out_h < output_height; ++out_h) { + for (int out_w = 0; out_w < output_width; ++out_w) { + for (int out_d = 0; out_d < output_depth; ++out_d) { + const int in_d = + out_d + ((out_h % block_size) * block_size + out_w % block_size) * + output_depth; + + const int in_w = out_w / block_size; + const int in_h = out_h / block_size; + const int in_b = out_b; + + const int 
input_index = Offset(input_shape, in_b, in_h, in_w, in_d); + const int output_index = + Offset(output_shape, out_b, out_h, out_w, out_d); + + output_data[output_index] = input_data[input_index]; + } + } + } + } +} + +} // namespace reference_ops +} // namespace tflite + +#endif // TENSORFLOW_LITE_KERNELS_INTERNAL_REFERENCE_DEPTH_TO_SPACE_H_ diff --git a/tensorflow/lite/kernels/internal/reference/depthwiseconv_float.h b/tensorflow/lite/kernels/internal/reference/depthwiseconv_float.h new file mode 100644 index 0000000..0cecb16 --- /dev/null +++ b/tensorflow/lite/kernels/internal/reference/depthwiseconv_float.h @@ -0,0 +1,100 @@ +/* Copyright 2017 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/ +#ifndef TENSORFLOW_LITE_KERNELS_INTERNAL_REFERENCE_DEPTHWISECONV_FLOAT_H_ +#define TENSORFLOW_LITE_KERNELS_INTERNAL_REFERENCE_DEPTHWISECONV_FLOAT_H_ + +#include "tensorflow/lite/kernels/internal/common.h" +#include "tensorflow/lite/kernels/internal/compatibility.h" +#include "tensorflow/lite/kernels/internal/types.h" + +namespace tflite { +namespace reference_ops { + +inline void DepthwiseConv( + const DepthwiseParams& params, const RuntimeShape& input_shape, + const float* input_data, const RuntimeShape& filter_shape, + const float* filter_data, const RuntimeShape& bias_shape, + const float* bias_data, const RuntimeShape& output_shape, + float* output_data) { + const int stride_width = params.stride_width; + const int stride_height = params.stride_height; + const int dilation_width_factor = params.dilation_width_factor; + const int dilation_height_factor = params.dilation_height_factor; + const int pad_width = params.padding_values.width; + const int pad_height = params.padding_values.height; + const int depth_multiplier = params.depth_multiplier; + const float output_activation_min = params.float_activation_min; + const float output_activation_max = params.float_activation_max; + TFLITE_DCHECK_EQ(input_shape.DimensionsCount(), 4); + TFLITE_DCHECK_EQ(filter_shape.DimensionsCount(), 4); + TFLITE_DCHECK_EQ(output_shape.DimensionsCount(), 4); + + const int batches = MatchingDim(input_shape, 0, output_shape, 0); + const int output_depth = MatchingDim(filter_shape, 3, output_shape, 3); + const int input_height = input_shape.Dims(1); + const int input_width = input_shape.Dims(2); + const int input_depth = input_shape.Dims(3); + const int filter_height = filter_shape.Dims(1); + const int filter_width = filter_shape.Dims(2); + const int output_height = output_shape.Dims(1); + const int output_width = output_shape.Dims(2); + TFLITE_DCHECK_EQ(output_depth, input_depth * depth_multiplier); + 
TFLITE_DCHECK_EQ(bias_shape.FlatSize(), output_depth); + + for (int b = 0; b < batches; ++b) { + for (int out_y = 0; out_y < output_height; ++out_y) { + for (int out_x = 0; out_x < output_width; ++out_x) { + for (int ic = 0; ic < input_depth; ++ic) { + for (int m = 0; m < depth_multiplier; m++) { + const int oc = m + ic * depth_multiplier; + const int in_x_origin = (out_x * stride_width) - pad_width; + const int in_y_origin = (out_y * stride_height) - pad_height; + float total = 0.f; + for (int filter_y = 0; filter_y < filter_height; ++filter_y) { + for (int filter_x = 0; filter_x < filter_width; ++filter_x) { + const int in_x = in_x_origin + dilation_width_factor * filter_x; + const int in_y = + in_y_origin + dilation_height_factor * filter_y; + // If the location is outside the bounds of the input image, + // use zero as a default value. + if ((in_x >= 0) && (in_x < input_width) && (in_y >= 0) && + (in_y < input_height)) { + float input_value = + input_data[Offset(input_shape, b, in_y, in_x, ic)]; + float filter_value = filter_data[Offset( + filter_shape, 0, filter_y, filter_x, oc)]; + total += (input_value * filter_value); + } + } + } + float bias_value = 0.0f; + if (bias_data) { + bias_value = bias_data[oc]; + } + output_data[Offset(output_shape, b, out_y, out_x, oc)] = + ActivationFunctionWithMinMax(total + bias_value, + output_activation_min, + output_activation_max); + } + } + } + } + } +} + +} // end namespace reference_ops +} // end namespace tflite + +#endif // TENSORFLOW_LITE_KERNELS_INTERNAL_REFERENCE_DEPTHWISECONV_FLOAT_H_ diff --git a/tensorflow/lite/kernels/internal/reference/depthwiseconv_uint8.h b/tensorflow/lite/kernels/internal/reference/depthwiseconv_uint8.h new file mode 100644 index 0000000..d4fba13 --- /dev/null +++ b/tensorflow/lite/kernels/internal/reference/depthwiseconv_uint8.h @@ -0,0 +1,319 @@ +/* Copyright 2017 The TensorFlow Authors. All Rights Reserved. 
+ +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ +#ifndef TENSORFLOW_LITE_KERNELS_INTERNAL_REFERENCE_DEPTHWISECONV_UINT8_H_ +#define TENSORFLOW_LITE_KERNELS_INTERNAL_REFERENCE_DEPTHWISECONV_UINT8_H_ + +#include + +#include "fixedpoint/fixedpoint.h" +#include "tensorflow/lite/kernels/internal/common.h" +#include "tensorflow/lite/kernels/internal/compatibility.h" +#include "tensorflow/lite/kernels/internal/types.h" + +namespace tflite { + +// Used in tests and template parameters to control which version of depthwise +// convolution is called. Primarily for reference code, and specializations +// forced in tests. +enum class DepthwiseConvImplementation { + // Run all tests against kUseStandardEntry even if also testing another + // kernel, since we need to be sure that the main DepthwiseConv() function in + // optimized_ops.h dispatches to a correctly-executing kernel. + kNone = 0, // The "default" option: use the normal + // DepthwiseConv kernel (entry) function. + kUseGenericKernel, // Forced use of generic kernel. + kUseNeon3x3, // 3x3 kernel that uses NEON when available. + kUseNeon3x3DotProduct, // 3x3 kernel that uses dot-product enabled NEON + // when available. + kUseCModel3x3DotProduct, // 3x3 kernel, reference C model that is intended + // to match overall design NEON code. + kUseUnwound3x3DotProduct, // 3x3 kernel, reference C model with unwound loops + // and some arrays. 
+ kUseIntrinsics3x3DotProduct, // 3x3 kernel using NEON intrinsics. +}; + +// Category of depthwise convolution output rounding. +enum class DepthwiseConvOutputRounding { + kNone = 0, // Invalid: specific method must be specified. + kAwayFromZero, // Original method: exact halves rounded away from zero. + kUpward, // Halves towards +infinity: adds 0.5 before truncate. + // This is where a future kNearestEven would be placed. +}; + +// Category of depthwise convolution depth multiplication. +enum class DepthwiseConvDepthMultiplication { + kNoMultiplication = 0, // Depth multiplier = 1. + kUnitInputDepth, // Input depth = 1, output depth = depth multiplier. +}; + +namespace reference_ops { +namespace depthwise_conv { + +template +inline int32_t DepthwiseConvRound(int32_t x, int32_t quantized_multiplier, + int shift) { + TFLITE_DCHECK_NE(output_rounding, DepthwiseConvOutputRounding::kNone); + return MultiplyByQuantizedMultiplier(x, quantized_multiplier, shift); +} + +// Single-rounding MultiplyByQuantizedMultiplier +#if TFLITE_SINGLE_ROUNDING +template <> +inline int32_t DepthwiseConvRound( + int32_t x, int32_t quantized_multiplier, int shift) { + using gemmlowp::RoundingDivideByPOT; + using gemmlowp::SaturatingRoundingDoublingHighMul; + int left_shift = shift > 0 ? shift : 0; + int right_shift = shift > 0 ? 
0 : -shift; + return RoundingDivideByPOT(SaturatingRoundingDoublingHighMul( + x * (1 << left_shift), quantized_multiplier), + right_shift); +} + +template <> +inline int32_t DepthwiseConvRound( + int32_t x, int32_t quantized_multiplier, int shift) { + return MultiplyByQuantizedMultiplier(x, quantized_multiplier, shift); +} +// Double-rounding MultiplyByQuantizedMultiplier +#else +template <> +inline int32_t DepthwiseConvRound( + int32_t x, int32_t quantized_multiplier, int shift) { + return MultiplyByQuantizedMultiplier(x, quantized_multiplier, shift); +} + +template <> +inline int32_t DepthwiseConvRound( + int32_t x, int32_t quantized_multiplier, int shift) { + using gemmlowp::SaturatingRoundingDoublingHighMul; + const int left_shift = shift > 0 ? shift : 0; + const int right_shift = shift > 0 ? 0 : -shift; + const int rounding_offset = right_shift > 0 ? 1 << (right_shift - 1) : 0; + return (SaturatingRoundingDoublingHighMul(x * (1 << left_shift), + quantized_multiplier) + + rounding_offset) >> + right_shift; +} +#endif // TFLITE_SINGLE_ROUNDING + +template +struct DepthwiseConvBasicKernel { + static inline void Run( + const DepthwiseParams& params, const RuntimeShape& input_shape, + const uint8_t* input_data, const RuntimeShape& filter_shape, + const uint8_t* filter_data, const RuntimeShape& bias_shape, + const int32_t* bias_data, const RuntimeShape& output_shape, + uint8_t* output_data) { + const int stride_width = params.stride_width; + const int stride_height = params.stride_height; + const int dilation_width_factor = params.dilation_width_factor; + const int dilation_height_factor = params.dilation_height_factor; + const int pad_width = params.padding_values.width; + const int pad_height = params.padding_values.height; + const int depth_multiplier = params.depth_multiplier; + const int32_t output_activation_min = params.quantized_activation_min; + const int32_t output_activation_max = params.quantized_activation_max; + const int32_t input_offset = 
params.input_offset; + const int32_t filter_offset = params.weights_offset; + const int32_t output_offset = params.output_offset; + const int32_t output_multiplier = params.output_multiplier; + const int output_shift = params.output_shift; + TFLITE_DCHECK_EQ(input_shape.DimensionsCount(), 4); + TFLITE_DCHECK_EQ(filter_shape.DimensionsCount(), 4); + TFLITE_DCHECK_EQ(output_shape.DimensionsCount(), 4); + + TFLITE_DCHECK_LE(output_activation_min, output_activation_max); + const int batches = MatchingDim(input_shape, 0, output_shape, 0); + const int output_depth = MatchingDim(filter_shape, 3, output_shape, 3); + const int input_height = input_shape.Dims(1); + const int input_width = input_shape.Dims(2); + const int input_depth = input_shape.Dims(3); + const int filter_height = filter_shape.Dims(1); + const int filter_width = filter_shape.Dims(2); + const int output_height = output_shape.Dims(1); + const int output_width = output_shape.Dims(2); + TFLITE_DCHECK_EQ(output_depth, input_depth * depth_multiplier); + TFLITE_DCHECK_EQ(bias_shape.FlatSize(), output_depth); + + for (int b = 0; b < batches; ++b) { + for (int out_y = 0; out_y < output_height; ++out_y) { + for (int out_x = 0; out_x < output_width; ++out_x) { + for (int ic = 0; ic < input_depth; ++ic) { + for (int m = 0; m < depth_multiplier; m++) { + const int oc = m + ic * depth_multiplier; + const int in_x_origin = (out_x * stride_width) - pad_width; + const int in_y_origin = (out_y * stride_height) - pad_height; + int32_t acc = 0; + for (int filter_y = 0; filter_y < filter_height; ++filter_y) { + for (int filter_x = 0; filter_x < filter_width; ++filter_x) { + const int in_x = + in_x_origin + dilation_width_factor * filter_x; + const int in_y = + in_y_origin + dilation_height_factor * filter_y; + // If the location is outside the bounds of the input image, + // use zero as a default value. 
+ if ((in_x >= 0) && (in_x < input_width) && (in_y >= 0) && + (in_y < input_height)) { + int32_t input_val = + input_data[Offset(input_shape, b, in_y, in_x, ic)]; + int32_t filter_val = filter_data[Offset( + filter_shape, 0, filter_y, filter_x, oc)]; + acc += (filter_val + filter_offset) * + (input_val + input_offset); + } + } + } + if (bias_data) { + acc += bias_data[oc]; + } + acc = DepthwiseConvRound(acc, output_multiplier, + output_shift); + acc += output_offset; + acc = std::max(acc, output_activation_min); + acc = std::min(acc, output_activation_max); + output_data[Offset(output_shape, b, out_y, out_x, oc)] = + static_cast(acc); + } + } + } + } + } + } + + // TODO(b/148596273): Reconcile reference versions, perhaps with common + // MultiplyByQuantizedMultiplier or DepthwiseConvRound function. + static inline void RunPerChannel( + const DepthwiseParams& params, const RuntimeShape& input_shape, + const int8_t* input_data, const RuntimeShape& filter_shape, + const int8_t* filter_data, const RuntimeShape& bias_shape, + const int32_t* bias_data, const RuntimeShape& output_shape, + int8_t* output_data) { + // Get parameters. + // TODO(b/141565753): Re-introduce ScopedProfilingLabel on Micro. 
+ const int stride_width = params.stride_width; + const int stride_height = params.stride_height; + const int dilation_width_factor = params.dilation_width_factor; + const int dilation_height_factor = params.dilation_height_factor; + const int pad_width = params.padding_values.width; + const int pad_height = params.padding_values.height; + const int depth_multiplier = params.depth_multiplier; + const int32_t input_offset = params.input_offset; + const int32_t output_offset = params.output_offset; + const int32_t output_activation_min = params.quantized_activation_min; + const int32_t output_activation_max = params.quantized_activation_max; + const int32_t* output_multiplier = params.output_multiplier_per_channel; + const int32_t* output_shift = params.output_shift_per_channel; + + // Check dimensions of the tensors. + TFLITE_DCHECK_EQ(input_shape.DimensionsCount(), 4); + TFLITE_DCHECK_EQ(filter_shape.DimensionsCount(), 4); + TFLITE_DCHECK_EQ(output_shape.DimensionsCount(), 4); + + TFLITE_DCHECK_LE(output_activation_min, output_activation_max); + const int batches = MatchingDim(input_shape, 0, output_shape, 0); + const int output_depth = MatchingDim(filter_shape, 3, output_shape, 3); + const int input_height = input_shape.Dims(1); + const int input_width = input_shape.Dims(2); + const int input_depth = input_shape.Dims(3); + const int filter_height = filter_shape.Dims(1); + const int filter_width = filter_shape.Dims(2); + const int output_height = output_shape.Dims(1); + const int output_width = output_shape.Dims(2); + TFLITE_DCHECK_EQ(output_depth, input_depth * depth_multiplier); + TFLITE_DCHECK_EQ(bias_shape.FlatSize(), output_depth); + + for (int batch = 0; batch < batches; ++batch) { + for (int out_y = 0; out_y < output_height; ++out_y) { + for (int out_x = 0; out_x < output_width; ++out_x) { + for (int in_channel = 0; in_channel < input_depth; ++in_channel) { + for (int m = 0; m < depth_multiplier; ++m) { + const int output_channel = m + in_channel * 
depth_multiplier; + const int in_x_origin = (out_x * stride_width) - pad_width; + const int in_y_origin = (out_y * stride_height) - pad_height; + int32_t acc = 0; + for (int filter_y = 0; filter_y < filter_height; ++filter_y) { + for (int filter_x = 0; filter_x < filter_width; ++filter_x) { + const int in_x = + in_x_origin + dilation_width_factor * filter_x; + const int in_y = + in_y_origin + dilation_height_factor * filter_y; + // Zero padding by omitting the areas outside the image. + const bool is_point_inside_image = + (in_x >= 0) && (in_x < input_width) && (in_y >= 0) && + (in_y < input_height); + if (is_point_inside_image) { + int32_t input_val = input_data[Offset( + input_shape, batch, in_y, in_x, in_channel)]; + int32_t filter_val = filter_data[Offset( + filter_shape, 0, filter_y, filter_x, output_channel)]; + // Accumulate with 32 bits accumulator. + // In the nudging process during model quantization, we + // force real value of 0.0 be represented by a quantized + // value. This guarantees that the input_offset is a int8_t, + // even though it is represented using int32_t. int32_t += + // int8_t + // * (int8_t - int8_t) so the highest value we can get from + // each accumulation is [-127, 127] * ([-128, 127] - + // [-128, 127]), which is [-32512, 32512]. log2(32512) + // = 14.98, which means we can accumulate at least 2^16 + // multiplications without overflow. The accumulator is + // applied to a filter so the accumulation logic will hold + // as long as the filter size (filter_y * filter_x * + // in_channel) does not exceed 2^16, which is the case in + // all the models we have seen so far. 
+ acc += filter_val * (input_val + input_offset); + } + } + } + if (bias_data) { + acc += bias_data[output_channel]; + } + acc = DepthwiseConvRound( + acc, output_multiplier[output_channel], + output_shift[output_channel]); + acc += output_offset; + acc = std::max(acc, output_activation_min); + acc = std::min(acc, output_activation_max); + output_data[Offset(output_shape, batch, out_y, out_x, + output_channel)] = static_cast(acc); + } + } + } + } + } + } +}; + +} // namespace depthwise_conv + +inline void DepthwiseConv( + const DepthwiseParams& params, const RuntimeShape& input_shape, + const uint8_t* input_data, const RuntimeShape& filter_shape, + const uint8_t* filter_data, const RuntimeShape& bias_shape, + const int32_t* bias_data, const RuntimeShape& output_shape, + uint8_t* output_data) { + return depthwise_conv::DepthwiseConvBasicKernel< + DepthwiseConvOutputRounding::kAwayFromZero>::Run(params, input_shape, + input_data, filter_shape, + filter_data, bias_shape, + bias_data, output_shape, + output_data); +} + +} // namespace reference_ops +} // end namespace tflite + +#endif // TENSORFLOW_LITE_KERNELS_INTERNAL_REFERENCE_DEPTHWISECONV_UINT8_H_ diff --git a/tensorflow/lite/kernels/internal/reference/dequantize.h b/tensorflow/lite/kernels/internal/reference/dequantize.h new file mode 100644 index 0000000..b90951f --- /dev/null +++ b/tensorflow/lite/kernels/internal/reference/dequantize.h @@ -0,0 +1,78 @@ +/* Copyright 2019 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ +#ifndef TENSORFLOW_LITE_KERNELS_INTERNAL_REFERENCE_DEQUANTIZE_H_ +#define TENSORFLOW_LITE_KERNELS_INTERNAL_REFERENCE_DEQUANTIZE_H_ + +#include + +#include + +#include "tensorflow/lite/kernels/internal/common.h" +#include "tensorflow/lite/kernels/internal/types.h" + +namespace tflite { + +namespace reference_ops { + +// Dequantizes into a float without rounding. +template +inline void Dequantize(const tflite::DequantizationParams& op_params, + const RuntimeShape& input_shape, + const InputT* input_data, + const RuntimeShape& output_shape, OutputT* output_data) { + int32_t zero_point = op_params.zero_point; + const double scale = op_params.scale; + const int flat_size = MatchingFlatSize(input_shape, output_shape); + + for (int i = 0; i < flat_size; i++) { + const int32_t val = input_data[i]; + const OutputT result = static_cast(scale * (val - zero_point)); + output_data[i] = result; + } +} + +// Dequantizes per-channel quantized tensor to float. +template +inline void PerChannelDequantize( + const tflite::PerChannelDequantizationParams& op_params, + const RuntimeShape& input_shape, const T* input_data, + const RuntimeShape& output_shape, float* output_data) { + // Ensure flat size is same. 
+ MatchingFlatSize(input_shape, output_shape); + + const int32_t* zero_point = op_params.zero_point; + const float* scale = op_params.scale; + const int32_t quantized_dimension = op_params.quantized_dimension; + const int32_t num_dims = input_shape.DimensionsCount(); + const int32_t* dims_data = input_shape.DimsData(); + std::vector current_dim(num_dims, 0); + + do { + size_t offset = + ReducedOutputOffset(num_dims, reinterpret_cast(dims_data), + current_dim.data(), 0, nullptr); + const int channel = current_dim[quantized_dimension]; + const int32_t val = input_data[offset]; + const float result = + static_cast(scale[channel] * (val - zero_point[channel])); + output_data[offset] = result; + } while (NextIndex(num_dims, reinterpret_cast(dims_data), + current_dim.data())); +} + +} // namespace reference_ops + +} // namespace tflite +#endif // TENSORFLOW_LITE_KERNELS_INTERNAL_REFERENCE_DEQUANTIZE_H_ diff --git a/tensorflow/lite/kernels/internal/reference/div.h b/tensorflow/lite/kernels/internal/reference/div.h new file mode 100644 index 0000000..df8da1b --- /dev/null +++ b/tensorflow/lite/kernels/internal/reference/div.h @@ -0,0 +1,247 @@ +/* Copyright 2020 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/ +#ifndef TENSORFLOW_LITE_KERNELS_INTERNAL_REFERENCE_DIV_H_ +#define TENSORFLOW_LITE_KERNELS_INTERNAL_REFERENCE_DIV_H_ + +#include + +#include "tensorflow/lite/kernels/internal/common.h" + +namespace tflite { + +namespace reference_ops { + +template +inline void DivCheckArithmeticParams(const ArithmeticParams& params) { + TFLITE_DCHECK_LE(params.quantized_activation_min, + params.quantized_activation_max); + // Input offset is negative input zero point. Activation tensors are + // asymmetric quantized so they span the full int8 range. + constexpr int32_t max_value = + static_cast(std::numeric_limits::max()); + TFLITE_DCHECK_GE(params.input1_offset, -max_value); + TFLITE_DCHECK_LE(params.input1_offset, max_value); + TFLITE_DCHECK_GE(params.input2_offset, -max_value); + TFLITE_DCHECK_LE(params.input2_offset, max_value); + TFLITE_DCHECK_GE(params.output_offset, -max_value); + TFLITE_DCHECK_LE(params.output_offset, max_value); +} + +// Element-wise div that can often be used for inner loop of broadcast Div as +// well as the non-broadcast Div. +template +inline void DivElementwise(int size, const ArithmeticParams& params, + const T* input1_data, const T* input2_data, + T* output_data) { + DivCheckArithmeticParams(params); + + for (int i = 0; i < size; ++i) { + int32_t input1_val = params.input1_offset + input1_data[i]; + int32_t input2_val = params.input2_offset + input2_data[i]; + TFLITE_DCHECK_NE(input2_val, 0); + if (input2_val < 0) { + // Invert signs to avoid a negative input2_val as input2_inv needs to be + // positive to be used as multiplier of MultiplyByQuantizedMultiplier. 
+ input1_val = -input1_val; + input2_val = -input2_val; + } + int recip_shift; + const int32_t input2_inv = GetReciprocal(input2_val, 31, &recip_shift); + const int headroom = CountLeadingSignBits(input1_val); + const int32_t unscaled_quotient = + MultiplyByQuantizedMultiplierGreaterThanOne(input1_val, input2_inv, + headroom); + const int total_shift = params.output_shift - recip_shift - headroom; + const int32_t unclamped_result = + params.output_offset + + MultiplyByQuantizedMultiplierSmallerThanOneExp( + unscaled_quotient, params.output_multiplier, total_shift); + const int32_t clamped_output = + std::min(params.quantized_activation_max, + std::max(params.quantized_activation_min, unclamped_result)); + output_data[i] = static_cast(clamped_output); + } +} + +inline void Div(const ArithmeticParams& params, + const RuntimeShape& input1_shape, const uint8_t* input1_data, + const RuntimeShape& input2_shape, const uint8_t* input2_data, + const RuntimeShape& output_shape, uint8_t* output_data) { + TFLITE_DCHECK_LE(params.quantized_activation_min, + params.quantized_activation_max); + const int flat_size = + MatchingElementsSize(input1_shape, input2_shape, output_shape); + + DivElementwise(flat_size, params, input1_data, input2_data, output_data); +} + +inline void Div(const ArithmeticParams& params, + const RuntimeShape& input1_shape, const int8_t* input1_data, + const RuntimeShape& input2_shape, const int8_t* input2_data, + const RuntimeShape& output_shape, int8_t* output_data) { + TFLITE_DCHECK_LE(params.quantized_activation_min, + params.quantized_activation_max); + const int flat_size = + MatchingElementsSize(input1_shape, input2_shape, output_shape); + + DivElementwise(flat_size, params, input1_data, input2_data, output_data); +} + +template +inline void BroadcastDivSlowQuantized( + const ArithmeticParams& params, const RuntimeShape& unextended_input1_shape, + const T* input1_data, const RuntimeShape& unextended_input2_shape, + const T* input2_data, const 
RuntimeShape& unextended_output_shape, + T* output_data) { + TFLITE_DCHECK_LE(unextended_input1_shape.DimensionsCount(), N); + TFLITE_DCHECK_LE(unextended_input2_shape.DimensionsCount(), N); + TFLITE_DCHECK_LE(unextended_output_shape.DimensionsCount(), N); + + NdArrayDesc desc1; + NdArrayDesc desc2; + NdArrayDesc output_desc; + NdArrayDescsForElementwiseBroadcast(unextended_input1_shape, + unextended_input2_shape, &desc1, &desc2); + CopyDimsToDesc(RuntimeShape::ExtendedShape(N, unextended_output_shape), + &output_desc); + + DivCheckArithmeticParams(params); + + auto div_func = [&](int indexes[N]) { + int32_t input1_val = + params.input1_offset + input1_data[SubscriptToIndex(desc1, indexes)]; + int32_t input2_val = + params.input2_offset + input2_data[SubscriptToIndex(desc2, indexes)]; + TFLITE_DCHECK_NE(input2_val, 0); + if (input2_val < 0) { + // Invert signs to avoid a negative input2_val as input2_inv needs to be + // positive to be used as multiplier of MultiplyByQuantizedMultiplier. 
+ input1_val = -input1_val; + input2_val = -input2_val; + } + int recip_shift; + const int32_t input2_inv = GetReciprocal(input2_val, 31, &recip_shift); + const int headroom = CountLeadingSignBits(input1_val); + const int32_t unscaled_quotient = + MultiplyByQuantizedMultiplierGreaterThanOne(input1_val, input2_inv, + headroom); + const int total_shift = params.output_shift - recip_shift - headroom; + const int32_t unclamped_result = + params.output_offset + + MultiplyByQuantizedMultiplierSmallerThanOneExp( + unscaled_quotient, params.output_multiplier, total_shift); + const int32_t clamped_output = + std::min(params.quantized_activation_max, + std::max(params.quantized_activation_min, unclamped_result)); + output_data[SubscriptToIndex(output_desc, indexes)] = + static_cast(clamped_output); + }; + NDOpsHelper(output_desc, div_func); +} + +template +inline void BroadcastDivSlow(const ArithmeticParams& params, + const RuntimeShape& unextended_input1_shape, + const uint8_t* input1_data, + const RuntimeShape& unextended_input2_shape, + const uint8_t* input2_data, + const RuntimeShape& unextended_output_shape, + uint8_t* output_data) { + BroadcastDivSlowQuantized( + params, unextended_input1_shape, input1_data, unextended_input2_shape, + input2_data, unextended_output_shape, output_data); +} + +template +inline void BroadcastDivSlow(const ArithmeticParams& params, + const RuntimeShape& unextended_input1_shape, + const int8_t* input1_data, + const RuntimeShape& unextended_input2_shape, + const int8_t* input2_data, + const RuntimeShape& unextended_output_shape, + int8_t* output_data) { + BroadcastDivSlowQuantized( + params, unextended_input1_shape, input1_data, unextended_input2_shape, + input2_data, unextended_output_shape, output_data); +} + +// TODO(jiawen): We can implement BroadcastDiv on buffers of arbitrary +// dimensionality if the runtime code does a single loop over one dimension +// that handles broadcasting as the base case. 
The code generator would then +// generate max(D1, D2) nested for loops. +template +void BroadcastDivSlow(const ArithmeticParams& params, + const RuntimeShape& unextended_input1_shape, + const T* input1_data, + const RuntimeShape& unextended_input2_shape, + const T* input2_data, + const RuntimeShape& unextended_output_shape, + T* output_data) { + T output_activation_min; + T output_activation_max; + GetActivationParams(params, &output_activation_min, &output_activation_max); + + TFLITE_DCHECK_LE(unextended_input1_shape.DimensionsCount(), N); + TFLITE_DCHECK_LE(unextended_input2_shape.DimensionsCount(), N); + TFLITE_DCHECK_LE(unextended_output_shape.DimensionsCount(), N); + + NdArrayDesc desc1; + NdArrayDesc desc2; + NdArrayDesc output_desc; + NdArrayDescsForElementwiseBroadcast(unextended_input1_shape, + unextended_input2_shape, &desc1, &desc2); + CopyDimsToDesc(RuntimeShape::ExtendedShape(N, unextended_output_shape), + &output_desc); + + // In Tensorflow, the dimensions are canonically named (batch_number, row, + // col, channel), with extents (batches, height, width, depth), with the + // trailing dimension changing most rapidly (channels has the smallest + // stride, typically 1 element). + // + // In generated C code, we store arrays with the dimensions reversed. The + // first dimension has smallest stride. 
+ + auto div_func = [&](int indexes[N]) { + output_data[SubscriptToIndex(output_desc, indexes)] = + ActivationFunctionWithMinMax( + input1_data[SubscriptToIndex(desc1, indexes)] / + input2_data[SubscriptToIndex(desc2, indexes)], + output_activation_min, output_activation_max); + }; + NDOpsHelper(output_desc, div_func); +} + +template +inline void Div(const ArithmeticParams& params, + const RuntimeShape& input1_shape, const T* input1_data, + const RuntimeShape& input2_shape, const T* input2_data, + const RuntimeShape& output_shape, T* output_data) { + T output_activation_min; + T output_activation_max; + GetActivationParams(params, &output_activation_min, &output_activation_max); + + const int flat_size = + MatchingElementsSize(input1_shape, input2_shape, output_shape); + for (int i = 0; i < flat_size; ++i) { + output_data[i] = ActivationFunctionWithMinMax( + input1_data[i] / input2_data[i], output_activation_min, + output_activation_max); + } +} + +} // namespace reference_ops +} // namespace tflite + +#endif // TENSORFLOW_LITE_KERNELS_INTERNAL_REFERENCE_DIV_H_ diff --git a/tensorflow/lite/kernels/internal/reference/elu.h b/tensorflow/lite/kernels/internal/reference/elu.h new file mode 100644 index 0000000..3dc9358 --- /dev/null +++ b/tensorflow/lite/kernels/internal/reference/elu.h @@ -0,0 +1,37 @@ +/* Copyright 2021 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/ +#ifndef TENSORFLOW_LITE_KERNELS_INTERNAL_REFERENCE_ELU_H_ +#define TENSORFLOW_LITE_KERNELS_INTERNAL_REFERENCE_ELU_H_ + +#include "tensorflow/lite/kernels/internal/cppmath.h" +#include "tensorflow/lite/kernels/internal/types.h" + +namespace tflite { + +namespace reference_ops { + +inline void Elu(const RuntimeShape& input_shape, const float* input_data, + const RuntimeShape& output_shape, float* output_data) { + const int flat_size = MatchingFlatSize(input_shape, output_shape); + for (int i = 0; i < flat_size; ++i) { + const float val = input_data[i]; + output_data[i] = val < 0.0f ? TfLiteExpm1(val) : val; + } +} + +} // namespace reference_ops +} // namespace tflite + +#endif // TENSORFLOW_LITE_KERNELS_INTERNAL_REFERENCE_ELU_H_ diff --git a/tensorflow/lite/kernels/internal/reference/exp.h b/tensorflow/lite/kernels/internal/reference/exp.h new file mode 100644 index 0000000..134ee13 --- /dev/null +++ b/tensorflow/lite/kernels/internal/reference/exp.h @@ -0,0 +1,38 @@ +/* Copyright 2020 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/ +#ifndef TENSORFLOW_LITE_KERNELS_INTERNAL_REFERENCE_EXP_H_ +#define TENSORFLOW_LITE_KERNELS_INTERNAL_REFERENCE_EXP_H_ + +#include + +#include "ruy/profiler/instrumentation.h" // from @ruy +#include "tensorflow/lite/kernels/internal/types.h" + +namespace tflite { +namespace reference_ops { + +template +inline void Exp(const T* input_data, const size_t num_elements, + T* output_data) { + ruy::profiler::ScopeLabel label("Exp"); + for (size_t idx = 0; idx < num_elements; ++idx) { + output_data[idx] = std::exp(input_data[idx]); + } +} + +} // namespace reference_ops +} // namespace tflite + +#endif // TENSORFLOW_LITE_KERNELS_INTERNAL_REFERENCE_EXP_H_ diff --git a/tensorflow/lite/kernels/internal/reference/fill.h b/tensorflow/lite/kernels/internal/reference/fill.h new file mode 100644 index 0000000..16630e6 --- /dev/null +++ b/tensorflow/lite/kernels/internal/reference/fill.h @@ -0,0 +1,38 @@ +/* Copyright 2020 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/ +#ifndef TENSORFLOW_LITE_KERNELS_INTERNAL_REFERENCE_FILL_H_ +#define TENSORFLOW_LITE_KERNELS_INTERNAL_REFERENCE_FILL_H_ + +#include + +#include "tensorflow/lite/kernels/internal/types.h" + +namespace tflite { +namespace reference_ops { + +template +void Fill(const RuntimeShape& value_shape, const T* value_data, + const RuntimeShape& output_shape, T* output_data) { + TFLITE_DCHECK_EQ(value_shape.DimensionsCount(), 0); + const int flat_size = output_shape.FlatSize(); + for (int i = 0; i < flat_size; ++i) { + output_data[i] = *value_data; + } +} + +} // namespace reference_ops +} // namespace tflite + +#endif // TENSORFLOW_LITE_KERNELS_INTERNAL_REFERENCE_FILL_H_ diff --git a/tensorflow/lite/kernels/internal/reference/floor.h b/tensorflow/lite/kernels/internal/reference/floor.h new file mode 100644 index 0000000..0693fd4 --- /dev/null +++ b/tensorflow/lite/kernels/internal/reference/floor.h @@ -0,0 +1,39 @@ +/* Copyright 2019 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/ +#ifndef TENSORFLOW_LITE_KERNELS_INTERNAL_REFERENCE_FLOOR_H_ +#define TENSORFLOW_LITE_KERNELS_INTERNAL_REFERENCE_FLOOR_H_ + +#include + +#include "tensorflow/lite/kernels/internal/types.h" + +namespace tflite { + +namespace reference_ops { + +inline void Floor(const RuntimeShape& input_shape, const float* input_data, + const RuntimeShape& output_shape, float* output_data) { + const int flat_size = MatchingFlatSize(input_shape, output_shape); + + for (int i = 0; i < flat_size; i++) { + int offset = i; + output_data[offset] = std::floor(input_data[offset]); + } +} + +} // namespace reference_ops +} // namespace tflite + +#endif // TENSORFLOW_LITE_KERNELS_INTERNAL_REFERENCE_FLOOR_H_ diff --git a/tensorflow/lite/kernels/internal/reference/floor_div.h b/tensorflow/lite/kernels/internal/reference/floor_div.h new file mode 100644 index 0000000..e75d473 --- /dev/null +++ b/tensorflow/lite/kernels/internal/reference/floor_div.h @@ -0,0 +1,35 @@ +/* Copyright 2020 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/ +#ifndef TENSORFLOW_LITE_KERNELS_INTERNAL_REFERENCE_FLOOR_DIV_H_ +#define TENSORFLOW_LITE_KERNELS_INTERNAL_REFERENCE_FLOOR_DIV_H_ + +#include +#include + +#include "tensorflow/lite/kernels/internal/types.h" + +namespace tflite { +namespace reference_ops { + +template +T FloorDiv(T input1, T input2) { + return std::floor(std::divides()(static_cast(input1), + static_cast(input2))); +} + +} // namespace reference_ops +} // namespace tflite + +#endif // TENSORFLOW_LITE_KERNELS_INTERNAL_REFERENCE_FLOOR_DIV_H_ diff --git a/tensorflow/lite/kernels/internal/reference/floor_mod.h b/tensorflow/lite/kernels/internal/reference/floor_mod.h new file mode 100644 index 0000000..20ce18b --- /dev/null +++ b/tensorflow/lite/kernels/internal/reference/floor_mod.h @@ -0,0 +1,44 @@ +/* Copyright 2020 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/ +#ifndef TENSORFLOW_LITE_KERNELS_INTERNAL_REFERENCE_FLOOR_MOD_H_ +#define TENSORFLOW_LITE_KERNELS_INTERNAL_REFERENCE_FLOOR_MOD_H_ + +#include +#include + +namespace tflite { + +namespace reference_ops { + +template +T FloorMod(T input1, T input2) { + struct FloatMod { + float operator()(const float lhs, const float rhs) const { + return std::fmod(lhs, rhs); + } + }; + using ModFunc = typename std::conditional::value, + std::modulus, FloatMod>::type; + ModFunc mod_func; + T trunc_mod = mod_func(input1, input2); + return (trunc_mod != 0) && ((input2 < 0) != (trunc_mod < 0)) + ? (trunc_mod + input2) + : trunc_mod; +} + +} // namespace reference_ops +} // namespace tflite + +#endif // TENSORFLOW_LITE_KERNELS_INTERNAL_REFERENCE_FLOOR_MOD_H_ diff --git a/tensorflow/lite/kernels/internal/reference/fully_connected.h b/tensorflow/lite/kernels/internal/reference/fully_connected.h new file mode 100644 index 0000000..ba51cbc --- /dev/null +++ b/tensorflow/lite/kernels/internal/reference/fully_connected.h @@ -0,0 +1,323 @@ +/* Copyright 2017 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/ +#ifndef TENSORFLOW_LITE_KERNELS_INTERNAL_REFERENCE_FULLY_CONNECTED_H_ +#define TENSORFLOW_LITE_KERNELS_INTERNAL_REFERENCE_FULLY_CONNECTED_H_ + +#include + +#include "ruy/profiler/instrumentation.h" // from @ruy +#include "tensorflow/lite/kernels/internal/common.h" +#include "tensorflow/lite/kernels/internal/cppmath.h" +#include "tensorflow/lite/kernels/internal/quantization_util.h" +#include "tensorflow/lite/kernels/internal/types.h" + +namespace tflite { +namespace reference_ops { + +inline void FullyConnected( + const FullyConnectedParams& params, const RuntimeShape& input_shape, + const float* input_data, const RuntimeShape& weights_shape, + const float* weights_data, const RuntimeShape& bias_shape, + const float* bias_data, const RuntimeShape& output_shape, + float* output_data) { + const float output_activation_min = params.float_activation_min; + const float output_activation_max = params.float_activation_max; + // TODO(b/62193649): This really should be: + // const int batches = ArraySize(output_dims, 1); + // but the current --variable_batch hack consists in overwriting the 3rd + // dimension with the runtime batch size, as we don't keep track for each + // array of which dimension is the batch dimension in it. 
+ const int output_dims_count = output_shape.DimensionsCount(); + const int weights_dims_count = weights_shape.DimensionsCount(); + const int batches = FlatSizeSkipDim(output_shape, output_dims_count - 1); + const int output_depth = MatchingDim(weights_shape, weights_dims_count - 2, + output_shape, output_dims_count - 1); + const int accum_depth = weights_shape.Dims(weights_dims_count - 1); + for (int b = 0; b < batches; ++b) { + for (int out_c = 0; out_c < output_depth; ++out_c) { + float total = 0.f; + for (int d = 0; d < accum_depth; ++d) { + total += input_data[b * accum_depth + d] * + weights_data[out_c * accum_depth + d]; + } + float bias_value = 0.0f; + if (bias_data) { + bias_value = bias_data[out_c]; + } + output_data[out_c + output_depth * b] = ActivationFunctionWithMinMax( + total + bias_value, output_activation_min, output_activation_max); + } + } +} + +inline void FullyConnected( + const FullyConnectedParams& params, const RuntimeShape& input_shape, + const uint8_t* input_data, const RuntimeShape& filter_shape, + const uint8_t* filter_data, const RuntimeShape& bias_shape, + const int32_t* bias_data, const RuntimeShape& output_shape, + uint8_t* output_data) { + const int32_t input_offset = params.input_offset; + const int32_t filter_offset = params.weights_offset; + const int32_t output_offset = params.output_offset; + const int32_t output_multiplier = params.output_multiplier; + const int output_shift = params.output_shift; + const int32_t output_activation_min = params.quantized_activation_min; + const int32_t output_activation_max = params.quantized_activation_max; + TFLITE_DCHECK_GE(filter_shape.DimensionsCount(), 2); + TFLITE_DCHECK_GE(output_shape.DimensionsCount(), 1); + + TFLITE_DCHECK_LE(output_activation_min, output_activation_max); + // TODO(b/62193649): This really should be: + // const int batches = ArraySize(output_dims, 1); + // but the current --variable_batch hack consists in overwriting the 3rd + // dimension with the runtime batch 
size, as we don't keep track for each + // array of which dimension is the batch dimension in it. + const int output_dim_count = output_shape.DimensionsCount(); + const int filter_dim_count = filter_shape.DimensionsCount(); + const int batches = FlatSizeSkipDim(output_shape, output_dim_count - 1); + const int output_depth = MatchingDim(filter_shape, filter_dim_count - 2, + output_shape, output_dim_count - 1); + const int accum_depth = filter_shape.Dims(filter_dim_count - 1); + for (int b = 0; b < batches; ++b) { + for (int out_c = 0; out_c < output_depth; ++out_c) { + int32_t acc = 0; + for (int d = 0; d < accum_depth; ++d) { + int32_t input_val = input_data[b * accum_depth + d]; + int32_t filter_val = filter_data[out_c * accum_depth + d]; + acc += (filter_val + filter_offset) * (input_val + input_offset); + } + if (bias_data) { + acc += bias_data[out_c]; + } + acc = MultiplyByQuantizedMultiplier(acc, output_multiplier, output_shift); + acc += output_offset; + acc = std::max(acc, output_activation_min); + acc = std::min(acc, output_activation_max); + output_data[out_c + output_depth * b] = static_cast(acc); + } + } +} + +inline void FullyConnected( + const FullyConnectedParams& params, const RuntimeShape& input_shape, + const uint8_t* input_data, const RuntimeShape& filter_shape, + const uint8_t* filter_data, const RuntimeShape& bias_shape, + const int32_t* bias_data, const RuntimeShape& output_shape, + int16_t* output_data) { + const int32_t input_offset = params.input_offset; + const int32_t filter_offset = params.weights_offset; + const int32_t output_offset = params.output_offset; + const int32_t output_multiplier = params.output_multiplier; + const int output_shift = params.output_shift; + const int32_t output_activation_min = params.quantized_activation_min; + const int32_t output_activation_max = params.quantized_activation_max; + + TFLITE_DCHECK_LE(output_activation_min, output_activation_max); + TFLITE_DCHECK_EQ(output_offset, 0); + // TODO(b/62193649): 
This really should be: + // const int batches = ArraySize(output_dims, 1); + // but the current --variable_batch hack consists in overwriting the 3rd + // dimension with the runtime batch size, as we don't keep track for each + // array of which dimension is the batch dimension in it. + const int output_dim_count = output_shape.DimensionsCount(); + const int filter_dim_count = filter_shape.DimensionsCount(); + const int batches = FlatSizeSkipDim(output_shape, output_dim_count - 1); + const int output_depth = MatchingDim(filter_shape, filter_dim_count - 2, + output_shape, output_dim_count - 1); + const int accum_depth = filter_shape.Dims(filter_dim_count - 1); + for (int b = 0; b < batches; ++b) { + for (int out_c = 0; out_c < output_depth; ++out_c) { + // Internal accumulation. + // Initialize accumulator with the bias-value. + int32_t accum = bias_data[out_c]; + // Accumulation loop. + for (int d = 0; d < accum_depth; ++d) { + int16_t input_val = input_data[b * accum_depth + d] + input_offset; + int16_t filter_val = + filter_data[out_c * accum_depth + d] + filter_offset; + accum += filter_val * input_val; + } + // Down-scale the final int32_t accumulator to the scale used by our + // (16-bit, typically 3 integer bits) fixed-point format. The quantized + // multiplier and shift here have been pre-computed offline + // (e.g. by toco). + accum = + MultiplyByQuantizedMultiplier(accum, output_multiplier, output_shift); + // Saturate, cast to int16_t, and store to output array. 
+ accum = std::max(accum, output_activation_min - output_offset); + accum = std::min(accum, output_activation_max - output_offset); + accum += output_offset; + output_data[out_c + output_depth * b] = accum; + } + } +} + +inline void ShuffledFullyConnected( + const FullyConnectedParams& params, const RuntimeShape& input_shape, + const uint8_t* input_data, const RuntimeShape& weights_shape, + const uint8_t* shuffled_weights_data, const RuntimeShape& bias_shape, + const int32_t* bias_data, const RuntimeShape& output_shape, + int16_t* output_data, uint8_t* shuffled_input_workspace_data) { + const int32_t output_multiplier = params.output_multiplier; + const int output_shift = params.output_shift; + const int32_t output_activation_min = params.quantized_activation_min; + const int32_t output_activation_max = params.quantized_activation_max; + TFLITE_DCHECK_LE(output_activation_min, output_activation_max); + + TFLITE_DCHECK_GE(input_shape.DimensionsCount(), 1); + TFLITE_DCHECK_GE(weights_shape.DimensionsCount(), 2); + TFLITE_DCHECK_GE(output_shape.DimensionsCount(), 1); + // TODO(b/62193649): This really should be: + // const int batches = ArraySize(output_dims, 1); + // but the current --variable_batch hack consists in overwriting the 3rd + // dimension with the runtime batch size, as we don't keep track for each + // array of which dimension is the batch dimension in it. 
+ const int output_dim_count = output_shape.DimensionsCount(); + const int weights_dim_count = weights_shape.DimensionsCount(); + const int batches = FlatSizeSkipDim(output_shape, output_dim_count - 1); + const int output_depth = MatchingDim(weights_shape, weights_dim_count - 2, + output_shape, output_dim_count - 1); + const int accum_depth = weights_shape.Dims(weights_dim_count - 1); + TFLITE_DCHECK((accum_depth % 16) == 0); + TFLITE_DCHECK((output_depth % 4) == 0); + + // Shuffling and xoring of input activations into the workspace buffer + uint8_t* shuffled_input_workspace_ptr = shuffled_input_workspace_data; + if (batches == 1) { + for (int i = 0; i < accum_depth; i++) { + shuffled_input_workspace_data[i] = input_data[i] ^ 0x80; + } + } else if (batches == 4) { + for (int c = 0; c < accum_depth; c += 16) { + for (int b = 0; b < 4; b++) { + const uint8_t* src_data_ptr = input_data + b * accum_depth + c; + for (int j = 0; j < 16; j++) { + uint8_t src_val = *src_data_ptr++; + // Flip the sign bit, so that the kernel will only need to + // reinterpret these uint8_t values as int8_t, getting for free the + // subtraction of the zero_point value 128. + uint8_t dst_val = src_val ^ 0x80; + *shuffled_input_workspace_ptr++ = dst_val; + } + } + } + } else { + TFLITE_DCHECK(false); + return; + } + + // Actual computation + if (batches == 1) { + int16_t* output_ptr = output_data; + // Shuffled weights have had their sign bit (0x80) pre-flipped (xor'd) + // so that just reinterpreting them as int8_t values is equivalent to + // subtracting 128 from them, thus implementing for free the subtraction of + // the zero_point value 128. + const int8_t* shuffled_weights_ptr = + reinterpret_cast(shuffled_weights_data); + // Likewise, we preshuffled and pre-xored the input data above. + const int8_t* shuffled_input_data = + reinterpret_cast(shuffled_input_workspace_data); + for (int c = 0; c < output_depth; c += 4) { + // Internal accumulation. 
+ // Initialize accumulator with the bias-value. + int32_t accum[4] = {0}; + // Accumulation loop. + for (int d = 0; d < accum_depth; d += 16) { + for (int i = 0; i < 4; i++) { + for (int j = 0; j < 16; j++) { + int8_t input_val = shuffled_input_data[d + j]; + int8_t weights_val = *shuffled_weights_ptr++; + accum[i] += weights_val * input_val; + } + } + } + for (int i = 0; i < 4; i++) { + // Add bias value + int32_t acc = accum[i] + bias_data[c + i]; + // Down-scale the final int32_t accumulator to the scale used by our + // (16-bit, typically 3 integer bits) fixed-point format. The quantized + // multiplier and shift here have been pre-computed offline + // (e.g. by toco). + acc = + MultiplyByQuantizedMultiplier(acc, output_multiplier, output_shift); + // Saturate, cast to int16_t, and store to output array. + acc = std::max(acc, output_activation_min); + acc = std::min(acc, output_activation_max); + output_ptr[c + i] = acc; + } + } + } else if (batches == 4) { + int16_t* output_ptr = output_data; + // Shuffled weights have had their sign bit (0x80) pre-flipped (xor'd) + // so that just reinterpreting them as int8_t values is equivalent to + // subtracting 128 from them, thus implementing for free the subtraction of + // the zero_point value 128. + const int8_t* shuffled_weights_ptr = + reinterpret_cast(shuffled_weights_data); + // Likewise, we preshuffled and pre-xored the input data above. + const int8_t* shuffled_input_data = + reinterpret_cast(shuffled_input_workspace_data); + for (int c = 0; c < output_depth; c += 4) { + const int8_t* shuffled_input_ptr = shuffled_input_data; + // Accumulation loop. + // Internal accumulation. + // Initialize accumulator with the bias-value. 
+ int32_t accum[4][4]; + for (int i = 0; i < 4; i++) { + for (int b = 0; b < 4; b++) { + accum[i][b] = 0; + } + } + for (int d = 0; d < accum_depth; d += 16) { + for (int i = 0; i < 4; i++) { + for (int b = 0; b < 4; b++) { + for (int j = 0; j < 16; j++) { + int8_t input_val = shuffled_input_ptr[16 * b + j]; + int8_t weights_val = shuffled_weights_ptr[16 * i + j]; + accum[i][b] += weights_val * input_val; + } + } + } + shuffled_input_ptr += 64; + shuffled_weights_ptr += 64; + } + for (int i = 0; i < 4; i++) { + for (int b = 0; b < 4; b++) { + // Add bias value + int32_t acc = accum[i][b] + bias_data[c + i]; + // Down-scale the final int32_t accumulator to the scale used by our + // (16-bit, typically 3 integer bits) fixed-point format. The + // quantized multiplier and shift here have been pre-computed offline + // (e.g. by toco). + acc = MultiplyByQuantizedMultiplier(acc, output_multiplier, + output_shift); + // Saturate, cast to int16_t, and store to output array. + acc = std::max(acc, output_activation_min); + acc = std::min(acc, output_activation_max); + output_ptr[b * output_depth + c + i] = acc; + } + } + } + } else { + TFLITE_DCHECK(false); + return; + } +} + +} // namespace reference_ops +} // namespace tflite + +#endif // TENSORFLOW_LITE_KERNELS_INTERNAL_REFERENCE_FULLY_CONNECTED_H_ diff --git a/tensorflow/lite/kernels/internal/reference/hard_swish.h b/tensorflow/lite/kernels/internal/reference/hard_swish.h new file mode 100644 index 0000000..81fcd63 --- /dev/null +++ b/tensorflow/lite/kernels/internal/reference/hard_swish.h @@ -0,0 +1,168 @@ +/* Copyright 2020 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. 
+You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ +#ifndef TENSORFLOW_LITE_KERNELS_INTERNAL_REFERENCE_HARD_SWISH_H_ +#define TENSORFLOW_LITE_KERNELS_INTERNAL_REFERENCE_HARD_SWISH_H_ + +#include + +#include "ruy/profiler/instrumentation.h" // from @ruy +#include "tensorflow/lite/kernels/internal/common.h" +#include "tensorflow/lite/kernels/internal/types.h" + +namespace tflite { +namespace reference_ops { + +inline int16_t SaturatingLeftShift(int16_t value, int amount) { + int64_t result = static_cast(value) * (1 << amount); + result = std::min(result, std::numeric_limits::max()); + result = std::max(result, std::numeric_limits::min()); + return result; +} + +// Similar to ARM instruction SQDMULH. +// Similar to gemmlowp::SaturatingRoundingDoublingHighMul except +// rounding to zero instead of to nearest (SQRDMULH). +inline std::int16_t SaturatingDoublingHighMul(std::int16_t a, std::int16_t b) { + bool overflow = a == b && a == std::numeric_limits::min(); + std::int32_t a_32(a); + std::int32_t b_32(b); + std::int32_t ab_32 = a_32 * b_32; + std::int16_t ab_x2_high16 = static_cast((ab_32) / (1 << 15)); + return overflow ? 
std::numeric_limits::max() : ab_x2_high16; +} + +template +inline void HardSwish(const RuntimeShape& input_shape, const T* input_data, + const RuntimeShape& output_shape, T* output_data) { + ruy::profiler::ScopeLabel label("ReferenceHardSwish/Float"); + auto matching_size = MatchingFlatSize(input_shape, output_shape); + const T* in_end = input_data + matching_size; + for (; input_data < in_end; input_data++, output_data++) { + const float in = *input_data; + *output_data = + in * std::min(static_cast(6), std::max(static_cast(0), in + 3)) / + 6; + } +} + +template +inline void HardSwish(const HardSwishParams& params, + const RuntimeShape& input_shape, const T* input_data, + const RuntimeShape& output_shape, T* output_data) { + ruy::profiler::ScopeLabel label("ReferenceHardSwish/Quantized"); + + const int flat_size = MatchingFlatSize(input_shape, output_shape); + + for (int i = 0; i < flat_size; i++) { + const int16_t input_value = input_data[i] - params.input_zero_point; + // Left-shift as much as we can without overflow/saturation to put + // significant bits in the high bits of our 16-bit fixedpoint values, so + // that fixed-point approximate computations below are as accurate as + // possible. + const int16_t input_value_on_hires_input_scale = input_value * (1 << 7); + // Compute the input value on essentially the output scale, just not + // right-shifted yet. This is the value that we'll use in the (x >= +3) + // case, and that in the general case we'll multiply against the "relu-ish" + // fixed-point multiplier in [0, 1]. + const int16_t input_value_on_preshift_output_scale = + gemmlowp::SaturatingRoundingDoublingHighMul( + input_value_on_hires_input_scale, + params.output_multiplier_fixedpoint_int16); + // Now compute the "relu-ish multiplier". In the (-3 <= x <= +3) case, that + // is just an affine rescaling of x from [-3, 3] to [0, 1]. In the general + // case, it is just that plus saturation at the boundaries of [-3, 3]. 
+ // First, we rescale from [-3, 3] to [-1, 1], saturating. + // That is done by rescaling the input value with a fixed-point multiplier + // (reluish_multiplier_fixedpoint) and bit-shift such that we represent + // that input value on the scale where the real value 3.0f is represented + // by the quantized value 32768. (+32768 is actually not representable as + // int16_t, so this saturates at +32767, and that is seen empirically to be + // a negligible contribution to numerical error/bias). + // + // This code is careful to correctly implement any magnitude of multiplier, + // involving either a right shift or a left shift, with correct saturation + // behavior in the left-shift case. This forces this code to be more + // complicated, but is necessary for real applications: a partially + // trained quantized MobileNet v3-small model that motivated this code + // exhibits some large [min, max] range boundaries, of the order of + // magnitude of 10 or 100 depending on layers. + // + // The next few lines are basically just an ordinary + // MultiplyByQuantizedMultiplier, except that we are more careful here + // about the fine details of saturation when left-shifting, because here + // overflow in left-shift is a common case, not an anomaly as + // MultiplyByQuantizedMultiplier assumes. + int16_t reluish_value = input_value_on_hires_input_scale; + // Shift left, saturating, as much as we can while ensuring that this + // saturation will not contribute to the result. That is, left shift amount + // reduced by 1. + if (params.reluish_multiplier_exponent > 0) { + reluish_value = SaturatingLeftShift( + reluish_value, params.reluish_multiplier_exponent - 1); + } + // Apply the fixed-point multiplier, dividing the value by a divisor + // ranging in [1, 2]. + reluish_value = gemmlowp::SaturatingRoundingDoublingHighMul( + reluish_value, params.reluish_multiplier_fixedpoint_int16); + // Apply the last bit of left-shift. 
Thus, in the left-shifting case, if + // any saturation affects the result, it is happening here --- any + // saturation having occurred above is overwritten here, not affecting the + // result. + if (params.reluish_multiplier_exponent > 0) { + reluish_value = SaturatingLeftShift(reluish_value, 1); + } + // Shift right, in the right-shifting case. + if (params.reluish_multiplier_exponent < 0) { + reluish_value = gemmlowp::RoundingDivideByPOT( + reluish_value, -params.reluish_multiplier_exponent); + } + // At this point we have rescaled the value into a 16bit fixedpoint + // reluish_value in [-1, 1]. + // We now convert that to a 16bit fixedpoint value in [0, 1]. + reluish_value = (reluish_value + (1 << 15)) >> 1; + // Use of SaturatingDoublingHighMul here is important to cancel the biases + // from the above SaturatingRoundingDoublingHighMul. + // + // On a partially trained MobileNet-v3-small, + // + // | bias on | ImageNet + // | quantized | Top-1 + // Operation used here | values | accuracy (50k) + // --------------------------------------+------------+----------- + // SaturatingDoublingHighMul | -0.0024 | 58.920 + // SaturatingRoundingDoublingHighMul | -0.0067 | 58.064 + // + // In activations_test, this is covered by this testcase: + // QuantizedActivationsOpTest.HardSwishBias + // + const int16_t preshift_output_value = SaturatingDoublingHighMul( + reluish_value, input_value_on_preshift_output_scale); + // We were so far operating on the pre-shift output scale. Now we finally + // apply that output shift, arriving at the final output scale. 
+ int16_t output_value = gemmlowp::RoundingDivideByPOT( + preshift_output_value, -params.output_multiplier_exponent); + output_value += params.output_zero_point; + output_value = + std::min(output_value, std::numeric_limits::max()); + output_value = + std::max(output_value, std::numeric_limits::min()); + output_data[i] = output_value; + } +} + +} // namespace reference_ops +} // namespace tflite + +#endif // TENSORFLOW_LITE_KERNELS_INTERNAL_REFERENCE_HARD_SWISH_H_ diff --git a/tensorflow/lite/kernels/internal/reference/integer_ops/add.h b/tensorflow/lite/kernels/internal/reference/integer_ops/add.h new file mode 100644 index 0000000..579964d --- /dev/null +++ b/tensorflow/lite/kernels/internal/reference/integer_ops/add.h @@ -0,0 +1,218 @@ +/* Copyright 2019 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ +#ifndef TENSORFLOW_LITE_KERNELS_INTERNAL_REFERENCE_INTEGER_OPS_ADD_H_ +#define TENSORFLOW_LITE_KERNELS_INTERNAL_REFERENCE_INTEGER_OPS_ADD_H_ + +#include +#include + +#include "tensorflow/lite/kernels/internal/common.h" +#include "tensorflow/lite/kernels/internal/types.h" + +namespace tflite { +namespace reference_integer_ops { + +inline void CheckArithmeticParams(const ArithmeticParams& params) { + TFLITE_DCHECK_LE(params.quantized_activation_min, + params.quantized_activation_max); + // Input offset is negative input zero point. 
Activation tensors are + // asymmetric quantized so they span the full int8 range. + TFLITE_DCHECK_GE(-params.input1_offset, std::numeric_limits::min()); + TFLITE_DCHECK_GE(-params.input2_offset, std::numeric_limits::min()); + TFLITE_DCHECK_LE(-params.input1_offset, std::numeric_limits::max()); + TFLITE_DCHECK_LE(-params.input2_offset, std::numeric_limits::max()); +} + +// TODO(b/270589088): move to a more appropriate file (b/270589088#comment2) +template +void ElementWise(int size, const ArithmeticParams& params, const T* input1_data, + const T* input2_data, T* output_data, + void (*check_arithmetic_params)(const ArithmeticParams&), + T (*binary_func)(T, T, const ArithmeticParams&)) { + CheckArithmeticParams(params); + for (int i = 0; i < size; ++i) { + output_data[i] = binary_func(input1_data[i], input2_data[i], params); + } +} +// TODO(b/270589088): move to a more appropriate file. (b/270589088#comment2) +template +void BroadcastBinaryFunction6DSlow( + const ArithmeticParams& params, const RuntimeShape& input1_shape, + const T* input1_data, const RuntimeShape& input2_shape, + const T* input2_data, const RuntimeShape& output_shape, T* output_data, + void (*check_arithmetic_params)(const ArithmeticParams&), + T (*binary_func)(T, T, const ArithmeticParams&)) { + NdArrayDesc<6> desc1; + NdArrayDesc<6> desc2; + NdArrayDescsForElementwiseBroadcast(input1_shape, input2_shape, &desc1, + &desc2); + const RuntimeShape extended_output_shape = + RuntimeShape::ExtendedShape(6, output_shape); + + // In Tensorflow, the dimensions are canonically named (batch_number, row, + // col, channel), with extents (batches, height, width, depth), with the + // trailing dimension changing most rapidly (channels has the smallest stride, + // typically 1 element). + // + // In generated C code, we store arrays with the dimensions reversed. The + // first dimension has smallest stride. 
+ // + // We name our variables by their Tensorflow convention, but generate C code + // nesting loops such that the innermost loop has the smallest stride for the + // best cache behavior. + size_t input1_offset_a = 0; + size_t input2_offset_a = 0; + size_t output_offset_a = 0; + for (int a = 0; a < extended_output_shape.Dims(0); ++a) { + size_t input1_offset_d = input1_offset_a; + size_t input2_offset_d = input2_offset_a; + size_t output_offset_d = output_offset_a; + for (int d = 0; d < extended_output_shape.Dims(1); ++d) { + size_t input1_offset_b = input1_offset_d; + size_t input2_offset_b = input2_offset_d; + size_t output_offset_b = output_offset_d; + for (int b = 0; b < extended_output_shape.Dims(2); ++b) { + size_t input1_offset_y = input1_offset_b; + size_t input2_offset_y = input2_offset_b; + size_t output_offset_y = output_offset_b; + for (int y = 0; y < extended_output_shape.Dims(3); ++y) { + size_t input1_offset_x = input1_offset_y; + size_t input2_offset_x = input2_offset_y; + size_t output_offset_x = output_offset_y; + for (int x = 0; x < extended_output_shape.Dims(4); ++x) { + size_t input1_offset_c = input1_offset_x; + size_t input2_offset_c = input2_offset_x; + size_t output_offset_c = output_offset_x; + for (int c = 0; c < extended_output_shape.Dims(5); ++c) { + output_data[output_offset_c] = + binary_func(input1_data[input1_offset_c], + input2_data[input2_offset_c], params); + input1_offset_c += desc1.strides[5]; + input2_offset_c += desc2.strides[5]; + ++output_offset_c; + } + input1_offset_x += desc1.strides[4]; + input2_offset_x += desc2.strides[4]; + output_offset_x += extended_output_shape.Dims(5); + } + input1_offset_y += desc1.strides[3]; + input2_offset_y += desc2.strides[3]; + output_offset_y += + extended_output_shape.Dims(4) * extended_output_shape.Dims(5); + } + input1_offset_b += desc1.strides[2]; + input2_offset_b += desc2.strides[2]; + output_offset_b += extended_output_shape.Dims(3) * + extended_output_shape.Dims(4) * + 
extended_output_shape.Dims(5); + } + input1_offset_d += desc1.strides[1]; + input2_offset_d += desc2.strides[1]; + output_offset_d += + extended_output_shape.Dims(2) * extended_output_shape.Dims(3) * + extended_output_shape.Dims(4) * extended_output_shape.Dims(5); + } + input1_offset_a += desc1.strides[0]; + input2_offset_a += desc2.strides[0]; + output_offset_a += + extended_output_shape.Dims(1) * extended_output_shape.Dims(2) * + extended_output_shape.Dims(3) * extended_output_shape.Dims(4) * + extended_output_shape.Dims(5); + } +} + +template +void BroadcastBinaryFunction4DSlow( + const ArithmeticParams& params, const RuntimeShape& input1_shape, + const T* input1_data, const RuntimeShape& input2_shape, + const T* input2_data, const RuntimeShape& output_shape, T* output_data, + void (*check_arithmetic_params)(const ArithmeticParams&), + T (*binary_func)(T, T, const ArithmeticParams&)) { + BroadcastBinaryFunction6DSlow(params, input1_shape, input1_data, input2_shape, + input2_data, output_shape, output_data, + check_arithmetic_params, binary_func); +} + +inline int8_t AddFunc(int8_t x, int8_t y, const ArithmeticParams& params) { + const int32_t input1_val = params.input1_offset + x; + const int32_t input2_val = params.input2_offset + y; + const int32_t shifted_input1_val = input1_val * (1 << params.left_shift); + const int32_t shifted_input2_val = input2_val * (1 << params.left_shift); + const int32_t scaled_input1_val = + MultiplyByQuantizedMultiplierSmallerThanOneExp( + shifted_input1_val, params.input1_multiplier, params.input1_shift); + const int32_t scaled_input2_val = + MultiplyByQuantizedMultiplierSmallerThanOneExp( + shifted_input2_val, params.input2_multiplier, params.input2_shift); + const int32_t raw_sum = scaled_input1_val + scaled_input2_val; + const int32_t raw_output = + MultiplyByQuantizedMultiplierSmallerThanOneExp( + raw_sum, params.output_multiplier, params.output_shift) + + params.output_offset; + const int32_t clamped_output = + 
std::min(params.quantized_activation_max, + std::max(params.quantized_activation_min, raw_output)); + return static_cast(clamped_output); +} + +// Element-wise add that can often be used for inner loop of broadcast add as +// well as the non-broadcast add. +inline void AddElementwise(int size, const ArithmeticParams& params, + const int8_t* input1_data, const int8_t* input2_data, + int8_t* output_data) { + ElementWise(size, params, input1_data, input2_data, output_data, + CheckArithmeticParams, AddFunc); +} + +inline void Add(const ArithmeticParams& params, + const RuntimeShape& input1_shape, const int8_t* input1_data, + const RuntimeShape& input2_shape, const int8_t* input2_data, + const RuntimeShape& output_shape, int8_t* output_data) { + CheckArithmeticParams(params); + + const int flat_size = + MatchingElementsSize(input1_shape, input2_shape, output_shape); + + AddElementwise(flat_size, params, input1_data, input2_data, output_data); +} + +inline void BroadcastAdd6DSlow(const ArithmeticParams& params, + const RuntimeShape& input1_shape, + const int8_t* input1_data, + const RuntimeShape& input2_shape, + const int8_t* input2_data, + const RuntimeShape& output_shape, + int8_t* output_data) { + BroadcastBinaryFunction6DSlow(params, input1_shape, input1_data, input2_shape, + input2_data, output_shape, output_data, + CheckArithmeticParams, AddFunc); +} + +inline void BroadcastAdd4DSlow(const ArithmeticParams& params, + const RuntimeShape& input1_shape, + const int8_t* input1_data, + const RuntimeShape& input2_shape, + const int8_t* input2_data, + const RuntimeShape& output_shape, + int8_t* output_data) { + BroadcastBinaryFunction6DSlow(params, input1_shape, input1_data, input2_shape, + input2_data, output_shape, output_data, + CheckArithmeticParams, AddFunc); +} + +} // namespace reference_integer_ops +} // namespace tflite + +#endif // TENSORFLOW_LITE_KERNELS_INTERNAL_REFERENCE_INTEGER_OPS_ADD_H_ diff --git 
a/tensorflow/lite/kernels/internal/reference/integer_ops/conv.h b/tensorflow/lite/kernels/internal/reference/integer_ops/conv.h new file mode 100644 index 0000000..eac0057 --- /dev/null +++ b/tensorflow/lite/kernels/internal/reference/integer_ops/conv.h @@ -0,0 +1,241 @@ +/* Copyright 2019 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ +#ifndef TENSORFLOW_LITE_KERNELS_INTERNAL_REFERENCE_INTEGER_OPS_CONV_H_ +#define TENSORFLOW_LITE_KERNELS_INTERNAL_REFERENCE_INTEGER_OPS_CONV_H_ + +#include + +#include "tensorflow/lite/kernels/internal/common.h" + +namespace tflite { +namespace reference_integer_ops { + +// Fixed-point per-channel-quantization convolution reference kernel. +inline void ConvPerChannel( + const ConvParams& params, const int32_t* output_multiplier, + const int32_t* output_shift, const RuntimeShape& input_shape, + const int8_t* input_data, const RuntimeShape& filter_shape, + const int8_t* filter_data, const RuntimeShape& bias_shape, + const int32_t* bias_data, const RuntimeShape& output_shape, + int8_t* output_data) { + // Get parameters. 
+ const int32_t input_offset = params.input_offset; // r = s(q - Z) + const int stride_width = params.stride_width; + const int stride_height = params.stride_height; + const int dilation_width_factor = params.dilation_width_factor; + const int dilation_height_factor = params.dilation_height_factor; + const int pad_width = params.padding_values.width; + const int pad_height = params.padding_values.height; + const int32_t output_offset = params.output_offset; + + // Set min and max value of the output. + const int32_t output_activation_min = params.quantized_activation_min; + const int32_t output_activation_max = params.quantized_activation_max; + + // Consistency check. + TFLITE_DCHECK_LE(output_activation_min, output_activation_max); + TFLITE_DCHECK_EQ(input_shape.DimensionsCount(), 4); + TFLITE_DCHECK_EQ(filter_shape.DimensionsCount(), 4); + TFLITE_DCHECK_EQ(output_shape.DimensionsCount(), 4); + const int batches = MatchingDim(input_shape, 0, output_shape, 0); + const int input_depth = input_shape.Dims(3); + const int output_depth = MatchingDim(filter_shape, 0, output_shape, 3); + if (bias_data) { + TFLITE_DCHECK_EQ(bias_shape.FlatSize(), output_depth); + } + + // Check dimensions of the tensors. 
+ const int input_height = input_shape.Dims(1); + const int input_width = input_shape.Dims(2); + const int filter_height = filter_shape.Dims(1); + const int filter_width = filter_shape.Dims(2); + const int filter_input_depth = filter_shape.Dims(3); + const int groups = input_depth / filter_input_depth; + TFLITE_DCHECK_NE(groups, 0); + TFLITE_DCHECK_EQ(input_depth % filter_input_depth, 0); + const int filters_per_group = output_depth / groups; + TFLITE_DCHECK_NE(filters_per_group, 0); + const int output_height = output_shape.Dims(1); + const int output_width = output_shape.Dims(2); + for (int batch = 0; batch < batches; ++batch) { + for (int out_y = 0; out_y < output_height; ++out_y) { + const int in_y_origin = (out_y * stride_height) - pad_height; + for (int out_x = 0; out_x < output_width; ++out_x) { + const int in_x_origin = (out_x * stride_width) - pad_width; + for (int out_channel = 0; out_channel < output_depth; ++out_channel) { + auto group = out_channel / filters_per_group; + int32_t acc = 0; + for (int filter_y = 0; filter_y < filter_height; ++filter_y) { + const int in_y = in_y_origin + dilation_height_factor * filter_y; + for (int filter_x = 0; filter_x < filter_width; ++filter_x) { + const int in_x = in_x_origin + dilation_width_factor * filter_x; + + // Zero padding by omitting the areas outside the image. + const bool is_point_inside_image = + (in_x >= 0) && (in_x < input_width) && (in_y >= 0) && + (in_y < input_height); + + if (!is_point_inside_image) { + continue; + } + + for (int in_channel = 0; in_channel < filter_input_depth; + ++in_channel) { + int32_t input_val = + input_data[Offset(input_shape, batch, in_y, in_x, + in_channel + group * filter_input_depth)]; + int32_t filter_val = filter_data[Offset( + filter_shape, out_channel, filter_y, filter_x, in_channel)]; + // Accumulate with 32 bits accumulator. + // In the nudging process during model quantization, we force + // real value of 0.0 be represented by a quantized value. 
This + // guarantees that the input_offset is a int8_t, even though + // it is represented using int32_t. int32_t += int8_t * + // (int8_t - int8_t) so the highest value we can get from each + // accumulation is [-127, 127] * ([-128, 127] - + // [-128, 127]), which is [-32512, 32512]. log2(32512) + // = 14.98, which means we can accumulate at least 2^16 + // multiplications without overflow. The accumulator is + // applied to a filter so the accumulation logic will hold as + // long as the filter size (filter_y * filter_x * in_channel) + // does not exceed 2^16, which is the case in all the models + // we have seen so far. + // TODO(b/174275578): Add a check to make sure the + // accumulator depth is smaller than 2^16. + acc += filter_val * (input_val + input_offset); + } + } + } + + if (bias_data) { + acc += bias_data[out_channel]; + } + acc = MultiplyByQuantizedMultiplier( + acc, output_multiplier[out_channel], output_shift[out_channel]); + acc += output_offset; + acc = std::max(acc, output_activation_min); + acc = std::min(acc, output_activation_max); + output_data[Offset(output_shape, batch, out_y, out_x, out_channel)] = + static_cast(acc); + } + } + } + } +} + + +// Fixed-point per-channel-quantization convolution reference kernel. +// 16-bit data and 8-bit filter +template +inline void ConvPerChannel( + const ConvParams& params, const int32_t* output_multiplier, + const int32_t* output_shift, const RuntimeShape& input_shape, + const int16_t* input_data, const RuntimeShape& filter_shape, + const int8_t* filter_data, const RuntimeShape& bias_shape, + const AccumScalar* bias_data, const RuntimeShape& output_shape, + int16_t* output_data) { + // Get parameters. 
+ const int stride_width = params.stride_width; + const int stride_height = params.stride_height; + const int dilation_width_factor = params.dilation_width_factor; + const int dilation_height_factor = params.dilation_height_factor; + const int pad_width = params.padding_values.width; + const int pad_height = params.padding_values.height; + + // Set min and max value of the output. + const int32_t output_activation_min = params.quantized_activation_min; + const int32_t output_activation_max = params.quantized_activation_max; + + // Consistency check. + TFLITE_DCHECK_LE(output_activation_min, output_activation_max); + TFLITE_DCHECK_EQ(input_shape.DimensionsCount(), 4); + TFLITE_DCHECK_EQ(filter_shape.DimensionsCount(), 4); + TFLITE_DCHECK_EQ(output_shape.DimensionsCount(), 4); + const int batches = MatchingDim(input_shape, 0, output_shape, 0); + const int input_depth = input_shape.Dims(3); + const int output_depth = MatchingDim(filter_shape, 0, output_shape, 3); + if (bias_data) { + TFLITE_DCHECK_EQ(bias_shape.FlatSize(), output_depth); + } + + // Check dimensions of the tensors. 
+ const int input_height = input_shape.Dims(1); + const int input_width = input_shape.Dims(2); + const int filter_height = filter_shape.Dims(1); + const int filter_width = filter_shape.Dims(2); + const int filter_input_depth = filter_shape.Dims(3); + const int groups = input_depth / filter_input_depth; + TFLITE_DCHECK_EQ(input_depth % filter_input_depth, 0); + const int filters_per_group = output_depth / groups; + const int output_height = output_shape.Dims(1); + const int output_width = output_shape.Dims(2); + for (int batch = 0; batch < batches; ++batch) { + for (int out_y = 0; out_y < output_height; ++out_y) { + const int in_y_origin = (out_y * stride_height) - pad_height; + for (int out_x = 0; out_x < output_width; ++out_x) { + const int in_x_origin = (out_x * stride_width) - pad_width; + for (int out_channel = 0; out_channel < output_depth; ++out_channel) { + auto group = out_channel / filters_per_group; + AccumScalar acc = 0; + for (int filter_y = 0; filter_y < filter_height; ++filter_y) { + const int in_y = in_y_origin + dilation_height_factor * filter_y; + for (int filter_x = 0; filter_x < filter_width; ++filter_x) { + const int in_x = in_x_origin + dilation_width_factor * filter_x; + + // Zero padding by omitting the areas outside the image. + const bool is_point_inside_image = + (in_x >= 0) && (in_x < input_width) && (in_y >= 0) && + (in_y < input_height); + + if (!is_point_inside_image) { + continue; + } + + for (int in_channel = 0; in_channel < filter_input_depth; + ++in_channel) { + int32_t input_val = + input_data[Offset(input_shape, batch, in_y, in_x, + in_channel + group * filter_input_depth)]; + int32_t filter_val = filter_data[Offset( + filter_shape, out_channel, filter_y, filter_x, in_channel)]; + // Accumulate with 64 bits accumulator. + // int64_t += int8_t * int16_t so the highest value we can + // get from each accumulation is [-127, 127] * ([-32768, + // 32767] - + // [-32768, 32767]), which is [-8322945, 8322945]. 
+ // log2(8322945) = 22.99. + acc += filter_val * input_val; + } + } + } + if (bias_data) { + acc += bias_data[out_channel]; + } + int32_t scaled_acc = MultiplyByQuantizedMultiplier( + acc, output_multiplier[out_channel], output_shift[out_channel]); + scaled_acc = std::max(scaled_acc, output_activation_min); + scaled_acc = std::min(scaled_acc, output_activation_max); + output_data[Offset(output_shape, batch, out_y, out_x, out_channel)] = + static_cast(scaled_acc); + } + } + } + } +} + +} // namespace reference_integer_ops +} // namespace tflite + +#endif // TENSORFLOW_LITE_KERNELS_INTERNAL_REFERENCE_INTEGER_OPS_CONV_H_ diff --git a/tensorflow/lite/kernels/internal/reference/integer_ops/depthwise_conv.h b/tensorflow/lite/kernels/internal/reference/integer_ops/depthwise_conv.h new file mode 100644 index 0000000..7676fce --- /dev/null +++ b/tensorflow/lite/kernels/internal/reference/integer_ops/depthwise_conv.h @@ -0,0 +1,291 @@ +/* Copyright 2019 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/ +#ifndef TENSORFLOW_LITE_KERNELS_INTERNAL_REFERENCE_INTEGER_OPS_DEPTHWISE_CONV_H_ +#define TENSORFLOW_LITE_KERNELS_INTERNAL_REFERENCE_INTEGER_OPS_DEPTHWISE_CONV_H_ + +#include + +#include "tensorflow/lite/kernels/internal/common.h" + +namespace tflite { +namespace reference_integer_ops { +inline void DepthwiseConvPerChannel( + const DepthwiseParams& params, const int32_t* output_multiplier, + const int32_t* output_shift, const RuntimeShape& input_shape, + const int8_t* input_data, const RuntimeShape& filter_shape, + const int8_t* filter_data, const RuntimeShape& bias_shape, + const int32_t* bias_data, const RuntimeShape& output_shape, + int8_t* output_data) { + // Get parameters. + // TODO(b/141565753): Re-introduce ScopedProfilingLabel on Micro. + const int stride_width = params.stride_width; + const int stride_height = params.stride_height; + const int dilation_width_factor = params.dilation_width_factor; + const int dilation_height_factor = params.dilation_height_factor; + const int pad_width = params.padding_values.width; + const int pad_height = params.padding_values.height; + const int depth_multiplier = params.depth_multiplier; + const int32_t input_offset = params.input_offset; + const int32_t output_offset = params.output_offset; + const int32_t output_activation_min = params.quantized_activation_min; + const int32_t output_activation_max = params.quantized_activation_max; + + // Check dimensions of the tensors. 
+ TFLITE_DCHECK_EQ(input_shape.DimensionsCount(), 4); + TFLITE_DCHECK_EQ(filter_shape.DimensionsCount(), 4); + TFLITE_DCHECK_EQ(output_shape.DimensionsCount(), 4); + + TFLITE_DCHECK_LE(output_activation_min, output_activation_max); + const int batches = MatchingDim(input_shape, 0, output_shape, 0); + const int output_depth = MatchingDim(filter_shape, 3, output_shape, 3); + const int input_height = input_shape.Dims(1); + const int input_width = input_shape.Dims(2); + const int input_depth = input_shape.Dims(3); + const int filter_height = filter_shape.Dims(1); + const int filter_width = filter_shape.Dims(2); + const int output_height = output_shape.Dims(1); + const int output_width = output_shape.Dims(2); + TFLITE_DCHECK_EQ(output_depth, input_depth * depth_multiplier); + TFLITE_DCHECK_EQ(bias_shape.FlatSize(), output_depth); + + for (int batch = 0; batch < batches; ++batch) { + for (int out_y = 0; out_y < output_height; ++out_y) { + for (int out_x = 0; out_x < output_width; ++out_x) { + for (int in_channel = 0; in_channel < input_depth; ++in_channel) { + for (int m = 0; m < depth_multiplier; ++m) { + const int output_channel = m + in_channel * depth_multiplier; + const int in_x_origin = (out_x * stride_width) - pad_width; + const int in_y_origin = (out_y * stride_height) - pad_height; + int32_t acc = 0; + for (int filter_y = 0; filter_y < filter_height; ++filter_y) { + for (int filter_x = 0; filter_x < filter_width; ++filter_x) { + const int in_x = in_x_origin + dilation_width_factor * filter_x; + const int in_y = + in_y_origin + dilation_height_factor * filter_y; + // Zero padding by omitting the areas outside the image. 
+ const bool is_point_inside_image = + (in_x >= 0) && (in_x < input_width) && (in_y >= 0) && + (in_y < input_height); + if (is_point_inside_image) { + int32_t input_val = input_data[Offset( + input_shape, batch, in_y, in_x, in_channel)]; + int32_t filter_val = filter_data[Offset( + filter_shape, 0, filter_y, filter_x, output_channel)]; + // Accumulate with 32 bits accumulator. + // In the nudging process during model quantization, we force + // real value of 0.0 be represented by a quantized value. This + // guarantees that the input_offset is a int8_t, even though + // it is represented using int32_t. int32_t += int8_t * + // (int8_t - int8_t) so the highest value we can get from each + // accumulation is [-127, 127] * ([-128, 127] - + // [-128, 127]), which is [-32512, 32512]. log2(32512) + // = 14.98, which means we can accumulate at least 2^16 + // multiplications without overflow. The accumulator is + // applied to a filter so the accumulation logic will hold as + // long as the filter size (filter_y * filter_x * in_channel) + // does not exceed 2^16, which is the case in all the models + // we have seen so far. + // TODO(b/174275578): Add a check to make sure the + // accumulator depth is smaller than 2^16. 
+ acc += filter_val * (input_val + input_offset); + } + } + } + if (bias_data) { + acc += bias_data[output_channel]; + } + acc = MultiplyByQuantizedMultiplier( + acc, output_multiplier[output_channel], + output_shift[output_channel]); + acc += output_offset; + acc = std::max(acc, output_activation_min); + acc = std::min(acc, output_activation_max); + output_data[Offset(output_shape, batch, out_y, out_x, + output_channel)] = static_cast(acc); + } + } + } + } + } +} + +inline void DepthwiseConvPerChannel( + const DepthwiseParams& params, const int32_t* output_multiplier, + const int32_t* output_shift, const RuntimeShape& input_shape, + const int16_t* input_data, const RuntimeShape& filter_shape, + const int8_t* filter_data, const RuntimeShape& bias_shape, + const std::int64_t* bias_data, const RuntimeShape& output_shape, + int16_t* output_data) { + // Get parameters. + const int stride_width = params.stride_width; + const int stride_height = params.stride_height; + const int dilation_width_factor = params.dilation_width_factor; + const int dilation_height_factor = params.dilation_height_factor; + const int pad_width = params.padding_values.width; + const int pad_height = params.padding_values.height; + const int depth_multiplier = params.depth_multiplier; + const int32_t output_activation_min = params.quantized_activation_min; + const int32_t output_activation_max = params.quantized_activation_max; + + // Check dimensions of the tensors. 
+ TFLITE_DCHECK_EQ(input_shape.DimensionsCount(), 4); + TFLITE_DCHECK_EQ(filter_shape.DimensionsCount(), 4); + TFLITE_DCHECK_EQ(output_shape.DimensionsCount(), 4); + + TFLITE_DCHECK_LE(output_activation_min, output_activation_max); + const int batches = MatchingDim(input_shape, 0, output_shape, 0); + const int output_depth = MatchingDim(filter_shape, 3, output_shape, 3); + const int input_height = input_shape.Dims(1); + const int input_width = input_shape.Dims(2); + const int input_depth = input_shape.Dims(3); + const int filter_height = filter_shape.Dims(1); + const int filter_width = filter_shape.Dims(2); + const int output_height = output_shape.Dims(1); + const int output_width = output_shape.Dims(2); + TFLITE_DCHECK_EQ(output_depth, input_depth * depth_multiplier); + TFLITE_DCHECK_EQ(bias_shape.FlatSize(), output_depth); + + for (int batch = 0; batch < batches; ++batch) { + for (int out_y = 0; out_y < output_height; ++out_y) { + for (int out_x = 0; out_x < output_width; ++out_x) { + for (int in_channel = 0; in_channel < input_depth; ++in_channel) { + for (int m = 0; m < depth_multiplier; ++m) { + const int output_channel = m + in_channel * depth_multiplier; + const int in_x_origin = (out_x * stride_width) - pad_width; + const int in_y_origin = (out_y * stride_height) - pad_height; + std::int64_t acc = 0; + for (int filter_y = 0; filter_y < filter_height; ++filter_y) { + for (int filter_x = 0; filter_x < filter_width; ++filter_x) { + const int in_x = in_x_origin + dilation_width_factor * filter_x; + const int in_y = + in_y_origin + dilation_height_factor * filter_y; + // Zero padding by omitting the areas outside the image. 
+ const bool is_point_inside_image = + (in_x >= 0) && (in_x < input_width) && (in_y >= 0) && + (in_y < input_height); + if (is_point_inside_image) { + int32_t input_val = input_data[Offset( + input_shape, batch, in_y, in_x, in_channel)]; + int32_t filter_val = filter_data[Offset( + filter_shape, 0, filter_y, filter_x, output_channel)]; + // Accumulate with 64 bits accumulator. + // We assume maximum of 2^16 accumulations as with the 8-bit + // case so actually the value in the accumulator should not + // exceed 40 bits + acc += static_cast(filter_val) * + static_cast(input_val); + } + } + } + if (bias_data) { + acc += bias_data[output_channel]; + } + int32_t scaled_acc = MultiplyByQuantizedMultiplier( + acc, output_multiplier[output_channel], + output_shift[output_channel]); + scaled_acc = std::max(scaled_acc, output_activation_min); + scaled_acc = std::min(scaled_acc, output_activation_max); + output_data[Offset(output_shape, batch, out_y, out_x, + output_channel)] = + static_cast(scaled_acc); + } + } + } + } + } +} + +inline void DepthwiseConvHybridPerChannel( + const DepthwiseParams& params, float* scaling_factors_ptr, + const RuntimeShape& input_shape, const int8_t* input_data, + const RuntimeShape& filter_shape, const int8_t* filter_data, + const RuntimeShape& bias_shape, const float* bias_data, + const RuntimeShape& output_shape, float* output_data, + const float* per_channel_scale, int32_t* input_offset) { + const int stride_width = params.stride_width; + const int stride_height = params.stride_height; + const int dilation_width_factor = params.dilation_width_factor; + const int dilation_height_factor = params.dilation_height_factor; + const int pad_width = params.padding_values.width; + const int pad_height = params.padding_values.height; + const int depth_multiplier = params.depth_multiplier; + const float output_activation_min = params.float_activation_min; + const float output_activation_max = params.float_activation_max; + // Check dimensions of the 
tensors. + TFLITE_DCHECK_EQ(input_shape.DimensionsCount(), 4); + TFLITE_DCHECK_EQ(filter_shape.DimensionsCount(), 4); + TFLITE_DCHECK_EQ(output_shape.DimensionsCount(), 4); + + const int batches = MatchingDim(input_shape, 0, output_shape, 0); + const int output_depth = MatchingDim(filter_shape, 3, output_shape, 3); + const int input_height = input_shape.Dims(1); + const int input_width = input_shape.Dims(2); + const int input_depth = input_shape.Dims(3); + const int filter_height = filter_shape.Dims(1); + const int filter_width = filter_shape.Dims(2); + const int output_height = output_shape.Dims(1); + const int output_width = output_shape.Dims(2); + const int bias_depth = bias_shape.FlatSize(); + TFLITE_DCHECK_EQ(output_depth, input_depth * depth_multiplier); + TFLITE_DCHECK_EQ(bias_depth, output_depth); + + for (int batch = 0; batch < batches; ++batch) { + for (int out_y = 0; out_y < output_height; ++out_y) { + for (int out_x = 0; out_x < output_width; ++out_x) { + for (int in_channel = 0; in_channel < input_depth; ++in_channel) { + for (int m = 0; m < depth_multiplier; ++m) { + const int output_channel = m + in_channel * depth_multiplier; + const int in_x_origin = (out_x * stride_width) - pad_width; + const int in_y_origin = (out_y * stride_height) - pad_height; + int32_t acc = 0; + for (int filter_y = 0; filter_y < filter_height; ++filter_y) { + for (int filter_x = 0; filter_x < filter_width; ++filter_x) { + const int in_x = in_x_origin + dilation_width_factor * filter_x; + const int in_y = + in_y_origin + dilation_height_factor * filter_y; + // Zero padding by omitting the areas outside the image. 
+ const bool is_point_inside_image = + (in_x >= 0) && (in_x < input_width) && (in_y >= 0) && + (in_y < input_height); + if (is_point_inside_image) { + int32_t input_val = input_data[Offset( + input_shape, batch, in_y, in_x, in_channel)]; + int32_t filter_val = filter_data[Offset( + filter_shape, 0, filter_y, filter_x, output_channel)]; + acc += filter_val * (input_val - input_offset[batch]); + } + } + } + float acc_float = static_cast(acc); + acc_float *= + per_channel_scale[output_channel] * scaling_factors_ptr[batch]; + if (bias_data && output_channel < bias_depth) { + acc_float += bias_data[output_channel]; + } + output_data[Offset(output_shape, batch, out_y, out_x, + output_channel)] = + ActivationFunctionWithMinMax(acc_float, output_activation_min, + output_activation_max); + } + } + } + } + } +} + +} // namespace reference_integer_ops +} // namespace tflite + +#endif // TENSORFLOW_LITE_KERNELS_INTERNAL_REFERENCE_INTEGER_OPS_DEPTHWISE_CONV_H_ diff --git a/tensorflow/lite/kernels/internal/reference/integer_ops/fully_connected.h b/tensorflow/lite/kernels/internal/reference/integer_ops/fully_connected.h new file mode 100644 index 0000000..3a74402 --- /dev/null +++ b/tensorflow/lite/kernels/internal/reference/integer_ops/fully_connected.h @@ -0,0 +1,126 @@ +/* Copyright 2019 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/ +#ifndef TENSORFLOW_LITE_KERNELS_INTERNAL_REFERENCE_INTEGER_OPS_FULLY_CONNECTED_H_ +#define TENSORFLOW_LITE_KERNELS_INTERNAL_REFERENCE_INTEGER_OPS_FULLY_CONNECTED_H_ + +#include + +#include "tensorflow/lite/kernels/internal/common.h" + +namespace tflite { +namespace reference_integer_ops { + +// For per-channel functions, since it is defined in quantization spec that +// weights are symmetric +// (https://www.tensorflow.org/lite/performance/quantization_spec#symmetric_vs_asymmetric), +// zero_point (params.weights_offset) is always 0. +// However, for per-tensor functions, params.weights_offset is still applied for +// backward compatibility. +template +void FullyConnectedPerChannel( + const FullyConnectedParams& params, const int32_t* output_multiplier, + const int* output_shift, const RuntimeShape& input_shape, + const InputType* input_data, const RuntimeShape& filter_shape, + const WeightType* filter_data, const RuntimeShape& bias_shape, + const BiasType* bias_data, const RuntimeShape& output_shape, + OutputType* output_data) { + const int32_t input_offset = params.input_offset; + const int32_t output_offset = params.output_offset; + const int32_t output_activation_min = params.quantized_activation_min; + const int32_t output_activation_max = params.quantized_activation_max; + TFLITE_DCHECK_GE(filter_shape.DimensionsCount(), 2); + TFLITE_DCHECK_EQ(output_shape.DimensionsCount(), 2); + + TFLITE_DCHECK_LE(output_activation_min, output_activation_max); + const int filter_dim_count = filter_shape.DimensionsCount(); + const int batches = output_shape.Dims(0); + const int output_depth = output_shape.Dims(1); + TFLITE_DCHECK_LE(output_depth, filter_shape.Dims(filter_dim_count - 2)); + const int accum_depth = filter_shape.Dims(filter_dim_count - 1); + for (int b = 0; b < batches; ++b) { + for (int out_c = 0; out_c < output_depth; ++out_c) { + BiasType acc = 0; + for (int d = 0; d < 
accum_depth; ++d) { + int32_t input_val = input_data[b * accum_depth + d]; + int32_t filter_val = filter_data[out_c * accum_depth + d]; + acc += filter_val * (input_val + input_offset); + } + if (bias_data) { + acc += bias_data[out_c]; + } + int32_t acc_scaled = MultiplyByQuantizedMultiplier( + acc, output_multiplier[out_c], output_shift[out_c]); + acc_scaled += output_offset; + acc_scaled = std::max(acc_scaled, output_activation_min); + acc_scaled = std::min(acc_scaled, output_activation_max); + output_data[out_c + output_depth * b] = + static_cast(acc_scaled); + } + } +} + +template +void FullyConnected(const FullyConnectedParams& params, + const RuntimeShape& input_shape, + const InputType* input_data, + const RuntimeShape& filter_shape, + const WeightType* filter_data, + const RuntimeShape& bias_shape, const BiasType* bias_data, + const RuntimeShape& output_shape, OutputType* output_data) { + const int32_t input_offset = params.input_offset; + const int32_t filter_offset = params.weights_offset; + const int32_t output_offset = params.output_offset; + const int32_t output_multiplier = params.output_multiplier; + const int output_shift = params.output_shift; + const int32_t output_activation_min = params.quantized_activation_min; + const int32_t output_activation_max = params.quantized_activation_max; + TFLITE_DCHECK_GE(filter_shape.DimensionsCount(), 2); + TFLITE_DCHECK_GE(output_shape.DimensionsCount(), 1); + + TFLITE_DCHECK_LE(output_activation_min, output_activation_max); + const int filter_dim_count = filter_shape.DimensionsCount(); + const int output_dim_count = output_shape.DimensionsCount(); + const int batches = FlatSizeSkipDim(output_shape, output_dim_count - 1); + const int output_depth = output_shape.Dims(output_dim_count - 1); + TFLITE_DCHECK_LE(output_depth, filter_shape.Dims(filter_dim_count - 2)); + const int accum_depth = filter_shape.Dims(filter_dim_count - 1); + for (int b = 0; b < batches; ++b) { + for (int out_c = 0; out_c < output_depth; 
++out_c) { + BiasType acc = 0; + for (int d = 0; d < accum_depth; ++d) { + int32_t input_val = input_data[b * accum_depth + d]; + int32_t filter_val = filter_data[out_c * accum_depth + d]; + acc += (filter_val + filter_offset) * (input_val + input_offset); + } + if (bias_data) { + acc += bias_data[out_c]; + } + int32_t acc_scaled = + MultiplyByQuantizedMultiplier(acc, output_multiplier, output_shift); + acc_scaled += output_offset; + acc_scaled = std::max(acc_scaled, output_activation_min); + acc_scaled = std::min(acc_scaled, output_activation_max); + output_data[out_c + output_depth * b] = + static_cast(acc_scaled); + } + } +} + +} // namespace reference_integer_ops +} // namespace tflite + +#endif // TENSORFLOW_LITE_KERNELS_INTERNAL_REFERENCE_INTEGER_OPS_FULLY_CONNECTED_H_ diff --git a/tensorflow/lite/kernels/internal/reference/integer_ops/l2normalization.h b/tensorflow/lite/kernels/internal/reference/integer_ops/l2normalization.h new file mode 100644 index 0000000..164a836 --- /dev/null +++ b/tensorflow/lite/kernels/internal/reference/integer_ops/l2normalization.h @@ -0,0 +1,67 @@ +/* Copyright 2019 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/ +#ifndef TENSORFLOW_LITE_KERNELS_INTERNAL_REFERENCE_INTEGER_OPS_L2NORMALIZATION_H_ +#define TENSORFLOW_LITE_KERNELS_INTERNAL_REFERENCE_INTEGER_OPS_L2NORMALIZATION_H_ + +#include + +#include "tensorflow/lite/kernels/internal/common.h" + +namespace tflite { +namespace reference_integer_ops { + +inline void L2Normalization(int32_t input_zero_point, int32_t outer_size, + int32_t depth, const int8_t* input_data, + int8_t* output_data) { + static constexpr int8_t kMinInt8 = std::numeric_limits::min(); + static constexpr int8_t kMaxInt8 = std::numeric_limits::max(); + // The output scale must be in sync with Prepare(). + // Output is in 1/128 scale so the actual output range is nudged from [-1, 1] + // to [-1, 127/128]. + static constexpr int32_t kOutputScale = 7; + for (int outer_index = 0; outer_index < outer_size; ++outer_index) { + // int32_t = (int8_t - int8_t) ^ 2. + // ([-128, 127] - [-128, 127]) ^ 2 = [0, (2^8 - 1)^2] so the accumulator is + // safe from overflowing in at least 2^16 steps. + int32_t acc = 0; + for (int inner_index = 0; inner_index < depth; ++inner_index) { + int32_t input = + input_data[depth * outer_index + inner_index] - input_zero_point; + acc += input * input; + } + int32_t inv_l2norm_multiplier; + int inv_l2norm_shift; + GetInvSqrtQuantizedMultiplierExp(acc, kReverseShift, &inv_l2norm_multiplier, + &inv_l2norm_shift); + + for (int inner_index = 0; inner_index < depth; ++inner_index) { + int32_t input = + input_data[depth * outer_index + inner_index] - input_zero_point; + + // Rescale and downcast. Rescale is folded into the division. 
+ int32_t output_in_q24 = MultiplyByQuantizedMultiplier( + input, inv_l2norm_multiplier, inv_l2norm_shift + kOutputScale); + output_in_q24 = + std::min(static_cast(kMaxInt8), + std::max(static_cast(kMinInt8), output_in_q24)); + output_data[depth * outer_index + inner_index] = + static_cast(output_in_q24); + } + } +} +} // namespace reference_integer_ops +} // namespace tflite + +#endif // TENSORFLOW_LITE_KERNELS_INTERNAL_REFERENCE_INTEGER_OPS_L2NORMALIZATION_H_ diff --git a/tensorflow/lite/kernels/internal/reference/integer_ops/logistic.h b/tensorflow/lite/kernels/internal/reference/integer_ops/logistic.h new file mode 100644 index 0000000..16eff13 --- /dev/null +++ b/tensorflow/lite/kernels/internal/reference/integer_ops/logistic.h @@ -0,0 +1,121 @@ +/* Copyright 2019 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ +#ifndef TENSORFLOW_LITE_KERNELS_INTERNAL_REFERENCE_INTEGER_OPS_LOGISTIC_H_ +#define TENSORFLOW_LITE_KERNELS_INTERNAL_REFERENCE_INTEGER_OPS_LOGISTIC_H_ + +#include +#include + +#include "tensorflow/lite/kernels/internal/common.h" + +namespace tflite { +namespace reference_integer_ops { + +inline void Logistic(int32_t input_zero_point, int32_t input_range_radius, + int32_t input_multiplier, int32_t input_left_shift, + int32_t input_size, const int8_t* input_data, + int8_t* output_data) { + // Integer bits must be in sync with Prepare() function. 
+ static constexpr int32_t kInputIntegerBits = 4; + static constexpr int32_t kOutputIntegerBits = 8; + static constexpr int8_t kMinInt8 = std::numeric_limits::min(); + static constexpr int8_t kMaxInt8 = std::numeric_limits::max(); + static constexpr int32_t kOutputZeroPoint = -128; + + for (int i = 0; i < input_size; ++i) { + const int32_t input = + static_cast(input_data[i]) - input_zero_point; + if (input <= -input_range_radius) { + output_data[i] = kMinInt8; + } else if (input >= input_range_radius) { + output_data[i] = kMaxInt8; + } else { + const int32_t input_in_q4 = MultiplyByQuantizedMultiplier( + input, input_multiplier, input_left_shift); + using FixedPoint4 = gemmlowp::FixedPoint; + const int32_t output_in_q0 = + gemmlowp::logistic(FixedPoint4::FromRaw(input_in_q4)).raw(); + + // Rescale and downcast. + using gemmlowp::RoundingDivideByPOT; + int32_t output_in_q23 = + RoundingDivideByPOT(output_in_q0, 31 - kOutputIntegerBits); + output_in_q23 = std::min(std::max(output_in_q23 + kOutputZeroPoint, + static_cast(kMinInt8)), + static_cast(kMaxInt8)); + output_data[i] = static_cast(output_in_q23); + } + } +} + +inline void Logistic(int32_t input_multiplier, int32_t input_left_shift, + int32_t input_size, const int16_t* ptr_input_data, + int16_t* ptr_output_data) { + // We use the LUT for sigmoid and take into account, that + // tanh(x) = 2*sigmoid(2*x) - 1 + + // We scale by 3/4 to expand range [-8,8]->[-10.7,10.7]. + // In case of general parameter scale, multiplier 3 is taken into account + // in TanhPrepare function and it is included in + // input_multiplier already. + + TFLITE_DCHECK_GE(input_left_shift, 0); + if (input_multiplier == 0) { // power of two case + input_multiplier = 3 << input_left_shift; + input_left_shift = 0; + } + + int32_t round = (input_left_shift > 0) ? 
1 << (input_left_shift - 1) : 0; + + for (int i = 0; i < input_size; ++i, ptr_input_data++, ptr_output_data++) { + int32_t input_data = + ((*ptr_input_data) * input_multiplier + round) >> input_left_shift; + + // We do interpolation on unsigned values. + uint32_t abs_input_data = abs(input_data); + + // We divide by 2 power of 9, because + // we need to divide by 2 in power of 7 for + // the input conversion + 1/4 from the scale above. + + // Define uh as uint32_t type not to make this function overflow. + uint32_t uh = abs_input_data >> 9; + uint32_t result; + + if (uh >= 255) { + // Saturate to maximum. + result = 0x7FFF << 10; + } else { + uint32_t ua = sigmoid_table_uint16[uh]; + uint32_t ub = sigmoid_table_uint16[uh + 1]; + uint32_t ut = abs_input_data & 0x1ff; + // Interpolation is done using the fractional bit. + result = (ua << 9) + ut * (ub - ua); + } + + result = (input_data >= 0) ? (result + (1 << 9)) + : ((1 << (16 + 9)) - result + (1 << 9) - 1); + + // Back to 16-bit. + result >>= 10; + + *ptr_output_data = result; + } +} + +} // namespace reference_integer_ops +} // namespace tflite + +#endif // TENSORFLOW_LITE_KERNELS_INTERNAL_REFERENCE_INTEGER_OPS_LOGISTIC_H_ diff --git a/tensorflow/lite/kernels/internal/reference/integer_ops/mean.h b/tensorflow/lite/kernels/internal/reference/integer_ops/mean.h new file mode 100644 index 0000000..7e3f690 --- /dev/null +++ b/tensorflow/lite/kernels/internal/reference/integer_ops/mean.h @@ -0,0 +1,18 @@ +/* Copyright 2023 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + +http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ +#ifndef TENSORFLOW_LITE_KERNELS_INTERNAL_REFERENCE_INTEGER_OPS_MEAN_H_ +#define TENSORFLOW_LITE_KERNELS_INTERNAL_REFERENCE_INTEGER_OPS_MEAN_H_ + +#endif // TENSORFLOW_LITE_KERNELS_INTERNAL_REFERENCE_INTEGER_OPS_MEAN_H_ diff --git a/tensorflow/lite/kernels/internal/reference/integer_ops/mul.h b/tensorflow/lite/kernels/internal/reference/integer_ops/mul.h new file mode 100644 index 0000000..0506618 --- /dev/null +++ b/tensorflow/lite/kernels/internal/reference/integer_ops/mul.h @@ -0,0 +1,133 @@ +/* Copyright 2019 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/ +#ifndef TENSORFLOW_LITE_KERNELS_INTERNAL_REFERENCE_INTEGER_OPS_MUL_H_ +#define TENSORFLOW_LITE_KERNELS_INTERNAL_REFERENCE_INTEGER_OPS_MUL_H_ + +#include + +#include "fixedpoint/fixedpoint.h" +#include "ruy/profiler/instrumentation.h" // from @ruy +#include "tensorflow/lite/kernels/internal/common.h" + +namespace tflite { +namespace reference_integer_ops { + +template +void MulElementwise(int size, const ArithmeticParams& params, + const InputType* input1_data, const InputType* input2_data, + OutputType* output_data) { + for (int i = 0; i < size; ++i) { + const int32_t input1_val = params.input1_offset + input1_data[i]; + const int32_t input2_val = params.input2_offset + input2_data[i]; + const int32_t unclamped_result = + params.output_offset + + MultiplyByQuantizedMultiplier(input1_val * input2_val, + params.output_multiplier, + params.output_shift); + const int32_t clamped_output = + std::min(params.quantized_activation_max, + std::max(params.quantized_activation_min, unclamped_result)); + output_data[i] = static_cast(clamped_output); + } +} + +template +inline void Mul(const ArithmeticParams& params, + const RuntimeShape& input1_shape, const T* input1_data, + const RuntimeShape& input2_shape, const T* input2_data, + const RuntimeShape& output_shape, T* output_data) { + TFLITE_DCHECK_LE(params.quantized_activation_min, + params.quantized_activation_max); + ruy::profiler::ScopeLabel label("Mul/8bit"); + const int flat_size = + MatchingElementsSize(input1_shape, input2_shape, output_shape); + + MulElementwise(flat_size, params, input1_data, input2_data, output_data); +} + +// Mul with 16 bit inputs and int8_t outputs. 
+inline void Mul(const ArithmeticParams& params, + const RuntimeShape& input1_shape, const int16_t* input1_data, + const RuntimeShape& input2_shape, const int16_t* input2_data, + const RuntimeShape& output_shape, int8_t* output_data) { + ruy::profiler::ScopeLabel label("Mul/Int16Int8"); + int32_t output_offset = params.output_offset; + int32_t output_activation_min = params.quantized_activation_min; + int32_t output_activation_max = params.quantized_activation_max; + TFLITE_DCHECK_LE(output_activation_min, output_activation_max); + + const int flat_size = + MatchingElementsSize(input1_shape, input2_shape, output_shape); + + for (int i = 0; i < flat_size; i++) { + // F0 uses 0 integer bits, range [-1, 1]. + using F0 = gemmlowp::FixedPoint; + + F0 unclamped_result = + F0::FromRaw(input1_data[i]) * F0::FromRaw(input2_data[i]); + int16_t rescaled_result = + gemmlowp::RoundingDivideByPOT(unclamped_result.raw(), 8); + int16_t clamped_result = std::min( + output_activation_max - output_offset, rescaled_result); + clamped_result = std::max(output_activation_min - output_offset, + clamped_result); + output_data[i] = output_offset + clamped_result; + } +} + +template +inline void BroadcastMul4DSlow( + const ArithmeticParams& params, const RuntimeShape& input1_shape, + const T* input1_data, const RuntimeShape& input2_shape, + const T* input2_data, const RuntimeShape& output_shape, T* output_data) { + ruy::profiler::ScopeLabel label("BroadcastMul4DSlow"); + + NdArrayDesc<4> desc1; + NdArrayDesc<4> desc2; + // The input shapes are extended as part of NdArrayDesc initialization. 
+ NdArrayDescsForElementwiseBroadcast(input1_shape, input2_shape, &desc1, + &desc2); + const RuntimeShape extended_output_shape = + RuntimeShape::ExtendedShape(4, output_shape); + + for (int b = 0; b < extended_output_shape.Dims(0); ++b) { + for (int y = 0; y < extended_output_shape.Dims(1); ++y) { + for (int x = 0; x < extended_output_shape.Dims(2); ++x) { + for (int c = 0; c < extended_output_shape.Dims(3); ++c) { + const int32_t input1_val = + params.input1_offset + + input1_data[SubscriptToIndex(desc1, b, y, x, c)]; + const int32_t input2_val = + params.input2_offset + + input2_data[SubscriptToIndex(desc2, b, y, x, c)]; + const int32_t unclamped_result = + params.output_offset + + MultiplyByQuantizedMultiplier(input1_val * input2_val, + params.output_multiplier, + params.output_shift); + const int32_t clamped_output = std::min( + params.quantized_activation_max, + std::max(params.quantized_activation_min, unclamped_result)); + output_data[Offset(extended_output_shape, b, y, x, c)] = + static_cast(clamped_output); + } + } + } + } +} + +} // namespace reference_integer_ops +} // namespace tflite +#endif // TENSORFLOW_LITE_KERNELS_INTERNAL_REFERENCE_INTEGER_OPS_MUL_H_ diff --git a/tensorflow/lite/kernels/internal/reference/integer_ops/pooling.h b/tensorflow/lite/kernels/internal/reference/integer_ops/pooling.h new file mode 100644 index 0000000..4dc31d9 --- /dev/null +++ b/tensorflow/lite/kernels/internal/reference/integer_ops/pooling.h @@ -0,0 +1,264 @@ +/* Copyright 2018 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ +#ifndef TENSORFLOW_LITE_KERNELS_INTERNAL_REFERENCE_INTEGER_OPS_POOLING_H_ +#define TENSORFLOW_LITE_KERNELS_INTERNAL_REFERENCE_INTEGER_OPS_POOLING_H_ + +#include +#include + +#include "tensorflow/lite/kernels/internal/common.h" + +namespace tflite { +namespace reference_integer_ops { + +inline bool AveragePool(const PoolParams& params, + const RuntimeShape& input_shape, + const int8_t* input_data, + const RuntimeShape& output_shape, int8_t* output_data) { + TFLITE_DCHECK_LE(params.quantized_activation_min, + params.quantized_activation_max); + TFLITE_DCHECK_EQ(input_shape.DimensionsCount(), 4); + TFLITE_DCHECK_EQ(output_shape.DimensionsCount(), 4); + const int batches = MatchingDim(input_shape, 0, output_shape, 0); + const int depth = MatchingDim(input_shape, 3, output_shape, 3); + const int input_height = input_shape.Dims(1); + const int input_width = input_shape.Dims(2); + const int output_height = output_shape.Dims(1); + const int output_width = output_shape.Dims(2); + const int stride_height = params.stride_height; + const int stride_width = params.stride_width; + for (int batch = 0; batch < batches; ++batch) { + for (int out_y = 0; out_y < output_height; ++out_y) { + for (int out_x = 0; out_x < output_width; ++out_x) { + for (int channel = 0; channel < depth; ++channel) { + const int in_x_origin = + (out_x * stride_width) - params.padding_values.width; + const int in_y_origin = + (out_y * stride_height) - params.padding_values.height; + // Compute the boundaries of the filter region clamped so as to + // ensure that the filter window fits in the input array. 
+ const int filter_x_start = std::max(0, -in_x_origin); + const int filter_x_end = + std::min(params.filter_width, input_width - in_x_origin); + const int filter_y_start = std::max(0, -in_y_origin); + const int filter_y_end = + std::min(params.filter_height, input_height - in_y_origin); + int32_t acc = 0; + int filter_count = 0; + for (int filter_y = filter_y_start; filter_y < filter_y_end; + ++filter_y) { + for (int filter_x = filter_x_start; filter_x < filter_x_end; + ++filter_x) { + const int in_x = in_x_origin + filter_x; + const int in_y = in_y_origin + filter_y; + acc += + input_data[Offset(input_shape, batch, in_y, in_x, channel)]; + filter_count++; + } + } + if (filter_count == 0) return false; + // Round to the closest integer value. + acc = acc > 0 ? (acc + filter_count / 2) / filter_count + : (acc - filter_count / 2) / filter_count; + acc = std::max(acc, params.quantized_activation_min); + acc = std::min(acc, params.quantized_activation_max); + output_data[Offset(output_shape, batch, out_y, out_x, channel)] = + static_cast(acc); + } + } + } + } + return true; +} + +inline void MaxPool(const PoolParams& params, const RuntimeShape& input_shape, + const int8_t* input_data, const RuntimeShape& output_shape, + int8_t* output_data) { + TFLITE_DCHECK_LE(params.quantized_activation_min, + params.quantized_activation_max); + TFLITE_DCHECK_GE(params.quantized_activation_min, + std::numeric_limits::min()); + TFLITE_DCHECK_LE(params.quantized_activation_max, + std::numeric_limits::max()); + TFLITE_DCHECK_EQ(input_shape.DimensionsCount(), 4); + TFLITE_DCHECK_EQ(output_shape.DimensionsCount(), 4); + const int batches = MatchingDim(input_shape, 0, output_shape, 0); + const int depth = MatchingDim(input_shape, 3, output_shape, 3); + const int input_height = input_shape.Dims(1); + const int input_width = input_shape.Dims(2); + const int output_height = output_shape.Dims(1); + const int output_width = output_shape.Dims(2); + const int stride_height = params.stride_height; 
+ const int stride_width = params.stride_width; + for (int batch = 0; batch < batches; ++batch) { + for (int out_y = 0; out_y < output_height; ++out_y) { + for (int out_x = 0; out_x < output_width; ++out_x) { + for (int channel = 0; channel < depth; ++channel) { + const int in_x_origin = + (out_x * stride_width) - params.padding_values.width; + const int in_y_origin = + (out_y * stride_height) - params.padding_values.height; + // Compute the boundaries of the filter region clamped so as to + // ensure that the filter window fits in the input array. + const int filter_x_start = std::max(0, -in_x_origin); + const int filter_x_end = + std::min(params.filter_width, input_width - in_x_origin); + const int filter_y_start = std::max(0, -in_y_origin); + const int filter_y_end = + std::min(params.filter_height, input_height - in_y_origin); + int8_t max = std::numeric_limits::lowest(); + for (int filter_y = filter_y_start; filter_y < filter_y_end; + ++filter_y) { + for (int filter_x = filter_x_start; filter_x < filter_x_end; + ++filter_x) { + const int in_x = in_x_origin + filter_x; + const int in_y = in_y_origin + filter_y; + max = std::max( + max, + input_data[Offset(input_shape, batch, in_y, in_x, channel)]); + } + } + max = std::max(max, params.quantized_activation_min); + max = std::min(max, params.quantized_activation_max); + output_data[Offset(output_shape, batch, out_y, out_x, channel)] = + static_cast(max); + } + } + } + } +} + +inline bool AveragePool(const PoolParams& params, + const RuntimeShape& input_shape, + const int16_t* input_data, + const RuntimeShape& output_shape, + int16_t* output_data) { + TFLITE_DCHECK_LE(params.quantized_activation_min, + params.quantized_activation_max); + TFLITE_DCHECK_EQ(input_shape.DimensionsCount(), 4); + TFLITE_DCHECK_EQ(output_shape.DimensionsCount(), 4); + const int batches = MatchingDim(input_shape, 0, output_shape, 0); + const int depth = MatchingDim(input_shape, 3, output_shape, 3); + const int input_height = 
input_shape.Dims(1); + const int input_width = input_shape.Dims(2); + const int output_height = output_shape.Dims(1); + const int output_width = output_shape.Dims(2); + const int stride_height = params.stride_height; + const int stride_width = params.stride_width; + for (int batch = 0; batch < batches; ++batch) { + for (int out_y = 0; out_y < output_height; ++out_y) { + for (int out_x = 0; out_x < output_width; ++out_x) { + for (int channel = 0; channel < depth; ++channel) { + const int in_x_origin = + (out_x * stride_width) - params.padding_values.width; + const int in_y_origin = + (out_y * stride_height) - params.padding_values.height; + // Compute the boundaries of the filter region clamped so as to + // ensure that the filter window fits in the input array. + const int filter_x_start = std::max(0, -in_x_origin); + const int filter_x_end = + std::min(params.filter_width, input_width - in_x_origin); + const int filter_y_start = std::max(0, -in_y_origin); + const int filter_y_end = + std::min(params.filter_height, input_height - in_y_origin); + int32_t acc = 0; + int filter_count = 0; + for (int filter_y = filter_y_start; filter_y < filter_y_end; + ++filter_y) { + for (int filter_x = filter_x_start; filter_x < filter_x_end; + ++filter_x) { + const int in_x = in_x_origin + filter_x; + const int in_y = in_y_origin + filter_y; + acc += + input_data[Offset(input_shape, batch, in_y, in_x, channel)]; + filter_count++; + } + } + if (filter_count == 0) return false; + // Round to the closest integer value. + acc = acc > 0 ? 
(acc + filter_count / 2) / filter_count + : (acc - filter_count / 2) / filter_count; + acc = std::max(acc, params.quantized_activation_min); + acc = std::min(acc, params.quantized_activation_max); + output_data[Offset(output_shape, batch, out_y, out_x, channel)] = + static_cast(acc); + } + } + } + } + return true; +} + +inline void MaxPool(const PoolParams& params, const RuntimeShape& input_shape, + const int16_t* input_data, const RuntimeShape& output_shape, + int16_t* output_data) { + TFLITE_DCHECK_LE(params.quantized_activation_min, + params.quantized_activation_max); + TFLITE_DCHECK_GE(params.quantized_activation_min, + std::numeric_limits::min()); + TFLITE_DCHECK_LE(params.quantized_activation_max, + std::numeric_limits::max()); + TFLITE_DCHECK_EQ(input_shape.DimensionsCount(), 4); + TFLITE_DCHECK_EQ(output_shape.DimensionsCount(), 4); + const int batches = MatchingDim(input_shape, 0, output_shape, 0); + const int depth = MatchingDim(input_shape, 3, output_shape, 3); + const int input_height = input_shape.Dims(1); + const int input_width = input_shape.Dims(2); + const int output_height = output_shape.Dims(1); + const int output_width = output_shape.Dims(2); + const int stride_height = params.stride_height; + const int stride_width = params.stride_width; + for (int batch = 0; batch < batches; ++batch) { + for (int out_y = 0; out_y < output_height; ++out_y) { + for (int out_x = 0; out_x < output_width; ++out_x) { + for (int channel = 0; channel < depth; ++channel) { + const int in_x_origin = + (out_x * stride_width) - params.padding_values.width; + const int in_y_origin = + (out_y * stride_height) - params.padding_values.height; + // Compute the boundaries of the filter region clamped so as to + // ensure that the filter window fits in the input array. 
+ const int filter_x_start = std::max(0, -in_x_origin); + const int filter_x_end = + std::min(params.filter_width, input_width - in_x_origin); + const int filter_y_start = std::max(0, -in_y_origin); + const int filter_y_end = + std::min(params.filter_height, input_height - in_y_origin); + int16_t max = std::numeric_limits::lowest(); + for (int filter_y = filter_y_start; filter_y < filter_y_end; + ++filter_y) { + for (int filter_x = filter_x_start; filter_x < filter_x_end; + ++filter_x) { + const int in_x = in_x_origin + filter_x; + const int in_y = in_y_origin + filter_y; + max = std::max( + max, + input_data[Offset(input_shape, batch, in_y, in_x, channel)]); + } + } + max = std::max(max, params.quantized_activation_min); + max = std::min(max, params.quantized_activation_max); + output_data[Offset(output_shape, batch, out_y, out_x, channel)] = + static_cast(max); + } + } + } + } +} + +} // namespace reference_integer_ops +} // namespace tflite + +#endif // TENSORFLOW_LITE_KERNELS_INTERNAL_REFERENCE_INTEGER_OPS_POOLING_H_ diff --git a/tensorflow/lite/kernels/internal/reference/integer_ops/tanh.h b/tensorflow/lite/kernels/internal/reference/integer_ops/tanh.h new file mode 100644 index 0000000..7b1e003 --- /dev/null +++ b/tensorflow/lite/kernels/internal/reference/integer_ops/tanh.h @@ -0,0 +1,117 @@ +/* Copyright 2019 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/ +#ifndef TENSORFLOW_LITE_KERNELS_INTERNAL_REFERENCE_INTEGER_OPS_TANH_H_ +#define TENSORFLOW_LITE_KERNELS_INTERNAL_REFERENCE_INTEGER_OPS_TANH_H_ + +#include +#include + +#include "fixedpoint/fixedpoint.h" +#include "tensorflow/lite/kernels/internal/common.h" + +namespace tflite { +namespace reference_integer_ops { + +inline void Tanh(int32_t input_zero_point, int32_t input_range_radius, + int32_t input_multiplier, int32_t input_shift, + const RuntimeShape& input_shape, const int8_t* input_data, + const RuntimeShape& output_shape, int8_t* output_data) { + // Integer bits must be in sync with Prepare() function. + static constexpr int32_t kInputIntegerBits = 4; + static constexpr int32_t kOutputScale = 7; + static constexpr int32_t kMinInt8 = std::numeric_limits::min(); + static constexpr int32_t kMaxInt8 = std::numeric_limits::max(); + using F4 = gemmlowp::FixedPoint; + + const int flat_size = MatchingFlatSize(input_shape, output_shape); + + for (int i = 0; i < flat_size; ++i) { + const int32_t input = + static_cast(input_data[i]) - input_zero_point; + if (input <= -input_range_radius) { + output_data[i] = kMinInt8; + } else if (input >= input_range_radius) { + output_data[i] = kMaxInt8; + } else { + const int32_t input_in_q4 = + MultiplyByQuantizedMultiplier(input, input_multiplier, input_shift); + const int32_t output_in_q0 = + gemmlowp::tanh(F4::FromRaw(input_in_q4)).raw(); + + // Rescale and downcast. 
+ using gemmlowp::RoundingDivideByPOT; + int32_t output_in_q24 = + RoundingDivideByPOT(output_in_q0, 31 - kOutputScale); + output_in_q24 = std::min(std::max(output_in_q24, kMinInt8), kMaxInt8); + output_data[i] = static_cast(output_in_q24); + } + } +} + +inline void Tanh(int32_t input_multiplier, int32_t input_left_shift, + const RuntimeShape& input_shape, const int16_t* ptr_input_data, + const RuntimeShape& output_shape, int16_t* ptr_output_data) { + // We use the LUT for sigmoid and take into account, that + // tanh(x) = 2*sigmoid(2*x) - 1 + + // We scale by 3/4 to expand range [-8,8]->[-10.7,10.7]. + // In case of general parameter scale, multiplier 3 is taken into account + // in TanhPrepare function and it is included in + // input_multiplier already. + + if (input_multiplier == 0) { // power of two case + input_multiplier = 3 << input_left_shift; + input_left_shift = 0; + } + + int32_t round = (input_left_shift > 0) ? 1 << (input_left_shift - 1) : 0; + + int flat_size = MatchingFlatSize(input_shape, output_shape); + + for (int i = 0; i < flat_size; ++i, ptr_input_data++, ptr_output_data++) { + int32_t input_data = + ((*ptr_input_data) * input_multiplier + round) >> input_left_shift; + + uint32_t abs_input_data = abs(input_data); + uint32_t uh = abs_input_data >> 8; + int32_t result; + + if (uh >= 255) { + // Saturate to maximum. + result = 0xFFFF << 8; + } else { + uint32_t ua = sigmoid_table_uint16[uh]; + uint32_t ub = sigmoid_table_uint16[uh + 1]; + + uint8_t ut = abs_input_data & 0xFF; + + result = (ua << 8) + ut * (ub - ua); + } + + result = (input_data >= 0) + ? (result - (1 << (14 + 9)) + (1 << (9 - 2))) + : (-result + (1 << (14 + 9)) + (1 << (9 - 2)) - 1); + + // Convert back to 16-bit. 
+ result >>= (9 - 1); + + *ptr_output_data = result; + } +} + +} // namespace reference_integer_ops +} // namespace tflite + +#endif // TENSORFLOW_LITE_KERNELS_INTERNAL_REFERENCE_INTEGER_OPS_TANH_H_ diff --git a/tensorflow/lite/kernels/internal/reference/integer_ops/transpose_conv.h b/tensorflow/lite/kernels/internal/reference/integer_ops/transpose_conv.h new file mode 100644 index 0000000..40f99ce --- /dev/null +++ b/tensorflow/lite/kernels/internal/reference/integer_ops/transpose_conv.h @@ -0,0 +1,224 @@ +/* Copyright 2019 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ +#ifndef TENSORFLOW_LITE_KERNELS_INTERNAL_REFERENCE_INTEGER_OPS_TRANSPOSE_CONV_H_ +#define TENSORFLOW_LITE_KERNELS_INTERNAL_REFERENCE_INTEGER_OPS_TRANSPOSE_CONV_H_ + +#include + +#include "tensorflow/lite/kernels/internal/common.h" + +namespace tflite { +namespace reference_integer_ops { + +// Fixed-point per-channel-quantization transpose convolution reference kernel. 
+inline void TransposeConv( + const ConvParams& params, const int32_t* output_multiplier, + const int32_t* output_shift, const RuntimeShape& input_shape, + const int8_t* input_data, const RuntimeShape& filter_shape, + const int8_t* filter_data, const RuntimeShape& bias_shape, + const int32_t* bias_data, const RuntimeShape& output_shape, + int8_t* output_data, const RuntimeShape& im2col_shape, int8_t* im2col_data, + int32_t* scratch_buffer) { + const int stride_width = params.stride_width; + const int stride_height = params.stride_height; + const int pad_width = params.padding_values.width; + const int pad_height = params.padding_values.height; + TFLITE_DCHECK_EQ(input_shape.DimensionsCount(), 4); + TFLITE_DCHECK_EQ(filter_shape.DimensionsCount(), 4); + TFLITE_DCHECK_EQ(output_shape.DimensionsCount(), 4); + (void)im2col_data; // only used in optimized code. + (void)im2col_shape; // only used in optimized code. + + const int batches = MatchingDim(input_shape, 0, output_shape, 0); + const int input_depth = MatchingDim(input_shape, 3, filter_shape, 3); + const int output_depth = MatchingDim(filter_shape, 0, output_shape, 3); + if (bias_data) { + TFLITE_DCHECK_EQ(bias_shape.FlatSize(), output_depth); + } + const int input_height = input_shape.Dims(1); + const int input_width = input_shape.Dims(2); + const int filter_height = filter_shape.Dims(1); + const int filter_width = filter_shape.Dims(2); + const int output_height = output_shape.Dims(1); + const int output_width = output_shape.Dims(2); + const int32_t input_offset = params.input_offset; + const int32_t output_offset = params.output_offset; + const int32_t output_activation_min = params.quantized_activation_min; + const int32_t output_activation_max = params.quantized_activation_max; + TFLITE_DCHECK_LE(output_activation_min, output_activation_max); + + const int num_elements = output_shape.FlatSize(); + // We need to initialize scratch_buffer to all 0s, as we apply the same + // 'scatter' based trick as in float 
version. + memset(scratch_buffer, 0, num_elements * sizeof(int32_t)); + + // Loop through input elements one at a time. + for (int batch = 0; batch < batches; ++batch) { + for (int in_y = 0; in_y < input_height; ++in_y) { + for (int in_x = 0; in_x < input_width; ++in_x) { + for (int in_channel = 0; in_channel < input_depth; ++in_channel) { + // Loop through the output elements it will influence. + const int out_x_origin = (in_x * stride_width) - pad_width; + const int out_y_origin = (in_y * stride_height) - pad_height; + for (int filter_y = 0; filter_y < filter_height; ++filter_y) { + for (int filter_x = 0; filter_x < filter_width; ++filter_x) { + for (int out_channel = 0; out_channel < output_depth; + ++out_channel) { + // Compute output element location. + const int out_x = out_x_origin + filter_x; + const int out_y = out_y_origin + filter_y; + // We cannot accumulate out of bounds. + if ((out_x >= 0) && (out_x < output_width) && (out_y >= 0) && + (out_y < output_height)) { + const int8_t input_value = input_data[Offset( + input_shape, batch, in_y, in_x, in_channel)]; + const int8_t filter_value = + filter_data[Offset(filter_shape, out_channel, filter_y, + filter_x, in_channel)]; + scratch_buffer[Offset(output_shape, batch, out_y, out_x, + out_channel)] += + (input_value + input_offset) * filter_value; + } + } + } + } + } + } + } + } + + for (int batch = 0; batch < batches; ++batch) { + for (int out_y = 0; out_y < output_height; ++out_y) { + for (int out_x = 0; out_x < output_width; ++out_x) { + for (int out_channel = 0; out_channel < output_depth; ++out_channel) { + int32_t acc = scratch_buffer[Offset(output_shape, batch, out_y, out_x, + out_channel)]; + if (bias_data) { + acc += bias_data[out_channel]; + } + acc = MultiplyByQuantizedMultiplier( + acc, output_multiplier[out_channel], output_shift[out_channel]); + acc += output_offset; + acc = std::max(acc, output_activation_min); + acc = std::min(acc, output_activation_max); + output_data[Offset(output_shape, 
batch, out_y, out_x, out_channel)] = + static_cast(acc); + } + } + } + } +} + +// int16_t input (zero_point=0), int8_t filter, int32 or int64 accumulator +template +inline void TransposeConv( + const ConvParams& params, const int32_t* output_multiplier, + const int32_t* output_shift, const RuntimeShape& input_shape, + const int16_t* input_data, const RuntimeShape& filter_shape, + const int8_t* filter_data, const RuntimeShape& bias_shape, + const Scalar* bias_data, const RuntimeShape& output_shape, + int16_t* output_data, const RuntimeShape& im2col_shape, int8_t* im2col_data, + Scalar* scratch_buffer) { + const int stride_width = params.stride_width; + const int stride_height = params.stride_height; + const int pad_width = params.padding_values.width; + const int pad_height = params.padding_values.height; + TFLITE_DCHECK_EQ(input_shape.DimensionsCount(), 4); + TFLITE_DCHECK_EQ(filter_shape.DimensionsCount(), 4); + TFLITE_DCHECK_EQ(output_shape.DimensionsCount(), 4); + (void)im2col_data; // only used in optimized code. + (void)im2col_shape; // only used in optimized code. 
+ + const int batches = MatchingDim(input_shape, 0, output_shape, 0); + const int input_depth = MatchingDim(input_shape, 3, filter_shape, 3); + const int output_depth = MatchingDim(filter_shape, 0, output_shape, 3); + if (bias_data) { + TFLITE_DCHECK_EQ(bias_shape.FlatSize(), output_depth); + } + const int input_height = input_shape.Dims(1); + const int input_width = input_shape.Dims(2); + const int filter_height = filter_shape.Dims(1); + const int filter_width = filter_shape.Dims(2); + const int output_height = output_shape.Dims(1); + const int output_width = output_shape.Dims(2); + const int32_t output_activation_min = params.quantized_activation_min; + const int32_t output_activation_max = params.quantized_activation_max; + TFLITE_DCHECK_LE(output_activation_min, output_activation_max); + + const int num_elements = output_shape.FlatSize(); + // We need to initialize scratch_buffer to all 0s, as we apply the same + // 'scatter' based trick as in float version. + memset(scratch_buffer, 0, num_elements * sizeof(Scalar)); + + // Loop through input elements one at a time. + for (int batch = 0; batch < batches; ++batch) { + for (int in_y = 0; in_y < input_height; ++in_y) { + for (int in_x = 0; in_x < input_width; ++in_x) { + for (int in_channel = 0; in_channel < input_depth; ++in_channel) { + // Loop through the output elements it will influence. + const int out_x_origin = (in_x * stride_width) - pad_width; + const int out_y_origin = (in_y * stride_height) - pad_height; + for (int filter_y = 0; filter_y < filter_height; ++filter_y) { + for (int filter_x = 0; filter_x < filter_width; ++filter_x) { + for (int out_channel = 0; out_channel < output_depth; + ++out_channel) { + // Compute output element location. + const int out_x = out_x_origin + filter_x; + const int out_y = out_y_origin + filter_y; + // We cannot accumulate out of bounds. 
+ if ((out_x >= 0) && (out_x < output_width) && (out_y >= 0) && + (out_y < output_height)) { + const int32_t input_value = input_data[Offset( + input_shape, batch, in_y, in_x, in_channel)]; + const int32_t filter_value = + filter_data[Offset(filter_shape, out_channel, filter_y, + filter_x, in_channel)]; + scratch_buffer[Offset(output_shape, batch, out_y, out_x, + out_channel)] += + input_value * filter_value; + } + } + } + } + } + } + } + } + + for (int batch = 0; batch < batches; ++batch) { + for (int out_y = 0; out_y < output_height; ++out_y) { + for (int out_x = 0; out_x < output_width; ++out_x) { + for (int out_channel = 0; out_channel < output_depth; ++out_channel) { + Scalar acc = scratch_buffer[Offset(output_shape, batch, out_y, out_x, + out_channel)]; + if (bias_data) { + acc += bias_data[out_channel]; + } + int32_t scaled_acc = MultiplyByQuantizedMultiplier( + acc, output_multiplier[out_channel], output_shift[out_channel]); + scaled_acc = std::max(scaled_acc, output_activation_min); + scaled_acc = std::min(scaled_acc, output_activation_max); + output_data[Offset(output_shape, batch, out_y, out_x, out_channel)] = + static_cast(scaled_acc); + } + } + } + } +} + +} // namespace reference_integer_ops +} // namespace tflite + +#endif // TENSORFLOW_LITE_KERNELS_INTERNAL_REFERENCE_INTEGER_OPS_TRANSPOSE_CONV_H_ diff --git a/tensorflow/lite/kernels/internal/reference/l2normalization.h b/tensorflow/lite/kernels/internal/reference/l2normalization.h new file mode 100644 index 0000000..e5c91bf --- /dev/null +++ b/tensorflow/lite/kernels/internal/reference/l2normalization.h @@ -0,0 +1,90 @@ +/* Copyright 2020 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. 
+You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ +#ifndef TENSORFLOW_LITE_KERNELS_INTERNAL_REFERENCE_L2NORMALIZATION_H_ +#define TENSORFLOW_LITE_KERNELS_INTERNAL_REFERENCE_L2NORMALIZATION_H_ + +#include +#include + +#include "tensorflow/lite/core/c/common.h" +#include "tensorflow/lite/kernels/internal/common.h" +#include "tensorflow/lite/kernels/internal/types.h" + +namespace tflite { + +namespace reference_ops { + +inline void L2Normalization(const tflite::L2NormalizationParams& op_params, + const RuntimeShape& input_shape, + const float* input_data, + const RuntimeShape& output_shape, + float* output_data, float epsilon = 1e-6) { + const int trailing_dim = input_shape.DimensionsCount() - 1; + const int outer_size = + MatchingFlatSizeSkipDim(input_shape, trailing_dim, output_shape); + const int depth = + MatchingDim(input_shape, trailing_dim, output_shape, trailing_dim); + for (int i = 0; i < outer_size; ++i) { + float squared_l2_norm = 0; + for (int c = 0; c < depth; ++c) { + const float val = input_data[depth * i + c]; + squared_l2_norm += val * val; + } + float l2_norm = std::sqrt(squared_l2_norm); + l2_norm = std::max(l2_norm, epsilon); + for (int c = 0; c < depth; ++c) { + output_data[depth * i + c] = input_data[depth * i + c] / l2_norm; + } + } +} + +inline void L2Normalization(const tflite::L2NormalizationParams& op_params, + const RuntimeShape& input_shape, + const uint8_t* input_data, + const RuntimeShape& output_shape, + uint8_t* output_data) { + const int trailing_dim = input_shape.DimensionsCount() - 1; + const int depth = + 
MatchingDim(input_shape, trailing_dim, output_shape, trailing_dim); + const int outer_size = + MatchingFlatSizeSkipDim(input_shape, trailing_dim, output_shape); + const int32_t input_zero_point = op_params.input_zero_point; + + for (int i = 0; i < outer_size; ++i) { + int32_t square_l2_norm = 0; + for (int c = 0; c < depth; c++) { + int32_t diff = input_data[depth * i + c] - input_zero_point; + square_l2_norm += diff * diff; + } + int32_t inv_l2norm_multiplier; + int inv_l2norm_shift; + GetInvSqrtQuantizedMultiplierExp(square_l2_norm, kReverseShift, + &inv_l2norm_multiplier, &inv_l2norm_shift); + for (int c = 0; c < depth; c++) { + int32_t diff = input_data[depth * i + c] - input_zero_point; + int32_t rescaled_diff = MultiplyByQuantizedMultiplierSmallerThanOneExp( + 128 * diff, inv_l2norm_multiplier, inv_l2norm_shift); + int32_t unclamped_output_val = 128 + rescaled_diff; + int32_t output_val = + std::min(static_cast(255), + std::max(static_cast(0), unclamped_output_val)); + output_data[depth * i + c] = static_cast(output_val); + } + } +} + +} // namespace reference_ops +} // namespace tflite +#endif // TENSORFLOW_LITE_KERNELS_INTERNAL_REFERENCE_L2NORMALIZATION_H_ diff --git a/tensorflow/lite/kernels/internal/reference/leaky_relu.h b/tensorflow/lite/kernels/internal/reference/leaky_relu.h new file mode 100644 index 0000000..06f691a --- /dev/null +++ b/tensorflow/lite/kernels/internal/reference/leaky_relu.h @@ -0,0 +1,69 @@ +/* Copyright 2020 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ +#ifndef TENSORFLOW_LITE_KERNELS_INTERNAL_REFERENCE_LEAKY_RELU_H_ +#define TENSORFLOW_LITE_KERNELS_INTERNAL_REFERENCE_LEAKY_RELU_H_ + +#include +#include + +#include "tensorflow/lite/kernels/internal/common.h" + +namespace tflite { +namespace reference_ops { + +inline void LeakyRelu(const tflite::LeakyReluParams& params, + const RuntimeShape& input_shape, const float* input_data, + const RuntimeShape& output_shape, float* output_data) { + const int flat_size = MatchingFlatSize(input_shape, output_shape); + for (int i = 0; i < flat_size; ++i) { + const float val = input_data[i]; + // Note that alpha might be > 1 or < 0, so we don't use std::max here. + output_data[i] = val > 0 ? val : val * params.alpha; + } +} + +template +inline void QuantizeLeakyRelu(const LeakyReluParams& params, + const RuntimeShape& input_shape, + const T* input_data, + const RuntimeShape& output_shape, + T* output_data) { + const int flat_size = MatchingFlatSize(input_shape, output_shape); + static const int32_t quantized_min = std::numeric_limits::min(); + static const int32_t quantized_max = std::numeric_limits::max(); + for (int i = 0; i < flat_size; ++i) { + const int32_t input_value = input_data[i] - params.input_offset; + int32_t unclamped_output; + if (input_value >= 0) { + unclamped_output = params.output_offset + + MultiplyByQuantizedMultiplier( + input_value, params.output_multiplier_identity, + params.output_shift_identity); + } else { + unclamped_output = params.output_offset + + MultiplyByQuantizedMultiplier( + input_value, params.output_multiplier_alpha, + params.output_shift_alpha); + } + const T clamped_output = + std::min(quantized_max, std::max(quantized_min, unclamped_output)); + output_data[i] = static_cast(clamped_output); + } +} + +} // namespace reference_ops +} // namespace tflite + 
+#endif // TENSORFLOW_LITE_KERNELS_INTERNAL_REFERENCE_LEAKY_RELU_H_ diff --git a/tensorflow/lite/kernels/internal/reference/log_softmax.h b/tensorflow/lite/kernels/internal/reference/log_softmax.h new file mode 100644 index 0000000..394dd3a --- /dev/null +++ b/tensorflow/lite/kernels/internal/reference/log_softmax.h @@ -0,0 +1,256 @@ +/* Copyright 2021 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ +#ifndef TENSORFLOW_LITE_KERNELS_INTERNAL_REFERENCE_LOG_SOFTMAX_H_ +#define TENSORFLOW_LITE_KERNELS_INTERNAL_REFERENCE_LOG_SOFTMAX_H_ + +#include +#include +#include + +#include "fixedpoint/fixedpoint.h" +#include "tensorflow/lite/kernels/internal/common.h" + +namespace tflite { +namespace reference_ops { + +inline void LogSoftmax(const SoftmaxParams& params, + const RuntimeShape& input_shape, const float* input_data, + const RuntimeShape& output_shape, float* output_data) { + const int trailing_dim = input_shape.DimensionsCount() - 1; + const int outer_size = + MatchingFlatSizeSkipDim(input_shape, trailing_dim, output_shape); + const int depth = + MatchingDim(input_shape, trailing_dim, output_shape, trailing_dim); + + for (int i = 0; i < outer_size; ++i) { + // Find max element value which we'll use to ensure numerical stability + // taking advantage of the following equality: + // log(exp(x[i])/sum(exp(x[i]))) == log(exp(x[i]+C)/sum(exp(x[i]+C))) + float max = 
std::numeric_limits::lowest(); + for (int c = 0; c < depth; ++c) { + max = std::max(max, input_data[i * depth + c]); + } + + // Compute sum. + float sum = 0.f; + for (int c = 0; c < depth; ++c) { + sum += std::exp(input_data[i * depth + c] - max); + } + + // Compute result. + const float log_sum = std::log(sum); + for (int c = 0; c < depth; ++c) { + output_data[i * depth + c] = input_data[i * depth + c] - max - log_sum; + } + } +} + +inline void LogSoftmax(const SoftmaxParams& params, + const RuntimeShape& input_shape, + const uint8_t* input_data, + const RuntimeShape& output_shape, uint8_t* output_data) { + const int32_t input_multiplier = params.input_multiplier; + const int32_t input_left_shift = params.input_left_shift; + const int32_t reverse_scaling_divisor = params.reverse_scaling_divisor; + const int32_t reverse_scaling_right_shift = + params.reverse_scaling_right_shift; + const int diff_min = params.diff_min; + // The representation chosen for the input to the exp() function is Q5.26. + // We need to leave extra space since values that we skip might be as large + // as -32 before multiplying by input_beta_multiplier, and therefore as + // large as -16 afterwards. Note that exp(-8) is definitely not + // insignificant to accumulation, but exp(-16) definitely is. 
+ static constexpr int kScaledDiffIntegerBits = 5; + static constexpr int kAccumulationIntegerBits = 12; + static constexpr int kOutputIntegerBits = 4; + using FixedPointScaledDiff = + gemmlowp::FixedPoint; + using FixedPointAccum = + gemmlowp::FixedPoint; + + const int trailing_dim = input_shape.DimensionsCount() - 1; + const int outer_size = + MatchingFlatSizeSkipDim(input_shape, trailing_dim, output_shape); + const int depth = + MatchingDim(input_shape, trailing_dim, output_shape, trailing_dim); + + for (int i = 0; i < outer_size; ++i) { + uint8_t max_in_row = 0; + for (int c = 0; c < depth; ++c) { + max_in_row = std::max(max_in_row, input_data[i * depth + c]); + } + + FixedPointAccum sum_of_exps = FixedPointAccum::Zero(); + for (int c = 0; c < depth; ++c) { + int32_t input_diff = + static_cast(input_data[i * depth + c]) - max_in_row; + if (input_diff >= diff_min) { + const int32_t input_diff_rescaled = + MultiplyByQuantizedMultiplierGreaterThanOne( + input_diff, input_multiplier, input_left_shift); + const FixedPointScaledDiff scaled_diff_f8 = + FixedPointScaledDiff::FromRaw(input_diff_rescaled); + sum_of_exps = sum_of_exps + gemmlowp::Rescale( + exp_on_negative_values(scaled_diff_f8)); + } + } + + const int32_t fixed_log_sum_of_exps = + log_x_for_x_greater_than_or_equal_to_1( + sum_of_exps) + .raw(); + + // rescaled_diff_min is smallest representable in + // Q(kScaledDiffIntegerBits).(31-kScaledDiffIntegerBits) plus the + // log-sub-exps that will be subtracted in the loop. + // + // The thresholds diff_min, etc are negative. + const int rescaled_diff_min = + fixed_log_sum_of_exps + std::numeric_limits::lowest(); + const int adjusted_diff_min = + std::max(static_cast( + diff_min - 1), // Note use of > below instead of >= above. 
+ MultiplyByQuantizedMultiplierSmallerThanOneExp( + rescaled_diff_min, reverse_scaling_divisor, + -reverse_scaling_right_shift)); + + for (int c = 0; c < depth; ++c) { + int32_t input_diff = + static_cast(input_data[i * depth + c]) - max_in_row; + if (input_diff > adjusted_diff_min) { + const int32_t input_diff_rescaled = + MultiplyByQuantizedMultiplierGreaterThanOne( + input_diff, input_multiplier, input_left_shift); + int32_t unsat_output = + gemmlowp::RoundingDivideByPOT( + (input_diff_rescaled - fixed_log_sum_of_exps), + 31 - kScaledDiffIntegerBits - kOutputIntegerBits) + + 255; + + output_data[i * depth + c] = static_cast( + std::max(std::min(unsat_output, static_cast(255)), + static_cast(0))); + } else { + // Set output to smallest value. + output_data[i * depth + c] = 0; + } + } + } +} + +template +inline void LogSoftmaxQuantized(const SoftmaxParams& params, + const size_t outer_size, const size_t depth, + const RuntimeShape& input_shape, + const T* input_data, + const RuntimeShape& output_shape, + T* output_data) { + const int32_t input_multiplier = params.input_multiplier; + const int32_t input_left_shift = params.input_left_shift; + const int32_t reverse_scaling_divisor = params.reverse_scaling_divisor; + const int32_t reverse_scaling_right_shift = + params.reverse_scaling_right_shift; + const int diff_min = params.diff_min; + + static constexpr T kMinT8 = std::numeric_limits::min(); + static constexpr T kMaxT8 = std::numeric_limits::max(); + static constexpr int32_t kMinInt32 = std::numeric_limits::min(); + + // All IntegerBits must agree with Prepare function. + // Input is chosen as Q5.26 so exp(-1 * 2^5 * 2^-1) = exp(-16) is negligible. 
+ static constexpr int kInputIntegerBits = 5; + static constexpr int kAccumulationIntegerBits = 12; + static constexpr int kOutputIntegerBits = 4; + using F5 = gemmlowp::FixedPoint; + using F12 = gemmlowp::FixedPoint; + + for (size_t outer_index = 0; outer_index < outer_size; ++outer_index) { + T max_in_row = kMinT8; + for (size_t inner_index = 0; inner_index < depth; ++inner_index) { + max_in_row = + std::max(max_in_row, input_data[outer_index * depth + inner_index]); + } + + // Accumulator "sum_of_exps_in_q12" is safe from overflowing in 2^12 steps. + F12 sum_of_exps_in_q12 = F12::FromRaw(0); + for (size_t inner_index = 0; inner_index < depth; ++inner_index) { + int32_t input_diff = + static_cast(input_data[outer_index * depth + inner_index]) - + max_in_row; + if (input_diff >= diff_min) { + const int32_t input_diff_in_q5 = MultiplyByQuantizedMultiplier( + input_diff, input_multiplier, input_left_shift); + sum_of_exps_in_q12 = + sum_of_exps_in_q12 + + gemmlowp::Rescale( + exp_on_negative_values(F5::FromRaw(input_diff_in_q5))); + } + } + + const int32_t log_sum_of_exps_in_q5 = + log_x_for_x_greater_than_or_equal_to_1( + sum_of_exps_in_q12) + .raw(); + + // Potentially reduced the valid range. shifted_log_sum_of_exps_in_q5 is + // smallest representable in Q5.26 plus the log_sum_of_exps. + const int32_t shifted_log_sum_of_exps_in_q5 = + log_sum_of_exps_in_q5 + kMinInt32; + const int32_t adjusted_diff_min = + std::max(static_cast(diff_min - 1), + MultiplyByQuantizedMultiplier(shifted_log_sum_of_exps_in_q5, + reverse_scaling_divisor, + -reverse_scaling_right_shift)); + + for (size_t inner_index = 0; inner_index < depth; ++inner_index) { + int32_t input_diff = + static_cast(input_data[outer_index * depth + inner_index]) - + max_in_row; + // Note use of > below instead of >= above. + if (input_diff > adjusted_diff_min) { + const int32_t input_diff_in_q5 = MultiplyByQuantizedMultiplier( + input_diff, input_multiplier, input_left_shift); + + // Rescale and downcast. 
+ int32_t output_in_q27 = + gemmlowp::RoundingDivideByPOT( + (input_diff_in_q5 - log_sum_of_exps_in_q5), + 31 - kInputIntegerBits - kOutputIntegerBits) + + kMaxT8; + + output_in_q27 = + std::max(std::min(output_in_q27, static_cast(kMaxT8)), + static_cast(kMinT8)); + output_data[outer_index * depth + inner_index] = + static_cast(output_in_q27); + } else { + output_data[outer_index * depth + inner_index] = kMinT8; + } + } + } +} + +inline void LogSoftmax(const SoftmaxParams& params, const size_t outer_size, + const size_t depth, const RuntimeShape& input_shape, + const int8_t* input_data, + const RuntimeShape& output_shape, int8_t* output_data) { + LogSoftmaxQuantized(params, outer_size, depth, input_shape, input_data, + output_shape, output_data); +} + +} // namespace reference_ops +} // namespace tflite + +#endif // TENSORFLOW_LITE_KERNELS_INTERNAL_REFERENCE_LOG_SOFTMAX_H_ diff --git a/tensorflow/lite/kernels/internal/reference/logistic.h b/tensorflow/lite/kernels/internal/reference/logistic.h new file mode 100644 index 0000000..64b7133 --- /dev/null +++ b/tensorflow/lite/kernels/internal/reference/logistic.h @@ -0,0 +1,132 @@ +/* Copyright 2019 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/ +#ifndef TENSORFLOW_LITE_KERNELS_INTERNAL_REFERENCE_LOGISTIC_H_ +#define TENSORFLOW_LITE_KERNELS_INTERNAL_REFERENCE_LOGISTIC_H_ + +#include + +#include "fixedpoint/fixedpoint.h" +#include "tensorflow/lite/kernels/internal/common.h" +#include "tensorflow/lite/kernels/internal/cppmath.h" +#include "tensorflow/lite/kernels/internal/quantization_util.h" +#include "tensorflow/lite/kernels/internal/types.h" +#include "tensorflow/lite/kernels/op_macros.h" + +namespace tflite { +namespace reference_ops { + +inline void Logistic(const RuntimeShape& input_shape, const float* input_data, + const RuntimeShape& output_shape, float* output_data) { + const float cutoff_upper = 16.619047164916992188f; + const float cutoff_lower = -9.f; + + const int flat_size = MatchingFlatSize(input_shape, output_shape); + + // Rational for using approximation in reference kernel. + // 0. This approximation gives enough precision for float. + // 1. This works around an issue on an embedded chipset where exp() does not + // return correctly as expected - exp(x) should return inf when overflown + // not 1.701417 IEEE 754 defines representation for inf. + // 2. This will speed up calculation and is matching the behavior in the + // optimized kernels. (check the definition of scalar_logistic_op) + + for (int i = 0; i < flat_size; i++) { + float val = input_data[i]; + float result; + if (val > cutoff_upper) { + result = 1.0f; + } else if (val < cutoff_lower) { + result = std::exp(val); + } else { + result = 1.f / (1.f + std::exp(-val)); + } + output_data[i] = result; + } +} + +// Convenience version that allows, for example, generated-code calls to be +// uniform between data types. +inline void Logistic(const LogisticParams&, const RuntimeShape& input_shape, + const float* input_data, const RuntimeShape& output_shape, + float* output_data) { + // Drop params: not needed. 
+ Logistic(input_shape, input_data, output_shape, output_data); +} + +inline void Logistic(const LogisticParams& params, + const RuntimeShape& input_shape, const int16_t* input_data, + const RuntimeShape& output_shape, int16_t* output_data) { + const int flat_size = MatchingFlatSize(input_shape, output_shape); + + for (int i = 0; i < flat_size; i++) { + // F0 uses 0 integer bits, range [-1, 1]. + // This is the return type of math functions such as tanh, logistic, + // whose range is in [-1, 1]. + using F0 = gemmlowp::FixedPoint; + // F3 uses 3 integer bits, range [-8, 8], the input range expected here. + using F3 = gemmlowp::FixedPoint; + + const F3 input = F3::FromRaw(input_data[i]); + F0 output = gemmlowp::logistic(input); + output_data[i] = output.raw(); + } +} + +// Quantized int8_t logistic activation. Cheats by dequantizing and +// requantizing around the floating point logistic method. This implementation +// is slow on platforms without a floating point unit. + +// TODO(b/141211002): Delete this int8_t implementation once we can reuse the +// approach used in TFLite for int8_t Logistic. +inline void Logistic(const RuntimeShape& input_shape, const int8_t* input_data, + float input_scale, int input_zero_point, + const RuntimeShape& output_shape, int8_t* output_data, + float output_scale, int output_zero_point) { + const float cutoff_upper = 16.619047164916992188f; + const float cutoff_lower = -9.f; + + const int flat_size = MatchingFlatSize(input_shape, output_shape); + + // Rational for using approximation in reference kernel. + // 0. This approximation gives enough precision for float. + // 1. This works around an issue on an embedded chipset where exp() does not + // return correctly as expected - exp(x) should return inf when overflown + // not 1.701417 IEEE 754 defines representation for inf. + // 2. This will speed up calculation and is matching the behavior in the + // optimized kernels. 
(check the definition of scalar_logistic_op) + + for (int i = 0; i < flat_size; i++) { + // Dequantize. + float val = + static_cast((input_data[i] - input_zero_point) * input_scale); + float result; + if (val > cutoff_upper) { + result = 1.0f; + } else if (val < cutoff_lower) { + result = std::exp(val); + } else { + result = 1.f / (1.f + std::exp(-val)); + } + // Requantize + int8_t output = + static_cast(result / output_scale + output_zero_point); + output_data[i] = output; + } +} + +} // namespace reference_ops +} // namespace tflite + +#endif // TENSORFLOW_LITE_KERNELS_INTERNAL_REFERENCE_LOGISTIC_H_ diff --git a/tensorflow/lite/kernels/internal/reference/lstm_cell.h b/tensorflow/lite/kernels/internal/reference/lstm_cell.h new file mode 100644 index 0000000..17b113e --- /dev/null +++ b/tensorflow/lite/kernels/internal/reference/lstm_cell.h @@ -0,0 +1,422 @@ +/* Copyright 2022 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/ +#ifndef TENSORFLOW_LITE_KERNELS_INTERNAL_REFERENCE_LSTM_CELL_H_ +#define TENSORFLOW_LITE_KERNELS_INTERNAL_REFERENCE_LSTM_CELL_H_ + +#include +#include +#include + +#include "tensorflow/lite/kernels/internal/common.h" +#include "tensorflow/lite/kernels/internal/reference/concatenation.h" +#include "tensorflow/lite/kernels/internal/reference/fully_connected.h" +#include "tensorflow/lite/kernels/internal/types.h" + +namespace tflite { +namespace reference_ops { + +inline void LstmCell( + const LstmCellParams& params, const RuntimeShape& unextended_input_shape, + const float* input_data, const RuntimeShape& unextended_prev_activ_shape, + const float* prev_activ_data, const RuntimeShape& weights_shape, + const float* weights_data, const RuntimeShape& unextended_bias_shape, + const float* bias_data, const RuntimeShape& unextended_prev_state_shape, + const float* prev_state_data, + const RuntimeShape& unextended_output_state_shape, float* output_state_data, + const RuntimeShape& unextended_output_activ_shape, float* output_activ_data, + const RuntimeShape& unextended_concat_temp_shape, float* concat_temp_data, + const RuntimeShape& unextended_activ_temp_shape, float* activ_temp_data) { + TFLITE_DCHECK_LE(unextended_input_shape.DimensionsCount(), 4); + TFLITE_DCHECK_LE(unextended_prev_activ_shape.DimensionsCount(), 4); + TFLITE_DCHECK_LE(unextended_bias_shape.DimensionsCount(), 4); + TFLITE_DCHECK_LE(unextended_prev_state_shape.DimensionsCount(), 4); + TFLITE_DCHECK_LE(unextended_output_state_shape.DimensionsCount(), 4); + TFLITE_DCHECK_LE(unextended_output_activ_shape.DimensionsCount(), 4); + TFLITE_DCHECK_LE(unextended_concat_temp_shape.DimensionsCount(), 4); + TFLITE_DCHECK_LE(unextended_activ_temp_shape.DimensionsCount(), 4); + const RuntimeShape input_shape = + RuntimeShape::ExtendedShape(4, unextended_input_shape); + const RuntimeShape prev_activ_shape = + 
RuntimeShape::ExtendedShape(4, unextended_prev_activ_shape); + const RuntimeShape bias_shape = + RuntimeShape::ExtendedShape(4, unextended_bias_shape); + const RuntimeShape prev_state_shape = + RuntimeShape::ExtendedShape(4, unextended_prev_state_shape); + const RuntimeShape output_state_shape = + RuntimeShape::ExtendedShape(4, unextended_output_state_shape); + const RuntimeShape output_activ_shape = + RuntimeShape::ExtendedShape(4, unextended_output_activ_shape); + const RuntimeShape concat_temp_shape = + RuntimeShape::ExtendedShape(4, unextended_concat_temp_shape); + const RuntimeShape activ_temp_shape = + RuntimeShape::ExtendedShape(4, unextended_activ_temp_shape); + TFLITE_DCHECK_GE(weights_shape.DimensionsCount(), 2); + + const int weights_dim_count = weights_shape.DimensionsCount(); + const int batches = + MatchingDim(input_shape, 0, prev_activ_shape, 0, prev_state_shape, 0, + output_state_shape, 0, output_activ_shape, 0); + const int height = + MatchingDim(input_shape, 1, prev_activ_shape, 1, prev_state_shape, 1, + output_state_shape, 1, output_activ_shape, 1); + const int width = + MatchingDim(input_shape, 2, prev_activ_shape, 2, prev_state_shape, 2, + output_state_shape, 2, output_activ_shape, 2); + const int input_depth = input_shape.Dims(3); + const int prev_activ_depth = prev_activ_shape.Dims(3); + const int total_input_depth = prev_activ_depth + input_depth; + TFLITE_DCHECK_EQ(weights_shape.Dims(weights_dim_count - 1), + total_input_depth); + TFLITE_DCHECK_EQ(FlatSizeSkipDim(bias_shape, 3), 1); + const int intern_activ_depth = + MatchingDim(weights_shape, weights_dim_count - 2, bias_shape, 3); + TFLITE_DCHECK_EQ(weights_shape.FlatSize(), + intern_activ_depth * total_input_depth); + TFLITE_DCHECK_EQ(intern_activ_depth % 4, 0); + const int output_depth = + MatchingDim(prev_state_shape, 3, prev_activ_shape, 3, output_state_shape, + 3, output_activ_shape, 3); + TFLITE_DCHECK_EQ(output_depth, intern_activ_depth / 4); + + // Concatenate prev_activ and input 
data together + float const* concat_input_arrays_data[2] = {input_data, prev_activ_data}; + const RuntimeShape* concat_input_arrays_shapes[2] = {&input_shape, + &prev_activ_shape}; + tflite::ConcatenationParams concat_params; + concat_params.axis = 3; + concat_params.inputs_count = 2; + Concatenation(concat_params, concat_input_arrays_shapes, + concat_input_arrays_data, concat_temp_shape, concat_temp_data); + + // Fully connected + tflite::FullyConnectedParams fc_params; + fc_params.float_activation_min = std::numeric_limits::lowest(); + fc_params.float_activation_max = std::numeric_limits::max(); + FullyConnected(fc_params, concat_temp_shape, concat_temp_data, weights_shape, + weights_data, bias_shape, bias_data, activ_temp_shape, + activ_temp_data); + + // Memory state update (the LSTM "guts") + for (int b = 0; b < batches; ++b) { + for (int w = 0; w < width; ++w) { + for (int h = 0; h < height; ++h) { + for (int c = 0; c < output_depth; ++c) { + const float input_gate = + 1.f / + (1.f + std::exp(-activ_temp_data[Offset(activ_temp_shape, b, h, w, + 0 * output_depth + c)])); + const float new_input = std::tanh(activ_temp_data[Offset( + activ_temp_shape, b, h, w, 1 * output_depth + c)]); + const float forget_gate = + 1.f / + (1.f + std::exp(-activ_temp_data[Offset(activ_temp_shape, b, h, w, + 2 * output_depth + c)])); + const float output_gate = + 1.f / + (1.f + std::exp(-activ_temp_data[Offset(activ_temp_shape, b, h, w, + 3 * output_depth + c)])); + const float new_state = + input_gate * new_input + + forget_gate * + prev_state_data[Offset(prev_state_shape, b, h, w, c)]; + output_state_data[Offset(output_state_shape, b, h, w, c)] = new_state; + output_activ_data[Offset(output_activ_shape, b, h, w, c)] = + output_gate * std::tanh(new_state); + } + } + } + } +} + +// Quantized LSTM cell implementation. +// The quantization of the input, output arrays is as follows: +// - The input activations are quantized as uint8 on the interval +// [-1, 127/128]. 
+// The rationale for that is that is the natural interval for output +// activations (see next point) and these need to be concatenated together. +// We could accommodate different ranges by re-scaling, but we empirically +// found that setting the input activations range to be [-1, 127/128] in the +// first place, removing the need for re-scaling, greatly improves accuracy. +// - The output activations are quantized as uint8 on the interval +// [-1, 127/128]. +// The rationale for that is that the definition of a LSTM cell makes them +// intrinsically constrained in [-1, 1]; tweaking that to [-1, 127/128] +// makes for simpler, more accurate fixed-point arithmetic. +// - The output-at-previous-timestep state array is obviously quantized as +// the output activations. +// - The internal LSTM memory (not the output-at-previous-timestep, the other +// internal state array) is int16-quantized and may use any power-of-two, +// symmetric range i.e. [-2^N, 2^N * 32767/32768] for any N, which we call +// StateIntegerBits below, see the below discussion of that template +// parameter ("The StateIntegerBits template parameter"). +// - The output of the internal fully-connected node is int16-quantized +// on the interval [-8, 8 * 32767/32768], the rationale for which is +// explained just below ("Why [-8, 8] for fully-connected output?"). +// +// +// === The StateIntegerBits template parameter === +// +// The StateIntegerBits template parameter controls the fixed-point format used +// to represent the internal memory of the LSTM cell (not the +// output-at-previous-timestep, the other internal state array). It's currently +// a template parameter so that the model can control that. The most typical +// value for StateIntegerBits is 4. Other plausible values are anywhere between +// 3 and 5. We might eventually standardize on a single supported value, e.g. 4, +// and drop that template parameter. 
The reason why it can't be a runtime +// parameter is that this controls the fixed-point format used, i.e. we need to +// generate actually different code based on it. In particular, we generate code +// for a fixed-point tanh() implementation for that format, which internally +// uses a fixed-point exp() implementation, which internally uses a +// barrel-shifter with a number of steps that depends on StateIntegerBits. +// Another consequence of that is that a higher value of StateIntegerBits +// results in a more expensive implementation (more barrel shifter steps +// needed). +// +// +// === Why [-8, 8] for fully-connected output? === +// +// This array is only fed to Logistic and Tanh functions, for which +// the quantized implementation will want to use fixed-point arithmetic, +// requiring a power-of-two representation interval. Thus, we should right +// away quantize this array to a power-of-two interval; otherwise, +// implementation will need to rescale that, losing any benefit that a tighter +// representation interval might otherwise yield, while introducing some +// numerical error and computational overhead. +// +// Now, Logistic and Tanh +// are nearly constant (nearly equal to their horizontal asymptotes) +// outside of a small bounded interval around 0: +// +// Logistic(4) = 1 - 1.8e-2 Tanh(4) = 1 - 6.7e-4 +// Logistic(8) = 1 - 3.4e-4 Tanh(8) = 1 - 2.3e-7 +// Logistic(16) = 1 - 1.1e-7 Tanh(16) = 1 - 2.5e-14 +// +// From this, we see that clamping to [-4, 4] would be too inaccurate +// (the error of 1.8e-2 on Logistic would be felt even in 8bit precision) +// while clamping to [-16, 16] would make no difference even in float32. +// However, for a fixed-point implementation in 16-bit integers, using 5 +// integer bits to represent the [-16, 16] range would leave only 11 +// fractional bits, giving an increment of 2^-11 = 4.9e-4 between consecutive +// representable values. 
Notice that is higher than the +// worst-case clamping error with clamping to [-8, 8]: 3.4e-4 for Logistic. +// Using [-8, 8] thus seems like the better compromise overall, enjoying +// an increment of 2.4e-4 between representable values and a worst-case +// clamping error of 3.4e-4, both better than the increment of 4.9e-4 with +// [-16, 16]. +// +// Moreover, all other things being equal, it is nice to choose the narrower +// representation range, as that makes the implementation of fixed-point +// math functions a little cheaper (each integer bit requires an additional +// barrel-shifter atep in the implementation of exp(-x)). That is further +// reason to prefer [-8, 8] over [-16, 16]. The choice of [-16, 16] would make +// sense for 32-bit float or 32-bit fixed-point quantization, but we are +// aiming for 16-bit fixed-point quantization of these internal nodes here. +// +template +inline void LstmCell(const LstmCellParams& params, + const RuntimeShape& unextended_input_shape, + const uint8_t* input_data_uint8, + const RuntimeShape& unextended_prev_activ_shape, + const uint8_t* prev_activ_data_uint8, + const RuntimeShape& weights_shape, + const uint8_t* weights_data_uint8, + const RuntimeShape& unextended_bias_shape, + const int32_t* bias_data_int32, + const RuntimeShape& unextended_prev_state_shape, + const int16_t* prev_state_data_int16, + const RuntimeShape& unextended_output_state_shape, + int16_t* output_state_data_int16, + const RuntimeShape& unextended_output_activ_shape, + uint8_t* output_activ_data_uint8, + const RuntimeShape& unextended_concat_temp_shape, + uint8_t* concat_temp_data_uint8, + const RuntimeShape& unextended_activ_temp_shape, + int16_t* activ_temp_data_int16, void* gemmlowp_context) { + (void)gemmlowp_context; // only used in optimized code. 
+ int32_t weights_zero_point = params.weights_zero_point; + int32_t accum_multiplier = params.accum_multiplier; + int accum_shift = params.accum_shift; + TFLITE_DCHECK_LE(unextended_input_shape.DimensionsCount(), 4); + TFLITE_DCHECK_LE(unextended_prev_activ_shape.DimensionsCount(), 4); + TFLITE_DCHECK_LE(unextended_bias_shape.DimensionsCount(), 4); + TFLITE_DCHECK_LE(unextended_prev_state_shape.DimensionsCount(), 4); + TFLITE_DCHECK_LE(unextended_output_state_shape.DimensionsCount(), 4); + TFLITE_DCHECK_LE(unextended_output_activ_shape.DimensionsCount(), 4); + TFLITE_DCHECK_LE(unextended_concat_temp_shape.DimensionsCount(), 4); + TFLITE_DCHECK_LE(unextended_activ_temp_shape.DimensionsCount(), 4); + const RuntimeShape input_shape = + RuntimeShape::ExtendedShape(4, unextended_input_shape); + const RuntimeShape prev_activ_shape = + RuntimeShape::ExtendedShape(4, unextended_prev_activ_shape); + const RuntimeShape bias_shape = + RuntimeShape::ExtendedShape(4, unextended_bias_shape); + const RuntimeShape prev_state_shape = + RuntimeShape::ExtendedShape(4, unextended_prev_state_shape); + const RuntimeShape output_state_shape = + RuntimeShape::ExtendedShape(4, unextended_output_state_shape); + const RuntimeShape output_activ_shape = + RuntimeShape::ExtendedShape(4, unextended_output_activ_shape); + const RuntimeShape concat_temp_shape = + RuntimeShape::ExtendedShape(4, unextended_concat_temp_shape); + const RuntimeShape activ_temp_shape = + RuntimeShape::ExtendedShape(4, unextended_activ_temp_shape); + TFLITE_DCHECK_GE(weights_shape.DimensionsCount(), 2); + + // Gather dimensions information, and perform consistency checks. 
+ const int weights_dim_count = weights_shape.DimensionsCount(); + const int outer_size = MatchingFlatSizeSkipDim( + input_shape, 3, prev_activ_shape, prev_state_shape, output_state_shape, + output_activ_shape); + const int input_depth = input_shape.Dims(3); + const int prev_activ_depth = prev_activ_shape.Dims(3); + const int total_input_depth = prev_activ_depth + input_depth; + TFLITE_DCHECK_EQ(weights_shape.Dims(weights_dim_count - 1), + total_input_depth); + const int intern_activ_depth = + MatchingDim(weights_shape, weights_dim_count - 2, bias_shape, 3); + TFLITE_DCHECK_EQ(weights_shape.FlatSize(), + intern_activ_depth * total_input_depth); + TFLITE_DCHECK_EQ(FlatSizeSkipDim(bias_shape, 3), 1); + TFLITE_DCHECK_EQ(intern_activ_depth % 4, 0); + const int output_depth = + MatchingDim(prev_state_shape, 3, prev_activ_shape, 3, output_state_shape, + 3, output_activ_shape, 3); + TFLITE_DCHECK_EQ(output_depth, intern_activ_depth / 4); + const int fc_batches = FlatSizeSkipDim(activ_temp_shape, 3); + const int fc_output_depth = + MatchingDim(weights_shape, weights_dim_count - 2, activ_temp_shape, 3); + const int fc_accum_depth = total_input_depth; + TFLITE_DCHECK_EQ(fc_output_depth, 4 * output_depth); + + // Depth-concatenate prev_activ and input data together. + uint8_t const* concat_input_arrays_data[2] = {input_data_uint8, + prev_activ_data_uint8}; + const RuntimeShape* concat_input_arrays_shapes[2] = {&input_shape, + &prev_activ_shape}; + tflite::ConcatenationParams concat_params; + concat_params.axis = 3; + concat_params.inputs_count = 2; + Concatenation(concat_params, concat_input_arrays_shapes, + concat_input_arrays_data, concat_temp_shape, + concat_temp_data_uint8); + + // Implementation of the fully connected node inside the LSTM cell. + // The operands are 8-bit integers, the accumulators are internally 32bit + // integers, and the output is 16-bit fixed-point with 3 integer bits so + // the output range is [-2^3, 2^3] == [-8, 8]. 
The rationale for that + // is explained in the function comment above. + for (int b = 0; b < fc_batches; ++b) { + for (int out_c = 0; out_c < fc_output_depth; ++out_c) { + // Internal accumulation. + // Initialize accumulator with the bias-value. + int32_t accum = bias_data_int32[out_c]; + // Accumulation loop. + for (int d = 0; d < fc_accum_depth; ++d) { + int16_t input_val = + concat_temp_data_uint8[b * fc_accum_depth + d] - 128; + int16_t weights_val = + weights_data_uint8[out_c * fc_accum_depth + d] - weights_zero_point; + accum += input_val * weights_val; + } + // Down-scale the final int32 accumulator to the scale used by our + // (16-bit, using 3 integer bits) fixed-point format. The quantized + // multiplier and shift here have been pre-computed offline + // (e.g. by toco). + accum = + MultiplyByQuantizedMultiplier(accum, accum_multiplier, accum_shift); + // Saturate, cast to int16, and store to the temporary activations array. + accum = std::max(-32768, std::min(32767, accum)); + activ_temp_data_int16[out_c + fc_output_depth * b] = accum; + } + } + + // Rest of the LSTM cell: tanh and logistic math functions, and some adds + // and muls, all done in 16-bit fixed-point. + for (int b = 0; b < outer_size; ++b) { + for (int c = 0; c < output_depth; ++c) { + // Define the fixed-point data types that we will use here. All use + // int16 as the underlying integer type i.e. all are 16-bit fixed-point. + // They only differ by the number of integral vs. fractional bits, + // determining the range of values that they can represent. + // + // F0 uses 0 integer bits, range [-1, 1]. + // This is the return type of math functions such as tanh, logistic, + // whose range is in [-1, 1]. + using F0 = gemmlowp::FixedPoint; + // F3 uses 3 integer bits, range [-8, 8]. + // This is the range of the previous fully-connected node's output, + // which is our input here. 
+ using F3 = gemmlowp::FixedPoint; + // FS uses StateIntegerBits integer bits, range [-2^StateIntegerBits, + // 2^StateIntegerBits]. It's used to represent the internal state, whose + // number of integer bits is currently dictated by the model. See comment + // on the StateIntegerBits template parameter above. + using FS = gemmlowp::FixedPoint; + // Implementation of input gate, using fixed-point logistic function. + F3 input_gate_input = F3::FromRaw( + activ_temp_data_int16[b * fc_output_depth + 0 * output_depth + c]); + F0 input_gate_output = gemmlowp::logistic(input_gate_input); + // Implementation of input modulation gate, using fixed-point tanh + // function. + F3 input_modulation_gate_input = F3::FromRaw( + activ_temp_data_int16[b * fc_output_depth + 1 * output_depth + c]); + F0 input_modulation_gate_output = + gemmlowp::tanh(input_modulation_gate_input); + // Implementation of forget gate, using fixed-point logistic function. + F3 forget_gate_input = F3::FromRaw( + activ_temp_data_int16[b * fc_output_depth + 2 * output_depth + c]); + F0 forget_gate_output = gemmlowp::logistic(forget_gate_input); + // Implementation of output gate, using fixed-point logistic function. + F3 output_gate_input = F3::FromRaw( + activ_temp_data_int16[b * fc_output_depth + 3 * output_depth + c]); + F0 output_gate_output = gemmlowp::logistic(output_gate_input); + // Implementation of internal multiplication nodes, still in fixed-point. + F0 input_times_input_modulation = + input_gate_output * input_modulation_gate_output; + FS prev_state = FS::FromRaw(prev_state_data_int16[b * output_depth + c]); + FS prev_state_times_forget_state = forget_gate_output * prev_state; + // Implementation of internal addition node, saturating. + FS new_state = gemmlowp::SaturatingAdd( + gemmlowp::Rescale(input_times_input_modulation), + prev_state_times_forget_state); + // Implementation of last internal Tanh node, still in fixed-point. 
+ // Since a Tanh fixed-point implementation is specialized for a given + // number or integer bits, and each specialization can have a substantial + // code size, and we already used above a Tanh on an input with 3 integer + // bits, and per the table in the above function comment there is no + // significant accuracy to be lost by clamping to [-8, +8] for a + // 3-integer-bits representation, let us just do that. This helps people + // porting this to targets where code footprint must be minimized. + F3 new_state_f3 = gemmlowp::Rescale<3>(new_state); + F0 output_activ_int16 = output_gate_output * gemmlowp::tanh(new_state_f3); + // Store the new internal state back to memory, as 16-bit integers. + // Note: here we store the original value with StateIntegerBits, not + // the rescaled 3-integer-bits value fed to tanh. + output_state_data_int16[b * output_depth + c] = new_state.raw(); + // Down-scale the output activations to 8-bit integers, saturating, + // and store back to memory. + int16_t rescaled_output_activ = + gemmlowp::RoundingDivideByPOT(output_activ_int16.raw(), 8); + int16_t clamped_output_activ = std::max( + -128, std::min(127, rescaled_output_activ)); + output_activ_data_uint8[b * output_depth + c] = + 128 + clamped_output_activ; + } + } +} + +} // namespace reference_ops +} // namespace tflite +#endif // TENSORFLOW_LITE_KERNELS_INTERNAL_REFERENCE_LSTM_CELL_H_ diff --git a/tensorflow/lite/kernels/internal/reference/maximum_minimum.h b/tensorflow/lite/kernels/internal/reference/maximum_minimum.h new file mode 100644 index 0000000..cd11b41 --- /dev/null +++ b/tensorflow/lite/kernels/internal/reference/maximum_minimum.h @@ -0,0 +1,64 @@ +/* Copyright 2017 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. 
+You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ +#ifndef TENSORFLOW_LITE_KERNELS_INTERNAL_REFERENCE_MAXIMUM_MINIMUM_H_ +#define TENSORFLOW_LITE_KERNELS_INTERNAL_REFERENCE_MAXIMUM_MINIMUM_H_ + +#include "tensorflow/lite/kernels/internal/common.h" +#include "tensorflow/lite/kernels/internal/types.h" + +namespace tflite { +namespace reference_ops { + +template +void MaximumMinimumBroadcastSlow(const RuntimeShape& unextended_input1_shape, + const T* input1_data, + const RuntimeShape& unextended_input2_shape, + const T* input2_data, + const RuntimeShape& unextended_output_shape, + T* output_data, Op op) { + // Uses element-wise calculation if broadcast is not required. 
+ if (unextended_input1_shape == unextended_input2_shape) { + const int flat_size = + MatchingElementsSize(unextended_input1_shape, unextended_input2_shape, + unextended_output_shape); + for (int i = 0; i < flat_size; ++i) { + output_data[i] = op(input1_data[i], input2_data[i]); + } + } else { + TFLITE_DCHECK_LE(unextended_input1_shape.DimensionsCount(), N); + TFLITE_DCHECK_LE(unextended_input2_shape.DimensionsCount(), N); + TFLITE_DCHECK_LE(unextended_output_shape.DimensionsCount(), N); + + NdArrayDesc desc1; + NdArrayDesc desc2; + NdArrayDesc output_desc; + NdArrayDescsForElementwiseBroadcast( + unextended_input1_shape, unextended_input2_shape, &desc1, &desc2); + CopyDimsToDesc(RuntimeShape::ExtendedShape(N, unextended_output_shape), + &output_desc); + + auto maxmin_func = [&](int indexes[N]) { + output_data[SubscriptToIndex(output_desc, indexes)] = + op(input1_data[SubscriptToIndex(desc1, indexes)], + input2_data[SubscriptToIndex(desc2, indexes)]); + }; + NDOpsHelper(output_desc, maxmin_func); + } +} + +} // namespace reference_ops +} // namespace tflite + +#endif // TENSORFLOW_LITE_KERNELS_INTERNAL_REFERENCE_MAXIMUM_MINIMUM_H_ diff --git a/tensorflow/lite/kernels/internal/reference/mul.h b/tensorflow/lite/kernels/internal/reference/mul.h new file mode 100644 index 0000000..2767fef --- /dev/null +++ b/tensorflow/lite/kernels/internal/reference/mul.h @@ -0,0 +1,218 @@ +/* Copyright 2023 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/ +#ifndef TENSORFLOW_LITE_KERNELS_INTERNAL_REFERENCE_MUL_H_ +#define TENSORFLOW_LITE_KERNELS_INTERNAL_REFERENCE_MUL_H_ + +#include +#include + +#include "tensorflow/lite/kernels/internal/common.h" + +namespace tflite { + +namespace reference_ops { + +// Element-wise mul that can often be used for inner loop of broadcast Mul as +// well as the non-broadcast Mul. +inline void MulElementwise(int size, const ArithmeticParams& params, + const uint8_t* input1_data, + const uint8_t* input2_data, uint8_t* output_data) { + for (int i = 0; i < size; ++i) { + const int32_t input1_val = params.input1_offset + input1_data[i]; + const int32_t input2_val = params.input2_offset + input2_data[i]; + const int32_t unclamped_result = + params.output_offset + + MultiplyByQuantizedMultiplier(input1_val * input2_val, + params.output_multiplier, + params.output_shift); + const int32_t clamped_output = + std::min(params.quantized_activation_max, + std::max(params.quantized_activation_min, unclamped_result)); + output_data[i] = static_cast(clamped_output); + } +} + +template +inline void Mul(const ArithmeticParams& params, + const RuntimeShape& input1_shape, const T* input1_data, + const RuntimeShape& input2_shape, const T* input2_data, + const RuntimeShape& output_shape, T* output_data) { + T output_activation_min; + T output_activation_max; + GetActivationParams(params, &output_activation_min, &output_activation_max); + + const int flat_size = + MatchingExtendedShapeFlatSize(input1_shape, input2_shape, output_shape); + for (int i = 0; i < flat_size; ++i) { + output_data[i] = ActivationFunctionWithMinMax( + input1_data[i] * input2_data[i], output_activation_min, + output_activation_max); + } +} + +inline void Mul(const ArithmeticParams& params, + const RuntimeShape& input1_shape, + const std::complex* input1_data, + const RuntimeShape& input2_shape, + const std::complex* input2_data, + const RuntimeShape& 
output_shape, + std::complex* output_data) { + const int flat_size = + MatchingExtendedShapeFlatSize(input1_shape, input2_shape, output_shape); + for (int i = 0; i < flat_size; ++i) { + output_data[i] = input1_data[i] * input2_data[i]; + } +} + +inline void Mul(const ArithmeticParams& params, + const RuntimeShape& input1_shape, const uint8_t* input1_data, + const RuntimeShape& input2_shape, const uint8_t* input2_data, + const RuntimeShape& output_shape, uint8_t* output_data) { + TFLITE_DCHECK_LE(params.quantized_activation_min, + params.quantized_activation_max); + const int flat_size = + MatchingExtendedShapeFlatSize(input1_shape, input2_shape, output_shape); + + MulElementwise(flat_size, params, input1_data, input2_data, output_data); +} + +inline void BroadcastMul4DSlow(const ArithmeticParams& params, + const RuntimeShape& input1_shape, + const uint8_t* input1_data, + const RuntimeShape& input2_shape, + const uint8_t* input2_data, + const RuntimeShape& output_shape, + uint8_t* output_data) { + NdArrayDesc<4> desc1; + NdArrayDesc<4> desc2; + NdArrayDescsForElementwiseBroadcast(input1_shape, input2_shape, &desc1, + &desc2); + const RuntimeShape extended_output_shape = + RuntimeShape::ExtendedShape(4, output_shape); + + for (int b = 0; b < extended_output_shape.Dims(0); ++b) { + for (int y = 0; y < extended_output_shape.Dims(1); ++y) { + for (int x = 0; x < extended_output_shape.Dims(2); ++x) { + for (int c = 0; c < extended_output_shape.Dims(3); ++c) { + const int32_t input1_val = + params.input1_offset + + input1_data[SubscriptToIndex(desc1, b, y, x, c)]; + const int32_t input2_val = + params.input2_offset + + input2_data[SubscriptToIndex(desc2, b, y, x, c)]; + const int32_t unclamped_result = + params.output_offset + + MultiplyByQuantizedMultiplier(input1_val * input2_val, + params.output_multiplier, + params.output_shift); + const int32_t clamped_output = std::min( + params.quantized_activation_max, + std::max(params.quantized_activation_min, 
unclamped_result)); + output_data[Offset(extended_output_shape, b, y, x, c)] = + static_cast(clamped_output); + } + } + } + } +} + +template +inline typename std::enable_if< + !is_small_integer::value || enable_for_short_integers, void>::type +BroadcastMul4DSlow(const ArithmeticParams& params, + const RuntimeShape& unextended_input1_shape, + const T* input1_data, + const RuntimeShape& unextended_input2_shape, + const T* input2_data, + const RuntimeShape& unextended_output_shape, + T* output_data) { + T output_activation_min; + T output_activation_max; + GetActivationParams(params, &output_activation_min, &output_activation_max); + + TFLITE_DCHECK_LE(unextended_input1_shape.DimensionsCount(), 4); + TFLITE_DCHECK_LE(unextended_input2_shape.DimensionsCount(), 4); + TFLITE_DCHECK_LE(unextended_output_shape.DimensionsCount(), 4); + const RuntimeShape output_shape = + RuntimeShape::ExtendedShape(4, unextended_output_shape); + + NdArrayDesc<4> desc1; + NdArrayDesc<4> desc2; + NdArrayDescsForElementwiseBroadcast(unextended_input1_shape, + unextended_input2_shape, &desc1, &desc2); + + // In Tensorflow, the dimensions are canonically named (batch_number, row, + // col, channel), with extents (batches, height, width, depth), with the + // trailing dimension changing most rapidly (channels has the smallest stride, + // typically 1 element). + // + // In generated C code, we store arrays with the dimensions reversed. The + // first dimension has smallest stride. + // + // We name our variables by their Tensorflow convention, but generate C code + // nesting loops such that the innermost loop has the smallest stride for the + // best cache behavior. 
+ for (int b = 0; b < output_shape.Dims(0); ++b) { + for (int y = 0; y < output_shape.Dims(1); ++y) { + for (int x = 0; x < output_shape.Dims(2); ++x) { + for (int c = 0; c < output_shape.Dims(3); ++c) { + output_data[Offset(output_shape, b, y, x, c)] = + ActivationFunctionWithMinMax( + input1_data[SubscriptToIndex(desc1, b, y, x, c)] * + input2_data[SubscriptToIndex(desc2, b, y, x, c)], + output_activation_min, output_activation_max); + } + } + } + } +} + +inline void BroadcastMul4DSlow(const ArithmeticParams& params, + const RuntimeShape& unextended_input1_shape, + const std::complex* input1_data, + const RuntimeShape& unextended_input2_shape, + const std::complex* input2_data, + const RuntimeShape& unextended_output_shape, + std::complex* output_data) { + TFLITE_DCHECK_LE(unextended_input1_shape.DimensionsCount(), 4); + TFLITE_DCHECK_LE(unextended_input2_shape.DimensionsCount(), 4); + TFLITE_DCHECK_LE(unextended_output_shape.DimensionsCount(), 4); + const RuntimeShape output_shape = + RuntimeShape::ExtendedShape(4, unextended_output_shape); + + NdArrayDesc<4> desc1; + NdArrayDesc<4> desc2; + NdArrayDescsForElementwiseBroadcast(unextended_input1_shape, + unextended_input2_shape, &desc1, &desc2); + + for (int b = 0; b < output_shape.Dims(0); ++b) { + for (int y = 0; y < output_shape.Dims(1); ++y) { + for (int x = 0; x < output_shape.Dims(2); ++x) { + for (int c = 0; c < output_shape.Dims(3); ++c) { + output_data[Offset(output_shape, b, y, x, c)] = + input1_data[SubscriptToIndex(desc1, b, y, x, c)] * + input2_data[SubscriptToIndex(desc2, b, y, x, c)]; + } + } + } + } +} + +} // namespace reference_ops +} // namespace tflite + +#endif // TENSORFLOW_LITE_KERNELS_INTERNAL_REFERENCE_MUL_H_ diff --git a/tensorflow/lite/kernels/internal/reference/neg.h b/tensorflow/lite/kernels/internal/reference/neg.h new file mode 100644 index 0000000..e127883 --- /dev/null +++ b/tensorflow/lite/kernels/internal/reference/neg.h @@ -0,0 +1,37 @@ +/* Copyright 2019 The TensorFlow 
Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ +#ifndef TENSORFLOW_LITE_KERNELS_INTERNAL_REFERENCE_NEG_H_ +#define TENSORFLOW_LITE_KERNELS_INTERNAL_REFERENCE_NEG_H_ + +#include "tensorflow/lite/kernels/internal/types.h" + +namespace tflite { + +namespace reference_ops { + +template +inline void Negate(const RuntimeShape& input_shape, const T* input_data, + const RuntimeShape& output_shape, T* output_data) { + const int flat_size = MatchingFlatSize(input_shape, output_shape); + + for (int i = 0; i < flat_size; ++i) { + output_data[i] = -input_data[i]; + } +} + +} // namespace reference_ops +} // namespace tflite + +#endif // TENSORFLOW_LITE_KERNELS_INTERNAL_REFERENCE_NEG_H_ diff --git a/tensorflow/lite/kernels/internal/reference/pad.h b/tensorflow/lite/kernels/internal/reference/pad.h new file mode 100644 index 0000000..2758944 --- /dev/null +++ b/tensorflow/lite/kernels/internal/reference/pad.h @@ -0,0 +1,169 @@ +/* Copyright 2019 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. 
+You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#ifndef TENSORFLOW_LITE_KERNELS_INTERNAL_REFERENCE_PAD_H_ +#define TENSORFLOW_LITE_KERNELS_INTERNAL_REFERENCE_PAD_H_ + +#include + +#include "tensorflow/lite/kernels/internal/types.h" + +namespace tflite { + +namespace reference_ops { + +// TFLite Pad supports activation tensors with up to 5 dimensions. +constexpr int PadKernelMaxDimensionCount() { return 5; } + +// There are two versions of pad: Pad and PadV2. In PadV2 there is a second +// scalar input that provides the padding value. Therefore pad_value_ptr can be +// equivalent to a simple input1_data. For Pad, it should point to a zero +// value. +// +// Note that two typenames are required, so that T=P=int32_t is considered a +// specialization distinct from P=int32_t. +template +inline void PadImpl(const tflite::PadParams& op_params, + const RuntimeShape& input_shape, const T* input_data, + const P* pad_value_ptr, const RuntimeShape& output_shape, + T* output_data) { + const RuntimeShape ext_input_shape = + RuntimeShape::ExtendedShape(PadKernelMaxDimensionCount(), input_shape); + const RuntimeShape ext_output_shape = + RuntimeShape::ExtendedShape(PadKernelMaxDimensionCount(), output_shape); + TFLITE_DCHECK_LE(op_params.left_padding_count, PadKernelMaxDimensionCount()); + TFLITE_DCHECK_LE(op_params.right_padding_count, PadKernelMaxDimensionCount()); + + // Runtime calls are currently fixed at 5 dimensions. Copy inputs so we can + // pad them to 5 dims (yes, we are "padding the padding"). 
+ int left_padding_copy[PadKernelMaxDimensionCount()]; + for (int i = 0; i < PadKernelMaxDimensionCount(); i++) { + left_padding_copy[i] = 0; + } + for (int i = 0; i < op_params.left_padding_count; ++i) { + left_padding_copy[i + PadKernelMaxDimensionCount() - + op_params.left_padding_count] = op_params.left_padding[i]; + } + int right_padding_copy[PadKernelMaxDimensionCount()]; + for (int i = 0; i < PadKernelMaxDimensionCount(); i++) { + right_padding_copy[i] = 0; + } + for (int i = 0; i < op_params.right_padding_count; ++i) { + right_padding_copy[i + PadKernelMaxDimensionCount() - + op_params.right_padding_count] = + op_params.right_padding[i]; + } + + const int output_batch = ext_output_shape.Dims(0); + const int output_plane = ext_output_shape.Dims(1); + const int output_height = ext_output_shape.Dims(2); + const int output_width = ext_output_shape.Dims(3); + const int output_depth = ext_output_shape.Dims(4); + + const int left_b_padding = left_padding_copy[0]; + const int left_p_padding = left_padding_copy[1]; + const int left_h_padding = left_padding_copy[2]; + const int left_w_padding = left_padding_copy[3]; + const int left_d_padding = left_padding_copy[4]; + + const int right_b_padding = right_padding_copy[0]; + const int right_p_padding = right_padding_copy[1]; + const int right_h_padding = right_padding_copy[2]; + const int right_w_padding = right_padding_copy[3]; + const int right_d_padding = right_padding_copy[4]; + + const T pad_value = *pad_value_ptr; + + const T* in_ptr = input_data; + T* out_ptr = output_data; + for (int out_b = 0; out_b < output_batch; ++out_b) { + for (int out_p = 0; out_p < output_plane; ++out_p) { + for (int out_h = 0; out_h < output_height; ++out_h) { + for (int out_w = 0; out_w < output_width; ++out_w) { + for (int out_d = 0; out_d < output_depth; ++out_d) { + if (out_b < left_b_padding || + out_b >= output_batch - right_b_padding || + out_p < left_p_padding || + out_p >= output_plane - right_p_padding || + out_h < 
left_h_padding || + out_h >= output_height - right_h_padding || + out_w < left_w_padding || + out_w >= output_width - right_w_padding || + out_d < left_d_padding || + out_d >= output_depth - right_d_padding) { + *out_ptr++ = pad_value; + } else { + *out_ptr++ = *in_ptr++; + } + } + } + } + } + } +} + +template +inline void Pad(const tflite::PadParams& op_params, + const RuntimeShape& input_shape, const T* input_data, + const P* pad_value_ptr, const RuntimeShape& output_shape, + T* output_data) { + PadImpl(op_params, input_shape, input_data, pad_value_ptr, output_shape, + output_data); +} + +// The second (pad-value) input can be int32_t when, say, the first is uint8_t. +template +inline void Pad(const tflite::PadParams& op_params, + const RuntimeShape& input_shape, const T* input_data, + const int32_t* pad_value_ptr, const RuntimeShape& output_shape, + T* output_data) { + const T converted_pad_value = static_cast(*pad_value_ptr); + PadImpl(op_params, input_shape, input_data, &converted_pad_value, + output_shape, output_data); +} + +// This version avoids conflicting template matching. 
+template <> +inline void Pad(const tflite::PadParams& op_params, + const RuntimeShape& input_shape, const int32_t* input_data, + const int32_t* pad_value_ptr, const RuntimeShape& output_shape, + int32_t* output_data) { + PadImpl(op_params, input_shape, input_data, pad_value_ptr, output_shape, + output_data); +} + +template +inline void PadImageStyle(const tflite::PadParams& op_params, + const RuntimeShape& input_shape, const T* input_data, + const P* pad_value_ptr, + const RuntimeShape& output_shape, T* output_data) { + Pad(op_params, input_shape, input_data, pad_value_ptr, output_shape, + output_data); +} + +template +inline void PadImageStyle(const tflite::PadParams& op_params, + const RuntimeShape& input_shape, + const float* input_data, const P* pad_value_ptr, + const RuntimeShape& output_shape, + float* output_data) { + Pad(op_params, input_shape, input_data, pad_value_ptr, output_shape, + output_data); +} + +} // namespace reference_ops +} // namespace tflite + +#endif // TENSORFLOW_LITE_KERNELS_INTERNAL_REFERENCE_PAD_H_ diff --git a/tensorflow/lite/kernels/internal/reference/pooling.h b/tensorflow/lite/kernels/internal/reference/pooling.h new file mode 100644 index 0000000..fe17484 --- /dev/null +++ b/tensorflow/lite/kernels/internal/reference/pooling.h @@ -0,0 +1,303 @@ +/* Copyright 2019 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/ +#ifndef TENSORFLOW_LITE_KERNELS_INTERNAL_REFERENCE_POOLING_H_ +#define TENSORFLOW_LITE_KERNELS_INTERNAL_REFERENCE_POOLING_H_ + +#include + +#include "tensorflow/lite/kernels/internal/common.h" +#include "tensorflow/lite/kernels/internal/cppmath.h" +#include "tensorflow/lite/kernels/internal/quantization_util.h" +#include "tensorflow/lite/kernels/internal/types.h" + +namespace tflite { +namespace reference_ops { + +inline bool AveragePool(const PoolParams& params, + const RuntimeShape& input_shape, + const float* input_data, + const RuntimeShape& output_shape, float* output_data) { + TFLITE_DCHECK_EQ(input_shape.DimensionsCount(), 4); + TFLITE_DCHECK_EQ(output_shape.DimensionsCount(), 4); + const int batches = MatchingDim(input_shape, 0, output_shape, 0); + const int depth = MatchingDim(input_shape, 3, output_shape, 3); + const int input_height = input_shape.Dims(1); + const int input_width = input_shape.Dims(2); + const int output_height = output_shape.Dims(1); + const int output_width = output_shape.Dims(2); + const int stride_height = params.stride_height; + const int stride_width = params.stride_width; + for (int batch = 0; batch < batches; ++batch) { + for (int out_y = 0; out_y < output_height; ++out_y) { + for (int out_x = 0; out_x < output_width; ++out_x) { + for (int channel = 0; channel < depth; ++channel) { + const int in_x_origin = + (out_x * stride_width) - params.padding_values.width; + const int in_y_origin = + (out_y * stride_height) - params.padding_values.height; + // Compute the boundaries of the filter region clamped so as to + // ensure that the filter window fits in the input array. 
+ const int filter_x_start = std::max(0, -in_x_origin); + const int filter_x_end = + std::min(params.filter_width, input_width - in_x_origin); + const int filter_y_start = std::max(0, -in_y_origin); + const int filter_y_end = + std::min(params.filter_height, input_height - in_y_origin); + float total = 0.f; + float filter_count = 0; + for (int filter_y = filter_y_start; filter_y < filter_y_end; + ++filter_y) { + for (int filter_x = filter_x_start; filter_x < filter_x_end; + ++filter_x) { + const int in_x = in_x_origin + filter_x; + const int in_y = in_y_origin + filter_y; + total += + input_data[Offset(input_shape, batch, in_y, in_x, channel)]; + filter_count++; + } + } + if (filter_count == 0) return false; + const float average = total / filter_count; + output_data[Offset(output_shape, batch, out_y, out_x, channel)] = + ActivationFunctionWithMinMax(average, params.float_activation_min, + params.float_activation_max); + } + } + } + } + return true; +} + +inline bool AveragePool(const PoolParams& params, + const RuntimeShape& input_shape, + const uint8_t* input_data, + const RuntimeShape& output_shape, + uint8_t* output_data) { + TFLITE_DCHECK_LE(params.quantized_activation_min, + params.quantized_activation_max); + TFLITE_DCHECK_EQ(input_shape.DimensionsCount(), 4); + TFLITE_DCHECK_EQ(output_shape.DimensionsCount(), 4); + const int batches = MatchingDim(input_shape, 0, output_shape, 0); + const int depth = MatchingDim(input_shape, 3, output_shape, 3); + const int input_height = input_shape.Dims(1); + const int input_width = input_shape.Dims(2); + const int output_height = output_shape.Dims(1); + const int output_width = output_shape.Dims(2); + const int stride_height = params.stride_height; + const int stride_width = params.stride_width; + for (int batch = 0; batch < batches; ++batch) { + for (int out_y = 0; out_y < output_height; ++out_y) { + for (int out_x = 0; out_x < output_width; ++out_x) { + for (int channel = 0; channel < depth; ++channel) { + const int 
in_x_origin = + (out_x * stride_width) - params.padding_values.width; + const int in_y_origin = + (out_y * stride_height) - params.padding_values.height; + // Compute the boundaries of the filter region clamped so as to + // ensure that the filter window fits in the input array. + const int filter_x_start = std::max(0, -in_x_origin); + const int filter_x_end = + std::min(params.filter_width, input_width - in_x_origin); + const int filter_y_start = std::max(0, -in_y_origin); + const int filter_y_end = + std::min(params.filter_height, input_height - in_y_origin); + int32_t acc = 0; + int filter_count = 0; + for (int filter_y = filter_y_start; filter_y < filter_y_end; + ++filter_y) { + for (int filter_x = filter_x_start; filter_x < filter_x_end; + ++filter_x) { + const int in_x = in_x_origin + filter_x; + const int in_y = in_y_origin + filter_y; + acc += + input_data[Offset(input_shape, batch, in_y, in_x, channel)]; + filter_count++; + } + } + if (filter_count == 0) return false; + acc = (acc + filter_count / 2) / filter_count; + acc = std::max(acc, params.quantized_activation_min); + acc = std::min(acc, params.quantized_activation_max); + output_data[Offset(output_shape, batch, out_y, out_x, channel)] = + static_cast(acc); + } + } + } + } + return true; +} + +inline void L2Pool(const PoolParams& params, const RuntimeShape& input_shape, + const float* input_data, const RuntimeShape& output_shape, + float* output_data) { + TFLITE_DCHECK_EQ(input_shape.DimensionsCount(), 4); + TFLITE_DCHECK_EQ(output_shape.DimensionsCount(), 4); + const int batches = MatchingDim(input_shape, 0, output_shape, 0); + const int depth = MatchingDim(input_shape, 3, output_shape, 3); + const int input_height = input_shape.Dims(1); + const int input_width = input_shape.Dims(2); + const int output_height = output_shape.Dims(1); + const int output_width = output_shape.Dims(2); + const int stride_height = params.stride_height; + const int stride_width = params.stride_width; + for (int batch = 0; 
batch < batches; ++batch) { + for (int out_y = 0; out_y < output_height; ++out_y) { + for (int out_x = 0; out_x < output_width; ++out_x) { + for (int channel = 0; channel < depth; ++channel) { + const int in_x_origin = + (out_x * stride_width) - params.padding_values.width; + const int in_y_origin = + (out_y * stride_height) - params.padding_values.height; + // Compute the boundaries of the filter region clamped so as to + // ensure that the filter window fits in the input array. + const int filter_x_start = std::max(0, -in_x_origin); + const int filter_x_end = + std::min(params.filter_width, input_width - in_x_origin); + const int filter_y_start = std::max(0, -in_y_origin); + const int filter_y_end = + std::min(params.filter_height, input_height - in_y_origin); + float sum_squares = 0.f; + int filter_count = 0; + for (int filter_y = filter_y_start; filter_y < filter_y_end; + ++filter_y) { + for (int filter_x = filter_x_start; filter_x < filter_x_end; + ++filter_x) { + const int in_x = in_x_origin + filter_x; + const int in_y = in_y_origin + filter_y; + const float val = + input_data[Offset(input_shape, batch, in_y, in_x, channel)]; + sum_squares += val * val; + filter_count++; + } + } + const float l2pool_result = std::sqrt(sum_squares / filter_count); + output_data[Offset(output_shape, batch, out_y, out_x, channel)] = + ActivationFunctionWithMinMax(l2pool_result, + params.float_activation_min, + params.float_activation_max); + } + } + } + } +} + +inline void MaxPool(const PoolParams& params, const RuntimeShape& input_shape, + const float* input_data, const RuntimeShape& output_shape, + float* output_data) { + TFLITE_DCHECK_EQ(input_shape.DimensionsCount(), 4); + TFLITE_DCHECK_EQ(output_shape.DimensionsCount(), 4); + const int batches = MatchingDim(input_shape, 0, output_shape, 0); + const int depth = MatchingDim(input_shape, 3, output_shape, 3); + const int input_height = input_shape.Dims(1); + const int input_width = input_shape.Dims(2); + const int 
output_height = output_shape.Dims(1); + const int output_width = output_shape.Dims(2); + const int stride_height = params.stride_height; + const int stride_width = params.stride_width; + for (int batch = 0; batch < batches; ++batch) { + for (int out_y = 0; out_y < output_height; ++out_y) { + for (int out_x = 0; out_x < output_width; ++out_x) { + for (int channel = 0; channel < depth; ++channel) { + const int in_x_origin = + (out_x * stride_width) - params.padding_values.width; + const int in_y_origin = + (out_y * stride_height) - params.padding_values.height; + // Compute the boundaries of the filter region clamped so as to + // ensure that the filter window fits in the input array. + const int filter_x_start = std::max(0, -in_x_origin); + const int filter_x_end = + std::min(params.filter_width, input_width - in_x_origin); + const int filter_y_start = std::max(0, -in_y_origin); + const int filter_y_end = + std::min(params.filter_height, input_height - in_y_origin); + float max = std::numeric_limits::lowest(); + for (int filter_y = filter_y_start; filter_y < filter_y_end; + ++filter_y) { + for (int filter_x = filter_x_start; filter_x < filter_x_end; + ++filter_x) { + const int in_x = in_x_origin + filter_x; + const int in_y = in_y_origin + filter_y; + max = std::max( + max, + input_data[Offset(input_shape, batch, in_y, in_x, channel)]); + } + } + output_data[Offset(output_shape, batch, out_y, out_x, channel)] = + ActivationFunctionWithMinMax(max, params.float_activation_min, + params.float_activation_max); + } + } + } + } +} + +inline void MaxPool(const PoolParams& params, const RuntimeShape& input_shape, + const uint8_t* input_data, const RuntimeShape& output_shape, + uint8_t* output_data) { + TFLITE_DCHECK_LE(params.quantized_activation_min, + params.quantized_activation_max); + TFLITE_DCHECK_GE(params.quantized_activation_min, 0); + TFLITE_DCHECK_LE(params.quantized_activation_max, 255); + TFLITE_DCHECK_EQ(input_shape.DimensionsCount(), 4); + 
TFLITE_DCHECK_EQ(output_shape.DimensionsCount(), 4); + const int batches = MatchingDim(input_shape, 0, output_shape, 0); + const int depth = MatchingDim(input_shape, 3, output_shape, 3); + const int input_height = input_shape.Dims(1); + const int input_width = input_shape.Dims(2); + const int output_height = output_shape.Dims(1); + const int output_width = output_shape.Dims(2); + const int stride_height = params.stride_height; + const int stride_width = params.stride_width; + for (int batch = 0; batch < batches; ++batch) { + for (int out_y = 0; out_y < output_height; ++out_y) { + for (int out_x = 0; out_x < output_width; ++out_x) { + for (int channel = 0; channel < depth; ++channel) { + const int in_x_origin = + (out_x * stride_width) - params.padding_values.width; + const int in_y_origin = + (out_y * stride_height) - params.padding_values.height; + // Compute the boundaries of the filter region clamped so as to + // ensure that the filter window fits in the input array. + const int filter_x_start = std::max(0, -in_x_origin); + const int filter_x_end = + std::min(params.filter_width, input_width - in_x_origin); + const int filter_y_start = std::max(0, -in_y_origin); + const int filter_y_end = + std::min(params.filter_height, input_height - in_y_origin); + uint8_t max = 0; + for (int filter_y = filter_y_start; filter_y < filter_y_end; + ++filter_y) { + for (int filter_x = filter_x_start; filter_x < filter_x_end; + ++filter_x) { + const int in_x = in_x_origin + filter_x; + const int in_y = in_y_origin + filter_y; + max = std::max( + max, + input_data[Offset(input_shape, batch, in_y, in_x, channel)]); + } + } + max = std::max(max, params.quantized_activation_min); + max = std::min(max, params.quantized_activation_max); + output_data[Offset(output_shape, batch, out_y, out_x, channel)] = + static_cast(max); + } + } + } + } +} +} // namespace reference_ops +} // namespace tflite + +#endif // TENSORFLOW_LITE_KERNELS_INTERNAL_REFERENCE_POOLING_H_ diff --git 
a/tensorflow/lite/kernels/internal/reference/portable_tensor_utils.cc b/tensorflow/lite/kernels/internal/reference/portable_tensor_utils.cc new file mode 100644 index 0000000..d386203 --- /dev/null +++ b/tensorflow/lite/kernels/internal/reference/portable_tensor_utils.cc @@ -0,0 +1,809 @@ +/* Copyright 2019 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ +#include +#include +#include +#include +#include +#include + +#include "fixedpoint/fixedpoint.h" +#include "tensorflow/lite/kernels/internal/common.h" +#include "tensorflow/lite/kernels/internal/compatibility.h" +#include "tensorflow/lite/kernels/internal/cppmath.h" +#include "tensorflow/lite/kernels/internal/reference/portable_tensor_utils_impl.h" + +#if defined(_MSC_VER) +#define __restrict__ __restrict +#endif + +namespace tflite { +namespace tensor_utils { + +namespace { +const int32_t kInt16Max = std::numeric_limits::max(); +const int32_t kInt16Min = std::numeric_limits::min(); +} // namespace + +void PortableSymmetricQuantizeFloats(const float* values, const int size, + int8_t* quantized_values, float* min_value, + float* max_value, float* scaling_factor) { + auto minmax = std::minmax_element(values, values + size); + *min_value = *minmax.first; + *max_value = *minmax.second; + + PortableSymmetricQuantizeFloats(values, size, quantized_values, *min_value, + *max_value, scaling_factor); +} + +void 
PortableSymmetricQuantizeFloats(const float* values, const int size, + int8_t* quantized_values, float min_value, + float max_value, float* scaling_factor) { + const int32_t kScale = 127; + const float range = std::max(std::abs(min_value), std::abs(max_value)); + if (range == 0) { + memset(quantized_values, 0, size * sizeof(int8_t)); + *scaling_factor = 1; + return; + } + *scaling_factor = range / kScale; + const float scaling_factor_inv = kScale / range; + for (int i = 0; i < size; ++i) { + const int32_t quantized_value = + static_cast(TfLiteRound(values[i] * scaling_factor_inv)); + // Clamp: just in case some odd numeric offset. + quantized_values[i] = static_cast( + std::min(kScale, std::max(-kScale, quantized_value))); + } +} + +void PortableAsymmetricQuantizeFloats(const float* values, const int size, + int8_t* quantized_values, + float* scaling_factor, int32_t* offset) { + const int32_t kMinScale = -128; + const int32_t kMaxScale = 127; + const double qmin_double = kMinScale; + const double qmax_double = kMaxScale; + const auto minmax = std::minmax_element(values, values + size); + const double rmin = static_cast(std::min(0.0f, *minmax.first)); + const double rmax = static_cast(std::max(0.0f, *minmax.second)); + if (rmin == rmax) { + memset(quantized_values, 0, size * sizeof(int8_t)); + *scaling_factor = 1; + *offset = 0; + return; + } else { + double scale = (rmax - rmin) / (qmax_double - qmin_double); + const double zero_point_from_min = qmin_double - rmin / scale; + const double zero_point_from_max = qmax_double - rmax / scale; + const double zero_point_from_min_error = + std::abs(qmin_double) + std::abs(rmin / scale); + const double zero_point_from_max_error = + std::abs(qmax_double) + std::abs(rmax / scale); + const double zero_point_double = + zero_point_from_min_error < zero_point_from_max_error + ? 
zero_point_from_min + : zero_point_from_max; + int8_t nudged_zero_point = 0; + if (zero_point_double <= qmin_double) { + nudged_zero_point = kMinScale; + } else if (zero_point_double >= qmax_double) { + nudged_zero_point = kMaxScale; + } else { + nudged_zero_point = static_cast(round(zero_point_double)); + } + *scaling_factor = scale; + *offset = nudged_zero_point; + } + const float scaling_factor_inv = 1.0f / *scaling_factor; + for (int i = 0; i < size; ++i) { + const int32_t quantized_value = static_cast( + TfLiteRound(*offset + values[i] * scaling_factor_inv)); + quantized_values[i] = + std::min(kMaxScale, std::max(kMinScale, quantized_value)); + } +} + +void PortableMatrixBatchVectorMultiplyAccumulate(const float* matrix, + int m_rows, int m_cols, + const float* vector, + int n_batch, float* result) { + float* result_in_batch = result; + for (int b = 0; b < n_batch; b++) { + const float* matrix_ptr = matrix; + for (int r = 0; r < m_rows; r++) { + float dot_prod = 0.0f; + const float* vector_in_batch = vector + b * m_cols; + for (int c = 0; c < m_cols; c++) { + dot_prod += *matrix_ptr++ * *vector_in_batch++; + } + *result_in_batch += dot_prod; + ++result_in_batch; + } + } +} + +void PortableMatrixBatchVectorMultiplyAccumulate( + const int8_t* __restrict__ matrix, const int m_rows, const int m_cols, + const int8_t* __restrict__ vectors, const float* scaling_factors, + int n_batch, float* __restrict__ result) { + for (int batch = 0; batch < n_batch; ++batch, vectors += m_cols) { + const float batch_scaling_factor = scaling_factors[batch]; + // Get the address of the first row. + const int8_t* row_ptr = matrix; + for (int row = 0; row < m_rows; ++row) { + // Initialize the dot product sum for the row to 0. + int32_t dotprod = 0; +#if defined(__GNUC__) + // Prefetch the row to cache. 
+ __builtin_prefetch(row_ptr, 0 /* prefetch for read */, + 3 /* temporal locality */); +#endif + for (int col = 0; col < m_cols; ++col, ++row_ptr) { + dotprod += (*row_ptr) * (vectors[col]); + } // for col + *result += dotprod * batch_scaling_factor; + ++result; + } // for row + } // for batch +} + +void PortableMatrixBatchVectorMultiplyAccumulate( + const int8_t* __restrict__ matrix, const int m_rows, const int m_cols, + const int8_t* __restrict__ vectors, const float* scaling_factors, + int n_batch, float* __restrict__ result, const float* per_channel_scale, + const int32_t* input_offset, int32_t* scratch, int32_t* row_sums, + bool* compute_row_sums, CpuBackendContext* context) { + if (input_offset == nullptr) { + PortableMatrixBatchVectorMultiplyAccumulate( + matrix, m_rows, m_cols, vectors, scaling_factors, n_batch, result); + return; + } + if (!compute_row_sums || *compute_row_sums) { + PortableReductionSumVector(matrix, row_sums, m_rows, m_cols); + if (compute_row_sums) { + *compute_row_sums = false; + } + } + + for (int batch = 0; batch < n_batch; ++batch, vectors += m_cols) { + const float batch_scaling_factor = scaling_factors[batch]; + const int32_t batch_offset = input_offset[batch]; + const int8_t* row_ptr = matrix; + for (int row = 0; row < m_rows; ++row) { + int32_t dotprod = 0; + float scale = batch_scaling_factor; + if (per_channel_scale) { + scale *= per_channel_scale[row]; + } +#if defined(__GNUC__) + // Prefetch the row to cache. 
+ __builtin_prefetch(row_ptr, 0 /* prefetch for read */, + 3 /* temporal locality */); +#endif + for (int col = 0; col < m_cols; ++col, ++row_ptr) { + dotprod += (*row_ptr) * vectors[col]; + } // for col + dotprod -= row_sums[row] * batch_offset; + *result += dotprod * scale; + ++result; + } // for row + } // for batch +} + +void PortableSparseMatrixBatchVectorMultiplyAccumulate1x4( + const float* __restrict__ matrix, const int32_t* __restrict__ segments, + const int32_t* __restrict__ indices, int m_rows, int m_cols, + const float* __restrict__ vector, int n_batch, float* __restrict__ result) { + const int kBlockSize = 4; + TFLITE_DCHECK_EQ(m_cols % kBlockSize, 0); + for (int batch = 0; batch < n_batch; batch++) { + const float* matrix_ptr = matrix; + for (int row = 0; row < m_rows; row++) { + float dot_prod = 0.0f; + const float* vector_in_batch = vector + batch * m_cols; + for (int i = segments[row]; i < segments[row + 1]; i++) { + const int block_start_index = indices[i] * kBlockSize; + const float* vector_block_in_batch_ptr = + vector_in_batch + block_start_index; + for (int c = 0; c < kBlockSize; c++) { + dot_prod += *matrix_ptr++ * *vector_block_in_batch_ptr++; + } + } + result[batch * m_rows + row] += dot_prod; + } + } +} + +void PortableSparseMatrixBatchVectorMultiplyAccumulate1x16( + const int8_t* __restrict__ matrix, const int32_t* __restrict__ segments, + const int32_t* __restrict__ indices, int m_rows, int m_cols, + const int8_t* __restrict__ vector, const int32_t* __restrict__ bias_vector, + int n_batch, const int32_t input_offset, const int32_t output_multiplier, + const int32_t output_shift, const int32_t output_offset, + const int32_t output_activation_min, const int32_t output_activation_max, + int8_t* __restrict__ result) { + const int kBlockSize = 16; + TFLITE_DCHECK_EQ(m_cols % kBlockSize, 0); + for (int batch = 0; batch < n_batch; ++batch) { + const int8_t* matrix_ptr = matrix; + for (int row = 0; row < m_rows; ++row) { + int32_t dot_prod = 0; 
+ const int8_t* vector_in_batch = vector + batch * m_cols; + for (int i = segments[row]; i < segments[row + 1]; ++i) { + const int block_start_index = indices[i] * kBlockSize; + const int8_t* vector_block_in_batch_ptr = + vector_in_batch + block_start_index; + for (int c = 0; c < kBlockSize; c++) { + dot_prod += *matrix_ptr * *vector_block_in_batch_ptr++; + dot_prod += *matrix_ptr++ * input_offset; + } + } + const int32_t bias_value = bias_vector != nullptr ? bias_vector[row] : 0; + dot_prod = MultiplyByQuantizedMultiplier(dot_prod + bias_value, + output_multiplier, output_shift); + dot_prod += output_offset; + result[batch * m_rows + row] = + static_cast(ActivationFunctionWithMinMax( + dot_prod, output_activation_min, output_activation_max)); + } + } +} + +void PortableSparseMatrixBatchVectorMultiplyAccumulate( + const float* __restrict__ matrix, const uint8_t* __restrict__ ledger, + int m_rows, int m_cols, const float* __restrict__ vector, int n_batch, + float* __restrict__ result) { + const int kBlockSize = 16; + TFLITE_DCHECK_EQ( // NOLINT + m_cols % kBlockSize, 0); + for (int batch = 0; batch < n_batch; batch++) { + const float* matrix_ptr = matrix; + const uint8_t* ledger_ptr = ledger; + for (int row = 0; row < m_rows; row++) { + float dot_prod = 0.0f; + int num_nonzero_blocks = *ledger_ptr++; + if (num_nonzero_blocks > 0) { + const float* vector_in_batch = vector + batch * m_cols; + for (int i = 0; i < num_nonzero_blocks; i++) { + const int block_start_index = *ledger_ptr++ * kBlockSize; + const float* vector_block_in_batch_ptr = + vector_in_batch + block_start_index; + for (int c = 0; c < kBlockSize; c++) { + dot_prod += *matrix_ptr++ * *vector_block_in_batch_ptr++; + } + } + } + result[batch * m_rows + row] += dot_prod; + } + } +} + +void PortableSparseMatrixBatchVectorMultiplyAccumulate( + const int8_t* __restrict__ matrix, const uint8_t* ledger, const int m_rows, + const int m_cols, const int8_t* __restrict__ vectors, + const float* scaling_factors, int 
n_batch, float* __restrict__ result) { + static const int kBlockSize = 16; + TFLITE_DCHECK_EQ( // NOLINT + m_cols % kBlockSize, 0); + for (int batch = 0; batch < n_batch; ++batch, vectors += m_cols) { + const float batch_scaling_factor = scaling_factors[batch]; + const uint8_t* ledger_ptr = ledger; + // Get the address of the first row. + const int8_t* row_ptr = matrix; + for (int row = 0; row < m_rows; ++row) { + // Initialize the dot product sum for the row to 0. + int32_t dotprod = 0; +#if defined(__GNUC__) + // Prefetch the row to cache. + __builtin_prefetch(row_ptr, 0 /* prefetch for read */, + 3 /* temporal locality */); +#endif + int num_nonzero_blocks = *ledger_ptr++; + for (int i = 0; i < num_nonzero_blocks; i++) { + const int block_start_index = *ledger_ptr++ * kBlockSize; + const int8_t* vector_block_ptr = vectors + block_start_index; + for (int c = 0; c < kBlockSize; c++) { + dotprod += (*row_ptr++) * (*vector_block_ptr++); + } // for block + } // for num_nonzero_blocks + result[batch * m_rows + row] += dotprod * batch_scaling_factor; + } // for row + } // for batch +} + +template +void PortableMatrixBatchVectorMultiplyAccumulateImpl( + const int8_t* input, const int32_t* bias, + const int8_t* input_to_gate_weights, int32_t multiplier, int32_t shift, + int32_t n_batch, int32_t n_input, int32_t n_output, int32_t output_zp, + T* output) { + const int16_t output_max = std::numeric_limits::max(); + const int16_t output_min = std::numeric_limits::min(); + for (int batch = 0; batch < n_batch; ++batch) { + for (int row = 0; row < n_output; ++row) { + int32_t acc = bias[row]; + for (int col = 0; col < n_input; ++col) { + int8_t input_val = input[batch * n_input + col]; + int8_t weights_val = input_to_gate_weights[row * n_input + col]; + acc += input_val * weights_val; + } + acc = MultiplyByQuantizedMultiplier(acc, multiplier, shift); + acc += output_zp; + acc += output[batch * n_output + row]; + if (acc > output_max) { + acc = output_max; + } + if (acc < 
output_min) { + acc = output_min; + } + output[batch * n_output + row] = static_cast(acc); + } + } +} + +void PortableMatrixBatchVectorMultiplyAccumulate( + const int8_t* input, const int32_t* bias, + const int8_t* input_to_gate_weights, int32_t multiplier, int32_t shift, + int32_t n_batch, int32_t n_input, int32_t n_output, int32_t output_zp, + int32_t* scratch, int16_t* output, CpuBackendContext* context) { + PortableMatrixBatchVectorMultiplyAccumulateImpl( + input, bias, input_to_gate_weights, multiplier, shift, n_batch, n_input, + n_output, output_zp, output); +} + +void PortableMatrixBatchVectorMultiplyAccumulate( + const int8_t* input, const int32_t* bias, + const int8_t* input_to_gate_weights, int32_t multiplier, int32_t shift, + int32_t n_batch, int32_t n_input, int32_t n_output, int32_t output_zp, + int32_t* scratch, int8_t* output, CpuBackendContext* context) { + PortableMatrixBatchVectorMultiplyAccumulateImpl( + input, bias, input_to_gate_weights, multiplier, shift, n_batch, n_input, + n_output, output_zp, output); +} + +void PortableMatrixBatchVectorMultiply(const int8_t* input, + int32_t input_zeropoint, + const int8_t* input_to_gate_weights, + int32_t input_to_gate_effective_scale_a, + int32_t input_to_gate_effective_scale_b, + int32_t n_batch, int32_t n_input, + int32_t n_cell, int8_t* gate_output, + int8_t gate_output_zp) { + const int32_t int8_max = std::numeric_limits::max(); + const int32_t int8_min = std::numeric_limits::min(); + for (int batch = 0; batch < n_batch; ++batch) { + for (int row = 0; row < n_cell; ++row) { + int32_t acc = 0; + for (int col = 0; col < n_input; ++col) { + int32_t input_val = input[batch * n_input + col]; + int8_t weights_val = input_to_gate_weights[row * n_input + col]; + acc += (input_val - input_zeropoint) * weights_val; + } + acc = MultiplyByQuantizedMultiplier(acc, input_to_gate_effective_scale_a, + input_to_gate_effective_scale_b); + acc += gate_output_zp; + if (acc > int8_max) { + acc = int8_max; + } + if (acc < 
int8_min) { + acc = int8_min; + } + gate_output[batch * n_cell + row] = static_cast(acc); + } + } +} + +void PortableMatrixBatchVectorMultiply( + const int16_t* hidden, const int8_t* hidden_to_output_weights, + int32_t proj_effective_scale_a, int32_t proj_effective_scale_b, + const int32_t* gate_bias, int32_t n_batch, int32_t n_hidden, + int32_t n_output, int32_t output_zp, int8_t* proj_output) { + const int16_t int8_max = std::numeric_limits::max(); + const int16_t int8_min = std::numeric_limits::min(); + for (int batch = 0; batch < n_batch; ++batch) { + for (int row = 0; row < n_output; ++row) { + int64_t acc = gate_bias[row]; + for (int col = 0; col < n_hidden; ++col) { + int16_t input_val = hidden[batch * n_hidden + col]; + int8_t weights_val = hidden_to_output_weights[row * n_hidden + col]; + int64_t curr = acc; + acc += input_val * weights_val; + if (input_val * weights_val > 0 && acc < curr) { + acc = std::numeric_limits::max(); + } + if (input_val * weights_val < 0 && acc > curr) { + acc = std::numeric_limits::min(); + } + } + acc = MultiplyByQuantizedMultiplier(acc, proj_effective_scale_a, + proj_effective_scale_b); + acc += output_zp; + if (acc > int8_max) { + acc = int8_max; + } + if (acc < int8_min) { + acc = int8_min; + } + proj_output[batch * n_output + row] = acc; + } + } +} + +void PortableApplyLayerNorm(const int16_t* input, + const int16_t* layer_norm_weights, + const int32_t* bias, int32_t layer_norm_scale_a, + int32_t layer_norm_scale_b, int32_t variance_limit, + int n_batch, int n_input, int16_t* output) { + // The square of std::pow(2, 10), which is the extra factor that makes sure + // normalized values has enough resolution. 
+ static const int kTwoToPower20 = 1 << 20; + for (int i = 0; i < n_batch; ++i) { + int64_t sum = 0; + int64_t sum_sq = 0; + for (int j = 0; j < n_input; ++j) { + const int32_t index = i * n_input + j; + int32_t val = static_cast(input[index]); + sum += val; + sum_sq += val * val; + } + int32_t mean = + static_cast(static_cast(sum) * 1024 / n_input); + // TODO(b/173994730): Avoids overflow but only works for POT n_input. + int32_t temp = kTwoToPower20 / n_input; + int64_t variance = + sum_sq * temp - static_cast(mean) * static_cast(mean); + int32_t variance2 = static_cast(variance / kTwoToPower20); + if (variance2 < 1) { + variance2 = variance_limit; + } + int32_t stddev_inverse_a; + int stddev_inverse_b; + GetInvSqrtQuantizedMultiplierExp(variance2, /*reverse_shift*/ -1, + &stddev_inverse_a, &stddev_inverse_b); + + for (int j = 0; j < n_input; ++j) { + const int32_t index = i * n_input + j; + int32_t val = static_cast(input[index]); + int32_t shifted = 1024 * val - mean; + int32_t rescaled = MultiplyByQuantizedMultiplier( + shifted, stddev_inverse_a, stddev_inverse_b); + // TODO(jianlijianli): Saturate this. + int64_t val3 = rescaled * layer_norm_weights[j] + bias[j]; + int32_t val4 = + static_cast((val3 > 0 ? 
val3 + 512 : val3 - 512) / 1024); + int32_t val5 = MultiplyByQuantizedMultiplier(val4, layer_norm_scale_a, + layer_norm_scale_b + 12); + val5 = std::min(std::max(kInt16Min, val5), kInt16Max); + output[index] = static_cast(val5); + } + } +} + +void PortableApplyLayerNormFloat(const int16_t* input, + const int16_t* layer_norm_weights, + int32_t layer_norm_scale_a, + int32_t layer_norm_scale_b, + const int32_t* bias, int n_batch, int n_input, + int16_t* output) { + const int32_t int16_max = std::numeric_limits::max(); + const int32_t int16_min = std::numeric_limits::min(); + const float layer_norm_scale = + layer_norm_scale_a * + std::pow(2.0, static_cast(layer_norm_scale_b - 31)); + const float bias_scale = + static_cast(std::pow(2.0, -10)) * layer_norm_scale; + + for (int batch = 0; batch < n_batch; ++batch) { + float sum = 0.0f; + float sum_sq = 0.0f; + for (int i = 0; i < n_input; ++i) { + const int index = batch * n_input + i; + const float value = static_cast(input[index]); + sum += value; + sum_sq += value * value; + } + const float mean = sum / n_input; + float stddev_inv = 0.0f; + const float variance = sum_sq / n_input - mean * mean; + if (variance == 0) { + stddev_inv = 1.0f / std::sqrt(1e-8f); + } else { + stddev_inv = 1.0f / std::sqrt(variance); + } + for (int i = 0; i < n_input; ++i) { + const int index = batch * n_input + i; + const float normalized_value = + (static_cast(input[index]) - mean) * stddev_inv; + const float weighted_normalized_value = + normalized_value * layer_norm_weights[i] * layer_norm_scale + + bias[i] * bias_scale; + const int32_t quant_output = static_cast(round( + weighted_normalized_value * static_cast(std::pow(2, 12)))); + output[index] = std::min(int16_max, std::max(int16_min, quant_output)); + } + } +} + +void PortableMatrixScalarMultiplyAccumulate(const int8_t* matrix, + int32_t scalar, int32_t n_row, + int32_t n_col, int32_t* output) { + for (int i = 0; i < n_row; ++i) { + int32_t row_sum = 0; + for (int j = 0; j < n_col; 
++j) { + row_sum += *matrix++; + } + output[i] += row_sum * scalar; + } +} + +void PortableApplySigmoid(const int16_t* input, int32_t n_batch, + int32_t n_input, int16_t* output) { + for (int batch = 0; batch < n_batch; ++batch) { + for (int c = 0; c < n_input; c++) { + using F3 = gemmlowp::FixedPoint; + using F0 = gemmlowp::FixedPoint; + const int index = batch * n_input + c; + F3 sigmoid_input = F3::FromRaw(input[index]); + F0 sigmoid_output = gemmlowp::logistic(sigmoid_input); + output[index] = sigmoid_output.raw(); + } + } +} + +void PortableApplySigmoidFloat(const int16_t* input, int32_t n_batch, + int32_t n_input, int16_t* output) { + const int32_t int16_max = std::numeric_limits::max(); + const int32_t int16_min = std::numeric_limits::min(); + for (int batch = 0; batch < n_batch; ++batch) { + for (int i = 0; i < n_input; ++i) { + const int index = batch * n_input + i; + const float float_input = + input[index] * static_cast(std::pow(2, -12)); + const float float_output = 1.0f / (1.0f + std::exp(-float_input)); + const int32_t quant_output = static_cast( + float_output * static_cast(std::pow(2, 15))); + const int32_t quant_output_clamped = + std::min(int16_max, std::max(int16_min, quant_output)); + output[index] = static_cast(quant_output_clamped); + } + } +} + +template +void PortableApplyTanhImpl(const int16_t* input, int32_t n_batch, + int32_t n_input, int16_t* output) { + using FX = gemmlowp::FixedPoint; + using F0 = gemmlowp::FixedPoint; + for (int batch = 0; batch < n_batch; ++batch) { + for (int i = 0; i < n_input; ++i) { + const int index = batch * n_input + i; + FX tanh_input = FX::FromRaw(input[index]); + F0 tanh_output = gemmlowp::tanh(tanh_input); + output[index] = tanh_output.raw(); + } + } +} + +void PortableApplyTanh(int32_t integer_bits, const int16_t* input, + int32_t n_batch, int32_t n_input, int16_t* output) { + assert(integer_bits <= 6); +#define DISPATCH_TANH(i) \ + case i: \ + PortableApplyTanhImpl(input, n_batch, n_input, output); \ + 
break; + switch (integer_bits) { + DISPATCH_TANH(0); + DISPATCH_TANH(1); + DISPATCH_TANH(2); + DISPATCH_TANH(3); + DISPATCH_TANH(4); + DISPATCH_TANH(5); + DISPATCH_TANH(6); + default: + return; + } +#undef DISPATCH_TANH +} + +void PortableApplyTanhFloat(const int16_t* input, int32_t n_batch, + int32_t n_input, int32_t integer_bits, + int16_t* output) { + const int32_t int16_max = std::numeric_limits::max(); + const int32_t int16_min = std::numeric_limits::min(); + const double two = 2.0; + for (int batch = 0; batch < n_batch; ++batch) { + for (int i = 0; i < n_input; ++i) { + const int index = batch * n_input + i; + const float float_input = + input[index] * std::pow(two, static_cast(integer_bits)); + const float float_output = std::tanh(float_input); + const int32_t quant_output = static_cast( + float_output * static_cast(std::pow(2, 15))); + const int32_t quant_output_clamped = + std::min(int16_max, std::max(int16_min, quant_output)); + output[index] = static_cast(quant_output_clamped); + } + } +} + +void PortableCwiseMul(const int16_t* input_1, const int16_t* input_2, + int n_batch, int n_input, int shift, int16_t* output) { + for (int batch = 0; batch < n_batch; ++batch) { + for (int i = 0; i < n_input; ++i) { + const int index = batch * n_input + i; + const int16_t a = input_1[index]; + const int16_t b = input_2[index]; + const int32_t value = static_cast(a) * static_cast(b); + output[index] = + static_cast(gemmlowp::RoundingDivideByPOT(value, shift)); + } + } +} + +void PortableCwiseMul(const int16_t* input_1, const int16_t* input_2, + int32_t multiplier, int32_t shift, int32_t n_batch, + int32_t n_input, int32_t output_zp, int8_t* output) { + for (int batch = 0; batch < n_batch; ++batch) { + for (int i = 0; i < n_input; ++i) { + const int index = batch * n_input + i; + const int16_t a = input_1[index]; + const int16_t b = input_2[index]; + int32_t value = static_cast(a) * static_cast(b); + value = MultiplyByQuantizedMultiplier(value, multiplier, shift); + 
value += output_zp; + value = std::min(std::max(static_cast(-128), value), + static_cast(127)); + + output[index] = static_cast(value); + } + } +} + +void PortableCwiseAdd(const int16_t* input_1, const int16_t* input_2, + int n_batch, int n_input, int16_t* output) { + for (int batch = 0; batch < n_batch; ++batch) { + for (int i = 0; i < n_input; ++i) { + const int index = batch * n_input + i; + int32_t sum = input_1[index] + input_2[index]; + const int32_t sum_clamped = std::min(kInt16Max, std::max(kInt16Min, sum)); + output[index] = static_cast(sum_clamped); + } + } +} + +float PortableVectorVectorDotProduct(const float* vector1, const float* vector2, + int v_size) { + float result = 0.0; + for (int v = 0; v < v_size; v++) { + result += *vector1++ * *vector2++; + } + return result; +} + +namespace { +inline int32_t VectorVectorDotProduct(const int16_t* vector1, + const int16_t* vector2, int v_size) { + int32_t result = 0; + for (int v = 0; v < v_size; v++) { + result += *vector1++ * *vector2++; + } + return result; +} +} // namespace + +void PortableBatchVectorBatchVectorDotProduct(const int16_t* vector1, + const int16_t* vector2, + int v_size, int n_batch, + int32_t* result) { + for (int b = 0; b < n_batch; b++) { + result[b] = VectorVectorDotProduct(vector1, vector2, v_size); + vector1 += v_size; + vector2 += v_size; + } +} + +void PortableVectorBatchVectorCwiseProductAccumulate( + const int16_t* vector, int v_size, const int16_t* batch_vector, int n_batch, + int32_t multiplier, int shift, int16_t* result) { + for (int b = 0; b < n_batch; b++) { + for (int v = 0; v < v_size; v++) { + int32_t prod = vector[v] * *batch_vector++; + prod = MultiplyByQuantizedMultiplier(prod, multiplier, shift); + int32_t output = prod + *result; + output = std::max(std::min(static_cast(32767), output), + static_cast(-32768)); + *result++ = output; + } + } +} + +void PortableSub1Vector(const float* vector, int v_size, float* result) { + for (int v = 0; v < v_size; v++) { + *result++ 
= 1.0f - *vector++; + } +} + +void PortableSub1Vector(const int16_t* vector, int v_size, int16_t* result) { + static const int16_t kOne = 32767; + for (int v = 0; v < v_size; v++) { + *result++ = kOne - *vector++; + } +} + +void PortableVectorScalarMultiply(const int8_t* vector, const int v_size, + const float scale, float* result) { + for (int v = 0; v < v_size; ++v) { + *result++ = scale * *vector++; + } +} + +void PortableMeanStddevNormalization(const float* __restrict__ input_vector, + float* __restrict__ output_vector, + int v_size, int n_batch) { + for (int batch = 0; batch < n_batch; ++batch) { + float sum = 0.0f; + for (int i = 0; i < v_size; ++i) { + sum += input_vector[i]; + } + const float mean = sum / v_size; + float sum_diff_sq = 0.0f; + for (int i = 0; i < v_size; ++i) { + const float diff = input_vector[i] - mean; + sum_diff_sq += diff * diff; + } + const float variance = sum_diff_sq / v_size; + constexpr float kNormalizationConstant = 1e-8f; + const float stddev_inv = + 1.0f / std::sqrt(variance + kNormalizationConstant); + for (int i = 0; i < v_size; ++i) { + output_vector[i] = (input_vector[i] - mean) * stddev_inv; + } + input_vector += v_size; + output_vector += v_size; + } +} + +void PortableTwoGateSaturatingAdd(const int8_t* input, int8_t input_zp, + const int8_t* recurrent, int8_t recurrent_zp, + int32_t input_effective_scale_a, + int32_t input_effective_scale_b, + int32_t recurrent_effective_scale_a, + int32_t recurrent_effective_scale_b, + int32_t n_batch, int32_t n_cell, + int16_t* output) { + const int32_t int16_max = std::numeric_limits::max(); + const int32_t int16_min = std::numeric_limits::min(); + for (int i = 0; i < n_batch * n_cell; ++i) { + int32_t x = static_cast(input[i]) - static_cast(input_zp); + int32_t h = + static_cast(recurrent[i]) - static_cast(recurrent_zp); + int32_t x_scaled = MultiplyByQuantizedMultiplier(x, input_effective_scale_a, + input_effective_scale_b); + int32_t h_scaled = MultiplyByQuantizedMultiplier( + h, 
recurrent_effective_scale_a, recurrent_effective_scale_b); + int32_t y = h_scaled + x_scaled; + if (y > int16_max) { + y = int16_max; + } + if (y < int16_min) { + y = int16_min; + } + output[i] = static_cast(y); + } +} + +} // namespace tensor_utils +} // namespace tflite diff --git a/tensorflow/lite/kernels/internal/reference/portable_tensor_utils.h b/tensorflow/lite/kernels/internal/reference/portable_tensor_utils.h new file mode 100644 index 0000000..0416db0 --- /dev/null +++ b/tensorflow/lite/kernels/internal/reference/portable_tensor_utils.h @@ -0,0 +1,333 @@ +/* Copyright 2017 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ +#ifndef TENSORFLOW_LITE_KERNELS_INTERNAL_REFERENCE_PORTABLE_TENSOR_UTILS_H_ +#define TENSORFLOW_LITE_KERNELS_INTERNAL_REFERENCE_PORTABLE_TENSOR_UTILS_H_ + +#include "tensorflow/lite/kernels/internal/reference/portable_tensor_utils_impl.h" + +#if defined(_MSC_VER) +#define __restrict__ __restrict +#endif + +namespace tflite { +namespace tensor_utils { + +// Check if all entries of a vector are zero for float. +bool IsZeroVector(const float* vector, int v_size) { + return PortableIsZeroVector(vector, v_size); +} + +// Check if all entries of a vector are zero for int8_t. 
+bool IsZeroVector(const int8_t* vector, int v_size) { + return PortableIsZeroVector(vector, v_size); +} + +void SymmetricQuantizeFloats(const float* values, const int size, + int8_t* quantized_values, float* min, float* max, + float* scaling_factor) { + PortableSymmetricQuantizeFloats(values, size, quantized_values, min, max, + scaling_factor); +} + +void SymmetricQuantizeFloats(const float* values, const int size, + int8_t* quantized_values, float min_value, + float max_value, float* scaling_factor) { + PortableSymmetricQuantizeFloats(values, size, quantized_values, min_value, + max_value, scaling_factor); +} + +void AsymmetricQuantizeFloats(const float* values, const int size, + int8_t* quantized_values, float* scaling_factor, + int32_t* offset) { + PortableAsymmetricQuantizeFloats(values, size, quantized_values, + scaling_factor, offset); +} + +void MatrixBatchVectorMultiplyAccumulate(const float* matrix, int m_rows, + int m_cols, const float* vector, + int n_batch, float* result) { + PortableMatrixBatchVectorMultiplyAccumulate(matrix, m_rows, m_cols, vector, + n_batch, result); +} + +void MatrixBatchVectorMultiplyAccumulate(const int8_t* __restrict__ matrix, + const int m_rows, const int m_cols, + const int8_t* __restrict__ vector, + const float* scaling_factors, + int n_batch, + float* __restrict__ result) { + PortableMatrixBatchVectorMultiplyAccumulate(matrix, m_rows, m_cols, vector, + scaling_factors, n_batch, result); +} + +void MatrixBatchVectorMultiplyAccumulate( + const int8_t* __restrict__ matrix, const int m_rows, const int m_cols, + const int8_t* __restrict__ vectors, const float* scaling_factors, + int n_batch, float* __restrict__ result, const float* per_channel_scale, + const int32_t* input_offset, int32_t* scratch, int32_t* row_sums, + bool* compute_row_sums, CpuBackendContext* context) { + PortableMatrixBatchVectorMultiplyAccumulate( + matrix, m_rows, m_cols, vectors, scaling_factors, n_batch, result, + per_channel_scale, input_offset, scratch, 
row_sums, compute_row_sums, + context); +} + +void MatrixBatchVectorMultiplyAccumulate(const int8_t* __restrict__ matrix, + const int m_rows, const int m_cols, + const int8_t* __restrict__ vector, + const float* scaling_factors, + int n_batch, int32_t* scratch, + float* __restrict__ result, + CpuBackendContext* context) { + PortableMatrixBatchVectorMultiplyAccumulate(matrix, m_rows, m_cols, vector, + scaling_factors, n_batch, result); +} + +void SparseMatrixBatchVectorMultiplyAccumulate1x4( + const float* __restrict__ matrix, const int32_t* __restrict__ segments, + const int32_t* __restrict__ indices, int m_rows, int m_cols, + const float* __restrict__ vector, int n_batch, float* __restrict__ result) { + PortableSparseMatrixBatchVectorMultiplyAccumulate1x4( + matrix, segments, indices, m_rows, m_cols, vector, n_batch, result); +} + +void SparseMatrixBatchVectorMultiplyAccumulate( + const float* __restrict__ matrix, const uint8_t* __restrict__ ledger, + int m_rows, int m_cols, const float* __restrict__ vector, int n_batch, + float* __restrict__ result) { + PortableSparseMatrixBatchVectorMultiplyAccumulate( + matrix, ledger, m_rows, m_cols, vector, n_batch, result); +} + +void SparseMatrixBatchVectorMultiplyAccumulate1x16( + const int8_t* __restrict__ matrix, const int32_t* __restrict__ segments, + const int32_t* __restrict__ indices, int m_rows, int m_cols, + const int8_t* __restrict__ vector, const int32_t* __restrict__ bias_vector, + int n_batch, const int32_t input_offset, const int32_t output_multiplier, + const int32_t output_shift, const int32_t output_offset, + const int32_t output_activation_min, const int32_t output_activation_max, + + int8_t* __restrict__ result) { + PortableSparseMatrixBatchVectorMultiplyAccumulate1x16( + matrix, segments, indices, m_rows, m_cols, vector, bias_vector, n_batch, + input_offset, output_multiplier, output_shift, output_offset, + output_activation_min, output_activation_max, result); +} + +void 
SparseMatrixBatchVectorMultiplyAccumulate( + const int8_t* __restrict__ matrix, const uint8_t* ledger, const int m_rows, + const int m_cols, const int8_t* __restrict__ vectors, + const float* scaling_factors, int n_batch, float* __restrict__ result) { + PortableSparseMatrixBatchVectorMultiplyAccumulate( + matrix, ledger, m_rows, m_cols, vectors, scaling_factors, n_batch, + result); +} + +void MatrixBatchVectorMultiplyAccumulate( + const int8_t* input, const int32_t* bias, + const int8_t* input_to_gate_weights, int32_t multiplier, int32_t shift, + int32_t n_batch, int32_t n_input, int32_t n_output, int32_t output_zp, + int32_t* scratch, int16_t* output, CpuBackendContext* context) { + PortableMatrixBatchVectorMultiplyAccumulate( + input, bias, input_to_gate_weights, multiplier, shift, n_batch, n_input, + n_output, output_zp, scratch, output, context); +} + +void MatrixBatchVectorMultiplyAccumulate( + const int8_t* input, const int32_t* bias, + const int8_t* input_to_gate_weights, int32_t multiplier, int32_t shift, + int32_t n_batch, int32_t n_input, int32_t n_output, int32_t output_zp, + int32_t* scratch, int8_t* output, CpuBackendContext* context) { + PortableMatrixBatchVectorMultiplyAccumulate( + input, bias, input_to_gate_weights, multiplier, shift, n_batch, n_input, + n_output, output_zp, scratch, output, context); +} + +void MatrixScalarMultiplyAccumulate(const int8_t* matrix, int32_t scalar, + int32_t n_row, int32_t n_col, + int32_t* output) { + PortableMatrixScalarMultiplyAccumulate(matrix, scalar, n_row, n_col, output); +} + +void MatrixBatchVectorMultiply(const int8_t* input, int32_t input_zeropoint, + const int8_t* input_to_gate_weights, + int32_t input_to_gate_effective_scale_a, + int32_t input_to_gate_effective_scale_b, + int32_t n_batch, int32_t n_input, int32_t n_cell, + int8_t* gate_output, int8_t gate_output_zp) { + PortableMatrixBatchVectorMultiply( + input, input_zeropoint, input_to_gate_weights, + input_to_gate_effective_scale_a, 
input_to_gate_effective_scale_b, n_batch, + n_input, n_cell, gate_output, gate_output_zp); +} + +void MatrixBatchVectorMultiply(const int16_t* hidden, + const int8_t* hidden_to_output_weights, + int32_t proj_effective_scale_a, + int32_t proj_effective_scale_b, + const int32_t* gate_bias, int32_t n_batch, + int32_t n_hidden, int32_t n_output, + int32_t output_zp, int8_t* proj_output) { + PortableMatrixBatchVectorMultiply(hidden, hidden_to_output_weights, + proj_effective_scale_a, + proj_effective_scale_b, gate_bias, n_batch, + n_hidden, n_output, output_zp, proj_output); +} + +void ApplyLayerNorm(const int16_t* input, const int16_t* layer_norm_weights, + const int32_t* bias, int32_t layer_norm_scale_a, + int32_t layer_norm_scale_b, int32_t variance_limit, + int n_batch, int n_input, int16_t* output) { + PortableApplyLayerNorm(input, layer_norm_weights, bias, layer_norm_scale_a, + layer_norm_scale_b, variance_limit, n_batch, n_input, + output); +} + +void ApplyLayerNormFloat(const int16_t* input, + const int16_t* layer_norm_weights, + int32_t layer_norm_scale_a, int32_t layer_norm_scale_b, + const int32_t* bias, int n_batch, int n_input, + int16_t* output) { + PortableApplyLayerNormFloat(input, layer_norm_weights, layer_norm_scale_a, + layer_norm_scale_b, bias, n_batch, n_input, + output); +} + +void ApplySigmoid(const int16_t* input, int32_t n_batch, int32_t n_input, + int16_t* output) { + PortableApplySigmoid(input, n_batch, n_input, output); +} + +void ApplySigmoidFloat(const int16_t* input, int32_t n_batch, int32_t n_input, + int16_t* output) { + PortableApplySigmoidFloat(input, n_batch, n_input, output); +} + +void ApplyTanh(int32_t integer_bits, const int16_t* input, int32_t n_batch, + int32_t n_input, int16_t* output) { + PortableApplyTanh(integer_bits, input, n_batch, n_input, output); +} + +void ApplyTanhFloat(const int16_t* input, int32_t n_batch, int32_t n_input, + int32_t integer_bits, int16_t* output) { + PortableApplyTanhFloat(input, n_batch, n_input, 
integer_bits, output); +} + +void CwiseMul(const int16_t* input_1, const int16_t* input_2, int n_batch, + int n_input, int shift, int16_t* output) { + PortableCwiseMul(input_1, input_2, n_batch, n_input, shift, output); +} + +void CwiseMul(const int16_t* input_1, const int16_t* input_2, + int32_t multiplier, int32_t shift, int32_t n_batch, + int32_t n_input, int32_t output_zp, int8_t* output) { + PortableCwiseMul(input_1, input_2, multiplier, shift, n_batch, n_input, + output_zp, output); +} + +void CwiseAdd(const int16_t* input_1, const int16_t* input_2, int n_batch, + int n_input, int16_t* output) { + PortableCwiseAdd(input_1, input_2, n_batch, n_input, output); +} + +void CwiseClipping(float* vector, const int v_size, + const float clipping_value) { + PortableCwiseClipping(vector, v_size, clipping_value); +} + +void CwiseClipping(int16_t* vector, const int v_size, + const int16_t clipping_value) { + PortableCwiseClipping(vector, v_size, clipping_value); +} + +void CwiseClipping(int8_t* vector, const int v_size, + const int8_t clipping_value) { + PortableCwiseClipping(vector, v_size, clipping_value); +} + +void VectorBatchVectorCwiseProductAccumulate(const int16_t* vector, int v_size, + const int16_t* batch_vector, + int n_batch, int32_t multiplier, + int shift, int16_t* result) { + PortableVectorBatchVectorCwiseProductAccumulate( + vector, v_size, batch_vector, n_batch, multiplier, shift, result); +} + +float VectorVectorDotProduct(const float* vector1, const float* vector2, + int v_size) { + return PortableVectorVectorDotProduct(vector1, vector2, v_size); +} + +void BatchVectorBatchVectorDotProduct(const int16_t* vector1, + const int16_t* vector2, int v_size, + int n_batch, int32_t* result) { + PortableBatchVectorBatchVectorDotProduct(vector1, vector2, v_size, n_batch, + result); +} + +void Sub1Vector(const float* vector, int v_size, float* result) { + PortableSub1Vector(vector, v_size, result); +} + +void Sub1Vector(const int16_t* vector, int v_size, int16_t* 
result) { + PortableSub1Vector(vector, v_size, result); +} + +// Multiply all elements of vector with a scalar. +void VectorScalarMultiply(const int8_t* vector, int v_size, float scale, + float* result) { + PortableVectorScalarMultiply(vector, v_size, scale, result); +} + +void ReductionSumVector(const float* input_vector, float* output_vector, + int output_size, int reduction_size) { + PortableReductionSumVector(input_vector, output_vector, output_size, + reduction_size); +} + +void ReductionSumVector(const int32_t* input_vector, int32_t* output_vector, + int output_size, int reduction_size) { + PortableReductionSumVector(input_vector, output_vector, output_size, + reduction_size); +} + +void ReductionSumVector(const int8_t* input_vector, int32_t* output_vector, + int output_size, int reduction_size) { + PortableReductionSumVector(input_vector, output_vector, output_size, + reduction_size); +} + +void MeanStddevNormalization(const float* input_vector, float* output_vector, + int v_size, int n_batch) { + PortableMeanStddevNormalization(input_vector, output_vector, v_size, n_batch); +} + +void TwoGateSaturatingAdd(const int8_t* input, int8_t input_zp, + const int8_t* recurrent, int8_t recurrent_zp, + int32_t input_effective_scale_a, + int32_t input_effective_scale_b, + int32_t recurrent_effective_scale_a, + int32_t recurrent_effective_scale_b, int32_t n_batch, + int32_t n_cell, int16_t* output) { + PortableTwoGateSaturatingAdd( + input, input_zp, recurrent, recurrent_zp, input_effective_scale_a, + input_effective_scale_b, recurrent_effective_scale_a, + recurrent_effective_scale_b, n_batch, n_cell, output); +} + +} // namespace tensor_utils +} // namespace tflite + +#endif // TENSORFLOW_LITE_KERNELS_INTERNAL_REFERENCE_PORTABLE_TENSOR_UTILS_H_ diff --git a/tensorflow/lite/kernels/internal/reference/portable_tensor_utils_impl.h b/tensorflow/lite/kernels/internal/reference/portable_tensor_utils_impl.h new file mode 100644 index 0000000..6c404d5 --- /dev/null +++ 
b/tensorflow/lite/kernels/internal/reference/portable_tensor_utils_impl.h @@ -0,0 +1,244 @@ +/* Copyright 2019 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ +#ifndef TENSORFLOW_LITE_KERNELS_INTERNAL_REFERENCE_PORTABLE_TENSOR_UTILS_IMPL_H_ +#define TENSORFLOW_LITE_KERNELS_INTERNAL_REFERENCE_PORTABLE_TENSOR_UTILS_IMPL_H_ + +#include +#include + +#if defined(_MSC_VER) +#define __restrict__ __restrict +#endif + +namespace tflite { + +// Not all backends support CpuBackendContext usage, so forward declare to avoid +// pulling in its implementation. +class CpuBackendContext; + +namespace tensor_utils { + +template +bool PortableIsZeroVector(const T* vector, int v_size) { + for (int i = 0; i < v_size; ++i) { + if (vector[i] != 0) { + return false; + } + } + return true; +} + +void PortableSymmetricQuantizeFloats(const float* values, const int size, + int8_t* quantized_values, float* min_value, + float* max_value, float* scaling_factor); + +void PortableSymmetricQuantizeFloats(const float* values, const int size, + int8_t* quantized_values, float min_value, + float max_value, float* scaling_factor); + +void PortableAsymmetricQuantizeFloats(const float* values, const int size, + int8_t* quantized_values, + float* scaling_factor, int32_t* offset); + +// Multiply a matrix by a batch vector, and store results in a batch-size +// vector. 
+void PortableMatrixBatchVectorMultiplyAccumulate(const float* matrix, + int m_rows, int m_cols, + const float* vector, + int n_batch, float* result); + +void PortableMatrixBatchVectorMultiplyAccumulate( + const int8_t* __restrict__ matrix, const int m_rows, const int m_cols, + const int8_t* __restrict__ vectors, const float* scaling_factors, + int n_batch, float* __restrict__ result); + +void PortableMatrixBatchVectorMultiplyAccumulate( + const int8_t* __restrict__ matrix, const int m_rows, const int m_cols, + const int8_t* __restrict__ vectors, const float* scaling_factors, + int n_batch, float* __restrict__ result, const float* per_channel_scale, + const int32_t* input_offset, int32_t* scratch, int32_t* row_sums, + bool* compute_row_sums, CpuBackendContext* context); + +void PortableMatrixBatchVectorMultiplyAccumulate( + const int8_t* __restrict__ matrix, const int m_rows, const int m_cols, + const int8_t* __restrict__ vector, const float* scaling_factors, + int n_batch, int32_t* scratch, float* __restrict__ result, + CpuBackendContext* context); + +void PortableSparseMatrixBatchVectorMultiplyAccumulate1x4( + const float* __restrict__ matrix, const int32_t* __restrict__ segments, + const int32_t* __restrict__ indices, int m_rows, int m_cols, + const float* __restrict__ vector, int n_batch, float* __restrict__ result); + +void PortableSparseMatrixBatchVectorMultiplyAccumulate( + const float* __restrict__ matrix, const uint8_t* __restrict__ ledger, + int m_rows, int m_cols, const float* __restrict__ vector, int n_batch, + float* __restrict__ result); + +void PortableSparseMatrixBatchVectorMultiplyAccumulate1x16( + const int8_t* __restrict__ matrix, const int32_t* __restrict__ segments, + const int32_t* __restrict__ indices, int m_rows, int m_cols, + const int8_t* __restrict__ vector, const int32_t* __restrict__ bias_vector, + int n_batch, const int32_t input_offset, const int32_t output_multiplier, + const int32_t output_shift, const int32_t output_offset, + const 
int32_t output_activation_min, const int32_t output_activation_max, + int8_t* __restrict__ result); + +void PortableSparseMatrixBatchVectorMultiplyAccumulate( + const int8_t* __restrict__ matrix, const uint8_t* ledger, const int m_rows, + const int m_cols, const int8_t* __restrict__ vectors, + const float* scaling_factors, int n_batch, float* __restrict__ result); + +// Dot product of two vectors. +float PortableVectorVectorDotProduct(const float* vector1, const float* vector2, + int v_size); + +void PortableBatchVectorBatchVectorDotProduct(const int16_t* vector1, + const int16_t* vector2, + int v_size, int n_batch, + int32_t* result); + +void PortableVectorBatchVectorCwiseProductAccumulate( + const int16_t* vector, int v_size, const int16_t* batch_vector, int n_batch, + int32_t multiplier, int shift, int16_t* result); + +void PortableMatrixBatchVectorMultiplyAccumulate( + const int8_t* input, const int32_t* bias, + const int8_t* input_to_gate_weights, int32_t multiplier, int32_t shift, + int32_t n_batch, int32_t n_input, int32_t n_output, int32_t output_zp, + int32_t* scratch, int16_t* output, CpuBackendContext* context); + +void PortableMatrixBatchVectorMultiplyAccumulate( + const int8_t* input, const int32_t* bias, + const int8_t* input_to_gate_weights, int32_t multiplier, int32_t shift, + int32_t n_batch, int32_t n_input, int32_t n_output, int32_t output_zp, + int32_t* scratch, int8_t* output, CpuBackendContext* context); + +void PortableMatrixBatchVectorMultiply(const int8_t* input, + int32_t input_zeropoint, + const int8_t* input_to_gate_weights, + int32_t input_to_gate_effective_scale_a, + int32_t input_to_gate_effective_scale_b, + int32_t n_batch, int32_t n_input, + int32_t n_cell, int8_t* gate_output, + int8_t gate_output_zp); + +void PortableMatrixBatchVectorMultiply( + const int16_t* hidden, const int8_t* hidden_to_output_weights, + int32_t proj_effective_scale_a, int32_t proj_effective_scale_b, + const int32_t* gate_bias, int32_t n_batch, int32_t 
n_hidden, + int32_t n_output, int32_t output_zp, int8_t* proj_output); + +void PortableMatrixScalarMultiplyAccumulate(const int8_t* matrix, + int32_t scalar, int32_t n_row, + int32_t n_col, int32_t* output); + +void PortableApplyLayerNorm(const int16_t* input, + const int16_t* layer_norm_weights, + const int32_t* bias, int32_t layer_norm_scale_a, + int32_t layer_norm_scale_b, int32_t variance_limit, + int n_batch, int n_input, int16_t* output); + +void PortableApplyLayerNormFloat(const int16_t* input, + const int16_t* layer_norm_weights, + int32_t layer_norm_scale_a, + int32_t layer_norm_scale_b, + const int32_t* bias, int n_batch, int n_input, + int16_t* output); + +void PortableApplySigmoid(const int16_t* input, int32_t n_batch, + int32_t n_input, int16_t* output); + +void PortableApplySigmoidFloat(const int16_t* input, int32_t n_batch, + int32_t n_input, int16_t* output); + +void PortableApplyTanh(int32_t integer_bits, const int16_t* input, + int32_t n_batch, int32_t n_input, int16_t* output); + +void PortableApplyTanhFloat(const int16_t* input, int32_t n_batch, + int32_t n_input, int32_t integer_bits, + int16_t* output); + +void PortableCwiseMul(const int16_t* input_1, const int16_t* input_2, + int n_batch, int n_input, int shift, int16_t* output); + +void PortableCwiseMul(const int16_t* input_1, const int16_t* input_2, + int32_t multiplier, int32_t shift, int32_t n_batch, + int32_t n_input, int32_t output_zp, int8_t* output); + +void PortableCwiseAdd(const int16_t* input_1, const int16_t* input_2, + int n_batch, int n_input, int16_t* output); + +template +void PortableCwiseClipping(T* vector, const int v_size, + const T& clipping_value) { + for (int i = 0; i < v_size; i++) { + vector[i] = std::max(std::min(clipping_value, vector[i]), + static_cast(-clipping_value)); + } +} + +// Batch vector initialization with another vector. 
void PortableVectorBatchVectorAssign(const float* vector, int v_size,
                                     int n_batch, float* batch_vector);

// Compute "1.0f - elements of vector" (used in CIFG).
void PortableSub1Vector(const float* vector, int v_size, float* result);

void PortableSub1Vector(const int16_t* vector, int v_size, int16_t* result);

// Multiply all elements of vector with a scalar.
void PortableVectorScalarMultiply(const int8_t* vector, int v_size,
                                  float scale, float* result);

// Reduce-sum on a vector:
// input_vector: pointer to input vector.
// output_vector: pointer to vector.
// output_size: output vector size.
// reduction_size: number of consecutive elements from input vector which are
// added to get one element of output.
// (Template header restored: the mangled source had `template` with no
// parameter list; INPUT/OUTPUT names taken from the surviving signature.)
template <typename INPUT, typename OUTPUT>
void PortableReductionSumVector(const INPUT* input_vector,
                                OUTPUT* output_vector, int output_size,
                                int reduction_size) {
  for (int o = 0; o < output_size; o++) {
    OUTPUT result = 0;
    for (int r = 0; r < reduction_size; r++) {
      result += input_vector[r];
    }
    output_vector[o] = result;
    input_vector += reduction_size;
  }
}

// Layer norm for each batch.
void PortableMeanStddevNormalization(const float* __restrict__ input_vector,
                                     float* __restrict__ output_vector,
                                     int v_size, int n_batch);

// Saturate Add.
+void PortableTwoGateSaturatingAdd(const int8_t* input, int8_t input_zp, + const int8_t* recurrent, int8_t recurrent_zp, + int32_t input_effective_scale_a, + int32_t input_effective_scale_b, + int32_t recurrent_effective_scale_a, + int32_t recurrent_effective_scale_b, + int32_t n_batch, int32_t n_cell, + int16_t* output); + +} // namespace tensor_utils +} // namespace tflite + +#endif // TENSORFLOW_LITE_KERNELS_INTERNAL_REFERENCE_PORTABLE_TENSOR_UTILS_IMPL_H_ diff --git a/tensorflow/lite/kernels/internal/reference/prelu.h b/tensorflow/lite/kernels/internal/reference/prelu.h new file mode 100644 index 0000000..aa9901d --- /dev/null +++ b/tensorflow/lite/kernels/internal/reference/prelu.h @@ -0,0 +1,111 @@ +/* Copyright 2019 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ +#ifndef TENSORFLOW_LITE_KERNELS_INTERNAL_REFERENCE_PRELU_H_ +#define TENSORFLOW_LITE_KERNELS_INTERNAL_REFERENCE_PRELU_H_ + +#include + +#include "tensorflow/lite/kernels/internal/common.h" +#include "tensorflow/lite/kernels/internal/compatibility.h" +#include "tensorflow/lite/kernels/internal/types.h" + +namespace tflite { + +namespace reference_ops { + +// Broadcast prelu to output_shape for quantized uint8_t/int8_t data. 
+template +inline void BroadcastPrelu4DSlow( + const PreluParams& params, const RuntimeShape& input_shape, + const T* input_data, const RuntimeShape& alpha_shape, const T* alpha_data, + const RuntimeShape& output_shape, T* output_data) { + TFLITE_DCHECK_LE(input_shape.DimensionsCount(), 4); + TFLITE_DCHECK_LE(alpha_shape.DimensionsCount(), 4); + TFLITE_DCHECK_LE(output_shape.DimensionsCount(), 4); + const RuntimeShape extended_output_shape = + RuntimeShape::ExtendedShape(4, output_shape); + NdArrayDesc<4> desc1; + NdArrayDesc<4> desc2; + NdArrayDescsForElementwiseBroadcast(input_shape, alpha_shape, &desc1, &desc2); + + for (int b = 0; b < extended_output_shape.Dims(0); ++b) { + for (int y = 0; y < extended_output_shape.Dims(1); ++y) { + for (int x = 0; x < extended_output_shape.Dims(2); ++x) { + for (int c = 0; c < extended_output_shape.Dims(3); ++c) { + int output_index = Offset(extended_output_shape, b, y, x, c); + int input_index = SubscriptToIndex(desc1, b, y, x, c); + const int32_t input_value = + params.input_offset + input_data[input_index]; + int32_t output_value; + if (input_value >= 0) { + output_value = MultiplyByQuantizedMultiplier( + input_value, params.output_multiplier_1, params.output_shift_1); + } else { + auto alpha_index = SubscriptToIndex(desc2, b, y, x, c); + const int32_t alpha_value = + params.alpha_offset + alpha_data[alpha_index]; + + output_value = MultiplyByQuantizedMultiplier( + input_value * alpha_value, params.output_multiplier_2, + params.output_shift_2); + } + output_value += params.output_offset; + + const int32_t quantized_min = std::numeric_limits::min(); + const int32_t quantized_max = std::numeric_limits::max(); + const int32_t clamped_output = + std::min(quantized_max, std::max(quantized_min, output_value)); + output_data[output_index] = static_cast(clamped_output); + } + } + } + } +} + +template +inline void Prelu(const PreluParams& params, const RuntimeShape& input_shape, + const T* input_data, const RuntimeShape& 
alpha_shape, + const T* alpha_data, const RuntimeShape& output_shape, + T* output_data) { + const int32_t quantized_min = std::numeric_limits::min(); + const int32_t quantized_max = std::numeric_limits::max(); + + const int flat_size = + MatchingElementsSize(input_shape, alpha_shape, output_shape); + for (int i = 0; i < flat_size; ++i) { + const int32_t input_value = params.input_offset + input_data[i]; + int32_t output_value; + if (input_value >= 0) { + output_value = MultiplyByQuantizedMultiplier( + input_value, params.output_multiplier_1, params.output_shift_1); + } else { + const int32_t alpha_value = params.alpha_offset + alpha_data[i]; + + output_value = MultiplyByQuantizedMultiplier(input_value * alpha_value, + params.output_multiplier_2, + params.output_shift_2); + } + output_value += params.output_offset; + + const int32_t clamped_output = + std::min(quantized_max, std::max(quantized_min, output_value)); + output_data[i] = static_cast(clamped_output); + } +} + +} // namespace reference_ops +} // namespace tflite + +#endif // TENSORFLOW_LITE_KERNELS_INTERNAL_REFERENCE_PRELU_H_ diff --git a/tensorflow/lite/kernels/internal/reference/process_broadcast_shapes.h b/tensorflow/lite/kernels/internal/reference/process_broadcast_shapes.h new file mode 100644 index 0000000..bda2769 --- /dev/null +++ b/tensorflow/lite/kernels/internal/reference/process_broadcast_shapes.h @@ -0,0 +1,140 @@ +/* Copyright 2019 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ +#ifndef TENSORFLOW_LITE_KERNELS_INTERNAL_REFERENCE_PROCESS_BROADCAST_SHAPES_H_ +#define TENSORFLOW_LITE_KERNELS_INTERNAL_REFERENCE_PROCESS_BROADCAST_SHAPES_H_ + +#include + +#include "tensorflow/lite/kernels/internal/types.h" + +namespace tflite { + +namespace reference_ops { + +// Consolidates dimensions in broadcast inputs, checks for five-fold pattern. +// +// For example, if sequence of dimensions of one input is +// ..., 1, 3, 1, 7, 9, 5,... and the other is ..., 2, 3, 1, 7, 1, 1, ... +// we can consolidate these as +// ..., 1, 3*7, 9*5, ... and 2, 3*7, 1. +// +// The category is updated in the less-frequent case of shapes that are +// not suited to a fivefold-loop broadcast. +// +// Falls back to generic pattern when it does not know how to process properly. +// +// Returns true iff there is some sort of broadcast, which includes five-fold +// patterns and falling back to generic broadcast. +inline bool ProcessBroadcastShapes(const RuntimeShape& shape0, + const RuntimeShape& shape1, + tflite::ArithmeticParams* params) { + const int dims_count = + std::max(shape0.DimensionsCount(), shape1.DimensionsCount()); + + params->broadcast_category = BroadcastableOpCategory::kGenericBroadcast; + RuntimeShape scalar_shape(dims_count, 1); + + auto extended_shape0 = RuntimeShape::ExtendedShape(dims_count, shape0); + auto extended_shape1 = RuntimeShape::ExtendedShape(dims_count, shape1); + + // Check for "exact" match, implicitly accepting any scalar shapes. 
+ if (extended_shape0 == extended_shape1) { + params->broadcast_category = BroadcastableOpCategory::kNonBroadcast; + return false; + } + + for (int i = dims_count - 1; i >= 0; --i) { + if (extended_shape0.Dims(i) == extended_shape1.Dims(i)) { + continue; + } else if (extended_shape0.Dims(i) == 1) { + params->broadcast_category = + BroadcastableOpCategory::kFirstInputBroadcastsFast; + break; + } else if (extended_shape1.Dims(i) == 1) { + params->broadcast_category = + BroadcastableOpCategory::kSecondInputBroadcastsFast; + break; + } else { + // This case is erroneous: there is a dimension that does not match and + // is not a broadcast from one shape to the other. + params->broadcast_category = BroadcastableOpCategory::kGenericBroadcast; + return true; + } + } + + if (params->broadcast_category != + BroadcastableOpCategory::kFirstInputBroadcastsFast && + params->broadcast_category != + BroadcastableOpCategory::kSecondInputBroadcastsFast) { + // This is unreachable because at least one else clause in the above loop + // must be reached. + TFLITE_DCHECK(false); + params->broadcast_category = BroadcastableOpCategory::kNonBroadcast; + return false; + } + + // From this point it is assumed contractually that corresponding dimensions + // in shape0 and shape1 are either (a) equal or (b) one or other equals 1. + const bool swap_inputs = params->broadcast_category == + BroadcastableOpCategory::kSecondInputBroadcastsFast; + const RuntimeShape* shape_a = + swap_inputs ? &extended_shape1 : &extended_shape0; + const RuntimeShape* shape_b = + swap_inputs ? &extended_shape0 : &extended_shape1; + + int i = dims_count - 1; + params->broadcast_shape[0] = 1; + params->broadcast_shape[1] = 1; + params->broadcast_shape[2] = 1; + params->broadcast_shape[3] = 1; + params->broadcast_shape[4] = 1; + // y_0 is greedy: include dims if both or neither equal 1: in other words, + // test for equality rather than (shape_a->Dims(i) != 1). 
+ while (i >= 0 && shape_a->Dims(i) == shape_b->Dims(i)) { + params->broadcast_shape[4] *= shape_b->Dims(i); + --i; + } + // Here either input_a or input_b has dim of 1 (if i >= 0). If it is input_b + // that has the unit dimension, the next two loops are not entered. + while (i >= 0 && shape_a->Dims(i) == 1) { + params->broadcast_shape[3] *= shape_b->Dims(i); + --i; + } + while (i >= 0 && shape_a->Dims(i) == shape_b->Dims(i)) { + params->broadcast_shape[2] *= shape_a->Dims(i); + --i; + } + // Here either input_a or input_b has dim of 1 (if i >= 0). + while (i >= 0 && shape_b->Dims(i) == 1) { + params->broadcast_shape[1] *= shape_a->Dims(i); + --i; + } + while (i >= 0 && shape_a->Dims(i) == shape_b->Dims(i)) { + params->broadcast_shape[0] *= shape_b->Dims(i); + --i; + } + + // Rarer case is when the broadcast dimensions cannot be handled by a fivefold + // loop. + if (i >= 0) { + params->broadcast_category = BroadcastableOpCategory::kGenericBroadcast; + } + return true; +} + +} // namespace reference_ops +} // namespace tflite + +#endif // TENSORFLOW_LITE_KERNELS_INTERNAL_REFERENCE_PROCESS_BROADCAST_SHAPES_H_ diff --git a/tensorflow/lite/kernels/internal/reference/quantize.h b/tensorflow/lite/kernels/internal/reference/quantize.h new file mode 100644 index 0000000..f304b64 --- /dev/null +++ b/tensorflow/lite/kernels/internal/reference/quantize.h @@ -0,0 +1,89 @@ +/* Copyright 2019 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/ +#ifndef TENSORFLOW_LITE_KERNELS_INTERNAL_REFERENCE_QUANTIZE_H_ +#define TENSORFLOW_LITE_KERNELS_INTERNAL_REFERENCE_QUANTIZE_H_ + +#include +#include +#include + +#include "tensorflow/lite/kernels/internal/common.h" +#include "tensorflow/lite/kernels/internal/compatibility.h" +#include "tensorflow/lite/kernels/internal/cppmath.h" +#include "tensorflow/lite/kernels/internal/types.h" + +namespace tflite { + +namespace reference_ops { + +template +inline void AffineQuantize(const tflite::QuantizationParams& op_params, + const RuntimeShape& input_shape, + const InputT* input_data, + const RuntimeShape& output_shape, + OutputT* output_data) { + const int32_t zero_point = op_params.zero_point; + const double scale = op_params.scale; + const int flat_size = MatchingFlatSize(input_shape, output_shape); + static constexpr int32_t min_val = std::numeric_limits::min(); + static constexpr int32_t max_val = std::numeric_limits::max(); + + for (int i = 0; i < flat_size; i++) { + const InputT val = input_data[i]; + int32_t unclamped = + static_cast(TfLiteRound(val / static_cast(scale))) + + zero_point; + int32_t clamped = std::min(std::max(unclamped, min_val), max_val); + output_data[i] = clamped; + } +} + +// Quantizes per-channel. +template +inline void PerChannelQuantize( + const tflite::PerChannelQuantizationParams& op_params, + const RuntimeShape& input_shape, const InputT* input_data, + const RuntimeShape& output_shape, OutputT* output_data) { + // Ensure flat size is same. 
+ MatchingFlatSize(input_shape, output_shape); + + const int32_t* zero_point = op_params.zero_point; + const float* scale = op_params.scale; + const int32_t quantized_dimension = op_params.quantized_dimension; + const int32_t num_dims = input_shape.DimensionsCount(); + const int32_t* dims_data = input_shape.DimsData(); + std::vector current_dim(num_dims, 0); + static constexpr int32_t min_val = std::numeric_limits::min(); + static constexpr int32_t max_val = std::numeric_limits::max(); + + do { + size_t offset = + ReducedOutputOffset(num_dims, reinterpret_cast(dims_data), + current_dim.data(), 0, nullptr); + const InputT val = input_data[offset]; + const int channel = current_dim[quantized_dimension]; + int32_t unclamped = static_cast(TfLiteRound( + val / static_cast(scale[channel]))) + + zero_point[channel]; + int32_t clamped = std::min(std::max(unclamped, min_val), max_val); + output_data[offset] = static_cast(clamped); + } while (NextIndex(num_dims, reinterpret_cast(dims_data), + current_dim.data())); +} + +} // namespace reference_ops + +} // namespace tflite +#endif // TENSORFLOW_LITE_KERNELS_INTERNAL_REFERENCE_QUANTIZE_H_ diff --git a/tensorflow/lite/kernels/internal/reference/reduce.h b/tensorflow/lite/kernels/internal/reference/reduce.h new file mode 100644 index 0000000..5b795ea --- /dev/null +++ b/tensorflow/lite/kernels/internal/reference/reduce.h @@ -0,0 +1,491 @@ +/* Copyright 2019 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/ +#ifndef TENSORFLOW_LITE_KERNELS_INTERNAL_REFERENCE_REDUCE_H_ +#define TENSORFLOW_LITE_KERNELS_INTERNAL_REFERENCE_REDUCE_H_ + +#include + +#include "ruy/profiler/instrumentation.h" // from @ruy +#include "tensorflow/lite/kernels/internal/common.h" +#include "tensorflow/lite/kernels/internal/cppmath.h" +#include "tensorflow/lite/kernels/internal/max.h" +#include "tensorflow/lite/kernels/internal/min.h" +#include "tensorflow/lite/kernels/internal/quantization_util.h" +#include "tensorflow/lite/kernels/internal/types.h" + +// Check if the reduction at index is the first one along the dimensions given +// in axis. +inline bool IsFirstReduction(const int* index, const int num_axis, + const int* axis) { + if (num_axis == 0) { + return true; + } + + TFLITE_DCHECK(index != nullptr); + TFLITE_DCHECK(axis != nullptr); + for (int axis_idx = 0; axis_idx < num_axis; ++axis_idx) { + if (index[axis[axis_idx]] != 0) { + return false; + } + } + + return true; +} + +namespace tflite { + +namespace reference_ops { + +// A generic reduce method that can be used for reduce_sum, reduce_mean, etc. +// This method iterates through input data and reduce elements along the +// dimensions given in axis. +template +inline bool Reduce(const In* input_data, const int* input_dims, + const int* output_dims, const int input_num_dims, + const int output_num_dims, const int* axis, + const int num_axis, int* input_iter, + Out reducer(Out current, const In in), Out* output_data) { + // Reset input iterator. + for (int idx = 0; idx < input_num_dims; ++idx) { + input_iter[idx] = 0; + } + // Iterate through input_data. 
+ do { + size_t input_offset = + ReducedOutputOffset(input_num_dims, input_dims, input_iter, 0, nullptr); + size_t output_offset = ReducedOutputOffset(input_num_dims, input_dims, + input_iter, num_axis, axis); + output_data[output_offset] = + reducer(output_data[output_offset], input_data[input_offset]); + } while (NextIndex(input_num_dims, input_dims, input_iter)); + return true; +} + +// Similar to above Reduce function but takes two reducer functions. +// The 'reducer_first' is called with the first value of the reduction, +// 'reducer_next' is then called for all the others. +template +inline bool Reduce(const In* input_data, const int* input_dims, + const int* output_dims, const int input_num_dims, + const int output_num_dims, const int* axis, + const int num_axis, int* input_iter, + const std::function& reducer_first, + const std::function& reducer_next, + Out* output_data) { + // Reset input iterator. + for (int idx = 0; idx < input_num_dims; ++idx) { + input_iter[idx] = 0; + } + // Iterate through input_data. + do { + size_t input_offset = + ReducedOutputOffset(input_num_dims, input_dims, input_iter, 0, nullptr); + size_t output_offset = ReducedOutputOffset(input_num_dims, input_dims, + input_iter, num_axis, axis); + if (IsFirstReduction(input_iter, num_axis, axis)) { + output_data[output_offset] = reducer_first(input_data[input_offset]); + } else { + output_data[output_offset] = + reducer_next(output_data[output_offset], input_data[input_offset]); + } + } while (NextIndex(input_num_dims, input_dims, input_iter)); + return true; +} + +// This method parses the input 'axis' to remove duplicates and handle negative +// values, and returns a valid 'out_axis' +inline bool ResolveAxis(const int num_dims, const int* axis, + const int64_t num_axis, int* out_axis, + int* out_num_axis) { + *out_num_axis = 0; // Just in case. + // Short-circuit axis resolution for scalars; the axis will go unused. 
+ if (num_dims == 0) { + return true; + } + // o(n^2) is fine since out_num_axis should be really small, mostly <= 4 + for (int64_t idx = 0; idx < num_axis; ++idx) { + // Handle negative index. A positive index 'p_idx' can be represented as a + // negative index 'n_idx' as: n_idx = p_idx-num_dims + // eg: For num_dims=3, [0, 1, 2] is the same as [-3, -2, -1] */ + int current = axis[idx] < 0 ? (axis[idx] + num_dims) : axis[idx]; + TFLITE_DCHECK(current >= 0 && current < num_dims); + if (current < 0 || current >= num_dims) { + return false; + } + bool is_dup = false; + for (int j = 0; j < *out_num_axis; ++j) { + if (out_axis[j] == current) { + is_dup = true; + break; + } + } + if (!is_dup) { + out_axis[*out_num_axis] = current; + *out_num_axis += 1; + } + } + return true; +} + +// This method expects that output_data has been initialized. +template +inline bool ReduceSumImpl(const In* input_data, const int* input_dims, + const int* output_dims, const int input_num_dims, + const int output_num_dims, const int* axis, + const int num_axis, int* input_iter, + Out* output_data) { + auto reducer = [](const Out current, const In in) -> Out { + const Out actual_in = static_cast(in); + return current + actual_in; + }; + return Reduce(input_data, input_dims, output_dims, input_num_dims, + output_num_dims, axis, num_axis, input_iter, reducer, + output_data); +} + +template +inline bool InitTensorDataForReduce(const int* dims, const int num_dims, + const T init_value, T* data) { + size_t num_elements = 1; + for (int idx = 0; idx < num_dims; ++idx) { + size_t current = static_cast(dims[idx]); + // Overflow prevention. + if (current > 0 && + num_elements > std::numeric_limits::max() / current) { + return false; + } + num_elements *= current; + } + for (size_t idx = 0; idx < num_elements; ++idx) { + data[idx] = init_value; + } + return true; +} + +// Computes the generic value (i.e., sum/max/min/prod) of elements across +// dimensions given in axis. 
It needs to pass in init_value and reducer. +template +inline bool ReduceGeneric(const T* input_data, const int* input_dims, + const int input_num_dims, T* output_data, + const int* output_dims, const int output_num_dims, + const int* axis, const int64_t num_axis_dimensions, + bool keep_dims, int* temp_index, int* resolved_axis, + T init_value, + T reducer(const T current, const T in)) { + // Reset output data. + if (!InitTensorDataForReduce(output_dims, output_num_dims, init_value, + output_data)) { + return false; + } + + // Return early when input shape has zero dim. This is done after initializing + // data for output tensor because there are cases that the input tensor is + // empty but output tensor is not. In that case, output tensor should be + // filled with init_value. + for (int i = 0; i < input_num_dims; ++i) { + if (input_dims[i] == 0) return true; + } + + // Resolve axis. + int num_resolved_axis = 0; + if (!ResolveAxis(input_num_dims, axis, num_axis_dimensions, resolved_axis, + &num_resolved_axis)) { + return false; + } + + return Reduce(input_data, input_dims, output_dims, input_num_dims, + output_num_dims, resolved_axis, num_resolved_axis, + temp_index, reducer, output_data); +} + +// Computes the mean of elements across dimensions given in axis. +// It does so in two stages, first calculates the sum of elements along the axis +// then divides it by the number of element in axis. +template +inline bool Mean(const T* input_data, const int* input_dims, + const int input_num_dims, T* output_data, + const int* output_dims, const int output_num_dims, + const int* axis, const int num_axis_dimensions, bool keep_dims, + int* temp_index, int* resolved_axis, U* temp_sum) { + ruy::profiler::ScopeLabel label("Mean"); + // Reset output data. + size_t num_outputs = 1; + for (int idx = 0; idx < output_num_dims; ++idx) { + size_t current = static_cast(output_dims[idx]); + // Overflow prevention. 
+ if (num_outputs > std::numeric_limits::max() / current) { + return false; + } + num_outputs *= current; + } + for (size_t idx = 0; idx < num_outputs; ++idx) { + output_data[idx] = T(); + temp_sum[idx] = U(); + } + + // Resolve axis. + int num_resolved_axis = 0; + if (!ResolveAxis(input_num_dims, axis, num_axis_dimensions, resolved_axis, + &num_resolved_axis)) { + return false; + } + + if (!ReduceSumImpl(input_data, input_dims, output_dims, input_num_dims, + output_num_dims, resolved_axis, num_resolved_axis, + temp_index, temp_sum)) { + return false; + } + + // Calculate mean by dividing output_data by num of aggregated element. + size_t num_elements_in_axis = 1; + for (int idx = 0; idx < num_resolved_axis; ++idx) { + size_t current = static_cast(input_dims[resolved_axis[idx]]); + // Overflow prevention. + if (current > (std::numeric_limits::max() / num_elements_in_axis)) { + return false; + } + num_elements_in_axis *= current; + } + + if (num_elements_in_axis > 0) { + for (size_t idx = 0; idx < num_outputs; ++idx) { + output_data[idx] = + static_cast(temp_sum[idx] / static_cast(num_elements_in_axis)); + } + } + return true; +} + +inline void Mean(const tflite::MeanParams& op_params, + const RuntimeShape& unextended_input_shape, + const float* input_data, + const RuntimeShape& unextended_output_shape, + float* output_data) { + ruy::profiler::ScopeLabel label("Mean4D"); + + // Current implementation only supports dimension equals 4 and simultaneous + // reduction over width and height. 
+ TFLITE_CHECK_EQ(unextended_input_shape.DimensionsCount(), 4); + TFLITE_CHECK_LE(unextended_output_shape.DimensionsCount(), 4); + const RuntimeShape input_shape = + RuntimeShape::ExtendedShape(4, unextended_input_shape); + const RuntimeShape output_shape = + RuntimeShape::ExtendedShape(4, unextended_output_shape); + + const int output_batch = output_shape.Dims(0); + const int output_height = output_shape.Dims(1); + const int output_width = output_shape.Dims(2); + const int output_depth = output_shape.Dims(3); + + const int input_height = input_shape.Dims(1); + const int input_width = input_shape.Dims(2); + + TFLITE_CHECK_EQ(op_params.axis_count, 2); + TFLITE_CHECK((op_params.axis[0] == 1 && op_params.axis[1] == 2) || + (op_params.axis[0] == 2 && op_params.axis[1] == 1)); + TFLITE_CHECK_EQ(output_height, 1); + TFLITE_CHECK_EQ(output_width, 1); + + for (int out_b = 0; out_b < output_batch; ++out_b) { + for (int out_d = 0; out_d < output_depth; ++out_d) { + float value = 0; + for (int in_h = 0; in_h < input_height; ++in_h) { + for (int in_w = 0; in_w < input_width; ++in_w) { + value += input_data[Offset(input_shape, out_b, in_h, in_w, out_d)]; + } + } + output_data[Offset(output_shape, out_b, 0, 0, out_d)] = + value / (input_width * input_height); + } + } +} + +// Computes the mean of elements across dimensions given in axis. +// It does so in two stages, first calculates the sum of elements along the axis +// then divides it by the number of element in axis for quantized values. 
+template +inline bool QuantizedMeanOrSum(const T* input_data, int32_t input_zero_point, + const int* input_dims, const int input_num_dims, + T* output_data, int32_t output_multiplier, + int output_shift, int32_t output_zero_point, + const int* output_dims, + const int output_num_dims, const int* axis, + const int num_axis_dimensions, bool keep_dims, + int* temp_index, int* resolved_axis, U* temp_sum, + bool compute_sum) { + const int32_t kMinValue = std::numeric_limits::min(); + const int32_t kMaxValue = std::numeric_limits::max(); + const bool uint8_case = std::is_same::value; + const bool int16_case = std::is_same::value; + if (uint8_case) { + ruy::profiler::ScopeLabel label(compute_sum ? "Sum/Uint8" : "Mean/Uint8"); + } else if (int16_case) { + ruy::profiler::ScopeLabel label(compute_sum ? "Sum/Int16" : "Mean/Int16"); + } else { + ruy::profiler::ScopeLabel label(compute_sum ? "Sum/Int8" : "Mean/Int8"); + } + // Reset output data. + size_t num_outputs = 1; + for (int idx = 0; idx < output_num_dims; ++idx) { + size_t current = static_cast(output_dims[idx]); + // Overflow prevention. + if (num_outputs > std::numeric_limits::max() / current) { + return false; + } + num_outputs *= current; + } + for (size_t idx = 0; idx < num_outputs; ++idx) { + output_data[idx] = T(); + temp_sum[idx] = U(); + } + + // Return early when input shape has zero dim. This is done after initializing + // data for output tensor because there are cases that the input tensor is + // empty but output tensor is not. In that case, output tensor should be + // filled with init_value. + for (int i = 0; i < input_num_dims; ++i) { + if (input_dims[i] == 0) return true; + } + + // Resolve axis. 
+ int num_resolved_axis = 0; + if (!ResolveAxis(input_num_dims, axis, num_axis_dimensions, resolved_axis, + &num_resolved_axis)) { + return false; + } + + if (!ReduceSumImpl(input_data, input_dims, output_dims, input_num_dims, + output_num_dims, resolved_axis, num_resolved_axis, + temp_index, temp_sum)) { + return false; + } + + // Calculate mean by dividing output_data by num of aggregated element. + int64_t num_elements_in_axis = 1; + for (int idx = 0; idx < num_resolved_axis; ++idx) { + size_t current = static_cast(input_dims[resolved_axis[idx]]); + // Overflow prevention. + if (current > static_cast(std::numeric_limits::max() / + num_elements_in_axis)) { + return false; + } + num_elements_in_axis *= current; + } + + if (num_elements_in_axis == 0) { + return true; + } + + // Readapt output rescaling when calculating the mean to integrate a + // 1/num_elements_in_axis multiplier. + if (!compute_sum) { + TFLITE_DCHECK_GE(num_elements_in_axis, 0); + int shift = + 63 - CountLeadingZeros(static_cast(num_elements_in_axis)); + // To avoid any overflow risk 'shift' should be <= 32 and to satisfy + // 'MultiplyByQuantizedMultiplier' pre-conditions 'output_shift - shift' + // should be >= -31. Clamp the value at the price of some precision loss. 
+ shift = std::min(shift, 32); + shift = std::min(shift, 31 + output_shift); + output_multiplier = static_cast( + (static_cast(output_multiplier) << shift) / + num_elements_in_axis); + output_shift = output_shift - shift; + } + + for (size_t idx = 0; idx < num_outputs; ++idx) { + const U shifted_sum = + static_cast(temp_sum[idx] - input_zero_point * num_elements_in_axis); + int32_t output = MultiplyByQuantizedMultiplier( + shifted_sum, output_multiplier, output_shift) + + output_zero_point; + output = std::min(std::max(output, kMinValue), kMaxValue); + output_data[idx] = static_cast(output); + } + return true; +} + +template +inline bool QuantizedMeanOrSumExtraArgs( + const T* input_data, int32_t input_zero_point, float input_scale, + const int* input_dims, const int input_num_dims, T* output_data, + float output_scale, int32_t output_multiplier, int output_shift, + int32_t output_zero_point, const int* output_dims, + const int output_num_dims, const int* axis, const int num_axis_dimensions, + bool keep_dims, int* temp_index, int* resolved_axis, U* temp_sum, + bool compute_sum) { + return QuantizedMeanOrSum( + input_data, input_zero_point, input_dims, input_num_dims, output_data, + output_multiplier, output_shift, output_zero_point, output_dims, + output_num_dims, axis, num_axis_dimensions, keep_dims, temp_index, + resolved_axis, temp_sum, compute_sum); +} + +template +inline bool QuantizedReduceProd(const T* input_data, int32_t input_zero_point, + const RuntimeShape& input_shape, T* output_data, + int32_t output_zero_point, + const RuntimeShape& output_shape, + const int* axis, + const int64_t num_axis_dimensions, + bool keep_dims, int* temp_index, + int* resolved_axis, int32_t* temp_prod, + int32_t scaling_multiplier, int scaling_shift) { + const int32_t kMinValue = std::numeric_limits::min(); + const int32_t kMaxValue = std::numeric_limits::max(); + + // Resolve axis. 
+ int num_resolved_axis = 0; + if (!ResolveAxis(input_shape.DimensionsCount(), axis, num_axis_dimensions, + resolved_axis, &num_resolved_axis)) { + return false; + } + + // Calculate the reduced product by rescaling each multiplication step to + // avoid an overflow. + auto reducer_first = [&](T in) -> int32_t { return in - input_zero_point; }; + + auto reducer_next = [&](int32_t current, T in) -> int32_t { + const int64_t result = + static_cast(current) * (in - input_zero_point); + return MultiplyByQuantizedMultiplier(result, scaling_multiplier, + scaling_shift); + }; + + if (!Reduce( + input_data, input_shape.DimsData(), output_shape.DimsData(), + input_shape.DimensionsCount(), output_shape.DimensionsCount(), + resolved_axis, num_resolved_axis, temp_index, reducer_first, + reducer_next, temp_prod)) { + return false; + } + + for (int i = 0; i < output_shape.FlatSize(); i++) { + int32_t result = + MultiplyByQuantizedMultiplier(static_cast(temp_prod[i]), + scaling_multiplier, scaling_shift) + + output_zero_point; + result = std::min(std::max(result, kMinValue), kMaxValue); + output_data[i] = static_cast(result); + } + + return true; +} + +} // namespace reference_ops + +} // namespace tflite + +#endif // TENSORFLOW_LITE_KERNELS_INTERNAL_REFERENCE_REDUCE_H_ diff --git a/tensorflow/lite/kernels/internal/reference/requantize.h b/tensorflow/lite/kernels/internal/reference/requantize.h new file mode 100644 index 0000000..f35f6fc --- /dev/null +++ b/tensorflow/lite/kernels/internal/reference/requantize.h @@ -0,0 +1,70 @@ +/* Copyright 2020 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. 
+You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ +#ifndef TENSORFLOW_LITE_KERNELS_INTERNAL_REFERENCE_REQUANTIZE_H_ +#define TENSORFLOW_LITE_KERNELS_INTERNAL_REFERENCE_REQUANTIZE_H_ + +#include + +#include "ruy/profiler/instrumentation.h" // from @ruy +#include "tensorflow/lite/kernels/internal/common.h" +#include "tensorflow/lite/kernels/internal/types.h" + +namespace tflite { +namespace reference_ops { + +template +inline void Requantize(const input_type* input_data, int32_t size, + int32_t effective_scale_multiplier, + int32_t effective_scale_shift, int32_t input_zeropoint, + int32_t output_zeropoint, output_type* output_data) { + ruy::profiler::ScopeLabel label("Requantize"); + const bool same_scale = + (effective_scale_multiplier == 1 << 30 && effective_scale_shift == 1); + if (same_scale) { + const bool mixed_type_int8_uint8 = + std::is_same::value && + std::is_same::value; + const bool mixed_type_uint8_int8 = + std::is_same::value && + std::is_same::value; + const int32_t zero_point_diff = input_zeropoint - output_zeropoint; + // Fast path to do requantization for the case when just a shift of 128 is + // needed. 
+ if ((mixed_type_int8_uint8 && zero_point_diff == -128) || + (mixed_type_uint8_int8 && zero_point_diff == 128)) { + for (int i = 0; i < size; ++i) { + output_data[i] = input_data[i] ^ 0x80; + } + return; + } + } + static constexpr int32_t kMinOutput = std::numeric_limits::min(); + static constexpr int32_t kMaxOutput = std::numeric_limits::max(); + for (int i = 0; i < size; ++i) { + const int32_t input = input_data[i] - input_zeropoint; + const int32_t output = + MultiplyByQuantizedMultiplier(input, effective_scale_multiplier, + effective_scale_shift) + + output_zeropoint; + const int32_t clamped_output = + std::max(std::min(output, kMaxOutput), kMinOutput); + output_data[i] = static_cast(clamped_output); + } +} + +} // namespace reference_ops +} // namespace tflite + +#endif // TENSORFLOW_LITE_KERNELS_INTERNAL_REFERENCE_REQUANTIZE_H_ diff --git a/tensorflow/lite/kernels/internal/reference/resize_bilinear.h b/tensorflow/lite/kernels/internal/reference/resize_bilinear.h new file mode 100644 index 0000000..bf9a88a --- /dev/null +++ b/tensorflow/lite/kernels/internal/reference/resize_bilinear.h @@ -0,0 +1,233 @@ +/* Copyright 2021 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/ +#ifndef TENSORFLOW_LITE_KERNELS_INTERNAL_REFERENCE_RESIZE_BILINEAR_H_ +#define TENSORFLOW_LITE_KERNELS_INTERNAL_REFERENCE_RESIZE_BILINEAR_H_ + +#include +#include +#include +#include + +#include "tensorflow/lite/kernels/internal/cppmath.h" +#include "tensorflow/lite/kernels/internal/types.h" + +namespace tflite { +namespace reference_ops { + +inline void ComputeInterpolationValues(const float value, const float scale, + const bool half_pixel_centers, + int32_t input_size, float* scaled_value, + int32_t* lower_bound, + int32_t* upper_bound) { + if (half_pixel_centers) { + *scaled_value = (value + 0.5f) * scale - 0.5f; + } else { + *scaled_value = value * scale; + } + float scaled_value_floor = std::floor(*scaled_value); + *lower_bound = std::max(static_cast(scaled_value_floor), + static_cast(0)); + *upper_bound = + std::min(static_cast(std::ceil(*scaled_value)), input_size - 1); +} + +template +inline void ResizeBilinear(const tflite::ResizeBilinearParams& op_params, + const RuntimeShape& unextended_input_shape, + const T* input_data, + const RuntimeShape& unextended_output_size_shape, + const int32_t* output_size_data, + const RuntimeShape& unextended_output_shape, + T* output_data) { + // If half_pixel_centers is True, align_corners must be False. 
+ TFLITE_DCHECK(!op_params.half_pixel_centers || !op_params.align_corners); + TFLITE_DCHECK_LE(unextended_input_shape.DimensionsCount(), 4); + TFLITE_DCHECK_LE(unextended_output_size_shape.DimensionsCount(), 4); + TFLITE_DCHECK_LE(unextended_output_shape.DimensionsCount(), 4); + const RuntimeShape input_shape = + RuntimeShape::ExtendedShape(4, unextended_input_shape); + const RuntimeShape output_size_shape = + RuntimeShape::ExtendedShape(4, unextended_output_size_shape); + const RuntimeShape output_shape = + RuntimeShape::ExtendedShape(4, unextended_output_shape); + + int32_t batches = MatchingDim(input_shape, 0, output_shape, 0); + int32_t input_height = input_shape.Dims(1); + int32_t input_width = input_shape.Dims(2); + int32_t depth = MatchingDim(input_shape, 3, output_shape, 3); + + TFLITE_DCHECK_EQ(output_size_shape.Dims(0), 1); + TFLITE_DCHECK_EQ(output_size_shape.Dims(1), 1); + TFLITE_DCHECK_EQ(output_size_shape.Dims(2), 1); + TFLITE_DCHECK_EQ(output_size_shape.Dims(3), 2); + int32_t output_height = + output_size_data[Offset(output_size_shape, 0, 0, 0, 0)]; + int32_t output_width = + output_size_data[Offset(output_size_shape, 0, 0, 0, 1)]; + + float height_scale = static_cast(input_height) / output_height; + float width_scale = static_cast(input_width) / output_width; + if (op_params.align_corners && output_height > 1) { + height_scale = static_cast(input_height - 1) / (output_height - 1); + } + if (op_params.align_corners && output_width > 1) { + width_scale = static_cast(input_width - 1) / (output_width - 1); + } + const float rounding_offset = std::numeric_limits::is_integer ? 
.5f : .0f; + + for (int b = 0; b < batches; ++b) { + for (int y = 0; y < output_height; ++y) { + float input_y; + int32_t y0, y1; + ComputeInterpolationValues(y, height_scale, op_params.half_pixel_centers, + input_height, &input_y, &y0, &y1); + for (int x = 0; x < output_width; ++x) { + float input_x; + int32_t x0, x1; + ComputeInterpolationValues(x, width_scale, op_params.half_pixel_centers, + input_width, &input_x, &x0, &x1); + for (int c = 0; c < depth; ++c) { + T interpolation = + static_cast(input_data[Offset(input_shape, b, y0, x0, c)] * + (1 - (input_y - y0)) * (1 - (input_x - x0)) + + input_data[Offset(input_shape, b, y1, x0, c)] * + (input_y - y0) * (1 - (input_x - x0)) + + input_data[Offset(input_shape, b, y0, x1, c)] * + (1 - (input_y - y0)) * (input_x - x0) + + input_data[Offset(input_shape, b, y1, x1, c)] * + (input_y - y0) * (input_x - x0) + + rounding_offset); + output_data[Offset(output_shape, b, y, x, c)] = interpolation; + } + } + } + } +} + +inline void ComputeInterpolationValuesInteger( + const int32_t value, const int32_t scale_10, const bool half_pixel_centers, + int32_t input_size, int32_t* scaled_value, int32_t* lower_bound, + int32_t* upper_bound) { + if (half_pixel_centers) { + *scaled_value = value * scale_10 + scale_10 / 2 - (1 << 9); + } else { + *scaled_value = value * scale_10; + } + constexpr int32_t zero = 0; + *lower_bound = std::max(*scaled_value / (1 << 10), zero); + *upper_bound = + std::min((*scaled_value + (1 << 10) - 1) / (1 << 10), input_size - 1); +} + +// Same as above but doesn't use any floating-point for the resize +template +inline void ResizeBilinearInteger( + const tflite::ResizeBilinearParams& op_params, + const RuntimeShape& unextended_input_shape, const T* input_data, + const RuntimeShape& unextended_output_size_shape, + const int32_t* output_size_data, + const RuntimeShape& unextended_output_shape, T* output_data) { + // If half_pixel_centers is True, align_corners must be False. 
+ TFLITE_DCHECK(!op_params.half_pixel_centers || !op_params.align_corners); + TFLITE_DCHECK_LE(unextended_input_shape.DimensionsCount(), 4); + TFLITE_DCHECK_LE(unextended_output_size_shape.DimensionsCount(), 4); + TFLITE_DCHECK_LE(unextended_output_shape.DimensionsCount(), 4); + const RuntimeShape input_shape = + RuntimeShape::ExtendedShape(4, unextended_input_shape); + const RuntimeShape output_size_shape = + RuntimeShape::ExtendedShape(4, unextended_output_size_shape); + const RuntimeShape output_shape = + RuntimeShape::ExtendedShape(4, unextended_output_shape); + + const int32_t batches = MatchingDim(input_shape, 0, output_shape, 0); + const int32_t input_height = input_shape.Dims(1); + const int32_t input_width = input_shape.Dims(2); + const int32_t depth = MatchingDim(input_shape, 3, output_shape, 3); + + TFLITE_DCHECK_EQ(output_size_shape.Dims(0), 1); + TFLITE_DCHECK_EQ(output_size_shape.Dims(1), 1); + TFLITE_DCHECK_EQ(output_size_shape.Dims(2), 1); + TFLITE_DCHECK_EQ(output_size_shape.Dims(3), 2); + const int32_t output_height = + output_size_data[Offset(output_size_shape, 0, 0, 0, 0)]; + const int32_t output_width = + output_size_data[Offset(output_size_shape, 0, 0, 0, 1)]; + + int32_t height_scale_10 = + ((1 << 10) * input_height + output_height / 2) / output_height; + int32_t width_scale_10 = + ((1 << 10) * input_width + output_width / 2) / output_width; + if (op_params.align_corners && output_height > 1) { + height_scale_10 = + ((1 << 10) * (input_height - 1) + (output_height - 1) / 2) / + (output_height - 1); + } + if (op_params.align_corners && output_width > 1) { + width_scale_10 = ((1 << 10) * (input_width - 1) + (output_width - 1) / 2) / + (output_width - 1); + } + + for (int b = 0; b < batches; ++b) { + for (int y = 0; y < output_height; ++y) { + int32_t input_y, y0, y1; + ComputeInterpolationValuesInteger(y, height_scale_10, + op_params.half_pixel_centers, + input_height, &input_y, &y0, &y1); + for (int x = 0; x < output_width; ++x) { + int32_t 
input_x, x0, x1; + ComputeInterpolationValuesInteger(x, width_scale_10, + op_params.half_pixel_centers, + input_width, &input_x, &x0, &x1); + for (int c = 0; c < depth; ++c) { + const int64_t output_20_ll = + static_cast( + input_data[Offset(input_shape, b, y0, x0, c)]) * + ((1 << 10) - (input_y - (1 << 10) * y0)) * + ((1 << 10) - (input_x - (1 << 10) * x0)); + const int64_t output_20_lu = + static_cast( + input_data[Offset(input_shape, b, y1, x0, c)]) * + (input_y - (1 << 10) * y0) * + ((1 << 10) - (input_x - (1 << 10) * x0)); + const int64_t output_20_rl = + static_cast( + input_data[Offset(input_shape, b, y0, x1, c)]) * + ((1 << 10) - (input_y - (1 << 10) * y0)) * + (input_x - (1 << 10) * x0); + const int64_t output_20_ru = + static_cast( + input_data[Offset(input_shape, b, y1, x1, c)]) * + (input_y - (1 << 10) * y0) * (input_x - (1 << 10) * x0); + const int64_t output_20 = + output_20_ll + output_20_lu + output_20_rl + output_20_ru; +#if TFLITE_SINGLE_ROUNDING + const int64_t round = 1 << 19; + const T interpolation = static_cast((output_20 + round) >> 20); +#else + const int64_t round = (output_20 > 0) ? (1 << 19) : -(1 << 19); + const T interpolation = + static_cast((output_20 + round) / (1 << 20)); +#endif // TFLITE_SINGLE_ROUNDING + output_data[Offset(output_shape, b, y, x, c)] = interpolation; + } + } + } + } +} + +} // namespace reference_ops +} // namespace tflite + +#endif // TENSORFLOW_LITE_KERNELS_INTERNAL_REFERENCE_RESIZE_BILINEAR_H_ diff --git a/tensorflow/lite/kernels/internal/reference/resize_nearest_neighbor.h b/tensorflow/lite/kernels/internal/reference/resize_nearest_neighbor.h new file mode 100644 index 0000000..bf0b757 --- /dev/null +++ b/tensorflow/lite/kernels/internal/reference/resize_nearest_neighbor.h @@ -0,0 +1,102 @@ +/* Copyright 2020 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. 
+You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ +#ifndef TENSORFLOW_LITE_KERNELS_INTERNAL_REFERENCE_RESIZE_NEAREST_NEIGHBOR_H_ +#define TENSORFLOW_LITE_KERNELS_INTERNAL_REFERENCE_RESIZE_NEAREST_NEIGHBOR_H_ + +#include +#include + +#include "tensorflow/lite/kernels/internal/cppmath.h" +#include "tensorflow/lite/kernels/internal/types.h" + +namespace tflite { + +namespace reference_ops { + +inline int32_t GetNearestNeighbor(const int input_value, + const int32_t input_size, + const int32_t output_size, + const bool align_corners, + const bool half_pixel_centers) { + const float scale = + (align_corners && output_size > 1) + ? (input_size - 1) / static_cast(output_size - 1) + : input_size / static_cast(output_size); + const float offset = half_pixel_centers ? 0.5f : 0.0f; + int32_t output_value = std::min( + align_corners + ? 
static_cast(TfLiteRound((input_value + offset) * scale)) + : static_cast(std::floor((input_value + offset) * scale)), + input_size - 1); + if (half_pixel_centers) { + output_value = std::max(static_cast(0), output_value); + } + return output_value; +} + +template +inline void ResizeNearestNeighbor( + const tflite::ResizeNearestNeighborParams& op_params, + const RuntimeShape& unextended_input_shape, const T* input_data, + const RuntimeShape& output_size_shape, const int32_t* output_size_data, + const RuntimeShape& unextended_output_shape, T* output_data) { + TFLITE_DCHECK_LE(unextended_input_shape.DimensionsCount(), 4); + TFLITE_DCHECK_LE(unextended_output_shape.DimensionsCount(), 4); + + const RuntimeShape input_shape = + RuntimeShape::ExtendedShape(4, unextended_input_shape); + const RuntimeShape output_shape = + RuntimeShape::ExtendedShape(4, unextended_output_shape); + + int32_t batches = MatchingDim(input_shape, 0, output_shape, 0); + int32_t input_height = input_shape.Dims(1); + int32_t input_width = input_shape.Dims(2); + int32_t depth = MatchingDim(input_shape, 3, output_shape, 3); + + // The Tensorflow version of this op allows resize on the width and height + // axis only. 
+ TFLITE_DCHECK_EQ(output_size_shape.FlatSize(), 2); + int32_t output_height = output_size_data[0]; + int32_t output_width = output_size_data[1]; + + const int col_offset = input_shape.Dims(3); + const int row_offset = input_shape.Dims(2) * col_offset; + const int batch_offset = input_shape.Dims(1) * row_offset; + + const T* input_ptr = input_data; + T* output_ptr = output_data; + for (int b = 0; b < batches; ++b) { + for (int y = 0; y < output_height; ++y) { + int32_t in_y = GetNearestNeighbor(y, input_height, output_height, + op_params.align_corners, + op_params.half_pixel_centers); + const T* y_input_ptr = input_ptr + in_y * row_offset; + for (int x = 0; x < output_width; ++x) { + int32_t in_x = GetNearestNeighbor(x, input_width, output_width, + op_params.align_corners, + op_params.half_pixel_centers); + const T* x_input_ptr = y_input_ptr + in_x * col_offset; + memcpy(output_ptr, x_input_ptr, depth * sizeof(T)); + output_ptr += depth; + } + } + input_ptr += batch_offset; + } +} + +} // namespace reference_ops +} // namespace tflite + +#endif // TENSORFLOW_LITE_KERNELS_INTERNAL_REFERENCE_RESIZE_NEAREST_NEIGHBOR_H_ diff --git a/tensorflow/lite/kernels/internal/reference/round.h b/tensorflow/lite/kernels/internal/reference/round.h new file mode 100644 index 0000000..9bd8f3f --- /dev/null +++ b/tensorflow/lite/kernels/internal/reference/round.h @@ -0,0 +1,51 @@ +/* Copyright 2018 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/ +#ifndef TENSORFLOW_LITE_KERNELS_INTERNAL_REFERENCE_ROUND_H_ +#define TENSORFLOW_LITE_KERNELS_INTERNAL_REFERENCE_ROUND_H_ + +#include + +#include "tensorflow/lite/kernels/internal/types.h" + +namespace tflite { + +namespace reference_ops { + +inline float RoundToNearest(float value) { + auto floor_val = std::floor(value); + auto diff = value - floor_val; + if ((diff < 0.5f) || + ((diff == 0.5f) && (static_cast(floor_val) % 2 == 0))) { + return floor_val; + } else { + return floor_val = floor_val + 1.0f; + } +} + +inline void Round(const RuntimeShape& input_shape, const float* input_data, + const RuntimeShape& output_shape, float* output_data) { + const int flat_size = MatchingFlatSize(input_shape, output_shape); + for (int i = 0; i < flat_size; ++i) { + // Note that this implementation matches that of tensorFlow tf.round + // and corresponds to the bankers rounding method. + // cfenv (for fesetround) is not yet supported universally on Android, so + // using a work around. + output_data[i] = RoundToNearest(input_data[i]); + } +} + +} // namespace reference_ops +} // namespace tflite +#endif // TENSORFLOW_LITE_KERNELS_INTERNAL_REFERENCE_ROUND_H_ diff --git a/tensorflow/lite/kernels/internal/reference/select.h b/tensorflow/lite/kernels/internal/reference/select.h new file mode 100644 index 0000000..82b6097 --- /dev/null +++ b/tensorflow/lite/kernels/internal/reference/select.h @@ -0,0 +1,151 @@ +/* Copyright 2022 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ +#ifndef TENSORFLOW_LITE_KERNELS_INTERNAL_REFERENCE_SELECT_H_ +#define TENSORFLOW_LITE_KERNELS_INTERNAL_REFERENCE_SELECT_H_ + +#include + +#include "ruy/profiler/instrumentation.h" // from @ruy +#include "tensorflow/lite/kernels/internal/common.h" +#include "tensorflow/lite/kernels/internal/types.h" + +namespace tflite { +namespace reference_ops { + +template +void Select(const RuntimeShape& input_condition_shape, + const D* input_condition_data, const RuntimeShape& input_x_shape, + const T* input_x_data, const RuntimeShape& input_y_shape, + const T* input_y_data, const RuntimeShape& output_shape, + T* output_data) { + ruy::profiler::ScopeLabel label("Select"); + int64_t flatsize; + // Allow select operator executions on mixed scalar tensors and one element + // tensors. + if (input_condition_shape.FlatSize() == 1 && input_x_shape.FlatSize() == 1 && + input_y_shape.FlatSize() == 1 && output_shape.FlatSize() == 1) { + flatsize = 1; + } else { + flatsize = MatchingFlatSize(input_condition_shape, input_x_shape, + input_y_shape, output_shape); + } + for (int64_t i = 0; i < flatsize; ++i) { + output_data[i] = + input_condition_data[i] ? 
input_x_data[i] : input_y_data[i]; + } +} + +template +void RankOneSelect(const RuntimeShape& input_condition_shape, + const D* input_condition_data, + const RuntimeShape& input_x_shape, const T* input_x_data, + const RuntimeShape& input_y_shape, const T* input_y_data, + const RuntimeShape& output_shape, T* output_data) { + ruy::profiler::ScopeLabel label("Select/RankOneSelect"); + const int64_t outer_size = input_condition_shape.FlatSize(); + int64_t inner_size; + if (input_condition_shape.DimensionsCount() == 0) { + inner_size = MatchingFlatSize(input_x_shape, input_y_shape, output_shape); + } else { + TFLITE_DCHECK_EQ( + MatchingDim(input_x_shape, 0, input_y_shape, 0, output_shape, 0), + outer_size); + inner_size = + MatchingFlatSizeSkipDim(input_x_shape, 0, input_y_shape, output_shape); + } + + int64_t offset = 0; + for (int64_t i = 0; i < outer_size; i++) { + const T* input_data = input_condition_data[i] ? input_x_data : input_y_data; + memcpy(output_data + offset, input_data + offset, inner_size * sizeof(T)); + offset += inner_size; + } +} + +template +void BroadcastSelect5DSlow(const RuntimeShape& input_condition_shape, + const D* input_condition_data, + const RuntimeShape& input_x_shape, + const T* input_x_data, + const RuntimeShape& input_y_shape, + const T* input_y_data, + const RuntimeShape& output_shape, T* output_data) { + ruy::profiler::ScopeLabel label("Select/BroadcastSelectSlow"); + TFLITE_DCHECK_LE(input_condition_shape.DimensionsCount(), 5); + TFLITE_DCHECK_LE(input_x_shape.DimensionsCount(), 5); + TFLITE_DCHECK_LE(input_y_shape.DimensionsCount(), 5); + TFLITE_DCHECK_LE(output_shape.DimensionsCount(), 5); + + NdArrayDesc<5> desc_condition; + NdArrayDesc<5> desc_x; + NdArrayDesc<5> desc_y; + NdArrayDesc<5> desc_output; + const RuntimeShape extended_output_shape = + RuntimeShape::ExtendedShape(5, output_shape); + CopyDimsToDesc(extended_output_shape, &desc_output); + NdArrayDescsForElementwiseBroadcast(input_condition_shape, input_x_shape, + 
input_y_shape, &desc_condition, &desc_x, + &desc_y); + + // In Tensorflow, the dimensions are canonically named (batch_number, row, + // col, channel), with extents (batches, height, width, depth), with the + // trailing dimension changing most rapidly (channels has the smallest + // stride, typically 1 element). + // + // In generated C code, we store arrays with the dimensions reversed. The + // first dimension has smallest stride. + // + // We name our variables by their Tensorflow convention, but generate C code + // nesting loops such that the innermost loop has the smallest stride for + // the best cache behavior. + for (int n = 0; n < desc_output.extents[0]; ++n) { + int out_idx_n = desc_output.extents[1] * n; + int cond_idx_n = desc_condition.strides[0] * n; + int in_idx1_n = desc_x.strides[0] * n; + int in_idx2_n = desc_y.strides[0] * n; + for (int b = 0; b < desc_output.extents[1]; ++b) { + int out_idx_b = (out_idx_n + b) * desc_output.extents[2]; + int cond_idx_b = cond_idx_n + desc_condition.strides[1] * b; + int in_idx1_b = in_idx1_n + desc_x.strides[1] * b; + int in_idx2_b = in_idx2_n + desc_y.strides[1] * b; + for (int y = 0; y < desc_output.extents[2]; ++y) { + int out_idx_y = (out_idx_b + y) * desc_output.extents[3]; + int cond_idx_y = cond_idx_b + desc_condition.strides[2] * y; + int in_idx1_y = in_idx1_b + desc_x.strides[2] * y; + int in_idx2_y = in_idx2_b + desc_y.strides[2] * y; + for (int x = 0; x < desc_output.extents[3]; ++x) { + int out_idx = (out_idx_y + x) * desc_output.extents[4]; + int cond_idx = cond_idx_y + desc_condition.strides[3] * x; + int in_idx1 = in_idx1_y + desc_x.strides[3] * x; + int in_idx2 = in_idx2_y + desc_y.strides[3] * x; + for (int c = 0; c < desc_output.extents[4]; ++c) { + output_data[out_idx] = input_condition_data[cond_idx] + ? 
input_x_data[in_idx1] + : input_y_data[in_idx2]; + out_idx++; + cond_idx += desc_condition.strides[4]; + in_idx1 += desc_x.strides[4]; + in_idx2 += desc_y.strides[4]; + } + } + } + } + } +} + +} // namespace reference_ops +} // namespace tflite + +#endif // TENSORFLOW_LITE_KERNELS_INTERNAL_REFERENCE_SELECT_H_ diff --git a/tensorflow/lite/kernels/internal/reference/slice.h b/tensorflow/lite/kernels/internal/reference/slice.h new file mode 100644 index 0000000..cb73ea0 --- /dev/null +++ b/tensorflow/lite/kernels/internal/reference/slice.h @@ -0,0 +1,80 @@ +/* Copyright 2021 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/ +#ifndef TENSORFLOW_LITE_KERNELS_INTERNAL_REFERENCE_SLICE_H_ +#define TENSORFLOW_LITE_KERNELS_INTERNAL_REFERENCE_SLICE_H_ + +#include "tensorflow/lite/kernels/internal/portable_tensor.h" +#include "tensorflow/lite/kernels/internal/types.h" + +namespace tflite { + +namespace reference_ops { + +template +inline void Slice(const tflite::SliceParams& op_params, + const RuntimeShape& input_shape, + const RuntimeShape& output_shape, + SequentialTensorWriter* writer) { + const RuntimeShape ext_shape = RuntimeShape::ExtendedShape(5, input_shape); + TFLITE_DCHECK_LE(op_params.begin_count, 5); + TFLITE_DCHECK_LE(op_params.size_count, 5); + const int begin_count = op_params.begin_count; + const int size_count = op_params.size_count; + // We front-pad the begin and size vectors. + int start[5]; + int stop[5]; + for (int i = 0; i < 5; ++i) { + int padded_i = 5 - i; + start[i] = + begin_count < padded_i ? 0 : op_params.begin[begin_count - padded_i]; + stop[i] = + (size_count < padded_i || op_params.size[size_count - padded_i] == -1) + ? 
ext_shape.Dims(i) + : start[i] + op_params.size[size_count - padded_i]; + } + + for (int i0 = start[0]; i0 < stop[0]; ++i0) { + for (int i1 = start[1]; i1 < stop[1]; ++i1) { + for (int i2 = start[2]; i2 < stop[2]; ++i2) { + for (int i3 = start[3]; i3 < stop[3]; ++i3) { + for (int i4 = start[4]; i4 < stop[4]; ++i4) { + writer->Write(Offset(ext_shape, i0, i1, i2, i3, i4)); + } + } + } + } + } +} + +template +inline void Slice(const tflite::SliceParams& op_params, + const RuntimeShape& input_shape, const T* input_data, + const RuntimeShape& output_shape, T* output_data) { + SequentialTensorWriter writer(input_data, output_data); + return Slice(op_params, input_shape, output_shape, &writer); +} + +template +inline void Slice(const tflite::SliceParams& op_params, + const RuntimeShape& input_shape, const TfLiteTensor* input, + const RuntimeShape& output_shape, TfLiteTensor* output) { + SequentialTensorWriter writer(input, output); + return Slice(op_params, input_shape, output_shape, &writer); +} + +} // namespace reference_ops +} // namespace tflite + +#endif // TENSORFLOW_LITE_KERNELS_INTERNAL_REFERENCE_SLICE_H_ diff --git a/tensorflow/lite/kernels/internal/reference/softmax.h b/tensorflow/lite/kernels/internal/reference/softmax.h new file mode 100644 index 0000000..c09a7ea --- /dev/null +++ b/tensorflow/lite/kernels/internal/reference/softmax.h @@ -0,0 +1,233 @@ +/* Copyright 2017 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/ +#ifndef TENSORFLOW_LITE_KERNELS_INTERNAL_REFERENCE_SOFTMAX_H_ +#define TENSORFLOW_LITE_KERNELS_INTERNAL_REFERENCE_SOFTMAX_H_ + +#include +#include + +#include "fixedpoint/fixedpoint.h" +#include "tensorflow/lite/kernels/internal/common.h" +#include "tensorflow/lite/kernels/internal/cppmath.h" +#include "tensorflow/lite/kernels/internal/quantization_util.h" +#include "tensorflow/lite/kernels/internal/types.h" +#include "tensorflow/lite/kernels/op_macros.h" + +namespace tflite { +namespace reference_ops { + +inline void Softmax(const SoftmaxParams& params, + const RuntimeShape& input_shape, const float* input_data, + const RuntimeShape& output_shape, float* output_data) { + const int trailing_dim = input_shape.DimensionsCount() - 1; + const int outer_size = + MatchingFlatSizeSkipDim(input_shape, trailing_dim, output_shape); + const int depth = + MatchingDim(input_shape, trailing_dim, output_shape, trailing_dim); + + for (int i = 0; i < outer_size; ++i) { + // Find max element value which we'll use to ensure numerical stability + // taking advantage of the following equality: + // exp(x[i])/sum(exp(x[i])) == exp(x[i]+C)/sum(exp(x[i]+C)) + float max = std::numeric_limits::lowest(); + for (int c = 0; c < depth; ++c) { + max = std::max(max, input_data[i * depth + c]); + } + + // Compute sum. + float sum = 0.f; + for (int c = 0; c < depth; ++c) { + const float exp_c = std::exp((input_data[i * depth + c] - max) * + static_cast(params.beta)); + output_data[i * depth + c] = exp_c; + sum += exp_c; + } + + // Compute result. + for (int c = 0; c < depth; ++c) { + output_data[i * depth + c] = output_data[i * depth + c] / sum; + } + } +} + +// Quantized softmax with int8_t/uint8_t input and int8_t/uint8_t/int16_t +// output. 
+template +inline void Softmax(const SoftmaxParams& params, + const RuntimeShape& input_shape, const InputT* input_data, + const RuntimeShape& output_shape, OutputT* output_data) { + const int32_t input_beta_multiplier = params.input_multiplier; + const int32_t input_beta_left_shift = params.input_left_shift; + const int diff_min = params.diff_min; + // The representation chosen for the input to the exp() function is Q5.26. + // We need to leave extra space since values that we skip might be as large as + // -32 before multiplying by input_beta_multiplier, and therefore as large as + // -16 afterwards. Note that exp(-8) is definitely not insignificant to + // accumulation, but exp(-16) definitely is. + static const int kScaledDiffIntegerBits = 5; + static const int kAccumulationIntegerBits = 12; + using FixedPointScaledDiff = + gemmlowp::FixedPoint; + using FixedPointAccum = + gemmlowp::FixedPoint; + using FixedPoint0 = gemmlowp::FixedPoint; + + const int trailing_dim = input_shape.DimensionsCount() - 1; + const int outer_size = + MatchingFlatSizeSkipDim(input_shape, trailing_dim, output_shape); + const int depth = + MatchingDim(input_shape, trailing_dim, output_shape, trailing_dim); + + for (int i = 0; i < outer_size; ++i) { + InputT max_in_row = std::numeric_limits::min(); + for (int c = 0; c < depth; ++c) { + max_in_row = std::max(max_in_row, input_data[i * depth + c]); + } + + FixedPointAccum sum_of_exps = FixedPointAccum::Zero(); + for (int c = 0; c < depth; ++c) { + int32_t input_diff = + static_cast(input_data[i * depth + c]) - max_in_row; + if (input_diff >= diff_min) { + const int32_t input_diff_rescaled = + MultiplyByQuantizedMultiplierGreaterThanOne( + input_diff, input_beta_multiplier, input_beta_left_shift); + const FixedPointScaledDiff scaled_diff_f8 = + FixedPointScaledDiff::FromRaw(input_diff_rescaled); + sum_of_exps = sum_of_exps + gemmlowp::Rescale( + exp_on_negative_values(scaled_diff_f8)); + } + } + + int num_bits_over_unit; + FixedPoint0 
shifted_scale = FixedPoint0::FromRaw(GetReciprocal( + sum_of_exps.raw(), kAccumulationIntegerBits, &num_bits_over_unit)); + + for (int c = 0; c < depth; ++c) { + int32_t input_diff = + static_cast(input_data[i * depth + c]) - max_in_row; + if (input_diff >= diff_min) { + const int32_t input_diff_rescaled = + MultiplyByQuantizedMultiplierGreaterThanOne( + input_diff, input_beta_multiplier, input_beta_left_shift); + const FixedPointScaledDiff scaled_diff_f8 = + FixedPointScaledDiff::FromRaw(input_diff_rescaled); + + FixedPoint0 exp_in_0 = exp_on_negative_values(scaled_diff_f8); + int32_t unsat_output = gemmlowp::RoundingDivideByPOT( + (shifted_scale * exp_in_0).raw(), + num_bits_over_unit + 31 - (sizeof(OutputT) * 8)); + + const int32_t shifted_output = + unsat_output + + static_cast(std::numeric_limits::min()); + + output_data[i * depth + c] = static_cast(std::max( + std::min(shifted_output, + static_cast(std::numeric_limits::max())), + static_cast(std::numeric_limits::min()))); + } else { + output_data[i * depth + c] = std::numeric_limits::min(); + } + } + } +} + +// Computes exp(input - max_input) +inline int16_t SoftMaxCalculateExp(const SoftmaxParams& params, + const int16_t* input_data, const int depth, + int16_t max_in_row, int i, int c) { + int32_t input_diff = input_data[i * depth + c] - max_in_row; + // scale the input_diff such that [-65535, 0] correspond to [-10.0, 0.0] + // exp lut generated with range [-10, 0], as exp(-10) is negligible. + int32_t scaled_diff = MultiplyByQuantizedMultiplier( + input_diff, params.input_multiplier, params.input_left_shift); + // recenter to [-32768, 32767] + int32_t sym_scaled_diff = scaled_diff + 32767; + int16_t sat_sym_scaled_diff = + std::min(std::max(sym_scaled_diff, static_cast(-32768)), + static_cast(32767)); + // apply the exp() LUT activation function + return LUTLookup(sat_sym_scaled_diff, params.exp_lut); +} +// Quantized softmax with int16_t input and int16_t output. 
+inline void SoftmaxInt16(const SoftmaxParams& params, + const RuntimeShape& input_shape, + const int16_t* input_data, + const RuntimeShape& output_shape, + int16_t* output_data) { + const int trailing_dim = input_shape.DimensionsCount() - 1; + const int outer_size = + MatchingFlatSizeSkipDim(input_shape, trailing_dim, output_shape); + const int depth = + MatchingDim(input_shape, trailing_dim, output_shape, trailing_dim); + + for (int i = 0; i < outer_size; ++i) { + // Find the largest element + int16_t max_in_row = std::numeric_limits::min(); + for (int c = 0; c < depth; ++c) { + max_in_row = std::max(max_in_row, input_data[i * depth + c]); + } + + // This loops computes the exp values and their sum. We will need the exp + // values later on in the function so we cache them in the output_data + // buffer. This is an optimization done to avoid calculating the exp values + // twice making use of the output_data buffer as scratch memory. + int32_t sum_of_exps = 0; // Q16.15 fixed point format. + int16_t* exp_results_Q015 = output_data + i * depth; + for (int c = 0; c < depth; ++c) { + exp_results_Q015[c] = + SoftMaxCalculateExp(params, input_data, depth, max_in_row, i, c); + sum_of_exps += exp_results_Q015[c]; + } + + // Compute the reciprocal 1/sum_of_exps + uint8_t headroom_plus_one = + CountLeadingZeros(static_cast(sum_of_exps)); + int32_t shifted_sum = + ((static_cast(sum_of_exps) << (headroom_plus_one - 1)) + + (1 << 13)) >> + 14; + // since the LUT computes 1/(1 + x) we need to first compute x = (sum - 1). + // also, the LUT expects a symmetrical input, so we must also recenter x + // from [0, 65535] to [-32768, 32767]. 
+ int32_t sym_shifted_sum = shifted_sum + (-((1 << 15) + (1 << 16))); + int16_t sat_sym_shifted_sum = static_cast( + std::min(std::max(sym_shifted_sum, static_cast(-32768)), + static_cast(32767))); + // apply 1/(1 + x) LUT activation function + int16_t reciprocal_scale_Q015 = + LUTLookup(sat_sym_shifted_sum, params.one_over_one_plus_x_lut); + + // Rescale the exp_result with reciprocal + // range of output is [0, 32767] correspond to [0.0, 1.0] + for (int c = 0; c < depth; ++c) { + uint8_t right_shift = 31 - headroom_plus_one; + int64_t round = 1 << (right_shift - 1); + int32_t result = (static_cast(exp_results_Q015[c]) * + static_cast(reciprocal_scale_Q015) + + round) >> + right_shift; + output_data[i * depth + c] = static_cast( + std::min(std::max(result, static_cast(0)), + static_cast(32767))); + } + } +} + +} // namespace reference_ops +} // namespace tflite + +#endif // TENSORFLOW_LITE_KERNELS_INTERNAL_REFERENCE_SOFTMAX_H_ diff --git a/tensorflow/lite/kernels/internal/reference/space_to_batch_nd.h b/tensorflow/lite/kernels/internal/reference/space_to_batch_nd.h new file mode 100644 index 0000000..7f84415 --- /dev/null +++ b/tensorflow/lite/kernels/internal/reference/space_to_batch_nd.h @@ -0,0 +1,109 @@ +/* Copyright 2020 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/ +#ifndef TENSORFLOW_LITE_KERNELS_INTERNAL_REFERENCE_SPACE_TO_BATCH_ND_H_ +#define TENSORFLOW_LITE_KERNELS_INTERNAL_REFERENCE_SPACE_TO_BATCH_ND_H_ + +#include + +#include "ruy/profiler/instrumentation.h" // from @ruy +#include "tensorflow/lite/kernels/internal/common.h" +#include "tensorflow/lite/kernels/internal/types.h" + +namespace tflite { +namespace reference_ops { + +// TODO(b/135760455): Move this method anonymous namespace in a cc file. +inline RuntimeShape ExtendShapeSpaceToBatch(const RuntimeShape& shape) { + if (shape.DimensionsCount() == 4) { + return shape; + } + RuntimeShape new_shape(4, 1); + new_shape.SetDim(0, shape.Dims(0)); + new_shape.SetDim(1, shape.Dims(1)); + new_shape.SetDim(3, shape.Dims(2)); + return new_shape; +} + +template +inline void SpaceToBatchND(const SpaceToBatchParams& params, + const RuntimeShape& unextended_input1_shape, + const T* input1_data, + const RuntimeShape& unextended_input2_shape, + const int32_t* block_shape_data, + const RuntimeShape& unextended_input3_shape, + const int32_t* paddings_data, + const RuntimeShape& unextended_output_shape, + T* output_data) { + ruy::profiler::ScopeLabel label("SpaceToBatchND"); + TFLITE_DCHECK_GE(unextended_input1_shape.DimensionsCount(), 3); + TFLITE_DCHECK_LE(unextended_input1_shape.DimensionsCount(), 4); + TFLITE_DCHECK_EQ(unextended_input1_shape.DimensionsCount(), + unextended_output_shape.DimensionsCount()); + + // Extends the input/output shape from 3D to 4D if needed, NHC -> NH1C. 
+ const RuntimeShape input1_shape = + ExtendShapeSpaceToBatch(unextended_input1_shape); + const RuntimeShape output_shape = + ExtendShapeSpaceToBatch(unextended_output_shape); + + const int depth = input1_shape.Dims(3); + const int input_width = input1_shape.Dims(2); + const int input_height = input1_shape.Dims(1); + const int input_batch_size = input1_shape.Dims(0); + + const int output_width = output_shape.Dims(2); + const int output_height = output_shape.Dims(1); + const int output_batch_size = output_shape.Dims(0); + + const int block_shape_height = block_shape_data[0]; + const int block_shape_width = + unextended_input1_shape.DimensionsCount() == 4 ? block_shape_data[1] : 1; + const int padding_top = paddings_data[0]; + const int padding_left = + unextended_input1_shape.DimensionsCount() == 4 ? paddings_data[2] : 0; + + // For uint8 quantized, the correct padding "zero value" is the output offset. + const int32_t pad_value = params.output_offset; + for (int out_b = 0; out_b < output_batch_size; ++out_b) { + int input_batch = out_b % input_batch_size; + int shift_w = (out_b / input_batch_size) % block_shape_width; + int shift_h = (out_b / input_batch_size) / block_shape_width; + for (int out_h = 0; out_h < output_height; ++out_h) { + for (int out_w = 0; out_w < output_width; ++out_w) { + T* out = output_data + Offset(output_shape, out_b, out_h, out_w, 0); + if (out_h * block_shape_height + shift_h < padding_top || + out_h * block_shape_height + shift_h >= + padding_top + input_height || + out_w * block_shape_width + shift_w < padding_left || + out_w * block_shape_width + shift_w >= padding_left + input_width) { + // This may not execute correctly when pad_value != 0 and T != uint8. 
+ memset(out, pad_value, depth * sizeof(T)); + } else { + const T* in = + input1_data + + Offset(input1_shape, input_batch, + (out_h * block_shape_height + shift_h) - padding_top, + (out_w * block_shape_width + shift_w) - padding_left, 0); + memcpy(out, in, depth * sizeof(T)); + } + } + } + } +} + +} // namespace reference_ops +} // namespace tflite + +#endif // TENSORFLOW_LITE_KERNELS_INTERNAL_REFERENCE_SPACE_TO_BATCH_ND_H_ diff --git a/tensorflow/lite/kernels/internal/reference/space_to_depth.h b/tensorflow/lite/kernels/internal/reference/space_to_depth.h new file mode 100644 index 0000000..7ad4654 --- /dev/null +++ b/tensorflow/lite/kernels/internal/reference/space_to_depth.h @@ -0,0 +1,80 @@ +/* Copyright 2020 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/ +#ifndef TENSORFLOW_LITE_KERNELS_INTERNAL_REFERENCE_SPACE_TO_DEPTH_H_ +#define TENSORFLOW_LITE_KERNELS_INTERNAL_REFERENCE_SPACE_TO_DEPTH_H_ + +#include + +#include "tensorflow/lite/kernels/internal/types.h" + +namespace tflite { +namespace reference_ops { + +template +inline void SpaceToDepth(const tflite::SpaceToDepthParams& op_params, + const RuntimeShape& unextended_input_shape, + const T* input_data, + const RuntimeShape& unextended_output_shape, + T* output_data) { + TFLITE_DCHECK_LE(unextended_input_shape.DimensionsCount(), 4); + TFLITE_DCHECK_LE(unextended_output_shape.DimensionsCount(), 4); + const RuntimeShape input_shape = + RuntimeShape::ExtendedShape(4, unextended_input_shape); + const RuntimeShape output_shape = + RuntimeShape::ExtendedShape(4, unextended_output_shape); + + const int input_depth = input_shape.Dims(3); + const int input_width = input_shape.Dims(2); + const int input_height = input_shape.Dims(1); + const int input_batch = input_shape.Dims(0); + + const int output_depth = output_shape.Dims(3); + const int output_width = output_shape.Dims(2); + const int output_height = output_shape.Dims(1); + const int output_batch = output_shape.Dims(0); + + const int32_t block_size = op_params.block_size; + + TFLITE_DCHECK_EQ(input_width, output_width * block_size); + TFLITE_DCHECK_EQ(input_height, output_height * block_size); + TFLITE_DCHECK_EQ(input_depth * block_size * block_size, output_depth); + TFLITE_DCHECK_EQ(input_batch, output_batch); + + for (int in_b = 0; in_b < input_batch; ++in_b) { + for (int in_h = 0; in_h < input_height; ++in_h) { + for (int in_w = 0; in_w < input_width; ++in_w) { + for (int in_d = 0; in_d < input_depth; ++in_d) { + const int out_d = + in_d + ((in_h % block_size) * block_size + in_w % block_size) * + input_depth; + const int out_w = in_w / block_size; + const int out_h = in_h / block_size; + const int out_b = in_b; + + const int 
input_index = Offset(input_shape, in_b, in_h, in_w, in_d); + const int output_index = + Offset(output_shape, out_b, out_h, out_w, out_d); + + output_data[output_index] = input_data[input_index]; + } + } + } + } +} + +} // namespace reference_ops +} // namespace tflite + +#endif // TENSORFLOW_LITE_KERNELS_INTERNAL_REFERENCE_SPACE_TO_DEPTH_H_ diff --git a/tensorflow/lite/kernels/internal/reference/strided_slice.h b/tensorflow/lite/kernels/internal/reference/strided_slice.h new file mode 100644 index 0000000..b76baaa --- /dev/null +++ b/tensorflow/lite/kernels/internal/reference/strided_slice.h @@ -0,0 +1,147 @@ +/* Copyright 2017 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/ +#ifndef TENSORFLOW_LITE_KERNELS_INTERNAL_REFERENCE_STRIDED_SLICE_H_ +#define TENSORFLOW_LITE_KERNELS_INTERNAL_REFERENCE_STRIDED_SLICE_H_ + +#include "ruy/profiler/instrumentation.h" // from @ruy +#include "tensorflow/lite/kernels/internal/common.h" +#include "tensorflow/lite/kernels/internal/compatibility.h" +#include "tensorflow/lite/kernels/internal/portable_tensor.h" +#include "tensorflow/lite/kernels/internal/strided_slice_logic.h" +#include "tensorflow/lite/kernels/internal/types.h" + +namespace tflite { + +namespace reference_ops { + +template +inline void StridedSlice(const tflite::StridedSliceParams& op_params, + const RuntimeShape& unextended_input_shape, + const RuntimeShape& unextended_output_shape, + SequentialTensorWriter* writer) { + ruy::profiler::ScopeLabel label("StridedSlice"); + + // Note that the output_shape is not used herein. + tflite::StridedSliceParams params_copy = op_params; + + TFLITE_DCHECK_LE(unextended_input_shape.DimensionsCount(), 5); + TFLITE_DCHECK_LE(unextended_output_shape.DimensionsCount(), 5); + const RuntimeShape input_shape = + RuntimeShape::ExtendedShape(5, unextended_input_shape); + const RuntimeShape output_shape = + RuntimeShape::ExtendedShape(5, unextended_output_shape); + + // Reverse and pad to 5 dimensions because that is what the runtime code + // requires (ie. all shapes must be 5D and are given backwards). 
+ strided_slice::StridedSlicePadIndices(¶ms_copy, 5); + + const int start_0 = + strided_slice::StridedSliceStartForAxis(params_copy, input_shape, 0); + const int stop_0 = strided_slice::StridedSliceEndForAxis( + params_copy, input_shape, 0, start_0); + const int start_1 = + strided_slice::StridedSliceStartForAxis(params_copy, input_shape, 1); + const int stop_1 = strided_slice::StridedSliceEndForAxis( + params_copy, input_shape, 1, start_1); + const int start_2 = + strided_slice::StridedSliceStartForAxis(params_copy, input_shape, 2); + const int stop_2 = strided_slice::StridedSliceEndForAxis( + params_copy, input_shape, 2, start_2); + const int start_3 = + strided_slice::StridedSliceStartForAxis(params_copy, input_shape, 3); + const int stop_3 = strided_slice::StridedSliceEndForAxis( + params_copy, input_shape, 3, start_3); + const int start_4 = + strided_slice::StridedSliceStartForAxis(params_copy, input_shape, 4); + const int stop_4 = strided_slice::StridedSliceEndForAxis( + params_copy, input_shape, 4, start_4); + + auto lc = [&](int end, int stride, int index) { + if (stride < 0) { + return index > end; + } else { + return index < end; + } + }; + // With a static_cast it is not possible to initialize + // a variable of type 'const int *' + // with an rvalue of type 'const int32_t *' (aka 'const long *'). + // reinterpret_cast is required to handle this casting. 
+ const int* shape = reinterpret_cast(input_shape.DimsData()); + const int* stride = reinterpret_cast(params_copy.strides); + const bool inner_stride_is_1 = params_copy.strides[4] == 1; + + for (int offset_0 = start_0; lc(stop_0, stride[0], offset_0); + offset_0 += stride[0]) { + for (int offset_1 = start_1; lc(stop_1, stride[1], offset_1); + offset_1 += stride[1]) { + for (int offset_2 = start_2; lc(stop_2, stride[2], offset_2); + offset_2 += stride[2]) { + for (int offset_3 = start_3; lc(stop_3, stride[3], offset_3); + offset_3 += stride[3]) { + // When the stride is 1, the inner loop is equivalent to the + // optimized slice inner loop. Otherwise, it is identical to the + // strided_slice reference implementation inner loop. + if (inner_stride_is_1) { + const int len = stop_4 - start_4; + int index = start_4 + offset_3 * shape[4] + + offset_2 * shape[3] * shape[4] + + offset_1 * shape[2] * shape[3] * shape[4] + + offset_0 * shape[1] * shape[2] * shape[3] * shape[4]; + if (len > 0) { + writer->WriteN(index, len); + } + } else { + for (int offset_4 = start_4; lc(stop_4, stride[4], offset_4); + offset_4 += stride[4]) { + int index = offset_4 + offset_3 * shape[4] + + offset_2 * shape[3] * shape[4] + + offset_1 * shape[2] * shape[3] * shape[4] + + offset_0 * shape[1] * shape[2] * shape[3] * shape[4]; + writer->Write(index); + } + } + } + } + } + } +} + +template +inline void StridedSlice(const tflite::StridedSliceParams& op_params, + const RuntimeShape& unextended_input_shape, + const T* input_data, + const RuntimeShape& unextended_output_shape, + T* output_data) { + SequentialTensorWriter writer(input_data, output_data); + StridedSlice(op_params, unextended_input_shape, unextended_output_shape, + &writer); +} + +template +inline void StridedSlice(const tflite::StridedSliceParams& op_params, + const RuntimeShape& unextended_input_shape, + const TfLiteTensor* input, + const RuntimeShape& unextended_output_shape, + TfLiteTensor* output) { + SequentialTensorWriter 
writer(input, output); + StridedSlice(op_params, unextended_input_shape, unextended_output_shape, + &writer); +} + +} // namespace reference_ops +} // namespace tflite + +#endif // TENSORFLOW_LITE_KERNELS_INTERNAL_REFERENCE_STRIDED_SLICE_H_ diff --git a/tensorflow/lite/kernels/internal/reference/sub.h b/tensorflow/lite/kernels/internal/reference/sub.h new file mode 100644 index 0000000..d0ebc95 --- /dev/null +++ b/tensorflow/lite/kernels/internal/reference/sub.h @@ -0,0 +1,479 @@ +/* Copyright 2020 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/ +#ifndef TENSORFLOW_LITE_KERNELS_INTERNAL_REFERENCE_SUB_H_ +#define TENSORFLOW_LITE_KERNELS_INTERNAL_REFERENCE_SUB_H_ + +#include + +#include +#include + +#include "ruy/profiler/instrumentation.h" // from @ruy +#include "tensorflow/lite/kernels/internal/common.h" +#include "tensorflow/lite/kernels/internal/compatibility.h" +#include "tensorflow/lite/kernels/internal/types.h" + +namespace tflite { + +namespace reference_ops { + +inline void SubNonBroadcast(const ArithmeticParams& params, + const RuntimeShape& input1_shape, + const float* input1_data, + const RuntimeShape& input2_shape, + const float* input2_data, + const RuntimeShape& output_shape, + float* output_data) { + const int flat_size = + MatchingElementsSize(input1_shape, input2_shape, output_shape); + for (int i = 0; i < flat_size; ++i) { + output_data[i] = ActivationFunctionWithMinMax( + input1_data[i] - input2_data[i], params.float_activation_min, + params.float_activation_max); + } +} + +inline void SubNonBroadcast(const ArithmeticParams& params, + const RuntimeShape& input1_shape, + const int32_t* input1_data, + const RuntimeShape& input2_shape, + const int32_t* input2_data, + const RuntimeShape& output_shape, + int32_t* output_data) { + const int flat_size = + MatchingElementsSize(input1_shape, input2_shape, output_shape); + for (int i = 0; i < flat_size; ++i) { + output_data[i] = ActivationFunctionWithMinMax( + input1_data[i] - input2_data[i], params.quantized_activation_min, + params.quantized_activation_max); + } +} + +// TODO(b/151345304): We can implement BroadcastSub on buffers of arbitrary +// dimensionality if the runtime code does a single loop over one dimension +// that handles broadcasting as the base case. The code generator would then +// generate max(D1, D2) nested for loops. 
+template +inline void BroadcastSubSlow(const ArithmeticParams& params, + const RuntimeShape& input1_shape, + const float* input1_data, + const RuntimeShape& input2_shape, + const float* input2_data, + const RuntimeShape& output_shape, + float* output_data) { + ruy::profiler::ScopeLabel label("BroadcastSubSlow/float"); + TFLITE_DCHECK_LE(input1_shape.DimensionsCount(), N); + TFLITE_DCHECK_LE(input2_shape.DimensionsCount(), N); + TFLITE_DCHECK_LE(output_shape.DimensionsCount(), N); + NdArrayDesc desc1; + NdArrayDesc desc2; + NdArrayDesc output_desc; + NdArrayDescsForElementwiseBroadcast(input1_shape, input2_shape, &desc1, + &desc2); + CopyDimsToDesc(RuntimeShape::ExtendedShape(N, output_shape), &output_desc); + + // In Tensorflow, the dimensions are canonically named (batch_number, row, + // col, channel), with extents (batches, height, width, depth), with the + // trailing dimension changing most rapidly (channels has the smallest stride, + // typically 1 element). + // + // In generated C code, we store arrays with the dimensions reversed. The + // first dimension has smallest stride. + // + // We name our variables by their Tensorflow convention, but generate C code + // nesting loops such that the innermost loop has the smallest stride for the + // best cache behavior. 
+ auto sub_func = [&](int indexes[N]) { + output_data[SubscriptToIndex(output_desc, indexes)] = + ActivationFunctionWithMinMax( + input1_data[SubscriptToIndex(desc1, indexes)] - + input2_data[SubscriptToIndex(desc2, indexes)], + params.float_activation_min, params.float_activation_max); + }; + NDOpsHelper(output_desc, sub_func); +} + +template +inline void BroadcastSubSlow(const ArithmeticParams& params, + const RuntimeShape& input1_shape, + const int32_t* input1_data, + const RuntimeShape& input2_shape, + const int32_t* input2_data, + const RuntimeShape& output_shape, + int32_t* output_data) { + ruy::profiler::ScopeLabel label("BroadcastSubSlow/int32_t"); + TFLITE_DCHECK_LE(input1_shape.DimensionsCount(), N); + TFLITE_DCHECK_LE(input2_shape.DimensionsCount(), N); + TFLITE_DCHECK_LE(output_shape.DimensionsCount(), N); + NdArrayDesc desc1; + NdArrayDesc desc2; + NdArrayDesc output_desc; + NdArrayDescsForElementwiseBroadcast(input1_shape, input2_shape, &desc1, + &desc2); + CopyDimsToDesc(RuntimeShape::ExtendedShape(N, output_shape), &output_desc); + + // In Tensorflow, the dimensions are canonically named (batch_number, row, + // col, channel), with extents (batches, height, width, depth), with the + // trailing dimension changing most rapidly (channels has the smallest stride, + // typically 1 element). + // + // In generated C code, we store arrays with the dimensions reversed. The + // first dimension has smallest stride. + // + // We name our variables by their Tensorflow convention, but generate C code + // nesting loops such that the innermost loop has the smallest stride for the + // best cache behavior. 
+ auto sub_func = [&](int indexes[N]) { + output_data[SubscriptToIndex(output_desc, indexes)] = + ActivationFunctionWithMinMax( + input1_data[SubscriptToIndex(desc1, indexes)] - + input2_data[SubscriptToIndex(desc2, indexes)], + params.quantized_activation_min, params.quantized_activation_max); + }; + NDOpsHelper(output_desc, sub_func); +} + +template +void BroadcastSubSlow(const ArithmeticParams& params, + const RuntimeShape& input1_shape, + const int64_t* input1_data, + const RuntimeShape& input2_shape, + const int64_t* input2_data, + const RuntimeShape& output_shape, int64_t* output_data) { + ruy::profiler::ScopeLabel label("BroadcastSubSlow/int64_t"); + TFLITE_DCHECK_LE(input1_shape.DimensionsCount(), N); + TFLITE_DCHECK_LE(input2_shape.DimensionsCount(), N); + TFLITE_DCHECK_LE(output_shape.DimensionsCount(), N); + NdArrayDesc desc1; + NdArrayDesc desc2; + NdArrayDesc output_desc; + NdArrayDescsForElementwiseBroadcast(input1_shape, input2_shape, &desc1, + &desc2); + CopyDimsToDesc(RuntimeShape::ExtendedShape(N, output_shape), &output_desc); + + // In Tensorflow, the dimensions are canonically named (batch_number, row, + // col, channel), with extents (batches, height, width, depth), with the + // trailing dimension changing most rapidly (channels has the smallest stride, + // typically 1 element). + // + // In generated C code, we store arrays with the dimensions reversed. The + // first dimension has smallest stride. + // + // We name our variables by their Tensorflow convention, but generate C code + // nesting loops such that the innermost loop has the smallest stride for the + // best cache behavior. 
+ auto sub_func = [&](int indexes[N]) { + output_data[SubscriptToIndex(output_desc, indexes)] = + ActivationFunctionWithMinMax( + input1_data[SubscriptToIndex(desc1, indexes)] - + input2_data[SubscriptToIndex(desc2, indexes)], + params.int64_activation_min, params.int64_activation_max); + }; + NDOpsHelper(output_desc, sub_func); +} + +template +void BroadcastSubSlow(const ArithmeticParams& params, + const RuntimeShape& input1_shape, const T* input1_data, + const RuntimeShape& input2_shape, const T* input2_data, + const RuntimeShape& output_shape, T* output_data) { + ruy::profiler::ScopeLabel label("BroadcastSubSlow/templated"); + TFLITE_DCHECK_LE(input1_shape.DimensionsCount(), N); + TFLITE_DCHECK_LE(input2_shape.DimensionsCount(), N); + TFLITE_DCHECK_LE(output_shape.DimensionsCount(), N); + NdArrayDesc desc1; + NdArrayDesc desc2; + NdArrayDesc output_desc; + NdArrayDescsForElementwiseBroadcast(input1_shape, input2_shape, &desc1, + &desc2); + CopyDimsToDesc(RuntimeShape::ExtendedShape(N, output_shape), &output_desc); + + // In Tensorflow, the dimensions are canonically named (batch_number, row, + // col, channel), with extents (batches, height, width, depth), with the + // trailing dimension changing most rapidly (channels has the smallest stride, + // typically 1 element). + // + // In generated C code, we store arrays with the dimensions reversed. The + // first dimension has smallest stride. + // + // We name our variables by their Tensorflow convention, but generate C code + // nesting loops such that the innermost loop has the smallest stride for the + // best cache behavior. 
+ auto sub_func = [&](int indexes[N]) { + output_data[SubscriptToIndex(output_desc, indexes)] = + ActivationFunctionWithMinMax( + input1_data[SubscriptToIndex(desc1, indexes)] - + input2_data[SubscriptToIndex(desc2, indexes)], + params.quantized_activation_min, params.quantized_activation_max); + }; + NDOpsHelper(output_desc, sub_func); +} + +template +inline void BroadcastSub16POTSlow(const ArithmeticParams& params, + const RuntimeShape& input1_shape, + const int16_t* input1_data, + const RuntimeShape& input2_shape, + const int16_t* input2_data, + const RuntimeShape& output_shape, + int16_t* output_data) { + ruy::profiler::ScopeLabel label("BroadcastSub16POTSlow/int16_t"); + NdArrayDesc desc1; + NdArrayDesc desc2; + NdArrayDesc output_desc; + NdArrayDescsForElementwiseBroadcast(input1_shape, input2_shape, &desc1, + &desc2); + CopyDimsToDesc(RuntimeShape::ExtendedShape(N, output_shape), &output_desc); + + // In Tensorflow, the dimensions are canonically named (batch_number, row, + // col, channel), with extents (batches, height, width, depth), with the + // trailing dimension changing most rapidly (channels has the smallest stride, + // typically 1 element). + // + // In generated C code, we store arrays with the dimensions reversed. The + // first dimension has smallest stride. + // + // We name our variables by their Tensorflow convention, but generate C code + // nesting loops such that the innermost loop has the smallest stride for the + // best cache behavior. 
+ auto sub_func = [&](int indexes[N]) { + const int32_t input1_val = input1_data[SubscriptToIndex(desc1, indexes)]; + const int32_t input2_val = input2_data[SubscriptToIndex(desc2, indexes)]; + const int32_t scaled_input1_val = + gemmlowp::RoundingDivideByPOT(input1_val, -params.input1_shift); + const int32_t scaled_input2_val = + gemmlowp::RoundingDivideByPOT(input2_val, -params.input2_shift); + const int32_t raw_output = scaled_input1_val - scaled_input2_val; + const int32_t clamped_output = + std::min(params.quantized_activation_max, + std::max(params.quantized_activation_min, raw_output)); + output_data[SubscriptToIndex(output_desc, indexes)] = + static_cast(clamped_output); + }; + NDOpsHelper(output_desc, sub_func); +} + +template +void BroadcastQuantSubSlow(const ArithmeticParams& params, + const RuntimeShape& input1_shape, + const T* input1_data, + const RuntimeShape& input2_shape, + const T* input2_data, + const RuntimeShape& output_shape, T* output_data) { + ruy::profiler::ScopeLabel label("BroadcastQuantSubSlow/T"); + TFLITE_DCHECK_LE(input1_shape.DimensionsCount(), N); + TFLITE_DCHECK_LE(input2_shape.DimensionsCount(), N); + TFLITE_DCHECK_LE(output_shape.DimensionsCount(), N); + NdArrayDesc desc1; + NdArrayDesc desc2; + NdArrayDesc output_desc; + NdArrayDescsForElementwiseBroadcast(input1_shape, input2_shape, &desc1, + &desc2); + CopyDimsToDesc(RuntimeShape::ExtendedShape(N, output_shape), &output_desc); + + // In Tensorflow, the dimensions are canonically named (batch_number, row, + // col, channel), with extents (batches, height, width, depth), with the + // trailing dimension changing most rapidly (channels has the smallest stride, + // typically 1 element). + // + // In generated C code, we store arrays with the dimensions reversed. The + // first dimension has smallest stride. 
+ // + // We name our variables by their Tensorflow convention, but generate C code + // nesting loops such that the innermost loop has the smallest stride for the + // best cache behavior. + auto sub_func = [&](int indexes[N]) { + const int32_t input1_val = + params.input1_offset + input1_data[SubscriptToIndex(desc1, indexes)]; + const int32_t input2_val = + params.input2_offset + input2_data[SubscriptToIndex(desc2, indexes)]; + const int32_t shifted_input1_val = input1_val * (1 << params.left_shift); + const int32_t shifted_input2_val = input2_val * (1 << params.left_shift); + const int32_t scaled_input1_val = + MultiplyByQuantizedMultiplierSmallerThanOneExp( + shifted_input1_val, params.input1_multiplier, params.input1_shift); + const int32_t scaled_input2_val = + MultiplyByQuantizedMultiplierSmallerThanOneExp( + shifted_input2_val, params.input2_multiplier, params.input2_shift); + const int32_t raw_sub = scaled_input1_val - scaled_input2_val; + const int32_t raw_output = + MultiplyByQuantizedMultiplierSmallerThanOneExp( + raw_sub, params.output_multiplier, params.output_shift) + + params.output_offset; + const int32_t clamped_output = + std::min(params.quantized_activation_max, + std::max(params.quantized_activation_min, raw_output)); + output_data[SubscriptToIndex(output_desc, indexes)] = + static_cast(clamped_output); + }; + NDOpsHelper(output_desc, sub_func); +} + +// Element-wise add that can often be used for inner loop of broadcast add as +// well as the non-broadcast add. 
+template +inline void SubElementwise(int size, const ArithmeticParams& params, + const T* input1_data, const T* input2_data, + T* output_data) { + for (int i = 0; i < size; ++i) { + const int32_t input1_val = params.input1_offset + input1_data[i]; + const int32_t input2_val = params.input2_offset + input2_data[i]; + const int32_t shifted_input1_val = input1_val * (1 << params.left_shift); + const int32_t shifted_input2_val = input2_val * (1 << params.left_shift); + const int32_t scaled_input1_val = + MultiplyByQuantizedMultiplierSmallerThanOneExp( + shifted_input1_val, params.input1_multiplier, params.input1_shift); + const int32_t scaled_input2_val = + MultiplyByQuantizedMultiplierSmallerThanOneExp( + shifted_input2_val, params.input2_multiplier, params.input2_shift); + const int32_t raw_sub = scaled_input1_val - scaled_input2_val; + const int32_t raw_output = + MultiplyByQuantizedMultiplierSmallerThanOneExp( + raw_sub, params.output_multiplier, params.output_shift) + + params.output_offset; + const int32_t clamped_output = + std::min(params.quantized_activation_max, + std::max(params.quantized_activation_min, raw_output)); + output_data[i] = static_cast(clamped_output); + } +} + +inline void Sub(const ArithmeticParams& params, + const RuntimeShape& input1_shape, const uint8_t* input1_data, + const RuntimeShape& input2_shape, const uint8_t* input2_data, + const RuntimeShape& output_shape, uint8_t* output_data) { + TFLITE_DCHECK_LE(params.quantized_activation_min, + params.quantized_activation_max); + const int flat_size = + MatchingElementsSize(input1_shape, input2_shape, output_shape); + + TFLITE_DCHECK_GT(params.input1_offset, -256); + TFLITE_DCHECK_GT(params.input2_offset, -256); + TFLITE_DCHECK_LT(params.input1_offset, 256); + TFLITE_DCHECK_LT(params.input2_offset, 256); + SubElementwise(flat_size, params, input1_data, input2_data, output_data); +} + +inline void Sub(const ArithmeticParams& params, + const RuntimeShape& input1_shape, const int8_t* 
input1_data, + const RuntimeShape& input2_shape, const int8_t* input2_data, + const RuntimeShape& output_shape, int8_t* output_data) { + TFLITE_DCHECK_LE(params.quantized_activation_min, + params.quantized_activation_max); + + const int flat_size = + MatchingElementsSize(input1_shape, input2_shape, output_shape); + + TFLITE_DCHECK_GE(params.input1_offset, -128); + TFLITE_DCHECK_GE(params.input2_offset, -128); + // offset = -quantization_params.zero_point in PrepareGeneralSubOp(). + // So it's maximum can be 128 not 127. + TFLITE_DCHECK_LE(params.input1_offset, 128); + TFLITE_DCHECK_LE(params.input2_offset, 128); + SubElementwise(flat_size, params, input1_data, input2_data, output_data); +} + +inline void Sub(const ArithmeticParams& params, + const RuntimeShape& input1_shape, const int16_t* input1_data, + const RuntimeShape& input2_shape, const int16_t* input2_data, + const RuntimeShape& output_shape, int16_t* output_data) { + TFLITE_DCHECK_LE(params.quantized_activation_min, + params.quantized_activation_max); + + const int flat_size = + MatchingElementsSize(input1_shape, input2_shape, output_shape); + + TFLITE_DCHECK_EQ(params.input1_offset, 0); + TFLITE_DCHECK_EQ(params.input2_offset, 0); + SubElementwise(flat_size, params, input1_data, input2_data, output_data); +} + +template +void Sub(const ArithmeticParams& params, const RuntimeShape& input1_shape, + const T* input1_data, const RuntimeShape& input2_shape, + const T* input2_data, const RuntimeShape& output_shape, + T* output_data) { + NdArrayDesc<4> desc1; + NdArrayDesc<4> desc2; + NdArrayDescsForElementwiseBroadcast(input1_shape, input2_shape, &desc1, + &desc2); + const RuntimeShape extended_output_shape = + RuntimeShape::ExtendedShape(4, output_shape); + + // In Tensorflow, the dimensions are canonically named (batch_number, row, + // col, channel), with extents (batches, height, width, depth), with the + // trailing dimension changing most rapidly (channels has the smallest stride, + // typically 1 
element). + // + // In generated C code, we store arrays with the dimensions reversed. The + // first dimension has smallest stride. + // + // We name our variables by their Tensorflow convention, but generate C code + // nesting loops such that the innermost loop has the smallest stride for the + // best cache behavior. + for (int b = 0; b < extended_output_shape.Dims(0); ++b) { + for (int y = 0; y < extended_output_shape.Dims(1); ++y) { + for (int x = 0; x < extended_output_shape.Dims(2); ++x) { + for (int c = 0; c < extended_output_shape.Dims(3); ++c) { + output_data[Offset(extended_output_shape, b, y, x, c)] = + input1_data[SubscriptToIndex(desc1, b, y, x, c)] - + input2_data[SubscriptToIndex(desc2, b, y, x, c)]; + } + } + } + } +} + +inline void SetActivationMinMax(const ArithmeticParams& params, + int32_t* activation_min, + int32_t* activation_max) { + *activation_min = params.quantized_activation_min; + *activation_max = params.quantized_activation_max; +} + +inline void SetActivationMinMax(const ArithmeticParams& params, + float* activation_min, float* activation_max) { + *activation_min = params.float_activation_min; + *activation_max = params.float_activation_max; +} + +inline void SetActivationMinMax(const ArithmeticParams& params, + int64_t* activation_min, + int64_t* activation_max) { + *activation_min = params.int64_activation_min; + *activation_max = params.int64_activation_max; +} + +template +inline void SubWithActivation( + const ArithmeticParams& params, const RuntimeShape& input1_shape, + const T* input1_data, const RuntimeShape& input2_shape, + const T* input2_data, const RuntimeShape& output_shape, T* output_data) { + ruy::profiler::ScopeLabel label("SubWithActivation"); + const int flat_size = + MatchingElementsSize(input1_shape, input2_shape, output_shape); + T activation_min, activation_max; + SetActivationMinMax(params, &activation_min, &activation_max); + + for (int i = 0; i < flat_size; ++i) { + output_data[i] = 
ActivationFunctionWithMinMax( + input1_data[i] - input2_data[i], activation_min, activation_max); + } +} + +} // namespace reference_ops +} // namespace tflite + +#endif // TENSORFLOW_LITE_KERNELS_INTERNAL_REFERENCE_SUB_H_ diff --git a/tensorflow/lite/kernels/internal/reference/tanh.h b/tensorflow/lite/kernels/internal/reference/tanh.h new file mode 100644 index 0000000..3a05c47 --- /dev/null +++ b/tensorflow/lite/kernels/internal/reference/tanh.h @@ -0,0 +1,129 @@ +/* Copyright 2020 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/ +#ifndef TENSORFLOW_LITE_KERNELS_INTERNAL_REFERENCE_TANH_H_ +#define TENSORFLOW_LITE_KERNELS_INTERNAL_REFERENCE_TANH_H_ + +#include + +#include "fixedpoint/fixedpoint.h" +#include "tensorflow/lite/kernels/internal/common.h" +#include "tensorflow/lite/kernels/internal/cppmath.h" +#include "tensorflow/lite/kernels/internal/types.h" +#include "tensorflow/lite/kernels/op_macros.h" + +namespace tflite { +namespace reference_ops { + +inline void Tanh(const RuntimeShape& input_shape, const float* input_data, + const RuntimeShape& output_shape, float* output_data) { + const int flat_size = MatchingFlatSize(input_shape, output_shape); + + for (int i = 0; i < flat_size; i++) { + float val = input_data[i]; + float result = std::tanh(val); + output_data[i] = result; + } +} + +// Convenience version that allows, for example, generated-code calls to be +// uniform between data types. +inline void Tanh(const TanhParams&, const RuntimeShape& input_shape, + const float* input_data, const RuntimeShape& output_shape, + float* output_data) { + // Drop params: not needed. + Tanh(input_shape, input_data, output_shape, output_data); +} + +inline void Tanh(const TanhParams& params, const RuntimeShape& input_shape, + const int16_t* input_data, const RuntimeShape& output_shape, + int16_t* output_data) { + const int input_left_shift = params.input_left_shift; + // Support for shifts is limited until we have a parameterized version of + // SaturatingRoundingMultiplyByPOT(). + TFLITE_DCHECK_GE(input_left_shift, 0); + TFLITE_DCHECK_LE(input_left_shift, 1); + + const int flat_size = MatchingFlatSize(input_shape, output_shape); + + // F0 uses 0 integer bits, range [-1, 1]. + // This is the return type of math functions such as tanh, logistic, + // whose range is in [-1, 1]. + using F0 = gemmlowp::FixedPoint; + // F3 uses 3 integer bits, range [-8, 8], the input range expected here. 
+ using F3 = gemmlowp::FixedPoint; + + if (input_left_shift == 0) { + for (int i = 0; i < flat_size; i++) { + F3 input = F3::FromRaw(input_data[i]); + F0 output = gemmlowp::tanh(input); + output_data[i] = output.raw(); + } + } else { + for (int i = 0; i < flat_size; i++) { + F3 input = F3::FromRaw( + gemmlowp::SaturatingRoundingMultiplyByPOT<1>(input_data[i])); + F0 output = gemmlowp::tanh(input); + output_data[i] = output.raw(); + } + } +} + +inline void Tanh(const TanhParams& params, const RuntimeShape& input_shape, + const uint8_t* input_data, const RuntimeShape& output_shape, + uint8_t* output_data) { + const int32_t input_zero_point = params.input_zero_point; + const int32_t input_range_radius = params.input_range_radius; + const int32_t input_multiplier = params.input_multiplier; + const int input_left_shift = params.input_left_shift; + const int32_t output_zero_point = 128; + const int flat_size = MatchingFlatSize(input_shape, output_shape); + + for (int i = 0; i < flat_size; i++) { + const uint8_t input_val_u8 = input_data[i]; + const int32_t input_val_centered = + static_cast(input_val_u8) - input_zero_point; + uint8_t output_val; + if (input_val_centered <= -input_range_radius) { + output_val = 0; + } else if (input_val_centered >= input_range_radius) { + output_val = 255; + } else { + const int32_t input_val_rescaled = + MultiplyByQuantizedMultiplierGreaterThanOne( + input_val_centered, input_multiplier, input_left_shift); + using FixedPoint4 = gemmlowp::FixedPoint; + using FixedPoint0 = gemmlowp::FixedPoint; + const FixedPoint4 input_val_f4 = FixedPoint4::FromRaw(input_val_rescaled); + const FixedPoint0 output_val_f0 = gemmlowp::tanh(input_val_f4); + // Convert from Q0.31 to Q24.7. + using gemmlowp::RoundingDivideByPOT; + int32_t output_val_s32 = RoundingDivideByPOT(output_val_f0.raw(), 24); + output_val_s32 += output_zero_point; + if (output_val_s32 == 256) { + output_val_s32 = 255; + } + // Reinterpret as Q0.7, encoded in uint8_t. 
+ TFLITE_DCHECK_GE(output_val_s32, 0); + TFLITE_DCHECK_LE(output_val_s32, 255); + output_val = static_cast(output_val_s32); + } + output_data[i] = output_val; + } +} + +} // namespace reference_ops +} // namespace tflite + +#endif // TENSORFLOW_LITE_KERNELS_INTERNAL_REFERENCE_TANH_H_ diff --git a/tensorflow/lite/kernels/internal/reference/transpose.h b/tensorflow/lite/kernels/internal/reference/transpose.h new file mode 100644 index 0000000..7e2bf7b --- /dev/null +++ b/tensorflow/lite/kernels/internal/reference/transpose.h @@ -0,0 +1,203 @@ +/* Copyright 2020 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ +#ifndef TENSORFLOW_LITE_KERNELS_INTERNAL_REFERENCE_TRANSPOSE_H_ +#define TENSORFLOW_LITE_KERNELS_INTERNAL_REFERENCE_TRANSPOSE_H_ + +#include + +#include "tensorflow/lite/kernels/internal/types.h" + +namespace tflite { + +namespace reference_ops { + +namespace transpose_internal { + +// Recursively explores all the dimensions of the output tensor and writes the +// corresponding input tensor data. +// +// - depth: the current depth of the recursion. +// - dims: tensor dimension count, also `perm` size. +// - perm: permutation array. +// - input_data: Running input data pointer. If depth == num_dims-1, this points +// to the first element of the last dimension to traverse. +// - input_stride: Reverse partial product of input shapes. 
+// - output_data: Running output data pointer. If depth == num_dims-1, this +// points to the first element of the last dimension to traverse. +// - output_stride: Reverse partial product of output shapes. +// - output_shape: Shape of the output tensor. +// +// ## Algorithm explanation +// +// Assume a 3D tensor T with a shape of [I, J, K] stored in row major order. +// T[i, j, k] is at position `i*J*K + j*K + k` in the tensor buffer. +// +// If we want to go through the whole tensor iteratively, we can use loops. +// +// ``` +// for(i = 0; i < I; ++i) { +// for(j = 0; j < J; ++j) { +// for(k = 0; k < K; ++k) { +// T.data[i*J*K + j*K + k] = ... +// } +// } +// } +// ``` +// +// We can also compute the offset as we go through the loops. +// +// ``` +// stride_i = K * J; +// stride_j = K; +// stride_k = 1; +// for(i = 0; i < I; ++i) { +// offset_i = i * stride_i; +// offset_j = 0; +// for(j = 0; j < J; ++j) { +// offset_j += stride_j; +// offset_k = 0; +// for(k = 0; k < K; ++k) { +// offset_k += stride_k; +// T.data[offset_i + offset_j + offset_k] = ... +// } +// } +// } +// ``` +// +// This nicely extends to a recursive version which is the base of this +// algorithm and supports any number of dimensions. +// +// ``` +// shape = [I, J, K] +// strides = [K*J, K, 1] +// void recurse(T* data, shape, strides, depth = 0) { +// if(depth == shape.size) { +// *data = ... 
+// } else { +// for(a = 0; a < shape[depth]; ++a) { +// recurse(data, shape, strides, depth+1); +// data += strides[depth]; +// } +// } +// } +// ``` +template +void TransposeImpl(const int depth, const int dims, const int32_t* perm, + const T* input_data, const int* input_stride, T* output_data, + const int* output_stride, const int32_t* output_shape) { + const int dimension_size = output_shape[depth]; + if (depth == dims - 1) { + const int loop_stride = input_stride[perm[depth]]; + for (int i = 0; i < dimension_size; ++i) { + output_data[i] = *input_data; + input_data += loop_stride; + } + } else { + for (int i = 0; i < dimension_size; ++i) { + TransposeImpl(depth + 1, dims, perm, input_data, input_stride, + output_data, output_stride, output_shape); + + input_data += input_stride[perm[depth]]; + output_data += output_stride[depth]; + } + } +} + +// Compile-time switch to get the storage type of the transposition. +template +struct TransposeStorageType; + +template <> +struct TransposeStorageType<1> { + using type = int8_t; +}; + +template <> +struct TransposeStorageType<2> { + using type = int16_t; +}; + +template <> +struct TransposeStorageType<4> { + using type = int32_t; +}; + +template <> +struct TransposeStorageType<8> { + using type = int64_t; +}; + +// Sets up the stride arrays for the recursive transpose algorithm. +// +// Implementation notes: +// +// This is a reverse partial product. We could use standard algorithms to +// implement this but the result is not a readable and is tricky to get right +// because the first element must be set to 1, which leads to offset +// shenanigans: +// +// ``` +// stride[dims - 1] = 1; +// std::partial_sum(std::make_reverse_iterator(shape + dims), +// std::make_reverse_iterator(shape + 1), +// stride.rend() - input_rank + 1, std::multiplies()); +// ``` +// +// Note that Abseil isn't used in kernels implementation. That would make the +// above solution more readable. 
+inline void SetupTransposeStrides( + std::array& stride, const int32_t* shape, + const int dims) { + stride[dims - 1] = 1; + for (int i = dims - 2; i >= 0; --i) { + stride[i] = stride[i + 1] * shape[i + 1]; + } +} + +} // namespace transpose_internal + +// Copies a tensor to an other buffer and permutes its dimensions. +// +// Note: template parameter N is not used anymore. It is kept for API +// compatibility with TFLite micro. +template +void Transpose(const TransposeParams& params, const RuntimeShape& input_shape, + const T* input_data, const RuntimeShape& output_shape, + T* output_data) { + using transpose_internal::SetupTransposeStrides; + using transpose_internal::TransposeImpl; + using transpose_internal::TransposeStorageType; + // Transpose kernel only does rearranging values not numeric evaluations on + // each cell. It's safe to implement per size of scalar type and this trick + // keeps the total code size in a reasonable range. + using StorageType = typename TransposeStorageType::type; + const StorageType* const input_data_storage = + reinterpret_cast(input_data); + StorageType* const output_data_storage = + reinterpret_cast(output_data); + + const int dims = input_shape.DimensionsCount(); + std::array input_stride, output_stride; + SetupTransposeStrides(input_stride, input_shape.DimsData(), dims); + SetupTransposeStrides(output_stride, output_shape.DimsData(), dims); + TransposeImpl(0, dims, ¶ms.perm[0], input_data_storage, + input_stride.data(), output_data_storage, output_stride.data(), + output_shape.DimsData()); +} + +} // namespace reference_ops +} // namespace tflite + +#endif // TENSORFLOW_LITE_KERNELS_INTERNAL_REFERENCE_TRANSPOSE_H_ diff --git a/tensorflow/lite/kernels/internal/reference/transpose_conv.h b/tensorflow/lite/kernels/internal/reference/transpose_conv.h new file mode 100644 index 0000000..8a51e0f --- /dev/null +++ b/tensorflow/lite/kernels/internal/reference/transpose_conv.h @@ -0,0 +1,225 @@ +/* Copyright 2020 The TensorFlow 
Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ +#ifndef TENSORFLOW_LITE_KERNELS_INTERNAL_REFERENCE_TRANSPOSE_CONV_H_ +#define TENSORFLOW_LITE_KERNELS_INTERNAL_REFERENCE_TRANSPOSE_CONV_H_ + +#include + +#include "tensorflow/lite/kernels/internal/common.h" +#include "tensorflow/lite/kernels/internal/types.h" + +namespace tflite { + +namespace reference_ops { + +inline void TransposeConv( + const ConvParams& params, const RuntimeShape& input_shape, + const float* input_data, const RuntimeShape& filter_shape, + const float* filter_data, const RuntimeShape& bias_shape, + const float* bias_data, const RuntimeShape& output_shape, + float* output_data, const RuntimeShape& im2col_shape, float* im2col_data) { + const int stride_width = params.stride_width; + const int stride_height = params.stride_height; + const int pad_width = params.padding_values.width; + const int pad_height = params.padding_values.height; + TFLITE_DCHECK_EQ(input_shape.DimensionsCount(), 4); + TFLITE_DCHECK_EQ(filter_shape.DimensionsCount(), 4); + TFLITE_DCHECK_EQ(output_shape.DimensionsCount(), 4); + (void)im2col_data; // only used in optimized code. + (void)im2col_shape; // only used in optimized code. 
+ + const int batches = MatchingDim(input_shape, 0, output_shape, 0); + const int input_depth = MatchingDim(input_shape, 3, filter_shape, 3); + const int output_depth = MatchingDim(filter_shape, 0, output_shape, 3); + const int input_height = input_shape.Dims(1); + const int input_width = input_shape.Dims(2); + const int filter_height = filter_shape.Dims(1); + const int filter_width = filter_shape.Dims(2); + const int output_height = output_shape.Dims(1); + const int output_width = output_shape.Dims(2); + const float output_activation_min = params.float_activation_min; + const float output_activation_max = params.float_activation_max; + if (bias_data) { + TFLITE_DCHECK_EQ(bias_shape.FlatSize(), output_depth); + } + + // Although transpose convolution simplifies to convolution with transposed + // weights for strides of 1, non-unitary striding complicates matters. To + // keep this reference implementation as clear as possible, we use a + // "scatter" access pattern, where we loop through all the input elements, + // computing their influence on the output, rather than looping through the + // output elements in the typical "gather" access pattern of a conv. We + // therefore must initialize the output array to zero. + const int num_elements = output_shape.FlatSize(); + for (int i = 0; i < num_elements; i++) { + output_data[i] = 0.0f; + } + + // Loop through input elements one at a time. 
+ for (int batch = 0; batch < batches; ++batch) { + for (int in_y = 0; in_y < input_height; ++in_y) { + for (int in_x = 0; in_x < input_width; ++in_x) { + for (int in_channel = 0; in_channel < input_depth; ++in_channel) { + // Loop through the output elements it will influence + const int out_x_origin = (in_x * stride_width) - pad_width; + const int out_y_origin = (in_y * stride_height) - pad_height; + for (int filter_y = 0; filter_y < filter_height; ++filter_y) { + for (int filter_x = 0; filter_x < filter_width; ++filter_x) { + for (int out_channel = 0; out_channel < output_depth; + ++out_channel) { + // Compute output element location + const int out_x = out_x_origin + filter_x; + const int out_y = out_y_origin + filter_y; + // We cannot accumulate out of bounds + if ((out_x >= 0) && (out_x < output_width) && (out_y >= 0) && + (out_y < output_height)) { + float input_value = input_data[Offset( + input_shape, batch, in_y, in_x, in_channel)]; + float filter_value = + filter_data[Offset(filter_shape, out_channel, filter_y, + filter_x, in_channel)]; + output_data[Offset(output_shape, batch, out_y, out_x, + out_channel)] += + input_value * filter_value; + } + } + } + } + } + } + } + } + + for (int batch = 0; batch < batches; ++batch) { + for (int out_y = 0; out_y < output_height; ++out_y) { + for (int out_x = 0; out_x < output_width; ++out_x) { + for (int out_channel = 0; out_channel < output_depth; ++out_channel) { + float acc = output_data[Offset(output_shape, batch, out_y, out_x, + out_channel)]; + if (bias_data) acc += bias_data[out_channel]; + + output_data[Offset(output_shape, batch, out_y, out_x, out_channel)] = + ActivationFunctionWithMinMax(acc, output_activation_min, + output_activation_max); + } + } + } + } +} + +inline void TransposeConv( + const ConvParams& params, const RuntimeShape& input_shape, + const uint8_t* input_data, const RuntimeShape& filter_shape, + const uint8_t* filter_data, const RuntimeShape& bias_shape, + const int32_t* bias_data, const 
RuntimeShape& output_shape, + uint8_t* output_data, const RuntimeShape& im2col_shape, + uint8_t* im2col_data, int32_t* scratch_buffer) { + const int stride_width = params.stride_width; + const int stride_height = params.stride_height; + const int pad_width = params.padding_values.width; + const int pad_height = params.padding_values.height; + TFLITE_DCHECK_EQ(input_shape.DimensionsCount(), 4); + TFLITE_DCHECK_EQ(filter_shape.DimensionsCount(), 4); + TFLITE_DCHECK_EQ(output_shape.DimensionsCount(), 4); + (void)im2col_data; // only used in optimized code. + (void)im2col_shape; // only used in optimized code. + + const int batches = MatchingDim(input_shape, 0, output_shape, 0); + const int input_depth = MatchingDim(input_shape, 3, filter_shape, 3); + const int output_depth = MatchingDim(filter_shape, 0, output_shape, 3); + const int input_height = input_shape.Dims(1); + const int input_width = input_shape.Dims(2); + const int filter_height = filter_shape.Dims(1); + const int filter_width = filter_shape.Dims(2); + const int output_height = output_shape.Dims(1); + const int output_width = output_shape.Dims(2); + const int32_t input_offset = params.input_offset; + const int32_t filter_offset = params.weights_offset; + const int32_t output_offset = params.output_offset; + const int32_t output_multiplier = params.output_multiplier; + const int output_shift = params.output_shift; + const int32_t output_activation_min = params.quantized_activation_min; + const int32_t output_activation_max = params.quantized_activation_max; + TFLITE_DCHECK_LE(output_activation_min, output_activation_max); + if (bias_data) { + TFLITE_DCHECK_EQ(bias_shape.FlatSize(), output_depth); + } + + const int num_elements = output_shape.FlatSize(); + // We need to initialize scratch_buffer to all 0s, as we apply the same + // 'scatter' based trick as in float version. + memset(scratch_buffer, 0, num_elements * sizeof(int32_t)); + + // Loop through input elements one at a time. 
+ for (int batch = 0; batch < batches; ++batch) { + for (int in_y = 0; in_y < input_height; ++in_y) { + for (int in_x = 0; in_x < input_width; ++in_x) { + for (int in_channel = 0; in_channel < input_depth; ++in_channel) { + // Loop through the output elements it will influence. + const int out_x_origin = (in_x * stride_width) - pad_width; + const int out_y_origin = (in_y * stride_height) - pad_height; + for (int filter_y = 0; filter_y < filter_height; ++filter_y) { + for (int filter_x = 0; filter_x < filter_width; ++filter_x) { + for (int out_channel = 0; out_channel < output_depth; + ++out_channel) { + // Compute output element location. + const int out_x = out_x_origin + filter_x; + const int out_y = out_y_origin + filter_y; + // We cannot accumulate out of bounds. + if ((out_x >= 0) && (out_x < output_width) && (out_y >= 0) && + (out_y < output_height)) { + uint8_t input_value = input_data[Offset( + input_shape, batch, in_y, in_x, in_channel)]; + uint8_t filter_value = + filter_data[Offset(filter_shape, out_channel, filter_y, + filter_x, in_channel)]; + scratch_buffer[Offset(output_shape, batch, out_y, out_x, + out_channel)] += + (input_value + input_offset) * + (filter_value + filter_offset); + } + } + } + } + } + } + } + } + for (int batch = 0; batch < batches; ++batch) { + for (int out_y = 0; out_y < output_height; ++out_y) { + for (int out_x = 0; out_x < output_width; ++out_x) { + for (int out_channel = 0; out_channel < output_depth; ++out_channel) { + int32_t acc = scratch_buffer[Offset(output_shape, batch, out_y, out_x, + out_channel)]; + if (bias_data) { + acc += bias_data[out_channel]; + } + int32_t scaled_acc = MultiplyByQuantizedMultiplier( + acc, output_multiplier, output_shift); + scaled_acc += output_offset; + scaled_acc = std::max(scaled_acc, output_activation_min); + scaled_acc = std::min(scaled_acc, output_activation_max); + output_data[Offset(output_shape, batch, out_y, out_x, out_channel)] = + static_cast(scaled_acc); + } + } + } + } +} + +} // 
namespace reference_ops +} // namespace tflite + +#endif // TENSORFLOW_LITE_KERNELS_INTERNAL_REFERENCE_TRANSPOSE_CONV_H_ diff --git a/tensorflow/lite/kernels/internal/runtime_shape.h b/tensorflow/lite/kernels/internal/runtime_shape.h new file mode 100644 index 0000000..0e4df2c --- /dev/null +++ b/tensorflow/lite/kernels/internal/runtime_shape.h @@ -0,0 +1,166 @@ +/* Copyright 2021 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ +#ifndef TENSORFLOW_LITE_KERNELS_INTERNAL_RUNTIME_SHAPE_H_ +#define TENSORFLOW_LITE_KERNELS_INTERNAL_RUNTIME_SHAPE_H_ + +#include "tensorflow/lite/kernels/internal/compatibility.h" + +namespace tflite { + +template +struct Dims { + int sizes[N]; + int strides[N]; +}; + +class RuntimeShape { + public: + RuntimeShape& operator=(RuntimeShape const&) = delete; + + // RuntimeShape in TFLM supports up to 6 dimensions. + // The name kMaxSmallSize comes from the same file of the upstream + // tensorflow lite repo and need to be kept the same for max reuse. 
+ static constexpr int kMaxSmallSize = 6; + + RuntimeShape() : size_(0) {} + + explicit RuntimeShape(int dimensions_count) : size_(dimensions_count) { + TFLITE_DCHECK_LE(dimensions_count, kMaxSmallSize); + } + + RuntimeShape(int shape_size, int32_t value) : size_(shape_size) { + TFLITE_DCHECK_LE(shape_size, kMaxSmallSize); + for (int i = 0; i < shape_size; ++i) { + SetDim(i, value); + } + } + + RuntimeShape(int dimensions_count, const int32_t* dims_data) + : size_(dimensions_count) { + // check of dimensions_count handled by ReplaceWith() + ReplaceWith(dimensions_count, dims_data); + } + + bool operator==(const RuntimeShape& comp) const { + return this->size_ == comp.size_ && + std::memcmp(DimsData(), comp.DimsData(), size_ * sizeof(int32_t)) == + 0; + } + + ~RuntimeShape() {} + + int32_t DimensionsCount() const { return size_; } + int32_t Dims(int i) const { + TFLITE_DCHECK_GE(i, 0); + TFLITE_DCHECK_LT(i, size_); + return dims_[i]; + } + void SetDim(int i, int32_t val) { + TFLITE_DCHECK_GE(i, 0); + TFLITE_DCHECK_LT(i, size_); + dims_[i] = val; + } + + static RuntimeShape ExtendedShape(int new_shape_size, + const RuntimeShape& shape) { + TFLITE_DCHECK_LE(new_shape_size, kMaxSmallSize); + return RuntimeShape(new_shape_size, shape, 1); + } + int32_t* DimsData() { return dims_; } + const int32_t* DimsData() const { return dims_; } + const int32_t* DimsDataUpTo5D() const { return dims_; } + + void ReplaceWith(int dimensions_count, const int32_t* dims_data) { + TFLITE_DCHECK_LE(dimensions_count, kMaxSmallSize); + size_ = dimensions_count; + int32_t* dst_dims = DimsData(); + std::memcpy(dst_dims, dims_data, dimensions_count * sizeof(int32_t)); + } + + // Returns the total count of elements, that is the size when flattened into a + // vector. 
+ int FlatSize() const { + int buffer_size = 1; + const int* dims_data = reinterpret_cast(DimsData()); + for (int i = 0; i < size_; i++) { + buffer_size *= dims_data[i]; + } + return buffer_size; + } + + private: + // For use only by ExtendedShape(), written to guarantee (return-value) copy + // elision in C++17. + // This creates a shape padded to the desired size with the specified value. + RuntimeShape(int new_shape_size, const RuntimeShape& shape, int pad_value) + : size_(new_shape_size) { + // If the following check fails, it is likely because a 4D-only kernel is + // being used with an array of larger dimension count. + TFLITE_CHECK_GE(new_shape_size, shape.DimensionsCount()); + const int size_increase = new_shape_size - shape.DimensionsCount(); + for (int i = 0; i < size_increase; ++i) { + SetDim(i, pad_value); + } + std::memcpy(DimsData() + size_increase, shape.DimsData(), + sizeof(int32_t) * shape.DimensionsCount()); + } + + int32_t size_; + union { + int32_t dims_[kMaxSmallSize]; + }; +}; + +// Since tensors with '0' in their shape are valid in TF, these offset functions +// allow that as long as the corresponding index is also 0. It is upto the +// calling ops to ensure that they perform verification checks on tensor shapes +// if they don't support a particular behavior. 
+ +inline int Offset(const RuntimeShape& shape, int i0, int i1, int i2, int i3) { + TFLITE_DCHECK_EQ(shape.DimensionsCount(), 4); + const int* dims_data = reinterpret_cast(shape.DimsData()); + TFLITE_DCHECK((dims_data[0] == 0 && i0 == 0) || + (i0 >= 0 && i0 < dims_data[0])); + TFLITE_DCHECK((dims_data[1] == 0 && i1 == 0) || + (i1 >= 0 && i1 < dims_data[1])); + TFLITE_DCHECK((dims_data[2] == 0 && i2 == 0) || + (i2 >= 0 && i2 < dims_data[2])); + TFLITE_DCHECK((dims_data[3] == 0 && i3 == 0) || + (i3 >= 0 && i3 < dims_data[3])); + return ((i0 * dims_data[1] + i1) * dims_data[2] + i2) * dims_data[3] + i3; +} + +inline int Offset(const RuntimeShape& shape, int i0, int i1, int i2, int i3, + int i4) { + TFLITE_DCHECK_EQ(shape.DimensionsCount(), 5); + const int* dims_data = reinterpret_cast(shape.DimsData()); + TFLITE_DCHECK((dims_data[0] == 0 && i0 == 0) || + (i0 >= 0 && i0 < dims_data[0])); + TFLITE_DCHECK((dims_data[1] == 0 && i1 == 0) || + (i1 >= 0 && i1 < dims_data[1])); + TFLITE_DCHECK((dims_data[2] == 0 && i2 == 0) || + (i2 >= 0 && i2 < dims_data[2])); + TFLITE_DCHECK((dims_data[3] == 0 && i3 == 0) || + (i3 >= 0 && i3 < dims_data[3])); + TFLITE_DCHECK((dims_data[4] == 0 && i4 == 0) || + (i4 >= 0 && i4 < dims_data[4])); + return (((i0 * dims_data[1] + i1) * dims_data[2] + i2) * dims_data[3] + i3) * + dims_data[4] + + i4; +} + +} // namespace tflite + +#endif // TENSORFLOW_LITE_KERNELS_INTERNAL_RUNTIME_SHAPE_H_ diff --git a/tensorflow/lite/kernels/internal/strided_slice_logic.h b/tensorflow/lite/kernels/internal/strided_slice_logic.h new file mode 100644 index 0000000..449cac0 --- /dev/null +++ b/tensorflow/lite/kernels/internal/strided_slice_logic.h @@ -0,0 +1,278 @@ +/* Copyright 2018 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. 
+You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#ifndef TENSORFLOW_LITE_KERNELS_INTERNAL_STRIDED_SLICE_LOGIC_H_ +#define TENSORFLOW_LITE_KERNELS_INTERNAL_STRIDED_SLICE_LOGIC_H_ + +#include +#include + +#include "tensorflow/lite/kernels/internal/compatibility.h" +#include "tensorflow/lite/kernels/internal/types.h" + +namespace tflite { +namespace strided_slice { + +// Use until std::clamp() is available from C++17. +inline int Clamp(const int v, const int lo, const int hi) { + TFLITE_DCHECK(!(hi < lo)); + if (hi < v) return hi; + if (v < lo) return lo; + return v; +} + +inline void StridedSlicePadIndices(tflite::StridedSliceParams* p, + int dim_count) { + // Add indices and mask bits to fully include extra dimensions + TFLITE_CHECK_LE(dim_count, 5); + TFLITE_CHECK_GE(dim_count, p->start_indices_count); + TFLITE_CHECK_EQ(p->start_indices_count, p->stop_indices_count); + TFLITE_CHECK_EQ(p->stop_indices_count, p->strides_count); + + const int pad_count = dim_count - p->start_indices_count; + + // Pad indices at start, so move arrays by pad_count. + for (int i = p->start_indices_count - 1; i >= 0; --i) { + p->strides[i + pad_count] = p->strides[i]; + p->start_indices[i + pad_count] = p->start_indices[i]; + p->stop_indices[i + pad_count] = p->stop_indices[i]; + } + for (int i = 0; i < pad_count; ++i) { + p->start_indices[i] = 0; + p->stop_indices[i] = 1; + p->strides[i] = 1; + } + + // Pad masks with 0s or 1s as required. 
+ p->shrink_axis_mask <<= pad_count; + p->ellipsis_mask <<= pad_count; + p->new_axis_mask <<= pad_count; + p->begin_mask <<= pad_count; + p->end_mask <<= pad_count; + p->begin_mask |= (1 << pad_count) - 1; + p->end_mask |= (1 << pad_count) - 1; + + p->start_indices_count = dim_count; + p->stop_indices_count = dim_count; + p->strides_count = dim_count; +} + +// Return the index for the first element along that axis. This index will be a +// positive integer between [0, axis_size] (or [-1, axis_size -1] if stride < 0) +// that can be used to index directly into the data. +inline int StridedSliceStartForAxis(const tflite::StridedSliceParams& params, + const RuntimeShape& input_shape, + int32_t axis) { + const int32_t axis_size = input_shape.Dims(axis); + int32_t start = params.start_indices[axis]; + const int32_t stride = params.strides[axis]; + const int32_t begin_mask = (params.begin_mask & 1 << axis); + if (start < 0) { + start += axis_size; + } + if (stride > 0) { + start = Clamp(start, 0, axis_size); + } else { + start = Clamp(start, -1, axis_size - 1); + } + if (begin_mask) { + if (stride > 0) { + start = 0; + } else { + start = axis_size - 1; + } + } + return start; +} + +inline int StridedSliceEndForAxis(const tflite::StridedSliceParams& params, + const RuntimeShape& input_shape, int axis, + int start) { + const auto shrink_axis_mask = params.shrink_axis_mask; + const bool shrink_axis = shrink_axis_mask & (1 << axis); + const int axis_size = input_shape.Dims(axis); + const bool offset = params.offset; + if (shrink_axis) { + if (start >= axis_size) { + return start; + } else { + return start + 1; + } + } + const auto* indices = params.stop_indices; + int end = indices[axis]; + if (offset) { + end += start; + } + const int32_t stride = params.strides[axis]; + const int32_t end_mask = (params.end_mask & 1 << axis); + if (end < 0) { + end += axis_size; + } + if (stride > 0) { + end = Clamp(end, 0, axis_size); + } else { + end = Clamp(end, -1, axis_size - 1); + } + 
if (end_mask) { + if (stride > 0) { + end = axis_size; + } else { + end = -1; + } + } + return end; +} + +// Return the index for the first element along that axis. This index will be a +// positive integer between [0, axis_size] (or [-1, axis_size -1] if stride < 0) +// that can be used to index directly into the data. +inline int StartForAxis(const tflite::StridedSliceParams& params, + const RuntimeShape& input_shape, int axis) { + const auto begin_mask = params.begin_mask; + const auto* start_indices = params.start_indices; + const auto* strides = params.strides; + const int axis_size = input_shape.Dims(axis); + if (axis_size == 0) { + return 0; + } + // Begin with the specified index. + int start = start_indices[axis]; + + // begin_mask override + if (begin_mask & 1 << axis) { + if (strides[axis] > 0) { + // Forward iteration - use the first element. These values will get + // clamped below (Note: We could have set them to 0 and axis_size-1, but + // use lowest() and max() to maintain symmetry with StopForAxis()) + start = std::numeric_limits::lowest(); + } else { + // Backward iteration - use the last element. + start = std::numeric_limits::max(); + } + } + + // Handle negative indices + if (start < 0) { + start += axis_size; + } + + // Clamping + if (strides[axis] > 0) { + // Forward iteration + start = Clamp(start, 0, axis_size); + } else { + // Backward iteration + start = Clamp(start, -1, axis_size - 1); + } + + return start; +} + +// Return the "real" index for the end of iteration along that axis. This is an +// "end" in the traditional C sense, in that it points to one past the last +// element. ie. So if you were iterating through all elements of a 1D array of +// size 4, this function would return 4 as the stop, because it is one past the +// "real" indices of 0, 1, 2 & 3. 
+inline int StopForAxis(const tflite::StridedSliceParams& params, + const RuntimeShape& input_shape, int axis, + int start_for_axis) { + const auto end_mask = params.end_mask; + const auto shrink_axis_mask = params.shrink_axis_mask; + const auto* stop_indices = params.stop_indices; + const auto* strides = params.strides; + const int axis_size = input_shape.Dims(axis); + if (axis_size == 0) { + return 0; + } + + // Begin with the specified index + const bool shrink_axis = shrink_axis_mask & (1 << axis); + int stop = stop_indices[axis]; + + // When shrinking an axis, the end position does not matter (and can be + // incorrect when negative indexing is used, see Issue #19260). Always use + // start_for_axis + 1 to generate a length 1 slice, since start_for_axis has + // already been adjusted for negative indices. + if (shrink_axis) { + return start_for_axis + 1; + } + + // end_mask override + if (end_mask & (1 << axis)) { + if (strides[axis] > 0) { + // Forward iteration - use the last element. These values will get + // clamped below + stop = std::numeric_limits::max(); + } else { + // Backward iteration - use the first element. + stop = std::numeric_limits::lowest(); + } + } + + // Handle negative indices + if (stop < 0) { + stop += axis_size; + } + + // Clamping + // Because the end index points one past the last element, we need slightly + // different clamping ranges depending on the direction. + if (strides[axis] > 0) { + // Forward iteration + stop = Clamp(stop, 0, axis_size); + } else { + // Backward iteration + stop = Clamp(stop, -1, axis_size - 1); + } + + return stop; +} + +inline bool LoopCondition(int index, int stop, int stride) { + // True when we have reached the end of an axis and should loop. + return stride > 0 ? 
index >= stop : index <= stop; +} + +inline tflite::StridedSliceParams BuildStridedSliceParams( + int begin_mask, int end_mask, int shrink_axis_mask, + const std::vector& start_indices, const std::vector& stop_indices, + const std::vector& strides) { + tflite::StridedSliceParams op_params{}; + const int dims_count = start_indices.size(); + + op_params.start_indices_count = dims_count; + op_params.stop_indices_count = dims_count; + op_params.strides_count = dims_count; + for (int i = 0; i < dims_count; ++i) { + op_params.start_indices[i] = start_indices[i]; + op_params.stop_indices[i] = stop_indices[i]; + op_params.strides[i] = strides[i]; + } + + op_params.begin_mask = begin_mask; + op_params.ellipsis_mask = 0; + op_params.end_mask = end_mask; + op_params.new_axis_mask = 0; + op_params.shrink_axis_mask = shrink_axis_mask; + + return op_params; +} + +} // namespace strided_slice + +} // namespace tflite + +#endif // TENSORFLOW_LITE_KERNELS_INTERNAL_STRIDED_SLICE_LOGIC_H_ diff --git a/tensorflow/lite/kernels/internal/tensor_ctypes.cc b/tensorflow/lite/kernels/internal/tensor_ctypes.cc new file mode 100644 index 0000000..6bd58fc --- /dev/null +++ b/tensorflow/lite/kernels/internal/tensor_ctypes.cc @@ -0,0 +1,37 @@ +/* Copyright 2023 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/ + +#include "tensorflow/lite/kernels/internal/tensor_ctypes.h" + +#include + +namespace tflite { + +RuntimeShape GetTensorShape(const TfLiteTensor* tensor) { + if (tensor == nullptr) { + return RuntimeShape(); + } + + TfLiteIntArray* dims = tensor->dims; + const int dims_size = dims->size; + const int32_t* dims_data = reinterpret_cast(dims->data); + return RuntimeShape(dims_size, dims_data); +} + +RuntimeShape GetTensorShape(std::vector data) { + return RuntimeShape(data.size(), data.data()); +} + +} // namespace tflite diff --git a/tensorflow/lite/kernels/internal/tensor_ctypes.h b/tensorflow/lite/kernels/internal/tensor_ctypes.h new file mode 100644 index 0000000..9a7205c --- /dev/null +++ b/tensorflow/lite/kernels/internal/tensor_ctypes.h @@ -0,0 +1,42 @@ +/* Copyright 2017 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ +#ifndef TENSORFLOW_LITE_KERNELS_INTERNAL_TENSOR_CTYPES_H_ +#define TENSORFLOW_LITE_KERNELS_INTERNAL_TENSOR_CTYPES_H_ + +#include + +#include "tensorflow/lite/core/c/common.h" +#include "tensorflow/lite/core/macros.h" +#include "tensorflow/lite/kernels/internal/types.h" + +namespace tflite { + +template +inline T* GetTensorData(TfLiteTensor* tensor) { + return tensor != nullptr ? 
reinterpret_cast(tensor->data.raw) : nullptr; +} + +template +inline const T* GetTensorData(const TfLiteTensor* tensor) { + return tensor != nullptr ? reinterpret_cast(tensor->data.raw) + : nullptr; +} + +TFLITE_NOINLINE RuntimeShape GetTensorShape(const TfLiteTensor* tensor); +RuntimeShape GetTensorShape(std::vector data); + +} // namespace tflite + +#endif // TENSORFLOW_LITE_KERNELS_INTERNAL_TENSOR_CTYPES_H_ diff --git a/tensorflow/lite/kernels/internal/tensor_utils.cc b/tensorflow/lite/kernels/internal/tensor_utils.cc new file mode 100644 index 0000000..7e5d981 --- /dev/null +++ b/tensorflow/lite/kernels/internal/tensor_utils.cc @@ -0,0 +1,25 @@ +/* Copyright 2017 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +============================================================================== +*/ + +// internal/reference/portable_tensor_utils.h has the implementation of the +// functions declared in internal/portable_tensor_utils.h. This somewhat +// confusing setup is derived from how the code is organized in TfLite where it +// is used to select between NEON, SSE and portable implementaitons. See +// https://github.com/tensorflow/tensorflow/blob/d76c23975c4a3a0d7987cfe3f45c76566df06180/tensorflow/lite/kernels/internal/tensor_utils.cc +// for how the code is written in TfLite. 
+ +#include "tensorflow/lite/kernels/internal/portable_tensor_utils.h" +#include "tensorflow/lite/kernels/internal/reference/portable_tensor_utils.h" diff --git a/tensorflow/lite/kernels/internal/types.h b/tensorflow/lite/kernels/internal/types.h new file mode 100644 index 0000000..b775ca8 --- /dev/null +++ b/tensorflow/lite/kernels/internal/types.h @@ -0,0 +1,1095 @@ +/* Copyright 2023 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ +#ifndef TENSORFLOW_LITE_KERNELS_INTERNAL_TYPES_H_ +#define TENSORFLOW_LITE_KERNELS_INTERNAL_TYPES_H_ + +#include +#include +#include +#include + +#include "tensorflow/lite/kernels/internal/compatibility.h" +#include "tensorflow/lite/kernels/internal/runtime_shape.h" + +namespace tflite { + +enum class FusedActivationFunctionType : uint8_t { + kNone, + kRelu6, + kRelu1, + kRelu +}; +enum class PaddingType : uint8_t { kNone, kSame, kValid }; + +struct PaddingValues { + int16_t width; + int16_t height; + // offset is used for calculating "remaining" padding, for example, `width` + // is 1 and `width_offset` is 1, so padding_left is 1 while padding_right is + // 1 + 1 = 2. + int16_t width_offset; + // Same as width_offset except it's over the height dimension. 
+ int16_t height_offset; +}; + +struct Padding3DValues { + int16_t width; + int16_t height; + int16_t depth; + // offset is used for calculating "remaining" padding, for example, `width` + // is 1 and `width_offset` is 1, so padding_left is 1 while padding_right is + // 1 + 1 = 2. + int16_t width_offset; + // Same as width_offset except it's over the height dimension. + int16_t height_offset; + // Same as width_offset except it's over the depth dimension. + int16_t depth_offset; +}; + +// This enumeration allows for non-default formats for the weights array +// of a fully-connected operator, allowing the use of special optimized +// runtime paths. +enum class FullyConnectedWeightsFormat : uint8_t { + // Default format (flat 2D layout, the inner contiguous dimension + // is input_depth, the outer non-contiguous dimension is output_depth) + kDefault, + // Summary: optimized layout for fast CPU runtime implementation, + // aimed specifically at ARM CPUs at the moment, and specialized for + // 8-bit quantized layers. + // + // The use case we're concerned with here is: 8-bit quantization, + // large weights matrix that doesn't fit in cache (e.g. 4096x2048 in + // a key application that drove this), very small batch size (e.g. 1 -- 4). + // + // Even with 8-bit quantization of weights, the performance of memory + // accesses to the weights can become the dominant issue when + // the batch size is small, so each weight value is used in only a few + // arithmetic ops, i.e. the fully-connected node has a low arithmetic + // intensity. The specific issues that arise are of three kinds: + // (1) One may, ideally, max out DRAM bandwidth, i.e. be truly memory + // bound. That's the "good" issue to run into. + // (2) One may run into sub-optimal pre-fetching: the data hasn't been + // prefetched into the cache by the time we need it. 
+ // (3) One may run into cache aliasing: multiple values that are + // pre-fetched, alias each other in the L1 cache (which typically + // has only 4-way set associativity in ARM CPUs) and thus evict + // each other before we get to using them. + // + // The point of this shuffling is to avoid issues (2) and (3) so that + // we get as fast as possible given only the hard constraint (1). + // This is achieved by turning the difficulty into a solution: the + // difficulty, that each value loaded from memory is used only in + // one kernel iteration, making this operation memory-intensive, hints at + // the solution, of shuffling the weights so that they are stored in the + // exact order as the kernel needs to load them, so that the memory + // accesses made by the kernel are trivial. This solves (2) because the + // trivial memory access pattern allows the CPU's automatic prefetching + // to perform very well (no need even for preload instructions), and this + // solves (3) because the values being loaded concurrently are now + // contiguous in the address space, thus don't alias each other in the cache. + // + // On ARM, we typically want our kernel to process a 4x16 block of weights + // at a time, because: + // - 16 is the number of bytes in a NEON register. + // - 4 is how many rows we need to handle concurrently in the kernel in + // order to have sufficient mutual independence of instructions to + // maximize arithmetic throughput. + // + // Finally, the 'Int8' part in the name refers to the fact that this + // weights format has each weights value encoded as a signed int8_t value, + // even if the data type of the weights buffer is uint8_t. 
This is intended + // to save runtime kernels the effort to have to XOR the top bit of these + // bytes before using them in signed arithmetic, see this file for more + // explanations on the 'signed int8_t trick' in matrix multiplication kernels: + // + // tensorflow/lite/toco/graph_transformations/ensure_uint8_weights_safe_for_fast_int8_kernels.cc + // + kShuffled4x16Int8, +}; + +// Quantization parameters, determining the mapping of quantized values +// to real values (i.e. determining how quantized values are mathematically +// interpreted). +// +// The correspondence is as follows: +// +// real_value = scale * (quantized_value - zero_point); +// +// In other words, zero_point designates which quantized value corresponds to +// the real 0 value, and scale designates the difference between the real values +// corresponding to consecutive quantized values differing by 1. +struct QuantizationParams { + int32_t zero_point = 0; + double scale = 0.0; +}; + +inline bool operator==(const QuantizationParams& qp1, + const QuantizationParams& qp2) { + return qp1.zero_point == qp2.zero_point && qp1.scale == qp2.scale; +} + +// Quantization parameters for each channel, determining the mapping of +// quantized values to real values. See QuantizationParams for a single set of +// parameters per tensor. This has one parameters set per each channel. +// +// The correspondence is as follows: +// +// real_value = scale[channel] * (quantized_value - zero_point[channel]); +// +struct PerChannelQuantizationParams { + // The following members typically point to the corresponding members of a + // TfLiteAffineQuantization struct. + const float* scale; + const int32_t* zero_point; + int32_t quantized_dimension; +}; + +// Gets next index to iterate through a multidimensional array. 
+inline bool NextIndex(const int num_dims, const int* dims, int* current) { + if (num_dims == 0) { + return false; + } + TFLITE_DCHECK(dims != nullptr); + TFLITE_DCHECK(current != nullptr); + int carry = 1; + for (int idx = num_dims - 1; idx >= 0; --idx) { + int current_val = current[idx] + carry; + TFLITE_DCHECK_GE(dims[idx], current_val); + if (dims[idx] == current_val) { + current[idx] = 0; + } else { + current[idx] = current_val; + carry = 0; + break; + } + } + return (carry == 0); +} + +// Gets offset of index if reducing on axis. When reducing, the flattened offset +// will not change, if the input index changes on the given axis. For example, +// if you have a 3D tensor and you are reducing to 2D by eliminating axis 0, +// then index (0, 1, 2) and index (1, 1, 2) will map to the same flattened +// offset. +// TODO(kanlig): uses Dims to represent dimensions. +inline size_t ReducedOutputOffset(const int num_dims, const int* dims, + const int* index, const int num_axis, + const int* axis) { + if (num_dims == 0) { + return 0; + } + TFLITE_DCHECK(dims != nullptr); + TFLITE_DCHECK(index != nullptr); + size_t offset = 0; + for (int idx = 0; idx < num_dims; ++idx) { + // if we need to skip this axis + bool is_axis = false; + if (axis != nullptr) { + for (int axis_idx = 0; axis_idx < num_axis; ++axis_idx) { + if (idx == axis[axis_idx]) { + is_axis = true; + break; + } + } + } + if (!is_axis) { + offset = offset * static_cast(dims[idx]) + + static_cast(index[idx]); + } + } + return offset; +} + +// Since tensors with '0' in their shape are valid in TF, these offset functions +// allow that as long as the corresponding index is also 0. It is upto the +// calling ops to ensure that they perform verification checks on tensor shapes +// if they don't support a particular behavior. 
+ +inline int Offset(const Dims<4>& dims, int i0, int i1, int i2, int i3) { + TFLITE_DCHECK((i0 == 0 && dims.sizes[0] == 0) || + (i0 >= 0 && i0 < dims.sizes[0])); + TFLITE_DCHECK((i1 == 0 && dims.sizes[1] == 0) || + (i1 >= 0 && i1 < dims.sizes[1])); + TFLITE_DCHECK((i2 == 0 && dims.sizes[2] == 0) || + (i2 >= 0 && i2 < dims.sizes[2])); + TFLITE_DCHECK((i3 == 0 && dims.sizes[3] == 0) || + (i3 >= 0 && i3 < dims.sizes[3])); + return i0 * dims.strides[0] + i1 * dims.strides[1] + i2 * dims.strides[2] + + i3 * dims.strides[3]; +} + +inline int Offset(const Dims<4>& dims, int* index) { + return Offset(dims, index[0], index[1], index[2], index[3]); +} + +// Get array size, DCHECKing that the dim index is in range. +// +// Note that this will be phased out with Dims<4>, since RuntimeShape::Dims() +// already performs this check. +template +int ArraySize(const Dims& array, int index) { + TFLITE_DCHECK(index >= 0 && index < N); + return array.sizes[index]; +} + +// Get common array size, DCHECKing that they all agree. +template +int MatchingArraySize(const ArrayType1& array1, int index1, + const ArrayType2& array2, int index2) { + TFLITE_DCHECK_EQ(ArraySize(array1, index1), ArraySize(array2, index2)); + return ArraySize(array1, index1); +} + +template +int MatchingArraySize(const ArrayType1& array1, int index1, + const ArrayType2& array2, int index2, Args... args) { + TFLITE_DCHECK_EQ(ArraySize(array1, index1), ArraySize(array2, index2)); + return MatchingArraySize(array1, index1, args...); +} + +// Get common shape dim, DCHECKing that they all agree. +inline int MatchingDim(const RuntimeShape& shape1, int index1, + const RuntimeShape& shape2, int index2) { + TFLITE_DCHECK_EQ(shape1.Dims(index1), shape2.Dims(index2)); + return std::min(shape1.Dims(index1), shape2.Dims(index2)); +} + +template +int MatchingDim(const RuntimeShape& shape1, int index1, + const RuntimeShape& shape2, int index2, Args... 
args) { + TFLITE_DCHECK_EQ(shape1.Dims(index1), shape2.Dims(index2)); + return MatchingDim(shape1, index1, args...); +} + +// Will be phased out with Dims<4>, replaced by RuntimeShape::FlatSize(). +template +inline int FlatSize(const Dims& dims) { + int flat_size = 1; + for (int i = 0; i < N; ++i) { + flat_size *= dims.sizes[i]; + } + return flat_size; +} + +TFLITE_DEPRECATED("Prefer FlatSize.") +inline int RequiredBufferSizeForDims(const Dims<4>& dims) { + return FlatSize(dims); +} + +inline int MatchingElementsSize(const RuntimeShape& shape, + const RuntimeShape& check_shape_0) { + const int size_1 = shape.FlatSize(); + const int size_2 = check_shape_0.FlatSize(); + TFLITE_CHECK_EQ(size_1, size_2); + return size_1; +} + +inline int MatchingElementsSize(const RuntimeShape& shape, + const RuntimeShape& check_shape_0, + const RuntimeShape& check_shape_1) { + const int size_1 = shape.FlatSize(); + const int size_2 = check_shape_0.FlatSize(); + const int size_3 = check_shape_1.FlatSize(); + TFLITE_CHECK_EQ(size_1, size_2); + TFLITE_CHECK_EQ(size_2, size_3); + return size_1; +} + +// Flat size calculation, checking that dimensions match with one or more other +// arrays. 
+inline int MatchingFlatSize(const RuntimeShape& shape, + const RuntimeShape& check_shape_0) { + TFLITE_DCHECK_EQ(shape.DimensionsCount(), check_shape_0.DimensionsCount()); + const int dims_count = shape.DimensionsCount(); + for (int i = 0; i < dims_count; ++i) { + TFLITE_DCHECK_EQ(shape.Dims(i), check_shape_0.Dims(i)); + } + return shape.FlatSize(); +} + +inline int MatchingFlatSize(const RuntimeShape& shape, + const RuntimeShape& check_shape_0, + const RuntimeShape& check_shape_1) { + TFLITE_DCHECK_EQ(shape.DimensionsCount(), check_shape_0.DimensionsCount()); + const int dims_count = shape.DimensionsCount(); + for (int i = 0; i < dims_count; ++i) { + TFLITE_DCHECK_EQ(shape.Dims(i), check_shape_0.Dims(i)); + } + return MatchingFlatSize(shape, check_shape_1); +} + +inline int MatchingFlatSize(const RuntimeShape& shape, + const RuntimeShape& check_shape_0, + const RuntimeShape& check_shape_1, + const RuntimeShape& check_shape_2) { + TFLITE_DCHECK_EQ(shape.DimensionsCount(), check_shape_0.DimensionsCount()); + const int dims_count = shape.DimensionsCount(); + for (int i = 0; i < dims_count; ++i) { + TFLITE_DCHECK_EQ(shape.Dims(i), check_shape_0.Dims(i)); + } + return MatchingFlatSize(shape, check_shape_1, check_shape_2); +} + +inline int MatchingFlatSize(const RuntimeShape& shape, + const RuntimeShape& check_shape_0, + const RuntimeShape& check_shape_1, + const RuntimeShape& check_shape_2, + const RuntimeShape& check_shape_3) { + TFLITE_DCHECK_EQ(shape.DimensionsCount(), check_shape_0.DimensionsCount()); + const int dims_count = shape.DimensionsCount(); + for (int i = 0; i < dims_count; ++i) { + TFLITE_DCHECK_EQ(shape.Dims(i), check_shape_0.Dims(i)); + } + return MatchingFlatSize(shape, check_shape_1, check_shape_2, check_shape_3); +} + +// Flat size calculation, checking that dimensions match with one or more other +// arrays. 
+template +inline int MatchingFlatSize(const Dims& dims, const Dims& check_dims_0) { + for (int i = 0; i < N; ++i) { + TFLITE_DCHECK_EQ(ArraySize(dims, i), ArraySize(check_dims_0, i)); + } + return FlatSize(dims); +} + +template +inline int MatchingFlatSize(const Dims& dims, const Dims& check_dims_0, + const Dims& check_dims_1) { + for (int i = 0; i < N; ++i) { + TFLITE_DCHECK_EQ(ArraySize(dims, i), ArraySize(check_dims_0, i)); + } + return MatchingFlatSize(dims, check_dims_1); +} + +template +inline int MatchingFlatSize(const Dims& dims, const Dims& check_dims_0, + const Dims& check_dims_1, + const Dims& check_dims_2) { + for (int i = 0; i < N; ++i) { + TFLITE_DCHECK_EQ(ArraySize(dims, i), ArraySize(check_dims_0, i)); + } + return MatchingFlatSize(dims, check_dims_1, check_dims_2); +} + +template +inline int MatchingFlatSize(const Dims& dims, const Dims& check_dims_0, + const Dims& check_dims_1, + const Dims& check_dims_2, + const Dims& check_dims_3) { + for (int i = 0; i < N; ++i) { + TFLITE_DCHECK_EQ(ArraySize(dims, i), ArraySize(check_dims_0, i)); + } + return MatchingFlatSize(dims, check_dims_1, check_dims_2, check_dims_3); +} + +// Flat size calculation, checking if their extended shapes match. 
+inline int MatchingExtendedShapeFlatSize(const RuntimeShape& shape, + const RuntimeShape& check_shape_0) { + const int shape_dims = shape.DimensionsCount(); + const int check_shape_0_dims = check_shape_0.DimensionsCount(); + const int min_dims = std::min(shape_dims, check_shape_0_dims); + + for (int i = 0; i < min_dims; ++i) { + TFLITE_DCHECK_EQ(shape.Dims(shape_dims - 1 - i), + check_shape_0.Dims(check_shape_0_dims - 1 - i)); + } + for (int i = min_dims; i < shape_dims; ++i) { + TFLITE_DCHECK_EQ(shape.Dims(shape_dims - 1 - i), 1); + } + for (int i = min_dims; i < check_shape_0_dims; ++i) { + TFLITE_DCHECK_EQ(check_shape_0.Dims(check_shape_0_dims - 1 - i), 1); + } + return shape.FlatSize(); +} + +inline int MatchingExtendedShapeFlatSize(const RuntimeShape& shape, + const RuntimeShape& check_shape_0, + const RuntimeShape& check_shape_1) { + const int flat_size = MatchingExtendedShapeFlatSize(shape, check_shape_0); + TFLITE_DCHECK_EQ(MatchingExtendedShapeFlatSize(shape, check_shape_1), + flat_size); + return flat_size; +} + +inline int MatchingExtendedShapeFlatSize(const RuntimeShape& shape, + const RuntimeShape& check_shape_0, + const RuntimeShape& check_shape_1, + const RuntimeShape& check_shape_2) { + const int flat_size = MatchingExtendedShapeFlatSize(shape, check_shape_0); + TFLITE_DCHECK_EQ( + MatchingExtendedShapeFlatSize(shape, check_shape_1, check_shape_2), + flat_size); + return flat_size; +} + +inline int MatchingExtendedShapeFlatSize(const RuntimeShape& shape, + const RuntimeShape& check_shape_0, + const RuntimeShape& check_shape_1, + const RuntimeShape& check_shape_2, + const RuntimeShape& check_shape_3) { + const int flat_size = MatchingExtendedShapeFlatSize(shape, check_shape_0); + TFLITE_DCHECK_EQ(MatchingExtendedShapeFlatSize(shape, check_shape_1, + check_shape_2, check_shape_3), + flat_size); + return flat_size; +} + +// Data is required to be contiguous, and so many operators can use either the +// full array flat size or the flat size with one 
dimension skipped (commonly +// the depth). +template +inline int FlatSizeSkipDim(const Dims& dims, int skip_dim) { + TFLITE_DCHECK(skip_dim >= 0 && skip_dim < N); + int flat_size = 1; + for (int i = 0; i < N; ++i) { + flat_size *= (i == skip_dim) ? 1 : dims.sizes[i]; + } + return flat_size; +} + +// A combination of MatchingFlatSize() and FlatSizeSkipDim(). +template +inline int MatchingFlatSizeSkipDim(const Dims& dims, int skip_dim, + const Dims& check_dims_0) { + for (int i = 0; i < N; ++i) { + if (i != skip_dim) { + TFLITE_DCHECK_EQ(ArraySize(dims, i), ArraySize(check_dims_0, i)); + } + } + return FlatSizeSkipDim(dims, skip_dim); +} + +template +inline int MatchingFlatSizeSkipDim(const Dims& dims, int skip_dim, + const Dims& check_dims_0, + const Dims& check_dims_1) { + for (int i = 0; i < N; ++i) { + if (i != skip_dim) { + TFLITE_DCHECK_EQ(ArraySize(dims, i), ArraySize(check_dims_0, i)); + } + } + return MatchingFlatSizeSkipDim(dims, skip_dim, check_dims_1); +} + +template +inline int MatchingFlatSizeSkipDim(const Dims& dims, int skip_dim, + const Dims& check_dims_0, + const Dims& check_dims_1, + const Dims& check_dims_2) { + for (int i = 0; i < N; ++i) { + if (i != skip_dim) { + TFLITE_DCHECK_EQ(ArraySize(dims, i), ArraySize(check_dims_0, i)); + } + } + return MatchingFlatSizeSkipDim(dims, skip_dim, check_dims_1, check_dims_2); +} + +template +inline int MatchingFlatSizeSkipDim(const Dims& dims, int skip_dim, + const Dims& check_dims_0, + const Dims& check_dims_1, + const Dims& check_dims_2, + const Dims& check_dims_3) { + for (int i = 0; i < N; ++i) { + if (i != skip_dim) { + TFLITE_DCHECK_EQ(ArraySize(dims, i), ArraySize(check_dims_0, i)); + } + } + return MatchingFlatSizeSkipDim(dims, skip_dim, check_dims_1, check_dims_2, + check_dims_3); +} + +// Data is required to be contiguous, and so many operators can use either the +// full array flat size or the flat size with one dimension skipped (commonly +// the depth). 
+inline int FlatSizeSkipDim(const RuntimeShape& shape, int skip_dim) { + const int dims_count = shape.DimensionsCount(); + TFLITE_DCHECK(skip_dim >= 0 && skip_dim < dims_count); + const auto* dims_data = shape.DimsData(); + int flat_size = 1; + for (int i = 0; i < dims_count; ++i) { + flat_size *= (i == skip_dim) ? 1 : dims_data[i]; + } + return flat_size; +} + +// A combination of MatchingFlatSize() and FlatSizeSkipDim(). +inline int MatchingFlatSizeSkipDim(const RuntimeShape& shape, int skip_dim, + const RuntimeShape& check_shape_0) { + const int dims_count = shape.DimensionsCount(); + for (int i = 0; i < dims_count; ++i) { + if (i != skip_dim) { + TFLITE_DCHECK_EQ(shape.Dims(i), check_shape_0.Dims(i)); + } + } + return FlatSizeSkipDim(shape, skip_dim); +} + +inline int MatchingFlatSizeSkipDim(const RuntimeShape& shape, int skip_dim, + const RuntimeShape& check_shape_0, + const RuntimeShape& check_shape_1) { + const int dims_count = shape.DimensionsCount(); + for (int i = 0; i < dims_count; ++i) { + if (i != skip_dim) { + TFLITE_DCHECK_EQ(shape.Dims(i), check_shape_0.Dims(i)); + } + } + return MatchingFlatSizeSkipDim(shape, skip_dim, check_shape_1); +} + +inline int MatchingFlatSizeSkipDim(const RuntimeShape& shape, int skip_dim, + const RuntimeShape& check_shape_0, + const RuntimeShape& check_shape_1, + const RuntimeShape& check_shape_2) { + const int dims_count = shape.DimensionsCount(); + for (int i = 0; i < dims_count; ++i) { + if (i != skip_dim) { + TFLITE_DCHECK_EQ(shape.Dims(i), check_shape_0.Dims(i)); + } + } + return MatchingFlatSizeSkipDim(shape, skip_dim, check_shape_1, check_shape_2); +} + +inline int MatchingFlatSizeSkipDim(const RuntimeShape& shape, int skip_dim, + const RuntimeShape& check_shape_0, + const RuntimeShape& check_shape_1, + const RuntimeShape& check_shape_2, + const RuntimeShape& check_shape_3) { + const int dims_count = shape.DimensionsCount(); + for (int i = 0; i < dims_count; ++i) { + if (i != skip_dim) { + 
TFLITE_DCHECK_EQ(shape.Dims(i), check_shape_0.Dims(i)); + } + } + return MatchingFlatSizeSkipDim(shape, skip_dim, check_shape_1, check_shape_2, + check_shape_3); +} + +template +bool IsPackedWithoutStrides(const Dims& dims) { + int expected_stride = 1; + for (int d = 0; d < N; d++) { + if (dims.strides[d] != expected_stride) return false; + expected_stride *= dims.sizes[d]; + } + return true; +} + +template +void ComputeStrides(Dims* dims) { + dims->strides[0] = 1; + for (int d = 1; d < N; d++) { + dims->strides[d] = dims->strides[d - 1] * dims->sizes[d - 1]; + } +} + +enum class BroadcastableOpCategory : uint8_t { + kNone, + kNonBroadcast, // Matching input shapes. + kFirstInputBroadcastsFast, // Fivefold nested loops. + kSecondInputBroadcastsFast, // Fivefold nested loops. + kGenericBroadcast, // Fall-back. +}; + +struct MinMax { + float min; + float max; +}; +static_assert(sizeof(MinMax) == 8, ""); + +struct ActivationParams { + FusedActivationFunctionType activation_type; + // uint8_t, etc, activation params. + int32_t quantized_activation_min; + int32_t quantized_activation_max; +}; + +struct ReluParams : public ActivationParams { + int32_t input_offset; + int32_t output_offset; + int32_t output_multiplier; + int output_shift; +}; + +// Styles of resizing op usages. For example, kImageStyle can be used with a Pad +// op for pattern-specific optimization. +enum class ResizingCategory : uint8_t { + kNone, + kImageStyle, // 4D, operating on inner dimensions, say {0, a, b, 0}. + kGenericResize, +}; + +// For Add, Sub, Mul ops. +struct ArithmeticParams { + // Shape dependent / common to data / op types. + BroadcastableOpCategory broadcast_category; + // uint8_t inference params. + int32_t input1_offset; + int32_t input2_offset; + int32_t output_offset; + int32_t output_multiplier; + int output_shift; + // Add / Sub, not Mul, uint8_t inference params. 
+ int left_shift; + int32_t input1_multiplier; + int input1_shift; + int32_t input2_multiplier; + int input2_shift; + + // TODO(b/158622529): Union the following activation params. + // uint8_t, etc, activation params. + int32_t quantized_activation_min; + int32_t quantized_activation_max; + // float activation params. + float float_activation_min; + float float_activation_max; + // int64_t activation params. + int64_t int64_activation_min; + int64_t int64_activation_max; + // int16_t activation params. + int16_t int16_activation_min; + int16_t int16_activation_max; + + // Processed output dimensions. + // Let input "a" be the one that broadcasts in the faster-changing dimension. + // Then, after coalescing, for shapes {a0, a1, a2, a3, a4} and + // {b0, b1, b2, b3, b4}, + // broadcast_shape[4] = b0 = a0. + // broadcast_shape[3] = b1; a1 = 1. + // broadcast_shape[2] = b2 = a2. + // broadcast_shape[1] = a3; b3 = 1. + // broadcast_shape[0] = b4 = a4. + int broadcast_shape[5]; +}; + +struct ConcatenationParams { + int8_t axis; + const int32_t* input_zeropoint; + const float* input_scale; + uint16_t inputs_count; + int32_t output_zeropoint; + float output_scale; +}; + +struct ComparisonParams { + // uint8_t inference params. + int left_shift; + int32_t input1_offset; + int32_t input1_multiplier; + int input1_shift; + int32_t input2_offset; + int32_t input2_multiplier; + int input2_shift; + // Shape dependent / common to inference types. + bool is_broadcast; +}; + +struct ConvParams { + PaddingType padding_type; + PaddingValues padding_values; + // TODO(starka): This was just "stride", so check that width+height is OK. + int16_t stride_width; + int16_t stride_height; + int16_t dilation_width_factor; + int16_t dilation_height_factor; + // uint8_t inference params. + // TODO(b/65838351): Use smaller types if appropriate. 
+ int32_t input_offset; + int32_t weights_offset; + int32_t output_offset; + int32_t output_multiplier; + int output_shift; + // uint8_t, etc, activation params. + int32_t quantized_activation_min; + int32_t quantized_activation_max; + // float activation params. + float float_activation_min; + float float_activation_max; +}; + +struct Conv3DParams { + Padding3DValues padding_values; + int stride_width; + int stride_height; + int stride_depth; + int dilation_width; + int dilation_height; + int dilation_depth; + // float activation params. + float float_activation_min; + float float_activation_max; +}; + +typedef Conv3DParams Conv3DTransposeParams; + +struct DepthToSpaceParams { + int32_t block_size; +}; + +struct DepthwiseParams { + PaddingType padding_type; + PaddingValues padding_values; + int16_t stride_width; + int16_t stride_height; + int16_t dilation_width_factor; + int16_t dilation_height_factor; + int16_t depth_multiplier; + // uint8_t inference params. + // TODO(b/65838351): Use smaller types if appropriate. + int32_t input_offset; + int32_t weights_offset; + int32_t output_offset; + int32_t output_multiplier; + int output_shift; + // uint8_t, etc, activation params. + int32_t quantized_activation_min; + int32_t quantized_activation_max; + // float activation params. + float float_activation_min; + float float_activation_max; + const int32_t* output_multiplier_per_channel; + const int32_t* output_shift_per_channel; +}; + +struct DequantizationParams { + double scale; + int32_t zero_point; +}; + +struct PerChannelDequantizationParams { + const float* scale; + const int32_t* zero_point; + int32_t quantized_dimension; +}; + +struct FakeQuantParams { + MinMax minmax; + int32_t num_bits; +}; + +struct FullyConnectedParams { + // uint8_t inference params. + // TODO(b/65838351): Use smaller types if appropriate. 
+ int32_t input_offset; + int32_t weights_offset; + int32_t output_offset; + int32_t output_multiplier; + int output_shift; + // uint8_t, etc, activation params. + int32_t quantized_activation_min; + int32_t quantized_activation_max; + // float activation params. + float float_activation_min; + float float_activation_max; + // Mark the operands as cacheable if they are unchanging, e.g. weights. + bool lhs_cacheable; + bool rhs_cacheable; + FullyConnectedWeightsFormat weights_format; +}; + +struct GatherParams { + int16_t axis; + int16_t batch_dims; +}; + +struct L2NormalizationParams { + // uint8_t inference params. + int32_t input_zero_point; +}; + +struct LocalResponseNormalizationParams { + int32_t range; + double bias; + double alpha; + double beta; +}; + +struct HardSwishParams { + // zero_point of the input activations. + int16_t input_zero_point; + // zero_point of the output activations. + int16_t output_zero_point; + // 16bit fixed-point component of the multiplier to apply to go from the + // "high-res input scale", which is the input scale multiplied by 2^7, to the + // "relu-ish scale", which 3.0/32768. + // See the implementation of HardSwishPrepare. + int16_t reluish_multiplier_fixedpoint_int16; + // exponent/bit-shift component of the aforementioned multiplier. + int reluish_multiplier_exponent; + // 16bit fixed-point component of the multiplier to apply to go from the + // "high-res input scale", which is the input scale multiplied by 2^7, to the + // output scale. + // See the implementation of HardSwishPrepare. + int16_t output_multiplier_fixedpoint_int16; + // exponent/bit-shift component of the aforementioned multiplier. + int output_multiplier_exponent; +}; + +struct LogisticParams { + // uint8_t inference params. 
+ int32_t input_zero_point; + int32_t input_range_radius; + int32_t input_multiplier; + int input_left_shift; +}; + +struct LstmCellParams { + int32_t weights_zero_point; + int32_t accum_multiplier; + int accum_shift; + int state_integer_bits; +}; + +struct MeanParams { + int8_t axis_count; + int16_t axis[4]; +}; + +struct PackParams { + int8_t axis; + const int32_t* input_zeropoint; + const float* input_scale; + uint16_t inputs_count; + int32_t output_zeropoint; + float output_scale; +}; + +struct PadParams { + int8_t left_padding_count; + int32_t left_padding[5]; + int8_t right_padding_count; + int32_t right_padding[5]; + ResizingCategory resizing_category; +}; + +struct PreluParams { + int32_t input_offset; + int32_t alpha_offset; + int32_t output_offset; + int32_t output_multiplier_1; + int output_shift_1; + int32_t output_multiplier_2; + int output_shift_2; +}; + +struct PoolParams { + FusedActivationFunctionType activation; + PaddingType padding_type; + PaddingValues padding_values; + int stride_height; + int stride_width; + int filter_height; + int filter_width; + // uint8_t, etc, activation params. + int32_t quantized_activation_min; + int32_t quantized_activation_max; + // float activation params. + float float_activation_min; + float float_activation_max; +}; + +struct ReshapeParams { + int8_t shape_count; + int32_t shape[4]; +}; + +struct ResizeBilinearParams { + bool align_corners; + // half_pixel_centers assumes pixels are of half the actual dimensions, and + // yields more accurate resizes. Corresponds to the same argument for the + // original TensorFlow op in TF2.0. + bool half_pixel_centers; +}; + +struct ResizeNearestNeighborParams { + bool align_corners; + bool half_pixel_centers; +}; + +struct SliceParams { + int8_t begin_count; + int32_t begin[5]; + int8_t size_count; + int32_t size[5]; +}; + +struct SoftmaxParams { + // beta is not really used (not a Tensorflow parameter) and not implemented + // for LogSoftmax. 
+ double beta; + // uint8_t inference params. Used even when beta defaults to 1.0. + int32_t input_multiplier; + int32_t input_left_shift; + // Reverse scaling is only used by LogSoftmax. + int32_t reverse_scaling_divisor; + int32_t reverse_scaling_right_shift; + int diff_min; + int32_t zero_point; + float scale; + float* table; + // int16 LUT for exp(x), where x uniform distributed between [-10.0 , 0.0] + int16_t* exp_lut; + // int16 LUT for 1 / (1 + x), where x uniform distributed between [0.0 , 1.0] + int16_t* one_over_one_plus_x_lut; + uint8_t* uint8_table1; + uint8_t* uint8_table2; +}; + +struct SpaceToBatchParams { + // "Zero" padding for uint8_t means padding with the output offset. + int32_t output_offset; +}; + +struct SpaceToDepthParams { + int32_t block_size; +}; + +struct SplitParams { + // Graphs that split into, say, 2000 nodes are encountered. The indices in + // OperatorEdges are of type uint16_t. + uint16_t num_split; + int16_t axis; +}; + +struct SqueezeParams { + int8_t squeeze_dims_count; + int32_t squeeze_dims[4]; +}; + +struct StridedSliceParams { + int8_t start_indices_count; + int32_t start_indices[5]; + int8_t stop_indices_count; + int32_t stop_indices[5]; + int8_t strides_count; + int32_t strides[5]; + + uint16_t begin_mask; + uint16_t ellipsis_mask; + uint16_t end_mask; + uint16_t new_axis_mask; + uint16_t shrink_axis_mask; + bool offset; +}; + +struct TanhParams { + int32_t input_zero_point; + int32_t input_range_radius; + int32_t input_multiplier; + int input_left_shift; +}; + +constexpr int kTransposeMaxDimensions = 6; + +struct TransposeParams { + int8_t perm_count; + int32_t perm[kTransposeMaxDimensions]; +}; + +struct UnpackParams { + uint16_t num_split; + int16_t axis; +}; + +struct LeakyReluParams { + float alpha; + int32_t input_offset; + int32_t output_offset; + int32_t output_multiplier_alpha; + int32_t output_shift_alpha; + int32_t output_multiplier_identity; + int32_t output_shift_identity; +}; + +template +inline void 
SetActivationParams(float min, float max, P* params) { + params->float_activation_min = min; + params->float_activation_max = max; +} + +template +inline void SetActivationParams(int32_t min, int32_t max, P* params) { + params->quantized_activation_min = min; + params->quantized_activation_max = max; +} + +template +inline void SetActivationParams(uint32_t min, uint32_t max, P* params) { + params->quantized_activation_min = min; + params->quantized_activation_max = max; +} + +template +inline void SetActivationParams(int16_t min, int16_t max, P* params) { + params->int16_activation_min = min; + params->int16_activation_max = max; +} + +template +inline void SetActivationParams(int64_t min, int64_t max, P* params) { + params->int64_activation_min = min; + params->int64_activation_max = max; +} + +template +inline void GetActivationParams(const P& params, int32_t* min, int32_t* max) { + *min = params.quantized_activation_min; + *max = params.quantized_activation_max; +} + +template +inline void GetActivationParams(const P& params, uint32_t* min, uint32_t* max) { + *min = params.quantized_activation_min; + *max = params.quantized_activation_max; +} + +template +inline void GetActivationParams(const P& params, int16_t* min, int16_t* max) { + *min = params.int16_activation_min; + *max = params.int16_activation_max; +} + +template +inline void GetActivationParams(const P& params, float* min, float* max) { + *min = params.float_activation_min; + *max = params.float_activation_max; +} + +template +inline void GetActivationParams(const P& params, int64_t* min, int64_t* max) { + *min = params.int64_activation_min; + *max = params.int64_activation_max; +} + +// Type trait to check of given type has size smaller than 4 bytes. +template +struct is_small_integer + : public std::integral_constant::value || + std::is_same::value || + std::is_same::value || + std::is_same::value> {}; + +// Type trait to check of given type is int32 or int64. 
+template +struct is_int32_or_int64 + : public std::integral_constant::value || + std::is_same::value> { +}; + +} // namespace tflite + +#endif // TENSORFLOW_LITE_KERNELS_INTERNAL_TYPES_H_ diff --git a/tensorflow/lite/kernels/kernel_util.cc b/tensorflow/lite/kernels/kernel_util.cc new file mode 100644 index 0000000..fce1f43 --- /dev/null +++ b/tensorflow/lite/kernels/kernel_util.cc @@ -0,0 +1,596 @@ +/* Copyright 2017 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ +#include "tensorflow/lite/kernels/kernel_util.h" + +#include +#include + +#include +#include +#include +#include + +#ifndef TF_LITE_STATIC_MEMORY +#include +#endif // TF_LITE_STATIC_MEMORY + +#ifndef TF_LITE_STATIC_MEMORY +#include "tensorflow/lite/array.h" +#endif // TF_LITE_STATIC_MEMORY + +#include "tensorflow/lite/context_util.h" +#include "tensorflow/lite/core/c/builtin_op_data.h" +#include "tensorflow/lite/core/c/common.h" +#include "tensorflow/lite/kernels/internal/cppmath.h" +#include "tensorflow/lite/kernels/internal/quantization_util.h" + +#if defined(__APPLE__) +#include "TargetConditionals.h" +#endif + +namespace tflite { + +namespace { + +// Assumes tensor_index is a valid index (in bounds) +inline TfLiteTensor* GetTensorAtIndex(const TfLiteContext* context, + int tensor_index) { + if (context->tensors != nullptr) { + return &context->tensors[tensor_index]; + } else { + return 
context->GetTensor(context, tensor_index); + } +} + +// Validate in a single place to reduce binary size +inline TfLiteStatus ValidateTensorIndexingSafe(const TfLiteContext* context, + int index, int max_size, + const int* tensor_indices, + int* tensor_index) { + if (index < 0 || index >= max_size) { + TF_LITE_KERNEL_LOG(const_cast(context), + "Invalid tensor index %d (not in [0, %d))\n", index, + max_size); + return kTfLiteError; + } + if (tensor_indices[index] == kTfLiteOptionalTensor) { + TF_LITE_KERNEL_LOG(const_cast(context), + "Tensor at index %d was optional but was expected\n", + index); + return kTfLiteError; + } + + *tensor_index = tensor_indices[index]; + return kTfLiteOk; +} + +// Same as above but returns -1 for invalid inputs instead of status + logging +// error. +inline int ValidateTensorIndexing(const TfLiteContext* context, int index, + int max_size, const int* tensor_indices) { + if (index >= 0 && index < max_size) { + const int tensor_index = tensor_indices[index]; + if (tensor_index != kTfLiteOptionalTensor) { + return tensor_index; + } + } + return -1; +} + +inline TfLiteTensor* GetMutableInput(const TfLiteContext* context, + const TfLiteNode* node, int index) { + const int tensor_index = ValidateTensorIndexing( + context, index, node->inputs->size, node->inputs->data); + if (tensor_index < 0) { + return nullptr; + } + return GetTensorAtIndex(context, tensor_index); +} + +inline TfLiteStatus GetMutableInputSafe(const TfLiteContext* context, + const TfLiteNode* node, int index, + const TfLiteTensor** tensor) { + int tensor_index; + TF_LITE_ENSURE_OK( + context, ValidateTensorIndexingSafe(context, index, node->inputs->size, + node->inputs->data, &tensor_index)); + *tensor = GetTensorAtIndex(context, tensor_index); + return kTfLiteOk; +} + +} // anonymous namespace. 
+ +const TfLiteTensor* GetInput(const TfLiteContext* context, + const TfLiteNode* node, int index) { + return GetMutableInput(context, node, index); +} + +TfLiteStatus GetInputSafe(const TfLiteContext* context, const TfLiteNode* node, + int index, const TfLiteTensor** tensor) { + return GetMutableInputSafe(context, node, index, tensor); +} + +TfLiteTensor* GetVariableInput(TfLiteContext* context, const TfLiteNode* node, + int index) { + TfLiteTensor* tensor = GetMutableInput(context, node, index); + if (tensor == nullptr) return nullptr; + return tensor->is_variable ? tensor : nullptr; +} + +TfLiteTensor* GetOutput(TfLiteContext* context, const TfLiteNode* node, + int index) { + const int tensor_index = ValidateTensorIndexing( + context, index, node->outputs->size, node->outputs->data); + if (tensor_index < 0) { + return nullptr; + } + return GetTensorAtIndex(context, tensor_index); +} + +TfLiteStatus GetOutputSafe(const TfLiteContext* context, const TfLiteNode* node, + int index, TfLiteTensor** tensor) { + int tensor_index; + TF_LITE_ENSURE_OK( + context, ValidateTensorIndexingSafe(context, index, node->outputs->size, + node->outputs->data, &tensor_index)); + *tensor = GetTensorAtIndex(context, tensor_index); + return kTfLiteOk; +} + +const TfLiteTensor* GetOptionalInputTensor(const TfLiteContext* context, + const TfLiteNode* node, int index) { + return GetInput(context, node, index); +} + +#ifndef TF_LITE_STATIC_MEMORY +TfLiteTensor* GetTemporary(TfLiteContext* context, const TfLiteNode* node, + int index) { + const int tensor_index = ValidateTensorIndexing( + context, index, node->temporaries->size, node->temporaries->data); + if (tensor_index < 0) { + return nullptr; + } + return GetTensorAtIndex(context, tensor_index); +} + +TfLiteStatus GetTemporarySafe(const TfLiteContext* context, + const TfLiteNode* node, int index, + TfLiteTensor** tensor) { + int tensor_index; + TF_LITE_ENSURE_OK(context, ValidateTensorIndexingSafe( + context, index, 
node->temporaries->size, + node->temporaries->data, &tensor_index)); + *tensor = GetTensorAtIndex(context, tensor_index); + return kTfLiteOk; +} + +const TfLiteTensor* GetIntermediates(TfLiteContext* context, + const TfLiteNode* node, int index) { + const int tensor_index = ValidateTensorIndexing( + context, index, node->intermediates->size, node->intermediates->data); + if (tensor_index < 0) { + return nullptr; + } + return GetTensorAtIndex(context, tensor_index); +} + +TfLiteStatus GetIntermediatesSafe(const TfLiteContext* context, + const TfLiteNode* node, int index, + TfLiteTensor** tensor) { + int tensor_index; + TF_LITE_ENSURE_OK(context, ValidateTensorIndexingSafe( + context, index, node->intermediates->size, + node->intermediates->data, &tensor_index)); + *tensor = GetTensorAtIndex(context, tensor_index); + return kTfLiteOk; +} +#endif // TF_LITE_STATIC_MEMORY + +// Per-axis +TfLiteStatus PopulateConvolutionQuantizationParams( + TfLiteContext* context, const TfLiteTensor* input, + const TfLiteTensor* filter, const TfLiteTensor* bias, TfLiteTensor* output, + const TfLiteFusedActivation& activation, int32_t* multiplier, int* shift, + int32_t* output_activation_min, int32_t* output_activation_max, + int32_t* per_channel_multiplier, int32_t* per_channel_shift) { + const auto* affine_quantization = + reinterpret_cast(filter->quantization.params); + return PopulateConvolutionQuantizationParams( + context, input, filter, bias, output, activation, multiplier, shift, + output_activation_min, output_activation_max, per_channel_multiplier, + per_channel_shift, affine_quantization->scale->size); +} + +// Per-axis & per-tensor +TfLiteStatus PopulateConvolutionQuantizationParams( + TfLiteContext* context, const TfLiteTensor* input, + const TfLiteTensor* filter, const TfLiteTensor* bias, TfLiteTensor* output, + const TfLiteFusedActivation& activation, int32_t* multiplier, int* shift, + int32_t* output_activation_min, int32_t* output_activation_max, + int32_t* 
per_channel_multiplier, int32_t* per_channel_shift, + int num_channels) { + TF_LITE_ENSURE_EQ(context, input->quantization.type, + kTfLiteAffineQuantization); + TF_LITE_ENSURE_EQ(context, filter->quantization.type, + kTfLiteAffineQuantization); + // TODO(jianlijianli): Enable bias type check and bias scale == input scale + // * filter scale for each channel in affine quantization once bias + // quantization is properly populated. + // TF_LITE_ENSURE_EQ(context, bias->quantization.type, + // kTfLiteAffineQuantization); + + // Check data type. + const auto* affine_quantization = + reinterpret_cast(filter->quantization.params); + TF_LITE_ENSURE(context, affine_quantization); + TF_LITE_ENSURE(context, affine_quantization->scale); + const bool is_per_channel = affine_quantization->scale->size > 1; + if (is_per_channel) { + // Currently only Int8/Int16 is supported for per channel quantization. + TF_LITE_ENSURE(context, + input->type == kTfLiteInt8 || input->type == kTfLiteInt16); + TF_LITE_ENSURE(context, + filter->type == kTfLiteInt8 || filter->type == kTfLiteInt4); + TF_LITE_ENSURE_EQ(context, affine_quantization->scale->size, num_channels); + TF_LITE_ENSURE_EQ( + context, num_channels, + filter->dims->data[affine_quantization->quantized_dimension]); + } + + // Populate multiplier and shift using affine quantization. + const float input_scale = input->params.scale; + const float output_scale = output->params.scale; + const float* filter_scales = affine_quantization->scale->data; + for (int i = 0; i < num_channels; ++i) { + // If per-tensor quantization parameter is specified, broadcast it along the + // quantization dimension (channels_out). + const float scale = is_per_channel ? 
filter_scales[i] : filter_scales[0]; + const double filter_scale = static_cast(scale); + const double effective_output_scale = static_cast(input_scale) * + filter_scale / + static_cast(output_scale); + int32_t significand; + int channel_shift; + QuantizeMultiplier(effective_output_scale, &significand, &channel_shift); + per_channel_multiplier[i] = significand; + per_channel_shift[i] = channel_shift; + } + + // Populate scalar quantization parameters. + // This check on legacy quantization parameters is kept only for backward + // compatibility. + if (input->type == kTfLiteUInt8) { + // Check bias scale == input scale * filter scale. + double real_multiplier = 0.0; + TF_LITE_ENSURE_STATUS(GetQuantizedConvolutionMultipler( + context, input, filter, bias, output, &real_multiplier)); + int exponent; + + // Populate quantization parameters with multiplier and shift. + QuantizeMultiplier(real_multiplier, multiplier, &exponent); + *shift = -exponent; + } + if (input->type == kTfLiteInt8 || input->type == kTfLiteUInt8 || + input->type == kTfLiteInt16) { + TF_LITE_ENSURE_STATUS(CalculateActivationRangeQuantized( + context, activation, output, output_activation_min, + output_activation_max)); + } + return kTfLiteOk; +} + +TfLiteStatus GetQuantizedConvolutionMultipler(TfLiteContext* context, + const TfLiteTensor* input, + const TfLiteTensor* filter, + const TfLiteTensor* bias, + TfLiteTensor* output, + double* multiplier) { + const double input_product_scale = static_cast(input->params.scale) * + static_cast(filter->params.scale); + // The following conditions must be guaranteed by the training pipeline. + if (bias) { + const double bias_scale = static_cast(bias->params.scale); + // Here we're making sure the input_product_scale & bias_scale are about the + // same. 
Since we have: + // (output - output_zp) * output_scale = + // input_product_scale * input_product + bias * bias_scale ---- (0) + // + // (0) equals: + // (input_product + bias) * input_product_scale ----- (1) + // + + // bias * (bias_scale - input_product_scale) ------ (2) + // + // For the real kernel computation, we're doing (1), so we really need to + // make sure (2) has minimum impact on the output, so: + // bias * (bias_scale - input_product_scale) / output_scale should be + // a small number for an integer. + // Since normally bias should be within a small range. + // We should expect (bias_scale - input_product_scale) / output_scale to + // be a small number like 0.02. + const double scale_diff = std::abs(input_product_scale - bias_scale); + const double output_scale = static_cast(output->params.scale); + + TF_LITE_ENSURE(context, scale_diff / output_scale <= 0.02); + } + return GetQuantizedConvolutionMultipler(context, input, filter, output, + multiplier); +} + +TfLiteStatus GetQuantizedConvolutionMultipler(TfLiteContext* context, + const TfLiteTensor* input, + const TfLiteTensor* filter, + TfLiteTensor* output, + double* multiplier) { + const double input_product_scale = + static_cast(input->params.scale * filter->params.scale); + TF_LITE_ENSURE(context, input_product_scale >= 0); + *multiplier = input_product_scale / static_cast(output->params.scale); + + return kTfLiteOk; +} + +namespace { + +inline TfLiteStatus Quantize(TfLiteContext* context, float scale, + int32_t zero_point, float f, int32_t& q) { + const float tmp = TfLiteRound(f / scale); + const bool no_integer_overflow_from_quantization = + (tmp >= static_cast(std::numeric_limits::min()) && + tmp <= static_cast(std::numeric_limits::max())); + TF_LITE_ENSURE(context, no_integer_overflow_from_quantization); + q = zero_point + static_cast(tmp); + return kTfLiteOk; +} + +TfLiteStatus CalculateActivationRangeQuantizedImpl( + TfLiteContext* context, TfLiteFusedActivation activation, int32_t qmin, + 
int32_t qmax, TfLiteTensor* output, int32_t* act_min, int32_t* act_max) { + const auto scale = output->params.scale; + const auto zero_point = output->params.zero_point; + + int32_t tmp_q; + if (activation == kTfLiteActRelu) { + TF_LITE_ENSURE_OK(context, + Quantize(context, scale, zero_point, 0.0, tmp_q)); + *act_min = std::max(qmin, tmp_q); + *act_max = qmax; + } else if (activation == kTfLiteActRelu6) { + TF_LITE_ENSURE_OK(context, + Quantize(context, scale, zero_point, 0.0, tmp_q)); + *act_min = std::max(qmin, tmp_q); + TF_LITE_ENSURE_OK(context, + Quantize(context, scale, zero_point, 6.0, tmp_q)); + *act_max = std::min(qmax, tmp_q); + } else if (activation == kTfLiteActReluN1To1) { + TF_LITE_ENSURE_OK(context, + Quantize(context, scale, zero_point, -1.0, tmp_q)); + *act_min = std::max(qmin, tmp_q); + TF_LITE_ENSURE_OK(context, + Quantize(context, scale, zero_point, 1.0, tmp_q)); + *act_max = std::min(qmax, tmp_q); + } else { + *act_min = qmin; + *act_max = qmax; + } + return kTfLiteOk; +} +} // namespace + +TfLiteStatus CalculateActivationRangeQuantized(TfLiteContext* context, + TfLiteFusedActivation activation, + TfLiteTensor* output, + int32_t* act_min, + int32_t* act_max) { + int32_t qmin = 0; + int32_t qmax = 0; + if (output->type == kTfLiteUInt8) { + qmin = std::numeric_limits::min(); + qmax = std::numeric_limits::max(); + } else if (output->type == kTfLiteInt8) { + qmin = std::numeric_limits::min(); + qmax = std::numeric_limits::max(); + } else if (output->type == kTfLiteInt16) { + qmin = std::numeric_limits::min(); + qmax = std::numeric_limits::max(); + } else { + TF_LITE_ENSURE(context, false); + } + + return CalculateActivationRangeQuantizedImpl(context, activation, qmin, qmax, + output, act_min, act_max); +} + +bool HaveSameShapes(const TfLiteTensor* input1, const TfLiteTensor* input2) { + return TfLiteIntArrayEqual(input1->dims, input2->dims); +} + +#ifndef TF_LITE_STATIC_MEMORY +TfLiteStatus GetOutputShapeFromInput(TfLiteContext* context, + const 
TfLiteTensor* input, + TfLiteIntArray** output_shape) { + if (NumDimensions(input) != 1) { + TF_LITE_KERNEL_LOG(const_cast(context), + "Invalid %dD input tensor (must be a 1D tensor).", + NumDimensions(input)); + return kTfLiteError; + } + const int output_dims = SizeOfDimension(input, 0); + IntArrayUniquePtr shape(TfLiteIntArrayCreate(output_dims)); + for (int i = 0; i < output_dims; i++) { + shape->data[i] = input->data.i32[i]; + } + *output_shape = shape.release(); + return kTfLiteOk; +} + +// TODO(b/172067338): Having this function be part of TF_LITE_STATIC_MEMORY +// build results in a 6KB size increase, even though the function is unsused for +// that build. What appears to be happening is that while the linker drops the +// unsused function, the string library that gets pulled in is not dropped, +// resulting in the increased binary size. +std::string GetShapeDebugString(const TfLiteIntArray* shape) { + std::string str; + for (int d = 0; d < shape->size; ++d) { + if (str.empty()) + str = "[" + std::to_string(shape->data[d]); + else + // Don't add space after "," to make the output consistent with + // tensorflow::shape_inference::InferenceContext::DebugString() + str += "," + std::to_string(shape->data[d]); + } + if (str.empty()) { + str = "[]"; + } else { + str += "]"; + } + return str; +} + +TfLiteStatus CalculateShapeForBroadcast(TfLiteContext* context, + const TfLiteTensor* input1, + const TfLiteTensor* input2, + TfLiteIntArray** output_shape) { + const int dims1 = NumDimensions(input1); + const int dims2 = NumDimensions(input2); + const int out_dims = std::max(dims1, dims2); + + IntArrayUniquePtr shape(TfLiteIntArrayCreate(out_dims)); + for (int i = 0; i < out_dims; ++i) { + const int d1 = i >= dims1 ? 1 : SizeOfDimension(input1, dims1 - i - 1); + const int d2 = i >= dims2 ? 
1 : SizeOfDimension(input2, dims2 - i - 1); + if (!(d1 == d2 || d1 == 1 || d2 == 1)) { + TF_LITE_KERNEL_LOG(context, + "Given shapes, %s and %s, are not broadcastable.", + GetShapeDebugString(input1->dims).c_str(), + GetShapeDebugString(input2->dims).c_str()); + return kTfLiteError; + } + + if (d1 == 0 || d2 == 0) { + shape->data[out_dims - i - 1] = 0; + } else { + shape->data[out_dims - i - 1] = std::max(d1, d2); + } + } + *output_shape = shape.release(); + return kTfLiteOk; +} + +TfLiteStatus CalculateShapeForBroadcast(TfLiteContext* context, + const TfLiteTensor* input1, + const TfLiteTensor* input2, + const TfLiteTensor* input3, + TfLiteIntArray** output_shape) { + const int dims1 = NumDimensions(input1); + const int dims2 = NumDimensions(input2); + const int dims3 = NumDimensions(input3); + const int out_dims = std::max(std::max(dims1, dims2), dims3); + IntArrayUniquePtr shape(TfLiteIntArrayCreate(out_dims)); + for (int i = 0; i < out_dims; ++i) { + const int d1 = i >= dims1 ? 1 : SizeOfDimension(input1, dims1 - i - 1); + const int d2 = i >= dims2 ? 1 : SizeOfDimension(input2, dims2 - i - 1); + const int d3 = i >= dims3 ? 1 : SizeOfDimension(input3, dims3 - i - 1); + const int min_value = std::min(std::min(d1, d2), d3); + int max_value = std::max(std::max(d1, d2), d3); + // If one dimension is 0, others must be 0 or 1. + if (min_value == 0) max_value = 0; + if (!(d1 == 1 || d1 == max_value) || !(d2 == 1 || d2 == max_value) || + !(d3 == 1 || d3 == max_value)) { + TF_LITE_KERNEL_LOG(context, + "Given shapes, %s, %s and %s, are not broadcastable.", + GetShapeDebugString(input1->dims).c_str(), + GetShapeDebugString(input2->dims).c_str(), + GetShapeDebugString(input3->dims).c_str()); + return kTfLiteError; + } + shape->data[out_dims - i - 1] = max_value; + } + *output_shape = shape.release(); + return kTfLiteOk; +} +#endif // TF_LITE_STATIC_MEMORY + +// Size of string is not constant, return 0 in such case. 
+int TfLiteTypeGetSize(TfLiteType type) { + switch (type) { + case kTfLiteUInt8: + static_assert(sizeof(uint8_t) == 1, ""); + return 1; + case kTfLiteInt8: + static_assert(sizeof(int8_t) == 1, ""); + return 1; + case kTfLiteBool: + return sizeof(bool); + case kTfLiteUInt16: + static_assert(sizeof(uint16_t) == 2, ""); + return 2; + case kTfLiteInt16: + static_assert(sizeof(int16_t) == 2, ""); + return 2; + case kTfLiteFloat16: + static_assert(sizeof(int16_t) == 2, ""); + return 2; + case kTfLiteFloat32: + static_assert(sizeof(float) == 4, ""); + return 4; + case kTfLiteInt32: + static_assert(sizeof(int32_t) == 4, ""); + return 4; + case kTfLiteUInt32: + static_assert(sizeof(uint32_t) == 4, ""); + return 4; + case kTfLiteInt64: + static_assert(sizeof(int64_t) == 8, ""); + return 8; + case kTfLiteUInt64: + static_assert(sizeof(uint64_t) == 8, ""); + return 8; + case kTfLiteFloat64: + static_assert(sizeof(double) == 8, ""); + return 8; + case kTfLiteComplex64: + static_assert(sizeof(std::complex<float>) == 8, ""); + return 8; + case kTfLiteComplex128: + static_assert(sizeof(std::complex<double>) == 16, ""); + return 16; + default: + return 0; + } +} + +bool IsMobilePlatform() { +#if defined(ANDROID) || defined(__ANDROID__) + return true; +#elif defined(__APPLE__) +#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE + return true; +#endif +#endif + return false; +} + +bool HasUnspecifiedDimension(const TfLiteTensor* tensor) { +#ifndef TF_LITE_STATIC_MEMORY + if (tensor->dims_signature) { + for (int i : TfLiteIntArrayView(tensor->dims_signature)) { + if (i == -1) return true; + } + } +#endif // TF_LITE_STATIC_MEMORY + return false; +} + +} // namespace tflite diff --git a/tensorflow/lite/kernels/kernel_util.h b/tensorflow/lite/kernels/kernel_util.h new file mode 100644 index 0000000..24061ab --- /dev/null +++ b/tensorflow/lite/kernels/kernel_util.h @@ -0,0 +1,331 @@ +/* Copyright 2017 The TensorFlow Authors. All Rights Reserved. 
+ +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ +#ifndef TENSORFLOW_LITE_KERNELS_KERNEL_UTIL_H_ +#define TENSORFLOW_LITE_KERNELS_KERNEL_UTIL_H_ + +#include <stdint.h> + +#include <limits> +#ifndef TF_LITE_STATIC_MEMORY +#include <string> +#endif // TF_LITE_STATIC_MEMORY + +#include "tensorflow/lite/core/c/builtin_op_data.h" +#include "tensorflow/lite/core/c/common.h" + +namespace tflite { + +// A fair number of functions in this header have historically been inline. +// It is ok to change functions to not be inline if the latency with +// benchmark_model for MobileNet + MobileBERT is unaffected. If such a change is +// made, move the newly non-inlined function declarations to the top of this +// header file. + +// Note: You must check if result is not null: +// +// TfLiteTensor* my_tensor = GetInput(context, node, kMyTensorIdx); +// TF_LITE_ENSURE(context, my_tensor != nullptr); +// +// This is because the index might point to the optional tensor constant +// (kTfLiteOptionalTensor) in which case there is no tensor to return. +const TfLiteTensor* GetInput(const TfLiteContext* context, + const TfLiteNode* node, int index); + +// Same as `GetInput` but returns boolean and uses output argument for tensor. 
+// +// TfLiteTensor* my_tensor; +// TF_LITE_ENSURE_OK(context, +// GetInputSafe(context, node, kMyTensorIdx, &my_tensor)); +// // can use my_tensor directly from here onwards, it is not nullptr +// +// Should be used in cases where the binary size is too large. +TfLiteStatus GetInputSafe(const TfLiteContext* context, const TfLiteNode* node, + int index, const TfLiteTensor** tensor); + +// Note: You must check if result is not null: +// +// TfLiteTensor* my_tensor = GetVariableInput(context, node, kMyTensorIdx); +// TF_LITE_ENSURE(context, my_tensor != nullptr); +// +// This is because the index might point to the optional tensor constant +// (kTfLiteOptionalTensor) in which case there is no tensor to return. +TfLiteTensor* GetVariableInput(TfLiteContext* context, const TfLiteNode* node, + int index); + +// Note: You must check if result is not null: +// +// TfLiteTensor* my_tensor = GetOutput(context, node, kMyTensorIdx); +// TF_LITE_ENSURE(context, my_tensor != nullptr); +// +// This is because the index might point to the optional tensor constant +// (kTfLiteOptionalTensor) in which case there is no tensor to return. +TfLiteTensor* GetOutput(TfLiteContext* context, const TfLiteNode* node, + int index); + +// Same as `GetOutput` but returns boolean and uses output argument for tensor. +// +// TfLiteTensor* my_tensor; +// TF_LITE_ENSURE_OK(context, +// GetOutputSafe(context, node, kMyTensorIdx, &my_tensor)); +// // can use my_tensor directly from here onwards, it is not nullptr +// +// Should be used in cases where the binary size is too large. 
+TfLiteStatus GetOutputSafe(const TfLiteContext* context, const TfLiteNode* node, + int index, TfLiteTensor** tensor); + +// Note: You must check if result is not null: +// +// TfLiteTensor* my_tensor = GetOptionalInputTensor(context, node, kIdx); +// TF_LITE_ENSURE(context, my_tensor != nullptr); +// +// This is because the index might point to the optional tensor constant +// (kTfLiteOptionalTensor) in which case there is no tensor to return. +// +// Deprecated. GetInput has the same functionality. +const TfLiteTensor* GetOptionalInputTensor(const TfLiteContext* context, + const TfLiteNode* node, int index); + +#ifndef TF_LITE_STATIC_MEMORY +// Note: You must check if result is not null: +// +// TfLiteTensor* my_tensor = GetTemporary(context, node, kMyTensorIdx); +// TF_LITE_ENSURE(context, my_tensor != nullptr); +// +// This is because the index might point to the optional tensor constant +// (kTfLiteOptionalTensor) in which case there is no tensor to return. +TfLiteTensor* GetTemporary(TfLiteContext* context, const TfLiteNode* node, + int index); + +// Same as `GetTemporary` but returns boolean and uses output argument for +// tensor. +// +// TfLiteTensor* my_tensor; +// TF_LITE_ENSURE_OK(context, +// GetTemporarySafe(context, node, kMyTensorIdx, +// &my_tensor)); +// // can use my_tensor directly from here onwards, it is not nullptr +// +// Should be used in cases where the binary size is too large. +TfLiteStatus GetTemporarySafe(const TfLiteContext* context, + const TfLiteNode* node, int index, + TfLiteTensor** tensor); + +// Note: You must check if result is not null: +// +// TfLiteTensor* my_tensor = GetIntermediates(context, node, kMyTensorIdx); +// TF_LITE_ENSURE(context, my_tensor != nullptr); +// +// This is because the index might point to the optional tensor constant +// (kTfLiteOptionalTensor) in which case there is no tensor to return. 
+const TfLiteTensor* GetIntermediates(TfLiteContext* context, + const TfLiteNode* node, int index); + +// Same as `GetIntermediates` but returns boolean and uses output argument for +// tensor. +// +// TfLiteTensor* my_tensor; +// TF_LITE_ENSURE_OK(context, +// GetIntermediatesSafe(context, node, kMyTensorIdx, +// &my_tensor)); +// // can use my_tensor directly from here onwards, it is not nullptr +// +// Should be used in cases where the binary size is too large. +TfLiteStatus GetIntermediatesSafe(const TfLiteContext* context, + const TfLiteNode* node, int index, + TfLiteTensor** tensor); +#endif // TF_LITE_STATIC_MEMORY + +inline int NumDimensions(const TfLiteTensor* t) { return t->dims->size; } +inline int SizeOfDimension(const TfLiteTensor* t, int dim) { + return t->dims->data[dim]; +} + +inline int NumInputs(const TfLiteNode* node) { + return node->inputs == nullptr ? 0 : node->inputs->size; +} +inline int NumOutputs(const TfLiteNode* node) { + return node->outputs == nullptr ? 0 : node->outputs->size; +} + +#ifndef TF_LITE_STATIC_MEMORY +inline int NumIntermediates(const TfLiteNode* node) { + return node->intermediates->size; +} +#endif // TF_LITE_STATIC_MEMORY + +inline int64_t NumElements(const TfLiteIntArray* dims) { + int64_t count = 1; + for (int i = 0; i < dims->size; ++i) { + count *= dims->data[i]; + } + return count; +} + +inline int64_t NumElements(const TfLiteTensor* t) { + return NumElements(t->dims); +} + +inline int64_t NumElements(const int* dims, int num_dims) { + int64_t count = 1; + for (int i = 0; i < num_dims; ++i) { + count *= dims[i]; + } + return count; +} + +// Determines whether tensor is constant. +// TODO(b/138199592): Introduce new query which checks for constant OR +// persistent-read-only, which would be useful for most tensor kernels that +// are potentially dynamic based on the input tensor value availability at the +// time of prepare. 
+inline bool IsConstantTensor(const TfLiteTensor* tensor) { + return tensor->allocation_type == kTfLiteMmapRo; +} + +inline bool IsConstantOrPersistentTensor(const TfLiteTensor* tensor) { + return IsConstantTensor(tensor) || + (tensor->allocation_type == kTfLitePersistentRo); +} + +// Determines whether tensor is dynamic. Note that a tensor can be non-const and +// not dynamic. This function specifically checks for a dynamic tensor. +inline bool IsDynamicTensor(const TfLiteTensor* tensor) { + return tensor->allocation_type == kTfLiteDynamic; +} +#ifndef TF_LITE_STATIC_MEMORY +// Sets tensor to dynamic. +inline void SetTensorToDynamic(TfLiteTensor* tensor) { + if (tensor->allocation_type != kTfLiteDynamic) { + TfLiteTensorDataFree(tensor); + tensor->allocation_type = kTfLiteDynamic; + } +} + +// Sets tensor to persistent and read-only. +inline void SetTensorToPersistentRo(TfLiteTensor* tensor) { + if (tensor->allocation_type != kTfLitePersistentRo) { + TfLiteTensorDataFree(tensor); + tensor->allocation_type = kTfLitePersistentRo; + } +} +#endif // TF_LITE_STATIC_MEMORY + +// Determines whether it is a hybrid op - one that has float inputs and +// quantized weights. +inline bool IsHybridOp(const TfLiteTensor* input, const TfLiteTensor* weight) { + return ((weight->type == kTfLiteUInt8 || weight->type == kTfLiteInt8) && + input->type == kTfLiteFloat32); +} + +// Check dimensionality match and populate OpData for Conv and DepthwiseConv. 
+TfLiteStatus PopulateConvolutionQuantizationParams( + TfLiteContext* context, const TfLiteTensor* input, + const TfLiteTensor* filter, const TfLiteTensor* bias, TfLiteTensor* output, + const TfLiteFusedActivation& activation, int32_t* multiplier, int* shift, + int32_t* output_activation_min, int32_t* output_activation_max, + int32_t* per_channel_multiplier, int32_t* per_channel_shift); + +TfLiteStatus PopulateConvolutionQuantizationParams( + TfLiteContext* context, const TfLiteTensor* input, + const TfLiteTensor* filter, const TfLiteTensor* bias, TfLiteTensor* output, + const TfLiteFusedActivation& activation, int32_t* multiplier, int* shift, + int32_t* output_activation_min, int32_t* output_activation_max, + int32_t* per_channel_multiplier, int32_t* per_channel_shift, + int num_channels); + +// Calculates the multiplication factor for a quantized convolution (or +// quantized depthwise convolution) involving the given tensors. Returns an +// error if the scales of the tensors are not compatible. +TfLiteStatus GetQuantizedConvolutionMultipler(TfLiteContext* context, + const TfLiteTensor* input, + const TfLiteTensor* filter, + const TfLiteTensor* bias, + TfLiteTensor* output, + double* multiplier); + +TfLiteStatus GetQuantizedConvolutionMultipler(TfLiteContext* context, + const TfLiteTensor* input, + const TfLiteTensor* filter, + TfLiteTensor* output, + double* multiplier); + +// Calculates the useful quantized range of an activation layer given its +// activation tensor. 
+TfLiteStatus CalculateActivationRangeQuantized(TfLiteContext* context, + TfLiteFusedActivation activation, + TfLiteTensor* output, + int32_t* act_min, + int32_t* act_max); + +// Calculates the useful range of an activation layer given its activation +// tensor. +template <typename T> +void CalculateActivationRange(TfLiteFusedActivation activation, + T* activation_min, T* activation_max) { + if (activation == kTfLiteActRelu) { + *activation_min = 0; + *activation_max = std::numeric_limits<T>::max(); + } else if (activation == kTfLiteActRelu6) { + *activation_min = 0; + *activation_max = 6; + } else if (activation == kTfLiteActReluN1To1) { + *activation_min = -1; + *activation_max = 1; + } else { + *activation_min = std::numeric_limits<T>::lowest(); + *activation_max = std::numeric_limits<T>::max(); + } +} + +// Return true if the given tensors have the same shape. +bool HaveSameShapes(const TfLiteTensor* input1, const TfLiteTensor* input2); + +#if !defined(TF_LITE_STATIC_MEMORY) +// Gets the output shape from the input tensor. +TfLiteStatus GetOutputShapeFromInput(TfLiteContext* context, + const TfLiteTensor* input, + TfLiteIntArray** output_shape); + +std::string GetShapeDebugString(const TfLiteIntArray* shape); + +#endif // !defined(TF_LITE_STATIC_MEMORY) + +// Calculates the output_shape that is necessary for element-wise operations +// with broadcasting involving the two input tensors. +TfLiteStatus CalculateShapeForBroadcast(TfLiteContext* context, + const TfLiteTensor* input1, + const TfLiteTensor* input2, + TfLiteIntArray** output_shape); + +// Calculates the output_shape that is necessary for element-wise operations +// with broadcasting involving the three input tensors. +TfLiteStatus CalculateShapeForBroadcast(TfLiteContext* context, + const TfLiteTensor* input1, + const TfLiteTensor* input2, + const TfLiteTensor* input3, + TfLiteIntArray** output_shape); + +// Return the size of given type in bytes. Return 0 in case of string. 
+int TfLiteTypeGetSize(TfLiteType type); + +// Whether the current platform is mobile (Android or iOS). +bool IsMobilePlatform(); + +// Returns whether there is unspecified dimension in the tensor's dim signature. +bool HasUnspecifiedDimension(const TfLiteTensor* tensor); + +} // namespace tflite + +#endif // TENSORFLOW_LITE_KERNELS_KERNEL_UTIL_H_ diff --git a/tensorflow/lite/kernels/op_macros.h b/tensorflow/lite/kernels/op_macros.h new file mode 100644 index 0000000..4255d25 --- /dev/null +++ b/tensorflow/lite/kernels/op_macros.h @@ -0,0 +1,38 @@ +/* Copyright 2017 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ +#ifndef TENSORFLOW_LITE_KERNELS_OP_MACROS_H_ +#define TENSORFLOW_LITE_KERNELS_OP_MACROS_H_ + +#include "tensorflow/lite/micro/debug_log.h" + +#if !defined(TF_LITE_MCU_DEBUG_LOG) +#include <cstdlib> +#define TFLITE_ABORT abort() +#else +inline void AbortImpl() { + DebugLog("HALTED\n"); + while (1) { + } +} +#define TFLITE_ABORT AbortImpl(); +#endif + +#if defined(NDEBUG) +#define TFLITE_ASSERT_FALSE (static_cast<void>(0)) +#else +#define TFLITE_ASSERT_FALSE TFLITE_ABORT +#endif + +#endif // TENSORFLOW_LITE_KERNELS_OP_MACROS_H_ diff --git a/tensorflow/lite/kernels/padding.h b/tensorflow/lite/kernels/padding.h new file mode 100644 index 0000000..cc9d596 --- /dev/null +++ b/tensorflow/lite/kernels/padding.h @@ -0,0 +1,115 @@ +/* Copyright 2017 The TensorFlow Authors. 
All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ +#ifndef TENSORFLOW_LITE_KERNELS_PADDING_H_ +#define TENSORFLOW_LITE_KERNELS_PADDING_H_ + +#include "tensorflow/lite/core/c/builtin_op_data.h" +#include "tensorflow/lite/kernels/internal/types.h" + +namespace tflite { + +inline int ComputePadding(int stride, int dilation_rate, int in_size, + int filter_size, int out_size) { + int effective_filter_size = (filter_size - 1) * dilation_rate + 1; + int padding = ((out_size - 1) * stride + effective_filter_size - in_size) / 2; + return padding > 0 ? padding : 0; +} + +// It's not guaranteed that padding is symmetric. It's important to keep +// offset for algorithms need all paddings. +inline int ComputePaddingWithOffset(int stride, int dilation_rate, int in_size, + int filter_size, int out_size, + int* offset) { + int effective_filter_size = (filter_size - 1) * dilation_rate + 1; + int total_padding = + ((out_size - 1) * stride + effective_filter_size - in_size); + total_padding = total_padding > 0 ? total_padding : 0; + *offset = total_padding % 2; + return total_padding / 2; +} + +// Matching GetWindowedOutputSize in TensorFlow. 
+inline int ComputeOutSize(TfLitePadding padding, int image_size, + int filter_size, int stride, int dilation_rate = 1) { + int effective_filter_size = (filter_size - 1) * dilation_rate + 1; + + // TODO(b/186448822): This uses 0 since the function has no other way to + // report error case + if (stride == 0) return 0; + + switch (padding) { + case kTfLitePaddingSame: + return (image_size + stride - 1) / stride; + case kTfLitePaddingValid: + return (image_size + stride - effective_filter_size) / stride; + default: + return 0; + } +} + +inline TfLitePaddingValues ComputePaddingHeightWidth( + int stride_height, int stride_width, int dilation_rate_height, + int dilation_rate_width, int in_height, int in_width, int filter_height, + int filter_width, TfLitePadding padding, int* out_height, int* out_width) { + *out_width = ComputeOutSize(padding, in_width, filter_width, stride_width, + dilation_rate_width); + *out_height = ComputeOutSize(padding, in_height, filter_height, stride_height, + dilation_rate_height); + + TfLitePaddingValues padding_values; + int offset = 0; + padding_values.height = + ComputePaddingWithOffset(stride_height, dilation_rate_height, in_height, + filter_height, *out_height, &offset); + padding_values.height_offset = offset; + padding_values.width = + ComputePaddingWithOffset(stride_width, dilation_rate_width, in_width, + filter_width, *out_width, &offset); + padding_values.width_offset = offset; + return padding_values; +} + +inline Padding3DValues ComputePadding3DValues( + int stride_height, int stride_width, int stride_depth, + int dilation_rate_height, int dilation_rate_width, int dilation_rate_depth, + int in_height, int in_width, int in_depth, int filter_height, + int filter_width, int filter_depth, TfLitePadding padding, int* out_height, + int* out_width, int* out_depth) { + *out_width = ComputeOutSize(padding, in_width, filter_width, stride_width, + dilation_rate_width); + *out_height = ComputeOutSize(padding, in_height, filter_height, 
stride_height, + dilation_rate_height); + *out_depth = ComputeOutSize(padding, in_depth, filter_depth, stride_depth, + dilation_rate_depth); + + Padding3DValues padding_values; + int offset = 0; + padding_values.depth = + ComputePaddingWithOffset(stride_depth, dilation_rate_depth, in_depth, + filter_depth, *out_depth, &offset); + padding_values.depth_offset = offset; + padding_values.height = + ComputePaddingWithOffset(stride_height, dilation_rate_height, in_height, + filter_height, *out_height, &offset); + padding_values.height_offset = offset; + padding_values.width = + ComputePaddingWithOffset(stride_width, dilation_rate_width, in_width, + filter_width, *out_width, &offset); + padding_values.width_offset = offset; + return padding_values; +} +} // namespace tflite + +#endif // TENSORFLOW_LITE_KERNELS_PADDING_H_ diff --git a/tensorflow/lite/micro/BUILD b/tensorflow/lite/micro/BUILD new file mode 100644 index 0000000..72c23f3 --- /dev/null +++ b/tensorflow/lite/micro/BUILD @@ -0,0 +1,601 @@ +load("@bazel_skylib//:bzl_library.bzl", "bzl_library") +load( + "//tensorflow/lite/micro:build_def.bzl", + "micro_copts", +) + +package( + default_visibility = ["//visibility:public"], + features = [ + "-layering_check", # buildozer: disable=no-layering-check, TODO(b/177257333): consider enabling layering check + "-parse_headers", # buildozer: disable=no-parse-headers, paser_headers is unavailable with bazel (http://b/175817117#comment4) + ], + licenses = ["notice"], +) + +package_group( + name = "micro", + packages = ["//tensorflow/lite/micro/..."], +) + +cc_library( + name = "micro_compatibility", + hdrs = [ + "compatibility.h", + ], + copts = micro_copts(), +) + +cc_library( + # TODO(b/187093492): Rename to micro_interpreter. 
+ name = "micro_framework", + srcs = [ + "micro_interpreter.cc", + ], + hdrs = [ + "micro_interpreter.h", + ], + copts = micro_copts(), + deps = [ + ":memory_helpers", + ":micro_allocator", + ":micro_context", + ":micro_graph", + ":micro_profiler_interface", + ":op_resolvers", + "//tensorflow/lite:type_to_tflitetype", + "//tensorflow/lite/c:common", + "//tensorflow/lite/kernels/internal:tensor", + "//tensorflow/lite/micro/tflite_bridge:flatbuffer_conversions_bridge", + "//tensorflow/lite/schema:schema_fbs", + "//tensorflow/lite/schema:schema_utils", + "@flatbuffers//:runtime_cc", + ], +) + +cc_library( + name = "micro_context", + srcs = [ + "micro_context.cc", + ], + hdrs = [ + "micro_context.h", + ], + copts = micro_copts(), + deps = [ + ":memory_helpers", + ":micro_allocator", + ":micro_graph", + ":micro_log", + ":micro_profiler_interface", + "//tensorflow/lite/c:common", + ], +) + +cc_library( + name = "micro_common", + hdrs = [ + "micro_common.h", + ], + copts = micro_copts(), + deps = [ + "//tensorflow/lite/c:common", + ], +) + +cc_library( + name = "fake_micro_context", + srcs = [ + "fake_micro_context.cc", + ], + hdrs = [ + "fake_micro_context.h", + ], + copts = micro_copts(), + deps = [ + ":memory_helpers", + ":micro_allocator", + ":micro_context", + ":micro_log", + ":mock_micro_graph", + "//tensorflow/lite/c:common", + ], +) + +cc_library( + name = "micro_graph", + srcs = ["micro_graph.cc"], + hdrs = ["micro_graph.h"], + deps = [ + ":memory_helpers", + ":micro_allocator", + ":micro_common", + ":micro_log", + ":micro_profiler", + ":micro_resource_variable", + "//tensorflow/lite/c:common", + "//tensorflow/lite/kernels/internal:compatibility", + "//tensorflow/lite/schema:schema_fbs", + "@flatbuffers//:runtime_cc", + ], +) + +cc_library( + name = "mock_micro_graph", + srcs = ["mock_micro_graph.cc"], + hdrs = ["mock_micro_graph.h"], + deps = [ + ":micro_allocator", + ":micro_graph", + ":test_helpers", + "//tensorflow/lite/c:common", + 
"//tensorflow/lite/schema:schema_fbs", + ], +) + +cc_library( + name = "micro_allocator", + srcs = [ + "micro_allocation_info.cc", + "micro_allocator.cc", + ], + hdrs = [ + "micro_allocation_info.h", + "micro_allocator.h", + ], + copts = micro_copts(), + deps = [ + ":flatbuffer_utils", + ":memory_helpers", + ":micro_arena_constants", + ":micro_common", + ":micro_compatibility", + ":micro_log", + "//tensorflow/lite/kernels:kernel_util", + "//tensorflow/lite/kernels/internal:compatibility", + "//tensorflow/lite/micro/arena_allocator:ibuffer_allocator", + "//tensorflow/lite/micro/arena_allocator:non_persistent_arena_buffer_allocator", + "//tensorflow/lite/micro/arena_allocator:persistent_arena_buffer_allocator", + "//tensorflow/lite/micro/arena_allocator:simple_memory_allocator", + "//tensorflow/lite/micro/memory_planner:greedy_memory_planner", + "//tensorflow/lite/micro/memory_planner:micro_memory_planner", + "//tensorflow/lite/micro/tflite_bridge:flatbuffer_conversions_bridge", + "//tensorflow/lite/schema:schema_fbs", + "//tensorflow/lite/schema:schema_utils", + "@flatbuffers//:runtime_cc", + ], +) + +cc_library( + name = "micro_arena_constants", + hdrs = [ + "micro_arena_constants.h", + ], + copts = micro_copts(), + deps = [], +) + +cc_library( + name = "flatbuffer_utils", + srcs = ["flatbuffer_utils.cc"], + hdrs = ["flatbuffer_utils.h"], + deps = [ + "//tensorflow/lite/c:common", + "//tensorflow/lite/schema:schema_fbs", + "@flatbuffers//:runtime_cc", + ], +) + +cc_library( + name = "memory_helpers", + srcs = ["memory_helpers.cc"], + hdrs = ["memory_helpers.h"], + deps = [ + "//tensorflow/lite/c:common", + "//tensorflow/lite/kernels/internal:reference", + "//tensorflow/lite/micro/tflite_bridge:flatbuffer_conversions_bridge", + "//tensorflow/lite/schema:schema_fbs", + "@flatbuffers//:runtime_cc", + ], +) + +cc_library( + name = "test_helpers", + srcs = [ + "test_helper_custom_ops.cc", + "test_helpers.cc", + ], + hdrs = [ + "test_helper_custom_ops.h", + 
"test_helpers.h", + ], + copts = micro_copts(), + deps = [ + ":memory_helpers", + ":micro_utils", + ":op_resolvers", + "//tensorflow/lite:type_to_tflitetype", + "//tensorflow/lite/c:common", + "//tensorflow/lite/kernels:kernel_util", + "//tensorflow/lite/kernels/internal:compatibility", + "//tensorflow/lite/kernels/internal:tensor", + "//tensorflow/lite/schema:schema_fbs", + "@flatbuffers//:runtime_cc", + ], +) + +cc_library( + name = "op_resolvers", + srcs = [ + "micro_op_resolver.cc", + ], + hdrs = [ + "micro_mutable_op_resolver.h", + "micro_op_resolver.h", + ], + copts = micro_copts(), + deps = [ + ":micro_compatibility", + ":micro_log", + "//tensorflow/lite/c:common", + "//tensorflow/lite/core/api", + "//tensorflow/lite/kernels:op_macros", + "//tensorflow/lite/kernels/internal:compatibility", + "//tensorflow/lite/micro/kernels:micro_ops", + "//tensorflow/lite/micro/tflite_bridge:flatbuffer_conversions_bridge", + "//tensorflow/lite/schema:schema_fbs", + ], +) + +cc_library( + name = "debug_log", + srcs = [ + "debug_log.cc", + ], + hdrs = [ + "debug_log.h", + ], + copts = micro_copts(), +) + +cc_library( + name = "micro_log", + srcs = [ + "micro_log.cc", + ], + hdrs = [ + "micro_log.h", + ], + copts = micro_copts(), + deps = [ + ":debug_log", + ":micro_string", + ], +) + +cc_library( + name = "micro_resource_variable", + srcs = [ + "micro_resource_variable.cc", + ], + hdrs = [ + "micro_resource_variable.h", + ], + copts = micro_copts(), + deps = [ + ":micro_allocator", + ":micro_log", + ":micro_utils", + "//tensorflow/lite/c:common", + "//tensorflow/lite/kernels/internal:compatibility", + ], +) + +cc_library( + name = "micro_string", + srcs = [ + "micro_string.cc", + ], + hdrs = [ + "micro_string.h", + ], + copts = micro_copts(), +) + +cc_library( + name = "micro_time", + srcs = [ + "micro_time.cc", + ], + hdrs = [ + "micro_time.h", + ], + copts = micro_copts() + ["-DTF_LITE_USE_CTIME"], + deps = ["//tensorflow/lite/c:common"], +) + +cc_library( + name = 
"micro_profiler_interface", + hdrs = [ + "micro_profiler_interface.h", + ], + copts = micro_copts(), +) + +cc_library( + name = "micro_profiler", + srcs = [ + "micro_profiler.cc", + ], + hdrs = [ + "micro_profiler.h", + ], + copts = micro_copts(), + deps = [ + ":micro_compatibility", + ":micro_log", + ":micro_profiler_interface", + ":micro_time", + "//tensorflow/lite/kernels/internal:compatibility", + ], +) + +cc_library( + name = "micro_utils", + srcs = [ + "micro_utils.cc", + ], + hdrs = [ + "micro_utils.h", + ], + copts = micro_copts(), + deps = [ + ":memory_helpers", + ":micro_log", + "//tensorflow/lite/c:common", + "//tensorflow/lite/kernels:op_macros", + ], +) + +cc_library( + name = "recording_allocators", + srcs = [ + "recording_micro_allocator.cc", + ], + hdrs = [ + "recording_micro_allocator.h", + "recording_micro_interpreter.h", + ], + copts = micro_copts(), + deps = [ + ":micro_allocator", + ":micro_compatibility", + ":micro_framework", + ":micro_log", + "//tensorflow/lite/kernels/internal:compatibility", + "//tensorflow/lite/micro/arena_allocator:recording_simple_memory_allocator", + ], +) + +cc_library( + name = "system_setup", + srcs = [ + "system_setup.cc", + ], + hdrs = [ + "system_setup.h", + ], + copts = micro_copts(), +) + +cc_test( + name = "micro_log_test", + srcs = [ + "micro_log_test.cc", + ], + deps = [ + ":micro_log", + ":system_setup", + ], +) + +cc_test( + name = "micro_mutable_op_resolver_test", + srcs = [ + "micro_mutable_op_resolver_test.cc", + ], + deps = [ + ":micro_framework", + ":op_resolvers", + "//tensorflow/lite/micro/testing:micro_test", + ], +) + +cc_test( + name = "micro_context_test", + srcs = [ + "micro_context_test.cc", + ], + deps = [ + ":micro_allocator", + ":micro_context", + ":test_helpers", + "//tensorflow/lite/micro/testing:micro_test", + ], +) + +cc_test( + name = "fake_micro_context_test", + srcs = [ + "fake_micro_context_test.cc", + ], + deps = [ + ":fake_micro_context", + ":micro_allocator", + ":test_helpers", + 
"//tensorflow/lite/micro/testing:micro_test", + ], +) + +cc_test( + name = "micro_interpreter_test", + srcs = [ + "micro_interpreter_test.cc", + ], + deps = [ + ":micro_compatibility", + ":micro_framework", + ":micro_profiler_interface", + ":micro_utils", + ":op_resolvers", + ":recording_allocators", + ":test_helpers", + "//tensorflow/lite/micro/testing:micro_test", + ], +) + +cc_test( + name = "micro_allocator_test", + srcs = [ + "micro_allocator_test.cc", + ], + deps = [ + ":memory_helpers", + ":micro_allocator", + ":micro_arena_constants", + ":test_helpers", + "//tensorflow/lite/c:common", + "//tensorflow/lite/micro/memory_planner:memory_plan_struct", + "//tensorflow/lite/micro/memory_planner:non_persistent_buffer_planner_shim", + "//tensorflow/lite/micro/testing:micro_test", + "//tensorflow/lite/micro/testing:test_conv_model", + ], +) + +cc_test( + name = "micro_allocation_info_test", + srcs = [ + "micro_allocation_info_test.cc", + ], + deps = [ + ":micro_allocator", + ":test_helpers", + "//tensorflow/lite/micro/testing:micro_test", + ], +) + +cc_test( + name = "recording_micro_allocator_test", + srcs = [ + "recording_micro_allocator_test.cc", + ], + deps = [ + ":micro_allocator", + ":op_resolvers", + ":recording_allocators", + ":test_helpers", + "//tensorflow/lite/micro/testing:micro_test", + "//tensorflow/lite/micro/testing:test_conv_model", + ], +) + +cc_test( + name = "flatbuffer_utils_test", + srcs = [ + "flatbuffer_utils_test.cc", + ], + tags = [ + "nomsan", # TODO(b/192311485): See http://b/192311485#comment2 + ], + deps = [ + ":flatbuffer_utils", + ":test_helpers", + "//tensorflow/lite/micro/testing:micro_test", + ], +) + +cc_test( + name = "memory_helpers_test", + srcs = [ + "memory_helpers_test.cc", + ], + deps = [ + ":memory_helpers", + ":test_helpers", + "//tensorflow/lite/micro/testing:micro_test", + ], +) + +cc_test( + name = "testing_helpers_test", + srcs = [ + "testing_helpers_test.cc", + ], + deps = [ + ":micro_framework", + 
"//tensorflow/lite/micro:test_helpers", + "//tensorflow/lite/micro/testing:micro_test", + ], +) + +cc_test( + name = "micro_utils_test", + srcs = [ + "micro_utils_test.cc", + ], + deps = [ + ":micro_utils", + "//tensorflow/lite/micro/testing:micro_test", + ], +) + +cc_test( + name = "micro_string_test", + srcs = [ + "micro_string_test.cc", + ], + deps = [ + ":micro_string", + "//tensorflow/lite/micro/testing:micro_test", + ], +) + +cc_test( + name = "micro_time_test", + srcs = [ + "micro_time_test.cc", + ], + deps = [ + ":micro_time", + "//tensorflow/lite/micro/testing:micro_test", + ], +) + +cc_test( + name = "micro_resource_variable_test", + srcs = ["micro_resource_variable_test.cc"], + deps = [ + ":micro_resource_variable", + "//tensorflow/lite/micro/testing:micro_test", + ], +) + +cc_test( + name = "memory_arena_threshold_test", + srcs = [ + "memory_arena_threshold_test.cc", + ], + deps = [ + ":op_resolvers", + ":recording_allocators", + "//tensorflow/lite/micro/benchmarks:keyword_scrambled_model_data", + "//tensorflow/lite/micro/testing:micro_test", + "//tensorflow/lite/micro/testing:test_conv_model", + ], +) + +bzl_library( + name = "build_def_bzl", + srcs = ["build_def.bzl"], + visibility = [":micro"], +) diff --git a/tensorflow/lite/micro/arc_custom/micro_time.cc b/tensorflow/lite/micro/arc_custom/micro_time.cc new file mode 100644 index 0000000..12e23f7 --- /dev/null +++ b/tensorflow/lite/micro/arc_custom/micro_time.cc @@ -0,0 +1,42 @@ +/* Copyright 2021 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +// ARC Platform micro timer implementation. + +// Use default timer of arc_timer API delivered with MetaWare toolkit. + +#include "tensorflow/lite/micro/micro_time.h" + +#include + +#include + +namespace tflite { + +uint32_t ticks_per_second() { + const unsigned long clocs_per_sec_real = _timer_clocks_per_sec(); + if (clocs_per_sec_real < + static_cast(std::numeric_limits::max())) + return static_cast(clocs_per_sec_real); + else + return std::numeric_limits::max(); +} + +uint32_t GetCurrentTimeTicks() { + uint32_t ticks_real = _timer_default_read(); + return ticks_real; +} + +} // namespace tflite diff --git a/tensorflow/lite/micro/arc_custom/system_setup.cc b/tensorflow/lite/micro/arc_custom/system_setup.cc new file mode 100644 index 0000000..c27c955 --- /dev/null +++ b/tensorflow/lite/micro/arc_custom/system_setup.cc @@ -0,0 +1,25 @@ +/* Copyright 2021 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
#include "tensorflow/lite/micro/system_setup.h" + +#include <arc/arc_timer.h> + +namespace tflite { + +// Only the timer needs to be reset for the custom arc platform. +void InitializeTarget() { _timer_default_reset(); }
+#define EMSDP_LOG_TO_UART + +// Memory size for symbols dump in EMSDP_LOG_TO_MEMORY destination +#define EMSDP_LOG_TO_MEMORY_SIZE (2 * 1024) + +// EMSDP Debug UART related defines (registers and bits) +#define EMSDP_DBG_UART_BASE (0xF0004000U) +#define DW_UART_CPR_FIFO_STAT (1 << 10) +#define DW_UART_USR_TFNF (0x02) +#define DW_UART_LSR_TXD_EMPTY (0x20) + +// EMSDP UART registers map (only necessairy fields) +typedef volatile struct dw_uart_reg { + uint32_t DATA; /* data in/out and DLL */ + uint32_t RES1[4]; + uint32_t LSR; /* Line Status Register */ + uint32_t RES2[25]; + uint32_t USR; /* UART status register */ + uint32_t RES3[29]; + uint32_t CPR; /* Component parameter register */ +} DW_UART_REG; + +// For simplicity we assume U-boot has already initialized debug console during +// application loading (or on reset). Hence, we use only status and data +// registers to organize blocking loop for printing symbols. No input and no IRQ +// handling. See embarc_osp repository for full EMSDP uart driver. +// (https://github.com/foss-for-synopsys-dwc-arc-processors/embarc_osp) +void DbgUartSendStr(const char* s) { + DW_UART_REG* uart_reg_ptr = (DW_UART_REG*)(EMSDP_DBG_UART_BASE); + const char* src = s; + while (*src) { + // Check uart status to send char + bool uart_is_ready = false; + if (uart_reg_ptr->CPR & DW_UART_CPR_FIFO_STAT) + uart_is_ready = ((uart_reg_ptr->USR & DW_UART_USR_TFNF) != 0); + else + uart_is_ready = ((uart_reg_ptr->LSR & DW_UART_LSR_TXD_EMPTY) != 0); + + // Send char if uart is ready. + if (uart_is_ready) uart_reg_ptr->DATA = *src++; + } +} + +// Simple dump of symbols to a pre-allocated memory region. +// When total log exceeds memory region size, cursor is moved to its begining. +// The memory region can be viewed afterward with debugger. +// It can be viewed/read with debugger afterward. 
+void LogToMem(const char* s) { + static int cursor = 0; +#pragma Bss(".debug_log") + static volatile char debug_log_mem[EMSDP_LOG_TO_MEMORY_SIZE]; +#pragma Bss() + + const char* src = s; + while (*src) { + debug_log_mem[cursor] = *src++; + cursor = (cursor < EMSDP_LOG_TO_MEMORY_SIZE) ? cursor + 1 : 0; + } + debug_log_mem[cursor] = '^'; +} + +extern "C" void DebugLog(const char* s) { +#ifndef TF_LITE_STRIP_ERROR_STRINGS + +#if defined EMSDP_LOG_TO_UART + DbgUartSendStr(s); +#endif + +#if defined EMSDP_LOG_TO_MEMORY +#warning \ + "EMSDP_LOG_TO_MEMORY is defined. View .debug_log memory region for stdout" + LogToMem(s); +#endif + +#if defined EMSDP_LOG_TO_HOST +#warning "EMSDP_LOG_TO_HOST is defined. Ensure hostlib is linked." + fprintf(stderr, "%s", s); +#endif + +#endif // TF_LITE_STRIP_ERROR_STRINGS +} diff --git a/tensorflow/lite/micro/arena_allocator/BUILD b/tensorflow/lite/micro/arena_allocator/BUILD new file mode 100644 index 0000000..bd5ea70 --- /dev/null +++ b/tensorflow/lite/micro/arena_allocator/BUILD @@ -0,0 +1,133 @@ +load( + "//tensorflow/lite/micro:build_def.bzl", + "micro_copts", +) + +package( + default_visibility = ["//visibility:public"], + # Disabling layering_check because of http://b/177257332 + features = ["-layering_check"], + licenses = ["notice"], +) + +cc_library( + name = "ibuffer_allocator", + hdrs = [ + "ibuffer_allocator.h", + ], + copts = micro_copts(), + deps = [ + "//tensorflow/lite/c:common", + ], +) + +cc_library( + name = "non_persistent_arena_buffer_allocator", + srcs = ["non_persistent_arena_buffer_allocator.cc"], + hdrs = ["non_persistent_arena_buffer_allocator.h"], + copts = micro_copts(), + deps = [ + ":ibuffer_allocator", + "//tensorflow/lite/c:common", + "//tensorflow/lite/micro:memory_helpers", + "//tensorflow/lite/micro:micro_arena_constants", + "//tensorflow/lite/micro:micro_compatibility", + "//tensorflow/lite/micro:micro_log", + ], +) + +cc_test( + name = "non_persistent_arena_buffer_allocator_test", + srcs = 
["non_persistent_arena_buffer_allocator_test.cc"], + deps = [ + ":non_persistent_arena_buffer_allocator", + "//tensorflow/lite/micro:test_helpers", + "//tensorflow/lite/micro/testing:micro_test", + ], +) + +cc_library( + name = "persistent_arena_buffer_allocator", + srcs = ["persistent_arena_buffer_allocator.cc"], + hdrs = ["persistent_arena_buffer_allocator.h"], + copts = micro_copts(), + deps = [ + ":ibuffer_allocator", + "//tensorflow/lite/c:common", + "//tensorflow/lite/micro:memory_helpers", + "//tensorflow/lite/micro:micro_arena_constants", + "//tensorflow/lite/micro:micro_compatibility", + "//tensorflow/lite/micro:micro_log", + ], +) + +cc_test( + name = "persistent_arena_buffer_allocator_test", + srcs = ["persistent_arena_buffer_allocator_test.cc"], + deps = [ + ":persistent_arena_buffer_allocator", + "//tensorflow/lite/micro:test_helpers", + "//tensorflow/lite/micro/testing:micro_test", + ], +) + +cc_library( + name = "simple_memory_allocator", + srcs = [ + "single_arena_buffer_allocator.cc", + ], + hdrs = [ + "single_arena_buffer_allocator.h", + ], + copts = micro_copts(), + deps = [ + ":ibuffer_allocator", + "//tensorflow/lite/c:common", + "//tensorflow/lite/micro:memory_helpers", + "//tensorflow/lite/micro:micro_arena_constants", + "//tensorflow/lite/micro:micro_compatibility", + "//tensorflow/lite/micro:micro_log", + ], +) + +cc_test( + name = "simple_memory_allocator_test", + srcs = [ + "single_arena_buffer_allocator_test.cc", + ], + deps = [ + "//tensorflow/lite/micro:micro_framework", + "//tensorflow/lite/micro:test_helpers", + "//tensorflow/lite/micro/testing:micro_test", + ], +) + +cc_library( + name = "recording_simple_memory_allocator", + srcs = [ + "recording_single_arena_buffer_allocator.cc", + ], + hdrs = [ + "recording_single_arena_buffer_allocator.h", + ], + copts = micro_copts(), + deps = [ + ":simple_memory_allocator", + "//tensorflow/lite/kernels/internal:compatibility", + "//tensorflow/lite/micro:micro_compatibility", + 
"//tensorflow/lite/micro:micro_framework", + ], +) + +cc_test( + name = "recording_simple_memory_allocator_test", + srcs = [ + "recording_single_arena_buffer_allocator_test.cc", + ], + deps = [ + ":recording_simple_memory_allocator", + "//tensorflow/lite/micro:micro_framework", + "//tensorflow/lite/micro:test_helpers", + "//tensorflow/lite/micro/testing:micro_test", + ], +) diff --git a/tensorflow/lite/micro/arena_allocator/ibuffer_allocator.h b/tensorflow/lite/micro/arena_allocator/ibuffer_allocator.h new file mode 100644 index 0000000..b92d6b2 --- /dev/null +++ b/tensorflow/lite/micro/arena_allocator/ibuffer_allocator.h @@ -0,0 +1,100 @@ +/* Copyright 2022 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ +#ifndef TENSORFLOW_LITE_MICRO_ARENA_ALLOCATOR_IBUFFER_ALLOCATOR_H_ +#define TENSORFLOW_LITE_MICRO_ARENA_ALLOCATOR_IBUFFER_ALLOCATOR_H_ + +#include +#include + +#include "tensorflow/lite/c/c_api_types.h" + +namespace tflite { +// Interface classes that the TFLM framework relies on to get buffers it needs. +// There are two types of buffers that the TFLM framework requires: persistent +// and non-persistent. Persistent buffers, once allocated, are never freed by +// the TFLM framework. Non-persist buffers can be allocated and deallocated by +// the TFLM framework. 
This file defines two interfaces classes that TFLM +// framework will rely on to manage these buffers. + +// Interface class for managing persistent buffers. +class IPersistentBufferAllocator { + public: + IPersistentBufferAllocator() {} + virtual ~IPersistentBufferAllocator() {} + + // Allocates persistent memory. The persistent buffer is never freed. + virtual uint8_t* AllocatePersistentBuffer(size_t size, size_t alignment) = 0; + + // Returns the size of all persistent allocations in bytes. + virtual size_t GetPersistentUsedBytes() const = 0; +}; + +// Interface class for managing non-persistent buffers. +// The default non-persistent buffers are temp buffers that are not resizable. +// Support of at least one resizable buffer is required. +class INonPersistentBufferAllocator { + public: + INonPersistentBufferAllocator() {} + virtual ~INonPersistentBufferAllocator() {} + + // Allocates a temporary buffer. This buffer is not resizable. + virtual uint8_t* AllocateTemp(size_t size, size_t alignment) = 0; + + // Signals that a temporary buffer is no longer needed. + virtual void DeallocateTemp(uint8_t* buf) = 0; + + // Returns true if all temporary buffers are already deallocated. + virtual bool IsAllTempDeallocated() = 0; + + // Signals that all temporary allocations can be reclaimed. TFLM calls this + // API when it knows that all temporary buffers that it requested has been + // deallocated. The goal of API is to facilitate implementations of + // INonPersistentBufferAllocator can reuse buffer with some reasonable + // complexity. + virtual TfLiteStatus ResetTempAllocations() = 0; + + // Returns a buffer that is resizable viable ResizeBuffer(). + virtual uint8_t* AllocateResizableBuffer(size_t size, size_t alignment) = 0; + + // Resizes a buffer that is previously returned by the + // AllocateResizableBuffer. 
+ virtual TfLiteStatus ResizeBuffer(uint8_t* resizable_buf, size_t size, + size_t alignment) = 0; + + // Frees up the memory occupied by the resizable buffer. + virtual TfLiteStatus DeallocateResizableBuffer(uint8_t* resizable_buf) = 0; + + // Returns a pointer pointing to the start of the overlay memory, which is + // used for activation tensors and scratch buffers by kernels at Invoke stage. + virtual uint8_t* GetOverlayMemoryAddress() const = 0; + + // Reserves the size of the overlay memory. This overlay is reserved for the + // kernels at Invoke stage. This is referred to as the overlay because before + // Invoket state, the same memory can be used for temp buffers. The layout of + // the memory is planned by the memory planner separately at Invoke stage. + virtual TfLiteStatus ReserveNonPersistentOverlayMemory(size_t size, + size_t alignment) = 0; + + // Returns the size of non-persistent buffer in use. + virtual size_t GetNonPersistentUsedBytes() const = 0; + + // Returns the number of bytes available with a given alignment. This number + // takes in account any temporary allocations. + virtual size_t GetAvailableMemory(size_t alignment) const = 0; +}; + +} // namespace tflite + +#endif // TENSORFLOW_LITE_MICRO_ARENA_ALLOCATOR_IBUFFER_ALLOCATOR_H_ diff --git a/tensorflow/lite/micro/arena_allocator/non_persistent_arena_buffer_allocator.cc b/tensorflow/lite/micro/arena_allocator/non_persistent_arena_buffer_allocator.cc new file mode 100644 index 0000000..a8f00ea --- /dev/null +++ b/tensorflow/lite/micro/arena_allocator/non_persistent_arena_buffer_allocator.cc @@ -0,0 +1,170 @@ +/* Copyright 2022 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. 
+You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ +#include "tensorflow/lite/micro/arena_allocator/non_persistent_arena_buffer_allocator.h" + +#include "tensorflow/lite/micro/memory_helpers.h" +#include "tensorflow/lite/micro/micro_log.h" + +namespace tflite { + +NonPersistentArenaBufferAllocator::NonPersistentArenaBufferAllocator( + uint8_t* buffer, size_t buffer_size) + : buffer_head_(buffer), + buffer_tail_(buffer + buffer_size), + head_temp_(buffer), + next_temp_(buffer) {} + +NonPersistentArenaBufferAllocator::~NonPersistentArenaBufferAllocator() {} + +// Allocates a temporary buffer. This buffer is not resizable. +uint8_t* NonPersistentArenaBufferAllocator::AllocateTemp(size_t size, + size_t alignment) { + uint8_t* const aligned_result = AlignPointerUp(next_temp_, alignment); + const size_t available_memory = buffer_tail_ - aligned_result; + if (available_memory < size) { + MicroPrintf( + "Failed to allocate temp memory. Requested: %u, " + "available %u, missing: %u", + size, available_memory, size - available_memory); + return nullptr; + } + next_temp_ = aligned_result + size; + temp_buffer_ptr_check_sum_ ^= reinterpret_cast(aligned_result); + temp_buffer_count_++; + return aligned_result; +} + +// Signals that a temporary buffer is no longer needed. +void NonPersistentArenaBufferAllocator::DeallocateTemp(uint8_t* temp_buf) { + temp_buffer_ptr_check_sum_ ^= reinterpret_cast(temp_buf); + temp_buffer_count_--; +} + +// Returns true if all temporary buffers are already deallocated. 
+bool NonPersistentArenaBufferAllocator::IsAllTempDeallocated() { + if (temp_buffer_count_ != 0 || temp_buffer_ptr_check_sum_ != 0) { + MicroPrintf( + "Number of allocated temp buffers: %d. Checksum passing status: %d", + temp_buffer_count_, !temp_buffer_ptr_check_sum_); + return false; + } + return true; +} + +// Signals that all temporary allocations can be reclaimed. TFLM calls this +// API when it knows that all temporary buffers that it requested has been +// deallocated. The goal of API is to facilitate implementations of +// INonPersistentBufferAllocator can reuse buffer with some reasonable +// complexity. +TfLiteStatus NonPersistentArenaBufferAllocator::ResetTempAllocations() { + if (!IsAllTempDeallocated()) { + MicroPrintf( + "All temp buffers must be freed before calling ResetTempAllocations()"); + return kTfLiteError; + } + next_temp_ = head_temp_; + return kTfLiteOk; +} + +// Returns a buffer that is resizable viable ResizeBuffer(). +uint8_t* NonPersistentArenaBufferAllocator::AllocateResizableBuffer( + size_t size, size_t alignment) { + // Only supports one resizable buffer, which starts at the buffer head. + uint8_t* expected_resizable_buf = AlignPointerUp(buffer_head_, alignment); + + if (resizable_buffer_allocated_) { + MicroPrintf( + "Cannot allocate a new resizable buffer when one is already allocated"); + return nullptr; + } + + if (ResizeBuffer(expected_resizable_buf, size, alignment) == kTfLiteOk) { + resizable_buffer_allocated_ = true; + return expected_resizable_buf; + } + return nullptr; +} + +// Resizes a buffer that is previously returned by the AllocateResizableBuffer. +// Note that ResizeBuffer(old_resizable_buf, 0, 1) effectively deallocates +// a previous allocated resizable buffer. +TfLiteStatus NonPersistentArenaBufferAllocator::ResizeBuffer( + uint8_t* resizable_buf, size_t size, size_t alignment) { + // Only supports one resizable buffer, which starts at the buffer head. 
+ uint8_t* expect_resizable_buf = AlignPointerUp(buffer_head_, alignment); + if (resizable_buf != expect_resizable_buf) { + MicroPrintf("Internal error: buffer is not resizable"); + return kTfLiteError; + } + if (head_temp_ != next_temp_) { + MicroPrintf("ResetTempAllocations() is not called before ResizeBuffer()."); + return kTfLiteError; + } + + const size_t available_memory = buffer_tail_ - expect_resizable_buf; + if (available_memory < size) { + MicroPrintf( + "Failed to resize buffer. Requested: %u, available %u, missing: %u", + size, available_memory, size - available_memory); + return kTfLiteError; + } + head_temp_ = expect_resizable_buf + size; + next_temp_ = head_temp_; + + return kTfLiteOk; +} + +// Frees up the memory occupied by the resizable buffer. +TfLiteStatus NonPersistentArenaBufferAllocator::DeallocateResizableBuffer( + uint8_t* resizable_buf) { + TfLiteStatus status = ResizeBuffer(resizable_buf, 0, 1); + if (status == kTfLiteOk) { + resizable_buffer_allocated_ = false; + } + return status; +} + +// Returns a pointer pointing to the start of the overlay memory, which is +// used for activation tensors and scratch buffers by kernels at Invoke stage. +uint8_t* NonPersistentArenaBufferAllocator::GetOverlayMemoryAddress() const { + return buffer_head_; +} + +// Reserves the size of the overlay memory. This overlay is reserved for the +// kernels at Invoke stage. This is referred to as the overlay because before +// Invoket state, the same memory can be used for temp buffers. The layout of +// the memory is planned by the memory planner separately at Invoke stage. +TfLiteStatus +NonPersistentArenaBufferAllocator::ReserveNonPersistentOverlayMemory( + size_t size, size_t alignment) { + uint8_t* expect_resizable_buf = AlignPointerUp(buffer_head_, alignment); + return ResizeBuffer(expect_resizable_buf, size, alignment); +} + +// Returns the size of non-persistent buffer in use. 
+size_t NonPersistentArenaBufferAllocator::GetNonPersistentUsedBytes() const { + return (next_temp_ - buffer_head_); +} + +// Returns the number of bytes available with a given alignment. This number +// takes in account any temporary allocations. +size_t NonPersistentArenaBufferAllocator::GetAvailableMemory( + size_t alignment) const { + uint8_t* const aligned_temp = AlignPointerUp(next_temp_, alignment); + uint8_t* const aligned_tail = AlignPointerDown(buffer_tail_, alignment); + return aligned_tail - aligned_temp; +} + +} // namespace tflite diff --git a/tensorflow/lite/micro/arena_allocator/non_persistent_arena_buffer_allocator.h b/tensorflow/lite/micro/arena_allocator/non_persistent_arena_buffer_allocator.h new file mode 100644 index 0000000..ebd3764 --- /dev/null +++ b/tensorflow/lite/micro/arena_allocator/non_persistent_arena_buffer_allocator.h @@ -0,0 +1,104 @@ +/* Copyright 2022 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/ +#ifndef TENSORFLOW_LITE_MICRO_ARENA_ALLOCATOR_NON_PERSISTENT_ARENA_BUFFER_ALLOCATOR_H_ +#define TENSORFLOW_LITE_MICRO_ARENA_ALLOCATOR_NON_PERSISTENT_ARENA_BUFFER_ALLOCATOR_H_ + +#include +#include + +#include "tensorflow/lite/c/common.h" +#include "tensorflow/lite/micro/arena_allocator/ibuffer_allocator.h" +#include "tensorflow/lite/micro/compatibility.h" + +namespace tflite { + +// Implement INonPersistentBufferAllocator on an arena that is dedicated for +// non-persistent buffers. +class NonPersistentArenaBufferAllocator : public INonPersistentBufferAllocator { + public: + NonPersistentArenaBufferAllocator(uint8_t* buffer, size_t buffer_size); + virtual ~NonPersistentArenaBufferAllocator(); + + // Allocates a temporary buffer. This buffer is not resizable. + uint8_t* AllocateTemp(size_t size, size_t alignment) override; + + // Signals that a temporary buffer is no longer needed. + void DeallocateTemp(uint8_t* buf) override; + + // Returns true if all temporary buffers are already deallocated. + bool IsAllTempDeallocated() override; + + // Signals that all temporary allocations can be reclaimed. TFLM calls this + // API when it knows that all temporary buffers that it requested has been + // deallocated. + TfLiteStatus ResetTempAllocations() override; + + // Returns a buffer that is resizable viable ResizeBuffer(). + uint8_t* AllocateResizableBuffer(size_t size, size_t alignment) override; + + // Resizes a buffer that is previously returned by the + // AllocateResizableBuffer. + TfLiteStatus ResizeBuffer(uint8_t* resizable_buf, size_t size, + size_t alignment) override; + + // Frees up the memory occupied by the resizable buffer. + TfLiteStatus DeallocateResizableBuffer(uint8_t* resizable_buf) override; + + // Returns a pointer pointing to the start of the overlay memory, which is + // used for activation tensors and scratch buffers by kernels at Invoke stage. 
+ uint8_t* GetOverlayMemoryAddress() const override; + + // Reserves the size of the overlay memory. This overlay is reserved for the + // kernels at Invoke stage. This is referred to as the overlay because before + // Invoket state, the same memory can be used for temp buffers. The layout of + // the memory is planned by the memory planner separately at Invoke stage. + TfLiteStatus ReserveNonPersistentOverlayMemory(size_t size, + size_t alignment) override; + + // Returns the size of non-persistent buffer in use. + size_t GetNonPersistentUsedBytes() const override; + + // Returns the number of bytes available with a given alignment. This number + // takes in account any temporary allocations. + size_t GetAvailableMemory(size_t alignment) const override; + + TF_LITE_REMOVE_VIRTUAL_DELETE + + private: + // The memory arena that this allocator manages. + uint8_t* const buffer_head_; + uint8_t* const buffer_tail_; + + // The whole region is split into two parts: + // buffer_head_ to head_temp_ - 1 belongs to the only resizable buffer. + // head_temp_ to buffer_tail_ can be used for (non-resizable) temp buffers. + uint8_t* head_temp_; + + // next_temp_ points to the next available temp buffer allocation address and + // its range is between head_temp_ and buffer_tail_ + uint8_t* next_temp_; + + // XOR Check sum for outstanding temp buffers. + // If all temp buffers are deallocated OR no temp buffers are allocated, + // temp_buffer_ptr_check_sum_ == nullptr. + intptr_t temp_buffer_ptr_check_sum_ = 0; + // Count of outstanding temp buffers. 
+ int temp_buffer_count_ = 0; + bool resizable_buffer_allocated_ = false; +}; + +} // namespace tflite + +#endif // TENSORFLOW_LITE_MICRO_ARENA_ALLOCATOR_NON_PERSISTENT_ARENA_BUFFER_ALLOCATOR_H_ diff --git a/tensorflow/lite/micro/arena_allocator/non_persistent_arena_buffer_allocator_test.cc b/tensorflow/lite/micro/arena_allocator/non_persistent_arena_buffer_allocator_test.cc new file mode 100644 index 0000000..e94cc8c --- /dev/null +++ b/tensorflow/lite/micro/arena_allocator/non_persistent_arena_buffer_allocator_test.cc @@ -0,0 +1,193 @@ +/* Copyright 2022 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#include "tensorflow/lite/micro/arena_allocator/non_persistent_arena_buffer_allocator.h" + +#include + +#include "tensorflow/lite/micro/test_helpers.h" +#include "tensorflow/lite/micro/testing/micro_test.h" + +TF_LITE_MICRO_TESTS_BEGIN + +// Test the creation of the resizable buffer and exercise resize. 
+TF_LITE_MICRO_TEST(TestResizableBuffer) { + constexpr size_t arena_size = 1024; + uint8_t arena[arena_size]; + tflite::NonPersistentArenaBufferAllocator allocator(arena, arena_size); + + uint8_t* resizable_buf = allocator.AllocateResizableBuffer(10, 1); + TF_LITE_MICRO_EXPECT(resizable_buf == arena); + + TF_LITE_MICRO_EXPECT_EQ( + kTfLiteOk, + allocator.ResizeBuffer(resizable_buf, /*size=*/100, /*alignment=*/1)); + TF_LITE_MICRO_EXPECT_EQ(static_cast(100), + allocator.GetNonPersistentUsedBytes()); + + TF_LITE_MICRO_EXPECT_EQ( + kTfLiteOk, + allocator.ResizeBuffer(resizable_buf, /*size=*/10, /*alignment=*/1)); + TF_LITE_MICRO_EXPECT_EQ(static_cast(10), + allocator.GetNonPersistentUsedBytes()); + + TF_LITE_MICRO_EXPECT_EQ( + allocator.ResizeBuffer(resizable_buf, /*size=*/1000, /*alignment=*/1), + kTfLiteOk); + TF_LITE_MICRO_EXPECT_EQ(static_cast(1000), + allocator.GetNonPersistentUsedBytes()); +} + +// Test allocate and deallocate temp buffer. +TF_LITE_MICRO_TEST(TestTempBuffer) { + constexpr size_t arena_size = 1024; + uint8_t arena[arena_size]; + tflite::NonPersistentArenaBufferAllocator allocator(arena, arena_size); + + constexpr size_t allocation_size = 100; + uint8_t* temp = allocator.AllocateTemp(/*size=*/allocation_size, + /*alignment=*/1); + TF_LITE_MICRO_EXPECT_EQ(allocation_size, + allocator.GetNonPersistentUsedBytes()); + + TF_LITE_MICRO_EXPECT_EQ(allocator.GetAvailableMemory(/*alignment=*/1), + arena_size - allocation_size); + + // Reset temp allocations and ensure GetAvailableMemory() is back to the + // starting size: + allocator.DeallocateTemp(temp); + allocator.ResetTempAllocations(); + + TF_LITE_MICRO_EXPECT_EQ(static_cast(0), + allocator.GetNonPersistentUsedBytes()); + TF_LITE_MICRO_EXPECT_EQ(allocator.GetAvailableMemory(/*alignment=*/1), + arena_size); +} + +// Resizable buffer cannot be allocated if there is still outstanding temp +// buffers. 
+TF_LITE_MICRO_TEST(TestAllocateResizeFailIfTempStillExists) { + constexpr size_t arena_size = 1024; + uint8_t arena[arena_size]; + tflite::NonPersistentArenaBufferAllocator allocator(arena, arena_size); + + constexpr size_t allocation_size = 100; + uint8_t* temp = allocator.AllocateTemp(/*size=*/allocation_size, + /*alignment=*/1); + // Deallocate does not free up temp buffer. + allocator.DeallocateTemp(temp); + + TF_LITE_MICRO_EXPECT(allocator.AllocateResizableBuffer(allocation_size, 1) == + nullptr); +} + +// Resizable buffer can be allocated if there are no outstanding temp buffers. +TF_LITE_MICRO_TEST(TestAllocateResizePassIfNoTemp) { + constexpr size_t arena_size = 1024; + uint8_t arena[arena_size]; + tflite::NonPersistentArenaBufferAllocator allocator(arena, arena_size); + + constexpr size_t allocation_size = 100; + uint8_t* temp = allocator.AllocateTemp(/*size=*/allocation_size, + /*alignment=*/1); + // Deallocate does not free up temp buffer. + allocator.DeallocateTemp(temp); + TF_LITE_MICRO_EXPECT_EQ(allocator.ResetTempAllocations(), kTfLiteOk); + + TF_LITE_MICRO_EXPECT(allocator.AllocateResizableBuffer(allocation_size, 1) == + arena); +} + +// Cannot allocate more than one resizable buffer. 
+TF_LITE_MICRO_TEST(TestAllocateResizableFailIfResizableExists) { + constexpr size_t arena_size = 1024; + uint8_t arena[arena_size]; + tflite::NonPersistentArenaBufferAllocator allocator(arena, arena_size); + + constexpr size_t allocation_size = 100; + TF_LITE_MICRO_EXPECT( + allocator.AllocateResizableBuffer(/*size=*/allocation_size, + /*alignment=*/1) != nullptr); + + TF_LITE_MICRO_EXPECT( + allocator.AllocateResizableBuffer(/*size=*/allocation_size, + /*alignment=*/1) == nullptr); +} + +// ResetTempAllocations() fail if there are still outstanding temp buffers +TF_LITE_MICRO_TEST(TestResetTempFailIfTempStillExists) { + constexpr size_t arena_size = 1024; + uint8_t arena[arena_size]; + tflite::NonPersistentArenaBufferAllocator allocator(arena, arena_size); + + constexpr size_t allocation_size = 100; + allocator.AllocateTemp(/*size=*/allocation_size, + /*alignment=*/1); + + TF_LITE_MICRO_EXPECT_EQ(allocator.ResetTempAllocations(), kTfLiteError); +} + +// Request more than allocated size for temp will fail +TF_LITE_MICRO_TEST(TestAllocateTempFailIfExceedAllowance) { + constexpr size_t arena_size = 1024; + uint8_t arena[arena_size]; + tflite::NonPersistentArenaBufferAllocator allocator(arena, arena_size); + + TF_LITE_MICRO_EXPECT(allocator.AllocateTemp(/*size=*/arena_size + 1, + /*alignment=*/1) == nullptr); +} + +// Request more than allocated size for resizable will fail +TF_LITE_MICRO_TEST(TestAllocateTempFailIfExceedAllowance) { + constexpr size_t arena_size = 1024; + uint8_t arena[arena_size]; + tflite::NonPersistentArenaBufferAllocator allocator(arena, arena_size); + + TF_LITE_MICRO_EXPECT(allocator.AllocateResizableBuffer( + /*size=*/arena_size + 1, /*alignment=*/1) == + nullptr); + + constexpr size_t allocation_size = 100; + uint8_t* resizable_buffer = + allocator.AllocateResizableBuffer(/*size=*/allocation_size, + /*alignment=*/1); + TF_LITE_MICRO_EXPECT(resizable_buffer == arena); + + TF_LITE_MICRO_EXPECT_EQ( + allocator.ResizeBuffer(resizable_buffer, 
/*size=*/arena_size + 1, + /*alignment=*/1), + kTfLiteError); +} + +// GetNonPersistentUsedBytes() reports memory for both resizable buffer and temp +// buffers. +TF_LITE_MICRO_TEST(TestGetNonPersistentUsedBytes) { + constexpr size_t arena_size = 1024; + uint8_t arena[arena_size]; + tflite::NonPersistentArenaBufferAllocator allocator(arena, arena_size); + + constexpr size_t allocation_size = 100; + TF_LITE_MICRO_EXPECT( + arena == allocator.AllocateResizableBuffer(/*size=*/allocation_size, + /*alignment=*/1)); + + TF_LITE_MICRO_EXPECT( + allocator.AllocateTemp(/*size=*/arena_size - allocation_size, + /*alignment=*/1) != nullptr); + + TF_LITE_MICRO_EXPECT_EQ(allocator.GetNonPersistentUsedBytes(), arena_size); +} + +TF_LITE_MICRO_TESTS_END diff --git a/tensorflow/lite/micro/arena_allocator/persistent_arena_buffer_allocator.cc b/tensorflow/lite/micro/arena_allocator/persistent_arena_buffer_allocator.cc new file mode 100644 index 0000000..a770bc9 --- /dev/null +++ b/tensorflow/lite/micro/arena_allocator/persistent_arena_buffer_allocator.cc @@ -0,0 +1,52 @@ +/* Copyright 2022 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/ +#include "tensorflow/lite/micro/arena_allocator/persistent_arena_buffer_allocator.h" + +#include "tensorflow/lite/micro/memory_helpers.h" +#include "tensorflow/lite/micro/micro_log.h" + +namespace tflite { + +PersistentArenaBufferAllocator::PersistentArenaBufferAllocator( + uint8_t* buffer, size_t buffer_size) + : buffer_head_(buffer), + buffer_tail_(buffer + buffer_size), + tail_temp_(buffer_tail_) {} + +PersistentArenaBufferAllocator::~PersistentArenaBufferAllocator() {} + +uint8_t* PersistentArenaBufferAllocator::AllocatePersistentBuffer( + size_t size, size_t alignment) { + uint8_t* const aligned_result = + AlignPointerDown(tail_temp_ - size, alignment); + if (aligned_result < buffer_head_) { +#ifndef TF_LITE_STRIP_ERROR_STRINGS + const size_t missing_memory = buffer_head_ - aligned_result; + MicroPrintf( + "Failed to allocate tail memory. Requested: %u, " + "available %u, missing: %u", + size, size - missing_memory, missing_memory); +#endif + return nullptr; + } + tail_temp_ = aligned_result; + return aligned_result; +} + +size_t PersistentArenaBufferAllocator::GetPersistentUsedBytes() const { + return buffer_tail_ - tail_temp_; +} + +} // namespace tflite diff --git a/tensorflow/lite/micro/arena_allocator/persistent_arena_buffer_allocator.h b/tensorflow/lite/micro/arena_allocator/persistent_arena_buffer_allocator.h new file mode 100644 index 0000000..2c8e3dc --- /dev/null +++ b/tensorflow/lite/micro/arena_allocator/persistent_arena_buffer_allocator.h @@ -0,0 +1,58 @@ +/* Copyright 2022 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. 
+You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ +#ifndef TENSORFLOW_LITE_MICRO_ARENA_ALLOCATOR_PERSISTENT_ARENA_BUFFER_ALLOCATOR_H_ +#define TENSORFLOW_LITE_MICRO_ARENA_ALLOCATOR_PERSISTENT_ARENA_BUFFER_ALLOCATOR_H_ + +#include +#include + +#include "tensorflow/lite/c/common.h" +#include "tensorflow/lite/micro/arena_allocator/ibuffer_allocator.h" +#include "tensorflow/lite/micro/compatibility.h" + +namespace tflite { + +// PersistentArenaBufferAllocator is an implementatation of +// IPersistentBufferAllocator interface on an arena that is dedicated for +// persistent buffers. +class PersistentArenaBufferAllocator : public IPersistentBufferAllocator { + public: + PersistentArenaBufferAllocator(uint8_t* buffer, size_t buffer_size); + virtual ~PersistentArenaBufferAllocator(); + + // Allocates persistent memory. The persistent buffer is never freed. + // Returns nullptr if errors occured. + uint8_t* AllocatePersistentBuffer(size_t size, size_t alignment) override; + + // Returns the size of all persistent allocations in bytes. + size_t GetPersistentUsedBytes() const override; + + TF_LITE_REMOVE_VIRTUAL_DELETE + private: + // The memory arena that this allocator manages. + uint8_t* const buffer_head_; + uint8_t* const buffer_tail_; + + // The whole region is split into two parts: + // tail_temp_ to buffer_tail_ contains allocated buffers; + // buffer_head_ to tail_temp_ - 1 belongs to still available spaces. + // So in essence, the allocated region grows from the bottom and emulates + // SingleArenaBufferAllocator's persistent part. 
+ uint8_t* tail_temp_; +}; + +} // namespace tflite + +#endif // TENSORFLOW_LITE_MICRO_ARENA_ALLOCATOR_PERSISTENT_ARENA_BUFFER_ALLOCATOR_H_ diff --git a/tensorflow/lite/micro/arena_allocator/persistent_arena_buffer_allocator_test.cc b/tensorflow/lite/micro/arena_allocator/persistent_arena_buffer_allocator_test.cc new file mode 100644 index 0000000..984b8a1 --- /dev/null +++ b/tensorflow/lite/micro/arena_allocator/persistent_arena_buffer_allocator_test.cc @@ -0,0 +1,97 @@ +/* Copyright 2022 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#include "tensorflow/lite/micro/arena_allocator/persistent_arena_buffer_allocator.h" + +#include + +#include "tensorflow/lite/micro/testing/micro_test.h" + +TF_LITE_MICRO_TESTS_BEGIN + +// Test that the right amount of memory are allocated. +TF_LITE_MICRO_TEST(TestGetPersistentUsedBytes) { + constexpr size_t arena_size = 1024; + uint8_t arena[arena_size]; + tflite::PersistentArenaBufferAllocator allocator(arena, arena_size); + + const size_t size1 = 10; + allocator.AllocatePersistentBuffer(size1, 1); + TF_LITE_MICRO_EXPECT_EQ(size1, allocator.GetPersistentUsedBytes()); + + const size_t size2 = 15; + allocator.AllocatePersistentBuffer(size2, 1); + + TF_LITE_MICRO_EXPECT_EQ(size1 + size2, allocator.GetPersistentUsedBytes()); +} + +// Test allocation shall fail if total memory exceeds the limit. 
+TF_LITE_MICRO_TEST(TestAllocatePersistBufferShallFailIfExceedLimit) { + constexpr size_t arena_size = 1024; + uint8_t arena[arena_size]; + tflite::PersistentArenaBufferAllocator allocator(arena, arena_size); + + const size_t size1 = 10; + uint8_t* persist1 = allocator.AllocatePersistentBuffer(size1, 1); + TF_LITE_MICRO_EXPECT(persist1 != nullptr); + + const size_t size2 = arena_size - size1 + 1; + uint8_t* persist2 = allocator.AllocatePersistentBuffer(size2, 1); + + TF_LITE_MICRO_EXPECT(persist2 == nullptr); +} + +// Test allocation shall pass if total memory does not exceed the limit. +TF_LITE_MICRO_TEST(TestAllocatePersistBufferShallPassIfWithinLimit) { + constexpr size_t arena_size = 1024; + uint8_t arena[arena_size]; + tflite::PersistentArenaBufferAllocator allocator(arena, arena_size); + + const size_t size1 = 10; + uint8_t* persist1 = allocator.AllocatePersistentBuffer(size1, 1); + TF_LITE_MICRO_EXPECT(persist1 != nullptr); + + const size_t size2 = arena_size - size1; + uint8_t* persist2 = allocator.AllocatePersistentBuffer(size2, 1); + + TF_LITE_MICRO_EXPECT(persist2 != nullptr); + TF_LITE_MICRO_EXPECT_EQ(arena_size, allocator.GetPersistentUsedBytes()); +} + +// Test alignment works. 
+TF_LITE_MICRO_TEST(TestAllocatePersistBufferAligns) { + constexpr size_t arena_size = 1024; + uint8_t arena[arena_size]; + tflite::PersistentArenaBufferAllocator allocator(arena, arena_size); + + const size_t size1 = 10; + const size_t alignment = 16; + uint8_t* persist1 = allocator.AllocatePersistentBuffer(size1, alignment); + TF_LITE_MICRO_EXPECT(persist1 != nullptr); + TF_LITE_MICRO_EXPECT_EQ( + (reinterpret_cast(persist1)) % alignment, + static_cast(0)); + TF_LITE_MICRO_EXPECT_GE(allocator.GetPersistentUsedBytes(), size1); + + const size_t size2 = 16; + uint8_t* persist2 = allocator.AllocatePersistentBuffer(size2, alignment); + TF_LITE_MICRO_EXPECT(persist2 != nullptr); + TF_LITE_MICRO_EXPECT_EQ( + (reinterpret_cast(persist2)) % alignment, + static_cast(0)); + TF_LITE_MICRO_EXPECT_EQ(static_cast(persist1 - persist2), size2); + TF_LITE_MICRO_EXPECT_GE(allocator.GetPersistentUsedBytes(), size1); +} +TF_LITE_MICRO_TESTS_END diff --git a/tensorflow/lite/micro/arena_allocator/recording_single_arena_buffer_allocator.cc b/tensorflow/lite/micro/arena_allocator/recording_single_arena_buffer_allocator.cc new file mode 100644 index 0000000..e21e364 --- /dev/null +++ b/tensorflow/lite/micro/arena_allocator/recording_single_arena_buffer_allocator.cc @@ -0,0 +1,85 @@ +/* Copyright 2020 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/ + +#include "tensorflow/lite/micro/arena_allocator/recording_single_arena_buffer_allocator.h" + +#include + +#include "tensorflow/lite/kernels/internal/compatibility.h" + +namespace tflite { + +RecordingSingleArenaBufferAllocator::RecordingSingleArenaBufferAllocator( + uint8_t* buffer_head, size_t buffer_size) + : SingleArenaBufferAllocator(buffer_head, buffer_size), + requested_head_bytes_(0), + requested_tail_bytes_(0), + used_bytes_(0), + alloc_count_(0) {} + +RecordingSingleArenaBufferAllocator::~RecordingSingleArenaBufferAllocator() {} + +RecordingSingleArenaBufferAllocator* +RecordingSingleArenaBufferAllocator::Create(uint8_t* buffer_head, + size_t buffer_size) { + TFLITE_DCHECK(buffer_head != nullptr); + RecordingSingleArenaBufferAllocator tmp = + RecordingSingleArenaBufferAllocator(buffer_head, buffer_size); + + uint8_t* allocator_buffer = tmp.AllocatePersistentBuffer( + sizeof(RecordingSingleArenaBufferAllocator), + alignof(RecordingSingleArenaBufferAllocator)); + // Use the default copy constructor to populate internal states. 
+ return new (allocator_buffer) RecordingSingleArenaBufferAllocator(tmp); +} + +size_t RecordingSingleArenaBufferAllocator::GetRequestedBytes() const { + return requested_head_bytes_ + requested_tail_bytes_; +} + +size_t RecordingSingleArenaBufferAllocator::GetUsedBytes() const { + return used_bytes_; +} + +size_t RecordingSingleArenaBufferAllocator::GetAllocatedCount() const { + return alloc_count_; +} + +TfLiteStatus RecordingSingleArenaBufferAllocator::ResizeBuffer( + uint8_t* resizable_buf, size_t size, size_t alignment) { + const uint8_t* previous_head = head(); + TfLiteStatus status = + SingleArenaBufferAllocator::ResizeBuffer(resizable_buf, size, alignment); + if (status == kTfLiteOk) { + used_bytes_ += head() - previous_head; + requested_head_bytes_ = size; + } + return status; +} + +uint8_t* RecordingSingleArenaBufferAllocator::AllocatePersistentBuffer( + size_t size, size_t alignment) { + const uint8_t* previous_tail = tail(); + uint8_t* result = + SingleArenaBufferAllocator::AllocatePersistentBuffer(size, alignment); + if (result != nullptr) { + used_bytes_ += previous_tail - tail(); + requested_tail_bytes_ += size; + alloc_count_++; + } + return result; +} + +} // namespace tflite diff --git a/tensorflow/lite/micro/arena_allocator/recording_single_arena_buffer_allocator.h b/tensorflow/lite/micro/arena_allocator/recording_single_arena_buffer_allocator.h new file mode 100644 index 0000000..94e55a3 --- /dev/null +++ b/tensorflow/lite/micro/arena_allocator/recording_single_arena_buffer_allocator.h @@ -0,0 +1,63 @@ +/* Copyright 2020 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. 
+You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#ifndef TENSORFLOW_LITE_MICRO_ARENA_ALLOCATOR_RECORDING_SINGLE_ARENA_BUFFER_ALLOCATOR_H_ +#define TENSORFLOW_LITE_MICRO_ARENA_ALLOCATOR_RECORDING_SINGLE_ARENA_BUFFER_ALLOCATOR_H_ + +#include "tensorflow/lite/micro/arena_allocator/single_arena_buffer_allocator.h" +#include "tensorflow/lite/micro/compatibility.h" + +namespace tflite { + +// Utility class used to log allocations of a SingleArenaBufferAllocator. Should +// only be used in debug/evaluation settings or unit tests to evaluate +// allocation usage. +class RecordingSingleArenaBufferAllocator : public SingleArenaBufferAllocator { + public: + RecordingSingleArenaBufferAllocator(uint8_t* buffer_head, size_t buffer_size); + // TODO(b/157615197): Cleanup constructors/destructor and use factory + // functions. + ~RecordingSingleArenaBufferAllocator() override; + + static RecordingSingleArenaBufferAllocator* Create(uint8_t* buffer_head, + size_t buffer_size); + + // Returns the number of bytes requested from the head or tail. + size_t GetRequestedBytes() const; + + // Returns the number of bytes actually allocated from the head or tail. This + // value will be >= to the number of requested bytes due to padding and + // alignment. + size_t GetUsedBytes() const; + + // Returns the number of alloc calls from the head or tail. 
+ size_t GetAllocatedCount() const; + + TfLiteStatus ResizeBuffer(uint8_t* resizable_buf, size_t size, + size_t alignment) override; + uint8_t* AllocatePersistentBuffer(size_t size, size_t alignment) override; + + private: + size_t requested_head_bytes_; + size_t requested_tail_bytes_; + size_t used_bytes_; + size_t alloc_count_; + + TF_LITE_REMOVE_VIRTUAL_DELETE +}; + +} // namespace tflite + +#endif // TENSORFLOW_LITE_MICRO_ARENA_ALLOCATOR_RECORDING_SINGLE_ARENA_BUFFER_ALLOCATOR_H_ diff --git a/tensorflow/lite/micro/arena_allocator/recording_single_arena_buffer_allocator_test.cc b/tensorflow/lite/micro/arena_allocator/recording_single_arena_buffer_allocator_test.cc new file mode 100644 index 0000000..a25ad50 --- /dev/null +++ b/tensorflow/lite/micro/arena_allocator/recording_single_arena_buffer_allocator_test.cc @@ -0,0 +1,146 @@ +/* Copyright 2020 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/ + +#include "tensorflow/lite/micro/arena_allocator/recording_single_arena_buffer_allocator.h" + +#include + +#include "tensorflow/lite/micro/test_helpers.h" +#include "tensorflow/lite/micro/testing/micro_test.h" + +TF_LITE_MICRO_TESTS_BEGIN + +TF_LITE_MICRO_TEST(TestRecordsTailAllocations) { + constexpr size_t arena_size = 1024; + uint8_t arena[arena_size]; + tflite::RecordingSingleArenaBufferAllocator allocator(arena, arena_size); + + uint8_t* result = + allocator.AllocatePersistentBuffer(/*size=*/10, /*alignment=*/1); + TF_LITE_MICRO_EXPECT(result != nullptr); + TF_LITE_MICRO_EXPECT_EQ(allocator.GetUsedBytes(), static_cast(10)); + TF_LITE_MICRO_EXPECT_EQ(allocator.GetRequestedBytes(), + static_cast(10)); + TF_LITE_MICRO_EXPECT_EQ(allocator.GetAllocatedCount(), + static_cast(1)); + + result = allocator.AllocatePersistentBuffer(/*size=*/20, /*alignment=*/1); + TF_LITE_MICRO_EXPECT(result != nullptr); + TF_LITE_MICRO_EXPECT_EQ(allocator.GetUsedBytes(), static_cast(30)); + TF_LITE_MICRO_EXPECT_EQ(allocator.GetRequestedBytes(), + static_cast(30)); + TF_LITE_MICRO_EXPECT_EQ(allocator.GetAllocatedCount(), + static_cast(2)); +} + +TF_LITE_MICRO_TEST(TestRecordsMisalignedTailAllocations) { + constexpr size_t arena_size = 1024; + uint8_t arena[arena_size]; + tflite::RecordingSingleArenaBufferAllocator allocator(arena, arena_size); + + uint8_t* result = + allocator.AllocatePersistentBuffer(/*size=*/10, /*alignment=*/12); + TF_LITE_MICRO_EXPECT(result != nullptr); + // Validate used bytes in 8 byte range that can included alignment of 12: + TF_LITE_MICRO_EXPECT_GE(allocator.GetUsedBytes(), static_cast(10)); + TF_LITE_MICRO_EXPECT_LE(allocator.GetUsedBytes(), static_cast(20)); + TF_LITE_MICRO_EXPECT_EQ(allocator.GetRequestedBytes(), + static_cast(10)); + TF_LITE_MICRO_EXPECT_EQ(allocator.GetAllocatedCount(), + static_cast(1)); +} + +TF_LITE_MICRO_TEST(TestDoesNotRecordFailedTailAllocations) { + 
constexpr size_t arena_size = 1024; + uint8_t arena[arena_size]; + tflite::RecordingSingleArenaBufferAllocator allocator(arena, arena_size); + + uint8_t* result = + allocator.AllocatePersistentBuffer(/*size=*/2048, /*alignment=*/1); + TF_LITE_MICRO_EXPECT(result == nullptr); + TF_LITE_MICRO_EXPECT_EQ(allocator.GetUsedBytes(), static_cast(0)); + TF_LITE_MICRO_EXPECT_EQ(allocator.GetRequestedBytes(), + static_cast(0)); + TF_LITE_MICRO_EXPECT_EQ(allocator.GetAllocatedCount(), + static_cast(0)); +} + +TF_LITE_MICRO_TEST(TestRecordsHeadSizeAdjustment) { + constexpr size_t arena_size = 1024; + uint8_t arena[arena_size]; + tflite::RecordingSingleArenaBufferAllocator allocator(arena, arena_size); + + uint8_t* resizable_buf = allocator.AllocateResizableBuffer(0, 1); + TF_LITE_MICRO_EXPECT(resizable_buf != nullptr); + + TF_LITE_MICRO_EXPECT_EQ( + kTfLiteOk, + allocator.ResizeBuffer(resizable_buf, /*size=*/5, /*alignment=*/1)); + TF_LITE_MICRO_EXPECT_EQ(allocator.GetUsedBytes(), static_cast(5)); + TF_LITE_MICRO_EXPECT_EQ(allocator.GetRequestedBytes(), + static_cast(5)); + // Head adjustments do not count as an allocation: + TF_LITE_MICRO_EXPECT_EQ(allocator.GetAllocatedCount(), + static_cast(0)); + + uint8_t* result = + allocator.AllocatePersistentBuffer(/*size=*/15, /*alignment=*/1); + TF_LITE_MICRO_EXPECT(result != nullptr); + TF_LITE_MICRO_EXPECT_EQ(allocator.GetUsedBytes(), static_cast(20)); + TF_LITE_MICRO_EXPECT_EQ(allocator.GetRequestedBytes(), + static_cast(20)); + TF_LITE_MICRO_EXPECT_EQ(allocator.GetAllocatedCount(), + static_cast(1)); +} + +TF_LITE_MICRO_TEST(TestRecordsMisalignedHeadSizeAdjustments) { + constexpr size_t arena_size = 1024; + uint8_t arena[arena_size]; + tflite::RecordingSingleArenaBufferAllocator allocator(arena, arena_size); + uint8_t* resizable_buf = allocator.AllocateResizableBuffer(0, 12); + TF_LITE_MICRO_EXPECT(resizable_buf != nullptr); + + TF_LITE_MICRO_EXPECT_EQ( + kTfLiteOk, + allocator.ResizeBuffer(resizable_buf, /*size=*/10, 
/*alignment=*/12)); + // Validate used bytes in 8 byte range that can included alignment of 12: + TF_LITE_MICRO_EXPECT_GE(allocator.GetUsedBytes(), static_cast(10)); + TF_LITE_MICRO_EXPECT_LE(allocator.GetUsedBytes(), static_cast(20)); + TF_LITE_MICRO_EXPECT_EQ(allocator.GetRequestedBytes(), + static_cast(10)); + // Head adjustments do not count as an allocation: + TF_LITE_MICRO_EXPECT_EQ(allocator.GetAllocatedCount(), + static_cast(0)); +} + +TF_LITE_MICRO_TEST(TestDoesNotRecordFailedTailAllocations) { + constexpr size_t arena_size = 1024; + uint8_t arena[arena_size]; + tflite::RecordingSingleArenaBufferAllocator allocator(arena, arena_size); + + uint8_t* resizable_buf = allocator.AllocateResizableBuffer(0, 1); + TF_LITE_MICRO_EXPECT(resizable_buf != nullptr); + + TF_LITE_MICRO_EXPECT_EQ( + kTfLiteError, allocator.ResizeBuffer(resizable_buf, + /*size=*/2048, /*alignment=*/1)); + TF_LITE_MICRO_EXPECT_EQ(allocator.GetUsedBytes(), static_cast(0)); + TF_LITE_MICRO_EXPECT_EQ(allocator.GetRequestedBytes(), + static_cast(0)); + TF_LITE_MICRO_EXPECT_EQ(allocator.GetAllocatedCount(), + static_cast(0)); +} + +TF_LITE_MICRO_TESTS_END diff --git a/tensorflow/lite/micro/arena_allocator/single_arena_buffer_allocator.cc b/tensorflow/lite/micro/arena_allocator/single_arena_buffer_allocator.cc new file mode 100644 index 0000000..8655cfd --- /dev/null +++ b/tensorflow/lite/micro/arena_allocator/single_arena_buffer_allocator.cc @@ -0,0 +1,199 @@ +/* Copyright 2020 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#include "tensorflow/lite/micro/arena_allocator/single_arena_buffer_allocator.h" + +#include +#include +#include + +#include "tensorflow/lite/c/c_api_types.h" +#include "tensorflow/lite/c/common.h" +#include "tensorflow/lite/kernels/internal/compatibility.h" +#include "tensorflow/lite/kernels/op_macros.h" +#include "tensorflow/lite/micro/memory_helpers.h" +#include "tensorflow/lite/micro/micro_log.h" + +namespace tflite { + +SingleArenaBufferAllocator::SingleArenaBufferAllocator(uint8_t* buffer_head, + uint8_t* buffer_tail) + : buffer_head_(buffer_head), + buffer_tail_(buffer_tail), + head_(buffer_head), + tail_(buffer_tail), + temp_(buffer_head_) {} + +SingleArenaBufferAllocator::SingleArenaBufferAllocator(uint8_t* buffer, + size_t buffer_size) + : SingleArenaBufferAllocator(buffer, buffer + buffer_size) {} + +/* static */ +SingleArenaBufferAllocator* SingleArenaBufferAllocator::Create( + uint8_t* buffer_head, size_t buffer_size) { + TFLITE_DCHECK(buffer_head != nullptr); + SingleArenaBufferAllocator tmp = + SingleArenaBufferAllocator(buffer_head, buffer_size); + + // Allocate enough bytes from the buffer to create a + // SingleArenaBufferAllocator. The new instance will use the current adjusted + // tail buffer from the tmp allocator instance. + uint8_t* allocator_buffer = tmp.AllocatePersistentBuffer( + sizeof(SingleArenaBufferAllocator), alignof(SingleArenaBufferAllocator)); + // Use the default copy constructor to populate internal states. + return new (allocator_buffer) SingleArenaBufferAllocator(tmp); +} + +SingleArenaBufferAllocator::~SingleArenaBufferAllocator() {} + +uint8_t* SingleArenaBufferAllocator::AllocateResizableBuffer(size_t size, + size_t alignment) { + // Only supports one resizable buffer, which starts at the buffer head. 
+ uint8_t* expect_resizable_buf = AlignPointerUp(buffer_head_, alignment); + if (ResizeBuffer(expect_resizable_buf, size, alignment) == kTfLiteOk) { + return expect_resizable_buf; + } + return nullptr; +} + +TfLiteStatus SingleArenaBufferAllocator::DeallocateResizableBuffer( + uint8_t* resizable_buf) { + return ResizeBuffer(resizable_buf, 0, 1); +} + +TfLiteStatus SingleArenaBufferAllocator::ReserveNonPersistentOverlayMemory( + size_t size, size_t alignment) { + uint8_t* expect_resizable_buf = AlignPointerUp(buffer_head_, alignment); + return ResizeBuffer(expect_resizable_buf, size, alignment); +} + +TfLiteStatus SingleArenaBufferAllocator::ResizeBuffer(uint8_t* resizable_buf, + size_t size, + size_t alignment) { + // Only supports one resizable buffer, which starts at the buffer head. + uint8_t* expect_resizable_buf = AlignPointerUp(buffer_head_, alignment); + if (head_ != temp_ || resizable_buf != expect_resizable_buf) { + MicroPrintf( + "Internal error: either buffer is not resizable or " + "ResetTempAllocations() is not called before ResizeBuffer()."); + return kTfLiteError; + } + + uint8_t* const aligned_result = AlignPointerUp(buffer_head_, alignment); + const size_t available_memory = tail_ - aligned_result; + if (available_memory < size) { + MicroPrintf( + "Failed to resize buffer. Requested: %u, available %u, missing: %u", + size, available_memory, size - available_memory); + return kTfLiteError; + } + head_ = aligned_result + size; + temp_ = head_; + + return kTfLiteOk; +} + +uint8_t* SingleArenaBufferAllocator::AllocatePersistentBuffer( + size_t size, size_t alignment) { + uint8_t* const aligned_result = AlignPointerDown(tail_ - size, alignment); + if (aligned_result < head_) { +#ifndef TF_LITE_STRIP_ERROR_STRINGS + const size_t missing_memory = head_ - aligned_result; + MicroPrintf( + "Failed to allocate tail memory. 
Requested: %u, " + "available %u, missing: %u", + size, size - missing_memory, missing_memory); +#endif + return nullptr; + } + tail_ = aligned_result; + return aligned_result; +} + +uint8_t* SingleArenaBufferAllocator::AllocateTemp(size_t size, + size_t alignment) { + uint8_t* const aligned_result = AlignPointerUp(temp_, alignment); + const size_t available_memory = tail_ - aligned_result; + if (available_memory < size) { + MicroPrintf( + "Failed to allocate temp memory. Requested: %u, " + "available %u, missing: %u", + size, available_memory, size - available_memory); + return nullptr; + } + temp_ = aligned_result + size; + temp_buffer_ptr_check_sum_ ^= (reinterpret_cast(aligned_result)); + temp_buffer_count_++; + return aligned_result; +} + +void SingleArenaBufferAllocator::DeallocateTemp(uint8_t* temp_buf) { + temp_buffer_ptr_check_sum_ ^= (reinterpret_cast(temp_buf)); + temp_buffer_count_--; +} + +bool SingleArenaBufferAllocator::IsAllTempDeallocated() { + if (temp_buffer_count_ != 0 || temp_buffer_ptr_check_sum_ != 0) { + MicroPrintf( + "Number of allocated temp buffers: %d. 
Checksum passing status: %d", + temp_buffer_count_, !temp_buffer_ptr_check_sum_); + return false; + } + return true; +} + +TfLiteStatus SingleArenaBufferAllocator::ResetTempAllocations() { + // TODO(b/209453859): enable error check based on IsAllTempDeallocated after + // all AllocateTemp have been paird with DeallocateTemp + if (!IsAllTempDeallocated()) { + MicroPrintf( + "All temp buffers must be freed before calling ResetTempAllocations()"); + return kTfLiteError; + } + temp_ = head_; + return kTfLiteOk; +} + +uint8_t* SingleArenaBufferAllocator::GetOverlayMemoryAddress() const { + return buffer_head_; +} + +size_t SingleArenaBufferAllocator::GetNonPersistentUsedBytes() const { + return std::max(head_ - buffer_head_, temp_ - buffer_head_); +} + +size_t SingleArenaBufferAllocator::GetPersistentUsedBytes() const { + return buffer_tail_ - tail_; +} + +size_t SingleArenaBufferAllocator::GetAvailableMemory(size_t alignment) const { + uint8_t* const aligned_temp = AlignPointerUp(temp_, alignment); + uint8_t* const aligned_tail = AlignPointerDown(tail_, alignment); + return aligned_tail - aligned_temp; +} + +size_t SingleArenaBufferAllocator::GetUsedBytes() const { + return GetPersistentUsedBytes() + GetNonPersistentUsedBytes(); +} + +size_t SingleArenaBufferAllocator::GetBufferSize() const { + return buffer_tail_ - buffer_head_; +} + +uint8_t* SingleArenaBufferAllocator::head() const { return head_; } + +uint8_t* SingleArenaBufferAllocator::tail() const { return tail_; } + +} // namespace tflite diff --git a/tensorflow/lite/micro/arena_allocator/single_arena_buffer_allocator.h b/tensorflow/lite/micro/arena_allocator/single_arena_buffer_allocator.h new file mode 100644 index 0000000..a2e3958 --- /dev/null +++ b/tensorflow/lite/micro/arena_allocator/single_arena_buffer_allocator.h @@ -0,0 +1,144 @@ +/* Copyright 2020 The TensorFlow Authors. All Rights Reserved. 
+ +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#ifndef TENSORFLOW_LITE_MICRO_ARENA_ALLOCATOR_SINGLE_ARENA_BUFFER_ALLOCATOR_H_ +#define TENSORFLOW_LITE_MICRO_ARENA_ALLOCATOR_SINGLE_ARENA_BUFFER_ALLOCATOR_H_ + +#include +#include + +#include "tensorflow/lite/c/common.h" +#include "tensorflow/lite/micro/arena_allocator/ibuffer_allocator.h" +#include "tensorflow/lite/micro/compatibility.h" + +namespace tflite { + +// TODO(petewarden): This allocator never frees up or reuses any memory, even +// though we have enough information about lifetimes of the tensors to do so. +// This makes it pretty wasteful, so we should use a more intelligent method. +class SingleArenaBufferAllocator : public INonPersistentBufferAllocator, + public IPersistentBufferAllocator { + public: + // TODO(b/157615197): Cleanup constructors/destructor and use factory + // functions. + SingleArenaBufferAllocator(uint8_t* buffer_head, uint8_t* buffer_tail); + SingleArenaBufferAllocator(uint8_t* buffer, size_t buffer_size); + virtual ~SingleArenaBufferAllocator(); + + // Creates a new SingleArenaBufferAllocator from a given buffer head and size. + static SingleArenaBufferAllocator* Create(uint8_t* buffer_head, + size_t buffer_size); + + // Resizes a buffer that is previously returned by the + // AllocateResizableBuffer. In current implementation, it Adjusts the head + // (lowest address and moving upwards) memory allocation to a given size. 
+ // Calls to this method will also invalidate all temporary allocation values + // (it sets the location of temp space at the end of the head section). This + // call will fail if a chain of allocations through AllocateTemp() have not + // been cleaned up with a call to ResetTempAllocations(). + virtual TfLiteStatus ResizeBuffer(uint8_t* resizable_buf, size_t size, + size_t alignment) override; + + // Returns a buffer that is resizable via ResizeBuffer(). Only one + // resizable buffer is currently supported. + virtual uint8_t* AllocateResizableBuffer(size_t size, + size_t alignment) override; + + // Frees up the memory occupied by the resizable buffer + virtual TfLiteStatus DeallocateResizableBuffer( + uint8_t* resizable_buf) override; + + // Reserves the non-persistent memory that is planned by the memory planner. + virtual TfLiteStatus ReserveNonPersistentOverlayMemory( + size_t size, size_t alignment) override; + + // Allocates persistent memory starting at the tail of the arena (highest + // address and moving downwards). + virtual uint8_t* AllocatePersistentBuffer(size_t size, + size_t alignment) override; + + // Allocates a temporary buffer from the head of the arena (lowest address and + // moving upwards) but does not update the actual head allocation size or + // position. The returned buffer is guaranteed until either + // ResetTempAllocations() is called or another call to AllocateFromHead(). + // Repeat calls to this function will create a chain of temp allocations. All + // calls to AllocateTemp() must end with a call to ResetTempAllocations(). If + // AllocateFromHead() is called before a call to ResetTempAllocations(), it + // will fail with an error message. + virtual uint8_t* AllocateTemp(size_t size, size_t alignment) override; + + // Signals that a temporary buffer is no longer needed. This is currently for + // book-keeping purpose and the memory regions are not immediately available + // for re-use. 
The deallocated memory region are only reclaimed after + // ResetTempAllocations is called as it is right now. + virtual void DeallocateTemp(uint8_t* buf) override; + + // Returns true if all temporary buffers are already deallocated. + virtual bool IsAllTempDeallocated() override; + + // Resets a chain of temporary allocations back to the current head of the + // arena (lowest address). + virtual TfLiteStatus ResetTempAllocations() override; + + // Returns a pointer to the buffer currently assigned to the head section. + // This buffer is set by calling SetHeadSize(). + uint8_t* GetOverlayMemoryAddress() const override; + + // Returns the size of the head section in bytes. + size_t GetNonPersistentUsedBytes() const override; + + // Returns the size of all allocations in the tail section in bytes. + size_t GetPersistentUsedBytes() const override; + + // Returns the number of bytes available with a given alignment. This number + // takes in account any temporary allocations. + size_t GetAvailableMemory(size_t alignment) const override; + + // Returns the number of used bytes in the allocator. This number takes in + // account any temporary allocations. + size_t GetUsedBytes() const; + + TF_LITE_REMOVE_VIRTUAL_DELETE + + protected: + // Returns a pointer to the current end of the head buffer. + uint8_t* head() const; + + // Returns a pointer to the current end of the tail buffer. + uint8_t* tail() const; + + private: + size_t GetBufferSize() const; + uint8_t* buffer_head_; + uint8_t* buffer_tail_; + uint8_t* head_; + uint8_t* tail_; + uint8_t* temp_; + + // The combination of the checksum of outstanding temporary buffer pointers + // AND the count of outstanding temporary buffer provide a low cost mechanism + // to audit temporary buffers' allocation and deallocation. + // + // XOR Check sum for outstanding temp buffers. + // If all temp buffers are deallocated OR no temp buffers are allocated, + // temp_buffer_ptr_check_sum_ == nullptr. 
+ intptr_t temp_buffer_ptr_check_sum_ = 0; + // Count of outstanding temp buffers. + int temp_buffer_count_ = 0; +}; + +} // namespace tflite + +#endif // TENSORFLOW_LITE_MICRO_ARENA_ALLOCATOR_SINGLE_ARENA_BUFFER_ALLOCATOR_H_ diff --git a/tensorflow/lite/micro/arena_allocator/single_arena_buffer_allocator_test.cc b/tensorflow/lite/micro/arena_allocator/single_arena_buffer_allocator_test.cc new file mode 100644 index 0000000..9779c4e --- /dev/null +++ b/tensorflow/lite/micro/arena_allocator/single_arena_buffer_allocator_test.cc @@ -0,0 +1,307 @@ +/* Copyright 2020 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/ + +#include "tensorflow/lite/micro/arena_allocator/single_arena_buffer_allocator.h" + +#include + +#include "tensorflow/lite/micro/test_helpers.h" +#include "tensorflow/lite/micro/testing/micro_test.h" + +TF_LITE_MICRO_TESTS_BEGIN + +TF_LITE_MICRO_TEST(TestEnsureHeadSizeSimpleAlignment) { + constexpr size_t arena_size = 1024; + uint8_t arena[arena_size]; + tflite::SingleArenaBufferAllocator allocator(arena, arena_size); + + uint8_t* resizable_buf = allocator.AllocateResizableBuffer(0, 1); + TF_LITE_MICRO_EXPECT(resizable_buf != nullptr); + + TF_LITE_MICRO_EXPECT_EQ( + kTfLiteOk, + allocator.ResizeBuffer(resizable_buf, /*size=*/100, /*alignment=*/1)); + TF_LITE_MICRO_EXPECT_EQ(static_cast(100), + allocator.GetNonPersistentUsedBytes()); + + TF_LITE_MICRO_EXPECT_EQ( + kTfLiteOk, + allocator.ResizeBuffer(resizable_buf, /*size=*/10, /*alignment=*/1)); + TF_LITE_MICRO_EXPECT_EQ(static_cast(10), + allocator.GetNonPersistentUsedBytes()); + + TF_LITE_MICRO_EXPECT_EQ( + kTfLiteOk, + allocator.ResizeBuffer(resizable_buf, /*size=*/1000, /*alignment=*/1)); + TF_LITE_MICRO_EXPECT_EQ(static_cast(1000), + allocator.GetNonPersistentUsedBytes()); +} + +TF_LITE_MICRO_TEST(TestAdjustHeadSizeMisalignment) { + constexpr size_t arena_size = 1024; + uint8_t arena[arena_size]; + tflite::SingleArenaBufferAllocator allocator(arena, arena_size); + + uint8_t* resizable_buf = allocator.AllocateResizableBuffer(0, 12); + TF_LITE_MICRO_EXPECT(resizable_buf != nullptr); + + // First head adjustment of 100 bytes (aligned 12): + TF_LITE_MICRO_EXPECT_EQ( + kTfLiteOk, + allocator.ResizeBuffer(resizable_buf, /*size=*/100, /*alignment=*/12)); + + // Offset alignment of 12 can lead to allocation within 8 byte range of + // requested bytes based to arena alignment at runtime: + TF_LITE_MICRO_EXPECT_GE(allocator.GetNonPersistentUsedBytes(), 100); + TF_LITE_MICRO_EXPECT_LE(allocator.GetNonPersistentUsedBytes(), 100 + 11); + + 
TF_LITE_MICRO_EXPECT_EQ( + kTfLiteOk, + allocator.ResizeBuffer(resizable_buf, /*size=*/10, /*alignment=*/12)); + TF_LITE_MICRO_EXPECT_GE(allocator.GetNonPersistentUsedBytes(), 10); + TF_LITE_MICRO_EXPECT_LE(allocator.GetNonPersistentUsedBytes(), 100 + 11); + + TF_LITE_MICRO_EXPECT_EQ( + kTfLiteOk, + allocator.ResizeBuffer(resizable_buf, /*size=*/1000, /*alignment=*/12)); + TF_LITE_MICRO_EXPECT_GE(allocator.GetNonPersistentUsedBytes(), 1000); + TF_LITE_MICRO_EXPECT_LE(allocator.GetNonPersistentUsedBytes(), 1000 + 11); +} + +TF_LITE_MICRO_TEST(TestAdjustHeadSizeMisalignedHandlesCorrectBytesAvailable) { + constexpr size_t arena_size = 1024; + uint8_t arena[arena_size]; + tflite::SingleArenaBufferAllocator allocator(arena, arena_size); + + uint8_t* resizable_buf = allocator.AllocateResizableBuffer(0, 12); + TF_LITE_MICRO_EXPECT(resizable_buf != nullptr); + + // First head adjustment of 100 bytes (aligned 12): + TF_LITE_MICRO_EXPECT_EQ( + kTfLiteOk, + allocator.ResizeBuffer(resizable_buf, /*size=*/100, /*alignment=*/12)); + + // allocator.GetAvailableMemory() should also report the actual amount of + // memory available based on a requested offset (12): + size_t aligned_available_bytes = + allocator.GetAvailableMemory(/*alignment=*/12); + TF_LITE_MICRO_EXPECT_LE(aligned_available_bytes, arena_size - 100); + TF_LITE_MICRO_EXPECT_GE(aligned_available_bytes, arena_size - 100 - 24); + + TF_LITE_MICRO_EXPECT_EQ( + kTfLiteOk, + allocator.ResizeBuffer(resizable_buf, /*size=*/10, /*alignment=*/12)); + aligned_available_bytes = allocator.GetAvailableMemory(/*alignment=*/12); + + TF_LITE_MICRO_EXPECT_LE(aligned_available_bytes, arena_size - 10); + TF_LITE_MICRO_EXPECT_GE(aligned_available_bytes, arena_size - 10 - 24); + + TF_LITE_MICRO_EXPECT_EQ( + kTfLiteOk, + allocator.ResizeBuffer(resizable_buf, /*size=*/1000, /*alignment=*/12)); + aligned_available_bytes = allocator.GetAvailableMemory(/*alignment=*/12); + TF_LITE_MICRO_EXPECT_LE(aligned_available_bytes, arena_size - 1000); + 
TF_LITE_MICRO_EXPECT_GE(aligned_available_bytes, arena_size - 1000 - 24); +} + +TF_LITE_MICRO_TEST(TestGetAvailableMemory) { + constexpr size_t arena_size = 1024; + uint8_t arena[arena_size]; + tflite::SingleArenaBufferAllocator allocator(arena, arena_size); + + uint8_t* resizable_buf = allocator.AllocateResizableBuffer(0, 1); + TF_LITE_MICRO_EXPECT(resizable_buf != nullptr); + + constexpr size_t allocation_size = 100; + allocator.ResizeBuffer(resizable_buf, /*size=*/allocation_size, + /*alignment=*/1); + allocator.AllocatePersistentBuffer(/*size=*/allocation_size, + /*alignment=*/1); + + TF_LITE_MICRO_EXPECT_EQ(allocator.GetAvailableMemory(/*alignment=*/1), + arena_size - allocation_size * 2); +} + +TF_LITE_MICRO_TEST(TestGetAvailableMemoryWithTempAllocations) { + constexpr size_t arena_size = 1024; + uint8_t arena[arena_size]; + tflite::SingleArenaBufferAllocator allocator(arena, arena_size); + + constexpr size_t allocation_size = 100; + uint8_t* temp = allocator.AllocateTemp(/*size=*/allocation_size, + /*alignment=*/1); + + TF_LITE_MICRO_EXPECT_EQ(allocator.GetAvailableMemory(/*alignment=*/1), + arena_size - allocation_size); + + // Reset temp allocations and ensure GetAvailableMemory() is back to the + // starting size: + allocator.DeallocateTemp(temp); + allocator.ResetTempAllocations(); + + TF_LITE_MICRO_EXPECT_EQ(allocator.GetAvailableMemory(/*alignment=*/1), + arena_size); +} + +TF_LITE_MICRO_TEST(TestGetUsedBytes) { + constexpr size_t arena_size = 1024; + uint8_t arena[arena_size]; + tflite::SingleArenaBufferAllocator allocator(arena, arena_size); + TF_LITE_MICRO_EXPECT_EQ(allocator.GetUsedBytes(), static_cast(0)); + uint8_t* resizable_buf = allocator.AllocateResizableBuffer(0, 1); + TF_LITE_MICRO_EXPECT(resizable_buf != nullptr); + + constexpr size_t allocation_size = 100; + allocator.ResizeBuffer(resizable_buf, /*size=*/allocation_size, + /*alignment=*/1); + allocator.AllocatePersistentBuffer(/*size=*/allocation_size, + /*alignment=*/1); + + 
TF_LITE_MICRO_EXPECT_EQ(allocator.GetUsedBytes(), allocation_size * 2); +} + +TF_LITE_MICRO_TEST(TestGetUsedBytesTempAllocations) { + constexpr size_t arena_size = 1024; + uint8_t arena[arena_size]; + tflite::SingleArenaBufferAllocator allocator(arena, arena_size); + + constexpr size_t allocation_size = 100; + uint8_t* temp = allocator.AllocateTemp(/*size=*/allocation_size, + /*alignment=*/1); + + TF_LITE_MICRO_EXPECT_EQ(allocator.GetUsedBytes(), allocation_size); + + // Reset temp allocations and ensure GetUsedBytes() is back to the starting + // size: + allocator.DeallocateTemp(temp); + allocator.ResetTempAllocations(); + + TF_LITE_MICRO_EXPECT_EQ(allocator.GetUsedBytes(), static_cast(0)); +} + +TF_LITE_MICRO_TEST(TestJustFits) { + constexpr size_t arena_size = 1024; + uint8_t arena[arena_size]; + tflite::SingleArenaBufferAllocator allocator(arena, arena_size); + + uint8_t* result = allocator.AllocatePersistentBuffer(arena_size, 1); + TF_LITE_MICRO_EXPECT(nullptr != result); +} + +TF_LITE_MICRO_TEST(TestAligned) { + constexpr size_t arena_size = 1024; + uint8_t arena[arena_size]; + tflite::SingleArenaBufferAllocator allocator(arena, arena_size); + + uint8_t* result = allocator.AllocatePersistentBuffer(1, 1); + TF_LITE_MICRO_EXPECT(nullptr != result); + + result = allocator.AllocatePersistentBuffer(16, 4); + TF_LITE_MICRO_EXPECT(nullptr != result); + TF_LITE_MICRO_EXPECT_EQ(static_cast(0), + reinterpret_cast(result) & 3); +} + +TF_LITE_MICRO_TEST(TestMultipleTooLarge) { + constexpr size_t arena_size = 1024; + uint8_t arena[arena_size]; + tflite::SingleArenaBufferAllocator allocator(arena, arena_size); + + uint8_t* result = allocator.AllocatePersistentBuffer(768, 1); + TF_LITE_MICRO_EXPECT(nullptr != result); + + result = allocator.AllocatePersistentBuffer(768, 1); + TF_LITE_MICRO_EXPECT(nullptr == result); +} + +TF_LITE_MICRO_TEST(TestTempAllocations) { + constexpr size_t arena_size = 1024; + uint8_t arena[arena_size]; + tflite::SingleArenaBufferAllocator 
allocator(arena, arena_size); + + uint8_t* temp1 = allocator.AllocateTemp(100, 1); + TF_LITE_MICRO_EXPECT(nullptr != temp1); + + uint8_t* temp2 = allocator.AllocateTemp(100, 1); + TF_LITE_MICRO_EXPECT(nullptr != temp2); + + // Expect that the next micro allocation is 100 bytes away from each other. + TF_LITE_MICRO_EXPECT_EQ(temp2 - temp1, 100); +} + +TF_LITE_MICRO_TEST(TestResetTempAllocations) { + constexpr size_t arena_size = 1024; + uint8_t arena[arena_size]; + tflite::SingleArenaBufferAllocator allocator(arena, arena_size); + + uint8_t* temp1 = allocator.AllocateTemp(100, 1); + TF_LITE_MICRO_EXPECT(nullptr != temp1); + + allocator.DeallocateTemp(temp1); + allocator.ResetTempAllocations(); + + uint8_t* temp2 = allocator.AllocateTemp(100, 1); + TF_LITE_MICRO_EXPECT(nullptr != temp2); + + // Reset temp allocations should have the same start address: + TF_LITE_MICRO_EXPECT_EQ(temp2 - temp1, 0); +} + +TF_LITE_MICRO_TEST(TestEnsureHeadSizeWithoutResettingTemp) { + constexpr size_t arena_size = 1024; + uint8_t arena[arena_size]; + tflite::SingleArenaBufferAllocator allocator(arena, arena_size); + uint8_t* resizable_buf = allocator.AllocateResizableBuffer(0, 1); + TF_LITE_MICRO_EXPECT(resizable_buf != nullptr); + + uint8_t* temp = allocator.AllocateTemp(100, 1); + TF_LITE_MICRO_EXPECT(nullptr != temp); + + // Adjustment to head should fail since temp allocation was not followed by a + // call to ResetTempAllocations(). + TF_LITE_MICRO_EXPECT_EQ(kTfLiteError, + allocator.ResizeBuffer(resizable_buf, 100, 1)); + + allocator.DeallocateTemp(temp); + allocator.ResetTempAllocations(); + + // Reduce head size back to zero. + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, + allocator.ResizeBuffer(resizable_buf, 0, 1)); + + // The most recent head allocation should be in the same location as the + // original temp allocation pointer. 
+ TF_LITE_MICRO_EXPECT(temp == allocator.GetOverlayMemoryAddress()); +} + +TF_LITE_MICRO_TEST(TestIsAllTempDeallocated) { + constexpr size_t arena_size = 1024; + uint8_t arena[arena_size]; + tflite::SingleArenaBufferAllocator allocator(arena, arena_size); + + uint8_t* temp1 = allocator.AllocateTemp(100, 1); + TF_LITE_MICRO_EXPECT(allocator.IsAllTempDeallocated() == false); + + uint8_t* temp2 = allocator.AllocateTemp(100, 1); + TF_LITE_MICRO_EXPECT(allocator.IsAllTempDeallocated() == false); + + allocator.DeallocateTemp(temp1); + TF_LITE_MICRO_EXPECT(allocator.IsAllTempDeallocated() == false); + + allocator.DeallocateTemp(temp2); + TF_LITE_MICRO_EXPECT(allocator.IsAllTempDeallocated() == true); +} + +TF_LITE_MICRO_TESTS_END diff --git a/tensorflow/lite/micro/benchmarks/BUILD b/tensorflow/lite/micro/benchmarks/BUILD new file mode 100644 index 0000000..808f6ac --- /dev/null +++ b/tensorflow/lite/micro/benchmarks/BUILD @@ -0,0 +1,103 @@ +# Description: +# TensorFlow Lite microcontroller benchmarks. 
+package( + # Disabling layering_check because of http://b/177257332 + features = ["-layering_check"], + licenses = ["notice"], +) + +package_group( + name = "micro_top_level", + packages = ["//tensorflow/lite/micro"], +) + +cc_library( + name = "micro_benchmark", + hdrs = [ + "micro_benchmark.h", + ], + visibility = [ + "//visibility:public", + ], + deps = [ + "//tensorflow/lite/micro:micro_framework", + "//tensorflow/lite/micro:micro_log", + "//tensorflow/lite/micro:micro_resource_variable", + "//tensorflow/lite/micro:micro_time", + "//tensorflow/lite/micro:op_resolvers", + "//tensorflow/lite/micro:recording_allocators", + ], +) + +cc_library( + name = "keyword_scrambled_model_data", + srcs = [ + "//tensorflow/lite/micro/models:generated_keyword_scrambled_model_cc", + ], + hdrs = [ + "//tensorflow/lite/micro/models:generated_keyword_scrambled_model_hdr", + ], + visibility = [ + ":micro_top_level", + ], +) + +cc_binary( + name = "keyword_benchmark", + srcs = ["keyword_benchmark.cc"], + deps = [ + ":keyword_scrambled_model_data", + ":micro_benchmark", + "//tensorflow/lite/c:common", + "//tensorflow/lite/micro:micro_framework", + "//tensorflow/lite/micro:micro_log", + "//tensorflow/lite/micro:micro_profiler", + "//tensorflow/lite/micro:op_resolvers", + "//tensorflow/lite/micro:system_setup", + ], +) + +cc_library( + name = "keyword_scrambled_8bit_model_data", + srcs = [ + "//tensorflow/lite/micro/models:generated_keyword_scrambled_8bit_model_cc", + ], + hdrs = [ + "//tensorflow/lite/micro/models:generated_keyword_scrambled_8bit_model_hdr", + ], + visibility = [ + ":micro_top_level", + ], +) + +cc_binary( + name = "keyword_benchmark_8bit", + srcs = ["keyword_benchmark_8bit.cc"], + deps = [ + ":keyword_scrambled_8bit_model_data", + ":micro_benchmark", + "//tensorflow/lite/c:common", + "//tensorflow/lite/micro:micro_framework", + "//tensorflow/lite/micro:micro_log", + "//tensorflow/lite/micro:op_resolvers", + "//tensorflow/lite/micro:system_setup", + ], +) + 
+cc_binary( + name = "person_detection_benchmark", + srcs = ["person_detection_benchmark.cc"], + deps = [ + ":micro_benchmark", + "//tensorflow/lite/c:common", + "//tensorflow/lite/micro:micro_framework", + "//tensorflow/lite/micro:micro_log", + "//tensorflow/lite/micro:micro_utils", + "//tensorflow/lite/micro:op_resolvers", + "//tensorflow/lite/micro:system_setup", + "//tensorflow/lite/micro/examples/person_detection:model_settings", + "//tensorflow/lite/micro/examples/person_detection:person_detect_model_data", + "//tensorflow/lite/micro/examples/person_detection:simple_images_test_data", + "//tensorflow/lite/schema:schema_fbs", + ], +) diff --git a/tensorflow/lite/micro/benchmarks/Makefile.inc b/tensorflow/lite/micro/benchmarks/Makefile.inc new file mode 100644 index 0000000..00f62ba --- /dev/null +++ b/tensorflow/lite/micro/benchmarks/Makefile.inc @@ -0,0 +1,48 @@ +KEYWORD_BENCHMARK_SRCS := \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/benchmarks/keyword_benchmark.cc + +KEYWORD_BENCHMARK_GENERATOR_INPUTS := \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/models/keyword_scrambled.tflite + +KEYWORD_BENCHMARK_HDRS := \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/benchmarks/micro_benchmark.h + +KEYWORD_BENCHMARK_8BIT_SRCS := \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/benchmarks/keyword_benchmark_8bit.cc + +KEYWORD_BENCHMARK_8BIT_GENERATOR_INPUTS := \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/models/keyword_scrambled_8bit.tflite + +KEYWORD_BENCHMARK_8BIT_HDRS := \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/benchmarks/micro_benchmark.h + +PERSON_DETECTION_BENCHMARK_SRCS := \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/benchmarks/person_detection_benchmark.cc + +PERSON_DETECTION_BENCHMARK_GENERATOR_INPUTS := \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/examples/person_detection/testdata/person.bmp \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/examples/person_detection/testdata/no_person.bmp + +ifneq ($(CO_PROCESSOR),ethos_u) + PERSON_DETECTION_BENCHMARK_GENERATOR_INPUTS += \ + 
$(TENSORFLOW_ROOT)tensorflow/lite/micro/models/person_detect.tflite +else + # Ethos-U use a Vela optimized version of the original model. + PERSON_DETECTION_BENCHMARK_SRCS += \ + $(GENERATED_SRCS_DIR)$(TENSORFLOW_ROOT)tensorflow/lite/micro/models/person_detect_model_data_vela.cc +endif + +PERSON_DETECTION_BENCHMARK_HDRS := \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/examples/person_detection/model_settings.h \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/benchmarks/micro_benchmark.h + +# Builds a standalone binary. +$(eval $(call microlite_test,keyword_benchmark,\ +$(KEYWORD_BENCHMARK_SRCS),$(KEYWORD_BENCHMARK_HDRS),$(KEYWORD_BENCHMARK_GENERATOR_INPUTS))) + +# Builds a standalone binary. +$(eval $(call microlite_test,keyword_benchmark_8bit,\ +$(KEYWORD_BENCHMARK_8BIT_SRCS),$(KEYWORD_BENCHMARK_8BIT_HDRS),$(KEYWORD_BENCHMARK_8BIT_GENERATOR_INPUTS))) + +$(eval $(call microlite_test,person_detection_benchmark,\ +$(PERSON_DETECTION_BENCHMARK_SRCS),$(PERSON_DETECTION_BENCHMARK_HDRS),$(PERSON_DETECTION_BENCHMARK_GENERATOR_INPUTS))) diff --git a/tensorflow/lite/micro/benchmarks/README.md b/tensorflow/lite/micro/benchmarks/README.md new file mode 100644 index 0000000..1031a58 --- /dev/null +++ b/tensorflow/lite/micro/benchmarks/README.md @@ -0,0 +1,98 @@ +# TFLite for Microcontrollers Benchmarks + +These benchmarks are for measuring the performance of key models and workloads. +They are meant to be used as part of the model optimization process for a given +platform. + +## Table of contents + +- [Keyword Benchmark](#keyword-benchmark) +- [Person Detection Benchmark](#person-detection-benchmark) +- [Run on x86](#run-on-x86) +- [Run on Xtensa XPG Simulator](#run-on-xtensa-xpg-simulator) +- [Run on Sparkfun Edge](#run-on-sparkfun-edge) +- [Run on FVP based on Arm Corstone-300 software](#run-on-fvp-based-on-arm-corstone-300-software) + +## Keyword benchmark + +The keyword benchmark contains a model for keyword detection with scrambled +weights and biases. 
This model is meant to test performance on a platform only. +Since the weights are scrambled, the output is meaningless. In order to validate +the accuracy of optimized kernels, please run the kernel tests. + +## Person detection benchmark + +The person detection benchmark provides a way to evaluate the performance of the 250KB +visual wakewords model. + +## Run on x86 + +To run the keyword benchmark on x86, run + +``` +make -f tensorflow/lite/micro/tools/make/Makefile run_keyword_benchmark +``` + +To run the person detection benchmark on x86, run + +``` +make -f tensorflow/lite/micro/tools/make/Makefile run_person_detection_benchmark +``` + +## Run on Xtensa XPG Simulator + +To run the keyword benchmark on the Xtensa XPG simulator, you will need a valid +Xtensa toolchain and license. With these set up, run: + +``` +make -f tensorflow/lite/micro/tools/make/Makefile TARGET=xtensa OPTIMIZED_KERNEL_DIR=xtensa TARGET_ARCH= XTENSA_CORE= run_keyword_benchmark -j18 +``` + +## Run on Sparkfun Edge +The following instructions will help you build and deploy this benchmark on the +[SparkFun Edge development board](https://sparkfun.com/products/15170). + + +If you're new to using this board, we recommend walking through the +[AI on a microcontroller with TensorFlow Lite and SparkFun Edge](https://codelabs.developers.google.com/codelabs/sparkfun-tensorflow) +codelab to get an understanding of the workflow. + +Build binary using + +``` +make -f tensorflow/lite/micro/tools/make/Makefile TARGET=sparkfun_edge person_detection_benchmark_bin +``` + +Refer to flashing instructions in the [Person Detection Example](https://github.com/tensorflow/tflite-micro/blob/main/tensorflow/lite/micro/examples/person_detection/README.md#running-on-sparkfun-edge). + +## Run on FVP based on Arm Corstone-300 software + +For more info about the Corstone-300 software see: +[tensorflow/lite/micro/cortex_m_corstone_300/README.md](../cortex_m_corstone_300/README.md). 
+ +Disclaimer: Executing the benchmark test on the Corstone-300 software will +provide a general metric of instructions executed. The estimates are not cycle +accurate, however it aligns to instruction per cycle, and is a consistent +environment. This means it can detect if code changes changed performance. + +The person detection benchmark can also run with Ethos-U enabled, as the +downloaded model will be optimized for Ethos-U. For more info see: +[tensorflow/lite/micro/kernels/ethos_u/README.md](../kernels/ethos_u/README.md). + +To run the keyword benchmark on FVP: + +``` +make -j -f tensorflow/lite/micro/tools/make/Makefile TARGET=cortex_m_corstone_300 TARGET_ARCH=cortex-m55 run_keyword_benchmark +``` + +To run the person detection benchmark on FVP: + +``` +make -j -f tensorflow/lite/micro/tools/make/Makefile TARGET=cortex_m_corstone_300 TARGET_ARCH=cortex-m55 run_person_detection_benchmark +``` + +To run the person detection benchmark on FVP with Ethos-U: + +``` +make -j -f tensorflow/lite/micro/tools/make/Makefile CO_PROCESSOR=ethos_u TARGET=cortex_m_corstone_300 TARGET_ARCH=cortex-m55 run_person_detection_benchmark +``` diff --git a/tensorflow/lite/micro/benchmarks/keyword_benchmark.cc b/tensorflow/lite/micro/benchmarks/keyword_benchmark.cc new file mode 100644 index 0000000..3695c11 --- /dev/null +++ b/tensorflow/lite/micro/benchmarks/keyword_benchmark.cc @@ -0,0 +1,102 @@ +/* Copyright 2020 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#include +#include + +#include "tensorflow/lite/c/common.h" +#include "tensorflow/lite/micro/benchmarks/micro_benchmark.h" +#include "tensorflow/lite/micro/kernels/fully_connected.h" +#include "tensorflow/lite/micro/kernels/softmax.h" +#include "tensorflow/lite/micro/kernels/svdf.h" +#include "tensorflow/lite/micro/micro_log.h" +#include "tensorflow/lite/micro/micro_mutable_op_resolver.h" +#include "tensorflow/lite/micro/micro_profiler.h" +#include "tensorflow/lite/micro/models/keyword_scrambled_model_data.h" +#include "tensorflow/lite/micro/system_setup.h" + +/* + * Keyword Spotting Benchmark for performance optimizations. The model used in + * this benchmark only serves as a reference. The values assigned to the model + * weights and parameters are not representative of the original model. + */ + +namespace tflite { + +using KeywordBenchmarkRunner = MicroBenchmarkRunner; +using KeywordOpResolver = MicroMutableOpResolver<6>; + +// Create an area of memory to use for input, output, and intermediate arrays. +// Align arena to 16 bytes to avoid alignment warnings on certain platforms. +constexpr int kTensorArenaSize = 21 * 1024; +alignas(16) uint8_t tensor_arena[kTensorArenaSize]; + +uint8_t benchmark_runner_buffer[sizeof(KeywordBenchmarkRunner)]; +uint8_t op_resolver_buffer[sizeof(KeywordOpResolver)]; + +// Initialize benchmark runner instance explicitly to avoid global init order +// issues on Sparkfun. Use new since static variables within a method +// are automatically surrounded by locking, which breaks bluepill. +KeywordBenchmarkRunner* CreateBenchmarkRunner(MicroProfiler* profiler) { + // We allocate the KeywordOpResolver from a global buffer because the object's + // lifetime must exceed that of the KeywordBenchmarkRunner object. 
+ KeywordOpResolver* op_resolver = new (op_resolver_buffer) KeywordOpResolver(); + op_resolver->AddFullyConnected(tflite::Register_FULLY_CONNECTED_INT8()); + op_resolver->AddQuantize(); + op_resolver->AddSoftmax(tflite::Register_SOFTMAX_INT8_INT16()); + op_resolver->AddSvdf(tflite::Register_SVDF_INT8()); + + return new (benchmark_runner_buffer) + KeywordBenchmarkRunner(g_keyword_scrambled_model_data, op_resolver, + tensor_arena, kTensorArenaSize, profiler); +} + +void KeywordRunNIerations(int iterations, const char* tag, + KeywordBenchmarkRunner& benchmark_runner, + MicroProfiler& profiler) { + int32_t ticks = 0; + for (int i = 0; i < iterations; ++i) { + benchmark_runner.SetRandomInput(i); + profiler.ClearEvents(); + benchmark_runner.RunSingleIteration(); + ticks += profiler.GetTotalTicks(); + } + MicroPrintf("%s took %d ticks (%d ms)", tag, ticks, TicksToMs(ticks)); +} + +} // namespace tflite + +int main(int argc, char** argv) { + tflite::InitializeTarget(); + tflite::MicroProfiler profiler; + + uint32_t event_handle = profiler.BeginEvent("InitializeKeywordRunner"); + tflite::KeywordBenchmarkRunner* benchmark_runner = + CreateBenchmarkRunner(&profiler); + profiler.EndEvent(event_handle); + profiler.Log(); + MicroPrintf(""); // null MicroPrintf serves as a newline. + + tflite::KeywordRunNIerations(1, "KeywordRunNIerations(1)", *benchmark_runner, + profiler); + profiler.Log(); + MicroPrintf(""); // null MicroPrintf serves as a newline. + + tflite::KeywordRunNIerations(10, "KeywordRunNIerations(10)", + *benchmark_runner, profiler); + MicroPrintf(""); // null MicroPrintf serves as a newline. + + benchmark_runner->PrintAllocations(); +} diff --git a/tensorflow/lite/micro/benchmarks/keyword_benchmark_8bit.cc b/tensorflow/lite/micro/benchmarks/keyword_benchmark_8bit.cc new file mode 100644 index 0000000..e592850 --- /dev/null +++ b/tensorflow/lite/micro/benchmarks/keyword_benchmark_8bit.cc @@ -0,0 +1,102 @@ +/* Copyright 2020 The TensorFlow Authors. 
All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#include +#include + +#include "tensorflow/lite/c/common.h" +#include "tensorflow/lite/micro/benchmarks/micro_benchmark.h" +#include "tensorflow/lite/micro/kernels/fully_connected.h" +#include "tensorflow/lite/micro/kernels/softmax.h" +#include "tensorflow/lite/micro/kernels/svdf.h" +#include "tensorflow/lite/micro/micro_log.h" +#include "tensorflow/lite/micro/micro_mutable_op_resolver.h" +#include "tensorflow/lite/micro/micro_profiler.h" +#include "tensorflow/lite/micro/models/keyword_scrambled_8bit_model_data.h" +#include "tensorflow/lite/micro/system_setup.h" + +/* + * Keyword Spotting Benchmark for performance optimizations. The model used in + * this benchmark only serves as a reference. The values assigned to the model + * weights and parameters are not representative of the original model. + */ + +namespace tflite { + +using KeywordBenchmarkRunner = MicroBenchmarkRunner; +using KeywordOpResolver = MicroMutableOpResolver<6>; + +// Create an area of memory to use for input, output, and intermediate arrays. +// Align arena to 16 bytes to avoid alignment warnings on certain platforms. 
+constexpr int kTensorArenaSize = 21 * 1024; +alignas(16) uint8_t tensor_arena[kTensorArenaSize]; + +uint8_t benchmark_runner_buffer[sizeof(KeywordBenchmarkRunner)]; +uint8_t op_resolver_buffer[sizeof(KeywordOpResolver)]; + +// Initialize benchmark runner instance explicitly to avoid global init order +// issues on Sparkfun. Use new since static variables within a method +// are automatically surrounded by locking, which breaks bluepill. +KeywordBenchmarkRunner* CreateBenchmarkRunner(MicroProfiler* profiler) { + // We allocate the KeywordOpResolver from a global buffer because the object's + // lifetime must exceed that of the KeywordBenchmarkRunner object. + KeywordOpResolver* op_resolver = new (op_resolver_buffer) KeywordOpResolver(); + op_resolver->AddFullyConnected(tflite::Register_FULLY_CONNECTED_INT8()); + op_resolver->AddQuantize(); + op_resolver->AddSoftmax(tflite::Register_SOFTMAX_INT8_INT16()); + op_resolver->AddSvdf(tflite::Register_SVDF_INT8()); + + return new (benchmark_runner_buffer) + KeywordBenchmarkRunner(g_keyword_scrambled_8bit_model_data, op_resolver, + tensor_arena, kTensorArenaSize, profiler); +} + +void KeywordRunNIerations(int iterations, const char* tag, + KeywordBenchmarkRunner& benchmark_runner, + MicroProfiler& profiler) { + int32_t ticks = 0; + for (int i = 0; i < iterations; ++i) { + benchmark_runner.SetRandomInput(i); + profiler.ClearEvents(); + benchmark_runner.RunSingleIteration(); + ticks += profiler.GetTotalTicks(); + } + MicroPrintf("%s took %d ticks (%d ms)", tag, ticks, TicksToMs(ticks)); +} + +} // namespace tflite + +int main(int argc, char** argv) { + tflite::InitializeTarget(); + tflite::MicroProfiler profiler; + + uint32_t event_handle = profiler.BeginEvent("InitializeKeywordRunner"); + tflite::KeywordBenchmarkRunner* benchmark_runner = + CreateBenchmarkRunner(&profiler); + profiler.EndEvent(event_handle); + profiler.Log(); + MicroPrintf(""); // null MicroPrintf serves as a newline. 
+ + tflite::KeywordRunNIerations(1, "KeywordRunNIerations(1)", *benchmark_runner, + profiler); + profiler.Log(); + MicroPrintf(""); // null MicroPrintf serves as a newline. + + tflite::KeywordRunNIerations(10, "KeywordRunNIerations(10)", + *benchmark_runner, profiler); + MicroPrintf(""); // null MicroPrintf serves as a newline. + + benchmark_runner->PrintAllocations(); +} diff --git a/tensorflow/lite/micro/benchmarks/micro_benchmark.h b/tensorflow/lite/micro/benchmarks/micro_benchmark.h new file mode 100644 index 0000000..6ade682 --- /dev/null +++ b/tensorflow/lite/micro/benchmarks/micro_benchmark.h @@ -0,0 +1,95 @@ +/* Copyright 2022 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#ifndef TENSORFLOW_LITE_MICRO_BENCHMARKS_MICRO_BENCHMARK_H_ +#define TENSORFLOW_LITE_MICRO_BENCHMARKS_MICRO_BENCHMARK_H_ + +#include + +#include "tensorflow/lite/micro/micro_log.h" +#include "tensorflow/lite/micro/micro_op_resolver.h" +#include "tensorflow/lite/micro/micro_profiler_interface.h" +#include "tensorflow/lite/micro/micro_resource_variable.h" +#include "tensorflow/lite/micro/micro_time.h" +#include "tensorflow/lite/micro/recording_micro_interpreter.h" + +namespace tflite { + +template +class MicroBenchmarkRunner { + public: + // The lifetimes of model, op_resolver, tensor_arena, profiler must exceed + // that of the created MicroBenchmarkRunner object. 
+ MicroBenchmarkRunner(const uint8_t* model, + const tflite::MicroOpResolver* op_resolver, + uint8_t* tensor_arena, int tensor_arena_size, + MicroProfilerInterface* profiler, + int num_resource_variables = 0) + : allocator_( + RecordingMicroAllocator::Create(tensor_arena, tensor_arena_size)), + interpreter_( + GetModel(model), *op_resolver, allocator_, + MicroResourceVariables::Create(allocator_, num_resource_variables), + profiler) { + interpreter_.AllocateTensors(); + } + + void RunSingleIteration() { + // Run the model on this input and make sure it succeeds. + TfLiteStatus invoke_status = interpreter_.Invoke(); + if (invoke_status == kTfLiteError) { + MicroPrintf("Invoke failed."); + } + } + + int NumInputs() { return interpreter_.inputs().size(); } + + void SetRandomInput(const int random_seed, int input_index = 0) { + // The pseudo-random number generator is initialized to a constant seed + std::srand(random_seed); + TfLiteTensor* input = interpreter_.input(input_index); + + // Pre-populate input tensor with random values. + int input_length = input->bytes / sizeof(inputT); + inputT* input_values = tflite::GetTensorData(input); + for (int i = 0; i < input_length; i++) { + // Pre-populate input tensor with a random value based on a constant seed. 
+ input_values[i] = static_cast( + std::rand() % (std::numeric_limits::max() - + std::numeric_limits::min() + 1)); + } + } + + void SetInput(const inputT* custom_input, int input_index = 0) { + TfLiteTensor* input = interpreter_.input(input_index); + inputT* input_buffer = tflite::GetTensorData(input); + int input_length = input->bytes / sizeof(inputT); + for (int i = 0; i < input_length; i++) { + input_buffer[i] = custom_input[i]; + } + } + + void PrintAllocations() const { + interpreter_.GetMicroAllocator().PrintAllocations(); + } + + private: + tflite::RecordingMicroAllocator* allocator_; + tflite::RecordingMicroInterpreter interpreter_; +}; + +} // namespace tflite + +#endif // TENSORFLOW_LITE_MICRO_BENCHMARKS_MICRO_BENCHMARK_H_ diff --git a/tensorflow/lite/micro/benchmarks/person_detection_benchmark.cc b/tensorflow/lite/micro/benchmarks/person_detection_benchmark.cc new file mode 100644 index 0000000..e21789b --- /dev/null +++ b/tensorflow/lite/micro/benchmarks/person_detection_benchmark.cc @@ -0,0 +1,120 @@ +/* Copyright 2023 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/ + +#include "tensorflow/lite/c/common.h" +#include "tensorflow/lite/micro/benchmarks/micro_benchmark.h" +#include "tensorflow/lite/micro/examples/person_detection/model_settings.h" +#include "tensorflow/lite/micro/examples/person_detection/testdata/no_person_image_data.h" +#include "tensorflow/lite/micro/examples/person_detection/testdata/person_image_data.h" +#include "tensorflow/lite/micro/kernels/conv.h" +#include "tensorflow/lite/micro/kernels/fully_connected.h" +#include "tensorflow/lite/micro/micro_interpreter.h" +#include "tensorflow/lite/micro/micro_log.h" +#include "tensorflow/lite/micro/micro_mutable_op_resolver.h" +#include "tensorflow/lite/micro/micro_profiler.h" +#include "tensorflow/lite/micro/micro_utils.h" +#include "tensorflow/lite/micro/models/person_detect_model_data.h" +#include "tensorflow/lite/micro/system_setup.h" +#include "tensorflow/lite/schema/schema_generated.h" + +/* + * Person Detection benchmark. Evaluates runtime performance of the visual + * wakewords person detection model. This is the same model found in + * examples/person_detection. + */ + +namespace tflite { + +using PersonDetectionOpResolver = MicroMutableOpResolver<6>; +using PersonDetectionBenchmarkRunner = MicroBenchmarkRunner; + +// Create an area of memory to use for input, output, and intermediate arrays. +// Align arena to 16 bytes to avoid alignment warnings on certain platforms. +constexpr int kTensorArenaSize = 135 * 1024; +alignas(16) uint8_t tensor_arena[kTensorArenaSize]; + +uint8_t op_resolver_buffer[sizeof(PersonDetectionOpResolver)]; +uint8_t benchmark_runner_buffer[sizeof(PersonDetectionBenchmarkRunner)]; + +// Initialize benchmark runner instance explicitly to avoid global init order +// issues on Sparkfun. Use new since static variables within a method +// are automatically surrounded by locking, which breaks bluepill. 
+PersonDetectionBenchmarkRunner* CreateBenchmarkRunner(MicroProfiler* profiler) { + // We allocate PersonDetectionOpResolver from a global buffer + // because the object's lifetime must exceed that of the + // PersonDetectionBenchmarkRunner object. + PersonDetectionOpResolver* op_resolver = + new (op_resolver_buffer) PersonDetectionOpResolver(); + op_resolver->AddFullyConnected(tflite::Register_FULLY_CONNECTED_INT8()); + op_resolver->AddConv2D(tflite::Register_CONV_2D_INT8REF()); + op_resolver->AddDepthwiseConv2D(); + op_resolver->AddSoftmax(); + op_resolver->AddAveragePool2D(tflite::Register_AVERAGE_POOL_2D_INT8()); + op_resolver->AddReshape(); + return new (benchmark_runner_buffer) + PersonDetectionBenchmarkRunner(g_person_detect_model_data, op_resolver, + tensor_arena, kTensorArenaSize, profiler); +} + +void PersonDetectionNIerations(const int8_t* input, int iterations, + const char* tag, + PersonDetectionBenchmarkRunner& benchmark_runner, + MicroProfiler& profiler) { + benchmark_runner.SetInput(input); + uint32_t ticks = 0; + for (int i = 0; i < iterations; ++i) { + profiler.ClearEvents(); + benchmark_runner.RunSingleIteration(); + ticks += profiler.GetTotalTicks(); + } + MicroPrintf("%s took %u ticks (%u ms)", tag, ticks, TicksToMs(ticks)); +} + +} // namespace tflite + +int main(int argc, char** argv) { + tflite::InitializeTarget(); + + tflite::MicroProfiler profiler; + + uint32_t event_handle = profiler.BeginEvent("InitializeBenchmarkRunner"); + tflite::PersonDetectionBenchmarkRunner* benchmark_runner = + CreateBenchmarkRunner(&profiler); + profiler.EndEvent(event_handle); + profiler.Log(); + MicroPrintf(""); // null MicroPrintf serves as a newline. + + tflite::PersonDetectionNIerations( + reinterpret_cast(g_person_image_data), 1, + "WithPersonDataIterations(1)", *benchmark_runner, profiler); + profiler.Log(); + MicroPrintf(""); // null MicroPrintf serves as a newline. 
+ + tflite::PersonDetectionNIerations( + reinterpret_cast(g_no_person_image_data), 1, + "NoPersonDataIterations(1)", *benchmark_runner, profiler); + profiler.Log(); + MicroPrintf(""); // null MicroPrintf serves as a newline. + + tflite::PersonDetectionNIerations( + reinterpret_cast(g_person_image_data), 10, + "WithPersonDataIterations(10)", *benchmark_runner, profiler); + MicroPrintf(""); // null MicroPrintf serves as a newline. + + tflite::PersonDetectionNIerations( + reinterpret_cast(g_no_person_image_data), 10, + "NoPersonDataIterations(10)", *benchmark_runner, profiler); + MicroPrintf(""); // null MicroPrintf serves as a newline. +} diff --git a/tensorflow/lite/micro/bluepill/debug_log.cc b/tensorflow/lite/micro/bluepill/debug_log.cc new file mode 100644 index 0000000..3fd2d52 --- /dev/null +++ b/tensorflow/lite/micro/bluepill/debug_log.cc @@ -0,0 +1,27 @@ +/* Copyright 2018 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#include "tensorflow/lite/micro/debug_log.h" + +// For Arm Cortex-M devices, calling SYS_WRITE0 will output the zero-terminated +// string pointed to by R1 to any debug console that's attached to the system. 
+extern "C" void DebugLog(const char* s) { + asm("mov r0, #0x04\n" // SYS_WRITE0 + "mov r1, %[str]\n" + "bkpt #0xAB\n" + : + : [str] "r"(s) + : "r0", "r1"); +} diff --git a/tensorflow/lite/micro/build_def.bzl b/tensorflow/lite/micro/build_def.bzl new file mode 100644 index 0000000..258107a --- /dev/null +++ b/tensorflow/lite/micro/build_def.bzl @@ -0,0 +1,79 @@ +def micro_copts(): + return [ + "-Wall", + "-Werror", + "-Wnon-virtual-dtor", + "-DFLATBUFFERS_LOCALE_INDEPENDENT=0", + ] + +def generate_cc_arrays(name, src, out, visibility = None): + native.genrule( + name = name, + srcs = [ + src, + ], + outs = [ + out, + ], + cmd = "$(location //tensorflow/lite/micro/tools:generate_cc_arrays) $@ $<", + tools = ["//tensorflow/lite/micro/tools:generate_cc_arrays"], + visibility = visibility, + ) + +def tflm_kernel_cc_library( + name, + srcs = [], + hdrs = [], + accelerated_srcs = {}, + deps = [], + **kwargs): + """Creates a cc_library with the optional accelerated target sources. + + Note: + Bazel macros cannot evaluate a select() statement. Therefore, the accelerated_srcs and + accelerated_hdrs are passed as a dictionary, and the select statement is generated from the + supplied dictionary. + + Args: + name: The name of the target. + srcs: The non-accelerated TFLM kernel source files. + hdrs: The non-accelerated TFLM kernel header files. + accelerated_srcs: A dictionary organized as {target: accelerated tflm kernel sources}. + deps: The library's dependencies. + **kwargs: Arguments passed into the cc_library. + """ + + all_srcs = { + "//conditions:default": srcs, + } + + all_hdrs = { + "//conditions:default": hdrs, + } + + # Identify all of the sources for each target. 
This ends up creating a dictionary for both the + # sources and headers that looks like the following: + # { + # "target1" : [target1_srcs] + [reference_srcs that aren't accelerated], + # "target2" : [target2_srcs] + [reference_srcs that aren't accelerated], + # "//conditions:default": [reference_srcs] + # } + for target in accelerated_srcs: + target_srcs = accelerated_srcs[target] + target_src_filenames = [src.split("/")[-1] for src in target_srcs] + all_target_srcs = target_srcs + + # Filter out all reference ops that have accelerated implementations. + for src in srcs: + if src not in target_src_filenames: + all_target_srcs.append(src) + + all_srcs[target] = all_target_srcs + + native.cc_library( + name = name, + srcs = select(all_srcs), + hdrs = hdrs, + deps = deps, + **kwargs + ) diff --git a/tensorflow/lite/micro/ceva/micro_time.cc b/tensorflow/lite/micro/ceva/micro_time.cc new file mode 100644 index 0000000..15bb872 --- /dev/null +++ b/tensorflow/lite/micro/ceva/micro_time.cc @@ -0,0 +1,14 @@ +/* Copyright 2021 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ diff --git a/tensorflow/lite/micro/ceva/system_setup.cc b/tensorflow/lite/micro/ceva/system_setup.cc new file mode 100644 index 0000000..f885b14 --- /dev/null +++ b/tensorflow/lite/micro/ceva/system_setup.cc @@ -0,0 +1,34 @@ +/* Copyright 2021 The TensorFlow Authors. All Rights Reserved. 
+ +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#include "tensorflow/lite/micro/system_setup.h" + +#include + +#include "tensorflow/lite/micro/micro_time.h" + +namespace tflite { + +uint32_t ticks_per_second() { return 100e6; } + +uint32_t GetCurrentTimeTicks() { return static_cast(clock()); } + +void InitializeTarget() { + // start clock for profiler + reset_clock(); + start_clock(); +} + +} // namespace tflite diff --git a/tensorflow/lite/micro/chre/debug_log.cc b/tensorflow/lite/micro/chre/debug_log.cc new file mode 100644 index 0000000..23bb82e --- /dev/null +++ b/tensorflow/lite/micro/chre/debug_log.cc @@ -0,0 +1,22 @@ +/* Copyright 2019 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/ + +#include "tensorflow/lite/micro/debug_log.h" + +#include + +extern "C" void DebugLog(const char* s) { + chreLog(CHRE_LOG_DEBUG, "[TFL_MICRO] %s", s); +} diff --git a/tensorflow/lite/micro/compatibility.h b/tensorflow/lite/micro/compatibility.h new file mode 100644 index 0000000..49acb28 --- /dev/null +++ b/tensorflow/lite/micro/compatibility.h @@ -0,0 +1,32 @@ +/* Copyright 2018 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ +#ifndef TENSORFLOW_LITE_MICRO_COMPATIBILITY_H_ +#define TENSORFLOW_LITE_MICRO_COMPATIBILITY_H_ + +// C++ will automatically create class-specific delete operators for virtual +// objects, which by default call the global delete function. For embedded +// applications we want to avoid this, and won't be calling new/delete on these +// objects, so we need to override the default implementation with one that does +// nothing to avoid linking in ::delete(). +// This macro needs to be included in all subclasses of a virtual base class in +// the private section. 
+#ifdef TF_LITE_STATIC_MEMORY +#define TF_LITE_REMOVE_VIRTUAL_DELETE \ + void operator delete(void* p) {} +#else +#define TF_LITE_REMOVE_VIRTUAL_DELETE +#endif + +#endif // TENSORFLOW_LITE_MICRO_COMPATIBILITY_H_ diff --git a/tensorflow/lite/micro/cortex_m_corstone_300/README.md b/tensorflow/lite/micro/cortex_m_corstone_300/README.md new file mode 100644 index 0000000..94935ac --- /dev/null +++ b/tensorflow/lite/micro/cortex_m_corstone_300/README.md @@ -0,0 +1,48 @@ + + +# Running a fixed virtual platform based on Arm(R) Corstone(TM)-300 software + +This target makes use of a fixed virtual platform (FVP) based on Arm +Corstone-300 software. +- [More info about Arm Corstone-300]( +https://developer.arm.com/ip-products/subsystem/corstone/corstone-300) +- [More info about FVPs](https://developer.arm.com/tools-and-software/simulation-models/fixed-virtual-platforms) + +Building the Corstone-300 based target has the following dependencies: + +- [Arm Ethos-U Core Platform](https://review.mlplatform.org/admin/repos/ml/ethos-u/ethos-u-core-platform) + - Arm Ethos-U Core Platform provides the linker file as well as UART and + retarget functions. +- [CMSIS](https://github.com/ARM-software/CMSIS_5) + - CMSIS provides startup functionality, e.g. for setting up interrupt + handlers and clock speed. + +Both these repositories are downloaded automatically by the build process in +TFLM. + +# General build info + +You can compile the Corstone-300 target for multiple Cortex-M CPUs. See below. + +Required parameters: + +- ```TARGET```: cortex_m_corstone_300 +- ```TARGET_ARCH```: cortex-mXX. Replace XX with either of the options in the [Corstone-300 makefile](https://github.com/tensorflow/tflite-micro/tree/main/tensorflow/lite/micro/tools/make/targets/cortex_m_corstone_300_makefile.inc) + +# How to run + +Note that Corstone-300 emulates a Cortex-M55 system, but it is backwards +compatible. This means one could run code compiled for e.g. a Cortex-M7. 
+ +Some examples: + +``` +make -j -f tensorflow/lite/micro/tools/make/Makefile CO_PROCESSOR=ethos_u TARGET=cortex_m_corstone_300 TARGET_ARCH=cortex-m55 test_network_tester_test +make -j -f tensorflow/lite/micro/tools/make/Makefile OPTIMIZED_KERNEL_DIR=cmsis_nn TARGET=cortex_m_corstone_300 TARGET_ARCH=cortex-m55 test_network_tester_test +make -j -f tensorflow/lite/micro/tools/make/Makefile CO_PROCESSOR=ethos_u OPTIMIZED_KERNEL_DIR=cmsis_nn TARGET=cortex_m_corstone_300 TARGET_ARCH=cortex-m55 test_network_tester_test +make -j -f tensorflow/lite/micro/tools/make/Makefile TARGET=cortex_m_corstone_300 TARGET_ARCH=cortex-m55 test_network_tester_test +make -j -f tensorflow/lite/micro/tools/make/Makefile TARGET=cortex_m_corstone_300 TARGET_ARCH=cortex-m55 test_kernel_fully_connected_test +make -j -f tensorflow/lite/micro/tools/make/Makefile OPTIMIZED_KERNEL_DIR=cmsis_nn TARGET=cortex_m_corstone_300 TARGET_ARCH=cortex-m7+fp test_kernel_fully_connected_test +make -j -f tensorflow/lite/micro/tools/make/Makefile TARGET=cortex_m_corstone_300 TARGET_ARCH=cortex-m3 test_kernel_fully_connected_test +make -j -f tensorflow/lite/micro/tools/make/Makefile TARGET=cortex_m_corstone_300 TARGET_ARCH=cortex-m55 BUILD_TYPE=release_with_logs TOOLCHAIN=armclang test_network_tester_test +``` diff --git a/tensorflow/lite/micro/cortex_m_corstone_300/micro_time.cc b/tensorflow/lite/micro/cortex_m_corstone_300/micro_time.cc new file mode 100644 index 0000000..a7db6e4 --- /dev/null +++ b/tensorflow/lite/micro/cortex_m_corstone_300/micro_time.cc @@ -0,0 +1,21 @@ +/* Copyright 2021 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. 
+You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ +// This file is empty to ensure that a specialized implementation of +// micro_time.h is used (instead of the default implementation from +// tensorflow/lite/micro/micro_time.cc). +// +// The actual target-specific implementation of micro_time.h is in +// system_setup.cc since that allows us to consolidate all the target-specific +// specializations into one source file. diff --git a/tensorflow/lite/micro/cortex_m_corstone_300/system_setup.cc b/tensorflow/lite/micro/cortex_m_corstone_300/system_setup.cc new file mode 100644 index 0000000..95a11b2 --- /dev/null +++ b/tensorflow/lite/micro/cortex_m_corstone_300/system_setup.cc @@ -0,0 +1,103 @@ +/* Copyright 2022 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#ifdef ETHOS_U +#include "ethosu_driver.h" +#endif + +// This is set in micro/tools/make/targets/cortex_m_corstone_300_makefile.inc. 
+// It is needed for the calls to NVIC_SetVector()/NVIC_EnableIR() and for the +// DWT and PMU counters. +#include CMSIS_DEVICE_ARM_CORTEX_M_XX_HEADER_FILE + +#include "tensorflow/lite/micro/micro_log.h" +#include "tensorflow/lite/micro/micro_time.h" +#include "tensorflow/lite/micro/system_setup.h" + +namespace tflite { + +namespace { +constexpr uint32_t kClocksPerSecond = 25e6; +} // namespace + +uint32_t ticks_per_second() { return kClocksPerSecond; } + +uint32_t GetCurrentTimeTicks() { +#if (!defined(TF_LITE_STRIP_ERROR_STRINGS) && !defined(ARMCM0)) +#ifdef ARMCM55 + return ARM_PMU_Get_CCNTR(); +#else + return DWT->CYCCNT; +#endif +#else + return 0; +#endif +} + +#ifdef ETHOS_U +#if defined(ETHOSU_FAST_MEMORY_SIZE) && ETHOSU_FAST_MEMORY_SIZE > 0 +__attribute__((aligned(16), section(".bss.ethosu_scratch"))) +uint8_t ethosu0_scratch[ETHOSU_FAST_MEMORY_SIZE]; +#else +#define ethosu0_scratch 0 +#define ETHOSU_FAST_MEMORY_SIZE 0 +#endif + +struct ethosu_driver ethosu0_driver; + +void ethosuIrqHandler0() { ethosu_irq_handler(ðosu0_driver); } +#endif + +extern "C" { +void uart_init(void); +} + +void InitializeTarget() { + uart_init(); + +#if (!defined(TF_LITE_STRIP_ERROR_STRINGS) && !defined(ARMCM0)) +#ifdef ARMCM55 + ARM_PMU_Enable(); + DCB->DEMCR |= DCB_DEMCR_TRCENA_Msk; + + ARM_PMU_CYCCNT_Reset(); + ARM_PMU_CNTR_Enable(PMU_CNTENSET_CCNTR_ENABLE_Msk); + +#else + CoreDebug->DEMCR |= CoreDebug_DEMCR_TRCENA_Msk; + + // Reset and enable DWT cycle counter. + DWT->CYCCNT = 0; + DWT->CTRL |= 1UL; + +#endif +#endif + +#ifdef ETHOS_U + constexpr int ethosu_base_address = 0x48102000; + constexpr int ethosu_irq = 56; + + // Initialize Ethos-U NPU driver. 
+ if (ethosu_init(ðosu0_driver, reinterpret_cast(ethosu_base_address), + ethosu0_scratch, ETHOSU_FAST_MEMORY_SIZE, 1, 1)) { + MicroPrintf("Failed to initialize Ethos-U driver"); + } + NVIC_SetVector(static_cast(ethosu_irq), + (uint32_t)ðosuIrqHandler0); + NVIC_EnableIRQ(static_cast(ethosu_irq)); +#endif +} + +} // namespace tflite diff --git a/tensorflow/lite/micro/cortex_m_generic/README.md b/tensorflow/lite/micro/cortex_m_generic/README.md new file mode 100644 index 0000000..c7abb54 --- /dev/null +++ b/tensorflow/lite/micro/cortex_m_generic/README.md @@ -0,0 +1,65 @@ + + +# Generic Cortex-Mx customizations + +The customization requires a definition where the debug log goes to. The purpose +of the generic Cortex-Mx target is to generate a TFLM library file for use in +application projects outside of this repo. As the chip HAL and the board +specific layer are only defined in the application project, the TFLM library +cannot write the debug log anywhere. Instead, we allow the application layer to +register a callback function for writing the TFLM kernel debug log. + +# Usage + +See debug_log_callback.h + +# How to build + +Required parameters: + + - TARGET: cortex_m_generic + - TARGET_ARCH: cortex-mXX For all options see: [link](https://github.com/tensorflow/tflite-micro/tree/main/tensorflow/lite/micro/tools/make/targets/cortex_m_generic_makefile.inc) + +Optional parameters: + + - TOOLCHAIN: gcc (default) or armclang + - For Cortex-M55, ARM Compiler 6.14 or later is required. 
+ +Some examples: + +Building with arm-gcc + +``` +make -f tensorflow/lite/micro/tools/make/Makefile TARGET=cortex_m_generic TARGET_ARCH=cortex-m7 microlite +make -f tensorflow/lite/micro/tools/make/Makefile TARGET=cortex_m_generic TARGET_ARCH=cortex-m7 OPTIMIZED_KERNEL_DIR=cmsis_nn microlite + +make -f tensorflow/lite/micro/tools/make/Makefile TARGET=cortex_m_generic TARGET_ARCH=cortex-m4 OPTIMIZED_KERNEL_DIR=cmsis_nn microlite +make -f tensorflow/lite/micro/tools/make/Makefile TARGET=cortex_m_generic TARGET_ARCH=cortex-m4+fp OPTIMIZED_KERNEL_DIR=cmsis_nn microlite +``` + +Building with armclang + +``` +make -f tensorflow/lite/micro/tools/make/Makefile TOOLCHAIN=armclang TARGET=cortex_m_generic TARGET_ARCH=cortex-m55 microlite +make -f tensorflow/lite/micro/tools/make/Makefile TOOLCHAIN=armclang TARGET=cortex_m_generic TARGET_ARCH=cortex-m55 OPTIMIZED_KERNEL_DIR=cmsis_nn microlite +make -f tensorflow/lite/micro/tools/make/Makefile TOOLCHAIN=armclang TARGET=cortex_m_generic TARGET_ARCH=cortex-m55+nofp OPTIMIZED_KERNEL_DIR=cmsis_nn microlite +``` + +The Tensorflow Lite Micro makefiles download a specific version of the arm-gcc +compiler to tensorflow/lite/micro/tools/make/downloads/gcc_embedded. + +If desired, a different version can be used by providing `TARGET_TOOLCHAIN_ROOT` +option to the Makefile: + +``` +make -f tensorflow/lite/micro/tools/make/Makefile TARGET=cortex_m_generic TARGET_ARCH=cortex-m4+fp TARGET_TOOLCHAIN_ROOT=/path/to/arm-gcc/ microlite +``` + +Similarly, `OPTIMIZED_KERNEL_DIR=cmsis_nn` downloads a specific version of CMSIS to +tensorflow/lite/micro/tools/make/downloads/cmsis. 
While this is the only version +that is regularly tested, you can use your own version of CMSIS as well by +providing `CMSIS_PATH` to the Makefile: + +``` +make -f tensorflow/lite/micro/tools/make/Makefile TARGET=cortex_m_generic TARGET_ARCH=cortex-m4+fp OPTIMIZED_KERNEL_DIR=cmsis_nn CMSIS_PATH=/path/to/own/cmsis microlite +``` diff --git a/tensorflow/lite/micro/cortex_m_generic/debug_log.cc b/tensorflow/lite/micro/cortex_m_generic/debug_log.cc new file mode 100644 index 0000000..bc79d43 --- /dev/null +++ b/tensorflow/lite/micro/cortex_m_generic/debug_log.cc @@ -0,0 +1,43 @@ +/* Copyright 2020 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +// Implementation for the DebugLog() function that prints to the debug logger on +// an generic Cortex-M device. 
+ +#ifdef __cplusplus +extern "C" { +#endif // __cplusplus + +#include "tensorflow/lite/micro/debug_log.h" + +#include "tensorflow/lite/micro/cortex_m_generic/debug_log_callback.h" + +static DebugLogCallback debug_log_callback = nullptr; + +void RegisterDebugLogCallback(void (*cb)(const char* s)) { + debug_log_callback = cb; +} + +void DebugLog(const char* s) { +#ifndef TF_LITE_STRIP_ERROR_STRINGS + if (debug_log_callback != nullptr) { + debug_log_callback(s); + } +#endif +} + +#ifdef __cplusplus +} // extern "C" +#endif // __cplusplus diff --git a/tensorflow/lite/micro/cortex_m_generic/debug_log_callback.h b/tensorflow/lite/micro/cortex_m_generic/debug_log_callback.h new file mode 100644 index 0000000..c1afd19 --- /dev/null +++ b/tensorflow/lite/micro/cortex_m_generic/debug_log_callback.h @@ -0,0 +1,49 @@ +/* Copyright 2020 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/ +#ifndef TENSORFLOW_LITE_MICRO_CORTEX_M_GENERIC_DEBUG_LOG_CALLBACK_H_ +#define TENSORFLOW_LITE_MICRO_CORTEX_M_GENERIC_DEBUG_LOG_CALLBACK_H_ + +// The application layer must implement and register a callback before calling +// the network in a way similar to +// +// void debug_log_printf(const char* s) +// { +// printf(s); +// } +// +// int main(void) +// { +// // Register callback for printing debug log +// RegisterDebugLogCallback(debug_log_printf); +// +// // now call the network +// TfLiteStatus invoke_status = interpreter->Invoke(); +// } + +#ifdef __cplusplus +extern "C" { +#endif // __cplusplus + +typedef void (*DebugLogCallback)(const char* s); + +// Registers an application-specific callback for debug logging. It must be +// called before the first call to DebugLog(). +void RegisterDebugLogCallback(DebugLogCallback callback); + +#ifdef __cplusplus +} // extern "C" +#endif // __cplusplus + +#endif // TENSORFLOW_LITE_MICRO_CORTEX_M_GENERIC_DEBUG_LOG_CALLBACK_H_ diff --git a/tensorflow/lite/micro/cortex_m_generic/micro_time.cc b/tensorflow/lite/micro/cortex_m_generic/micro_time.cc new file mode 100644 index 0000000..265bd34 --- /dev/null +++ b/tensorflow/lite/micro/cortex_m_generic/micro_time.cc @@ -0,0 +1,81 @@ +/* Copyright 2022 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/ + +#include "tensorflow/lite/micro/micro_time.h" + +// Set in micro/tools/make/targets/cortex_m_generic_makefile.inc. +// Needed for the DWT and PMU counters. +#ifdef CMSIS_DEVICE_ARM_CORTEX_M_XX_HEADER_FILE +#include CMSIS_DEVICE_ARM_CORTEX_M_XX_HEADER_FILE +#endif + +namespace tflite { + +#if defined(PROJECT_GENERATION) + +// Stub functions for the project_generation target since these will be replaced +// by the target-specific implementation in the overall infrastructure that the +// TFLM project generation will be a part of. +uint32_t ticks_per_second() { return 0; } +uint32_t GetCurrentTimeTicks() { return 0; } + +#else + +uint32_t ticks_per_second() { return 0; } + +uint32_t GetCurrentTimeTicks() { + static bool is_initialized = false; + + if (!is_initialized) { +#if (!defined(TF_LITE_STRIP_ERROR_STRINGS) && !defined(ARMCM0) && \ + !defined(ARMCM0plus)) +#ifdef ARM_MODEL_USE_PMU_COUNTERS + ARM_PMU_Enable(); + DCB->DEMCR |= DCB_DEMCR_TRCENA_Msk; + + ARM_PMU_CYCCNT_Reset(); + ARM_PMU_CNTR_Enable(PMU_CNTENSET_CCNTR_ENABLE_Msk); + +#else +#ifdef ARMCM7 + DWT->LAR = 0xC5ACCE55; +#endif + CoreDebug->DEMCR |= CoreDebug_DEMCR_TRCENA_Msk; + + // Reset and DWT cycle counter. + DWT->CYCCNT = 0; + DWT->CTRL |= 1UL; + +#endif +#endif + + is_initialized = true; + } + +#if (!defined(TF_LITE_STRIP_ERROR_STRINGS) && !defined(ARMCM0) && \ + !defined(ARMCM0plus)) +#ifdef ARM_MODEL_USE_PMU_COUNTERS + return ARM_PMU_Get_CCNTR(); +#else + return DWT->CYCCNT; +#endif +#else + return 0; +#endif +} + +#endif // defined(PROJECT_GENERATION) + +} // namespace tflite diff --git a/tensorflow/lite/micro/debug_log.cc b/tensorflow/lite/micro/debug_log.cc new file mode 100644 index 0000000..46ca253 --- /dev/null +++ b/tensorflow/lite/micro/debug_log.cc @@ -0,0 +1,50 @@ +/* Copyright 2020 The TensorFlow Authors. All Rights Reserved. 
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+==============================================================================*/
+
+// Reference implementation of the DebugLog() function that's required for a
+// platform to support the TensorFlow Lite for Microcontrollers library. This is
+// the only function that's absolutely required to be available on a target
+// device, since it's used for communicating test results back to the host so
+// that we can verify the implementation is working correctly.
+// It's designed to be as easy as possible to supply an implementation though.
+// On platforms that have a POSIX stack or C library, it can be written as a
+// single call to `fprintf(stderr, "%s", s)` to output a string to the error
+// stream of the console, but if there's no OS or C library available, there's
+// almost always an equivalent way to write out a string to some serial
+// interface that can be used instead. For example on Arm M-series MCUs, calling
+// the `bkpt #0xAB` assembler instruction will output the string in r1 to
+// whatever debug serial connection is available. If you're running mbed, you
+// can do the same by creating `Serial pc(USBTX, USBRX)` and then calling
+// `pc.printf("%s", s)`.
+// To add an equivalent function for your own platform, create your own
+// implementation file, and place it in a subfolder named after the OS
+// you're targeting.
For example, see the Cortex M bare metal version in
+// tensorflow/lite/micro/bluepill/debug_log.cc or the mbed one on
+// tensorflow/lite/micro/mbed/debug_log.cc.
+
+#include "tensorflow/lite/micro/debug_log.h"
+
+#ifndef TF_LITE_STRIP_ERROR_STRINGS
+#include <cstdio>
+#endif
+
+extern "C" void DebugLog(const char* s) {
+#ifndef TF_LITE_STRIP_ERROR_STRINGS
+  // Reusing TF_LITE_STRIP_ERROR_STRINGS to disable DebugLog completely to get
+  // maximum reduction in binary size. This is because we have DebugLog calls
+  // via TF_LITE_CHECK that are not stubbed out by TF_LITE_REPORT_ERROR.
+  fprintf(stderr, "%s", s);
+#endif
+} diff --git a/tensorflow/lite/micro/debug_log.h b/tensorflow/lite/micro/debug_log.h new file mode 100644 index 0000000..c2840d0 --- /dev/null +++ b/tensorflow/lite/micro/debug_log.h @@ -0,0 +1,31 @@ +/* Copyright 2018 The TensorFlow Authors. All Rights Reserved.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+==============================================================================*/
+#ifndef TENSORFLOW_LITE_MICRO_DEBUG_LOG_H_
+#define TENSORFLOW_LITE_MICRO_DEBUG_LOG_H_
+
+#ifdef __cplusplus
+extern "C" {
+#endif // __cplusplus
+
+// This function should be implemented by each target platform, and provide a
+// way for strings to be output to some text stream. For more information, see
+// tensorflow/lite/micro/debug_log.cc.
+void DebugLog(const char* s);
+
+#ifdef __cplusplus
+} // extern "C"
+#endif // __cplusplus
+
+#endif // TENSORFLOW_LITE_MICRO_DEBUG_LOG_H_ diff --git a/tensorflow/lite/micro/docs/arm.md b/tensorflow/lite/micro/docs/arm.md new file mode 100644 index 0000000..8f8705e --- /dev/null +++ b/tensorflow/lite/micro/docs/arm.md @@ -0,0 +1,47 @@ +
+* [Arm(R) IP support in Tensorflow Lite for Microcontrollers (TFLM)](#arm-ip)
+  * [Arm(R) Cortex(R)-M processor family](#cortex-m)
+  * [CMSIS-NN optimized library](#cmsis-nn)
+  * [Arm(R) Ethos(TM)-U microNPU family](#ethos-u)
+  * [Arm(R) Corstone(TM)-300 FVP](#corstone-300)
+
+
+# Arm(R) IP support in Tensorflow Lite for Microcontrollers (TFLM)
+
+This doc outlines how to use Arm IP with TFLM. The following sub chapters
+contain more details of the respective IP.
+
+## Arm(R) Cortex(R)-M processor family
+Arm's Cortex-M processor support is fully integrated to TFLM. To build a TFLM
+library for any Cortex-M processor, check out the [Cortex-M generic readme](https://github.com/tensorflow/tflite-micro/tree/main/tensorflow/lite/micro/cortex_m_generic/README.md).
+Additionally, CMSIS-NN provides optimal performance executing machine learning
+workloads on Cortex-M. See the [sub chapter CMSIS-NN](#cmsis-nn).
+
+
+## CMSIS-NN optimized library
+Common Microcontroller Software Interface Standard for Neural Networks
+(CMSIS-NN) is a collection of efficient neural network kernels developed to
+maximize performance on Cortex-M processors. The CMSIS-NN optimized kernels are
+highly integrated to TFLM. For more information on how to utilize these kernels,
+see [CMSIS-NN readme](https://github.com/tensorflow/tflite-micro/tree/main/tensorflow/lite/micro/kernels/cmsis_nn/README.md).
+
+
+## Arm(R) Ethos(TM)-U microNPU family
+The Ethos-U microNPU (Neural Processing Unit) family consists of [Ethos-U55](https://www.arm.com/products/silicon-ip-cpu/ethos/ethos-u55)
+and [Ethos-U65](https://www.arm.com/products/silicon-ip-cpu/ethos/ethos-u65).
+Ethos-U55 is designed to accelerate ML inference in area-constrained embedded +and IoT devices, whereas Ethos-U65 extends its applicability to be used as an +Cortex-M subsystem to a larger Arm Cortex-A, Cortex-R and Neoverse-based system. + +To get started with TFLM and Ethos-U, see the [Ethos-U readme](https://github.com/tensorflow/tflite-micro/tree/main/tensorflow/lite/micro/kernels/ethos_u/README.md). + + +## Arm(R) Corstone(TM)-300 FVP +[Corstone-300](https://developer.arm.com/Processors/Corstone-300) is a hardware +reference design based on the Arm Cortex-M55 processor, which integrates the +Ethos-U55 microNPU. The [Corstone-300 FVP](https://developer.arm.com/tools-and-software/open-source-software/arm-platforms-software/arm-ecosystem-fvps) +(Fixed Virtual Platform) is a model of the hardware which enables execution of +full software stacks ahead of silicon. + +To get started with TFLM and Corstone-300 FVP, see the [Corstone-300 readme](https://github.com/tensorflow/tflite-micro/tree/main/tensorflow/lite/micro/cortex_m_corstone_300/README.md). 
+ diff --git a/tensorflow/lite/micro/docs/images/preallocated_tensors/preallocated_tensors_bg_1.png b/tensorflow/lite/micro/docs/images/preallocated_tensors/preallocated_tensors_bg_1.png new file mode 100644 index 0000000..0bc0dbe Binary files /dev/null and b/tensorflow/lite/micro/docs/images/preallocated_tensors/preallocated_tensors_bg_1.png differ diff --git a/tensorflow/lite/micro/docs/images/preallocated_tensors/preallocated_tensors_bg_2.png b/tensorflow/lite/micro/docs/images/preallocated_tensors/preallocated_tensors_bg_2.png new file mode 100644 index 0000000..25deaf4 Binary files /dev/null and b/tensorflow/lite/micro/docs/images/preallocated_tensors/preallocated_tensors_bg_2.png differ diff --git a/tensorflow/lite/micro/docs/images/preallocated_tensors/preallocated_tensors_impl1.png b/tensorflow/lite/micro/docs/images/preallocated_tensors/preallocated_tensors_impl1.png new file mode 100644 index 0000000..d88b912 Binary files /dev/null and b/tensorflow/lite/micro/docs/images/preallocated_tensors/preallocated_tensors_impl1.png differ diff --git a/tensorflow/lite/micro/docs/images/tflm_continuous_integration_1.png b/tensorflow/lite/micro/docs/images/tflm_continuous_integration_1.png new file mode 100644 index 0000000..acecc0e Binary files /dev/null and b/tensorflow/lite/micro/docs/images/tflm_continuous_integration_1.png differ diff --git a/tensorflow/lite/micro/docs/logging.md b/tensorflow/lite/micro/docs/logging.md new file mode 100644 index 0000000..5d429a1 --- /dev/null +++ b/tensorflow/lite/micro/docs/logging.md @@ -0,0 +1,44 @@ + + + +* [Message Logging in TFLite Micro](#message-logging-in-tflite-micro) + * [To use MicroPrintf in your application code or kernel implementations:](#to-use-microprintf-in-your-application-code-or-kernel-implementations) + * [Include this header file:](#include-this-header-file) + * [Introduce this Bazel BUILD dependency:](#introduce-this-bazel-build-dependency) + * [Example usage:](#example-usage) + * [Do Not 
Use:](#do-not-use) + + + + + +# Message Logging in TFLite Micro + +TFLM currently support `MicroPrintf` to log errors or messages to the terminal. This is a light-weight printf-lite utility available to log messages to the terminal. The `MicroPrintf` calls are designed to be ignored or optimized-out by the compiler, during deployment, if `TF_LITE_STRIP_ERROR_STRINGS` environment flag is set. This is useful to reduce the binary size of the TFLM application. + + +## To use MicroPrintf in your application code or kernel implementations: +### Include this header file: +```c++ +#include "tensorflow/lite/micro/micro_log.h" +``` + +### Introduce this Bazel BUILD dependency: +```c++ +"//tensorflow/lite/micro:micro_log", +``` + +### Example usage: +```c++ +size_t buffer_size = 1024; +... +MicroPrintf("Failed to allocate buffer of size- %d", buffer_size); + +MicroPrintf("TFLM is the best! Bring ML to Embedded targets!"); +``` + +## Do Not Use: +TFLM does not support/recommend the use of `TF_LITE_KERNEL_LOG` and `TF_LITE_REPORT_ERROR` to log errors or messages to the terminal. diff --git a/tensorflow/lite/micro/docs/memory_management.md b/tensorflow/lite/micro/docs/memory_management.md new file mode 100644 index 0000000..0a7fd67 --- /dev/null +++ b/tensorflow/lite/micro/docs/memory_management.md @@ -0,0 +1,218 @@ + + + + + + * [Memory Management in TensorFlow Lite Micro](#memory-management-in-tensorflow-lite-micro) + * [Tensor Arena](#tensor-arena) + * [Head Section](#head-section) + * [Offline planned tensor allocations](#offline-planned-tensor-allocations) + * [Temporary Section](#temporary-section) + * [Tail Section](#tail-section) + * [Recording Memory APIs](#recording-memory-apis) + * [Allocation Section Details](#allocation-section-details) + + + + + +# Memory Management in TensorFlow Lite Micro + +This document outlines how memory is managed internally by TensorFlow Lite Micro +(TFLM) today. 
It outlines the "online" allocation strategy used by the default +TFLM APIs for loading a model into a shared tensor arena. + +## Tensor Arena + +The main "working" space for TFLM allocations is inside a single `char` or +`int8_t` buffer. This buffer can be managed by passing it directly into a +`tflite::MicroInterpreter` constructor or through a `tflite::MicroAllocator` +instance that can be passed into a `tflite::MicroInterpreter` constructor. +Internally, the `tflite::MicroAllocator` classifies allocations into 3 different +sections: + +* **Head** - non-persistent allocations. +* **Temporary** - short term "scoped" allocations. +* **Tail** - persistent allocations. + +The illustration below represents typical allocations in TFLM: + +``` +-------------------------------------------------------------------------------- +| | | | +| HEAD |<-- TEMPORARY -->| TAIL | +| | | | +-------------------------------------------------------------------------------- +* Lowest Address Highest Address * +``` + +### Head Section + +This non-persistent section typically holds shared Tensor buffers. This section +does not allocate small iterative chunks, it can only be set by a specific +length for the entire section. + +This allocation length of this section is managed by the +`tflite::GreedyMemoryPlanner`. That memory planner looks at the entire graph of +a model and tries to reuse as many buffers as possible to create the smallest +length for the head. The Tensor buffers for this section can be accessed via a +`TfLiteEvalTensor` or `TfLiteTensor` instance on the `tflite::MicroInterpreter`. + +#### Offline planned tensor allocations + +All, or a subset of, tensors can be allocated using an offline planner. An +offline planner performs tensor allocation on e.g. a host PC. The offline tensor +allocation plan is added to model metadata. See format below. 
+ +For each non-constant tensor in the `tensors:[Tensor]` list of the subgraph, a +byte offset to the start of the head section of the memory arena is given. -1 +indicates that the tensor will be allocated at runtime by the +`tflite::GreedyMemoryPlanner`. The offline plan is permitted to overlap buffers +if it knows that the data will not be used at the same time. + +The offline tensor allocation plan will be encoded in the `metadata:[Metadata]` +field of the model, using the following encoding: + +| Metadata component | Value | +|-|-| +| name:string | “OfflineMemoryAllocation” | +| buffer:unit | Index of buffer containing offline tensor allocation data | + +The buffer contents for the offline tensor allocation is a list of 32-bit +integers of the following format: + +| Offset | Value | +|-|-| +| 0 | Offline allocation format version | +| 1 | Number of subgraphs | +| 2 | Number offsets following: n | +| 3 | Byte offset of tensor #0 or -1 to allocate at runtime | +| 4 | Byte offset of tensor #1 or -1 to allocate at runtime | +| ... | ... | +| 3+(n-1) | Byte offset of tensor #(n-1) or -1 to allocate at runtime | + +Note that offsets 0 (the version) and 1 (the number of subgraphs) are currently +ignored by the micro memory allocator. In case of multiple subgraphs, it assumes +all tensors for all subgraphs are concatenated: all tensors for the first +subgraph are first, followed by those of the second subgraph, etc. + +The `tflite::GreedyMemoryPlanner` treats the provided offline tensor allocation +plan as constant fixed offset to the start of the head section and will attempt +to fit any other tensors (such as scratch tensors added a runtime using the +`RequestScratchBufferInArena` API of `TfLiteContext`) around those fixed +offsets. + +### Temporary Section + +This section is used to allocate "scoped" or short-term, non-guaranteed buffers. +Allocations from this section start from the current end address of the head +section and grow towards the tail section. 
An allocation chain can be reset (and +must be reset before adjusting the head) and moves the current allocation start +address back to the end of the head section. + +TFLM currently uses these allocations for a scope allocation of large C structs +or scratch memory that is expected to be valid for at least the lifetime of a +method call. This section. + +### Tail Section + +This section holds all persistent allocations used by TFLM. This section +contains many random sized allocations and grows towards the end of the head +section. Allocations in this section come from a variety of areas inside of +TFLM. TFLM provides a [recording API](#Recording-Memory-APIs) to assist with +auditing the contents of this section. + +## Recording Memory APIs + +TFLM provides simple APIs for auditing memory usage in the shared tensor arena. +These APIs are opt-in and require some additional memory overhead and a working +debug logging implementation +[(reference implementation)](https://github.com/tensorflow/tflite-micro/blob/main/tensorflow/lite/micro/debug_log.cc). + +A typical bare-bones TFLM interpreter setup looks as such: + +```c++ +// Buffer for the tensor arena: +size_t tensor_arena_size = 2048; +uint8_t tensor_arena[tensor_arena_size]; + +// Interpreter using the shared tensor arena above: +tflite::MicroInterpreter interpreter( + tflite::GetModel(my_model_data), ops_resolver, + tensor_arena, tensor_arena_size); + +// Invoke one time which will allocate internals: +if (interpreter.Invoke() != kTfLiteOk) { + MicroPrintf("Exception during invoke()!"); +} +``` + +Recording API can simply be used by including the `RecordingMicroInterpreter` +class (`recording_micro_interpreter.h`) and replace `tflite::MicroInterpreter` +with `tflite::RecordingMicroInterpreter`. 
The same call to `invoke()` is +performed, but another call is made to `PrintAllocations()` which will output +detailed allocation logging: + +```c++ +// Add an include to the recording API: +#include "recording_micro_interpreter.h" + +// Simply change the class name from 'MicroInterpreter' to 'RecordingMicroInterpreter': +tflite::RecordingMicroInterpreter interpreter( + tflite::GetModel(my_model_data), ops_resolver, + tensor_arena, tensor_arena_size); + +// Invoke one time which will allocate internals: +if (interpreter.Invoke() != kTfLiteOk) { + MicroPrintf("Exception during invoke()!"); +} + +// Print out detailed allocation information: +interpreter.GetMicroAllocator().PrintAllocations(); +``` + +The output of this call will look something similar to this (output from the +[memory_arena_threshold_test](https://github.com/tensorflow/tflite-micro/blob/main/tensorflow/lite/micro/memory_arena_threshold_test.cc#L205)): + +```bash +[RecordingMicroAllocator] Arena allocation total 9568 bytes +[RecordingMicroAllocator] Arena allocation head 7744 bytes +[RecordingMicroAllocator] Arena allocation tail 1824 bytes +[RecordingMicroAllocator] 'TfLiteEvalTensor data' used 360 bytes with alignment overhead (requested 360 bytes for 15 allocations) +[RecordingMicroAllocator] 'Persistent TfLiteTensor data' used 0 bytes with alignment overhead (requested 0 bytes for 0 tensors) +[RecordingMicroAllocator] 'Persistent TfLiteTensor quantization data' used 0 bytes with alignment overhead (requested 0 bytes for 0 allocations) +[RecordingMicroAllocator] 'TfLiteTensor variable buffer data' used 0 bytes with alignment overhead (requested 0 bytes for 0 allocations) +[RecordingMicroAllocator] 'NodeAndRegistration struct' used 392 bytes with alignment overhead (requested 392 bytes for 7 NodeAndRegistration structs) +[RecordingMicroAllocator] 'Operator runtime data' used 136 bytes with alignment overhead (requested 136 bytes for 5 OpData structs) +``` + +### Allocation Section Details + +More 
information about each recorded allocation section: + +* 'TfLiteEvalTensor data' + * C struct that holds the data type, dimension, and a pointer to the + buffer representing the Tensor. +* 'Persistent TfLiteTensor data' + * C struct that holds more information than a `TfLiteEvalTensor` struct in + the graph. + * Allocations in this bucket will only show up when accessing tensors from + the accessors on `tflite::MicroInterpreter`. +* 'Persistent TfLiteTensor quantization data' + * Length of persistent quantization data assigned to persistent + `TfLiteTensor` structs. + * Allocations in this bucket will only show up when accessing tensors from + the accessors on `tflite::MicroInterpreter`. +* 'TfLiteTensor variable buffer data' + * Length of buffer data from a variable tensor (retains data throughout + calls to `invoke()`). +* 'NodeAndRegistration struct' + * C struct that holds a `TfLiteRegistration` and `TfLiteNode` struct + instance. + * Each operator in a model will contain one `NodeAndRegistration` struct. +* 'Operator runtime data' + * Persistent allocations of data cached by TFLM kernels (e.g. quantization + params, multipliers, etc). 
 diff --git a/tensorflow/lite/micro/docs/new_platform_support.md b/tensorflow/lite/micro/docs/new_platform_support.md new file mode 100644 index 0000000..692d98e --- /dev/null +++ b/tensorflow/lite/micro/docs/new_platform_support.md @@ -0,0 +1,148 @@ +
+
+
+ * [Porting to a new platform](#porting-to-a-new-platform)
+   * [Step 1: Build TFLM Static Library with Reference Kernels](#step-1-build-tflm-static-library-with-reference-kernels)
+   * [Step 2: Customize Logging and Timing Function for your Platform](#step-2-customize-logging-and-timing-function-for-your-platform)
+   * [Step 3: Running the hello_world Example](#step-3-running-the-hello_world-example)
+   * [Step 4: Building and Customizing Additional Examples](#step-4-building-and-customizing-additional-examples)
+   * [Step 5: Integrating Optimized Kernel Implementations](#step-5-integrating-optimized-kernel-implementations)
+ * [Advanced Integration Topics](#advanced-integration-topics)
+ * [Getting Help](#getting-help)
+
+
+
+
+
+# Porting to a new platform
+
+At its core, TFLM is a portable library that can be used on a variety of target
+hardware to run inference on TfLite models.
+
+Integrating TFLM with specific hardware involves tasks that are
+outside the scope of the TFLM project, including:
+
+ * Toolchain setup - TFLM requires support for C++17
+ * Set up and installation of board-specific SDKs and IDEs
+ * Compiler flags and Linker setup
+ * Integrating peripherals such as cameras, microphones and accelerometers to
+   provide the sensor inputs for the ML models.
+
+In this guide we outline our recommended approach for integrating TFLM with a
+new target hardware assuming that you have already set up a development and
+debugging environment for your board independent of TFLM.
+
+
+## Step 1: Build TFLM Static Library with Reference Kernels
+
+Use the TFLM project generation script to create a directory tree containing
+only the sources that are necessary to build the core TFLM library.
+
+```bash
+python3 tensorflow/lite/micro/tools/project_generation/create_tflm_tree.py \
+  -e hello_world \
+  -e micro_speech \
+  -e person_detection \
+  /tmp/tflm-tree
+```
+
+This will create a folder that looks like the following at the top-level:
+```bash
+examples LICENSE tensorflow third_party
+```
+
+All the code in the `tensorflow` and `third_party` folders can be compiled into
+a single static library (for example `libtflm.a`) using your platform-specific
+build system.
+
+TFLM's third party dependencies are separated out in case there is a need to
+have shared libraries for the third party code to avoid symbol collisions.
+
+Note that for IDEs, it might be sufficient to simply include the
+folder created by the TFLM project generation script into the overall IDE tree.
+
+## Step 2: Customize Logging and Timing Function for your Platform
+
+Replace the following files with a version that is specific to your target
+platform:
+
+ * [debug\_log.cc](https://github.com/tensorflow/tflite-micro/blob/main/tensorflow/lite/micro/debug_log.cc)
+ * [micro\_time.cc](https://github.com/tensorflow/tflite-micro/blob/main/tensorflow/lite/micro/micro_time.cc)
+ * [system\_setup.cc](https://github.com/tensorflow/tflite-micro/blob/main/tensorflow/lite/micro/system_setup.cc)
+
+These can be placed anywhere in your directory tree. The only requirement is
+that when linking TFLM into a binary, the implementations of the functions in
+[debug\_log.h](https://github.com/tensorflow/tflite-micro/blob/main/tensorflow/lite/micro/debug_log.h),
+[micro\_time.h](https://github.com/tensorflow/tflite-micro/blob/main/tensorflow/lite/micro/micro_time.h)
+and [system\_setup.h](https://github.com/tensorflow/tflite-micro/blob/main/tensorflow/lite/micro/system_setup.h)
+can be found.
+ +For example, the implementations of these functions for: + * [Sparkfun Edge](https://github.com/advaitjain/tflite-micro-sparkfun-edge-examples/tree/120f68ace95ae3d66963977ac7754acd0c86540d/tensorflow/lite/micro/sparkfun_edge) +is the implementation of these functions for the Sparkfun Edge. + + +## Step 3: Running the hello\_world Example + +Once you have completed step 2, you should be set up to run the `hello_world` +example and see the output over the UART. + +``` +cp -r /tmp/tflm-tree/examples/hello_world +``` +The `hello_world` example should not need any customization and you should be +able to directly build and run it. + +## Step 4: Building and Customizing Additional Examples + +We recommend that you fork the [TFLM examples](https://github.com/tensorflow/tflite-micro/tree/main/tensorflow/lite/micro/examples) +and then modify them as needed (to add support for peripherals etc.) to run on +your target platform. + +## Step 5: Integrating Optimized Kernel Implementations + +TFLM has optimized kernel implementations for a variety of targets that are in +sub-folders of the [kernels directory](https://github.com/tensorflow/tflite-micro/tree/main/tensorflow/lite/micro/kernels). + +It is possible to use the project generation script to create a tree with these +optimized kernel implementations (and associated third party dependencies). + +For example: +``` +python3 tensorflow/lite/micro/tools/project_generation/create_tflm_tree.py \ + -e hello_world -e micro_speech -e person_detection \ + --makefile_options="TARGET=cortex_m_generic OPTIMIZED_KERNEL_DIR=cmsis_nn TARGET_ARCH=project_generation" \ + /tmp/tflm-cmsis +``` + +will create an output tree with all the sources and headers needed to use the +optimized [cmsis\_nn kernels](https://github.com/tensorflow/tflite-micro/tree/main/tensorflow/lite/micro/kernels/cmsis_nn) for Cortex-M platforms. 
+ + +# Advanced Integration Topics + +In order to have tighter coupling between your platform-specific TFLM +integration and the upstream TFLM repository, you might want to consider the +following: + + 1. Set up a GitHub repository for your platform + 1. Nightly sync between TFLM and your platform-specific GitHub repository + 1. Using GitHub actions for CI + +For some pointers on how to set this up, we refer you to the GitHub repositories +that integrated TFLM for the: + * [Arduino](https://github.com/tensorflow/tflite-micro-arduino-examples): supported by the TFLM team + * [Sparkfun Edge](https://github.com/advaitjain/tflite-micro-sparkfun-edge-examples): for demonstration purposes only, not officially supported. + +Once you are set up with continuous integration and the ability to integrate +newer versions of TFLM with your platform, feel free to add a build badge to +TFLM's [Community Supported TFLM Examples](https://github.com/tensorflow/tflite-micro#community-supported-tflm-examples). + +# Getting Help + +[Here are some ways](https://github.com/tensorflow/tflite-micro#getting-help) that you can +reach out to get help. + diff --git a/tensorflow/lite/micro/docs/offline_memory_plan.md b/tensorflow/lite/micro/docs/offline_memory_plan.md new file mode 100644 index 0000000..114ce5c --- /dev/null +++ b/tensorflow/lite/micro/docs/offline_memory_plan.md @@ -0,0 +1,75 @@ + + +* [Offline Memory Plan](#offline-memory-plan) + * [Background and Motivation](#background-and-motivation) + * [Usage](#usage) + + + + + +# Offline Memory Plan Via NonpersistentMemoryPlannerShim + +This doc outline how to use the NonPersistentMemoryPlannerShim class to work +with a external tooling that can plan the offset of each non persistent buffer +for the Model within the TFLM arena. + +This is an experimental feature right now and subjected to change. Comments are +welcome! 
+
+## Background and Motivation
+
+The
+[(memory management page)](memory_management.md#offline-planned-tensor-allocations)
+describes a way to specify the offset of each non persistent buffer in a
+flatbuffer model file. This document describes an alternative that allows the
+offset of each non persistent buffer for the Model within the TFLM arena to be
+specified by a C++ struct. The approach in this document is an early stage
+exploration of what the next version of offline memory planning in TFLM might
+look like.
+
+If the NonPersistentMemoryPlannerShim is used, then the final binary does not
+have any of the symbols associated with the GreedyMemoryPlanner which results in
+a reduced memory footprint.
+
+Additionally, the offline planning of the non-persistent buffers can be used to
+have a more efficient utilization compared to the GreedyMemoryPlanner.
+
+## Usage
+
+The more efficient memory plan above can be represented by the following C++
+struct
+
+```cc
+const struct BufferPlan kOfflineNonPersistentBufferPlan = {
+  .buffer_count = 9,
+  .buffer_plan_entries = {
+    [0] = { .offset = 0 },
+    [1] = { .offset = 400 },
+    [2] = { .offset = 801 },
+    [3] = { .offset = 400 },
+    [4] = { .offset = 811 },
+    [5] = { .offset = 601 },
+    [6] = { .offset = 814 },
+    [7] = { .offset = 601 },
+    [8] = { .offset = 801 },
+  }
+};
+```
+
+Then you can create a NonPersistentBufferPlannerShim and provide it to the
+Interpreter such as below
+
+```cc
+// The arena includes both persistent buffers and non-persistent buffers.
+constexpr int kArenaSize = 2*1048; +uint8_t tensor_arena[kArenaSize]; + +tflite::NonPersistentMemoryPlannerShim planner(&kOfflineNonPersistentBufferPlan); + +tflite::MicroAllocator * allocator = tflite::MicroAllocator::Create( + tensor_arena, arena_size, &planner); + +tflite::MicroInterpreter interpreter(model, op_resolver, allocator); +``` + diff --git a/tensorflow/lite/micro/docs/online_memory_allocation_overview.md b/tensorflow/lite/micro/docs/online_memory_allocation_overview.md new file mode 100644 index 0000000..d7469a1 --- /dev/null +++ b/tensorflow/lite/micro/docs/online_memory_allocation_overview.md @@ -0,0 +1,124 @@ + + +* [Online Memory Allocation Overview in TensorFlow Lite Micro](#online-memory-allocation-overview-in-tensorflow-lite-micro) + * [Arena](#arena) + * [Existing buffers in the flatbuffer](#existing-buffers-in-the-flatbuffer) + * [Model Init Phase](#model-init-phase) + * [Model Prepare Phase](#model-prepare-phase) + * [Finish Model Allocation Phase](#finish-model-allocation-phase) + + + + + +# Online Memory Allocation Overview in TensorFlow Lite Micro + +This document outlines how "online" memory is managed in TensorFlow Lite Micro +(TFLM). + +## Arena + +Online memory planning strategically places allocations in a single `uint8_t` +buffer array. The buffer is split into two main sections: the “head” and the +“tail”. Generally, non-persistent allocations are placed in the “head” and +persistent allocations are placed in the “tail”. More details about the arena +can be [found here](memory_management.md#tensor-arena). + +## Existing buffers in the flatbuffer + +The TFLite flatbuffer model contains a variety of information required to run a +model in TFLite or TFLM. The TFLM online memory planner will walk the main +subgraph and find all tensors required for the model (represented as +`TfLiteTensor` and `TfLiteEvalTensor` C structs at runtime). Persistent tensors +in the flatbuffer (e.g. 
weight tensors) will point at a buffer inlined in the +flatbuffer. These buffers are reused during online memory planning. The +corresponding C structures will point back at the buffer packed into the +flatbuffer. + +## Model Init Phase + +Either through the first call of `MicroInterpreter::Invoke()` or an explicit +call to `MicroInterpreter::AllocateTensors()` the online model allocation will +begin. The `MicroInterpreter` instance will invoke +`MicroAllocator::StartModelAllocation()`. This function will begin pulling data +out of the serialized flatbuffer and begin walking through the main subgraph. + +The method `MicroAllocator::StartModelAllocation()` begins allocation in the +following order: +* Initializes internal state for scratch buffer allocations +* Allocates a list of `TfLiteEvalTensor` C structs based on the number of tensors +in the subgraph. +* Allocations are persistent and stored in the tail section. +* Tensors that reference buffers in the flatbuffer are assigned at this point. +* Allocates a list of `TfLiteRegistration` and `TfLiteNode` C structs for every +operator in the model subgraph +* Allocations are persistent and stored in the +tail section. +* Walks back through the list of subgraph operators and assigns +all C structs with relevant information from the flatbuffer. + +At the conclusion of this phase, the operator kernel implementations are ready +for calls to the `TfLiteRegistration::init()` function. The `MicroInterpreter` +walks through the operator list and invokes all operator implementations that +have this function. Typically, operator implementations return the object to +store in the `user_data` field of a `TfLiteNode` struct. + +## Model Prepare Phase + +After the interpreter has initialized all operator kernels, another pass through +the subgraph is done. This time, each operator implementations that provides a +`TfLiteRegistration::prepare()` function is called. 
This phase in TFLM is used +for kernels to verify capabilities from model information, validate shapes, +allocate any scratch buffers requested (through +`TfLiteContext::GetScratchBuffer()`), and calculate quantization runtime data. + +At this time, operator implementation will request tensor data through the +`TfLiteTensor` C struct. This struct is heavier and contains more information +that operators will need during this phase of initialization. Internally, TFLM +will allocate these instances per request in the temp section. The temp section +is the space between the head and the tail in the arena. During the prepare +phase, nothing is yet been placed in the head section. This extra space between +the head and tail is used to allocate buffers that are available until +`MicroAllocator::ResetTempAllocations()` is called. Additional information +[available here](memory_management.md#temporary-section). + +NOTE: The `TfLiteTensor` struct is only available in TFLM during +`TfLiteRegistration::prepare()`, after this allocation phase tensor data can +only be accessed via a `TfLiteEvalTensor` struct. + +Additionally, at this time each operator implementation may request scratch +buffer requests through `TfLiteContext::RequestScratchBufferInArena()`. These +requests are limited to `kMaxScratchBuffersPerOp` and are stored in an instance +variable for each operator prepare block. All requests are eventually moved to +the head section when the interpreter moves to the next operator. + +After each call to `TfLiteRegistration::prepare()` the `MicroInterpreter` calls +`MicroAllocator::FinishPrepareNodeAllocations()`. This method resets temp +allocations and begins to store all scratch buffer requests inside the head +section of the arena. + +After all operators have been prepared, the `MicroInterpreter` calls +`MicroAllocator::FinishModelAllocation()` to begin finalizing the online memory +plan. 
+ +## Finish Model Allocation Phase + +The last phase of online memory planning is handled in +`MicroAllocator::FinishModelAllocation()`. This function performs the following +tasks + +* Allocates space in the tail for all persistent buffer requests that are + currently in the head. +* Commits Static Memory Plan + * Uses the `GreedyMemoryPlanner` to optimize the non-persistent space in + the head. + * Optimizes for the operator that requires the largest byte-width buffer. + * Allocates pointers in the tail that provide pointers into shared space + and offsets in the head. + * Sets the size of the head based on the result of + `GreedyMemoryPlanner::GetMaxiumMemorySize()`. +* Allocates variable tensor buffers in the tail section. + +Once TFLM has finalized online model allocation, all buffers are prepared and +ready for optimal speed for inference. The system no longer enables operator +implementations to allocate scratch buffers after this point. diff --git a/tensorflow/lite/micro/docs/optimized_kernel_implementations.md b/tensorflow/lite/micro/docs/optimized_kernel_implementations.md new file mode 100644 index 0000000..4a5c81a --- /dev/null +++ b/tensorflow/lite/micro/docs/optimized_kernel_implementations.md @@ -0,0 +1,200 @@ + + + + + + +* [Summary](#summary) +* [High-Level Steps](#high-level-steps) + * [Why not Optimize the Reference Kernels](#why-not-optimize-the-reference-kernels) +* [Software Architecture](#software-architecture) + * [Hardware-specific NN library](#hardware-specific-nn-library) + * [Optimized Kernels](#optimized-kernels) + * [Build System Integration](#build-system-integration) + * [Testing and Continuous Integration](#testing-and-continuous-integration) + + + + + +# Summary + +This guide describes the recommended high-level architecture and steps to add +hardware-specific optimized kernels to TfLite Micro. 
The goal with these optimizations and the process that we recommend for getting
+ * Here is some + [general guidance](https://testing.googleblog.com/2017/09/code-health-providing-context-with.html) + on writing + [good PR descriptions](https://google.github.io/eng-practices/review/developer/cl-descriptions.html) + +## Why Not Optimize the Portable Reference Kernels? + +We would like to explicitly point out (as have others) that the reference kernel +implementations are not performant and there are plenty of opportunities to +speed them up. This is by design and the reference kernels are meant to be a +shared starting point to then be optimized in a target specific optimized kernel +implementation. + +Two previous discussions on this topic are on +[PR #42477](https://github.com/tensorflow/tensorflow/pull/42477) and +[PR #45227](https://github.com/tensorflow/tensorflow/pull/45227) + +Our current point of view on this topic is that while optimizing shared +reference code in a portable manner is attractive, we are making an explicit +choice to not go down that path and instead rely on target-specific optimized +implementations. The TFLM codebase has a growing list of optimized kernel +implementations, and we are investing in making the process of adding new +implementations smoother. + +# Software Architecture + +The optimized kernel architecture is composed of the following three modules: + +1. Hardware-specific NN library +1. Optimized Kernels +1. Build System Integration + +## Hardware-specific NN library + +This library uses knowledge of the hardware and compiler to implement the +underlying operations. Examples of this are +[CMSIS-NN](https://github.com/ARM-software/CMSIS_5/tree/develop/CMSIS/NN) from +ARM and [NNLib](https://github.com/foss-xtensa/nnlib-hifi4) from Cadence. + +The benefits of having this API separation are: + +1. The NN library does not need to follow the style guide of the rest of the + TFLM code. +1. Releases of the NN library can be made independent of TFLM +1. 
The same NN library can be used and tested independent of TFLM. +1. The maintainers of the NN library have full control over the development + process that they would like to follow. + +## Optimized Kernels + +These will be (hopefully thin) wrappers that act as the glue between TFLM and +the NN library. + +The goal here is to delegate as much work as possible to the NN library while +still allowing the two APIs (TFLM and NN library) to be independent of each +other. If there is a performance degradation due to this (for example, +unnecessary memory copies) then we can evaluate those on a case-by-case basis. + +This code will be reviewed and merged in the TFLM github repository and must +follow the development style of the TFLM codebase. + +Some amount of refactoring of the existing code may be needed to ensure that +code is suitably shared between the reference and optimized kernels. There is +currently no fixed recipe for this refactor and we will evaluate on a +case-by-case basis during the PR review. + +For example, to add an optimized implementation for `fully_conntected` for the +Xtensa Fusion F1 the steps were: +* [PR 1](https://github.com/tensorflow/tensorflow/pull/45464): refactor for +reference fallbacks and a baseline latency. +* [PR 2](https://github.com/tensorflow/tensorflow/pull/46242): refactor to share +code between reference and optimized kernels. +* [PR 3](https://github.com/tensorflow/tensorflow/pull/46411): add the code needed +to use the optimized NN lib and document the latency improvement. + +## Build System Integration + +This module is the least defined but we strongly recommend the following: 1. A +single target makefile.inc for all the architectures that you would like to +support along with optional target-specific +[system_setup.cc](../cortex_m_corstone_300/system_setup.cc). 
See +[cortex_m_generic_makefile.inc](../tools/make/targets/cortex_m_generic_makefile.inc) +and [xtensa_makefile.inc](../tools/make/targets/xtensa_makefile.inc) as +examples. + +1. A single `ext_libs.inc` (and associated scripts) that downloads any external + dependencies (including the NN library). For example: + + * [cmsis_nn.inc](../tools/make/ext_libs/cmsis_nn.inc) and + [cmsis_download.sh](../tools/make/ext_libs/cmsis_download.sh) + * [xtensa.inc](../tools/make/ext_libs/xtensa.inc) and + [xtensa_download.sh](../tools/make/ext_libs/xtensa_download.sh) + +1. The optimized kernels will then live in a kernels subdirectory (e.g. + [kernels/cmsis_nn](../kernels/cmsis_nn) and + [kernels/xtensa](../kernels/xtensa)) + +Two development workflows that the TFLM team would like to encourage and +support: + +1. Export static library + headers into target-specific development environment + + * Build a static libtensorflow-microlite.a using the TFLM makefile with: + `make -f tensorflow/lite/micro/tools/make/Makefile TARGET= + OPTIMIZED_KERNEL_DIR= microlite` + * Use the static library and any TFLM headers as part of the overall + application (with its own build system). + +1. Integrate TFLM with IDE: + + * This has historically been done using the TFLM Makefile’s support for + project generation. + + * However, given the learning curve and high-maintenance overhead, we are + moving away from supporting project generation via the Makefile and are + encouraging future IDE integrations to be done outside of the TFLM + Makefiles. + + * The TFLM team is currently working through the details on this topic. + +## Testing and Continuous Integration + +The kernel tests are the primary method of ensuring that the optimized kernel +implementations are accurate. + +Currently, most of the tests require the optimizations to be bit-exact to the +quantized reference implementation. We can revisit this requirement if it ends +up having a high associated cost on the latency. 
+ +We strongly encourage optimized kernel implementations to have an associated +continuous build that runs through all the unit tests and publishes a build +badge to the +[TFLM community supported builds](../README.md#community-supported-builds) +table. Running the units tests once a day is often a good place to start. diff --git a/tensorflow/lite/micro/docs/porting_reference_ops.md b/tensorflow/lite/micro/docs/porting_reference_ops.md new file mode 100644 index 0000000..1ffb3e3 --- /dev/null +++ b/tensorflow/lite/micro/docs/porting_reference_ops.md @@ -0,0 +1,385 @@ + +[small PRs]: https://google.github.io/eng-practices/review/developer/small-cls.html +[Micro Contributing Guidelines]: https://github.com/tensorflow/tflite-micro/blob/main/CONTRIBUTING.md +[Providing Context]: https://testing.googleblog.com/2017/09/code-health-providing-context-with.html +[`ParseOpDataTfLite()`]: https://github.com/tensorflow/tensorflow/blob/d8394a6d774f5e3c02d97f1fc18ff445199db598/tensorflow/lite/core/api/flatbuffer_conversions.cc#L135 +[PR #45307]: https://github.com/tensorflow/tensorflow/pull/45307 +[PR #46021]: https://github.com/tensorflow/tensorflow/pull/46021 +[PR #45311]: https://github.com/tensorflow/tensorflow/pull/45311 +[PR #45457]: https://github.com/tensorflow/tensorflow/pull/45457 +[PR #45646]: https://github.com/tensorflow/tensorflow/pull/45646 +[PR #45647]: https://github.com/tensorflow/tensorflow/pull/45647 +[pre-submit checklist]: https://github.com/tensorflow/tflite-micro/blob/main/CONTRIBUTING.md#before-submitting-your-pr +[reference_ops.h]: https://github.com/tensorflow/tensorflow/blob/92f459e6b917fa5099ef5317d14c5100d33a86f0/tensorflow/lite/kernels/internal/reference/reference_ops.h +[general porting guidelines]: #general-porting-guidelines + +# Porting Reference Ops from Lite to Micro + +This is a guide to porting reference ops from Lite to Micro. 
It explains, +step-by-step, the recommended code changes and the process for submitting them +for review and acceptance. The process results in multiple pull requests, or +PRs. Multiple, [small PRs][] are easier for the project to review and merge. + +The [Micro Contributing Guidelines][] are prerequisite reading. They cover +general code health, maintainability, style, and submission, as well as how to +setup a development environment. This guide contains step-by-step instructions +for the specific task of porting reference ops from Lite to Micro. + + + + * [Porting Reference Ops from Lite to Micro](#porting-reference-ops-from-lite-to-micro) + * [1. Look for a port already in progress](#1-look-for-a-port-already-in-progress) + * [2. Open a GitHub issue to track the port](#2-open-a-github-issue-to-track-the-port) + * [3. Extract Lite's code for parsing op parameters to a function (PR1)](#3-extract-lites-code-for-parsing-op-parameters-to-a-function-pr1) + * [4. Extract the reference for the op to a standalone header (PR2)](#4-extract-the-reference-for-the-op-to-a-standalone-header-pr2) + * [5. 
Port the op from Lite to Micro (PR3)](#5-port-the-op-from-lite-to-micro-pr3) + * [General Guidelines](#general-guidelines) + * [Check each commit for formatting, lint, and unit-test passage](#check-each-commit-for-formatting-lint-and-unit-test-passage) + * [Maintain a 1:1 correspondence between Micro and Lite versions of unit tests](#maintain-a-11-correspondence-between-micro-and-lite-versions-of-unit-tests) + * [Notes](#notes) + * [Frequently Asked Questions](#frequently-asked-questions) + * [Can I use malloc/free or new/delete in my operator code?](#can-i-use-mallocfree-or-newdelete-in-my-operator-code) + * [Can I use static variable allocation in my operator code?](#can-i-use-static-variable-allocation-in-my-operator-code) + * [How do I allocate persistent memory?](#how-do-i-allocate-persistent-memory) + * [When am I allowed to allocate persistent memory?](#when-am-i-allowed-to-allocate-persistent-memory) + * [How do I allocate/use temporary memory?](#how-do-i-allocateuse-temporary-memory) + * [When can I allocate/use temporary memory?](#when-can-i-allocateuse-temporary-memory) + * [Can I resize my input/output tensors?](#can-i-resize-my-inputoutput-tensors) + * [Can I change the shape of tensors in my operator code?](#can-i-change-the-shape-of-tensors-in-my-operator-code) + * [When can I change the shape of tensors in my operator code?](#when-can-i-change-the-shape-of-tensors-in-my-operator-code) + * [Can I modify a TfLiteTensor or TfLiteEvalTensor?](#can-i-modify-a-tflitetensor-or-tfliteevaltensor) + + + + + +## 1. Look for a port already in progress + +Begin by searching the tflite-micro GitHub repository for issues containing the +name of the op under consideration to ensure someone isn't already working on a +port. + +## 2. Open a GitHub issue to track the port + +Open a GitHub issue to announce your intent to port the op, and to begin a +record of your work. Document the entire process of porting the op in this +issue. Link constituent PRs to this issue. 
See the article [Providing +Context][] for background on documenting your work via bug reports. + +## 3. Extract Lite's code for parsing op parameters to a function (PR1) + +Now we begin changing, testing, and submitting code. This step will result in +the first pull request, PR1. + +1. Extract the code for parsing op parameters out of the switch statement in + [`ParseOpDataTfLite()`][] in `lite/core/api/flatbuffer_conversions.cc` into + a standalone function, and call that function from the switch statement. + This standalone function is now available to be called by the Micro op + resolver, which also needs to parse the op parameters, in a future change. + A simple example is [PR #45307][], and a more complicated example is [PR + #46021][]. + +1. Use `clang-format` to make sure the code is properly formatted. + + ```shell + clang-format --style=google -i $(git ls-files -m | grep -E '\.cc|\.h') + ``` + +1. Make sure your code is lint-free. + + ```shell + cpplint.py $(git ls-files -m) + ``` + +1. Create a single commit containing the change. Observe the guidelines for + good commit log messages found in the article [Providing Context][]. + A good example is commit [0664214](https://github.com/tensorflow/tensorflow/pull/45307/commits/0664214792ad2357f6224e7002661894775cb512). + +1. Since this change modifies the op's implementation in Lite, test the change + with the relevant Lite unit tests. + + ```shell + bazel test tensorflow/lite/kernels:all + ``` + +1. Create and submit the PR. Write a [good PR description][], and be sure to + link to the GitHub issue created to document the port. A good example is + [PR #45307][]. + + [good PR description]: https://google.github.io/eng-practices/review/developer/cl-descriptions.html + +## 4. 
Extract the reference for the op to a standalone header (PR2) + +Move the reference implementation of the op in [reference_ops.h][] to a standalone header so that +Micro can include it without including unrelated dependencies via +reference_ops.h. + +A good example is [PR #45311][]. + +1. Copy an existing header from `tensorflow/lite/kernels/internal/reference/` + to `tensorflow/lite/kernels/internal/reference/NEW_OP.H` to create the + boilerplate. Replace `NEW_OP.H` with the name of the new operator. + +1. Move the implementation from + `tensorflow/lite/kernels/internal/reference/reference_ops.h` to + `tensorflow/lite/kernels/internal/reference/NEW_OP.H`. + +1. Add the new header to the build by adding to the library definitions + `reference_base` and `legacy_reference_base` in the file + `tensorflow/lite/kernels/internal/BUILD`. See, for example, + [this change for operator FILL](https://github.com/tensorflow/tensorflow/pull/45311/commits/92f459e6b917fa5099ef5317d14c5100d33a86f0#diff-0b0fc9e1affece3c5a141ee9326f882876b6b958bc8b12a7c01d7540dc04983e). + +1. Use the program `clang-format` to make sure the code is properly formatted. + + ```shell + clang-format --style=google -i $(git ls-files -m | grep -E '\.cc|\.h') + ``` + + Do not clang-format existing code in `BUILD` or `reference_ops.h`. + +1. Make sure your code is lint-free. + + ```shell + cpplint.py $(git ls-files -m) + ``` + + Do not modify code in `BUILD` or `reference_ops.h` to satisfy `cpplint.py`. + +1. Create a single commit containing the change. Observe the guidelines for + good commit log messages found in the article [Providing Context][]. + A good example is commit [92f459e](https://github.com/tensorflow/tensorflow/commit/92f459e6b917fa5099ef5317d14c5100d33a86f0). + +1. Since this change modifies the op's implementation in Lite, test the change + with the relevant Lite unit tests. + + ```shell + bazel test tensorflow/lite/kernels:all + ``` + +1. Create and submit the PR. 
Write a [good PR description][], and be sure to + link to the GitHub issue created to document the port. A good example is + [PR #45311][]. + +## 5. Port the op from Lite to Micro (PR3) + +1. Copy the kernel and test from Lite to Micro. + + In the first commit of this PR, copy the kernel and test from Lite to Micro + without making any modifications and without adding them to the build. + + A good example is commit [a2ca1fd](https://github.com/tensorflow/tensorflow/commit/a2ca1fd7a174438f736c0435dd3e4e618612fdee). + + This copy action is in its own commit in order to create readable, reviewable diffs + when modifications are made in later commits. If the files were copied and + modified in one step, the modifications would not appear as a diff of the Lite + version. Instead, the files would simply appear at the destination path in + their final form. + + +1. Remove Lite-specific code from copies + + In the second commit of this PR, remove the bulk of Lite-specific code from + the files copied to micro in the previous step. + + A good example is commit [a5a87b4](https://github.com/tensorflow/tensorflow/commit/a5a87b420b87a1f832e241db3a5b724207ea700a). + + This bulk-delete action is in its own commit for reasons similar to + those given in the step above: to produce a more readable, reviewable diff in this + step and in the next. Because the files are not yet added to the build, they + need not (and obviously won't) compiler or function. What to delete now as + opposed to deleting in the next commit is somewhat subjective, but make + deletes in order to: + + - Flatten the namespace down to `tflite`. + - Stop resizing output tensors. + - Remove input and output types other than `int8`, `int16`, and `float32`. + - Stop using gmock and gtest. + - etc. + +1. Port the op and the test + + Make the necessary changes to the micro kernel, header, and test to make the op + implementation suitable for micro. Include these in the build. 
+ + This step requires the most creativity, and may receive the most feedback + during review. Maintain good atomicity in your commits. Considering its + scope, this step will consist of more than one commit. A good example is + the changes made in [PR #45647][]. + +1. Use `clang-format` to make sure the code is properly formatted. + + ```shell + $ clang-format --style=google -i $(git ls-files -m | grep -E '\.cc|\.h') + ``` + + Do not clang-format existing code in `BUILD` or `reference_ops.h`. + +1. Make sure the code is lint-free. + + ```shell + $ cpplint.py $(git ls-files -m) + ``` + + Do not modify code in `BUILD` or `reference_ops.h` to satisfy `cpplint.py`. + +1. Make sure the port passes all applicable tests. + + ```shell + $ bazel test tensorflow/lite/micro/kernels:${op}_test + $ bazel test tensorflow/lite/micro/kernels:all + $ make -f tensorflow/lite/micro/tools/make/Makefile test_kernel_${op}_test + $ make -f tensorflow/lite/micro/tools/make/Makefile test + ``` + + See the general [Micro Contributing Guidelines][] for other testing ideas, + including the use of address sanitizers. + +1. Create and submit the PR. Write a [good PR description][], and be sure to + link to the GitHub issue created to document the port. A good example is + [PR #45647][]. + +# General Guidelines + +## Check each commit for formatting, lint, and unit-test passage + +Check each commit against the [pre-submit checklist][] in the micro +Contributing Guidelines. Specifically, make sure your code: + +1. Is formatted with clang-format. +1. Passes a lint check. +1. Passes all unit tests. + + ```shell + $ make -s -j8 -f tensorflow/lite/micro/tools/make/Makefile test + ``` + +CI runs these checks on all PRs, and will hold up your PR if any of these checks fail. + +## Maintain a 1:1 correspondence between Micro and Lite versions of unit tests + +To the extent possible, maintain a 1:1 correspondence between Micro and Lite +versions of unit tests. 
Avoid cleanup of merely stylistic issues, e.g., by +replacing the hardcoded literal `3.40282e+038` with +`std::numeric_limits::max()`. Any changes between the Micro and Lite +versions of a test put a burden on future maintainers to figure out whether the +differences are actually significant or just stylistic. + +# Notes + +* There was discussion of commits vs. PRs in [#45387](https://github.com/tensorflow/tensorflow/issues/45387). + +* [TensorFlow Lite 8-bit quantization specification](https://www.tensorflow.org/lite/performance/quantization_spec) + +# Frequently Asked Questions + +## Can I use malloc/free or new/delete in my operator code? +No. All memory allocation in TensorFlow Lite Micro (TFLM) is done using C++ +stack based automatic allocation, or through specialized TFLM persistent +and temporary allocation methods. + +## Can I use static variable allocation in my operator code? +No. This is due to the call ordering of C++ static constructors being +platform/compiler dependent. + +## How do I allocate persistent memory? +Use `TfLiteContext::AllocatePersistentBuffer` to allocate persistent memory. +Memory allocated by this method will remain valid throughout the lifetime of +the `tflite::MicroInterpreter` instance. + +An example code snippet looks like ([leaky_relu.cc](../kernels/leaky_relu.cc)): +```C++ +void* LeakyReluInit(TfLiteContext* context, const char* buffer, size_t length) { + TFLITE_DCHECK(context->AllocatePersistentBuffer != nullptr); + return context->AllocatePersistentBuffer(context, sizeof(LeakyReluOpData)); +} +``` + +## When am I allowed to allocate persistent memory? +The `TfLiteContext::AllocatePersistentBuffer` method may only be called within +the scope of your operator's `Init` and `Prepare` methods. + +## How do I allocate/use temporary memory? +Use the `TfLiteContext::RequestScratchBufferInArena` and +`TfLiteContext::GetScratchBuffer` methods. 
The temporary memory is shared +between all operators, and is only valid for your operator within the scope +of your operator's `Invoke` method. Do not attempt to use temporary memory +to share data between operator invocations. Temporary memory is to be used +only as pre-allocated storage during the execution scope of your operator's +`Invoke` method. + +An example code snippet looks like ([add_n.cc](../kernels/add_n.cc)): +```C++ +if (output->type == kTfLiteFloat32) { + // Allocate scratch buffer space for pointer to each tensor's data + // and store the scratch buffer index in the node's user_data + int scratch_index; + size_t scratch_size = sizeof(float*) * num_inputs; + TF_LITE_ENSURE_OK(context, context->RequestScratchBufferInArena( + context, scratch_size, &scratch_index)); + node->user_data = + reinterpret_castuser_data)>(scratch_index); + } +``` +And to use the buffer: +```C++ +int scratch_index = + static_cast(reinterpret_cast(node->user_data)); +void* scratch_buffer = context->GetScratchBuffer(context, scratch_index); +``` + +## When can I allocate/use temporary memory? +The `TfLiteContext::RequestScratchBufferInArena` method is available only within +the scope of your operator's `Prepare` method. +The `TfLiteContext::GetScratchBuffer` method is available only within +the scope of your operator's `Invoke` method. + +## Can I resize my input/output tensors? +No. The storage space for each input/output tensor is a fixed, calculated value +determined at the time the TensorFlow Lite (TfLite) model converter is executed. +During the `Init` phase of the `tflite::MicroInterpreter` all tensor storage is +allocated by the `tflite::MicroInterpreter` instance, using the calculated values +of the model converter. +For more information see: [Memory Allocation Overview](online_memory_allocation_overview.md) + +## Can I change the shape of tensors in my operator code? +Yes. The new shape must not exceed the storage space indicated by the old shape. 
+Because tensor shape values may live in memory that is not directly writable +(ex. Flash, EEPROM, ROM), a special method must be called before modification +is attempted. The `tflite::micro::CreateWritableTensorDimsWithCopy` method will +move the tensor shape values to guaranteed persistent writable memory. + +An example code snippet looks like ([l2_pool_2d.cc](../kernels/l2_pool_2d.cc)): +```C++ +// the output variable is a TfLiteTensor* +TfLiteEvalTensor* output_eval = + tflite::micro::GetEvalOutput(context, node, kOutputTensor); +TF_LITE_ENSURE_OK(context, tflite::micro::CreateWritableTensorDimsWithCopy( + context, output, output_eval)); +output->dims->data[kBatchRank] = batches; +output->dims->data[kHeightRank] = out_height; +output->dims->data[kWidthRank] = out_width; +output->dims->data[kChannelRank] = channels_out; +``` + +## When can I change the shape of tensors in my operator code? +Tensor shape values can be modified any time after the +`tflite::micro::CreateWritableTensorDimsWithCopy` method has been called. +This means that tensor shape values can be modified within the scope of +your operator's `Prepare` or `Invoke` methods. +The `tflite::micro::CreateWritableTensorDimsWithCopy` method may +only be called within the scope of your operator's `Prepare` method. + +## Can I modify a `TfLiteTensor` or `TfLiteEvalTensor`? +No. The `tflite::MicroInterpreter` is the owner and manipulator of these data +structures. Your code should not modify these data structures. The only +directly allowed modification of tensors is to change their data values, or +their shape values. + +## How do I fix optimized kernel unit test failures? +Kernel unit tests for all optimizated kernels should pass. By default kernel unit +tests for the newly added op may fail for optimized kernels as they may not have the +correct references. In this case, we should let the optimized kernels fall back +to the newly added reference kernels. 
For example, refer to this [this commit](https://github.com/tensorflow/tflite-micro/pull/1274/commits/d36c9dd598dcbf352f2c60463fd0d4153703a1cd). diff --git a/tensorflow/lite/micro/docs/profiling.md b/tensorflow/lite/micro/docs/profiling.md new file mode 100644 index 0000000..16c6b51 --- /dev/null +++ b/tensorflow/lite/micro/docs/profiling.md @@ -0,0 +1,47 @@ + + + + + + * [Profiling](#profiling) + * [API](#api) + * [Per-Op Profiling](#per-op-profiling) + * [Subroutine Profiling](#subroutine-profiling) + + + + + +# Profiling + +This doc outlines how to use the TFLite Micro profiler to gather information +about per-op invoke duration and to use the profiler to identify bottlenecks +from within operator kernels and other TFLite Micro routines. + +## API + +The MicroInterpreter class constructor contains an optional profiler argument. +This profiler must be an instance of the tflite::Profiler class, and should +implement the BeginEvent and EndEvent methods. There is a default implementation +in tensorflow/lite/micro/micro_profiler.cc which can be used for most purposes. + +The best practice for profiling across multiple invocations is to reset or call +`ClearEvents()` in between invocations. + +## Per-Op Profiling + +There is a feature in the MicroInterpreter to enable per-op profiling. To enable +this, provide a MicroProfiler to the MicroInterpreter's constructor then build +with a non-release build to disable the NDEBUG define surrounding the +ScopedOperatorProfile within the MicroInterpreter. + +## Subroutine Profiling + +In order to further dig into performance of specific routines, the MicroProfiler +can be used directly from the TFLiteContext or a new MicroProfiler can be +created if the TFLiteContext is not available where the profiling needs to +happen. The MicroProfiler's BeginEvent and EndEvent can be called directly, or +wrapped using a [ScopedProfile](../../lite/core/api/profiler.h). 
diff --git a/tensorflow/lite/micro/docs/qemu.md b/tensorflow/lite/micro/docs/qemu.md
new file mode 100644
index 0000000..a8b51db
--- /dev/null
+++ b/tensorflow/lite/micro/docs/qemu.md
@@ -0,0 +1,38 @@
+
+ * [Installation](#installation)
+ * [Software Emulation with QEMU](#software-emulation-with-qemu)
+ * [Running Unit Tests](#running-unit-tests)
+ * [Useful External Links for QEMU](#useful-external-links-for-qemu)
+
+
+
+
+
+# Installation
+Our test scripts assume that the non static `user` mode installation of QEMU is
+available in the PATH. For example, if using QEMU for ARM testing, please make
+sure `qemu-arm` is installed and available to the test scripts.
+
+You can use `ci/install_qemu.sh` to download, build and install the version of
+qemu that is used as part of the CI.
+
+# Software Emulation with QEMU
+TensorFlow Lite Micro makes use of [QEMU](https://qemu.org)
+for testing cross compiled tests.
+
+QEMU can quickly test unit tests that are cross compiled for non x86\_64
+hardware.
+
+# Running Unit Tests
+All unit tests can be run using
+`tensorflow/lite/micro/tools/ci_build/test_cortex_m_qemu.sh` for the cortex-m
+processor.
+
+# Useful External Links for QEMU
+The current QEMU implementation uses `user` mode. The documentation for [user
+mode is here](https://www.qemu.org/docs/master/user/index.html).
+
+QEMU uses ARM
+[semihosting](https://github.com/ARM-software/abi-aa/blob/main/semihosting/semihosting.rst)
+to replace newlib system calls for specific boards with the host OS. Further
+documentation on how this works is contained in `cortex_m_qemu_makefile.inc`.
diff --git a/tensorflow/lite/micro/docs/renode.md b/tensorflow/lite/micro/docs/renode.md new file mode 100644 index 0000000..f99b751 --- /dev/null +++ b/tensorflow/lite/micro/docs/renode.md @@ -0,0 +1,139 @@ + + + + + + * [Software Emulation with Renode](#software-emulation-with-renode) + * [Installation](#installation) + * [Running Unit Tests](#running-unit-tests) + * [Under the hood of the Testing Infrastructure](#under-the-hood-of-the-testing-infrastructure) + * [Running a non-test Binary with Renode](#running-a-non-test-binary-with-renode) + * [Useful External Links for Renode and Robot Documentation](#useful-external-links-for-renode-and-robot-documentation) + + + + + +# Software Emulation with Renode + +TensorFlow Lite Micro makes use of [Renode](https://github.com/renode/renode) to +for software emulation. + +Here, we document how Renode is used as part of the TFLM project. For more +general use of Renode, please refer to the [Renode +documentation](https://renode.readthedocs.io/en/latest/). + +You can also read more about Renode from a [publicly available slide deck](https://docs.google.com/presentation/d/1j0gjI4pVkgF9CWvxaxr5XuCKakEB25YX2n-iFxlYKnE/edit). + +# Installation + +Renode can be installed and used in a variety of ways, as documented in the +[Renode README](https://github.com/renode/renode/blob/master/README.rst#installation/). For the purpose of Tensorflow +Lite Micro, we make use of the portable version for Linux. + +Portable renode will be automatically installed when using the TfLite Micro +Makefile to `tensorflow/lite/micro/tools/make/downloads/renode`. + +The Makefile internally calls the `renode_download.sh` script: + +``` +tensorflow/lite/micro/tools/make/renode_download.sh tensorflow/lite/micro/tools/make/downloads +``` + +# Running Unit Tests + +All the tests for a specific platform (e.g. 
bluepill) can be run with:
+
+```
+make -f tensorflow/lite/micro/tools/make/Makefile TARGET=bluepill test
+```
+
+ * This makes use of the robot framework from Renode.
+ * Note that the tests can currently not be run in parallel.
+ * It takes about 25 seconds to complete all tests, including around 3 seconds
+   for suite startup/teardown and an average of 0.38 seconds per test.
+
+## Under the hood of the Testing Infrastructure
+
+Describe how we wait for a particular string on the UART. Some pointers into the
+robot files as well as any relevant documentation from Renode.
+
+A test failure is the absence of a specific string on the UART so the test will
+wait for a specific timeout period (configured in the .robot file) before
+failing.
+
+ * What this means in practice is that a failing test will take longer to finish
+   than a test that passes.
+
+ * If needed, an optimization on this would be to have a specific failure
+   message as well so that both success and failure can be detected quickly.
+
+# Running a non-test Binary with Renode
+
+Renode can also be used to run and debug binaries interactively.
For example,
+to debug `kernel_add_test` on Bluepill platform, run Renode:
+
+```
+tensorflow/lite/micro/tools/make/downloads/renode/renode
+```
+and issue the following commands:
+```
+# Create platform
+include @tensorflow/lite/micro/testing/bluepill_nontest.resc
+# Load ELF file
+sysbus LoadELF @gen/bluepill_x86_64_default/bin/kernel_add_test
+# Start simulation
+start
+
+# To run again:
+Clear
+include @tensorflow/lite/micro/testing/bluepill_nontest.resc
+sysbus LoadELF @gen/bluepill_cortex-m3_default/bin/keyword_benchmark
+start
+
+```
+
+To make repeat runs a bit easier, you can put all the commands into a
+single line (up arrow will show the last command in the Renode terminal):
+```
+Clear; include @tensorflow/lite/micro/testing/bluepill_nontest.resc; sysbus LoadELF @gen/bluepill_x86_64_default/bin/kernel_add_test; start
+```
+
+You can also connect GDB to the simulation.
+To do that, start the GDB server in Renode before issuing the `start` command:
+```
+machine StartGdbServer 3333
+```
+Then you can connect from GDB with:
+```
+target remote localhost:3333
+```
+
+For further reference please see the [Renode documentation](https://renode.readthedocs.io/en/latest/).
+
+# Useful External Links for Renode and Robot Documentation
+
+ * [Testing with Renode](https://renode.readthedocs.io/en/latest/introduction/testing.html?highlight=robot#running-the-robot-test-script)
+
+ * [Robot Testing Framework on Github](https://github.com/robotframework/robotframework). For someone new to
+   the Robot Framework, the documentation can be a bit hard to navigate, so
+   here are some links that are relevant to the use of the Robot Framework with
+   Renode for TFLM:
+
+   * [Creating Test Data](http://robotframework.org/robotframework/latest/RobotFrameworkUserGuide.html#creating-test-data)
+     section of the user guide.
+
+ * Renode-specific additions to the Robot test description format are in the
+   [RobotFrameworkEngine directory](https://github.com/renode/renode/tree/master/src/Renode/RobotFrameworkEngine). For example,
+
+   * [Start Emulation](https://github.com/renode/renode/blob/master/src/Renode/RobotFrameworkEngine/RenodeKeywords.cs#L41-L42)
+   * [Wait For Line On Uart](https://github.com/renode/renode/blob/master/src/Renode/RobotFrameworkEngine/UartKeywords.cs#L62-L63)
+     is where `Wait For Line On Uart` is defined.
+
+ * Some documentation for all the [Standard Libraries](http://robotframework.org/robotframework/#standard-libraries)
+   that define commands such as:
+
+   * [Remove File](http://robotframework.org/robotframework/latest/libraries/OperatingSystem.html#Remove%20File)
+   * [List Files In Directory](https://robotframework.org/robotframework/latest/libraries/OperatingSystem.html#List%20Files%20In%20Directory)
diff --git a/tensorflow/lite/micro/docs/resource_variables.md b/tensorflow/lite/micro/docs/resource_variables.md
new file mode 100644
index 0000000..912df21
--- /dev/null
+++ b/tensorflow/lite/micro/docs/resource_variables.md
@@ -0,0 +1,49 @@
+
+
+
+
+* [Resource Variables](#resource-variables)
+  * [API](#api)
+  * [Lifecycle](#lifecycle)
+
+
+
+
+
+# Resource Variables
+
+This doc outlines how to use the TFLite Micro Resource Variables class to use
+the VAR_HANDLE, ASSIGN_VARIABLE and READ_VARIABLE operators. This feature is
+optional in order to prevent binary bloat on resource constrained systems.
+
+## API
+
+The MicroResourceVariables factory method takes a MicroAllocator and an int
+indicating the number of resource variables to support. This allows the
+application to choose the correct number of variables based on the model.
+
+## Lifecycle
+
+When the ResourceVariables class is created in the application, it contains an
+array of N ResourceVariable handles. The index into this array is the Resource
+ID.
+ +On the first call to Prepare in the VAR_HANDLE op, a new resource ID is reserved +and the resource ID value is referenced from within the output tensor of +VAR_HANDLE. On the first call to Prepare in ASSIGN_VARIABLE, the specified ID +found in the input index tensor is updated based on the size of the input value +tensor, and its resource buffer is allocated. + +Future invocations of READ_VARIABLE and ASSIGN_VARIABLE read and write to and +from the allocated resource buffer. + +The lifecycle must follow the pattern: +VAR_HANDLE Prepare() -> ASSIGN_VARIABLE Prepare() -> Other calls + +Note that VAR_HANDLE Prepare() and ASSIGN_VARIABLE Prepare() may be called more +that once, across multiple subgraphs. Only the first call to each will generate +a new resource ID or allocate a resource buffer. diff --git a/tensorflow/lite/micro/docs/rfc/001_preallocated_tensors.md b/tensorflow/lite/micro/docs/rfc/001_preallocated_tensors.md new file mode 100644 index 0000000..435b526 --- /dev/null +++ b/tensorflow/lite/micro/docs/rfc/001_preallocated_tensors.md @@ -0,0 +1,162 @@ + + + + +* [Pre-allocated tensors](#pre-allocated-tensors) + * [Background](#background) + * [Current status](#current-status) + * [Proposed implementation](#proposed-implementation) + * [Performance overview](#performance-overview) + * [Cycle aspect](#cycle-aspect) + * [Memory aspect](#memory-aspect) + + + + +# Pre-allocated tensors + +## Background + +Tensors are allocated differently depending on the type of tensor. Weight +tensors are located in the flatbuffer, which is allocated by the application +that calls TensorFlow Lite Micro. 
EvalTensors are allocated in the tensor arena,
+either offline planned as specified in the flatbuffers metadata (described in
+this
+[RFC](https://docs.google.com/document/d/16aTSHL5wxsq99t6adVbBz1U3K8Y5tBDAvs16iroZDEU)),
+or allocated during runtime by the
+[memory planner](https://github.com/tensorflow/tflite-micro/tree/main/tensorflow/lite/micro/memory_planner)
+(online planned), see
+[RFC](https://docs.google.com/document/d/1akpqu0uiPQshmCrnV6dOEFgYM4tCCnI8Zce85PnjHMI).
+The tensor arena is allocated by MicroAllocator in TensorFlow Lite Micro, and
+the model buffer (represented by a .tflite-file) is allocated by the application
+using TensorFlow Lite Micro. An illustration of this can be seen in the image
+below.
+
+![Image of two blocks](../images/preallocated_tensors/preallocated_tensors_bg_1.png)
+
+In some use cases it could be advantageous to place some of the EvalTensors
+outside of the tensor arena, for example: * When sensor output data is stored in
+its own defined buffer, outside the tensor arena, and therefore needs to be
+copied into the tensor arena before inference. * When the tensor is to be
+consumed from a memory location outside the tensor arena, e.g. a separate memory
+bank DSP. \
+Details regarding the impact on the number of clock cycles and memory
+consumption can be found under “Performance overview”. In this RFC we present an
+option to allow an application to provide pre-allocated buffers to TensorFlow
+Lite Micro for selected tensors. An illustration of the resulting memory layout
+with pre-allocated tensors can be seen in the figure below.
+
+![Image of three blocks](../images/preallocated_tensors/preallocated_tensors_bg_2.png)
+
+## Current status
+
+The purpose of pre-allocating tensors is to reduce the number of clock cycles,
+and our initial motivation for this feature was that avoiding the copying of the
+buffer described in the Background section would reduce the number of cycles
+consumed by the application.
+
+Our second motivation was that by using a buffer outside of the memory arena,
+there was an opportunity to significantly reduce the required size of the memory
+arena.
+
+An initial investigation into these matters, using the person detection model as
+an example, indicates that the performance gain might not be very significant in
+many use cases. The reduction in the number of clock cycles looks to be ~1%.
+Details regarding this can be found in the Performance overview section.
+
+The reduction in the size of the memory arena is not straightforward to
+estimate. As described in the Performance overview section, it depends on the
+size of other tensors in the network. In the worst case scenario it might not
+reduce the memory arena size at all. If the pre allocated buffer is much larger
+than the second largest buffer, then the reduction in size may be significant.
+
+Therefore, our current position is that the performance gain expected from pre
+allocating the tensors does not motivate the increased complexity that this
+feature would introduce to the TensorFlow Lite Micro framework.
+
+## Proposed implementation
+
+MicroAllocator initializes all tensors to nullptr, and during the allocation
+process only allocates the tensors whose data field is nullptr. The application
+tells the MicroInterpreter which tensor is preallocated, and supplies a memory
+buffer using the RegisterPreallocatedTensor() function.
+
+The MicroInterpreter then assigns the pre-allocated buffer to the tensor
+data-field. If the tensor in question is marked as offline planned, as described
+in this [RFC](https://docs.google.com/document/d/16aTSHL5wxsq99t6adVbBz1U3K8Y5tBDAvs16iroZDEU),
+the MicroInterpreter should not pre-allocate it, and instead return an error.
+
+If multiple tensors are to be pre-allocated, multiple calls to
+RegisterPreallocatedTensor() are required. An example can be seen in the MSC
+below.
+ +![MSC](../images/preallocated_tensors/preallocated_tensors_impl1.png) + +## Performance overview + +### Cycle aspect + +In this section we try to estimate the number of clock cycles one memcpy() takes +in relation to the total inference time for the person_detection model. The +reason for looking closer at this model is that it has a relatively large input +data size, which should make the cycle consumption of a memcpy() relatively +large. Please note that these numbers are approximate and based on calculations, +not actual benchmarking numbers. + +A word aligned memcpy() consumes somewhere between 1 - 4 bytes per cycle +depending on which CPU is used. The input size for the person_detection model +is 96x96 = 9216 bytes. On a reference system without accelerators one memcpy() +of 9216 bytes corresponds to, in order of magnitudes, ~0.01% of the total amount +of clock cycles for one inference. The ratio will differ depending on the input +size and the number of inferences/second. + +When using an accelerator, the total inference time will be significantly less +which means that the memcpy()-call will consume a larger part of the total +inference time. Approximations show that one memcpy() of 9216 bytes will consume +~1% of the total execution time for a reference system utilizing an ML HW +accelerator. + +### Memory aspect + +In this section we'll look at memory savings aspects of pre-allocating tensors +outside the tensor arena. The default memory planner in TFLu is +[GreedyPlanner](https://github.com/tensorflow/tflite-micro/blob/main/tensorflow/lite/micro/memory_planner/greedy_memory_planner.h) +(see +[RFC](https://docs.google.com/document/d/1akpqu0uiPQshmCrnV6dOEFgYM4tCCnI8Zce85PnjHMI)). +One good tool for understanding tensor layout in the tensor arena is using +[PrintMemoryPlan API](https://github.com/tensorflow/tflite-micro/blob/73c5fa4d2bfbfd974552957818de2ab18ff42f39/tensorflow/lite/micro/memory_planner/greedy_memory_planner.h#L84). 
+If we print the calculated memory layout for the +[person detection model](https://storage.googleapis.com/download.tensorflow.org/data/tf_lite_micro_person_data_int8_grayscale_2020_06_23.zip), +the tensor arena looks like this at each layer: + +`Layer 1: +00000000000000000000000000tttttttttttttt........................................ +Layer 2: +00000000000000000000000000...........................999999999999999999999999999 +Layer 3: +aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa999999999999999999999999999 +Layer 4: +aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaabbbbbbbbbbbbb.............. +Layer 5: +cccccccccccccccccccccccccc...........................bbbbbbbbbbbbb.............. +Layer 6: +ccccccccccccccccccccccccccddddddddddddddddddddddddddd...........................` + +The horizontal axis shows offset from the start of the tensor arena. The +vertical axis shows execution order. The dots are "unused" memory for that +specific layer. The letters and numbers represent the EvalTensor index, mapped +to 0-9, then a-z. 't' is the input tensor of layer 1 (equivalent to the input +data to the model) and '0' is the output tensor of layer 1. Hence, '0' is also +the input tensor to layer 2, and '9' is the output tensor of layer 2. And so on. + +The reason for showing this illustration is that it becomes obvious that it is +**the largest combination of simultaneously used tensors, of your model, that +defines how large the tensor arena needs to be.** In this example, it's Layer 3. + +The combined size of tensors 'a' and '9' defines the size needed for the tensors +arena. As a consequence, to save tensor arena memory by pre-allocation, we must +start by pre-allocating tensor 'a' or '9' outside the arena. 
This will make the +total size of the tensor arena smaller, which will reduce the total memory +footprint of TensorFlow Lite Micro if the pre-allocated tensor is already +allocated outside of the memory arena, like in the examples given in the +Background section. diff --git a/tensorflow/lite/micro/docs/rfc/002_16x8_quantization_port.md b/tensorflow/lite/micro/docs/rfc/002_16x8_quantization_port.md new file mode 100644 index 0000000..4fd369b --- /dev/null +++ b/tensorflow/lite/micro/docs/rfc/002_16x8_quantization_port.md @@ -0,0 +1,156 @@ + +# TensorFlow Lite for Microcontrollers Port of 16x8 Quantized Operators + +| Status | Proposed | +:-------------- |:----------------------------------------------------------- | +| **RFC #2** | [46767](https://github.com/tensorflow/tensorflow/pull/46767)| +| **Author(s)** | Daniel Situnayake (dan@edgeimpulse.com) | +| **Sponsor** | Pete Warden (petewarden@google.com) | +| **Updated** | 2021-01-28 | + +## Objective + +TensorFlow Lite has kernel implementations that support 8 bit quantized weights +but use 16 bit activations. We wish to port these implementations to TensorFlow +Lite for Microcontrollers. The increased precision available for activations can +improve performance for some quantized models. + +Arm have agreed to support the initiative by adding the necessary 16x8 APIs to +CMSIS-NN and porting the CMSIS-NN kernels. + +### Goals +- Port a subset of 16x8 reference kernels from TensorFlow Lite to TensorFlow Lite Micro +- Avoid increasing default code size or arena size of TensorFlow Lite Micro +- Lay the groundwork for creating a CMSIS-NN port of the 16x8 kernels + +### Non-goals +- Port every single operator to 16x8; we only plan to port a subset of those with existing reference implementations + +## Motivation + +Some networks that suffer unacceptable degradation when quantized with 8 bit weights +and 8 bit activations perform adequately when quantized with 8 bit weights and 16 +bit activations. 
The [TensorFlow Lite documentation](https://www.tensorflow.org/lite/performance/post_training_integer_quant_16x8) states the following: + +> [16x8 quantization] mode can improve accuracy of the quantized model significantly, when activations are sensitive to the quantization, while still achieving almost 3-4x reduction in model size. Moreover, this fully quantized model can be consumed by integer-only hardware accelerators. + +Edge Impulse, a company that deploys TensorFlow Lite for Microcontrollers as part of its embedded +machine learning pipeline, has gathered feedback from customers with production models for which 8 bit +quantization results in unacceptable degradation but for whom 16x8 is fine. + +While 16x8 quantization is well supported within TensorFlow Lite, it is not currently supported +within TensorFlow Lite for Microcontrollers. Porting the TensorFlow Lite reference kernels is +relatively straightforward and will improve adoption of TensorFlow Lite for Microcontrollers with users +for whom degradation is too severe with full 8 bit quantization. + +## User Benefit + +The headline would be "16x8 kernels improve accuracy for quantized models on microcontrollers without +increasing model size". + +Users would benefit in the following ways: + +- Improved accuracy for quantized models without increasing model size (in exchange for additional + runtime memory usage) +- Improved performance under certain conditions (for example, 16x8 CMSIS-NN kernels will run faster) + than 8 bit kernels since less unpacking is required) + +## Design Proposal + +We propose that the 16x8 kernels are ported from the TensorFlow Lite reference kernels to +TensorFlow Lite for Microcontrollers following the process in the [Porting TensorFlow Lite Ops to Micro](https://docs.google.com/document/d/1KLJTPWm4TUKB9YyIqFJl9VCP0ZMJDt_P8RNpRmwqMxw/edit#heading=h.5x0d5h95i329) +guide. 
+ +We wish to ensure that the following kernels are compatible with 16x8 mode: + +- Conv2D +- MaxPool2D +- DepthwiseConv2D +- FullyConnected +- Relu +- Relu6 +- Tanh +- Softmax +- Pad +- Reshape +- Pack +- Unpack +- Add +- Mul + +Adding the 16x8 kernels directly to TFLM alongside the existing kernels would increase the default code size by an unacceptable amount. Instead, we will make use of the kernel registration API currently under development by the TFLM team. The use of this is demonstrated in the +[Keyword benchmark code](https://github.com/tensorflow/tensorflow/blob/a30d20b632b4ffbfd437ccf8ee205fef0917a3eb/tensorflow/lite/micro/benchmarks/keyword_benchmark.cc#L56). +By doing this, the end user can decide which kernels and dependencies they want to include (e.g. 8 bit, 16x8, +or float32). + +For example, the following could be registered: + +``` +// Support for all datatypes +op_resolver->AddFullyConnected(tflite::Register_FULLY_CONNECTED); +// Support for 8 bit quantized models +op_resolver->AddFullyConnected(tflite::Register_FULLY_CONNECTED_INT8); +// Support for 16x8 quantized models +op_resolver->AddFullyConnected(tflite::Register_FULLY_CONNECTED_INT16X8()); +``` + +This means that kernels not currently using this registration API will need to be refactored to use it. Currently only **FullyConnected** uses the API. + +The following associated tasks will be required to support this work: + +- Build or port unit tests for the new kernels +- Prove that code memory is not impacted by running benchmarks before and after the port + +### Alternatives Considered +* An alternative would be to add the 16x8 kernels without using the new kernel registration API, but this would + result in a major increase in code size. + +### Performance Implications +- Impact on memory usage for current modes (int8 and float32) will be minimal. This will be confirmed by + benchmarking of current performance against performance of the submitted changes. 
+- When 16x8 mode is used, RAM usage will be approximately 2x. Latency may change depending on the target + platform. +- End to end and unit tests will be updated to prove that the new implementations are operating correctly. + +### Dependencies +- No additional dependencies will be added to TensorFlow +- No other parts of TensorFlow will be affected + +### Engineering Impact +- Impact on binary size should be minimal +- Test times may increase due to additional kernel unit tests +- The reference kernels already exist within TensorFlow Lite so there will be minimal additional maintenance + +### Platforms and Environments +- The proposed changes will work on all currently supported platforms + +### Best Practices +- TensorFlow Lite for Microcontrollers should be updated to indicate that 16x8 kernels are now available + +### Tutorials and Examples +- A benchmark will be added to [`tensorflow/lite/micro/benchmarks`](https://github.com/tensorflow/tensorflow/tree/975335bc83bf3cb80a71a04ed407725508709808/tensorflow/lite/micro/benchmarks) that demonstrates the use of the ops that provide a 16x8 kernel. +- A Colab will be created that demonstrates quantizing a model in 16x8 mode and exporting it as a C header file for use with TensorFlow Lite for Microcontrollers + +### Compatibility +- This work will improve compatibility and feature parity between TensorFlow Lite and TensorFlow Lite for Microcontrollers + +### User Impact +- Since TFLM does not have a versioning system the feature can be rolled out as any other commit + +## Implementation plan + +The work will be broken down into a series of pull requests, some for the benchmarks and some for each kernel. + +Benchmark pull requests: +- PR1: Create a new benchmark in [`tensorflow/lite/micro/benchmarks`](https://github.com/tensorflow/tensorflow/tree/975335bc83bf3cb80a71a04ed407725508709808/tensorflow/lite/micro/benchmarks) that attempts to run a 16x8 model that includes the kernels mentioned in this RFC. 
The model’s weights and biases can be random. The benchmark should use the MicroMutableOpResolver. The PR should include the Colab used to generate the model. +- PR2: Port the person_detection and keyword benchmarks to use the MicroMutableOpResolver. +- PR3: Add code to both benchmarks that prints the arena size using the [`RecordingMemoryAllocator`](https://github.com/tensorflow/tensorflow/blob/ee87d58a6504375c28f21ea303f0eefa29118c38/tensorflow/lite/micro/docs/memory_management.md#recording-memory-apis). + +For each kernel: +- PR1: Refactor the implementation to support the new kernel variant registration API. +- PR2: Add 16x8 support and make sure that the benchmark binary and arena sizes are unchanged. + +Note that @njeffrie from the TF Lite Micro team also plans to prepare PR(s) for the kernels that are of interest internally +(without using the kernel variant registation API for binary size). This will provide some quick examples of porting the kernels. + +## Questions and Discussion Topics diff --git a/tensorflow/lite/micro/examples/hello_world/BUILD b/tensorflow/lite/micro/examples/hello_world/BUILD new file mode 100644 index 0000000..f2b41b3 --- /dev/null +++ b/tensorflow/lite/micro/examples/hello_world/BUILD @@ -0,0 +1,83 @@ +# Description: +# TensorFlow Lite for Microcontrollers "hello world" example. 
+load("@tflm_pip_deps//:requirements.bzl", "requirement") +load( + "//tensorflow/lite/micro:build_def.bzl", + "micro_copts", +) + +package( + # Disabling layering_check because of http://b/177257332 + features = ["-layering_check"], + licenses = ["notice"], +) + +cc_library( + name = "model", + srcs = [ + "//tensorflow/lite/micro/examples/hello_world/models:generated_hello_world_float_model_cc", + "//tensorflow/lite/micro/examples/hello_world/models:generated_hello_world_int8_model_cc", + ], + hdrs = [ + "//tensorflow/lite/micro/examples/hello_world/models:generated_hello_world_float_model_hdr", + "//tensorflow/lite/micro/examples/hello_world/models:generated_hello_world_int8_model_hdr", + ], + copts = micro_copts(), +) + +cc_test( + name = "hello_world_test", + srcs = [ + "hello_world_test.cc", + ], + deps = [ + ":model", + "//tensorflow/lite/micro:micro_framework", + "//tensorflow/lite/micro:micro_log", + "//tensorflow/lite/micro:micro_profiler", + "//tensorflow/lite/micro:op_resolvers", + "//tensorflow/lite/micro:recording_allocators", + "//tensorflow/lite/micro/testing:micro_test", + "//tensorflow/lite/schema:schema_fbs", + ], +) + +py_binary( + name = "evaluate", + srcs = ["evaluate.py"], + data = ["//tensorflow/lite/micro/examples/hello_world/models:hello_world_float.tflite"], + python_version = "PY3", + srcs_version = "PY3", + deps = [ + "@absl_py//absl:app", + "@absl_py//absl/flags", + "@absl_py//absl/logging", + requirement("numpy"), + requirement("tensorflow-cpu"), + "//python/tflite_micro:runtime", + ], +) + +py_binary( + name = "evaluate_test", + srcs = ["evaluate_test.py"], + data = [ + "//tensorflow/lite/micro/examples/hello_world/models:hello_world_float.tflite", + "//tensorflow/lite/micro/examples/hello_world/models:hello_world_int8.tflite", + ], + python_version = "PY3", + srcs_version = "PY3", + deps = [ + ":evaluate", + ], +) + +py_binary( + name = "train", + srcs = ["train.py"], + srcs_version = "PY3", + deps = [ + requirement("numpy"), + 
requirement("tensorflow-cpu"), + ], +) diff --git a/tensorflow/lite/micro/examples/hello_world/Makefile.inc b/tensorflow/lite/micro/examples/hello_world/Makefile.inc new file mode 100644 index 0000000..bfcd52e --- /dev/null +++ b/tensorflow/lite/micro/examples/hello_world/Makefile.inc @@ -0,0 +1,37 @@ +HELLO_WORLD_TEST_SRCS := \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/examples/hello_world/hello_world_test.cc + +HELLO_WORLD_SRCS := \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/examples/hello_world/hello_world_test.cc + +HELLO_WORLD_HDRS := + +HELLO_WORLD_GENERATOR_INPUTS := \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/examples/hello_world/models/hello_world_float.tflite \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/examples/hello_world/models/hello_world_int8.tflite + +HELLO_WORLD_GENERATED_SRCS := \ +$(GENERATED_SRCS_DIR)$(TENSORFLOW_ROOT)tensorflow/lite/micro/examples/hello_world/models/hello_world_float_model_data.cc \ +$(GENERATED_SRCS_DIR)$(TENSORFLOW_ROOT)tensorflow/lite/micro/examples/hello_world/models/hello_world_int8_model_data.cc + +HELLO_WORLD_GENERATED_HDRS := \ +$(GENERATED_SRCS_DIR)$(TENSORFLOW_ROOT)tensorflow/lite/micro/examples/hello_world/models/hello_world_float_model_data.h \ +$(GENERATED_SRCS_DIR)$(TENSORFLOW_ROOT)tensorflow/lite/micro/examples/hello_world/models/hello_world_int8_model_data.h + +# Tests loading and running the sine model. +$(eval $(call microlite_test,hello_world_test,\ +$(HELLO_WORLD_TEST_SRCS),,$(HELLO_WORLD_GENERATOR_INPUTS))) + +# Builds a standalone binary. +$(eval $(call microlite_test,hello_world,\ +$(HELLO_WORLD_SRCS),,$(HELLO_WORLD_GENERATOR_INPUTS))) + +# Add sources and headers generated from $(HELLO_WORLD_GENERATOR_INPUTS). 
+HELLO_WORLD_SRCS += $(HELLO_WORLD_GENERATED_SRCS) +HELLO_WORLD_HDRS += $(HELLO_WORLD_GENERATED_HDRS) + +list_hello_world_example_sources: + @echo $(HELLO_WORLD_SRCS) + +list_hello_world_example_headers: + @echo $(HELLO_WORLD_HDRS) diff --git a/tensorflow/lite/micro/examples/hello_world/README.md b/tensorflow/lite/micro/examples/hello_world/README.md new file mode 100644 index 0000000..313a6af --- /dev/null +++ b/tensorflow/lite/micro/examples/hello_world/README.md @@ -0,0 +1,94 @@ + + +# Hello World Example + +This example is designed to demonstrate the absolute basics of using [TensorFlow +Lite for Microcontrollers](https://www.tensorflow.org/lite/microcontrollers). +It includes the full end-to-end workflow of training a model, converting it for +use with TensorFlow Lite for Microcontrollers for running inference on a +microcontroller. + +## Table of contents + +- [Run the evaluate.py script on a development machine](#run-the-evaluate-script-on-a-development-machine) +- [Run the tests on a development machine](#run-the-tests-on-a-development-machine) +- [Train your own model](#train-your-own-model) + +## Run the evaluate.py script on a development machine +The evaluate.py script runs the hello_world.tflite model with x_values in the +range of [0, 2*PI]. The script plots a diagram of the predicted value of sinwave +using TFLM interpreter and compare that prediction with the actual value +generated by the numpy lib. +```bash +bazel build :evaluate +bazel run :evaluate +bazel run :evaluate -- --use_tflite +``` +![TFLM hello_world sinwave prediction VS actual values](images/hello_world_tflm.png) ![TFLM hello_world sinwave prediction VS actual values](images/hello_world_tflite.png) + +## Run the evaluate_test.py script on a development machine +These tests verify the input/output as well as the prediction of the +hello_world.tflite model. 
There is a test to also verify the correctness of +the model by running both TFLM and TFlite interpreter and then comparing the +prediction from both interpreters. +```bash +bazel build :evaluate_test +bazel run :evaluate_test +``` + +## Run the tests on a development machine + +Run the cc test using bazel +```bash +bazel run tensorflow/lite/micro/examples/hello_world:hello_world_test +``` +And to run it using make +```bash +make -f tensorflow/lite/micro/tools/make/Makefile test_hello_world_test +``` + +The source for the test is [hello_world_test.cc](hello_world_test.cc). +It's a fairly small amount of code that creates an interpreter, gets a handle to +a model that's been compiled into the program, and then invokes the interpreter +with the model and sample inputs. + +## Train your own model + +So far you have used an existing trained model to run inference on +microcontrollers. If you wish to train your own model, here are the scripts +that can help you to achieve that. + +```bash +bazel build tensorflow/lite/micro/examples/hello_world:train +``` +And to run it +```bash +bazel-bin/tensorflow/lite/micro/examples/hello_world/train --save_tf_model +--save_dir=/tmp/model_created/ +``` +The above script will create a TF model and TFlite model inside the +`/tmp/model_created` directory. + +Now the above model is a `float` model. Means it can take floating point input +and can produce floating point output. + +If we want a fully quantized model we can use the `ptq.py` script inside the +quantization directory. The `ptq.py` script can take a floating point TF model +and can produce a quantized model. + +Build the `ptq.py` script like +```bash +bazel build tensorflow/lite/micro/examples/hello_world/quantization:ptq +``` + +Then we can run the `ptq` script to convert the float model to quant model as +follows. Note that we are using the directory (`/tmp/model_created`) of the +TF model as the source_model_dir here. 
The quant model +(named `hello_world_int8.tflite`) will be created inside the target_dir. +The `ptq.py` script will convert the `TF model` found inside the +`/tmp/model_created` folder and convert it to a `int8` TFlite model. +```bash +bazel-bin/tensorflow/lite/micro/examples/hello_world/quantization/ptq +--source_model_dir=/tmp/model_created --target_dir=/tmp/quant_model/ +``` + diff --git a/tensorflow/lite/micro/examples/hello_world/evaluate.py b/tensorflow/lite/micro/examples/hello_world/evaluate.py new file mode 100644 index 0000000..8b6f948 --- /dev/null +++ b/tensorflow/lite/micro/examples/hello_world/evaluate.py @@ -0,0 +1,140 @@ +# Copyright 2023 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import os +import tensorflow as tf +from absl import app +from absl import flags +import numpy as np +import matplotlib.pyplot as plt +from tensorflow.python.platform import resource_loader +from tflite_micro.python.tflite_micro import runtime + +_USE_TFLITE_INTERPRETER = flags.DEFINE_bool( + 'use_tflite', + False, + 'Inference with the TF Lite interpreter instead of the TFLM interpreter', +) + +_PREFIX_PATH = resource_loader.get_path_to_datafile('') + + +def invoke_tflm_interpreter(input_shape, interpreter, x_value, input_index, + output_index): + input_data = np.reshape(x_value, input_shape) + interpreter.set_input(input_data, input_index) + interpreter.invoke() + y_quantized = np.reshape(interpreter.get_output(output_index), -1)[0] + return y_quantized + + +def invoke_tflite_interpreter(input_shape, interpreter, x_value, input_index, + output_index): + input_data = np.reshape(x_value, input_shape) + interpreter.set_tensor(input_index, input_data) + interpreter.invoke() + tflite_output = interpreter.get_tensor(output_index) + y_quantized = np.reshape(tflite_output, -1)[0] + return y_quantized + + +# Generate a list of 1000 random floats in the range of 0 to 2*pi. +def generate_random_int8_input(sample_count=1000): + # Generate a uniformly distributed set of random numbers in the range from + # 0 to 2π, which covers a complete sine wave oscillation + np.random.seed(42) + x_values = np.random.uniform(low=0, high=2 * np.pi, + size=sample_count).astype(np.int8) + return x_values + + +# Generate a list of 1000 random floats in the range of 0 to 2*pi. 
+def generate_random_float_input(sample_count=1000): + # Generate a uniformly distributed set of random numbers in the range from + # 0 to 2π, which covers a complete sine wave oscillation + np.random.seed(42) + x_values = np.random.uniform(low=0, high=2 * np.pi, + size=sample_count).astype(np.float32) + return x_values + + +# Invoke the tflm interpreter with x_values in the range of [0, 2*PI] and +# returns the prediction of the interpreter. +def get_tflm_prediction(model_path, x_values): + # Create the tflm interpreter + tflm_interpreter = runtime.Interpreter.from_file(model_path) + + input_shape = np.array(tflm_interpreter.get_input_details(0).get('shape')) + + y_predictions = np.empty(x_values.size, dtype=np.float32) + + for i, x_value in enumerate(x_values): + y_predictions[i] = invoke_tflm_interpreter(input_shape, + tflm_interpreter, + x_value, + input_index=0, + output_index=0) + return y_predictions + + +# Invoke the tflite interpreter with x_values in the range of [0, 2*PI] and +# returns the prediction of the interpreter. +def get_tflite_prediction(model_path, x_values): + # TFLite interpreter + tflite_interpreter = tf.lite.Interpreter( + model_path=model_path, + experimental_op_resolver_type=tf.lite.experimental.OpResolverType. 
+ BUILTIN_REF, + ) + tflite_interpreter.allocate_tensors() + + input_details = tflite_interpreter.get_input_details()[0] + output_details = tflite_interpreter.get_output_details()[0] + input_shape = np.array(input_details.get('shape')) + + y_predictions = np.empty(x_values.size, dtype=np.float32) + + for i, x_value in enumerate(x_values): + y_predictions[i] = invoke_tflite_interpreter( + input_shape, + tflite_interpreter, + x_value, + input_details['index'], + output_details['index'], + ) + return y_predictions + + +def main(_): + model_path = os.path.join(_PREFIX_PATH, 'models/hello_world_float.tflite') + + x_values = generate_random_float_input() + + # Calculate the corresponding sine values + y_true_values = np.sin(x_values).astype(np.float32) + + if _USE_TFLITE_INTERPRETER.value: + y_predictions = get_tflite_prediction(model_path, x_values) + plt.plot(x_values, y_predictions, 'b.', label='TFLite Prediction') + else: + y_predictions = get_tflm_prediction(model_path, x_values) + plt.plot(x_values, y_predictions, 'b.', label='TFLM Prediction') + + plt.plot(x_values, y_true_values, 'r.', label='Actual values') + plt.legend() + plt.show() + + +if __name__ == '__main__': + app.run(main) diff --git a/tensorflow/lite/micro/examples/hello_world/evaluate_test.py b/tensorflow/lite/micro/examples/hello_world/evaluate_test.py new file mode 100644 index 0000000..6de8490 --- /dev/null +++ b/tensorflow/lite/micro/examples/hello_world/evaluate_test.py @@ -0,0 +1,64 @@ +# Copyright 2023 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. + +import os +import numpy as np + +from tensorflow.python.framework import test_util +from tensorflow.python.platform import resource_loader +from tensorflow.python.platform import test +from tflite_micro.python.tflite_micro import runtime +from tflite_micro.tensorflow.lite.micro.examples.hello_world import evaluate + +PREFIX_PATH = resource_loader.get_path_to_datafile('') + + +class HelloWorldFloatModelTest(test_util.TensorFlowTestCase): + model_path = os.path.join(PREFIX_PATH, 'models/hello_world_float.tflite') + input_shape = (1, 1) + output_shape = (1, 1) + tflm_interpreter = runtime.Interpreter.from_file(model_path) + + def test_compare_with_tflite(self): + x_values = evaluate.generate_random_float_input() + + tflm_y_predictions = evaluate.get_tflm_prediction(self.model_path, + x_values) + + tflite_y_predictions = evaluate.get_tflite_prediction( + self.model_path, x_values) + + self.assertAllEqual(tflm_y_predictions, tflite_y_predictions) + + +class HelloWorldQuantModelTest(test_util.TensorFlowTestCase): + model_path = os.path.join(PREFIX_PATH, 'models/hello_world_int8.tflite') + input_shape = (1, 1) + output_shape = (1, 1) + tflm_interpreter = runtime.Interpreter.from_file(model_path) + + def test_compare_with_tflite(self): + x_values = evaluate.generate_random_int8_input() + + tflm_y_predictions = evaluate.get_tflm_prediction(self.model_path, + x_values) + + tflite_y_predictions = evaluate.get_tflite_prediction( + self.model_path, x_values) + + self.assertAllEqual(tflm_y_predictions, tflite_y_predictions) + + +if __name__ == '__main__': + test.main() diff --git a/tensorflow/lite/micro/examples/hello_world/hello_world_test.cc b/tensorflow/lite/micro/examples/hello_world/hello_world_test.cc new file mode 100644 index 0000000..1d8be4b --- /dev/null +++ b/tensorflow/lite/micro/examples/hello_world/hello_world_test.cc @@ -0,0 +1,159 @@ +/* Copyright 2023 The 
TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#include + +#include "tensorflow/lite/core/c/common.h" +#include "tensorflow/lite/micro/examples/hello_world/models/hello_world_float_model_data.h" +#include "tensorflow/lite/micro/examples/hello_world/models/hello_world_int8_model_data.h" +#include "tensorflow/lite/micro/micro_interpreter.h" +#include "tensorflow/lite/micro/micro_log.h" +#include "tensorflow/lite/micro/micro_mutable_op_resolver.h" +#include "tensorflow/lite/micro/micro_profiler.h" +#include "tensorflow/lite/micro/recording_micro_interpreter.h" +#include "tensorflow/lite/micro/system_setup.h" +#include "tensorflow/lite/schema/schema_generated.h" + +namespace { +using HelloWorldOpResolver = tflite::MicroMutableOpResolver<1>; + +TfLiteStatus RegisterOps(HelloWorldOpResolver& op_resolver) { + TF_LITE_ENSURE_STATUS(op_resolver.AddFullyConnected()); + return kTfLiteOk; +} +} // namespace + +TfLiteStatus ProfileMemoryAndLatency() { + tflite::MicroProfiler profiler; + HelloWorldOpResolver op_resolver; + TF_LITE_ENSURE_STATUS(RegisterOps(op_resolver)); + + // Arena size just a round number. The exact arena usage can be determined + // using the RecordingMicroInterpreter. 
+ constexpr int kTensorArenaSize = 3000; + uint8_t tensor_arena[kTensorArenaSize]; + constexpr int kNumResourceVariables = 24; + + tflite::RecordingMicroAllocator* allocator( + tflite::RecordingMicroAllocator::Create(tensor_arena, kTensorArenaSize)); + tflite::RecordingMicroInterpreter interpreter( + tflite::GetModel(g_hello_world_float_model_data), op_resolver, allocator, + tflite::MicroResourceVariables::Create(allocator, kNumResourceVariables), + &profiler); + + TF_LITE_ENSURE_STATUS(interpreter.AllocateTensors()); + TFLITE_CHECK_EQ(interpreter.inputs_size(), 1); + interpreter.input(0)->data.f[0] = 1.f; + TF_LITE_ENSURE_STATUS(interpreter.Invoke()); + + MicroPrintf(""); // Print an empty new line + profiler.LogTicksPerTagCsv(); + + MicroPrintf(""); // Print an empty new line + interpreter.GetMicroAllocator().PrintAllocations(); + return kTfLiteOk; +} + +TfLiteStatus LoadFloatModelAndPerformInference() { + const tflite::Model* model = + ::tflite::GetModel(g_hello_world_float_model_data); + TFLITE_CHECK_EQ(model->version(), TFLITE_SCHEMA_VERSION); + + HelloWorldOpResolver op_resolver; + TF_LITE_ENSURE_STATUS(RegisterOps(op_resolver)); + + // Arena size just a round number. The exact arena usage can be determined + // using the RecordingMicroInterpreter. 
+ constexpr int kTensorArenaSize = 3000; + uint8_t tensor_arena[kTensorArenaSize]; + + tflite::MicroInterpreter interpreter(model, op_resolver, tensor_arena, + kTensorArenaSize); + TF_LITE_ENSURE_STATUS(interpreter.AllocateTensors()); + + // Check if the predicted output is within a small range of the + // expected output + float epsilon = 0.05f; + constexpr int kNumTestValues = 4; + float golden_inputs[kNumTestValues] = {0.f, 1.f, 3.f, 5.f}; + + for (int i = 0; i < kNumTestValues; ++i) { + interpreter.input(0)->data.f[0] = golden_inputs[i]; + TF_LITE_ENSURE_STATUS(interpreter.Invoke()); + float y_pred = interpreter.output(0)->data.f[0]; + TFLITE_CHECK_LE(abs(sin(golden_inputs[i]) - y_pred), epsilon); + } + + return kTfLiteOk; +} + +TfLiteStatus LoadQuantModelAndPerformInference() { + // Map the model into a usable data structure. This doesn't involve any + // copying or parsing, it's a very lightweight operation. + const tflite::Model* model = + ::tflite::GetModel(g_hello_world_int8_model_data); + TFLITE_CHECK_EQ(model->version(), TFLITE_SCHEMA_VERSION); + + HelloWorldOpResolver op_resolver; + TF_LITE_ENSURE_STATUS(RegisterOps(op_resolver)); + + // Arena size just a round number. The exact arena usage can be determined + // using the RecordingMicroInterpreter. 
+ constexpr int kTensorArenaSize = 3000; + uint8_t tensor_arena[kTensorArenaSize]; + + tflite::MicroInterpreter interpreter(model, op_resolver, tensor_arena, + kTensorArenaSize); + + TF_LITE_ENSURE_STATUS(interpreter.AllocateTensors()); + + TfLiteTensor* input = interpreter.input(0); + TFLITE_CHECK_NE(input, nullptr); + + TfLiteTensor* output = interpreter.output(0); + TFLITE_CHECK_NE(output, nullptr); + + float output_scale = output->params.scale; + int output_zero_point = output->params.zero_point; + + // Check if the predicted output is within a small range of the + // expected output + float epsilon = 0.05; + + constexpr int kNumTestValues = 4; + float golden_inputs_float[kNumTestValues] = {0.77, 1.57, 2.3, 3.14}; + + // The int8 values are calculated using the following formula + // (golden_inputs_float[i] / input->params.scale + input->params.zero_point) + int8_t golden_inputs_int8[kNumTestValues] = {-96, -63, -34, 0}; + + for (int i = 0; i < kNumTestValues; ++i) { + input->data.int8[0] = golden_inputs_int8[i]; + TF_LITE_ENSURE_STATUS(interpreter.Invoke()); + float y_pred = (output->data.int8[0] - output_zero_point) * output_scale; + TFLITE_CHECK_LE(abs(sin(golden_inputs_float[i]) - y_pred), epsilon); + } + + return kTfLiteOk; +} + +int main(int argc, char* argv[]) { + tflite::InitializeTarget(); + TF_LITE_ENSURE_STATUS(ProfileMemoryAndLatency()); + TF_LITE_ENSURE_STATUS(LoadFloatModelAndPerformInference()); + TF_LITE_ENSURE_STATUS(LoadQuantModelAndPerformInference()); + MicroPrintf("~~~ALL TESTS PASSED~~~\n"); + return kTfLiteOk; +} diff --git a/tensorflow/lite/micro/examples/hello_world/images/hello_world_tflite.png b/tensorflow/lite/micro/examples/hello_world/images/hello_world_tflite.png new file mode 100644 index 0000000..56b2221 Binary files /dev/null and b/tensorflow/lite/micro/examples/hello_world/images/hello_world_tflite.png differ diff --git a/tensorflow/lite/micro/examples/hello_world/images/hello_world_tflm.png
b/tensorflow/lite/micro/examples/hello_world/images/hello_world_tflm.png new file mode 100644 index 0000000..a89fa08 Binary files /dev/null and b/tensorflow/lite/micro/examples/hello_world/images/hello_world_tflm.png differ diff --git a/tensorflow/lite/micro/examples/hello_world/models/BUILD b/tensorflow/lite/micro/examples/hello_world/models/BUILD new file mode 100644 index 0000000..4f025b0 --- /dev/null +++ b/tensorflow/lite/micro/examples/hello_world/models/BUILD @@ -0,0 +1,37 @@ +load("//tensorflow/lite/micro:build_def.bzl", "generate_cc_arrays") + +package( + default_visibility = ["//visibility:public"], +) + +exports_files( + srcs = [ + "hello_world_float.tflite", + "hello_world_int8.tflite", + ], + visibility = ["//tensorflow/lite/micro/examples/hello_world:__subpackages__"], +) + +generate_cc_arrays( + name = "generated_hello_world_float_model_cc", + src = "hello_world_float.tflite", + out = "hello_world_float_model_data.cc", +) + +generate_cc_arrays( + name = "generated_hello_world_float_model_hdr", + src = "hello_world_float.tflite", + out = "hello_world_float_model_data.h", +) + +generate_cc_arrays( + name = "generated_hello_world_int8_model_cc", + src = "hello_world_int8.tflite", + out = "hello_world_int8_model_data.cc", +) + +generate_cc_arrays( + name = "generated_hello_world_int8_model_hdr", + src = "hello_world_int8.tflite", + out = "hello_world_int8_model_data.h", +) diff --git a/tensorflow/lite/micro/examples/hello_world/models/hello_world_float.tflite b/tensorflow/lite/micro/examples/hello_world/models/hello_world_float.tflite new file mode 100644 index 0000000..f741b3a Binary files /dev/null and b/tensorflow/lite/micro/examples/hello_world/models/hello_world_float.tflite differ diff --git a/tensorflow/lite/micro/examples/hello_world/models/hello_world_int8.tflite b/tensorflow/lite/micro/examples/hello_world/models/hello_world_int8.tflite new file mode 100644 index 0000000..9a379ea Binary files /dev/null and 
b/tensorflow/lite/micro/examples/hello_world/models/hello_world_int8.tflite differ diff --git a/tensorflow/lite/micro/examples/hello_world/quantization/BUILD b/tensorflow/lite/micro/examples/hello_world/quantization/BUILD new file mode 100644 index 0000000..ecba316 --- /dev/null +++ b/tensorflow/lite/micro/examples/hello_world/quantization/BUILD @@ -0,0 +1,17 @@ +load("@tflm_pip_deps//:requirements.bzl", "requirement") + +py_binary( + name = "ptq", + srcs = ["ptq.py"], + data = ["//tensorflow/lite/micro/examples/hello_world/models:hello_world_float.tflite"], + python_version = "PY3", + srcs_version = "PY3", + deps = [ + "@absl_py//absl:app", + "@absl_py//absl/flags", + "@absl_py//absl/logging", + requirement("numpy"), + requirement("tensorflow-cpu"), + "//python/tflite_micro:runtime", + ], +) diff --git a/tensorflow/lite/micro/examples/hello_world/quantization/ptq.py b/tensorflow/lite/micro/examples/hello_world/quantization/ptq.py new file mode 100644 index 0000000..bfab0d0 --- /dev/null +++ b/tensorflow/lite/micro/examples/hello_world/quantization/ptq.py @@ -0,0 +1,116 @@ +# Copyright 2023 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================= +"""This script can create a quant(int8) model from the saved TF model. 
+ +Run: +Build the train.py script +`bazel build tensorflow/lite/micro/examples/hello_world/quantization:train` + +The following command first creates the trained TF float model that we will quantize later +`bazel-bin/tensorflow/lite/micro/examples/hello_world/train --save_tf_model --save_dir=/tmp/float_model/` + +Build the ptq.py script +`bazel build tensorflow/lite/micro/examples/hello_world/quantization:ptq` + +Then we can run the ptq script to convert the float model to quant model as follows. +Note that we are using the directory of the TF model as the source_model_dir here. +The quant model (named hello_world_int8.tflite) will be created inside the target_dir. +`bazel-bin/tensorflow/lite/micro/examples/hello_world/quantization/ptq --source_model_dir=/tmp/float_model --target_dir=/tmp/quant_model/` +""" +import math +import os + +from absl import app +from absl import flags +from absl import logging +import numpy as np +import tensorflow as tf + +FLAGS = flags.FLAGS + +flags.DEFINE_string("source_model_dir", "/tmp/float_model/", + "the directory where the trained model can be found.") +flags.DEFINE_string("target_dir", "/tmp/quant_model", + "the directory to save the quant model.") + + +def get_data(): + """ + The code will generate a set of random `x` values + """ + # Generate a uniformly distributed set of random numbers in the range from + # 0 to 2π, which covers a complete sine wave oscillation + x_values = np.random.uniform(low=0, high=2 * math.pi, + size=1000).astype(np.float32) + + # Shuffle the values to guarantee they're not in order + np.random.shuffle(x_values) + + return x_values + + +def save_tflite_model(tflite_model, target_dir, model_name): + """save the converted tflite model + Args: + tflite_model (binary): the converted model in serialized format. 
+ save_dir (str): the save directory + model_name (str): model name to be saved + """ + if not os.path.exists(target_dir): + os.makedirs(target_dir) + save_path = os.path.join(target_dir, model_name) + with open(save_path, "wb") as f: + f.write(tflite_model) + logging.info("Tflite model saved to %s", target_dir) + + +def convert_quantized_tflite_model(source_model_dir, x_values): + """Convert the save TF model to tflite model, then save it as .tflite + flatbuffer format + + Args: + source_model_dir (tf.keras.Model): the trained hello_world flaot Model dir + x_train (numpy.array): list of the training data + + Returns: + The converted model in serialized format. + """ + + # Convert the model to the TensorFlow Lite format with quantization + def representative_dataset(num_samples=500): + for i in range(num_samples): + yield [x_values[i].reshape(1, 1)] + + converter = tf.lite.TFLiteConverter.from_saved_model(source_model_dir) + converter.optimizations = [tf.lite.Optimize.DEFAULT] + converter.target_spec.supported_ops = [tf.lite.OpsSet.TFLITE_BUILTINS_INT8] + converter.inference_input_type = tf.int8 + converter.inference_output_type = tf.int8 + converter.representative_dataset = representative_dataset + tflite_model = converter.convert() + return tflite_model + + +def main(_): + x_values = get_data() + quantized_tflite_model = convert_quantized_tflite_model( + FLAGS.source_model_dir, x_values) + save_tflite_model(quantized_tflite_model, + FLAGS.target_dir, + model_name="hello_world_int8.tflite") + + +if __name__ == "__main__": + app.run(main) \ No newline at end of file diff --git a/tensorflow/lite/micro/examples/hello_world/train.py b/tensorflow/lite/micro/examples/hello_world/train.py new file mode 100644 index 0000000..3a2322c --- /dev/null +++ b/tensorflow/lite/micro/examples/hello_world/train.py @@ -0,0 +1,141 @@ +# Copyright 2023 The TensorFlow Authors. All Rights Reserved. 
+# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================= +"""hello_world model training for sine wave recognition + +Run: +`bazel build tensorflow/lite/micro/examples/hello_world:train` +`bazel-bin/tensorflow/lite/micro/examples/hello_world/train --save_tf_model --save_dir=/tmp/model_created/` +""" +import math +import os + +from absl import app +from absl import flags +from absl import logging +import numpy as np +import tensorflow as tf + +FLAGS = flags.FLAGS + +flags.DEFINE_integer("epochs", 500, "number of epochs to train the model.") +flags.DEFINE_string("save_dir", "/tmp/hello_world_models", + "the directory to save the trained model.") +flags.DEFINE_boolean("save_tf_model", False, + "store the original unconverted tf model.") + + +def get_data(): + """ + The code will generate a set of random `x` values, calculate their sine + values.
+ """ + # Generate a uniformly distributed set of random numbers in the range from + # 0 to 2π, which covers a complete sine wave oscillation + x_values = np.random.uniform(low=0, high=2 * math.pi, + size=1000).astype(np.float32) + + # Shuffle the values to guarantee they're not in order + np.random.shuffle(x_values) + + # Calculate the corresponding sine values + y_values = np.sin(x_values).astype(np.float32) + + return (x_values, y_values) + + +def create_model() -> tf.keras.Model: + model = tf.keras.Sequential() + + # First layer takes a scalar input and feeds it through 16 "neurons". The + # neurons decide whether to activate based on the 'relu' activation function. + model.add(tf.keras.layers.Dense(16, activation='relu', input_shape=(1, ))) + + # The new second and third layer will help the network learn more complex + # representations + model.add(tf.keras.layers.Dense(16, activation='relu')) + + # Final layer is a single neuron, since we want to output a single value + model.add(tf.keras.layers.Dense(1)) + + # Compile the model using the standard 'adam' optimizer and the mean squared + # error or 'mse' loss function for regression. + model.compile(optimizer='adam', loss='mse', metrics=['mae']) + + return model + + +def convert_tflite_model(model): + """Convert the save TF model to tflite model, then save it as .tflite flatbuffer format + Args: + model (tf.keras.Model): the trained hello_world Model + Returns: + The converted model in serialized format. + """ + converter = tf.lite.TFLiteConverter.from_keras_model(model) + tflite_model = converter.convert() + return tflite_model + + +def save_tflite_model(tflite_model, save_dir, model_name): + """save the converted tflite model + Args: + tflite_model (binary): the converted model in serialized format. 
+ save_dir (str): the save directory + model_name (str): model name to be saved + """ + if not os.path.exists(save_dir): + os.makedirs(save_dir) + save_path = os.path.join(save_dir, model_name) + with open(save_path, "wb") as f: + f.write(tflite_model) + logging.info("Tflite model saved to %s", save_dir) + + +def train_model(epochs, x_values, y_values): + """Train keras hello_world model + Args: epochs (int) : number of epochs to train the model + x_train (numpy.array): list of the training data + y_train (numpy.array): list of the corresponding array + Returns: + tf.keras.Model: A trained keras hello_world model + """ + model = create_model() + model.fit(x_values, + y_values, + epochs=epochs, + validation_split=0.2, + batch_size=64, + verbose=2) + + if FLAGS.save_tf_model: + model.save(FLAGS.save_dir, save_format="tf") + logging.info("TF model saved to %s", FLAGS.save_dir) + + return model + + +def main(_): + x_values, y_values = get_data() + trained_model = train_model(FLAGS.epochs, x_values, y_values) + + # Convert and save the model to .tflite + tflite_model = convert_tflite_model(trained_model) + save_tflite_model(tflite_model, + FLAGS.save_dir, + model_name="hello_world_float.tflite") + + +if __name__ == "__main__": + app.run(main) \ No newline at end of file diff --git a/tensorflow/lite/micro/examples/memory_footprint/BUILD b/tensorflow/lite/micro/examples/memory_footprint/BUILD new file mode 100644 index 0000000..d45507b --- /dev/null +++ b/tensorflow/lite/micro/examples/memory_footprint/BUILD @@ -0,0 +1,54 @@ +load( + "//tensorflow/lite/micro:build_def.bzl", + "generate_cc_arrays", + "micro_copts", +) + +package( + licenses = ["notice"], +) + +generate_cc_arrays( + name = "generated_simple_add_model_cc", + src = "models/simple_add_model.tflite", + out = "models/simple_add_model_model_data.cc", +) + +generate_cc_arrays( + name = "generated_simple_add_model_hdr", + src = "models/simple_add_model.tflite", + out = "models/simple_add_model_model_data.h", +) + 
+cc_library( + name = "simple_add_model_data", + srcs = [ + ":generated_simple_add_model_cc", + ], + hdrs = [ + ":generated_simple_add_model_hdr", + ], + copts = micro_copts(), +) + +cc_binary( + name = "baseline_memory_footprint", + srcs = ["baseline_memory_footprint.cc"], + deps = [ + "//tensorflow/lite/c:common", + "//tensorflow/lite/micro:system_setup", + ], +) + +cc_binary( + name = "interpreter_memory_footprint", + srcs = ["interpreter_memory_footprint.cc"], + deps = [ + ":simple_add_model_data", + "//tensorflow/lite/c:common", + "//tensorflow/lite/micro:micro_profiler", + "//tensorflow/lite/micro:op_resolvers", + "//tensorflow/lite/micro:system_setup", + "//tensorflow/lite/micro/benchmarks:micro_benchmark", + ], +) diff --git a/tensorflow/lite/micro/examples/memory_footprint/Makefile.inc b/tensorflow/lite/micro/examples/memory_footprint/Makefile.inc new file mode 100644 index 0000000..90b8250 --- /dev/null +++ b/tensorflow/lite/micro/examples/memory_footprint/Makefile.inc @@ -0,0 +1,26 @@ +BASELINE_MEMORY_FOOTPRINT_SRCS := \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/examples/memory_footprint/baseline_memory_footprint.cc + +BASELINE_MEMORY_FOOTPRINT_HDRS := + +INTERPRETER_MEMORY_FOOTPRINT_SRCS := \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/examples/memory_footprint/interpreter_memory_footprint.cc + +INTERPRETER_MEMORY_FOOTPRINT_HDRS := \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/benchmarks/micro_benchmark.h + +MEMORY_FOOTPRINT_GENERATOR_INPUTS := \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/examples/memory_footprint/models/simple_add_model.tflite + +MEMORY_FOOTPRINT_GENERATED_SRCS := \ +$(GENERATED_SRCS_DIR)$(TENSORFLOW_ROOT)tensorflow/lite/micro/examples/memory_footprint/models/simple_add_model_data.cc + +MEMORY_FOOTPRINT_GENERATED_HDRS := \ +$(GENERATED_SRCS_DIR)$(TENSORFLOW_ROOT)tensorflow/lite/micro/examples/memory_footprint/models/simple_add_model_data.h + +# Builds standalone binaries for profiling memory footprint. 
+$(eval $(call microlite_test,baseline_memory_footprint,\ +$(BASELINE_MEMORY_FOOTPRINT_SRCS),$(BASELINE_MEMORY_FOOTPRINT_HDRS))) + +$(eval $(call microlite_test,interpreter_memory_footprint,\ +$(INTERPRETER_MEMORY_FOOTPRINT_SRCS),$(INTERPRETER_MEMORY_FOOTPRINT_HDRS),$(MEMORY_FOOTPRINT_GENERATOR_INPUTS))) \ No newline at end of file diff --git a/tensorflow/lite/micro/examples/memory_footprint/README.md b/tensorflow/lite/micro/examples/memory_footprint/README.md new file mode 100644 index 0000000..bf75ae2 --- /dev/null +++ b/tensorflow/lite/micro/examples/memory_footprint/README.md @@ -0,0 +1,125 @@ + + +* [TFLM Code Size FAQ](#tflm-code-size-faq) + * [Methodology to estimate code size of TFLM](#methodology-to-estimate-code-size-of-tflm) + * [Sample code size of the TFLM Framework](#sample-code-size-of-the-tflm-framework) + * [Tips to improve code size](#tips-to-improve-code-size) + * [Only register kernels that a model needs](#only-register-kernels-that-a-model-needs) + + + + + +# TFLM Code Size FAQ + +This document outlines basic steps to measure the code size of TFLM. On a +platform based on ELF file format, the code size refers to the text section size +of an ELF file. Additionally, this document outlines some common tips to keep +the code size small. + +Note that a complete application that depends on the TFLM typically would also +include a TFLite model in flatbuffer and a memory arena, which are in data +sections of an ELF file. Their size is an important aspect to the overall memory +footprint, but not discussed in this document. + +## Methodology to estimate code size of TFLM + +Based on the [architecture description](https://arxiv.org/pdf/2010.08678.pdf), +we further classify the source code into two categories: TFLM framework and +kernels as illustrated in the below diagram: + +![TFLM code size categories](images/tflm_code_size_category.png) + +TFLM Framework includes infrastructure such as interpreter, memory planner etc. 
+The size of TFLM Framework is a fixed cost of using TFLM and primarily includes +codes under tensorflow/lite/micro, but excludes those in +tensorflow/lite/micro/kernels. + +On the other hand, the code size contribution from the kernels depends on and +scales with the model that an application uses. This contribution from the +kernels mostly includes the codes in tensorflow/lite/micro/kernels as well as +third party libraries. + +To measure the size of the TFLM Framework that is independent of a model, the +methodology that is adopted in this document is as follows: + +1. Build the `baseline_memory_footprint` target in + `tensorflow/lite/micro/examples/memory_footprint/`. Estimate its code size + via a `size` command. +1. Build the `interpreter_memory_footprint` target in + `tensorflow/lite/micro/examples/memory_footprint/`. Estimate its code size + via a `size` command. +1. Subtracting the two sizes from the above two steps provides the code size + estimation of the TFLM Framework. + +Step 1 gives the code size for a "no-op application" that would typically +include platform-specific initialization. We assume that this is a fixed size +that is independent of TFLM. + +Step 2 produces a binary that includes the code needed to create an interpreter +instance (i.e. the TFLM framework). It explicitly avoids pulling in any kernel +code such that the increase between step 2 and step 1 is a reasonable estimate +of the footprint of the TFLM framework. Note that since we do not register any +kernel code, the binary from step 2 cannot run any actual inference. + +The code size estimation via the above steps also includes additional system +libraries that need to be pulled in due to the use of the TFLM. + +A similar process can be adopted to further estimate the size of kernels. For +example, the size of kernels used in keyword detection can be estimated by the +following steps: + +1. Build the `keyword_benchmark` target in `tensorflow/lite/micro/benchmarks`.
+ Estimate its code size via a `size` command. +1. Subtract to get the code size difference between the `keyword_benchmark` and + `interpreter_memory_footprint` + +It may be worth noting that the above methodology will attribute the code size +from `MicroMutableOpResolver` towards the code size of kernels, instead of +counting them in the code size estimation of the TFLM Framework. We adopt this +methodology due to its simplicity, robustness and the ability to include the +contribution of system libraries. + +## Sample code size of the TFLM Framework + +The below code size number of the TFLM Framework is shown as a reference only. + +For a 64 bit x86 platform, the TFLM code size obtained through the above method +is 20411 bytes. + +For an embedded bluepill ARM platform, the TFLM code size obtained through the +above method is 9732 bytes. + +## Tips to improve code size + +### Only register kernels that a model needs + +One common issue that leads to unnecessarily large code size is forgetting to +register only the kernels that a model needs and ending up registering all kernels. + +Therefore, when moving off the exploration stage, it is better to only register +the kernels that the model needs to have a smaller footprint. The following code +snippet shows how to do so using the keyword detection as an example: + +```cc + // Create OpResolver class with up to 6 kernel support. + using KeywordOpResolver = MicroMutableOpResolver<6>; + + // Avoid the usage of new by placement new + uint8_t op_resolver_buffer[sizeof(KeywordOpResolver)]; + KeywordOpResolver* op_resolver = new (op_resolver_buffer) KeywordOpResolver(); + + // Only add the required kernel + op_resolver->AddFullyConnected(tflite::Register_FULLY_CONNECTED_INT8()); + op_resolver->AddQuantize(); + op_resolver->AddSoftmax(tflite::Register_SOFTMAX_INT8_INT16()); + op_resolver->AddSvdf(tflite::Register_SVDF_INT8()); + + ...
+ + // Pass the OpResolver to the interpreter + tflite::MicroInterpreter* interpreter = tflite::MicroInterpreter::Create( + g_keyword_scrambled_model_data, op_resolver, tensor_arena, kTensorArenaSize, profiler); +``` + +TODO(b/201351077): add more tips to improve code size. diff --git a/tensorflow/lite/micro/examples/memory_footprint/baseline_memory_footprint.cc b/tensorflow/lite/micro/examples/memory_footprint/baseline_memory_footprint.cc new file mode 100644 index 0000000..7a934b7 --- /dev/null +++ b/tensorflow/lite/micro/examples/memory_footprint/baseline_memory_footprint.cc @@ -0,0 +1,21 @@ +/* Copyright 2021 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ +#include "tensorflow/lite/micro/system_setup.h" + +// This file provides a skeleton application without the TFLM Framework +// (interpreter, memory planner etc). This is used to measure the bare minimum +// application code size of a specific target platform without the TFLM +// Framework. Please see README.md for more information.
+int main(int argc, char** argv) { tflite::InitializeTarget(); } diff --git a/tensorflow/lite/micro/examples/memory_footprint/create_adder_model.py b/tensorflow/lite/micro/examples/memory_footprint/create_adder_model.py new file mode 100644 index 0000000..4c24f43 --- /dev/null +++ b/tensorflow/lite/micro/examples/memory_footprint/create_adder_model.py @@ -0,0 +1,55 @@ +# Copyright 2021 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================== +"""Creates a simple tflite model that adds two input tensor of size 1.""" + +from absl import app +import tensorflow as tf + + +def main(_): + input_shape = (128, 128, 1) + x1 = tf.keras.layers.Input(input_shape) + x2 = tf.keras.layers.Input(input_shape) + + added = tf.keras.layers.Add()([x1, x2]) + model = tf.keras.models.Model(inputs=[x1, x2], outputs=added) + + converter = tf.lite.TFLiteConverter.from_keras_model(model) + converter.optimizations = [tf.lite.Optimize.DEFAULT] + # Enforce integer only quantization + converter.target_spec.supported_ops = [tf.lite.OpsSet.TFLITE_BUILTINS_INT8] + converter.inference_input_type = tf.int8 + converter.inference_output_type = tf.int8 + + # Fix random seed to keep the model reproducible. 
+ tf.random.set_seed(3) + + # Convert the model to the TensorFlow Lite format with quantization and + # quantization requires a representative data set + def representative_dataset(): + for i in range(500): + yield ([ + tf.random.normal(input_shape, seed=i), + tf.random.normal(input_shape, seed=i * 2) + ]) + + converter.representative_dataset = representative_dataset + model_tflite = converter.convert() + + open("simple_add_model.tflite", "wb").write(model_tflite) + + +if __name__ == '__main__': + app.run(main) diff --git a/tensorflow/lite/micro/examples/memory_footprint/images/tflm_code_size_category.png b/tensorflow/lite/micro/examples/memory_footprint/images/tflm_code_size_category.png new file mode 100644 index 0000000..27ab86d Binary files /dev/null and b/tensorflow/lite/micro/examples/memory_footprint/images/tflm_code_size_category.png differ diff --git a/tensorflow/lite/micro/examples/memory_footprint/interpreter_memory_footprint.cc b/tensorflow/lite/micro/examples/memory_footprint/interpreter_memory_footprint.cc new file mode 100644 index 0000000..036429b --- /dev/null +++ b/tensorflow/lite/micro/examples/memory_footprint/interpreter_memory_footprint.cc @@ -0,0 +1,60 @@ +/* Copyright 2021 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/ +#include <cstdint> +#include <new> + +#include "tensorflow/lite/micro/benchmarks/micro_benchmark.h" +#include "tensorflow/lite/micro/examples/memory_footprint/models/simple_add_model_model_data.h" +#include "tensorflow/lite/micro/micro_mutable_op_resolver.h" +#include "tensorflow/lite/micro/micro_profiler.h" +#include "tensorflow/lite/micro/system_setup.h" + +// Use MicroBenchmarkRunner to avoid boilerplate code and more easily compare +// the size with other benchmarks such as keyword_benchmark. +using InterpreterMemoryFootprintRunner = tflite::MicroBenchmarkRunner<int8_t>; +using InterpreterMemoryFootprintOpResolver = tflite::MicroMutableOpResolver<6>; + +// This binary includes the TFLM Framework (interpreter, memory planner etc), +// but without any kernels. This is used to measure the code size of the TFLM +// Framework. This binary will produce a run time error by design since no OP is +// registered. Please see README.md for more info. +int main(int argc, char** argv) { + // Arbitrary size. + constexpr int kTensorArenaSize = 1024; + alignas(16) uint8_t tensor_arena[kTensorArenaSize]; + uint8_t runner_buffer[sizeof(InterpreterMemoryFootprintRunner)]; + + tflite::InitializeTarget(); + tflite::MicroProfiler profiler; + + InterpreterMemoryFootprintOpResolver op_resolver; + + // Do NOT allocate any OP so that the binary does not include any kernels. + + // Use placement new as this is the standard way to create a new object in + // the TFLM code base. This is to avoid unnecessary dynamic memory allocation + // code in the binary. + // We pass an arbitrary model to the benchmark runner to ensure that the TFLM + // framework can successfully go through all the steps needed to perform the + // initialization and memory planning needed prior to running inference on a + // model. The specifics of the model itself (size, ops ...) are not important + // since we do not actually run any inference.
+ InterpreterMemoryFootprintRunner* runner = new (runner_buffer) + InterpreterMemoryFootprintRunner(g_simple_add_model_model_data, + &op_resolver, tensor_arena, + kTensorArenaSize, &profiler); + + runner->RunSingleIteration(); +} diff --git a/tensorflow/lite/micro/examples/memory_footprint/models/simple_add_model.tflite b/tensorflow/lite/micro/examples/memory_footprint/models/simple_add_model.tflite new file mode 100644 index 0000000..2a80c50 Binary files /dev/null and b/tensorflow/lite/micro/examples/memory_footprint/models/simple_add_model.tflite differ diff --git a/tensorflow/lite/micro/examples/micro_speech/BUILD b/tensorflow/lite/micro/examples/micro_speech/BUILD new file mode 100644 index 0000000..71741f3 --- /dev/null +++ b/tensorflow/lite/micro/examples/micro_speech/BUILD @@ -0,0 +1,453 @@ +# Description: +# TensorFlow Lite microcontroller example. +load("//tensorflow/lite/micro:build_def.bzl", "generate_cc_arrays") + +package( + default_visibility = ["//visibility:public"], + # Disabling layering_check because of http://b/177257332 + features = ["-layering_check"], + licenses = ["notice"], +) + +cc_library( + name = "simple_model_settings", + srcs = [ + "simple_features/simple_model_settings.cc", + ], + hdrs = [ + "simple_features/simple_model_settings.h", + ], +) + +generate_cc_arrays( + name = "generated_yes_1000ms_wav_cc", + src = "testdata/yes_1000ms.wav", + out = "testdata/yes_1000ms_audio_data.cc", +) + +generate_cc_arrays( + name = "generated_yes_1000ms_wav_hdr", + src = "testdata/yes_1000ms.wav", + out = "testdata/yes_1000ms_audio_data.h", +) + +generate_cc_arrays( + name = "generated_no_1000ms_wav_cc", + src = "testdata/no_1000ms.wav", + out = "testdata/no_1000ms_audio_data.cc", +) + +generate_cc_arrays( + name = "generated_no_1000ms_wav_hdr", + src = "testdata/no_1000ms.wav", + out = "testdata/no_1000ms_audio_data.h", +) + +generate_cc_arrays( + name = "generated_yes_30ms_wav_cc", + src = "testdata/yes_30ms.wav", + out = 
"testdata/yes_30ms_audio_data.cc", +) + +generate_cc_arrays( + name = "generated_yes_30ms_wav_hdr", + src = "testdata/yes_30ms.wav", + out = "testdata/yes_30ms_audio_data.h", +) + +generate_cc_arrays( + name = "generated_no_30ms_wav_cc", + src = "testdata/no_30ms.wav", + out = "testdata/no_30ms_audio_data.cc", +) + +generate_cc_arrays( + name = "generated_no_30ms_wav_hdr", + src = "testdata/no_30ms.wav", + out = "testdata/no_30ms_audio_data.h", +) + +generate_cc_arrays( + name = "generated_micro_speech_model_cc", + src = "micro_speech.tflite", + out = "micro_speech_model_data.cc", +) + +generate_cc_arrays( + name = "generated_micro_speech_model_hdr", + src = "micro_speech.tflite", + out = "micro_speech_model_data.h", +) + +cc_library( + name = "micro_speech_model_data", + srcs = [ + ":generated_micro_speech_model_cc", + ], + hdrs = [ + ":generated_micro_speech_model_hdr", + ], +) + +cc_library( + name = "simple_features_test_data", + srcs = [ + "simple_features/no_simple_features_data.cc", + "simple_features/yes_simple_features_data.cc", + ], + hdrs = [ + "simple_features/no_simple_features_data.h", + "simple_features/yes_simple_features_data.h", + ], +) + +cc_test( + name = "micro_speech_test", + srcs = [ + "micro_speech_test.cc", + ], + deps = [ + ":micro_speech_model_data", + "//tensorflow/lite/micro:micro_framework", + "//tensorflow/lite/micro:micro_log", + "//tensorflow/lite/micro:op_resolvers", + "//tensorflow/lite/micro/examples/micro_speech/micro_features:micro_features_test_data", + "//tensorflow/lite/micro/testing:micro_test", + "//tensorflow/lite/schema:schema_fbs", + ], +) + +cc_library( + name = "audio_sample_test_data", + srcs = [ + ":generated_no_30ms_wav_cc", + ":generated_yes_30ms_wav_cc", + ], + hdrs = [ + ":generated_no_30ms_wav_hdr", + ":generated_yes_30ms_wav_hdr", + ], +) + +cc_library( + name = "audio_large_sample_test_data", + srcs = [ + ":generated_no_1000ms_wav_cc", + ":generated_yes_1000ms_wav_cc", + ], + hdrs = [ + 
":generated_no_1000ms_wav_hdr", + ":generated_yes_1000ms_wav_hdr", + ], +) + +cc_library( + name = "simple_features_generator_test_data", + srcs = [ + "simple_features/no_power_spectrum_data.cc", + "simple_features/yes_power_spectrum_data.cc", + ], + hdrs = [ + "simple_features/no_power_spectrum_data.h", + "simple_features/yes_power_spectrum_data.h", + ], +) + +cc_library( + name = "simple_features_generator_reference", + srcs = [ + "simple_features/simple_features_generator.cc", + ], + hdrs = [ + "simple_features/simple_features_generator.h", + ], + deps = [ + ":simple_model_settings", + "//tensorflow/lite/c:common", + "//tensorflow/lite/micro:micro_log", + ], +) + +cc_test( + name = "simple_features_generator_reference_test", + srcs = [ + "simple_features/simple_features_generator_test.cc", + ], + deps = [ + ":audio_sample_test_data", + ":simple_features_generator_reference", + ":simple_features_generator_test_data", + ":simple_model_settings", + "//tensorflow/lite/c:common", + "//tensorflow/lite/micro:micro_framework", + "//tensorflow/lite/micro:micro_log", + "//tensorflow/lite/micro/testing:micro_test", + ], +) + +cc_library( + name = "simple_features_generator_fixed", + srcs = [ + "simple_features/fixed_point/simple_features_generator.cc", + ], + hdrs = [ + "simple_features/simple_features_generator.h", + ], + deps = [ + ":simple_model_settings", + "//tensorflow/lite/c:common", + "//tensorflow/lite/micro:micro_log", + ], +) + +cc_test( + name = "simple_features_generator_fixed_test", + srcs = [ + "simple_features/simple_features_generator_test.cc", + ], + deps = [ + ":audio_sample_test_data", + ":simple_features_generator_fixed", + ":simple_features_generator_test_data", + ":simple_model_settings", + "//tensorflow/lite/c:common", + "//tensorflow/lite/micro:micro_framework", + "//tensorflow/lite/micro:micro_log", + "//tensorflow/lite/micro/testing:micro_test", + ], +) + +cc_library( + name = "audio_provider", + srcs = [ + "audio_provider.cc", + ], + hdrs = [ + 
"audio_provider.h", + ], + deps = [ + "//tensorflow/lite/c:common", + "//tensorflow/lite/micro/examples/micro_speech/micro_features:micro_model_settings", + ], +) + +cc_library( + name = "audio_provider_mock", + srcs = [ + "audio_provider_mock.cc", + ], + hdrs = [ + "audio_provider.h", + ], + deps = [ + ":audio_large_sample_test_data", + "//tensorflow/lite/c:common", + "//tensorflow/lite/micro/examples/micro_speech/micro_features:micro_model_settings", + ], +) + +cc_test( + name = "audio_provider_test", + srcs = [ + "audio_provider_test.cc", + ], + deps = [ + ":audio_provider", + "//tensorflow/lite/c:common", + "//tensorflow/lite/micro:micro_framework", + "//tensorflow/lite/micro/examples/micro_speech/micro_features:micro_model_settings", + "//tensorflow/lite/micro/testing:micro_test", + ], +) + +cc_test( + name = "audio_provider_mock_test", + srcs = [ + "audio_provider_mock_test.cc", + ], + deps = [ + ":audio_large_sample_test_data", + ":audio_provider_mock", + "//tensorflow/lite/c:common", + "//tensorflow/lite/micro:micro_framework", + "//tensorflow/lite/micro/examples/micro_speech/micro_features:micro_model_settings", + "//tensorflow/lite/micro/testing:micro_test", + ], +) + +cc_library( + name = "feature_provider", + srcs = [ + "feature_provider.cc", + ], + hdrs = [ + "feature_provider.h", + ], + deps = [ + ":audio_provider", + "//tensorflow/lite/c:common", + "//tensorflow/lite/micro:micro_log", + "//tensorflow/lite/micro/examples/micro_speech/micro_features:micro_features_generator", + "//tensorflow/lite/micro/examples/micro_speech/micro_features:micro_model_settings", + ], +) + +cc_test( + name = "feature_provider_test", + srcs = [ + "feature_provider_test.cc", + ], + deps = [ + ":audio_provider", + ":feature_provider", + "//tensorflow/lite/c:common", + "//tensorflow/lite/micro:micro_framework", + "//tensorflow/lite/micro/examples/micro_speech/micro_features:micro_model_settings", + "//tensorflow/lite/micro/testing:micro_test", + ], +) + +cc_library( + name = 
"feature_provider_mock", + srcs = [ + "feature_provider.cc", + ], + hdrs = [ + "feature_provider.h", + ], + deps = [ + ":audio_provider_mock", + "//tensorflow/lite/c:common", + "//tensorflow/lite/micro:micro_log", + "//tensorflow/lite/micro/examples/micro_speech/micro_features:micro_features_generator", + "//tensorflow/lite/micro/examples/micro_speech/micro_features:micro_model_settings", + ], +) + +cc_test( + name = "feature_provider_mock_test", + size = "small", + srcs = [ + "feature_provider_mock_test.cc", + ], + tags = [ + "noasan", # TODO(b/179930607): Fix with asan. + ], + deps = [ + ":feature_provider_mock", + "//tensorflow/lite/c:common", + "//tensorflow/lite/micro:micro_framework", + "//tensorflow/lite/micro/examples/micro_speech/micro_features:micro_features_test_data", + "//tensorflow/lite/micro/examples/micro_speech/micro_features:micro_model_settings", + "//tensorflow/lite/micro/testing:micro_test", + ], +) + +cc_library( + name = "recognize_commands", + srcs = [ + "recognize_commands.cc", + ], + hdrs = [ + "recognize_commands.h", + ], + deps = [ + "//tensorflow/lite/c:common", + "//tensorflow/lite/micro:micro_log", + "//tensorflow/lite/micro/examples/micro_speech/micro_features:micro_model_settings", + ], +) + +cc_test( + name = "recognize_commands_test", + srcs = [ + "recognize_commands_test.cc", + ], + tags = [ + "no_oss", # TODO(122853023): Resolve issues and re-enable. 
+ ], + deps = [ + ":recognize_commands", + "//tensorflow/lite/c:common", + "//tensorflow/lite/micro:micro_framework", + "//tensorflow/lite/micro:micro_log", + "//tensorflow/lite/micro:test_helpers", + "//tensorflow/lite/micro/testing:micro_test", + ], +) + +cc_library( + name = "command_responder", + srcs = [ + "command_responder.cc", + ], + hdrs = [ + "command_responder.h", + ], + deps = [ + "//tensorflow/lite/c:common", + "//tensorflow/lite/micro:micro_log", + ], +) + +cc_test( + name = "command_responder_test", + srcs = [ + "command_responder_test.cc", + ], + deps = [ + ":command_responder", + "//tensorflow/lite/c:common", + "//tensorflow/lite/micro:micro_framework", + "//tensorflow/lite/micro/testing:micro_test", + ], +) + +cc_binary( + name = "micro_speech", + srcs = [ + "main.cc", + "main_functions.cc", + "main_functions.h", + ], + deps = [ + ":audio_provider", + ":command_responder", + ":feature_provider", + ":micro_speech_model_data", + ":recognize_commands", + "//tensorflow/lite/micro:micro_framework", + "//tensorflow/lite/micro:micro_log", + "//tensorflow/lite/micro:op_resolvers", + "//tensorflow/lite/micro:system_setup", + "//tensorflow/lite/micro/examples/micro_speech/micro_features:micro_model_settings", + "//tensorflow/lite/schema:schema_fbs", + ], +) + +cc_binary( + name = "micro_speech_mock", + srcs = [ + "main.cc", + "main_functions.cc", + "main_functions.h", + ], + deps = [ + ":audio_provider_mock", + ":command_responder", + ":feature_provider", + ":micro_speech_model_data", + ":recognize_commands", + "//tensorflow/lite/micro:micro_framework", + "//tensorflow/lite/micro:op_resolvers", + "//tensorflow/lite/micro:system_setup", + "//tensorflow/lite/micro/examples/micro_speech/micro_features:micro_model_settings", + "//tensorflow/lite/schema:schema_fbs", + ], +) + +sh_test( + name = "micro_speech_binary_mock_test", + srcs = ["micro_speech_binary_mock_test.sh"], + data = [":micro_speech_mock"], +) diff --git 
a/tensorflow/lite/micro/examples/micro_speech/Makefile.inc b/tensorflow/lite/micro/examples/micro_speech/Makefile.inc new file mode 100644 index 0000000..d2ceab5 --- /dev/null +++ b/tensorflow/lite/micro/examples/micro_speech/Makefile.inc @@ -0,0 +1,299 @@ + +INCLUDES += \ + -I$(MAKEFILE_DIR)/downloads/kissfft + +KISSFFT_LIB_SRCS := + +KISSFFT_LIB_HDRS := \ +$(MAKEFILE_DIR)/downloads/kissfft/COPYING \ +$(MAKEFILE_DIR)/downloads/kissfft/kiss_fft.c \ +$(MAKEFILE_DIR)/downloads/kissfft/kiss_fft.h \ +$(MAKEFILE_DIR)/downloads/kissfft/_kiss_fft_guts.h \ +$(MAKEFILE_DIR)/downloads/kissfft/tools/kiss_fftr.c \ +$(MAKEFILE_DIR)/downloads/kissfft/tools/kiss_fftr.h + +MICRO_SPEECH_TEST_SRCS := \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/examples/micro_speech/micro_speech_test.cc \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/examples/micro_speech/micro_features/no_micro_features_data.cc \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/examples/micro_speech/micro_features/yes_micro_features_data.cc + +MICRO_SPEECH_TEST_HDRS := \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/examples/micro_speech/micro_features/no_micro_features_data.h \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/examples/micro_speech/micro_features/yes_micro_features_data.h \ + +SIMPLE_FEATURES_GENERATOR_TEST_SRCS := \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/examples/micro_speech/simple_features/simple_features_generator.cc \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/examples/micro_speech/simple_features/simple_features_generator_test.cc \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/examples/micro_speech/simple_features/no_power_spectrum_data.cc \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/examples/micro_speech/simple_features/yes_power_spectrum_data.cc + +SIMPLE_FEATURES_GENERATOR_TEST_HDRS := \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/examples/micro_speech/simple_features/simple_model_settings.h \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/examples/micro_speech/simple_features/simple_features_generator.h \ 
+$(TENSORFLOW_ROOT)tensorflow/lite/micro/examples/micro_speech/simple_features/no_power_spectrum_data.h \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/examples/micro_speech/simple_features/yes_power_spectrum_data.h + +MICRO_FEATURES_LIB_SRCS := \ +$(TENSORFLOW_ROOT)tensorflow/lite/experimental/microfrontend/lib/fft.cc \ +$(TENSORFLOW_ROOT)tensorflow/lite/experimental/microfrontend/lib/fft_util.cc \ +$(TENSORFLOW_ROOT)tensorflow/lite/experimental/microfrontend/lib/filterbank.c \ +$(TENSORFLOW_ROOT)tensorflow/lite/experimental/microfrontend/lib/filterbank_util.c \ +$(TENSORFLOW_ROOT)tensorflow/lite/experimental/microfrontend/lib/frontend.c \ +$(TENSORFLOW_ROOT)tensorflow/lite/experimental/microfrontend/lib/frontend_util.c \ +$(TENSORFLOW_ROOT)tensorflow/lite/experimental/microfrontend/lib/kiss_fft_int16.cc \ +$(TENSORFLOW_ROOT)tensorflow/lite/experimental/microfrontend/lib/log_lut.c \ +$(TENSORFLOW_ROOT)tensorflow/lite/experimental/microfrontend/lib/log_scale.c \ +$(TENSORFLOW_ROOT)tensorflow/lite/experimental/microfrontend/lib/log_scale_util.c \ +$(TENSORFLOW_ROOT)tensorflow/lite/experimental/microfrontend/lib/noise_reduction.c \ +$(TENSORFLOW_ROOT)tensorflow/lite/experimental/microfrontend/lib/noise_reduction_util.c \ +$(TENSORFLOW_ROOT)tensorflow/lite/experimental/microfrontend/lib/pcan_gain_control.c \ +$(TENSORFLOW_ROOT)tensorflow/lite/experimental/microfrontend/lib/pcan_gain_control_util.c \ +$(TENSORFLOW_ROOT)tensorflow/lite/experimental/microfrontend/lib/window.c \ +$(TENSORFLOW_ROOT)tensorflow/lite/experimental/microfrontend/lib/window_util.c \ +$(KISSFFT_LIB_SRCS) + +MICRO_FEATURES_LIB_HDRS := \ +$(TENSORFLOW_ROOT)tensorflow/lite/experimental/microfrontend/lib/bits.h \ +$(TENSORFLOW_ROOT)tensorflow/lite/experimental/microfrontend/lib/fft.h \ +$(TENSORFLOW_ROOT)tensorflow/lite/experimental/microfrontend/lib/fft_util.h \ +$(TENSORFLOW_ROOT)tensorflow/lite/experimental/microfrontend/lib/filterbank.h \ 
+$(TENSORFLOW_ROOT)tensorflow/lite/experimental/microfrontend/lib/filterbank_util.h \ +$(TENSORFLOW_ROOT)tensorflow/lite/experimental/microfrontend/lib/frontend.h \ +$(TENSORFLOW_ROOT)tensorflow/lite/experimental/microfrontend/lib/frontend_util.h \ +$(TENSORFLOW_ROOT)tensorflow/lite/experimental/microfrontend/lib/kiss_fft_common.h \ +$(TENSORFLOW_ROOT)tensorflow/lite/experimental/microfrontend/lib/kiss_fft_int16.h \ +$(TENSORFLOW_ROOT)tensorflow/lite/experimental/microfrontend/lib/log_lut.h \ +$(TENSORFLOW_ROOT)tensorflow/lite/experimental/microfrontend/lib/log_scale.h \ +$(TENSORFLOW_ROOT)tensorflow/lite/experimental/microfrontend/lib/log_scale_util.h \ +$(TENSORFLOW_ROOT)tensorflow/lite/experimental/microfrontend/lib/noise_reduction.h \ +$(TENSORFLOW_ROOT)tensorflow/lite/experimental/microfrontend/lib/noise_reduction_util.h \ +$(TENSORFLOW_ROOT)tensorflow/lite/experimental/microfrontend/lib/pcan_gain_control.h \ +$(TENSORFLOW_ROOT)tensorflow/lite/experimental/microfrontend/lib/pcan_gain_control_util.h \ +$(TENSORFLOW_ROOT)tensorflow/lite/experimental/microfrontend/lib/window.h \ +$(TENSORFLOW_ROOT)tensorflow/lite/experimental/microfrontend/lib/window_util.h \ +$(KISSFFT_LIB_HDRS) + +MICRO_FEATURES_GENERATOR_SRCS := \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/examples/micro_speech/micro_features/micro_features_generator.cc \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/examples/micro_speech/micro_features/micro_model_settings.cc \ +$(MICRO_FEATURES_LIB_SRCS) + +MICRO_FEATURES_GENERATOR_HDRS := \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/examples/micro_speech/micro_features/micro_features_generator.h \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/examples/micro_speech/micro_features/micro_model_settings.h \ +$(MICRO_FEATURES_LIB_HDRS) + +MICRO_FEATURES_GENERATOR_TEST_SRCS := \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/examples/micro_speech/micro_features/micro_features_generator_test.cc \ 
+$(TENSORFLOW_ROOT)tensorflow/lite/micro/examples/micro_speech/micro_features/no_feature_data_slice.cc \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/examples/micro_speech/micro_features/yes_feature_data_slice.cc \ +$(MICRO_FEATURES_GENERATOR_SRCS) + +MICRO_FEATURES_GENERATOR_TEST_HDRS := \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/examples/micro_speech/micro_features/no_feature_data_slice.h \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/examples/micro_speech/micro_features/yes_feature_data_slice.h \ +$(MICRO_FEATURES_GENERATOR_HDRS) + +AUDIO_PROVIDER_TEST_SRCS := \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/examples/micro_speech/audio_provider_test.cc \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/examples/micro_speech/micro_features/micro_model_settings.cc \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/examples/micro_speech/audio_provider.cc + +AUDIO_PROVIDER_TEST_HDRS := \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/examples/micro_speech/micro_features/micro_model_settings.h \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/examples/micro_speech/audio_provider.h \ + +AUDIO_PROVIDER_MOCK_TEST_SRCS := \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/examples/micro_speech/audio_provider_mock_test.cc \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/examples/micro_speech/micro_features/micro_model_settings.cc \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/examples/micro_speech/audio_provider_mock.cc + +AUDIO_PROVIDER_MOCK_TEST_HDRS := \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/examples/micro_speech/micro_features/micro_model_settings.h \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/examples/micro_speech/audio_provider.h \ + +FEATURE_PROVIDER_TEST_SRCS := \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/examples/micro_speech/feature_provider_test.cc \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/examples/micro_speech/audio_provider.cc \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/examples/micro_speech/feature_provider.cc \ +$(MICRO_FEATURES_GENERATOR_SRCS) + +FEATURE_PROVIDER_TEST_HDRS := \ 
+$(TENSORFLOW_ROOT)tensorflow/lite/micro/examples/micro_speech/audio_provider.h \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/examples/micro_speech/feature_provider.h \ +$(MICRO_FEATURES_GENERATOR_HDRS) + +FEATURE_PROVIDER_MOCK_TEST_SRCS := \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/examples/micro_speech/feature_provider_test.cc \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/examples/micro_speech/audio_provider_mock.cc \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/examples/micro_speech/feature_provider.cc \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/examples/micro_speech/micro_features/no_micro_features_data.cc \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/examples/micro_speech/micro_features/yes_micro_features_data.cc \ +$(MICRO_FEATURES_GENERATOR_SRCS) + +FEATURE_PROVIDER_MOCK_TEST_HDRS := \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/examples/micro_speech/audio_provider.h \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/examples/micro_speech/feature_provider.h \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/examples/micro_speech/micro_features/no_micro_features_data.h \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/examples/micro_speech/micro_features/yes_micro_features_data.h \ +$(MICRO_FEATURES_GENERATOR_HDRS) + +RECOGNIZE_COMMANDS_TEST_SRCS := \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/examples/micro_speech/recognize_commands_test.cc \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/examples/micro_speech/micro_features/micro_model_settings.cc \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/examples/micro_speech/recognize_commands.cc + +RECOGNIZE_COMMANDS_TEST_HDRS := \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/examples/micro_speech/micro_features/micro_model_settings.h \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/examples/micro_speech/recognize_commands.h + +COMMAND_RESPONDER_TEST_SRCS := \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/examples/micro_speech/command_responder_test.cc \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/examples/micro_speech/command_responder.cc + +COMMAND_RESPONDER_TEST_HDRS := \ 
+$(TENSORFLOW_ROOT)tensorflow/lite/micro/examples/micro_speech/command_responder.h + +MICRO_SPEECH_SRCS := \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/examples/micro_speech/main.cc \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/examples/micro_speech/main_functions.cc \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/examples/micro_speech/audio_provider.cc \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/examples/micro_speech/feature_provider.cc \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/examples/micro_speech/micro_features/no_micro_features_data.cc \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/examples/micro_speech/micro_features/yes_micro_features_data.cc \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/examples/micro_speech/recognize_commands.cc \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/examples/micro_speech/command_responder.cc \ +$(MICRO_FEATURES_GENERATOR_SRCS) + +MICRO_SPEECH_HDRS := \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/examples/micro_speech/audio_provider.h \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/examples/micro_speech/feature_provider.h \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/examples/micro_speech/micro_features/no_micro_features_data.h \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/examples/micro_speech/micro_features/yes_micro_features_data.h \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/examples/micro_speech/recognize_commands.h \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/examples/micro_speech/command_responder.h \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/examples/micro_speech/main_functions.h \ +$(MICRO_FEATURES_GENERATOR_HDRS) + +MICRO_SPEECH_MOCK_SRCS := \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/examples/micro_speech/main.cc \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/examples/micro_speech/main_functions.cc \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/examples/micro_speech/audio_provider_mock.cc \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/examples/micro_speech/feature_provider.cc \ 
+$(TENSORFLOW_ROOT)tensorflow/lite/micro/examples/micro_speech/micro_features/no_micro_features_data.cc \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/examples/micro_speech/micro_features/yes_micro_features_data.cc \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/examples/micro_speech/recognize_commands.cc \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/examples/micro_speech/command_responder.cc \ +$(MICRO_FEATURES_GENERATOR_SRCS) + +MICRO_SPEECH_MOCK_HDRS := \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/examples/micro_speech/audio_provider.h \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/examples/micro_speech/feature_provider.h \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/examples/micro_speech/micro_features/micro_features_generator.h \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/examples/micro_speech/micro_features/no_micro_features_data.h \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/examples/micro_speech/micro_features/yes_micro_features_data.h \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/examples/micro_speech/recognize_commands.h \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/examples/micro_speech/command_responder.h \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/examples/micro_speech/main_functions.h \ +$(MICRO_FEATURES_GENERATOR_HDRS) + +MICRO_SPEECH_GENERATOR_INPUTS := \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/examples/micro_speech/micro_speech.tflite \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/examples/micro_speech/testdata/no_1000ms.wav \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/examples/micro_speech/testdata/no_30ms.wav \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/examples/micro_speech/testdata/yes_1000ms.wav \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/examples/micro_speech/testdata/yes_30ms.wav + +MICRO_SPEECH_GENERATED_SRCS := \ +$(GENERATED_SRCS_DIR)$(TENSORFLOW_ROOT)tensorflow/lite/micro/examples/micro_speech/micro_speech_model_data.cc \ +$(GENERATED_SRCS_DIR)$(TENSORFLOW_ROOT)tensorflow/lite/micro/examples/micro_speech/testdata/no_1000ms_audio_data.cc \ 
+$(GENERATED_SRCS_DIR)$(TENSORFLOW_ROOT)tensorflow/lite/micro/examples/micro_speech/testdata/no_30ms_audio_data.cc \ +$(GENERATED_SRCS_DIR)$(TENSORFLOW_ROOT)tensorflow/lite/micro/examples/micro_speech/testdata/yes_1000ms_audio_data.cc \ +$(GENERATED_SRCS_DIR)$(TENSORFLOW_ROOT)tensorflow/lite/micro/examples/micro_speech/testdata/yes_30ms_audio_data.cc + +MICRO_SPEECH_GENERATED_HDRS := \ +$(GENERATED_SRCS_DIR)$(TENSORFLOW_ROOT)tensorflow/lite/micro/examples/micro_speech/micro_speech_model_data.h \ +$(GENERATED_SRCS_DIR)$(TENSORFLOW_ROOT)tensorflow/lite/micro/examples/micro_speech/testdata/no_1000ms_audio_data.h \ +$(GENERATED_SRCS_DIR)$(TENSORFLOW_ROOT)tensorflow/lite/micro/examples/micro_speech/testdata/no_30ms_audio_data.h \ +$(GENERATED_SRCS_DIR)$(TENSORFLOW_ROOT)tensorflow/lite/micro/examples/micro_speech/testdata/yes_1000ms_audio_data.h \ +$(GENERATED_SRCS_DIR)$(TENSORFLOW_ROOT)tensorflow/lite/micro/examples/micro_speech/testdata/yes_30ms_audio_data.h + +# Find any platform-specific rules for this example. +include $(wildcard $(TENSORFLOW_ROOT)tensorflow/lite/micro/examples/micro_speech/*/Makefile.inc) + +# TODO(b/161489252): Disabling warnings for this example until we have a better +# way to build third_party code with a reduced list of CFLAGS. +CCFLAGS := $(filter-out $(CC_WARNINGS),$(CCFLAGS)) + +# Test the code for feature generation. +ifneq ($(TARGET_ARCH), $(filter $(TARGET_ARCH), hifi5 hifi3z)) + $(eval $(call microlite_test,micro_features_generator_test,\ + $(MICRO_FEATURES_GENERATOR_TEST_SRCS),$(MICRO_FEATURES_GENERATOR_TEST_HDRS),$(MICRO_SPEECH_GENERATOR_INPUTS))) +endif + +# Tests loading and running a speech model. +$(eval $(call microlite_test,micro_speech_test,\ +$(MICRO_SPEECH_TEST_SRCS),$(MICRO_SPEECH_TEST_HDRS),$(MICRO_SPEECH_GENERATOR_INPUTS))) + +# TODO(b/268568089): This test is taking a very long time to finish; causing the +# CI to run for a long time to finish.
+ifneq ($(TARGET_ARCH), $(filter $(TARGET_ARCH), hifimini hifi5 hifi3z)) + # Test the code for feature generation. + $(eval $(call microlite_test,simple_features_generator_test,\ + $(SIMPLE_FEATURES_GENERATOR_TEST_SRCS),$(SIMPLE_FEATURES_GENERATOR_TEST_HDRS),$(MICRO_SPEECH_GENERATOR_INPUTS))) +endif + +# Tests the audio provider module. +$(eval $(call microlite_test,audio_provider_test,\ +$(AUDIO_PROVIDER_TEST_SRCS),$(AUDIO_PROVIDER_TEST_HDRS))) + +# Tests the audio provider mock module. +$(eval $(call microlite_test,audio_provider_mock_test,\ +$(AUDIO_PROVIDER_MOCK_TEST_SRCS),$(AUDIO_PROVIDER_MOCK_TEST_HDRS),$(MICRO_SPEECH_GENERATOR_INPUTS))) + +# Tests the feature provider module. +ifneq ($(TARGET_ARCH), hifi3z) + $(eval $(call microlite_test,feature_provider_test,\ + $(FEATURE_PROVIDER_TEST_SRCS),$(FEATURE_PROVIDER_TEST_HDRS))) +endif + +# Tests the feature provider module using the mock audio provider. +ifneq ($(TARGET_ARCH), hifi3z) + $(eval $(call microlite_test,feature_provider_mock_test,\ + $(FEATURE_PROVIDER_MOCK_TEST_SRCS),$(FEATURE_PROVIDER_MOCK_TEST_HDRS),$(MICRO_SPEECH_GENERATOR_INPUTS))) +endif + +# Tests the command recognizer module. +$(eval $(call microlite_test,recognize_commands_test,\ +$(RECOGNIZE_COMMANDS_TEST_SRCS),$(RECOGNIZE_COMMANDS_TEST_HDRS))) + +# Tests responding to a command. +$(eval $(call microlite_test,command_responder_test,\ +$(COMMAND_RESPONDER_TEST_SRCS),$(COMMAND_RESPONDER_TEST_HDRS))) + +# Builds a standalone speech command recognizer binary. +$(eval $(call microlite_test,micro_speech,\ +$(MICRO_SPEECH_SRCS),$(MICRO_SPEECH_HDRS),$(MICRO_SPEECH_GENERATOR_INPUTS))) + +# Builds a standalone speech command recognizer binary using fake audio input. +$(eval $(call microlite_test,micro_speech_mock,\ +$(MICRO_SPEECH_MOCK_SRCS),$(MICRO_SPEECH_MOCK_HDRS),$(MICRO_SPEECH_GENERATOR_INPUTS))) + +# Add sources and headers generated from $(MICRO_SPEECH_GENERATOR_INPUTS). 
+MICRO_SPEECH_SRCS += $(MICRO_SPEECH_GENERATED_SRCS) +MICRO_SPEECH_HDRS += $(MICRO_SPEECH_GENERATED_HDRS) + +MICRO_SPEECH_MOCK_SRCS += $(MICRO_SPEECH_GENERATED_SRCS) +MICRO_SPEECH_MOCK_HDRS += $(MICRO_SPEECH_GENERATED_HDRS) + +list_micro_speech_example_sources: + @echo $(MICRO_SPEECH_SRCS) + +list_micro_speech_example_headers: + @echo $(MICRO_SPEECH_HDRS) + +list_micro_speech_mock_example_sources: + @echo $(MICRO_SPEECH_MOCK_SRCS) + +list_micro_speech_mock_example_headers: + @echo $(MICRO_SPEECH_MOCK_HDRS) diff --git a/tensorflow/lite/micro/examples/micro_speech/README.md b/tensorflow/lite/micro/examples/micro_speech/README.md new file mode 100644 index 0000000..8a4aa77 --- /dev/null +++ b/tensorflow/lite/micro/examples/micro_speech/README.md @@ -0,0 +1,322 @@ + + +# Micro Speech Example + +This example shows how to run a 20 kB model that can recognize 2 keywords, +"yes" and "no", from speech data. + +The application listens to its surroundings with a microphone and indicates +when it has detected a word by lighting an LED or displaying data on a +screen, depending on the capabilities of the device. + +![Animation on Arduino](images/animation_on_arduino.gif) + +The code has a small footprint (for example, around 22 kilobytes on a Cortex +M3) and only uses about 10 kilobytes of RAM for working memory, so it's able to +run on systems like an STM32F103 with only 20 kilobytes of total SRAM and 64 +kilobytes of Flash. 
+ +## Table of contents + +- [Deploy to STM32F746](#deploy-to-STM32F746) +- [Deploy to NXP FRDM K66F](#deploy-to-nxp-frdm-k66f) +- [Deploy to CEVA BX1/SP500](#deploy-to-ceva-bx1) +- [Run on macOS](#run-on-macos) +- [Run the tests on a development machine](#run-the-tests-on-a-development-machine) +- [Train your own model](#train-your-own-model) + +## Deploy to STM32F746 + +The following instructions will help you build and deploy the example to the +[STM32F7 discovery kit](https://os.mbed.com/platforms/ST-Discovery-F746NG/) +using [ARM Mbed](https://github.com/ARMmbed/mbed-cli). + +Before we begin, you'll need the following: + +- STM32F7 discovery kit board +- Mini-USB cable +- ARM Mbed CLI ([installation instructions](https://os.mbed.com/docs/mbed-os/v6.9/quick-start/build-with-mbed-cli.html). Check it out for MacOS Catalina - [mbed-cli is broken on MacOS Catalina #930](https://github.com/ARMmbed/mbed-cli/issues/930#issuecomment-660550734)) +- Python 3 and pip3 + +Since Mbed requires a special folder structure for projects, we'll first run a +command to generate a subfolder containing the required source files in this +structure: + +``` +make -f tensorflow/lite/micro/tools/make/Makefile TARGET=disco_f746ng OPTIMIZED_KERNEL_DIR=cmsis_nn generate_micro_speech_mbed_project +``` + +Running the make command will result in the creation of a new folder: + +``` +gen/disco_f746ng_cortex-m4_default/prj/micro_speech/mbed +``` + +This folder contains all of the example's dependencies structured in the correct +way for Mbed to be able to build it. + +Change into the directory and run the following commands. + +First, tell Mbed that the current directory is the root of an Mbed project: + +``` +mbed config root . +``` + +Next, tell Mbed to download the dependencies and prepare to build: + +``` +mbed deploy +``` + +Older versions of Mbed will build the project using C++98. However, TensorFlow Lite +requires C++11. 
If needed, run the following Python snippet to modify the Mbed +configuration files so that it uses C++11: + +``` +python -c 'import fileinput, glob; +for filename in glob.glob("mbed-os/tools/profiles/*.json"): + for line in fileinput.input(filename, inplace=True): + print(line.replace("\"-std=gnu++98\"","\"-std=c++11\", \"-fpermissive\""))' +``` + +Note: Mbed has a dependency to an old version of arm_math.h and cmsis_gcc.h (adapted from the general [CMSIS-NN MBED example](https://github.com/tensorflow/tflite-micro/blob/main/tensorflow/lite/micro/kernels/cmsis_nn#example-2---mbed)). Therefore you need to copy the newer version as follows: +```bash +cp tensorflow/lite/micro/tools/make/downloads/cmsis/CMSIS/DSP/Include/\ +arm_math.h mbed-os/cmsis/TARGET_CORTEX_M/arm_math.h +cp tensorflow/lite/micro/tools/make/downloads/cmsis/CMSIS/Core/Include/\ +cmsis_gcc.h mbed-os/cmsis/TARGET_CORTEX_M/cmsis_gcc.h +``` + +Finally, run the following command to compile: + +``` +mbed compile -m DISCO_F746NG -t GCC_ARM +``` + +This should result in a binary at the following path: + +``` +./BUILD/DISCO_F746NG/GCC_ARM/mbed.bin +``` + +To deploy, plug in your STM board and copy the file to it. On macOS, you can do +this with the following command: + +``` +cp ./BUILD/DISCO_F746NG/GCC_ARM/mbed.bin /Volumes/DIS_F746NG/ +``` + +Copying the file will initiate the flashing process. + +The inference results are logged by the board while the program is running. +To view it, establish a serial connection to the board +using a baud rate of `9600`. On OSX and Linux, the following command should +work, replacing `/dev/tty.devicename` with the name of your device as it appears +in `/dev`: + +``` +screen /dev/tty.devicename 9600 +``` + +You will see a line output for every word that is detected: + +``` +Heard yes (201) @4056ms +Heard no (205) @6448ms +Heard unknown (201) @13696ms +Heard yes (205) @15000ms +``` + +The number after each detected word is its score. 
By default, the program only +considers matches as valid if their score is over 200, so all of the scores you +see will be at least 200. + +To stop viewing the debug output with `screen`, hit `Ctrl+A`, immediately +followed by the `K` key, then hit the `Y` key. + +## Deploy to NXP FRDM K66F + +The following instructions will help you build and deploy the example to the +[NXP FRDM K66F](https://www.nxp.com/design/development-boards/freedom-development-boards/mcu-boards/freedom-development-platform-for-kinetis-k66-k65-and-k26-mcus:FRDM-K66F) +using [ARM Mbed](https://github.com/ARMmbed/mbed-cli). + +1. Download + [the TensorFlow source code](https://github.com/tensorflow/tensorflow). +2. Follow instructions from + [mbed website](https://os.mbed.com/docs/mbed-os/v5.13/tools/installation-and-setup.html) + to setup and install mbed CLI. +3. Compile TensorFlow with the following command to generate mbed project: + + ``` + make -f tensorflow/lite/micro/tools/make/Makefile TARGET=mbed TAGS="nxp_k66f" generate_micro_speech_mbed_project + ``` + +4. Change into the following directory that has been generated: + `gen/mbed_cortex-m4/prj/micro_speech/mbed` + +5. Create an Mbed project using the generated files, run ensuring your + environment is using Python 2.7: `mbed config root .` + +6. Next, tell Mbed to download the dependencies and prepare to build: `mbed + deploy` + +7. Finally, we can run the following command to compile the code: `mbed compile + -m K66F -t GCC_ARM` + +8. For some Mbed compilers (such as GCC), you may get compile error in + mbed_rtc_time.cpp. Go to `mbed-os/platform/mbed_rtc_time.h` and comment line + 32 and line 37: + + ``` + //#if !defined(__GNUC__) || defined(__CC_ARM) || defined(__clang__) + struct timeval { + time_t tv_sec; + int32_t tv_usec; + }; + //#endif + ``` + +9. If your system does not recognize the board with the `mbed detect` command. 
+ Follow the instructions for setting up + [DAPLink](https://armmbed.github.io/DAPLink/?board=FRDM-K66F) for the + [K66F](https://os.mbed.com/platforms/FRDM-K66F/). + +10. Connect the USB cable to the micro USB port. When the Ethernet port is + facing towards you, the micro USB port is left of the Ethernet port. + +11. To compile and flash in a single step, add the `--flash` option: + + ``` + mbed compile -m K66F -t GCC_ARM --flash + ``` + +12. Disconnect USB cable from the device to power down the device and connect + back the power cable to start running the model. + +13. Connect to serial port with baud rate of 9600 and correct serial device to + view the output from the MCU. In linux, you can run the following screen + command if the serial device is `/dev/ttyACM0`: + + ``` + sudo screen /dev/ttyACM0 9600 + ``` + +14. Saying "Yes" will print "Yes" and "No" will print "No" on the serial port. + +15. A loopback path from microphone to headset jack is enabled. Headset jack is + in black color. If there is no output on the serial port, you can connect + headphone to headphone port to check if audio loopback path is working. + +## Deploy to CEVA-BX1 + +The following instructions will help you build and deploy the sample to the +[CEVA-BX1](https://www.ceva-dsp.com/product/ceva-bx1-sound/) or [CEVA-SP500](https://www.ceva-dsp.com/product/ceva-senspro/) + +1. Contact CEVA at [sales@ceva-dsp.com](mailto:sales@ceva-dsp.com) +2. For BX1: +2.1. Download and install CEVA-BX Toolbox v18.0.2 +2.2. Set the TARGET_TOOLCHAIN_ROOT variable in + /tensorflow/lite/micro/tools/make/templates/ceva_bx1/ceva_app_makefile.tpl + To your installation location. For example: TARGET_TOOLCHAIN_ROOT := + /home/myuser/work/CEVA-ToolBox/V18/BX +2.3. Generate the Makefile for the project: /tensorflow$ make -f + tensorflow/lite/micro/tools/make/Makefile TARGET=ceva TARGET_ARCH=CEVA_BX1 + generate_micro_speech_make_project +3. For SensPro (SP500): +3.1. Download and install CEVA-SP Toolbox v20 +3.2. 
Set the TARGET_TOOLCHAIN_ROOT variable in + /tensorflow/lite/micro/tools/make/templates/ceva_SP500/ceva_app_makefile.tpl + To your installation location. For example: TARGET_TOOLCHAIN_ROOT := + /home/myuser/work/CEVA-ToolBox/V20/SensPro +3.3. Generate the Makefile for the project: /tensorflow$ make -f + tensorflow/lite/micro/tools/make/Makefile TARGET=ceva TARGET_ARCH=CEVA_SP500 + generate_micro_speech_make_project +5. Build the project: + /gen/ceva_bx1/prj/micro_speech/make$ make +6. This should build the project and create a file called micro_speech.elf. +7. The supplied configuration reads input from a file and expects a file + called input.wav (easily changed in audio_provider.cc) to be placed in the + same directory as the .elf file +8. We used Google's speech command dataset: V0.0.2: + http://download.tensorflow.org/data/speech_commands_v0.02.tar.gz V0.0.1: + http://download.tensorflow.org/data/speech_commands_v0.01.tar.gz +9. Follow CEVA Toolbox instructions for creating a debug target and running the + project. +10. Output should look like: Heard silence (208) @352ms Heard no (201) @1696ms + Heard yes (203) @3904ms + +## Run on macOS + +The example contains an audio provider compatible with macOS. If you have access +to a Mac, you can run the example on your development machine. + +First, use the following command to build it: + +``` +make -f tensorflow/lite/micro/tools/make/Makefile micro_speech +``` + +Once the build completes, you can run the example with the following command: + +``` +gen/osx_x86_64/bin/micro_speech +``` + +You might see a pop-up asking for microphone access. If so, grant it, and the +program will start. + +Try saying "yes" and "no". You should see output that looks like the following: + +``` +Heard yes (201) @4056ms +Heard no (205) @6448ms +Heard unknown (201) @13696ms +Heard yes (205) @15000ms +Heard yes (205) @16856ms +Heard unknown (204) @18704ms +Heard no (206) @21000ms +``` + +The number after each detected word is its score.
By default, the recognize +commands component only considers matches as valid if their score is over 200, +so all of the scores you see will be at least 200. + +The number after the score is the number of milliseconds since the program was +started. + +If you don't see any output, make sure your Mac's internal microphone is +selected in the Mac's *Sound* menu, and that its input volume is turned up high +enough. + +## Run the tests on a development machine + +To compile and test this example on a desktop Linux or macOS machine, download +[the TensorFlow source code](https://github.com/tensorflow/tensorflow), `cd` +into the source directory from a terminal, and then run the following command: + +``` +make -f tensorflow/lite/micro/tools/make/Makefile test_micro_speech_test +``` + +This will take a few minutes, and downloads frameworks the code uses like +[CMSIS](https://developer.arm.com/embedded/cmsis) and +[flatbuffers](https://google.github.io/flatbuffers/). Once that process has +finished, you should see a series of files get compiled, followed by some +logging output from a test, which should conclude with `~~~ALL TESTS PASSED~~~`. + +If you see this, it means that a small program has been built and run that loads +the trained TensorFlow model, runs some example inputs through it, and got the +expected outputs. + +To understand how TensorFlow Lite does this, you can look at the source in +[micro_speech_test.cc](micro_speech_test.cc). +It's a fairly small amount of code that creates an interpreter, gets a handle to +a model that's been compiled into the program, and then invokes the interpreter +with the model and sample inputs. + +## Train your own model + +So far you have used an existing trained model to run inference on +microcontrollers. If you wish to train your own model, follow the instructions +given in the [train/](train/) directory. 
diff --git a/tensorflow/lite/micro/examples/micro_speech/audio_provider.cc b/tensorflow/lite/micro/examples/micro_speech/audio_provider.cc new file mode 100644 index 0000000..5ca425d --- /dev/null +++ b/tensorflow/lite/micro/examples/micro_speech/audio_provider.cc @@ -0,0 +1,38 @@ +/* Copyright 2018 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#include "tensorflow/lite/micro/examples/micro_speech/audio_provider.h" + +#include "tensorflow/lite/micro/examples/micro_speech/micro_features/micro_model_settings.h" + +namespace { +int16_t g_dummy_audio_data[kMaxAudioSampleSize]; +int32_t g_latest_audio_timestamp = 0; +} // namespace + +TfLiteStatus GetAudioSamples(int start_ms, int duration_ms, + int* audio_samples_size, int16_t** audio_samples) { + for (int i = 0; i < kMaxAudioSampleSize; ++i) { + g_dummy_audio_data[i] = 0; + } + *audio_samples_size = kMaxAudioSampleSize; + *audio_samples = g_dummy_audio_data; + return kTfLiteOk; +} + +int32_t LatestAudioTimestamp() { + g_latest_audio_timestamp += 100; + return g_latest_audio_timestamp; +} diff --git a/tensorflow/lite/micro/examples/micro_speech/audio_provider.h b/tensorflow/lite/micro/examples/micro_speech/audio_provider.h new file mode 100644 index 0000000..d3aab2c --- /dev/null +++ b/tensorflow/lite/micro/examples/micro_speech/audio_provider.h @@ -0,0 +1,44 @@ +/* Copyright 2018 The TensorFlow Authors. 
All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#ifndef TENSORFLOW_LITE_MICRO_EXAMPLES_MICRO_SPEECH_AUDIO_PROVIDER_H_ +#define TENSORFLOW_LITE_MICRO_EXAMPLES_MICRO_SPEECH_AUDIO_PROVIDER_H_ + +#include "tensorflow/lite/c/common.h" + +// This is an abstraction around an audio source like a microphone, and is +// expected to return 16-bit PCM sample data for a given point in time. The +// sample data itself should be used as quickly as possible by the caller, since +// to allow memory optimizations there are no guarantees that the samples won't +// be overwritten by new data in the future. In practice, implementations should +// ensure that there's a reasonable time allowed for clients to access the data +// before any reuse. +// The reference implementation can have no platform-specific dependencies, so +// it just returns an array filled with zeros. For real applications, you should +// ensure there's a specialized implementation that accesses hardware APIs. +TfLiteStatus GetAudioSamples(int start_ms, int duration_ms, + int* audio_samples_size, int16_t** audio_samples); + +// Returns the time that audio data was last captured in milliseconds. There's +// no contract about what time zero represents, the accuracy, or the granularity +// of the result. Subsequent calls will generally not return a lower value, but +// even that's not guaranteed if there's an overflow wraparound. 
+// The reference implementation of this function just returns a constantly +// incrementing value for each call, since it would need a non-portable platform +// call to access time information. For real applications, you'll need to write +// your own platform-specific implementation. +int32_t LatestAudioTimestamp(); + +#endif // TENSORFLOW_LITE_MICRO_EXAMPLES_MICRO_SPEECH_AUDIO_PROVIDER_H_ diff --git a/tensorflow/lite/micro/examples/micro_speech/audio_provider_mock.cc b/tensorflow/lite/micro/examples/micro_speech/audio_provider_mock.cc new file mode 100644 index 0000000..fe3ad16 --- /dev/null +++ b/tensorflow/lite/micro/examples/micro_speech/audio_provider_mock.cc @@ -0,0 +1,54 @@ +/* Copyright 2018 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/ + +#include "tensorflow/lite/micro/examples/micro_speech/audio_provider.h" +#include "tensorflow/lite/micro/examples/micro_speech/micro_features/micro_model_settings.h" +#include "tensorflow/lite/micro/examples/micro_speech/testdata/no_1000ms_audio_data.h" +#include "tensorflow/lite/micro/examples/micro_speech/testdata/yes_1000ms_audio_data.h" + +namespace { +int16_t g_dummy_audio_data[kMaxAudioSampleSize]; +int32_t g_latest_audio_timestamp = 0; +} // namespace + +TfLiteStatus GetAudioSamples(int start_ms, int duration_ms, + int* audio_samples_size, int16_t** audio_samples) { + const int yes_start = (0 * kAudioSampleFrequency) / 1000; + const int yes_end = (1000 * kAudioSampleFrequency) / 1000; + const int no_start = (4000 * kAudioSampleFrequency) / 1000; + const int no_end = (5000 * kAudioSampleFrequency) / 1000; + const int wraparound = (8000 * kAudioSampleFrequency) / 1000; + const int start_sample = (start_ms * kAudioSampleFrequency) / 1000; + for (int i = 0; i < kMaxAudioSampleSize; ++i) { + const int sample_index = (start_sample + i) % wraparound; + int16_t sample; + if ((sample_index >= yes_start) && (sample_index < yes_end)) { + sample = g_yes_1000ms_audio_data[sample_index - yes_start]; + } else if ((sample_index >= no_start) && (sample_index < no_end)) { + sample = g_no_1000ms_audio_data[sample_index - no_start]; + } else { + sample = 0; + } + g_dummy_audio_data[i] = sample; + } + *audio_samples_size = kMaxAudioSampleSize; + *audio_samples = g_dummy_audio_data; + return kTfLiteOk; +} + +int32_t LatestAudioTimestamp() { + g_latest_audio_timestamp += 100; + return g_latest_audio_timestamp; +} diff --git a/tensorflow/lite/micro/examples/micro_speech/audio_provider_mock_test.cc b/tensorflow/lite/micro/examples/micro_speech/audio_provider_mock_test.cc new file mode 100644 index 0000000..b15749e --- /dev/null +++ 
b/tensorflow/lite/micro/examples/micro_speech/audio_provider_mock_test.cc @@ -0,0 +1,68 @@ +/* Copyright 2018 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#include <limits> + +#include "tensorflow/lite/c/common.h" +#include "tensorflow/lite/micro/examples/micro_speech/audio_provider.h" +#include "tensorflow/lite/micro/examples/micro_speech/micro_features/micro_model_settings.h" +#include "tensorflow/lite/micro/examples/micro_speech/testdata/no_1000ms_audio_data.h" +#include "tensorflow/lite/micro/examples/micro_speech/testdata/yes_1000ms_audio_data.h" +#include "tensorflow/lite/micro/testing/micro_test.h" + +TF_LITE_MICRO_TESTS_BEGIN + +TF_LITE_MICRO_TEST(TestAudioProviderMock) { + int audio_samples_size = 0; + int16_t* audio_samples = nullptr; + TfLiteStatus get_status = GetAudioSamples( + 0, kFeatureSliceDurationMs, &audio_samples_size, &audio_samples); + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, get_status); + TF_LITE_MICRO_EXPECT_LE(audio_samples_size, kMaxAudioSampleSize); +
TF_LITE_MICRO_EXPECT(audio_samples != nullptr); + for (int i = 0; i < audio_samples_size; ++i) { + TF_LITE_MICRO_EXPECT_EQ(g_yes_1000ms_audio_data[i + 8000], + audio_samples[i]); + } + + get_status = GetAudioSamples(1500, kFeatureSliceDurationMs, + &audio_samples_size, &audio_samples); + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, get_status); + TF_LITE_MICRO_EXPECT_LE(audio_samples_size, kMaxAudioSampleSize); + TF_LITE_MICRO_EXPECT(audio_samples != nullptr); + for (int i = 0; i < audio_samples_size; ++i) { + TF_LITE_MICRO_EXPECT_EQ(0, audio_samples[i]); + } + + get_status = GetAudioSamples(12250, kFeatureSliceDurationMs, + &audio_samples_size, &audio_samples); + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, get_status); + TF_LITE_MICRO_EXPECT_LE(audio_samples_size, kMaxAudioSampleSize); + TF_LITE_MICRO_EXPECT(audio_samples != nullptr); + for (int i = 0; i < audio_samples_size; ++i) { + TF_LITE_MICRO_EXPECT_EQ(g_no_1000ms_audio_data[i + 4000], audio_samples[i]); + } +} + +TF_LITE_MICRO_TESTS_END diff --git a/tensorflow/lite/micro/examples/micro_speech/audio_provider_test.cc b/tensorflow/lite/micro/examples/micro_speech/audio_provider_test.cc new file mode 100644 index 0000000..fb403c0 --- /dev/null +++ b/tensorflow/lite/micro/examples/micro_speech/audio_provider_test.cc @@ -0,0 +1,66 @@ +/* Copyright 2022 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/ + +#include "tensorflow/lite/micro/examples/micro_speech/audio_provider.h" + +#include <limits> + +#include "tensorflow/lite/c/common.h" +#include "tensorflow/lite/micro/examples/micro_speech/micro_features/micro_model_settings.h" +#include "tensorflow/lite/micro/testing/micro_test.h" + +TF_LITE_MICRO_TESTS_BEGIN + +TF_LITE_MICRO_TEST(TestAudioProvider) { + int audio_samples_size = 0; + int16_t* audio_samples = nullptr; + TfLiteStatus get_status = GetAudioSamples( + 0, kFeatureSliceDurationMs, &audio_samples_size, &audio_samples); + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, get_status); + TF_LITE_MICRO_EXPECT_LE(audio_samples_size, kMaxAudioSampleSize); + TF_LITE_MICRO_EXPECT(audio_samples != nullptr); + + // Make sure we can read all of the returned memory locations. + int total = 0; + for (int i = 0; i < audio_samples_size; ++i) { + total += audio_samples[i]; + } + (void)total; +} + +TF_LITE_MICRO_TEST(TestTimer) { + // Make sure that the technically-undefined overflow behavior we rely on below + // works on this platform. It's still not guaranteed, but at least this is a + // smoke check. Turn off when running with ASan, as it will complain about + // the following undefined behavior. +#ifndef ADDRESS_SANITIZER + int32_t overflow_value = std::numeric_limits<int32_t>::max(); + overflow_value += 1; + TF_LITE_MICRO_EXPECT_EQ(std::numeric_limits<int32_t>::min(), overflow_value); +#endif + + const int32_t first_time = LatestAudioTimestamp(); + const int32_t second_time = LatestAudioTimestamp(); + + // It's possible that the timer may have wrapped around from +BIG_NUM to + // -BIG_NUM between the first and second calls, since we're storing + // milliseconds in a 32-bit integer. It's not reasonable that the call itself + // would have taken more than 2^31 milliseconds though, so look at the + // difference and rely on integer overflow to ensure it's accurate.
+ const int32_t time_delta = (second_time - first_time); + TF_LITE_MICRO_EXPECT_LE(0, time_delta); +} + +TF_LITE_MICRO_TESTS_END diff --git a/tensorflow/lite/micro/examples/micro_speech/command_responder.cc b/tensorflow/lite/micro/examples/micro_speech/command_responder.cc new file mode 100644 index 0000000..2184478 --- /dev/null +++ b/tensorflow/lite/micro/examples/micro_speech/command_responder.cc @@ -0,0 +1,28 @@ +/* Copyright 2019 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#include "tensorflow/lite/micro/examples/micro_speech/command_responder.h" + +#include "tensorflow/lite/micro/micro_log.h" + +// The default implementation writes out the name of the recognized command +// to the error console. Real applications will want to take some custom +// action instead, and should implement their own versions of this function. 
+void RespondToCommand(int32_t current_time, const char* found_command, + uint8_t score, bool is_new_command) { + if (is_new_command) { + MicroPrintf("Heard %s (%d) @%dms", found_command, score, current_time); + } +} diff --git a/tensorflow/lite/micro/examples/micro_speech/command_responder.h b/tensorflow/lite/micro/examples/micro_speech/command_responder.h new file mode 100644 index 0000000..a1acb99 --- /dev/null +++ b/tensorflow/lite/micro/examples/micro_speech/command_responder.h @@ -0,0 +1,30 @@ +/* Copyright 2019 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +// Provides an interface to take an action based on an audio command. + +#ifndef TENSORFLOW_LITE_MICRO_EXAMPLES_MICRO_SPEECH_COMMAND_RESPONDER_H_ +#define TENSORFLOW_LITE_MICRO_EXAMPLES_MICRO_SPEECH_COMMAND_RESPONDER_H_ + +#include "tensorflow/lite/c/common.h" + +// Called every time the results of an audio recognition run are available. The +// human-readable name of any recognized command is in the `found_command` +// argument, `score` has the numerical confidence, and `is_new_command` is set +// if the previous command was different to this one. 
// Called every time the results of an audio recognition run are available.
// `found_command` holds the human-readable name of any recognized command,
// `score` the numerical confidence, and `is_new_command` is set when the
// command differs from the previously reported one.
void RespondToCommand(int32_t current_time, const char* found_command,
                      uint8_t score, bool is_new_command);

#endif  // TENSORFLOW_LITE_MICRO_EXAMPLES_MICRO_SPEECH_COMMAND_RESPONDER_H_

#include "tensorflow/lite/micro/examples/micro_speech/command_responder.h"

#include "tensorflow/lite/micro/testing/micro_test.h"

TF_LITE_MICRO_TESTS_BEGIN

TF_LITE_MICRO_TEST(TestCallability) {
  // This will have external side-effects (like printing to the debug console
  // or lighting an LED) that are hard to observe, so the most we can do is
  // make sure the call doesn't crash.
  RespondToCommand(0, "foo", 0, true);
}

TF_LITE_MICRO_TESTS_END
+ +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#include "tensorflow/lite/micro/examples/micro_speech/feature_provider.h" + +#include "tensorflow/lite/micro/examples/micro_speech/audio_provider.h" +#include "tensorflow/lite/micro/examples/micro_speech/micro_features/micro_features_generator.h" +#include "tensorflow/lite/micro/examples/micro_speech/micro_features/micro_model_settings.h" +#include "tensorflow/lite/micro/micro_log.h" + +FeatureProvider::FeatureProvider(int feature_size, int8_t* feature_data) + : feature_size_(feature_size), + feature_data_(feature_data), + is_first_run_(true) { + // Initialize the feature data to default values. + for (int n = 0; n < feature_size_; ++n) { + feature_data_[n] = 0; + } +} + +FeatureProvider::~FeatureProvider() {} + +TfLiteStatus FeatureProvider::PopulateFeatureData(int32_t last_time_in_ms, + int32_t time_in_ms, + int* how_many_new_slices) { + if (feature_size_ != kFeatureElementCount) { + MicroPrintf("Requested feature_data_ size %d doesn't match %d", + feature_size_, kFeatureElementCount); + return kTfLiteError; + } + + // Quantize the time into steps as long as each window stride, so we can + // figure out which audio data we need to fetch. 
+ const int last_step = (last_time_in_ms / kFeatureSliceStrideMs); + const int current_step = (time_in_ms / kFeatureSliceStrideMs); + + int slices_needed = current_step - last_step; + // If this is the first call, make sure we don't use any cached information. + if (is_first_run_) { + TfLiteStatus init_status = InitializeMicroFeatures(); + if (init_status != kTfLiteOk) { + return init_status; + } + is_first_run_ = false; + slices_needed = kFeatureSliceCount; + } + if (slices_needed > kFeatureSliceCount) { + slices_needed = kFeatureSliceCount; + } + *how_many_new_slices = slices_needed; + + const int slices_to_keep = kFeatureSliceCount - slices_needed; + const int slices_to_drop = kFeatureSliceCount - slices_to_keep; + // If we can avoid recalculating some slices, just move the existing data + // up in the spectrogram, to perform something like this: + // last time = 80ms current time = 120ms + // +-----------+ +-----------+ + // | data@20ms | --> | data@60ms | + // +-----------+ -- +-----------+ + // | data@40ms | -- --> | data@80ms | + // +-----------+ -- -- +-----------+ + // | data@60ms | -- -- | | + // +-----------+ -- +-----------+ + // | data@80ms | -- | | + // +-----------+ +-----------+ + if (slices_to_keep > 0) { + for (int dest_slice = 0; dest_slice < slices_to_keep; ++dest_slice) { + int8_t* dest_slice_data = + feature_data_ + (dest_slice * kFeatureSliceSize); + const int src_slice = dest_slice + slices_to_drop; + const int8_t* src_slice_data = + feature_data_ + (src_slice * kFeatureSliceSize); + for (int i = 0; i < kFeatureSliceSize; ++i) { + dest_slice_data[i] = src_slice_data[i]; + } + } + } + // Any slices that need to be filled in with feature data have their + // appropriate audio data pulled, and features calculated for that slice. 
+ if (slices_needed > 0) { + for (int new_slice = slices_to_keep; new_slice < kFeatureSliceCount; + ++new_slice) { + const int new_step = (current_step - kFeatureSliceCount + 1) + new_slice; + const int32_t slice_start_ms = (new_step * kFeatureSliceStrideMs); + int16_t* audio_samples = nullptr; + int audio_samples_size = 0; + // TODO(petewarden): Fix bug that leads to non-zero slice_start_ms + GetAudioSamples((slice_start_ms > 0 ? slice_start_ms : 0), + kFeatureSliceDurationMs, &audio_samples_size, + &audio_samples); + if (audio_samples_size < kMaxAudioSampleSize) { + MicroPrintf("Audio data size %d too small, want %d", audio_samples_size, + kMaxAudioSampleSize); + return kTfLiteError; + } + int8_t* new_slice_data = feature_data_ + (new_slice * kFeatureSliceSize); + size_t num_samples_read; + TfLiteStatus generate_status = GenerateMicroFeatures( + audio_samples, audio_samples_size, kFeatureSliceSize, new_slice_data, + &num_samples_read); + if (generate_status != kTfLiteOk) { + return generate_status; + } + } + } + return kTfLiteOk; +} diff --git a/tensorflow/lite/micro/examples/micro_speech/feature_provider.h b/tensorflow/lite/micro/examples/micro_speech/feature_provider.h new file mode 100644 index 0000000..2a2ef8f --- /dev/null +++ b/tensorflow/lite/micro/examples/micro_speech/feature_provider.h @@ -0,0 +1,50 @@ +/* Copyright 2018 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
==============================================================================*/

#ifndef TENSORFLOW_LITE_MICRO_EXAMPLES_MICRO_SPEECH_FEATURE_PROVIDER_H_
#define TENSORFLOW_LITE_MICRO_EXAMPLES_MICRO_SPEECH_FEATURE_PROVIDER_H_

#include "tensorflow/lite/c/common.h"

// Binds itself to an area of memory intended to hold the input features for an
// audio-recognition neural network model, and fills that data area with the
// features representing the current audio input, for example from a
// microphone. The audio features themselves are a two-dimensional array, made
// up of horizontal slices representing the frequencies at one point in time,
// stacked on top of each other to form a spectrogram showing how those
// frequencies changed over time.
class FeatureProvider {
 public:
  // Create the provider, and bind it to an area of memory. This memory should
  // remain accessible for the lifetime of the provider object, since
  // subsequent calls will fill it with feature data. The provider does no
  // memory management of this data.
  FeatureProvider(int feature_size, int8_t* feature_data);
  ~FeatureProvider();

  // Fills the feature data with information from audio inputs between the two
  // millisecond timestamps, and reports via `how_many_new_slices` how many
  // feature slices were updated. Returns kTfLiteError when the bound buffer
  // size doesn't match the model settings or audio processing fails.
  TfLiteStatus PopulateFeatureData(int32_t last_time_in_ms, int32_t time_in_ms,
                                   int* how_many_new_slices);

 private:
  // Element count of the bound buffer; checked against kFeatureElementCount.
  int feature_size_;
  // Caller-owned output buffer holding the spectrogram; not freed here.
  int8_t* feature_data_;
  // Make sure we don't try to use cached information if this is the first call
  // into the provider.
  bool is_first_run_;
};

#endif  // TENSORFLOW_LITE_MICRO_EXAMPLES_MICRO_SPEECH_FEATURE_PROVIDER_H_
==============================================================================*/

#include "tensorflow/lite/c/common.h"
#include "tensorflow/lite/micro/examples/micro_speech/feature_provider.h"
#include "tensorflow/lite/micro/examples/micro_speech/micro_features/micro_model_settings.h"
#include "tensorflow/lite/micro/examples/micro_speech/micro_features/no_micro_features_data.h"
#include "tensorflow/lite/micro/examples/micro_speech/micro_features/yes_micro_features_data.h"
#include "tensorflow/lite/micro/testing/micro_test.h"

TF_LITE_MICRO_TESTS_BEGIN

// With the mock audio provider feeding a known "yes" clip, a full refresh of
// the spectrogram must reproduce the golden "yes" feature data exactly.
TF_LITE_MICRO_TEST(TestFeatureProviderMockYes) {
  int8_t feature_data[kFeatureElementCount];
  FeatureProvider feature_provider(kFeatureElementCount, feature_data);

  int how_many_new_slices = 0;
  TfLiteStatus populate_status = feature_provider.PopulateFeatureData(
      /* last_time_in_ms= */ 0, /* time_in_ms= */ 970, &how_many_new_slices);
  TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, populate_status);
  TF_LITE_MICRO_EXPECT_EQ(kFeatureSliceCount, how_many_new_slices);

  for (int i = 0; i < kFeatureElementCount; ++i) {
    TF_LITE_MICRO_EXPECT_EQ(g_yes_micro_f2e59fea_nohash_1_data[i],
                            feature_data[i]);
  }
}

// Same check against the golden "no" features; the 4000ms offset selects the
// mock provider's "no" clip.
TF_LITE_MICRO_TEST(TestFeatureProviderMockNo) {
  int8_t feature_data[kFeatureElementCount];
  FeatureProvider feature_provider(kFeatureElementCount, feature_data);

  int how_many_new_slices = 0;
  TfLiteStatus populate_status = feature_provider.PopulateFeatureData(
      /* last_time_in_ms= */ 4000,
      /* time_in_ms= */ 4970, &how_many_new_slices);
  TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, populate_status);
  TF_LITE_MICRO_EXPECT_EQ(kFeatureSliceCount, how_many_new_slices);

  for (int i = 0; i < kFeatureElementCount; ++i) {
    TF_LITE_MICRO_EXPECT_EQ(g_no_micro_f9643d42_nohash_4_data[i],
                            feature_data[i]);
  }
}

TF_LITE_MICRO_TESTS_END
/* Copyright 2018 The TensorFlow Authors. All Rights Reserved.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==============================================================================*/

#include "tensorflow/lite/micro/examples/micro_speech/feature_provider.h"

#include "tensorflow/lite/c/common.h"
#include "tensorflow/lite/micro/examples/micro_speech/micro_features/micro_model_settings.h"
#include "tensorflow/lite/micro/testing/micro_test.h"

TF_LITE_MICRO_TESTS_BEGIN

// End-to-end smoke test against the real audio provider: a 0 -> 10s jump is
// large enough that every slice must be refreshed.
TF_LITE_MICRO_TEST(TestFeatureProvider) {
  int8_t feature_data[kFeatureElementCount];
  FeatureProvider feature_provider(kFeatureElementCount, feature_data);

  int how_many_new_slices = 0;
  TfLiteStatus populate_status = feature_provider.PopulateFeatureData(
      /* last_time_in_ms= */ 0, /* time_in_ms= */ 10000, &how_many_new_slices);
  TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, populate_status);
  TF_LITE_MICRO_EXPECT_EQ(kFeatureSliceCount, how_many_new_slices);
}

TF_LITE_MICRO_TESTS_END
a/tensorflow/lite/micro/examples/micro_speech/images/model_architecture.png b/tensorflow/lite/micro/examples/micro_speech/images/model_architecture.png new file mode 100644 index 0000000..ce91fad Binary files /dev/null and b/tensorflow/lite/micro/examples/micro_speech/images/model_architecture.png differ diff --git a/tensorflow/lite/micro/examples/micro_speech/main.cc b/tensorflow/lite/micro/examples/micro_speech/main.cc new file mode 100644 index 0000000..f35c472 --- /dev/null +++ b/tensorflow/lite/micro/examples/micro_speech/main.cc @@ -0,0 +1,27 @@ +/* Copyright 2019 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#include "tensorflow/lite/micro/examples/micro_speech/main_functions.h" + +// This is the default main used on systems that have the standard C entry +// point. Other devices (for example FreeRTOS or ESP32) that have different +// requirements for entry code (like an app_main function) should specialize +// this main.cc file in a target-specific subfolder. 
+int main(int argc, char* argv[]) { + setup(); + while (true) { + loop(); + } +} diff --git a/tensorflow/lite/micro/examples/micro_speech/main_functions.cc b/tensorflow/lite/micro/examples/micro_speech/main_functions.cc new file mode 100644 index 0000000..c92636a --- /dev/null +++ b/tensorflow/lite/micro/examples/micro_speech/main_functions.cc @@ -0,0 +1,163 @@ +/* Copyright 2020 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#include "tensorflow/lite/micro/examples/micro_speech/main_functions.h" + +#include "tensorflow/lite/micro/examples/micro_speech/audio_provider.h" +#include "tensorflow/lite/micro/examples/micro_speech/command_responder.h" +#include "tensorflow/lite/micro/examples/micro_speech/feature_provider.h" +#include "tensorflow/lite/micro/examples/micro_speech/micro_features/micro_model_settings.h" +#include "tensorflow/lite/micro/examples/micro_speech/micro_speech_model_data.h" +#include "tensorflow/lite/micro/examples/micro_speech/recognize_commands.h" +#include "tensorflow/lite/micro/micro_interpreter.h" +#include "tensorflow/lite/micro/micro_log.h" +#include "tensorflow/lite/micro/micro_mutable_op_resolver.h" +#include "tensorflow/lite/micro/system_setup.h" +#include "tensorflow/lite/schema/schema_generated.h" + +// Globals, used for compatibility with Arduino-style sketches. 
namespace {
// Model and interpreter handles; populated once in setup().
const tflite::Model* model = nullptr;
tflite::MicroInterpreter* interpreter = nullptr;
TfLiteTensor* model_input = nullptr;
FeatureProvider* feature_provider = nullptr;
RecognizeCommands* recognizer = nullptr;
// Timestamp (ms) of the most recent audio already fed to the provider.
int32_t previous_time = 0;

// Create an area of memory to use for input, output, and intermediate arrays.
// The size of this will depend on the model you're using, and may need to be
// determined by experimentation.
constexpr int kTensorArenaSize = 10 * 1024;
uint8_t tensor_arena[kTensorArenaSize];
// Staging buffer that FeatureProvider fills with spectrogram slices before
// they are copied into the model's input tensor.
int8_t feature_buffer[kFeatureElementCount];
int8_t* model_input_buffer = nullptr;
}  // namespace

// The name of this function is important for Arduino compatibility.
// Builds the interpreter, binds the feature provider and recognizer, and
// validates the model's input tensor. On any failure it logs and returns,
// leaving the globals partially initialized (loop() then operates on nulls;
// errors surface via the log).
void setup() {
  tflite::InitializeTarget();

  // Map the model into a usable data structure. This doesn't involve any
  // copying or parsing, it's a very lightweight operation.
  model = tflite::GetModel(g_micro_speech_model_data);
  if (model->version() != TFLITE_SCHEMA_VERSION) {
    MicroPrintf(
        "Model provided is schema version %d not equal "
        "to supported version %d.",
        model->version(), TFLITE_SCHEMA_VERSION);
    return;
  }

  // Pull in only the operation implementations we need.
  // This relies on a complete list of all the ops needed by this graph.

  // NOLINTNEXTLINE(runtime-global-variables)
  static tflite::MicroMutableOpResolver<4> micro_op_resolver;
  if (micro_op_resolver.AddDepthwiseConv2D() != kTfLiteOk) {
    return;
  }
  if (micro_op_resolver.AddFullyConnected() != kTfLiteOk) {
    return;
  }
  if (micro_op_resolver.AddSoftmax() != kTfLiteOk) {
    return;
  }
  if (micro_op_resolver.AddReshape() != kTfLiteOk) {
    return;
  }

  // Build an interpreter to run the model with. Static storage keeps it alive
  // across loop() calls without heap allocation.
  static tflite::MicroInterpreter static_interpreter(
      model, micro_op_resolver, tensor_arena, kTensorArenaSize);
  interpreter = &static_interpreter;

  // Allocate memory from the tensor_arena for the model's tensors.
  TfLiteStatus allocate_status = interpreter->AllocateTensors();
  if (allocate_status != kTfLiteOk) {
    MicroPrintf("AllocateTensors() failed");
    return;
  }

  // Get information about the memory area to use for the model's input, and
  // verify it is the int8 [1, slices * slice_size] tensor this code expects.
  model_input = interpreter->input(0);
  if ((model_input->dims->size != 2) || (model_input->dims->data[0] != 1) ||
      (model_input->dims->data[1] !=
       (kFeatureSliceCount * kFeatureSliceSize)) ||
      (model_input->type != kTfLiteInt8)) {
    MicroPrintf("Bad input tensor parameters in model");
    return;
  }
  model_input_buffer = model_input->data.int8;

  // Prepare to access the audio spectrograms from a microphone or other source
  // that will provide the inputs to the neural network.
  // NOLINTNEXTLINE(runtime-global-variables)
  static FeatureProvider static_feature_provider(kFeatureElementCount,
                                                 feature_buffer);
  feature_provider = &static_feature_provider;

  static RecognizeCommands static_recognizer;
  recognizer = &static_recognizer;

  previous_time = 0;
}

// The name of this function is important for Arduino compatibility.
// One iteration: refresh features, run inference if there is new audio, and
// hand the smoothed result to the command responder.
void loop() {
  // Fetch the spectrogram for the current time.
  const int32_t current_time = LatestAudioTimestamp();
  int how_many_new_slices = 0;
  TfLiteStatus feature_status = feature_provider->PopulateFeatureData(
      previous_time, current_time, &how_many_new_slices);
  if (feature_status != kTfLiteOk) {
    MicroPrintf("Feature generation failed");
    return;
  }
  previous_time = current_time;
  // If no new audio samples have been received since last time, don't bother
  // running the network model.
  if (how_many_new_slices == 0) {
    return;
  }

  // Copy feature buffer to input tensor
  for (int i = 0; i < kFeatureElementCount; i++) {
    model_input_buffer[i] = feature_buffer[i];
  }

  // Run the model on the spectrogram input and make sure it succeeds.
  TfLiteStatus invoke_status = interpreter->Invoke();
  if (invoke_status != kTfLiteOk) {
    MicroPrintf("Invoke failed");
    return;
  }

  // Obtain a pointer to the output tensor
  TfLiteTensor* output = interpreter->output(0);
  // Determine whether a command was recognized based on the output of
  // inference
  const char* found_command = nullptr;
  uint8_t score = 0;
  bool is_new_command = false;
  TfLiteStatus process_status = recognizer->ProcessLatestResults(
      output, current_time, &found_command, &score, &is_new_command);
  if (process_status != kTfLiteOk) {
    MicroPrintf("RecognizeCommands::ProcessLatestResults() failed");
    return;
  }
  // Do something based on the recognized command. The default implementation
  // just prints to the error console, but you should replace this with your
  // own function for a real application.
  RespondToCommand(current_time, found_command, score, is_new_command);
}
+==============================================================================*/ + +#ifndef TENSORFLOW_LITE_MICRO_EXAMPLES_MICRO_SPEECH_MAIN_FUNCTIONS_H_ +#define TENSORFLOW_LITE_MICRO_EXAMPLES_MICRO_SPEECH_MAIN_FUNCTIONS_H_ + +// Expose a C friendly interface for main functions. +#ifdef __cplusplus +extern "C" { +#endif + +// Initializes all data needed for the example. The name is important, and needs +// to be setup() for Arduino compatibility. +void setup(); + +// Runs one iteration of data gathering and inference. This should be called +// repeatedly from the application code. The name needs to be loop() for Arduino +// compatibility. +void loop(); + +#ifdef __cplusplus +} +#endif + +#endif // TENSORFLOW_LITE_MICRO_EXAMPLES_MICRO_SPEECH_MAIN_FUNCTIONS_H_ diff --git a/tensorflow/lite/micro/examples/micro_speech/micro_features/BUILD b/tensorflow/lite/micro/examples/micro_speech/micro_features/BUILD new file mode 100644 index 0000000..1077435 --- /dev/null +++ b/tensorflow/lite/micro/examples/micro_speech/micro_features/BUILD @@ -0,0 +1,78 @@ +# Library for generating feature vectors from audio data +package( + default_visibility = ["//visibility:public"], + # Disabling layering_check because of http://b/177257332 + features = ["-layering_check"], + licenses = ["notice"], +) + +cc_library( + name = "micro_model_settings", + srcs = [ + "micro_model_settings.cc", + ], + hdrs = [ + "micro_model_settings.h", + ], +) + +cc_library( + name = "micro_features_test_data", + srcs = [ + "no_micro_features_data.cc", + "yes_micro_features_data.cc", + ], + hdrs = [ + "no_micro_features_data.h", + "yes_micro_features_data.h", + ], +) + +cc_library( + name = "micro_features_generator", + srcs = [ + "micro_features_generator.cc", + ], + hdrs = [ + "micro_features_generator.h", + ], + deps = [ + ":micro_model_settings", + "//tensorflow/lite/c:common", + "//tensorflow/lite/experimental/microfrontend/lib:frontend", + "//tensorflow/lite/micro:micro_log", + ], +) + +cc_library( + 
name = "micro_features_generator_test_data", + srcs = [ + "no_feature_data_slice.cc", + "yes_feature_data_slice.cc", + ], + hdrs = [ + "no_feature_data_slice.h", + "yes_feature_data_slice.h", + ], +) + +cc_test( + name = "micro_features_generator_test", + size = "small", + srcs = [ + "micro_features_generator_test.cc", + ], + tags = [ + "noasan", # TODO(b/179930607): Fix with asan. + ], + deps = [ + ":micro_features_generator", + ":micro_features_generator_test_data", + ":micro_model_settings", + "//tensorflow/lite/c:common", + "//tensorflow/lite/micro:micro_framework", + "//tensorflow/lite/micro:micro_log", + "//tensorflow/lite/micro/examples/micro_speech:audio_sample_test_data", + "//tensorflow/lite/micro/testing:micro_test", + ], +) diff --git a/tensorflow/lite/micro/examples/micro_speech/micro_features/micro_features_generator.cc b/tensorflow/lite/micro/examples/micro_speech/micro_features/micro_features_generator.cc new file mode 100644 index 0000000..3dbb5d3 --- /dev/null +++ b/tensorflow/lite/micro/examples/micro_speech/micro_features/micro_features_generator.cc @@ -0,0 +1,113 @@ +/* Copyright 2019 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/ + +#include "tensorflow/lite/micro/examples/micro_speech/micro_features/micro_features_generator.h" + +#include +#include + +#include "tensorflow/lite/experimental/microfrontend/lib/frontend.h" +#include "tensorflow/lite/experimental/microfrontend/lib/frontend_util.h" +#include "tensorflow/lite/micro/examples/micro_speech/micro_features/micro_model_settings.h" +#include "tensorflow/lite/micro/micro_log.h" + +namespace { + +FrontendState g_micro_features_state; +bool g_is_first_time = true; + +} // namespace + +TfLiteStatus InitializeMicroFeatures() { + FrontendConfig config; + config.window.size_ms = kFeatureSliceDurationMs; + config.window.step_size_ms = kFeatureSliceStrideMs; + config.noise_reduction.smoothing_bits = 10; + config.filterbank.num_channels = kFeatureSliceSize; + config.filterbank.lower_band_limit = 125.0; + config.filterbank.upper_band_limit = 7500.0; + config.noise_reduction.smoothing_bits = 10; + config.noise_reduction.even_smoothing = 0.025; + config.noise_reduction.odd_smoothing = 0.06; + config.noise_reduction.min_signal_remaining = 0.05; + config.pcan_gain_control.enable_pcan = 1; + config.pcan_gain_control.strength = 0.95; + config.pcan_gain_control.offset = 80.0; + config.pcan_gain_control.gain_bits = 21; + config.log_scale.enable_log = 1; + config.log_scale.scale_shift = 6; + if (!FrontendPopulateState(&config, &g_micro_features_state, + kAudioSampleFrequency)) { + MicroPrintf("FrontendPopulateState() failed"); + return kTfLiteError; + } + g_is_first_time = true; + return kTfLiteOk; +} + +// This is not exposed in any header, and is only used for testing, to ensure +// that the state is correctly set up before generating results. 
// This is not exposed in any header, and is only used for testing, to ensure
// that the state is correctly set up before generating results.
void SetMicroFeaturesNoiseEstimates(const uint32_t* estimate_presets) {
  for (int i = 0; i < g_micro_features_state.filterbank.num_channels; ++i) {
    g_micro_features_state.noise_reduction.estimate[i] = estimate_presets[i];
  }
}

// Runs the microfrontend over `input_size` int16 audio samples and writes the
// resulting int8 feature slice into `output`. `num_samples_read` receives the
// count of samples the frontend consumed. Requires InitializeMicroFeatures()
// to have been called first.
TfLiteStatus GenerateMicroFeatures(const int16_t* input, int input_size,
                                   int output_size, int8_t* output,
                                   size_t* num_samples_read) {
  const int16_t* frontend_input;
  if (g_is_first_time) {
    // First call after init: the frontend has no history, process from the
    // start of the buffer.
    frontend_input = input;
    g_is_first_time = false;
  } else {
    // NOTE(review): 160 looks like a fixed offset skipping samples already
    // held in the frontend's history (10ms at 16kHz) -- confirm against the
    // frontend window configuration before changing.
    frontend_input = input + 160;
  }
  FrontendOutput frontend_output = FrontendProcessSamples(
      &g_micro_features_state, frontend_input, input_size, num_samples_read);

  for (size_t i = 0; i < frontend_output.size; ++i) {
    // These scaling values are derived from those used in input_data.py in the
    // training pipeline.
    // The feature pipeline outputs 16-bit signed integers in roughly a 0 to
    // 670 range. In training, these are then arbitrarily divided by 25.6 to
    // get float values in the rough range of 0.0 to 26.0. This scaling is
    // performed for historical reasons, to match up with the output of other
    // feature generators.
    // The process is then further complicated when we quantize the model. This
    // means we have to scale the 0.0 to 26.0 real values to the -128 to 127
    // signed integer numbers.
    // All this means that to get matching values from our integer feature
    // output into the tensor input, we have to perform:
    // input = (((feature / 25.6) / 26.0) * 256) - 128
    // To simplify this and perform it in 32-bit integer math, we rearrange to:
    // input = (feature * 256) / (25.6 * 26.0) - 128
    constexpr int32_t value_scale = 256;
    // NOTE(review): the template argument was garbled in transit; int32_t is
    // the target type of the surrounding arithmetic.
    constexpr int32_t value_div =
        static_cast<int32_t>((25.6f * 26.0f) + 0.5f);
    // Round-to-nearest division, then shift into the signed int8 range.
    int32_t value =
        ((frontend_output.values[i] * value_scale) + (value_div / 2)) /
        value_div;
    value -= 128;
    // Clamp to the int8 range before narrowing.
    if (value < -128) {
      value = -128;
    }
    if (value > 127) {
      value = 127;
    }
    output[i] = value;
  }

  return kTfLiteOk;
}
+==============================================================================*/ + +#ifndef TENSORFLOW_LITE_MICRO_EXAMPLES_MICRO_SPEECH_MICRO_FEATURES_MICRO_FEATURES_GENERATOR_H_ +#define TENSORFLOW_LITE_MICRO_EXAMPLES_MICRO_SPEECH_MICRO_FEATURES_MICRO_FEATURES_GENERATOR_H_ + +#include "tensorflow/lite/c/common.h" + +// Sets up any resources needed for the feature generation pipeline. +TfLiteStatus InitializeMicroFeatures(); + +// Converts audio sample data into a more compact form that's appropriate for +// feeding into a neural network. +TfLiteStatus GenerateMicroFeatures(const int16_t* input, int input_size, + int output_size, int8_t* output, + size_t* num_samples_read); + +#endif // TENSORFLOW_LITE_MICRO_EXAMPLES_MICRO_SPEECH_MICRO_FEATURES_MICRO_FEATURES_GENERATOR_H_ diff --git a/tensorflow/lite/micro/examples/micro_speech/micro_features/micro_features_generator_test.cc b/tensorflow/lite/micro/examples/micro_speech/micro_features/micro_features_generator_test.cc new file mode 100644 index 0000000..53ab443 --- /dev/null +++ b/tensorflow/lite/micro/examples/micro_speech/micro_features/micro_features_generator_test.cc @@ -0,0 +1,95 @@ +/* Copyright 2019 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/ + +#include "tensorflow/lite/micro/examples/micro_speech/micro_features/micro_features_generator.h" + +#include "tensorflow/lite/c/common.h" +#include "tensorflow/lite/micro/examples/micro_speech/micro_features/no_feature_data_slice.h" +#include "tensorflow/lite/micro/examples/micro_speech/micro_features/yes_feature_data_slice.h" +#include "tensorflow/lite/micro/examples/micro_speech/testdata/no_30ms_audio_data.h" +#include "tensorflow/lite/micro/examples/micro_speech/testdata/yes_30ms_audio_data.h" +#include "tensorflow/lite/micro/micro_log.h" +#include "tensorflow/lite/micro/testing/micro_test.h" + +// This is a test-only API, not exposed in any public headers, so declare it. +void SetMicroFeaturesNoiseEstimates(const uint32_t* estimate_presets); + +TF_LITE_MICRO_TESTS_BEGIN + +TF_LITE_MICRO_TEST(TestMicroFeaturesGeneratorYes) { + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, InitializeMicroFeatures()); + + // The micro features pipeline retains state from previous calls to help + // estimate the background noise. Unfortunately this makes it harder to + // exactly reproduce results in a test environment, so use a known snapshot + // of the parameters at the point that the golden feature values were + // created. 
+ const uint32_t yes_estimate_presets[] = { + 1062898, 2644477, 1257642, 1864718, 412722, 725703, 395721, 474082, + 173046, 255856, 158966, 153736, 69181, 199100, 144493, 227740, + 110573, 164330, 79666, 144650, 122947, 476799, 398553, 497493, + 322152, 1140005, 566716, 690605, 308902, 347481, 109891, 170457, + 73901, 100975, 42963, 72325, 34183, 20207, 6640, 9468, + }; + SetMicroFeaturesNoiseEstimates(yes_estimate_presets); + + int8_t yes_calculated_data[g_yes_feature_data_slice_size]; + size_t num_samples_read; + TfLiteStatus yes_status = GenerateMicroFeatures( + g_yes_30ms_audio_data, g_yes_30ms_audio_data_size, + g_yes_feature_data_slice_size, yes_calculated_data, &num_samples_read); + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, yes_status); + + for (int i = 0; i < g_yes_feature_data_slice_size; ++i) { + const int expected = g_yes_feature_data_slice[i]; + const int actual = yes_calculated_data[i]; + TF_LITE_MICRO_EXPECT_EQ(expected, actual); + if (expected != actual) { + MicroPrintf("Expected value %d but found %d", expected, actual); + } + } +} + +TF_LITE_MICRO_TEST(TestMicroFeaturesGeneratorNo) { + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, InitializeMicroFeatures()); + // As we did for the previous features, set known good noise state + // parameters. 
+ const uint32_t no_estimate_presets[] = { + 2563964, 1909393, 559801, 538670, 203643, 175959, 75088, 139491, + 59691, 95307, 43865, 129263, 52517, 80058, 51330, 100731, + 76674, 76262, 15497, 22598, 13778, 21460, 8946, 17806, + 10023, 18810, 8002, 10842, 7578, 9983, 6267, 10759, + 8946, 18488, 9691, 39785, 9939, 17835, 9671, 18512, + }; + SetMicroFeaturesNoiseEstimates(no_estimate_presets); + + int8_t no_calculated_data[g_no_feature_data_slice_size]; + size_t num_samples_read; + TfLiteStatus no_status = GenerateMicroFeatures( + g_no_30ms_audio_data, g_no_30ms_audio_data_size, + g_no_feature_data_slice_size, no_calculated_data, &num_samples_read); + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, no_status); + + for (size_t i = 0; i < g_no_feature_data_slice_size; ++i) { + const int expected = g_no_feature_data_slice[i]; + const int actual = no_calculated_data[i]; + TF_LITE_MICRO_EXPECT_EQ(expected, actual); + if (expected != actual) { + MicroPrintf("Expected value %d but found %d", expected, actual); + } + } +} + +TF_LITE_MICRO_TESTS_END diff --git a/tensorflow/lite/micro/examples/micro_speech/micro_features/micro_model_settings.cc b/tensorflow/lite/micro/examples/micro_speech/micro_features/micro_model_settings.cc new file mode 100644 index 0000000..47d12ba --- /dev/null +++ b/tensorflow/lite/micro/examples/micro_speech/micro_features/micro_model_settings.cc @@ -0,0 +1,23 @@ +/* Copyright 2018 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/ + +#include "tensorflow/lite/micro/examples/micro_speech/micro_features/micro_model_settings.h" + +const char* kCategoryLabels[kCategoryCount] = { + "silence", + "unknown", + "yes", + "no", +}; diff --git a/tensorflow/lite/micro/examples/micro_speech/micro_features/micro_model_settings.h b/tensorflow/lite/micro/examples/micro_speech/micro_features/micro_model_settings.h new file mode 100644 index 0000000..e542213 --- /dev/null +++ b/tensorflow/lite/micro/examples/micro_speech/micro_features/micro_model_settings.h @@ -0,0 +1,43 @@ +/* Copyright 2020 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#ifndef TENSORFLOW_LITE_MICRO_EXAMPLES_MICRO_SPEECH_MICRO_FEATURES_MICRO_MODEL_SETTINGS_H_ +#define TENSORFLOW_LITE_MICRO_EXAMPLES_MICRO_SPEECH_MICRO_FEATURES_MICRO_MODEL_SETTINGS_H_ + +// Keeping these as constant expressions allow us to allocate fixed-sized arrays +// on the stack for our working memory. + +// The size of the input time series data we pass to the FFT to produce the +// frequency information. This has to be a power of two, and since we're dealing +// with 30ms of 16KHz inputs, which means 480 samples, this is the next value. 
+constexpr int kMaxAudioSampleSize = 512; +constexpr int kAudioSampleFrequency = 16000; + +// The following values are derived from values used during model training. +// If you change the way you preprocess the input, update all these constants. +constexpr int kFeatureSliceSize = 40; +constexpr int kFeatureSliceCount = 49; +constexpr int kFeatureElementCount = (kFeatureSliceSize * kFeatureSliceCount); +constexpr int kFeatureSliceStrideMs = 20; +constexpr int kFeatureSliceDurationMs = 30; + +// Variables for the model's output categories. +constexpr int kSilenceIndex = 0; +constexpr int kUnknownIndex = 1; +// If you modify the output categories, you need to update the following values. +constexpr int kCategoryCount = 4; +extern const char* kCategoryLabels[kCategoryCount]; + +#endif // TENSORFLOW_LITE_MICRO_EXAMPLES_MICRO_SPEECH_MICRO_FEATURES_MICRO_MODEL_SETTINGS_H_ diff --git a/tensorflow/lite/micro/examples/micro_speech/micro_features/no_feature_data_slice.cc b/tensorflow/lite/micro/examples/micro_speech/micro_features/no_feature_data_slice.cc new file mode 100644 index 0000000..684f702 --- /dev/null +++ b/tensorflow/lite/micro/examples/micro_speech/micro_features/no_feature_data_slice.cc @@ -0,0 +1,25 @@ +/* Copyright 2019 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +// See the header for documentation on the meaning of this data. 
+ +#include "tensorflow/lite/micro/examples/micro_speech/micro_features/no_feature_data_slice.h" + +alignas(16) const int8_t + g_no_feature_data_slice[g_no_feature_data_slice_size] = { + 89, 68, 96, 83, 111, 96, 115, 87, 99, 76, 105, 84, 105, 86, + 113, 91, 108, 87, 110, 78, 80, 46, 22, 74, 88, 72, 103, 86, + 80, 68, 48, 24, 68, 48, 55, 36, 108, 90, 90, 63, +}; diff --git a/tensorflow/lite/micro/examples/micro_speech/micro_features/no_feature_data_slice.h b/tensorflow/lite/micro/examples/micro_speech/micro_features/no_feature_data_slice.h new file mode 100644 index 0000000..01e6605 --- /dev/null +++ b/tensorflow/lite/micro/examples/micro_speech/micro_features/no_feature_data_slice.h @@ -0,0 +1,29 @@ +/* Copyright 2019 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +// This data was extracted from the larger feature data held in +// no_features_data.cc and consists of the 29th spectrogram slice of 43 values. +// This is the expected result of running the sample data in +// no_30ms_sample_data.cc through the preprocessing pipeline. 
+ +#ifndef TENSORFLOW_LITE_MICRO_EXAMPLES_MICRO_SPEECH_MICRO_FEATURES_NO_FEATURE_DATA_SLICE_H_ +#define TENSORFLOW_LITE_MICRO_EXAMPLES_MICRO_SPEECH_MICRO_FEATURES_NO_FEATURE_DATA_SLICE_H_ + +#include + +constexpr int g_no_feature_data_slice_size = 40; +extern const int8_t g_no_feature_data_slice[]; + +#endif // TENSORFLOW_LITE_MICRO_EXAMPLES_MICRO_SPEECH_MICRO_FEATURES_NO_FEATURE_DATA_SLICE_H_ diff --git a/tensorflow/lite/micro/examples/micro_speech/micro_features/no_micro_features_data.cc b/tensorflow/lite/micro/examples/micro_speech/micro_features/no_micro_features_data.cc new file mode 100644 index 0000000..f481486 --- /dev/null +++ b/tensorflow/lite/micro/examples/micro_speech/micro_features/no_micro_features_data.cc @@ -0,0 +1,188 @@ +/* Copyright 2019 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#include "tensorflow/lite/micro/examples/micro_speech/micro_features/no_micro_features_data.h" + +// Golden test values for the expected spectrogram from a "no" sample file +// speech_commands_test_set_v0.02/no/f9643d42_nohash_4.wav. 
+ +const int g_no_micro_f9643d42_nohash_4_width = 40; +const int g_no_micro_f9643d42_nohash_4_height = 49; +alignas(16) const signed char g_no_micro_f9643d42_nohash_4_data[] = { + 103, 78, 64, 76, 75, 54, 53, 67, 77, 60, 56, 70, + 76, 71, 68, 58, 74, 32, 23, -2, -18, 11, 13, 15, + 9, 20, 5, -7, -18, -2, -10, -18, -10, -12, 9, 7, + -33, -12, -4, -18, 57, 17, 55, 62, 70, 45, 61, 37, + 67, 52, 48, 47, 55, 46, 57, 47, 73, 17, 27, 20, + 19, 8, 15, -6, -1, 10, -12, -29, -6, -23, -18, -3, + -1, 5, 3, -4, -12, -8, -1, -14, 65, 48, 58, 43, + 48, 19, 39, 39, 57, 57, 58, 55, 67, 58, 49, 50, + 70, 27, 9, 16, 37, 4, 25, 4, 11, 9, 7, -33, + -7, -12, 3, -6, -29, -7, -7, -18, -12, -18, -2, -1, + 0, 31, 60, -8, 51, 59, 70, 40, 71, 57, 52, 38, + 66, 48, 17, 6, 59, 8, 15, 7, 18, 4, 18, -23, + -8, -4, -3, -12, -3, -26, 1, 10, 2, -29, -29, -37, + -7, -4, 6, -33, 67, 44, 59, -4, 64, 51, 68, 55, + 74, 9, 40, 15, 57, 33, 60, 18, 40, 25, 27, -20, + 25, -16, 6, 17, -10, -12, -23, -43, -23, -23, -29, -37, + -4, -16, -16, -60, -20, -23, -10, -29, -12, 15, 12, -37, + 27, 15, 61, 44, 50, 8, 48, 22, 49, -18, 46, 33, + 42, 34, 46, -8, 4, -18, -43, -43, -10, 1, -10, -16, + -10, -77, -16, -33, 11, -26, -23, -37, 0, -8, -16, -29, + 42, 40, 68, 24, 47, 46, 53, -128, 30, 2, 42, 21, + 21, -4, 43, 2, 43, 5, 32, -26, 7, -37, -43, -23, + -2, -8, 2, -37, -50, -60, -1, -7, -33, -77, -6, -18, + -16, -50, -12, -33, 53, 8, 52, 18, 51, 35, 69, 26, + 44, 8, 27, -128, 21, -33, 17, -14, 38, -128, -14, -18, + 17, -20, -14, -37, 8, -60, -33, -33, -33, -43, -12, -29, + -12, -128, -33, -60, -26, -77, -26, -50, 57, 29, 11, 30, + 53, -10, 45, 15, 18, -10, 42, 2, 31, -29, 10, -4, + 42, -37, -50, -128, -4, -43, -20, -77, -14, -26, -33, -128, + -12, -43, -8, -33, -33, -60, -43, -77, -12, -60, -26, -50, + 40, -23, 36, 35, 50, -2, 37, 27, 26, -77, 49, -7, + 28, -43, 6, 11, 41, -37, 33, -26, -14, -12, -6, -33, + -16, -26, -20, -77, -14, -43, -8, -50, -14, -37, -26, -77, + -26, -77, -14, -29, 50, -60, 25, -26, 57, 38, 51, 1, + 
50, 1, 53, -18, 30, -23, 11, -128, 18, -43, 20, -26, + -10, -26, -12, -128, -50, -60, -37, -77, -20, -43, -50, -128, + -77, -128, -77, -128, -33, -77, -20, -60, 53, -10, -37, -128, + 10, -128, 60, 18, -8, 13, 37, -37, 8, -128, 3, -77, + 32, -29, 14, 10, -12, -77, -37, -77, -37, -60, -23, -128, + -43, -50, -16, -77, -6, -33, 0, -60, -43, -128, -16, -60, + 20, -2, 51, 19, 43, 2, 63, 20, 60, -4, 42, -50, + 4, -128, 2, -3, 32, -33, -26, -128, -18, -128, -33, -43, + -7, -60, -50, -77, -29, -77, -23, -128, -16, -26, -23, -60, + -37, -77, -37, -128, -1, -33, 39, 48, 60, 5, 8, -128, + 44, 11, 4, 0, 13, -77, -2, -20, 33, -128, -33, -77, + -8, -128, -14, -128, -33, -18, -12, -77, -16, -128, -37, -128, + -12, -77, -60, -128, -23, -60, -23, -128, 36, -50, 46, -128, + 66, 39, 18, -14, -12, -77, -20, -6, 24, -128, 28, -26, + 21, -77, -6, -33, 1, -128, -43, -128, -1, -50, -37, -128, + -50, -128, -33, -128, -18, -128, -60, -8, -7, -60, -60, -128, + -6, -29, 20, -1, 73, 40, -43, -14, 33, -43, 33, -3, + 15, -29, 29, -43, 20, -60, -29, -128, -20, -26, 4, -77, + -16, -60, -33, -50, -29, -128, -60, -128, -77, -128, -37, -50, + 0, -77, -33, -128, 39, 8, 47, 10, 62, 16, 2, 1, + 10, 7, 4, -7, 6, -128, -77, -50, 19, -77, -77, -128, + -77, -128, -50, -128, -60, -60, -33, -50, -37, -128, -128, -128, + -60, -128, -37, -60, -18, -128, -33, -77, 37, 23, 29, -128, + -128, -128, -16, -128, -16, -33, 21, -20, -8, -60, -2, -60, + 11, -128, -50, -128, -50, -128, -29, -77, -16, -128, -26, -128, + -50, -77, -43, -128, -128, -128, -50, -128, -33, -128, -33, -50, + -23, -128, 24, -128, -128, -77, 4, -23, 32, -128, 1, -26, + -14, -128, 10, -77, -4, -128, 1, -50, -8, -77, -77, -77, + -23, -128, -50, -43, -33, -128, -43, -128, -128, -128, -43, -128, + -50, -128, -128, -128, 44, 15, 14, -128, 9, -128, 21, 0, + 29, -7, 18, -7, -7, -128, -33, -50, 14, -60, -60, -128, + -60, -128, -37, -128, -43, -128, -20, -128, -50, -128, -43, -77, + -26, -128, -60, -50, -60, -128, -77, -128, -3, -128, 14, -77, + -26, 11, 
47, -77, -7, -77, 45, -43, -12, 14, 37, -60, + 22, -4, 5, -77, -14, -128, -10, -60, 22, -77, -12, -60, + -50, -128, -60, -128, -60, -128, -43, -128, -50, -128, -77, -50, + 27, -37, 33, -128, 4, -29, -4, -50, -20, -128, 6, -37, + -33, -128, -50, -128, 34, 15, -43, -128, -20, -50, -3, -37, + -37, -77, -77, -128, -43, -128, -128, -128, 4, -26, -26, 27, + 0, -128, -29, -60, 35, -26, 23, -128, -29, -77, 19, 14, + 28, -128, -16, -7, 31, -1, 17, 11, 60, 44, 8, 11, + 18, -128, -33, -60, -1, -128, -43, -128, -23, -128, -128, -128, + 59, 43, 35, 61, 37, -77, -77, -50, 116, 88, 98, 69, + 78, 53, 78, 40, 48, 7, 29, -18, -2, -14, 5, 12, + 65, 35, 31, -12, 33, -2, -6, -1, 44, -29, -14, -60, + -4, -43, -37, -128, 29, 18, 38, 51, 8, -128, -12, -37, + 115, 91, 113, 77, 89, 36, 60, 44, 49, 36, 27, 31, + 63, 30, 62, 14, 55, 49, 42, 0, 45, 17, -23, 1, + 30, -37, -50, -77, -8, -60, 9, -60, -12, -50, 13, 4, + 23, -6, 28, 13, 107, 78, 101, 73, 89, 46, 63, 17, + 34, -43, -6, 30, 67, 40, 77, 21, 53, 39, 38, 12, + -6, 5, 28, -2, 18, -43, 0, -128, -29, -77, 18, -128, + -2, -77, 39, 35, 38, 35, 50, 29, 100, 70, 94, 69, + 86, 50, 45, 38, 45, 12, 58, 64, 74, 36, 77, 45, + 78, 62, 8, -60, 38, 6, 21, 7, 8, -37, -1, -20, + 48, -37, 8, -10, 8, 13, 45, 39, 38, 22, 49, 25, + 94, 63, 87, 66, 84, -128, 29, 20, 55, 51, 80, 36, + 62, 30, 81, 72, 68, 37, 51, 27, 54, 22, 16, -29, + 4, 9, 57, 15, 35, -43, -77, -20, 4, 6, 37, -1, + 40, 31, 47, 14, 89, 68, 96, 83, 111, 96, 115, 87, + 99, 76, 105, 84, 105, 86, 113, 91, 108, 87, 110, 78, + 80, 46, 22, 74, 88, 72, 103, 86, 80, 68, 48, 24, + 68, 48, 55, 36, 108, 90, 90, 63, 83, 63, 87, 64, + 90, 92, 113, 88, 102, 79, 109, 83, 100, 89, 109, 60, + 56, 21, 75, 62, 81, 45, 63, 73, 93, 65, 94, 80, + 89, 81, 73, 3, 43, 60, 102, 70, 84, 67, 99, 74, + 78, 57, 79, 50, 93, 82, 98, 56, 77, 70, 91, 71, + 85, 82, 86, 13, 45, -18, 48, 40, 53, 28, 85, 60, + 65, 52, 86, 78, 76, 46, 73, 19, 35, 54, 75, 40, + 71, 60, 82, 37, 69, 42, 62, 40, 96, 70, 85, 77, + 70, 68, 103, 84, 94, 
69, 81, -128, -128, -128, -43, -37, + 40, 2, 48, 45, 76, 37, 65, 16, 43, 18, 58, 20, + 27, 12, 71, 31, 53, 44, 88, 47, 50, 33, 39, 8, + 89, 57, 88, 69, 72, 63, 100, 68, 81, -77, -10, -128, + -128, -128, -128, -128, 13, -77, 8, 27, 60, 28, 41, -128, + -37, -128, 28, -43, -18, -128, 47, -37, 45, 27, 51, -29, + 15, 39, 52, 30, 49, -33, 65, 15, 76, 71, 90, 19, + 46, -128, -16, -128, -128, -128, -128, -128, -128, -128, -18, -128, + -20, -128, 32, -128, 21, -33, 45, -128, -128, -128, -12, -128, + -6, -14, 43, -128, -128, -128, -128, -128, 52, -18, 69, -43, + 78, 55, 42, -128, -29, -128, -128, -128, -128, -128, -128, -128, + -128, -128, -128, -128, 14, -128, -16, -128, -128, -128, 7, -128, + -128, -128, -128, -128, -128, -128, 12, -128, -128, -128, -128, -16, + 59, -50, 35, -128, 42, 0, 47, -128, -128, -128, -128, -128, + -128, -128, -128, -128, -128, -128, -128, -128, -33, -128, -23, -128, + -128, -128, -23, -128, -128, -128, -128, -128, -128, -128, -33, -128, + -128, -128, -128, -128, -128, -128, -8, -128, 36, -50, -128, -128, + -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, + -128, -128, -37, -128, -128, -60, -10, -128, -128, -128, -128, -128, + -128, -128, 21, -128, -128, -128, -128, -128, -128, -128, -128, -128, + -12, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, + -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, + -128, -128, -77, -128, -128, -128, -29, -128, -128, -128, -128, -128, + -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, + -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, + -29, -128, -128, -128, -128, -128, -128, -128, -128, -128, -50, -128, + -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, + -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, + -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, + -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, + -128, 
-128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, + -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, + -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, + -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, + -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, + -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, + -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, + -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, + -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, + -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, + -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, + -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, + -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, + -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, + -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, + -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, + -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, + -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, + -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, + -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, + -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, + -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, + -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, + -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, + -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, + -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, + -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, + -128, 
-128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, + -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, + -128, -128, -128, -128, +}; diff --git a/tensorflow/lite/micro/examples/micro_speech/micro_features/no_micro_features_data.h b/tensorflow/lite/micro/examples/micro_speech/micro_features/no_micro_features_data.h new file mode 100644 index 0000000..8c1b6d5 --- /dev/null +++ b/tensorflow/lite/micro/examples/micro_speech/micro_features/no_micro_features_data.h @@ -0,0 +1,23 @@ +/* Copyright 2019 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/ + +#ifndef TENSORFLOW_LITE_MICRO_EXAMPLES_MICRO_SPEECH_MICRO_FEATURES_NO_MICRO_FEATURES_DATA_H_ +#define TENSORFLOW_LITE_MICRO_EXAMPLES_MICRO_SPEECH_MICRO_FEATURES_NO_MICRO_FEATURES_DATA_H_ + +extern const int g_no_micro_f9643d42_nohash_4_width; +extern const int g_no_micro_f9643d42_nohash_4_height; +extern const signed char g_no_micro_f9643d42_nohash_4_data[]; + +#endif // TENSORFLOW_LITE_MICRO_EXAMPLES_MICRO_SPEECH_MICRO_FEATURES_NO_MICRO_FEATURES_DATA_H_ diff --git a/tensorflow/lite/micro/examples/micro_speech/micro_features/yes_feature_data_slice.cc b/tensorflow/lite/micro/examples/micro_speech/micro_features/yes_feature_data_slice.cc new file mode 100644 index 0000000..e3d006a --- /dev/null +++ b/tensorflow/lite/micro/examples/micro_speech/micro_features/yes_feature_data_slice.cc @@ -0,0 +1,25 @@ +/* Copyright 2019 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +// See the header for documentation on the meaning of this data. 
+ +#include "tensorflow/lite/micro/examples/micro_speech/micro_features/yes_feature_data_slice.h" + +alignas(16) const int8_t + g_yes_feature_data_slice[g_yes_feature_data_slice_size] = { + 86, 88, 108, 75, 108, 76, 98, 64, 75, 61, 71, 66, 85, -1, + -77, -128, 46, 61, 92, 69, 100, 93, 113, 80, 108, 93, 113, 91, + 110, 80, 85, 15, -33, -128, 12, -50, 34, 50, 70, 55, +}; diff --git a/tensorflow/lite/micro/examples/micro_speech/micro_features/yes_feature_data_slice.h b/tensorflow/lite/micro/examples/micro_speech/micro_features/yes_feature_data_slice.h new file mode 100644 index 0000000..18faadc --- /dev/null +++ b/tensorflow/lite/micro/examples/micro_speech/micro_features/yes_feature_data_slice.h @@ -0,0 +1,29 @@ +/* Copyright 2019 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +// This data was extracted from the larger feature data held in +// no_micro_features_data.cc and consists of the 26th spectrogram slice of 40 +// values. This is the expected result of running the sample data in +// yes_30ms_sample_data.cc through the preprocessing pipeline. 
+ +#ifndef TENSORFLOW_LITE_MICRO_EXAMPLES_MICRO_SPEECH_MICRO_FEATURES_YES_FEATURE_DATA_SLICE_H_ +#define TENSORFLOW_LITE_MICRO_EXAMPLES_MICRO_SPEECH_MICRO_FEATURES_YES_FEATURE_DATA_SLICE_H_ + +#include + +constexpr int g_yes_feature_data_slice_size = 40; +extern const int8_t g_yes_feature_data_slice[]; + +#endif // TENSORFLOW_LITE_MICRO_EXAMPLES_MICRO_SPEECH_MICRO_FEATURES_YES_FEATURE_DATA_SLICE_H_ diff --git a/tensorflow/lite/micro/examples/micro_speech/micro_features/yes_micro_features_data.cc b/tensorflow/lite/micro/examples/micro_speech/micro_features/yes_micro_features_data.cc new file mode 100644 index 0000000..7ee5387 --- /dev/null +++ b/tensorflow/lite/micro/examples/micro_speech/micro_features/yes_micro_features_data.cc @@ -0,0 +1,188 @@ +/* Copyright 2019 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#include "tensorflow/lite/micro/examples/micro_speech/micro_features/yes_micro_features_data.h" + +// Golden test values for the expected spectrogram from a "yes" sample file +// speech_commands_test_set_v0.02/yes/f2e59fea_nohash_1.wav. 
+ +const int g_yes_micro_f2e59fea_nohash_1_width = 40; +const int g_yes_micro_f2e59fea_nohash_1_height = 49; +alignas(16) const signed char g_yes_micro_f2e59fea_nohash_1_data[] = { + 116, 98, 118, 95, 106, 85, 101, 81, 67, -18, -33, -12, + -26, -128, 9, 34, 56, 45, 9, -12, 5, 30, 23, 28, + 0, -18, 0, -128, -60, -50, -50, -37, -60, -60, -50, -26, + -33, -50, -33, -50, 83, 61, 81, 55, 76, 61, 73, 64, + 38, -8, -37, -20, -18, -20, 48, 29, 52, 41, 55, 18, + 25, 37, 44, 37, 8, 15, -6, -60, -128, -50, -37, -37, + -18, -37, -26, -29, -37, -60, -50, -60, 95, 59, 52, -4, + 54, -18, 68, 43, 31, -18, -26, -33, -37, -29, 33, 7, + -3, 8, 26, 24, 36, 6, 36, 23, 14, 8, -29, -37, + -37, -37, -50, -50, -26, -8, -26, -37, -18, -37, -60, -77, + 50, 48, 83, 44, 56, -128, -33, -60, 1, -26, -60, -43, + -14, -23, -18, -43, -26, -33, 13, -77, -43, -77, -33, -37, + 16, -12, -37, -50, -50, -77, -20, -43, -60, -128, -60, -77, + -37, -77, -60, -128, 37, -10, 65, -7, 28, -128, 10, -77, + -37, -128, -77, -128, -77, -43, -128, -128, -77, -128, -128, -128, + -128, -128, -14, -128, -43, -50, -37, -77, -128, -128, -77, -43, + -29, -43, -20, -60, -37, -43, -50, -128, -77, -128, -18, -128, + -60, -128, -128, -128, -77, -128, -77, -128, -128, -128, -60, -37, + -20, -128, -60, -128, -128, -128, -60, -128, -77, -60, -128, -50, + -60, -128, -77, -128, -50, -60, -37, -60, -50, -77, -77, -128, + -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -37, -128, + -128, -128, -128, -128, -77, -128, -128, -128, -128, -128, -128, -128, + -128, -128, -128, -128, -77, -60, -128, -128, -50, -128, -50, -128, + -50, -128, -77, -128, -128, -128, -128, -128, -128, -128, -128, -128, + -128, -128, -77, -128, -77, -128, -128, -128, -128, -128, -128, -128, + -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, + -77, -128, -77, -128, -77, -128, -128, -128, -128, -128, -128, -128, + -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, + -128, -128, -128, -128, -128, -128, -128, 
-128, -77, -128, -128, -128, + -128, -77, -50, -128, -128, -77, -77, -128, -128, -128, -50, -128, + 85, 43, 65, 53, 69, 60, 45, 3, 46, -12, 9, -23, + 32, -1, -128, -128, -128, -128, -1, 37, 38, 33, 43, 36, + 58, 70, 68, 39, 6, 10, 32, 6, 8, -23, -77, -128, + -29, -128, -77, -128, 101, 87, 102, 91, 110, 88, 101, 83, + 110, 95, 111, 83, 81, 84, 106, 90, 93, 82, 98, 91, + 108, 95, 118, 97, 118, 97, 116, 96, 113, 90, 110, 96, + 107, 85, 94, 66, 69, 36, 29, 0, 100, 60, 105, 68, + 92, 93, 113, 92, 107, 85, 107, 83, 104, 91, 105, 85, + 112, 88, 101, 80, 101, 79, 96, 80, 98, 80, 105, 83, + 98, 81, 103, 71, 100, 79, 83, 78, 91, 47, 50, 13, + 108, 81, 93, 78, 98, 76, 105, 76, 98, 40, 77, 72, + 81, 62, 93, 77, 96, 80, 98, 61, 97, 69, 88, 61, + 71, 56, 98, 68, 97, 72, 89, 51, 81, 61, 88, 75, + 86, 56, 48, 13, 71, 22, 84, 66, 76, -7, 48, 61, + 77, 62, 91, 65, 95, 74, 88, 59, 75, 58, 83, 55, + 87, 55, 76, 43, 76, -3, 56, 60, 79, 57, 71, 54, + 82, 33, 74, 71, 91, 45, 18, -7, 61, 56, 77, 41, + 73, 42, 82, 49, 59, 63, 82, 65, 66, 38, 83, 34, + 48, -8, 46, 20, 54, 33, 54, 6, 48, 16, 60, 37, + 58, 22, 58, 14, 65, 53, 75, -4, 42, 16, 16, -50, + 22, -128, 80, 54, 43, -50, 42, -128, -10, -77, 28, -29, + 68, 43, 73, 2, 25, -60, 47, 14, 45, 7, 66, 4, + 62, 37, 71, 7, 46, -10, 44, 22, 55, 53, 57, -29, + 26, -10, -3, -128, 38, -128, 46, -10, 16, -128, -10, -26, + 60, -7, 65, 38, 70, -60, 35, -8, 42, -29, 6, -128, + 34, -128, 36, -60, 44, -12, -2, -128, -7, -60, -60, -128, + -23, -128, 31, -33, 22, -77, -37, -43, -128, -128, 3, -128, + -23, -128, 17, -77, 43, -77, -7, -128, -20, -128, 17, -43, + 32, -128, -43, -128, -128, -77, 21, -128, -50, -128, -128, -128, + -128, -128, -128, -128, -37, -128, -16, -128, -50, -26, -6, -128, + -128, -128, -128, -128, -23, -128, -128, -128, -128, -128, -128, -128, + -128, -128, -16, -128, 36, -7, 16, -128, -128, -128, -128, -128, + -77, -128, -37, -128, -50, -128, -128, -128, -128, -128, -18, -128, + 11, -128, -16, -77, -128, -128, -128, -128, -128, -128, 
-128, -128, + -128, -128, -128, -128, -26, -128, -128, -128, -128, -128, -128, -128, + -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, + -128, -128, -20, -128, -128, -128, -128, -128, -128, -128, -128, -128, + -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, + -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, + -128, -128, -128, -128, -128, -128, -50, -128, -77, -128, -128, -128, + -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, + -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, + -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -77, -128, + -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, + -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, + -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, + -128, -128, -128, -128, -128, -128, -128, -128, -1, -18, 5, -128, + 40, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, + -128, -128, -128, -128, -128, -128, 4, -128, 63, 66, 75, -128, + 70, 60, 34, -128, -128, -128, -128, -128, -128, -128, -128, -128, + 87, 86, 95, 76, 91, 62, 72, -6, -50, -128, -128, -128, + -128, -128, -128, -128, -128, -128, -128, -128, 64, 83, 104, 70, + 98, 90, 111, 89, 109, 80, 71, -128, -128, -128, -128, -128, + -20, -6, 27, 33, 86, 88, 108, 75, 108, 76, 98, 64, + 75, 61, 71, 66, 85, -1, -77, -128, 46, 61, 92, 69, + 100, 93, 113, 80, 108, 93, 113, 91, 110, 80, 85, 15, + -33, -128, 12, -50, 34, 50, 70, 55, 84, 72, 108, 81, + 111, 88, 100, 80, 84, 73, 97, 86, 99, 65, 85, 43, + 96, 78, 107, 94, 118, 98, 115, 92, 118, 94, 111, 93, + 111, 86, 99, 52, 32, -16, 48, 31, 81, 74, 85, 64, + 78, 64, 98, 70, 110, 92, 96, 73, 100, 72, 94, 73, + 98, 76, 85, 67, 101, 83, 101, 83, 112, 89, 98, 85, + 105, 78, 98, 72, 102, 80, 95, 23, 19, -8, 52, 57, + 103, 91, 95, 65, 74, 8, 77, 49, 96, 76, 100, 87, + 105, 81, 94, 62, 94, 78, 81, 72, 99, 82, 101, 78, + 108, 65, 
82, 70, 100, 63, 79, 58, 80, 59, 87, 48, + 50, 57, 93, 67, 86, 80, 103, 56, 77, 31, 81, 57, + 62, 41, 96, 85, 91, 71, 101, 76, 89, 78, 95, 76, + 96, 79, 103, 81, 103, 48, 70, 57, 88, 66, 84, 11, + 85, 67, 104, 37, 38, 67, 90, 54, 81, 62, 90, 52, + 78, -60, 54, -8, 68, 40, 55, 8, 77, 52, 66, 31, + 55, 13, 60, 26, 69, 42, 63, -29, 57, -128, -3, -128, + 3, -128, -29, -60, 52, -43, 63, 56, 86, 75, 95, 75, + 85, 63, 82, 10, 50, -128, 31, -77, 0, -77, -23, -128, + 12, -77, 51, -3, 58, -14, 44, 0, 48, 4, 53, 47, + 28, -128, -128, -128, -37, -128, -3, -128, 49, 61, 100, 90, + 117, 88, 107, 94, 112, 64, 96, 83, -128, -128, 7, -128, + -77, -128, -23, -128, -23, -128, 16, -37, 65, -8, 48, 20, + 14, -77, 57, -18, -43, -128, -128, -128, -128, -128, -128, -128, + 24, 12, 74, 76, 105, 76, 99, 80, 108, 79, 103, 85, + -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, + 42, -128, -8, -128, -50, -128, -128, -128, -128, -128, -128, -128, + -128, -128, -60, -128, -128, 5, 73, 53, 93, 70, 101, 73, + 94, 57, 86, 66, -18, -128, -128, -128, -128, -128, -128, -128, + -128, -128, -50, -128, 36, -128, -128, -128, -128, -128, -20, -128, + -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, 23, 37, + 75, 54, 97, 70, 83, 52, 85, 65, 7, -128, -128, -128, + -128, -128, -128, -128, -128, -128, -43, -128, 23, -128, -43, -128, + -33, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, + -128, -128, -26, -37, 65, 33, 76, 37, 73, 50, 77, 47, + -12, -128, -128, -128, -128, -128, -128, -128, -128, -128, -7, -14, + -4, -128, -14, -128, 18, -60, -128, -128, -128, -128, -128, -128, + -128, -128, -128, -128, -128, -128, -26, -60, 71, 42, 68, 53, + 81, 49, 73, 36, -128, -128, -128, -128, -128, -128, -128, -128, + -128, -128, -128, -128, -18, -128, -128, -128, -128, -128, -128, -128, + -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, 15, -26, + 44, -18, 59, 39, 57, 20, 62, 26, -128, -128, -128, -128, + -128, -128, -128, -128, -128, -128, -128, -128, -128, 
-128, -128, -128, + -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, + -128, -128, -128, -128, 49, -128, 30, 8, 69, 27, 62, 38, + -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, + -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, + -128, -128, -128, -128, -128, -128, -43, -128, 28, -37, 48, -10, + 48, 11, 74, 37, -128, -128, -128, -128, -128, -128, -128, -128, + -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, + -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, + -77, -128, 11, -128, -7, -60, -77, -4, -128, -128, -128, -128, + -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, + -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, + -128, -128, -128, -128, -128, -128, -8, -128, -50, -128, -128, -128, + -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, + -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, + -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, + -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, + -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, + -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, + -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, + -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, + -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, + -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, + -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, + -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, + -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, + -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, + -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, + -128, -128, -128, -128, 
-128, -128, -128, -128, -128, -128, -128, -128, + -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, + -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, + -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, + -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, + -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, + -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, + -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, + -128, -128, -128, -128, +}; diff --git a/tensorflow/lite/micro/examples/micro_speech/micro_features/yes_micro_features_data.h b/tensorflow/lite/micro/examples/micro_speech/micro_features/yes_micro_features_data.h new file mode 100644 index 0000000..cd1ad10 --- /dev/null +++ b/tensorflow/lite/micro/examples/micro_speech/micro_features/yes_micro_features_data.h @@ -0,0 +1,23 @@ +/* Copyright 2019 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/ + +#ifndef TENSORFLOW_LITE_MICRO_EXAMPLES_MICRO_SPEECH_MICRO_FEATURES_YES_MICRO_FEATURES_DATA_H_ +#define TENSORFLOW_LITE_MICRO_EXAMPLES_MICRO_SPEECH_MICRO_FEATURES_YES_MICRO_FEATURES_DATA_H_ + +extern const int g_yes_micro_f2e59fea_nohash_1_width; +extern const int g_yes_micro_f2e59fea_nohash_1_height; +extern const signed char g_yes_micro_f2e59fea_nohash_1_data[]; + +#endif // TENSORFLOW_LITE_MICRO_EXAMPLES_MICRO_SPEECH_MICRO_FEATURES_YES_MICRO_FEATURES_DATA_H_ diff --git a/tensorflow/lite/micro/examples/micro_speech/micro_speech.tflite b/tensorflow/lite/micro/examples/micro_speech/micro_speech.tflite new file mode 100644 index 0000000..4d10b2e Binary files /dev/null and b/tensorflow/lite/micro/examples/micro_speech/micro_speech.tflite differ diff --git a/tensorflow/lite/micro/examples/micro_speech/micro_speech_binary_mock_test.sh b/tensorflow/lite/micro/examples/micro_speech/micro_speech_binary_mock_test.sh new file mode 100755 index 0000000..0515d7c --- /dev/null +++ b/tensorflow/lite/micro/examples/micro_speech/micro_speech_binary_mock_test.sh @@ -0,0 +1,30 @@ +#!/bin/bash +# Copyright 2020 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================== +# +# Bash unit tests for the example binary. 
+ +set -e + +OUTPUT_LOG_FILE=${TEST_TMPDIR}/output_log.txt +${TEST_SRCDIR}/${TEST_WORKSPACE}/tensorflow/lite/micro/examples/micro_speech/micro_speech_mock 2>&1 | head > ${OUTPUT_LOG_FILE} + +if ! grep -q 'Heard ' ${OUTPUT_LOG_FILE}; then + echo "ERROR: Expected logs not found in output '${OUTPUT_LOG_FILE}'" + exit 1 +fi + +echo +echo "SUCCESS: micro_speech_binary_mock_test PASSED" diff --git a/tensorflow/lite/micro/examples/micro_speech/micro_speech_test.cc b/tensorflow/lite/micro/examples/micro_speech/micro_speech_test.cc new file mode 100644 index 0000000..56cb156 --- /dev/null +++ b/tensorflow/lite/micro/examples/micro_speech/micro_speech_test.cc @@ -0,0 +1,144 @@ +/* Copyright 2020 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/ + +#include "tensorflow/lite/micro/examples/micro_speech/micro_features/no_micro_features_data.h" +#include "tensorflow/lite/micro/examples/micro_speech/micro_features/yes_micro_features_data.h" +#include "tensorflow/lite/micro/examples/micro_speech/micro_speech_model_data.h" +#include "tensorflow/lite/micro/micro_interpreter.h" +#include "tensorflow/lite/micro/micro_log.h" +#include "tensorflow/lite/micro/micro_mutable_op_resolver.h" +#include "tensorflow/lite/micro/testing/micro_test.h" +#include "tensorflow/lite/schema/schema_generated.h" + +TF_LITE_MICRO_TESTS_BEGIN + +TF_LITE_MICRO_TEST(TestInvoke) { + // Map the model into a usable data structure. This doesn't involve any + // copying or parsing, it's a very lightweight operation. + const tflite::Model* model = ::tflite::GetModel(g_micro_speech_model_data); + if (model->version() != TFLITE_SCHEMA_VERSION) { + MicroPrintf( + "Model provided is schema version %d not equal " + "to supported version %d.\n", + model->version(), TFLITE_SCHEMA_VERSION); + } + + // Pull in only the operation implementations we need. + // This relies on a complete list of all the ops needed by this graph. + + tflite::MicroMutableOpResolver<4> micro_op_resolver; + micro_op_resolver.AddDepthwiseConv2D(); + micro_op_resolver.AddFullyConnected(); + micro_op_resolver.AddReshape(); + micro_op_resolver.AddSoftmax(); + + // Create an area of memory to use for input, output, and intermediate arrays. +#if (defined(XTENSA) && defined(VISION_P6)) + constexpr int tensor_arena_size = 28 * 1024; +#elif defined(XTENSA) + constexpr int tensor_arena_size = 15 * 1024; +#elif defined(HEXAGON) + constexpr int tensor_arena_size = 25 * 1024; +#else + constexpr int tensor_arena_size = 10 * 1024; +#endif + alignas(16) uint8_t tensor_arena[tensor_arena_size]; + + // Build an interpreter to run the model with. 
+ tflite::MicroInterpreter interpreter(model, micro_op_resolver, tensor_arena, + tensor_arena_size); + interpreter.AllocateTensors(); + + // Get information about the memory area to use for the model's input. + TfLiteTensor* input = interpreter.input(0); + + // Make sure the input has the properties we expect. + TF_LITE_MICRO_EXPECT(input != nullptr); + TF_LITE_MICRO_EXPECT_EQ(2, input->dims->size); + TF_LITE_MICRO_EXPECT_EQ(1, input->dims->data[0]); + TF_LITE_MICRO_EXPECT_EQ(1960, input->dims->data[1]); + TF_LITE_MICRO_EXPECT_EQ(kTfLiteInt8, input->type); + + // Copy a spectrogram created from a .wav audio file of someone saying "Yes", + // into the memory area used for the input. + const int8_t* yes_features_data = g_yes_micro_f2e59fea_nohash_1_data; + for (size_t i = 0; i < input->bytes; ++i) { + input->data.int8[i] = yes_features_data[i]; + } + + // Run the model on this input and make sure it succeeds. + TfLiteStatus invoke_status = interpreter.Invoke(); + if (invoke_status != kTfLiteOk) { + MicroPrintf("Invoke failed\n"); + } + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, invoke_status); + + // Get the output from the model, and make sure it's the expected size and + // type. + TfLiteTensor* output = interpreter.output(0); + TF_LITE_MICRO_EXPECT_EQ(2, output->dims->size); + TF_LITE_MICRO_EXPECT_EQ(1, output->dims->data[0]); + TF_LITE_MICRO_EXPECT_EQ(4, output->dims->data[1]); + TF_LITE_MICRO_EXPECT_EQ(kTfLiteInt8, output->type); + + // There are four possible classes in the output, each with a score. + const int kSilenceIndex = 0; + const int kUnknownIndex = 1; + const int kYesIndex = 2; + const int kNoIndex = 3; + + // Make sure that the expected "Yes" score is higher than the other classes. 
+ uint8_t silence_score = output->data.int8[kSilenceIndex] + 128; + uint8_t unknown_score = output->data.int8[kUnknownIndex] + 128; + uint8_t yes_score = output->data.int8[kYesIndex] + 128; + uint8_t no_score = output->data.int8[kNoIndex] + 128; + TF_LITE_MICRO_EXPECT_GT(yes_score, silence_score); + TF_LITE_MICRO_EXPECT_GT(yes_score, unknown_score); + TF_LITE_MICRO_EXPECT_GT(yes_score, no_score); + + // Now test with a different input, from a recording of "No". + const int8_t* no_features_data = g_no_micro_f9643d42_nohash_4_data; + for (size_t i = 0; i < input->bytes; ++i) { + input->data.int8[i] = no_features_data[i]; + } + + // Run the model on this "No" input. + invoke_status = interpreter.Invoke(); + if (invoke_status != kTfLiteOk) { + MicroPrintf("Invoke failed\n"); + } + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, invoke_status); + + // Get the output from the model, and make sure it's the expected size and + // type. + output = interpreter.output(0); + TF_LITE_MICRO_EXPECT_EQ(2, output->dims->size); + TF_LITE_MICRO_EXPECT_EQ(1, output->dims->data[0]); + TF_LITE_MICRO_EXPECT_EQ(4, output->dims->data[1]); + TF_LITE_MICRO_EXPECT_EQ(kTfLiteInt8, output->type); + + // Make sure that the expected "No" score is higher than the other classes. 
+ silence_score = output->data.int8[kSilenceIndex] + 128; + unknown_score = output->data.int8[kUnknownIndex] + 128; + yes_score = output->data.int8[kYesIndex] + 128; + no_score = output->data.int8[kNoIndex] + 128; + TF_LITE_MICRO_EXPECT_GT(no_score, silence_score); + TF_LITE_MICRO_EXPECT_GT(no_score, unknown_score); + TF_LITE_MICRO_EXPECT_GT(no_score, yes_score); + + MicroPrintf("Ran successfully\n"); +} + +TF_LITE_MICRO_TESTS_END diff --git a/tensorflow/lite/micro/examples/micro_speech/recognize_commands.cc b/tensorflow/lite/micro/examples/micro_speech/recognize_commands.cc new file mode 100644 index 0000000..99edb47 --- /dev/null +++ b/tensorflow/lite/micro/examples/micro_speech/recognize_commands.cc @@ -0,0 +1,139 @@ +/* Copyright 2017 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/ + +#include "tensorflow/lite/micro/examples/micro_speech/recognize_commands.h" + +#include <limits> + +#include "tensorflow/lite/micro/micro_log.h" + +RecognizeCommands::RecognizeCommands(int32_t average_window_duration_ms, + uint8_t detection_threshold, + int32_t suppression_ms, + int32_t minimum_count) + : average_window_duration_ms_(average_window_duration_ms), + detection_threshold_(detection_threshold), + suppression_ms_(suppression_ms), + minimum_count_(minimum_count), + previous_results_() { + previous_top_label_ = "silence"; + previous_top_label_time_ = std::numeric_limits<int32_t>::min(); +} + +TfLiteStatus RecognizeCommands::ProcessLatestResults( + const TfLiteTensor* latest_results, const int32_t current_time_ms, + const char** found_command, uint8_t* score, bool* is_new_command) { + if ((latest_results->dims->size != 2) || + (latest_results->dims->data[0] != 1) || + (latest_results->dims->data[1] != kCategoryCount)) { + MicroPrintf( + "The results for recognition should contain %d elements, but there are " + "%d in an %d-dimensional shape", + kCategoryCount, latest_results->dims->data[1], + latest_results->dims->size); + return kTfLiteError; + } + + if (latest_results->type != kTfLiteInt8) { + MicroPrintf( + "The results for recognition should be int8_t elements, but are %d", + latest_results->type); + return kTfLiteError; + } + + if ((!previous_results_.empty()) && + (current_time_ms < previous_results_.front().time_)) { + MicroPrintf( + "Results must be fed in increasing time order, but received a " + "timestamp of %d that was earlier than the previous one of %d", + current_time_ms, previous_results_.front().time_); + return kTfLiteError; + } + + // Add the latest results to the head of the queue. + previous_results_.push_back({current_time_ms, latest_results->data.int8}); + + // Prune any earlier results that are too old for the averaging window.
+ const int64_t time_limit = current_time_ms - average_window_duration_ms_; + while ((!previous_results_.empty()) && + previous_results_.front().time_ < time_limit) { + previous_results_.pop_front(); + } + + // If there are too few results, assume the result will be unreliable and + // bail. + const int64_t how_many_results = previous_results_.size(); + const int64_t earliest_time = previous_results_.front().time_; + const int64_t samples_duration = current_time_ms - earliest_time; + if ((how_many_results < minimum_count_) || + (samples_duration < (average_window_duration_ms_ / 4))) { + *found_command = previous_top_label_; + *score = 0; + *is_new_command = false; + return kTfLiteOk; + } + + // Calculate the average score across all the results in the window. + int32_t average_scores[kCategoryCount]; + for (int offset = 0; offset < previous_results_.size(); ++offset) { + PreviousResultsQueue::Result previous_result = + previous_results_.from_front(offset); + const int8_t* scores = previous_result.scores; + for (int i = 0; i < kCategoryCount; ++i) { + if (offset == 0) { + average_scores[i] = scores[i] + 128; + } else { + average_scores[i] += scores[i] + 128; + } + } + } + for (int i = 0; i < kCategoryCount; ++i) { + average_scores[i] /= how_many_results; + } + + // Find the current highest scoring category. + int current_top_index = 0; + int32_t current_top_score = 0; + for (int i = 0; i < kCategoryCount; ++i) { + if (average_scores[i] > current_top_score) { + current_top_score = average_scores[i]; + current_top_index = i; + } + } + const char* current_top_label = kCategoryLabels[current_top_index]; + + // If we've recently had another label trigger, assume one that occurs too + // soon afterwards is a bad result. 
+ int64_t time_since_last_top; + if ((previous_top_label_ == kCategoryLabels[0]) || + (previous_top_label_time_ == std::numeric_limits<int32_t>::min())) { + time_since_last_top = std::numeric_limits<int64_t>::max(); + } else { + time_since_last_top = current_time_ms - previous_top_label_time_; + } + if ((current_top_score > detection_threshold_) && + ((current_top_label != previous_top_label_) || + (time_since_last_top > suppression_ms_))) { + previous_top_label_ = current_top_label; + previous_top_label_time_ = current_time_ms; + *is_new_command = true; + } else { + *is_new_command = false; + } + *found_command = current_top_label; + *score = current_top_score; + + return kTfLiteOk; +} diff --git a/tensorflow/lite/micro/examples/micro_speech/recognize_commands.h b/tensorflow/lite/micro/examples/micro_speech/recognize_commands.h new file mode 100644 index 0000000..8a5a895 --- /dev/null +++ b/tensorflow/lite/micro/examples/micro_speech/recognize_commands.h @@ -0,0 +1,151 @@ +/* Copyright 2017 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License.
+==============================================================================*/ + +#ifndef TENSORFLOW_LITE_MICRO_EXAMPLES_MICRO_SPEECH_RECOGNIZE_COMMANDS_H_ +#define TENSORFLOW_LITE_MICRO_EXAMPLES_MICRO_SPEECH_RECOGNIZE_COMMANDS_H_ + +#include <cstdint> + +#include "tensorflow/lite/c/common.h" +#include "tensorflow/lite/micro/examples/micro_speech/micro_features/micro_model_settings.h" +#include "tensorflow/lite/micro/micro_log.h" + +// Partial implementation of std::deque, just providing the functionality +// that's needed to keep a record of previous neural network results over a +// short time period, so they can be averaged together to produce a more +// accurate overall prediction. This doesn't use any dynamic memory allocation +// so it's a better fit for microcontroller applications, but this does mean +// there are hard limits on the number of results it can store. +class PreviousResultsQueue { + public: + PreviousResultsQueue() : front_index_(0), size_(0) {} + + // Data structure that holds an inference result, and the time when it + // was recorded.
+ struct Result { + Result() : time_(0), scores() {} + Result(int32_t time, int8_t* input_scores) : time_(time) { + for (int i = 0; i < kCategoryCount; ++i) { + scores[i] = input_scores[i]; + } + } + int32_t time_; + int8_t scores[kCategoryCount]; + }; + + int size() { return size_; } + bool empty() { return size_ == 0; } + Result& front() { return results_[front_index_]; } + Result& back() { + int back_index = front_index_ + (size_ - 1); + if (back_index >= kMaxResults) { + back_index -= kMaxResults; + } + return results_[back_index]; + } + + void push_back(const Result& entry) { + if (size() >= kMaxResults) { + MicroPrintf("Couldn't push_back latest result, too many already!"); + return; + } + size_ += 1; + back() = entry; + } + + Result pop_front() { + if (size() <= 0) { + MicroPrintf("Couldn't pop_front result, none present!"); + return Result(); + } + Result result = front(); + front_index_ += 1; + if (front_index_ >= kMaxResults) { + front_index_ = 0; + } + size_ -= 1; + return result; + } + + // Most of the functions are duplicates of dequeue containers, but this + // is a helper that makes it easy to iterate through the contents of the + // queue. + Result& from_front(int offset) { + if ((offset < 0) || (offset >= size_)) { + MicroPrintf("Attempt to read beyond the end of the queue!"); + offset = size_ - 1; + } + int index = front_index_ + offset; + if (index >= kMaxResults) { + index -= kMaxResults; + } + return results_[index]; + } + + private: + static constexpr int kMaxResults = 50; + Result results_[kMaxResults]; + + int front_index_; + int size_; +}; + +// This class is designed to apply a very primitive decoding model on top of the +// instantaneous results from running an audio recognition model on a single +// window of samples. It applies smoothing over time so that noisy individual +// label scores are averaged, increasing the confidence that apparent matches +// are real. 
+// To use it, you should create a class object with the configuration you +// want, and then feed results from running a TensorFlow model into the +// processing method. The timestamp for each subsequent call should be +// increasing from the previous, since the class is designed to process a stream +// of data over time. +class RecognizeCommands { + public: + // labels should be a list of the strings associated with each one-hot score. + // The window duration controls the smoothing. Longer durations will give a + // higher confidence that the results are correct, but may miss some commands. + // The detection threshold has a similar effect, with high values increasing + // the precision at the cost of recall. The minimum count controls how many + // results need to be in the averaging window before it's seen as a reliable + // average. This prevents erroneous results when the averaging window is + // initially being populated for example. The suppression argument disables + // further recognitions for a set time after one has been triggered, which can + // help reduce spurious recognitions. + explicit RecognizeCommands(int32_t average_window_duration_ms = 1000, + uint8_t detection_threshold = 200, + int32_t suppression_ms = 1500, + int32_t minimum_count = 3); + + // Call this with the results of running a model on sample data. 
+ TfLiteStatus ProcessLatestResults(const TfLiteTensor* latest_results, + const int32_t current_time_ms, + const char** found_command, uint8_t* score, + bool* is_new_command); + + private: + // Configuration + int32_t average_window_duration_ms_; + uint8_t detection_threshold_; + int32_t suppression_ms_; + int32_t minimum_count_; + + // Working variables + PreviousResultsQueue previous_results_; + const char* previous_top_label_; + int32_t previous_top_label_time_; +}; + +#endif // TENSORFLOW_LITE_MICRO_EXAMPLES_MICRO_SPEECH_RECOGNIZE_COMMANDS_H_ diff --git a/tensorflow/lite/micro/examples/micro_speech/recognize_commands_test.cc b/tensorflow/lite/micro/examples/micro_speech/recognize_commands_test.cc new file mode 100644 index 0000000..7c1e4c6 --- /dev/null +++ b/tensorflow/lite/micro/examples/micro_speech/recognize_commands_test.cc @@ -0,0 +1,199 @@ +/* Copyright 2017 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/ + +#include "tensorflow/lite/micro/examples/micro_speech/recognize_commands.h" + +#include "tensorflow/lite/micro/test_helpers.h" +#include "tensorflow/lite/micro/testing/micro_test.h" + +TF_LITE_MICRO_TESTS_BEGIN + +TF_LITE_MICRO_TEST(PreviousResultsQueueBasic) { + PreviousResultsQueue queue; + TF_LITE_MICRO_EXPECT_EQ(0, queue.size()); + + int8_t scores_a[4] = {0, 0, 0, 1}; + queue.push_back({0, scores_a}); + TF_LITE_MICRO_EXPECT_EQ(1, queue.size()); + TF_LITE_MICRO_EXPECT_EQ(0, queue.front().time_); + TF_LITE_MICRO_EXPECT_EQ(0, queue.back().time_); + + int8_t scores_b[4] = {0, 0, 1, 0}; + queue.push_back({1, scores_b}); + TF_LITE_MICRO_EXPECT_EQ(2, queue.size()); + TF_LITE_MICRO_EXPECT_EQ(0, queue.front().time_); + TF_LITE_MICRO_EXPECT_EQ(1, queue.back().time_); + + PreviousResultsQueue::Result pop_result = queue.pop_front(); + TF_LITE_MICRO_EXPECT_EQ(0, pop_result.time_); + TF_LITE_MICRO_EXPECT_EQ(1, queue.size()); + TF_LITE_MICRO_EXPECT_EQ(1, queue.front().time_); + TF_LITE_MICRO_EXPECT_EQ(1, queue.back().time_); + + int8_t scores_c[4] = {0, 1, 0, 0}; + queue.push_back({2, scores_c}); + TF_LITE_MICRO_EXPECT_EQ(2, queue.size()); + TF_LITE_MICRO_EXPECT_EQ(1, queue.front().time_); + TF_LITE_MICRO_EXPECT_EQ(2, queue.back().time_); +} + +TF_LITE_MICRO_TEST(PreviousResultsQueuePushPop) { + PreviousResultsQueue queue; + TF_LITE_MICRO_EXPECT_EQ(0, queue.size()); + + for (int i = 0; i < 123; ++i) { + int8_t scores[4] = {0, 0, 0, 1}; + queue.push_back({i, scores}); + TF_LITE_MICRO_EXPECT_EQ(1, queue.size()); + TF_LITE_MICRO_EXPECT_EQ(i, queue.front().time_); + TF_LITE_MICRO_EXPECT_EQ(i, queue.back().time_); + + PreviousResultsQueue::Result pop_result = queue.pop_front(); + TF_LITE_MICRO_EXPECT_EQ(i, pop_result.time_); + TF_LITE_MICRO_EXPECT_EQ(0, queue.size()); + } +} + +TF_LITE_MICRO_TEST(RecognizeCommandsTestBasic) { + RecognizeCommands recognize_commands; + + const int8_t result_data[] = 
{127, -128, -128, -128}; + int result_dims[] = {2, 1, 4}; + TfLiteTensor results = tflite::testing::CreateQuantizedTensor( + result_data, tflite::testing::IntArrayFromInts(result_dims), -128.0f, + 127.0f); + + const char* found_command; + uint8_t score; + bool is_new_command; + TF_LITE_MICRO_EXPECT_EQ( + kTfLiteOk, recognize_commands.ProcessLatestResults( + &results, 0, &found_command, &score, &is_new_command)); +} + +TF_LITE_MICRO_TEST(RecognizeCommandsTestFindCommands) { + RecognizeCommands recognize_commands(1000, 51); + + const int8_t yes_data[] = {-128, -128, 127, -128}; + int yes_dims[] = {2, 1, 4}; + TfLiteTensor yes_results = tflite::testing::CreateQuantizedTensor( + yes_data, tflite::testing::IntArrayFromInts(yes_dims), -128.0f, 127.0f); + + bool has_found_new_command = false; + const char* new_command; + for (int i = 0; i < 10; ++i) { + const char* found_command; + uint8_t score; + bool is_new_command; + int32_t current_time_ms = 0 + (i * 100); + TF_LITE_MICRO_EXPECT_EQ( + kTfLiteOk, recognize_commands.ProcessLatestResults( + &yes_results, current_time_ms, &found_command, &score, + &is_new_command)); + if (is_new_command) { + TF_LITE_MICRO_EXPECT(!has_found_new_command); + has_found_new_command = true; + new_command = found_command; + } + } + TF_LITE_MICRO_EXPECT(has_found_new_command); + if (has_found_new_command) { + TF_LITE_MICRO_EXPECT_EQ(0, tflite::testing::TestStrcmp("yes", new_command)); + } + + const int8_t no_data[] = {-128, -128, -128, 127}; + int no_dims[] = {2, 1, 4}; + TfLiteTensor no_results = tflite::testing::CreateQuantizedTensor( + no_data, tflite::testing::IntArrayFromInts(no_dims), -128.0f, 127.0f); + has_found_new_command = false; + new_command = ""; + uint8_t score; + for (int i = 0; i < 10; ++i) { + const char* found_command; + bool is_new_command; + int32_t current_time_ms = 1000 + (i * 100); + TF_LITE_MICRO_EXPECT_EQ( + kTfLiteOk, recognize_commands.ProcessLatestResults( + &no_results, current_time_ms, &found_command, &score, + 
&is_new_command)); + if (is_new_command) { + TF_LITE_MICRO_EXPECT(!has_found_new_command); + has_found_new_command = true; + new_command = found_command; + } + } + TF_LITE_MICRO_EXPECT(has_found_new_command); + if (has_found_new_command) { + TF_LITE_MICRO_EXPECT_EQ(231, score); + TF_LITE_MICRO_EXPECT_EQ(0, tflite::testing::TestStrcmp("no", new_command)); + } +} + +TF_LITE_MICRO_TEST(RecognizeCommandsTestBadInputLength) { + RecognizeCommands recognize_commands(1000, 51); + + const int8_t bad_data[] = {-128, -128, 127}; + int bad_dims[] = {2, 1, 3}; + TfLiteTensor bad_results = tflite::testing::CreateQuantizedTensor( + bad_data, tflite::testing::IntArrayFromInts(bad_dims), -128.0f, 127.0f); + + const char* found_command; + uint8_t score; + bool is_new_command; + TF_LITE_MICRO_EXPECT_NE( + kTfLiteOk, recognize_commands.ProcessLatestResults( + &bad_results, 0, &found_command, &score, &is_new_command)); +} + +TF_LITE_MICRO_TEST(RecognizeCommandsTestBadInputTimes) { + RecognizeCommands recognize_commands(1000, 51); + + const int8_t result_data[] = {-128, -128, 127, -128}; + int result_dims[] = {2, 1, 4}; + TfLiteTensor results = tflite::testing::CreateQuantizedTensor( + result_data, tflite::testing::IntArrayFromInts(result_dims), -128.0f, + 127.0f); + + const char* found_command; + uint8_t score; + bool is_new_command; + TF_LITE_MICRO_EXPECT_EQ( + kTfLiteOk, recognize_commands.ProcessLatestResults( + &results, 100, &found_command, &score, &is_new_command)); + TF_LITE_MICRO_EXPECT_NE( + kTfLiteOk, recognize_commands.ProcessLatestResults( + &results, 0, &found_command, &score, &is_new_command)); +} + +TF_LITE_MICRO_TEST(RecognizeCommandsTestTooFewInputs) { + RecognizeCommands recognize_commands(1000, 51); + + const int8_t result_data[] = {-128, -128, 127, -128}; + int result_dims[] = {2, 1, 4}; + TfLiteTensor results = tflite::testing::CreateQuantizedTensor( + result_data, tflite::testing::IntArrayFromInts(result_dims), -128.0f, + 127.0f); + + const char* found_command; + 
uint8_t score; + bool is_new_command; + TF_LITE_MICRO_EXPECT_EQ( + kTfLiteOk, recognize_commands.ProcessLatestResults( + &results, 100, &found_command, &score, &is_new_command)); + TF_LITE_MICRO_EXPECT_EQ(0, score); + TF_LITE_MICRO_EXPECT_EQ(false, is_new_command); +} + +TF_LITE_MICRO_TESTS_END diff --git a/tensorflow/lite/micro/examples/micro_speech/simple_features/CMSIS/simple_features_generator.cc b/tensorflow/lite/micro/examples/micro_speech/simple_features/CMSIS/simple_features_generator.cc new file mode 100644 index 0000000..33c1e24 --- /dev/null +++ b/tensorflow/lite/micro/examples/micro_speech/simple_features/CMSIS/simple_features_generator.cc @@ -0,0 +1,96 @@ +/* Copyright 2018 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/ + +#include "tensorflow/lite/micro/examples/micro_speech/simple_features/simple_features_generator.h" + +#include "tensorflow/lite/micro/micro_log.h" + +extern "C" { +#define IFFT_FLAG_R 0 +#define BIT_REVERSE_FLAG 1 +#define FFT_SIZE 512 +#define FFT_SIZE_DIV2 256 +#include + +#include "arm_cmplx_mag_squared_q10p6.h" +#include "tensorflow/lite/micro/examples/micro_speech/CMSIS/hanning.h" +} + +void quantize(q15_t* bufA, q15_t* bufB, uint8_t* output); + +q15_t bufA[FFT_SIZE]; +q15_t bufB[FFT_SIZE]; +arm_rfft_instance_q15 S_arm_fft; +arm_status arm_math_status; + +namespace { +// These constants allow us to allocate fixed-sized arrays on the stack for our +// working memory. +constexpr int kInputSize = 512; +constexpr int kAverageWindowSize = 6; +constexpr int kOutputSize = + ((kInputSize / 2) + (kAverageWindowSize - 1)) / kAverageWindowSize; +} // namespace + +TfLiteStatus GenerateSimpleFeatures(const int16_t* input, int input_size, + int output_size, uint8_t* output) { + if (input_size > kInputSize) { + MicroPrintf("Input size %d larger than %d", input_size, kInputSize); + return kTfLiteError; + } + if (output_size != kOutputSize) { + MicroPrintf("Requested output size %d doesn't match %d", output_size, + kOutputSize); + return kTfLiteError; + } + + // 30ms at 16 kHz = 480 samples + // We want to pad the rest of the 512-sample buffer with zeros + arm_mult_q15((q15_t*)input, g_hanning, bufB, 480); + int i; + for (i = 480; i < 512; i++) { + bufB[i] = 0; + } + + // Should move init code outside of Preprocess() function + arm_math_status = + arm_rfft_init_q15(&S_arm_fft, FFT_SIZE, IFFT_FLAG_R, BIT_REVERSE_FLAG); + arm_rfft_q15(&S_arm_fft, bufB, bufA); + + // The rfft function packs data as follows: + // {real[0], real[N/2], real[1], imag[1], ..., real[N/2-1], imag[N/2-1]} + // Below we pack as follows: + // {real[0], 0, real[1], imag[1], ..., real[N/2-1], imag[N/2-1, real[N/2], 0} + 
bufA[FFT_SIZE_DIV2] = bufA[1]; + bufA[FFT_SIZE_DIV2 + 1] = 0; + bufA[1] = 0; + arm_cmplx_mag_squared_q10p6(bufA, bufB, FFT_SIZE_DIV2 + 1); + + quantize(bufA, bufB, output); + + return kTfLiteOk; +} + +void quantize(q15_t* bufA, q15_t* bufB, uint8_t* output) { + int i; + for (i = 0; i < 42; i++) { + arm_mean_q15(bufB + 6 * i, 6, bufA + i); + } + arm_mean_q15(bufB + 252, 5, bufA + 42); + + for (i = 0; i < 43; i++) { + output[i] = (uint8_t)(bufA[i] >> 5); + } +} diff --git a/tensorflow/lite/micro/examples/micro_speech/simple_features/fixed_point/simple_features_generator.cc b/tensorflow/lite/micro/examples/micro_speech/simple_features/fixed_point/simple_features_generator.cc new file mode 100644 index 0000000..03e8b27 --- /dev/null +++ b/tensorflow/lite/micro/examples/micro_speech/simple_features/fixed_point/simple_features_generator.cc @@ -0,0 +1,212 @@ +/* Copyright 2018 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +// Reference implementation of the preprocessing pipeline, with the same +// results as the audio tutorial at +// https://www.tensorflow.org/tutorials/sequences/audio_recognition +// This module takes 30ms of PCM-encoded signed 16-bit audio samples (at 16KHz, +// so 480 values), and extracts a power spectrum of frequencies. 
There are 43 +// frequency bands in the result, derived from the original 256 output from the +// discrete Fourier transform, and averaged together in groups of 6. +// It's expected that most platforms will have optimized versions of the +// functions used here, for example replacing the DFT with an FFT, so this +// version shouldn't be used where performance is critical. +// This implementation uses fixed point for any non-constant calculations, +// instead of floating point, to help show how this can work on platforms that +// don't have good float support. + +#include "tensorflow/lite/micro/examples/micro_speech/simple_features/simple_features_generator.h" + +#include + +#include "tensorflow/lite/micro/examples/micro_speech/simple_features/simple_model_settings.h" +#include "tensorflow/lite/micro/micro_log.h" + +namespace { + +// q format notation: qx.y => 1 sign bit, x-1 integer bits, y fraction bits. +// Use standard (non-saturating) arithmetic with signed ints of size x+y bits. +// Sacrifice some precision to avoid use of 64-bit ints. + +// q1.15 * q1.15 => q2.30 +inline int32_t Q1_15_FixedMultiply_Q2_30(int16_t a, int16_t b) { + int32_t big_a = a; + int32_t big_b = b; + return big_a * big_b; +} + +// q2.30 * q2.30 => q10.22 +inline int32_t Q2_30_FixedMultiply_Q10_22(int32_t a, int32_t b) { + // q2.30 result + int32_t tmp = (a >> 15) * (b >> 15); + // q10.22 result + return tmp >> 8; +} + +// q10.22 * q10.22 => q10.22 +// Will overflow if product is >= 512. +// Largest product in small test set is 465.25 +inline int32_t Q10_22_FixedMultiply_Q10_22(int32_t a, int32_t b) { + // q10.22 result + return (a >> 11) * (b >> 11); +} + +// float => q2.30 +// No checking for saturation. Only used for inputs in range [-1, 1]. +inline int32_t FloatToFixed_Q2_30(float input) { + return static_cast(roundf(input * (1 << 30))); +} + +// Performs a discrete Fourier transform on the real inputs. 
This corresponds to +// rdft() in the FFT package at http://www.kurims.kyoto-u.ac.jp/~ooura/fft.html, +// and to kiss_fftr() in KISSFFT at https://github.com/mborgerding/kissfft. +// It takes in an array of float real values, and returns a result of the same +// length with q10.22 fixed point real and imaginary components interleaved, so +// fourier_output[0] is the first real value, fourier_output[1] is the first +// imaginary, fourier_output[2] is the second real, and so on. +// The calling function should ensure that the array passed in as fourier_output +// is at least time_series_size in length. Most optimized FFT implementations +// require the length to be a power of two as well, but this version doesn't +// enforce that. + +// input: q2.30 fixed point. output: q10.22 fixed point. +// Outputs interpreted as q10.22 fixed point are un-scaled. +void CalculateDiscreteFourierTransform(int32_t* time_series, + int time_series_size, + int32_t* fourier_output) { + for (int i = 0; i < time_series_size / 2; ++i) { + int32_t real = 0; + for (int j = 0; j < time_series_size; ++j) { + const int32_t real_scale = + FloatToFixed_Q2_30(cos(j * i * M_PI * 2 / time_series_size)); + real += Q2_30_FixedMultiply_Q10_22(time_series[j], real_scale); + } + int32_t imaginary = 0; + for (int j = 0; j < time_series_size; ++j) { + const int32_t imaginary_scale = + FloatToFixed_Q2_30(sin(j * i * M_PI * 2 / time_series_size)); + imaginary -= Q2_30_FixedMultiply_Q10_22(time_series[j], imaginary_scale); + } + fourier_output[(i * 2) + 0] = real; + fourier_output[(i * 2) + 1] = imaginary; + } +} + +// Produces a simple sine curve that is used to ensure frequencies at the center +// of the current sample window are weighted more heavily than those at the end. +// q1.15 output format. 
+void CalculatePeriodicHann(int window_length, int16_t* window_function) { + for (int i = 0; i < window_length; ++i) { + const float real_value = (0.5 - 0.5 * cos((2 * M_PI * i) / window_length)); + int tmp = static_cast(roundf(real_value * (1 << 15))); + // Saturate the 0x8000 value to 0x7fff + if (tmp > 0x7fff) tmp = 0x7fff; + window_function[i] = tmp; + } +} + +} // namespace + +TfLiteStatus GenerateSimpleFeatures(const int16_t* input, int input_size, + int output_size, uint8_t* output) { + // Ensure our input and output data arrays are valid. + if (input_size > kMaxAudioSampleSize) { + MicroPrintf("Input size %d larger than %d", input_size, + kMaxAudioSampleSize); + return kTfLiteError; + } + if (output_size != kFeatureSliceSize) { + MicroPrintf("Requested output size %d doesn't match %d", output_size, + kFeatureSliceSize); + return kTfLiteError; + } + + // Pre-calculate the window function we'll be applying to the input data. + // In a real application, we'd calculate this table once in an initialization + // function and store it for repeated reuse. + // q1.15 format. + int16_t window_function[kMaxAudioSampleSize]; + CalculatePeriodicHann(input_size, window_function); + + // Apply the window function to our time series input, and pad it with zeroes + // to the next power of two. + int32_t fixed_input[kMaxAudioSampleSize]; + for (int i = 0; i < kMaxAudioSampleSize; ++i) { + if (i < input_size) { + // input is int16_t. Treat as q1.15 fixed point value in range [-1,1) + // window_function is also q1.15 fixed point number + fixed_input[i] = Q1_15_FixedMultiply_Q2_30(input[i], window_function[i]); + } else { + fixed_input[i] = 0; + } + } + + // Pull the frequency data from the time series sample. + // Calculated in q10.22 format from q2.30 inputs. 
+ int32_t fourier_values[kMaxAudioSampleSize]; + CalculateDiscreteFourierTransform(fixed_input, kMaxAudioSampleSize, + fourier_values); + + // We have the complex numbers giving us information about each frequency + // band, but all we want to know is how strong each frequency is, so calculate + // the squared magnitude by adding together the squares of each component. + int32_t power_spectrum[kMaxAudioSampleSize / 2]; + for (int i = 0; i < (kMaxAudioSampleSize / 2); ++i) { + const int32_t real = fourier_values[(i * 2) + 0]; + const int32_t imaginary = fourier_values[(i * 2) + 1]; + // q10.22 results + power_spectrum[i] = Q10_22_FixedMultiply_Q10_22(real, real) + + Q10_22_FixedMultiply_Q10_22(imaginary, imaginary); + } + + // Finally, reduce the size of the output by averaging together six adjacent + // frequencies into each slot, producing an array of 43 values. + // Power_spectrum numbers are q10.22. Divide by kAverageWindowSize inside + // loop to prevent overflow. + for (int i = 0; i < kFeatureSliceSize; ++i) { + int32_t average = 0; + for (int j = 0; j < kAverageWindowSize; ++j) { + const int index = (i * kAverageWindowSize) + j; + if (index < (kMaxAudioSampleSize / 2)) { + average += power_spectrum[index] / kAverageWindowSize; + } + } + // Quantize the result into eight bits, effectively multiplying by two. + // The 127.5 constant here has to match the features_max value defined in + // tensorflow/examples/speech_commands/input_data.py, and this also assumes + // that features_min is zero. 
+ // + // q10.22 input + // integer output + // + // output = (input - features_min) * + // (output_max - output_min) / (features_max - features_min) + // == (input) * (255) / (127.5) + // == input * 2 + // == input << 1 + // Also want to round to nearest integer and only keep integer bits + // => ((input << 1) + 0x200000) >> 22 + // == (input + 0x100000) >> 21 + int32_t quantized_average = (average + 0x100000) >> 21; + if (quantized_average < 0) { + quantized_average = 0; + } + if (quantized_average > 255) { + quantized_average = 255; + } + output[i] = quantized_average; + } + return kTfLiteOk; +} diff --git a/tensorflow/lite/micro/examples/micro_speech/simple_features/model.cc b/tensorflow/lite/micro/examples/micro_speech/simple_features/model.cc new file mode 100644 index 0000000..e8fea5b --- /dev/null +++ b/tensorflow/lite/micro/examples/micro_speech/simple_features/model.cc @@ -0,0 +1,1674 @@ +/* Copyright 2020 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +// This is a standard TensorFlow Lite FlatBuffer model file that has been +// converted into a C data array, so it can be easily compiled into a binary +// for devices that don't have a file system. 
It was created using the command: +// xxd -i model.tflite > model.cc + +#include "tensorflow/lite/micro/examples/micro_speech/simple_features/model.h" + +const unsigned char g_model[] = { + 0x18, 0x00, 0x00, 0x00, 0x54, 0x46, 0x4c, 0x33, 0x00, 0x00, 0x0e, 0x00, + 0x18, 0x00, 0x04, 0x00, 0x08, 0x00, 0x0c, 0x00, 0x10, 0x00, 0x14, 0x00, + 0x0e, 0x00, 0x00, 0x00, 0x03, 0x00, 0x00, 0x00, 0x08, 0x4d, 0x00, 0x00, + 0x0c, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x20, 0x00, 0x00, 0x00, + 0x01, 0x00, 0x00, 0x00, 0xf4, 0x47, 0x00, 0x00, 0x0f, 0x00, 0x00, 0x00, + 0x54, 0x4f, 0x43, 0x4f, 0x20, 0x43, 0x6f, 0x6e, 0x76, 0x65, 0x72, 0x74, + 0x65, 0x64, 0x2e, 0x00, 0x09, 0x00, 0x00, 0x00, 0xd4, 0x47, 0x00, 0x00, + 0xb4, 0x47, 0x00, 0x00, 0xe4, 0x02, 0x00, 0x00, 0xb4, 0x02, 0x00, 0x00, + 0xac, 0x02, 0x00, 0x00, 0x1c, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, + 0x0c, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0xb8, 0xb3, 0xff, 0xff, + 0xbc, 0xb3, 0xff, 0xff, 0xc0, 0xb3, 0xff, 0xff, 0x1e, 0xb4, 0xff, 0xff, + 0x04, 0x00, 0x00, 0x00, 0x80, 0x02, 0x00, 0x00, 0x89, 0xa5, 0xe8, 0xc1, + 0xb1, 0x89, 0x5b, 0xc6, 0x4f, 0x9b, 0xd3, 0x74, 0x93, 0x88, 0xff, 0xaf, + 0x89, 0xff, 0xf4, 0x70, 0xcc, 0x75, 0x78, 0xbf, 0x92, 0xcd, 0xa9, 0xa8, + 0xd6, 0x6a, 0x6f, 0x7b, 0x7f, 0xd8, 0xa8, 0xb1, 0xe6, 0x32, 0x21, 0x70, + 0xa0, 0x9c, 0x6f, 0xc8, 0xc6, 0x59, 0x67, 0x93, 0x97, 0xca, 0x3f, 0xde, + 0xcb, 0x74, 0x7c, 0xb5, 0xa4, 0xd9, 0x66, 0xc6, 0x87, 0x98, 0xa5, 0xd0, + 0xbb, 0xb9, 0xc2, 0xb2, 0xaa, 0x79, 0x25, 0xb9, 0x6d, 0x5a, 0xc8, 0x7f, + 0x70, 0x85, 0x79, 0xbc, 0x6a, 0x9b, 0xd1, 0x9a, 0x9c, 0x51, 0x53, 0x71, + 0x89, 0xc0, 0xb4, 0xac, 0xae, 0x47, 0x67, 0x70, 0x79, 0xd2, 0x81, 0xa5, + 0xd2, 0x09, 0x38, 0x82, 0x74, 0xc9, 0x5d, 0xaf, 0xc1, 0x4f, 0x53, 0x99, + 0xcb, 0xb7, 0x3a, 0xba, 0xe8, 0x7f, 0x76, 0xb9, 0xb3, 0xd3, 0x60, 0xc0, + 0x93, 0x9f, 0x87, 0xbd, 0xd0, 0xb8, 0xca, 0xc1, 0xb6, 0x6c, 0x01, 0xc1, + 0x5c, 0x5d, 0xb2, 0x82, 0x76, 0x77, 0x39, 0xbc, 0x72, 0x6a, 0xc3, 0xb4, + 0x79, 0x21, 0x48, 0x42, 0x86, 
0xa6, 0xbd, 0xaf, 0xae, 0x23, 0x9c, 0x69, + 0x78, 0xc3, 0x6b, 0xb3, 0xab, 0x43, 0xb2, 0x88, 0x71, 0xc6, 0x6b, 0xbe, + 0xc3, 0x75, 0xc2, 0xc3, 0xa5, 0xcf, 0x32, 0xbe, 0xcb, 0xb0, 0xb8, 0xc1, + 0x9c, 0xcf, 0x64, 0xc4, 0xb4, 0x96, 0xa8, 0xb9, 0xcb, 0xc0, 0xc0, 0xb8, + 0xb8, 0x77, 0x65, 0xc0, 0xc4, 0xb3, 0xc5, 0x77, 0x9b, 0x61, 0xd4, 0xac, + 0x7e, 0x36, 0xb1, 0xae, 0x36, 0x36, 0xb8, 0x39, 0x6b, 0x70, 0x9c, 0xb5, + 0x88, 0x5c, 0xb3, 0x6a, 0xad, 0xc5, 0x7b, 0xb4, 0xad, 0xaa, 0xc4, 0x84, + 0x5e, 0xc4, 0x67, 0xc1, 0xde, 0xba, 0xcf, 0xbd, 0xa0, 0xd3, 0x35, 0xb3, + 0xe7, 0xc8, 0xb8, 0xb8, 0xaf, 0xb4, 0x59, 0xb8, 0xb4, 0xac, 0xac, 0xaa, + 0xc7, 0xad, 0xc8, 0xb6, 0xac, 0x99, 0xa0, 0xcb, 0xc1, 0xc8, 0xcb, 0x89, + 0xc3, 0xac, 0xca, 0x8b, 0x97, 0x1f, 0xbd, 0xbf, 0x13, 0xad, 0xc8, 0x41, + 0x56, 0x3c, 0x86, 0xb2, 0x61, 0xc4, 0xbb, 0x71, 0xba, 0x92, 0x8d, 0xc3, + 0x86, 0xcb, 0xc5, 0x8d, 0x88, 0xc8, 0x6a, 0xbf, 0x9c, 0xcd, 0xcd, 0xc0, + 0x81, 0xb1, 0x47, 0xb5, 0xf0, 0xce, 0xb1, 0xc1, 0xaa, 0xa8, 0x54, 0xcb, + 0xbc, 0xc7, 0xc5, 0x8e, 0xc3, 0xce, 0xc7, 0xb9, 0xb9, 0xa1, 0xc5, 0xbd, + 0xb8, 0xb8, 0xb7, 0x81, 0xb6, 0xba, 0xd2, 0x90, 0xbc, 0x96, 0xbe, 0xba, + 0x53, 0xb5, 0xc7, 0x3c, 0x3c, 0x1f, 0x90, 0xaa, 0x5a, 0xb8, 0xba, 0x7e, + 0xbc, 0x9e, 0xc2, 0xb1, 0x6e, 0xc0, 0xc4, 0x91, 0xf0, 0xb5, 0x60, 0xad, + 0x73, 0xba, 0xcd, 0xba, 0x6e, 0x94, 0x39, 0xb5, 0xe4, 0xbe, 0xb4, 0xb5, + 0xa0, 0xa9, 0x51, 0xac, 0xbc, 0xc2, 0xb3, 0x8a, 0xbd, 0x9a, 0xca, 0xb3, + 0xbf, 0xaf, 0xb5, 0x9a, 0xb9, 0xc3, 0xb6, 0x92, 0xb5, 0xc1, 0xb0, 0x95, + 0xd6, 0xcc, 0xbb, 0xbb, 0xa9, 0xb9, 0xac, 0x4a, 0x62, 0x27, 0xa7, 0xa7, + 0x30, 0xbd, 0xb1, 0x73, 0xa1, 0x74, 0xc2, 0xb7, 0x58, 0xc0, 0xae, 0x8f, + 0xe1, 0xac, 0x4e, 0xb0, 0x55, 0xc9, 0xc8, 0x9f, 0x83, 0x8e, 0x3e, 0xd5, + 0xb5, 0xbe, 0xcd, 0xb2, 0xa6, 0xc8, 0x64, 0xac, 0xc0, 0xc8, 0xaf, 0x99, + 0xc5, 0x9e, 0xb8, 0xbd, 0xa9, 0xc2, 0xb3, 0x81, 0xb4, 0xc2, 0xb4, 0x8f, + 0xbc, 0xb8, 0x9c, 0x88, 0xbe, 0xc6, 0xbf, 0xba, 0xc8, 0xb4, 0xab, 0x5b, + 0x92, 0x51, 0xb1, 0x9a, 0x44, 
0xb9, 0xab, 0x80, 0xa5, 0x3e, 0xc0, 0xa5, + 0x5c, 0xb6, 0xa8, 0xa2, 0xb3, 0x9a, 0x6b, 0xb3, 0x34, 0xc6, 0x7e, 0x96, + 0xcb, 0x88, 0x48, 0xc6, 0xa3, 0xbb, 0xd2, 0xa2, 0xaf, 0xd0, 0x6e, 0xae, + 0xb4, 0xce, 0xc8, 0x8f, 0xd7, 0xad, 0xc8, 0xb0, 0xae, 0xb7, 0xb2, 0x70, + 0xb9, 0xad, 0xc1, 0xa0, 0xcb, 0xa2, 0xb0, 0x9b, 0xbe, 0xd3, 0xca, 0xb6, + 0xbd, 0xaf, 0xa9, 0x82, 0xa1, 0xd7, 0xbc, 0x9b, 0x8b, 0xac, 0xaa, 0xac, + 0xad, 0x37, 0xb7, 0xb6, 0x46, 0xae, 0xa9, 0xbd, 0x6b, 0x90, 0x5e, 0xcd, + 0x23, 0xa4, 0x76, 0xa1, 0xc4, 0x96, 0x50, 0xcc, 0x95, 0x99, 0x93, 0xa7, + 0xb2, 0xe1, 0x7c, 0xbd, 0xbd, 0xb5, 0xbf, 0x9a, 0xca, 0x80, 0xd7, 0xae, + 0x79, 0xa8, 0xaa, 0xb2, 0xbc, 0x51, 0xda, 0xa3, 0x80, 0x8b, 0xa2, 0xc8, + 0xd1, 0x94, 0xe1, 0xc4, 0xbd, 0xae, 0xae, 0xcc, 0xb3, 0xca, 0xd5, 0xa1, + 0xd5, 0xa7, 0xaf, 0xd2, 0xb4, 0x8d, 0xcc, 0xc8, 0x63, 0xa3, 0xa4, 0xdf, + 0x6f, 0x7e, 0x98, 0xdf, 0x1b, 0x7b, 0x43, 0x99, 0xb0, 0x99, 0x71, 0xdb, + 0x63, 0x7b, 0x69, 0x9c, 0xba, 0xcd, 0x90, 0xd0, 0xb6, 0xa6, 0x9e, 0x95, + 0x50, 0xb6, 0xff, 0xff, 0xae, 0xb6, 0xff, 0xff, 0x04, 0x00, 0x00, 0x00, + 0x20, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0xc7, 0x05, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x77, 0x00, 0x00, 0x00, + 0xda, 0xb6, 0xff, 0xff, 0x04, 0x00, 0x00, 0x00, 0xc0, 0x44, 0x00, 0x00, + 0x2c, 0x30, 0x38, 0x5a, 0x3d, 0x4c, 0x44, 0x3b, 0x48, 0x48, 0x44, 0x57, + 0x3f, 0x43, 0x45, 0x3a, 0x24, 0x32, 0x21, 0x5c, 0x3f, 0x3a, 0x38, 0x3a, + 0x35, 0x35, 0x2f, 0x51, 0x3c, 0x3a, 0x45, 0x3a, 0x3b, 0x41, 0x39, 0x55, + 0x3c, 0x41, 0x39, 0x44, 0x3a, 0x40, 0x37, 0x48, 0x33, 0x47, 0x36, 0x3e, + 0x3c, 0x41, 0x3f, 0x3e, 0x3e, 0x47, 0x36, 0x3e, 0x41, 0x33, 0x3e, 0x3b, + 0x3a, 0x46, 0x45, 0x40, 0x48, 0x3a, 0x35, 0x4b, 0x45, 0x4d, 0x3c, 0x49, + 0x42, 0x44, 0x3c, 0x4c, 0x3e, 0x3c, 0x44, 0x32, 0x33, 0x41, 0x36, 0x4b, + 0x38, 0x3b, 0x3c, 0x38, 0x3b, 0x45, 0x34, 0x46, 0x40, 0x4e, 0x44, 0x35, + 0x43, 0x36, 0x3d, 0x40, 0x3e, 
0x48, 0x40, 0x34, 0x3a, 0x46, 0x45, 0x43, + 0x45, 0x3f, 0x47, 0x37, 0x36, 0x35, 0x44, 0x3a, 0x3e, 0x37, 0x39, 0x40, + 0x3a, 0x3f, 0x3f, 0x4c, 0x3e, 0x41, 0x43, 0x35, 0x3f, 0x3d, 0x3d, 0x4c, + 0x3c, 0x4a, 0x46, 0x3c, 0x3a, 0x41, 0x40, 0x4e, 0x36, 0x47, 0x40, 0x3b, + 0x47, 0x42, 0x38, 0x4d, 0x48, 0x47, 0x3c, 0x3c, 0x33, 0x3b, 0x3e, 0x42, + 0x3f, 0x3e, 0x3a, 0x3d, 0x32, 0x39, 0x41, 0x46, 0x3a, 0x3a, 0x3e, 0x3e, + 0x47, 0x48, 0x4e, 0x36, 0x44, 0x40, 0x41, 0x45, 0x3a, 0x3c, 0x38, 0x55, + 0x2e, 0x26, 0x2f, 0x32, 0x3f, 0x41, 0x3e, 0x4c, 0x45, 0x36, 0x40, 0x31, + 0x17, 0x2e, 0x14, 0x53, 0x34, 0x30, 0x34, 0x3f, 0x2e, 0x44, 0x2b, 0x4e, + 0x34, 0x3e, 0x34, 0x43, 0x3d, 0x35, 0x3f, 0x46, 0x39, 0x40, 0x38, 0x3e, + 0x35, 0x3b, 0x35, 0x45, 0x3d, 0x40, 0x38, 0x37, 0x40, 0x3e, 0x32, 0x3e, + 0x41, 0x39, 0x30, 0x41, 0x3a, 0x32, 0x3e, 0x3d, 0x39, 0x31, 0x33, 0x3e, + 0x41, 0x47, 0x40, 0x47, 0x35, 0x33, 0x3c, 0x32, 0x40, 0x3c, 0x42, 0x49, + 0x34, 0x38, 0x39, 0x37, 0x39, 0x35, 0x40, 0x4d, 0x37, 0x43, 0x42, 0x3e, + 0x3f, 0x3c, 0x3e, 0x51, 0x36, 0x37, 0x42, 0x41, 0x36, 0x31, 0x43, 0x3d, + 0x46, 0x43, 0x37, 0x46, 0x32, 0x45, 0x42, 0x36, 0x3f, 0x42, 0x42, 0x41, + 0x3d, 0x46, 0x39, 0x41, 0x3c, 0x3f, 0x38, 0x3c, 0x43, 0x43, 0x3d, 0x3c, + 0x3d, 0x41, 0x38, 0x42, 0x3a, 0x3d, 0x43, 0x42, 0x41, 0x40, 0x39, 0x36, + 0x3a, 0x3c, 0x3c, 0x4f, 0x44, 0x36, 0x39, 0x35, 0x46, 0x46, 0x36, 0x4a, + 0x3a, 0x42, 0x43, 0x39, 0x3f, 0x3d, 0x3c, 0x47, 0x38, 0x3f, 0x43, 0x40, + 0x36, 0x3c, 0x45, 0x3b, 0x33, 0x36, 0x3b, 0x39, 0x3c, 0x35, 0x40, 0x38, + 0x40, 0x3e, 0x3f, 0x48, 0x3f, 0x34, 0x40, 0x53, 0x26, 0x2c, 0x29, 0x39, + 0x2a, 0x38, 0x3f, 0x45, 0x32, 0x31, 0x4a, 0x37, 0x1c, 0x28, 0x09, 0x43, + 0x35, 0x3b, 0x33, 0x3c, 0x32, 0x3f, 0x28, 0x41, 0x36, 0x35, 0x3a, 0x37, + 0x41, 0x39, 0x32, 0x3c, 0x40, 0x3c, 0x3c, 0x32, 0x38, 0x39, 0x37, 0x44, + 0x3a, 0x33, 0x41, 0x36, 0x37, 0x3c, 0x35, 0x3a, 0x3d, 0x30, 0x3d, 0x41, + 0x37, 0x3c, 0x45, 0x3a, 0x37, 0x2f, 0x36, 0x3c, 0x3a, 0x3d, 0x39, 0x48, + 0x46, 0x33, 0x3a, 0x3e, 0x40, 
0x3d, 0x3b, 0x52, 0x38, 0x45, 0x34, 0x47, + 0x39, 0x36, 0x37, 0x56, 0x42, 0x3f, 0x33, 0x36, 0x38, 0x3f, 0x40, 0x53, + 0x3e, 0x37, 0x3d, 0x3c, 0x48, 0x3a, 0x3d, 0x33, 0x39, 0x40, 0x3e, 0x35, + 0x3d, 0x46, 0x38, 0x36, 0x37, 0x43, 0x3a, 0x3c, 0x40, 0x38, 0x39, 0x3b, + 0x39, 0x3a, 0x42, 0x3d, 0x34, 0x3f, 0x35, 0x43, 0x3a, 0x35, 0x46, 0x3a, + 0x48, 0x38, 0x3b, 0x48, 0x3c, 0x35, 0x42, 0x3d, 0x3a, 0x3d, 0x38, 0x42, + 0x3e, 0x3c, 0x33, 0x39, 0x34, 0x30, 0x42, 0x44, 0x41, 0x3d, 0x3c, 0x39, + 0x3c, 0x3a, 0x39, 0x41, 0x3d, 0x44, 0x3c, 0x40, 0x3f, 0x3e, 0x42, 0x3f, + 0x37, 0x40, 0x39, 0x3b, 0x42, 0x43, 0x49, 0x37, 0x39, 0x46, 0x35, 0x3c, + 0x3e, 0x39, 0x45, 0x52, 0x24, 0x2d, 0x38, 0x35, 0x3a, 0x3a, 0x3c, 0x44, + 0x39, 0x32, 0x51, 0x3f, 0x16, 0x34, 0x0a, 0x49, 0x39, 0x38, 0x39, 0x3e, + 0x2f, 0x36, 0x24, 0x3f, 0x37, 0x34, 0x38, 0x3b, 0x34, 0x34, 0x30, 0x3b, + 0x3d, 0x36, 0x35, 0x42, 0x33, 0x40, 0x37, 0x35, 0x43, 0x3f, 0x3f, 0x39, + 0x3a, 0x43, 0x36, 0x3e, 0x39, 0x3d, 0x3f, 0x3d, 0x47, 0x3b, 0x39, 0x37, + 0x35, 0x42, 0x3f, 0x3b, 0x41, 0x3a, 0x42, 0x4b, 0x3d, 0x3f, 0x3d, 0x3e, + 0x38, 0x3b, 0x34, 0x4e, 0x3f, 0x39, 0x36, 0x43, 0x39, 0x35, 0x41, 0x4d, + 0x3c, 0x39, 0x43, 0x33, 0x37, 0x3b, 0x41, 0x48, 0x3c, 0x3f, 0x39, 0x32, + 0x35, 0x3d, 0x42, 0x35, 0x3d, 0x3e, 0x37, 0x3b, 0x38, 0x3a, 0x44, 0x36, + 0x42, 0x35, 0x48, 0x40, 0x3a, 0x44, 0x44, 0x39, 0x43, 0x41, 0x3c, 0x37, + 0x47, 0x3b, 0x42, 0x42, 0x45, 0x3a, 0x40, 0x46, 0x35, 0x3f, 0x3a, 0x48, + 0x35, 0x44, 0x3f, 0x37, 0x33, 0x3e, 0x45, 0x49, 0x39, 0x43, 0x47, 0x37, + 0x3f, 0x3f, 0x3b, 0x44, 0x38, 0x3d, 0x39, 0x42, 0x37, 0x3e, 0x40, 0x45, + 0x3b, 0x3f, 0x40, 0x34, 0x42, 0x3f, 0x43, 0x3c, 0x43, 0x41, 0x38, 0x38, + 0x38, 0x41, 0x55, 0x33, 0x33, 0x39, 0x39, 0x3c, 0x35, 0x39, 0x38, 0x42, + 0x27, 0x26, 0x32, 0x41, 0x41, 0x32, 0x3f, 0x47, 0x3a, 0x38, 0x48, 0x37, + 0x11, 0x27, 0x08, 0x49, 0x35, 0x42, 0x3c, 0x2e, 0x34, 0x43, 0x25, 0x3b, + 0x3a, 0x33, 0x37, 0x30, 0x3c, 0x36, 0x2d, 0x3c, 0x3b, 0x39, 0x3b, 0x40, + 0x46, 0x3a, 0x30, 0x42, 0x35, 
0x32, 0x36, 0x3a, 0x3a, 0x34, 0x34, 0x33, + 0x3d, 0x30, 0x3b, 0x42, 0x41, 0x3f, 0x3d, 0x3b, 0x44, 0x3d, 0x41, 0x41, + 0x3d, 0x3f, 0x40, 0x51, 0x42, 0x42, 0x36, 0x45, 0x30, 0x40, 0x32, 0x4f, + 0x3a, 0x3c, 0x40, 0x39, 0x3d, 0x3b, 0x3e, 0x4b, 0x3d, 0x37, 0x42, 0x46, + 0x40, 0x40, 0x47, 0x3d, 0x35, 0x3c, 0x3f, 0x46, 0x37, 0x37, 0x3a, 0x2e, + 0x3d, 0x3c, 0x3a, 0x46, 0x3a, 0x44, 0x3c, 0x3a, 0x32, 0x44, 0x31, 0x41, + 0x43, 0x36, 0x49, 0x39, 0x3d, 0x37, 0x3f, 0x41, 0x3b, 0x3b, 0x3c, 0x42, + 0x3c, 0x34, 0x3f, 0x3b, 0x40, 0x3e, 0x48, 0x47, 0x3e, 0x3c, 0x38, 0x39, + 0x3f, 0x35, 0x39, 0x3f, 0x3e, 0x3e, 0x3b, 0x43, 0x41, 0x40, 0x43, 0x41, + 0x3f, 0x37, 0x39, 0x41, 0x46, 0x32, 0x3d, 0x41, 0x36, 0x3f, 0x3e, 0x3f, + 0x36, 0x48, 0x43, 0x3d, 0x43, 0x3f, 0x34, 0x3d, 0x34, 0x35, 0x4f, 0x32, + 0x3c, 0x3f, 0x3d, 0x3f, 0x39, 0x3c, 0x3d, 0x47, 0x23, 0x36, 0x33, 0x45, + 0x37, 0x2e, 0x42, 0x42, 0x39, 0x34, 0x4f, 0x3f, 0x19, 0x2b, 0x01, 0x50, + 0x35, 0x3f, 0x37, 0x3c, 0x33, 0x35, 0x25, 0x32, 0x38, 0x3e, 0x40, 0x40, + 0x2f, 0x38, 0x35, 0x3d, 0x31, 0x42, 0x44, 0x3c, 0x3a, 0x3d, 0x2d, 0x3e, + 0x3b, 0x3e, 0x3d, 0x31, 0x3b, 0x37, 0x35, 0x31, 0x36, 0x35, 0x34, 0x31, + 0x41, 0x3a, 0x33, 0x32, 0x3c, 0x31, 0x3e, 0x3d, 0x40, 0x3b, 0x34, 0x45, + 0x36, 0x39, 0x3e, 0x3f, 0x3c, 0x45, 0x37, 0x4b, 0x42, 0x3d, 0x33, 0x43, + 0x3e, 0x40, 0x35, 0x4e, 0x38, 0x36, 0x3a, 0x33, 0x38, 0x44, 0x3f, 0x3c, + 0x3f, 0x40, 0x3a, 0x3c, 0x3c, 0x3c, 0x44, 0x29, 0x3a, 0x40, 0x35, 0x3a, + 0x3d, 0x48, 0x3b, 0x30, 0x45, 0x41, 0x45, 0x40, 0x37, 0x32, 0x3a, 0x35, + 0x3f, 0x38, 0x3b, 0x43, 0x3b, 0x3f, 0x33, 0x40, 0x3b, 0x40, 0x38, 0x33, + 0x39, 0x3c, 0x3c, 0x3f, 0x43, 0x33, 0x43, 0x40, 0x43, 0x3d, 0x33, 0x42, + 0x40, 0x32, 0x3e, 0x36, 0x40, 0x38, 0x43, 0x40, 0x44, 0x38, 0x34, 0x3c, + 0x3e, 0x39, 0x47, 0x43, 0x40, 0x3b, 0x3f, 0x3f, 0x3c, 0x3b, 0x4b, 0x33, + 0x36, 0x49, 0x32, 0x41, 0x48, 0x45, 0x57, 0x3a, 0x40, 0x42, 0x40, 0x46, + 0x36, 0x35, 0x3c, 0x46, 0x22, 0x2e, 0x33, 0x3e, 0x3c, 0x39, 0x44, 0x4d, + 0x3f, 0x41, 0x51, 0x44, 0x15, 
0x2e, 0x02, 0x4e, 0x39, 0x3a, 0x3c, 0x35, + 0x30, 0x38, 0x1e, 0x31, 0x40, 0x3b, 0x39, 0x3d, 0x3a, 0x37, 0x35, 0x36, + 0x46, 0x36, 0x3c, 0x3e, 0x39, 0x3e, 0x32, 0x40, 0x3b, 0x35, 0x42, 0x41, + 0x41, 0x38, 0x41, 0x35, 0x42, 0x36, 0x3c, 0x42, 0x3d, 0x41, 0x35, 0x31, + 0x3f, 0x44, 0x3e, 0x41, 0x3f, 0x35, 0x42, 0x4b, 0x3e, 0x36, 0x37, 0x34, + 0x36, 0x3d, 0x40, 0x49, 0x41, 0x3e, 0x3d, 0x3b, 0x38, 0x37, 0x40, 0x47, + 0x35, 0x32, 0x43, 0x38, 0x36, 0x3b, 0x33, 0x47, 0x33, 0x34, 0x3d, 0x47, + 0x3c, 0x37, 0x3d, 0x2b, 0x3a, 0x36, 0x3b, 0x3d, 0x43, 0x38, 0x35, 0x32, + 0x32, 0x37, 0x43, 0x36, 0x3f, 0x48, 0x38, 0x30, 0x3a, 0x3c, 0x42, 0x34, + 0x37, 0x3c, 0x37, 0x40, 0x48, 0x3e, 0x35, 0x3b, 0x3f, 0x38, 0x39, 0x3e, + 0x37, 0x35, 0x36, 0x3d, 0x3b, 0x3c, 0x40, 0x3d, 0x34, 0x40, 0x46, 0x42, + 0x3f, 0x3c, 0x3c, 0x3e, 0x40, 0x40, 0x3d, 0x3f, 0x3f, 0x44, 0x46, 0x41, + 0x32, 0x43, 0x40, 0x41, 0x3c, 0x42, 0x39, 0x38, 0x48, 0x44, 0x3d, 0x38, + 0x34, 0x40, 0x4e, 0x31, 0x3c, 0x42, 0x39, 0x48, 0x3c, 0x33, 0x3e, 0x40, + 0x20, 0x27, 0x39, 0x45, 0x45, 0x36, 0x47, 0x4c, 0x35, 0x3e, 0x4a, 0x36, + 0x16, 0x2f, 0x04, 0x4f, 0x3a, 0x35, 0x36, 0x3a, 0x2d, 0x36, 0x21, 0x34, + 0x3b, 0x32, 0x3d, 0x3c, 0x3c, 0x3f, 0x3b, 0x3b, 0x41, 0x46, 0x40, 0x3d, + 0x3b, 0x44, 0x33, 0x42, 0x34, 0x33, 0x3e, 0x45, 0x3f, 0x46, 0x39, 0x33, + 0x3b, 0x37, 0x37, 0x37, 0x42, 0x47, 0x3c, 0x35, 0x31, 0x41, 0x44, 0x3a, + 0x3b, 0x33, 0x39, 0x44, 0x42, 0x33, 0x3d, 0x3f, 0x43, 0x33, 0x41, 0x4a, + 0x35, 0x46, 0x36, 0x3e, 0x39, 0x41, 0x41, 0x4c, 0x34, 0x3d, 0x38, 0x33, + 0x3c, 0x3f, 0x43, 0x44, 0x37, 0x35, 0x35, 0x3c, 0x43, 0x34, 0x3e, 0x2d, + 0x3f, 0x35, 0x38, 0x3c, 0x33, 0x35, 0x43, 0x2a, 0x40, 0x33, 0x34, 0x40, + 0x3d, 0x38, 0x36, 0x2d, 0x36, 0x3c, 0x43, 0x3d, 0x37, 0x3d, 0x39, 0x38, + 0x3b, 0x3e, 0x3c, 0x46, 0x35, 0x35, 0x43, 0x44, 0x39, 0x40, 0x34, 0x39, + 0x3d, 0x34, 0x40, 0x45, 0x38, 0x35, 0x3e, 0x39, 0x3c, 0x44, 0x48, 0x44, + 0x41, 0x3e, 0x3c, 0x45, 0x3a, 0x3c, 0x3c, 0x46, 0x3a, 0x40, 0x39, 0x43, + 0x35, 0x35, 0x3e, 0x45, 0x3a, 
0x34, 0x3c, 0x39, 0x46, 0x3a, 0x4f, 0x35, + 0x32, 0x3d, 0x36, 0x41, 0x32, 0x38, 0x3f, 0x45, 0x2d, 0x34, 0x2a, 0x35, + 0x43, 0x3f, 0x41, 0x49, 0x41, 0x3c, 0x4b, 0x3f, 0x17, 0x31, 0x02, 0x4f, + 0x30, 0x38, 0x39, 0x40, 0x33, 0x3a, 0x25, 0x38, 0x35, 0x3c, 0x39, 0x35, + 0x34, 0x41, 0x34, 0x43, 0x40, 0x40, 0x46, 0x3d, 0x40, 0x38, 0x3f, 0x3b, + 0x35, 0x39, 0x3c, 0x39, 0x34, 0x38, 0x3f, 0x36, 0x3a, 0x38, 0x44, 0x3f, + 0x3f, 0x38, 0x3c, 0x33, 0x41, 0x42, 0x38, 0x33, 0x3c, 0x3b, 0x3c, 0x46, + 0x38, 0x3b, 0x3f, 0x33, 0x3f, 0x48, 0x3b, 0x49, 0x3f, 0x3a, 0x3d, 0x3f, + 0x47, 0x3d, 0x30, 0x45, 0x36, 0x42, 0x3d, 0x36, 0x43, 0x38, 0x3b, 0x3d, + 0x3c, 0x30, 0x3b, 0x43, 0x3d, 0x41, 0x34, 0x2e, 0x43, 0x3d, 0x43, 0x46, + 0x43, 0x3c, 0x3c, 0x2e, 0x3c, 0x43, 0x34, 0x43, 0x3e, 0x43, 0x3f, 0x2b, + 0x45, 0x40, 0x3a, 0x43, 0x36, 0x39, 0x3f, 0x3d, 0x3a, 0x3c, 0x35, 0x3b, + 0x36, 0x3f, 0x45, 0x3e, 0x45, 0x40, 0x3f, 0x36, 0x45, 0x42, 0x35, 0x3e, + 0x3a, 0x3a, 0x3f, 0x40, 0x3e, 0x3c, 0x39, 0x46, 0x43, 0x3e, 0x3f, 0x3f, + 0x40, 0x3c, 0x40, 0x4b, 0x41, 0x35, 0x3b, 0x3e, 0x49, 0x32, 0x3e, 0x41, + 0x31, 0x37, 0x3d, 0x3b, 0x3f, 0x45, 0x50, 0x3a, 0x3f, 0x3c, 0x44, 0x36, + 0x43, 0x37, 0x3d, 0x4b, 0x29, 0x39, 0x2f, 0x38, 0x45, 0x36, 0x40, 0x4e, + 0x39, 0x3f, 0x48, 0x43, 0x23, 0x3c, 0x06, 0x51, 0x37, 0x3b, 0x3e, 0x3b, + 0x28, 0x45, 0x2b, 0x37, 0x3f, 0x33, 0x3f, 0x41, 0x31, 0x36, 0x33, 0x3a, + 0x3a, 0x35, 0x3b, 0x33, 0x3e, 0x36, 0x35, 0x40, 0x3a, 0x34, 0x3a, 0x38, + 0x34, 0x3a, 0x3a, 0x34, 0x42, 0x45, 0x40, 0x3e, 0x40, 0x38, 0x39, 0x34, + 0x38, 0x37, 0x3f, 0x3e, 0x3c, 0x32, 0x3f, 0x46, 0x3f, 0x44, 0x3b, 0x3e, + 0x44, 0x45, 0x36, 0x3e, 0x36, 0x3f, 0x3b, 0x40, 0x39, 0x34, 0x38, 0x41, + 0x42, 0x3e, 0x3d, 0x47, 0x3e, 0x45, 0x33, 0x40, 0x3e, 0x3a, 0x44, 0x3d, + 0x3c, 0x3a, 0x3a, 0x2c, 0x3a, 0x3d, 0x35, 0x45, 0x3c, 0x41, 0x36, 0x30, + 0x32, 0x32, 0x3a, 0x3b, 0x35, 0x3c, 0x43, 0x2d, 0x35, 0x3f, 0x41, 0x37, + 0x3f, 0x46, 0x34, 0x39, 0x3c, 0x43, 0x40, 0x3e, 0x3e, 0x36, 0x3e, 0x3c, + 0x37, 0x3a, 0x3d, 0x3a, 0x3c, 
0x38, 0x44, 0x41, 0x3f, 0x3b, 0x3c, 0x47, + 0x40, 0x3b, 0x41, 0x47, 0x3e, 0x45, 0x39, 0x3e, 0x37, 0x45, 0x4b, 0x4c, + 0x37, 0x37, 0x37, 0x3c, 0x3c, 0x3d, 0x40, 0x38, 0x39, 0x3e, 0x43, 0x3f, + 0x38, 0x45, 0x51, 0x3c, 0x31, 0x34, 0x3b, 0x48, 0x46, 0x41, 0x40, 0x40, + 0x2c, 0x39, 0x32, 0x42, 0x3c, 0x2e, 0x49, 0x4d, 0x3c, 0x3f, 0x45, 0x38, + 0x20, 0x38, 0x03, 0x55, 0x33, 0x3e, 0x32, 0x39, 0x32, 0x3b, 0x24, 0x2b, + 0x42, 0x35, 0x45, 0x32, 0x2e, 0x3b, 0x2f, 0x3f, 0x3c, 0x37, 0x39, 0x3b, + 0x34, 0x34, 0x3d, 0x36, 0x3d, 0x39, 0x3b, 0x30, 0x3c, 0x3e, 0x40, 0x32, + 0x3d, 0x3c, 0x3c, 0x3e, 0x33, 0x33, 0x3f, 0x3a, 0x33, 0x3e, 0x46, 0x36, + 0x3a, 0x3d, 0x40, 0x40, 0x3f, 0x41, 0x3a, 0x42, 0x34, 0x32, 0x34, 0x46, + 0x3b, 0x31, 0x40, 0x37, 0x37, 0x32, 0x3e, 0x47, 0x3f, 0x3b, 0x3e, 0x43, + 0x49, 0x45, 0x3a, 0x3d, 0x3e, 0x44, 0x40, 0x31, 0x39, 0x3e, 0x3b, 0x2d, + 0x3b, 0x3a, 0x33, 0x3d, 0x39, 0x37, 0x3e, 0x32, 0x41, 0x3c, 0x3a, 0x37, + 0x3b, 0x40, 0x39, 0x2f, 0x3e, 0x3f, 0x47, 0x32, 0x3e, 0x3b, 0x3e, 0x3e, + 0x40, 0x3e, 0x40, 0x3c, 0x41, 0x39, 0x38, 0x46, 0x45, 0x32, 0x47, 0x31, + 0x36, 0x47, 0x37, 0x49, 0x3a, 0x3f, 0x47, 0x3a, 0x41, 0x3b, 0x3c, 0x4f, + 0x3e, 0x36, 0x3b, 0x47, 0x35, 0x39, 0x41, 0x4e, 0x3d, 0x3e, 0x3b, 0x46, + 0x38, 0x39, 0x3b, 0x45, 0x3e, 0x3f, 0x44, 0x42, 0x44, 0x3f, 0x55, 0x3b, + 0x41, 0x3d, 0x43, 0x43, 0x37, 0x3f, 0x3d, 0x4c, 0x28, 0x3d, 0x36, 0x3c, + 0x3e, 0x3e, 0x48, 0x50, 0x3e, 0x39, 0x45, 0x41, 0x22, 0x37, 0x07, 0x4f, + 0x2e, 0x33, 0x38, 0x3f, 0x31, 0x3a, 0x1b, 0x36, 0x34, 0x38, 0x3c, 0x37, + 0x37, 0x3e, 0x36, 0x35, 0x36, 0x3b, 0x3d, 0x38, 0x42, 0x48, 0x3d, 0x40, + 0x40, 0x44, 0x3d, 0x39, 0x37, 0x3b, 0x3d, 0x33, 0x3d, 0x35, 0x42, 0x3c, + 0x39, 0x3e, 0x43, 0x2d, 0x3c, 0x40, 0x43, 0x43, 0x45, 0x35, 0x3c, 0x44, + 0x34, 0x3c, 0x3d, 0x31, 0x39, 0x40, 0x39, 0x3d, 0x3e, 0x34, 0x3e, 0x3b, + 0x40, 0x38, 0x42, 0x4a, 0x40, 0x3b, 0x35, 0x3d, 0x36, 0x38, 0x35, 0x42, + 0x3c, 0x3c, 0x3d, 0x3b, 0x38, 0x39, 0x45, 0x28, 0x3a, 0x37, 0x37, 0x35, + 0x3a, 0x3d, 0x35, 0x2a, 0x3c, 
0x3f, 0x37, 0x34, 0x37, 0x3f, 0x3e, 0x2b, + 0x39, 0x43, 0x3b, 0x45, 0x35, 0x36, 0x36, 0x42, 0x33, 0x38, 0x3b, 0x35, + 0x31, 0x3f, 0x41, 0x41, 0x3c, 0x41, 0x45, 0x42, 0x3b, 0x3c, 0x39, 0x46, + 0x3c, 0x3e, 0x3a, 0x41, 0x39, 0x3d, 0x41, 0x4b, 0x40, 0x3f, 0x43, 0x3d, + 0x39, 0x39, 0x44, 0x44, 0x37, 0x42, 0x3f, 0x44, 0x3e, 0x37, 0x42, 0x35, + 0x44, 0x3f, 0x40, 0x42, 0x3f, 0x3a, 0x47, 0x3d, 0x38, 0x3a, 0x3b, 0x3a, + 0x42, 0x36, 0x3a, 0x97, 0x32, 0x31, 0x30, 0x36, 0x47, 0x3e, 0x46, 0x51, + 0x42, 0x34, 0x50, 0x34, 0x26, 0x3b, 0x06, 0x55, 0x3c, 0x3b, 0x2d, 0x3a, + 0x37, 0x37, 0x1b, 0x32, 0x39, 0x3d, 0x36, 0x40, 0x3b, 0x3f, 0x33, 0x33, + 0x3d, 0x37, 0x35, 0x37, 0x44, 0x3f, 0x35, 0x39, 0x33, 0x3c, 0x43, 0x39, + 0x3f, 0x42, 0x3e, 0x34, 0x38, 0x38, 0x39, 0x3c, 0x48, 0x3c, 0x2f, 0x30, + 0x40, 0x3c, 0x41, 0x3e, 0x3f, 0x3e, 0x36, 0x43, 0x40, 0x3c, 0x36, 0x43, + 0x43, 0x38, 0x3a, 0x47, 0x3e, 0x37, 0x39, 0x3a, 0x43, 0x45, 0x38, 0x43, + 0x3b, 0x45, 0x37, 0x44, 0x36, 0x45, 0x3a, 0x3e, 0x3e, 0x3e, 0x3d, 0x33, + 0x39, 0x36, 0x48, 0x33, 0x30, 0x42, 0x33, 0x39, 0x37, 0x3a, 0x3f, 0x34, + 0x34, 0x40, 0x40, 0x40, 0x3f, 0x3d, 0x3f, 0x33, 0x41, 0x40, 0x3b, 0x43, + 0x3b, 0x3a, 0x40, 0x3a, 0x38, 0x3e, 0x38, 0x3b, 0x38, 0x42, 0x40, 0x40, + 0x41, 0x35, 0x37, 0x38, 0x3b, 0x3c, 0x39, 0x4b, 0x32, 0x39, 0x42, 0x3c, + 0x36, 0x3d, 0x32, 0x52, 0x3a, 0x31, 0x40, 0x40, 0x3a, 0x43, 0x3d, 0x46, + 0x3c, 0x3e, 0x3e, 0x33, 0x3f, 0x41, 0x4d, 0x37, 0x39, 0x39, 0x3e, 0x3b, + 0x40, 0x39, 0x53, 0x2d, 0x46, 0x3c, 0x32, 0x42, 0x3d, 0x40, 0x40, 0x4d, + 0x2e, 0x34, 0x39, 0x3b, 0x46, 0x3b, 0x42, 0x4f, 0x3d, 0x39, 0x4e, 0x36, + 0x1a, 0x31, 0x0e, 0x56, 0x36, 0x42, 0x38, 0x44, 0x36, 0x3a, 0x20, 0x30, + 0x36, 0x34, 0x37, 0x38, 0x40, 0x41, 0x2a, 0x35, 0x3b, 0x3b, 0x3a, 0x38, + 0x33, 0x39, 0x36, 0x41, 0x43, 0x39, 0x35, 0x3d, 0x37, 0x3d, 0x33, 0x31, + 0x45, 0x33, 0x3f, 0x3b, 0x44, 0x38, 0x39, 0x34, 0x38, 0x39, 0x38, 0x3d, + 0x3a, 0x3a, 0x41, 0x40, 0x44, 0x3e, 0x3f, 0x45, 0x34, 0x31, 0x34, 0x43, + 0x3b, 0x34, 0x42, 0x3c, 0x3c, 
0x43, 0x35, 0x45, 0x36, 0x38, 0x3d, 0x3c, + 0x3f, 0x3d, 0x3e, 0x45, 0x41, 0x43, 0x35, 0x3f, 0x40, 0x3f, 0x3a, 0x34, + 0x3d, 0x32, 0x41, 0x3d, 0x48, 0x42, 0x37, 0x2a, 0x3c, 0x3a, 0x3e, 0x49, + 0x38, 0x36, 0x38, 0x2e, 0x36, 0x37, 0x34, 0x3e, 0x3c, 0x43, 0x43, 0x39, + 0x39, 0x3b, 0x44, 0x46, 0x44, 0x43, 0x37, 0x46, 0x43, 0x34, 0x3b, 0x35, + 0x42, 0x41, 0x3f, 0x3d, 0x3d, 0x3a, 0x42, 0x3e, 0x38, 0x47, 0x3d, 0x49, + 0x45, 0x49, 0x3a, 0x3c, 0x3e, 0x37, 0x40, 0x46, 0x41, 0x33, 0x45, 0x36, + 0x37, 0x44, 0x49, 0x3b, 0x44, 0x40, 0x33, 0x46, 0x37, 0x39, 0x4e, 0x3a, + 0x43, 0x38, 0x3a, 0x42, 0x3a, 0x3d, 0x45, 0x50, 0x26, 0x34, 0x3b, 0x3c, + 0x46, 0x46, 0x4c, 0x54, 0x3f, 0x35, 0x4e, 0x47, 0x21, 0x39, 0x0e, 0x54, + 0x3a, 0x3a, 0x2f, 0x40, 0x2d, 0x3a, 0x1f, 0x31, 0x31, 0x42, 0x34, 0x45, + 0x37, 0x36, 0x30, 0x3b, 0x3a, 0x3a, 0x36, 0x40, 0x32, 0x36, 0x3c, 0x3c, + 0x37, 0x42, 0x35, 0x3e, 0x39, 0x47, 0x36, 0x32, 0x41, 0x30, 0x42, 0x39, + 0x39, 0x44, 0x37, 0x30, 0x41, 0x3b, 0x3d, 0x3d, 0x43, 0x3b, 0x38, 0x45, + 0x3b, 0x3a, 0x39, 0x3a, 0x31, 0x33, 0x43, 0x46, 0x3f, 0x41, 0x44, 0x3f, + 0x3b, 0x44, 0x3a, 0x4c, 0x33, 0x33, 0x33, 0x3e, 0x37, 0x3e, 0x45, 0x45, + 0x36, 0x42, 0x3e, 0x43, 0x40, 0x34, 0x36, 0x31, 0x38, 0x34, 0x41, 0x3b, + 0x32, 0x38, 0x3e, 0x29, 0x47, 0x33, 0x37, 0x45, 0x3c, 0x3d, 0x43, 0x2c, + 0x36, 0x3a, 0x3c, 0x40, 0x3d, 0x46, 0x3c, 0x37, 0x40, 0x44, 0x37, 0x38, + 0x3e, 0x41, 0x3c, 0x40, 0x33, 0x3f, 0x44, 0x32, 0x44, 0x3a, 0x43, 0x42, + 0x3e, 0x38, 0x44, 0x3b, 0x41, 0x48, 0x3f, 0x4e, 0x3f, 0x44, 0x35, 0x45, + 0x34, 0x3f, 0x42, 0x4b, 0x37, 0x37, 0x3e, 0x45, 0x46, 0x45, 0x46, 0x3d, + 0x3e, 0x39, 0x3b, 0x3a, 0x46, 0x3a, 0x56, 0x35, 0x46, 0x3d, 0x40, 0x3b, + 0x36, 0x39, 0x3f, 0x54, 0x27, 0x2b, 0x34, 0x3c, 0x48, 0x3d, 0x49, 0x4c, + 0x3e, 0x3d, 0x4e, 0x42, 0x25, 0x3b, 0x10, 0x4d, 0x30, 0x36, 0x3e, 0x36, + 0x2e, 0x31, 0x1d, 0x37, 0x3a, 0x39, 0x33, 0x3f, 0x39, 0x38, 0x2e, 0x36, + 0x44, 0x3e, 0x41, 0x37, 0x3b, 0x30, 0x3b, 0x48, 0x31, 0x39, 0x41, 0x3e, + 0x37, 0x37, 0x34, 0x2f, 0x35, 
0x3b, 0x3a, 0x3e, 0x45, 0x3e, 0x3f, 0x35, + 0x39, 0x39, 0x3b, 0x44, 0x43, 0x3c, 0x3e, 0x46, 0x40, 0x3a, 0x36, 0x45, + 0x41, 0x40, 0x36, 0x44, 0x3a, 0x37, 0x47, 0x47, 0x3d, 0x36, 0x43, 0x4e, + 0x3b, 0x38, 0x40, 0x48, 0x44, 0x43, 0x45, 0x3f, 0x43, 0x3c, 0x3b, 0x37, + 0x43, 0x41, 0x39, 0x2f, 0x3d, 0x45, 0x3e, 0x3e, 0x42, 0x40, 0x41, 0x2f, + 0x47, 0x38, 0x3a, 0x48, 0x3e, 0x35, 0x37, 0x2a, 0x34, 0x38, 0x41, 0x3b, + 0x3d, 0x37, 0x3b, 0x35, 0x38, 0x3e, 0x41, 0x3c, 0x41, 0x43, 0x3d, 0x46, + 0x47, 0x47, 0x3d, 0x35, 0x48, 0x41, 0x3d, 0x3e, 0x34, 0x47, 0x38, 0x38, + 0x39, 0x3e, 0x38, 0x4d, 0x43, 0x36, 0x42, 0x40, 0x3e, 0x41, 0x3f, 0x4c, + 0x3e, 0x3e, 0x37, 0x44, 0x3e, 0x3b, 0x47, 0x3e, 0x3f, 0x3b, 0x39, 0x3c, + 0x3c, 0x3c, 0x53, 0x3b, 0x3b, 0x32, 0x3e, 0x3f, 0x32, 0x3c, 0x37, 0x4b, + 0x33, 0x30, 0x2f, 0x41, 0x47, 0x42, 0x49, 0x4f, 0x3b, 0x42, 0x4c, 0x44, + 0x1f, 0x37, 0x16, 0x4e, 0x3b, 0x3f, 0x30, 0x36, 0x35, 0x38, 0x26, 0x36, + 0x32, 0x3b, 0x38, 0x3c, 0x30, 0x3e, 0x34, 0x3e, 0x3d, 0x34, 0x39, 0x3c, + 0x36, 0x47, 0x34, 0x41, 0x31, 0x39, 0x44, 0x3e, 0x39, 0x41, 0x32, 0x36, + 0x3b, 0x3f, 0x32, 0x3d, 0x36, 0x3e, 0x40, 0x3d, 0x45, 0x32, 0x45, 0x42, + 0x38, 0x43, 0x40, 0x42, 0x34, 0x3a, 0x43, 0x38, 0x47, 0x3f, 0x41, 0x47, + 0x34, 0x44, 0x41, 0x39, 0x3c, 0x46, 0x36, 0x4f, 0x41, 0x3e, 0x38, 0x38, + 0x3a, 0x3b, 0x43, 0x44, 0x37, 0x3f, 0x35, 0x43, 0x34, 0x3d, 0x40, 0x32, + 0x3a, 0x3b, 0x3d, 0x34, 0x35, 0x43, 0x31, 0x2c, 0x3b, 0x36, 0x38, 0x41, + 0x3c, 0x38, 0x3d, 0x31, 0x45, 0x46, 0x42, 0x41, 0x33, 0x3f, 0x3f, 0x3a, + 0x36, 0x3f, 0x3c, 0x3c, 0x3c, 0x3e, 0x39, 0x3e, 0x40, 0x37, 0x47, 0x3e, + 0x35, 0x39, 0x3d, 0x3d, 0x37, 0x36, 0x3e, 0x45, 0x38, 0x3d, 0x45, 0x43, + 0x3a, 0x32, 0x3b, 0x3a, 0x32, 0x3c, 0x3d, 0x43, 0x3d, 0x33, 0x3b, 0x3d, + 0x46, 0x3a, 0x44, 0x45, 0x3b, 0x3e, 0x3c, 0x42, 0x37, 0x37, 0x52, 0x2a, + 0x3a, 0x35, 0x35, 0x3f, 0x40, 0x38, 0x40, 0x5b, 0x35, 0x32, 0x2b, 0x3d, + 0x4a, 0x3c, 0x46, 0x56, 0x44, 0x30, 0x4d, 0x39, 0x20, 0x32, 0x0f, 0x4f, + 0x33, 0x3c, 0x35, 0x35, 0x3a, 
0x45, 0x29, 0x3b, 0x31, 0x38, 0x34, 0x38, + 0x42, 0x45, 0x37, 0x3e, 0x37, 0x2e, 0x36, 0x43, 0x3f, 0x38, 0x2f, 0x41, + 0x3f, 0x41, 0x3c, 0x31, 0x37, 0x36, 0x37, 0x39, 0x41, 0x3a, 0x3a, 0x40, + 0x3e, 0x47, 0x3d, 0x37, 0x3c, 0x38, 0x35, 0x39, 0x3a, 0x43, 0x3f, 0x42, + 0x42, 0x38, 0x3e, 0x40, 0x3c, 0x3a, 0x45, 0x48, 0x37, 0x3a, 0x3e, 0x35, + 0x3a, 0x3d, 0x45, 0x4a, 0x3d, 0x37, 0x38, 0x3a, 0x3d, 0x46, 0x46, 0x41, + 0x37, 0x41, 0x40, 0x48, 0x37, 0x34, 0x3b, 0x2c, 0x39, 0x34, 0x37, 0x35, + 0x3a, 0x43, 0x39, 0x2e, 0x39, 0x3f, 0x40, 0x3e, 0x40, 0x40, 0x3c, 0x2d, + 0x3e, 0x3c, 0x37, 0x39, 0x3c, 0x3b, 0x3d, 0x3f, 0x41, 0x48, 0x3b, 0x3d, + 0x3b, 0x41, 0x45, 0x3e, 0x3a, 0x38, 0x3f, 0x3c, 0x3d, 0x3e, 0x40, 0x42, + 0x46, 0x38, 0x43, 0x34, 0x35, 0x47, 0x3d, 0x46, 0x3f, 0x3e, 0x32, 0x3f, + 0x3e, 0x3d, 0x47, 0x46, 0x38, 0x41, 0x45, 0x3f, 0x34, 0x3f, 0x41, 0x43, + 0x3e, 0x3e, 0x44, 0x3b, 0x3b, 0x36, 0x51, 0x32, 0x37, 0x3c, 0x42, 0x43, + 0x33, 0x39, 0x42, 0x61, 0x2c, 0x3b, 0x2e, 0x39, 0x42, 0x39, 0x42, 0x54, + 0x3c, 0x3a, 0x48, 0x35, 0x26, 0x34, 0x15, 0x51, 0x35, 0x40, 0x36, 0x3c, + 0x2d, 0x37, 0x25, 0x38, 0x33, 0x3d, 0x3d, 0x39, 0x3e, 0x3b, 0x2e, 0x4b, + 0x3d, 0x3b, 0x42, 0x37, 0x37, 0x40, 0x37, 0x40, 0x35, 0x45, 0x37, 0x37, + 0x3f, 0x41, 0x36, 0x39, 0x3c, 0x32, 0x3e, 0x38, 0x41, 0x40, 0x3e, 0x3f, + 0x3b, 0x3c, 0x43, 0x35, 0x3e, 0x3d, 0x44, 0x44, 0x3a, 0x36, 0x39, 0x3f, + 0x3a, 0x31, 0x42, 0x4d, 0x40, 0x33, 0x40, 0x45, 0x44, 0x3d, 0x40, 0x49, + 0x41, 0x3f, 0x42, 0x3a, 0x34, 0x46, 0x38, 0x46, 0x42, 0x34, 0x3a, 0x40, + 0x40, 0x41, 0x3d, 0x32, 0x35, 0x48, 0x35, 0x3e, 0x44, 0x41, 0x40, 0x2c, + 0x46, 0x38, 0x38, 0x3f, 0x36, 0x40, 0x38, 0x2a, 0x43, 0x41, 0x3e, 0x35, + 0x46, 0x3a, 0x45, 0x46, 0x46, 0x42, 0x3a, 0x3b, 0x40, 0x38, 0x35, 0x43, + 0x38, 0x3d, 0x3b, 0x41, 0x36, 0x44, 0x3f, 0x3f, 0x34, 0x3e, 0x3c, 0x3d, + 0x49, 0x36, 0x37, 0x4b, 0x38, 0x3c, 0x43, 0x37, 0x3a, 0x3f, 0x31, 0x45, + 0x3b, 0x39, 0x3f, 0x40, 0x37, 0x3c, 0x42, 0x3f, 0x3c, 0x33, 0x40, 0x3b, + 0x32, 0x3c, 0x52, 0x31, 0x3d, 
0x44, 0x3b, 0x31, 0x46, 0x38, 0x40, 0x60, + 0x2b, 0x3c, 0x37, 0x34, 0x43, 0x38, 0x45, 0x57, 0x37, 0x39, 0x49, 0x33, + 0x2d, 0x3f, 0x18, 0x4e, 0x39, 0x39, 0x32, 0x3b, 0x34, 0x3b, 0x2c, 0x45, + 0x33, 0x37, 0x45, 0x42, 0x3d, 0x37, 0x2a, 0x4c, 0x3d, 0x3f, 0x3c, 0x36, + 0x37, 0x3c, 0x39, 0x47, 0x3d, 0x44, 0x3d, 0x40, 0x3d, 0x41, 0x34, 0x3e, + 0x40, 0x34, 0x3b, 0x3a, 0x41, 0x36, 0x37, 0x40, 0x3e, 0x3f, 0x3a, 0x36, + 0x3e, 0x35, 0x3b, 0x48, 0x41, 0x40, 0x3c, 0x42, 0x34, 0x41, 0x3f, 0x44, + 0x34, 0x39, 0x33, 0x39, 0x39, 0x47, 0x40, 0x48, 0x38, 0x3a, 0x43, 0x43, + 0x48, 0x3a, 0x3f, 0x46, 0x35, 0x3a, 0x33, 0x36, 0x32, 0x3c, 0x40, 0x34, + 0x40, 0x3a, 0x42, 0x3a, 0x39, 0x38, 0x41, 0x35, 0x3a, 0x3f, 0x35, 0x40, + 0x3f, 0x39, 0x39, 0x36, 0x38, 0x40, 0x3e, 0x3e, 0x3a, 0x31, 0x32, 0x44, + 0x40, 0x47, 0x3a, 0x3c, 0x43, 0x43, 0x46, 0x48, 0x40, 0x35, 0x3d, 0x37, + 0x44, 0x37, 0x33, 0x44, 0x3b, 0x3e, 0x3f, 0x37, 0x36, 0x3a, 0x38, 0x47, + 0x3a, 0x44, 0x36, 0x42, 0x3e, 0x44, 0x34, 0x46, 0x33, 0x43, 0x44, 0x3e, + 0x30, 0x48, 0x37, 0x38, 0x33, 0x3c, 0x46, 0x42, 0x38, 0x3d, 0x50, 0x39, + 0x33, 0x38, 0x3e, 0x40, 0x3b, 0x2b, 0x3b, 0x5f, 0x2b, 0x32, 0x2f, 0x37, + 0x3f, 0x3a, 0x40, 0x4e, 0x34, 0x38, 0x47, 0x37, 0x27, 0x2b, 0x1b, 0x4f, + 0x36, 0x38, 0x3a, 0x3a, 0x3b, 0x38, 0x2e, 0x3f, 0x3f, 0x42, 0x42, 0x42, + 0x36, 0x3e, 0x3c, 0x55, 0x39, 0x40, 0x44, 0x43, 0x3e, 0x33, 0x3c, 0x43, + 0x38, 0x44, 0x3b, 0x46, 0x3f, 0x45, 0x34, 0x38, 0x3c, 0x41, 0x42, 0x3d, + 0x42, 0x36, 0x43, 0x3f, 0x3c, 0x39, 0x3e, 0x39, 0x39, 0x42, 0x33, 0x47, + 0x36, 0x3d, 0x3f, 0x3b, 0x40, 0x39, 0x3b, 0x49, 0x36, 0x40, 0x3d, 0x41, + 0x40, 0x34, 0x3b, 0x4e, 0x3b, 0x36, 0x3b, 0x45, 0x40, 0x32, 0x3b, 0x49, + 0x37, 0x38, 0x3a, 0x47, 0x37, 0x40, 0x3e, 0x38, 0x40, 0x3f, 0x3c, 0x3a, + 0x47, 0x41, 0x42, 0x30, 0x40, 0x3c, 0x42, 0x3f, 0x31, 0x44, 0x39, 0x38, + 0x3b, 0x38, 0x42, 0x43, 0x41, 0x35, 0x3a, 0x39, 0x3e, 0x38, 0x39, 0x3e, + 0x3c, 0x42, 0x3d, 0x49, 0x47, 0x3c, 0x3f, 0x35, 0x41, 0x3a, 0x36, 0x43, + 0x43, 0x3b, 0x39, 0x3b, 0x36, 
0x43, 0x43, 0x4e, 0x3e, 0x35, 0x37, 0x3b, + 0x3f, 0x37, 0x41, 0x48, 0x32, 0x44, 0x43, 0x32, 0x38, 0x39, 0x45, 0x39, + 0x3e, 0x3d, 0x35, 0x39, 0x35, 0x39, 0x50, 0x37, 0x39, 0x40, 0x43, 0x47, + 0x32, 0x2a, 0x40, 0x62, 0x24, 0x30, 0x36, 0x3e, 0x41, 0x32, 0x47, 0x58, + 0x39, 0x36, 0x44, 0x34, 0x26, 0x34, 0x1e, 0x50, 0x3c, 0x3b, 0x3f, 0x42, + 0x35, 0x3d, 0x2a, 0x4e, 0x40, 0x38, 0x36, 0x31, 0x3a, 0x30, 0x37, 0x4b, + 0x3c, 0x3b, 0x3b, 0x41, 0x3b, 0x3c, 0x2e, 0x45, 0x44, 0x3f, 0x3b, 0x35, + 0x3e, 0x33, 0x37, 0x3d, 0x40, 0x39, 0x39, 0x37, 0x40, 0x3e, 0x3a, 0x3e, + 0x3c, 0x3c, 0x45, 0x40, 0x3c, 0x3f, 0x3a, 0x51, 0x47, 0x3a, 0x34, 0x39, + 0x3b, 0x34, 0x44, 0x4c, 0x36, 0x3d, 0x3a, 0x35, 0x34, 0x36, 0x38, 0x4b, + 0x3f, 0x40, 0x3f, 0x3e, 0x40, 0x41, 0x47, 0x43, 0x32, 0x38, 0x46, 0x44, + 0x46, 0x43, 0x43, 0x37, 0x39, 0x49, 0x37, 0x36, 0x3e, 0x3d, 0x37, 0x3c, + 0x39, 0x37, 0x34, 0x43, 0x45, 0x32, 0x3a, 0x3a, 0x38, 0x43, 0x3b, 0x40, + 0x3b, 0x3f, 0x3d, 0x41, 0x40, 0x3d, 0x3a, 0x3b, 0x48, 0x37, 0x3d, 0x41, + 0x40, 0x3e, 0x38, 0x41, 0x3d, 0x3a, 0x38, 0x49, 0x40, 0x3c, 0x42, 0x41, + 0x3a, 0x38, 0x38, 0x4c, 0x3e, 0x41, 0x40, 0x3b, 0x3d, 0x3e, 0x3c, 0x46, + 0x3e, 0x42, 0x41, 0x38, 0x42, 0x42, 0x41, 0x3e, 0x3e, 0x37, 0x3c, 0x43, + 0x43, 0x3b, 0x54, 0x2b, 0x45, 0x3b, 0x43, 0x41, 0x41, 0x26, 0x3f, 0x60, + 0x25, 0x2b, 0x2e, 0x3a, 0x40, 0x31, 0x40, 0x49, 0x40, 0x31, 0x46, 0x3c, + 0x1e, 0x2a, 0x1a, 0x47, 0x33, 0x37, 0x37, 0x34, 0x31, 0x36, 0x25, 0x41, + 0x2e, 0x36, 0x35, 0x33, 0x33, 0x34, 0x31, 0x45, 0x3a, 0x3f, 0x3d, 0x40, + 0x3c, 0x41, 0x30, 0x3c, 0x3f, 0x46, 0x37, 0x3c, 0x3a, 0x3c, 0x36, 0x3a, + 0x47, 0x3d, 0x31, 0x3f, 0x40, 0x3e, 0x36, 0x44, 0x41, 0x3d, 0x36, 0x3f, + 0x37, 0x3f, 0x34, 0x4b, 0x31, 0x47, 0x43, 0x3e, 0x3e, 0x3a, 0x3b, 0x4b, + 0x37, 0x32, 0x38, 0x3d, 0x37, 0x47, 0x46, 0x4d, 0x36, 0x3c, 0x3f, 0x3a, + 0x41, 0x31, 0x47, 0x43, 0x3d, 0x3d, 0x3e, 0x35, 0x3d, 0x46, 0x49, 0x2a, + 0x37, 0x3c, 0x39, 0x3d, 0x47, 0x3c, 0x34, 0x2c, 0x3e, 0x38, 0x47, 0x32, + 0x36, 0x36, 0x41, 0x38, 0x35, 
0x44, 0x48, 0x3b, 0x39, 0x3e, 0x38, 0x3e, + 0x40, 0x36, 0x37, 0x46, 0x39, 0x3b, 0x34, 0x45, 0x40, 0x3b, 0x48, 0x36, + 0x34, 0x44, 0x37, 0x46, 0x3f, 0x42, 0x33, 0x36, 0x43, 0x3c, 0x41, 0x46, + 0x31, 0x42, 0x43, 0x44, 0x44, 0x3e, 0x42, 0x3b, 0x3b, 0x3a, 0x3c, 0x37, + 0x42, 0x41, 0x46, 0x38, 0x41, 0x3b, 0x40, 0x44, 0x37, 0x3c, 0x4c, 0x2e, + 0x3a, 0x3e, 0x3b, 0x36, 0x33, 0x27, 0x37, 0x5d, 0x27, 0x34, 0x32, 0x41, + 0x41, 0x3f, 0x40, 0x5d, 0x40, 0x3d, 0x48, 0x39, 0x2e, 0x30, 0x1f, 0x3f, + 0x38, 0x3f, 0x40, 0x33, 0x40, 0x38, 0x31, 0x3f, 0x42, 0x3e, 0x3b, 0x3a, + 0x42, 0x36, 0x3a, 0x42, 0x3c, 0x3b, 0x3d, 0x41, 0x3d, 0x40, 0x40, 0x3e, + 0x36, 0x41, 0x47, 0x3d, 0x33, 0x32, 0x33, 0x44, 0x3e, 0x3a, 0x3e, 0x3d, + 0x45, 0x3f, 0x38, 0x3f, 0x40, 0x3a, 0x3c, 0x46, 0x32, 0x42, 0x3c, 0x51, + 0x33, 0x38, 0x3a, 0x38, 0x41, 0x34, 0x45, 0x4e, 0x35, 0x3c, 0x42, 0x3e, + 0x3f, 0x45, 0x44, 0x4e, 0x39, 0x47, 0x3a, 0x33, 0x3e, 0x3b, 0x45, 0x42, + 0x37, 0x3a, 0x3e, 0x33, 0x41, 0x48, 0x32, 0x2a, 0x3b, 0x37, 0x3f, 0x3d, + 0x3a, 0x42, 0x41, 0x2f, 0x34, 0x3e, 0x49, 0x3b, 0x38, 0x3e, 0x3d, 0x3a, + 0x37, 0x3c, 0x44, 0x41, 0x39, 0x42, 0x3f, 0x39, 0x40, 0x35, 0x3d, 0x41, + 0x3b, 0x45, 0x44, 0x48, 0x3d, 0x42, 0x36, 0x33, 0x3e, 0x44, 0x3f, 0x41, + 0x42, 0x40, 0x49, 0x34, 0x48, 0x41, 0x3f, 0x40, 0x3c, 0x45, 0x47, 0x34, + 0x41, 0x37, 0x47, 0x3e, 0x41, 0x41, 0x39, 0x42, 0x3f, 0x3a, 0x46, 0x33, + 0x39, 0x41, 0x38, 0x38, 0x3e, 0x42, 0x41, 0x38, 0x35, 0x32, 0x33, 0x38, + 0x3a, 0x3f, 0x45, 0x66, 0x33, 0x47, 0x38, 0x3c, 0x41, 0x2f, 0x48, 0x55, + 0x33, 0x3e, 0x49, 0x3b, 0x3c, 0x30, 0x24, 0x45, 0x3c, 0x44, 0x43, 0x32, + 0x3d, 0x3f, 0x35, 0x3b, 0x3e, 0x36, 0x38, 0x3a, 0x36, 0x37, 0x3b, 0x41, + 0x38, 0x42, 0x3e, 0x43, 0x39, 0x3f, 0x3c, 0x40, 0x37, 0x43, 0x3e, 0x3b, + 0x3d, 0x35, 0x35, 0x3d, 0x43, 0x3f, 0x3a, 0x35, 0x37, 0x3c, 0x31, 0x47, + 0x44, 0x45, 0x40, 0x32, 0x44, 0x36, 0x38, 0x51, 0x3c, 0x41, 0x45, 0x37, + 0x39, 0x44, 0x3e, 0x4f, 0x3c, 0x3a, 0x38, 0x40, 0x3f, 0x34, 0x39, 0x4e, + 0x3d, 0x39, 0x45, 0x3f, 0x3e, 
0x3c, 0x3b, 0x42, 0x3b, 0x3b, 0x34, 0x3d, + 0x41, 0x44, 0x39, 0x2e, 0x37, 0x44, 0x45, 0x37, 0x3d, 0x41, 0x3f, 0x33, + 0x3f, 0x3e, 0x3e, 0x40, 0x44, 0x3f, 0x37, 0x32, 0x35, 0x3e, 0x43, 0x41, + 0x39, 0x37, 0x35, 0x3f, 0x48, 0x3d, 0x43, 0x49, 0x38, 0x35, 0x3f, 0x48, + 0x3b, 0x3a, 0x34, 0x3f, 0x3c, 0x44, 0x3a, 0x40, 0x36, 0x35, 0x44, 0x36, + 0x44, 0x3b, 0x3d, 0x38, 0x3c, 0x44, 0x47, 0x3a, 0x3b, 0x45, 0x41, 0x3a, + 0x39, 0x35, 0x44, 0x3a, 0x49, 0x36, 0x48, 0x31, 0x42, 0x43, 0x42, 0x34, + 0x41, 0x40, 0x4d, 0x36, 0x3e, 0x35, 0x39, 0x3b, 0x3f, 0x41, 0x38, 0x39, + 0x3c, 0x44, 0x3f, 0x39, 0x3a, 0x36, 0x3d, 0x36, 0x3a, 0x3a, 0x34, 0x3b, + 0x38, 0x2f, 0x40, 0x34, 0x32, 0x4d, 0x43, 0x45, 0x4e, 0x3f, 0x48, 0x35, + 0x3b, 0x4d, 0x4f, 0x39, 0x42, 0x36, 0x46, 0x36, 0x4a, 0x3c, 0x37, 0x41, + 0x40, 0x43, 0x50, 0x36, 0x3e, 0x39, 0x44, 0x40, 0x36, 0x47, 0x3f, 0x36, + 0x45, 0x40, 0x45, 0x41, 0x3b, 0x37, 0x41, 0x39, 0x3b, 0x48, 0x37, 0x34, + 0x41, 0x45, 0x49, 0x3f, 0x39, 0x49, 0x3f, 0x3a, 0x42, 0x34, 0x38, 0x37, + 0x44, 0x34, 0x3c, 0x3d, 0x40, 0x47, 0x3a, 0x36, 0x3f, 0x3c, 0x41, 0x3e, + 0x47, 0x46, 0x46, 0x43, 0x3f, 0x38, 0x3b, 0x40, 0x3f, 0x48, 0x3b, 0x4c, + 0x3d, 0x4b, 0x34, 0x3b, 0x44, 0x43, 0x3c, 0x49, 0x38, 0x42, 0x41, 0x36, + 0x33, 0x36, 0x40, 0x46, 0x40, 0x3a, 0x42, 0x3c, 0x3d, 0x35, 0x3c, 0x52, + 0x3e, 0x40, 0x43, 0x43, 0x41, 0x3b, 0x3e, 0x44, 0x3f, 0x40, 0x40, 0x43, + 0x3d, 0x3f, 0x36, 0x42, 0x3f, 0x3c, 0x34, 0x3d, 0x33, 0x41, 0x3c, 0x39, + 0x34, 0x43, 0x3f, 0x34, 0x3c, 0x3a, 0x3a, 0x37, 0x42, 0x41, 0x40, 0x3e, + 0x3d, 0x3c, 0x41, 0x3c, 0x38, 0x33, 0x49, 0x46, 0x40, 0x40, 0x3a, 0x46, + 0x38, 0x3c, 0x37, 0x34, 0x3e, 0x3d, 0x32, 0x38, 0x3c, 0x4c, 0x3a, 0x34, + 0x35, 0x32, 0x39, 0x40, 0x3a, 0x58, 0x40, 0x46, 0x42, 0x33, 0x45, 0x39, + 0x34, 0x4f, 0x53, 0x45, 0x43, 0x3e, 0x41, 0x36, 0x3e, 0x3f, 0x40, 0x47, + 0x4e, 0x3d, 0x53, 0x2b, 0x41, 0x36, 0x3e, 0x38, 0x47, 0x41, 0x3f, 0x34, + 0x47, 0x40, 0x38, 0x39, 0x3d, 0x42, 0x3f, 0x3c, 0x48, 0x3a, 0x35, 0x3c, + 0x45, 0x49, 0x3c, 0x33, 0x33, 
0x3f, 0x3c, 0x46, 0x43, 0x3f, 0x45, 0x31, + 0x35, 0x43, 0x46, 0x3a, 0x45, 0x3c, 0x37, 0x3a, 0x37, 0x36, 0x35, 0x3f, + 0x38, 0x49, 0x34, 0x3f, 0x3c, 0x42, 0x49, 0x3e, 0x3e, 0x3c, 0x39, 0x49, + 0x3e, 0x3c, 0x3b, 0x43, 0x44, 0x45, 0x39, 0x4b, 0x47, 0x47, 0x3e, 0x33, + 0x3c, 0x31, 0x34, 0x4f, 0x45, 0x43, 0x40, 0x3d, 0x42, 0x3b, 0x43, 0x50, + 0x3c, 0x3b, 0x37, 0x42, 0x47, 0x42, 0x3e, 0x4a, 0x3f, 0x3a, 0x48, 0x3d, + 0x48, 0x45, 0x3e, 0x40, 0x3a, 0x3c, 0x3d, 0x39, 0x41, 0x42, 0x3c, 0x42, + 0x43, 0x3c, 0x3b, 0x3d, 0x47, 0x49, 0x38, 0x3c, 0x46, 0x3a, 0x3c, 0x3f, + 0x3a, 0x46, 0x3a, 0x3b, 0x3d, 0x3a, 0x49, 0x46, 0x38, 0x40, 0x3e, 0x38, + 0x37, 0x32, 0x40, 0x3c, 0x42, 0x3d, 0x3b, 0x40, 0x3a, 0x38, 0x49, 0x33, + 0x40, 0x38, 0x2b, 0x3a, 0x3c, 0x4f, 0x4d, 0x3e, 0x35, 0x3d, 0x3b, 0x40, + 0x3a, 0x54, 0x3e, 0x3e, 0x43, 0x30, 0x47, 0x3d, 0x3b, 0x53, 0x52, 0x4a, + 0x43, 0x41, 0x49, 0x37, 0x3b, 0x35, 0x44, 0x3c, 0x45, 0x40, 0x4f, 0x36, + 0x4b, 0x42, 0x41, 0x3a, 0x41, 0x44, 0x47, 0x32, 0x43, 0x35, 0x3f, 0x37, + 0x43, 0x41, 0x43, 0x36, 0x3f, 0x3b, 0x3d, 0x38, 0x3d, 0x40, 0x42, 0x36, + 0x44, 0x3a, 0x39, 0x47, 0x37, 0x34, 0x42, 0x3a, 0x37, 0x38, 0x37, 0x3f, + 0x36, 0x3b, 0x45, 0x3f, 0x3f, 0x3d, 0x39, 0x3d, 0x39, 0x41, 0x37, 0x3f, + 0x3f, 0x3d, 0x3f, 0x41, 0x43, 0x41, 0x45, 0x43, 0x41, 0x3c, 0x3e, 0x40, + 0x40, 0x39, 0x41, 0x4f, 0x47, 0x42, 0x46, 0x48, 0x3b, 0x3b, 0x3c, 0x46, + 0x47, 0x3e, 0x46, 0x37, 0x38, 0x3d, 0x38, 0x52, 0x36, 0x46, 0x3c, 0x3a, + 0x3b, 0x37, 0x48, 0x4b, 0x3f, 0x42, 0x3c, 0x36, 0x40, 0x37, 0x33, 0x4c, + 0x39, 0x34, 0x41, 0x34, 0x3f, 0x3b, 0x35, 0x4b, 0x3b, 0x45, 0x43, 0x31, + 0x3e, 0x39, 0x30, 0x3d, 0x32, 0x43, 0x44, 0x3c, 0x3e, 0x38, 0x43, 0x41, + 0x3e, 0x37, 0x41, 0x39, 0x39, 0x44, 0x43, 0x38, 0x3f, 0x37, 0x48, 0x3f, + 0x3b, 0x44, 0x37, 0x3f, 0x3a, 0x3f, 0x3b, 0x33, 0x42, 0x3e, 0x2f, 0x42, + 0x44, 0x4f, 0x52, 0x3c, 0x34, 0x33, 0x39, 0x46, 0x31, 0x55, 0x43, 0x4e, + 0x49, 0x38, 0x4d, 0x48, 0x34, 0x4d, 0x5c, 0x4d, 0x49, 0x37, 0x4f, 0x40, + 0x3c, 0x3d, 0x41, 0x42, 0x3f, 
0x51, 0x4b, 0x2f, 0x46, 0x35, 0x39, 0x3c, + 0x49, 0x3d, 0x4e, 0x32, 0x43, 0x47, 0x31, 0x3e, 0x42, 0x4a, 0x4c, 0x39, + 0x43, 0x46, 0x3e, 0x3f, 0x44, 0x3c, 0x42, 0x30, 0x3e, 0x34, 0x3b, 0x3b, + 0x3a, 0x3c, 0x42, 0x3d, 0x3d, 0x48, 0x48, 0x36, 0x3a, 0x45, 0x38, 0x40, + 0x3c, 0x41, 0x3f, 0x49, 0x42, 0x41, 0x38, 0x3d, 0x3d, 0x44, 0x3b, 0x3d, + 0x35, 0x48, 0x43, 0x3b, 0x32, 0x41, 0x3e, 0x3a, 0x46, 0x41, 0x40, 0x54, + 0x38, 0x3f, 0x3c, 0x36, 0x3b, 0x36, 0x43, 0x50, 0x38, 0x3c, 0x44, 0x3b, + 0x43, 0x47, 0x32, 0x50, 0x3d, 0x46, 0x3d, 0x3b, 0x39, 0x37, 0x3b, 0x4a, + 0x47, 0x43, 0x46, 0x3d, 0x3d, 0x41, 0x43, 0x45, 0x3b, 0x3c, 0x39, 0x47, + 0x43, 0x42, 0x39, 0x4c, 0x34, 0x41, 0x45, 0x3b, 0x38, 0x3e, 0x37, 0x3f, + 0x45, 0x43, 0x39, 0x42, 0x3c, 0x3d, 0x3d, 0x3c, 0x48, 0x39, 0x3b, 0x3a, + 0x46, 0x45, 0x3d, 0x3a, 0x3f, 0x3a, 0x45, 0x36, 0x3d, 0x43, 0x36, 0x43, + 0x42, 0x3d, 0x41, 0x3f, 0x3a, 0x3f, 0x31, 0x37, 0x48, 0x4f, 0x4e, 0x36, + 0x30, 0x3a, 0x3e, 0x3e, 0x38, 0x57, 0x40, 0x47, 0x47, 0x38, 0x4f, 0x46, + 0x3d, 0x4a, 0x50, 0x4c, 0x42, 0x3b, 0x4d, 0x3d, 0x3d, 0x33, 0x40, 0x41, + 0x48, 0x4b, 0x46, 0x39, 0x4d, 0x30, 0x45, 0x38, 0x48, 0x3c, 0x48, 0x3b, + 0x4d, 0x40, 0x3b, 0x40, 0x46, 0x41, 0x51, 0x34, 0x40, 0x43, 0x3f, 0x42, + 0x45, 0x42, 0x3e, 0x35, 0x3d, 0x38, 0x37, 0x3a, 0x42, 0x40, 0x43, 0x3c, + 0x3c, 0x3d, 0x43, 0x40, 0x45, 0x3a, 0x3e, 0x3a, 0x3e, 0x40, 0x43, 0x35, + 0x37, 0x3f, 0x3f, 0x3e, 0x39, 0x3f, 0x47, 0x38, 0x3e, 0x44, 0x3b, 0x3c, + 0x3b, 0x32, 0x40, 0x3e, 0x42, 0x45, 0x3a, 0x52, 0x3a, 0x3e, 0x45, 0x40, + 0x41, 0x48, 0x3f, 0x4e, 0x3e, 0x42, 0x3d, 0x39, 0x3a, 0x33, 0x3f, 0x4b, + 0x3e, 0x38, 0x36, 0x3e, 0x31, 0x41, 0x3a, 0x40, 0x3b, 0x37, 0x3f, 0x3e, + 0x3e, 0x3f, 0x35, 0x44, 0x3d, 0x42, 0x3d, 0x44, 0x42, 0x3f, 0x3e, 0x44, + 0x3e, 0x45, 0x37, 0x3a, 0x3b, 0x42, 0x3f, 0x41, 0x3b, 0x3f, 0x41, 0x41, + 0x3e, 0x34, 0x47, 0x39, 0x46, 0x46, 0x37, 0x39, 0x3f, 0x45, 0x39, 0x39, + 0x3a, 0x40, 0x38, 0x3a, 0x31, 0x34, 0x3a, 0x41, 0x38, 0x41, 0x3a, 0x41, + 0x44, 0x37, 0x2d, 0x41, 0x43, 
0x4d, 0x4b, 0x3b, 0x2c, 0x30, 0x42, 0x3b, + 0x31, 0x56, 0x43, 0x47, 0x47, 0x38, 0x50, 0x44, 0x40, 0x52, 0x5a, 0x50, + 0x44, 0x3f, 0x4b, 0x35, 0x3a, 0x36, 0x41, 0x44, 0x47, 0x4e, 0x52, 0x36, + 0x45, 0x39, 0x38, 0x3c, 0x42, 0x44, 0x40, 0x3b, 0x4b, 0x38, 0x35, 0x35, + 0x3f, 0x40, 0x4f, 0x39, 0x3d, 0x37, 0x34, 0x3e, 0x41, 0x4c, 0x40, 0x37, + 0x3d, 0x3b, 0x37, 0x37, 0x40, 0x42, 0x35, 0x39, 0x41, 0x42, 0x3d, 0x34, + 0x3c, 0x37, 0x3a, 0x3d, 0x46, 0x46, 0x46, 0x3f, 0x44, 0x3d, 0x3c, 0x40, + 0x3c, 0x3a, 0x3d, 0x3b, 0x3b, 0x41, 0x47, 0x3a, 0x43, 0x43, 0x43, 0x3b, + 0x3e, 0x3e, 0x42, 0x46, 0x36, 0x37, 0x45, 0x35, 0x3c, 0x3b, 0x31, 0x4b, + 0x3c, 0x3e, 0x3a, 0x3a, 0x42, 0x42, 0x34, 0x47, 0x37, 0x34, 0x41, 0x3d, + 0x3e, 0x39, 0x43, 0x47, 0x31, 0x3b, 0x40, 0x3b, 0x42, 0x3d, 0x44, 0x44, + 0x37, 0x39, 0x44, 0x3b, 0x40, 0x3a, 0x3d, 0x44, 0x3c, 0x40, 0x42, 0x3b, + 0x40, 0x3e, 0x32, 0x3d, 0x3c, 0x3e, 0x44, 0x3e, 0x47, 0x3d, 0x3f, 0x2e, + 0x3e, 0x3d, 0x3f, 0x3b, 0x3b, 0x43, 0x43, 0x3c, 0x3a, 0x3c, 0x3a, 0x36, + 0x38, 0x46, 0x30, 0x3e, 0x3f, 0x35, 0x3e, 0x34, 0x3c, 0x34, 0x32, 0x4a, + 0x41, 0x48, 0x48, 0x3f, 0x34, 0x37, 0x42, 0x43, 0x36, 0x59, 0x42, 0x3f, + 0x4b, 0x3d, 0x5d, 0x45, 0x3b, 0x51, 0x51, 0x4c, 0x41, 0x40, 0x4d, 0x36, + 0x3f, 0x34, 0x39, 0x3d, 0x4a, 0x4b, 0x4f, 0x33, 0x48, 0x32, 0x3c, 0x32, + 0x48, 0x4c, 0x4d, 0x3a, 0x49, 0x3a, 0x3a, 0x2e, 0x4b, 0x44, 0x4f, 0x33, + 0x3a, 0x48, 0x34, 0x43, 0x38, 0x45, 0x44, 0x35, 0x3b, 0x3f, 0x40, 0x37, + 0x35, 0x34, 0x38, 0x3e, 0x41, 0x3e, 0x3b, 0x47, 0x41, 0x47, 0x3c, 0x3c, + 0x39, 0x40, 0x3e, 0x45, 0x36, 0x41, 0x3f, 0x3f, 0x3c, 0x44, 0x3f, 0x43, + 0x3d, 0x3c, 0x49, 0x42, 0x3e, 0x3f, 0x48, 0x37, 0x43, 0x37, 0x43, 0x3d, + 0x32, 0x42, 0x44, 0x39, 0x36, 0x37, 0x40, 0x46, 0x47, 0x3d, 0x3a, 0x42, + 0x3f, 0x38, 0x37, 0x48, 0x39, 0x40, 0x3c, 0x37, 0x33, 0x38, 0x38, 0x40, + 0x41, 0x3c, 0x3f, 0x3b, 0x40, 0x3a, 0x47, 0x46, 0x3a, 0x37, 0x42, 0x47, + 0x3b, 0x3f, 0x3b, 0x40, 0x33, 0x3f, 0x3a, 0x3c, 0x38, 0x3a, 0x36, 0x38, + 0x36, 0x40, 0x48, 0x42, 0x48, 
0x3c, 0x43, 0x36, 0x32, 0x3b, 0x34, 0x39, + 0x38, 0x46, 0x37, 0x3b, 0x44, 0x34, 0x36, 0x38, 0x3c, 0x43, 0x33, 0x3c, + 0x3b, 0x45, 0x38, 0x38, 0x44, 0x33, 0x36, 0x4a, 0x46, 0x4c, 0x4a, 0x34, + 0x36, 0x37, 0x43, 0x42, 0x33, 0x58, 0x43, 0x48, 0x44, 0x38, 0x5f, 0x3f, + 0x3c, 0x4d, 0x53, 0x52, 0x43, 0x47, 0x52, 0x3e, 0x3b, 0x2d, 0x3b, 0x3a, + 0x4b, 0x49, 0x53, 0x38, 0x4c, 0x2f, 0x38, 0x31, 0x42, 0x40, 0x48, 0x3f, + 0x44, 0x3c, 0x3c, 0x34, 0x46, 0x3f, 0x49, 0x3a, 0x43, 0x3d, 0x34, 0x42, + 0x36, 0x47, 0x51, 0x3c, 0x3d, 0x39, 0x39, 0x3a, 0x3b, 0x35, 0x35, 0x41, + 0x47, 0x3c, 0x3b, 0x43, 0x3f, 0x45, 0x3e, 0x40, 0x3c, 0x3f, 0x3c, 0x42, + 0x3b, 0x3e, 0x38, 0x3f, 0x3f, 0x41, 0x39, 0x39, 0x3d, 0x43, 0x4f, 0x3d, + 0x48, 0x3b, 0x44, 0x45, 0x3d, 0x3b, 0x49, 0x43, 0x44, 0x3d, 0x37, 0x3b, + 0x3c, 0x45, 0x46, 0x44, 0x35, 0x3e, 0x32, 0x35, 0x34, 0x3b, 0x40, 0x43, + 0x3e, 0x45, 0x37, 0x3d, 0x3f, 0x43, 0x36, 0x3f, 0x3f, 0x43, 0x39, 0x44, + 0x3e, 0x3e, 0x45, 0x40, 0x3e, 0x44, 0x3b, 0x3e, 0x42, 0x42, 0x3b, 0x3d, + 0x3a, 0x40, 0x39, 0x3a, 0x32, 0x36, 0x41, 0x30, 0x39, 0x46, 0x33, 0x3f, + 0x46, 0x40, 0x3c, 0x31, 0x41, 0x3a, 0x3f, 0x3f, 0x3b, 0x36, 0x3f, 0x38, + 0x36, 0x3e, 0x35, 0x35, 0x3b, 0x3d, 0x3f, 0x39, 0x46, 0x37, 0x3a, 0x47, + 0x37, 0x39, 0x2c, 0x55, 0x40, 0x4b, 0x4a, 0x39, 0x35, 0x42, 0x3d, 0x40, + 0x3a, 0x54, 0x41, 0x48, 0x51, 0x3b, 0x61, 0x3e, 0x3e, 0x4d, 0x51, 0x52, + 0x3e, 0x43, 0x52, 0x41, 0x48, 0x2d, 0x35, 0x35, 0x4b, 0x44, 0x4d, 0x3c, + 0x54, 0x33, 0x39, 0x27, 0x4a, 0x44, 0x4a, 0x41, 0x3c, 0x3a, 0x31, 0x2f, + 0x3d, 0x42, 0x48, 0x3f, 0x42, 0x40, 0x44, 0x3b, 0x40, 0x3e, 0x49, 0x3a, + 0x3c, 0x35, 0x30, 0x3e, 0x3e, 0x3d, 0x36, 0x3a, 0x3e, 0x3a, 0x4a, 0x3e, + 0x3d, 0x49, 0x40, 0x43, 0x3e, 0x45, 0x3f, 0x3c, 0x3b, 0x42, 0x3a, 0x39, + 0x3b, 0x47, 0x3f, 0x39, 0x49, 0x46, 0x3d, 0x34, 0x32, 0x44, 0x46, 0x42, + 0x47, 0x39, 0x49, 0x48, 0x3b, 0x38, 0x45, 0x45, 0x37, 0x38, 0x46, 0x46, + 0x37, 0x42, 0x35, 0x34, 0x45, 0x42, 0x35, 0x43, 0x3b, 0x3a, 0x43, 0x43, + 0x40, 0x42, 0x35, 0x3f, 0x38, 
0x3f, 0x3a, 0x3a, 0x3b, 0x3f, 0x3e, 0x36, + 0x3f, 0x3c, 0x48, 0x3b, 0x3a, 0x41, 0x41, 0x35, 0x33, 0x3f, 0x3b, 0x45, + 0x48, 0x36, 0x40, 0x38, 0x47, 0x3d, 0x35, 0x40, 0x41, 0x42, 0x41, 0x37, + 0x41, 0x3e, 0x36, 0x48, 0x3e, 0x3c, 0x32, 0x39, 0x41, 0x40, 0x38, 0x3f, + 0x46, 0x43, 0x33, 0x40, 0x43, 0x43, 0x3a, 0x49, 0x3f, 0x35, 0x2c, 0x5d, + 0x43, 0x49, 0x52, 0x3b, 0x3c, 0x41, 0x40, 0x4a, 0x33, 0x50, 0x41, 0x46, + 0x52, 0x41, 0x68, 0x48, 0x44, 0x53, 0x54, 0x55, 0x42, 0x42, 0x57, 0x44, + 0x47, 0x35, 0x35, 0x3e, 0x4b, 0x44, 0x4e, 0x38, 0x55, 0x2f, 0x36, 0x2d, + 0x40, 0x48, 0x4b, 0x41, 0x48, 0x36, 0x32, 0x32, 0x44, 0x42, 0x47, 0x42, + 0x48, 0x3d, 0x3d, 0x39, 0x3e, 0x35, 0x4b, 0x39, 0x38, 0x3a, 0x39, 0x46, + 0x38, 0x3f, 0x3a, 0x42, 0x4b, 0x45, 0x3e, 0x32, 0x46, 0x43, 0x3b, 0x40, + 0x45, 0x41, 0x3e, 0x43, 0x37, 0x3d, 0x43, 0x3b, 0x46, 0x48, 0x42, 0x3b, + 0x3d, 0x48, 0x4a, 0x3c, 0x3b, 0x42, 0x40, 0x3c, 0x3a, 0x42, 0x38, 0x47, + 0x3b, 0x3b, 0x3d, 0x41, 0x3f, 0x38, 0x3f, 0x4a, 0x44, 0x3f, 0x47, 0x3a, + 0x47, 0x44, 0x43, 0x43, 0x34, 0x3d, 0x3a, 0x3c, 0x47, 0x3f, 0x3e, 0x39, + 0x42, 0x4a, 0x40, 0x36, 0x40, 0x41, 0x42, 0x3f, 0x3f, 0x43, 0x39, 0x38, + 0x3c, 0x3b, 0x4c, 0x2f, 0x41, 0x39, 0x40, 0x42, 0x3f, 0x42, 0x40, 0x36, + 0x3b, 0x45, 0x41, 0x41, 0x44, 0x45, 0x42, 0x37, 0x3d, 0x3a, 0x33, 0x3e, + 0x3b, 0x3b, 0x3c, 0x3d, 0x38, 0x49, 0x44, 0x39, 0x3f, 0x48, 0x3d, 0x41, + 0x42, 0x43, 0x44, 0x3e, 0x41, 0x3d, 0x32, 0x59, 0x45, 0x4b, 0x4b, 0x38, + 0x37, 0x3d, 0x48, 0x42, 0x3d, 0x52, 0x43, 0x46, 0x54, 0x48, 0x67, 0x4d, + 0x45, 0x4e, 0x49, 0x52, 0x45, 0x45, 0x58, 0x3b, 0x41, 0x38, 0x3f, 0x3f, + 0x49, 0x44, 0x4f, 0x48, 0x57, 0x31, 0x3c, 0x2a, 0x3e, 0x4c, 0x41, 0x40, + 0x47, 0x3f, 0x33, 0x34, 0x3f, 0x42, 0x48, 0x43, 0x4b, 0x38, 0x39, 0x3d, + 0x3f, 0x3e, 0x4b, 0x3f, 0x35, 0x36, 0x3c, 0x46, 0x3c, 0x45, 0x37, 0x3b, + 0x3c, 0x39, 0x41, 0x40, 0x41, 0x43, 0x44, 0x41, 0x45, 0x4f, 0x44, 0x43, + 0x44, 0x3c, 0x45, 0x34, 0x42, 0x45, 0x3f, 0x46, 0x3f, 0x43, 0x3d, 0x3a, + 0x39, 0x47, 0x45, 0x3d, 0x3f, 
0x3b, 0x3d, 0x42, 0x38, 0x48, 0x48, 0x3b, + 0x3c, 0x3a, 0x3f, 0x41, 0x44, 0x4b, 0x44, 0x48, 0x41, 0x3c, 0x3d, 0x3c, + 0x3e, 0x3a, 0x4a, 0x3b, 0x49, 0x35, 0x3a, 0x3d, 0x41, 0x3f, 0x49, 0x39, + 0x44, 0x37, 0x3f, 0x3c, 0x42, 0x40, 0x4a, 0x46, 0x39, 0x38, 0x46, 0x37, + 0x41, 0x46, 0x41, 0x45, 0x40, 0x3b, 0x3b, 0x33, 0x3b, 0x39, 0x3c, 0x43, + 0x37, 0x3c, 0x44, 0x3d, 0x46, 0x39, 0x3c, 0x3c, 0x44, 0x48, 0x41, 0x44, + 0x41, 0x43, 0x46, 0x3b, 0x47, 0x41, 0x31, 0x41, 0x44, 0x40, 0x43, 0x42, + 0x3e, 0x43, 0x34, 0x65, 0x4f, 0x50, 0x4d, 0x3a, 0x37, 0x43, 0x4d, 0x4a, + 0x3d, 0x54, 0x40, 0x42, 0x5b, 0x3b, 0x71, 0x49, 0x44, 0x4f, 0x54, 0x56, + 0x48, 0x40, 0x52, 0x41, 0x42, 0x38, 0x3c, 0x49, 0x4a, 0x45, 0x51, 0x35, + 0x54, 0x2f, 0x35, 0x25, 0x4d, 0x3f, 0x4d, 0x43, 0x49, 0x33, 0x32, 0x3a, + 0x46, 0x48, 0x48, 0x3d, 0x43, 0x3a, 0x3c, 0x3a, 0x48, 0x40, 0x4b, 0x3b, + 0x45, 0x3b, 0x3f, 0x38, 0x37, 0x41, 0x31, 0x3b, 0x41, 0x43, 0x43, 0x37, + 0x48, 0x3f, 0x48, 0x37, 0x40, 0x4a, 0x43, 0x45, 0x3d, 0x39, 0x37, 0x37, + 0x3c, 0x3f, 0x47, 0x48, 0x43, 0x3e, 0x41, 0x3f, 0x3e, 0x38, 0x3e, 0x37, + 0x45, 0x45, 0x35, 0x44, 0x38, 0x3a, 0x49, 0x43, 0x40, 0x41, 0x40, 0x44, + 0x3c, 0x3e, 0x40, 0x38, 0x42, 0x41, 0x3c, 0x41, 0x3a, 0x3b, 0x3c, 0x3a, + 0x49, 0x3c, 0x42, 0x44, 0x3f, 0x39, 0x45, 0x32, 0x45, 0x43, 0x45, 0x39, + 0x43, 0x41, 0x4b, 0x39, 0x32, 0x3c, 0x3c, 0x36, 0x39, 0x3f, 0x46, 0x32, + 0x39, 0x35, 0x4f, 0x32, 0x3e, 0x40, 0x3d, 0x3e, 0x3a, 0x39, 0x4c, 0x38, + 0x43, 0x38, 0x49, 0x3b, 0x33, 0x39, 0x3b, 0x36, 0x36, 0x43, 0x3b, 0x3c, + 0x32, 0x3c, 0x3a, 0x45, 0x31, 0x3d, 0x37, 0x40, 0x3f, 0x3f, 0x35, 0xff, + 0x49, 0x4e, 0x4c, 0x3c, 0x36, 0x43, 0x46, 0x45, 0x41, 0x59, 0x44, 0x4a, + 0x53, 0x44, 0x71, 0x4a, 0x39, 0x4f, 0x50, 0x4b, 0x47, 0x42, 0x5a, 0x3c, + 0x45, 0x38, 0x3e, 0x42, 0x53, 0x43, 0x52, 0x3a, 0x52, 0x34, 0x31, 0x20, + 0x49, 0x4e, 0x46, 0x43, 0x4b, 0x3d, 0x2b, 0x27, 0x46, 0x46, 0x47, 0x41, + 0x42, 0x37, 0x39, 0x38, 0x45, 0x3f, 0x51, 0x3d, 0x48, 0x3f, 0x33, 0x3f, + 0x38, 0x45, 0x31, 0x38, 0x41, 
0x3d, 0x47, 0x39, 0x42, 0x40, 0x4c, 0x3f, + 0x40, 0x42, 0x41, 0x41, 0x41, 0x42, 0x39, 0x35, 0x3f, 0x46, 0x45, 0x36, + 0x3f, 0x43, 0x3b, 0x39, 0x41, 0x38, 0x43, 0x37, 0x3d, 0x44, 0x3b, 0x40, + 0x36, 0x3d, 0x42, 0x41, 0x41, 0x3d, 0x38, 0x4a, 0x40, 0x4a, 0x4c, 0x38, + 0x3f, 0x40, 0x45, 0x3c, 0x3f, 0x4b, 0x43, 0x41, 0x43, 0x3e, 0x43, 0x3f, + 0x36, 0x40, 0x40, 0x39, 0x3f, 0x3a, 0x3a, 0x30, 0x41, 0x3c, 0x3c, 0x34, + 0x46, 0x38, 0x43, 0x34, 0x3a, 0x42, 0x43, 0x42, 0x40, 0x41, 0x49, 0x34, + 0x35, 0x40, 0x47, 0x3d, 0x3d, 0x3e, 0x4c, 0x33, 0x3c, 0x3b, 0x39, 0x43, + 0x3a, 0x3e, 0x3b, 0x37, 0x3f, 0x42, 0x31, 0x3d, 0x41, 0x3e, 0x32, 0x47, + 0x34, 0x41, 0x3d, 0x35, 0x39, 0x40, 0x38, 0x69, 0x4f, 0x4a, 0x49, 0x37, + 0x37, 0x44, 0x43, 0x46, 0x40, 0x58, 0x43, 0x48, 0x54, 0x46, 0x6c, 0x50, + 0x3a, 0x50, 0x50, 0x57, 0x47, 0x46, 0x5c, 0x40, 0x40, 0x39, 0x3e, 0x46, + 0x53, 0x46, 0x5c, 0x36, 0x4f, 0x32, 0x30, 0x2d, 0x4a, 0x48, 0x41, 0x45, + 0x47, 0x2f, 0x32, 0x2b, 0x43, 0x40, 0x43, 0x3c, 0x40, 0x44, 0x3e, 0x37, + 0x39, 0x3e, 0x48, 0x42, 0x45, 0x36, 0x47, 0x3f, 0x3b, 0x41, 0x35, 0x35, + 0x3b, 0x3e, 0x35, 0x43, 0x3e, 0x41, 0x3d, 0x36, 0x41, 0x3c, 0x40, 0x44, + 0x3d, 0x40, 0x35, 0x32, 0x48, 0x3e, 0x39, 0x42, 0x44, 0x3d, 0x39, 0x3b, + 0x3b, 0x45, 0x40, 0x4a, 0x3f, 0x41, 0x43, 0x39, 0x42, 0x44, 0x4c, 0x3c, + 0x3f, 0x3e, 0x3f, 0x43, 0x40, 0x42, 0x4c, 0x3b, 0x3e, 0x3d, 0x49, 0x42, + 0x40, 0x44, 0x40, 0x34, 0x36, 0x40, 0x45, 0x39, 0x42, 0x40, 0x3e, 0x44, + 0x45, 0x37, 0x3c, 0x38, 0x3e, 0x49, 0x3e, 0x3c, 0x41, 0x3d, 0x42, 0x32, + 0x40, 0x45, 0x3e, 0x36, 0x44, 0x3a, 0x4e, 0x38, 0x43, 0x38, 0x40, 0x38, + 0x49, 0x42, 0x40, 0x3d, 0x42, 0x48, 0x48, 0x3d, 0x41, 0x3a, 0x3f, 0x41, + 0x38, 0x3c, 0x44, 0x39, 0x3a, 0x32, 0x3a, 0x3e, 0x3d, 0x3b, 0x39, 0x38, + 0x3a, 0x43, 0x3a, 0x6b, 0x45, 0x50, 0x47, 0x33, 0x38, 0x48, 0x4d, 0x4f, + 0x39, 0x4b, 0x46, 0x4a, 0x4f, 0x42, 0x6f, 0x4b, 0x40, 0x55, 0x54, 0x50, + 0x42, 0x47, 0x5e, 0x46, 0x40, 0x34, 0x40, 0x47, 0x52, 0x46, 0x55, 0x3b, + 0x4f, 0x2b, 0x35, 0x33, 0x4c, 
0x44, 0x44, 0x48, 0x47, 0x37, 0x35, 0x27, + 0x4a, 0x3b, 0x41, 0x40, 0x40, 0x3e, 0x36, 0x39, 0x3e, 0x3c, 0x45, 0x3f, + 0x4d, 0x41, 0x3d, 0x48, 0x47, 0x46, 0x33, 0x3d, 0x3d, 0x3e, 0x34, 0x3f, + 0x3e, 0x3a, 0x41, 0x35, 0x3b, 0x3e, 0x42, 0x3c, 0x42, 0x42, 0x40, 0x31, + 0x37, 0x40, 0x36, 0x42, 0x48, 0x39, 0x3d, 0x3c, 0x3a, 0x43, 0x39, 0x3d, + 0x47, 0x49, 0x43, 0x3d, 0x45, 0x39, 0x44, 0x37, 0x3e, 0x4d, 0x3d, 0x40, + 0x3d, 0x4c, 0x4d, 0x44, 0x3c, 0x3d, 0x46, 0x41, 0x41, 0x42, 0x40, 0x40, + 0x41, 0x3a, 0x3c, 0x3b, 0x3c, 0x44, 0x40, 0x34, 0x44, 0x38, 0x3b, 0x33, + 0x45, 0x45, 0x44, 0x3f, 0x3e, 0x3a, 0x3b, 0x3b, 0x43, 0x39, 0x3a, 0x45, + 0x3b, 0x3a, 0x4b, 0x39, 0x3d, 0x38, 0x41, 0x39, 0x42, 0x45, 0x43, 0x40, + 0x3e, 0x35, 0x44, 0x3f, 0x45, 0x41, 0x40, 0x3e, 0x43, 0x42, 0x37, 0x3a, + 0x38, 0x35, 0x3a, 0x48, 0x3e, 0x3b, 0x40, 0x38, 0x3c, 0x3c, 0x3b, 0x6a, + 0x48, 0x4d, 0x4d, 0x34, 0x38, 0x40, 0x4a, 0x45, 0x3c, 0x4f, 0x41, 0x4b, + 0x58, 0x46, 0x71, 0x49, 0x3d, 0x53, 0x44, 0x52, 0x42, 0x3e, 0x57, 0x4c, + 0x4c, 0x38, 0x40, 0x3b, 0x5c, 0x4c, 0x52, 0x3e, 0x4c, 0x2d, 0x32, 0x37, + 0x49, 0x3f, 0x41, 0x47, 0x4a, 0x3b, 0x2f, 0x26, 0x45, 0x40, 0x47, 0x42, + 0x3d, 0x39, 0x2d, 0x2c, 0x3f, 0x45, 0x46, 0x44, 0x48, 0x43, 0x42, 0x48, + 0x40, 0x41, 0x3b, 0x3b, 0x41, 0x3b, 0x39, 0x40, 0x3b, 0x47, 0x3f, 0x38, + 0x3f, 0x49, 0x3b, 0x35, 0x40, 0x45, 0x38, 0x35, 0x36, 0x34, 0x3e, 0x3d, + 0x46, 0x3e, 0x33, 0x38, 0x43, 0x48, 0x3f, 0x45, 0x31, 0x44, 0x38, 0x35, + 0x3c, 0x41, 0x4b, 0x44, 0x3d, 0x43, 0x38, 0x48, 0x3c, 0x39, 0x4a, 0x42, + 0x3d, 0x43, 0x3f, 0x49, 0x3e, 0x47, 0x49, 0x41, 0x3b, 0x3c, 0x47, 0x3a, + 0x3d, 0x40, 0x4a, 0x38, 0x3d, 0x3b, 0x47, 0x3a, 0x36, 0x47, 0x42, 0x46, + 0x3c, 0x3d, 0x45, 0x3b, 0x48, 0x3f, 0x38, 0x36, 0x39, 0x46, 0x43, 0x3a, + 0x41, 0x3d, 0x39, 0x39, 0x46, 0x37, 0x3f, 0x3f, 0x3a, 0x46, 0x3f, 0x39, + 0x49, 0x44, 0x42, 0x3a, 0x3a, 0x43, 0x3e, 0x42, 0x3d, 0x3d, 0x43, 0x40, + 0x43, 0x3c, 0x3f, 0x43, 0x40, 0x42, 0x3b, 0x57, 0x4a, 0x4f, 0x4a, 0x2d, + 0x3b, 0x48, 0x45, 0x42, 0x34, 
0x4c, 0x3e, 0x4f, 0x4d, 0x40, 0x6c, 0x4b, + 0x3b, 0x4d, 0x4c, 0x57, 0x49, 0x3d, 0x5d, 0x44, 0x43, 0x29, 0x42, 0x3f, + 0x5b, 0x47, 0x4f, 0x3e, 0x54, 0x2e, 0x34, 0x34, 0x4b, 0x47, 0x46, 0x46, + 0x4b, 0x34, 0x36, 0x28, 0x3e, 0x3f, 0x42, 0x40, 0x3b, 0x38, 0x39, 0x42, + 0x49, 0x3d, 0x49, 0x47, 0x47, 0x3b, 0x43, 0x34, 0x39, 0x36, 0x42, 0x3d, + 0x37, 0x40, 0x37, 0x38, 0x46, 0x42, 0x49, 0x37, 0x44, 0x3f, 0x38, 0x3e, + 0x36, 0x32, 0x33, 0x38, 0x40, 0x46, 0x42, 0x34, 0x41, 0x42, 0x3e, 0x38, + 0x44, 0x3e, 0x3f, 0x43, 0x3f, 0x43, 0x35, 0x3f, 0x4d, 0x3b, 0x43, 0x39, + 0x40, 0x47, 0x3f, 0x4a, 0x3a, 0x3f, 0x45, 0x45, 0x48, 0x42, 0x3b, 0x47, + 0x42, 0x4b, 0x47, 0x3e, 0x3c, 0x42, 0x46, 0x39, 0x41, 0x3f, 0x48, 0x33, + 0x45, 0x34, 0x3d, 0x30, 0x40, 0x4c, 0x40, 0x40, 0x39, 0x37, 0x40, 0x33, + 0x49, 0x42, 0x45, 0x38, 0x3c, 0x43, 0x45, 0x35, 0x37, 0x33, 0x34, 0x3b, + 0x3b, 0x38, 0x39, 0x41, 0x42, 0x40, 0x3e, 0x3e, 0x41, 0x33, 0x3a, 0x36, + 0x40, 0x3a, 0x3c, 0x45, 0x43, 0x3c, 0x40, 0x41, 0x49, 0x47, 0x35, 0x34, + 0x3a, 0x3d, 0x3a, 0x68, 0x4f, 0x48, 0x43, 0x36, 0x37, 0x3e, 0x45, 0x49, + 0x3a, 0x4d, 0x41, 0x3d, 0x46, 0x45, 0x65, 0x46, 0x38, 0x4d, 0x4a, 0x53, + 0x43, 0x41, 0x5d, 0x47, 0x41, 0x34, 0x39, 0x43, 0x4e, 0x48, 0x50, 0x38, + 0x53, 0x32, 0x30, 0x2e, 0x49, 0x4c, 0x4d, 0x3f, 0x46, 0x38, 0x34, 0x2b, + 0x44, 0x44, 0x41, 0x41, 0x36, 0x40, 0x3f, 0x32, 0x46, 0x38, 0x50, 0x45, + 0x3f, 0x3d, 0x3b, 0x36, 0x3b, 0x43, 0x3a, 0x34, 0x36, 0x3f, 0x39, 0x35, + 0x3c, 0x40, 0x40, 0x37, 0x3c, 0x39, 0x3d, 0x36, 0x48, 0x3d, 0x43, 0x34, + 0x3b, 0x46, 0x43, 0x41, 0x33, 0x3e, 0x44, 0x3d, 0x44, 0x44, 0x4c, 0x3c, + 0x37, 0x49, 0x42, 0x35, 0x45, 0x3a, 0x3c, 0x41, 0x3a, 0x45, 0x46, 0x41, + 0x3c, 0x48, 0x46, 0x36, 0x36, 0x42, 0x3b, 0x46, 0x42, 0x45, 0x44, 0x47, + 0x3f, 0x44, 0x3a, 0x35, 0x37, 0x46, 0x40, 0x38, 0x40, 0x3d, 0x36, 0x2c, + 0x34, 0x47, 0x40, 0x38, 0x3f, 0x3f, 0x44, 0x2d, 0x3b, 0x3d, 0x3e, 0x44, + 0x3c, 0x40, 0x3e, 0x33, 0x3c, 0x3a, 0x49, 0x40, 0x42, 0x42, 0x3a, 0x3b, + 0x33, 0x3d, 0x3c, 0x43, 0x3e, 
0x3d, 0x3a, 0x3a, 0x48, 0x3e, 0x3c, 0x39, + 0x3f, 0x44, 0x37, 0x40, 0x3f, 0x3c, 0x3e, 0x3d, 0x38, 0x42, 0x34, 0x62, + 0x51, 0x47, 0x44, 0x3f, 0x32, 0x3c, 0x3f, 0x46, 0x3d, 0x46, 0x3e, 0x45, + 0x4a, 0x3e, 0x5d, 0x43, 0x45, 0x49, 0x4a, 0x55, 0x41, 0x3c, 0x5a, 0x44, + 0x43, 0x3b, 0x3c, 0x3a, 0x4b, 0x4e, 0x4d, 0x42, 0x49, 0x30, 0x3b, 0x38, + 0x42, 0x44, 0x51, 0x40, 0x48, 0x33, 0x3f, 0x2b, 0x3c, 0x41, 0x3c, 0x45, + 0x35, 0x39, 0x42, 0x37, 0x40, 0x46, 0x46, 0x3f, 0x41, 0x45, 0x42, 0x3d, + 0x43, 0x38, 0x3e, 0x38, 0x3c, 0x39, 0x40, 0x38, 0x37, 0x36, 0x3d, 0x3d, + 0x38, 0x47, 0x45, 0x3b, 0x45, 0x44, 0x42, 0x2e, 0x37, 0x40, 0x42, 0x42, + 0x3c, 0x36, 0x3b, 0x39, 0x44, 0x4d, 0x42, 0x3f, 0x3a, 0x3e, 0x45, 0x34, + 0x3c, 0x43, 0x47, 0x43, 0x3f, 0x48, 0x3b, 0x44, 0x3d, 0x44, 0x43, 0x3e, + 0x40, 0x4a, 0x31, 0x42, 0x42, 0x43, 0x48, 0x45, 0x3a, 0x42, 0x36, 0x2f, + 0x3c, 0x3e, 0x3b, 0x3b, 0x44, 0x3f, 0x3a, 0x2c, 0x47, 0x3f, 0x4a, 0x40, + 0x40, 0x40, 0x3c, 0x2a, 0x3e, 0x44, 0x40, 0x43, 0x3a, 0x42, 0x39, 0x34, + 0x49, 0x3e, 0x36, 0x42, 0x3f, 0x42, 0x33, 0x3b, 0x3c, 0x45, 0x39, 0x3f, + 0x3e, 0x3f, 0x41, 0x3d, 0x32, 0x3b, 0x31, 0x40, 0x3f, 0x44, 0x3c, 0x3f, + 0x40, 0x46, 0x45, 0x36, 0x36, 0x42, 0x30, 0x57, 0x47, 0x44, 0x48, 0x3f, + 0x35, 0x37, 0x3f, 0x3f, 0x38, 0x4a, 0x41, 0x46, 0x50, 0x3d, 0x5b, 0x41, + 0x3e, 0x3c, 0x4a, 0x54, 0x45, 0x41, 0x5b, 0x46, 0x3d, 0x3b, 0x43, 0x33, + 0x45, 0x4e, 0x43, 0x3b, 0x44, 0x37, 0x37, 0x32, 0x4c, 0x3d, 0x4c, 0x3f, + 0x49, 0x3b, 0x37, 0x3a, 0x33, 0x43, 0x3f, 0x40, 0x44, 0x36, 0x3b, 0x44, + 0x45, 0x40, 0x3c, 0x3c, 0x41, 0x44, 0x3b, 0x3d, 0x33, 0x37, 0x3c, 0x35, + 0x3d, 0x3f, 0x39, 0x38, 0x33, 0x43, 0x3e, 0x39, 0x3b, 0x3e, 0x41, 0x35, + 0x40, 0x46, 0x43, 0x35, 0x41, 0x3d, 0x32, 0x39, 0x3c, 0x40, 0x3e, 0x3f, + 0x42, 0x38, 0x3b, 0x45, 0x3a, 0x3d, 0x40, 0x36, 0x3a, 0x40, 0x46, 0x44, + 0x48, 0x45, 0x3f, 0x3a, 0x45, 0x45, 0x3c, 0x3b, 0x40, 0x4c, 0x39, 0x3a, + 0x38, 0x39, 0x46, 0x3a, 0x3e, 0x4b, 0x34, 0x39, 0x3d, 0x3f, 0x40, 0x39, + 0x45, 0x31, 0x45, 0x29, 0x3f, 
0x38, 0x3a, 0x3f, 0x38, 0x3b, 0x36, 0x2d, + 0x43, 0x3d, 0x45, 0x3c, 0x46, 0x3f, 0x40, 0x3c, 0x3a, 0x3e, 0x3d, 0x38, + 0x3f, 0x3c, 0x3f, 0x42, 0x35, 0x3f, 0x3a, 0x43, 0x3d, 0x43, 0x3d, 0x33, + 0x3d, 0x48, 0x42, 0x3d, 0x45, 0x46, 0x3d, 0x35, 0x32, 0x44, 0x42, 0x37, + 0x3d, 0x40, 0x3c, 0x47, 0x4a, 0x45, 0x47, 0x2f, 0x33, 0x36, 0x3f, 0x42, + 0x38, 0x43, 0x3e, 0x3a, 0x41, 0x3f, 0x5f, 0x3f, 0x48, 0x3a, 0x44, 0x47, + 0x41, 0x3e, 0x57, 0x42, 0x41, 0x33, 0x34, 0x39, 0x42, 0x44, 0x42, 0x3c, + 0x49, 0x34, 0x37, 0x33, 0x47, 0x38, 0x43, 0x3d, 0x43, 0x3e, 0x3e, 0x36, + 0x41, 0x41, 0x37, 0x40, 0x39, 0x3e, 0x3b, 0x3b, 0x3e, 0x41, 0x3d, 0x3b, + 0x43, 0x3e, 0x39, 0x43, 0x2f, 0x3e, 0x33, 0x40, 0x45, 0x47, 0x30, 0x46, + 0x3f, 0x3f, 0x37, 0x42, 0x3d, 0x42, 0x43, 0x37, 0x38, 0x3c, 0x35, 0x34, + 0x41, 0x43, 0x3e, 0x3e, 0x3f, 0x49, 0x35, 0x35, 0x38, 0x36, 0x3a, 0x43, + 0x38, 0x46, 0x48, 0x36, 0x3f, 0x39, 0x3b, 0x3e, 0x48, 0x47, 0x41, 0x34, + 0x3b, 0x3c, 0x37, 0x3e, 0x40, 0x41, 0x3b, 0x3d, 0x43, 0x42, 0x3a, 0x39, + 0x3b, 0x43, 0x38, 0x2b, 0x43, 0x41, 0x48, 0x35, 0x44, 0x44, 0x3e, 0x2c, + 0x46, 0x40, 0x3e, 0x41, 0x38, 0x34, 0x35, 0x37, 0x34, 0x3f, 0x3d, 0x46, + 0x33, 0x3c, 0x3c, 0x2e, 0x3b, 0x45, 0x3d, 0x3e, 0x3a, 0x42, 0x3c, 0x36, + 0x3a, 0x42, 0x39, 0x43, 0x35, 0x39, 0x40, 0x44, 0x47, 0x41, 0x44, 0x3d, + 0x41, 0x3e, 0x38, 0x39, 0x45, 0x3a, 0x35, 0x43, 0x3f, 0x44, 0x41, 0x49, + 0x47, 0x3f, 0x44, 0x40, 0x38, 0x43, 0x40, 0x3e, 0x39, 0x42, 0x32, 0x3b, + 0x42, 0x47, 0x57, 0x37, 0x36, 0x38, 0x43, 0x49, 0x3b, 0x34, 0x54, 0x42, + 0x3d, 0x3f, 0x3e, 0x3b, 0x38, 0x41, 0x43, 0x3a, 0x44, 0x39, 0x34, 0x2c, + 0x38, 0x43, 0x4b, 0x3f, 0x40, 0x3e, 0x32, 0x33, 0x3d, 0x44, 0x45, 0x44, + 0x3e, 0x35, 0x37, 0x39, 0x40, 0x3e, 0x40, 0x3c, 0x34, 0x43, 0x37, 0x40, + 0x39, 0x3e, 0x3d, 0x43, 0x3a, 0x44, 0x43, 0x44, 0x3d, 0x3b, 0x45, 0x3b, + 0x3a, 0x3a, 0x3f, 0x37, 0x43, 0x3b, 0x33, 0x35, 0x40, 0x47, 0x3e, 0x3c, + 0x39, 0x3c, 0x34, 0x29, 0x3c, 0x3e, 0x46, 0x3e, 0x3c, 0x38, 0x3f, 0x2d, + 0x3d, 0x3d, 0x3f, 0x3f, 0x3d, 
0x45, 0x3b, 0x32, 0x39, 0x3f, 0x41, 0x38, + 0x36, 0x3e, 0x3a, 0x35, 0x40, 0x3f, 0x3b, 0x32, 0x3c, 0x39, 0x3e, 0x35, + 0x3e, 0x45, 0x34, 0x38, 0x44, 0x39, 0x3f, 0x31, 0x34, 0x39, 0x3f, 0x38, + 0x44, 0x42, 0x3f, 0x3b, 0x39, 0x3d, 0x39, 0x3b, 0x44, 0x46, 0x38, 0x3d, + 0x45, 0x37, 0x40, 0x3a, 0x3a, 0x39, 0x35, 0x3c, 0x39, 0x40, 0x47, 0x3e, + 0x38, 0x42, 0x41, 0x3b, 0x48, 0x3f, 0x3a, 0x3e, 0x3d, 0x3f, 0x32, 0x3b, + 0x3f, 0x3d, 0x3e, 0x44, 0x43, 0x41, 0x44, 0x47, 0x48, 0x41, 0x41, 0x36, + 0x3a, 0x33, 0x3c, 0x3c, 0x37, 0x3e, 0x40, 0x34, 0x3f, 0x42, 0x53, 0x40, + 0x3f, 0x35, 0x3e, 0x46, 0x3a, 0x3e, 0x4b, 0x41, 0x46, 0x32, 0x39, 0x36, + 0x3b, 0x4f, 0x36, 0x3c, 0x40, 0x3a, 0x40, 0x40, 0x47, 0x3e, 0x49, 0x37, + 0x3f, 0x31, 0x3e, 0x40, 0x3b, 0x3f, 0x43, 0x44, 0x3a, 0x3d, 0x31, 0x41, + 0x41, 0x33, 0x43, 0x40, 0x3c, 0x3a, 0x41, 0x40, 0x37, 0x3f, 0x34, 0x3e, + 0x44, 0x42, 0x3d, 0x3f, 0x3f, 0x34, 0x36, 0x34, 0x31, 0x41, 0x32, 0x39, + 0x3e, 0x3d, 0x42, 0x35, 0x3e, 0x3a, 0x41, 0x47, 0x3d, 0x42, 0x33, 0x32, + 0x43, 0x42, 0x36, 0x41, 0x3e, 0x39, 0x46, 0x39, 0x35, 0x3d, 0x3d, 0x40, + 0x38, 0x44, 0x3d, 0x31, 0x44, 0x39, 0x3a, 0x45, 0x42, 0x41, 0x3d, 0x36, + 0x3f, 0x3c, 0x39, 0x3d, 0x32, 0x39, 0x42, 0x34, 0x3f, 0x38, 0x44, 0x3c, + 0x43, 0x45, 0x41, 0x2d, 0x44, 0x42, 0x3d, 0x3f, 0x44, 0x38, 0x3d, 0x35, + 0x3a, 0x48, 0x40, 0x3b, 0x3d, 0x36, 0x3b, 0x40, 0x3f, 0x3a, 0x3a, 0x3f, + 0x3c, 0x33, 0x39, 0x3c, 0x3c, 0x38, 0x47, 0x36, 0x3d, 0x41, 0x46, 0x41, + 0x34, 0x46, 0x48, 0x46, 0x3d, 0x3c, 0x40, 0x43, 0x3d, 0x41, 0x37, 0x3e, + 0x39, 0x47, 0x3f, 0x39, 0x46, 0x43, 0x3f, 0x41, 0x45, 0x37, 0x40, 0x3a, + 0x3d, 0x44, 0x3f, 0x3b, 0x3b, 0x40, 0x4f, 0x3d, 0x3d, 0x41, 0x3c, 0x43, + 0x3e, 0x46, 0x4e, 0x40, 0x3f, 0x34, 0x48, 0x29, 0x45, 0x44, 0x46, 0x41, + 0x45, 0x32, 0x3e, 0x38, 0x39, 0x3a, 0x3e, 0x3e, 0x4c, 0x34, 0x3c, 0x40, + 0x4a, 0x44, 0x3d, 0x46, 0x3b, 0x3e, 0x42, 0x42, 0x3a, 0x41, 0x43, 0x41, + 0x39, 0x3f, 0x3e, 0x3c, 0x36, 0x48, 0x3f, 0x3e, 0x3e, 0x37, 0x3f, 0x3f, + 0x3b, 0x40, 0x3e, 0x35, 0x32, 
0x35, 0x3f, 0x33, 0x3f, 0x38, 0x43, 0x37, + 0x49, 0x38, 0x37, 0x3c, 0x3c, 0x40, 0x40, 0x3a, 0x3a, 0x46, 0x37, 0x34, + 0x34, 0x3b, 0x3d, 0x2f, 0x3a, 0x38, 0x3d, 0x46, 0x3d, 0x3b, 0x3d, 0x38, + 0x35, 0x37, 0x44, 0x3c, 0x3d, 0x3e, 0x40, 0x3a, 0x40, 0x33, 0x3e, 0x38, + 0x40, 0x3e, 0x45, 0x37, 0x3f, 0x3b, 0x3c, 0x40, 0x3b, 0x3c, 0x3b, 0x33, + 0x41, 0x3f, 0x3b, 0x42, 0x31, 0x3b, 0x3a, 0x39, 0x3d, 0x41, 0x39, 0x40, + 0x43, 0x45, 0x39, 0x3b, 0x3a, 0x42, 0x43, 0x3d, 0x3f, 0x40, 0x47, 0x39, + 0x37, 0x3f, 0x47, 0x3f, 0x45, 0x41, 0x39, 0x3a, 0x41, 0x38, 0x3c, 0x3c, + 0x39, 0x40, 0x39, 0x3b, 0x3b, 0x3e, 0x38, 0x3b, 0x37, 0x48, 0x41, 0x3f, + 0x3e, 0x37, 0x3d, 0x44, 0x3c, 0x3e, 0x40, 0x39, 0x41, 0x42, 0x3d, 0x45, + 0x3b, 0x3e, 0x4c, 0x3b, 0x3a, 0x3a, 0x3e, 0x47, 0x3c, 0x3f, 0x48, 0x3f, + 0x46, 0x3f, 0x39, 0x25, 0x44, 0x3a, 0x3b, 0x40, 0x41, 0x39, 0x39, 0x47, + 0x3b, 0x32, 0x49, 0x42, 0x41, 0x3a, 0x43, 0x41, 0x3e, 0x35, 0x37, 0x3d, + 0x49, 0x40, 0x45, 0x3b, 0x3c, 0x38, 0x48, 0x3c, 0x3c, 0x35, 0x3f, 0x41, + 0x41, 0x4c, 0x36, 0x39, 0x37, 0x3d, 0x3b, 0x3e, 0x44, 0x32, 0x3d, 0x3f, + 0x3a, 0x3b, 0x3a, 0x47, 0x38, 0x42, 0x36, 0x34, 0x43, 0x3f, 0x3e, 0x40, + 0x34, 0x31, 0x36, 0x33, 0x42, 0x37, 0x41, 0x41, 0x40, 0x3d, 0x3d, 0x37, + 0x43, 0x3a, 0x3e, 0x44, 0x43, 0x3c, 0x35, 0x38, 0x38, 0x3c, 0x43, 0x36, + 0x3a, 0x38, 0x40, 0x3f, 0x3d, 0x3e, 0x37, 0x3b, 0x41, 0x3a, 0x3b, 0x3d, + 0x3c, 0x41, 0x3c, 0x41, 0x47, 0x3f, 0x3f, 0x3b, 0x3d, 0x3f, 0x3b, 0x45, + 0x38, 0x38, 0x40, 0x38, 0x46, 0x42, 0x39, 0x3d, 0x3d, 0x3b, 0x42, 0x36, + 0x42, 0x41, 0x3e, 0x3e, 0x36, 0x3f, 0x37, 0x3f, 0x36, 0x48, 0x3b, 0x39, + 0x3d, 0x3f, 0x43, 0x3e, 0x3c, 0x40, 0x48, 0x46, 0x43, 0x36, 0x42, 0x39, + 0x46, 0x3c, 0x37, 0x38, 0x49, 0x37, 0x36, 0x39, 0x3e, 0x42, 0x48, 0x3a, + 0x3c, 0x3e, 0x42, 0x30, 0x3e, 0x34, 0x39, 0x3b, 0x46, 0x61, 0x46, 0x1e, + 0x4c, 0x3b, 0x40, 0x2d, 0x3c, 0x42, 0x32, 0x30, 0x49, 0x3e, 0x39, 0x34, + 0x30, 0x40, 0x31, 0x38, 0x40, 0x3d, 0x3c, 0x35, 0x3a, 0x36, 0x40, 0x3b, + 0x41, 0x40, 0x3b, 0x39, 0x37, 
0x37, 0x3f, 0x3b, 0x3c, 0x3a, 0x40, 0x3a, + 0x36, 0x3c, 0x42, 0x39, 0x3e, 0x36, 0x40, 0x42, 0x39, 0x40, 0x3b, 0x34, + 0x37, 0x33, 0x36, 0x3f, 0x43, 0x33, 0x33, 0x27, 0x3d, 0x46, 0x40, 0x31, + 0x38, 0x3e, 0x41, 0x20, 0x3f, 0x39, 0x42, 0x35, 0x35, 0x45, 0x40, 0x1e, + 0x32, 0x35, 0x32, 0x3c, 0x35, 0x44, 0x46, 0x29, 0x3a, 0x3d, 0x37, 0x42, + 0x3b, 0x45, 0x3a, 0x26, 0x38, 0x40, 0x30, 0x37, 0x41, 0x40, 0x39, 0x2b, + 0x49, 0x3f, 0x43, 0x43, 0x40, 0x3a, 0x38, 0x29, 0x43, 0x3a, 0x37, 0x40, + 0x3f, 0x35, 0x3a, 0x28, 0x36, 0x3e, 0x3f, 0x43, 0x3c, 0x39, 0x42, 0x2c, + 0x38, 0x42, 0x38, 0x3d, 0x42, 0x38, 0x35, 0x2d, 0x34, 0x38, 0x3d, 0x43, + 0x46, 0x3e, 0x3c, 0x27, 0x3e, 0x40, 0x46, 0x39, 0x35, 0x3d, 0x42, 0x35, + 0x42, 0x36, 0x40, 0x3e, 0x3a, 0x3e, 0x3c, 0x37, 0x3a, 0x3c, 0x48, 0x48, + 0x48, 0x37, 0x3d, 0x38, 0x4b, 0x40, 0x43, 0x3b, 0x41, 0x46, 0x3c, 0x34, + 0x46, 0x3c, 0x3c, 0x3c, 0x4b, 0x64, 0x4a, 0x22, 0x52, 0x41, 0x42, 0x3b, + 0x42, 0x4a, 0x34, 0x37, 0x4b, 0x44, 0x3b, 0x4a, 0x38, 0x3f, 0x38, 0x3a, + 0x40, 0x41, 0x42, 0x3c, 0x33, 0x3e, 0x3c, 0x42, 0x2c, 0x4e, 0x47, 0x3f, + 0x38, 0x33, 0x39, 0x3f, 0x3b, 0x45, 0x37, 0x3a, 0x42, 0x42, 0x44, 0x3f, + 0x3c, 0x3c, 0x3e, 0x3d, 0x3c, 0x3c, 0x40, 0x2c, 0x3c, 0x3d, 0x42, 0x39, + 0x3a, 0x37, 0x43, 0x2a, 0x3d, 0x40, 0x41, 0x41, 0x46, 0x46, 0x42, 0x28, + 0x39, 0x3c, 0x37, 0x44, 0x46, 0x41, 0x47, 0x2b, 0x44, 0x33, 0x39, 0x3f, + 0x3f, 0x43, 0x3d, 0x23, 0x3a, 0x43, 0x41, 0x3b, 0x41, 0x42, 0x33, 0x1f, + 0x43, 0x3e, 0x3d, 0x40, 0x37, 0x33, 0x42, 0x28, 0x3b, 0x38, 0x37, 0x3c, + 0x34, 0x40, 0x44, 0x2a, 0x3c, 0x3a, 0x41, 0x37, 0x45, 0x3f, 0x3e, 0x26, + 0x41, 0x40, 0x35, 0x3d, 0x45, 0x3e, 0x3d, 0x29, 0x3c, 0x39, 0x3f, 0x3c, + 0x3d, 0x39, 0x38, 0x2d, 0x39, 0x38, 0x38, 0x44, 0x3c, 0x3e, 0x38, 0x26, + 0x40, 0x36, 0x39, 0x38, 0x3f, 0x32, 0x39, 0x35, 0x3d, 0x3e, 0x35, 0x3a, + 0x3f, 0x3f, 0x31, 0x35, 0x34, 0x45, 0x3e, 0x43, 0x48, 0x3b, 0x37, 0x39, + 0x4d, 0x46, 0x54, 0x40, 0x41, 0x4e, 0x3d, 0x38, 0x4d, 0x38, 0x3a, 0x3b, + 0x49, 0x5a, 0x4a, 0x1e, 0x5e, 
0x39, 0x38, 0x37, 0x3a, 0x51, 0x3a, 0x3c, + 0x50, 0x3f, 0x40, 0x42, 0x33, 0x3b, 0x2e, 0x4a, 0x3f, 0x4a, 0x3b, 0x43, + 0x36, 0x3e, 0x3d, 0x42, 0x39, 0x46, 0x4b, 0x3c, 0x3b, 0x3b, 0x35, 0x3e, + 0x3d, 0x4b, 0x3f, 0x41, 0x3f, 0x3b, 0x42, 0x42, 0x38, 0x3a, 0x41, 0x3d, + 0x36, 0x41, 0x37, 0x2f, 0x38, 0x37, 0x3f, 0x34, 0x35, 0x35, 0x45, 0x30, + 0x31, 0x42, 0x31, 0x3a, 0x3a, 0x3e, 0x3d, 0x23, 0x3f, 0x43, 0x3b, 0x41, + 0x35, 0x3b, 0x40, 0x25, 0x45, 0x3e, 0x42, 0x3b, 0x31, 0x40, 0x36, 0x28, + 0x43, 0x42, 0x30, 0x42, 0x32, 0x32, 0x36, 0x2c, 0x35, 0x3a, 0x3d, 0x3a, + 0x3c, 0x36, 0x3e, 0x30, 0x41, 0x42, 0x38, 0x41, 0x41, 0x3e, 0x3c, 0x23, + 0x37, 0x40, 0x3c, 0x3e, 0x3e, 0x3a, 0x37, 0x2b, 0x36, 0x40, 0x41, 0x42, + 0x3e, 0x38, 0x44, 0x22, 0x46, 0x38, 0x33, 0x3b, 0x3a, 0x3a, 0x3a, 0x24, + 0x36, 0x3b, 0x38, 0x44, 0x34, 0x38, 0x40, 0x28, 0x38, 0x3d, 0x36, 0x44, + 0x31, 0x3e, 0x37, 0x37, 0x36, 0x3f, 0x47, 0x38, 0x3b, 0x3e, 0x2c, 0x4c, + 0x36, 0x3c, 0x3b, 0x41, 0x4c, 0x3d, 0x3d, 0x40, 0x49, 0x44, 0x52, 0x3f, + 0x3b, 0x4d, 0x3c, 0x3a, 0x4f, 0x3b, 0x36, 0x3b, 0x4a, 0x5f, 0x4e, 0x1f, + 0x57, 0x3c, 0x3d, 0x3d, 0x46, 0x59, 0x42, 0x45, 0x52, 0x3d, 0x3a, 0x41, + 0x31, 0x39, 0x39, 0x4f, 0x43, 0x4e, 0x3e, 0x37, 0x3a, 0x37, 0x33, 0x47, + 0x32, 0x45, 0x47, 0x43, 0x31, 0x33, 0x38, 0x43, 0x3e, 0x47, 0x3d, 0x32, + 0x3b, 0x39, 0x3c, 0x42, 0x3d, 0x47, 0x42, 0x40, 0x3d, 0x3f, 0x3c, 0x34, + 0x3b, 0x3e, 0x42, 0x3d, 0x43, 0x35, 0x42, 0x2c, 0x35, 0x3d, 0x3c, 0x3d, + 0x3a, 0x3c, 0x46, 0x25, 0x43, 0x35, 0x3d, 0x39, 0x3a, 0x3c, 0x40, 0x2b, + 0x33, 0x40, 0x3d, 0x46, 0x45, 0x37, 0x3c, 0x36, 0x43, 0x37, 0x3e, 0x3a, + 0x3c, 0x47, 0x3f, 0x38, 0x36, 0x3e, 0x3a, 0x42, 0x3c, 0x42, 0x33, 0x39, + 0x3c, 0x3a, 0x3c, 0x40, 0x48, 0x3b, 0x40, 0x32, 0x37, 0x47, 0x34, 0x38, + 0x33, 0x3d, 0x49, 0x2d, 0x36, 0x42, 0x3d, 0x3e, 0x47, 0x3c, 0x42, 0x2c, + 0x3b, 0x31, 0x3f, 0x3c, 0x3d, 0x3c, 0x3f, 0x2b, 0x41, 0x35, 0x33, 0x43, + 0x47, 0x39, 0x34, 0x2a, 0x3a, 0x3a, 0x40, 0x3d, 0x44, 0x3c, 0x39, 0x34, + 0x43, 0x40, 0x33, 0x3a, 0x3b, 
0x42, 0x38, 0x3b, 0x34, 0x35, 0x40, 0x43, + 0x4b, 0x41, 0x3d, 0x38, 0x49, 0x44, 0x4d, 0x37, 0x3a, 0x4b, 0x40, 0x39, + 0x4e, 0x3b, 0x30, 0x38, 0x47, 0x5d, 0x50, 0x1f, 0x54, 0x35, 0x3a, 0x39, + 0x40, 0x4c, 0x46, 0x42, 0x52, 0x39, 0x39, 0x45, 0x41, 0x3c, 0x30, 0x5b, + 0x43, 0x4d, 0x4a, 0x3e, 0x31, 0x39, 0x41, 0x4c, 0x36, 0x44, 0x4c, 0x39, + 0x32, 0x41, 0x47, 0x3e, 0x34, 0x49, 0x45, 0x3b, 0x34, 0x3a, 0x3b, 0x47, + 0x43, 0x3e, 0x43, 0x32, 0x40, 0x3e, 0x3e, 0x38, 0x37, 0x3e, 0x37, 0x3a, + 0x3a, 0x40, 0x48, 0x2f, 0x3e, 0x3e, 0x46, 0x3a, 0x3e, 0x35, 0x49, 0x30, + 0x3a, 0x41, 0x3e, 0x39, 0x34, 0x45, 0x3d, 0x34, 0x48, 0x43, 0x43, 0x42, + 0x33, 0x39, 0x3b, 0x3f, 0x30, 0x46, 0x41, 0x39, 0x48, 0x3a, 0x3c, 0x3e, + 0x3f, 0x36, 0x40, 0x3d, 0x43, 0x40, 0x3e, 0x39, 0x44, 0x40, 0x44, 0x3b, + 0x43, 0x42, 0x39, 0x38, 0x3a, 0x3f, 0x3b, 0x3f, 0x38, 0x3d, 0x34, 0x30, + 0x34, 0x3d, 0x3f, 0x42, 0x44, 0x3e, 0x34, 0x32, 0x37, 0x46, 0x44, 0x38, + 0x3c, 0x45, 0x39, 0x2b, 0x41, 0x3c, 0x40, 0x40, 0x3a, 0x3a, 0x3c, 0x32, + 0x45, 0x42, 0x3d, 0x46, 0x38, 0x3b, 0x34, 0x35, 0x38, 0x43, 0x3d, 0x34, + 0x42, 0x3b, 0x38, 0x3d, 0x37, 0x43, 0x3f, 0x39, 0x4e, 0x39, 0x40, 0x3f, + 0x4d, 0x43, 0x49, 0x3f, 0x36, 0x41, 0x44, 0x39, 0x48, 0x3a, 0x35, 0x39, + 0x48, 0x59, 0x4e, 0x25, 0x58, 0x39, 0x42, 0x35, 0x43, 0x4e, 0x42, 0x3f, + 0x4a, 0x43, 0x3b, 0x3f, 0x3b, 0x37, 0x2b, 0x5a, 0x3d, 0x44, 0x3b, 0x40, + 0x31, 0x38, 0x37, 0x44, 0x32, 0x3e, 0x41, 0x3d, 0x2c, 0x42, 0x42, 0x3c, + 0x37, 0x45, 0x41, 0x41, 0x3d, 0x39, 0x41, 0x40, 0x3a, 0x46, 0x41, 0x40, + 0x40, 0x3d, 0x38, 0x31, 0x37, 0x3f, 0x42, 0x38, 0x3f, 0x3c, 0x48, 0x30, + 0x3e, 0x39, 0x3f, 0x3d, 0x3d, 0x44, 0x52, 0x35, 0x3b, 0x32, 0x42, 0x32, + 0x3a, 0x43, 0x39, 0x3b, 0x31, 0x43, 0x36, 0x3c, 0x3c, 0x3c, 0x41, 0x45, + 0x42, 0x49, 0x41, 0x3b, 0x42, 0x3e, 0x41, 0x44, 0x36, 0x41, 0x3f, 0x3c, + 0x3e, 0x47, 0x45, 0x41, 0x38, 0x41, 0x3f, 0x43, 0x35, 0x32, 0x41, 0x39, + 0x36, 0x47, 0x35, 0x42, 0x44, 0x3b, 0x3f, 0x34, 0x48, 0x41, 0x43, 0x42, + 0x36, 0x3e, 0x3c, 0x3d, 0x3d, 
0x3b, 0x42, 0x44, 0x3a, 0x44, 0x36, 0x2a, + 0x41, 0x39, 0x3a, 0x41, 0x46, 0x3c, 0x44, 0x2f, 0x36, 0x39, 0x3b, 0x3f, + 0x38, 0x45, 0x3c, 0x3c, 0x3e, 0x41, 0x3c, 0x39, 0x3e, 0x40, 0x2f, 0x45, + 0x3b, 0x41, 0x40, 0x3c, 0x4e, 0x38, 0x3e, 0x48, 0x46, 0x40, 0x48, 0x44, + 0x40, 0x4a, 0x45, 0x3c, 0x4f, 0x39, 0x37, 0x3a, 0x4e, 0x59, 0x5c, 0x22, + 0x58, 0x32, 0x38, 0x34, 0x40, 0x4b, 0x43, 0x43, 0x4f, 0x3e, 0x39, 0x40, + 0x37, 0x3e, 0x2f, 0x55, 0x3f, 0x40, 0x38, 0x3f, 0x3a, 0x33, 0x37, 0x3d, + 0x34, 0x4c, 0x37, 0x3f, 0x32, 0x39, 0x45, 0x34, 0x44, 0x4c, 0x3f, 0x3b, + 0x3c, 0x36, 0x36, 0x43, 0x36, 0x47, 0x41, 0x46, 0x41, 0x3e, 0x41, 0x3a, + 0x43, 0x3a, 0x48, 0x42, 0x42, 0x3e, 0x4c, 0x36, 0x3d, 0x39, 0x43, 0x46, + 0x3d, 0x42, 0x42, 0x3b, 0x45, 0x43, 0x3c, 0x40, 0x39, 0x37, 0x34, 0x45, + 0x3f, 0x40, 0x34, 0x38, 0x43, 0x3f, 0x36, 0x47, 0x3f, 0x3b, 0x49, 0x3c, + 0x3a, 0x3a, 0x42, 0x4c, 0x37, 0x3e, 0x3b, 0x32, 0x47, 0x40, 0x45, 0x4d, + 0x39, 0x3b, 0x39, 0x40, 0x3e, 0x3c, 0x3d, 0x3a, 0x3d, 0x3b, 0x3e, 0x43, + 0x3e, 0x3f, 0x3a, 0x3c, 0x41, 0x40, 0x39, 0x3c, 0x3a, 0x38, 0x39, 0x37, + 0x36, 0x33, 0x43, 0x45, 0x3f, 0x45, 0x41, 0x30, 0x3b, 0x34, 0x3c, 0x39, + 0x3b, 0x45, 0x37, 0x2e, 0x36, 0x34, 0x36, 0x44, 0x3d, 0x40, 0x3a, 0x3c, + 0x3d, 0x3b, 0x38, 0x41, 0x42, 0x3a, 0x32, 0x4b, 0x38, 0x3e, 0x41, 0x46, + 0x57, 0x3a, 0x44, 0x48, 0x47, 0x45, 0x47, 0x3e, 0x43, 0x42, 0x45, 0x3b, + 0x50, 0x39, 0x37, 0x3f, 0x47, 0x51, 0x5e, 0x22, 0x59, 0x33, 0x3c, 0x37, + 0x43, 0x50, 0x49, 0x47, 0x46, 0x42, 0x39, 0x44, 0x44, 0x3d, 0x2f, 0x53, + 0x35, 0x41, 0x40, 0x3d, 0x2d, 0x35, 0x2f, 0x3e, 0x3f, 0x37, 0x38, 0x3e, + 0x30, 0x45, 0x46, 0x38, 0x33, 0x3c, 0x3e, 0x3b, 0x44, 0x42, 0x47, 0x49, + 0x43, 0x40, 0x3d, 0x3c, 0x38, 0x43, 0x3e, 0x38, 0x3d, 0x40, 0x36, 0x43, + 0x43, 0x3e, 0x40, 0x3c, 0x44, 0x47, 0x43, 0x3d, 0x41, 0x39, 0x3e, 0x45, + 0x39, 0x3d, 0x39, 0x40, 0x42, 0x40, 0x3b, 0x4a, 0x40, 0x41, 0x3f, 0x37, + 0x43, 0x41, 0x37, 0x4c, 0x3f, 0x3d, 0x38, 0x3a, 0x42, 0x46, 0x43, 0x4d, + 0x3c, 0x3a, 0x43, 0x3e, 0x3b, 
0x3d, 0x46, 0x4a, 0x38, 0x3d, 0x3d, 0x39, + 0x3e, 0x3c, 0x3b, 0x3e, 0x3a, 0x40, 0x40, 0x34, 0x41, 0x3f, 0x3e, 0x3f, + 0x47, 0x3c, 0x32, 0x3a, 0x3c, 0x44, 0x3f, 0x42, 0x41, 0x43, 0x3e, 0x3a, + 0x3b, 0x42, 0x41, 0x39, 0x39, 0x37, 0x39, 0x3e, 0x3d, 0x33, 0x3e, 0x35, + 0x44, 0x37, 0x40, 0x35, 0x3f, 0x47, 0x37, 0x41, 0x35, 0x38, 0x47, 0x40, + 0x43, 0x44, 0x2e, 0x48, 0x35, 0x44, 0x41, 0x3c, 0x47, 0x3d, 0x3d, 0x52, + 0x48, 0x41, 0x44, 0x41, 0x42, 0x4b, 0x3e, 0x3d, 0x4e, 0x32, 0x34, 0x47, + 0x55, 0x57, 0x5f, 0x22, 0x57, 0x33, 0x40, 0x37, 0x40, 0x4a, 0x4d, 0x47, + 0x48, 0x38, 0x3e, 0x46, 0x37, 0x42, 0x28, 0x57, 0x38, 0x42, 0x36, 0x43, + 0x35, 0x37, 0x39, 0x39, 0x42, 0x39, 0x38, 0x3c, 0x35, 0x3c, 0x3c, 0x3a, + 0x3c, 0x4c, 0x45, 0x3f, 0x43, 0x3d, 0x45, 0x45, 0x40, 0x47, 0x3e, 0x3e, + 0x3d, 0x4b, 0x49, 0x35, 0x43, 0x3c, 0x36, 0x46, 0x3c, 0x46, 0x42, 0x44, + 0x3c, 0x42, 0x3d, 0x42, 0x44, 0x3c, 0x4a, 0x40, 0x40, 0x3c, 0x3b, 0x3c, + 0x35, 0x34, 0x2e, 0x46, 0x38, 0x3d, 0x38, 0x44, 0x41, 0x40, 0x3c, 0x52, + 0x3b, 0x3d, 0x3b, 0x3f, 0x42, 0x47, 0x44, 0x52, 0x44, 0x44, 0x39, 0x3f, + 0x43, 0x35, 0x3c, 0x4d, 0x39, 0x3d, 0x3b, 0x37, 0x3e, 0x38, 0x3e, 0x49, + 0x3a, 0x37, 0x3c, 0x49, 0x40, 0x41, 0x3c, 0x40, 0x3d, 0x38, 0x39, 0x3f, + 0x44, 0x3e, 0x42, 0x3e, 0x47, 0x40, 0x34, 0x46, 0x48, 0x37, 0x45, 0x3e, + 0x46, 0x3f, 0x35, 0x39, 0x38, 0x3f, 0x36, 0x2c, 0x40, 0x38, 0x3e, 0x3c, + 0x32, 0x3c, 0x46, 0x3a, 0x3f, 0x41, 0x36, 0x49, 0x42, 0x38, 0x36, 0x43, + 0x3d, 0x41, 0x46, 0x35, 0x4f, 0x3a, 0x41, 0x5c, 0x4a, 0x42, 0x4e, 0x42, + 0x46, 0x54, 0x3f, 0x45, 0x4c, 0x30, 0x33, 0x44, 0x56, 0x5d, 0x68, 0x26, + 0x60, 0x33, 0x3e, 0x3a, 0x42, 0x49, 0x52, 0x47, 0x51, 0x46, 0x40, 0x47, + 0x41, 0x3b, 0x1b, 0x4f, 0x3c, 0x45, 0x3d, 0x3d, 0x32, 0x2f, 0x3e, 0x3c, + 0x3c, 0x3f, 0x3b, 0x3c, 0x2c, 0x3a, 0x41, 0x3c, 0x35, 0x3e, 0x3e, 0x3c, + 0x3d, 0x3f, 0x3e, 0x40, 0x40, 0x44, 0x42, 0x3c, 0x3c, 0x3c, 0x41, 0x3c, + 0x3c, 0x3d, 0x3e, 0x3d, 0x3c, 0x3d, 0x4a, 0x46, 0x3f, 0x35, 0x33, 0x43, + 0x42, 0x41, 0x4d, 0x48, 0x48, 
0x44, 0x3e, 0x41, 0x41, 0x36, 0x3c, 0x4c, + 0x34, 0x47, 0x42, 0x39, 0x3e, 0x43, 0x3a, 0x53, 0x3b, 0x3b, 0x42, 0x3d, + 0x41, 0x3c, 0x3e, 0x52, 0x3a, 0x44, 0x34, 0x43, 0x3d, 0x3d, 0x3a, 0x50, + 0x3e, 0x33, 0x41, 0x40, 0x3f, 0x38, 0x43, 0x42, 0x3b, 0x37, 0x3e, 0x43, + 0x3f, 0x3c, 0x41, 0x49, 0x40, 0x32, 0x40, 0x3e, 0x3b, 0x3e, 0x44, 0x3c, + 0x35, 0x37, 0x3d, 0x41, 0x34, 0x3f, 0x3a, 0x3c, 0x47, 0x32, 0x41, 0x3d, + 0x3c, 0x3a, 0x4a, 0x31, 0x43, 0x38, 0x45, 0x37, 0x49, 0x3c, 0x34, 0x3f, + 0x3d, 0x3d, 0x3d, 0x45, 0x47, 0x3e, 0x37, 0x48, 0x40, 0x3b, 0x45, 0x3d, + 0x4e, 0x42, 0x3f, 0x57, 0x4b, 0x43, 0x4b, 0x3d, 0x3f, 0x47, 0x4a, 0x43, + 0x4e, 0x30, 0x38, 0x45, 0x59, 0x60, 0x64, 0x2d, 0x5a, 0x2d, 0x34, 0x35, + 0x47, 0x54, 0x4e, 0x3f, 0x44, 0x45, 0x3c, 0x43, 0x3d, 0x40, 0x1c, 0x5a, + 0x36, 0x3f, 0x3a, 0x39, 0x37, 0x3c, 0x32, 0x3b, 0x2d, 0x4a, 0x42, 0x35, + 0x30, 0x41, 0x43, 0x3d, 0x3d, 0x45, 0x38, 0x36, 0x3e, 0x40, 0x3a, 0x4a, + 0x34, 0x3d, 0x44, 0x3c, 0x39, 0x3b, 0x52, 0x38, 0x40, 0x3b, 0x3f, 0x3f, + 0x35, 0x37, 0x46, 0x48, 0x38, 0x3b, 0x40, 0x36, 0x3d, 0x3a, 0x4f, 0x45, + 0x35, 0x3a, 0x35, 0x33, 0x37, 0x43, 0x42, 0x52, 0x37, 0x3b, 0x3d, 0x42, + 0x44, 0x3d, 0x48, 0x58, 0x33, 0x3f, 0x41, 0x44, 0x44, 0x3f, 0x3b, 0x52, + 0x47, 0x39, 0x32, 0x3b, 0x38, 0x35, 0x48, 0x50, 0x34, 0x30, 0x39, 0x43, + 0x42, 0x40, 0x3b, 0x4b, 0x43, 0x3d, 0x34, 0x44, 0x33, 0x39, 0x44, 0x4b, + 0x45, 0x3e, 0x3c, 0x3f, 0x3a, 0x3e, 0x3c, 0x45, 0x36, 0x3e, 0x3d, 0x40, + 0x43, 0x46, 0x37, 0x3d, 0x3b, 0x42, 0x43, 0x3f, 0x3a, 0x41, 0x48, 0x2f, + 0x3e, 0x39, 0x3a, 0x39, 0x3f, 0x3a, 0x41, 0x40, 0x40, 0x3c, 0x3b, 0x3b, + 0x3f, 0x40, 0x3e, 0x42, 0x38, 0x3f, 0x38, 0x3c, 0x49, 0x45, 0x3f, 0x62, + 0x55, 0x47, 0x4c, 0x3c, 0x3c, 0x4a, 0x4c, 0x46, 0x4f, 0x39, 0x3a, 0x3b, + 0x5e, 0x58, 0x6f, 0x2b, 0x5a, 0x2f, 0x3a, 0x35, 0x4b, 0x47, 0x4a, 0x46, + 0x45, 0x3e, 0x38, 0x4f, 0x3b, 0x3d, 0x21, 0x4b, 0x3d, 0x40, 0x37, 0x40, + 0x2d, 0x2c, 0x43, 0x3f, 0x2b, 0x3e, 0x3d, 0x39, 0x2f, 0x39, 0x44, 0x3c, + 0x39, 0x39, 0x43, 0x3b, 0x3d, 
0x3b, 0x44, 0x39, 0x42, 0x42, 0x3e, 0x40, + 0x3b, 0x42, 0x53, 0x40, 0x32, 0x3d, 0x35, 0x3f, 0x3d, 0x45, 0x48, 0x46, + 0x3d, 0x43, 0x3c, 0x36, 0x35, 0x39, 0x3d, 0x4a, 0x39, 0x39, 0x3e, 0x41, + 0x38, 0x36, 0x3b, 0x53, 0x3c, 0x36, 0x32, 0x3b, 0x43, 0x3d, 0x42, 0x57, + 0x35, 0x2f, 0x38, 0x40, 0x2f, 0x3d, 0x3c, 0x4c, 0x40, 0x2f, 0x3a, 0x36, + 0x39, 0x3c, 0x3a, 0x51, 0x3d, 0x37, 0x39, 0x3c, 0x42, 0x40, 0x43, 0x52, + 0x3e, 0x42, 0x3e, 0x45, 0x36, 0x34, 0x42, 0x4b, 0x3a, 0x38, 0x37, 0x3f, + 0x36, 0x41, 0x3a, 0x45, 0x3e, 0x38, 0x35, 0x41, 0x35, 0x34, 0x37, 0x3c, + 0x3f, 0x31, 0x3c, 0x35, 0x33, 0x43, 0x36, 0x28, 0x44, 0x42, 0x3e, 0x42, + 0x3a, 0x41, 0x43, 0x35, 0x3d, 0x3f, 0x40, 0x3e, 0x3d, 0x33, 0x31, 0x41, + 0x3d, 0x40, 0x3b, 0x40, 0x51, 0x40, 0x3f, 0xfb, 0x51, 0x49, 0x4c, 0x3d, + 0x44, 0x4e, 0x47, 0x42, 0x50, 0x39, 0x39, 0x40, 0x59, 0x5d, 0x70, 0x2c, + 0x59, 0x39, 0x38, 0x2f, 0x46, 0x50, 0x51, 0x47, 0x4c, 0x3c, 0x39, 0x48, + 0x44, 0x3a, 0x1a, 0x51, 0x35, 0x3e, 0x34, 0x3a, 0x3d, 0x2b, 0x41, 0x39, + 0x37, 0x4d, 0x3e, 0x43, 0x38, 0x3b, 0x3a, 0x35, 0x36, 0x3a, 0x43, 0x39, + 0x39, 0x3a, 0x46, 0x3b, 0x39, 0x3c, 0x46, 0x36, 0x3e, 0x3d, 0x4b, 0x3d, + 0x3b, 0x46, 0x3a, 0x41, 0x31, 0x3c, 0x44, 0x4a, 0x37, 0x42, 0x39, 0x43, + 0x43, 0x3e, 0x40, 0x47, 0x3c, 0x3e, 0x3b, 0x43, 0x34, 0x3a, 0x43, 0x53, + 0x3f, 0x37, 0x39, 0x37, 0x3e, 0x3b, 0x46, 0x59, 0x37, 0x37, 0x33, 0x3d, + 0x38, 0x42, 0x36, 0x58, 0x2e, 0x32, 0x2b, 0x45, 0x32, 0x33, 0x36, 0x50, + 0x41, 0x3f, 0x37, 0x3d, 0x3f, 0x3d, 0x46, 0x49, 0x41, 0x38, 0x33, 0x3d, + 0x33, 0x32, 0x3a, 0x49, 0x41, 0x41, 0x3d, 0x33, 0x3b, 0x3b, 0x3a, 0x46, + 0x34, 0x44, 0x3f, 0x3b, 0x2f, 0x3f, 0x32, 0x3c, 0x3f, 0x43, 0x3e, 0x45, + 0x3a, 0x3c, 0x43, 0x26, 0x46, 0x37, 0x38, 0x3e, 0x36, 0x31, 0x3e, 0x34, + 0x39, 0x3a, 0x38, 0x42, 0x38, 0x3e, 0x32, 0x42, 0x37, 0x37, 0x3c, 0x3a, + 0x48, 0x44, 0x3a, 0x68, 0x56, 0x46, 0x4d, 0x47, 0x40, 0x4e, 0x42, 0x46, + 0x51, 0x40, 0x38, 0x43, 0x58, 0x5d, 0x6a, 0x31, 0x57, 0x32, 0x3c, 0x36, + 0x49, 0x56, 0x52, 0x48, 0x4b, 
0x41, 0x2f, 0x4d, 0x31, 0x43, 0x1b, 0x4c, + 0x30, 0x44, 0x33, 0x36, 0x2c, 0x3d, 0x45, 0x3a, 0x35, 0x46, 0x3d, 0x39, + 0x2e, 0x38, 0x3f, 0x37, 0x41, 0x44, 0x46, 0x31, 0x33, 0x46, 0x37, 0x37, + 0x3f, 0x41, 0x45, 0x30, 0x46, 0x3b, 0x50, 0x3b, 0x40, 0x39, 0x42, 0x43, + 0x35, 0x37, 0x40, 0x44, 0x3b, 0x41, 0x3d, 0x37, 0x3a, 0x41, 0x3d, 0x46, + 0x36, 0x41, 0x38, 0x41, 0x38, 0x3d, 0x45, 0x58, 0x3d, 0x3a, 0x3d, 0x44, + 0x45, 0x38, 0x48, 0x5c, 0x3d, 0x39, 0x43, 0x45, 0x41, 0x3e, 0x4a, 0x56, + 0x40, 0x33, 0x30, 0x31, 0x42, 0x39, 0x38, 0x56, 0x30, 0x3a, 0x35, 0x3e, + 0x3f, 0x38, 0x36, 0x47, 0x3c, 0x3a, 0x3d, 0x3f, 0x37, 0x35, 0x3b, 0x4d, + 0x43, 0x36, 0x39, 0x37, 0x3e, 0x42, 0x3d, 0x3f, 0x40, 0x3f, 0x34, 0x3b, + 0x3f, 0x3e, 0x3b, 0x39, 0x3b, 0x3a, 0x3a, 0x3c, 0x34, 0x3f, 0x3c, 0x2a, + 0x49, 0x3b, 0x36, 0x3c, 0x35, 0x46, 0x38, 0x3b, 0x3c, 0x39, 0x38, 0x42, + 0x39, 0x36, 0x2e, 0x4a, 0x3d, 0x39, 0x3f, 0x3f, 0x4b, 0x45, 0x3e, 0x67, + 0x4b, 0x4b, 0x49, 0x3e, 0x3f, 0x53, 0x4c, 0x55, 0x47, 0x32, 0x3b, 0x39, + 0x54, 0x5b, 0x6f, 0x29, 0x5a, 0x34, 0x3e, 0x26, 0x45, 0x52, 0x59, 0x44, + 0x59, 0x39, 0x3c, 0x47, 0x36, 0x46, 0x16, 0x50, 0x32, 0x46, 0x34, 0x35, + 0x35, 0x2d, 0x39, 0x38, 0x2c, 0x42, 0x43, 0x3b, 0x32, 0x3f, 0x37, 0x2f, + 0x34, 0x43, 0x46, 0x3b, 0x3b, 0x41, 0x3c, 0x37, 0x3e, 0x43, 0x4b, 0x36, + 0x3e, 0x3c, 0x4c, 0x42, 0x40, 0x3f, 0x49, 0x40, 0x3c, 0x40, 0x3c, 0x48, + 0x35, 0x42, 0x3f, 0x42, 0x44, 0x40, 0x45, 0x4f, 0x3f, 0x3f, 0x40, 0x42, + 0x3b, 0x3d, 0x49, 0x55, 0x42, 0x39, 0x41, 0x3b, 0x3f, 0x38, 0x44, 0x60, + 0x34, 0x40, 0x3b, 0x3b, 0x35, 0x3d, 0x41, 0x4e, 0x35, 0x33, 0x30, 0x3a, + 0x3a, 0x32, 0x42, 0x4f, 0x33, 0x34, 0x2f, 0x38, 0x49, 0x38, 0x40, 0x4c, + 0x35, 0x38, 0x3e, 0x46, 0x3f, 0x3a, 0x3a, 0x45, 0x3b, 0x34, 0x2e, 0x39, + 0x32, 0x3e, 0x40, 0x48, 0x35, 0x44, 0x3a, 0x34, 0x3f, 0x35, 0x3b, 0x32, + 0x40, 0x43, 0x3e, 0x38, 0x3b, 0x43, 0x3c, 0x2b, 0x46, 0x43, 0x40, 0x32, + 0x42, 0x3b, 0x49, 0x2e, 0x3b, 0x3a, 0x3e, 0x41, 0x3c, 0x3f, 0x31, 0x3b, + 0x41, 0x33, 0x41, 0x3c, 0x4d, 
0x40, 0x38, 0x68, 0x4c, 0x4c, 0x4e, 0x3f, + 0x3f, 0x54, 0x4a, 0x3d, 0x4c, 0x33, 0x3b, 0x3a, 0x5d, 0x60, 0x71, 0x2b, + 0x59, 0x33, 0x3c, 0x2c, 0x47, 0x52, 0x4f, 0x51, 0x56, 0x3d, 0x39, 0x44, + 0x35, 0x41, 0x1b, 0x4a, 0x35, 0x41, 0x37, 0x35, 0x2c, 0x35, 0x37, 0x35, + 0x38, 0x41, 0x38, 0x3e, 0x3c, 0x40, 0x3c, 0x2f, 0x38, 0x3e, 0x3f, 0x45, + 0x40, 0x3d, 0x3c, 0x35, 0x3c, 0x46, 0x43, 0x39, 0x37, 0x42, 0x4e, 0x3c, + 0x42, 0x46, 0x37, 0x33, 0x43, 0x3f, 0x47, 0x4a, 0x3d, 0x3e, 0x40, 0x40, + 0x40, 0x3f, 0x4b, 0x54, 0x36, 0x3f, 0x37, 0x40, 0x39, 0x39, 0x47, 0x51, + 0x3d, 0x39, 0x36, 0x36, 0x40, 0x40, 0x41, 0x5a, 0x38, 0x39, 0x42, 0x38, + 0x40, 0x39, 0x43, 0x50, 0x3a, 0x3a, 0x32, 0x3c, 0x3c, 0x35, 0x44, 0x4a, + 0x37, 0x35, 0x36, 0x3c, 0x35, 0x30, 0x48, 0x4b, 0x3c, 0x33, 0x37, 0x3e, + 0x42, 0x3c, 0x42, 0x4e, 0x41, 0x32, 0x3e, 0x33, 0x49, 0x39, 0x3e, 0x42, + 0x3d, 0x39, 0x37, 0x36, 0x35, 0x41, 0x3e, 0x37, 0x37, 0x3e, 0x3d, 0x38, + 0x3a, 0x3c, 0x41, 0x29, 0x3c, 0x3b, 0x39, 0x40, 0x43, 0x3d, 0x3e, 0x33, + 0x3f, 0x3f, 0x3e, 0x43, 0x43, 0x38, 0x38, 0x41, 0x3b, 0x38, 0x35, 0x3a, + 0x4b, 0x44, 0x44, 0x55, 0x4e, 0x44, 0x4d, 0x49, 0x3e, 0x53, 0x45, 0x3f, + 0x45, 0x3d, 0x36, 0x36, 0x4f, 0x5b, 0x6b, 0x28, 0x59, 0x34, 0x39, 0x34, + 0x4f, 0x4d, 0x52, 0x3e, 0x51, 0x34, 0x35, 0x4a, 0x3b, 0x3f, 0x21, 0x45, + 0x36, 0x3f, 0x38, 0x33, 0x2c, 0x37, 0x32, 0x2f, 0x2b, 0x44, 0x47, 0x3f, + 0x38, 0x3a, 0x3f, 0x2e, 0x41, 0x3f, 0x3d, 0x41, 0x35, 0x48, 0x43, 0x40, + 0x33, 0x44, 0x40, 0x38, 0x47, 0x44, 0x4c, 0x3d, 0x41, 0x3b, 0x39, 0x36, + 0x3e, 0x44, 0x49, 0x48, 0x3c, 0x3b, 0x34, 0x34, 0x3f, 0x3c, 0x42, 0x52, + 0x43, 0x41, 0x3c, 0x3c, 0x3d, 0x43, 0x48, 0x54, 0x39, 0x35, 0x39, 0x3c, + 0x43, 0x3c, 0x44, 0x5f, 0x39, 0x3d, 0x38, 0x3f, 0x36, 0x3d, 0x43, 0x58, + 0x33, 0x3d, 0x43, 0x33, 0x3f, 0x36, 0x39, 0x54, 0x3a, 0x37, 0x2d, 0x46, + 0x43, 0x41, 0x47, 0x46, 0x3e, 0x42, 0x34, 0x49, 0x3a, 0x3f, 0x38, 0x50, + 0x3a, 0x3b, 0x42, 0x3a, 0x3e, 0x3c, 0x3b, 0x40, 0x42, 0x45, 0x37, 0x3b, + 0x2f, 0x3b, 0x46, 0x30, 0x42, 
0x3b, 0x3b, 0x44, 0x3b, 0x3e, 0x40, 0x1e, + 0x33, 0x40, 0x40, 0x3d, 0x39, 0x3a, 0x41, 0x33, 0x45, 0x3e, 0x3c, 0x3f, + 0x3f, 0x38, 0x31, 0x46, 0x3b, 0x35, 0x42, 0x39, 0x49, 0x3e, 0x3d, 0x66, + 0x53, 0x3f, 0x44, 0x40, 0x43, 0x45, 0x48, 0x45, 0x49, 0x2d, 0x3e, 0x3a, + 0x4f, 0x5a, 0x62, 0x27, 0x54, 0x37, 0x35, 0x34, 0x42, 0x50, 0x54, 0x43, + 0x4d, 0x38, 0x39, 0x48, 0x38, 0x4c, 0x21, 0x3f, 0x40, 0x3a, 0x3a, 0x2f, + 0x37, 0x2f, 0x29, 0x2c, 0x36, 0x47, 0x3f, 0x41, 0x31, 0x33, 0x3e, 0x32, + 0x3e, 0x40, 0x42, 0x40, 0x42, 0x3a, 0x46, 0x33, 0x44, 0x40, 0x3c, 0x43, + 0x3d, 0x41, 0x4d, 0x3d, 0x3c, 0x47, 0x46, 0x43, 0x42, 0x3e, 0x44, 0x4e, + 0x41, 0x3a, 0x44, 0x38, 0x45, 0x3b, 0x49, 0x4c, 0x40, 0x3f, 0x37, 0x3e, + 0x3e, 0x46, 0x41, 0x51, 0x3f, 0x39, 0x30, 0x40, 0x3e, 0x38, 0x43, 0x5b, + 0x33, 0x3e, 0x31, 0x42, 0x3d, 0x2f, 0x49, 0x57, 0x37, 0x31, 0x46, 0x44, + 0x3e, 0x35, 0x40, 0x55, 0x36, 0x35, 0x3d, 0x3c, 0x38, 0x33, 0x42, 0x52, + 0x3b, 0x39, 0x34, 0x31, 0x45, 0x34, 0x3c, 0x51, 0x33, 0x39, 0x3c, 0x40, + 0x36, 0x36, 0x42, 0x3e, 0x37, 0x3e, 0x3b, 0x40, 0x3d, 0x36, 0x41, 0x30, + 0x42, 0x45, 0x40, 0x49, 0x3d, 0x32, 0x46, 0x26, 0x40, 0x44, 0x3a, 0x3f, + 0x3d, 0x46, 0x45, 0x31, 0x33, 0x34, 0x3e, 0x37, 0x46, 0x3b, 0x32, 0x3a, + 0x3d, 0x31, 0x3c, 0x36, 0x50, 0x41, 0x3b, 0x5d, 0x53, 0x42, 0x44, 0x37, + 0x3e, 0x4d, 0x41, 0x4b, 0x49, 0x2f, 0x35, 0x3a, 0x4e, 0x59, 0x5d, 0x27, + 0x5c, 0x30, 0x3d, 0x3a, 0x46, 0x50, 0x57, 0x4a, 0x4c, 0x36, 0x37, 0x46, + 0x48, 0x41, 0x24, 0x49, 0x36, 0x3e, 0x41, 0x45, 0x37, 0x38, 0x2e, 0x2e, + 0x34, 0x3c, 0x38, 0x41, 0x36, 0x3d, 0x43, 0x36, 0x3e, 0x3e, 0x41, 0x3b, + 0x42, 0x3c, 0x43, 0x38, 0x3e, 0x3d, 0x41, 0x48, 0x47, 0x4c, 0x45, 0x3b, + 0x37, 0x41, 0x38, 0x41, 0x3d, 0x41, 0x46, 0x4e, 0x36, 0x45, 0x38, 0x39, + 0x42, 0x42, 0x37, 0x4c, 0x34, 0x46, 0x3c, 0x44, 0x4a, 0x39, 0x45, 0x53, + 0x3c, 0x3f, 0x41, 0x35, 0x3c, 0x45, 0x4c, 0x5a, 0x44, 0x41, 0x30, 0x35, + 0x40, 0x39, 0x42, 0x5a, 0x36, 0x36, 0x3a, 0x3b, 0x43, 0x35, 0x3c, 0x56, + 0x35, 0x38, 0x2b, 0x4a, 0x3c, 
0x40, 0x45, 0x54, 0x37, 0x37, 0x3a, 0x44, + 0x42, 0x3b, 0x3d, 0x4a, 0x3f, 0x37, 0x3b, 0x35, 0x34, 0x3f, 0x40, 0x48, + 0x45, 0x3e, 0x37, 0x38, 0x41, 0x41, 0x3d, 0x37, 0x43, 0x3d, 0x3d, 0x45, + 0x3a, 0x38, 0x3f, 0x23, 0x4a, 0x37, 0x42, 0x3c, 0x3f, 0x43, 0x42, 0x33, + 0x37, 0x39, 0x35, 0x3b, 0x41, 0x36, 0x2f, 0x3b, 0x41, 0x3a, 0x44, 0x3d, + 0x3e, 0x45, 0x44, 0x50, 0x47, 0x47, 0x48, 0x3c, 0x3f, 0x45, 0x43, 0x3f, + 0x4a, 0x33, 0x3c, 0x3a, 0x52, 0x52, 0x5a, 0x23, 0x58, 0x31, 0x3b, 0x3b, + 0x47, 0x44, 0x54, 0x45, 0x42, 0x38, 0x38, 0x40, 0x43, 0x3f, 0x2a, 0x46, + 0x3b, 0x46, 0x3b, 0x46, 0x35, 0x37, 0x29, 0x35, 0x38, 0x41, 0x3a, 0x31, + 0x44, 0x41, 0x39, 0x36, 0x45, 0x41, 0x40, 0x3e, 0x40, 0x44, 0x47, 0x37, + 0x3f, 0x42, 0x49, 0x34, 0x46, 0x3d, 0x4b, 0x3d, 0x42, 0x3b, 0x42, 0x3e, + 0x41, 0x3b, 0x3f, 0x43, 0x47, 0x45, 0x47, 0x41, 0x40, 0x3a, 0x3d, 0x45, + 0x40, 0x36, 0x3b, 0x3b, 0x44, 0x37, 0x46, 0x55, 0x35, 0x42, 0x3f, 0x3a, + 0x41, 0x41, 0x44, 0x5c, 0x31, 0x44, 0x3d, 0x46, 0x39, 0x38, 0x46, 0x59, + 0x41, 0x3b, 0x3d, 0x39, 0x33, 0x3e, 0x41, 0x58, 0x33, 0x44, 0x34, 0x31, + 0x48, 0x3e, 0x4d, 0x56, 0x36, 0x3c, 0x37, 0x46, 0x46, 0x38, 0x45, 0x53, + 0x35, 0x3d, 0x3a, 0x31, 0x42, 0x48, 0x45, 0x44, 0x3b, 0x3b, 0x3c, 0x41, + 0x3d, 0x42, 0x3f, 0x2f, 0x38, 0x3c, 0x3e, 0x41, 0x44, 0x3a, 0x4a, 0x24, + 0x37, 0x3e, 0x37, 0x48, 0x40, 0x3f, 0x46, 0x3c, 0x47, 0x4a, 0x38, 0x47, + 0x34, 0x45, 0x31, 0x42, 0x43, 0x44, 0x3f, 0x3f, 0x49, 0x40, 0x3c, 0x41, + 0x4d, 0x43, 0x42, 0x39, 0x39, 0x48, 0x41, 0x38, 0x47, 0x3c, 0x3c, 0x42, + 0x44, 0x55, 0x62, 0x2a, 0x5c, 0x32, 0x3a, 0x37, 0x4c, 0x44, 0x4f, 0x3e, + 0x4e, 0x42, 0x3a, 0x42, 0x41, 0x4a, 0x35, 0x44, 0x45, 0x3b, 0x43, 0x41, + 0x33, 0x38, 0x28, 0x36, 0x40, 0x47, 0x3e, 0x3e, 0x3e, 0x39, 0x3a, 0x37, + 0x44, 0x44, 0x3f, 0x3b, 0x41, 0x3c, 0x45, 0x36, 0x38, 0x3a, 0x3c, 0x42, + 0x42, 0x3f, 0x59, 0x3c, 0x47, 0x3d, 0x38, 0x3a, 0x42, 0x44, 0x41, 0x46, + 0x3f, 0x43, 0x48, 0x42, 0x44, 0x35, 0x3f, 0x45, 0x36, 0x3f, 0x38, 0x3a, + 0x44, 0x3d, 0x3d, 0x4e, 0x3e, 
0x45, 0x40, 0x42, 0x3c, 0x33, 0x43, 0x5a, + 0x38, 0x3e, 0x45, 0x3a, 0x3e, 0x42, 0x45, 0x52, 0x3c, 0x42, 0x3a, 0x38, + 0x3d, 0x3b, 0x4a, 0x57, 0x38, 0x37, 0x47, 0x44, 0x3e, 0x3c, 0x38, 0x48, + 0x36, 0x41, 0x3f, 0x41, 0x3a, 0x3a, 0x46, 0x47, 0x42, 0x40, 0x32, 0x33, + 0x43, 0x37, 0x41, 0x43, 0x3e, 0x40, 0x3d, 0x3a, 0x3e, 0x38, 0x42, 0x30, + 0x3e, 0x40, 0x46, 0x42, 0x40, 0x44, 0x42, 0x23, 0x31, 0x40, 0x3f, 0x3d, + 0x3b, 0x33, 0x40, 0x33, 0x41, 0x33, 0x43, 0x41, 0x3a, 0x3e, 0x36, 0x40, + 0x40, 0x45, 0x37, 0x42, 0x46, 0x42, 0x39, 0x48, 0x44, 0x40, 0x40, 0x45, + 0x3c, 0x49, 0x41, 0x3f, 0x4c, 0x3d, 0x2f, 0x3f, 0x47, 0x52, 0x54, 0x2c, + 0x55, 0x42, 0x44, 0x3b, 0x46, 0x4f, 0x48, 0x3c, 0x45, 0x39, 0x3f, 0x4b, + 0x3f, 0x3f, 0x36, 0x42, 0x41, 0x48, 0x44, 0x44, 0x36, 0x3b, 0x37, 0x40, + 0x39, 0x49, 0x3a, 0x35, 0x3e, 0x48, 0x31, 0x30, 0x44, 0x38, 0x4c, 0x3c, + 0x41, 0x3e, 0x46, 0x32, 0x44, 0x3b, 0x42, 0x3c, 0x38, 0x3a, 0x47, 0x3f, + 0x3a, 0x42, 0x3a, 0x43, 0x40, 0x4b, 0x47, 0x3c, 0x42, 0x46, 0x45, 0x42, + 0x3c, 0x46, 0x3d, 0x3f, 0x3e, 0x36, 0x38, 0x3e, 0x46, 0x3c, 0x4d, 0x43, + 0x49, 0x41, 0x48, 0x3c, 0x3d, 0x39, 0x43, 0x58, 0x3a, 0x41, 0x3f, 0x38, + 0x37, 0x3f, 0x46, 0x5d, 0x3c, 0x3c, 0x39, 0x36, 0x3d, 0x46, 0x43, 0x50, + 0x3a, 0x47, 0x39, 0x36, 0x41, 0x3f, 0x3e, 0x51, 0x31, 0x36, 0x3e, 0x3c, + 0x3c, 0x3a, 0x48, 0x41, 0x3a, 0x43, 0x49, 0x3e, 0x42, 0x46, 0x3f, 0x41, + 0x49, 0x33, 0x42, 0x41, 0x45, 0x40, 0x3d, 0x2b, 0x3d, 0x38, 0x40, 0x37, + 0x3a, 0x31, 0x45, 0x26, 0x33, 0x3d, 0x3f, 0x39, 0x36, 0x3c, 0x38, 0x33, + 0x34, 0x3f, 0x35, 0x44, 0x3a, 0x39, 0x32, 0x41, 0x35, 0x40, 0x3c, 0x3b, + 0x4a, 0x3f, 0x3e, 0x3e, 0x4a, 0x3e, 0x42, 0x35, 0x38, 0x43, 0x3c, 0x37, + 0x3d, 0x3c, 0x39, 0x43, 0x3f, 0x4e, 0x54, 0x33, 0x4b, 0x37, 0x43, 0x3b, + 0x43, 0x48, 0x43, 0x42, 0x3d, 0x46, 0x45, 0x49, 0x3a, 0x39, 0x36, 0x4a, + 0x48, 0x48, 0x37, 0x4b, 0x42, 0x47, 0x34, 0x34, 0x43, 0x42, 0x3a, 0x3d, + 0x3c, 0x46, 0x34, 0x39, 0x40, 0x3b, 0x3e, 0x3e, 0x37, 0x3d, 0x53, 0x3b, + 0x48, 0x3c, 0x43, 0x44, 0x3b, 
0x3f, 0x57, 0x3d, 0x39, 0x3c, 0x39, 0x3a, + 0x3e, 0x3f, 0x43, 0x3e, 0x41, 0x47, 0x3c, 0x41, 0x40, 0x41, 0x37, 0x3f, + 0x3b, 0x43, 0x35, 0x3e, 0x45, 0x40, 0x47, 0x59, 0x41, 0x49, 0x3b, 0x3f, + 0x47, 0x49, 0x4b, 0x61, 0x39, 0x48, 0x39, 0x3e, 0x44, 0x34, 0x3b, 0x59, + 0x3c, 0x42, 0x45, 0x35, 0x42, 0x41, 0x39, 0x52, 0x42, 0x3c, 0x3d, 0x3e, + 0x3d, 0x4a, 0x4a, 0x4d, 0x3c, 0x34, 0x44, 0x3c, 0x41, 0x34, 0x38, 0x46, + 0x38, 0x45, 0x40, 0x45, 0x40, 0x3a, 0x3d, 0x44, 0x3a, 0x37, 0x3a, 0x3a, + 0x3b, 0x42, 0x40, 0x34, 0x3b, 0x3c, 0x42, 0x40, 0x3d, 0x32, 0x40, 0x27, + 0x37, 0x39, 0x37, 0x46, 0x48, 0x31, 0x40, 0x30, 0x42, 0x42, 0x3a, 0x40, + 0x3d, 0x37, 0x2a, 0x40, 0x41, 0x37, 0x3c, 0x4a, 0x46, 0x45, 0x3d, 0x34, + 0x48, 0x41, 0x42, 0x3e, 0x3f, 0x39, 0x3c, 0x3a, 0x4f, 0x3b, 0x32, 0x3e, + 0x43, 0x51, 0x4f, 0x2a, 0x46, 0x3a, 0x3d, 0x3b, 0x40, 0x3d, 0x4c, 0x3c, + 0x48, 0x40, 0x36, 0x4a, 0x3a, 0x38, 0x42, 0x43, 0x4c, 0x3d, 0x47, 0x47, + 0x33, 0x3f, 0x2d, 0x37, 0x4a, 0x43, 0x38, 0x3e, 0x49, 0x42, 0x42, 0x3d, + 0x43, 0x47, 0x41, 0x38, 0x46, 0x37, 0x46, 0x38, 0x47, 0x42, 0x49, 0x3d, + 0x3b, 0x37, 0x4c, 0x3c, 0x3a, 0x45, 0x3f, 0x37, 0x36, 0x3d, 0x3c, 0x40, + 0x3e, 0x45, 0x46, 0x41, 0x41, 0x40, 0x3c, 0x44, 0x47, 0x43, 0x37, 0x3f, + 0x3e, 0x3a, 0x3a, 0x4b, 0x3a, 0x36, 0x3d, 0x3f, 0x38, 0x3f, 0x3c, 0x58, + 0x40, 0x49, 0x3d, 0x42, 0x38, 0x3a, 0x47, 0x50, 0x3b, 0x49, 0x40, 0x44, + 0x3e, 0x3c, 0x38, 0x52, 0x3a, 0x3e, 0x44, 0x3c, 0x35, 0x44, 0x3a, 0x47, + 0x3e, 0x49, 0x3f, 0x47, 0x45, 0x39, 0x3b, 0x46, 0x44, 0x3e, 0x41, 0x46, + 0x40, 0x41, 0x40, 0x40, 0x3a, 0x35, 0x3e, 0x36, 0x3e, 0x3e, 0x3d, 0x35, + 0x3b, 0x3c, 0x38, 0x46, 0x3b, 0x3c, 0x41, 0x2c, 0x3f, 0x42, 0x38, 0x3b, + 0x36, 0x3b, 0x39, 0x40, 0x40, 0x38, 0x36, 0x33, 0x34, 0x42, 0x2f, 0x44, + 0x41, 0x40, 0x39, 0x35, 0x3b, 0x44, 0x42, 0x2c, 0x41, 0x3b, 0x44, 0x41, + 0x35, 0x44, 0x3b, 0x34, 0x44, 0x49, 0x36, 0x39, 0x3a, 0x52, 0x4d, 0x2b, + 0x40, 0x40, 0x3e, 0x39, 0x48, 0x42, 0x3c, 0x44, 0x46, 0x49, 0x3f, 0x54, + 0x43, 0x40, 0x2e, 0x40, 0x4f, 
0x36, 0x3e, 0x3f, 0x38, 0x48, 0x44, 0x3c, + 0x44, 0x43, 0x41, 0x47, 0x40, 0x46, 0x40, 0x37, 0x41, 0x34, 0x3a, 0x41, + 0x41, 0x3b, 0x49, 0x39, 0x42, 0x38, 0x3d, 0x39, 0x34, 0x35, 0x43, 0x36, + 0x3e, 0x44, 0x3f, 0x40, 0x43, 0x40, 0x40, 0x3a, 0x47, 0x42, 0x3e, 0x42, + 0x46, 0x35, 0x3a, 0x46, 0x3c, 0x3c, 0x3c, 0x3d, 0x3f, 0x40, 0x43, 0x4c, + 0x3a, 0x37, 0x3f, 0x43, 0x47, 0x38, 0x42, 0x58, 0x42, 0x3b, 0x34, 0x37, + 0x3e, 0x48, 0x3c, 0x57, 0x44, 0x3c, 0x3d, 0x3a, 0x36, 0x48, 0x3c, 0x51, + 0x3d, 0x48, 0x45, 0x45, 0x38, 0x45, 0x40, 0x3f, 0x3b, 0x35, 0x3d, 0x3f, + 0x38, 0x47, 0x39, 0x3b, 0x36, 0x49, 0x43, 0x40, 0x3f, 0x46, 0x38, 0x40, + 0x3f, 0x3e, 0x39, 0x32, 0x47, 0x42, 0x35, 0x33, 0x39, 0x47, 0x3c, 0x36, + 0x3b, 0x37, 0x43, 0x35, 0x3b, 0x3b, 0x34, 0x3b, 0x38, 0x3d, 0x3e, 0x3a, + 0x35, 0x49, 0x38, 0x40, 0x3f, 0x3f, 0x3e, 0x37, 0x43, 0x3b, 0x3e, 0x3e, + 0x3b, 0x40, 0x44, 0x39, 0x3d, 0x3f, 0x31, 0x42, 0x42, 0x3b, 0x41, 0x3d, + 0x3e, 0x3c, 0x37, 0x34, 0x48, 0x3d, 0x49, 0x4a, 0x47, 0x36, 0x3a, 0x34, + 0x37, 0x36, 0x3e, 0x38, 0x33, 0x45, 0x39, 0x44, 0x34, 0x49, 0x3a, 0x3d, + 0x34, 0x31, 0x31, 0x3d, 0x34, 0x3d, 0x41, 0x3e, 0x49, 0x41, 0x34, 0x3f, + 0x3a, 0x42, 0x3e, 0x40, 0x3f, 0x33, 0x46, 0x3f, 0x34, 0x39, 0x37, 0x46, + 0x3e, 0x32, 0x3f, 0x45, 0x45, 0x41, 0x3b, 0x4b, 0x35, 0x35, 0x3b, 0x4a, + 0x3d, 0x43, 0x3b, 0x44, 0x3c, 0x38, 0x31, 0x43, 0x39, 0x35, 0x41, 0x45, + 0x37, 0x3e, 0x43, 0x47, 0x39, 0x40, 0x41, 0x41, 0x40, 0x32, 0x37, 0x3e, + 0x3d, 0x39, 0x3b, 0x49, 0x33, 0x35, 0x38, 0x41, 0x45, 0x37, 0x3c, 0x49, + 0x3b, 0x34, 0x34, 0x41, 0x3a, 0x3f, 0x3e, 0x47, 0x39, 0x3c, 0x34, 0x3a, + 0x38, 0x44, 0x40, 0x51, 0x3a, 0x37, 0x3b, 0x3f, 0x3d, 0x3a, 0x45, 0x48, + 0x3f, 0x46, 0x35, 0x43, 0x38, 0x43, 0x35, 0x4c, 0x42, 0x47, 0x44, 0x3d, + 0x40, 0x3a, 0x39, 0x4e, 0x3d, 0x37, 0x3c, 0x42, 0x40, 0x48, 0x44, 0x4c, + 0x31, 0x40, 0x42, 0x3b, 0x45, 0x45, 0x3f, 0x3e, 0x3d, 0x44, 0x3f, 0x31, + 0x3f, 0x44, 0x45, 0x37, 0x3e, 0x3d, 0x35, 0x3b, 0x2d, 0x44, 0x4a, 0x3a, + 0x2b, 0x37, 0x38, 0x46, 0x41, 
0x39, 0x3c, 0x3c, 0x46, 0x33, 0x36, 0x3c, + 0x4b, 0x34, 0x49, 0x50, 0x30, 0x3c, 0x33, 0x41, 0x44, 0x33, 0x43, 0x39, + 0x36, 0x45, 0x33, 0x3b, 0x3d, 0x36, 0x47, 0x30, 0x42, 0x37, 0x49, 0x3e, + 0x3b, 0x49, 0x3d, 0x3b, 0x3a, 0x41, 0x38, 0x44, 0x42, 0x3b, 0x3f, 0x40, + 0x46, 0x35, 0x38, 0x3c, 0x48, 0x3a, 0x46, 0x41, 0x36, 0x36, 0x41, 0x3e, + 0x43, 0x3e, 0x32, 0x39, 0x3a, 0x41, 0x30, 0x3e, 0x40, 0x3e, 0x36, 0x3a, + 0x45, 0x45, 0x3a, 0x3c, 0x31, 0x3b, 0x47, 0x3f, 0x36, 0x3a, 0x3c, 0x41, + 0x3b, 0x41, 0x39, 0x46, 0x3f, 0x3c, 0x34, 0x3e, 0x41, 0x45, 0x41, 0x42, + 0x39, 0x40, 0x40, 0x44, 0x45, 0x42, 0x34, 0x3f, 0x3e, 0x31, 0x3b, 0x41, + 0x33, 0x43, 0x37, 0x44, 0x44, 0x3a, 0x36, 0x36, 0x48, 0x3c, 0x37, 0x47, + 0x39, 0x3e, 0x3e, 0x3c, 0x3c, 0x41, 0x3c, 0x44, 0x3b, 0x42, 0x3f, 0x3a, + 0x43, 0x3b, 0x3e, 0x48, 0x36, 0x3f, 0x3d, 0x34, 0x40, 0x43, 0x35, 0x4f, + 0x34, 0x39, 0x3b, 0x41, 0x40, 0x39, 0x37, 0x4c, 0x39, 0x36, 0x39, 0x39, + 0x47, 0x41, 0x43, 0x3f, 0x3f, 0x33, 0x42, 0x3f, 0x42, 0x40, 0x37, 0x40, + 0x3f, 0x34, 0x45, 0x3d, 0x2d, 0x3c, 0x44, 0x3b, 0x43, 0x37, 0x26, 0x50, + 0x43, 0x44, 0x3d, 0x43, 0x42, 0x2d, 0x3c, 0x33, 0x4a, 0x32, 0x4a, 0x53, + 0x33, 0x38, 0x27, 0x36, 0x42, 0x30, 0x47, 0x3d, 0x36, 0x45, 0x46, 0x36, + 0x3b, 0x3b, 0x40, 0x33, 0x37, 0x36, 0x44, 0x46, 0x3d, 0x35, 0x40, 0x38, + 0x3b, 0x40, 0x36, 0x3c, 0x3d, 0x37, 0x31, 0x41, 0x33, 0x3c, 0x38, 0x3f, + 0x43, 0x3a, 0x40, 0x49, 0x38, 0x39, 0x38, 0x3d, 0x43, 0x3d, 0x39, 0x3b, + 0x3d, 0x3f, 0x38, 0x42, 0x34, 0x43, 0x33, 0x3e, 0x43, 0x3e, 0x40, 0x42, + 0x3b, 0x45, 0x37, 0x44, 0x43, 0x39, 0x3c, 0x3d, 0x37, 0x44, 0x3a, 0x3b, + 0x47, 0x3f, 0x3a, 0x3c, 0x3a, 0x3b, 0x3f, 0x43, 0x3e, 0x3d, 0x46, 0x3e, + 0x37, 0x36, 0x3f, 0x40, 0x42, 0x42, 0x37, 0x36, 0x48, 0x35, 0x44, 0x44, + 0x39, 0x3c, 0x3b, 0x41, 0x44, 0x49, 0x3a, 0x40, 0x41, 0x36, 0x33, 0x3a, + 0x3c, 0x3d, 0x40, 0x3f, 0x43, 0x36, 0x3c, 0x3a, 0x3f, 0x4b, 0x32, 0x49, + 0x49, 0x3e, 0x3a, 0x3e, 0x3f, 0x41, 0x3c, 0x47, 0x40, 0x41, 0x45, 0x3e, + 0x47, 0x47, 0x3f, 0x47, 0x45, 
0x3e, 0x31, 0x43, 0x4a, 0x44, 0x36, 0x40, + 0x41, 0x47, 0x3e, 0x42, 0x37, 0x40, 0x3b, 0x46, 0x37, 0x41, 0x3e, 0x3c, + 0x27, 0x40, 0x49, 0x42, 0x42, 0x39, 0x30, 0x49, 0x43, 0x38, 0x3d, 0x42, + 0x43, 0x2f, 0x3b, 0x37, 0x4b, 0x2d, 0x4f, 0x52, 0x30, 0x31, 0x2f, 0x3a, + 0x49, 0x38, 0x4f, 0x45, 0x2e, 0x47, 0x3a, 0x32, 0x33, 0x3f, 0x4a, 0x2e, + 0x33, 0x3b, 0x3e, 0x3e, 0x49, 0x45, 0x44, 0x38, 0x3c, 0x35, 0x45, 0x47, + 0x41, 0x3b, 0x3c, 0x48, 0x46, 0x39, 0x39, 0x3b, 0x3f, 0x41, 0x38, 0x42, + 0x3d, 0x46, 0x33, 0x41, 0x36, 0x3f, 0x3f, 0x3c, 0x33, 0x3e, 0x3e, 0x40, + 0x44, 0x40, 0x3c, 0x38, 0x46, 0x3a, 0x40, 0x36, 0x42, 0x35, 0x3f, 0x3b, + 0x3b, 0x43, 0x3c, 0x40, 0x40, 0x49, 0x2e, 0x39, 0x40, 0x3f, 0x45, 0x41, + 0x3f, 0x30, 0x42, 0x3d, 0x40, 0x3c, 0x3a, 0x3b, 0x3b, 0x40, 0x39, 0x42, + 0x3a, 0x3f, 0x3f, 0x3e, 0x35, 0x3b, 0x38, 0x45, 0x47, 0x35, 0x44, 0x3e, + 0x3b, 0x3f, 0x3f, 0x40, 0x3a, 0x35, 0x30, 0x49, 0x45, 0x35, 0x3b, 0x39, + 0x3b, 0x48, 0x3f, 0x37, 0x39, 0x40, 0x43, 0x45, 0x3d, 0x40, 0x41, 0x3a, + 0x33, 0x3d, 0x3a, 0x4b, 0x40, 0x42, 0x40, 0x42, 0x43, 0x39, 0x3c, 0x49, + 0x3e, 0x47, 0x3e, 0x44, 0x3f, 0x3a, 0x40, 0x41, 0x3f, 0x42, 0x42, 0x37, + 0x3e, 0x3b, 0x36, 0x3e, 0x3b, 0x3c, 0x48, 0x43, 0x2d, 0x46, 0x4a, 0x38, + 0x45, 0x3a, 0x29, 0x46, 0x40, 0x3c, 0x40, 0x44, 0x40, 0x33, 0x2f, 0x33, + 0x48, 0x2e, 0x51, 0x4f, 0x2b, 0x32, 0x2e, 0x2d, 0x45, 0x33, 0x4d, 0x41, + 0x29, 0x4b, 0x41, 0x39, 0x2f, 0x3a, 0x49, 0x31, 0x37, 0x40, 0x47, 0x4c, + 0x3e, 0x31, 0x41, 0x3f, 0x43, 0x37, 0x45, 0x4f, 0x41, 0x3c, 0x30, 0x4a, + 0x37, 0x37, 0x36, 0x39, 0x31, 0x3d, 0x36, 0x4b, 0x37, 0x44, 0x3c, 0x43, + 0x44, 0x36, 0x3f, 0x3b, 0x34, 0x3e, 0x3a, 0x35, 0x38, 0x3f, 0x33, 0x37, + 0x3b, 0x3d, 0x46, 0x38, 0x3b, 0x37, 0x38, 0x3b, 0x31, 0x3e, 0x3d, 0x3b, + 0x3d, 0x39, 0x35, 0x33, 0x33, 0x3c, 0x39, 0x39, 0x48, 0x39, 0x39, 0x3f, + 0x3e, 0x36, 0x47, 0x3a, 0x44, 0x40, 0x32, 0x3c, 0x37, 0x35, 0x40, 0x3f, + 0x3a, 0x38, 0x3b, 0x3d, 0x46, 0x45, 0x36, 0x43, 0x40, 0x3d, 0x41, 0x41, + 0x47, 0x3a, 0x3d, 0x3e, 0x43, 
0x42, 0x32, 0x36, 0x41, 0x37, 0x3b, 0x35, + 0x36, 0x44, 0x36, 0x3c, 0x43, 0x32, 0x3e, 0x3e, 0x42, 0x45, 0x32, 0x3c, + 0x3a, 0x3b, 0x35, 0x43, 0x41, 0x3d, 0x44, 0x50, 0x43, 0x31, 0x3e, 0x44, + 0x44, 0x41, 0x3a, 0x44, 0x36, 0x39, 0x3b, 0x3c, 0x32, 0x38, 0x3b, 0x45, + 0x38, 0x43, 0x40, 0x42, 0x33, 0x3e, 0x4a, 0x42, 0x45, 0x39, 0x2f, 0x42, + 0x39, 0x35, 0x44, 0x3e, 0x39, 0x2f, 0x34, 0x33, 0x49, 0x29, 0x50, 0x4f, + 0x2b, 0x36, 0x34, 0x2d, 0x47, 0x33, 0x49, 0x3c, 0x33, 0x51, 0x49, 0x3f, + 0x34, 0x39, 0x4a, 0x2c, 0x34, 0x45, 0x4f, 0x47, 0x34, 0x42, 0x3a, 0x3d, + 0x36, 0x4a, 0x3b, 0x43, 0x36, 0x3f, 0x39, 0x4b, 0x38, 0x3a, 0x31, 0x3d, + 0x32, 0x42, 0x3a, 0x47, 0x48, 0x3e, 0x44, 0x3f, 0x39, 0x3e, 0x44, 0x35, + 0x41, 0x3c, 0x45, 0x3a, 0x3e, 0x3b, 0x3d, 0x2f, 0x37, 0x40, 0x3e, 0x43, + 0x39, 0x39, 0x33, 0x3b, 0x37, 0x3b, 0x37, 0x37, 0x37, 0x39, 0x36, 0x31, + 0x39, 0x3b, 0x41, 0x39, 0x3b, 0x40, 0x36, 0x37, 0x42, 0x39, 0x3a, 0x46, + 0x3f, 0x30, 0x38, 0x39, 0x35, 0x32, 0x3e, 0x3a, 0x43, 0x43, 0x3e, 0x33, + 0x42, 0x3f, 0x41, 0x3c, 0x46, 0x34, 0x34, 0x40, 0x43, 0x37, 0x32, 0x43, + 0x3c, 0x37, 0x36, 0x33, 0x3d, 0x36, 0x3a, 0x40, 0x39, 0x38, 0x32, 0x3e, + 0x32, 0x3d, 0x37, 0x49, 0x42, 0x47, 0x41, 0x3b, 0x3d, 0x3c, 0x3a, 0x37, + 0x3c, 0x45, 0x3a, 0x45, 0x36, 0x44, 0x3a, 0x3a, 0x3a, 0x3c, 0x43, 0x3b, + 0x3b, 0x35, 0x38, 0x47, 0x36, 0x40, 0x32, 0x43, 0x3e, 0x39, 0x42, 0x40, + 0x2c, 0x3c, 0x4c, 0x4c, 0x43, 0x3b, 0x37, 0x4a, 0x3f, 0x3c, 0x45, 0x44, + 0x3f, 0x30, 0x36, 0x31, 0x4f, 0x2f, 0x5d, 0x4b, 0x34, 0x34, 0x2d, 0x2b, + 0x44, 0x31, 0x4e, 0x40, 0x2e, 0x4d, 0x48, 0x3e, 0x37, 0x2b, 0x49, 0x25, + 0x31, 0x49, 0x44, 0x49, 0x39, 0x39, 0x4b, 0x3a, 0x3a, 0x41, 0x3e, 0x42, + 0x3c, 0x36, 0x36, 0x4a, 0x32, 0x44, 0x3e, 0x48, 0x3e, 0x3c, 0x37, 0x49, + 0x3d, 0x34, 0x3f, 0x37, 0x33, 0x36, 0x46, 0x3a, 0x3a, 0x31, 0x45, 0x3f, + 0x3a, 0x31, 0x3b, 0x33, 0x41, 0x42, 0x35, 0x39, 0x38, 0x44, 0x36, 0x3a, + 0x3f, 0x3b, 0x37, 0x3e, 0x3b, 0x38, 0x2f, 0x32, 0x44, 0x3d, 0x44, 0x41, + 0x39, 0x36, 0x3a, 0x34, 0x39, 
0x38, 0x34, 0x3f, 0x3b, 0x37, 0x34, 0x34, + 0x40, 0x3d, 0x34, 0x3a, 0x46, 0x42, 0x3f, 0x34, 0x38, 0x33, 0x39, 0x44, + 0x3f, 0x41, 0x3c, 0x31, 0x40, 0x32, 0x3f, 0x37, 0x37, 0x41, 0x3e, 0x35, + 0x37, 0x48, 0x3b, 0x41, 0x3d, 0x3a, 0x3f, 0x35, 0x33, 0x3c, 0x36, 0x3b, + 0x3a, 0x48, 0x33, 0x42, 0x37, 0x33, 0x39, 0x41, 0x3c, 0x3d, 0x3b, 0x4d, + 0x39, 0x3a, 0x3e, 0x44, 0x3d, 0x41, 0x3b, 0x38, 0x49, 0x41, 0x3a, 0x38, + 0x34, 0x38, 0x38, 0x3c, 0x45, 0x3c, 0x37, 0x3b, 0x36, 0x3e, 0x4a, 0x4b, + 0x42, 0x3f, 0x32, 0x45, 0x46, 0x35, 0x46, 0x41, 0x38, 0x33, 0x39, 0x37, + 0x44, 0x2b, 0x60, 0x4a, 0x2a, 0x2e, 0x35, 0x2d, 0x43, 0x37, 0x51, 0x47, + 0x2f, 0x4d, 0x50, 0x3e, 0x3a, 0x33, 0x4f, 0x2a, 0x35, 0x45, 0x4a, 0x4c, + 0x3b, 0x3d, 0x43, 0x44, 0x3d, 0x3f, 0x4a, 0x3e, 0x49, 0x37, 0x2e, 0x4f, + 0x39, 0x3f, 0x32, 0x3c, 0x37, 0x3b, 0x39, 0x4d, 0x34, 0x3f, 0x46, 0x44, + 0x3d, 0x40, 0x3f, 0x40, 0x39, 0x33, 0x39, 0x3e, 0x3d, 0x40, 0x31, 0x30, + 0x35, 0x3d, 0x3e, 0x3a, 0x3e, 0x32, 0x31, 0x3e, 0x48, 0x3c, 0x40, 0x43, + 0x3f, 0x3f, 0x34, 0x2e, 0x3a, 0x3e, 0x3b, 0x43, 0x45, 0x32, 0x3a, 0x31, + 0x37, 0x38, 0x31, 0x35, 0x34, 0x3d, 0x42, 0x36, 0x46, 0x37, 0x32, 0x47, + 0x41, 0x3c, 0x35, 0x35, 0x36, 0x41, 0x3a, 0x3b, 0x42, 0x44, 0x36, 0x31, + 0x3c, 0x3d, 0x34, 0x34, 0x3b, 0x40, 0x40, 0x2e, 0x40, 0x46, 0x3b, 0x43, + 0x3f, 0x40, 0x3b, 0x3a, 0x32, 0x40, 0x46, 0x39, 0x3c, 0x49, 0x2f, 0x3d, + 0x49, 0x3e, 0x44, 0x3c, 0x3e, 0x35, 0x3f, 0x44, 0x41, 0x40, 0x3e, 0x47, + 0x3d, 0x40, 0x3f, 0x41, 0x3b, 0x41, 0x41, 0x3f, 0x40, 0x3f, 0x3e, 0x3e, + 0x3f, 0x43, 0x35, 0x40, 0x2b, 0x42, 0x45, 0x56, 0x40, 0x3c, 0x2f, 0x44, + 0x44, 0x3d, 0x3e, 0x3d, 0x40, 0x2d, 0x39, 0x31, 0x54, 0x2f, 0x61, 0x48, + 0x2e, 0x37, 0x37, 0x32, 0x3e, 0x2d, 0x52, 0x4d, 0x2d, 0x4d, 0x4c, 0x3a, + 0x3a, 0x31, 0x4e, 0x2d, 0x31, 0x48, 0x47, 0x54, 0x45, 0x38, 0x3b, 0x3d, + 0x42, 0x41, 0x44, 0x4a, 0x48, 0x42, 0x2f, 0x4d, 0x31, 0x34, 0x3a, 0x46, + 0x37, 0x44, 0x2c, 0x45, 0x46, 0x43, 0x40, 0x3f, 0x34, 0x33, 0x40, 0x39, + 0x32, 0x35, 0x3a, 0x40, 0x3f, 
0x3f, 0x36, 0x32, 0x3f, 0x3d, 0x35, 0x48, + 0x3c, 0x48, 0x37, 0x39, 0x35, 0x3f, 0x37, 0x3d, 0x44, 0x46, 0x2d, 0x2a, + 0x47, 0x38, 0x3a, 0x39, 0x45, 0x3b, 0x40, 0x2d, 0x37, 0x33, 0x41, 0x3c, + 0x40, 0x35, 0x3f, 0x32, 0x3a, 0x36, 0x40, 0x41, 0x3a, 0x3c, 0x33, 0x31, + 0x42, 0x3f, 0x41, 0x3a, 0x41, 0x46, 0x38, 0x2f, 0x3c, 0x3d, 0x3d, 0x39, + 0x3b, 0x46, 0x41, 0x31, 0x46, 0x36, 0x40, 0x48, 0x3c, 0x33, 0x42, 0x32, + 0x3b, 0x40, 0x3f, 0x36, 0x37, 0x44, 0x34, 0x35, 0x32, 0x32, 0x37, 0x38, + 0x33, 0x3b, 0x37, 0x4a, 0x3f, 0x46, 0x3a, 0x41, 0x32, 0x37, 0x30, 0x3e, + 0x40, 0x35, 0x41, 0x40, 0x37, 0x41, 0x2b, 0x40, 0x3d, 0x3d, 0x32, 0x38, + 0x34, 0x3e, 0x47, 0x61, 0x43, 0x3b, 0x3c, 0x42, 0x46, 0x3d, 0x40, 0x4a, + 0x3c, 0x2d, 0x33, 0x35, 0x55, 0x38, 0x69, 0x4f, 0x33, 0x37, 0x30, 0x39, + 0x44, 0x2e, 0x58, 0x4b, 0x2a, 0x51, 0x4b, 0x3c, 0x39, 0x2e, 0x51, 0x2d, + 0x30, 0x4a, 0x42, 0x53, 0x3f, 0x39, 0x3e, 0x44, 0x3b, 0x40, 0x47, 0x44, + 0x47, 0x3e, 0x39, 0x4b, 0x40, 0x3d, 0x42, 0x39, 0x3b, 0x39, 0x32, 0x42, + 0x36, 0x36, 0x36, 0x42, 0x44, 0x34, 0x33, 0x40, 0x40, 0x40, 0x3a, 0x3a, + 0x41, 0x3f, 0x31, 0x30, 0x3f, 0x31, 0x30, 0x39, 0x46, 0x36, 0x35, 0x34, + 0x40, 0x43, 0x3c, 0x41, 0x31, 0x46, 0x35, 0x26, 0x44, 0x32, 0x3d, 0x35, + 0x3d, 0x3c, 0x36, 0x32, 0x39, 0x3a, 0x30, 0x40, 0x48, 0x3e, 0x38, 0x37, + 0x44, 0x3b, 0x3d, 0x42, 0x3d, 0x3c, 0x32, 0x2b, 0x3f, 0x41, 0x39, 0x3d, + 0x3e, 0x3f, 0x35, 0x2f, 0x46, 0x3d, 0x3d, 0x3b, 0x45, 0x37, 0x31, 0x35, + 0x44, 0x40, 0x3a, 0x45, 0x3a, 0x3c, 0x39, 0x31, 0x3b, 0x3d, 0x3b, 0x3a, + 0x43, 0x44, 0x39, 0x47, 0x44, 0x36, 0x3e, 0x39, 0x48, 0x3f, 0x39, 0x4b, + 0x3c, 0x36, 0x3d, 0x44, 0x44, 0x3f, 0x39, 0x43, 0x3f, 0x37, 0x3f, 0x37, + 0x3b, 0x3b, 0x38, 0x3b, 0x3f, 0x40, 0x31, 0x44, 0x30, 0x44, 0x46, 0x5b, + 0x46, 0x3f, 0x39, 0x40, 0x40, 0x37, 0x4a, 0x46, 0x3f, 0x36, 0x40, 0x39, + 0x59, 0x3e, 0x66, 0x57, 0x32, 0x34, 0x2e, 0x33, 0x46, 0x31, 0x58, 0x44, + 0x26, 0x4c, 0x4b, 0x3c, 0x39, 0x2e, 0x4d, 0x35, 0x32, 0x46, 0x52, 0x52, + 0x3e, 0x40, 0x39, 0x3c, 0x39, 
0x3d, 0x53, 0x48, 0x41, 0x3c, 0x3b, 0x4d, + 0x3c, 0x3e, 0x38, 0x44, 0x3a, 0x3a, 0x29, 0x4a, 0x3c, 0x37, 0x36, 0x38, + 0x3a, 0x31, 0x37, 0x39, 0x3a, 0x40, 0x46, 0x32, 0x42, 0x38, 0x32, 0x2e, + 0x3a, 0x45, 0x44, 0x34, 0x34, 0x38, 0x32, 0x2e, 0x35, 0x40, 0x3a, 0x41, + 0x42, 0x3d, 0x37, 0x2c, 0x3f, 0x37, 0x3c, 0x3d, 0x3a, 0x36, 0x33, 0x35, + 0x3c, 0x34, 0x3c, 0x39, 0x3c, 0x3a, 0x37, 0x30, 0x30, 0x3e, 0x3d, 0x3a, + 0x44, 0x37, 0x36, 0x32, 0x36, 0x37, 0x36, 0x3a, 0x3c, 0x41, 0x3a, 0x35, + 0x36, 0x3a, 0x34, 0x40, 0x39, 0x40, 0x3e, 0x32, 0x34, 0x46, 0x33, 0x3f, + 0x36, 0x45, 0x3e, 0x35, 0x3f, 0x38, 0x3f, 0x3e, 0x3b, 0x3a, 0x36, 0x3b, + 0x36, 0x38, 0x32, 0x3f, 0x44, 0x3c, 0x35, 0x48, 0x38, 0x39, 0x31, 0x49, + 0x3d, 0x43, 0x36, 0x3f, 0x31, 0x43, 0x36, 0x3e, 0x3e, 0x41, 0x39, 0x3b, + 0x40, 0x42, 0x3c, 0x43, 0x36, 0x4a, 0x48, 0x67, 0x4e, 0x43, 0x36, 0x46, + 0x44, 0x3f, 0x4b, 0x4b, 0x3f, 0x38, 0x3c, 0x3c, 0x5e, 0x38, 0x70, 0x52, + 0x38, 0x32, 0x3b, 0x36, 0x4a, 0x2c, 0x52, 0x46, 0x29, 0x4f, 0x48, 0x42, + 0x2d, 0x2e, 0x4f, 0x28, 0x28, 0x45, 0x4d, 0x52, 0x42, 0x3e, 0x3f, 0x41, + 0x3c, 0x3a, 0x47, 0x50, 0x44, 0x45, 0x33, 0x4b, 0x3e, 0x3f, 0x42, 0x3d, + 0x43, 0x34, 0x27, 0x3f, 0x42, 0x3e, 0x43, 0x3e, 0x3a, 0x3c, 0x37, 0x3b, + 0x3f, 0x30, 0x3a, 0x3e, 0x3c, 0x34, 0x37, 0x24, 0x3d, 0x43, 0x40, 0x44, + 0x40, 0x46, 0x31, 0x2f, 0x43, 0x38, 0x38, 0x39, 0x3c, 0x34, 0x2d, 0x2a, + 0x38, 0x31, 0x43, 0x3b, 0x39, 0x3b, 0x32, 0x34, 0x3e, 0x39, 0x41, 0x3b, + 0x3e, 0x33, 0x3a, 0x2a, 0x41, 0x3f, 0x3c, 0x43, 0x3b, 0x3e, 0x35, 0x2c, + 0x38, 0x41, 0x33, 0x31, 0x3e, 0x3f, 0x3a, 0x3c, 0x3b, 0x35, 0x3f, 0x3d, + 0x42, 0x3a, 0x3c, 0x35, 0x3f, 0x40, 0x3c, 0x3e, 0x37, 0x41, 0x3d, 0x38, + 0x34, 0x31, 0x36, 0x3d, 0x3d, 0x47, 0x36, 0x44, 0x3f, 0x45, 0x3c, 0x3c, + 0x35, 0x36, 0x31, 0x4f, 0x46, 0x3a, 0x41, 0x42, 0x40, 0x32, 0x33, 0x41, + 0x34, 0x40, 0x3d, 0x43, 0x3b, 0x3a, 0x32, 0x3c, 0x42, 0x42, 0x3d, 0x43, + 0x37, 0x45, 0x45, 0xff, 0x4b, 0x45, 0x3b, 0x40, 0x43, 0x3e, 0x47, 0x49, + 0x3d, 0x3b, 0x3e, 0x33, 0x58, 
0x35, 0x71, 0x54, 0x2f, 0x38, 0x38, 0x33, + 0x47, 0x35, 0x5b, 0x46, 0x2c, 0x4c, 0x43, 0x37, 0x36, 0x39, 0x4f, 0x30, + 0x26, 0x48, 0x51, 0x48, 0x46, 0x45, 0x3b, 0x39, 0x42, 0x50, 0x47, 0x4c, + 0x4b, 0x3b, 0x3d, 0x4d, 0x41, 0x34, 0x40, 0x44, 0x38, 0x32, 0x2d, 0x43, + 0x39, 0x36, 0x3b, 0x3b, 0x40, 0x3d, 0x37, 0x3c, 0x44, 0x39, 0x42, 0x37, + 0x38, 0x38, 0x32, 0x2f, 0x41, 0x40, 0x3f, 0x3a, 0x37, 0x35, 0x3b, 0x2a, + 0x37, 0x30, 0x3c, 0x37, 0x40, 0x38, 0x3a, 0x27, 0x44, 0x3d, 0x43, 0x40, + 0x35, 0x3f, 0x3e, 0x32, 0x3e, 0x3c, 0x40, 0x39, 0x39, 0x3a, 0x41, 0x31, + 0x3b, 0x3f, 0x34, 0x43, 0x3a, 0x38, 0x42, 0x2a, 0x47, 0x46, 0x3b, 0x38, + 0x47, 0x45, 0x39, 0x31, 0x43, 0x40, 0x37, 0x3a, 0x3d, 0x3e, 0x39, 0x30, + 0x36, 0x37, 0x3a, 0x43, 0x3f, 0x32, 0x31, 0x41, 0x45, 0x3e, 0x43, 0x38, + 0x3f, 0x37, 0x3c, 0x49, 0x3b, 0x33, 0x3d, 0x3a, 0x37, 0x44, 0x32, 0x50, + 0x39, 0x44, 0x3e, 0x3f, 0x3d, 0x41, 0x3e, 0x3e, 0x42, 0x44, 0x45, 0x3f, + 0x36, 0x3f, 0x37, 0x39, 0x3b, 0x3d, 0x3b, 0x3b, 0x2f, 0x46, 0x40, 0x6d, + 0x50, 0x45, 0x3b, 0x45, 0x46, 0x3b, 0x42, 0x48, 0x42, 0x3c, 0x39, 0x37, + 0x57, 0x3b, 0x6c, 0x5b, 0x32, 0x35, 0x3d, 0x39, 0x48, 0x31, 0x5c, 0x46, + 0x29, 0x4c, 0x3f, 0x3e, 0x37, 0x33, 0x58, 0x32, 0x2a, 0x43, 0x4c, 0x50, + 0x3b, 0x44, 0x3c, 0x41, 0x39, 0x48, 0x55, 0x4c, 0x42, 0x38, 0x3b, 0x51, + 0x3f, 0x38, 0x44, 0x46, 0x36, 0x3b, 0x38, 0x4a, 0x3f, 0x37, 0x36, 0x3c, + 0x31, 0x3d, 0x32, 0x39, 0x3b, 0x3f, 0x3e, 0x35, 0x38, 0x3f, 0x34, 0x2b, + 0x37, 0x36, 0x39, 0x40, 0x37, 0x41, 0x32, 0x27, 0x36, 0x33, 0x40, 0x3a, + 0x3f, 0x44, 0x3f, 0x25, 0x38, 0x34, 0x42, 0x3c, 0x3a, 0x40, 0x38, 0x31, + 0x49, 0x3e, 0x33, 0x3d, 0x31, 0x36, 0x39, 0x2b, 0x44, 0x2f, 0x43, 0x34, + 0x34, 0x37, 0x39, 0x33, 0x3b, 0x34, 0x42, 0x3c, 0x40, 0x45, 0x36, 0x31, + 0x43, 0x47, 0x3e, 0x3f, 0x40, 0x3a, 0x33, 0x34, 0x41, 0x44, 0x3a, 0x43, + 0x3e, 0x38, 0x36, 0x31, 0x42, 0x44, 0x40, 0x41, 0x44, 0x43, 0x33, 0x42, + 0x3d, 0x41, 0x3d, 0x3e, 0x3c, 0x39, 0x3e, 0x4f, 0x3f, 0x37, 0x31, 0x40, + 0x3b, 0x38, 0x35, 0x3b, 0x44, 
0x41, 0x41, 0x37, 0x40, 0x42, 0x2d, 0x3d, + 0x39, 0x48, 0x44, 0x3e, 0x34, 0x48, 0x49, 0x6d, 0x45, 0x4b, 0x3a, 0x44, + 0x49, 0x40, 0x4d, 0x51, 0x3f, 0x34, 0x3b, 0x40, 0x52, 0x34, 0x6f, 0x56, + 0x33, 0x3e, 0x40, 0x39, 0x41, 0x32, 0x5d, 0x45, 0x2e, 0x51, 0x48, 0x3c, + 0x2e, 0x2e, 0x51, 0x39, 0x32, 0x45, 0x4a, 0x4c, 0x3b, 0x40, 0x40, 0x3b, + 0x36, 0x41, 0x54, 0x4e, 0x4a, 0x49, 0x3b, 0x4d, 0x3c, 0x41, 0x38, 0x47, + 0x3d, 0x3c, 0x37, 0x48, 0x3f, 0x42, 0x3e, 0x36, 0x39, 0x46, 0x37, 0x3e, + 0x3b, 0x38, 0x40, 0x3b, 0x39, 0x32, 0x3e, 0x29, 0x37, 0x35, 0x3c, 0x3d, + 0x37, 0x3b, 0x35, 0x2f, 0x32, 0x3b, 0x37, 0x3c, 0x40, 0x3e, 0x39, 0x27, + 0x3b, 0x38, 0x37, 0x36, 0x39, 0x37, 0x37, 0x35, 0x42, 0x3e, 0x3b, 0x43, + 0x41, 0x3c, 0x37, 0x2a, 0x3a, 0x3e, 0x38, 0x40, 0x36, 0x3e, 0x44, 0x2e, + 0x3e, 0x3a, 0x37, 0x3b, 0x3e, 0x41, 0x3d, 0x30, 0x3b, 0x3f, 0x41, 0x45, + 0x3a, 0x48, 0x37, 0x2f, 0x3a, 0x37, 0x34, 0x43, 0x42, 0x3d, 0x38, 0x41, + 0x3b, 0x3c, 0x39, 0x3c, 0x39, 0x47, 0x2e, 0x41, 0x42, 0x40, 0x32, 0x36, + 0x43, 0x40, 0x3d, 0x4c, 0x38, 0x3e, 0x3b, 0x41, 0x3d, 0x3b, 0x34, 0x43, + 0x43, 0x3f, 0x44, 0x3c, 0x3a, 0x33, 0x39, 0x42, 0x43, 0x3f, 0x33, 0x3d, + 0x33, 0x3e, 0x48, 0x6b, 0x48, 0x43, 0x36, 0x47, 0x49, 0x44, 0x4a, 0x49, + 0x3c, 0x31, 0x35, 0x3e, 0x5c, 0x34, 0x73, 0x53, 0x33, 0x3c, 0x32, 0x3b, + 0x43, 0x27, 0x59, 0x4e, 0x2b, 0x51, 0x4f, 0x37, 0x36, 0x34, 0x56, 0x34, + 0x32, 0x4f, 0x46, 0x50, 0x40, 0x40, 0x3c, 0x3e, 0x34, 0x37, 0x50, 0x49, + 0x43, 0x47, 0x3e, 0x52, 0x44, 0x38, 0x3b, 0x4f, 0x3a, 0x3d, 0x2b, 0x4c, + 0x40, 0x38, 0x3a, 0x35, 0x3a, 0x3a, 0x3d, 0x38, 0x3d, 0x3b, 0x37, 0x48, + 0x3d, 0x3d, 0x32, 0x30, 0x3a, 0x34, 0x3f, 0x3a, 0x3b, 0x3e, 0x35, 0x2f, + 0x3b, 0x3a, 0x45, 0x3d, 0x42, 0x33, 0x33, 0x24, 0x44, 0x39, 0x3c, 0x3d, + 0x41, 0x3c, 0x37, 0x2c, 0x3b, 0x36, 0x34, 0x41, 0x3d, 0x3f, 0x39, 0x32, + 0x3c, 0x40, 0x44, 0x3d, 0x41, 0x3d, 0x3a, 0x29, 0x3e, 0x3e, 0x43, 0x33, + 0x3f, 0x3e, 0x3e, 0x31, 0x38, 0x3a, 0x34, 0x3d, 0x3f, 0x3e, 0x3a, 0x3d, + 0x3e, 0x48, 0x45, 0x3d, 0x44, 
0x37, 0x33, 0x3d, 0x45, 0x39, 0x40, 0x40, + 0x42, 0x3f, 0x3f, 0x3d, 0x3a, 0x3b, 0x41, 0x33, 0x41, 0x3c, 0x32, 0x55, + 0x43, 0x3a, 0x32, 0x40, 0x3c, 0x3e, 0x40, 0x43, 0x37, 0x3f, 0x40, 0x38, + 0x43, 0x41, 0x36, 0x42, 0x44, 0x3c, 0x32, 0x3f, 0x38, 0x42, 0x46, 0x59, + 0x4c, 0x41, 0x39, 0x47, 0x46, 0x46, 0x44, 0x44, 0x35, 0x42, 0x32, 0x39, + 0x4f, 0x34, 0x6d, 0x55, 0x31, 0x3b, 0x3a, 0x3f, 0x44, 0x2c, 0x5d, 0x43, + 0x26, 0x4a, 0x4f, 0x40, 0x36, 0x32, 0x4d, 0x33, 0x2f, 0x50, 0x4d, 0x57, + 0x3b, 0x40, 0x42, 0x44, 0x41, 0x3f, 0x52, 0x4e, 0x35, 0x41, 0x44, 0x52, + 0x40, 0x35, 0x39, 0x4b, 0x45, 0x34, 0x2c, 0x4a, 0x3b, 0x41, 0x31, 0x33, + 0x3f, 0x3a, 0x36, 0x3c, 0x3c, 0x33, 0x30, 0x38, 0x43, 0x3f, 0x32, 0x2d, + 0x3f, 0x3a, 0x38, 0x41, 0x39, 0x45, 0x36, 0x2e, 0x3c, 0x38, 0x45, 0x3f, + 0x40, 0x3f, 0x3e, 0x26, 0x41, 0x37, 0x3c, 0x44, 0x3f, 0x3f, 0x35, 0x37, + 0x46, 0x34, 0x37, 0x3e, 0x48, 0x38, 0x36, 0x34, 0x33, 0x39, 0x40, 0x3c, + 0x42, 0x3d, 0x3b, 0x31, 0x38, 0x3b, 0x44, 0x42, 0x45, 0x38, 0x41, 0x30, + 0x3d, 0x42, 0x36, 0x3f, 0x3b, 0x45, 0x37, 0x32, 0x3c, 0x37, 0x3d, 0x42, + 0x38, 0x3d, 0x2f, 0x31, 0x39, 0x40, 0x3f, 0x44, 0x3a, 0x41, 0x44, 0x46, + 0x3d, 0x3a, 0x32, 0x3b, 0x34, 0x47, 0x36, 0x4c, 0x47, 0x35, 0x3c, 0x33, + 0x3b, 0x3c, 0x30, 0x43, 0x43, 0x3f, 0x31, 0x40, 0x3a, 0x37, 0x30, 0x46, + 0x39, 0x3b, 0x42, 0x40, 0x2d, 0x3f, 0x3e, 0x6a, 0x50, 0x3b, 0x31, 0x54, + 0x47, 0x3d, 0x48, 0x4e, 0x3b, 0x41, 0x3a, 0x39, 0x49, 0x36, 0x64, 0x4e, + 0x32, 0x39, 0x3d, 0x37, 0x42, 0x2c, 0x5c, 0x43, 0x2a, 0x4b, 0x4b, 0x46, + 0x30, 0x29, 0x52, 0x31, 0x35, 0x44, 0x4a, 0x4b, 0x3d, 0x3b, 0x4e, 0x42, + 0x3d, 0x39, 0x42, 0x52, 0x3f, 0x36, 0x3e, 0x50, 0x3f, 0x32, 0x35, 0x3a, + 0x40, 0x39, 0x35, 0x48, 0x3b, 0x3e, 0x41, 0x43, 0x43, 0x45, 0x2f, 0x36, + 0x38, 0x34, 0x3f, 0x44, 0x32, 0x3f, 0x37, 0x33, 0x33, 0x35, 0x2e, 0x41, + 0x37, 0x3e, 0x38, 0x28, 0x49, 0x30, 0x46, 0x39, 0x3b, 0x30, 0x38, 0x28, + 0x3b, 0x3d, 0x3a, 0x43, 0x3f, 0x34, 0x43, 0x36, 0x39, 0x3c, 0x3e, 0x3e, + 0x39, 0x3b, 0x39, 0x32, 0x3c, 
0x36, 0x3e, 0x38, 0x34, 0x3c, 0x3a, 0x2a, + 0x46, 0x3d, 0x40, 0x37, 0x3b, 0x39, 0x3b, 0x34, 0x38, 0x31, 0x43, 0x46, + 0x3b, 0x43, 0x39, 0x2b, 0x38, 0x40, 0x3e, 0x39, 0x35, 0x3d, 0x2c, 0x36, + 0x37, 0x40, 0x36, 0x40, 0x41, 0x38, 0x32, 0x3f, 0x36, 0x46, 0x34, 0x31, + 0x40, 0x3e, 0x3c, 0x4e, 0x42, 0x3d, 0x36, 0x3f, 0x42, 0x3f, 0x33, 0x40, + 0x34, 0x37, 0x3c, 0x3b, 0x31, 0x47, 0x32, 0x3c, 0x34, 0x3d, 0x42, 0x3b, + 0x37, 0x41, 0x3b, 0x64, 0x52, 0x40, 0x36, 0x4e, 0x46, 0x3f, 0x3f, 0x47, + 0x3c, 0x3a, 0x3a, 0x41, 0x4a, 0x32, 0x5e, 0x50, 0x2d, 0x39, 0x3a, 0x38, + 0x3d, 0x2c, 0x5a, 0x3e, 0x2e, 0x47, 0x3e, 0x3e, 0x33, 0x29, 0x4c, 0x35, + 0x30, 0x4d, 0x4d, 0x4d, 0x38, 0x42, 0x51, 0x47, 0x39, 0x3c, 0x43, 0x4b, + 0x42, 0x3f, 0x3a, 0x4b, 0x44, 0x3f, 0x3a, 0x44, 0x3e, 0x37, 0x30, 0x45, + 0x3d, 0x36, 0x34, 0x3f, 0x36, 0x35, 0x37, 0x36, 0x43, 0x3b, 0x37, 0x3e, + 0x35, 0x3e, 0x32, 0x34, 0x32, 0x38, 0x3c, 0x3a, 0x3a, 0x3c, 0x30, 0x2b, + 0x31, 0x37, 0x30, 0x42, 0x36, 0x37, 0x36, 0x2c, 0x3c, 0x31, 0x41, 0x37, + 0x44, 0x41, 0x3b, 0x37, 0x41, 0x3f, 0x38, 0x3b, 0x3a, 0x3a, 0x3c, 0x2f, + 0x47, 0x41, 0x3e, 0x33, 0x42, 0x3a, 0x32, 0x34, 0x44, 0x40, 0x43, 0x3d, + 0x34, 0x41, 0x38, 0x35, 0x35, 0x3b, 0x45, 0x38, 0x32, 0x37, 0x3c, 0x2e, + 0x39, 0x40, 0x30, 0x3e, 0x42, 0x35, 0x3d, 0x36, 0x3e, 0x3d, 0x39, 0x46, + 0x3f, 0x36, 0x37, 0x49, 0x41, 0x39, 0x3d, 0x3d, 0x33, 0x44, 0x42, 0x50, + 0x3d, 0x3c, 0x3e, 0x3f, 0x42, 0x42, 0x3b, 0x3d, 0x41, 0x31, 0x39, 0x3a, + 0x44, 0x34, 0x38, 0x47, 0x44, 0x38, 0x3b, 0x42, 0x30, 0x42, 0x44, 0x57, + 0x49, 0x3a, 0x39, 0x4f, 0x41, 0x3e, 0x40, 0x43, 0x37, 0x42, 0x3b, 0x48, + 0x50, 0x29, 0x5b, 0x44, 0x2c, 0x40, 0x3f, 0x3c, 0x46, 0x34, 0x5c, 0x41, + 0x2c, 0x48, 0x46, 0x46, 0x35, 0x32, 0x4c, 0x35, 0x2f, 0x3b, 0x48, 0x44, + 0x41, 0x41, 0x49, 0x45, 0x34, 0x37, 0x44, 0x45, 0x43, 0x3b, 0x42, 0x44, + 0x3a, 0x37, 0x48, 0x49, 0x34, 0x39, 0x33, 0x4a, 0x40, 0x3d, 0x33, 0x39, + 0x39, 0x3b, 0x30, 0x31, 0x3d, 0x47, 0x3c, 0x3a, 0x34, 0x3c, 0x3a, 0x2b, + 0x3a, 0x34, 0x41, 0x40, 0x42, 
0x36, 0x44, 0x2c, 0x40, 0x47, 0x3b, 0x37, + 0x38, 0x42, 0x44, 0x29, 0x36, 0x3d, 0x3d, 0x36, 0x42, 0x3b, 0x35, 0x36, + 0x43, 0x39, 0x41, 0x3d, 0x45, 0x41, 0x31, 0x32, 0x40, 0x3d, 0x3c, 0x41, + 0x3e, 0x3d, 0x35, 0x34, 0x32, 0x38, 0x36, 0x3f, 0x3b, 0x3d, 0x39, 0x36, + 0x40, 0x3e, 0x3d, 0x3a, 0x3a, 0x3b, 0x3c, 0x32, 0x40, 0x34, 0x3a, 0x36, + 0x42, 0x47, 0x3e, 0x33, 0x3a, 0x44, 0x30, 0x39, 0x40, 0x3a, 0x36, 0x44, + 0x3c, 0x3b, 0x3f, 0x33, 0x3e, 0x3c, 0x35, 0x53, 0x43, 0x3c, 0x3f, 0x43, + 0x3d, 0x44, 0x33, 0x47, 0x42, 0x40, 0x37, 0x3b, 0x43, 0x3f, 0x33, 0x41, + 0x38, 0x42, 0x44, 0x3d, 0x2d, 0x3f, 0x46, 0x49, 0x4e, 0x3f, 0x36, 0x45, + 0x45, 0x39, 0x40, 0x42, 0x39, 0x39, 0x3a, 0x42, 0x45, 0x2c, 0x61, 0x44, + 0x30, 0x45, 0x38, 0x3a, 0x40, 0x37, 0x58, 0x39, 0x31, 0x3e, 0x3a, 0x3e, + 0x37, 0x32, 0x4a, 0x39, 0x2e, 0x47, 0x3e, 0x4e, 0x3f, 0x3e, 0x48, 0x45, + 0x3f, 0x48, 0x3a, 0x3f, 0x40, 0x36, 0x3a, 0x44, 0x36, 0x3e, 0x3d, 0x41, + 0x45, 0x36, 0x36, 0x4b, 0x3a, 0x3d, 0x45, 0x48, 0x38, 0x45, 0x39, 0x38, + 0x38, 0x3a, 0x42, 0x34, 0x3f, 0x34, 0x39, 0x34, 0x32, 0x3f, 0x3c, 0x3d, + 0x3d, 0x47, 0x3a, 0x2f, 0x3c, 0x3e, 0x3f, 0x39, 0x35, 0x42, 0x3c, 0x2a, + 0x3b, 0x35, 0x42, 0x44, 0x46, 0x39, 0x38, 0x39, 0x43, 0x3a, 0x38, 0x42, + 0x3d, 0x3a, 0x40, 0x35, 0x34, 0x39, 0x3a, 0x38, 0x43, 0x42, 0x42, 0x2d, + 0x31, 0x3b, 0x33, 0x40, 0x3b, 0x47, 0x35, 0x30, 0x3a, 0x3c, 0x3b, 0x47, + 0x3a, 0x3c, 0x38, 0x35, 0x3c, 0x35, 0x3e, 0x3e, 0x39, 0x3d, 0x39, 0x40, + 0x37, 0x33, 0x49, 0x38, 0x3c, 0x43, 0x34, 0x40, 0x39, 0x42, 0x3c, 0x3b, + 0x3e, 0x45, 0x3e, 0x51, 0x3d, 0x3f, 0x3b, 0x34, 0x37, 0x3c, 0x40, 0x47, + 0x3c, 0x41, 0x3f, 0x41, 0x37, 0x3e, 0x36, 0x3c, 0x42, 0x40, 0x3f, 0x3a, + 0x3b, 0x42, 0x44, 0x4b, 0x4b, 0x37, 0x41, 0x4d, 0x41, 0x45, 0x40, 0x41, + 0x40, 0x38, 0x37, 0x40, 0x42, 0x2c, 0x57, 0x43, 0x2d, 0x49, 0x3a, 0x3e, + 0x37, 0x2f, 0x52, 0x37, 0x31, 0x42, 0x3b, 0x3f, 0x39, 0x38, 0x48, 0x3c, + 0x37, 0x3d, 0x3a, 0x39, 0x3a, 0x45, 0x4b, 0x49, 0x3e, 0x44, 0x48, 0x49, + 0x3d, 0x39, 0x3c, 0x41, 0x41, 
0x38, 0x45, 0x38, 0x33, 0x3d, 0x37, 0x47, + 0x34, 0x3f, 0x3b, 0x3d, 0x39, 0x34, 0x30, 0x39, 0x44, 0x36, 0x34, 0x3c, + 0x37, 0x38, 0x45, 0x34, 0x40, 0x33, 0x41, 0x3a, 0x3e, 0x3c, 0x3b, 0x3a, + 0x40, 0x3f, 0x3b, 0x3d, 0x3b, 0x46, 0x41, 0x2a, 0x3a, 0x3c, 0x42, 0x46, + 0x33, 0x3f, 0x2d, 0x3a, 0x45, 0x45, 0x38, 0x3b, 0x44, 0x34, 0x35, 0x3f, + 0x34, 0x43, 0x38, 0x3e, 0x41, 0x3b, 0x42, 0x38, 0x3d, 0x3f, 0x38, 0x45, + 0x3b, 0x35, 0x39, 0x3c, 0x43, 0x43, 0x38, 0x34, 0x44, 0x43, 0x2e, 0x39, + 0x39, 0x40, 0x39, 0x41, 0x41, 0x34, 0x3e, 0x44, 0x3d, 0x43, 0x3a, 0x3a, + 0x3b, 0x3b, 0x36, 0x45, 0x3c, 0x43, 0x3d, 0x48, 0x36, 0x36, 0x39, 0x55, + 0x35, 0x40, 0x3e, 0x49, 0x40, 0x3a, 0x3d, 0x3d, 0x34, 0x47, 0x40, 0x41, + 0x40, 0x47, 0x39, 0x3e, 0x3b, 0x38, 0x3c, 0x3a, 0x35, 0x3e, 0x41, 0x4a, + 0x4b, 0x3f, 0x36, 0x3d, 0x40, 0x3c, 0x39, 0x32, 0x33, 0x36, 0x30, 0x42, + 0x42, 0x36, 0x54, 0x48, 0x2e, 0x4c, 0x34, 0x3c, 0x39, 0x36, 0x4e, 0x37, + 0x2f, 0x3e, 0x30, 0x3d, 0x36, 0x3b, 0x45, 0x36, 0x37, 0x3e, 0x41, 0x4b, + 0x3b, 0x36, 0x45, 0x3b, 0x38, 0x45, 0x3e, 0x43, 0x48, 0x46, 0x44, 0x44, + 0x3e, 0x3b, 0x37, 0x3b, 0x3a, 0x3f, 0x3d, 0x44, 0x39, 0x38, 0x45, 0x43, + 0x3d, 0x35, 0x39, 0x2c, 0x44, 0x41, 0x36, 0x40, 0x3d, 0x39, 0x3d, 0x2f, + 0x3d, 0x39, 0x42, 0x3d, 0x36, 0x46, 0x43, 0x2c, 0x41, 0x3a, 0x30, 0x45, + 0x3f, 0x41, 0x35, 0x2b, 0x3b, 0x38, 0x3a, 0x44, 0x32, 0x32, 0x39, 0x3c, + 0x3a, 0x3a, 0x3c, 0x3a, 0x35, 0x40, 0x3b, 0x31, 0x36, 0x33, 0x35, 0x34, + 0x3c, 0x3b, 0x3d, 0x36, 0x48, 0x3b, 0x3f, 0x42, 0x3e, 0x33, 0x2f, 0x3a, + 0x49, 0x41, 0x39, 0x3e, 0x3c, 0x44, 0x3c, 0x39, 0x33, 0x39, 0x36, 0x35, + 0x3d, 0x42, 0x34, 0x3e, 0x38, 0x45, 0x40, 0x45, 0x3d, 0x48, 0x42, 0x4a, + 0x3f, 0x45, 0x38, 0x42, 0x44, 0x40, 0x34, 0x49, 0x44, 0x3d, 0x3a, 0x39, + 0x3e, 0x3a, 0x42, 0x3e, 0x48, 0x42, 0x3e, 0x3a, 0x3f, 0x3f, 0x32, 0x3b, + 0x38, 0x41, 0x3c, 0x39, 0x33, 0x45, 0x44, 0x3c, 0x48, 0x41, 0x41, 0x3d, + 0x3a, 0x3c, 0x37, 0x33, 0x41, 0x3f, 0x38, 0x3a, 0x3f, 0x37, 0x51, 0x3c, + 0x37, 0x3a, 0x43, 0x37, 0x40, 
0x31, 0x4f, 0x34, 0x3b, 0x44, 0x45, 0x39, + 0x40, 0x33, 0x49, 0x33, 0x3e, 0x35, 0x44, 0x3d, 0x3b, 0x3f, 0x43, 0x41, + 0x43, 0x43, 0x48, 0x44, 0x46, 0x3b, 0x43, 0x3f, 0x3c, 0x3f, 0x3e, 0x3d, + 0x3b, 0x41, 0x3c, 0x43, 0x30, 0x34, 0x39, 0x33, 0x3f, 0x38, 0x36, 0x2e, + 0x33, 0x3f, 0x3c, 0x40, 0x3d, 0x3b, 0x3b, 0x31, 0x36, 0x41, 0x3b, 0x38, + 0x46, 0x36, 0x34, 0x31, 0x42, 0x44, 0x33, 0x35, 0x3f, 0x36, 0x3c, 0x30, + 0x3f, 0x31, 0x39, 0x3e, 0x3f, 0x47, 0x3e, 0x34, 0x36, 0x36, 0x34, 0x39, + 0x37, 0x46, 0x40, 0x33, 0x3b, 0x3a, 0x3f, 0x41, 0x37, 0x44, 0x3a, 0x3f, + 0x34, 0x45, 0x37, 0x33, 0x3f, 0x47, 0x41, 0x36, 0x39, 0x3e, 0x40, 0x38, + 0x41, 0x3d, 0x3d, 0x36, 0x40, 0x3a, 0x3b, 0x3b, 0x41, 0x3b, 0x3a, 0x3f, + 0x3f, 0x3b, 0x35, 0x42, 0x46, 0x3a, 0x30, 0x45, 0x40, 0x37, 0x39, 0x39, + 0x3d, 0x38, 0x3f, 0x45, 0x3f, 0x31, 0x32, 0x3b, 0x35, 0x3e, 0x3b, 0x38, + 0x3b, 0x44, 0x37, 0x39, 0x37, 0x42, 0x3f, 0x44, 0x38, 0x36, 0x37, 0x44, + 0x45, 0x46, 0x41, 0x3b, 0x46, 0x42, 0x43, 0x43, 0x3a, 0x4b, 0x37, 0x35, + 0x3b, 0x40, 0x32, 0x38, 0x41, 0x38, 0x4f, 0x3e, 0x36, 0x3f, 0x47, 0x3b, + 0x47, 0x3b, 0x4a, 0x2e, 0x3d, 0x45, 0x3b, 0x46, 0x3e, 0x38, 0x43, 0x38, + 0x41, 0x48, 0x3a, 0x39, 0x40, 0x45, 0x3b, 0x43, 0x40, 0x3e, 0x43, 0x41, + 0x41, 0x3e, 0x39, 0x3f, 0x35, 0x42, 0x33, 0x3f, 0x3d, 0x32, 0x45, 0x3c, + 0x41, 0x31, 0x45, 0x38, 0x43, 0x45, 0x41, 0x35, 0x35, 0x40, 0x44, 0x36, + 0x3a, 0x3b, 0x3c, 0x2c, 0x3e, 0x41, 0x33, 0x3d, 0x46, 0x34, 0x3b, 0x30, + 0x30, 0x42, 0x43, 0x3d, 0x3d, 0x3d, 0x43, 0x31, 0x3f, 0x40, 0x3a, 0x3f, + 0x48, 0x3e, 0x3b, 0x39, 0x44, 0x43, 0x3b, 0x3a, 0x42, 0x38, 0x38, 0x3b, + 0x3f, 0x44, 0x37, 0x3e, 0x45, 0x40, 0x41, 0x3b, 0x3c, 0x3a, 0x38, 0x37, + 0x3b, 0x33, 0x3f, 0x35, 0x43, 0x3d, 0x33, 0x41, 0x3b, 0x46, 0x39, 0x32, + 0x39, 0x3f, 0x3b, 0x39, 0x47, 0x3c, 0x3f, 0x39, 0x34, 0x3d, 0x3c, 0x46, + 0x3f, 0x3e, 0x3e, 0x44, 0x34, 0x40, 0x3f, 0x39, 0x3c, 0x38, 0x36, 0x45, + 0x42, 0x46, 0x3b, 0x44, 0x3a, 0x3d, 0x3b, 0x42, 0x3b, 0x3b, 0x3c, 0x45, + 0x42, 0x3d, 0x36, 0x37, 0x3d, 
0x43, 0x3f, 0x48, 0xa6, 0xfb, 0xff, 0xff, + 0x04, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0xb3, 0x00, 0x00, 0x00, + 0x39, 0xff, 0xff, 0xff, 0xe5, 0xff, 0xff, 0xff, 0x30, 0x00, 0x00, 0x00, + 0x68, 0xfb, 0xff, 0xff, 0xbc, 0xfc, 0xff, 0xff, 0x20, 0x00, 0x00, 0x00, + 0x14, 0x00, 0x00, 0x00, 0x08, 0x00, 0x00, 0x00, 0xe8, 0x03, 0x00, 0x00, + 0x01, 0x00, 0x00, 0x00, 0x05, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, + 0x03, 0x00, 0x00, 0x00, 0x08, 0x00, 0x00, 0x00, 0x70, 0x02, 0x00, 0x00, + 0x70, 0x03, 0x00, 0x00, 0xf0, 0x00, 0x00, 0x00, 0xf0, 0x01, 0x00, 0x00, + 0x80, 0x00, 0x00, 0x00, 0x0c, 0x00, 0x00, 0x00, 0x50, 0x01, 0x00, 0x00, + 0xa4, 0x02, 0x00, 0x00, 0xba, 0xfc, 0xff, 0xff, 0x00, 0x00, 0x00, 0x03, + 0x24, 0x00, 0x00, 0x00, 0x08, 0x00, 0x00, 0x00, 0x08, 0x00, 0x00, 0x00, + 0x24, 0x00, 0x00, 0x00, 0x0e, 0x00, 0x00, 0x00, 0x6c, 0x61, 0x62, 0x65, + 0x6c, 0x73, 0x5f, 0x73, 0x6f, 0x66, 0x74, 0x6d, 0x61, 0x78, 0x00, 0x00, + 0x02, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, + 0x3c, 0xfd, 0xff, 0xff, 0x2c, 0x00, 0x00, 0x00, 0x20, 0x00, 0x00, 0x00, + 0x14, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x80, 0x3b, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x7f, 0x3f, + 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x2a, 0xfd, 0xff, 0xff, + 0x00, 0x00, 0x00, 0x03, 0x1c, 0x00, 0x00, 0x00, 0x07, 0x00, 0x00, 0x00, + 0x08, 0x00, 0x00, 0x00, 0x1c, 0x00, 0x00, 0x00, 0x05, 0x00, 0x00, 0x00, + 0x61, 0x64, 0x64, 0x5f, 0x31, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, + 0x01, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0xa4, 0xfd, 0xff, 0xff, + 0x2c, 0x00, 0x00, 0x00, 0x20, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, + 0x04, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x58, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x09, 0x97, 0xf5, 0x3f, + 0x01, 0x00, 0x00, 0x00, 0x87, 0x35, 0xa0, 0x43, 0x01, 0x00, 0x00, 0x00, + 0xd6, 0xd7, 0x28, 0xc3, 0x92, 
0xfd, 0xff, 0xff, 0x00, 0x00, 0x00, 0x03, + 0x1c, 0x00, 0x00, 0x00, 0x06, 0x00, 0x00, 0x00, 0x08, 0x00, 0x00, 0x00, + 0x24, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x52, 0x65, 0x6c, 0x75, + 0x00, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, + 0x19, 0x00, 0x00, 0x00, 0x16, 0x00, 0x00, 0x00, 0x08, 0x00, 0x00, 0x00, + 0x14, 0xfe, 0xff, 0xff, 0x2c, 0x00, 0x00, 0x00, 0x20, 0x00, 0x00, 0x00, + 0x14, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, + 0x05, 0x80, 0xbf, 0x3f, 0x01, 0x00, 0x00, 0x00, 0x85, 0xc0, 0xbe, 0x43, + 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xfe, 0xff, 0xff, + 0x00, 0x00, 0x00, 0x03, 0x3c, 0x00, 0x00, 0x00, 0x05, 0x00, 0x00, 0x00, + 0x08, 0x00, 0x00, 0x00, 0x44, 0x00, 0x00, 0x00, 0x25, 0x00, 0x00, 0x00, + 0x77, 0x65, 0x69, 0x67, 0x68, 0x74, 0x73, 0x5f, 0x71, 0x75, 0x61, 0x6e, + 0x74, 0x2f, 0x46, 0x61, 0x6b, 0x65, 0x51, 0x75, 0x61, 0x6e, 0x74, 0x57, + 0x69, 0x74, 0x68, 0x4d, 0x69, 0x6e, 0x4d, 0x61, 0x78, 0x56, 0x61, 0x72, + 0x73, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, + 0x0a, 0x00, 0x00, 0x00, 0x08, 0x00, 0x00, 0x00, 0x08, 0x00, 0x00, 0x00, + 0xa4, 0xfe, 0xff, 0xff, 0x30, 0x00, 0x00, 0x00, 0x24, 0x00, 0x00, 0x00, + 0x18, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, + 0xae, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x01, 0x00, 0x00, 0x00, 0x17, 0xac, 0x6e, 0x3a, 0x01, 0x00, 0x00, 0x00, + 0x20, 0x4e, 0x97, 0x3d, 0x01, 0x00, 0x00, 0x00, 0xaf, 0x27, 0x21, 0xbe, + 0x96, 0xfe, 0xff, 0xff, 0x00, 0x00, 0x00, 0x03, 0x20, 0x00, 0x00, 0x00, + 0x04, 0x00, 0x00, 0x00, 0x08, 0x00, 0x00, 0x00, 0x28, 0x00, 0x00, 0x00, + 0x09, 0x00, 0x00, 0x00, 0x52, 0x65, 0x73, 0x68, 0x61, 0x70, 0x65, 0x5f, + 0x31, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, + 0x31, 0x00, 0x00, 0x00, 0x2b, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, + 0x1c, 0xff, 0xff, 0xff, 0x2c, 
0x00, 0x00, 0x00, 0x20, 0x00, 0x00, 0x00, + 0x14, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x3f, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0xff, 0x42, + 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x0a, 0xff, 0xff, 0xff, + 0x00, 0x00, 0x00, 0x02, 0x20, 0x00, 0x00, 0x00, 0x03, 0x00, 0x00, 0x00, + 0x08, 0x00, 0x00, 0x00, 0x1c, 0x00, 0x00, 0x00, 0x0b, 0x00, 0x00, 0x00, + 0x43, 0x6f, 0x6e, 0x76, 0x32, 0x44, 0x5f, 0x62, 0x69, 0x61, 0x73, 0x00, + 0x01, 0x00, 0x00, 0x00, 0x08, 0x00, 0x00, 0x00, 0xfc, 0xfe, 0xff, 0xff, + 0x14, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, + 0x17, 0xac, 0xee, 0x39, 0x5a, 0xff, 0xff, 0xff, 0x00, 0x00, 0x00, 0x03, + 0x48, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x08, 0x00, 0x00, 0x00, + 0x54, 0x00, 0x00, 0x00, 0x31, 0x00, 0x00, 0x00, 0x77, 0x65, 0x69, 0x67, + 0x68, 0x74, 0x73, 0x5f, 0x71, 0x75, 0x61, 0x6e, 0x74, 0x5f, 0x31, 0x2f, + 0x46, 0x61, 0x6b, 0x65, 0x51, 0x75, 0x61, 0x6e, 0x74, 0x57, 0x69, 0x74, + 0x68, 0x4d, 0x69, 0x6e, 0x4d, 0x61, 0x78, 0x56, 0x61, 0x72, 0x73, 0x2f, + 0x74, 0x72, 0x61, 0x6e, 0x73, 0x70, 0x6f, 0x73, 0x65, 0x00, 0x00, 0x00, + 0x02, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x30, 0x11, 0x00, 0x00, + 0x0c, 0x00, 0x14, 0x00, 0x04, 0x00, 0x08, 0x00, 0x0c, 0x00, 0x10, 0x00, + 0x0c, 0x00, 0x00, 0x00, 0x30, 0x00, 0x00, 0x00, 0x24, 0x00, 0x00, 0x00, + 0x18, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, + 0x3d, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x01, 0x00, 0x00, 0x00, 0x9d, 0xaf, 0xd0, 0x3a, 0x01, 0x00, 0x00, 0x00, + 0xe7, 0x29, 0x9e, 0x3e, 0x01, 0x00, 0x00, 0x00, 0x5b, 0x91, 0xc3, 0xbd, + 0x00, 0x00, 0x0e, 0x00, 0x18, 0x00, 0x08, 0x00, 0x07, 0x00, 0x0c, 0x00, + 0x10, 0x00, 0x14, 0x00, 0x0e, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, + 0x20, 0x00, 0x00, 0x00, 0x01, 
0x00, 0x00, 0x00, 0x08, 0x00, 0x00, 0x00, + 0x28, 0x00, 0x00, 0x00, 0x0b, 0x00, 0x00, 0x00, 0x4d, 0x61, 0x74, 0x4d, + 0x75, 0x6c, 0x5f, 0x62, 0x69, 0x61, 0x73, 0x00, 0x01, 0x00, 0x00, 0x00, + 0x04, 0x00, 0x00, 0x00, 0x0c, 0x00, 0x0c, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x04, 0x00, 0x08, 0x00, 0x0c, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, + 0x04, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x62, 0x1b, 0x1c, 0x3b, + 0x03, 0x00, 0x00, 0x00, 0xb4, 0x00, 0x00, 0x00, 0x5c, 0x00, 0x00, 0x00, + 0x04, 0x00, 0x00, 0x00, 0xc0, 0xff, 0xff, 0xff, 0x00, 0x00, 0x00, 0x09, + 0x02, 0x00, 0x00, 0x00, 0x1c, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, + 0x24, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x01, 0x00, 0x00, 0x00, 0x05, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, + 0x04, 0x00, 0x00, 0x00, 0x00, 0x00, 0x06, 0x00, 0x08, 0x00, 0x04, 0x00, + 0x06, 0x00, 0x00, 0x00, 0x00, 0x00, 0x80, 0x3f, 0x14, 0x00, 0x1c, 0x00, + 0x08, 0x00, 0x0c, 0x00, 0x10, 0x00, 0x07, 0x00, 0x14, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x18, 0x00, 0x14, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x08, + 0x01, 0x00, 0x00, 0x00, 0x1c, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, + 0x28, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x01, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x03, 0x00, 0x00, 0x00, + 0x02, 0x00, 0x00, 0x00, 0x07, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, + 0x04, 0x00, 0x04, 0x00, 0x04, 0x00, 0x00, 0x00, 0x14, 0x00, 0x18, 0x00, + 0x00, 0x00, 0x08, 0x00, 0x0c, 0x00, 0x07, 0x00, 0x10, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x14, 0x00, 0x14, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, + 0x1c, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x30, 0x00, 0x00, 0x00, + 0x04, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, + 0x02, 0x00, 0x00, 0x00, 0x03, 0x00, 0x00, 0x00, 0x03, 0x00, 0x00, 0x00, + 0x06, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x0c, 0x00, 0x10, 0x00, + 0x00, 0x00, 0x04, 0x00, 0x08, 
0x00, 0x0c, 0x00, 0x0c, 0x00, 0x00, 0x00, + 0x02, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x08, 0x00, 0x00, 0x00, + 0x03, 0x00, 0x00, 0x00, 0x24, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, + 0x04, 0x00, 0x00, 0x00, 0xfa, 0xff, 0xff, 0xff, 0x00, 0x19, 0x06, 0x00, + 0x06, 0x00, 0x05, 0x00, 0x06, 0x00, 0x00, 0x00, 0x00, 0x09, 0x06, 0x00, + 0x08, 0x00, 0x07, 0x00, 0x06, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x04}; +const int g_model_len = 19800; diff --git a/tensorflow/lite/micro/examples/micro_speech/simple_features/model.h b/tensorflow/lite/micro/examples/micro_speech/simple_features/model.h new file mode 100644 index 0000000..b3e705e --- /dev/null +++ b/tensorflow/lite/micro/examples/micro_speech/simple_features/model.h @@ -0,0 +1,27 @@ +/* Copyright 2020 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +// This is a standard TensorFlow Lite FlatBuffer model file that has been +// converted into a C data array, so it can be easily compiled into a binary +// for devices that don't have a file system. 
It was created using the command: +// xxd -i model.tflite > model.cc + +#ifndef TENSORFLOW_LITE_MICRO_EXAMPLES_MICRO_SPEECH_SIMPLE_FEATURES_MODEL_H_ +#define TENSORFLOW_LITE_MICRO_EXAMPLES_MICRO_SPEECH_SIMPLE_FEATURES_MODEL_H_ + +extern const unsigned char g_model[]; +extern const int g_model_len; + +#endif // TENSORFLOW_LITE_MICRO_EXAMPLES_MICRO_SPEECH_SIMPLE_FEATURES_MODEL_H_ diff --git a/tensorflow/lite/micro/examples/micro_speech/simple_features/no_power_spectrum_data.cc b/tensorflow/lite/micro/examples/micro_speech/simple_features/no_power_spectrum_data.cc new file mode 100644 index 0000000..aff0242 --- /dev/null +++ b/tensorflow/lite/micro/examples/micro_speech/simple_features/no_power_spectrum_data.cc @@ -0,0 +1,23 @@ +/* Copyright 2018 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +// See the header for documentation on the meaning of this data. 
+ +#include "tensorflow/lite/micro/examples/micro_speech/simple_features/no_power_spectrum_data.h" + +const uint8_t g_no_power_spectrum_data[g_no_power_spectrum_data_size] = { + 255, 7, 6, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, +}; diff --git a/tensorflow/lite/micro/examples/micro_speech/simple_features/no_power_spectrum_data.h b/tensorflow/lite/micro/examples/micro_speech/simple_features/no_power_spectrum_data.h new file mode 100644 index 0000000..f203623 --- /dev/null +++ b/tensorflow/lite/micro/examples/micro_speech/simple_features/no_power_spectrum_data.h @@ -0,0 +1,29 @@ +/* Copyright 2018 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +// This data was extracted from the larger feature data held in +// no_features_data.cc and consists of the 29th spectrogram slice of 43 values. +// This is the expected result of running the sample data in +// no_30ms_sample_data.cc through the preprocessing pipeline. 
+ +#ifndef TENSORFLOW_LITE_MICRO_EXAMPLES_MICRO_SPEECH_SIMPLE_FEATURES_NO_POWER_SPECTRUM_DATA_H_ +#define TENSORFLOW_LITE_MICRO_EXAMPLES_MICRO_SPEECH_SIMPLE_FEATURES_NO_POWER_SPECTRUM_DATA_H_ + +#include + +constexpr int g_no_power_spectrum_data_size = 43; +extern const uint8_t g_no_power_spectrum_data[]; + +#endif // TENSORFLOW_LITE_MICRO_EXAMPLES_MICRO_SPEECH_SIMPLE_FEATURES_NO_POWER_SPECTRUM_DATA_H_ diff --git a/tensorflow/lite/micro/examples/micro_speech/simple_features/no_simple_features_data.cc b/tensorflow/lite/micro/examples/micro_speech/simple_features/no_simple_features_data.cc new file mode 100644 index 0000000..2d7ae62 --- /dev/null +++ b/tensorflow/lite/micro/examples/micro_speech/simple_features/no_simple_features_data.cc @@ -0,0 +1,152 @@ +/* Copyright 2018 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/ + +#include "tensorflow/lite/micro/examples/micro_speech/simple_features/no_simple_features_data.h" + +/* File automatically created by + * tensorflow/examples/speech_commands/wav_to_features.py \ + * --sample_rate=16000 \ + * --clip_duration_ms=1000 \ + * --window_size_ms=30 \ + * --window_stride_ms=20 \ + * --feature_bin_count=40 \ + * --quantize=1 \ + * --preprocess="average" \ + * --input_wav="speech_commands_test_set_v0.02/no/f9643d42_nohash_4.wav" \ + * --output_c_file="no_simple_features_data.cc" \ + */ + +const int g_no_simple_f9643d42_nohash_4_width = 43; +const int g_no_simple_f9643d42_nohash_4_height = 49; +const unsigned char g_no_simple_f9643d42_nohash_4_data[] = { + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 5, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 67, 2, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 139, 2, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 195, 2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 230, 2, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 255, 7, + 6, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 255, 7, 16, 1, 1, 0, 2, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 255, 7, 22, 0, 1, 0, + 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 238, 5, 20, 3, 4, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 144, 4, 19, 3, 5, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 42, 6, 3, + 1, 3, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 3, 1, 5, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 5, 1, 3, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 1, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, +}; diff --git a/tensorflow/lite/micro/examples/micro_speech/simple_features/no_simple_features_data.h b/tensorflow/lite/micro/examples/micro_speech/simple_features/no_simple_features_data.h new file mode 100644 index 0000000..ff46134 --- /dev/null +++ b/tensorflow/lite/micro/examples/micro_speech/simple_features/no_simple_features_data.h @@ -0,0 +1,23 @@ +/* Copyright 2018 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. 
+You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#ifndef TENSORFLOW_LITE_MICRO_EXAMPLES_MICRO_SPEECH_SIMPLE_FEATURES_NO_SIMPLE_FEATURES_DATA_H_ +#define TENSORFLOW_LITE_MICRO_EXAMPLES_MICRO_SPEECH_SIMPLE_FEATURES_NO_SIMPLE_FEATURES_DATA_H_ + +extern const int g_no_simple_f9643d42_nohash_4_width; +extern const int g_no_simple_f9643d42_nohash_4_height; +extern const unsigned char g_no_simple_f9643d42_nohash_4_data[]; + +#endif // TENSORFLOW_LITE_MICRO_EXAMPLES_MICRO_SPEECH_SIMPLE_FEATURES_NO_SIMPLE_FEATURES_DATA_H_ diff --git a/tensorflow/lite/micro/examples/micro_speech/simple_features/simple_features_generator.cc b/tensorflow/lite/micro/examples/micro_speech/simple_features/simple_features_generator.cc new file mode 100644 index 0000000..3733912 --- /dev/null +++ b/tensorflow/lite/micro/examples/micro_speech/simple_features/simple_features_generator.cc @@ -0,0 +1,149 @@ +/* Copyright 2018 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/ + +// Reference implementation of the preprocessing pipeline, with the same +// results as the audio tutorial at +// https://www.tensorflow.org/tutorials/sequences/audio_recognition +// This module takes 30ms of PCM-encoded signed 16-bit audio samples (at 16KHz, +// so 480 values), and extracts a power spectrum of frequencies. There are 43 +// frequency bands in the result, derived from the original 256 output from the +// discrete Fourier transform, and averaged together in groups of 6. +// It's expected that most platforms will have optimized versions of the +// functions used here, for example replacing the DFT with an FFT, so this +// version shouldn't be used where performance is critical. + +#include "tensorflow/lite/micro/examples/micro_speech/simple_features/simple_features_generator.h" + +#include + +#include "tensorflow/lite/micro/examples/micro_speech/simple_features/simple_model_settings.h" +#include "tensorflow/lite/micro/micro_log.h" + +namespace { + +// Needed because some platforms don't have M_PI defined. +constexpr float kPi = 3.14159265358979323846f; + +// Performs a discrete Fourier transform on the real inputs. This corresponds to +// rdft() in the FFT package at http://www.kurims.kyoto-u.ac.jp/~ooura/fft.html, +// and to kiss_fftr() in KISSFFT at https://github.com/mborgerding/kissfft. +// It takes in an array of float real values, and returns a result of the same +// length with float real and imaginary components interleaved, so +// fourier_output[0] is the first real value, fourier_output[1] is the first +// imaginary, fourier_output[2] is the second real, and so on. +// The calling function should ensure that the array passed in as fourier_output +// is at least time_series_size in length. Most optimized FFT implementations +// require the length to be a power of two as well, but this version doesn't +// enforce that. 
+void CalculateDiscreteFourierTransform(float* time_series, int time_series_size, + float* fourier_output) { + for (int i = 0; i < time_series_size / 2; ++i) { + float real = 0; + for (int j = 0; j < time_series_size; ++j) { + real += time_series[j] * std::cos(j * i * kPi * 2 / time_series_size); + } + float imaginary = 0; + for (int j = 0; j < time_series_size; ++j) { + imaginary -= + time_series[j] * std::sin(j * i * kPi * 2 / time_series_size); + } + fourier_output[(i * 2) + 0] = real; + fourier_output[(i * 2) + 1] = imaginary; + } +} + +// Produces a simple sine curve that is used to ensure frequencies at the center +// of the current sample window are weighted more heavily than those at the end. +void CalculatePeriodicHann(int window_length, float* window_function) { + for (int i = 0; i < window_length; ++i) { + window_function[i] = 0.5f - 0.5f * std::cos((2 * kPi * i) / window_length); + } +} + +} // namespace + +TfLiteStatus GenerateSimpleFeatures(const int16_t* input, int input_size, + int output_size, uint8_t* output) { + // Ensure our input and output data arrays are valid. + if (input_size > kMaxAudioSampleSize) { + MicroPrintf("Input size %d larger than %d", input_size, + kMaxAudioSampleSize); + return kTfLiteError; + } + if (output_size != kFeatureSliceSize) { + MicroPrintf("Requested output size %d doesn't match %d", output_size, + kFeatureSliceSize); + return kTfLiteError; + } + + // Pre-calculate the window function we'll be applying to the input data. + // In a real application, we'd calculate this table once in an initialization + // function and store it for repeated reuse. + float window_function[kMaxAudioSampleSize]; + CalculatePeriodicHann(input_size, window_function); + + // Apply the window function to our time series input, and pad it with zeroes + // to the next power of two. 
+ float float_input[kMaxAudioSampleSize]; + for (int i = 0; i < kMaxAudioSampleSize; ++i) { + if (i < input_size) { + float_input[i] = + (input[i] * window_function[i]) / static_cast(1 << 15); + } else { + float_input[i] = 0.0f; + } + } + + // Pull the frequency data from the time series sample. + float fourier_values[kMaxAudioSampleSize]; + CalculateDiscreteFourierTransform(float_input, kMaxAudioSampleSize, + fourier_values); + + // We have the complex numbers giving us information about each frequency + // band, but all we want to know is how strong each frequency is, so calculate + // the squared magnitude by adding together the squares of each component. + float power_spectrum[kMaxAudioSampleSize / 2]; + for (int i = 0; i < (kMaxAudioSampleSize / 2); ++i) { + const float real = fourier_values[(i * 2) + 0]; + const float imaginary = fourier_values[(i * 2) + 1]; + power_spectrum[i] = (real * real) + (imaginary * imaginary); + } + + // Finally, reduce the size of the output by averaging together six adjacent + // frequencies into each slot, producing an array of 43 values. + for (int i = 0; i < kFeatureSliceSize; ++i) { + float total = 0.0f; + for (int j = 0; j < kAverageWindowSize; ++j) { + const int index = (i * kAverageWindowSize) + j; + if (index < (kMaxAudioSampleSize / 2)) { + total += power_spectrum[index]; + } + } + const float average = total / kAverageWindowSize; + // Quantize the result into eight bits, effectively multiplying by two. + // The 127.5 constant here has to match the features_max value defined in + // tensorflow/examples/speech_commands/input_data.py, and this also assumes + // that features_min is zero. If it wasn't, we'd have to subtract it first. 
+ int quantized_average = roundf(average * (255.0f / 127.5f)); + if (quantized_average < 0) { + quantized_average = 0; + } + if (quantized_average > 255) { + quantized_average = 255; + } + output[i] = quantized_average; + } + return kTfLiteOk; +} diff --git a/tensorflow/lite/micro/examples/micro_speech/simple_features/simple_features_generator.h b/tensorflow/lite/micro/examples/micro_speech/simple_features/simple_features_generator.h new file mode 100644 index 0000000..7beccea --- /dev/null +++ b/tensorflow/lite/micro/examples/micro_speech/simple_features/simple_features_generator.h @@ -0,0 +1,29 @@ +/* Copyright 2018 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#ifndef TENSORFLOW_LITE_MICRO_EXAMPLES_MICRO_SPEECH_SIMPLE_FEATURES_SIMPLE_FEATURES_GENERATOR_H_ +#define TENSORFLOW_LITE_MICRO_EXAMPLES_MICRO_SPEECH_SIMPLE_FEATURES_SIMPLE_FEATURES_GENERATOR_H_ + +#include "tensorflow/lite/c/common.h" + +// Converts audio sample data into a more compact form that's appropriate for +// feeding into a neural network. There are reference implementations that use +// both floating point and fixed point available, but because the calculations +// involved can be time-consuming, it's recommended that you use or write +// specialized versions for your platform. 
+TfLiteStatus GenerateSimpleFeatures(const int16_t* input, int input_size, + int output_size, uint8_t* output); + +#endif // TENSORFLOW_LITE_MICRO_EXAMPLES_MICRO_SPEECH_SIMPLE_FEATURES_SIMPLE_FEATURES_GENERATOR_H_ diff --git a/tensorflow/lite/micro/examples/micro_speech/simple_features/simple_features_generator_test.cc b/tensorflow/lite/micro/examples/micro_speech/simple_features/simple_features_generator_test.cc new file mode 100644 index 0000000..f3babd1 --- /dev/null +++ b/tensorflow/lite/micro/examples/micro_speech/simple_features/simple_features_generator_test.cc @@ -0,0 +1,59 @@ +/* Copyright 2018 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/ + +#include "tensorflow/lite/micro/examples/micro_speech/simple_features/simple_features_generator.h" + +#include "tensorflow/lite/c/common.h" +#include "tensorflow/lite/micro/examples/micro_speech/simple_features/no_power_spectrum_data.h" +#include "tensorflow/lite/micro/examples/micro_speech/simple_features/yes_power_spectrum_data.h" +#include "tensorflow/lite/micro/examples/micro_speech/testdata/no_30ms_audio_data.h" +#include "tensorflow/lite/micro/examples/micro_speech/testdata/yes_30ms_audio_data.h" +#include "tensorflow/lite/micro/micro_log.h" +#include "tensorflow/lite/micro/testing/micro_test.h" + +TF_LITE_MICRO_TESTS_BEGIN + +TF_LITE_MICRO_TEST(TestSimpleFeaturesGenerator) { + uint8_t yes_calculated_data[g_yes_power_spectrum_data_size]; + TfLiteStatus yes_status = GenerateSimpleFeatures( + g_yes_30ms_audio_data, g_yes_30ms_audio_data_size, + g_yes_power_spectrum_data_size, yes_calculated_data); + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, yes_status); + + for (int i = 0; i < g_yes_power_spectrum_data_size; ++i) { + TF_LITE_MICRO_EXPECT_EQ(g_yes_power_spectrum_data[i], + yes_calculated_data[i]); + if (g_yes_power_spectrum_data[i] != yes_calculated_data[i]) { + MicroPrintf("Expected value %d but found %d", + g_yes_power_spectrum_data[i], yes_calculated_data[i]); + } + } + + uint8_t no_calculated_data[g_yes_power_spectrum_data_size]; + TfLiteStatus no_status = + GenerateSimpleFeatures(g_no_30ms_audio_data, g_no_30ms_audio_data_size, + g_no_power_spectrum_data_size, no_calculated_data); + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, no_status); + + for (int i = 0; i < g_no_power_spectrum_data_size; ++i) { + TF_LITE_MICRO_EXPECT_EQ(g_no_power_spectrum_data[i], no_calculated_data[i]); + if (g_no_power_spectrum_data[i] != no_calculated_data[i]) { + MicroPrintf("Expected value %d but found %d", g_no_power_spectrum_data[i], + no_calculated_data[i]); + } + } +} + +TF_LITE_MICRO_TESTS_END diff --git 
a/tensorflow/lite/micro/examples/micro_speech/simple_features/simple_model_settings.cc b/tensorflow/lite/micro/examples/micro_speech/simple_features/simple_model_settings.cc new file mode 100644 index 0000000..e2cf661 --- /dev/null +++ b/tensorflow/lite/micro/examples/micro_speech/simple_features/simple_model_settings.cc @@ -0,0 +1,23 @@ +/* Copyright 2018 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#include "tensorflow/lite/micro/examples/micro_speech/simple_features/simple_model_settings.h" + +const char* kCategoryLabels[kCategoryCount] = { + "silence", + "unknown", + "yes", + "no", +}; diff --git a/tensorflow/lite/micro/examples/micro_speech/simple_features/simple_model_settings.h b/tensorflow/lite/micro/examples/micro_speech/simple_features/simple_model_settings.h new file mode 100644 index 0000000..9d129c8 --- /dev/null +++ b/tensorflow/lite/micro/examples/micro_speech/simple_features/simple_model_settings.h @@ -0,0 +1,43 @@ +/* Copyright 2018 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. 
+You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#ifndef TENSORFLOW_LITE_MICRO_EXAMPLES_MICRO_SPEECH_SIMPLE_FEATURES_SIMPLE_MODEL_SETTINGS_H_ +#define TENSORFLOW_LITE_MICRO_EXAMPLES_MICRO_SPEECH_SIMPLE_FEATURES_SIMPLE_MODEL_SETTINGS_H_ + +// Keeping these as constant expressions allow us to allocate fixed-sized arrays +// on the stack for our working memory. + +// The size of the input time series data we pass to the FFT to produce the +// frequency information. This has to be a power of two, and since we're dealing +// with 30ms of 16KHz inputs, which means 480 samples, this is the next value. +constexpr int kMaxAudioSampleSize = 512; +constexpr int kAudioSampleFrequency = 16000; + +// All of these values are derived from the values used during model training, +// if you change your model you'll need to update these constants. 
+constexpr int kAverageWindowSize = 6; +constexpr int kFeatureSliceSize = + ((kMaxAudioSampleSize / 2) + (kAverageWindowSize - 1)) / kAverageWindowSize; +constexpr int kFeatureSliceCount = 49; +constexpr int kFeatureElementCount = (kFeatureSliceSize * kFeatureSliceCount); +constexpr int kFeatureSliceStrideMs = 20; +constexpr int kFeatureSliceDurationMs = 30; + +constexpr int kCategoryCount = 4; +constexpr int kSilenceIndex = 0; +constexpr int kUnknownIndex = 1; +extern const char* kCategoryLabels[kCategoryCount]; + +#endif // TENSORFLOW_LITE_MICRO_EXAMPLES_MICRO_SPEECH_SIMPLE_FEATURES_SIMPLE_MODEL_SETTINGS_H_ diff --git a/tensorflow/lite/micro/examples/micro_speech/simple_features/yes_power_spectrum_data.cc b/tensorflow/lite/micro/examples/micro_speech/simple_features/yes_power_spectrum_data.cc new file mode 100644 index 0000000..96a7c9a --- /dev/null +++ b/tensorflow/lite/micro/examples/micro_speech/simple_features/yes_power_spectrum_data.cc @@ -0,0 +1,23 @@ +/* Copyright 2018 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +// See the header for documentation on the meaning of this data. 
+ +#include "tensorflow/lite/micro/examples/micro_speech/simple_features/yes_power_spectrum_data.h" + +const uint8_t g_yes_power_spectrum_data[g_yes_power_spectrum_data_size] = { + 8, 89, 8, 0, 0, 0, 0, 0, 0, 0, 0, 4, 13, 1, 6, 23, 20, 6, 4, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, +}; diff --git a/tensorflow/lite/micro/examples/micro_speech/simple_features/yes_power_spectrum_data.h b/tensorflow/lite/micro/examples/micro_speech/simple_features/yes_power_spectrum_data.h new file mode 100644 index 0000000..5264e62 --- /dev/null +++ b/tensorflow/lite/micro/examples/micro_speech/simple_features/yes_power_spectrum_data.h @@ -0,0 +1,29 @@ +/* Copyright 2018 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +// This data was extracted from the larger feature data held in +// no_features_data.cc and consists of the 26th spectrogram slice of 43 values. +// This is the expected result of running the sample data in +// yes_30ms_sample_data.cc through the preprocessing pipeline. 
+ +#ifndef TENSORFLOW_LITE_MICRO_EXAMPLES_MICRO_SPEECH_SIMPLE_FEATURES_YES_POWER_SPECTRUM_DATA_H_ +#define TENSORFLOW_LITE_MICRO_EXAMPLES_MICRO_SPEECH_SIMPLE_FEATURES_YES_POWER_SPECTRUM_DATA_H_ + +#include + +constexpr int g_yes_power_spectrum_data_size = 43; +extern const uint8_t g_yes_power_spectrum_data[]; + +#endif // TENSORFLOW_LITE_MICRO_EXAMPLES_MICRO_SPEECH_SIMPLE_FEATURES_YES_POWER_SPECTRUM_DATA_H_ diff --git a/tensorflow/lite/micro/examples/micro_speech/simple_features/yes_simple_features_data.cc b/tensorflow/lite/micro/examples/micro_speech/simple_features/yes_simple_features_data.cc new file mode 100644 index 0000000..078f78d --- /dev/null +++ b/tensorflow/lite/micro/examples/micro_speech/simple_features/yes_simple_features_data.cc @@ -0,0 +1,158 @@ +/* Copyright 2018 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/ + +#include "tensorflow/lite/micro/examples/micro_speech/simple_features/yes_simple_features_data.h" + +/* File automatically created by + * tensorflow/examples/speech_commands/wav_to_features.py \ + * --sample_rate=16000 \ + * --clip_duration_ms=1000 \ + * --window_size_ms=30 \ + * --window_stride_ms=20 \ + * --feature_bin_count=40 \ + * --quantize=1 \ + * --preprocess="average" \ + * --input_wav="speech_commands_test_set_v0.02/yes/f2e59fea_nohash_1.wav" \ + * --output_c_file="yes_simple_features_data.cc" \ + */ + +const int g_yes_simple_f2e59fea_nohash_1_width = 43; +const int g_yes_simple_f2e59fea_nohash_1_height = 49; +const unsigned char g_yes_simple_f2e59fea_nohash_1_data[] = { + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 4, 5, 1, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 2, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 
19, 1, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 1, 0, 1, 3, 3, 1, 1, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 8, 89, 8, 0, 0, 0, 0, 0, 0, 0, 0, 4, 13, + 1, 6, 23, 20, 6, 4, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 19, 177, 42, 1, + 1, 0, 0, 0, 0, 2, 3, 119, 51, 5, 139, 92, 58, 58, 15, 2, 1, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 13, 165, 176, 3, 1, 1, 0, 0, 1, 1, 32, 214, + 26, 19, 113, 103, 28, 22, 27, 3, 1, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 12, 55, 128, + 27, 1, 1, 0, 1, 4, 2, 52, 93, 10, 28, 156, 10, 21, 21, 3, 3, + 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 14, 99, 32, 65, 7, 1, 2, 2, 6, 13, 121, + 36, 15, 11, 112, 125, 14, 5, 13, 4, 4, 2, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 24, 25, + 32, 5, 1, 0, 0, 0, 1, 0, 7, 5, 1, 1, 3, 3, 0, 3, 3, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 13, 13, 5, 1, 0, 0, 0, 0, 0, 3, + 4, 1, 0, 1, 2, 3, 1, 1, 1, 4, 8, 1, 2, 1, 3, 1, 1, + 0, 1, 1, 3, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, + 8, 2, 1, 0, 0, 0, 0, 0, 1, 1, 0, 0, 1, 1, 2, 0, 2, + 1, 0, 2, 0, 2, 2, 3, 1, 1, 0, 1, 1, 4, 5, 1, 0, 1, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 1, 1, 1, 0, 1, 2, 1, 0, 1, 3, 1, + 1, 3, 1, 1, 6, 2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 1, 1, 0, 1, 2, 6, 2, 4, 2, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, + 0, 0, 1, 2, 1, 1, 2, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 4, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 2, 1, 0, 0, 2, 3, 5, 2, 0, + 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 1, 2, 2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 
0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1, 2, 3, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 1, 1, 1, 1, 0, 0, 0, 1, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, +}; diff --git a/tensorflow/lite/micro/examples/micro_speech/simple_features/yes_simple_features_data.h b/tensorflow/lite/micro/examples/micro_speech/simple_features/yes_simple_features_data.h new file mode 100644 index 0000000..98c7e42 --- /dev/null +++ b/tensorflow/lite/micro/examples/micro_speech/simple_features/yes_simple_features_data.h @@ -0,0 +1,23 @@ +/* Copyright 2018 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. 
+You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#ifndef TENSORFLOW_LITE_MICRO_EXAMPLES_MICRO_SPEECH_SIMPLE_FEATURES_YES_SIMPLE_FEATURES_DATA_H_ +#define TENSORFLOW_LITE_MICRO_EXAMPLES_MICRO_SPEECH_SIMPLE_FEATURES_YES_SIMPLE_FEATURES_DATA_H_ + +extern const int g_yes_simple_f2e59fea_nohash_1_width; +extern const int g_yes_simple_f2e59fea_nohash_1_height; +extern const unsigned char g_yes_simple_f2e59fea_nohash_1_data[]; + +#endif // TENSORFLOW_LITE_MICRO_EXAMPLES_MICRO_SPEECH_SIMPLE_FEATURES_YES_SIMPLE_FEATURES_DATA_H_ diff --git a/tensorflow/lite/micro/examples/micro_speech/testdata/no_1000ms.wav b/tensorflow/lite/micro/examples/micro_speech/testdata/no_1000ms.wav new file mode 100644 index 0000000..47a543a Binary files /dev/null and b/tensorflow/lite/micro/examples/micro_speech/testdata/no_1000ms.wav differ diff --git a/tensorflow/lite/micro/examples/micro_speech/testdata/no_30ms.wav b/tensorflow/lite/micro/examples/micro_speech/testdata/no_30ms.wav new file mode 100644 index 0000000..0508fae Binary files /dev/null and b/tensorflow/lite/micro/examples/micro_speech/testdata/no_30ms.wav differ diff --git a/tensorflow/lite/micro/examples/micro_speech/testdata/yes_1000ms.wav b/tensorflow/lite/micro/examples/micro_speech/testdata/yes_1000ms.wav new file mode 100644 index 0000000..8cc644d Binary files /dev/null and b/tensorflow/lite/micro/examples/micro_speech/testdata/yes_1000ms.wav differ diff --git a/tensorflow/lite/micro/examples/micro_speech/testdata/yes_30ms.wav b/tensorflow/lite/micro/examples/micro_speech/testdata/yes_30ms.wav new 
file mode 100644 index 0000000..b9a7d78 Binary files /dev/null and b/tensorflow/lite/micro/examples/micro_speech/testdata/yes_30ms.wav differ diff --git a/tensorflow/lite/micro/examples/micro_speech/train/README.md b/tensorflow/lite/micro/examples/micro_speech/train/README.md new file mode 100644 index 0000000..18a6846 --- /dev/null +++ b/tensorflow/lite/micro/examples/micro_speech/train/README.md @@ -0,0 +1,207 @@ + +# Micro Speech Training + +This example shows how to train a 20 kB model that can recognize 2 keywords, +"yes" and "no", from speech data. + +If the input does not belong to either categories, it is classified as "unknown" +and if the input is silent, it is classified as "silence". + +You can retrain it to recognize any combination of words (2 or more) from this +list: + +``` +yes +no +up +down +left +right +on +off +stop +go +``` + +The scripts used in training the model have been sourced from the +[Simple Audio Recognition](https://www.tensorflow.org/tutorials/audio/simple_audio) +tutorial. + +## Table of contents + +- [Overview](#overview) +- [Training](#training) +- [Trained Models](#trained-models) +- [Model Architecture](#model-architecture) +- [Dataset](#dataset) +- [Preprocessing Speech Input](#preprocessing-speech-input) +- [Other Training Methods](#other-training-methods) + +## Overview + +1. Dataset: Speech Commands, Version 2. + ([Download Link](https://storage.cloud.google.com/download.tensorflow.org/data/speech_commands_v0.02.tar.gz), + [Paper](https://arxiv.org/abs/1804.03209)) +2. Dataset Type: **Speech** +3. Deep Learning Framework: **TensorFlow 1.5** +4. Language: **Python 3.7** +5. Model Size: **<20 kB** +6. Model Category: **Multiclass Classification** + +## Training + +Train the model in the cloud using Google Colaboratory or locally using a +Jupyter Notebook. + + + + +
+ Google Colaboratory + + Jupyter Notebook +
+ +*Estimated Training Time: ~2 Hours.* + +For more options, refer to the [Other Training Methods](#other-training-methods) +section. + +## Trained Models + +| Download Link | [speech_commands.zip](https://storage.googleapis.com/download.tensorflow.org/models/tflite/micro/micro_speech_2020_04_13.zip) | +| ------------- |-------------| + +The `models` directory in the above zip file can be generated by following the +instructions in the [Training](#training) section above. It +includes the following 3 model files: + +| Name | Format | Target Framework | Target Device | +| :------------- | :----------- | :--------------- | :------------------------ | +| `model.pb` | Frozen | TensorFlow | Large-Scale/Cloud/Servers | +: : GraphDef : : : +| `model.tflite` | Fully | TensorFlow Lite | Mobile Devices | +: *(<20 kB)* : Quantized* : : : +: : TFLite Model : : : +| `model.cc` | C Source | TensorFlow Lite | Microcontrollers | +: : File : for : : +: : : Microcontrollers : : + +**Fully quantized implies that the model is **strictly int8** quantized +**including** the input(s) and output(s).* + + +## Model Architecture + +This is a simple model comprising of a Convolutional 2D layer, a Fully Connected +Layer or a MatMul Layer (output: logits) and a Softmax layer +(output: probabilities) as shown below. Refer to the [`tiny_conv`](https://github.com/tensorflow/tflite-micro/blob/main/tensorflow/examples/speech_commands/models.py#L673) +model architecture. + +![model_architecture.png](../images/model_architecture.png) + +*This image was derived from visualizing the 'model.tflite' file in +[Netron](https://github.com/lutzroeder/netron)* + +This doesn't produce a highly accurate model, but it's designed to be used as +the first stage of a pipeline, running on a low-energy piece of hardware that +can always be on, and then wake higher-power chips when a possible utterance has +been found, so that more accurate analysis can be done. 
Additionally, the model +takes in preprocessed speech input as a result of which we can leverage a +simpler model for accurate results. + +## Dataset + +The Speech Commands Dataset. ([Download Link](https://storage.cloud.google.com/download.tensorflow.org/data/speech_commands_v0.02.tar.gz), +[Paper](https://arxiv.org/abs/1804.03209)) consists of over 105,000 WAVE audio +files of people saying thirty different words. This data was collected by +Google and released under a CC BY license. You can help improve it by +contributing five minutes of your own voice. The archive is over 2GB, so this +part may take a while, but you should see progress logs, and once it's been +downloaded you won't need to do this again. + +## Preprocessing Speech Input + +In this section we discuss spectrograms, the preprocessed speech input to the +model. Here's an illustration of the process: + +![spectrogram diagram](https://storage.googleapis.com/download.tensorflow.org/example_images/spectrogram_diagram.png) + +The model doesn't take in raw audio sample data, instead it works with +spectrograms which are two dimensional arrays that are made up of slices of +frequency information, each taken from a different time window. + +The recipe for creating the spectrogram data is that each frequency slice is +created by running an FFT across a 30ms section of the audio sample data. The +input samples are treated as being between -1 and +1 as real values (encoded as +-32,768 and 32,767 in 16-bit signed integer samples). + +This results in an FFT with 256 entries. Every sequence of six entries is +averaged together, giving a total of 43 frequency buckets in the final slice. +The results are stored as unsigned eight-bit values, where 0 represents a real +number of zero, and 255 represents 127.5 as a real number. + +Each adjacent frequency entry is stored in ascending memory order (frequency +bucket 0 at data[0], bucket 1 at data[1], etc). 
The window for the frequency +analysis is then moved forward by 20ms, and the process repeated, storing the +results in the next memory row (for example bucket 0 in this moved window would +be in data[43 + 0], etc). This process happens 49 times in total, producing a +single channel image that is 43 pixels wide, and 49 rows high. + +In a complete application these spectrograms would be calculated at runtime from +microphone inputs, but the code for doing that is not yet included in this +sample code. The test uses spectrograms that have been pre-calculated from +one-second WAV files in the test dataset generated by running the following +commands: + +``` +python tensorflow/tensorflow/examples/speech_commands/wav_to_features.py \ +--input_wav=/tmp/speech_dataset/yes/f2e59fea_nohash_1.wav \ +--output_c_file=/tmp/yes_features_data.cc \ +--window_stride=20 --preprocess=average --quantize=1 + +python tensorflow/tensorflow/examples/speech_commands/wav_to_features.py \ +--input_wav=/tmp/speech_dataset/no/f9643d42_nohash_4.wav \ +--output_c_file=/tmp/no_features_data.cc \ +--window_stride=20 --preprocess=average --quantize=1 +``` + + +## Other Training Methods + +### Use [Google Cloud](https://cloud.google.com/). + +*Note: Google Cloud isn't free. You need to pay depending on how long you use +run the VM and what resources you use.* + +1. Create a Virtual Machine (VM) using a pre-configured Deep Learning VM Image. + +``` +export IMAGE_FAMILY="tf-latest-cpu" +export ZONE="us-west1-b" # Or any other required region +export INSTANCE_NAME="model-trainer" +export INSTANCE_TYPE="n1-standard-8" # or any other instance type +gcloud compute instances create $INSTANCE_NAME \ + --zone=$ZONE \ + --image-family=$IMAGE_FAMILY \ + --image-project=deeplearning-platform-release \ + --machine-type=$INSTANCE_TYPE \ + --boot-disk-size=120GB \ + --min-cpu-platform=Intel\ Skylake +``` + +2. 
As soon as instance has been created you can SSH to it: + +``` +gcloud compute ssh "jupyter@${INSTANCE_NAME}" +``` + +3. Train a model by following the instructions in the [`train_micro_speech_model.ipynb`](train_micro_speech_model.ipynb) +jupyter notebook. + +4. Finally, don't forget to remove the instance when training is done: + +``` +gcloud compute instances delete "${INSTANCE_NAME}" --zone="${ZONE}" +``` diff --git a/tensorflow/lite/micro/examples/micro_speech/train/train_micro_speech_model.ipynb b/tensorflow/lite/micro/examples/micro_speech/train/train_micro_speech_model.ipynb new file mode 100644 index 0000000..4408b1a --- /dev/null +++ b/tensorflow/lite/micro/examples/micro_speech/train/train_micro_speech_model.ipynb @@ -0,0 +1,610 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": { + "colab_type": "text", + "id": "pO4-CY_TCZZS" + }, + "source": [ + "# Train a Simple Audio Recognition Model" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "colab_type": "text", + "id": "BaFfr7DHRmGF" + }, + "source": [ + "This notebook demonstrates how to train a 20 kB [Simple Audio Recognition](https://www.tensorflow.org/tutorials/sequences/audio_recognition) model to recognize keywords in speech.\n", + "\n", + "The model created in this notebook is used in the [micro_speech](https://github.com/tensorflow/tflite-micro/blob/main/tensorflow/lite/micro/examples/micro_speech) example for [TensorFlow Lite for MicroControllers](https://www.tensorflow.org/lite/microcontrollers/overview).\n", + "\n", + "\n", + " \n", + " \n", + "
\n", + " Run in Google Colab\n", + " \n", + " View source on GitHub\n", + "
\n" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "colab_type": "text", + "id": "XaVtYN4nlCft" + }, + "source": [ + "**Training is much faster using GPU acceleration.** Before you proceed, ensure you are using a GPU runtime by going to **Runtime -> Change runtime type** and set **Hardware accelerator: GPU**. Training 15,000 iterations will take 1.5 - 2 hours on a GPU runtime.\n", + "\n", + "## Configure Defaults\n", + "\n", + "**MODIFY** the following constants for your specific use case." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "colab": {}, + "colab_type": "code", + "id": "ludfxbNIaegy" + }, + "outputs": [], + "source": [ + "# A comma-delimited list of the words you want to train for.\n", + "# The options are: yes,no,up,down,left,right,on,off,stop,go\n", + "# All the other words will be used to train an \"unknown\" label and silent\n", + "# audio data with no spoken words will be used to train a \"silence\" label.\n", + "WANTED_WORDS = \"yes,no\"\n", + "\n", + "# The number of steps and learning rates can be specified as comma-separated\n", + "# lists to define the rate at each stage. 
For example,\n", + "# TRAINING_STEPS=12000,3000 and LEARNING_RATE=0.001,0.0001\n", + "# will run 12,000 training loops in total, with a rate of 0.001 for the first\n", + "# 8,000, and 0.0001 for the final 3,000.\n", + "TRAINING_STEPS = \"12000,3000\"\n", + "LEARNING_RATE = \"0.001,0.0001\"\n", + "\n", + "# Calculate the total number of steps, which is used to identify the checkpoint\n", + "# file name.\n", + "TOTAL_STEPS = str(sum(map(lambda string: int(string), TRAINING_STEPS.split(\",\"))))\n", + "\n", + "# Print the configuration to confirm it\n", + "print(\"Training these words: %s\" % WANTED_WORDS)\n", + "print(\"Training steps in each stage: %s\" % TRAINING_STEPS)\n", + "print(\"Learning rate in each stage: %s\" % LEARNING_RATE)\n", + "print(\"Total number of training steps: %s\" % TOTAL_STEPS)" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "colab_type": "text", + "id": "gCgeOpvY9pAi" + }, + "source": [ + "**DO NOT MODIFY** the following constants as they include filepaths used in this notebook and data that is shared during training and inference." 
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "colab": {}, + "colab_type": "code", + "id": "Nd1iM1o2ymvA" + }, + "outputs": [], + "source": [ + "# Calculate the percentage of 'silence' and 'unknown' training samples required\n", + "# to ensure that we have equal number of samples for each label.\n", + "number_of_labels = WANTED_WORDS.count(',') + 1\n", + "number_of_total_labels = number_of_labels + 2 # for 'silence' and 'unknown' label\n", + "equal_percentage_of_training_samples = int(100.0/(number_of_total_labels))\n", + "SILENT_PERCENTAGE = equal_percentage_of_training_samples\n", + "UNKNOWN_PERCENTAGE = equal_percentage_of_training_samples\n", + "\n", + "# Constants which are shared during training and inference\n", + "PREPROCESS = 'micro'\n", + "WINDOW_STRIDE = 20\n", + "MODEL_ARCHITECTURE = 'tiny_conv' # Other options include: single_fc, conv,\n", + " # low_latency_conv, low_latency_svdf, tiny_embedding_conv\n", + "\n", + "# Constants used during training only\n", + "VERBOSITY = 'WARN'\n", + "EVAL_STEP_INTERVAL = '1000'\n", + "SAVE_STEP_INTERVAL = '1000'\n", + "\n", + "# Constants for training directories and filepaths\n", + "DATASET_DIR = 'dataset/'\n", + "LOGS_DIR = 'logs/'\n", + "TRAIN_DIR = 'train/' # for training checkpoints and other files.\n", + "\n", + "# Constants for inference directories and filepaths\n", + "import os\n", + "MODELS_DIR = 'models'\n", + "if not os.path.exists(MODELS_DIR):\n", + " os.mkdir(MODELS_DIR)\n", + "MODEL_TF = os.path.join(MODELS_DIR, 'model.pb')\n", + "MODEL_TFLITE = os.path.join(MODELS_DIR, 'model.tflite')\n", + "FLOAT_MODEL_TFLITE = os.path.join(MODELS_DIR, 'float_model.tflite')\n", + "MODEL_TFLITE_MICRO = os.path.join(MODELS_DIR, 'model.cc')\n", + "SAVED_MODEL = os.path.join(MODELS_DIR, 'saved_model')\n", + "\n", + "QUANT_INPUT_MIN = 0.0\n", + "QUANT_INPUT_MAX = 26.0\n", + "QUANT_INPUT_RANGE = QUANT_INPUT_MAX - QUANT_INPUT_MIN" + ] + }, + { + "cell_type": "markdown", + "metadata": { + 
"colab_type": "text", + "id": "6rLYpvtg9P4o" + }, + "source": [ + "## Setup Environment\n", + "\n", + "Install Dependencies" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "colab": {}, + "colab_type": "code", + "id": "ed_XpUrU5DvY" + }, + "outputs": [], + "source": [ + "%tensorflow_version 1.x\n", + "import tensorflow as tf" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "colab_type": "text", + "id": "T9Ty5mR58E4i" + }, + "source": [ + "**DELETE** any old data from previous runs\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "colab": {}, + "colab_type": "code", + "id": "APGx0fEh7hFF" + }, + "outputs": [], + "source": [ + "!rm -rf {DATASET_DIR} {LOGS_DIR} {TRAIN_DIR} {MODELS_DIR}" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "colab_type": "text", + "id": "GfEUlfFBizio" + }, + "source": [ + "Clone the TensorFlow Github Repository, which contains the relevant code required to run this tutorial." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "colab": {}, + "colab_type": "code", + "id": "yZArmzT85SLq" + }, + "outputs": [], + "source": [ + "!git clone -q --depth 1 https://github.com/tensorflow/tensorflow" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "colab_type": "text", + "id": "nS9swHLSi7Bi" + }, + "source": [ + "Load TensorBoard to visualize the accuracy and loss as training proceeds.\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "colab": {}, + "colab_type": "code", + "id": "q4qF1VxP3UE4" + }, + "outputs": [], + "source": [ + "%load_ext tensorboard\n", + "%tensorboard --logdir {LOGS_DIR}" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "colab_type": "text", + "id": "x1J96Ron-O4R" + }, + "source": [ + "## Training\n", + "\n", + "The following script downloads the dataset and begin training." 
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "colab": {}, + "colab_type": "code", + "id": "VJsEZx6lynbY" + }, + "outputs": [], + "source": [ + "!python tensorflow/tensorflow/examples/speech_commands/train.py \\\n", + "--data_dir={DATASET_DIR} \\\n", + "--wanted_words={WANTED_WORDS} \\\n", + "--silence_percentage={SILENT_PERCENTAGE} \\\n", + "--unknown_percentage={UNKNOWN_PERCENTAGE} \\\n", + "--preprocess={PREPROCESS} \\\n", + "--window_stride={WINDOW_STRIDE} \\\n", + "--model_architecture={MODEL_ARCHITECTURE} \\\n", + "--how_many_training_steps={TRAINING_STEPS} \\\n", + "--learning_rate={LEARNING_RATE} \\\n", + "--train_dir={TRAIN_DIR} \\\n", + "--summaries_dir={LOGS_DIR} \\\n", + "--verbosity={VERBOSITY} \\\n", + "--eval_step_interval={EVAL_STEP_INTERVAL} \\\n", + "--save_step_interval={SAVE_STEP_INTERVAL}" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "colab_type": "text", + "id": "UczQKtqLi7OJ" + }, + "source": [ + "# Skipping the training\n", + "\n", + "If you don't want to spend an hour or two training the model from scratch, you can download pretrained checkpoints by uncommenting the lines below (removing the '#'s at the start of each line) and running them." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "colab": {}, + "colab_type": "code", + "id": "RZw3VNlnla-J" + }, + "outputs": [], + "source": [ + "#!curl -O \"https://storage.googleapis.com/download.tensorflow.org/models/tflite/speech_micro_train_2020_05_10.tgz\"\n", + "#!tar xzf speech_micro_train_2020_05_10.tgz" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "colab_type": "text", + "id": "XQUJLrdS-ftl" + }, + "source": [ + "## Generate a TensorFlow Model for Inference\n", + "\n", + "Combine relevant training results (graph, weights, etc) into a single file for inference. 
This process is known as freezing a model and the resulting model is known as a frozen model/graph, as it cannot be further re-trained after this process." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "colab": {}, + "colab_type": "code", + "id": "xyc3_eLh9sAg" + }, + "outputs": [], + "source": [ + "!rm -rf {SAVED_MODEL}\n", + "!python tensorflow/tensorflow/examples/speech_commands/freeze.py \\\n", + "--wanted_words=$WANTED_WORDS \\\n", + "--window_stride_ms=$WINDOW_STRIDE \\\n", + "--preprocess=$PREPROCESS \\\n", + "--model_architecture=$MODEL_ARCHITECTURE \\\n", + "--start_checkpoint=$TRAIN_DIR$MODEL_ARCHITECTURE'.ckpt-'{TOTAL_STEPS} \\\n", + "--save_format=saved_model \\\n", + "--output_file={SAVED_MODEL}" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "colab_type": "text", + "id": "_DBGDxVI-nKG" + }, + "source": [ + "## Generate a TensorFlow Lite Model\n", + "\n", + "Convert the frozen graph into a TensorFlow Lite model, which is fully quantized for use with embedded devices.\n", + "\n", + "The following cell will also print the model size, which will be under 20 kilobytes." 
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "colab": {}, + "colab_type": "code", + "id": "RIitkqvGWmre" + }, + "outputs": [], + "source": [ + "import sys\n", + "# We add this path so we can import the speech processing modules.\n", + "sys.path.append(\"/content/tensorflow/tensorflow/examples/speech_commands/\")\n", + "import input_data\n", + "import models\n", + "import numpy as np" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "colab": {}, + "colab_type": "code", + "id": "kzqECqMxgBh4" + }, + "outputs": [], + "source": [ + "SAMPLE_RATE = 16000\n", + "CLIP_DURATION_MS = 1000\n", + "WINDOW_SIZE_MS = 30.0\n", + "FEATURE_BIN_COUNT = 40\n", + "BACKGROUND_FREQUENCY = 0.8\n", + "BACKGROUND_VOLUME_RANGE = 0.1\n", + "TIME_SHIFT_MS = 100.0\n", + "\n", + "DATA_URL = 'https://storage.googleapis.com/download.tensorflow.org/data/speech_commands_v0.02.tar.gz'\n", + "VALIDATION_PERCENTAGE = 10\n", + "TESTING_PERCENTAGE = 10" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "colab": {}, + "colab_type": "code", + "id": "rNQdAplJV1fz" + }, + "outputs": [], + "source": [ + "model_settings = models.prepare_model_settings(\n", + " len(input_data.prepare_words_list(WANTED_WORDS.split(','))),\n", + " SAMPLE_RATE, CLIP_DURATION_MS, WINDOW_SIZE_MS,\n", + " WINDOW_STRIDE, FEATURE_BIN_COUNT, PREPROCESS)\n", + "audio_processor = input_data.AudioProcessor(\n", + " DATA_URL, DATASET_DIR,\n", + " SILENT_PERCENTAGE, UNKNOWN_PERCENTAGE,\n", + " WANTED_WORDS.split(','), VALIDATION_PERCENTAGE,\n", + " TESTING_PERCENTAGE, model_settings, LOGS_DIR)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "colab": {}, + "colab_type": "code", + "id": "lBj_AyCh1cC0" + }, + "outputs": [], + "source": [ + "with tf.Session() as sess:\n", + " float_converter = tf.lite.TFLiteConverter.from_saved_model(SAVED_MODEL)\n", + " float_tflite_model = float_converter.convert()\n", + " 
float_tflite_model_size = open(FLOAT_MODEL_TFLITE, \"wb\").write(float_tflite_model)\n", + " print(\"Float model is %d bytes\" % float_tflite_model_size)\n", + "\n", + " converter = tf.lite.TFLiteConverter.from_saved_model(SAVED_MODEL)\n", + " converter.optimizations = [tf.lite.Optimize.DEFAULT]\n", + " converter.inference_input_type = tf.lite.constants.INT8\n", + " converter.inference_output_type = tf.lite.constants.INT8\n", + " def representative_dataset_gen():\n", + " for i in range(100):\n", + " data, _ = audio_processor.get_data(1, i*1, model_settings,\n", + " BACKGROUND_FREQUENCY, \n", + " BACKGROUND_VOLUME_RANGE,\n", + " TIME_SHIFT_MS,\n", + " 'testing',\n", + " sess)\n", + " flattened_data = np.array(data.flatten(), dtype=np.float32).reshape(1, 1960)\n", + " yield [flattened_data]\n", + " converter.representative_dataset = representative_dataset_gen\n", + " tflite_model = converter.convert()\n", + " tflite_model_size = open(MODEL_TFLITE, \"wb\").write(tflite_model)\n", + " print(\"Quantized model is %d bytes\" % tflite_model_size)\n" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "colab_type": "text", + "id": "EeLiDZTbLkzv" + }, + "source": [ + "## Testing the TensorFlow Lite model's accuracy\n", + "\n", + "Verify that the model we've exported is still accurate, using the TF Lite Python API and our test set." 
+ ] + }, + { + "cell_type": "code", + "execution_count": 110, + "metadata": { + "colab": {}, + "colab_type": "code", + "id": "wQsEteKRLryJ" + }, + "outputs": [], + "source": [ + "# Helper function to run inference\n", + "def run_tflite_inference(tflite_model_path, model_type=\"Float\"):\n", + " # Load test data\n", + " np.random.seed(0) # set random seed for reproducible test results.\n", + " with tf.Session() as sess:\n", + " test_data, test_labels = audio_processor.get_data(\n", + " -1, 0, model_settings, BACKGROUND_FREQUENCY, BACKGROUND_VOLUME_RANGE,\n", + " TIME_SHIFT_MS, 'testing', sess)\n", + " test_data = np.expand_dims(test_data, axis=1).astype(np.float32)\n", + "\n", + " # Initialize the interpreter\n", + " interpreter = tf.lite.Interpreter(tflite_model_path,\n", + " experimental_op_resolver_type=tf.lite.experimental.OpResolverType.BUILTIN_REF)\n", + " interpreter.allocate_tensors()\n", + "\n", + " input_details = interpreter.get_input_details()[0]\n", + " output_details = interpreter.get_output_details()[0]\n", + "\n", + " # For quantized models, manually quantize the input data from float to integer\n", + " if model_type == \"Quantized\":\n", + " input_scale, input_zero_point = input_details[\"quantization\"]\n", + " test_data = test_data / input_scale + input_zero_point\n", + " test_data = test_data.astype(input_details[\"dtype\"])\n", + "\n", + " correct_predictions = 0\n", + " for i in range(len(test_data)):\n", + " interpreter.set_tensor(input_details[\"index\"], test_data[i])\n", + " interpreter.invoke()\n", + " output = interpreter.get_tensor(output_details[\"index\"])[0]\n", + " top_prediction = output.argmax()\n", + " correct_predictions += (top_prediction == test_labels[i])\n", + "\n", + " print('%s model accuracy is %f%% (Number of test samples=%d)' % (\n", + " model_type, (correct_predictions * 100) / len(test_data), len(test_data)))" + ] + }, + { + "cell_type": "code", + "execution_count": 111, + "metadata": { + "colab": {}, + "colab_type": 
"code", + "id": "l-pD52Na6jRa" + }, + "outputs": [], + "source": [ + "# Compute float model accuracy\n", + "run_tflite_inference(FLOAT_MODEL_TFLITE)\n", + "\n", + "# Compute quantized model accuracy\n", + "run_tflite_inference(MODEL_TFLITE, model_type='Quantized')" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "colab_type": "text", + "id": "dt6Zqbxu-wIi" + }, + "source": [ + "## Generate a TensorFlow Lite for MicroControllers Model\n", + "Convert the TensorFlow Lite model into a C source file that can be loaded by TensorFlow Lite for Microcontrollers." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "colab": {}, + "colab_type": "code", + "id": "XohZOTjR8ZyE" + }, + "outputs": [], + "source": [ + "# Install xxd if it is not available\n", + "!apt-get update && apt-get -qq install xxd\n", + "# Convert to a C source file\n", + "!xxd -i {MODEL_TFLITE} > {MODEL_TFLITE_MICRO}\n", + "# Update variable names\n", + "REPLACE_TEXT = MODEL_TFLITE.replace('/', '_').replace('.', '_')\n", + "!sed -i 's/'{REPLACE_TEXT}'/g_model/g' {MODEL_TFLITE_MICRO}" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "colab_type": "text", + "id": "2pQnN0i_-0L2" + }, + "source": [ + "## Deploy to a Microcontroller\n", + "\n", + "Follow the instructions in the [micro_speech](https://github.com/tensorflow/tflite-micro/blob/main/tensorflow/lite/micro/examples/micro_speech) README.md for [TensorFlow Lite for MicroControllers](https://www.tensorflow.org/lite/microcontrollers/overview) to deploy this model on a specific microcontroller.\n", + "\n", + "**Reference Model:** If you have not modified this notebook, you can follow the instructions as is, to deploy the model. 
Refer to the [`micro_speech/train/models`](https://github.com/tensorflow/tflite-micro/blob/main/tensorflow/lite/micro/examples/micro_speech/train/models) directory to access the models generated in this notebook.\n", + "\n", + "**New Model:** If you have generated a new model to identify different words: (i) Update `kCategoryCount` and `kCategoryLabels` in [`micro_speech/micro_features/micro_model_settings.h`](https://github.com/tensorflow/tflite-micro/blob/main/tensorflow/lite/micro/examples/micro_speech/micro_features/micro_model_settings.h) and (ii) Update the values assigned to the variables defined in [`micro_speech/micro_features/model.cc`](https://github.com/tensorflow/tflite-micro/blob/main/tensorflow/lite/micro/examples/micro_speech/micro_features/model.cc) with values displayed after running the following cell." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "colab": {}, + "colab_type": "code", + "id": "eoYyh0VU8pca" + }, + "outputs": [], + "source": [ + "# Print the C source file\n", + "!cat {MODEL_TFLITE_MICRO}" + ] + } + ], + "metadata": { + "accelerator": "GPU", + "colab": { + "collapsed_sections": [], + "name": "train_micro_speech_model.ipynb", + "provenance": [], + "toc_visible": true + }, + "kernelspec": { + "display_name": "Python 3.9.13 ('venv': venv)", + "language": "python", + "name": "python3" + }, + "language_info": { + "name": "python", + "version": "3.9.13" + }, + "vscode": { + "interpreter": { + "hash": "22cb1d09959a40fdc50ccd77b5464bb60602aea13b58d7f13d7eaffcd0bc7c7d" + } + } + }, + "nbformat": 4, + "nbformat_minor": 0 +} diff --git a/tensorflow/lite/micro/examples/micro_speech/train_speech_model.ipynb b/tensorflow/lite/micro/examples/micro_speech/train_speech_model.ipynb new file mode 100644 index 0000000..7decd27 --- /dev/null +++ b/tensorflow/lite/micro/examples/micro_speech/train_speech_model.ipynb @@ -0,0 +1 @@ 
+{"nbformat":4,"nbformat_minor":0,"metadata":{"colab":{"name":"Redirect","provenance":[],"collapsed_sections":[],"authorship_tag":"ABX9TyO1u6oks1qPVEQNnHFD3Cyo"},"kernelspec":{"name":"python3","display_name":"Python 3"}},"cells":[{"cell_type":"markdown","metadata":{"id":"86C-FMxpdZxv","colab_type":"text"},"source":["This Colab notebook has been moved to [https://github.com/tensorflow/tflite-micro/blob/main/tensorflow/lite/micro/examples/micro_speech/train/train_micro_speech_model.ipynb](https://github.com/tensorflow/tflite-micro/blob/main/tensorflow/lite/micro/examples/micro_speech/train/train_micro_speech_model.ipynb)\n"]}]} \ No newline at end of file diff --git a/tensorflow/lite/micro/examples/mnist_lstm/BUILD b/tensorflow/lite/micro/examples/mnist_lstm/BUILD new file mode 100644 index 0000000..6df2eef --- /dev/null +++ b/tensorflow/lite/micro/examples/mnist_lstm/BUILD @@ -0,0 +1,48 @@ +load("@tflm_pip_deps//:requirements.bzl", "requirement") + +py_binary( + name = "train", + srcs = ["train.py"], + srcs_version = "PY3", + deps = [ + requirement("numpy"), + requirement("tensorflow-cpu"), + ], +) + +py_binary( + name = "evaluate", + srcs = ["evaluate.py"], + srcs_version = "PY3", + deps = [ + "//python/tflite_micro:runtime", + "@absl_py//absl:app", + ], +) + +filegroup( + name = "sample_images", + srcs = glob(["samples/*.png"]), +) + +py_test( + name = "evaluate_test", + srcs = ["evaluate_test.py"], + data = [ + "trained_lstm.tflite", + "trained_lstm_int8.tflite", + ":sample_images", + ], + main = "evaluate_test.py", + python_version = "PY3", + tags = [ + "noasan", + "nomsan", # Python doesn't like these symbols + "noubsan", + ], + deps = [ + ":evaluate", + ":train", + "//tensorflow/lite/micro/tools:requantize_flatbuffer", + ], +) diff --git a/tensorflow/lite/micro/examples/mnist_lstm/evaluate.py b/tensorflow/lite/micro/examples/mnist_lstm/evaluate.py new file mode 100644 index 0000000..77c688f --- /dev/null +++ 
b/tensorflow/lite/micro/examples/mnist_lstm/evaluate.py @@ -0,0 +1,169 @@ +# Copyright 2022 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================= +"""LSTM model evaluation for MNIST recognition + +Run: +bazel build tensorflow/lite/micro/examples/mnist_lstm:evaluate +bazel-bin/tensorflow/lite/micro/examples/mnist_lstm/evaluate +--model_path=".tflite file path" --img_path="MNIST image path" + +""" +import os + +from absl import app +from absl import flags +from absl import logging +import numpy as np +from PIL import Image + +from tflite_micro.python.tflite_micro import runtime + +FLAGS = flags.FLAGS + +flags.DEFINE_string("model_path", "/tmp/lstm_trained_model/lstm.tflite", + "the trained model path.") +flags.DEFINE_string("img_path", "/tmp/samples/sample0.jpg", + "path for the image to be predicted.") + + +def read_img(img_path): + """Read MNIST image + + Args: + img_path (str): path to a MNIST image + + Returns: + np.array : image in the correct np.array format + """ + image = Image.open(img_path) + data = np.asarray(image, dtype=np.float32) + if data.shape not in [(28, 28), (28, 28, 1)]: + raise ValueError( + "Invalid input image shape (MNIST image should have shape 28*28 or 28*28*1)" + ) + # Normalize the image if necessary + if data.max() > 1: + data = data / 255.0 + # Model inference requires batch size one + data = data.reshape((1, 28, 28)) + 
return data + + +def quantize_input_data(data, input_details): + """quantize the input data using scale and zero point + + Args: + data (np.array in float): input data for the interpreter + input_details : output of get_input_details from the tflm interpreter. + """ + # Get input quantization parameters + data_type = input_details["dtype"] + input_quantization_parameters = input_details["quantization_parameters"] + input_scale, input_zero_point = input_quantization_parameters["scales"][ + 0], input_quantization_parameters["zero_points"][0] + # quantize the input data + data = data / input_scale + input_zero_point + return data.astype(data_type) + + +def dequantize_output_data(data, output_details): + """Dequantize the data + + Args: + data (int8 or int16): integer data that need to be dequantized + output_details : output of get_output_details from the tflm interpreter. + """ + output_quantization_parameters = output_details["quantization_parameters"] + output_scale, output_zero_point = output_quantization_parameters["scales"][ + 0], output_quantization_parameters["zero_points"][0] + # Caveat: tflm_output_quant need to be converted to float to avoid integer overflow during dequantization + # e.g., (tflm_output_quant -output_zero_point) and (tflm_output_quant + (-output_zero_point)) + # can produce different results (int8 calculation) + return output_scale * (data.astype("float") - output_zero_point) + + +def tflm_predict(tflm_interpreter, data): + """Predict using the tflm interpreter + + Args: + tflm_interpreter (Interpreter): TFLM interpreter + data (np.array): data that need to be predicted + + Returns: + prediction (np.array): predicted results from the model using TFLM interpreter + """ + tflm_interpreter.set_input(data, 0) + tflm_interpreter.invoke() + return tflm_interpreter.get_output(0) + + +def predict(interpreter, data): + """Use TFLM interpreter to predict a MNIST image + + Args: + interpreter (runtime.Interpreter): the TFLM python interpreter + data 
(np.array): data to be predicted + + Returns: + np.array : predicted probability (integer version if quantized) for each class (digit 0-9) + """ + + input_details = interpreter.get_input_details(0) + # Quantize the input if the model is quantized + if input_details["dtype"] != np.float32: + data = quantize_input_data(data, input_details) + interpreter.set_input(data, 0) + interpreter.invoke() + tflm_output = interpreter.get_output(0) + + # LSTM is stateful, reset the state after the usage since each image is independent + interpreter.reset() + output_details = interpreter.get_output_details(0) + if output_details["dtype"] == np.float32: + return tflm_output[0].astype("float") + # Dequantize the output for quantized model + return dequantize_output_data(tflm_output[0], output_details) + + +def predict_image(interpreter, image_path): + """Use TFLM interpreter to predict a MNIST image + + Args: + interpreter (runtime.Interpreter): the TFLM python interpreter + image_path (str): path for the image that need to be tested + + Returns: + np.array : predicted probability (integer version if quantized) for each class (digit 0-9) + """ + data = read_img(image_path) + return predict(interpreter, data) + + +def main(_): + if not os.path.exists(FLAGS.model_path): + raise ValueError( + "Model file does not exist. Please check the .tflite model path.") + if not os.path.exists(FLAGS.img_path): + raise ValueError("Image file does not exist. 
Please check the image path.") + + tflm_interpreter = runtime.Interpreter.from_file(FLAGS.model_path) + category_probabilities = predict_image(tflm_interpreter, FLAGS.img_path) + predicted_category = np.argmax(category_probabilities) + logging.info("Model predicts the image as %i with probability %.2f", + predicted_category, category_probabilities[predicted_category]) + + +if __name__ == "__main__": + app.run(main) diff --git a/tensorflow/lite/micro/examples/mnist_lstm/evaluate_test.py b/tensorflow/lite/micro/examples/mnist_lstm/evaluate_test.py new file mode 100644 index 0000000..a7d74cd --- /dev/null +++ b/tensorflow/lite/micro/examples/mnist_lstm/evaluate_test.py @@ -0,0 +1,221 @@ +# Copyright 2022 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# ============================================================================= +import os + +import numpy as np +import tensorflow as tf + +from tensorflow.python.framework import test_util +from tensorflow.python.platform import resource_loader +from tensorflow.python.platform import test +from tflite_micro.python.tflite_micro import runtime +from tflite_micro.tensorflow.lite.micro.examples.mnist_lstm import evaluate +from tflite_micro.tensorflow.lite.micro.tools import requantize_flatbuffer + +PREFIX_PATH = resource_loader.get_path_to_datafile("") + + +class LSTMFloatModelTest(test_util.TensorFlowTestCase): + + def setUp(self): + self.model_path = os.path.join(PREFIX_PATH, "trained_lstm.tflite") + self.input_shape = (1, 28, 28) + self.output_shape = (1, 10) + self.tflm_interpreter = runtime.Interpreter.from_file(self.model_path) + np.random.seed(42) #Seed the random number generator + + def testInputErrHandling(self): + wrong_size_image_path = os.path.join(PREFIX_PATH, "samples/resized9.png") + with self.assertRaisesWithPredicateMatch(ValueError, + "Invalid input image shape"): + evaluate.predict_image(self.tflm_interpreter, wrong_size_image_path) + + def testCompareWithTFLite(self): + tflite_interpreter = tf.lite.Interpreter( + model_path=self.model_path, + experimental_op_resolver_type=\ + tf.lite.experimental.OpResolverType.BUILTIN_REF) + tflite_interpreter.allocate_tensors() + tflite_output_details = tflite_interpreter.get_output_details()[0] + tflite_input_details = tflite_interpreter.get_input_details()[0] + + num_steps = 100 + for _ in range(0, num_steps): + # Clear the internal states of the TfLite and TFLM interpreters so that we can call invoke multiple times (LSTM is stateful). + tflite_interpreter.reset_all_variables() + self.tflm_interpreter.reset() + + # Give the same (random) input to both interpreters can confirm that the output is identical. 
+ data_x = np.random.random(self.input_shape) + data_x = data_x.astype("float32") + + # Run inference on TFLite + tflite_interpreter.set_tensor(tflite_input_details["index"], data_x) + tflite_interpreter.invoke() + tflite_output = tflite_interpreter.get_tensor( + tflite_output_details["index"]) + + # Run inference on TFLM + tflm_output = evaluate.tflm_predict(self.tflm_interpreter, data_x) + + # Check that TFLM has correct output + self.assertDTypeEqual(tflm_output, np.float32) + self.assertEqual(tflm_output.shape, self.output_shape) + self.assertAllLess((tflite_output - tflm_output), 1e-5) + + def testModelAccuracy(self): + # Test prediction accuracy on digits 0-9 using sample images + for label in range(10): + image_path = os.path.join(PREFIX_PATH, f"samples/sample{label}.png") + # Run inference on the sample image + # Note that the TFLM state is reset inside the predict_image function. + category_probabilities = evaluate.predict_image(self.tflm_interpreter, + image_path) + # Check the prediction result + predicted_category = np.argmax(category_probabilities) + self.assertEqual(predicted_category, label) + + +class LSTMInt8ModelTest(test_util.TensorFlowTestCase): + + def setUp(self): + self.int8_model_path = os.path.join(PREFIX_PATH, + "trained_lstm_int8.tflite") + self.input_shape = (1, 28, 28) + self.output_shape = (1, 10) + self.tflm_interpreter_quant = runtime.Interpreter.from_file( + self.int8_model_path) + np.random.seed(42) #Seed the random number generator + + def testQuantOutputs(self): + # Get input/output information of the quantized model + input_details = self.tflm_interpreter_quant.get_input_details(0) + output_details = self.tflm_interpreter_quant.get_output_details(0) + + # Create a float model for results comparison + float_model_path = os.path.join(PREFIX_PATH, "trained_lstm.tflite") + tflm_interpreter_float = runtime.Interpreter.from_file(float_model_path) + + num_test = 10 + for _ in range(num_test): + # Clear the internal states of the TfLite 
and TFLM interpreters so that we can call invoke multiple times (LSTM is stateful). + self.tflm_interpreter_quant.reset() + tflm_interpreter_float.reset() + + data_x = np.random.random(self.input_shape) + data_x = data_x.astype("float32") + + # Run float inference on TFLM + tflm_output_float = evaluate.tflm_predict(tflm_interpreter_float, data_x) + + # Quantize the input data into int8 + data_x_quant = evaluate.quantize_input_data(data_x, input_details) + + # Run integer inference on the quantized TFLM model + tflm_output_quant = evaluate.tflm_predict(self.tflm_interpreter_quant, + data_x_quant) + # Check shape and type + self.assertDTypeEqual(tflm_output_quant, np.int8) + self.assertEqual(tflm_output_quant.shape, self.output_shape) + + # Convert the integer output back to float for comparison + tflm_output_quant_float = evaluate.dequantize_output_data( + tflm_output_quant, output_details) + # Make sure the difference is within the error margin + self.assertAllLess(abs(tflm_output_float - tflm_output_quant_float), + 1e-2) + + def testQuantModelAccuracy(self): + for label in range(10): + image_path = os.path.join(PREFIX_PATH, f"samples/sample{label}.png") + # Run integer inference (quantized) on the sample image + # Note that the TFLM state is reset inside the predict_image function.
+ category_probabilities_quant = evaluate.predict_image( + self.tflm_interpreter_quant, image_path) + # Check the prediction result + predicted_category = np.argmax(category_probabilities_quant) + # Check the prediction + self.assertEqual(predicted_category, label) + + +class LSTMInt16ModelTest(test_util.TensorFlowTestCase): + + def setUp(self): + # Convert the int8 model to int16 + self.int8_model_path = os.path.join(PREFIX_PATH, + "trained_lstm_int8.tflite") + self.requantizer = requantize_flatbuffer.Requantizer.from_file( + self.int8_model_path) + self.requantizer.requantize_8to16() + self.int16_model = self.requantizer.model_bytearray() + self.input_shape = (1, 28, 28) + self.output_shape = (1, 10) + self.tflm_interpreter_quant = runtime.Interpreter.from_bytes( + self.int16_model) + np.random.seed(42) #Seed the random number generator + + def testQuantOutputs(self): + # Get input/output information + input_details = self.tflm_interpreter_quant.get_input_details(0) + output_details = self.tflm_interpreter_quant.get_output_details(0) + + # Create a float model for results comparison + float_model_path = os.path.join(PREFIX_PATH, "trained_lstm.tflite") + tflm_interpreter_float = runtime.Interpreter.from_file(float_model_path) + + num_test = 10 + for _ in range(num_test): + # Clear the internal states of the TfLite and TFLM interpreters so that we can call invoke multiple times (LSTM is stateful). 
+ self.tflm_interpreter_quant.reset() + tflm_interpreter_float.reset() + + data_x = np.random.random(self.input_shape) + data_x = data_x.astype("float32") + + # Run float inference on TFLM + tflm_output_float = evaluate.tflm_predict(tflm_interpreter_float, data_x) + + # Quantize the input data to the model's input type + data_x_quant = evaluate.quantize_input_data(data_x, input_details) + + # Run integer inference on the quantized TFLM model + tflm_output_quant = evaluate.tflm_predict(self.tflm_interpreter_quant, + data_x_quant) + # Check shape and type + self.assertDTypeEqual(tflm_output_quant, np.int16) + self.assertEqual(tflm_output_quant.shape, self.output_shape) + + # Convert the integer output back to float for comparison + tflm_output_quant_float = evaluate.dequantize_output_data( + tflm_output_quant, output_details) + # Make sure the difference is within the error margin + self.assertAllLess(abs(tflm_output_float - tflm_output_quant_float), + 1e-3) + + def testQuantModelAccuracy(self): + for label in range(10): + image_path = os.path.join(PREFIX_PATH, f"samples/sample{label}.png") + # Run integer inference (quantized) on the sample image + # Note that the TFLM state is reset inside the predict_image function.
+ category_probabilities_quant = evaluate.predict_image( + self.tflm_interpreter_quant, image_path) + # Check the prediction result + predicted_category = np.argmax(category_probabilities_quant) + # Check the prediction + self.assertEqual(predicted_category, label) + + +if __name__ == "__main__": + test.main() diff --git a/tensorflow/lite/micro/examples/mnist_lstm/samples/resized9.png b/tensorflow/lite/micro/examples/mnist_lstm/samples/resized9.png new file mode 100644 index 0000000..2717752 Binary files /dev/null and b/tensorflow/lite/micro/examples/mnist_lstm/samples/resized9.png differ diff --git a/tensorflow/lite/micro/examples/mnist_lstm/samples/sample0.png b/tensorflow/lite/micro/examples/mnist_lstm/samples/sample0.png new file mode 100644 index 0000000..9885aab Binary files /dev/null and b/tensorflow/lite/micro/examples/mnist_lstm/samples/sample0.png differ diff --git a/tensorflow/lite/micro/examples/mnist_lstm/samples/sample1.png b/tensorflow/lite/micro/examples/mnist_lstm/samples/sample1.png new file mode 100644 index 0000000..71d424e Binary files /dev/null and b/tensorflow/lite/micro/examples/mnist_lstm/samples/sample1.png differ diff --git a/tensorflow/lite/micro/examples/mnist_lstm/samples/sample2.png b/tensorflow/lite/micro/examples/mnist_lstm/samples/sample2.png new file mode 100644 index 0000000..1b1f72c Binary files /dev/null and b/tensorflow/lite/micro/examples/mnist_lstm/samples/sample2.png differ diff --git a/tensorflow/lite/micro/examples/mnist_lstm/samples/sample3.png b/tensorflow/lite/micro/examples/mnist_lstm/samples/sample3.png new file mode 100644 index 0000000..d4ea5a1 Binary files /dev/null and b/tensorflow/lite/micro/examples/mnist_lstm/samples/sample3.png differ diff --git a/tensorflow/lite/micro/examples/mnist_lstm/samples/sample4.png b/tensorflow/lite/micro/examples/mnist_lstm/samples/sample4.png new file mode 100644 index 0000000..c48b068 Binary files /dev/null and b/tensorflow/lite/micro/examples/mnist_lstm/samples/sample4.png 
differ diff --git a/tensorflow/lite/micro/examples/mnist_lstm/samples/sample5.png b/tensorflow/lite/micro/examples/mnist_lstm/samples/sample5.png new file mode 100644 index 0000000..0db53de Binary files /dev/null and b/tensorflow/lite/micro/examples/mnist_lstm/samples/sample5.png differ diff --git a/tensorflow/lite/micro/examples/mnist_lstm/samples/sample6.png b/tensorflow/lite/micro/examples/mnist_lstm/samples/sample6.png new file mode 100644 index 0000000..058d1c3 Binary files /dev/null and b/tensorflow/lite/micro/examples/mnist_lstm/samples/sample6.png differ diff --git a/tensorflow/lite/micro/examples/mnist_lstm/samples/sample7.png b/tensorflow/lite/micro/examples/mnist_lstm/samples/sample7.png new file mode 100644 index 0000000..94b4e6d Binary files /dev/null and b/tensorflow/lite/micro/examples/mnist_lstm/samples/sample7.png differ diff --git a/tensorflow/lite/micro/examples/mnist_lstm/samples/sample8.png b/tensorflow/lite/micro/examples/mnist_lstm/samples/sample8.png new file mode 100644 index 0000000..9a3fcd6 Binary files /dev/null and b/tensorflow/lite/micro/examples/mnist_lstm/samples/sample8.png differ diff --git a/tensorflow/lite/micro/examples/mnist_lstm/samples/sample9.png b/tensorflow/lite/micro/examples/mnist_lstm/samples/sample9.png new file mode 100644 index 0000000..247620a Binary files /dev/null and b/tensorflow/lite/micro/examples/mnist_lstm/samples/sample9.png differ diff --git a/tensorflow/lite/micro/examples/mnist_lstm/train.py b/tensorflow/lite/micro/examples/mnist_lstm/train.py new file mode 100644 index 0000000..fc2110e --- /dev/null +++ b/tensorflow/lite/micro/examples/mnist_lstm/train.py @@ -0,0 +1,205 @@ +# Copyright 2022 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================= +"""LSTM model training for MNIST recognition + +This script is based on: +https://www.tensorflow.org/lite/models/convert/rnn +https://colab.research.google.com/github/tensorflow/tensorflow/blob/master/tensorflow/lite/examples/experimental_new_converter/Keras_LSTM_fusion_Codelab.ipynb + +Run: +`bazel build tensorflow/lite/micro/examples/mnist_lstm:train` +`bazel-bin/tensorflow/lite/micro/examples/mnist_lstm/train` +""" +import os + +from absl import app +from absl import flags +from absl import logging +import numpy as np +import tensorflow as tf + +FLAGS = flags.FLAGS + +flags.DEFINE_integer("epochs", 20, "number of epochs to train the model.") +flags.DEFINE_string("save_dir", "/tmp/lstm_trained_model", + "the directory to save the trained model.") +flags.DEFINE_boolean("save_tf_model", False, + "store the original unconverted tf model.") +flags.DEFINE_boolean( + "quantize", False, + "convert and save the full integer (int8) quantized model.") + + +def create_model(units=20): + """Create a keras LSTM model for MNIST recognition + + Args: + units (int, optional): dimensionality of the output space for the model. + Defaults to 20. 
+ + Returns: + tf.keras.Model: A Keras LSTM model + """ + + model = tf.keras.models.Sequential([ + tf.keras.layers.Input(shape=(28, 28), name="input"), + tf.keras.layers.LSTM(units, return_sequences=True), + tf.keras.layers.Flatten(), + tf.keras.layers.Dense(10, activation=tf.nn.softmax, name="output") + ]) + model.compile(optimizer="adam", + loss="sparse_categorical_crossentropy", + metrics=["accuracy"]) + model.summary() + return model + + +def get_train_data(): + """Get MNIST train and test data + + Returns: + tuple: (data, label) pairs for train and test + """ + (x_train, y_train), _ = tf.keras.datasets.mnist.load_data() + x_train = x_train / 255. # normalize pixel values to 0-1 + x_train = x_train.astype(np.float32) + return (x_train, y_train) + + +def train_lstm_model(epochs, x_train, y_train): + """Train keras LSTM model on MNIST dataset + + Args: epochs (int) : number of epochs to train the model + x_train (numpy.array): list of the training data + y_train (numpy.array): list of the corresponding array + + Returns: + tf.keras.Model: A trained keras LSTM model + """ + model = create_model() + callback = tf.keras.callbacks.EarlyStopping( + monitor="val_loss", + patience=3) #early stop if validation loss does not drop anymore + model.fit(x_train, + y_train, + epochs=epochs, + validation_split=0.2, + batch_size=32, + callbacks=[callback]) + return model + + +def convert_quantized_tflite_model(model, x_train): + """Convert the save TF model to tflite model, then save it as .tflite flatbuffer format + + See + https://www.tensorflow.org/lite/performance/post_training_integer_quant#convert_using_integer-only_quantization + + Args: + model (tf.keras.Model): the trained LSTM Model + x_train (numpy.array): list of the training data + + Returns: + The converted model in serialized format. 
+ """ + + def representative_dataset_gen(num_samples=100): + for data in x_train[:num_samples]: + yield [data.reshape(1, 28, 28)] + + converter = tf.lite.TFLiteConverter.from_keras_model(model) + converter.optimizations = [tf.lite.Optimize.DEFAULT] + converter.target_spec.supported_ops = [tf.lite.OpsSet.TFLITE_BUILTINS_INT8] + converter.inference_input_type = tf.int8 + converter.inference_output_type = tf.int8 + converter.representative_dataset = representative_dataset_gen + tflite_model = converter.convert() + return tflite_model + + +def convert_tflite_model(model): + """Convert the saved TF model to tflite model, then save it as .tflite flatbuffer format + + Args: + model (tf.keras.Model): the trained LSTM Model + + Returns: + The converted model in serialized format. + """ + converter = tf.lite.TFLiteConverter.from_keras_model(model) + tflite_model = converter.convert() + return tflite_model + + +def save_tflite_model(tflite_model, save_dir, model_name): + """save the converted tflite model + + Args: + tflite_model (binary): the converted model in serialized format.
+ save_dir (str): the save directory + model_name (str): model name to be saved + """ + if not os.path.exists(save_dir): + os.makedirs(save_dir) + save_path = os.path.join(save_dir, model_name) + with open(save_path, "wb") as f: + f.write(tflite_model) + logging.info("Tflite model saved to %s", save_dir) + + +def prepare_trained_model(trained_model): + """Fix the input of the trained model for inference + + Args: + trained_model (tf.keras.Model): the trained LSTM model + + Returns: + run_model (tf.keras.Model): the trained model with fixed input tensor size for inference + """ + # TFLite converter requires fixed shape input to work, alternative: b/225231544 + fixed_input = tf.keras.layers.Input(shape=[28, 28], + batch_size=1, + dtype=trained_model.inputs[0].dtype, + name="fixed_input") + fixed_output = trained_model(fixed_input) + run_model = tf.keras.models.Model(fixed_input, fixed_output) + return run_model + + +def main(_): + x_train, y_train = get_train_data() + trained_model = train_lstm_model(FLAGS.epochs, x_train, y_train) + run_model = prepare_trained_model(trained_model) + # Save the tf model + if FLAGS.save_tf_model: + run_model.save(FLAGS.save_dir, save_format="tf") + logging.info("TF model saved to %s", FLAGS.save_dir) + + # Convert and save the model to .tflite + tflite_model = convert_tflite_model(run_model) + save_tflite_model(tflite_model, + FLAGS.save_dir, + model_name="mnist_lstm.tflite") + + # Convert and save the quantized model + if FLAGS.quantize: + quantized_tflite_model = convert_quantized_tflite_model(run_model, x_train) + save_tflite_model(quantized_tflite_model, + FLAGS.save_dir, + model_name="mnist_lstm_quant.tflite") + + +if __name__ == "__main__": + app.run(main) diff --git a/tensorflow/lite/micro/examples/mnist_lstm/trained_lstm.tflite b/tensorflow/lite/micro/examples/mnist_lstm/trained_lstm.tflite new file mode 100644 index 0000000..424dfcc Binary files /dev/null and b/tensorflow/lite/micro/examples/mnist_lstm/trained_lstm.tflite 
differ diff --git a/tensorflow/lite/micro/examples/mnist_lstm/trained_lstm_int8.tflite b/tensorflow/lite/micro/examples/mnist_lstm/trained_lstm_int8.tflite new file mode 100644 index 0000000..636ea0b Binary files /dev/null and b/tensorflow/lite/micro/examples/mnist_lstm/trained_lstm_int8.tflite differ diff --git a/tensorflow/lite/micro/examples/network_tester/.gitignore b/tensorflow/lite/micro/examples/network_tester/.gitignore new file mode 100644 index 0000000..f266b9b --- /dev/null +++ b/tensorflow/lite/micro/examples/network_tester/.gitignore @@ -0,0 +1,3 @@ +input_data.h +expected_output_data.h +network_model.h diff --git a/tensorflow/lite/micro/examples/network_tester/Makefile.inc b/tensorflow/lite/micro/examples/network_tester/Makefile.inc new file mode 100644 index 0000000..7396323 --- /dev/null +++ b/tensorflow/lite/micro/examples/network_tester/Makefile.inc @@ -0,0 +1,52 @@ +NETWORK_TESTER_TEST_SRCS := \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/examples/network_tester/network_tester_test.cc + +NETWORK_TESTER_GENERATOR_INPUTS:= + +ifeq ($(CO_PROCESSOR),ethos_u) + NETWORK_TESTER_TEST_SRCS += \ + $(GENERATED_SRCS_DIR)$(TENSORFLOW_ROOT)tensorflow/lite/micro/models/person_detect_model_data_vela.cc + + NETWORK_TESTER_GENERATOR_INPUTS := \ + $(TENSORFLOW_ROOT)tensorflow/lite/micro/examples/person_detection/testdata/person.bmp +endif + +NETWORK_TESTER_TEST_HDRS := \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/examples/network_tester/network_model.h \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/examples/network_tester/input_data.h \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/examples/network_tester/expected_output_data.h + +#Find any platform - specific rules for this example. 
+include $(wildcard tensorflow/lite/micro/examples/network_tester/*/Makefile.inc) + +ifdef NETWORK_MODEL + INCLUDES += -include $(NETWORK_MODEL) +endif + +ifdef OUTPUT_DATA + INCLUDES += -include $(OUTPUT_DATA) +endif + +ifdef INPUT_DATA + INCLUDES += -include $(INPUT_DATA) +endif + +ifdef ARENA_SIZE + CXXFLAGS += -DTENSOR_ARENA_SIZE=$(ARENA_SIZE) +endif + +ifdef NUM_BYTES_TO_PRINT + CXXFLAGS += -DNUM_BYTES_TO_PRINT=$(NUM_BYTES_TO_PRINT) +endif + +ifeq ($(COMPARE_OUTPUT_DATA),no) + CXXFLAGS += -DNO_COMPARE_OUTPUT_DATA +endif + +ifdef NUM_INFERENCES + CXXFLAGS += -DNUM_INFERENCES=$(NUM_INFERENCES) +endif + +# Builds a standalone object recognition binary. +$(eval $(call microlite_test,network_tester_test,\ +$(NETWORK_TESTER_TEST_SRCS),$(NETWORK_TESTER_TEST_HDRS),$(NETWORK_TESTER_GENERATOR_INPUTS))) diff --git a/tensorflow/lite/micro/examples/network_tester/README.md b/tensorflow/lite/micro/examples/network_tester/README.md new file mode 100644 index 0000000..7729f35 --- /dev/null +++ b/tensorflow/lite/micro/examples/network_tester/README.md @@ -0,0 +1,64 @@ +The aim of this application is to provide a quick way to test different +networks. + +It contains one testcase and a default network model (network_model.h), default +input data (input_data.h) and default expected output data +(expected_output_data.h). The header files were created using the `xxd` command. + +The default model is a single int8 DepthwiseConv2D operator, with an input shape +of {1, 8, 8, 16}, {1, 2, 2, 16} and {16} and an output shape of {1, 4, 4, 16}. + +When building the FVP target for Ethos-U (CO_PROCESSOR=ethos_u) the person +detect int8 model is used instead. The downloaded model is optimized for Ethos-U +with Ethos-U Vela. 
For more info see the following readmes: +tensorflow/lite/micro/kernels/ethos_u/README.md +tensorflow/lite/micro/cortex_m_corstone_300/README.md +tensorflow/lite/micro/examples/person_detection/README.md The following Vela +configuration has been used, which is compatible with the FVP build target +(TARGET=cortex_m_corstone_300). + +``` +vela --accelerator-config=ethos-u55-256 +``` + +In order to use another model, input data, or expected output data, simply +specify the path to the new header files when running make as seen below. + +The variables in the specified header files (array and array length) need to +have the same name and type as the ones in the default header files. The include +guards also need to be the same. When swapping out the network model, it is +likely that the memory allocated by the interpreter needs to be increased to fit +the new model. This is done by using the `ARENA_SIZE` option when running +`make`. + +``` +make -f tensorflow/lite/micro/tools/make/Makefile network_tester_test \ + NETWORK_MODEL=path/to/network_model.h \ + INPUT_DATA=path/to/input_data.h \ + OUTPUT_DATA=path/to/expected_output_data.h \ + ARENA_SIZE= \ + NUM_BYTES_TO_PRINT= \ + COMPARE_OUTPUT_DATA=no +``` + +`NETWORK_MODEL`: The path to the network model header. \ +`INPUT_DATA`: The path to the input data. \ +`OUTPUT_DATA`: The path to the expected output data. \ +`ARENA_SIZE`: The size of the memory to be allocated (in bytes) by the +interpreter. \ +`NUM_BYTES_TO_PRINT`: The number of bytes of the output data to print. \ +If set to 0, all bytes of the output are printed. \ +`COMPARE_OUTPUT_DATA`: If set to "no" the output data is not compared to the +expected output data. This could be useful e.g. if the execution time needs to +be minimized, or there is no expected output data. If omitted, the output data +is compared to the expected output. `NUM_INFERENCES`: Define how many inferences +are made. Defaults to 1.
\ + +The output is printed in JSON format using printf: `num_of_outputs: 1 +output_begin [ { "dims": [4,1,2,2,1], "data_address": "0x000000", +"data":"0x06,0x08,0x0e,0x10" }] output_end` + +If there are multiple output tensors, the output will look like this: +`num_of_outputs: 2 output_begin [ { "dims": [4,1,2,2,1], "data_address": +"0x000000", "data":"0x06,0x08,0x0e,0x10" }, { "dims": [4,1,2,2,1], +"data_address": "0x111111", "data":"0x06,0x08,0x0e,0x10" }] output_end` diff --git a/tensorflow/lite/micro/examples/network_tester/network_tester_test.cc b/tensorflow/lite/micro/examples/network_tester/network_tester_test.cc new file mode 100644 index 0000000..e62e0c4 --- /dev/null +++ b/tensorflow/lite/micro/examples/network_tester/network_tester_test.cc @@ -0,0 +1,158 @@ +/* Copyright 2023 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/ + +#include "tensorflow/lite/micro/examples/network_tester/expected_output_data.h" +#include "tensorflow/lite/micro/examples/network_tester/input_data.h" +#include "tensorflow/lite/micro/examples/network_tester/network_model.h" +#include "tensorflow/lite/micro/micro_interpreter.h" +#include "tensorflow/lite/micro/micro_log.h" +#include "tensorflow/lite/micro/micro_mutable_op_resolver.h" +#include "tensorflow/lite/micro/micro_utils.h" +#include "tensorflow/lite/micro/testing/micro_test.h" +#include "tensorflow/lite/schema/schema_generated.h" + +#ifdef ETHOS_U +#include "tensorflow/lite/micro/examples/person_detection/testdata/person_image_data.h" +#include "tensorflow/lite/micro/models/person_detect_model_data.h" +#endif + +#ifndef TENSOR_ARENA_SIZE +#ifdef ETHOS_U +#define TENSOR_ARENA_SIZE (136 * 1024) +#else +#define TENSOR_ARENA_SIZE (5 * 1024) +#endif +#endif + +#ifndef NUM_INFERENCES +#define NUM_INFERENCES 1 +#endif + +uint8_t tensor_arena[TENSOR_ARENA_SIZE]; + +#ifdef NUM_BYTES_TO_PRINT +inline void print_output_data(TfLiteTensor* output) { + int num_bytes_to_print = + ((output->bytes < NUM_BYTES_TO_PRINT) || NUM_BYTES_TO_PRINT == 0) + ? 
output->bytes + : NUM_BYTES_TO_PRINT; + + int dims_size = output->dims->size; + printf("{\n"); + printf("\"dims\": [%d,", dims_size); + for (int i = 0; i < output->dims->size - 1; ++i) { + printf("%d,", output->dims->data[i]); + } + printf("%d],\n", output->dims->data[dims_size - 1]); + + printf("\"data_address\": \"%p\",\n", output->data.raw); + printf("\"data\":\""); + for (int i = 0; i < num_bytes_to_print - 1; ++i) { + if (i % 16 == 0 && i != 0) { + printf("\n"); + } + printf("0x%02x,", output->data.uint8[i]); + } + printf("0x%02x\"\n", output->data.uint8[num_bytes_to_print - 1]); + printf("}"); +} +#endif + +template +void check_output_elem(TfLiteTensor* output, const T* expected_output, + const int index) { + TF_LITE_MICRO_EXPECT_EQ(tflite::GetTensorData(output)[index], + expected_output[index]); +} + +TF_LITE_MICRO_TESTS_BEGIN + +TF_LITE_MICRO_TEST(TestInvoke) { +#ifdef ETHOS_U + const tflite::Model* model = ::tflite::GetModel(g_person_detect_model_data); +#else + const tflite::Model* model = ::tflite::GetModel(network_model); +#endif + if (model->version() != TFLITE_SCHEMA_VERSION) { + MicroPrintf( + "Model provided is schema version %d not equal " + "to supported version %d.\n", + model->version(), TFLITE_SCHEMA_VERSION); + return kTfLiteError; + } + + tflite::MicroMutableOpResolver<6> resolver; + resolver.AddAveragePool2D(tflite::Register_AVERAGE_POOL_2D_INT8()); + resolver.AddConv2D(tflite::Register_CONV_2D_INT8()); + resolver.AddDepthwiseConv2D(tflite::Register_DEPTHWISE_CONV_2D_INT8()); + resolver.AddEthosU(); + resolver.AddReshape(); + resolver.AddSoftmax(tflite::Register_SOFTMAX_INT8()); + + tflite::MicroInterpreter interpreter(model, resolver, tensor_arena, + TENSOR_ARENA_SIZE); + + TfLiteStatus allocate_status = interpreter.AllocateTensors(); + if (allocate_status != kTfLiteOk) { + MicroPrintf("Tensor allocation failed\n"); + return kTfLiteError; + } + + for (int n = 0; n < NUM_INFERENCES; n++) { + for (size_t i = 0; i < interpreter.inputs_size(); 
++i) { + TfLiteTensor* input = interpreter.input(i); +#ifdef ETHOS_U + memcpy(input->data.int8, g_person_image_data, input->bytes); +#else + memcpy(input->data.data, &input_data[i], input->bytes); +#endif + } + TfLiteStatus invoke_status = interpreter.Invoke(); + if (invoke_status != kTfLiteOk) { + MicroPrintf("Invoke failed\n"); + return kTfLiteError; + } + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, invoke_status); + +#ifdef NUM_BYTES_TO_PRINT + // Print all of the output data, or the first NUM_BYTES_TO_PRINT bytes, + // whichever comes first as well as the output shape. + printf("num_of_outputs: %d\n", interpreter.outputs_size()); + printf("output_begin\n"); + printf("[\n"); + for (int i = 0; i < interpreter.outputs_size(); i++) { + TfLiteTensor* output = interpreter.output(i); + print_output_data(output); + if (i != interpreter.outputs_size() - 1) { + printf(",\n"); + } + } + printf("]\n"); + printf("output_end\n"); +#endif + +#ifndef NO_COMPARE_OUTPUT_DATA + for (size_t i = 0; i < interpreter.outputs_size(); i++) { + TfLiteTensor* output = interpreter.output(i); + for (int j = 0; j < tflite::ElementCount(*(output->dims)); ++j) { + check_output_elem(output, &expected_output_data[i], j); + } + } +#endif + } + MicroPrintf("Ran successfully\n"); +} + +TF_LITE_MICRO_TESTS_END diff --git a/tensorflow/lite/micro/examples/person_detection/BUILD b/tensorflow/lite/micro/examples/person_detection/BUILD new file mode 100644 index 0000000..449c4be --- /dev/null +++ b/tensorflow/lite/micro/examples/person_detection/BUILD @@ -0,0 +1,157 @@ +# Description: +# TensorFlow Lite for Microcontrollers Vision Example. 
+load("//tensorflow/lite/micro:build_def.bzl", "generate_cc_arrays") + +package( + default_visibility = ["//visibility:public"], + # Disabling layering_check because of http://b/177257332 + features = ["-layering_check"], + licenses = ["notice"], +) + +cc_library( + name = "model_settings", + srcs = [ + "model_settings.cc", + ], + hdrs = [ + "model_settings.h", + ], +) + +generate_cc_arrays( + name = "generated_no_person_bmp_cc", + src = "testdata/no_person.bmp", + out = "testdata/no_person_image_data.cc", +) + +generate_cc_arrays( + name = "generated_no_person_bmp_hdr", + src = "testdata/no_person.bmp", + out = "testdata/no_person_image_data.h", +) + +generate_cc_arrays( + name = "generated_person_bmp_cc", + src = "testdata/person.bmp", + out = "testdata/person_image_data.cc", +) + +generate_cc_arrays( + name = "generated_person_bmp_hdr", + src = "testdata/person.bmp", + out = "testdata/person_image_data.h", +) + +cc_library( + name = "person_detect_model_data", + srcs = [ + "//tensorflow/lite/micro/models:generated_person_detect_model_cc", + ], + hdrs = [ + "//tensorflow/lite/micro/models:generated_person_detect_model_hdr", + ], +) + +cc_library( + name = "simple_images_test_data", + srcs = [ + ":generated_no_person_bmp_cc", + ":generated_person_bmp_cc", + ], + hdrs = [ + ":generated_no_person_bmp_hdr", + ":generated_person_bmp_hdr", + ], + deps = [ + ":model_settings", + ], +) + +cc_test( + name = "person_detection_test", + srcs = ["person_detection_test.cc"], + deps = [ + ":model_settings", + ":person_detect_model_data", + ":simple_images_test_data", + "//tensorflow/lite/c:common", + "//tensorflow/lite/micro:micro_framework", + "//tensorflow/lite/micro:micro_log", + "//tensorflow/lite/micro:op_resolvers", + "//tensorflow/lite/micro/testing:micro_test", + "//tensorflow/lite/schema:schema_fbs", + ], +) + +cc_library( + name = "image_provider", + srcs = [ + "image_provider.cc", + ], + hdrs = [ + "image_provider.h", + ], + deps = [ + ":model_settings", + 
"//tensorflow/lite/c:common", + ], +) + +cc_test( + name = "image_provider_test", + srcs = [ + "image_provider_test.cc", + ], + deps = [ + ":image_provider", + ":model_settings", + "//tensorflow/lite/c:common", + "//tensorflow/lite/micro/testing:micro_test", + ], +) + +cc_library( + name = "detection_responder", + srcs = [ + "detection_responder.cc", + ], + hdrs = [ + "detection_responder.h", + ], + deps = [ + "//tensorflow/lite/c:common", + "//tensorflow/lite/micro:micro_log", + ], +) + +cc_test( + name = "detection_responder_test", + srcs = [ + "detection_responder_test.cc", + ], + deps = [ + ":detection_responder", + "//tensorflow/lite/micro/testing:micro_test", + ], +) + +cc_binary( + name = "person_detection", + srcs = [ + "main.cc", + "main_functions.cc", + "main_functions.h", + ], + deps = [ + ":detection_responder", + ":image_provider", + ":model_settings", + ":person_detect_model_data", + "//tensorflow/lite/micro:micro_framework", + "//tensorflow/lite/micro:micro_log", + "//tensorflow/lite/micro:op_resolvers", + "//tensorflow/lite/micro:system_setup", + "//tensorflow/lite/schema:schema_fbs", + ], +) diff --git a/tensorflow/lite/micro/examples/person_detection/Makefile.inc b/tensorflow/lite/micro/examples/person_detection/Makefile.inc new file mode 100644 index 0000000..c142c7d --- /dev/null +++ b/tensorflow/lite/micro/examples/person_detection/Makefile.inc @@ -0,0 +1,85 @@ +person_detection_MODEL_SRCS := \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/examples/person_detection/model_settings.cc + +person_detection_MODEL_HDRS := \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/examples/person_detection/model_settings.h + +person_detection_TEST_SRCS := \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/examples/person_detection/person_detection_test.cc \ +$(person_detection_MODEL_SRCS) + +person_detection_TEST_HDRS := \ +$(person_detection_MODEL_HDRS) + +IMAGE_PROVIDER_TEST_SRCS := \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/examples/person_detection/image_provider.cc \ 
+$(TENSORFLOW_ROOT)tensorflow/lite/micro/examples/person_detection/image_provider_test.cc \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/examples/person_detection/model_settings.cc + +IMAGE_PROVIDER_TEST_HDRS := \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/examples/person_detection/image_provider.h \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/examples/person_detection/model_settings.h + +DETECTION_RESPONDER_TEST_SRCS := \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/examples/person_detection/detection_responder.cc \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/examples/person_detection/detection_responder_test.cc + +DETECTION_RESPONDER_TEST_HDRS := \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/examples/person_detection/detection_responder.h + +person_detection_SRCS := \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/examples/person_detection/detection_responder.cc \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/examples/person_detection/image_provider.cc \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/examples/person_detection/main.cc \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/examples/person_detection/main_functions.cc \ +$(person_detection_MODEL_SRCS) + +person_detection_HDRS := \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/examples/person_detection/detection_responder.h \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/examples/person_detection/image_provider.h \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/examples/person_detection/main_functions.h \ +$(person_detection_MODEL_HDRS) + +person_detection_GENERATOR_INPUTS := \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/models/person_detect.tflite \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/examples/person_detection/testdata/person.bmp \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/examples/person_detection/testdata/no_person.bmp + +person_detection_GENERATED_SRCS := \ +$(GENERATED_SRCS_DIR)$(TENSORFLOW_ROOT)tensorflow/lite/micro/models/person_detect_model_data.cc + +person_detection_GENERATED_HDRS := \ 
+$(GENERATED_SRCS_DIR)$(TENSORFLOW_ROOT)tensorflow/lite/micro/models/person_detect_model_data.h + +#Find any platform - specific rules for this example. +include $(wildcard $(TENSORFLOW_ROOT)tensorflow/lite/micro/examples/person_detection/*/Makefile.inc) + +# TODO(b/268568089): This test is taking very long time to finish; causing the +# CI to run for a long time to finish. +ifneq ($(TARGET_ARCH), $(filter $(TARGET_ARCH), hifimini hifi3z)) + # Tests loading and running a vision model. + $(eval $(call microlite_test,person_detection_test,\ + $(person_detection_TEST_SRCS),$(person_detection_TEST_HDRS),$(person_detection_GENERATOR_INPUTS))) +endif + +# Tests the image provider module. +$(eval $(call microlite_test,image_provider_test,\ +$(IMAGE_PROVIDER_TEST_SRCS),$(IMAGE_PROVIDER_TEST_HDRS))) + +# Tests the detection responder module. +$(eval $(call microlite_test,detection_responder_test,\ +$(DETECTION_RESPONDER_TEST_SRCS),$(DETECTION_RESPONDER_TEST_HDRS))) + +# Builds a standalone object recognition binary. +$(eval $(call microlite_test,person_detection,\ +$(person_detection_SRCS),$(person_detection_HDRS),$(person_detection_GENERATOR_INPUTS))) + +# Add sources and headers generated from $(person_detection_GENERATOR_INPUTS). +person_detection_SRCS += $(person_detection_GENERATED_SRCS) +person_detection_HDRS += $(person_detection_GENERATED_HDRS) + +list_person_detection_example_sources: + @echo $(person_detection_SRCS) + +list_person_detection_example_headers: + @echo $(person_detection_HDRS) diff --git a/tensorflow/lite/micro/examples/person_detection/README.md b/tensorflow/lite/micro/examples/person_detection/README.md new file mode 100644 index 0000000..a490629 --- /dev/null +++ b/tensorflow/lite/micro/examples/person_detection/README.md @@ -0,0 +1,32 @@ +# Person detection example + +This example shows how you can use Tensorflow Lite to run a 250 kilobyte neural +network to recognize people in images. 
+ +## Table of contents + +- [Run the tests on a development machine](#run-the-tests-on-a-development-machine) +- [Training your own model](#training-your-own-model) + + +## Run the tests on a development machine + +``` +make -f tensorflow/lite/micro/tools/make/Makefile third_party_downloads +make -f tensorflow/lite/micro/tools/make/Makefile test_person_detection_test +``` + +You should see a series of files get compiled, followed by some logging output +from a test, which should conclude with `~~~ALL TESTS PASSED~~~`. If you see +this, it means that a small program has been built and run that loads a trained +TensorFlow model, runs some example images through it, and got the expected +outputs. This particular test runs images with and without a person in them, +and checks that the network correctly identifies them. + +To understand how TensorFlow Lite does this, you can look at +[person_detection_test.cc](person_detection_test.cc). + +## Training your own model + +You can train your own model with some easy-to-use scripts. See +[training_a_model.md](training_a_model.md) for instructions. diff --git a/tensorflow/lite/micro/examples/person_detection/detection_responder.cc b/tensorflow/lite/micro/examples/person_detection/detection_responder.cc new file mode 100644 index 0000000..f8e3a69 --- /dev/null +++ b/tensorflow/lite/micro/examples/person_detection/detection_responder.cc @@ -0,0 +1,26 @@ +/* Copyright 2019 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#include "tensorflow/lite/micro/examples/person_detection/detection_responder.h" + +#include "tensorflow/lite/micro/micro_log.h" + +// This dummy implementation writes person and no person scores to the error +// console. Real applications will want to take some custom action instead, and +// should implement their own versions of this function. +void RespondToDetection(int8_t person_score, int8_t no_person_score) { + MicroPrintf("person score:%d no person score %d", person_score, + no_person_score); +} diff --git a/tensorflow/lite/micro/examples/person_detection/detection_responder.h b/tensorflow/lite/micro/examples/person_detection/detection_responder.h new file mode 100644 index 0000000..d2d945f --- /dev/null +++ b/tensorflow/lite/micro/examples/person_detection/detection_responder.h @@ -0,0 +1,32 @@ +/* Copyright 2019 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +// Provides an interface to take an action based on the output from the person +// detection model. 
+ +#ifndef TENSORFLOW_LITE_MICRO_EXAMPLES_PERSON_DETECTION_DETECTION_RESPONDER_H_ +#define TENSORFLOW_LITE_MICRO_EXAMPLES_PERSON_DETECTION_DETECTION_RESPONDER_H_ + +#include "tensorflow/lite/c/common.h" + +// Called every time the results of a person detection run are available. The +// `person_score` has the numerical confidence that the captured image contains +// a person, and `no_person_score` has the numerical confidence that the image +// does not contain a person. Typically if person_score > no person score, the +// image is considered to contain a person. This threshold may be adjusted for +// particular applications. +void RespondToDetection(int8_t person_score, int8_t no_person_score); + +#endif // TENSORFLOW_LITE_MICRO_EXAMPLES_PERSON_DETECTION_DETECTION_RESPONDER_H_ diff --git a/tensorflow/lite/micro/examples/person_detection/detection_responder_test.cc b/tensorflow/lite/micro/examples/person_detection/detection_responder_test.cc new file mode 100644 index 0000000..ae50ff7 --- /dev/null +++ b/tensorflow/lite/micro/examples/person_detection/detection_responder_test.cc @@ -0,0 +1,30 @@ +/* Copyright 2019 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/ + +#include "tensorflow/lite/micro/examples/person_detection/detection_responder.h" + +#include "tensorflow/lite/micro/testing/micro_test.h" + +TF_LITE_MICRO_TESTS_BEGIN + +TF_LITE_MICRO_TEST(TestCallability) { + // This will have external side-effects (like printing to the debug console + // or lighting an LED) that are hard to observe, so the most we can do is + // make sure the call doesn't crash. + RespondToDetection(-100, 100); + RespondToDetection(100, 50); +} + +TF_LITE_MICRO_TESTS_END diff --git a/tensorflow/lite/micro/examples/person_detection/image_provider.cc b/tensorflow/lite/micro/examples/person_detection/image_provider.cc new file mode 100644 index 0000000..44ca831 --- /dev/null +++ b/tensorflow/lite/micro/examples/person_detection/image_provider.cc @@ -0,0 +1,26 @@ +/* Copyright 2019 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/ + +#include "tensorflow/lite/micro/examples/person_detection/image_provider.h" + +#include "tensorflow/lite/micro/examples/person_detection/model_settings.h" + +TfLiteStatus GetImage(int image_width, int image_height, int channels, + int8_t* image_data) { + for (int i = 0; i < image_width * image_height * channels; ++i) { + image_data[i] = 0; + } + return kTfLiteOk; +} diff --git a/tensorflow/lite/micro/examples/person_detection/image_provider.h b/tensorflow/lite/micro/examples/person_detection/image_provider.h new file mode 100644 index 0000000..f379992 --- /dev/null +++ b/tensorflow/lite/micro/examples/person_detection/image_provider.h @@ -0,0 +1,38 @@ +/* Copyright 2019 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#ifndef TENSORFLOW_LITE_MICRO_EXAMPLES_PERSON_DETECTION_IMAGE_PROVIDER_H_ +#define TENSORFLOW_LITE_MICRO_EXAMPLES_PERSON_DETECTION_IMAGE_PROVIDER_H_ + +#include "tensorflow/lite/c/common.h" + +// This is an abstraction around an image source like a camera, and is +// expected to return 8-bit sample data. The assumption is that this will be +// called in a low duty-cycle fashion in a low-power application. 
In these +// cases, the imaging sensor need not be run in a streaming mode, but rather can +// be idled in a relatively low-power mode between calls to GetImage(). The +// assumption is that the overhead and time of bringing the low-power sensor out +// of this standby mode is commensurate with the expected duty cycle of the +// application. The underlying sensor may actually be put into a streaming +// configuration, but the image buffer provided to GetImage should not be +// overwritten by the driver code until the next call to GetImage(); +// +// The reference implementation can have no platform-specific dependencies, so +// it just returns a static image. For real applications, you should +// ensure there's a specialized implementation that accesses hardware APIs. +TfLiteStatus GetImage(int image_width, int image_height, int channels, + int8_t* image_data); + +#endif // TENSORFLOW_LITE_MICRO_EXAMPLES_PERSON_DETECTION_IMAGE_PROVIDER_H_ diff --git a/tensorflow/lite/micro/examples/person_detection/image_provider_test.cc b/tensorflow/lite/micro/examples/person_detection/image_provider_test.cc new file mode 100644 index 0000000..570b74d --- /dev/null +++ b/tensorflow/lite/micro/examples/person_detection/image_provider_test.cc @@ -0,0 +1,40 @@ +/* Copyright 2022 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/
+
+#include "tensorflow/lite/micro/examples/person_detection/image_provider.h"
+
+#include <cstdint>
+
+#include "tensorflow/lite/c/common.h"
+#include "tensorflow/lite/micro/examples/person_detection/model_settings.h"
+#include "tensorflow/lite/micro/testing/micro_test.h"
+
+TF_LITE_MICRO_TESTS_BEGIN
+
+TF_LITE_MICRO_TEST(TestImageProvider) {
+  int8_t image_data[kMaxImageSize];
+  TfLiteStatus get_status =
+      GetImage(kNumCols, kNumRows, kNumChannels, image_data);
+  TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, get_status);
+
+  // Make sure we can read all of the returned memory locations.
+  uint32_t total = 0;
+  for (int i = 0; i < kMaxImageSize; ++i) {
+    total += image_data[i];
+  }
+  (void)total;
+}
+
+TF_LITE_MICRO_TESTS_END
diff --git a/tensorflow/lite/micro/examples/person_detection/main.cc b/tensorflow/lite/micro/examples/person_detection/main.cc
new file mode 100644
index 0000000..b53d366
--- /dev/null
+++ b/tensorflow/lite/micro/examples/person_detection/main.cc
@@ -0,0 +1,27 @@
+/* Copyright 2019 The TensorFlow Authors. All Rights Reserved.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
Other devices (for example FreeRTOS or ESP32) that have different +// requirements for entry code (like an app_main function) should specialize +// this main.cc file in a target-specific subfolder. +int main(int argc, char* argv[]) { + setup(); + while (true) { + loop(); + } +} diff --git a/tensorflow/lite/micro/examples/person_detection/main_functions.cc b/tensorflow/lite/micro/examples/person_detection/main_functions.cc new file mode 100644 index 0000000..f91a1a3 --- /dev/null +++ b/tensorflow/lite/micro/examples/person_detection/main_functions.cc @@ -0,0 +1,109 @@ +/* Copyright 2022 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#include "tensorflow/lite/micro/examples/person_detection/main_functions.h" + +#include "tensorflow/lite/micro/examples/person_detection/detection_responder.h" +#include "tensorflow/lite/micro/examples/person_detection/image_provider.h" +#include "tensorflow/lite/micro/examples/person_detection/model_settings.h" +#include "tensorflow/lite/micro/micro_interpreter.h" +#include "tensorflow/lite/micro/micro_log.h" +#include "tensorflow/lite/micro/micro_mutable_op_resolver.h" +#include "tensorflow/lite/micro/models/person_detect_model_data.h" +#include "tensorflow/lite/micro/system_setup.h" +#include "tensorflow/lite/schema/schema_generated.h" + +// Globals, used for compatibility with Arduino-style sketches. 
+namespace {
+const tflite::Model* model = nullptr;
+tflite::MicroInterpreter* interpreter = nullptr;
+TfLiteTensor* input = nullptr;
+
+// In order to use optimized tensorflow lite kernels, a signed int8_t quantized
+// model is preferred over the legacy unsigned model format. This means that
+// throughout this project, input images must be converted from unsigned to
+// signed format. The easiest and quickest way to convert from unsigned to
+// signed 8-bit integers is to subtract 128 from the unsigned value to get a
+// signed value.
+
+// An area of memory to use for input, output, and intermediate arrays.
+constexpr int kTensorArenaSize = 136 * 1024;
+alignas(16) static uint8_t tensor_arena[kTensorArenaSize];
+}  // namespace
+
+// The name of this function is important for Arduino compatibility.
+void setup() {
+  tflite::InitializeTarget();
+
+  // Map the model into a usable data structure. This doesn't involve any
+  // copying or parsing, it's a very lightweight operation.
+  model = tflite::GetModel(g_person_detect_model_data);
+  if (model->version() != TFLITE_SCHEMA_VERSION) {
+    MicroPrintf(
+        "Model provided is schema version %d not equal "
+        "to supported version %d.",
+        model->version(), TFLITE_SCHEMA_VERSION);
+    return;
+  }
+
+  // Pull in only the operation implementations we need.
+  // This relies on a complete list of all the ops needed by this graph.
+
+  // NOLINTNEXTLINE(runtime-global-variables)
+  static tflite::MicroMutableOpResolver<5> micro_op_resolver;
+  micro_op_resolver.AddAveragePool2D(tflite::Register_AVERAGE_POOL_2D_INT8());
+  micro_op_resolver.AddConv2D(tflite::Register_CONV_2D_INT8());
+  micro_op_resolver.AddDepthwiseConv2D(
+      tflite::Register_DEPTHWISE_CONV_2D_INT8());
+  micro_op_resolver.AddReshape();
+  micro_op_resolver.AddSoftmax(tflite::Register_SOFTMAX_INT8());
+
+  // Build an interpreter to run the model with.
+ // NOLINTNEXTLINE(runtime-global-variables) + static tflite::MicroInterpreter static_interpreter( + model, micro_op_resolver, tensor_arena, kTensorArenaSize); + interpreter = &static_interpreter; + + // Allocate memory from the tensor_arena for the model's tensors. + TfLiteStatus allocate_status = interpreter->AllocateTensors(); + if (allocate_status != kTfLiteOk) { + MicroPrintf("AllocateTensors() failed"); + return; + } + + // Get information about the memory area to use for the model's input. + input = interpreter->input(0); +} + +// The name of this function is important for Arduino compatibility. +void loop() { + // Get image from provider. + if (kTfLiteOk != + GetImage(kNumCols, kNumRows, kNumChannels, input->data.int8)) { + MicroPrintf("Image capture failed."); + } + + // Run the model on this input and make sure it succeeds. + if (kTfLiteOk != interpreter->Invoke()) { + MicroPrintf("Invoke failed."); + } + + TfLiteTensor* output = interpreter->output(0); + + // Process the inference results. + int8_t person_score = output->data.uint8[kPersonIndex]; + int8_t no_person_score = output->data.uint8[kNotAPersonIndex]; + RespondToDetection(person_score, no_person_score); +} diff --git a/tensorflow/lite/micro/examples/person_detection/main_functions.h b/tensorflow/lite/micro/examples/person_detection/main_functions.h new file mode 100644 index 0000000..2620097 --- /dev/null +++ b/tensorflow/lite/micro/examples/person_detection/main_functions.h @@ -0,0 +1,37 @@ +/* Copyright 2019 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#ifndef TENSORFLOW_LITE_MICRO_EXAMPLES_PERSON_DETECTION_MAIN_FUNCTIONS_H_ +#define TENSORFLOW_LITE_MICRO_EXAMPLES_PERSON_DETECTION_MAIN_FUNCTIONS_H_ + +// Expose a C friendly interface for main functions. +#ifdef __cplusplus +extern "C" { +#endif + +// Initializes all data needed for the example. The name is important, and needs +// to be setup() for Arduino compatibility. +void setup(); + +// Runs one iteration of data gathering and inference. This should be called +// repeatedly from the application code. The name needs to be loop() for Arduino +// compatibility. +void loop(); + +#ifdef __cplusplus +} +#endif + +#endif // TENSORFLOW_LITE_MICRO_EXAMPLES_PERSON_DETECTION_MAIN_FUNCTIONS_H_ diff --git a/tensorflow/lite/micro/examples/person_detection/model_settings.cc b/tensorflow/lite/micro/examples/person_detection/model_settings.cc new file mode 100644 index 0000000..f11d48a --- /dev/null +++ b/tensorflow/lite/micro/examples/person_detection/model_settings.cc @@ -0,0 +1,21 @@ +/* Copyright 2019 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/ + +#include "tensorflow/lite/micro/examples/person_detection/model_settings.h" + +const char* kCategoryLabels[kCategoryCount] = { + "notperson", + "person", +}; diff --git a/tensorflow/lite/micro/examples/person_detection/model_settings.h b/tensorflow/lite/micro/examples/person_detection/model_settings.h new file mode 100644 index 0000000..f94d58e --- /dev/null +++ b/tensorflow/lite/micro/examples/person_detection/model_settings.h @@ -0,0 +1,35 @@ +/* Copyright 2019 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#ifndef TENSORFLOW_LITE_MICRO_EXAMPLES_PERSON_DETECTION_MODEL_SETTINGS_H_ +#define TENSORFLOW_LITE_MICRO_EXAMPLES_PERSON_DETECTION_MODEL_SETTINGS_H_ + +// Keeping these as constant expressions allow us to allocate fixed-sized arrays +// on the stack for our working memory. + +// All of these values are derived from the values used during model training, +// if you change your model you'll need to update these constants. 
+constexpr int kNumCols = 96; +constexpr int kNumRows = 96; +constexpr int kNumChannels = 1; + +constexpr int kMaxImageSize = kNumCols * kNumRows * kNumChannels; + +constexpr int kCategoryCount = 2; +constexpr int kPersonIndex = 1; +constexpr int kNotAPersonIndex = 0; +extern const char* kCategoryLabels[kCategoryCount]; + +#endif // TENSORFLOW_LITE_MICRO_EXAMPLES_PERSON_DETECTION_MODEL_SETTINGS_H_ diff --git a/tensorflow/lite/micro/examples/person_detection/person_detection_binary_test.sh b/tensorflow/lite/micro/examples/person_detection/person_detection_binary_test.sh new file mode 100755 index 0000000..091b32e --- /dev/null +++ b/tensorflow/lite/micro/examples/person_detection/person_detection_binary_test.sh @@ -0,0 +1,31 @@ +#!/bin/bash +# Copyright 2020 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================== +# +# Bash unit tests for the example binary. + +set -e + +OUTPUT_LOG_FILE=${TEST_TMPDIR}/output_log.txt + +${TEST_SRCDIR}/${TEST_WORKSPACE}/tensorflow/lite/micro/examples/person_detection/person_detection 2>&1 | head > ${OUTPUT_LOG_FILE} + +if ! 
grep -q 'person score' ${OUTPUT_LOG_FILE}; then + echo "ERROR: Expected logs not found in output '${OUTPUT_LOG_FILE}'" + exit 1 +fi + +echo +echo "SUCCESS: person_detection_binary_test PASSED" diff --git a/tensorflow/lite/micro/examples/person_detection/person_detection_test.cc b/tensorflow/lite/micro/examples/person_detection/person_detection_test.cc new file mode 100644 index 0000000..679c26e --- /dev/null +++ b/tensorflow/lite/micro/examples/person_detection/person_detection_test.cc @@ -0,0 +1,131 @@ +/* Copyright 2022 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#include "tensorflow/lite/c/common.h" +#include "tensorflow/lite/micro/examples/person_detection/model_settings.h" +#include "tensorflow/lite/micro/examples/person_detection/testdata/no_person_image_data.h" +#include "tensorflow/lite/micro/examples/person_detection/testdata/person_image_data.h" +#include "tensorflow/lite/micro/micro_interpreter.h" +#include "tensorflow/lite/micro/micro_log.h" +#include "tensorflow/lite/micro/micro_mutable_op_resolver.h" +#include "tensorflow/lite/micro/models/person_detect_model_data.h" +#include "tensorflow/lite/micro/testing/micro_test.h" +#include "tensorflow/lite/schema/schema_generated.h" + +// Create an area of memory to use for input, output, and intermediate arrays. 
+#if defined(XTENSA) && defined(VISION_P6) +constexpr int tensor_arena_size = 352 * 1024; +#else +constexpr int tensor_arena_size = 136 * 1024; +#endif // defined(XTENSA) && defined(VISION_P6) +uint8_t tensor_arena[tensor_arena_size]; + +TF_LITE_MICRO_TESTS_BEGIN + +TF_LITE_MICRO_TEST(TestInvoke) { + // Map the model into a usable data structure. This doesn't involve any + // copying or parsing, it's a very lightweight operation. + const tflite::Model* model = ::tflite::GetModel(g_person_detect_model_data); + if (model->version() != TFLITE_SCHEMA_VERSION) { + MicroPrintf( + "Model provided is schema version %d not equal " + "to supported version %d.\n", + model->version(), TFLITE_SCHEMA_VERSION); + } + + // Pull in only the operation implementations we need. + // This relies on a complete list of all the ops needed by this graph. + // An easier approach is to just use the AllOpsResolver, but this will + // incur some penalty in code space for op implementations that are not + // needed by this graph. + tflite::MicroMutableOpResolver<5> micro_op_resolver; + micro_op_resolver.AddAveragePool2D(tflite::Register_AVERAGE_POOL_2D_INT8()); + micro_op_resolver.AddConv2D(tflite::Register_CONV_2D_INT8()); + micro_op_resolver.AddDepthwiseConv2D( + tflite::Register_DEPTHWISE_CONV_2D_INT8()); + micro_op_resolver.AddReshape(); + micro_op_resolver.AddSoftmax(tflite::Register_SOFTMAX_INT8()); + + // Build an interpreter to run the model with. + tflite::MicroInterpreter interpreter(model, micro_op_resolver, tensor_arena, + tensor_arena_size); + interpreter.AllocateTensors(); + + // Get information about the memory area to use for the model's input. + TfLiteTensor* input = interpreter.input(0); + + // Make sure the input has the properties we expect. 
+  TF_LITE_MICRO_EXPECT(input != nullptr);
+  TF_LITE_MICRO_EXPECT_EQ(4, input->dims->size);
+  TF_LITE_MICRO_EXPECT_EQ(1, input->dims->data[0]);
+  TF_LITE_MICRO_EXPECT_EQ(kNumRows, input->dims->data[1]);
+  TF_LITE_MICRO_EXPECT_EQ(kNumCols, input->dims->data[2]);
+  TF_LITE_MICRO_EXPECT_EQ(kNumChannels, input->dims->data[3]);
+  TF_LITE_MICRO_EXPECT_EQ(kTfLiteInt8, input->type);
+
+  // Copy an image with a person into the memory area used for the input.
+  TFLITE_DCHECK_EQ(input->bytes, static_cast<size_t>(g_person_image_data_size));
+  memcpy(input->data.int8, g_person_image_data, input->bytes);
+
+  // Run the model on this input and make sure it succeeds.
+  TfLiteStatus invoke_status = interpreter.Invoke();
+  if (invoke_status != kTfLiteOk) {
+    MicroPrintf("Invoke failed\n");
+  }
+  TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, invoke_status);
+
+  // Get the output from the model, and make sure it's the expected size and
+  // type.
+  TfLiteTensor* output = interpreter.output(0);
+  TF_LITE_MICRO_EXPECT_EQ(2, output->dims->size);
+  TF_LITE_MICRO_EXPECT_EQ(1, output->dims->data[0]);
+  TF_LITE_MICRO_EXPECT_EQ(kCategoryCount, output->dims->data[1]);
+  TF_LITE_MICRO_EXPECT_EQ(kTfLiteInt8, output->type);
+
+  // Make sure that the expected "Person" score is higher than the other class.
+  int8_t person_score = output->data.int8[kPersonIndex];
+  int8_t no_person_score = output->data.int8[kNotAPersonIndex];
+  MicroPrintf("person data. person score: %d, no person score: %d\n",
+              person_score, no_person_score);
+  TF_LITE_MICRO_EXPECT_GT(person_score, no_person_score);
+
+  memcpy(input->data.int8, g_no_person_image_data, input->bytes);
+
+  // Run the model on this "No Person" input.
+  invoke_status = interpreter.Invoke();
+  if (invoke_status != kTfLiteOk) {
+    MicroPrintf("Invoke failed\n");
+  }
+  TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, invoke_status);
+
+  // Get the output from the model, and make sure it's the expected size and
+  // type.
+ output = interpreter.output(0); + TF_LITE_MICRO_EXPECT_EQ(2, output->dims->size); + TF_LITE_MICRO_EXPECT_EQ(1, output->dims->data[0]); + TF_LITE_MICRO_EXPECT_EQ(kCategoryCount, output->dims->data[1]); + TF_LITE_MICRO_EXPECT_EQ(kTfLiteInt8, output->type); + + // Make sure that the expected "No Person" score is higher. + person_score = output->data.int8[kPersonIndex]; + no_person_score = output->data.int8[kNotAPersonIndex]; + MicroPrintf("no person data. person score: %d, no person score: %d\n", + person_score, no_person_score); + TF_LITE_MICRO_EXPECT_GT(no_person_score, person_score); + + MicroPrintf("Ran successfully\n"); +} + +TF_LITE_MICRO_TESTS_END diff --git a/tensorflow/lite/micro/examples/person_detection/testdata/no_person.bmp b/tensorflow/lite/micro/examples/person_detection/testdata/no_person.bmp new file mode 100644 index 0000000..d1b3d23 Binary files /dev/null and b/tensorflow/lite/micro/examples/person_detection/testdata/no_person.bmp differ diff --git a/tensorflow/lite/micro/examples/person_detection/testdata/person.bmp b/tensorflow/lite/micro/examples/person_detection/testdata/person.bmp new file mode 100644 index 0000000..0230b11 Binary files /dev/null and b/tensorflow/lite/micro/examples/person_detection/testdata/person.bmp differ diff --git a/tensorflow/lite/micro/examples/person_detection/training_a_model.md b/tensorflow/lite/micro/examples/person_detection/training_a_model.md new file mode 100644 index 0000000..0f62572 --- /dev/null +++ b/tensorflow/lite/micro/examples/person_detection/training_a_model.md @@ -0,0 +1,384 @@ +## Person Detection Training + +In this document, you will learn how to generate a 250 KB binary classification +model to detect if a person is present in an input image or not. 
+ +## Resources + +### Trained model + +The trained model file (C source file `person_detect_model_data.cc`) used in +this example to run person detection on various microcontrollers is available in +[person_detection.zip](https://storage.googleapis.com/download.tensorflow.org/data/tf_lite_micro_person_data_int8_grayscale_2020_01_13.zip). This document shows you how to generate the model file. + +### Dataset + +We use the [Visual Wake Words dataset](https://arxiv.org/abs/1906.05721) which +contains images that belong to two classes (person or not-person). This dataset is designed to be useful for benchmarking and testing embedded computer vision, since it represents a very common task, i.e, binary classification, that we need to accomplish with tight resource constraints. We're hoping to see it drive even better models for this and similar tasks. + +This is a large download (~40GB), so you'll need to make sure you have at least +100GB free on your drive to allow space for unpacking and further processing. + +### Model Architecture + +[MobileNets](https://arxiv.org/abs/1704.04861) are a family of efficient Convolutional Neural Networks for Mobile Vision, designed to provide good accuracy for as few weight parameters and arithmetic operations as possible. + +### Compute + +This model will take several hours to train on a powerful machine with GPUs and +several days with CPUs. Alternatively, we recommend using a +[Google Cloud Deep Learning VM](https://cloud.google.com/deep-learning-vm/) or +[Google Colab Pro](https://colab.research.google.com/signup) for faster training. + +### Framework + +We'll be training the models using the Slim library in TensorFlow 1. It is +still widely used but deprecated, so future versions of TensorFlow may not +support this approach. + +Keras is the recommended interface for building models in TensorFlow 2 and +future versions, but does not support all the features we need to build the +person detection model. 
We hope to publish Keras instructions in the future. + +## Code + +### Setup + +We will be running all commands from your home directory. You can place the +repository somewhere else, but you'll need to update all references to it. Now run this step initially: + +``` +! cd ~ +``` + +Clone the [TensorFlow models](https://github.com/tensorflow/models) github repository: + +``` +! git clone https://github.com/tensorflow/models.git +``` + +Specifically, we will be using `~/models/research/slim` a [library](https://github.com/tensorflow/models/tree/master/research/slim) for defining, training and evaluating models. However, in order to use it, you'll need to make sure its modules can be found by Python, and +install one dependency. Here's how to do this in an iPython notebook: + +``` +! pip install contextlib2 +import os +new_python_path = (os.environ.get("PYTHONPATH") or '') + ":models/research/slim" +%env PYTHONPATH=$new_python_path +``` + +### Download the Dataset + +The [Visual Wake Words dataset](https://arxiv.org/abs/1906.05721) contains images which belong to two classes: person (labelled as 1) and not-person (labelled as 0) and it is derived from the [COCO dataset](http://cocodataset.org/#explore) containing 80 categories (eg: cat, dog, umbrella, etc). You can download the dataset by running this script: + +``` +! python models/research/slim/download_and_convert_data.py \ +--logtostderr \ +--dataset_name=visualwakewords \ +--dataset_dir=person_detection_dataset \ +--foreground_class_of_interest='person' \ +--small_object_area_threshold=0.005 +``` + +This will take several minutes (~20 minutes or more) so you may have to wait for a while before you proceed onto the next part. When it's done, you'll have a set of TFRecords in the `person_detection_dataset/` directory holding the labeled image information. + +The script takes a long time as the COCO dataset does not have a label for each image, instead each image comes with a list of labelled bounding boxes. 
To create the Visual WakeWords dataset, we loop over every image and its bounding boxes and if an image has at least one bounding box labelled as 'person' with an area greater than 0.5% of the area of the image, then the entire image is labelled as "person", otherwise it is labelled as "non-person". + +### Train the model + +``` +! python models/research/slim/train_image_classifier.py \ + --alsologtostderr \ + --dataset_name=visualwakewords \ + --dataset_dir=person_detection_dataset \ + --dataset_split_name=train \ + --train_image_size=96 \ + --use_grayscale=True \ + --preprocessing_name=mobilenet_v1 \ + --model_name=mobilenet_v1_025 \ + --train_dir=person_detection_train \ + --save_summaries_secs=300 \ + --learning_rate=0.045 \ + --label_smoothing=0.1 \ + --learning_rate_decay_factor=0.98 \ + --num_epochs_per_decay=2.5 \ + --moving_average_decay=0.9999 \ + --batch_size=96 \ + --max_number_of_steps=1000000 +``` + +This will take a couple of days on a single-GPU v100 instance to complete all +one-million steps, but you should be able to get a fairly accurate model after +a few hours if you want to experiment early. + +- `--dataset_dir` parameter should match the one where you saved the +TFRecords from the Visual Wake Words build script from the previous step. +- `--preprocessing_name` controls how input images are modified before they're +fed into the model. It reduces each image to the size specified by `--train_image_size` (here 96), convert them to grayscale using `--use_grayscale=True` which is compatible with the monochrome [HM01B0](https://himax.com.tw/products/cmos-image-sensor/image-sensors/hm01b0/) camera we're using on the SparkFun Edge board and scale the pixel values from 0 to 255 integers into -1.0 to +1.0 floating point numbers (which will be [quantized](https://en.wikipedia.org/wiki/Quantization) after training). +- `--model_name` is the model architecture we'll be using; here it's `mobilenet_v1_025`. 
The 'mobilenet_v1' prefix tells the script to use the first version of MobileNet. We use V1 as it uses the least amount of RAM for its intermediate activation buffers compared to later versions. The '025' is the depth multiplier, which reduces the number of weight parameters. This low setting ensures the model fits within 250KB of Flash. +- `--train_dir` will contain the trained checkpoints and summaries. +- The `--learning_rate`, `--label_smoothing`, `--learning_rate_decay_factor`, +`--num_epochs_per_decay`, `--moving_average_decay` and `--batch_size` are all +parameters that control how weights are updated during the training +process. Training deep networks is still a bit of a dark art, so these exact +values we found through experimentation for this particular model. You can try +tweaking them to speed up training or gain a small boost in accuracy, but we +can't give much guidance for how to make those changes, and it's easy to get +combinations where the training accuracy never converges. +- The `--max_number_of_steps` defines how long the training should continue. +There's no good way to figure out this threshold in advance, you have to +experiment to tell when the accuracy of the model is no longer improving to +tell when to cut it off. In our case we default to a million steps, since with +this particular model we know that's a good point to stop. + +Once you start the script, you should see output that looks something like this: + +``` +INFO:tensorflow:global step 4670: loss = 0.7112 (0.251 sec/step) +I0928 00:16:21.774756 140518023943616 learning.py:507] global step 4670: loss = +0.7112 (0.251 sec/step) +INFO:tensorflow:global step 4680: loss = 0.6596 (0.227 sec/step) +I0928 00:16:24.365901 140518023943616 learning.py:507] global step 4680: loss = +0.6596 (0.227 sec/step) +``` + +Don't worry about the line duplication, this is just a side-effect of the way +TensorFlow log printing interacts with Python. 
Each line has two key bits of
+information about the training process.
+1. The `global step` is a count of how far
+through the training we are. Since we've set the limit as a million steps, in
+this case we're less than half of one percent complete. The steps per second estimate is
+also useful, since you can use it to estimate a rough duration for the whole
+training process. In this case, we're completing about four steps a second, so
+a million steps will take about 70 hours, or three days.
+2. The `loss` is a measure of how close the partially-trained model's predictions are to the correct values, and lower values are *better*. This will show a lot of variation but should on an average decrease during training if the model is learning. This kind of variation is a lot easier to see in a graph, which is one of the main reasons to try TensorBoard.
+
+#### TensorBoard
+
+TensorBoard is a web application that lets you view data visualizations from
+TensorFlow training sessions. You can start TensorBoard using the command line:
+Run: `tensorboard --logdir person_detection_train`. Go to the URL it provides.
+
+It may take a little while for the graphs to have anything useful in them, since
+the script only saves summaries every five minutes (or 300 seconds). The most important graph is
+called `clone_loss` and this shows the progression of the same loss value
+that's displayed on the logging output. It fluctuates a lot, but the
+overall trend is downwards over time. If you don't see this sort of progression
+after a few hours of training, it's a sign that your model isn't
+converging to a good solution, and you may need to debug what's going wrong
+either with your dataset or the training parameters.
+
+TensorBoard defaults to the 'Scalars' tab when it opens, but the other section
+that can be useful during training is 'Images'. This shows a
+random selection of the pictures the model is currently being trained on,
+including any distortions and other preprocessing.
This information isn't as
+essential as the loss graphs, but it can be useful to ensure the dataset is what
+you expect, and it is interesting to see the examples updating as training
+progresses.
+
+### Evaluate the model
+
+(You don't need to wait until the model is fully trained, you
+can check the accuracy of any checkpoints in the `--train_dir` folder.)
+
+```
+! python models/research/slim/eval_image_classifier.py \
+  --alsologtostderr \
+  --dataset_name=visualwakewords \
+  --dataset_dir=person_detection_dataset \
+  --dataset_split_name=val \
+  --eval_image_size=96 \
+  --use_grayscale=True \
+  --preprocessing_name=mobilenet_v1 \
+  --model_name=mobilenet_v1_025 \
+  --train_dir=person_detection_train \
+  --checkpoint_path=person_detection_train/model.ckpt-123456
+```
+
+You'll need to make sure that `--checkpoint_path` is pointing to a valid set of
+checkpoint data. Checkpoints are stored in three separate files, so the value
+should be their common prefix. For example if you have a checkpoint file called
+'model.ckpt-5179.data-00000-of-00001', the prefix would be 'model.ckpt-5179'.
+The script should produce output that looks something like this:
+
+```
+INFO:tensorflow:Evaluation [406/406]
+I0929 22:52:59.936022 140225887045056 evaluation.py:167] Evaluation [406/406]
+eval/Accuracy[0.717438412]eval/Recall_5[1]
+```
+
+The important number here is the accuracy. It shows the proportion of the
+images that were classified correctly, which is 72% in this case, after
+converting to a percentage. If you follow the example script, you should expect
+a fully-trained model to achieve an accuracy of around 84% after one million
+steps, and show a loss of around 0.4.
+
+### Convert the TF model to a TF Lite model for Inference
+
+When the model has trained to an accuracy you're happy with, you'll need to
+convert the results from the TensorFlow training environment into a form you
+can run on an embedded device.
As we've seen in previous chapters, this can be +a complex process, and tf.slim adds a few of its own wrinkles too. + +#### Generate the model graph + +Slim generates the architecture from the `model_name` every time one of its +scripts is run, so for a model to be used outside of Slim it needs to be saved +in a common format. We're going to use the GraphDef protobuf serialization +format, since that's understood by both Slim and the rest of TensorFlow. This contains the layout of the operations in the model, but doesn't yet have any of the weight data. + +``` +! python models/research/slim/export_inference_graph.py \ + --alsologtostderr \ + --dataset_name=visualwakewords \ + --image_size=96 \ + --use_grayscale=True \ + --model_name=mobilenet_v1_025 \ + --output_file=person_detection_graph.pb +``` + +You should have a new 'person_detection_graph.pb' file in +your home folder. + +#### Generate the frozen model graph (combine model graph and trained weights) + +The process of storing the trained weights together with the operation graph is +known as freezing. This converts all of the variables in the graph to +constants, after loading their values from a checkpoint file. The command below +uses a checkpoint from the millionth training step, but you can supply any +valid checkpoint path. The graph freezing script is stored inside the main +TensorFlow repository, so we have to download this from GitHub before running +this command. + +``` +! git clone https://github.com/tensorflow/tensorflow +! 
python tensorflow/tensorflow/python/tools/freeze_graph.py \ +--input_graph=person_detection_graph.pb \ +--input_checkpoint=person_detection_train/model.ckpt-1000000 \ +--input_binary=true \ +--output_node_names=MobilenetV1/Predictions/Reshape_1 \ +--output_graph=person_detection_frozen_graph.pb +``` + +After this, you should see a file called `person_detection_frozen_graph.pb` + +#### Generate the TensorFlow Lite File with Quantization + +[Quantization](https://en.wikipedia.org/wiki/Quantization) is a tricky and involved process, and it's still very much an +active area of research, so taking the float graph that we've trained so far +and converting it down to eight bit takes quite a bit of code. You can find +more of an explanation of what quantization is and how it works in the chapter +on latency optimization, but here we'll show you how to use it with the model +we've trained. The majority of the code is preparing example images to feed +into the trained network, so that the ranges of the activation layers in +typical use can be measured. We rely on the TFLiteConverter class to handle the +quantization and conversion into the TensorFlow Lite FlatBuffer file that we +need for the on-device inference engine. 
+ +``` +import tensorflow.compat.v1 as tf +import io +import PIL +import numpy as np + +def representative_dataset_gen(): + + record_iterator = +tf.python_io.tf_record_iterator(path='person_detection_dataset/val.record-00000-of-00010') + + for _ in range(250): + string_record = next(record_iterator) + example = tf.train.Example() + example.ParseFromString(string_record) + image_stream = +io.BytesIO(example.features.feature['image/encoded'].bytes_list.value[0]) + image = PIL.Image.open(image_stream) + image = image.resize((96, 96)) + image = image.convert('L') + array = np.array(image) + array = np.expand_dims(array, axis=2) + array = np.expand_dims(array, axis=0) + array = ((array / 127.5) - 1.0).astype(np.float32) + yield([array]) + +converter = +tf.lite.TFLiteConverter.from_frozen_graph('person_detection_frozen_graph.pb', +['input'], ['MobilenetV1/Predictions/Reshape_1']) +converter.optimizations = [tf.lite.Optimize.DEFAULT] +converter.representative_dataset = representative_dataset_gen +converter.target_spec.supported_ops = [tf.lite.OpsSet.TFLITE_BUILTINS_INT8] +converter.inference_input_type = tf.int8 +converter.inference_output_type = tf.int8 + +tflite_quant_model = converter.convert() +open("person_detection_model.tflite", "wb").write(tflite_quant_model) +``` + +#### Generate the C source file + +The converter writes out a file, but most embedded devices don't have a file +system. To access the serialized data from our program, we have to compile it +into the executable and store it in Flash. The easiest way to do that is to +convert the file into a C data array. + +``` +# Install xxd if it is not available +! apt-get -qq install xxd +# Save the file as a C source file +! xxd -i person_detection_model.tflite > person_detect_model_data.cc +``` + +You can now replace the existing `person_detect_model_data.cc` file with the +version you've trained, and be able to run your own model on embedded devices. 
+ +## Other resources +### Training for a different category + +To customize your model you can update the `foreground_class_of_interest` to one of the 80 categories from the COCO dataset and adjust the threshold by modifying `small_object_area_threshold`. Here's an example that looks for cars: + +``` +! python models/research/slim/download_and_convert_data.py \ +--logtostderr \ +--dataset_name=visualwakewords \ +--dataset_dir=car_dataset \ +--foreground_class_of_interest='car' \ +--small_object_area_threshold=0.005 +``` + +If the kind of object you're interested in isn't present in MS-COCO, you may be +able to use transfer learning to help you train on a custom dataset you've +gathered, even if it's much smaller. We don't have an example of this +yet, but we hope to share one soon. + +### Understanding the Model Architecture + +[MobileNets](https://arxiv.org/abs/1704.04861) are a family of architectures +designed to provide good accuracy for as few weight parameters and arithmetic +operations as possible. There are now multiple versions, but in our case we're +using the original v1 since it required the smallest amount of RAM at runtime. +The core concept behind the architecture is depthwise separable convolution. +This is a variant of classical two-dimensional convolutions that works in a +much more efficient way, without sacrificing very much accuracy. Regular +convolution calculates an output value based on applying a filter of a +particular size across all channels of the input. This means the number of +calculations involved in each output is width of the filter multiplied by +height, multiplied by the number of input channels. Depthwise convolution +breaks this large calculation into separate parts. First each input channel is +filtered by one or more rectangular filters to produce intermediate values. +These values are then combined using pointwise convolutions. 
This dramatically +reduces the number of calculations needed, and in practice produces similar +results to regular convolution. + +MobileNet v1 is a stack of 14 of these depthwise separable convolution layers +with an average pool, then a fully-connected layer followed by a softmax at the +end. We've specified a 'width multiplier' of 0.25, which has the effect of +reducing the number of computations down to around 60 million per inference, by +shrinking the number of channels in each activation layer by 75% compared to +the standard model. In essence it's very similar to a normal convolutional +neural network in operation, with each layer learning patterns in the input. +Earlier layers act more like edge recognition filters, spotting low-level +structure in the image, and later layers synthesize that information into more +abstract patterns that help with the final object classification. + + + diff --git a/tensorflow/lite/micro/examples/person_detection/utils/BUILD b/tensorflow/lite/micro/examples/person_detection/utils/BUILD new file mode 100644 index 0000000..980d803 --- /dev/null +++ b/tensorflow/lite/micro/examples/person_detection/utils/BUILD @@ -0,0 +1,42 @@ +load("@tflm_pip_deps//:requirements.bzl", "requirement") + +package( + features = ["-layering_check"], + licenses = ["notice"], +) + +py_binary( + name = "raw_to_bitmap", + srcs = ["raw_to_bitmap.py"], + python_version = "PY3", + srcs_version = "PY3ONLY", + deps = [ + requirement("numpy"), + ], +) + +py_library( + name = "raw_to_bitmap_lib", + srcs = ["raw_to_bitmap.py"], + srcs_version = "PY3", + deps = [ + requirement("numpy"), + ], +) + +py_test( + name = "raw_to_bitmap_test", + srcs = ["raw_to_bitmap_test.py"], + data = glob(["testdata/**"]), + python_version = "PY3", + tags = [ + "nomicro_static", # TF dep incompatible w/ TF_LITE_STATIC_MEMORY. + "notap", # TODO(b/186679612) + "noubsan", # TODO(b/144512025): Fix raw_to_bitmap_test to fix ubsan failure. 
+ ], + deps = [ + ":raw_to_bitmap_lib", + requirement("tensorflow-cpu"), + requirement("numpy"), + ], +) diff --git a/tensorflow/lite/micro/examples/person_detection/utils/raw_to_bitmap.py b/tensorflow/lite/micro/examples/person_detection/utils/raw_to_bitmap.py new file mode 100644 index 0000000..a8330eb --- /dev/null +++ b/tensorflow/lite/micro/examples/person_detection/utils/raw_to_bitmap.py @@ -0,0 +1,199 @@ +# Copyright 2019 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================== +"""Convert raw bytes to a bitmap. + +Converts a raw image dumped to a file into a bitmap. The file must contain +complete bitmap images in 324 x 244 resolution, formatted as follows: + ++++ frame +++ + <16 one-byte values separated by spaces> +--- frame --- + +For example, the first line might look like: +0x00000000 C5 C3 CE D1 D9 DA D6 E3 E2 EB E9 EB DB E4 F5 FF + +The bitmaps are automatically saved to the same directory as the log file, and +are displayed by the script. 
+""" + +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function + +import argparse +import os +import os.path +import re + +import numpy as np + +_DICT_RESOLUTIONS = { + 'QVGA': (324, 244, 1), + 'GRAY': (96, 96, 1), + 'RGB': (96, 96, 3), +} + +_VERSION = 0 +_SUBVERSION = 1 + + +def check_file_existence(x): + if not os.path.isfile(x): + # Argparse uses the ArgumentTypeError to give a rejection message like: + # error: argument input: x does not exist + raise argparse.ArgumentTypeError('{0} does not exist'.format(x)) + return x + + +def show_and_save_bitmaps(input_file, bitmap_list, channels): + """Display and save a list of bitmaps. + + Args: + input_file: input file name + bitmap_list: list of numpy arrays to represent bitmap images + channels: color channel count + """ + try: + from PIL import Image # pylint: disable=g-import-not-at-top + except ImportError: + raise NotImplementedError('Image display and save not implemented.') + + for idx, bitmap in enumerate(bitmap_list): + path = os.path.dirname(os.path.abspath(input_file)) + basename = os.path.split(os.path.splitext(input_file)[0])[-1] + outputfile = os.path.join(path, basename + '_' + str(idx) + '.bmp') + + if channels == 3: + img = Image.fromarray(bitmap, 'RGB') + else: + img = Image.fromarray(bitmap, 'L') + + img.save(outputfile) + img.show() + + +def reshape_bitmaps(frame_list, width, height, channels): + """Reshape flat integer arrays. 
+ + Args: + frame_list: list of 1-D arrays to represent raw image data + width: image width in pixels + height: image height in pixels + channels: color channel count + + Returns: + list of numpy arrays to represent bitmap images + """ + + bitmap_list = [] + for frame in frame_list: + shape = (height, width, channels) if channels > 1 else (height, width) + bitmap = np.reshape(frame, shape) + bitmap = np.flip(bitmap, 0) + bitmap_list.append(bitmap) + return bitmap_list + + +def parse_file(inputfile, width, height, channels): + """Convert log file to array of pixels. + + Args: + inputfile: log file to parse + width: image width in pixels + height: image height in pixels + channels: color channel count + + Returns: + list 1-D arrays to represent raw image data. + """ + + data = None + bytes_written = 0 + frame_start = False + frame_stop = False + frame_list = list() + + # collect all pixel data into an int array + for line in inputfile: + if line == '+++ frame +++\n': + frame_start = True + data = np.zeros(height * width * channels, dtype=np.uint8) + bytes_written = 0 + continue + elif line == '--- frame ---\n': + frame_stop = True + + if frame_start and not frame_stop: + linelist = re.findall(r"[\w']+", line) + + if len(linelist) != 17: + # drop this frame + frame_start = False + continue + + for item in linelist[1:]: + data[bytes_written] = int(item, base=16) + bytes_written += 1 + + elif frame_start and frame_stop: + if bytes_written == height * width * channels: + frame_list.append(data) + frame_start = False + frame_stop = False + + return frame_list + + +def main(): + parser = argparse.ArgumentParser( + description='This program converts raw data from HM01B0 to a bmp file.') + + parser.add_argument('-i', + '--input', + dest='inputfile', + required=True, + help='input file', + metavar='FILE', + type=check_file_existence) + + parser.add_argument( + '-r', + '--resolution', + dest='resolution', + required=False, + help='Resolution', + choices=['QVGA', 'RGB', 
'GRAY'], + default='QVGA', + ) + + parser.add_argument('-v', + '--version', + help='Program version', + action='version', + version='%(prog)s {ver}'.format(ver='v%d.%d' % + (_VERSION, _SUBVERSION))) + + args = parser.parse_args() + + (width, height, + channels) = _DICT_RESOLUTIONS.get(args.resolution, + ('Resolution not supported', 0, 0, 0)) + frame_list = parse_file(open(args.inputfile), width, height, channels) + bitmap_list = reshape_bitmaps(frame_list, width, height, channels) + show_and_save_bitmaps(args.inputfile, bitmap_list, channels) + + +if __name__ == '__main__': + main() diff --git a/tensorflow/lite/micro/examples/person_detection/utils/raw_to_bitmap_test.py b/tensorflow/lite/micro/examples/person_detection/utils/raw_to_bitmap_test.py new file mode 100644 index 0000000..80320ba --- /dev/null +++ b/tensorflow/lite/micro/examples/person_detection/utils/raw_to_bitmap_test.py @@ -0,0 +1,120 @@ +# Copyright 2018 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# ============================================================================== +"""Tests for raw to bitmap converter utility.""" + +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function + +import io + +import numpy as np + +from tflite_micro.tensorflow.lite.micro.examples.person_detection.utils.raw_to_bitmap import parse_file +from tflite_micro.tensorflow.lite.micro.examples.person_detection.utils.raw_to_bitmap import reshape_bitmaps +from tensorflow.python.platform import googletest + +_RGB_RAW = u""" ++++ frame +++ +0x0000 0x00 0x00 0x00 0x01 0x01 0x01 0x02 0x02 0x02 0x03 0x03 0x03 0x04 0x04 0x04 0x05 +0x0010 0x05 0x05 0x06 0x06 0x06 0x07 0x07 0x07 0x08 0x08 0x08 0x09 0x09 0x09 0x0a 0x0a +0x0020 0x0a 0x0b 0x0b 0x0b 0x0c 0x0c 0x0c 0x0d 0x0d 0x0d 0x0e 0x0e 0x0e 0x0f 0x0f 0x0f +--- frame --- +""" + +_RGB_FLAT = np.array([[ + 0, 0, 0, 1, 1, 1, 2, 2, 2, 3, 3, 3, 4, 4, 4, 5, 5, 5, 6, 6, 6, 7, 7, 7, 8, + 8, 8, 9, 9, 9, 10, 10, 10, 11, 11, 11, 12, 12, 12, 13, 13, 13, 14, 14, 14, + 15, 15, 15 +]]) + +_RGB_RESHAPED = np.array([[[[12, 12, 12], [13, 13, 13], [14, 14, 14], + [15, 15, 15]], + [[8, 8, 8], [9, 9, 9], [10, 10, 10], [11, 11, 11]], + [[4, 4, 4], [5, 5, 5], [6, 6, 6], [7, 7, 7]], + [[0, 0, 0], [1, 1, 1], [2, 2, 2], [3, 3, 3]]]]) + +_GRAYSCALE_RAW = u""" ++++ frame +++ +0x0000 0x00 0x01 0x02 0x03 0x04 0x05 0x06 0x07 0x08 0x09 0x0a 0x0b 0x0c 0x0d 0x0e 0x0f +--- frame --- +""" + +_GRAYSCALE_FLAT = np.array( + [[0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15]]) + +_GRAYSCALE_RESHAPED = np.array([[[12, 13, 14, 15], [8, 9, 10, 11], + [4, 5, 6, 7], [0, 1, 2, 3]]]) + +_GRAYSCALE_RAW_MULTI = u""" ++++ frame +++ +0x0000 0x00 0x01 0x02 0x03 0x04 0x05 0x06 0x07 0x08 0x09 0x0a 0x0b 0x0c 0x0d 0x0e 0x0f +--- frame --- ++++ frame +++ +0x0000 0x10 0x11 0x12 0x13 0x14 0x15 0x16 0x17 0x18 0x19 0x1a 0x1b 0x1c 0x1d 0x1e 0x1f +--- frame --- ++++ frame +++ +0x0000 0x20 0x21 0x22 0x23 0x24 0x25 0x26 0x27 0x28 0x29 0x2a 0x2b 
0x2c 0x2d 0x2e 0x2f +--- frame --- ++++ frame +++ +0x0000 0x30 0x31 0x32 0x33 0x34 0x35 0x36 0x37 0x38 0x39 0x3a 0x3b 0x3c 0x3d 0x3e 0x3f +--- frame --- +""" + +_GRAYSCALE_FLAT_MULTI = [ + np.array([0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15]), + np.array([16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31]), + np.array([32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47]), + np.array([48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63]) +] + +_GRAYSCALE_RESHAPED_MULTI = [ + np.array([[12, 13, 14, 15], [8, 9, 10, 11], [4, 5, 6, 7], [0, 1, 2, 3]]), + np.array([[28, 29, 30, 31], [24, 25, 26, 27], [20, 21, 22, 23], + [16, 17, 18, 19]]), + np.array([[44, 45, 46, 47], [40, 41, 42, 43], [36, 37, 38, 39], + [32, 33, 34, 35]]), + np.array([[60, 61, 62, 63], [56, 57, 58, 59], [52, 53, 54, 55], + [48, 49, 50, 51]]) +] + + +class RawToBitmapTest(googletest.TestCase): + + def test_parse_rgb(self): + frame_list = parse_file(io.StringIO(_RGB_RAW), 4, 4, 3) + self.assertTrue(np.array_equal(_RGB_FLAT, frame_list)) + + def test_parse_grayscale(self): + frame_list = parse_file(io.StringIO(_GRAYSCALE_RAW), 4, 4, 1) + self.assertTrue(np.array_equal(_GRAYSCALE_FLAT, frame_list)) + + def test_reshape_rgb(self): + reshaped = reshape_bitmaps(_RGB_FLAT, 4, 4, 3) + self.assertTrue(np.array_equal(_RGB_RESHAPED, reshaped)) + + def test_reshape_grayscale(self): + reshaped = reshape_bitmaps(_GRAYSCALE_FLAT, 4, 4, 1) + self.assertTrue(np.array_equal(_GRAYSCALE_RESHAPED, reshaped)) + + def test_multiple_grayscale(self): + frame_list = parse_file(io.StringIO(_GRAYSCALE_RAW_MULTI), 4, 4, 1) + self.assertTrue(np.array_equal(_GRAYSCALE_FLAT_MULTI, frame_list)) + reshaped = reshape_bitmaps(frame_list, 4, 4, 1) + self.assertTrue(np.array_equal(_GRAYSCALE_RESHAPED_MULTI, reshaped)) + + +if __name__ == '__main__': + googletest.main() diff --git a/tensorflow/lite/micro/examples/recipes/BUILD b/tensorflow/lite/micro/examples/recipes/BUILD new file mode 100644 
index 0000000..475e552 --- /dev/null +++ b/tensorflow/lite/micro/examples/recipes/BUILD @@ -0,0 +1,33 @@ +load("@tflm_pip_deps//:requirements.bzl", "requirement") + +package( + licenses = ["notice"], +) + +py_library( + name = "resource_variables_lib", + srcs = ["resource_variables_lib.py"], + srcs_version = "PY3", + visibility = ["//:__subpackages__"], + deps = [ + requirement("numpy"), + requirement("tensorflow-cpu"), + ], +) + +py_test( + name = "resource_variables_test", + srcs = ["resource_variables_test.py"], + srcs_version = "PY3", + tags = [ + "noasan", + "nomsan", # Python doesn't like these symbols + "noubsan", + ], + deps = [ + ":resource_variables_lib", + # TODO(b/286456378): update tflm_runtime to runtime when we are ready to + # remove the alias. + "//tensorflow/lite/micro/python/interpreter/src:tflm_runtime", + ], +) diff --git a/tensorflow/lite/micro/examples/recipes/resource_variables_lib.py b/tensorflow/lite/micro/examples/recipes/resource_variables_lib.py new file mode 100644 index 0000000..390b90d --- /dev/null +++ b/tensorflow/lite/micro/examples/recipes/resource_variables_lib.py @@ -0,0 +1,118 @@ +# Copyright 2023 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================= +"""Simple TF model creation using resource variables. + +Model is built either with basic TF functions (concrete function model), or via +Keras. 
The model simply mimics an accumulator (via the persistent memory / state +functionality of resource variables), taking in two inputs: +1) A boolean for choosing addition/subtraction. +2) The value to add/subtract from the accumulator variable. + + +Useful links: +https://www.tensorflow.org/lite/models/convert/convert_models#convert_concrete_functions_ +https://www.tensorflow.org/guide/function#creating_tfvariables +https://www.tensorflow.org/api_docs/python/tf/Variable +https://www.tensorflow.org/api_docs/python/tf/function +""" + +import numpy as np +import tensorflow as tf + + +class CompareAndAccumulate(tf.Module): + """Accumulates a given value to the resource variable array (initialized as 0.). + + Accumulates add/subtract based on second boolean input. + """ + + def __init__(self, name): + super().__init__(name=name) + self._accum = tf.Variable( + initial_value=np.zeros((100,), dtype=np.float32), + trainable=False, + name="Accumulator", + dtype=tf.float32, + shape=[100], + ) + + @tf.function( + input_signature=[ + tf.TensorSpec(shape=[100], dtype=tf.float32, name="accum_val"), + tf.TensorSpec(shape=[1], dtype=tf.bool, name="accumulate_add"), + ] + ) + def __call__(self, accum_val, accumulate_add): + if accumulate_add: + self._accum.assign_add(accum_val) + else: + self._accum.assign_sub(accum_val) + return self._accum.read_value() + + +class CompareAndAccumulateKerasLayer(tf.keras.layers.Layer): + """Accumulates a given value to the resource variable array (initialized as 0.). + + Accumulates add/subtract based on second boolean input. 
+ """ + + def __init__(self, name): + super().__init__(name=name) + self._accum = tf.Variable( + initial_value=[np.zeros((100,), dtype=np.float32)], + trainable=False, + name="Accumulator", + dtype=tf.float32, + shape=(1, 100), + ) + + def call(self, accum_val, accumulate_add): + @tf.function + def condtional_accumulate(accum_val, accumulate_add): + if accumulate_add: + self._accum.assign_add(accum_val) + else: + self._accum.assign_sub(accum_val) + condtional_accumulate(accum_val, accumulate_add) + return self._accum.read_value() + + +def get_model_from_concrete_function(): + """Accumulator model built via TF concrete functions.""" + model = CompareAndAccumulate("CompareAndAccumulate") + concrete_func = model.__call__.get_concrete_function() + converter = tf.lite.TFLiteConverter.from_concrete_functions( + [concrete_func], model + ) + return converter.convert() + + +def get_model_from_keras(): + """Accumulator model built via Keras custom layer.""" + input_layer_int = tf.keras.layers.Input( + shape=[100], dtype=tf.float32, name="accum_val" + ) + input_layer_bool = tf.keras.layers.Input( + shape=[1], dtype=tf.bool, name="accumulate_add" + ) + accumulate_out = CompareAndAccumulateKerasLayer("CompareAndAccumulate")( + input_layer_int, input_layer_bool + ) + + model = tf.keras.models.Model( + inputs=[input_layer_int, input_layer_bool], outputs=accumulate_out + ) + converter = tf.lite.TFLiteConverter.from_keras_model(model) + return converter.convert() diff --git a/tensorflow/lite/micro/examples/recipes/resource_variables_test.py b/tensorflow/lite/micro/examples/recipes/resource_variables_test.py new file mode 100644 index 0000000..ad8c79e --- /dev/null +++ b/tensorflow/lite/micro/examples/recipes/resource_variables_test.py @@ -0,0 +1,63 @@ +# Copyright 2023 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================= +import numpy as np + +from tensorflow.python.framework import test_util +from tensorflow.python.platform import test +from tflite_micro.tensorflow.lite.micro.examples.recipes import resource_variables_lib + +# TODO(b/286456378): change tflm_runtime to runtime when we all other usage has +# been updated. +from tflite_micro.tensorflow.lite.micro.python.interpreter.src import tflm_runtime + + +class ResourceVariablesTest(test_util.TensorFlowTestCase): + + # Tests the custom accumulator model. Input conditional is [True], and + # accumulator value is array of 5.0. Given these inputs, we expect the output + # (variable value), to be accumulated by 5.0 each invoke. 
+ def test_resource_variables_model(self): + model_keras = resource_variables_lib.get_model_from_keras() + tflm_interpreter = tflm_runtime.Interpreter.from_bytes(model_keras) + + tflm_interpreter.set_input([[True]], 0) + tflm_interpreter.set_input([np.full((100,), 15.0, dtype=np.float32)], 1) + tflm_interpreter.invoke() + self.assertAllEqual( + tflm_interpreter.get_output(0), + np.full((1, 100), 15.0, dtype=np.float32), + ) + + tflm_interpreter.set_input([[False]], 0) + tflm_interpreter.set_input([np.full((100,), 9.0, dtype=np.float32)], 1) + tflm_interpreter.invoke() + self.assertAllEqual( + tflm_interpreter.get_output(0), + np.full((1, 100), 6.0, dtype=np.float32), + ) + + # resets variables to initial value + tflm_interpreter.reset() + tflm_interpreter.set_input([[True]], 0) + tflm_interpreter.set_input([np.full((100,), 5.0, dtype=np.float32)], 1) + tflm_interpreter.invoke() + self.assertAllEqual( + tflm_interpreter.get_output(0), + np.full((1, 100), 5.0, dtype=np.float32), + ) + + +if __name__ == "__main__": + test.main() diff --git a/tensorflow/lite/micro/fake_micro_context.cc b/tensorflow/lite/micro/fake_micro_context.cc new file mode 100644 index 0000000..03ea6df --- /dev/null +++ b/tensorflow/lite/micro/fake_micro_context.cc @@ -0,0 +1,116 @@ +/* Copyright 2021 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/
+
+#include "tensorflow/lite/micro/fake_micro_context.h"
+
+#include "tensorflow/lite/kernels/internal/compatibility.h"
+#include "tensorflow/lite/micro/arena_allocator/single_arena_buffer_allocator.h"
+#include "tensorflow/lite/micro/micro_allocator.h"
+#include "tensorflow/lite/micro/micro_arena_constants.h"
+#include "tensorflow/lite/micro/micro_log.h"
+
+namespace tflite {
+namespace {
+// Dummy static variables to allow creation of dummy MicroAllocator.
+// All tests are guaranteed to run serially.
+static constexpr int KDummyTensorArenaSize = 256;
+static uint8_t dummy_tensor_arena[KDummyTensorArenaSize];
+}  // namespace
+
+FakeMicroContext::FakeMicroContext(TfLiteTensor* tensors,
+                                   SingleArenaBufferAllocator* allocator,
+                                   MicroGraph* micro_graph)
+    : MicroContext(
+          MicroAllocator::Create(dummy_tensor_arena, KDummyTensorArenaSize),
+          nullptr, micro_graph),
+      tensors_(tensors),
+      allocator_(allocator) {}
+
+TfLiteTensor* FakeMicroContext::AllocateTempTfLiteTensor(int tensor_index) {
+  allocated_temp_count_++;
+  return &tensors_[tensor_index];
+}
+
+void FakeMicroContext::DeallocateTempTfLiteTensor(TfLiteTensor* tensor) {
+  allocated_temp_count_--;
+}
+
+bool FakeMicroContext::IsAllTempTfLiteTensorDeallocated() {
+  return !allocated_temp_count_;
+}
+
+uint8_t* FakeMicroContext::AllocateTempBuffer(size_t size, size_t alignment) {
+  allocated_temp_count_++;
+  return allocator_->AllocateTemp(size, alignment);
+}
+
+void FakeMicroContext::DeallocateTempBuffer(uint8_t* buffer) {
+  allocated_temp_count_--;
+  allocator_->DeallocateTemp(buffer);
+}
+
+TfLiteEvalTensor* FakeMicroContext::GetEvalTensor(int tensor_index) {
+  TfLiteEvalTensor* eval_tensor =
+      reinterpret_cast<TfLiteEvalTensor*>(allocator_->AllocateTemp(
+          sizeof(TfLiteEvalTensor), alignof(TfLiteEvalTensor)));
+  TFLITE_DCHECK(eval_tensor != nullptr);
+
+  // In unit tests, the TfLiteTensor pointer contains the source of truth for
+  // buffers and
values: + eval_tensor->data = tensors_[tensor_index].data; + eval_tensor->dims = tensors_[tensor_index].dims; + eval_tensor->type = tensors_[tensor_index].type; + return eval_tensor; +} + +void* FakeMicroContext::AllocatePersistentBuffer(size_t bytes) { + // FakeMicroContext use SingleArenaBufferAllocator, which does not + // automatically apply the buffer alignment like MicroAllocator. The buffer + // alignment is potentially wasteful but allows the fake_micro_context to work + // correctly with optimized kernels. + return allocator_->AllocatePersistentBuffer(bytes, + MicroArenaBufferAlignment()); +} + +TfLiteStatus FakeMicroContext::RequestScratchBufferInArena(size_t bytes, + int* buffer_index) { + TFLITE_DCHECK(buffer_index != nullptr); + + if (scratch_buffer_count_ == kNumScratchBuffers_) { + MicroPrintf("Exceeded the maximum number of scratch tensors allowed (%d).", + kNumScratchBuffers_); + return kTfLiteError; + } + + // For tests, we allocate scratch buffers from the tail and keep them around + // for the lifetime of model. This means that the arena size in the tests will + // be more than what we would have if the scratch buffers could share memory. + scratch_buffers_[scratch_buffer_count_] = + allocator_->AllocatePersistentBuffer(bytes, MicroArenaBufferAlignment()); + TFLITE_DCHECK(scratch_buffers_[scratch_buffer_count_] != nullptr); + + *buffer_index = scratch_buffer_count_++; + return kTfLiteOk; +} + +void* FakeMicroContext::GetScratchBuffer(int buffer_index) { + TFLITE_DCHECK(scratch_buffer_count_ <= kNumScratchBuffers_); + if (buffer_index >= scratch_buffer_count_) { + return nullptr; + } + return scratch_buffers_[buffer_index]; +} + +} // namespace tflite diff --git a/tensorflow/lite/micro/fake_micro_context.h b/tensorflow/lite/micro/fake_micro_context.h new file mode 100644 index 0000000..b068f32 --- /dev/null +++ b/tensorflow/lite/micro/fake_micro_context.h @@ -0,0 +1,63 @@ +/* Copyright 2021 The TensorFlow Authors. All Rights Reserved. 
+ +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#ifndef TENSORFLOW_LITE_MICRO_FAKE_MICRO_CONTEXT_H_ +#define TENSORFLOW_LITE_MICRO_FAKE_MICRO_CONTEXT_H_ + +#include "tensorflow/lite/micro/micro_context.h" +#include "tensorflow/lite/micro/micro_graph.h" + +namespace tflite { +// A fake of MicroContext for kernel util tests. +// TODO(b/272759060): FakeMicroContext currently inherits from MicroContext. +// Which allow tests to use functions from MicroContext that weren't added to +// FakeMicroContext in tests. This should be looked into further. 
// Fake implementation of MicroContext for kernel-utility unit tests. Wraps a
// caller-provided tensor array and a SingleArenaBufferAllocator.
class FakeMicroContext : public MicroContext {
 public:
  FakeMicroContext(TfLiteTensor* tensors, SingleArenaBufferAllocator* allocator,
                   MicroGraph* micro_graph);

  // Persistent / scratch buffer management (see fake_micro_context.cc: scratch
  // buffers are persistently allocated for the lifetime of the test).
  void* AllocatePersistentBuffer(size_t bytes) override;
  TfLiteStatus RequestScratchBufferInArena(size_t bytes,
                                           int* buffer_index) override;
  void* GetScratchBuffer(int buffer_index) override;

  // Temporary tensor / buffer bookkeeping.
  TfLiteTensor* AllocateTempTfLiteTensor(int tensor_index) override;
  void DeallocateTempTfLiteTensor(TfLiteTensor* tensor) override;
  // Returns true when every temp tensor allocated above has been returned.
  bool IsAllTempTfLiteTensorDeallocated();

  uint8_t* AllocateTempBuffer(size_t size, size_t alignment) override;
  void DeallocateTempBuffer(uint8_t* buffer) override;

  TfLiteEvalTensor* GetEvalTensor(int tensor_index) override;

 private:
  // Maximum number of scratch buffers a single test may request.
  static constexpr int kNumScratchBuffers_ = 12;

  int scratch_buffer_count_ = 0;
  uint8_t* scratch_buffers_[kNumScratchBuffers_];

  // Non-owning pointer to the caller-provided tensor array.
  TfLiteTensor* tensors_;
  int allocated_temp_count_ = 0;

  // Non-owning; supplies both persistent and temp allocations.
  SingleArenaBufferAllocator* allocator_;

  TF_LITE_REMOVE_VIRTUAL_DELETE
};

}  // namespace tflite

#endif  // TENSORFLOW_LITE_MICRO_FAKE_MICRO_CONTEXT_H_
+==============================================================================*/ +#include "tensorflow/lite/micro/fake_micro_context.h" + +#include + +#include "tensorflow/lite/micro/arena_allocator/single_arena_buffer_allocator.h" +#include "tensorflow/lite/micro/micro_allocator.h" +#include "tensorflow/lite/micro/test_helpers.h" +#include "tensorflow/lite/micro/testing/micro_test.h" + +namespace tflite { +namespace { +using ::tflite::testing::CreateTensor; +using ::tflite::testing::IntArrayFromInts; + +tflite::FakeMicroContext CreateFakeMicroContext( + SingleArenaBufferAllocator* simple_memory_allocator, + MicroGraph* micro_graph) { + // Some targets do not support dynamic memory (i.e., no malloc or new), thus, + // the test need to place non-transitent memories in static variables. This is + // safe because tests are guarateed to run serially. + // Below structures are trivially destructible. + static TfLiteTensor tensors[2]; + static int input_shape[] = {1, 3}; + static int input_data[] = {1, 2, 3}; + + static int output_shape[] = {1, 3}; + static float output_data[3]; + + tensors[0] = CreateTensor(input_data, IntArrayFromInts(input_shape)); + tensors[1] = CreateTensor(output_data, IntArrayFromInts(output_shape)); + + tflite::FakeMicroContext fake_micro_context(tensors, simple_memory_allocator, + micro_graph); + return fake_micro_context; +} + +} // namespace +} // namespace tflite + +TF_LITE_MICRO_TESTS_BEGIN + +TF_LITE_MICRO_TEST(TestGetBeforeRequestScratchBufferWouldReturnNull) { + constexpr size_t kArenaSize = 1024; + uint8_t arena_buffer[kArenaSize]; + tflite::SingleArenaBufferAllocator simple_memory_allocator(arena_buffer, + kArenaSize); + tflite::MicroGraph dummy_micro_graph(nullptr, nullptr, nullptr, nullptr); + + tflite::FakeMicroContext micro_context = tflite::CreateFakeMicroContext( + &simple_memory_allocator, &dummy_micro_graph); + + TF_LITE_MICRO_EXPECT(micro_context.GetScratchBuffer(0) == nullptr); +} + 
TF_LITE_MICRO_TEST(TestRequestScratchBufferAndThenGetShouldSucceed) {
  constexpr size_t kArenaSize = 1024;
  uint8_t arena_buffer[kArenaSize];
  tflite::SingleArenaBufferAllocator simple_memory_allocator(arena_buffer,
                                                             kArenaSize);
  tflite::MicroGraph dummy_micro_graph(nullptr, nullptr, nullptr, nullptr);

  tflite::FakeMicroContext micro_context = tflite::CreateFakeMicroContext(
      &simple_memory_allocator, &dummy_micro_graph);

  // Two successive requests must hand out non-null buffers with consecutive
  // indices starting at 0.
  constexpr size_t kScratchBufferSize = 16;
  int scratch_buffer_index = -1;
  TF_LITE_MICRO_EXPECT_EQ(micro_context.RequestScratchBufferInArena(
                              kScratchBufferSize, &scratch_buffer_index),
                          kTfLiteOk);
  TF_LITE_MICRO_EXPECT_EQ(scratch_buffer_index, 0);
  TF_LITE_MICRO_EXPECT(micro_context.GetScratchBuffer(scratch_buffer_index) !=
                       nullptr);

  TF_LITE_MICRO_EXPECT_EQ(micro_context.RequestScratchBufferInArena(
                              kScratchBufferSize, &scratch_buffer_index),
                          kTfLiteOk);
  TF_LITE_MICRO_EXPECT_EQ(scratch_buffer_index, 1);
  TF_LITE_MICRO_EXPECT(micro_context.GetScratchBuffer(scratch_buffer_index) !=
                       nullptr);
}

TF_LITE_MICRO_TESTS_END
+==============================================================================*/ + +#include "tensorflow/lite/micro/flatbuffer_utils.h" + +namespace tflite { + +FlexbufferWrapper::FlexbufferWrapper(const uint8_t* buffer, size_t size) + : flexbuffers::Vector(flexbuffers::GetRoot(buffer, size).AsVector()) {} + +int64_t FlexbufferWrapper::ElementAsInt64(size_t i) const { + const uint8_t* elem = data_ + i * byte_width_; + return ::flexbuffers::ReadInt64(elem, byte_width_); +} + +uint64_t FlexbufferWrapper::ElementAsUInt64(size_t i) const { + const uint8_t* elem = data_ + i * byte_width_; + return ::flexbuffers::ReadUInt64(elem, byte_width_); +} + +int32_t FlexbufferWrapper::ElementAsInt32(size_t i) const { + return static_cast(ElementAsInt64(i)); +} + +bool FlexbufferWrapper::ElementAsBool(size_t i) const { + return static_cast(ElementAsUInt64(i)); +} + +double FlexbufferWrapper::ElementAsDouble(size_t i) const { + const uint8_t* elem = data_ + i * byte_width_; + return ::flexbuffers::ReadDouble(elem, byte_width_); +} + +float FlexbufferWrapper::ElementAsFloat(size_t i) const { + return static_cast(FlexbufferWrapper::ElementAsDouble(i)); +} + +// TODO(b/192589496): Ops must always be there. Remove this function when fixed +uint32_t NumSubgraphOperators(const SubGraph* subgraph) { + if (subgraph->operators() != nullptr) { + return subgraph->operators()->size(); + } else { + return 0; + } +} +// TODO(b/192589496): Ops must always be there. Remove this function when fixed +uint32_t NumSubgraphOperators(const Model* model, int subgraph_idx) { + const SubGraph* subgraph = model->subgraphs()->Get(subgraph_idx); + return NumSubgraphOperators(subgraph); +} + +TfLiteIntArray* FlatBufferVectorToTfLiteTypeArray( + const flatbuffers::Vector* flatbuffer_array) { + // On little-endian machines, TfLiteIntArray happens to have the same memory + // layout as flatbuffers:Vector, so we can reinterpret_cast the + // flatbuffer vector and avoid a copy and malloc. 
+ // TODO(b/188459715): audit this usage of const_cast. + return const_cast( + reinterpret_cast(flatbuffer_array)); +} + +TfLiteFloatArray* FlatBufferVectorToTfLiteTypeArray( + const flatbuffers::Vector* flatbuffer_array) { + // On little-endian machines, TfLiteFloatArray happens to have the same memory + // layout as flatbuffers:Vector, so we can reinterpret_cast the + // flatbuffer vector and avoid a copy and malloc. + // TODO(b/188459715): audit this usage of const_cast. + return const_cast( + reinterpret_cast(flatbuffer_array)); +} + +} // namespace tflite diff --git a/tensorflow/lite/micro/flatbuffer_utils.h b/tensorflow/lite/micro/flatbuffer_utils.h new file mode 100644 index 0000000..b4e0cdc --- /dev/null +++ b/tensorflow/lite/micro/flatbuffer_utils.h @@ -0,0 +1,65 @@ +/* Copyright 2021 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#ifndef THIRD_PARTY_TFLITE_MICRO_TENSORFLOW_LITE_MICRO_FLATBUFFER_UTILS_H_ +#define THIRD_PARTY_TFLITE_MICRO_TENSORFLOW_LITE_MICRO_FLATBUFFER_UTILS_H_ + +#include "flatbuffers/flatbuffers.h" +#include "flatbuffers/flexbuffers.h" +#include "tensorflow/lite/c/common.h" +#include "tensorflow/lite/schema/schema_generated.h" + +namespace tflite { +// Kernels use flexbuffers::Map to pack their init parameters in a tflite file, +// with the parameter names as map keys and the parameter values as the +// corresponding map values. 
// Accessing the map values using the flexbuffers::Map class is inline heavy,
// which can cause the code size to bloat beyond what's reasonable for a micro
// application. Use this class instead, when possible.
//
// FlexbufferWrapper takes advantage of the following properties of
// flexbuffers::Map:
// 1. It can be viewed as a flexbuffers::Vector of the values.
// 2. The values in the vector are ordered alphabetically by their keys.
// 3. All integer and Boolean values are stored as 64-bit numbers.
// 4. All floating point values are stored as double precision numbers.
//
// The properties are mentioned in the flexbuffers docs, but we rely on a unit
// test to catch design changes.
class FlexbufferWrapper : public flexbuffers::Vector {
 public:
  // Construct with a serialized flexbuffer 'buffer' of 'size' bytes.
  explicit FlexbufferWrapper(const uint8_t* buffer, size_t size);
  int64_t ElementAsInt64(size_t i) const;
  uint64_t ElementAsUInt64(size_t i) const;
  int32_t ElementAsInt32(size_t i) const;
  bool ElementAsBool(size_t i) const;
  double ElementAsDouble(size_t i) const;
  float ElementAsFloat(size_t i) const;
};

// Return the number of operators in a subgraph tflite
uint32_t NumSubgraphOperators(const SubGraph* subgraph);
uint32_t NumSubgraphOperators(const Model* model, int subgraph_idx);

// Converts a flatbuffer array to a TfLiteArray.
// TODO(b/188459715): These functions convert a const input to a non-const via
// a const_cast. It is unclear exactly why this is required.
+TfLiteIntArray* FlatBufferVectorToTfLiteTypeArray( + const flatbuffers::Vector* flatbuffer_array); +TfLiteFloatArray* FlatBufferVectorToTfLiteTypeArray( + const flatbuffers::Vector* flatbuffer_array); + +} // namespace tflite + +#endif // THIRD_PARTY_TFLITE_MICRO_TENSORFLOW_LITE_MICRO_FLATBUFFER_UTILS_H_ diff --git a/tensorflow/lite/micro/flatbuffer_utils_test.cc b/tensorflow/lite/micro/flatbuffer_utils_test.cc new file mode 100644 index 0000000..faebce7 --- /dev/null +++ b/tensorflow/lite/micro/flatbuffer_utils_test.cc @@ -0,0 +1,81 @@ +/* Copyright 2021 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/ + +#include "tensorflow/lite/micro/flatbuffer_utils.h" + +#include + +#include "flatbuffers/flexbuffers.h" +#include "tensorflow/lite/micro/test_helpers.h" +#include "tensorflow/lite/micro/testing/micro_test.h" + +TF_LITE_MICRO_TESTS_BEGIN + +TF_LITE_MICRO_TEST(TestFlexbufferWrapper) { + struct TestParam { + std::string name; + std::string type; + std::string value; + }; + + TestParam params[] = { + {"xyz", "Int", "613"}, + {"Neuron", "Double", "13.22"}, + {"angle", "Int", "300"}, + {"llama", "Bool", "false"}, + {"Curl", "Float", "0.232"}, + {"aardvark", "Bool", "true"}, + {"ghost", "Double", "0.0000000001"}, + {"123stigma", "Bool", "true"}, + }; + // Index of elements sorted alphabetically by name + int params_sorted[] = {7, 4, 1, 5, 2, 6, 3, 0}; + + const int param_num = sizeof(params) / sizeof(params[0]); + + flexbuffers::Builder fbb; + fbb.Map([&]() { + for (int i = 0; i < param_num; i++) { + const std::string& param_value = params[i].value; + if (params[i].type == "Int") { + fbb.Int(params[i].name.c_str(), std::stoi(param_value)); + } else if (params[i].type == "Bool") { + fbb.Bool(params[i].name.c_str(), param_value == "true"); + } else if (params[i].type == "Double") { + fbb.Double(params[i].name.c_str(), std::stod(param_value)); + } else if (params[i].type == "Float") { + fbb.Float(params[i].name.c_str(), std::stof(param_value)); + } + } + }); + fbb.Finish(); + const std::vector buffer = fbb.GetBuffer(); + tflite::FlexbufferWrapper wrapper(buffer.data(), buffer.size()); + for (int i = 0; i < param_num; i++) { + std::string& param_value = params[params_sorted[i]].value; + if (params[params_sorted[i]].type == "Int") { + TF_LITE_MICRO_EXPECT(wrapper.ElementAsInt32(i) == std::stoi(param_value)); + } else if (params[params_sorted[i]].type == "Bool") { + TF_LITE_MICRO_EXPECT(wrapper.ElementAsBool(i) == (param_value == "true")); + } else if (params[params_sorted[i]].type == "Double") 
{ + TF_LITE_MICRO_EXPECT(wrapper.ElementAsDouble(i) == + std::stod(param_value)); + } else if (params[params_sorted[i]].type == "Float") { + TF_LITE_MICRO_EXPECT(wrapper.ElementAsFloat(i) == std::stof(param_value)); + } + } +} + +TF_LITE_MICRO_TESTS_END diff --git a/tensorflow/lite/micro/hexagon/micro_time.cc b/tensorflow/lite/micro/hexagon/micro_time.cc new file mode 100644 index 0000000..f4fc42e --- /dev/null +++ b/tensorflow/lite/micro/hexagon/micro_time.cc @@ -0,0 +1,23 @@ +/* Copyright 2021 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ +// This file is empty to ensure that a specialized implementation of +// micro_time.h is used (instead of the default implementation from +// tensorflow/lite/micro/micro_time.cc). +// +// The actual target-specific implementation of micro_time.h is in +// system_setup.cc since that allows us to consolidate all the target-specific +// specializations into one source file. +// +// diff --git a/tensorflow/lite/micro/hexagon/system_setup.cc b/tensorflow/lite/micro/hexagon/system_setup.cc new file mode 100644 index 0000000..0ce5d18 --- /dev/null +++ b/tensorflow/lite/micro/hexagon/system_setup.cc @@ -0,0 +1,45 @@ +/* Copyright 2021 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. 
+You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#include "tensorflow/lite/micro/system_setup.h" + +#ifndef TF_LITE_STRIP_ERROR_STRINGS +#include "q6sim_timer.h" // NOLINT +#endif // TF_LITE_STRIP_ERROR_STRINGS + +#include "tensorflow/lite/micro/debug_log.h" +#include "tensorflow/lite/micro/micro_time.h" + +namespace tflite { + +// Calling this method enables a timer that runs for eternity. +void InitializeTarget() { +#ifndef TF_LITE_STRIP_ERROR_STRINGS + hexagon_sim_init_timer(); + hexagon_sim_start_timer(); +#endif // TF_LITE_STRIP_ERROR_STRINGS +} + +uint32_t ticks_per_second() { return 1000000; } + +uint32_t GetCurrentTimeTicks() { +#ifndef TF_LITE_STRIP_ERROR_STRINGS + return static_cast(hexagon_sim_read_cycles()); +#else + return 0; +#endif // TF_LITE_STRIP_ERROR_STRINGS +} + +} // namespace tflite diff --git a/tensorflow/lite/micro/integration_tests/BUILD b/tensorflow/lite/micro/integration_tests/BUILD new file mode 100644 index 0000000..1e96ba8 --- /dev/null +++ b/tensorflow/lite/micro/integration_tests/BUILD @@ -0,0 +1,27 @@ +load("@tflm_pip_deps//:requirements.bzl", "requirement") + +package(licenses = ["notice"]) + +py_binary( + name = "generate_per_layer_tests", + srcs = [ + "generate_per_layer_tests.py", + ], + data = [ + "templates/BUILD.mako", + "templates/integration_tests_cc.mako", + ], + python_version = "PY3", + srcs_version = "PY3", + visibility = ["//:__subpackages__"], + deps = [ + "@absl_py//absl:app", + "@absl_py//absl/flags", + requirement("mako"), + requirement("tensorflow-cpu"), + 
"//tensorflow/lite/micro/tools:generate_test_for_model", + "//tensorflow/lite/python:schema_py", + "//tensorflow/lite/python:schema_util", + "//tensorflow/lite/tools:flatbuffer_utils", + ], +) diff --git a/tensorflow/lite/micro/integration_tests/generate_per_layer_tests.py b/tensorflow/lite/micro/integration_tests/generate_per_layer_tests.py new file mode 100644 index 0000000..b1e7df6 --- /dev/null +++ b/tensorflow/lite/micro/integration_tests/generate_per_layer_tests.py @@ -0,0 +1,232 @@ +# Copyright 2022 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
import os
import sys
import copy
import csv

from absl import app
from absl import flags
import numpy as np
import tensorflow as tf
import random as rand
from mako import template

from tensorflow.python.platform import gfile
from tflite_micro.tensorflow.lite.python import schema_py_generated as schema_fb
from tflite_micro.tensorflow.lite.python import schema_util
from tflite_micro.tensorflow.lite.tools import flatbuffer_utils
from tflite_micro.tensorflow.lite.micro.tools import generate_test_for_model

# Absolute path of the mako templates used to render the generated files.
TEMPLATE_DIR = os.path.join(os.path.dirname(__file__), 'templates')
TEMPLATE_DIR = os.path.abspath(TEMPLATE_DIR)


def BytesFromFlatbufferType(tensor_type):
  # Returns the per-element size in bytes for a schema TensorType; raises
  # RuntimeError for types not listed below.
  if tensor_type in (schema_fb.TensorType.INT8, schema_fb.TensorType.UINT8,
                     schema_fb.TensorType.BOOL):
    return 1
  elif tensor_type in (schema_fb.TensorType.INT16,
                       schema_fb.TensorType.FLOAT16):
    return 2
  elif tensor_type in (schema_fb.TensorType.FLOAT32,
                       schema_fb.TensorType.INT32,
                       schema_fb.TensorType.UINT32):
    return 4
  elif tensor_type in (schema_fb.TensorType.FLOAT64,
                       schema_fb.TensorType.INT64,
                       schema_fb.TensorType.COMPLEX64,
                       schema_fb.TensorType.UINT64):
    return 8
  else:
    raise RuntimeError(f'Unsupported TensorType: {tensor_type}')


class TestModelGenerator:
  """Generates test data from tflite file."""

  def __init__(self, model, output_dir, inputs):
    # model: parsed schema_fb.ModelT to slice layers out of.
    # output_dir: directory where generated .tflite files are written.
    # inputs: op-input positions treated as graph inputs (their data is
    #         cleared in the generated model).
    self.model = model
    self.output_dir = output_dir
    # Monotonic counter used to name successive generated models.
    self.op_idx = 0
    self.inputs = inputs

  def generate_single_layer_model(self, model, subgraph, op, opcode_idx):
    # Extracts the single operator 'op' from 'subgraph' into a standalone
    # one-op model, writes it to disk, and returns the written path.
    generated_model = schema_fb.ModelT()
    generated_model.buffers = []
    generated_model.version = 3

    # Create subgraph.
    generated_subgraph = schema_fb.SubGraphT()
    generated_subgraph.inputs = self.inputs
    # The single output tensor is appended right after the inputs.
    generated_subgraph.outputs = [len(op.inputs)]
    generated_subgraph.tensors = []
    for input_idx, tensor_idx in enumerate(op.inputs):
      tensor = copy.deepcopy(subgraph.tensors[tensor_idx])
      tensor.buffer = len(generated_model.buffers)
      buffer = copy.deepcopy(
          model.buffers[subgraph.tensors[tensor_idx].buffer])
      if input_idx in self.inputs:
        # Graph inputs carry no constant data.
        buffer.data = None
      bytes_per_element = BytesFromFlatbufferType(tensor.type)
      if buffer.data is not None and len(tensor.shape) > 2:
        # Randomly scale constant (weight-like) buffers so generated models do
        # not all share identical weights.
        for i in range(len(buffer.data)):
          buffer.data[i] = buffer.data[i] * np.random.uniform(
              low=0.5, high=1.0, size=1)

        # Sanity check: the perturbed buffer must differ from the original.
        all_equal = True
        for i, elem in enumerate(buffer.data):
          all_equal = all_equal and elem == model.buffers[
              subgraph.tensors[tensor_idx].buffer].data[i]
        assert not all_equal

      generated_model.buffers.append(buffer)
      generated_subgraph.tensors.append(tensor)

    for tensor_idx in op.outputs:
      tensor = copy.deepcopy(subgraph.tensors[tensor_idx])
      tensor.buffer = len(generated_model.buffers)
      buffer = copy.deepcopy(
          model.buffers[subgraph.tensors[tensor_idx].buffer])
      generated_model.buffers.append(buffer)
      generated_subgraph.tensors.append(tensor)

    # Create op.
    generated_op = copy.deepcopy(op)
    # Tensor indices are renumbered 0..len(inputs) in the new model.
    generated_op.inputs = [i for i in range(len(op.inputs))]
    generated_op.outputs = [len(op.inputs)]
    generated_op.opcodeIndex = 0
    generated_subgraph.operators = [generated_op]

    generated_model.subgraphs = [generated_subgraph]
    generated_model.operatorCodes = [model.operatorCodes[opcode_idx]]
    model_name = self.output_dir + '/' + self.output_dir.split('/')[-1] + str(
        self.op_idx) + '.tflite'
    self.op_idx += 1
    flatbuffer_utils.write_model(generated_model, model_name)
    return model_name

  def get_opcode_idx(self, builtin_operator):
    # Returns the index of 'builtin_operator' in the model's operatorCodes;
    # implicitly returns None if the operator is not present.
    for idx, opcode in enumerate(self.model.operatorCodes):
      if schema_util.get_builtin_code_from_operator_code(
          opcode) == builtin_operator:
        return idx

  def generate_models(self, subgraph_idx, builtin_operator):
    # Writes one single-layer model per matching operator in the subgraph and
    # returns the list of generated model paths.
    subgraph = self.model.subgraphs[subgraph_idx]
    opcode_idx = self.get_opcode_idx(builtin_operator)
    output_models = []
    for op in subgraph.operators:
      if op.opcodeIndex == opcode_idx:
        output_models.append(
            self.generate_single_layer_model(self.model, subgraph, op,
                                             opcode_idx))
    return output_models


class PerLayerTestGenerator(generate_test_for_model.TestDataGenerator):

  def generate_tests(self):
    # Renders templates/integration_tests_cc.mako into integration_tests.cc
    # covering every generated model.
    # Collect all target names into a list.
    targets = []
    targets_with_path = []
    for model_path in self.model_paths:
      targets.append(model_path.split('/')[-1].split('.')[0])
      targets_with_path.append(
          model_path.split('tflite_micro/')[-1].split('tflite-micro/')
          [-1].split('.')[0])

    template_file_path = os.path.join(TEMPLATE_DIR,
                                      'integration_tests_cc.mako')
    build_template = template.Template(filename=template_file_path)
    with open(self.output_dir + '/integration_tests.cc', 'w') as file_obj:
      key_values_in_template = {
          'targets': targets,
          'targets_with_path': targets_with_path,
          'inputs': self.inputs,
          'input_dtypes': self.input_types,
          'output_dtype': self.output_type
      }
      file_obj.write(build_template.render(**key_values_in_template))
def generate_build_file(self): + # Collect all target names into a list + targets = [] + for model_path in self.model_paths: + target_name = model_path.split('/')[-1].split('.')[0] + targets.append(target_name) + + template_file_path = os.path.join(TEMPLATE_DIR, 'BUILD.mako') + build_template = template.Template(filename=template_file_path) + with open(self.output_dir + '/BUILD', 'w') as file_obj: + key_values_in_template = { + 'targets': targets, + 'inputs': self.inputs, + 'input_dtypes': self.input_types, + 'output_dtype': self.output_type + } + file_obj.write(build_template.render(**key_values_in_template)) + + +def op_info_from_name(name): + if 'transpose_conv' in name: + return [[0, 2], schema_fb.BuiltinOperator.TRANSPOSE_CONV] + elif 'depthwise_conv' in name: + return [[0], schema_fb.BuiltinOperator.DEPTHWISE_CONV_2D] + elif 'conv' in name: + return [[0], schema_fb.BuiltinOperator.CONV_2D] + elif 'add' in name: + return [[0, 1], schema_fb.BuiltinOperator.ADD] + elif 'sub' in name: + return [[0, 1], schema_fb.BuiltinOperator.SUB] + elif 'strided_slice' in name: + return [[0], schema_fb.BuiltinOperator.STRIDED_SLICE] + elif 'leaky_relu' in name: + return [[0], schema_fb.BuiltinOperator.LEAKY_RELU] + elif 'pad' in name: + return [[0], schema_fb.BuiltinOperator.PAD] + else: + raise RuntimeError(f'Unsupported op: {name}') + + +FLAGS = flags.FLAGS + +flags.DEFINE_string('input_tflite_file', None, + 'Full path name to the input TFLite file.') +flags.DEFINE_string('output_dir', None, 'directory to output generated files') + +flags.mark_flag_as_required('input_tflite_file') +flags.mark_flag_as_required('output_dir') + + +def main(_): + model = flatbuffer_utils.read_model(FLAGS.input_tflite_file) + os.makedirs(FLAGS.output_dir, exist_ok=True) + inputs, builtin_operator = op_info_from_name(FLAGS.output_dir.split('/')[-1]) + generator = TestModelGenerator(model, FLAGS.output_dir, inputs) + model_names = generator.generate_models(0, builtin_operator) + data_generator = 
PerLayerTestGenerator(FLAGS.output_dir, model_names, inputs) + data_generator.generate_goldens(builtin_operator) + data_generator.generate_build_file() + data_generator.generate_makefile() + data_generator.generate_tests() + print( + f'successfully generated integration tests. Output location: {FLAGS.output_dir}' + ) + + +if __name__ == '__main__': + app.run(main) diff --git a/tensorflow/lite/micro/integration_tests/seanet/add/BUILD b/tensorflow/lite/micro/integration_tests/seanet/add/BUILD new file mode 100644 index 0000000..c1fa897 --- /dev/null +++ b/tensorflow/lite/micro/integration_tests/seanet/add/BUILD @@ -0,0 +1,995 @@ +# Description: +# generated integration test for one specific kernel in a model. +load( + "//tensorflow/lite/micro:build_def.bzl", + "generate_cc_arrays", + "micro_copts", +) + +package( + default_visibility = ["//visibility:public"], + # Disabling layering_check because of http://b/177257332 + features = ["-layering_check"], + licenses = ["notice"], +) + +generate_cc_arrays( + name = "generated_add0_model_data_cc", + src = "add0.tflite", + out = "add0_model_data.cc", +) + +generate_cc_arrays( + name = "generated_add0_model_data_hdr", + src = "add0.tflite", + out = "add0_model_data.h", +) + +generate_cc_arrays( + name = "generated_add1_model_data_cc", + src = "add1.tflite", + out = "add1_model_data.cc", +) + +generate_cc_arrays( + name = "generated_add1_model_data_hdr", + src = "add1.tflite", + out = "add1_model_data.h", +) + +generate_cc_arrays( + name = "generated_add2_model_data_cc", + src = "add2.tflite", + out = "add2_model_data.cc", +) + +generate_cc_arrays( + name = "generated_add2_model_data_hdr", + src = "add2.tflite", + out = "add2_model_data.h", +) + +generate_cc_arrays( + name = "generated_add3_model_data_cc", + src = "add3.tflite", + out = "add3_model_data.cc", +) + +generate_cc_arrays( + name = "generated_add3_model_data_hdr", + src = "add3.tflite", + out = "add3_model_data.h", +) + +generate_cc_arrays( + name = 
"generated_add4_model_data_cc", + src = "add4.tflite", + out = "add4_model_data.cc", +) + +generate_cc_arrays( + name = "generated_add4_model_data_hdr", + src = "add4.tflite", + out = "add4_model_data.h", +) + +generate_cc_arrays( + name = "generated_add5_model_data_cc", + src = "add5.tflite", + out = "add5_model_data.cc", +) + +generate_cc_arrays( + name = "generated_add5_model_data_hdr", + src = "add5.tflite", + out = "add5_model_data.h", +) + +generate_cc_arrays( + name = "generated_add6_model_data_cc", + src = "add6.tflite", + out = "add6_model_data.cc", +) + +generate_cc_arrays( + name = "generated_add6_model_data_hdr", + src = "add6.tflite", + out = "add6_model_data.h", +) + +generate_cc_arrays( + name = "generated_add7_model_data_cc", + src = "add7.tflite", + out = "add7_model_data.cc", +) + +generate_cc_arrays( + name = "generated_add7_model_data_hdr", + src = "add7.tflite", + out = "add7_model_data.h", +) + +generate_cc_arrays( + name = "generated_add8_model_data_cc", + src = "add8.tflite", + out = "add8_model_data.cc", +) + +generate_cc_arrays( + name = "generated_add8_model_data_hdr", + src = "add8.tflite", + out = "add8_model_data.h", +) + +generate_cc_arrays( + name = "generated_add9_model_data_cc", + src = "add9.tflite", + out = "add9_model_data.cc", +) + +generate_cc_arrays( + name = "generated_add9_model_data_hdr", + src = "add9.tflite", + out = "add9_model_data.h", +) + +generate_cc_arrays( + name = "generated_add10_model_data_cc", + src = "add10.tflite", + out = "add10_model_data.cc", +) + +generate_cc_arrays( + name = "generated_add10_model_data_hdr", + src = "add10.tflite", + out = "add10_model_data.h", +) + +generate_cc_arrays( + name = "generated_add11_model_data_cc", + src = "add11.tflite", + out = "add11_model_data.cc", +) + +generate_cc_arrays( + name = "generated_add11_model_data_hdr", + src = "add11.tflite", + out = "add11_model_data.h", +) + +generate_cc_arrays( + name = "generated_add12_model_data_cc", + src = "add12.tflite", + out = 
"add12_model_data.cc", +) + +generate_cc_arrays( + name = "generated_add12_model_data_hdr", + src = "add12.tflite", + out = "add12_model_data.h", +) + +generate_cc_arrays( + name = "generated_add13_model_data_cc", + src = "add13.tflite", + out = "add13_model_data.cc", +) + +generate_cc_arrays( + name = "generated_add13_model_data_hdr", + src = "add13.tflite", + out = "add13_model_data.h", +) + +generate_cc_arrays( + name = "generated_add14_model_data_cc", + src = "add14.tflite", + out = "add14_model_data.cc", +) + +generate_cc_arrays( + name = "generated_add14_model_data_hdr", + src = "add14.tflite", + out = "add14_model_data.h", +) + +generate_cc_arrays( + name = "generated_add15_model_data_cc", + src = "add15.tflite", + out = "add15_model_data.cc", +) + +generate_cc_arrays( + name = "generated_add15_model_data_hdr", + src = "add15.tflite", + out = "add15_model_data.h", +) + +generate_cc_arrays( + name = "generated_add16_model_data_cc", + src = "add16.tflite", + out = "add16_model_data.cc", +) + +generate_cc_arrays( + name = "generated_add16_model_data_hdr", + src = "add16.tflite", + out = "add16_model_data.h", +) + +generate_cc_arrays( + name = "generated_add0_input0_int16_test_data_cc", + src = "add0_input0_int16.csv", + out = "add0_input0_int16_test_data.cc", +) + +generate_cc_arrays( + name = "generated_add0_input0_int16_test_data_hdr", + src = "add0_input0_int16.csv", + out = "add0_input0_int16_test_data.h", +) + +generate_cc_arrays( + name = "generated_add0_input1_int16_test_data_cc", + src = "add0_input1_int16.csv", + out = "add0_input1_int16_test_data.cc", +) + +generate_cc_arrays( + name = "generated_add0_input1_int16_test_data_hdr", + src = "add0_input1_int16.csv", + out = "add0_input1_int16_test_data.h", +) + +generate_cc_arrays( + name = "generated_add0_golden_int16_test_data_cc", + src = "add0_golden_int16.csv", + out = "add0_golden_int16_test_data.cc", +) + +generate_cc_arrays( + name = "generated_add0_golden_int16_test_data_hdr", + src = 
"add0_golden_int16.csv", + out = "add0_golden_int16_test_data.h", +) + +generate_cc_arrays( + name = "generated_add1_input0_int16_test_data_cc", + src = "add1_input0_int16.csv", + out = "add1_input0_int16_test_data.cc", +) + +generate_cc_arrays( + name = "generated_add1_input0_int16_test_data_hdr", + src = "add1_input0_int16.csv", + out = "add1_input0_int16_test_data.h", +) + +generate_cc_arrays( + name = "generated_add1_input1_int16_test_data_cc", + src = "add1_input1_int16.csv", + out = "add1_input1_int16_test_data.cc", +) + +generate_cc_arrays( + name = "generated_add1_input1_int16_test_data_hdr", + src = "add1_input1_int16.csv", + out = "add1_input1_int16_test_data.h", +) + +generate_cc_arrays( + name = "generated_add1_golden_int16_test_data_cc", + src = "add1_golden_int16.csv", + out = "add1_golden_int16_test_data.cc", +) + +generate_cc_arrays( + name = "generated_add1_golden_int16_test_data_hdr", + src = "add1_golden_int16.csv", + out = "add1_golden_int16_test_data.h", +) + +generate_cc_arrays( + name = "generated_add2_input0_int16_test_data_cc", + src = "add2_input0_int16.csv", + out = "add2_input0_int16_test_data.cc", +) + +generate_cc_arrays( + name = "generated_add2_input0_int16_test_data_hdr", + src = "add2_input0_int16.csv", + out = "add2_input0_int16_test_data.h", +) + +generate_cc_arrays( + name = "generated_add2_input1_int16_test_data_cc", + src = "add2_input1_int16.csv", + out = "add2_input1_int16_test_data.cc", +) + +generate_cc_arrays( + name = "generated_add2_input1_int16_test_data_hdr", + src = "add2_input1_int16.csv", + out = "add2_input1_int16_test_data.h", +) + +generate_cc_arrays( + name = "generated_add2_golden_int16_test_data_cc", + src = "add2_golden_int16.csv", + out = "add2_golden_int16_test_data.cc", +) + +generate_cc_arrays( + name = "generated_add2_golden_int16_test_data_hdr", + src = "add2_golden_int16.csv", + out = "add2_golden_int16_test_data.h", +) + +generate_cc_arrays( + name = "generated_add3_input0_int16_test_data_cc", + src 
= "add3_input0_int16.csv", + out = "add3_input0_int16_test_data.cc", +) + +generate_cc_arrays( + name = "generated_add3_input0_int16_test_data_hdr", + src = "add3_input0_int16.csv", + out = "add3_input0_int16_test_data.h", +) + +generate_cc_arrays( + name = "generated_add3_input1_int16_test_data_cc", + src = "add3_input1_int16.csv", + out = "add3_input1_int16_test_data.cc", +) + +generate_cc_arrays( + name = "generated_add3_input1_int16_test_data_hdr", + src = "add3_input1_int16.csv", + out = "add3_input1_int16_test_data.h", +) + +generate_cc_arrays( + name = "generated_add3_golden_int16_test_data_cc", + src = "add3_golden_int16.csv", + out = "add3_golden_int16_test_data.cc", +) + +generate_cc_arrays( + name = "generated_add3_golden_int16_test_data_hdr", + src = "add3_golden_int16.csv", + out = "add3_golden_int16_test_data.h", +) + +generate_cc_arrays( + name = "generated_add4_input0_int16_test_data_cc", + src = "add4_input0_int16.csv", + out = "add4_input0_int16_test_data.cc", +) + +generate_cc_arrays( + name = "generated_add4_input0_int16_test_data_hdr", + src = "add4_input0_int16.csv", + out = "add4_input0_int16_test_data.h", +) + +generate_cc_arrays( + name = "generated_add4_input1_int16_test_data_cc", + src = "add4_input1_int16.csv", + out = "add4_input1_int16_test_data.cc", +) + +generate_cc_arrays( + name = "generated_add4_input1_int16_test_data_hdr", + src = "add4_input1_int16.csv", + out = "add4_input1_int16_test_data.h", +) + +generate_cc_arrays( + name = "generated_add4_golden_int16_test_data_cc", + src = "add4_golden_int16.csv", + out = "add4_golden_int16_test_data.cc", +) + +generate_cc_arrays( + name = "generated_add4_golden_int16_test_data_hdr", + src = "add4_golden_int16.csv", + out = "add4_golden_int16_test_data.h", +) + +generate_cc_arrays( + name = "generated_add5_input0_int16_test_data_cc", + src = "add5_input0_int16.csv", + out = "add5_input0_int16_test_data.cc", +) + +generate_cc_arrays( + name = "generated_add5_input0_int16_test_data_hdr", + 
src = "add5_input0_int16.csv", + out = "add5_input0_int16_test_data.h", +) + +generate_cc_arrays( + name = "generated_add5_input1_int16_test_data_cc", + src = "add5_input1_int16.csv", + out = "add5_input1_int16_test_data.cc", +) + +generate_cc_arrays( + name = "generated_add5_input1_int16_test_data_hdr", + src = "add5_input1_int16.csv", + out = "add5_input1_int16_test_data.h", +) + +generate_cc_arrays( + name = "generated_add5_golden_int16_test_data_cc", + src = "add5_golden_int16.csv", + out = "add5_golden_int16_test_data.cc", +) + +generate_cc_arrays( + name = "generated_add5_golden_int16_test_data_hdr", + src = "add5_golden_int16.csv", + out = "add5_golden_int16_test_data.h", +) + +generate_cc_arrays( + name = "generated_add6_input0_int16_test_data_cc", + src = "add6_input0_int16.csv", + out = "add6_input0_int16_test_data.cc", +) + +generate_cc_arrays( + name = "generated_add6_input0_int16_test_data_hdr", + src = "add6_input0_int16.csv", + out = "add6_input0_int16_test_data.h", +) + +generate_cc_arrays( + name = "generated_add6_input1_int16_test_data_cc", + src = "add6_input1_int16.csv", + out = "add6_input1_int16_test_data.cc", +) + +generate_cc_arrays( + name = "generated_add6_input1_int16_test_data_hdr", + src = "add6_input1_int16.csv", + out = "add6_input1_int16_test_data.h", +) + +generate_cc_arrays( + name = "generated_add6_golden_int16_test_data_cc", + src = "add6_golden_int16.csv", + out = "add6_golden_int16_test_data.cc", +) + +generate_cc_arrays( + name = "generated_add6_golden_int16_test_data_hdr", + src = "add6_golden_int16.csv", + out = "add6_golden_int16_test_data.h", +) + +generate_cc_arrays( + name = "generated_add7_input0_int16_test_data_cc", + src = "add7_input0_int16.csv", + out = "add7_input0_int16_test_data.cc", +) + +generate_cc_arrays( + name = "generated_add7_input0_int16_test_data_hdr", + src = "add7_input0_int16.csv", + out = "add7_input0_int16_test_data.h", +) + +generate_cc_arrays( + name = "generated_add7_input1_int16_test_data_cc", 
+ src = "add7_input1_int16.csv", + out = "add7_input1_int16_test_data.cc", +) + +generate_cc_arrays( + name = "generated_add7_input1_int16_test_data_hdr", + src = "add7_input1_int16.csv", + out = "add7_input1_int16_test_data.h", +) + +generate_cc_arrays( + name = "generated_add7_golden_int16_test_data_cc", + src = "add7_golden_int16.csv", + out = "add7_golden_int16_test_data.cc", +) + +generate_cc_arrays( + name = "generated_add7_golden_int16_test_data_hdr", + src = "add7_golden_int16.csv", + out = "add7_golden_int16_test_data.h", +) + +generate_cc_arrays( + name = "generated_add8_input0_int16_test_data_cc", + src = "add8_input0_int16.csv", + out = "add8_input0_int16_test_data.cc", +) + +generate_cc_arrays( + name = "generated_add8_input0_int16_test_data_hdr", + src = "add8_input0_int16.csv", + out = "add8_input0_int16_test_data.h", +) + +generate_cc_arrays( + name = "generated_add8_input1_int16_test_data_cc", + src = "add8_input1_int16.csv", + out = "add8_input1_int16_test_data.cc", +) + +generate_cc_arrays( + name = "generated_add8_input1_int16_test_data_hdr", + src = "add8_input1_int16.csv", + out = "add8_input1_int16_test_data.h", +) + +generate_cc_arrays( + name = "generated_add8_golden_int16_test_data_cc", + src = "add8_golden_int16.csv", + out = "add8_golden_int16_test_data.cc", +) + +generate_cc_arrays( + name = "generated_add8_golden_int16_test_data_hdr", + src = "add8_golden_int16.csv", + out = "add8_golden_int16_test_data.h", +) + +generate_cc_arrays( + name = "generated_add9_input0_int16_test_data_cc", + src = "add9_input0_int16.csv", + out = "add9_input0_int16_test_data.cc", +) + +generate_cc_arrays( + name = "generated_add9_input0_int16_test_data_hdr", + src = "add9_input0_int16.csv", + out = "add9_input0_int16_test_data.h", +) + +generate_cc_arrays( + name = "generated_add9_input1_int16_test_data_cc", + src = "add9_input1_int16.csv", + out = "add9_input1_int16_test_data.cc", +) + +generate_cc_arrays( + name = 
"generated_add9_input1_int16_test_data_hdr", + src = "add9_input1_int16.csv", + out = "add9_input1_int16_test_data.h", +) + +generate_cc_arrays( + name = "generated_add9_golden_int16_test_data_cc", + src = "add9_golden_int16.csv", + out = "add9_golden_int16_test_data.cc", +) + +generate_cc_arrays( + name = "generated_add9_golden_int16_test_data_hdr", + src = "add9_golden_int16.csv", + out = "add9_golden_int16_test_data.h", +) + +generate_cc_arrays( + name = "generated_add10_input0_int16_test_data_cc", + src = "add10_input0_int16.csv", + out = "add10_input0_int16_test_data.cc", +) + +generate_cc_arrays( + name = "generated_add10_input0_int16_test_data_hdr", + src = "add10_input0_int16.csv", + out = "add10_input0_int16_test_data.h", +) + +generate_cc_arrays( + name = "generated_add10_input1_int16_test_data_cc", + src = "add10_input1_int16.csv", + out = "add10_input1_int16_test_data.cc", +) + +generate_cc_arrays( + name = "generated_add10_input1_int16_test_data_hdr", + src = "add10_input1_int16.csv", + out = "add10_input1_int16_test_data.h", +) + +generate_cc_arrays( + name = "generated_add10_golden_int16_test_data_cc", + src = "add10_golden_int16.csv", + out = "add10_golden_int16_test_data.cc", +) + +generate_cc_arrays( + name = "generated_add10_golden_int16_test_data_hdr", + src = "add10_golden_int16.csv", + out = "add10_golden_int16_test_data.h", +) + +generate_cc_arrays( + name = "generated_add11_input0_int16_test_data_cc", + src = "add11_input0_int16.csv", + out = "add11_input0_int16_test_data.cc", +) + +generate_cc_arrays( + name = "generated_add11_input0_int16_test_data_hdr", + src = "add11_input0_int16.csv", + out = "add11_input0_int16_test_data.h", +) + +generate_cc_arrays( + name = "generated_add11_input1_int16_test_data_cc", + src = "add11_input1_int16.csv", + out = "add11_input1_int16_test_data.cc", +) + +generate_cc_arrays( + name = "generated_add11_input1_int16_test_data_hdr", + src = "add11_input1_int16.csv", + out = "add11_input1_int16_test_data.h", +) 
+ +generate_cc_arrays( + name = "generated_add11_golden_int16_test_data_cc", + src = "add11_golden_int16.csv", + out = "add11_golden_int16_test_data.cc", +) + +generate_cc_arrays( + name = "generated_add11_golden_int16_test_data_hdr", + src = "add11_golden_int16.csv", + out = "add11_golden_int16_test_data.h", +) + +generate_cc_arrays( + name = "generated_add12_input0_int16_test_data_cc", + src = "add12_input0_int16.csv", + out = "add12_input0_int16_test_data.cc", +) + +generate_cc_arrays( + name = "generated_add12_input0_int16_test_data_hdr", + src = "add12_input0_int16.csv", + out = "add12_input0_int16_test_data.h", +) + +generate_cc_arrays( + name = "generated_add12_input1_int16_test_data_cc", + src = "add12_input1_int16.csv", + out = "add12_input1_int16_test_data.cc", +) + +generate_cc_arrays( + name = "generated_add12_input1_int16_test_data_hdr", + src = "add12_input1_int16.csv", + out = "add12_input1_int16_test_data.h", +) + +generate_cc_arrays( + name = "generated_add12_golden_int16_test_data_cc", + src = "add12_golden_int16.csv", + out = "add12_golden_int16_test_data.cc", +) + +generate_cc_arrays( + name = "generated_add12_golden_int16_test_data_hdr", + src = "add12_golden_int16.csv", + out = "add12_golden_int16_test_data.h", +) + +generate_cc_arrays( + name = "generated_add13_input0_int16_test_data_cc", + src = "add13_input0_int16.csv", + out = "add13_input0_int16_test_data.cc", +) + +generate_cc_arrays( + name = "generated_add13_input0_int16_test_data_hdr", + src = "add13_input0_int16.csv", + out = "add13_input0_int16_test_data.h", +) + +generate_cc_arrays( + name = "generated_add13_input1_int16_test_data_cc", + src = "add13_input1_int16.csv", + out = "add13_input1_int16_test_data.cc", +) + +generate_cc_arrays( + name = "generated_add13_input1_int16_test_data_hdr", + src = "add13_input1_int16.csv", + out = "add13_input1_int16_test_data.h", +) + +generate_cc_arrays( + name = "generated_add13_golden_int16_test_data_cc", + src = "add13_golden_int16.csv", + 
out = "add13_golden_int16_test_data.cc", +) + +generate_cc_arrays( + name = "generated_add13_golden_int16_test_data_hdr", + src = "add13_golden_int16.csv", + out = "add13_golden_int16_test_data.h", +) + +generate_cc_arrays( + name = "generated_add14_input0_int16_test_data_cc", + src = "add14_input0_int16.csv", + out = "add14_input0_int16_test_data.cc", +) + +generate_cc_arrays( + name = "generated_add14_input0_int16_test_data_hdr", + src = "add14_input0_int16.csv", + out = "add14_input0_int16_test_data.h", +) + +generate_cc_arrays( + name = "generated_add14_input1_int16_test_data_cc", + src = "add14_input1_int16.csv", + out = "add14_input1_int16_test_data.cc", +) + +generate_cc_arrays( + name = "generated_add14_input1_int16_test_data_hdr", + src = "add14_input1_int16.csv", + out = "add14_input1_int16_test_data.h", +) + +generate_cc_arrays( + name = "generated_add14_golden_int16_test_data_cc", + src = "add14_golden_int16.csv", + out = "add14_golden_int16_test_data.cc", +) + +generate_cc_arrays( + name = "generated_add14_golden_int16_test_data_hdr", + src = "add14_golden_int16.csv", + out = "add14_golden_int16_test_data.h", +) + +generate_cc_arrays( + name = "generated_add15_input0_int16_test_data_cc", + src = "add15_input0_int16.csv", + out = "add15_input0_int16_test_data.cc", +) + +generate_cc_arrays( + name = "generated_add15_input0_int16_test_data_hdr", + src = "add15_input0_int16.csv", + out = "add15_input0_int16_test_data.h", +) + +generate_cc_arrays( + name = "generated_add15_input1_int16_test_data_cc", + src = "add15_input1_int16.csv", + out = "add15_input1_int16_test_data.cc", +) + +generate_cc_arrays( + name = "generated_add15_input1_int16_test_data_hdr", + src = "add15_input1_int16.csv", + out = "add15_input1_int16_test_data.h", +) + +generate_cc_arrays( + name = "generated_add15_golden_int16_test_data_cc", + src = "add15_golden_int16.csv", + out = "add15_golden_int16_test_data.cc", +) + +generate_cc_arrays( + name = 
"generated_add15_golden_int16_test_data_hdr", + src = "add15_golden_int16.csv", + out = "add15_golden_int16_test_data.h", +) + +generate_cc_arrays( + name = "generated_add16_input0_int16_test_data_cc", + src = "add16_input0_int16.csv", + out = "add16_input0_int16_test_data.cc", +) + +generate_cc_arrays( + name = "generated_add16_input0_int16_test_data_hdr", + src = "add16_input0_int16.csv", + out = "add16_input0_int16_test_data.h", +) + +generate_cc_arrays( + name = "generated_add16_input1_int16_test_data_cc", + src = "add16_input1_int16.csv", + out = "add16_input1_int16_test_data.cc", +) + +generate_cc_arrays( + name = "generated_add16_input1_int16_test_data_hdr", + src = "add16_input1_int16.csv", + out = "add16_input1_int16_test_data.h", +) + +generate_cc_arrays( + name = "generated_add16_golden_int16_test_data_cc", + src = "add16_golden_int16.csv", + out = "add16_golden_int16_test_data.cc", +) + +generate_cc_arrays( + name = "generated_add16_golden_int16_test_data_hdr", + src = "add16_golden_int16.csv", + out = "add16_golden_int16_test_data.h", +) + +cc_library( + name = "models_and_testdata", + srcs = [ + "generated_add0_golden_int16_test_data_cc", + "generated_add0_input0_int16_test_data_cc", + "generated_add0_input1_int16_test_data_cc", + "generated_add0_model_data_cc", + "generated_add10_golden_int16_test_data_cc", + "generated_add10_input0_int16_test_data_cc", + "generated_add10_input1_int16_test_data_cc", + "generated_add10_model_data_cc", + "generated_add11_golden_int16_test_data_cc", + "generated_add11_input0_int16_test_data_cc", + "generated_add11_input1_int16_test_data_cc", + "generated_add11_model_data_cc", + "generated_add12_golden_int16_test_data_cc", + "generated_add12_input0_int16_test_data_cc", + "generated_add12_input1_int16_test_data_cc", + "generated_add12_model_data_cc", + "generated_add13_golden_int16_test_data_cc", + "generated_add13_input0_int16_test_data_cc", + "generated_add13_input1_int16_test_data_cc", + 
"generated_add13_model_data_cc", + "generated_add14_golden_int16_test_data_cc", + "generated_add14_input0_int16_test_data_cc", + "generated_add14_input1_int16_test_data_cc", + "generated_add14_model_data_cc", + "generated_add15_golden_int16_test_data_cc", + "generated_add15_input0_int16_test_data_cc", + "generated_add15_input1_int16_test_data_cc", + "generated_add15_model_data_cc", + "generated_add16_golden_int16_test_data_cc", + "generated_add16_input0_int16_test_data_cc", + "generated_add16_input1_int16_test_data_cc", + "generated_add16_model_data_cc", + "generated_add1_golden_int16_test_data_cc", + "generated_add1_input0_int16_test_data_cc", + "generated_add1_input1_int16_test_data_cc", + "generated_add1_model_data_cc", + "generated_add2_golden_int16_test_data_cc", + "generated_add2_input0_int16_test_data_cc", + "generated_add2_input1_int16_test_data_cc", + "generated_add2_model_data_cc", + "generated_add3_golden_int16_test_data_cc", + "generated_add3_input0_int16_test_data_cc", + "generated_add3_input1_int16_test_data_cc", + "generated_add3_model_data_cc", + "generated_add4_golden_int16_test_data_cc", + "generated_add4_input0_int16_test_data_cc", + "generated_add4_input1_int16_test_data_cc", + "generated_add4_model_data_cc", + "generated_add5_golden_int16_test_data_cc", + "generated_add5_input0_int16_test_data_cc", + "generated_add5_input1_int16_test_data_cc", + "generated_add5_model_data_cc", + "generated_add6_golden_int16_test_data_cc", + "generated_add6_input0_int16_test_data_cc", + "generated_add6_input1_int16_test_data_cc", + "generated_add6_model_data_cc", + "generated_add7_golden_int16_test_data_cc", + "generated_add7_input0_int16_test_data_cc", + "generated_add7_input1_int16_test_data_cc", + "generated_add7_model_data_cc", + "generated_add8_golden_int16_test_data_cc", + "generated_add8_input0_int16_test_data_cc", + "generated_add8_input1_int16_test_data_cc", + "generated_add8_model_data_cc", + "generated_add9_golden_int16_test_data_cc", + 
"generated_add9_input0_int16_test_data_cc", + "generated_add9_input1_int16_test_data_cc", + "generated_add9_model_data_cc", + ], + hdrs = [ + "generated_add0_golden_int16_test_data_hdr", + "generated_add0_input0_int16_test_data_hdr", + "generated_add0_input1_int16_test_data_hdr", + "generated_add0_model_data_hdr", + "generated_add10_golden_int16_test_data_hdr", + "generated_add10_input0_int16_test_data_hdr", + "generated_add10_input1_int16_test_data_hdr", + "generated_add10_model_data_hdr", + "generated_add11_golden_int16_test_data_hdr", + "generated_add11_input0_int16_test_data_hdr", + "generated_add11_input1_int16_test_data_hdr", + "generated_add11_model_data_hdr", + "generated_add12_golden_int16_test_data_hdr", + "generated_add12_input0_int16_test_data_hdr", + "generated_add12_input1_int16_test_data_hdr", + "generated_add12_model_data_hdr", + "generated_add13_golden_int16_test_data_hdr", + "generated_add13_input0_int16_test_data_hdr", + "generated_add13_input1_int16_test_data_hdr", + "generated_add13_model_data_hdr", + "generated_add14_golden_int16_test_data_hdr", + "generated_add14_input0_int16_test_data_hdr", + "generated_add14_input1_int16_test_data_hdr", + "generated_add14_model_data_hdr", + "generated_add15_golden_int16_test_data_hdr", + "generated_add15_input0_int16_test_data_hdr", + "generated_add15_input1_int16_test_data_hdr", + "generated_add15_model_data_hdr", + "generated_add16_golden_int16_test_data_hdr", + "generated_add16_input0_int16_test_data_hdr", + "generated_add16_input1_int16_test_data_hdr", + "generated_add16_model_data_hdr", + "generated_add1_golden_int16_test_data_hdr", + "generated_add1_input0_int16_test_data_hdr", + "generated_add1_input1_int16_test_data_hdr", + "generated_add1_model_data_hdr", + "generated_add2_golden_int16_test_data_hdr", + "generated_add2_input0_int16_test_data_hdr", + "generated_add2_input1_int16_test_data_hdr", + "generated_add2_model_data_hdr", + "generated_add3_golden_int16_test_data_hdr", + 
"generated_add3_input0_int16_test_data_hdr", + "generated_add3_input1_int16_test_data_hdr", + "generated_add3_model_data_hdr", + "generated_add4_golden_int16_test_data_hdr", + "generated_add4_input0_int16_test_data_hdr", + "generated_add4_input1_int16_test_data_hdr", + "generated_add4_model_data_hdr", + "generated_add5_golden_int16_test_data_hdr", + "generated_add5_input0_int16_test_data_hdr", + "generated_add5_input1_int16_test_data_hdr", + "generated_add5_model_data_hdr", + "generated_add6_golden_int16_test_data_hdr", + "generated_add6_input0_int16_test_data_hdr", + "generated_add6_input1_int16_test_data_hdr", + "generated_add6_model_data_hdr", + "generated_add7_golden_int16_test_data_hdr", + "generated_add7_input0_int16_test_data_hdr", + "generated_add7_input1_int16_test_data_hdr", + "generated_add7_model_data_hdr", + "generated_add8_golden_int16_test_data_hdr", + "generated_add8_input0_int16_test_data_hdr", + "generated_add8_input1_int16_test_data_hdr", + "generated_add8_model_data_hdr", + "generated_add9_golden_int16_test_data_hdr", + "generated_add9_input0_int16_test_data_hdr", + "generated_add9_input1_int16_test_data_hdr", + "generated_add9_model_data_hdr", + ], + copts = micro_copts(), +) + +cc_test( + name = "integration_test", + srcs = [ + "integration_tests.cc", + ], + copts = micro_copts(), + deps = [ + ":models_and_testdata", + "//tensorflow/lite/micro:micro_framework", + "//tensorflow/lite/micro:micro_log", + "//tensorflow/lite/micro:micro_resource_variable", + "//tensorflow/lite/micro:op_resolvers", + "//tensorflow/lite/micro:recording_allocators", + # TODO(b/286456378): change to //python/tflite_micro:python_ops_resolver + # once all internal usage has been updated. 
+ "//tensorflow/lite/micro/python/interpreter/src:python_ops_resolver", + "//tensorflow/lite/micro/testing:micro_test", + ], +) diff --git a/tensorflow/lite/micro/integration_tests/seanet/add/Makefile.inc b/tensorflow/lite/micro/integration_tests/seanet/add/Makefile.inc new file mode 100644 index 0000000..b254979 --- /dev/null +++ b/tensorflow/lite/micro/integration_tests/seanet/add/Makefile.inc @@ -0,0 +1,81 @@ +integration_tests_seanet_add_GENERATOR_INPUTS := \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/add/add0.tflite \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/add/add1.tflite \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/add/add2.tflite \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/add/add3.tflite \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/add/add4.tflite \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/add/add5.tflite \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/add/add6.tflite \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/add/add7.tflite \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/add/add8.tflite \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/add/add9.tflite \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/add/add10.tflite \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/add/add11.tflite \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/add/add12.tflite \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/add/add13.tflite \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/add/add14.tflite \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/add/add15.tflite \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/add/add16.tflite \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/add/add0_input0_int16.csv \ 
+$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/add/add0_input1_int16.csv \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/add/add0_golden_int16.csv \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/add/add1_input0_int16.csv \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/add/add1_input1_int16.csv \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/add/add1_golden_int16.csv \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/add/add2_input0_int16.csv \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/add/add2_input1_int16.csv \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/add/add2_golden_int16.csv \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/add/add3_input0_int16.csv \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/add/add3_input1_int16.csv \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/add/add3_golden_int16.csv \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/add/add4_input0_int16.csv \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/add/add4_input1_int16.csv \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/add/add4_golden_int16.csv \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/add/add5_input0_int16.csv \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/add/add5_input1_int16.csv \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/add/add5_golden_int16.csv \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/add/add6_input0_int16.csv \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/add/add6_input1_int16.csv \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/add/add6_golden_int16.csv \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/add/add7_input0_int16.csv \ 
+$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/add/add7_input1_int16.csv \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/add/add7_golden_int16.csv \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/add/add8_input0_int16.csv \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/add/add8_input1_int16.csv \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/add/add8_golden_int16.csv \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/add/add9_input0_int16.csv \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/add/add9_input1_int16.csv \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/add/add9_golden_int16.csv \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/add/add10_input0_int16.csv \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/add/add10_input1_int16.csv \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/add/add10_golden_int16.csv \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/add/add11_input0_int16.csv \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/add/add11_input1_int16.csv \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/add/add11_golden_int16.csv \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/add/add12_input0_int16.csv \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/add/add12_input1_int16.csv \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/add/add12_golden_int16.csv \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/add/add13_input0_int16.csv \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/add/add13_input1_int16.csv \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/add/add13_golden_int16.csv \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/add/add14_input0_int16.csv \ 
+$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/add/add14_input1_int16.csv \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/add/add14_golden_int16.csv \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/add/add15_input0_int16.csv \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/add/add15_input1_int16.csv \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/add/add15_golden_int16.csv \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/add/add16_input0_int16.csv \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/add/add16_input1_int16.csv \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/add/add16_golden_int16.csv \ + +integration_tests_seanet_add_SRCS := \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/add/integration_tests.cc \ +$(TENSORFLOW_ROOT)python/tflite_micro/python_ops_resolver.cc \ + +integration_tests_seanet_add_HDR := \ +$(TENSORFLOW_ROOT)python/tflite_micro/python_ops_resolver.h \ + + + +$(eval $(call microlite_test,integration_tests_seanet_add_test,\ +$(integration_tests_seanet_add_SRCS),$(integration_tests_seanet_add_HDR),$(integration_tests_seanet_add_GENERATOR_INPUTS))) diff --git a/tensorflow/lite/micro/integration_tests/seanet/add/add0.tflite b/tensorflow/lite/micro/integration_tests/seanet/add/add0.tflite new file mode 100644 index 0000000..561c47a Binary files /dev/null and b/tensorflow/lite/micro/integration_tests/seanet/add/add0.tflite differ diff --git a/tensorflow/lite/micro/integration_tests/seanet/add/add0_golden_int16.csv b/tensorflow/lite/micro/integration_tests/seanet/add/add0_golden_int16.csv new file mode 100644 index 0000000..cb38813 --- /dev/null +++ b/tensorflow/lite/micro/integration_tests/seanet/add/add0_golden_int16.csv @@ -0,0 +1 @@ 
+-15745,-3570,6755,-4016,-10749,-15477,-20953,15333,-934,9640,25639,19910,5421,15381,-32768,8238,25088,-19607,-6405,-13716,11231,-19460,17975,32767,31154,-5984,-18654,-10162,32099,6330,25517,15358,-24114,-5780,-14751,2742,15599,-8420,-28545,1702,6860,9845,-19965,-109,19991,5919,-10856,7293,-27763,31149,1061,-4528,6572,-23747,-7002,26369,2918,8370,-8767,21318,-26668,30462,31046,3909,-24790,26299,-27726,32767,-13780,-15707,-6608,-16452,-14096,7426,-11931,-14403,15077,1822,-107,22694,2938,-31305,-15755,-14867,-4814,32767,-26594,31263,-20558,-2875,5989,16758,8791,-16556,829,-32592,-27378,-29839,32767,8200,16299,12231,-16289,16787,6650,26740,-9578,19678,5259,-15803,-7203,10727,-11851,-32768,-10722,-18055,11184,-1998,-6354,-11193,-31322,4047,9924,-597,-21253,-7587,23958,16189,-13300,-11123,22895,-9982,-7849,-24036,-680,17314,-19803,-8466,-22377,-32768,29889,-32371,4646,16166,25052,-31163,-25681,-17980,-25256,32767,13853,-32768,-3936,-5532,-12482,-6324,-13683,9143,-5125,-3223,-16166,-6412,12028,7460,-5280,-4685,-25097,-2420,32767,30934,-10325,-32768,8052,-25978,16391,9762,-7145,-28390,-6394,6494,4461,4111,-32768,30776,16587,-2217,1605,549,17891,-691,-16129,-13733,228,-3179,22871,32767,9695,-14182,14606,-18550,25652,-32768,-5998,32767,4863,-16832,-10225,-10591,-4155,-6785,-21334,9732,-2276,5896,19662,4579,-10706,-8201,-28903,-11216,-4687,5833,-15296,4810,-32768,13623,6678,32767,16302,-30961,-25540,16608,-6708,-8065,9213,8208,25414,18761,27511,-5648,2846,11215,-6409,30228,23131,1236,-12458,-26917,-32768,10163,7332,6617,-19312,-2612,-2391,-6448,17057,32767,22047,25974,-5051,-5829,25677,4846,-21563,20238,-29096,15328,-13710,6569,1736,-11032,28072,9052,-6697,-262,-8251,-25591,-20983,12804,-14666,32767,20512,-31944,-16468,-31,1318,14308,-32768,-17106,-4378,-3702,13904,-19531,24295,20936,-25587,-32768,7291,25353,7292,-17096,-19744,7947,21184,-29916,13011,15507,17086,27290,-5735,5263,17991,3598,-32768,6124,-20361,-8796,16858,-10195 diff --git 
a/tensorflow/lite/micro/integration_tests/seanet/add/add0_input0_int16.csv b/tensorflow/lite/micro/integration_tests/seanet/add/add0_input0_int16.csv new file mode 100644 index 0000000..9968f11 --- /dev/null +++ b/tensorflow/lite/micro/integration_tests/seanet/add/add0_input0_int16.csv @@ -0,0 +1 @@ +-32580,-30413,32039,-4864,11762,-25221,-9003,-14074,-21172,21363,12883,22731,26966,-14910,-20959,23046,13569,-9312,21666,-62,21631,-7719,-8507,29829,26917,-29644,-17480,-15226,12587,-21,32679,-7545,-28012,-22966,-30590,7405,-4821,23923,-5872,21010,-23080,26430,-20408,-21898,18317,-14704,9813,1955,-21086,28191,-9976,-30551,22908,-3260,3309,30095,-30422,17666,6966,14279,-16578,18711,10214,-1082,298,13196,-10111,22657,-26048,-5137,-15855,-24049,20362,10070,-25715,-10843,15879,-2918,-17069,10137,-15860,-23036,-30728,-16635,-5022,22297,-9558,29006,-31781,28355,10865,8534,-16952,-18003,11982,-20501,-16344,-24498,22109,-25645,20315,-5432,7113,-4522,21733,9306,-2576,28767,18923,-8506,21670,-5702,-24407,-30772,7395,-31231,-16951,16559,-32357,14202,-22934,28073,12893,10536,-14138,-23781,29136,-9766,5342,-18276,-806,-11481,3693,-12804,9786,27019,-14793,29275,-441,-32688,13712,-25817,11878,28335,6503,-28469,-27418,7038,-15368,13877,-19574,-22199,11793,-21601,7419,14525,11746,4073,14467,12173,3203,1137,-15287,26637,-12115,-17150,-25117,-31444,30368,30755,26968,-25224,-23828,-9184,-1703,15915,-24086,-28573,-24752,2853,-28312,-21279,-31791,22004,27388,5688,-18242,-21791,24989,-17808,-17952,-28389,25892,14279,32071,32736,19912,6327,-17941,-6470,26985,-22714,2034,25278,-11518,-15650,-7830,-7064,5670,-6732,-30439,-22451,-10110,-12123,9974,31415,-17810,7460,-25974,-31672,-27872,-5679,14958,4312,-31240,-11110,26796,32416,5346,-28489,-11681,-5254,-3647,18581,27625,31086,2207,-11125,18399,-27922,-6548,-21338,-913,12778,19350,14189,-25157,-7836,-24787,-6003,19365,-30028,-5023,22592,-6256,-3608,-635,25540,22172,18026,-15030,-3117,31025,-29392,-30208,6408,-13153,-12729,18169,-15076,-21816,24859
,19527,-24059,-4723,997,-11067,-22235,-10801,5653,4477,21340,-354,-18769,-26887,20138,-5587,-19460,-28987,-26065,29128,28070,-17691,-17330,18484,4799,-19394,-16551,-12723,20644,-9944,-3705,8673,-14126,11288,-29541,6522,-10264,32230,23731,32373,-10583,-10485,-11466,-28230,10806,-23622,-22394,15399,-23400 diff --git a/tensorflow/lite/micro/integration_tests/seanet/add/add0_input1_int16.csv b/tensorflow/lite/micro/integration_tests/seanet/add/add0_input1_int16.csv new file mode 100644 index 0000000..4be7796 --- /dev/null +++ b/tensorflow/lite/micro/integration_tests/seanet/add/add0_input1_int16.csv @@ -0,0 +1 @@ +3641,17526,-14661,-1571,-22177,-1357,-20071,29674,14170,-3256,23206,8791,-12676,30341,-28282,-6256,22009,-18139,-23845,-17368,-1430,-19107,28990,27280,20030,13903,-11004,-1858,31623,8051,8694,24970,-10297,9319,3459,-1889,23301,-28040,-31981,-13077,25448,-6670,-10545,15744,12095,18179,-20900,7841,-19953,19099,8583,16410,-8271,-27784,-11289,11650,25769,-2187,-16182,16708,-21833,25103,32007,5747,-31688,23817,-27866,28817,1397,-16215,3110,-3445,-32664,2124,3503,-10421,7625,4429,12244,21459,15233,-23036,2284,-6809,-2469,31346,-26830,18653,-3049,-24215,-277,15086,23455,-7961,-7638,-26509,-22904,-20114,31574,29010,5958,19468,-25839,24592,-7320,27198,-10291,4118,-7048,-13894,-24862,17754,2656,-28620,-18975,-270,26493,-14547,15401,-24510,-23131,-15223,3248,-8399,-16728,7616,9284,27636,-20759,-866,29651,-4346,-12643,-21229,-7961,2385,-14412,-31981,-28089,-29846,28001,-22372,-2717,-27,27089,-18915,-12718,-27931,-20918,32080,31784,-31158,-13550,8643,-21227,-18564,-25890,8654,-16999,-12920,-22847,-8965,26358,-9848,2083,6491,-13645,19733,23175,16966,-32668,-31579,27505,-26320,22044,851,8398,-15319,9835,6175,26197,20652,-24294,23113,1194,-6940,15268,16502,4590,12039,-7456,3155,-18490,-14392,5775,30379,-2133,-22594,31556,-18858,12995,-25219,-9090,28071,14527,-10018,-7302,-8323,-9387,-3731,-5008,28638,4443,16278,17728,-16972,-675,-15822,-17855,8732,14265,11524,-30268,2979,-288
61,25353,-10957,28908,16819,-18644,-23953,24896,-5871,-23716,-8340,-12125,30664,31887,21582,13081,8362,29714,-7475,29108,15332,-8722,2430,-28489,-25876,17256,-4737,30179,-20874,-19701,1502,-5569,22116,24711,11909,19902,4489,-5139,10096,27470,-5466,21046,-27399,28692,-30583,19274,18027,-32036,21477,28942,-5077,-1056,-2448,-16363,-18805,12155,-21866,28207,26298,-26942,-1407,-14645,5725,32279,-22405,-2812,-26684,-25058,30483,-12226,17438,23099,-18418,-32014,18484,17214,16470,-19017,-31356,20335,18707,-16554,11788,27131,-1684,17435,-30761,14357,30445,12884,-29393,-63,-8717,5075,10234,4028 diff --git a/tensorflow/lite/micro/integration_tests/seanet/add/add1.tflite b/tensorflow/lite/micro/integration_tests/seanet/add/add1.tflite new file mode 100644 index 0000000..e818380 Binary files /dev/null and b/tensorflow/lite/micro/integration_tests/seanet/add/add1.tflite differ diff --git a/tensorflow/lite/micro/integration_tests/seanet/add/add10.tflite b/tensorflow/lite/micro/integration_tests/seanet/add/add10.tflite new file mode 100644 index 0000000..5acaa4f Binary files /dev/null and b/tensorflow/lite/micro/integration_tests/seanet/add/add10.tflite differ diff --git a/tensorflow/lite/micro/integration_tests/seanet/add/add10_golden_int16.csv b/tensorflow/lite/micro/integration_tests/seanet/add/add10_golden_int16.csv new file mode 100644 index 0000000..57893d8 --- /dev/null +++ b/tensorflow/lite/micro/integration_tests/seanet/add/add10_golden_int16.csv @@ -0,0 +1 @@ 
+-1499,-23044,-1939,-32768,17239,26718,-21648,3224,-32768,-32768,29361,-32768,32767,-32768,32767,-32768,10989,23962,32767,-32768,-32768,6519,32767,-32768,-24348,-5350,30271,27044,-25033,-32768,25981,32767,-28290,27211,-22585,-23070,21417,7736,-30849,-914,-20575,-29957,14819,-32768,-32768,11182,12120,-18928,-14261,23569,-12284,-32768,-32038,-32768,32767,30499,1920,-32768,-1055,32767,3016,12063,-29592,29020,-18325,32767,-32768,-20114,-4735,19927,-9740,-32768,-13608,8324,15779,-32768,-1388,19383,-10243,24315,-11602,-32768,28834,-32768,-10772,19732,-32768,32767,8219,32767,-26843,-8788,32767,-1484,32767,-31961,11557,23630,-16256,16721,-15155,32767,29819,-32400,-9995,2727,-21325,20245,26874,-13783,-6321,9801,-19986,-32768,-32768,3539,-19850,24942,15668,-32768,-29638,5553,-31344,-26722,-5588,32767,-16619,18846,-15520,32767,-14050,9010,10062,-32768,32767,-20924,32767,20476,-32768,-13113,-2242,32767,-32768,10636,23718,-32768,-22763,-32768,17730,-20492,32767,31203,-10986,-24361,19009,22827,31495,-10968,19058,-32768,-32768,-32768,-48,19433,-16573,-14128,-32768,30593,1508,1686,-6925,-32768,-5780,2496,-4568,4837,13105,14342,-27724,879,-7760,8083,32767,32767,-24205,-32768,-6494,8919,-4599,-13182,28513,28422,-10259,32767,-12856,14585,3307,4938,32767,-32768,11956,29691,32767,7797,-6334,28048,-13189,32767,-18686,32767,21585,-32768,-5476,-32768,-32768,32767,22903,13252,-12435,17114,18159,8911,5699,7447,-29689,16234,-7099,-10997,2064,32767,-32768,13668,-2578,-20215,26243,-32768,-22545,-10894,17378,-23310,-25058,23762,9812,18251,-11420,23438,32767,-13500,7372,23079,28221,-11214,-10371,32767,25579,-4160,10866,2121,-32768,17063,19754,-7292,-23346,13363,-527,-20641,17952,23294,8200,-2009,-32768,32767,-32768,-8181,-32768,-32768,22279,7325,32767,-20876,-12350,-6437,-32768,-23187,10288,20897,-15486,32767,1586,-32768,3946,8921,12750,10781,-24579,32767,2425,32767,-25299,7017,-27125,-4894,23660,-32768,-16548,25220,-1714,32767,-32768,-19466,32767,7480,13696,-16828,23665,-28380,-24294,-11366,9768
,32767,-13211,-5443,18655,4812,84,-16365,-7682,-25602,10925,-25843,-8289,-32768,-8918,32767,14731,-32768,-32768,-20811,-4984,15376,27862,-22064,2447,-21157,7536,31538,12439,-22250,-18934,-32768,32767,32767,15288,-9479,-26035,526,32767,-23913,-17969,-12206,-27783,1345,11320,11876,-31680,18731,8040,-29905,4632,32767,-356,32767,-32768,-13311,-32768,-15481,27908,-32768,-19113,7091,-16346,-4682,-24969,-5515,-31437,32767,19537,-21603,11612,-19726,21925,-19306,-2012,-1971,22434,-1642,25536,-10717,-11444,32767,-32768,8707,26494,25869,-26605,-32768,-7008,32767,-5363,20624,-32768,-7566,-16927,30659,-32034,-2423,5409,23670,3694,-10663,-29492,3354,-32768,3281,-10340,17206,-15094,32767,-11171,16860,20965,28694,-18877,32767,-32768,-95,-32768,5529,875,9377,-32768,-19048,28074,-32768,22801,840,5430,8305,-23282,32767,10407,19336,-8617,-2919,-32768,32767,-20328,32767,-32768,-3780,32767,-23535,889,-17068,-1204,-4982,696,32767,-28765,-32573,22260,-14416,26134,19966,15982,-18547,24296,18931,4359,32767,-32768,-32107,-1338,3262,4766,-28940,11821,21192,17741,22848,-7073,-32768,24480,32767,-32768,21731,-32768,-2844,-32768,32767,7617,-21102,29527,10005,19813,-28390,6023,-17871,9697,-14150,11204,23050,-32768,32767,741,-10025,14514,-7152,-11096,-11849,-3398,8687,14513,1069,20172,-22901,-4581,16399,9840,40,-8802,13793,-9896,26921,28650,-14077,-2160,7227,30172,-32768,-26965,32767,12310,-2562,7120,24519,15599,9547,-28180,18656,32767,5734,18708,-32768,32767,-30114,31273,30254,14169,-19997,-32768,23753,3232,18706,13575,-9756,25576,25480,1283,-6621,-18291,32767,32767,20605,11239,-32768,4405,-32768,-32768,-8394,10183,-32768,27808,-32768,32497,4611,-8937,27561,32767,11703,6225,20832,32767,-8293,22647,32767,-32768,11353,-15360,9841,3603,15252,-23385,-6480,-3407,5963,-16590,-17917,-4080,32767,23732,32767,-1912,21354,-8695,7571,32767,-229,32767,-27778,32767,3840,-17537,15632,-21428,-21161,-12772,32767,21902,-32768,15050,32767,13162,3389,2834,-32768,-7754,-32768,16867,-23852,10723,29750,-32768,18999,30566
,-3890,27075,-32768,32767,-32768,-32768,-3193,-11979,32767,26345,23410,29143,-1095,-24454,-45,-32768,21206,4501,23378,24145,16965,18870,10879,32767,-7118,-32768,-18936,-30397,14228,3897,31124,-8224,32767,-17346,12732,-20562,25559,32767,-20060,22701,32767,-24500,-2777,27997,-17844,26130,-9221,13166,-26338,32767,-26591,32767,-4442,-25837,-4872,29883,-13521,32767,-10640,32767,-30421,80,-30192,-18662,-20638,14048,-27054,-7850,32767,-23056,24395,-364,-545,-32768,-32768,-23515,12276,-32768,32767,12396,-32768,-9555,-32768,-26834,10679,-32768,-32768,6051,20428,3956,-25713,-9391,21543,32767,-6121,-32768,-17688,-28893,1276,32767,2128,-30110,7066,-29285,-19789,377,19006,-24145,26072,29414,-6062,-32768,25870,10038,32767,-32768,17342,15169,-19997,-32768,-23644,27256,10378,-24718,27488,16727,29178,-32768,32767,17436,-5783,12636,-32768,8606,32767,31635,28516,-968,-32768,-20114,18888,23214,-18858,-19112,32767,20376,32767,12308,-2242,-32768,32767,-5406,32767,32767,-12049,-32768,-24357,32767,32767,2049,6725,15223,-6828,25302,-32768,-2711,28112,-17989,32767,-11620,-32768,5482,22073,1935,32767,25789,32767,-5972,19396,26540,-5876,-25947,1589,32767,-8178,-7882,13955,786,835,15162,-21938,-19891,32767,-22284,13921,23294,8587,-32768,25130,6038,-30206,9154,-17213,16162,9509,-807,-9213,15654,14093,15470,27665,-25163,58,-23537,25098,-2437,454,-27902,-32768,5633,22255,-15757,-11497,-31390,25804,-32768,32767,8041,-16863,1576,32767,6760,-3660,-6627,-19016,6311,24198,-18631,-23391,24393,22966,14,32767,-20268,-21174,12177,32767,-2664,-32768,-21034,32767,-13333,-32768,-28310,20109,-32768,13031,-6258,-32768,9706,-21985,-25595,-32768,-5358,-26293,-12358,-2438,4748,-4292,-32768,13117,-18596,1543,-32768,-13642,17198,2462,-32768,32767,-514,1391,21275,-12785,-32768,25656,11181,32767,2208,-13810,-32768,-15814,-6478,-17322,8954,7704,-18777,7973,32767,29736,534,32767,32767,32767,8374,-32768,-26604,28136,-15931,32767,-18708,3113,32767,22516,12127,9097,32511,12886,-32768,-31702,-25687,24313,18749,27824,-32768,
-16939,-455,-7862,-11048,29231,804,-31189,-32768,-32768,9543,-32768,439,-12645,12884,8186,-21209,31598,32767,-28628,14554,3782,11011,2960,-17138,-10100,-16526,-11775,-18796,31886,26994,-32768,1239,-32768,-21085,8118,-22830,32767,32767,-5679,-9195,13786,-31464,-7567,13079,17270,-1517,-3417,-9775,-32768,10959,-5642,-32768,27918,-32768,32767,26365,-28180,3069,16974,19473,-8009,-29389,-27191,-27967,-15418,-32768,2219,-32768,-7996,-3080,-12565,-25324,4396,21707,24418,-32768,-2394,17777,18854,-32768,22364,-7664,-22992,9360,16165,-32768,-1955,-5181,13566,-32468,14602,-32768,30615,-32768,14281,-13361,-9970,5234,32767,8438,32767,-20244,32767,32767,-6631,32767,11385,-15816,-32768,7912,-31991,-32768,32767,7533,-29886,-17562,32767,29713,32767,-32768,10882,-10258,32767,32767,32767,32739,-15290,-25539,-18861,26319,-12447,11283,-13347,21964,-32768,2407,-15890,-22293,32767,-14125,2353,-26062,-9491,-7565,4874,31146,2003,8890,-2195,-16231,-31500,19349,-1710,28361,1257,32767,-9167,-8950,-5399,6484,32767,-22867,19520,-24781,28472,-32768,32767,-3342,7195,-32768,-32768,-8388,-205,-11465,32767,-32768,-32768,-11531,4231,28190,17321,-5918,-26597,32767,18060,-29103,-31604,6785,32767,18641,3052,32767,32767,-10637,-32768,-2728,-32768,6370,-10986,-32768,-27881,-15574,14466,32767,-6773,-16966,18146,1372,5865,-32768,14154,23117,32767,8163,28778,32767,-27170,-31990,-32768,-611,20755,-18609,-32768,25558,32767,-16287,27806,-32768,-4643,7109,10673,20925,-32768,-11619,31475,16934,20342,-17989,-7433,-28296,-4205,-32768,6306,-32768,28151,-14560,-20640,-2586,-3251,-6553,24549,1220,2515,-32768,-9627,-1378,-32768,-32768,-21862,-32768,-28222,-21448,-20703,3305,-11577,23240,5745,31787,21298,-20083,26517,6496,-21772,-4328,11258,-27153,-32768,32767,-10845,19698,-10031,-9292,21119,32767,-7604,12622,27582,8360,15233,-32768,32767,-14058,9342,19276,5175,4605,14415,3007,8833,-30391,-31748,-24504,-17955,27059,-6228,-19609,29782,1982,-1848,-1368,32767,-15194,32767,32767,10213,32767,9877,10900,-32768,-8172,4708,-9716,
-9538,-1511,-8664,32767,-15265,-5662,-3755,32767,29440,-18252,28035,5165,23805,1783,-32768,-9769,32767,32767,-5474,29950,257,-32768,-7622,9528,-32768,-4303,-6112,-1684,-32768,9052,18744,32767,5473,22888,2081,1449,22524,-234,32767,-16282,6895,32767,14783,-21160,21317,8467,32767,-3810,-4894,-3653,-32768,-15594,-13606,-15246,-3790,-2950,26496,29735,-4749,3328,-8375,3811,-6717,24545,-3854,-4331,-15519,-32768,-15170,-11356,21264,25364,-32768,32767,-4859,-14308,16658,27488,3550,14436,-8283,4250,-3544,6807,7599,-11711,-7924,2470,-14850,-32768,-22164,10913,-4834,16166,25118,28081,-18477,-3733,-32768,-32768,32767,7197,-1489,-11869,-32768,-32768,32767,-2841,15687,23506,13127,-19624,-32768,2765,-32768,-15895,12340,-13451,-19433,-21212,-8156,-32768,-25642,-5561,9325,32767,32767,12724,29887,-6255,25160,32767,-4481,7416,32767,-32768,10494,-2118,-6489,1839,11968,-17918,14643,-13220,698,-22997,-9858,20251,8152,8089,6651,-32768,-30777,20783,-28749,10595,32347,19347,-30253,-21946,22764,24339,-17424,-22024,32767,1557,-32768,10605,12284,1819,12633,-32768,884,-406,-22582,32767,-27928,1615,-17616,-29599,-5019,32767,-7008,32767,8634,18587,-28899,22247,15783,-32768,-8931,-9849,-32768,-12223,-2199,-3807,-32768,5992,11809,18055,4108,-18173,27606,16325,23132,11555,-1237,-22185,-32768,-14768,-23253,9991,-13769,-30645,-32768,27146,16634,-4918,11271,-1253,18240,15797,-6069,13722,17487,-8380,-5761,10171,-6285,-9334,-24230,283,19250,-32768,29206,-19077,-25324,-20889,-32768,13126,2085,-4044,-6861,-17107,-32768,-32768,-14214,8445,32767,15827,32767,-32768,-9867,-30445,32767,9482,32767,12925,-4441,-32768,-19639,24885,-32768,-22599,9132,30962,-8003,-32768,7290,-30679,15479,7356,-32768,-18262,-3033,-32768,-19122,-10050,-17058,4353,7308,32767,-503,14033,-27579,32431,23507,-5057,-9703,2584,-9346,32767,-32768,-20558,-32768,-14430,-31779,32767,31915,-32768,-32768,5399,21056,-32768,1444,-32768,-9237,22780,-8994,32767,-32768,8033,32767,32767,-14914,-8058,20727,1613,32767,-25388,-32768,12025,25322,5199,-2126,-
32768,23001,-26301,23819,32767,18422,-6093,-32768,-173,27770,-32768,15711,-19007,25039,23190,-2946,-6853,29816,13768,-3701,-7447,8836,10700,-27349,15502,-3848,5277,-32768,6225,21274,-10786,-12212,1830,8791,32767,32767,17265,-32768,32767,6558,-32768,19370,-3545,-32768,1738,26864,2889,-24050,28709,-19441,-25790,-11242,-19090,18825,32767,-31476,-5521,-32768,14348,-32768,-32768,-21384,12341,-1728,-23242,27048,784,14577,8362,975,-23707,4347,19814,-9108,-9750,-23726,27206,-32768,32767,32767,20870,-14354,-16925,19409,17916,32767,1166,29555,4342,6285,-6718,-17302,-32768,2094,10562,1210,-1880,-24408,32767,-32679,32767,32767,15417,12413,28942,6436,29180,-32768,-11391,32767,-8963,22863,32767,29696,-22606,31656,13160,-14712,32767,32767,-32768,32767,19652,-20903,1696,1469,-23587,27867,32767,32767,-32768,31402,-32768,15406,32767,27635,-3630,-9237,31614,16142,-13084,24191,-11626,3261,-3573,29837,-31083,-17048,32767,23810,7992,6532,-499,-18101,-32768,32767,-1403,-23451,20074,-14686,12782,-31580,32767,-211,26367,32767,17939,-30163,8058,8722,3651,8114,32767,-32768,-32768,-23918,-5822,-20797,-20659,28384,-21135,-9093,15694,-12297,19995,16612,-32768,-21194,-32768,6465,7379,24881,24161,-32768,12753,22444,23199,-15929,-32768,15579,21222,-19260,11042,11374,18826,-3824,-14035,15326,-29047,-32768,-2887,-18893,4204,-18367,-11438,-10913,-22018,32767,27994,-32768,13468,-30166,-22184,-20623,-14442,-789,32767,3159,-19186,32195,10105,-13571,17971,-25109,-32768,-9515,29048,-9270,-20125,29652,10233,-26111,-4235,32767,7372,-25194,21440,-32768,14782,-14840,11744,-6799,-3949,-242,140,-3615,-3005,-1682,-2444,-32768,10082,2094,1881,-27565,32767,11782,-18695,-32768,-32768,20613,-20181,20181,8117,1125,32344,-32768,32767,-30003,-6688,-14482,-4499,7387,-32768,32767,17214,3383,-1913,10406,29362,-23802,24455,-11329,24217,-32768,12926,23973,15934,-32768,4610,26302,-32768,15930,5064,32767,-32768,-11612,4791,-25689,-28505,-29623,3672,22408,13216,27065,-20262,-32768,8472,16893,-32768,-32768,-16127,-32768,26711,-2
0947,-32768,-16567,23143,22656,29774,-9617,-32768,-13036,13998,1968,-12048,-5658,-8986,-8222,32767,21513,32767,-28319,-20061,20896,13463,-21175,18176,7295,30929,21777,32767,32767,-32768,-32768,16522,-22931,9176,3949,-32768,-32633,-9449,21332,29657,-22680,12013,-32768,-7979,-32768,-1552,23803,5351,-26883,-17981,-11087,-27584,-310,-15352,-5044,-5770,13434,32755,32767,-13688,32767,32767,-20699,-21246,31753,-20964,312,16353,-8544,1412,32767,-19514,-9351,25203,17948,-19924,32767,32767,-14802,4600,32767,20334,-11933,32767,6404,-10162,23714,30045,12485,-11752,-5010,14243,-27634,32767,32767,-27781,-22010,-1932,-6277,-25973,-5453,-4010,-7948,-32768,-7147,32767,-24069,28545,-32768,-9615,-6492,-3376,-15960,2076,10547,-32025,32281,25468,-32768,-16178,-14134,-21919,-9174,30468,-4069,32166,-16211,-8238,-29001,-32768,-20637,32767,9802,31257,6409,-13608,30133,4876,32767,-32768,-3460,-32768,-10685,-29863,-19,32767,1377,-31237,4876,16680,-10337,-7748,32767,610,-14532,22561,29071,-21497,-21203,32767,32767,-32768,-28522,-19443,32767,-32768,-6216,8418,14248,-17071,23005,1822,29472,20496,-16585,12562,22456,1750,-16603,24139,-6768,32767,27725,-21451,11799,-2016,3199,-11447,32767,-1127,23890,10440,17260,5218,23729,-2741,-9005,-12894,29406,-15054,27686,-24868,32767,-29748,650,-22413,-13972,-20503,11187,16362,-16029,-28363,-10525,-4905,21502,-32768,-32768,-14961,-22156,32767,18319,-14726,31390,-9206,-23988,32767,-27137,-19442,-934,32767,8736,19982,-15627,25259,-32768,-11374,31269,5181,-26874,-14075,-16994,3057,25075,10158,-32768,-18999,-32768,-1853,-10306,25920,15173,-10745,-32768,10972,32767,-32768,-32768,-21045,6671,3539,-14109,20968,-26019,-32768,15990,8946,32767,-9617,16843,-6243,28203,-15758,23878,32767,6625,-32768,-22295,-2052,-32481,-7234,31340,5894,-32768,-32768,6756,26358,11355,32767,-32768,-19013,14,-2396,23281,-4873,-17393,-31900,32767,31977,8086,32767,19330,-32239,-16811,8242,-1912,-24323,18377,8834,6768,23953,-9370,-32768,-9711,25633,-13007,32767,31224,-16124,-23079,32164,9572,5
22,32767,29390,19855,14244,-30645,28909,5455,-27570,5551,-32768,-20625,-32768,-31594,-2115,-23087,-32768,-32768,9222,-7535,-32768,-8864,28633,32314,15815,5608,29474,-8489,-32768,-6444,16950,25475,32767,-12211,-4933,-22826,32767,32767,17671,9751,27738,-9804,11893,-21716,-245,-15240,13644,-6425,-17198,24488,-32768,6206,22525,32767,13993,-32768,-15770,7497,-17521,-32768,-26479,-20892,6659,-18268,23927,32767,7686,3372,5376,2494,24146,-6282,-8295,-24061,-5645,28813,23605,-2687,31498,-31628,-23206,17623,-29178,26775,25487,32767,19295,-3056,17865,27653,-32768,-21255,10836,-9489,18857,8325,-15907,-13172,28174,-32768,-32768,-18839,6658,11042,-32768,-19747,9247,15255,26469,-26549,18278,-32768,32767,-17176,771,-7082,20152,-32768,-32768,-20270,4550,24219,6286,-32768,7658,20607,-28562,25538,-25174,32767,-32768,19457,-17514,32767,30776,15557,-2500,19918,-12165,-22494,21113,-22159,-32768,-430,21705,-32768,9364,-3703,23786,-23701,19385,-12418,32767,32767,-21048,32767,-6204,-32768,-15326,32767,-22769,-141,-31684,32767,32767,-32768,31356,-31194,-10391,-14585,4775,32767,-11836,29575,-3089,24692,27054,16485,2877,-20653,-5432,32767,2381,907,-7,32767,-19909,-5191,-2125,-8245,-26121,26604,17595,-2352,13602,-12551,13861,-2789,-25502,1562,-31554,-14890,-32768,-29295,-10832,316,-26305,-19124,-6656,25060,13430,12916,-26601,32767,-32768,-12714,1541,16403,32767,3072,-32768,-6178,32767,-32768,-11875,-6677,32767,-4892,18533,8501,24080,5783,8580,32767,-19391,32767,23737,-14858,32767,-770,32767,-22293,20120,1784,-17225,-32768,-30836,32767,32767,-23535,15612,-17742,-32768,32767,32767,-27213,-5568,-15796,6139,-9577,32767,-25131,32767,9625,-26786,-16047,28806,-32768,28686,32767,-12084,924,22240,-25225,8930,4504,-300,32767,-3241,32767,-9292,-29728,-32768,72,-32768,-4351,-814,-2217,-16977,14928,2203,10853,31381,17859,5875,32767,-32279,32767,-6560,32767,-5086,-30403,-23859,32767,17745,-5774,-5967,31602,-32768,1626,23554,26170,19795,29922,-32768,9469,4629,-12450,-11704,19997,2733,1273,4184,10033,-32768,75
35,22749,-32768,16625,-21735,32767,27677,-30958,14983,32767,4157,9119,7193,32767,19676,-29670,32767,-21577,-2481,9881,-32768,-29057,12243,10231,52,-23519,-8922,32767,-31910,-23547,32767,3505,14977,-8130,-30765,-32768,26366,-9394,20736,-11419,32767,11838,-32768,-6869,28279,16213,-4238,32521,-32198,-23250,-1499,6007,-10461,36,32767,10402,-26227,4834,70,32767,-32768,13307,32767,-25118,-2978,-21222,32767,2206,5979,-13979,-5108,32767,17187,7594,16580,32767,11907,-19084,32767,-32188,29255,21204,23924,14301,-3781,4561,14756,-3765,-16699,-20061,17365,-19664,-32768,-31913,-7389,28814,15843,-31141,-26452,-29236,14825,-26911,-27387,-19700,30652,32767,-947,11632,32767,32767,-23941,-21485,-8008,31105,24770,-10021,-8217,13123,-30212,-4197,-19365,32767,12904,-32768,22243,25982,-32768,654,-32768,5980,12195,8303,32158,32767,-32768,9349,32767,-30872,-23550,21140,16511,-32768,-15413,8215,-31589,4714,-32768,166,13673,-32768,2858,-20029,10171,32767,-32768,-10901,13753,-11228,-28081,-32768,-24346,-300,32767,18399,-32768,-32768,-26549,25398,-32768,-30787,-9144,28066,8078,-25102,16031,9630,-17503,-7900,-6436,-30415,-32768,-15411,23818,-9483,-26312,-1835,-14893,-17379,-12756,-212,-2081,32767,32767,-31494,-4246,2239,7586,23506,27674,-32768,32767,8910,1775,28707,17521,19701,11692,-32768,-10796,-32768,-12104,4952,32767,-11199,21124,9704,17785,-11341,-15236,7602,2161,32767,-32768,17968,-27902,2360,-32768,-32768,-32768,-16929,16520,-8625,5318,1339,4275,-31345,-24910,7240,32767,-26100,21099,11655,2326,31197,-28370,7607,-11005,-12376,9045,-2288,11169,-27388,-21496,28939,-18846,10002,-13752,29041,13702,-10493,12646,-24179,32767,-32768,31812,-17641,21832,2925,-19235,-32768,-32768,3033,-1131,-5803,-12494,-14160,9664,-32768,11705,13272,-7732,-11958,32767,29574,-17516,-9503,12140,-32768,-32768,14534,-27163,41,-32768,-1967,30842,-32768,18671,19701,21730,32767,20612,-32768,27458,-18955,30500,32767,-969,8606,-4170,5358,22395,-4729,10657,25948,32767,32767,-4493,-13446,-17171,-9990,32767,-30310,16117,13746,
8270,-15468,8484,32767,25365,5228,-30524,-18883,-1319,-32768,-32768,-16148,13185,26039,-940,-13364,-23250,-13124,29983,-15506,-23915,-32768,8262,8691,-15068,26836,32079,-32768,10846,21271,-32488,-17957,32767,-32768,-857,4159,-4450,-5332,32767,32767,-32768,32557,24723,15111,-32768,-32768,-32768,14989,13117,15665,28498,30558,32767,-32768,32767,-32062,-17509,32030,-6911,-11051,3767,-499,-6762,-862,-32768,23019,-24133,23624,-5346,-32195,969,-25177,-15021,21780,31617,32767,-17872,19576,-23608,26152,-2506,20955,8549,-27532,-1894,-2909,-32768,3168,-17771,8992,3531,28467,-32768,4965,22770,-24248,-4966,19551,-15336,-32768,-1720,8076,9773,4047,22688,-494,-2362,21910,-18630,-19083,-11333,-32768,-32768,-32768,17525,8676,-2869,22770,-6582,1706,-22064,-32768,10691,9431,-11248,-10785,9572,-7271,32767,10785,-23917,32767,-32768,-14672,-29379,-8248,19555,-19339,-32768,-20019,-18481,-14742,32767,20793,4365,5710,-10501,-32768,-23796,-8639,-32768,-17650,-19860,6867,-32768,-22584,20506,-32768,32767,32767,-32768,20687,4822,-21897,-32768,-18945,3393,13241,-15934,28288,32767,-4261,32767,4736,32767,-28896,-9311,20779,21645,-32768,-28998,32767,4326,12861,-21468,22085,32767,20843,-653,-8983,-29356,-32768,-15480,-17795,20780,-32768,29320,10763,-32768,-23621,-32768,-10632,-20526,11186,32767,-31054,-32768,-994,-13761,-926,-23438,-8550,22213,4659,-32768,-32768,13637,32767,-32768,4907,32767,-32768,2116,22673,-12781,1963,-18881,-32768,-15720,5849,-17102,332,-18120,-19131,32767,-23356,-32768,-31925,549,-5899,-32768,-3963,24529,32767,31001,-31621,-5402,-4006,-31731,10619,-32768,32767,-18818,32767,-31360,-18961,30043,860,1254,832,-19567,32767,18453,29295,-17163,-3297,23296,-32768,-23010,2870,3316,12972,19771,23538,-24279,-16393,-17270,-32768,32149,698,16974,24441,-24397,1024,7634,15865,32767,-32768,-32768,-28148,27504,32767,25849,8551,-23465,32767,19488,1422,-32768,-32768,9402,7461,11953,-23580,14488,-10945,11727,-5828,-32768,23382,32767,6888,-10441,-1622,-32768,5636,4269,-8601,-14854,17590,-32768,-941
8,32767,-30680,32767,-26165,-27997,11503,-5493,-2873,-8899,-32768,10820,-18886,32466,32767,32767,-17822,-18914,-9360,-331,-30350,-23433,-27984,32767,9921,12767,32767,-26458,-32768,3298,19459,11884,6282,24959,15574,32767,-31226,8599,8033,-4999,18405,-32768,-32768,9160,-884,9088,-9135,32767,2872,-19190,32767,-25939,-32768,-1684,-11767,17371,-2716,18470,13492,-14389,10820,-209,18556,21382,32767,29602,-5248,-27053,2609,32767,32767,29970,18386,-6298,-32768,-15667,32767,-1468,-15768,32767,-14313,26986,-23989,28581,27465,-32768,9315,-32768,-947,-28775,32767,-32340,-13848,-28330,-14537,-13275,32767,32767,-30716,10456,22502,32767,13455,-15396,2705,-25622,14602,-8743,-12786,-8704,-32768,-5639,32767,-23806,16256,-11519,23228,-26621,-14085,32767,-31594,-32768,-26742,-14476,32767,-17290,305,-26159,32767,-14616,8450,17491,-7054,-25338,-32768,-22656,16289,14440,-32768,32174,1606,-27490,-17544,17431,5864,32767,-17741,-32768,32767,-14905,-13879,22603,-32768,32767,32767,-15734,-32768,-19157,-10879,4187,-32768,5272,-11626,-3372,-18668,-32768,30441,-31435,19985,-6495,-24468,-32768,-7677,-14862,24463,-2993,22348,-23990,32767,32767,-32768,32767,20695,32767,16514,32767,6937,-7040,-29956,3869,-27420,21437,-32768,-32053,2021,3671,-8417,612,-32768,32767,-32768,26705,32767,-4594,18479,32767,-15884,21024,-20647,-9890,-32768,32767,25699,22273,-32768,10371,9966,-19635,12176,-30339,17454,-32768,-7906,32767,6722,4671,-8441,-32768,32767,-25455,32298,2443,5856,11297,14913,-23171,17431,-6812,9321,25863,20275,12216,21565,-32768,-27247,16378,32767,-17833,1926,29696,-8502,-1365,-16612,25288,2701,14036,-18911,9704,15861,8718,32767,5473,28464,12810,15406,-11235,32767,-32768,32767,-2052,32767,-11655,28173,2327,18940,-19465,-32768,-11555,-506,-28909,-32768,-20752,-21300,-17264,-29906,32767,-20249,-32768,26072,-32768,32767,-17514,-10416,-31284,-32768,32444,-32768,-5888,32767,-2691,24936,32767,29130,19437,-32768,32767,2898,-15705,-768,-14690,8196,27469,-9051,2391,16341,-19886,32767,-32768,-11420,-7086,-32768,
-25807,18894,12763,6372,-26176,-26903,18845,17821,-9016,-11201,-32768,12232,-6733,-3401,-17063,2033,-15489,-17412,-12366,-24863,-7193,17931,-12349,-3952,-2347,-32768,23061,-1536,-20165,24169,662,18300,16678,32767,-13461,-13738,-5412,-11340,32767,-32768,-32768,-5579,5778,-17255,5547,-19202,11659,11173,-20282,-32768,8600,29154,-124,29255,32767,-32768,15091,5835,-17065,357,-6483,-6543,15176,-27401,-18001,-19614,-11057,-29526,32767,-20081,-6655,-30633,-11966,32767,-1653,-609,-32768,-8452,6958,-4514,-22722,5895,-17707,32767,19932,32767,-8013,-26276,4145,-32768,-9446,26060,-17781,-21134,32767,-23781,-9909,-4641,-32768,-12695,15283,-2314,-15290,5973,-32768,8145,-32768,16157,2556,7361,9913,-28122,-817,23134,-47,19258,-26830,9558,32767,-23571,-22551,-9984,14054,-18285,-8322,32767,-18572,26758,5347,-32768,-2914,7875,3170,12002,32767,26267,2484,23174,7027,-24980,-4751,-32768,-32768,-23005,12176,25911,1969,-19731,-32768,-17094,6631,-3361,32578,-11304,25701,4578,30006,10028,27565,2361,5278,-32768,-12237,-32768,25804,-24288,-23643,-15690,26258,24069,-22334,-31833,32767,19900,15545,25869,32767,3841,-676,14526,32767,-17109,-16703,-25664,10444,-24652,32767,24204,26944,-32768,-27754,16346,20600,32767,-13942,7347,-32768,-18765,32767,-28204,32767,-4245,23708,32767,6606,-10212,-6559,20892,32767,29445,6842,388,9159,-32768,-2154,23921,-22015,-26734,-20042,-25903,31781,3887,12634,-26799,-29857,-32768,31857,-4336,-937,-17740,-27511,18423,-30078,-32768,18197,32767,32767,31997,23700,26133,-11782,-8442,15829,6159,-22913,-21339,-3584,9669,-19935,32767,32767,21633,-17506,18177,29519,-159,30668,-11839,28107,15713,-28322,-9067,-24060,8267,-18210,-32768,-10633,-5713,32767,32767,-1205,-17623,16686,-30496,32767,32767,5330,32767,-12855,9753,22357,-32768,-32768,29824,9952,-11366,824,21790,1644,-32768,6297,26009,30599,-4775,-22425,15351,-2934,32767,-1876,-8310,-32768,-7203,2619,-32768,9718,32767,18525,-8896,-32768,-8589,-6036,2453,17194,32683,-8272,-16954,32767,16621,301,8955,-32768,23696,24752,-32768,-
32768,-1296,9688,-20641,-32768,27931,-32768,32767,-32768,-16315,-28956,12619,6006,-1975,31136,60,32767,-4362,-14410,16495,30217,-6438,10681,-20979,32767,-32768,-12178,-26526,31420,-9064,14623,-2424,6129,32767,32767,7996,7783,-32768,32767,32767,-197,19533,-32768,2722,12917,17769,-27768,29601,32767,2849,-17960,-15381,-25799,7007,-18902,-32768,-20073,-12888,32767,32767,-12858,29895,27635,1070,32295,-11882,-32768,-14864,29222,17061,13486,-32768,32767,-26015,-24165,32767,-29559,-8878,3443,32714,-20663,32767,1014,-26350,-23000,-6435,-32768,11462,-32768,-14962,-14573,2353,-19354,32767,-23255,32767,-16323,6208,-31592,32767,12673,20129,-19105,-32768,-29522,-26649,30770,-25581,-32768,32767,32767,21855,-32768,15718,25462,-24735,-17155,9132,32767,5570,-15674,14584,-32768,6343,21080,12336,5,28928,23772,11909,-27107,-13098,-14253,-24212,3613,13985,-16589,13616,-9759,18950,-32768,12087,24385,-16262,12294,-12072,-22774,-32768,13727,-25179,-32768,2810,32767,-8906,32767,6254,-13412,-17968,-17406,6638,32767,-16864,-22817,-10956,8494,17024,-9690,10907,-32768,-7959,-7383,16971,-4370,29244,-29753,11111,-19371,-32768,-5921,-23885,-26649,-32768,-32768,-6699,-32625,3972,-23991,32767,-9532,8593,-15118,32756,12337,32767,-30961,-32768,14829,-15838,32767,32767,-32768,-32768,-7275,32767,14097,32767,-14429,32767,-10592,-32768,21028,4962,24017,3643,18376,-14236,-32768,14082,8609,-32768,-187,20545,27085,5501,-8023,10943,-22304,23965,-32768,-9956,27716,-32768,-23962,16259,-11722,-32768,-25048,7226,6726,32767,-10106,-6860,-32768,9132,-17885,-32768,-8042,32767,31414,-3773,31776,-32768,6254,32767,30538,-20632,32767,-4928,31950,19984,20564,11818,-100,32767,15414,10399,-29663,9181,-8094,32767,7428,-31843,-397,-28644,-16914,9551,-6377,-11232,-11147,20169,32767,-27558,28746,-32056,3176,-17446,-7698,32767,12166,-14878,6019,28959,32767,12188,29381,-17761,27393,1905,24465,-3786,25116,18382,8992,-14223,32767,-29976,17059,3611,-16241,-21236,32767,4706,-4090,2752,-32768,757,-885,29795,21767,-8612,12533,15192,327
67,-17389,-32768,-6620,5017,21687,-32768,3083,32767,-32768,26328,10352,26214,8680,32767,-28880,30185,-4974,-21158,-22082,-24327,-8110,-21547,-21683,15118,-7114,30187,-11292,21551,-6198,-11211,-23298,-32768,-22220,17833,6305,2423,-6661,17023,-10099,32767,-10799,-2652,32767,5720,20256,4854,-20624,32767,21533,32767,7864,-32768,-3017,29156,32767,-32768,8739,-32768,4186,-32768,2413,3386,13075,-7389,-22396,-22645,-10698,19132,7877,-29313,20191,17596,12385,23306,-2954,-32768,32767,22199,6471,32767,-31214,-15827,20921,-3297,-2399,25225,-32768,12241,-32768,32767,3057,27320,5321,-19452,23078,-2332,-2004,-13700,17748,23539,6674,7604,-7677,17047,24503,-18176,-12226,-17697,-7730,-5372,-31819,-32768,-21440,12742,-25436,23626,-32768,-18700,-32768,29775,-316,-3586,23239,32767,23399,-31662,-8120,-29774,32767,11908,32767,11136,32767,4580,3159,-32768,27410,32767,-32768,-28421,-8412,-1144,-9792,5848,1206,-5781,4225,-913,32767,-5581,24157,-23668,-32768,31553,11937,-8027,-2763,8868,-32768,3073,12950,32767,-32768,16531,32767,5824,5289,-1449,-32768,607,23489,20147,-3822,32767,17445,32767,22767,-4392,-32768,-30923,32767,-5836,-6460,12982,32767,-32768,-16471,25263,-19733,-8906,32056,32767,-32407,-3883,188,20792,32767,-4941,-11290,-32768,-26787,32767,-32768,-6692,21957,-22447,29093,-14882,-25407,-5002,5335,32767,25488,32767,-1471,-23358,-27992,4226,-3547,32767,-2417,12036,-32768,-15933,25147,-19433,14091,-28323,26669,-31415,32767,-973,2342,32767,14556,-2774,31630,32767,10829,-3795,32767,23715,-173,-18785,32767,-32768,-4621,16596,-5894,31350,-9384,-19278,32767,-32768,-10367,-8793,32767,28216,-23275,7979,-13501,27152,7659,-32768,3841,27749,1394,-19172,-21649,-10883,12381,12818,-9830,15768,-32768,-32768,-32038,-32768,32767,-22601,-26359,-18270,17758,-25114,-12957,-32768,26836,4912,-13698,19164,-969,13712,2097,-1119,12027,-32768,-32768,-2637,-8149,14851,6486,-32768,-27758,24750,1178,-29048,3972,-30066,-32768,-13539,-2489,-10773,-1802,16355,32767,-15987,-14164,-20879,8171,504,-9427,9267,-8929,-125
92,-672,32208,19441,1943,-14396,-2249,-32768,-19695,-26479,32767,29108,-8632,-18457,-20272,23826,-32768,32767,-23012,-6130,95,6895,-5670,4350,13115,-16528,29200,-15051,-21866,1141,-32768,-32768,10407,-1331,-22623,25070,6408,20026,-31029,3964,-26420,-29135,17710,-2936,32767,24295,-17122,-32768,15592,2590,1771,-3216,-9248,18291,19299,-1493,29407,-32768,22839,-2043,-5817,395,4437,9051,-9053,-15791,-32768,-29910,31009,17709,-31740,-26765,7499,8287,6877,-32488,284,1463,20126,32767,-10297,13589,32767,32767,-5363,-4077,-32768,-7782,-32768,8701,-394,32767,1917,32767,-24039,32767,-16876,32767,-3203,8339,31976,32767,-13703,32767,32767,3829,23748,24759,-6513,-16650,-26584,-2451,2182,32767,16578,12486,21659,31110,8795,-32768,32767,24990,23196,32767,-6324,15495,-6885,14263,8372,2927,25771,-10847,-24600,-19710,-6296,5176,-32768,16015,-12302,-21580,-12257,32767,-32768,32767,-8468,13268,-25017,7776,24355,30561,-27624,-9960,23373,-31382,29225,-4399,11678,32767,-17295,32767,-1462,-5544,26496,-14411,-3531,14366,2319,-32768,3987,-17617,5165,-2678,-7816,-26733,2582,17456,-18755,-6454,3617,-32768,32767,30217,-9170,32767,32767,23410,-11706,-32768,-24406,-20143,-32768,-12560,-10252,14178,-32768,32767,8934,7680,-3158,32767,-10754,32767,20287,-9282,-18896,-9052,13830,-10462,-32768,32767,28210,32767,21512,32767,7937,23083,19352,10933,26708,31644,32207,-1567,-18103,-9406,3722,8122,-32768,20422,-11015,14397,-32768,-17132,23347,32767,-23974,31545,-19541,-32768,6589,8930,11807,9680,32767,32767,30608,-32768,-5094,28864,20930,3030,-27368,27118,-12780,32767,-27615,17271,-32768,-29521,18719,9093,-32705,8724,23576,7535,32767,2768,91,-3590,-32768,-5506,-12692,-32768,-23341,32767,10263,-6148,20313,-18326,777,26213,6392,-32768,-6009,32767,6588,-222,-8065,15997,32767,-15204,-19395,32767,22336,-32768,32767,2908,-12815,-1709,22405,32767,-28935,-24752,-1968,-1689,-21255,32767,-13118,-22982,18879,32767,8098,-14462,-10263,1157,27825,22211,-14895,-29010,-9534,8061,-18270,3664,-12510,10297,26885,21880,-32768,-14
455,-5919,32767,1574,3872,14036,-32768,32767,19651,-16287,-32768,-12916,32767,32767,2214,-21944,679,7822,13729,21095,13897,-9222,32767,-2604,25776,21871,-32768,-27627,-32768,17221,21558,-6224,-4825,32767,10453,1367,-29149,27240,-16915,32767,-32768,-3898,32767,-2571,-30821,1343,-5441,-9908,-32768,-10871,15212,-4122,-15891,23914,-25735,-6738,-32768,7783,1469,-32768,-9086,-13114,-13943,8108,-7987,-6134,13068,23518,-19682,32767,-31509,-32768,-21084,5764,-32768,32767,25321,32767,9954,-13131,18912,4965,-18512,500,3865,-27984,-3651,-32768,-30284,7906,-24810,387,-32768,20033,-14306,11156,21884,29975,-14901,1661,16824,32767,32571,-5832,-22299,-3138,-4162,32767,-26729,475,32767,-25047,-19447,32767,-21414,-32768,-11940,-26924,-32768,-811,-32768,-9751,-11314,32767,-32768,30866,-30115,7737,32767,-11302,27654,-32768,2276,-5529,-32768,2294,-18319,-32768,32767,-22296,-4882,23841,32767,-11763,-11621,8094,-16132,-16428,12816,-31217,8225,-4757,-4960,2841,11265,-24142,-7766,-25488,-16015,3032,14392,-32768,-32768,-32768,-12363,-21001,-14342,25215,30723,-26958,-12708,32767,-3223,25803,10770,28510,7617,489,-32768,32767,-2368,-32768,-32768,-2231,-23112,-3319,-2702,-8756,6620,11532,32767,6607,-32768,-22174,-32768,27319,-32768,-32768,-32768,32767,-5966,6457,-6291,10779,11916,-32768,23477,32767,25699,-21206,26860,10384,29947,-8360,-10338,-32768,-15421,32767,-21005,-32768,-32768,7505,22862,32767,-22533,-2897,-32768,32767,-22404,20609,6753,-3011,-19663,32767,-17189,26503,32767,17319,7443,-5502,-1251,-6760,-32768,21967,-28034,21289,4301,20975,4613,-32768,25978,32024,30493,-14670,21966,-3188,6869,24727,16583,-10839,5853,-2709,-32768,1561,29245,-9846,-8879,32767,32767,-28632,-27162,23388,17062,26358,3300,-5424,-10855,19974,16195,17429,2011,13177,-32768,-15534,-24929,32767,9860,-1736,32767,4225,-23691,10392,32767,12186,-32290,-32768,20001,16233,-7780,22299,26875,-3537,-19056,-13183,16034,-32768,21379,14144,-19161,-6828,-32768,1447,8717,10459,-14720,4545,-3134,21889,-28508,-6559,4371,12509,-24180,-3
668,-12751,4398,-18757,-18656,-13990,3620,-13306,-7934,-26415,-32768,-32768,-32768,-32768,20772,13786,-7731,11931,-9311,-19491,32568,-21970,-17550,3104,-32768,-25406,-14916,3724,-17280,-2788,32767,2153,-13295,3070,-32768,-6879,2810,-14513,6145,-32768,21123,11741,32767,6626,7248,32767,15783,-3665,-19419,6520,-32768,5552,12093,-32768,-32768,-21972,3243,-32768,-32768,14199,32767,2079,-3836,-32402,-17,-32768,-14534,-1633,-7862,8597,16329,-24357,15171,32767,2751,19469,-32768 diff --git a/tensorflow/lite/micro/integration_tests/seanet/add/add10_input0_int16.csv b/tensorflow/lite/micro/integration_tests/seanet/add/add10_input0_int16.csv new file mode 100644 index 0000000..a1d4da0 --- /dev/null +++ b/tensorflow/lite/micro/integration_tests/seanet/add/add10_input0_int16.csv @@ -0,0 +1 @@ +12987,-21463,-6652,-29486,24216,30348,-29998,-17736,-32699,-26839,20840,-31811,25535,-20320,11175,-23672,23031,23197,31822,-23900,-17316,16261,21278,-10194,-7370,12761,22843,16161,-29464,-30931,5779,27608,-5509,6437,-21472,-9707,10451,-6552,-18189,8589,2083,-20240,26261,-29527,-23920,6758,2359,-18021,-20707,12276,-25824,-19679,-21694,-11787,21406,16852,-1104,-26143,10861,24438,8269,29461,-28381,20106,-9941,30254,-13791,-26996,-22692,7233,3139,-22872,-5590,16926,26086,-28443,-4200,16192,-30760,-663,-6246,-22382,20333,-20116,-22109,1736,-19057,27552,23187,23975,-4585,-21519,13089,-18106,29337,-31034,31425,1745,4666,-629,-32446,26955,6466,-29736,-19095,3061,-3414,2301,9740,-2646,-15152,-898,-14464,-28104,-25592,2084,-688,9251,8143,-22466,-10521,19690,-7903,-3119,-1525,10372,1072,25059,-8855,28355,-17292,28070,-11550,-26918,29066,-19920,22160,-1940,-23274,-6238,19389,26208,-20932,27776,22665,-20842,-19505,-20450,24379,-16631,30454,5622,-5015,-18732,3968,-1118,14363,-250,3105,-29784,-23969,-29192,-8575,19556,-29352,-14567,-24161,15992,18052,-6348,5967,-26883,-13086,15553,15708,20863,11164,31363,-11630,9351,-26102,-1514,26268,14776,-17273,-21474,-6950,-8131,-16764,-3293,25363,31870,-31277,20769,1
905,-2828,10730,-15302,22772,-21094,21363,14923,30810,20395,-23027,28266,-15291,23153,-4178,21768,6110,-27424,-7666,-18262,-29375,27212,7546,2128,8414,28829,20418,2081,7594,1145,-20662,9782,10588,-31458,5165,27942,-11157,18298,-3148,-24312,21493,-25015,-19259,-14071,23364,-27764,-32488,5933,-4503,8465,-21335,26793,15873,-8282,20337,29043,17769,-7124,-12534,25676,13388,35,23645,-668,-28738,28686,23169,-22070,-29156,7273,-18527,-1397,23546,4026,24593,1055,-14959,13387,-32448,-8247,-19066,-22161,5095,27757,23824,-29252,-22060,-13390,-14892,-10741,-11782,26720,-32671,8642,-16101,-24333,10918,-8695,7050,5959,-23868,16640,-18926,22059,-32109,-3856,-25593,12596,548,-16862,-8639,3120,20344,31608,-26812,-7812,27183,-3302,4517,-19789,27595,-27506,-14335,-32667,9408,21703,5645,-2679,-6047,-2643,-11880,-30843,-12887,-2569,6718,-31423,-6924,-23469,-19296,13461,10671,-28895,-23599,-2773,-17368,20374,11269,-21357,23563,-7511,-937,9420,-10816,-31701,305,-24385,30618,31341,24085,-23507,-26842,-17208,31179,-11501,6196,-9921,-13103,-8075,2577,24909,-22945,27241,3613,-29530,15182,31329,-9951,14649,-27992,-5535,-8039,8773,14092,-31200,-9090,10245,-25423,13931,-18044,-16382,-25141,16267,-3078,-27169,-9061,-17561,16279,-7816,20583,2649,17954,9572,4772,9956,3467,8273,-14180,16464,1561,15129,-18408,-29789,16305,29783,-11761,23940,-28719,-18915,-26011,20968,-26193,-3194,25936,8699,11306,-11856,-13252,15781,-17924,3158,8638,12681,-9531,25478,-30823,17738,-1562,6036,1451,8491,-15298,-13742,-21947,5530,17022,-8430,-29532,-8352,5906,-29830,876,-19313,21098,23182,-6190,13196,22781,8616,-28882,-399,-25120,25487,-22082,22712,-20660,-15757,8437,-25898,10836,2048,-5419,16446,-6422,12031,-16313,-12818,30424,-13756,25604,24470,9247,-9156,26259,3675,393,13423,-30557,-6570,-6252,-11656,-11425,-18260,13405,29426,32763,22748,-7602,-19135,19182,11845,-19229,5090,-24912,5796,-20287,10926,27934,-25003,27888,28616,-886,-24297,-11299,6304,-9069,-26784,12084,27432,-19510,25099,10612,-14955,27986,5356,-10037,5634
,-2200,18658,-7365,13397,9590,-15377,-2917,24341,26708,10772,-13259,2948,-21581,15348,31436,-19725,-1144,-9651,19059,-28306,-20680,16498,11970,9199,26957,2033,13142,19267,-13228,10697,9221,-14474,17758,-26062,32029,-16231,14660,11705,-3760,-7095,-31418,5109,19955,6829,14148,-26773,4170,2738,-19580,-1380,-10946,22503,19730,-3411,15281,-18808,-13846,-32014,-32498,-6091,-12900,-31743,22691,-27813,31833,5119,5916,12744,25152,14376,19974,9798,29717,-4897,24908,12808,-31256,18271,6237,-6442,7692,18115,-658,6580,-16656,14101,-30979,-27687,-10647,16274,3382,8555,-7834,14229,-20493,15040,12902,-5582,31623,-18806,27841,-5756,-13290,11167,-21225,-25756,-27569,23725,11952,-25703,-6220,28148,-8226,24062,20002,-30304,-28596,-24458,12568,-24007,12683,19643,-22666,6493,12833,-560,7320,-31399,22165,-27783,-30451,3050,-4671,20884,14819,8621,32677,17296,-23947,-20142,-16222,31511,-11795,25613,30793,10701,10311,-6262,32544,2375,-26004,390,-27880,26731,-13008,23842,-4353,23106,-954,28769,-8651,31030,21542,-3922,28935,32306,-31472,-5974,20993,-19729,17555,-12972,17068,-9680,30145,-13477,20293,11848,-2481,7863,9048,-13297,19654,-12388,14111,-9752,-15779,-9810,-28671,-24030,8869,-12956,-6406,26677,-7953,17654,9192,-6380,-24758,-22351,-2245,-351,-9541,18755,11159,-28441,-10656,-30296,-29711,21416,-25591,-27015,-8974,-2818,26352,-10576,-26769,26778,24411,2445,-20261,-25927,-25822,21374,26561,12208,-18730,24284,-29702,2995,-5328,-2332,-20249,25760,12169,-1747,-22406,25253,25856,27201,-21858,5005,9593,3099,-32425,-25411,14629,-6509,-29709,21957,17641,29317,-24477,24202,28822,5722,-10345,-14216,5185,10773,30136,19650,-9080,-15271,-3823,23966,-1110,-9958,-24586,16509,11121,13893,20565,-18434,-30690,25686,-6059,23278,22634,-21430,-20161,-28824,27750,31206,17323,-3707,11641,-25299,12943,-19051,864,16232,-27708,32599,2847,-31863,12298,24847,-9700,19485,30071,13353,-8191,-5189,29479,2904,-934,-832,24976,-29148,11420,18921,19915,-10816,1434,-14446,-28298,17212,1518,18546,17431,-3206,-10094,17464,2761
8,-11424,953,-17241,-5565,26429,20619,-19713,7608,21968,28842,22380,-6614,-6646,-29364,10744,-22765,-5367,-9197,-30169,13111,-1819,5099,6293,-13962,11750,-23626,25483,13347,-29095,9566,13735,-5231,16686,10209,-10307,14636,6953,6092,-10426,13507,5239,13871,25640,-26013,-30211,24583,32367,17376,-16660,-32752,20932,-1728,-29045,-22032,22229,-20423,25459,-26147,-28664,-2419,-19106,-8491,-19352,-27248,-8140,-21181,14306,19787,14575,-29624,22099,3520,-9646,-27478,-6833,4622,9075,-20955,17331,-17752,2682,14501,-5815,-20797,25635,25580,31497,17870,-27083,-31590,-10638,-21708,-29337,26313,29419,-6380,26816,26493,10368,-20681,26940,32571,30456,22925,-9241,-21684,11193,-7802,12705,-28932,-15306,25631,1266,30102,19970,9765,-2464,-24104,-16873,-25062,21210,18117,21616,-15340,-1782,-1494,8908,-8801,19068,3484,-23887,-26944,-14541,23984,-26044,-22206,628,-11111,-8211,-21448,23599,27372,-9482,-5520,-9816,12501,-13125,-29419,-30639,-12668,-32691,-816,30915,7165,-32291,-7794,-28159,-31795,18140,-8704,26156,23449,-905,-17500,5114,-8044,-25064,-6019,24153,-14201,9064,-21294,-20929,9456,5404,-23148,19240,-27407,27143,9906,-4236,16783,10678,15455,-22040,-7584,-30920,-25311,-19993,-31345,-7746,-27862,-23250,-25454,189,-4179,26205,19783,6784,-30054,12033,17927,2768,-31689,29596,-12155,-23570,-392,12869,-15483,-10371,16838,19813,-22273,23752,-24764,23569,-30032,12251,-16170,-17607,8662,25052,-14951,32487,-26917,20302,24670,10601,27835,8204,-28345,-19682,24496,-15065,-29166,22653,-2624,-20736,-14716,20981,3759,25808,-27322,3361,-31740,29467,10585,23927,12190,-27276,-11656,-30590,27349,-28606,13351,9474,28175,-21970,2140,-28618,-31745,29210,-18677,-11348,-26985,-29358,13276,25912,7290,-16318,7393,-13537,-20200,-21958,30700,11737,28085,-5373,11728,-18129,-18154,3767,9960,24175,-17820,22437,-12656,24486,-24675,25244,5453,8060,-24611,-15382,-15014,-12485,-10009,32346,-25141,-23502,1771,16759,27378,14919,-25714,-8285,14622,16127,-12496,-14617,15309,20767,3340,14904,19114,26309,-30857,-21192,15020
,-26284,-11391,-25065,-28069,-12098,-31300,-8947,26590,10631,-10941,24434,16056,5243,-17797,14374,23027,29656,15967,12171,28698,-16480,-21547,-26663,-323,23278,-20231,-24811,9835,24762,-8256,7762,-29181,7059,-266,-4357,15821,-30908,4431,30180,25118,3418,-32235,10268,-26734,17708,-18104,7957,-25466,7414,-13573,-12177,-14663,-18945,6556,11954,87,2140,-22375,-26383,2292,-28097,-18779,-9906,-18308,-3147,-13206,-30051,-16944,-15288,4147,-8650,28198,1631,325,14429,9587,-10038,5677,3553,-24921,-19429,12773,9851,1751,-4044,12204,25947,25280,14875,23557,14237,-12986,22889,-31091,30741,-5494,21583,31895,22130,21969,-5277,23843,22192,-12201,-23388,-1195,-27919,11222,5585,-32041,21485,19624,-19981,5475,27915,-28948,13174,30965,18464,31686,-3964,14535,-24883,-20027,-5485,-27558,-8545,17908,-15716,30805,-5200,-9824,-9721,30650,15962,-15154,20618,-7937,9080,-17418,-16051,-1702,32584,32465,10881,10811,-5590,-16058,1236,-1775,-32065,-9168,-8069,-11166,-24370,25251,7213,13842,17857,-1430,-7462,16725,14079,-12965,28923,5545,28107,28208,2358,-24170,20532,18229,27473,15608,15994,-6495,-19100,-1178,6949,3293,-11629,-18014,12743,21016,-12376,16057,10820,-13372,16137,18603,-15150,-6901,-27700,-30437,-13541,-15849,14944,14431,-20551,26156,-23302,-2565,4528,17879,-8572,15697,-26048,21274,-15588,26361,14649,1556,11626,13968,-27371,-10798,-2627,-12131,3919,18522,23087,2650,-5844,-8249,-31425,-23970,10901,4674,8022,10124,-17840,-30050,28220,-7798,17523,31377,8833,-18855,-21465,226,-29008,-26899,3917,411,-6211,-7250,-19191,-21965,-1869,15343,-296,25914,30845,16754,15070,12281,13474,21811,-23570,-8464,8422,-10910,26518,20867,-20181,3073,5651,254,5874,5728,-412,-31620,8289,29770,19923,3998,16295,-32157,-5074,-1168,-32211,266,8942,18248,-32738,-23825,11281,29500,-15538,-12845,29358,-11594,-16131,11149,6517,12646,-5587,-20351,10161,-4076,-15921,22286,-17917,-15365,-26924,-21043,-7987,30433,-24979,25798,8591,6085,-12904,19121,24215,-19665,-21253,-20121,-10491,1144,-11726,7478,-21731,-6953,6304,12710,
282,-22666,23804,27199,13133,-1533,18547,-17444,-26837,-20599,-16288,26811,5760,-27597,-7348,16929,17635,10971,9738,-20190,24562,7169,15933,9905,12267,3908,-5620,28139,-28503,-14658,-10556,-5567,12633,-11574,5367,-13829,-25431,-668,-28196,9998,2779,-22839,-19257,-23485,-28336,-22089,-10832,-4080,28983,-3665,27902,-29490,-22249,-29291,20224,10056,18341,2600,-17162,-28518,-17263,31353,-26689,-29442,7797,9148,4147,-30368,12245,-12424,32215,1740,-27520,-13894,-2526,-21756,-2169,-10532,716,21760,-6066,13477,-13980,-9109,-11494,20681,28695,12710,-21543,21725,2644,20375,-16253,182,-19630,-28272,-26176,30940,22488,-28084,-26476,-727,30344,-19499,17436,-28660,7121,10974,-16295,15545,-29676,-1809,12077,30889,-17206,-18178,21732,-3058,22974,-25880,-22272,-10811,8787,-14054,-19032,-23271,30842,-25927,14690,21131,2607,9399,-18656,-1732,25045,-28750,18786,-18451,704,28693,-16711,-12796,21735,3620,-24796,-3078,17064,19072,-27633,30643,-24942,13836,-12748,-15421,23271,-12733,-7121,-2319,-5874,15328,18940,2248,-26807,16604,-8225,-32394,26543,-22763,-30866,-11152,15638,5510,-3806,9705,3641,-30475,2998,-4819,7142,31841,-28380,2842,-31520,-1841,-28274,-24518,-32391,30191,2487,-26302,27015,-8982,-9046,712,-953,-6143,-11785,12510,-19942,-16978,-27677,18582,-26750,18009,29442,23812,-10392,5165,3415,3789,18171,19141,28510,8075,24387,-706,7239,-15350,17965,-169,-2408,-20105,-17038,25696,-22342,14257,29560,25847,-5832,16613,22410,23540,-31456,-2360,14983,-19432,11168,12325,10518,-12818,12069,21999,8184,21565,23180,-26939,14559,-5272,-30781,515,-14616,-3688,3287,27812,16021,-27409,20708,-17057,1723,22425,19581,6811,5175,23353,3047,3932,5422,-31907,8277,7087,5564,-14178,-10422,23342,6121,24251,-2906,16600,-8250,-16635,22418,13368,-3815,8462,-14789,5507,-15828,27597,-16898,27504,22445,-2848,-30204,27132,8398,7374,-10108,32280,-27777,-30058,-577,-17177,-17654,-20239,21326,-7103,-9206,-1361,3033,5493,-5139,-26912,-9700,-30907,24370,-11184,25608,11505,-31187,14814,5028,13926,-8188,-18429,11124,196
00,-20692,-6688,14776,29840,-16865,3925,12322,-14974,-13304,-16291,-31941,20077,-31836,8166,2249,2054,32658,26880,-30396,5959,-25298,-7956,1202,8430,7779,30406,-443,-29228,30574,16783,-4545,30059,-15160,-29809,-21452,26336,-8497,-11867,11628,-4282,-19548,19041,27551,-8256,-5507,30110,-31618,19051,3698,11519,-1050,-24247,10403,-3965,-5597,-21995,-19063,5780,-32098,20839,18990,-12429,-26965,32259,17827,-20180,-30959,-16680,14424,-10919,24614,-3873,-14374,24333,-26839,14972,-8781,-2484,-16517,-3362,3636,-29288,20164,19219,-833,18066,-9996,26271,-27211,-355,-4602,31997,-32309,-10015,10992,6222,-28623,17137,15031,-28909,10465,2026,20016,-23585,-18285,5530,-12887,-8617,-21185,24796,30314,14975,20083,-19364,-30516,-2458,-2917,-22480,-25899,-17282,-29970,31008,-20718,-26321,-7595,22860,9542,4759,-26306,-30389,664,27028,8865,-2622,-12098,-19707,10785,22925,32068,31852,-11737,-11523,25534,10669,-25740,20515,7347,27925,25571,31341,12428,-22640,-26184,25109,-3277,25357,293,-21231,-27314,10334,7434,18867,-5458,-7220,-31028,-25035,-30965,16386,2492,-14321,-15599,-647,-8384,-10593,5195,-17281,-23735,-7731,15487,17780,14026,464,32684,30390,-2820,-21364,21696,-7755,552,-7757,4462,-17773,26906,-25196,-27551,17296,17428,-23949,25560,15002,-32657,-18719,26063,20621,-6523,31607,27479,-30042,12127,31515,-1395,-4008,15310,17206,-15099,19545,25599,-17876,-28864,-19989,-11087,-10750,-23056,-13335,4403,-30622,-25637,25592,-8814,5778,-29302,-8161,14346,11145,-30639,6549,15926,-20440,9092,32166,-31656,6464,-22680,-10916,-10832,16039,-23925,23925,-2496,13365,-8088,-21624,-2023,17441,31581,7724,-9531,-9628,10630,12865,25104,-31092,7523,-15610,9550,-8801,-9862,22231,-18348,-21655,12736,32102,5399,-16108,17766,-17415,-10038,16500,14258,-11676,-11268,30453,20099,-16227,-23371,-31154,29857,-23109,6611,10276,27516,-1234,7970,5685,26302,-2024,994,-11463,25388,82,-28161,25173,8071,26279,5522,-30818,-11105,-17397,-3437,-32550,29231,-23610,27792,1518,-7273,24566,6808,13746,-386,-16412,11917,-20913,20680,
-6897,27661,-23546,-2371,-15234,-9203,-1502,10648,-5374,-10,-15147,-16194,-16587,5261,-22942,-16551,-17615,-12324,30511,7703,-13095,10566,-1919,-10581,8875,-5223,-18837,21378,16246,-4778,532,-23678,28440,-24242,-30276,7803,9458,-20416,-1703,4706,8331,2247,25589,-31063,-5153,-23263,13857,3723,19530,29931,-6561,-24507,27359,21553,-18200,-14420,-9394,21484,-16515,3118,10900,-19762,-16856,8738,15471,31113,3914,264,-4494,22387,-3949,20079,17108,-14874,-16324,-13191,12202,-21873,12135,6349,-13186,-31278,-32040,-8738,27257,16389,32386,-24756,888,-510,-10392,19685,-7874,-5452,-31053,24496,23993,7061,18588,28523,-21677,-29475,-12582,13750,-11182,26268,25332,3931,29288,6907,-28385,-15004,769,-8085,29588,7390,-4801,-5820,29735,20076,-15088,13489,11281,19248,6837,-21913,23295,10973,-6732,9326,-24133,-11398,-15339,-29783,1395,-20262,-22957,-24512,-9249,-27928,-23500,-13986,23838,7081,24904,-5329,18601,-18817,-19111,7624,-423,10722,25300,-23791,13176,-3685,24947,26221,-2232,3307,7348,12610,19635,-31568,-15176,-5517,750,2898,-15839,13015,-23100,16569,23112,14715,30250,-26220,-5185,-7218,-29816,-26123,-16945,-11405,-2876,-14954,-1390,31665,-15884,-17869,-11940,13467,262,-13697,7259,-3304,-2320,3186,5842,-21650,20444,-15296,-28092,27350,-5500,24071,3905,20115,13265,9929,18501,24434,-32481,2193,-9984,-27817,-458,-9522,-691,2602,3750,-19849,-16488,-28475,20697,24112,-25838,-15342,6469,-5620,11820,-14580,12810,-30542,30900,-8166,532,-4520,8012,-22065,-30532,-2718,10499,11616,26508,-12647,9448,9845,-30068,27452,-11872,17268,-27423,17950,-19422,16500,30316,13470,9493,-53,3412,-16554,7037,-2479,-14330,14844,5820,-12955,6537,-2777,18419,-21042,3257,6531,32473,32548,3681,31918,-5850,-21928,4513,26728,-26337,14943,-7596,19137,24923,-23316,21820,-26904,8735,-18674,13314,24988,2081,18055,-602,26984,25032,8836,4123,-20398,-15478,18863,-17234,-17419,-1300,31281,-15918,14788,-19725,7914,-23429,9536,-4955,-8837,19764,1606,-4276,14078,-30515,-7235,-28278,-9876,-29669,-18543,-8463,-12908,-865,-5747,
11703,9347,-7969,654,-28466,19101,-12486,-16261,5408,24501,32766,13470,-18343,7132,30636,-30081,-1523,11597,21388,-17622,2165,-1651,19026,15380,-12319,21676,-21929,28799,28737,-23390,32266,8691,16940,-29805,14651,-16735,-24829,-21655,-7043,26535,23671,-10552,9674,-20516,-28949,31484,25804,-32214,-10431,-25663,27430,-14995,28722,-4856,32639,-403,-23364,1130,13183,-25878,25525,31518,3766,14595,15334,-7646,10818,14237,9305,19608,-23336,23785,-1980,-30460,-31404,-13666,-28059,-25352,19352,18865,-27931,6547,15829,8049,32658,26127,-46,21838,-9934,22832,-18293,24309,-11689,-14388,-26834,21593,-4087,-15672,-19545,21355,-27196,16699,11534,13623,26081,28581,-24086,3950,22715,-22852,-12232,12199,-5686,-4519,1370,27682,-14780,-1836,-2303,-13421,625,-6136,22119,18802,-31029,-8360,12540,7938,7358,18728,22408,24024,-10366,15142,-30450,17460,-11939,-24317,-18206,-10802,2060,20609,-26662,-20393,12557,-19790,-23187,23929,-8279,31881,1123,-24062,-27924,13014,4422,18696,-20385,22533,4754,-22984,-23638,17306,7892,-24226,29463,-8353,-20990,4082,9135,-20462,6049,32208,3948,-22131,-916,4726,26145,-26991,30128,27082,-27639,2990,-16153,32213,-17292,2386,4030,14248,14071,14469,21807,30354,29616,27472,-18741,29689,-8756,24987,14027,16699,3883,-8557,-4142,21211,-4666,-26270,-31435,3914,-12268,-27197,-21424,1283,17075,3448,-25466,-17103,-19874,13670,-11872,-13819,3726,30898,32368,17762,24570,20626,31841,-13140,-3301,-6333,13270,3445,1338,656,9491,-27102,-3662,-9897,25948,7905,-30574,10013,32135,-29590,-14446,-19034,11415,3349,-8600,7979,28596,-17486,31252,16414,-27770,-10526,-3038,15889,-23095,-18211,26736,-16706,1316,-21165,22343,-406,-22334,2171,-18299,6947,17591,-25279,-19530,13272,-10553,-15959,-24790,-27865,2843,27151,12156,-30531,-21878,-6802,18801,-31938,-13286,-3732,25439,-8004,-24896,14251,25894,6338,-1263,-17768,-7845,-27296,325,29776,-6183,-14987,-14327,-24353,-3434,-7160,1728,2892,29350,26479,-10635,15099,-16000,27854,7858,29874,-17745,26979,-12727,-5290,30061,3089,5407,3569,-28296,-
19822,-24835,-14568,5078,12848,-25266,28547,6676,24894,848,-26272,-5686,-20791,19806,-22731,4218,-11008,-3673,-23577,-30962,-25571,-29532,22506,-21170,-3986,-955,-8011,-6672,-1627,-14808,31364,-31984,27256,-5702,23959,31494,-10407,24687,12164,-11400,21938,1265,8958,-24830,-20396,11106,-26039,19029,-5415,25705,2464,-29852,-10049,-30535,31073,-28707,20497,-5992,24813,23283,-17081,-15178,-18023,976,-15661,6801,-31915,5761,12775,-11483,6847,-7327,-1275,-32270,25483,14823,1865,12026,-2374,-29398,-10572,-9748,-15830,-18841,-31908,19220,7055,-16576,7398,29159,11854,25304,-3114,-29715,27580,-5982,30586,24304,5044,-5858,-17707,-14645,14286,-25139,14916,30483,17915,28868,-3893,-1523,3085,-26675,24582,-19076,-1673,26691,17924,-9078,18188,28856,4991,1883,-27481,1929,7342,-31196,-26490,-9710,12001,27460,20129,-30540,-9470,-23115,12049,-21851,388,-26011,2064,-6375,-9222,16243,31456,-28278,13897,10069,-7708,-8919,12458,-16502,13545,14849,-18804,-23850,14512,31396,-26957,29717,25442,971,-28532,-17627,-31307,14941,2009,21154,6045,22943,24470,-25955,27032,-28134,-5687,31631,-26809,1205,693,17157,-25856,19175,-28789,-1818,-12354,15476,-13271,-14208,-15942,-27618,374,3972,7448,29255,-9517,-3064,-24883,25614,-9264,25839,7269,-17678,-19474,-18900,-21152,22986,-2071,1183,11836,8520,-31657,-7242,8179,-4039,15953,9795,-17205,-28487,5752,-8310,16220,24649,32253,-3503,18567,31978,-9555,-4640,-14668,-20760,-28047,-26739,16909,29595,13316,32416,-1614,19830,-6661,-26147,12957,23457,9785,12850,13109,-11400,28249,10559,-11945,14695,-29333,-26803,-13222,5663,31751,-4405,-19614,-5386,-14910,6582,15641,-4197,24597,-16819,-7735,-28228,-21615,-23758,-11406,-31874,-14335,-399,-32284,-974,3816,-28610,22354,25988,-26558,5774,8242,-24400,-23701,-6950,-3321,22942,-28444,18948,27019,-4043,32291,-6109,18852,-24732,-20168,6711,22036,-15404,-6112,12979,4988,2212,-7966,28880,20173,14235,-5237,-1686,-9702,-16555,5252,-7946,14349,-9015,16196,19783,-28639,-26439,-10608,9077,-13725,-8252,27159,-20304,-28525,14714,66
98,-10369,-22627,-25904,15127,-12970,-24166,-23064,-5209,30143,-31407,-5373,25445,-13148,158,9399,-1767,6654,797,-25992,353,-9627,-30066,-5615,4289,-30820,30843,-17324,-14971,-31908,-16784,-14064,-13808,-24725,13890,22077,17635,-13134,-9554,11618,-31137,-6879,-23366,28197,-5478,18445,-13259,5864,12059,-662,9110,1585,3914,14822,15136,17446,-5772,-7141,18661,-23300,-32555,-14311,3417,4763,15772,30501,-1182,-25992,-31540,-28849,12092,-7667,22141,10631,-3137,-4428,22193,1587,21429,-21174,-30393,-24434,3723,22364,680,4674,-25174,27309,30448,19628,-17750,-32545,-445,14621,4683,-2966,30620,-8865,6652,-15216,-17144,28307,26827,13053,-22475,9521,-28216,-2689,-7372,-221,-7901,8333,-28751,-6909,19719,-17726,12182,-12046,-8876,4413,-591,-6922,2425,-18440,5904,-1717,32288,31336,32601,-19291,-3082,10200,-3632,-17935,-1290,-13379,24476,5430,2334,30458,-20492,-16889,470,23661,-11278,-425,6581,-890,20321,-16278,25331,5231,5505,22970,-28579,-25179,18784,-3614,14264,-14522,31220,19020,70,31693,-14697,-6615,-21144,7289,5923,-4148,2063,24131,-27589,4977,-22137,-5920,6791,27451,26203,-12690,-6265,-3961,19200,19915,18718,4257,12708,-24209,-3067,26968,-15477,-11419,23290,-26560,7549,-27463,4493,1909,-12787,-7608,-30271,10484,-11928,29070,-28074,-29655,-10787,-16365,-9546,29478,31243,-26225,5413,4338,16299,1831,-19341,23817,-31097,28815,3700,9820,-1711,-23180,-25877,15011,-2961,13455,-20281,15707,-14175,-12592,21066,-16828,-24101,-6338,1694,30377,-18767,-5406,-32124,32325,-15678,-13829,11919,-23292,-8638,-24017,-10283,30749,-2706,-17153,28228,-18506,-17906,686,8975,-3027,17883,-7793,-9064,30060,-9241,2990,13058,-9169,24836,32235,756,-21708,1648,12564,17419,-29928,23261,11584,11577,2619,-22725,22709,-22843,31588,-15342,-21306,-19780,-21044,2489,19932,-11722,655,-10097,20631,21034,-13500,30246,19501,27572,10522,30405,-15886,-6706,-25362,-8306,-15967,440,-31370,-7336,4357,10627,-26857,20284,-15511,26392,-20538,20684,32354,12855,-520,24872,-21123,3169,849,-22774,-19926,22337,10852,2065,-16882,2
7690,-12026,-2192,20216,-27050,31901,-18805,-15524,14853,10535,-7790,-1609,-23357,16347,-10588,15781,560,-2194,5904,29234,-12523,8128,7118,30891,14870,26871,6926,13446,-10837,-6927,29670,14776,-7301,-14795,21439,-5502,18848,9,10137,-61,24398,-7670,-2980,17122,27,26224,16823,29461,19613,-3230,-9517,17150,-30402,25612,17499,29147,4162,26385,12198,8996,-5078,-25096,6341,12522,-6455,-29920,-28516,-31315,-17221,-29670,19405,-30134,-8512,24871,-32703,18375,-30181,-30251,-7257,-28145,15578,-29026,-27628,28182,-9252,2082,30518,4547,-4130,-14451,10135,-401,6613,-22995,-15079,18140,14497,-10429,-16757,32076,-23742,32730,-18222,-20430,-27555,-25439,-8407,18440,14528,-14590,-5673,-8538,23998,-708,5019,-10010,-19427,16044,-18055,-7686,-20810,-15880,-31001,-10114,-9752,-16666,-1369,-178,-17267,-11995,-13549,-21544,22710,-1082,-30938,2477,15041,24488,24063,30246,-14806,8446,-14116,-16036,24221,-21738,-29388,-2177,-17401,-6826,-15378,-29846,-5136,19131,-3951,-25687,2476,22246,6241,24775,32645,-29703,26019,1787,-17148,-8330,6406,11577,4210,-22833,-31544,2736,-32627,-29073,27461,3930,3959,-26262,-4264,28310,-22232,8890,-23376,6275,28881,-8663,-11953,13863,768,24330,15395,11687,-16988,-22082,-2901,-17469,-30567,2229,-4525,-27167,21251,-11420,-26618,12017,-12977,-8584,32355,-20286,-11489,-5141,-17326,3859,-18054,31758,-16802,25382,9237,-2363,20811,14236,19513,13003,-32245,21375,25781,-17359,-4276,8351,30710,2967,-14811,21630,-22211,20425,-1299,-19909,-20806,5859,9689,21404,32066,27305,-4871,20272,5626,-23859,-9551,-19901,-20503,-3673,-6659,21710,9285,-21051,-23270,-12837,-5562,-7761,19404,5573,18528,22429,9369,-12128,27579,9481,-4335,-14801,-29270,-21561,4982,-6753,-32145,-26805,4256,1553,-7851,-13633,28250,4256,16518,25147,31074,23951,-8341,-4167,31632,-26728,-8513,-17492,-708,247,32408,6995,15391,-31189,-27097,29195,14024,10663,-13036,-16264,-21423,-19337,25935,-6774,26496,11922,7292,27764,6505,-19929,2018,23607,24809,3584,-12185,-7196,24505,-29900,1139,22403,-4448,-11128,-23502,-241
52,11021,-11321,17931,-30713,-5712,-30423,8982,-614,3150,-11769,-11068,22482,-24437,-7806,16303,11463,27386,15640,23076,9128,-17572,-26797,14982,10988,-13174,2665,-10382,8579,-21261,13918,28852,18482,-19430,17040,31462,-21665,5201,3089,32006,535,-31229,-27341,-27422,-14453,-29827,-29741,-3138,-16641,19874,20813,-6835,-26515,18983,-18135,29684,18784,-5719,18583,-13133,1666,21611,-29182,-26473,29963,2672,-30299,19545,1545,23109,-22879,10844,18062,13192,-5747,-6284,31411,17248,11498,-12421,-26504,-32053,10622,-14757,-30428,11336,31771,15993,-26218,-29574,-22290,-6545,-675,10006,23979,634,-16496,24022,2865,-17312,20172,-31863,29378,4130,-17898,-29188,12352,19616,-916,-27228,7493,-8531,16160,-16564,-16233,-5853,14948,-1513,-9951,12973,1065,27448,-5097,4786,24508,15353,10848,12290,-17068,32001,-27694,-4983,-30956,22091,-238,16263,-1718,9366,24619,12782,-12475,17273,-16034,9400,9390,1317,8803,-28178,-11260,17364,-434,-7651,18795,26361,22738,-31586,-9673,-25455,-510,-22375,-30961,-2676,-16245,15138,22687,6070,11933,9114,-755,21960,2954,-25617,-4461,10056,21541,-46,-28367,19479,-23846,-12794,25845,-4869,-18144,20770,25209,-25530,15159,-18684,-14916,-14952,164,-24559,-860,-31050,-23531,-16647,-5796,2138,30660,-13330,16769,-8029,23666,-32305,32264,-2663,20780,-4960,-13623,-25736,-14788,8705,-12110,-10026,26305,31056,18266,-13512,26080,1658,-8978,-28109,-7447,28701,-12867,-25471,17473,-28409,19845,17690,31703,-13980,27262,11946,16563,-5324,-25528,-21301,-25257,23790,-807,-24762,18301,1562,19906,-30330,28019,5468,-30984,29077,-20473,-26051,-25240,-1169,32,-19672,-2959,22036,7909,28680,-15464,-9603,-4661,-7640,5887,24446,-26117,-13449,1463,-6188,19157,-13050,11968,-26244,-7901,7067,21606,-3341,9048,-31519,-5722,-32489,-18431,-24481,-18609,-32192,-27968,-21435,14442,-14076,-2662,-18287,27131,-18770,19920,-1204,15138,-1271,19897,-29657,-29880,10069,-6173,29422,14708,-28854,-32369,-25135,30095,7524,30136,-6620,20373,-25178,-31311,16874,1717,22187,-15462,5700,-11357,-17375,-444,-3420
,-28792,10784,18955,13258,3178,15464,18294,-29204,29407,-21992,5304,9706,-29301,-2323,-3537,-6286,-9045,-21286,22776,6786,32052,-29679,-8943,-13133,-12611,-13417,-27865,-23470,26371,26663,-6622,32459,-12428,23692,23881,23989,-6700,25578,12525,26235,10979,17925,19088,13612,23201,-7993,7723,-10112,-7365,-15798,21102,29157,-22084,-17700,-9319,-21441,836,-7691,-30095,-26196,-3352,26382,-19862,8008,-10589,2614,-21559,-23762,30588,17067,-4653,7252,11516,26546,25959,31789,1226,6902,14996,19006,5199,31140,-4215,27456,-21058,25877,-20994,21445,-14645,-9655,-1189,29249,7184,-15171,2150,-22668,-16307,-7151,4517,-975,-197,-9901,7783,23522,-11096,-31779,-22588,21270,6411,-15127,-17857,26616,-28596,7382,26657,15049,13354,28420,-23283,11965,-9261,-8423,-1758,-1496,13295,-4075,1104,-2307,-19387,23262,-20528,17715,-4610,5480,-1391,-20370,-19695,9268,-8531,451,-18093,28256,-2188,21500,-1121,-17720,30224,3917,9884,-9869,-11008,31483,26394,28184,21037,-20420,-10695,15465,23009,-17473,26708,-14895,17206,-21071,-6615,24687,14610,-21114,-6228,-14734,8295,12890,6660,-30507,13917,1082,-6449,26874,6947,-30099,23926,10391,20498,29485,-28657,-16106,16774,-10773,10077,4451,-30745,18730,-28110,31754,23576,30089,-2497,-23766,5661,19760,1870,-30263,31166,1935,-9931,18828,-8318,11481,8667,-14294,-6115,-13693,-12656,3358,-27430,-22624,-11034,-2120,-11787,1059,-24177,-28477,-28564,27861,-12223,1797,12110,32155,13098,-16319,-22544,-24139,29785,-2797,29375,3307,28576,14196,-15648,-32016,25252,18066,-19681,-28084,-24160,-10041,-28522,13337,22275,-24106,22710,8862,10192,10336,2788,-11433,-28179,32035,1325,-13958,-11447,25054,-13216,10340,-2498,17811,-30511,19189,28659,-16019,16435,21393,-28163,-12743,21154,26484,-5519,20135,-5727,27315,4438,11211,-26709,-27941,26724,6019,16839,12321,28030,-20181,-18043,30760,-26643,3807,27459,31974,-10512,11591,13569,-1699,16971,-24640,2035,-16102,-7087,29962,-31029,14030,31114,-28754,16405,8293,-4666,-5953,8477,29425,22935,13565,-19593,-19798,-13920,-1565,-15987,31311,1
3299,-1102,-26261,5095,16156,-19141,17971,-17709,25976,-5602,26109,15543,-10136,26883,78,11585,7777,32339,30171,18337,17266,7305,20280,3112,29340,-15464,11723,29951,14136,27539,-11146,-3588,19806,-30117,4077,-12618,28450,13833,-256,18252,-14108,2390,-3743,-12753,-12092,12036,-4710,1836,-22652,1054,29444,-3302,8279,18600,-22603,-29613,-13727,-32446,10653,-14788,-18917,-17491,29690,-19120,-16138,-14874,28073,20457,-5386,-4443,10374,26654,9374,926,3846,-12895,-20271,-23702,-27146,10777,16196,-25196,-28354,11607,-1645,-14361,-620,-24774,-10606,-26064,-5409,9043,17302,8610,29258,-27891,-24993,-22593,-11870,-13933,1865,-1069,-8903,-30119,-7449,9484,8489,-9650,-1019,-896,-23137,-32729,-2050,21329,21433,3079,-2575,-27494,4586,-27812,13987,-12253,13410,18568,11729,1910,14302,15231,-15679,31656,-11748,-6688,14466,-19948,-29065,30059,-619,-21518,32262,9574,21527,-15929,-16630,-18911,-18784,20505,18475,29858,28614,-16563,-32655,7608,11213,22938,17996,-10471,11006,-837,10815,26563,-14688,3731,10642,-8635,-19649,1388,8304,1384,-5335,-30473,-32489,5798,21580,-9209,-12143,26081,14519,-2953,-25532,13556,21662,-2538,15714,-25937,-5574,32055,29761,-27381,12201,-25985,-6585,-15206,24546,5342,10559,11759,10570,-27430,15298,-18629,13482,-14232,-6053,11419,32556,-9903,32567,29199,12089,13398,9806,8063,-4125,-31163,19268,7816,28877,12581,8287,7236,15523,26556,-21524,27045,17950,28088,27389,-20683,-2964,-24608,11265,10493,994,20055,-3161,-14436,1608,-8903,11333,-18101,5893,-21579,-18689,-9877,27828,-26468,29689,-7344,-4671,-30448,16968,30480,20620,-12454,-3449,14624,-10922,13725,-15123,-9577,31253,-12039,32245,14599,-18361,6106,6630,-8984,8551,799,-25275,-9251,-28463,-15520,7145,9601,-18136,5458,28805,-32436,-3123,14929,-28647,18102,4710,571,20131,11714,26239,-8555,-20265,-2642,-4424,-20634,8410,-4127,18681,-8076,24724,10205,26028,-10359,14746,-18756,30165,31729,-26112,-1706,5368,-6441,9036,-20089,31444,22548,13671,27304,29499,27632,12002,23426,30512,14352,17338,10118,448,-19830,-23638,6252
,-9430,-15266,31397,6036,-9736,-31250,3649,8908,32764,-20589,23581,-22977,-19171,-605,16405,11226,3086,22561,29358,7157,-27118,1305,20932,18325,224,-20303,29945,-15114,30083,-31121,8907,-29925,-14408,253,407,-13347,-9891,343,18252,19511,11725,7087,14663,-21254,-22568,-16792,-17475,-19089,32265,16686,-25723,26136,-2029,2397,10444,3291,-22793,-23379,16456,23040,-13360,14412,12426,32054,-4884,-22024,26080,19304,-23512,24858,2970,-32144,2232,8513,31363,-31488,-12727,3863,-15966,-27913,26360,-29458,-4489,20963,29750,8912,-11595,-29326,-4938,6931,23778,-2833,-3935,-28829,6468,-20730,4713,11619,10042,20562,17737,-30215,-20375,16565,27104,-19396,21446,-8329,-19357,23834,13632,-1558,-30261,-6800,20932,30569,7800,-9114,-12097,-12510,30250,-1582,-1576,-30342,30827,-10935,10745,21173,-20457,-25251,-20865,5698,13296,10002,-21055,11007,-9132,3243,-23052,14204,715,25785,-20907,14786,26403,-17881,-10740,21404,152,-16694,-18422,-13823,32226,-21386,-12130,7888,-4783,-11528,-32176,5495,-11740,-28970,11859,-2874,-12617,20371,-9663,-19596,-3539,4252,-20992,31186,-9361,-31683,-29318,-9988,-28558,29602,23170,9789,12013,-26050,20369,-855,-16737,-11639,-11532,-30437,5214,-18082,-32198,4657,-27967,-4037,-26494,29666,-25996,3947,9476,20467,-18125,-21307,9583,21006,31097,-10309,433,-751,-2122,28878,-7111,-8973,27277,-9743,-22014,25669,-6120,-19614,-11765,-18635,-18742,-1660,-14174,-16614,-20528,28760,-18942,12319,-12741,-11556,17622,-24746,19516,-29613,-13239,6006,-26519,-10147,4028,-28941,20030,-30085,-13133,-32,30895,-24180,-2791,11280,-9607,-12628,-5266,-28037,6423,-27070,7525,-18585,24154,-3741,-6893,-30391,-23626,12921,22707,-27266,-15044,-16931,4750,-7947,-9050,1947,30176,-22323,8132,32689,-21569,15450,10919,17437,46,18951,-12172,25265,-22668,-31376,-27170,-6762,-29714,-6566,-3023,-1074,22552,20778,19174,11989,-15383,-24983,-32638,6808,-25986,-31055,-23453,20941,15363,22735,3274,31418,15371,-8461,1539,16776,21056,-29453,31726,2210,6597,-14269,-18803,-25144,-11448,32639,-2746,-9674,-22062
,10098,30875,26100,-25402,-12852,-31863,21123,-7288,23792,-16064,15519,-10619,16300,-17145,13373,15330,10099,24581,-25119,-2712,-15795,-13520,21629,-20402,30457,16370,29623,4976,-24912,32008,13333,8451,-4842,25766,-689,-6679,13903,9973,1342,-10717,-3967,-12713,11086,4903,-12299,-277,20587,23970,-19524,-15610,6686,32672,1635,18594,14952,8568,32643,5107,20269,19897,4576,-26185,6246,-25721,32370,20755,-18770,15533,19354,-9988,12240,24009,1028,-11299,-29316,15354,32741,-22067,3315,10524,-18802,-16077,-27277,2678,-12732,-2527,5879,-4455,7665,-29659,-11745,-3270,-6322,2083,8042,13915,8495,-24781,-25385,22117,31626,-24202,-19558,-28380,11407,-30433,-18207,-22660,13483,-20491,12549,-3482,-24211,-32559,-27359,-24083,1576,18851,10110,16050,4692,3669,8189,-7733,5014,-11060,-24882,-20660,-19411,1654,-24344,340,21074,-14971,9692,6035,-27951,-7209,9686,7948,22338,-21297,4385,-5499,22356,19852,24768,26180,13949,18193,-24273,7370,-29020,-2318,7777,-13575,-15651,-9748,6617,-27768,-16022,28846,26276,4730,15867,-26892,15754,-31952,-4349,-13216,15194,21808,11591,-245,13769,15168,9128,29290,-28151 diff --git a/tensorflow/lite/micro/integration_tests/seanet/add/add10_input1_int16.csv b/tensorflow/lite/micro/integration_tests/seanet/add/add10_input1_int16.csv new file mode 100644 index 0000000..2a16bf6 --- /dev/null +++ b/tensorflow/lite/micro/integration_tests/seanet/add/add10_input1_int16.csv @@ -0,0 +1 @@ 
+-20487,1121,7021,-3486,-12502,-9109,15111,29487,-11393,-31588,7873,-24879,5801,-23350,28223,-19541,-18825,-2422,12291,-23343,-27319,-14882,29980,-31810,-20697,-25103,6177,11589,10007,-30880,25065,28028,-28413,25702,1722,-15717,12533,19288,-13570,-13450,-29369,-9494,-18529,-27757,-11697,4683,12174,1481,11306,12685,21157,-23257,-10085,-27861,30274,15033,4041,-9268,-16878,10584,-7951,-26639,2610,8484,-9296,21115,-29893,12788,26363,15222,-16981,-25149,-9464,-13517,-17048,-20375,4223,1718,30830,32137,-5954,-26111,7921,-22072,17785,22829,-20516,28061,-22601,11350,-27878,19487,24079,23977,6425,3364,-30095,27816,-27521,22347,26940,14824,29006,945,14474,-877,-22474,22679,20549,-13897,13551,13855,-4961,-4737,-26438,1560,-24478,18769,8457,-29676,-22978,-21022,-28908,-29818,-4988,31804,-22850,-11646,-7250,8332,6695,-28567,29416,-5175,22030,1635,21294,29038,-21441,-7903,-30591,22868,-22600,-26061,-1975,-26867,-1317,-26481,-12105,-2512,-511,31988,-6923,-4472,18711,30878,19868,-13711,20007,-30898,-30379,-30775,12196,-3027,20698,2700,-25197,16383,-23870,11236,-17412,-27684,11291,-19030,-28313,-23618,852,-26434,-18937,-12239,27357,12532,23819,24977,-6357,-29326,1605,23063,18063,-12201,320,-9098,31549,16014,-19213,22750,-11096,28207,26033,-25452,-15201,16753,26197,-19152,24790,-4427,4940,17600,-17997,18228,18953,-30107,3934,-29454,-636,15317,18591,13957,-27978,-19256,-5893,8456,-3545,7915,-8547,6841,-24241,30861,-4736,11679,-28907,-8623,1193,8822,2940,-7071,-1389,6139,-11106,9787,14297,21999,19023,11311,15848,-8234,27661,-5478,-19614,-11911,10800,-4201,4613,23063,13673,-5386,-19861,3675,-27053,-19117,-7780,22194,11730,6744,25807,-24479,-10630,24125,-24635,-4085,-24462,26048,-26456,1294,-16846,-23195,21294,-30281,30308,15036,15691,10883,-26927,-14388,30038,-11389,26836,30236,25049,-25976,-10545,23872,6277,5311,2590,23085,30163,31355,13446,14512,1790,-24282,29565,-29735,-8877,27890,-31279,8991,-10051,-13802,16574,14315,11111,6701,-9090,2914,-10671,32115,-918,29328,-25015,-3152,32573,9
950,17089,23096,8567,-29168,4411,11767,-735,-29831,16143,23260,3642,-2825,-17246,-22730,18433,-9400,19631,2226,-30543,-16402,11006,26989,31416,16774,-24723,-27951,2963,25508,-14818,21443,4973,25272,8539,-14234,-31906,-1475,-16908,13268,10837,-20372,-7838,-14912,5148,3852,-15760,21911,13767,22880,-4464,-9162,-31723,-32398,15654,-8752,-11523,-5549,15373,-25919,-6236,16342,-4388,23089,29460,11125,27847,-200,4854,-13591,-32003,-6315,3112,-15788,25934,-27978,-19635,32639,-31261,-12365,31752,11556,-7813,-19953,-32296,18976,9932,-7766,-17363,17332,15468,9355,-3649,1458,-30135,17927,-11423,3270,-18887,-18255,-19989,-305,-25610,3944,-5737,7793,29730,-3728,29125,28178,-26287,30946,-21678,19521,-20454,-812,-23209,24078,-14361,-12494,27568,-26391,27995,28684,-23193,-22484,-21016,29937,-19214,12486,30231,-3174,-32092,32689,5490,11903,-18994,17675,32165,6830,-14349,-24820,6201,-29898,10073,32397,-13579,-23459,-14935,1172,-3076,-9367,7282,-10703,-6370,19030,5030,24973,-28726,-31792,7220,20845,22445,-11020,-3998,-14879,-24076,-3209,1794,-24708,3982,30854,-18159,20599,-11405,-11933,-17014,28589,-30159,8672,-1989,-28071,26681,-1686,23876,-31934,25402,20135,-2902,-9646,-25666,30124,-14218,8518,-21387,-16830,114,-23252,-1214,-15527,29144,-17779,12167,-7395,-1706,-13758,-25555,-15346,7662,13478,18155,12593,-8186,10138,-1136,23065,11458,-19647,-5028,32645,-1320,-16436,-29400,28545,1223,-15294,-17238,8640,31470,28044,-1387,-7374,18886,-15426,19159,22076,23549,-15508,-10986,23165,-24378,14233,-2811,25756,26846,28769,29634,-6520,-7816,15626,26431,31306,-7427,-29867,25442,-2986,-6913,-2061,31501,-20062,3234,-5681,-3818,-1403,-19920,17136,10414,-5538,-20566,12716,4688,-3638,-6554,28986,-18994,-11554,-28618,21831,-6373,-6330,-29055,-17718,19438,-12508,23002,16594,9985,24686,25607,31152,8745,7052,18140,-11787,27254,7685,27834,-8749,13832,13153,-3498,4089,2854,9673,23025,9478,11009,-6974,28196,4785,28641,-30048,-24956,-984,30930,-18322,3671,3721,-4375,10082,-16194,15089,20863,-4189,24266,-14838,
27965,-22520,-9795,-8455,-8689,22113,12610,17705,-9327,-26128,2863,28734,-30673,-17841,22633,-6624,-13045,6465,9466,22905,8401,-12525,-30612,-24847,862,-19960,23593,5843,-4327,16482,-20886,-24792,-14009,-11570,28056,-20125,-12242,21721,13560,4977,5904,5313,8423,6715,-7509,-19947,24264,-14844,26372,-22634,-29595,-17489,25398,1664,30634,4059,28528,-25082,22657,-24705,17045,7877,5342,-16183,-913,22632,-18206,6057,-13606,8421,-13087,-30571,-26954,16248,-32670,21309,-50,-28273,2976,-19850,8049,-16914,-25423,-18313,20589,30231,-32594,-17865,26218,-10644,13488,-11346,-20446,14372,-151,-28915,11543,-14721,-11849,-25648,4892,-29664,8100,27713,-2027,-3379,20335,-5278,-15146,-2913,-24083,10445,-30112,15091,5745,-30080,-31180,5994,14051,22616,10760,3873,-3761,-4479,-9496,17804,-18834,-15597,30995,-23730,3628,31337,-2499,8489,11737,-23845,-20335,-10030,31363,-9955,10628,30857,10240,32368,-13609,23474,-17961,20337,1726,30555,21059,15177,-32370,9959,9317,1606,-22133,13925,2887,27404,13954,-21984,-4712,12857,16532,-276,-18975,911,-10547,-7204,16347,24197,-9904,25892,4048,32297,-8095,-11688,-31947,3227,8624,31175,-26434,-9146,-27459,16531,17399,-7491,14935,24555,-30754,-8653,4963,15597,-32434,7271,-31733,-22416,10379,2566,28686,-25581,-30508,16361,9204,-13324,-21384,3496,-22822,9574,11782,16836,29414,8254,-22644,-16287,-11515,31146,-27500,-23742,-20307,16303,-13742,32229,-8764,19959,-11652,22907,16148,-28546,-23093,-9659,-12826,21100,-32606,-15101,11982,21970,-19809,29330,11185,16024,-19520,21866,-28255,-20095,19836,12872,-14632,-7536,-4820,-5981,-14701,-19677,29348,-15653,15907,-890,-20694,-14706,32076,-22090,14425,-23576,-22194,-26339,-29976,-14763,-28884,15767,-21716,-7732,15453,-9814,-19696,30197,24715,-2050,6561,-8087,-20998,-3734,-22223,13480,-22711,20999,-90,-5078,22720,19716,-26127,-32170,-14966,-28104,31891,23322,30247,26825,-3981,5519,-22028,-30486,-3130,20091,-9283,25977,17359,25872,9466,27071,-27473,-16876,27744,20051,-32473,-16546,2875,869,-1847,4792,-27392,-19181,1552,
-22818,-1591,10238,-3949,-5862,-10646,-26090,-22042,-5994,32305,-17117,32409,22237,3453,6798,15010,-23167,26559,18882,-3745,22558,20069,30841,-3090,31625,-22943,-3290,24383,-19801,12730,-15724,18403,-15516,-16843,16562,15040,-5991,13220,10373,-28861,26120,25380,-12372,18353,-17339,17900,-15519,540,-14962,-9973,8309,-29401,24386,19659,-30092,-20053,6509,2887,21231,-26856,9320,307,8801,-4846,13918,-23352,22977,32433,-16387,-26509,-31819,-434,21624,-10572,-20271,-2822,20227,-30551,-13618,7544,4200,12567,2340,-19910,12317,-30714,-10920,-9809,-15222,-8981,5580,-29608,806,5976,12379,-5668,26467,32195,22598,12508,24260,21942,-23659,16197,2876,20230,-16011,-24866,-19501,-10640,15120,13413,-8695,-1491,31084,32740,10085,-5408,9154,32212,4787,30271,11056,24570,19376,-16098,19533,-5334,24924,-4612,-30662,-12100,-17159,28,20525,16782,16227,8579,19239,5143,29790,-28681,-30787,29530,25895,836,16535,8054,-9018,-19064,-18970,-3766,9292,32286,14155,14470,-12310,-5920,18828,-3859,-7033,-13696,1521,-31633,14841,-12082,-2292,-20984,-27183,10702,17583,-399,14915,-26070,-27156,-17322,-18529,-2975,892,29165,-22273,28252,113,-19468,-19645,-13180,28710,19137,-17389,26737,29260,30463,-18837,-24969,-9119,24453,21737,-7626,-18470,24764,31345,11322,-23884,-6123,-11651,-21191,29,-23792,-2391,-3263,19394,-12353,19516,15907,-11294,-10234,-28207,-322,-6652,5049,-21122,18725,29700,-9090,24572,-17704,-16046,9499,19918,4226,-7737,-21237,-2767,-14183,21207,23002,-24212,1918,-30706,-16749,-3285,-23318,25512,725,-9069,17642,22910,-17777,14402,1440,167,-29278,25364,-5044,-23908,-29549,-13883,-23837,-31702,-8634,16400,28459,7003,23882,19734,466,24987,-26225,13388,-5371,-13579,-13666,9362,793,-29981,26529,-27992,22764,-7086,-29364,-10000,6792,-31016,-17482,15029,29286,-13178,-20818,1734,-10179,-18868,-20866,-24995,-25496,26033,-30225,-20391,-21542,-7292,-29723,16877,18668,-15972,20647,7490,-25509,26191,-9581,31653,21889,28275,25876,-13293,28639,18336,-6795,-15031,18145,13879,26929,-20,-27536,11351,3414,-1214
8,6780,9079,15079,14947,-1750,6489,17971,17555,27185,-29229,-10098,26885,15954,-22575,22964,8320,-25177,-11544,14759,-17546,7586,3694,13801,-19056,-24483,13733,27531,-18505,31403,13336,-22048,8767,18232,29512,-28811,-31332,6910,15591,7407,-2006,-15196,21766,-27197,-29139,4598,-29007,-18318,-27385,-24263,11761,21965,15771,8101,11599,-18683,-26209,24002,-31682,4893,16712,4309,19689,-2812,-103,8088,5914,11906,-24124,23253,27075,-14687,14895,9703,16806,-3920,26609,-24958,17736,-28950,-11192,-17246,-26783,-16798,20076,-26601,-24675,31338,-11803,-5739,-782,32232,-15347,7003,2486,-28868,27616,2551,-13377,-29696,-25242,-8838,16354,7502,-4926,-14699,4212,1780,-29976,3224,-29236,18061,10230,-17841,-16049,-16846,16970,-13272,-30219,-29064,12384,6307,29053,-7627,16795,-25578,13013,18489,27943,21611,32001,-31897,-24444,-32544,20524,-2034,7274,-23346,10386,-25145,1484,15700,-24493,-16578,-18021,4661,-14761,-30183,-32225,28328,9166,13210,28710,-1267,7991,5905,13074,-10948,-139,-9890,21214,18570,-21817,-2334,6441,-15743,24191,-19338,-13390,5306,-6208,19156,-10212,24035,15890,-7887,4979,10903,26716,21879,-1205,15143,-18624,1204,-14368,-24335,18923,16128,-29426,-17314,13940,-15573,-29370,17625,6136,4991,4866,9089,1384,-17939,10899,17013,-28098,-3522,-16675,10501,-6544,-25509,-25895,139,-31859,10622,-3871,-21991,538,27252,-11713,10015,-30559,3443,4896,-16335,644,-27176,32681,8979,-15991,8320,6634,-27754,29791,-4703,3868,-25839,-7411,2546,-1298,27459,18726,11627,-16465,-24847,-2749,16665,19689,25540,28395,-20211,19147,2817,24241,-2211,20979,12862,18835,-6267,-515,-12897,-7549,13097,568,26639,-16193,-27669,-8152,-21593,-26194,6949,-23678,-3564,-280,-29415,-21428,2164,-22904,-25520,18045,30492,19338,31021,-18946,12037,-10865,-24654,18348,-27739,-15768,28073,-31663,-26630,-27011,21903,-3347,1259,8793,-10529,-16860,7965,-16365,-14287,-23071,-8287,-22027,13533,11756,29470,-13538,12890,27184,20837,5464,15648,-4477,6440,18315,4428,-24571,30878,19920,26758,24477,-17439,-14583,3324,9554,27563,1
9904,-21250,-30116,2254,-179,-4611,-6701,1994,31111,-11268,20108,9500,7177,12486,30697,-5152,-13058,-13537,4418,-23917,30716,-13009,-30585,30028,-5976,4366,-5485,5662,19673,24204,31731,18932,-14126,19858,20169,-27375,-13095,27991,-22157,18170,12105,-4170,-25409,22952,-30141,10481,-18705,-17599,13938,18323,193,-11144,-13905,21036,-1944,-11670,18871,-27326,-5771,7784,-3921,13844,31628,9708,2613,-21628,22421,7534,16823,11762,9129,8336,-16434,27646,15536,-7267,-3557,-29093,20015,17565,29165,-25865,-2842,-5973,-26797,-7608,-32541,-20344,-22993,13789,4994,26327,-6954,6815,-9982,21836,27305,-17170,24259,13377,-23778,3781,-30008,-11238,31167,16280,13363,31181,23056,-10674,23353,-14565,-30570,31155,20685,-13766,28983,32744,17186,1439,22777,-24984,31046,9756,31677,-19348,10679,-24953,17299,22040,7458,-14392,-19246,7170,16350,-22403,23280,30695,-7648,-14713,30319,-19604,-6970,27648,21792,-24413,12532,-24368,-11426,-31959,23232,-20908,-24627,13653,2302,8524,-17883,18724,23884,-5493,23632,27081,4484,-28447,-817,-5857,24856,21201,-30332,-29424,-29855,17085,-1442,2430,5925,-16957,1496,22076,-20109,17796,28654,-10003,-13320,-25755,-26542,25451,-4689,14546,-32495,-4817,21602,9851,-8728,-24101,4083,-795,4872,23724,-6531,-18505,19202,-23613,2045,-15855,-30379,19584,21423,-23306,21948,-26344,-17213,-31178,11813,-2515,-3578,8757,-2533,-17083,-28171,-30575,-12131,29017,4685,17169,-2406,-11028,-10911,-19915,-10537,-31401,18459,-385,255,-8852,21413,19247,-5550,-32650,8389,21257,-24445,-15539,-25459,-8271,-24321,-1401,-7220,29593,-15180,5847,3363,27586,25091,-11397,-9728,-16855,-24459,20179,3187,10903,-10370,4865,-6628,-23480,5822,-10279,-9297,15948,21990,6706,-5438,21785,-25933,-5028,5033,-965,4278,-30032,29569,-5391,5530,-28277,27636,110,8365,31876,-7953,-14674,-12322,30896,15038,11545,-2164,-18582,12252,-23197,5475,3600,18131,-12377,11242,-1759,-14530,-24246,-7715,-30733,-14589,-4453,6010,1689,-12625,14380,25838,-28771,-6852,4017,-2244,-10061,2746,-15801,-10394,-2991,15422,31389,25267,-2
5113,-17670,-20679,-10148,-11706,10036,16643,-25963,20589,-18244,28018,-19548,-9261,-9695,2019,9632,-6010,-1145,-244,-8618,22724,25887,-23672,-7017,-14698,-24729,-24475,4647,-19459,-2815,-26892,16736,11072,-21290,25730,-27435,25551,-15440,-25413,26970,27334,-12186,-22139,-2237,-20240,-7823,5010,27457,3649,-4906,16600,28918,-18221,-330,15217,-22520,3286,9717,-15806,-389,32064,-17337,27216,8060,10985,27387,7605,-1890,8677,25380,20784,27694,32658,18883,-3394,-5982,2207,-31065,29907,13084,-6509,18009,-9345,-28310,-6325,-13864,21361,26921,-10083,13026,26094,7796,-17950,25962,13918,-16488,-27876,27478,11641,-18274,28355,-19168,-668,-28833,-20261,23324,-6698,-9236,-11861,28411,-13311,1905,-29991,14290,-12512,3716,16155,28979,7060,-17226,-29671,-25639,-25803,-23579,22752,-32569,29053,21844,-3693,23457,-12135,17308,-28613,-15192,-25307,-27356,-25725,14073,18336,27993,-9114,-11951,-24492,-20977,13086,28942,25676,-4292,5354,16909,-10885,-11091,31352,14186,-32497,-3178,19593,14976,-14018,-17423,-3891,-21056,-20133,18116,-5789,207,29183,-22695,32498,-7486,2127,18957,-5019,-20218,29079,27669,16537,31008,22281,9016,31844,15105,32303,-9082,11222,32535,-28422,20706,-23165,-10999,6921,20685,10584,5952,-22040,31736,-4501,4223,-6974,-4767,-24152,-871,28669,-20546,-14730,9648,17418,20061,-18689,-24063,5989,-10803,13962,12487,-171,25161,-9066,-15645,30750,-27343,1988,-31756,22141,18036,24870,13801,-8253,-18652,28688,28955,-6874,-5289,-15620,-28525,-7987,28952,-23547,-20969,-17004,-23911,-22184,-18541,5331,-23321,-4404,-17693,-25034,32377,-31819,-31512,-13566,-22152,28146,-22554,11315,-5126,-26891,8020,-10639,24053,-17931,21227,-1584,4176,-14568,1927,17811,29759,-23285,-9742,-20074,-10398,-26625,31125,26409,-1199,-20199,21156,-5152,-8861,-1271,-29585,-25657,747,11781,1725,5005,-14517,3470,31231,6721,279,23370,-15977,-10368,20568,28557,-22107,-15216,-13976,-24879,3062,-11140,-21892,-26818,8991,31780,-5127,6776,29488,-13820,-21284,-1247,-16419,22237,24336,21573,-2045,8498,-7985,3783,-8688,-
25741,-6211,-17249,-10163,-27852,2047,-4707,-651,-28769,-14197,25050,30255,-23477,8622,2653,31328,-15312,14811,11218,16008,-27059,-19164,22347,17350,26575,18344,-25162,-24012,31677,10523,25857,7781,25076,-30600,-12811,17269,21378,-11662,16429,-12384,581,12807,-12999,-15724,-4144,28530,-25291,-30479,-12817,19934,20145,-29349,-9743,-10496,12653,-2057,32678,-1131,32563,29868,23963,-16051,30598,11521,-21016,-26140,-3924,32404,21928,27500,11180,-18705,10389,-16489,-29565,-63,27110,30313,5788,-18112,-3530,544,-3900,-30398,28170,27591,24842,24289,-19416,-20615,30778,-26390,-27312,16538,-21044,-20302,-5605,-3399,2614,27601,17056,-13213,5134,-19633,8952,-10359,229,-2623,14397,-11050,-20755,-22115,-9171,14462,-29828,-24577,-3682,12360,6343,-6483,-15321,24114,-23665,-701,5297,26833,-3858,701,-16776,25624,-20481,-5191,17023,-24880,-30059,-21770,19522,-28244,2667,-781,4182,-323,20209,-25264,7750,11786,-32260,30584,404,-15475,-26110,5185,8440,-21540,-29783,15689,32131,-17339,9030,-1555,-25815,7985,-12906,10847,-18156,12128,-3102,-6899,-1079,8515,-2203,2665,15157,16810,27689,26062,1849,10145,-2784,-27796,25470,-21888,-16,20494,29652,9615,-10803,-18395,23892,-23701,10907,12346,-53,-4982,-1556,-11071,-1797,18856,-32505,-16315,-25266,18784,28618,15632,6569,23500,-30745,6935,-5754,-13982,-3935,-15314,-28788,-18119,27121,-25544,-13055,-25141,24278,18914,20677,13264,3692,-14566,28614,11721,6473,28725,-10629,14376,12710,-13411,29985,14009,4865,26210,13397,-27329,-29486,29347,14238,-15105,6197,6568,-32183,14917,26228,11141,7768,16421,-31334,9149,32465,-25294,23990,12922,-962,-22199,18106,-27051,310,25876,-20883,-19677,6609,-21427,-4009,-14573,-13685,23887,29200,30070,-9088,5408,-25436,19627,-15022,30658,-28706,-29809,18148,9790,-19801,2416,-6429,-14439,7602,18479,-27204,31834,17734,22541,10185,-18432,7753,25306,28603,14995,20284,10011,-31157,-21784,13726,14096,-11889,-2473,-17161,6500,-26532,16695,2472,8213,11633,8093,3409,-26700,-23753,12289,32472,-24866,20431,-19108,26237,8625,4644,3116
8,27341,-6014,1179,-17544,17129,-9102,-23240,28484,15849,-28140,29740,-9442,-11247,31144,10179,-29392,7943,17706,24768,-12643,2940,19156,16330,-26360,-12034,-5068,-29360,15217,-18371,-126,14492,14833,8389,-31524,24978,11536,9516,29193,-400,-29360,181,-7757,-5353,15830,-8600,24191,7699,-2007,7510,-6666,29052,-5128,-25996,10851,7289,-8094,-4132,18132,27547,4259,-23691,-26918,27244,1364,-21430,-22121,22951,-23996,2310,28403,-28771,1809,7148,6818,12794,7381,11771,-11392,1840,16131,19201,16679,-7687,-17650,-10311,-11312,12553,15393,-3543,-9483,-9093,-524,-17549,-15376,-30595,-4849,27628,-26604,-20200,12765,27052,-11927,-22840,-1220,20930,26848,-14766,-11477,3266,-13,-149,-10692,27069,5252,-5678,14218,-12607,-6306,21488,-23782,-8646,10855,22943,29844,21565,-27151,-32680,22054,95,-15161,31459,-1533,-14310,6261,-27679,-16640,4166,-32178,-31724,18119,-18945,563,465,3116,27021,-29508,13934,-1330,682,-13208,-20136,8602,-4448,18179,6225,-6797,-32270,-24331,5704,-21781,-20499,-6395,-362,21803,3388,192,-24661,-31511,-8328,17142,-27800,-30575,-20232,-12011,-3326,-12328,18126,15707,-17383,-6127,-2742,-6803,15098,13880,-25195,-27029,25742,-30084,18919,-7205,-17070,16661,29621,9838,-6147,18059,17542,9896,-15902,14486,-10511,5298,-906,27334,21750,-13710,2904,-12771,-15759,18010,17879,32491,13879,-27477,17018,-20055,8278,-10503,-24502,-24037,20499,-10980,19198,12519,3082,16934,-30669,-29627,30454,23248,12240,-11897,23101,-31264,-5002,-21514,-25530,-31503,421,-19756,-4743,1522,362,1581,21245,13047,-14371,-9899,508,14053,29211,30585,12633,28098,-30058,11506,-14063,-7464,-29529,-257,-21537,-21561,2495,20935,-17165,29593,-26398,-5865,-26830,5227,27497,-8095,30788,21684,16747,-25133,-29380,18966,-11844,-30833,32577,-12214,26984,-2859,-29996,29476,-25684,13374,-16410,10929,18949,30890,-8289,-4203,-15763,-4598,16221,-8453,19412,19962,27806,8305,29868,-7652,-10289,18141,6437,-198,-15070,-26435,25315,26967,-11611,23065,-20521,-15013,-6865,-15116,26935,25402,4014,129,-26979,-12186,-26216,-31196,
-6834,-242,-5851,-29979,26512,-16286,16207,21236,11344,-31231,-19952,7647,20261,-6146,11205,-3816,-3517,-5952,12891,-30655,-10284,25449,-21640,-20461,-15890,21171,27252,31211,20108,-6465,-717,-4655,17995,-2577,-31087,2428,-2130,13953,-10144,27913,6402,15994,-25368,20712,-911,-14330,-4129,29457,-15897,3841,-25164,28285,-28515,-27730,32125,-13296,8181,12113,-20988,24031,7183,-19802,22260,29909,20121,-9321,29490,5286,-3068,10028,-10056,576,-10046,25407,23285,-19661,-28793,-19834,9843,-12389,24336,-28806,16720,17516,-25330,-29173,11077,4921,-12465,-10428,22238,-10649,-30042,-17001,4373,-29570,-17606,-10239,-17845,6430,-22462,-23789,-16061,-1690,-31175,-22714,-17129,-6136,-26157,-18780,-27265,-4807,-21432,-28414,-32202,-6460,6969,17569,-1259,-13604,27482,-3172,19493,-18785,-18675,-20302,-18509,-20153,-17980,-2393,-28318,29272,32670,-29560,31366,-2413,-12893,373,22879,-29230,22922,-4984,9379,-26625,-27576,20848,-6656,32086,31218,-6736,18282,-5596,6790,-10751,-14366,9099,-15809,20220,9200,27931,313,29300,14807,21828,-1713,16885,17061,-3735,-31095,-28459,28100,-1581,13335,-16150,-12953,31482,6388,6648,-9113,-23787,-27296,-27363,-11467,6144,-32547,14458,-14473,-19392,7493,-32571,-26612,-6710,26144,26099,-10810,-18548,-22308,-27225,13634,2280,26060,6870,24515,-22147,-22938,24938,12051,-24290,13975,9330,-27545,2488,15648,-13869,-6994,-25358,-28796,-20668,21264,21040,8452,-29373,19515,3868,-5195,-23088,4659,24696,12536,-27436,30259,11609,28443,14557,-21786,6728,-21745,3806,23454,-31943,22401,-16307,25556,-21273,-32703,21299,2050,-11414,-1198,-30693,25041,2034,12639,-13765,5979,3207,-22552,17020,24138,-631,9831,2816,-13406,-29453,16126,22932,-13170,23953,11854,-9876,16154,-26810,7643,-21931,18082,28067,-21637,-13793,-1179,29958,19174,32184,4287,5885,22090,-18525,-26233,-24858,-12816,12696,-11329,8638,-26006,-25185,-1367,5534,14275,-23164,-10470,8612,-9823,18733,-15690,-8111,11073,16013,-10717,-7759,10651,-31158,-2205,32163,-14015,28551,-16343,-23224,8447,-6201,6209,-14881,-17006
,5439,-21771,-4509,7260,32062,4715,-19855,-26586,4767,-13294,-28213,-16771,22013,4964,13040,5498,-4646,-22252,3559,-8861,31365,8665,22608,21249,14149,-16786,-25179,2827,-14281,-9226,-31287,-6399,-15101,4032,-8732,9040,21297,-23503,-24715,23569,-12264,-32742,28064,-25512,13816,2445,20742,-17187,20979,6765,31375,32264,17719,17040,516,11408,-25746,9008,28814,25667,11687,17499,-26243,-17447,-15712,17301,20240,-3903,17204,19606,23824,8485,30238,32500,-27948,22823,-13154,-16201,-19860,23024,-1353,24626,-20920,4746,-3383,3970,-375,-1913,5675,22663,27041,14642,7898,-30575,11585,-22458,-16503,-30437,-8719,-14776,29756,27543,-26304,1619,14214,7343,-13885,-68,23694,-16471,-31860,-25242,-20990,6804,4648,8118,12364,31288,3662,30606,5399,24246,-20154,-8967,-14362,-23059,22390,-20215,920,28513,-9666,-23484,9530,11849,21033,-11617,-30195,20307,-5909,-22076,10328,-31859,21761,11606,-21262,-14292,-26928,-31913,-19528,-8107,-26487,-31471,-20874,-27689,-22206,6586,-7670,-19518,13599,-930,-21828,20233,-22621,2887,12916,27729,-16339,12780,27350,-26480,30712,-1329,22727,6142,10824,31606,555,-2172,16836,-12348,26868,777,-30628,-3635,-10481,27593,-28209,-20397,10153,-14676,4689,-1239,-24268,24446,13200,9819,22438,-27698,19867,-18148,27428,17452,25618,-21317,-26277,29973,-22052,-13279,-250,-23211,-29966,12049,25258,-6436,17126,-8527,-30949,18726,-17517,18871,2333,10648,6051,-22658,-11821,10741,-18912,-32200,11919,-12403,5769,8442,-28161,-25048,-21402,25493,-12438,23618,7446,-3041,-28692,-21321,17947,3552,-16871,-13293,16707,-4130,11144,16043,-17027,-5601,-11604,24378,-808,19581,-5079,31180,-27646,7014,-20899,-1577,-14451,11435,-17709,-19051,-23885,-18548,-27855,-22088,14143,17440,2472,4050,29172,17100,-30083,-2108,-21264,25604,20676,29880,-29755,-19090,19349,-5265,31939,22752,9773,29009,4554,30866,30835,-26253,29810,4291,-29597,31884,2711,-15417,14512,3298,27020,-24889,8429,8511,-20066,14555,30298,-14684,-21085,-2123,-4396,29028,-25467,-22304,-10131,23871,-18739,-59,-17219,-7243,17171,6624,7
860,25307,24445,-7877,-1922,-8069,-7270,23254,8842,12077,16357,-26800,-2882,-423,18358,27461,-20650,-11530,-13003,25356,3898,-29695,13236,8376,22862,-16426,-6190,-4044,32284,-12376,29097,18032,22296,-13015,-20368,-13659,7492,5597,-9080,2112,10390,130,-17835,4930,2622,12365,-17473,-24941,13449,-2510,22000,-29070,32455,3684,6035,-31375,-14195,-1754,-9254,6999,29659,-13488,-24249,-19811,-32358,6593,-12060,-12255,-23810,22216,3561,28316,14005,-2140,9463,-30699,31576,30241,-16339,11724,29209,-14180,25338,-23130,-27449,-4014,-26645,26029,-3190,15010,-30849,4931,-20666,-24671,27296,-26839,-488,-32695,-30796,9325,-27952,6116,11677,-18294,29377,-5421,-22814,-24744,-25870,-27695,10497,11999,7926,5127,8716,-24718,26002,1726,-9784,-15200,15221,-5338,10149,748,972,2062,7558,-18208,-32118,-24258,25136,2204,-10747,4781,-28239,-3577,16456,6782,14051,-22466,6483,-26188,25096,30199,-4064,-10524,12967,-21592,26143,-30569,25978,-21502,15622,18190,27597,28653,-17426,-21345,8958,19442,-3671,-2763,6512,-29309,11056,24589,27781,16260,-9256,-7916,14409,-31974,-777,21048,12561,-25247,3133,-20765,6377,28892,750,32672,-24401,3571,31675,-26495,15133,-22486,19987,24130,-825,15388,-11298,-6946,25131,32646,26194,10784,-23280,-29794,-4391,-1340,-21881,-18385,7886,1298,25012,21168,-9425,9526,-30133,-14100,28024,-4684,-5705,-5932,-19468,-8505,-3651,-32439,38,28410,29288,18687,-2585,20473,10005,27475,-1111,-7806,-10559,-31181,10243,140,4820,31545,27853,1330,5318,-1042,-7108,30764,31903,-19601,-9697,19390,8311,27451,8336,31264,19277,-14324,-9153,16459,16155,18854,8224,15296,-5731,-13195,12341,17257,15011,20902,2284,10129,-2214,-29975,-16648,-4574,8946,28730,-26881,25741,-30923,-19346,-7424,7544,20392,2090,-19782,-25209,-28418,26501,15348,27226,-29859,-24423,24453,-18129,-3739,102,901,26065,-9020,20845,1613,4111,7752,7646,-11517,1833,27718,17225,25132,-17348,-11581,-11599,25846,-18340,-2757,-19319,-15613,-25167,-18252,25116,-31985,29760,-30715,2277,-28775,-5181,9867,11691,21395,-1445,10119,1690,-25325,-
13874,16813,-23764,-3867,-2513,2635,-16839,-8498,10224,8725,-11286,-4489,-653,-5526,7210,32331,28088,-14707,-22828,28662,31220,-2135,12472,-28139,19587,-8252,23413,-24682,11103,12865,-28847,22112,-5902,3293,9717,7737,-12398,-21922,6689,21637,13456,-25170,21289,22420,2452,10035,-19464,-6617,-12690,23109,-8907,17364,-26829,17930,716,-12709,23232,-30956,14547,-25273,5928,9988,26668,28008,-12478,-8130,-8488,-28441,15932,-25696,14444,5103,11303,-27881,25259,-10775,18729,-9461,-25865,5654,17906,20062,-3884,-17416,-24670,-1080,-13045,27025,-15503,-29960,12039,8150,1924,-32621,-17117,30290,-18894,18174,22358,11803,25537,16304,-6269,-28523,-20230,1753,-29493,19989,-1863,13416,-8400,-27160,19689,12166,5046,-29371,19092,14116,-8694,-14750,-4147,-13964,-24547,23463,23429,-25794,13779,8026,-9395,19279,-32343,-30997,7834,28312,-22729,11436,30126,-3477,-16385,-11406,100,11593,15701,-10043,-16145,19741,-5546,6224,-3117,-29877,1085,-19572,-9115,-830,24578,6890,22431,21593,-28045,27399,-4037,11833,-19408,-12052,-29237,-21728,8900,-4634,10341,14603,-17452,-17671,20378,17641,17467,2679,-11881,4629,-11492,17631,28504,-1255,1044,26596,526,7327,1568,-9046,20266,22404,-16367,2853,3911,-908,26774,15423,-2027,-18918,18698,15931,-8672,-15655,-741,15791,2514,-32395,-12113,13135,-11295,-23200,-20352,21677,-2842,-27416,25911,-6051,-31940,-1702,-23287,-1072,29349,29460,3984,-23674,29740,-3763,-8685,23233,26997,2182,4626,-5638,-29514,-25843,18288,4881,-16887,18389,-24225,3482,9940,755,-12126,-19585,21910,31197,2299,-23595,22304,12200,21888,-32149,-9278,24791,-23421,8952,11056,2814,28611,23146,30662,11087,-6958,25426,-25982,338,8439,24091,24279,-8790,-12433,-2645,20685,11956,-21472,-7752,-24534,25271,-18992,4214,-12288,-12296,29603,-27712,11857,6290,-8441,-8772,25565,-7031,-25540,16341,-4232,16439,457,-32315,24281,9086,31762,29314,-10765,30229,8362,10816,-6444,-9041,23796,-23968,18654,-30665,29479,29121,-11444,23219,-24825,12114,-7955,23004,-3764,21616,6857,-15100,-25812,-29066,-29407,-21813,-29391
,22689,18587,5470,14859,2322,-1361,-22214,-27896,-15309,-349,9627,20282,2463,17318,-18554,-9827,15495,-12250,21927,25031,1738,11854,20333,-10719,15965,-10107,7752,-19984,-19297,11418,15293,29586,-26723,-26967,-21353,-19225,-28132,12551,-30945,-4112,20703,-19825,-7986,-25579,6116,584,6007,6006,21024,25104,-8519,-13719,-26211,23134,13622,-21000,7199,925,2721,2858,11170,-17481,25994,-22929,-11071,-26623,12067,-29778,-7966,10394,9020,21510,-31237,-5244,25685,-21784,27428,22756,-17159,2042,5455,19041,-2882,-6941,-3127,8176,-11690,-1605,-20312,-11729,19375,-15778,28791,-16341,16719,-13694,-1633,17067,-7169,12498,14478,11292,-17286,21809,-3687,15301,19272,16077,9557,23995,-14417,26420,-26529,-937,19288,-16381,3688,23744,12885,28209,-11563,-30293,27042,-27042,-13838,32720,-21933,27001,-14017,-23942,-5318,13418,9655,12818,-24437,-28055,-10838,20181,28721,-1200,-6225,4291,30368,-16708,-32438,-19879,18993,-108,-12014,2986,30247,30563,10990,22860,-21659,-13946,274,10282,-16090,-32356,-960,12722,-28018,4663,-11564,12772,-16866,1868,31015,-26543,-21549,-19155,29099,18602,28883,-17391,-26283,-24230,16660,-18045,-28639,-16310,12309,13868,-30943,-25920,2093,-5274,25316,-90,24408,26120,-1662,-16008,7658,18302,23536,-22110,17014,-5455,-27720,9162,2434,-7614,-11017,-2922,-32289,11254,-23465,17492,16107,18559,-20118,29455,8591,-29236,-31079,29726,19978,-29211,-28544,27123,-23830,-22684,-21524,-27767,848,3896,-19599,32086,-4474,-19126,6757,3908,16419,-29489,-15855,2849,31412,15175,-30558,22211,18389,8520,-27216,4610,-15466,-26206,21161,-24443,-6362,-10428,-3163,-21474,-934,29966,-7852,-6771,1567,-19661,-4883,6448,-24404,-5705,-22941,-9871,30933,-16072,-20511,-10710,-2759,9929,-24726,-17725,30498,28350,3644,-14831,-20680,4924,15155,3863,-16732,5981,-3154,-28682,19890,4539,-26745,-27043,8671,18600,19361,17557,5513,27448,20563,-14758,13415,1273,26901,9786,27757,12803,16286,-17009,-1604,-12054,21520,-31034,32198,6700,-15473,-19994,13297,24006,-14707,24701,-12003,-27032,-26419,-7922,-10003,-1
4864,-4949,1211,-7795,-2513,-18487,-19214,-16919,-27715,-29618,-823,1740,-13959,-5466,-5084,-17032,28856,-6857,-10522,-6593,-30175,24951,-9738,1713,-20976,9125,-12706,-30516,-29849,3105,7730,25951,-17374,-249,-31743,23963,-17832,4882,28593,3707,-260,-13591,-12629,-31801,8075,31488,-8132,-27550,-16974,-27661,-10124,13042,-5176,-19013,-29087,29444,23423,23867,25398,11072,4634,32259,-22670,-25031,-569,-27486,-23926,-8141,31355,-14350,28267,8374,26647,4982,28307,16235,19349,24693,1512,-3421,9259,14690,-12369,11310,17742,-19879,-15460,10446,-30686,-8373,30089,3281,4170,17439,17716,-26678,-16469,14197,6397,-10396,6590,21453,24112,26344,2193,-4260,2334,4390,-9395,-10920,-27581,4650,-9560,-18411,12119,15066,-967,-1604,13727,-14648,25292,-364,23695,11433,-14280,-12328,9726,-17632,-7846,9077,-24641,17868,15974,28669,25561,-4975,12591,-22743,19134,25259,-27962,8313,6205,1832,-25486,18338,18088,28809,-13648,-23749,-8367,-4490,-18784,22307,-3814,-16700,-12087,30386,32027,-12579,31063,29739,-7478,-2787,-26656,-27529,-19514,-16470,-28132,-7251,-8517,-32543,10905,-3127,-27354,10757,28155,13016,26374,-19332,25419,-21799,-19284,26946,-26336,-19539,29072,3954,28232,-11435,19714,-29317,12453,-8663,-29591,13744,15806,26849,-2651,5124,21724,-4162,23898,-23218,-18684,-22757,32384,-5704,-27191,17214,-2628,-1321,6756,7779,-19795,9317,-11995,-902,8006,30970,2984,29030,-21291,-8400,7103,653,3567,-6083,-8019,5211,25364,9063,9422,-27017,-17272,23649,11082,-22872,25329,29751,-16425,21326,-13209,-10014,-25564,-22890,25197,7723,-17704,-2653,23794,-10687,28883,-11304,-20606,-2430,18694,3495,-15984,25711,31205,-24483,18812,-30946,2757,16657,-12521,6603,11546,1057,-22423,28931,-515,29509,-5382,16570,21002,7894,-13557,-8046,20655,12635,12576,25281,-23062,-5749,29150,-2352,-1976,28754,8542,25784,-5499,-15056,-31586,28979,1094,6197,-2037,-32655,-1146,5094,2712,-2639,10583,-31270,14963,29744,-25688,29910,-29376,26039,5720,-18664,-1236,-6847,12771,12571,-8309,-15120,18162,27915,-25629,29320,20078,31542,49
55,12290,17704,-2211,-13498,657,-15487,13945,8647,-22280,23907,32623,26461,-2882,-4439,14638,-22719,30257,-22909,-26135,8690,22261,-24182,-28872,-7197,11153,-27088,5815,-26552,25282,-3045,19399,-26174,7835,-7314,2129,18666,-28771,-28606,-12713,150,-18718,3567,20143,21821,24089,4760,29548,-27036,-13331,14863,21671,-26377,10040,-640,28219,-4404,20393,-4857,7591,179,17278,21442,7612,-12136,-20098,7179,3484,8153,6267,-30541,-16709,18809,8668,14526,9193,6795,32587,7882,20106,-2671,7255,-29222,-2951,-2306,24035,-24121,13436,28689,-18201,6523,24624,-18720,-26037,1501,-7898,-17585,1332,-27564,11241,14830,28956,-28736,21983,-20416,26443,22684,20875,7576,-24300,21843,-15677,-22915,17447,-29255,-29210,30496,14405,12510,30627,11922,19475,-10917,-5742,-6960,-3022,23966,34,1369,32593,-17119,30210,-20077,-25620,-108,10748,13229,-14580,-13997,-3475,-29798,-19363,-22648,-15579,-5460,29560,-3726,-2670,-27924,11411,26697,11013,-1793,11645,9705,-26461,-27187,8155,29365,-22730,-14874,6804,12828,5128,855,-9696,-23744,-14908,19792,-8663,-30061,7268,-23771,25311,-29899,-16076,-17444,23930,-29613,-24215,-12749,-31083,-6687,-31436,27914,31259,2867,14900,-10896,10161,28983,9673,13617,-30081,-3417,12218,-23018,-31172,-15333,-4807,-14808,7032,7407,14655,-23955,24713,-18320,-7574,31624,-26045,-10043,27322,2459,14880,27798,7780,-25589,28848,2272,13907,-30187,-2740,-6796,-16228,-17882,-15439,-1196,-29223,-12431,22019,27034,-11896,-8654,-3105,18358,11844,7016,-15821,22827,2196,-28016,-13844,30504,4951,-10993,26848,21313,-8819,-12528,20443,-24816,31472,-22346,-28330,-26171,-21040,13473,-6617,-25861,10361,-23798,-28853,4790,6603,-17020,24603,26694,-22245,-16112,-4166,13560,14162,-25267,-688,3708,-25978,21564,23865,19430,22339,-1462,22080,16739,-31530,31035,9739,-18210,-19715,-14859,18645,15855,22452,-21859,-5665,-23910,15934,-1145,27872,-26008,-29161,3579,23251,24211,-10664,19442,2095,14445,-14630,12223,-28114,-28905,-27815,2648,-23823,-24143,24391,-9263,-24368,-7638,-18650,-30245,30070,-17127,-29679
,19791,-29001,-3057,8613,2413,12632,-4062,22873,24161,-30907,-4689,-25671,1481,-10241,-29977,-24048,-15839,20827,22920,15156,-19877,-26107,25416,306,-30706,9788,-2172,-17474,10435,4395,-27695,-26814,-14250,-5299,-6308,-23555,-23020,11229,-4094,-27601,-3122,-22541,-9234,-12426,16797,-31803,-20145,4377,-30893,-222,24508,-9519,-16895,-29827 diff --git a/tensorflow/lite/micro/integration_tests/seanet/add/add11.tflite b/tensorflow/lite/micro/integration_tests/seanet/add/add11.tflite new file mode 100644 index 0000000..c34bcfd Binary files /dev/null and b/tensorflow/lite/micro/integration_tests/seanet/add/add11.tflite differ diff --git a/tensorflow/lite/micro/integration_tests/seanet/add/add11_golden_int16.csv b/tensorflow/lite/micro/integration_tests/seanet/add/add11_golden_int16.csv new file mode 100644 index 0000000..b2ba889 --- /dev/null +++ b/tensorflow/lite/micro/integration_tests/seanet/add/add11_golden_int16.csv @@ -0,0 +1 @@ +-8666,-32768,29417,-30462,-22336,32541,25250,11109,938,25050,-6984,-17301,-6586,-2616,15814,-22939,11349,14793,32767,-19914,-12033,12221,29265,29452,-3244,-4745,-16655,-9980,11018,17666,-15984,-29081,-26205,-21467,-4308,32767,22458,3129,-2668,4871,-8898,15311,-32768,-32768,-20821,32767,32767,-23484,350,28489,18926,12077,16706,32767,10637,-10838,-19225,-13365,-29526,29412,-14268,5635,6337,7417,-3451,-20184,-9550,5108,17005,-1313,-10862,-30312,-5686,-17584,32767,-14521,20646,3430,29889,9873,27164,12474,-32768,20944,-24041,24214,-11494,-5754,21567,11057,-14257,-13669,18,14112,11291,1286,-25935,-5890,7556,-29291,-18772,25176,20858,-11000,5575,-16726,32767,-2640,-12413,3695,-22264,9926,-17700,-2102,26776,-17683,2122,-6095,-19455,-6782,-32768,-19943,16999,19147,6073,-22473,24420,-21700,-24668,26868,3760,17409,-2232,32149,-32768,-13476,-16517,9778,28792,-27324,-32768,-1021,25175,6558,2422,-24138,32767,-31215,32767,-32768,-16339,-17913,32767,-6518,16974,18156,12277,-4824,1241,-32768,14126,-23224,-20790,-13222,-32768,-23602,13376,28591,-5483,-32768,-
19283,21614,-459,18108,-32768,11273,22584,32767,17354,-10690,-26813,14930,26811,13870,-5941,32767,638,-16727,7456,-26400,14448,-3696,17025,-22111,23724,-29353,-16771,5245,32767,-17519,-15902,-25202,7735,-1084,-5377,-5831,28735,17241,-32768,27349,14149,12296,-7033,-29240,-16080,-3368,-22702,6200,29289,-20734,15058,22857,-22036,1765,-26248,-14634,-8864,13457,12170,-2545,-21639,20768,-6069,20837,21475,-32768,-16909,32767,18137,-4859,-32153,25250,-32768,28467,-13667,-24710,-13946,20443,-29689,-23440,-32768,-29523,26485,-15179,-14203,31233,-18037,20015,14432,-32768,-30546,-7216,-26479,-26420,-27591,3627,-21231,-12856,25076,740,-16992,-32768,26080,16710,-24418,12106,-26550,-5063,-5409,-25498,-13367,7725,26034,31060,-11811,-10542,-20198,11147,-32768,-23534,24475,-13433,-4463,-17920,1689,20829,8632,2539,-7980,5354,-18732,-6380,-30717,-20039,10792,17972,14390,9591,-26274,-10557,18583,-7441,22424,-18110,-15188,2541,14622,21698,-2759,-30627,6228,32767,-5784,2695,-2174,-5349,2878,-17067,-21852,-32768,13868,10845,5532,-16413,17178,7499,32767,-20119,5125,-3103,-24980,-32768,-32768,29335,-24164,28436,32767,-5059,-20625,1830,-25710,12208,27025,20784,-5521,-9086,14752,26648,-10164,-26101,13386,14769,21978,-32768,20107,10064,22748,-19016,-19273,-29522,32767,32767,-25789,4907,-187,3765,25567,-25788,-25761,-910,-21720,14660,17965,-14732,25314,28961,18255,-8941,21989,-7685,13470,13921,17066,-2231,22285,32767,19034,2456,-26948,-22253,-22785,-32768,-32768,23564,32767,-23894,18282,-31714,26445,32767,24574,-6605,-14865,7139,-12173,13377,24028,15814,25785,-12303,18871,-31589,-32768,-32768,-15477,-32768,-10378,-21850,32767,19963,-3861,13321,-28509,16312,-32768,13318,26285,-21118,32767,-4971,-13246,-7051,-25340,8185,493,-17891,3955,27571,857,-9608,15862,-32768,-4525,-8468,20620,-19459,-18989,-31996,-26104,32767,14942,11257,28169,23732,-32768,29521,1682,15953,-6692,-3170,12756,-27545,-12656,-15488,32767,32767,-16475,-171,20872,4987,25532,32767,-4859,611,-14622,27066,-7741,-1258,16850,-2166,2619
0,12917,23824,-28458,-11180,32767,12115,-25651,25535,-32768,-2190,9294,3625,-32768,27868,2267,-15900,-7353,-32768,-10649,-30920,1497,15664,-32768,25439,-12374,4577,19499,-3593,-18063,-32768,26133,-103,-19024,23191,32767,30850,20521,12106,-18281,28583,-3404,-21013,-4453,-25717,-24819,25648,-9156,20685,10996,18049,-3848,-18965,31373,-5955,29786,4081,27158,13385,-25729,-11684,-22463,32767,-20656,24934,-7767,-4659,5098,-28426,28501,-28159,8253,-29906,-8895,-4591,10417,21000,17320,-32768,21630,-31785,24585,8700,32767,32625,-9416,32767,-15021,-570,-7428,26332,20391,8324,-16951,-27035,-25707,-2385,-677,7616,-8317,15727,-5932,14615,4228,-4518,-11958,-32768,-3310,-13310,-21615,-32768,28135,-7823,5452,-32768,32767,-16465,3330,-25867,20611,1565,32767,-30677,-6188,-19112,-14102,28241,-17900,1856,-6738,29248,-14438,27860,-5383,32767,21255,-27101,-2942,-1291,4394,24568,10363,16142,29740,-8447,-32768,3571,-32768,-32768,-31790,-20670,24499,18775,-32768,16968,-664,-12943,31324,21750,32767,11155,19427,27717,-4071,22911,-3901,-15080,8676,-1741,-31298,5226,-32768,-25207,14761,-24530,21974,-28931,31603,9376,32767,32767,-13528,-26376,14485,9473,-1240,-24266,-28052,-28436,-4437,4286,-4225,30025,6989,29292,-608,-32768,-32768,-11397,-24857,28948,-4874,29207,-12313,31188,13780,-26461,14531,-19115,-29199,-3109,17883,-26307,-4319,-32768,8903,821,7918,17250,-32137,-32768,-1034,-32768,18243,16490,-32768,-29477,-2663,-27069,-6213,6807,16501,-12671,-13654,26433,-16743,-22624,17414,7652,9062,32767,-2616,11995,26015,-15508,24991,-4115,28868,1142,-18095,21596,28998,-26310,-791,32767,-31434,26494,32767,16466,-10831,30317,2307,-11315,16194,4741,-15750,-32768,-16259,-9493,6695,1511,25093,32652,6307,32271,32767,5800,22991,-22830,-14613,27321,-19060,16898,5851,-23915,3911,-1483,-30340,-24721,-13940,-28692,23160,-1085,22310,-24451,-22358,26335,15795,-32768,-3032,26439,32685,-26765,6571,-24495,10421,-29867,17745,18141,15394,-5547,22868,4297,13690,6087,-32768,17736,-23865,333,32731,9945,-3280,-24367,15837,18
042,-19379,31163,-32768,-27592,6199,-10865,14280,28247,-13305,-5805,10804,24725,-11808,-32768,4139,-20705,-5950,20992,-20025,3648,-12762,-18270,25561,32767,-6771,27350,-16927,1132,-161,-623,-24240,-32768,32767,-32768,-32768,-7792,-3149,-22180,-24810,-5600,1404,-3802,3265,7630,-28251,20206,-12736,-4168,14320,-7694,17754,-18457,0,-32768,14435,-25511,14487,-32768,11519,12279,4283,-32768,-13798,4074,-27094,21690,-17299,-11969,15892,-26186,-21461,-1873,-32768,14787,-11734,22739,-12254,-27493,7912,7787,1205,-24388,23900,-32768,11495,25827,-32498,-8321,4622,-16786,4457,6196,-18881,26658,-29452,-28859,5425,17383,-1484,-29878,-21270,-386,24851,-24110,-5286,30775,26186,22054,-30705,10590,-17348,-20633,27823,1502,21736,-31935,5198,19008,8078,-25889,-7208,25565,-29698,-17263,-7539,22526,-17421,9697,-9230,-1507,4926,-32768,-5312,32767,5360,-16106,6914,-29400,-6858,10910,13175,-6309,-32768,-32768,19025,-27310,23357,-32768,20019,28983,17213,32767,-17389,-1396,-1116,-14593,-5,11124,19250,32767,-18633,3081,-17230,274,14257,-17136,-4419,-11976,-1871,19528,19892,-7507,-26442,-6749,22996,8405,10090,28775,10081,3698,-16349,24316,25416,2344,9169,-10917,-32710,-23770,-18826,-9099,-32768,-19204,-15574,11306,5644,-13209,-9100,21556,-7665,21145,-5315,22467,28224,-4822,16587,-32768,10363,6254,-7743,17802,25293,-8159,-3264,-4973,-18199,22833,-8658,14767,-1146,21262,17133,-20338,14755,8299,-24949,-472,-4829,-32768,5277,-32077,5486,-24611,-24562,7447,16862,32767,-23544,-23264,-23211,23225,25632,22461,-875,-24673,24824,3613,19351,-4654,25159,32767,2553,24839,2876,-21087,-4944,27906,-13682,-29665,15173,17720,-22985,-19453,28751,-30703,-31674,28796,-30853,5232,-6934,-24130,6346,12428,1942,31466,19803,-13635,-26413,8978,32767,32767,20155,15191,9485,-1725,-27332,-25243,7177,-13225,32767,-32768,-32768,-32536,-31600,-32768,20045,-22351,32767,-19512,8789,-12488,2112,-612,-18005,-11482,11055,-20097,32767,-15315,-27443,8308,-1480,-6672,14025,-11877,-14143,29148,14899,-27880,-6160,3010,29442,26253,-30237,1
1905,19635,-13611,-18200,32767,-4210,-7133,-23200,-11410,-19776,-14762,-2682,17731,578,32767,-3852,26345,5028,24470,26099,4892,3871,-27766,-2046,6965,-5527,27613,-14501,-32768,12675,20613,2918,31048,12456,-17990,-12651,14207,28793,-2984,-19152,-14452,22877,19705,9072,13449,31989,-15705,-345,-24832,18895,-13831,-26137,27450,12723,-17287,19498,10809,-22927,-26646,-15804,20095,-3882,5619,-21817,-17148,14498,32767,29199,18228,10591,21932,32767,27459,-19434,-32768,24845,-6578,881,1400,-6143,17471,3288,8967,-11020,-26171,24587,-852,29314,-24532,30154,6906,10981,-14086,28028,8696,-28778,23913,-32768,-11984,-5058,-5246,-24122,-19807,-32768,923,-14305,8573,-1457,32767,-25886,25932,2240,14143,-32768,-32768,-1385,27621,-12269,19343,-23357,-18463,21575,-23140,22457,-15226,6252,-10817,24941,-30028,3552,-32768,8574,31172,4636,12823,-26924,21013,-11319,-21825,-20508,25416,434,-21361,22589,15942,-15749,-20705,344,18586,27963,32767,-26258,30249,11657,-10335,-32768,23124,16537,-32768,-12082,13603,-6711,11541,-10718,18318,22750,32767,-15061,-32768,-10842,11568,13252,-210,-11151,5720,-19585,-9989,11675,17253,25760,19281,-1354,11515,-29293,-10757,15701,-25690,6690,-22863,7872,-4388,-7381,-12488,-8464,-26092,32767,-17155,-8357,32388,32767,32767,3607,-8791,32767,8192,30038,29586,15483,22393,-12755,-32768,20948,-27260,-19427,-10052,29542,-32768,8897,32767,2046,-23524,28330,-15946,4308,-27245,-18156,10887,-32768,-17856,-3905,29631,7439,20657,-4347,-5961,-17996,-32768,-32768,28699,-9467,-10500,-32768,-24002,10895,-28163,11179,3564,-32768,-32768,24121,-4842,14461,-9707,-20676,32767,-3266,30072,-1686,-3293,-7933,12099,-21258,13586,10195,-23964,-14377,20723,32767,-31582,17967,-12827,-10360,-6743,-6426,26669,13741,19204,19733,-20014,30623,-5389,-6728,15353,13975,23169,32767,32718,5164,-26858,21059,22007,-2222,-32281,-8190,-18101,5758,-690,-32768,14567,-22582,-14235,12958,-23232,27711,26461,11641,-23230,29851,-432,17324,31758,-11264,-2090,14652,-6088,26415,-25146,-4833,24975,12944,-4356,-24470,-3
872,-10400,19157,698,8247,-32308,32767,-15447,1184,27162,1495,333,-17576,-27021,-13353,10653,-31954,-29067,-19625,28315,-15065,6159,-20827,-2077,15060,1804,-26032,-19091,-20167,-18699,15850,-6265,8087,10319,-32768,-14682,17134,-30269,-21442,15338,12539,-422,-24092,29296,-7705,-7812,-20017,-32768,16156,1872,9541,-32768,-25035,-29780,-20074,32767,32767,8024,25120,16066,-32174,-7287,-32768,-12813,16547,9817,-2734,24311,-32768,29150,-2134,10030,21630,-8279,2507,32767,32767,-31490,1144,8872,7318,-13393,-9544,-11283,7236,-27299,-32768,26494,-9883,8661,12285,-10702,-14178,-30807,32767,-30261,-32768,22241,-28903,-16239,-27870,30442,22335,32767,-29617,-15383,-17500,-31599,18404,-7731,-1448,-4702,-32768,11123,-14615,13251,-2520,5884,-2379,26546,12872,17207,7809,8663,4713,-17114,20911,-32353,13215,-209,9299,-11508,18639,-27742,-12259,12844,-8020,7923,22346,-17284,18710,31128,-21064,-32768,354,9498,-32768,-7069,27530,-22739,6599,-19254,-29331,-16259,3881,-18039,24171,21839,12283,-3729,14897,22969,6809,-30534,-32768,-14400,-26650,-28875,29784,3997,-7153,-8400,32767,-5870,25621,-8159,-11483,8025,-18480,9252,18908,-6478,-32768,21744,8157,-2430,32767,25746,11616,25554,-19446,27223,-26634,10782,-25821,20254,27455,-1072,-8249,-1596,-17925,-30634,-6948,-24001,237,2837,6681,-18659,23280,32767,6870,-10454,-15812,-9298,6105,3882,18108,24548,31882,-2408,-25284,-25935,-5951,6545,32767,-16505,2861,-3804,-9754,26695,32767,7914,14950,32767,-25703,19285,10919,-354,16022,2875,-1385,26704,-6905,9357,16585,25115,-18445,-256,7531,12635,-4595,-10215,14665,-32768,32541,-10686,-13735,14678,-32768,7334,-19094,28349,29021,31219,1603,18103,-7106,-5579,-6585,-12755,-13040,21526,-29794,27653,6094,2362,11334,-68,17584,-24066,13155,-16847,-24319,3304,-2666,-32768,20500,19079,32767,15409,-26406,6786,1016,8202,17211,-32768,-14423,-1330,32767,-32768,32767,-32768,8050,-19498,14618,7171,-18020,-4119,-3940,11735,4934,16897,-22464,21528,-321,-26151,-23368,-16181,-20384,15042,19481,22435,-17050,-19626,1358,29832,-2
3934,1732,10171,17911,-20484,9084,-26112,-1449,21497,14420,5484,21007,-25663,12454,-16318,25465,-15640,-32768,-10124,-14320,15886,-11546,24718,-20366,21287,11140,-6214,11722,32767,-32768,-5460,17326,-11257,17122,7621,-4644,27620,32767,-9429,-13945,-22971,-5759,-5354,-20957,-11897,-14911,24266,24850,-16029,12314,-7468,32767,13609,-23662,-28133,25682,-28338,-4352,29901,5878,-20826,-16263,-5534,-31694,20849,14607,-13630,-11424,-6827,14256,-20215,27263,-23059,-23850,-24159,27049,10383,21863,10051,1839,-86,18419,672,-6495,22163,-17875,-15471,-20022,12420,-17829,5920,-19432,-8406,-14990,-29955,-9651,-112,-12816,28895,23783,-10293,-17568,20262,-1299,15560,6227,9520,-27046,-28835,31026,-3357,20553,32767,19119,-10053,32767,17214,9579,-7045,18669,853,17967,-22337,13064,23975,-16201,13016,-3145,-28597,-12037,-30794,-29137,-5324,30622,31512,-17605,31315,-2944,-32768,26230,-27937,3008,16658,14077,-17645,4517,32767,-18047,23558,-22392,-10392,18158,4593,28998,-29386,-18458,17304,-4145,27239,-30224,-13596,7376,23577,14448,16713,-32768,-11805,3788,14742,-30135,32767,30874,-32164,-25903,-25729,-23905,-20540,-17518,-29487,-9723,-17115,26524,32767,9846,25405,-24705,12804,18823,-17786,21172,-30963,-21299,6791,12714,-9457,-21105,-4370,17983,-12634,-31019,10605,-21905,-27493,-32731,22718,3558,7561,-24135,19676,-20949,8351,829,32767,-23220,-32768,-13810,683,-13961,-25814,8479,11540,-18563,-10828,23816,-15411,-21917,-19569,23862,12583,3996,3058,-13460,4564,-13835,16915,17062,32767,30676,-4711,19459,30313,21996,28841,-18125,25573,29629,18849,-31209,19431,32767,-4807,26238,-9537,-23996,26899,-19602,7363,-26859,-17376,31215,-19793,-26420,-12704,-19757,-16288,-856,25976,-19607,15387,22846,8077,-11176,-22923,32676,-31512,28515,32767,30640,-25604,12465,-24900,8944,24699,-7049,3356,-4505,10775,-32768,32767,12514,-1963,32767,-8384,5287,-9218,13402,6603,-14571,-32768,11041,29479,1141,-17652,-13596,-24588,-23777,-6484,18501,-32768,26700,13784,-24156,-984,7487,-28345,-1430,24802,-17116,3187,-13559,287
94,-12759,14975,8020,-19449,-12204,-2959,6947,3022,-32768,15144,-6173,26916,9366,-21617,20215,26950,19066,27761,-9974,-27376,-12208,-11767,25852,-6726,9058,21469,-24080,-10189,-25217,-28262,32767,-11763,-3485,-14894,18175,-13552,-7362,5652,-16860,8758,-1019,32767,8690,-6820,1726,15533,4694,19636,21720,24466,-4012,32767,-1602,15291,5052,-22612,2868,21634,31384,24417,32767,2084,11669,3258,-32768,6542,-4054,-22086,-3895,-32768,-4494,4163,-22674,-10775,-10410,-12943,28329,-12976,17705,12817,32767,25070,22352,21577,-32768,-25746,-27999,-17734,24797,20789,-24579,-31270,14367,-5505,-32768,16825,-30531,-176,-21576,29817,-24578,32767,-13523,23451,-7746,-4851,-21523,-20418,-32768,16163,-28250,-32768,-28546,4465,-4261,-3854,-32768,-21704,-30876,-6446,12097,28711,9604,4196,15920,-26177,24247,27454,-13860,7648,-16284,-28274,29813,9896,9861,-32768,-14513,7579,-6565,30333,32767,27093,32767,-28164,-4701,10266,15847,-30403,-26096,-4746,20143,17877,-16401,-6684,-32768,-30573,32767,17180,13624,16233,29912,-3772,32767,2156,13403,18121,-26074,2829,-11206,13652,8995,29767,-16646,-28624,31130,-23759,-30345,16272,20370,10031,3034,25689,-23132,-27348,-32768,29774,-9410,-18649,6298,23598,-14940,-1736,22259,13164,-32768,-21174,23625,19616,16805,21098,6304,21476,4853,17069,14592,-21538,-15427,18728,9214,12352,-29474,-7869,-26246,-12294,-24125,-17336,27440,1911,24396,6281,-17306,-27695,-15864,17029,6517,1265,-32768,-27519,-17845,32767,12444,-14819,3167,-30675,2621,257,32294,6749,-25864,15207,-26432,27309,-3639,6649,-18200,-411,-16747,-18620,-4760,-22192,13146,-5761,-25208,23968,-10173,-12135,-8493,30510,1594,-29627,-4112,19150,-32768,-16025,-9104,-7521,7873,-19496,21122,-13343,28793,-16105,-2143,18273,12835,-10412,-31589,2009,26466,-1311,32767,6778,-10265,14786,-19092,-31815,32548,-28140,-10814,-12089,-32768,-6823,32767,-26759,-23026,32767,32767,16362,-17812,20180,-14716,-9491,32767,-1489,28380,32767,-7631,-23596,1227,7680,12085,-29442,-30654,1300,-5959,-14981,-4519,-30015,-8741,-32768,-4298,20
956,-21920,20093,-31697,8747,14748,-27424,-28121,-31864,31674,-30330,1746,-15619,6983,-861,28091,9595,15248,32767,-22205,4240,22956,-32417,19049,8930,22846,7138,21491,9465,31333,10068,11920,-4283,6814,-30055,-11290,-17921,12334,3803,-24438,-12294,-20790,32767,32767,-21639,32767,5089,-1652,-2946,20408,-19218,27538,21518,-22960,-11192,1124,12848,-12506,1600,-2747,2680,-32768,-19118,24182,-27893,-12442,-21287,21791,-12653,5781,-18583,4039,18,16687,19021,-20675,4254,7932,-1715,-22660,17745,-23316,-13992,4133,-1273,-3704,-10590,-32768,-1913,22258,3962,8444,-23512,1031,12695,28334,7831,19605,17206,-20843,25565,7019,-32768,32767,-32768,-21649,-7647,27213,-32768,-9593,-32768,-12625,16458,-32768,12533,-5021,18067,-32768,-1807,-3222,4845,-18138,18931,-25087,26287,6684,31757,-9206,-12049,-32355,18643,23181 diff --git a/tensorflow/lite/micro/integration_tests/seanet/add/add11_input0_int16.csv b/tensorflow/lite/micro/integration_tests/seanet/add/add11_input0_int16.csv new file mode 100644 index 0000000..3f69aca --- /dev/null +++ b/tensorflow/lite/micro/integration_tests/seanet/add/add11_input0_int16.csv @@ -0,0 +1 @@ 
+-484,-32614,23773,-27274,-18472,23033,16803,15579,5887,30543,-2955,-12771,-8321,-7915,23496,-30232,14021,21297,25501,-24779,-19240,5084,23774,25836,-11396,-10021,-16836,-2288,17969,12816,-11638,-26193,-31927,-13558,4042,29634,18893,1257,-4427,-3808,-3371,18837,-30132,-32343,-11185,25055,32166,-15676,-4178,20434,18434,16019,7268,30989,5401,-18444,-16722,-16537,-29333,27042,-21920,1884,9486,10163,-5681,-22872,-4724,11241,11413,-3922,-7026,-31828,-1806,-8490,31846,-21041,19422,-5022,32448,14568,30249,5278,-29460,19288,-25071,28721,-6835,484,13354,16711,-11422,-11731,-7324,14509,13163,6826,-29557,-2292,1466,-27904,-11556,27423,27560,-14207,-1294,-8301,25612,-10789,-12436,7412,-23517,6046,-15144,5810,31307,-13129,3191,-2116,-25350,-1859,-32570,-24966,17595,24378,12280,-13196,21594,-13379,-21678,22386,2916,22013,-5982,28647,-32357,-18680,-16578,2954,27081,-22872,-30056,-7448,24666,4554,2409,-22670,31193,-31921,25811,-31896,-9640,-24859,26479,-3372,11477,14584,14017,-6242,-3806,-29345,13982,-20674,-13828,-8001,-31081,-24296,7865,20901,-5791,-30876,-16589,14627,-3818,23981,-32676,18029,16873,29930,17899,-10108,-26731,12231,21926,5846,-1593,31616,-3664,-7619,12545,-22786,13807,2812,11936,-21105,18448,-19731,-10235,689,31617,-13564,-9178,-31640,8696,-4814,1977,2615,27829,24528,-32260,25958,7003,14151,911,-28524,-14290,-2056,-29921,-181,24407,-14485,14573,28867,-28917,-1048,-27051,-16714,-3859,18845,15554,-10495,-26019,27083,-11548,22771,15166,-29369,-13589,32281,23574,-9784,-22349,23449,-32475,23578,-15123,-15147,-13828,27219,-20215,-29021,-32487,-23224,27750,-17270,-16672,27495,-9928,11805,7653,-27224,-24498,-1196,-24677,-21910,-24551,-744,-20043,-15589,26884,-3343,-22231,-30024,19052,8148,-23760,17998,-25787,-13182,-12512,-23903,-21281,2793,29392,31063,-17630,-16774,-24401,14910,-32713,-25572,15124,-12358,-11480,-21191,-130,15438,11318,4212,-6651,-980,-20310,287,-23480,-14932,15755,15108,10890,10216,-29407,-4290,11525,-4566,27830,-10081,-10554,-1046,21866,13996,-8500,-2124
7,5845,30378,-3262,6190,4483,-6753,7537,-13242,-28760,-28504,10471,2079,7940,-11087,15131,11502,31075,-11452,10432,-11203,-15319,-24022,-31563,26037,-25416,25269,26647,-11867,-27105,5490,-29387,10926,18610,15941,-12214,-7727,6439,28484,-3402,-16907,20742,16169,17488,-29124,19986,13280,20174,-25453,-24808,-23125,31014,28248,-23352,-2352,-2221,11132,20754,-17742,-27331,5340,-18838,20555,16802,-20768,32561,20435,9821,-16007,17652,-13010,20499,19521,11637,1404,22944,27015,12109,3894,-23186,-16132,-14088,-30730,-28431,27827,31355,-25933,26018,-21823,24437,25591,18793,-12971,-8785,11589,-18614,13728,27947,15656,27907,-7513,19494,-26749,-30952,-30324,-11259,-29708,-3679,-12086,25783,17410,-4218,19286,-25389,16038,-32316,5740,23188,-26686,25110,-8237,-4195,-10962,-18747,14311,8542,-19600,5207,26288,-7091,-13900,16748,-32752,-1307,-2224,18991,-21568,-17115,-22358,-25979,31059,10227,2259,24019,14262,-31932,28695,-3136,23772,-9850,-7313,13829,-18609,-3812,-6649,31146,31367,-9684,-2765,19495,-2649,25879,32685,-5670,-5408,-9062,32323,-12685,-185,14709,3794,28758,13216,16780,-27146,-2751,31909,11639,-31523,19510,-29736,-1706,13148,-2761,-31526,31812,1212,-7680,-1494,-32443,-18223,-27174,-3310,7030,-31429,16082,-12338,9535,26581,118,-14027,-32589,20850,4912,-11790,26091,29917,23089,11552,10622,-16957,18966,-4871,-16734,4438,-30848,-20493,18253,-16470,13938,8715,13257,159,-24341,32244,760,32245,4540,20854,14578,-26885,-17301,-19502,32009,-18120,29671,-12937,-7679,4086,-21467,30464,-30371,8014,-22223,-16291,-11613,14863,18585,23364,-29558,16205,-32366,30711,5839,31651,23533,-10735,23585,-5703,763,-11966,28820,13052,13690,-20131,-30140,-24632,1301,3464,14799,-4809,19669,-3912,16665,11385,-12760,-12733,-32056,3047,-8577,-28087,-31236,21299,-2071,1719,-25920,31835,-21602,745,-29970,21274,9290,32188,-24085,1269,-20597,-14888,21791,-12126,3293,-4318,27979,-17975,23077,-1868,26976,22530,-23716,528,-6356,12273,31099,3174,23878,30574,-13134,-28186,8462,-31750,-31057,-32280,-14268,15268,1913
2,-32076,9381,704,-8482,23007,20706,23689,7070,22220,30090,-11335,26942,-9133,-8225,8776,2416,-23620,10356,-24179,-27502,11022,-18470,20234,-19483,30084,9958,31120,26476,-9590,-25686,6177,16497,-8794,-22066,-20192,-22098,182,9757,-7563,31682,15252,27708,-7801,-26910,-32292,-5384,-17079,21958,-4015,21908,-19878,24833,7040,-22986,7682,-26581,-30713,-11297,11937,-23195,-10356,-32354,2154,915,1667,17999,-32602,-24191,-1229,-31068,18090,22814,-28224,-30660,-1636,-17044,-12459,4051,10900,-13980,-10870,22204,-14287,-26330,14954,5114,12505,28260,-1985,3982,26787,-8961,30227,-10930,29953,7531,-20909,26841,19338,-21982,6445,25811,-26187,19540,28494,20992,-3252,32160,6690,-13033,21074,-3375,-11730,-27777,-12482,-11644,-664,-4562,21166,26554,3734,29992,27052,1102,19553,-17127,-12685,20463,-19990,16736,297,-21363,3693,-5595,-20553,-23604,-9148,-21784,23002,-541,19340,-15415,-28098,28094,13402,-30245,2850,32204,27582,-23321,5223,-22776,17383,-26426,21052,17409,13197,-4253,15968,6974,12304,8016,-27544,25022,-17584,-5107,29216,4125,5293,-23222,15987,17808,-25075,22212,-23098,-24946,-1847,-2793,10124,32554,-21104,1951,17081,18451,-17745,-26974,5742,-24259,-3183,12343,-27343,8200,-5514,-11502,18311,26146,-1746,30245,-19823,-2840,4485,3124,-19949,-25572,25788,-30230,-31627,-4452,1391,-15802,-15327,-1343,-4563,-1330,8593,11716,-28894,21496,-9786,-9361,14588,-14670,10398,-24532,6414,-32232,19807,-27350,7121,-30214,17541,4571,-401,-31942,-16394,279,-21809,16478,-15116,-19467,12266,-28672,-28430,-5899,-29719,18275,-7313,19411,-11689,-20619,-176,3820,8912,-31092,18226,-31383,19579,31416,-28447,-1599,6743,-10095,-2819,-2287,-15095,20060,-31150,-32490,13769,20236,-9981,-20467,-25208,6612,27826,-15209,560,21330,22723,25392,-31829,7897,-8873,-19705,18832,-75,13062,-22519,-3588,17303,13803,-28618,-6953,32644,-30954,-12534,-484,14911,-16561,2508,-616,-6852,-583,-31441,-6002,27491,9511,-18028,13016,-25613,-11840,5396,7875,1617,-27373,-28728,10822,-26065,23892,-29812,15581,25217,19805,25006,-8157,
1639,1864,-21468,469,7751,26097,29132,-17552,6079,-17035,730,22011,-16660,-6978,-6302,3349,12193,11362,-13862,-18945,2110,15397,11537,16292,31025,14525,1015,-21175,29061,18519,4226,5212,-18917,-27617,-30072,-15646,-987,-28619,-22616,-18386,7438,11247,-7249,-3093,27658,-897,26751,207,27881,25774,-8855,21778,-28370,15066,13086,-14436,12483,26054,-15210,-7919,-12702,-12148,22900,-15152,10433,1303,23473,19210,-10813,21519,15933,-15841,852,-7026,-32392,7228,-30039,10088,-15690,-21547,13610,11110,24924,-16949,-30092,-19366,26908,18667,23130,-8896,-25665,22795,816,26895,-8993,30797,27914,1678,31077,3430,-23162,-9727,28258,-21198,-24771,21309,18831,-30452,-14132,22819,-31906,-32248,25120,-28701,785,-1217,-26938,248,9201,-6531,32134,26782,-20407,-21913,7712,29762,32698,13850,6727,4252,1129,-30206,-21369,1302,-11635,32607,-28466,-30650,-29265,-27113,-32446,19026,-21184,28649,-16758,16412,-4854,7731,-7026,-24169,-9314,6467,-16700,31685,-13023,-26677,9274,-1884,-14861,12459,-6757,-9281,25011,15708,-24132,-804,-2670,26696,17907,-32594,17000,20109,-18239,-21408,29835,-10982,-4409,-22351,-16872,-10748,-9007,-2991,15126,-5877,26769,-9570,29238,8391,19285,16590,5151,6477,-24863,-3940,14951,-210,24369,-7511,-31146,18077,14449,-3899,29971,19212,-14628,-10685,7958,26935,764,-13147,-21569,27327,26254,9475,18094,31853,-12754,-4359,-23541,12117,-9618,-31911,32135,16158,-7940,10186,14244,-17004,-17013,-16643,10665,403,10249,-23427,-18347,19866,31827,24231,14558,14229,24112,31302,22749,-24806,-32552,23177,-11610,8838,3383,-6496,10970,1969,15907,-5919,-20201,21322,1318,28642,-23534,26789,13407,6113,-10328,26551,1372,-32669,16386,-31696,-19377,-8512,-5754,-15051,-27392,-24142,7947,-15760,7551,5096,27568,-19861,28458,1353,17334,-25935,-30987,-3977,21364,-17308,11955,-28955,-16264,21694,-26777,13429,-13171,8215,-7908,18443,-32144,9668,-27625,9213,24546,7404,14899,-30486,25574,-14076,-12861,-20745,23425,4820,-18847,16949,19281,-14515,-17688,3132,24312,19016,26128,-27266,30418,14582,-17340,-30532
,30535,23982,-30665,-8558,12916,-645,6010,-13878,22154,27675,30899,-6671,-31953,-7704,14130,8742,6533,-9133,6139,-13804,-11029,18387,8134,21172,26839,-8799,4532,-23484,-15931,9639,-16231,7913,-16816,14899,3388,-15483,-4005,-2452,-29670,30135,-20300,-853,22792,27340,24480,-3089,-12766,31284,14884,29921,28375,14003,13738,-10471,-32584,26770,-25748,-22085,-1666,31221,-31967,8706,24530,4278,-25640,20273,-19226,11070,-17906,-20209,15458,-28494,-12576,-10971,31189,-985,18026,3715,829,-23309,-25559,-29930,24435,-14156,-10885,-31128,-27249,6658,-25979,18556,11227,-30633,-32731,26688,923,22293,-4675,-20803,31000,-10687,31452,-2605,-3156,-12584,13492,-13558,9019,17197,-26539,-15212,11300,30194,-29359,8425,-8233,-15000,-8705,-7634,28691,20618,20026,14679,-18226,29959,2853,-14575,17142,7403,30630,27371,24367,-696,-26193,26265,15526,-513,-27168,-14237,-14319,7082,631,-31607,7600,-12840,-19007,19813,-13590,17998,29852,19416,-14543,20611,807,24222,26862,-16313,-3022,8417,-14402,27145,-17011,-4593,17425,6033,538,-28488,-5834,-1546,24647,-6284,6370,-31035,31795,-19196,-3235,27520,1070,1091,-16663,-30429,-8327,1703,-28284,-21848,-18086,23059,-12711,2262,-11141,-7658,7475,9479,-17635,-23604,-25927,-10936,11708,-12735,12363,10289,-31304,-17537,19788,-21923,-20172,19348,17825,3834,-26775,28693,-8046,-9781,-25400,-32610,11191,-5818,9985,-32365,-25346,-24371,-27535,28539,31935,-795,21179,12922,-26069,-11458,-31870,-18469,23382,9173,-582,30157,-27519,24892,-5584,4011,16839,-7926,6138,26345,30382,-29132,-1304,13224,9902,-18725,-5011,-14015,5745,-27715,-30563,27244,-11164,1283,16026,-6274,-14263,-26299,28613,-26905,-32065,21899,-21858,-12288,-26007,28021,12637,28553,-32510,-23329,-24419,-26178,20519,-11213,619,-12341,-30066,8642,-11186,17643,6004,2910,6364,27209,20346,20459,4439,13516,943,-9066,16890,-27996,16263,-5204,14020,-15234,17616,-28745,-4422,20489,-7358,1575,26758,-8973,15016,25275,-21097,-32090,-4525,1163,-28654,265,32457,-13436,8204,-13698,-24246,-16946,7900,-20851,24437,24070,738
0,-5178,6747,23703,9417,-29861,-27661,-6211,-25114,-25731,23256,-1991,-1472,-3361,32335,-11653,28931,-7351,-14977,13869,-19378,15661,12799,-506,-30228,21584,9909,-6028,26126,32301,7387,18421,-16403,19692,-27611,15237,-30573,26886,28974,-6129,-5219,6752,-16313,-26992,-767,-14747,1536,-2018,11579,-18293,28049,29323,3518,-18058,-19685,-4681,10253,11317,13001,24752,24188,5757,-19419,-18490,2937,12723,25107,-18274,5803,-5101,-17535,26692,26072,5309,6290,31924,-26270,23672,10353,-696,18254,1100,-2210,16969,-11808,16986,8689,24107,-15344,-7608,1954,14658,-10249,-11148,6505,-25806,29893,-4793,-20138,9200,-27439,3616,-13901,21240,32421,26612,-2355,23758,-2053,-461,-9103,-6469,-11626,17041,-30654,23711,-1925,3806,16235,6362,15077,-24116,4210,-11881,-31711,1338,-8729,-25465,13839,24245,28348,21315,-24307,9134,4558,2180,12083,-28117,-9544,3530,29647,-26283,29124,-31710,15383,-24575,7695,42,-15114,-8741,2295,10268,5404,18754,-24662,18050,6431,-26157,-28690,-13700,-15951,22693,9907,13637,-21056,-23780,8453,32503,-14300,-3658,9906,22713,-11338,15933,-17102,1603,20337,16866,-1722,23154,-18587,9425,-14358,28014,-7533,-26684,-13772,-21998,13378,-15554,28841,-18321,13003,10676,-4262,8617,31718,-32628,-4705,9648,-4805,8856,7644,-7161,23089,27917,-6839,-9859,-18301,1144,-5386,-11756,-6686,-9544,20802,30790,-19086,3030,-7302,32116,8865,-21609,-25484,18256,-22056,3962,19788,-606,-21177,-10916,-10290,-22255,12913,21504,-6853,-16515,-2302,6845,-25924,23673,-21874,-18187,-26913,17962,11855,25411,7184,2973,-4033,10058,5670,-11979,24828,-25045,-21592,-14414,10311,-21846,12455,-24534,-1787,-6915,-30390,-2152,6534,-9570,27152,14893,-14337,-13239,26338,5117,20269,6789,13696,-17360,-28169,28829,-8783,17170,27130,19814,-1018,31821,7683,12394,-6055,17778,-4107,18733,-24031,15646,27295,-19048,16358,-447,-29167,-10644,-24337,-27281,-11543,30639,21796,-20629,29642,-3594,-30995,19053,-19059,3220,7662,21651,-25313,-4007,23628,-23737,15869,-20718,-16601,23527,12037,24024,-26535,-16356,9203,-12245,25269,-2
0201,-20041,10286,19258,13151,13869,-31535,-18962,-1346,6648,-27312,32335,28439,-22224,-26785,-23845,-23539,-11504,-24245,-22594,-4321,-20786,21730,32473,12018,28684,-23954,14661,18170,-16324,21982,-27866,-23621,12491,5578,-13928,-15977,-7586,14712,-16296,-22064,3154,-12247,-26387,-23707,25018,-3012,10394,-14519,19577,-11485,11391,2685,32710,-15002,-32446,-11353,7898,-13377,-31059,7994,9438,-12746,-14705,31190,-22925,-25793,-13620,15554,6095,999,-2781,-18050,10094,-8901,20040,10951,26353,23007,-982,24403,28969,13639,24021,-22112,31204,31120,25185,-24084,13728,24996,-12059,19044,-6303,-17840,20025,-22932,866,-32344,-8874,23026,-24048,-19612,-14980,-23707,-17552,4217,22597,-23767,7522,22938,8768,-7787,-28313,28370,-26195,26100,30030,28656,-19578,12591,-27446,9767,25877,-4300,4123,-5639,8080,-29054,29293,11068,2034,30754,-7141,-2866,-5245,8897,-274,-8698,-31239,7658,23863,-322,-13639,-21069,-15954,-16081,1721,23216,-29999,29779,16862,-21484,-5696,13629,-22664,-1181,29578,-22334,201,-17087,23377,-9887,8717,11515,-24588,-13720,2963,9377,11453,-32538,6731,-5392,27016,12717,-15748,22806,28660,9619,26367,-3555,-24404,-7507,-12561,21892,-1852,475,13240,-22699,-5095,-29344,-23124,29291,-19187,-3711,-21768,21704,-12187,-5803,-359,-7595,2274,5457,31653,15820,-7773,-2209,6139,3099,13758,17277,19753,3509,25399,-2418,18414,12618,-18237,9232,15683,28855,23314,28476,4672,14273,-4686,-28065,1258,-1489,-27250,-3866,-30308,-3880,11104,-28000,-13832,-15843,-18853,29586,-7073,15790,9465,25001,23333,22997,26188,-28729,-21506,-30049,-9985,22004,14103,-27519,-22140,20749,-2143,-26299,10901,-31256,-244,-16359,32010,-27808,28482,-17218,28841,-12255,-3864,-16249,-16422,-27884,21294,-32670,-31823,-29786,-3489,-5795,592,-31645,-26921,-29402,-8986,18229,27730,13967,12275,10078,-19556,27004,28383,-18243,7075,-9659,-31358,21716,12509,9753,-30829,-13538,11928,-7799,31132,26305,17364,30485,-19460,-2579,14759,15035,-22880,-22645,-10999,14113,21144,-17528,-11375,-30486,-31039,23919,8861,7606,21132,2787
7,2728,28998,2981,18714,22699,-24706,3503,-12213,13765,607,23823,-20627,-23456,22035,-21999,-31444,12683,25705,15893,1782,25397,-17406,-18567,-26468,31428,-13152,-24176,-987,25290,-9876,-7128,25040,6327,-27950,-15745,26563,13737,12477,24670,11020,19687,-2894,16369,9275,-13357,-16903,20238,16739,12032,-30173,-11373,-28555,-12742,-29497,-24408,29191,-3854,22669,13145,-18489,-27720,-22294,13963,9578,9637,-31584,-18771,-22969,24417,12676,-21305,567,-28339,1785,-5977,25358,12985,-32764,19581,-32191,21828,-5404,10689,-19125,3507,-17616,-20200,-1639,-23170,17130,2426,-29504,18568,-14377,-7244,-11080,24710,-2747,-26237,-11586,11070,-32649,-15315,-7869,-2413,12755,-26738,14121,-10499,18884,-19029,-1695,10338,12557,-5416,-31009,5631,29519,-2103,28356,3614,-6650,12634,-14137,-24466,29539,-23917,-18278,-19629,-31741,-2132,31288,-27848,-15303,32684,29131,8902,-23871,22060,-7394,-14077,27841,-9152,31519,31250,-15051,-24507,-3120,15668,4958,-27430,-27344,-2090,-1006,-11098,-3675,-22480,-3907,-32592,-1872,16430,-27649,23736,-22813,1919,22400,-30902,-21878,-29709,31567,-30446,6374,-22356,9846,-6521,24449,4322,13025,31915,-27003,1382,21048,-23129,18664,11546,16967,2578,19331,15311,29404,2834,14130,3069,438,-24441,-8245,-13013,17175,6082,-27698,-16574,-17796,31209,29903,-22414,28223,12121,-23,-1977,14991,-17854,23900,27892,-14802,-6009,6450,10197,-6425,-3793,-6359,1590,-22573,-15270,28307,-25685,-4086,-26845,12022,-17434,-1140,-20564,4844,2894,7906,23807,-25219,10387,14204,-9563,-20899,25570,-16466,-13381,-2491,5365,-1347,-12156,-29273,2616,12952,4637,4983,-26963,-5132,10157,18957,2646,19108,25026,-13514,26811,8775,-30928,31424,-31749,-11880,-3853,20127,-30159,-9306,-27955,-8466,11194,-32560,19703,-12917,20012,-30864,-1534,-2198,10806,-22315,15359,-27350,18750,73,30524,-7391,-17431,-31379,11693,29742 diff --git a/tensorflow/lite/micro/integration_tests/seanet/add/add11_input1_int16.csv b/tensorflow/lite/micro/integration_tests/seanet/add/add11_input1_int16.csv new file mode 100644 
index 0000000..b138545 --- /dev/null +++ b/tensorflow/lite/micro/integration_tests/seanet/add/add11_input1_int16.csv @@ -0,0 +1 @@ +-29161,-11963,15858,-6445,-10460,29808,27147,-18808,-18763,-25186,-13867,-13876,7715,20382,-31731,31564,-12099,-27120,32555,21892,29259,24591,15312,8233,31213,20680,3703,-27082,-28114,15010,-13422,-5567,26256,-25812,-30583,15414,9313,6464,7092,31719,-19146,-16027,-21611,-22313,-32416,25951,5164,-25066,16945,25084,-1588,-17000,32421,17329,17737,30539,-5910,14342,4637,3563,31336,13068,-12982,-11661,9005,13764,-16393,-23968,17917,10041,-12437,11199,-13543,-30968,11694,27130,848,31128,-15041,-19429,-16521,24768,-20792,2418,8237,-21329,-15414,-22390,26936,-23249,-8062,-4797,27576,-4055,-9084,-21046,18317,-12446,21504,111,-23697,-13014,-28963,14044,24791,-28612,23923,31091,2340,-14634,8752,12771,-6388,-29339,-21885,-13895,-4401,-13842,25678,-17262,-18050,22491,-5326,-23129,-24420,-30768,6180,-27318,-6751,11958,2486,-20457,14491,7316,-12897,21997,3228,23860,1198,-11763,-6806,24327,-2661,6336,-392,-1132,29772,8320,26300,-27672,-22199,29346,28126,-10634,17566,10122,-8765,6204,18733,-16878,-2023,-5360,-22378,-17210,-4809,6892,18273,23694,2153,-22353,-6617,22320,12701,-25350,-13338,-27427,17351,31279,-5200,-246,4563,7429,13480,27623,-15256,634,16045,-31176,-20470,-8781,-215,-23775,16024,235,15512,-30816,-21507,16164,17264,-11675,-22372,28762,-5016,14209,-26648,-30670,-1814,-30507,-15060,257,24275,-9202,-28564,2620,-3804,-4318,31243,22844,13019,-19708,-914,-26727,29851,10247,7785,10470,-17191,-22685,-14922,30327,20384,-27496,21685,-11049,19801,-18207,-9402,9984,-23718,19384,-30990,2181,-21072,13196,7953,-31436,2088,-29168,-30199,25221,-6154,-18299,-9561,10612,11853,8369,-27186,27206,22846,-24487,-17171,-21303,-1960,-12145,-6409,15761,-608,12603,-11345,15204,22768,-28736,21665,29128,1963,-24334,1957,31419,27664,-1362,32157,17126,-17343,-5651,24004,25327,19458,-16161,2373,11929,30683,-1600,27172,15544,6526,16470,-11657,-6745,-3542,22823,9329,-23886,-
21608,-15547,-20604,7493,10536,-4090,16540,-21624,23138,-9448,-24382,-26873,-14651,13012,-29867,24992,22066,-29674,308,31746,-8424,-13618,-24613,6246,-18025,-11269,29919,-18421,10241,30960,-10049,-17028,4569,-16401,13312,-28905,-20868,30992,-31757,-30363,-29733,7060,9093,6732,24165,26495,28087,-14080,18481,2600,26702,14420,26144,-3456,28548,-11737,-23557,-29796,-30066,-7940,12874,-21584,-3199,-13909,5539,27636,24292,-18670,18701,18152,-4471,26377,7673,-28360,13438,-25543,10575,-23312,-6883,-24806,1106,25351,-31820,26768,28366,28167,12298,21401,-28851,-23566,17296,-13251,-6522,23810,22556,-5849,-9239,-18951,-28534,-26386,-11973,-20293,18232,11998,-32382,-31395,2741,28406,17255,25114,-20141,-18015,26408,-3748,-19087,-2279,-12655,-15760,-5767,-12443,-18675,-4606,-13035,-24197,-23279,-32711,22713,5964,2042,-24826,-6542,-1933,-6456,26050,6861,24751,25371,13171,-31596,15974,-20164,-24499,-30327,9668,-5422,-186,29700,17867,-6208,3267,-11266,-21917,2375,11456,-3592,-30396,4271,24940,14998,31756,10475,31265,-13080,-2259,17792,-32271,13079,16140,-6346,-28566,-30923,-30392,19257,26073,-22518,9777,1382,27780,-5941,9970,3930,22498,-18231,-24664,19978,-3803,4984,-21997,-14404,-3470,22135,241,-29635,16139,-412,26718,17994,-14441,-1419,-16164,23331,-176,-19876,3553,-27992,-20673,-5874,30385,-8457,17786,29589,-21008,30530,2112,-19457,-30146,-13287,-11882,1690,15101,-18820,-23721,-15105,17472,23553,29965,3376,-1652,30936,6128,-12259,-32589,23946,-11742,23121,29137,21590,6573,14725,-14355,23638,-8970,-24144,-14646,-2466,18750,-6914,9017,23224,-7044,7153,-5775,-22322,20831,12190,2876,-20979,-12551,13424,-601,-23429,29399,27211,-18594,5259,-25849,-22740,16452,7954,-27476,9169,15120,28228,6664,30694,-32274,-4904,18395,-14128,23866,-21670,15024,16575,630,-13415,-15433,-28365,-11668,-17666,-6511,-10355,-27652,31782,5084,-2733,-23280,-15364,28238,-12170,20571,-20186,13034,-31259,18736,22289,9107,20110,-6234,-29304,4824,-19191,-26889,9050,5515,19101,-18438,-5734,-7867,-546,15908,12907,-12228
,25046,-8651,-7794,-12500,19260,-30396,-28996,25122,-31994,-8534,19140,-29020,-19023,-28493,-22919,7615,-20295,30228,-4751,-2961,25419,-5019,-14406,25555,-28,32471,13319,-14019,-13948,28028,-19302,20362,-23013,-1951,-15301,-23159,-20220,-30982,13201,11366,-18311,2544,-30237,-33,-3889,28812,29601,-12337,2199,28577,-28107,28601,-3859,-24431,-18644,-16544,-21330,13307,-11679,-32308,630,27132,-24338,3564,-20517,-24703,21000,-2342,22116,30656,18208,22815,-8248,23089,31518,10992,31322,19089,-6913,23461,3817,23736,-504,22044,-5947,7583,-32116,919,-16825,-2740,-26752,-25852,9798,-3376,-32742,24593,9116,18044,7220,-7980,11087,-6185,18029,6078,8144,-14578,23096,-1896,27922,-7626,-21779,-24209,26347,-9318,-24208,13858,-23627,31022,-11479,-27040,31313,-14001,21311,18806,-19994,-26505,-12430,-16884,8507,-21274,29626,-12242,-30859,-11235,9803,26429,22540,10194,16978,8519,2699,19181,16594,8739,-17278,-4588,20799,6954,-2460,19802,-5243,110,15715,-31256,292,-15468,-20741,-3612,-1847,7104,-29505,25623,-11389,6120,-11837,-21545,-26459,13233,-8078,3869,-2010,-28047,-7503,-15645,-545,5456,-3853,21768,-10835,2722,-8351,-16466,-30593,-19260,20375,7258,20058,-31608,123,-3439,-2397,24917,27965,-32724,-4929,29101,-28351,13017,-21309,31712,-28081,-25541,19079,24448,-22068,-6772,17112,-9315,28679,31128,-17764,-24911,-22106,22591,28778,-17648,-15841,13953,14716,-17425,-13963,-11718,-22488,21632,-31555,-20125,-11131,-16483,-19931,-31118,-14973,22159,-8594,-20608,-16735,7547,-8514,-8767,20263,-3606,27603,24408,26172,-24096,-20718,-22803,11543,25038,-14832,-24715,26724,16817,-4702,12259,13517,-14934,15640,-5058,30339,10734,14094,30076,15465,-31747,-15790,-14475,8371,102,-20829,28946,13489,-29169,29612,16973,145,-32457,-25686,-9317,-23742,-8808,-22085,26524,30743,-10793,19945,11726,18882,-32331,-13874,32188,-29926,18655,-26218,-15688,-29060,-20999,29890,8252,-16545,9800,8194,-28686,262,28722,5651,28635,-29571,32062,2952,-22974,14952,350,-31234,10111,-14630,-25134,24516,-67,25244,-30682,20353,19799,
-8081,3555,20576,-16568,10145,-24178,-8885,19962,18732,17516,-28627,-27000,-26330,27361,284,-6251,-19591,13037,8883,-12863,23909,-31521,-11147,-10993,28475,-1780,10649,-29216,27689,-677,-11823,2396,-1761,-31717,1324,10414,-19140,-19271,24009,28431,25238,-23361,-32055,24368,-13293,-25132,-13678,-18525,9408,21100,-22241,21294,-7495,13200,32036,-13192,27992,-8526,-28821,-26065,16306,13391,12476,-22074,-19990,-20914,-26839,-24033,-24898,-19777,-24419,4076,16025,-22513,-13252,-19549,-26800,26550,16749,-7454,27970,18079,29939,-19426,-4397,25967,13600,-8992,-12169,-10913,-32089,-28090,-30185,-29684,-4888,9131,-5726,-8288,-1829,-18285,-29042,-6865,-24503,18547,23751,-20499,29875,-10227,-18055,21509,-6593,30291,8208,3114,9850,-31853,17146,-25747,17094,2822,-27944,-2602,11626,18864,-6390,30718,-12997,-25807,-7392,32226,-16456,17063,10096,7908,8578,-2479,15756,-20218,14930,21755,9867,31475,-8223,-29812,27915,-12108,3127,8053,11115,20024,29037,17937,-10407,15760,-9967,20765,-3570,6586,-29671,-14001,-6378,-11117,-14407,188,-326,12541,-6802,-30234,-26408,-21490,24207,26424,-6059,15229,-9112,21024,-5830,2108,-5136,1785,31974,3334,-17078,-15697,10247,-5745,-9015,-19001,20791,4968,26583,14346,-21301,-5347,19858,15355,23385,26204,-8936,1025,22591,-30323,-18938,1647,6567,24145,16850,22179,-15652,-13547,15034,30982,-1863,-10494,-5864,7487,-31264,-18970,7172,-23623,-22140,-22596,19411,25080,-1594,-27640,-9363,-5087,20893,1752,-13536,-19078,29360,-20870,-28179,-3163,-19893,-5298,-8235,15142,-338,22031,-13314,26436,-22586,-15214,-31973,31439,-14868,-18085,-31348,6021,31774,-15392,-18413,10012,7617,-22799,1690,13359,10476,-15590,-12172,21321,12707,23711,-3268,1752,20099,-30050,-7704,2442,21247,4356,-27698,-17159,-17675,7801,-7996,-2801,707,7166,-25675,16291,-11558,457,25932,19845,23934,-778,29947,13893,2860,-29696,32092,-30849,-26554,8064,2283,-24352,20843,-17932,-14197,2926,-14554,-20275,-13602,9988,18488,21156,24241,25272,-4908,-4366,17866,29834,-4956,-8511,-8963,19879,13404,-23621,-1768
0,-3958,19228,-11241,-10127,18272,-20950,12411,-29710,4615,2864,-16554,-5565,17085,-15437,-1776,-7572,-10537,-24886,28531,28090,8554,-6129,-13104,28190,-15213,-32039,-30972,-7124,-11044,111,-21568,18683,13819,-17738,-22632,31221,-28783,-14779,-9820,-11725,14534,-25293,-5555,-2612,-18158,5721,-27333,31124,12555,-31892,28213,24142,-16503,21391,19921,-30869,-5811,-18563,-27828,-28413,31776,-29598,-21048,18178,31045,14931,-26670,30167,21566,32685,24499,16530,10749,-26625,-5015,-824,2749,28446,-6263,-21085,-25674,-728,13512,-29678,-11673,-9637,-898,29810,-8757,12222,25123,15216,-26185,-30135,11009,-19147,-27751,-16591,27252,-11236,30295,6132,-29497,-24425,23227,-22376,-25493,10805,19335,3352,-19737,16557,13936,-3088,-29744,-29432,-22378,-20029,-14023,-20777,-32047,-17139,4232,22556,28470,-10646,3759,82,18911,-7430,-25063,14689,-28154,14025,5749,31633,32031,-2614,32583,-14927,19313,8594,5706,-12438,-28330,-6574,15402,-3082,-3067,-29983,30699,-9509,22151,-32236,26420,25428,21077,2379,-23383,20350,-6015,-13346,24204,-10921,-6019,-4837,-24386,23526,-32495,20511,-28105,-32001,31454,-17546,-31321,-28415,29289,-4576,-29058,12626,21011,3881,20761,32337,-7539,-25994,-23,23826,23611,-17595,19537,8074,-31371,-24101,26100,5554,1085,29460,16889,16386,-6278,1326,-2908,-239,17710,-16455,31685,-7982,-21839,-2217,14601,-6106,13519,-32602,21344,25757,-29159,-26816,20422,25302,-25766,12680,25444,-17531,-1761,-29428,13390,-13080,-25854,-877,-17851,-22134,-15912,14455,-3056,2682,8814,23858,5011,15716,28548,-3401,-3726,5716,-14911,31674,30846,22348,31671,10241,8892,-17090,16992,-27578,23575,-28681,637,-7589,-26375,-17153,10703,13347,20789,14070,179,-14094,30037,4407,-3140,8988,-17961,-11036,22461,-15295,12311,4288,6520,-23296,-7628,6607,26143,-16284,-14692,2893,-11340,22958,-7111,-31107,-2755,-21216,-11893,-1935,3567,32376,25062,16246,32644,29171,-14625,-11286,14483,-7502,29550,-9706,7301,-10226,-18907,-31563,10105,-32411,-7313,-30414,-15341,11242,-19803,13306,-27125,11308,-10491,-13849,18802
,-19422,16086,456,8806,-27214,-31053,-1032,22409,-20634,-28081,10479,16335,3948,-28449,18265,29586,-23950,-26266,-23507,-30818,-7226,-17373,-13775,5535,-15804,13838,-5389,-12347,16188,6122,27909,-6928,-11033,3016,-27458,-28148,-930,-6568,19113,21770,-20041,-17404,321,22791,-17088,-1553,15210,-23412,6730,-25755,19513,-21256,-4169,-3347,-8063,13956,27803,-29301,13778,22153,-7898,23345,8506,-18692,22541,-28592,-10694,19193,-9884,-31071,-2801,-8116,-21957,-30403,-4923,17723,-19612,2015,-22143,8107,11344,30463,17422,-15654,-16692,-28635,15897,-5223,23113,-30235,-17440,-23258,-32309,-24398,28129,9643,-11572,5562,31002,-4836,23856,8347,29815,6251,6797,-19984,143,1349,-11296,6146,3351,31721,19671,-30357,26648,-773,-8298,27663,19583,-9893,22075,5361,27991,-26873,4036,-20197,26549,17914,-18255,12634,-16040,21558,-18043,11636,14577,-24530,-17691,-18212,10655,-21297,-2944,12937,8643,9787,29018,-5853,-20469,-24141,6223,4559,31213,-15597,32185,6785,23260,-28734,21298,-22873,11891,-24986,-3090,-10054,-13489,21132,16136,-19372,-15709,-18017,7291,-30341,10645,-13465,-29009,22613,23350,25478,-7645,18112,-22705,3378,-2663,-10043,12336,9155,-25307,4772,24237,-6382,-12950,-31473,32426,28976,18145,19169,-26898,-15452,-31845,19932,-850,-21292,-30637,-27380,-29105,-11201,452,-11808,26073,-11879,-21922,9115,-4398,-14200,-27615,-18914,15543,31442,6535,17154,-19978,-3983,27252,-279,-6205,9536,22817,-29816,-1843,25697,-22192,27942,-1471,10299,12004,12970,-8018,-12815,-13372,-24887,1094,-30759,-17416,-17453,8607,-26828,14394,32638,732,23508,15351,-3413,-4843,23233,-18451,-30442,32559,23291,5101,-17132,18872,-29702,26026,-28561,-22981,21200,-15758,25251,25118,8534,-264,-16943,14734,29222,-7417,-17300,8943,-4593,14842,28062,-18897,21781,-14038,30180,25802,-17429,5667,18329,-25623,22696,-23337,-27610,7075,-26416,-24947,-9867,1299,29078,17060,-13073,-26503,-23866,-20514,-3243,-17418,-31475,2734,2617,20991,8977,20336,-6084,-32114,16951,32679,-12314,-2441,-45,18478,-6140,10420,-12073,-16826,13639,-14
917,-9564,7335,-3048,-18662,-1680,24330,-5625,30777,14556,598,2975,-9831,22196,-28277,-1344,30768,-31008,32011,31202,28869,24653,24604,-2223,25213,-23467,-28798,13417,-5372,-4545,27288,31182,2455,-32164,26681,-12272,11942,2249,7648,-16758,29029,18599,27728,-5131,31291,3540,-31498,8018,-2406,2966,-30212,28453,-20539,-18526,16900,13191,12716,-9948,-16932,1664,-9303,-964,-2263,-6887,-6009,12590,-22645,24499,18512,-15429,12876,9020,16052,-28006,26065,-32301,837,-27954,-12767,24033,-12015,-31741,-3200,-31747,-12937,-7123,26343,-26655,-25734,-6721,-27228,341,24391,281,5800,-18479,16529,-32025,31025,18541,-18792,26875,22088,10531,21378,19686,-21603,-16022,-14810,19856,25722,23238,-13152,-22108,-457,27399,12869,18268,-25799,-10983,-27225,-21097,17894,30511,28114,22258,-10418,-18768,20936,16068,23069,25483,-28781,25092,19580,-20778,10858,18428,7708,-18903,7975,19187,26752,-4496,-4062,-10702,24411,10242,-14252,3892,27200,1888,-17986,-2737,14088,-4716,-8912,-9046,-3490,5079,8167,-30391,18107,3159,-14660,2300,-3147,29666,-13249,14489,24636,-19416,-15854,10703,15743,5287,-11870,30542,-27967,-24592,-29646,-21072,-10803,-16414,-14065,-5650,17878,-24434,-16194,-676,-22445,22711,10637,15715,15121,-8472,20789,-14584,22837,7913,-21708,-10390,-32222,1421,28852,-1812,-5265,-14289,-18122,-13406,-11319,32025,196,-22301,-6191,-15441,5120,10179,-17089,30598,27014,-813,-17285,20082,-14170,9809,30026,1483,28527,-16559,-2665,-4521,21553,-31742,22766,-24142,430,-28363,4817,14470,32469,5139,18514,12746,13260,-27523,26481,3357,-14509,-29340,-12327,-24426,18425,3802,-290,28829,-10099,-11903,29249,-23594,18660,-8898,23409,600,-9910,-1489,-26832,24124,13442,22299,24552,-9867,-19817,3977,10263,30490,1971,-6483,-21242,-21240,-11253,12787,-25888,5990,21341,15508,-28618,-26585,-11631,-19871,19199,8267,288,-15681,-13652,16599,14278,16335,-24506,18344,-2828,-15902,-11302,-27083,-22209,21735,1914,9844,29070,6536,-16001,-2819,23541,70,10712,-25233,-1530,-18135,-31113,19053,-20118,-14759,-8476,18982,764,-219
28,16720,25001,-11612,-1386,-28018,-1028,-17714,5827,-8512,30837,31627,14646,-27582,-7116,-18741,171,-22738,-8224,24351,18992,-15521,7214,18835,-7338,7302,28761,28131,20133,-21352,2211,-23732,24419,-3492,-22385,-20487,-403,-3045,5819,-2905,29877,16921,17978,-14217,28513,-2296,9640,10526,-23739,-23843,4153,-3567,-17309,-28018,-32672,-11622,15765,24148,26221,-10643,-16310,20569,-14490,23292,-23417,-16549,-15327,18521,13206,-17251,-18861,2821,28221,-469,17323,-26820,8345,-9073,-29942,-1043,7980,14593,13439,3917,24561,29716,-11560,21308,2057,-26926,7588,5125,27035,8424,-12682,-31679,-15305,-27863,22491,29818,-3133,27056,9193,-3203,2663,23371,20190,-24653,30618,-19193,26435,15631,7292,-16383,6780,-14642,6305,9317,-10861,7704,-17354,-29709,20717,15931,17639,-16169,11260,16246,16017,-7353,28822,26875,1537,244,-2985,-17821,-19768,30744,22462,-8259,31993,13908,-1293,26490,-1286,-16878,3557,-13970,-16275,3214,26270,10656,-11714,5400,-15148,-21828,5391,-10752,30002,30520,-19185,-16383,16113,8952,-24831,30984,11398,25053,25997,-10728,-24833,18953,29344,29057,-16945,25100,29259,7707,16108,-31401,24578,-2211,-6868,12498,-17525,-11865,-2349,-22854,-16570,-15951,-8334,13197,25503,-17334,-27616,24062,-31425,18047,-18344,-2307,-5350,5945,-17704,28144,-12025,21417,8578,18064,5581,19771,22055,9967,3000,-29003,-2013,-11449,17937,15834,4210,-23681,1556,25347,-10467,-26839,22714,-15631,-9387,-15183,-20427,-9252,16686,18312,-7470,29426,24589,6843,11439,-27339,-5821,-3105,16642,-1632,8664,-27854,-26476,-17436,-20212,7625,-20571,19967,14066,3609,-32356,-10982,-19887,-3228,-29131,24744,32741,20257,24949,10817,-3756,-10807,29957,-21435,20826,-23810,-25001,29792,-2500,-32617,-21497,245,24134,-24704,-8183,7805,-25326,-16667,30917,-3254,11469,17234,22963,7228,30079,18056,-1694,-32500,-23748,-9325,-7873,-3061,10676,-13041,-32767,-12862,21676,-13948,665,-25394,-13329,16784,-3799,-29209,30572,-10587,-29601,-699,-3261,-23273,18986,9980,13056,23538,23621,-1135,-5147,22406,2209,22723,-28856 diff --git 
a/tensorflow/lite/micro/integration_tests/seanet/add/add12.tflite b/tensorflow/lite/micro/integration_tests/seanet/add/add12.tflite new file mode 100644 index 0000000..a2cbfdf Binary files /dev/null and b/tensorflow/lite/micro/integration_tests/seanet/add/add12.tflite differ diff --git a/tensorflow/lite/micro/integration_tests/seanet/add/add12_golden_int16.csv b/tensorflow/lite/micro/integration_tests/seanet/add/add12_golden_int16.csv new file mode 100644 index 0000000..c35f390 --- /dev/null +++ b/tensorflow/lite/micro/integration_tests/seanet/add/add12_golden_int16.csv @@ -0,0 +1 @@ +29566,4079,-7514,-10750,-4181,14216,7627,11710,-28895,-14001,-21726,12462,-28671,-4516,-9645,13251,18868,-3430,18400,2432,1307,-19355,-5526,6998,12220,-5805,-620,527,17869,18861,19963,-11960,16086,14994,-14295,9079,9697,12300,-540,-14883,2081,-14544,26896,-13344,-24386,19860,6674,-7773,9526,-9575,-9602,20289,7785,-13421,12248,-186,-15170,4580,-8873,19350,9493,-14076,-4036,25501,-5090,22702,-9088,3472,-9890,-3061,22293,-1247,25234,-9582,5889,-786,7659,28839,-24891,11640,-326,-6766,-5828,6330,8208,8267,8717,-28480,-6129,-15798,20198,2442,3955,20922,2705,4014,481,-10374,-8315,3460,29043,-3636,21866,-4070,5273,2511,-4577,-13118,2857,6182,4882,-1683,-6950,-7304,-4004,2961,-9925,1139,-25568,-3600,14754,11501,28226,3864,-24544,-1991,-14580,18072,14019,-699,2569,-8429,3320,7838,-23674,10373,-10229,14570,-9225,3489,8475,-8553,13908,-323,-21244,8718,25256,15186,5657,-12985,-11501,20429,-13178,-6280,-15740,-20232,8457,-30632,-4817,-11442,-24431,11390,-14148,-939,10827,2493,53,-12242,6450,6040,-14685,-12171,-24144,1739,5923,13585,13291,5311,3935,-7800,-9144,-1111,7285,13582,-16366,7225,9893,14033,-16738,1387,3406,15240,2828,12871,7137,5443,-12989,-9345,1879,-19343,3301,-13806,10072,-13290,11210,-2703,12975,11225,3750,-11148,9151,20461,-24022,18769,-5374,-14282,-28279,13934,10101,-24860,-16144,-15651,10405,15461,10823,-7597,-1739,-6003,-5942,-2089,-9043,-5785,-13741,6213,8857,18575,-8159,-25231,-20
749,27745,-8618,9130,7360,10738,-5357,5787,7105,-13979,24472,-7960,3352,-2975,-24377,15969,-13728,-2273,3777,6273,-13392,17492,9259,-6899,12716,-28487,9506,-20944,-1993,2285,19811,-29744,-6513,-23769,2930,12598,-7852,4152,-7191,-12530,5544,-5203,-12802,8714,4299,6497,-11332,-4568,2659,-13394,-25646,9669,-12214,-17127,9841,23770,-28111,-19676,-1993,-7018,1699,13839,5941,10398,-16442,-24016,-21972,-17307,1092,-7595,-2443,-12174,11952,-21806,-9751,-9929,-655,5532,-17991,-6044,11137,-6270,8625,11224,-23540,-17871,19570,-6703,4945,-8851,15192,3512,10313,15637,27444,27965,-29667,16653,5249,-9439,-6152,-7027,8803,6303,-4773,1373,8473,2807,-8623,12967,25068,20262,-3066,-315,9403,29965,-6639,-686,-24476,-2713,-5389,-2314,-7500,14451,-13568,18343,-11861,27425,-4541,-21022,17482,13208,6616,12043,17246,-12762,1396,-17339,6467,-3866,2530,-1423,3667,-2328,-26271,8854,15020,2745,-29708,-4882,-26244,3557,2045,4772,-15845,-8264,-17768,23509,-1115,-4679,15579,19482,-9359,-10648,17395,10473,-13722,12034,8883,-24707,10542,3635,-8209,5145,-15587,-10205,-719,-5097,19644,-3517,-5258,11677,9550,-24876,6615,-7278,12986,7741,9068,-6280,12797,-16148,2622,-12235,-29066,-5168,-8166,11380,20167,-19363,-23770,19424,-9785,16916,-4259,-3345,14006,13441,-1897,-2788,-18783,2871,24337,-2487,-4911,-10344,1550,9988,1084,-1248,2560,-4932,-26580,21265,7242,-12174,3516,6722,-8256,-4164,1910,14981,-15017,14068,15890,4294,10910,-2417,6764,11813,17270,32761,-27898,14376,-15810,-7396,625,50,12993,6529,-19681,-2348,8313,-2372,-12894,27433,360,-2093,-1691,-3865,-11718,6521,-981,-8499,5170,-9798,-2483,-171,6572,-4545,1595,17460,-6509,-21776,-925,-19035,5482,-5674,15063,2727,4236,29137,10592,-11824,-13168,-12347,21027,-8956,-29179,-14548,-15168,21609,-19122,17328,-12298,3748,6663,26468,-17109,-6551,-24286,-24452,8110,-13787,7950,-18917,-22714,-13173,-2849,-2874,7433,5708,-22264,18047,-15529,16882,-18942,-9282,545,-10475,-26870,-15752,3405,16487,-3214,-2152,7678,-1372,-1782,-8965,-5582,-186,-13654,-3534,4405,25630,
10264,16827,-3267,5497,-16875,9821,-22815,27825,3935,-3062,19855,2244,15323,-11088,-10004,26990,2701,-3957,-4153,-18006,-10769,-17395,-7657,-22689,-20161,-8102,20548,-13353,15687,-6429,10913,-27698,-4034,7990,8145,-26949,-2468,2978,7089,-3051,29952,9626,4548,21371,-9585,-6655,540,8370,-15713,14755,-5600,-6362,15023,14849,-11527,-13487,1757,-22208,19779,6585,-14517,-10979,15324,18774,18527,-5580,2066,8933,-18304,-3456,-13340,-17053,-1859,-8674,5171,1392,16879,-23246,18609,18351,20220,-15927,-27725,-8310,-19154,21335,-5875,30094,10973,-14089,15666,20773,26935,-24065,3065,7061,6654,-27865,19050,25,-17954,-11646,-11413,-24774,17914,21069,20897,-2466,29021,-6976,-26985,-13697,-1368,-10237,22945,12192,-3540,4739,7772,4254,-13104,-218,1396,-17347,-18942,3192,-9768,-11451,-5297,-9423,12933,-18836,-10627,-26864,3133,-5663,-3580,-17571,-17837,-7923,10579,598,4346,634,-26506,-18294,-3791,-19988,28372,24816,10852,19958,704,-9458,869,-12321,-9289,-22571,11493,7695,-13233,-7023,15499,-21164,25718,-19888,12232,-1644,-10096,-16120,4594,9623,15365,8864,19435,1080,-14235,-5884,-6378,3615,-22419,15843,4518,-16809,8234,9554,-13350,12670,6714,984,1002,9456,4489,12865,6475,-11429,-8087,-22378,11194,-4335,-8393,627,-2095,22708,19732,-12976,-5195,-23509,-192,-202,-1955,5054,-5063,-2363,-2830,20338,17163,11571,5986,-25313,-290,18674,673,414,3904,11172,-21127,-7734,5461,20377,-19013,19693,2563,-4871,2036,23700,-27310,12419,16548,-14490,143,13743,7180,16065,24469,-8494,7830,1688,1469,17240,5926,-18245,21659,-4765,-8993,16488,2902,-23291,25431,-23465,8920,10135,-10627,23480,-2514,29537,11993,3985,-6219,-4975,-12274,20507,18737,12385,20704,14023,18278,-5142,-14528,26442,4758,2354,-18903,2620,263,-6838,24387,-19670,-1761,8007,-6043,6549,3677,19922,1366,-5953,3773,-20078,-13236,-10185,-13992,23128,16397,-19528,15258,-6845,-10815,-12431,-4117,-12630,-23674,-4606,-14878,-22979,-15402,-5472,-9620,2428,-2313,-7038,-22766,7034,-2289,18481,-2912,-14068,-25967,-15895,-9122,-18770,15049,-5170,-5994,-3602
,21076,3000,6532,8574,7270,-7961,12300,14459,-12679,-66,-10904,7963,14712,3475,-7701,-14313,12327,-5924,20034,-9207,-10116,15603,-18285,21587,-9701,9334,21075,31448,2797,-21517,-6695,12391,18078,-20006,7970,-2806,4587,-5339,2040,2615,5247,-13894,-5456,-22731,16229,-5660,1305,-16437,2890,-5056,5873,7301,-25478,-20386,-13034,-1757,14308,8516,-18559,29361,-9505,-3482,2461,16154,10094,-11751,11518,3643,-21340,28405,8590,-12126,-12225,-3178,-18748,-29466,-422,4677,9093,-26891,3206,-6308,-6929,19920,11482,-7214,17674,-24902,12398,-7604,-17856,-20992,-6735,-10319,-1506,6310,-3197,-6649,8617,-18801,-6371,-16299,-12093,-1720,-1556,-8058,-22612,-9472,-4257,-2636,-16399,19,17031,1070,-6043,25652,7474,-17030,15900,-1582,-8303,2223,20301,8614,5908,27020,-14722,5098,2876,32381,11846,7618,7091,-580,8789,-10580,-2075,-1975,-16793,4247,-15790,23269,-7166,-8833,3849,14403,-15260,-23897,-19971,4906,12473,10694,-8432,-23953,-13691,-10684,-23346,19968,-1061,-61,887,20715,4417,3472,-9403,9565,12317,9479,1444,204,-576,-16539,-14827,-11009,796,-7359,4700,10917,-4795,-594,337,-6913,-18110,-2637,5145,1720,-984,16005,-10789,14295,-13182,15439,13284,31980,19916,11180,-2525,710,17756,15892,-1990,-19051,10757,16693,-10585,13867,-6813,-8734,-8211,18910,-508,-11445,-6791,7803,1165,10816,1968,-10278,23170,-16965,22603,-4441,26409,5147,-313,-1141,2831,12807,-7731,-22480,-9662,7214,12420,-866,3721,17662,-24628,-7048,-1576,-6678,-13053,-4868,4694,1276,5245,1386,17220,6768,-11512,-4753,28810,-6450,-973,-1650,12295,30032,25076,13984,13290,12487,5327,11281,6390,-19297,-460,-21884,-23512,5227,13665,-2048,-9241,-1497,-26357,22182,-18931,7696,2831,8469,5492,26258,15593,-5477,-6521,5991,-13095,21476,-14420,-5755,-9482,16424,25047,14041,9932,-26391,-15483,-983,11444,26063,-5696,-533,20844,-12214,29003,-10219,-17446,-24660,-11086,-13197,-9356,19597,19579,14202,9875,-28884,-2749,-6296,-1499,5106,-11341,-4500,-8584,1563,-1094,-4048,14703,-25407,-18203,-18012,8960,841,13240,-17715,13938,-9870,-3387,-19636,-5274,-
8914,4170,-19416,5125,-9357,-14698,-12777,-5010,870,6431,21033,-9446,15317,8312,3368,-21808,-29920,5009,-5912,3094,-1983,-1062,11071,14146,24864,-22618,12003,22724,5718,14333,-2897,8463,-10629,24537,2778,-9340,26465,-23653,17733,-10096,-18349,-4858,-27824,19336,11329,510,7597,-5640,1596,-5810,-15336,11164,7567,-11094,-8629,397,-7760,-7982,-7181,10852,22393,-30351,14667,16456,-3058,3098,-15823,-15328,16962,-1633,7631,-10303,-7388,-5023,-24412,26438,-14791,-25239,2931,21539,-17251,-10634,-21232,4041,3797,-14647,-18336,5429,-19051,375,24608,-17354,-15479,-2997,-5955,-9252,22905,16853,5050,-20557,6531,4114,-9028,3544,-12100,-20344,-15511,-15930,21488,2163,-27189,3014,22486,-3296,7786,7737,-9620,-27413,27287,11323,-2651,-16454,-16225,-10534,6056,6125,1110,-17567,3113,-15282,13674,13211,9445,-2771,17773,4801,-3110,-5674,-29166,-8513,-7072,-23020,10263,-29450,-19740,-19376,19316,-15475,-19744,-19426,-13078,7893,29339,-6906,-22797,-1452,4787,2013,-28742,-25547,-14772,-14223,-21556,5651,30122,18901,-9472,16816,-12877,-8050,11538,9879,-25735,-1041,2487,15699,20824,8255,-23717,-20262,10126,6215,-657,3323,-3616,5654,4,-7679,12321,-3441,-11687,-10636,367,-19695,-4366,-12861,6322,-3429,-10068,-6954,3588,-23407,1145,11405,8711,-4418,26922,-7340,-24972,8013,-4451,-3456,11114,67,904,626,15808,10858,-10761,3754,-10575,14485,-3732,9528,-3475,-20713,-12385,-18211,-5527,9535,7722,4912,21496,-3509,12281,15699,27334,13933,24162,2648,6970,-3252,16692,-1834,-11822,21947,-10886,-18808,-10273,14144,-13800,13768,-7937,11109,15315,5724,-2403,5928,13517,-15699,16328,-3414,-16332,-21393,-4163,20995,-15077,-7195,-21975,-4969,7339,3287,9790,-15463,12926,1508,-778,73,-24501,-15657,13836,-8868,9244,-2190,2551,-16064,3801,13099,-5723,7482,30085,-10876,1059,-20782,-8954,10017,-19764,14945,12006,-15646,-1558,19592,5917,6045,4337,7738,-1662,-1710,-6123,-20086,-12815,2287,-6942,-14555,-2505,-6952,-19726,9644,14775,5979,-11875,6129,10308,-1386,4591,2853,-6727,-10578,5506,11643,19621,-22612,-20203,15391,213
51,14329,19777,-7852,7993,-263,-15120,-9274,-25979,-10482,-23318,-11007,8002,-18909,-28765,1590,-8073,24524,-21370,-29425,-293,16321,-11685,11202,-20419,-12674,5177,-9942,-8804,29154,18917,10740,-29465,-16038,20786,-13792,10104,-2760,-22754,8393,-9360,14882,-1090,979,31423,-18991,-6101,25472,1109,-6045,14552,20173,1732,-16217,25802,-30850,6257,12400,7666,-8043,29055,5458,3466,21696,3909,-1948,28461,-9059,4083,14628,-18847,394,-1037,-1839,25103,-16742,-17049,23507,-13113,18089,-11664,13650,75,-13941,-1324,9773,-2272,-13528,-29107,-4003,24604,11329,-24253,-2242,7492,7341,-6639,7173,-10990,-10151,6279,-11903,-5680,10315,-20768,11433,-11274,4928,15527,30156,21284,-16577,15187,-21462,-17845,952,-3675,16727,7687,23502,-4834,4260,8719,12869,12416,6176,-8076,-11890,7271,-2965,-5983,-5575,4355,4864,-4725,15821,-18854,-23221,-8713,-23945,-11868,19711,111,31687,-2823,-16877,-10430,-8966,-29864,19,18349,-7260,-19947,3232,-16693,-10902,17146,18580,9271,-18319,-24598,-7710,9111,4587,-1167,21611,12311,-5810,12794,-963,-31358,-11437,-3105,-23391,-23128,24523,20659,17243,12644,15107,764,21094,2123,2484,9758,21396,4778,29463,265,-5729,15682,-22525,10106,-7539,7582,6869,-14645,9596,14837,15318,-4702,11190,7401,8032,-26831,2239,20322,-7301,-4401,-1851,1536,-23451,-9781,-17143,19325,15651,-4839,-10573,-4069,22027,-13560,6887,-1146,18339,10699,12892,20226,-2800,-13156,-7016,6386,15543,17101,-12787,-6651,-5210,-25315,-2287,5821,16203,-4082,30779,3516,-11434,28258,11968,-9911,22508,2956,20407,18918,-26026,13222,16974,-19168,14531,-3653,-13483,3904,285,-12234,-8358,-9834,-13306,15535,-30595,-21093,-4376,-27386,1261,-16804,-30736,-10903,-4354,22463,-7243,-21911,-13105,-10609,-6558,1316,-32752,-18759,20713,8776,8008,3991,7248,-21125,-20523,-22526,-25361,-21435,4260,-11227,8876,-10801,12270,17249,-21013,-10760,-10649,-17447,21538,15018,3970,18113,5231,-10948,-1498,-2943,-17498,6875,-13578,11532,-206,-26330,3531,6864,4581,5464,7638,10314,460,1418,-23528,-17794,4176,-4664,-6109,7184,-4971,-3333,
-12252,7724,-9595,-4002,-32534,2171,6543,-16260,388,-804,16754,10632,-20113,-1176,-31544,-3090,4746,1644,-453,-15154,-24246,-14461,4754,4192,-12753,21254,8765,-82,-14109,-16689,12697,-18494,17883,-17938,-3805,2068,10212,8390,-5611,18077,8261,-5293,5574,-8460,22278,10011,-8622,-12102,20930,-17451,-3027,1107,-4837,2238,-16497,28087,-13316,-16506,25827,-23138,1713,2549,17818,-831,-11660,7867,23666,-10412,-10298,-9493,27926,4793,-2985,29934,-7173,26053,16926,-12653,9331,-12337,17300,10583,-2896,1317,-15846,-5646,14587,4548,-18066,7884,8087,10882,9097,10928,14807,-10994,24252,8778,20954,-5828,3585,-1799,12466,19584,2530,11981,-10524,13165,-22708,-26216,2900,-11699,3222,549,2544,-7952,-24062,16101,-7909,6599,-7167,-6562,-129,-13450,18199,15408,-31073,-15218,-904,4248,-4174,10858,15447,-11612,13979,-6412,-3222,-13789,14851,-29721,7932,16152,-21919,-3211,-19999,7100,6155,10137,-3845,1753,1984,-22262,-1155,-5068,-4993,-9160,2205,11933,23121,15018,23066,-17295,25637,-24464,27997,20814,3116,-2807,13359,9802,-7964,3084,-5883,-16332,15044,-14592,10193,8590,-166,10025,-6565,18904,-10795,5602,-5533,-9987,-11759,7378,11077,-3649,24237,12880,12389,24390,7113,6993,-1825,-3893,14685,-14335,2423,-16608,13026,-12626,-8229,-9557,-10697,54,-25390,16317,3111,-9316,2232,29090,25383,6003,-9482,-7457,10235,3489,4502,-22364,21374,295,-9980,-21750,-7545,-12379,-24052,-10325,2912,18630,8840,7501,-9014,-2821,-8435,-4379,2866,7921,7011,-21460,-27590,-17694,4084,-1014,6724,9730,-9653,7126,-4764,-22436,-21238,-534,-18504,20224,-22703,2244,-19085,16060,-7038,21455,5573,-5185,-17970,-13246,-1147,3287,-20345,-15428,-6958,-2968,-14469,1074,9937,-16894,-31375,10531,6455,-12519,-6150,10944,20341,19705,-22471,-15351,17512,-19678,28252,-12077,980,-1035,12354,2035,1003,-3852,-3637,4735,16298,-23683,-21962,-10478,-13588,6709,-18382,-13383,-28465,-240,18657,-5626,-15765,9964,-18771,682,-14250,-54,5736,26289,-13894,22146,-11711,-12492,16089,-4790,16047,-8453,-6394,15650,14410,3173,2444,3526,-11820,14276,4083,-9
015,-15300,21172,11056,-14445,-1802,-9062,7612,7122,-26929,-12835,22776,4008,-7898,465,-1861,802,-11276,4648,25812,-8766,-14387,8980,16235,1653,-8888,2526,-8980,1896,-2929,-22771,21118,-2660,6102,-29734,-3178,6993,4510,6229,23194,-5038,10985,13770,-3922,6647,-6483,-11032,-9850,-5531,7964,-3954,-17810,-1268,17435,-23829,-15133,162,-3657,5010,3778,-3503,-12581,11082,9213,-29492,6255,-2688,-2510,-13120,-468,-21474,-25524,-9183,14915,1902,2482,17549,-3217,-7249,5832,-24555,17439,-17524,6423,-9705,-1204,18751,-27283,-3788,-28438,-4324,3692,10200,15645,945,-13911,-1796,-22090,-27940,-19266,17625,711,11263,-3094,-67,-1508,5756,-2342,-16439,20862,-3255,-4823,-1090,-3600,-16973,19518,9957,-16991,-15928,-15500,9357,11872,-2425,-12125,13821,10613,-5768,11737,28933,546,3246,-8198,11074,15178,-1305,17214,8933,-14460,-3377,19561,89,8440,-2175,8334,-4726,892,-18160,7272,-21318,-19286,5983,10018,11665,-30056,6910,-9665,-13701,14285,-2933,24885,-24005,13486,23944,-9762,3048,17755,3553,14569,-26569,-841,2595,-19577,-1040,-13844,958,-13111,-5663,-1600,7274,4975,-19413,-7902,4896,11882,26676,-10071,127,-4851,20940,29240,-24711,-794,5884,2358,-15870,-16435,-8907,-10375,-17226,-7344,-5866,-8062,-29407,-27538,-2592,13778,3280,22977,6798,-9506,6210,19855,-29704,-8169,17498,7545,16738,28649,-21644,-10670,-1878,-15254,-8517,13018,-6797,-13347,-1944,-14169,18067,16932,4934,-8090,25362,835,13601,2463,-10469,-17380,13980,10861,-2287,20184,3418,-18010,-21393,14258,3092,21853,21624,-10158,-14650,6074,-11306,358,8586,13899,-269,11376,-12619,6878,26611,7482,-9327,9215,-5487,4612,7372,-15447,22371,-15120,-25421,-23796,-2011,-10475,6664,6443,24308,-15578,-17235,-8623,9670,-14265,23988,6437,-13457,-5853,9142,-10589,-2318,-29606,-11998,-24063,1002,-27312,-10754,-14216,-19481,448,-23245,742,-7751,9495,-2323,-13954,23566,-13127,18728,1776,-7301,-14905,8112,15408,-9707 diff --git a/tensorflow/lite/micro/integration_tests/seanet/add/add12_input0_int16.csv 
b/tensorflow/lite/micro/integration_tests/seanet/add/add12_input0_int16.csv new file mode 100644 index 0000000..c754b79 --- /dev/null +++ b/tensorflow/lite/micro/integration_tests/seanet/add/add12_input0_int16.csv @@ -0,0 +1 @@ +30636,2071,2082,-29729,7719,18246,-8387,21808,-30562,-14162,-27838,32068,-27699,-5128,-19869,14816,22293,9582,29808,3760,-5816,-24187,-32218,31695,29889,509,-19253,14654,11406,29037,26770,-692,25706,3477,-5472,-4650,13496,3576,-11518,-21797,-6965,-13685,27605,-21505,-32715,27514,-4800,-19073,-5506,-1686,-12273,11221,10341,-25066,8817,-13315,-8849,-11293,8569,23692,-1278,-4147,3762,27861,7811,26554,-19369,4535,-11551,-26826,29892,-19862,24921,-14975,25619,-66,4410,30562,-28098,7054,-1522,-27139,-1994,-9741,31932,21785,-9057,-30622,-7901,-8218,21130,12609,-3058,23253,12363,13008,-10896,849,-22328,4420,31052,6339,26892,-17619,32513,-14463,-11392,-23072,20789,-4578,-771,-18358,4735,-14713,-13074,4277,-4973,20148,-25702,-8342,32178,12973,26407,-10005,-32409,14381,-27529,27324,30186,1314,-10843,-16630,13439,22889,-25644,22493,1094,12547,-27813,-9642,2869,-24392,27841,-3837,-16245,9109,32479,19030,17585,-27658,-28795,21539,-27929,-5315,-26147,-28429,20301,-30600,-21648,-12421,-22870,14302,-7842,-24333,4468,772,22219,1423,14325,21209,-30820,-20406,-25220,5866,24668,21227,4077,20667,13132,5805,-30804,21187,9005,6631,-6785,-4987,30908,18385,-17060,1678,29084,11882,-3277,28512,8268,17996,-25988,-4502,11285,-8760,24202,-15006,22699,-30749,31827,-18381,13075,22605,-6922,-17842,10326,28432,-20116,14338,-28642,-29785,-27619,13347,20574,-20717,-24871,-23328,10201,27995,28196,7088,-25971,-21706,-8255,-23084,2893,-3038,-172,21319,16176,15296,4327,-32536,-16623,27218,-29890,-5184,4428,-973,-15217,28132,514,-16508,31347,-24376,20912,-27463,-22520,3756,-8202,-435,4290,25438,-3796,19092,10078,-13059,29849,-25736,12117,-25976,-13979,11673,16029,-27381,10793,-18856,24236,6893,8692,9397,-9734,-19863,2953,7543,-27996,8202,28455,4915,-6592,11100,-8946,-31526,-20251,32
308,-26465,-25394,-4276,26770,-31172,-12127,-14253,-2444,18076,9474,21672,22932,-31386,-16616,-14843,-14986,2567,-29929,9131,-20219,18595,-26738,-23366,-24288,-484,18755,-7325,-2386,11762,-923,7524,22865,-19048,-29826,29884,-25124,21178,-20190,16716,5032,27093,30941,24445,24604,-30576,13415,31246,-24595,13517,-32592,-8550,25782,-29830,-3867,4237,-2728,-22651,23328,32424,27764,-23025,-284,5478,28041,2077,-9283,-25273,-19340,-20779,212,-25700,9600,-24425,16467,-27815,26305,10531,-22884,14342,31846,1307,4103,18704,-1158,5402,-15192,12097,8116,-9128,-6087,-15210,6801,-27831,24931,30508,25711,-29053,11863,-23769,-5693,13053,2926,-23899,3822,-16142,24989,3074,-20149,17254,24347,1370,-7434,7957,27069,-9159,13981,21985,-24320,13490,-15460,454,17049,-5275,-2089,-21061,845,25028,17918,-31355,8396,-5838,-27964,27424,-19395,4933,-1804,22035,-24798,27479,-5686,-11066,-7733,-25842,10237,-25293,26455,13910,-14578,-23608,16946,-20340,13660,2030,-25530,8067,6801,-3374,-13731,-22409,12846,22056,-24905,-5407,-22845,-16623,-2879,-11162,-25187,7735,-7262,-20895,26457,23438,-16872,6359,27201,7526,-6210,25880,23996,-18920,32626,27955,-4505,18220,-20220,4322,28632,25386,32247,-32274,29395,-22025,-20236,11964,1442,24394,-5897,-23412,16426,24349,9321,-14467,28329,-10560,-4522,-15809,8650,-32053,-4444,170,-23690,-14789,-13328,11052,10061,18457,11613,22139,22857,1185,-32385,-17460,-10057,19620,-30155,9792,-3038,23317,27460,20614,-17122,-16957,-7444,12028,5141,-27681,-25134,-3545,14618,-22646,12161,-14708,-12266,-2900,32763,-27736,-12081,-27305,-31345,22559,-16939,30840,-18376,-19650,-12152,11161,-864,5775,26105,-25087,28010,-2732,30478,-24503,6145,18698,147,-28021,-27190,25681,28717,-13769,-12361,27719,14907,4925,-4192,-24885,-13146,205,-20985,-14253,29015,21282,27006,-16738,23321,-8903,23579,-28538,23581,-8372,-4404,14888,2534,27872,-30411,-3974,30681,-3645,14096,122,-16214,-12915,-15783,-26025,-25995,-29823,-17509,25806,82,8942,7685,6388,-32272,5257,3517,16983,-29000,-19606,-15958,10638,1557
0,28638,13371,-10206,12518,4457,4602,12713,-2382,-30971,10185,10000,-6326,11010,11525,-3702,-30141,13996,-22593,27801,9346,-28842,-22233,28263,19018,17379,-19345,19260,5293,-21985,-20705,-8389,-23148,2275,-29959,25051,-15268,31958,-16195,16157,31751,21468,-9487,-27862,-20421,-27983,31187,-19113,31362,14833,-10513,19081,30343,22775,-26352,5638,-3868,14034,-29665,16060,-20492,-22516,-31142,1638,-25294,11555,29610,18334,-12667,29567,-13353,-30939,-27457,-21470,-9582,14981,25297,-24500,15686,16204,2461,-26570,21140,-21238,-18049,-21295,9973,1316,3640,-15703,-6928,7620,-27161,-6387,-26052,16465,13153,5596,-17484,-25824,-14227,12969,3591,-14766,-17493,-31814,-10575,9278,-12883,29384,29313,2646,24674,-8147,-18370,-3891,-11603,-32544,-21977,-1729,19483,-26032,8691,13010,-32531,22432,-23317,7663,8256,98,-31661,30053,18356,24071,16041,20849,-8320,-30968,-19739,9622,5363,-20375,6528,-1445,-26468,19959,-6955,-16790,14333,-8955,-11108,-7947,14443,-9654,23287,14001,-11585,-24346,-25508,3608,16454,-11336,7911,-4737,30619,11510,-23479,-28909,-27701,-9918,-2979,-26825,14490,2222,-7808,-14042,11620,25287,-3770,25239,-23666,-22443,18791,16438,10186,-10299,9857,-19202,-15255,484,19289,-17168,22263,16832,-16721,18507,20957,-26132,4613,28298,-8737,-4457,12136,22489,8178,23979,620,25636,-5554,4398,32133,6207,-22079,28636,-14567,-25499,16209,-8612,-27258,19441,-16193,25425,-4789,-3194,27582,7311,31079,6762,-16235,-23236,-26066,-14056,23189,19062,29331,19419,25572,24260,-2609,-3132,26977,22844,14994,-26760,23622,4753,-15025,27727,-10377,-7493,21819,263,14306,19342,13043,8714,-25632,29988,-27472,-17811,-23091,-14900,22571,27939,-23368,32671,6485,-3286,-28272,-16168,-26079,-17594,-28270,-6038,-32168,-28796,-10124,-25802,22160,13747,-17,-30484,1171,-26131,13876,12947,-7223,-23998,-22891,2266,-29034,15557,4046,2824,16281,15528,677,9808,5034,-10071,-22561,5926,11996,-12229,7447,-16977,19032,8116,-11986,2812,-28422,-319,-14009,20481,942,-28862,11387,-12364,23166,1897,11623,19637,32534,-9920,-2055
5,-17613,8031,31042,-14470,32506,-19213,20437,-1053,8740,19044,12166,-9618,-13956,-22667,11496,-1205,-19165,-14214,18342,-13704,27502,29092,-30853,-31043,-31504,625,6874,2239,-19833,26849,-23912,-18140,16881,15198,-5982,-1220,26754,29498,-20199,25257,14658,-27849,-5297,-24022,-14814,-27918,-19772,31162,25751,-21824,28438,-24907,9035,15134,30143,-6850,29159,-21500,7727,-22336,-21409,-27619,9148,-23122,-893,1699,-26538,7983,7283,-27664,-15149,-5843,-15543,-542,-9285,-7812,-26897,1298,-9492,7985,-27193,-19807,29948,16191,-18006,22290,29861,-5612,13959,-12851,-8822,8961,20172,24286,16515,25693,-12406,1037,-4651,32663,31673,32072,7888,-2093,10677,-3352,-14438,1219,-32035,-10701,-10334,24245,-21353,-6154,5752,17821,-11672,-17604,-24076,18604,-1312,24615,-10978,-19926,315,5163,-21750,17630,-4675,20147,19629,13572,-16070,17992,-4564,29338,1863,-4010,-8121,6042,15257,-22347,-22838,-8808,4783,290,-2845,17656,-5458,17966,1364,-11941,-31910,-1597,30525,-18818,9340,26093,-28288,7073,-6942,13545,1064,31383,16071,5506,-16893,21731,9201,5875,1490,-9015,8109,26325,-30647,23531,-11129,-9168,-31640,17706,21245,-13807,-1170,13185,9739,23463,13724,-19144,17743,-10370,21200,-29026,23124,7195,-6769,4735,-8245,29686,-4722,-23445,3672,14287,25431,18415,-9399,18224,-17821,10582,-23488,-2682,-1050,1208,537,8266,13614,-11262,12877,23014,-8455,9335,24722,-14661,-11809,-4199,13443,28693,29205,20797,1472,23525,31041,27112,510,-22183,-11075,-30424,-27003,14491,24243,-23723,-25688,-22712,-27170,21877,-22468,32437,11367,13623,24977,32691,26283,9247,-2087,15638,-26904,16433,-2661,188,-4450,24654,19541,31575,21716,-26986,-10036,11487,-1470,21274,-22718,-15393,15051,-27953,24978,-15556,-10041,-19018,-31130,-11952,4730,26250,15266,18472,7883,-29436,3659,-27594,-630,5539,-20029,-10596,9382,-15421,9942,16472,855,-26174,-15311,-31365,31961,5997,18328,-16158,15884,-13059,-21543,-16670,-17983,-4261,-9945,-14563,23333,-15345,-28290,-25055,-16040,22807,16948,18958,-7684,21818,8173,11970,-19510,-30570,829,-1469
5,22137,719,22018,2442,31597,21544,-27606,3905,29664,2632,32268,-23710,-3187,3244,27708,9717,-1713,32472,-17233,22004,-13222,-28717,3802,-29467,16361,10971,-5148,7393,-25124,11029,-4579,-30682,-4210,-7627,-14196,-7420,-22054,6972,-29357,-534,10337,20973,-29402,30508,16025,-25298,17147,-2891,-19319,20312,283,-6019,-21018,-12559,8396,-19303,28132,-22365,-30705,-8940,27858,-12334,-22073,-20062,26723,28672,-6802,-7046,25836,-12649,-15616,28303,-15648,-6064,-15502,-28141,-28900,20863,25398,9717,-17445,-8940,-16710,6449,1150,-16812,-31739,-22415,-9420,21888,-6167,-26788,-16012,32725,17697,27847,22349,-30418,-28326,26679,3199,-7275,-31659,-19910,-26084,27916,-1244,-8555,-26661,28216,-18657,11744,12060,21684,10672,13770,2515,-12667,-23179,-30834,-21337,7084,-14976,-6448,-26573,-13041,-24899,21435,-6496,-19578,-15400,-17233,1086,31454,1032,-20000,-4638,12223,-939,-30301,-21482,-20316,-24714,-23636,30200,31697,23709,-3841,6469,-23684,4829,5537,5358,-31184,-4234,15526,28059,23389,-3280,-25052,-22969,-4279,-5127,-11953,14353,15099,-1891,-17629,-20720,6381,1460,541,-25070,16444,-23551,-12168,-28404,23860,12436,1393,-13647,22558,-26009,17118,2535,30852,14931,24491,-3086,-19060,-7605,-14691,2769,16360,6446,5426,-16569,19918,12536,-8533,22487,-30035,19569,-23117,29939,-19588,-16644,-8477,-30111,-21318,29399,16120,5388,22821,-18591,30476,9115,31293,18838,21231,-3392,-2623,12098,16068,-4438,-30423,21607,-14955,-31226,-22953,17315,-22625,4050,366,19626,18784,1830,-23532,26176,2354,-5043,25228,-13989,-20269,-30210,-2327,23713,-13264,-31762,-27076,-22323,12543,-13870,6869,-30487,-1704,-11224,-17040,18490,-18241,-5195,13563,-14334,14700,18510,28034,-3398,-11709,2107,-4530,1686,30495,-18974,21097,-27357,2345,23987,-21901,28261,4096,-16969,2509,11565,24229,11139,-4269,1865,-5359,5973,10672,-32597,-10929,-8236,7784,-19325,-11003,-16421,-17727,13579,8764,10858,-7624,1456,2669,-19459,-6058,18444,-31477,1491,-9906,13238,10379,-27215,-10103,10815,30530,24707,24442,-8304,13687,11378,-13245,-1728
3,-29223,5218,-20268,-9701,12937,-29454,-28711,-18456,-417,17855,-21342,-30218,-5586,23515,-15346,7226,-17319,-3053,20613,-9533,-20632,27024,16816,32415,-28355,-5847,28795,-14941,10071,-26313,-26894,30831,4425,25284,-107,4903,29993,-17337,-31472,19809,6204,-20630,19747,21424,-5870,-21388,28977,-30954,-4388,10198,4724,-20630,30746,7625,16349,26989,7809,-6618,26252,-31051,23945,19976,-10746,-10891,-20486,-17075,28754,-8084,-29446,27045,-5189,23963,-20131,26589,-17549,-30416,-25997,-4447,18362,-23936,-31570,-22846,28088,14739,-23586,3614,28269,2184,-27840,18223,-13845,-22550,15846,1574,-22842,9644,-25151,23698,-13376,29960,10250,29659,30986,-12156,29526,-12687,-7475,-5260,12736,10601,4648,28754,-8522,26731,-6291,26360,16734,-9130,-4035,-9889,29761,15037,58,6217,12387,30142,-28771,8499,-14918,-17999,-20988,-16816,-4804,29468,-4565,31061,7907,-26200,962,-25692,-31761,9067,24424,-4556,-19998,-10976,-17301,1113,28843,25081,19361,-20203,-32409,-24139,11222,-11014,-15830,25343,15726,11418,23750,434,-29392,-12377,17737,-20622,-22204,31213,15024,20532,30600,6785,-2677,22268,9492,-1,-1278,17201,-3050,31810,-14382,-19049,21912,-15753,17250,-9355,1295,31317,-20572,-1482,14849,32378,8545,19558,-7295,18338,-31212,17062,26416,145,-14702,-18783,8934,-27588,-5863,-17027,12879,12760,-31603,1411,9876,20322,-8965,-10380,9918,23851,31353,12679,30127,-16209,-29569,11529,-8855,13553,27984,-2606,-16761,11798,-27257,-15942,11272,24782,-5512,29313,7093,-10052,29209,28619,-4521,24850,-1250,22536,14948,-30071,30310,16040,-15148,5770,-1571,-30330,2893,-13773,-3747,-6661,6957,-12888,13776,-32153,-26568,-18762,-22421,-21731,-22717,-28116,-14685,16630,32496,-5554,-15361,-29120,2038,-10972,-5351,-31402,-13398,28442,27502,20310,17151,25279,-28258,-21434,-15070,-21124,-29992,-3449,-8765,28997,-30409,29082,10505,-31442,-10084,-9269,-9826,17413,1601,-14239,16389,30807,3804,4986,-24655,-19942,2723,-17967,13220,-6941,-20717,6548,23718,404,24752,-9237,-1271,2940,15749,-31965,-22815,28686,-28747,6857,15872,-
30533,-2029,-19796,24699,5685,7772,-31952,12065,25607,-4497,-13276,12535,18362,24077,-22705,-9538,-30183,-6398,-2856,26579,-18770,-11846,-18930,-32322,-6252,-8351,-1287,16716,31866,22141,-13330,-19593,264,-23541,20067,-27015,172,1676,3066,9424,-8815,19482,-8294,4481,-8118,-7754,30817,-2346,-1242,-13453,16679,-27792,1918,-4710,-1811,26413,-14448,26952,-23093,-21230,29293,-31854,23973,1802,16536,16767,-7444,-6602,20425,-32648,-24156,-30069,29193,-10901,-15691,26735,-1743,25532,28656,-3252,1214,-13524,14756,32108,8396,-15441,-26456,-4780,26378,-16276,-8335,28642,-3067,18581,-2078,16955,16133,-4100,22611,20940,20560,-9562,4187,-6007,21304,23411,-17635,29089,-24284,26422,-16690,-29034,4333,-11225,-399,-3274,-16014,-17143,-18410,23456,-16243,28506,-27696,8772,18627,-12923,18348,27532,-28806,-19028,11703,-1303,-11401,-4563,28155,-2878,30339,-13868,-403,-9976,28175,-32547,26540,11411,-24397,-6697,-15928,21576,21107,20477,-22774,6101,2324,-31000,17991,-18371,-32308,6531,11434,6382,17206,7531,30320,-28124,28883,-19314,23720,18541,14030,-12664,17821,7660,-7799,1406,-18046,-11660,25567,-25600,-6221,7206,7739,27294,-19442,32520,-8172,22984,11478,-4828,-4701,6788,24486,5075,22864,10933,22089,26253,-9673,-9800,13918,3802,9685,-2939,13583,-17841,2637,-18729,-7058,-12517,-7576,20870,-28110,13005,3381,-9638,17120,27297,31453,5628,-17565,-32692,-3440,5751,20920,-27534,23791,-4142,6760,-21001,-32095,-22627,-21422,-8463,26223,10008,-465,10527,-7706,14715,-5724,-9348,14258,24872,21230,-20804,-26893,-23539,-6583,-24120,-2597,-2160,-31475,27945,-3179,-17308,-25161,-8754,-32071,26202,-17195,14466,-8300,10902,1422,21684,15920,-27900,-14511,-7551,20791,18911,-17391,-29736,-26745,3203,-23825,2657,20558,-17970,-30956,23571,-10693,-27663,5674,23751,22325,17353,-22748,-1993,17820,-23632,30126,-15506,-9022,14247,11748,-20371,-15252,-30288,15803,21014,32468,-25145,-20967,404,-29289,5439,-8775,-30122,-28338,-7914,7789,-23562,-22802,27965,-21752,-12379,-27074,-19797,18493,30008,-12568,21038,-571,501,
28564,-9446,31457,-5120,1498,21281,24816,21723,-9943,9211,-19268,15502,1726,-30008,-13461,30486,14829,-6093,11,-10432,24790,-11002,-21824,-1647,19216,-968,-29344,-2282,11479,21876,-12860,23643,28503,-11786,-15587,2642,24456,-601,-12470,-5245,-28654,14701,8591,-15865,12263,-8983,7861,-29515,-2297,4667,9007,28429,28294,-3325,7630,14800,5171,4812,-4394,-14257,-27884,-11607,21951,2172,-12950,10260,19504,-30439,-11844,15736,1458,31752,10128,-15294,-4398,32201,22542,-30063,25563,7622,18540,-29250,16461,-26782,-19884,-15163,19111,21851,-5109,28247,-9831,-9472,19904,-19807,13794,-26265,-1272,-30497,-7015,19720,-29129,-1204,-32744,3682,26403,-145,11609,-19762,-13576,-2199,-28530,-27034,-18551,12283,5596,11910,15502,14494,-6590,13936,9092,-8331,11727,-20900,14524,1786,721,-16163,23503,29277,-28640,-16728,-17233,9293,298,-11072,-5973,8939,14147,-28068,7449,29898,6401,-9887,-23807,3202,10727,-12877,16692,-4789,-14180,6146,23047,12480,13266,2901,5461,-17227,11580,-16348,-11243,-18167,-17004,-11189,3980,-2151,-29937,16717,-3602,-14518,20407,-11526,32436,-20405,12191,18927,-23820,16948,31241,6333,18844,-31512,19561,9139,-9106,12236,-26364,5240,-25696,-13482,-20860,-3099,15934,-25017,-29553,20225,17637,24133,-31256,-7930,5011,30991,28046,-29105,-24669,-7975,23897,-18352,-30913,-21171,-14988,-22265,-9589,-2284,1723,-25693,-27293,-15502,22636,12985,15231,-3285,1769,-7751,20693,-32105,-8245,12167,30795,22047,26757,-30048,874,7513,-19127,-14564,3874,-3430,-32635,-12935,-26936,17669,16770,31771,-1372,19758,12504,5757,12260,-19348,-10397,16291,-2969,219,20681,1132,-10053,-15108,14819,7688,13941,29322,-17861,-14889,32218,-26021,10284,25862,2502,18918,4310,-14847,15281,22585,20796,5599,15942,-31257,13790,22920,-7132,24738,-6308,-24004,-19516,-5134,-8469,23735,-6184,26446,-4574,-7495,-17740,2275,-5515,19093,25939,-15368,-32013,26296,-4010,14204,-29668,-21578,-23867,5501,-25190,-997,-15222,-22301,-6319,-16502,-602,-11647,-5605,-2907,-14145,22196,-2054,30523,18911,-5230,-10562,31904,5786,-172
84 diff --git a/tensorflow/lite/micro/integration_tests/seanet/add/add12_input1_int16.csv b/tensorflow/lite/micro/integration_tests/seanet/add/add12_input1_int16.csv new file mode 100644 index 0000000..802f6ec --- /dev/null +++ b/tensorflow/lite/micro/integration_tests/seanet/add/add12_input1_int16.csv @@ -0,0 +1 @@ +26288,6573,-20164,15844,-20185,7835,29041,-2808,-24842,-12920,-12039,-15095,-28237,-3402,4919,10298,13029,-20999,1680,469,10959,-11562,31286,-27180,-12674,-14076,24878,-18808,25602,3797,9434,-26622,1953,29810,-25473,27281,3910,23465,14494,-4521,14315,-14825,24274,-1372,-11507,8181,21942,8146,29481,-19767,-5363,31433,3814,3316,16184,17765,-22875,25987,-32161,12229,23628,-26778,-14444,20710,-22406,16043,5520,1807,-7013,29601,10540,24267,24112,-1624,-21433,-1722,11628,24712,-18979,17192,1329,21488,-10709,27902,-24714,-10713,32469,-23802,-3330,-25185,17684,-11601,13294,16451,-10658,-8523,15998,-25072,11345,1935,24513,-17043,13654,14694,-32272,25550,5016,1289,-21822,20505,12307,21205,-22490,3268,8635,981,-16081,-24906,-23814,3102,-9961,8782,28977,22577,-12289,-24241,4009,4320,-8934,-3406,20738,3296,-10710,-13209,-19527,-6825,-25073,16439,16742,21217,15615,13616,-5985,4498,-26770,7647,13835,9000,-10989,7863,12836,17658,7789,-7213,-553,-7789,-8247,-28794,18478,-9400,-25062,6711,-21895,31086,18852,4691,-30238,-30163,-4707,-15058,8266,-170,-21189,-4008,-20056,2307,25064,-15999,-8873,-25912,21015,-31512,4487,22245,-28453,23469,-19431,7223,-15270,905,-31891,18892,10997,-9292,5153,-12045,5572,-15388,-11089,-32615,-25461,-11319,-7801,11383,-17651,18887,12042,-5015,18102,-1317,6983,8312,-27884,23671,26750,7779,-27444,13881,-4830,-28993,-3226,-4200,10044,-2616,-13581,-27196,31480,15824,-2417,26727,-24797,-9184,-31437,-14810,-1689,21914,-24718,-13699,-25111,26760,20979,28128,10914,26081,8445,-25101,15675,-9664,13575,14960,-20848,30670,-25416,31676,-20435,-4645,2843,-20300,-25681,14231,7571,1943,-11477,-30496,5354,-12780,14507,-10684,23761,-31145,-29760,-29022,-26363,19
619,-29977,-3271,-3274,-1741,8743,-22299,8746,8879,-28972,8260,-17113,-25696,18354,12205,-31442,-21861,8010,-4779,28526,18209,-22202,-28782,14882,-12838,-20784,18954,-15920,-7369,4988,-32652,-30364,-19416,-991,23389,-18109,-433,2140,-13727,9452,10302,-849,-12876,-31461,-10672,9599,-13191,9599,-5373,-28232,-437,4274,18881,-17541,7186,12175,1219,-13249,-6235,29856,30840,-26603,20054,-30597,11850,-32651,28338,31974,-20702,29759,8449,13740,10197,11076,-1987,13477,8766,24395,-337,14189,30752,-18140,11104,-21884,20174,15972,-5624,17831,20191,2101,19780,10668,27271,-24857,-17187,20699,-13072,13463,22153,14194,-27834,-4164,-19207,-1623,-20002,18305,5037,29237,-14659,-22525,-13659,-7067,-28806,-28777,-27462,-28013,15979,-13123,7002,-3865,-24270,-18897,20042,-6771,16747,12334,11636,-23445,-14386,29223,-12847,-19113,8635,-9567,-23718,5866,29504,-19541,-11437,-28721,-20667,27121,-12904,11080,-32591,30725,15442,29991,-19128,-22226,9726,23191,20309,-9207,19409,-8052,-29451,21165,-17635,-31686,-25900,15738,-9919,27478,-24712,-22530,21616,5238,20326,-12592,27174,21261,21689,237,12336,-12673,-10935,25959,28299,-3932,7373,26288,26957,17751,31539,-4668,-1444,-32716,12864,-15334,-5006,-586,-21674,-29314,-1112,-30960,1742,-8762,-12156,-1573,16054,252,22059,9685,-11895,5118,31450,-20205,-7030,-6345,10603,-14908,-1856,-3384,23108,-13373,-27857,-14109,-18203,-9952,24524,15260,1354,17704,-20729,16789,21104,-2494,12782,32125,-4373,-20825,-14141,-10072,-26345,-26575,9013,-16623,-5942,21727,-30134,-14173,28127,21341,10438,-22097,29638,-3754,-3858,-7181,-18289,32031,-27669,-29432,810,-30118,29835,-13131,23323,-8250,25401,19321,16239,-1537,1409,-18669,-13531,-12131,-8633,-23817,-18493,-25506,-13759,-21818,-5444,9241,-22515,-17039,3325,-32061,-2733,-10180,-29792,-24294,-24345,-23646,845,-27244,-1238,11406,11931,-20178,-23532,-10837,-14936,21138,17535,-31753,20529,29629,19429,-5423,1884,15341,-19197,-26732,-9583,-13592,31914,20509,-1039,25421,1710,-2767,15996,-17628,20287,11207,-28383,-9739,-19349
,-7175,-18529,17913,-16778,-5719,5251,12101,-30890,23940,-25319,16426,-19747,-16482,13612,-4433,-22490,21102,28670,1803,-28307,29906,3917,24428,32156,-28183,-21629,-16126,22547,6101,20093,-26573,-6021,19583,18479,-21512,10099,-15074,-20318,7601,2408,5950,5073,-3298,17288,18957,13571,-21556,13359,-12149,20327,-19285,-7676,-7393,20943,-22311,24071,-4762,-31453,20817,-1088,17272,-23748,-25835,8750,-5913,6561,12574,26512,5024,-18110,10038,6419,30964,-19461,-639,21562,-3840,-23694,21965,28059,-10618,15710,-28544,-22540,25502,8104,23116,11625,26492,2167,-19923,5946,26185,-10504,32417,-6464,25319,-10511,-4228,6442,6102,-29388,32239,-15322,-14563,-6271,-24313,-31368,9248,-12254,19398,-6304,-15767,-26323,-15277,-31026,-15898,-16611,-5827,1178,6663,-3528,30194,25364,-17624,-27717,-21416,-28468,25245,17146,21398,12287,12754,3302,7319,-12544,23059,-21995,28855,-8886,5070,-28064,17948,-4332,28627,-13981,17723,-15070,-23405,6107,-30477,-2902,2524,-1487,16308,13858,9505,13409,-27850,1005,-23834,27598,12389,-2577,-8293,31526,-7829,9620,27712,17447,13169,2060,23538,-2167,-4206,-10514,14627,-16726,20871,-32476,-3855,-9365,1644,10502,29755,2174,27529,-16336,13109,3604,32148,-8150,-14706,5223,12664,31001,5007,31822,-20689,-26008,29999,17367,-20910,-12964,23072,12282,-22459,3018,11926,20612,-20365,14971,-17092,11620,-20596,25991,-27242,22323,-524,-21461,6421,15095,-14179,25854,23634,-20425,-16981,11480,-2624,-4170,5179,-11885,10795,8922,14115,15856,18456,-16439,32053,-31959,-14182,29906,-20131,16431,-15784,25614,18403,31370,17416,24150,-9085,15581,17141,-11531,21187,-2620,8980,-8288,-29208,24085,-20248,-15062,-7006,-26239,-5888,4769,18324,-31159,6179,-11359,-14288,-4453,-17955,28098,-8758,21304,-32280,-8740,-6171,8077,-11892,22469,-383,-13080,-9473,-24638,-20439,9979,12602,6524,-30528,28012,-26042,-9010,3847,1221,13083,-24684,-24116,-16200,-10820,14613,30430,23638,-24404,-22557,-27063,-5359,-24123,-3591,13430,-17446,-17675,-30550,27363,5990,1655,12884,30518,12478,20253,16935,-12514,-103
29,-1935,-7651,22821,24388,-21594,5846,28850,5488,18193,-22510,16121,20404,-25252,18102,-24953,5632,21744,28031,20003,-21510,8636,17587,-748,-26342,-26047,19785,-17354,-10868,-7240,-19994,-4531,-18883,6493,-21421,21698,-11399,29196,-18464,-18401,7072,-24042,-22923,-16567,-4572,13006,-4905,23587,16569,-15678,30990,10766,16762,-17394,16467,31440,-25418,-10010,-31910,-21588,30961,-230,10103,-20941,25500,-22971,-29771,26044,-31801,-14227,-32163,-31469,19494,-28317,25235,-14723,-7268,894,-28021,18018,12993,-11903,-10646,-28024,7809,-2250,12222,28893,-26234,9911,-5536,6016,-29586,-6635,-3223,9100,-7899,-15368,-23606,3159,-16988,-641,27108,-1666,-19657,10674,28670,-23575,-31585,17575,13914,-7083,-7121,19229,-13331,-8949,27172,-16981,10334,12984,30006,-15975,-26266,5566,1522,5670,-19806,14947,-6218,5067,24412,-22275,20506,12661,-11952,1011,8848,-19225,-31027,-13134,-14112,30542,-8986,-4436,-27983,-31988,-31681,-24092,21936,3942,-27671,-24778,29203,32139,-16582,-15437,-18042,25845,27328,14426,-7786,-22176,-7586,-2970,-13340,-4702,-17358,14720,1038,-3595,-25919,-1086,382,1859,-3896,-29851,29679,-15031,1236,13786,23286,-20899,17078,29165,30831,23947,18247,17264,-28057,28354,28602,-6623,-31593,13714,2505,17480,-191,-497,-7605,24307,19394,-30202,-7515,-14054,-30,-10623,-7131,-14218,2468,29163,-24934,23131,29427,29275,2032,8527,-9101,17792,-11045,-11368,-19780,-27289,-2895,-6121,-27160,21419,15808,-32416,-30706,28463,-11727,-28653,-12872,10087,-8355,-6513,18583,22097,-15848,-14981,-23711,32625,5167,13893,1935,9970,30016,17894,3814,28623,-3363,-30136,-11044,14033,-14168,14072,-8870,-17297,-7754,-1628,27696,13801,27585,-23627,21235,-12934,-26584,-9006,907,-21471,15855,28,-25260,-12180,-7559,6580,27047,-29603,-13523,-15775,4168,31031,-10780,-6780,-23956,-21975,-17963,28386,31006,17913,19805,27478,10044,32720,-2299,-26493,-30853,16984,-14088,-28029,9301,24269,7495,11991,-26355,-11337,23193,-2594,4200,1227,4107,-32605,24675,-16107,-31838,32722,-22798,-21037,1342,-23021,-6257,5474,-187
53,10422,-4907,21630,-22482,12416,-14725,23202,-24854,-20069,-599,4777,4785,10369,-29160,-8335,22577,-11273,5492,7991,-8592,-23608,-27193,10413,6453,-23118,-5553,-32534,22182,-10568,27872,-14413,22331,11845,9584,-11055,25720,23862,-28932,18696,-6875,-19187,16630,-30972,10807,-5205,-3054,-16392,-23869,22214,11123,8210,7409,21331,-11392,-7136,6576,31485,27864,-6173,-9751,31051,-27414,21717,-15823,10889,22958,-29783,-7879,16034,27519,-16290,-32521,-8933,11341,-4150,25815,4972,132,-23051,-29893,22499,-3533,-16220,18973,11581,-22910,5651,-21525,-27202,-30427,-24467,-32637,-22790,-26628,22203,18048,-18619,-27394,14274,24728,18164,24288,4141,-1637,-23546,27269,32317,-29622,6597,-4918,-3524,-5124,-23847,19620,13412,-26066,28826,7114,-31780,-20104,-12706,19390,-24480,26442,21729,3830,5335,-10192,11362,-24187,15819,14249,-4060,-31381,-9731,15471,13972,-7859,-20970,22151,7629,10141,18594,-25094,9534,-25981,-32597,32468,-31572,-27681,-10639,15233,-26793,-18758,-23734,-6597,16710,24647,-17329,-25218,2990,-5669,5923,-24845,-29531,-6289,986,-17390,-28242,26120,11170,-16585,29921,2681,-25154,19028,15450,-16709,3386,-15484,-2154,16040,23510,-20435,-15319,29188,21331,14819,-11952,-28966,15616,24099,10613,19681,-9927,-27677,9740,-21623,-13215,6564,9168,-18031,-24898,-25111,2618,-22553,-18413,-20751,22824,-22079,-30585,28589,-12701,-31515,28861,9814,-11750,3262,-8653,-5331,24084,9221,7898,-13145,-22074,16666,6647,22987,-18948,18757,-25000,-16965,-831,16389,-18194,-4228,3959,18364,17315,-13337,23732,20245,6374,26682,10739,19649,-24026,16518,1837,14322,21064,-4656,-684,7684,8942,-892,26202,-18795,-1212,9634,10694,26616,-22103,27941,-29294,3163,11245,-9949,-8031,-6415,15991,-16628,26816,-13654,19050,-223,26528,13179,6016,32122,18813,21490,-25098,-31549,-28991,13359,-854,1220,-30341,-32428,-32385,24762,27314,-7002,14941,27675,858,-26387,-10521,-23843,-9688,-15630,-4168,22076,-12876,-7020,29356,-19469,-1287,15830,15288,3491,-12104,-28696,-1757,-14604,16525,-26637,-7142,9261,6413,-21246,3674
,22081,-1055,-16955,12138,20112,23395,18860,-18626,27506,-26420,26227,8748,31044,-14933,-32763,20699,7496,-733,12188,-6752,-279,-16154,-16752,2240,-19949,-31290,-26052,-12115,767,-3337,-27072,28883,-18038,32130,-20096,-26534,6957,5488,-5964,15946,-23401,-25041,-16233,-9891,7899,30273,20625,-19537,-29172,-28977,8565,-11374,9529,29594,-15698,-22783,-27622,-247,-2367,-4443,31446,-20083,28942,31645,-5921,14256,6558,17223,12014,-8155,19877,-28812,20419,14648,11216,9651,24959,2161,-14352,13131,-1661,4554,29730,21548,-23308,6421,-28759,15791,25603,19094,18572,-27544,938,17227,-23135,8952,622,-4870,24153,9428,32472,28603,-30328,1525,-23953,21992,18331,5974,-23675,-10106,-21360,13936,22740,-8367,-6414,7415,-7180,-29586,18121,10598,-13502,-6028,-7710,-29579,21783,28982,6718,-21599,-5340,-32134,-30918,9383,-25875,24071,11367,14881,503,-26708,28694,-6357,5753,26712,-13101,-13893,-23908,-27382,-13869,-21346,-6889,-29975,28422,24854,-23073,-28930,8595,-32216,-20791,5167,6494,30595,-17311,-3100,-25356,14441,-25437,-12346,8921,-10509,-18652,22447,-14837,-26650,108,8556,-5087,-14618,-12414,15214,5666,25624,18941,15184,6889,-28995,-2962,-2812,-32117,-9449,-31394,-25737,-22970,13874,27090,11689,-12668,25550,5418,18193,-8077,5728,24239,25814,15182,24446,20262,12824,6206,-30395,-278,-4595,15708,-26961,-5646,24143,13910,-8934,-22514,-931,27028,-6544,-19195,-18153,10746,-17028,9945,21400,-8667,-16358,-14534,-16247,26945,18639,32031,-26299,-22874,23004,-19005,30058,-16195,9680,-18180,12392,5454,15694,10081,-31925,26819,17307,1178,-25913,7572,-28131,-21105,16512,-1986,3485,-1878,30891,-1587,-12620,25221,-11520,-16667,17924,8521,16245,23181,-18900,-10940,17208,-23483,25609,-6273,10367,5046,19478,-23080,-10164,-32173,-13060,16983,-26587,-12315,15550,-32488,32600,-7691,-32428,-5066,-32759,7373,-9105,-29514,9585,-27238,-123,10346,-32583,-24932,8878,-17351,-9294,-14236,-17836,-10079,-18016,-31331,-29593,-8425,14532,-13901,-19164,16655,-11456,25404,-5471,-11022,-11880,-26789,25850,32428,28608,193
56,-30039,-30433,-10265,26907,-13083,12126,-6746,8517,9009,-32381,-809,-16588,10006,-21227,30228,25511,-2957,-18252,-10554,-9839,-29573,28531,-23452,-5130,30264,-4909,-1191,-15945,-29884,-19844,-31331,-11482,-19910,-31334,19035,-18982,13527,-8393,-15336,10322,-31466,1619,14842,-32529,24603,-18743,-30020,10833,19501,21073,-27636,26150,-23340,-30444,-14306,-11696,28906,-10462,13799,-4432,-9006,2476,19348,6461,-888,15047,30374,-18324,23941,-8905,9242,26281,-18177,-9512,25452,-2249,-9599,8988,-8675,-30934,-18284,27913,862,-9036,19503,-9807,-28809,3414,18475,-24827,-16704,27155,26640,20612,9272,19206,24478,25943,14560,32465,-14152,25164,-142,-24721,19850,-9956,19714,-19479,-18149,24134,-375,-6483,-2421,32724,-30252,-20964,22832,-306,23808,2020,12085,-19738,25003,-8379,20204,-367,2542,4061,-376,13152,29929,-12132,8926,-5759,-29536,-20755,763,-11628,7971,5739,27746,5095,-30307,5061,3964,-23742,21325,-27111,-25750,-13344,16876,-2106,-32262,-9077,-18076,11572,5957,31262,-2866,-22833,-9234,4170,-6875,-18152,-4269,-24034,-17982,21638,-17186,1750,-24334,-13118,-14654,-4616,22257,-4296,1397,-8954,-27247,13421,32639,-30037,-10542,18785,29782,24325,11737,-1435,19627,-29999,32121,22642,-11988,10834,6442,12127,-7699,5188,11098,-21713,-260,1346,31995,9954,-10958,-14189,11433,-862,-13715,-18493,-28438,-16423,-20680,7731,-7926,-15346,24624,14750,-1627,20345,29614,29510,-23225,-14169,20614,-29026,-12976,-13903,26421,-3511,-9323,-4924,-14305,-28393,-20112,19840,2551,-8303,-18249,29752,15529,6146,2145,27483,28292,183,-18209,-13925,16758,6340,-32240,-21438,26464,2385,-26168,-12236,-29119,29266,21012,2906,-10248,-26610,-11620,2679,-12876,-15729,-12850,-21038,-26848,-8621,18410,30622,19048,25378,20758,-21760,-6636,-28064,-14573,10731,1172,10812,-28834,-14595,-32649,22122,-18165,19823,-8908,26172,-21592,-20213,-31053,-18265,-23131,5071,20508,-11218,-796,-1154,-5186,-14385,-30019,-7935,29490,8944,-21930,-7229,16380,21707,-20712,-32660,16016,-13065,23956,-6650,14588,-21853,12422,32526,23153,325
07,-29978,-17801,-6800,-20229,-21971,-24704,8701,8032,-30380,10312,-26889,10261,32360,19229,-5181,-15246,-13544,18487,4149,26928,-12049,19591,-14853,22300,-26213,-29479,-1945,1867,-5995,-12488,-16784,6994,-694,-22370,19220,-4460,-917,11723,7053,20225,-16873,7145,5222,-24969,-4169,-6633,-16329,31449,-32250,-27333,26240,10560,21892,4190,-19975,-28044,-8418,-21594,20549,-4101,-11864,17089,4004,4631,-3447,12990,18456,-15718,-18490,-30809,31921,6144,3324,-28205,-4186,9742,-1912,-24489,14801,-7068,14894,11517,-16106,8747,-8938,-5948,15397,3111,-11638,-12081,-23357,-16942,13536,-13332,-18697,-21128,-10422,-31840,-5131,12824,-22989,-18458,-9567,-26899,-20513,-16610,-31119,9726,-23571,-12902,-31662,-447,8264,-25475,12702,1852,6018,-3766,-13754,-29533,21347,-4504,16542,19302,6811,16275,-23084,-7085,-20806,-14999,-27569,23710,20198,29183,-13514,-1135,-11932,-27461,-19059,23842,-6007,9686,-28315,-19960,5529,-5775,-17822,-26507,32062,21056,-30963,-4952,-9283,-17036,12873,-17054,-29,-13856,-12180,8870,26958,9540,-19787,19643,5131,25057,16876,25837,-7488,20993,13634,21151,20327,14587,16869,27134,-13954,-16181,13595,-16847,1327,-8977,11748,12646,-13767,-19519,32124,-24314,-21220,29080,17654,29828,-28371,-6915,-17355,-11743,5041,8988,13037,-27450,14426,29328,10047,-16133,-1763,-463,7833,-18182,-28668,-6507,-32682,-19117,4114,-4952,4891,5370,24816,21001,-10305,-10563,22168,-16352,3287,28511,19496,11128,-18028,5919,29074,-17189,31879,24459,-27219,-11503,4359,8399,-3435,-9283,-3825,-10400,-20938,-32674,-26181,15207,827,-10182,32150,20159,-24329,24906,17489,-24597,-7564,23707,-24689,8455,29474,-8830,-25788,-14594,-9025,270,24713,-10981,13828,13194,4146,17500,16112,-32040,-16772,31460,-15161,23483,-11076,2307,-25855,9962,29092,-5570,18264,6331,-27775,-28666,12616,-3378,31321,9777,992,-13424,-30024,9495,-13227,-15548,28618,-26470,20332,-8799,-5026,30476,-11170,-29149,-544,30063,-8212,-14327,-25859,17761,-26231,-25796,-28183,2379,-12572,-17073,23301,19892,-29656,-29486,4365,19181,-25345,29
202,-20606,-10018,30252,-14860,-18928,-24752,-27702,1830,-22853,-5208,-28533,-23425,-11967,-14430,9668,-31031,2534,-1950,29544,-1382,-12836,23990,-27450,1460,-21748,-9683,-19923,-24896,27608,1242 diff --git a/tensorflow/lite/micro/integration_tests/seanet/add/add13.tflite b/tensorflow/lite/micro/integration_tests/seanet/add/add13.tflite new file mode 100644 index 0000000..b98c4cb Binary files /dev/null and b/tensorflow/lite/micro/integration_tests/seanet/add/add13.tflite differ diff --git a/tensorflow/lite/micro/integration_tests/seanet/add/add13_golden_int16.csv b/tensorflow/lite/micro/integration_tests/seanet/add/add13_golden_int16.csv new file mode 100644 index 0000000..de916a1 --- /dev/null +++ b/tensorflow/lite/micro/integration_tests/seanet/add/add13_golden_int16.csv @@ -0,0 +1 @@ +18731,-6563,13201,-3968,26030,418,2244,-2778,-16663,-12557,-415,24471,31761,32103,13142,21626,-8790,7108,-12870,-10457,-7265,572,28318,4149,23076,26546,-21752,28579,-497,-10874,-26678,24357,8448,5699,-21697,-17424,-2461,-12779,-14541,20539,20333,5528,28859,-27057,9634,-11055,2246,15811,-13585,5699,-58,15691,22210,-16546,-17123,-18679,-16949,5497,-23997,5081,11607,285,-19244,14953,20807,14281,189,17641,-5456,-11084,-4805,5833,5847,6358,9747,-20459,-5133,12430,21662,16565,8158,25025,-4423,-19596,2134,32767,1857,696,-23942,18434,22040,18804,-9196,18085,4852,3621,-3223,-10131,-14891,-24741,-8902,-25607,-1106,12272,32767,19442,-15921,29813,-14141,-13207,-10497,-4415,23897,-14426,-3301,-6726,-2504,16100,-11024,-26872,4283,-15773,2104,3834,-856,-21810,-8281,21153,-12204,10414,-31975,-14682,-25254,30642,-25531,2397,3987,-29272,21591,23447,5933,-7479,28676,18431,-32768,18898,-20772,-3388,-14524,3541,-13279,-13413,14640,-14770,20549,-24356,32767,12194,13333,16146,-12974,32767,19472,-13893,28407,10900,-12506,-25772,20693,-17043,-10024,-32768,9107,-30256,-7552,-22795,11790,11303,-14454,13942,32767,4016,-11119,14877,22456,1679,21857,-23246,-12489,5473,5012,-10145,-3556,32393,29523,25951,21256,85
07,-28202,-32767,10117,9987,14727,20872,-17097,15846,-7730,27485,-28969,-19265,-16996,16465,23695,13188,21513,-32768,-3275,16984,-17643,-28256,-14777,22847,4605,9367,-12917,26934,-11362,-17472,-12618,-76,23646,-32768,8011,-13899,9880,6894,26018,-2140,18631,-30826,20390,-21816,32767,14856,-21921,12458,-6862,-27514,-7109,-18330,-18702,27172,-7662,11695,-19759,-13736,-2206,20414,11470,-20132,16981,27029,30185,15234,-17238,24113,-12459,-31085,24107,-30769,-8202,5709,-2722,5101,14427,-22700,-16939,2063,-12508,17039,-159,11671,26429,-3260,3167,-21372,-2375,-28834,-1992,-14104,-6669,-3835,-23834,619,-15603,32707,18769,-31224,-25201,-8818,20638,-29894,-14217,2764,-12892,-13907,7642,1874,1982,5020,20276,22018,-26206,6318,17054,12868,-15171,-872,5861,11338,-33,6577,-10243,3594,29664,-253,-24723,-12455,12796,1607,24324,3051,14161,16740,6351,-8447,-17059,-9230,15937,-12790,11742,-28920,-14461,-6363,-7692,-6494,-11467,3986,-12168,-7573,250,22624,13367,-6016,8387,-13057,-15418,-24419,-21049,23681,2380,-2264,-19373,-1756,8017,24116,-6593,6540,-12408,851,-12687,-14251,32767,17654,14218,5251,-17946,-21781,2271,-5858,-31859,3689,-7830,-13816,3328,23681,-15919,-13791,-30217,8665,-14179,-16788,7570,-22619,9112,20812,-12250,12398,-13783,-20117,2216,7889,-13625,-16722,-12411,-8673,14426,-17790,-7613,-19229,14070,-7821,5604,-5726,6655,14998,18175,2694,726,-20742,5899,-8305,11194,-2476,-13573,1065,5563,1584,-16559,14271,10207,-3006,-11541,10294,-18274,-2829,-22726,-9817,-14650,-7194,21609,-25872,30776,-6123,5505,-9958,-14053,7190,11282,-14335,8035,670,-15843,9665,-8259,14408,-7757,19755,32767,3476,9948,-17414,2003,9572,6501,-5776,-15465,11390,608,-28262,15262,25772,-208,11334,23846,8233,5010,18043,11982,-13049,20417,14042,10559,32767,-5869,-7974,-3606,-17069,-10848,-9867,-1440,22815,-18436,32767,-5341,28160,1744,18027,11525,32679,-10344,14403,-27751,-30759,556,-197,1182,-15917,23085,5178,17755,-1004,-9497,-8147,11288,16873,-23335,-14078,1354,-13107,17634,2675,-1483,-23763,-16263,-7004,2044
5,-981,7738,4923,16628,-18696,-14180,4221,-2267,4031,3574,-31130,-12060,8435,-16894,-11270,188,25365,2594,-28756,17386,-13662,11989,26367,-20839,15834,-26629,-17256,2037,-21717,-2586,-13911,10809,15009,7907,-14753,-1211,24059,-22900,4171,20515,7272,15591,28396,26894,11439,-6866,7253,-22454,-30775,6352,-32768,21498,-20002,-28116,-519,22318,-2058,-15172,4450,8559,14703,-9626,14116,12377,2948,10111,-3210,-26901,20225,25718,-5254,3494,-8354,-2536,4005,-26487,18842,-4910,12871,-16949,-1534,26019,15591,-2486,-25535,-8148,-21952,11335,-4006,7502,-22993,-14967,-15791,24873,29139,23395,16351,16200,14592,19095,13820,-32768,-32768,-10317,32767,-13425,-6708,-1863,-11181,-18096,-83,-19829,22485,-11650,-5704,20748,12779,-13866,-29527,11585,29291,-10635,-19112,16756,22443,19525,-22005,4451,7497,28830,2200,9832,-6705,30744,18003,-2819,4652,-8427,-15696,7807,2468,12916,-13491,851,-5349,852,-16359,17640,16023,9740,5649,14517,11413,15304,-21274,-3501,4353,-15993,-19667,-12854,-3238,14915,2904,28937,29243,17982,21980,-14409,-1184,22805,16389,27721,11953,18667,-3926,9354,28499,9649,26989,27851,756,18178,-11391,18055,29479,-29686,23841,18247,8125,-7435,-4161,28164,22455,32767,11538,13972,20680,16349,-13950,-8645,31117,13692,4522,11642,10921,4399,19721,7530,-31530,-26892,16795,21604,-7364,6067,4762,15826,8238,-14341,21556,-13667,-22185,2828,-12416,-3923,2063,2271,25639,-7183,-30746,521,-13079,-27856,-19586,-10680,24812,11890,-12332,5466,-3123,-21083,12740,-14120,-4477,18140,-7472,12878,8320,6454,5140,-23029,-15480,24251,-20974,17126,14873,7398,-20340,-20722,24726,-6882,6424,29407,-26942,11777,12035,-14279,10683,6460,15811,-7,13098,31301,12778,57,-4448,-7706,-2186,5350,13492,-30173,-7193,31563,-5972,5742,-17216,-25377,1967,-5880,-21742,-10850,-8387,-5168,18932,-15313,-15120,-8621,16806,1064,19633,-25688,28522,29037,14001,5906,17132,-16975,-14599,24167,-6057,-10145,-22724,-17854,-11358,-13404,31497,15933,-10794,-17326,-14433,12756,-9867,1627,238,-15936,-8695,21659,5736,-18911,-8788,-1705,28
99,-1859,-32768,16545,7055,-1608,32209,-1373,7527,-18619,1290,12983,21559,4695,20125,-13532,17283,-21841,5926,-16505,24903,-17906,-9068,-26036,3438,16312,-4091,-1185,-129,6823,-21519,-32521,10129,-27329,-1186,24677,11463,-3511,2835,-12932,4932,19261,21055,6829,-13048,-19730,14830,5149,1989,-17003,30601,30215,19472,-18864,14549,19452,-17128,8323,-27208,-15558,-17880,-2851,21333,-19907,-32768,-73,-4450,9233,3380,-24036,-9810,-16964,14841,3455,7579,-19612,-23246,16385,-21681,-20439,-14814,17404,-7367,-8823,-16257,15428,-15,-7881,32767,1461,15545,27462,563,-23599,19194,21623,15752,4045,22221,10668,21108,-21803,13085,32767,1752,-2093,24957,16454,-10516,1265,16943,28693,-16043,-5864,4394,-10369,-23650,-11443,6514,20872,-9660,-7420,-12776,-326,-6601,32173,20409,4649,-11677,-27455,-24790,30091,25386,-10622,17699,20702,9431,8787,-5658,15873,29445,28470,8615,-3045,7472,20202,16995,1830,20791,5266,-31034,-16842,-14352,-7649,19938,-6671,-7395,31724,18813,-18309,-23311,-6719,-12431,24214,16041,11786,-19901,22678,-3369,-8733,-8590,9976,-14549,-12726,-29387,10573,-6866,-9115,-9169,-1855,-17107,-21252,-6563,-26234,4346,-12648,22863,404,-16088,-19756,25775,-9064,1042,-27626,-23063,18242,4746,6583,-26646,-8147,23438,9043,-23958,19459,-10070,1596,-24724,7658,22911,14133,-5066,-3740,14946,7733,13498,-15083,-52,-12367,-23461,9964,-19104,-17162,18675,9453,9144,15717,-12615,18347,13051,20453,6992,17870,32767,3433,9956,23013,8783,32767,23028,-10608,18442,-7437,2435,3678,11787,-203,-12925,-8593,-29778,9764,1432,19231,-7234,22139,-5632,-18309,5186,-11869,-8387,-18958,9849,16416,24853,8663,16869,11248,1941,-862,-18075,2751,-32768,2366,-17533,16988,-23436,-13948,-32768,770,-2468,-16220,-15045,4968,-30170,14570,-18761,26079,23348,-7951,17560,-996,-18561,15918,-6156,-3081,174,-12269,-13899,15401,-20399,18028,-13272,-11435,29123,-24865,-18613,-12811,-20020,-21218,18491,-7149,29970,-120,-9948,-7863,-17119,-695,7810,-32768,7088,5725,26313,-32768,-884,10045,-12002,-24234,-17025,14045,26371,13955,324
6,105,-9104,-18897,-26897,13264,6688,20058,11107,18822,31247,-19186,12324,-24420,-8175,11535,-8668,-9044,6899,-18389,9136,-32668,-10588,-18970,11455,-11117,25411,-3469,27069,-14659,-18189,-23576,27080,6343,-8124,7852,-20074,-27580,-4983,-8331,21104,14147,23108,-19928,-191,15733,-3309,16182,-16995,-18891,-8331,-12558,-7411,19808,-25738,-26291,-7061,-15547,27362,4176,-13093,9577,-7054,27487,5386,20411,-18021,22007,14674,-14512,31515,7279,-18171,-13193,-30892,-22852,26344,19655,27779,9029,26824,17495,-5132,805,3329,15892,25957,20521,1365,-27105,12855,-1659,-325,27488,11195,29884,8162,17198,-15062,11589,18466,-7829,-9345,-2851,7141,-5676,11211,19349,10584,7284,17827,-22044,9773,-9856,6686,32385,-11470,27683,-2694,-2767,8755,-17497,-2426,-16621,-1881,-21754,23190,1344,-15051,11831,-20045,-8514,10664,19487,-13245,7734,2991,-4480,12268,-12121,16573,-27327,-4274,9363,-10675,15525,22645,20827,-17497,30718,-16595,-28,8627,-26999,5005,-7221,-19282,8010,-22557,-9583,-28718,410,19379,16592,-21039,-16417,-9293,-32768,-17044,14483,-27098,6586,-10070,10645,8181,3285,-28752,-850,10638,4168,14475,-4358,11974,18915,17122,22070,-21834,-11512,-24950,31925,22327,-10888,1946,1013,-1638,-19954,32767,-23400,-1782,13949,17813,-25653,-1599,-17725,-11645,9223,21355,-32768,-14165,-4732,5589,-16748,666,13257,-1029,30332,-20299,32379,70,9231,-10118,16455,-1851,-29930,23115,-27349,428,-15070,14214,-13312,2846,11441,-21028,12323,-3304,12752,2813,2721,-12797,-22047,-5571,-20016,32767,-15715,-5706,-11774,-11296,12921,10353,-8766,-11710,6717,-32768,-18026,9330,29550,-5972,-1185,22594,-2671,-12667,12123,-19539,27401,8871,-23952,21744,-6044,-21933,27971,15849,-23610,8889,1770,12481,-5506,-12755,14004,31808,-7982,21806,5480,30195,25759,-32768,547,11267,8115,-8608,-20971,-21906,2190,-32768,12717,25662,-30898,-2424,14766,11140,9583,23577,-4418,-16789,-13891,-24355,11018,8191,6764,-22920,-8576,23255,24449,-21253,-6150,-15521,1496,-3991,-17220,-32768,-3449,-15377,-10550,24457,20571,32767,13440,-10755,7813,-3
2768,9655,8552,11938,-17670,8843,-23317,-17304,-14252,30648,-25006,8676,32767,-23368,19745,-361,20772,-14861,17079,-25164,16482,16254,18828,19373,-30015,18972,-90,4406,-22787,-32768,18238,-17424,11018,23789,13330,5545,-8505,6318,-18265,12965,-15028,4120,-7622,13743,-17325,1809,5192,-2708,-14997,11076,2778,10575,-7514,-9522,13768,-18822,27943,9236,-12227,12905,-3095,-18370,-17392,-4674,21865,-1509,32767,13001,-6340,15957,4942,6384,-10804,-23210,5722,-16977,11546,9664,20223,-11972,-9284,-12391,-11072,28499,-16122,16584,18315,826,11407,30425,1818,-8121,-14494,-3361,-11491,6359,-16261,6434,13118,21476,-16243,10708,18276,3719,6335,10342,-21363,13408,-3876,-342,20431,-580,-14149,17855,-10670,-28961,-32517,19781,-12698,-10321,-8069,-1234,-6859,-2123,-7900,-11783,-32768,-3652,-27051,559,17318,-14519,15939,-20360,16422,5133,-11115,-15578,-14079,-23959,-10154,-21200,22081,-15757,-8402,-2029,-16180,8063,15147,-4703,-4581,-1499,14217,-28786,6900,-5797,-32768,-17636,9529,25597,-15639,1927,-12024,-3881,-30206,3839,-3692,-19877,15071,-12579,-15191,7052,-17256,13830,-6486,-15578,-14543,-25847,-22734,22623,9750,22938,10236,32767,-31728,-14635,-7885,-24693,5869,32767,17523,13844,216,12742,31481,-20350,-9331,-4621,-19574,21138,-8054,-20247,-4588,-1306,-32768,-23385,-6121,-2472,18296,12269,-1190,16930,31287,29063,25608,6904,29674,-26915,16335,10081,5958,-9441,32246,-11838,24444,-5802,-20080,-8025,-4641,16253,14251,-4288,-31429,16043,30775,-12261,-16358,19854,-9211,-13798,3771,10434,-12159,862,-15898,-15895,-26944,4249,18151,-21953,-25740,20849,4347,-11045,22842,16156,-20997,-21070,-5562,19667,-22346,14800,11024,9946,-11158,-19348,-9749,-15712,-24109,8124,7974,-31394,-24211,30885,-22522,9678,9394,-12704,-17231,-11713,-20194,3687,26549,-26293,-9382,4617,-307,11204,-15877,15612,-17543,-16373,-2875,-5328,-27256,-16105,-1888,-27084,18764,-17679,7846,29452,28087,5739,19902,9953,7262,28394,19143,32767,-16104,5096,-4366,8284,20199,-10355,19499,-2711,28128,22419,3437,-11703,-21337,-2416,-10174,
-8836,-19366,-2708,4490,-30509,13348,17448,-11848,9282,27741,-5848,14503,22839,-9055,10207,-25291,21157,-2924,-9370,5600,-26544,-31461,1255,-32255,7794,-5345,32767,-16940,-3004,-11279,-29576,25668,14406,8182,-19214,15638,-18461,-12029,-14415,-644,-14868,6358,18455,-2371,1308,16386,-16544,-14329,-14208,12269,-16824,15862,-16454,30432,1857,-6195,-6714,32767,-17118,-6007,-18689,-2561,7941,7636,-7430,-23030,18378,8722,710,27463,-26289,11920,9864,17845,-12310,11361,3184,3532,1824,4787,-161,-26275,-14822,17020,-14689,-13538,4224,-17114,-25783,8617,-21854,-11175,-4656,-26170,-328,-5649,5511,-13469,25118,-16140,-18084,2894,-23334,22859,3778,-3205,20622,1364,7311,25590,20575,-28613,18829,-11711,22277,-13920,13584,-4482,-15748,-17398,20816,9218,3743,-18015,-1566,-6347,-2323,-9494,-32064,-14982,3999,16649,-10123,3762,2968,-18703,12194,-2387,-16039,-11310,6645,-7768,-13714,-5581,-8137,-30189,21475,11453,-11793,-32768,31679,9788,23381,10769,25305,-24237,-7666,2197,-27228,23248,-21420,30391,4961,32767,25152,-21980,-26530,-1958,-11122,17635,-4240,-8103,852,32767,14486,26332,-6149,17263,-19105,-23873,-594,-8331,-7578,-22794,-12568,12591,25560,-32768,-19006,-17276,1144,-9015,-18318,-11357,-11807,-14971,-21288,-24556,-3519,13639,32767,15252,12211,13498,12139,-5982,-11955,30873,9044,10769,-12621,12269,5965,-26117,8187,-11962,-16293,-2305,11395,-1999,22535,10768,2887,10051,5053,-18120,-23326,29559,19553,26677,-25040,-10066,1834,10622,21914,32767,-26980,9589,4411,15720,-19166,-15184,-15043,15266,-8670,-22839,-22942,17083,-2787,-16600,-14066,-15132,-18342,-14841,26350,-6748,6356,5039,21688,-178,2565,32550,-13169,-22111,3403,-15966,-8102,7112,15491,22296,-32768,6666,-21769,-26545,7351,20875,-8849,29256,30612,-29201,8570,1461,-30372,11984,-18563,-1190,-20284,8690,-8372,32767,14787,-18603,-12895,14369,26853,-938,-21814,24265,9116,15592,-16324,7600,-13813,-24463,-1952,17948,25173,11002,-12136,-15700,22493,-14285,-21487,-26136,14129,-9852,-14231,-8386,-12606,8433,-9139,-8122,14837,23918,21950
,-17235,-18131,25042,3727,-14574,-19181,-29059,-16102,7915,24608,-6963,-3019,-16589,-13329,27886,-32768,-27210,4770,4550,-28112,-2139,15559,-6007,948,13318,25483,15444,-8219,-14322,-18622,19506,-1531,-17644,429,-18638,27467,15886,25531,6874,24472,4905,-32768,-20172,-2716,24771,301,-8413,-7098,-18043,14232,5283,-10889,5724,20574,668,16562,-12892,13980,-26013,-8082,-1490,8093,17699,511,-11694,672,-14624,8213,25350,11188,-17387,27380,8211,28745,-17096,5680,-21436,-13697,-13880,-6163,2775,21145,-32768,-317,-4224,20590,24465,-28060,30242,-16658,-17086,26144,-28117,-7638,21160,-1763,-19266,-5247,5626,3515,11887,-16898,19114,11164,17544,-14783,155,-3768,-1572,21488,-4126,-6778,-27659,-10835,-23368,-29783,746,-6011,18471,9194,-277,-17640,4497,-4524,-2260,-31846,18477,14186,15324,-8227,19799,-11362,-14510,-19843,-30969,16653,22437,15880,1957,-26148,-14819,26259,-26696,-32185,-22729,-32768,-17792,-11970,23215,-22198,133,-28092,-20185,-7423,15560,-24341,-2365,17934,-6789,9049,-14576,1753,16210,-8913,9132,23642,-4078,21996,2333,19710,-14563,26948,-21805,2245,12978,19245,-32748,-9377,-17777,14438,5877,-4345,17228,16563,-1180,-12526,-23496,23305,-23656,-10128,-19248,32259,-15808,-24043,1079,-3487,5474,-19119,-9686,-1905,26072,19454,6181,7674,32249,1468,-14326,-20721,15171,-16370,5617,-801,-8340,31552,17276,23570,15423,25387,30567,2583,-16881,25497,-1128,-8729,-23766,-21106,-7888,-14012,4330,-12938,-16299,-13562,17152,-3796,9485,1806,21672,13994,-9718,20744,6471,-30099,4306,-12340,-4747,-22351,7667,-8211,-13346,-27378,-18466,16519,8654,-32768,17362,-31143,14281,-6534,11785,-1418,17210,-4108,-21645,9383,13057,3405,26669,10883,-7394,-1173,23550,-2643,18491,3918,-14774,-32768,-18362,-4247,848,26405,-2147,-6924,-18732,-25915,17623,11631,-18158,15276,24091,30642,7052,26342,7461,9923,-14097,12555,-21586,-4935,-19787,-6735,16161,17862,-9586,18055,18483,-19882,22778,19908,-3393,8113,-5726,9492,7472,15186,-2402,17989,-5681,-17387,-19467,-1920,-22493,-7313,-4193,28656,-7510,-24546,23622,-11
821,21972,-30998,-14210,-953,-9704,-11705,15031,-6020,-2063,-23606,-23032,-12913,-24364,8929,-7358,-6644,-21639,10152,4326,9295,-13575,-13935,-7184,-15199,-23681,-8409,-30346,-32768,25581,-20692,3040,19014,12070,-7477,-19616,16392,22596,14490,549,-14601,8300,12500,-1794,-16156,-19972,-10911,-17685,-14165,-63,24246,-10596,4685,-5529,19042,216,-5124,-8596,-18107,-6391,28473,25907,16333,19935,8166,3149,15872,-3103,-13549,19836,-23064,-20041,14462,3969,31387,32767,-2881,4566,20618,12870,16211,-25730,-5250,-7052,9436,-19165,-17980,32767,-9633,-15799,-8803,-15274,4251,-1397,6158,-25175,-24020,-25017,-2706,26510,-26231,16855,-31721,9245,-23620,1144,3114,19729,14373,17621,27635,-32428,-20327,5121,13117,-11852,1029,3132,-9904,-6139,-9775,-8190,-21580,2501,-24837,-15325,23752,14324,20965,31605,-8576,58,26071,21083,2560,27663,-1541,-8080,28058,-18479,-12198,1249,-3867,8233,-6628,24744,-9200,23065,5246,15483,-16381,29557,-5109,-10713,-32768,28809,4669,-11826,17285,5089,11541,27283,-7314,-20131,-10658,12194,-10091,-10130,17960,6980,3098,8516,-9746,23781,-12233,8567,-2361,3099,14976,17297,550,-17006,-10080,-19421,2831,4223,22389,-149,-7022,12793,12374,-15636,-18431,10590,10374,-16538,-4140,-1919,-6099,-3002,-28038,-17508,20588,-3341,-2054,-25517,3998,-26586,19375,-4422,-4540,2183,-17160,10788,-11516,15750,32767,-16899,10637,-911,9739,-21201,-1494,24216,-14107,14326,1489,-31283,-5922,14694,7299,22997,-11043,2677,-725,22978,24458,10140,27399,10114,-14036,25974,8223,2671,20878,27898,-20163,26337,18006,13117,-18609,-5035,9817,15707,-11558,-6527,7708,-32768,-25854,6930,-13527,6508,5688,-30224,21830,3907,-20914,-13864,-17184,-8221,23841,-27104,-3878,-21360,-9471,-22205,15303,10448,-346,-26080,4818,-3952,3268,8053,-19171,17052,-24423,-13442,13493,-802,-26441,8136,2096,24174,13858,-19859,18175,3709,9134,-10130,-15892,-21291,1847,-25124,-18186,-8945,12938,-23450,-25570,14911,-7556,16960,-9617,-6542,9183,-32722,22339,2934,-25923,-6094,5288,-21189,-17215,9033,24378,5936,-7710,-16459,-1637,7
330,8922,7899,-20739,764,5155,-2634,21838,8587,1897,9704,-7028,-18568,-7656,-31352,10872,13561,10179,3704,16403,-9117,-6641,-8539,28533,-5289,-13808,-7499,-18613,-25043,-9277,-32768,-4494,-15499,18543,-7317,-32768,-19065,20778,10019,9490,3817,18997,9468,15144,-13666,-12398,-5067,-29326,-30918,-9019,-10641,-690,-12605,-9075,27919,-29743,17673,-6201,9370,-13187,11474,-11086,22132,13190,-26276,28212,-11453,9886,26471,29039,4198,-18469,758,-10018,7505,-28544,-6088,-19778,9588,27200,22997,-30172,-7077,-16112,-22477,-7643,-7472,-30341,-19486,14982,-963,-27290,14316,-31749,17993,24244,-1999,-7540,4362,-28588,5386,-10705,30169,-7996,32309,4041,18244,-12506,5212,-9973,-3242,-28478,32767,-28916,-4442,23624,-19992,10839,-8896,14747,32767,16849,15551,22521,-1617,-14484,28561,-11890,-10317,30008,-4281,-27617,20383,5710,2409,13556,-6313,9665,11382,28796,-3252,-8132,-6318,4963,9501,-1858,2422,20298,-17389,16171,4268,-6051,30206,-32242,-9567,-14193,-25262,13468,-22204,1060,560,-10498,32767,-808,15526,4236,21555,25443,31571,-12531,13379,15617,-9032,5938,3567,-30531,-30349,-11712,-19702,11536,31529,15323,18187,-27199,-10621,-701,-19641,10603,-21686,-19434,7008,-18406,-8073,3282,11146,-30886,-19419,-9441,-87,-9514,17335,-4540,-2105,-16737,-4643,17418,-12849,-1066,-14204,18384,27144,21296,-17585,-8072,-17559,12003,4515,-7101,18835,-8900,32767,22581,-6762,8559,3661,-28935,8721,23726,5842,-5453,14426,13143,-16384,-2400,-3751,15488,29875,3677,18560,474,16636,27176,-4377,-11799,-16475,-10858,15073,-435,17478,-18522,26385,14757,-8123,6586,-13294,29110,10740,-16165,-26017,22383,580,9599,-13870,-9150,1768,12878,17324,-23799,19808,17041,18162,-24032,3837,23206,-24455,-32768,13897,22334,-11691,-9806,9243,17639,-13442,-852,-19763,10916,14392,-10715,-15005,-2081,-2975,11025,-4125,-18854,-20731,-4790,-9274,15934,28566,-8057,12894,-8479,-3455,21338,3145,14992,6277,-8175,15696,-18094,-19598,13327,-1102,-27419,1430,8721,14540,-30336,4561,-587,-5866,5893,10775,-6855,-9807,3512,900,26987,-2956,-15690,-
8204,-12242,11385,-30204,21584,-30032,-14313,-18130,22853,5708,-9331,327,-23734,13301,-9580,5810,26096,8799,-25171,-11873,-16271,-5661,-22402,-11968,2673,-19872,13507,-25596,24701,-17262,2892,-6278,-2051,-5875,-30029,-24708,27483,-737,-10287,-24860,-16800,-7534,-23979,5482,15921,-10335,-9660,-13070,-8007,-13971,-26377,-19756,7646,17342,-1403,-10029,-9463,14633,5503,20007,12943,5024,-21105,6272,-448,-3462,-10850,-30466,-23661,21712,17318,-32768,28060,-22985,4485,-14691,16547,-15426,21355,1090,-21782,-13073,13900,16762,-21895,23038,-11281,-11944,-17965,-9999,-591,18483,31897,1405,21561,24206,15761,3099,-14698,10964,4276,10982,-6380,3613,-24204,-20813,-13362,1811,-4444,-93,-4652,-24045,-15867,-12968,-13009,-11773,11128,-12715,32767,31941,-7896,-10087,16265,14985,20006,-18129,9061,23702,-19751,9844,-21007,8992,2044,31510,30919,-6342,-6262,14693,-933,7764,3549,18113,12878,10864,10705,15262,13666,20005,-13020,-14503,-32768,-14473,10466,-21624,374,-32768,-8734,-1741,18800,20603,7307,20453,1976,-29655,26394,-25037,-1930,12005,7832,13580,-16982,-22539,3686,27033,-10828,-124,23385,19731,-9453,-18633,-22341,23326,-16132,-14350,-20293,19772,10693,10908,10891,3669,14696,14437,2952,-8856,-378,-8227,-1950,19298,7048,-6875,-8997,32767,-5522,-22635,-14163,-9206,-9594,4735,24375,3539,24967,-12195,-18035,-25433,28956,2785,-16683,25628,-30822,-5776,29074,-32768,-23740,-11356,7915,-17921,11432,22824,25203,11792,15490,8765,32767,-16517,31938,8917,21867,-25239,693,-3259,3643,4205,12705,-5207,25771,-24129,16812,-20873,-789,-17327,21341,-6763,16771,-14925,-7534,12601,-1091,-10912,27759,3380,2932,-32768,10652,-7879,19920,25366,-6142,32767,-17215,32767,11563,7491,-2790,-24904,771,1044,24787,-23226,2356,-8140,18140,1601,-6674,-7358,7878,10187,229,15853,27523,29243,5014,-182,-22782,-26645,-6278,-1397,16989,9086,28651,-7543,-19275,18348,10513,10257,22043,-7705,-13141,2054,-3490,-24942,6114,5404,1440,-17591,-14565,-5058,-17430,10577,-20014,-1245,-19309,21284,27333,-18102,8519,-1387,-18232,21000,-
31993,6867,-26552,-6741,-24603,19338,9966,20856,-13657,-3229,30766,15464,-18567,-14873,12668,4312,2865,7471,-11470,-6292,-1210,12433,26737,-21966,22618,-5113,-5523,6750,-1310,30276,17377,5973,6405,-22680,-11558,18561,-3394,23082,-4951,32518,6761,-18519,387,-496,-17737,31435,-10405,-23981,-5940,-6188,-12652,11541,-5152,31222,10744,16580,-13868,-13058,-1341,14943,-28698,17098,-19200,18476,-3251,18541,-25945,-20308,1762,-4123,9763,-704,18203,13293,-17053,15490,8123,32767,-21553,15975,22350,966,-20445,27010,-13607,25558,15455,-26514,6275,-26348,-7826,7783,7246,-10114,-12795,-4639,-6529,-11976,8059,29306,4045,-7964,-23573,-11002,-32768,-581,7846,-709,20285,29026,-5571,-27965,12797,12508,5732,22366,15603,19708,-15329,29486,-7684,-9471,10581,908,26993,21531,12378,-23001,13915,20151,24118,11419,-26791,5543,12532,14196,-4347,30140,-2986,-3748,-3996,-16643,-13868,15326,1962,14035,7464,-23008,-2529,-8227,-21425,-4966,4782,11925,9093,3109,-11897,-5944,-13832,-16407,-32671,-7493,18041,16795,23431,17497,2291,-7094,-8331,-7985,-15361,-26807,8939,-9769,-601,23681,-6536,-20146,12458,-266,-6201,25312,13922,19854,-2535,-294,-12262,-7072,3881,11334,-18928,-8478,915,-19328,-7485,1794,22390,-8842,682,-17761,-16615,-2926,2448,27520,12985,-21313,8046,3659,7805,3143,-10808,-11826,-3500,2840,-28431,12303,8073,-24340,-27935,23883,-27092,-22088,-22986,10610,-8082,-285,-16812,16741,-17458,10168,326,-26437,-19682,-8874,-16073,-26505,-13118,-6302,-32490,-5101,22683,24898,14187,-17176,-15254,-2878,-32768,22709,3515,-16179,-16637,-5221,-19232,-10448,-10894,15036,3171,-28087,12190,-12878,16079,23351,9568,5388,-22912,21196,9353,22141,19064,-17329,8779,-23806,26916,-11254,76,-16015,-17789,712,-4410,7955,18437,8682,-20712,-13548,25808,9567,25126,-13272,-24511,12001,-26699,-8398,-4752,-3624,6233,-2592,14628,1904,-9949,11469,-22839,15470,1935,-26271,-23493,-17781,12800,-6943,-1896,18149,8378,-13113,-13699,5319,19116,-7876,30944,20014,-553,-13530,28428,32767,-16453,8618,-2897,-24865,-17294,8816,19004,-509
0,14417,-7215,-11425,4172,-15740,-14212,-7559,-13191,7570,6449,-3071,15877,-23267,-13743,24434,-2487,13273,16325,5001,23988,-30916,-21644,-19271,-16190,24061,-15920,6398,19568,16478,2751,3001,22264,32564,-18033,-28161,7536,10379,18804,13898,10855,-20063,-15038,5091,-2809,21754,-4994,20097,13325,6240,32767,18787,-21419,11133,-4260,10053,16763,-26724,-6617,24088,-13202,-5016,16299,-27588,16159,-13457,-3320,-19994,-16055,12016,-13248,-18481,17145,-5579,3748,-27064,-13444,-1771,10694,-9372,29600,-4581,27508,-6019,29194,-1028,-20741,-16525,-8260,1492,-3327,7108,-1273,10789,-1649,-4521,6837,9321,16677,7088,16969,-8953,-2901,-29501,-14040,14684,-1477,-17584,-22236,-13435,26525,-17198,4846,16367,-12509,993,29136,-16616,-11532,-15933,2894,25335,25221,-14393,-32768,-9627,-15770,4350,-6385,3835,4502,22753,-22860,-28642,-7387,-30295,-15479,11440,28759,-25555,-15191,-15602,14071,11632,17655,-7627,-19864,25544,-17085,-19400,-27249,-16154,-6612,12580,14065,-8350,-6856,18604,-25467,-830,-8396,15311,-15436,18158,-22945,-7110,17976,17036,24487,-210,-18820,20402,13531,2981,-15568,-6612,4024,20929,2411,8254,11289,10572,21089,-15062,30921,-7539,-27808,6787,-32768,17414,14556,-11863,-3431,-766,-16408,-1778,-12457,3780,12755,12298,17057,-3757,3043,23175,-13341,2648,-9333,-1954,590,32767,-12882,1380,-24294,5437,8413,-14453,-22106,7622,-28039,-14199,4738,-17150,19528,16373,-752,-23544,1427,-7861,-10248,18275,21984,-21656,4270,-22485,-21150,32767,12422,13137,26406,32767,-11945,-1254,4505,-12270,1058,11899,-26929,22981,-23533,-20995,-10192,5008,7665,8952,-25052,-26145,13484,-4721,8375,-19719,16367,25569,1892,8957,3718,4880,21865,-15135,-16081,15022,-7962,5452,7021,9184,-17379,21371,31970,7761,18336,2924,10806,16511,-7893,-16812,28381,-20879,23406,15233,21630,32331,-13703,10740,27789,10202,16521,-8802,30511,11975,-17890,-6046,-20450,23573,8448,8514,-2662,-19276,9028,-18080,-6376,4596,-4815,25908,-26996,174,-9518,-15181,10321,-13848,-22760,-16736,-22429,-1229,-21320,32767,777,-24666,-14964,-248
13,4866,-3567,19035,-14521,-20272,23272,-5961,-5212,3271,18811,-9570,-19939,-5786,-15304,13904,-12800,-9064,22553,-26211,15063,7168,21020,7421,20320,9564,296,26233,25908,6781,-22559,-12069,-1424,20334,26073,-12335,20240,-6036,-4997,-11307,23650,6810,-23564,7413,-20982,13698,-26324,-380,4344,-11817,15147,-9721,-8228,8448,13338,23712,-26140,26271,17405,2049,13697,-11681,-12996,-605,4280,-17840,19324,4906,18794,21542,12353,2417,-8887,-20171,-3380,17463,28518,-19531,-21488,9195,-14338,26438,-29288,17538,13308,18604,9859,26007,-16425,13557,11309,-19474,24175,-28393,-25768,-1939,11808,6762,22281,-8600,7701,-641,11233,27764,31620,-15125,-32768,19534,-21955,-3326,-18130,-8738,-7723,7647,-18962,16789,-26229,15390,10283,14622,16894,14203,14995,28503,27837,3072,1635,-18932,9931,6173,-23228,24635,-17431,12001,24926,16996,-19493,-24152,11748,-6775,-25478,10193,-15750,-16295,12801,-25537,12587,-6239,5632,2189,-20866,25762,20198,16653,15658,-28748,28704,17608,13314,-10674,-20476,-10279,11497,2014,2682,-5183,23776,-5751,-29367,-9933,-19227,21954,-12202,-14315,-12677,19283,-7105,17394,-25593,22869,5551,16407,9404,-12561,-13045,-24117,16619,7414,-7336,23646,5,5469,-8044,-26730,-28674,-5762,-8913,179,29237,9657,20173,3419,23852,-4744,-11134,-25304,9048,-5156,28894,14320,-5136,-13428,-9900,21937,-21665,17774,-2653,-24857,-30840,-25418,7783,20724,16587,-8661,21214,-7598,-8074,3904,23214,-13258,1073,-16629,-14958,-24637,4210,-32768,-6684,8559,-20675,-9391,-13671,-27,-13457,-8685,19152,14076,16084,23310,20492,-17334,-763,13077,18329,1085,10421,19068,-1970,-8464,13036,-17672,9761,-12309,18595,10104,-23865,-15427,-1912,-23539,-11276,-13644,32767,3733,7157,11075,24153,-15695,15978,11418,-28688,19986,20500,-14829,14208,3659,-11251,-15165,-20031,-5027,2513,-22178,13406,-32768,16118,11687,3790,16372,-13199,2877,2852,-10930,3096,21753,-6741,21778,11360,15704,-27526,-28008,74,-24084,-3122,-2805,-4208,17185,21477,18600,-28977,3643,-18981,-32768,-32219,-22746,7364,25640,-7406,-3876,-4891,14381,-177
90,-24351,-24814,-20678,10522,17140,-5666,-3396,13154,-19964,-10893,19238,-32768,-27833,-21539,11382,8313,-4803,14347,15403,4342,2423,-3403,-8049,4092,-2543,17263,-11242,-11455,13075,-13829,-11203,13095,11086,-15962,22363,-14947,26298,-28000,8473,-3396,10017,-14809,-2750,31736,27223,16578,-10316,-26916,-14906,19331,10634,10585,2995,1853,77,-17865,7578,-8937,-24984,-14177,4115,7674,-22988,30495,-9562,203,6490,-203,10628,23770,-874,-24988,32311,-23469,-11130,91,-5765,-13627,-15585,-23372,24959,-24128,9996,-10307,-23669,23853,16487,-12227,-5700,19559,-24761,20789,-9452,5369,-23637,8355,31481,-23675,-8651,-32768,-15168,8056,9013,6185,11159,-3575,21257,-4908,14409,21214,10129,-26246,15875,-19528,20050,-19539,23905,11221,-7820,-5074,5332,-8725,19462,-15660,6558,-32768,6281,-19384,23358,-32768,9703,8717,13091,9129,-4252,-4940,16065,-21757,-20144,-30742,-23385,-1429,5514,-13641,9946,-6554,23178,-3517,-25522,22975,12173,32767,-17594,13734,-15998,20738,25495,1566,-24117,21611,5010,-18809,-12708,25096,-11622,-28346,22697,16842,3556,-4006,-18910,5100,3488,-12035,-4292,-146,-1433,-18450,-4697,19781,-19565,17734,8719,-17699,-12615,29084,-8,-14483,12127,28047,-396,-28928,-19081,-12651,-22147,-13980,6355,3874,-12908,-923,-15626,-12422,-23017,-14130,20642,32396,-3347,7660,19150,-10549,-21514,23444,14972,25833,-32768,-8313,16200,27082,-15654,9679,29115,5604,15443,-16069,2883,8505,-8779,7388,-6336,-5807,-8126,26698,8805,-10092,11579,-1780,7196,-12691,-5300,8898,-318,18788,2728,15103,-4292,1391,-18624,-17537,957,16069,-15566,-10679,-17740,-3495,-7763,6540,-173,11659,16386,21166,9097,-13907,-27349,13521,16994,8766,-24804,31544,-5839,-11638,10742,18451,20310,9752,968,32767,-22494,-4402,2514,-11654,25030,-24978,-1383,1844,-18102,20700,24823,-17109,31627,28955,-8917,-1667,19046,11394,7468,8516,-571,-32768,10295,11422,-8543,21331,3202,-10088,-4971,7967,-7142,9049,22283,-32768,-9611,-9785,3838,10698,-2655,9168,-19980,-18933,-11822,25953,19658,-19517,-14137,-6616,7026,23134,23518,-12579,-1746
4,-20370,9781,-1064,32220,-11703,-32768,18321,20567,-24571,516,-11530,6120,14813,-17063,-24142,-8688,-13669,-1088,-14977,25468,5135,5708,-12630,-29587,-15273,8612,-2843,18838,6611,-3402,1799,23906,7499,7381,-19575,-32580,-13498,19339,22257,-16137,-32215,3333,17457,-1529,31946,22462,20808,26924,-23202,-18695,-13643,-12987,-20659,-6080,3046,-16549,-3724,-32768,-4970,14305,18015,21221,22805,-16818,3682,-12478,30880,-2704,-7699,11060,-27099,-22173,4789,-9712,-4999,-7884,-7188,-7963,-21608,-21419,24733,19866,19025,-13117,-17824,-1909,-25716,-28702,-15026,-7708,8424,15167,-26541,26926,20808,-13185,5209,-21822,-9004,-23680,21819,4981,-5186,9848,-29247,-21160,7008,2134,10262,-14381,-31989,-7847,-30678,-10884,12776,-27304,-25671,17311,-15434,32767,-1555,2225,-18504,14442,5334,1848,-21060,-12171,-13027,-22378,-20271,2620,13168,25076,-14490,-16084,-17273,-11204,22805,-29994,-8298,11050,17914,7002,8728,31320,-2825,-21204,13050,-5750,-10421,25166,-6082,-20763,-19596,31819,32767,6250,-10487,-32151,-1058,-7882,-13682,13475,-4595,-1764,13653,29816,-25727,2345,-15061,26265,17492,-27201,14048,29204,12801,-6522,-1755,-8862,2565,5370,-4386,-16699,-28275,-10077,7640,-31273,-16452,-15972,-7062,15880,6330,-28008,9251,-18715,32767,-27077,7658,-15433,-32567,29659,19528,-22398,-17321,-3018,-3315,-9216,12325,385,-24671,20163,3251,-15948,-18820,-24421,-10537,-6041,-32768,6124,-3852,4134,-4523,-9407,-6566,6578,28899,16295,-23,-13567,16669,-8653,-24235,-1970,-12803,9406,17653,8181,14526,-12897,-16404,16755,-1712,2692,-6721,-14779,15821,-7862,-4519,-8038,-12592,-12782,4060,27467,26815,32767,25422,164,-24824,24745,24194,-18612,31766,-10764,-17732,8731,-22795,-11670,18298,-8367,-19552,10404,13702,-25237,26773,16999,26272,-12451,19243,21079,24193,12692,17324,5806,-512,9635,4959,29684,3115,30722,20584,23657,13502,-13941,18521,11965,-21961,-24163,-19768,14151,-9235,-17670,1357,-12950,-17028,-18696,-1792,3997,7747,-29550,32767,-23330,5834,-32768,9629,25557,5932,-18172,24684,-20927,-28427,5971,-14478,48
25,2800,-6515,18683,5971,1910,9359,-8688,15416,-10804,-26083,13148,29169,-7407,6661,-11912,-26972,-7958,13210,-8977,-26678,13043,8833,-19908,4878,-8451,-19256,-15528,10425,19037,15139,6932,7290,17119,-12484,19409,18200,-32768,18470,2322,-20189,8541,17750,-12554,3831,-6516,9379,-1768,10241,-19443,-21604,14214,26335,21385,4830,18636,-25947,11746,10662,-16969,26138,20083,17881,3420,-727,25178,-6871,12611 diff --git a/tensorflow/lite/micro/integration_tests/seanet/add/add13_input0_int16.csv b/tensorflow/lite/micro/integration_tests/seanet/add/add13_input0_int16.csv new file mode 100644 index 0000000..e9a7abe --- /dev/null +++ b/tensorflow/lite/micro/integration_tests/seanet/add/add13_input0_int16.csv @@ -0,0 +1 @@ +24531,-11189,28325,-8640,24793,3659,9115,-3440,-24662,-24576,10740,20072,26804,29311,21257,25814,-11188,1247,-10751,-17743,-16228,2943,21027,2528,27013,32589,-21732,28411,-10713,-19468,-20248,24328,8373,4619,-27903,-27178,-9639,-21333,-24136,17360,31443,12989,28394,-30762,15968,-468,2049,30359,-25765,-5433,4880,20598,28247,-14299,-9517,-19173,-16360,-3044,-19182,-6166,17476,-4072,-25365,29009,27895,20681,6570,27353,-14164,-18176,-969,14986,5382,756,10183,-16825,6088,17946,32629,9577,-759,20314,1716,-27959,12644,31930,15152,9440,-28651,30081,27720,18473,-23123,10142,12223,3414,-11153,-18601,-13996,-26084,-512,-20704,4754,21216,29197,11868,-32047,26987,-24566,-9120,-6326,-7535,18938,-10599,101,-7,-7550,19171,-11513,-24709,8063,-13032,3937,-957,10804,-21234,-14696,24091,-25386,3988,-25843,-25636,-23169,27613,-28715,-181,9229,-32537,12991,31128,3799,-9564,28440,24410,-32422,29615,-22969,-8485,-13429,12,-12069,-21978,25692,-22462,31590,-21404,29558,17286,4021,26457,-23082,32265,23076,-7801,31898,9490,-7453,-26240,24879,-21883,-22664,-32499,19401,-30300,-7537,-19247,10881,24061,-12456,22522,30584,-4993,-3888,25963,19282,-2941,32081,-26350,-17341,-6202,-775,-10665,4383,28172,24935,29044,21752,20120,-27204,-26785,9182,16556,24122,22040,-19259,10613,-3814,21243,-31907
,-24598,-15464,18781,20903,28572,32134,-27393,3116,27589,-24005,-22075,-14910,24366,-918,21526,-8824,30674,-17703,-25846,-17041,12831,18806,-31527,11826,-8779,1167,9430,26944,-14968,16809,-24664,24486,-32117,31275,5136,-32673,2774,-12617,-21616,-15481,-11898,-10324,20915,558,23567,-32444,-8778,-11830,14018,20763,-16476,24687,32177,24114,17808,-11716,17809,-15061,-28217,30601,-29052,-7670,17207,-4692,8546,5416,-25142,-20011,11178,-22085,22005,-3952,24304,22764,-9151,-244,-21332,2123,-22237,-9963,-5490,-15324,-5861,-25047,-5117,-20894,29432,16923,-29569,-24996,-6570,30030,-25538,-22136,-8844,-16482,-23074,14314,-8975,-4288,4976,19689,18673,-23110,10008,28166,27004,-13460,-973,3249,1394,-3865,11369,-21517,10596,29605,2523,-32607,-4185,27150,7761,28776,12364,29500,12457,8694,-2873,-28369,-23461,11481,-27971,7133,-21628,-4450,-14980,-12644,1548,-4856,-616,-20789,-334,-2744,26757,25185,-9664,7311,-15723,-30926,-17515,-28492,27159,7188,2792,-20223,-124,-2993,18888,-9568,8403,-26514,-249,-26449,-16154,30232,26948,12137,1318,-19587,-27466,3164,-8608,-28308,777,-10558,-13603,1940,19574,-19848,-25137,-28109,16690,-22710,-20501,14223,-21522,3568,28077,-8500,5223,-8532,-31604,10464,8700,-22202,-19355,-4675,-656,8221,-10085,-18638,-17072,11940,-2038,451,1795,3213,19806,28839,1051,10372,-12407,18685,-11949,3157,-1390,-17761,9446,3298,-9849,-13822,19059,24838,-16523,-9732,16816,-21552,7036,-16155,-1258,-5334,-7964,18322,-23137,24625,-1185,7224,-829,-27660,-3429,17218,-27714,5222,-1525,-23803,21562,-16604,22604,-4904,21029,32183,12628,9485,-8833,8744,15388,12253,-5442,-18225,21747,6436,-29773,30766,24259,-6998,15384,17876,15568,-3286,10614,3118,-11307,17020,23430,9897,31892,-12997,-3352,719,-21698,-17462,-20595,4201,15174,-25980,31728,5307,31589,13335,13670,7027,31675,-24037,12740,-29340,-23977,-186,-868,12768,-25867,27612,-1705,9925,-6713,317,-3333,7957,11943,-16623,-24882,7468,-4306,23714,-2596,10055,-18101,-30250,-9830,16820,7370,16374,18461,20882,-10624,-7343,16050,-5242,-3881,3
661,-26500,-17783,-1364,-23143,-9137,11270,27862,4231,-23766,8894,-16240,22136,23070,-20566,8607,-25680,-19930,7309,-20399,-11946,-22365,20933,6549,2945,-14063,-201,19118,-18532,6770,29006,21481,8927,26716,22568,22423,-11978,14555,-23402,-29700,-4241,-28522,23983,-19996,-28060,-1571,19202,-9376,-27442,6415,17533,6223,-2238,27508,2598,9504,1956,-15931,-24859,17526,31359,-17015,8397,-926,-13336,7885,-24969,25924,-2594,5482,-8180,-4811,27212,12502,-3519,-29601,-21976,-15444,927,7551,2902,-28360,-20149,-11740,23649,26791,16588,28971,19843,17616,21634,7373,-30304,-30725,-16117,27266,-21271,1197,-2593,-3184,-24442,-8118,-18702,22418,-12775,-8368,32532,8751,-12857,-25655,14729,28379,-1096,-22655,30913,31478,12512,-29458,-7469,13086,22195,3705,9088,2485,27736,27515,-4194,9606,-21657,-18862,16752,-2192,8593,-5331,12832,-12125,4652,-17976,8943,25636,18472,-2322,14871,9116,17225,-28404,-15875,9386,-24091,-23532,-25232,-13411,25480,12346,29950,28778,18063,18657,-14820,1779,18237,28767,29424,25069,24381,-16640,6399,31513,14379,29337,29898,-736,16822,-26099,10190,27879,-24297,16717,18229,19329,-19200,1996,30778,14728,31256,5396,12149,17890,15632,-4310,-9414,27327,8322,4541,10215,11711,-4421,31795,20924,-27443,-20325,21119,19515,-4990,16396,11706,28817,5826,-4347,26848,-12166,-15067,-4968,-8413,7149,4138,9199,23438,-11664,-28439,-6080,-22031,-22660,-26546,-12272,26100,24467,-15768,18417,8524,-16571,12575,-8054,367,23254,-2906,10678,-1813,9035,-1291,-27835,-21059,25808,-25840,24663,17988,-2561,-15105,-21151,28764,4317,-2943,31131,-22622,26031,19072,-12342,5679,-806,26108,-3489,26916,27154,17531,-9206,7387,-3491,4984,-5775,3137,-32249,-2818,24925,4013,7914,-25215,-32359,10728,2078,-19530,-919,-1168,-916,24779,-22584,-14480,-11203,9477,8429,10816,-20547,27231,25555,5195,-132,29765,-26704,-12077,24263,4233,-13860,-30256,-32727,-6166,-10585,32683,14283,-14668,-20206,-26844,13005,-1,13815,-7355,-21991,2055,21195,-5680,-31841,-10241,6114,2644,-13192,-32750,11225,3961,9238,29429,11369,-34
01,-9318,9838,7310,31269,16054,22752,-27055,25636,-22630,2295,-27712,18314,-21319,-1337,-20524,-8836,30696,-15485,-10869,9548,8361,-27572,-26243,2624,-29947,3373,26028,8459,-17122,-9423,-3197,-469,20490,25922,4514,-20887,-16348,5570,10538,-7103,-31207,25332,24777,16592,-30577,26251,22166,-13975,3575,-21780,-31187,-17325,-7229,23724,-14892,-31447,-2916,-706,-1657,1690,-16985,-14258,-27070,19897,7840,2757,-30227,-28670,9424,-16154,-17915,-12221,31534,-10562,-10194,-29141,8359,-8516,-5520,30242,7636,15708,32009,323,-25534,12357,14220,20304,1575,22133,17144,21324,-26629,22806,31515,12128,-3089,18654,7025,-10461,-3808,20194,23504,-25426,-15619,-5908,-22346,-30108,-19710,3174,27100,-21912,-9150,-16203,3056,-9074,31278,16147,10557,-5366,-25886,-32212,23125,28664,-19958,16803,14976,14906,17822,-3993,15818,31701,28111,3606,1264,15293,29921,28891,5907,23252,15900,-32749,-7481,-9219,-14163,28342,4787,-2881,27638,27459,-17239,-29683,-394,-11906,19507,27896,23002,-23909,26448,2915,-7718,-10264,2768,-10127,-3133,-23890,6848,-9310,1824,-23259,-9268,-15661,-13564,-17155,-23522,-1605,-8750,24187,10416,-6470,-12479,23637,-13751,-8464,-30389,-28426,20787,11429,10763,-30348,-18687,15965,13439,-19133,22504,-8667,-10289,-24560,18651,30086,27463,-2558,-4858,20865,14500,27128,-26149,5481,-19426,-22010,2098,-13468,-29894,21609,5343,19789,27872,-7311,22542,3250,22735,4514,32065,31089,218,22903,21392,12958,32106,29472,-7392,23430,-11220,3437,-6399,19240,-4456,-16894,-6541,-24395,17132,-6361,29334,-18270,13993,-19720,-22109,8490,-23831,661,-21830,23741,31736,32306,20224,27463,1709,-10483,-2201,-28545,-3133,-32553,446,-22893,22819,-25061,-3995,-31829,-8505,-9387,-19904,-22230,15975,-27003,24287,-13786,24092,26096,-3532,18058,323,-15353,13059,-1226,-8767,11052,-16606,-23546,5854,-25616,13481,-22155,-14540,29390,-17974,-21563,-2976,-14849,-32167,30739,-5685,27412,9930,-14884,2249,-27701,-4031,6801,-32358,4266,10167,19009,-31888,-8680,25036,-26416,-16461,-25567,14552,27234,16142,10414,-1319,-1026,
-31182,-19531,8260,2633,15498,11798,14693,24633,-22028,3438,-19330,1302,16340,-5476,-21581,17297,-16236,18303,-30279,-11086,-30836,966,-11435,21608,-9110,22011,-12269,-18716,-19294,22981,14975,-6761,10173,-32608,-31432,-10743,-19169,19730,12485,14953,-16562,-6926,28714,-3704,10516,-10863,-25008,-9137,-18204,-12379,24447,-27551,-18940,-19820,-19358,21925,1925,-7280,6127,-9372,26889,16873,32128,-11444,32237,17204,-18241,27183,1107,-19554,-6930,-28894,-16502,27910,22088,32373,4327,27543,21654,-3550,-11240,-2756,20702,20806,26092,-653,-25445,11209,10020,-8401,30256,422,26594,77,19342,-27597,16854,20912,-21403,-14822,-7362,654,-14173,24495,14390,9272,8493,22070,-21238,7390,-7349,975,27882,-22004,21072,1495,-13095,13881,-13923,-9043,-10337,-213,-20472,30412,-8535,-6585,16098,-22259,-9641,10014,14658,-21598,18194,7881,-6122,2365,-17624,7038,-29900,-4763,13222,-12894,7687,16648,21465,-32093,23806,-15238,8129,982,-22802,9122,-18209,-10086,2974,-26942,-10102,-21724,4323,22454,14650,-24356,-7526,-8939,-32550,-15734,8987,-32543,5556,-20571,7032,15174,9745,-25374,-1715,3093,1252,10707,-12120,3697,19686,30646,15098,-13196,-16918,-22347,29739,25393,-1894,-5927,12217,10042,-27199,27384,-25008,5264,15153,18600,-20734,-13033,-19665,-5203,18314,25576,-30943,-23289,-3949,11758,-22414,-6480,27708,-4861,27015,-15531,30660,4505,22602,-14908,10227,-14968,-30550,21151,-31402,-9818,-8098,4969,-12824,6291,3861,-18957,8890,-4868,4777,8592,4293,-22939,-24292,-14809,-17220,30382,-31158,-13611,-10045,-20069,21038,17041,-23114,-2133,17141,-31469,-29170,23016,22772,-575,1952,21724,6510,-6295,19456,-19100,22539,5318,-20118,21450,-6584,-13770,21277,7827,-20320,1471,-5382,5114,-1412,-11123,19552,28438,-4900,15671,13041,31363,17892,-30490,-6803,22053,20166,-3624,-20563,-21563,-4650,-31546,12164,21186,-25920,8016,24225,3461,17043,21955,4079,-24762,-7459,-32448,9442,3558,11467,-14842,-14912,27797,20647,-25445,3433,-29567,-8551,1863,-8489,-32414,-4836,-7905,-6495,26196,18136,28553,4913,-21013,12832,-32425
,22006,19066,15396,-9838,-1980,-26212,-20199,-24445,25739,-19474,15033,26527,-16251,18102,-7350,31980,-23539,14999,-26686,9584,14952,12977,19538,-23628,26929,10211,16204,-23960,-29070,30819,-10255,21438,32231,22973,8169,-3754,7116,-23232,17348,-29152,6635,-13467,12737,-14431,8740,7605,-10236,-9685,4115,5834,25501,-13527,-14836,12375,-15375,23571,23460,-8972,14586,-5954,-25874,-18222,-17334,16835,840,31863,27554,-9545,27064,12221,-1790,-20404,-27194,18897,-12385,5995,9397,32528,-7518,-5319,-19910,-21832,24877,-14551,15794,22704,303,8716,27646,5809,208,-10826,8914,-2560,12815,-30060,18509,15874,29061,-23897,9956,24036,4529,15108,19771,-28238,11031,-12789,-8241,27007,-3003,-25861,9338,-1343,-25485,-26523,26040,-21457,-16754,771,-14059,-1283,9012,2874,-13657,-32708,3538,-31366,6934,10783,-22005,24685,-26500,8817,-2023,-19410,-21077,-27072,-32201,-13571,-21412,14039,-23744,-2433,73,-17255,4364,13351,-6022,-1546,11085,11355,-28509,12862,-16992,-32410,-21425,22867,30249,-6442,-10709,-14260,-14833,-26121,5645,2400,-14465,11751,-3000,-16948,18410,-27833,4050,98,-10552,-21223,-23293,-24135,28600,-729,29267,20994,32442,-31497,-19540,-15490,-19518,10864,28674,25603,26001,-12152,23653,30793,-25688,-364,-8307,-19455,22490,-19359,-16062,-13761,9237,-31654,-23519,-10134,-7778,14025,13470,5520,9863,29720,27516,28714,15526,23578,-29689,13957,6911,9967,-9507,27879,-23197,16976,-18799,-17816,-18154,-6326,25766,17110,-6402,-26609,8771,25128,-13984,-31769,15799,-6767,-19793,-1173,5059,-14105,-1156,-26272,-15592,-32416,5829,10159,-26949,-28299,20400,2976,-25399,32202,23142,-19330,-17219,-2118,32660,-28867,9798,8701,15421,-8116,-22728,-7554,-12407,-17099,9520,-580,-24713,-23585,25831,-15556,6097,7516,-26971,-31659,-3675,-14224,16864,30460,-22193,-10286,-5853,2199,403,-30801,30838,-16503,-23756,-2047,-11736,-31110,-24677,1479,-21102,28756,-31811,-795,27087,26661,2738,15850,11241,14389,28933,18807,31200,-10751,18929,-8613,18708,18396,-6108,29463,-5644,23192,32005,-7689,-24182,-25847,-1937,-2
3584,-17726,-19022,-11430,5737,-29078,13331,11523,-26785,13476,22139,-172,14330,30132,-23000,9843,-17972,26428,6192,-21020,6842,-22173,-30493,-6979,-27739,1597,-19358,32584,-17075,712,-6593,-24516,25004,19901,15382,-27431,29646,-30148,-15692,-13905,6729,-9950,18345,28926,-2133,-7936,18385,-20223,-8184,-7725,13659,-29638,10199,-26866,24963,917,-9420,-11945,30352,-14001,3824,-11572,5358,-2605,7537,-4599,-21995,25334,14006,-392,20488,-32581,15305,14068,21651,-17017,16285,8454,-2700,1901,11677,-1475,-18562,-15300,19639,-5818,-17749,-1577,-22309,-32663,6381,-27693,-5123,-4005,-20488,1840,-5860,9521,-16263,19805,-14017,-18635,-5094,-28913,18579,8113,-7452,22009,7539,9687,18977,15703,-32048,20754,-12471,22229,-29635,8717,-17413,-9232,-22958,29633,9410,6484,-27071,6410,-5007,5174,-9650,-25704,-13430,-7835,21156,-4204,16760,13893,-12989,17934,-6706,-26709,-5374,11615,-15553,-18492,-15713,1794,-31266,19225,26407,-16670,-30031,31083,8644,32530,2805,32246,-30853,-924,-7391,-31059,29957,-18703,27661,7532,32463,30881,-13912,-26456,-13778,-16855,13217,-10387,-9954,-10605,32494,23458,32297,-15720,18597,-29012,-30930,7363,-4453,-17390,-24740,-19692,20398,21981,-32609,-22801,-26263,1123,-15570,-9079,-1773,-21486,-30755,-20605,-18935,-5187,12840,30861,19740,26384,20867,25530,5605,-9065,24034,13756,19554,-14354,23821,12365,-29480,2723,-7492,-23732,6806,11178,8999,28268,5537,-697,11546,16418,-13893,-27836,32552,28850,32165,-18639,-20884,6579,18044,20952,31640,-29079,14888,11579,8615,-14004,-21709,-28688,13980,-15778,-32650,-28149,25977,-13261,-7368,-16315,-6901,-26263,-30165,30278,4085,18131,-920,19364,-12632,-4408,27269,-27758,-28634,3820,-10713,-18505,15384,31148,14021,-27552,9866,-21329,-21312,5758,29293,-19153,26259,25981,-27011,12884,12630,-32015,7475,-10576,7186,-22209,16007,-7503,31399,5833,-18605,-11956,15757,23547,11363,-30655,21157,21804,17794,-32454,3489,-19981,-24075,-11280,14670,17881,9891,-14111,-21556,22131,-14895,-25260,-30943,4771,-14692,-24625,2107,-17013,8126,-22772,-
15040,25850,27745,18991,-23427,-19886,18791,-5241,-27497,-11997,-25864,-26745,-2979,31808,-10313,-6547,-27390,-21396,23973,-31133,-21892,7735,14603,-30405,840,28920,-10701,420,9617,21216,18117,1015,-5343,-12910,26864,1952,-20940,-10427,-32563,25592,22881,26725,1510,19404,10510,-29566,-26906,-16015,26528,3574,-13977,-3577,-22083,19369,15478,-9521,13613,22296,12675,18116,-25273,23509,-19440,-9641,-13465,218,14756,10909,-14357,619,-24139,11210,27200,18026,-11707,27838,22316,31112,-24256,12492,-22209,-13601,-13661,-17114,15666,25410,-28428,2402,-4670,30783,29903,-29820,26090,-17239,-11286,31265,-28828,-1720,12361,-8803,-11314,2592,15118,16492,23149,-24447,12033,7485,20509,-14587,-10212,-9673,-7527,14059,-7039,-2066,-28371,-2108,-31771,-32529,2035,-19497,18979,16631,-5331,-21130,6484,-7352,2137,-29781,18353,27594,18909,-661,26109,-19380,-6435,-28734,-32352,22559,17794,25047,-3651,-22405,-5313,31270,-21371,-29404,-17119,-29042,-19800,-9935,25442,-27942,-6161,-24776,-22011,-18214,14332,-25584,3894,27715,2759,2913,-28518,-940,12457,-22902,13276,27433,-14464,25744,9692,27401,-25540,26703,-21946,-1485,25201,29768,-32433,-17427,-18656,17087,1068,-240,10607,32572,-1119,-26155,-29950,27729,-30272,-19274,-15752,26484,-7707,-25358,808,-3712,15349,-14078,-2322,-6294,22629,21181,10803,15561,31354,5244,-7352,-12299,7699,-15305,6214,-9499,-1346,25290,29319,25249,18215,20759,31269,8436,-26900,30695,10883,-7586,-30909,-17796,-1722,-20604,8412,-28096,-7086,-4231,9565,-12168,18336,-10016,32372,10474,-24037,24883,844,-24670,8447,-24044,519,-18874,18285,-19967,-24183,-29431,-12098,16033,-2107,-30108,26880,-29256,4656,3485,7891,172,24394,-8655,-18404,18475,9130,-6490,30408,25196,-6541,-8151,19854,-13270,15037,-8011,-14161,-31028,-21430,-1248,10570,29739,9880,-5452,-28611,-21267,12246,5930,-20554,20247,29306,31208,10596,32719,20409,24601,-24363,27130,-14004,2070,-22738,-20023,16399,30103,-22839,17343,17524,-19913,19530,18936,4332,15632,-10417,20714,18302,5217,8143,22386,-10266,-29345,-19701,-
7401,-29209,-11712,-15137,25270,-17366,-22646,16924,-13711,27354,-26289,-21668,4067,-6514,-17587,8890,3075,-3883,-31744,-17672,-25246,-28075,21774,-3688,4764,-14121,431,6067,7055,-8047,-8460,1240,-29542,-20541,-2729,-30890,-28483,24712,-15495,16217,18641,22322,-21970,-24420,14914,32309,19294,12787,-7827,7340,16810,-15029,-11776,-11418,-3705,-23388,-29863,4673,31279,-6356,1161,612,18535,-1696,-14443,-20404,-9005,-10462,32757,24422,26271,16564,10878,12860,17139,-12753,-26838,21816,-30627,-18109,11596,-8130,31766,32342,-5129,-6903,13093,15115,15888,-18651,-18296,-8195,5176,-27476,-22209,31653,-14124,-17427,322,-9329,7042,-14313,10535,-27896,-23151,-21473,-16204,19247,-27497,29135,-27919,3731,-23774,3498,16226,14265,19157,19042,32560,-28212,-25692,6964,18532,-13163,5637,14165,-12479,-5196,-23566,-418,-19919,5882,-26851,-26675,30464,25612,29117,28244,-12184,-12093,29222,30645,5322,22434,-13285,-5629,30570,-31022,-25303,6535,406,2596,-5332,31470,-16463,22402,806,12461,-27415,29267,-11032,-16769,-31482,27165,17612,-4820,30654,15919,13591,32214,960,-23135,-12522,25906,-14338,-8801,14629,19827,8109,3914,-11054,18470,-14054,13248,-9007,1066,23012,24376,-2625,-12491,-61,-18094,3399,-6784,20186,5403,241,11067,26123,-31698,-27020,18344,1420,-10317,-14830,-10068,-3181,3216,-26182,-27423,14341,-9815,-8025,-21706,-1796,-20117,20682,-3490,-12727,10527,-20585,20209,-16611,12164,30724,-14109,24119,-10890,13195,-29338,584,25913,-5669,15231,12926,-32055,-31,18401,2182,19821,-9070,2188,-740,31839,16842,22062,20540,13797,-18025,25419,-31,-8777,24411,27949,-19175,30085,26759,25378,-24056,-16201,15006,11620,-16848,-15449,11203,-32308,-20647,12439,-17711,14716,4042,-28481,15304,1168,-25101,-25681,-25703,-17517,31837,-31949,6682,-24228,-1213,-18962,23985,6448,-3886,-19124,6719,-4906,-5260,4341,-18103,16936,-23159,-24981,10950,-11015,-31018,3586,8661,21007,27840,-28374,27064,12117,14698,-18144,-17796,-19597,-3207,-30053,-28612,-21730,2846,-27759,-26178,30542,-11696,27253,-24235,-8843,3297,-272
11,14478,-5754,-18829,-17761,-4215,-20914,-20533,21199,21525,7770,-21955,-6903,7905,3049,5955,12263,-27926,-11417,-2606,3584,20446,19198,-6324,2706,458,-31323,-20515,-28749,3478,19596,13345,602,12508,-2157,-14196,-14048,21180,-9077,-18360,-301,-19053,-29625,-7371,-28692,-7951,-25602,12222,-9587,-31339,-32362,19507,15828,-1312,13960,30411,12903,16889,-15786,-16475,-11593,-28211,-27997,-5585,-290,-11753,-13538,-23427,22421,-29580,20387,1769,-740,-22104,18881,-26183,26804,16295,-18914,32375,-23729,23944,29495,23298,5182,-28771,5386,766,15682,-29607,3209,-27961,2937,31393,29051,-27705,-7030,-27981,-27794,-4883,1101,-23678,-16956,18624,-1909,-23040,27396,-32347,24911,21103,-15301,-13067,18159,-24692,4282,-16703,23769,-10965,30714,13233,17505,-21223,18791,-2381,-12727,-32147,29320,-27709,3474,20387,-31260,464,-18144,6203,32208,11754,24575,13987,-332,-23662,30897,-9667,-15829,28912,-8222,-24752,25258,-406,2716,25470,-196,4904,2916,25437,2677,-9903,1401,15577,11118,4906,5488,18237,-8281,20078,8384,4344,29391,-30786,-7064,-14004,-24370,21088,-29282,-7977,-811,-3388,31719,-10918,16425,5663,26453,19853,28767,-23064,14305,20245,-5610,2006,456,-30928,-28319,-5373,-22808,4987,28125,18881,30955,-23765,-2423,-4463,-30383,1387,-15841,-21505,-2057,-30904,1878,1177,15014,-28153,-28712,-23287,1448,-23609,10918,-9291,-9130,-30641,-4645,32758,-5903,-6424,-18735,17343,26893,28650,-12302,-19413,-32701,18812,7288,-11233,26881,-18156,28819,15252,-20847,-1176,-4568,-31140,-56,19857,992,-1164,17148,4823,-26079,8826,-1614,13295,26690,3995,15072,4826,20035,21342,5777,-7172,-11567,-1490,9154,-12558,26210,-25393,21856,16942,-16803,14433,-9248,24406,9464,-6555,-24776,17545,-1670,1052,-15217,-19604,13760,11354,16432,-25293,21808,11371,18222,-26631,9773,29667,-26901,-30919,14918,28483,-13429,-16203,11473,30303,-27392,-11888,-22745,4925,13806,-24487,-16530,4680,-2521,7939,-12963,-19523,-26977,2532,-20029,22467,25654,-13180,22509,874,3009,16414,1676,18598,10947,-22723,16807,-30898,-26547,15436,3653,-27
466,-6611,17152,18875,-27089,11069,6989,-10747,6602,22541,-4183,-9570,14991,-10756,19486,-257,-16569,-13174,-15684,5598,-24819,32403,-28610,-26506,-15196,24763,6686,-182,-11274,-26184,17719,-18875,19439,24881,12699,-32655,-10242,-17132,149,-25981,-18966,9292,-15057,19856,-19919,24651,-9893,8366,-6836,2411,1091,-25317,-29640,24208,2960,-10972,-22937,-11544,-21907,-25286,3869,20944,-21199,-7019,-10859,-11064,-7769,-28310,-14346,7167,17001,4536,-1900,-2933,15178,-5613,11706,6738,1316,-29282,14986,2575,-4725,-297,-30554,-15587,27276,11770,-32141,30951,-22413,11807,-26827,13279,-13364,27381,-3737,-25734,-24696,5867,28491,-22155,19474,-16381,-17462,-22994,-18027,7213,14252,28539,-398,13977,25371,16139,-7214,-28754,2004,14095,11966,-19884,12386,-19266,-25518,-3542,-9862,-12392,-4482,-2151,-27059,-32024,-15407,-27173,-22168,2853,-5750,31042,30436,-7328,-12914,30485,22707,13295,-23752,8831,16000,-23977,5009,-22075,12599,8573,26925,24412,2012,-14231,16376,6598,12593,9649,24342,7768,25508,4923,16821,10655,31239,-5229,-4845,-29788,-15395,16300,-20942,2832,-32665,-8566,-8557,26143,25578,-1806,17039,-10181,-30910,22931,-21279,-11076,4970,-957,20410,-20589,-17887,8151,29641,-15903,-7051,16560,28156,-18663,-20759,-32176,15621,-15328,-24386,-20128,17451,11250,8133,13942,16109,27766,29095,12569,-12126,-9850,-937,-14633,17358,1461,2371,-13527,31739,-1332,-27842,-5572,-15349,573,8467,20724,16272,31088,-2557,-23312,-22612,26365,12328,-9342,25588,-27021,-2704,23048,-31813,-18994,-2979,17128,-10716,2384,17311,30327,6486,14367,1397,28935,-18101,32049,5859,13844,-17526,3212,8485,-6215,-5609,16351,-7312,21501,-16629,7542,-22522,10523,-19959,15574,-7650,19715,-16775,-8151,26489,-4208,-24277,32633,10314,12457,-28939,-249,-4954,19164,25507,431,27131,-20511,30016,12199,-1979,5176,-28514,-6135,-2072,22097,-29146,711,-16999,16712,-5327,-10702,1091,-249,22335,-1111,28924,21454,22028,11844,5841,-24458,-19853,-7099,-5143,29350,16139,25732,-18016,-20673,26393,25298,-414,27820,-376,-17005,-5120,4050,-2
8636,3823,14762,3895,-13520,-22753,-14751,-16129,24116,-21326,-11974,-16105,13657,32683,-14245,16447,-12042,-18832,23709,-25856,19576,-23272,-3076,-23192,22328,2209,31376,-18120,6775,32477,27933,-13527,-11931,17686,14066,11273,21961,-13096,-11433,-8920,21329,32124,-30036,18883,-15765,2835,20463,6685,23256,12797,4256,10934,-14881,-15246,29171,-2124,15368,3023,31402,9129,-22683,8031,1931,-32307,29166,-17761,-22482,-14472,5203,-5197,19852,-17714,28260,24415,28692,-21868,-24353,-7156,14222,-24527,32070,-12980,17164,1336,32660,-19068,-25843,12291,-11189,17211,-7547,10491,13988,-25537,27879,-704,31000,-30635,21320,23473,5338,-14316,32526,-10176,19105,6856,-23860,17918,-23101,-2651,5738,5749,-15556,-7881,3754,-5237,-21292,21644,30080,11321,-16176,-32719,-11735,-31159,-12891,17563,-3823,16390,31210,-12493,-29072,23095,16981,4930,25278,29511,18695,-18413,30951,-20541,-14982,5983,11970,23473,17617,16263,-14573,7612,26637,28615,17816,-24824,1875,11780,22149,7818,31664,-11560,-9492,-5942,-30612,-10156,11231,9206,10273,7613,-22549,-15960,816,-18919,-14071,-3945,10232,19169,13964,-8466,-6508,-5295,-25226,-28530,-8197,21302,20052,15692,11070,-7797,-15524,517,-7422,-14988,-29718,7227,-8639,-11131,16644,972,-13456,26363,-7553,1574,17870,16520,26431,-10174,650,-7461,-20907,4886,14662,-23221,1736,7680,-24725,2630,7807,32439,-795,8340,-15798,-11201,-10089,-5696,28247,15506,-25366,2903,10687,22190,-8576,-5221,-19283,-13885,-9373,-24615,15790,18389,-32601,-26372,15667,-30549,-19322,-24006,8509,-17072,-9196,-21418,7621,-22040,3594,7076,-24474,-27062,-15659,-22200,-20365,-14101,-1488,-27564,-9349,28663,26514,8776,-14762,-20136,-11359,-31917,22575,-3423,-18889,-24633,-3991,-25406,-3695,-4099,9321,6495,-28584,23293,-27897,17503,22260,15355,19041,-23775,27373,3588,23200,19400,-22699,-2393,-30371,20178,-21406,5618,-6573,-11312,-1768,2586,14053,19480,2347,-27877,-10942,28841,24379,24753,-27674,-29049,2080,-26044,-19486,-14454,-12459,7306,-693,10440,-6564,-10431,21547,-27418,8129,-111,-29443,-17
783,-29456,23913,-3869,-4822,22279,6504,-4536,-28805,18415,19875,-21551,31916,14271,10318,-26700,29062,30885,-24738,7536,943,-21737,-28534,22636,21593,822,8964,4351,-21171,-1728,-8040,-23258,-15423,-28081,15401,231,-12957,6101,-18244,-20711,16838,-11812,25648,11779,10353,16732,-26449,-20997,-22777,-26400,28775,-26591,-2321,22320,32458,-8000,-1038,14610,27487,-30215,-31279,8508,5091,29819,9938,8156,-15890,-11297,13814,8956,30067,-12320,14837,28506,8815,31114,14090,-13003,15259,185,15086,18322,-28467,-12313,20255,-22373,-10306,10952,-21072,29094,-20784,-2064,-25774,-24061,18076,-25596,-17961,12944,-14423,6241,-19758,-27162,9495,16684,-23201,31932,-5082,20822,-2362,22491,-7051,-30894,-31210,-2971,12238,-14400,17484,5775,12908,2421,969,12208,18252,23050,9802,25253,-14999,-11876,-24716,-17195,25938,6367,-12141,-24280,-14770,26680,-27175,18579,23100,-13403,8348,22900,-23787,-1186,-23351,-2640,31120,27391,-11698,-27535,-1935,-16930,10197,-2456,9557,8279,15164,-24754,-27233,-9028,-31229,-8613,18813,25797,-29943,-18462,-6569,5176,1389,11706,-20928,-24450,20922,-26679,-14206,-24997,-14220,3839,15221,23615,-2740,-5669,28355,-23296,7963,-14031,13829,-10528,14632,-15276,-8378,10566,23415,31923,1923,-19517,30115,13750,-7168,-10061,17,12973,29597,-6415,20400,12762,14785,32110,-23712,29651,-5933,-23092,7459,-31869,30206,17248,-5813,-7834,-448,-31063,-9880,-13453,-971,14113,4340,20224,-12328,13145,16586,-25822,-3251,-6869,-10228,10283,32468,-28182,8916,-18534,6673,11667,-11964,-22951,412,-22346,-27190,16880,-13745,12869,21094,466,-20890,4038,-8782,-1020,29369,19381,-28262,9360,-23874,-28511,31181,14442,28585,29678,30836,-8294,4942,5571,-3530,-3117,5130,-27571,29021,-19114,-28599,-20658,7915,4757,3413,-18657,-23891,28671,-3506,402,-29935,27069,31800,-5264,11967,-7536,-2146,17541,-23274,-20099,30600,-2604,18479,11808,4071,-23212,12999,29956,19432,29265,8977,4354,12637,-22313,-26042,29759,-29102,30286,6885,23510,27867,-3428,6202,27404,17532,30793,1692,28353,23871,-21083,-2159,-21284,21
001,13132,5622,-13279,-17304,21920,-25633,2179,7553,-6650,29807,-24696,12919,-15023,-16851,488,-26659,-24738,-18425,-23934,2895,-17954,28731,657,-25063,-28267,-24731,1534,-5330,28977,-5214,-16104,23627,3321,3977,12105,22371,-6168,-15826,-15989,-27988,19634,-20132,1008,19325,-30564,30902,5249,28994,12850,14697,582,1839,23225,31630,20200,-28434,-25745,6887,18698,18912,-8056,31262,-7987,-4008,-16450,15481,7757,-17804,3489,-27413,24118,-21367,10587,-1820,-2044,12868,-12270,-8084,6557,6784,27453,-18861,29395,20080,15417,26502,-19559,-8201,-8490,13882,-17033,32042,12867,16312,24228,22259,908,-5688,-23611,-6224,31072,28890,-25209,-20902,1848,-29734,23748,-26325,31125,13499,17179,287,23287,-23322,23684,2619,-16054,27669,-31770,-21981,9716,17217,3358,14465,-19914,14714,11976,12927,24055,30577,-12276,-31265,30897,-27577,2730,-16988,-18278,-7058,-2966,-13422,23180,-19017,9456,8221,9816,32343,17683,22787,21686,25363,-25,-1229,-30832,13915,-3519,-26282,24893,-24425,24504,17887,22095,-17576,-19764,3163,-1754,-32309,2951,-11193,-24364,20030,-25053,13188,3132,5097,14245,-14912,20811,27818,27232,28555,-30480,30505,26910,7410,-11012,-19950,-9633,8157,1318,10513,-5805,27003,4314,-26360,-11444,-10498,19847,-25299,-7696,-13466,28044,-11648,18022,-17658,29014,5093,25029,-1231,-6400,-18187,-31722,11497,1042,-17299,31590,2908,4565,-12878,-29971,-22657,-17482,2163,10012,28223,12764,28006,3056,26583,-1691,-13096,-21386,11654,-2272,25632,16063,-14842,-22129,-13529,19722,-29844,28300,9691,-17436,-29104,-18831,14774,21725,32463,-3272,31173,-6681,-22583,-5392,31509,-17521,11428,-31512,-10884,-27073,14887,-31006,-20707,16570,-20262,-19090,-10398,839,-11089,-1965,19760,9600,28872,16716,11964,-13210,-11798,20115,19040,-2219,17998,21924,2910,-15749,18763,-14581,10223,-13765,25045,22935,-28675,-6677,-4128,-31925,-17484,-27182,31186,2897,-1420,16288,16597,-19796,16252,14458,-27468,18316,12179,-28365,5679,-4590,-25176,-28162,-31725,5464,15634,-18014,26176,-30306,31373,7353,16768,11408,-28384,-8496,8916
,-18031,-4853,13543,-11041,17453,20698,12255,-22019,-24317,1678,-28522,-6166,-14013,-5812,32215,15964,16129,-26148,17151,-14468,-29735,-31737,-19122,9898,20531,2965,-9656,5185,15388,-31503,-17202,-20650,-21417,24444,24672,-2123,5139,10489,-26590,-6826,22269,-29585,-25119,-25638,13164,4911,-5214,4242,19690,295,-2710,-16770,-17339,-3321,-4247,13801,-10664,-697,26289,-18402,-11146,17778,21684,-21835,24937,-4877,25587,-30824,2096,-13968,3191,-27171,-7989,31357,23132,18131,-15444,-19254,-23076,22468,19625,2064,-7328,6954,10118,-23269,9739,-5951,-26149,-29204,5661,8737,-23888,29177,-13612,-12670,5334,-1033,19628,16450,2174,-23410,31619,-21472,-6365,10065,-7041,-26756,-17045,-31396,19618,-22185,9839,-5402,-31143,25075,22034,-19420,-16953,20446,-24798,20206,-21599,12725,-24606,12722,28084,-19559,-2190,-30420,-8126,13113,10369,-4012,23667,1564,22732,-6,11473,29604,17195,-19825,8334,-10867,32038,-17922,19633,7507,-16222,2401,3323,-15045,11136,-15938,-3796,-28060,12051,-29155,29134,-31556,11241,4553,24566,8320,-3628,-3422,25938,-16511,-29782,-24060,-19635,6104,5758,-9508,11808,-482,19692,-5387,-31572,15271,8382,30009,-8970,17009,-27081,31524,29811,7549,-23181,13763,9081,-29729,-17883,29052,-12223,-28470,22992,31274,-858,5405,-23401,890,9973,-24083,6759,-12277,-6248,-15492,5291,18676,-13156,27734,2870,-22628,-14653,30930,-9700,-27872,24769,30586,4841,-29767,-13204,-20540,-21185,-28939,-5134,12343,-7334,-2184,-28857,-4286,-19217,-15732,26058,31108,-10874,15640,10186,-6119,-15759,20123,16225,27176,-29667,-11006,28253,25932,-13166,1151,32240,11343,7455,-26948,340,22605,-7199,12831,2021,-3654,-22637,30197,2041,-16482,15424,-938,8837,-27993,-12118,2669,-6901,30054,14709,29456,-3725,-3785,-12364,-32399,11101,13094,-9838,-10026,-18926,-12150,-13774,3034,12121,26760,9303,29365,2634,-21769,-31313,28074,9580,-2421,-25834,27683,547,-20616,11230,28658,32073,11189,8691,31963,-14219,-15440,-3971,-22491,27318,-18897,4842,11583,-24983,26843,16724,-27219,27761,27578,-7737,-11197,23078,17376,312
1,5288,-4657,-30953,327,1871,-18473,24428,-1879,-3235,-3052,-1761,-15080,22484,23732,-32544,-21470,-3232,13997,18415,-9053,5165,-21002,-21383,-11443,26182,12069,-22727,-27366,-17879,17476,15746,22717,-17912,-14935,-32297,5565,1698,32543,-6485,-31843,29401,22085,-20155,4320,-9478,15940,23701,-7464,-20449,-22549,-17439,-518,-15278,27657,17473,18648,-16287,-24774,-5643,23024,-7495,24791,11537,6498,5785,23986,-1726,3575,-31376,-30965,-8283,19653,17799,-22212,-25937,3301,29237,1329,28311,20932,14925,32410,-31553,-31552,-12232,-6612,-19374,-19388,8394,-7396,-2869,-32622,-1993,17049,22334,21070,25230,-11430,11588,-2194,30854,-13179,-2737,14372,-31030,-21469,4860,-13273,-8937,-17163,-19689,-2667,-30451,-32081,16732,18737,19520,-10760,-20024,-12488,-31857,-28441,-5625,-20683,14130,24014,-19472,26740,19727,-27231,6340,-17261,-14409,-22194,30701,2703,3842,10650,-26336,-27938,19757,568,9576,-8811,-25400,-19671,-29259,-21962,16038,-25860,-31281,19490,-15520,30644,4264,-84,-23842,5680,11985,3562,-18668,-3855,-5788,-22986,-11704,504,16888,25051,-19112,-26175,-27555,-11441,16185,-26124,-16320,23380,15750,6393,7412,31062,8869,-21917,13580,-1225,-1629,26158,-11348,-23492,-24802,25288,30591,4638,-24802,-32524,10205,-3510,-22881,27820,-6936,2049,4618,32086,-30312,-280,-9619,26857,24272,-24613,8666,28486,5038,3411,-4082,-5305,6162,-2962,1243,-8756,-24063,-23037,20612,-28079,-10972,-28530,-10238,21978,-317,-23192,5445,-10808,30214,-19497,11918,-20820,-31797,32570,31632,-20051,-23913,4768,-10911,-3978,9704,-2322,-32239,27570,10848,-15140,-32689,-30103,-969,-14965,-31465,-309,1983,13541,-1347,-991,-8095,-37,27315,16423,3388,-28521,22165,-18998,-20766,8465,-19254,6188,22537,15878,12252,-12841,-9906,31417,-3382,-2157,-12884,-12936,11579,-12519,-8949,251,-24761,-26674,15078,26659,28653,29836,20815,3544,-31918,31231,31480,-28938,25819,-1539,-21210,-1415,-26683,-9870,23361,-12799,-16111,8770,5398,-21103,24231,28754,26712,-11823,30543,28715,31849,11530,21584,3391,-4476,18574,18903,31626,-4361,26
439,19186,26391,18764,-20125,13732,4129,-14919,-25265,-32627,16174,-17999,-9852,832,-6126,-20366,-26776,-13854,15591,-1084,-23258,27874,-32329,5405,-30044,19019,29839,19492,-32433,26669,-12410,-30345,16498,-14856,2902,-6874,-12215,30044,6109,-8917,-1301,-11412,16675,-17809,-25893,10409,25294,-6680,20612,-9506,-30628,-3372,26148,-16702,-21681,26055,23139,-17239,7567,-12718,-18253,-15398,7152,14810,24107,-2389,14649,23378,-27501,30933,15270,-27393,18606,-47,-13596,3799,18663,-25104,1285,-17152,1522,9315,25270,-22042,-31177,15248,32244,22895,12077,9748,-19275,9272,19274,-17553,18628,22561,29786,-2485,9876,27152,53,15793 diff --git a/tensorflow/lite/micro/integration_tests/seanet/add/add13_input1_int16.csv b/tensorflow/lite/micro/integration_tests/seanet/add/add13_input1_int16.csv new file mode 100644 index 0000000..16e0b3c --- /dev/null +++ b/tensorflow/lite/micro/integration_tests/seanet/add/add13_input1_int16.csv @@ -0,0 +1 @@ +-5256,8363,-31454,9773,16075,-7941,-16160,280,11815,23969,-28257,23251,28285,23011,-13859,240,1652,18278,-11739,13114,18921,-5676,32440,6143,1594,-1973,-10885,14657,25441,16195,-29456,12206,4397,5553,4797,15849,16825,15144,16886,18224,-17809,-16008,15544,-4161,-11128,-32128,1613,-28708,23860,30831,-12447,-4524,-4119,-13893,-27656,-8061,-9922,24214,-24060,30813,-8976,11099,5807,-27897,-7458,-8979,-15951,-15636,19178,12313,-12038,-20111,4082,17255,3760,-19328,-30773,-7679,-16789,25822,26486,24312,-17641,11268,-25365,29710,-32507,-21642,-85,-20104,-3305,10198,30439,28983,-16117,2324,18336,16251,-9667,-8946,-25532,-25085,-15287,-16377,31430,28730,32620,21956,19172,-16855,-15717,5645,24373,-16809,-10198,-20245,11441,298,-4261,-18823,-7372,-14750,-3561,13956,-29746,-12313,12007,3149,27068,21347,-31346,20231,-17823,22880,-4712,7676,-11195,-6370,32380,-7637,8322,1519,14876,-5855,-22157,-17536,-4821,11129,-9989,10639,-9657,14857,-20498,11986,-17527,-19554,25256,-6729,30056,-17887,18954,29649,637,-22238,5371,8975,-18936,-11660,-220,3682,26793,-17228,-21
348,-14961,-3798,-20275,8159,-26452,-12223,-14630,31086,24655,-23721,-20466,19167,12454,-14821,-3774,5980,32085,17047,-3745,-21734,26750,26243,5150,9341,-24964,-16557,-31363,7390,-11543,-16288,7458,-3081,21052,-13698,29387,-7042,3813,-12318,2378,18822,-32117,-15992,-30613,-17701,-18207,7211,-29616,-7025,7560,16183,-25909,-16725,4012,10287,12353,4837,-32495,23950,-29154,-5602,-19799,26832,-2942,10632,31190,13861,-30848,-142,15037,24017,31843,16119,30558,11053,-28535,17510,-25305,-30382,29267,-24486,-24028,22057,-19310,23103,26253,-17656,-19221,-10919,519,30302,1116,-22473,27864,338,-22696,-4321,-19643,-5422,-26070,3597,-6122,29846,-5167,-714,-21893,17851,-3999,9460,-25954,22379,13189,10156,-10745,-12493,-30951,19052,-28686,18442,3185,-8821,14731,5534,24527,13992,-19715,-13067,-10045,-13337,-25844,12832,30565,2605,16124,-12972,28214,16755,2611,11576,19380,-20839,-6132,-19447,-29136,-11859,-179,9488,30653,9618,-8774,23248,-15818,14924,-7107,7510,-27001,-29722,-14673,920,-21899,-31519,19107,-2727,-18225,19943,31189,19144,31803,17440,-32741,-32376,18500,8620,-23456,-22336,13558,15617,-21976,7652,877,-23058,6178,6884,199,31316,-29524,8232,3050,-10904,-13840,-7513,-4979,31680,25159,4197,-1426,29291,3191,28287,-2313,28418,-14578,12316,12506,-4811,3447,-1115,3998,-24798,9159,2959,-7416,5147,22123,1952,21662,-20352,-15863,14388,975,-12958,-14024,18480,-7903,-15531,24217,-20070,18866,-19637,1891,14782,-1707,-25634,-24481,22789,-28236,23930,-15002,12365,-18438,15750,-21765,11971,-4620,-17762,5472,-23893,-31290,-29213,5026,25786,-3965,3769,-20543,8467,29539,-15130,-4932,-31707,32492,-10299,-11274,-859,-26217,-27843,-26412,-30723,-1647,19029,-19765,30798,-15467,-1580,-27916,27216,30283,-9307,26502,11076,5854,12124,-25102,16871,-13434,-11038,6635,20524,-21281,6119,-30252,-15953,-9856,-11226,-3718,-764,-20370,-14351,-10277,-31385,16643,16970,-4539,26889,-14343,23358,27667,28258,-10881,18713,-16612,6925,22561,15001,-15593,-12671,3138,11227,22061,-14902,30577,9787,22966,-29435,5403,-
28279,19934,17052,18803,29280,11357,-9827,-32375,2144,1588,-28544,17093,114,19886,28533,13855,-29409,-16164,13999,20802,-28500,20154,-14699,-28661,-6505,14588,-29751,-26073,27070,3619,19298,-21488,-17861,-31589,-2415,-29609,-24254,-27642,6352,21902,1561,-27150,8384,28843,7299,-10976,-27774,6354,-2824,-26872,30015,-321,-19543,21425,-11067,26059,-15649,-1872,-12243,-14133,22250,14329,-20073,28750,16415,-9083,-3144,24407,-22389,-4458,-11132,-32109,24522,18370,24273,-21922,9434,-14748,-8802,-18031,29801,-32256,4458,-9979,-14144,2386,18952,17378,23297,-2724,-18304,28648,-23374,-26644,30756,-15017,25543,30389,-18535,16862,-1376,26956,-10588,-22840,25893,-7762,-17010,-8423,-8269,24992,-30492,7475,9959,15533,1358,-2494,30714,-27299,31818,-31056,15303,2043,5577,-18053,15467,20419,28771,-23590,-1091,-335,3127,23096,-31953,-31246,9447,31653,13044,-23220,909,-25679,6944,20163,-12710,11369,-2973,3857,-19296,16495,-9443,-24445,-2134,16883,-29283,-611,-27254,-11540,27359,7780,32191,-10321,31045,-2688,6769,-26450,22877,-14950,2054,-10141,29071,143,-18603,12948,17305,-27238,-29703,14375,-9132,-4082,30655,-16192,-17105,22857,6340,11462,2793,7332,29372,-10487,12397,-78,24724,23967,-19139,-22297,11865,15735,8754,19305,-6143,-8042,22846,-22962,9525,-27028,-5070,30015,12090,6616,-7088,7540,8726,4128,12465,31312,28771,18707,-28337,29788,9134,-24126,25880,-17554,7456,30616,24253,21191,11545,17315,9945,-31190,-2373,25029,20323,2206,9387,3452,24370,-20537,-29931,-25983,-29908,-2507,16014,-9637,-22952,-15089,-24784,10170,-32275,-2571,-10581,-28949,21012,-16249,-29796,-4190,-16289,18304,7689,-21116,16857,15995,-26940,7747,-1317,9120,-25703,2497,-29844,-30842,-21847,6761,-22288,-14412,-3825,-15202,11948,29624,-3275,18731,613,6318,8164,1788,-10423,-426,28729,-23296,-9244,2161,-31589,26754,10312,-24282,-29977,-11701,-11982,17903,21488,-18016,8753,-28222,26020,-5588,23321,-31976,-14438,-19119,30641,32759,-9808,-14585,32413,-28084,-2603,11540,4917,-21050,-22939,-16392,-30377,-22330,-13266,-5273,106
56,-9140,2198,26801,-17989,31949,-25723,17454,23219,29118,18126,-23234,16009,-13614,11796,-28893,4289,7620,28506,-18714,-13765,12706,12085,4365,-1387,24020,5728,-29724,-29838,19211,7287,-31364,11955,31564,23094,-723,-20511,2086,27572,-26209,21619,11293,-28073,23035,-32726,31229,-32662,-20853,20732,-13677,-26224,3417,27264,-12397,-8894,12083,19960,28974,-336,-23958,-26828,32578,-28044,26613,23760,-24397,-468,4503,-31986,23917,-7030,-12055,8895,13265,32478,32236,-30922,16039,6505,-1752,9223,13213,-18333,30673,-10986,23852,27247,28493,28726,16940,20056,-22179,2864,-16460,16086,-27202,31552,-10301,9589,4613,-22527,-19994,7112,-11631,31982,5934,-29702,6299,16962,-5321,-9307,15900,16925,2059,25667,-24698,-16527,-13900,-26861,4366,-946,24300,25460,21370,-9863,29004,-14801,7333,2244,883,-6890,26752,29386,-3600,8226,11289,-10972,9971,1274,-17926,29412,-25218,1462,28280,31905,-5376,13387,263,27341,15604,21610,28094,24953,4458,15088,11642,-5266,25996,654,2255,-8667,2932,18277,20884,-12540,-21686,-17622,6315,32505,4401,18184,11068,24711,-9070,-18343,-7004,8045,8995,15084,16886,-12351,-15944,-14376,-21449,-9341,4168,-24116,-11145,-31928,-20056,12569,-11202,-32135,-15035,26077,-12370,-11811,4412,-19252,-7513,23896,-21821,-22334,167,1816,-17479,-6901,-68,23093,-18366,-30462,-28461,14634,2726,-32047,30865,17716,-12156,-29919,23365,-19887,17130,-16101,8058,-24975,-32200,-28138,18214,7272,24424,-6812,1997,2688,-14441,-7232,-3964,22446,30467,-6551,-24066,2035,-8545,30680,-12726,-23827,-6630,-26479,-8830,948,-7439,-13165,-27551,20314,-13938,11590,-15336,24743,-23687,23468,1925,15045,-22213,-22736,-19620,-1411,31147,4450,9715,-26795,26289,9794,-27597,15540,-6124,21836,-4734,-13370,-3358,5809,-1306,27171,-12869,10594,3542,-9439,-28368,-13663,20311,-15827,24149,31512,32621,437,-5724,24168,-26930,-2221,-30026,-30346,-6362,-24755,-18237,29588,32207,2937,17324,16167,-24085,6008,4746,-6200,-7586,-31976,-26189,23707,16169,1186,10574,-25205,-22990,-17178,-21854,17987,4721,-15071,7493,-3813,-173
11,15118,-15463,12764,-27268,4796,17334,31679,2959,20413,15727,2113,13835,-29713,-1854,-31113,-22976,16963,-21587,-7243,21361,-25331,7456,-29343,18082,8043,6426,-19903,10626,-8318,31473,-21405,19163,-32694,30268,-31618,12998,5721,10964,1452,-16407,3633,-24847,21478,-31919,19191,13528,21458,3794,19757,32195,-2409,28484,-24964,-27904,-6336,-12345,27021,-22709,-14573,-18500,-22278,-4023,20389,32081,-4739,22221,12457,26201,-13313,-7734,-22511,23796,-18547,-7473,-1924,21519,-4052,12002,23105,13968,11227,32018,-18389,16842,-24806,-655,22308,-23885,5973,-2124,7942,8802,-1800,-8262,-31579,28567,1840,27301,7739,-21138,13447,2315,15195,-26203,-19296,-25514,-14764,947,2147,26592,19145,-5574,-22320,-20411,-27351,9185,3671,2284,16322,11553,-1745,-6533,30690,16961,-4179,25882,-3787,5755,-17675,10541,-30195,20146,6731,32665,23158,24396,3176,24018,-7467,3047,30233,9118,9923,19868,18539,-27820,22107,8572,588,-1790,-13007,10861,-11213,17691,27454,20777,30414,-11877,24593,-8529,-17701,15431,-24082,-5130,-14060,-6610,25512,-28785,-4836,-4417,-1407,6947,21851,14408,-22449,-10807,1896,31012,7801,32231,-7142,-900,-5041,262,27441,26360,8770,27988,32681,-11677,-20527,23522,-24003,-7860,24035,-32729,16653,-210,-3469,-31893,-9634,1919,13147,-2139,-30536,-5518,-22615,-11784,21035,195,5870,21391,14387,-13509,-14607,-22816,1751,24271,9408,16684,17348,26779,7484,-25480,28525,-32597,7859,-18972,21399,3412,-28041,20767,-27670,-30186,8280,31322,-7612,-18605,3921,6895,-25148,27955,-3951,-22000,-18267,24,-21388,15888,-4327,-12729,5905,18302,-29735,9123,23448,-22100,20450,-11118,-29026,7005,23857,32063,-13348,16453,-3431,25978,-25039,30327,-7858,-7244,24760,-15681,14771,2288,26407,-13131,-2599,19129,-5337,20456,-17002,28525,31004,17037,-10211,16435,-13975,-11661,31712,-29914,-22865,-21780,19045,-29766,31763,-16545,-8479,13441,-24416,-22331,-12401,-10835,25875,13353,-21573,11570,-1652,-31452,30764,28067,-20032,23081,18865,24742,-13037,-10457,-6977,24318,-11726,26288,-16282,12104,32614,-31625,18756,-2151
0,-26262,-16821,-11473,-11774,18290,-27041,7724,24038,-27908,-27461,-16430,24858,-13986,15821,-23567,11687,-23093,8220,9451,15730,-8458,-31729,11662,162,21739,-45,-27160,27588,26010,-16708,-30531,-17429,1771,-26449,-15451,7810,16369,29055,28136,20438,-8730,-18604,-26248,-22178,-2749,-28495,31620,-4336,-1339,18532,27610,-26365,-11663,32674,-29535,13968,17395,-17836,14420,13737,-8706,25555,11370,24091,9236,-31010,-10559,-25947,-27472,-8401,-26986,-22551,-26705,-20714,-9379,-17610,-3835,-16184,1140,3393,-4563,28031,-4271,10900,9374,-15907,-16529,-3482,17582,-20828,23021,-6301,-32265,11379,8619,10362,-18043,24911,-31166,-14275,2200,5647,9720,-6575,29507,23538,-6657,24158,-30118,4900,-19982,-15843,23734,18759,-1542,-30280,-20003,19710,5486,-20869,-17164,-14594,12736,21543,23302,-11982,10248,-1915,1727,12448,22143,-9131,-24989,-16444,-32540,-28182,-13068,26600,-27160,-396,-8375,11155,7224,-5381,-184,-18905,-18559,6646,12657,20481,19691,-6360,5805,22402,30310,-28768,-23167,-31269,-5885,15701,11034,-26248,31636,-17438,-29056,-31028,-1157,-27579,-19900,-2623,-15752,25058,11591,-14054,5299,27303,20550,15323,6069,25659,8791,3534,-10027,31220,12235,-19195,-6297,-5357,13317,12061,973,-9914,-32390,14278,-15035,-11554,25263,-28902,744,-28793,1052,-30916,32733,-367,25607,-25318,-2629,-17159,-23511,15855,-30352,-3149,-25048,18002,31481,-19788,-20399,9555,-19298,-7801,-3762,31208,-4490,-21954,28278,-16386,5044,15196,-25313,-9638,30104,-11589,-23674,31208,-21089,17410,3287,-27197,6968,-10048,7130,24417,-20608,20782,-27161,-31080,-11311,7042,12111,19854,3092,-17466,26204,19526,18365,4944,-18242,30110,-6432,14117,12993,-7114,-4538,27043,22668,30956,29793,-15696,21472,1924,-15827,-91,3180,-27774,26276,29528,-1776,30604,20086,-10733,8201,14311,18713,-1163,5505,18167,-8679,340,-1857,29138,1627,-6386,11513,5613,30594,-12159,-9520,-14650,-20180,-11430,-22877,5266,19951,11331,-8813,-13208,-1138,-10375,-16136,-29636,537,25481,-32436,-13635,28092,-28736,13826,9403,29549,27699,-26047,-25070,-312
98,3389,-23407,-2401,28628,-6453,32742,29620,-30512,-11354,10410,-3515,13461,-3886,13532,-9407,-28534,-15778,26729,25636,20617,17577,10404,20101,1720,-14305,12788,10379,28475,-21483,-32246,8506,-22086,14596,-15836,-15344,6026,26424,-12939,29688,25549,712,-2409,28652,17953,-10511,20583,-898,-18794,6691,23591,31659,-5923,27905,-17185,7659,-6963,30557,5998,-31002,-2716,-24379,24629,-334,-24213,-18106,21331,-27421,19465,32575,20701,-8099,-10841,-17403,-27457,14455,-6642,-14030,11091,-27435,20192,3219,-8463,-18860,-19773,-26977,-17137,-1779,23897,3134,1010,-22590,-23380,2615,23841,22141,17987,28912,3288,5023,9809,25909,-16366,-27712,-27204,-21190,30475,4051,-10820,-14073,-8338,-8943,3126,31218,2727,-2574,-5657,-683,5704,-6722,-11666,17431,714,-14941,3223,-32482,-6182,1892,-29623,3846,16692,4538,4457,9914,3797,-20786,-3955,-27324,-5614,-2282,-7338,318,25871,-13378,-7622,21527,2406,22150,-9019,9083,6783,-14849,-2333,29376,22500,-5615,4539,-3922,11216,32584,19005,30283,-24229,5315,-11803,4110,-5027,13799,-20837,-6531,-20008,-4338,-31963,-11364,31751,-3040,-19926,-30810,-25995,-23685,-8361,9671,18841,-20561,-9188,15706,5183,22698,-29025,-12329,16355,-31899,6391,-26260,17279,7752,-11360,25391,-4850,4564,-20771,25203,-3930,-5292,-17501,22004,-3993,22903,-1878,-31237,-13402,28746,8875,19893,13345,619,29235,25896,-15344,-1885,21005,5243,15395,5855,-20304,-13900,20899,-6460,11654,-13360,21731,-28122,77,13993,623,11992,-32357,-29758,18459,32234,-12320,-26367,2441,8802,30797,-3687,-29556,-11807,-27625,-32117,-13223,32575,-7345,-16726,-1930,-22937,-13121,-4553,17818,-17200,10590,-24060,6221,-28651,-3190,18518,10451,1247,-26062,-19655,-277,7196,-13639,-512,-28567,22189,-11018,-13373,13333,23355,-8162,-8549,-15826,25697,-22528,8844,26819,10838,13556,13295,1665,-13857,24951,-31482,-1352,-28235,10783,31141,3247,-30602,-26443,17493,16647,31227,18811,29494,30125,5390,645,-21163,22125,-17259,-31654,31915,-32023,-4726,-11950,-26382,7668,-10770,21503,22109,26893,-20053,-6579,-27359,-10996,17
309,-29329,-21656,-5264,-14070,-6356,24044,29882,-9262,-8783,3667,21689,-31398,11365,19903,-27363,2230,32429,14122,8631,-13161,22485,17184,30875,8275,-1080,6906,12114,-5582,-1216,-931,30570,7263,19049,-30563,4803,4972,29728,13350,-20302,2290,18374,6987,-4619,28192,24408,25237,-27619,-22510,18741,31338,-5848,4957,7369,18896,13647,23729,-25375,-26926,-5080,-23014,-8236,-8555,-25848,8812,1801,15939,23424,970,-27313,-29713,-23638,-8785,-9520,-500,27511,25733,18395,-9678,9715,16912,24935,-11651,-25057,6887,32088,7921,-8081,9802,-12388,1172,-5828,-23006,-8864,-16986,5919,-29861,4343,24713,-16999,-29487,-104,29371,23833,16217,-25891,872,468,16643,-3446,7976,-11621,-22943,12486,-31378,8366,9488,-14301,-8733,-7064,-7464,24469,-31034,-193,-28669,-6996,-983,-15374,-1489,-9552,25503,-6836,-23094,145,-12218,-18686,32667,16825,-29593,-22324,-21065,-30882,-22399,10566,27327,14812,1282,-7856,26146,12971,14192,29384,5271,-15226,-11987,-27342,9491,-7931,-2870,30917,7924,-14121,12571,-10,-2758,4858,-12181,-21054,9516,-26651,-1382,-23122,-6004,14502,-27534,12472,-11949,-6555,22852,-15142,15076,-22436,-31285,479,-26689,-23024,-25428,-31642,-3814,-11078,5962,3387,15892,-22332,-5463,23439,10838,-9000,-16916,-15661,-27392,19937,27798,7646,17511,30737,-5872,2243,24084,1531,-17342,-9522,20350,14038,-10507,10498,-24272,-16874,-17105,15572,-6644,529,15020,-12486,25230,-32006,-740,28033,4526,485,4852,17955,-18380,30589,-28246,-8669,1218,-1171,-22104,-22200,-23341,10087,21644,5348,-8543,-16010,18314,-8763,-24673,-31498,26347,-10832,1296,21473,-21741,31462,-21678,7518,661,24283,13459,-13432,16785,-370,-30764,-7222,6124,-18835,-19433,9596,-8109,31673,-31287,-30218,27623,19162,-17533,30627,-16112,15823,31166,-74,17372,-28645,-8267,23283,-15605,-19876,-22881,25473,20603,-8475,-25211,9451,31371,-27712,-15285,-20258,31317,-28449,15662,-4705,-9494,9387,-18932,-18190,16379,26579,3881,-30570,-5828,16964,21025,25407,17897,31948,-8900,-27302,-1430,-9656,-24024,4770,-31311,-7149,15512,-24595,22300,20130,-30
21,-4892,-1115,13840,-5400,-2914,-28844,-31968,18793,-30397,-29818,-20074,-2434,30059,7450,-21884,28552,10783,11619,-9826,19514,12359,-21114,-14866,8944,-23491,-23512,32631,-27714,-2095,8701,21408,-9107,12827,5684,7420,25431,22789,21044,-17003,28610,-1136,-2590,-27282,11677,-13098,-12854,8962,22930,-25868,3549,8707,-24951,24582,-2804,-27853,-12894,-31995,-29682,29501,-2223,10262,-20662,-20708,-24760,28497,-19692,-18470,-13748,-32605,14926,-23376,-31620,10409,-19767,32721,2310,11882,-13170,-4863,-30499,-24308,6548,-4611,32388,-19062,-31458,-23554,5533,32418,-11942,-5609,-15941,11194,-18197,10761,4914,20881,25410,-31906,7055,3410,16639,-16854,18405,-2751,-22852,4720,22719,26669,4901,7531,-14840,14411,32401,14681,23147,4218,31114,29191,765,8887,-30617,30190,-637,15411,11354,1677,29658,6496,-3776,-27329,-22556,-4902,31782,-7939,-5698,-14150,-21373,32595,31467,-9882,-22484,-25362,18471,-11379,-5349,-31419,23567,-4872,5205,1380,-26753,3367,-2085,-7084,-2608,-11074,-26182,1542,-5428,29811,-23622,-14926,-7255,-7308,20908,-5048,-21251,-10056,24194,4801,30583,5063,-13543,-5670,26927,28764,-10188,7660,22336,26879,-12669,-12671,18276,-6559,-4587,13680,13156,13777,15311,19587,15453,12348,9892,-29400,18485,-30221,-23507,-25007,-24698,594,1190,-24450,-2472,-621,-28407,5653,-8389,17321,-28827,-11059,15814,-1564,25199,-1515,-7505,15537,6656,-12749,-9184,8259,-19824,-30216,-13010,-18,29783,16693,-14036,-21760,10711,-28411,32601,12418,-14222,27682,-23880,24818,19535,-10376,-17130,-18633,16212,25964,14614,13992,-22294,16561,-29510,6363,-4547,18326,-19895,64,-18317,7077,16863,25662,-15433,-28602,24638,-3840,9900,-5969,7795,-28246,4861,-28019,-13640,-17762,-2002,16503,19442,-10461,2563,-322,-10837,31333,-24927,30894,-4222,3039,14332,24850,30119,1516,13769,-12527,3693,-13042,-24299,4427,25569,-8158,18101,7546,19185,-4950,-21003,-25970,-10402,3783,-17397,6971,-19437,27284,8834,111,22808,12861,19281,-8230,-1318,-28487,-3428,-25484,-19214,-14210,15262,8728,-30482,-2380,430,23072,13346,-12236
,8785,-15344,22319,13115,25281,-1662,15494,-15465,20004,-28257,11520,-13299,-19294,-9443,15106,-3127,-14866,13629,-121,17159,27693,31821,-846,-11209,-31878,6647,-17436,31968,2527,19375,-30158,30894,23309,-30750,26302,26530,-11247,-231,-26094,19317,-1655,31980,-32228,-24811,14416,11906,-7039,7741,31012,22083,-16949,14378,-22405,21617,22431,-22325,22826,28522,-22162,24009,-8420,-2890,9645,17966,-22042,15690,9599,32702,6891,4568,-21834,-8165,-951,-9415,-27350,6454,17685,25132,2064,-25969,23940,13545,-9618,31891,-23603,-19239,-3921,3156,-1476,4078,13885,-17412,-22746,-13128,-31328,27476,-3932,31569,27731,-15224,1977,-23131,30090,15854,-12909,32440,-724,-1238,-31602,3583,25164,-30427,5582,28901,-382,16707,-11261,-32106,-16824,-11546,-26412,10725,21501,3004,-3767,-21233,-3642,21821,2174,-10747,-25279,-31868,-16068,-1697,1898,-24281,-25760,-14312,-8435,19973,32452,10141,-32522,-24037,5458,9750,31120,3483,20104,-21100,10945,15691,-31551,-24058,22235,-4960,25279,-17438,-22118,19906,18376,31489,18823,28830,30900,21203,-14945,32678,-4038,15864,8353,-11511,8722,17703,7778,-20960,-2106,18222,428,-23206,-18525,16786,26958,22790,-16528,403,-22556,-24218,666,-17933,-6504,15293,-31565,-1769,-8223,-29154,17094,-19721,-11058,-7546,-14825,-12453,6737,23253,3726,-23107,28046,25020,5473,-1478,-1579,26731,22777,20244,4335,-3860,-13105,12845,9600,-14208,-20223,-21774,-2004,22214,24264,-1315,-23047,-22183,-25906,9112,17230,28455,-25501,-4471,26285,22260,-29044,6928,-4174,-22257,13694,30114,-3904,30704,24771,9684,16617,26626,-2309,-29897,-23867,12941,4320,11776,14151,-7885,-22044,24498,29331,-11142,-4723,6852,-10851,18843,26507,29678,32049,28742,22516,-8868,26415,21547,15106,-13502,342,27469,16217,-29424,-7242,13230,22889,1031,18017,-10706,-262,28206,-27714,-17512,-20547,-28964,22392,30269,-13253,8052,24531,1857,17780,-16452,-16796,26327,8559,-32218,-16079,23314,5948,26273,-3523,21731,-29275,10246,10873,-8099,4837,22747,8897,-5435,-13016,-4688,-6030,-31930,4355,-4339,-1452,11202,-1004,-23060
,28383,27328,-2346,20502,8643,29295,-3639,-18037,-2624,13253,20168,-7710,5380,-20798,22424,-8491,21550,8868,-17754,-27743,-17974,23011,5260,-1600,-8618,32509,5025,23185,7712,1334,-12505,-13540,20931,-16856,-3657,-23276,-14094,-19343,9352,1151,-24219,-10135,-5482,-27116,29759,32304,-8259,-5605,8412,2556,20223,-28586,-16453,-18534,23531,-16408,6581,383,-27653,29335,-5661,-4485,18600,-31378,16053,-5424,6281,-10014,-5940,-17430,-2159,11636,-15313,-22005,-9237,-27026,12428,-27128,-12323,-1725,-12243,-20443,-26806,95,21925,-9665,-3400,-17217,-21584,32390,-8656,6786,-4700,22171,-11452,-12070,3700,-22556,-8278,-23444,5014,9495,-15633,-25437,-21134,5917,30694,30839,22050,11828,10049,-18787,-7824,1453,-31940,-14954,-32089,-3176,22576,-22577,6707,-12887,-16179,23200,16460,-12868,-4515,12681,-912,22714,27123,-21144,-10253,20437,7204,7927,3697,15207,-19917,19847,24331,5233,29809,9129,6900,27477,28023,27991,-22560,2997,30778,-20262,-24474,1464,-31350,30256,17773,10990,-8606,-4399,32724,-326,29137,20275,26350,-23849,28430,19693,-5361,2083,-27657,-11955,26840,5110,5091,31172,789,17062,-7778,-4590,-15399,27226,31762,-24165,16921,3087,-19402,-8277,-13572,-6641,19264,-31413,19871,3681,14378,-18284,-26076,-31509,-26622,-4891,-9458,-12485,-5995,-31902,-4772,16271,-9101,-2249,26554,18772,31555,-11614,21854,-21921,22038,23669,26003,-10411,612,-22924,-9392,6906,7367,17358,28810,-11357,18450,-3936,13602,30994,-10056,18088,-10523,15685,3926,12412,-2247,-29455,-25546,-29667,-22711,3811,23630,-22429,30922,14492,17561,-26675,6911,26609,-13286,1820,-28659,10863,-30346,-7026,21321,-30257,-2955,-30311,4285,-19763,20937,-22609,-26770,12866,-24909,-10601,29634,-30148,-23758,-26722,-19223,-27045,28447,25231,-331,19216,10538,22893,32107,-4245,15628,12130,31068,-31966,-5990,-31155,26602,26772,-2840,2701,23574,-30877,31684,-6251,-28839,-2012,25131,-1139,952,-2781,-2201,-28646,7295,28171,1572,-15752,-22492,-30971,32716,-11281,11823,12280,-19589,31014,-287,32634,4161,27546,-21420,-3326,17750,8357,19111,33
17,5309,18223,12627,18218,6804,-24912,24361,-25472,3485,-24972,28971,32709,-14678,-15236,-7132,-30350,-1064,8724,-22620,-13211,21610,22578,-6085,-11091,-31941,31942,-3548,-22267,3171,19062,-20699,-3135,8805,-20839,-5457,-18998,13335,21856,-11952,-28778,-6671,26358,-17674,29780,162,-18714,-15693,26103,-7574,3649,-31369,-28538,-21472,-12573,-15803,2113,24469,-16065,4420,-26765,11023,-23652,-21923,-14806,-6309,-22379,-19716,-32716,-1624,9793,18785,-16177,-227,9351,20657,24239,-23768,-31119,-20758,32734,20173,7292,-8199,-30908,3518,-17435,-4884,30895,-22519,19004,-2587,1246,-19030,-6350,27804,21362,13314,-15714,18497,-31725,-25048,-15149,29022,23000,-29025,-22197,13209,21898,13955,9255,-24782,-29131,-25205,12503,-13155,-26269,-30215,3804,-25600,15713,-13867,16858,28461,4873,12839,-23437,26241,25811,12103,-5483,8308,-10512,-25597,-417,-15404,28957,29320,-19879,-26151,-21290,-16912,9020,7373,8648,-18730,-23415,-6500,17462,-30148,12652,-16281,16682,11256,-3638,-30037,30666,-20527,7478,19898,8967,14631,-11145,-19521,-5018,4873,3819,-27202,12365,120,11004,28503,9141,16833,-27365,22298,20567,-3604,-32650,22781,-6273,704,-10399,-18292,11986,8132,-12927,-32756,11181,20073,12578,2902,26835,-16241,17930,-17237,16450,3342,-12614,32514,-26836,-16973,20421,24328,10196,-20809,-25746,-14553,-1541,-28357,14003,-26687,-1961,787,176,31130,24877,26509,17665,-26399,-5393,-8588,-6034,8759,-7708,26180,29492,-22135,-26857,-28760,18190,-22640,31321,403,-6650,17945,-2520,-18181,31267,-594,-2723,1368,-29908,-16555,3943,-29164,-14227,-14117,-24639,-18916,-13783,-21889,16556,21698,11881,128,-425,16939,-15849,-32285,31034,-19432,12862,24370,32125,-23758,-2641,-21920,8648,-17844,32557,-4803,-17957,-8883,10568,18581,22266,3207,31272,2827,21595,-16812,-18103,8753,12642,7400,-28643,-4062,-15243,-28569,8141,-3738,8337,20672,-14626,4677,19893,-30370,11647,19197,-1244,11821,-5694,5947,-22186,-22514,21860,-6778,-12740,-21847,31352,4427,14376,-9787,-31648,-9241,-4974,19156,8365,8652,4872,32467,4650,30351,19
924,-13898,-31719,-25148,6591,-19788,-11372,6561,20254,7700,-13302,5227,-32482,13452,29604,-798,30924,-14946,23700,22030,20412,406,-6066,17817,22243,-3743,-19628,138,26165,6109,-5108,-26060,20500,-21568,-11188,6413,-1344,8886,-28098,31163,-30282,7612,30463,12970,24409,-27613,26378,12565,26927,12638,7012,-11100,-20252,19650,-30361,2955,-17403,20892,-32678,18815,16914,-27202,15669,16009,30873,-15921,18848,23331,32490,-24220,10677,31271,22210,-24506,19563,-10967,30195,-26633,-12407,-784,17610,132,18904,25112,2828,-31975,28406,11652,30337,28987,21652,-6187,1309,18468,-18332,16881,12194,-20486,-16899,-19398,-30983,-10069,15934,23237,-31537,-3367,29201,21170,-31832,-4830,-13300,-7648,4429,-8929,11026,21636,16486,10803,21563,-30128,-24476,11722,-4813,4576,12135,-9253,24452,-10512,19104,19459,-4403,-31852,27799,-29213,-9736,30105,8880,-1022,30515,-12193,31398,14634,15199,28695,-17413,-26279,26186,-22552,-18357,45,-11057,-16057,-10101,-17814,-7718,-3294,-12379,10743,21124,-26727,939,-20984,-20460,-22445,-5935,-3335,12823,16523,-32119,-8778,-3982,-17999,30195,9757,-31759,10716,15357,-1927,7107,-13946,-32386,-24139,-4939,-12536,-13060,-12479,-7256,30418,-6623,-17811,446,-12741,-24975,-12843,21774,-1696,658,-30485,29375,31550,23754,29647,1639,24346,15616,-22724,-19235,-12909,-29575,-374,-17010,-18266,-6400,-15254,-18144,-22524,9988,11354,-20030,17912,-30712,-353,27588,-7554,-6501,-5468,-7623,-14261,6188,27004,-21602,-19964,-20499,-11371,23395,-26430,1919,-5329,-17209,14248,18594,-7794,-25709,1691,-29605,-23493,480,-21123,9362,-1181,28678,19489,-3699,13830,2939,26136,532,19682,-23888,28113,24739,16154,-10846,19832,-24081,24147,32058,-18262,-26585,-400,-3992,-13459,-8885,21926,-28281,25596,-28172,-17105,26472,-3717,-3436,-18401,-5856,-1601,-28309,-18793,17495,5826,-10673,-7706,7974,26781,1109,-32303,4925,29587,-15130,-16206,-438,-28088,11025,22948,-11798,-3740,-22834,8662,21242,-4814,11131,18388,-28558,-18690,-31473,-5408,24220,15868,-18760,-2933,18938,-3106,30150,20098,21763,129
28,2095,-31690,-17439,-30042,-8540,17433,6010,31697,20989,-25482,-18348,-13764,21606,17965,32330,14837,10672,10277,-5642,28578,6047,27329,-32663,16761,14811,-13351,-27658,-30771,20623,-23949,-881,-12787,-8089,18208,-7570,11514,25371,-14559,-27922,9986,-24687,-5145,2215,3099,-19229,-31962,9103,-3363,29866,25316,-6362,-4089,-7388,-10983,-19084,27097,688,-11289,25997,-12566,10803,2658,-15520,-30635,-20578,10700,-26310,-25703,-20585,418,-13323,-20273,22773,24273,-7484,12060,-29842,19352,-2109,-32325,8395,-9581,-9956,24261,27349,-3732,20632,-1485,-30367,3538,28377,-21608,14241,30995,-16905,-17635,1899,-4976,7301,32321,1010,-26220,13561,5719,-19378,-25578,-27768,17663,-30462,13277,1568,-4459,8962,23123,2403,-31325,5229,1944,-32594,-25376,13992,-18532,19527,-22012,-10914,-22356,-17576,15604,3975,-18758,4998,-12470,-1398,5468,-25522,13270,4548,-12177,23055,31572,19933,-22040,-25429,6148,12851,28980,19793,9162,-18713,27485,-18299,3257,-5651,-22359,-30273,-7720,11929,30753,24167,-13799,-32046,1335,23155,18373,-14698,-24066,-18844,3201,-16884,-11903,19638,-5518,30497,-23375,-7707,-31199,22586,10306,19369,-30433,-1676,-12126,31339,20086,9318,8016,20495,-5072,27446,-3890,11622,8906,-25463,30116,-4357,-14529,-23065,27440,-15999,4486,23288,-19305,12175,-11802,-13937,4757,-26673,4151,-29225,-25365,25282,-9102,-18307,-24630,-9964,9768,-14619,21478,-4468,-11521,-6744,14126,2754,-18357,-1019,3727,-28174,-22189,-1149,-31528,16235,26855,-23773,-4329,-12424,7885,7085,-32700,-4061,3916,-13508,31428,-21750,6431,7112,21159,19715,21400,-8198,-7296,4996,8148,-5165,-29414,26602,-32290,-24638,17112,-3002,-9648,2616,5012,-10041,-613,-22456,-2047,-9821,22594,2750,21849,15191,4193,16498,9777,-17614,-32361,-31041,-19726,-29223,-13703,7807,-31660,-17866,-14477,-6089,32464,25320,-9297,4117,-25503,29143,-17696,-6147,-24752,-31456,31933,-15880,-11336,19710,-15039,-2191,-12657,-21223,8011,18267,-24146,28193,31652,-19003,27368,-11183,7343,8850,-13863,2316,-13252,14103,-7909,-16576,3699,-2469,-6957,-27233
,207,-29688,4619,9363,9993,27247,31568,3963,25133,-7592,31032,2494,7270,-1956,-17356,14154,31136,26652,28523,22565,29411,25127,19428,-28886,-31743,-21517,-25435,-24647,-30333,16719,-30747,20638,31611,30031,-13828,12412,21532,31480,7455,21724,-17824,16496,-27560,-23232,-3998,-838,6099,26786,1938,-29235,24560,15478,-21547,-32154,-20802,-29612,-17260,-20444,-2704,25619,-29769,12605,-27774,4632,25620,-30106,-22832,-8440,-29768,-10401,-11730,-23154,13252,6719,-15654,1961,-28898,-20689,-422,1188,12694,-1360,32557,-3107,12338,14113,31918,19351,20678,3019,17305,-7053,-32757,-26716,4610,-5723,-5254,-21129,6819,4668,-32768,14888,-6846,20257,24892,22154,23710,11805,16762,23848,4352,7755,-32673,13121,1741,-17313,26699,27450,-11904,-25211,4691,-1659,-11959,-9515,30724,-1838,1150,-9187,18504,5422,32472,6141,1986,-17339,30248,-8101,-16414,17834,-16712,-17525,-25036,337,26226,-4092,8535,25863,-16904,5373,-17468,7006,8809,-5737,11998,25457,7511,-12241,11822,25836,-15823,-9337,-6821,24223,-22144,-20555,-27171,-25263,-8704,1081,28722,-25894,-14702,6878,-14770,14560,-10531,-12722,-29218,26871,-31506,-20158,-13800,22651,14929,17231,-21325,7709,11547,30631,-7101,29303,-30536,-11381,14916,-2890,-21844,967,14814,-22335,6581,-3686,-6278,-16824,-24029,14203,-32116,-21078,-19654,2134,-17186,272,-18534,20312,2949,2502,30816,15596,29043,-30451,-1394,19902,-16792,1846,-14264,-14365,30499,-7742,18090,6684,2553,-4278,-13807,10563,-27902,12871,-25660,1873,13127,-14569,24301,-29926,30432,11394,-16628,-27456,12632,-25861,-16313,19051,3579,-1158,9846,24366,26454,-25748,7586,-13367,-12300,-24283,13536,-13452,30652,32057,-19367,-20446,2711,25486,-26645,-21019,-3011,-3336,19376,17261,-16250,32081,-16394,-25188,20028,4307,9490,-24666,2630,-22240,16380,-14053,26267,6644,-11640,27779,19351,7832,-31220,-8345,-10006,-24170,-8305,32443,4499,21394,11041,-3900,-3005,-541,32158,14506,20096,16395,-18970,-28769,-28570,-3564,13708,-25019,28637,-25030,15484,-22157,-6960,-5854,20022,11249,12074,-31001,-32165,25974,-10
075,20784,12843,-3654,-29861,27109,32498,-9764,25422,-18966,16778,4123,-16476,-19463,1244,-18937,28022,-32012,25563,17560,21446,6713,-27732,-16342,-23571,8285,-5137,32729,16900,25474,17884,-7410,23133,-652,-9366,14651,12360,9990,-24852,30194,29706,20716,2838,14373,-22258,-7303,28429,16404,-29276,7455,-22954,25032,-21353,-23634,-14077,14767,14632,-7383,-3271,-6842,12350,28875,-1649,26223,25026,-22778,30101,13728,7144,-15058,19844,15473,-7476,15237,-18950,-29284,-18736,6427,-23344,-9307,-10904,-21646,-14971,-32638,-21312,30528,2670,-1976,-6704,7182,-28466,-29696,2904,-26840,-31824,-31952,10282,-5585,-9094,-26589,-9126,11705,26931,13248,19924,-20289,-19836,8843,22296,7239,-31833,1742,-20927,-7949,25053,15036,25159,-384,9443,23019,-10343,-22500,-13521,30435,-11931,-31260,-4005,-27076,-9962,224,-1887,10951,5261,-21927,-18046,-32074,15447,24995,-16311,-2819,-3612,-12814,2207,4116,7413,19407,27856,-17285,11475,16141,32438,12733,8232,-12462,-3346,25652,2632,-14952,-31124,28787,-10152,-14693,-30997,13879,13084,28753,-250,-22340,9106,-15531,-11468,8208,-25284,2889,-21887,6503,-28567,5001,6838,-21171,-32503,25825,-18849,22435,-1840,-17231,1319,3143,-7470,21678,-15408,6915,4206,29227,-14067,-3391,-16504,-26975,-24692,-9619,-31639,6625,-2794,12554,4406,17363,17252,-4985,28007,-24673,16038,-25502,14364,5019,7656,16250,-30813,-8769,5166,-14242,-27298,10041,10213,-3479,3329,32271,26123,7167,30772,-15078,-28849,-14919,16316,-29361,3597,-10468,29521,9142,-1284,7768,-21187,11594,-8335,-20057,20530,16352,25896,-28226,4978,-13358,-7768,23627,-16338,-28292,-24675,27570,-28813,-23609,-21976,23623,4470,-7425,19869,-26062,14179,-29204,26666,-32547,-6898,5860,-18158,7452,-20709,-17057,7949,-21082,17450,-17761,12729,7000,6743,-8582,-17484,-9977,25500,2125,-29307,19430,-27808,19226,-16590,-21597,-10238,-25850,573,19910,18378,7796,-8589,30846,-5516,21703,-20794,-27222,9843,12776,-3487,-15279,12954,-6566,-24512,-28523,3348,13535,12151,-11997,18548,7795,8889,-24848,24327,28566,-25683,15712,8736,2
7771,24247,-8418,5473,-3985,-6269,16694,30776,-28560,-86,29862,-1577,-10339,-3617,6976,-18392,9290,27705,-22966,19729,-21091,11981,-7781,-18829,-8701,-7202,9245,-2083,8964,9713,-17679,-32592,9901,20352,26074,13767,4909,-6507,8605,21269,25663,-28648,-9264,22490,1961,17438,-28460,1995,-23611,-87,11004,29438,-27162,26066,-30540,32155,11009,3986,-31276,-18815,1961,-31142,26809,7303,-31840,-9336,-23496,-6262,7239,25721,11087,-19263,2626,28176,31466,2523,4514,12232,-13469,13437,24274,-5517,-31764,-11984,-4241,-15496,-25955,14955,-25854,-26222,-31573,-16629,-4331,6520,-12115,-8061,13423,20111,-15009,26892,-14875,-7211,31542,-19312,16434,-30053,8858,7113,-26634,16180,6544,25304,8309,23501,24428,-28751,-32691,-3150,13310,4481,-1741,6856,-15816,31632,-29701,12072,-16345,-6984,31905,3772,-21030,16551,-27023,7576,-20835,-1719 diff --git a/tensorflow/lite/micro/integration_tests/seanet/add/add14.tflite b/tensorflow/lite/micro/integration_tests/seanet/add/add14.tflite new file mode 100644 index 0000000..b3221ae Binary files /dev/null and b/tensorflow/lite/micro/integration_tests/seanet/add/add14.tflite differ diff --git a/tensorflow/lite/micro/integration_tests/seanet/add/add14_golden_int16.csv b/tensorflow/lite/micro/integration_tests/seanet/add/add14_golden_int16.csv new file mode 100644 index 0000000..15364dd --- /dev/null +++ b/tensorflow/lite/micro/integration_tests/seanet/add/add14_golden_int16.csv @@ -0,0 +1 @@ 
+5818,5339,7886,26195,15074,-6176,19675,25449,-30181,-32768,21013,1270,11762,15626,-30883,-26969,6823,-29898,32767,8145,7936,22588,-2145,-10888,12716,31623,15528,5605,-4205,-22595,9070,-13319,8465,12031,-16442,1616,-14663,9218,4572,-32768,13921,-6311,32767,-23656,-7968,8426,-14310,-15720,-13322,-29136,10231,-3430,25391,9171,-5959,426,-9633,-32768,2826,28250,-1395,-32768,10252,13303,-14878,27881,18706,32767,-19138,-11024,4129,-12270,-18380,-921,-15039,-17032,-8977,-26228,-19751,-24563,10305,-3289,-32768,32767,-21229,-3536,23134,-24733,-19420,-12789,-32768,-21145,16387,-7079,14634,3281,13229,10887,4786,-30286,18785,-3905,-11948,12481,2805,-32019,3494,12797,10806,24134,10361,-16590,-32768,-25537,24712,10817,2981,9185,7945,-27372,-27826,-30720,5181,-5011,11083,22185,-14854,31299,16748,13813,-31419,-5759,-16555,-32768,5950,8751,-17460,9607,-26246,21309,-21195,-26372,-9535,-26220,30914,-22073,-3290,-777,-24245,6848,11576,-14168,-2027,6311,4248,1393,-3236,3540,24719,-22139,26168,-25117,339,32767,-6551,4858,-5440,14703,23684,5478,27810,8243,-11232,3407,10811,-8191,-26200,-4991,-2180,32767,-5243,-23384,8723,13929,-17838,29627,-4863,-18496,13093,-27136,22879,32767,5346,-18014,13283,-4563,28611,16839,13001,23363,-12484,-32768,-15507,7872,16727,-18823,-32768,28078,-17311,-15648,-32768,-32768,8862,32137,28110,-7845,22531,-304,28760,1049,-23177,-32768,-18089,-23253,-5767,10363,32767,3593,30158,20914,27363,24486,-10948,13448,-10629,-28722,-251,14346,32602,13084,-21456,25630,26600,-9082,-15298,4831,-17558,-29404,32767,-4044,1839,-523,-29713,12895,2467,19601,-10838,28673,-3990,19739,24227,-8585,16383,1594,-12791,-11316,-5124,16763,-1379,-310,67,-12752,12915,-7707,-32768,-32768,-10362,-11644,-5751,18597,13366,-4461,12907,25669,32767,-19895,-14235,25641,32767,-32768,-24525,23982,12499,20085,-10695,4660,-5308,-1916,-19602,-18317,-32768,-7744,-8965,6308,-1823,-8481,18318,1730,1580,-9492,-28467,32767,-11039,-11724,-26242,6485,8178,8985,12359,5581,-21722,1520,10817,-10752,839,27120,-16137
,-13343,-6955,-10119,2632,32767,17295,-14111,32645,1919,-8786,-14951,24529,-19315,-17813,5789,-20152,21081,-15848,913,-25037,9575,-18033,19824,32767,-1101,-21640,-13540,-15300,-2883,15503,-19566,4861,-14765,21366,9084,-4355,18420,-15870,-1401,-21897,-9347,21008,-18650,-4526,10153,-15588,-32768,-27435,4252,17339,-22187,-22585,8339,6601,10241,13537,11794,-5783,-11091,5471,-16314,-11273,-5072,3204,-11124,-4648,3861,4844,10757,-2211,9348,-3394,-10362,23869,-746,3894,-4378,-7866,11365,-26570,-27335,-246,18446,4264,-2536,15876,-32768,18275,-973,-1848,3844,20969,19186,31177,2398,6519,6177,1063,-12382,7164,12018,3373,2307,-10826,-24594,-8837,5375,-16678,1239,17534,-32605,11550,29541,1520,15187,13054,5418,2049,-13331,11149,20957,32767,7437,-8868,-6323,14558,-28041,31017,-7054,-32768,14659,-25902,-16529,-14701,23164,-4469,6330,-10644,-3090,-6597,-22902,-5404,10426,-19227,18187,-4628,3488,26736,-370,-22561,-17396,9481,1101,7685,-7359,14523,1972,12534,-15221,19775,-6965,-24649,-31500,-4616,-5053,13028,-14364,6180,14848,-15837,-20567,-28869,20664,1168,-14366,-12969,32767,-5781,7878,-22073,16877,-30380,-7327,-21058,-5681,-20529,-522,32767,20340,-27729,-1431,-15117,-19805,-18578,-27995,-9115,-13015,-9756,4267,-11431,8898,-3431,2562,16367,-845,21173,-6944,-7434,13863,17427,13167,9109,-32768,-18482,-20982,-31878,2422,-6799,516,-4916,-13534,-29030,14781,-24720,4819,27763,15172,-32768,5034,1844,11296,13158,-29110,16625,18866,-8230,-4463,21570,1182,-4665,-423,-1695,24187,13708,-29812,-1957,11767,32767,-7855,-14006,15278,-8029,11530,607,-1862,-4990,18576,30671,-32768,-32768,6609,26924,-12527,13601,12834,-12121,1042,12568,-14273,-20951,32767,-8699,32767,28909,-22260,-23332,7751,12730,289,15680,-1638,-13610,-3336,-32768,-4680,2612,32149,22881,1815,6572,-18997,-7225,-20612,17427,-2170,-8684,2891,-6381,-10959,-18333,11842,-1582,-15295,16049,1889,20219,13746,-304,5813,-21779,-16888,32767,-28978,32273,6764,7542,-10584,-28880,-32768,4848,-32768,-4341,-15033,-32231,17363,32767,-3892,6925,6485,-
12995,-8757,11319,-32768,6032,-26798,-1524,25530,13298,-10898,9716,-32748,21437,-24273,22048,18746,-5208,24147,-6403,-12963,23013,15815,-4350,21316,15006,-1049,-14107,-31573,-4574,14870,-19533,32767,-16240,-19523,-12160,2672,20633,-9858,-862,3582,17516,1477,-22603,27788,8344,-27581,14798,-16056,4953,-28925,22889,-6975,-28181,-8509,-14865,-7291,14763,11883,13089,-11005,32767,-8916,-10833,-9037,-18745,-29342,-1325,-17938,26150,-18550,-1335,1067,-28134,-10635,2461,14656,4485,13219,22514,11053,-19531,16492,2266,32767,-32768,5993,-32768,-32768,-6934,-4190,20749,5745,-17445,32767,-17241,-3003,1048,28261,11038,-2142,-3406,11144,32313,29692,940,-11326,-3400,-24250,14639,2124,-23557,-32768,32767,-12850,-6066,2552,9324,-2202,3746,-28662,-20573,-32768,1092,12501,12449,-4619,-27232,19464,5374,-12709,-26633,-22237,32767,32767,32767,-16666,1686,29919,-26769,-18428,11176,-12093,19865,27556,7838,20448,7370,-5041,28779,-12187,21387,-32768,9974,21626,32767,-32768,-25734,32767,-16943,-13459,112,-5797,-9163,6229,11225,-20682,-15572,32024,32767,32767,-16326,1776,-7814,6138,-8583,-14425,23775,-17978,-1834,21085,-16033,-7146,30769,-24358,-3385,-3176,32767,-28764,763,-12722,27322,32767,12260,20408,32767,8972,-18078,-26612,-7566,23948,349,-7858,14760,-9535,13735,38,-14228,8434,15535,28597,-11657,-5487,-4321,-609,25785,12192,4007,27538,-19330,-15144,17245,-5546,-32768,29216,-803,19593,32767,-18142,658,9648,32767,6235,11892,30894,-32768,-18924,13241,32767,-26870,25065,-25106,5727,2508,21083,22176,205,-2236,19514,-19493,9965,-32768,6441,-29315,27542,-32768,-19216,-1031,-17908,5506,-9328,-5729,-5699,28031,26602,-5780,-27531,-32589,8157,-3663,1405,-818,13103,-31734,-12065,32767,6941,2465,13797,11076,-6972,-2496,-31631,-14155,11759,-30278,-152,-27061,-32768,-10999,-9756,3891,-25930,163,4020,-30240,19513,-29745,-32768,32767,2313,-1231,-32768,-32768,-31228,-7172,-17445,10113,21821,-11165,-15332,-18955,31289,-5275,16067,24931,-2486,29924,-25370,-3497,8841,-21892,-28041,986,19820,956,-4842,-18704,148
94,12958,-5026,-15614,29548,27628,-11886,22821,-14879,-5285,6262,15061,14209,31953,21493,-31671,29067,21445,2884,-5026,21735,-6141,-11049,32767,6753,3009,2164,-24838,12521,-1895,-8391,5717,-1493,29628,-4127,-27800,25413,4596,-25797,27859,-16092,-28146,19707,11198,-17404,10728,28134,9665,808,-5393,13684,32767,-19913,-14006,7107,16275,-32768,1494,1789,29647,16577,-12150,1728,-109,-28006,-32768,21293,9954,32767,-12458,28526,9545,11397,-10790,-4919,-17591,24214,-890,25713,-12201,-7289,25503,-6067,-18162,6537,-19118,-14194,-6825,-5211,32767,-16238,-20759,-9125,2993,13430,27870,-6216,6771,-11740,-21617,-32768,-337,-26437,8891,24356,-13351,-23446,13279,4422,-150,-30660,31279,-12552,-22181,7342,-11011,-18012,-12189,-22963,17786,32767,-8867,-30146,-9707,20748,6078,-13893,32767,-22048,18173,-14747,-5196,-17165,12815,28102,798,-32768,-6966,27224,26842,-577,28017,28760,11096,-1453,798,-10119,11703,5164,23187,-11833,-19725,7370,-12747,-32768,-9040,-3766,20425,-5042,31587,-7920,2103,5953,23281,-17536,12699,15328,-32768,-2414,-18090,-11455,27089,-32768,5039,-17077,-5648,1971,-11430,15370,30308,2702,-4893,14736,-32768,15336,-2226,4915,22120,-24367,318,15708,2391,-8843,-22516,11156,-5764,-1960,7775,32767,-7399,-32768,-10499,-19429,-32380,12965,9515,-24143,7426,-10554,7286,17637,9819,5336,9201,-5692,-16848,8581,1160,9787,1315,-7174,-15392,26530,-4830,-32768,-3169,32767,-10531,-5667,27221,-650,7160,-1723,-20131,-8714,23828,-31451,29350,5389,-32768,-24781,32066,32767,-16359,6210,18288,2247,-29533,13021,-8074,-22745,-11485,-26190,-32768,27043,-22709,7972,-26099,-7901,874,32767,-5838,32767,-20665,19503,-11414,-1610,755,15132,2754,4369,6499,-17029,27619,-20364,32767,26049,11440,-16948,1795,24601,-23904,26426,25190,-13642,-24229,30992,3647,30268,18935,-17486,934,4427,-10670,19742,-30437,9090,-1013,12754,-18473,-18802,-32768,-15408,-20017,32767,-5127,7185,-3853,17633,15916,-17247,-25516,-3917,569,17165,4680,-348,9691,-4616,11664,32767,32767,-9491,-7753,26768,2865,-7267,-1293,28057,4611,-263
35,18716,23953,-12951,-16536,29483,-29583,-2958,20421,-21219,10936,-32768,-26286,9745,-10994,-22631,5596,4583,-18858,18142,23282,421,-5799,-86,-24446,32767,-18180,11578,-22192,-21154,-15309,317,25993,-9813,17385,-20367,-8160,18655,-21418,-9489,8177,8646,3592,-8736,22953,-7803,-19062,5555,-25923,-23033,-22334,-21880,14081,-1574,9987,-32768,935,-11566,-24112,-32768,32767,-23586,3012,2871,-544,-32768,-25034,6991,16539,-2454,4173,-934,-29522,-18724,6667,577,12117,-7184,19689,-25372,1816,20085,30755,16673,558,10943,-7996,6418,-5458,-247,5859,26271,-32768,-3678,-3104,-2190,-16441,680,3501,-22077,-18725,2550,1464,11019,-2933,-32768,-324,-32768,-22127,-6836,22464,-2647,-8761,-1815,23609,-20979,8527,14745,-16841,-6387,15827,12272,-32768,20497,-24740,-1652,-20869,-14106,-27816,-1469,29933,32767,-2938,-2675,-7681,-15700,19859,1855,-3325,3599,86,6876,4899,16572,19246,-7123,-31537,-12058,30776,20120,9357,20748,-8388,-14862,17201,-28592,6261,-12881,9140,-3159,-24214,4630,-20930,-32157,-7148,-13627,17544,8752,27181,25,13628,1985,-20607,-16498,23813,-29161,27492,28465,-4449,-2990,-7025,-27696,5771,11816,957,-30077,16483,9660,28204,20440,-24871,-21151,-20541,-12819,-1915,-17689,-32768,-26876,6700,4495,32767,17343,-21700,-18080,16050,2168,-23706,-20751,-25153,-26911,-38,-20859,-25409,-1927,21576,-32768,-18693,-27283,-1001,3400,-17798,-28237,-12211,-4545,-3376,-12069,-7106,-14271,32767,-21813,-12314,-13931,27873,5816,16723,-2872,-10576,4813,19361,21103,9156,6573,10240,12974,-13490,-14974,-1988,14338,26789,4406,-8869,-176,11456,1009,-10161,-7318,7525,-17200,-13483,-32768,12993,-7507,-16109,2632,9513,-16718,-23250,8033,-1096,-32768,22178,-20323,13974,-18023,-6095,-15319,-15799,-18310,-3682,663,15923,32767,4299,513,-2304,24794,-13381,-26562,-18736,16628,-28379,1136,8096,21100,-577,-8490,-18730,17823,-22879,-1566,32767,-32768,-27905,-2649,32767,19650,10565,5662,-30716,31974,-32768,5551,-17599,-8727,32551,8254,740,19680,-9783,21162,-2126,28706,13555,-22513,-24207,13419,-10124,-29344,-5103,
-32768,-15655,-11746,-4979,-17340,17337,30154,-23510,-5633,-25348,-20444,-18662,7203,-7790,3772,-15237,-17914,-9126,-23394,3208,-25836,-334,-32768,-7515,-5298,-19378,23828,23347,13390,-4407,2872,3829,11126,-22136,2140,5426,7798,32767,-6041,-4970,3476,16056,8859,15440,16880,11339,10702,2945,-25092,4478,8929,-32768,-13395,2571,-6773,23762,5982,22854,16134,32767,19440,132,-23466,-27926,6094,6848,10376,-14773,-32768,-19475,-32768,15644,23321,21249,-557,-1470,11579,17040,3727,21599,6450,-22754,-5117,-981,-32768,8512,-32768,-4755,-32768,17707,26304,-5565,-8236,14795,-21501,-3082,-31920,-17382,-32251,11541,3791,21573,1745,-6067,-21521,-7437,2770,21423,4694,-19254,-32768,-3816,-11674,4924,9019,17126,9233,12712,16773,-32768,28423,-19133,-20779,-26557,-7683,-9325,-11080,32767,697,-20191,-16982,5545,-1667,-30196,-20813,3532,-3088,19037,1630,-23109,-27420,-15215,5551,-20972,-26560,28551,17677,4470,2030,27073,23553,-12868,2627,-7401,-6497,-15585,7873,-7163,5150,3563,32767,-8569,15823,19241,-28794,14725,-16728,1725,22984,-1763,-13413,32767,-17718,-4984,5582,16967,18645,-9177,4234,28654,-3534,-1350,-32768,-30845,-32202,-17557,17465,-949,22196,-2307,-32768,13476,-16442,10642,22603,-8397,29477,9964,-6719,-4819,11078,-6693,-3462,-8943,-2388,-9973,28834,2056,17001,18807,-21553,-26051,-27491,32767,-2638,-14146,2757,32767,-15032,-31710,-1386,27571,-20613,-6236,29404,-4050,17427,21548,7440,-7953,13169,18405,11183,13774,-13394,25698,-10380,4451,4178,-16365,22565,5721,-11346,-2050,18047,-30940,-12555,-19757,-24440,-12933,-12317,32767,-19497,-3335,12493,-10673,-22572,13758,405,9751,15357,2881,-10820,-32768,-21902,16732,29913,13400,20831,28877,32767,4634,32767,32767,23556,-5175,-18554,19581,9790,29459,-15021,27193,-25267,-24973,-27,-9968,-25512,9575,6019,17298,-3533,23756,15705,13530,22508,71,32767,11974,-25356,-32768,21052,7331,1621,-16720,-32768,19801,-16040,-816,-21494,-11157,-24778,23723,-29464,-30247,-7922,-29255,-4086,7669,-29944,1924,-11532,-32768,-29268,22393,12264,-3898,32317,-2233,
11394,-14170,17961,-8209,-18983,-19429,164,2260,24744,5202,-32768,-14239,854,-4062,-7147,-1306,-274,32767,-32768,14073,11328,5332,-4786,-31900,-1751,-10513,9856,-660,-9039,-3096,18736,-1740,-7752,-81,-2235,-172,-10765,-10773,28569,-29640,-42,-1159,-19906,2518,-788,-24339,-11257,-10015,-19117,-2444,-3084,-4645,-26191,-7020,-15339,24033,-6031,-32768,7422,-25689,-14639,2698,-445,25721,10256,28310,-30169,-6100,-1961,3978,12524,-15982,4949,3010,-3383,-29288,-21803,1258,32767,-14286,-9361,11038,-25003,-2136,-32768,-10614,16240,5626,31564,21226,-13856,-6573,20227,3524,-1883,-6734,-26016,-9321,19347,-19605,-5999,26408,-22276,-4415,255,-29286,1149,-17069,10018,7105,-14414,28585,-4419,-9641,-9500,-25871,-9229,17764,-22355,-2774,9168,-20453,29519,-29434,25272,3130,23768,-27658,385,12303,-10771,5187,1588,-7591,-1309,25117,5313,-13703,-2608,28032,-5177,-25732,14402,-7227,-8211,-7441,29299,-30398,2457,13823,-3685,-7266,32767,-16002,-21309,16353,21963,-11622,-4958,-10801,-22390,19060,-32768,-26661,9226,-20478,19763,15870,-2356,-24211,-14112,1031,9146,-32768,1348,3264,32767,9210,-636,17407,-28654,5912,11724,-13922,9762,-17340,-29657,-14052,32767,-15904,1478,-1174,-18230,-6526,-27519,-2218,32767,5338,-4520,3,10464,12861,-32768,20390,30395,-17575,-29497,13310,19380,-15389,-9396,-1805,-24338,-32768,3481,-32768,10140,-29355,28191,-2905,-24048,22782,-90,-32768,-2475,19548,-7035,-24711,-21652,4359,-7088,9296,21388,3894,-9037,-32768,17037,-3342,22519,23117,31242,-15158,-31557,-6918,13948,12373,-18934,19965,-6118,26503,8722,-9233,6337,2518,32767,-11031,-18852,25610,27361,19151,12285,10363,8365,10044,2930,4995,-7166,28895,10271,21366,17551,7342,-16976,-11870,-21611,21966,2649,2081,24718,14851,21524,-6036,5319,-32768,19420,6373,14985,567,-21867,28855,-6562,5712,7310,23716,-32768,4859,586,-30973,-11533,27372,-25860,-4747,13395,-17787,25622,4026,-19435,11901,-12499,6734,5934,3868,14657,-18340,19638,32767,-18578,-8439,-23508,7332,29580,2320,-15300,-29713,-25676,4109,-16646,32767,3910,-11408,243
93,17160,13115,17256,439,29412,11584,3655,14471,-24352,-14986,-26459,20198,-409,30001,-8980,3575,9036,-9876,-32768,5974,4451,1571,8257,32767,1050,-17724,-199,-701,2381,959,15952,4321,5584,-5450,28683,4811,15618,2319,22058,13042,20202,-27618,21759,10476,-21100,-22578,-9107,27195,14380,30347,3242,14287,-19581,15174,-8597,-24845,-912,-2558,30387,-22643,-21978,4357,14139,69,-9069,-2743,19451,11592,28215,17909,13043,-26962,6143,-90,-27735,-10683,21117,-5505,-1750,-32768,7961,-261,-4682,-7125,-4707,8214,20826,-8622,-6990,4559,-3518,-3637,21,32767,-7673,-15099,3497,-11137,16826,-20267,12098,16712,11299,-17401,11093,-32768,1932,9249,4509,7226,9314,11946,155,12613,32452,-20768,-15399,-5755,-29225,603,-12611,-14153,3453,480,3368,32767,-4568,-465,18535,-18328,-22094,-8199,-4841,2017,27024,-12550,-15753,-804,8099,24598,335,-15065,18842,23561,-22746,-5915,-30863,-13501,1744,32767,27539,-5065,-9429,-2386,-390,19088,32767,4453,8911,-8633,-16165,-18457,-8823,14046,394,30506,-9559,-13747,11460,-32768,-8020,3781,9376,-24264,-13943,-3714,-32768,11218,-10109,14477,-16934,17326,-3348,1594,-10161,-18688,-20889,-8297,10423,23910,-12191,-31297,-5513,26512,-32768,22174,14092,6500,-21901,-571,12478,31421,-32768,-10991,-3179,22259,17536,-29405,7899,32767,-19859,-14598,25196,-31233,18408,19087,-6040,10800,32767,-23781,21736,15730,22874,27842,3864,-13439,32767,-1797,-32768,13741,26099,-2597,-32695,-7153,-27318,-3604,-12011,-8356,8640,-32768,16137,-28397,8182,-32768,-10258,-6795,-2967,6220,-14587,24380,-13680,303,-18159,-23777,-8907,-692,8906,17950,-1140 diff --git a/tensorflow/lite/micro/integration_tests/seanet/add/add14_input0_int16.csv b/tensorflow/lite/micro/integration_tests/seanet/add/add14_input0_int16.csv new file mode 100644 index 0000000..e923911 --- /dev/null +++ b/tensorflow/lite/micro/integration_tests/seanet/add/add14_input0_int16.csv @@ -0,0 +1 @@ 
+-723,22704,-2481,31053,-8723,-25571,29121,4661,-19491,-17743,19419,-6602,-636,2672,-13643,-24337,-10759,-18720,23053,11312,-18788,6497,16263,16765,-17807,32751,13165,18460,-21181,-3123,30079,-8913,28892,-10805,-28270,1712,12732,-17699,12751,-17770,-5574,26185,30996,-32356,-16194,-23209,-23180,-3748,-12213,-24008,16817,-2511,24655,-2460,14268,1232,-7037,-21736,-27486,12682,-10173,-29159,4957,8927,-22098,13741,-695,18421,-12254,-12079,-2921,415,-5409,-19924,-17399,-7261,-27750,-9895,-894,-1389,-4998,17209,-16790,28685,-2617,20229,31831,-30418,-1230,-5422,-20094,-4715,11624,-14056,31074,-31395,8524,31581,3958,-19908,-8646,21559,2775,29579,26943,-28768,16149,-18683,24623,21407,5223,-20019,-29527,-3580,25011,-3516,16073,22953,-26056,-24226,-25934,-12736,-24601,25223,31190,9809,-13113,11157,16898,-5274,-24254,1786,-5784,-22358,-22481,29991,-7443,21104,-27636,30398,901,-19299,-15483,-3632,27256,-12540,-26420,-7851,-23701,17575,20461,355,-28083,18383,20992,-7435,9017,-10984,1836,-30070,13149,-25733,-31974,29812,-25361,-12341,15707,1225,13064,-18709,25382,23639,-7395,31466,-20295,20891,-10614,-1779,-20939,28867,21720,-23606,25965,20107,-26225,21550,-10295,134,9890,-16568,2855,26448,15749,1999,-8619,1621,27701,16038,7864,1183,2870,-31672,8694,-770,19056,-29713,-29369,23361,-15110,8237,-24031,-24333,-19346,29850,31872,-17901,14884,-6547,32058,-19420,-26113,-30611,-26912,-21576,-14346,-5235,20918,-5660,20869,17263,17704,17257,-6176,7615,19875,-23734,1320,20775,14088,23207,-10490,6767,25455,-19476,12378,22307,-25960,-9279,16270,-19283,8179,-6188,-30456,-10403,-6311,13073,-2184,7014,-20956,22167,3776,-15406,19978,-28349,11956,-21392,14684,-137,15522,437,-8875,17448,-7702,-2097,-31867,-14895,-31471,2162,-15975,22463,27503,-14832,-2506,29784,20755,-28768,6644,8597,28208,-30365,-11300,26718,-9147,19759,-27960,-31014,-25997,-19214,-5384,-12769,-29418,6053,-9409,-22267,16423,8533,30901,31523,29800,1578,-11477,28354,-1682,-20095,-11365,-1813,-21690,3940,-15478,15086,2932,29698,-6199,3
199,21929,16194,-16703,11876,23856,-32729,18425,18090,5620,7607,22381,15859,7564,-8979,16006,-26597,10375,18120,-10370,27481,6504,-3415,-20601,26407,-28061,8004,29033,26766,-27417,-11479,-4970,-17052,21444,-30669,26907,-10419,10646,18275,-10095,8626,-28099,-14330,-23287,9458,17630,-14346,6603,-15702,6934,-24185,-14082,22064,-114,2059,-23017,-17506,-22630,-109,3752,20585,27089,5058,27373,8933,19202,-21481,-11414,-25667,-26218,2870,32642,-16989,-3749,-13511,-15965,-13783,32483,-29875,23433,15472,-6927,12220,-3019,-4165,-13136,13279,31421,27175,12483,-16233,-6038,-837,-6199,20476,-4862,7325,31997,19276,-15719,30996,11962,5481,12912,565,-13745,24924,6411,-8347,-12440,15548,-13916,26683,19199,-16899,-16994,18172,18382,15071,21271,-18466,20057,-5643,-12296,32512,26439,-476,20156,-1179,-9529,-24795,23435,19851,-19410,-14252,-13218,-18904,-18016,-789,4837,-4794,-2145,-9084,27208,-26093,-26719,7802,3948,10172,13730,-5952,14687,19688,-12652,-8112,4559,-20325,13876,-11161,-317,-13710,12308,-23566,-7048,3311,-20789,-15296,-1590,-21398,-1682,1169,3260,22292,-29287,-135,-23653,-3944,-12844,-23721,2305,32506,15154,16014,-19162,22366,-10241,1141,4620,-7839,-23961,-11841,32052,7283,-26466,-20714,-12097,-1253,-9953,-18316,-15969,-22371,-3564,-10070,-24079,19248,-12073,3348,3250,-23692,30268,24753,-20345,9438,-6652,32619,6039,-15597,-8053,-16392,-12329,5527,15533,25261,-17042,-12750,-24329,-4254,-30875,12450,28482,24106,-22582,-3649,19124,26610,32273,-14719,19392,29736,-22594,11789,9963,-3294,-19320,-2854,-5137,29076,-65,-25724,30452,31446,24005,1146,1644,-13005,-28870,1136,-15412,-24131,-22689,-3395,21005,-31926,-25809,-26834,20383,-6689,17811,24477,-5794,27374,17341,-3872,-27244,21971,-2417,31220,19957,-24933,-11743,-19179,6752,25553,14313,-24182,7348,-6084,-31086,-8131,-16019,25322,4325,-4874,-5979,-30272,25237,-16669,18715,21583,16978,-21122,16140,-4516,9680,28763,-9206,9789,22458,-14316,17946,22208,15001,-15804,-13049,-28299,31482,-25787,22538,-5852,7397,-30290,-6728,-20757,25688
,-29655,7985,-5717,-12439,-10431,26792,13547,9196,19993,-932,13328,-7460,-28666,-27540,-21657,-23140,23827,31735,11582,-15719,-30322,24197,-8764,16859,-6529,10109,25199,-24373,-11008,21681,5021,19618,6820,-15221,18558,-17032,-21515,-9941,25650,-13849,25970,-25355,-2860,-5483,-11700,-6153,5243,-32654,-26416,8463,14410,-21157,26236,-19063,-22335,-9299,-2428,-5854,-29032,-2587,-31205,-26709,-7468,8087,12083,680,3959,-9608,-18401,26475,-20306,-6897,23168,3003,-13603,-14042,-28424,24958,-26998,-6546,1040,-15739,5920,-16920,32703,-4221,3405,2791,362,-12753,23320,-22491,23153,-28738,-7946,-22889,-23449,22910,-26962,1160,14227,-32293,17789,-18400,-28534,16836,28786,-9708,-30880,-14215,21380,15007,15908,29648,3130,-32355,-31277,3080,21375,-12586,-30844,22022,10932,-7777,28141,10997,26228,17389,-28371,-6364,-22458,-23958,10037,15850,-15206,-14938,8094,1653,9218,-21793,-22420,30138,24912,32599,10800,22552,18301,-23184,-20702,-668,3797,522,31934,-4363,10842,1515,14179,10831,1477,-1300,-31583,28366,29860,29234,-30336,-22572,30694,-26434,1551,-13513,-32705,209,-22577,-21222,-28690,-7310,30746,32209,13089,-22810,17971,-29546,17758,17129,-285,28344,-16386,-1759,11880,3332,-17626,30228,-17363,28922,112,28772,-29593,-20631,14368,23185,31355,-3422,-2246,18446,-1623,6852,-31194,-32056,1369,13799,3662,20334,-13806,9429,-9908,-29277,-20274,23858,7908,-31827,-5621,30490,28986,3570,19072,23949,32255,-4037,-22517,11178,848,-28340,21535,-16587,625,29589,-26512,-20653,-1976,31319,-19800,18347,12619,-28769,-27801,23609,32478,-19175,28383,-1175,23193,4548,-2129,8008,-25446,-22292,-8199,-18813,1666,-27298,-10550,-29847,31093,-30001,-22768,13001,-5984,32596,6825,-20094,-25837,10104,13134,-22165,-4390,-12581,-811,-13834,15866,1498,-2385,-23013,-342,22664,32755,-16186,7459,-20552,17971,15456,-28234,-21827,23202,-29712,-26238,-17131,-32706,-17991,-31633,2625,-6292,19618,-28020,-21742,10868,-24284,-21803,31641,4214,-12488,-21882,-30442,-27442,-8138,-26174,-12973,8335,-4100,-13378,9256,27759,28656,-11
444,25641,-27077,16899,-16855,24569,11391,-14603,-9468,5901,-5092,-383,28598,-20137,-5230,7673,-3440,-13116,29500,22736,15659,5661,3646,-5265,18765,-12235,11118,13196,12311,-22708,20686,28708,5181,-20223,-4654,12292,-23792,25654,-1187,-28656,30880,-5097,30566,-30507,-3760,13391,5024,28185,-8421,-19070,10015,-22048,-15409,16086,-29881,-22944,5360,27752,-22866,4142,7043,-20570,-28568,6331,1942,30850,3794,13368,12935,-9494,-24864,22474,4708,9912,-10536,3407,-24872,-12732,-28525,-28838,20137,11442,28064,-24930,31884,6881,21384,-3018,-30757,-23552,15208,11670,21804,-21584,26828,4764,-29027,-5171,32325,-26688,-31097,-18777,-26954,25337,5932,-7861,-124,-5419,514,9833,-22273,19550,6665,-25992,-26367,-643,-28439,27204,12116,-21699,-13638,21946,-9885,-29183,-17568,19377,14686,-31220,143,17391,2994,-16206,-17703,-3959,28658,10102,-27556,-278,18970,-7948,-9074,17872,-19572,27653,6637,-21429,-13630,23780,9126,14960,-23019,2495,9139,22623,-6562,30371,21259,17660,-15417,31422,-28471,1027,11821,9267,-19767,-16287,8764,-15156,-30250,5024,10988,3896,2962,28609,-32070,9294,23413,14885,-2525,4023,-8842,-31883,-27147,-25785,17474,4997,-20856,-8677,-3785,29334,16151,-18905,26260,20939,25940,6430,-8811,-32154,11682,-17345,4303,3456,-24450,27824,29517,20985,-13487,-28153,21963,-2222,21898,6740,20229,4444,-22290,-29998,-11446,-26727,-13945,18495,-6186,-13996,-8881,5937,31814,-1274,-908,3119,-7797,-12831,-12997,-2885,140,14061,-25324,-5388,6887,7867,-27853,22674,12902,13966,632,25753,-18408,32006,29549,-23434,12168,2534,-30797,15976,29718,-30998,-15861,12267,27967,-4841,13464,6185,4647,-26957,-12264,-30849,-24870,-9154,-6259,-30219,7616,-4585,26800,-27225,18510,-9415,31574,13167,26986,2956,7727,15122,21139,30922,20409,-31644,-29795,668,4124,17403,-8819,22922,25827,27278,-14793,-17314,6721,-30192,24367,15892,-2705,-3116,12592,7731,23864,7165,-21591,32246,7996,544,3384,-15793,-24571,-20356,18982,-16326,-8227,-32488,-18219,-24721,32754,-7056,23240,-9625,5759,-1772,1102,-20161,-22840,-26249,9552
,27955,18882,137,-16629,-11165,30133,22065,12502,1797,22158,-15874,-13965,-18760,20409,4911,-15528,29501,19942,-7571,818,22810,-26897,12609,2426,-31461,30375,-25465,-4840,-6151,-26300,-16476,9858,-23395,-19069,14182,29096,-13034,-28687,11746,-31162,20499,2096,25153,-15682,-13794,11179,-17689,12405,-32297,26634,-23053,-24264,28943,-23415,5351,1967,6035,-31934,21076,4400,-30507,-120,-23015,-15470,-19853,-1412,-22363,-4762,23232,30914,-22441,20011,-31589,-1836,-29107,31196,-9505,-15681,-11883,29717,-28554,-8321,-7617,9717,-22098,-25186,-4200,-31023,7312,-11457,-521,-15891,-15051,-7243,-13828,-27981,10293,28689,14532,32016,30368,18805,-1466,-30967,-9395,1002,32286,-22913,-5738,-19137,-18514,-29056,-15674,17251,-19559,-3577,-12885,-18954,505,2368,-26034,-32456,-24627,-8889,15831,7783,-10283,-1853,4177,27958,-16391,18147,-12900,11176,-22737,30304,-13285,-30916,9083,-7967,-13950,-4720,-6776,-17237,-30587,14946,22940,3361,-27714,-26046,-28436,5275,-15486,25146,32372,21505,5240,30858,-1766,22947,-21889,-12984,-21032,15597,-3905,-8380,17023,-14201,-21176,-7734,-6847,15520,-27359,7378,-19763,-5442,-14735,-17390,-16943,-24897,1260,6366,19538,23975,27000,10303,12416,-12621,9017,19075,-22056,21940,24526,20829,22566,-4862,-18547,-11469,-18827,-13628,-16686,15889,-13623,21947,6950,-14275,3238,-28970,-12009,19138,-9255,-28193,-8022,16123,-24480,13260,3643,-4062,-23351,23191,24356,-31637,-24790,-21474,-9364,32559,3998,-4932,-11148,3817,-26090,-23374,-17079,-30074,-24174,-15055,-23832,6146,-12630,23260,-10555,-19662,-1473,27491,-5840,11026,-20363,19363,-5121,24386,-31431,-10367,27520,5211,32370,-12007,4954,-5733,-13469,17005,-14955,-3123,4873,18389,-7860,-9133,-14312,-17686,1300,-23231,3158,5400,-3785,-15298,-31489,29628,-30836,4110,-12234,-21128,-29126,-15012,21135,-20977,-30421,31984,-9180,-16462,-5487,23053,-6342,-28379,-25472,29765,-31151,6070,29014,11084,-27929,-2160,27861,-30361,-29860,-6529,22363,-30608,-18090,4558,31158,-18152,8800,-540,-9436,-11855,-27687,16998,-31502,-23417,
28416,28091,18500,7220,7586,-28813,23506,-20383,10020,-21745,14754,29916,21140,21668,19883,17695,-4601,-19118,8579,-3472,-11803,1025,-1108,-19705,-30140,-6143,-16673,-4481,-10519,-3636,-2225,13790,28521,1222,-21138,-18027,-3608,-7025,17107,-8542,-3364,14778,-2803,-26646,-10582,6655,-17331,27218,-25267,-25539,8763,6349,30723,19136,-1151,-3407,-13793,-10068,-6618,-19397,-26986,6520,-2397,23223,-8109,4602,-24170,-39,24278,7659,-5153,23180,20729,-9097,-29575,-11620,-7378,-29660,-10668,28703,1647,19397,6580,11068,9409,18159,11216,-4371,-9472,-15841,15947,-17388,-10969,-17217,-29629,-16075,-25204,26977,27625,5335,-518,21207,25722,32390,-27045,-3905,-16362,-11464,27354,-32408,-18935,-221,-25336,-14755,-30240,-6452,2627,-12878,-9536,-13216,-650,8012,-16041,7768,-25410,-3258,17768,15653,25357,-4981,1756,-3987,4322,12007,-2993,-22125,-27673,-26794,-22868,10329,12206,-9686,-10976,20609,-12109,-25595,22698,3251,1028,-5958,-17382,-31059,-18631,31366,-3824,-20294,-10357,27381,-29665,-9213,-13284,-17052,-21318,-2839,20857,-27390,-17295,6479,-9531,-15263,-18354,13252,30022,2516,-15636,13425,17112,-805,-8672,-18621,-24374,4961,-4250,6114,-20682,-11039,29609,3046,13292,5184,-25799,16996,-30261,22363,15007,25947,3904,20869,8216,-23641,12181,31870,7003,9784,13397,26657,15179,2074,-29356,-23129,-31589,-19481,6525,19415,24906,4338,-23731,-13733,10832,18842,21034,-7323,18037,-18808,12490,-10148,28307,11708,16135,15806,3261,-16368,15567,-21481,-11407,749,-26294,-8211,-29363,25939,-19615,-8636,10135,25845,1263,-28083,-19599,7511,-17526,-29650,17786,-11878,16326,15997,-23742,-29507,-9794,11857,23492,9064,735,24120,-3576,10979,27221,-11978,17965,24839,-7785,-27145,5982,-23291,-30819,4664,-23853,14406,16947,24560,-20669,6933,1788,-30732,-10887,10128,26642,13862,-6779,13199,-10493,-22396,-1877,18197,28215,12576,25388,29999,28644,-524,31351,20639,12924,8384,6425,10740,-15561,12322,3908,8731,-6678,-30486,-31901,21804,-13909,-17322,17624,18672,-7732,2828,10598,2805,32676,-30398,22203,-6231,-8580,-
24515,19166,1969,18991,-5294,-19159,5964,-16545,30816,-24274,-14685,-7234,5443,-22451,-10346,10942,-7985,-25377,30936,-29642,13471,12076,-32339,-31821,12994,10743,9097,18093,22600,3065,-20792,2887,14643,6534,-6128,-20928,32182,16824,-14173,-24604,16595,28449,-19045,9616,-6768,5531,27701,-32591,7517,24348,-23157,-28731,-19249,26782,-15124,7228,-12286,17242,-1653,21139,-25522,-31156,-19611,19222,-21972,-13563,9822,17104,-26043,10260,-5859,-19921,1096,-26256,-7870,-18969,-30191,388,-28817,32478,13825,-27829,21283,-12364,23384,-8530,-30878,27409,-3362,-10461,6854,-29799,22107,-19282,7795,-22507,188,31868,-21921,-3580,-10966,23493,-29063,-24725,-23391,-21543,-13741,29424,-27756,9755,1053,-30615,10268,-29009,12668,11997,-9012,19572,313,-23860,5000,7664,-3854,24308,-2380,-21442,-32453,11714,-25718,8973,15256,-17979,-26907,21121,-30338,3937,-9175,2112,4881,-14527,19459,27510,-3658,-4678,-24474,-26390,10539,-17062,-27922,20091,-13154,9436,-32293,3551,6544,21588,-29542,-16091,23908,10740,-25073,-15331,-23333,-26517,13398,-20396,368,6255,10097,-17583,-5734,9645,16436,7334,-22974,19841,-11037,27642,28934,-28347,-28602,32333,13063,-22337,24825,1229,-586,2692,-18044,-647,5444,-18545,-14594,29539,-1079,23017,20137,32506,-9147,-19903,-11633,27901,-23073,27941,-5968,23646,29213,6515,8357,-18890,21187,6261,-13856,2394,-1073,-29489,-23902,29962,8146,-18830,-9969,5332,-27649,-24755,-18088,22396,10046,-25826,-27948,-4117,22402,-30214,2407,11667,2267,-31853,13027,27567,-26489,-14630,24866,-3946,-31427,-18021,-30198,30292,-18739,25047,7470,-32390,26910,-14238,-28742,-473,26658,-7211,-2327,-29356,5071,-13220,-19390,23185,-25320,1870,-29192,30886,-6663,20704,19338,17185,307,-29659,-4905,3837,-6818,-9910,-4225,18221,11051,24214,851,-25466,16401,28706,-1215,-20881,25535,15585,11963,1921,4494,26596,-4192,14476,6852,22246,7955,2871,1488,30656,19207,7118,-10046,-19521,8387,5502,-462,5227,27298,-4852,-27141,8379,-17322,25555,-8672,2714,-30028,-864,21591,-12850,8181,-6050,8342,-28104,14927,6955,-1
9413,13283,18016,-17270,14236,1369,-10569,13018,-29530,-32517,20614,17950,-1772,-29529,4985,10414,-1651,1000,29404,-31693,25093,-9158,-23288,10830,26119,13407,-8252,-24790,-4546,-26127,22695,21377,-1571,29393,-6490,-9474,8018,15616,10680,5868,-12053,15073,-12606,-10978,-31767,24472,11605,32455,-7671,-3310,30924,-25943,-19260,20231,-22726,2317,-23301,20687,19493,7056,10066,405,6009,7967,18832,5443,28874,-8674,13594,-1342,-2178,15344,-1540,10102,18539,-6781,9273,18226,-17440,-15442,-15985,11210,16380,30793,-10014,7876,-24423,13655,-1124,-10700,-9169,3932,11013,-15625,-24430,-8893,-15155,-107,5408,11434,13390,27130,23864,3241,-5262,-9331,3774,-7548,-25902,-19581,3607,5489,6788,-25973,1361,2382,22166,-20997,22639,-8459,30802,11134,15179,29436,-8413,-8949,-23759,26815,-573,5258,10196,-25259,24627,-10763,883,807,23342,5022,26054,-31695,5755,14357,-31508,-1620,17267,15792,-931,5961,28027,-4367,-12352,28657,-16372,-24447,9480,-24759,-31696,2549,4761,26434,-26071,-13126,14347,-24759,-31343,21429,11843,-27447,3568,12693,-24224,16544,273,11851,-10173,-20634,6846,10878,-16518,23549,-30662,-20990,18665,24968,11885,-31944,15341,15778,19357,10094,24298,-2975,-16152,-31697,-26262,2470,15542,6065,20665,29721,-21907,12519,3023,-27622,-10872,-3038,15792,-21012,-23672,-24103,-22270,13645,-23286,4092,7313,28131,-20195,4823,17221,-14681,-26167,-616,5489,27619,-727,-25916,-4173,20090,-29252,6400,25089,6465,-19953,-14010,7705,28000,-32319,-16273,-4440,4948,12781,-10126,-13136,14594,-478,-254,19839,-10449,-5322,1574,14417,5307,16258,-9268,23427,7176,30847,24338,19550,-25904,27227,30524,-24522,4231,19919,-5280,-26477,-7453,-17137,32194,-3664,2862,22557,-27841,29284,-24779,-10218,-18378,15626,-20764,29868,8916,9350,11490,-25014,30269,5997,-9037,20054,8400,27458,19601,17513 diff --git a/tensorflow/lite/micro/integration_tests/seanet/add/add14_input1_int16.csv b/tensorflow/lite/micro/integration_tests/seanet/add/add14_input1_int16.csv new file mode 100644 index 0000000..5949453 --- /dev/null 
+++ b/tensorflow/lite/micro/integration_tests/seanet/add/add14_input1_int16.csv @@ -0,0 +1 @@ +8266,-12123,12469,8280,27192,13439,1330,29556,-23276,-31761,11272,7239,16014,18308,-29131,-14962,18052,-23554,24680,1177,26288,24243,-16538,-28466,31751,13990,9329,-8193,12331,-27098,-13433,-10009,-13228,24944,2207,683,-30032,27056,-4738,-30778,23019,-30388,21014,-3839,3173,30661,720,-17524,-7229,-18091,-720,-2396,12617,14143,-19874,-478,-6741,-28551,26900,26477,6744,-23960,9309,9976,-940,25098,25201,29647,-14848,-4319,7897,-16496,-19624,15591,-5115,-16288,11591,-26167,-25236,-31150,17775,-18842,-30113,26921,-25727,-21714,3595,-6891,-24516,-12256,-26608,-23847,11760,2540,-6951,30795,10219,-12309,2959,-23062,32010,-23321,-18062,-8523,-19032,-17870,-9022,32596,-6548,13702,9228,-4947,-22014,-30584,11424,17199,-9633,-7272,32430,-15586,-14743,-29682,27566,-27866,-11722,20920,-8486,31775,7787,22624,-20887,-9084,-16906,-30801,26790,-13779,-16697,-5157,-11228,2403,-28649,-18426,511,-31438,17691,-18469,17953,5599,-11914,-5812,-2024,-18942,21017,-7199,-12114,8103,-11863,13922,30978,-3771,23344,-11347,27412,29287,12769,16800,-20405,18314,20147,22987,15187,-9090,-8543,-22055,31342,-28397,-25524,-5067,14791,31404,-25217,-10861,-10420,1371,-1354,20810,2283,-24451,8887,-21734,27697,22437,-6248,-25390,24747,-7371,14286,8632,10475,29744,-18848,-28519,-27737,11008,5939,291,-19963,17244,-10035,-27537,-29474,-24674,27977,17113,10109,4775,17095,5122,10807,17759,-8474,-29060,-1105,-12401,4511,18051,27852,9501,22083,12961,21075,17666,-9197,11273,-30749,-17777,-1444,1356,31018,-2356,-19386,28018,13534,4475,-30569,-12457,-1209,-30866,31610,10942,-4478,4530,-13412,25742,8569,14767,-12419,31814,12423,7279,28694,1696,4709,26006,-26914,3152,-19126,22173,-14906,-777,7573,-31495,23490,-8373,-23701,-32240,12907,-17145,5906,5526,-5608,6639,19097,8657,30746,-1916,-24335,26489,26698,-27750,-22741,9023,24161,9764,9508,32288,14941,13683,-21252,-13334,-18465,-15295,-3861,27080,-16250,-18357,-1958,-24309,-23054
,-13821,-27779,20507,-13107,1520,-24946,10063,29054,8500,29317,-5380,-31056,-23047,19462,-16846,-17391,22028,-7147,-27574,-29272,14288,-12076,30327,18018,-24984,24080,-10850,-17941,-12101,18778,-2984,-32189,-7664,-17771,4562,-26339,4082,-15570,-9672,-63,19335,20662,-24023,-5352,-8135,-15941,10588,2314,120,-16297,-10641,19136,-3459,2784,16963,2815,10242,-9173,-20276,12775,-12442,-11524,26603,-26359,-28732,-24224,-13013,22912,-30932,-10306,25737,27772,13568,14648,-1842,-30456,-18860,-15887,-29001,-31028,11442,13842,7010,15996,2660,-21155,28483,253,23695,8998,-2011,4013,24214,-14639,-18809,-4508,4648,-32416,-32456,10755,13073,-20889,-26256,10363,-29914,29139,-574,2797,-12211,31693,19068,14039,-13102,21835,-18013,-8690,-20916,-1463,15337,16031,-17985,-19652,-25322,-1136,-6040,-10209,-20874,6880,-28651,29530,23546,-13503,7273,-763,22703,-14220,-12782,25041,156,23190,10188,-28668,-7326,27193,-15986,21049,-26024,-32666,31309,-22936,-5806,-4150,31146,-9960,12372,-12197,3595,-31627,-8130,15423,7139,-28629,15353,-17670,9609,22794,-17091,-19017,-16049,8631,18591,-1590,-270,19377,14157,6113,-154,31965,-11957,-14902,-28549,-4733,11397,18562,-19887,5383,737,3861,-26949,-18039,30517,12369,1102,-19009,17330,-20388,-3139,-12884,3344,-31339,-10604,-31606,-864,-6805,9300,18028,20622,-14167,15587,-9689,-25004,-16052,-21390,1474,1741,-9832,14108,5266,-4525,5667,548,18796,18870,2333,-30013,7376,10282,28541,-10185,6893,-32002,-17528,-13784,-31549,-1474,-22047,-20626,7904,-7056,-17681,23038,-6488,-4159,12511,-366,-24282,9701,-13703,-7579,-9905,-25891,5521,-254,8226,-15815,19981,4333,10155,1850,2102,7304,18092,-17533,-28258,-11037,29084,-11303,-19817,31072,13783,14214,13797,17902,12569,27306,22644,-16916,-23416,31328,18238,-10842,2876,-3756,-11063,-21715,1913,-15516,-4591,24902,-9408,27554,21209,-8263,-20798,26375,11056,-21170,8561,18239,-24106,741,-25737,699,16947,20947,26460,6499,13690,534,-30791,-13064,7147,-21058,-25746,21618,-22009,-10612,-32288,-8676,5682,-28382,2178,14559,11470,-642,
-13052,20978,-17653,1644,17427,-16382,23459,13836,3686,11619,-32328,-28554,-15286,-27234,-12447,-14960,-31921,31644,27315,-16546,1357,-8329,-16313,-22764,21186,-29995,31164,-16997,17511,13498,-9267,-24108,26042,-17519,7801,-24548,14794,30173,-15379,10522,12130,-7773,11996,16575,-22270,22297,32583,-17032,-4198,-23400,2365,-2066,-14022,21422,14,-23278,-11376,13383,32339,-17393,26406,26992,15911,-10210,-11899,14438,27057,-17456,27315,-19079,11455,-13576,32300,17139,-14556,-4898,-26381,-19784,18853,12297,25326,1038,21005,5393,-8438,-31431,-27198,-27137,10099,368,13361,-1639,3764,527,-23746,-18987,17508,-8296,9462,14523,27271,14239,-14945,2034,21950,27851,-26007,14587,-27234,-23504,-28446,17226,26325,-4439,4280,28616,-7168,20114,-12820,12910,22712,23225,7507,-3367,29863,25654,-23767,-17543,22814,-5531,16665,-15232,-20383,-29722,31522,-26129,-1423,-20376,2994,-25018,-9737,-13788,-21704,-26283,21643,7985,3013,6747,-23235,18785,5677,-24497,-16665,-10352,26370,23974,15875,-31038,-16802,23935,-15671,-6789,15270,-19115,25699,9327,13994,17763,8420,-18592,28734,-17282,29239,-20728,-10799,3273,30705,-29005,-14825,22574,0,-19018,11544,19955,-12233,27238,32669,-3018,-14325,16208,30871,32403,-2245,-12820,14636,-6900,-25740,-18741,7380,-9837,-930,17726,-23907,5462,14994,-17408,-28846,-4273,20531,-12891,18404,-28858,16398,22504,19019,28748,30542,13175,-29567,-8709,17080,30358,-11178,-13428,2273,-903,10121,8406,5970,28197,320,30960,11503,-2479,-31401,-25248,30919,-42,-14925,9033,-22031,-937,13265,-8013,-23915,20282,12932,25254,32203,-1513,18284,14362,32052,24903,174,30010,-25824,-1454,-2488,23554,-19185,9044,-32045,-12024,-535,29538,22427,21731,15858,32593,-9784,11707,-23284,17373,-13402,10018,-17846,-6084,-12321,-18518,-20246,-18031,9408,14292,28363,23927,11088,-32524,-32272,11418,6848,-11532,-2340,19253,-22348,-15588,25618,-18491,16895,11864,31907,-24330,-16320,-17810,-218,-4095,-14783,21928,-21160,-27298,700,13841,2906,-28814,-16331,28921,-21455,16510,-18660,-27385,29503,-510,8912,-
29454,-32007,-17948,-2574,-881,24249,21684,-11234,-8892,-32749,17761,-31109,30793,11180,19565,25123,-19168,-25322,2026,-16491,-28913,-3679,30375,1581,-30491,-7628,24009,10579,-3712,-9484,14001,17179,-28847,25255,-22654,-2514,-7586,30137,9320,30916,17899,-22523,20802,4007,-574,10442,32525,-18447,5527,30200,9887,28127,-23196,-28384,-9303,23235,-7870,-3771,-6202,15216,1671,-20497,24993,24643,-20949,23092,4026,-17686,21411,-8671,-3616,10623,31081,30066,25157,-12436,16368,26765,-29403,-29704,-1558,29422,-29337,-16988,-1616,30651,30699,-18861,23251,10595,-12795,-30881,11036,3448,22910,4632,10646,6757,-3038,-11653,19467,-3284,19036,-11014,15445,2149,-32217,29540,16498,-19537,-18660,-2649,7549,6855,15876,28609,-26370,-20686,-11902,8509,17238,28380,10605,-7579,-21069,-6524,-29920,99,-10803,-11244,21831,733,-19349,-1036,14156,24415,-25527,24817,-28903,-2857,9540,-29156,-26226,-2371,-15285,26743,30701,-20188,-16428,-12539,11303,14701,-10628,30754,-12506,591,-25002,11236,-11091,-3193,29281,-11567,-31287,-11271,28115,16241,4775,11252,19915,-293,11090,-25451,10697,14533,-3174,22695,1101,-12219,2306,-3991,-30377,-16133,-14222,23591,-9133,17436,16626,-5071,-11913,18081,-20945,13317,27627,-28818,19719,-2057,-29811,31431,-27899,13949,-19279,-32172,-11028,904,-1922,22221,-18322,-11861,26822,-20514,10327,11699,2838,26192,-11443,-23048,-4225,-14552,-261,-5884,-3844,-5711,-21048,4546,28926,-13484,-32166,11484,-15912,-20067,28821,-3078,-26551,21576,-6398,4580,-3623,13995,7787,9477,-914,-11348,22253,3960,12760,-10128,11918,-15710,29101,-12990,-25685,-23293,32635,-25636,-7990,14099,14669,-17572,-27188,-6726,-21729,29217,-15411,25146,-17973,-17281,-19231,31848,29532,-17443,-3184,18848,-962,-16126,27477,15393,-8954,-7392,-29184,-23897,29162,-26015,-12112,-11382,-26008,9090,31653,-18787,23724,-29685,19146,-27773,-19947,-25086,2699,30312,30878,7988,-25886,21665,-19358,28005,12495,-7952,-9825,16964,26703,-5991,14222,19744,-15670,-29254,30161,-1721,19702,18873,-4800,-25966,-918,-14499,23123,-2673
1,32684,15835,774,-10539,-17802,-17803,-4909,-5490,21164,-796,-10146,3048,18345,22437,-23624,-16572,14109,22886,14531,-17418,-16382,12636,7950,24765,30938,32508,-23033,-11717,16535,17158,2216,14121,19707,1925,-21557,-253,14700,-10657,-22449,19558,-16242,-14526,24825,-1387,-11228,-32206,-30506,18011,7714,-15883,-951,25761,-8732,11912,6097,11547,16563,-10019,-5886,26758,-25690,-5978,-15976,-16202,-29573,15336,23741,14326,413,-7357,9726,137,-8435,-16999,9101,6287,31659,-29270,26492,15461,-24982,26720,-21064,-13564,-28198,-9931,22545,-21665,-12931,-24507,-15647,11422,-30180,-29237,18381,-23020,17189,13800,-25779,-19556,-25924,15623,13568,15408,26733,2313,-12682,-30805,18435,1199,29347,3241,32016,-21723,25988,17748,16274,9683,-26268,-11212,-26381,9681,18935,7598,6865,7339,-25680,-1,12056,12733,2871,14114,-9942,-12554,-21623,14223,17912,14074,-5856,-27651,26946,-26252,-21619,-22347,22996,5189,-9966,-5911,7487,-13781,-4084,30282,-31586,10771,-4732,27353,-25898,19311,-25835,9591,-23480,-12847,-22065,23864,26783,24832,-6701,19854,11859,3324,21683,15502,-25583,-22566,-18024,4628,-19579,23296,5972,9088,-30548,1871,27342,29768,19380,12945,940,-1697,29157,-31848,-4851,6124,5804,12511,-27273,18520,-12874,-28024,11592,-18994,17716,-4962,15546,-22739,9243,-7860,-16472,-29314,15247,-19770,17672,16771,-23421,-22966,-5144,-20802,17266,31426,12753,-25504,8289,24200,18603,21035,-20687,-30562,-2596,-6740,-18660,-15471,-28460,-28600,-4781,26561,32051,19748,-25128,-4097,1561,-17688,-4511,-6398,-14987,-27514,-27509,-30819,-29275,6867,25172,-25322,-4884,-21497,24047,24862,-10723,-17056,-21251,4671,-24060,-6979,7232,-17536,31308,-23777,-25502,-1158,20346,11972,1439,22729,-5173,-16877,21081,468,22175,4471,18310,28432,-32093,-7091,18,14757,19741,12427,-3968,11839,29990,231,6222,-12293,5347,-19440,-4839,-25941,-7891,16128,-24663,13781,30337,2566,-17933,-7254,16250,-31196,2208,-19000,32272,-19088,-27463,-14809,3145,-2611,-29948,27144,15833,27791,-3691,24230,-1210,9128,7998,-9768,-19147,3019,-1152
8,16751,6809,1486,14550,-18594,-24190,31411,-20107,21290,29697,-25812,-16969,-27451,24640,10254,7813,1052,-16118,22249,-27158,-1146,-4818,-23927,17602,-6968,-17300,9127,-27796,31726,13326,30538,20765,-19669,-32717,18592,3297,-13193,-1534,-32419,-16820,-6585,-3485,-20940,11183,15624,-31967,10415,-18151,-23858,-18632,-4949,-3047,7800,-32513,-21208,10464,-21859,-1392,-19380,-23395,-26457,11650,-14362,-30853,5443,14583,18590,-2925,15412,13530,20222,-12769,25576,1641,12282,31744,-1110,-10421,24958,21160,-8819,13857,26558,-4629,-3400,11547,-8074,15693,17972,-32468,-8629,-20825,-10301,14908,2322,20738,13295,28192,16121,3860,-22889,-23387,-5431,23676,22904,-4919,-24541,-12069,-25792,-2167,7389,23461,-296,-19820,-6457,-4895,27713,31714,22287,-20273,-29803,26042,-32251,11387,-25222,6187,-31292,28742,32397,3538,-2795,30614,-27744,-10813,-28473,-29423,-21008,17934,-9997,15186,-19090,-3782,-29799,-6424,0,18063,8701,-6676,-27224,17576,3925,-2232,1574,30704,21406,-654,32283,-31058,18257,-27918,-28209,-29920,4550,13924,1133,27958,4142,-9453,-13611,-15796,22825,-31964,-16183,19029,13916,27444,-15446,-7308,-21494,-25485,15342,-14723,-19470,26392,-2060,3760,15858,24302,16561,-16253,10770,5966,12007,-24691,13944,-14582,24220,13999,18743,-13844,9611,20946,-16130,5042,3510,-16591,17587,-24203,-20942,27609,-30244,13380,-2928,-4553,18628,-20327,-5727,15222,-17452,-3526,-20062,-21081,-15732,-6673,17479,-17623,8202,-6694,-32613,29314,-30771,-1887,12002,-4873,23575,28973,-19375,2217,-9297,-18681,-18163,-25098,-5892,682,24813,20822,31991,24115,-6185,-27354,-11410,30420,13072,-11331,-4920,29104,-20845,-18041,14705,29945,-12342,16800,23691,4688,9163,14863,29813,14421,25589,14218,-5098,10480,-18245,13473,-10643,-3402,-17460,-11432,14541,-13421,-8364,20196,18702,-21070,9471,-29931,-12043,-29168,-30500,24729,-8223,-10236,14931,11875,-20519,9562,-21937,1140,25925,-7341,-5388,-28138,-27237,6670,15565,7026,5999,12698,29358,6540,22250,30373,20096,-13880,-29833,16708,26006,28372,-23061,28419,-27615,-714
9,26869,-31505,-21840,27208,-6944,7014,1872,28874,11727,15438,2059,25730,30346,21011,-26128,-24255,11537,7986,-13884,-17536,-29990,21025,-7153,-27064,-7811,-2296,-26503,26625,-19835,-31075,-19653,-31761,16026,-15999,-14403,-8830,-25359,-26653,-11675,18507,7077,-12802,27265,-21999,12408,-1110,21199,-23151,-30489,-20398,17866,-24168,18371,18798,-26114,-32733,-22869,10717,-17514,3990,-5025,23733,-18574,12179,-5629,26546,17934,-25741,-24892,-1078,6873,9493,-26435,-2680,6825,19235,16076,16433,-19152,18304,-2727,-22460,23168,-17038,-8709,3416,-9392,2389,21107,-25389,1185,12284,-25482,21088,-31449,-17772,-10993,-27187,-9757,11903,-742,-23703,-13350,-30968,-10440,-2231,24547,15201,29757,30678,-20716,-8185,-29457,23722,19499,-11781,-13301,28472,16401,-18811,-10520,13244,20468,4610,-20545,13636,-7080,-11470,-23168,-24650,11252,15003,25027,27667,1890,-12866,20152,7888,-22978,-6854,-16149,15105,15578,-4107,-15461,21882,-14149,16884,-17478,-12950,-1809,-14722,11401,5233,-6715,21203,-29016,-9601,-8555,-13401,10113,14486,-15026,19899,-4880,-15819,30884,-11496,30259,-1401,13069,-11479,14078,-3975,-23231,27971,15020,9690,20642,21751,24193,-18341,-8707,28370,8017,-29024,10816,-23372,-16990,9585,21820,-30691,-20080,-6213,19058,14561,26148,-32074,-9201,581,27864,-14798,-8795,1005,-28916,20489,-32054,-22774,-12773,-26036,6593,3900,-30515,-24144,-1784,11168,-11496,-26693,-21791,9328,31073,-12518,-6332,15857,-21773,-10089,10147,-6633,10826,-21912,-14154,1668,29303,-27798,17826,6863,-28485,14731,-15333,12336,24968,-1449,15834,23575,17241,-1970,-26938,24800,30156,-25038,-11950,6527,2252,2091,-25,-23347,-28697,-25366,19779,-19945,-12205,-22823,15971,-10123,-4327,7282,11890,-18964,-2858,3239,-3175,-30554,-3733,1459,1823,28585,8590,26479,-13469,-31821,-3630,1222,12170,14109,26617,-20205,-16511,-4966,15118,22031,-16556,29835,-23418,25554,-8945,-12867,29816,-10519,26484,-13491,-7196,12164,22859,15110,14545,9846,-11423,16752,-8353,794,-28191,31313,11094,26860,-2760,-6538,-28341,-7147,-11973,21831
,-1155,3128,28117,-3481,32414,14948,-67,-28766,4001,15700,17429,26071,-28045,19760,2203,617,14721,24171,-24802,-6196,-5095,-24383,-26379,20823,-19463,-18253,16471,-14491,22736,30203,1851,-1726,-31585,10355,32712,886,10503,-22740,24998,25123,2283,-32267,-23210,29289,29789,-18976,-31440,-32139,-12879,9241,131,24540,-12884,-13686,7308,28053,25247,15944,-12592,29694,10294,14975,6330,-21412,-10461,-8025,5939,-10325,12105,-5347,7496,-14190,8885,-31609,-9201,25024,113,30516,32225,-15059,-29273,-8751,-1264,-1935,-5457,5108,1095,-17004,144,26278,7463,22388,-9889,30324,8641,10948,-30622,20811,-1586,-13056,-16686,1498,26330,5107,13962,12711,12157,-5168,8450,-10365,-23668,6533,-6682,30697,-16617,-8316,13233,31386,181,-16495,-13252,14302,-7627,17000,20832,21601,-27608,4900,6248,-14650,2457,24745,-11873,-8027,-29223,9328,-2353,-24855,8333,-25287,17942,1426,-20735,-22000,-18827,2466,2762,20065,23939,-9613,-24302,-3998,6648,1371,-17591,15175,21310,-4818,-27133,-7377,-19252,-2311,62,32506,10875,-2307,2401,989,11569,19065,-23645,-9846,-31741,-24648,21412,-24589,2258,31275,-1518,417,32655,15977,10459,12290,-3236,-2639,-28862,-16358,25803,32551,-27219,-298,-15011,10427,22373,9020,-2421,19019,21828,-15999,-27644,-14751,-63,-13447,28776,26214,20276,-25345,-16447,-16839,16604,22957,8369,25348,15373,878,-26370,-24718,13368,-16910,15075,5898,-28648,12530,-30418,-1384,7537,-981,-14207,1618,15441,-28648,3253,6337,15598,-28450,-926,12626,-1970,-27894,-12209,-5418,-10398,9086,8169,-15429,-19325,-3735,17942,-27611,23780,-2617,3101,-11991,11064,9921,17731,-17747,-738,-439,25117,12296,-30153,21473,32143,-25728,-18994,16423,-32285,28711,23788,-20107,9736,29717,-23476,8844,14646,4083,16110,-11404,4163,24326,-28108,-25661,14513,17543,1036,-20692,-3126,-21494,-31894,-12714,-13409,-7655,-25413,-3464,-16468,19384,-28172,-26677,8571,-29094,665,-27080,22390,3096,-25130,-28953,-23666,-28634,-7995,-11438,7089,-16270 diff --git a/tensorflow/lite/micro/integration_tests/seanet/add/add15.tflite 
b/tensorflow/lite/micro/integration_tests/seanet/add/add15.tflite new file mode 100644 index 0000000..221389d Binary files /dev/null and b/tensorflow/lite/micro/integration_tests/seanet/add/add15.tflite differ diff --git a/tensorflow/lite/micro/integration_tests/seanet/add/add15_golden_int16.csv b/tensorflow/lite/micro/integration_tests/seanet/add/add15_golden_int16.csv new file mode 100644 index 0000000..ff1f2a5 --- /dev/null +++ b/tensorflow/lite/micro/integration_tests/seanet/add/add15_golden_int16.csv @@ -0,0 +1 @@ +-21111,14742,8024,-21676,19302,17389,-13253,-9619,-10427,1605,15842,-16052,-16263,-12935,-3606,9240,28153,-11050,14654,22432,-12301,22947,5313,8229,-10111,15738,-31712,-15498,19847,12439,-1707,-1597,13887,14338,-16924,16050,12035,-15514,32767,-2510,22480,-3848,-5555,-19593,-31039,19773,17934,7595,7464,-2096,3771,-32303,-708,-25232,4960,-24534,-1842,15167,9492,-7398,-10421,7355,-26191,-7003,-20044,15432,671,-5330,-12437,9453,17543,-4857,-17696,10733,16909,10325,-17241,-4080,31206,3373,-19421,-22398,9610,-992,-18304,7663,-26601,16640,21055,-4940,-10700,-16764,-2872,-11294,-16842,1718,1758,-16022,21107,16611,29382,10595,27152,-12196,18269,-2321,30614,-32768,-12634,12242,-5850,24093,16082,-3737,-2803,20231,-29234,-16877,32767,-12251,-18320,21570,-3548,21442,-11859,-7990,2473,14627,-24106,14799,21588,12991,-2052,21086,22196,15176,-3769,-7452,-29745,9031,-13232,13134,3057,30405,29979,10817,14012,16121,19017,-14577,18215,-2360,-13073,-9229,24663,-26057,-24621,-7353,11298,-5443,-6856,-12625,-5561,-13617,-17880,5465,-17601,-30730,-10023,10528,20368,20308,26706,-30313,-17009,17264,14581,-21515,-12552,-26724,14454,31592,31031,15691,22317,-22086,11970,-12063,-22949,-2314,-15952,24728,-12701,-7136,13492,7942,-9019,32767,-1103,-16536,14746,23917,-18061,7313,-4868,-19818,-28271,315,15797,32731,15130,18509,-14588,8316,1351,4785,-6610,2860,-26793,-28715,-3487,-8915,-27768,-14639,12716,19423,32767,-7265,-174,-16313,-17076,6573,-18252,-8796,3673,16338,7312,2354,-6988,-
10911,15496,-1034,-19838,-7870,-18276,-17452,-14306,-30717,-12453,-29996,-17150,5407,-9474,-24996,-1157,8125,13664,-12975,17024,14855,8152,-23639,10259,19584,22896,-25868,-3715,22331,-3790,-20190,2345,-22638,32767,20946,7786,5094,-20644,10006,32767,10573,32767,-19386,-7081,8021,16861,-7198,-5916,727,-17113,6270,3124,-11450,18429,-7600,19213,20266,-23157,-16824,-5903,-7749,-11217,10625,-62,7220,24962,4382,28409,-25260,-7407,13911,-30380,-17290,-2857,-18696,18844,-3582,-10326,28561,-3249,-6721,16935,3344,5876,-6936,-25533,20261,-14777,10685,-13415,11861,-2088,8547,7863,10784,863,12580,10656,-23038,8475,14037,26180,10231,-1019,-12072,2452,23344,-14287,10993,31999,16939,-5728,11802,25047,4556,-25204,26871,5603,19357,848,-15471,-22939,-6013,-4710,-1561,12962,17603,-30532,16236,960,14960,18128,1035,-9849,9913,1835,-2317,-2191,-6050,17644,14971,-13857,-25027,25321,7575,-4435,15568,-14641,-21225,-20415,-17363,-23972,-9771,-28076,32767,14970,-19981,-20589,4946,-22692,4902,22668,6106,20672,-13208,-27690,2680,13716,-14971,20930,-2813,24455,15319,3459,11454,-22333,23742,23377,-2508,3671,-26805,11364,12983,15706,-11803,6844,5341,-5855,3841,-9614,-14627,7573,-16716,-3687,8553,-7171,-4213,-6202,24513,11902,1837,-27052,-303,-12639,-10704,3350,-24464,-16374,14467,10962,-18436,18707,20548,-9988,-4401,21571,-19778,32767,-19001,11204,28148,-8184,13857,-29515,1914,-17898,5749,5899,7159,3294,32051,4158,-11510,-6692,-5636,-3109,28407,21220,-4035,-5992,-8802,-12347,-1799,9491,-25848,-6005,-9697,20912,12562,-13536,-10570,-16016,4521,14856,-29009,-18624,-21216,-13948,18456,-119,-20417,-23182,-12331,-24093,-16041,3676,-19408,18630,3593,10266,13036,-17971,-6411,-5610,4339,-7086,-25315,-16327,-30,-11663,-32768,14059,3528,-13144,-2116,-18147,-26903,-6719,12321,-24929,7183,-11110,8885,-13824,7500,-2479,-497,-23204,-8816,17565,-13764,-31219,12236,-14160,2850,14109,-19368,14340,-3830,32215,16637,29920,26737,-6025,-2486,12974,3626,-16624,-21085,19426,-14922,-21348,-26881,6420,-8546,5981,7730,-1643,9
85,17301,3574,-24006,16649,-478,9217,22797,21673,11868,8519,-2159,515,12324,16977,4579,-18848,-20494,12548,5690,14829,-13927,-254,1833,2344,-19223,-11505,26436,-17915,12003,-13196,5269,6008,-24896,-13083,-4234,-9004,15330,6342,12681,22122,7529,17849,17506,15110,18431,-5047,17004,-14028,2965,-18294,-20738,-32768,-25342,-7056,-2891,8143,-502,-32768,-12592,-15068,21654,5428,7289,26961,1998,19187,-23803,-13316,-11582,10029,17628,4854,-20906,21101,13830,-24686,22487,-21099,-6037,24515,18968,-1045,9450,20678,24682,894,30810,-5143,-19996,17897,-13801,26753,20502,24222,1782,14940,13221,5260,30333,9957,-19929,620,-8781,13385,-25008,19830,-15901,3612,-10986,17153,-28557,18791,-18046,5027,17428,-8308,-16416,16788,25917,-20037,376,-5617,-2500,-20091,-14644,30334,15437,-11663,-8448,-21937,-7272,-27347,21614,14939,-23550,26237,23749,-15386,-7407,2234,19205,30246,12361,-16864,-32768,7045,-15610,-6491,22935,8743,-3353,-1648,10476,8518,21972,-15457,24945,5247,8107,-5741,10684,-5079,14614,2292,24101,8705,-21369,-3366,-17828,-19138,-12034,-11285,22808,-3093,-4673,7677,8120,18030,-32319,-640,3967,23731,4845,9670,16794,7476,18082,-11391,-2713,14817,-19232,7699,-21853,-14019,-10196,5498,9911,-3615,-1554,-14036,14867,-23415,-28052,-10990,-18897,-9551,-20343,-2954,-22929,9425,-519,-16830,8816,809,4399,-29323,-574,-14720,5949,-1648,-6096,-15070,20065,-21199,-8788,-1108,32550,-2585,21183,24130,14361,14869,14978,-8018,9736,15910,11828,20758,24483,9451,-7741,-12122,-23452,14820,3220,17465,16416,18902,-8867,5014,-19391,-7056,20762,30711,-15177,-24948,17344,19406,-25216,9487,-15572,-316,16028,4557,-6990,18192,32767,-1109,-2053,10781,-6126,2917,-29950,2774,1043,-12295,10025,2659,-1521,-3002,18495,10140,3821,-32768,-3972,-32768,32767,15439,24795,-13640,6412,21848,21187,-1406,-14164,2684,-17768,18640,-32512,26887,-23263,18596,19636,8221,-13417,-15514,-2869,-19785,-17747,19962,-22843,24478,-11574,-17190,17516,-11388,29391,-3189,-7434,3822,14228,8166,8819,11521,-19097,2710,-18067,-24616,-25920,32512,
-13744,23214,-15417,-11662,-6503,5169,16714,-21470,-20210,-4752,-27710,-5538,788,-4965,23158,11341,12307,9392,27773,-26374,-23667,-18283,-15491,14818,5925,24110,15663,18433,-25446,-20158,-18089,9690,3572,13705,1490,-10490,-5343,7176,3341,-8894,-14636,-11585,13806,-14494,16277,4441,-2336,9219,24833,-3102,-18642,-27644,-4675,-1969,-20861,-3796,-12695,5744,-25777,-27051,-15655,-15214,-599,-18592,-23896,14283,18971,29068,9221,-25634,18844,6142,-7916,-2477,-755,-27374,13325,-14616,14987,17136,-14108,-13689,6077,21150,20200,1402,6535,-9550,11735,-7752,-5301,-17185,-21228,25619,16650,-13293,5526,10912,-9601,-28520,-28164,-22620,-990,8418,-4956,-4594,19943,-21451,-1448,-19458,13808,-15768,27749,12177,-1491,-4958,-32768,12576,-8221,-222,-7104,-18213,1753,-8504,-23768,17043,-25689,20473,-418,2954,-18665,-1787,19757,-6661,15857,1370,12216,3488,-12511,-18502,11428,-9122,16563,22043,14540,-17036,-7554,24684,-6851,-12253,4639,-31006,-14640,-12444,-19921,-31481,24770,13725,-5445,-26945,29340,-28664,13174,15273,21694,23884,-16386,-12272,-20429,-8014,1525,-19595,-13031,15239,-20946,6187,-3602,32555,8188,16922,30670,4232,-18270,-28169,-1391,11799,16896,-21015,-3977,-13684,-2838,-13605,3658,-27453,-29374,-13481,19811,19355,-3434,16021,5096,15092,16678,-17330,-4465,-17131,25898,25737,9649,-23589,-22101,23265,-23909,-8203,11116,7811,4970,13001,15406,-4885,-4933,5984,4143,-5504,-17317,-15676,4694,8050,-21607,22815,-1031,25940,17576,20703,10015,-13058,-12633,-12492,26382,-20041,-13944,15699,5491,11669,-20675,-15083,12229,-6526,2053,-25872,11036,30481,-9114,-13483,-12553,14402,19013,-7157,7263,738,17078,-18070,25510,-1673,5426,-16323,-28698,13004,-7754,-28280,21047,26939,9313,16681,10991,-30862,13149,-922,-12734,-14998,-17418,31471,860,3330,11930,14985,-10701,-6492,-17856,-32768,20320,19898,20050,-1022,-19424,-14502,2800,-32768,-9182,24553,-5517,-9804,23156,-9584,12038,-22204,-11858,-31745,614,-16732,11822,25404,24281,15422,19584,-29841,15613,909,4313,-15021,7891,-6022,-8206,23096,-21178,-
14500,-21894,-21769,-22351,-9317,-336,-14498,-3602,5819,-29310,-13891,-10490,3234,25601,13408,22814,14295,22699,9367,-20002,-6501,28610,-27964,-23769,-771,-5363,-12228,-32768,20485,18479,4289,-15300,17826,-3569,1370,-31724,-10961,19951,-11610,17258,-26592,-6287,31970,3546,20010,-10205,14960,15352,25659,12021,23486,2954,12073,-3588,-15292,-21621,8259,3570,-487,10467,-24176,29042,-32768,32731,13597,12404,-624,16063,23614,-32574,11920,13752,-15422,-4375,17912,-3251,14032,-9105,32767,32767,26135,13711,-16038,-7666,-2139,10474,5468,-9497,-9119,-792,-4082,9546,-1573,-15929,28642,-30020,-23282,6993,17852,-9598,-3168,-21068,-8007,1327,-15197,25123,10438,-6471,-7900,-22488,24505,1300,26361,-30118,-23997,24581,18529,-19524,22717,-9551,-993,14335,16391,9484,26707,-16283,-4932,-18520,-11038,-4815,29938,-25145,7209,-3922,2164,-15336,-2577,-14691,7003,27655,-9809,-24574,1890,12526,335,21196,-7918,1158,-20590,23111,2407,2930,15293,-13374,18384,23125,4977,-15230,12053,9707,5747,18316,-15281,16388,-10221,-17518,18222,28078,-27304,-4795,15404,-12365,28287,-17378,19573,27040,11467,22580,19027,-14090,13391,-5646,-3404,14733,-21646,22509,5431,32767,16114,24244,-27062,-12842,4770,-3014,19710,3602,-17434,-32768,-18658,-32768,19752,19988,-13046,-16437,24487,-1667,12372,-4248,-11919,-23144,26547,27444,-5189,-9388,21970,-24918,-22635,-1243,-8237,16283,5384,17063,-20923,1498,-25585,21488,-15127,-10805,14129,-17238,-26282,6212,-7300,-26392,-13831,16001,2684,12795,22139,27497,9802,500,-23602,14977,-27378,-13449,122,-8210,-5998,-5465,3415,-20262,-19695,3134,30846,13723,21665,14130,12534,-23450,-19045,268,-16567,-19569,23735,-81,-20630,-4736,27111,-15999,7021,-11230,-5728,-12621,-19380,-8897,20142,22833,3990,5441,-10785,-6601,-8828,9266,-8065,10676,-25307,-21001,10384,-17074,9785,13300,31804,10883,496,7792,-10188,-127,-5178,-17160,11944,1237,-11460,-23291,-20062,-19534,-23260,10249,4220,-18666,-7110,21877,11093,29088,10911,-13552,-10455,9442,7159,-7339,17832,-13010,-17383,-5126,-8477,15632,20654,
-10738,-12761,-439,21857,28777,-22353,13492,-15062,-22915,4188,-1660,13155,-18772,-27486,4896,6523,5812,25285,22226,22021,13569,14020,-5872,-16687,8601,-22645,6028,-18889,-4491,-23808,-5878,-14525,30215,-15171,5237,-22933,-20663,21861,11720,-25953,22671,-23262,9968,7881,4099,11195,15426,-19613,7472,-6086,1361,-8826,17444,29535,-2229,12996,-3336,7612,19942,994,-5202,-2742,7778,8508,-16696,-19429,12049,-5552,8686,15643,-14377,-4967,31841,-14515,-19447,266,-14653,-10088,-10429,-5933,1571,-8340,-15587,24706,-404,-2951,-14282,-12966,9032,-19889,8772,-4665,-23737,-15511,4085,-2428,7356,-11986,32767,277,24446,-3756,23080,32487,-14615,8190,-1861,-9546,-24456,-32768,2500,-17524,5552,-10564,22682,13240,-9528,458,-2656,-10471,18468,-15422,27871,4855,5414,28508,-27842,-4467,-4190,-15311,-27349,-10138,-20784,-23232,-14939,6493,346,2710,4661,-7191,10225,-9665,-18969,2994,-24431,14065,-6145,-26257,-20716,-21368,-6667,8105,-25300,-17580,-21940,-12650,2397,-1389,31808,3389,14218,-15479,-4009,-17520,-4132,-4424,24983,5486,-24449,5919,11027,-19812,15094,-7055,28110,-15456,-22243,-16683,-18600,-19143,7184,9674,-11749,14833,-7229,-32268,14190,18589,-17865,-9990,-7063,-29928,-13633,581,16529,21248,32170,15713,12946,-5187,-4673,9968,27167,4893,6752,7957,18363,-16412,26869,-28056,15904,1707,3029,-28773,-6447,8501,-29606,5230,2179,-22605,20156,15917,-18786,18362,-4540,11891,-13448,-29674,-2672,-12775,25143,-21746,-21828,-7565,26377,3354,-8338,-4239,13723,-14117,-200,2944,709,2087,24086,-15162,-15788,8023,-11098,29260,5020,-18302,-11236,23082,-32768,15444,5240,-7411,-2054,-5524,-10445,-11632,-6807,-12751,15163,-4204,10058,29267,274,10090,10540,-12043,-10688,-3190,30429,-10358,-7877,-16200,-1590,21011,-20407,26139,6838,-386,3405,26896,-12739,-11181,-4042,25678,32767,-7281,-5862,11419,-5556,-32768,15493,3054,32767,1672,-17804,21027,6051,25170,8729,4857,-8575,-8336,-20952,-7710,6211,8737,-32768,7323,-16543,-32768,-25345,-32418,-6885,6612,-660,17777,-23951,-21131,8644,-20444,-1917,22059,-21147,1
8271,-23472,3435,30375,-26165,-13782,-25258,19694,-9548,15984,13930,-18395,-4568,26670,4576,9699,-11864,13420,-8651,13029,5993,-19022,-25614,-24205,-6624,10416,23072,17984,4363,18638,-30669,1792,18139,-2963,-19533,-10736,-505,-18830,-8265,-9109,-30259,801,25447,13203,-18656,-5084,27046,7358,2099,-31494,-4086,-18376,11814,-24847,17823,-1223,3103,20109,19662,7420,-338,16197,-26459,-16827,-10311,22084,-13051,10169,-18586,7785,-4840,-16556,-22450,19877,28854,-420,25339,15402,-2685,-6217,-24680,-20646,2901,3062,-10902,-17353,25635,-7932,15932,19996,11195,74,25467,1339,-28030,16411,27309,12904,-13177,4142,6039,-712,9910,26223,30782,5674,7473,10475,-3155,-12706,-14697,15321,19606,20455,30626,973,-7324,-22830,28007,7922,2843,6644,18535,-11372,15437,-29618,-2013,5927,-1481,4830,-14625,32767,-18403,-20524,1119,-9625,19487,-421,-8903,3923,27367,1444,-20295,-20710,16464,20502,22740,3606,-16248,7528,-514,266,10846,9316,-15815,32767,15443,-14129,3514,7362,2336,17621,4659,10868,20158,27337,25208,-14739,-31058,25613,-32461,24449,-655,20737,10020,-724,14295,-19477,-21042,-4104,-14909,22676,466,-8574,-6631,28017,-11472,15554,20024,-8966,-9912,7396,19985,-32768,7386,4066,-10624,-18877,-10684,-3912,-1947,27831,15493,-4363,-5258,22832,30746,-4364,-22464,7273,11061,14989,-1519,24986,-8160,15941,28509,9339,-24607,20430,25112,12498,-19776,-19165,7255,5245,-13210,-8534,815,9778,-17130,-23810,1484,24407,-6203,31979,-14907,13896,-673,-17112,17770,-8537,13018,6831,30370,27127,-10607,10489,5801,844,-7969,7765,-6931,-32768,21763,3083,26976,-1437,-25717,2856,5073,10427,-2464,-31528,15116,813,16163,26565,25543,-3604,27726,26204,13303,19345,-5542,9297,-20567,13636,-15010,-15261,-16105,22636,9003,-15281,6815,-22808,-7936,22315,16734,-12143,-412,-7027,15906,7787,14677,12888,10496,16479,19784,-23649,-22881,21262,-16293,-8776,12487,-15374,-31584,-31181,11671,-18142,-29855,9875,-17756,23980,25752,30027,29262,-22596,-16011,-6984,-6725,-2489,-25295,-26179,-20662,-447,-16866,3471,245,26562,-18954,13469,-21
342,-2800,-26081,3650,10422,5260,-19548,1705,-4312,-545,10878,-27998,18987,24043,-8334,18090,11749,8126,-7202,-19548,9812,16038,12906,-15593,-9187,16919,-13143,-376,13534,-24794,-9990,11536,-20001,-6830,-15215,30141,3239,19542,-19183,-18532,32767,13785,-17593,19523,17943,-8955,29871,27567,13593,-2568,7199,-5518,-21507,586,21060,13334,14329,-23618,32767,-21713,2337,13577,-18767,-4351,10090,9541,-25661,-26963,-12393,-18502,-3552,2403,6550,13782,-6998,-6764,-3738,-9091,18297,-23271,7139,17206,-22820,-13576,-14170,-5941,-5630,13780,-9589,9140,-28493,13761,32767,-25104,-25099,5451,-14757,6132,8156,7013,-878,16159,-13348,-9668,-23034,19796,1305,-13184,16691,17469,3392,-16767,-205,-21404,4368,-21137,-6780,-12657,15447,14106,10659,-17007,32767,26368,-29274,24568,-11826,-23692,6892,-17754,10167,25363,-585,-3301,-4308,26085,-30459,-25844,2289,6553,-19029,-14856,32767,16916,-9245,-1620,24151,-28681,22146,-5917,-17038,27497,-18688,-13675,-3165,22641,28730,27163,-354,3536,10930,28176,4782,-18550,-5311,-14510,21595,-18632,-16522,-4518,7949,10118,1659,22580,15767,-19397,18291,-25329,13573,6381,2550,-17930,18450,13272,-5203,-19101,20760,-809,-10488,21920,-2642,3685,-17616,10644,-7240,8742,9856,-13401,-6415,-7804,16786,-4012,-32768,-18237,-5426,-11192,-19354,-13649,8633,14899,5863,-25908,9001,30430,12628,-31949,1198,-32768,-29906,19726,16615,7392,15311,23902,9184,497,11587,25829,-26535,-20463,22154,-331,24564,12905,-12915,17144,-17786,14498,-17194,20973,-892,-10174,17857,6986,-26912,13871,17764,27249,-3763,7980,-25374,-13154,-14303,-5588,22135,-16573,2276,-18986,20473,8196,17841,-15900,-4945,20440,5510,4071,12310,-7635,-3139,26246,-19098,20018,-9970,-18454,15143,-160,29822,-31494,-8020,17641,-9004,-22077,-12934,-9046,1827,8989,32767,10543,-3437,1988,3463,-6227,3211,541,3441,-16623,-15870,-14388,-9372,1063,3268,19404,-10932,17689,-6695,-22490,-32768,-13717,-13353,-2310,-19593,4212,14676,-1992,19824,20770,4465,-5228,-23685,-2814,-17546,6677,-11600,20133,-15565,-11597,19923,-20361,2308
1,-11741,-6512,-1676,-12601,1778,7436,-15856,32029,15383,1692,22755,563,15690,8946,1711,3639,-9333,-4231,-21519,-281 diff --git a/tensorflow/lite/micro/integration_tests/seanet/add/add15_input0_int16.csv b/tensorflow/lite/micro/integration_tests/seanet/add/add15_input0_int16.csv new file mode 100644 index 0000000..ebc41a4 --- /dev/null +++ b/tensorflow/lite/micro/integration_tests/seanet/add/add15_input0_int16.csv @@ -0,0 +1 @@ +-14999,28782,21684,-18578,32520,29260,-8730,-3881,-4317,9292,29073,-25177,-24886,-13706,-6762,23998,32117,-10176,22592,29058,-4246,17812,-2970,13529,-14475,18993,-30767,-32028,11048,10705,10633,-14922,10933,5746,-10930,15040,18884,-28579,29628,6642,30374,3305,-5996,-24742,-28622,13165,11469,2994,18621,-12973,14127,-26750,-7976,-22058,13287,-32251,-7687,12723,22973,3800,-11829,17376,-23311,-4659,-14610,25299,8439,2702,-18936,15988,27167,68,-9053,4546,12172,4719,-21071,5995,31505,3028,-29286,-19298,1564,-865,-25079,10500,-21060,12686,20888,-599,-24880,-12787,3195,-16988,-30422,414,6775,-21926,23583,17002,32568,5801,28448,-8493,11759,-4335,27170,-30151,-14794,2632,4086,26149,16712,-12890,-11108,26531,-24229,-9064,30502,-25885,-24732,24924,-12884,20942,-17014,-9226,4467,25727,-28202,13607,31884,19215,258,19118,27171,14777,8216,-11185,-32679,-2124,-3494,13674,8466,27090,26764,634,13203,26053,23555,-9321,13254,-5011,-27290,-22516,24994,-28386,-18425,-20773,11527,-14673,-1570,-2891,5954,-25830,-25210,2889,-14241,-31485,-3152,26412,15429,28661,30793,-30447,-11829,27988,6686,-29656,-18910,-24458,8518,31110,28964,14540,14400,-14269,16516,-4126,-25380,-8123,-29208,30085,-3352,1102,18574,5846,-19944,32366,-14792,-20125,18811,31213,-27258,-2722,-11848,-25332,-28573,-3756,14920,28022,20007,11746,-28438,6739,-4158,1967,-15341,9284,-25941,-24521,8768,-9742,-29864,-17876,18323,20602,28946,-3610,2027,-19386,-10722,7165,-24541,-1306,-7778,7373,5859,15172,-12427,-9940,7920,1883,-14477,-1500,-9886,-27983,-13253,-28404,-17159,-28898,-19480,2299,-18320,-18341,1134
8,1039,3718,-11314,18754,10408,4008,-21928,2000,12311,25974,-21525,-17894,18182,2104,-13409,-1133,-21187,32630,28092,18699,18513,-19537,24946,30778,22079,29487,-30841,-8215,7774,14067,-20141,-19927,4550,-20091,-2905,6177,-26050,11948,-770,15675,16342,-32011,-24099,-17562,-566,-7918,7461,7893,2557,28800,-3989,32020,-21323,-11044,19663,-24690,-21084,6658,-23318,10527,-3653,-11418,31527,-17405,-11969,8716,7769,14422,-6563,-27039,29520,-30972,13294,-29481,20973,1905,5912,16572,24961,-3035,5566,3804,-30777,-2801,19336,20703,4948,-8723,-4766,-2927,16005,-24815,20540,26099,20261,4597,15749,17737,7602,-22571,27890,-3538,30816,-906,-5656,-24907,-12893,-19169,1837,3946,25947,-25599,21784,7507,31636,24251,11166,486,17696,-3575,-4420,-10507,-2720,23521,13166,-14468,-21766,23337,-3604,-907,12363,-10646,-29438,-31155,-12777,-21136,-9494,-22130,32751,12084,-23230,-14409,19541,-18482,12138,22676,-4254,30326,-6599,-29005,-9329,12455,-27435,15881,3575,28829,5700,1555,24681,-23428,29064,30653,4983,4685,-21749,5600,10347,11339,-16482,-4728,13415,3075,17653,-7101,-18078,337,-7714,-15867,17205,2953,-4361,5930,19338,26320,-10888,-25107,2856,-2063,-19703,-3161,-28164,-23048,8510,23916,-9135,31642,18948,-22237,-3936,16025,-27448,31035,-17504,18934,23353,-7312,22863,-24146,-1052,-12142,17580,8864,12052,14818,27712,17313,-11305,-18540,-17506,-10281,24528,30431,-16757,586,-4372,-18376,9153,19519,-18781,-13522,-6280,32172,20063,-9575,-2299,-22818,11758,24822,-28929,-28764,-25067,-8591,28212,1317,-30024,-15483,-4306,-30378,-32313,13107,-18159,26817,9839,17404,24284,-18434,2589,-17660,17616,-9790,-22792,-23075,-4217,-14122,-28348,12277,13674,-6300,4701,-23527,-21273,-19728,8797,-25361,5437,-18284,20254,-27018,11713,-11187,-13647,-25956,-5272,24629,-10259,-31724,6554,-6077,8762,23378,-30613,26657,-13812,29326,8339,32328,26592,-401,-1614,16347,6803,-25066,-12597,32172,-23245,-24941,-23666,12978,-8068,5864,22492,5693,-9148,13076,3280,-23937,27857,11679,5826,17024,26810,19707,22101,-1067,1113,19723,2
6207,-735,-19639,-24396,25812,20278,14042,-7260,-2682,7427,3522,-10955,-11209,26623,-27654,12491,-9819,5021,677,-27588,-4355,-9602,-9054,20973,13452,13465,26003,-1119,26181,29363,15169,20195,2436,11130,-5818,2385,-10715,-17834,-32562,-22369,-10311,3815,1538,-7170,-27899,-27457,-18039,20300,725,19064,26617,-8848,30399,-23902,-2808,-27477,13736,25237,17344,-17514,18412,27951,-32758,23088,-31469,-10725,25402,16638,-11998,13915,22670,30707,-7971,24989,-13733,-12721,21853,-27673,21761,28933,23310,10671,7601,22119,14289,28264,9653,-20428,-7959,-2129,10577,-31363,20140,-30716,-8244,-13083,25604,-26971,18183,-21331,-7087,31596,-8122,-28453,23636,21721,-26419,10686,-7729,-10984,-18385,-29667,27164,14545,-8089,-6590,-22853,-10780,-32121,28536,29068,-31120,28039,32545,-9945,-2179,4459,21050,31851,25788,-16645,-30009,3388,-8020,-14499,18490,11598,-8804,6455,21640,-1032,15227,-22370,23074,19540,21542,-3983,1600,-8204,10148,6718,30110,5026,-25002,-14930,-15830,-30067,-6118,-5384,17377,8731,7108,1286,280,22899,-26979,-7161,4203,23693,12135,19537,9539,-1528,26977,-10842,-3688,18790,-21717,-915,-32183,-19163,-17394,19814,13387,1179,-12543,-23801,19538,-18748,-29273,-17924,-26011,-17,-24819,9756,-26705,21568,7171,-19143,23340,107,14339,-30160,8100,-27479,6615,-983,-2556,-6001,26390,-22026,-7980,-17,28163,-6731,29290,20139,7059,29809,16285,3569,13400,16856,20450,13824,31298,15917,-2399,-6863,-19428,23218,10395,16849,19760,12042,-19481,14837,-21183,-20797,17450,29059,-22395,-29706,22039,10606,-31610,16967,-28720,5907,24416,-2838,-2551,9127,30937,-3044,-8209,19800,-12560,-2764,-26808,6186,-9915,-2278,12289,12881,9588,6948,23660,4534,-6229,-29942,-11571,-31367,32422,24625,23846,-19476,21411,20759,20461,-12932,-8766,-5695,-32625,17794,-30677,24866,-23068,13138,12546,6434,-4630,-31908,-7670,-11970,-16119,19131,-14979,20641,-16105,-22437,24712,-21494,26232,3921,-15769,7025,9037,-664,12683,5175,-30101,-3980,-28150,-24568,-22394,31112,-11361,32592,-27087,-23771,-13065,-5935,21160,-23509,-1170
0,-19205,-26441,3572,1659,-8248,23876,7416,11785,21659,31104,-22303,-16861,-29423,-26356,31408,3065,23264,20678,22928,-31770,-27106,-27058,23054,11672,11367,5479,-3650,-8816,18140,4970,-4872,-15151,-2770,9787,-27160,30911,-6820,4766,23732,23003,-4498,-18059,-24767,88,11116,-18261,5584,-24641,13216,-28049,-26962,-16888,-22302,-11685,-24229,-21389,25616,14091,22478,21095,-24749,17193,7656,-9378,-11022,-3496,-22154,15550,-9478,9390,7961,-17243,-19093,7029,31268,18187,13665,14360,-24553,15600,-22261,5670,-30431,-22908,22614,26272,-19251,16997,22306,-2872,-26332,-31768,-16846,3069,-700,-19346,5358,12059,-23391,-519,-14313,15652,-8682,27190,25887,-796,-14667,-31821,19504,242,-6184,-4547,-10773,8666,-16215,-25924,23715,-21752,17035,3956,-1138,-12846,7569,29316,-842,20156,8080,28084,5064,-3462,-29241,18276,-15197,26861,30548,13736,-15507,-21899,29729,-3551,-4214,17048,-29880,-29156,-4773,-31983,-27355,17366,26159,-2961,-30415,30451,-22474,9998,27006,28092,28245,-28955,-4236,-23639,-6910,-2978,-32193,-19385,12025,-30240,-3341,-11945,32400,-1258,14809,24128,17755,-12821,-30528,4341,1062,22110,-27368,7675,-12384,-3882,-9936,2116,-25723,-26462,-29702,23299,28572,-7852,6765,2809,22077,24787,-24507,3397,-12977,25424,28283,17056,-26617,-32739,28906,-32065,-2740,18667,3096,13915,7363,16552,232,-12214,200,10973,-10685,-21759,-30708,9266,19138,-25441,29808,10080,31597,25291,29429,20584,-15606,-22672,-11459,20799,-21525,-29291,28510,67,14367,-14779,-31668,20775,-17132,8130,-24597,20572,24084,-3914,-12319,-9927,11952,22548,-3978,5191,-8340,17480,-27055,30322,1882,2317,-18664,-28442,16280,-7919,-32043,14662,25534,22069,10344,13740,-29915,11284,-11280,-25165,-21840,-10966,28870,1434,-3431,19065,11293,-6219,-555,-20410,-32302,11957,29170,19053,-5496,-18227,-23909,-6240,-30336,473,31917,-2747,-15223,25232,-21018,7286,-22084,-18961,-27288,-3311,-30534,27680,25106,24286,17981,22288,-27487,10495,-3467,10737,-26181,-3087,6105,-2354,30120,-30608,-23147,-26107,-31389,-18050,-10550,12645,-28131,-
8183,-3597,-24718,-13054,-21421,4509,31669,7176,17429,29846,26280,18907,-12025,3490,27436,-21692,-18731,9998,-17009,-21634,-28971,32070,13745,18809,-8411,26196,3039,-2288,-26338,-2181,26596,-25530,12925,-28499,-14704,30915,15164,13392,-22074,24176,17874,31697,4807,27438,4505,9425,-16027,-17986,-21427,10866,13526,-10273,4583,-22677,22558,-32109,29851,5846,9445,6251,24188,21444,-28616,13143,5234,-15855,-18464,28434,3779,14593,-17057,31559,31697,21583,25153,-16749,-20997,2245,23982,17458,-9244,1619,9393,-8715,2464,-3487,-26605,27800,-24991,-25443,-4868,9845,-2266,-8534,-16028,-9644,-3380,-13378,29881,2695,-4465,-9167,-26534,28803,10120,24001,-28363,-23718,32030,30447,-20652,14373,-24968,7258,22562,28313,24998,30822,-29675,-6023,-16637,-16913,3470,29263,-21728,14888,-577,-7383,-11062,-4665,-11639,19490,20467,-25542,-22759,12156,4577,-11510,20706,2955,-8866,-12462,27376,-5499,-9806,17235,-11387,24716,18087,17991,-17775,27516,14524,2257,27961,-24182,29742,-13671,-13597,19422,25299,-22469,-13088,30473,-15608,29416,-27299,22125,27369,26273,22574,18288,-30479,23063,-1987,6048,14811,-21770,25558,12331,30063,8197,29268,-25428,-7792,2435,1622,29686,-8987,-30355,-31947,-13311,-30719,16964,12436,-6783,-7673,28998,-15463,23148,-14235,-22378,-17592,26661,31935,-848,-14793,20867,-26499,-24493,-2846,-19183,30546,5364,11900,-28811,703,-24684,32463,-6869,-7223,7126,-31928,-27937,12148,-1974,-25018,-24078,21747,1242,22328,24130,30139,9326,8305,-26006,27841,-28802,-12924,11338,-10511,2283,3426,13413,-26826,-22069,-6189,31994,17928,19609,22811,24398,-16968,-24995,-8441,-25632,-14402,25558,6484,-32324,5587,28412,-28334,10668,-13428,-6261,-11423,-21019,-3949,16466,31734,1402,-2726,-16241,-20493,-13257,20338,-12813,9009,-19908,-31846,22744,-9687,2764,11213,28137,12050,1137,6905,-3311,-7653,-10286,-30160,8032,-6573,-25114,-25941,-13978,-15788,-19360,10973,11451,-16741,-4189,19141,26912,24721,24298,-27177,-16836,20562,6810,-17572,13627,-13256,-31878,-15980,-6733,29905,24231,-12215,-16375,1364,
24796,21933,-30652,3352,-32012,-23084,14141,6318,16121,-28292,-25579,-6478,9415,-2760,25184,26908,13946,17553,28775,-18703,-17704,23958,-29935,-3350,-25139,-11921,-17907,-14021,-15753,28238,-20054,11074,-16888,-16139,15951,19207,-28754,24860,-18400,1826,20632,-5467,3569,11450,-14695,-2171,5107,5698,-7880,30110,26483,9893,2707,-14748,18521,15290,-10686,4981,-12549,604,7054,-19998,-17070,26623,-20374,9517,31438,-21873,-9760,26352,-20675,-18540,10172,-23676,-8411,-15181,-1811,6649,-940,-17429,17303,8956,-11851,-12653,-8443,18775,-24649,12698,-1203,-32076,-30032,16491,-4422,17671,-15860,31000,-9070,27599,-7821,25537,26559,-8387,5562,-6566,-17872,-25786,-30727,11986,-29522,15998,-18290,23014,14820,699,6877,-11760,-20556,21622,-21853,26539,2764,14448,23872,-25376,-7271,-12189,-9138,-20547,-13173,-13062,-17705,-13133,8710,-11632,9157,17722,-3745,3055,-7470,-13188,11385,-30957,11205,-2607,-24626,-12731,-32045,-21559,1832,-29497,-11526,-15411,-11661,-6855,542,32203,4454,25689,-17542,-17790,-28990,1410,-7622,30510,10860,-18001,1498,2949,-27152,5056,-20078,25268,-23383,-25323,-18954,-25869,-25259,-4359,21038,-19122,13356,-11747,-28095,25570,20791,-15987,-7348,-1395,-24626,-19400,12739,20988,25040,31970,9092,8003,3774,2916,4333,24306,-939,-4997,5185,27781,-30774,26242,-30211,29873,-5069,11087,-32023,-5295,23178,-32747,14366,13283,-17748,19901,17505,-15709,27007,1031,22233,-27493,-27141,1293,-18433,21286,-24362,-31733,-9243,20997,902,-22943,-16758,12551,-15515,1970,3029,798,-9813,20129,-22362,-28321,8102,-26103,28695,-6090,-25640,-26468,20528,-28133,21865,11686,-20980,-13959,-15595,-13036,-3910,4274,-28837,27879,-7939,23414,27295,1604,18545,23187,-22116,-2495,1498,26152,-11252,-16684,-14545,-5103,20508,-26091,29900,1950,1954,16707,27035,-18962,-6311,-378,24325,27705,-4668,-2244,1731,-812,-32180,7846,8725,32686,6649,-9095,19808,10538,21104,12887,4049,-20260,-19390,-25380,-8416,11160,8877,-31687,17959,-20914,-30610,-28130,-27034,-14627,11641,3990,30894,-24673,-20199,13462,-17283,4
783,18929,-17711,23083,-22888,-776,28458,-28923,-27801,-31401,25195,-22441,19522,16414,-32212,-10718,19635,12006,25152,-23086,4820,-3553,21481,14223,-26421,-27823,-31187,-8045,25954,27447,27222,10766,14045,-27313,8162,17220,9505,-25154,-5838,-3979,-17357,-13640,-9860,-28458,3610,19711,28754,-32258,3393,19681,1873,5118,-28949,-699,-27104,12969,-28042,23223,11850,-3657,20039,29828,4344,3597,7152,-27779,-13729,-2486,15215,-4447,25691,-25191,14289,-546,-23823,-29613,14943,27182,-12153,21245,19834,-3064,-15012,-19045,-17673,2577,1606,51,-17707,20066,-16848,30791,15065,7939,-460,28835,-5003,-29865,7700,25778,17138,-14312,-351,10946,-11995,19494,21135,30494,8536,17382,2771,3917,-5591,-12353,9817,14949,18292,32648,-7697,-11524,-29668,30458,2421,-2581,18468,28229,-14115,15667,-27279,-9812,1692,9435,-1485,-21833,32383,-10857,-23856,12681,-7297,25692,-5726,-22305,5693,31685,9932,-26751,-18498,27560,15216,15709,3480,-26862,-725,11997,10670,17346,3717,-25316,31947,27387,-26863,14372,756,5291,25192,15588,9084,14564,21673,29134,-7442,-24726,22205,-30997,30551,4073,20704,25845,1840,19212,-17069,-23551,-9926,-24016,18211,10119,1402,-11951,21074,-23647,12185,19242,-16959,-22225,15512,24942,-31527,560,-8088,-6287,-10927,-7637,5818,-315,22679,31729,-15991,1143,23374,25843,-13825,-21694,11810,26824,12944,802,23214,-7640,18500,27956,8109,-32734,23230,25837,18920,-29757,-18528,17902,14464,-12122,-6584,12248,12357,-27330,-19547,-5872,32075,-14033,32065,-8063,15749,-9225,-15492,27090,-13200,23900,4584,24600,21863,-13207,25964,17396,4868,-23161,11036,4912,-28383,30229,9962,21471,-1113,-25206,14651,-1042,12411,-13988,-31717,26933,-6114,9127,28075,26325,-806,32578,19703,28177,12438,-19080,13600,-25537,23830,-7543,-19982,-25479,21315,19112,-7222,15890,-25892,-9055,14948,14141,-14562,1775,-333,13947,22678,14810,9290,11862,16066,23742,-22013,-16061,17379,-7100,-7923,13830,-28182,-28841,-25955,3296,-9662,-28433,18750,-32223,17506,25940,23613,23877,-14524,-21961,4720,863,-6908,-24426,-32010,-14649,
-6865,-23417,15443,4844,26617,-19837,6309,-23879,5418,-30177,252,9891,7572,-22718,6769,-16859,-13918,19137,-27908,22528,32184,-14930,9714,7911,14709,-3696,-27626,14290,6956,17721,-27665,-4817,22670,-13067,-1150,25022,-27529,-17270,6383,-32569,-2486,-6696,28388,-7400,15921,-25268,-23837,31509,24734,-20045,21418,21710,-10944,25988,20587,18233,1894,21361,-15288,-15910,-1962,16667,23476,25988,-17580,28955,-21554,15656,10701,-15749,-7519,10811,13897,-21876,-28137,-11419,-32176,-7760,8030,-969,20297,-14044,-13175,-6997,-1400,16209,-17219,19659,30020,-19027,-11065,-26225,-15314,-2247,19421,730,-282,-32083,8359,32702,-21903,-25924,-1106,-22810,-575,15579,3267,2923,19086,-14436,-23526,-23546,18513,14709,-25805,30213,19652,5839,-7364,9498,-25013,-2060,-15819,2823,-6489,13924,8271,13377,-12239,32397,29252,-23141,32036,-21933,-21353,6701,-13242,24591,30810,3526,-9945,-6204,29606,-27631,-31269,4498,3874,-22658,-21731,31772,27933,-12704,3922,19345,-25667,30647,-2361,-26209,29722,-12937,-16276,-1877,26191,32443,23628,8366,-228,26881,30469,15377,-22469,-11249,-20952,13051,-22935,-27085,-14798,17933,2130,5419,20470,11857,-30701,32396,-22189,24667,-3164,2283,-14542,20822,11803,-9629,-11970,20084,446,-2612,14393,-6910,247,-19843,14870,-18314,13508,23306,-28143,4657,-6745,31833,4636,-28720,-19914,2886,-21272,-24333,-13599,20574,6866,12090,-32133,4497,24702,7577,-28579,10411,-30155,-32567,11852,7751,10077,19056,20019,456,9519,20227,28646,-20500,-29977,20794,9042,26039,19399,-2313,17306,-8479,16885,-17791,21068,3828,-18270,13036,-4614,-24238,20473,11779,30262,-16192,13527,-19946,-20524,-8142,-10665,24122,-9987,-5514,-11988,28444,-2775,27758,-16190,-6012,28870,4936,-3849,11841,-11308,-8051,24028,-22708,30344,-9447,-11545,14752,-4190,26172,-32234,-14476,18734,-23804,-20671,-8332,-352,9280,20218,27126,14095,-15700,-6347,-2716,-8925,17024,-1930,-1300,-16165,-14892,-9641,1087,-5219,-1187,16898,-26713,20817,4345,-23473,-31286,-11467,-22782,-13291,-28351,11243,14517,-2373,16484,14748,5223,-1354
4,-25876,1135,-23132,-2139,-21568,28216,-31082,-22349,28806,-13702,15611,-3395,-4992,-14370,-22955,-3551,17378,-25409,31602,26992,-7354,17861,623,27763,14186,3407,-7656,1973,345,-23714,7237 diff --git a/tensorflow/lite/micro/integration_tests/seanet/add/add15_input1_int16.csv b/tensorflow/lite/micro/integration_tests/seanet/add/add15_input1_int16.csv new file mode 100644 index 0000000..e122e54 --- /dev/null +++ b/tensorflow/lite/micro/integration_tests/seanet/add/add15_input1_int16.csv @@ -0,0 +1 @@ +-26433,-25773,-28569,-19464,-21269,-19070,-18251,-19179,-20524,-17686,-23212,13174,11846,-5286,5633,-30549,5983,-8219,-11079,-3612,-26259,25088,22948,-8256,4955,833,-19808,31373,32228,11065,-30758,31312,14813,28682,-23835,11312,-9897,22993,29748,-23499,-6647,-19410,-2006,1612,-22995,26892,25532,15313,-22832,25122,-22936,-31269,17168,-21613,-17378,5085,13105,14286,-27325,-31144,-2357,-20145,-21434,-9534,-24205,-15310,-18398,-22350,8828,-10565,-13557,-14584,-30662,20880,20789,19252,-275,-26597,16524,2699,13100,-19867,24752,-856,6254,-2618,-28088,18749,12040,-13218,28347,-18873,-16245,7514,23502,4100,-11151,5409,5682,8235,8542,17438,11874,-15686,25826,3583,25241,-27491,-1763,29984,-27239,8349,7367,20049,18516,-4040,-28248,-28205,28916,26171,5368,3819,20595,13059,5900,-1429,-3451,-18734,-3426,11060,-12944,-7859,-6714,16408,246,9350,-31040,4902,-9349,31941,-30840,5955,-11379,24814,24336,30582,9698,-15086,-453,-20756,22054,5102,27124,27001,12831,-8775,-28576,28360,5690,19293,-16561,-30494,-30893,21981,7828,9245,-17844,-15159,-22139,-32559,23189,-8957,4886,-16429,-21915,-16368,27133,7778,8423,-20244,22331,18623,22143,11453,31462,-31092,-4367,-25843,-6811,12755,23210,725,-29606,-23848,-4823,9452,21412,31979,32464,-469,-1673,-4408,12240,28288,14174,2369,-14895,10011,10848,29467,-3422,26569,25399,8407,14058,9452,17441,-13941,-16867,-26002,-31535,-2930,-10281,-271,-6519,7886,28629,-12846,-5415,-1590,-24788,2202,5108,-22958,29697,30688,7552,-29667,9279,-8377,26867,-7618,-23915,-197
41,-30371,15798,-10451,-22565,4488,-19231,-3849,10496,16135,-29894,-30853,21610,31582,-11183,5230,18955,14517,-17198,25624,28395,5217,-24788,32204,22366,-16336,-27541,9699,-16016,22439,-5689,-22063,-29607,-14083,-30565,30319,-21956,30604,16962,-1173,5031,16069,27294,30583,-8834,-2262,25632,-5650,28946,25845,-20701,19166,20681,8593,8279,24907,-21636,-14170,13515,-19254,15255,4522,22646,6976,-23473,4695,-6209,-30537,-389,-24567,834,30509,-1807,-3069,8618,32407,8966,29218,-8843,-17401,-4734,-10474,-11174,30960,-397,31402,-15460,-10800,11090,-16695,-28292,9896,23899,22444,5965,31927,-5044,27703,18418,18049,-24322,14350,30633,17540,-16990,31940,1335,-28111,-3013,31503,-4840,-20292,12388,25182,-16988,4705,-32265,-7923,13300,32332,-9071,28947,-10430,-28791,-4431,-15287,-32022,-4775,-23906,-30414,-13326,14085,3799,18882,-11388,-4447,12635,-6183,-21709,18788,31196,-10975,16347,-17743,8114,14666,-20676,-20099,-6069,-29882,23505,15245,-3194,-26309,-32530,-22714,-14773,12508,28405,-11899,-23266,-12127,30494,10627,21841,23765,-16989,2948,31705,6513,-25626,-9698,263,-4658,-19485,-422,-27031,20208,13545,19231,4782,31740,-16555,-24810,-31248,-11386,253,21667,-30989,27391,-16178,-28424,-1972,-32739,26051,-28256,31758,-19650,-7801,-32537,15827,17583,-4581,7074,22387,-25239,-32661,-20914,15221,24073,-3557,25322,7600,31165,-14117,-12483,27141,-6629,-14102,-29283,8225,-23799,-25408,-3902,-7864,-26021,28197,-29485,-6856,24927,25564,15609,25071,-10527,28507,-19203,-15567,7742,-27455,-18984,-31360,14844,-13614,-15646,-11181,-17050,-25826,7582,-14987,-15867,-16227,14206,-2421,-20651,-13371,-3536,11927,-31412,-26205,1868,30449,-20753,-13744,-9484,-13105,-11571,-19971,-8813,-25289,26014,-29680,2616,-20087,7280,10098,-505,-31986,12075,-22564,-23800,-17639,2968,-28471,27716,15325,-12735,8188,11193,-22559,24237,-6034,19670,31496,-6174,-13435,-7360,-16074,-16035,20490,-27354,-12709,-14598,16463,-21834,21999,24785,29244,10719,15126,-16917,-3482,-978,-5672,11210,-32160,-20060,11863,-3118,-22623,-12
297,-5877,3588,-31394,-18631,25025,19769,2687,-13434,-17877,-29636,13287,26547,-433,-12379,-28108,-3833,-1161,-11064,-12916,15371,-8507,-1898,-25113,-32102,10096,-23804,5725,-12501,-1551,-30601,-7073,14160,13629,5455,-15451,3512,16200,-7255,-28319,10629,-4856,-5162,-13672,5113,2850,25054,-10265,-18973,8209,5924,-20869,23589,-27588,3039,-28421,-18476,-25686,-21189,3964,-17800,20458,15834,-32530,28956,-1151,15240,14363,-24422,15731,27309,-16486,-12917,-32748,32001,-3414,-8640,-27495,-19749,18160,-26475,5859,10975,13393,7990,11406,16112,25885,-5564,6614,-916,21912,31092,17911,-28627,332,25888,26847,-9038,15590,-20492,25989,-14191,-18908,21763,6237,-9809,21070,-20925,14183,1534,10210,27005,30642,-1005,-10938,-19614,11853,-2037,32047,-24599,-5041,20010,-7266,24461,4346,-24702,1997,19115,-15225,28203,24423,10687,-15081,-9157,-9911,4456,-3580,-4778,-25881,5274,10148,-8125,-21650,-16725,-4141,6155,12838,-25608,-9850,-28955,12730,-26965,15761,23414,-2067,11317,-20488,-21184,27782,28439,8160,18306,-31633,-27978,-7420,27853,4743,18868,-9427,-1197,13699,-3032,26078,-14680,15829,-20944,-20493,25726,-30276,-31046,19683,23430,-1800,-30762,15402,1623,13207,-14935,-18495,26810,25887,-11498,-7621,856,-1409,-4625,25068,12881,4679,11756,-31549,-2920,-13580,25690,15834,-3070,-24217,-12553,10680,6744,-28314,-428,-32342,-3548,-24129,-18867,-3713,-30218,2142,-21584,-14183,-21273,22690,1680,-2517,-11923,-30241,-4192,-9717,-6810,-3248,28589,8589,-7881,22979,25579,-27877,5120,-32427,-3473,6506,-14293,28224,-2933,-10398,-17186,-19405,-22682,-12099,-15556,11142,993,27020,20743,-20961,-6387,29299,19475,20963,9050,-2293,-1758,31985,1513,-12830,23159,-15211,-11408,20386,-14587,31955,31406,4061,13738,-15833,12160,15338,-24143,-6709,27052,-30998,72,-23226,-27679,-25699,-2256,19147,26394,-26751,16164,-21611,30355,-13660,15996,6562,-32694,14707,13463,27069,-20870,21728,26076,12345,-22401,19741,-13328,23463,27982,8861,-28644,31034,10014,-29817,-13741,13041,-31624,22799,4550,3177,-7706,18123,23877,-1894
2,16029,-5625,20405,25846,-4462,21700,16033,17661,14377,-13720,-22843,21352,-13352,-9827,19675,22810,12261,29684,-1505,-6940,-31729,32293,-18379,-25070,-1670,5189,11064,15750,8064,-24447,7300,-24411,-29525,16809,17689,-31892,10185,15369,-3460,-674,1217,5646,11671,-26932,-17597,13224,-8813,-22323,5438,-22523,-2089,-14634,-6846,-27699,17340,22591,-26362,29662,-18451,-29968,18145,1657,-11711,-22228,-14091,-32703,-17811,-24760,21845,-14878,-8757,-15165,-5672,8716,26452,3345,-19265,-19487,22276,31988,-23591,-16304,14404,-263,-842,19277,6205,-27741,1990,-20490,21804,31639,-224,5492,1059,-12756,16028,-28854,-15293,30969,-2852,30771,-29437,22506,-7673,21420,-14044,7048,-24660,-21499,-21564,-21049,-6857,-26453,-10354,26682,32027,-26584,30069,-7169,-3044,-23183,3177,-25836,16688,-26393,-2503,20718,-31660,-9788,-24991,14283,-10105,-28042,-15733,13929,-7926,-6702,-23710,19621,-10799,11518,-24374,-23591,-12177,-17741,-1622,-15456,-31586,-1880,-28777,15720,-10229,9637,-15728,-8365,9978,-13110,30484,1454,-11758,-26193,-27417,-19857,26979,-25412,18133,-27368,31577,-22456,-9012,-6507,13532,-30798,14955,-19908,-3467,2663,21311,-26199,-3536,-7095,11721,19608,8150,16188,10878,26439,18166,18368,27347,14458,32755,-30332,-23263,-9870,-14619,32462,-3259,3734,-30350,-10704,954,-16384,5748,-19351,-23270,31741,2522,-11571,8777,31217,8341,-8535,-10373,7763,-21463,-19503,15457,8073,-12562,-5720,13488,-771,6493,-17732,-12101,15709,-18864,20806,5745,-15063,14865,17283,-14212,9476,1162,27654,-8453,-22340,-2677,-4286,-27415,670,-8925,-9642,-20001,-1062,17272,-9400,28068,-7490,29374,-22276,16138,-70,-25671,31735,-13888,22017,-13547,-17379,-16940,32301,-17601,-10264,-13282,13880,1968,-11636,9019,22341,8466,11721,2473,-9515,10510,-3366,-16479,-728,-3887,-6539,27059,18284,-25673,24529,-568,-19344,11773,24515,22996,8243,-25215,23677,-913,18176,-10646,17202,-16743,-17931,-3699,-29150,31435,-11405,13491,10245,-13627,14712,23390,-31062,-28402,-4222,-9741,7675,7782,22327,18135,-12561,10608,-28312,9822,24099
,-31781,14759,13407,2341,4289,-22180,20993,11075,-13137,18661,30885,-32628,-18673,-4207,11080,12879,-1920,11211,-22745,-2170,-31547,24925,9077,25965,-27292,-9698,20612,-1294,-511,22468,25620,-29672,3893,-17873,-30327,-27732,18649,-30608,-25309,-26444,25173,15967,-30470,-16670,21650,-32710,-25100,-10370,-17937,9595,-30547,-27271,-5029,27216,20007,-10088,16862,20218,-26110,27047,23036,-13997,2390,-408,24073,3431,-2115,13071,28069,-1944,-12417,-1735,-22081,23375,20000,-16984,31715,-22882,25049,26241,14004,-16955,-10752,18293,-27566,3632,28180,-7477,31623,-15523,-18781,6400,14180,22873,22514,25441,-20068,-7145,27972,-11773,-26847,-25947,-5860,-30983,-25045,8939,22386,3755,16989,17865,-28740,-7647,32522,29212,-23018,11215,-23820,-471,12105,-12793,2389,24477,-8423,-1305,-2652,3159,-20591,20270,-20885,-13937,-4413,-18555,-8064,32713,31970,-20483,-11954,-19744,-32241,4817,23356,-90,-14785,8093,-22679,18177,-22152,-14568,-10250,24261,-18801,3621,-15494,-26299,32652,32590,-17965,-23759,26128,28804,12898,-30647,24857,-31016,2468,20433,32391,3760,-12191,-5137,24953,-28691,-2267,-30699,-6273,11608,-13180,13061,-23206,2687,-19155,7170,22231,-26773,17385,-27894,1001,12905,14364,4651,14149,-29436,12493,12302,31810,-15967,-11962,-24719,7953,-11663,5072,-13670,26770,28033,1261,-18905,-19299,8279,-12866,-13208,32405,21584,-24595,-23230,-25092,17652,29292,-22341,-30259,2633,32410,-19199,21781,18682,-26205,14395,4316,-13356,7870,14808,-9952,-8020,3185,21894,-25460,3023,21904,7493,2749,-16318,-14641,-28311,-14627,24729,25966,-10527,-10908,-16903,-17905,17114,-5038,4966,-15960,7425,8813,6566,-18580,-7236,-22802,-11689,-8701,-27031,1022,-23321,-24501,-22267,4661,-5149,24258,14274,-2576,16942,-13164,-21736,-28620,3851,21190,12745,-23298,8714,-15906,16853,-27558,11839,20960,-4930,-897,-1878,-9870,-6750,-16871,20014,-8885,8458,22740,7221,29916,5823,-21629,7014,9929,-27032,14596,-24124,-27283,22372,12393,26438,3195,-1275,6449,-22246,18112,9478,21926,16052,19553,26656,-6471,-25788,-19847,-22279
,3915,-15139,-14966,-10988,18700,-32089,26628,-26312,25428,9640,-21647,4800,20666,20015,-6596,25415,23392,-8899,-25845,2773,-2365,1680,-4598,4978,32439,7698,31956,32628,-12256,-21733,-20193,105,12625,-19798,30187,-3382,23921,14218,971,31681,-2126,-27901,27755,-6764,-32351,5097,25988,4660,15468,-27414,16426,-5062,21476,3413,-11208,-27279,-22350,26361,-11611,-7577,7241,-24604,25180,-26450,25376,24612,18132,-22723,27427,-30406,-9727,-7164,-20960,23696,-30520,32041,25727,-22149,22262,28769,-27478,22179,21630,8215,-1250,-16437,-28553,32743,2792,-29515,10164,8834,30858,6860,-12940,-23786,13703,-9626,5718,-13239,-11399,-22487,-4164,31539,-22838,19871,-11828,-18093,-18549,508,-4637,-10944,7028,26510,-27715,3477,-20855,2735,28710,22735,5892,7745,6819,32276,-23125,10875,10338,14840,-10304,-32354,-21536,19303,-22169,12828,11732,3500,-29975,-15256,20527,18579,2587,7015,18622,7734,-18835,26956,-21344,4307,17009,-23376,-31549,1730,-30143,-26192,-12621,-1769,29132,-14078,-28980,-12299,22974,-10645,-24452,-18618,2264,14683,-11943,-18451,-30741,13988,32296,19634,-3843,-24344,-27901,-9382,23677,-5433,16625,-701,-19858,-3569,31081,18028,-15673,5281,455,-9951,-29091,13953,25610,6784,32593,27566,22401,10611,-4852,-3732,7282,4196,31859,-22109,11319,11765,6920,-27915,-19651,4952,-14411,-11904,-17599,-29349,6398,-29053,-1637,2582,18262,24680,19097,-24518,-20919,19124,21927,16796,32119,11094,-12606,25629,16364,-10300,-24959,17315,-17795,-8051,-6347,-30763,-8773,-19184,-25624,-24228,11755,4960,-17817,-10740,-15969,-18414,26500,-22521,-11055,6611,23213,-5698,11866,-128,27575,7778,30679,27904,10415,-4425,-5354,1423,176,29905,22873,9015,21556,4242,30118,17536,29617,7614,30592,18928,-29466,-6977,-12679,28688,27628,21280,487,-25085,-30535,31816,-22342,6702,-26709,20938,-3061,-14852,-24731,17681,-25702,-13090,27151,-3566,16924,-12953,7609,12827,2454,5359,15588,-5866,-30256,14528,7995,-17947,-11087,17461,31469,-10338,-11982,29718,-14531,-23760,27038,-12015,29433,-11100,-30882,14566,-7496,23734,-522
2,4636,23493,22101,-881,-2555,-8525,4490,-27619,-21651,1419,-29287,-7278,-30923,14899,-8497,-11599,-21867,-11494,-13929,-6863,-18935,-17247,19753,-19988,-1527,-14382,12072,21420,-7796,26253,882,-2406,25874,286,1697,23215,12335,31736,-15423,-31976,20557,28195,-17099,-13220,-16572,7363,-8820,3492,-227,-31784,2182,-12379,-13059,21399,-25058,-14400,12244,-31760,2786,-17766,8113,-13967,8417,-3221,-21074,-6345,27922,-30274,22552,-23290,32743,17320,-6134,-23554,-10440,10931,3740,-6013,-3195,-32260,18047,11283,-13694,11532,-9693,30804,-11433,-16785,-24604,28800,-28000,-31883,5686,-11411,-13050,8406,4900,22905,19986,28116,23894,-2195,-569,17812,-27254,-18592,2386,5210,-32488,-8736,27623,17159,-27096,22966,14053,1331,5937,16063,-11057,30116,18793,-3099,-4539,13143,-8518,26866,-17677,26786,17708,-3779,-19811,24402,-18831,-24211,-13786,21764,22088,16530,12042,21485,6101,3905,9558,17670,14677,-24894,-13178,343,7977,-22019,17731,13508,-27191,17928,9331,23832,-28401,-3293,-27315,-10943,-4221,12583,27458,-2107,4692,-19710,4383,-16789,-17708,24103,29554,2296,16665,24100,-30510,-24989,-9711,18675,14214,28487,-20322,22958,-24291,20030,-5848,-8552,-23831,10316,24655,28792,4447,-25775,-32463,22388,-21476,-1230,-11786,11541,-32697,-6595,-3978,-16583,-5567,11797,13762,23321,-23065,-28841,9188,32259,23076,16737,12955,14356,24272,-15521,-931,-22637,20575,31612,-16351,-29641,-13266,-25671,-5018,27829,-30663,25683,-18371,11310,28839,20446,-14276,-6943,-31971,13225,-6447,18089,-5765,2627,17093,8134,6037,4527,12127,-8609,13185,-12131,-21714,-19374,-9929,-9427,-27172,-826,15177,-23459,18594,-5038,15489,17467,-24774,3204,20290,-13371,-12696,6549,-19098,9204,30725,27710,419,-31592,-24809,-9257,32299,-3610,-32445,-29802,-8426,-14917,28209,-1578,-15448,-26918,17577,969,26480,-16967,-20196,17185,25933,11034,12227,-8753,3602,30190,-28583,27378,29645,-5259,642,-17092,-26337,2972,13746,15703,-19448,-27917,-18160,-5153,-1682,30133,15514,-867,-5512,-20057,13523,-31673,7790,15817,2501,10104,1370,-17022,-29
123,21132,-31215,-6911,3656,22449,-24082,-29860,26686,-30516,-19935,-15986,25140,28894,13777,32091,29184,-31990,5528,-32138,-22051,9302,-16080,-381,-25947,15258,6505,-27008,-10976,14546,-8342,24743,-5666,-21404,-4518,10228,7043,-2680,-3145,-11292,27932,32009,-13942,-15690,1938,-6380,11330,30236,15765,-11413,-12452,8712,-5397,30806,-4500,20549,-15635,-4545,-7448,1663,-20275,-7095,12067,18826,19310,-14270,-28992,20893,27494,19549,4101,2576,25648,-18836,-3798,6211,815,-144,25891,32098,-3697,-12201,-30237,20555,-25410,6481,22254,-17133,-20251,-27640,28184,-12384,-30889,14453,-17665,5249,3834,-5251,-23326,-12064,-9201,22812,8204,-12266,21785,-8123,13157,11750,5809,-23607,15157,-27484,-26304,-21451,-21775,-13569,21294,19363,-11284,-6014,-30232,27816,-7075,20657,25457,-21607,-11877,18855,11302,19592,-13426,12925,-9669,1859,-4749,28137,-11494,14039,-31664,23208,-23444,4379,-4037,-31984,-23556,-3109,17945,-24529,-26949,-21898,12217,21893,-675,-20918,19310,7606,-30997,-4466,17883,-18744,4271,-20713,-29231,857,-10255,14228,2201,5909,-23666,-1175,-4073,10093,-1748,8399,29990,-17269,3249,-14285,24960,-23133,-8300,-11861,12742,9820,-24223,-1275,-4860,3937,6906,23553,-21265,11049,-32499,10033,-22954,-783,11412,7546,32579,99,16391,22340,-19730,24890,-8168,17576,18160,16590,-23970,-21586,-19302,26587,2055,-18095,4465,10885,7819,-27784,13107,-3480,-24825,30300,8851,10342,-4356,-4329,22754,-6682,-27049,28212,-30297,-6873,-27077,-23112,-31837,-6027,-23082,18169,1333,-7665,-24078,27643,-11803,721,15855,30658,19183,-25798,-21597,-25017,-10098,29927,30598,-2401,-587,22592,26162,-21522,-14472,7470,-29246,11678,15531,-22828,10012,-8557,-32754,9085,-32316,2246,-8060,11363,-11896,13938,21518,31888,-21333,-8285,24278,7780,27949,-8991,-27138,10536,-22791,9178,7434,-25072,20080,-27400,-7945,31036,-14100,-8086,-156,-9071,4432,21385,7937,4655,10133,19863,-1834,-13885,-6775,-26890,9313,9648,25299,-15617,11165,7108,30782,-15597,-18266,-26004,-16997,-22161,32097,-2756,27728,21235,16843,3078,-31598,62
69,13356,-10294,-11135,-19420,-30448,15766,12571,16780,32086,2219,-30373,-10056,-27199,-13016,15401,25254,10332,-14659,8494,-179,19026,26028,635,17202,-7795,-11096,3800,24990,17672,-8401,28888,19568,-10451,-27340,30805,-26652,-7272,29743,18053,13858,-19911,14316,18732,-19546,22791,24399,167,-20499,-7715,-3153,29300,-32475,-13393,-6590,-18319 diff --git a/tensorflow/lite/micro/integration_tests/seanet/add/add16.tflite b/tensorflow/lite/micro/integration_tests/seanet/add/add16.tflite new file mode 100644 index 0000000..51381c9 Binary files /dev/null and b/tensorflow/lite/micro/integration_tests/seanet/add/add16.tflite differ diff --git a/tensorflow/lite/micro/integration_tests/seanet/add/add16_golden_int16.csv b/tensorflow/lite/micro/integration_tests/seanet/add/add16_golden_int16.csv new file mode 100644 index 0000000..6933a1a --- /dev/null +++ b/tensorflow/lite/micro/integration_tests/seanet/add/add16_golden_int16.csv @@ -0,0 +1 @@ +6772,-32768,32767,7228,-5824,-22490,-8612,32767,3516,-32768,-25856,32767,-5166,32767,32385,1853,-32768,-18208,1624,-32768,32767,14870,-32768,-4207,975,32767,29809,-6762,15647,-6216,6325,-32768,1522,-32768,32767,-20886,-1884,26710,-5254,32767,5547,-6481,21009,32767,-32268,16414,-32768,-32768,-21675,-32768,-29188,6690,32767,778,-31917,32767,32767,-1596,4597,32767,17361,-31219,-15495,-17374,4137,-19910,29401,-32768,-32768,960,32767,-28850,-32768,19489,-32768,32767,32760,18618,-32768,-11277,-28257,-32768,32767,-32768,-17193,32767,32767,-27467,32767,32767,32767,32767,4219,-32768,21401,20817,-32768,-3058,32767,-30288,-32768,-5979,-32768,-1723,-6192,30280,32767,-22367,23731,-32768,-32768,-32768,10016,-32768,-27831,32767,-30984,18692,32767,-32768,27374,-11326,-32768,16074,-32768,30501,-32711,-32768,26923,17567,-21316,-7436,32767,-28114,-32768,4469,-3081,17667,-10551,-32768,-9692,7305,30631,32767,32767,32767,-12649,-32768,32767,-32768,32767,-32768,-26602,-32768,32767,1047,-20847,-1767,7866,-751,32767,13750,23512,-7830,-553,21905,6381,-32768,-9041
,16300,-12368,6918,32767,32767,22551,-28716,4329,-12710,32767,-32768,-13435,-32768,4596,-16213,-18898,-15675,32767,32767,-32768,3048,3800,9950,-32768,32767,32767,32767,32767,-32768,-32768,5287,-32768,-32768,32767,-32768,-32768,32767,-32768,30697,16780,15342,-28658,-32768,-13820,7759,32767,-32768,5212,-32768,-29814,-32768,-32768,-32768,32574,-32122,-22082,-32768,10528,32767,32767,26474,32767,19634,32767,32767,11298,32767,-32768,-20926,-13616,-16050,20568,22420,-32768,32767,-32768,32767,-28614,-32768,15868,2592,22912,-32768,14539,-15693,-32768,16886,-32768,-32768,-32768,-1850,-32768,17490,-32768,-12786,15474,7940,-16080,32767,5336,-32768,-5225,28343,-18736,2649,5557,-13753,-32768,-29181,-32768,32767,32767,-32768,17188,-26330,23362,32767,-32768,32767,-11103,32767,-32768,1335,16326,32767,32767,14769,-7258,-32768,30350,24197,11606,-32768,25804,32767,-19403,-32768,-10568,32767,19350,17546,-102,-32768,32767,-32768,-9608,-7224,16876,32767,-32768,32767,32767,32767,-29904,20177,-18261,10563,526,-14991,7071,-32768,9218,-20162,32767,-6097,-17221,-18410,3326,32767,-4490,26306,-4996,9549,-32768,3809,32767,12943,32767,-15008,5459,32767,32767,5465,32767,32767,-9479,-22637,32767,-32768,-5613,-10448,-32768,32767,-32768,24170,-7746,32767,-32768,32767,-8661,32767,13834,-31445,-32768,-32768,32767,14212,32767,-32768,-22664,32767,-24127,14600,-32768,-32768,-11092,18804,26172,1492,-32768,32767,-12673,-32768,31982,4965,-32768,-13244,32767,3523,-20168,32767,32767,20669,32767,-32768,16646,7966,-32768,17742,8556,-22799,3580,17949,-32768,-3155,-32768,5030,-23550,-6066,-11013,32767,22875,2681,-32768,32767,-32768,-32768,32767,-32768,-32768,32767,9370,-17271,625,32767,28434,7320,32767,-32768,20308,-16640,32767,-19281,32767,22885,-1274,17706,32767,32767,32767,-32768,-18899,13619,-18816,32767,3479,-5396,-32768,-18166,32767,28409,14629,540,-496,-32768,32767,10677,-32768,32767,17520,-32768,32767,-28138,-32768,32767,-32768,-24656,32767,-24915,32767,-29021,32767,-18978,-20387,-8810,-26999,32767,32767,-2
0060,32767,-7230,32767,11287,-6138,10632,-32768,32767,-8619,603,-15946,26127,-32768,32767,29048,-32768,32767,-8467,-32768,-12256,32767,32767,18531,-16526,-25508,15794,24578,32767,16464,-29071,-4475,-32768,363,32767,-32768,-12470,32767,-24932,32767,-13324,-26279,-13547,-25828,32767,-360,32767,-28693,-32768,26919,24419,-32768,-32768,13133,32767,-32768,32767,22869,-7424,-80,-32768,27991,-4176,10597,32767,-19635,-20488,-32768,12490,-32768,-32768,32767,32767,14269,-30178,23957,-32768,-12161,-11571,13527,24824,-32768,-4479,-32768,-32768,32767,-32247,11,32767,-3055,10894,-18404,-13083,32767,-4472,-14353,-32768,26490,-7010,30497,-32768,-32768,-832,32767,-32768,21109,-32768,-29316,14446,-32768,32767,-32768,-32768,-32768,-32768,-32768,-32768,-32768,-917,8222,-17077,32767,22437,-11385,13112,32767,32767,32767,32767,-32768,24787,21089,-32768,-11582,-24613,-32768,28041,17173,-28973,-4712,-32768,-19662,32767,-32768,-32768,-32768,-32768,-32768,16721,-32768,-32283,7778,21838 diff --git a/tensorflow/lite/micro/integration_tests/seanet/add/add16_input0_int16.csv b/tensorflow/lite/micro/integration_tests/seanet/add/add16_input0_int16.csv new file mode 100644 index 0000000..5df7724 --- /dev/null +++ b/tensorflow/lite/micro/integration_tests/seanet/add/add16_input0_int16.csv @@ -0,0 +1 @@ 
+-12391,-21606,20466,-18748,18178,-8265,-9709,26526,-8872,-31106,643,11910,6279,15089,-2392,-14602,-1648,13014,13741,-20821,24271,-13863,-23444,-21007,8657,9689,32001,-13908,25601,-27678,26822,-24800,-7510,-16838,27122,4918,-18367,-8087,-13153,22501,5433,4144,18084,25649,-26631,13322,-29923,-8921,-27830,-19008,-19028,6981,20575,24542,1667,7839,5637,6578,3652,19360,32300,2984,-24646,13558,22091,-5019,22675,-22660,-4904,8692,24276,-19642,-24727,6716,-29744,14110,22080,-8108,-30447,-23780,-16037,-18205,25781,-21269,-26138,28575,11854,-20467,18314,13877,6770,20300,1214,-24346,2693,3728,-26825,15542,28842,5702,-10546,14382,-14037,21730,17385,-3356,26071,-31287,2114,-16933,-11346,-29553,-15303,-6416,1401,18649,-13434,19975,7535,-28682,8892,-13753,-17294,-11608,-30684,5738,-21589,-26978,7514,10314,-1379,16837,19011,-28569,-29299,19773,15573,17859,2660,-19979,-4552,13252,23731,30670,14469,15473,-2916,-32227,30749,-29091,16496,-28672,-14752,-30096,5785,13545,-28092,-14834,28744,-3655,18027,2463,18106,-23150,23749,-6637,3242,-18649,9928,789,-15437,7062,14034,32026,17601,-15863,-1732,-29122,24941,-20354,-10154,-31490,-16889,-8648,-11499,3745,5980,31075,-24450,701,14488,7104,-30926,32669,30483,7957,19902,-32602,-24990,-3300,-25062,-31612,26956,-24881,-3082,22748,-11095,10749,4384,22314,-18687,-9217,-17508,-10935,30303,-28797,20478,-10051,-3264,-23830,-8285,-29724,4484,3008,-2338,-30871,-3590,27436,31916,8391,17340,10699,26338,28506,-16863,22867,-31740,-10641,-1788,9296,2165,-8343,-9143,27525,-27387,25410,-6908,-15101,28476,8125,13377,-14628,-12948,-29043,-30211,-13105,-12880,-26060,-15826,-6729,-2524,26202,-30617,-21694,15718,-865,-6106,23911,-7064,-13387,12174,10109,9110,-17171,-11407,578,-32649,-18581,-14511,9181,17494,-22674,12639,-11670,30240,30769,-14315,21091,-16453,10591,-19602,2888,-11887,14471,27609,12927,-5757,-22163,11283,24327,25985,-26093,26397,8504,-23041,-26891,16743,20434,17365,21249,1390,-20214,14361,-13133,5302,14207,-13826,20661,-32069,19946,28085,32709,-1909
8,3614,2415,23117,-13709,-13213,13897,-29640,-5933,12057,22838,-3392,-16220,-28311,23121,8225,-3942,21605,15075,6641,-18864,22466,26559,30390,10443,-1860,-7039,29194,27318,-2600,19851,26231,-13918,-11584,5759,-27385,5500,-21769,-29321,11665,-9045,4534,17357,24723,-23556,18649,-3723,7067,12357,-27912,-5305,-11254,20952,22445,15474,-28921,-11166,20585,-3160,-3665,-16985,-26466,-11697,7601,10885,-10797,-30836,31892,1387,-14594,11552,17850,-6225,2808,29687,5801,-31690,29635,14261,-608,10711,-10559,-5453,24438,-16331,2497,-13860,-7977,23071,12775,-18497,-7206,-5393,-12405,-3631,-10612,-17118,23135,-8725,-5874,-7336,31190,-26561,-12755,13543,-23835,-27131,25065,-7260,-25242,-16749,27730,-6307,-19632,31749,-17436,-289,-12440,7335,-5817,19913,2608,-14120,-13595,10571,18070,23298,-15745,1781,-9875,-6272,27012,19802,-6706,-19596,-20323,3647,16522,23629,-22538,-16878,-32647,23915,-15287,-29363,31609,2364,-17163,20191,-31341,-24129,9707,-16158,-10902,14852,-19015,24993,-29131,9250,-28883,11574,18207,-13975,17334,30383,-15177,29041,3779,24920,-4450,2254,639,-4243,13836,18119,-9386,2643,23505,-31833,21166,3930,-31210,12674,-8143,-11157,-19892,870,15699,16692,-21661,-31518,1714,7331,463,29011,-3318,1689,-15506,3319,22828,-30735,-18191,31081,2030,30349,13470,-22086,-11814,-9923,26213,12621,27610,3850,-13534,-1664,14548,-14953,-24463,21122,18436,-20761,7969,-5471,18463,-6748,-10998,27131,11714,23538,26491,2780,-18170,-17507,836,-25702,-4312,25209,21629,958,-21716,6692,-8782,-16348,-20641,3641,21477,-939,10707,-20693,-32494,26649,5008,12872,11229,-24277,-17503,8150,-24656,29937,1760,-25003,-32599,27411,-12095,28708,-12613,-21587,18268,19792,-20188,22017,-16782,-18658,-4225,-25604,21180,-28943,-19945,-2708,-25579,-23385,-219,-19185,-7974,-8248,-22729,20555,-7981,16126,13850,20910,26486,18837,27143,-23546,31852,-6830,-30071,-20384,-14392,-24548,773,19155,-13028,-612,-30685,-12409,24347,-29134,-26026,-32234,-10938,-4796,-2836,477,-5304,26656,6056 diff --git 
a/tensorflow/lite/micro/integration_tests/seanet/add/add16_input1_int16.csv b/tensorflow/lite/micro/integration_tests/seanet/add/add16_input1_int16.csv new file mode 100644 index 0000000..45f69ef --- /dev/null +++ b/tensorflow/lite/micro/integration_tests/seanet/add/add16_input1_int16.csv @@ -0,0 +1 @@ +21842,-15014,3119,30657,-28787,-6817,6093,30510,14583,7356,-21329,23505,-12441,16145,28825,20887,-23996,-31726,-16989,-6217,23848,30212,-18985,24607,-10741,26406,-18955,13142,-21658,31888,-30663,-20843,11193,-23917,24983,-23079,22935,31905,13332,24517,-2833,-10643,-7415,21342,9866,-4720,-10987,-29340,19849,-12342,2194,-3987,26097,-32023,-27490,26351,26876,-10012,-1217,4719,-29210,-28689,20508,-31789,-26104,-9091,-6875,-28412,-23979,-10800,20730,3278,3269,6500,-25511,26509,-3424,25526,8802,22696,-1047,-19304,19323,-19157,21148,4060,32546,5470,13354,26768,28977,32045,1726,-30886,13365,11526,-16488,-23091,-8574,-31567,-16081,-23862,-26557,-30264,-28024,28440,-4961,23899,15980,-29196,-30896,309,28283,-24818,-23901,27902,-6668,-11764,20314,-24725,9850,9322,-9405,28166,4446,16521,2810,-17298,11326,193,-15045,-28280,29053,15733,-8175,-22758,-23151,-9762,-11892,-2257,-1621,-11840,-7306,3968,14445,26896,-6138,-7043,8241,-23370,7393,-17160,-1445,-28203,27371,-17185,20853,18329,-31999,4266,11569,7612,-5462,24588,-32023,26172,741,-18859,-20363,11858,10737,-3914,8761,-3714,-5551,-1642,5731,28666,20723,-14386,2866,-24434,26101,-1337,329,-17393,25154,17426,-2697,1481,-16259,-1569,15003,2136,-10177,24676,13843,-23106,5692,8575,-25986,-32334,23717,-27195,-31890,13868,-29270,10012,7457,-17528,2160,-26689,12341,20687,15273,-32044,-23107,-12978,-19267,-19212,-19976,5282,19830,-29436,-14376,-16414,13111,22506,8798,9804,3863,1318,3925,30467,31373,14367,-25393,-2418,-8404,-25072,13407,28849,-28992,10788,-17225,9485,-13471,-15954,-25306,-8753,352,-7979,28733,26199,12134,30800,-10283,4806,-26025,7484,-27110,-20998,-19418,18727,-8651,7438,-4612,32163,13620,-28461,-20328,8999,-26952,24934,1957
1,-11659,-17907,1605,-27030,16369,20350,1098,-3199,-5329,-21718,17118,-32421,27999,13090,26860,-23022,-2784,28737,23357,2351,-5497,1909,-25558,9027,-13193,-25368,-25414,-14674,28110,15279,-27802,-30635,847,-7772,-14366,-1929,-12444,10406,-14988,-14659,-24615,31751,30637,-754,32150,-618,-13116,1720,11171,-17672,-22380,18649,5702,-12883,-8361,15190,-32000,2078,-317,7935,23074,-28116,15980,1687,-7903,-24005,-1271,-31836,-26862,15899,-30168,32546,-9410,13684,16934,-6311,7785,24728,564,11004,-2519,20392,-24328,-11759,20678,-11224,18020,-20434,13109,-29217,-370,-28804,8364,-1907,32729,-5480,12222,-29564,-23160,13606,-18597,10395,1485,-3096,18510,-14902,16435,-15657,-14065,6773,4781,6248,15541,8520,15027,-11880,-19154,9961,-19808,-21021,-14222,17685,-4925,26176,22657,16405,17175,17803,-26297,20433,-26193,-4925,10728,25208,-7444,-27848,-2777,-23120,7085,-19395,20481,-13819,9310,14045,17405,29717,9935,-23168,-12118,-10443,-27663,22713,-31786,-24085,22691,17075,19894,22770,26783,30903,31906,-14802,-26787,16465,3368,18530,-7531,9428,14653,17770,32101,30619,20097,11866,-25855,-17334,23917,-6558,7495,-23581,4646,-23150,12644,28912,522,-19841,30402,22054,-30284,31479,28785,-24988,21310,10729,-26587,3918,19401,-24465,13317,-19515,-5025,22772,5560,2271,15762,31577,23385,-31536,-31190,-2793,12435,15933,4300,7720,-10751,17269,14856,-7858,7569,-25060,15214,-30922,12960,-16142,-10572,-22974,18538,17775,10878,24208,4125,-14058,16750,26271,31174,-7526,15722,21719,10227,9712,29503,-25546,-18607,-5790,-8783,-4127,3743,-31241,14319,-11079,-22442,12286,-28465,8564,4985,-7255,-5270,-17070,6754,-27841,-32041,23529,-12,-9970,-9018,-17692,10835,-26424,16501,25385,-30433,8911,-30792,-13918,-18886,-22913,-8514,-19245,7942,-7617,8778,-2367,-27322,-3479,16429,10025,4985,10070,-14423,12112,18289,5869,-8906,-25146,-17786,-24131,2459,13151,-32195,-17110,11914,29868,31904,-25412,22431,19052,-5882,21887,12438,-15479,10535,-14031,-27292,-18312,-24954,20486,-8487,-12566,-22792,1600,17058,-32125,17855,-7804
,-3198,-27322,-17127,1265,-31985,-25643,9879,17480,16706,3978,28381,-30462,-8037,269,32343,1379,17016,4669,-22734,25783,8240,17938,-349,-4963,21176,-11877,-5616,-2917,900,934,4200,-231,-931,-20159,-22283,-29959,17012,-27162,-18509,-29292,9238 diff --git a/tensorflow/lite/micro/integration_tests/seanet/add/add1_golden_int16.csv b/tensorflow/lite/micro/integration_tests/seanet/add/add1_golden_int16.csv new file mode 100644 index 0000000..6ca5354 --- /dev/null +++ b/tensorflow/lite/micro/integration_tests/seanet/add/add1_golden_int16.csv @@ -0,0 +1 @@ +-8707,-19547,32767,11875,11028,12945,19025,-15040,28507,17926,6264,27878,-16368,29109,-32768,9496,-22601,32767,-6515,18959,-32768,-6875,32767,-13433,8423,32767,-19156,4447,-2612,32767,-12084,29596,-22486,-29981,-7148,17397,-32768,5666,10669,32767,2076,18037,30335,9033,-11807,17264,-21138,-7402,-5549,32767,-6905,15286,14562,6343,15268,-5060,-32768,4875,-7795,28287,-5024,32767,-32768,-19639,-32768,-10747,-32496,27844,-18729,30188,23353,-8255,4695,-10423,16681,-9589,-24051,-12612,32767,2156,19233,-11277,12343,15835,15651,8961,32767,-7005,11471,25874,25396,22557,-12008,992,22202,-4025,9295,-25015,-18992,-29798,-27853,-21155,32767,-13274,24731,28565,10795,9160,5421,16957,-29278,11399,32767,581,-31349,4608,-6479,-9126,-32768,6183,12216,0,32767,12316,-12597,5130,-30526,32767,-7932,11340,-4096,-4979,-22530,32767,-7587,-29820,-659,17743,-2117,-3375,-15194,-32768,-909,24132,-25375,-23109,22366,-13930,575,-3212,-22272,-3141,10755,-11170,-29077,13758,32767,13026,-1662,10595,10241,32552,-21390,-9867,16156,9671,-5630,6417,15986,32767,-25858,28315,12065,28213,25203,19426,670,9877,-13322,30788,-1907,-29099,363,32767,-19171,24775,16638,-32768,-32768,32767,6909,-18270,-32768,14889,22705,5681,22407,10983,5190,29132,-27021,7547,22908,-32768,3281,-27596,5485,2001,24307,8275,-19876,-21888,29686,-17580,-28103,-32768,-20781,17614,130,5494,-7920,32767,1101,-24439,-23540,9956,24587,20241,-9763,21756,8759,32767,8207,-32768,-32768,3168,-19326,-3158
6,32767,-14149,-32280,-22689,-14298,21436,-12815,20891,15907,-32768,-14188,10378,-18586,10326,19019,24337,26528,-10503,-12915,-12568,-12044,-31111,-16172,-11693,21943,-7501,-7388,-2286,-14776,-20586,10498,-20216,22110,24553,-9456,32767,30741,-15560,5633,19648,-15938,18307,8086,-11982,-32768,18504,14871,4770,19697,9394,32767,-15220,-32768,-23202,967,9601,-24523,-15560,-29146,-7275,14398,-1266,-32768,13220,17451,-4971,32767,-15188,31675,32767,-21542,-26876,-4890,11549,28243,6685,-3313,6584,32767,22828,-19073,-18476,-6534,-10403,25486,-32768,-25258,1635,29880,27004,-29906,6719,26937,115,16637,20170,1087,-32768,-12749,-7530,11870,22254,26241,-18101,-32768,-1514,18261,-16926,-32313,23094,3036,-10217,-23263,-32768,32767,18448,3971,-12498,26316,-16274,13953,-27079,11694,-7064,-24857,-6171,-32768,-32768,4520,-14152,32767,1422,6392,-3458,-23849,20387,-32768,-32639,-2827,-14427,-3869,-2731,-14651,-1440,-32768,32767,-14379,-2667,21710,-3427,-32031,-22995,13073,-10685,8832,1052,-15254,25495,24279,-32768,25675,-17889,8548,-12022,20760,19246,-9344,3465,-15707,32334,-7944,32767,-5451,25990,16647,-32768,-32768,23209,16149,14143,9031,1936,1350,-13669,5422,27376,27794,-30622,-11646,5613,-9556,30792,18891,-31564,27028,15430,29442,-23856,17083,5755,-20549,3186,-27360,20855,27226,12951,-13813,24915,2081,-11629,27906,-6807,32767,18112,-24203,-10233,10169,-6440,4590,-19513,16643,-13395,5000,-2341,14354,6133,32767,-32768,-16790,-17149,-4679,18013,-1475,32767,31605,20528,4566,-30805,-10818,-32768,-6170,-1213,-32768,-29791,-6667,-25927,-14040,-7630,32767,31407,12654,-15406,-27274,-9365,16355,-9797,-29841,-3666,4736,-17055,2589,-27703,27506,2849,19435,-26558,-23010,32767,-32768,30437,9501,20673,12089,-4644,5949,1798,18271,-23843,-10336,28832,16506,30232,-4499,-32768,-10043,-375,-16448,4843,32767,11289,6492,11938,13554,8391,-2761,31627,-17445,-25812,10930,-27388,-8280,-13140,15331,19177,18640,13602,28764,7427,27490,-30185,8840,-18613,11092,10354,-20858,-23730,32767,-2060,-8566,-17715,18208,-28
825,2026,-14363,6821,26797,-27013,1139,22332,17899,24175,-31258,30077,15921,3241,14850,-21333,-17368,24371,19762,32767,25972,-12996,31051,-22722,-13331,-8161,-24379,-22205,-4491,3705,3237,-9933,32767,9000,19071,-4199,19997,-32768,32767,-8699,-20379,23693,-42,4623,25661,14324,21343,-32768,-7166,-3385,-21162,16867,5109,-13866,6769,31292,-3313,32767,32767,26297,22610,-26172,23207,746,-12733,-19186,31067,3632,31221,-14264,18946,-14317,-19592,23716,11677,13492,16403,32767,2732,-5904,-21793,-24585,29188,-21238,-9463,22052,-25954,-31195,1729,17828,32767,-25883,-32768,32767,27314,-12240,-15866,13661,20428,893,-2188,-32768,15646,-31228,27646,-32768,1657,-14202,5833,32767,-190,8849,-11033,-13829,17152,-32768,-9305,28932,-14123,-32649,32767,10276,5757,-4571,20114,-12279,-20295,286,30767,26718,-15408,17412,-20215,-32768,-4146,-21705,23775,14430,9052,21829,-7282,5311,23177,-9934,28719,8080,22961,-8505,-32768,-18510,-24558,-1843,-17441,-3583,5451,-14733,10252,32170,4655,-30728,10852,32767,-1216,20493,-9140,-2708,-27466,5758,3703,6591,-1068,-5432,8938,-3969,-14128,-13550,13291,24741,6190,32767,15582,2415,-15530,-28961,-32768,30114,-9570,-22974,22493,3840,-5682,-32768,-4812,32767,-17109,12856,-4970,-5487,-1367,13991,15383,32767,31816,27670,-12373,18166,4660,-22234,-32768,-32768,32685,12842,20068,-10541,32767,8973,-5900,18382,253,6706,26351,-32768,-20031,30539,-13916,24614,-4377,-25507,2178,-8979,-32768,29378,8565,-32768,4226,10189,4521,25802,30266,-26494,16579,3447,-6217,19818,1332,6736,-11945,4794,13150,-11780,-32768,-17724,-3198,17942,-14399,32137,21466,-11664,8709,-14244,-28627,6546,-23434,-7075,-19468,-6846,24614,11087,-26836,-4940,31771,32767,-14689,-32768,24249,8934,-2658,-32768,-12052,-32768,2297,12539,-6787,-23723,-32768,-2784,-17476,7449,-29383,-32768,-5630,-21126,16320,32504,13170,-497,-4858,-23028,-7929,9778,-5062,14834,-18501,25147,21985,3069,-32768,-17836,-14946,-26299,-29243,-11430,17835,-26345,-3421,18785,-11382,-11566,-23813,32767,16503,32767,9543,29918,6065,-32768,
32767,-11877,17263,25529,-6108,-11193,-8254,6626,13323,-11602,32767,-964,28572,-7510,-32768,21861,-32149,32767,3877,-32768,19187,-30643,-15220,210,32767,9507,-19569,-32768,23043,1373,-32768,23340,11187,-17356,-20243,-23497,-32768,32767,32767,-13868,32767,18803,-32768,32767,-17762,32767,-8977,2771,-2140,13817,13771,-21263,434,32767,-1940,-22316,32767,-24931,-32768,9325,-22058,-9043,-25399,-27133,-11775,20637,-12373,-32768,29062,8688,-21235,-10556,-17029,10109,21663,-2646,1945,31444,11372,-32768,-16097,-32768,18109,-10414,-7551,-9216,-32768,-21148,32767,14563,-32768,-15708,-12796,-18839,-1992,-19654,-6110,-4756,7406,-32768,-32138,25686,-24479,21436,10087,-26250,-23361,10346,-32768,15039,8574,-15611,32767,27496,20174,-7886,-15891,27256,-24861,-17548,10737,-13724,32767,-11793,15333,-18918,27381,-32768,-17241,-32658,-10865,-32768,-23736,5837,-32768,32767,32767,28778,21164,13011,2883,18227,-32768,12388,-17214,1602,11429,-18942,28499,-27872,-32768,4920,-9912,-32768,-29012,24421,-18667,-2410,-5467,-32768,32767,7511,-3323,-3983,-9422,23235,-32768,6749,-32768,-32768,10480,32767,-32768,-11134,-14146,-24389,17808,-3889,21852,-1818,-26886,-4546,-28196,14696,3314,-13788,-17175,3020,-27654,-8062,-813,-7205,13246,-13655,-4721,-3509,-9658,-7426,-7864,24890,-18085,24195,-19326,17846,32767,29575,19723,13582,479,-31629,-30756,6366,-7483,20078,-6384,-916,-10519,32767,19017,2750,6052,15382,3577,24919,-32768,13907,-32768,8566,-6631,31101,-8924,32767,17575,17153,32767,-12815,-17517,-32768,31705,-6195,-26071,22212,24833,-31340,-32768,-1032,23663,-19549,32767,8491,11311,-1107,7641,-5845,-21943,-13900,-32176,-19855,-14112,-16322,11915,-1415,9402,-16875,32767,-31451,-17,-16311,-18855,-19626,-14934,-19430,23377,-21551,10346,-1743,8230,-32768,-32768,1288,26594,-69,10978,-10338,-5096,7418,3756,-8330,20570,21418,-11161,-7146,-29871,-16369,-8482,-32768,-16362,-17279,28160,-22983,-32768,-9472,32767,-22439,-9242,-9704,-18242,20514,2317,15697,2119,-17306,19284,30150,14327,17877,20639,11412,-3607,-3251
6,645,32767,-25612,-13346,-7241,-1733,6887,-5765,32699,19250,-16879,-29501,30271,-1507,-19852,1902,32767,2146,-9205,12889,-22635,16523,2118,-32768,-32768,13223,-1990,-10315,-25573,-16321,-32768,-26402,-32768,17833,20380,-3131,-16177,3243,-24160,-21001,-3807,22751,-21998,32767,10971,-32768,15141,32767,11811,10874,9993,-8899,9,15456,16603,11715,32767,-32768,10441,-14225,31841,-32768,8837,-12369,-8611,-23076,-32768,-32768,18786,-12823,3679,-32768,-16801,-32768,-8125,-32768,7369,-19231,22804,-23938,-3150,-2409,32767,-31632,-27812,-1523,32767,-23281,8747,-32768,-32768,32767,20309,-20779,32767,32767,-5439,28060,16602,29930,-32768,32283,-17473,9982,-10999,32767,21276,11256,-9609,-14317,-23311,-31036,13232,18355,-32768,-5572,31029,-28461,16178,21889,30347,30834,32767,-25797,-22786,11791,13902,-17801,-25951,11857,12228,-22555,-5082,-8213,13795,12302,-14624,3539,7410,11647,-804,9172,3064,-5806,-16980,9468,-7209,9651,32727,-4367,11651,-21566,7103,-7629,29055,20522,31757,-32768,19505,5273,17947,29739,-5070,-14943,-17547,10767,16002,8866,-32768,26819,-11615,-1299,-20726,22844,11865,-25030,4022,-6383,28040,-32768,-10819,25275,32767,7710,2786,-22208,-32768,-14243,7010,32767,-25988,21582,6311,11050,32767,1408,-15534,-12139,32767,21233,-10369,-32768,10749,23578,-22006,30035,-15423,-32768,26794,-20223,-6779,28038,1469,-8526,32767,-32768,2239,-6814,16828,-2010,17561,32259,26698,-17709,-9528,3479,-5034,19511,5733,-1818,7211,-5928,3599,6279,14890,88,-8634,-11999,14689,-6782,-16721,31613,-25890,32767,-19757,-11917,-11760,-3720,32767,32767,32767,24291,-32768,23335,-11829,-18697,12954,-30569,-32768,18649,-32768,-4913,-27129,-7130,-4616,-14583,-18754,821,-565,-16723,-9191,20980,20608,3320,32767,18718,-10095,-32768,-9245,16319,17293,-14958,-11774,5203,-6432,22566,-9039,2577,-6428,7045,-32768,1101,8521,14537,-6407,-32768,23987,28944,27294,31047,-17470,21419,-32768,10158,19023,3137,-16052,-3671,-7144,32767,-28654,-13584,-20430,12126,-341,3388,28217,-5553,-31667,32767,9087,7449,-17064,-13233,14
21,-30568,-26803,23944,-22740,21441,6797,-7134,-20859,-13636,12249,16708,17270,29722,23895,6281,-22780,32767,32767,-32768,14880,-17151,32767,31893,30569,-11958,13118,-15282,5608,-1719,8739,13529,-2043,8526,-16041,-32768,24157,-8241,-32768,-2233,-26751,-9288,14552,20355,-10797,19371,-10152,-13228,-13813,15172,12465,-25422,-7964,-9512,2563,-11946,-14349,-20031,-24319,-1889,-4704,19882,19328,19698,-6271,15090,7630,32767,-9220,-9604,-3579,-4823,875,4126,-26788,-5621,-32768,13407,-26337,8287,13908,-27995,19107,23228,-17435,-31578,9594,-27531,-15856,32767,22941,-9492,-2445,24076,32767,1005,-11351,22452,-3857,-28525,13476,-6605,-27625,-30824,16326,9799,-27351,-22384,15999,18134,32767,8105,-32768,32767,32767,7848,27351,5866,-32768,9117,6309,-23638,-32768,-22006,18292,-2341,-4758,4640,27199,14957,-10605,-27653,-32768,-7743,-9316,32767,-16011,-28207,32767,32767,-4682,22669,6766,-4787,-32768,11135,9600,-19863,-10212,-22768,-10531,32767,26718,6185,-5414,23317,-8409,-32768,24776,-12590,13815,21680,-15022,-32768,11892,-32768,18230,-8062,-11547,-14827,18813,-32144,-21618,-19419,15394,4582,3281,18191,-10987,-32768,-16786,5247,23564,28706,3042,-32768,-12033,12388,-23159,-1520,-1678,10158,13052,-16432,16620,-25902,32767,12172,-2096,-30746,-32768,3837,4126,-13639,-29055,20321,15022,-24090,-1052,-25654,-19187,-18463,20995,4718,16190,-23056,-12139,-12739,21083,-20076,20569,10663,26494,-32768,-6640,-6184,15380,32767,2994,-20224,13656,-32768,-32768,-32768,-15375,31236,-28642,6728,-10389,-9803,12847,32767,20616,-4770,-32768,32767,-32768,11420,19051,-27752,11663,25394,27133,15570,17459,-32768,11011,-20091,-30301,-1921,18433,-6756,-3859,10674,21016,7777,-1279,-15829,1425,2415,-32529,9409,1374,-22427,-20015,32437,-28555,-18056,-32768,-32768,5165,-20839,-16682,-10530,32767,-1139,-18568,9421,27431,-32768,13200,-3714,640,-22944,6845,-9914,16784,32767,-3194,-5182,29435,2027,-32768,-12648,7164,-32768,32767,17510,-11073,19710,7405,32767,-24942,-11331,-31364,22575,2485,-1131,-22269,856,3104,13860,11
894,9280,-8999,32767,-32768,29512,15441,-2967,9256,13462,-32768,-4745,-16799,-32768,12010,1087,-32768,32767,21930,32644,7311,25127,-25583,6058,660,24784,24966,-16692,16579,-5754,-8932,5856,-28160,15484,-32768,32767,-21785,29000,-32768,-23540,8758,-18720,-27217,-10358,5192,-460,13594,2186,-32260,10566,19030,-808,-28095,32767,4141,-829,-30966,-1200,32767,-4667,-27282,-24585,4812,-10728,2940,18802,-32768,-32768,-23637,-21964,-941,-11967,-26925,32767,10174,-32447,17477,2873,4972,-3392,-3353,9267,-17528,-31124,20510,16641,2477,-9125,-32768,32767,29093,32767,7579,-5381,28022,-9267,-22969,-32768,-10837,18073,-2336,-9234,-4959,21250,-14837,32767,-13576,447,-26561,-13112,-3363,-23073,3498,3810,32767,-5337,-3529,2017,5728,-10316,22295,14549,-32768,17395,-32768,-32768,23376,-20073,-22103,9970,-8892,-13783,945,-6054,-636,3677,-8897,-3141,13238,-14955,25936,-17974,3403,-920,-8253,-22566,-23374,-1197,-7375,27060,2920,-24198,23753,23550,-32768,409,20554,-24799,-6857,26243,-10419,-6810,-32768,19899,-19783,-27154,6207,4847,-11377,2037,-10718,8841,18282,12178,-6284,-32768,-10256,23470,-21150,-32768,-23433,-31328,-14901,-1907,1797,32767,-22632,-28956,32767,27239,8717,6565,27227,9742,23287,-11235,-5183,-5755,-8339,19980,30251,-23823,-31246,29425,2112,27990,8026,-13779,-6426,-2769,10679,-24141,5664,-8375,14029,-32768,8363,7873,20748,-23830,-31837,-4327,32767,-29905,-5781,-5801,453,2001,22464,4233,16802,-11891,12320,10250,20047,13529,-20558,-7040,27826,19944,28506,-32768,6982,5887,3211,32572,-8241,-18864,32767,13674,-2939,32767,15507,-27507,14281,-32768,12651,4493,3639,22745,5449,13023,19872,2735,-14974,13280,7352,-30132,32767,10066,19331,24610,30667,-161,24409,21137,16971,6441,4092,-10999,-30569,-32768,8060,32767,-4245,-24777,12189,-8281,-4332,-32768,-20333,6916,-7884,-12564,32767,-27473,-17461,17286,-7330,-18365,-24731,32767,-17047,-13737,-13854,32767,-11765,21733,-20753,-11930,-32768,25684,-1243,5283,11613,15777,-31109,12467,-21891,10616,32767,-10825,-1786,-392,23855,-14313,21617,1910
0,-25741,32767,3039,-594,-32768,-32353,27214,-819,3031,27934,14566,9450,-1296,-14129,-13764,32767,6530,-7127,20011,-32768,2369,28345,-4903,25043,15320,17352,32767,-15475,16808,-24785,4889,13826,-30459,32767,-32759,-9032,12003,20176,32767,17647,-8679,2049,32767,13297,-2357,32767,30828,-5005,-12731,-24998,-32768,2344,32767,20930,28595,-19304,-25229,-32768,19183,14562,-7929,19579,20196,9610,-14460,-23932,-17582,32767,-18927,-14025,-12224,15308,5585,-14700,13257,-21049,-3327,-13601,519,-27362,-14813,8480,5831,8164,27543,-9488,-831,-3867,-9935,32767,-23394,16086,32767,-12327,32767,32767,32449,-20634,-7596,-32768,-17549,-11692,8017,5585,-16291,10085,-13525,13968,-13883,-28838,17064,12761,-26567,-13521,-11563,23616,-10601,146,-275,23968,21188,-13069,-26402,22881,32767 diff --git a/tensorflow/lite/micro/integration_tests/seanet/add/add1_input0_int16.csv b/tensorflow/lite/micro/integration_tests/seanet/add/add1_input0_int16.csv new file mode 100644 index 0000000..af7015a --- /dev/null +++ b/tensorflow/lite/micro/integration_tests/seanet/add/add1_input0_int16.csv @@ -0,0 +1 @@ 
+-9128,-14937,17750,19462,11338,-1099,8850,-23678,21549,5102,7697,17542,-20265,31785,-30649,-6238,-16751,28476,-21648,-493,-29045,-23651,21817,-11749,3396,24383,-29007,17097,-18936,25470,-10204,18190,-31539,-13228,-19001,13357,-29189,-14014,14335,32485,-1738,17869,27604,25176,-7420,14744,-11704,7414,-25897,27394,-24597,6468,-1214,-7932,10870,-22010,-28180,3069,-14298,17747,-2088,32102,-32546,-12605,-30774,-27404,-26162,31286,-9625,25903,6502,-25185,-15395,-28356,19032,-20403,-22306,4157,23284,11345,-178,3590,1352,14553,-3795,-3145,18107,-21800,7657,29709,22205,2761,-5215,-15552,6477,-4279,11694,-14864,-17991,-23184,-21619,-18521,25000,-9,29198,11779,20355,5989,2644,30739,-20595,13043,28157,446,-28590,23707,-18575,3044,-24718,-13610,-4658,-8476,16680,-2998,-14033,-1610,-10565,29444,-20705,16545,3357,-91,-10330,30367,7985,-12665,7638,17139,-7770,10249,-24752,-29336,-7354,16274,-27718,-7861,14106,-8204,-18768,-20358,-32297,14026,-2832,-1970,-19893,12892,24830,15001,-2635,68,10037,18666,-21974,2858,8141,15925,-20182,8856,941,28388,-20518,27415,4652,14114,7260,11648,14898,25887,-16719,30989,2192,-27123,-14254,31683,-8386,19139,26940,-27439,-32640,28286,7909,-24058,-29660,13174,15069,-3746,29401,4501,-4990,24500,-28979,10467,10188,-25917,5232,-8986,-11908,-6461,19159,25982,-7537,-12576,30793,2248,-31163,-29000,-3759,12354,-9298,20285,-5770,30727,-4238,-32108,-26611,11638,28541,13200,-13591,27099,27391,32728,12587,-17341,-19828,12021,-32082,-13144,24914,-6034,-29034,-11506,-14788,18159,-21403,13614,31667,-27678,-9184,3636,-4112,-3333,11484,14330,7163,-12689,-6501,-4612,7847,-20717,-25695,-23329,26946,3862,12651,14769,-1302,-2267,-3352,-11789,4730,27168,8356,27336,29666,3220,19219,-264,-31119,10736,7537,-16139,-17040,31248,20014,9675,-305,7387,28378,-15722,-28298,-17439,-6954,-344,-13786,-11360,-10539,-7037,22919,-5545,-30127,5313,11828,-12997,22609,-11699,31538,18970,-27678,-32334,-13292,12797,17213,-7828,-1117,7500,21969,32190,-16190,-4792,-7026,283,32759,-25198,-21146,-3
600,13093,11202,-9952,-12885,17185,16586,1796,16675,-11463,-16728,-19123,-8212,-4544,27161,6389,-5215,-31110,-14811,2644,-11419,-28502,9219,6686,-16676,-15637,-22781,28809,-1282,-15755,-22651,13778,-16080,3893,-12948,21902,-13274,-29426,-10521,-23731,-25704,14929,-30898,24080,-2758,25809,-3267,-4717,2446,-30704,-23197,-16783,-14574,-21036,-22516,-21308,-16934,-27556,28758,-22425,-738,17844,-21934,-24210,-3292,-947,-20392,28170,19985,-18012,7600,30165,-25523,12849,-28058,14643,-13289,31112,21344,-18784,859,-27456,17437,-9320,30792,4551,30083,6994,-31052,-32742,17150,3150,6241,28640,-8037,-5151,-6745,21039,15427,31106,-11840,-21305,-14092,-16066,21227,3428,-24123,15589,13308,17761,-17808,17583,24527,-9283,21534,-15206,20050,22027,19486,-9608,8138,19695,-18184,32141,-17282,27000,7087,-26028,3642,12108,-14879,16783,-22715,5536,-22039,862,9463,7291,-1008,18686,-18320,-13237,2585,-11031,12674,13347,29623,16889,23154,16288,-26291,-23629,-32226,-23323,-10597,-28662,-11322,12482,-26416,-28456,-25651,28424,13785,28358,-24197,-25602,-18834,30358,-12096,-18395,15128,17870,-1813,-708,-22014,17037,-12120,-501,-28125,-18150,30366,-28176,15087,19540,26331,12194,-16802,-10406,1422,16130,-12278,-28757,31062,1575,14160,-11090,-18114,-11716,8504,1950,5201,31080,30392,2792,26335,26736,5022,-6218,20977,-8787,-6252,31275,-19481,6871,-25732,25881,390,-917,22530,29100,11617,12810,-27697,2232,380,4384,20387,-8164,-30048,31960,-19996,-6645,-12554,30023,-10310,-15548,-3404,18050,7288,-12203,-11287,15385,20227,17864,-14495,14361,31765,-4046,25262,-24722,-13983,31517,5399,29746,12016,-12572,29333,-16860,-25576,-6932,-11979,-17395,-7269,13070,-7884,-6855,32319,-10487,6230,-23603,32033,-29033,25716,-17160,-20994,14141,-9242,-7541,16961,3134,27723,-27281,-8460,7020,-6102,30413,-14808,-12428,-7576,14098,-398,19821,32759,29080,20783,-30188,24353,11439,-3012,-25868,31496,4917,32627,2328,3708,-21888,-31072,31272,31359,-3034,26742,26751,13177,-17786,-5075,-25866,13705,-3126,-19041,22022,-20832,-17729,17
974,5776,24770,-14597,-26262,28299,28136,-9386,-23838,9714,21338,-2330,-21054,-27822,24130,-25316,32162,-14904,11349,-28573,16789,30119,-3781,1704,-24559,-18546,22232,-31805,-19069,27331,-27787,-13645,31792,-8907,889,15464,441,-9185,-18383,-19507,28901,11381,-2975,21591,-32761,-20732,14562,-9885,18986,29952,-142,30107,5776,1425,16421,6891,25418,-7402,5141,-6373,-30959,-28838,-25226,-12962,-13809,-3421,-5169,-13911,586,12467,13633,-23744,13452,18183,-9897,2992,-12970,-13057,-10087,12351,18305,12241,-9353,-5164,-539,-22231,-548,-22543,15138,8558,18765,28929,-3757,-16235,-10201,-15761,-23551,20776,-16360,-22113,10520,17013,-25482,-17883,-4658,31121,-16516,13294,-9260,12690,17718,19906,28360,20068,31694,17690,-10656,25501,-14053,-13523,-29421,-26691,18055,-3067,29971,-15619,31972,1662,-17528,6149,-10155,13042,21838,-22282,-17877,11370,-24218,28012,-14615,-25357,19037,-5921,-26517,27010,5393,-28957,-8257,-1827,24868,24054,30043,-13791,5657,3442,-19399,18418,11907,13334,-21416,12062,-2607,-5085,-19099,-26864,-21131,13253,-16818,17409,22414,-16514,19990,-8706,-29145,-12393,-19485,9203,-24581,-11,28088,8130,-31135,-6484,29473,25884,2109,-23284,8549,29125,-20672,-22307,4689,-22365,-14110,16563,-22743,-4180,-27791,5922,-22007,4444,-21311,-19854,1984,-32554,16858,32688,19876,-329,-8680,-25862,6559,17917,-8470,14590,-19436,26865,7712,-9639,-14640,802,-18643,-6837,-10687,-11534,11568,-28453,-23445,2018,6643,6489,-6240,30365,-2284,27652,9489,23188,1370,-27755,30871,1997,13834,17121,-24003,-5688,9937,-8592,23692,-29850,29209,-4891,18112,-27659,-23536,26028,-32026,23792,-1153,-26444,6513,-23988,-3858,-10189,32658,6742,-2696,-28105,31632,-481,-31132,17784,19844,-15507,-32107,-27935,-20003,17633,25776,-9328,29532,28879,-15134,28763,-31201,18023,2475,369,-5077,31439,-1305,-3414,-14825,27305,-16206,-18571,29644,-7749,-25617,2422,-23605,-16928,-22941,-25429,-21482,12121,-6710,-31988,26519,17243,-5792,-10475,-28303,-5908,20445,-10181,-13394,26753,22969,-23923,2995,-24191,16072,-29355,-26
578,-18005,-16753,-7610,16703,20609,-30253,-4195,-2627,-19976,-14031,-18357,-12003,-17103,16210,-24284,-28193,24462,-24197,13275,-901,-17178,-17369,4728,-20255,13136,-4040,-15182,19273,18893,31507,5212,-26038,17381,-26597,-27742,21480,-28906,21130,6113,24141,-1128,28212,-32424,-14131,-23839,7324,-31492,-20476,18922,-27557,22613,30072,29303,7992,-6432,-8493,28915,-18170,9935,-22037,-11719,12396,-4718,15044,-12563,-30563,3156,-5227,-30126,-28140,23796,-19230,9613,-19088,-28288,30122,17155,9843,-13840,-15863,5410,-17355,-3495,-25846,-23015,28623,29617,-28245,-6945,5502,-10449,13495,-9325,5431,-20055,-17753,-2664,-32425,29718,-8079,-30376,-3700,-1619,-30929,3854,10835,-5148,-2752,-19289,13222,1441,-22859,-7055,-20827,30457,-29547,17949,-13456,31536,29728,24109,5884,19396,11464,-16246,-25003,-11696,-24245,16885,-25065,17489,-28641,28703,2128,4070,-9788,13886,-8765,13207,-20746,6772,-31386,-8425,11383,16409,-16334,24133,1838,8006,27427,2314,-10373,-24390,18656,-24127,-11215,4202,13939,-25311,-15870,-362,18967,-20882,30248,-11675,-7423,-17406,-712,-16699,-2011,-12827,-18148,-27496,-14255,-27577,14063,-4577,-2690,-10801,24551,-17173,-18722,1222,500,-31639,-1518,-6041,32546,-21820,14034,-4827,10308,-23324,-27524,13691,23433,-6590,9493,-3641,-334,23282,-11655,6272,4649,20514,-6228,-25219,-20114,-17529,-24266,-21443,-10200,-15991,31647,-13342,-23320,4852,30284,-27335,-6073,6541,-19052,25136,6254,3658,4555,-19877,15722,27498,20572,21691,15883,15008,5102,-24535,16243,23249,-12191,-8031,2010,4111,-3326,-4597,25181,11594,-20736,-31072,28200,17211,-26183,-2961,20509,20267,-19809,26436,-4155,17263,21465,-23351,-29974,19324,-8641,5281,-15424,-16358,-16789,-14692,-29926,14807,20725,-2287,-15793,-6406,-32554,-10850,11629,16814,-13846,24611,27733,-27327,5576,28029,-1879,30485,27147,-19393,-9487,3290,20724,20103,30697,-29446,27326,-12053,31318,-28798,10121,-9302,-25250,-31358,-31020,-31012,18574,-15510,-13637,-27407,-24040,-23908,-6226,-32147,1348,-20055,13154,-18053,12076,-3655,29471,-3
1649,-14197,5223,19800,-15322,25884,-24255,-23830,32234,20026,-20739,23892,31956,11279,28819,-1694,20914,-31571,28978,-6227,-7226,-16857,19818,24599,-5566,-18250,-15639,-16878,-27602,20573,23938,-28227,-7082,25963,-22143,-1810,19082,20240,18350,20996,-23457,-18440,-5913,15065,-4380,-8500,25736,-3632,-19630,-12961,2088,17574,13334,-16772,-5361,-1276,14000,2783,16982,1246,-23499,-19494,19920,-2294,16491,27868,-19025,-1461,-17583,-11622,-27655,15712,20166,30766,-22011,7256,19991,-1808,29295,-21446,-28108,-17217,12187,2703,15115,-28450,28793,-19758,16966,-1312,22177,4761,-20494,12599,-13688,21892,-22530,-17276,11083,32469,16916,8908,-15435,-24417,-15398,5782,28157,-24683,14234,11142,8178,20254,18662,-12571,-31691,22523,28873,-27455,-29656,-2533,26914,-20153,14651,2379,-29323,28322,-29978,-6925,31095,-2440,-24579,26041,-22426,17197,-13299,15235,-2496,10210,19155,27215,-5358,10458,-11178,4776,30314,3947,6354,-6302,-5847,10232,20657,12895,12502,-11508,7386,3107,-715,-5450,17305,-24918,27443,-3477,-15900,-26238,3509,27214,15895,21621,5619,-27346,15281,-7819,-9300,26806,-20731,-27783,18959,-26200,-25282,-19044,5097,1334,2677,-5724,17363,10895,1266,-23297,28162,17916,-4380,25747,21406,-7435,-32390,-15087,340,8046,-708,-26156,-13917,10380,23508,-28213,17439,-15472,-1530,-24492,109,-10421,31160,-24536,-28460,6638,25819,15435,29283,-16941,23542,-29032,2210,4616,-11752,-20992,-11180,-4301,28144,-31005,-29831,-2695,-8030,-9052,-16769,18500,-10507,-19837,18378,17400,26975,-25986,-26539,-11791,-30094,-17273,31151,-14987,23287,-12271,-12744,-5191,-391,31049,28948,27851,19865,4425,1405,-17732,24966,28453,-22329,25265,-7548,27832,30861,23709,-9422,6568,-4411,20718,3172,1793,-1272,-2596,26126,-25618,-22222,14971,-19116,-26163,-3143,-17998,-5685,-1019,14583,-27827,26946,-10773,-25005,-14751,2731,21249,-10489,-11066,-22310,-6527,-24382,2541,-21385,-17838,4673,-16521,18600,6025,28716,4822,15044,15085,16541,-21230,-7362,359,9134,-16237,3741,-17739,-6871,-28917,20151,-24002,11927,22406,-2090
1,3974,26550,-23752,-14126,28452,-20931,3074,29977,13792,-6204,13499,22330,23051,5507,-2286,18655,-22992,-23100,-2942,-13394,-23455,-22093,22437,12476,-23738,-13380,24831,29941,26027,6314,-23898,31233,30504,23217,29751,19198,-29667,15427,4941,-9898,-24939,-17134,1331,14125,-1721,9885,18547,3199,-26127,-29800,-31704,1759,-25459,17569,-8943,-23619,25777,26826,-24231,16476,2329,-19733,-31702,10968,17865,-3818,-10944,-3391,-8512,29594,29430,-11811,-18137,22747,-42,-27020,30127,-10829,9941,20470,-32126,-18679,-5622,-25946,948,-27299,-26808,-24183,23686,-29501,-19403,-31045,12424,-5272,22013,19010,-31049,-31399,2017,-12718,23681,24799,11612,-16909,7949,20767,-13642,-16001,14286,5505,20321,-21102,14292,-31512,30686,23500,4894,-22756,-16242,8395,-602,-28247,-29536,22127,2173,-31760,13260,-8928,-8947,-7183,29947,-201,13286,-22532,-29333,-17354,3857,-6252,6814,27022,9168,-32419,2918,-12594,-4702,15599,15864,-21616,25713,-20811,-25963,-32724,-23439,27368,-9479,277,-27636,6348,1351,25911,21293,-9784,-26049,31071,-24335,13218,19305,-30967,7304,32382,32214,18619,15311,-24378,8781,-12475,-23877,17869,787,-21591,-17793,1952,13138,22938,16001,-4843,13274,-11429,-18115,-10030,-1109,-7095,-4284,15445,-11953,-9941,-32213,-25425,2418,-15351,-11136,-12808,24304,17345,-24267,15232,16503,-30513,13958,-21493,-1595,-30917,20757,-3458,4387,29207,-15349,-10281,10507,-5314,-24766,-7137,-3404,-24663,26079,30127,-21900,10673,9893,17611,-22023,8001,-17778,23711,-7906,-16352,-24844,-14295,16369,28035,23709,4888,-21744,18154,-28135,18661,14353,299,6804,16123,-16530,-21259,-11464,-29120,3072,20848,-20211,20891,8034,19228,16825,32391,-10110,-326,19024,16419,10318,-1905,-2153,-22881,-24275,-6011,-9247,10205,-25953,31600,-27379,24949,-29567,-11590,-11029,-7501,-29931,-17772,21088,-5851,11961,985,-24895,14792,13874,-18804,-10248,23170,-13293,5536,-32353,15800,27169,-18534,-27597,-28590,2294,-21513,20014,16597,-27294,-27673,-24737,-5365,1576,-11817,-14349,31027,-1855,-18155,22849,8432,24543,6704,-2249,-51
49,-12735,-15958,11772,18862,6383,-17317,-30707,27151,9964,22894,16165,-3092,29553,9788,-6903,-29476,-29107,23065,-2918,-472,9429,22883,-743,30263,-14236,-12777,-13150,-3137,7148,-23959,-7710,-9874,31772,-19772,-12045,8776,-4917,-12251,10652,25516,-31711,5757,-25004,-26945,10967,-18271,-11828,8130,-7303,6319,16738,-15177,-8930,9760,-10412,-17262,14426,-11664,16068,-20289,5345,-18066,-3369,-3180,-22284,-19769,-25620,16942,14515,-7555,7911,25478,-24201,-14748,1072,-16977,-24360,29246,-27721,-18926,-24822,26485,-9198,-27858,7405,3882,-14155,-10386,-27618,3890,18366,2942,3509,-24465,-13867,5320,-10076,-31910,-14131,-18224,-24028,18118,2888,28619,-23222,-19393,14630,28222,-3206,-7526,7514,26058,3945,-26532,-14353,684,1229,25820,16916,-15481,-12176,11206,1250,18815,3459,-17166,-14645,-22997,21678,-7809,22516,10965,22098,-31921,20736,16263,32616,-23206,-17494,-18335,28342,-24264,-21849,6081,4582,1838,10024,-808,16928,-1024,7975,-1788,2302,2595,-17897,-7377,19432,26089,17129,-32173,-4120,18831,17206,29124,-26243,-2046,25452,13862,7199,31099,16050,-18352,30977,-32452,-4739,8220,-13711,23933,21800,10496,9722,-7346,-16157,27069,25722,-18458,25230,3641,1949,7463,18061,-18381,15154,15335,27290,-3954,19505,6897,-28227,-24229,822,30680,-15356,-32368,22989,3888,-24098,-25435,-31732,22318,5677,-18837,30003,-17546,-31524,27940,-286,-1049,-6827,17334,2737,-603,-5887,23734,907,2180,-31284,-12101,-16007,7744,17296,-8702,3956,13918,-30382,31913,-3623,13765,20561,771,-8203,19425,21628,-19481,22079,23469,-12995,20664,-11001,-19499,-22573,-16937,11515,2273,11690,25784,6277,7919,-16200,-32545,-6361,17142,11422,-26715,24315,-15940,21917,20335,-22956,13982,14630,27143,24039,-26005,8735,-9674,12730,28067,-13266,26793,-27948,7168,17882,16091,13246,9543,-27535,6492,28903,30773,-15262,21546,12567,-16959,805,-18621,-29330,-9337,29274,4973,12985,-30925,-31640,-31952,5437,20274,-7905,8602,17503,5374,-25032,-5621,-31039,20090,-2138,-5474,6740,18281,-11843,-26691,27087,-16235,-650,744,13772,-9299,-2983
1,19947,18819,20278,30408,-12111,-16707,-5430,-29908,28671,-29763,3641,29980,-12411,25803,20685,26295,-27343,-14471,-27225,-21759,-10693,3887,6268,3013,9888,-18768,-2683,-11452,-10002,30817,16593,-15269,-1209,-20669,30986,-13508,-8884,8679,21134,9071,-5444,-18314,32198,21752 diff --git a/tensorflow/lite/micro/integration_tests/seanet/add/add1_input1_int16.csv b/tensorflow/lite/micro/integration_tests/seanet/add/add1_input1_int16.csv new file mode 100644 index 0000000..e6f6d2c --- /dev/null +++ b/tensorflow/lite/micro/integration_tests/seanet/add/add1_input1_int16.csv @@ -0,0 +1 @@ +518,-7798,25943,-12018,-296,22893,16765,13655,11751,21017,-2196,17188,5984,-3779,-6834,25555,-9854,19440,24290,31727,-30255,26934,26611,-2964,8265,15916,15536,-20322,26283,12960,-3256,18945,14188,-27578,18988,6838,-30874,31850,-5717,6654,6191,604,4966,-25873,-7295,4384,-15609,-24036,32720,16222,28411,14507,25716,23143,7377,27247,-20693,3004,10346,17525,-4829,26348,-30500,-11709,-29770,26670,-10817,-5037,-15031,7470,27613,27156,32493,28734,-3484,17267,-3259,-27283,17969,-14782,31666,-24189,17957,2360,31656,19693,27660,23736,6364,-5707,5617,32349,-11180,26704,25776,336,-3698,-16836,-1965,-11219,-10570,-4640,26131,-21643,-6748,27604,-15221,5284,4580,-21917,-14548,-2441,25630,229,-5030,-30723,19391,-19800,-28054,32042,27444,13672,27175,24930,2083,10967,-32763,9615,20457,-8186,-12098,-7976,-20095,31023,-25259,-28223,-13396,1303,9079,-22038,15136,-31398,10379,13122,3311,-25023,13737,-9494,31211,27598,15759,-27749,22116,-15046,-15352,1652,23803,-2945,1539,17177,518,23001,547,-20708,13228,-9909,23369,-3816,24564,8410,-9091,1975,12181,23264,29409,12905,-22938,-25643,5233,245,-6647,-3726,23584,31971,-17751,9549,-16310,-23609,-16821,25438,-1486,8999,-11283,3042,12737,15311,-10867,10658,16517,8010,2659,-4571,20941,-30047,-3086,-30529,28157,13687,8753,-28409,-20271,-15426,-1237,-32309,4416,-22792,-27842,8811,15210,-23757,-3615,26794,8632,11918,4519,-2529,-5923,11731,5995,-8217,-29892,4463,-6914,-31809
,-28713,-14222,20218,-30332,26712,-13351,-5833,-18458,526,5682,13616,12125,-25128,-26305,-8334,11067,-23691,22223,12506,16591,31727,3332,-10584,-13065,-32308,-17341,15062,18554,-7665,-18468,-32461,-27552,-22008,-29930,22535,-13966,28444,-3765,-28907,21477,2302,-30580,-21810,32482,24193,12551,1035,6484,-28592,-20214,-8021,-7824,32628,3411,11827,529,-28793,-9725,12795,16220,-17773,-7062,-30552,-519,-13479,6879,-10221,12999,9393,12855,19166,-5908,807,32411,9499,8307,13462,-1800,18314,23534,-3604,-1356,19078,-14680,-5003,-22415,673,-17429,-11261,-19573,-7099,8475,27631,25989,-32739,31746,16228,-26566,24246,6010,20263,-30238,10046,961,26696,-7504,32507,-21120,-7973,21421,25528,-9196,-6744,22808,-5832,10230,-12731,-25829,18179,32166,31892,16147,20711,-614,16485,-23295,-16250,9886,6910,6902,-32088,-24670,-16706,26750,15278,6769,-31203,-372,-31301,29316,-4546,-15834,22460,-29,27620,31864,10467,24966,-22811,27797,12713,-3161,6637,29789,-13207,-32207,22857,15461,-31030,-30521,4167,29336,-9045,-22282,21164,16072,-9673,1822,-16315,-3028,15054,4268,18661,24627,2073,24892,-16235,-6122,15879,-31177,-29927,10203,21267,13008,-31463,16123,10511,-11422,-25090,19780,-4829,-30862,15365,31888,10325,15998,25292,-12586,18952,3708,19386,-10196,-491,-30174,-18553,-29538,-20110,1684,8889,-10302,-7039,27523,-28374,10358,-6316,16770,24137,18119,2497,-22570,-2939,13493,-19583,4805,18224,13696,6768,-19083,11658,11632,24375,-31774,-6042,-32149,10159,8945,-23936,19341,24322,-3857,-18823,-7851,20465,-4187,27554,15114,-17892,-30342,-31011,310,22994,28927,16199,29006,-25097,13895,-3201,15101,-22285,3528,-19015,-30384,-21098,-24902,5366,-9689,17395,24199,32517,2037,-8265,7254,-32639,25323,-16017,-8744,54,19525,26491,640,3792,-19095,29522,-3064,24389,26484,10548,-32343,2513,-14329,-29981,-488,6315,-30605,6089,-23003,-21012,5590,5525,17763,-14289,-32029,-32616,-13261,-24593,20069,-16735,30658,31891,-14149,-10,-6621,24188,-4571,10822,-30981,11026,-15993,-20861,9752,3852,28893,-3257,-8652,-18721,-30398,283
85,-17943,-17986,31964,-24388,20065,11619,-3425,10626,-27618,25907,-25262,11814,-16521,5072,-5781,-11077,23533,14509,22991,-924,3346,-9876,19506,-2134,-20452,-8170,4398,-15037,17998,-5148,28720,31599,21065,31222,-19045,-23948,15942,13487,615,15846,14839,19706,14508,18315,-9897,-10640,1955,-16847,-24683,-21539,32222,-2576,23265,28313,-4764,23735,5437,-4003,3365,5994,-1420,-17235,-15915,10424,-117,-2006,-1691,-27028,24929,11947,18156,-11749,-31532,26907,-16374,16741,-16797,19057,-27369,1612,25515,-29608,15274,456,-8742,-22298,-26172,19770,28120,-18683,-24859,19733,-821,-4830,12566,6619,-1090,5216,30391,-28604,-13396,-10114,-6773,-30450,-15603,22918,-17564,8732,5789,11689,21614,7353,-7878,-4611,15578,3118,21779,-31257,12092,31133,7959,-32401,32105,-5217,-3459,31932,3579,25233,-20340,-6419,19864,-28242,-30254,-19467,8164,-24771,14997,-12950,-21198,6366,11326,-27323,5856,25122,29169,-3597,-5436,16318,623,17901,-6181,-328,17231,-1598,15782,32376,-14396,-11833,-3993,26870,13981,28608,6009,16643,-28541,-10529,-23485,-8992,13345,-532,15452,29384,-22166,14256,-2734,26562,-20170,27441,31482,30128,-8883,-21827,-21416,15620,10776,-1813,19728,-21178,31834,-29756,-338,26289,-1272,-469,6828,-29421,-30810,-9283,-20648,22386,785,16609,-2998,-11496,30271,-14462,-15966,-26272,24203,25900,-15603,7996,4012,11959,18647,20072,16793,-10097,7767,-30020,-3845,31485,16361,-5026,16434,-714,-27154,-5098,-13766,4363,5275,-30965,20213,19570,-32737,3296,920,-20980,17924,71,21148,2625,-17034,-10518,15057,-11635,25659,-11017,-26629,14416,28867,7896,3636,24351,-1133,7607,-18035,-9196,307,30671,-6803,-26388,7888,-11152,-5148,4974,6439,2399,4294,23585,-27367,-20975,25773,-32403,29009,-24913,-27226,-19161,26507,-6259,25613,-31963,-11383,-14094,6986,4985,-13563,-23515,-12386,18044,-566,304,-10573,-280,6076,4145,-23517,-12948,5403,667,1167,-2306,23429,20556,-30681,-30394,5687,-31879,-30473,-43,10438,2914,32237,27393,-29285,-29338,-28787,25489,30610,32005,263,11409,7685,-19219,23581,-22599,5850,14035,28752,
-9087,-29496,24670,-16479,29221,12531,6316,17400,32362,-17577,-6318,-792,24097,8186,-18506,20799,-11302,-18609,16778,23928,4636,-27578,-32678,-13429,3016,-14341,9394,-13758,-3304,18763,6725,-27548,27369,22775,-7580,28403,-15906,-30373,19206,21349,29078,-18639,3926,4698,-28169,24572,-29185,24622,10006,22975,-6453,7714,-28176,-30341,11307,2087,12551,-4435,-3251,15440,14118,-9363,-13986,4640,-13639,-25303,-326,17871,26023,2365,12106,24778,8148,-18496,-21022,-31094,-27493,3620,30360,30552,14006,-30886,-22228,32002,-9483,-21960,-18861,-16639,1485,19382,-2455,9392,19828,-14064,-15797,-6958,2449,-908,13561,17911,-15119,-10097,9254,-29228,3347,20505,-981,31874,14386,-17907,-21273,16074,16433,2340,16119,-17131,24235,19484,-29101,-13925,-29045,-835,-10339,-5335,-14829,-29540,-26585,-5698,-20999,-21498,25266,29712,-315,21639,31603,18404,-16903,-24487,4185,7462,21517,-1349,-23294,22231,-25209,-26420,2937,-7740,-28685,-1943,1460,563,-19438,21870,-32338,9140,-15418,-21299,15826,10216,29182,-27282,16648,-19564,-26383,-29072,21382,-32617,-6963,-31955,-22937,7287,8696,26892,29384,-15229,-3120,6301,-23960,18438,26502,-22054,7538,4772,-19370,-18803,-3452,26051,8836,-29030,-8050,21115,-736,20764,-8520,18155,10522,-9826,-21752,10465,9364,22687,-9127,-17711,-25398,-9849,29252,26899,5522,30015,-29705,29037,8389,27594,-2079,25662,2697,19974,19353,-27448,11766,-6000,27565,-29180,24274,11787,28593,25709,15071,24730,-24640,-11848,-29657,21634,28811,-24446,29462,18032,-10305,-28217,-1099,8013,1788,10997,32685,30428,26271,13615,17400,-32556,-1987,-23223,11959,-30,17853,-3245,5074,19678,-10110,26309,-23613,30172,-28583,-31569,19015,-21917,-21956,-14358,36,-5757,4943,-3200,-18756,-30291,-19983,5590,10517,2599,-10994,-7775,-25452,24928,-23707,26062,1854,-8164,29021,-16291,1568,25303,-30333,-10242,-2397,-5104,-15976,-25748,-23280,4732,7482,-5283,-26383,969,-7077,-6307,19710,-3890,3827,6102,4835,-9808,-5822,8053,-5589,-14114,-13475,-25149,19792,-22122,-8820,-15056,-9459,16601,-1990,12731,12705,5910,
1989,3900,-30220,9845,7879,32121,-29190,16935,-21614,-30228,-888,-31168,-23714,-17153,-9596,10692,-25347,-16844,-242,-31432,-19376,-15629,5211,-180,-1420,-919,15624,13094,-16763,-24970,9998,-13556,29748,-26835,-22576,15708,21598,22301,-31432,-27486,16763,15317,19910,-6340,-13313,23971,-17247,-27044,-3766,1432,-29967,-1907,-5176,26680,12933,-10047,-32385,689,4098,27999,-28180,11367,-18901,-3214,-16400,9849,973,15988,-9935,-24618,1965,22026,-557,-22476,-10909,29702,-13268,-27481,-26401,-16821,17215,832,-448,21631,4805,-27067,-706,29819,15096,-24816,5928,-18463,27942,9246,22401,-4967,27343,13760,1867,-10808,-6113,-11597,-8666,-31889,2333,8746,-10718,29315,4933,16864,20708,29559,-4251,-7432,28775,-1619,-21977,-28629,-22168,25809,-5135,12616,-16767,-5841,-1437,3195,14422,14148,-3581,-5801,-12429,2989,28432,3741,-16685,-8061,-10855,8442,23563,21366,-6824,30335,32161,22060,953,2186,-26894,20118,-23643,32198,1266,26321,20960,-857,-2092,21748,-9916,-32304,-2689,12920,-29486,-31698,1498,11678,-7780,-13761,11666,10435,-19153,10216,23359,16656,-14707,-9823,-11335,-21010,1599,2110,11519,-2586,12251,-7676,4837,24394,-27806,-5067,31314,32742,-11931,27368,-13382,21623,-4946,-3396,25371,-29001,-28939,-1969,15362,111,-4413,6333,25736,24926,-17956,-24087,10334,2881,746,12182,21733,-341,-20250,-32414,23706,-15917,-17065,2987,-13215,21930,-240,-10633,-23076,3494,-20022,4477,-31491,18954,-9912,-18489,23663,-2047,31057,-26625,6205,23136,-11730,13118,28820,29669,30568,-24002,13423,-6687,-15504,-22104,-16434,-30334,-156,-20630,32765,-13543,-19854,-9683,-28110,-21364,-26667,-18496,-29326,22583,-11197,4724,12481,25896,-3990,-4478,-22051,9253,26077,15235,-23263,22981,30938,-27238,-1103,30761,-23926,14470,13962,-15212,1620,30712,-26545,29124,-18719,28428,5575,19633,3419,-1177,-3029,-7252,13009,23591,24075,7671,12045,-4718,9969,3263,25956,-28985,32736,14045,32576,16195,7888,-19667,25288,-13242,-31358,14076,21219,21337,-1330,-15868,-11182,-12927,-2582,30883,8917,-25658,-21617,-30098,-19435,-16748
,16449,31848,7982,-8564,32424,31314,-26347,-16476,-15807,14571,2255,11631,-4312,10808,-17818,-24269,-7921,11365,24125,854,-28232,15152,-28082,15264,17390,-20072,1427,-14613,-5983,25386,9686,27270,-11861,814,18753,1258,20348,-13939,-24558,4857,20468,14710,19839,-27509,1814,-10903,-10619,18974,2436,21816,-14182,-18010,353,-11884,29255,19202,-3794,-6419,-22603,27619,697,-15091,1913,-12161,-10630,-4254,-5718,-13451,-11960,24763,-4929,9867,-28735,-30242,-11155,-30828,14099,15181,-5479,-25764,3262,21483,-7243,-14832,6540,30794,-9278,26732,10829,-7237,-14653,-9555,-4137,-6334,-14938,-13951,-18710,26996,3039,-15195,8854,11971,-24646,-3366,-21397,-16260,-10010,2324,-22600,-16620,-8265,27697,-26603,-4986,-8374,14459,19242,24842,2952,-15091,-15471,25867,25403,-11697,-7922,31173,13382,31447,10409,7282,24020,-31598,475,-13154,-26249,992,-31677,-3451,5884,-3881,29143,20422,1351,-13652,-26913,-8173,-3074,6505,2352,27312,-27305,28471,-18311,28213,30881,24404,14818,-7513,-4858,-3972,18395,5076,15979,-30155,-984,32158,-2840,-30641,29076,247,6832,-13767,-29991,-32454,-13287,-15779,23330,-25781,7693,-11484,7229,4062,8570,32252,-18047,-11314,-13457,-30481,-7281,7702,23311,239,-2537,21004,11926,-23105,-27454,-16872,-18537,-14052,8021,4984,-1271,27511,7208,28176,-22670,22568,-26190,28438,-16871,-15541,10225,32677,31675,-20704,1872,-19196,-22250,-13811,-8660,12723,6816,-31441,10530,27628,-26234,18781,17734,-711,8000,-31591,10909,-31347,-2689,-58,4673,7247,-10802,-7695,-4630,3787,-14847,3800,-12656,-10922,-31958,28804,23805,22404,14266,13096,-24311,-27897,-18014,-19087,22376,-23851,31530,4031,-25145,-25744,28008,-27308,-13424,-8243,-25872,4527,-9238,-9255,3480,25701,-29836,8849,-9199,18134,-9557,-978,28610,3617,12437,-22314,-10597,20307,22782,19548,8129,31076,11879,-30332,-9124,17179,-15402,31412,-20028,17260,14941,-3877,31369,-5170,-31393,-22495,-1415,16807,24531,3742,24455,-21340,-22609,-18838,7256,20392,30069,-11328,18048,2041,-5323,4127,-4044,-28211,26550,-8916,-23952,14640,-31855,-2222
2,23925,22820,22244,-15212,-11252,-25432,10409,-29610,13952,24089,-24161,30522,27521,24584,19251,-31028,8801,-14992,16645,8621,7070,-22508,-19711,32079,-18442,3875,11767,-25545,8688,2885,1978,-12477,-6622,8668,29013,-29307,17859,28199,-10282,1665,-27444,29885,22281,4,6005,4150,17199,-27486,3905,-25092,-30133,1338,-27181,-4077,-463,-20783,24685,19591,-23654,-8342,-8914,-31477,-16348,-1843,23425,-8056,-25039,14474,-3275,-6255,13045,-28010,29623,31393,18085,-13709,-3792,-1951,-30908,-26339,-13491,29270,-7718,895,-14304,-23300,-2242,-23009,6564,813,21339,-22123,-16332,-17017,1003,18143,22143,9404,23185,13671,-10866,17276,2931,19193,-17421,-8689,19094,-16660,-16105,20448,-3277,-16982,3152,-2727,-32680,-25458,14604,13367,-9744,2280,22720,-1672,-5585,16397,3402,-3070,27641,-8030,-31687,-2191,29935,29294,16821,-18650,-27293,25993,-2675,-23537,24457,31805,-13075,28107,-4359,27716,19418,-24637,-10255,-17439,634,-1817,1647,4270,20077,27062,8149,203,15123,-15913,-28362,5635,29711,-18253,-26761,-15438,-21717,14446,-32336,-1726,26419,533,-15961,30852,-1082,19393,22851,32302,-26138,31630,24467,14695,-10492,-15587,-9051,22069,-13896,-31339,29932,1430,15317,7515,5208,13138,32578,-17545,-26790,-27079,-31351,-12757,-12617,-19803,-13388,-18760,-1447,-23724,22516,8376,-9652,25812,-19274,-6652,300,20482,8209,107,-17749,7236,19608,28994,17887,-4672,413,14055,-9543,18878,-15169,18037,-20771,-22515,6164,28885,-27477,13953,-51,-16407,17683,-589,-15276,-26668,-18511,28285,-5929,28054,-1496,-26274,4317,16740,16312,1632,-21997,-29495,-19387,30341,10550,28395,28113,20901,29386,15380,9750,-16331,16886,-24787,-29071,-4343,-27735,11825,28423,17844,11786,-17196,-19783,31803,-24248,18011,-24717,-22021,9886,22353,-16520,22361,-16866,-11497,-28271,-29337,31681,-32227,-21440,-13107,16955,-20658,31941,16603,55,-28788,29413,-29927,22656,12565,3291,-1748,-31136,-29871,-4884,30493,-18905,10318,-31973,4033,8071,-345,-6695,-21036,23828,22703,30483,-19502,-25465,25826,-5002,-13911,3985,13640,2645,24017,29445,-
12195,29340,-7771,31465,-6572,-31745,-31488,13445,29029,18305,1397,-15473,15639,16699,13333,-24833,-12557,-22716,-28296,17100,-8366,-26298,-9262,6963,32297,13398,30255,-7129,22827,-27944,20772,29898,30026,19190,-22070,-10748,-21332,18885,6647,26127,25708,18388,9875,-19705,22527,-8945,-186,18068,4717,7011,16786,-29979,21382,27308,-27432,-14052,-30815,-4512,28216,19071,-22064,-8154,-4379,-23390,-21368,-29643,23951,-18340,-20842,-19389,-4112,4055,25593,2450,32034,22206,9841,20372,19185,-93,12788,31109,10526,10441,10950,-27699,6466,-1828,6810,-999,-31439,504,8207,27117,-4178,-30917,-21869,-5945,-18716,-20109,14474,-11452,4493,14568,-14448,5015,19937,-12541,-13534,-14606,19767 diff --git a/tensorflow/lite/micro/integration_tests/seanet/add/add2.tflite b/tensorflow/lite/micro/integration_tests/seanet/add/add2.tflite new file mode 100644 index 0000000..96bd745 Binary files /dev/null and b/tensorflow/lite/micro/integration_tests/seanet/add/add2.tflite differ diff --git a/tensorflow/lite/micro/integration_tests/seanet/add/add2_golden_int16.csv b/tensorflow/lite/micro/integration_tests/seanet/add/add2_golden_int16.csv new file mode 100644 index 0000000..d4a50a7 --- /dev/null +++ b/tensorflow/lite/micro/integration_tests/seanet/add/add2_golden_int16.csv @@ -0,0 +1 @@ 
+32596,3586,-14060,-10985,-32768,22719,-11223,32767,-8068,11948,9491,-21873,-29286,-7480,32767,30501,-4339,32767,32767,16240,32767,-15180,-15145,-7271,15641,-2243,2749,-16732,-18019,-16476,32767,16719,-4599,32767,32767,32767,-1961,29,14812,-3485,-30334,-25990,18716,23037,-32768,-20133,-15756,-14344,17183,23346,-19950,-32768,-17899,-18428,3259,2824,-4312,-17937,32767,-17373,-3041,5927,-1410,-23113,-132,-29919,-17540,-4507,-8546,3312,22571,-12920,4634,4288,32767,6829,-11226,-17945,32767,16298,2459,-32650,-31865,4357,-19403,12837,-32768,-6389,-32768,-530,-17615,4547,-21413,19943,8460,15887,-32768,-30257,27602,20941,-8579,-20093,20324,-6019,32767,-6506,-19591,-12917,-8326,26912,4328,16707,12690,708,22299,-5106,32767,9936,-32768,-32768,-2809,26494,-23234,-2599,-26906,11132,-6795,22234,19552,32767,32767,-8409,-23482,-15024,438,-693,8637,-32768,32767,32767,-26366,26405,-5648,29210,22843,-32768,-17172,-22767,278,-565,32767,24649,-21428,-7361,25646,22170,16973,-7870,-24899,1289,27198,-1377,-14574,32767,-11689,-7829,3613,-29590,16935,11350,-10723,-9910,5771,15931,5875,-10660,32767,12007,-8897,-392,28197,14114,-29138,-25837,-9113,2692,-12588,16087,7768,-32768,24281,11783,13574,823,1017,-10864,2592,4862,-26343,-9213,13022,4986,-10440,-7758,1006,31710,24334,-12584,18111,7617,13932,-14083,-10415,-688,10239,22323,-11293,6331,13901,-15615,-17007,11102,13645,-7104,-19246,3308,32767,6530,-12023,-16552,-32768,2714,28024,17113,-32768,-1947,131,-8909,6315,2563,-4615,9584,-21847,23178,-5693,-43,-9013,14235,-18734,18568,21729,22278,2153,32767,24006,9948,13963,15580,-32768,16031,16116,-23955,9197,-25002,-32768,30131,-32768,25685,-5957,17956,-9285,-10990,28697,-32768,-21144,20295,20568,-20932,-2650,-17910,9647,18615,-21106,-2291,-30345,2616,-7823,-14064,32767,-8614,-26759,32282,-12371,-25638,22163,-32340,4311,-140,24634,-9368,27401,1429,31743,16420,28989,6845,-19907,-29214,-5308,-32768,2810,21399,-2282,-6635,25999,-9954,32767,10058,13471,21030,16390,23943,-32768,-7323,13954,9967,9082,24781,
11828,14821,-23398,8458,-25112,-16789,-32768,32767,-14919,-26255,2952,11842,2984,9414,-23529,-2815,-32768,-7180,-14513,-25023,-8861,11653,16039,26542,14595,22646,32767,17694,3727,-15066,12888,19521,7743,7648,-27165,28036,-31040,-4520,4104,-32768,-8907,-13769,-31500,-2545,-32768,22147,27375,18616,-7553,-6617,32767,13235,15902,-27492,-5824,-15286,-7986,-22,-1798,32767,-4678,-24865,32767,-8130,19019,-25099,4327,11149,13784,-2914,9890,-2416,-31392,-11711,-22069,28507,-6916,5869,-25773,-2350,-15270,31191,21946,-7340,-32768,-10817,-30968,-32768,-14955,25987,21338,20023,-23644,-15234,-32768,30005,17697,6630,-5621,-14787,-15656,-29784,-26412,-32768,-9167,9294,12750,7401,32767,-11073,12390,-7126,5945,17298,-12329,-11672,30268,-10688,-8663,-6979,-9397,20835,-31998,-6787,24296,3197,-12656,-6167,1828,-6773,-6031,-4711,4345,11224,8870,6945,17819,1176,31561,-11451,-29113,-23240,4318,-22958,32767,-19217,-5847,14024,28186,32767,-7990,-7846,-32768,28675,-21262,6624,-11063,-28865,-2771,459,-846,25374,5877,32767,32767,24925,-6135,-9776,-4684,32767,31677,10646,-32768,22526,32767,32767,14729,-6407,-32768,7449,15331,8263,-32768,32767,-27996,5041,17102,-5188,16115,-9607,-11629,-18901,24862,-2275,16268,13918,-8645,-26681,-1522,5374,30276,5546,2142,28606,6093,1656,1394,-28611,-32127,32767,-26011,5793,4914,29974,-13255,-29837,-9177,13881,-9476,-6288,-31214,31649,-19062,-8950,4313,6653,14160,15408,-28801,7249,6479,2740,12540,-26527,3319,-23219,16728,-878,11989,-750,50,-7701,-22017,10942,24430,-32768,32767,-32768,-16648,-3450,178,-26680,-30275,20823,-27063,-1559,-16341,32767,15758,-26428,-10421,23067,-23110,23774,18156,21775,-3374,-3790,4553,932,4250,26153,-17984,8134,-28348,-31413,-10587,7657,25922,244,32767,-29815,-17481,-8126,-29743,-12590,30122,-975,-31616,5546,14742,6145,-7301,6203,-32768,-29233,-32768,3881,-7084,13664,-9042,-26243,-32768,15683,3071,-5988,2823,17295,-5058,16017,-5911,32767 diff --git a/tensorflow/lite/micro/integration_tests/seanet/add/add2_input0_int16.csv 
b/tensorflow/lite/micro/integration_tests/seanet/add/add2_input0_int16.csv new file mode 100644 index 0000000..530f3c2 --- /dev/null +++ b/tensorflow/lite/micro/integration_tests/seanet/add/add2_input0_int16.csv @@ -0,0 +1 @@ +12959,11429,1590,960,-28940,27799,1258,31911,5184,12580,4490,-31807,-28806,216,28596,31667,-24033,31940,31922,-5899,17694,-5319,-17918,-13930,16617,22683,-7512,-11586,-24259,-438,21992,-277,-20280,32583,27055,32694,2486,24595,11324,-19528,-31995,-4928,16515,6572,-24636,-32078,506,-3919,-3336,15741,-20680,-17438,-31275,-18682,-15305,-14199,20496,-23752,32260,-12085,12743,-4147,19465,-13159,11786,-10952,-23010,-16400,-2542,-3254,20534,-31207,26877,11052,32177,-2224,573,-25881,20800,-2277,13424,-16863,-22897,4461,-31660,-2942,-30609,3085,-30254,4071,-30361,-8057,-32347,2621,7917,6130,-21828,-21146,31811,19143,-13874,-31102,7995,-27160,32123,-20060,434,-12673,-12490,23321,-10741,19603,15599,14166,18172,17564,19459,-8097,-24622,-16451,-8448,29469,-19117,-22409,-14027,6408,-30007,11550,21007,26722,19041,-11679,-7848,-16721,11911,16421,21253,-28031,24833,20080,-18484,14748,-26124,11463,18441,-29862,-11365,-15996,-18302,11671,31801,6292,-23206,-27332,27189,9799,7219,15903,-25323,24637,30661,-22170,8532,13036,5243,15404,22201,-26309,26414,32664,6216,-8764,-3594,31115,21164,12745,24857,16154,-14389,-9605,28272,-6759,-17365,-25196,-28522,-8359,8722,29856,-416,-22269,17417,8194,27635,-23837,3470,-2020,27054,-3662,-32121,-26325,25826,-13540,-20204,-9172,14383,29431,12212,-14202,27025,27383,-1328,-25195,-17737,2243,32109,15647,-27339,2594,24235,-7636,-13067,23306,7344,4690,-26077,28104,21608,18899,-19928,2665,-30321,-13685,22464,19978,-26900,3994,-18538,-12420,-12552,-375,-3642,-3879,-25480,2789,-15478,-6089,-10227,27629,-19820,7928,20376,729,23723,28731,2890,18166,-8935,20909,-30453,30118,24084,-16042,16288,-22340,-22703,12995,-20034,4356,-17936,31751,-21131,-27020,31170,-29481,-577,-1894,27149,-11833,-1218,-29330,-12317,14321,-23785,-492,-24110,1992,-3193
8,-29016,31305,5323,-32647,14805,6625,-29354,27244,-18217,11168,22194,15182,12507,14707,-13924,20908,31644,8672,12996,-20694,-23675,-15644,-31536,4111,18138,-17107,1484,12517,4835,16872,7970,8396,3703,28238,16586,-31178,-14326,9700,14712,9115,25212,-1635,-1742,-15330,-1970,-19559,-30888,-16269,28497,-12163,-9077,5351,25638,8282,2177,-3936,10516,-28369,-28135,2948,-23713,6364,13248,-194,27564,17169,13692,32393,3777,17787,-9981,9174,20366,-6345,32264,-29378,9687,-27119,8374,4346,-31727,-28324,-19559,-11479,21103,-30389,5521,16612,-1361,13110,16349,20384,6692,25088,-29047,18573,-19096,-31759,-1828,-12896,24631,-14388,-23888,32672,-826,27901,-6239,29170,30268,25218,-25442,2352,19194,-19036,-26336,-9794,13253,-22145,5855,-24085,-6448,4498,16997,10668,-11868,-16227,-24664,-15401,-26731,-5348,30438,19462,31534,-24248,-19280,-16596,9823,5448,6822,-29876,-30808,-19984,-19917,-15472,-18224,13790,6745,-6295,-10951,29946,-3155,31977,-14934,1183,11484,125,-28207,9655,-7019,-26829,-8709,6154,31253,-27102,8498,28104,25907,-22567,-23221,25623,989,-26867,-14594,-13309,11436,-13375,32257,8529,-23876,31893,10480,-21476,-15027,17575,-24755,25925,-25420,-24672,15781,19213,22573,-2937,-20402,-29362,11022,-1606,23350,-32131,-10194,4401,-18019,-12166,31165,-6819,24419,19075,22420,14599,-10270,-12957,22086,14597,24547,-27544,9428,31336,22475,10699,-30,-27858,13221,-958,13903,-28811,22525,-29046,1025,5203,-20367,8248,-354,-17345,-5316,14779,-5031,32343,32519,-17722,-25939,-17109,14798,25244,-16806,7945,9264,4372,19933,3714,-21746,-16651,24206,-13181,20946,9143,26436,-29182,-17894,-11966,194,-113,-6276,-26044,26934,-28458,4100,27389,-3626,-932,6869,-18680,-12945,-15063,11993,31443,-31396,-9682,-28188,9276,-20283,-998,13509,17615,-27284,-15805,7882,17540,-27488,21223,-31661,-20280,-10985,-5614,-22210,-29593,6603,-29195,-11161,-4528,30443,26763,-12580,12465,13332,-25020,9446,24536,4333,14216,-15624,-12131,15113,16869,20325,-11296,18105,-28616,-21300,4898,22392,4965,659,22095,-13451,-14109,3002,
-17056,-13437,10188,-24415,-24208,-18783,21402,22455,-13564,15371,-24976,-12687,-24013,-15551,7743,22611,12372,-24135,-21140,391,5114,11425,-2396,18940,-22092,27940,-30005,23748 diff --git a/tensorflow/lite/micro/integration_tests/seanet/add/add2_input1_int16.csv b/tensorflow/lite/micro/integration_tests/seanet/add/add2_input1_int16.csv new file mode 100644 index 0000000..d059bd3 --- /dev/null +++ b/tensorflow/lite/micro/integration_tests/seanet/add/add2_input1_int16.csv @@ -0,0 +1 @@ +31985,-9316,-23008,-17615,-24810,-1786,-18352,29602,-18684,1677,8393,8190,-6715,-11433,20045,4856,24365,13249,24855,31789,27306,-15814,403,7030,2006,-32449,13738,-10087,4254,-24010,32421,25290,19162,14847,26207,13258,-6114,-31514,7561,19859,-4187,-32437,6722,25924,-21257,11133,-24147,-16364,29906,14620,-3218,-29076,13434,-3512,24498,22430,-32729,3726,24032,-10404,-20886,14161,-27078,-17586,-15320,-30568,3365,14321,-9484,9115,7315,20773,-27574,-7785,24832,13038,-17477,6445,28920,27228,-13557,-27056,-18144,774,11685,22919,-15759,-13487,-31461,-6014,12686,17119,9569,26380,2459,15828,-31407,-17992,348,6669,5007,9941,20052,25872,15777,16036,-29774,-3004,3608,10212,20237,-236,-1090,-17120,9939,-30151,26487,25207,-31826,-28269,6651,1701,-10121,24877,-22129,8380,28368,18340,2206,17887,28815,2444,-24951,-951,-14629,-22103,-14388,-14826,21715,26779,-15605,20457,25096,28854,10406,-31999,-11028,-13430,23897,-15818,28098,28687,-2181,24091,3361,20491,16050,-32142,-4642,-29689,1221,26393,-32682,32359,-24160,-31441,-23098,-10372,-8636,-24984,-23968,-3535,13218,-16164,-18393,-32251,21731,-2820,5193,11739,5776,29722,-21174,-6203,23006,14740,-29964,-14316,12118,-23139,13864,7059,-15215,31813,-2936,-13611,-30847,11949,1927,20038,-13717,24809,10354,198,-16955,9529,20622,-545,-7666,-23775,22482,11325,7226,-3904,-25929,13215,18237,6114,-10364,-13490,-8598,-13346,10926,-16612,4756,-31126,28165,-14510,7638,-28105,-32317,21607,12971,-112,-25252,-8028,23981,2649,25523,4303,-2209,19271,112,30988,11369,7749,-320,
-14221,-2509,17520,6261,32290,-27228,26516,32094,-8473,32289,-3593,-30599,-14736,-6867,-15143,-7183,-8623,-25456,28263,-27242,32716,14129,-13960,13265,18279,2804,-30847,-30793,32698,-4160,-16034,-2390,10922,30191,9387,-958,-2785,-14320,1345,29313,16256,9260,-19676,1982,29148,-26950,-572,-1903,-24857,-7901,-28685,17259,-30019,21995,19997,20513,-16114,32106,-6467,-3137,-13192,12157,-25566,-1084,8641,18547,-11799,22713,-21049,29374,4774,9318,26612,-11789,14427,-29544,7460,8365,-4013,1849,4609,19737,24338,-15225,15141,-12355,14593,-31199,32117,-6643,-27509,-2464,-15235,-6176,11247,-30040,-17691,-26396,25392,-25427,-6892,-21381,381,24169,4216,-263,16205,19208,21542,-17264,-9663,7450,2981,19689,-29991,-2818,29383,-11496,-17486,545,-22396,23058,4562,-32249,-30873,-12401,25946,19512,29510,-28085,-30846,31154,11152,-8474,-3731,-32517,1705,28839,2313,13866,32649,11484,-6432,28590,-11065,-7435,-29426,-30974,-22210,-11800,28299,11732,-28231,-22392,16326,-20346,25509,18100,1240,-7534,4768,-28544,24709,19041,4281,-29780,15514,-26424,-20718,-15442,-298,6851,-10599,-4149,2018,-27728,32145,19402,1134,29951,17477,2292,-18864,-19538,-28283,-31365,5207,27092,25088,15555,-12466,-22551,8534,7348,11062,-18547,18785,32753,-6933,21505,766,-21910,-9028,-12946,-21026,175,-28473,10081,20598,-30151,-11370,25479,11700,23557,2065,30390,-31031,15631,32389,6147,-30524,-15863,-15379,-16110,-2476,26845,3956,22936,666,17384,25961,-8147,14477,-11424,28623,-29649,-20082,24728,-29968,-9780,23804,14349,-2146,17514,26261,25921,8406,-27882,-1402,9639,28446,28512,-15619,-30263,21497,17083,28710,8239,-9517,-19387,-5855,24094,-5516,-17918,27322,-4483,6203,18829,18395,13450,-13874,4912,-21368,18115,3062,-17237,-20968,9846,-6509,19682,-10973,12763,29834,-6999,30776,3477,-23106,-2687,-14767,-26549,32622,-21879,-18236,-4403,10783,17675,-21539,1667,20453,-13987,-1325,-13134,12641,8086,-18609,-28710,14575,22314,14165,-18984,27421,28990,-11302,-21643,723,17373,1539,13045,24715,19161,-18452,-22527,23523,-12556,6205,13
928,-25383,28083,-22982,1193,8949,7468,-11292,-7181,22583,-2901,11995,-18560,14875,-10838,-23273,-31535,17295,-2362,23335,-4405,26915,-23273,14394,22356,-18002,-15306,12925,-12327,-11100,-5560,-19518,-22074,-17311,32289,-482,24238,-27206,-7967,-15971,-22473,-1535,31851,29873,-16090,32372,-5475,-19648,6515,-10471,-25701,-27319,-24575,25741,-20500,-8634,-29360,-8171,-27359,22888,-1981,-23590,7284,1492,20803,-11962,29683,19254 diff --git a/tensorflow/lite/micro/integration_tests/seanet/add/add3.tflite b/tensorflow/lite/micro/integration_tests/seanet/add/add3.tflite new file mode 100644 index 0000000..fc64809 Binary files /dev/null and b/tensorflow/lite/micro/integration_tests/seanet/add/add3.tflite differ diff --git a/tensorflow/lite/micro/integration_tests/seanet/add/add3_golden_int16.csv b/tensorflow/lite/micro/integration_tests/seanet/add/add3_golden_int16.csv new file mode 100644 index 0000000..da0cdae --- /dev/null +++ b/tensorflow/lite/micro/integration_tests/seanet/add/add3_golden_int16.csv @@ -0,0 +1 @@ 
+-21144,23517,-17492,-3901,-15455,-5094,-17612,1483,32767,24834,10926,20410,-30187,21521,1328,-14379,1301,32767,16793,27255,-16060,20885,-27860,-18626,16214,27531,14585,-24507,7868,9961,-13266,-2625,174,10492,16392,-1773,32767,-3426,5324,-15524,14630,294,15552,6738,-4929,28956,6300,14763,-8242,-178,-12028,-5676,11572,-4937,7811,-27892,17422,15814,6241,6934,31393,14227,13931,-27746,18131,14423,-10276,1552,17717,19453,26030,-15190,16397,20705,28202,-7544,-25632,-8456,32767,-31283,-20732,-16259,27877,1061,32252,-24973,-16985,-2436,-10356,-24756,-5852,-4167,24631,10577,6408,2203,1612,523,8882,14715,3311,-9878,-26643,6106,-28825,-18195,-15532,25028,-21512,-1638,2288,-12281,19409,-2221,402,24228,-18533,4882,-20802,9886,21565,1238,23857,-32768,-21491,13457,10977,-32246,7896,22398,-30305,6746,8928,29742,9806,-15110,-10518,32767,-15457,7009,-17754,-32768,-1316,24529,-14099,-10368,28757,-10348,-5916,-44,11065,6218,28143,3420,-8883,-8323,-16779,15245,-2122,26393,-21981,7265,25496,-25695,-17114,10439,13434,20223,-22767,-14418,31808,29483,-9074,19202,18922,3147,-14520,1900,31172,8933,-28900,-5692,5724,-2794,-24698,-5408,-25907,-1775,-404,-5874,22866,-11287,-3506,6232,26966,-17483,826,-27151,-13120,16720,27299,-13621,20762,-8100,-32768,17450,-6622,-9517,-23035,11379,4403,-20694,-28186,1774,-4210,-26086,28383,-10847,-11463,-5489,-26701,-18252,4084,12468,1346,19747,-4751,-28890,-32768,-28024,-3769,-2844,30002,4629,2741,1607,-3631,16202,-30681,10014,-2167,15779,9192,23931,10896,14010,26211,-16383,16924,-12675,11401,-3113,1061,-409,-12202,6255,-1942,5872,7792,-2943,10559,20641,12479,14398,-8289,487,-16571,6731,10990,1828,6531,18012,-25156,-23055,9235,28458,8946,-15046,-6032,14650,-28792,89,-23704,693,15384,-25453,-13022,-12751,-7110,9654,-11973,-32768,-7048,30181,4965,3210,-442,28934,-11749,-3657,-16697,12244,20053,-28755,21327,5254,-11079,-3105,-9251,-4803,15090,-22342,405,6383,-18413,27155,2598,-814,5921,-32768,12370,32403,-4392,2224,6666,-7959,9711,11321,-22294,16520,3726,-16921,1
1286,-25866,-24798,19403,-9743,-1466,-13878,-24425,-9223,-15853,-18409,-8076,-11454,-31989,3385,-10442,-15444,18334,22506,1041,24405,27430,28705,-5375,16504,15833,8885,-736,12276,-1368,-17336,-32768,21881,-24233,-2149,-7344,-4250,3333,-15399,25172,-7690,-9902,-16031,-20438,-17705,-23677,10986,31059,9491,-12735,8966,-20189,-2245,-23808,-15979,-14891,11341,4780,11261,-6497,19498,-3940,-18133,12125,12886,669,-14140,-1865,9748,-7158,-1994,-4830,-20263,11772,20956,20871,22537,-6359,-10738,-13387,-7546,-5783,4149,18444,12334,-13487,-27460,-26864,11936,-15863,-20918,12547,5277,18812,2958,3432,19648,19380,-18534,-4422,-6997,-5497,-22872,21695,-19435,-5548,17440,19940,-18738,16305,-6574,-416,25467,4596,-32768,-16437,-4137,18244,5222,-10776,-8477,-278,28430,18109,3160,291,11429,21532,27475,32767,-14309,28910,-25402,6836,-15094,4176,-400,-19622,-10980,2433,326,25728,8458,-23990,8929,-14581,4733,-21515,22217,336,-16737,-1711,-5101,14727,21849,-25332,-13165,6827,-6162,25511,22107,3792,-18881,-11236,-1476,32767,1856,11614,1531,5339,-32768,32767,-15569,-28212,26329,8623,14245,-21754,19241,-11106,6392,26017,-10492,4852,-559,6958,30771,21828,21035,-12135,-9340,-17343,20883,15554,-16871,30603,19141,-10270,-21000,-15749,-32768,-3773,3952,12343,785,-22813,-4919,-14058,-10196,-16774,10024,26516,14032,-17556,19380,5484,9868,-6631,-3208,12644,-3072,8798,-10538,12804,-2444,-6237,9418,-32768,-13641,-14481,-5381,-17263,-16943,-13277,32767,-14483,5476,-22095,-9801,27177,6113,11517,-11310,27679,32767,2344,5118,-6456,12250,-14056,-7258,28595,-32768,-16222,28418,7206,-24425,17161,-12452,-10651,9013,26592,-23152,16765,19589,23233,17942,-19146,-32768,-1348,10700,-12739,29184,21305,-26778,1509,8867,12699,-9092,12330,-24708,-9755,30465,-18794,-32080,7700,-26162,11341,304,-12255,-10029,-3987,9797,-20903,1976,14234,17768,-19206,28457,4306,12796,-29681,-7769,27725,29644,17711,-32688,-12024 diff --git a/tensorflow/lite/micro/integration_tests/seanet/add/add3_input0_int16.csv 
b/tensorflow/lite/micro/integration_tests/seanet/add/add3_input0_int16.csv new file mode 100644 index 0000000..84a2cbd --- /dev/null +++ b/tensorflow/lite/micro/integration_tests/seanet/add/add3_input0_int16.csv @@ -0,0 +1 @@ +-13987,10295,17236,-28201,-30131,11396,14202,25509,26407,-868,173,14825,-21329,3913,14155,2084,18368,15759,29024,18082,14776,13577,-21483,-782,-18769,19367,-8622,-26359,2537,26552,-27746,8898,28473,-12965,11109,24390,27992,-6525,28597,-16255,10263,-1325,-4946,-9507,18705,26657,-22252,5790,6228,9519,-3448,-23296,32042,3413,-18578,-5647,-8269,25230,-8407,-18669,12407,16555,22569,-28683,1547,15640,26011,1169,-4573,13238,19265,-11183,17742,29686,5887,2163,-2776,-17074,30768,-17993,-29763,-17450,3891,-16259,31377,-24005,-5014,-25226,24942,-9150,-21687,-1387,26140,-1973,-16323,-7702,949,-14799,-16851,9847,-28268,2249,-17176,23165,-25385,-1690,9130,27895,-29137,12722,-22576,22145,-2093,30860,-24814,9527,-2914,-22804,-27858,-8287,17223,11474,23023,-16345,2783,-16997,-27876,-31158,31617,24058,-26780,-10743,-15921,10172,-24476,-28779,27485,30823,-10945,11569,13458,-24011,-16244,-2337,14467,-17251,29336,13752,2696,10859,-9686,32688,10654,-7468,18164,26622,-12348,-2490,1189,6049,-31190,-22246,23408,-24077,-1918,-14516,27116,32629,-28741,-14094,26096,9446,-9651,11766,13283,-24180,-6731,-21339,24185,-18376,-12119,25843,4897,28518,-7908,-22711,-7354,-27376,11274,-22588,-6625,-14990,24197,14931,19055,12333,21720,-14977,16249,-17164,18092,-10876,5073,-27834,-19833,20695,-7887,-805,2548,-20890,-11331,-30988,-22135,-19678,7806,-18067,9430,-13638,-23929,28764,-29884,-18668,-31499,-15214,22294,28578,11298,-22012,-26422,-15611,4780,-13283,16711,-7050,31000,8269,30248,3846,-16548,21722,9231,-11852,-12257,23812,19519,12078,5529,1250,19394,380,-1082,-22979,27797,-13144,28039,-2262,12785,2184,-7094,24136,-31268,23609,1278,3025,-22196,13952,-9997,-6471,13229,7890,19176,20053,-22328,3102,6142,28765,-27908,12471,8650,25388,-7586,-25592,-25261,21188,27609,-29989,12759,-236
42,30370,-24038,-28703,-23268,-792,31120,-23743,12778,30188,12989,23746,-10235,12139,22063,8300,-25983,19309,-6315,-17739,10738,31460,-10080,-1606,5434,-12003,-4434,4099,11695,553,-21307,20987,-30946,-15324,30448,1910,-12410,7464,13919,11015,-9057,-1052,1512,-5235,-22387,29684,-20670,-16400,26188,22084,30915,-30587,-24064,-29912,-20073,-1278,10094,-15117,-30208,21995,-4059,12796,24111,2497,-28180,32565,8302,28929,-310,-7536,-20299,20826,14991,-21969,6882,-8235,-27977,-5548,-15234,10920,31705,-19547,-30474,-10139,20831,-21776,20090,15456,-24687,-24539,-7603,-21756,11141,-7449,-22158,31309,-12354,-30573,-24261,-15134,16157,9266,-13458,2605,7718,9302,-31421,-15036,-26314,19271,-17657,-5685,28800,-31838,-5283,27879,-23945,10344,-21313,5422,11456,28965,-7710,-27110,22450,30985,793,9230,12775,8709,-8945,-23442,-3346,-4427,-7591,-27929,-12964,-2280,8478,-1744,20559,1957,-6354,-7973,23677,21053,-30537,2309,32379,-15125,20293,-12753,30951,9462,-11588,-8796,2799,30503,-7809,-24564,-30468,21080,21640,32654,13468,7838,-15608,17185,18670,12280,-10680,24958,19874,30216,18579,-27837,31694,-15794,23199,9584,-11605,26912,10648,2138,-9086,32577,5004,10200,-5363,-11617,-23319,32374,-18987,-5943,-7715,-1752,-26201,20384,-17665,25096,-13087,25555,15767,-5697,26822,-3731,-26507,12901,19212,27144,31851,-21241,24511,5653,30270,-31887,22576,-12812,-8674,8809,8852,24367,-26406,30921,15012,7749,31526,-15139,1591,16603,-11630,21717,7027,-2887,-27447,17680,1245,-9464,24394,-25397,13260,-2347,-18863,-13354,-14712,-22547,-14443,-24421,101,-23279,-30925,-19459,-2571,-25472,-8420,-26782,19048,17613,1048,14566,6062,-30907,31989,-11545,-14676,-29657,11302,-18103,9428,-3283,-365,10095,-31934,-7781,18809,13872,-1423,-19647,-32359,23159,19585,4212,-1892,-24374,20037,1662,-21408,-26575,23084,27518,23417,-9469,9704,9534,-6903,-9290,21652,-16900,4768,17275,-7328,-30916,-18423,14448,26416,-4174,24949,2253,17432,7708,4999,-12117,-16485,-32261,30629,13467,-32036,29182,3123,-20843,27122,-5782,27741,17814,24455
,-1268,1363,8370,14645,-20349,25462,-4373,14828,14047,-23201,-11580,23184,28504,-3240,27160,27087,-15236,8777,28477,-26442,12420,-27150,-21477,13598,18568,6894,-15943,-25424 diff --git a/tensorflow/lite/micro/integration_tests/seanet/add/add3_input1_int16.csv b/tensorflow/lite/micro/integration_tests/seanet/add/add3_input1_int16.csv new file mode 100644 index 0000000..0fb83a1 --- /dev/null +++ b/tensorflow/lite/micro/integration_tests/seanet/add/add3_input1_int16.csv @@ -0,0 +1 @@ +-17395,22630,-32359,12771,-299,-13401,-30614,-14079,27985,31219,13389,15966,-23987,24145,-7188,-19062,-9849,31698,2642,22390,-29055,17331,-21017,-22521,31736,21930,23395,-13833,8137,-4256,918,-8793,-17544,21047,13321,-17403,27371,-162,-11259,-9038,11671,1189,22296,14253,-17756,19143,21661,14625,-14066,-6157,-12708,7518,-5690,-8228,21237,-30933,26679,3799,12953,20210,31042,7249,3133,-16385,21433,8062,-28917,1188,24738,15774,20139,-11790,9189,7061,31166,-10668,-29932,204,21350,-27422,-7047,-9201,32010,11452,20271,-15877,-17854,12725,-28350,-24874,6297,-4282,14123,14296,18097,7525,1400,9876,21482,12036,21721,-13605,-22200,-6906,-19775,-21423,-24882,13519,-8401,-9958,16907,-28983,25282,-21991,15974,23987,-21077,20254,-8321,17381,15897,-5627,15111,-31374,-28284,27225,30947,-20400,-9966,12663,-20733,15034,20959,30396,27379,-716,-30136,25253,-12268,1443,-30326,-32282,8506,31758,-26440,-2048,17227,-21360,-8990,-6827,19710,-12707,28120,8883,-22302,-26886,-13026,20385,-3363,28831,-7700,22850,16896,-16724,-19945,21949,-317,4630,-10198,-9020,23016,30529,-5190,16382,15090,18969,-13739,15657,23423,22497,-28142,-23150,4017,-21239,-25578,7485,-27416,14882,-7531,6832,32379,-4594,-19423,-1614,21427,-29289,-12527,-24199,-26342,31360,22439,-10043,22483,7355,-31210,8649,-3261,-11255,-30045,27086,12506,-6236,-21012,14465,-10069,-20956,29180,-4893,764,-24721,-14345,-10904,24692,24891,-12242,6569,-12916,-21959,-29514,-24882,-7637,4772,26639,10116,-15949,-3172,-23351,17616,-27579,-1178,-8434,26884,19000,14710,128
6,9773,28930,-21018,8810,-15894,14759,10487,-16027,7693,-32562,9138,-10373,5891,14050,-18689,32546,10773,14618,15899,3605,-8100,-14235,12351,5325,-2663,-3893,9743,-17149,-30415,7577,17213,28458,-26365,-12847,2262,-30836,16072,-13526,-12359,1784,-12738,-24044,-1006,-27725,26918,3112,-27066,-8213,17873,20942,-4004,-19375,27641,-29325,1866,-28197,1364,19595,-19316,14302,10429,-2622,-10533,-31050,354,19642,-30989,7987,10650,-25302,26251,2864,12284,-5776,-26241,24839,21037,-6617,10488,3579,-18513,5126,19634,-26884,19464,7868,-6940,-4573,-19060,-20404,7635,-25810,-21093,1934,-15164,7263,-7064,-21944,-16272,-4720,-20675,-9537,-10367,-27059,7610,26244,18862,9836,28707,17416,-6446,25088,32220,-2014,-10259,28867,-5982,-16279,-23563,30490,-20434,-9466,-28847,6942,23125,-12699,18103,4083,-24762,-29443,-9850,-6566,-24506,27141,31419,16371,-1911,-8452,-17234,16295,-14278,-10300,-28472,8230,14299,12286,-12840,18284,14731,-13022,31390,3899,11839,-13921,-20267,31900,-5547,-19852,8968,-31483,27835,22505,18637,9774,-3046,3644,-30539,-28648,-7639,-632,14816,9805,-11081,-19300,-31099,17506,-14861,-8421,23585,7941,17951,4742,-8583,23051,27903,-17922,-20230,-21775,12256,-29694,6605,-14575,-19511,29498,5328,-29049,27370,-2635,-2260,12435,10548,-25653,-1301,-18258,9040,-13916,-21712,-15360,9392,24401,10726,-3756,7021,-1448,14203,15094,31290,-314,15945,-21528,-6025,-24624,12397,-17280,-30881,-14897,8672,-19916,28661,4086,-26290,18276,-3468,-14345,-14735,31152,5227,-19583,14228,-19015,29210,11338,-23130,-32202,-1401,-4059,14785,29636,21217,-31371,-25863,-18754,30301,15541,-941,-1634,-12284,-25788,29176,-11241,-29440,27030,5131,2399,-10403,4483,-23082,3063,12476,-3519,5001,-11046,15849,24466,22582,27785,2129,-22565,-22201,31700,3999,-5001,29534,25109,-921,-17613,-10279,-29640,4347,20114,15184,15489,-8893,6060,-15763,3292,-15470,29087,20875,6348,-22341,14855,2993,31467,-28144,3238,24773,14703,3819,-1726,9936,-971,-7477,5338,-25032,-11998,-29620,-15300,-20438,-8676,3781,31653,-30107,4137,-26114,
3095,21075,6515,27580,2604,19794,25153,-11710,12228,-14028,9186,-13058,-3172,21819,-30131,-23013,24331,13472,-10890,32690,-24394,-29634,13737,17288,-30005,9838,19391,25582,29722,-13369,-22447,-20769,4818,4245,17850,24371,-20079,-15052,14560,-1615,-22342,-22,-29731,-12900,32413,-32351,-26937,-6369,-29591,4761,-8386,-668,-5166,-19386,-5676,-23801,-14499,689,31452,-29200,17392,21811,8061,-19732,3798,25768,25038,17579,-30436,1004 diff --git a/tensorflow/lite/micro/integration_tests/seanet/add/add4.tflite b/tensorflow/lite/micro/integration_tests/seanet/add/add4.tflite new file mode 100644 index 0000000..6322007 Binary files /dev/null and b/tensorflow/lite/micro/integration_tests/seanet/add/add4.tflite differ diff --git a/tensorflow/lite/micro/integration_tests/seanet/add/add4_golden_int16.csv b/tensorflow/lite/micro/integration_tests/seanet/add/add4_golden_int16.csv new file mode 100644 index 0000000..1ba91a3 --- /dev/null +++ b/tensorflow/lite/micro/integration_tests/seanet/add/add4_golden_int16.csv @@ -0,0 +1 @@ 
+4760,-8681,-23717,5090,6228,3500,-22985,-32768,19658,-22715,-32768,17954,32767,32767,11817,11010,-18700,32767,-32768,-28640,-6586,15487,6767,-16129,-694,10350,-32768,-25237,-29536,-9736,32767,32767,-2773,-21195,16413,-25161,31045,-9139,-15913,7511,17306,-2092,-2426,-31118,4588,23870,3390,10044,-352,-31524,-13169,-7483,11401,-12467,23226,18342,15925,282,-3931,2082,-32768,16916,19554,-27720,7974,4170,-18147,-23254,-29093,-32768,-16847,-1073,-29010,-18280,-23512,22874,-32768,-32768,1035,-13475,-19779,25542,-32768,-6321,-13284,32767,-12635,-16317,-32768,25100,-7324,-32768,4575,-14997,-32768,2418,-32768,9385,-4966,2881,32767,23848,-21510,-2689,-18682,19007,-11176,-15333,-3216,8063,10413,18166,-32768,20333,-9316,-32768,-26843,-17940,-6170,15206,5871,-31371,-32768,-7633,15306,644,-16811,-2421,12272,-14343,-30367,-9087,25571,-24194,-3277,8684,-32768,-368,-14269,20558,32767,16680,-18848,29748,-32768,-16669,371,-18193,27036,-32768,18354,6966,-11409,-8229,-9839,32767,1470,-32768,31347,-19560,22749,3322,-4740,-6489,-26755,32767,29463,-20668,-12501,-24150,-26971,32767,-20401,-14446,28983,8103,-1364,-6542,-32768,-3564,-8310,32767,28289,-22564,5257,-23305,18151,-17449,-6405,-30352,-32768,-27761,-10986,16097,-22454,7735,25469,-26668,32767,-10337,15446,-4356,-2611,-32768,-19664,2269,7077,14486,-5871,24413,-3949,-20776,26784,12855,20671,32767,-28195,29501,13017,-20755,3419,21540,13714,-32768,32767,18349,-27846,31786,-21947,-3820,-8445,-24714,-7808,20323,23777,32767,-15931,14989,-32768,-20660,-10579,-7827,21383,-22407,-5399,12610,-16556,23918,20888,20368,-10633,-19991,-6157,5605,10161,-2768,-829,30993,-12830,14854,-7369,18558,6919,-2348,430,9124,-6831,11451,32767,5430,-11317,-25549,2041,-16936,32767,4273,-1126,-6284,-32768,28950,-19536,21608,29294,-27474,-31118,5081,12152,32767,19204,13085,32767,-32768,3522,19158,-32768,32767,5698,24423,-178,-22168,-15977,-20612,32767,-26671,1974,-14392,32767,29224,13305,4743,3095,-32768,-24620,12897,-53,-13133,28485,32767,-4472,-25256,7255,-22873,25
871,32767,26346,6008,-15472,9083,-28260,-6476,32767,7861,25427,29470,32767,-32768,589,-30545,-21727,-19810,2930,-3453,-10112,32767,23580,-32768,-20845,-32768,32767,10588,-25199,-22852,-28026,-32768,-26204,-20701,32767,27342,32767,-11151,-15554,10365,-5599,32767,32767,-25726,-1321,14050,-15456,-3373,880,32767,25389,-30411,-6586,21326,32767,-21198,-26463,-467,13466,-10,125,-13780,20939,-312,-3643,-15024,16025,6839,-15568,9592,21336,32767,-32768,16014,26588,1230,29107,25065,32767,6625,11967,7167,-4275,-18455,-28791,24194,-29882,-9880,17191,32767,32767,-10435,-32768,8576,25954,6355,-5389,20789,32767,10389,-1736,-5425,-12029,-32768,32767,10561,-18425,-24143,5203,6795,4922,-6760,-32768,32767,2812,31213,7612,-9820,9316,-5512,-18159,32767,-20108,10313,-32768,10208,-9344,7839,-2043,-6857,-8556,-19716,18478,16824,-32768,10488,-1186,17660,32767,32767,15990,-97,-9812,13520,28445,6140,-32768,-25516,14100,1426,8485,-25717,8390,3706,-26360,-1428,-13763,-10963,32767,-566,-5718,-32768,-8003,129,5681,9486,-13509,-13418,-6484,-32768,-1673,3761,13450,20132,-32768,19772,32767,-8412,-4540,32767,-13374,5572,-20038,-6704,27026,7796,-14555,28181,14033,564,4920,-4505,15482,31235,25659,-32768,-8730,32767,10922,-11357,-32768,9,-6563,7806,-528,13929,-32768,-8701,-8234,-1937,2785,16979,353,-8935,28043,25283,-32768,15014,-16959,-10665,-16490,-10385,-76,-21043,-3475,12707,22447,-13787,19798,-23647,-29628,18665,11725,-10587,25949,-25224,-3076,-24428,-32086,576,16487,-30468,2927,-32768,-5902,-10080,-1612,-32768,-8286,-10226,-23619,3047,-32768,15926,-1026,-6139,-14081,-14452,4905,-32768,-9053,2592,-30785,-6796,32767,32472,11721,-15561,-31483,31977,31347,9970,14925,11702,11408,14055,1721,-32768,-2891,-32768,-196,-19534,-21454,-2318,9420,31098,31642,32767,-32768,8619,29416,-1052,-32768,-10516,29148,-2747,-19650,-19493,-12347,-14360,14292,16216,-8524,32767,-8570,9775,15816,22888,8228,-32768,32767,-19237,32767,32767,-32768,26037,9580,-16675,12346,11064,-5667,3782,12942,18633,-10472,12431,-18637,11003,190
31,-22650,16726,-4502,-16262,-12330,-27164,-17085,17836,-32768,8971,32767,-26491,7956,-13936,32767,-32768,13492,-14095,-4446,32767,-6134,-17299,-22952,754,1184,30336,-10133,-19854,12963,11648,5711,32767,-4885,10301,-10847,13182,-4893,-6728,-16878,-24020,19973,-25687,31172,20007,-6619,-9540,7869,-11001,12878,4490,23330,-29841,5577,-2635,13363,13225,32767,-6075,-24903,7487,-17929,-32768,-32768,22635,6509,32767,-15437,-29770,-13139,29926,32767,-8276,-8012,9491,7343,647,-32768,-32768,14072,13565,32638,-13798,13155,11114,7779,22823,23093,-2697,10452,-21821,-14631,8490,3263,-32768,-32768,-32768,-21430,6704,13273,31793,3541,-32768,-14128,8819,19914,-19231,32767,-17690,-32768,24742,22483,-5531,-16251,12459,-29155,-32768,32767,32767,-12998,-31364,-32768,-6524,14814,-14436,6177,32767,-31799,32767,-32768,-13683,-4800,-15933,25147,-32768,15252,21463,-11855,-4608,21719,-15385,-27945,-25142,7602,-12043,-28728,-14197,589,23301,8360,1155,6964,-9304,29380,7280,13476,7013,10992,32767,-32768,-26780,32767,-17833,-32768,-32768,14723,3779,-16924,32767,-32592,22108,31330,14747,-32768,-14475,-14117,-32768,-10382,-9834,32767,-2106,-32768,32767,-6286,-28768,13838,32767,-32768,-32689,11697,21420,26196,-25622,-22116,8670,25734,-32768,-12871,-5543,-32768,2773,-4924,-4170,15920,-29527,-7963,15888,25823,-21545,-6528,32767,-27546,23230,-11923,-32768,-8597,9332,6456,-9229,-26138,-198,25306,32767,1067,31737,12202,19757,-12597,32767,-32768,-11364,24587,-29841,7688,-32768,13624,107,-15038,9031,22284,-1049,1020,19724,-12372,24312,5365,-12804,272,5887,24617,-32768,-25596,25725,-28093,12905,32767,-23788,10061,-12202,-16278,11559,21729,20655,4127,14685,-22571,-643,-10145,15983,17423,9939,12534,32767,1041,7176,5582,-30540,-13737,-18770,32767,32483,-32768,-5529,7411,282,2382,-13329,32767,-32768,-4816,-2996,-4032,32767,32113,32767,-11576,25538,22021,1823,-32768,13494,32767,-28161,-48,2761,32767,-32768,24038,-27075,-5290,-32768,-21659,719,-9415,-2928,-5392,11910,32767,7379,-14031,6503,32767,-5028,24034,24258,
-14630,32767,32767,32494,-26873,12244,-14558,32767,27102,13282,25666,6615,32767,-16073,-22770,-32768,-19640,10328,19344,32767,-14598,22274,9937,-15343,7108,-13168,6011,-8186,21488,-31677,32767,21041,16239,32767,-12803,-32768,-13925,28784,32767,20847,28342,-32768,14912,-32768,29961,-11400,-11168,-8502,23335,-8684,1500,-7666,-11131,27227,26985,32767,13050,-21490,-2284,-29706,32767,-32768,17743,1954,2156,21577,-16693,-5784,-119,21479,-1723,29650,9786,-13118,-32768,-32768,-30311,28838,32288,-32768,8925,-15611,7751,19576,-18717,-1986,-8885,-17088,4142,13367,25828,-2755,-29471,-9542,25302,-8477,12462,20766,-20251,-2806,-16303,-32768,32767,31053,-29795,4170,-13796,-9498,-5561,-7124,-32768,32767,7603,-32768,32767,2420,-32768,-4157,1296,-19636,-32768,32767,-16280,32767,-32768,-32768,32767,-24064,-7126,20668,-13418,-32768,-32768,21030,-16093,14845,7677,-18099,27571,30465,-5714,28386,-19750,20734,29850,71,21398,-15961,-32261,2593,-12469,32767,-2570,-8052,-24810,-32768,-18338,-19147,32767,686,32767,-27960,7387,19736,-13050,-31829,22812,22151,21855,28713,-14820,32767,27214,-28490,21502,18499,32767,6749,20558,-8672,-32768,-32768,-32768,-21576,-11794,13520,24605,25,1634,-24131,-29620,-32768,-30507,-23114,13592,-32768,24676,32767,-29875,-24831,10586,19570,-5695,-14163,7261,2941,5902,32767,5429,21304,11733,-32768,32767,32767,18483,-32768,32767,-29973,-31756,-7400,10321,-20718,-19810,28799,32767,-8264,25751,14546,26951,26999,14617,32767,-10648,-7344,32767,-32768,-21501,32767,6610,20017,5349,32767,-5263,-12623,12920,-23433,-23822,-9705,-7479,2286,4194,-858,-14840,29744,29784,31190,-1956,-32768,-12719,-28260,-1069,-15641,32767,-19912,-32768,28540,749,-15226,5061,-32768,-19059,11283,-22619,-5955,-32768,21350,-114,-20772,-443,-5962,-560,18139,23474,-32768,29204,12526,-1337,-16923,-11897,5753,4814,-2018,-1612,28970,7605,17304,32761,15637,4289,15448,-32768,-8081,-8681,-32768,32767,16784,-32768,32767,7939,6434,3873,8732,32767,22799,-1335,-32768,-29169,-18896,12775,-22212,32767,32767,-12045,
-10262,-1248,-27360,-6296,32767,16814,1922,-13903,-12569,-32768,32767,32767,-475,6739,-11245,24622,-4471,-1834,12976,-9102,17341,-15471,-4804,-32768,31009,22850,-32768,13088,-3742,10396,2865,32767,-29230,-23961,8196,8782,-32768,-5104,-32758,32767,18527,32767,18401,23191,23256,-13428,32767,5897,-13244,32767,20036,-11918,13232,32767,23810,-24592,-12677,-32768,9387,7305,-8413,28006,-4093,2282,-18744,5991,32767,10169,-21081,-24884,-1981,6611,-32768,32767,-32768,-17069,-2901,-22353,-16967,-23156,26022,7604,-17744,14397,-27952,9794,32767,-2305,-4757,-21973,32767,29688,32767,-19726,-13427,-11054,-25685,-2856,-21998,-7248,-2165,-32768,32767,1822,7723,-12169,-32768,1295,-440,15354,-16335,-32768,2338,32767,-32768,10326,-8300,27170,32767,-32768,-22985,-16752,27300,-32768,-2317,-16222,30666,10725,9277,22066,-3736,24514,22878,-25228,21491,-13314,-32768,24750,24144,9260,-18070,-17874,15933,-32768,21962,1602,-5309,21732,2512,-22625,32767,-3619,12562,14442,-23965,16654,13173,-9923,17593,-24066,26305,1522,-32768,-6981,-15958,3480,7868,-32231,32767,14193,-17780,32767,10621,-13723,32318,27455,-32768,14256,-14709,28371,31703,32767,32767,-32768,6551,1109,4137,2296,-32768,-4833,21665,-21441,10071,-13944,-4022,12058,20229,6856,32767,1572,20429,5714,9315,-30034,-25756,9568,-13436,32767,16407,9505,-19269,32767,-16725,-17637,-4283,-4752,32767,-1408,17471,-1849,4630,6251,12323,32767,-1863,-23456,-15235,-22274,28537,-32768,14807,-865,-32768,7260,30502,15029,-32768,-9686,-32768,-29954,-13537,-8387,11128,27685,-2606,32767,9588,1948,448,6124,32767,-13391,-32768,32767,7093,-32768,-24097,32767,-22618,9818,-2381,2467,10774,4626,-32117,-6234,-26665,32767,5252,9263,-12556,-32768,4236,22799,-22367,-15395,26918,32767,-18780,16469,16196,1213,-15893,-32768,-24190,-24657,4408,32767,19170,9121,6070,32767,14311,26314,-22787,-20599,4765,-4346,411,-16253,-16669,6135,32767,-32768,13120,-9480,-32768,-27324,-31730,-31547,-6163,-18958,32767,-7962,-8487,-17207,-22625,11601,22837,-10446,-14006,-22322,8272,-18918,202
06,-28047,29677,-32768,18126,8483,-32768,-12575,-18171,24205,-28985,-4899,-10943,-32768,1679,21266,32767,32767,-32768,6412,-24363,16401,-8320,3820,11094,-14748,1058,-16121,32767,-32768,-4381,-22327,-32768,5105,30033,31938,-8185,25938,-14014,11411,-2687,-24367,-14776,24854,32767,-10435,-32768,16846,18711,-1370,-8560,-32768,-19732,32767,10027,-16976,-14544,32767,-32768,-7407,-19582,26189,4007,8320,17433,18685,-26845,-31997,-1882,-24802,7349,24351,-32768,5674,23827,-18001,18849,19621,4664,32767,-25493,8174,22332,8951,9749,17052,-28865,23911,-1028,-13678,2036,18656,16650,32767,8642,4884,-26968,-6972,25163,-5094,-17665,-32375,18253,-25948,32767,27506,22046,21141,-23349,26761,3506,17547,-9593,13674,16130,32767,29272,11541,-10629,-4720,-15122,-32768,32767,-21353,32767,32767,32767,20609,32767,32767,836,-32768,-14709,30532,-16019,7536,-32768,32767,20953,4388,32767,32767,-16950,-25489,-29177,7590,18084,5036,-27262,4157,28243,5887,-30986,4580,-21,-23548,-32768,15204,-10498,17857,-32310,-26281,19658,32767,25533,-17030,-31353,32767,446,-14893,32767,-32768,-2146,-32768,-9122,5350,-5480,-32768,14630,-16518,-11408,28197,26111,-546,-331,-18302,32767,31256,-28874,32767,14764,-9040,-32768,32718,6859,-32768,16822,-9578,-13597,14146,-12404,-20048,-32768,32767,32767,-32171,-12075,-32768,-22912,-32768,-13164,-19339,-13641,27450,30592,23064,437,26934,-4312,-32768,26468,32767,24214,14225,14797,19623,-1062,-14708,32767,25553,30028,-4236,-1199,32767,-18607,-1107,14550,5825,-32768,12852,-32768,32767,4748,24,-13437,16829,30586,32245,5871,32767,5830,177,-27480,-32768,-21826,-32768,-32768,-32768,-17585,-32768,32767,10571,12803,32767,7232,-32363,14935,18208,26107,18906,9082,3385,20683,19022,17452,-23957,-5377,-32768,13315,20529,-27660,-32768,-18995,1725,-19089,-28680,-32768,20678,-31323,-5276,-12472,6309,-2799,14539,-26138,32767,32767,-31866,-1726,-19792,-24876,20819,-20617,-6558,-30912,4083,8366,32767,-4587,6139,11975,-25369,-31248,-2230,-20444,-5205,-3493,-3928,-12372,25799,-21523,-13286,-555,29
985,-9813,-14935,7896,-32768,-18248,12455,-31298,-26713,32767,-32768,17671,-32768,-32768,-7898,4558,-4865,18986,23277,-11498,-32681,-2131,22297,-32768,-32768,14666,32767,-3785,32767,-32768,28534,-26151,-24992,21777,-32768,32398,17057,-8894,-20782,10731,5390,-32768,32767,-20175,-32768,-19189,-12464,24219,-14532,-32623,31183,29930,-32768,-23923,-24270,-30454,-7999,-16908,32767,-15766,-3713,29053,785,22678,-25348,-408,-13592,-24449,-5436,-32768,-12709,-31934,-17028,-1851,32767,8672,-1759,-18835,-26646,-816,19526,23569,-18425,-10011,-30550,25175,-4607,-3907,16617,7868,-6301,-4880,9363,-1256,-19403,-30402,10239,14267,7383,20075,32767,-1698,-29875,-1428,22245,5092,32767,32767,1221,-32768,14703,103,-28239,-11446,18253,32767,-15671,31853,21785,32767,32767,28524,-6112,-9940,32767,12870,-8798,-3269,26857,-32768,10483,32767,8877,-32768,-32666,14743,8684,-22522,-25527,-9927,-30648,-17539,6440,-21909,6974,23844,17655,32767,16034,-4301,-17146,-32768,14209,-16422,12826,1961,4203,32652,32767,4579,14071,24676,-13759,32767,-32768,-14665,-9379 diff --git a/tensorflow/lite/micro/integration_tests/seanet/add/add4_input0_int16.csv b/tensorflow/lite/micro/integration_tests/seanet/add/add4_input0_int16.csv new file mode 100644 index 0000000..b8aedc5 --- /dev/null +++ b/tensorflow/lite/micro/integration_tests/seanet/add/add4_input0_int16.csv @@ -0,0 +1 @@ 
+5907,-4735,-3274,4330,15688,24014,-24268,-18812,2185,-11928,-21892,5850,20231,23990,7964,23088,1823,26649,-21132,-28005,933,24503,-8673,-6883,-23198,29084,-18156,-5122,-19118,12110,31285,21596,-4956,-26510,2482,-1339,22577,-17245,-3586,24204,-6675,21599,20861,-17616,-6732,24645,-6113,14448,21472,-24439,-20140,13503,11887,10117,30760,14919,24700,19098,-6988,22267,-26560,10823,1491,-24743,-7332,-9096,2993,-3056,-12797,-20916,-1876,11073,-7482,-16317,-28113,19587,-27535,-8361,17013,-19908,-22027,22758,-30802,-19845,-31700,28473,10997,-24535,-28068,2533,-5148,-20406,-10845,-13822,-16069,-13590,-31254,16979,-7433,-1179,19758,1784,-6625,1083,-16476,22416,4669,-25164,20181,-12190,-2760,-4047,-31764,29913,-16950,-19950,-11544,-27222,13773,-7769,9966,-19879,-18569,-18484,3080,22149,-12824,19211,-11374,-21676,-28194,4803,8461,-23063,4066,-10825,-27909,8985,-17196,-2985,13043,8294,4847,5501,-28235,-9718,7386,-18366,16213,-10911,-2986,21187,441,4636,-2969,10585,-3787,-32767,13497,-17736,12996,-8538,-5127,12867,-6372,15970,12030,-20240,-25628,-7610,-14404,21579,-19996,-8484,11223,25896,13215,-25336,-16298,-19528,2487,16606,25884,-16724,8519,568,3673,-27156,-10772,-14493,-20384,-10416,12804,29945,-19968,23633,17272,-5400,31225,13303,21138,-23418,14676,-26432,4064,15223,7345,21561,-17096,8777,-6615,-6982,25567,1203,25210,21492,-7879,7817,24436,-26585,-8210,6254,9605,-29166,30416,7349,-14073,25814,-15677,-8193,-10549,-20911,-26360,12875,3481,23342,-23286,-6469,-16457,-16020,-30343,-30891,14576,-24939,-20137,8542,-15461,5361,14681,10306,-30485,-4904,-18618,5243,468,-9525,9333,20036,-7103,11842,4594,10847,7296,-15764,-12291,12791,11281,20789,25039,3872,7523,-16701,16198,5813,26246,5729,-8848,-645,-29091,24015,-9001,32647,13661,-24037,-27361,-2342,12481,21044,24060,-4280,23789,-24823,-7374,22531,-16457,31343,-16847,12588,-6353,-5112,3364,-12328,30075,-31911,16077,-11697,18392,7212,16887,10597,17656,-27258,-23216,-5629,-6735,3697,24605,24783,8287,-32423,14881,-19280,19958,30887,30695,
-6256,2779,14882,-7006,-9261,17068,1155,21363,16424,14966,-24418,5596,-29890,2462,-28055,23683,17285,9844,32681,30777,-17094,-26956,-26436,20486,12357,-17439,-27525,-11619,-23072,-25141,-8646,21039,8195,20147,-6036,-16597,19765,7510,21709,23261,-3587,8215,-1595,-9888,18824,-21549,27130,19668,-27585,11959,8325,30050,-28263,-32094,-8827,30897,13566,-17965,4760,11211,-5749,-27269,5336,26512,18624,764,14602,2112,15174,-32346,18965,15545,548,21310,29684,22358,16070,-9480,-14117,-5446,1799,-26127,5757,-24380,-19895,-6975,28903,19977,-11027,-20192,30703,29905,3275,-10734,3087,21968,-3808,21252,16631,-9800,-30258,19767,31340,-13443,-6041,7694,19688,-12984,-3441,-15182,21465,4650,19062,-14746,2187,31660,6209,-23011,27839,-10543,25777,-24190,7791,-12382,24313,-9499,-17095,-10885,-2761,-1461,30605,-21549,21088,2773,18559,12092,31948,19292,13118,4883,9911,28410,8055,-32307,-5206,6676,23504,-9722,-24886,-9673,20459,-17355,-24201,6026,10753,25247,8049,5352,-23683,-19465,5701,-3446,22009,-11872,-2622,-28702,-17025,16246,25081,-791,21226,-31681,31732,26050,3471,18828,15674,-16189,-3907,-11958,15584,19323,25176,1622,30799,31244,-21554,-12055,14487,545,17203,29476,-31202,-26755,25762,-10376,-31000,-20713,-22744,1860,-1606,-14679,-6575,-30625,-31425,-19093,-12459,-2217,25371,-6382,-21282,23838,11533,-32437,1642,3248,-10679,-5683,-22076,-1178,-6222,-3621,574,8529,-12223,16057,-25453,-6434,32647,20557,556,8588,-2007,-23475,-23092,-8091,12645,17046,-19279,13071,-27785,-146,-17966,13281,-27710,-12600,-6168,-15807,8801,-25320,3194,-22677,-20107,-31063,-26446,-564,-19391,-1632,17696,-26910,7953,27368,8267,10756,-5393,-25979,25596,17314,19032,26553,8118,-2766,7439,-21230,-19609,17910,-30136,-20604,-21401,-16315,6822,3614,11432,9473,22018,-16684,1754,17138,8421,-19130,-15370,13380,-987,-11299,-15631,7218,8414,-3372,31587,-10075,20531,8128,-106,5046,30789,31378,-18917,28826,-2892,26524,24914,-24668,2738,1404,-21221,30073,23131,3416,12014,9260,-3050,-13032,10228,-24651,14662,13603,-7285,19806,5
351,-13012,-30309,-16732,-31950,-4178,-24056,20124,22742,-8579,-4225,-5493,29050,-26207,2020,-12784,10203,27255,-6056,-30449,-5464,12213,22189,16487,-1228,-4504,29783,25900,-1527,31345,-26275,29289,-27838,23171,-22384,-19080,-6970,-10278,6015,-7764,26488,25869,14668,-12168,12033,-1023,29885,21571,26691,-24184,4760,-380,26160,30610,31991,-646,-25529,-12258,-18109,-11101,-21993,30727,-424,26606,-14205,-16196,-12961,29046,24985,-27116,-8781,1962,-9779,10273,-14532,-12010,15025,12129,28953,4247,-4819,14636,-14859,18164,19175,3731,25901,-10171,-1924,-6960,3898,-19456,-21947,-32500,-20450,-11515,17878,16844,9871,-30510,9251,2340,13968,2914,29130,-26100,-16477,2698,10206,-1503,-12846,-11283,-23001,-15283,29967,16075,-28950,-29033,-23531,-11546,-5098,-20021,-15605,21665,-20308,28824,-18458,8837,-21866,-11787,1685,-30329,-3552,-2075,-14407,-10497,26718,-6320,-6466,-7834,28781,-20956,-6834,-2554,20661,16973,-14992,-19856,10533,-25442,19384,11871,27344,24607,31431,27141,-27142,-13212,25060,2110,-21761,-16489,-410,4836,-29855,26761,-20973,26772,24463,-238,-27411,7697,5052,-22451,-11483,3294,32369,-13967,-13988,26994,-319,-10016,29589,17392,-24523,-10702,11927,13769,11678,-9839,-17638,-6177,31129,-24069,-15086,-750,-27377,12886,12454,-13208,23678,-8668,14856,23452,30844,-18370,13311,26890,-3343,5190,-18892,-29199,8888,-12274,8382,-18780,-31264,-4625,4882,32662,200,12680,1329,24299,-12035,16237,-21755,5260,17674,-19912,-6013,-30814,17591,-11107,-3164,17261,14638,-9211,-1533,29409,-6765,9098,-9940,-19160,538,27259,23590,-23966,-9377,31824,-21650,-2186,29494,-8568,15844,1760,-3244,-5743,17583,30589,24336,13337,-17532,-6915,4532,-5493,10840,3439,3349,16363,16522,1778,1075,-26263,-5814,1929,25862,32417,-17660,7923,25520,-3504,-19305,-1958,20121,-10208,16283,-16221,-10741,28247,27279,15258,9159,3991,28148,-13940,-22571,-9784,17072,-31796,-9208,-1486,26528,-30473,7859,-3078,-19507,-19920,-8919,-1439,-28583,-24816,-19186,31506,29835,30834,-1423,21312,24751,-11573,17659,20581,-15736,2068
7,26585,19845,-7941,-7750,-9435,24876,27201,26719,12752,28909,28499,-31149,-8844,-20927,-12539,-5838,-2461,26666,-9563,22490,-595,-21246,16155,-17050,-11584,9927,26196,-12692,30353,15320,18061,29098,1759,-30600,-32594,5930,29158,14900,23381,-28601,30421,-17442,13077,-19933,-9466,-28243,7217,-1617,-20440,-18591,8186,4230,31012,28383,18592,-947,15413,-21048,22300,-25485,31213,-12644,-519,16322,-31708,-4334,2673,2216,-11150,28358,8880,-16678,-30240,-20818,-28478,25362,28806,-26596,-1485,-878,1775,24111,-14222,-9866,-4484,2715,5458,32578,10638,12384,-28068,-16818,19544,8183,4388,-509,1080,4331,-5327,-31826,21220,11191,-12565,2139,-26702,4993,-16791,-7173,-25493,19100,21212,-20558,19460,-12273,-28320,-13431,10927,3701,-12960,30887,-14946,20589,-30004,-12458,16255,-22016,3557,25207,-29251,-15262,-22259,18737,-20292,5522,864,3846,11650,20635,-13059,17255,2524,7321,25511,10717,18512,-5797,-18286,-20121,5379,28556,-1889,11592,-11925,-31370,-5099,-6983,24144,-56,28545,-10400,13714,-3321,-16140,-10310,20446,5785,28129,19731,-15052,29061,31368,-5531,-1516,-5690,23323,-7106,5040,9454,-20184,-29786,-28376,1426,-5438,28712,13810,-16400,3396,-11016,-10126,-12239,-21187,-22142,19205,-20500,14034,31395,-7334,-7945,2068,21295,-17576,-13318,28305,3486,21299,27583,14739,-529,5142,-20629,29504,23989,23827,-25110,25098,-23262,-9846,-15163,-12981,-26784,-18551,28196,24166,10212,26682,9560,27407,27993,28459,23556,-10571,-10146,27031,-11904,-30546,31143,-3693,2896,-17272,31152,-18384,-4130,-10849,-13206,-30625,10212,-24208,-2537,1453,-2453,6637,31344,13895,20740,-23499,-30853,-10614,-15803,12275,-15104,22814,-16915,-15865,15143,14959,-6314,-16875,-24058,-14549,13424,-12333,1079,-19907,23411,5939,-25870,-16766,-12251,-13609,14704,21680,-24882,28518,25942,-21311,-2489,4032,12833,6487,-1722,18316,31647,12532,18705,21436,-8427,403,21335,-29566,-3180,6833,-13948,32188,4232,-25289,29441,-5349,2083,-16248,-12227,18442,3637,-3196,-19356,-8148,-7185,-2329,-18711,21656,32652,-5989,-30832,9373,-28700,-
19398,16563,16665,12820,-15204,-20595,-19858,10497,26517,-20408,10383,-2777,8178,9794,-11021,23867,5662,3115,5756,-10173,-28239,28018,18930,-23676,17738,-9362,15767,-7492,28546,-18271,-11245,-15610,26708,-32090,-10007,-10509,16963,5101,28137,21120,6390,31495,-28323,22186,3810,-8999,16758,778,4394,25428,23119,19756,-17448,-15636,-22019,20325,-11018,-30351,29115,-22499,-15572,-21310,9214,29907,-12561,-29311,-28654,-6334,21190,-16875,32099,-16217,2968,-23528,-30621,2330,-15691,13701,15143,-20156,11973,-20301,30869,29455,-9704,-22640,1230,28002,5949,32539,-16426,-27333,-5022,-7509,-9705,-13756,-25329,3421,-31580,21065,17097,-14023,-22705,-23413,21651,16556,11125,7278,-28415,17919,27326,-31725,-5139,-537,4511,26565,-23147,-22667,-13508,3706,-28398,12350,-6152,24544,31581,14296,30353,-14768,11542,28654,-9779,24115,-26009,-17019,17046,16147,-4652,-11962,-1598,23168,-29207,17105,-5155,-3905,20432,7904,-1943,24778,-7186,-11449,6307,-1051,21983,21412,-8036,23395,-5686,4468,-1004,-27363,689,-15191,19687,5483,-19686,26802,2414,-28298,28283,7400,-28014,23492,13373,-21109,28411,-24587,7625,14934,18169,18040,-21988,-16913,4554,-1114,-3142,-17711,-14789,11519,-1943,-5896,-26176,-25887,4967,11456,12221,28416,11687,19206,-1188,760,-28765,-20570,2011,-6621,31829,1513,11653,3375,28908,5124,-7244,17536,-27824,30373,1818,10549,-18328,26013,16641,30096,16518,-7188,-12885,-12629,8,9117,-32210,18650,-2459,-32437,26915,16861,11224,-23145,-8200,-26451,-31866,4693,-15690,19081,8693,-16839,23774,-8870,-1681,16544,-17270,27040,-14006,-26824,31070,13844,-32370,-27885,27153,-19045,-7750,-17998,19722,28472,11580,-20157,-14552,-3277,22772,-11404,-7564,-5483,-27525,3852,26486,-18585,733,21973,24571,-26237,28109,17083,-18268,-13068,-27141,-16267,-29281,11635,21002,-1016,-11401,21733,26122,4459,24949,-2044,-8498,-3188,-6900,-18800,-6341,4947,28415,31940,-25082,15752,-2306,-28131,-17115,-29865,-31234,7653,-18966,27416,15510,-14335,-14781,-5856,-12045,351,-23417,-18741,-15060,-13101,4285,11583,-16392,209
03,-22664,23113,19948,-21735,-6073,-3990,29725,-10941,-19988,10836,-14501,8692,-63,25984,31918,-30784,16781,-7028,-6992,-22365,11178,-3457,4658,-19347,3410,16780,-30260,-12658,-15853,-30889,-7079,26431,30277,-12606,25891,-3539,9380,-24658,-4171,-1084,4026,28622,-21755,-19269,13399,10962,301,-4153,-26976,-11940,28522,11050,-989,5071,21018,-20520,-29041,-6600,25646,-10605,14049,-3646,22066,-4586,-15363,-15927,-18970,11593,7588,-25997,23300,10810,1009,-1310,22512,689,28322,-27290,24520,25857,2121,-13902,21661,-13508,31380,-1140,-25540,-11481,14505,13446,31439,30823,-9307,-27225,-8958,10417,-26934,4683,-13744,13088,-27063,21888,25155,31506,-2084,-20655,30285,842,11629,-3745,1775,757,19862,20924,-573,8724,-27090,-6182,-18789,24481,-7301,23797,20525,15424,9189,31358,29109,19781,-10185,-32177,25145,-4436,15036,-25718,23420,17341,630,23461,27458,-16815,-26303,-5887,238,4290,5706,-31665,14014,4176,14796,-9567,-12993,-1958,-6611,-24596,28654,6507,7660,-10730,-11850,18305,27925,28932,-20454,-7093,12326,13142,-14619,20709,-26781,-8474,-32018,1492,22796,-18956,-31002,28355,-1098,-21224,20589,8739,13294,-3749,-6531,30262,22499,-9707,15505,13021,14873,-17792,30329,13789,-28991,-7106,2537,-23043,19436,1636,-18893,-18391,24987,32522,-16235,-19648,-23343,-16135,-24204,-9344,-2874,-20133,11744,28174,25008,-16577,27030,-2143,-28512,10036,16033,23428,30746,32290,-24,-22164,-1002,14626,11370,7385,-17187,5161,31560,-20447,8168,16740,21947,-14822,7421,-29120,18976,-16685,-7730,-9178,26163,21929,11453,14997,32161,6869,-12847,-27149,-31877,-30758,-15808,-30060,-26681,-11944,-14811,22988,31826,23173,30513,14805,-20476,10359,28350,15915,14949,8616,-15466,9270,-3845,7948,-11507,-10876,-17961,7342,30945,-21164,-25502,-27692,-3914,-23667,-7122,-22432,9931,-18649,-2731,11493,10976,18095,26037,-30632,32483,21796,-20369,20701,-24632,-10091,22179,-9153,-15820,-13504,10098,23213,27694,14218,13667,28611,-26912,-28146,14194,1058,-26636,-7003,19792,-22646,29674,-15544,455,13166,21447,-16619,-8649,19668,-
31824,3078,12207,-17157,-27916,24089,-16683,13469,-28247,-21720,-30958,-10173,-12246,19782,2547,10440,-32396,8088,10293,-30002,-31673,24527,17429,17883,21956,-17662,14264,-28948,-21055,23719,-29823,13608,3197,-26038,-28225,-2967,22334,-20348,25901,-8694,-22096,1508,-25982,31717,-669,-19070,15389,16997,-25998,-5470,-7651,-14120,333,-29356,29084,-26882,-6999,12988,8404,10572,-11808,-13913,-27483,-16500,-2871,-25698,8544,-27205,-25399,17106,14994,-13349,5886,-20201,-13977,-13114,9912,1633,-7261,-9110,-13915,16314,3347,13751,32423,12559,3809,1164,16800,4397,-18004,-21841,4101,10584,-7537,3334,10209,19899,-30398,2796,31049,4291,24964,13632,-18232,-31151,3202,15609,-29183,-17330,21837,18624,-26469,10160,7648,15256,26793,20146,-27293,-30777,27416,26240,-18426,13409,12941,-29622,25466,31123,32005,-28658,-29096,7127,21432,-14582,-4139,-4254,-20170,-27951,13560,-31229,23180,15544,23972,28486,458,10666,-9852,-23912,14108,136,-8243,3483,-56,19547,28843,5205,-7412,26648,1318,22920,-17770,-14660,14547 diff --git a/tensorflow/lite/micro/integration_tests/seanet/add/add4_input1_int16.csv b/tensorflow/lite/micro/integration_tests/seanet/add/add4_input1_int16.csv new file mode 100644 index 0000000..133a978 --- /dev/null +++ b/tensorflow/lite/micro/integration_tests/seanet/add/add4_input1_int16.csv @@ -0,0 +1 @@ 
+-1753,-5130,-27342,865,-13271,-28418,2595,-31158,23392,-14061,-19679,16049,22926,22049,4889,-17053,-27633,28466,-29053,153,-10134,-12992,21052,-12172,31062,-26210,-24789,-26836,-13307,-29780,31250,19350,3111,8092,18623,-31951,10563,11508,-16430,-23293,32453,-32599,-32030,-17504,15447,-1927,12984,-6435,-30087,-8639,10087,-28675,-1080,-30699,-11226,4062,-12675,-25961,4357,-27914,-14298,7795,24209,-3109,20823,18146,-28504,-27021,-21429,-29049,-20042,-16713,-28649,-2050,7191,3711,-11527,-32582,-22074,9357,3811,2921,-30317,18880,25876,24143,-32139,11921,-10919,30222,-2738,-30102,21103,-1082,-24564,21991,-2046,-10811,3581,5496,32428,29573,-19756,-5105,-2371,-5385,-21451,14110,-32153,27643,17794,29983,-14122,-13944,10864,-26227,-20136,13447,-27283,31141,-5859,-14722,-29508,15240,16312,-29682,-4894,-29748,32171,10629,-1905,-18831,22679,-690,-10009,26595,-10722,-12886,4550,31731,26373,10966,-32003,32372,-16685,-8988,-9689,892,13955,-32291,28772,-19864,-15933,-17448,-9122,32501,7198,-24511,23492,-1812,12633,16238,704,-26462,-27150,25507,22985,153,18554,-21944,-16363,30896,175,-7704,23453,-24831,-20058,26156,-27311,22145,-14592,26558,2300,-7243,-4688,-32088,19316,14015,6253,-20782,-21399,-22924,-32416,-19678,-2622,-22204,10390,-28374,16580,-32232,-8405,26447,-23748,-15657,-32018,-17947,-624,-10279,15693,20688,3819,-18278,716,15608,-7003,19800,-27006,28846,-16217,8786,15916,20308,5174,-29474,22296,14511,-17995,7094,-7859,6168,3206,-4357,25868,9542,27137,30726,10717,29056,-23238,-5657,27639,32091,8619,4298,20521,5157,-915,24734,7810,13145,27762,-20090,17408,298,13003,9418,-13985,13998,-7437,3620,-16234,9968,-768,18587,17529,-5385,-24735,-13291,26976,1954,-25577,-11285,-19598,-30766,29950,-2162,10690,-7552,-30056,5766,-13827,-16001,20508,-3753,-4063,10055,-890,27364,-7388,23480,15881,-19681,14901,-5340,-21630,8634,30889,15445,8523,-22727,-26101,-10684,5846,8186,-19521,-3199,22640,29308,-5418,-8244,-20194,-24075,-1051,25088,9217,-22740,4328,16549,-17437,10792,-10779,-4134,7225,23
037,-6945,16698,-24616,-8325,-28297,4074,20838,8966,4691,16934,24097,-19548,-6927,195,-32580,12083,-28728,-28478,-27160,22493,-10774,-26829,9177,-18256,22799,-2820,-9797,7266,-21621,-29373,-524,-15882,25991,25425,32183,-6654,1998,-13337,-17879,19154,27679,-29609,-13104,21073,-7124,-30492,30902,11900,6978,-2804,-25341,17165,19733,10506,8717,11547,-24525,-18724,24945,-25075,12664,7510,32716,-27540,-15040,-16499,-21965,-7255,25747,28138,-12617,-4645,14275,896,9708,-7272,25165,-13265,29149,29097,1769,-27271,-2639,24558,-6514,14168,32712,4436,27166,1192,-31867,-30825,-6382,4019,7566,23667,26831,19207,-31642,-30224,-2642,-25960,18976,-29038,-6209,-24099,-3623,-18026,24519,-4334,-25180,30896,-2636,15637,30563,-16207,-31151,-15968,7345,11148,-12469,-21698,-14697,2966,4526,-23003,10356,14366,3520,-22675,26836,-19611,-30025,-14997,-5418,-1874,31240,30934,-5129,-18222,-19915,4492,-974,-2862,-18811,-27094,9732,-30501,24806,-222,24611,-23239,-11472,31460,-26798,-29556,19585,-11861,-15062,-24175,16096,-7689,12384,-17612,-1772,-14407,30876,-28871,-24653,-29540,19158,-2232,-21402,-17206,19710,-16087,-32066,26088,4363,12873,-10423,-30499,9653,-24250,-21788,-4624,-24241,30485,23235,-26031,20045,18230,-6187,-25114,25174,16630,28982,27499,-22256,31381,-11381,12701,19536,27778,-19722,31653,15273,14582,6799,-12185,9276,17350,4791,18055,-23829,17903,-27260,402,-14312,16498,1522,-19684,326,16277,18389,-1662,4448,3341,-30924,-19955,-12602,-14988,23011,-31114,28244,-964,-31940,-16666,-1364,-14337,-14096,-22802,-7727,11238,-20482,-7981,6247,-5229,-9925,-8045,-29408,16987,29898,19485,23927,17062,7367,-18260,-9909,-20925,-4238,-20098,11826,32216,909,-13464,-6460,7651,18227,-12857,-16574,4522,19138,8619,31592,-30347,-28584,-8623,28154,3277,-6316,-12523,7669,26006,29438,23837,-24196,9158,15877,-13027,-26980,7072,20700,-2328,-10811,-4626,-26540,-30893,23849,-21782,2445,22245,-22722,13277,14285,-11719,-32224,-27456,16825,-21852,19157,17099,-17074,31198,10932,6869,-24892,-17041,-12323,-11490,4613,29
235,3907,2592,8964,-5442,6802,-20377,-4849,-13427,-3898,25240,-13412,21116,29721,-13492,-15705,31531,-23752,16514,-11144,10193,-13548,15337,-1302,-20044,17114,113,18758,-23294,-15831,-29013,18010,-11918,-20457,-23664,-20075,9777,12160,29676,-26558,23824,-14251,24299,17277,-13058,-18089,18533,-23796,5340,-8804,-29121,3968,-6026,-13366,-23919,-23719,-5474,-6730,926,-3015,-18130,-24452,28536,-7269,1759,26963,892,-30785,-28624,-11974,9328,13517,-1144,-17651,227,138,14097,26282,1348,10043,23350,-13299,-28094,-31030,-1820,1493,3909,-24391,24317,-5257,30943,5605,4573,-8768,-21683,-15283,-16999,21003,-993,-22272,-30874,-10470,-581,24887,-6828,19475,-8858,-28914,-31736,8619,7485,-29851,14481,12235,-27004,29513,16124,-5356,-4112,32297,-7440,-31743,8160,28723,22468,-2088,-23442,7161,26930,8221,29820,27957,-14705,14534,-24088,-30568,23710,-5146,31455,-22501,25386,31692,3946,8288,-7675,-11950,-28619,-22967,-29483,12726,-29164,-15547,-27704,7890,31907,28936,-5172,22592,12731,-6593,-19611,-24518,-28584,10702,-26825,-17750,21920,-26864,-27096,-29673,20342,-1594,18443,32360,-14854,-7227,8345,20137,-16864,-30059,-25930,-16512,1891,-17753,14073,16434,-25794,29547,-8004,-24828,-22221,23757,-17964,-29149,-738,9782,19082,-20847,-5381,20165,-8366,-23744,3517,-6411,-18276,-14048,-23791,12615,-11272,-27707,-31185,-11003,-7853,-3604,-27127,28828,-32391,24045,10040,-5444,-23806,29464,-2889,13504,8009,6113,27259,2204,1158,25142,14557,-6974,-323,32069,-16184,-22519,8651,-12622,18620,-2287,-5961,15463,-15836,-11675,9745,11294,3484,-14067,-7289,20109,20916,9226,-376,-29687,531,-30878,-21449,-9336,-7877,20349,6139,-20136,-8338,-18818,-17391,23447,4937,-14443,-28020,1332,-6138,8673,-19878,29045,8453,8608,12218,23034,-21388,7187,6015,-4801,-10434,-27873,21846,-1077,-22233,-18354,-25242,5212,29825,-15204,17258,-31880,-28927,18347,9398,24063,5513,31929,-28182,28800,-9242,21675,-15450,31620,27291,6025,12635,5758,19392,-19297,21450,-32123,19798,-17366,-16793,2951,26775,30293,19219,-27454,19217,-32614,-1
6885,-20658,29006,9208,7929,4200,2051,19215,30674,16277,-25145,27136,-6542,25800,-1111,-19009,16888,-30985,24664,21371,-18388,-16814,-9088,21925,29378,30089,-6419,-1098,14168,8693,-12733,5827,24051,-24687,-7265,-25045,20184,7134,-3097,12443,-19624,-4176,26249,30486,14299,7453,5824,-22871,-21926,-23978,22209,12178,-1946,27532,21391,-9435,30205,15343,-26242,30739,-6524,7856,-8113,-27560,-24325,-10873,20097,-9279,-19215,20063,3612,6472,21308,-1792,-3847,25796,13063,716,898,5381,-31438,-24640,-1438,3757,3642,-22482,14036,-19759,7963,-6958,-5527,10940,-5750,-26698,-1964,-26976,20022,-20781,-876,10378,7032,-22672,10688,28596,-28691,-9742,-14552,-17684,17037,26277,-22692,2651,18296,-19644,15688,323,-19562,19412,-19043,-16165,28072,20177,-24241,12940,-13329,-31481,-29712,32449,-1255,28832,-17266,-30066,25407,-1960,-14478,-7003,22319,-24206,-18129,2407,6370,12325,9121,-29616,20967,12462,10336,14331,-30010,17754,4912,-14686,3211,-13445,-18115,31234,-24167,30077,-847,-26804,-16879,-2141,-17600,-16088,29440,999,15853,-23213,-8991,31090,4731,-28535,2443,21776,-9439,11356,852,9111,-6707,-30641,30974,32697,22949,18866,20664,-24688,-24206,-9375,-21125,-30948,-8342,-21438,14004,22652,-2489,-17221,-25822,-32015,-11757,-510,-8230,-20125,13790,21231,-30015,-22397,11368,-3083,16591,-657,-29285,-858,-21447,31849,-13035,29346,8669,-21824,29695,30422,-8035,-20906,28899,-8178,-29077,10973,31767,9111,-1025,-203,15345,-25185,-2209,6354,-1598,-2341,-19616,21160,276,4129,9789,-29935,13248,27139,13972,22893,31006,29324,18285,-11260,32317,-13263,10239,-27120,23341,6569,3629,2231,-29088,-3276,20843,13292,29782,-26019,-2446,-16165,-18366,-178,27833,-3418,-30989,17451,-19625,-11744,30072,-28982,-5535,-3359,-13374,-9487,-21796,-3610,-8344,7778,22528,8888,18017,4086,1631,-10294,-103,-18954,27596,-19299,-21541,-9971,-2480,-336,-27426,-4733,-7068,-2554,14442,32627,5206,-8675,-21468,-6469,-21085,-32510,9066,16708,-24683,12145,18041,5770,27612,28593,23596,25609,2614,-18582,-27944,-15473,20372,-4030,18824,
23079,-7919,28739,-14604,2832,18297,26402,-399,-15099,2294,11521,-25936,30583,30507,27508,-5268,-11275,21794,-19513,12736,-15487,-20035,18997,-28720,7577,-7035,3011,4585,-21238,-6884,7886,-7781,14181,31063,-14064,-16676,32538,-25039,-22946,6946,-29508,31532,17851,30724,-4412,22338,-12199,21026,29042,2666,-5379,30971,25840,-22069,-17296,18273,4736,-8969,4536,-26772,-15423,25008,30559,-2536,25533,24542,4213,-4660,16591,30983,12109,6094,6075,-20345,-24992,21260,-25943,-27022,28553,12207,-26004,-9464,16058,-10671,3965,2826,-9547,-29418,16531,10288,24835,-31212,20770,31673,20805,-3843,19662,-7922,-24145,9549,-10576,25197,-7627,-27242,20829,-21133,29714,14968,-20515,-28121,-23425,5281,-31980,-23906,-21573,12874,-2599,20958,-10409,30275,8524,-28983,387,-3872,31559,-26434,-20145,-13306,7341,-29150,-7256,-12222,15349,17010,-8788,-20401,-4392,17988,-27909,9736,10161,18854,-7775,-21805,-10551,-13180,5909,9262,-1746,1012,-7527,-27711,17425,5049,32664,10701,-30741,-7948,-11836,-2246,-8634,-24485,29172,3429,-20291,-10328,-484,-22478,3006,-16143,28417,15735,15146,8479,4060,20203,11011,18435,-17464,-20035,14152,27593,21988,22118,23043,-18739,32126,-4791,7093,7417,-31061,13905,13215,-26121,21659,17371,30301,9347,11372,-7646,30444,-14007,952,9314,11464,-671,-6227,10079,-8916,292,19952,-3304,-30538,23005,-29533,-13700,-29938,31991,28462,-4399,8919,22794,-29657,-14554,-24955,31476,7411,-13736,-3047,-29931,25758,-6361,-5832,2230,-30021,-27369,17717,4707,-21761,-1701,-32232,3713,-24656,10374,-11369,25198,19724,16783,25112,4935,-22216,32044,25450,1330,-17589,25742,-9566,-4226,6091,7237,-4115,23877,21625,-23887,-24796,-9757,-15341,11696,-31298,25402,22783,22875,-9304,-26891,377,-5905,-4412,-21690,5853,30792,10960,-16646,-1806,26825,-3325,-10756,-10058,7263,-10126,30656,27151,27976,-21820,20062,13074,937,-27789,-15949,10797,3678,26481,-13086,-29214,-30949,16961,-18300,-4101,-9553,-12169,-13098,-1432,702,-18833,692,26512,-32086,8370,-2727,-22315,32195,30191,18265,7034,-9213,29180,-31321,1116
7,-15067,11035,-29271,-7530,-16117,-29050,-8515,-18905,-8483,-23844,20986,-29644,-29422,-9733,28653,22029,28082,-22503,-14531,-23033,31674,19670,-10286,19670,-26235,28104,-26357,32350,-5743,11573,-8127,-15905,16621,3889,1143,6391,-868,-13944,2391,30399,-26978,-18353,27832,20980,15987,-28189,4149,10015,-2255,-5770,-25345,-10038,16036,-1771,-21439,-26530,22483,-27961,30103,-17201,-192,20009,-8200,28446,-5335,-29734,-21792,19439,-7152,-6118,22244,-23262,-24514,17097,-25572,27125,-4693,5315,7761,3394,-22838,-5664,9098,32269,-6970,-20143,-11161,191,16852,18569,5054,3820,13215,-30902,19397,1324,2989,19433,30305,-30187,-24532,6468,2471,22953,2254,-13840,31272,-2876,-5822,3548,7532,-7721,15919,20622,18436,10462,16293,-26310,31022,-11787,-26039,32084,-18613,28784,26308,30212,15010,24058,30653,-26159,-30125,24621,6333,-15399,-10615,-11245,30246,4229,5025,27445,25465,423,2039,-31072,9867,18374,-1105,7052,-13744,32178,-12499,-28427,24072,2672,-22513,-29034,-19097,-23076,13422,-28601,-18959,1160,16287,-5606,5335,-32332,28800,-17526,157,24742,-14573,8805,-18805,-14311,-24253,18783,-24684,-19455,-20673,13948,9479,23021,-19068,4726,-15577,10993,10954,-25397,23567,1873,-32655,-25282,2119,-9804,-13107,32397,-16364,13516,-7805,-18918,-872,-25430,21912,12704,-20822,10879,-25627,-8523,-21706,-4795,-22013,9444,20675,2236,-3510,23450,-1100,-2836,-27940,21711,23675,214,-23297,-24658,26391,29142,-18375,25111,18643,30150,18014,-8729,9705,3206,-12752,-3543,-22444,-23888,7028,-30298,29849,29390,10693,-5391,-13478,10840,27517,-12797,32668,-1642,17956,531,-21171,13104,-28931,-4592,-26671,-7148,-30629,26037,-29695,-14762,4992,-10705,-15231,5774,-14642,13119,4778,316,25878,14997,30854,12480,-16310,7777,-31471,7760,-15104,-7965,-22446,12678,7715,7000,-28702,-21447,14079,-16354,-3321,-32604,-6664,-28716,-16381,7137,15435,27555,-14711,-30869,7386,-19497,-2624,-15070,13010,-22898,-8443,-20777,12343,-25771,-10603,-23374,3040,-3155,-22571,-28921,29744,4966,-32573,14614,-6272,-7472,-18474,-18904,10698,97
40,-8133,-16520,-24330,-28757,-105,-18378,2619,20429,-31736,5160,-21643,-28886,32088,20153,10355,-1780,27754,-29844,781,-14017,15755,-27584,-21832,-14127,29147,-29748,23728,-32071,18656,4797,-4532,-3461,-4171,24751,18503,23965,11012,18506,-23563,-32061,14573,-15110,-30096,-27855,19092,-11212,-18597,-17520,20662,16761,-31538,-24591,-22049,-21433,-11204,17776,17405,15898,4665,21113,-10536,15881,-17763,18640,19647,-10084,-3342,-26385,-28855,-5375,12157,-26079,27438,30059,-10481,2561,-16515,16991,12558,29407,-14735,-883,-21845,11316,-10805,-24213,-22397,-6753,-13717,-8160,-10593,-7751,-1232,-10715,8097,4567,20312,22368,30387,-29726,1795,-5775,-12942,922,14756,30803,26785,-27560,15333,-21390,2317,8526,-5599,20467,15456,28774,18714,26201,19872,10530,29432,29095,28268,-18902,13595,-22885,18228,-27606,-21041,17505,-32217,-14900,-3749,9974,-17894,-10141,-28581,-7468,-13349,14990,-10051,13642,-22602,10590,-9347,9621,20906,-20488,-9443,-29711,-372,-22246,28597,-2170,5723,16901,14523,-1028,29124,-3607,-20299,31391,-19570,520,-32661 diff --git a/tensorflow/lite/micro/integration_tests/seanet/add/add5.tflite b/tensorflow/lite/micro/integration_tests/seanet/add/add5.tflite new file mode 100644 index 0000000..5af204a Binary files /dev/null and b/tensorflow/lite/micro/integration_tests/seanet/add/add5.tflite differ diff --git a/tensorflow/lite/micro/integration_tests/seanet/add/add5_golden_int16.csv b/tensorflow/lite/micro/integration_tests/seanet/add/add5_golden_int16.csv new file mode 100644 index 0000000..a265f19 --- /dev/null +++ b/tensorflow/lite/micro/integration_tests/seanet/add/add5_golden_int16.csv @@ -0,0 +1 @@ 
+9035,8620,-32768,18020,28720,5412,32767,-19556,32767,11383,19029,996,-15546,31460,8889,-19630,24036,-16315,-26825,-32768,-18340,-10265,-5275,16139,-2306,-8687,2917,-19943,-16589,-30058,-23136,26094,31786,-8708,32767,-32768,-32768,19948,-32768,-32768,-27492,-6535,18437,3089,24091,-23365,-32768,32767,19627,28440,26261,32767,21220,32767,-4800,28599,-32768,-17720,14088,32767,-10351,32767,-26703,-32768,-32768,-32768,-13803,11668,-7894,2644,-21006,4327,-19037,-26310,32767,-23511,-21266,-4584,32268,-16733,5718,-32768,-3223,-32768,32767,5803,-7193,-32768,1316,67,30623,-16130,-8622,-14201,-32768,18127,-3866,10893,-7037,21490,32767,-23279,-12311,11035,19441,28297,-3326,31250,28513,-24996,-17222,-6620,8327,32767,30712,15037,-19849,-14279,-25509,-32768,-14894,-7687,8068,32767,-11009,32767,10542,26302,20685,-12102,7254,-28733,26295,-13707,-13889,29038,-15627,-32768,-23732,30996,-30493,-2866,23102,-8961,-32768,14293,32286,32767,32767,-32768,9620,-24758,32767,-32768,-3365,426,15281,-16049,-2340,-8261,-13955,-1954,15635,-32768,-32768,14275,18127,-20123,13134,8493,9927,6247,-29947,-6875,-6209,-32768,26273,-32768,23136,11347,-5976,26992,24700,516,-5782,-2520,18574,27526,15229,-17060,32767,32767,7759,32767,14396,-32768,-16927,32767,-32768,28613,25347,-32768,-32768,-32374,-25871,-32768,25908,22738,-4850,-19595,-32768,17889,-19294,32767,-28686,16323,22524,-25686,-802,3359,4658,18526,32767,19550,-32768,-32768,24748,4011,32767,2971,23144,22311,-26993,-32768,31035,7562,1853,21268,6502,32767,-8210,-24893,20891,-25166,-4702,-8145,29438,-17627,-22654,10678,29287,8147,25337,32767,2458,7256,12034,1804,-30622,1167,-26763,-32768,8824,-9814,-8856,12709,-32768,32767,-4986,32767,9432,10888,-11783,32767,24295,10204,-32768,-21894,-528,4951,10067,-28085,-11802,26340,24482,-16500,5537,26165,28521,26502,16034,-400,-29768,32767,-2611,31757,-18671,-7622,24453,1519,-32768,-32768,5370,-315,-24445,-32768,6155,6854,32767,-21563,733,-21521,16567,14018,-32768,1393,3819,-21225,-14057,-12899,32767,9474,32767,6788
,-504,1871,-32768,32767,-12678,-32768,32767,-11022,-29306,-7943,-15809,32767,-23390,-9957,-27265,-32768,-27183,25642,-16379,-1038,15627,2273,-5994,19722,32767,-16892,-32768,-27387,32767,29211,-3337,304,10927,17428,-8777,6579,-1769,-9945,30609,15356,32767,-32768,-6718,-395,30368,-7719,32767,-6256,13393,9381,-18534,-32768,-19567,3602,-32768,6956,-25430,-11854,12108,-21382,20035,3068,-14043,14237,-32768,32767,-32768,-32157,16634,-32768,-32768,32767,-14758,9886,26484,-21396,32767,-27651,32767,-23149,26018,-616,4600,-1236,358,32767,32767,-15671,20438,-18927,32767,22412,31169,11699,-25498,-27144,-32768,-32768,-4409,-13819,-20283,-32768,2257,22512,-7942,-24515,11959,12052,17467,-23441,-4376,-32768,-6083,16098,-7593,15455,32767,-26168,-20974,5555,-16510,9237,-9880,2900,-19914,-24432,-17648,4303,31468,-8102,-12352,32767,25918,-32768,19193,12377,32767,32767,3320,-19248,-32768,-8509,7444,-4816,-6773,32767,5799,-12214,6558,-32768,-24952,32767,-7867,-8610,-13154,13536,-189,32767,32739,-424,-32768,-11860,32767,32767,-3908,-24610,32767,5016,26308,3019,17151,232,25659,28679,-27174,19879,32767,22567,-16224,7771,-32768,16039,4754,13194,11953,7720,-13542,-25166,8090,-15237,-25479,-9837,10392,15430,32767,-32768,-16663,-32768,-32768,2696,-28236,32767,-32768,23406,-12795,869,-19993,-32768,30511,-14082,32767,-8652,32767,32767,10132,-32768,-32768,-27663,-22702,15889,22868,10773,98,-4721,23372,-4445,13540,32767,32767,-32768,-27421,11166,1474,31315,10814,29807,17293,32767,-32768,32767,-12285,14210,-7767,-31305,17513,22413,32767,-32734,-23257,-20343,-7472,32767,-20027,5617,21488,-20406,32767,25166,-26034,22755,22825,-32768,-12600,30756,8141,-10337,-32768,-14661,-32768,-20967,11705,-15369,-336,-23791,-3683,-1660,24095,-1261,12393,5125,19827,-32768,-26831,31020,27106,6229,11308,-32768,-2469,32767,18996,-7447,-24119,-15522,32767,-32768,32767,32767,-32768,-32768,-32768,9084,32767,19092,-30254,-14788,-7179,-30121,28914,-17484,-14261,15681,-3676,-6107,-17171,-22446 diff --git 
a/tensorflow/lite/micro/integration_tests/seanet/add/add5_input0_int16.csv b/tensorflow/lite/micro/integration_tests/seanet/add/add5_input0_int16.csv new file mode 100644 index 0000000..9f3bbc2 --- /dev/null +++ b/tensorflow/lite/micro/integration_tests/seanet/add/add5_input0_int16.csv @@ -0,0 +1 @@ +-1264,6760,-28387,2560,10204,11211,17868,-22037,23497,23200,17922,23656,-10422,20141,30533,6086,752,-4270,-9452,-24806,-13765,-24166,-20457,15666,-17075,5816,21353,-18427,8280,-15375,-29095,27582,12655,1837,24733,-29916,-26318,17621,-19647,-29637,-22129,-2205,12291,-10112,17263,-3886,-25731,22929,31096,29306,27248,16426,29284,23030,-12145,29727,-22677,619,-1077,23422,8090,24724,-21448,-19819,-31132,-23096,-19078,1364,3170,11326,4582,-7717,-31015,-15007,32187,-30724,-15869,99,6756,-11989,19703,-27918,-11119,-31483,26761,-8196,-1311,-30431,10651,-12683,29549,-13602,4565,-2910,-21997,17435,6388,28601,3348,30108,31970,-24593,5637,21173,5495,10571,7599,27773,26364,-23766,-7782,-22259,1676,32447,17107,16785,-22749,561,-32375,-26548,-23449,609,21888,27353,12369,16488,-9697,22478,8129,10719,19719,-9301,30236,-18675,1690,32440,-1060,-22399,-22203,27144,-3698,-10850,748,-25545,-32589,16415,26474,20348,22456,-32700,-1730,-8273,24444,-25826,-12028,-5099,4034,6763,-21923,-14088,-20815,7842,-6662,-32486,-29924,29664,10486,-9690,28383,-4804,26364,9271,-14887,-13002,9311,-30911,13789,-17027,-332,16226,12310,13788,30508,8554,-5600,-24495,1340,18960,15374,-23418,32551,27638,-16006,31221,-1535,-25639,-31130,31657,-24866,15661,20467,-24588,-15170,-13337,-6562,-11909,28640,6087,5473,-16305,-27336,17760,-6066,19020,-6215,19603,3042,-23386,1957,-7707,9967,2635,27034,11558,-22609,-29842,5935,-7798,27097,4746,13658,23476,-1417,-24920,24499,-4210,-19649,23555,-7002,21772,-6019,-19952,-1680,-31463,-14883,7446,21092,5597,-19276,-6630,28946,16355,10060,28229,-19533,14210,27308,-5240,-9364,-16714,-20074,-21505,2883,-8728,10233,-3655,-27202,27504,-21802,30331,4584,19231,-1120,24508,4302,-9019,-21240,
-8234,-10197,12432,8391,-30039,-8260,13548,29581,-20222,-17296,28561,29224,23347,11897,-3186,-24933,23771,-1715,27400,3833,15141,10050,-7878,-9383,-24485,-13635,19118,-16098,-24566,17794,20232,27547,-3060,21440,-2762,20742,27540,-29511,23315,-16209,-4154,-9941,8381,24300,-14697,23791,16512,16296,-19413,-19868,6245,-14746,-31507,24795,37,-22325,4811,-22857,26851,-12595,10887,-31768,-21333,-8477,11596,771,-17295,-1058,-15710,-7769,4261,30835,-27993,-17964,-3002,29216,9804,5054,-11292,13116,12979,-2525,22678,16840,-7950,21737,-4868,24020,-29480,-12553,6152,17486,-15710,31790,16615,8318,17905,-19871,-10640,-30568,-10616,-29126,16756,-4312,-28587,871,-1791,-753,20838,-5145,-7346,-15014,28292,-14326,-24456,24469,-22300,-15230,21963,-5748,1605,27285,-8314,20714,-9027,27307,-11722,24514,21424,606,3952,20075,20983,29737,-23084,8296,-17785,20413,20462,26816,1208,-5051,-9623,-24635,-26101,-24964,1055,-16871,-17910,-4475,30793,7849,-5023,3565,30665,12780,-29622,11111,-29478,-25196,245,5562,11508,17078,-6535,-13772,5582,-25851,-12861,-4821,-11552,-21231,-13510,-18041,-14797,6164,15001,-10699,17065,8765,-22286,-5389,-6197,23843,12833,-7854,-27987,-23302,10464,7956,2298,-24645,30261,-15659,10510,-12646,-29610,-32450,12196,-21034,-11625,-23786,10245,2567,25119,8124,13889,-26109,-11101,30370,29508,1885,-8222,12658,25257,5959,-12874,1,-4373,1313,7173,-20104,17298,15168,12094,-1456,-9233,-24458,19281,-6791,-9265,2485,-1488,-22847,-17508,29582,2613,-3655,-7871,-10549,6840,14996,-16112,7365,-20096,-21135,-19479,-29159,30419,-11814,13687,2127,-7950,-22233,-31104,25497,-22404,20255,-650,28790,29349,26922,-28929,-9115,-27717,-28026,21315,12950,25187,20099,-21340,13572,-21815,-3067,28544,14726,-20574,-16575,24213,3856,13439,11021,19433,24210,16376,-18684,17982,-6819,31560,-24434,-13312,3817,-2421,30000,-6935,-27372,-9163,13927,29844,-15459,3510,26139,-1564,17363,7470,-3876,9046,11889,-13438,-3686,5535,-8032,10592,-17996,2215,-30953,1596,20457,-7032,16071,-27462,-14634,-18006,27817,-7699,815
,17637,8874,-20687,-15421,22757,3064,-12374,23197,-19642,-9020,10086,9203,13512,-5145,1040,25620,-18986,9112,17350,-10268,-32745,-23397,115,31774,25040,-19039,6230,-2566,-15716,24162,7262,-30603,9072,-5233,-17133,-20236,-11064 diff --git a/tensorflow/lite/micro/integration_tests/seanet/add/add5_input1_int16.csv b/tensorflow/lite/micro/integration_tests/seanet/add/add5_input1_int16.csv new file mode 100644 index 0000000..a71aca8 --- /dev/null +++ b/tensorflow/lite/micro/integration_tests/seanet/add/add5_input1_int16.csv @@ -0,0 +1 @@ +12891,993,-30689,18498,20791,-9265,28360,7258,12101,-18947,-2066,-32351,-4301,10053,-32418,-32747,28453,-13977,-19531,-5557,-2988,21691,22556,-2413,21402,-18924,-26724,1659,-32126,-15095,12879,-7099,21078,-13302,29890,-28441,-9692,-510,-26069,-6966,-2358,-4897,5200,18146,5087,-23181,-28217,18322,-20029,-6664,-6420,23313,-15501,29652,11342,-7066,-17494,-22642,18831,29594,-24195,9706,-2355,-27664,-12522,-19594,10125,12395,-14195,-12827,-32302,16267,20639,-11014,28284,14731,-3596,-5771,30042,-3535,-20942,-3335,11823,-12003,29128,18760,-6974,-6609,-13501,18083,-4328,-505,-17068,-13311,-27513,-2482,-13814,-27214,-13394,-16338,18516,6314,-23121,-16498,16078,19751,-14870,-1037,-2399,3031,-10107,23461,7848,14658,13440,-5355,7909,-18333,14620,-7712,14988,-10305,-21156,31808,-31076,29540,26710,401,13868,-30077,-19078,-22089,-10619,9670,-19457,-10378,-17688,-8856,2365,-456,-32202,11879,27312,25250,-23509,-5743,2079,24274,21392,-17484,14271,-18665,28474,-27290,12938,7760,13043,-29310,28241,9849,12403,-13530,28658,5211,-20444,-24569,7381,-10962,-24153,17249,-25226,-5486,-15652,10010,-20841,-24373,12697,-22527,28887,-9093,-24811,13582,-12963,-11507,847,31671,20911,6897,-3116,12284,30978,2136,32247,31868,19860,-21752,23393,24824,-31285,12915,2082,-15609,-24002,-20832,-22461,-32054,-8829,19287,-13724,-925,-10338,-3236,-15087,26905,-26411,-7775,23346,1645,-3762,15064,-8425,19014,14918,7607,-22708,-2995,21972,15994,2013,-3087,9041,-5917,-31141,-24999,334
5,15263,30164,-7310,17923,15726,-1541,-2255,28043,13747,15349,-20571,6220,-29593,-465,22524,-5113,-13206,16840,15369,30742,-11257,-23978,9653,-24319,25155,-4379,-11016,6746,334,-25400,20797,-12732,20009,24820,30055,5078,-13922,-12882,28422,23733,25333,-15414,-15204,13824,-11564,455,8141,-2772,13122,-11916,8436,31349,-8401,-6448,-587,2808,4030,-1174,26241,-773,115,-28371,-30851,15769,13047,-31126,-32230,25947,-27521,-7175,-16620,-17695,-20297,12996,-22140,-29529,-22512,-9092,-21870,-4478,-31380,27696,-20173,-3155,-27738,17661,32495,9458,-15098,-23748,29851,-29630,32653,5358,-32331,29697,-13589,-4308,-16584,13024,15010,-10851,-27681,11602,-30418,-21355,15035,-21211,23272,20694,25089,3665,18175,2191,18984,-27197,-29376,28409,21962,-11271,16400,-5195,2984,-7196,-24106,-26073,-931,6742,25769,22560,-29192,9566,-9217,12480,12817,12214,-31265,4643,-13891,5440,-29806,19353,19491,771,-15238,-25113,26014,13635,-23719,25676,-25807,-9945,27912,-30300,25557,-24758,-4785,-14299,-9641,-29986,25119,-9968,9864,-6198,-14478,28348,-21149,16320,-11794,-2838,-31164,4790,-7127,-28052,31030,1322,13516,13327,1996,20281,-1515,222,12654,-24148,-19680,-15554,-21591,30016,-18470,-966,-15302,9123,-16055,-20894,-22980,9628,-28721,3314,13253,-21144,-25908,28290,19424,-17220,2648,18190,-22864,-6213,-1100,16413,29598,-5292,19957,5675,-10832,3930,26286,29900,-31242,15,29662,19392,-32715,31221,23997,8346,27021,15225,16082,-27114,-25302,-2149,-9177,26660,4314,29347,-29918,26003,-2050,15410,26205,20191,5924,17604,2084,-3876,26540,28679,-20233,-21772,1189,29914,21210,-7475,-18556,25418,-29686,23853,21980,21063,6492,29650,25042,-4841,-136,27907,10552,-17859,22648,-16931,-7666,15477,29354,11154,11594,15795,-6059,-32049,-22422,-26106,-910,27736,9243,26554,-29479,-30918,-18637,-23979,30958,6706,25733,-30242,9321,-18733,12351,7000,-20533,1285,14502,19884,-9704,28936,26753,-25766,-3430,-28499,5363,11895,-10738,9706,-22516,-28406,24490,9443,25502,20982,22306,30557,-32147,-10153,-20651,-3662,19387,-2361,9028,-13
122,24986,-27715,16350,-5410,-27340,25140,-19555,16092,30963,7577,-30361,10285,-11980,-28944,29579,-2656,1917,-10708,-22842,28863,20306,-26473,15108,11159,-21501,-10244,29918,21398,-27729,-18365,-21150,-31061,-28017,-14658,-8895,-23222,9757,16247,23517,-9887,9379,14064,-18738,11756,-25255,-11066,5799,28943,25213,-19035,-21372,9770,31708,10269,-28324,-22320,-20540,5005,-31135,32404,19776,-27465,-32405,-18553,10993,15884,-12091,-10135,-27004,-5175,-14688,1219,-31780,25919,6383,2912,16816,7632,-11865 diff --git a/tensorflow/lite/micro/integration_tests/seanet/add/add6.tflite b/tensorflow/lite/micro/integration_tests/seanet/add/add6.tflite new file mode 100644 index 0000000..e310a77 Binary files /dev/null and b/tensorflow/lite/micro/integration_tests/seanet/add/add6.tflite differ diff --git a/tensorflow/lite/micro/integration_tests/seanet/add/add6_golden_int16.csv b/tensorflow/lite/micro/integration_tests/seanet/add/add6_golden_int16.csv new file mode 100644 index 0000000..c88dca3 --- /dev/null +++ b/tensorflow/lite/micro/integration_tests/seanet/add/add6_golden_int16.csv @@ -0,0 +1 @@ 
+-5977,-13799,-21191,10288,-9976,-375,-12199,3879,-16575,2139,-18830,14118,13147,3398,-14614,9916,21045,-32768,13343,14191,1753,11335,1247,-26165,-18575,7660,-23485,-7243,-8391,11732,-13259,27122,-28792,13404,4577,23552,-5858,29564,-20729,-13427,-7077,-9613,-7030,-8269,-837,16366,19207,-7978,744,13444,15133,5089,11211,-8545,7418,-31327,25231,23538,-26914,-12724,17350,7049,7537,-9846,18639,-12372,15940,-6755,-3348,363,4334,28511,27783,-16035,5429,-4938,-18881,-411,-13318,-10806,25386,11159,-22478,-25401,13463,19255,-32768,-14651,14666,10541,8286,-16208,4071,31341,7953,-31680,-19718,19322,-5610,20460,-10076,26288,359,-13546,20652,13568,-5355,-15595,28779,32767,-8739,22791,-13364,-841,13386,-13156,24135,25107,24899,16221,3541,-6054,13700,-1658,-2308,8934,7961,-18846,-164,9028,-2489,13097,-32768,-2263,-22090,10761,18949,-2824,-10626,6248,2597,25756,2599,-6001,-22189,-13524,-18921,-8566,5390,6304,-4019,-7555,-6972,-10431,-564,26163,-6703,13418,-7685,-13595,-10388,-25844,-14839,16715,23242,32767,1492,15660,22390,-9049,-20133,8753,-11245,-8015,14681,-280,-13184,2766,29640,-23561,7332,21251,-14258,-22513,-7997,28329,16191,7599,-15635,11766,-13162,-8813,-21370,2506,12997,-23494,27526,-24300,-28675,13959,-32768,4148,21504,3839,-10612,609,-6912,-30760,-12810,19417,-11387,3010,11363,26263,-2743,3787,12667,-8437,79,14740,-11897,-14827,-19202,-1670,10352,-25856,11575,-24801,-9700,-11765,3055,-196,-8561,-14060,-15144,4943,427,-27343,-29417,32767,-2320,22195,-23840,25625,-5960,29768,5923,-32768,32767,-3320,-4901,25484,4478,-13291,14377,15043,18891,22053,9973,24274,-32768,260,-10687,21443,-23526,13529,3935,11241,-16928,2522,-7433,-3350,5840,8184,-19567,32767,-13255,30171,3058,-12897,32767,2500,31332,-1405,-13728,22287,-23070,32107,-21794,-5961,26301,-18770,815,13637,-1813,7140,-665,8525,-22592,15690,2314,-13559,18294,-9914,15844,30896,27309,-4416,11587,-4820,9213,14298,-20470,9087,-20823,1895,9743,21736,-25737,29432,5621,-3551,-5871,-5687,-8102,15890,32767,-5942,20756,18910,-1805,22
480,9523,11892,-15961,11179,9582,5574,-2822,-27084,25336,-5857,27486,-31256,-3934,-1875,19580,-9781,-8042,-9935,15275,6479,-8172,-29084,-2396,5742,-23824,-26017,-16883,-17841,32767,-6352,-3583,16848,-23908,-9214,-23964,-26779,28914,5721,-17592,14297,-1817,5931,-7458,-11763,-2706,-8854,-9939,-13181,6087,5978,2761,1000,-18205,-23266,5001,4125,13826,19873,-21375,5793,13390,-18804,-8254,2246,30527,26190,15940,-26597,4655,25991,-8378,24826,5476,11026,-18683,24337,17438,-15983,-6462,-16376,25362,-28814,25187,-6674,-17804,7758,8535,16756,23212,-5261,-7791,-1022,11287,-1496,-27547,-32768,-2685,-11072,-3678,-20855,-14090,15626,20967,-4759,9412,1051,-508,-19051,8424,2095,26334,-6617,-26,-11119,25211,2033,24970,-21651,-5892,13089,-15980,2893,9806,-13084,-12161,3860,8010,14531,30474,-16603,22099,-4827,12947,20754,560,4208,2003,25550,-2328,-5114,17632,-11328,-20768,-15080,-12195,-13973,-13164,20683,13638,25232,11692,-5170,32767,10494,-9801,-15330,13450,-5170,6106,-18223,21143,-19511,29229,-6104,6459,-30132,11216,-8357,-32768,-12760,-8820,11586,-32768,20040,13494,24081,15045,32767,-19617,-8197,30934,-12362,-26167,-29218,-25873,-9233,-14814,1235,-21886,-20427,8765,-29040,20977,7484,-24370,17548,4991,6651,24266,11497,-32768,15888,-5572,8657,-11559,29906,-18924,12820,15524,-22909,-10252,-29654,3431,-32768,25012,-2000,25981,4269,7857,32278,15361,-16607,32521,-30962,10449,15815,-25452,3500,-14641,996,-5772,20563,-28177,3706,1512,-13203,21380,-32768,31111,-9530,14044,12795,-32369,-9860,20384,12453,-3953,11663,233,12274,-13024,-14136,8847,21843,9955,-24129,-10515,-16516,7277,32767,-21463,15376,9008,25848,-7528,8743,15150,587,-10668,14265,-810,13459,16525,-19688,-4703,28390,-24654,4110,17888,-32768,-25240,3616,-14060,20747,2430,-8388,18638,-2733,-11016,15210,-479,7383,-30119,32767,6122,7974,-26389,-7279,-7835,4036,9169,-2657,-24457,-2735,3215,-12514,18205,5980 diff --git a/tensorflow/lite/micro/integration_tests/seanet/add/add6_input0_int16.csv 
b/tensorflow/lite/micro/integration_tests/seanet/add/add6_input0_int16.csv new file mode 100644 index 0000000..6af0e00 --- /dev/null +++ b/tensorflow/lite/micro/integration_tests/seanet/add/add6_input0_int16.csv @@ -0,0 +1 @@ +12588,2134,-32137,27482,15352,-3829,-9787,21272,-2779,-3153,-16447,-13592,18352,-3923,558,-30981,1623,-21292,-2664,-4971,3886,23282,-10794,-22163,-18594,-396,-9971,24854,3072,-6450,4307,8646,-14553,27683,8870,17023,17726,13907,-23036,-30134,2821,-1609,-24337,14348,15669,-502,10292,8533,4862,27302,29709,27899,7123,-12453,15150,-29080,31886,24592,-19824,-19476,-7710,-13835,19852,-12988,28158,3407,11262,-22841,22396,-9766,-10835,29748,24851,-24727,27658,24589,3446,-29079,11504,16536,4474,-8004,-31105,-14570,24205,25028,-25624,7620,-16088,-786,-17156,-1072,-31351,29199,-26416,-22785,-29785,20115,-25469,-7502,-31329,13259,-2791,-4017,20608,1747,-13375,3326,31277,29160,-11785,1112,-25145,31427,29214,-30529,9799,17792,12828,11730,9763,-10561,-21492,-12265,24433,8617,-18495,-30891,29050,-12077,14514,26866,-25877,-17086,-30787,24042,31495,276,28245,11570,-12783,28667,1658,27078,-11567,-15788,6663,-6797,11913,-31768,-23450,-3715,-6171,10661,23168,31382,-26235,-11809,25045,-30169,-29746,-8596,-16976,27599,28967,26875,2268,23363,21396,10794,-28334,20754,-9453,4620,-6144,6401,-10332,11910,28296,-18463,-23891,30731,17345,-18302,27085,8877,-1031,-27859,18360,5845,-7357,-31610,634,6660,-21902,-11715,16255,-17466,-12458,-13115,-27247,-30861,-2462,30358,-12649,27888,-4074,-32149,-6305,18259,5819,6192,16611,9968,-1925,-13505,8725,-21430,77,14433,7384,12854,-3356,23947,6962,-23505,12940,-8394,-6085,-17848,21471,18530,-4317,-18982,-11301,-11381,5542,-32709,-14344,29871,22521,15865,-27313,19056,-11145,29273,8049,-30409,20420,-22700,-27316,19301,-32537,13626,32342,31551,26716,26168,-25544,30032,-28946,23296,-226,262,-13249,17415,-19760,-998,-632,14025,-9852,6712,7736,28169,-22216,26384,-30637,16197,29741,-3035,31778,29497,14214,-28946,-20366,26221,-11059,27927,-2240
3,-807,26067,10614,-2253,6659,11438,19935,19925,16702,-12646,10697,-10245,-8128,22797,-2978,-6974,16813,8471,-426,18551,-21628,-10972,4035,-12624,10716,-31541,-23123,32017,7525,-19176,17790,11060,-6623,-21653,-32371,4692,-11833,27428,7435,24847,3292,-2548,18346,-26720,-17027,13633,25652,-19497,25837,28155,-12670,7947,4323,7650,-15958,-9601,26045,3964,10012,-4989,-15381,4536,12013,8738,-28617,22003,-9640,-15207,-17224,15904,-14209,29538,-14869,24138,-9948,-28929,-3895,-15188,-30942,22864,9250,13219,-6521,-4738,-29532,11316,414,-19226,3083,-2778,-23710,5462,-12135,2092,-10516,-21530,-3548,-901,30178,25979,20562,-5739,1436,-17697,-2560,12358,9201,27772,8085,20116,-21517,26764,23962,-20652,15095,-7986,-19109,1918,905,16696,-28686,-1789,-6156,25469,-13357,31870,-12986,-26427,-8947,8607,9942,18753,-17786,-12847,5409,27580,-19487,-21715,-28780,-9095,-1474,17311,-30631,2877,-17162,-102,15127,-32154,-25341,-650,-25303,-10972,11247,11253,-9967,13845,13565,7240,20948,20039,-24463,-2537,2363,-27724,-9484,-24562,7601,4553,-6640,31075,-11984,19766,1187,11391,-3451,1406,-7669,12256,30707,-32326,29438,-28942,2308,14750,-18022,-14953,-17855,14282,631,-6035,-2407,9247,10124,-7861,780,32094,4688,-23291,-7966,9448,20084,19446,6668,14743,2570,17503,5946,18698,-25424,-1785,-4376,-31191,11905,13886,25148,-23811,-2285,20211,31979,22704,20379,-32385,-31286,28463,-18079,-31143,-13487,-4372,-4312,2503,-24713,-29744,-11338,-22646,-9956,18360,30863,-23174,23859,21488,-28174,29446,-14410,-26363,9078,22364,18582,24313,32536,6956,-18603,24751,-3717,1635,-31482,-14737,-31912,10415,-13693,27378,2625,1362,18499,6945,8384,25642,-31286,-9722,15148,-25878,-22674,-24669,-19486,-21694,30624,-8393,6690,31426,-8847,31422,-29385,23718,-12308,-11292,-19516,-29366,3099,14338,15762,3696,6807,-15176,5683,-23175,9022,-3650,10788,-21429,-24559,28257,13401,13148,31571,-11798,27709,-24532,17509,-29142,538,9063,22083,-8615,-16004,-25870,-10095,-11006,-18080,-4874,22154,-31979,-17669,8188,-31823,-30740,602,-16997,9321
,-31053,-17431,-1562,9273,3528,-13438,-11797,-21202,-18992,25459,588,-30843,-21887,32083,-16400,-23317,10520,-28029,-23026,-10208,-7235,14799,19145,30741 diff --git a/tensorflow/lite/micro/integration_tests/seanet/add/add6_input1_int16.csv b/tensorflow/lite/micro/integration_tests/seanet/add/add6_input1_int16.csv new file mode 100644 index 0000000..e62d3fb --- /dev/null +++ b/tensorflow/lite/micro/integration_tests/seanet/add/add6_input1_int16.csv @@ -0,0 +1 @@ +-15869,-19691,-7954,-3614,-22925,1911,-10062,-8228,-20290,4828,-14688,27325,5933,6987,-19783,32680,26962,-30202,19418,21998,-114,421,8450,-20845,-12998,10434,-24953,-25271,-13090,19659,-20340,30622,-29126,403,505,20604,-18944,30559,-13067,1109,-11185,-11770,5967,-20024,-10973,22077,19062,-15977,-2070,696,1427,-10789,10425,-3526,330,-23355,13484,15823,-23312,-4663,27921,18079,-2471,-4919,7065,-18594,14108,5391,-18545,6628,12581,19191,21304,-5762,-10185,-22039,-27274,17752,-24948,-24774,30940,19875,-10315,-24606,2670,9854,-32128,-24276,29624,14510,21813,-20877,25141,23300,27198,-27786,-7476,13034,8567,31926,6316,26611,2234,-15484,14492,16942,1296,-22829,18585,26930,-4204,29605,-1947,-20894,-584,1717,25925,22188,25035,14187,-1435,-1404,31741,5513,-18444,6457,22224,-5620,-18498,19604,-12443,509,-32121,7742,-10000,-820,5377,-3929,-31903,1027,11497,16208,2412,-25019,-22225,-8048,-29351,-7113,-330,28373,9412,-7708,-5387,-20578,-15329,15040,7596,25273,-25978,907,4905,-28955,-9048,4858,12676,32404,557,6121,16307,-18825,-8941,-1421,-9004,-13565,23387,-4400,-11029,-3817,21606,-19710,24783,8919,-29873,-18418,-27677,32082,22177,27635,-32343,11967,-12871,8173,-28814,-859,31064,-23868,26372,-21320,-30289,26814,-32664,24935,30143,-13999,-6151,-16739,-6627,-20670,-13065,14329,-18803,106,4657,28648,-2436,13533,11352,2266,56,10517,-20466,-27803,-23420,-17290,9384,-19589,7248,-27695,-9069,-4412,-9449,-11921,-8667,-6751,-13025,13734,-2919,-15774,-30089,27188,-17257,19529,-14512,22081,-912,21161,2811,-26974,31819,9870,10672,21740
,26429,-26247,-1235,149,8308,12857,29334,13378,-29301,-14314,-14068,28347,-22945,7030,17667,15575,-22111,-5472,-3684,-8678,2897,-6843,-12038,29437,1654,29925,-14649,-15239,24750,-15237,32717,16346,-5438,13134,-23716,25118,-14881,-7418,18569,-31637,2502,13943,-9608,-3050,-13422,826,-22082,14131,9523,-12914,9980,-11308,25456,30501,30982,-5604,3734,7201,19154,16472,-19275,5339,-7840,17070,-7192,24167,-22155,27940,514,-554,5819,12808,-13726,28575,26897,-12580,11963,23072,-797,18347,29476,26527,-29802,-1278,25009,-8847,-21469,-28040,28688,-10508,31733,-31518,810,-18882,23540,-19306,-7554,-3531,17457,1055,-16365,-20665,-17032,13701,-22109,-23755,-32456,-14782,25289,911,-19953,28662,-13586,-9800,-22307,-16136,24059,1787,-31710,23114,566,26469,-17037,-15901,8500,-13713,-11467,-2607,4657,15585,2355,7947,-10658,-28704,7216,-13505,2037,13486,-24810,6799,28940,-23392,-18751,-2804,23115,29736,8537,-21826,-10652,19481,1855,23511,12306,26685,-26049,31791,12680,-3201,-7467,-17901,17697,-29908,13436,-702,-7044,15946,5933,16024,19064,4196,-2275,-4763,-2347,10273,-22964,-27379,2153,-13794,-15783,-8455,-20546,31577,27943,-15847,32748,17344,-267,-9409,18105,-4291,27935,-2527,-8746,-23320,28967,-10478,20592,-13395,-6238,15917,-3802,9814,28495,-22181,-19035,9311,-8904,26863,28082,-22823,22217,-4247,16331,32422,-6968,-13727,23005,15449,15117,-8252,14163,-3722,-18205,-8816,-25203,-18976,-13706,29016,12315,27179,20493,-7365,29843,11003,1624,-15371,11939,-19512,-4118,-28426,18836,-27560,27851,-11858,-3177,-24067,16037,-8358,-23982,-24458,-20466,-419,-32705,28084,5224,11896,5718,31581,-5705,8788,23221,-5061,-15196,-30364,-31651,-9563,-21273,17193,-10385,-20027,25905,-32348,16339,-9470,-17822,8320,-6885,26572,13736,24355,-32593,15413,-21482,-182,-30669,19292,-29540,28753,5067,-28123,-14661,-19619,13836,-30545,26704,5957,17318,4025,9590,31278,16055,-27357,27106,-21482,20012,11497,-17559,18921,-3945,13586,5976,8072,-32185,718,-17765,-11988,8656,-26697,26442,-4927,25780,29294,-24561,-15061,18081,6
640,-7582,11224,9860,12744,-2734,-24474,14060,22256,26721,-16630,-31763,-30393,1403,27934,-21114,3009,27415,23352,8328,11287,14441,-13116,-8764,29038,15202,24249,28899,-14802,-3186,23809,-12658,16583,18633,-24705,-14218,4429,-7999,21721,22771,-185,25765,-9469,-16867,28680,6786,23159,-28098,31641,7770,30011,-21316,-29867,-98,20039,5572,14105,-18030,2787,8828,-25952,12160,-11392 diff --git a/tensorflow/lite/micro/integration_tests/seanet/add/add7.tflite b/tensorflow/lite/micro/integration_tests/seanet/add/add7.tflite new file mode 100644 index 0000000..0808042 Binary files /dev/null and b/tensorflow/lite/micro/integration_tests/seanet/add/add7.tflite differ diff --git a/tensorflow/lite/micro/integration_tests/seanet/add/add7_golden_int16.csv b/tensorflow/lite/micro/integration_tests/seanet/add/add7_golden_int16.csv new file mode 100644 index 0000000..cb840da --- /dev/null +++ b/tensorflow/lite/micro/integration_tests/seanet/add/add7_golden_int16.csv @@ -0,0 +1 @@ +-27912,5613,-11794,-19886,-11823,26115,-1571,-29137,31713,645,5920,22559,32767,27847,-12069,-19683,-7415,11293,-15035,-30625,22642,26874,16668,6407,27415,-20936,19385,18640,-32768,-421,-13946,-28434,-10913,3438,32274,-12429,-22330,-18013,15386,12697,-1904,-21148,10312,-5408,-6549,-21966,-8874,32767,32767,16082,-23711,-12976,-27996,-3817,32215,-32768,14937,-5778,-7047,-1948,-4154,15733,-23977,6258,-10325,-3183,32767,-27753,12888,-14842,12205,20911,-26781,-5695,-18009,-29200,-4632,-32768,31507,-32768,18088,32767,26966,-6090,-12219,23240,32767,-30116,27786,-20766,20673,32767,-32768,-2399,-4046,-11720,-10544,27041,13845,-8189,21462,-17589,3583,16345,-14973,2905,2969,-25701,20680,32767,4086,-22426,4447,-659,-23578,4008,32767,18727,-12819,5119,-14426,13048,16725,-13890,-8394,-10583,15586,32010,8780,-24041,-12341,20879,17326,8742,-3713,-15432,32553,-8413,-32768,13827,2788,788,17928,14721,-32760,-1782,8048,8729,11504,23898,-13724,4657,-14283,32767,-24468,-1395,32767,3952,23838,-27422,16213,4092,3828,-184,11569,-4192
,29087,2182,-5397,21605,14774,-6907,5885,25956,-5479,11527,98,-15091,20690,1923,3952,12933,-32768,-23865,-32768,-995,-19243,7117,-18924,-27717,5120,-27825,19323,-19186,-6141,-6878,-13116,-8201,-9705,-15188,-7005,-16851,32106,-18887,-6818,-4725,-16968,24960,-4142,598,-11669,-29604,19356,-11026,-23253,22883,11992,32767,-31337,-13111,32767,24881,-16166,-29086,28422,-32768,-30262,9563,24952,-4711,-16701,22871,29192,-8068,9008,8358,18854,10218,-23587,-29388,26309,27110,-11232,-13994,-15436,3815,-27616,-8306,5349,-6331,-32768,-32768,15518,-32768,-24142,32767,-32768,-25324,23490,-32768,-9404,-4094,20781,-32768,25104,-4592,-13730,-717,7486,13415,27746,-31896,32767,24849,-19199,8607,-14639,10879,-6870,-25588,11850,32767,-8040,27985,7200,15551,21144,27100,-29589,-9948,20587,12161,46,3206,-26289,-13035,-8717,11371,-11626,-29077,-20590,11746,30372,-23046,-22436,18704,15340,-32768,29124,-4045,8023,-13886,32767,-13294,1513,-2854,-32768,-30346,-28028,-6766,-6582,12783,-15439,26609,-10211,5670,24551,-9853,-29652,-13743,-15955,-18962,-25663,4503,-32768,32767,10489,7350,7391,-26349,-14336,1616,-434,3975,-32768,-24543,-4650,9250,-32768,15790,9455,25313,4738,-7272,4120,-5112,-17078,5092,-31914,-2564,24325,27240,-2260,32767,11055,20123,21546,-4497,11255,650,24358,-8629,-14646,-6555,-32768,-2976,-1484,11199,-10725,-32202,28221,3081,-14570,-4458,21546,-9947,-21953,13343,-9908,29480,773,20468,-1586,-17955,-9234,5834,-18772,21606,-32768,-7497,18739,-14780,31527,17615,17214,19360,21481,32767,19424,32767,-12708,30070,-28828,-9095,8234,-15046,23298,5854,6244,-9611,-1202,-26829,-23226,27766,28040,-9315,-7762,22473,-12434,-15932,7467,-25139,21161,-32768,7002,-32768,-11616,32767,11253,-12728,28124,-26964,6215,-24553,6136,32767,15524,-7406,21391,4259,4395,-17417,-32768,-2204,-16477,-26114,-32768,18857,-12654,6571,22756,-24654,-17490,13709,988,-16945,17401,-12007,32767,12121,310,-2581,-26101,-29976,12753,14196,26291,9646,-5063,8878,32767,16064,-17246,-21751,-5760,28380,-9655,-18531,17514,31904,-702
9,-13215,-18515,32767,21233,28880,-31842,-7402,-16277,-7677,6232,-18126,-27330,83,-17565,22145,24050,-26432,21813,1075,-10393,-29579,2334,-386,4332,-3921,-11603,-27876,-27512,4636,-27821,10103,12883,-4800,21536,29933,-7040,-28004,-3418,20559,-11792,-15735,813,21766,16907,-9047,-11402,3177,24867,25821,5511,3769,23399,23751,5716,1798,24729,16485,31609,-2542,24610,19754,-7334,3463,-13984,16164,20885,7113,-14527,16845,-7553,10854,32767,7731,1486,3676,9642,-27651,8557,12677,-4944,24252,15119,16802,18959,-29033,25653,26060,22936,-2725,14480,9903,30318,3597,-15930,-16563,-23654,32767,-15626,32767,-18963,-6916,12161,32347,-7563,-10300,-14264,9244,-2831,-17436,7893,-32768,28166,16100,-11301,16419,-4736,-3805,32767,-31596,-22932,5097,-425,-12426,-929,-24835,24336,-14194,32767,-32768,10434,-18695,-20978,-32768,25682,-12620,8321,-20578,-22766,-24066,14765,32767,-20881,-17467,1550,28603,-17505,17240,-17349,-32768,-21487,-9073,30471,29023,-6220,-19611,-2193,-25955,1624,-32768,-19798,22227,-647,32767,16757,19800,29564,15770,24891,-19166,-16239,-8005,-32015,28261,-1892,-19166,-12152,-32768,-21225,20459,-23162,-2552,-22337,32086,6009,-15048,12609,-32768,-24473,19220,-986,-32768,-32768,26516,25563,-32768,-3075,-13520,-8353,-30353,-21811,-28857,32767,21654,-25957,-26261,11800,10239,-2372,-4422,-16625,6731,18913,-32768,2870,30246,12071,28215,-4630,-2033,-6719,25261,8130,15150,-15792,-29885,29364,29792,-9998,-23526,14722,23491,25336,-8381,4738,-21544,-20804,11983,-27449,-10907,-19916,24561,-16984,-3280,6922,-22038,32767,-2519,22083,-12729,-32768,-1755,-17716,27909,9510,8104,-9728,12730,21257,16707,25247,-26628,32767,19715,25226,15265,1682,-20793,-6182,5860,7858,-32768,-8784,-1486,20093,-18654,-15798,10450,-7049,25125,8885,-6973,-10290,-5611,17445,22003,-32768,13445,-18995,-14133,5844,4224,-5494,-32467,-19177,12238,11779,15489,397,-20939,21518,-30827,2715,2039,-24532,16868,32767,-32768,-18226,21427,-14775,17777,9440,17357,-26964,1969,-32768,-6527,21452,-7330,-32768,4470,-12117,-24083,-30
839,-32768,8009,16511,5870,-14456,-4647,25881,6988,2573,-20285,6779,-32768,32767,10595,6821,-12489,-3242,-12294,15225,-11412,13611,-906,21840,-1807,7424,-25497,4833,-9832,-9584,-17014,30499,13344,-10805,26061,-6323,-9041,-21470,-21208,26458,-21244,-23083,-7195,-32768,-17850,32143,21091,-9423,-7984,21190,32767,7717,-21601,14513,22412,-397,32767,4346,-6825,-2384,-16883,-24421,-25187,-25404,32767,25396,-25660,23994,-23211,-28067,-5018,-30139,-17739,15772,32767,-28294,-13019,-30947,-5140,23821,-10807,12591,-30512,10326,-3482,-23688,-28135,-7488,-32768,-250,22444,20985,-14372,-2706,-7544,32767,-32768,-16307,-11327,32767,-29034,12629,32767,25138,-5934,-15192,18007,-8813,-12707,-15603,-16743,-6742,15706,3654,27075,4553,-24180,-340,24959,-29575,9612,-20147,20526,31234,15740,-12870,-13461,-27508,20521,-17750,-13124,-20070,26821,15756,8381,10791,18731,19238,-28157,-32188,26971,12889,681,9779,-24830,-5837,10671,18662,-32768,-864,11103,-23489,-785,11352,-15564,32767,-22785,11579,-6211,-27550,-20223,-6730,3110,13284,-13612,13658,-8862,28262,10066,29490,3084,-18345,21290,5636,15625,17268,-11513,552,32767,3660,13818,-17500,32767,32767,-7933,26349,5080,11752,-22508,-11301,20554,-22783,-22010,-32768,26101,3416,-16944,-13761,15511,-12109,-27147,-20452,-20672,-714,-4909,-3094,-3403,-17621,-32768,27023,-22781,-8108,-32768,25353,-31375,-1884,11204,-15260,18815,-25155,29114,-32768,21611,-29661,-12959,-18011,5262,-25702,-21483,-11882,633,27124,20556,1170,-3306,-11184,-24675,26145,11012,18893,-1614,20255,-7320,2877,-7072,19114,-28412,-15730,4933,32767,28510,56,-13656,1606,31079,6504,7996,-32768,-190,7535,18992,-23198,32767,-25243,32767,-7018,-32768,-370,1244,-12145,-14886,18368,-2139,20521,-2986,-11710,-8620,-32768,-14529,-24237,-21954,-30886,21362,19586,-32768,-32768,-28289,3721,9727,-7591,25619,-13879,-4903,-15440,-25414,11703,-13896,-24005,-25814,-1077,14391,25781,24861,6373,32767,14153,19689,31475,-22065,-28696,17358,25644,12220,-10752,9185,26863,20432,-24158,-18933,32767,2681,30685,16
729,7982,-14433,-1184,-28592,20091,-24530,23607,16531,-19356,-4893,13432,10953,-3574,1818,8304,-32768,-18990,-4581,-23866,-6277,-24759,30518,-13540,15092,8875,-32768,-9704,-7366,-27896,-20171,-3990,17408,-24928,16854,4662,-1269,1626,14379,-24030,-28759,25714,-31435,-26972,-27827,-8539,32767,-17764,20179,-32610,5195,-13965,-32768,7563,-24601,9563,13560,32767,32767,11094,20568,4191,-350,-29224,11948,-23080,5894,-32768,22695,3807,-16199,935,-23200,-27622,20012,-12574,-22157,-224,-24175,19612,26360,-6809,10113,1565,-22188,-26139,-31085,13986,1976,-3580,-786,6229,-10596,-16484,22980,32767,-3478,-18901,-28638,-30051,19926,32767,-7762,4084,-2523,10488,27259,-5127,-24808,-30638,-23650,14805,13343,-5582,22710,6752,32767,-22215,32767,-27197,-32768,-32768,-24108,-32768,-11693,32767,-125,11953,-10313,-20288,5059,-890,1284,-1495,-2771,-24634,-8351,-11600,32767,13706,-6960,13855,23800,10487,-22965,-32768,17837,-19582,14962,-15035,-9006,13201,7264,2689,-3252,-1173,-4190,-22015,7720,-2759,31691,-17637,21660,19510,-32768,6528,14673,14116,-10622,-29452,24029,31958,8482,15859,-32768,-754,-10785,18075,-23700,-122,32767,-15680,17613,32767,-2106,25217,30430,-5506,9270,1465,21378,32767,26273,7478,1770,32767,-24304,-22092,-8489,32767,-29708,27783,-20979,-6572,-11722,-32768,25314,28955,3118,10882,-32768,-29426,2460,-4534,1389,14585,-21884,29829,23600,-24371,-8015,-18556,18675,-10682,15734,5861,-32768,13449,10075,32767,-1673,7845,-19351,1157,29443,1124,-21416,1807,9275,15127,-21982,8919,29412,16806,31104,-5210,25347,28160,6414,-32768,-24860,21740,5946,-4495,32767,17364,-32768,7864,-32768,-29271,-19984,-16440,-32768,-21252,-30720,8972,24482,24541,-32768,26473,20959,5240,18892,4705,11200,-8927,6857,32767,-15238,11723,-2760,32767,-8509,-23130,-10473,963,9903,15090,8210,10708,16647,4962,-26757,18728,-31942,5862,-15872,5258,32308,12662,-9532,3436,-12728,19177,-21158,32767,-5111,19101,6761,-21775,32767,31932,32767,6441,32500,-20107,19357,29114,10716,-6294,-12553,25834,-16308,-28399,32767,32767,-31
766,28080,-16651,12840,4250,-32768,23362,25810,-2494,-32768,21447,24177,-17097,-27083,32767,-26640,6421,-4813,18524,-15010,-3165,-17460,-25468,-2645,-2851,6715,18268,30719,-32768,15601,-5813,-15963,24483,-12086,-32768,-6195,7957,-13620,5900,31863,25176,7935,12288,21798,32767,-18177,22365,13846,8989,-9378,21991,-24158,3527,30772,-2118,-18524,5967,-4288,11615,28405,26138,-4950,-964,1989,-14410,16936,-13287,13716,13156,-21196,-769,-8768,31033,8816,-23860,-32768,8886,-9112,-12334,11082,22157,-13438,-27190,-16448,11765,18897,19189,-17371,17745,12446,10634,9119,12559,-19454,13384,2998,3724,-295,21198,-5355,-2340,-8784,32767,-32768,-25611,-12129,20570,1915,3417,30074,-12302,-13156,20955,-32768,4396,-32768,-2555,-32768,-9028,13467,-18196,-32768,-9688,-27037,3728,12035,-5065,15159,-31915,-32768,-6890,16011,26090,-30740,2541,382,-15100,-22857,-5805,-27131,27953,11486,7446,30680,22796,9004,-30618,1031,-19920,-21382,7799,-1214,2002,32767,-32768,-32768,29829,24768,-22528,32767,-14392,25680,20647,1015,32767,4900,2825,-18703,-9374,10652,16034,1488,-26247,31330,21808,-29177,-2874,6807,-27439,32767,-9498,27655,23573,21227,-19314,-11069,30025,-32085,32767,-22401,32767,27653,10685,-16182,-21864,-15900,-12014,21104,-32768,17553,11098,22855,-8422,-10511,-9399,4861,-1452,480,-11588,28894,16388,-17363,-28769,7269,24798,-17681,-1650,27412,25679,21334,18536,8304,11569,-9013,-5716,-27492,32767,-21512,27949,-20543,-32768,13207,-23458,32625,32767,-7637,25576,12551,32767,2404,20715,-30559,27056,-24286,-23742,-12681,5927,-29843,3251,22491,-32729,26557,19888,-13652,-9595,10023,-7159,-31433,2176,16906,28184,-32768,-16082,22467,2371,26174,-3153,18827,24993,-15259,-16084,22886,19995,8568,17497,14997,32767,-20649,197,-7592,-12623,-19023,-28243,-15603,24494,8492,-5855,-23602,-32768,16994,10339,-17173,19822,-24271,-32768,-32768,-24235,8618,-15136,2842,-18342,-17730,-14105,-32768,3082,-32592,29198,19643,-20558,18204,-13679,32767,-662,27238,21434,-13561,-28153,-395,10737,-12467,16356,-32768,-23846,30885,
26326,-18931,32767,32767,2331,-4922,-4021,-25362,16392,9555,-23095,17988,-638,-19523,-14914,818,-1149,24625,6671,-13323,-30902,31660,14352,-7760,30877,23470,11173,-32768,32767,-26925,26307,-15944,25181,-29833,4826,965,-6122,28682,3779,2906,773,-16411,-11845,-8697,-3403,18548,17069,-17932,-15130,22913,-25192,-28207,-16775,9074,2332,-9459,13723,26710,24573,-6860,-12898,24755,15643,2701,8097,-17761,-23781,-18711,-20533,23047,-10966,-32021,-9807,-32768,-7419,-935,-6959,-9473,19327,7118,25631,18995,-28199,-17627,-23976,-27126,-6304,-12079,32767,-10551,-7461,29934,7694,4014,-4809,14127,-7989,16321,7878,-32768,13454,24044,-22352,-2012,22771,-19425,7785,6981,-32768,-7245,25829,31535,-26288,13222,4249,9664,-20983,14193,-32364,25240,6217,-18750,21405,-32768,-18786,9901,6656,9220,-18946,-32768,-4349,-21855,-22547,5215,20908,-18280,18991,-12915,22247,-25383,16287,15906,-32768,-22910,-29992,-4396,-4591,-26129,-2268,-13797,-653,-18536,-16164,-32768,-8719,7949,-10601,-32050,4220,32726,7168,-10102,1736,20696,-31500,-7026,32767,-20638,-23091,-2949,32767,11172,-918,18340,-21817,23359,27250,237,29217,32767,28756,-32768,18563,-5467,-31589,32767,3265,14153,-15725,26574,31027,22976,-14729,13442,-5795,-32768,-18654,-11768,8995,16993,-24054,-19871,4274,27325,7591,13029,7055,-11106,3758,-5443,13019,-25444,-2947,-23587,17318,11720,181,1523,32767,16402,-13376,7076,-1578,20504,-17467,31314,7949,-1410,32767,32767,-20109,-3445,-32768,17101,20367,-20036,18159,-31379,7603,22229,5614,22986,15047,-20221,26932,27101,7116,27317,-5528,-18196,25977,32767,-7298,-32768,-15812,-15052,-16421,-17422,-32768,-23803,-24090,17425,14488,-32768,-32567,10168,-32768,21179,32767,-14612,-12597,-21736,-30801,-29430,-8247,22734,25441,-17219,7498,-23671,22739,32767,-11163,15179,-8839,-5947,7509,9710,-28659,-32768,-415,-25130,19495,32767,-3909,-15473,-23189,-9365,14202,11767,6488,-29380,12675,-32768,-26683,13683,1564,25167,-5875,1168,5472,4491,-27485,-9065,-32183,-5112,-27903,-5993,-22926,-8706,27629,-8713,12696,20135,-16
726,25744,-21192,-6941,18259,-25548,-4266,-14707,-32768,-17363,19179,-4375,-20870,5393,-14728,-29997,9346,-25581,15040,-29167,-16228,12608,-12,26106,-21095,-14498,-23275,-18940,23923,-15940,-4894,-29207,25836,-28042,13121,31355,-9356,12420,21306,-16792,-32768,-14095,32767,-9027,394,-20555,9225,29918,-17896,-26142,-22239,-22848,17147,2307,11657,12338,9891,-13791,24174,7629,27134,5221,21842,32767,17445,-16257,2978,4223,-12909,32767,32767,18391,-32768,4329,18499,-18349,-21677,25527,22029,-13127,12950,-31116,-8201,8170,-6865,-6008,-8879,-13408,-360,-11871,8667,-11344,-2380,19293,-8757,-17089,1692,9576,-22378,12988,-32019,31586,-29059,8535,-30739,-9699,31138,7300,-32058,23541,-17544,-2467,6393,13956,-11843,2046,12370,28523,5112,-32768,-2954,-8624,-1535,17518,22868,9259,-259,-30352,25976,30148,-32768,928,-32768,18157,-20339,-2236,-9942,4514,-6553,23551,947,-32768,-22181,30354,30982,32767,12049,-10362,-4045,31517,7064,28698,-2532,-32768,12834,32767,28200,-6167,2806,-557,7313,-22799,-12308,-949,26115,-3972,-29101,-26874,-24125,-16895,22488,9302,10871,-29221,28372,-18911,-30047,32767,-17526,19061,-6693,21710,-7244,11960,23006,-1074,14048,-8177,-10876,22125,-13663,26307,-2867,-13929,-21948,21296,-18212,4453,-20190,-9015,6979,-29436,-32768,-13032,14863,32650,8615,32767,-6655,921,22698,11657,10587,12132,-1230,9804,489,-14040,-22578,-32246,5655,-19731,23234,-32768,18965,-31428,30148,-5806,-52,12677,27374,4207,-23904,32767,32008,-11405,-5067,-20741,-17745,-9955,7899,-456,1122,10792,-32768,6120,19226,-19949,-12882,-25933,27172,14249,-3805,-5116,-10766,-19469,-20769,-12766,13860,7255,22846,28377,25991,-12107,16714,-32768,-19369,7230,31403,-18846,-21716,-21667,-32768,28939,-14671,-13686,29485,-9656,-6571,-32768,10449,9485,22728,9707,-9848,26428,32767,25287,10410,-18052,18371,-25750,-32768,-26671,-32768,-25006,-6984,12199,4221,9504,-18828,-8287,32767,-10721,19911,32767,29931,-25311,31898,-14055,5333,736,30270,928,9663,6451,25119,-1870,27774,4424,17423,-20716,-12777,7660,-8877,18336,-
12810,-17794,29061,-32768,-15195,14864,14718,10422,18172,15192,8231,31073,20997,19610,22303,31800,-32768,1355,5556,-9971,-28444,32767,31872,-32768,13248,19474,1400,4507,-2251,19287,-13852,10447,31974,-13989,-6760,-18426,32566,32767,30996,32767,-11812,-7378,32767,-30625,26814,-32768,-21653,-21401,1148,15102,-32768,-11404,-29685,-15523,-118,-2303,21456,15953,31469,32767,-9456,-21222,11442,-27337,-12600,16991,32767,-4768,-32768,1412,20492,-29859,25984,-19438,19990,27892,4606,2480,-8607,-29619,2185,-8896,-5542,-32768,-16150,20362,-16277,-3432,19987,-2968,8762,18588,20695,-12960,-32768,32767,3664,13508,-8986,17090,-25563,-17481,-3810,377,9880,16301,-22412,5568,24356,-15130,3796,31601,5388,-19866,8532,6632,-28715,32102,-17117,-24102,-127,-13842,19166,4856,21225,-5217,6385,-14435,-1739,-14822,16996,-4821,-11701,-11885,-26364,-32768,13314,32767,24488,-11773,-15409,23750,-14080,32767,-7703,-16324,-897,-32768,-2504,22249,22941,-15344,32767,-28330,1669,-31843,-22594,-10126,-625,-10291,24048,13105,20618,-29690,-18346,-32768,32767,32767,-21336,12272,-30331,8468,-32768,-5613,6930,19714,21797,29420,30658,-1834,12905,12009,10521,-7229,2925,423,7072,-32768,9643,-2913,17174,-11482,1644,23307,-13042,-13023,-17359,2604,-16833,-4825,-22826,29802,32767,-19837,28016,1461,-3277,18855,-20815,32767,22911,-24055,21057,32767,-19591,-32768,-4718,26060,27099,32767,32767,1890,-22483,3871,13804,-6157,11456,9575,-31638,17989,-15283,15766,7563,6182,15671,23522,-5491,5570,-10660,-20983,18252,-30004,-22489,-25591,-24871,1336,-25343,735,-31520,-29433,-17192,-32768,2630,-10152,-1147,-8941,-8651,6284,28796,32767,-12298,-24525,-22301,25939,-20863,-20718,-25072,-20713,28052,-22470,-32768,11285,32767,29531,1061,2809,-9617,5922,-7156,-14198,17560,25050,-24737,32767,-3916,-1083,25823,6918,16493,-28372,10373,-23325,-15649,-32768,12596,7302,-22516,-11137,-23206,-29593,1772,29241,23891,-260,5331,2705,-17617,-11202,-26031,-14284,-15822,11626,-31220,-32768,-7342,-11366,-10705,-13346,-18462,16704,22834,16671,-9273,
14208,20804,26432,15183,-607,22437,29872,32767,14916,-7281,32767,-29358,-11671,5271,-13754,-21072,-32768,-31694,-32768,23845,-12677,-3254,3884,4984,-20413,32767,-8272,7876,32767,21367,16272,-13550,18448,-10319,-23779,-7651,-2980,-30769,-27502,-23709,-3168,11282,27911,-14152,11073,-25463,31602,-9489,-21243,-19737,32767,24977,-15690,-12092,1695,9854,-29489,3997,-11702,18516,-2255,-82,18833,-22075,-1642,-2516,26244,-32768,-32768,-22322,-19619,-26623,15865,-20685,-5319,-10036,-30900,28917,-14519,11942,-15490,13360,4591,9096,-21691,-25263,24217,-3005,-30586,5399,-338,18603,18436,-11630,-14567,6944,28477,15175,-4719,11797,-15859,-32768,-21801,8441,1321,-6210,-32768,22707,11618,-5547,-15828,9391,-9914,-16650,-23329,24325,-22413,32767,15865,-14306,-30647,9922,21023,27598,23951,-31813,9140,-1646,23435,-655,-15314,-28530,2545,18612,4146,-24526,16512,4112,-15351,32767,-31848,-23833,32767,15618,-6213,-4807,-11445,16965,15840,-25565,26837,9996,-19040,11819,32767,-32768,-13492,32767,-17926,-7401,-13002,-16480,-5981,-15653,3683,-31618,28347,-4426,17169,-16812,-25795,14566,-32768,-4036,30704,-831,-32768,-32768,14292,14413,6685,-24038,-27415,-6807,20829,19056,15148,15644,24057,21427,-2405,26436,25184,-236,11471,-12189,-17404,-31017,14726,18262,20541,-32768,-17419,11313,-19242,23367,13487,-9273,-25172,-32768,10027,7875,-1427,22557,-20359,586,-4808,29153,-11542,32767,25859,23930,32767,-11469,524,-19177,1300,32767,-1993,14135,-17159,-6976,-29535,-22452,-10498,-10537,14783,11713,32767,-22402,16490,11694,-3459,-23752,-14845,-10851,13566,5032,-19170,12100,-32768,30098,13268,-32768,-16819,28331,-3383,-2454,-15841,-32768,-15670,-19712,14927,32767,-19838,14330,-25563,12044,-27662,19188,32767,-10629,11391,28470,25159,8008,-6361,-16843,-12434,-6107,-18558,28180,11567,26782,-1234,-17369,-8874,8909,3171,17990,28753,-7263,-12317,-24812,-30371,13368,-28022,-2642,-12501,17339,-28980,15545,-26146,1019,-23552,13579,31967,27889,-13781,-7229,-23704,-32768,2569,-18977,28578,24886,22417,16419,26070,32767
,-6925,30780,23098,-7555,8035,-14227,16236,-28089,19939,-24958,30453,-31442,5479,-32768,-21478,-588,-30040,-22043,-11759,-9048,4608,9820,9817,-16324,-20100,-13909,-22010,31705,-9266,8955,-21528,5922,-32768,-24239,-32768,-5869,-21376,-20445,-1297,-31955,-7571,-13199,13718,-15421,-20580,-32768,24046,32767,21421,-18141,18973,12787,-983,-32768,-23396,-18265,11439,-27967,-24904,-12928,-18868,-3054,-9749,-23142,-24272,-32768,3707,-27329,-10987,-32768,32767,1039,-32768,-8827,-19160,-24364,-31081,-13756,-4414,13167,13422,30605,1652,24888,8898,-23945,22959,29043,30646,-16285,-32768,-13530,-3212,20854,-4872,-4061,12575,21319,15873,16967,31930,12230,13551,-18692,22303,10294,-29893,-9143,-31417,-7753,-32768,-9757,-2727,3615,-17541,-9886,-26987,32767,32767,1320,7509,5010,-25191,4188,25201,-7071,5073,32767,29761,-29121,32106,18127,10687,11339,16680,23576,12026,-17397,409,-6685,-28416,22315,25360,32767,2816,-32768,1518,-19976,32767,-8334,22521,16297,-4541,-9344,9691,394,744,32767,31229,3247,-20400,25263,-24867,8848,5513,-20955,-23429,-29319,8701,16799,4306,-27643,32767,-18884,-20650,7105,22091,22053,20662,12778,7387,5311,-22885,-17307,-24725,6264,-30305,25207,32126,-14032,-23437,-2681,20670,4446,21867,-31512,-22064,-18336,23972,-23277,32767,-15117,-19584,-7990,178,-32768,-15081,15842,25910,-1246,-16585,10524,25032,6733,-32768,-32673,730,10203,17618,-7710,-7336,6693,-20432,26758,-8732,588,2465,-16834,17937,32767,30350,-17306,-20763,-7000,8179,15074,14671,-28065,24975,-5212,-15035,-10225,11526,32767,-10142,-11160,20141,-12961,-4601,10474,-29868,-16574,25735,25190,29400,21397,-2679,13752,-28973,-18128,27924,26704,19861,12972,-7902,32767,23520,-11313,32767,-6780,8432,-19410,4787,20749,19777,-9662,16987,32767,-19639,21473,21901,25506,-993,-10238,31727,-32768,-13222,-22192,-17797,32767,-8658,-2346,9781,-23765,-25116,-4868,12632,-21236,25514,-10929,-14806,-23576,23923,-22208,31431,20428,17099,11709,28966,28680,18156,1931,-17940,10016,-21649,5620,-3043,-8569,22210,1053,-6114,27040,-32768,
20069,32767,-20065,-32768,-29101,32767,23994,-3551,-2498,-30422,-13328,17435,11773,-16263,23812,-14799,-27413,-5899,14107,7063,-14481,-4861,-29297,25816,13331,-4992,31610,17919,-16174,-24423,-19552,-18362,16702,-792,-6952,-15186,-13410,9407,-32768,13567,11297,9383,-14634,479,-6924,-31086,32767,-1557,3695,20938,-15855,-653,29772,14879,7277,12173,-13213,-26881,-861,29413,11539,31084,19557,-29312,-32768,-21560,-3898,9575,6186,-1743,22293,5215,-3934,-11153,26382,-21165,12045,19236,32767,11098,-10263,32767,8270,17780,16812,-1283,-29271,-4221,-30610,30527,-15183,1487,-16180,7394,21177,30499,-30181,23052,-3961,-1628,-563,26595,-1887,-264,-6695,4000,-25406,-7343,-21454,7087,29166,997,-26513,25889,-2802,-23040,32767,26793,-7074,-1802,12134,-19230,32767,-16068,1412,26783,8420,-11292,7549,-12714,-20669,-15919,15207,-32768,32767,-5379,-12088,14489,2068,4600,2219,30999,5801,9805,-23290,1045,-31906,1547,16821,-16576,21915,-22473,-21639,7026,-26354,-12581,28739,-32768,9246,-13256,22490,-11814,-5397,15807,22990,-32768,15424,-8477,-15444,6733,3239,2766,393,-17194,12910,-9413,12438,-22974,-2007,-13887,-14676,-32768,-31237,-12157,6918,30081,4946,-15295,-5015,-9417,8779,14152,-13463,-32768,-32768,-4960,-16225,32550,-10321,16387,-15300,809,5787,4226,-12200,32767,12717,-14546,3448,-27459,23337,-25074,29808,27816,-7065,-28365,-21303,6717,-28646,21424,-32393,24752,4065,-8447,-28806,23705,-23363,29367,-5343,16751,24298,25810,21542,29915,8854,-9499,-14309,-20085,27125,-17034,-7105,-2737,22395,-19496,23464,-24002,-1700,-10377,32767,778,16242,12596,-1775,-17583,-469,-18954,-16176,32767,-6061,-7400,-21591,-32768,-748,-11958,-29440,5550,22411,4930,22307,9266,8640,-17421,9596,-1227,-8688,28419,-16167,32767,-12006,-31153,-13637,22340,-11909,-29353,3206,-25384,9516,-32768,27794,32767,-18076,31304,-20585,-1923,22689,11562,-32055,899,-28429,32767,9122,-13783,682,-2151,13686,-15541,32767,-4008,-2528,-1707,-29475,-20202,-866,24175,-11307,-22468,32311,20115,-11832,-3208,-17163,-14312,26724,4224,-8682,-6
261,-3406,1661,-19078,-32135,32767,15087,-15407,-434,20852,32767,14104,-24146,32767,13346,-5871,30565,-32768,-14317,21696,21937,-29016,-3693,2197,-30206,22394,-16152,-20093,-31960,-23916,14387,-13713,4948,12169,19186,32767,-6864,30839,-32300,-32768,29780,-20860,-5245,-8104,10898,32767,17307,16225,8792,-30989,3467,-21257,-2347,-21104,32767,20238,-28306,22283,19091,28010,7345,1213,-10849,2904,-30329,13443,22160,12557,-32005,-8279,32767,-22395,4641,-10995,-4563,-12800,32720,-16090,5058,-10942,597,-21806,-11942,-6501,32365,15354,-12751,-8982,-17058,-11204,6066,-13824,15494,-30698,20480,32767,-27430,27215,-3539,18389,12507,-23693,-29024,-9508,10410,-17946,6325,-19209,9534,-6420,16952,-3464,19702,2525,-32768,-28781,-6722,12509,2511,-16726,-31924,-32768,-32768,-6640,2005,-9063,16713,-10578,-32768,-4522,966,-11747,28114,32767,7707,11308,-18337,20153,-23371,-32768,-19821,17177,2396,4726,10951,-32768,6336,8276,-32768,-29182,-27363,-7593,5185,-17517,32154,9721,29044,-21045,-23554,-20535,-60,14752,-14214,32767,4636,21791,-19146,30031,-26589,-5427,13131,19905,-13719,14924,-25188,-4274,-7460,-28384,7935,28709,11143,7973,5167,-10840,20718,-7036,32767,-28229,5952,17136,-18368,22265,-11623,7014,30698,-19395,27270,32767,32767,4636,-13457,-32768,-32768,-7149,-3679,11445,-22050,8648,21552,-17481,12038,-7700,-618,24820,32767,16226,29351,-22986,-23990,8655,14255,-17812,3566,17565,-113,-20994,18010,-20387,6770,23028,22717,26619,2720,29089,-14117,16935,-17392,-25635,60,-5541,32767,9939,31416,26057,8586,-12909,-32768,32767,30333,22007,-4533,15738,6364,-1686,13385,-18221,-32217,-27345,11554,-29484,9790,28346,-32768,10474,13514,-6222,32170,17490,-32768,1934,15649,-32768,-23089,11376,-4712,-6156,32767,23439,21635,-27504,26284,32644,7898,-10830,-530,-32768,-20577,-21727,32767,4432,26743,-10951,26893,21564,-11632,-32768,11887,1462,3508,5100,-28850,-10136,20227,-4987,-30963 diff --git a/tensorflow/lite/micro/integration_tests/seanet/add/add7_input0_int16.csv 
b/tensorflow/lite/micro/integration_tests/seanet/add/add7_input0_int16.csv new file mode 100644 index 0000000..5128ad1 --- /dev/null +++ b/tensorflow/lite/micro/integration_tests/seanet/add/add7_input0_int16.csv @@ -0,0 +1 @@ +-20833,7800,-22550,-28221,-6138,22077,2266,-22314,22371,-7183,1005,19824,29241,26404,-9112,-26933,4462,8789,-10928,-19436,32291,24566,22668,14374,26941,-19367,13299,19960,-32067,3936,-6271,-24602,-12231,-5489,22816,-7441,-19238,-29618,10827,20800,5619,-13976,-1086,-8324,-16584,-20868,-14121,31168,30193,7758,-23842,-23931,-20088,-4616,21150,-29073,5504,3126,-8280,-1797,-12057,15116,-13394,2589,-3029,-9989,23162,-29032,10965,-26784,17527,8570,-27421,-8371,-14778,-17265,2007,-32720,26699,-27958,23753,24507,24754,4742,-12616,28761,31880,-29668,21594,-28308,18745,25034,-31608,-8119,6659,-2188,866,20090,18530,-2916,19348,-19819,7598,9057,-16922,8129,28,-14108,21911,26008,2640,-28258,11904,-386,-24188,9619,25892,18659,-17974,724,-16126,12897,8069,-6993,-15723,-3074,18809,23397,12684,-13159,-21252,12323,27313,6702,-1255,-22365,28707,-3004,-26272,19639,441,-6695,13193,21472,-27580,1776,13796,14310,21052,12878,-11381,-3754,-18268,22823,-24188,-11841,30498,4473,19142,-21640,7210,-2512,-6409,1295,12964,-11103,28031,-2193,1881,22698,4760,-11606,15961,24286,4022,19777,-11773,-18005,11560,5688,2889,12224,-26591,-28230,-24729,-5166,-23909,1372,-18475,-19632,2860,-23973,10675,-16019,-15668,-14750,-10298,-19084,-14930,-11896,3728,-5143,21038,-16972,-7559,7345,-17050,25351,6865,3279,-12930,-26562,28900,-18667,-27377,23703,2556,28631,-25113,-3939,28787,17708,-8103,-28488,29415,-30427,-24024,5232,20546,1030,-9222,22793,22458,-20118,13189,12483,28581,9908,-20366,-24850,29683,17199,-2148,-1844,-23507,-7097,-32509,-3757,7307,-9773,-27427,-25132,16379,-32746,-28909,25601,-22040,-22074,13106,-28899,-5679,-9605,22761,-32516,19877,-16415,-17017,-6140,-616,11543,15880,-32071,29803,26497,-14538,19137,-9102,-979,-2461,-31826,8867,26765,-19047,31193,9909,26282,13280,26596,-2
9320,2251,22539,2738,-1036,7353,-22785,-12050,-7587,5776,-15910,-18142,-23458,16979,18408,-19942,-30228,10073,12226,-25280,20126,-15383,5359,-5569,25418,-10608,-7268,-1126,-31520,-28730,-20290,4553,3289,12149,-17685,15585,-14474,8971,28293,-16915,-29093,-20068,-9530,-27273,-14315,15913,-30038,30304,17844,146,10557,-29929,-13433,11441,-11480,8687,-31252,-14614,7565,14941,-27311,13947,14072,27784,-5293,-4504,5070,-10259,-13206,-1027,-25448,-4507,22152,14964,8947,23034,574,23288,14531,-11996,17998,9554,32396,-3971,-13955,4007,-27095,-4510,4930,14367,-14671,-23008,32112,-7429,-12114,-13502,11005,-6072,-18152,15930,-5178,32538,8607,23206,-7496,-18144,-5075,15781,-19723,27085,-29194,-6212,30385,-6957,25580,9487,27435,16642,24107,28105,10114,30249,-16424,17667,-26962,-14458,11772,-4550,22564,-201,5501,-9404,4249,-16999,-19420,32756,17184,-12924,-6338,14196,-635,-15535,-1685,-18723,27090,-29760,10824,-29837,-8754,30724,21624,-22441,16365,-18481,7396,-19557,4918,21619,5390,-9272,10833,5082,-1747,-11435,-31733,-6159,-16540,-32129,-27688,28386,-5792,-3953,13712,-28705,-15276,16453,-8435,-25682,21899,-2343,29786,20292,-10249,-2238,-23071,-32410,4202,13147,31893,11610,-13711,3204,31675,5606,-15649,-15707,4891,17339,-20108,-6973,14255,32328,-11553,-24248,-23099,32020,12779,29200,-26059,-393,-12395,-8325,14844,-11336,-31880,-7120,-26011,18876,22776,-16165,16599,-5364,-15148,-24527,6273,5953,7874,-3746,-16431,-23997,-15372,-3929,-24852,21334,10138,3783,14375,18265,-3705,-16536,5335,11448,-23329,-25271,5333,30890,15885,361,-21756,8896,20711,30507,12643,-2547,31459,25412,17420,1169,17651,6093,30850,-12260,19182,26919,1087,8018,-5379,18140,26312,205,-14415,4979,-11891,6603,32174,15398,11900,3813,4280,-25453,7842,5796,4400,31749,4026,26777,8306,-28682,26588,32462,23292,-1144,3090,13294,24317,-2584,-16085,-27768,-22893,25042,-13756,30870,-18453,-2690,7556,32124,126,-14006,-25457,16493,749,-18649,1267,-27209,18038,27613,-639,12738,2879,-13114,31206,-30146,-27355,12347,-10575,-11254,8241,
-20658,21045,-22246,30573,-32162,2996,-23067,-14997,-32288,26771,-6457,14508,-14112,-12221,-15909,17798,32397,-8734,-19525,864,16793,-11309,18343,-10585,-27829,-32207,2619,21292,24600,4656,-22324,-1344,-25064,8904,-28626,-21071,27860,8558,26029,23395,22188,25065,25415,29238,-10298,-16976,-10612,-29004,15847,-4554,-17089,-17441,-27499,-31713,29734,-22426,-10746,-25608,20088,11499,-7517,17772,-27809,-32147,27622,7217,-29299,-28905,17461,25760,-32319,-12363,-3524,2813,-20293,-27171,-21360,23748,12165,-17226,-31943,8544,16239,8474,-1961,-9878,11826,12954,-29831,10609,20726,20712,31708,-9007,-14147,-4005,18470,13234,25968,-26374,-20865,26382,31164,-1068,-24070,4634,31577,24260,-19809,8232,-22979,-19459,8565,-14991,-17211,-9031,15186,-16688,-5434,2087,-10069,28158,-11151,23892,-15555,-30102,6253,-12802,28069,10929,12476,-8819,13860,19786,5827,26418,-18657,24853,18100,25239,20593,6921,-29209,-2463,2378,4753,-26029,-9392,-9003,19362,-11546,-16487,5374,-5441,14648,-2836,-3402,-6251,239,5716,21048,-31950,3774,-27881,-20606,-6192,11075,3412,-29956,-18169,3389,14223,8884,-6737,-31506,29438,-19500,7272,1758,-28019,20402,21728,-22453,-12434,19059,-4215,9656,9121,17098,-30801,7152,-25746,-17702,26601,-382,-31313,3955,-3078,-12937,-21903,-30080,-2270,25013,9076,-12474,4739,22679,12248,-1580,-23651,9664,-29171,31215,-1086,8191,-5779,-2483,-9421,4164,-6015,19550,1729,18534,4353,6406,-26784,12596,-11542,-11605,-10409,18263,9337,-18385,23053,-16219,2902,-13065,-26954,18203,-24104,-22653,1702,-30157,-6035,28126,17559,-7154,-8785,30059,28943,4726,-9195,21622,22958,11536,31477,-2188,-5821,-13872,-19421,-25274,-16609,-29395,31150,22169,-23443,29334,-26546,-20281,-9623,-28350,-13658,21873,26901,-27278,-4724,-23505,-7167,22561,-20965,2316,-28393,1136,-14680,-26180,-28481,-7215,-25517,7247,17217,16384,-15493,-14702,-1066,30117,-31479,-4879,-409,27281,-22068,15248,24211,20469,-10429,-12364,16069,-17546,-9459,-21509,-4402,989,23923,-1051,21882,-7575,-19576,-2268,27397,-28866,12537,-17528,8524,2
2522,4883,-16327,-1266,-20089,12447,-16522,-13561,-23981,16871,22240,18935,8954,11564,20348,-23224,-30548,28528,21067,799,6594,-16840,-10134,2543,24722,-31724,-8993,-732,-14688,-2983,630,-25964,21700,-29970,20413,-8318,-27719,-21545,-4329,7078,11094,-18002,20896,-5340,22996,19921,24025,-6997,-27588,26861,9855,10457,9213,-10831,-10919,32544,-2068,17004,-18035,32239,27707,-1750,29029,8335,21305,-19583,-22857,19319,-12625,-13415,-30450,20820,2543,-17460,-2588,11341,-15639,-30118,-20964,-13209,-3423,-10864,-9820,3116,-25447,-32568,19445,-30822,412,-26462,23582,-25608,-681,12567,-25474,26978,-24120,22352,-29382,25187,-24966,-13606,-21573,-1686,-28333,-9923,-16567,-2040,32160,26281,2050,-4060,-497,-29266,18736,21002,24190,-9956,29757,-5723,-6719,-9105,13343,-31823,-18993,13413,25200,16521,11148,-1782,-3128,23840,1803,1704,-26199,11234,6629,14370,-28658,32580,-28286,31936,-6249,-32599,9692,7074,-4207,-20267,12966,-480,28087,700,-16055,-1122,-21778,-10651,-13009,-15057,-31773,15524,24212,-30396,-29891,-30969,14405,5415,3429,30770,-12776,-4577,-24878,-29587,17572,-14709,-28852,-31175,-9422,17497,24872,12759,13578,26459,10494,26670,30349,-10736,-18218,10195,30089,19866,-18483,10238,21573,26040,-11992,-25268,29158,-5224,24542,20329,8460,-19479,8609,-28270,10536,-13178,28089,12637,-8320,-8327,19959,5551,-10364,2998,3678,-30346,-15385,7287,-17706,-1554,-20106,23991,-10697,11311,10766,-29004,2083,-106,-23789,-16472,-5992,13167,-31112,27330,-154,-2410,6482,5745,-15704,-32388,18057,-20314,-22914,-20110,-9754,27720,-9793,9773,-32498,6185,-14358,-24238,4557,-22545,-78,19344,29800,28339,15475,23238,11290,-8541,-30693,7469,-29903,15296,-29740,29645,-7597,-4122,-927,-22112,-24266,9814,-9656,-30832,-11212,-24853,25031,29321,-3171,15925,11476,-18742,-18312,-29612,23540,-7878,-3839,6114,1833,964,-24989,15136,26436,531,-17990,-18458,-31892,26821,22147,-17711,9540,-12,5903,20216,-7876,-20582,-31327,-17331,16495,19973,-5977,16899,7408,31389,-21946,27759,-24033,-26760,-27415,-18256,-32660,-234
11,29126,-9651,2278,-17084,-30346,15476,-7212,-2671,7434,6436,-19719,-19999,-11220,26395,10558,-1327,18468,15994,21865,-15994,-32045,21289,-14361,18299,-19452,2852,11119,14479,5240,-10071,10926,-12198,-32000,3338,-13444,32530,-14666,10528,18499,-31714,7865,13869,6207,-9159,-31641,31661,32281,17165,24455,-29744,-4413,-1069,17565,-23457,-5870,32274,-10261,28755,29763,2390,30386,28756,-10676,10039,10383,20639,24558,16047,-1674,5657,27240,-22896,-30885,-4278,32520,-23448,25034,-18724,-8825,-14158,-31586,13507,30674,3161,6615,-29908,-28976,-1477,4166,3895,24287,-25688,27895,15647,-22380,-11002,-28041,24446,-14910,19541,3049,-28729,19198,23,26707,-8687,-2216,-28586,9089,17839,-560,-20717,9363,13113,16117,-25166,4552,18439,26539,23024,-4177,19188,29749,3868,-27412,-26615,13713,6768,-4532,24225,13246,-23774,-3870,-29658,-18565,-22321,-15699,-31983,-20492,-27299,3295,16071,31204,-26898,25955,30117,15218,25356,3574,20197,-20460,18279,30812,-6832,5804,-8095,29550,2834,-16765,-3910,-9454,7543,4763,-3668,5626,20425,-2110,-19151,27409,-31870,1732,-7313,-1729,21668,1521,-976,12072,-21926,26895,-8896,22340,4773,27705,18114,-21973,24028,25583,29729,6461,19966,-22799,13217,30471,17981,3347,-10275,20518,-15281,-22257,27180,27021,-19898,24756,-24289,2061,5371,-28093,27083,19090,-10902,-31507,28265,22798,-11303,-26643,27580,-29353,16715,6204,13761,-7176,-5852,-17867,-17496,-13002,-12236,-3357,6656,19495,-28010,5078,-1536,-12198,14556,-2699,-23715,-11877,-2432,-12676,7451,19657,28887,-297,19508,23475,29950,-22249,19863,5275,14844,-2788,18334,-14349,2576,27633,-3019,-18305,2180,-14711,1419,27841,27446,-634,-11171,-5559,-12363,14777,-11980,17545,16481,-15320,-8621,-16948,19237,6011,-22531,-26236,6966,-2404,-8909,16497,26013,-23747,-18936,-28271,17974,26614,10891,-26486,27117,11491,542,12372,23430,-28022,8086,3655,-5461,-9594,17546,-5802,-14406,-2853,31898,-30063,-15119,-12482,23579,11683,-2937,19232,-12024,-15283,21378,-22458,13004,-30829,-5050,-31776,-3792,19494,-11925,-32745,-12223,-2425
9,13201,10438,-2671,22392,-23232,-24029,-806,15484,15298,-18698,-9107,-6237,-13548,-15597,3603,-20172,18683,18392,-1538,18556,16277,10341,-26942,-5827,-29707,-28646,5888,10239,53,22097,-30327,-26666,23683,28271,-12137,32263,-19806,16068,10713,-9096,30044,7142,-1219,-12221,2046,3963,26361,4361,-20041,29427,31228,-26879,2872,7352,-16005,24617,-15654,24007,28223,13543,-8468,-1134,32029,-27216,26795,-20359,27839,23515,5128,-12656,-27519,-17437,-1920,21470,-31581,20368,9814,28733,3438,-10765,-2333,3104,-5262,-3821,-8323,28904,13046,-5860,-24731,16449,12650,-27977,-2419,23451,16333,14232,17919,1552,8410,1107,1328,-32209,31020,-26047,21962,-26163,-30831,9149,-23956,23329,26827,1831,19076,23862,22084,-6978,23305,-29612,28909,-21528,-29335,-424,-4153,-32460,-1338,30685,-27310,18360,27034,-25429,616,3611,-14181,-23175,12680,19577,16065,-30232,-24715,31802,4578,32386,-13021,21297,21181,-9348,-23904,12939,22898,4447,24072,17272,28949,-10052,143,-2020,-7111,-12813,-18819,-18610,14483,15653,-13927,-13060,-30050,6069,2434,-14222,7956,-30817,-32050,-32255,-28006,16494,-13821,-596,-28415,-18378,-16663,-31304,9582,-29121,31724,15497,-14976,23361,-22593,22383,-9840,28287,29533,-12970,-28734,-894,22378,-334,20256,-29661,-30720,27990,17191,-22457,21770,30025,12450,-10062,-10875,-14394,19929,6172,-13566,20120,-2918,-29911,-11130,1260,-9756,13547,-4785,-2841,-22211,27512,23420,-19626,30842,30990,13737,-32639,22810,-21567,18706,-17206,22241,-26219,7732,-7435,622,27034,6158,5481,2966,-15501,-13313,2247,-2133,29500,25078,-25951,-21986,31987,-27326,-16734,-20765,20609,12080,-17398,22742,31962,13715,-4071,-7296,12915,26713,-8569,3854,-9617,-11532,-8580,-25596,15978,-2527,-29909,-13879,-21094,-16411,-3440,173,-16235,25824,1527,19217,23608,-30026,-20464,-17352,-26774,-17809,-4229,25722,-17030,-11906,29686,8761,-586,-6943,15215,1903,21534,14000,-31772,22862,26567,-32704,-2288,31512,-10837,258,11719,-30444,2195,13491,23914,-31173,7366,4271,19379,-16072,20264,-31809,16988,11182,-11233,29667,-30250,
-9271,1698,16893,12339,-9090,-32306,2401,-14909,-24527,14970,10268,-24707,12774,-12426,14493,-31938,9606,20273,-30611,-15826,-30611,7091,1542,-28290,-7602,-7327,8943,-29492,-5844,-26294,-18302,3566,-6525,-28578,-6453,28313,13539,-17091,3957,23167,-30707,579,29313,-16286,-25560,9208,31375,272,-7145,24007,-16857,16825,23000,5735,17006,24065,25851,-32037,8612,-10303,-30435,28085,6810,19606,-23241,15025,19530,25006,-14701,2226,3016,-28329,-7689,-16028,2480,15999,-22451,-26253,-5305,27868,14812,12056,13108,-19809,-5685,-6847,16399,-16198,8687,-30120,27013,8713,-9050,223,32556,27778,-4422,12236,9984,18556,-9198,30789,10167,10397,24088,31742,-14164,-9982,-25624,15879,29922,-12831,28651,-18855,15833,32145,7046,29814,18362,-31630,18401,29328,3317,18511,-16244,-10244,24802,27648,-11708,-31339,-24099,-26864,-12548,-24564,-22176,-12701,-12096,26329,4973,-24209,-30993,7183,-26292,29122,22797,-8993,-15397,-11075,-23085,-29791,-3894,16540,21441,-23380,2689,-31577,11225,28402,-8329,14373,-19825,2837,10392,-566,-17477,-29976,8077,-27257,30678,30720,-11575,-3655,-25478,-2836,1940,8272,16207,-23416,16228,-31426,-15187,11586,3795,30934,-5603,10463,-3293,7377,-28219,-2578,-21553,6730,-27084,4929,-13756,-9515,17237,-13477,18910,22535,-23051,18196,-21268,-12161,10824,-22833,-7348,-8416,-27598,-23349,26477,3032,-29806,2845,-9016,-19703,1580,-13339,6197,-32001,-13485,8711,-1749,15826,-28223,-15588,-16123,-9226,19638,-14892,-12398,-19152,18099,-30935,21632,23146,-7178,13456,20583,-23156,-28851,-5224,23942,-3198,-2190,-9703,7140,21420,-13312,-27944,-14003,-16232,25042,1469,13203,10481,8951,-17352,27663,3901,25440,-3476,31664,27843,18073,-16680,1601,-2456,-9595,30831,28609,26936,-32462,-2032,15573,-23358,-24263,24598,27911,-1010,22810,-25038,1407,13993,3294,-13623,-17284,-7677,4003,-22313,17333,-11939,-13272,30692,-17550,-23998,12357,4091,-14040,23080,-20650,27002,-24738,6900,-31357,-17947,20016,14322,-21438,18664,-22668,-2960,18176,24286,-22901,2786,10114,26908,4204,-25664,-1618,-11018,-10715
,16094,29744,178,8394,-32671,15529,31843,-27125,-7190,-24165,19404,-9564,2380,-12958,6397,5525,31827,10330,-22976,-24915,25152,24150,28402,8679,-1621,1784,21716,-2469,16475,-4271,-31361,1535,31208,16715,4664,-3333,-10962,6934,-12767,-14794,-2650,16844,-7064,-18909,-32358,-13986,-10450,20803,5659,4487,-29960,27235,-20364,-21644,31959,-15760,17073,-12352,14954,-10991,5912,12080,4449,6877,-5840,-675,22572,-18829,27582,5674,-14651,-15201,30456,-29954,13558,-14219,1884,16727,-19200,-27513,-7342,22789,29399,3841,26758,-9214,-4533,11502,6789,18580,16373,-8674,9631,7178,-22923,-17869,-32045,15641,-9291,14501,-29317,16105,-28526,30722,-4384,-5834,20898,23112,-7596,-31138,30824,26700,-9784,-1646,-29782,-24208,-6437,16235,4449,-2232,10589,-23307,-3723,22422,-26107,-21942,-20128,19099,15341,-4027,-445,-13494,-17818,-13138,-21660,9464,9145,14693,21654,19892,-4676,11231,-30839,-8889,14641,21790,-15784,-12138,-28278,-26299,27352,-22785,-15650,26434,-19492,-4567,-30710,20800,14857,30134,14122,-3889,27205,30426,30044,6984,-21367,18604,-21626,-26031,-31825,-27193,-32199,-3490,16227,4445,-1194,-21005,-8123,32627,-22419,8255,30750,29504,-15056,32287,-13248,11602,-1333,27305,6511,2181,-637,31957,6333,32105,2181,14052,-24120,-11210,641,-8345,26793,-16076,-20113,31402,-25341,-3636,3596,9450,9422,17871,5689,16462,29842,11530,25611,16108,27042,-28859,9997,10429,51,-20853,28130,19631,-27979,18162,20955,2420,7766,5166,10128,-11279,2313,29496,-25810,-7480,-21181,23507,29371,23815,25578,-6250,-18840,29245,-20485,21042,-30243,-11022,-26984,9031,18186,-21764,-11513,-21913,-6593,-4368,-1106,28369,14701,31729,29690,-14236,-14575,7532,-20626,-15047,19913,29869,3532,-28243,4947,20235,-18431,31118,-9669,19890,22287,775,3540,-643,-32539,-7676,-9588,-3071,-32682,-9835,15437,-14892,4889,14840,4235,14082,26336,28878,-3425,-29693,26732,15579,13805,-7665,6275,-18728,-18333,-11892,7715,5643,27441,-19274,5421,24790,-17715,9405,29481,2163,-23776,7449,15992,-21657,24853,-26235,-29793,-6382,-15809,26916,14012,30
351,-3324,10267,-16921,-13174,-4536,6878,-14747,-15206,-15313,-23024,-28321,22525,21530,13519,-2377,-11378,12651,-1954,31994,-9681,-12322,6962,-23340,4948,13409,31475,-7747,29169,-19563,2499,-30538,-22719,-16867,-5511,-17207,23010,24139,9489,-25309,-9376,-30018,23031,26332,-16050,18826,-27528,11813,-32528,3808,14141,17128,22458,27034,31291,-13777,4560,8963,17153,1266,3,-8250,-3722,-25476,-2361,8744,23647,-20113,-10458,26326,-18691,-11534,-13205,-1751,-17531,3529,-13602,30014,30453,-8075,16837,4119,2572,8652,-30542,30828,30735,-26471,20010,30310,-30182,-30766,-1932,25951,15384,24074,27254,-8583,-29234,-1454,15792,-13607,6669,18670,-30588,6256,-16396,8864,-1931,14777,17631,26603,-2545,13485,-19191,-32184,19540,-31512,-21644,-19848,-31341,-9496,-19647,8103,-27768,-26502,-13750,-28862,4188,-19341,4075,-20143,-19969,9333,16903,28933,-16620,-20909,-15824,17146,-23367,-11961,-18052,-30342,21623,-25653,-30835,13432,27745,20986,-9111,7592,-7367,5,-15290,-9743,16999,18437,-19109,31754,2847,-8183,14979,17439,9853,-20218,9575,-16198,-22896,-26806,23795,19253,-20864,107,-14739,-26190,1144,21187,29078,5518,-2314,5626,-12667,-1737,-17839,-20207,-4546,21457,-22939,-30480,-10012,-15257,-7173,-17946,-27407,21746,29063,8711,-2302,9070,30640,17431,18459,-12111,11939,19384,30739,8052,-14391,25454,-28045,-14383,14151,-11285,-18461,-24420,-23470,-26594,23325,-6410,-12049,15126,6572,-29005,29057,-2569,3714,32159,32598,19437,-13500,8993,-6629,-11901,-5247,2052,-25695,-30181,-15309,-7673,15187,20419,-19925,412,-14426,23911,-18502,-16343,-10004,25900,30762,-17491,-9851,-6622,15875,-24241,13944,-2035,22045,-9204,-8806,20369,-16394,2512,-4303,14238,-30813,-23116,-10786,-17581,-17825,18027,-23012,-14282,-14260,-19073,22217,-12421,7280,-24754,4828,-422,2161,-24734,-25949,29222,1188,-24099,5396,-8427,7275,10584,-21500,-8542,11433,29356,14204,3803,17140,-11255,-25622,-31020,3834,-5838,-10637,-31407,30689,5934,-17577,-5699,10044,-20913,-25733,-28995,14784,-30978,27792,15597,-2324,-19885,17172,11145,
16884,25517,-24755,7268,1416,18331,-11496,-15323,-27591,12402,16570,-2734,-18485,6130,12694,-10128,27158,-30231,-19104,24148,26179,-9733,-14799,-20451,11950,23563,-13287,30483,18629,-10499,1090,31189,-28292,-10353,32664,-15253,-17286,-19577,-10326,-6487,-6211,-6732,-25136,26577,-2361,5272,-4977,-13543,23821,-26216,-15916,22568,6089,-32659,-27202,20733,11824,3852,-13272,-28615,-1966,27593,10490,3120,25417,29400,12858,-3857,28133,26397,-337,10168,-18432,-7111,-21425,20774,27173,21805,-27611,-17958,6050,-28048,28125,17693,-12460,-20555,-29492,3159,7015,-9774,30692,-15262,-11212,-13519,23736,-14008,29494,28819,29375,31355,-6665,-1805,-26988,-442,26840,-9999,2545,-25357,5238,-18784,-16964,1650,-11213,9659,17454,31401,-23988,17596,15144,5823,-29754,-4247,-9198,11120,-5662,-24109,521,-30064,19728,18207,-31055,-23509,16965,6806,3378,-13628,-29863,-6935,-19354,16283,25844,-12125,21923,-13967,16058,-22904,9247,22659,-16469,12885,23920,19502,13477,-2748,-16775,-20541,-8929,-23424,16844,3324,32696,-8198,-19957,-9418,9379,15049,22512,30033,2268,-9862,-22469,-18328,10582,-26350,-6195,-2464,25626,-18742,12069,-31099,5819,-20822,9827,30652,29454,-23937,-7480,-29481,-27362,7824,-14094,24489,13034,21721,26187,26470,28257,-10227,30220,17729,-3271,8573,-9094,23375,-23352,10688,-23860,18220,-28422,3746,-29806,-24621,-8182,-26406,-24470,-20868,-15590,16154,10610,11345,-8954,-20477,-18624,-12058,21550,1680,12838,-27961,2831,-28544,-13793,-30167,-9576,-17350,-29055,-8642,-22090,-17410,-24923,21934,-20682,-17232,-28156,18184,22794,11980,-12524,28399,12992,-11478,-31030,-22567,-9859,5723,-21154,-25573,-24296,-24328,-11699,-9190,-15415,-13871,-26456,-1683,-23880,-17461,-27156,27754,12967,-29718,-7253,-12832,-22853,-21407,-22086,-8287,6978,22747,21118,2955,16912,7226,-14965,16510,32359,27129,-22710,-23882,-3588,-3605,30144,4338,4373,24557,29553,27379,26387,24845,23669,4858,-21534,13625,3408,-23225,-10776,-21702,-10427,-29640,-3619,-9332,-5085,-20477,-21548,-23168,31850,30385,2906,-3029,-7113,-
15272,9114,15549,-15371,15818,28204,21288,-31975,26612,14990,22233,13525,8618,31877,333,-16300,-6801,-9549,-21853,30781,20189,25770,-3450,-24699,3701,-24830,30919,-12160,30799,20342,-8240,1650,2894,487,-1293,24859,23131,10372,-22303,21270,-21284,-1705,1935,-9668,-11983,-31379,3185,22333,4821,-16037,28139,-30548,-25623,4494,22920,16306,28926,19834,-713,7916,-26984,-6510,-15090,1878,-25943,27047,31646,-2691,-17074,-12139,21440,-2239,21100,-23945,-25751,-24180,17331,-13423,27637,-16268,-7410,-10371,-7143,-29293,-13506,27289,30261,-8083,-4348,1080,21611,-4025,-29827,-25645,-9718,1222,24387,3722,2525,-2979,-13236,24668,1785,1017,-189,-20689,14037,32539,27568,-18472,-17638,2420,16321,21574,11419,-19027,26360,-7351,-9108,-8236,4575,23543,-17691,644,32000,-16273,3461,18809,-30824,-24592,31560,31529,22969,13978,8932,5069,-32333,-7982,26070,25566,10675,24408,-13816,27210,13285,-15148,32375,5343,6763,-28807,-5332,11684,25091,-10540,11551,30842,-18051,32188,23756,21783,-11457,1499,26677,-24900,-19174,-14963,-19462,27122,-19938,-6872,3263,-19581,-23057,-12118,7323,-11133,27084,-22538,-2827,-22923,21087,-24922,23630,21762,16672,2984,30530,28149,16970,5101,-13661,2192,-9686,9211,-13221,-17426,28013,8197,-13568,18325,-31898,9430,29250,-30638,-31863,-17317,32190,25644,8434,5204,-23275,-20109,18336,6306,-11194,13959,-26096,-31707,-7172,13868,10797,-24131,-8923,-23396,30976,22674,-10841,27410,27361,-26532,-32457,-9051,-25216,4946,7621,-17269,-18899,-16444,2698,-28609,25526,17740,18136,-24878,286,2870,-28260,25030,5610,10281,20336,-11215,-9190,32228,20681,11235,4299,-23961,-15776,-2542,28352,14451,24800,24882,-17563,-30453,-29167,-7778,5811,5760,8041,31798,4859,-5867,-3218,19036,-24538,-156,22127,29901,13164,-16275,23279,-144,21119,26846,-2443,-18172,-12235,-21516,32538,-8893,-3218,-22236,-1185,29507,28034,-22952,29443,8184,-400,5007,19344,-10098,-4210,4278,5060,-17016,4214,-12494,10135,28025,7631,-26019,31009,-1241,-24603,32457,29864,-5289,-3693,4945,-18916,24498,-13009,8247,20533,148
86,-8039,-3061,-15938,-13187,-17922,6545,-24661,29378,-7421,-3409,22198,13786,7002,-4511,26040,16173,9332,-20284,-77,-25182,10091,14921,-16235,29747,-17890,-24260,10506,-25420,-17948,17882,-31730,-1331,-14721,14286,-12508,-987,21257,16118,-30192,20248,-14872,-17661,17122,-7732,-3298,12422,-24788,4541,-6795,11173,-23173,-1934,-12655,-7994,-23341,-27136,-1613,4807,29297,-240,-17564,-10523,-18296,13850,12068,-17945,-28645,-31089,3208,-23311,28691,-7840,12508,-4782,-4454,-4227,-2175,-3002,28593,17972,-23305,7628,-31751,13188,-32724,24117,25007,-2458,-16316,-27388,-780,-27321,9218,-29417,15345,-2125,-7225,-22452,28173,-31998,30322,-16704,9603,22391,17140,21651,18997,11152,2423,-22968,-26374,31450,-10929,-11860,1413,13859,-31072,25843,-30722,7937,-3679,30563,-7134,9697,9966,7697,-6410,-6979,-22041,-22738,29011,-9802,-11242,-12809,-25515,-11081,-6651,-27686,14935,18702,-5725,25460,8999,2824,-21573,681,3898,-7581,22442,-14103,31183,-18465,-32499,-14012,14244,-11629,-20077,-7256,-21858,3249,-31462,32218,32248,-6159,22725,-31495,-9359,18667,6480,-20178,-7343,-32002,26489,-531,-19369,-5764,2644,24053,-20603,32212,-11231,2225,5476,-23969,-28217,965,31451,-6477,-11362,24824,27342,-17299,-14487,-15056,-21407,25663,12783,3408,-8285,-15477,-5584,-10929,-20297,28744,19945,-13819,6206,14744,31133,24553,-32320,32345,11305,1045,28070,-21293,-18290,16969,27857,-30060,-6487,-8777,-24202,32377,-25054,-29544,-23428,-20756,15832,-24243,4690,22551,21692,25861,-12921,20817,-30669,-32156,30862,-25287,-9917,868,21611,32435,20949,10905,-710,-31335,-5560,-17361,1262,-31050,27802,14359,-22908,12690,10952,32336,2037,-8568,-11177,2,-23967,7443,21821,3956,-22161,-1814,25578,-20693,8743,-7734,-5139,-1863,20937,-5443,-2518,-17967,-2963,-17331,-357,-1772,22482,9699,-18150,-17675,-10604,-19737,14155,-14219,22731,-23234,21716,29490,-22637,29133,-6963,7955,313,-26577,-25216,2211,8663,-22688,-3407,-12692,18741,-17357,10182,-3394,18525,-2172,-29082,-21015,-4391,12070,-1767,-19344,-30703,-31712,-31086,-8785,1
3873,286,5291,-12223,-28799,-4618,3048,-16466,21443,32485,5671,2115,-22966,19455,-21823,-22779,-15457,10715,7280,6820,21255,-32632,101,11970,-30800,-24665,-24372,-11129,-5877,-5429,23084,4306,31682,-31736,-26480,-20533,3740,20233,-8362,27124,-1372,23561,-10004,20467,-25146,-5741,20829,25432,-10639,11405,-30137,2963,-3509,-28719,-3838,29873,1557,15065,10833,-15680,11346,-11298,32317,-20009,7111,21336,-14815,17759,-4673,14200,23246,-11222,15548,25316,25389,11131,-14177,-31113,-28014,3749,3822,22676,-25153,-3573,21239,-26139,22933,-18689,-3424,30509,26131,9506,18860,-25322,-14450,18227,7037,-15640,40,22256,3681,-24057,9305,-18851,11005,16939,30726,14359,-9320,21804,-14593,28773,-24307,-15366,-12028,3991,28777,15032,27069,23964,12374,-16454,-28718,24257,30313,11547,-3986,6049,14195,-4723,13051,-13826,-29080,-19481,740,-21650,675,22421,-27889,9027,2313,-18108,23467,13935,-29634,316,15924,-25724,-17678,6264,3836,-3149,32260,27493,27284,-23860,22920,27690,4669,-13909,4099,-29331,-30104,-22754,23830,3523,29242,-17717,28654,18699,-4499,-31752,13029,-5521,8721,7329,-29224,1878,8520,6896,-20322 diff --git a/tensorflow/lite/micro/integration_tests/seanet/add/add7_input1_int16.csv b/tensorflow/lite/micro/integration_tests/seanet/add/add7_input1_int16.csv new file mode 100644 index 0000000..3d7721b --- /dev/null +++ b/tensorflow/lite/micro/integration_tests/seanet/add/add7_input1_int16.csv @@ -0,0 +1 @@ 
+-18108,-6091,29407,23164,-14912,9973,-10288,-17375,24077,21081,13044,6586,30919,2917,-7549,20231,-31758,6356,-10547,-29095,-26803,5284,-16758,-21701,321,-3500,15729,-4210,-16064,-11731,-20203,-9337,3935,23946,24370,-13012,-7556,31914,11753,-22287,-20215,-18595,30366,8049,27282,-2193,14454,28887,24795,21878,1181,29986,-20342,2286,28705,-30917,24907,-23802,3571,-340,21449,1114,-27693,9673,-19308,18460,26381,4418,4735,32712,-14773,32540,2660,7412,-8081,-31156,-17737,-16859,11862,-29946,-15902,27700,5022,-28988,1497,-15695,27525,-156,15721,21056,4476,31002,-14939,15504,-28717,-25288,-30392,17795,-13114,-13929,4949,6627,-10948,19076,5776,-14183,7826,-30354,-4040,30642,3756,16506,-20257,-713,2468,-15267,22769,-471,14344,11670,5087,-49,22751,-18107,20050,-19873,-9234,22102,-10832,-28497,24452,22337,-27529,5195,-6496,19230,9232,-14289,-21451,-16152,6231,20146,12138,-18713,-12820,-9529,-15776,-15351,-26141,28873,-5838,22513,11242,30688,100,28209,31524,-1542,11826,-14630,23704,17659,27462,-3981,-4165,18776,1832,11717,-19431,-3700,26479,12910,-27369,3596,-25422,-22642,31998,8382,23889,-10216,2728,1460,-32038,12601,-21913,11279,13250,15239,-549,-20827,5915,-9414,22638,-7869,25898,21461,-7140,29625,14425,-8345,-28689,-30974,28716,-4504,2235,-32373,814,-1925,-29528,-7249,3807,-7168,-26404,20984,11929,-3010,25018,29513,-15685,-24269,24801,18467,-21172,-596,-3670,-29170,-15760,11342,11007,-15311,-19578,-589,17134,32766,-11586,-11411,-26880,479,-7859,-11207,-10015,25771,-24096,-32265,22296,29283,14156,-11972,-5464,9499,-27049,-25770,-2863,-26043,13695,22796,-32308,-7877,27172,-23245,-9714,15000,-6062,-10926,13215,32033,9341,14645,21580,4577,31019,1586,9070,-5311,-11894,-28686,-14415,31586,-11647,17709,7628,15466,29954,-9626,-7555,-29471,20461,412,309,-32538,-5981,24978,2916,-11292,-8529,-2201,-2742,14685,11956,-28463,8450,-14518,31191,-7564,21790,22615,7859,-29518,23239,30707,6901,-21937,27986,-6778,23620,-4558,-17082,-3298,-19882,-30278,-26381,1262,6595,28790,11848,-9096,-10945,19
382,-472,17530,-16763,23066,-29697,-30915,-24078,10001,-20195,19163,-8792,10571,-1935,-26542,29792,-12841,-20942,-25911,-32766,-15666,-17479,4416,-12777,-7546,26875,-7207,-2706,14053,-9842,16317,-16316,5328,5009,32143,-30134,27974,27868,-9236,18160,20373,-18572,-24025,-22518,-12255,-1352,-28245,-22731,4238,-17238,-8931,11013,-23660,-11475,28224,-6113,24536,27663,-10100,-9479,-7440,-12405,-9274,-21146,-8097,15988,1137,-10890,-27019,3218,-15525,-12385,-3203,-32049,-20574,14931,21296,-28155,6651,-7830,32434,24421,30401,10460,32385,-4025,14776,-9825,-27769,1165,16120,1786,-222,-14654,-25563,-9450,-14422,28287,10054,-3568,21529,-31373,-514,24412,-16418,-16721,-22015,-10547,-23333,-7311,16319,-28352,26630,30719,-21927,-3402,-12612,3069,29904,26776,5289,27715,-2368,16403,-15517,-16139,10739,745,17126,-15521,-26348,-18057,28140,23585,11781,-5359,-7877,25367,24146,-12734,-25634,9454,-22451,28455,-835,-7257,7608,22607,2333,-16020,-5632,23489,14986,10190,27633,-3704,-15535,-28513,28774,28516,-30512,8173,-2258,12440,30203,13003,1925,22048,-1870,-14477,-18637,-9898,2016,-23433,-17672,13221,19415,23383,8040,2594,-26756,13293,17320,13181,-12587,-10699,-17075,-9699,-334,13421,-9483,-31766,22928,-7033,-30628,6951,-22971,18552,30409,-8744,-29939,-23476,23844,31514,26256,-12213,-25357,2164,-25047,28311,-15528,10335,-13534,-19420,16895,-22546,-5308,-31750,1633,18217,27439,942,26287,13774,-20006,-22445,-12401,-22709,-5892,-15359,18374,204,31400,11958,11080,11835,-20938,-28127,-497,14119,-4961,1629,18107,-25018,-21058,29376,-27477,28057,67,-3417,-18168,-1761,-4167,30199,-9486,15118,16538,974,30784,-1225,31133,-4495,22117,-714,-11152,11990,-528,-20463,10349,30672,-19865,-9551,3879,17587,-26989,26319,-31600,-28348,9350,-20363,25228,13013,-2806,12724,-19722,27376,-2725,-24688,-10394,8023,22201,25437,-9310,19686,12439,-15392,-22965,-3832,-16174,-16969,-16712,-27632,-21149,-8691,7505,-32016,6159,1794,30838,-16091,-3575,-17628,-30536,29648,-31203,23680,10909,-29102,7999,-2212,-1497,-19683,-163
05,4123,-15961,-24793,19318,-18481,-7129,11097,-26552,-12587,-23236,2554,7307,-6998,32479,7242,-4929,14683,-19537,29014,-25718,-1176,22178,9599,31225,-15010,-19778,-14359,-27476,21541,-23322,-22079,-14926,-23675,23484,-1423,-16388,25145,-26474,-29810,-26061,15211,-19202,29890,24825,-22630,16234,8365,-16531,-29156,-6481,-17607,-13970,15404,-23525,-20964,24607,-23717,-10402,11962,32728,-7081,17424,-14043,-29692,29078,-23274,7013,-4739,-23724,2288,26682,-22618,2017,31101,-9585,4620,-2900,8796,-32627,17376,-28649,24416,-205,5921,12792,-31496,12223,23359,-5649,8063,-23571,-21526,-12628,-1406,-4158,-12068,-2110,-3492,3224,28748,-4039,-20559,27157,3666,-917,-14895,-14183,23414,-9811,9182,8095,-23690,1945,20317,1269,-18511,2408,13318,-4089,27367,31288,-9382,-10530,-15575,31010,1807,-17836,25602,24618,17944,32243,-18617,-23816,-5636,-2049,23429,-6999,17264,19217,29217,-22103,-29460,-12379,685,10257,-10115,32278,-28465,-14978,5636,-27952,21271,531,93,11285,-14042,-32055,30354,-14629,-18475,-29501,1232,-23943,-29207,-23012,-19663,27430,-23495,-8849,-4839,-25141,7727,-14424,11105,9783,-8013,-13144,10740,31120,-3932,-17654,-1934,-7316,29287,-14152,-16486,-7072,8151,-16544,2485,4359,-21097,4953,5783,-17212,31921,10335,20811,7200,26898,-31880,-21909,16230,21330,8452,-353,-23733,-27701,-31228,9707,8785,-5789,2437,-24650,32533,7794,-32690,-19671,-2255,-32155,18709,17463,-2468,31053,7432,3151,-22245,11645,24157,7812,-5081,-15234,9802,-20010,12589,-3771,-10381,-16998,15755,-1752,-21908,-18982,5645,2565,27761,27261,-4648,24415,30309,7545,1916,-475,-19516,-20203,13308,11670,3525,32433,-17199,15183,-16899,-30237,-29037,18093,-17764,-7501,23628,11708,12325,-7094,4597,23849,-8313,16466,-32683,-20606,-22700,12557,13054,32537,-11568,5210,-7445,-880,-8221,-6358,31638,22395,28718,9770,-32405,-19041,21050,-2691,1637,11245,25886,-18030,-28744,4576,18668,-3664,-12315,-3298,-5138,-22497,-342,8244,-20673,11788,21539,-16988,-3466,21945,31516,-22906,5952,28509,28581,29050,20164,-24219,5897,1418,4269,
-6237,-10805,5439,12311,-19990,-9186,13209,-26918,13704,27069,25559,-15762,-11571,13387,21113,-1437,30905,24553,15314,-9070,2052,29276,15812,-16391,-8145,-8953,-26162,-7099,31547,2612,-26589,-22401,-24927,13325,2234,1983,-29640,10699,9940,8958,2095,-19397,7328,16224,18240,-17455,21713,-12863,19486,22472,-22686,-25091,3890,-14450,-3176,-4065,28069,-22663,-1912,17212,-23864,-10396,-11620,2197,10230,18547,7990,-30414,13046,7184,-14522,-16152,-2413,2149,-28419,13237,19060,-27316,-14938,22546,-26323,-4050,25769,5727,14890,10188,9345,-23033,31853,31325,-29903,-31532,12707,18430,12446,16682,-19114,-30789,2180,11798,15528,9036,9085,28269,-1829,-32345,-27112,-15761,-20976,15027,13921,-4397,-21114,-9833,12122,-19911,-31406,-9947,-29423,-17827,3470,14993,-13154,-23402,-27530,8213,-28931,11286,-29444,-14780,-2489,-708,25982,12136,-16231,2676,13906,15353,22533,-8875,1551,31757,-19646,25547,9370,-19506,1936,-29770,-27245,18703,-12877,-21039,21217,-3159,13323,-15832,-31953,17739,14360,21217,15490,-10288,-1567,14107,-26359,131,25057,-29746,-12906,9920,-29074,9427,-18065,14180,18428,-3245,12181,-24460,-9056,-31833,-15774,-12513,-11678,16530,-7192,9666,-5407,-13128,-31437,-19315,-10097,-9267,5537,10826,17541,-28830,12819,3120,-13147,22773,-21606,10787,19743,-28883,-9998,-19831,3574,28224,-20869,27347,836,-2850,1547,-28157,7840,-4684,25655,-16066,26854,29016,-12197,-7915,-19283,22094,4980,11657,19198,-25553,-15810,-19528,30610,-31991,4988,-2124,-8082,26794,-7427,24160,29628,2673,-15293,-8903,-9570,-16021,-26772,-8514,-20188,-2886,-26243,26495,822,-18573,11634,-30794,23503,20343,16945,-10686,-1796,-26443,6013,-19284,31243,27092,-14851,-6680,11995,18034,7590,-10526,2926,-16209,-5072,-18340,1260,14873,-2003,22687,50,20225,-7579,-28176,-24125,-14933,-12829,31998,28361,25684,25665,18614,27824,-28259,17074,10617,-24019,-24723,-12389,31692,-619,26551,8009,-14943,-12920,20213,-31039,-17990,-27572,-9929,-13391,-9519,12432,-31653,5153,-19705,-6971,18497,-32576,21733,27686,11544,28901,-3368,-739
4,29254,2045,-24197,-3833,1655,20827,-3574,6929,-21413,-1987,-23705,-23727,-30312,9889,-25815,743,172,15500,16874,-14061,-30652,7585,-12046,-14814,3451,14130,-2397,-24092,1247,29883,26651,24412,-10540,22671,-2948,24476,-11056,17744,-15839,6442,-5347,6304,6975,-8778,30945,-5644,-225,11124,-26485,-187,10528,-23294,-6805,-26663,11020,4172,20617,-4517,8331,26217,-16210,11771,-10812,7377,-14242,-15968,26747,18694,18966,26849,25571,-21424,30253,4501,-1137,-20432,-10670,-3198,9352,11461,28554,-26826,20696,-2604,15719,-5267,6640,-27934,5599,20880,-2423,258,27055,10496,-27126,31358,-25795,-27840,6999,-1423,-22297,-1306,-8150,14992,21819,-18818,-24707,473,-25419,-27081,-18084,2885,-24645,31403,-31032,21831,-22128,15547,14478,18582,-30282,-16350,-17326,28049,6015,27313,31733,13326,-10765,18892,-19571,-24056,922,10929,-22518,18651,27556,29591,-22732,-23400,25241,-21476,-32318,32229,-26466,-23861,-30841,1295,28778,16000,11323,-280,32654,7960,15877,-4676,-19959,-25770,-5703,13430,-2200,-15566,32682,26108,-30884,7980,21171,28609,-3171,-29859,-10848,17215,22754,-16464,-19130,2874,-15023,-240,30782,8245,-27976,-29532,12192,-20595,7353,1706,-20603,28014,25399,26917,30665,29185,-18330,27824,-11327,-9593,25907,-24883,-23963,15533,27730,-2069,-4386,31792,-10883,21914,-19893,-5283,26735,11613,5965,22623,-16098,-17439,9092,-25599,2441,7387,2504,56,10001,28249,27082,528,-4438,-11462,27550,20279,-5014,5228,-3060,-10800,-9424,-15100,21195,22359,30715,7253,-2750,-27608,4866,-17764,-8803,-14985,-11169,28260,-21301,32447,-17149,-21464,21700,25179,-25884,2141,26834,-9089,-29745,23776,13816,-1875,24632,25079,9106,1392,32609,-15682,15420,-16637,-27390,1375,-8831,-26399,17010,28177,-319,6194,-1873,-32134,-23360,-17588,6815,-3404,-13801,-16719,-16269,-30191,7173,-6546,-25667,3885,-6278,-20028,-22293,-32472,-16161,861,28183,-31389,31312,17831,-3657,-18773,-25159,-17814,24015,-19019,23960,31614,16777,-3919,-8841,18453,27079,20328,4879,-30832,5183,29386,-32511,-25354,15528,-10308,-27226,15395,15097,250
15,26058,27222,16361,-6214,10804,-16820,-30460,17661,-28399,-7797,-15815,4035,-26156,-5176,-15389,-1706,-29867,28630,16928,8870,-13359,19974,-28563,-26396,-6450,-12005,20896,-4724,17795,10190,14608,-8940,16008,4699,-26791,-1724,-27043,-8201,3073,-16645,-31678,1053,-18721,4566,10321,11577,-8396,-1036,8436,-30403,-9881,-25000,31884,28373,2130,9720,24298,18401,1015,17911,8113,-26968,-18789,13677,4163,12976,15164,15868,-10039,10479,2162,23920,19361,-25256,16630,-30931,31580,25208,-7706,-1486,-5940,-6587,15907,-32601,26966,8098,12257,-22875,-13467,21171,-19958,32224,-27191,16935,19179,-21164,-28392,-7791,31687,-17650,23835,-25951,-6032,-17661,26712,-7317,9404,-15401,21642,26015,-8524,10810,-18335,-6656,29486,-27846,139,-14756,-14419,-16076,-24420,8652,26132,-19600,21965,-27595,-22217,28857,20950,-7356,31295,18493,-8499,-19935,11012,-21532,-3016,9168,27796,2367,7389,-13425,-17631,-8219,-7830,10491,-14330,-14537,24507,28521,24766,-3778,-22583,-1120,2549,1360,-31758,-32272,-11084,-15035,19364,6726,23707,10167,28636,12602,-27360,14029,18617,-28683,-10108,8786,-24881,-6376,6169,28685,-9681,-1220,23243,29005,30651,-27791,-22349,10078,-24947,32258,-985,-21092,-7301,-20599,26393,-13504,19573,3960,7047,-8700,-8002,22610,-17968,3440,-6545,-7043,-5940,-1879,4371,-29198,-3306,-30171,-22186,22244,19012,-25261,6632,-29945,11343,-31414,-26361,21732,-24793,-15092,28414,-7278,-14652,31053,-30388,30288,11155,-21335,-32190,-26657,14367,18253,-22367,-4576,11319,-31299,24501,6785,-18984,18560,-18190,14823,16397,-13100,5909,8264,-17021,-3,31236,-20739,27128,17834,12244,-376,-3146,12261,5921,-3425,-26387,-14624,-16778,-2000,-25833,-7641,28688,815,-24360,-22473,20020,-13017,-17264,-25196,32360,19444,14087,15325,-207,-26528,-12507,-16863,-367,21365,-13601,-19609,-23021,-17119,-24994,21767,-27828,-8730,-25908,-469,-18046,-17961,6126,-26480,27953,17963,16096,-866,20127,18556,17442,-12329,-31103,-18298,2716,-30814,-16373,6738,14459,-16959,-25847,30183,-27255,-17022,26138,11538,-10618,-8240,28625,10
754,-17425,19194,-6049,-7384,-1038,-20255,24273,-11011,7461,-32670,22379,28993,16819,-15917,-12609,16799,10506,-14830,31898,31238,6828,-6138,26178,13229,-2009,27322,-9670,-15194,20811,30205,29909,-6276,438,29766,-23550,-28095,-28908,11894,17248,2085,-3482,17898,25675,-2419,-19732,2169,-16564,23850,25325,3974,-9566,-24036,-31259,18436,-26741,7696,24878,3451,233,-31239,-23671,-14157,-31113,4536,-21681,323,-6258,-31781,30854,8059,-15325,17743,-18604,2698,-26469,-18723,-28918,-32667,-22453,-27509,-4056,-19210,-9461,31463,22058,-6949,9992,22785,29080,-20803,2260,14990,12143,-22788,22893,32368,-9867,19863,-31093,-29097,-31492,-24612,25144,-24333,-3107,7692,-28491,-22151,28288,-14637,7989,-27982,-19726,2002,-11446,15903,9896,17209,12702,22139,30246,29330,-7250,1644,29925,-23471,-8035,27364,-29143,-22419,-22877,6612,-30829,10714,20802,-31318,6981,-17275,32560,9012,-26426,-15051,-10020,-22390,-30058,5175,-6068,-16425,-529,-25098,23438,-7938,2939,-17170,-27533,-31746,-1234,-29235,-23919,2486,27051,13146,-17194,-7174,17636,19448,944,14315,19405,-6427,8458,-16447,-14958,16743,-20344,-19816,24818,6681,-14885,-26703,20610,-32108,23315,8659,-6829,10064,4682,26802,19951,3445,-18468,-25527,10717,-2268,20401,-26086,19955,8779,-23403,21036,-5544,-3227,1204,17743,-10094,-23422,26102,-15398,6952,-28536,5298,21864,-11734,5770,-21426,-17038,-21882,2179,-4574,4576,2190,10082,-10250,9783,3620,23262,-27242,20877,-2302,1708,3607,17858,-8482,31757,12365,-23679,-18625,16996,7243,14145,7729,1612,-16627,-32208,-27032,-15298,-25616,-15982,-27147,20737,22967,-14981,-11750,28563,-23665,1999,29446,-31402,24011,19221,-28809,14451,-21695,-27659,-29530,11255,-10635,4109,2739,22574,28895,-19185,-27509,12325,14427,1415,-31989,-28334,30223,-2066,5651,3358,2268,-26684,-3499,6756,24801,3228,-19334,24158,-23319,7311,27257,-5622,-20767,21852,-29420,-3996,-28337,-12367,8477,-5235,-32332,-23134,-25328,-28359,8145,12965,17337,18182,8663,-23203,-15573,25321,25453,31948,4777,-20847,30012,29109,29977,-28982,16451,28
068,766,-26249,7132,4619,24081,8475,-26460,15722,-26490,-16785,3758,9497,16831,3012,2074,4577,-21604,13488,-4149,4694,15490,17454,10353,15887,28652,-14852,18841,-6015,-27120,-1977,14404,-4355,-22925,2432,-17421,-25436,32290,-24700,-15393,-29068,-26522,-26567,-31376,-14885,-21887,7623,12568,24923,7132,14670,29389,12717,-21918,-11856,20110,124,-18049,24438,-11905,583,-27117,-27456,22730,-20907,7048,-6726,-2601,-3632,15590,-22606,10534,31672,20335,7103,13191,-3973,-9046,25097,18043,-9137,-22747,-13208,9003,170,-25613,26320,-9287,17296,24874,-14743,20814,-3441,732,-12414,7729,-3770,-19847,24422,11368,-5348,21181,17132,15534,-19611,14197,-7093,-27576,-20231,24818,-7597,-25062,18579,-19955,3267,22386,5773,7195,26852,-5174,-19337,-28268,-14812,-20760,-12240,-15721,-3017,21044,-13707,8872,9567,-1269,-10218,-19732,14826,-20425,20263,-9175,-11285,-752,28508,6527,-152,3509,31911,30726,20143,105,-26763,-2163,-1686,-17087,5552,6935,-15082,19832,18882,-19311,-22048,-12646,5893,8480,9900,-3778,18654,-1125,-23439,9251,6873,-7325,-21502,-30630,29857,13688,2331,178,25088,-22477,2234,24788,-16861,15923,11717,-10052,-23345,-13331,-26668,-19470,13088,31887,-18818,-13709,-4673,-2798,-8944,-19917,24017,-6452,21564,5563,32355,2177,8071,23285,30327,18276,28331,-14582,31156,26375,-26267,14624,-30718,-27903,15798,-21292,-8841,-31925,691,-19914,-23531,11462,-3147,-19384,2817,-1799,30922,13216,-17179,10141,-17137,7037,-8470,22978,-22209,-22501,-9578,-22,-29765,-14748,-25656,-429,14137,10167,-2944,-21169,8906,26506,2177,-6468,-7519,-16459,12567,-3166,-22312,13177,-19315,-14648,-21536,-22783,-25252,-26876,18337,-32249,-1271,-3248,28559,-17534,2908,21920,-19796,11078,-30601,-7677,202,-2020,7498,-15252,4613,8506,11235,2621,-25464,-18025,18421,25179,16184,16867,5785,-21562,-24852,-25343,-4920,-10689,7205,30888,-27212,26684,26926,9857,9657,-8084,-16182,-25296,29739,28715,-24918,-10329,29091,-32198,7171,5600,-10219,-21155,-31207,-20002,23055,-23808,-19944,13086,-22646,-2295,-2405,1127,18525,13193,1900
3,1959,-30203,29282,-10774,-23541,-25040,30352,31706,-13504,-18098,-6497,-9314,-4932,-25200,-19682,6282,-2543,5404,-2776,32261,22046,7793,-18247,-22648,7774,23365,28851,-27309,32025,-31322,-18050,23668,32566,-8953,15685,-3559,-10592,11649,2469,-22352,-24068,-1611,8487,-31015,29159,-7217,-15654,26847,26948,22295,-21891,7353,2086,26146,29235,-15526,-7347,-615,30635,25926,26704,28168,18984,14221,-5842,20300,12506,-24853,-1726,31001,3535,18056,25329,-23385,-5832,-9126,-7750,-21533,23369,30928,-4109,5114,-1493,-14588,18309,29154,-14471,-19887,-9014,-6875,-8679,-18168,-4293,25125,-14038,30510,30812,-8440,31056,14527,12081,-8891,-16683,22799,7478,-22885,-18048,26680,16352,9366,-30227,-6183,21134,22005,27384,-12992,-5731,15744,22177,-11515,900,16952,-14307,32519,-18094,19178,28331,-28603,17323,-20991,1790,-18398,20082,-30288,-30629,-32472,-3668,-29923,-22016,-8141,1630,20692,-14818,-15566,20423,-7970,-12729,-25124,-21175,16467,-29845,-26908,-21235,-14230,7455,10887,-9148,12866,24759,-14175,-17590,20876,-18469,13354,-27241,23342,-9361,31035,27516,27230,17895,17983,19422,23894,-2514,7718,-24122,-6175,-6304,-32562,-21063,-21700,568,-16451,23824,-30443,-4454,23875,20668,-15086,10944,27355,-31023,-9100,338,24846,-9587,-31191,-6213,-13461,-12604,8183,-21816,12255,-10922,19222,16056,28352,-28865,19631,24628,-12469,-25549,24652,-16468,5403,-5620,22363,-16575,-13119,-26956,-25651,-10159,18811,23521,-4798,-14543,-11141,4905,31450,-9160,-30391,-30319,-4810,-22787,-6381,6995,24349,11737,-30804,17052,-5150,12151,25516,22534,13353,18379,8962,2732,-14338,-11198,-16419,-180,21818,29888,20523,27014,-15733,-12343,-3363,2088,-22810,-14816,-11857,-23338,25613,12125,19253,12151,-19771,-22310,14917,32625,-26754,-2087,29998,25067,16089,24871,23872,20460,169,-31801,-27943,-19891,25894,27918,-5057,-17916,4727,-8198,12941,29249,559,-1534,-26661,4856,18402,-15429,27411,-23280,-13544,20190,-3247,-11916,25615,-29015,9705,27103,24677,12926,-21372,-32206,-10765,-23621,-22360,28510,32365,-18392,-7991,2410
4,-6579,26906,18179,-16014,1574,-24908,27949,-16370,3781,-5411,31473,-31317,-32128,-25457,-20330,32168,20862,-18625,-1146,-14333,-17864,6475,7404,-28184,4192,-12813,-18962,22426,31897,-26892,-15243,22352,3998,-5499,-4150,280,3113,17255,-27140,-24774,-16819,-24661,-4126,-31426,2061,13792,24411,-13643,-11810,8916,-11568,-28734,18164,2044,22552,-22717,-13029,31785,23650,13586,7051,21494,-8882,-15513,26409,-12549,6260,21727,4651,28967,21653,30752,22698,-32682,-27951,-14010,-32383,2190,13298,-15885,24861,5059,-3556,-9710,-24902,17010,-28051,-4078,6120,28653,13985,30792,-13082,26904,-13778,-5769,18623,29651,-27350,-15636,-5413,-16833,-23000,-278,-4176,30561,-20101,-20970,-30368,-11241,-11862,26129,32174,16114,-4424,11272,14373,-15023,-9517,406,22289,7822,13766,29577,21818,-16879,18817,7582,1777,-1578,-32132,-12819,-4455,-25440,-6188,-5449,-31405,7045,-3529,9671,-26620,-22946,-26589,8827,14265,-12976,-6538,9641,2428,-5194,27861,930,16401,-32722,-14255,-12501,10026,31081,1093,-26905,-1988,17194,9143,435,13667,-11284,-1730,-13341,-19812,-11790,24244,-2088,31914,-7044,4480,-20456,9224,20492,-8749,7312,24967,17952,-31286,-2472,-4461,-19298,1717,13197,-26059,26270,-29186,-10780,18093,8127,-13692,-27313,-30396,10198,-10108,23926,19847,-25479,26788,32066,-22629,14720,-8306,-26595,14964,30494,24703,-14508,-26072,-999,28326,-15247,-1419,-22024,15009,-17390,2674,31097,15377,23412,-1166,-20022,-27192,-16576,14401,-8344,17837,-14770,24734,-32193,-17948,-3934,-16390,-3222,-24995,22936,10596,16225,-25608,24507,-3571,20632,4196,-23371,16583,-9954,8412,17889,-25973,-26329,1171,-25771,-24658,-22594,-32741,-22942,-31572,-25987,17986,-31265,22962,8313,22616,18203,-16932,4722,-25092,7480,-26352,-16207,17902,23328,8526,31783,-9353,16356,30404,-4321,28145,32506,-25859,-13425,25139,22622,-29144,20210,21803,8710,13691,7824,-31500,-6289,21152,-23202,31101,-2350,19422,7954,-16701,-23603,13054,31464,16794,-25593,-5938,13784,25789,10606,-23102,-11473,10129,-29312,17986,-265,5465,22899,20739,-19322,58
43,9882,-8791,28140,9452,-29696,-30038,6576,14567,-15505,-1539,-30323,17369,32102,14127,6790,-3007,14724,-22999,-19469,21577,-7208,11848,-28501,-25112,11605,-10700,-5841,171,-30083,-16334,25591,-2798,17866,1304,-19299,10709,16395,17065,-25753,14706,3631,-32136,6698,19731,-15345,-3719,-31412,-12634,18474,-32410,25092,8347,28767,-26086,-17804,28139,23856,-18863,-30548,-26328,25839,-18685,4699,-28046,-1177,7068,10979,9888,10119,6440,3747,-7700,-25149,-22236,-18048,8254,-23384,-4605,5949,-15452,-5005,18336,31613,20705,-31432,-32672,9382,-21572,-22835,3619,22193,-16602,-17968,16310,19252,-31207,22928,10069,-26719,4023,2136,24071,-31283,16219,17081,26771,10733,1708,-32445,4205,26011,27113,23714,-15015,2705,14061,4458,-3594,-29635,-5766,9145,28244,-31284,12506,-22814,16508,-18714,5109,25566,30712,12282,17231,-10449,-4673,19715,13870,-26493,-5122,31677,-31776,-937,6810,8091,19933,-4310,553,23113,-5228,430,2563,-8614,-10909,20743,-31495,-9876,27545,24175,-16420,-19295,20307,22550,-26932,27981,17466,29204,-18504,-30752,5835,-5286,-32186,-20676,-18205,18745,-3039,14326,-13096,25729,30970,12533,3637,153,-10314,26521,11121,-14884,-14813,-25653,15943,10218,-26079,28488,22512,-27626,19117,31109,-22652,28055,10541,8648,17757,-26218,-32712,-17763,-23923,28128,503,-26162,-6532,25780,-19267,-17884,891,-11954,23037,-7661,-16160,-10924,20802,29437,-28997,4563,1832,-8252,15854,-15037,-30650,-17713,21261,10595,9814,932,-26316,-26403,778,5348,-21002,18883,9832,32472,-8464,22444,-5957,16565,30606,22395,-9623,-27637,3171,-28899,21752,-23447,-6487,-16427,12631,16892,22868,-23196,5581,-18435,-18033,-32601,-3253,-14996,18618,22200,10646,-29346,-2998,-21731,-30900,-23406,-8464,2058,-17919,-406,-14706,-4109,5018,20368,-9215,-4565,5162,18957,-175,25738,-7685,-18474,15913,-17724,-8375,28338,9134,-19449,5956,22819,-26721,15329,5692,-22974,-21288,-31661,-6636,18065,12286,-28164,934,-7291,2989,-17012,-23088,4534,-341,-21879,-11571,7821,-9626,-1599,14908,28265,-26994,28191,4412,21330,2283,-11699,-15243
,17723,-11303,-13543,17536,6516,-28241,29462,16250,-32441,21073,22111,-6730,2977,1339,-128,-2836,-17501,-26190,-9966,-28001,5448,1062,13807,6652,15024,24265,-13976,5125,12553,-14990,-30259,-21847,19669,9267,-6329,9884,-27821,14161,26795,17108,-24371,21945,-14611,24121,-11388,12528,26545,21499,14301,6602,-12172,-31492,17149,19976,-2572,32157,-6892,24496,16546,-2999,-16123,-12873,24093,-3601,30814,18686,4292,22471,-1047,28387,-6504,-31808,23843,17655,-12606,-15863,13066,-11091,22230,31886,-7233,18953,-25920,-17694,8110,21301,17076,6650,-25472,-29507,17566,8985,18255,22129,10297,10616,-22921,-25644,27881,-13890,-3701,-25493,9216,28551,-9278,397,15376,11801,23699,-13774,-2682,15121,-5000,11809,17831,4715,1488,21044,-338,-23982,28092,-8618,16562,-3023,-12897,5618,-31496,22033,30131,20112,10051,13297,-30898,22187,10625,29715,25705,15540,17353,-12851,-28424,14189,4122,19612,-12725,-19304,-13815,22311,-4905,-20460,-12625,-29154,19055,-20184,15151,30517,-5080,19627,1927,-23222,-32290,5676,32659,19474,-21301,-30792,28602,-13622,-3743,-17884,15739,17958,-28661,22878,21730,5036,-18439,5658,-32449,11211,11985,-16724,3827,7661,29506,-15131,-27695,24561,26180,-21886,-7683,-4397,28865,524,-28413,-7425,28412,16567,25940,-3268,-21736,-3956,12662,12778,-23903,-29260,14366,-10422,13774,25308,2015,24215,-9761,-9648,27549,24594,15143,-13563,25083,21274,-12640,14052,26326,1264,7722,-16093,15705,140,22749,-25421,-17138,20840,-3806,-11221,-8407,1712,-29036,30623,-28141,20247,19319,9577,-11303,-30814,-12522,25512,14708,14999,23749,-16804,23395,-22019,1547,-20051,-19049,-4048,31519,-11963,-6122,9354,27487,32436,8602,-9253,-31259,4347,13409,26015,-16897,-25152,29709,17660,-68,2484,12575,-26905,-19931,-6048,746,11445,7642,-2176,-30504,-9337,6014,-32065,-24887,30208,4803,-17878,417,-5646,13131,17003,-234,5220,24389,13118,1177,-3356,-27514,-11072,16821,-13249,-5810,-28160,-13838,16586,-10247,-21030,-11157,-7107,9798,29641,-31975,23327,14259,-8126,29556,8711,712,-10243,-15290,-15280,21309,16035,-5
533,-23977,24733,-2961,1036,-21210,-15596,-7823,8966,14222,-19359,-10390,1894,31461,-4140,25452,-19398,-15454,13425,24543,11734,28620,-21174,-3331,-11922,-8936,11370,-18331,-19616,19016,-21356,30647,32489,22996,-17670,2412,-7788,-29971,-29128,-20094,-30677,9135,32644,90,23950,-29791,29894,7586,-16202,21070,17549,27256,7100,-24879,-26107,18961,-5234,9382,-13258,-10225,8990,22839,-3429,-11654,15612,-22385,32121,32363,18624,1776,-32505,19248,-26789,32585,-25503,18777,-14078,10621,4732,-10512,10008,-26161,25315,-1006,27431,-1317,25570,-21334,8246,433,-11211,-7333,-20246,28748,-20089,24231,14983,-17553,3534,29725,32260,22339,8974,-14487,4293,-1289,-20747,-13782,13385,-22880,-7891,26628,-11746,-15985,-8863,8151,12214,8428,8679,-12460,-20426,26401,3526,25677,2297,-7671,18621,-5686,6970,-18822,-22380,-3493,18773,-14175,-6187,2015,-32034,30853,-31860,-27604 diff --git a/tensorflow/lite/micro/integration_tests/seanet/add/add8.tflite b/tensorflow/lite/micro/integration_tests/seanet/add/add8.tflite new file mode 100644 index 0000000..a509dfa Binary files /dev/null and b/tensorflow/lite/micro/integration_tests/seanet/add/add8.tflite differ diff --git a/tensorflow/lite/micro/integration_tests/seanet/add/add8_golden_int16.csv b/tensorflow/lite/micro/integration_tests/seanet/add/add8_golden_int16.csv new file mode 100644 index 0000000..1f24f86 --- /dev/null +++ b/tensorflow/lite/micro/integration_tests/seanet/add/add8_golden_int16.csv @@ -0,0 +1 @@ 
+1373,11821,-20054,24398,-22558,24420,11196,-27432,-9849,-32768,9210,-13080,-12036,-17404,-28484,-10783,-9113,17209,-5627,-7124,-4388,-2272,6073,10686,-4569,-30449,30631,6474,-11563,8641,26694,14952,-24556,-8437,-17229,1385,-16238,28598,-8263,-20625,-3509,-16255,17878,-6204,12168,-28104,-32045,1878,24105,15179,1355,-680,-32768,32767,-23156,8251,-11183,-32768,9839,24644,-11925,32767,10687,-29696,14361,-7735,18225,11015,-9308,-21591,23003,19240,21261,9684,30673,-26582,15669,-29752,257,13992,-7172,28445,-26466,5864,-25662,14912,30015,-15313,-16176,-25052,-12896,-4048,18030,32767,-24417,-9891,20229,6921,22504,-26197,-2900,11879,26083,-6956,15741,-3976,3018,-6339,-23382,8528,-2319,-15309,-4462,16380,-4189,25600,30234,18140,-32768,-3821,-23231,-19361,14816,7464,-16579,-18139,20577,26844,-9687,-10797,27171,-23885,2264,-8855,-4932,24295,7371,-6215,-19120,5847,-21687,-13182,-16478,-21056,-2019,17405,6629,1240,-12660,-29511,23839,-23130,-19063,7603,14758,29782,-17775,32065,-11961,-12912,-32768,5990,8978,3985,-5984,-30911,29415,11554,-32768,-32768,25119,16636,26310,12935,-28377,-14662,32767,-27951,1405,6981,-20543,-31544,28934,-13141,-17965,-12442,12890,-19726,26123,-32768,25522,5645,12576,-32768,21947,8583,-163,10778,5045,30614,-3813,2453,-6615,-26535,-32768,-10860,20616,5425,21294,3824,-25149,-21543,-8283,-16616,32767,17470,28189,8604,-3589,24776,-11097,32241,14207,32411,24727,-32150,-26458,32767,17533,7771,22032,-13863,-23951,-18396,31838,-19931,14303,10082,20631,4323,-7220,29351,31323,-12867,19902,3716,-6256,-17543,15400,-1567,3056,-25708,32767,-29115,32767,-32768,14814,9996,-25712,24340,32767,30107,20885,-2113,-11901,20068,-12263,-3823,-22289,4068,29801,-7425,-26599,-22374,-16307,10661,-11548,13066,10218,-8798,25131,-7230,-32768,11574,13760,-23443,2856,23792,25748,-30309,32767,-530,-29752,-24981,6254,13066,23736,21100,-24504,16473,-6484,16290,-17698,-24029,20267,32767,-16906,-25637,30342,19622,7232,-30332,-32768,1650,4195,-14441,28457,5179,-32768,-14569,4310,22459,27669,1
5994,24743,-10895,-16933,-5181,-7863,-20068,26114,32767,11446,-7044,8410,-8534,-10447,24765,5909,-26124,29499,7633,-4304,-26716,18637,31253,8913,-32768,26432,19601,18342,18473,-31366,10192,-18596,-27100,32767,-30856,18944,29550,-12322,-657,-8629,23556,17991,12238,32767,-29858,-3135,26531,11433,-5638,-12022,-24128,-15333,-23898,11432,-18683,-21490,-487,10879,17086,12334,-17351,8584,-11041,-26244,10829,-26724,-32768,28815,19327,22961,1907,28974,3241,9906,-15718,8148,17990,24772,-24905,-7248,19375,-15431,12870,-24188,-32768,-32768,-8260,24530,-26444,-25073,-5897,31275,-17743,1309,32767,8522,-17734,2338,3314,-20204,-27217,-6347,433,-21291,-21717,-8897,-2745,7062,-1115,-16655,-22647,-18865,-14807,28956,-215,-11652,880,30986,23236,-18101,134,-18886,-25837,32767,27246,12665,32767,-3850,15033,-17585,5408,16355,-7054,14655,-29331,-22249,-8258,28754,11649,8979,18997,13058,-4272,5584,10456,9089,-17437,-4470,7191,-29656,-2434,3953,16702,-19756,4614,23942,10114,-23202,18568,-6501,-23134,-2808,-27298,-1774,24637,-25907,24511,29402,-26752,-18264,-14336,-26024,-27508,29110,2545,21395,21919,-24775,21025,-3535,615,6039,11893,-14632,-838,-25357,-26297,1227,-32768,-3000,28120,-4616,-7379,-26901,30988,32767,-19577,-9044,27000,-19040,2656,11774,-18490,-10044,23419,9956,-32768,3850,-2431,-3630,22202,28691,-8336,-10426,-19247,-3260,25295,1675,2316,-15114,18897,31633,-17100,-1147,-30386,29354,25563,-18426,16700,-17544,4754,-25773,26709,21366,24085,15817,7906,-26329,-3901,-22389,9268,27161,10011,-30124,-3402,32767,-25515,-29552,29215,-30619,-25553,-11282,-17924,-25807,22123,18927,31676,-11185,-13974,-32768,-11232,11713,-6116,24664,-2196,22380,6023,-11528,-21352,8253,-6036,9044,32767,-11709,26331,-11166,17099,-23893,32767,25618,23776,-18439,13129,11899,23095,15777,6337,-3911,-3476,13198,-27733,-32768,-22113,22273,32767,-12576,-16809,6201,-32768,-3010,-19713,32767,-24583,27847,30243,-9948,8915,12081,-30554,-13824,-16758,-10751,-14616,15984,-16983,2809,-27102,24368,-15581,-9210,7967,-7355,32767
,-18568,29784,5104,543,-32768,-21141,13093,6846,26993,4386,30287,2945,3569,28238,20536,-8341,13780,-24456,-13183,27372,18059,15343,29604,-5683,-9806,-32768,-21378,-7982,24987,14998,-22890,-32038,-9823,27588,-6420,31898,28382,385,11442,18793,-17502,129,32767,-26354,13829,-2610,31252,-17790,32767,22647,-27406,11827,11462,8819,10465,-2686,-32619,-4090,-20997,6185,-17295,7497,-32768,-24429,-26353,-14639,24520,17412,14502,-16338,11407,3248,23946,23181,12805,3387,27298,-18235,-9063,24520,-24810,32767,28031,24979,-14154,-24733,-6514,-10706,5933,14676,-10639,-21290,22055,-29806,26718,-11699,-28084,10334,435,-14661,1861,-11172,-14877,11301,-19405,19668,-4336,8244,-21342,4147,-31582,-7748,-31908,16326,11169,-8182,16725,-24535,16673,30791,2018,-19565,21795,8241,16786,-29194,-30133,-25495,-26168,-32768,5887,23064,29662,64,-8176,23044,-15037,-17824,-10164,-21801,-32768,-32768,9696,21776,4391,-18356,-1789,29899,-32768,-6984,-25028,-26327,22014,-25952,-23678,18719,17963,-1329,-23495,10426,10992,-17991,-11697,-29285,32043,22588,-7905,23054,-4230,-11033,26584,-21836,8667,23373,-32733,30931,-8495,-1818,-22985,-24792,25518,29807,31225,16018,8802,-22983,17985,-5607,30663,-23464,833,-28496,23395,-14235,17949,-19430,-32768,-3850,-2659,1658,-6968,-5046,-17938,13294,-14874,24896,-8569,-16899,-2889,3114,-6995,24076,32767,-7429,-25114,32767,18100,-14170,-17203,5151,-4011,-13343,32767,29389,5020,-26113,15228,-22999,24049,-20788,-9360,3908,-19606,-13241,-7567,26342,-32768,-28996,21182,-13278,-20538,-22313,-24263,-7908,28487,31548,-8185,32767,-20990,-22946,25036,984,6916,-3926,-10322,-16764,-32768,17740,-4374,-12666,32767,7948,-7748,3760,-13609,-18843,-14884,-23717,-28759,-29444,-7865,6992,1653,-16353,9824,-13767,-18729,-5709,-24703,19858,32767,12790,442,-28891,7045,30293,-901,-17210,-24901,-32768,-18424,6148,-31610,12155,-2833,11709,7381,10930,7508,32767,-29810,21791,-14555,-3511,889,-2399,-29582,9347,12371,632,8935,32767,-17170,-8086,-902,21997,-19897,7322,-9570,-18872,-32768,-31170,21236,-11
417,-6146,-5930,6339,32767,-4958,7945,-16256,-12020,-15181,-25375,10006,32767,10753,22843,-23243,-27646,-6024,-19354,-23607,31472,3509,-26152,-20820,-18859,398,16731,5383,24023,-28518,32767,-25276,9444,16111,4983,-26787,-14920,-200,-32763,-4058,-32768,21139,-32768,-32768,14912,18355,-32768,-26709,409,20297,32767,32767,26690,-32768,1758,5343,22971,4266,26922,16767,-32768,-7990,7472,1191,-32768,32767,32767,16467,-29029,30588,-3921,2451,-32768,14740,7754,1225,-4354,31506,-12184,2849,-24855,16047,12315,30400,15191,32767,11781,-32411,20359,-7990,19886,30080,-20191,-12796,-20536,20006,15270,-29449,-29581,-12272,7344,22666,11231,-18381,6857,-5631,-7025,13856,17104,10582,3722,13275,-1345,-17587,32767,21228,29979,6007,-3426,-5941,25500,2578,17383,6542,3308,17923,19918,-9863,30970,-22961,32767,10947,-2016,-14408,24568,-30785,32767,-24995,-11351,16404,11152,24502,1376,-8649,4782,-2520,20742,-25719,12398,19722,11513,22383,6,-23626,26499,24307,24382,-7389,20726,25617,-11070,-11892,13177,9493,12755,-22866,-9424,22744,5272,-8609,-1353,-22390,17649,-22930,-2788,20799,17684,8923,19793,-32768,-28789,-2094,25030,-13418,3506,9700,-19385,8277,30352,7981,20743,-4982,32767,-18863,-23330,10167,-11741,4347,-32634,10916,-13151,-2781,-30812,28774,-14937,-288,12907,-2826,23375,32767,-19936,-12053,-14359,-32075,-8780,-29608,-5722,28543,16134,-7263,-5030,-18951,2176,29575,12551,-16695,-12197,9575,-20259,-10011,18199,29252,-24418,-13195,-1322,4504,20769,-21354,8743,20558,-22901,32767,8360,-22584,22263,-12039,-3079,-2467,-10989,20688,-27416,30002,-1721,-15026,-21618,-22126,-24113,26839,1979,-11715,8873,11373,2458,-6473,-32768,-19978,16445,-25166,-28221,31182,23299,-9273,-1829,20923,-10149,13310,2889,-14646,31350,-24998,17354,-28460,30618,13291,32767,-1455,-17217,-2445,-23084,-5685,3225,13453,-22063,-11033,-32768,9086,-9501,5827,14936,15015,-22360,-32768,7233,32734,-32768,19881,-5099,-20553,14596,-10235,6123 diff --git a/tensorflow/lite/micro/integration_tests/seanet/add/add8_input0_int16.csv 
b/tensorflow/lite/micro/integration_tests/seanet/add/add8_input0_int16.csv new file mode 100644 index 0000000..1e4d4e5 --- /dev/null +++ b/tensorflow/lite/micro/integration_tests/seanet/add/add8_input0_int16.csv @@ -0,0 +1 @@ +3732,4644,-22938,29935,-18971,17291,6596,-24376,-2185,-30949,3648,-16106,-4445,-17413,-29442,-8582,-9741,14765,-11745,-786,-1537,-3753,11504,13605,953,-23762,32120,11528,-13178,6224,31267,19086,-22736,-7143,-13880,3929,-21387,29750,-14575,-27306,-10251,-15106,14847,-5844,8589,-32089,-29172,984,29468,14992,5439,2477,-29298,29090,-19829,12514,-10252,-31872,12893,24662,-18867,27430,12284,-31279,12532,-12825,11799,7399,-2014,-24155,26777,13442,16800,6589,24824,-21692,13099,-26319,-5389,6946,-2570,21251,-24035,1939,-30430,8613,25017,-8094,-20671,-17234,-17756,3224,14820,30453,-26632,-14698,26322,6930,23983,-18556,-3928,16372,30169,-7901,13140,-10519,4630,916,-25328,6424,-3639,-8873,-8128,20925,1331,30417,25808,17055,-29186,-886,-27666,-24170,15620,7100,-15279,-21679,18572,20853,-6957,-7982,21941,-19599,3191,-8225,-6321,20680,13270,-3857,-14693,2966,-15295,-16304,-17534,-22094,1085,14991,10788,-2262,-15812,-25576,27420,-27081,-13979,7648,9155,24782,-15822,30331,-9929,-19859,-31913,12251,8701,6590,-10422,-32454,22312,9651,-28543,-28935,21320,15988,32736,11720,-25243,-13443,32353,-21155,6641,9281,-14771,-32293,25625,-6067,-18887,-6288,7805,-22601,27595,-32380,25142,-468,7570,-30699,27023,5234,-6841,16914,5815,31672,1424,8554,-6049,-23510,-32752,-13283,19424,6686,22960,1382,-28378,-25511,-14673,-21351,29817,11126,23628,14082,-7473,26044,-9360,27085,7306,31482,24195,-31039,-18783,28233,13627,8296,26008,-17391,-19902,-13167,27031,-19178,14944,12433,18680,-1778,-689,31842,24462,-12922,18733,1844,-6926,-14529,21621,5619,-2872,-19401,29757,-28094,32465,-31277,11762,13334,-23526,24294,29113,22295,23445,1660,-6160,24735,-11633,-9754,-22222,10803,31747,-9149,-32672,-14229,-9191,8780,-10135,10559,13600,-15847,26720,-9754,-27971,13986,11503,-16699,2658,26057,307
19,-26156,30090,-4845,-25663,-25193,4185,7345,21791,19232,-29090,18018,-7166,13377,-22898,-20828,13642,32671,-12923,-21294,22093,11839,4909,-24289,-31362,-4563,9572,-20047,25641,11006,-31230,-12516,7827,26761,30536,15374,27659,-4136,-16708,-3947,-9172,-18333,18680,31622,7822,-6015,825,-13281,-5770,24008,-340,-28910,21985,5231,1277,-27482,15645,23511,2165,-30886,24711,15928,13647,14264,-23839,3640,-24463,-22122,26924,-31862,25537,21786,-8928,-6850,-12495,29059,17152,10945,32330,-23629,-7917,28565,8234,-1645,-12841,-17484,-17074,-27131,18367,-17457,-16824,3925,9014,17413,16036,-12350,8098,-15709,-24052,9577,-31043,-30593,30745,14244,28330,6002,22156,3066,7617,-13497,9188,11534,18327,-28059,-3123,22913,-22313,12612,-30269,-32497,-29557,-10171,18426,-24784,-20079,-12635,23517,-24439,599,29231,811,-10524,4013,2105,-20637,-31557,-153,2933,-21513,-14682,-10140,-5660,11519,130,-10278,-27419,-15389,-15216,20866,1603,-11017,-132,29420,15172,-18918,481,-13645,-26250,32246,29198,7458,31297,3548,11754,-19696,4894,18224,-3469,14825,-32421,-16807,-11732,29970,10404,10821,23027,12479,-4549,10484,9981,11432,-14833,2025,10200,-25792,-8075,-1529,11297,-24190,11114,17748,6442,-23473,24286,-10723,-18465,-4351,-31418,3323,21259,-19676,27678,26550,-25734,-18281,-19909,-29341,-32060,24124,6105,24293,19330,-27776,15650,-3298,3162,9005,12933,-16122,-205,-30417,-28855,5230,-30559,2517,30137,834,-13756,-25009,31926,25677,-23549,-9450,22608,-15005,4664,10319,-13720,-5711,16840,8386,-28750,10417,-979,68,16242,25852,-5574,-12729,-18345,1234,26882,868,-2426,-21073,11705,26877,-11734,-3577,-27356,32305,18132,-17520,19345,-14625,-2338,-18615,23526,15910,23759,11905,4256,-20716,-3018,-28834,8117,29824,5809,-30914,-2423,32013,-20648,-24588,30212,-23404,-26905,-12424,-14441,-28623,23480,12935,32213,-15832,-13101,-29198,-10220,15624,-9229,23464,3333,18562,-1080,-4134,-23294,10529,-11310,4011,32316,-18457,28888,-11294,23217,-27538,32382,24979,29432,-21049,6492,7661,24240,16065,1269,-7034,-8295,8192,-2115
9,-30054,-26970,24801,26185,-9261,-16728,11911,-32627,-1646,-16864,28698,-24557,30809,32619,-14566,11009,5332,-31241,-18688,-13961,-16152,-14939,14131,-20919,5872,-26372,22983,-10545,-14044,11004,-10563,30796,-17070,29550,2307,4745,-30877,-15330,6380,8652,25046,4928,27264,-727,3909,32625,17408,-3012,7827,-29447,-13979,28549,14473,11107,28640,582,-10706,-29275,-26330,-4454,28666,8178,-15493,-31129,-14149,22433,-7704,32339,32639,-362,17290,20107,-18343,963,28358,-31133,19082,-7543,30554,-20147,32747,28661,-25593,14889,11834,11407,5378,-550,-31231,-8638,-15992,3222,-19653,10585,-27771,-21514,-20521,-21587,17563,14814,12579,-12878,13463,5873,21093,20308,10668,1237,26124,-12175,-11402,29933,-26019,31404,29038,23590,-18509,-27267,-2391,-4833,6741,9841,-14768,-13695,15397,-23264,28134,-13755,-32434,3903,-1196,-8449,-4323,-17592,-12566,7157,-13991,13762,-8772,9985,-21066,-196,-27590,-10072,-30593,10566,10963,-10923,21574,-16413,13617,25714,3049,-26107,19339,3621,13919,-29767,-26554,-23655,-32641,-32066,5603,24916,28678,7273,-1845,23287,-11423,-16347,-9857,-28506,-30005,-27359,9391,16324,10250,-24198,-9120,25563,-30064,-13574,-24902,-27645,16149,-30536,-26223,15470,18359,5594,-19123,12214,7127,-11472,-14312,-25190,30194,17432,-14068,25759,-11127,-16806,25728,-23224,13194,20180,-30090,31495,-4903,3166,-20337,-24238,26039,31656,23676,17334,5461,-28175,21825,-7253,30717,-15318,-6135,-27231,21598,-19690,15528,-25814,-30559,2022,-2638,8686,-11033,300,-12903,13839,-13147,30496,-5622,-15222,3166,5348,-9302,16504,27829,-11517,-23823,28569,16614,-7129,-14223,5336,-2629,-10856,31593,28451,2811,-29430,10935,-27881,17519,-15108,-16508,1057,-25693,-7319,-5581,26800,-30744,-25550,14654,-11039,-21973,-28885,-29231,-2202,28411,23046,-468,32471,-14442,-26156,29521,-2479,12415,-1772,-13586,-20968,-30358,9758,1839,-19231,30127,7963,-10660,-1591,-16743,-17180,-9284,-28329,-27344,-31708,-9069,1285,-1592,-10095,11312,-18034,-14202,-3861,-29406,14858,26604,11588,985,-29224,2143,25448,-2361,-21752,
-17054,-32629,-14831,7887,-25629,7780,-7169,10135,4662,13151,8919,29109,-23450,15340,-11111,-1284,6710,-5100,-25555,3391,5205,5699,3259,29717,-9640,-14901,-7307,17929,-12418,4154,-12010,-21833,-28997,-31400,18562,-10098,-6723,-3126,10993,31861,-618,12303,-8298,-16278,-8420,-24009,16646,27461,5203,25580,-29181,-19367,-12797,-13101,-22662,23285,147,-28176,-22527,-11521,6799,10011,1066,15968,-27705,29853,-29804,9306,18954,-1044,-27375,-9611,5188,-24223,-6546,-31554,23520,-29316,-26508,8997,22752,-31030,-30897,-207,24090,31824,30863,26900,-30740,1178,2010,22626,527,29224,13481,-30382,-1311,5092,102,-26698,26966,29754,14755,-28211,22692,-1237,-1094,-31860,9801,7963,-5249,-3580,24773,-17047,8718,-17737,16976,19269,31135,19650,30485,15580,-30678,25664,-5236,16858,28377,-24556,-18934,-21848,17562,8522,-25460,-29218,-12139,1801,28242,15935,-18693,2446,-5134,-3229,5996,13800,8030,3395,14213,-2698,-16227,30541,27113,27804,9341,-7641,-12310,18355,8428,23259,632,7824,23448,14122,-16179,28959,-19956,32071,15785,-5651,-16551,26210,-31946,27719,-23258,-6902,9668,11043,25320,1793,-6839,8659,-7921,19951,-18314,12863,22087,14256,18353,-356,-28882,20476,17762,16795,-11455,20045,22456,-17098,-14712,15867,14097,7919,-16825,-3829,25918,4422,-14639,3065,-21010,17759,-21028,-5292,27511,12748,15439,18877,-27722,-22911,-4110,30741,-12473,-768,8187,-25740,10674,23183,6620,21972,-7203,28644,-16770,-16812,12717,-11475,325,-32218,10894,-5618,-9576,-27533,22977,-11738,2499,9106,2967,17033,27153,-16029,-15210,-16440,-26452,-5995,-27511,-10731,28891,21760,-8068,1852,-23608,-3097,23668,13815,-17500,-15879,5352,-23712,-9048,10751,28654,-27942,-5652,-4496,7164,24516,-27725,2480,22966,-24939,30259,15485,-19728,19588,-12519,3480,-1158,-15251,19507,-23385,23964,-2082,-20866,-14643,-18919,-30240,29376,-4994,-18191,2314,9454,9139,-2046,-29758,-25360,17235,-30089,-25761,32286,20185,-5575,-4727,18231,-5506,15830,-1019,-21081,26110,-26390,13709,-26672,27008,13666,29833,3143,-18601,379,-18462,1855,-3130,19016,-
15473,-16250,-27145,15577,-8452,971,9940,21424,-28468,-30583,9390,25835,-29404,25984,-10530,-24216,11163,-10710,9358 diff --git a/tensorflow/lite/micro/integration_tests/seanet/add/add8_input1_int16.csv b/tensorflow/lite/micro/integration_tests/seanet/add/add8_input1_int16.csv new file mode 100644 index 0000000..b4e9bd0 --- /dev/null +++ b/tensorflow/lite/micro/integration_tests/seanet/add/add8_input1_int16.csv @@ -0,0 +1 @@ +-10556,29787,15589,-27871,-12459,27740,18563,-9420,-32213,-19138,23077,15195,-31573,2579,8359,-8092,4085,8221,27685,-26791,-11879,6835,-24730,-14374,-23581,-24919,-11006,-23134,8776,9353,-23970,-20331,-4412,-4453,-12194,-11372,24975,-9230,28917,32343,30115,-2673,10700,-675,13939,21597,-7943,3651,-27064,-1391,-18129,-13764,-25511,11970,-11231,-19921,-2458,-5364,-14845,-3673,32220,23658,-8569,11282,5936,23477,25557,14269,-30669,14404,-19923,22652,16487,12178,21211,-17596,8999,-10736,24753,28897,-19162,27440,-6813,16380,24677,25483,17567,-29463,22094,-30673,23221,-31338,11467,9277,13285,22548,-29704,-1050,-9775,-29728,4936,-21459,-21742,5164,9125,29310,-7516,-30931,11953,7996,6136,-26025,16745,-22345,-23628,-24881,15025,2117,-13752,-12331,22860,23938,-5690,509,-3289,18187,5804,22392,-10574,-10785,19002,-15335,-4402,-1474,6818,12328,-26977,-9449,-16649,11799,-24902,15631,7041,7626,-13335,8061,-19226,15196,15684,-12973,-19198,20721,-19543,-1305,22449,17613,-5983,2938,-7177,32387,-17421,-28364,-91,-12020,20359,11282,26897,6671,-21459,-16612,13017,420,-32051,3449,-9622,-3213,118,-25765,-23196,-11116,-22348,7886,10308,-29143,6669,-25208,20446,15499,-10272,-24699,-2053,26016,20147,-15691,-25488,13453,29347,-28515,-4117,-9109,-22437,-27144,-1519,-9413,-28054,12223,2226,-6326,-10421,10165,17845,20565,29266,23214,23338,25309,15917,-25308,17577,-9181,-6007,17939,28230,-647,-1271,-191,-29842,31740,14595,-3440,-20671,17511,-14286,-20276,16465,-401,-4900,-11792,5559,26156,-27622,-15216,25557,2119,2232,7677,3854,-10676,-29559,-31323,25582,-23944,27402,-236,9066
,-14372,11242,-16113,-5849,-3348,29298,29909,-14286,-16258,-23471,-23418,-980,26598,2955,-30165,-12890,8652,30543,-32499,-28865,6704,-4519,9104,-16338,32233,-10640,12135,-29555,-12276,7905,-26194,451,-13413,-25581,-13814,21914,19025,-13616,4574,8174,23215,5077,5124,23709,-9187,3942,10415,25413,-10551,26132,28799,-15024,-15332,31794,31314,9146,-22112,-11687,27038,-24219,26721,8215,-26339,-3457,-6890,-16072,-22162,-16624,391,-16410,-28087,1483,-4663,6892,-4690,28831,2174,14245,-3493,32077,22089,-19011,-287,26577,16043,28692,9432,-23875,7258,10421,29438,28331,-13882,3704,13268,17938,15789,-28474,27280,28470,-17905,27021,8916,-31709,29779,-13106,27286,18231,-27599,1062,3892,3533,-22997,21452,-12800,12381,-16709,5349,-25654,9880,17681,-32116,-2659,-17353,-19302,6602,-3927,-18054,-19427,883,22107,-5798,3917,22862,-12962,-12674,19502,-26921,-18260,25711,297,8604,-7460,-5754,25725,24685,17479,-17055,-18360,32467,-745,30227,-5160,-15485,9596,23224,-3432,-18272,30444,29503,31989,2927,12370,32615,-29073,-7695,4827,4845,23023,-26271,-11040,4080,-27723,6756,13199,-20599,-5303,-25573,24256,-12510,3954,31297,-7949,-1090,4315,2356,32018,6226,-1545,-20258,5582,481,-12545,21016,2124,-31923,12206,11834,1469,-10590,-14714,-2884,17843,-20651,16458,-9533,3767,-9397,-20465,637,1841,-22329,560,-11614,-8890,-27865,-14260,-12642,25122,23495,21296,22347,-29213,23706,14649,4572,-27813,19485,-17126,7184,22071,-22119,11241,-23581,-17479,8235,-567,2737,26561,18360,23999,17647,-16000,-15846,8173,16788,20535,-526,-11271,-13903,-6301,8677,-2659,25916,15067,-17757,-10250,-23788,-12956,-23256,29076,-4384,-8638,26555,20296,3102,15346,-14941,-9205,4673,-18247,-17562,25474,5441,-28049,-29397,-6023,-15709,22930,8281,-10913,11632,-1156,-19255,-10655,3298,20482,28367,28822,16269,-21067,10838,-8874,-17237,28902,-1291,-14049,-10257,30447,-27670,10082,20842,-2082,14870,14872,-20807,-3310,31563,3702,-15651,16990,7861,-3802,30811,-17649,-17487,-8638,-27214,9663,6661,-12680,16129,-9183,23550,-6976,22036,-1795,-25
673,-2806,-18882,14558,1673,-23957,13500,30308,-30783,11638,-11197,24037,20780,2785,31335,-15066,2188,-29357,19486,31422,-931,-28299,14150,27225,16871,-8393,-3566,21328,14281,21664,20056,-24821,-15458,24549,-14346,30858,-12722,2094,-25974,3401,-5551,-9633,14486,3471,-17067,-14844,21729,-10492,27872,7471,23373,-9839,25281,3548,5807,19757,-13860,746,2527,-19839,22567,-14498,15157,32574,-3870,-3317,11536,-18531,-29560,-22432,27565,-8928,4615,-3018,8856,15695,-2013,-23380,10740,-22182,24130,25480,5419,-9159,13112,16364,-83,-26681,5381,-11810,24860,-14325,-19798,27758,-29140,682,20426,18610,6575,-6589,-22828,3224,-27344,-8511,6245,-3680,20404,24828,-25079,22042,-1494,12944,2169,-29707,-3966,-15168,-3306,-12647,20808,-8989,-1340,20566,-18916,12110,12876,-14650,-19097,-9237,-21764,32642,26971,8870,6328,-12809,-10690,-12001,9034,9235,7516,8947,1173,-23949,11591,-27343,8926,31587,-8509,2457,21185,14733,-17154,-24226,-4411,19088,19681,-30244,26018,-24379,-10113,10735,23193,26728,7098,-25137,26882,29818,-7976,16549,-20941,23063,20110,-8848,1898,18466,-12923,11334,-1121,22911,-726,13226,-23728,-32083,10989,17802,-4819,31577,7606,19083,10141,6772,-11319,-4360,32237,-6173,388,-11495,-4,-31663,-26606,-4428,-13677,-3885,133,32617,-18009,-27587,-73,20762,-26367,28326,32451,14680,-15581,29954,3097,9626,22543,23910,14626,11534,-4356,-30202,-15772,-9369,15366,-26000,13189,-13710,3448,19345,28214,-15237,30901,26957,-117,9279,-21141,10612,-6832,-6987,-14532,-21620,-8274,1181,-6008,-12463,28593,-8114,13384,26149,-19483,8043,-4710,-32344,30474,-1399,4479,26025,8012,30864,-7424,-25219,296,-31098,18865,-22738,-19493,-4330,-5415,-28217,-11689,-4899,-26165,-10261,11149,29738,25286,19031,-2008,26325,3885,-28849,-10574,-1565,-5485,-8976,23266,-165,8969,18370,16628,24789,25163,-21908,32749,11949,29587,-24071,-7615,-5852,-5708,-10901,25575,-7894,9296,32108,25352,-23902,-3822,32729,-32691,21358,-25690,17440,-23343,15061,-25155,-8887,15838,20903,-26648,32459,-26641,30674,25948,-1225,13915,22947,1574
4,-4553,-22417,23707,-2021,14234,6433,24038,14007,-25093,-7967,20743,-17146,-7282,24253,19059,25081,3411,-2447,5676,20497,16857,6543,22454,-30825,785,-13089,-8533,-21652,17436,19453,5203,10862,-11344,-7288,19814,-23579,25148,-12999,-9268,-25687,12209,-13370,24788,29661,-22341,23621,28383,-30560,31100,28256,14655,-29936,12842,12108,15754,-23328,5553,8644,-4127,3428,-11447,-21360,20447,-18331,-20294,-32570,20450,-27472,-2298,-30615,20249,22800,-15350,29460,-32321,30618,-24633,-707,31357,14249,12701,10530,-29471,-28165,27065,18168,31866,588,21158,23568,-769,-14831,25737,6489,-21133,-23629,-32721,11516,-8410,-13536,-11730,-28253,23795,-21984,-12780,22283,2643,-19612,14052,32033,-4814,-10767,2292,13856,-1834,15793,-14033,11982,-15338,-28160,9361,4609,-28727,31971,13227,5115,640,30208,-11214,15208,-13155,19537,-2049,28246,-2764,24970,23127,-26186,-27628,-6419,-32330,-7659,-21793,6003,-18397,-2882,-26261,-10927,10395,3092,22110,28814,8757,7812,27403,-13221,2721,1206,23267,-27787,-22291,4049,18367,-1362,-15644,32489,12015,9661,894,-6055,6137,-3407,5815,-28935,5178,-15516,19007,28831,27652,-26062,-28335,24997,-20312,-26872,22546,29172,4315,-9847,30361,-22837,16256,11509,-10793,9586,24789,-3981,-17879,27183,-1148,-7165,-2033,-6688,-17721,24080,450,-28762,-3849,-13258,-13721,14429,1589,26521,22582,25193,29758,18932,-30,10145,28083,14114,-13731,-21598,19375,-23190,-23190,-17252,2965,27730,-19201,-2796,-3057,-5008,11399,-32505,19094,-29913,1135,-32411,-21609,9158,-28725,-2194,18253,5227,30729,-11733,27052,4812,-8419,10479,31963,-6438,-25219,-12679,543,17027,2933,-1497,-31156,30242,-9903,21256,-11866,-12196,14807,-25022,24438,28342,-14249,15617,11232,-20014,-10950,-4891,22826,-5688,-27055,4594,-29483,23211,22836,21623,-7379,5970,17947,17146,18116,-2770,30047,-1640,19032,-31197,14127,-12334,-19482,31085,26223,-13569,12288,8829,-32503,-9247,8500,3861,-28348,-5387,20315,2168,-13701,22137,1838,27833,-27472,-10854,30419,-15051,30330,30141,27507,6768,-29692,-18496,-31140,26546,-5867,25
287,-6685,-9394,10276,-14884,12991,8767,-18908,-13004,16739,30388,18436,9757,13473,-3703,11387,-3586,21784,-19975,8589,-12043,-16928,-32279,27432,-26386,-25718,24516,-27721,-29825,-3221,20471,19760,-30329,30081,-19440,-10524,25520,-25112,-29694,24588,19078,12944,3578,-15096 diff --git a/tensorflow/lite/micro/integration_tests/seanet/add/add9.tflite b/tensorflow/lite/micro/integration_tests/seanet/add/add9.tflite new file mode 100644 index 0000000..c74aa5c Binary files /dev/null and b/tensorflow/lite/micro/integration_tests/seanet/add/add9.tflite differ diff --git a/tensorflow/lite/micro/integration_tests/seanet/add/add9_golden_int16.csv b/tensorflow/lite/micro/integration_tests/seanet/add/add9_golden_int16.csv new file mode 100644 index 0000000..ba3cf15 --- /dev/null +++ b/tensorflow/lite/micro/integration_tests/seanet/add/add9_golden_int16.csv @@ -0,0 +1 @@ +-27485,-26380,30703,-305,-28017,-11458,-2185,15038,1917,-26287,-11612,-19385,-23877,-11688,-6454,-7389,-7,-4914,421,-19186,-7114,-32391,-2866,-9646,-32768,-25670,11367,-7103,8965,12314,-32717,-13187,32767,20449,-3973,-7611,9339,-12493,-8891,19856,-32768,2747,17931,-32768,5619,-10619,28662,18637,-24145,-3603,115,11200,16333,10647,17364,-28802,-19394,18698,-11925,32767,-26858,17660,30010,32767,11956,-16194,1126,-248,-212,-32768,-32768,-6687,-32768,8132,-31776,-32768,-8142,-6232,-6991,-5969,2538,-32768,2954,3896,-18768,-22592,11099,10682,-5309,5295,-4114,8743,-18815,13332,6909,-7597,-2563,-19006,-32768,13535,-32315,26794,32767,22899,-13660,-7458,-5189,-6267,9689,13262,14311,23690,32767,25106,-26704,22269,-10258,-8570,17878,-14563,-4615,-28809,8658,7493,-17549,-16350,-18470,32767,-678,11057,-1566,4349,2736,-32768,-22987,22599,-7543,6372,-891,-18068,15893,8052,18214,-3209,218,-6234,31758,-8299,6920,-28524,-32768,-3405,-9177,-2209,14434,26304,3284,-32768,31220,4682,3263,-1134,-21297,-11280,-2195,-21461,-32524,6733,-30070,9065,-6048,28687,-1402,-20272,10540,14655,11134,32767,27654,-2635,2176,-3015,15848,-32768,32767,2
1977,32767,-26185,13169,19171,8505,-11907,24134,-15540,17397,9859,20654,29096,24690,29266,19971,18384,-12591,32767,-11122,-264,22481,-10775,-28977,-8001,-8291,32767,9024,-25351,-6333,18825,13349,25376,32767,-21331,-21496,-24518,-28760,15748,-8061,7307,-5035,-418,8380,24994,32767,31571,543,-11902,5291,13487,-9697,-21530,-32768,10669,23733,32767,28254,-32768,32767,-9691,10782,18305,17195,22841,14618,-1496,32767,15125,27019,4340,-7510,-16184,-32768,-5156,-1695,-5960,15256,-29818,14995,-15947,-19606,-8299,-4605,32767,11697,-5506,5213,-23419,-10900,-18873,-3905,-14757,-4866,-12108,-27379,-2050,-11426,-13485,-6783,9886,12461,-32768,-22219,9485,2962,-19382,22734,32767,30016,-3084,19804,-30258,-12042,-32768,-12062,12188,14423,-2010,11697,1575,-22417,12365,-9475,-2896,-6316,30348,-28223,32767,17932,16956,8562,16301,22805,6294,-5724,17258,1850,-32768,6370,32767,-11997,343,32767,-26893,-31045,14020,6782,23026,32767,8463,-24381,-22149,-32768,-2397,-4670,-19086,9760,-3135,-30573,-26434,-12509,1943,-13787,11424,-21824,-18024,-23894,14121,-27030,32767,-31021,-19783,-1634,-11573,13198,15449,8211,17424,9240,-32768,-14676,7622,15176,18226,17406,2808,-21740,5264,-315,20078,7183,1255,-32768,-32768,-32768,5740,-17647,9572,24830,1061,10160,21412,-2733,13751,16597,10964,-29486,-26725,-19393,26532,-2127,-1942,-21863,-14933,2092,-14591,-32768,17239,-4313,-9179,-7156,-22008,-14164,25202,6481,15454,-7785,-13096,-17674,7515,22938,32767,11077,-3015,-9290,-1753,-16527,-11253,29252,7052,-24150,32767,32767,9439,12046,-14025,-32768,-9358,23925,21259,-7589,6629,-12210,-32038,-3193,31553,5808,28513,-31400,-32768,8304,-26227,25571,25133,8312,26151,-20057,1192,13036,25959,-7557,1161,13901,32767,-23724,-19701,-28229,-10269,2964,-7832,32767,18571,27422,-1598,-11577,-10227,6726,29793,-23253,19925,3908,-32768,-3771,32767,1768,-32768,-29982,-12650,20091,32767,9219,10488,7814,-12288,-5031,-9901,-6130,-7319,-4393,7253,-26594,-1007,16143,-10766,32767,28643,16845,-18013,336,-22183,5134,-21956,12523,2704,-20294,
14002,-9078,-11589,-4059,7776,-32768,-6848,-4971,-20658,29368,5914,-6631,-25339,1289,-2189,13878,-11647,3594,-11130,-26174,32767,24574,84,-32768,-32768,-32768,13441,-32768,7061,-2231,-22549,9546,12597,-855,753,763,653,-25368,9313,-5245,-7728,-28416,-14455,9167,-7482,-22192,-3825,-32768,5973,4926,-32768,-4076,-4678,25333,-1377,12560,-7292,16960,20525,32767,3725,-26471,316,32767,-12479,30119,8338,-29053,32767,26181,-20522,4745,12993,25404,-23122,5054,21549,-32768,-32768,32767,6231,-12743,1628,24712,-20778,-11712,28774,4402,-15279,-17240,5774,-1257,23664,-9169,-29967,-17983,22236,-2739,-13462,-19428,-9440,1825,-1577,-27745,1495,6355,-10833,19259,19497,20846,-29706,-2017,-32768,1199,-1702,-10121,31085,13245,-32768,25009,-13789,992,14109,6143,-10106,15372,-8487,-4845,21951,12631,32767,1712,2174,9774,27156,-480,17346,-24041,32767,9428,-29180,1131,-21110,-6317,-32768,20341,25066,-31186,21862,8981,5542,4869,-4344,518,25052,74,9035,-1824,2034,20105,-8775,-23833,7940,10172,-6530,-18246,-11868,13931,10744,9144,-1758,-18939,-12184,32767,-5716,-27444,30355,-32768,30698,-3081,-8124,32767,26397,20604,-15112,-14929,32767,-32532,2645,11018,-2118,32283,-18546,10255,10066,-800,-28203,-14284,-18777,19150,-5354,25687,-23959,-19424,-17676,14183,21765,-22135,15614,899,-3404,-32768,6239,7622,-8425,-26205,32767,9034,32767,13806,29165,-17688,-22199,-32768,-7687,11339,32724,-17682,-6884,-6646,-19945,-7134,15755,-3769,4061,32767,11515,24616,-2398,27785,-5518,-16593,3528,-1649,-4354,-32768,24354,14419,6119,-32768,29735,18633,32767,24062,-6295,32767,-2965,18676,-10190,-9147,-9767,6281,22011,3029,3861,-23069,-3828,-32768,32767,-20819,-31406,32767,16767,6670,-1190,-24691,22710,1371,9881,10861,16804,19048,-10654,7729,-3879,-32768,29362,23257,-28602,28821,30579,27662,17877,11169,17153,-18984,-5156,9999,32767,-23560,5759,-9024,6869,-16599,9147,-30695,814,4703,-32768,5502,4769,19158,-2484,23394,-19629,-28277,-7839,1473,-32768,-12299,-16595,-31162,-23294,-21599,10288,-3776,10493,-23506,-18486,-9020,206
39,3815,9740,-17090,-13542,1912,32767,-24002,12214,32767,2309,14657,-12690,-16677,-29015,-13272,-5158,11693,-636,3343,8881,10199,11955,9085,-15677,-353,-18840,13124,11624,7434,-14869,2533,-4765,-13147,19418,5596,3396,1097,1545,-14659,1811,-9484,28239,9936,-7465,15767,17056,4876,13559,-21150,-27372,-5181,25726,13658,-9348,-7772,-22584,7749,-18762,-14527,-21580,-28872,-773,13123,32767,-12911,8660,27344,18057,-16014,28238,-13743,21479,-3920,-27969,-587,-5498,-1994,-19337,28789,-5127,-10604,-11009,8338,-11277,11258,-26112,15766,7778,32767,3730,-14283,27481,4966,7889,-21016,-9146,10415,-27953,-30913,-490,15031,10699,32767,-32768,-29395,-8619,-13113,-7862,-28903,-32768,-16441,403,-18973,-31380,-118,-1490,15058,-20011,32767,4767,-28513,32767,-6895,-7708,28231,-27617,-15139,18989,5177,-32768,8986,-9548,32767,-23151,-2072,11926,28113,13533,1819,-17346,-7672,2465,-21231,2325,-16052,2997,-13274,-8211,26501,23937,-10789,-7919,3432,17407,3745,32767,-18045,15571,21594,-17751,-5505,11604,495,1899,32767,2427,-5480,30370,32767,16589,-6061,25762,49,-6735,-8857,17693,14155,-26924,-18918,-14845,-18914,21540,-29682,19149,-18235,16064,-434,-22205,-13172,-32768,-26446,1198,-19119,843,-14236,-17803,6899,9016,-26802,-25542,-19236,-511,-6392,-907,-9848,7059,-20944,-16617,2227,-6637,2256,14810,23625,9768,12424,29727,4810,21419,4155,31610,-26175,-32613,32767,-3211,25262,32767,11621,32767,-6959,14063,13301,12292,11272,-26992,-346,-6652,5840,-18764,-32768,19524,32767,-32768,-23919,11066,-32768,-19975,-32768,19719,4156,-23219,32767,-32768,23538,13993,-18048,-10759,-4266,-4922,13997,4581,5049,27629,32767,-22111,16132,-32768,-32768,359,-16750,7065,-1996,32767,-4657,10026,-7455,31251,-20263,32767,16639,12667,5793,5186,2990,-32768,-4032,-21774,-11788,-15275,291,-390,12752,14858,-18110,3228,-27116,7352,-5127,32767,-7312,7780,-3318,-22670,-6675,192,-25223,-16256,-19315,-10879,-15450,-29411,-15466,6615,-10271,-7246,-12455,-16248,32767,3019,-15291,2422,-32768,20410,-4397,-1520,29918,-32768,13587,-31192,2
3115,15416,-6131,-19130,8050,-4473,-32768,-9847,-21268,-2044,-22985,4986,-15522,-1158,3483,14808,-16015,-22776,3984,32767,6838,-22145,-20848,15575,16631,-32768,7951,20669,15919,32767,8549,-12335,-13461,32767,10098,-13743,23183,9248,-11273,16817,18506,-27512,10001,-11649,24287,-32768,32767,-8015,-26733,-8678,-19815,19880,161,-2123,32767,-7791,-2431,-32768,-32023,-14116,-26596,-32768,29321,32767,32767,-17722,-19790,-23468,-9919,8230,21896,5140,-3723,-21221,-32768,12665,5551,-10293,13017,-12310,31576,755,-3359,15075,12892,10777,32767,-11890,5186,20415,32767,-3323,14092,-7926,-2681,24058,-23125,9016,-13820,-562,15400,8088,-3863,2756,32767,-2306,9741,1601,-1420,-11964 diff --git a/tensorflow/lite/micro/integration_tests/seanet/add/add9_input0_int16.csv b/tensorflow/lite/micro/integration_tests/seanet/add/add9_input0_int16.csv new file mode 100644 index 0000000..a6507e2 --- /dev/null +++ b/tensorflow/lite/micro/integration_tests/seanet/add/add9_input0_int16.csv @@ -0,0 +1 @@ +-13192,-6425,23053,11099,-20836,-3148,18580,-7378,-2621,-13347,8597,-26855,-11613,3336,-17692,7147,-18021,81,-27454,-29992,-13349,-16941,18506,-22468,-29504,-30127,24869,10172,-4874,-5350,-17962,11261,28829,-1944,10639,6952,-15228,-12630,6105,32111,-32096,-14808,5283,-30693,-6505,-5720,15481,28471,-26019,7722,17797,9252,16244,27915,-2910,-30203,-13617,16166,12458,14002,-16246,20705,15186,26825,30294,8380,27191,2306,-29912,-29912,-31240,-19964,-19501,4105,-14543,-29759,2680,-12463,-15463,-24155,-15694,-21631,14825,-23796,-14037,-17694,139,21110,7541,82,-29732,-2342,-2284,-1356,-264,-16690,-11526,-29918,-22764,-10125,-14208,12480,31245,30059,-22501,-1770,-2373,-1643,-14618,3122,22078,23362,17332,14139,-26954,11612,15312,-3054,4605,-12653,20333,-24806,3572,-4575,2273,3391,-19102,21305,3221,25686,14450,13318,12441,-27459,-14588,27742,87,-8333,6685,-4659,8556,14573,-5671,25300,-14916,1164,12114,-29444,83,-15875,-20834,11101,-23216,20569,-8715,27980,-17122,-28877,24444,-12539,32347,23618,-9024,-15963,-1831
9,-29154,-19995,17809,-15392,-1460,19273,27025,-5549,-6852,-979,10054,21131,30115,20373,3760,-4211,14283,-5876,-29944,22802,7335,30981,-13967,-5856,25859,236,-26536,692,-8543,18160,28219,16690,14153,4793,25520,30855,-2747,-13050,20307,7678,8759,24031,-29492,-7317,2561,-28681,27197,9476,-18405,-1376,12413,18795,17909,15239,3311,-9198,-30421,-16369,-3706,-26377,1765,-25830,16969,-3802,24840,22619,21817,23300,-26046,-20099,12537,-2884,-1679,-21713,22460,21425,16456,7829,-30186,28583,-3826,-11579,-5698,-1994,32665,9327,-2078,24158,15005,15767,6597,-26290,9329,-27539,-31706,17535,-15909,-3108,-29308,6575,-26095,5396,-18223,-23149,23988,31144,21401,9182,-19123,-10784,-12192,1149,-2479,-4665,-3750,-10008,-29031,-24723,12846,17447,-13875,27744,-17371,-22915,12183,16406,-13092,9106,30853,10267,17187,7094,-29303,12392,-29574,-19321,-13167,28821,-19121,-4173,-13330,-27916,16049,7778,13368,13970,23709,-16104,32581,19334,7035,32752,16759,23512,16907,2841,-3331,31298,-24482,14350,28834,14101,7249,13703,-25110,-11160,23670,3514,14446,17989,746,-23125,-13959,-27532,-9977,356,-21897,-6507,-649,-9166,-10473,8709,-21643,-25784,-8869,-21257,-14259,-4203,22731,-16799,30326,-14706,1954,-4669,-26549,-13223,30843,-4914,-938,1097,-14423,6144,20780,31031,27243,14606,-22243,-19935,-10840,13310,14174,-13662,-24810,-21931,-17204,-20116,-9105,-4107,6621,3905,27942,363,1680,-16234,28774,3950,17826,-19955,-24426,-25351,18208,-27798,-11704,-22323,-2087,8435,-25669,-23370,7847,8555,2652,-271,-16775,-18715,1815,-3666,-3270,-11669,11049,-24824,15097,11301,30976,25702,12162,-5445,24600,2054,-32181,17275,19226,-15739,22799,21370,17682,25482,1660,-27044,-6550,6636,12866,-8840,-1525,-20859,-24913,24012,22730,20315,30100,-19150,-18718,11594,-9422,19033,31218,5315,22683,-12574,11436,1869,5416,14395,22806,9736,30308,-16515,-30172,-30814,-21436,15992,5960,27662,3471,20379,6156,9827,15968,17316,11332,-16487,11499,3176,-20472,-30580,16122,14647,-20371,-24991,-23643,-3967,18289,8100,-7351,-19536,-2155,14074,-100
99,-17352,1728,14135,-7741,-7311,-13254,-2118,912,30979,17382,25945,-2238,-30938,-17144,26097,-27432,25499,-15343,-7263,18043,-7917,-9100,-13662,-5274,-26607,-31072,-1914,-25772,27059,16757,-7238,-23208,4762,907,17350,5250,21195,-4446,-11956,20529,19720,16188,-15664,-32227,-16263,-5848,-30539,-15099,12378,-28412,-3655,25200,-7951,6643,-12696,31031,-10040,24294,14125,-1976,-24829,-7633,7676,-30882,-31506,5880,-20671,17711,29998,-31512,5959,-4147,13873,18922,15262,1008,11036,11252,29914,22207,-13134,13816,29180,11987,22992,-9615,-28034,26228,7485,-630,17287,32692,2743,-7526,15295,20144,-21126,-20723,30099,8108,5178,22776,16080,-7073,-31503,14086,27856,-26002,3937,14687,17580,10477,-5244,-29701,-9012,7923,10411,-28942,95,-11033,8839,-27251,-20288,20673,-12791,6049,11855,7946,23282,-24321,18588,-28887,14686,17375,-995,22849,21218,-23523,31422,8473,1499,23820,29758,-1601,11033,-22884,-3357,17247,5561,23008,-9912,32685,9977,7618,-14626,30837,-5013,26772,-13660,-25583,-27285,-23354,-29849,-31756,22420,8226,-9635,17025,21257,29626,-18255,5133,-15847,31170,14433,25673,-10961,17120,2271,16313,-724,29086,-15514,-3100,-26350,-21558,24903,-15131,7460,2105,-21962,11676,29096,-2740,-11124,12195,-25419,19437,4289,-17517,32314,15846,3504,-20192,-539,23698,-32292,21910,-13403,-10756,17723,-10841,-17175,24030,2793,-8852,-21149,-13002,18099,8980,12269,-31943,-1380,-5451,27896,30328,-4137,30576,1561,-992,-28486,-12230,20515,-1261,-25459,32423,13329,25426,-6379,7880,-11486,-29923,-19786,14572,18951,12451,-22210,20321,-19329,-32399,11303,28283,6158,-7095,28482,31674,23362,20780,26949,-17699,-23691,6494,5919,-3787,-22443,26171,23547,-1088,-28715,22002,-2371,32671,13992,-11047,28107,23789,17627,3063,11459,-15130,-5982,15858,10356,-21384,-6151,15753,-29509,31781,-14945,-19018,29126,-3901,27106,-14570,-26735,23867,-3926,16588,-15460,-1902,31797,-9544,4558,-23144,-30691,18672,27767,-30337,20739,19599,9167,19477,29253,30638,-7941,18339,14898,30853,-28038,21601,-31964,-12000,6441,16032,-25178,44
80,-4252,-26147,30636,5080,2051,-31561,-42,-15910,-17132,10848,8691,-14927,-29523,-32358,-13886,-4401,-3234,16360,23356,-13521,-1027,3926,-30047,32063,2494,-17847,-31901,-6323,28165,30081,-8201,481,19091,13009,28055,642,9111,-7832,-5735,1783,1840,27678,-5058,1562,11023,118,21928,4605,-22629,-29278,5199,5458,-4533,-7512,8771,-31728,-10617,30378,3908,894,-21208,3525,-18267,28535,7937,30799,15133,5014,-2902,12758,21063,12836,-15520,-20977,-22221,29960,-13439,303,-16503,-28399,15137,-7860,10552,633,-20969,-21728,10849,30151,14074,28703,29740,10154,-16324,7310,-20668,21159,11733,-8137,-11940,1810,-20376,-4407,6843,667,-3894,-10065,-15356,-2012,29570,-24923,6658,12949,21044,-4316,-17060,15236,-3234,-9806,-14360,5577,-5293,-23342,-10034,83,15180,-11970,23237,-29808,-27208,-23112,-32338,-4953,-24338,-32106,9359,19603,-28091,-13384,-3751,22239,20654,-23266,28137,-23628,-24967,29552,-16473,16041,32739,-30998,11067,8292,29009,-32088,-16356,-9368,26656,-31663,19962,-10966,22174,7296,-5520,-20172,2327,10441,-4363,-14740,-11044,9363,-31584,-20410,17256,30051,14106,-27060,-11805,32338,-4073,25508,-23348,3268,-291,-12331,14150,77,577,-4128,21163,17170,-20508,19040,29682,-5479,13183,3305,-21694,-24037,-4683,7201,17263,-17680,-18598,-3817,-17173,30176,-24895,-2243,-16056,29205,-29213,-4693,-31062,-22251,-11488,-26419,752,-19067,-19217,-10615,10568,29245,-13481,-25830,-14435,-22255,-17831,7524,-10872,-7553,-12369,-25703,14424,-20373,-25314,-8831,14975,-5094,-10649,14606,15342,17259,-25558,23411,-16671,-16801,22757,-29579,29108,26647,-10254,27183,-26368,-7377,24791,-833,-11237,-7709,10069,-28373,7585,-32091,-29831,6162,16934,-31203,-12005,5585,-31760,-23040,-28291,3774,-14337,-26385,32025,-32376,6759,20261,-28748,3857,-14068,20871,-8312,30030,19872,8776,24037,-6449,3090,-31218,-17392,21676,-13561,-15858,17709,24377,-16747,21195,-18155,13266,-17779,30414,-9446,23671,850,-8525,21257,-29488,-25322,-9504,-7061,-772,4693,-21206,28407,14781,-22320,-13913,-27050,16342,-27288,23720,-21140,-162
99,-595,-29775,-1940,-15518,-29483,-627,-4902,-17257,10503,-14237,445,-10071,-13406,-7608,4220,-16522,23059,-5297,2699,4203,-27322,2100,2409,10410,19255,-32690,7395,-15208,14945,2379,6162,-8310,12027,-6827,-23435,-23204,-15051,-18121,-23283,-13778,7093,-26206,-15439,2737,1281,-9263,16515,25396,-9218,-32487,-27430,27302,-2329,-24075,2000,583,4857,29754,13090,-11280,-6058,23810,-3554,-22759,29162,29383,5525,4720,32747,-23015,11686,-24865,6358,-21867,20038,15965,-6032,-15378,-25496,15189,12653,26420,28449,7720,7707,-30165,-19778,-12287,-9920,-26622,12281,28284,23942,-9441,-7448,-29938,3434,24226,31098,23218,-25611,1012,-29338,18952,20299,-22300,1555,7550,26680,2209,-3926,-10094,26315,15587,15670,-1678,6874,22290,32186,7321,-8281,-9522,23028,30615,-4071,-6481,-32664,20232,14597,-13401,-29532,17737,27634,-16703,-16338,9041,-32626,-6475 diff --git a/tensorflow/lite/micro/integration_tests/seanet/add/add9_input1_int16.csv b/tensorflow/lite/micro/integration_tests/seanet/add/add9_input1_int16.csv new file mode 100644 index 0000000..832a890 --- /dev/null +++ b/tensorflow/lite/micro/integration_tests/seanet/add/add9_input1_int16.csv @@ -0,0 +1 @@ 
+-23969,-29497,18119,-11971,-16743,-12440,-22336,28307,5357,-22164,-24876,1363,-20664,-19505,9562,-17574,18748,-6824,29154,4901,4136,-26796,-23194,10155,-24496,-3852,-10293,-20330,17370,22459,-26181,-29809,22674,30072,-16524,-17676,28660,-3990,-18550,-6187,-20063,19181,19097,-21386,14478,-8612,23201,-4071,-6037,-12980,-18366,5732,5496,-14452,26847,-8069,-12429,8821,-29324,32101,-19930,2673,25357,25961,-15132,-30935,-26758,-2740,30844,-26761,-25033,11607,-31707,6881,-28449,-26345,-13958,4424,6506,16955,19817,-31597,-11379,30113,-11133,-12572,15080,-7321,-15132,7178,25304,14430,-23431,19699,9751,6952,8482,5071,-31629,29104,-29537,23763,18677,123,4684,-8388,-4647,-6886,28506,14942,-3351,8178,27418,19720,-8573,18459,-30009,-8576,19729,-6806,-27494,-13697,8158,15040,-26437,-25956,-5452,28879,-4283,-11569,-17188,-7897,-9196,-26634,-16346,2123,-10437,17414,-8181,-19934,12894,-4124,30886,-30736,15824,-9762,30953,19264,9406,-22602,-25782,-16225,11577,-24439,28870,6957,22326,-20551,17380,19473,-29193,-26138,-19819,1143,16057,908,-23800,-9301,-25225,13954,-28357,11220,3853,-20674,15476,9637,-6722,20831,16727,-7528,7368,-19002,27854,-24347,26808,22510,30015,-21379,24159,-620,11420,11288,32384,-12423,4961,-15849,10959,25179,28878,13580,-4722,28076,-3687,31910,-23247,-9479,5824,15918,-32131,-13641,18480,29464,2514,-15616,-7255,12901,-1253,16167,31027,-32706,-19911,-1967,-22411,25458,16398,8185,19979,-18236,15452,8429,27500,20596,-23508,10785,28178,5451,-10299,-27785,-25976,-8744,10253,32003,30606,-18163,16098,-9311,26841,31039,25661,-2670,10343,111,25783,5128,20650,-913,17063,-31909,-26003,25930,-20576,8384,24161,-10395,13724,5288,-32510,7584,17779,23518,-16373,-29828,-2407,-12218,-3727,-13197,-6552,-17661,-1819,-12705,-27138,27405,10060,-31868,-27464,28002,-11785,-31458,-6625,329,-13014,-12958,21705,28382,30486,-22120,19781,-11003,-29416,-14352,3565,30423,-10215,17146,20388,16035,-1692,256,-21092,-17887,-23204,16949,-21951,29851,4473,15935,-22346,4915,6808,-8964,-10808,27139,-30
039,-29289,-6199,30200,-31133,-7075,32468,-10752,-30968,-5406,5645,16547,31194,10832,-9373,-15851,-19583,7097,-6776,-3388,20160,-3624,-32395,-25357,-26223,25192,7927,24902,-7809,-9881,-28400,-4290,-19590,28142,-27244,-29169,2619,11759,31866,-10913,16377,24876,11533,-31839,-26526,-11175,-11483,-3356,8672,27004,-9071,18503,-14286,12787,24073,27546,-25781,-30936,-30479,17350,-19931,6238,29994,-27629,13558,27622,13148,-11088,18654,-3515,-19674,-11233,-214,17441,26017,9519,-6753,-18311,-5910,6704,-32361,15479,-14820,-15351,-9534,-12727,52,32680,12705,24602,1467,-29464,1596,-5406,19700,32568,-11559,-16794,-7075,-28010,-24807,18061,22143,-10339,-16743,28599,22977,-5457,-10000,-20965,-21513,-6018,25910,15769,-1209,10680,4963,-18014,-29373,19621,-13178,7780,-23138,-28535,-677,-26167,15264,1980,5869,12260,-14423,-10269,15936,29970,-25349,-22145,8933,20653,-15351,4382,-6648,8227,-12580,-16946,23510,21861,16402,-8599,-26109,-30648,-8798,29071,-14734,15361,2055,-32429,26657,28168,-12821,-29727,-15113,7258,31687,30972,4215,22038,31053,-14612,-21550,-3069,9653,-11838,-20738,18006,-28869,12414,24348,-15717,13238,21197,-3900,-22378,32663,-12583,-20122,-1563,-9363,19679,-20277,426,-4212,-6424,8653,16155,-32728,22949,-4827,-1511,12118,-9330,-1562,-10600,-3189,-3947,977,-21441,-17131,-10639,-23458,28237,13181,-16735,-32631,-31498,-28251,24524,-16878,25401,-15944,-1356,16898,-8951,7103,-5882,14262,-31404,-24346,-12513,-21896,-8543,-13133,-11882,4585,21881,2353,-11367,-27337,-10242,-24467,-29085,-11793,-2100,20309,-21584,1343,-11051,11776,16442,31536,-18005,-22638,-13947,24606,-29594,17381,21445,-10672,24540,28121,-27493,-11485,-16206,31991,-23882,-8987,8591,-31399,-30657,19124,107,-22869,-21474,17159,-21138,16726,24807,-22956,6107,-27745,-7367,-20023,21554,-7118,-10190,-15287,22254,-14594,11659,-26748,-1465,-6697,26201,-16940,-19467,22031,-21155,14077,18473,4360,-15432,-22114,-29156,-13642,-20420,-12847,18855,-3917,-24611,1598,-27733,-200,-5440,-22548,-12196,9602,12178,-3152,12158,11537
,25848,12666,-31039,3022,29320,14566,-8304,-27759,18258,27150,-13397,29952,-4647,22404,-24807,4565,25820,-32748,12266,-9807,-23235,25679,-11301,17205,1919,-14921,-14329,8907,-15030,25214,-29016,-31938,-19383,30100,-5730,2399,6160,-6812,30487,4778,-4603,-3118,-28865,19861,-4988,-26065,28943,-31730,21876,-8690,7090,26156,19714,24615,289,-19916,23695,-11011,-19178,29064,8290,25834,-14155,31944,-11205,-4004,-29472,2420,-12223,7429,-16691,22464,385,-25207,-18572,-9582,-1713,-26056,-10408,-392,-3637,-25760,21287,-10898,-10244,-9445,12829,-1482,29949,25577,31802,-12307,696,-31229,-25712,-4172,31926,-1136,-30594,11003,6365,-21550,-7828,-11580,12955,30909,-17173,9448,-24919,10062,10853,1899,-1920,-8423,-2030,-29570,6165,-4731,9526,-29689,17886,28026,14812,18441,2864,28078,-28828,7270,-17166,-24474,2351,14842,13686,-6624,27554,-25240,-21648,-23525,12393,-13001,-23283,15451,27059,-19065,13533,-6040,6308,5967,-3713,30990,25029,-6969,-4680,5858,18769,-23296,20840,2999,-7656,17946,21544,28402,4249,-15129,-8362,-17774,-26161,-1791,15228,-3132,-14584,20893,21913,-29473,-4140,-15896,-3547,10877,-19697,-24341,1254,24143,29444,32133,-10364,-20955,-22044,-7026,-30938,13860,10918,-28291,-27371,-26261,-2917,-29490,28466,-31173,-29443,18903,-5064,2637,31937,9763,-11994,-26693,26047,-24387,16253,27336,-10374,-9097,-18075,-32359,-31647,-12235,-8931,14124,-29682,9850,10556,2516,16276,-10362,-26297,23069,4632,12590,10263,14915,-12576,-5655,26489,-6983,-4984,3608,3728,23579,-1550,-1094,-27217,-21270,6677,-2123,-15458,24647,10116,-15235,5238,-12857,-15711,16022,4103,32722,-13138,6517,-1418,-5127,-17554,-30909,-30260,-17777,21556,6708,16204,-32359,-17997,6552,14199,-4975,31125,2662,7438,-17590,-29895,11623,-9425,18473,-21937,32366,-7727,-10492,-4625,27420,-13374,-15336,-9876,14696,-2809,23870,9609,-1835,21836,10178,21028,-13880,-18350,19795,-14047,-31958,-759,4817,27135,31778,-21607,-12000,12234,15673,-5629,-14313,-19437,-32293,-19852,3215,-29112,3743,-25191,-844,-3232,20170,31132,-13123,29794,7
688,-27270,4646,-5616,-32285,17416,-23093,-13221,29350,-3346,19239,1201,-23620,27772,15482,10968,8240,-2796,-12946,-7487,-24580,18531,-10523,-5635,14667,9981,18389,1555,-29482,17303,16995,-9783,9377,27775,-450,17956,29923,-11513,-22279,15837,78,6902,32475,-14542,13830,21840,26781,28457,-22036,31897,22648,15781,-7275,16774,1447,-18528,-6591,-16389,-8069,-1863,-14802,28601,-8300,-8364,29811,-25573,14264,-28570,-24317,29142,-27007,21002,475,-13371,-1537,-18073,-22732,-8149,-11360,22464,9792,-9076,-2192,17544,-15854,3960,-11959,12102,29443,29506,16818,18701,28126,25572,-9371,11416,32302,18991,-18551,-27246,30496,26383,4354,30365,26613,30258,17900,26968,-7559,17727,27157,-29000,-10955,20408,116,7664,-19007,20366,30141,-27645,-20313,9366,-17493,-3417,-22562,23119,20623,-4385,26313,-24862,25251,-1895,5167,-18773,8792,-28475,27851,-24974,-13758,28763,29098,-23617,18911,-16270,-27184,-22069,-8860,26197,-21171,24262,11043,-8309,8671,29058,-9288,23910,32655,-7264,7061,15987,-18025,-24433,20826,-19974,-8819,-20149,-4485,21538,-12077,4995,-1609,18909,-9039,-6925,21371,30943,11974,27636,-3932,-104,-7137,16416,-3910,-21645,-21392,3040,-32125,-25523,-21678,19556,-135,-2020,-21476,-5089,27474,9654,-23784,-1053,-30800,25810,-8538,-12920,20996,-28884,10939,-26955,16150,18669,-14824,-17590,-1476,971,-27917,10645,-13506,16058,-7293,21180,-28674,25688,20847,17463,-23300,-21600,-11725,31248,18974,3439,-46,-7054,25237,-26817,8824,27744,16780,25683,-1898,-5178,-12158,31089,17551,4838,1445,-17898,-21213,18154,-8701,-13782,1555,9903,26696,-30881,29253,-27611,-30390,4103,-642,11459,-12949,-30412,18652,-18722,-11356,-17099,-23338,-6574,-26155,-19072,27436,28112,29107,-14482,-19393,-1029,-17180,-13927,-2335,-17117,21551,-30162,-18275,-2355,-13514,9093,16237,-24744,15542,-1263,-521,31184,-9707,-1442,29082,-14563,-42,4802,18805,-12178,27949,-961,-27647,1133,-27482,19113,15042,-21830,5930,25043,25440,-14681,23083,14222,30367,-7214,32012,-9671 diff --git 
a/tensorflow/lite/micro/integration_tests/seanet/add/integration_tests.cc b/tensorflow/lite/micro/integration_tests/seanet/add/integration_tests.cc new file mode 100644 index 0000000..946b314 --- /dev/null +++ b/tensorflow/lite/micro/integration_tests/seanet/add/integration_tests.cc @@ -0,0 +1,278 @@ +/* Copyright 2022 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#include + +#include "tensorflow/lite/c/common.h" +#include "tensorflow/lite/micro/integration_tests/seanet/add/add0_golden_int16_test_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/add/add0_input0_int16_test_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/add/add0_input1_int16_test_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/add/add0_model_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/add/add10_golden_int16_test_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/add/add10_input0_int16_test_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/add/add10_input1_int16_test_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/add/add10_model_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/add/add11_golden_int16_test_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/add/add11_input0_int16_test_data.h" +#include 
"tensorflow/lite/micro/integration_tests/seanet/add/add11_input1_int16_test_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/add/add11_model_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/add/add12_golden_int16_test_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/add/add12_input0_int16_test_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/add/add12_input1_int16_test_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/add/add12_model_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/add/add13_golden_int16_test_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/add/add13_input0_int16_test_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/add/add13_input1_int16_test_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/add/add13_model_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/add/add14_golden_int16_test_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/add/add14_input0_int16_test_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/add/add14_input1_int16_test_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/add/add14_model_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/add/add15_golden_int16_test_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/add/add15_input0_int16_test_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/add/add15_input1_int16_test_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/add/add15_model_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/add/add16_golden_int16_test_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/add/add16_input0_int16_test_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/add/add16_input1_int16_test_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/add/add16_model_data.h" +#include 
"tensorflow/lite/micro/integration_tests/seanet/add/add1_golden_int16_test_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/add/add1_input0_int16_test_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/add/add1_input1_int16_test_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/add/add1_model_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/add/add2_golden_int16_test_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/add/add2_input0_int16_test_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/add/add2_input1_int16_test_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/add/add2_model_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/add/add3_golden_int16_test_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/add/add3_input0_int16_test_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/add/add3_input1_int16_test_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/add/add3_model_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/add/add4_golden_int16_test_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/add/add4_input0_int16_test_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/add/add4_input1_int16_test_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/add/add4_model_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/add/add5_golden_int16_test_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/add/add5_input0_int16_test_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/add/add5_input1_int16_test_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/add/add5_model_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/add/add6_golden_int16_test_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/add/add6_input0_int16_test_data.h" +#include 
"tensorflow/lite/micro/integration_tests/seanet/add/add6_input1_int16_test_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/add/add6_model_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/add/add7_golden_int16_test_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/add/add7_input0_int16_test_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/add/add7_input1_int16_test_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/add/add7_model_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/add/add8_golden_int16_test_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/add/add8_input0_int16_test_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/add/add8_input1_int16_test_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/add/add8_model_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/add/add9_golden_int16_test_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/add/add9_input0_int16_test_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/add/add9_input1_int16_test_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/add/add9_model_data.h" +#include "tensorflow/lite/micro/micro_log.h" +#include "tensorflow/lite/micro/micro_profiler.h" +#include "tensorflow/lite/micro/python/interpreter/src/python_ops_resolver.h" +#include "tensorflow/lite/micro/recording_micro_allocator.h" +#include "tensorflow/lite/micro/recording_micro_interpreter.h" +#include "tensorflow/lite/micro/system_setup.h" +#include "tensorflow/lite/micro/testing/micro_test.h" + +constexpr size_t kTensorArenaSize = 1024 * 100; +uint8_t tensor_arena[kTensorArenaSize]; + +namespace tflite { +namespace micro { +namespace { + +void RunModel(const uint8_t* model, const int16_t* input0, + const uint32_t input0_size, const int16_t* input1, + const uint32_t input1_size, const int16_t* golden, + const uint32_t golden_size, const 
char* name) { + InitializeTarget(); + MicroProfiler profiler; + PythonOpsResolver op_resolver; + + MicroInterpreter interpreter(GetModel(model), op_resolver, tensor_arena, + kTensorArenaSize, nullptr, &profiler); + interpreter.AllocateTensors(); + TfLiteTensor* input_tensor0 = interpreter.input(0); + TF_LITE_MICRO_EXPECT_EQ(input_tensor0->bytes, input0_size * sizeof(int16_t)); + memcpy(interpreter.input(0)->data.raw, input0, input_tensor0->bytes); + TfLiteTensor* input_tensor1 = interpreter.input(1); + TF_LITE_MICRO_EXPECT_EQ(input_tensor1->bytes, input1_size * sizeof(int16_t)); + memcpy(interpreter.input(1)->data.raw, input1, input_tensor1->bytes); + if (kTfLiteOk != interpreter.Invoke()) { + TF_LITE_MICRO_EXPECT(false); + return; + } + profiler.Log(); + MicroPrintf(""); + + TfLiteTensor* output_tensor = interpreter.output(0); + TF_LITE_MICRO_EXPECT_EQ(output_tensor->bytes, golden_size * sizeof(int16_t)); + int16_t* output = ::tflite::GetTensorData(output_tensor); + for (uint32_t i = 0; i < golden_size; i++) { + // TODO(b/205046520): Better understand why TfLite and TFLM can sometimes be + // off by 1. 
+ TF_LITE_MICRO_EXPECT_NEAR(golden[i], output[i], 1); + } +} + +} // namespace +} // namespace micro +} // namespace tflite + +TF_LITE_MICRO_TESTS_BEGIN + +TF_LITE_MICRO_TEST(add0_test) { + tflite::micro::RunModel( + g_add0_model_data, g_add0_input0_int16_test_data, + g_add0_input0_int16_test_data_size, g_add0_input1_int16_test_data, + g_add0_input1_int16_test_data_size, g_add0_golden_int16_test_data, + g_add0_golden_int16_test_data_size, "add0 test"); +} + +TF_LITE_MICRO_TEST(add1_test) { + tflite::micro::RunModel( + g_add1_model_data, g_add1_input0_int16_test_data, + g_add1_input0_int16_test_data_size, g_add1_input1_int16_test_data, + g_add1_input1_int16_test_data_size, g_add1_golden_int16_test_data, + g_add1_golden_int16_test_data_size, "add1 test"); +} + +TF_LITE_MICRO_TEST(add2_test) { + tflite::micro::RunModel( + g_add2_model_data, g_add2_input0_int16_test_data, + g_add2_input0_int16_test_data_size, g_add2_input1_int16_test_data, + g_add2_input1_int16_test_data_size, g_add2_golden_int16_test_data, + g_add2_golden_int16_test_data_size, "add2 test"); +} + +TF_LITE_MICRO_TEST(add3_test) { + tflite::micro::RunModel( + g_add3_model_data, g_add3_input0_int16_test_data, + g_add3_input0_int16_test_data_size, g_add3_input1_int16_test_data, + g_add3_input1_int16_test_data_size, g_add3_golden_int16_test_data, + g_add3_golden_int16_test_data_size, "add3 test"); +} + +TF_LITE_MICRO_TEST(add4_test) { + tflite::micro::RunModel( + g_add4_model_data, g_add4_input0_int16_test_data, + g_add4_input0_int16_test_data_size, g_add4_input1_int16_test_data, + g_add4_input1_int16_test_data_size, g_add4_golden_int16_test_data, + g_add4_golden_int16_test_data_size, "add4 test"); +} + +TF_LITE_MICRO_TEST(add5_test) { + tflite::micro::RunModel( + g_add5_model_data, g_add5_input0_int16_test_data, + g_add5_input0_int16_test_data_size, g_add5_input1_int16_test_data, + g_add5_input1_int16_test_data_size, g_add5_golden_int16_test_data, + g_add5_golden_int16_test_data_size, "add5 test"); +} + 
+TF_LITE_MICRO_TEST(add6_test) { + tflite::micro::RunModel( + g_add6_model_data, g_add6_input0_int16_test_data, + g_add6_input0_int16_test_data_size, g_add6_input1_int16_test_data, + g_add6_input1_int16_test_data_size, g_add6_golden_int16_test_data, + g_add6_golden_int16_test_data_size, "add6 test"); +} + +TF_LITE_MICRO_TEST(add7_test) { + tflite::micro::RunModel( + g_add7_model_data, g_add7_input0_int16_test_data, + g_add7_input0_int16_test_data_size, g_add7_input1_int16_test_data, + g_add7_input1_int16_test_data_size, g_add7_golden_int16_test_data, + g_add7_golden_int16_test_data_size, "add7 test"); +} + +TF_LITE_MICRO_TEST(add8_test) { + tflite::micro::RunModel( + g_add8_model_data, g_add8_input0_int16_test_data, + g_add8_input0_int16_test_data_size, g_add8_input1_int16_test_data, + g_add8_input1_int16_test_data_size, g_add8_golden_int16_test_data, + g_add8_golden_int16_test_data_size, "add8 test"); +} + +TF_LITE_MICRO_TEST(add9_test) { + tflite::micro::RunModel( + g_add9_model_data, g_add9_input0_int16_test_data, + g_add9_input0_int16_test_data_size, g_add9_input1_int16_test_data, + g_add9_input1_int16_test_data_size, g_add9_golden_int16_test_data, + g_add9_golden_int16_test_data_size, "add9 test"); +} + +TF_LITE_MICRO_TEST(add10_test) { + tflite::micro::RunModel( + g_add10_model_data, g_add10_input0_int16_test_data, + g_add10_input0_int16_test_data_size, g_add10_input1_int16_test_data, + g_add10_input1_int16_test_data_size, g_add10_golden_int16_test_data, + g_add10_golden_int16_test_data_size, "add10 test"); +} + +TF_LITE_MICRO_TEST(add11_test) { + tflite::micro::RunModel( + g_add11_model_data, g_add11_input0_int16_test_data, + g_add11_input0_int16_test_data_size, g_add11_input1_int16_test_data, + g_add11_input1_int16_test_data_size, g_add11_golden_int16_test_data, + g_add11_golden_int16_test_data_size, "add11 test"); +} + +TF_LITE_MICRO_TEST(add12_test) { + tflite::micro::RunModel( + g_add12_model_data, g_add12_input0_int16_test_data, + 
g_add12_input0_int16_test_data_size, g_add12_input1_int16_test_data, + g_add12_input1_int16_test_data_size, g_add12_golden_int16_test_data, + g_add12_golden_int16_test_data_size, "add12 test"); +} + +TF_LITE_MICRO_TEST(add13_test) { + tflite::micro::RunModel( + g_add13_model_data, g_add13_input0_int16_test_data, + g_add13_input0_int16_test_data_size, g_add13_input1_int16_test_data, + g_add13_input1_int16_test_data_size, g_add13_golden_int16_test_data, + g_add13_golden_int16_test_data_size, "add13 test"); +} + +TF_LITE_MICRO_TEST(add14_test) { + tflite::micro::RunModel( + g_add14_model_data, g_add14_input0_int16_test_data, + g_add14_input0_int16_test_data_size, g_add14_input1_int16_test_data, + g_add14_input1_int16_test_data_size, g_add14_golden_int16_test_data, + g_add14_golden_int16_test_data_size, "add14 test"); +} + +TF_LITE_MICRO_TEST(add15_test) { + tflite::micro::RunModel( + g_add15_model_data, g_add15_input0_int16_test_data, + g_add15_input0_int16_test_data_size, g_add15_input1_int16_test_data, + g_add15_input1_int16_test_data_size, g_add15_golden_int16_test_data, + g_add15_golden_int16_test_data_size, "add15 test"); +} + +TF_LITE_MICRO_TEST(add16_test) { + tflite::micro::RunModel( + g_add16_model_data, g_add16_input0_int16_test_data, + g_add16_input0_int16_test_data_size, g_add16_input1_int16_test_data, + g_add16_input1_int16_test_data_size, g_add16_golden_int16_test_data, + g_add16_golden_int16_test_data_size, "add16 test"); +} + +TF_LITE_MICRO_TESTS_END diff --git a/tensorflow/lite/micro/integration_tests/seanet/add/integration_tests.cc.orig b/tensorflow/lite/micro/integration_tests/seanet/add/integration_tests.cc.orig new file mode 100644 index 0000000..8f076f8 --- /dev/null +++ b/tensorflow/lite/micro/integration_tests/seanet/add/integration_tests.cc.orig @@ -0,0 +1,277 @@ +/* Copyright 2021 The TensorFlow Authors. All Rights Reserved. 
+ +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#include + +#include "third_party/tensorflow/lite/c/common.h" +#include "third_party/tflite_micro/tensorflow/lite/micro/all_ops_resolver.h" +#include "third_party/tflite_micro/tensorflow/lite/micro/integration_tests/seanet/add/add0_golden_int16_test_data.h" +#include "third_party/tflite_micro/tensorflow/lite/micro/integration_tests/seanet/add/add0_input0_int16_test_data.h" +#include "third_party/tflite_micro/tensorflow/lite/micro/integration_tests/seanet/add/add0_input1_int16_test_data.h" +#include "third_party/tflite_micro/tensorflow/lite/micro/integration_tests/seanet/add/add0_model_data.h" +#include "third_party/tflite_micro/tensorflow/lite/micro/integration_tests/seanet/add/add10_golden_int16_test_data.h" +#include "third_party/tflite_micro/tensorflow/lite/micro/integration_tests/seanet/add/add10_input0_int16_test_data.h" +#include "third_party/tflite_micro/tensorflow/lite/micro/integration_tests/seanet/add/add10_input1_int16_test_data.h" +#include "third_party/tflite_micro/tensorflow/lite/micro/integration_tests/seanet/add/add10_model_data.h" +#include "third_party/tflite_micro/tensorflow/lite/micro/integration_tests/seanet/add/add11_golden_int16_test_data.h" +#include "third_party/tflite_micro/tensorflow/lite/micro/integration_tests/seanet/add/add11_input0_int16_test_data.h" +#include 
"third_party/tflite_micro/tensorflow/lite/micro/integration_tests/seanet/add/add11_input1_int16_test_data.h" +#include "third_party/tflite_micro/tensorflow/lite/micro/integration_tests/seanet/add/add11_model_data.h" +#include "third_party/tflite_micro/tensorflow/lite/micro/integration_tests/seanet/add/add12_golden_int16_test_data.h" +#include "third_party/tflite_micro/tensorflow/lite/micro/integration_tests/seanet/add/add12_input0_int16_test_data.h" +#include "third_party/tflite_micro/tensorflow/lite/micro/integration_tests/seanet/add/add12_input1_int16_test_data.h" +#include "third_party/tflite_micro/tensorflow/lite/micro/integration_tests/seanet/add/add12_model_data.h" +#include "third_party/tflite_micro/tensorflow/lite/micro/integration_tests/seanet/add/add13_golden_int16_test_data.h" +#include "third_party/tflite_micro/tensorflow/lite/micro/integration_tests/seanet/add/add13_input0_int16_test_data.h" +#include "third_party/tflite_micro/tensorflow/lite/micro/integration_tests/seanet/add/add13_input1_int16_test_data.h" +#include "third_party/tflite_micro/tensorflow/lite/micro/integration_tests/seanet/add/add13_model_data.h" +#include "third_party/tflite_micro/tensorflow/lite/micro/integration_tests/seanet/add/add14_golden_int16_test_data.h" +#include "third_party/tflite_micro/tensorflow/lite/micro/integration_tests/seanet/add/add14_input0_int16_test_data.h" +#include "third_party/tflite_micro/tensorflow/lite/micro/integration_tests/seanet/add/add14_input1_int16_test_data.h" +#include "third_party/tflite_micro/tensorflow/lite/micro/integration_tests/seanet/add/add14_model_data.h" +#include "third_party/tflite_micro/tensorflow/lite/micro/integration_tests/seanet/add/add15_golden_int16_test_data.h" +#include "third_party/tflite_micro/tensorflow/lite/micro/integration_tests/seanet/add/add15_input0_int16_test_data.h" +#include "third_party/tflite_micro/tensorflow/lite/micro/integration_tests/seanet/add/add15_input1_int16_test_data.h" +#include 
"third_party/tflite_micro/tensorflow/lite/micro/integration_tests/seanet/add/add15_model_data.h" +#include "third_party/tflite_micro/tensorflow/lite/micro/integration_tests/seanet/add/add16_golden_int16_test_data.h" +#include "third_party/tflite_micro/tensorflow/lite/micro/integration_tests/seanet/add/add16_input0_int16_test_data.h" +#include "third_party/tflite_micro/tensorflow/lite/micro/integration_tests/seanet/add/add16_input1_int16_test_data.h" +#include "third_party/tflite_micro/tensorflow/lite/micro/integration_tests/seanet/add/add16_model_data.h" +#include "third_party/tflite_micro/tensorflow/lite/micro/integration_tests/seanet/add/add1_golden_int16_test_data.h" +#include "third_party/tflite_micro/tensorflow/lite/micro/integration_tests/seanet/add/add1_input0_int16_test_data.h" +#include "third_party/tflite_micro/tensorflow/lite/micro/integration_tests/seanet/add/add1_input1_int16_test_data.h" +#include "third_party/tflite_micro/tensorflow/lite/micro/integration_tests/seanet/add/add1_model_data.h" +#include "third_party/tflite_micro/tensorflow/lite/micro/integration_tests/seanet/add/add2_golden_int16_test_data.h" +#include "third_party/tflite_micro/tensorflow/lite/micro/integration_tests/seanet/add/add2_input0_int16_test_data.h" +#include "third_party/tflite_micro/tensorflow/lite/micro/integration_tests/seanet/add/add2_input1_int16_test_data.h" +#include "third_party/tflite_micro/tensorflow/lite/micro/integration_tests/seanet/add/add2_model_data.h" +#include "third_party/tflite_micro/tensorflow/lite/micro/integration_tests/seanet/add/add3_golden_int16_test_data.h" +#include "third_party/tflite_micro/tensorflow/lite/micro/integration_tests/seanet/add/add3_input0_int16_test_data.h" +#include "third_party/tflite_micro/tensorflow/lite/micro/integration_tests/seanet/add/add3_input1_int16_test_data.h" +#include "third_party/tflite_micro/tensorflow/lite/micro/integration_tests/seanet/add/add3_model_data.h" +#include 
"third_party/tflite_micro/tensorflow/lite/micro/integration_tests/seanet/add/add4_golden_int16_test_data.h" +#include "third_party/tflite_micro/tensorflow/lite/micro/integration_tests/seanet/add/add4_input0_int16_test_data.h" +#include "third_party/tflite_micro/tensorflow/lite/micro/integration_tests/seanet/add/add4_input1_int16_test_data.h" +#include "third_party/tflite_micro/tensorflow/lite/micro/integration_tests/seanet/add/add4_model_data.h" +#include "third_party/tflite_micro/tensorflow/lite/micro/integration_tests/seanet/add/add5_golden_int16_test_data.h" +#include "third_party/tflite_micro/tensorflow/lite/micro/integration_tests/seanet/add/add5_input0_int16_test_data.h" +#include "third_party/tflite_micro/tensorflow/lite/micro/integration_tests/seanet/add/add5_input1_int16_test_data.h" +#include "third_party/tflite_micro/tensorflow/lite/micro/integration_tests/seanet/add/add5_model_data.h" +#include "third_party/tflite_micro/tensorflow/lite/micro/integration_tests/seanet/add/add6_golden_int16_test_data.h" +#include "third_party/tflite_micro/tensorflow/lite/micro/integration_tests/seanet/add/add6_input0_int16_test_data.h" +#include "third_party/tflite_micro/tensorflow/lite/micro/integration_tests/seanet/add/add6_input1_int16_test_data.h" +#include "third_party/tflite_micro/tensorflow/lite/micro/integration_tests/seanet/add/add6_model_data.h" +#include "third_party/tflite_micro/tensorflow/lite/micro/integration_tests/seanet/add/add7_golden_int16_test_data.h" +#include "third_party/tflite_micro/tensorflow/lite/micro/integration_tests/seanet/add/add7_input0_int16_test_data.h" +#include "third_party/tflite_micro/tensorflow/lite/micro/integration_tests/seanet/add/add7_input1_int16_test_data.h" +#include "third_party/tflite_micro/tensorflow/lite/micro/integration_tests/seanet/add/add7_model_data.h" +#include "third_party/tflite_micro/tensorflow/lite/micro/integration_tests/seanet/add/add8_golden_int16_test_data.h" +#include 
"third_party/tflite_micro/tensorflow/lite/micro/integration_tests/seanet/add/add8_input0_int16_test_data.h" +#include "third_party/tflite_micro/tensorflow/lite/micro/integration_tests/seanet/add/add8_input1_int16_test_data.h" +#include "third_party/tflite_micro/tensorflow/lite/micro/integration_tests/seanet/add/add8_model_data.h" +#include "third_party/tflite_micro/tensorflow/lite/micro/integration_tests/seanet/add/add9_golden_int16_test_data.h" +#include "third_party/tflite_micro/tensorflow/lite/micro/integration_tests/seanet/add/add9_input0_int16_test_data.h" +#include "third_party/tflite_micro/tensorflow/lite/micro/integration_tests/seanet/add/add9_input1_int16_test_data.h" +#include "third_party/tflite_micro/tensorflow/lite/micro/integration_tests/seanet/add/add9_model_data.h" +#include "third_party/tflite_micro/tensorflow/lite/micro/micro_log.h" +#include "third_party/tflite_micro/tensorflow/lite/micro/micro_profiler.h" +#include "third_party/tflite_micro/tensorflow/lite/micro/recording_micro_allocator.h" +#include "third_party/tflite_micro/tensorflow/lite/micro/recording_micro_interpreter.h" +#include "third_party/tflite_micro/tensorflow/lite/micro/system_setup.h" +#include "third_party/tflite_micro/tensorflow/lite/micro/testing/micro_test.h" + +constexpr size_t kTensorArenaSize = 1024 * 100; +uint8_t tensor_arena[kTensorArenaSize]; + +namespace tflite { +namespace micro { +namespace { + +void RunModel(const uint8_t* model, const int16_t* input0, + const uint32_t input0_size, const int16_t* input1, + const uint32_t input1_size, const int16_t* golden, + const uint32_t golden_size, const char* name) { + InitializeTarget(); + MicroProfiler profiler; + AllOpsResolver op_resolver; + + MicroInterpreter interpreter(GetModel(model), op_resolver, tensor_arena, + kTensorArenaSize, nullptr, &profiler); + interpreter.AllocateTensors(); + TfLiteTensor* input0_tensor = interpreter.input(0); + TF_LITE_MICRO_EXPECT_EQ(input0_tensor->bytes, input0_size * sizeof(int16_t)); + 
memcpy(interpreter.input(0)->data.raw, input0, input0_tensor->bytes); + TfLiteTensor* input1_tensor = interpreter.input(1); + TF_LITE_MICRO_EXPECT_EQ(input1_tensor->bytes, input1_size * sizeof(int16_t)); + memcpy(interpreter.input(1)->data.raw, input1, input1_tensor->bytes); + + if (kTfLiteOk != interpreter.Invoke()) { + TF_LITE_MICRO_EXPECT(false); + return; + } + profiler.Log(); + MicroPrintf(""); + + TfLiteTensor* output_tensor = interpreter.output(0); + TF_LITE_MICRO_EXPECT_EQ(output_tensor->bytes, golden_size * sizeof(int16_t)); + int16_t* output = output_tensor->data.i16; + for (uint32_t i = 0; i < golden_size; i++) { + TF_LITE_MICRO_EXPECT_EQ(golden[i], output[i]); + } +} + +} // namespace +} // namespace micro +} // namespace tflite + +TF_LITE_MICRO_TESTS_BEGIN + +TF_LITE_MICRO_TEST(add0_test) { + tflite::micro::RunModel( + g_add0_model_data, g_add0_input0_int16_test_data, + g_add0_input0_int16_test_data_size, g_add0_input1_int16_test_data, + g_add0_input1_int16_test_data_size, g_add0_golden_int16_test_data, + g_add0_golden_int16_test_data_size, "add0 test"); +} + +TF_LITE_MICRO_TEST(add1_test) { + tflite::micro::RunModel( + g_add1_model_data, g_add1_input0_int16_test_data, + g_add1_input0_int16_test_data_size, g_add1_input1_int16_test_data, + g_add1_input1_int16_test_data_size, g_add1_golden_int16_test_data, + g_add1_golden_int16_test_data_size, "add1 test"); +} + +TF_LITE_MICRO_TEST(add2_test) { + tflite::micro::RunModel( + g_add2_model_data, g_add2_input0_int16_test_data, + g_add2_input0_int16_test_data_size, g_add2_input1_int16_test_data, + g_add2_input1_int16_test_data_size, g_add2_golden_int16_test_data, + g_add2_golden_int16_test_data_size, "add2 test"); +} + +TF_LITE_MICRO_TEST(add3_test) { + tflite::micro::RunModel( + g_add3_model_data, g_add3_input0_int16_test_data, + g_add3_input0_int16_test_data_size, g_add3_input1_int16_test_data, + g_add3_input1_int16_test_data_size, g_add3_golden_int16_test_data, + g_add3_golden_int16_test_data_size, "add3 
test"); +} + +TF_LITE_MICRO_TEST(add4_test) { + tflite::micro::RunModel( + g_add4_model_data, g_add4_input0_int16_test_data, + g_add4_input0_int16_test_data_size, g_add4_input1_int16_test_data, + g_add4_input1_int16_test_data_size, g_add4_golden_int16_test_data, + g_add4_golden_int16_test_data_size, "add4 test"); +} + +TF_LITE_MICRO_TEST(add5_test) { + tflite::micro::RunModel( + g_add5_model_data, g_add5_input0_int16_test_data, + g_add5_input0_int16_test_data_size, g_add5_input1_int16_test_data, + g_add5_input1_int16_test_data_size, g_add5_golden_int16_test_data, + g_add5_golden_int16_test_data_size, "add5 test"); +} + +TF_LITE_MICRO_TEST(add6_test) { + tflite::micro::RunModel( + g_add6_model_data, g_add6_input0_int16_test_data, + g_add6_input0_int16_test_data_size, g_add6_input1_int16_test_data, + g_add6_input1_int16_test_data_size, g_add6_golden_int16_test_data, + g_add6_golden_int16_test_data_size, "add6 test"); +} + +TF_LITE_MICRO_TEST(add7_test) { + tflite::micro::RunModel( + g_add7_model_data, g_add7_input0_int16_test_data, + g_add7_input0_int16_test_data_size, g_add7_input1_int16_test_data, + g_add7_input1_int16_test_data_size, g_add7_golden_int16_test_data, + g_add7_golden_int16_test_data_size, "add7 test"); +} + +TF_LITE_MICRO_TEST(add8_test) { + tflite::micro::RunModel( + g_add8_model_data, g_add8_input0_int16_test_data, + g_add8_input0_int16_test_data_size, g_add8_input1_int16_test_data, + g_add8_input1_int16_test_data_size, g_add8_golden_int16_test_data, + g_add8_golden_int16_test_data_size, "add8 test"); +} + +TF_LITE_MICRO_TEST(add9_test) { + tflite::micro::RunModel( + g_add9_model_data, g_add9_input0_int16_test_data, + g_add9_input0_int16_test_data_size, g_add9_input1_int16_test_data, + g_add9_input1_int16_test_data_size, g_add9_golden_int16_test_data, + g_add9_golden_int16_test_data_size, "add9 test"); +} + +TF_LITE_MICRO_TEST(add10_test) { + tflite::micro::RunModel( + g_add10_model_data, g_add10_input0_int16_test_data, + 
g_add10_input0_int16_test_data_size, g_add10_input1_int16_test_data, + g_add10_input1_int16_test_data_size, g_add10_golden_int16_test_data, + g_add10_golden_int16_test_data_size, "add10 test"); +} + +TF_LITE_MICRO_TEST(add11_test) { + tflite::micro::RunModel( + g_add11_model_data, g_add11_input0_int16_test_data, + g_add11_input0_int16_test_data_size, g_add11_input1_int16_test_data, + g_add11_input1_int16_test_data_size, g_add11_golden_int16_test_data, + g_add11_golden_int16_test_data_size, "add11 test"); +} + +TF_LITE_MICRO_TEST(add12_test) { + tflite::micro::RunModel( + g_add12_model_data, g_add12_input0_int16_test_data, + g_add12_input0_int16_test_data_size, g_add12_input1_int16_test_data, + g_add12_input1_int16_test_data_size, g_add12_golden_int16_test_data, + g_add12_golden_int16_test_data_size, "add12 test"); +} + +TF_LITE_MICRO_TEST(add13_test) { + tflite::micro::RunModel( + g_add13_model_data, g_add13_input0_int16_test_data, + g_add13_input0_int16_test_data_size, g_add13_input1_int16_test_data, + g_add13_input1_int16_test_data_size, g_add13_golden_int16_test_data, + g_add13_golden_int16_test_data_size, "add13 test"); +} + +TF_LITE_MICRO_TEST(add14_test) { + tflite::micro::RunModel( + g_add14_model_data, g_add14_input0_int16_test_data, + g_add14_input0_int16_test_data_size, g_add14_input1_int16_test_data, + g_add14_input1_int16_test_data_size, g_add14_golden_int16_test_data, + g_add14_golden_int16_test_data_size, "add14 test"); +} + +TF_LITE_MICRO_TEST(add15_test) { + tflite::micro::RunModel( + g_add15_model_data, g_add15_input0_int16_test_data, + g_add15_input0_int16_test_data_size, g_add15_input1_int16_test_data, + g_add15_input1_int16_test_data_size, g_add15_golden_int16_test_data, + g_add15_golden_int16_test_data_size, "add15 test"); +} + +TF_LITE_MICRO_TEST(add16_test) { + tflite::micro::RunModel( + g_add16_model_data, g_add16_input0_int16_test_data, + g_add16_input0_int16_test_data_size, g_add16_input1_int16_test_data, + 
g_add16_input1_int16_test_data_size, g_add16_golden_int16_test_data, + g_add16_golden_int16_test_data_size, "add16 test"); +} + +TF_LITE_MICRO_TESTS_END diff --git a/tensorflow/lite/micro/integration_tests/seanet/conv/BUILD b/tensorflow/lite/micro/integration_tests/seanet/conv/BUILD new file mode 100644 index 0000000..1170d3c --- /dev/null +++ b/tensorflow/lite/micro/integration_tests/seanet/conv/BUILD @@ -0,0 +1,965 @@ +# Description: +# generated integration test for one specific kernel in a model. +load( + "//tensorflow/lite/micro:build_def.bzl", + "generate_cc_arrays", + "micro_copts", +) + +package( + default_visibility = ["//visibility:public"], + # Disabling layering_check because of http://b/177257332 + features = ["-layering_check"], + licenses = ["notice"], +) + +generate_cc_arrays( + name = "generated_conv0_model_data_cc", + src = "conv0.tflite", + out = "conv0_model_data.cc", +) + +generate_cc_arrays( + name = "generated_conv0_model_data_hdr", + src = "conv0.tflite", + out = "conv0_model_data.h", +) + +generate_cc_arrays( + name = "generated_conv1_model_data_cc", + src = "conv1.tflite", + out = "conv1_model_data.cc", +) + +generate_cc_arrays( + name = "generated_conv1_model_data_hdr", + src = "conv1.tflite", + out = "conv1_model_data.h", +) + +generate_cc_arrays( + name = "generated_conv2_model_data_cc", + src = "conv2.tflite", + out = "conv2_model_data.cc", +) + +generate_cc_arrays( + name = "generated_conv2_model_data_hdr", + src = "conv2.tflite", + out = "conv2_model_data.h", +) + +generate_cc_arrays( + name = "generated_conv3_model_data_cc", + src = "conv3.tflite", + out = "conv3_model_data.cc", +) + +generate_cc_arrays( + name = "generated_conv3_model_data_hdr", + src = "conv3.tflite", + out = "conv3_model_data.h", +) + +generate_cc_arrays( + name = "generated_conv4_model_data_cc", + src = "conv4.tflite", + out = "conv4_model_data.cc", +) + +generate_cc_arrays( + name = "generated_conv4_model_data_hdr", + src = "conv4.tflite", + out = 
"conv4_model_data.h", +) + +generate_cc_arrays( + name = "generated_conv5_model_data_cc", + src = "conv5.tflite", + out = "conv5_model_data.cc", +) + +generate_cc_arrays( + name = "generated_conv5_model_data_hdr", + src = "conv5.tflite", + out = "conv5_model_data.h", +) + +generate_cc_arrays( + name = "generated_conv6_model_data_cc", + src = "conv6.tflite", + out = "conv6_model_data.cc", +) + +generate_cc_arrays( + name = "generated_conv6_model_data_hdr", + src = "conv6.tflite", + out = "conv6_model_data.h", +) + +generate_cc_arrays( + name = "generated_conv7_model_data_cc", + src = "conv7.tflite", + out = "conv7_model_data.cc", +) + +generate_cc_arrays( + name = "generated_conv7_model_data_hdr", + src = "conv7.tflite", + out = "conv7_model_data.h", +) + +generate_cc_arrays( + name = "generated_conv8_model_data_cc", + src = "conv8.tflite", + out = "conv8_model_data.cc", +) + +generate_cc_arrays( + name = "generated_conv8_model_data_hdr", + src = "conv8.tflite", + out = "conv8_model_data.h", +) + +generate_cc_arrays( + name = "generated_conv9_model_data_cc", + src = "conv9.tflite", + out = "conv9_model_data.cc", +) + +generate_cc_arrays( + name = "generated_conv9_model_data_hdr", + src = "conv9.tflite", + out = "conv9_model_data.h", +) + +generate_cc_arrays( + name = "generated_conv10_model_data_cc", + src = "conv10.tflite", + out = "conv10_model_data.cc", +) + +generate_cc_arrays( + name = "generated_conv10_model_data_hdr", + src = "conv10.tflite", + out = "conv10_model_data.h", +) + +generate_cc_arrays( + name = "generated_conv11_model_data_cc", + src = "conv11.tflite", + out = "conv11_model_data.cc", +) + +generate_cc_arrays( + name = "generated_conv11_model_data_hdr", + src = "conv11.tflite", + out = "conv11_model_data.h", +) + +generate_cc_arrays( + name = "generated_conv12_model_data_cc", + src = "conv12.tflite", + out = "conv12_model_data.cc", +) + +generate_cc_arrays( + name = "generated_conv12_model_data_hdr", + src = "conv12.tflite", + out = 
"conv12_model_data.h", +) + +generate_cc_arrays( + name = "generated_conv13_model_data_cc", + src = "conv13.tflite", + out = "conv13_model_data.cc", +) + +generate_cc_arrays( + name = "generated_conv13_model_data_hdr", + src = "conv13.tflite", + out = "conv13_model_data.h", +) + +generate_cc_arrays( + name = "generated_conv14_model_data_cc", + src = "conv14.tflite", + out = "conv14_model_data.cc", +) + +generate_cc_arrays( + name = "generated_conv14_model_data_hdr", + src = "conv14.tflite", + out = "conv14_model_data.h", +) + +generate_cc_arrays( + name = "generated_conv15_model_data_cc", + src = "conv15.tflite", + out = "conv15_model_data.cc", +) + +generate_cc_arrays( + name = "generated_conv15_model_data_hdr", + src = "conv15.tflite", + out = "conv15_model_data.h", +) + +generate_cc_arrays( + name = "generated_conv16_model_data_cc", + src = "conv16.tflite", + out = "conv16_model_data.cc", +) + +generate_cc_arrays( + name = "generated_conv16_model_data_hdr", + src = "conv16.tflite", + out = "conv16_model_data.h", +) + +generate_cc_arrays( + name = "generated_conv17_model_data_cc", + src = "conv17.tflite", + out = "conv17_model_data.cc", +) + +generate_cc_arrays( + name = "generated_conv17_model_data_hdr", + src = "conv17.tflite", + out = "conv17_model_data.h", +) + +generate_cc_arrays( + name = "generated_conv18_model_data_cc", + src = "conv18.tflite", + out = "conv18_model_data.cc", +) + +generate_cc_arrays( + name = "generated_conv18_model_data_hdr", + src = "conv18.tflite", + out = "conv18_model_data.h", +) + +generate_cc_arrays( + name = "generated_conv19_model_data_cc", + src = "conv19.tflite", + out = "conv19_model_data.cc", +) + +generate_cc_arrays( + name = "generated_conv19_model_data_hdr", + src = "conv19.tflite", + out = "conv19_model_data.h", +) + +generate_cc_arrays( + name = "generated_conv20_model_data_cc", + src = "conv20.tflite", + out = "conv20_model_data.cc", +) + +generate_cc_arrays( + name = "generated_conv20_model_data_hdr", + src = 
"conv20.tflite", + out = "conv20_model_data.h", +) + +generate_cc_arrays( + name = "generated_conv21_model_data_cc", + src = "conv21.tflite", + out = "conv21_model_data.cc", +) + +generate_cc_arrays( + name = "generated_conv21_model_data_hdr", + src = "conv21.tflite", + out = "conv21_model_data.h", +) + +generate_cc_arrays( + name = "generated_conv0_input0_int16_test_data_cc", + src = "conv0_input0_int16.csv", + out = "conv0_input0_int16_test_data.cc", +) + +generate_cc_arrays( + name = "generated_conv0_input0_int16_test_data_hdr", + src = "conv0_input0_int16.csv", + out = "conv0_input0_int16_test_data.h", +) + +generate_cc_arrays( + name = "generated_conv0_golden_int16_test_data_cc", + src = "conv0_golden_int16.csv", + out = "conv0_golden_int16_test_data.cc", +) + +generate_cc_arrays( + name = "generated_conv0_golden_int16_test_data_hdr", + src = "conv0_golden_int16.csv", + out = "conv0_golden_int16_test_data.h", +) + +generate_cc_arrays( + name = "generated_conv1_input0_int16_test_data_cc", + src = "conv1_input0_int16.csv", + out = "conv1_input0_int16_test_data.cc", +) + +generate_cc_arrays( + name = "generated_conv1_input0_int16_test_data_hdr", + src = "conv1_input0_int16.csv", + out = "conv1_input0_int16_test_data.h", +) + +generate_cc_arrays( + name = "generated_conv1_golden_int16_test_data_cc", + src = "conv1_golden_int16.csv", + out = "conv1_golden_int16_test_data.cc", +) + +generate_cc_arrays( + name = "generated_conv1_golden_int16_test_data_hdr", + src = "conv1_golden_int16.csv", + out = "conv1_golden_int16_test_data.h", +) + +generate_cc_arrays( + name = "generated_conv2_input0_int16_test_data_cc", + src = "conv2_input0_int16.csv", + out = "conv2_input0_int16_test_data.cc", +) + +generate_cc_arrays( + name = "generated_conv2_input0_int16_test_data_hdr", + src = "conv2_input0_int16.csv", + out = "conv2_input0_int16_test_data.h", +) + +generate_cc_arrays( + name = "generated_conv2_golden_int16_test_data_cc", + src = "conv2_golden_int16.csv", + out = 
"conv2_golden_int16_test_data.cc", +) + +generate_cc_arrays( + name = "generated_conv2_golden_int16_test_data_hdr", + src = "conv2_golden_int16.csv", + out = "conv2_golden_int16_test_data.h", +) + +generate_cc_arrays( + name = "generated_conv3_input0_int16_test_data_cc", + src = "conv3_input0_int16.csv", + out = "conv3_input0_int16_test_data.cc", +) + +generate_cc_arrays( + name = "generated_conv3_input0_int16_test_data_hdr", + src = "conv3_input0_int16.csv", + out = "conv3_input0_int16_test_data.h", +) + +generate_cc_arrays( + name = "generated_conv3_golden_int16_test_data_cc", + src = "conv3_golden_int16.csv", + out = "conv3_golden_int16_test_data.cc", +) + +generate_cc_arrays( + name = "generated_conv3_golden_int16_test_data_hdr", + src = "conv3_golden_int16.csv", + out = "conv3_golden_int16_test_data.h", +) + +generate_cc_arrays( + name = "generated_conv4_input0_int16_test_data_cc", + src = "conv4_input0_int16.csv", + out = "conv4_input0_int16_test_data.cc", +) + +generate_cc_arrays( + name = "generated_conv4_input0_int16_test_data_hdr", + src = "conv4_input0_int16.csv", + out = "conv4_input0_int16_test_data.h", +) + +generate_cc_arrays( + name = "generated_conv4_golden_int16_test_data_cc", + src = "conv4_golden_int16.csv", + out = "conv4_golden_int16_test_data.cc", +) + +generate_cc_arrays( + name = "generated_conv4_golden_int16_test_data_hdr", + src = "conv4_golden_int16.csv", + out = "conv4_golden_int16_test_data.h", +) + +generate_cc_arrays( + name = "generated_conv5_input0_int16_test_data_cc", + src = "conv5_input0_int16.csv", + out = "conv5_input0_int16_test_data.cc", +) + +generate_cc_arrays( + name = "generated_conv5_input0_int16_test_data_hdr", + src = "conv5_input0_int16.csv", + out = "conv5_input0_int16_test_data.h", +) + +generate_cc_arrays( + name = "generated_conv5_golden_int16_test_data_cc", + src = "conv5_golden_int16.csv", + out = "conv5_golden_int16_test_data.cc", +) + +generate_cc_arrays( + name = "generated_conv5_golden_int16_test_data_hdr", 
+ src = "conv5_golden_int16.csv", + out = "conv5_golden_int16_test_data.h", +) + +generate_cc_arrays( + name = "generated_conv6_input0_int16_test_data_cc", + src = "conv6_input0_int16.csv", + out = "conv6_input0_int16_test_data.cc", +) + +generate_cc_arrays( + name = "generated_conv6_input0_int16_test_data_hdr", + src = "conv6_input0_int16.csv", + out = "conv6_input0_int16_test_data.h", +) + +generate_cc_arrays( + name = "generated_conv6_golden_int16_test_data_cc", + src = "conv6_golden_int16.csv", + out = "conv6_golden_int16_test_data.cc", +) + +generate_cc_arrays( + name = "generated_conv6_golden_int16_test_data_hdr", + src = "conv6_golden_int16.csv", + out = "conv6_golden_int16_test_data.h", +) + +generate_cc_arrays( + name = "generated_conv7_input0_int16_test_data_cc", + src = "conv7_input0_int16.csv", + out = "conv7_input0_int16_test_data.cc", +) + +generate_cc_arrays( + name = "generated_conv7_input0_int16_test_data_hdr", + src = "conv7_input0_int16.csv", + out = "conv7_input0_int16_test_data.h", +) + +generate_cc_arrays( + name = "generated_conv7_golden_int16_test_data_cc", + src = "conv7_golden_int16.csv", + out = "conv7_golden_int16_test_data.cc", +) + +generate_cc_arrays( + name = "generated_conv7_golden_int16_test_data_hdr", + src = "conv7_golden_int16.csv", + out = "conv7_golden_int16_test_data.h", +) + +generate_cc_arrays( + name = "generated_conv8_input0_int16_test_data_cc", + src = "conv8_input0_int16.csv", + out = "conv8_input0_int16_test_data.cc", +) + +generate_cc_arrays( + name = "generated_conv8_input0_int16_test_data_hdr", + src = "conv8_input0_int16.csv", + out = "conv8_input0_int16_test_data.h", +) + +generate_cc_arrays( + name = "generated_conv8_golden_int16_test_data_cc", + src = "conv8_golden_int16.csv", + out = "conv8_golden_int16_test_data.cc", +) + +generate_cc_arrays( + name = "generated_conv8_golden_int16_test_data_hdr", + src = "conv8_golden_int16.csv", + out = "conv8_golden_int16_test_data.h", +) + +generate_cc_arrays( + name = 
"generated_conv9_input0_int16_test_data_cc", + src = "conv9_input0_int16.csv", + out = "conv9_input0_int16_test_data.cc", +) + +generate_cc_arrays( + name = "generated_conv9_input0_int16_test_data_hdr", + src = "conv9_input0_int16.csv", + out = "conv9_input0_int16_test_data.h", +) + +generate_cc_arrays( + name = "generated_conv9_golden_int16_test_data_cc", + src = "conv9_golden_int16.csv", + out = "conv9_golden_int16_test_data.cc", +) + +generate_cc_arrays( + name = "generated_conv9_golden_int16_test_data_hdr", + src = "conv9_golden_int16.csv", + out = "conv9_golden_int16_test_data.h", +) + +generate_cc_arrays( + name = "generated_conv10_input0_int16_test_data_cc", + src = "conv10_input0_int16.csv", + out = "conv10_input0_int16_test_data.cc", +) + +generate_cc_arrays( + name = "generated_conv10_input0_int16_test_data_hdr", + src = "conv10_input0_int16.csv", + out = "conv10_input0_int16_test_data.h", +) + +generate_cc_arrays( + name = "generated_conv10_golden_int16_test_data_cc", + src = "conv10_golden_int16.csv", + out = "conv10_golden_int16_test_data.cc", +) + +generate_cc_arrays( + name = "generated_conv10_golden_int16_test_data_hdr", + src = "conv10_golden_int16.csv", + out = "conv10_golden_int16_test_data.h", +) + +generate_cc_arrays( + name = "generated_conv11_input0_int16_test_data_cc", + src = "conv11_input0_int16.csv", + out = "conv11_input0_int16_test_data.cc", +) + +generate_cc_arrays( + name = "generated_conv11_input0_int16_test_data_hdr", + src = "conv11_input0_int16.csv", + out = "conv11_input0_int16_test_data.h", +) + +generate_cc_arrays( + name = "generated_conv11_golden_int16_test_data_cc", + src = "conv11_golden_int16.csv", + out = "conv11_golden_int16_test_data.cc", +) + +generate_cc_arrays( + name = "generated_conv11_golden_int16_test_data_hdr", + src = "conv11_golden_int16.csv", + out = "conv11_golden_int16_test_data.h", +) + +generate_cc_arrays( + name = "generated_conv12_input0_int16_test_data_cc", + src = "conv12_input0_int16.csv", + out = 
"conv12_input0_int16_test_data.cc", +) + +generate_cc_arrays( + name = "generated_conv12_input0_int16_test_data_hdr", + src = "conv12_input0_int16.csv", + out = "conv12_input0_int16_test_data.h", +) + +generate_cc_arrays( + name = "generated_conv12_golden_int16_test_data_cc", + src = "conv12_golden_int16.csv", + out = "conv12_golden_int16_test_data.cc", +) + +generate_cc_arrays( + name = "generated_conv12_golden_int16_test_data_hdr", + src = "conv12_golden_int16.csv", + out = "conv12_golden_int16_test_data.h", +) + +generate_cc_arrays( + name = "generated_conv13_input0_int16_test_data_cc", + src = "conv13_input0_int16.csv", + out = "conv13_input0_int16_test_data.cc", +) + +generate_cc_arrays( + name = "generated_conv13_input0_int16_test_data_hdr", + src = "conv13_input0_int16.csv", + out = "conv13_input0_int16_test_data.h", +) + +generate_cc_arrays( + name = "generated_conv13_golden_int16_test_data_cc", + src = "conv13_golden_int16.csv", + out = "conv13_golden_int16_test_data.cc", +) + +generate_cc_arrays( + name = "generated_conv13_golden_int16_test_data_hdr", + src = "conv13_golden_int16.csv", + out = "conv13_golden_int16_test_data.h", +) + +generate_cc_arrays( + name = "generated_conv14_input0_int16_test_data_cc", + src = "conv14_input0_int16.csv", + out = "conv14_input0_int16_test_data.cc", +) + +generate_cc_arrays( + name = "generated_conv14_input0_int16_test_data_hdr", + src = "conv14_input0_int16.csv", + out = "conv14_input0_int16_test_data.h", +) + +generate_cc_arrays( + name = "generated_conv14_golden_int16_test_data_cc", + src = "conv14_golden_int16.csv", + out = "conv14_golden_int16_test_data.cc", +) + +generate_cc_arrays( + name = "generated_conv14_golden_int16_test_data_hdr", + src = "conv14_golden_int16.csv", + out = "conv14_golden_int16_test_data.h", +) + +generate_cc_arrays( + name = "generated_conv15_input0_int16_test_data_cc", + src = "conv15_input0_int16.csv", + out = "conv15_input0_int16_test_data.cc", +) + +generate_cc_arrays( + name = 
"generated_conv15_input0_int16_test_data_hdr", + src = "conv15_input0_int16.csv", + out = "conv15_input0_int16_test_data.h", +) + +generate_cc_arrays( + name = "generated_conv15_golden_int16_test_data_cc", + src = "conv15_golden_int16.csv", + out = "conv15_golden_int16_test_data.cc", +) + +generate_cc_arrays( + name = "generated_conv15_golden_int16_test_data_hdr", + src = "conv15_golden_int16.csv", + out = "conv15_golden_int16_test_data.h", +) + +generate_cc_arrays( + name = "generated_conv16_input0_int16_test_data_cc", + src = "conv16_input0_int16.csv", + out = "conv16_input0_int16_test_data.cc", +) + +generate_cc_arrays( + name = "generated_conv16_input0_int16_test_data_hdr", + src = "conv16_input0_int16.csv", + out = "conv16_input0_int16_test_data.h", +) + +generate_cc_arrays( + name = "generated_conv16_golden_int16_test_data_cc", + src = "conv16_golden_int16.csv", + out = "conv16_golden_int16_test_data.cc", +) + +generate_cc_arrays( + name = "generated_conv16_golden_int16_test_data_hdr", + src = "conv16_golden_int16.csv", + out = "conv16_golden_int16_test_data.h", +) + +generate_cc_arrays( + name = "generated_conv17_input0_int16_test_data_cc", + src = "conv17_input0_int16.csv", + out = "conv17_input0_int16_test_data.cc", +) + +generate_cc_arrays( + name = "generated_conv17_input0_int16_test_data_hdr", + src = "conv17_input0_int16.csv", + out = "conv17_input0_int16_test_data.h", +) + +generate_cc_arrays( + name = "generated_conv17_golden_int16_test_data_cc", + src = "conv17_golden_int16.csv", + out = "conv17_golden_int16_test_data.cc", +) + +generate_cc_arrays( + name = "generated_conv17_golden_int16_test_data_hdr", + src = "conv17_golden_int16.csv", + out = "conv17_golden_int16_test_data.h", +) + +generate_cc_arrays( + name = "generated_conv18_input0_int16_test_data_cc", + src = "conv18_input0_int16.csv", + out = "conv18_input0_int16_test_data.cc", +) + +generate_cc_arrays( + name = "generated_conv18_input0_int16_test_data_hdr", + src = 
"conv18_input0_int16.csv", + out = "conv18_input0_int16_test_data.h", +) + +generate_cc_arrays( + name = "generated_conv18_golden_int16_test_data_cc", + src = "conv18_golden_int16.csv", + out = "conv18_golden_int16_test_data.cc", +) + +generate_cc_arrays( + name = "generated_conv18_golden_int16_test_data_hdr", + src = "conv18_golden_int16.csv", + out = "conv18_golden_int16_test_data.h", +) + +generate_cc_arrays( + name = "generated_conv19_input0_int16_test_data_cc", + src = "conv19_input0_int16.csv", + out = "conv19_input0_int16_test_data.cc", +) + +generate_cc_arrays( + name = "generated_conv19_input0_int16_test_data_hdr", + src = "conv19_input0_int16.csv", + out = "conv19_input0_int16_test_data.h", +) + +generate_cc_arrays( + name = "generated_conv19_golden_int16_test_data_cc", + src = "conv19_golden_int16.csv", + out = "conv19_golden_int16_test_data.cc", +) + +generate_cc_arrays( + name = "generated_conv19_golden_int16_test_data_hdr", + src = "conv19_golden_int16.csv", + out = "conv19_golden_int16_test_data.h", +) + +generate_cc_arrays( + name = "generated_conv20_input0_int16_test_data_cc", + src = "conv20_input0_int16.csv", + out = "conv20_input0_int16_test_data.cc", +) + +generate_cc_arrays( + name = "generated_conv20_input0_int16_test_data_hdr", + src = "conv20_input0_int16.csv", + out = "conv20_input0_int16_test_data.h", +) + +generate_cc_arrays( + name = "generated_conv20_golden_int16_test_data_cc", + src = "conv20_golden_int16.csv", + out = "conv20_golden_int16_test_data.cc", +) + +generate_cc_arrays( + name = "generated_conv20_golden_int16_test_data_hdr", + src = "conv20_golden_int16.csv", + out = "conv20_golden_int16_test_data.h", +) + +generate_cc_arrays( + name = "generated_conv21_input0_int16_test_data_cc", + src = "conv21_input0_int16.csv", + out = "conv21_input0_int16_test_data.cc", +) + +generate_cc_arrays( + name = "generated_conv21_input0_int16_test_data_hdr", + src = "conv21_input0_int16.csv", + out = "conv21_input0_int16_test_data.h", +) + 
+generate_cc_arrays( + name = "generated_conv21_golden_int16_test_data_cc", + src = "conv21_golden_int16.csv", + out = "conv21_golden_int16_test_data.cc", +) + +generate_cc_arrays( + name = "generated_conv21_golden_int16_test_data_hdr", + src = "conv21_golden_int16.csv", + out = "conv21_golden_int16_test_data.h", +) + +cc_library( + name = "models_and_testdata", + srcs = [ + "generated_conv0_golden_int16_test_data_cc", + "generated_conv0_input0_int16_test_data_cc", + "generated_conv0_model_data_cc", + "generated_conv10_golden_int16_test_data_cc", + "generated_conv10_input0_int16_test_data_cc", + "generated_conv10_model_data_cc", + "generated_conv11_golden_int16_test_data_cc", + "generated_conv11_input0_int16_test_data_cc", + "generated_conv11_model_data_cc", + "generated_conv12_golden_int16_test_data_cc", + "generated_conv12_input0_int16_test_data_cc", + "generated_conv12_model_data_cc", + "generated_conv13_golden_int16_test_data_cc", + "generated_conv13_input0_int16_test_data_cc", + "generated_conv13_model_data_cc", + "generated_conv14_golden_int16_test_data_cc", + "generated_conv14_input0_int16_test_data_cc", + "generated_conv14_model_data_cc", + "generated_conv15_golden_int16_test_data_cc", + "generated_conv15_input0_int16_test_data_cc", + "generated_conv15_model_data_cc", + "generated_conv16_golden_int16_test_data_cc", + "generated_conv16_input0_int16_test_data_cc", + "generated_conv16_model_data_cc", + "generated_conv17_golden_int16_test_data_cc", + "generated_conv17_input0_int16_test_data_cc", + "generated_conv17_model_data_cc", + "generated_conv18_golden_int16_test_data_cc", + "generated_conv18_input0_int16_test_data_cc", + "generated_conv18_model_data_cc", + "generated_conv19_golden_int16_test_data_cc", + "generated_conv19_input0_int16_test_data_cc", + "generated_conv19_model_data_cc", + "generated_conv1_golden_int16_test_data_cc", + "generated_conv1_input0_int16_test_data_cc", + "generated_conv1_model_data_cc", + 
"generated_conv20_golden_int16_test_data_cc", + "generated_conv20_input0_int16_test_data_cc", + "generated_conv20_model_data_cc", + "generated_conv21_golden_int16_test_data_cc", + "generated_conv21_input0_int16_test_data_cc", + "generated_conv21_model_data_cc", + "generated_conv2_golden_int16_test_data_cc", + "generated_conv2_input0_int16_test_data_cc", + "generated_conv2_model_data_cc", + "generated_conv3_golden_int16_test_data_cc", + "generated_conv3_input0_int16_test_data_cc", + "generated_conv3_model_data_cc", + "generated_conv4_golden_int16_test_data_cc", + "generated_conv4_input0_int16_test_data_cc", + "generated_conv4_model_data_cc", + "generated_conv5_golden_int16_test_data_cc", + "generated_conv5_input0_int16_test_data_cc", + "generated_conv5_model_data_cc", + "generated_conv6_golden_int16_test_data_cc", + "generated_conv6_input0_int16_test_data_cc", + "generated_conv6_model_data_cc", + "generated_conv7_golden_int16_test_data_cc", + "generated_conv7_input0_int16_test_data_cc", + "generated_conv7_model_data_cc", + "generated_conv8_golden_int16_test_data_cc", + "generated_conv8_input0_int16_test_data_cc", + "generated_conv8_model_data_cc", + "generated_conv9_golden_int16_test_data_cc", + "generated_conv9_input0_int16_test_data_cc", + "generated_conv9_model_data_cc", + ], + hdrs = [ + "generated_conv0_golden_int16_test_data_hdr", + "generated_conv0_input0_int16_test_data_hdr", + "generated_conv0_model_data_hdr", + "generated_conv10_golden_int16_test_data_hdr", + "generated_conv10_input0_int16_test_data_hdr", + "generated_conv10_model_data_hdr", + "generated_conv11_golden_int16_test_data_hdr", + "generated_conv11_input0_int16_test_data_hdr", + "generated_conv11_model_data_hdr", + "generated_conv12_golden_int16_test_data_hdr", + "generated_conv12_input0_int16_test_data_hdr", + "generated_conv12_model_data_hdr", + "generated_conv13_golden_int16_test_data_hdr", + "generated_conv13_input0_int16_test_data_hdr", + "generated_conv13_model_data_hdr", + 
"generated_conv14_golden_int16_test_data_hdr", + "generated_conv14_input0_int16_test_data_hdr", + "generated_conv14_model_data_hdr", + "generated_conv15_golden_int16_test_data_hdr", + "generated_conv15_input0_int16_test_data_hdr", + "generated_conv15_model_data_hdr", + "generated_conv16_golden_int16_test_data_hdr", + "generated_conv16_input0_int16_test_data_hdr", + "generated_conv16_model_data_hdr", + "generated_conv17_golden_int16_test_data_hdr", + "generated_conv17_input0_int16_test_data_hdr", + "generated_conv17_model_data_hdr", + "generated_conv18_golden_int16_test_data_hdr", + "generated_conv18_input0_int16_test_data_hdr", + "generated_conv18_model_data_hdr", + "generated_conv19_golden_int16_test_data_hdr", + "generated_conv19_input0_int16_test_data_hdr", + "generated_conv19_model_data_hdr", + "generated_conv1_golden_int16_test_data_hdr", + "generated_conv1_input0_int16_test_data_hdr", + "generated_conv1_model_data_hdr", + "generated_conv20_golden_int16_test_data_hdr", + "generated_conv20_input0_int16_test_data_hdr", + "generated_conv20_model_data_hdr", + "generated_conv21_golden_int16_test_data_hdr", + "generated_conv21_input0_int16_test_data_hdr", + "generated_conv21_model_data_hdr", + "generated_conv2_golden_int16_test_data_hdr", + "generated_conv2_input0_int16_test_data_hdr", + "generated_conv2_model_data_hdr", + "generated_conv3_golden_int16_test_data_hdr", + "generated_conv3_input0_int16_test_data_hdr", + "generated_conv3_model_data_hdr", + "generated_conv4_golden_int16_test_data_hdr", + "generated_conv4_input0_int16_test_data_hdr", + "generated_conv4_model_data_hdr", + "generated_conv5_golden_int16_test_data_hdr", + "generated_conv5_input0_int16_test_data_hdr", + "generated_conv5_model_data_hdr", + "generated_conv6_golden_int16_test_data_hdr", + "generated_conv6_input0_int16_test_data_hdr", + "generated_conv6_model_data_hdr", + "generated_conv7_golden_int16_test_data_hdr", + "generated_conv7_input0_int16_test_data_hdr", + 
"generated_conv7_model_data_hdr", + "generated_conv8_golden_int16_test_data_hdr", + "generated_conv8_input0_int16_test_data_hdr", + "generated_conv8_model_data_hdr", + "generated_conv9_golden_int16_test_data_hdr", + "generated_conv9_input0_int16_test_data_hdr", + "generated_conv9_model_data_hdr", + ], + copts = micro_copts(), +) + +cc_test( + name = "integration_test", + srcs = [ + "integration_tests.cc", + ], + copts = micro_copts(), + deps = [ + ":models_and_testdata", + "//python/tflite_micro:python_ops_resolver", + "//tensorflow/lite/micro:micro_framework", + "//tensorflow/lite/micro:micro_log", + "//tensorflow/lite/micro:micro_resource_variable", + "//tensorflow/lite/micro:op_resolvers", + "//tensorflow/lite/micro:recording_allocators", + "//tensorflow/lite/micro/testing:micro_test", + ], +) diff --git a/tensorflow/lite/micro/integration_tests/seanet/conv/Makefile.inc b/tensorflow/lite/micro/integration_tests/seanet/conv/Makefile.inc new file mode 100644 index 0000000..a53bead --- /dev/null +++ b/tensorflow/lite/micro/integration_tests/seanet/conv/Makefile.inc @@ -0,0 +1,78 @@ +integration_tests_seanet_conv_GENERATOR_INPUTS := \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/conv/conv0.tflite \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/conv/conv1.tflite \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/conv/conv2.tflite \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/conv/conv3.tflite \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/conv/conv4.tflite \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/conv/conv5.tflite \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/conv/conv6.tflite \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/conv/conv7.tflite \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/conv/conv8.tflite \ 
+$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/conv/conv9.tflite \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/conv/conv10.tflite \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/conv/conv11.tflite \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/conv/conv12.tflite \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/conv/conv13.tflite \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/conv/conv14.tflite \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/conv/conv15.tflite \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/conv/conv16.tflite \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/conv/conv17.tflite \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/conv/conv18.tflite \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/conv/conv19.tflite \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/conv/conv20.tflite \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/conv/conv21.tflite \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/conv/conv0_input0_int16.csv \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/conv/conv0_golden_int16.csv \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/conv/conv1_input0_int16.csv \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/conv/conv1_golden_int16.csv \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/conv/conv2_input0_int16.csv \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/conv/conv2_golden_int16.csv \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/conv/conv3_input0_int16.csv \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/conv/conv3_golden_int16.csv \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/conv/conv4_input0_int16.csv \ 
+$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/conv/conv4_golden_int16.csv \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/conv/conv5_input0_int16.csv \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/conv/conv5_golden_int16.csv \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/conv/conv6_input0_int16.csv \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/conv/conv6_golden_int16.csv \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/conv/conv7_input0_int16.csv \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/conv/conv7_golden_int16.csv \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/conv/conv8_input0_int16.csv \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/conv/conv8_golden_int16.csv \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/conv/conv9_input0_int16.csv \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/conv/conv9_golden_int16.csv \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/conv/conv10_input0_int16.csv \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/conv/conv10_golden_int16.csv \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/conv/conv11_input0_int16.csv \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/conv/conv11_golden_int16.csv \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/conv/conv12_input0_int16.csv \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/conv/conv12_golden_int16.csv \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/conv/conv13_input0_int16.csv \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/conv/conv13_golden_int16.csv \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/conv/conv14_input0_int16.csv \ 
+$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/conv/conv14_golden_int16.csv \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/conv/conv15_input0_int16.csv \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/conv/conv15_golden_int16.csv \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/conv/conv16_input0_int16.csv \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/conv/conv16_golden_int16.csv \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/conv/conv17_input0_int16.csv \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/conv/conv17_golden_int16.csv \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/conv/conv18_input0_int16.csv \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/conv/conv18_golden_int16.csv \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/conv/conv19_input0_int16.csv \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/conv/conv19_golden_int16.csv \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/conv/conv20_input0_int16.csv \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/conv/conv20_golden_int16.csv \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/conv/conv21_input0_int16.csv \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/conv/conv21_golden_int16.csv \ + +integration_tests_seanet_conv_SRCS := \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/conv/integration_tests.cc \ +$(TENSORFLOW_ROOT)python/tflite_micro/python_ops_resolver.cc + +integration_tests_seanet_conv_HDR := \ +$(TENSORFLOW_ROOT)python/tflite_micro/python_ops_resolver.h + + +$(eval $(call microlite_test,integration_tests_seanet_conv,\ +$(integration_tests_seanet_conv_SRCS),$(integration_tests_seanet_conv_HDR),$(integration_tests_seanet_conv_GENERATOR_INPUTS))) diff --git 
a/tensorflow/lite/micro/integration_tests/seanet/conv/conv0.tflite b/tensorflow/lite/micro/integration_tests/seanet/conv/conv0.tflite new file mode 100644 index 0000000..df0c57a Binary files /dev/null and b/tensorflow/lite/micro/integration_tests/seanet/conv/conv0.tflite differ diff --git a/tensorflow/lite/micro/integration_tests/seanet/conv/conv0_golden_int16.csv b/tensorflow/lite/micro/integration_tests/seanet/conv/conv0_golden_int16.csv new file mode 100644 index 0000000..848ece5 --- /dev/null +++ b/tensorflow/lite/micro/integration_tests/seanet/conv/conv0_golden_int16.csv @@ -0,0 +1 @@ +-3097,-32768,-9948,13979,-3385,-5527,-28931,19703,-5564,13467,-32768,5017,-32768,-11004,-32768,23048,-32768,-32768,-20366,4235,-18957,-32768,-7399,9279,-32768,18401,-32768,-32768,-29407,4495,-25012,-10335,-32768,-32768,-32768,-32768,-32768,-24990,13163,15009,-20812,-24607,-32768,-32768,-30205,-32768,-25292,-5932,17907,-32768,1778,-32768,-32768,-30947,-32768,-16347,-32768,2519,-32768,-8782,-32768,-5395,27499,-28082,-24912,32767,-9187,18556,32767,19065,24928,-21043,13098,-32768,19918,-20968,-32768,-13672,-16377,-17687,-32768,-8016,29299,-886,-3710,-21345,32767,-16310,-14589,14792,4912,-90,32767,18673,30676,-18924,32767,-9778,-32768,30680,32767,32767,11232,15128,32767,-32768,-24542,-2582,-32768,32767,32071,18817,-32768,-7229,-32768,-32768,-25338,-15458,1015,1689,-19221,-24791,-32768,-32486,32767,-32768,-32768,-20522,-370,18645,32767,-32768,32767,-5040,-32768,-13974,16750,-32768,1650,12763,-25242,32767,-32768,-7155,-30055,3893,-32768,-27083,-32768,21218,-32768,-32768,-8361,-23488,-32768,-26401,32767,-32768,-13022,-32768,-9954,32767,15309,-32768,-19503,16379,13022,-32768,-10142,32767,32767,32767,32767,11045,7909,-32768,9925,-14814,19336,3680,23301,22313,-32768,-11488,3863,32767,-30131,2640,9961,32767,32767,-3165,-17747,5630,25402,-9877,-32768,7302,19109,-16190,3141,-32768,-8636,15549,29466,-32768,-14047,28647,-18576,31778,32767,3168,-6995,-32768,-32768,6844,4683,-11206,-9550,5570,2258
5,-12464,17199,10936,-9173,10108,19542,-29342,-32768,-26517,-32768,-32212,-32768,-31565,-13705,-30962,-32768,13103,32767,-24627,-32768,-10967,-11737,-32768,31975,7978,-32768,2696,-1090,-32768,-3240,-13905,-15005,-11089,-24986,4945,10750,2216,15709,-32768,32767,32767,21537,-32768,-935,-32768,-28882,-32768,-27857,23447,28545,-3937,-32768,20053,-31127,21859,15321,-17248,-32768,-15408,-32768,-5940,-10898,-26692,-27976,-32768,15145,-25753,-32768,32767,9652,-5652,32767,-32768,-10428,-32701,-13318,32767,32767,32067,32767,-32768,-17123,-19594,4099,32767,-29828,-13844,-32768,28266,18246,-32768,23530,-32768,-20233,10875,-23189,32767,-7652,11350,-21015,-11513,-32768,18561,5142,17112,17907,-18014,16624,-32768,-32768,11424,-32412,32767,6769,-814,-32768,-26962,-28226,-32768,32767,-6298,-32768,-32768,-32070,-32768,8778,-32768,-32768,-13127,13799,-32768,-32768,32767,32767,-30536,32767,29550,-32768,-32768,17317,-4594,32767,-18697,32767,3362,-25703,16629,29968,-1348,32767,-22010,32767,13704,-2488,-12353,-9716,-12305,22771,30926,-4755,32767,31743,-11659,-32768,25050,32767,-19828,28519,32767,32767,-29218,-4503,31000,-5836,7647,31691,-12022,-16666,-18130,-15477,20120,32767,11120,32767,-27797,-9422,25758,32767,17884,32767,-32768,-32768,-26290,-32768,-18127,222,23041,-25557,7800,32767,-19190,-17964,-15649,19912,-32768,-19362,32536,-32768,-32768,-32768,32767,-32768,-1985,-32768,-18712,-32768,-32768,18787,-11217,-32768,32355,-32768,-18759,-32768,11281,32767,-32768,3499,21283,10496,-7684,14362,679,-32768,14092,-940,21456,-14058,9706,-32768,-903,32767,-20295,32767,-32768,-15389,12884,-12133,32767,31405,-125,32767,-32768,17711,14594,32767,-10432,-32768,30104,-5864,-32768,-7848,32767,-32768,-32768,-2589,-778,-27334,18848,-32768,-6179,-1184,-21451,7333,-23442,-32768,32767,32767,-32768,-8246,-28457,-32768,-32768,5203,21603,1383,7031,7520,-31343,10171,16450,-194,32072,6710,-10062,-3423,-26905,-32768,25377,10876,-32768,-32768,17471,-32768,15361,28837,16799,-2504,-21706,-32154,-26529,-32768,5609,-21
875,-30703,-21449,21764,-24565,-32768,10769,31020,32767,-22802,11528,-32768,6168,9878,8945,32767,-5525,-5951,-32768,-32768,1427,-1269,-22841,-1591,-32768,-23339,-3713,-32768,-32768,-32768,32767,17253,-20530,-5694,-8568,32767,22974,-32768,32767,2441,-32768,-32768,273,31850,32767,8833,32767,9876,-22766,-10817,20745,-32768,32767,27298,3094,-1061,-32768,1015,12474,31927,-32768,6840,27336,-26583,-12106,-13975,-22112,32767,28484,-21338,4516,-7044,32767,413,-2292,27176,32767,-32768,-32768,-26721,6981,-26281,-32768,32767,-32768,32767,-32768,-31602,-32768,9175,16602,-31243,32767,32767,9775,-24664,26674,32767,12130,13399,-11374,9900,12622,-26043,-28523,3178,24689,9232,-20094,32767,-32768,-21958,32767,-10522,-13528,-29069,-32768,-7133,-5334,-8150,19432,-24545,-12027,-32768,19948,26063,-32768,-32768,-32768,9978,-4353,32767,-11699,32767,32767,32767,15459,11870,28926,5493,31648,-12264,-32768,32767,30020,32767,-4137,-20871,-32768,18313,22775,-31423,-11511,32767,14070,-30351,18751,-13062,-32768,-32768,-28883,-10975,32767,-3515,-32768,11564,890,-7726,9788,-22845,24559,32767,32767,32767,32767,32767,1206,6207,19979,-12256,-27618,32767,-32768,-1492,-10741,-16902,12,32767,32767,-7281,32767,-12188,32767,-32768,32767,-26345,2037,-29636,-32768,-16744,-11560,-26921,-32768,32767,32767,6818,32767,-12524,-17487,-11629,32767,-17683,32767,32767,19360,31526,32767,3222,-31677,-20297,8350,6126,-18325,18165,-32768,11817,-32768,-32768,-14060,16872,8285,19479,32767,-14150,-32768,32767,32767,-32768,-3303,22045,32767,-32768,-18670,32767,17332,30723,32767,32767,32767,-19627,32767,32767,32767,14437,32767,32767,32767,26131,32767,4253,17421,32767,19632,27470,32767,8156,32767,-28634,32767,32767,32767,497,1946,-9230,13372,32767,-4063,3642,32767,753,25509,-20356,-13277,32767,32767,32767,-6033,18709,26278,-8308,-32768,32767,20441,24960,28728,-4083,16262,18783,10386,-32768,-32768,-28354,14004,15745,9338,28992,-32768,-32768,1126,5235,-32768,-1606,-32768,-17817,4024,-32768,-32768,2449,-12835,32767,22635,-11069,-20
65,-32768,-32768,-32768,-32768,29339,-32768,-27165,-29671,-32768,-32768,-22321,13452,-28516,-32768,-25168,32767,-32768,26536,-32768,31089,11522,-32768,-2324,32767,-32768,-32768,-7142,31726,685,-32768,-32768,6590,32767,-32768,-32768,-8799,3629,-23714,-22594,-31958,3111,-5385,-32768,-16390,4431,-11917,-32768,-32768,-21968,-32768,20911,-27839,-30585,-20043,-26365,2907,32767,-26616,-32768,7948,28319,-7482,9553,-6246,-31649,32767,10922,32767,32767,-5361,23985,-1240,-32768,32767,9603,-18558,6035,-30356,4978,15344,-32768,9494,-32768,5384,-10285,-32768,-336,26915,-32768,32767,-8496,32767,32767,-32768,780,32767,-16992,30762,4728,-12290,32767,-19262,15892,-1074,-17002,-32768,-289,14351,11164,-20801,-10435,-29940,8736,-17656,-32768,-32768,-19357,19677,-22318,-32768,258,-23323,13913,-15460,-32621,-8551,-14364,-17070,32767,32767,28481,32767,-32768,-32768,11507,32767,21440,32767,32767,32767,24348,32767,19938,21574,4279,14546,32767,-20204,23527,-18494,2254,20508,-32768,-32768,23176,-16625,27852,-7503,18319,32767,-32768,-32768,-32346,-11787,18563,32767,-1685,5274,32767,32767,-17671,17248,-30093,-32768,1527,32767,32767,-10389,7011,25446,6407,1761,5820,32767,32767,32767,-4966,32767,32767,32767,-20939,32767,32767,-25975,32767,-32768,32767,-19765,717,32767,-32768,-32768,32767,-378,32672,32767,32767,32767,22717,-26091,6486,16767,17177,24749,19302,32767,-17057,17519,32767,32767,-7762,-32768,8402,-1653,-31100,-10548,-21925,-31310,-27473,-7420,-32768,-32768,20269,32767,-10913,27651,13885,3630,-32768,-32768,32577,-32768,-9575,8145,641,-32768,-32768,-24990,32767,16905,-8709,-16617,-22192,-26262,-27054,27758,-32768,-18253,-17211,-20386,32735,-32768,-12359,13842,9693,-32768,-18220,13939,4075,-32768,-32768,17749,-32768,-32768,2073,-5874,-29803,-32768,-32768,-32768,29758,-32768,-32768,2037,-9068,-19290,-32768,-32768,32767,-32768,-32768,-32768,-32768,-32768,-32768,-23452,32767,15898,-32768,32767,-32768,-20417,-29000,23596,32767,24845,3676,11369,-32768,-31278,-3583,32767,32767,1008,-18353,-16637,1
5773,32767,-1163,-32768,32767,-32768,4569,-32768,32767,32767,18482,-32768,8530,32767,32767,-31959,-26854,22780,10848,32767,32767,18321,-24073,32767,-17113,32767,2500,32767,14990,1086,10738,1055,32767,32767,28204,32619,-32768,-28132,32767,-12876,22544,32767,18217,2578,25153,-28362,-17311,8138,20958,32767,-32768,32767,32767,32767,5842,27832,32767,32767,-14276,32767,32767,32767,16628,32767,32767,-16587,11315,32767,32767,32767,32767,32767,32767,2348,24341,32767,-32768,-24393,32767,32767,32767,-23238,20957,22776,16153,-9309,20265,13928,-28608,-32768,-32768,18777,-9129,-2251,13493,32767,12689,32767,-32768,-16721,32767,-1191,-32768,-16984,-17630,-32768,-32768,-32768,-32768,32767,-21415,32767,2808,-9601,-5152,-32768,-32768,-32768,-32768,3945,16913,-32768,4351,-24952,-32768,-18583,-32768,-20660,30597,-29486,32767,-32768,-32768,-32768,-32768,-3924,-7008,-32768,12200,-1148,-32768,-32768,-32768,-32768,-32768,-32768,-17644,-32768,10174,-14745,-32768,-17053,-32768,-32768,-32768,-17029,32767,-32768,-21994,3890,-16435,-32768,-32768,6745,19753,23464,-23576,28073,-26041,-9646,20872,-6753,-32768,-18317,26538,-21616,-13332,-32768,-2561,-32768,-28590,-32768,14766,-32768,-10795,-30523,-17230,-32768,-25134,18676,-32768,4411,-17184,32767,32767,-16650,32767,32767,-32768,32767,-22334,-6616,-17420,-6690,-9326,32767,8983,-32768,-32768,-16065,-5215,-23779,-32768,-32768,17351,-32768,-13404,-9315,31082,-8654,-32768,-32768,32767,20693,21418,25706,32767,-8332,16907,-21499,2182,-25576,31572,-32768,1933,-32768,-20814,-32768,-5839,-24197,25343,32767,-32768,-24373,-32652,-31359,-32768,26391,-7949,32141,-32768,-20844,-32768,-5834,-9439,-8844,14821,6235,-16143,-32768,5336,-32768,-8562,-15702,32767,21584,-32768,-32768,32767,27290,6779,32767,32767,32767,3394,19532,-32768,32767,3423,32767,32767,27939,11494,16270,32767,32767,-24941,32767,32767,31815,-17368,32767,-32768,32767,-6829,-11966,32767,32767,2571,32767,-32768,153,-19778,32767,24049,9338,-2128,-32768,-32768,-8083,32767,25920,8438,-18287,32,-27225,-357
1,32767,8599,-8926,-60,-28377,-7375,-32768,-23750,7086,-28351,32767,-32768,-32768,-32768,-32768,5736,-22789,-28408,-18669,-21217,-32768,-11265,-8066,-6799,20030,-27460,-890,19843,2947,-24848,11977,-32768,-32768,-4189,21297,32767,4797,8274,6745,29273,317,-8141,-31756,-6434,-31967,15462,-28357,-19565,9678,-14921,25234,30840,32767,-1383,20897,-17530,-9101,23874,-16145,8535,11235,13086,-17872,-32768,32767,10932,32767,-32768,-32768,32767,31752,-32768,32767,-3828,-32768,-8183,-32768,24607,-5348,22342,23259,-11560,32767,5465,-32768,12993,1388,32767,-17663,-21480,-32768,-32768,-32768,-1741,-32768,674,32767,21749,-32768,-32768,-18334,-2862,12540,-22878,-32768,15522,-32768,-32768,-7240,-7001,-2862,-12326,-32768,-4118,-23514,-32351,2500,32767,8262,-24764,13218,32767,-14615,1768,5951,24916,-19359,32767,14718,-16418,32767,27570,32767,-32768,21247,5395,350,-7646,-23755,32767,32767,-30090,9128,896,4538,15454,32767,32767,27822,7451,32275,9790,5447,-32768,14983,32767,32767,-25208,30274,32767,32767,-20942,19797,-23909,6978,-11592,-4213,-18149,32767,6949,32767,-32768,-32768,8448,32767,32767,31901,8436,-18127,-14220,-11324,16487,25665,32767,32767,32767,32767,15192,-32768,32767,-22583,615,31097,32767,-32768,-21242,3019,27393,-21886,-17466,-32768,-4424,-32768,-32768,3806,13387,-32768,-32768,-19864,418,-32768,-32768,-32768,18413,-32768,-20265,-32768,5639,-32768,-32768,-32768,-4602,-2370,-32072,-28915,-32768,-2283,-29974,-30865,-12401,28581,-32768,-13540,-22868,-32768,-23491,7251,-17806,770,14040,-32768,-15921,-2844,4702,-6115,-25472,-32768,5788,-32768,-16188,-28613,-32768,32767,-9277,-32768,32767,29901,32767,32767,4494,32767,-26312,-32768,-4793,5792,3337,-14160,32767,32767,11068,-29307,-28892,17554,-32768,1896,21959,32767,24379,14732,-32768,-1698,-29465,32767,32767,-32768,1201,32767,-32768,16436,-32768,-32768,29975,-32264,-32173,-1448,-32768,-27172,-32768,-3790,27267,-32768,6226,-23338,-32768,-32768,-31186,500,-21018,-32768,-16309,-16511,32767,32767,-12023,32767,-8307,-32768,-32768,32767,
22236,32767,-20717,25582,32767,-20482,41,-10536,32767,8758,4269,25748,3465,19942,-19343,-13574,-2909,17578,11164,4021,-32768,5285,-32768,-32768,32767,32767,-10499,12547,-32768,3088,-27351,-16101,32767,32767,32767,32767,10869,-15862,24250,17034,32767,31917,20841,-18399,-8376,26739,22764,8075,32720,12185,18107,16388,-23891,32767,26738,16835,-23861,-32768,-5813,-32768,-12837,26170,32767,-32768,-32768,-32768,21476,-32768,-30525,-32768,32767,-32768,-27787,-7869,32767,-32768,-32768,-32355,7887,32767,-32768,-32768,-32768,-32768,-32768,-32768,22744,32767,-32768,-28443,-26084,19756,-9154,-32768,32767,32767,-25595,-8062,-11752,-32768,-1301,11217,5487,-32768,23595,-32768,15500,-30355,-14430,8604,-32768,-30402,32767,-7071,7237,-32768,-32768,-16385,-32768,-21956,-31289,5663,-820,13730,7621,32767,-32768,20901,-3951,-15131,-9086,-32768,3230,21073,-2478,-32768,-494,3343,-6192,32767,10661,17477,32148,8588,-32768,-32768,26502,-32768,32767,32767,16419,-19372,-24342,26752,-32768,32767,25731,19988,2547,585,7416,27250,-28334,-32768,30526,32767,-32768,17661,17200,32767,-11022,32767,30962,-3590,-30434,-29915,6479,12405,-14131,-22351,-3225,1515,-32768,-1205,-22750,30464,16719,5402,-27297,-32768,-27935,-32768,32767,32767,32767,32767,-22969,-4546,-20233,12610,16778,28261,32767,-32768,-15378,-6813,-8627,-32768,32767,4309,-3325,32767,-32768,32767,8256,-16765,20872,7134,8933,-26432,-32768,-6186,-12619,10802,-2757,32767,25872,-2488,-21869,-32136,32022,30335,23696,2206,10604,-23431,3144,32767,20540,32767,32336,15264,-32768,-32768,2796,32767,-25689,32767,13665,9490,-11965,15934,-32768,29004,-6304,32767,32767,7880,17029,-32768,-24705,32767,-3986,-12498,22328,32767,32767,31978,-26138,16810,22660,-24137,32767,31826,32767,-32768,-1693,32767,25016,-32768,32767,13458,32767,25287,-32768,32602,22354,-15522,-32768,9903,28427,-32768,-32768,-12611,32767,32767,-32768,-2447,-8109,12273,-9474,-1361,32767,-16946,32767,-32768,-24897,9725,23710,-10868,-2342,32767,-32768,5388,-28263,-25757,32767,32767,-560,-32768,21
755,-4131,-26495,32767,16151,-5312,-28131,-14809,-32768,-32768,14824,-32768,1539,6048,-5883,-30762,32767,32767,-27511,32767,32767,12330,-28356,20986,32767,32767,32767,32767,32767,29540,-11468,-7468,-20516,32767,4437,32767,32767,214,-9259,-19742,-32768,-32768,-1538,6709,5785,16880,-23520,-3492,10194,12902,-14034,32767,11125,14367,-32768,-12567,21654,-26329,-25158,32767,32767,18308,-32768,-7365,32767,-15879,-26854,9635,32767,32767,-1472,-29556,-32768,12878,-23980,14667,-16432,10484,-32768,9481,32767,32767,-18609,32767,8173,3014,-9664,-198,32767,32767,32767,32767,32767,18138,3147,-2914,32767,32767,24655,-10822,27556,32767,-1898,-15261,-11248,-5908,-7561,32767,-32768,19136,28301,128,1029,-20539,11020,-32768,-27265,-23077,32767,14940,32767,-8200,12231,32767,32767,32767,14969,22004,-15044,26028,-32768,-32768,32767,7156,5902,-15806,32767,-32768,-32768,-11758,21545,32767,5609,-32768,1035,32767,-28997,-28725,-20239,10918,-32065,-28302,8316,21181,-6974,-272,-18497,-15518,-22006,32767,-5240,32767,11434,32767,32767,-27061,18233,32767,-26718,-7391,26185,-28821,32767,27393,32767,32767,-32768,-4276,2987,-32768,21957,32767,32767,10267,32767,-6788,7426,32767,11463,27409,-12866,32767,32767,21399,12967,-26305,-17050,32767,-12467,-23996,32767,32767,32767,32767,6702,7555,-22769,32767,-32768,15610,32767,-32768,32767,32767,18044,14320,32767,-9500,31147,32767,32767,32767,-10207,32767,-29733,32767,29633,32767,32767,25664,-2286,-21532,-20536,11748,-4414,32767,32629,32767,-32768,-32768,13158,10280,14206,32767,32146,20181,-32768,-32768,32767,30276,-7288,28527,24002,-32768,-32768,-32768,11606,31673,-32768,32767,13336,-32768,-28860,30109,32767,32767,-18418,16540,18149,-30132,-27175,6317,297,4351,32767,32767,32767,-32768,460,32767,-32768,32767,16339,24401,2067,-32768,-15343,-32768,-14683,-32768,27738,-815,-31362,-4623,-32768,-32768,-15211,-25013,-13313,-32768,20395,7589,-32768,-32768,-2569,-23672,-32768,32767,-4678,-9538,-24036,-32768,24794,32767,-4502,15288,-32768,-32768,-4034,-19565,-32768,411,
-6786,-18955,4452,32767,-1247,-32768,-32768,-30995,-27618,-32768,-6515,-19809,20855,-32120,-32768,-14223,-29425,456,32767,23763,19791,1372,32767,11809,-31681,32767,32767,25198,-3075,-14330,-32768,32767,1304,8208,32767,32767,-23633,29215,-32768,12723,-32768,-10200,32767,32767,-16594,13926,32767,32767,-29776,32767,32767,32767,-28899,32767,32767,32767,32767,32767,32767,32767,1287,32767,32767,32767,32767,17654,32767,32767,30938,-23680,32767,32767,32767,32767,-27157,32767,-2111,5266,8501,32767,32767,31968,23868,12370,857,2463,32767,32767,28763,19435,16613,8533,-14045,756,32767,32767,32767,32767,32767,32767,-25375,32767,-32768,8193,32767,-32768,-7508,-32768,-19541 diff --git a/tensorflow/lite/micro/integration_tests/seanet/conv/conv0_input0_int16.csv b/tensorflow/lite/micro/integration_tests/seanet/conv/conv0_input0_int16.csv new file mode 100644 index 0000000..10762c6 --- /dev/null +++ b/tensorflow/lite/micro/integration_tests/seanet/conv/conv0_input0_int16.csv @@ -0,0 +1 @@ +-21423,25118,6251,-30971,833,-3368,-23687,-19353,-24145,32570,29345,29305,20333,10599,7799,29601,-7849,2102,-13228,-12404,-20583,-26461,-14840,18440,-21470,-8125,22895,-8960,31467,25630,-3849,-12466,157,-27769,-12733,27750,23812,-1333,19292,31701,9138,-6404,10857,-9284,-12675,-4222,19853,-11841,9863,-22849,12487,-7274,22004,11844,14889,-20921,21918,-5378,8489,-2855,18329,-30437,5760,18399,15203,-27672,19461,25123,27029,-18793,22524,-9720,13699,-14002,672,1517,-3702,24348,-3974,-7400,-2825,15337,-22828,508,4272,15062,16801,13110,9475,11189,-824,15723,-18879,-19103,13399,2764,-25030,-29328,6194,22694,-11412,-14027,5139,-22395,-7703,29985,20135,11280,-24470,16519,-8223,-24519,22256,30114,19986,6187,-31782,25263,-8442,18401,-16003,27873,21664,30490,15558,-22047,-30621,28698,-1693,29405,27040,32374,-5321,29982,-18639,16510,13739,-7133,-25945,-20757,-18260,-27464,29681,-24455,-23831,-27471,24846,2800,5737,11260,-1743,-20910,-9395,-3344,12039,32065,-25050,-15803,-24223,8594,19162,-12306,-11352,28413,16663,
-15029,26588,-21176,-9109,9617,20604,-12809,28696,-9429,13164,-24984,-8923,26731,-29449,-22521,-25790,11892,24345,-13877,21126,32020,-29451,995,32625,7461,-30893,-11839,-29858,2144,-23141,-13081,19774,-3244,-22035,-22747,23320,-31485,12718,20269,7867,2467,16975,17955,27131,25882,32158,22779,-28936,-12559,-12500,-14133,22131,-19454,-639,-32543,-17449,11441,4149,-30973,-1563,20265,121,30739,25469,22025,1716,-20645,-8344,25737,2826,28423,30803,32100,7585,-13215,-25450,23905,16669,5705,3123,-25577,-21014,23234,19023,7741,16358,13460,14689,3274,-7509,-15759,3752,23358,-23494,32005,26428,-32428,-11387,-14431,-6751,-14177,-10792,-10680,-3877,23456,-4620,1181,28274,-32240,-17758,9618,-10643,-9006,5214,-3252,-17714,25948,28239,7976,8577,27195,-17427,6704,3169,-11208,-2268,-6318,5056,26561,-22747,4432,16188,5918,32092,24372,18304,-31015,-8841,-25713,9678,-9449,-31533,16625,5408,31047,-23048,-15346,29693,-10854,-25795,7410,31587,5799,-5467,32328,2982,20372,-4250,22428,-10318,30355,-23585,26653,-7693,-1341,26203,23102,13183,16936,6958,-29365,-28404,15086,-28345,26473,17356,12432,-18561,-26715,-9364,-15619,10989,3656,9444,-1988,-19684,24967,-29967,-28670,3688,-19451,9243,-8714,13901,-27516,3255,28756,14068,-6240,30711,8538,13239,-25896,-13561,-17020,2871,12350,-24253,-31857,7141,30334,-15241,-11355,-29592,9132,-8856,2504,-8202,5484,2571,16137,30591,18206,9309,3519,-3145,1696,-27352,-10075,3272,-3284,-17322,14211,-684,18120,-7012,-15878,5003,-24723,-21357,-19260,17370,-17121,-9860,-27447,11838,31032,-21339,6159,-23567,23010,29374,-4689,13468,-29013,-15717,23623,8036,-22843,7667,-32689,-7968,19761,-4573,9859,5905,-10834,5015,-21446,-12207,9151,-26354,8438,-32079,18989,-6209,22262,29720,5346,12126,15164,-25575,31736,-14906,3325,-13422,-6175,8000,26125,21719,4475,-17288,9847,-31221,22832,-27130,20021,9087,-19295,24207,-10370,-20782,-15173,233,-18772,21521,21606,19807,-12556,-7337,3286,17574,-895,1464,-8218,-25278,24907,28870,5896,-11277,-9379,-22290,3117,21461,5094,-22763,9204,-1795
8,-13486,25506,-2305,12387,-16946,6870,15371,-560,4154,-29838,27426,7490,27339,-5770,4624,18496,17755,-15104,-21908,-25589,-389,29186,-1653,-7909,-13135,29990,6497,26544,-28653,-2040,-5963,21107,3304,27148,32616,-2715,-29005,-4673,-2462,-655,10991,-7087,7082,-10651,19544,-7419,-12802,-10762,2126,-17032,-24222,2938,12602,14066,13261,-16241,8100,-3407,-16343,-31177,-7544,-32768,-30129,6233,13889,-16895,-28954,19954,8341,-8474,-30655,-21308,31207,4121,9340,-19238,722,-14465,2171,-26031,28790,13395,-10531,-4660,-18418,5094,25255,14696,-20504,28392,-30567,-23642,-30310,-15988,887,-1638,-3033,-2216,-30447,-3903,-3593,-2122,-29070,-9621,-16307,-2828,17085,25033,-21115,-9101,18011,24293,31043,-10230,30259,19840,-14170,-21207,-1183,-16579,8177,-5824,1420,-28969,-3593,-23263,2008,-9946,-4376,-21710,9999,31196,-30206,-20831,16576,-29586,16572,14772,16267,16715,-21328,18447,21341,-16043,25480,-16911,-12934,16814,-22964,-5502,11102,2851,-17878,-24172,-18769,-26422,-18836,-12909,-14867,7490,17294,-13379,-29779,28165,-4157,27519,18915,-3102,15758,29762,4180,26588,7572,14277,23995,19588,-1990,5801,-3334,-14691,29717,26842,-10113,6958,-13908,-14126,5674,-28371,21428,-31440,10109,7409,-20134,5108,-3927,26473,-30208,16460,25093,1475,-27601,-9726,-5569,3995,7748,-10065,21393,11392,-30768,28457,29046,-2369,31344,-13041,-12292,-25000,3436,30237,-18334,-29201,-837,22262,-20497,29742,-7528,31600,-2267,17174,1426,-6237,-27852,13941,-3187,12996,-20038,-14284,31358,-27583,-32241,22406,9066,-25131,-23386,15631,14999,11229,-18570,-25512,28881,-10676,1849,22381,-26697,-6641,22707,25894,9938,-31583,-415,16916,-21226,9424,27320,-20133,-13270,17154,-19287,-13557,15301,5392,-26180,-17061,-9287,16215,12432,13245,-14757,-6761,-3334,6891,-15478,10873,-29726,19532,29300,14281,-14426,-8248,12561,16375,10409,-18839,-14641,-18264,-30909,-25968,29528,22906,13050,12020,2020,-2333,22922,25047,29963,-7676,23092,-19625,-21157,-25349,-24751,1074,-12468,-2518,20401,-4462,15398,6251,26469,-27704,14156,-19992,9547,
-23392,-28096,26167,-7748,4375,18032,4847,3505,31173,28636,-22428,23088,-26024,26570,30658,15530,17078,-5452,-2171,3445,-21075,31058,15645,17702,-18384,1988,25955,20960,147,-29903,18982,19478,-29177,-11944,-11645,-26389,105,31102,25027,-13772,-32648,-27181,-31795,-20492,9624,18588,31390,10478,11806,30041,14835,4694,1205,27621,-8853,-554,4862,11112,-27356,5176,19904,-13820,7944,27227,15338,15766,22692,20357,27127,-22912,27243,26786,9570,22508,26281,15613,1805,-22426,16607,-24088,-14450,-25012,21421,6949,7300,-10645,26203,-28826,-1398,-16815,27241,27551,-27179,8921,3959,17204,-28671,30920,-24739,-10492,4596,7263,-25751,12292,-10941,30422,17999,-6269,30199,-5379,30840,11329,-27099,-13094,-18461,6257,-277,15475,-13207,-15051,-25591,15047,-2176,4324,-23283,-20477,-30821,-12246,11136,-10716,14961,715,-30802,2829,24961,-25014,-30387,28238,6819,-22242,14057,-178,17255,3280,10199,2192,-14766,-29302,2230,14490,-18004,25381,12597,-3120,-4758,30752,-3492,3410,-26014,-30654,-15729,-15323,-24553,27528,20934,-29978,15256,-9819,-29739,-4987,-15031,-19000,-19279,26444,31419,-13613,18908,17851,-27483,17361,-22288,25728,6435,11940,7053,-6996,30372,29840,28796,-25174,-21347,4956,-20947,1366,-20333,20707,-2622,-8124,19508,19733,24240,-28839,-4862,-21318,-26050,-6524,4783,-24811,5526,22081,-30451,27392,-21149,20371,-9235,-16121,-17834,-18522,13226,-11963,26577,1197,5303,-17418,23672,30656,-2356,22648,-25336,-15867,1412,13961,-1716,-10059,-11785,171,-11824,6639,31934,23791,3233,-14394,-27933,-19294,-6002,-2011,5657,-23142,-30457,28476,29854,16445,14,2584,12162,-12864,6793,11946,18485,-31806,2718,2228,20433,-4707,12288,894,-3305,-17644,14074,-21780,26053,13220,-27792,-17951,29426,-14458,18125,11942,12004,-8676,31401,-22749,-29629,-24547,6185,10482,30872,3904,-13629,-19036,-22076,-16943,19127,-10426,-22323,24459,3368,-26290,26042,16946,-15618,5759,16385,-24350,6052,18967,13659,-19603,-4095,-20644,-10550,-4669,-6494,-20043,9781,-28087,-31020,-665,8299,-9734,7780,5257,-30195,-17137,349,-6356,
14441,-9124,13327,-2168,22833,1257,-23517,-31427,22857,18546,23463,-19212,-22256,-6728,-12701,-28428,18368,-1234,31966,-21975,19101,27091,16789,-31566,-27491,30294,5083,-11977,4991,-22637,20221,-18519,944,17136,-4265,-18685,32739,-20921,1785,-9245,-25653,1271,-7689,-5368,-23887,31619,-13322,-26389,22596,-17975,16955,-5925,29936,-10380,10463,27520,31675,26314,-26618,-19302,11775,-6734,-6132,16886,-10346,-15660,9993,-15559,-4588,-14223,21772,-1019,-4836,14367,-12945,-5976,6423,-12480,-31196,-8014,9856,-12297,12146,28431,-30670,25022,-21336,12352,4583,-26814,-28202,-20415,5638,-23056,-13427,-17831,30401,16739,19518,16153,7367,-15585,-19737,22306,-7859,-27767,32256,-5294,22246,-7904,32536,4741,-18138,22876,-31970,-24931,28165,-13091,10754,27948,-23540,-15639,-24464,23107,-29503,31788,-23240,-25569,-11668,17970,10202,-8873,-2644,-7583,-24034,31000,-18526,10938,10526,-19896,9451,-21398,16068,-30777,-12401,31342,-23257,-18192,-14660,27438,-30485,-15557,-7435,18342,-12844,-11170,-30259,-7513,-3689,29089,20300,-16878,-9308,1647,-29680,-27610,-9540,-25050,-27894,-18903,26217,-10884,-5571,-29077,16959,16023,6328,12386,24478,-13709,-11940,8445,-13347,-15755,25131,-29696,17786,11825,-13928,30539,-11814,-17684,-16835,-11497,-16647,-7212,29308,31417,10884,-23851,28077,10271,7565,8024,8405,16739,23843,-29349,-19048,-25743,22440,-7307,25641,-32145,-25673,-20469,-8621,27142,17569,17835,28765,13080,21377,20020,-9517,14124,31519,-22555,-3648,18418,31824,29711,8891,-2403,8052,-26091,19209,-12826,-580,17960,-18130,24470,25111,-1895,-17055,-13475,20677,10074,20535,182,-14444,-6704,4800,-10947,28545,-6867,-20601,28159,-4307,-24554,13670,-5568,6423,-5352,26404,-28276,32586,11440,-7160,-11966,-649,-344,3100,-24698,30276,19153,7909,11092,-625,-5052,-13932,26664,-26110,2889,4298,-7022,-520,15066,14580,-8086,-5403,-14211,23903,28117,-8731,-15083,-3414,7670,28024,-9653,24342,23585,-7025,-10680,32147,9491,-25200,-11287,-29456,-29952,-32656,-5276,25175,-26692,10215,21496,-26432,16849,30524,-11802,
-30929,-6983,31902,-21851,25138,-4752,-6449,-20463,-12400,-1914,8657,5644,17431,17629,-18673,29010,-24972,-2211,18936,19011,16952,11214,14718,26425,-31123,-31758,3806,-16465,3743,-11042,18929,-21927,-12065,24284,-20689,22898,10347,-23287,-25982,6652,-22684,14297,13567,20863,-3196,-16531,4368,-8535,3943,-2825,24759,-5567,732,15150,28112,-10947,-7443,-5253,-4835,-28006,26009,-12299,29999,-19603,-20407,-14657,-25478,4562,-31838,-25254,8483,-29289,13420,-14575,11010,-23676,-15021,42,5136,784,15537,5431,-11401,-28523,25596,21800,-12810,29052,10718,23247,-674,16275,31034,24184,25503,31147,-23158,-10571,26898,4781,-23178,6103,-14002,-15126,18597,15614,-30293,4945,-20292,18558,-17887,-21550,1153,28195,-17541,-21664,-1153,-4019,118,-3127,-18209,-32723,-22051,-5678,-22864,17189,25319,30146,-9969,-12927,22474,31435,-23085,14055,-14656,-15104,-16206,7352,5925,-29108,18488,24760,29285,-6631,10024,-31346,26381,-15664,-3778,-3133,28757,-12678,18185,-21574,22959,23400,23734,18010,2744,25874,32405,1559,4691,-5851,8028,7451,967,-27787,30674,-8313,-7661,-20013,-28500,-16688,26989,-22411,-6007,7381,4007,14842,-4340,-13496,-27688,-17071,19791,-8498,21776,32113,24008,24218,17823,18245,14124,-2387,30185,-10998,21472,7434,25642,29458,26847,12270,25313,-23607,-25735,-21306,-189,23791,-25636,15490,705,-2458,18635,22283,21299,28620,-17228,21763,16097,-626,-7654,-9233,-16596,32280,22931,25205,-6456,-20050,-7133,5827,-25861,-31135,15612,5572,26678,-31960,31061,30572,-10514,-1997,22776,27487,-16549,-19074,-21565,2559,19588,1147,-23948,-8812,-22463,24119,-9598,-23921,24262,-16651,4769,-25628,28144,20232,7993,17684,-18916,12913,6326,12257,-3647,24028,-32433,3888,-20877,27152,-1740,28061,-20927,11317,23705,24237,-7399,-3031,-18753,-26447,-22114,15657,-10184,-6711,28383,-3236,18384,17327,-9299,-15679,19144,30963,13649,-9646,-11482,25448,-27759,1990,-30066,-10076,-6872,-5253,6268,2919,-23723,-24665,21260,22987,13999,-2669,-10878,20114,-909,-11507,11229,12258,19774,-31199,10777,-21927,15939,28663,3005
1,-2075,3407,441,-29459,20972,22183,2546,28309,6750,-13334,18295,-10765,17324,24450,-193,12899,20460,1689,-14045,-954,-12190,6893,25527,-5168,13514,-5691,-16301,-17454,-23652,-19840,5037,-32667,23577,24764,31067,5509,-25325,29069,-10406,14321,-21073,-2851,-20326,-18203,-11529,-26772,-20527,22350,14279,22727,9739,15346,-22702,18082,-17629,-7369,3170,12912,-30917,3504,-18416,-1579,-14429,24954,32654,-27690,-31600,8741,24384,11735,-13501,-23551,28610,32598,-15616,-7779,-12101,15023,-24492,-7761,-25359,-31954,-10323,-30359,16888,11454,-12774,-1284,18440,4981,30976,-6977,-32142,12256,25773,28961,-26378,7668,3636,19797,9955,32317,-18953,-938,27016,19543,2430,2714,-16799,7419,24418,13986,-20070,26932,-6651,-11877,10250,-1127,13965,21249,19334,3821,21243,18764,22822,28306,-17716,-1182,-27988,9768,-18772,-24165,-18047,-24005,21980,-3948,16116,26930,-16998,-20507,-3975,-491,-28152,6889,-27231,11291,-2210,31432,11347,22646,-23736,1749,3361,7042,28756,28617,27111,-31206,-4772,32504,22678,2632,-19635,30604,23967,-12789,9294,8596,22919,5420,-8492,-27294,-19574,-26055,29606,18821,-6673,-4966,-30863,14721,14460,25625,229,-27521,-3924,-27205,-5772,3221,-28726,4779,-21091,21057,-31633,24544,-3898,-12891,-31010,7112,-6698,26913,-9119,3646,5754,6840,5448,-28598,556,-29412,14624,1480,8838,-5996,15544,-2845,5772,15058,28010,8127,27198,10306,21013,-24701,16452,21591,-28217,30608,7710,-29072,26736,3763,-16713,-19241,-22627,3769,29837,9316,16872,-12110,-23547,-23325,-6428,-28421,16754,31984,10491,13960,-15006,-11326,-7501,-18366,-21039,22597,-7796,-21457,-5800,19781,12871,-5951,-23893,8875,2923,-23404,-2173,15797,13105,-13613,11670,23569,12971,-15656,-17826,-19590,22646,18831,-5822,-19278,-22165,22534,-20812,-7774,-24939,-12244,-26838,-7428,21861,30346,13176,29802,31503,3796,-26944,29585,19737,-18539,-14451,-30378,-17689,-7143,-30428,-27453,6163,-24956,-18620,25813,6159,-9632,-31180,-12083,-2857,-19060,27678,-6059,6409,29106,-29275,17056,30862,29741,-9874,22402,5966,-21461,19420,1856,11289,
18595,11466,-31814,-10805,-18255,-22193,13398,-21154,-25927,-5742,-2197,975,10925,9338,27204,31340,-10031,1258,-4993,-12170,13335,16685,-19843,-21781,-19663,-12471,-31743,-25468,5624,-6254,-7860,-17787,26809,-21920,11266,22971,4129,-4169,16456,-29750,26175,30973,-11524,9370,27926,29075,22434,-21285,-31582,13634,26274,-2301,-12606,24990,25013,2860,-27715,19210,-527,-21319,22079,6763,7703,6577,2109,6366,-24326,-6110,360,-31753,16744,18955,430,-12319,18065,7810,-31982,-19676,-10227,2069,-32237,20994,-9628,-5343,-16252,8056,-22521,-30477,16483,5192,26578,-18659,826,17315,-1129,15053,14662,3242,-22333,9244,-14694,-30860,-28149,6956,-5459,-25364,20236,-19235,15932,-12160,-24154,-8125,-19744,-13385,2359,23053,-27933,11473,-26184,-15123,-12346,-13857,500,-12851,12112,-17084,-15268,-15124,-12556,-32743,-30250,-19110,11652,26233,-679,20578,28039,276,-10005,-3233,-6675,-11968,3061,-16366,18684,-9502,21797,3959,19351,198,-26901,-28832,-7369,-242,-17279,-31364,12573,-16266,-27585,-25989,-10133,7864,6617,-3098,-13927,-8645,16351,-18836,24601,-25634,-7480,-15841,-2874,-27410,-18928,26929,20403,1943,28839,-14148,2310,20458,-20796,9595,5731,28554,13832,4011,-20981,-23308,-11090,6382,-9057,-24150,13709,-2061,11990,-23182,-14668,-27014,27465,-13632,-15493,-23677,16613,-29798,-133,9062,-19565,139,10078,28974,-22340,-29468,-22495,-1,-16658,29156,-30808,-14637,10623,22028,-13350,-12713,-30323,31302,-16176,-28341,23163,-5217,3311,11353,31159,12935,13100,1466,7512,6139,-31892,22863,-17331,26438,20433,-23203,9609,24860,1502,32579,25057,7274,-4174,-1980,-12341,14190,7170,-12568,-32729,-9825,-29475,27122,-32341,17681,-13542,14050,-11598,-32506,-23822,-23422,-961,17977,-24047,-30800,-2849,25063,19872,-10593,30375,13586,-17185,21008,8827,-18008,-14189,4811,-27352,-11026,-5542,25450,18337,-21745,6700,12384,30932,20970,4329,-6289,530,617,22977,23825,-12212,-2781,-7857,21111,27429,7295,-26415,4695,-13187,-10062,-2867,-28263,-17121,-25876,25385,-28099,18339,30169,20563,-6890,-15298,13360,24157,-881
,-14604,25808,8275,27527,24721,-25094,-29536,-9640,22223,8581,-12823,32481,-14318,22333,-10815,30308,-19889,31378,23479,-31919,-16804,-30912,-4047,-15073,11731,1089,11416,-19834,2638,-17620,-28724,-27046,15839,14646,-4053,23732,32709,27464,9378,28049,21738,7356,-14101,28105,-25351,19066,18606,14610,32217,-6839,32442,-30851,-14617,-1279,11241,7942,-30083,17713,-3444,-29060,7718,17556,-30684,-3480,-30959,10052,11474,-4375,-21208,-18143,3696,-30289,-12152,-8286,18892,23503,13160,-22385,3976,22279,6080,22085,25463,28745,11571,17082,3406,-8393,19427,-26076,31171,-16059,25278,3772,18615,4355,-11939,25892,13757,24380,-27655,-8265,30920,28458,-8802,-28835,-24942,17817,-28077,-31523,-12392,26636,28389,6337,-5119,7281,-19720,-3956,11535,-4039,-151,12968,-30168,-23502,-31710,-21120,4935,-167,24888,31928,-8304,-6143,-20310,23912,30683,25191,-14155,27297,25815,15301,-30275,1829,-26002,-4719,-13001,16977,2881,22422,12946,-8092,-1446,-29317,-12297,9395,5702,-22524,-20697,16675,4012,-28145,-12820,32683,12232,-30358,-15003,-30912,-4717,791,-32266,-15438,-8999,15344,21611,-15913,5080,20144,8526,-30375,17661,14321,-30517,-20488,-32323,22754,13775,26074,-15062,1681,-18726,16628,-17832,20671,-12588,20751,-7361,-588,-27717,-26260,-801,16683,2766,-26972,-10304,-24900,-11615,18013,-24658,-17563,25057,13910,-3549,31889,16108,-31287,-2482,-27920,28001,-13885,8280,23145 diff --git a/tensorflow/lite/micro/integration_tests/seanet/conv/conv1.tflite b/tensorflow/lite/micro/integration_tests/seanet/conv/conv1.tflite new file mode 100644 index 0000000..8eab6db Binary files /dev/null and b/tensorflow/lite/micro/integration_tests/seanet/conv/conv1.tflite differ diff --git a/tensorflow/lite/micro/integration_tests/seanet/conv/conv10.tflite b/tensorflow/lite/micro/integration_tests/seanet/conv/conv10.tflite new file mode 100644 index 0000000..924ddc3 Binary files /dev/null and b/tensorflow/lite/micro/integration_tests/seanet/conv/conv10.tflite differ diff --git 
a/tensorflow/lite/micro/integration_tests/seanet/conv/conv10_golden_int16.csv b/tensorflow/lite/micro/integration_tests/seanet/conv/conv10_golden_int16.csv new file mode 100644 index 0000000..2ffc244 --- /dev/null +++ b/tensorflow/lite/micro/integration_tests/seanet/conv/conv10_golden_int16.csv @@ -0,0 +1 @@ +-19445,32767,31935,3819,-32768,-2050,-32768,32767,-32768,32767,11444,32767,32767,-13176,24411,-31853,-32768,-31141,2154,32767,-32768,-32768,-32768,32767,-32768,4972,32767,-11562,19625,15631,32767,-32768,32767,32767,23746,-14439,-32768,32767,-32768,11925,-32768,-18875,32767,20065,32767,-32768,-20091,-32768,-32768,32767,22479,5484,10293,32767,-3850,-32768,32767,32767,32767,-32768,-32768,-32768,14916,32767,-10446,-32768,32767,-15062,32767,32767,-3165,32767,-30470,6578,-32768,32767,-7182,-32768,-32768,-32768,32767,32767,29398,-19388,32767,-22955,-32768,14119,-32768,15038,-32768,32767,-23784,-333,-32768,1797,-32768,-32768,32767,-32768,32767,32767,32767,32767,32767,9972,32767,2839,-19059,32767,32767,32767,32767,-24768,-32768,18020,32767,15651,-30647,32767,19076,-32768,32380,32767,32767,-9709,32767,-32768,-20904,32767,-32768,32767,32767,32767,-32768,32767,32767,-1326,32767,32767,32767,-32768,32767,-32768,-32768,32767,-32768,-18227,-32768,32767,-7920,-32768,32767,32767,32767,32767,-32768,-32768,15862,-32768,25008,19894,32767,26316,32767,32767,-32768,32767,-32768,32767,32767,-32768,-32768,-32768,-7129,5994,32767,-32768,32767,32767,32767,-8366,-13093,-32768,32767,-3023,-32768,32767,32767,-32768,32767,-26733,-15492,-32768,4036,-32768,32767,32767,32767,-32768,-2561,31379,32767,-10668,-32768,-32768,11020,27452,32767,28433,32767,-30076,32767,-19985,-32768,32767,19394,32767,-32768,4789,32767,25804,-32768,24168,32767,18047,13451,-32768,-12630,-32768,15778,1150,-17514,32767,32767,32767,-5357,32767,-23050,32767,-32768,-32768,-32768,-32768,18663,32767,32767,32767,7576,32767,32767,27951,-32768,-32768,32767,-8639,-2389,32767,-32768,2397,-32768,32767,-32768,32767,-32768,32767,32076,
32767,-32768,32767,-7869,32767,-32768,32767,-32768,-20529,32767,-6533,32767,32767,-32768,-1453,32767,-32768,-32768,-32768,30767,32767,-32768,32767,2179,32767,32767,32767,23056,-32768,-10652,-32768,32767,21850,-32768,-32768,32767,32767,32767,32767,32767,32767,32767,32767,32767,32767,-32768,32767,-29952,-32768,-32768,32767,32767,32767 diff --git a/tensorflow/lite/micro/integration_tests/seanet/conv/conv10_input0_int16.csv b/tensorflow/lite/micro/integration_tests/seanet/conv/conv10_input0_int16.csv new file mode 100644 index 0000000..cf98bdf --- /dev/null +++ b/tensorflow/lite/micro/integration_tests/seanet/conv/conv10_input0_int16.csv @@ -0,0 +1 @@ +-20370,-5718,32669,-18714,8781,-17911,23832,-32609,22242,-17651,-25106,23121,-27042,-4258,5327,14852,-29619,-7314,-10908,-12398,-10831,18917,27663,-7855,-31930,-29180,21154,-23224,-5424,7713,10382,22945,7340,3961,-18223,-4679,31406,-13296,-9113,-8388,16202,28056,17904,-9434,15512,7921,-31272,-283,23896,-3776,3242,-7965,5745,-13066,20009,-5405,24638,-10949,-29805,4702,32637,-25488,8700,-24547,-17882,5478,-31487,-1106,21740,-5673,-30923,20819,-15632,28553,31308,-704,5099,16710,21258,18335,-14054,29848,4587,7589,-14054,-6556,-18727,-2793,-9027,-5242,-637,-12267,-15339,-27699,4687,-15657,15447,5616,-5764,-5545,-2951,20406,-8120,-19478,9987,-7231,3762,-4777,-27075,28727,-32482,27706,32074,25702,7879,-4607,16772,2681,23637,-11650,4407,-24870,10930,19210,26814,17364,-21031,-5487,-32647,-26119,26245,18935,-16011,-21345,6250,-6798,28318,24218,-14754,20530,21567,5923,-14483,25201,183,7492,7449,-25156,6431,18243,-23656,3012,-28395,-7242,27290,-23479,25753,29173,23157,-22584,7447,8712,6030,9573,-9028,-8302,-28506,18707,25516,12023,11017,28157,215,-20900,-12051,-32507,-13519,20866,-11178,-22936,4191,-30116,29342,16432,-24696,5842,358,-20300,-30022,-20737,-11162,22614,-10824,11876,19729,-8799,-17663,-6707,-9039,-17322,541,-26149,-21143,16333,-28636,9815,13181,30804,-13603,-6199,7997,13973,16482,-20232,-5735,31941,-1755,16154,-22456,148
57,-960,-7900,29351,18303,2653,32038,25187,25509,26841,-20387,-6517,21568,-32368,32096,-4982,-13114,-17113,21050,-24005,-23552,11960,2571,29030,20380,12950,7718,-31274,-3681,-28858,-21137,-20360,-3791,-19807,19905,-13426,-26379,-14032,-10261,-21735,19076,8444,17017,9550,32199,17408,-21984,-23167,12419,4738,15790,-13127,30280,29911,3953,-29292,-26262,-19693,26959,-25353,7648,-25890,-23455,-20026,-9784,16099,4618,-7279,23163,-23999,-19008,-19484,-7107,27215,19650,-5732,-25723,14337,-3458,27276,954,-9353,-8294,-10246,16746,17430,31466,23060,14381,27893,-29542,1232,31290,-24971,21023,-23848,-848,10122,-11048,26369,18239,-9514,-12561,22723,-10915,20817,13781,24023,-7551,-13600,-950,2074,24997,-8300,-24816,-8929,27078,-28686,22327,-25703,21135,-11083,28744,-30658,-2395,25372,-11871,26067,-27241,17797,22646,18251,-31725,18460,14913,28055,-1089,2608,-28568,-3310,-23825,-8411,27862,-7622,30643,-18292,11922,-14083,-23160,32407,23582,-15462,-8125,-21318,6666,-16603,17746,25376,-32203,-29491,22285,-25633,28331,32125,-24105,-27001,27045,2565,-13834,-9045,-27494,-9596,-27996,12054,20988,-10374,-11330,-30626,-15745,-1077,28719,-13202,27048,28540,-28440,1022,2580,13867,18759,32084,20065,28080,-7740,18993,-5197,-5719,28695,6052,26761,11477,24796,32634,11387,-880,8921,-17301,-2191,10024,4534,27779,-31126,-7089,30717,3388,-29450,-31880,16481,-29476,-21840,-6535,-2151,-11492,-5090,30580,28983,31928,-19995,-19108,29306,-16853,20918,-6903,25853,-8444,-6563,-30295,32435,-26937,14550,-4646,-18965,-29015,-19480,-3248,26144,-25183,26714,-23792,-12276,23817,17763,6914,-18089,-1635,2307,-4174,268,29707,-1349,-24665,9502,-24642,-18902,14828,-2992,21279,-21563,30099,-30738,-28821,14093,26461,32039,-805,-4998,-17127,-25072,-1283,18859,-26056,21091,-99,-18114,-29474,-32561,-27331,-31040,-11181,28243,31074,10221,-3583,31049,-21461,29079,17341,10288,-13785,-20942,8381,9352,19380,-30085,241,-1176,-30168,8404,-11636,15321,-2102,15399,-17147,-9728,-8592,-12250,30460,-2319,5796,23780,-8440,20366,22208,-
21881,-24236,-2835,-2760,-18313,28291,11864,-7058,-11294,-22575,18718,-20278,23008,5039,-9685,-5750,-31135,9331,-26026,10063,-11862,-18325,17339,-6038,24472,30519,7568,5113,14367,-18868,17069,7032,1921,26825,-26682,-11984,15,20379,1250,-32474,20086,-31789,-31652,-20835,25887,-3982,3752,22636,-7967,-4161,-26395,29819,-8645,30873,6547,-25497,16420,1900,7683,-21769,15933,27723,12650,23351,-18206,13048,496,-25425,-19899,-19865,-9518,25381,31626,14665,1763,-27885,-2870,-17535,24098,-30029,-18719,25256,-26638,19894,-24927,-4680,6293,8469,-28267,-28622,-2209,15143,9241,25002,-11357,3412,27445,-15478,-29956,25340,23855,205,19062,-19408,14814,-26657,-27589,-3664,-21369,149,-6000,-4679,-1727,-14998,8083,-16897,5724,-10948,-2969,27344,3889,-10071,-4634,-15943,-8224,20153,-31643,32707,26376,-25445,-16041,14811,-12073,-11377,-13490,-661,-17652,-15732,21771,15999,21038,-23039,15962,-30488,13109,-16136,13444,22565,16993,29490,-18343,-25126,-25764,-30248,-23135,-30889,-32691,-28423,-21446,20778,-32650,19793,3551,-11176,15329,-16745,-24720,21949,30555,-11778,-1715,6620,-6412,-8543,-3932,6840,-20309,-10455,-31708,-3305,-29711,18541,17036,2190,9194,-26123,-29328,4831,7005,19235,-13694,-10218,10998,-25416,-22507,62,-15151,-21261,-23119,21394,16716,9290,4555,-1259,-9956,-31479,-15708,3833,-12179,-19271,26184,-22856,-32578,-9711,-23414,30608,-30808,1824,21053,-27452,-16537,29885,-12023,7315,-29607,7060,-6394,-8871,-25968,-26358,-9024,-5133,-29727,11492,804,-28559,21465,-26473,21726,4120,-847,-32302,28691,5031,14998,12123,-10962,13563,-4018,-30144,17855,-19209,-9147,29103,-5242,30437,-7828,24185,4771,-29590,28914,8918,-10070,-26697,27052,-2235,-11769,29193,12006,-899,-15053,4765,27362,4383,29689,-8603,19849,-6523,-12003,-30322,27634,13259,6533,29182,-20260,-31833,27636,-32202,-6991,30580,-32298,-1414,8509,13779,-3094,7184,3936,-32003,-24531,29750,-7595,-7280,-342,19054,10576,17727,-22204,8641,8499,8025,27883,15098,-2212,9006,16077,-18041,30012,-8808,-13909,-30724,15295,-27569,8049,-16579,
-25592,12907,827,-8826,-307,-17381,19755,26050,12235,17948,8463,9649,-8357,19567,-29865,16791,8519,12858,-19538,22377,-21566,-27073,-10680,197,-12997,27083,22708,-25097,16255,14382,-3363,1588,-14849,11543,-24145,12979,6411,16263,25620,-18026,8196,31649,-12387,-7925,-13758,-7213,17560,-24924,19167,21569,-20370,-5526,-18313,-3752,-21112,23941,12220,11566,26162,24514,-25969,-4671,28619,-21582,16988,915,-2069,-31908,7014,16953,5625,-13309,23750,-4541,6816,31613,17820,-28069,-23836,-14548,1615,-24875,-17675,-18998,-13568,-11156,-4461,2007,-23685,10979,-30465,-29365,-13748,-24161,-4865,-27430,22376,-5717,-25213,-804,11750,19870,-3851,-30968,-24117,-31734,31181,32154,30158,-7149,3596,-12173,5892,9916,-31851,-15906,30803,21650,-24869,32243,-31685,-18823,-26088,22009,-32227,-5311,-22389,29367,-5148,-3867,-384,8232,24828,30451,8286,-22046,17246,-31225,-29814,25031,-13296,26404,18103,-5145,-18627,-16240,11966,-24726,-27838,-1969,-7257,19164,10536,17894,-26935,32112,18042,-19973,-28929,-14441,13034,-18768,-1128,15177,-11513,11644,4642,-13847,10470,13187,-562,1206,-17224,-21300,-23733,-21255,-18835,25818,28526,-21619,-1895,-25999,19924,10660,30806,-11718,-10247,-20503,-24523,6289,21452,11332,-14578,31306,16111,-31163,8987,24455,-2566,12792,29240,12558,-24359,-7976,-22300,-29578,7984,-5121,13154,20119,2094,-21550,-25266,-27585,26185,-27416,10855,18525,-5321,20606,4690,21569,-19819,-3825,28068,-16280,-9192,7173,-24129,-16502,17426,-6907,-18334,-20635,25312,25139,6359,-5428,-8221,26336,9860,19302,-16085,-29023,-19605,13902,11161,-17310,3954,19985,-28647,-3145,3733,31315,20533,18563,-3994,-20396,24490,8797,3831,-22373,-16402,-13294,16125,-20646,16218,2659,19349,23753,-4981,2559,-24462,-30787,-69,-2427,24082,-18524,-10929,-28594,-30859,-26472,-29271,5633,28135,-26529,-31692,-3084,26400,4481,-5107,25741,-1876,-10482,11386,-1600,-26891,-23536,24150,-10574,-20648,-24506,19264,-15313,-3455,-22387,8688,28072,30913,1522,-30081,6491,-8794,-3300,-25509,-10350,28491,1080,-22072,10625,-18678,-
20519,-22467,4639,267,28841,3947,-7361,24726,9463,-7291,-23279,24829,12446,-3913,-26702,-10461,11132,-10737,19084,-3175,-29191,6905,-12828,12063,-11050,-23538,19021,11858,28789,-26132,29621,17120,-13074,-6420,31093,16383,-5413,-13123,-16880,8087,-3907,13138,19397,13833,-17982,21965,-18561,31395,5620,31855,12956,9320,-7624,-23805,16349,11246,-18614,-4495,-22528,20176,21708,22916,-12319,-30765,31103,-27460,-7858,-7421,27601,-162,-9040,1476,-6862,-18007,22422,14065,16722,23716,11275,-9011,-2739,-1488,-7293,-32195,-13988,22498,1915,17440,-29553,-8093,-10150,27662,-143,-31226,28116,-23995,-25485,-4877,6159,2922,-17259,5848,-13266,22090,-26858,12595,-6727,-16934,-18120,346,114,-13817,-27502,18717,30882,-19914,-28173,31514,21552,6983,-26974,729,22829,-23790,7245,32463,-18332,-29810,27693,6346,17329,-14520,-28930,-4603,-1172,-21447,-15420,29362,28152,-21891,23304,2202,25171,2578,-12340,-32370,-21010,-12693,28014,18969,-10399,29647,-9543,-1203,24749,-1033,-12068,18734,29685,-7240,8398,-31338,-1889,-12711,-28252,-26791,29456,-16294,31842,4873,-7675,8984,-4088,24027,5618,-4485,-31496,-9059,16741,-14094,3505,32685,-6753,25298,26563,-22432,-14115,-31422,-1553,13593,-6707,7245,13859,-20855,7073,-10496,30270,-7368,265,19238,3380,-18902,-24576,5119,-756,12597,30136,25222,-27136,-6410,-10927,23838,-17205,2594,5466,-11303,-19606,-11922,8044,14265,9117,19578,-2571,-8694,860,24964,-12770,-24551,-17655,-17374,-744,-13177,-15555,-31518,1804,-30015,-9295,-884,6855,10309,14257,-18192,21155,-26929,-6892,-32030,15507,12989,6181,-26443,25362,30524,-26749,9628,31290,-4378,15302,22816,-20111,-15615,-29691,-1287,29646,25882,-4397,6513,22661,-7338,28062,21491,-24464,-19901,1787,-25509,-12576,-4875,-4853,25160,-30458,-28075,24036,-31409,-21411,4155,24987,-1506,-19321,25689,-21490,3101,12495,27452,11327,29951,-11576,-23027,-26018,12335,23220,10356,-13984,21906,27572,6416,-9492,11721,-23134,-11851,11069,13383,2379,-8090,-26888,2762,24171,-2600,29032,13782,-31109,1421,-17567,-6666,2279,-15281,28486,-
10732,30670,28571,28746,-19247,15049,8598,-19831,21224,-23020,30728,2033,26305,-1705,-17493,11406,-12501,8443,19191,-12529,-9223,23877,28349,11299,29454,14041,29396,-22322,-8117,-20082,3664,-30024,23622,20542,16032,-129,-3757,-7331,22543,29504,-6901,-29990,-8700,-19696,-31269,26483,-24954,-12059,22901,11207,-13244,-15839,20498,7314,-16816,-18233,16153,8605,29735,19,7244,15501,25479,8008,-2385,-19542,-26989,-15042,9982,7374,-27837,21726,-24911,-20727,1860,7847,-4406,-24241,9916,-18178,-353,-24245,17962,-12442,-12627,-22441,-19400,29442,17244,12156,-15158,32643,-2631,26232,-21767,12655,-8120,-8347,-16845,10409,-12691,-28689,-9277,-9558,6268,-19957,26122,10146,6732,28447,-12576,26235,-26230,-24873,-9822,5708,30622,18956,2739,-16572,-1529,-29193,9990,23111,-17630,9594,13721,-3480,23129,-1973,25695,-27560,26774,23880,-23989,-11762,19547,10775,18570,19167,15124,3036,12911,-27404,-10617,-13620,-20630,-26189,19421,-27671,14532,26722,25608,-26136,23344,29874,19521,3527,-19378,16823,-21598,3770,30993,-17485,10995,-7399,-16910,9996,-19519,-12307,-25099,17066,28501,-20913,-16671,-24766,-9886,-16537,-3758,13224,-10979,30726,20315,-14186,-3735,6491,-10152,-4879,-27115,6160,-10533,12883,-22567,-11846,22249,-6707,9899,21744,18024,20215,24979,20803,-16276,30295,-4751,-14482,-17926,9540,20076,20151,-20961,-10162,-14236,397,-21438,9607,-8264,-9156,7007,-17173,-464,18424,-17962,3971,-25748,-31458,30792,-24224,12665,23228,-17568,-24829,-13833,-5622,27943,27872,-962,5245,-14186,-11896,-2863,29445,20431,24122,-12989,-15218,12399,-16107,10573,23191,-31416,11522,-11269,-12756,26195,3074,-5567,-5147,-12901,21600,18743,10799,-2121,-30695,-14769,27042,-9210,23009,8168,-14280,-11560,-17362,28922,8728,-9940,22756,17282,15166,-9821,-2701,-536,-3919,19524,20515,20195,24999,25803,30479,20276,25497,29349,27149,17975,-13333,23979,-17993,31913,11793,7144,-25030,-5889,17318,-18976,-29447,1963,8518,8362,-28962,4908,7866,-9744,-6822,25990,3896,-2613,11737,4610,-1336,-2034,-7344,-20823,-17603,-18330,15094
,-11310,29847,-5706,-24621,-32090,9585,-29981,19597,-13775,-14160,5946,-18622,5020,-2738,31471,-7952,-22906,-3021,-30517,27120,25129,30156,-31628,-8540,-26320,-7782,1157,10934,19298,3832,-12793,14291,-13767,32030,27633,-7888,27455,-7294,-12032,-19763,26407,4544,-21632,1398,-31002,-17574,9177,-12472,-24751,-24238,-23407,-18088,-17921,-23868,4789,-23534,-12905,11909,-19163,3681,-29044,-30495,-15111,-18353,13326,-10588,-31162,17395,-2754,-31167,-9386,8078,11366,6689,-9556,-23698,-11333,24267,-26884,28199,14286,-25772,12687,-397,-22662,-3606,-18710,10892,6410,29643,-12315,-7907,-11303,-15435,-18721,17875,1824,-29918,14108,9373,-25822,8203,1840,-11597,-4522,-28817,-22039,19505,-26456,31429,-18268,-18420,-19869,-6357,-429,18113,-8661,15883,19895,30369,12288,-29285,31811,-10983,-22073,24535,10011,3938,12377,31126,278,20533,9604,13742,-27428,-17207,22617,-18629,20594,-32097,-10479,-12104,-8507,12366,3263,-21989,-13052,18000,-23192,4717,-3508,-32026,-15468,11265,5840,-26582,5275,13544,20150,-18678,-26973,3806,-24145,28889,-6382,26728,-4787,-32438,20135,-2430,-19876,-19853,-13633,30041,10754,-8134,-18007,29036,23516,-18102,4524,-6601,-4548,2135,-5771,-2245,-16976,28945,11581,-27238,-7154,19238,-15769,-10404,31569,27311,-8283,24534,31832,-23239,-28567,-23862,-26691,29742,-13801,-8096,-12179,7040,-3110,-18421,3279,19747,-27647,32132,-12806,-13375,-18983,31268,12649,-13003,7080,-19556,-20826,-13452,-16732,-18206,-834,3655,-22604,20703,23828,12789,25193,-21195,6769,-22544,-29834,20130,-24840,22649,25193,30313,-18751,-12025,-23890,-12251,-14258,-30868,-10874,9255,29798,30906,2166,-17708,-22978,-23428,-26926,-15447,25984,-24793,-17710,30218,18971,22139,26076,13976,-11718,-13386,21179,-31894,28711,-6443,18286,-25884,-12348,-9593,10553,32100,-10039,169,17583,-26550,27403,22244,-13469,-26875,-14047,-11677,18706,17823,-16048,30765,-7194,10401,7997,-3638,23693,-30985,17235,29503,19112,21981,27460,-10758,16705,-20486,5783,899,-4267,-32376,9063,-17858,8026,26989,-21340,31753,31039,-47,646
3,-26172,28825,11509,3496,-22797,5583,-22128,-2710,-650,-8290,-31198,-11882,13213,-14910,-14694,-4057,20327,-13793,-24745,-18107,-10938,1973,-4731,-26692,-19995,30415,-17213,-19675,-25334,-9246,23162,-15483,-12169,13561,-6178,-18181,-10725,-25851,-1050,20116,7049,-3168,32287,4628,-28957,13495,30488,17217,-26887,30737,-24571,-19837,-1020,25836,253,32239,25890,27058,-30558,-9488,-29920,-20637,31980,17556,29803,15035,-18004,30460,24171,6114,18463,-4002,3348,1003,-2032,-12028,20690,-8165,19014,18313,20827,27563,-22896,28384,19101,-4699,15764,-12855,-28517,-31427,27206,-29229,24265,10219,19530,21391,4370,155,-19077,-5385,-14226,12587,2420,-21841,19085,-30621,-7164,14348,-18686,32174,-32726,4401,-1984,28801,-13688,27586,-17917,27587,27321,-11976,-25224,2537,-3965,-11000,29479,4628,-19887,-28444,-11884,-11700,-25301,23646,-23683,-21369,-5347,-424,-26807,11930,-535,23623,-8466,12251,-17638,24548,-12188,-30074,-6254,27160,1904,-25796 diff --git a/tensorflow/lite/micro/integration_tests/seanet/conv/conv11.tflite b/tensorflow/lite/micro/integration_tests/seanet/conv/conv11.tflite new file mode 100644 index 0000000..ec81786 Binary files /dev/null and b/tensorflow/lite/micro/integration_tests/seanet/conv/conv11.tflite differ diff --git a/tensorflow/lite/micro/integration_tests/seanet/conv/conv11_golden_int16.csv b/tensorflow/lite/micro/integration_tests/seanet/conv/conv11_golden_int16.csv new file mode 100644 index 0000000..9c5bbcf --- /dev/null +++ b/tensorflow/lite/micro/integration_tests/seanet/conv/conv11_golden_int16.csv @@ -0,0 +1 @@ 
+32767,-22186,-32768,32767,32767,-32768,32767,-32768,32767,32767,32767,32767,-18076,8062,32767,32767,6702,32767,32767,32767,-32768,-32768,32767,32767,32767,32028,15724,-32768,32767,32767,-15535,32767,32767,-6284,32767,2284,32767,-32768,32767,32767,32767,-14426,32767,32767,16936,32767,16795,1,-32768,32767,-32768,32767,32767,32767,-32768,32767,-32768,32767,-32768,2273,-32768,32767,-32768,8114,-11799,32767,32767,32767,32767,-32768,32767,-25637,32767,12383,32767,-32768,32767,-32768,32767,32767,21481,-8325,20261,-16859,32767,-32768,32363,32767,32767,32767,32767,32767,32767,-4190,32767,-32768,-32768,12071,-32768,-32768,-32768,-17074,32767,32767,-19670,-32768,32767,32767,-32768,32767,32767,32767,-22186,15077,32767,-32768,-32768,10413,-32768,32767,-32768,32767,8578,-32768,-32768,32767,32767,32767,32767,-32768,-27792,32767,32767,32767,1309,-32768,-32768,-19643,-32768,-32768,32767,-32768,-32768,-32768,-32768,-32768,-32768,32767,-32768,-32768,32767,-21163,-32768,-32768,32767,12053,-32768,-32768,-32768,-32768 diff --git a/tensorflow/lite/micro/integration_tests/seanet/conv/conv11_input0_int16.csv b/tensorflow/lite/micro/integration_tests/seanet/conv/conv11_input0_int16.csv new file mode 100644 index 0000000..ad225c7 --- /dev/null +++ b/tensorflow/lite/micro/integration_tests/seanet/conv/conv11_input0_int16.csv @@ -0,0 +1 @@ 
+-27620,-1294,-21561,-965,-14470,31191,-20345,-22384,21546,-3718,-9160,18125,28434,-18300,23415,-20105,22523,1953,31519,11252,2151,-20424,-29697,-22478,-30610,-4689,29709,14177,-15018,-11345,18213,30159,-18136,-29172,25962,-346,-24029,20237,5195,-8556,9797,-5291,1422,-15840,-4640,-12574,-27175,-4199,-7490,-26441,27587,-31697,-19374,-19736,5793,17551,-1761,-19187,-28115,18618,-22538,24129,6969,-17917,-868,-28295,10654,6500,26032,-19566,-5565,28466,11469,-31466,-28180,-20364,19919,-29628,3798,-18767,-4040,13099,14318,-28150,22682,3319,12123,24433,15392,-28164,1030,-20804,-2973,19555,-20010,25012,17624,-10697,-24705,19534,31649,-15480,25732,32597,-2067,16925,-11258,-15108,-3929,-29678,1648,-23873,-383,-11758,-6655,18554,-6750,6146,-24748,17526,-5025,-23245,11284,-23475,-2405,22935,32490,-18129,9361,-26805,-17321,28843,-14081,-13775,-24085,-12986,-25110,-31570,-5415,4589,-24456,-13679,-20288,10467,-32533,5884,-6714,-15589,28893,9130,10950,-8811,12066,19592,12783,26750,8761,13487,25995,27219,-30260,25050,-26266,-11037,22830,-26667,-8136,-6705,-15995,-15671,-915,-12734,-16069,-14859,-28942,-26585,-26516,-17005,11562,30369,-6246,-12670,-16724,-24359,-11985,-25927,10729,28029,13708,11460,-26145,27292,-21410,-11995,21398,-10331,-12069,22728,11792,14634,-9551,9100,-21903,-22933,24361,6409,14034,-530,25138,-10215,18085,-14407,-4243,11121,-2863,2031,27045,26658,13207,8813,-27423,19155,-3774,28403,-26124,-21055,7887,13555,24757,-24569,24309,-7853,-30024,-27834,4743,15480,8726,13848,28894,-9999,12094,-23695,27372,-9491,-8069,-21304,31704,-21321,1351,1058,2346,-4573,20645,-28919,17228,22420,20438,-19077,-8092,25088,-3775,3833,-13114,-25467,-16767,30715,15916,-9380,-26702,20290,-11943,1789,28375,22201,-9461,-19478,-1000,-21280,4934,-8029,-21268,-26652,21134,-16874,19948,25546,30718,-22786,-7441,-702,-23653,9510,27180,17962,22736,-11476,24015,-28196,-32609,-30515,18838,664,15104,29345,29155,-24390,876,-15235,21552,-6027,8259,-25002,-2368,31216,-4574,-6260,-18714,22654,30439,25196,86
02,24270,-2039,-9729,-27750,12583,11019,32544,5606,1176,-9900,3744,-22560,1775,26893,1384,-24996,9773,-12671,-12375,-9654,21958,20751,17001,29083,-23413,19665,27407,1329,27869,-16769,22,-28454,-23580,14159,29595,10376,-29958,-27310,17954,30172,11074,3683,32397,18637,-4394,-23206,-27165,21617,-13112,-10500,8376,6921,27954,-25763,-22227,2592,30220,17648,-24806,30621,-6930,-28157,-19523,24876,14184,23649,-2236,7146,-5334,21715,32198,558,23030,-1531,18779,-1030,25568,16240,29571,31649,-26200,-23896,7719,19848,-13916,-15954,-32285,-29304,6683,26013,-561,30412,-14967,26466,-31181,10177,-28267,22667,25321,-1384,7819,-25521,32628,-4578,10492,30835,25533,-25697,6617,-32566,12650,14265,16325,25506,-32027,-24409,-8558,12205,-14056,17193,168,-8489,-15482,18589,3592,22052,-19154,9371,23655,-20950,-8649,-17815,-29445,-18119,-23790,6141,-32724,19924,-30958,-32364,-11336,-26467,-20306,-16224,-9208,-27242,-30484,16054,18910,18962,18348,-20841,-16586,13497,14769,2278,12466,-12854,-29722,-10875,1273,5806,-164,4435,30278,-28698,15387,-26395,28111,26808,-26145,12665,-13956,23997,-4863,31706,27803,20678,31994,10146,23303,18724,-15275,-20641,-30160,24047,24886,2154,-14972,10484,24457,17922,803,25114,9515,-13523,-25452,26425,-27151,15855,-28511,-26002,27348,25393,-25058,-2454,30493,-22832,-3156,21350,1824,9250,-10251,-2057,19606,22973,-32008,-1446,18784,-2253,-18083,24358,10106,-15677,5489,-5919,29891,21056,7472,-3772,-16234,14512,-26858,20303,-8151,21406,28500,23022,-18139,-14758,16110,-17986,30265,-29560,-29367,4861,4283,379,-2848,10820,22233,32220,-14116,-6827,-139,16641,-16891,-1343,14819,-9083,20334,15487,30574,-17828,-16001,27690,8400,-29339,-1944,-6964,-6820,-29525,21011,6570,-23465,-28685,22178,13012,7947,9373,-11010,-12406,8289,2427,-28255,-21766,29828,-28764,-17386,17085,25365,24105,-15003,-17366,29848,18827,12875,-25618,-3016,-21417,8879,-17386,24706,-31793,24592,-2005,16511,-5790,1616,-15062,15854,-5842,19566,-30186,27190,11695,-32436,6560,13881,13690,-4865,25246,-26395,886,211
07,-4887,20190,-8256,-26317,4243,-23525,-8292,319,6131,-28399,-7828,-31186,4347,-23454,-22321,-22014,-6778,31914,-26817,-26789,-21688,15327,17522,5524,-9337,13951,-22950,-32333,-17605,25507,-9014,-26013,-2951,27118,-8340,7680,-8949,-12527,-19579,14447,-29855,-24446,13699,8374,28547,23807,2815,18473,-12849,24411,-10547,-11586,20561,-22456,-23738,24130,1800,-107,24674,-26146,-19368,17292,-20008,8928,9076,-24365,-176,8877,-32052,14445,-9510,8391,11082,-21579,-28823,9305,19674,18650,1153,5275,-16342,972,31737,-20209,-23024,-25271,-7727,10954,-7812,-31002,6724,7212,-17254,568,-15291,30387,-18112,18617,12095,8375,10011,-8238,-15102,-17315,8471,4202,6948,27435,-7752,-17224,-7479,-9054,-30586,26323,8774,-32260,-7595,-19639,10973,-16549,-5046,8298,-10717,-15301,-25858,25225,-9644,-3242,-24184,17899,-32118,28148,-15326,16434,-22403,27434,17554,6910,6808,15506,10595,-15046,9764,15239,22292,-7707,-27110,-20892,16945,22827,-14552,-13823,-11882,-14022,22921,28605,-22237,28387,-14948,-18108,4107,27473,19269,18733,-24297,-16062,-18021,24797,-2325,24381,5321,21258,-29815,-24413,7933,24774,-22510,-16867,-17859,-15456,23709,170,20677,19971,23146,-28719,26658,-8037,5687,-19206,-21721,-29015,-30203,-141,10852,16842,17433,-28788,10313,-31018,1377,-3616,30877,26674,-31812,6636,6661,23665,17097,-16671,-24616,-18355,32123,-31199,21205,21360,-32616,-30984,9965,12177,23066,-11888,30750,9776,-18643,19488,-3177,-16246,-11517,-28628,12546,3791,-3895,5933,8220,-18867,25711,31024,-10741,27992,-30667,-26359,-10101,-27799,14669,32580,22939,9586,1530,12815,18285,25611,1670,1366,-16436,-13217,8821,28633,-7426,-30039,17337,-8647,17088,-16520,-20473,15057,-7063,25433,-4483,-12976,-25431,2285,446,-10398,-2831,-24684,-25466,-19315,20516,-19614,4888,-2096,-8398,26044,2791,7901,-16367,-26697,3633,-2940,22907,21098,-355,-3820,-21407,-2755,2826,30469,-7003,-20628,-14569,9418,11845,-7556,-27137,28571,14960,4007,-723,-2006,5447,11929,-1265,-28672,26162,-653,13158,3611,2255,26879,-18934,-25067,-6466,-18489,-1500
1,-23671,1436,-2353,26246,-891,28452,24248,23484,-15802,13833,-26426,2867,24534,-18575,-17737,-9914,9236,11644,5628,-7008,3164,-17399,-16538,-238,5635,-12403,-20402,-10774,-29507,-1132,-6740,9179,-11834,10397,24451,-6878,-5809,7045,5113,-19252,-15282,15600,-15865,-1018,20901,-27718,10698,8187,26167,31382,-22961,-12558,11661,-3080,-15340,32436,-16128,-27394,-21611,-13584,29009,-28269,5946,1907,-16125,-31239,-2956,-25769,7532,23769,14597,-3952,23952,-12194,-29696,31976,3802,-26470,23982,4266,-1592,-29029,20135,-22544,23013,-17468,17785,22876,16651,-23029,-20961,-31540,2824,-20976,-7222,-22244,-23550,-15905,-26906,5615,28193,2119,22094,12506,-24610,-30258,8651,-28049,-887,5262,-25665,31146,-31028,6002,4643,-5405,-27172,16203,12726,32085,16943,-3880,20823,-6423,-30833,16396,-4530,-6607,-25993,13950,-23679,-5388,-6538,-15447,31221,5207,-26438,-17293,19182,-25294,-31660,-23062,11239,-20006,-16514,21755,29448,26311,-18616,12553,7287,-6143,-14770,18705,-13081,-31330,-8784,-20308,9034,-16221,-29405,-19711,21151,-6835,-23296,6381,25198,-10149,-20445,-26048,12479,-24243,15390,14170,20039,32443,-20396,-26978,3936,-18455,-1531,-23022,-8963,-26748,-22667,-21257,-24168,-28932,-21487,-30811,10709,-29285,18634,2036,-10508,28829,-8019,-12772,-7212,-6639,-9653,-3006,-28097,6280,10390,6460,-21498,21193,22505,-26295,25740,30219,26671,-29058,-24963,29453,-32291,23011,14847,3113,-16146,12856,28376,-8790,-8796,12883,-30531,-28298,-30419,-11526,-29572,-3504,29208,8626,-7377,-28127,25074,25116,23272,8580,5184,-3696,-23223,-24477,22176,4921,2815,27472,13295,8167,-18753,6631,-23842,-5340,-22290,-8416,-30978,-8645,-5448,19060,-3777,10258,-17653,-2546,8752,28654,6168,3149,7426,13311,20416,-26682,1795,21757,-9666,24340,27192,-11272,13982,-7095,26558,15523,32399,15729,6327,22563,29431,15202,26365,18144,-5747,26767,-5114,-25784,27128,3144,2918,-32273,-3128,4918,-17701,15240,-31220,-14277,-21084,24255,-14843,25181,-24237,-14928,-30788,32246,27690,-25163,9301,26603,25528,15594,13187,-18190,27982,7356
,3816,-29436,1645,-20456,18969,-12288,-23801,19115,22248,-15209,9377,25394,3574,31872,-10865,-4106,4204,-4192,7948,25188,-23856,-19956,921,-8825,22168,7917,5248,9509,26508,-14524,-14910,6047,-17892,21618,5369,-1764,14984,-29767,13423,14198,16350,-31233,-11429,-30590,-24077,2697,25253,11179,25122,-26080,-17592 diff --git a/tensorflow/lite/micro/integration_tests/seanet/conv/conv12.tflite b/tensorflow/lite/micro/integration_tests/seanet/conv/conv12.tflite new file mode 100644 index 0000000..94ff163 Binary files /dev/null and b/tensorflow/lite/micro/integration_tests/seanet/conv/conv12.tflite differ diff --git a/tensorflow/lite/micro/integration_tests/seanet/conv/conv12_golden_int16.csv b/tensorflow/lite/micro/integration_tests/seanet/conv/conv12_golden_int16.csv new file mode 100644 index 0000000..49ce83f --- /dev/null +++ b/tensorflow/lite/micro/integration_tests/seanet/conv/conv12_golden_int16.csv @@ -0,0 +1 @@ +-20987,-19540,23122,-32768,-6151,3709,10079,-32768,32767,-20569,-32336,32767,-12681,32767,-32768,-25495,-32768,-32768,32767,705,-32768,-32768,-32768,-32768,11040,-16713,17941,-24529,-24467,2548,-20745,-32768,-32768,24730,-32768,32767,32767,-32768,31151,23796,17828,-9238,32767,32767,-32768,-32768,-24075,-4379,-32768,-32768,-28507,-21594,-32768,-32768,-32768,561,32767,-32768,14573,-23477,32767,-32768,32767,-32768,22494,32767,-4147,-14804,-32768,19417,-32768,32767,32767,31854,14261,32767,32767,30890,-32768,-32768,32767,32767,4527,-19624,-32768,-32768,32767,10375,8220,-4941,-32768,32767,-9876,-29703,14254,-32768,32767,8390,32767,-16828,32767,32767,-21773,32767,32767,-2965,32767,32767,19201,-32768,-28360,32767,-32768,-19575,32767,7086,32767,32767,32767,976,32767,-32768,-12242,-15093,32767,32767,-32768,32767,-32768,-10905,-32768,-7416,32767,32767,-32768,-18476,-13539,3301,6004,-18360,-32768,32767,-32768,-25174,32767,-32768,32767,32767,32767,-32768,-32768,-32768,12045,-12512,-32768,-17452,-23670,-24229,32767,32767,-32768,23459,-32768,588,-32768,523,-32768,-13446,32
767,-19967,30976,23500,-4131,-20600,6794,14639,-13456,32767,32767,-11609,4084,-32768,-8745,-11715,-32768,-22205,-13942,32767,7745,1273,29149,-32768,12964,32767,-1097,-11778,32767,32767,-16095,14066,-24387,28932,5761,32767,-32768,16956,32767,-32768,-32768,-7039,30350,-28415,32767,32767,32767,32767,32767,32767,-148,-12962,32767,24210,23358,32767,-32768,32767,860,30249,-32768,32767,32767,32767,32767,31935,32767,17707,32767,-32768,25529,-32768,-32768,32767,-27227,32767,27575,8122,745,17208,32767,-22582,32767,-32768,32767,-32768,32767,24690,11197,32767,-32768,-32768,-30753,-32768,32767,21592,-2328,-32768,-15575,23754,8995,-32768,-32768,-32768,-32768,32767,11454,-5233,32767,-32768,-7958,-32768,1723,-29991,-32768,-30393,-3811,-32768,32767,32767,-32768,-13473,32767,-26275,-19476,-32768,32767,5542,25553,-32768,32767,12391,-32768,-32768,15388,-3714,-32768,-4424,-32768,-32768,32767,-11668,-12562,-12008,3951,12241,32767,-30840,-4651,-13792,32767,-32768 diff --git a/tensorflow/lite/micro/integration_tests/seanet/conv/conv12_input0_int16.csv b/tensorflow/lite/micro/integration_tests/seanet/conv/conv12_input0_int16.csv new file mode 100644 index 0000000..2e223da --- /dev/null +++ b/tensorflow/lite/micro/integration_tests/seanet/conv/conv12_input0_int16.csv @@ -0,0 +1 @@ 
+2704,-29464,26963,-3116,17181,6932,-22003,15709,30790,-15068,-7071,26286,2064,-9770,-9314,15907,30088,14785,-9275,-21746,24935,-18457,-11559,-19045,22123,-8185,-3405,-28068,-10899,17517,23239,-22839,-14362,31186,-182,18748,3536,23431,15683,-15906,18941,30934,-25972,6493,29202,-27428,-9567,5774,13146,11273,-8780,13575,17239,23314,26770,-3435,8873,244,18446,8786,-7891,-16182,31081,-11594,17071,18588,-5561,14381,-6065,-17998,-23773,-934,23613,6541,29162,16323,-7900,-2689,12904,-13682,-758,-19426,7204,19034,-371,25501,-24577,13268,-32097,13733,24654,-13917,29533,-30564,14219,24178,-15942,26940,-15629,21047,-27565,-25307,-24314,32252,-11908,-8631,17731,-30048,-861,23496,4226,-27235,17919,29157,9354,-32616,-14960,-18697,16243,633,8271,-23461,-31131,-19973,-23310,1196,8142,-18157,20227,25621,-18963,-309,-14462,-3797,750,-22342,22836,13618,-21326,-14969,16269,-29855,9752,25810,3248,-27515,9960,14981,31260,3242,-6872,1146,-3235,-32570,-23110,20346,1802,11520,31379,-12133,2768,-14869,-14774,-5575,-30687,17227,26547,-15015,14939,-4125,31244,-24095,58,7967,6706,29872,3352,-16428,26169,-28135,26010,-30196,-24806,-26316,5287,14143,19369,-32436,4886,-28213,18052,8169,31576,19105,12820,-26891,-22768,-22236,-13823,-19767,-2794,-15752,7472,20914,-23487,30552,-25995,25623,3098,-6851,3840,10759,-10334,-30080,6700,28605,27210,-12724,25093,5858,-13938,15965,-1845,-11728,12777,5470,32545,-23070,9232,-10608,-27630,29884,-2016,-30927,-9962,21324,7535,-21252,22427,-30919,179,-11083,891,-21723,-21149,1718,9385,1578,7502,23120,20032,6834,23277,-14004,-14321,30064,6800,17292,-1887,-10766,18129,-3977,-31662,-4121,-2275,-11340,30160,18128,-14195,-32073,-22587,-6703,-19524,-24592,-32270,7747,-32416,4982,-13028,-23662,-7084,25395,27344,-14820,-29113,-21901,-30699,-11670,28088,-26289,-31594,12721,-24132,5777,-4396,-22670,9040,-10711,-2204,21184,16856,13612,-12846,31937,-2122,-29776,-9155,-17694,23192,-3271,-11038,-28379,-10752,-23133,21480,5693,-23794,-9291,3542,21420,-15862,-5352,-14713,-23483,-81
65,-32154,11574,-9343,18822,-1877,-7456,22641,-11007,3394,5680,11310,25000,10728,28402,-28226,-31970,-26266,-13262,15272,15164,-5709,29049,9083,101,31870,-22074,-32221,-25145,11884,23058,4363,8995,-29668,4413,-20158,18594,2326,27322,32287,-15828,12170,-11643,12,-30065,-459,6745,-15384,-28051,-11175,-16078,4263,3835,13350,6788,16598,-25652,-7823,16699,-7277,15119,-10333,-4786,-14493,4974,-28295,14143,-25426,-21090,-18666,22847,789,-27086,23589,5386,-19236,23260,-24798,18239,-3068,-16463,-26370,1090,-14608,9835,18123,30338,5976,-30888,22165,27181,29496,3340,-32709,5578,13739,7542,-10787,15213,-22692,28814,19968,31106,-4923,3642,-20318,10456,-26868,32457,13708,25523,2868,-19938,-31799,14262,25601,-22976,4493,-4757,-16029,-3571,4450,-30446,10921,7800,-24857,3397,6019,3532,-28132,12839,-7716,6992,1398,-11866,5963,5742,17566,7685,-4375,30015,7094,11047,-12901,5009,8378,-13883,26196,-27940,6059,31263,26323,-29611,-4196,-26647,-1265,-21358,10126,-18470,17933,17822,31975,-22958,32032,30446,-23370,23031,-9498,25656,19462,-11172,-9972,-24122,-18644,16668,-173,24306,15342,12898,-5714,8298,-24741,-13579,-2884,5942,28851,2513,-29353,27505,32377,26678,2630,4030,4083,-5821,-17379,592,8342,-16601,19663,-32613,19670,-5522,-24208,-16038,22891,5536,5543,-398,-21920,7952,26681,3028,26541,13937,-17741,30839,-7255,-23006,27646,26107,25631,-7408,17533,20106,21194,6130,1798,-18187,-14046,-25330,16851,2988,-14535,-9868,22695,32652,12635,926,29063,3051,-5789,12068,-13441,7865,269,1630,-17977,20473,26949,-29485,27719,20865,-17714,7266,75,9412,-13506,18623,28478,-11688,-4185,-1583,-3972,-18474,-4356,22019,-27470,-30468,1248,10527,-32539,23792,-16538,10528,-7166,-26698,-8868,23554,27518,-11712,-10204,-27323,16780,-22833,30936,31140,-31366,-10429,29016,-3419,-1847,17143,-2464,-7332,15695,-6906,-21566,-27398,-4454,-1489,-4720,-31608,27035,-3964,7165,28091,13576,21455,4194,-8916,18484,24932,-20127,12315,6226,-30007,-576,2996,13495,21338,28009,-8002,14622,-19771,-14774,-5572,-13926,-10458,18765,-269
78,-17245,6965,649,-15031,31161,-22551,-26843,26612,-16393,-30412,-5928,24048,27357,-11838,-12925,32663,-6329 diff --git a/tensorflow/lite/micro/integration_tests/seanet/conv/conv13.tflite b/tensorflow/lite/micro/integration_tests/seanet/conv/conv13.tflite new file mode 100644 index 0000000..1130076 Binary files /dev/null and b/tensorflow/lite/micro/integration_tests/seanet/conv/conv13.tflite differ diff --git a/tensorflow/lite/micro/integration_tests/seanet/conv/conv13_golden_int16.csv b/tensorflow/lite/micro/integration_tests/seanet/conv/conv13_golden_int16.csv new file mode 100644 index 0000000..b71dd21 --- /dev/null +++ b/tensorflow/lite/micro/integration_tests/seanet/conv/conv13_golden_int16.csv @@ -0,0 +1 @@ +8347,32767,10599,32767,-32581,-6619,-32768,-4639,17380,-32768,-32768,31875,32767,-32768,14132,-27577,32767,-32768,-29655,-32768,-6301,32767,-32768,-32768,-13014,32767,32767,25719,32767,21789,-32768,-32768,-24094,32767,-32768,4511,-32768,-32768,32767,7810,-24095,-32768,32767,-32768,-32768,32767,32767,-32768,-32768,-32768,31345,7129,-32768,-32768,-32768,-32768,-32768,23056,-32768,-32768,32767,-32768,-32768,-32768,32767,32767,10824,10210,22524,-32768,-29798,-32768,-7830,7467,32767,32767,8479,32767,32767,15377,-19073,24195,-7214,26543,32767,32767,32767,-32768,-32768,17858,32767,32767,-32768,32767,-32768,32767,32767,14861,-32768,1888,7048,32767,32767,32767,32767,1546,-28801,32767,-32768,-23523,32767,32767,32767,32767,-32768,-32768,-32768,-32768,32767,20754,32767,-527,32767,-32768,-31816,21535,-32768,-371,32767,32767,-32392,-32768,-18996,-19428,32767,32767,-23236,32767,-32768,-32768,-32768,32767,32767,22095,32767,-32768,32767,-32768,-32768,-25795,-32768,-32768,32767,-6394,32767,27335,32767,-32768,-905,32767,32767,32767,-7325,-32768,990,-32768,-32768,-32768,-26661,29381,-32768,32767,32767,-32768,-14073,-4612,32767,-24426,-11203,-32768,32767,1509,-32768,-32768,-9157,32767,-32768,-32768,2856,-32768,32767,-32768,30351,-32768,-32768,2267,32767,32767,-32768,24136,327
67,-1092,25717,-26846,32767,-1442,32767,-32768,-32768,32767,3537,32767,26142,32767,32767,32767,-32768,-32768,-4029,32767,-32768,-32768,-32768,-32768,-3289,32767,-32768,-32768,1592,7037,-32768,-32768,-26905,-11396,-31169,-21153,-32768,32767,-7113,-32768,-3214,-15916,32767,-32768,21698,-6378,29482,-32768,32767,-32768,-32768,32767,32767,-32768,32767,-15109,18775,32767,-32768,-28284,-7869,32767,-18816,12392,-14817,1061,-32768,-32768,-23598,-16814,-32768,-32768,-32768,-32768,-32768,-32768,32767,32767,-32768,-32768,-10358,-13155,32767,32767,-23695,-7309,32767,-32768,-32768,32767,-32768,32767,-32768,-18294,-32768,-32768,-32768,-32768,351,32767,-32768,32767,-32768,-32768,2599,32767,32767,-32768,32767,-32768,4105,-32768,-32768,-32768,28255,-32768,32767,-32768,-32768,32767,-32768,32767,16993,24379,13116,32767,32767,32767,32767,32767,32767,32710,-32768,-32768,-23756,-25734,4635,-25553,32767,32767,13895,32767,32767,32767,-27660,32767,32767,15759,-32768,5100,32767,11349,-24526,32767,-32768,32767,25961,-32768,12643,32767,-30964,8739,-32768,32767,8237,-2180,32767,12812,-32768,32767,32767,32767,32767,-32768,-24535,-32768,3810,-32768,32767,-2948,32767,32767,-32768,25167,32767,32767,13951,-32768,-28652,-32768,32487,32767,-32768,32767,32767,30119,-32768,-32768,-32768,-25003,-23939,32767,32767,32767,-32768,32767,32767,-32768,32767,32767,32767,7993,-8257,-32768,32233,-12164,2233,5968,8386,-22112,32767,10975,32767,-32768,-21487,-32768,32767,32767,-31244,-32768,-32768,32767,32767,5200,-32768,-32768,-32768,32767,32767,-5708,32767,32767,-32768,9205,32767,-32768,-32768,32767,-4947,32767,-32768,32767,-17097,8236,-10630,-32768,-32768,32767,6051,-32768,-32768,32767,-32768,-16895,-25956,4759,-32768,16442,26894,-32768,32767,-408,32767,-32768,-32768,-9678,32247,32767,5502,-15697,32767,32767,32767,-32768,32767,32767,-1172,32767,-32768,-20800,32767,2605,-441,-32768,32767,-13466,32767,-28943,-32768,-32768,-32768,32767,-32768,32767,-32768,-32768,32767,32767,-13645,8058,32767,32767,28194,-32768,-32768,
32767,32767,-32768,-13395,32767,32767,32767,-32768,-32768,-32768,3620,-32768,-32768,-32768,-32768,32767,32767,-32768,32767,15192,-32768,9574,2560,-6501,30381,32767,-32768,-16147,-26620,-32768,-32768,-32768,-32768,-32768,32767,32767,-6049,-32768,-32768,-32768,32767,-32768,-32768,-32768,-32768,-32768,24303,32767,-32768,32767,14667,-32768,-32768,-32768,-32768,-32768,-32768,-32768,-32768,-32768,-22011,32767,32767,-20443,8029,32767,24013,-16388,-32768,-32768,10057,32767,32767,15961,-32768,32767,-29001,-23055,18409,-32768,32767,-32768,32767,-32768,32767,32767,-32768,10282,-32768,-32768,-32768,22406,32767,32767,2889,-16423,-29941,-32768,32767,-32768,-32768,24888,32767,23445,12415,-32768,-32768,10750,1825,-1365,26146,-32768,-1168,-32678,-32768,-32768,32767,-32768,-684,-32768,-32622,-32768,-32768,32767,-12485 diff --git a/tensorflow/lite/micro/integration_tests/seanet/conv/conv13_input0_int16.csv b/tensorflow/lite/micro/integration_tests/seanet/conv/conv13_input0_int16.csv new file mode 100644 index 0000000..9976075 --- /dev/null +++ b/tensorflow/lite/micro/integration_tests/seanet/conv/conv13_input0_int16.csv @@ -0,0 +1 @@ 
+-1479,26283,11110,27865,26288,30777,22005,-32317,29638,22562,6359,14304,-717,-20126,3173,-9094,-18615,16234,1009,-10453,9591,25207,17382,-7707,12596,11467,-27567,-17399,-22297,13700,-684,30967,9606,-10568,1581,26837,1369,17047,27394,9470,-27554,-15486,-30192,25640,27233,-3744,25769,2902,-27140,-8159,-26665,30277,18885,2042,17861,5016,11239,11614,-12668,31305,-23724,27306,5995,19399,-27063,-10676,-6414,2459,-30512,-280,14835,-5555,27237,-23892,19219,20210,12231,21803,31739,8356,17740,-15416,15867,23477,-24495,-3064,-14226,16737,-10885,16443,4600,-27735,27346,6287,32058,-28071,-8556,-25579,31100,-10626,-10902,-14030,-17364,-7704,-27902,23619,-12715,31258,-10108,-23217,-3176,19662,-21427,30083,27051,-23178,-21187,11253,384,-27187,10193,11273,73,26513,31330,16665,-11024,19805,-31257,-18301,-25800,7103,4462,20613,15549,16173,18939,5349,12254,13271,-31451,-4284,-15543,-22331,-2776,3618,-20341,24341,30222,-30261,-5200,-30494,-3696,27271,11056,10736,26838,7115,-1073,-21398,-30724,16278,-18687,30872,19304,-771,9110,3844,3175,850,-2482,16806,-2070,-20116,-5479,1015,-25736,9828,-3067,27471,-30318,2959,13355,21480,-2891,3655,-28036,26159,18325,-20301,-27188,7305,31611,-19366,20314,-7298,-20878,1627,7183,-32628,13337,31179,14696,-30754,16662,-12326,5774,-6890,-20968,-11054,-31623,-25350,-18535,163,-21220,-8864,-28178,-19231,11026,8666,-16875,-20539,-11221,28546,26393,-12404,-17040,15784,12523,9670,4077,27212,12062,21432,-16787,-26782,3897,20322,19873,-31133,23767,27414,-14983,-24285,-16874,6520,-8414,21878,-6111,-1171,-23310,-25028,433,-25178,-31123,-2668,14340,18260,8895,-1280,3227,-8306,31250,-27375,-5134,-273,-18816,12401,-21317,10909,17775,19111,-4774,9884,14274,-3245,-6891,-2922,1676,20930,-2656,-19146,21858,2284,5054,6635,25339,18190,-31222,22105,-5155,23559,10727,18322,25669,-12948,27225,22466,-8247,-2618,15274,19574,-16326,-11343,-28506,14737,25489,14214,6559,-20177,-13731,-6765,-31470,25464,-20912,10810,-21884,-24473,5257,6807,-23000,-18837,3737,-7892,31330,-10519,2810
6,-6937,24226,-10257,-26848,-8355,21482,4277,-30010,31584,8932,16045,-873,-27805,469,-4023,24544,26857,-14341,-13252,31325,20169,-10078,875,7608,24580,-23927,7833,-32227,-13692,20207,-6540,25446,14252,-8433,-5846,12886,27935,19545,8637,23995,-19486,-29469,2614,-13970,-13595,-16839,16360,-25779,21908,18898,16526,-11965,-4096,10746,-32611,17275,18739,7723,-9395,-2448,4292,-21606,5159,-31874,12443,15491,-12363,-28043,-32750,1530,17112,-28583,9517,-21434,-18939,32587,26220,-10274,-23984,29106,1977,7093,26635,-2896,10747,26878,-4806,-22600,-19586,-25639,-28111,-10444,4891,30516,11979,20783,21088,-8879,-4992,-22920,-21222,-10580,9994,1754,-31270,-15155,14833,29392,23908,17541,8274,26409,-588,-24562,29068,-12009,10876,14559,-21553,6748,16502,9794,32101,3097,-8580,-27807,29467,-20134,7705,1225,30809,31968,-29360,29400,22311,19716,27564,5000,9626,-5946,-27292,8493,-17856,-13526,18939,-26078,-9621,21620,20636,-18095,1834,10489,24643,-10675,-22219,15966,26979,-26773,-6252,-1114,-3834,9777,-4807,15377,18580,-24679,-15701,4357,-2140,-5163,-20707,-13889,-31740,8122,2366,-13404,28245,-1891,8139,15785,11024,23919,28932,-4094,18514,21618,15694,9920,-19016,14742,-13512,18500,-29587,24911,25611,-18346,17663,-30486,24596,10330,27722,2577,-4260,-12297,-4114,29825,-23532,1583,-30246,18851,28471,-7770,269,-7828,-26963,24420,14530,27724,-2166,23057,-21823,-23198,-27515,-25710,30592,-29291,-14187,6272,-30323,-15179,-4046,441,23285,29045,27101,-28067,-16854,5315,21235,6131,27783,-31065,23812,29688,-32637,8973,-15331,-1821,-19280,12850,-7374,-25213,-28664,-12151,20776,-22692,26482,-7184,-32502,-24090,7754,-27963,30945,10834,-30572,28362,-27767,-17141,5607,-2762,-21408,-5235,-8380,22778,25533,11009,5695,28826,-18525,29300,20759,31061,32621,-9775,6326,-10299,29226,-24672,29045,7772,-2216,16873,-25166,-25698,17926,14509,-6673,17721,6288,-1866,29753,-219,24399,-8469,-24317,-19597,9634,-10697,39,12733,23096,19470,-10082,8411,24616,-22996,3125,30100,1444,20436,-28292,-31982,31849,-13889,-14403,1671
3,-2301,-10736,-15918,-31773,-10318,-3898,-6580,-29555,-17365,20424,-1409,31456,-23342,-29014,29849,-6490,25764,11922,-24865,-27810,23060,30893,-32493,13633,29437,-11905,32201,-25403,22440,-3497,16307,-28372,24451,31005,11166,10624,28840,20413,11533,-3622,11604,11891,30482,-17326,-26721,-14556,-22782,-18851,1208,6652,-10730,-10977,-28911,-6056,19857,-12036,32339,19825,2673,1777,6074,24766,-27242,19508,22483,-24955,31796,26362,-24567,27302,-7264,-20190,28888,32318,-17167,-11949,29944,32228,-10013,12174,18567,-29936,-28794,-2801,-2861,-30187,32160,30472,-13012,-7237,-2561,-15942,12373,-18151,16016,30469,-10159,-13063,-1858,8898,-3177,-9792,-12341,-32416,13172,-7693,-17613,-24584,-2319,23890,-16908,-31058,14213,12287,-21714,-12556,-19489,-7031,5091,21306,-4328,-3479,-21377,-6330,5581,13401,-13782,-31599,30568,7372,29915,11906,29654,9517,25517,15040,-17410,15940,-4451,14172,21264,-32173,-23709,26135,-28486,-27976,1614,12516,22940,4801,-22359,20532,-20147,32657,32034,-11432,4377,-14211,-10823,28812,21642,-29649,19890,-23938,-29477,31766,16157,-23043,-20840,25791,31703,-22546,17727,8432,13974,-3175,-12448,2247,-7362,-31492,10310,5788,22970,-20246,-25822,-26110,-22228,19704,17162,-1900,-17031,11797,-13995,-13517,27858,-11184,-7324,-17971,-26279,2898,26583,6725,18234,29560,30477,-25560,-32230,29760,11887,-30497,-28298,20070,24917,-4637,-7810,4442,-3136,-28216,20710,-31219,1110,-9503,1692,-13474,15456,-20899,-26620,22374,-25523,2431,-32021,-19081,-14024,-15867,-22235,28114,32644,-28520,30944,22223,-683,-23807,2091,-8029,-20314,15912,-3308,-19438,22753,30861,22566,-31070,-22167,-29568,12606,-29187,9712,-2782,-20620,-2224,13967,11745,-6579,-27206,18883,-19779,-19473,25678,13066,12516,-3695,-13161,5814,20398,-7737,21374,-19152,-11638,-13299,-13081,31430,16518,5618,20612,-26538,-185,21178,10960,-32010,-5338,-3741,18863,-22486,21946,6099,12289,32676,6067,-16827,-21602,26849,-303,7090,-16606,3540,-15674,-26290,17179,-5167,3647,30467,-17036,2464,-25562,-9586,-25074,22388,11958,1328
9,-7902,-1621,-16480,-6982,2170,21269,5664,-17657,15660,-7057,23659,-25310,4567,12512,-4122,-9869,31087,-17890,11443,-18857,9381,-15294,-3593,4326,3397,-22736,18645,-6669,18089,31584,-14778,3632,-24477,20848,30989,19076,15300,-3998,25718,-3353,-3401,15778,8493,-9508,-18379,3018,15012,-27961,10841,14456,4729,31997,28694,-20906,12021,8809,-19973,-27536,-23590,173,-11365,24999,2492,-4280,-6239,3548,7538,23426,30041,-715,29200,-4372,-16103,-16323,-20459,-20327,-4132,-20535,30765,20557,-9625,32158,-3078,24953,31813,-15430,-29533,-27381,-14814,-2722,-20296,-20101,-4140,6206,11996,-9190,20147,29473,7227,9181,19755,-31218,-14449,22353,29232,7818,-7447,31912,31033,23188,-24213,14183,-16183,7588,-32205,31576,-28838,-10273,-15720,-29653,-917,-28287,23710,12500,6302,-1323,1895,3894,-24680,29455,-32137,-15653,9871,23534,17322,-2213,22337,-26453,-28102,5774,-3983,-27379,30641,-31518,19058,-32721,-27012,23386,16114,-26906,-24626,17783,-32326,-22465,28579,-25411,-22631,-10956,12321,25756,13414,-7659,10610,-2879,-18363,18976,-13754,30569,-9412,-21521,-4599,-7816,23106,-26486,20518,-7690,-16026,26305,-20177,11790,4476,-2431,7213,5960,11726,-28611,-15696,9963,-18006,1990,3583,-6488,-22234,-12458,-20433,3694,-15829,27535,16934,26824,-3777,-32099,-29499,15632,-212,30741,30124,32341,10060,-19082,-8835,1202,-7317,-3057,-20032,10163,8669,-5475,11328,-25321,-11931,23131,-12642,22461,23817,4266,31443,-30398,13921,-26450,-3946,-26133,-19039,-4818,-14274,-18964,-24428,21979,9951,17395,-16960,24803,5371,-27222,-29847,-18133,27855,19971,-5149,648,-28829,9263,6734,8047,-29654,-21325,2137,-15919,-26079,-10919,-11795,3509,15527,-3086,10224,-25571,-25500,-13971,-31795,-3678,25077,-23224,11167,26200,-10305,17593,-20603,12732,-26404,27881,-440,20129,-5863,9939,6196,20000,11805,18492,-32590,4478,-8761,2370,-4138,-32441,6701,29436,1355,-14305,32286,-12459,-30423,3655,10172,32692,-26377,-4828,-8473,-7861,-15955,-9262,-21634,26145,16815,-2076,19904,-6419,23749,1258,-13743,11454,3477,23213,-19127,18981,263
49,-10736,-24990,16089,1509,25577,-25276,30295,-7308,20663,18501,-20875,-28987,-13299,10179,-4009,-12140,-1819,-8812,-28468,-13694,-5145,-8022,-29475,-18530,24276,14391,13129,26632,-26777,-23430,-28425,-23829,-19522,-27037,-7886,9277,30699,18388,-19382,-25190,-5797,-19435,29046,-19980,25757,30011,-5337,-15920,-30971,2380,11029,18024,-10747,-32513,16084,3199,-30483,-18047,-12508,-27055,5158,-25778,3238,-12122,30280,28717,7474,-4065,-27583,-827,1882,16747,22297,-20695,18147,-23369,22191,21227,-5065,4077,29758,-30958,-8166,-10455,-11468,22783,-10857,24859,-22754,10772,32196,-18841,11398,-30252,-23184,3218,-10895,1622,-1743,31546,-18649,-32508,-21017,-31612,16533,-8956,29690,-11945,17498,-24635,30908,-23353,-14683,32143,30927,24574,28454,10907,10166,-11324,32390,-11060,26912,5070,5606,20564,16377,19937,-24514,4366,-24838,14362,-43,6570,-13512,5513,-16280,20847,-22985,12605,-384,30146,-22164,16001,25538,30676,11807,-28528,4005,7097,-25918,-26000,-10042,17006,-8611,-22578,20525,1927,11650,-21746,31388,27460,25485,23363,-3273,28416,6650,-25543,9268,15859,-28807,-26584,-2184,7620,-11553,8269,-12457,-11211,13786,-12550,18217,26078,16020,18026,-15745,-8633,12867,-23667,32165,-28785,-12336,-9437,-29097,-4184,23914,-2626,-29079,-1838,-11025,-4288,32112,22666,12005,13,5070,-8929,9829,13613,-5198,8365,-31392,-17214,-24559,-13858,15429,-2046,-19565,30078,8213,-16237,3454,-6474,-16051,-28860,6740,-21134,26979,25498,19617,-27335,10169,-3849,-19197,-7221,-2322,-11333,12179,-27149,6789,1004,-12093,-23292,14743,17890,-14076,-25072,3236,-6825,9828,-21169,-3203,502,28670,-27247,29039,-18578,30420,-3490,28314,-25580,-1791,-31002,12534,-29293,17700,11782,30890,29526,-11420,-4362,24401,-9914,-27201,13740,9666,-31934,-16286,21862,10784,-3671,-8413,8283,-30924,-19715,-15649,15863,24849,29942,5497,-16612,-19598,-25007,12793,19999,-25724,29116,27742,31560,-21812,-11628,27006,29054,-13312,-31927,27670,2976,-15336,2806,3156,-29963,-32246,-26519,-31367,-15939,-8290,-10731,5626,23877,-1151,-7273,-1
6475,9996,-15779,28069,20785,14411,21961,13335,-5750,-22995,-29365,-6931,-25093,-15342,8250,27668,32055,22548,4884,-5619,-32026,-18532,-13783,19955,22457,-7034,-31665,401,8016,7134,29752,-27782,32143,11760,12248,-32571,-27229,-17976,-11674,-18534,-21662,27923,-29280,29946,-24041,25471,-20004,530,-28667,-26556,-25382,-7081,-432,21870,-5648,-7380,18646,32367,-26303,-25240,-14501,-19673,24233,25578,-13178,30908,-13722,14039,-30929,-5524,15976,-20088,-6791,11995,-21044,12232,-14469,-15715,241,-19069,4282,3705,-9047,-18893,28183,22706,-12321,28545,-20527,-19103,10241,-305,-3756,31940,20030,16483,15865,-18841,-24495,9399,18984,-31612,-12197,-17566,-20159,-3210,26619,27566,-29597,-4763,29264,14021,24955,-31074,3997,-32248,14905,30568,-29383,5011,-1829,-29866,29031,2692,1572,-9224,-13768,-7339,-20525,-9787,20514,7960,-19965,16959,18849,26710,30967,7246,-7910,19358,-5580,12363,7725,23729,3322,21995,-3296,26518,-6759,1625,-10677,-651,-19010,-3281,2956,12545,-1593,-26560,1059,-2810,-336,27082,-4250,-15294,-13914,31364,20700,21692,3205,-19051,-16444,-29017,29193,21282,-15424,-24907,3729,-1729,-28400,25953,-27477,1941,4465,8626,-29938,-19661,-10691,-11257,21016,-17191,-8591,378,5441,17551,-5982,24962,27699,28103,21284,16222,17000,8705,-17873,-26456,14963,4551,-18857,1804,-32430,-21331,-957,-17595,-23893,-27968,8392,26286,19237,17154,-20637,265,5538,21404,-13193,-24926,-4615,-31126,14061,-30379,3021,10504,17173,-2144,-23051,6372,-6753,-21509,28255,27966,-4766,-23547,-25594,-18489,-12412,-28752,24244,-19777,6737,15391,-2425,17670,-14342,-25798,29852,317,13180,-10576,3719,23947,7737,-2145,11904,16557,12857,-28384,17468,-4489,20002,29189,28885,-21365,4581,30739,-606,17456,-2481,20286,-10616,-26358,-31236,7010,10560,-21698,3113,-3589,-29147,20569,3673,-4960,14315,28146,30929,-20904,-176,-16547,6264,22705,6759,-9502,6281,5323,21689,-12851,31570,4655,-2563,-12751,8134,27733,6311,19705,-30504,-3187,30314,-26130,-14394,-27635,-1784,-21488,-18460,-412,-10217,-20868,-1924,-22075,18068,-131
69,-15448,-2350,16712,17321,-8458,-22013,30298,21330,29580,30415,16557,-14121,-29067,20321,-10938,-15868,-19699,7559,-5761,-13098,1765,21372,-16136,6142,8593,5233,1716,-28607,16413,-22439,20434,29761,-24900,-12081,-14070,14849,25526,24884,-15797,-16529,-17101,27048,5564,18121,9466,-9240,20038,-2769,-2158,-28415,-3590,-12117,-12544,30201,-31351,7880,4948,3544,-10426,30287,-8349,18331,20537,-14978,-21702,-31697,-19455,30421,-15464,-30734,19148,3035,23299,-23773,15839,9290,-5393,4727,10681,30308,6236,20081,-11245,10032,-19173,-12427,-3175,-17195,14653,-17317,2745,-15264,-14379,-18617,25367,-29327,12658,16335,-22999,3101,22687,12599,-23949,3550,-27696,18324,24141,-5574,-32468,14355,14597,29106,24519,-6515,22922,-13906,31055,-2602,-15615,-28840,-29173,16691,11012,-3778,1760,-21146,-12904,28104,-30054,2201,-19551,21547,-18807,27906,8446,7875,31234,4026,-12843,-14550,23714,443,-32213,-13509,32475,-18300,-17216,2191,12398,-24459,-3455,29655,14137,5691,8845,5476,23533,-7582,-27656,-29377,-56,4056,14374,208,31042,30711,9400,24498,21668,6347,1466,-22609,818,-10914,1692,-20391,21053,590,4440,15232,23206,-25491,-25502,6036,-12323,-12174,30655,15290,-2492,-8561,27063,-2314,-9545,-28464,-15437,20237,13396,-10186,9010,-19677,-19264,32013,-15793,17021,732,-16647,-6039,-11341,13487,3395,-10529,26731,2640,23358,19218,12346,17151,-32348,-19244,7404,-2995,-4056,18178,13030,-7433,30803,16771,-17433,-32198,-7352,-7438,110,-31184,1021,10181,-27708,-30142,-4649,18797,-23220,-14757,4543,-3859,12578,4718,23648,-4926,10016,-30625,-21991,2016,9536,18148,15801,16760,9734,16338,-4274,-28812,-14158,22049,-18275,27172,-8433,29004,-17824,26974,-7479,12404,-4002,23382,12269,-15058,28564,3938,24783,4937,-2791,-31376,-5963,-1564,-9039,-20462,-6744,21517,-23603,-176,-5059,-31101,26348,-14418,4793,-14146,12617,20097,-25629,15030,-15532,2398,-9211,12433,14464,-17858,2948,3728,22824,19967,23877,-5306,7123,21296,12727,20642,29758,31893,31055,24225,-31910,-8885,11992,-9913,20828,993,5106,27262,-1821,6969,617
7,11153,-5145,30314,29537,-12611,-15453,-30427,-5254,5487,3271,27153,-17588,-736,-18676,29407,17761,20759,23964,9551,-10032,-14615,-11005,7533,-5770,-30253,26145,6588,-5504,20122,11857,14365,22938,-19859 diff --git a/tensorflow/lite/micro/integration_tests/seanet/conv/conv14.tflite b/tensorflow/lite/micro/integration_tests/seanet/conv/conv14.tflite new file mode 100644 index 0000000..d5630c9 Binary files /dev/null and b/tensorflow/lite/micro/integration_tests/seanet/conv/conv14.tflite differ diff --git a/tensorflow/lite/micro/integration_tests/seanet/conv/conv14_golden_int16.csv b/tensorflow/lite/micro/integration_tests/seanet/conv/conv14_golden_int16.csv new file mode 100644 index 0000000..b22d58b --- /dev/null +++ b/tensorflow/lite/micro/integration_tests/seanet/conv/conv14_golden_int16.csv @@ -0,0 +1 @@ +-32768,10939,-32768,-29043,-30415,-20464,32767,32767,-14899,32767,32767,-32768,-32768,-32768,-32768,-21999,-32768,32767,-32768,1547,32767,15968,-32766,23803,6918,-32768,-32768,-32768,-2003,-19909,9718,-32768,-14070,2376,5370,-18548,-31269,-32768,31787,-31717,32767,-4631,18069,5897,23998,-32768,32767,-32768,32767,-32768,-9770,-22837,9068,-32768,1498,12636,-7368,-32768,32767,-32768,3488,-13507,-3394,32767,-32768,-26511,-1996,-20458,6861,-32768,-32768,11026,32767,23172,-19253,-32768,-12043,-25658,32767,32767,32767,-32768,-32768,-5739,-32768,2278,-1183,-17114,-5727,-25316,2149,-32768,-32768,22780,32767,-61,9770,9312,32767,-5994,2305,32767,-32768,32767,-32768,-11949,-32768,20732,-8336,-26740,32767,-32768,-32768,32352,-32768,-9064,-4388,-6196,-29576,-32768,21390,22188,-32768,-32768,4309,-32768,32767,-32768,4413,-32768,32767,29218,13398,-13440,32767,32767,20381,-9679,32767,32767,-12623,-1291,29198,22254,32767,-32768,32767,10974,218,11397,32767,-32768,-5039,-32768,16443,-32768,-16726,-32768,21124,28037,-32192,-2428,-24213,-32768,-819,-25779,17984,-32768,-32768,-6146,-24619,-7723,11255,32767,32767,32767,32767,-32768,-32768,-32768,-32768,-32768,-4444,-17400,-14035,31239,31
88,32767,7000,32767,-23271,-12984,32767,-11181,-28860,-25874,10760,-1664,-13792,-32768,-22872,-4480,-471,-10649,3191,32767,32767,32767,-32768,30875,32767,-28739,3736,-32768,-32768,32767,-3439,32767,-2254,32767,24923,32767,15718,4695,32767,-8315,32767,7293,17812,-32768,-21242,15202,25420,32767,25674,19218,-8349,32767,3972,30128,-8083,-32768,32767,27464,5640,-9722,-21095,969,17759,-4600,32767,32767,32767,-8550,32767,29964,32767,-30515,16828,-27169,13311,-32768,-18246,9983,32767,-4931,-2364,32767,-9206,-4893,-32768,-32768,18184,-5326,2525,-19123,11727,5108,-15438,-11608,-8057,32767,-31713,-32768,4012,-32768,32767,775,-1176,22114,13640,-30981,22279,15281,-31814,-10398,4232,11801,9556,32767,-15304,17821,32767,-9729,-32768,32767,-32768,-32125,-15920,7496,-32768,-27398,-743,9115,-32768,-32768,-27606,-26068,32767,-23959,-32768,-32768,-9662,-32768,-22057,-32768,-32768,-32768,10329,-32768,-32768,-32768,-32768,-25125,-32768,-32768,-32768,19549,-32768,-32768,-32768,-32768,-32768,-18598,-13571,5724,-32768,-32768,32767,-32768,-32768,-32768,-32768,-10365,-28516,-32768,-23138,-32768,-32768,-21650,-32768,-16036,10970,-26993,-8157,-32768,-32768,-32768,-32768,-32768,-32768,10797,-32768,-17054,-10259,32767,-13971,-20048,-32768,-32768,-23621,-29734,-29154,-32768,-32768,-32768,-32768,-7172,-2320,13228,30439,-18742,32767,-32768,-32768,-20519,-16105,-32768,-32768,17419,-2742,-32768,32767,7812,-15990,10600,-32768,-32768,-10961,20210,-32768,1659,-32768,32767,-9648,-10051,-3683,-32768,-11831,-1126,-13742,-32768,-32768,32767,-32768,-19009,-26201,-11381,-28211,-32768,-27460,-25415,31907,-30475,15921,-31845,-32768,-32768,-32768,-5280,-8815,5139,18584,12515,-32768,-32768,9165,-12123,-32768,-32768,11784,12041,1400,-6545,8928,23164,32767,-10340,6805,-32768,-19671,-32768,-25719,-32768,-30551,24614,-32768,-8727,4667,5769,-22618,21991,2215,15041,-18035,29374,32767,-32768,-32768,8314,28508,-26255,-32768,28227,15815,-18166,32767,32767,-32768,30042,22042,-13025,24918,-24897,13304,-15084,32767,-11136,-529
8,32767,-25188,32767,6340,2884,-926,17414,-7100,-17822,32767,-24265,-32768,-15765,32767,15838,6391,-32768,21518,-2292,-32768,-20195,-32768,32767,-32768,3128,-11236,32767,-32768,-32768,32767,32767,-32768,14080,-1520,8440,-3114,13148,16512,-10920,11876,26142,-28970,5544,-29240,-18968,32767,-9520,32767,32767,-26090,32536,32767,-3030,32767,-13906,17979,16459,9348,10212,-13182,16111,32767,7332,-32768,-19384,29472,-12612,6440,32767,32767,-4057,32767,32767,24603,32767,-19382,-9126,-32768,26051,32767,-2748,11385,32767,-32768,-17748,18365,32767,-32768,27431,12441,32767,32767,32767,32767,32767,32767,-24236,6723,26554,-12686,-32768,619,-1312,-32132,32767,32767,32767,25680,-14761,15545,-5244,-10470,27084,-32768,32767,22531,-13620,22804,20842,22599,-32768,-19918,-32768,32767,-8253,-32768,-28273,32767,-28174,-16109,-5091,3984,-14449,-13379,32767,32767,8677,3408,-17661,32767,-11666 diff --git a/tensorflow/lite/micro/integration_tests/seanet/conv/conv14_input0_int16.csv b/tensorflow/lite/micro/integration_tests/seanet/conv/conv14_input0_int16.csv new file mode 100644 index 0000000..340e953 --- /dev/null +++ b/tensorflow/lite/micro/integration_tests/seanet/conv/conv14_input0_int16.csv @@ -0,0 +1 @@ 
+4723,-6105,1381,-15640,6128,-13133,6078,-14472,-18020,22367,-30180,2775,28068,-13402,21224,29706,-7344,26902,21759,10122,-20165,24261,-6163,-16182,15213,31715,28639,30181,31038,-14146,15909,-6032,-6036,4684,-1438,8799,14076,-4280,-26622,-11600,-3237,-27245,18153,12612,-8011,21158,-26206,9101,17860,32571,-22899,-14577,8877,-17478,13901,-31687,-1440,-24657,16842,9449,-24675,30765,-32744,-26511,21116,29908,16145,-29538,10718,-16889,-1214,-19259,5335,13684,20698,-31368,18102,-8633,4619,-2610,26374,18271,-21055,-20591,-20667,-32483,30277,30071,11192,-1538,-11453,-8700,-9738,1812,15849,-24545,18542,-2795,-13622,-19751,-22210,20011,-23743,-24569,25353,-30333,22153,-19453,-24708,21895,-29448,-25326,-26102,-17986,24198,-12027,24534,18225,13769,-2527,-29584,16526,1020,28992,27614,-13981,-30862,24072,28717,2683,12271,-22284,4899,-8437,19339,29938,-20283,-1238,-7594,-1543,-20880,26857,-1143,31053,29706,21596,17111,-6428,20565,26283,-4434,-27813,-22050,11874,-16874,19964,-2336,22371,-25727,14043,-4150,-8582,-4735,-19326,-27320,3484,8421,23791,-13587,22655,8697,10237,25231,-2248,11937,26486,18780,-24802,13470,10970,-8162,-19381,13177,31068,-26100,18474,-2428,-16117,483,-8422,-19075,15376,-15889,-12646,-549,-5631,32530,-22113,30859,16714,29633,-26173,-5170,3980,9708,-6511,16024,-7726,-23699,3429,23643,4015,15553,-9989,36,-24818,-23379,-20212,2670,-18976,186,-11747,26222,30668,103,24947,14920,-27781,-10504,280,4399,15231,-19915,15392,31300,30861,-15299,-10138,25620,-25481,-5583,3391,-11722,-31851,560,-19167,9742,-14131,-17465,-18733,2716,-12915,-9851,22955,-17892,22383,23110,-30889,-4665,8079,28228,5216,3664,-896,-17766,28168,18623,-1095,6098,19359,-5257,-18285,14533,-4884,13505,19036,-17950,28844,1363,-5981,6096,6013,-20670,-18176,18904,3050,-13739,-4702,-6679,-9770,22598,-7136,-114,-25888,-16968,-26606,-27375,7899,-10000,18174,-6430,30247,17109,1772,-25095,-14594,-26589,10875,-18041,6155,23189,5193,-20655,5786,4050,-28976,-18271,-16857,-14273,21203,12131,-20697,16632,23262,-3267
2,-28867,30227,-2619,-20286,-13935,26424,19322,-28182,-27181,26126,-29039,7550,-11249,-29494,12429,26217,14756,5715,-16758,27579,19709,1266,-23083,-22636,-12949,3261,3175,22486,6347,5687,-1339,-4679,-13446,-20640,150,-5401,18887,27748,9463,23355,7687,-20168,19755,9581,-26464,-23638,-23996,7622,3012,330,-29955,9903,3028,12924,-23416,-933,10539,12993,27387,-1496,351,5928,5584,28989,32762,740,-31944,-31589,-8900,-14995,12033,-29031,28941,6245,17049,-1805,-559,29998,-19203,4360,13984,11489,16770,21396,31153,-9901,18562,27051,28745,-163,19670,-25932,11206,-6345,-212,24919,-22083,-6117,-2985,1619,-514,32107,-20532,15366,-10674,17283,1354,32141,-3868,-12871,1381,-21212,19154,-19305,24701,25486,32503,-22480,22111,16801,17792,-17799,9080,13691,21176,32706,-31493,583,673,27684,19344,-21309,11652,23313,-851,21381,2680,-11273,-941,-23623,-2767,19094,4397,899,18990,-14344,25343,22589,-26525,2071,-25141,455,27844,-23300,-481,-20549,-31820,27481,3428,26536,17716,9969,8834,12810,-3875,-21819,16282,-24488,-2198,-12555,9631,6255,10415,18750,-23517,-16408,-17392,21200,-30497,-6044,-1805,-12740,-27450,-3870,3779,31587,-32014,-6185,1024,3617,8015,-21701,11554,7332,14162,-4473,19150,24444,14143,21608,12297,-25748,2248,10234,-391,-10360,23010,23255,-20157,9070,-8022,23132,-19404,-23757,8784,10023,-32254,-16368,14049,-29755,12115,-8995,9084,13990,28365,28195,-17303,-14303,18370,-11772,-22762,7463,-1066,4603,-18031,8370,17918,-12028,-5230,-5124,-169,-12884,20172,-29273,8317,32179,20391,-12535,-31684,-9473,14374,-28577,-15797,-16611,-20960,29229,-10407,-19169,31624,3437,-22031,18343,1170,-4977,8293,8031,-29581,27482,-32029,23226,-27239,21604,6271,695,-18897,11653,29786,-31983,9287,-6947,25735,-27098,7043,-7739,32343,-17961,-8391,-29591,-6819,-32195,23189,-6397,-29486,-9169,-11687,-25092,-23406,-25746,-17180,-21087,13042,-2481,-8702,18572,-32209,16297,-27070,12533,-12252,13441,-29671,26389,-26229,-15259,14294,20389,-15319,-5307,-10002,-1758,3011,10852,23903,9327,21267,29548,-12463,18899,-9793
,1623,20871,-32252,29285,-32684,-17361,-17947,20227,-23007,4374,15268,-12185,2313,15762,-20553,-22907,27454,-32393,1546,27301,-26912,21626,-27708,28457,-22130,-1891,-7785,5401,-5706,12089,-27542,-24669,3121,13427,-15068,20144,-30408,3684,28478,-11691,9698,32164,-25351,-20296,-14966,-12344,27521,17492,-17557,19581,9331,18295,14093,-4503,-13972,-21731,22473,17244,21686,-14089,-6419,6181,4412,-6524,-25853,-24774,2785,-31258,-7441,-15077,-16798,17059,-25124,-32077,13067,17853,-4199,-17106,14035,29447,-2419,31019,-15476,974,17596,26824,-17551,31365,11121,370,8636,-20588,-2237,31188,-19394,30886,-32742,30084,26863,18923,10003,17149,-27770,-23910,29566,-3506,20055,-22338,-11106,-24288,-24115,-27764,24016,-10488,13505,26051,16697,22683,1644,-18082,10304,-22205,26790,-4810,5094,15811,20503,-18191,20956,-28080,-30650,-4062,-29390,17113,5860,28295,-22623,-26678,15754,-2678,14042,27285,-19810,-3499,13273,28543,9281,18774,-26805,-5330,-18314,21535,12416,9674,32471,180,15501,950,30086,-30739,-8078,19863,-19287,14024,30885,-24832,32500,2397,-8761,-14224,14594,20797,-9783,-18536,21118,28910,-11791,-16770,-27630,-21131,-16827,104,-12563,2693,3694,-11004,17816,11868,-3691,-5188,23986,3034,-31022,20874,27780,-5848,-3461,26012,-32079,-29336,11975,31056,-14315,31123,-20947,-4867,25894,4527,-3704,-16543,26765,-13988,32512,-24774,-24713,5857,31791,-4189,283,-19065,26629,24426,30005,-2278,-23167,17271,22339,-31574,-6134,-31847,27206,-107,21358,18024,21230,-28364,3872,-21043,24855,-29382,22855,-22209,26255,-18377,2750,-4707,-15415,18962,-27235,3731,25173,-26019,-13559,-18886,-946,-6386,17680,24660,-7327,7138,10177,-14030,8747,28635,-30094,-11771,-3364,17656,-19157,-10814,-18091,3625,-10927,9849,6726,31841,-5124,-20226,25721,15291,7840,-255,-4110,12091,-21693,-29524,-20362,-13184,-17754,13915,-16263,5217,11346,13675,-22367,-27732,-9426,-28428,-3101,25876,-12011,-21402,-14144,2472,19414,14995,8570,9535,3862,4129,16328,-15432,29508,5542,14892,-18254,24796,-18790,22845,-15577,-24497,27682,28591
,8063,-27785,-20112,-1424,27616,4797,-10293,-14957,732,-11842,-13136,411,23336,-30196,-28037,-16657,28864,-12809,22500,11123,-11030,-23619,18543,-7543,5617,-2472,-20200,-1488,10130,-5079,-3578,-12671,-3569,-15558,-21881,-10161,29368,12381,-21883,-22341,-5541,-9836,2445,23005,-13910,18536,18908,30994,-16602,-28459,7434,-28000,-24475,-5486,-3562,-4801,15234,14578,-31319,27709,12850,-6731,-6239,14378,-66,-12555,-3916,-4060,1057,21783,26996,-15656,23605,-7242,32418,-28267,27060,-8786,-18533,14686,30105,-15809,26621,-6341,21887,26693,20800,-23367,4560,6932,11876,30569,25603,-5125,9261,-16724,-18685,-4049,27966,-5494,15844,6568,-10578,28074,12419,-24062,28357,-8637,-21525,20332,13915,16559,23351,14941,-27832,23125,8743,27855,-9906,-31323,-32367,-7922,19076,10295,1342,116,-13473,12711,-6747,-1599,12033,1527,-29291,26262,-15542,-5055,-25688,13124,13130,19180,5825,-1006,-24922,-32732,15017,-24582,2447,-4891,9646,23997,-12713,20988,1886,18869,-29333,10531,1863,-30957,-1589,20027,-17192,-26586,-11684,-15829,-18213,-7972,4980,-27240,-7755,26547,-8118,-25460,24448,-17924,-27710,15497,14154,-25412,17296,-7509,-4047,20629,18549,-15773,9960,-15838,-21588,22299,27023,26369,-28093,23909,29779,-13965,15277,-30690,18515,-4669,9543,24746,3590,22596,-16661,-14049,-735,22675,-21798,-27337,17393,-23933,-25474,29289,20446,-7923,-8383,17407,28398,-22073,-1694,2969,-24850,-23622,-23880,-13794,9032,-13795,-740,30164,-11603,-2239,28698,-16086,14566,13111,14597,-17748,-2673,17543,-9722,-6448,15008,-30232,2025,10979,25506,14352,-21468,20127,10762,-10029,-6185,-29284,-1927,18802,-28193,-13286,-19739,5631,20164,-24687,32611,-8888,25221,14058,27856,-6635,-1044,9060,-20516,8356,-18720,-8041,6715,-32657,-20210,-25376,-5508,17495,18364,6381,-23065,-24052,1647,14660,16141,10012,16560,11545,10972,-13179,-4873,20567,24728,-8686,-10453,-27750,12467,22786,-11868,-30246,24948,10025,-14484,-19141,-32355,32408,-16599,-12050,1373,-12960,-14618,7326,-17581,-110,29507,8277,-25779,23336,-2132,15201,-5902,3544,-117
87,-58,19833,16612,-2662,-12109,-25991,-21170,-19812,16679,-25602,4815,22604,13810,-26422,32346,-19770,16698,25167,-19876,1579,23149,11655,11670,-10197,30403,-21790,15057,-3285,8141,15396,29547,-6335,8782,-9606,-32319,-22773,2205,8036,24046,25169,8770,23073,6383,-6847,28526,-32469,-4635,16280,23081,-6367,10317,-2615,25586,-21607,31442,28486,-3124,13485,-25062,-3188,-912,-5004,-1194,24703,-24840,-24970,-12244,32115,-299,8990,16986,-12746,27434,-27742,-1839,-20594,-16757,25316,24230,-1756,28941,14402,8530,19247,24913,16920,12157,10264,-1793,16331,-9796,-7880,-26380,-27987,-14649,-6243,86,31080,6602,25862,-24911,20496,-17062,10006,27673,-26812,-7965,-29250,-12999,15511,988,-17504,-14957,-1479,18267,-7303,-13328,5556,12933,-14066,-29357,-7641,-32549,-30596,5947,30037,-32108,29593,-17109,9711,-29897,-29130,-11293,-4725,27011,-1981,10182,-14379,14760,-15738,-4692,29803,21717,17629,-10962,11840,-4604,-17366,-11248,1313,995,6943,-2795,-17641,-20078,14545,24148,24137,14625,2344,-1397,-16320,-16375,32754,9466,30861,-79,9500,6895,-23948,-30437,-1397,-10199,-20112,-18540,32468,15926,19042,1487,-15575,-18303,-31054,16950,14160,21234,21637,-3906,-931,-22668,3351,12480,31251,7679,-9230,-21060,13827,-27706,-27230,-10250,30352,-27807,10272,30478,12104,-27747,23393,-30664,-12869,-9203,-7177,22864,-6255,-443,-16750,-20335,-24166,20067,-31398,-21300,-32261,-30397,26800,5456,-22923,24111,16010,17050,-23356,-17124,6646,-13158,20963,-18483,11621,9591,24781,-20225,-7764,-18477,-13884,8991,17645,6833,9252,-23045,-21141,-28778,8446,-22827,1246,-29333,-987,-22310,26763,9008,17935,-19542,-984,-6438,-28776,-32398,-19710,-1403,15886,-2825,28889,-8662,-5004,25536,23236,-25084,25918,18183,-14794,-587,31367,-30122,4851,29165,-4430,13830,23752,-24082,8942,-15464,-31475,-23305,-2768,29941,8599,17418,-3022,5007,-7947,16669,7071,-28726,323,7123,-8367,30672,25580,12430,-23711,-6872,15130,17325,-17967,1127,31567,4165,28924,-6344,-8156,12917,12318,31630,-689,-20407,26765,2763,679,-2107,-11966,1978,29969,-
708,-2851,-10960,29778,2531,-16903,-10121,-9287,-26699,22798,12345,11814,10073,11461,-12265,-1198,16084,-24645,-3135,-11312,19124,-10520,-29067,21421,27739,-10076,7046,12140,26327,-17506,-18681,11411,7146,-23294,-15605,23011,-13866,-16740,15591,956,-3546,25840,29008,-19357,-3823,29692,-15140,19979,7043,-8182,-27814,-16246,29464,-21635,-26263,-29224,15759,-15823,10267,-10164,-9398,-19993,-12131,18176,-121,24507,12992,-29745,10006,32137,-1221,-11383,-427,-1595,30068,-25566,22854,1310,17658,6444,-17607,1068,-3182,25199,27478,14023,-18002,-19079,13229,30394,5884,-5221,8897,22993,-23220,-6850,-6577,28210,14811,-12806,952,11697,11680,-27860,-4086,20223,29694,22931,-21411,22619,-16158,-17757,29138,11661,-4337,18266,-11837,-20819,5040,30119,-9981,-20757,18072,4741,29144,11136,-18166,17538,-7444,-26541,17824,16436,-3067,-11764,-7033,24954,-20319,-1244,-6182,-13346,3732,-1734,-29101,-23419,19966,-26962,-10503,-20480,-31398,17808,18043,17534,21552,-8699,-14112,23618,-11770,8589,8938,-28406,10159,-23118,-16979,30926,-20584,21073,-29766,-31712,23474,26607,-30603,-11175,4945,-11404,25831,-18378,31439,-20194,17039,9177,-28121,24432,8378,28965,19792,-30834,21811,-7886,11310,22142,-9188,26309,18310,13476,6854,7186,-20546,30477,28319,267,10695,29125,23417,5874,8690,-32736,16259,-19531,-2541,15125,13103,-21381,6752,-12740,-17245,3291,3894,-2280,-22814,25375,4267,-11053,24941,11572,-6959,-23412,27657,16545,2420,-7259,5876,17564,16464,28472,-18404,16983,-4021,-18893,-27388,16562,1004,20307,27001,-23458,-27515,-168,24862,1022,32111,13787,3854,10108,-25432,-19515,5894,19932,-10956,-6824,14400,21996,-10615,14171,14804,13795,2624,16717,30345,8768,12843,2082,-18912,-16775,25764,-7408,12886,-24820,29209,-2756,-13188,-4897,24899,23301,-22176,15315,-7001,-8195,-24859,27335,18947,22607,-28926,9020,705,-4350,-28482,10460,31475,8508,27158,14573,24493,30656,10632,-3500,8915,-13868,-23175,-22871,16949,21589,32096,-16824,-11525,-32523,23985,11090,-30127,-12498,-2386,-5508,-27952,-27840,6792,-29126,12
619,-27960,10977,-13085,18185,-8746,-26469,7172,-31012,-10923,-9366,-20449,28670,32037,-18091,20024,3388,-24224,25418,10407,-14354,31993,-20689,-28237,27532,-31929,-12919,-31855,27281,-16528,4108,27957,-14167,-292,25530,-15525,-21674,-11892,-4147,-21416,-6740,12624,-1563,21873,7543,15969,24491,-17992,20521,-9719,7323,-6255,1050,-13178,-24080,-2927,-15268,8800,12196,7152,-15906,22951,17153,-12299,3889,-24467,-17716,4913,17682,16671,-11699,-8978,26471,-13169,-21780,3223,19770,-23373,9595,-23032,-17147,-24285,22484,-9094,-27368,20280,-31797,15839,31701,-28465,24581,2309,8386,-1160,-17956,5478,-30166,-5268,5910,-10062,25878,16850,-374,-8459,5864,-22280,-20978,-11129,928,-17215,25035,21568,-21112,10466,10269,-30121,9494,29400,30411,-4528,-12927,-1302,31313,6807,-30478,18156,-15285,-14938,6473,-23961,10917,5153,-16916,7836,7752,29192,21325,-1685,9670,24849,5772,3704,-15119,8739,31849,13328,-31422,-22787,19183,667,-3352,20635,-1934,-15497,31730,-21759,27665,-7283,-13740,-23248,2300,-24084 diff --git a/tensorflow/lite/micro/integration_tests/seanet/conv/conv15.tflite b/tensorflow/lite/micro/integration_tests/seanet/conv/conv15.tflite new file mode 100644 index 0000000..266eb78 Binary files /dev/null and b/tensorflow/lite/micro/integration_tests/seanet/conv/conv15.tflite differ diff --git a/tensorflow/lite/micro/integration_tests/seanet/conv/conv15_golden_int16.csv b/tensorflow/lite/micro/integration_tests/seanet/conv/conv15_golden_int16.csv new file mode 100644 index 0000000..28be133 --- /dev/null +++ b/tensorflow/lite/micro/integration_tests/seanet/conv/conv15_golden_int16.csv @@ -0,0 +1 @@ 
+-12636,-17006,19007,-6858,2291,-514,22685,12521,-7258,-6109,-22115,10767,-5764,5764,-5145,32767,-8303,32767,20231,-7774,-6943,7328,19813,14635,6398,2817,-11425,18517,-2059,15801,29079,-12816,-10988,20126,4705,-12238,-4463,11532,-4813,-28295,30032,14099,-4625,6681,-3222,-3945,-4032,-25001,-3965,25500,25104,-22436,3273,-16132,-16343,-3897,4791,-21997,1888,18149,-14625,-8664,-21776,4944,12374,11413,-16725,-16425,24581,22072,28539,-12414,-15516,-4432,-8063,3393,3828,16446,32767,-10231,-8000,205,27789,-10044,-13173,16789,-19424,-23924,1442,-8838,-28142,-19233,32767,10688,-28986,10713,23022,18887,5817,-1641,-32768,14962,22545,32767,22732,12173,10645,-14639,6475,-12574,32767,-24786,-22923,32767,-6062,-17686,32767,-31605,-3560,-5336,3775,22479,32767,-15067,-32768,1528,22156,3681,4884,-8794,-14920,-31154,10131,2719,-7219,23598,12867,-16916,-7726,5177,-2125,6681,-19107,10449,3711,7245,3864,-338,2371,13575,-22393,6117,6015,290,4861,32767,-31272,3749,22180,-9487,-18044,4014,13383,-22138,-6097,-13421,1595,32767,-27215,25954,12231,-5252,-4151,-32768,-23269,-19531,-28700,2877,4382,218,3267,-8570,6912,-32768,-1463,-18814,-3519,-13178,-22348,21180,-32768,-10113,15355,-13331,-28398,1221,5613,5219,31092,14791,-32768,32277,13015,10999,-10162,30870,21209,4925,-14499,25388,23353,-3357,-6688,9036,12537,12438,-1039,19503,-17993,20663,6870,12578,-589,13048,-28759,1258,-9987,-1257,5789,-17576,-3307,9068,-11314,-23059,-5483,32767,11809,7979,-21306,32767,25541,-32768,32767,6381,-32607,32767,32767,-11316,-3522,9201,-29322,-10126,32767,5936,-10554,-15391,19101,-8196,7215,8922,18665,24127,-18893,10638,-25610,31498,-7305,24542,-1597,-32768,32767,-9223,18304,32767,-32768,-11227,22469,6545,3298,22363,-4709,21983,4929,-1621,-32768,-11251,-3334,6535,866,-8261,5243,8607,705,-22043,1186,4298,32767,2951,-22994,4405,-5883,5623,-32768,19854,3211,-5819,-24099,-6728,-6073,-24000,30720,19372,8716,-5855,-2489,197,32767,4024,30558,-30434 diff --git 
a/tensorflow/lite/micro/integration_tests/seanet/conv/conv15_input0_int16.csv b/tensorflow/lite/micro/integration_tests/seanet/conv/conv15_input0_int16.csv new file mode 100644 index 0000000..302fb48 --- /dev/null +++ b/tensorflow/lite/micro/integration_tests/seanet/conv/conv15_input0_int16.csv @@ -0,0 +1 @@ +29914,11868,19688,21744,32565,-4708,2431,-5618,-8879,12690,-4845,12975,-11490,-12195,27451,-5791,-31090,-24091,9453,-10736,13102,-26036,-14200,17434,-1918,3491,-23723,-18974,23239,4544,12648,-27695,28697,4977,-19096,-6192,22929,4035,12097,-30233,-13651,-24535,4285,-15076,-17430,-23168,-6647,24054,-26757,-9412,19814,-251,7896,-1220,-6312,-21659,14607,-12409,29667,18559,-17227,-24930,-13352,5876,29793,-214,1214,-4249,-20640,-1007,-27714,-10452,1799,-2244,5846,-8802,-17725,18789,12572,22171,8703,14863,227,-18599,19049,-18426,9051,-31418,-3729,-16573,18786,23099,-2676,-32203,21850,20650,-20078,-16205,-27792,13852,27427,-3351,-21701,14219,19228,-6144,23655,17090,30087,-6920,26153,-6772,-5251,-15149,-119,5328,8762,4492,-4838,21142,-19688,166,7694,-4739,-18159,22481,-8088,-25448,514,-15077,10132,-1890,24122,4503,-31907,2162,-18075,23467,168,-11870,-2941,22600,-14509,-28447,31873,32052,26340,6837,29390,23853,-17597,-8295,32427,-25176,-31226,22703,-18963,1896,-27296,-638,9367,-16180,-24803,3854,21162,-21164,10045,-2241,-25029,19719,12290,2943,-27799,26615,12086,-1045,3214,-3596,-135,7034,2101,7164,20672,1020,-10828,26121,-8345,8955,-22504,-10610,-32757,-28452,-7638,-8761,12188,-9448,-23631,12477,-8884,31682,-6666,-14180,31808,12020,-28271,4510,31280,-326,-22735,32705,-10931,-1713,-9937,21237,9687,17640,-26571,-24361,29420,7586,21359,-17620,-13395,-20041,-17464,9399,15013,15077,30947,-17684,-10930,-13045,27417,17526,-16489,12782,4303,-12078,27175,23032,10090,-25585,2838,-20467,-5230,28351,15214,-27674,-20870,-25624,8347,-27004,30589,455,6633,-21016,26041,-9807,2724,28061,-11023,2963,-11462,26011,-32547,15194,31452,-13049,28782,20008,5917,3865,12811,-23852,-1309,-32548,21
337,6375,24075,2090,-2170,-13502,-1639,-9202,-10225,-1972,-11561,-30163,25571,2585,23213,454,-4349,-20742,29341,11169,-16518,-18003,-9961,-6686,-15275,23671,14515,3738,24226,-19904,7016,-20261,-25692,-8153,-29303,-7205,22591,23105,-15715,2545,3631,5251,-6812,32150,9725,24058,12425,-1505,-19508,-298,8809,28831,11232,-433,-31045,13253,16604,-19556,-1760,22524,17262,-16717,16116,23580,1209,-9785,-22125,11187,28790,1276,20702,14519,-27805,-9070,-13019,9249,30713,-15587,-8798,18251,-10600,25820,5188,-24306,25313,12218,11295,-32606,12506,-10156,-9709,14346,-10061,-21201,-28777,-29655,17007,-2471,-30376,16082,28836,-25806,-2648,-22921,20492,29072,11137,28419,-7301,-25373,-17157,-1531,30190,6697,-23035,-21548,29255,22943,6072,20530,8685,27276,-22252,25008,-15285,-17378,12395,13090,32170,12249,-7507,27129,3727,-23192,-3120,-13548,-19776,-26616,-21427,31836,3747,-4340,-15203,16811,7636,22690,6524,-5897,-19585,-19086,-13258,-3841,11469,-13843,473,26325,-24286,-14711,-32265,27182,26089,27415,13465,26824,14336,-5131,-13992,-7704,-31472,-21692,31021,2044,-7130,4257,27757,-4219,12291,-6501,-14782,550,-3962,18034,-3391,-31588,14662,-24328,32157,16910,11891,-9895,-31074,16380,-13329,-1738,-16696,23156,29993,31955,16055,4763,-25728,26976,-31817,19994,31515,-30983,-15337,-29632,-30767,-27576,8259,-16107,-24391,-20077,453,-7815,7045,11781,-31657,-20454,-2058,8099,2078,-5953,6778,729,-30362,7535,21502,29131,12989,22319,-30521,-16002,-15243,20782,9956,-1348,29622,-9073,21138,17903,23198,-20357,-13,-20864,27453,-14723,9111,13131,-25238,28981,3039,25309,-800,10738,18953,-1832,-4544,12735,7861,-4033,-29751,-1999,-31890,5039,21846,-99,-26837,22611,12166,-14042,-9487,-11251,26889,16321,5290,-1059,23769,-32367,-18278,-18300,-16935,457,15120,-31521,4895,21276,19277,-3223,-30516,-28877,-21677,-4422,-9530,11546,-13344,6767,15477,10878,-24138,14939,25203,9217,-30434,-676,-11953,12959,-19441,-10056,-26215,10426,-28252,-28674,-16141,-677,12132,-17596,-9319,8726,-18440,-10073,18065,-23325,-29685,-164
47,-4338,10732,24295,22392,32124,-12515,27622,2888,10286,-26083,-3033,-32080,27571,11014,28858,-9869,-10300,9290,-1682,28707,-13752,-3953,4324,-31414,19398,-14997,-31605,24935,-7299,-28605,7737,19963,25025,12286,-11734,12488,4158,-88 diff --git a/tensorflow/lite/micro/integration_tests/seanet/conv/conv16.tflite b/tensorflow/lite/micro/integration_tests/seanet/conv/conv16.tflite new file mode 100644 index 0000000..29f6380 Binary files /dev/null and b/tensorflow/lite/micro/integration_tests/seanet/conv/conv16.tflite differ diff --git a/tensorflow/lite/micro/integration_tests/seanet/conv/conv16_golden_int16.csv b/tensorflow/lite/micro/integration_tests/seanet/conv/conv16_golden_int16.csv new file mode 100644 index 0000000..dd843cd --- /dev/null +++ b/tensorflow/lite/micro/integration_tests/seanet/conv/conv16_golden_int16.csv @@ -0,0 +1 @@ +-32768,-32768,32767,-29933,-21216,32767,16766,25653,-32768,-27758,-112,-19631,-6702,21490,-10817,-19446,-32768,32767,16238,-32768,-32768,-32768,32767,-29830,-26863,32767,-28825,32767,-4820,-24464,-32768,-32768,-32768,32767,-16126,11655,14117,32767,32767,-32768,-32768,-32768,1077,-9395,-12342,-32768,-22430,-32768,32767,32767,651,-26738,5752,-32768,-1244,-13453,32767,20030,-32768,-32768,7438,-32768,-32768,-8655,-32768,32767,-32768,32767,-23460,27190,32767,-32768,-16214,-24328,32767,11261,15365,32767,23964,32767,-32768,-32768,9528,-25022,-11633,32767,-32768,32767,13506,-32768,20726,32767,-6293,-32768,-30832,-12985,32767,32767,32767,32767,32767,-5129,32767,32767,32767,32767,19756,32767,32767,32767,32767,-22655,32767,32767,-32768,22145,32767,32767,16363,31709,32767,32767,32767,32767,32767,32767,32767,-15814,32767,7400,3741,32767,32767,15252,32767,-4322,1455,16379,32535,15576,-625,14378,-27307,32767,6736,-32768,32767,32767,26205,32767,-5311,32767,20313,32767,-19379,32767,32767,32767,32767,32767,32767,32767,19170,-32768,32767,15157,32767,6273,32767,2159,29775,18313,32767,32767,29507,32767,32767,32767,32767,32767,32767,-21022,32767,28810,327
67,5496,32767,32767,32767,32767,32767,32767,-20616,21542,32767,-32768,32767,-9144,-32768,32767,830,-18033,-32768,-699,422,-32768,-8242,-32768,4728,-32768,32767,7843,32767,32767,32767,-32768,18097,10792,-6739,-862,32767,-32768,-32768,-14534,32767,-32768,-8854,20586,32767,11769,32767,6426,27417,-32768,-5924,-12985,-23322,4266,-13835,-14049,-2737,32767,32767,-20040,32767,-32768,-29189,-24418,-15518,696,-15106,-32768,-32768,32767,32767,17973,32767,-32443,-26347,-32768,-11403,32767,32767,32767,32767,-8365,32767,19282,20787,-7381,-4726,32767,-21720,30043,-32768,16063,32767,-32768,12317,32767,32767,32767,-32768,23128,-11224,32767,32767,-32768,32767,-32768,32767,32767,7799,-8031,32767,-3860,-25484,32767,14998,32767,-5165,7620,32767,32767,29382,32767,27405,32767,8000,32767,-18108,-32768,-18158,32767,-32768,32767,16083,32767,32767,-7413,32767,19509,-7097,10407,32767,-16456,32767,-7708,32767,2823,-4068,32767,-23479,4167,11996,17071,-32768,32767,1988,32767,12741,-32768,32767,27178,5064,-32768,32767,-32768,-32768,20038,-32768,32767,-2313,-32768,10502,8603,-32768,-32768,-32768,-32768,-32768,2478,-1389,-12089,-23296,7380,-3105,32767,-23666,-32768,13948,-32768,-32768,-32768,-28938,-19874,-32768,-16424,32767,32767,-9567,-32768,-16974,-32768,-32768,-32768,32767,18999,18668,-32768,17547,-7985,5435,32767,16163,20047,8229,32767,-13376,-31770,32767,-32768,-25630,19157,32767,32767,-22695,-32768,-4706,32767,32767,-32768,5096,32767,32767,9937,4813,32767,17682,26797,32767,32767,-32768,31233,-32768,22227,32767,-4765,10403,4821,-16065,1261,-32768,32767,22444,32767,-32768,-32768,32767,-5142,16877,9995,-12134,-32768,30724,32767,-32768,-2926,15289,-32768,32767,-10601,-24131,-32768,32767,32767,5576,32767,32767,32767,-31193,17249,32767,-24793,30332,-32768,-32768,-32768,-32768,32767,6140,32767,-32125,32767,32767,27362,32767,32767,32767,32767,-14319,-32768,32767,-8371,-15383,5404,-32768,32767,-32768,32767,-15043,-32768,-9970,-32768,-25284,-32768,-32768,32767,-32768,19649,-32768,32767,-32768,25004,-26
31,-31105,-32768,32767,-7965,2273,-32768,-24161,-32768,824,-32768,-32768,-32768,-26707,-32768,-32768,-21624,-32768,20505,-32768,-10667,31180,30216,-32768,-32768,-32768,-32768,-32768,-32768,-23022,-32768,-32768,-9049,-32768,32767,32767,-32768,-32768,-32768,32767,32767,-32768,-31313,-32768,-32768,17683,-32768,-32768,-32707,-32768,-32768,-32768,-32768,-32768,-28604,-32768,-32768,-32768,-32768,-32768,-32768,-32768,-32768,-32768,-3399,-32768,22878,-32768,-28043,-32768,-32768,-32768,-22157,-32768,-32768,-32768,32767,-32768,32767,-32768,-32768,-32768,-32768,-16584,-13260,32767,-10788,-9230,-32768,-14382,-28407,-32768,-32768,19977,-32768,-21943,-26150,-32768,32767,-32768,-32768,-29039,28380,-28868,-6836,-18688,26177,-6100,32767,12848,-32768,-29982,32767,-32768,25948,1108,-12193,25082,-23894,13693,6989,-32768,-24901,-32768,-32768,32767,-24847,-1927,-32768,-32768,14790,-32768,-22272,-28701,-32768,-32768,20599,32767,-32768,29070,-32768,5750,32767,-32768,-32768,-21315,-15185,-9904,32767,9386,27566,32767,32767,32767,-17497,32767,-14836,11251,32608,-16855,14374,-32768,-32768,-25379,26446,32767,32767,32767,32767,-32768,32767,32767,-4765,28229,32767,-3273,-32768,18933,30306,24095,-12881,32767,27550,32767,31370,-19982,32767,-2399,31393,-31220,32571,32767,-21624,32767,32767,32767,32767,-32768,32767,32767,32767,-32768,-24407,28742,-32768,29006,6137,32767,31895,32767,32767,15108,6534,-17268,-13159,32767,32767,-32768,32767,-21819,28658,2385,-32768,-32768,-32768,16793,27425,-8115,32767,-32768,-32768,-32768,4954,32767,-32768,-32768,32767,32767,-32768,32767,-32768,-15208,-32768,-32378,-32033,-20084,28324,-32768,-32768,-32768,32767,-32768,-32768,-32768,-31687,-32768,-8201,-28943,32767,-32768,29916,32767,-32768,-32768,-27092,-24733,-1107,2121,-1078,-31482,-32768,32767,-18936,-17486,-10511,-32768,-20170,-2178,17056,14188,-32768,-32768,-32768,-32768,32767,-11010,-32768,-1037,-17972,-26477,32767,-32768,10317,-22722,-32768,32767,30437,-16356,-25085,17604,32767,-13,32767,32767,32767,15766,25747,-
24315,25863,32767,-6044,-32768,-5424,-32768,-32768,19406,-32768,-32768,-10944,-32768,32767,27062,-32768,32767,32767,-11512,-32768,-32768,-10223,15241,18536,-6614,32767,27169,32767,32471,-31924,-5630,32767,15715,26849,31452,-8784,32767,32767,32767,7162,-6862,-800,32767,7870,22850,23674,-1562,32767,32767,32767,32767,32767,32767,442,32767,-32768,16382,32767,32767,29884,-13566,-29821,140,32767,6955,32767,8183,-10773,-32768,32767,32767,11683,-32768,-10335,-32768,27287,-28243,29137,-17333,6079,1931,22317,-8566,-32768,-32768,24023,-32768,5867,-32305,-32768,-29853,32767,32767,-24642,32767,-12544,-2411,-9608,17948,-3833,5977,-32768,-32768,-12621,-32768,-32768,12965,32767,-32768,-14311,20812,32767,32767,30494,-32768,32767,17526,32767,29786,10907,-1063,-32768,-15830,30,27881,-32768,19969,-16183,-17501,26249,26317,32767,24910,-32768,32767,32767,-32768,-32768,-11661,-184,15086,17415,-32768,-25836,32767,-23900,-9985,-32768,6772,15354,32767,-16186,25026,2014,-32768,-32768,16707,24540,-7522,-32768,-11201,4443,-32768,-30417,-24141,-32768,31431,32767,-32768,-32768,32767,9310,-32768,-23973,24684,-4103,-32768,7760,-32768,-32768,515,-32768,28993,-32768,-5047,-3935,-7599,32767,-32768,-15849,32767,17907,-32768,5341,-32768,-29102,-32768,-32768,32767,12440,-19253,-22320,5874,7764,-30391,4519,-32768,23211,-32768,-32768,32767,-32768,-32768,-32768,286,-20447,4208,-32768,-22453,32767,-32768,-32768,-3503,-26161,28385,2696,-32768,-8559,32767,-4860,32767,32767,-8454,32767,28837,32767,32767,-9365,17645,852,-32768,-23851,-19569,32767,32767,-23993,-14599,32767,16278,30490,13584,32767,-23901,32645,23826,11914,31416,19546,32767,-1266,32767,32767,32767,6155,24707,32767,32767,32767,32767,-19966,32767,32767,32767,-6496,32767,32767,32767,32767,32767,32767,26338,32642,6190,-32768,32767,32767,32767,32767,32767,8292,13761,4835,-32768,32767,32767,-32768,-32768,-5578,32767,-32768,-952,32767,-20968,32767,32767,30374,32767,32767,27384,32767,26946,-8933,4597,25643,-18376,7248,32767,1287,-597,32767,8266,-2563,10384
,32767,-15295,-32768,21552,-32768,32767,32767,32767,32767,31167,24016,-29242,32767,13045,-13840,32767,-4427,-32768,32767,6763,32767,1762,-32768,26655,-718,32458,32767,27073,-32768,-8017,23010,32767,32767,32767,32767,-23453,-32768,32767,18295,2981,-4820,32767,32767,32767,-32768,-32768,19370,32767,3648,-31371,32767,32767,28082,19404,32767,-25628,32767,12226,32767,32767,791,16477,32767,-3406,19148,-15335,32767,32767,-645,-4034,32767,-32768,-32768,-6153,32767,22131,8482,25364,-20252,-32768,32767,32767,-14328,25433,32767,-919,32767,-12753,32767,-1558,-30736,26978,-2407,27261,-32768,-8090,19189,-20310,32767,32767,-5048,32767,-24817,32767,-32768,32767,7366,-2894,32767,12716,32767,32767,32767,4078,-32768,32767,32767,22998,-13633,-30184,11816,2741,32767,31394,32767,27443,30079,18645,32767,-28461,32767,32767,-32768,-32768,26714,32767,-32768,32767,-13139,1654,32767,-32768,16978,32767,-9008,32767 diff --git a/tensorflow/lite/micro/integration_tests/seanet/conv/conv16_input0_int16.csv b/tensorflow/lite/micro/integration_tests/seanet/conv/conv16_input0_int16.csv new file mode 100644 index 0000000..2d076a7 --- /dev/null +++ b/tensorflow/lite/micro/integration_tests/seanet/conv/conv16_input0_int16.csv @@ -0,0 +1 @@ 
+-28,-27266,7543,31441,-21938,22918,-25498,-22986,-10877,24749,31687,12072,-24645,-12706,23341,-6390,-31638,-19771,-31582,-23734,20073,-25718,1333,30007,-31722,-1341,-29400,-1305,-22170,2213,-28089,19510,11097,-28112,21958,-2844,8094,25390,12831,32680,-31254,-851,6261,-10893,18729,-1455,3829,21264,26558,18221,1620,29692,15245,-7895,26158,12408,27298,-25901,22119,18921,29160,-31599,-22509,-9810,-17407,15452,-478,24203,-12332,26422,578,-21331,18040,19310,5527,27973,25414,14052,14284,12712,-12016,30566,-16585,23149,-11640,-1948,25781,2159,-23376,-28352,16495,-13265,31890,18534,1818,24897,12293,-9007,-18397,2338,18630,10459,17691,1462,15062,-21794,-28836,24806,-8462,-32289,-26572,-4856,-20211,26233,-7196,-24239,-23982,-7663,-31638,23199,-30812,-32515,-17446,-27243,-22633,1725,2983,-13004,21206,-22508,9876,12558,6146,-3874,14920,-599,17076,10171,-9105,15155,16301,-16496,-29049,-30482,4965,-18562,15568,-2537,31242,-4590,-4005,-5856,-31719,26074,-16051,-10254,23831,-11930,-14941,-16737,13784,10393,27493,-31719,22683,23075,-25376,-10299,-4735,-32314,-22316,-5347,-24292,-16157,-27156,1430,-26554,-30528,21149,11608,4997,821,29328,18237,7734,-17538,-24142,-21942,-24716,30638,27288,10327,-30040,-5349,8793,10300,16712,-19162,-29055,-8343,18200,18122,20075,-18933,-22924,5625,-3427,-15593,24400,-4971,9414,8496,-28837,-20030,-3045,-31141,-31844,20830,11109,1199,12982,-21442,2243,3468,-11884,-23223,-9056,28058,-14725,30260,25480,-12272,-13601,-22044,2238,-7751,29877,-8668,-32278,9904,-31882,3241,15910,-24044,-3500,31668,32633,-10091,6634,13227,21887,26388,-14801,-20424,6318,-32258,13092,25071,-10979,-23510,-10244,6698,1246,-4185,6651,31861,-4260,2610,-13636,28579,2235,31083,28124,20106,-26829,17384,-10148,-15165,-21876,29395,15127,-32136,-18042,-24148,25732,351,22571,28605,-3196,9156,-23998,649,-27877,-30799,280,22816,28860,26328,-32589,-1771,-31193,13654,-24485,-21032,-19978,-8457,8935,-22925,-27101,-1109,-11048,10343,-21721,-15583,10861,31490,-13550,-28573,26682,838,-32245,2200,-7
971,-9828,4023,-25867,-17985,-30961,-218,-30366,31430,28923,-21808,20665,7179,2890,-13235,-9350,-26622,17495,-15392,16902,-25591,-8990,-30063,-9774,-20219,3498,29369,29792,11358,10037,-27406,-6686,7754,30510,24225,14824,-8375,5002,3959,20404,23583,15436,-11470,18348,-14097,15661,-18491,7376,4336,20649,8394,-31689,31004,13581,851,-5910,-16994,-11146,-27625,-30522,32479,13778,-14761,-4771,28161,-2562,-27024,21512,-22827,-18545,-6527,-1492,-21836,3551,6706,-15647,26817,-14825,-1402,-8985,26809,-3934,-2615,-13786,18971,26421,14645,16519,-13135,16152,28509,-20272,-27042,-23931,25754,17376,-32090,2855,9763,-19496,30063,-29097,25235,30630,-29202,14869,4875,21123,1945,4635,27105,-16559,-6180,291,-18913,-24346,-4452,16621,-21310,8284,19163,14606,1079,17084,-22742,-29948,-2298,-2753,-27206,-15428,8310,19901,-4257,-13969,-14691,-31978,32465,6251,26268,30256,29276,-29164,4885,-1819,16538,10077,2222,-103,29806,3342,-17950,22995,-479,-13014,-22888,5787,-309,-14776,-4412,-16834,22001,13674,10612,-27089,-16412,26806,30721,5122,2831,-10269,-15457,6140,-6444,-10097,-15125,28507,-1890,-28378,-15491,11676,23269,-3853,-16252,-20270,-3698,-25378,12224,28337,7656,11998,1707,24660,-10923,-16468,32716,10840,-21069,32034,14722,-8665,32580,-10077,16266,-24270,15072,-1381,-6806,24635,1954,7467,30202,30107,5980,-20771,-8995,-16898,-4468,13436,-19129,19042,-25918,22781,-29772,32417,20202,-20020,-28459,-31499,-15178,-8383,-31016,11979,-8102,9528,14066,30067,9810,22776,30463,16399,544,-22001,-13753,-20047,29316,9653,16501,-13918,-11135,-20198,9899,8760,-806,633,-22600,5001,1545,21110,21844,-4367,-12957,28614,-13019,-19767,-19687,30017,-1638,-8161,-4452,-1054,3570,26836,32335,-22877,7554,12966,-25213,23682,3559,31754,6117,-12386,32729,-31744,24210,8520,5524,-2534,4944,29404,10709,-30310,-28035,-19390,21419,373,-18218,-11259,2215,-30947,24607,-8877,16704,-25670,29990,-25769,-9710,20336,26559,8947,-18881,5037,25841,5258,-2975,-23616,10768,-11704,22117,-4854,-24200,-12422,-429,4362,1826,32638,-7170,-3
0095,-32499,1537,4492,8336,-14716,-23818,-20404,-13096,9833,-10363,8063,-20833,-29021,6277,-16748,16258,13790,27348,13019,-3659,16566,-890,29994,-3720,-25594,4663,-2953,21392,-27311,17357,-2454,-2863,-27479,-5265,18591,-28130,-18907,26067,25262,-19531,7725,-25449,3089,620,12734,3575,-17867,-22142,-13060,-16591,25977,628,6194,-27504,-28944,22242,-27945,18639,-16872,18327,12263,-23334,-21991,-10604,-26630,-3520,-7688,-28020,18249,26121,-1963,-10643,26368,20934,-20249,18752,-9447,22490,827,-14230,32348,8200,-14354,20422,-20420,-13220,29523,-24646,-21728,7283,5603,-9444,-14174,-32155,15966,-12705,21499,17718,-15967,15888,-20720,3796,-28827,-11528,14020,30929,-6114,-23136,27721,20398,28908,32290,-27390,26690,1252,10675,-15446,-21410,5534,11122,-12206,14750,-28141,27655,-19959,-19134,-6703,-6095,27456,-25306,10050,23395,19870,17475,8260,-28995,9586,-23422,23216,18137,-18900,-30282,-19216,7177,24505,11824,28257,31774,18295,-10243,24636,15583,-25666,22849,20881,-23695,-13690,8848,-1981,24790,-4744,15941,10062,-1139,-20635,-16514,-17588,8508,-4528,-20953,31336,-4401,-6701,8620,-9526,622,2557,16290,23630,-17695,-32355,-23784,-17623,-22602,1541,10523,-3684,-32570,-19595,-30243,4708,-14416,15178,-12494,12001,-21558,-10570,-12796,-1572,-29069,23668,-25692,31365,-6357,1179,-24394,28269,-27772,-29385,-10525,-6062,-29422,-22866,-29630,21619,31894,7592,22283,-15352,15936,-26514,14848,-13693,-18260,-10246,16978,-16654,-25961,-4561,-11508,19448,22211,16423,-15913,20404,19372,-6917,17074,-4887,25282,30193,25154,17522,8174,-16235,-23570,-1022,-26891,-15625,-3869,25837,-15885,-816,31842,-32253,-1404,276,15233,-4437,28053,28479,-6435,9202,16764,-16474,12418,-27091,-15828,-11940,2974,6132,-28926,-19385,-8925,17214,-6833,12544,2425,-8205,17753,19895,18161,-6421,-30349,9097,4333,-7554,-574,24833,-25457,-20247,-26049,-30140,-31087,-822,15668,4709,-19065,-12732,-8684,-10342,16653,-2456,28988,13268,30099,14619,18293,22629,11049,25272,30949,21264,26028,17974,-342,15265,-32414,-29211,12802,-9057,
-1876,22709,-31428,14366,-9597,810,28679,-25096,-6648,5794,-7605,-24236,-1006,31097,-504,-13412,25078,6812,11643,19858,-3257,23906,-13006,6460,14663,24353,-19530,28409,17366,-25091,23519,24597,25167,-30977,-31595,4343,-3640,32563,6140,1286,5949,-27367,30578,-27627,-15319,-21595,3607,11605,-11430,-15265,-8376,21998,-10996,23088,5426,6500,32510,32586,-13382,27485,-21891,-24208,-4479,22345,30927,4295,3471,-20138,-2270,11375,24390,-23058,24240,-29440,19396,14173,-6220,-4071,19096,21568,19246,2536,15277,-25822,-24317,16831,-22326,14868,-20391,19789,-8029,-30132,19560,-28538,10604,-12591,21085,24224,24109,24816,19573,1329,-14978,-22972,24817,-18862,5353,30163,7659,-24622,-27228,-17635,22780,-19473,-2059,21558,19394,15912,30435,3647,22811,-9877,31147,7228,-25101,15819,-25216,12633,18729,812,-20763,27120,5237,-23869,18813,5721,-9352,-23940,-29367,-771,-4063,-22194,-27657,-2787,-3327,-18898,11471,-648,-31834,-1504,-30458,21528,-26291,-30247,-3013,-32274,19691,-25498,4564,-20228,-12527,-19682,27289,-21023,-14072,-21584,11827,4354,-5353,-10725,-27822,24367,-22559,-15101,-24922,-15860,28690,3390,3760,16917,-17282,3555,-11530,-25062,-32228,7519,10483,-29372,-13140,-29808,-2929,-26354,-31303,-16301,604,-19967,12447,23964,4429,-31003,-7182,29091,-379,-10793,29853,-1113,5500,-17720,7470,-25065,-29643,-6270,25805,-514,19302,-15321,-21136,18547,14001,-18178,361,-7147,28875,11846,-15809,3191,2939,-11263,17519,1296,-4960,13046,-7697,6334,28223,-27627,-24853,30658,802,10826,-24923,30476,-26135,16850,7345,-23047,-32039,15593,9107,-2932,-32735,-3599,-28414,293,715,22672,-21817,-11341,10586,-9962,-29119,17902,21379,27222,-28239,-17278,-21525,22920,-24284,31215,29834,21536,-28501,-23436,2941,-20291,-13496,4924,19847,29424,-28410,16318,31758,-26443,-23125,14523,-23189,17198,-10795,-25180,-25404,23917,-18726,-14902,-2962,-21790,2573,-17672,-17188,-13481,10041,-8445,2159,-23475,18374,-31678,-22657,7177,18158,22665,-25457,-31678,5212,16639,-25252,-15238,31581,21565,-28785,-24735,-2964,-1152,285
23,350,-30828,21362,14726,-28809,11551,-18324,-12018,6896,-13075,-32434,-19851,26975,-9987,-6625,29552,-3120,-32648,19150,-9724,9781,-25446,1409,-3282,-3435,30830,7193,-24731,-28866,-1947,-31010,-11889,-14553,5805,-11583,-19264,-22919,28827,4551,2034,28718,6235,-2050,-9095,9291,31301,19054,-15248,26532,-20330,25108,-5631,-17998,-8945,-29977,13512,-25147,-28630,-28547,12545,-6,5543,-26917,-23389,12062,19670,10307,-21352,20061,19938,-14240,23355,10007,7161,-11588,-25680,2405,-31874,-17573,28758,-27740,10689,3858,10236,5776,3741,-24642,-9275,-29192,-8987,-1922,22416,21343,-21740,-11319,10674,19927,-26316,2090,-1362,32240,-16800,-28550,-22193,20572,18325,-3646,-2467,-1487,-9380,-17663,13902,24081,6439,-5124,8949,-10387,-20086,25660,3146,-12353,-19905,14979,25057,31350,7365,14269,-9983,-21867,-4819,-1785,-8614,-5635,26254,20833,31719,-21441,-9817,-13757,-12633,11233,17785,-21870,567,-10659,1092,22469,792,24144,10058,27243,-10190,8350,30103,-21203,4050,31653,28821,-10480,12481,-19792,-12761,9954,-7547,-15004,12204,3896,13771,-12823,-23260,10274,-23530,21901,25204,-18574,-16651,-421,31559,-20269,-1559,-27783,-32195,-19450,27794,29447,-26975,5458,-5471,18207,-10091,-32618,18169,5258,11728,6088,13453,-12530,-28676,-12855,-10560,-30032,-20933,-3966,24692,-32235,17106,23785,10693,-18214,-753,-13572,17315,-31123,15664,-20441,-23530,-27231,17235,7910,-28618,9191,-21217,-30526,5947,-22463,26156,-21986,-24534,21215,10169,6151,13306,26688,-8369,-32250,-17335,-19707,3050,-19549,15167,4318,-8482,32535,8505,-4544,31155,-15498,11882,-22531,-19079,32750,-17461,-10290,-24135,-12901,-20026,8739,31545,-28551,29753,30469,-14736,5876,-26266,-7663,24399,-3208,24131,-7774,-26668,-20912,18851,-24515,8196,-21196,3119,-11041,-18902,-207,-20346,12128,-31848,-17732,9772,-32752,-5368,-13376,-17537,11159,-21404,11642,7202,-31603,6446,-6032,-27182,-29839,-9896,23684,-13375,23160,-16365,-15561,23592,13882,-11381,28685,-30571,-113,-11229,26070,30863,-5006,24821,-12644,137,-13543,-20075,388,25943,19028,-
17841,-14802,23686,30086,-31176,-13597,-24521,24853,-21808,291,-21068,-11856,9442,24891,8328,-15424,29595,-581,-12228,-32643,-5835,27482,4998,21941,11013,16765,32530,10500,-13602,10822,15337,-7353,19581,-19726,23543,-9224,-22735,-5081,28621,5339,-25071,14009,8088,21879,-12337,-4519,-2563,-27587,22657,-22672,8478,-16118,-23978,-29354,19491,-17309,166,8798,19257,4768,-27028,-29843,-1380,-27322,19515,-18844,-6388,-24086,29446,-17332,-16790,-19425,-11530,32549,14066,11838,3911,-11570,-10292,-9833,-5356,-6409,-3474,-20224,31796,1892,981,-11632,-21652,1505,-12461,19647,-25124,701,-23047,-4550,12293,23495,4793,-12768,-30370,-18325,-4184,-32273,-8192,6226,19655,21596,29341,-25980,28415,-25809,-7741,-31831,-24025,17712,23697,3613,22561,-23252,20214,29794,-28006,11565,27567,21223,27589,28210,32063,-31073,13063,-9628,1383,7910,-24395,-29986,18225,-31405,21702,12159,25721,-29321,6050,30623,32556,-32180,11592,-3137,-8398,25861,-31309,14565,26581,30951,-868,-5135,12253,962,9126,-5516,-17563,-8705,22528,-20531,6041,3713,-22057,24031,12876,-7858,-20815,-2127,-7994,30466,17767,27172,-18283,-14296,-24566,-20942,31877,18936,-27192,3494,2614,-16365,-13756,-11715,2760,-8736,-25011,9500,-9996,-4355,-15098,-28252,32438,-30335,-31812,26491,-25521,-23037,-27847,-28908,-17858,-27690,11955,16184,23814,9176,-27491,-6213,-22546,-30770,6833,-20358,29620,4792,32030,-30849,30844,-9446,3449,-476,-628,-26476,1050,-7538,-26686,-11776,21305,-27094,5754,6735,12351,-11826,11031,-30919,17693,29217,-15541,1657,30260,13274,-1113,258,17018,-27390,-1613,21368,10635,7537,22410,-130,-12225,-18582,11070,-26587,-25408,13389,-5604,-22743,-17861,30246,-7054,-32745,26716,30513,8461,18254,-2615,3521,8289,-22475,-31462,-26316,-17937,-16780,27842,6887,-13860,15715,17835,-19360,-2797,23541,4022,-7059,3323,8152,-7708,18309,4991,-26081,24329,21441,4731,-9437,-4926,20814,11177,-16081,14202,32669,13914,-13798,-22506,20801,5679,13265,30449,25054,8990,11588,7114,3764,26716,24905,-12065,-7407,2831,-28701,7974,-18934,1161,2418
8,-15015,-25018,2165,9328,-5961,11507,24734,32651,-21672,14735,8722,26372,31520,-30402,4518,4473,10300,24201,15948,1287,-28943,13234,-17270,7264,18893,31780,-23521,-23392,25433,-10608,14155,-27809,29277,30875,-13223,6574,8895,29473,19760,30270,-18239,-16314,4346,11699,81,-16636,30739,27197,30801,-70,-25127,15241,22497,885,4136,25276,-12912,-1796,-23550,7215,-6879,27414,-26926,-23754,-20657,28236,9673,31970,12836,6839,-24292,-22655,31226,21803,27915,26090,-30206,-13302,-11732,13461,9551,18308,20135,-30738,2430,-19628,-17513,10525,13928,18858,-29952,27424,23564,-14369,31267,-11709,-20881,-23147,-27941,-18649,10592,-4730,29289,12431,-17776,-25706,-8980,-4556,-22275,16947,22399,-734,2737,-18644,-1271,-32758,-21526,-26228,20736,-18119,-4567,1485,-13748,31422,-4422,-18264,631,-604,22567,16136,-10885,-8579,21006,-13866,-4564,-26417,22852,15326,24200,-31033,-20306,-23690,3340,16625,-25599,-28188,-29843,14903,-21064,-23989,-27742,15725,-30963,17886,-4415,-25770,-32694,21677,24595,9155,6411,-18893,-1004,29290,9089,27630,14949,-16633,22523,-15206,8730,-15217,23980,-19008,7930,-6437,-28890,-31335,-24455,14701,14810,17610,21502,-11570,-15114,-28787,26169,288,-22355,24020,4604,4685,28012,19081,13488,-15874,-23243,30644,-23082,11315,12266,20264,-20235,-13124,1208,20532,-18484,7686,-10613,-29745,25505,24023,19351,-7856,-15800,-17930,24761,32201,18256,-26472,28922,18509,-9560,-14583,30292,24299,2496,14335,-18299,31736,-18787,-16223,20983,10181,29608,-30767,4603,-17321,-1172,28677,-21558,22419,23897,22159,-8811,195,-25905,22870,2447,-13943,-13842,12242,-12881,27968,-17337,-20856,23259,10818,-15734,-20173,4496,-12400,18323,-11237,-31829,12091,5463,3791,23489,21677,16019,-10840,-15404,-31933,-3040,-3501,-14917,20181,29193,3443,-14850,11550,-4815,-10319,-9113,-7863,-9087,28524,-9268,-7761,-7410,-28655,-32159,-1820,9252,2964,-29905,-5189,13889,8107,15898,15055,8949,19572,26649,-8691,-20898,-2743,24331,20305,-9165,13889,-18429,11385,15623,4003,12396,9210,-31938,-26422,-20632,21995,-14308,
-10805,-1413,28043,-12275,-29212,31254,-8006,404,-29136,-18439,-9344,-30897,-26290,-8973,-32338,2884,14378,-3451,17423,8231,-27364,-29926,9307,23803,26223,14901,-8994,27858,-27160,-10081,-16731,19790,2505,-11324,23097,-17128,27243,14915,14513,8257,-21739,24631,-20138,14442,-23726,-4477,-28192,-16182,23215,21357,-11394,11946,-20034,28413,5561,-28376,-19697,24975,13469,-610,-12136,-10720,16142,-2596,-22407,29176,-31075,-6766,782,18113,-25094,-19924,-24830,2228,-1140,17469,-17262,-25670,17448,-9337,9027,28088,-5375,-30788,19398,19506,-24608,-6853,25866,-1775,23175,-29563,23624,-4443,-315,-5047,-23891,14883,-14992,-6858,1083,12809,19242,9492,4480,-25540,25047,-28126,23381,-3760,24625,17961,-24627,-8401,-19293,-4617,1191,-25945,-40,22357,14691,-28797,18830,-2447,-10717,-5542,30919,-15512,28166,24899,1650,-4819,-22653,9343,-3868,11487,16461,-9265,7946,-26642,4234,-16543,3247,1929,32383,5189,894,29410,31386,-17386,-18190,26120,26690,-22229,-15929,-8531,68,31083,-13913,-10199,22130,15997,-13676,-1839,-8501,30778,-23479,-21357,29504,-10980,-30157,-10340,-17058,24620,-21374,6334,-19186,-31414,-7414,26122,3014,2100,13672,-29384,14345,16438,-32640,-8882,-4353,-23649,-20914,-24904,-5999,-3289,-2776,-30565,-24634,-1501,-28969,9746,-19888,-11458,22311,1773,29462,10374,29397,24160,-26431,-20903,21387,22386,-5800,-11474,5590,15654,-9883,-2213,21499,298,30240,-15277,-25868,-27678,23154,2861,27914,-29787,-28320,-27578,28110,20415,-21863,9559,22541,27889,-18770,-30019,-8042,24499,-8003,28134,14191,-25124,19722,-1279,24715,13533,5264,-440,32161,31887,-25755,10792,4383,-26317,-1546,-8833,-28740,-21512,19510,18765,-19472,2090,14061,-13485,8801,-2827,23875,8523,21842,24291,14739,24023,-31729,-2947,610,-30673,24090,13538,-16576,-31712,-25543,4197,18666,12351,-22518,1129,-9503,29698,-31487,27707,17262,-28823,-16381,-15224,29640,-9805,5613,-28633,-28207,23678,-24648,-24667,14507,-31490,-24911,-2863,32377,-1130,-26573,-29293,30288,12322,-28981,-13726,31883,-3634,32572,-26959,5943,13206,24323,-
28874,16583,-3335,-16911,-9493,-3830,1536,-4891,-31458,-15039,5644,-26077,29782,24718,-31372,-13018,18262,713,-29406,4405,-27521,-27491,19522,-9600,11228,26538,27659,-12972,-21805,-11077,16985,-18060,18469,21923,-29465,24223,-8516,27476,-21446,10887,-15167,1949,13198,-29057,13703,3219,31560,-3080,-24208,30799,15724,-1448,29133,24660,-2888,8444,-30748,599,-28766,28268,1729,10770,-19272,12883,32338,19952,11382,19777,18132,-23650,-20579,-30376,29843,17848,11369,-13852,13900,-13773,-27314,19493,-6803,-1729,-1138,200,-11406,-7345,10934,30105,-21314,-107,15082,-9976,9284,954,23805,-4704,17288,32515,3290,5986,-23815,6017,19883,-19866,-7491,20513,1598,-27932,24678,6426,-9669,30990,32200,12061,23921,-31119,-23477,11627,-15301,1491,28115,1884,-3898,-30514,1319,15606,2207,-32557,13565,14504,27197,8477,-5130,-20617,24978,8952,-22269,26291,-18006,-31938,1129,-31306,-25816,-91,6221,20366,13916,-28743,-8497,7623,-7444,-29979,31537,-12149,17573,-32470,-13381,2944,-28249,7438,7833,-1906,27199,3725,-6796,25004,-8573,25171,-12903,-11484,24024,30374,-10220,26438,-28033,-22657,-30162,-12507,25222,31981,15743,555,22461,-19530,-20744,-20132,-10761,-9745,4811,19896,16595,10665,-10539,715,-13522,22433,2852,14783,-1739,-15026,5401,-8810,29248,23696,16304,19437,-14886,-30286,-31865,1381,-16401,-16757,-12059,32133,4902,-12782,32113,-4593,6455,22077,20710,-30935,-32595,5226,-20328,-2995,-15291,-27379,-31523,7353,7039,18640,28889,8405,24975,-20863,-2848,-20745,3265,30220,-68,-16495,-5242,16419,18544,21215,6677,-17812,-26896,-19522,29434,-32209,3548,14442,-17619,-15926,15762,14220,12391,-726,-19464,5232,-17655,29631,-10077,-32106,9720,-12169,-15367,-13976,9735,21986,-19816,22221,17922,-5157,11058,23148,-7854,-25253,-9998,15521,-7219,-1435,-10242,-26335,4216,-22966,-5624,-15876,-2387,5551,-20879,-30238,28889,28425,-29525,22747,9032,10392,2784,-9918,-3581,11452,5405,23314,-31990,20088,31381,21059,4511,-14140,-15193,-7725,16555,24120,-24936,-5456,23850,-26913,-12714,24491,-25357,-27132,-191,-3576,-1
4844,25789,18569,28938,-22976,1886,713,-29698,23212,18778,31076,-28731,32045,13491,-5720,-7007,-7468,1998,-10477,-11297,-10546,7096,-13555,27995,-5568,19783,3673,4239,-15382,-29914,-14752,-10426,-31813,28609,22669,-11227,-5176,15214,16759,4434,-26680,-11346,-23052,-10660,-22314,-15165,-29925,19164,-25537,21801,1066,4226,-9324,25787,6982,-24802,16206,18458,7190,30888,-27588,-1091,17430,31467,-10943,3032,29507,-25826,6817,32029,-3781,-17338,23711,21703,-30225,-15500,-18785,-3758,19009,1896,-23813,-13372,29931,-16854,-9072,30528,-9724,-8513,27227,8063,-1278,5494,6504,26264,-11116,17175,12509,23095,-29568,20372,-17684,-30553,-31339,-25454,18889,6164,-19108,-20637,4582,19346,-3705,-13676,-5252,-29542,-21185,2801,3300,-6761,8337,10905,19865,-30130,-11603,-5267,21198,4563,28015,-2583,-13791,-17811,-2507,-7550,-13977,-4998,23120,13846,28241,-18888,29624,24805,-31874,-1608,6987,-31454,-25380,17230,398,13634,17760,-17283,-31555,31136,2043,-5725,-22733,17258,-19375,-19117,-29152,24951,27063,2769,609,15356,-4551,5492,30673,-598,-22580,-6946,-27621,-28434,-19805,-27788,-5496,-5714,-16383,-18161,26631,12689,18684,-11932,25569,9253,-30528,28498,-10041,-32524,19909,-15476,958,-13077,-5421,-26083,7350,12852,-15711,-26442,31817,-29529,-3373,19511,17114,-21022,-31730,-4522,-21031,-13320,-10476,29777,23680,29872,-27884,-30690,18549,-28526,-5617,4847,17263,-2485,14160,10772,-1143,-2824,-17216,-13854,16108,-4760,317,-11879,9963,-17152,12078,1970,9161,22596,-24005,-27054,-23131,-2214,7614,8209,-15438,23439,21893,-12234,-11593,-62,-5069,-27367,-16234,6288,-31359,-2319,-29756,-3280,-13511,-7922,-4197,31150,18185,-25039,5547,26065,-27107,27121,404,24816,-4257,20352,13297,-30546,-1090,-6838,13274,-9166,-391,15567,-15899,-10253,13247,28014,24208,1454,7053,31451,-30707,28282,-27891,21806,-6602,-26730,29694,-32612,9650,2696,21743,18173,-3310,-20190,-17268,6217,-12714,20258,3886,10271,-20271,24134,-9334,10941,-26115,16729,-6505,18078,10196,-14875,-24509,26294,26750,1085,-6863,-30393,-3791,18337,1
0419,14256,-24668,-12202,-20047,19484,-9672,10248,4025,23037,-27552,-27502,-17319,20769,-20539,920,-21342,-14214,-300,20937,-10768,-5648,-9943,32550,2056,-17040,5920,11642,18188,21773,28826,-25591,-21023,29425,-2690,-29275,27955,-3592,14979,-14156,17984,-27968,-32528,-20931,10522,2926,5667,-1375,17986,4885,-22555,-632,-32042,18580,21079,-29325,-20794,-28953,246,-4283,28308,23802,13092,-29488,-16242,-21313,11160,16076,-31663,23782,-5063,15303,-12319,29647,-8771,-16627,21654,-18449,19159,21062,-20795,11387,-31058,29020,-19527,-31850,1846,-7991,-30979,-30853,-13642,7088,20293,7850,-7570,1619,-31547,-8746,27161,14715,14707,-30901,-11629,-23516,6949,23166,-10162,23405,-1504,5409,23549,936,16871,-18281,5030,-32555,-27619,14678,7537,-3608,-7658,-25892,12735,20786,-23505,-5648,-11698,-23211,21487,32500,31521,13533,23452,30404,-22828,19202,-28302,-4651,-1445,28689,-2620,12442,-16119,-16964,26988,-17400,-7423,-7894,30661,-24784,29215,-14951,16390,-32282,-31843,-16808,-587,28546,-17050,-830,13521,24616,17354,-8282,-25050,-18746,29762,-17968,20928,-22685,-30179,5391,-27176,-18659,5134,21119,25734,3030,-17560,14209,-27345,11519,8563,31018,3377,9052,6059,25973,15756,-16324,-7291,27168,-26933,4714,3239,-19958,-23627,18132,17224,7493,10583,24292,-26437,-29811,-7418,25113,-1080,-6245,-5634,-11750,-7110,-24535,-27707,-27974,17505,-24242,14409,-8639,-29079,-20381,3686,-19661,10824,32055,-29181,6406,19192,3483,3055,9600,31826,11017,-8788,3562,-13509,11957,11623,1566,-836,24244,12244,32716,-10227,10734,-2001,4659,-10469,-5910,-20605,5764,28826,11062,-3885,-24249,-13933,4499,28654,-14125,5363,20378,-10978,-1203,7395,2387,28879,-27757,-5636,24204,-7127,26014,13753,1155,-13197,1308,-31956,31384,-19293,26170,646,22420,-7748,-1644,-22420,-11800,5977,-15905,-20541,-3542,-23558,-9597,17249,-2656,13112,-11174,-26950,-7609,4654,-27214,-23316,-20389,32668,6279,32441,15219,23200,-15499,9399,-23741,-16713,-6572,17583,31442,-27195,11708,-7707,28931,-29817,6792,29273,-25846,21653,-14173,-15594,110,20
291,32106,-27738,3499,30731,-28579,6742,-10381,7070,-21632,1422,11700,-24573,-27599,-7073,-9243,-12410,13868,19117,4001,-8223,5181,17316,6859,30874,7852,322,5950,3001,1428,2769,-28842,16854,-16870,-11432,-26436,15893,-26869,-14451,-30113,23442,-9977,-20032,31410,-11851,23035,-5476,2113,26828,19974,6897,-20637,-2508,6951,18249,-20126,-24592,3776,-21182,-14751,13123,16538,-17614,22281,19993,-20454,12573,-25151,28423,-21156,-7054,15244,-4221,28253,29058,32547,-8059,136,17348,-30919,-27296,-13758,-26530,-27938,-8670,81,14392,15976,-11547,-15675,32035,-12335,31216,-12382,-12774,-9659,25159,4095,-17943,-17486,17051,15903,-22133,-27688,7267,4280,-14999,32511,-20113,23980,29141,23107,-22811,28278,-29003,-5834,-26398,-22879,-1058,22972,11172,1712,29845,-19757,17984,-7617,-17734,19576,16943,9225,-11617,-22790,20075,-7394,-8009,-25047,-23074,32340,10390,-28327,-16355,3448,-18294,31670,-13064,-3143,-20252,-30359,30181,-1243,24784,-30818,27609,-19056,-2730,-11410,-161,27684,9013,-26951,19053,-13922,-24443,30934,-32354,-21761,22558,-11741,17602,-5386,19315,-14893,8732,19542,-16548,-32627,17594,-9666,-7062,9722,-15649,10103,16054,-13377,32146,-23856,20571,-27513,31168,17793,-31602,-15896,15989,29354,-15600,-11611,13173,3976,28401,21542,1763,12802,73,-18765,17528,-14944,24224,-19945,-16852,-28571,29130,-16640,-2218,-17991,-32416,-26303,590,582,-14363,15399,10163,18856,-18929,-27094,11472,-23228,3540,-3354,16945,-8771,-13647,26654,-21988,-10844,27735,18124,-14379,1158,27711,-6695,-13906,17922,-19145,-19452,3447,31079,-14496,6165,-31051,25948,-16152,13830,-3625,13454,18497,3308,-1638,3773,-28513,6659,19134,5548,-984,7989,20419,17887,-29355,-32695,5282,-9969,14900,20982,-3320,-9837,-24674,-26165,-10408,-25569,8100,-7965,3372,751,25672,-7951,-7329,20664,2052,29979,2767,2479,-7594,-28695,-31114,-17436,-3113,22212,3349,20192,27162,-8010,-31503,29026,-2568,-21612,-21260,-29171,16697,-372,-16908,1059,9035,-5288,-6715,-30787,-6567,32735,-16232,14332,825,17285,-89,-28086,-25560,-29678,-7060,
1969,-25289,721,-28259,-5646,5315,29126,-5387,-8385,17528,-5189,-10753,-7400,27421,-15911,-25047,26368,-5039,29820,26797,-4985,23924,9877,-17531,-17547,24507,10991,1316,16869,2083,22490,-22737,3206,28381,7119,19764,3584,-24609,-652,32257,1071,7900,21211,-11720,2320,13889,18847,-8678,-1115,12521,-11157,31626,-15673,2016,-8160,7557,13067,-10901,-28844,-14269,26859,-16880,21488,1309,-11738,16049,22637,19334,-14130,-21905,15582,-30444,-25831,2068,-6339,-22728,30991,13170,4078,3239,10211,-31388,6013,-17729,-20180,-21363,23192,-27856,12946,-29241,-7387,-19439,27710,-3923,7068,26003,32307,-18737,-10707,4251,-18460,23240,22774,-10144,17373,-28593,31526,-15828,12151,-11522,-6860,32540,-7982,9861,-8784,-7542,21953,10732,-17956,18989,3470,-26120,-28398,23160,14948,-2605,-15422,-19354,-14749,-27298,-22888,21740,10235,-16552,-29173,13681,3494,4062,19413,-26732,28176,1230,15498,-23971,8029,-5464 diff --git a/tensorflow/lite/micro/integration_tests/seanet/conv/conv17.tflite b/tensorflow/lite/micro/integration_tests/seanet/conv/conv17.tflite new file mode 100644 index 0000000..d73ca7f Binary files /dev/null and b/tensorflow/lite/micro/integration_tests/seanet/conv/conv17.tflite differ diff --git a/tensorflow/lite/micro/integration_tests/seanet/conv/conv17_golden_int16.csv b/tensorflow/lite/micro/integration_tests/seanet/conv/conv17_golden_int16.csv new file mode 100644 index 0000000..1ac582e --- /dev/null +++ b/tensorflow/lite/micro/integration_tests/seanet/conv/conv17_golden_int16.csv @@ -0,0 +1 @@ 
+16046,22156,32767,21285,13664,-3868,-26411,-11333,18865,-11784,24576,-788,-32768,-31327,4363,-32768,-8818,-16383,-19489,1698,-810,4994,-28576,-16065,4891,-14296,18002,9673,-29109,-13780,-32768,-25734,-8614,10097,32767,-30022,-353,1496,32767,-32768,-7895,-32768,-32768,-25569,-32768,-271,-32768,-32454,-11364,32767,-9588,-5856,2928,32767,12000,15195,9946,32767,15258,16937,32767,32767,-10177,20698,32767,19018,-32768,32374,32767,29918,-10974,-11841,-8215,6579,-4191,-2479,-20758,8367,21520,32767,-3452,-25124,-32768,32767,12817,-7037,-26545,-12990,-6344,5194,18470,-8844,-32768,-32768,32767,27058,913,32677,-32768,-5596,25708,-27741,13102,25487,-19510,24426,13668,-3340,3304,18678,12073,32767,-18427,-29814,32767,-21848,-11007,-4884,-1781,2041,32767,9192,32767,17396,-32768,-17687,-6717,18658,-8490,16008,32519,-20236,-9624,-14421,20896,-32586,-32768,3467,-32768,-32768,-5498,32767,-8487,-5293,14233,-7762,7439,32712,7561,-12697,19413,11633,25690,10454,32767,2374,-32768,-3757,16961,-12355,-12066,11351,-32768,-16089,-19465,894,-1619,-32768,-14824,5431,-9568,-3545,-4922,-4948,3804,32767,13813,-20261,32767,1029,19476,18263,32767,32767,26320,-19646,32767,4314,-32768,32767,31322,18274,-5909,-318,-32768,44,18130,-3885,-27149,27327,-29588,-32768,19590,7325,-32768,-32768,-19366,-31016,6456,-25471,-2845,6786,-13974,2020,-32768,-29276,-13262,24517,-5072,-6344,-29153,-32768,-21488,-32222,15187,5352,-32768,10122,2160,-7839,-32768,30715,11498,-21435,-28296,11958,-3188,-15082,-32768,-32768,29553,-32768,-32768,32767,6003,-11254,-28808,-32768,-8092,-1872,-24056,-20293,-22581,-32768,32767,3098,-3302,-1006,27902,1340,-1970,6863,-32768,23071,-15762,-32768,18589,5797,-32768,-11276,-8483,29181,16103,21656,-442,13584,25273,15868,32767,-19673,21608,32767,18902,-3812,32767,32767,-32768,9191,-6586,10933,-32768,2440,-7673,3765,-16523,14846,-32768,1259,-17951,-6660,30652,15330,32767,32767,-2716,32767,-32768,-10356,835,32767,32767,32767,32767,32767,15351,29748,32767,32767,21242,32767,13498,-4918,-32768,-178
7,13436,-17118,-32768,-25988,-4927,-8031,18871,6903,19250,-32768,28034,32767,12008,20494,32767,4738,13794,-23497,-32768,1009,17292,-10236,6096,5616,-32768,-9472,-14005,32767,-8346,-4660,32767,-1245,-7892,-19846,-32768,-29391,-4260,16951,23620,5704,14482,-32768,6548,-29882,3167,-10077,32767,-2670,-22078,325,32767,28698,13003,25449,-933,2141,22883,16925,-9235,-32768,-23297,-30912,25556,-15591,-22560,-6073,15057,32767,2568,-32768,5368,10157,-32768,-30126,-22172,-32768,32347,25980,-967,32399,4271,-2071,21518,12599,-11602,7747,-32768,-1312,-23841,-30070,-11186,-32768,23894,-51,32767,5203,5467,16349,-12363,32767,32767,-24384,32767,627,-32768,2542,31500,-18598,10466,-9120,32767,-11045,13208,-11396,32767,-32768,3068,8877,-4908,-18155,-32768,27196,19360,16003,-4172,22604,29413,3378,-140,214,32767,32767,-32768,27878,-32768,-11528,32767,32767,-10272,32767,-24450,5676,-9053,-22339,22078,-10732,-7795,32767,4039,31600,-32768,-3037,32767,-2890,-1586,11096,16780,-11704,32767,3241,16136,-10728,-1803,-32768,6597,32767,-25672,4260,-30675,1568,23332,32767,22414,-13656,32767,-6478,-6373,-6249,-28413,-1046,-14569,32767,-4106,-1363,32767,-911,-31006,-16112,-32768,1417,32767,-9628,-32768,2537,32767,-1532,19746,-32768,-30559,8076,11253,32767,-26985,-32768,13349,-5033,-32768,9477,28844,5820,-25368,30770,-6412,-28934,13208,-19907,-32768,7390,-32768,-32768,-9760,32374,-32768,7514,-19635,-13961,31538,-25467,-32768,-32768,-24800,136,-32768,20235,-16071,-1172,-4464,32767,11142,11394,-3374,11061,32767,-12066,-1909,-17237,-32768,2262,32767,18291,-405,32767,19228,30303,-32768,98,32767,7691,32767,-32768,-11599,32767,395,-16090,-16217,32767,-12904,3813,1014,1732,-32768,5370,1456,-499,24659,22736,-8868,-5256,32767,9688,32767,13318,32767,-5413,-4392,24530,-32768,9372,637,-13166,24098,-32768,-32435,-32768,-32768,-26666,-5545,32767,-32768,26493,-2660,1858,32767,7485,4821,-4976,11209,-3005,26844,5568,-32768,8692,21006,32767,-8258,32767,-5514,8372,-32768,-15416,13928,16134,24729,20762,-32768,14208,15505,486
3,-867,27658,-4925,-32768,10396,-3255,-12946,17752,29994,2508,-32768,13413,27419,-32768,25981,10604,-32768,-11388,30066,30572,4707,4934,-32768,-14697,-4749,10513,32767,323,-15380,32767,32767,2242,32767,32767,21825,32767,18603,-32768,18289,23256,12538,-13190,-11165,6670,3575,32360,32767,-5276,24845,-5003,32767,27454,-9480,23521,32767,18468,-15411,-6194,-32768,32767,8867,20427,32767,3287,-32768,-2118,32767,32767,3373,1511,32767,-18436,12174,2835,-32768,32767,-32768,1450,-15748,-11532,26960,-32768,-8945,15591,-25651,32767,27924,-32768,-31388,-6319,9395,28710,23302,-18940,-30850,7092,-16785,-4064,-32768,-32768,-18518,-11012,-19352,-19131,-1001,-16743,-6361,-7552,19643,6478,32767,12445,-32768,6384,-11472,-30000,12861,-22986,32767,12799,13555,-19563,25812,-32768,-31328,32767,-9169,-16667,-32768,23415,567,-4744,-26478,-7132,-32768,-13308,-31909,-28496,2381,-32768,-32768,-32768,-32768,-21202,-32768,-7485,-32768,-32768,-1577,-6668,32767,12497,-628,-13178,-11946,-30857,16711,10700,-32768,6165,32767,32767,-32768,-32768,-14876,30253,-32768,28669,-8949,15013,-32768,15293,-31305,-27700,14895,15616,28761,-14563,4250,-12541,23490,22562,-32768,5069,28187,15047,18692,-6914,19976,12956,21146,-11087,-1174,-32768,-16480,-23477,4782,4448,-32768,20424,25059,-14364,-32768,-32768,-32768,-11574,-27115,-17952,-32768,-32768,3225,21036,6127,-895,-32768,-9667,-22787,5779,-32768,-32768,-26152,-6333,-32768,-3897,32767,-21774,-20979,28097,-11832,-4026,-21416,-5858,-19344,8074,-21771,-17548,-825,27482,-32768,1222,-21776,8578,-25251,-7670,9375,-4103,-32768,-13084,12340,-25043,-32768,-32768,-32768,21394,351,-21423,-11323,-12515,24964,-8583,-1864,12534,32767,-10528,7661,-8299,32767,-17225,-26881,-5891,-6632,-977,-6624,32767,1531,-32768,-20556,19126,16664,7116,-32768,-13716,20467,-32768,7272,-4067,-25406,11051,32767,32767,-15472,32238,12932,13737,-1212,-13685,17851,-166,-365,32767,7644,6314,32767,-4486,32767,-12675,14453,24309,11304,32767,32767,10711,30717,-9599,12038,21599,22143,-3436,-32254,-6845,-235
68,32767,32767,32767,-6891,-21661,32767,2374,7984,5721,31464,32767,32767,8502,32767,9316,32011,-6879,8214,25723,17519,-7902,32767,12955,28069,-9554,32767,20845,7489,-11372,10254,-6332,-32768,-8117,-1811,-32768,16227,7748,12642,1566,26302,-11859,-4145,9376,-2644,-1231,-32768,-1006,-30331,29014,-32768,21938,-32768,-7290,32767,-18701,-24444,15529,31420,-32768,6868,-32768,-32768,27652,-20356,-32768,15596,-32768,5409,1005,24393,10242,25658,3779,32767,15319,-2496,21663,3311,3480,31474,32767,-23150,32767,-9671,5140,-32768,5090,-15789,-10684,30863,-32768,32767,-29654,32767,21409,-32768,32767,-16100,540,5519,13716,18846,877,-18683,18175,26536,29413,9011,18307,27803,6301,32767,32767,32767,15286,11140,5796,240,7849,2316,-2586,-7804,8949,30136,15063,32767,4442,29671,32767,32767,32767,-10902,1999,32767,-12249,-3278,15275,32767,-17388,2875,-11388,-8771,686,23143,32767,32767,32767,-1759,32767,32767,19230,-7410,11977,32767,-13583,15518,32767,32767,7944,22913,32767,26050,-32768,4720,398,32767,-15291,-7,4187,-2835,18245,32767,28587,26683,5437,32767,-32768,-16619,32767,27858,10372,32767,746,-12011,7195,20048,32767,13959,6622,18617,-7900,32767,-13927,32767,32767,11243,6149,32767,27663,19859,31178,-32768,11482,12544,-31153,32767,-9782,4452,-10789,3415,-6616,-7534,-22192,30896,-5666,10084,23039,-3459,32767,2052,-825,10200,26816,1481,-10738,12953,6721,22739,28924,24401,5655,32767,9996,32466,-2604,24524,16172,-23709,-9215,-32768,26112,-6319,9574,-21048,3398,18644,908,-14404,3791,4715,16003,32767,12533,32767,21954,32767,32767,30647,32767,-1878,-1597,-9082,7405,32767,1516,3699,-21413,-8154,17704,1454,-11804,-11315,32730,23323,-11304,3528,7270,32767,-13119,5063,-14170,32767,24133,-26414,-32768,32767,14352,32767,32767,11054,28398,-8285,17936,-11141,-30235,11183,18074,32767,-13385,-27558,23264,-32768,-11180,32767,32767,-32768,-32768,6233,-32768,20882,12510,5186,-7462,-28259,9190,900,-25975,32767,-11334,-32768,-32768,-11160,-32768,-4852,32767,30711,-30443,-28567,-2037,32767,-16175,6373,32767,-32
768,8983,32767,32767,-32768,1316,9259,-28667,32767,-16256,-8308,9482,-10965,-17565,32767,28683,-27956,-9552,-27055,-23496,-22158,-32768,5956,-32768,-27553,-25450,-8964,-1526,-8759,-10012,-32768,25386,-28390,-32051,-32768,-32768,-32768,-32768,14454,2283,-32768,31153,-6027,-2966,32767,-32768,-32768,-32768,8405,-19929,7701,-18575,-1489,-15223,24328,-6372,-32768,5887,4795,659,7240,-21756,-17214,-27083,-28128,-11482,-32768,-17125,-32768,24998,17255,-12269,-32768,-1950,-9482,-15003,-30967,-32768,-32768,32767,-32768,-15222,-9126,3741,-27165,-32768,14024,18366,-32768,-11293,25108,3695,32767,20129,-14164,-32768,32767,-12682,-6744,-2908,32767,32767,983,-30702,-32768,-31436,10709,75,-14622,-32768,-22732,32767,-22126,-2858,-20201,30190,-15886,-27664,-190,-14306,32767,-17781,-22487,-6649,32767,2099,-20670,-23322,27819,-3163,31372,-7318,19849,-32768,-12446,-29320,32767,-4764,-8665,-4851,-32768,-12800,-30368,-7128,-32768,-15760,-12508,-15521,-32768,16560,21759,-4672,-32768,21947,4412,14698,-32768,32767,17677,-9632,10252,13530,-29275,-17780,16216,32767,-78,-11624,-123,5648,-14765,-23015,-32768,-12939,1573,-32768,29301,873,-32768,-32768,19332,-8823,30352,6194,32767,19464,24774,29630,11366,-32768,-29825,32767,-32768,-18836,14185,32767,-18472,-16957,-31456,-16933,-32768,-14422,-24075,15618,32767,32767,-18334,-32768,32767,19995,-17264,-32768,20742,-30751,29402,-10244,-32768,6186,21689,-9481,32767,-32768,-32768,-28448,9765,-17095,-32661,-21903,-7777,32767,-10034,-11022,32767,-17692,19488,-4585,21493,-9711,-25123,-12449,-28049,3920,32767,2420,-9897,32767,-24193,32767,32767,-20144,-8586,11393,32767,4596,23313,32767,32767,-2504,32767,32767,6125,-22265,17600,-9503,-3236,25939,-12125,-3725,-32768,-32768,-479,32767,-27451,-11532,-30120,17984,16096,-32768,1902,-6822,-32768,6559,-12474,32743,32767,23119,-29433,23149,32767,13612,32767,7337,32767,-32768,-1458,-13363,-32768,32767,7997,-23457,-2702,1049,-11623,-32768,-11585,2772,-32768,-32768,-2506,30810,3465,-30424,-32768,-23119,27317,-13713,-3276
8,-1179,-30987,-32638,16427,-9510,-32768,-32768,-17864,-16975,8744,-22671,-31274,12333,4316,-27322,15621,-32768,-23657,-32768,-32768,-3844,-25336,-32768,-24952,30836,18521,18551,22343,-6821,-3352,9526,32767,-7010,-15105,32767,-32768,-24807,32767,32767,32491,30533,-11038,-16278,-32768,-13778,-12135,7545,-32768,32767,7106,-22324,32767,10747,32767,-16763,32767,-32768,22531,-8058,-32768,32767,9209,-5585,-27154,-13448,-32768,-25705,-17436,-12267,-32768,-32768,24407,30006,5871,-6625,-32768,19531,-21052,-11742,-32768,9792,22828,-16932,8697,10556,-23448,-24210,-32768,-32768,-3779,-18551,-32768,13485,32767,-19566,32767,-6994,20274,12680,-26349,6274,16795,30598,5629,32767,-16786,21625,13093,13816,8339,26682,32042,32767,-8289,29402,15472,-15252,32767,32767,16951,32767,4909,-4071,29406,32767,30104,-3575,-32768,32767,32767,-5433,25153,-9219,16117,-17747,32767,32767,-7931,20281,16274,-10118,-1494,6380,-7809,24650,-7450,32767,8346,-9927,-20026,-25331,5568,-32768,-12739,5346,32767,-18286,5299,-933,32767,-26508,-6675,-2196,-32768,-15540,-32768,24132,-15655,-8442,-16052,8984,19644,-32768,-32768,25083,32767,13257,-32768,32767,-32768,-22303,32767,-7042,-31328,-32768,-10840,-22886,-32768,-8491,-5970,-11941,11465,-2979,-24254,-32768,7910,16064,-10774,-32768,-9894,11475,-4663,18092,-32768,13163,-28173,6799,-14077,-6896,-13197,-20680,-13550,12576,-32768,-32768,-22366,-32768,3025,23108,-22058,19592,429,-1204,-18151,-22532,-32768,-3817,-14817,-2036,-991,-24777,-32768,-8203,17693,30259,-32768,7774,27177,-8604,-11794,3696,19286,-32768,32767,5094,-32768,-9214,-5556,-12962,-12951,-32768,-26368,-6913,-2541,8266,8579,-22534,-2382,-6083,-19422,3454,-23671,22022,18207,22352,18587,2161,32767,9819,14501,-1503,32767,25057,398,32767,32767,-12252,32767,32288,17170,31746,905,20661,32767,19715,-17150,-2645,29077,-32768,18300,17421,-32768,-15346,-32768,32767,-20392,-32768,-255,32767,-32768,-15000,24398,-6062,32767,-9666,-32768,26100,32767,-9872,32767,32767,10277,-20921,-10189,-16777,-32768,-21858,-2790,-137
29,-32768,-19820,7700,-2771,-18402,4404,5555,-32768,-4162,-32768,18368,30534,32767,32767,23239,-16195,32767,-13230,325,28352,8539,16157,32767,32767,18492,32767,1564,-13260,-32768,8116,20353,4168,-16931,-4703,32767,-20110,14307,-1157,28004,-9181,15065,32767,4323,-2775,32767,21298,-10391,94,-27579,-19873,18377,15541,-18005,12235,-12438,20600,2887,653,12852,-20814,-32768,12537,-16293,-1658,-31585,-32768,-8411,-32768,-7294,-5084,-32768,27408,-29073,-8914,14086,-16198,32767,-10767,-32768,2822,5010,15259,6643,-6254,-32768,-9477,-32768,7071,-32768,-32768,-8797,11563,-5330,494,13020,-14098,32767,32767,-21815,-28454,-25668,-20128,-32768,-25484,-12807,-32768,-8642,-18944,-32768,-294,8739,-16682,-11761,648,15113,-32768,-32768,-1161,2287,-32768,-32768,32767,16342,-13836,-32768,30960,-12525,-4063,-32768,5527,-27807,-8301,-26466,-5104,32767,32767,-32768,-32768,-12470,11065,-32768,-4914,-32768,15144,-5724,-2423,19954,-32768,13076,-13480,9719,-32768,-8216,-32768,32767,12832,-32768,32767,5740,3008,-32768,-19978,-32768,32767,-7669,-5139,32767,-32768,32767,6626,-12896,12714,30452,24455,-3063,10247,-29866,-25719,17748,-31293,6789,9172,-32768,-32768,-32768,-17632,20184,32767,-32768,18500,32767,-21342,32767,-32768,-32768,2260,-32768,-16230,-32768,-18595,-32768,-9384,6524,32767,-19399,9072,-1337,3451,14624,22184,25650,7859,32767,13535,-14288,15252,-15873,32767,-4379,-1235,32767,-26381,-15085,-10374,-3273,-18001,-1262,11215,23110,5058,29390,2969,-4070,32767,-19521,4193,32767,-9138,7193,-9105,19928,21353,14365,-32768,29148,1122,-30080,32767,-32768,-32768,-18947,7944,-32768,-32768,-32768,-12850,32767,-13936,-32768,-11662,-32471,4867,-3364,23272,-32768,-16198,3478,26389,-5851,-22363,6095,-2186,32767,8040,-25673,20175,-7313,-6798,24243,32767,-19584,-9241,-11408,18182,14450,-8606,6505,-3579,32767,-12517,1115,32767,32767,8177,32767,32767,32767,32767,13477,-6284,32767,-9280,-726,-20719,22568,11126,16583,32767,32767,1475,32767,23082,20797,32767,-1950,1358,24060,3272,2422,-15792,14476,23730,-10184,
7024,18004,-10074,-21076,32767,32751,22911,-10280,22397,-9398,9975,13609,-9287,5166,22168,18540,-11301,32767,23102,465,32767,27400,32767,24579,27831,16754,32767,-1747,-4610,15057,-25504,11052,32767,21099,5257,32767,32767,-17108,3892,-5833,6118,2389,-2924,16097,9301,-32768,-6484,21100,-3902,-17233,4339,-11020,-13769,-17890,28237,1653,15983,17621,-5221,13762,6980,32767,-32768,-9234,32767,-10096,-3689,32767,29108,6339,32767,-32768,7466,-32768,-32768,-28468,4821,-32768,32767,3387,32767,32767,10731,32767,751,32767,-32768,-5014,-32768,-14044,13417,-10542,-13400,-32439,12469,21077,-32768,12713,-2476,-32768,-3719,32767,22737,-9807,-1665,32767,4372,-1970,14166,17901,31611,-16265,32767,21294,5132,-8985,27583,7118,-32768,-584,-566,-32768,-19509,-17519,7537,10361,21056,-28397,9925,32767,1076,-21320,-32768,27047,-12432,13,-1307,-18173,-27155,8122,2633,-5216,-17676,-9965,-7021,-1391,5959,32767,8864,19305,30086,20597,-15807,-851,32767,16004,-10776,-32768,6317,28914,-22546,-16393,-10183,2401,-31382,32767,32767,15356,2607,-32768,8149,2536,-11649,-32768,-23969,-4456,17201,24831,6973,3056,-2909,-8645,32767,15669,3484,32767,22995,11691,7932,-10177,-32768,-6660,-12990,-32768,-16350,-7027,19617,-32768,-32768,-973,546,16404,10799,-8020,-89,-32768,-1749,1934,-32768,-8363,7669,-17758,-32768,-22563,12587,12614,-1896,30159,13041,11978,-9414,-32768,-23514,1147,-15895,-18432,-11039,-32768,-32768,-16278,-3688,-17247,32767,32767,-16395,5020,5577,32767,32767,15147,21158,-11101,11665,32767,32767,-32768,20743,-23564,32767,32767,-6182,-602,-9755,32767,-32768,14475,32767,-21316,-519,2414,32767,-27193,-32380,7906,-32768,32767,-14480,-4305,9105,-22058,22405,-11302,-11722,19677,20166,-32768,-32768,-32768,-32768,14486,-914,-32768,-21328,-1022,7630,-14580,-11412,-32768,-32768,-17127,-21747,-23394,-24641,-32768,-30943 diff --git a/tensorflow/lite/micro/integration_tests/seanet/conv/conv17_input0_int16.csv b/tensorflow/lite/micro/integration_tests/seanet/conv/conv17_input0_int16.csv new file mode 100644 
index 0000000..95a60a8 --- /dev/null +++ b/tensorflow/lite/micro/integration_tests/seanet/conv/conv17_input0_int16.csv @@ -0,0 +1 @@ +3850,27079,-8115,1092,2446,-16688,-6880,1039,-27492,-11271,28548,26079,-9815,-26177,-8291,12628,-9601,-10528,-14261,-22781,31280,-1294,-10413,-15544,17302,-5122,7875,-8148,10665,12401,-19647,-7037,1585,-9059,10409,4620,27636,32009,-20408,-28506,9177,19645,-4422,1632,23550,27317,901,10613,11858,17345,21695,888,-9560,16744,24846,-32528,-13654,5958,-1320,-27994,28982,-31432,-31242,13555,31703,-47,-22766,-18564,-22924,-28101,-3109,-26687,-28229,-12057,19326,7511,-13567,-11143,7076,30044,-19545,-1511,-28808,-22726,-17021,-31822,-8825,-27688,5714,25817,-30161,-31739,-21913,-18241,5712,-22124,3593,-29700,20528,-23535,31143,-29668,-29555,22890,30392,-21290,28746,-12482,-29386,-8603,23542,-7630,29555,-13331,-32232,-13124,-11270,28532,27374,-6484,-9834,4643,17051,4771,-394,-6974,14568,32429,-15338,-4044,-15984,26616,-3295,-21741,-2736,-18076,2977,-14181,-4401,24130,24455,20566,15287,12216,23908,-4968,-31619,32051,-5717,10050,22383,-11708,23496,-2835,-29643,-32622,13871,26117,11229,-32493,24622,11235,-21880,-24820,15735,-20994,-4818,4160,20149,27897,26299,-1916,2083,28962,-6931,-16607,-19457,21260,31528,3246,-4642,13756,-1792,-15814,29067,450,5715,-24945,18661,-4226,7613,-27797,-30489,24985,-19138,18673,-19957,-30024,24735,-22151,-23807,-20525,7174,-19989,-17928,28096,-7853,-23431,4873,-25007,-4461,-18286,-797,23588,-9857,20999,-21365,31527,21977,-25076,15418,-5815,10025,-24387,-3279,14293,32261,-11008,-575,-14608,-30191,19541,-27836,-11109,-3428,32167,5846,-27914,1663,9913,24216,15897,-16276,5351,4479,2310,16755,-7414,11028,-96,-14666,-12758,-13496,-7889,32513,-11583,22983,7586,-568,25518,16299,-9611,10131,14510,-12078,-23399,29901,29173,21917,-19336,-14215,-30388,19786,-5888,-32463,-28638,11116,16804,32183,-3736,-7739,-11204,-29805,-4409,27151,19890,-1191,-28520,-4814,-11078,-1682,-7038,-25288,-23100,-5224,-20420,10842,6839,6966,5788,7745,-192
94,-19585,10888,-25443,-20443,-25260,32536,22021,23497,5170,-10175,-2025,-11195,743,-18915,-19071,-30377,-17793,-31594,-11785,-21472,17855,20837,-17227,-1014,21779,6886,-27771,31025,-10949,11704,-6993,19159,-17176,29399,-12465,30740,2430,20930,10318,18464,10870,-4281,25871,-8498,-23548,22453,-15445,-30643,2201,-9280,7714,5502,-17803,-14710,-12512,3646,15339,7364,6403,24606,-19378,-22180,1504,7096,-20697,31016,-21114,29484,-13295,-24542,-26954,-3880,-21220,30029,28241,1637,-9731,2273,17718,-603,-4077,-3756,-18118,8202,-4251,11490,23266,26586,-14375,-22447,26029,-1939,25134,11698,30436,-5319,-23386,3898,21147,22991,19445,29211,4636,31824,-26482,14175,-25893,-32408,22427,-31420,12510,-10144,-1320,-14807,-21030,-29437,18980,-32128,14679,-17192,-16802,-5868,-23748,27404,10904,31042,-16024,-24980,5487,-18356,31031,-5130,-31283,4771,19066,-21922,1730,-3808,-29410,23151,25262,-30970,-26326,29669,-29178,-3140,-29735,-12186,17397,4455,-28399,-8995,-7071,8739,-14714,19740,27690,-14102,-19280,9415,12087,-17289,6642,321,15163,-28758,-26941,29252,26467,27820,5851,-23129,-29101,9456,-23174,1298,24100,-5234,17315,-16453,-15010,29716,-15997,387,10658,17736,31420,-21243,981,-22419,17957,-22289,11446,10112,-1387,4589,-21687,27238,-13309,13381,10016,29935,-2644,-23007,-10352,-29357,11847,31166,572,29038,19373,29301,19443,-29366,-538,32741,-6607,-9283,14118,-19905,-19169,32407,18066,-28646,-11340,-9692,12075,13967,-5836,20799,1528,2268,-14758,8023,4265,-7563,31243,12503,4647,29385,-19736,5576,23011,-16704,-4886,13800,-8156,16328,-9966,-30765,1440,-10395,-26452,-19358,743,-22293,18242,-7989,26287,-2683,1655,-20764,13658,14723,23967,17612,-24130,-15568,5649,-14915,28866,-2311,8440,3354,31939,-25648,-31842,-26170,15307,-25303,-19952,16759,-10535,1780,-31584,-19651,-28288,9406,29243,32564,15153,-22960,9965,7971,22471,-21521,-9504,-14657,-6635,-27068,-22703,27476,-30437,26021,-21219,9014,31065,4777,-19334,28864,-17298,-32321,-5782,27642,-4362,19611,-11846,-16647,-4547,16791,-4018,12328,20595
,9888,25819,-17115,-18567,2374,15307,8189,24316,5261,-5486,-31421,-2898,2632,-17679,8457,23121,-26212,25189,-3873,-22420,-19392,18193,12132,21331,-30601,5784,-27122,12527,-9938,19146,16381,-28908,8191,1740,20164,-28059,-10780,-19919,-32189,-32354,-20797,27652,4362,21548,-27990,-21214,-28687,8895,-17991,-7835,-9321,-16922,-28993,-28586,-1964,20033,31767,14722,-22155,-3525,-20747,-9263,3854,-27645,-7941,31778,-16791,-9118,-30460,12767,19557,28717,-17529,-15296,16025,15301,27262,32683,-29826,-5076,-25205,-20510,3863,-24071,10030,-21050,7999,7562,-22328,13208,-2869,124,-13137,30428,-27366,-20686,9784,-9265,13950,-22671,-13190,14207,-16754,23296,8199,-25517,26319,27346,-15661,-19137,-1363,8122,19669,-26879,-32449,-25055,14442,29114,-26143,-27157,-17180,-25065,-14851,-28865,-11152,31709,-28684,-256,-15476,9974,25248,-22538,-28091,12154,18136,13076,-3064,21433,-28051,15872,-17436,-16688,12306,24060,12356,25582,26771,-8170,-20952,-24065,7473,1006,-18274,18045,-1261,10848,12058,-30701,17300,20800,22455,-2858,-27895,12317,588,13472,-31852,-32736,25393,-24604,-31403,28407,29768,26635,-6722,-3257,18083,-30800,6552,8667,26816,-18614,10586,2726,3342,-31755,18659,8054,17312,20817,-9004,5600,25,6890,-16862,-6771,5052,-30902,-21628,30023,-14805,833,29694,30973,28877,30059,21007,-28369,-10952,-31467,19398,22087,-31258,5217,-11395,2791,13107,-3392,2775,15169,-29898,3397,-19775,22207,29766,28536,11012,28698,-9250,7684,-27566,32112,-3874,-23976,30511,-19066,3643,-8318,4480,-26337,13819,-23193,8080,31177,30841,11354,-18320,-10278,19287,31129,9471,4272,-22416,-5053,-7317,29433,28237,-9955,-2125,-26641,-1596,6310,10051,21462,-14821,-21159,22086,-19186,-16041,552,10114,18722,11602,-27822,-2626,10747,-29148,11396,15945,-7690,-9457,13416,5444,-4777,10405,10643,21499,21067,-565,-530,1083,-7254,12522,-16741,10952,-21356,619,-12269,-12678,-16674,10769,32048,-3470,8698,2409,-30590,27741,-25299,-18018,-6011,-3055,863,-19370,-12604,-8455,-7972,28574,-9208,10510,11704,30704,23237,30844,-10611,18065,
27149,6080,-2789,-25340,9944,19244,-5036,-12146,4887,-23129,28034,-16916,-1435,6260,12148,6567,14118,-11547,-16458,-15828,-19441,28268,-14680,7720,19817,-4924,-13961,-23334,23715,17150,-9438,-21487,16660,6717,27681,-4518,1976,-26999,-9211,7532,-28909,-9468,-7917,26991,11135,-4839,-14457,-23940,-24731,-17411,-4754,-3038,19235,28523,14429,166,29801,-25688,21481,-27752,-14454,-11886,-1705,13071,-26848,-20288,-14317,-31682,20149,-15546,16707,17744,6422,-5773,25285,11580,18178,25617,4229,26650,-6065,-14199,-14456,10117,-6083,3455,-19034,-10175,-16139,16907,-30734,14045,29937,-10823,-7199,-20246,-6573,32000,8546,-31774,13556,1466,22997,20819,28528,-4,22970,7187,4377,3946,12709,31059,-27878,20205,25371,18973,-20529,-9946,4411,13654,-32194,-22560,29792,12248,-14545,-19060,-28239,-8979,-28066,-30727,12310,-16091,8748,-28184,9645,94,-3192,19211,-2520,-18480,28380,2922,-10108,19016,-14257,-14423,-2371,-4926,-30530,26927,-28014,366,10551,5924,-18014,-25436,3214,-18952,-29305,2535,-11222,22645,26868,-18731,-14085,-28340,5219,-21837,-7034,3230,-26951,17244,27393,395,-15489,-30303,-30624,-21122,-5929,-11275,-24893,3125,24516,28736,9880,-18920,1713,-10561,-17879,26511,2309,29321,-27430,30566,-24582,8007,19952,15786,17972,-501,4541,3477,-16204,-3429,22037,11669,19719,23217,17452,10362,28810,-10677,16638,16703,-20058,-30516,14380,-18376,-31134,11447,-4311,-17315,-13947,6344,27419,-2199,31224,-13418,2077,-5002,24698,-14901,-31492,-27845,-32189,-30247,6181,-7643,19406,11638,-31827,24978,-22335,-28503,32017,-26732,-19681,-28162,-26050,-9356,-13548,15641,12754,-5655,17734,-20844,7803,-7039,-24392,-29989,29050,29443,10296,22784,-16374,25988,-31888,-10152,-892,28751,-3197,-30172,-14550,-30983,2935,-23630,8152,-4884,-32417,22083,-30745,16541,-9746,12008,25841,10605,-15476,-7781,-18779,-10970,-15467,-545,-14291,27546,-29951,328,10200,25894,10923,-11275,-14329,-31525,3527,3310,-21129,2432,-32727,-29636,-10569,-13060,-7154,17368,18695,-9467,32366,16026,21518,9048,-9000,12759,-4753,-6957,-22303
,-4421,-28684,-12010,1868,7775,-9724,20232,-23261,-31483,23160,-20333,-16505,-23205,-24326,-28508,-3554,11180,2383,-17081,17034,24134,4084,-4834,-20139,3395,-11104,18035,-15949,9306,-9035,9404,19447,-19239,4300,-14218,30381,-4026,-21821,-15018,-24231,27570,-2769,-4531,19965,5928,30780,14448,-32498,-24506,758,-5475,-18825,-17659,11535,23361,-21002,-25848,32645,-30342,28525,-1770,-872,27906,-14412,-6035,4988,-7397,32579,13758,-8149,-19675,-8352,19950,-7331,-12965,4437,18648,19674,-15190,-32559,31160,-18694,-8067,896,30380,18125,1265,-26081,27856,1044,-20541,-9728,-5349,24539,29330,-588,12049,20169,22733,-7649,-16263,-16628,28479,-1585,-43,-25458,24764,-14346,15197,21108,-22920,2808,21804,21041,-27156,18493,-11674,23283,7266,7005,20478,-5192,12795,-11220,30817,421,27717,31096,32683,22732,-14648,12268,4944,-879,-21083,19080,25592,20009,24654,16407,19928,3746,-29601,-20501,11640,-22219,-8623,-7472,-2442,-11576,19887,-8861,-1831,-4662,962,15059,26418,14697,-7824,3841,-30030,4355,17861,12824,22244,-4326,-32074,2907,-986,-9270,17945,27610,-9054,19898,-223,1529,22238,-5579,-28252,-10784,-11947,-1273,-15208,29333,32735,25972,470,-21459,-10428,-10530,-7741,-28805,1248,-3860,27668,-6846,29794,-25361,-1184,31933,6381,-26356,23728,-7268,30034,-3881,2993,29389,2274,29968,14745,-26467,20548,29595,1012,1194,-24439,-21984,-2521,7815,-4985,8275,24711,6095,-14535,-4071,-28886,27348,-22146,-25447,-1161,-23220,16186,-14951,17422,27996,21565,-18534,-7304,10640,-26359,20221,13877,-16654,20249,27106,-12819,8604,3644,3139,-3137,25103,-11046,-16652,31545,-18950,10680,-24052,17374,-12765,-14,-4156,3952,2925,-19856,15607,-29532,-26407,15968,-11496,6363,-9368,-6979,11876,830,28885,5725,-31918,-9358,32613,13376,-26071,-16312,5338,-27661,24127,-8483,25625,15427,23338,-5895,19976,16751,-27458,-12251,32502,19639,18752,-20909,-5699,32037,-26147,12665,28957,16464,-16527,-28023,1521,20096,-32156,-9809,-19388,-9673,-15531,9644,31487,-10515,32535,24925,20323,-3979,-13991,16523,5172,-981,23338,-9512,-1136
3,4503,-32266,19064,-20367,13366,-14369,-22211,-19109,6330,21677,31748,-1323,16736,-2079,31067,-17221,-963,24797,3333,-22412,7879,-6940,-21480,12123,3870,10375,-131,17028,-12392,4100,32279,14604,30090,-30520,-8180,31070,-17624,-13848,-19649,-734,-3175,21708,-497,-3426,-13425,31918,-5946,9212,-23326,-31463,5108,24476,30287,-14735,15503,19471,28939,11455,-32003,8492,-14244,1130,-10588,13608,29504,26719,-10916,9118,26053,10907,28206,-25812,1918,12998,9457,-4394,25584,2652,-23927,13878,-17989,12522,8554,-8046,27231,13577,-4228,-17870,9880,-1812,16794,-24724,14032,22073,457,28533,-25184,-2889,-32520,21736,1042,28900,4171,-18179,-21744,-8587,30759,-16840,29644,-15128,7001,24679,25144,-12420,-5964,-2284,-28930,-10956,7831,13875,625,-7274,-3437,-28597,-8499,32730,-4147,-29494,-422,-14351,-17298,19324,-25043,15800,29074,11048,-2711,26730,29907,-30280,26881,-29728,-15698,25476,-6712,16432,-11857,14005,-1528,-8407,15839,30912,-25989,-20934,-26067,27449,13415,4866,23624,18707,8009,-5107,-30215,-14804,5234,-10364,-19536,22367,-16136,-8069,-6849,-5301,8176,-30255,21262,-14883,-16444,22071,26557,6960,4740,27817,-17891,17747,-21125,-28545,19829,-18253,-16230,-29284,-16535,19458,-29245,-4508,28074,-22346,-27720,5757,-18328,-21594,29151,-10683,4130,-2026,-9863,11222,-7111,17915,-23357,18168,12828,-4690,-32509,-303,-4340,-26617,-22672,18158,13912,-27588,-18091,22306,-27903,-14446,17496,-3562,-21128,4448,5909,22795,-23767,9290,29809,11232,-30594,32248,30117,-22836,-3950,10821,-31149,-6479,-6610,-18253,-15106,18205,32245,19058,23805,24050,19967,-22742,30973,10671,-24266,17763,-7454,-27715,6933,-32371,27389,-27878,28179,-14416,-479,8064,-30190,-13777,14279,16619,25833,13643,-5491,22176,30914,13862,-11650,-23066,-4265,-363,3614,-10449,7580,-17206,-7784,-32680,27804,-6900,15196,-29567,-14498,21310,16017,16928,26739,1813,-10891,-10457,-27845,-4524,4082,-25655,7371,-5732,251,32445,13117,-31847,14214,-8344,-1419,-282,-5065,11623,-24505,-20548,-9517,-27445,14245,-4008,21728,31381,-18769,30972,
-7793,-10383,4439,-15630,22224,-12425,-6444,9446,-5001,6853,20639,-14016,-24663,-17540,-16252,-18272,-4969,-9124,-1335,27096,-14328,-7376,8139,2731,17807,5635,1456,24476,-29070,-25099,696,15070,-2182,-16692,-12726,-8945,-29658,14769,-23808,26985,-18266,-9176,-23625,-17135,4097,-24450,-17675,9146,16693,26714,6912,-11770,8552,-20838,29760,24166,14936,27061,1689,21052,-5248,-13434,14867,23049,-2624,-12823,6883,-15172,4952,18494,-16483,-27144,-11493,20184,23454,-20120,16534,-26824,10278,11527,-12646,-9510,-16972,-14267,20633,-25097,-3438,-26220,25854,-31667,28399,-3601,12922,10,14940,11672,2777,-30129,-28187,19831,27611,5079,25830,-19255,708,-31909,25874,1693,-32496,32105,3071,9046,-23093,2138,-16492,28780,-6872,-28244,-5041,20476,9275,-8964,23765,31218,17069,5643,-24575,14638,31986,14400,-8948,-19638,-3273,-23331,-20551,24407,30449,-8109,-27536,31328,-28683,-16699,-19994,23218,-24220,22462,-9039,-21146,-22606,29466,15584,-26723,22280,-14748,5893,23139,-16601,-18828,-12031,-6365,5620,3501,7548,15883,28394,27162,288,-19739,31941,2787,2278,30261,-12391,31769,3182,23493,21321,9342,-16540,-26718,8218,-3011,-2854,-30360,5421,-5020,31078,3030,6493,32243,13994,-29391,-23454,-3019,14890,-22047,-11030,-30418,2937,17915,32440,142,-26334,-20441,-10348,21394,18170,24026,-17062,-12816,-32254,-188,-17564,4960,15392,2361,-30673,14834,-12717,-4894,-18214,7652,20275,12812,-11241,17987,14133,-12724,-1946,-20266,27669,14639,-16998,-8483,19908,19306,30104,3432,-7032,-2527,22829,-7649,30150,24887,-31065,28584,-26950,-4786,-26585,-30774,25301,-7504,-12025,-6801,28362,-24234,32765,-13665,-18021,1540,31564,7073,-10196,-8798,-29878,-28270,11823,5531,-26463,-16496,-29738,-29279,12625,18846,-1258,-12883,2522,14444,-14720,-3275,-25996,-5080,10254,-16945,7433,-16082,6915,27950,-24096,333,-10936,16617,-7065,-32018,-6338,32209,-19599,28932,6165,-30395,-29185,27468,-26207,9870,12868,3030,2723,-20432,-5739,22629,-16531,12479,11233,-24642,12382,-32479,-29606,-10697,-2886,20273,1259,26115,-23797,-1986,-3
820,4575,26579,24477,31212,25636,13293,31894,-20587,-15686,22980,30727,-29345,-11399,11732,15246,7948,-25642,12238,-22708,-26422,-28363,-27988,18653,-24132,418,-5107,-57,8526,18646,17560,13502,17181,-13012,-26808,6764,-4994,30873,-15469,-20723,20471,1525,6438,-4748,-172,5251,-21378,12112,11704,2941,-11835,-8801,-23355,-22114,-26573,-4510,-4067,10715,11738,28870,-3593,12929,24438,-1180,19837,-2840,-24139,567,-20977,7999,-8171,-8131,-3752,17385,-24631,-26294,-31443,4851,-18699,-4927,-8500,199,-20339,16982,-20222,22372,-8378,-24513,24530,-31139,-13845,-29153,-26900,-24514,-3698,4341,-22940,-28431,16409,7532,-1346,-14542,-294,-2331,-6189,-14458,21601,16207,-580,29934,32433,-21148,4142,-19146,-15741,-17147,-8830,8902,-7345,-29091,22888,-21058,-27951,-7796,21447,24549,12352,-30296,-14394,20125,11170,12360,-26453,31891,-24726,-14415,21884,-27082,27514,-27470,-4767,-27931,30543,8603,-22262,3657,12893,-8341,5760,4891,-9212,-28069,1440,29970,14874,31594,24179,8792,-2444,21165,-4165,-22678,-22874,-14142,22582,5770,16302,23262,-29212,7862,-28747,-10481,30348,-25788,-13715,25204,-15879,16903,28919,-6379,5209,23522,-19567,-23070,-22052,-7592,19872,-153,15973,104,-13023,-26481,29662,-630,12328,-13394,31035,16801,-25208,6461,23012,5859,29844,13334,14389,-854,24741,10804,-2719,-17028,-26973,-29105,-460,29699,-18325,-9754,8671,-12733,-9292,14291,-17986,-5019,20583,-28776,15349,22241,-25466,-16868,-17908,-24853,-16347,6033,8077,22071,-8981,1611,-30801,25473,7042,18627,31682,-23121,28334,-20423,10014,-31582,7357,-29982,-2253,-4332,23364,-18606,31554,6454,-7694,29144,13407,8288,16442,-26669,6861,19697,15213,1205,21132,5307,-14276,25429,-23587,19786,-22554,13112,-10924,30475,20496,998,-26530,1959,9840,-17077,-740,-6567,-1634,-6654,-29637,-31025,23641,-18170,-23600,-13921,-10701,17382,15927,-31931,15066,27841,-20205,-29424,30555,-32690,3651,20196,-19078,-6094,23878,-7906,-29574,-23218,4015,7241,22779,-20851,-23996,-14963,16854,21144,23537,-30584,-15298,-17593,-12838,15381,-22625,28003,163
33,821,18678,-16487,30994,21859,-373,-17721,31376,18401,-27439,-5351,417,16341,-19140,17917,-26126,3497,28354,-24452,-27835,13647,-9165,10748,-14968,3519,8950,-28719,16174,22992,-23501,16268,15310,-23467,-20138,-5420,-21776,27897,-1943,7015,16375,-4555,15364,14643,25399,30060,12569,19361,-26183,10665,3982,20197,31692,-2277,-10476,17164,28988,12019,-1720,3333,30419,900,-17190,-7664,-31888,-25614,-21770,-8121,32312,23027,31333,-19157,10373,1656,-25297,-27157,-14651,-17570,-13089,18609,-19073,24472,-31036,-21719,196,-7436,-9552,20376,-30554,19487,-19705,24508,15625,-12790,17359,-9579,23151,-24549,13318,-11842,521,-30871,-25206,32509,-12136,24777,16562,20984,5296,-10772,-31575,-28342,-8808,-3228,-24990,7144,29925,31011,1654,17022,-28230,7307,-30389,2054,20960,28448,26595,25730,-9139,25546,26113,6735,17040,-14678,-12306,14493,12144,-31661,-31200,21420,-16789,27667,16312,-2855,57,15278,31142,18046,28683,23238,18006,-13661,-16729,-12112,16839,29467,-19679,15145,-17720,-26325,-27004,-7315,-31421,16214,4339,31671,-27781,28005,-31839,-20790,-28468,29062,31823,17798,-13396,22081,-19749,6280,26079,-2714,1977,6504,-11075,-12653,7820,25213,19911,-20502,-31748,28816,-29725,-10763,9878,17625,29064,-15231,-20372,5147,-22066,-21482,7877,-28145,-27973,-18438,19541,11784,-1602,8342,-2221,5592,4463,13929,-15480,8956,-25558,30322,-15771,-7819,19538,5937,-6359,-14779,2597,-13847,-1074,29070,29281,20060,-14084,4183,-25287,-24829,-10199,-18577,-23601,-16910,32447,2014,25978,-10454,-14386,-6204,-22724,17205,19239,-8964,22186,15902,-21236,-24869,6240,-24077,-11676,28119,-30164,9679,8913,5858,-16771,-20850,7348,-23109,21998,570,-405,-21005,31148,29545,-15,-3713,3852,23967,4283,1810,31226,-28514,6902,19036,22915,-513,27828,1151,-9217,19588,-20709,25166,-21768,-10089,-18138,25811,-28657,-8087,-842,6032,32758,25890,-13112,11540,-8204,27024,14520,-11951,-20688,-14646,-18573,5130,30429,20667,22673,16343,22879,28744,25936,-26345,2214,22020,-12980,-23513,13240,-18778,-26368,763,-8541,-28728,24299,141
06,-14091,24770,-12922,12540,-29089,-14794,28608,5983,24679,-11891,-19265,-8887,-25024,-24033,4415,29968,16789,27813,-5936,-9845,9870,-29733,23813,-7296,13185,-1550,-24351,-30012,23313,-5859,-18638,-2599,-3785,11028,-13997,-30231,-32669,27257,16068,7344,23027,-26617,-9714,15538,-4291,21270,2429,29121,31233,30563,-29611,29687,-27856,22537,13258,25080,12891,20473,7572,-12093,13766,-22991,30715,26825,31189,-3770,14738,7632,26722,15849,-3020,-25089,-22905,-30227,-9176,-23214,-21661,-19993,27986,13283,-7296,-10126,-9285,-21846,-5797,-27009,-28515,-1539,12560,-1653,21724,15353,-284,-3822,6807,9891,19532,-15606,-10113,-31588,-17703,-13784,24316,-30768,-5793,-8576,-1555,2367,17904,-19995,-19113,2963,6045,-17567,18350,-26776,10768,-9095,-19127,24226,-14986,2239,-1568,-32403,-27349,-17841,-20153,-28588,-4074,-5379,3465,-18892,5019,25056,7654,20313,-16923,11475,12211,19173,27318,29971,12856,-20917,27327,-23961,21266,-28182,32409,-16351,32052,-19341,12070,893,-1687,27608,30109,5045,-20902,-10141,-26196,-12056,-17532,25250,-20805,31491,371,21907,-31536,-17192,26451,15856,6450,1121,18367,-17391,2866,-20632,-2550,-6629,-17253,-6182,-20244,-18313,3314,12729,4531,-27258,4732,-22331,-30580,32587,29731,24688,-11202,-31080,-28678,-32338,-15607,-31595,7283,-969,-32525,-29523,19306,30467,-6311,-17436,15439,-15320,12090,-9574,4957,7986,18053,-27724,-7768,-2835,-3475,-8097,32085,23893,6839,25737,-31925,-15501,9188,31789,18759,-22280,-31908,-23494,1018,9584,-10325,6627,-3992,-26467,-8767,20292,-12650,7387,-6940,13572,-288,-32738,9299,-13550,-9602,-25452,-31808,988,6888,-17759,-6399,-23871,-17257,25759,-10087,-7102,4723,-25844,-12097,-7965,15515,-31158,-29913,-3352,-31588,-11548,7194,8016,30755,14492,-7677,-6657,933,23333,11868,19939,-20654,12646,31066,-4319,-865,27154,32157,23121,-9882,25241,-7858,10153,22201,-13035,630,-2732,21748,32443,3570,16294,-820,14123,-22354,-24728,-29351,20319,13885,5435,27110,-5485,-8810,-2229,30575,-12347,6061,23911,-5398,-8075,-10509,30523,-703,5349,15950,-9977,
26319,31498,-20322,-13532,-27667,30542,1177,-5787,29586,-7041,6247,2969,9375,9571,-32505,-10483,-19989,-30300,-4104,-26115,18469,-25438,-910,-28913,5484,-21700,29856,23076,-16994,6398,19406,-2196,14330,-29897,29755,21469,-9183,-21992,2776,14794,28827,29146,-2733,29820,29385,31346,-1143,8198,31949,18063,-9827,-15339,18677,-18303,25142,-16066,3552,4286,899,-5364,27031,8734,22002,19336,10692,12628,9262,12827,21549,24193,8948,-15205,25481,10790,-8834,-1313,-14663,110,-18162,-17105,-24295,-16603,24833,-10834,15875,24004,-20338,-1357,-28162,17025,-1858,16630,19283,20723,1676,3018,-13304,22211,-7940,-20331,23694,-12886,-16569,12285,32124,25962,4021,14498,-14005,-27611,-31582,-32335,29695,-23703,-18394,27739,-28782,9215,13716,-7735,-29988,-31851,4469,-4557,-6308,662,-20646,-15371,-19115,-21352,-23859,-96,19274,10799,-2967,31007,-13980,-20255,-9719,-1101,-19178,8256,-17091,-23658,15788,24486,6536,11985,-16213,30102,2129,1272,-17112,19401,10202,-19841,-20531,13495,7449,26142,10274,32397,-28270,24669,-27339,26728,13481,27681,13836,772,3671,-15997,9770,-23487,-8724,21011,30915,22557,-13070,-7882,11703,-12097,13422,15344,21559,5524,-30337,-5505,26970,-27848,-7434,-17547,242,-15729,22500,15121,322,9422,20672,-32482,-25845,-21339,-29867,4302,-23308,3762,-1006,-6218,4795,10981,16076,-32572,30592,5574,-3622,3555,-5495,31165,-1281,-3842,4806,-4575,-32492,-29127,27276,1681,23327,-29544,-21147,25750,16513,-29845,16076,-21293,14760,8164,13815,26756,31777,-1828,-30859,27138,1688,11779,30015,-9842,27565,28840,8312,5668,19458,-28663,19746,-13357,-12790,10128,18322,11511,7141,-4528,-18027,12525,7051,22789,19742,15649,29494,989,-26181,29614,17982,-17615,25697,-19916,-17101,-30599,-8539,24943,31280,-19297,-6387,19102,11043,-27695,7544,28832,-5020,23934,26863,-9684,21978,31185,32443,-14133,-4345,-14128,-22402,29548,24139,-9264,-16295,11480,24176,6943,-26990,23408,-23442,-20383,31689,830,3573,-31459,-22730,25412,-14749,-11293,2494,31180,23883,-3276,-9931,-2497,27626,-3545,-28099,26922,-1003,-16
143,-28007,12980,-20088,4067,-23082,26026,-6933,3150,31806,-5966,-10183,17814,-29014,29086,-31941,-8816,764,22491,-5344,18444,-20247,18300,-26300,3746,-11889,-25620,-31978,27081,22021,-11180,2542,10545,-16950,15200,-3618,-22369,1657,7513,20280,-26961,-19504,-4249,30967,7519,-22925,-1767,-20807,-28893,675,27402,10531,-13957,-19394,32155,25173,-8537,-32059,-18039,22972,21433,-18997,-13024,1012,29016,-9273,6318,-29255,20589,-29631,1841,-27594,8187,-25360,-17767,-30776,-6016,-21322,6448,-28115,26085,-10621,-23496,-6966,719,7951,13666,-513,-22190,8969,-17952,-15331,-30818,-26909,19005,-1313,22657,-11163,-15554,-20874,21267,-32357,5960,26109,-20153,-8039,17353,-19329,-24340,26442,16018,21635,-3542,11071,22157,6595,-8438,9672,-29090,-7501,-6017,17896,-30651,-12596,-31171,29226,22760,-13954,10162,-30633,-17777,-14559,-3098,16838,26713,28962,15223,2451,-18504,-24004,2800,25005,-31351,13695,-30672,-3295,-25064,-14788,29236,-7906,196,-24506,-12160,-14930,-15113,25464,-17813,30011,-3992,7166,-18811,9719,23588,-2920,-28012,12176,-32737,21303,-32112,-24656,-20990,11669,30247,-17182,-20115,-14683,-20066,5951,18086,17697,-10962,12014,7416,17438,31398,-2938,535,-24364,30975,-1142,-23021,-8781,-6697,32382,23133,956,-14536,-32720,3350,20495,-6786,7810,-24171,-4787,1451,-1537,-26075,-29120,-5596,17194,24026,-3169,4114,-22603,20045,-21178,9937,-16347,-25593,27821,-17331,-25680,-30680,31185,27366,20595,29602,-15607,4429,707,3071,-29981,11764,-17367,-25122,30620,27118,18523,-3030,-29324,18031,-6090,-1425,28878,-4441,-28344,-10294,26533,12178,-22133,5878,-27282,-25575,-724,-8704,13384,25523,-27779,-16945,22789,3,-23241,-9649,9487,19465,-25107,-28768,-22118,-14414,14295,30282,24040,3011,7844,11562,20195,-18099,-31266,15905,-11326,-25530,7684,9582,20888,-11837,23535,8933,-14593,11925,-6002,16616,-15037,11907,-14278,-16664,24584,4957,7428,-3866,-15583,-22408,14582,26139,-1822,-16803,-24571,-3856,-18100,7099,-7351,31919,-224,28758,-16722,-20679,-18125,-14512,15086,5687,-18872,22860,-25632,-124
37,-27086,6546,-26572,10057,5936,-24792,16423,-31083,984,6896,-21373,-23495,-1767,-17818,-10379,-10547,-10667,7663,-18355,24846,-17027,2997,15694,-25703,23468,19657,26474,8979,-27053,23238,11379,-21083,-28383,-21039,-25893,-28229,22293,-19150,7414,-32564,-14938,-10310,24261,24140,16423,-32360,32393,-10643,17077,22985,-18144,6824,19632,-24523,-824,-3189,-1664,-13772,-7729,14283,6918,15982,6148,28764,-27650,16895,17378,-32605,18229,28731,18849,-2516,13262,8326,-22117,-26842,30604,10671,-20306,-2675,21206,29423,-11344,-595,-25743,-16713,30185,-19873,27065,-9069,-24059,32283,-22027,-13614,-15361,-32219,-15895,44,17548,-16822,-22606,11672,-9190,13241,19694,1041,17984,-27256,-8778,-7583,5373,-6468,27209,25892,7841,-24589,-6018,-21790,26835,7428,11486,-22181,-8711,-32531,30600,-27096,-26751,3277,-18920,-10742,-18478,-32113,26716,-27414,-18690,1323,24320,-31333,-26199,-25831,-18893,199,12286,4465,-32281,-18737,-20942,26749,-9975,11410,15868,-8742,13656,-5299,4359,21778,-30097,-14646,-18022,-32148,4186,-14121,-32749,5097,4971,325,8859,-8101,32213,-20620,28316,4917,10647,26879,26740,25111,-20156,7772,-24385,-28265,-14740,6266,24702,17180,13702,-28787,9113,-9643,-12775,-6306,24637,-4388,-28202,5134,-29232,32557,-12139,-22997,10844,-19163,-14593,19687,1983,23659,110,4968,17775,-6095,20465,29223,-4075,-8065,8958,32267,16429,-617,-22989,22372,-7517,-16525,27991,-20709,20662,-19725,-9718,-30347,-642,-21798,-18422,19264,17163,-12005,-642,-14762,3868,-21087,-10363,-6731,-15840,7268,30101,-32414,580,-29536,-14905,-16902,-10171,2051,-29456,16462,23261,29945,20873,30221,-4655,-1122,17024,23619,24041,-31068,6484,-20972,-14110,11729,15535,-31368,5964,24091,12449,10413,-9073,-18847,32422,-26788,-17812,9520,-15626,2741,-992,-11231,25030,-32712,-18189,-12949,22192,21401,25187,-14076,-28085,20371,19733,-27420,27680,-9708,1900,-28802,-3107,20926,13351,-36,18341,-16543,-3912,-19616,-4394,2319,5550,17217,-16736,23704,26187,-11842,-2029,13397,32674,7134,-24646,-27948,-16310,-4585,-88,-21997,9851
,5595,-18915,-3072,29978,29818,-19994,31787,-11838,17608,-31720,22647,31323,-10164,-26856,3228,7620,-12888,21131,-6908,-8181,-30372,-17357,-20659,9902,1066,-15367,-18099,-8222,-20702,778,4154,4259,-15650,25963,-484,-32445,22590,-17025,-6881,18749,-29010,27876,24680,6442,32762,-29847,6158,718,-5965,25872,-23516,-6705,5303,17668,-26309,-1104,-23041,26270,23109,15332,-24649,-29094,-6450,16485,11273,12878,25315,9441,7655,14939,26286,-5855,12197,-5834,-13606,-27797,-5763,17738,21767,452,18698,-18686,-18735,-6117,-21055,8843,26137,24441,-614,14823,-5938,-27644,-27930,-12394,3630,31638,32014,-5282,-12718,12373,26055,26719,-8028,-11952,-1436,9119,28396,32105,23523,8109,27375,-2526,-9396,-18564,-30362,27533,-27064,4079,21839,927,19979,-15200,-1965,23714,8413,-26297,-14573,8234,-26208,-214,-17976,-1410,-20175,-26104,-27404,-22676,-8404,-19707,-24725,-10259,6465,-20251,28182,-26244,1140,5846,1242,-10032,3681,-6845,1931,2393,-12614,-2117,20509,-745,1390,19007,24831,-24531,7269,23105,-25981,22202,-4202,21112,14040,27764,27977,-16114,7045,-20621,8263,-25980,-9277,22056,20705,-11371,1643,-24361,26744,-19860,4489,-14810,24747,-11929,15629,-18109,32028,-20343,15344,-15828,-24569,-10756,-24605,32021,14424,7014,-20311,-9258,-16477,3627,-16044,7369,-6840,363,27500,21204,-30473,25202,2301,6739,17374,-26407,23088,3842,-4321,-27690,4347,-16437,28370,31152,-31298,4953,122,30064,-2714,20355,23334,5064,-18672,-21319,-29444,-16508,20311,-19046,26965,27457,7128,-31164,-5241,-15025,29872,11343,30028,16083,-24936,4444,14812,9511,-2473,-20812,11902,-12526,-30227,4877,-32499,-10352,27837,14900,-18582,25172,-11945,-32698,23440,1161,8785,15710,-16976,8180,-20016,7675,-22865,-15564,-15888,-8802,27074,6843,-21228,125,28876,-6523,-28803,-14291,-422,-7422,-15143,-23292,-3823,29247,20225,1661,5158,20542,30791,3247,14019,-982,-7738,17288,18224,-3284,-23798,27988,-26402,-9488,8775,14418,-3977,-15174,22251,14820,30743,23349,18095,10611,-22195,-17887,-5503,334,-16900,-11013,-13065,3471,30444,-24787,16973,846
3,23756,5738,18819,731,21999,30059,-24528,-5920,-8953,-25850,-26972,21017,-10397,31527,19292,814,-2811,26087,8773,1965,-21624,14065,-2200,-2973,-25882,22965,-19381,32260,-2056,-32717,-30962,-13420,-30305,9416,-18389,18146,-14723,-25390,-17129,10239,-30890,-18370,-22744,-23429,9145,25953,1412,-26976,-11440,-7765,-9696,20083,27158,889,-8797,3806,-21979,-24083,16740,-14706,-26761,-13216,-14836,-27456,12553,20814,-3051,-11219,-6231,-26685,20666,22694,32561,-5480,4091,29592,-8199,28703,-28056,-15105,8100,14569,-24983,-19963,19318,-9470,26197,9045,-23755,-26355,-107,32644,27620,-30951,12863,-9656,-6081,27302,-2653,27605,18723,-2963,26564,28965,5885,-29247,4012,-9198,-27924,-24576,-13975,24196,-28490,8850,-5005,2595,13172,12853,-24447,29591,-17655,18782,14431,-15124,2491,-4242,-28222,-21482,-5835,15377,4014,26374,20918,26343,13372,21952,-3643,32295,26290,-24280,-2732,22592,-28262,-2626,-6904,28324,1549,-21097,-24199,24218,-13910,13746,17139,-22741,31589,23454,-30220,31991,13878,16527,-18355,8864,13680,-28313,-4592,2719,24532,-15134,-20601,-30585,-26167,9483,3750,17355,-13307,-17022,-27234,-17771,2903,3828,-30138,-26800,-15469,-32496,22021,11459,-25612,-21475,31717,22099,7970,-30706,-1656,6917,9565,-18693,-8926,-10825,14831,-11843,-30789,-505,10974,23003,704,32404,19246,16809,5249,-23815,24345,23318,-2172,8552,17289,-20017,18642,-20188,-19277,2326,-3811,26874,-23359,1411,-11600,6211,-24137,-4301,58,26642,-11981,26803,-13831,-5449,11072,20886,18384,27653,12831,-18905,20892,-24612,17549,15111,23569,8783,18351,-28485,26435,31666,22045,1424,23482,23720,28067,-21344,-18199,14171,16880,26804,11459,30093,-14185,-19472,-29346,-3604,9714,-24925,1635,8413,9685,-29142,23709,-19642,26478,-5424,21517,-9105,-7072,24791,-18950,-26577,-24772,27148,6873,-3891,-17403,26976,7607,16912,-4768,20614,13613,-30486,20031,27533,4184,-24159,19863,-17771,-15867,-25465,25019,23971,30349,-4918,28533,30582,9134,-14177,-2184,-28381,-12701,-14742,24564,-24903,11457,-24246,854,-29995,-29338,15921,-19615,-22
97,-20751,24156,-15588,9711,19346,-25092,-29166,21966,-31676,4323,-14614,-11073,31833,29623,-7985,-7393,-7025,-23033,-20306,-8856,31766,31472,-8922,30353,-3566,12211,26527,-18505,-1902,2300,6024,22887,11741,-30313,-24690,26195,26911,-5443,8849,10171,20171,-23197,6697,10464,11231,4068,18863,32538,-4204,-11357,8907,-2255,8178,-17438,14277,-18571,-5846,-27537,8113,2055,1194,7047,-16793,-30785,10617,-22231,11630,21939,-19070,62,13537,-20870,25241,18326,-15698,-3748,-29707,-13085,-29679,7288,9780,-5644,-12699,28711,3961,26906,-23727,24131,1010,-12386,-21116,-11552,6970,-9658,-26593,15824,6436,-20794,28738,-23930,-17798,-12881,-19355,12813,-27465,19919,23097,15973,13316,-1144,-15493,-30578,-12146,30645,523,24005,12710,-29778,-22860,15661,5253,-14009,-5400,22535,-22415,13075,3715,8811,27191,26831,-26259,31029,20326,-11967,2556,13155,1162,30231,-16171,11413,-22743,24764,-18978,-18761,14791,29483,23649,10079,-657,32158,-12240,16059,-19978,22676,-15491,14770,-25759,-28302,-19950,-2618,-6408,23958,31224,11849,-28502,3187,2684,-14707,26327,18842,-1553,12100,-28028,26651,-16796,-19243,17007,-13613,-19715,-6218,-415,14765,-32560,3124,-28519,10466,-6879,-31726,-14589,20720,30347,-14835,-14126,3923,-23364,30942,-22123,19691,6108,362,103,20340,-6966,9335,29888,24712,3520,-22042,-7553,1304,-603,21239,15986,-10611,-5770,-13399,12176,17130,26481,28506,22720,-29405,17002,-9439,1386,17769,-23922,17601,19510,-6101,21214,-18039,17260,18641,30040,25285,-22017,-12521,8827,28973,17060,22669,27378,29041,-19500,2180,-31407,3324,-23909,-19851,10320,602,-19854,-11037,3999,10682,6673,25738,6612,-29825,-9906,-7335,22947,-23428,21034,32077,-1442,16896,-24261,-7872,4657,25149,28460,29308,-23572,30013,-5505,-7394,-28953,20262,-19175,25080,-9160,24568,-20273,6466,-18439,1878,-16442,-19276,-18835,17063,26974,19985,19491,-112,26313,22963,-27118,29176,11611,-14602,1618,999,-6731,-85,8656,21842,9402,4702,-12534,-31702,31646,8754,-21173,-10125,-4537,24960,6773,-30538,2560,29318,-5222,-31674,-20419,20307,323
43,-19054,-20667,-5989,-27546,-28316,28279,-25438,-16588,5668,28843,-13647,-15294,-6870,32689,-9419,-4435,-22672,-28754,13456,-21401,26206,17774,21583,29714,-25710,4480,-13660,-15861,2154,-16504,-32655,-23408,23512,32687,26853,-15162,1443,27749,2429,-11051,-11348,-15557,-15729,-15637,20075,12404,3037,1211,-15392,-19317,25356,25133,3776,21076,-26324,-15886,-21545,17823,22638,-6852,-20523,-7364,5160,22780,31254,-26286,30856,-10244,26235,6854,-12092,-7689,10814 diff --git a/tensorflow/lite/micro/integration_tests/seanet/conv/conv18.tflite b/tensorflow/lite/micro/integration_tests/seanet/conv/conv18.tflite new file mode 100644 index 0000000..d6bf912 Binary files /dev/null and b/tensorflow/lite/micro/integration_tests/seanet/conv/conv18.tflite differ diff --git a/tensorflow/lite/micro/integration_tests/seanet/conv/conv18_golden_int16.csv b/tensorflow/lite/micro/integration_tests/seanet/conv/conv18_golden_int16.csv new file mode 100644 index 0000000..97f5411 --- /dev/null +++ b/tensorflow/lite/micro/integration_tests/seanet/conv/conv18_golden_int16.csv @@ -0,0 +1 @@ 
+-6386,-3010,32767,-6580,-7121,-633,6992,2130,7240,-17974,8662,-1641,11339,-11509,-789,-29206,756,32767,-32768,1296,72,-6132,30594,-3814,11537,16581,18553,3321,19807,19430,3725,32767,-1868,835,18441,-8080,1750,5307,-14983,777,-10200,-24970,539,-195,9022,1469,-5000,-27285,758,5088,8294,953,12007,-3902,29028,1229,-10722,14883,-11724,354,21117,24225,-18360,-30146,2222,24389,-32768,1318,-473,2444,-3619,122,3237,7882,15673,1170,26539,3661,12234,24274,389,-27843,11531,5385,8486,5039,2112,11966,15494,-22245,-19491,-5149,486,15759,-91,-32768,-8169,-5148,6316,-151,1576,-3088,10532,-5770,15665,7355,5466,-4512,-4436,-10830,-11921,18053,-6958,15440,-19449,-5004,4728,105,30814,-11959,-5614,32767,-21998,-2424,16640,18879,-32172,15281,2173,14389,32767,-323,1309,4682,12223,-4881,-4777,-4316,12819,1421,966,27785,12669,-7084,8058,16195,-32768,-1950,4249,613,-18168,11242,-9220,7134,31761,4100,12779,-2829,14396,32111,-9960,6634,28822,-3072,-4889,-1668,32767,-1522,27518,-3779,1712,4788,8869,8175,-872,3125,1387,-1490,-10709,-4403,-7772,-1242,-1802,-17903,-9242,3052,-32768,3473,-1584,5505,-15407,8660,-4444,-17917,32767,2340,269,384,3863,19603,-3942,-4468,-1824,1219,-1101,10960,4768,-14491,-3886,14943,31365,-1155,8188,-988,-4163,8647,-9577,32767,22055,-2007,-1512,2039,-5806,8518,5744,8453,-32768,2270,4596,4441,25232,5139,-11556,32767,-2213,-1594,32767,23382,-8412,32767,-569,8918,-32768,-4243,-9388,-4716,24562,-2180,-2327,10178,1998,1769,26699,15526,-1730,25959,4927,-28883,4907,4693,-9268,-5980,-17997,-5502,18209,-26475,-10455,3123,-23846,-5934,-902,-8245,-7013,-4816,32767,-2798,5416,415,-24185,12940,968,-8684,2536,-5177,-8155,-8511,312,-5219,-3539,-8244,-32768,487,-3208,1344,-2395,15412,-6806,-22502,19864,-989,4817,-7152,4907,-1918,-2183,-21260,-12881,-5452,9634,115,9542,-6754,-7744,-7217,-14210,-6357,-6489,4764,-6909,-9601,-2328,16511,13810,-2306,3123,5111,12743,-5468,3718,19620,-7230,4309,13299,27973,-7849,-12720,2298,-19836,3532,5355,-918,1918,-17928,7108,-13189,14123,-2603,-5485,-18324
,-1171,-12155,-9680,-7651,775,9666,-3931,-11751,-130,-5265,-649,10800,-2727,-14607,2096,-18395,-19115,5416,12570,-4507,-16262,-32768,1003,635,-4285,19039,-3073,-2419,32767,-8116,-4764,5872,9616,-27841,32767,-233,16779,-13024,1125,-5869,-977,6313,-136,-12473,1128,-6934,-397,-694,-6282,-6298,5744,847,-9685,6068,-4337,3449,-2134,-14175,-2693,-16691,6465,2280,-5161,5768,-8158,-11239,25510,-4672,6042,32767,-2028,1938,-6017,-2749,-1000,-5720,-5245,-18920,-2613,4601,13193,-20363,-16093,763,26261,-6664,-3311,-5623,399,13929,7303,-17756,9026,4756,6695,14988,-4061,14591,6861,854,-10418,-1572,-1120,-1315,-4292,-32768,-4459,6634,-1738,-29603,27,-17766,-18511,-13816,18912,4041,4187,-32768,1621,-2772,-5562,10687,2350,650,22555,11987,1151,12818,12714,5843,32767,1381,-2766,-16452,-2199,-6894,-3694,6951,-21050,-15203,9722,-32768,1376,3878,16129,-29426,-23215,6152,-13120,-7757,12065,9469,-1025,4439,13550,-13825,-2783,6604,-662,-22549,5422,7904,-2556,1549,1617,-27050,6007,882,-3827,3322,2937,20496,5932,32767,3465,-6509,-18670,6110,25110,10707,5359,1646,4878,686,-5387,11222,-11482,2917,-20917,9541,7650,20776,22179,-737,-19350,-5976,-9895,-32768,-3609,-11151,-4595,-20330,2602,-20941,12576,-17752,756,-26419,-6549,-4838,32767,-6258,-8809,32767,-1061,-713,-2935,22741,-6603,-9632,11971,-1402,1127,-5619,14645,-26677,-16945,4603,9974,-32768,1222,5143,2812,-8809,-6596,2847,24912,7840,-7946,7678,-6416,-6109,11110,28,11260,29346,7431,9209,1392,8987,-1155,1501,25240,1236,-2852,4489,5061,-4589,-4776,2101,-9646,22500,4099,8052,-2932,-2444,-2307,-16545,10323,19836,-875,-9376,9007,-9697,-183,-6971,-24433,-10465,-6102,-2312,-2128,5558,6566,6457,5760,6085,2367,-15125,-21741,-2432,13957 diff --git a/tensorflow/lite/micro/integration_tests/seanet/conv/conv18_input0_int16.csv b/tensorflow/lite/micro/integration_tests/seanet/conv/conv18_input0_int16.csv new file mode 100644 index 0000000..1381cd2 --- /dev/null +++ b/tensorflow/lite/micro/integration_tests/seanet/conv/conv18_input0_int16.csv @@ -0,0 +1 @@ 
+-18939,-10829,-32517,30409,-13040,-3673,14856,25263,13668,21031,14365,-26725,-29516,16989,17703,-2047,-4268,-25181,-15792,30451,13189,7406,3464,3943,27923,10242,27995,-20839,22234,-7429,12460,15857,6147,11459,-636,7563,137,-21146,-15529,-11482,-3365,-759,-32708,-30576,1115,-18493,19798,-16997,6441,27113,31425,-20079,-15502,-28031,-10412,-28368,-12638,-5148,25547,-22247,30588,-8234,-29564,-14414,1114,-10324,14189,-9136,1531,-28783,26786,19844,-11553,-9318,7948,32377,7855,-28718,2502,12920,23164,-5288,1257,-586,10223,20956,31307,-1571,7964,16257,12600,-3768,-25917,-27720,-24133,-2971,-5809,-16888,-3023,-17206,-10499,30549,-6721,9095,24559,-9853,17228,-21315,-21390,-11273,-18438,30986,-32649,4830,-9069,4546,28802,-30465,-14519,-6735,16352,-10658,23403,10670,28994,-14813,-7123,-22498,-20310,2841,28480,15346,-19143,-23422,261,-10272,7663,2430,8385,-10823,14835,-12841,30382,-13953,-784,-7235,-1618,27435,-25402,12493,10131,-9029,-11204,11124,30525,6939,-8849,-27647,-24030,-12398,-26271,7336,23546,-85,-19731,-437,-15209,21732,16205,13206,-30965,30835,7164,7351,14558,31875,27817,10476,14585,-9018,24927,41,-22703,28704,18890,19700,-11250,-7771,12968,7615,30348,10486,-10342,9878,-28189,-30198,10822,18814,-21652,8101,-8989,29639,4021,-20916,-1405,28049,-2998,30578,-2893,-11181,-26387,14962,-31350,-32113,29943,18699,-17588,-26000,21560,-7278,2547,-24357,13866,-2643,13899,27633,-22673,-26214,8654,29415,-8998,-60,31415,14396,19521,-10203,-4776,5201,15530,17004,-27129,3314,-31481,-29683,22016,-17238,2480,-7661,-15496,-16541,26213,20157,24959,-31075,-1804,24924,-16418,-6225,5745,31099,-22554,-14904,19681,10369,10018,10020,-6296,27665,31999,398,-13902,-26597,-23997,-19878,-10917,-23043,4951,-15268,14994,1643,-2632,-22093,28329,-3145,-14901,11367,-9915,-2987,14143,18894,22003,-2893,-31294,-5050,8202,-5965,16937,-13315,7770,6331,9140,-32251,-23862,7686,-11471,-19659,25639,26294,-15068,24925,3424,13326,23101,-25937,-1327,28771,5755,-2325,-32414,-1673,-19532,10907,-31205,4885,26466,1199
9,9327,788,5740,27463,1957,-26963,-335,24185,-7972,14649,24917,-4357,16697,-30402,-4288,-22929,2331,-3935,-15801,-9839,-23388,-29871,5422,50,-11812,-29916,27374,-22559,-150,-20775,31093,1057,-21894,30341,12796,19307,-2548,-2636,8679,-12753,-13307,694,-23448,-7827,12921,-25229,-5317,1842,-30394,-32641,-24012,-23811,31003,28716,-9730,-26482,12822,18858,-25213,4868,-2661,-17209,-14987,19784,7271,17792,10073,9435,-6010,7866,-21121,22482,-25773,-21914,-27692,30242,27811,28846,18286,-16746,-18899,16320,-9488,26725,1937,231,-13457,3600,-135,10727,-13897,18320,27266,20142,-19408,3603,3463,-30989,-21220,-5393,-20447,-13163,-28651,-3810,-2295,-26070,-15455,10311,15568,22451,28636,-28886,28276,-6378,-13334,9744,16369,21302,1934,3626,-29350,-7713,-9420,31777,1098,-7644,-16410,-6632,-13781,19958,2250,-6236,3593,-17755,1818,-29973,-6444,-18343,-22391,24215,-31183,26255,16610,21819,-22921,5842,-7392,16128,-20631,22663,9803,-18015,-16469,26864,5118,24381,-27991,-501,15891,6423,-23662,30451,-17243,-24848,-8730,-24722,-18640,-31501,7480,-11434,31170,-8718,1892,-14855,-14938,219,-27937,167,-10039,4018,2344,-2979,11700,-12201,13900,725,-20516,-29753,784,-14138,26068,10714,25304,24012,10971,11699,2016,9043,12582,6617,13197,-17736,6550,4099,31254,27834,-22524,-11468,-31819,-727,-2722,-20895,-19704,-4424,13585,12493,22839,19043,-22807,2799,-13370,-10081,-7104,13853,-780,-22997,-24212,-24216,5078,21127,-23149,8976,10227,31330,-10391,13818,-3842,25206,3871,-3856,19115,28012,-26725,6587,21489,-2973,-15714,-16208,13959,-3209,25117,9310,12079,-2033,-18645,648,-31737,13936,-11283,11800,19690,20062,-11878,16803,-30056,16659,24489,-2498,14780,4294,-19150,24849,-13095,18368,2605,29833,7489,-15380,23621,-23786,-11020,9530,23545,-8148,2468,-1086,-30803,-21179,-22111,31945,-11765,-7919,26853,-3998,81,-28847,7229,29443,-6271,7525,29678,-14399,25132,-31208,27345,-7977,15495,-3922,-27365,-6599,-15239,18256,10271,-12169,-23422,10874,30175,17631,24491,-17757,30275,-20458,29846,14453,28017,15845,16041,-270
64,-13204,13248,-12019,-14574,-20503,-19066,-29806,-23072,20211,-21763,20517,8349,21932,2928,8183,-13571,-17973,-5424,-25669,-30923,-26269,-14872,4778,32424,22965,-21059,-27977,-32009,8866,15262,20494,4039,14892,-16101,-10523,-25016,15525,-20759,-30730,-13047,16575,32440,16137,21210,-30246,18102,-20829,-26452,20968,11554,14835,-8120,14136,-14900,32661,11838,-22805,-17473,14606,24518,29856,-2240,-26627,30262,-20989,21564,25881,-16683,2239,-30756,-8299,3315,-23699,10539,-22932,-29043,14884,-17464,6397,-30609,20367,32247,1776,27137,-31499,-19799,13197,-11844,-23315,26342,-27783,-15142,690,-18233,-29532,-8708,-2333,3980,-9038,21029,-721,2755,-23036,-27018,-144,-74,-14689,1402,-26958,24787,12452,1697,-19515,22088,29154,-9554,-4416,-10882,16393,-18932,-10198,-12863,-15780,-13283,-29789,167,-11396,10438,24357,-26618,2302,-16363,-21096,-27242,14172,29068,-23955,-5454,-12926,30025,18510,-29718,-1583,20248,-24752,26820,-5351,-29398,-4102,-639,-17125,17479,-21482,21463,-15635,-32388,16691,3167,-15576,-24500,8471,-3605,-130,-18208,7918,26446,23315,-5496,28812,7351,-15279,-17546,-7416,-13440,-15396,-20668,3457,32314,-20224,-31199,-29278,1805,5151,26143,14078,-19982,-17202,-21390,-23436,15295,-29829,-6991,30257,13750,-25834,-18927,-10574,17471,13136,-14196,-7303,-10935,-31494,13120,24269,-18345,-5440,-5822,-12883,-17366,31561,18542,-18122,-8160,-18020,9296,10117,-17754,27420,-17644,-18953,-26282,16654,-3245,2197,-27718,-2183,-9497,-9931,-10277,28732,26368,-27043,-18763,17033,24567,13867,-8517,-793,-2644,-2199,961,-23416,-14371,-4381,-8532,26097,10051,-21588,-32249,-22217,-25423,-18527,5556,6772,-3173,16583,26579,808,-5887,25009,15965,14007,9484,-21656,-31767,-29335,-30945,-14350,15841,1477,7202,24152,9815,1149,29709,-13146,-26034,-2299,30702,-21178,17872,-10520,-22156,702,-11274,-1525,-25277,12282,-18815,18141,13408,25706,2746,-22250,-11229,-17547,23716,-29382,-22589,-23952,11340,-9913,-908,5596,546,17370,-28092,-10265,-12378,15843,7603,25465,24825,22436,6112,-5755,9462,-18764,-1
1955,13376,-22798,-5996,-3903,-23864,-10738,23095,10047,-32273,-31493,-20875,6870,6380,32190,9519,-7004,-21505,3802,12235,-16166,11776,-10086,-24810,15321,-15095,-9783,-28199,-1088,21243,12972,31589,20621,-8577,-5346,16280,28748,23545,3028,21296,-25356,-22169,21923,11250,-14050,-32479,6927,-21348,16914,-24843,-25711,-4158,6057,26938,-13258,20260,1856,16763,-635,-5965,-8688,-15429,-32331,12494,11078,7282,-16278,28430,-2085,23848,24885,-30016,-22145,22669,16344,-7770,-17406,-14976,-10209,13748,-14048,-20353,-106,-138,-14079,4305,-15679,8132,27329,30731,29619,30506,-24652,13165,18774,-25624,-5234,-12197,-6890,-21251,29055,-8665,28401,-14255,12232,-24170,-7506,-8506,-30287,844,4298,31753,-7440,-8383,-28231,23730,-13376,-20968,-18369,-12853,14559,4701,-21749,-10129,28854,28812,24461,24687,-11705,-27635,6747,9929,-3273,-13753,22208,-22082,-19981,-26753,-24563,-23073,-8479,5173,8077,-17218,-32588,-17206,21088,1006,6427,-4595,3393,6052,23063,3149,28677,8651,25568,-11110,2419,-23419,1024,-32626,-13115,29716,-533,-24016,-24439,5993,-27518,15845,-18944,2732,17942,769,5182,17162,19228,-25440,-64,-9836,-21882,-7792,314,-30361,-31174,7555,-14931,13252,8885,-24950,-3032,13910,-13091,21292,-17651,30417,10385,-18523,-15200,26815,20110,26659,22239,2777,6459,-11819,12365,20368,-12415,-27889,19755,-20529,848,19588,10837,7320,3659,-8939,-26714,-10837,11314,-4783,-16586,10299,4981,29083,-17113,9430,28790,-30783,20068,4437,-31042,19254,4378,-6182,22892,27753,4534,-8945,-16331,-4027,-17402,21156,25450,13280,30864,-14650,27072,-28023,23221,11455,8092,-7530,22120,-13619,-15754,-12557,-25173,15409,17023,-5385,29831,-16164,-28874,28886,7765,16272,25457,25838,21929,15005,15012,-4920,663,24217,15875,-2218,29648,24294,18523,18251,23224,-17943,-2019,-4103,-32340,6943,-24900,-27506,27053,32735,-18158,-27041,27374,3536,-25999,2914,8338,-11068,-2273,-21744,-11698,7283,5582,19247,21483 diff --git a/tensorflow/lite/micro/integration_tests/seanet/conv/conv19.tflite 
b/tensorflow/lite/micro/integration_tests/seanet/conv/conv19.tflite new file mode 100644 index 0000000..bc2a033 Binary files /dev/null and b/tensorflow/lite/micro/integration_tests/seanet/conv/conv19.tflite differ diff --git a/tensorflow/lite/micro/integration_tests/seanet/conv/conv19_golden_int16.csv b/tensorflow/lite/micro/integration_tests/seanet/conv/conv19_golden_int16.csv new file mode 100644 index 0000000..2a46c6d --- /dev/null +++ b/tensorflow/lite/micro/integration_tests/seanet/conv/conv19_golden_int16.csv @@ -0,0 +1 @@ +32767,32767,-12203,32767,31622,32767,30155,32767,10831,-7216,14453,32767,-32768,32767,15147,32767,23654,32767,32767,-2459,-24554,12963,32767,32767,-19870,32767,-1474,4940,-20722,-29918,22302,-6290,-29526,-32768,-18242,-25091,25711,-32768,8166,-21345,-9984,-32768,-22405,6138,27703,3798,-16927,-25755,-7550,-30990,22872,-24026,-32768,12414,-2104,32767,32767,18941,32767,-28750,32767,-2512,19141,32767,-20324,-4803,32767,18245,32767,-6231,974,32767,25869,2490,-387,-618,32767,25671,31747,23041,-3086,31164,32767,-9441,32767,-9336,16304,32767,32767,32767,-9038,-7854,-6881,8633,19317,32767,7773,32767,-32768,24274,21638,-32768,-14529,-27305,21866,32767,-9500,32767,19658,-29671,25703,-32768,-11979,32767,-14193,25503,-32768,19534,-22780,-29635,-19318,-32768,4585,1430,-32768,18792,-447,27926,-601,32490,-18106,-10682,-32768,-6572,1517,19996,1539,32767,29616,26501,32767,22606,395,32767,120,-3048,32767,-13321,-31991,10143,32767,32767,2674,32767,20884,-32768,-17686,-32768,32767,32767,-7683,21532,21932,25925,12296,22156,-17504,32767,-16911,-32768,8767,29857,6194,-20635,2994,32767,6049,-32768,-13256,11851,18804,866,3592,10399,-10859,-32768,15663,-32768,-32768,-29250,11633,-8875,-8955,12909,17811,-24440,32767,-32768,16224,-32768,20198,32767,-13984,32678,4945,-7553,16131,7904,16707,15834,-32768,-16925,-32768,-27991,6978,-32768,-18287,32767,1045,32767,26185,-15352,-19613,-32768,-29708,13841,-32768,11306,-8171,1369,-32768,-32768,25720,32767,2366,22215,32767,32767,
-28909,-7163,-8719,-32768,-9690,-1984,-24333,-9223,-26953,-32768,-2221,32767,28486,30840,32767,22872,12434,32767,9198,-32768,6346,-997,-24316,14804,-23539,32767,20992,-21437,15341,475,32767,-32768,25978,-10691,-3041,32767,-32768,12643,31098,-1054,-32768,5089,5918,-32768,32767,-1700,-15101,13027,-18709,8475,-9734,-32768,21553,6986,32767,214,-9992,14855,23062,-32768,13038,-32768,32767,25779,22097,-30803,7790,-32768,16803,-32768,-20185,-28974,5504,-6461,-347,-32768,23402,-8243,-13776,32767,-10631,-32768,-2031,32767,-32768,14509,-32768,14025,27067,4953,-17100,-32768,2750,1186,-10433,-32768,-32768,-32768,3800,-32768,4513,15503,10503,11999,2983,24293,-8765,-32768,32767,-842,-498,32767,23041,28746,11475,-32768,30169,26557,-11840,32767,32767,32767,-25735,-13441,32767,-20893,17061,17133,13723,-32768,-3286,32767,32767,-772,4532,32767,11928,18185,-25313,-32768,24070,-5974,32767,-11280,-9540,-32768,8082,21025,-5565,-14221,14452,32767,8899,2239,10234,32767,-12375,-7154,-32768,7669,-13337,32767,20169,-32768,32767,-7628,-23291,-12280,-23001,32767,-12961,32767,24171,8329,32767,-32768,6652,23831,5827,-32349,1636,19169,19862,10985,5276,32767,27882,32767,-29810,30396,-32768,-32768,-16843,1367,-5621,-32768,-9039,-9025,32767,32767,-32768,-12593,12127,-20330,30191,32767,8354,-32768,15836,32767,6570,-32768,-17638,1412,-13379,21764,1401,-10094,24542,12441,-897,-15669,21134,-32768,-9845,-11496,8046,-32768,5104,-16425,-15598,-26554,-2236,-32768,-12069,-16800,-737,-24029,32767,1974,19946,32767,11552,998,-5864,-11082,-32768,-32768,17351,32767,5387,-32768,10054,-32768,-21888,-4079,19043,18719,-1795,8588,32767,-6359,-15637,4672,-3132,32767,-10255,-11305,15101,-9251,-32768,-32768,4800,32767,-14942,32767,32767,4705,12025,-32768,22177,30015,3624,32767,25334,4332,17872,-32768,9511,22326,-23346,32767,-12614,-6210,30179,-32768,21321,-10107,23412,32767,-3309,32767,32767,-23519,-10527,32767,21352,32767,17465,22414,-11835,-32768,7448,-19626,10809,-32768,-23862,1952,32767,27179,-32768,-32768,-30052,-32768
,3500,-12258,-3382,-32768,-17663,-12516,10356,11247,18247,-32768,-32768,-1348,-6570,-11657,4842,-32768,23377,-21121,17618,-26296,20703,32767,8338,14989,6144,-18872,32767,-32768,-20415,-32768,-10009,-32768,32767,16840,32767,12009,-7993,32767,-5586,32767,-22490,-17162,-19419,-29363,6570,-32768,32767,-32768,25366,6862,32767,32767,-27669,-6791,1468,21226,32767,32767,13503,13739,3881,-18743,-18433,-32768,-23440,-32502,-1929,26262,20225,-8650,32767,32767,-32768,-736,-32768,-28627,3539,-32768,872,32767,25184,1698,-32768,3505,-32768,-7357,-8217,-32768,32767,32767,-32768,-32768,28540,-32768,15388,32767,3206,3205,32767,28285,32767,-32768,-16607,32767,1077,13776,32061,-9684,-32768,-8304,22827,32767,32198,8436,-13468,-2620,-14005,32767,-20945,-32768,22333,22765,-29093,-32768,-21411,-32768,-32768,-29539,-32768,-8672,32767,-32768,32767,-3813,-1188,32767,-32768,27819,-17536,-32768,24673,-32768,32767,32767,-23778,32767,-17699,32767,-1229,-31886,-24400,13397,26902,26767,17627,32767,8680,-32768,13677,-6781,27373,-31932,-32768,-32768,32767,-32768,-31585,-29151,13468,-15271,32767,32767,-32768,-22383,15853,32767,-2569,6255,32767,-32768,-32768,-32768,12420,-21180,-32768,-4784,-32768,32767,-6344,-32768,8261,5361,-2071,19463,32767,-32768,-18377,30067,27442,11934,-10378,-8485,32767,-2986,2188,9869,7676,29477,-16096,-32768,-32242,-13269,11991,32767,-6883,-32768,32767,-32012,11194,-32768,-18083,32017,-13027,-32768,-32768,20519,-32768,-32768,5284,-32768,-10830,-14920,-3637,2303,-27981,-8666,-14348,-17592,3066,-32768,-19746,-8511,11033,-32768,11083,-26993,-13664,-32768,21008,-32768,-13464,-1332,-18577,-998,-27176,-32768,25391,-25827,14997,-13201,1435,15358,-10250,32767,32767,-7279,16026,32767,9679,8956,3037,27155,29298,32767,31459,32767,13519,32767,6506,32767,29725,12329,17227,32767,27799,32767,1642,32767,32767,25193,17315,-23999,-21421,1582,-5712,-32768,30477,-27727,22250,15277,-4261,15081,6161,32767,-30200,792,32767,-15393,-18849,-2717,-6883,-32768,-31323,-5190,-32768,27483,-9468,9514,1719,-3
2768,-15095,-1464,-32768,-26912,-5913,32767,-26062,-32768,-28773,-8555,-32768,-32768,-25177,-32768,-26027,-32768,18915,32767,32767,-19343,-15347,32767,-941,22779,-4766,-3893,-14816,-23813,22980,-32768,3540,32767,-19853,-32768,7194,-10669,-29575,25690,16786,32767,-2089,24160,18039,2011,-4379,-6936,21724,32767,14960,32767,32767,32767,24361,-22161,5384,17249,6863,32767,-32768,32767,4381,32767,-12671,-32768,32767,32767,29436,-23273,14466,-32768,3071,-19097,-20602,9638,14919,16660,20176,-32768,4193,32767,7108,3108,14225,7882,-10187,-6956,-10031,3755,-3703,-23670,14739,14732,-25166,32767,18823,-32768,32767,-4314,21100,25406,11259,28867,-32768,-32768,-9095,-17033,14132,-12206,-32768,-31962,10539,-32768,5804,-15797,32767,32767,9017,-32768,10502,-32768,-4286,-4756,1422,32767,-5418,-32768,-30316,-32768,-5602,-23666,-32768,5779,-32768,19475,13142,-32768,-9715,-32656,32767,-18738,6893,-25806,15554,-23970,29683,-13562,-32768,32767,18029,-857,-15011,-32768,14176,15069,32767,-8959,-26144,32767,28991,-24249,10747,-32768,-7035,-32768,-4464,-32768,11381,-32768,-32768,-32768,12435,-1586,-10534,-32768,14527,-10439,9437,32206,-23226,24824,-11571,32767,-28051,-32768,-24616,-12079,-32768,-3284,2265,24505,24615,27261,1840,-4480,3702,-8895,32767,-7146,5798,5256,-32768,-32768,27738,-3592,-28398,-7777,29653,-10664,2500,4204,-32768,-32768,-19141,27409,-9396,18839,-19191,-32768,32767,-5284,-21014,-8436,-7513,3548,24085,-6135,32767,-25934,16016,-8752,12020,5485,30798,-16342,12732,32767,32767,-32768,3459,32767,-32768,4802,2794,11330,-29919,-32768,32767,32767,-18801,16005,11261,-28199,-4488,-32768,-23551,-26477,-26111,-29978,-32768,-4677,-2705,-19615,-32768,32620,-8877,3532,11102,-32768,-2484,11503,2743,32767,-18804,30914,-31504,8792,24425,32767,-6506,32767,-9099,6393,32767,-13854,-804,-32768,10636,-18241,-32768,-9074,-2997,-9278,-32768,2278,10701,19813,-10469,-23594,-32768,-26177,-32768,-32768,24535,32767,32767,32767,29840,7404,21752,32767,-3654,32767,32767,-4676,32767,6351,3253,-19875,6387,27813
,32767,-8540,11676,4646,1855,32767,-8518,18903,24893,25404,5273,-32768,-9152,-22082,15455,-23207,3121,9136,-10739,32767,13728,32767,-8657,32767,2228,25372,28396,32767,5600,-32768,3376,-32768,-1378,6112,-11926,4215,-21209,-32304,-32768,-32768,-9303,-32768,-32768,-8462,-22606,-32768,-8682,-32768,32767,756,32767,10508,32767,-32768,2544,32767,-6976,23487,32767,20304,-3680,-32768,9483,32767,-22024,-2181,-20641,32767,-32768,32767,6391,32767,-22050,15137,14545,-2922,-32576,-32768,21554,2779,-32768,26581,-8812,32767,5755,-15744,-3190,32767,-18340,3228,13520,-26459,8702,32767,20153,30804,-29647,-32768,-4050,9338,-238,32767,-9885,-32768,-5706,-32768,-32768,-23882,9426,32767,7951,-32370,32767,-232,-5996,32767,22456,32767,3904,14951,25079,32347,32767,32767,-32768,32767,2150,-32768,32767,-29299,-415,32767,32469,32767,4722,32767,30060,16160,21911,-10713,26541,-9054,20490,32767,-32768,-2780,19235,7594,12682,10438,14735,32767,31791,32767,32767,31071,11732,-13008,-10618,7575,-10477,32767,-10166,32767,30860,8945,1059,32767,21629,-16432,32530,-18477,16595,-32768,7321,32767,-26182,13612,-8569,21426,-32768,-32768,12685,-3998,-16607,29641,10495,-1801,-32768,-32768,-11799,-32768,9264,166,-32768,12379,-15995,-32768,3238,32767,27095,32767,-32768,-16217,29599,21693,-19097,32767,-19689,8385,19229,-31727,9302,32767,32767,-14289,20449,32767,31538,4267,3908,22518,31663,28928,32767,31863,26806,32767,27881,32767,-32768,32767,-25153,-32768,-18233,26667,-18748,27378,32767,-29471,31007,-32768,19400,24870,32767,-27966,-11706,-32768,135,2299,16172,32767,2016,17740,11417,-32768,12268,11616,-32768,32767,-7757,32298,-8579,32767,22197,16744,4573,32767,6991,31133,-2302,27525,20169,16448,-30098,-45,-2847,32767,-3431,-32768,-17317,-21170,-32768,-32768,25511,32767,4383,32767,-6569,3729,32767,-32768,-288,-32768,-3338,-32768,13079,-15895,5301,12186,-32768,32767,37,32427,-5873,-5989,-32038,-32768,4816,-32768,-22019,9721,-32768,9448,-13160,-20490,-6846,-32768,21832,24043,-28851,-32742,-25425,3565,-23012,-15749,713
6,-32768,-11162,-26888,-15178,21997,-11645,-32768,829,-21337,4422,-4213,-10832,-1388,32767,6057,-21824,-27664,-30909,-32768,26331,24750,-32768,-32768,-17538,-32768,-31570,11809,-26502,14484,-12428,5533,17534,32767,-18168,-25305,16313,9813,-14181,-32768,4169,16619,-5823,-19052,32767,9001,-21919,21522,3474,-11751,806,-26223,-10594,32767,-14842,32767,-19939,-6859,-20786,-32768,-6075,-26291,32767,32767,26686,5270,6251,-32768,-16776,-945,16161,32767,-11898,32767,3100,-20715,10091,-22849,-31681,14196,-18748,-32768,1491,-21831,16484,-28826,-25787,20272,-13732,-32768,-23808,-5079,-15905,-10440,15243,-32768,-18093,-32768,-3753,-32768,3030,32767,-12020,17919,-22910,-32768,740,29969,-5164,-19662,15296,32767,17866,32767,-30462,-23208,-32768,-26140,-961,32767,-4310,-26706,32767,32767,-2610,32767,26541,32767,-12815,-32768,-19352,-1661,-32768,25916,-5094,32767,7196,32767,17370,9703,19046,15345,-2313,24561,3886,32767,15926,-1098,32767,10378,15053,22949,-1557,-14133,17569,26149,32767,-8512,-14605,10291,10459,-32768,-18547,15019,920,13616,-32768,18372,-2598,-17810,899,32767,5582,11444,-11842,-32768,24225,32767,8946,20507,32767,19415,-32768,-32768,-4459,32767,-30362,20024,19490,32767,-32768,32767,32767,32767,32767,17042,32767,-15677,32767,-28527,-9415,-8675,1352,-32768,-32768,-32768,10743,32767,-8415,-10563,-17991,32767,-32768,-12820,-3288,-32768,-25287,32767,1695,-6318,32767,-32768,8654,-32768,14982,-32768,-13561,2783,-24643,32767,-19768,27563,-21152,16354,9177,16200,32767,-32768,24872,27578,30838,29458,-6087,19490,-32768,32767,7318,24478,4722,32767,16209,17552,19218,21409,27038,32767,-6127,32767,32767,32767,28818,27001,32767,32767,18663,32767,10997,-218,-27065,9117,-5828,-5837,3464,10604,12935,32767,-18189,32767,-3740,-8290,-16083,32767,32767,17092,31779,32767,26987,32767,-4938,32767,-11983,32767,32767,4710,18886,5158,22137,19799,-18677,-14993,13008,-32768,-4249,-32768,5171,-26726,-11052,32767,32767,32767,-32627,-32768,-6853,-31496,-19382,-25988,-32768,-10764,-12912,-19505,70,-12443
,32767,-8989,10217,-27045,657,14520,-11918,-31536,-3134,-3437,22968,24254,-3732,-32768,8653,-31383,21673,32767,32767,32767,-7313,-32768,8470,32767,-27451,19739,3571,32767,13884,32767,-3681,32767,-11902,-32768,3863,-32768,19977,28329,17551,-30021,15901,7349,32767,-32768,-9562,32767,-2067,-15597,-31754,32767,6336,-19254,-24165,-32768,581,-32768,5442,32767,-6230,32767,10323,-21330,-2789,-32768,16619,-3808,12840,13994,-28055,-32768,-11064,32767,-32768,-31137,1859,-16870,-7186,-5957,7831,25319,22147,32767,-32768,-15481,-4290,32767,-8387,-32768,32767,11489,13147,-5390,16713,27297,-1320,-17762,18165,-853,32767,-32768,-23749,-32768,28363,14089,6510,-22814,-17757,22148,-5463,-24869,3967,-11215,-27503,-17622,2779,-32768,1218,-32768,20140,29105,-1394,32767,7555,29846,12317,-7053,23048,32767,-20803,-11710,-9109,-15508,-32768,18417,1710,-23759,12013,18703,25651,-15262,-1890,227,3996,-16692,-31130,-12075,-32768,-32768,137,-3552,10908,32767,32767,32767,32767,10453,28438,-3979,-13761,32767,-5583,-32768,27456,-20020,27801,32767,11739,29282,32767,32767,24524,8256,-19711,32767,-30146,-27102,-2274,-1432,32767,-8071,-2997,-20730,29939,32767,-27518,29269,26255,23215,-32768,-32768,-19037,3049,-28521,25530,-21500,32767,-14250,-17782,32767,32767,6181,7351,32767,23869,32767,-21155,23185,32767,-15539,32767,32767,15950,16526,32767,9839,32767,-32768,-14754,-32768,-29882,-24340,3150,-27327,-32768,-892,-13556,-16039,-6317,-32061,27382,17441,32767,-17165,21909,-7668,12619,-7929,-32768,-8532,-1179,20946,14140,14323,32767,20244,32767,22692,32767,32767,30651,32767,-29913,32767,-23785,22276,32767,32767,-7482,21009,11358,28013,32480,4628,32767,13258,29187,2795,-2595,8244,32767,2462,32767,32767,11855,13362,-16537,6725,6903,-385,20231,29279,32767,-32768,2756,30011,32767,600,32767,-15382,29320,26151,31463,5181,32767,2384,32767,7403,8031,11040,-32768,-15600,-32768,-13560,-32768,-19872,-29143,15558,20181,5320,2599,32767,32767,9499,32005,31777,-12555,-9017,-32768,-5575,-32768,7381,-8356,32767,12710,3552,-981
7,32767,-32388,-20814,16196,26815,-6617,32767,-10522,6531,-819,11329,-11674,5402,32767,6610,32767,7887,32767,28353,-29432,-2000,-29229,7217,32767,-32284,-32768,-14084,-32768,-32768,-32768,8476,12315,3239,-32768,-21678,3610,8981,-23715,-4025,-32768,12276,32767,-14754,15721,15222,32767,-31331,-32768,-16502,32767,28789,32767,12697,9660,11984,3071,-10484,27410,-283,1920,-31790,32419,13488,28214,-21772,-19824,32767,29194,15479,-13158,-22991,32767,13431,32767,6650,-20039,23647,-4352,27001,32282,2517,-32768,9697,-23394,32767,3280,25220,-32768,-1598,23730,-32768,-15577,-24194,-11547,17167,-32768,-17141,-32768,-32768,-13634,-31875,10555,-32768,-32768,26066,-19052,-10006,27356,18796,13609,4775,29167,-10253,32767,-29958,-26920,28818,-32768,-2395,32083,32767,28768,-32768,-3482,-32768,-32768,1653,-12597,-12215,-32768,-15327,-32768,-21331,-15147,-20172,-32768,-15323,-20850,-32768,19708,-26514,5352,-23676,-7063,-7734,31784,31379,32334,32767,32767,2104,32767,-27941,-16932,-17748,-12095,-9534,-1414,-32768,32767,22609,-18740,28821,-18198,32767,-32768,-10237,32767,2269,-32768,2848,1883,13385,25580,6153,32767,15069,-32768,32767,29168,29147,1555,17626,-30258,-26534,-32768,-20287,-32768,-32768,31262,6428,15847,9354,-32768,32767,-1979,32767,-21730,32767,-29468,-14727,19531,-28778,-26659,194,32767,24459,5275,8790,32767,-18486,32767,4588,32767,-5735,32767,9117,32767,-16266,-22016,-24948,11400,-6599,-7252,-32768,18777,-17992,-31449,24409,-32768,-23340,5904,32767,32767,-1844,23417,-4532,-25156,11011,505,-7723,32767,22707,-32768,25095,-32768,32767,-32768,9753,-32768,-31529,-23052,192,-32768,-32768,8967,-32768,-32768,6993,-13213,-32768,-20498,-23050,-32768,-7035,-32768,9173,-32768,-32768,9079,-32768,-32768,437,15656,-18374,114,10280,-23310,-32768,-32768,-12218,32767,29551,9669,9444,-32768,-18204,-28530,8396,32767,32767,23524,14257,-1691,6790,32767,-12238,-32768,9996,18934,32767,-7255,9247,8062,31674,32767,32767,32767,32767,32767,14609,32767,-9477,-32768,-19581,-27109,-4015,20339,7031,11255,3276
7,32767,4279,-32768,32767,32767,7332,32767,-9664,-32768,-32768,3314,-10568,24042,-27164,-25467,32767,32767,6823,30326,32024,-14864,27632,4568,821,32767,-30949,1091,-32768,32767,-15869,27286,-9674,-32768,17689,20478,-32768,15228,-10441,18488,-4395,32767,4394,32767,31177,-27166,24143,-32768,-13974,32767,21230,22150,12551,23237,-6302,32767,-3226,-32768,28211,-10140,32767,-22173,-32084,-25599 diff --git a/tensorflow/lite/micro/integration_tests/seanet/conv/conv19_input0_int16.csv b/tensorflow/lite/micro/integration_tests/seanet/conv/conv19_input0_int16.csv new file mode 100644 index 0000000..3ecc417 --- /dev/null +++ b/tensorflow/lite/micro/integration_tests/seanet/conv/conv19_input0_int16.csv @@ -0,0 +1 @@ +26546,-30042,14035,-5634,-17795,-12845,-25394,-20980,-30593,1097,-26814,23356,-1798,15747,2181,-5894,17234,28526,-20937,30978,5598,3130,-29804,-28753,-22915,1087,-19098,17760,31991,-23419,-22828,-22118,11638,-14517,-10292,-11416,6214,18981,-24471,18616,28711,5952,18138,26072,-16189,26665,7154,23544,-29915,-12010,28542,-607,4563,24574,-1058,10405,-2767,24349,26009,-22321,-6158,-406,22178,-30782,-21395,-26749,13407,8476,-2771,-27830,-5787,12624,8607,-28520,-11792,-32421,-7938,3132,-18208,21102,-19116,-29529,956,4582,-8349,8,-20663,-20914,25194,-11470,5386,-694,31844,29867,3838,-13926,-3791,-11883,14396,757,-9291,14397,-10431,-23877,19414,-13357,-7514,-22711,-23206,-30620,-32337,-400,-9905,24314,-23667,-17769,-2544,-31407,29069,30176,-9917,12011,2417,608,7348,13067,-28578,28101,32418,-14555,-9558,2841,28687,-12239,-31820,16776,-10951,29911,2960,-17927,5903,7674,8227,-10534,10099,-10856,-2372,-11293,22703,-3332,-23260,17644,13983,-1889,-4909,-28639,-3506,-15842,-16501,5714,-30443,-22340,-29873,-22222,-7953,27572,27937,19570,-20756,-24510,16649,-22680,19641,-20736,-27120,21903,18282,-1783,21290,4037,2273,2318,-19299,-23843,1862,13458,9925,22671,2748,19791,-12988,32169,13020,17101,-6934,-1279,-3640,563,25718,29996,29240,393,-13426,-16013,-9201,29714,-32379,-23619,20986,-1
6198,-15330,15734,-17405,22470,10678,-32417,12132,-27468,-27246,-28510,-32123,-24423,-21760,15063,-9558,1742,4552,-24071,30281,27598,22811,31558,18317,5709,25944,-21111,-30661,-26599,29218,-31938,-28348,23995,15975,27671,20536,-11639,23112,-32767,7325,-15213,-24223,-11894,-16353,-5873,4383,-21144,1326,-25893,30995,-26563,-174,5788,-24587,-20109,-31056,19282,-32272,-20865,-19934,-18873,14142,-31221,23279,-12524,31936,-968,8429,16187,-28,-18257,-25875,-29218,10529,-6900,-17129,-10762,-30056,5885,23755,-1299,19816,-29884,14597,-17619,-14343,25919,11463,1142,12992,-20481,-22024,-5641,-8939,1615,-2183,30225,-10188,15700,-5198,-21386,13065,-16388,-11633,2994,8006,-901,-26652,30728,23338,-31484,-17025,-12161,-23429,-27869,17220,-794,-25028,-16051,-20652,3562,28990,26500,8299,19292,20683,12490,4568,1576,-14696,-18388,26180,-1989,-10799,-3451,-19865,18826,-16127,22505,11971,14759,-31002,19556,-24820,2395,18042,-23708,-22624,17048,-26509,29529,-2289,12045,2655,26432,-17792,-6043,8716,-20685,25036,-1674,-19754,-141,13764,-13600,28558,-22301,10159,8335,-2478,-685,18304,-8986,-15470,-5142,27776,323,-13228,19222,-11115,-12010,31996,-14493,-11856,-19493,9107,-7402,-7439,31568,13480,18822,-24005,10113,17456,25227,-12690,16828,1426,1582,15242,17964,-32591,-15734,-19555,-25565,-26289,1140,-13451,-15258,15726,2472,-27288,16571,-20338,21054,-10374,1797,-8883,-4682,15253,-4255,-26847,3138,14300,3101,21090,-24265,14241,-2893,29829,-4680,-28443,22448,-2839,-29229,-23806,-14312,-9894,27938,25266,19857,16173,-21699,-14762,31825,-14005,-31913,-4113,-9903,-789,-5452,-24080,11308,1475,3296,-28857,20539,8194,1346,17273,451,10516,-27332,17276,21329,-20378,-14181,-23395,14942,-17189,-15144,-20288,-12101,-32765,16181,18432,-12357,-17944,-24225,18261,-1178,20656,17018,-25935,31390,-2277,-6498,19004,-7119,11173,-22522,27165,-30820,-12740,-21542,18468,-26746,-24767,30194,28721,-2233,-20904,15673,30840,20653,-6816,30088,-18385,-24310,-12406,31856,25317,-26819,-18191,27029,9870,-30196,-10506,31541,1460
9,1085,-15441,1084,-21004,5596,28656,-3873,-2656,13637,10713,-26461,25971,-32749,27162,26343,-32713,-4124,11003,199,29604,31940,-14955,4307,-15988,-17686,-13265,663,25378,-11635,2556,24885,-10280,19625,-27908,28468,15981,3853,-25301,-3248,25494,18088,25404,-21051,11598,20379,14594,-28882,-4764,-16965,23233,-28159,-9669,12114,-16011,27622,31891,23074,907,18703,29835,-22132,17520,24641,7894,27643,-12763,18721,-6101,-2256,-16902,4033,-22572,-12465,-6285,-26894,30227,29237,13823,-731,10270,-5434,-26099,5936,14970,-6595,26442,-22572,-31211,19823,6026,9841,26844,3775,-31420,-12685,-17603,-8308,25390,-2424,-18423,29783,17162,20394,-26723,-10530,-26158,5970,-13843,-6432,28262,-30760,-7713,-28430,-7217,-6234,-669,-24668,21851,-20511,-30891,-22411,-31916,3482,31286,28636,11929,9952,24259,28026,-25323,-18380,-30125,-7225,31247,20240,-12922,-25350,-25442,1186,23072,6724,-14285,32271,-9648,8252,-12942,9586,-7877,4884,-30306,8418,3799,32615,-30088,-5428,-12874,20172,28975,-5957,20431,21829,14495,-32038,-27872,-19848,32298,-4964,30306,-3154,23710,26362,-23300,26934,10434,18474,1230,-32631,9955,-30178,18762,-23916,11174,-7321,-17992,-32620,-22759,4868,-1856,-30811,9082,-16626,30616,-8223,5157,11599,11883,14181,-12640,17806,8073,-203,-24201,8010,-2605,19422,17688,-22338,21886,-2996,-19133,-4686,27034,24131,27992,6192,-26811,14053,-27913,30298,6057,-6384,2119,-32347,-14645,3596,-18615,12878,13159,-9693,28979,24217,20252,5561,-6453,-25923,30757,14478,-6478,-10015,29836,30016,3227,4065,-14373,-23547,-19120,-14427,-23198,8746,-723,29969,-3657,-19044,531,18780,23718,10910,14629,-25596,-5719,23297,-9100,29863,19896,-15781,-25276,21509,27203,-17674,-10297,27453,28310,20597,-16725,-19658,9475,25040,-4906,31638,16151,5570,-8412,1538,21226,-4446,-19977,-8466,-14613,-20502,28121,20928,-30304,20183,19873,14996,24465,15324,655,26349,16550,-18929,9652,-6074,-29416,25606,31618,-29186,2388,-7283,17611,15069,-31407,-21947,-31665,-20754,14025,1088,1638,-4352,-22473,5889,-4882,9431,-4855,-5918,-898,-2
7304,-3897,-2209,16505,27612,7367,-30611,-6117,-27241,12392,-6567,-24031,-18761,24127,-5396,18873,96,-13346,26580,-24053,22325,-8902,-3922,-16630,-24272,1289,14059,715,-15888,-18524,7767,-25450,7785,15599,7474,24931,-20701,11681,30357,-8980,13902,9399,-18204,26645,-11617,-6230,-25468,16651,20252,26956,7486,-4788,-21335,11247,8051,-8059,30322,22160,24567,-5850,13421,-7082,-27862,2596,-29150,-21502,-28255,-23773,2874,16081,-2944,18505,12819,-29151,-16487,-31502,-8997,-1005,-7344,13489,9441,-21016,-17942,4396,-8989,32131,27182,-13035,-24785,-3122,14130,-5785,-2286,-21909,-26920,-27194,8063,14817,-21231,-12162,17722,1737,3258,27296,5996,14805,-4917,-20682,2203,31565,15513,-4418,-7744,26238,-32649,-3987,3496,2032,12748,-28779,29644,-28735,17228,32478,-14761,21297,-11465,-11161,-23894,-5075,17108,29813,3788,-2929,15361,-31319,5140,30269,-23457,-3764,-21469,20884,29644,-6273,15275,11923,5615,-11888,527,-31612,27716,3690,-28541,-17187,-17485,6487,30784,291,24509,-655,-25277,30850,26306,-226,-7137,-8341,14181,29718,-24096,-19304,4757,26699,12353,-12429,-18394,-26907,-2768,-26192,-1903,17493,-32038,19395,-28635,-3437,-7835,3179,-1124,-13165,-1905,926,-18303,-22483,-21695,1300,2110,22406,-24374,22280,-19114,-9391,-7203,-21466,14767,-20387,22775,13902,-32520,223,-15547,-8323,15880,-3458,-9773,4249,23434,-29387,17600,17078,-1426,-31555,14014,-19882,3557,5380,-7003,10895,13444,32611,22268,32316,-1580,-13598,7508,-17415,-31390,-30081,24268,-30262,15859,7113,-21269,23736,12790,-19863,-21115,19115,-27271,11745,-19845,-143,-4593,-13886,4282,-23605,-15546,30907,9163,-339,31946,-23662,-7592,-2768,28779,18947,-15197,14401,-26811,-21995,-156,6324,-14909,20924,-10148,21591,-27711,28446,-32702,-1808,27808,4566,-25937,-30814,-27203,10984,9387,1651,20420,17150,31368,5891,21504,-31541,28689,11685,29199,-7364,32455,-29609,30972,5384,-11535,6563,-14250,-4277,-24076,-756,-7745,28079,-25622,-2088,-17094,-13853,13165,20244,-8539,28979,9913,-800,15010,-3435,15600,-25135,-13392,6038,7602,29128,22401
,19172,2175,-31039,24089,1797,-25906,-2402,2797,-26748,-5069,1952,-9984,19419,13286,23970,3276,-26041,-5994,-27749,24419,-23158,28056,-26060,-23057,-25433,30722,-2096,16900,21216,16468,-1545,81,8597,-30325,29396,-21856,21593,-2630,-11053,-26708,26376,-14424,25854,-7343,-23727,-16563,-14157,24007,-23240,-11401,13762,-16830,26711,-32633,7962,15860,-29770,26587,22357,22977,-10056,-4796,28998,27333,-24588,-30853,13991,21494,21495,-9710,30986,28341,21761,23765,-1344,-7039,3947,-24478,32459,28848,18842,16359,7588,-28983,-11444,188,11018,21222,676,-15823,-31659,-25430,-7706,-30905,30132,12711,22816,-22155,-28910,-27245,25337,30921,-8774,-16684,28082,28516,-27261,15238,21141,27964,26296,-4854,10511,4315,20591,2115,-25882,-30335,-8812,21543,-10726,-13955,-27844,-3091,-10410,-28339,4990,-32550,-7563,-16726,-15245,1763,-17152,-12630,-26390,27955,804,20761,32481,-16133,10701,-15418,-19440,18454,-18561,22925,24857,1232,-23029,936,-17133,13027,-23200,-22236,31589,25595,2024,7163,-1204,10290,-13134,17133,-29505,10323,11532,30832,-8228,-18569,-28254,-777,-29565,-7636,-22381,11750,-28123,-9755,31812,-6659,-2938,-6921,13197,-32427,-27836,21917,6384,-22623,-12962,-26785,1719,3650,16113,-1780,9801,-31399,-5858,-7630,15330,-401,-1182,6709,10203,-13207,8940,-22146,-20741,14609,-12115,-24674,4720,-7292,29112,-29924,-4489,-5893,-4398,-20724,13158,-11037,-20289,18403,-28000,-3737,28954,31254,31096,-25853,26307,19436,-5820,20909,-1225,3437,13262,32095,-4825,-15751,-27146,-21342,-9361,-10684,11177,28766,-15679,8329,19764,-4711,2955,14816,8180,24696,26662,3934,-14497,-27053,19603,807,6813,31691,4728,5513,8909,-31871,-22887,-24659,-29570,-17433,-2914,-29717,-13363,3738,-4479,14752,-11357,-7264,20865,17129,5846,-28210,-21549,13478,-13471,-3885,-22513,31090,26135,-24564,-14796,6938,-16230,-19276,-26173,-20701,18386,11219,17707,26755,-17875,-20647,18966,612,26539,-19907,-14832,13886,7138,-24692,-10210,-25807,28130,18098,-7467,456,31144,21444,23609,1541,19354,11534,3394,-26031,-21530,9415,2604,2700
4,-25600,3256,-8595,-23461,18864,-2536,-11011,27788,-9946,28989,6857,10050,15544,21615,11938,-487,-32687,-23059,27449,30830,-1965,-6680,-14219,1880,-32595,-2852,15474,-4841,459,14417,22738,26184,1476,-5492,26533,-7388,26883,21016,-5300,19035,-21134,-26562,26903,19896,5029,-18338,-8589,4655,-29982,10020,-32226,-10419,4138,1409,-8218,19900,-4889,19001,10934,15403,1328,-24449,11094,-19667,29141,31520,357,18369,4955,1395,15985,15726,25990,-24464,12478,-11097,14754,5809,-19615,9917,-14976,32464,13269,-23196,21659,2422,30745,-24388,10184,16216,-4294,-31680,-16202,8803,-5470,30705,18151,-2294,-16735,12765,9368,-22951,-7719,13455,31174,19671,16939,-30729,-10355,-23142,-25994,7321,9887,-26632,28053,-8231,-16089,32113,-32530,13763,23851,30938,-28166,25503,-19556,28160,-4801,19984,-22649,2617,-6540,17525,26093,11815,-15973,-2755,13320,18649,-9550,-27528,1544,-12751,-20814,-12955,31959,-3048,13583,-9822,-8558,-441,27209,-19306,8337,13139,-32621,10870,-10226,11684,-20751,-8388,2673,-15675,-15462,-8813,-10698,-6306,30233,-12039,15383,-20187,18045,-25827,-29375,16155,30682,-451,-17,-31350,17472,-16538,-21158,16917,17122,-31412,30105,-3599,-31698,-28770,4977,27099,-30572,28949,-30149,14263,-30524,-29792,-11761,288,22612,32273,-30713,-26286,-12129,8574,9477,11270,-8515,-7496,-15011,-9735,6543,-3065,-30118,-24991,27359,-7567,15668,7179,23683,-31626,-5593,-8295,11321,10662,-15484,-18247,26817,28584,19126,-7876,-13645,27290,-10479,-13738,26836,4996,12306,15266,-29875,-18383,18149,18826,6249,-24874,-31497,-11892,-16543,-4379,-25918,10137,15781,29612,32217,989,7134,17484,-9183,27993,-14551,23799,25501,29965,-9639,-26686,12943,23707,-19508,-3635,32220,-16619,-14146,30319,-19790,16538,-6179,-4436,-16122,17484,-14040,-15136,-2627,30864,-4553,21784,-5172,-31259,15247,26837,-27813,1095,-7560,20131,-2036,21233,-26143,-3887,25929,-19679,6109,-18583,-15974,-18293,-2390,-18846,2375,10544,-12636,4007,-25598,-10594,14837,-6133,22599,-6974,-21825,3883,23704,5714,-12961,23648,-25154,-23093,-19721,-36
82,-18642,-25147,24047,-28433,-26395,-30215,-851,14707,10241,-16452,13739,18826,-27546,127,-30563,-18906,-10780,-2718,926,-30492,8584,-28327,-3314,13870,9644,1238,10688,30530,-860,32261,12362,21942,25239,8965,4184,-8206,31776,29908,-18941,-26662,-3996,14536,-6301,8173,-15322,-8067,-30624,-21604,16209,-23111,22191,-517,6462,1693,-1686,-8187,-6567,-13504,9184,-31804,28792,-30817,-4954,15713,-4206,-18255,3156,19328,20200,-22068,-20738,-24921,2612,-1444,-30652,13964,17238,2491,11932,-27923,-27641,9381,19428,10344,-13052,26522,-11267,-26393,28570,446,-32458,-30733,23518,17514,-22933,-13764,-13442,-11026,-29736,11338,-25764,15729,3088,8366,21710,20254,-4378,4989,28915,12778,-7981,26996,23397,6123,-29843,25576,942,-8862,10991,-5301,19444,-7729,129,11688,15025,-24254,11850,-23671,-30953,28302,16365,409,9229,7819,-9918,-30586,-5037,31699,25487,-27787,-30679,-9820,-17926,-22331,-7380,-29526,29294,4222,-1192,-14713,-13658,1872,16942,3786,-32392,2262,-6013,196,-2236,30177,5201,-32614,-23945,-24924,9917,7839,28564,11070,-27473,16197,8775,5240,-7018,22368,-8325,4879,-13746,-20198,13761,32611,-13328,23291,-27605,30353,5325,29580,-23462,-32724,-28926,-6833,-31533,-8319,32459,7818,6957,-7113,15669,-24799,-29196,3683,-17180,3870,-14096,6342,-3286,-31451,1175,-24328,1155,25963,19044,-11063,28485,-5926,23120,-17043,-25786,-25644,4461,-6398,13384,28992,13314,-19725,28795,-25988,16925,-16984,25286,-14879,-11475,9520,30051,14435,23150,20517,11890,-32100,-493,-15895,20236,-26471,-19247,-25431,-1876,-19339,-7777,-23079,19435,-19005,14799,28594,5569,26396,23604,28761,-4233,-12905,14563,-13978,10495,21966,25098,-21260,-12811,-691,13292,-9511,-25897,-13683,31411,-4465,30966,24148,-12073,25122,14151,27676,-3706,-17460,15494,8050,-12561,20915,-5298,-30711,-1196,21134,-32564,8350,16068,-21319,24522,-5417,-32607,-27249,-21586,-15294,-21553,-22257,-7158,-31838,23818,3116,-18703,-17423,-316,8790,-28070,-15611,-10273,-16897,-6015,-21766,-25056,5232,-19163,-30300,-18406,-20277,-14262,28008,12553,29106
,-20640,-5264,21696,25608,-7436,-19637,85,-31391,7987,8145,13745,-9844,-21694,21173,-19740,25029,5798,-445,28250,-32007,-23659,-31788,-15005,7903,-16968,-9424,22719,12267,30064,-26201,-16043,-270,27566,-5113,-1083,26444,15566,22606,-14244,32039,170,-18233,-26348,27273,30973,14008,7295,-11901,12287,27714,27730,19403,-28521,9942,-21802,-18227,9461,2079,32478,9308,-11813,17311,16937,24915,-8319,27219,-9237,-16067,-31064,-30686,32694,8397,-14695,2628,-16531,7276,9487,-17220,-26792,-16707,-12504,-22464,14256,12008,24650,-17091,-19693,-30843,11721,12810,-12366,-14576,17742,12583,-21684,-16357,-20623,4270,-6291,11461,-15482,6411,17726,-17552,14502,-2503,-24594,-16065,13754,24416,28485,-18132,31560,-28815,-361,-15851,25746,24011,-21639,29582,-79,-21532,-5684,-24457,-11292,-6014,14988,-31007,3268,21702,-16045,1666,-15310,935,-9943,-12659,23797,5484,23280,4518,-7632,-24586,17704,6922,12254,8117,3220,-19987,19589,8245,19626,7221,31436,22753,2615,-13950,7139,10547,-17277,-7765,-28969,17680,-18073,-6935,-19641,7153,-13791,-17966,27644,25977,618,19881,11742,-13983,25146,27795,-10444,-10402,22783,-10388,28793,13024,-20831,12895,29488,12801,-4448,-4250,-23818,30257,22882,1216,19318,18634,30815,21113,17785,500,11041,306,3908,14687,-10941,-6639,1345,-15485,3295,17812,-2894,18946,13566,-19946,4653,-3765,-32384,-1519,-25041,11522,-30338,25439,-29403,25813,22511,-21328,3892,28119,-28908,19355,12225,-843,-1296,11107,13695,11284,-302,7922,32085,-12398,11298,8470,18299,-3174,-4265,31547,27176,-18679,-17105,-28657,-5787,4392,-16278,25343,8537,-10712,-5469,-18553,13279,-19725,-24947,-22605,16140,-11453,12991,-19105,-3626,-12729,-20395,-28263,14626,21178,12409,-458,23762,-27595,12137,8011,29236,-7748,-4691,10301,30827,-16081,-4926,11369,24130,-7463,1534,3152,-21678,-5735,8960,16153,11854,9669,6900,13920,-29576,17680,-27260,22949,-26174,-3863,19983,6344,9502,10845,23541,-14566,-20792,-15717,-11803,6081,-12473,-30381,-23115,-4977,19899,-514,-14760,19972,-18594,30999,30611,26631,20791,-5384,1083
1,12546,-8473,-14880,-10255,-11540,28818,-25081,25174,29472,-20767,-5683,-18871,-20460,-1566,6465,-16727,-31832,-16445,-30354,-19099,-31212,10091,19965,0,23941,24625,242,-25204,615,-21098,10667,-10852,18536,-3365,-20851,-25477,-26541,-14945,-29894,-28368,2298,-20478,18994,-2985,9742,-10681,8504,11782,5195,-4590,-18190,6120,-30183,-30382,-5084,4184,28876,23259,25106,-19981,18899,-785,-13654,-24368,-15747,12417,30663,17530,-26886,-16344,19780,26748,9002,-22648,-22059,-289,-24727,-6722,27188,14203,-27717,-3807,3387,-29682,-11882,4200,-24303,-9151,4685,3863,20367,30322,-5027,22208,10736,17029,-3285,-23172,22409,-5222,15298,-2778,5049,-18775,-14299,7946,-6483,-10170,11546,-26291,-30976,30265,12686,18577,-6716,21865,22374,-26527,9574,-7600,12711,-9881,15523,28721,-2574,-9995,15927,7075,-28722,11546,14593,21189,16831,-19628,-9868,14870,26121,-17550,17601,4859,28012,830,-32081,-11334,21440,19097,-26609,-21802,-9345,-21916,-16218,604,4829,3935,-25348,-6297,-32516,8892,13710,-4683,-8710,19777,-19623,16360,6989,3829,-24049,-26671,-30246,22080,-30014,-11294,-10825,-26369,3798,-179,13271,-5363,-32146,19793,23039,19976,-7737,11699,-14663,-19874,10374,-12102,-8347,4409,-12558,2822,-14613,19,-15586,-30588,7494,18496,6644,-30620,-24062,-27979,29854,-10755,-12120,8509,6941,-12756,6190,-15953,-14734,15395,-25680,-1258,31358,13535,25989,-3202,-11024,12879,-18251,1175,3587,1622,-7841,13015,-12035,14421,-29201,29644,-24803,-23986,151,31265,12481,-21562,-8230,10366,22514,-9251,-3733,25466,-14154,30451,20892,-10010,28655,-24442,28758,20117,-22520,-16549,-7298,-24486,-32225,-30747,-11269,14091,10376,15491,-12,20726,-23935,7261,-30849,2536,-14002,-26090,30951,14499,32448,-26590,-12995,-11245,-29180,4288,-12468,6927,19228,31217,10976,11390,-22633,-23219,-7218,10330,-2938,23343,8868,30571,441,-4754,5,-30585,17164,-13029,758,24834,27712,-24214,-3314,-29389,1808,-10616,2147,-19315,-22182,-31214,96,-30000,-26869,302,22913,-24222,-27236,29821,-4001,-9678,3185,-2369,3999,27126,11105,-11212,25684,-1
2878,5705,-30731,17993,-17645,-19186,30859,-9008,-19659,-8185,-10863,-14923,6627,10092,-770,-22261,16494,-22311,23825,15851,11024,-18743,-30208,4985,-6015,-23915,12849,22952,12244,27813,20734,11207,-17373,-6592,26370,25567,-3495,-11640,-6598,21616,3400,2340,29090,15284,26200,-21336,-10628,6726,13240,-15962,16241,-20579,27404,-24887,-5361,29824,-8011,-8102,13790,3383,-30029,-19030,31010,-28374,5657,-16418,-3734,-24689,30349,19905,-12669,-8926,-3753,8671,19833,3246,-12783,-10887,25730,5771,31465,6276,20952,2233,20479,-4383,10798,-14009,-3267,-12408,-30338,-3191,-7403,-13402,-1609,31526,6780,-24934,-28176,-17507,-19651,-28323,7099,-21019,-2744,-7026,2126,16314,7853,-10079,-3139,18830,8243,-19500,9431,4524,-24460,-25126,-5023,14490,7422,8501,16683,3582,23572,16426,18159,-29694,-3755,-16849,16815,3595,-17650,12163,11512,28550,-14825,-9594,-22163,15220,20805,18776,11865,12551,-14866,9267,29465,8326,25580,-18282,-16593,25026,-7071,-31230,-23601,2638,-16877,22050,-18293,-3714,12715,-208,-13482,8176,16421,-28368,-11666,15195,-15931,6029,16730,-18940,12227,6189,-17061,22970,-3246,16660,-17936,-6032,31433,5108,-13460,11051,13396,32347,7005,-30202,26956,-27216,1207,-13370,6893,-19653,-4254,18946,16816,-15027,20378,-4143,1299,-31217,28107,-18124,-24126,16912,8739,8189,28475,27880,32070,5878,21233,-15849,5923,-25560,3006,763,-12913,-32288,-26907,-18997,28866,-15149,25172,-12211,10627,-18592,-16766,10304,27533,-30340,3835,-26597,18553,-14127,-835,-26512,-2068,16833,24556,-25260,10975,-5159,16887,-7370,19463,23437,-28816,16704,3252,-1060,-17855,-15446,18802,6980,-30934,-13050,14272,10117,-28611,-21067,-18762,14566,-6329,30181,10179,-9315,10413,18709,-30338,7184,-21504,-24665,-15694,-24939,-10879,-22555,20510,-10826,1835,18832,-17143,-13225,-16438,-926,-13431,-7929,13958,-16290,-8110,-25944,-4883,-17802,-24018,23671,6390,-10176,30588,30814,12567,-23030,-28345,-16452,-14649,32439,-14188,6945,-14121,2088,28781,-3332,-29173,22891,-2144,25385,-15982,-26166,3040,11212,-29834,1416,5601,-2
4127,-17779,-24173,-30962,-26117,-32231,23764,5630,-25204,12405,14197,15297,20663,-27967,-23823,28128,-18337,-2517,13884,19698,-1393,21607,-13398,28986,12177,12337,5927,13994,-12403,13045,1582,-2431,-24761,-12733,14909,-26616,-28734,4137,2718,-13886,-17440,20435,-26782,-7091,19345,-12467,8382,9144,-2407,-2966,-2648,1671,3167,-3944,16505,9098,-23449,24964,-456,20127,-11467,14308,14069,27115,-29468,-7463,21476,854,6440,-7389,7030,-26818,-7184,-30567,-30036,4359,5432,-12054,-30932,25511,28808,31379,13253,27446,23699,12167,-17735,-7932,-17448,-25850,27475,7930,-6134,3123,9015,15620,-9773,-16918,-6413,-23821,-16924,-20029,29196,-30924,-32037,9933,19945,8712,12599,30018,20435,15210,-30991,16845,5004,16596,-16410,2818,-20863,13203,28642,-21333,-11146,-5135,18284,-2466,-9201,-31558,8911,-1515,-8728,26594,-25485,31281,32220,7735,-9155,17497,-4133,-17734,-27160,8429,13318,13396,-15252,26532,-839,7786,19852,31749,9530,17915,24452,-4087,-15601,30243,-3410,-26276,7347,15757,15396,-3060,-28391,-2597,-13884,-1608,30794,-22026,-17402,-13854,31049,30901,-10469,25269,6663,-15000,28949,-11286,6620,26014,-30226,-4580,-11804,-6246,-3809,-18774,27361,-22461,9971,-209,12807,-20860,-21965,-10847,-27660,2357,2172,32285,20529,-7015,3771,-12055,5097,-16459,-23211,-7149,6996,14564,-7022,-18254,22017,-11959,-19125,11035,-20954,14331,16191,-29416,-30788,1778,12375,-24220,21599,24786,-14051,28635,31832,8068,-4659,-169,-24031,-28430,-8739,-30080,21698,12920,-24644,31124,9249,-30799,-16081,-6275,-31186,-7881,8605,13680,-15091,24999,-7052,-27474,-22874,23470,-25527,26728,-10927,-2720,-3124,2628,-1336,-9434,25759,11324,-1476,-6114,18776,21753,-27916,22363,15537,-23993,-6122,58,-11299,3433,-23487,32354,12365,-449,-27897,-21403,-20419,30668,-32049,-4684,-21332,7798,-893,5561,-8988,-18168,-25698,-20915,-3251,31118,-15046,-4010,10032,30297,-30705,-29702,7525,24220,5583,-15904,26367,28290,-11818,-21996,25780,-17391,-11783,-20561,18521,-8681,-9587,-13923,-23044,-3811,-23496,-19,-11057,1426,-30922,21526,-10
094,-26385,-4646,16979,24577,-19363,-17544,-12709,23893,19418,30125,25057,7604,-8226,-9408,-21971,-31530,13431,-13026,-30635,13746,13062,19025,-17955,-5616,-18668,30790,16088,-1432,16000,-14387,-839,-3856,21179,-26303,31156,-24534,-25105,-12484,-27738,-18333,20353,-18815,32536,-30858,17311,23732,23913,-12817,-6914,-3140,-12308,-20214,28022,-19153,4443,2575,1290,6459,-24760,26498,8128,-9585,9153,6507,-32337,5673,7309,24768,-28399,25086,-18979,26631,-22196,-23606,14892,1529,-19158,24485,14950,23688,32382,31409,1607,-6009,-12113,28260,-32589,-29619,17793,24197,25850,13254,-8705,-29913,10536,-23853,-28995,-31618,-16348,12702,-16248,12609,-31819,-3824,-3049,23843,-18933,-31877,7665,6365,-3693,-4428,-5531,-12979,315,10460,-1456,-18424,-10148,-21473,-23060,-9472,7770,21770,5777,-8127,-2868,994,20524,-11844,14036,-24683,-2881,19660,-20579,-3307,28048,-8558,3232,29355,-16280,26257,16605,29101,19100,-10635,27188,-26777,12067,-413,-7515,6883,29952,-2547,13023,-23814,16874,-28296,2930,24299,-2707,-37,31244,13306,-19023,11858,24084,8047,31437,-10115,-17177,-3535,12856,-25694,-441,13418,32072,22575,27488,-28077,23128,-21507,23545,21689,-31671,-32382,-31440,-25611,-19047,-29412,18636,-19802,24434,22015,-19781,12083,-31858,-16672,-23588,8750,20121,2539,-218,-25424,9931,20718,-22235,-18803,8432,-18654,-15414,-10961,1575,26260,4016,-23169,-9953,-4004,-7916,12929,-15297,-24215,17842,647,8676,-11726,-31715,-23479,24283,4852,17507,-8859,-8801,18339,-15259,-28384,-2594,-20000,-15430,11596,-5962,-27009,-5218,-24164,-20772,9501,-12242,14785,-20047,22326,14246,-22958,-16590,13063,-27930,278,11019,-7902,-13719,16408,-2299,13507,11148,23202,28758,-8437,-32063,29838,11328,-14555,13490,-3224,-30173,-7285,4490,-21442,-19094,28733,9243,27970,12355,-23973,28966,15509,-18903,-26824,-29160,6626,4687,15530,28654,16519,14539,8545,-16088,-3322,-11756,-30357,-30256,31897,26362,5160,19583,4080,-28226,27456,-3796,17683,16057,25483,32092,-27038,19230,30260,20923,8350,-829,7003,-11698,-1391,8430,-28239,4912
,15051,-24423,-7663,-22641,-27416,23495,7472,18257,2962,-31656,8138,-4412,10626,-25512,-11779,5083,-28348,-4908,1207,28622,-21888,-23526,4848,23509,-3471,14451,-6017,-3096,4875,-15986,15941,4757,-24677,567,18644,-7035,-29675,-10969,-17398,29742,9106,21221,30502,-15463,-11191,20747,-8103,-20624,17408,23648,26931,12103,-25754,8145,-24119,22313,-31496,2475,12892,-29718,14949,18157,-29078,6222,-3806,-25871,4768,-22205,-5996,24599,17421,6036,-7685,-29499,6260,28312,17817,24386,-12644,-31299,-11355,30556,9925,-17965,18341,-7291,18867,-601,-539,11297,3231,-22840,3153,-24740,11971,31159,-28073,-14610,-521,19738,-22798,-11706,-11135,16380,-8613,12192,18328,-24929,2136,-11324,-26376,11035,1720,-12655,8947,25616,-13549,12241,4298,-16746,360,16784,4054,-24304,-12135,12770,-1152,-32445,-32647,-7742,-8290,20482,-27747,-31931,-30366,-32187,-23324,5398,23975,-10038,27842,17321,16585,26588,-25659,-11388,26842,-27214,30292,30759,17557,16865,19688,-770,10801,-4239,-20221,9790,-4420,8998,-24016,19734,20980,-6094,-21423,-15602,-6209,32552,-24701,-26121,-8508,6233,26090,29617,24052,-27662,-20270,-10693,-20229,-12244,7877,-20653,-32506,-27347,26979,16089,-924,-7505,27535,-19146,4945,-5080,30118,-22763,20023,25279,-13997,6199,22541,-3444,29095,-12132,13833,6408,30304,-12055,7128,-16799,-5669,-12720,-5752,28214,-30695,31539,-3723,-7702,-20731,4812,10991,24973,21393,-21777,-1973,28865,22494,14551,28681,-28676,4842,-2476,-28722,28102,-9138,3328,21407,12201,-245,23074,2039,29152,-10569,8500,-11787,5576,21079,-21664,-22053,-28914,-13482,17839,-11099,24576,-18568,-30923,18839,2220,-18942,15097,8197,17241,-15000,-20965,-11212,15462,-2172,31837,3776,-23779,12537,-9128,-13320,-24799,-4401,-15383,21274,-10751,21571,-2664,25114,10059,-7328,-13003,19676,20565,21526,24048,13978,-12802,-23427,16520,32092,597,-6263,-29508,-4369,19117,15718,9573,11315,-16055,-11738,2446,-22266,-495,-2051,-4372,20733,-12690,-17932,-13692,-8405,32617,-32307,-7524,25671,-30557,27946,-23557,31044,23814,-11615,-23438,-11869,28
173,-31072,-31560,23863,-26635,-10924,16677,-2128,16893,-6596,13688,-29713,27067,13215,31732,12861,-11674,-639,22547,22104,17800,-15370,-18335,-8309,16,1569,27897,-8521,-11511,-9975,-3107,31098,20048,5490,-15476,-313,-17796,-4123,21415,20654,-3682,24858,-3335,-24242,27155,-29687,-21036,27658,-4163,25572,12081,6669,-17986,8548,11941,8299,-18523,29875,-8375,-16504,-20600,12483,11885,-3165,-32664,-31228,27497,9284,21473,-16783,-16094,-6479,-27592,5333,7639,-12965,-29927,-16040,13555,7856,6130,-16525,5273,-29683,-13059,1178,-10780,16554,-2595,-2454,4575,19,-20400,-9224,28061,-31803,-26174,10585,-25245,23013,-5129,-24025,-31350,14386,-32310,27858,2554,32684,-9370,-19640,21845,-2318,-16011,-27741,4211,-4578,-21811,3756,9390,-4610,-15062,-24405,-12817,-12707,21990,27743,-18108,-2605,-17480,-18207,24326,-22070,-7824,28124,-21651,-26869,119,-3248,20890,-18682,13534,28855,16393,-21217,-23874,-4089,26514,6567,-5644,-30762,18737,-21882,-29416,31484,-29428,-17091,29312,-22722,32252,-2848,-22695,-29671,30209,-12482,-16094,-19350,27764,31654,24082,-14627,116,7701,-31316,2604,19275,-15635,-28229,-14058,-9298,3276,18364,-17049,-9986,4353,10401,19924,18993,-11228,10491,25438,18901,10656,12446,-10282,-15443,24574,20891,-5542,30400,-30895,237,-26201,22096,365,30193,-8236,19831,13921,-25395,24144,30481,-31659,23213,-23930,-23632,6197,27763,-3050,27788,-254,-10703,12559,25864,30848,-23014,-22483,27411,-28465,25579,-24301,-17344,20314,4899,10671,-31847,26615,9592,-163,-30179,26852,25672,20428,-13033,-2465,7790,-32565,-27344,-26892,-24048,-30426,27772,28210,-12204,31179,-31617,-10123,11417,19358,-4510,-7839,6666,-6878,-7351,50,-501,-30498,-31507,6069,17397,27114,-10585,-31664,-25667,13717,-11987,27171,25576,-21061,-24966,32434,-11065,-5015,11229,-11229,-16532,-5618,29813,9931,32631,8004,28175,29533,1929,26817,-10738,29733,-18021,-31245,-20186,-15356,-5724,-28324,16686,-8573,19570,-3578,-28869,9330,-17796,20827,20841,-30459,2847,-23025,7925,-20656,-28122,3496,18567,20351,-10652,13022,-4093,
14866,10506,-3730,-8293,-6432,-19101,-5552,1338,-5798,23596,-20488,9782,14619,-7174,26371,107,17264,7316,31741,24790,-4704,14617,-31709,31301,18712,-12161,21344,-11538,25258,11508,-18878,-23814,-4066,9644,-25084,-28300,-26678,-30603,-16067,-10255,19539,-836,1226,-20257,11781,24160,-986,-8437,19448,21788,19166,-19325,23368,1005,1096,17576,26827,10742,11403,-23960,26448,21797,15035,6944,3770,15489,-10822,-18315,18343,-25248,-4165,-20417,24592,-2087,-20325,-20813,-28651,-2824,11237,-7882,-25947,30175,-7366,3073,3007,14640,-14062,18293,5327,27501,19938,-4440,17735,19040,-26878,-1398,-3022,-28216,19274,1772,11901,30696,16141,2767,-13608,18359,-3637,-30280,-23309,29231,7750,24317,-32671,16702,19745,-19559,9086,-32094,-11546,17193,18979,5220,-7993,13504,-19135,19406,26106,-18000,5920,15782,7663,-27894,-29359,16410,2897,-27781,12531,-13533,-10866,18825,-6601,8633,-29550,-8807,-21754,-6963,-4795,-16090,-15147,-31082,10606,27993,3321,9798,-9165,-31100,-28237,26666,-29068,14515,-24249,-11845,-29277,14699,-18546,20122,19615,-18620,-30134,-27492,30600,11159,15346,18942,-26955,-17153,10130,6097,13728,-14976,-7976,-4859,-21920,-27743,-10700,-18460,-8405,-27512,-31704,-13012,-27134,-19453,-10689,-13754,3526,-28646,20555,21792,29473,-7539,28050,-24063,-10447,-21758,-20383,-16786,-29244,-13450,192,32580,-4223,18221,-5077,26646,10327,-5386,26660,22200,-906,5739,32605,-13790,2117,-22047,-31616,-20444,3281,18938,15451,15879,-10359,25687,-16783,2965,6576,-20273,29433,-30470,12032,-24066,-9174,-6651,30281,24908,-8072,31830,20233,-12673,-17070,-8259,-17659,27279,25723,26517,-2199,2761,25208,-10638,-13337,12699,21048,9782,-20494,748,-5666,14575,-918,6755,-1850,23045,-1444,16447,-2648,-8621,24302,-4086,3250,-5086,29809,10310,-22856,24711,31320,16739,-16627,18406,19191,-9828,22386,7047,-1773,-10068,13820,27347,-15851,18627,15800,-5920,-1838,-2667,-21930,-25482,-21454,5428,8175,7866,28138,29253,-10304,-6519,26531,-18567,32296,13797,-13302,7841,19853,19415,-23589,17041,29310,-9049,29225,-9323,-
2593,-3565,-2010,-4741,28065,20604,15868,23051,4267,-12929,28505,-28841,-29382,-7829,30676,-5264,-12874,5742,-17977,-3834,9588,-4150,7425,-21390,16298,11861,282,16508,3229,-14075,-29134,23053,21469,26833,-26300,24990,-23764,-11648,5175,-3276,3761,20245,-28283,-25263,22987,-29125,-23241,-197,-2760,20507,27149,6924,-28986,-8429,-7443,11181,-19433,18346,-14863,-23212,-16316,-5889,-28961,-2965,-24820,11461,12176,13661,2663,-140,-13726,-3142,-24950,-15271,22332,-30435,20830,11759,30066,22418,-13210,17613,8962,5347,-2094,-10065,28413,16138,30191,-29418,-9662,27590,-9549,-16949,14248,-24440,12245,6501,-15150,-18633,4321,-6846,-16825,11624,-13561,-28130,-12345,30347,11524,16730,-8134,29187,-6282,13936,1897,-13109,-11166,-25223,29992,-26463,23659,6636,-4681,-81,19839,-1454,26797,-31002,878,20703,-4889,22069,29535,-15027,24272,-18714,15308,-15274,4252,32528,13492,-6645,-10377,17114,-9362,9876,-9037,-11522,-30789,22976,10135,30208,31211,-32577,6199,790,-17931,-28692,-15551,21718,-13086,32581,-17994,-27547,-2781,-21391,-24870,-29005,-6692,26644,27890,2429,-859,-15828,-13019,3182,-18402,-4461,-30903,20904,-28618,-11662,21057,20013,-3559,-15660,24277,-854,2437,13489,15452,31222,14631,5813,5784,23681,-7352,18370,-23219,8674,-18676,-18492,774,-30863,15452,25361,-3462,-11469,-3442,1827,-3501,19641,5501,25038,-5938,-14734,32631,14993,-4784,7987,351,-25116,-22720,11313,16020,26865,-9889,14211,-18615,-8675,-17808,-20361,-30271,-2481,7282,-21636,32227,-10506,-9978,18625,-15043,18292,31119,14097,14374,-28223,-13027,10996,-952,827,-1584,17151,29793,-13616,28614,29285,24400,-7041,20675,-17708,-7946,-20219,-28673,-12104,10612,-8093,693,-8929,-5562,2471,-28030,-23932,-1104,-8042,-16103,-29619,-7323,30511,-1590,20854,21408,-10240,30380,5001,-2713,-2218,-19538,-21236,-7549,16805,-22667,-16064,25433,13250,8956,23843,-20826,-29609,-16053,4990,10128,4456,-23053,-27524,27880,-7995,18784,29320,11270,26669,24909,-285,-35,-27504,-9286,-8253,1056,20984,16023,-23662,-10295,-5341,-1885,6355,24151,20676,
-17557,-23014,12242,-22823,9633,-562,-27872,30640,-5643,18956,-6742,-8583,10512,5081,21497,32528,-24211,3173,-18092,29234,-17121,1320,6854,-8662,-28978,19197,17094,1768,-28342,28091,3078,-25262,-18910,25809,-13438,-18501,13072,-19363,-2769,-7159,27850,29779,541,1852,-13574,26976,-27752,-17468,10877,10217,3769,8266,-4789,7419,-31542,-2965,12355,23442,32615,25072,-26776,28816,9507,-21391,19656,3057,29793,9908,3193,-186,20082,29443,31884,-29412,-1451,-18184,8085,5623,-22585,1189,-15773,12197,-18778,20545,-11876,742,-28379,24059,11188,11130,-12942,-30541,-19747,-30045,-9512,23035,1248,-19280,6643,4768,-28393,25323,13165,20989,-6894,-31933,-3424,-6652,-5080,2365,-14688 diff --git a/tensorflow/lite/micro/integration_tests/seanet/conv/conv1_golden_int16.csv b/tensorflow/lite/micro/integration_tests/seanet/conv/conv1_golden_int16.csv new file mode 100644 index 0000000..bc61a13 --- /dev/null +++ b/tensorflow/lite/micro/integration_tests/seanet/conv/conv1_golden_int16.csv @@ -0,0 +1 @@ +18008,-1963,-4521,20183,32767,28137,28851,32767,32767,-32768,4742,2414,32767,22387,-20519,1144,4260,5898,-32768,-32768,32767,5970,32767,17993,-21970,6988,32767,-30370,32767,8219,-32768,-20116,-10201,17488,7728,-8738,-32768,29412,-25245,-18199,15439,-9017,-20182,-7988,-32768,-12620,15485,32767,-2806,-16500,32767,32767,32767,-830,-20089,32767,-5755,-11710,32767,13339,32767,-13700,13559,15353,19718,5092,-3785,-32768,26607,-4082,-3064,32767,7587,17080,1107,17396,952,-9717,10407,26488,-5354,1802,32767,-32768,-32768,23948,-32768,-32768,-19568,-5394,-19567,-3832,25656,32767,32767,32767,32767,7446,-31462,-32549,12569,-32768,-25658,-32768,12157,29856,-32768,-30061,-1243,-26026,32767,18550,-8858,6538,-32768,32767,32767,-9560,32767,-9382,11208,-11422,32767,21626,-32768,16166,6388,-25255,9563,9954,1522,-32768,-26628,8460,32767,-30963,-9525,-24509,21397,4849,-20403,-26192,29165,-32065,-11634,-32768,32767,32127,-32768,-3990,-32768,-6043,256,8205,1481,26246,-32768,32767,32767,-25279,1592,13225,-32768,16238,3
2767,-3769,-22657,390,-27562,14626,32767,32767,26853,10153,32767,-23780,12032,-8030,32767,32767,15964,-32768,32767,13577,19092,-5384,-12494,32767,32767,-32768,32767,-3346,-32768,-16657,32485,2501,-14675,-19654,-29926,4207,14464,6902,32767,32767,32767,-10352,32767,16302,15018,-18190,21246,-15658,-21414,902,-8591,32767,17754,3754,-32768,21306,15630,-20685,14377,-32768,-3054,-25268,21593,10786,-32768,-7411,32767,-21364,15076,-22173,6251,32767,19509,8770,32767,-4597,14565,-18639,22022,31094,7919,18941,7453,-13467,11681,-2475,-30144,-3296,32767,-3203,31470,5785,12485,-1849,32767,-31675,-724,24981,6702,-32768,22009,1629,-9056,-470,22193,17969,32767,32767,24831,32610,-17328,-32768,32767,-4681,-29571,14005,4341,2203,1689,595,12127,15120,32767,32767,-17233,3292,-32768,-3637,32767,4174,-32768,14798,-32768,6363,2049,-7881,-16023,7979,-32768,1170,-32768,-8896,-6562,-17107,-688,-7030,-32768,-32768,-32768,9810,11958,18190,-29694,-31015,-32768,16388,7971,-19916,-32768,-14725,-9052,32767,-32768,-7574,-26187,-328,15921,32767,-1090,-32768,32767,-32768,-24851,32767,-32768,18168,-32768,-20650,32767,13823,23686,-11679,11285,4992,-3931,8367,-23317,-32265,2130,-17624,16867,14685,-32768,32767,2497,-13844,2353,32767,-32768,-15053,32767,-32768,2335,3479,1556,-17621,32767,32767,6642,-1370,-2085,-26494,22783,1432,-31480,-20037,-32768,32767,-18202,-5410,11092,27343,-30512,-8013,1873,4970,-22578,-609,-32768,11910,1513,-2107,-16705,24614,-25273,8744,-10070,-15510,-30671,-12241,-25386,13586,-32768,-20402,-21981,18881,17873,23811,-32768,-32768,32767,-32768,16423,-4150,32767,-15917,-4400,32767,-30430,-3432,-6896,-27226,-11476,-2086,-27984,32767,-32768,20112,19436,5315,-32768,-32768,24425,-32768,-32768,-32768,-32768,32767,-9374,22972,-32768,-16503,32767,7301,1127,-18786,14655,32767,-26779,30803,-32768,27375,-10415,-19043,32767,-8252,-14643,13764,32767,9757,12043,4598,7358,-20875,32767,18953,-13152,25933,32767,-2475,-18875,-15346,32767,26270,22837,10002,-4386,10944,6602,4611,-946,32767,23027,32767,-92
40,-12117,-32768,-111,-10635,32767,-32768,32767,-32768,26286,-14378,-8909,-19747,30645,4482,22686,25416,-11054,-23155,6241,-19217,29522,32767,27180,-11356,10163,6256,-8111,-19272,-15543,32767,-32504,20079,11154,32767,8128,15389,-9556,741,-24075,22821,20462,-32768,1301,29707,3955,18763,2207,14282,-14681,20137,5488,32767,-32768,32767,-20758,-32768,5034,29987,7786,-12,2171,11730,12871,24422,-12180,32767,-32768,22101,10698,7405,-32768,-2577,-18803,32767,5139,-32768,24728,25300,4825,23079,-10122,5258,-27569,32767,22976,-32768,-6607,-32768,-13762,-29760,-24208,-30238,11808,-15208,-16248,-15869,28147,-25915,3001,32309,529,-2987,32767,20688,-19411,19462,13938,-32768,-32768,23648,-25565,-13524,-16132,-25019,-17075,32767,29101,-32768,-25392,-28574,6985,32767,19734,-8355,28452,-32768,-26721,-13,7803,-18389,-28957,32767,32767,-12647,32767,22161,-2301,-32768,15951,30920,25323,32767,-32768,10182,22327,-32768,-4438,32767,-7811,32767,32767,-859,-19828,32767,17350,-32768,-735,-17981,-6149,-1885,12074,-23796,32767,23350,-3362,32767,-32768,-9345,-2849,13506,8657,-20116,2084,8599,-32768,-261,887,21443,32767,8380,13734,-32768,-9498,-27418,-15850,20603,12878,-8516,32767,-32768,-22035,32767,-3707,17009,-30586,-32768,-4249,7906,-6891,-12766,2528,-5715,32767,-32768,-30680,32767,-10134,-32768,6831,-32768,-32768,32767,26908,-32768,32299,22625,13114,32767,19805,-10471,-32768,32767,21713,14669,11779,5777,26658,-12629,32767,32767,32767,-20280,22922,2637,7634,-31509,-17580,32767,8929,13949,-26260,32767,-21778,-18779,4820,-32768,-32768,22947,4198,8195,13638,-32768,-28367,32767,-32768,27292,24016,-28562,-22768,19098,-32768,14964,-22859,-20546,1402,-22319,10982,-32768,25074,-32768,-19977,-2358,-12934,-17267,12753,17721,-5233,-6888,-32768,537,-32481,32767,-13392,-23195,29826,-32768,-17973,32767,-17043,2886,32767,-32768,32767,32767,-32768,-20514,778,-3141,-8669,-27291,21978,-16591,12816,9177,32767,9105,32767,32767,-4961,32767,32767,-11717,13499,32767,32767,32767,-5322,11160,32767,-2689,-9929,32767,131
37,17700,-9663,9841,32767,-14292,-5369,-32768,32767,32767,31547,-3115,32767,-11073,3733,-32768,305,-3827,-29675,32767,18061,-28997,11202,23785,303,32767,25270,32767,32767,4255,-8520,32767,7648,23944,-10398,32767,32767,-4821,-7273,-26233,-32768,335,-21488,-32768,32767,4802,-24799,-21263,-32768,-27192,-26317,16931,4976,14084,-23081,-10767,8379,-25792,14870,-21543,-16635,4299,-17317,-32768,32767,18181,21489,10792,-22320,-8805,5919,-7966,-32768,14205,7902,-32768,-32768,-5444,13521,16609,-32768,31870,-18977,-7928,19332,15126,-5076,32767,17049,9844,-32768,32767,-4858,32767,1034,-32768,1266,32767,-10662,-18867,23346,-8747,9362,32767,32767,-5194,13713,32767,18851,9467,-22172,32767,30726,-26948,16717,10679,-21368,28698,-32768,-4768,6679,-27483,3036,32767,-26419,467,-17007,15076,-24899,21086,-17279,32767,6781,32767,1387,-4130,-16613,-25880,15743,-32768,-20836,-16496,18448,8862,5299,-32768,31413,2884,32767,24267,-12871,-32768,32767,-6926,30786,-10935,4966,-32768,15252,-32768,-9243,-32768,-10752,29206,13945,11731,-13208,32767,-5756,4980,-7326,-32768,-17701,-32768,-13038,32767,32767,-16446,10155,32767,-6626,27050,-2841,-32768,32767,-8181,-2637,-32768,27175,-4437,9996,20167,-32768,32767,-31127,-32768,20787,9900,10597,32767,32767,-32768,1633,32767,17111,9843,-3944,-32768,-32768,-32768,32767,-32768,32767,32767,-16649,-13849,-9434,-14786,-15872,32004,-6021,13483,22986,-32768,9968,32767,30907,-6979,-10340,-8318,-13622,8297,-32768,24374,13725,-32768,-32160,-8929,14195,32767,27352,-11208,32366,6886,-32768,32767,-3071,-3930,-3421,553,-1534,4277,-5643,-11036,-32768,28295,-2422,-32768,9119,-8322,-32768,-15119,-32768,76,-5539,-1056,32767,-32768,23937,6535,28750,-27853,13562,32767,-4780,30713,18150,-5541,7498,6893,8019,20112,-32768,32767,23800,-25075,-23386,19313,-18097,-5418,-7781,-32768,-32768,8243,-1803,-3109,-32768,-32768,-5202,-32768,-22471,-8870,13464,8155,-32768,-30768,23655,-32768,32767,1574,-1886,-14879,-14904,-15355,32767,-32768,-7064,14555,27474,-32768,14765,-27308,-20851,-3294,3
2767,-14670,14434,-32768,-9756,32767,32767,-7309,32767,-29346,32767,29343,32767,23215,-6445,32767,1725,-29636,12596,18572,6135,-4205,32318,-22034,14175,-3067,10680,-29373,14230,6289,-22468,23968,16761,6209,20935,-2440,25264,-5602,-32768,32767,9937,22455,-9442,1050,32767,-11267,-19867,32767,4562,-2272,-14875,5362,32767,32767,-32768,24686,-32768,7675,-3933,-32768,-9746,-369,-22014,-31097,23884,-3026,-15388,7415,-10585,-32768,-32768,-19299,4481,-5829,2079,-32768,12507,21149,-240,-32768,-32768,-32768,-9983,15560,-1912,-7891,5271,21238,-405,-5522,-29718,-28241,18780,-32768,-24619,-32768,-32712,10492,17639,-32768,-29332,22187,-32768,32767,-32768,29981,32767,32767,32767,20862,-32768,32767,16067,-32768,-5385,31762,32767,-32768,-17160,-32350,-6817,29101,-12215,31048,-21096,-29088,32767,-32768,-19038,32767,11501,-9251,1137,29272,-32768,32767,-1398,-32768,438,32767,9753,-32768,-1114,-32768,-24035,19053,8358,-32768,18498,32767,26168,2852,25545,22443,6688,21172,32767,-2882,-9256,32767,-32768,21424,-3105,-4615,-10317,29999,32767,3481,-5257,32767,7905,32767,32767,32767,-14159,32767,-19432,26989,4163,7632,-32768,-17930,15768,-32768,870,-20796,27260,-32768,-32768,-32768,-28454,-24437,-8220,-13250,-9133,204,3775,-20774,-20621,-23155,-32768,7720,3417,17879,20392,-10957,15709,32767,-16360,21557,14080,5492,-13647,32767,-22233,3640,32767,-23396,5003,-32768,-659,-32768,13305,-27609,-3589,-656,3340,-32293,-3887,32767,-15990,32767,-32768,-25961,4385,30117,28493,-13902,-18270,28807,-12934,-11527,1821,9590,21426,32767,-25240,10318,32767,14841,32767,-10088,31060,-32768,10126,32767,25346,-300,20606,8168,-32768,-23950,18856,-32768,32767,1602,2879,32767,12417,-32768,-20430,8124,-32768,-10202,-24508,32767,12531,-32768,32767,1353,-32768,-16762,-22345,32767,20843,-32768,29916,-11859,21340,32767,-7836,557,-32768,32767,2906,-11036,-11269,10200,27594,15059,-11470,-12062,-13487,32767,13899,18652,-27572,-7965,31076,32767,4668,-32768,9000,-32768,27090,-19493,32767,-23084,-9956,-24828,1773,-1369,3523,-2815
0,12377,32767,-32768,-2217,21114,150,1286,32767,32767,-32768,-31400,32767,-18529,32767,-4809,15131,23886,-8017,-32768,32767,21411,32767,-5633,32767,-938,16592,-32768,31975,-15142,-30275,-22998,-15307,18162,32767,11785,32767,-26434,24224,-21751,-16385,22921,-32768,32767,-19546,-6883,-18894,8053,-30063,-11267,1225,8652,32767,18489,29226,7302,12016,8927,26226,-29979,-32768,13626,-13231,15650,32767,-16489,-15529,32062,32767,6915,1000,-642,-20064,14362,23179,17573,-4172,32767,31980,-1069,32767,-26959,32767,26487,-32768,17241,-16985,26256,4653,18304,-32768,12726,20496,32767,32767,18105,-32768,-27715,25981,-14298,-17291,-32768,-1599,32345,10344,25048,13920,-32768,32767,32767,15297,-9737,-5132,32767,22008,4593,30506,-1812,4823,-4252,-2767,20100,15999,-32768,32767,-21446,-27211,-3734,4338,4907,-32768,32767,-13688,-27293,-25507,-1047,13488,16674,32731,6497,18080,-9807,26664,-32768,28733,-14932,32767,32767,-32768,8033,3584,-9289,26390,-12553,-32768,-22750,-19892,-6746,-6027,32767,-13730,-25655,32767,32767,-6283,17141,14462,-2260,-22008,-12627,-20101,16665,-32768,15200,-32768,-17545,2347,-32768,-32768,2330,-26165,-32768,-12333,7382,20107,-15900,32767,-2479,-32768,11296,32767,-4545,32767,32767,-16741,20139,9988,12875,-9924,19340,15539,-32768,-32768,32767,-319,-1780,25215,32767,-32768,-32768,32767,-12888,-32768,13880,-22945,-8381,13543,14514,-32768,-32768,-23651,-30386,-20324,-9613,7514,-32768,-32768,-2562,-32768,-2282,15989,5038,-4853,-22892,-32768,22765,-32768,8403,32767,4431,-26823,-32768,32767,29440,-4798,-29288,-8698,12955,-31366,32767,-3396,-17934,-29761,1729,-16816,15701,9732,-4712,-16703,-10192,-32768,-3731,-7587,4080,15412,17386,-24677,3228,32767,-4190,-7256,-18903,-10096,4021,-32768,17906,-32768,3607,-9751,15201,-5318,21925,25897,-32768,27737,-8598,5993,-11738,32767,32767,32767,-8169,32767,32767,32447,-1416,7155,18739,8963,32767,32767,26844,-15750,28810,7581,-22364,-27057,374,-32768,32767,-21375,2440,21400,-10826,12446,-14627,-26490,21879,-6588,-17506,32767,32767,676,18
098,32767,-2208,-21229,-14581,-1625,4857,-21507,26587,32767,32767,-28531,12585,6378,-6723,8434,10978,32767,-32768,32767,32767,32767,32001,32767,26446,32767,6791,32767,20123,-339,-16555,-6821,22471,7501,32767,11888,17201,-26530,-10389,24832,-28896,-24035,-6843,32767,9510,21586,3745,24127,-31363,32767,-21051,10477,-6592,-32768,-32768,32767,338,-32768,32767,-15953,9790,16082,809,7933,-32768,14920,-30527,-4754,32767,-32768,5280,28522,-32768,-3735,32767,-2043,2717,-32768,32767,-32768,-32768,32767,-13290,-10429,-4036,32767,32767,-14679,12455,17946,-32768,10052,-3879,15536,31605,10378,-10319,-5420,-1931,17951,-7442,-32768,3835,-28019,32767,-32768,-11003,32767,18066,27302,-32768,32767,-24859,-32768,-7093,-18867,2084,32767,32767,-28212,24807,32767,-32768,32767,7174,-12774,-8326,32767,31649,14783,-5617,28145,5833,25957,32767,24552,13202,32767,19872,-7004,6064,-32768,-32768,-32768,-32768,22921,-7516,32321,16275,32767,8230,-28737,-22611,-32768,5153,-24320,-19694,24509,9906,-5549,24824,32767,11898,11667,-9435,3703,18624,-25880,32767,-32768,32767,-32768,-19563,-32768,-21647,-18781,-208,-32768,-22483,-18630,4875,-32768,-1203,-32768,-32768,392,-12400,-6054,-4206,-7107,28001,-16378,-32768,32767,-25569,-269,-14486,32767,32767,-28074,10787,32767,-13108,-4905,16050,32767,-2791,32767,-25517,3135,4244,-15264,-11811,32767,-9869,-32768,32767,-32768,21645,-24152,16443,-32768,-17642,8355,-32768,-2139,-14221,-794,-2911,-31475,-27925,32767,-32768,-32768,10581,568,31579,-32768,-32768,-32768,-32768,-14484,-16974,-8936,27183,-16961,-20496,32767,-32768,5088,19461,-14098,-18167,32767,18045,-32768,318,-16240,6265,-26261,-9209,11725,-352,-10911,23210,23349,26988,-32768,-32768,32767,-19601,-32768,32767,-32768,-13542,-14306,-19803,-15679,-32768,-32768,-5477,-32768,-423,-12807,-7077,32767,-16211,-32768,-32768,32767,-11306,-7778,21966,32767,22654,32767,-12968,-474,386,23578,-25485,-16495,32767,8785,3326,-3379,4570,-2807,4903,24049,19398,32767,-7098,23364,-32768,25483,3139,32767,24737,-32768,27028,-3541,3
414,28340,-17526,1488,8339,-24928,220,32767,15354,4644,2966,32767,32491,32767,7548,-16495,-27698,-18523,12872,24408,32767,32767,9915,19756,32767,-12506,9748,-6471,14003,32767,13222,-2687,32767,10450,-2458,-24310,12618,-22630,19902,-4394,26896,24058,5466,-21393,4724,-32768,-28684,32767,-32768,6793,-27524,32767,-15475,-32768,32767,-32768,14695,22496,4199,32767,983,32767,32767,19369,-12258,32767,-17334,18179,-18772,25826,32767,32767,5135,1236,18312,-23967,-30552,24699,2425,23277,-24988,32767,-20907,-8566,10412,32767,31338,32767,32767,-25840,6002,13425,1008,32767,-9060,10042,-32768,-8130,5004,6807,-28618,32218,32767,-26252,32767,-13823,-7796,-32768,-16412,-24253,-32768,-32768,-31520,-5503,-13555,-4746,-20359,-17024,-14913,-32768,11434,32767,-10689,27614,-32735,-32768,18540,-32768,-32768,-29376,-10545,-28044,-32768,3661,16343,10196,-32768,-28067,23469,20793,32767,22010,-11461,-9083,-12822,-8569,-31383,18483,32767,-32768,-4890,-30404,-15449,10781,-32768,-10106,-18286,-19654,-18600,-32768,-32768,-29266,-15125,6790,-10412,-32768,12067,32767,-32768,-11950,16310,-32768,6689,18465,10750,27606,30295,19374,-22670,32767,23087,-32768,-11783,-8738,-32768,-1252,28730,-32768,12360,-9440,-24168,32767,-32768,21235,-25055,20562,8452,-32768,-25140,-18983,-20763,-4968,31290,26215,-10042,14404,-21617,32767,-17545,-13122,79,32767,9358,-32768,-10655,10498,-2598,32767,-19230,-547,12708,21806,32767,-31003,24920,27074,-12495,-32768,-32768,-25692,13607,-32768,-32768,-26454,-10818,-25558,-10279,-7711,14666,20307,-3405,5622,222,22188,4243,-12725,-25313,5349,-21261,29623,-26709,-11512,-26439,-32768,1595,-32768,16807,18989,17708,11649,-11686,32767,-29849,32767,-32768,-8070,7672,-24746,32767,-17359,-32768,32767,-19942,-9714,-23739,32767,17238,-32768,-2349,-20641,28189,23738,-5300,-1829,-32768,-32768,-12524,-32768,-24054,19813,-20146,-8780,-32768,32767,-5819,32767,-20705,-32768,9175,-4331,-32768,-32768,32767,-32768,-11414,16574,28170,-32768,-19984,-21489,1356,-29703,32767,9364,-12999,9461,32767,-32504
,-8900,-1542,-32768,32767,5747,2729,-7140,32767,2571,32767,-32768,-4257,-47,-6659,9406,14051,-32768,32767,-32768,-15873,-11232,32767,23931,-17802,-16,-32768,10514,-19889,-1048,-3415,31803,32767,15993,32767,-23238,-9685,-2715,18242,8035,18022,-5564,840,32767,-11830,-16394,9998,31683,-32768,-32768,32767,2862,3310,-14435,-30859,-5914,17809,8125,32767,32767,15923,-6476,32767,12612,-32768,-4987,32767,-27057,16252,-9093,-9269,-30064,32767,32767,-26610,-308,-32768,8591,-32768,26235,420,22164,32767,-14648,27969,-1620,-32768,-32768,17618,-16746,-29495,22104,-32768,14432,-1200,13367,-32768,-32768,13286,-32768,-8474,22497,32767,4056,32767,5055,-32768,20199,-32768,-27655,29948,32767,-32768,32767,10909,-9246,14771,-15345,-32768,-32768,23697,26555,-32768,-29381,-32768,6951,-26626,-15678,31184,-27083,-27545,32558,-4013,-1956,-32768,9674,-32768,-32768,21314,-20352 diff --git a/tensorflow/lite/micro/integration_tests/seanet/conv/conv1_input0_int16.csv b/tensorflow/lite/micro/integration_tests/seanet/conv/conv1_input0_int16.csv new file mode 100644 index 0000000..8ea6f35 --- /dev/null +++ b/tensorflow/lite/micro/integration_tests/seanet/conv/conv1_input0_int16.csv @@ -0,0 +1 @@ 
+-29859,-8009,10382,32202,9556,30238,-1691,30150,5782,-12275,-9402,-21701,628,32458,-30321,-6943,24671,-945,-32296,-25941,-15062,-10833,-14937,16504,14184,6144,9625,-14215,8636,-29324,-18522,26799,14220,-30923,25236,-659,7520,24240,13998,4519,26627,19275,1204,-10519,285,-29913,26619,-24620,-24278,-23529,-6311,-10214,-26522,13148,18246,-5566,17824,-9796,4802,-23005,-10868,4255,-23290,-17044,-14678,-3403,-30205,-17250,-32397,-16854,31931,6955,32241,-3629,-6577,-31452,-7444,-29786,2101,-9949,3727,-20107,29668,-5717,9367,-10024,-3841,28263,11378,3487,2382,16947,-15803,17790,-14984,-32091,24887,10701,-23496,26219,-9654,238,-4942,26486,21022,-22893,1223,-16852,-19811,20638,-8108,21476,-7657,-1057,12703,7356,-9907,19287,-17506,22177,14624,-2754,27095,-29754,-449,6640,-24963,-10386,10190,2525,-30306,15622,30567,-1189,23521,-12464,26451,16653,-15966,5006,17730,-31726,4237,-11203,-22243,274,24670,30766,-12079,-31277,-6227,-2159,14142,4545,8011,20222,6717,24056,19981,23298,9658,-23804,7516,7357,-31314,22347,5550,-25896,-29816,-29862,-21960,881,18424,-24381,20120,-7464,-2687,29083,-2565,-14073,4548,15163,19351,15924,-12231,-11730,-6495,3885,2083,-28674,30012,31707,-29038,-798,-6304,-977,25182,-26166,-29364,3052,13543,-15371,-18908,-17512,-13769,9726,-11872,12909,17652,-11386,7705,2728,-29242,6997,23737,-27364,-25688,10403,-16791,23027,-12685,23841,-312,-1191,13182,-685,3508,-16832,28338,24780,1056,29662,-13522,-11813,-27270,-15660,-22919,26608,-21737,9081,13588,-9359,28658,16073,-5437,-6813,-30638,-19352,27885,-29514,-9240,-29744,-19366,-13432,-23717,-7843,12737,-2197,-17001,-23136,-11403,-31251,8261,32177,9667,17781,-17880,25165,7454,3978,-27347,21704,21487,27969,-21428,2135,-27312,-30179,32399,24768,18478,-496,19678,-24140,22598,10900,-24707,-18286,17233,-19719,15216,-3390,5130,30522,-5115,-18123,-27962,27583,-3828,-7392,-30461,-10328,22244,-21111,25725,29183,26802,28348,-6023,-3954,-22879,9450,-18511,-13763,26604,30416,2202,-21945,15496,16185,14897,17615,26497,30017,7460,205
68,-20580,29871,16136,32444,25536,-16266,-17093,3840,-12585,10113,25966,-29695,25107,-16292,-10897,25149,-18264,31021,9053,-14608,-22847,28670,24311,-24203,23742,6256,-12418,12951,-10262,-13203,32118,10498,-15029,121,25936,-12578,11635,3108,-5273,-11887,4376,-17728,16089,-7385,13051,-21258,18469,-25801,13099,20483,-30366,-18510,18844,-21569,-7996,19336,-23654,19414,-29650,-14674,-29027,14160,26296,-20018,-21022,23178,2712,5613,3523,1974,5598,-6260,2939,-1221,-6205,17810,19513,22061,19673,-580,3473,15566,10578,31867,5324,21921,-17955,-22544,-6986,1465,26928,4579,-14158,18430,26809,-18324,-7283,-5699,31688,-27517,-27343,19508,31376,-29597,-27592,8336,-6796,219,31725,-10637,10995,-29634,29736,-18173,12634,-1629,-18183,-12043,16156,17315,28117,25860,-27931,20159,23707,18180,-10645,23232,-2017,-1335,20968,4945,-31797,-31511,19752,5370,-9204,-7887,-6111,344,15555,27517,-24414,11024,-28289,22105,27787,-32518,-18595,-28285,-10937,-1529,19060,8148,823,-5464,21881,-32143,-14947,-672,-2783,11745,3235,-3013,-2179,8879,-29854,21891,-13222,17497,-2502,27189,3514,19259,23989,3556,-13205,28753,26888,2793,24524,11479,19644,-17285,-8109,29132,-20124,25292,-28614,-24849,-25213,7851,5203,3673,-12263,-16429,-25000,25884,-5188,12431,15690,-2442,28326,-1863,7120,25182,-28878,-8612,-26518,16479,-7981,13544,3982,16301,8655,-14224,-16706,-12869,-17881,-10883,18428,4751,-19801,-4390,-31192,32053,27211,2137,1235,-6651,-8857,6429,29232,-9394,-11753,-2846,12464,-2880,11084,-11411,25860,4106,19566,19176,14326,29247,-18181,-12016,-31925,31183,26720,-27313,8616,-14904,26843,-15097,7137,-9757,-3183,-14445,29741,19874,-12258,-30465,12900,-2221,-11217,-14677,27764,23209,6162,-8725,813,12753,30821,-13397,20942,-7258,5945,27711,-29604,-6025,10701,-29791,23704,-6275,-2166,-10748,8786,27111,18172,23177,32069,-30836,-732,21356,-17301,-13449,-25085,30308,17190,-18703,3686,18287,-1391,-12245,19130,9129,25338,-32463,-24273,25909,-13188,-3849,-31720,798,-24481,3655,-27263,11098,26400,20625,-17941,27597,-22846,
11809,-11177,-22668,14386,29943,-5237,-11508,27381,-16867,-8642,-11399,10622,-31483,1766,-30686,-29338,27422,-27996,31685,11313,-13272,-28074,-11018,-13128,-13649,24785,-24143,22329,-24604,12995,17685,-5536,-2950,9667,-13692,11838,3753,4289,-8425,19063,24958,9055,-21125,-18031,3756,8344,14018,4599,-19155,-20154,7056,-30665,-25384,26971,-21363,8572,6359,31984,13783,16180,-26617,24203,5859,2494,-28797,-29220,24653,27861,-3590,-19293,-13660,-16098,9009,11435,-31650,6211,13330,-4466,9501,8425,1508,21579,9262,10168,9480,31641,31061,-8656,-7980,-14808,-5453,-24256,-30019,24375,18025,-32578,-2801,-549,-31168,12164,27255,-17331,6084,-27186,27862,29985,-11678,-15496,3376,-11689,16277,-10642,-26179,22007,27204,-2603,-25160,32748,-28361,24419,-18770,18338,-31901,5011,-14899,-25266,-18373,-21416,-29637,-4430,-12000,7418,-28847,3286,3152,11074,-24713,-17457,16817,20628,24585,-19533,-14347,-30805,-13905,-24757,15821,-29000,29849,6016,14422,10024,3974,-30181,-16831,31325,-495,7223,-11153,-4337,30202,-14327,-23240,5288,-26365,-21521,-8899,7823,8862,-25188,-25900,-10153,-3202,14463,-3634,14465,-26459,29461,7476,12355,15568,28123,-19550,-9037,-10937,15866,13554,-20355,28262,-11103,-18691,-9171,-10044,29013,15250,31752,14119,-4366,-27007,-567,25531,24665,-12585,-1421,16058,-11064,-12941,-28205,-24912,25738,-13884,-27003,27787,-14810,24941,11296,-9430,17663,-3791,9614,2245,-26936,18607,-6055,30769,-31256,27061,26562,15136,9230,-26371,-14120,-28258,32679,5081,10763,12803,3123,-27159,-27191,23966,-22724,20574,8314,25035,-19972,37,4737,14530,-2923,-3722,-27002,-8767,19383,-13660,31119,-18574,-31912,6802,-22200,-6224,-21365,-25919,9022,-341,9249,-18373,14753,80,17562,-6394,-424,-25343,-9140,16430,-14208,16331,8719,-12338,-10571,-18990,4718,5227,-22765,-14826,11863,2124,-5463,-15399,9824,-16997,32298,-24283,9669,25408,-18576,22614,14477,-6885,26129,-32437,-31213,-3842,-31013,-7848,3073,-29780,-1415,11182,-22492,-5615,-3853,19354,-9242,-4003,-18688,-21061,-14274,31576,-29443,-23530,21586,155
24,-25438,-6316,32112,-8528,2229,15691,9330,20768,10644,11603,20055,6182,-27373,9727,-30611,9538,-28847,9986,-23809,-30937,-17669,-12583,9969,-29470,27112,24401,21238,-13429,-25589,-24761,28020,-22046,-32092,400,-29669,-1619,-5768,-21310,-11448,-25350,17658,10552,30744,-5624,-5862,31390,-16644,-20767,-14518,-11463,-29979,29325,11429,-5427,31573,7890,-26768,-27404,12073,-605,-22420,-25466,-18090,26271,-11783,-19027,31496,-3706,7599,-28213,30665,-7930,31331,-11865,-31463,19443,-917,5769,-28192,-8527,2496,308,17561,-4019,-1073,16359,-5985,-3212,7158,24160,-7779,-5402,-6919,14659,14150,-24928,-28966,-27400,-9588,25178,-8398,17415,-5872,-12171,226,6831,-10794,-32697,-547,20193,-29244,31833,24439,-22119,27447,14712,30789,-14278,-23503,2612,21004,32142,17370,2918,-9631,-1509,7827,-22010,10465,-24976,20650,16912,7992,8042,16623,-26257,3869,14921,17356,31195,1663,8078,17760,8574,-15266,-15161,-20634,6359,29537,28127,18870,11474,8009,-864,-4896,-12342,2032,5506,-18241,-30185,10182,1773,-23565,10752,-3788,24051,11793,22537,-11576,-29536,7250,-28723,26421,22615,-729,15932,-23625,13279,-13227,-13433,15209,-13276,-32358,-2965,9859,27661,-20177,-31458,4421,-22755,-15295,-14434,15800,-5151,-9127,-10229,27865,15391,5713,-15710,-136,-19492,13911,2079,-24721,-10757,24894,11883,15083,-21161,-18557,-5872,-10997,-15582,-3001,-2316,-25718,4183,-10206,24855,-25824,-20826,-19090,-25780,27347,23264,-15015,-10630,-25383,351,32028,9778,-14099,30253,5396,20625,13093,-7679,-26405,31110,14798,-20967,17719,16181,3596,-19805,7987,-13917,13821,29361,16077,-5861,16795,25162,-4659,11853,-30297,20183,16536,-5420,-32657,-31044,-10117,-32678,22361,-8588,2172,1131,-14150,-18847,-19666,357,16892,22570,-31351,6879,13438,-22885,27907,13176,-4920,-15011,2063,-15060,22587,7784,14637,-20325,-31370,3059,2512,-28061,-27056,-5896,-16488,-17772,-23789,-17379,28029,-12164,12078,8697,-15168,-13795,10457,19443,-27555,-26808,2211,25207,-4826,11210,-30083,-13927,-18209,13773,23935,12395,15344,-11414,32512,-4315,27996,32
155,-30591,-11897,-29002,-15018,-2316,5638,-31620,-23329,-17951,26720,795,3972,21197,-6550,-6736,24950,-19146,-18720,21235,14512,-25514,-25726,20947,-6935,-30649,-11268,-135,-21789,26586,23361,31643,25127,14223,-20146,-17015,-23872,-20904,14989,28939,-14507,-9380,8485,-14536,23661,-26844,11068,-17339,14286,32150,27184,26065,15033,11456,29090,-3900,-18840,16136,13650,24588,-8555,3578,-24714,-15381,-26969,-20741,-16888,-7240,-19656,779,-8648,9449,4539,-11326,16662,-25393,29059,6482,-141,-13332,20847,25688,16925,29335,-22565,-18069,7292,20776,-2814,22129,-13669,-10001,-6713,25660,-7597,-30611,8545,12424,13652,-14276,19695,31726,11862,11934,-14559,-15773,-15870,-13795,-12090,-24493,-16374,26026,-5256,19545,-13549,8843,11121,-14767,6011,-23089,-17530,-21290,9769,1022,-24263,-13396,-11015,-17835,22246,7652,13132,-1162,-24014,-23815,-12588,-911,1677,-15128,17879,18132,-28445,6370,-15449,14995,-30678,14530,24193,-4434,-14189,-32645,550,-19403,18023,-28510,-31206,19527,-10818,-9653,-30350,12920,14794,23086,-7031,-17879,-22951,-23409,-4645,-21243,27963,1826,-29660,5797,-7330,-20835,-13749,22694,-14080,5965,-3840,-24535,26649,25047,12534,-18898,-26279,227,-13823,-28001,-28417,-17177,13579,-26889,-3251,-25500,21317,16892,-5618,30044,-10409,32094,-19404,-21305,9577,-27508,6146,-23320,21031,-5191,22332,-7390,-7551,-26450,-9250,-19475,-27998,-24468,12858,-25033,-24530,5109,-22191,8893,25,-21866,-26519,-10251,25795,5092,3930,9639,-14715,-31544,-28279,-16985,1135,-17769,21997,17066,9576,-5823,-27888,-30169,-2977,-5007,-7917,-10128,1333,14729,-10111,21654,-15011,-12114,18953,-32678,-18589,15391,1535,10904,-16227,30536,-4893,-21405,-12981,19139,-21452,-20132,-10060,-25919,3508,-7186,-4778,-6214,31800,22955,12852,19241,-5259,-656,30400,-20472,-30746,-22882,-29789,29,-21062,521,-8525,-20903,1546,25346,-8413,-31734,-6373,8433,30703,-23641,7107,-8712,-26373,30891,10061,-6254,-2950,3235,23498,-7263,-9144,27729,5102,22567,17830,-17945,-561,27449,9185,-2585,-5144,18858,15145,-9246,27946,1118
7,1415,28669,23175,5882,10428,4288,-4933,14887,-3785,-22463,10982,-26684,-6788,-10971,31555,13908,-6826,-25450,5895,4639,17316,25468,12677,-19931,16901,3670,22863,-11715,-15577,11181,11766,31805,-1464,10221,-30369,-9100,-28815,-5161,28425,28598,-21030,-8685,9026,-21854,29914,-19278,-24862,-7593,-784,-2032,-18267,-28429,30198,2711,-10060,18231,-28793,-15858,-27551,15789,31033,19370,12874,-9903,23493,-19343,30843,9506,17788,-10255,4202,30096,-9188,-228,5996,-14020,508,5446,7170,21482,17894,14609,1760,-4004,18133,20670,5212,7530,13554,30152,27101,-9760,-20392,23999,17764,-10046,4303,-465,-25760,21810,3457,-28489,30011,20353,11132,-26513,-29715,-28983,11696,22584,26191,-12160,27542,32742,12186,1966,-8484,-9180,22983,-28663,-2986,25942,-13394,-10765,31287,13351,-13626,-26108,14340,-15324,5192,8805,12606,-31636,12263,11608,28587,-4517,-4870,-11497,-16930,-24006,31784,-18626,2627,2890,-40,2770,1572,15907,5804,-29742,-15350,3118,-26133,32556,27426,-26221,5410,-3798,-30746,10406,-27026,11708,-16279,13224,20798,23123,23595,18792,11758,-25125,23269,-25675,26136,20128,-5111,21876,-19019,-19001,346,2954,-28733,30474,-3406,-32126,14154,29265,-12219,-10504,7351,-13575,7532,9030,-24182,24307,-25013,5039,-21335,8178,31048,-15923,-24751,19532,-28819,-22719,8366,9809,-1330,-26557,-3836,-32597,6332,-21558,25338,-11464,9252,-13104,28884,-23599,-20242,-31144,-5760,21454,-18544,6025,-2542,-21305,13552,3095,-3484,-10635,3712,-9626,17909,-26124,-5694,26165,7893,3558,-8933,-30936,-25708,-26999,-28343,10731,662,24326,14776,-25545,-16216,-6701,28352,-26696,-28288,-32018,-4351,-29707,20109,32261,-11514,6374,270,-25697,9845,5989,31015,-122,6127,3705,-20979,-8241,2054,-26628,24022,9355,-19617,7135,-8380,17203,4820,156,-29532,14170,-24468,9605,-5536,5884,-24649,107,1679,-27503,6293,11063,28334,-14256,-18637,-19382,25529,-15784,15768,-20467,32701,8806,-32498,-15423,7865,14079,-23476,-21402,-12169,-19694,10291,-4211,-31051,14118,-26765,-23144,-12647,-16784,-22681,26128,3460,-19693,24206,28120,6348,-
10705,27937,-14545,-11074,-28290,-20410,533,20914,-13122,-29721,7581,28827,-26583,-30404,-5334,-10952,-17034,13574,4447,-28904,-18003,6952,29411,4824,-1879,-16770,-13300,17014,30239,9439,-1212,17701,-8445,-11424,31852,20063,5894,-28374,-19521,13572,10601,10015,-31024,5215,14750,10295,-32067,-12824,2082,7918,18141,-15182,-22130,19139,17354,-18728,5109,-8141,-29727,28134,-7633,13403,20033,10204,31919,31666,-30700,-29981,11455,-18782,1569,13812,-3071,8038,97,-31064,4925,-14171,22938,28537,16747,-14965,18325,-31477,21593,-8701,25612,-12329,12814,1175,-4974,9200,-13828,-12506,25941,30939,-1472,7014,5916,3137,-18938,11119,-18420,18360,24530,-6246,17979,-6034,-31024,17502,-13755,11833,-535,17828,30715,27168,13214,3435,7894,-18418,17601,-4857,28715,-11202,19353,13819,-1609,-28473,13434,-13271,-14552,-3268,-456,29465,14504,-8215,-14968,31310,14008,-14749,8736,874,15751,18979,31605,21455,-18753,13223,22206,14092,-31777,-634,-21670,-12635,-24149,-3129,-14240,-17942,-28369,-16166,-31213,-14986,-9376,-17848,-24791,-8145,5264,-10603,-9577,-6637,-10664,-5526,9070,-5291,-24876,8818,-8512,3275,25696,25227,-7393,-29706,-23416,-1504,-26438,-13260,-13288,-19976,2451,-17381,1765,-27796,-7525,-22908,-16331,-7885,15492,-5291,12913,-29701,15312,16618,1504,-14495,-15444,-18768,10964,-25925,-19652,-15221,12568,-11349,14272,15349,-496,21552,-29660,32051,-3663,6007,25931,-12543,-10491,-26426,-11143,11700,32425,18991,-31337,-18879,2679,-13785,-9313,-13342,17491,-22063,6553,15426,-21909,-2398,7488,-23509,7887,-7751,11559,26567,-747,-22548,-15070,19179,6019,31858,15052,-7886,-30687,18571,-21262,18628,-16178,-8814,-2914,31816,-4178,8316,-9269,-28289,-30135,-27809,-21779,21156,14465,-26005,4474,-27924,22038,-30501,29976,15355,-10858,-16143,17230,-18913,-18240,-20205,1323,-19367,22337,658,20405,-678,6399,18756,-10970,-17560,20742,-19517,-22052,-3960,21922,-3206,21217,-10400,17588,29899,-23988,-2454,31605,-5323,-27962,-16879,-30658,-8236,32639,19317,12015,-13354,18377,23136,-17637,16542,15190,-17820,
28338,10593,-22769,21670,4373,28124,-32191,18850,20528,7942,-25643,9816,31553,5398,3905,-8721,11596,-31740,18506,-27122,17164,590,-9132,28117,-19472,31576,-16814,22087,32091,-21903,-17093,5187,28105,13302,8295,9623,-25333,-6095,1171,-16000,-5865,5265,13791,-31947,-32138,22280,6833,16138,14406,-29543,-1612,-16136,-13714,893,-17967,32014,-8288,24504,-18047,-28467,-8850,-16915,14346,14037,12953,-28815,11893,-20334,-3129,-28542,-2402,4759,-15560,-10037,5164,-31138,26627,14842,-5044,-30415,-9382,-19769,28116,-7883,-12745,-8156,-6134,6038,16682,26774,23614,4316,-7160,-10630,-8692,18248,30028,-11713,7197,9860,9309,-32469,-10717,-21224,19934,-18585,-5880,-2162,-30505,-21556,-21757,-20765,-4397,15623,-1272,27689,-7424,-3635,3482,-21041,-30810,17300,15219,-21643,-10646,-12181,-17691,11813,23914,-17941,-6309,15272,27135,-2170,-25246,20910,3643,30290,8992,-10827,4232,18491,-19413,-29142,-4077,29299,22519,25083,-12729,-677,28756,30661,8737,-16352,-20703,7539,-25387,-118,-9443,31327,-13217,6305,22515,-16815,-28806,23041,-648,16092,-2950,29926,-9872,-4911,12305,25324,8384,-29885,-25149,-18510,27651,-15173,28254,11557,-10340,-4688,30892,-32124,-8375,-18764,30475,21671,18397,-2234,7810,-7553,-20215,-760,19937,19173,10482,-16180,-17970,8388,-12681,24249,5448,-17290,27646,-22708,-27299,-18688,-1230,-24153,-2692,-31886,-17500,30002,2487,-5072,-6710,-1023,11433,-31896,14096,-25577,5727,-671,28236,29158,31194,-16707,-16010,-10633,19325,12990,31981,30610,25673,-10316,2815,26444,20375,-21417,29063,-25694,-5805,27027,-3044,-6176,20218,-7691,14710,-18870,947,-1208,15925,-11538,26679,32558,20717,17557,12540,-8856,-25712,-9612,24145,20875,-15285,-23746,23517,1234,3576,22324,-13084,12520,-7210,-23278,-16359,26558,-3556,32238,-11125,14613,-31431,27308,6735,10534,861,-24882,5457,-9817,-11047,2522,21624,12002,19488,7376,-26241,31257,-3521,-17157,2325,29772,-284,-18628,21297,-8422,14950,9373,4620,-13980,6435,19703,20330,18644,31772,-28721,-31493,-25796,21908,-27506,-21001,24696,19514,11646,-15347,-
24344,16516,-32248,-25490,-17269,26372,29328,-25135,-655,-18526,-16325,25537,-11921,-21289,19504,2600,-14836,-25731,-30729,17157,-15128,14612,20012,-4485,13377,25068,12383,-30698,-3735,270,2182,4098,-15641,7590,-31497,-28127,-24464,-31071,21643,13904,-23950,-32737,-21025,7753,12150,9242,14863,2823,-27762,11215,4498,28331,14848,-21139,-25724,-17,26394,7147,-10870,-15122,-708,-24901,-26616,-23209,-27827,-9864,10690,-11676,29339,30167,27875,13283,28518,10867,-19243,-7028,4385,-21077,24636,11285,-19905,-6856,1201,-11768,-11257,1205,-4647,25019,-26922,25672,-23829,-1820,-12176,-11769,8862,14944,1007,-29552,25583,5640,20752,-21584,-2495,-16083,21764,-17279,-28544,-20069,10369,6448,14540,8855,30495,18966,11235,21736,-22438,-14362,23046,-12708,29064,24454,24727,7581,-26359,-8936,-21667,10684,25303,-6160,-24778,23160,-29304,27053,-13589,-23808,24133,-7791,21697,20814,2684,-712,15547,-28872,-18334,7689,2284,24174,23466,-2346,-19545,-4654,-7981,25780,-28041,-788,-14245,-26116,-22745,22256,-12695,-26539,32016,-17765,-20280,-27709,10454,9899,20762,24815,8499,-4974,-12171,-8355,-21608,-31385,-10024,-28445,10575,2367,27867,-24774,-24482,-15997,17433,-9399,-7454,17403,3040,31022,-22591,1942,-31775,26028,-9238,-16178,19719,-19923,-17313,-26435,13758,25897,-24362,10003,-31102,-3930,431,-6019,-29009,15507,-31488,25134,17561,6952,-23328,24399,8263,2564,-27785,22516,-639,-13412,27969,-15353,-7381,-3301,-25464,13485,-14908,-28132,8171,18911,-5116,23535,2628,27756,-5034,4078,-20808,-24548,-979,4856,12849,18312,-22980,2212,-32015,14192,25294,-6384,18073,-10884,14196,19322,21679,18950,25322,-676,24385,15704,-28129,-7327,1236,-4780,721,-28571,4406,-31751,-2031,-12803,2457,25543,5695,-30866,7192,-2139,-26307,24723,-18475,-23427,2486,6190,-8721,-28328,-5469,18024,14228,1463,-29999,4067,-19459,-12859,12353,5711,13804,-19640,-3472,-47,-1435,-31509,-16784,29474,-16509,-21594,4513,14336,-9783,19933,2486,410,-3525,-27419,-5675,-26313,-182,-26699,11884,-13876,-24951,-12856,-27677,21413,-8522,2146,-2
8513,511,-3607,1780,29277,-20942,-19906,22131,23149,-4964,-3984,23346,19995,-9601,11633,-27666,-8404,16726,-32216,-2055,-27031,12773,-11,17386,8226,26416,-28800,-31170,-9297,29333,32283,23104,5667,-29267,-4076,29844,25204,-7796,12551,12932,5083,-9112,24026,-20495,31637,31133,-3440,-21631,19742,-9713,-9417,5549,-22340,3246,7339,27098,-25933,21173,2592,2711,12718,13288,-18349,757,-16953,16865,19859,-1419,21464,27293,22721,-2645,-28315,14009,27244,-14190,-1360,29197,27142,16880,18168,3524,6882,-14613,25712,-23994,-4631,-18666,31441,-27139,18603,-5828,-17898,23729,25438,-8785,-22090,22317,-27482,23332,-23197,23811,11075,5351,7540,-4407,-27476,11552,8108,-28895,5381,29698,-27291,11370,22578,-25503,21068,11367,-27705,-31007,20735,28056,-25357,2106,-16811,26182,28556,-7714,-2584,6769,-17425,26995,21658,-7090,21160,21646,25885,27522,-28009,12200,-29584,-25884,4758,-13590,-11744,7187,-52,-22654,-26805,-10656,-29620,-18459,-571,21957,-3253,-3810,-19083,-19225,2219,-13191,-24202,-25560,-2688,4089,-30485,29355,-3148,3163,-26735,-3309,-5589,8074,6394,21987,-28476,-28916,-19580,31768,-5907,31928,-3353,18279,-29405,-14149,-28716,16565,-26256,-16132,11413,-26574,-14121,-2040,8535,24758,-31374,-7625,17966,-1992,22224,-26816,-5026,-22370,-24571,4218,7561,-29047,-13633,-11639,-29380,-27544,21156,-23221,13916,-24132,14760,24249,-28362,11900,1307,4821,17667,15260,-14172,12331,-20842,21079,-17106,-19495,-10300,10053,-22961,18365,-21294,9038,25236,-8132,1136,-2076,-6199,25177,20772,-29369,10306,26681,-5824,-15743,19637,24371,-32758,12484,30252,-12681,-27842,19201,-18045,3407,9471,23636,25509,-9948,5503,6356,11871,-24759,-31090,3613,-20404,-2515,4421,-29789,20237,6064,31455,-16832,-30378,-16116,18743,22941,-8091,16945,-5952,27547,12577,2805,9688,16657,20442,21780,-27795,-31409,19833,16693,-9511,-11595,9280,-13055,-30191,22801,-10661,-8032,-21178,-5639,11768,26046,-8635,15559,12097,-11203,-383,-31684,-19422,-28719,-32406,-3583,16876,-5977,-8413,25413,-9039,-9134,1558,-26628,-3101,24220,3100
3,-6841,1474,18393,27633,-10537,-6931,9308,-26922,9420,30925,-8258,-14437,-8946,8716,-28976,24238,5220,31180,-5916,-5157,30100,-30318,23085,-23456,27092,17813,32715,-21962,-11606,27697,-662,-11910,-8046,20372,9877,7038,8656,-30740,10744,4712,-25697,-30624,13043,23460,-5278,-11979,7927,-21956,28960,17826,-27634,-25375,32168,-18328,-32684,25569,-16689,505,-9658,28150,-26822,-26994,-11360,-591,9278,-24991,6538,2800,-24420,16731,2725,17719,-25214,28599,-469,14634,27592,7720,10714,-26584,28035,29775,6970,32683,8434,5932,-16843,30097,-14526,-30291,10511,32253,-19574,-13509,25735,21062,-29753,8130,7451,6981,-17448,24396,-17973,-18994,27853,13917,-22261,7221,-1260,-16296,-16425,-1236,5108,10486,-8452,-23580,-11954,7477,-22557,17432,20493,15916,32362,101,3502,-5171,30674,5034,-1886,10264,21512,-30734,-8804,16088,14680,-25892,-24569,12375,6452,16120,-1097,-21673,29193,15866,10261,28979,-20581,-23362,-6499,-7998,-15273,-30259,25405,-4567,-22196,32597,-31359,-21885,-776,5523,10261,13237,-13010,8262,-9737,228,1788,-21994,27284,-5530,11483,28930,15224,-5380,-7606,8898,-28025,18020,-26946,11227,-9247,22128,-30275,10254,18437,29607,-6347,-6890,29402,-993,-80,-7258,-3632,23827,-20345,30629,-13849,-28081,-32025,-31208,-22501,-24165,-26859,31994,24374,21287,17964,-15354,-5247,-18610,-28433,10354,31583,31205,-16924,-4754,-733,-29666,23015,21869,-8721,-11446,-18974,-1609,-14876,5628,4413,-30705,-23115,30323,25836,17737,25799,26075,-8579,-23008,1766,14972,15715,19439,-14137,-16373,-24802,-31938,-4739,-28939,-23624,25697,-30051,-30731,28899,-10641,28542,876,-7695,-29223,-20343,-26911,12972,13626,9846,5146,-27377,-1073,12288,-25328,17711,7043,-25302,12583,22003,22386,13027,22992,1638,-27562,-7280,-20025,13962,-25710,-21672,4562,23801,28735,21244,-10286,23230,-19526,-17455,-7601,30789,-5246,-4506,-29810,-24829,10838,24689,7030,10176,-4638,26527,1284,15462,-3936,30315,-19115,11903,-7085,24583,23601,11903,29607,25763,5330,-15231,-3706,31012,17851,31611,-30415,-25073,-1362,-10086,25075,27855,-
26194,16065,-30924,22120,11078,-26323,-1398,-17842,-31630,-8909,-13041,30219,-18243,11347,29390,-4780,-24852,-14228,-9032,19773,8724,-5515,22639,30845,-27045,16656,-21583,150,-2670,-13320,8608,-25385,-3709,-17520,31968,22546,-23694,-27467,-26289,-30719,-11864,31637,-30059,-12087,7139,31522,31626,18180,-8257,-24816,25914,-22428,23128,15678,-13549,11698,26950,15061,22913,7347,9999,-6716,2017,-14695,13128,11927,-12633,8034,32387,-20174,-30289,-30639,18803,21713,29726,32326,15873,4748,-13021,-18610,28161,7562,9000,-29584,-15169,28969,-22699,3689,-24189,11756,-19857,-5919,-1369,8528,790,-21769,18160,24409,-21063,23793,23974,3592,15074,20817,-28983,2895,-4553,-13824,17234,5962,-31843,671,9727,22194,18850,1266,-22204,-27053,17674,21399,-10864,23136,14627,12777,25217,10007,28437,15172,-22925,-12094,-17868,3077,12696,14549,18441,-3419,27509,26318,-16163,8594,-23023,-10788,-24878,13677,7914,16733,7592,30902,-21685,17548,-17817,-14326,22643,9825,6682,-28734,6926,-13012,29369,-32514,27755,14400,-27606,27229,15914,-24019,-16498,-21155,-451,-29228,28993,24236,10992,-20243,-20893,4349,-8488,1417,2294,22189,-8467,-31462,3222,27612,-8275,18390,-3359,2429,10781,29103,20513,-4716,16682,-24554,-7123,271,-26676,-15231,23783,-10466,-17090,15106,29580,5885,-27473,82,-6411,4656,-24883,-10333,22257,-4311,-14208,13183,17424,-4419,-23710,-20623,-5112,8100,-20244,-31464,22100,4402,-32386,6869,29537,-22778,-16212,-11687,5228,-6250,-21836,6103,13803,5770,-11198,1192,-24109,25573,-7872,6699,-12895,5064,-22076,-5731,-12195,7775,-26176,-15679,25965,2181,9525,20605,11812,15451,-6107,13898,20868,-23096,14440,26278,19180,10212,-12217,-10374,31693,-30807,23419,-12928,22593,-30990,-27554,23883,8774,31694,-26766,-15265,31880,-28325,-30525,17518,-13030,8863,11566,5087,-13718,14098,31560,28393,-14237,-24523,-30889,32514,-13270,-11567,-24714,-9897,-12897,21884,-20960,-3473,31449,-8874,-17950,18661,1497,32135,21347,13899,-31523,2269,-16077,-16792,-9022,-10378,-9424,18970,19977,-13289,-10510,7083,-25201,12580
,1882,-24545,-6474,23518,-25974,2784,25411,3904,-8503,-19506,-19054,-20917,-23919,17973,-19800,10663,-11563,-16702,-27313,-4628,-28869,-24788,22922,839,12099,21786,10994,-17485,18147,30719,16825,16053,27675,7551,-5667,-5023,-956,30076,24055,4584,-7007,5453,8593,-2974,18910,17147,25388,-2736,10911,-29365,5962,-29142,24306,24633,10240,17049,20182,27770,-7558,-23043,-22929,23024,5337,-18299,839,-17445,12871,-26124,6750,-25355,-15741,2652,5079,-18794,8815,-4925,4257,-3692,29777,-9813,7795,-19565,-14910,18927,2819,-9168,4373,-18891,14474,-5033,-24750,-15762,-22628,-8370,14935,5522,2516,20028,18842,32080,-20778,-18237,6381,23395,26046,3114,27549,32556,16507,-8598,-24746,-7596,-12920,-2251,-12238,25482,1195,17934,30982,17878,711,4629,28939,4877,-8375,-7235,-25568,2857,-13778,9908,15128,-21170,11521,-11441,-25572,-24054,22290,26120,-15657,-31922,-30463,-27890,-14046,-14518,16052,10161,-755,18339,3844,16708,-8760,-28145,-14366,-11712,-9675,-22121,2049,-12894,-17213,26275,13020,11174,19776,-11420,12969,-21837,9543,27532,24704,10503,28713,12310,3364,2219,30268,23795,31605,-16586,-19781,6016,25058,-32308,30725,16099,-9324,30925,29754,-11122,2496,4842,17319,-31912,-31076,13120,-7138,3935,5378,23453,9013,-23358,3631,23815,12682,11185,-10010,3598,-17635,24575,-30348,23511,23017,9990,-6147,-24990,-15196,18930,24549,11428,-19546,-5469,25536,-7402,-2645,118,-11393,15266,-16535,18564,-4370,12389,29049,-2615,12805,-28019,1671,-19128,-25809,-16876,-11234,9097,-27873,10929,-14546,-1995,11570,14578,-28943,7517,-8710,-21642,3475,-24529,2432,-11086,-9973,10200,840,19854,1622,-19247,-3457,-7912,-18429,3421,-16268,-27847,17149,-31409,-4934,31852,23034,513,7214,-17262,1821,26005,19563,21828,27897,-23627,1873,-10034,31331,-15077,-3432,16429,26102,14129,25249,29946,-16667,-25620,7518,-24218,24310,-27990,18517,4266,17766,15456,-15557,-4515,-8173,5229,1008,-26165,11381,-23515,-15759,26539,-28342,-25331,26376,2655,-26209,-29587,-5594,-940,-12050,17363,28253,17193,-24485,-18860,33,-18242,-24062,-298
27,-24866,28195,22932,-25344,1881,28994,-12146,25570,-24038,-3802,-3427,-121,26727,1571,-3853,-1549,32710,-22149,-31170,-19857,525,30400,100,8144,-12594,14331,14496,30909,29230,21542,19031,-3232,29929,5521,31236,21796,-16300,-25920,7811,-17431,25719,7583,-18270,952,11826,32594,-3538,-15111,-20293,18593,-2052,18605,20334,28025,-11486,-14681,17658,-30086,14152,-8565,1091,-24798,16843,-11797,9318,5156,15146,-30503,30777,32418,29753,10315,-14591,23164,19825,14939,-11348,-26481,2712,-17040,22403,994,-17557,-10733,15522,16139,-6401,-11374,16575,15197,-21066,6119,28125,-9068,11317,6289,-15768,-24988,-597,-29850,-5128,-1755,30684,1627,21348,-26366,-5598,-11221,11878,-9566,93,23131,17861,5333,9735,27684,1983,-10668,-20962,6742,28130,14467,27620,-10275,-4780,9283,10429,-8556,-13305,-18,-19523,7679,-4952,19779,4334,14761,-5708,-26016,21480,-13845,-13427,-23969,-18679,19579,-15072,19192,17748,14810,-17310,30685,-18880,-9112,16274,-27505,-1391,-17927,-5825,17846,18872,19586,-4159,-27256,25264,-6409,16883,-14965,-31133,4654,-13055,3656,23145,-20613,-5031,-5719,-30736,-5133,5561,17395,26725,-5555,-2377,-8258,-3995,11289,24532,-1237,17563,5350,9553,-20447,13227,-23447,17388,-30070,-30149,-26843,-14158,29400,-12511,-27850,-9941,15788,-22170,-30119,-14921,27007,30884,-23733,22903,-16107,32723,-19891,11581,-3958,26513,-27873,-7591,13767,-16073,-9850,21459,-19170,-29457,-6848,-32689,-2360,23570,19461,-9514,-4751,27165,21400,-23315,-4530,-1837,-30520,15613,-17406,26267,-11691,-30050,-30154,15445,5937,-25829,-16488,15573,-9454,5704,-27469,30876,-4478,32704,-18760,20263,26662,7495,25015,-17100,5102,-30906,-23474,4853,-206,24500,26064,371,-27811,-28932,-5864,-18288,27835,-9399,24942,-17714,-18332,20156,28666,29387,-19114,-8199,-930,-20162,-20611,-18870,-10846,26834,25359,-13082,14307,-5517,-28050,-10918,31053,-317,6814,4803,-9701,-12193,24825,12574,-2747,-9883,426,24443,22710,-16788,-32024,3155,18996,15559,9391,8068,6061,23569,17048,23197,-9050,14837,13242,-23876,536,30074,2647,-7071,5804,
-13323,9941,-20555,12743,-31475,-31276,-13842,-4106,-22499,21833,9214,-19274,19112,3924,6061,1195,-27528,-2113,-13524,25591,-14031,-68,2209,-6570,31452,7670,-5441,-30756,15835,15190,-8398,-5692,15428,-26705,7114,17711,23611,4676,-31145,-6349,15294,22150,-10431,15137,-4271,2872,-26454,32586,-12233,-11121,-17059,-10511,1790,-27304,-30568,1664,-19271,25202,-28499,-7798,-25294,-733,-32730,11977,14121,-30279,12862,6487,26465,22248,26011,-13002,29852,-19401,31329,-29725,26304,31226,-14628,12933,18868,-18560,10319,-11560,30836,-31618,27763,16378,-4837,-32202,-13928,-26932,-3708,-7961,-23138,-26801,-13175,27305,-15233,-4602,13792,11528,-13699,398,-1879,-1617,-12133,15978,-13321,15098,3583,-1361,-25446,28033,13522,29231,17030,7204,25598,-5265,-13899,-4778,8500,31753,21897,13745,12903,-17229,6427,-16654,-15594,-3162,26206,-4971,-4794,9549,-2903,-3623,-10085,7379,3039,-20744,-7816,-17737,-28151,22507,-9714,-15441,12502,-17840,27737,-7157,-5598,695,-5257,16032,-27854,-29117,3226,2233,5858,10319,-19166,-11044,-18967,-23193,-6876,28714,-13206,-32328,-10176,22504,-17658,-777,-7737,30655,-2112,4434,19161,-5697,16369,1345,-22060,-25852,-27685,1520,13030,-18479,-5748,-13220,-13226,8997,-26210,29204,19518,-29918,-12900,5096,22075,-15670,-18843,30044,11318,4043,29453,-20983,26207,-21254,15138,-30271,4987,-7204,16254,20424,17071,29788,-27507,15057,24161,-7231,900,-16855,-24547,27037,15362,-18555,14566,24986,-25794,-18729,-19275,11565,-20708,-8508,5490,2840,-9339,-17700,31883,1731,7935,-27780,-23305,-11754,-3601,-18241,4911,-31170,-9153,-29952,10301,-28374,-28997,2407,31268,7774,3620,-10364,17364,15805,-5616,-7193,-4335,-16084,-1075,2837,12784,-31572,-28711,32027,-6873,3304,3869,6998,3056,19724,19357,14349,12486,-3313,864,-5822,-13210,-27699,-3278,-2886,9257,-22541,27728,3012,-15012,32703,-17404,17154,-68,8464,-14306,-16687,-26102,10475,-15243,-4945,15032,-32630,-8255,-3115,27443,-6887,4145,-21737,-19066,-25698,-9047,14070,27977,-5104,1118,-30118,-16133,25704,-9943,5538,23404,26784,12102
,19807,18545,10244,12032,18919,-28396,17065,-23354,4771,-29041,-23904,-30599,-4475,8967,-1169,-21112,-2477,31065,-4672,10639,26699,9640,21695,-18264,-23853,-1701,11599,19919,-9101,-23008,16627,12600,31418,-32545,-11442,2854,-10683,3797,14603,23360,-32615,-8515,-3124,19403,-3563,11704,14881,-17224,-18777,-20855,5271,23761,15060,-13477,-27387,29498,894,-10579,4621,-13393,-491,18874,17904,8666,17936,21468,32138,-31034,-31284,27303,31384,-25511,-14485,31425,-5749,14698,25721,-18150,16241,-19944,20020,15398,-26584,30985,10040,19792,14422,-12947,26043,-30980,23572,-6811,15266,-28808,4254,13492,5261,12047,30617,30174,-12807,-17420,31622,-26097,2867,19725,11121,10427,9438,-11696,-4105,23214,-30896,-4514,-18920,-1953,13458,1568,30200,19081,-9977,-11237,28637,4648,-2930,379,-7072,5072,-22565,-8458,18832,-3472,-26300,-29876,-4827,1940,8314,28948,2891,-23027,21853,-25993,13882,-27724,7976,-27466,-21414,49,-7858,-26872,-31270,-25576,31638,24289,25760,29941,6930,-5072,13546,-14326,2375,28811,24512,24430,-16763,-31825,11571,-17327,15703,-25801,-9171,25489,-28617,-17752,23392,-20290,-16829,-31671,-11329,13480,-1556,8727,-30929,815,-20469,-16621,28437,-22719,-3726,-3359,17726,-24022,3870,-32666,-9709,-21981,1514,5756,-16996,-10515,17569,-24399,13280,-2628,-4743,-15981,1095,30693,-2380,-29856,29859,-20177,-6977,-27667,12227,-2710,2286,2300,-10196,20439,-7008,15311,16382,7215,-24428,-25164,16833,-20873,-2043,-21916,-10583,-23972,-163,-29946,2393,-20352,-16983,18895,-24358,-25022,-27569,-10660,6654,-21307,-9899,10204,-26473,-23479,17544,-18929,-18944,23797,7627,-25251,15122,32239,-13532,-14860,10876,-2691,18730,-4260,-29088,-2363,-8452,-7172,-26882,-26654,32739,18042,27348,27157,-24702,1947,25970,-31061,-6666,30437,4463,-5512,31018,18878,-29187,-16773,-12760,14131,-4689,10314,-23311,-6533,-12323,27972,13133,10180,-2394,26640,-6256,3234,7825,-25471,7987,-15521,26092,18030,-25303,22683,-4126,24987,28759,-31997,14929,19646,15258,-32540 diff --git 
a/tensorflow/lite/micro/integration_tests/seanet/conv/conv2.tflite b/tensorflow/lite/micro/integration_tests/seanet/conv/conv2.tflite new file mode 100644 index 0000000..30ae6bf Binary files /dev/null and b/tensorflow/lite/micro/integration_tests/seanet/conv/conv2.tflite differ diff --git a/tensorflow/lite/micro/integration_tests/seanet/conv/conv20.tflite b/tensorflow/lite/micro/integration_tests/seanet/conv/conv20.tflite new file mode 100644 index 0000000..b43207d Binary files /dev/null and b/tensorflow/lite/micro/integration_tests/seanet/conv/conv20.tflite differ diff --git a/tensorflow/lite/micro/integration_tests/seanet/conv/conv20_golden_int16.csv b/tensorflow/lite/micro/integration_tests/seanet/conv/conv20_golden_int16.csv new file mode 100644 index 0000000..2b8424f --- /dev/null +++ b/tensorflow/lite/micro/integration_tests/seanet/conv/conv20_golden_int16.csv @@ -0,0 +1 @@ +32764,9266,24507,10821,32767,32767,-32768,-32768,32767,-6194,-32768,-29976,32767,-32768,-32618,-32768,-32768,32767,-20189,32767,32767,-32768,-32768,-32768,-32090,24273,-32768,12316,-32768,-32768,11363,-8681,-19235,-32768,18232,32767,29938,-14521,32767,-32768,12221,32767,-20370,9531,3351,-32768,-32768,-32768,32767,32767,18936,-32768,-32768,-32768,32767,32767,1703,-32768,14718,-12912,32767,1317,32287,9498,2064,-32768,-2688,-24842,14234,-32768,-32768,-32768,32767,32767,-9177,-32768,-3882,-32768,-27931,-32768,32767,-32768,-16292,-17978,-30270,24103,-22474,-32768,17233,32767,-17633,32767,32767,-30609,32767,32767,-32768,32767,-1228,32767,32767,-2545,-32768,32767,32767,32767,32767,12100,32767,32767,32767,32767,32767,-32768,-11739,-18547,-32768,-32768,32767,17666,28351,-32768,-8702,-22716,-32768,32767,-7002,30942,32767,32767,18895,-29547,7702,32767,32767,32767,441,-32768,-1597,-8435,32767,32767,32767,32767,13045,32767,-6542,-9052,32767,-14336,32767,9033,32767,32504,-18012,-3796,27417,-32768,-32768,-32768,-7803,-32768,-32768,2171,-32768,-32768,-32768,-32768,32767,-32768,-9572,8806,-32768,-32768,283
24,-13716,32767,-32768,32767,-8185,32767,32767,-32768,-32768,-29304,32767,-32768,20325,-32768,-32768,-32768,29981,-32768,-32768,32767,13734,-32768,23916,-32768,-32768,-23869,11460,20544,22338,32767,32767,32767,32767,-26894,32767,-32768,10567,-32768,-32768,-32768,-32768,-32768,-32768,23323,-6726,32767,-29653,-31989,-32768,-32768,-32768,-8973,32767,-17977,2386,5516,-11187,6342,-32768,-32768,-32768,-32768,-32768,-5529,-32768,32767,32767,27439,509,32767,32767,32767,32767,32767,7153,26,-32768,32767,-32768,30178,-32768,-4495,32767,32767,-3680,31721,32767,32767,32767,-7681,32767,13588,-5357,-3884,-18543,-21207,32767,-29943,32767,-12840,32767,32767,-23546,-32768,-32768,5047,32767,2627,-11849,32767,32767,32767,32767,30928,-32768,-18215,10086,21400,-32768,32767,32767,-32768,-32768,7160,32767,11687,-28735,32767,31587,-32768,32767,-20934,-2446,32767,-30228,-18003,14937,-32768,32767,-1625,23177,15864,2102,-29237,30784,32767,32767,-13687,7997,-32768,21628,32767,32767,-298,-32768,19950,9081,-13094,32767,11791,12834,32767,32767,32767,32767,32767,32767,32767,32767,18123,32767,-20628,13532,32767,14920,28166,13603,32767,-32768,3921,-31634,18544,-6831,32767,32767,-23387,-32768,2054,-32768,-241,9285,-32768,-32768,-32768,32767,23341,-10914,-3878,-30537,32743,32767,15726,-32768,14653,-24975,-32768,15182,32767,-32768,32767,32767,-26476,-20660,32767,9395,32767,32767,-32768,32767,-15653,32767,32767,-32768,3346,11536,32767,-32768,32767,-13341,-32768,32767,31231,24291,-32768,10185,24562,32767,32767,32767,-13962,8142,-32768,-32768,-13826,4384,-32768,-13322,-32768,547,-32768,-32768,21111,-12681,32767,16531,32767,-4221,-32768,32767,-2295,19656,-32768,-32768,32767,32767,-32768,-32768,-9064,-2072,32767,-32768,-15927,-32768,32767,5965,3815,313,-32768,28462,19282,-1739,-32768,-32768,-20046,14435,32767,-32768,-32768,-32768,-31709,-32768,19865,-1152,-32768,32767,32767,32767,-32768,32767,-16824,27392,32767,-32768,-3575,-8667,-32768,32767,-32768,-18852,32767,-32768,-25062,-32768,32767,5775,-6729,-10894,-
32768,31244,-32768,-32768,-31964,22225,-32768,-26195,-13636,-32768,-32768,32767,-29126,-32768,-32768,-14483,-32768,-32768,-32768,-32768,-32768,32767,6109,32767,32767,-25780,27765,32767,-23941,-32768,-18495,32767,20509,-32676,32767,32767,32767,32767,17734,-25448,32767,-32768,32767,5585,-32768,32767,32767,6402,17347,32767,31309,4336,32767,-32768,16284,-32768,-15854,27766,-14093,32767,23860,32767,-30284,-32609,-32768,-32768,-32768,-32768,-32768,32767,6094,17589,9419,-7098,-32185,87,-32768,-32768,-15716,-24707,32767,32767,3942,-16446,32767,-32768,1757,12834,-32768,32767,32767,32767,14442,-31651,9986,3843,-32768,9282,32767,32767,-32768,32767,162,32767,32767,9537,1479,32767,32767,15211,32767,21696,-32768,32767,32767,32767,32767,32767,15034,-15682,-32768,-28769,-32768,-32768,7800,-32768,-3788,-19558,-32768,-25730,-32768,-32768,32767,32767,-11289,-18058,-860,-32768,-32768,-32768,-32015,32767,-5299,32767,32767,32767,-32768,5848,20269,32767,32767,32767,-16692,12702,32767,32767,-9165,32767,24258,14124,-32768,-1982,-32768,-32768,16474,-32768,3003,-32768,32767,27911,-10366,-32768,-16330,-32768,32767,-32768,-32768,32767,32767,32767,32767,-32768,20016,-31896,-32768,17621,-23649,-32768,32767,32767,8700,-32768,-32768,-32768,-32768,-32768,-32768,-32768,784,9419,-4795,32767,-16921,32767,-11911,32767,-8633,4704,12274,-26244,6255,32767,-32768,32767,1876,200,-30201,-25850,-32768,-31125,3907,-32768,21230,-16403,32767,32767,-32768,-32768,-32768,32767,-32768,4729,32767,-32768,-21094,-18111,17532,-32768,1116,13752,32767,15644,-20144,-8358,-6280,32767,17641,-761,32767,32767,-18629,24370,738,-32768,-17172,-18997,32767,32462,1955,32767,-10807,32767,-26118,-6914,-32768,-32768,28032,13893,21892,32767,16265,15572,-32768,31634,32767,32767,32767,32767,14806,3343,32767,-14157,32767,30477,-32768,32767,-27806,25074,32767,-32768,-32768,-32768,-32768,-32768,-29099,-22801,32767,9902,-32768,-32768,11505,-32768,-17150,-32768,-32768,-32768,-5505,-32768,-32768,-32768,17994,-10868,32767,32767,32767,32767,22982
,32767,-10795,-29408,-32768,22314,-32768,-26421,-14887,-15934,17860,-32768,-32768,-1965,-4931,6364,32767,32767,26645,-32768,-1733,10470,32767,32767,-32768,32767,32767,-7932,-32768,32767,26012,31740,32767,-32768,-27914,-18627,-32768,-32768,32767,-16823,32767,-32768,-7571,-26778,-20137,-32768,32767,-12848,-32768,-32768,15455,17645,-32768,-11138,-12116,-30359,32767,32767,6422,28359,32767,32767,32767,32767,-20007,32767,-26316,-10820,-32768,-32768,-23956,1833,32767,32767,11552,8825,32767,14530,32767,32767,18784,32767,-2093,-29062,-32768,-32768,-32768,-32768,-32768,32767,8929,32767,-27334,32767,32767,32767,-32768,32767,-8357,32767,32767,-32768,-26817,32767,-32768,-32768,-4831,-5111,-32121,-7343,-29407,-32768,-32768,32767,-26472,29029,-17208,-27054,-32768,-21759,-5205,-32768,-366,-28088,-32768,32767,32767,27535,32767,-32768,32767,-11158,-31411,32767,32767,32767,481,-10042,-32768,-7808,-32768,-32768,-32768,-26279,-20484,-32768,32767,2908,-32768,32767,-32768,-32768,17329,32767,29597,21534,32767,6084,20886,-30920,-32768,-32768,-9951,-32768,-32768,-17951,-21657,-21930,11415,-32768,32767,32767,32767,32767,9441,-27666,-32768,-1598,20790,15326,-32768,32767,-32768,-32768,-32768,32767,-32768,32767,-1460,-32768,-32768,-32768,-24837,-32241,8875,25569,32767,31484,-23903,-32768,6239,-1213,12640,10897,32767,32767,10763,-32768,-32768,32767,-16971,-2399,32767,-32768,8481,3300,32767,-32768,-31095,-19822,-32768,-16790,-32768,-32768,-32768,-32768,23308,11466,-32768,32767,-32768,-30659,32767,32767,4539,16142,32767,32767,-32330,-32768,-13944,-32768,-32768,4283,10180,-32768,-32768,-32768,32767,32767,17398,-1593,32767,-32768,32767,4324,-7377,25077,32450,-27660,32767,32767,21195,32767,32767,-32768,17222,-14639,-5610,32767,16600,20088,-31126,32767,73,32767,32292,-32768,20750,-28157,-917,-32768,21141,-9121,-13213,-32768,15928,-32768,32767,32767,32767,-32768,-32768,32767,-3450,-25206,30092,-32768,-14166,23249,-22862,-13183,32767,32767,12835,32767,32767,-17674,-32768,32767,-641,-26759,60,6451,20432,6
158,-4858,32767,21881,31810,-32768,16151,15745,-4071,-32768,-11856,-32768,-2161,3536,32767,-32768,-4822,-32768,-32768,-32768,-32768,-32768,-32768,354,13736,-32768,-22298,-18743,-29590,8760,32767,-20143,1447,32767,-2074,-32531,-32768,32767,1668,18446,-21171,-32768,32767,32767,32767,2466,-32768,-18679,-20515,-32768,-32768,32767,32767,-32768,-32768,-16548,32767,32767,2846,32767,32767,29648,32767,19918,-12586,-32768,32767,-31655,32767,-7178,-32768,19248,-19510,-32768,16415,32767,32767,29285,32767,23191,-4354,-28638,195,-32768,-32768,25990,-32768,-32768,-32768,-32768,-32768,-28126,-32768,-12739,-32768,4160,2621,-32768,-32768,-32768,-32768,32767,32767,19783,-32768,32767,15674,18674,18928,32767,16600,-14449,-4921,-32768,-11012,32767,32767,15012,-32768,27814,13638,-32768,32767,32767,32767,-23418,-32768,9498,13511,32767,-32768,32767,32767,16035,32767,13634,-16831,32767,-32768,-32768,-32768,32767,-32768,1624,-31598,32767,-22982,32767,32767,32767,1543,-32768,311,-4766,-32768,-32768,32767 diff --git a/tensorflow/lite/micro/integration_tests/seanet/conv/conv20_input0_int16.csv b/tensorflow/lite/micro/integration_tests/seanet/conv/conv20_input0_int16.csv new file mode 100644 index 0000000..f10307c --- /dev/null +++ b/tensorflow/lite/micro/integration_tests/seanet/conv/conv20_input0_int16.csv @@ -0,0 +1 @@ 
+-28998,24783,-13464,25933,-6404,-17958,-5167,-31415,13512,-16732,20743,10718,3145,1157,29902,32548,-17850,5822,-9306,-3602,24335,-17037,28963,17997,32033,3969,-24414,31122,22096,10466,25395,18448,-32197,-31602,12395,-2527,-14216,-574,-31092,5622,-31547,9326,17494,28726,28185,-12748,17858,25090,21938,-2235,24297,-799,10231,18288,31845,-9993,-9917,30118,9013,16370,-8479,7892,22984,5042,12563,-28367,11157,-27461,-29194,19527,-22505,-25003,25523,-10876,17324,795,8432,30312,-8565,4018,31037,454,-16434,-3066,4530,-26416,-31158,17967,-5423,-30450,1830,600,22244,5145,10577,18898,23173,30959,5307,17886,-31087,-29936,-14066,27766,9826,-13676,-23584,-30213,-19490,-11077,14244,-31437,7328,-15602,-4732,16503,16288,-5212,-5770,-7503,14692,25907,-19749,-10640,14414,-24388,12869,-28253,19026,-20849,19751,-16515,14113,-27964,-18356,-8682,29183,1216,-30642,-27237,-2662,9525,8640,10042,8911,30062,17401,-19623,18221,-24954,17828,-20160,23044,14940,-19281,-10839,-10787,21491,30872,-26524,-6382,15995,-14086,-13721,9,-8563,31734,2573,-4168,-19778,8242,3520,18932,-21667,8478,-22989,8217,5903,29006,18610,-26904,8476,-18146,-27508,-26793,-29844,2103,27241,31938,-8746,-12460,-7050,26814,-19623,-15723,-6837,13859,22085,-4438,-19955,-30798,-13736,-19152,-15626,12352,14431,-21581,886,-26673,12104,-24509,29420,-13812,6168,-11669,-7679,8301,-8597,-3184,21346,-32565,-24352,-24390,19367,13029,5598,25053,2937,-8980,-2564,27675,-14522,22523,3660,-439,6443,-12166,8312,11346,-15072,-7367,-22994,-29006,22029,20278,-17012,24176,26196,14353,-3870,9853,7699,6690,-30674,-11264,14723,-12607,25572,-27622,-14562,-7439,11154,17549,10451,22479,-9814,-15091,14289,-9373,-17900,-18582,-32185,5931,15035,2154,32431,-24246,28006,7453,7797,-8434,14605,-6660,-9235,29869,-32642,-1357,-3533,-12565,26442,14276,30545,28431,-6750,-21079,-2568,12756,-431,-32341,2630,30975,13066,-730,-21076,-1896,12648,4399,-1530,17788,-31906,-12362,-10486,-16458,26358,13103,10691,19712,-13111,9360,17295,-382,-15719,-21232,-19282,-25772,-11313
,21785,15854,-21060,667,8157,19655,24314,15910,-1196,10319,-296,-20442,31663,7754,1675,-15602,-16898,-1205,9342,26941,22368,21963,4853,-2917,-7029,-2972,22753,23655,-11448,-19453,-618,1349,-23335,-30604,29668,-6050,19662,-25918,-552,-27120,19570,21122,19132,-20424,-17497,-18717,28179,-16502,-14936,30160,-23232,-3139,-32513,31470,-16395,30999,-25272,23052,-22475,-20240,-31884,4858,-8249,-18619,-7509,-5448,-16680,-18828,27231,-23895,-24299,25530,15557,16550,-30158,10379,-22033,32325,-3215,2980,16623,-27126,-5297,-30811,-10059,12521,9527,11632,-7971,-13894,-5601,30136,2979,-13022,26493,-6468,8984,6162,-30065,13473,16675,22636,-5168,30719,29429,25490,-30054,-31823,-4468,-15692,-21643,-1299,-30081,7678,6492,28360,-7282,-24812,28104,29292,6833,-6916,9933,-26602,20161,3414,-583,24834,10941,-17759,12221,-22764,29323,-312,4794,-11635,24901,16500,31502,-63,29767,13914,31783,-18484,11210,28377,30238,-16036,28747,-15234,23063,25053,3440,-6052,2062,32147,27933,25459,-20000,-5003,-20975,-28795,14324,-14567,-18608,10637,-19783,-24584,-28478,2518,16968,23946,15679,20988,-7864,-3232,-26844,23243,27027,24694,-938,-14121,13663,-4327,9420,14576,-11190,10147,5853,19915,-25744,17549,-19739,10901,913,-25689,-4109,-27904,-10771,-28590,-5189,-25187,32764,-14399,-3130,3140,-32383,-8847,-24919,9958,-18486,438,-31045,-23762,-16957,-28133,-6431,15681,-26255,-21160,7276,-24823,29301,-11684,-21797,-10130,-11924,-10822,-14415,20907,-828,6184,8863,3257,12068,22918,-11636,14804,-1694,26132,15718,3893,6002,4211,-8769,5141,-4004,20912,-32235,-1798,22304,-28819,3230,19647,-16810,-16956,-7253,2169,5602,8362,-16177,24104,7560,-8749,-9214,16830,11710,-30893,-4932,23763,-12102,14873,-5553,-14973,9605,23077,22411,25385,-6572,-831,26935,28191,-5274,7304,-21735,-638,-32467,25445,11872,31499,-30400,-4331,-14890,-13716,21905,24376,9953,15996,-7910,-21142,2699,-2407,26103,19236,4826,-7651,-7090,12180,-23285,-3576,-24634,8452,11278,18077,28277,17303,18944,-6477,7161,13573,-2148,14708,29418,-26493,23973,25346,-306
65,-27179,8705,11596,-18489,8447,-6426,10595,5013,4092,15624,-27391,6326,-13968,27887,3210,-3269,20250,-32403,15415,24484,8081,-20747,11586,-28933,24734,-25893,-31379,1244,-32181,30718,27238,-15139,-7254,-17805,-27915,-1161,1216,20022,-7854,24893,24595,-779,-7824,24860,-2053,-21881,11343,17750,-9165,6057,-1974,24931,-20427,31244,16085,15894,-11138,-2756,-4329,31877,19642,-32226,-12350,-9616,-14415,-26198,-24597,-29693,-20692,-22065,1232,17750,-31112,-970,14048,9363,28141,4898,-20499,27152,-7404,13143,24210,-31824,-24101,19537,1334,-24694,-10886,-21566,11486,-13910,17639,-29106,-21482,-14098,17105,-3410,-25482,9231,-27818,9262,23400,-6480,-21409,22583,-10974,32158,-14194,31958,-23380,13629,-13359,4496,-22933,-5220,30287,-7672,18782,14392,-22100,28081,-14640,4791,-22605,-31668,22565,29308,18979,-30235,-21929,24243,-18562,1968,2043,-5424,6630,-10207,-24509,-26074,14112,-3345,19805,30647,-6738,21118,-25615,-11226,-16041,-26764,-14882,-27075,-15200,13150,-15643,16504,-26876,11526,-10217,-5797,-11137,-21778,-23467,-24733,-30411,-16891,19,28604,21641,-32706,-8150,-17092,-13897,28535,18433,12630,-28485,13119,-18089,5445,-4592,-9118,30828,-4541,-10575,30517,3524,-16947,-27955,18241,-8838,-5329,-13567,32116,21214,11362,-8630,-12674,27096,7733,27398,31710,-26667,29789,27715,3391,-18516,12490,21222,12750,-15629,-9941,-28955,-2959,5061,-28941,-8567,1670,20813,-30222,-10733,-21699,11739,-28078,14000,14961,-25180,23323,-14147,19531,-13925,10889,15095,7152,-32421,31581,19136,-4229,-18481,25309,28543,8608,3645,14246,14935,-10029,16937,28930,-30784,-13568,-1211,18286,-25292,22858,26186,-4816,1416,-25503,-4415,10483,-25506,4897,-6888,20560,-588,-21854,2812,-16679,18617,-14773,-11480,30224,-1770,-17381,20235,-17568,-31579,-23871,-31098,25570,28370,25610,-5802,21899,30801,-15279,7770,-27817,6271,-14541,-14894,30216,-8989,17405,15917,3439,15166,-22346,-14372,19832,-17949,-29943,-9166,26366,11138,24759,21643,10601,32476,29715,16489,18615,9711,9021,-4092,11204,-12173,-27906,-24346,18483,30
359,28049,13097,-31770,-14113,10855,-17140,14412,-21782,25094,-23867,21875,9828,-19036,18240,-30885,-12444,8939,31478,18875,6522,28075,-10119,-32700,3174,30931,-4166,-27168,8519,-29284,-29189,-3264,-6746,4808,22282,-21490,1559,25710,-31571,7351,-15660,4184,23656,-22978,-7107,20162,-18182,25924,3927,14000,5507,2104,-27051,12216,32142,14630,3838,-25665,-2977,-14163,-25985,-20752,-3785,-15863,32372,-6706,5437,20386,-32623,26718,31534,-30734,22353,6776,-4571,16652,-8434,18555,15409,13888,-15703,21205,20813,29669,32604,9083,16581,-8994,8564,-17362,747,30848,23713,34,197,-8180,13135,15485,-21482,-15932,19181,15830,-4964,30166,31678,-8841,-15920,-5851,13780,2777,-27877,-32589,5778,6238,-29367,26639,-12798,-4072,30250,14465,-29657,-9955,2408,16646,20692,-15004,-20607,-10613,-29408,-23832,20053,-14102,19470,-3628,-25996,30763,-23854,32536,10153,-24440,10324,-5868,230,-10748,-8697,14545,-13516,26423,-24346,-10752,-22262,25662,17098,-30863,21885,-14254,-4551,-16714,-25192,2570,-8539,-5757,-25335,-735,-5661,-9132,-1333,8494,24233,19627,17705,27662,-22146,-24724,-28407,25543,29941,-14770,18213,7698,-24738,-988,13843,-8144,9530,14043,17636,12190,-10235,28949,24327,-20976,-21294,-17859,-3896,369,-13768,-18694,-19354,-18725,13092,8216,18192,-16693,28748,24188,17365,-11562,-12177,-7388,-32121,14436,-26529,-11876,27415,9800,28797,-9192,-13909,-24480,-16795,3906,10537,7337,-27967,31722,9542,-26278,-22095,19298,-6731,9688,-8386,-29844,-15427,8582,16125,14027,18058,-26890,-12246,-31133,-17918,6190,30550,16579,-19866,-13133,-1864,-322,-9496,-18355,30212,29263,-9892,-32117,-32204,27228,-2407,15610,23397,-27634,-25823,25631,-20001,-3644,-10,-17881,-1962,27816,-22210,26450,11395,-6913,-2704,30856,13239,-22497,-25282,-3958,-14809,-12532,14911,-29734,12679,-13505,-8353,13537,-13036,-12003,-9882,-32682,-7478,295,1578,19132,5247,-26213,15216,-22486,-23775,7352,74,19218,-29488,-12288,27508,6590,-316,-22686,13149,-20804,-21345,-20170,-22713,-20496,-27247,-10418,23980,-32188,13728,-18784,-26214,23
34,25473,-27320,-16951,-18519,-741,4085,-17855,-21116,-16733,27968,123,-21576,23840,10917,-199,14698,23976,-24885,-4050,9397,7996,22221,-9220,16731,25528,-30317,-25633,23667,10071,-16768,-701,-24542,825,-23907,23681,-14978,20251,26824,17620,30815,-6485,23303,-21460,-15976,11926,-29394,-5006,-4992,14265,-25412,7797,4758,-19006,16773,-13228,10206,27425,10924,446,-14758,5851,4802,12353,-22873,13263,12354,-20747,-24851,-32670,-13141,11631,22426,32377,-8427,5189,-32192,13579,3415,29794,10843,5786,29639,6504,10233,27445,3687,-7421,-25560,4694,13596,-29024,-4290,-31041,-19801,6894,-29029,22794,6925,26922,7500,-24862,-7437,8206,-23456,-22832,-10911,14407,-3266,22012,183,-12574,-4246,20316,10240,-14325,-4601,10658,-20502,24793,-28546,-16258,-4651,-27359,-12089,-821,19345,-8777,-29593,15943,-22861,-15379,24091,12408,-12221,10576,3602,14758,1242,-18210,30540,-25986,17038,-31641,23120,2078,2463,16658,28946,1383,-15092,28872,-14409,27444,-24329,12321,7034,4546,30508,4715,18460,1836,6844,-12953,27471,4839,-11737,-19592,-8914,-1509,843,-8254,23902,-13584,-28101,-30450,-2444,24358,-6079,-446,-6367,16245,-32347,28116,-31442,-6959,14813,-26151,18659,-31682,-14584,-15474,28652,12839,19829,17160,1438,27537,11268,-6312,-1983,18851,-11441,-27404,30706,-26135,-31993,24183,2335,26395,1947,239,-17420,6586,22884,29456,-22350,-12525,22875,12168,-18420,23475,10201,744,-18379,-12774,2736,27581,7067,-27735,-14097,7421,-2886,-22493,9585,25157,-24205,8091,-26253,-21507,-3598,31637,290,20029,-11971,-13489,-19281,-24576,18576,-19110,-30919,15000,14626,-2776,27344,-27640,-6011,-24231,10799,-32064,21946,1744,-15673,-5203,-22190,18677,-19852,1480,22529,13581,-13727,11932,2567,11002,6998,-11517,27233,5937,30390,-17422,-1306,-3475,-32620,-27486,16747,10395,-15599,30614,5350,27781,27495,14210,-1038,-7246,13137,23875,-1355,-7603,22537,-23627,-4197,-534,7153,3156,-9342,11013,-9390,26668,-6804,16934,6037,6567,-19202,-2171,13623,-32582,25875,10339,-21441,-17214,-12685,22947,-29442,-27253,29196,-24432,16167,12
826,-23899,-8814,-32096,1164,-22454,10219,-21289,-3796,5818,-4776,-27890,12295,-2959,-21800,19340,-2956,10614,14422,-28713,-15407,-25442,13931,4954,8876,23114,29393,-27660,-13920,-30887,-4747,5863,-7160,2888,24103,-32013,-5789,-31491,11164,-21212,1714,-18684,17990,-12199,-6862,-17914,21934,-13775,22375,-16975,-13061,20779,-31354,-18579,-1913,-9923,-15348,27465,-8188,20397,6963,12483,-14620,18562,10584,27639,14964,19682,25485,-7030,11217,5769,-24998,-21627,-20603,8097,13758,-8832,-26494,12893,9233,23677,29411,-6282,20930,6733,-14162,28302,-25814,-5445,-17428,13105,-6657,-2721,18864,-1391,1092,-12671,-1217,21525,-24770,-22209,12335,15573,-1016,28476,29180,-277,31693,14085,23680,-9123,-26836,26911,-31795,-1228,22322,19586,22238,-30294,-15188,-10661,2936,13125,-12383,25487,6154,485,25971,-1795,25087,-31319,11278,-30408,4528,17499,16536,12146,12744,727,32568,31417,-25874,-24951,-18217,-1117,232,-27136,21054,-10661,-11235,-25431,-13205,30977,-1505,12630,-16803,8281,-21213,-8321,-2557,25741,32007,697,-6259,-30027,28482,26803,9814,-4057,-28173,-31630,5880,21121,32360,-12032,19388,16985,-11948,16236,29109,-16375,-2044,7072,27985,10853,-30102,-3849,10912,11021,-25586,-11766,-19328,-24380,-20583,-24634,15534,2875,-673,19505,-24828,-20267,-20477,-9219,-26960,14241,31988,7566,29277,-2804,31978,15860,-18468,15686,4703,26319,-15587,13316,-5900,-30840,-18117,28784,16620,1235,21041,-29630,11156,-12768,17817,-11803,-21486,31995,19728,6054,-115,-4661,-794,4191,23328,32322,11255,20907,17724,20408,-5692,-16701,16512,-28393,20825,-32016,-2101,-15581,11044,-23309,-24186,5552,-20376,8594,-1956,3810,7059,-22232,-31861,-1013,-8989,19028,-11111,23515,-24457,14220,-23825,27067,-18176,-5813,-18259,-25703,-2097,-8555,24072,-15915,-6038,-3885,2792,-31676,15051,17358,-23466,-10817,-11439,-4669,-24794,31871,-17697,-24097,5502,-4644,-9098,-24777,-10915,-20128,-26987,-29516,4373,-13938,-17547,2858,23301,-514,23399,-6354,6466,16745,-11419,-16490,6935,-5740,657,11280,-11623,26876,24711,-23463,28818,-15
158,-26508,4769,15803,-18624,-25507,-31392,797,-29509,-2309,-422,21205,-12985,-24821,17512,-13569,31303,7245,-2224,-22685,15059,7202,17255,9614,-667,1927,25754,11001,-11436,-5783,-6752,-18611,31382,-22730,-21577,-5256,-2576,10658,-15528,25086,29495,-15866,27093,18600,26584,-16104,-31203,-12687,20703,28273,9513,27480,16290,-1499,-1237,6969,-634,7156,17458,17022,2561,-27626,4099,-2933,15818,-857,4021,-5006,15470,14107,-19197,28586,-9490,-9168,21725,31808,-17245,5597,25114,-15713,26514,50,23243,10690,7421,-516,-17190,14881,26797,14290,25570,-6853,-18489,15611,-19813,25344,-31672,5346,-4482,763,32277,-29910,26503,-11638,29358,18224,27042,-20240,-4357,17555,18351,-4387,24807,-6014,15575,-10882,-32655,27843,-25709,-30935,-26660,-16288,19117,30663,15370,8166,2142,-13596,-306,-30359,-24000,24403,1259,19727,-28592,31422,-19365,-28073,11885,17000,-9843,-10887,-11115,6487,-1068,21940,27200,-23615,-7988,-13190,28738,-32590,4367,-5918,16065,-24909,480,4804,7616,29180,-11135,23026,25943,20234,23976,30628,15617,14249,23809,-20530,-24312,21390,-14496,-25569,17137,27655,-3534,-895,942,19980,-9377,-23539,-21788,29664,-28242,2758,-28833,23021,16673,-16513,-12614,23164,-30447,-28247,23138,-27488,15031,-21706,-7734,-10593,-31718,-17803,746,24659,3780,19680,16997,21279,8993,-633,6216,-30782,-19508,8904,-23430,-20931,620,-31917,-20172,13788,-31712,14920,8538,2131,-16706,25500,-7143,26096,25269,-385,5271,-17419,-463,-30023,-24443,-3117,-4434,-18549,23377,14478,2402,-9560,-586,31500,4056,-16489,-14021,25796,-31043,-13242,10841,-21049,-19675,19878,14181,25131,-5343,-7765,-25509,-21085,-21922,-12084,28202,-29934,4547,30150,-22304,-4822,32089,-31760,2256,9640,2749,29431,-9930,-23880,-4329,30140,-32391,24977,-26680,16829,26493,-16042,-15226,17136,-1175,-16529,5032,-13855,-25165,-27745,29814,-31711,-27029,30382,-3470,8164,-17251,20657,15676,-4098,-15444,-23712,25985,-10439,-3300,-28711,21256,-9950,23359,-17835,-30006,-14068,-10886,3994,-4547,21610,-21129,10584,4836,-19794,-22117,-1982,1370,1976,
-5271,-25435,21257,-18010,-5950,-24165,14774,15338,18030,-20346,-18880,16766,-20761,16379,31501,24033,19582,-13858,19137,-9431,-9998,18699,22132,-29782,-1662,11737,27754,-13705,-13335,9686,-1617,22084,-21909,-6908,12922,-25265,11757,-3771,-21660,13271,7879,-7184,-19738,8056,25813,24408,-31334,1031,10706,19598,-23421,3825,7378,10860,27465,-9046,12976,26198,8497,16005,-19051,743,-10995,-28903,31509,-15000,10892,31242,25337,-15485,31950,22423,-1757,12956,26519,2567,-26102,-24513,22140,-30841,-13391,-26371,31449,27683,-5340,18797,-11236,-26131,14978,-23636,-32288,10980,29936,17267,-6746,26286,-17819,3351,-6238,8594,-17766,-27114,-20933,14323,-14604,-5273,-6515,-2926,-22739,-14244,-14309,18544,23916,-17693,-10159,6288,15700,-5004,-4678,19972,-4644,22217,11596,-12998,-20283,-6036,-32720,-26690,-7908,22858,-25758,-21650,-20615,4479,-15408,10439,-22212,27754,25884,1186,-2225,-17354,22081,-11610,4288,-23916,13122,-15320,-1162,-18392,27405,20884,5213,32631,241,9960,6632,403,-1149,5379,-17669,-24269,-28640,-19538,-17819,-21450,5731,7138,-8354,14770,19744,12137,-10058,-16,32417,15596,19162,28765,-26090,12717,11554,-12026,-1911,-4634,7971,11158,-28856,-13495,-4892,10338,-15605,-10885,31702,11874,7523,5907,501,5500,7623,22022,28200,-32195,17738,14355,-14670,-19126,23511,-14392,8293,-22204,-32600,-3937,-11824,-3810,20694,-23215,14157,-7524,24664,21523,-11738,31545,-30181,-26131,13960,-31644,-11417,-7489,-27985,-1595,-20079,-2395,8757,2474,-20488,6132,18175,31534,-18556,-20836,8020,29206,-28803,-6253,-15894,-15582,-6028,-16810,2138,12296,1658,-3372,1159,32107,16733,26103,10420,-3352,32487,30200,-19318,-26041,5223,-21687,27152,-9449,-31197,-13268,7309,13266,-25693,-6827,27107,6942,1664,29496,16890,-20629,-9118,20586,-26606,40,8400,12053,24384,-19273,-26029,9877 diff --git a/tensorflow/lite/micro/integration_tests/seanet/conv/conv21.tflite b/tensorflow/lite/micro/integration_tests/seanet/conv/conv21.tflite new file mode 100644 index 0000000..a1cebf4 Binary files /dev/null and 
b/tensorflow/lite/micro/integration_tests/seanet/conv/conv21.tflite differ diff --git a/tensorflow/lite/micro/integration_tests/seanet/conv/conv21_golden_int16.csv b/tensorflow/lite/micro/integration_tests/seanet/conv/conv21_golden_int16.csv new file mode 100644 index 0000000..74b7c44 --- /dev/null +++ b/tensorflow/lite/micro/integration_tests/seanet/conv/conv21_golden_int16.csv @@ -0,0 +1 @@ +32767,32767,20344,32767,32767,32767,32767,32767,32767,-32768,-32768,-32768,32767,-25188,32767,-32768,32767,32767,-32768,17987,-32768,28531,32767,-32768,-32768,32767,-32768,-32768,-32768,32767,32767,-32768,32767,-32768,32767,-32768,-32768,32767,-32768,32767,-32768,-32768,-32768,-32768,-32768,-32768,32767,32767,-32768,-32768,-32768,-32768,-32768,-32768,25819,-32768,-32768,-32768,-32768,32767,32767,32767,32767,32767,32767,32767,32767,5052,-32768,-32768,32467,32767,32767,-16536,32767,32767,32119,-26929,8265,32767,-32768,32767,-32768,32767,-32768,32767,-18983,-32768,27507,32767,32767,32767,32767,32767,32767,24265,32767,32767,30736,32767,32767,-32768,-23854,-32768,32767,32767,-32768,32767,-32768,-11700,30089,18879,-32768,-32768,-32768,-32768,32767,-32768,-32768,-32768,32767,32767,32767,-32768,32767,-32768,-32768,-32768,32767,-32768,-32768,-32768,-32768,27697,-32768,-32768,20340,-32768,-14661,12367,13710,-32768,-32768,-21555,32767,32767,-26329,-17753,-32768,32767,-32768,32767,-24434,32767,-32768,32767,32767,32767,22703,-32768,19306,-32768,32767,-32768,13511,-32768,-32768,-32322,32767,-32768,15280,32767,32767,-32768,-32768,32767,32767,32767,32767,-32768,32767,-32768,-32768,32767,21301,32767,-26260,29171,-32768,-32768,32767,-32768,32767,26954,32767,-32768,-32768,-32768,-32768,32767,-32768,32767,32767,32767,15071,7719,-32768,-32768,-26719,32767,-32768,-32768,32767,-32768,-32768,32767,7974,-32768,-32768,32767,-32768,-19628,20168,-32768,9165,-32768,7678,32767,32767,32767,32767,-32768,28726,-32768,32767,-32768,32767,32767,32767,32767,32767,-32768,-32768,32767,-32768,-22730,11517,32767,-327
68,32767,-70,32767,-32768,-32768,32767,32767,-32768,32767,32767,32767,-32768,32767,-32768,-32768,-32768,32767,32767,-32768,-20454,-32768,32767,-32768,23222,32767,32767,-17376,32767,-32768,-32768,32767,32767,-32768,32767,32767,32767,32767,32767,-32768,32767,-32768,32767,-32768,-12398,32767,32767,-32768,-32768,-32768,32767,32767,32767,32767,28494,32767,-32768,32767,32767,-32768,-32768,32767,-32768,-32768,32767,32767,32767,-32768,32767,-15448,-32768,32767,32767,-32768,32767,-18834,32767,-32768,32767,-32768,20565,-32768,-32768,-32768,32767,-32768,32767,-32768,32767,-21126,-32768,-28310,-32768,32767,26064,19058,32767,32767,-32768,32767,-32768,-32768,-32768,-32768,-32768,-11498,32767,32767,-32768,-32768,-4282,32767,32767,-32768,32767,-32768,-32768,-32768,-32768,32767,-32768,-8576,-32768,-32768,-32768,-32768,-32768,-32768,32767,10602,32767,-32768,32767,32767,-3272,32767,32767,32767,-19272,32767,32767,-32768,32767,3366,-32768,26509,-32768,32767,-22777,-21814,5809,32767,295,32767,32767,32767,32767,32767,32767,32767,32767,32767,32767,32767,32767,32767,32767,-32768,32767,-32768,32767,-32768,32767,-32768,-32768,20405,-32768,-32768,-32768,32767,32767,32767,32767,32767,-32768,-32768,5593,32767,-32768,28011,8238,-32768,32767,32767,-32591,32767,-29974,-32768,-32768,-32768,-12426,18740,-32768,-32768,32767,32767,5377,32767,32767,32767,32767,32767,32767,32767,19408,32767,32767,32767,32767,-32768,-28922,32767,32767,32767,32767,2042,24931,-16340,-32768,-32768,-32768,-32768,-32768,32767,32767,-32768,-32768,-32768,-32768,32767,32767,2823,-32768,-32768,-32768,-32768,-12411,32767,32767,-32768,-32768,-32768,-32768,-32768,-32768,11686,-32768,-32768,32767,-32768,-32768,32767,6693,32767,-32768,32767,-32768,32767,-32768,32767,32767,32767,-32768,32767,32767,-8091,32767,32767,32767,-32768,32767,32767,-32768,32767,32767,-32768,-32768,24010,32767,32767,32767,32767,32767,32767,32767,32767,32767,32767,16431,-32768,32767,32767,32767,-32768,32767,32767,32767,12278,32767,32767,32767,-32768,-32768,32767,3
2767,-7074,32767,-32768,32767,-20461,32767,-32768,-32768,32767,-32768,32610,32767,-32768,-32768,-32768,-30508,32767,-32768,32767,-32768,-32768,-32768,-16217,23586,-32768,32767,3650,-32768,-32768,8294,32767,-32768,32767,32767,32767,-32768,-32768,32767,-32768,32767,-32768,32767,-32768,-32768,-32768,32767,32767,32767,32767,32767,32767,32767,-16050,-32768,32767,32767,32767,-26000,32767,32767,32767,-32768,10795,32767,-32768,-32768,-32768,-32768,24557,-32768,-32768,32767,-32768,-32768,-32768,32767,32767 diff --git a/tensorflow/lite/micro/integration_tests/seanet/conv/conv21_input0_int16.csv b/tensorflow/lite/micro/integration_tests/seanet/conv/conv21_input0_int16.csv new file mode 100644 index 0000000..de8e13c --- /dev/null +++ b/tensorflow/lite/micro/integration_tests/seanet/conv/conv21_input0_int16.csv @@ -0,0 +1 @@ +14054,16586,20758,-5384,-7809,25529,-3769,-27139,-28696,-24926,14787,32236,1167,2,5954,-30155,8951,-24084,21654,6842,-9028,32120,13102,-24032,-14047,-13087,-23374,-26847,-23445,24853,11822,15601,-21529,-26602,-17869,-26355,31542,-26608,-23534,-18185,7632,-8449,-2897,-25953,10654,11017,-3307,-10633,-16879,-30278,14743,18516,29810,979,23673,-26054,-24734,1890,-28280,-16884,-2123,21376,-12936,-30970,30006,-22878,-22398,3282,-18316,-24777,13352,32113,21974,-22165,-13669,-15699,7344,5870,-31205,14386,-10636,30299,17947,373,8456,24886,31784,27140,18600,29427,4164,2097,-20926,-26330,10461,-28494,-2249,-19757,24198,-25556,16998,-29803,30771,12981,27146,3070,29484,26864,-27024,25260,-9442,-9346,26378,-16994,-19087,29357,11083,21188,8289,801,-8315,4177,-16896,16674,-30950,-30327,-18888,-21113,-14984,-6351,26176,8132,8295,4122,7691,-29955,-27842,-24308,-16872,9906,-30951,5638,4249,15934,-1219,-9471,-24604,242,28756,-26464,28462,6247,7600,-9290,10091,-4289,-14077,-26527,-7648,-23938,-19385,-15452,-13650,17480,-13719,-13325,18799,16011,-4939,6912,-4143,-13809,26700,-20267,7644,-5981,23681,29490,-15651,-10558,20506,18457,-4674,31629,-22817,-8048,19531,30975,-12214,26725,
15532,2512,-13119,-22632,-20692,8181,7537,-1189,-20049,-8227,29136,18327,-16889,17198,-29098,19163,-21750,1412,-22515,-27060,-12521,16306,-31990,17600,-17164,22385,-303,-5794,-29438,29956,-24836,-22171,-15846,-21037,-416,922,23217,-8279,15212,15845,20589,9476,-30427,-6730,1568,30775,17781,-17187,-18864,-14125,13779,296,10597,-10567,53,8157,-3486,-1205,-2913,8209,31480,-22885,25496,5973,-30002,18378,13372,29882,7144,4207,-24639,15781,2775,-2524,23659,9449,9927,-2824,-19688,19095,-21829,6129,-31800,-5238,-8031,1616,13791,13921,4366,15363,-6919,-1316,-13159,-346,17064,12962,-2079,19100,-28086,30647,-12193,-23346,-15951,21351,5236,18903,21946,15896,-12570,-8397,-17541,2935,7668,-9091,-26034,32464,27305,19163,19506,-17109,19578,-1283,27656,-762,2814,-12951,-27643,3496,-14360,-25761,-15550,-11976,1868,-15988,21934,17586,-26244,-15640,-26445,22915,-24346,-2795,-11167,16429,10920,-10857,-6149,17947,20376,-24767,-26564,-30348,20594,30555,12951,-32258,-15517,29457,-5323,14853,-1052,-2681,-12345,-12705,-23613,-4314,-13931,-18955,-14474,-30980,17354,-28896,14543,-19925,-7200,24000,22274,-16377,-14796,1576,1444,-9021,16021,17257,-660,24721,-17857,9958,19712,26785,17325,23937,-21681,-18849,-3886,-8062,-21364,-26926,11386,-6181,5694,-10477,20998,4624,16095,27630,6358,-28140,-702,-22061,25370,14048,-10387,21797,3498,9219,-24672,-26792,28840,14423,8342,30904,-12339,12319,5214,9630,26598,-20047,-17369,28549,618,19462,21399,31898,23894,-26929,28494,-6376,-4332,9122,-25187,-17395,20739,-2827,1262,-9146,26032,23914,-11803,-2047,15016,31949,-3511,14891,28074,-8300,30925,-12173,3702,32447,416,13618,14778,15435,17144,13952,28012,22798,8277,-19030,14890,-27602,-3084,29435,-7894,8904,-30196,-723,-27072,31937,-21679,-2836,4221,14469,-29566,-17848,-8987,-5019,17793,-14892,-8633,-1172,22558,-29488,22386,31070,8361,18450,-2457,31352,-27624,-12897,25799,3658,29841,23813,-4601,-27776,-23412,5242,21610,-32006,-22707,-12885,10910,170,25769,22850,28000,14782,-12851,13395,-24969,-5038,16974,21618,1892
8,17940,21445,-24832,-3612,-30132,10294,-17250,-16063,17607,-457,-16712,14113,-7733,18386,14576,-29951,-4416,-179,-18192,22323,-22403,-14024,16402,-17300,-27060,-24139,782,11992,-6746,-10946,12027,13415,10499,27980,-21549,-17038,-21431,-30223,-29144,-21124,-5111,-31104,-31279,5095,-9720,-22986,26769,6718,7055,31873,-22085,-17900,19472,-15391,25083,13115,32265,-23908,32244,15517,10968,-15153,6806,-18574,5211,5275,3711,11984,1652,29886,1617,24607,-32205,-20011,28528,-13397,1719,-14903,-9229,-1706,-1834,8388,14295,22009,3349,16471,-11857,31788,-7954,-25559,-22254,-29869,29524,-16900,15962,14582,-27520,-8532,9723,-12337,13735,21991,5136,-10214,24637,3467,32349,-12110,-16186,-31597,-15162,-313,-28555,24906,16868,10315,-1262,3926,23696,26123,-7434,17141,7287,-27024,-18661,24697,23156,16461,16249,-10706,25517,3200,32748,19357,-30497,19675,6045,-5085,-19084,12573,16974,17749,-8548,31414,-10389,-5024,-3781,8521,-6566,-11433,-31132,-5337,9998,18145,3573,18566,18269,30255,-24591,9289,6277,-8374,-2027,4264,-18580,7723,-8186,14092,3313,-16942,-17018,-27620,8212,-15514,15852,24724,-8980,-25092,6871,-9296,15146,-19421,-27917,-19873,11721,11162,30193,1239,10209,26034,-25242,-20228,7564,-1194,-26264,-8260,19748,8829,19463,15871,-20606,-2667,-2635,3128,1205,26699,30337,7729,-31446,-29488,4188,30792,562,32484,4649,-28515,-15914,11068,-11353,-15662,9038,19754,20887,-14963,16053,30032,17391,19446,11715,11986,26845,8710,29371,-8268,-3182,-20730,-27846,-5650,-20802,1152,-696,-15183,22497,3563,-4129,-24513,-19913,4933,7285,-26635,27945,-28433,691,-606,-7717,22705,11421,31968,2611,-697,-12041,-21619,-13986,-8157,4657,-916,19491,25394,3596,23407,-5169,4823,-20535,9636,28223,-26853,6290,-4238,-5020,-16062,26430,-11771,16738,-23022,-18126,-29495,-4830,-29688,6366,27420,24274,-26015,-27473,18786,9813,18526,19175,-25577,-25270,27631,3989,717,-28867,30221,31456,8261,-24702,-14727,14913,-14762,2516,-21490,14318,-12842,-24129,-12587,30200,17411,-27168,29508,3247,-3716,-934,-30886,21548,-491,30771,1
9531,-456,23068,19894,-11378,27611,-5004,-4545,2785,-15074,1953,-20041,5275,-11672,-22710,-31377,3206,917,-27591,-825,21311,3332,-16954,9463,-8465,-29246,23219,-9214,-30832,32459,-23074,-3046,25338,22899,-32717,21610,15678,8578,-22916,12662,-27091,-10665,14255,17340,1570,-22588,29700,-11272,4768,14827,29837,31623,32755,-19839,-15288,-6034,29029,10392,-11355,-21088,2296,10614,-19708,28836,32628,5186,-19563,-14895,-5079,-5099,-4010,9898,13324,-7608,22373,-24429,15455,-2111,-25132,24862,4405,-32383,30436,13664,4617,15701,-24242,28556,-16189,-29828,-17964,-30773,30120,18610,-3420,-4654,5889,23603,28522,5914,28569,18092,-31875,-17219,-14255,-5524,-21254,14132,-12734,17630,-3725,12204,5257,-21771,16832,24693,-24081,-15428,2503,-25779,13712,-5480,-9480,-21006,22683,-23528,-22620,27765,31398,576,23581,21918,-6004,-31392,25553,-21532,19088,1599,-32279,27394,-22382,3868,-25508,-7538,-4531,21133,404,-18761,26345,13317,11293,243,-23998,18390,22820,-31454,30688,-5420,8151,-23865,-518,-19652,14358,-22497,18983,14349,-14501,-5881,-12516,-18330,-6723,-6897,22313,8653,16186,-11038,-10855,-7345,11304,21080,-26785,7455,2946,-17157,14787,8669,-19544,27680,32377,27744,-7745,-17598,-23614,26913,3518,-15370,-2507,-8484,30962,-3747,4555,-21305,26056,28622,26450,-31992,-8148,25665,14135,21763,20997,21891,14716,27789,7894,-32198,-29918,-8043,-1307,-525,1848,22078,-24033,-3467,605,28695,31578,-27557,20558,2911,-32002,-31169,4933,2577,32074,-24071,10787,21798,2196,-24669,15273,30422,-30243,-16143,-17312,-10468,-19193,-24950,-26639,-1370,25904,11632,11224,-14184,-19152,3375,32064,-287,32686,26269,4854,-5661,12583,32404,1029,-25061,27641,-21512,-19553,-16123,-13852,-4807,-18063,3807,-18865,-6098,5649,-17202,-31179,-28267,-7793,-18593,1054,-23208,-18194,-25855,-18390,-3910,-9975,-12241,7021,26052,17545,18002,8545,-15232,-11777,15057,19733,-17255,-11725,-31575,23695,21880,15002,-281,32715,6525,-20788,-1498,-1999,-4313,-20691,9350,13305,21555,8261,20227,15905,1052,17497,312,-19067,-31319,28029,6972
,-32250,-30355,-26048,11968,9034,1193,-3576,-7883,-28886,-1900,14303,-31584,-22815,21395,-21057,2260,-15237,2260,22107,7020,20379,-27407,-31362,-19223,30257,24842,17860,23291,19965,9036,-30649,-12616,18247,26618,29900,-23146,-12702,3610,-22335,-10081,-25824,-24206,3889,-22112,-27259,-19107,32165,1864,-21513,2442,28869,-1662,-30100,-28368,-24781,25037,15962,9920,-4370,-19996,13940,4948,4793,18998,6578,16438,-6,-7544,-28074,-375,-2358,-8590,10481,21420,12862,-2913,-13491,-20382,10519,-26747,-6244,2883,-20913,15647,22632,17790,-8787,5568,-5635,-16106,-8465,-6783,9010,-2595,10310,30388,5018,51,-19884,-2807,4562,-26764,-2745,25057,1540,-3650,21927,3337,-22602,1208,-29278,31683,22566,31247,-29947,7304,-7059,31203,-5776,-15297,-5950,12302,20188,20425,-27483,-27602,8612,-14636,-28096,-22248,-3975,29675,14574,23563,-15066,-2973,-24300,27716,-6436,-14059,18867,-13834,11546,22630,2555,3383,15718,15184,7704,22437,-9632,27432,1253,16856,28555,-12340,-23874,-30061,-17181,20294,18576,1875,-6173,26488,13481,-20783,32662,29262,-27976,5910,-28404,-23308,-3193,-31401,-30545,-15766,-11771,-18055,26347,22535,12561,28129,-8175,-1240,-32472,-19905,-32570,15021,18005,9754,25187,-2932,-22680,-4895,20263,5961,6628,-8957,5784,-19418,-12224,2440,-24228,-11322,-28118,17278,25631,24951,-32634,31659,20232,2718,19601,-31540,13569,-3841,6670,24668,21837,2212,18327,31634,27541,29570,26342,854,5864,-32423,32492,-16192,28270,-10823,-9619,-1395,-18729,-16900,8593,16874,28097,19256,-28270,6835,-18989,14497,-21322,-26829,15604,-19965,-12801,13014,-21027,-8279,-9955,-3393,-2584,28317,-24346,-17543,31469,-5950,14420,-25135,25463,27825,11274,-12588,9046,22891,12748,4622,15361,9050,-21783,13686,25516,30658,27844,31996,1266,31447,-14741,7062,30645,-14158,-10539,-2534,9699,-1822,7362,13455,-15634,29101,25141,-10421,6690,27202,-19580,4970,6022,-9923,22755,9707,-22087,-14504,10530,-21368,-9794,1918,-20369,-7860,-7492,-12965,-9883,30145,-4148,25695,19731,-25608,-9679,15051,2700,16799,17546,-9374,-27831,-19395,-48
79,4589,-3507,27460,652,-20316,-17829,-24020,30763,2232,31799,-4746,31221,-32004,-29822,-25387,6644,-26560,25541,30067,-9278,-2617,-17406,-90,-6642,-10920,1088,-13621,27147,-474,25318,29538,-6825,-28936,-20591,28773,14572,31726,3984,-4479,29279,-23977,187,17770,25803,-18006,13918,27320,-17108,31574,-20452,-23175,-20136,2564,16930,-10620,-16653,20538,-1868,8128,30073,-1939,-15628,-9377,24176,420,-27137,1289,-25476,16530,-8584,-9088,4338,21785,6906,-24879,10819,6326,28831,32118,-25998,-4630,30249,17169,22278,-31097,-13346,23326,27880,29472,191,13840,32472,-9859,5220,-32715,6306,-20764,-9536,31841,-11319,4067,17015,11544,-23056,-26698,-28200,14909,-24238,-1364,-1099,11718,-44,2076,-24618,-8630,20270,-177,-20504,24045,-10028,12000,26529,96,25717,2297,10526,-21570,714,17421,31506,1809,-19520,-9422,-14438,5274,-13745,-28813,-29818,-25892,-6937,-1191,7234,-2813,28048,-10731,11501,6672,27875,-27558,12877,580,-3517,-28535,22155,-22091,18684,30119,18614,17668,7947,-29768,25084,-24090,-3488,13667,12704,-6131,-24573,-11946,-3052,-28220,2309,31257,-17176,-10572,1532,-30319,-13624,16233,-4269,-7212,31341,23240,-15950,-9031,-32188,17561,14308,-17672,22738,11915,31172,28238,-21451,-25047,32661,-16090,26640,-26711,30085,-25690,-14585,-21206,2130,26507,10907,-30487,-7735,-151,-32330,23994,-25690,-26927,24287,19997,-637,10310,5518,-5401,-8090,-23684,3805,-28346,-8652,30604,28207,-226,22737,-4461,-1171,19570,10901,-10324,-18534,-22638,-5182,13616,-25506,-9023,28339,3202,-30963,30464,-30200,-15630,11651,12315,-18629,-8497,5960,21553,-2151,-32075,19470,-12010,8541,-6982,4191,-22710,21579,11017,-21143,-17254,30304,-10218,-17427,-18376,-12427,-21824,24955,30733,-9949,-15277,19407,9264,24206,-22579,-31837,-11100,-13838,5500,-21202,-32737,-5925,-20249,1974,15435,-10939,-7334,-17074,2353,21248,17563,27227,31513,-14941,25438,17283,30086,22082,27954,-11912,-815,-14166,13363,-4986,27470,-15010,18228,4160,-32638,-13044,-594,-18540,-24966,-32733,-19170,-9916,20245,23781,-7445,30103,-32548,27618,20
443,30154,-18886,-493,5651,-8535,14845,-4639,-31873,-31081,26089,-16149,-22676,-22160,16731,-21551,-9735,19854,-27160,-1013,-5344,2518,22287,15476,27224,17412,-15974,-20510,-17604,28485,14531,-32720,-22705,12441,18282,8205,255,-31877,-14621,-6404,25083,7090,-23395,-3147,4331,-8143,28453,-28138,18607,-9217,-1777,-16822,30021,-13941,15458,-9055,-2775,-22785,18653,504,26816,-9929,31201,-17268,-23198,-22350,-15669,17014,16111,-26395,31617,16635,7036,11166,-5936,-7879,32324,11746,-604,-15186,15500,1098,-727,16892,1803,-9536,-14404,28561,-10922,16436,18688,625,11740,5883,5396,-1084,-4172,18756,20829,-9302,22097,-8408,-24745,17272,23359,31198,6663,32269,-8605,-16035,10677,-31553,25707,-4427,5199,13625,-9041,8766,-12561,21927,7070,-26852,-26998,24010,16284,25231,-32502,32018,-14692,-27862,18169,22028,2480,21161,-14321,-13066,-31101,-20396,-12150,-5470,-7720,-5543,25175,7171,-7391,-20187,-3027,-10890,26475,-7643,-8276,-30158,12779,-26424,23498,7872,10035,31209,20605,5621,-12127,3378,12106,13628,12109,20288,1081,27426,-32455,-20204,-6025,14270,-32622,-22162,4717,16931,28141,-14847,113,19367,6961,26529,-7839,-17926,6772,-27837,-29721,25110,-25043,26172,-10004,-22087,19763,3487,-18246,29447,-32447,-10853,17158,-10806,32693,-9564,-9036,-18001,-6377,-19636,-11561,17057,6755,-14738,11768,-11331,-15749,271,7375,306,-4101,-1348,-21973,16342,-16286,-30789,-25138,6599,-22783,-7052,3621,-32692,14416,5140,-22836,-16331,26724,-30242,3772,-26862,-15651,27019,19204,29044,-24465,-15087,9275,14314,32444,11309,-10456,-18136,27813,17958,-6662,24472,-29758,-457,16956,-9446,-9763,-30638,-226,32394,24709,-28684,14619,-6510,11885,1929,22766,-8253,-28706,-14204,-22436,12581,25470,-32526,-16018,-470,-12805,23717,-15346,-24581,26372,-21056,28549,-2295,-3426,-18717,13855,-20790,-13595,20107,23367,6794,17390,-23318,-15499,-7264,23885,15545,-3591,-6572,31095,-6968,-15775,-9643,-30959,13466,12469,12030,20439,4202,-22461,31602,4150,-12816,-24299,-8169,-479,12653,24140,30356,12908,9226,31221,21132,24143,27
071,-5112,-26398,-2595,-16203,31243,-16895,-3477,27782,9233,20853,5814,4571,-2694,-4172,-23992,15579,-28355,-13371,26102,-29870,-15752,20959,-23676,-25251,-2786,-8512,-30866,20091,6153,-30484,8556,-27045,4519,18679,-11180,-7283,-21021,15567,-9150,-5760,-21497,-3383,22471,-1268,-32451,-13364,23431,-22344,12196,-19065,-20413,12183,-7504,14906,-15159,5460,488,-14871,20531,19432,11372,-17225,7875,-32204,20413,-4929,-25398,26536,10511,5300,3372,-20615,-22369,-27418,23467,20288,-12461,22763,-25872,-8067,7485,-7885,-30281,-19714,15077,-18055,-20547,26021,-10558,-9647,-7514,21747,-28078,-17408,-8083,-13068,32258,-7671,-9300,6516,19084,12670,-23653,-8112,-18634,-4046,-9731,13303,13580,-4814,-28532,-25870,16014,-20891,30698,23239,3638,-12557,11745,16793,27491,18475,-9999,-3607,9960,-21071,23868,-7612,-472,20688,-27888,-11545,-12797,26090,-18424,4382,-15979,26074,17535,-19029,13162,-24351,-8589,-26963,-16677,-9923,-25226,2893,15423,-9145,-30928,8956,-18765,4864,-7323,-29075,-17512,-917,19951,-18597,-28981,11688,-17224,18464,-3547,-13690,19252,-9899,-6455,-14543,28171,-3429,-19129,-1017,-17095,13940,-1113,-26437,32593,21325,-14165,-24570,-23157,27442,26232,24603,399,-23812,-14918,-5028,-5115,4985,3360,-15964,-25016,17935,-21772,-14492,-28843,-23014,-19192,7023,29473,-7656,-20231,16869,-11588,23518,17737,3054,-31052,31680,11107,-21372,19679,6777,-23459,-28321,17667,11587,-32431,-31968,5850,32612,30085,26051,-17965,-31071,11525,-8033,11651,22820,-147,-4849,19551,25184,-12049,23245,-19630,-16388,-7722,-13688,-6408,3661,21352,1150,-10931,-8236,-9451,-13420,-26972,-19994,32641,20816,11426,23460,21598,29109,-19347,-1332,-26440,-18865,5885,26605,8421,13964,-15049,-9971,2560,30751,-268,17580,4733,27528,-1447,30770,-15374,18186,-17624,-10100,25086,-8016,19973,-17873,31932,-20052,-20783,31128,23,-10263,-2455,-32447,-21966,8033,11098,14035,14127,20122,-20681,-16711,-22661,-8024,-1786,-28273,-21461,-32680,9951,-15621,-23155,-11716,12490,19201,24335,-15246,24992,-1613,-5519,-12032,22289,449
7,254,-13435,-13897,27312,-17352,-5790,32054,10828,1701,17010,-4471,-8552,4001,22474,29076,13595,23099,7306,-22141,32515,-20216,-21566,-14104,-22784,-18430,-28421,22687,-3126,24575,-27139,20329,-2238,7942,5335,-798,-28969,-22905,30211,28129,14046,15317,-26651,-25858,-31261,-15921,12573,-29353,19555,18747,-30556,-16912,884,-11108,29969,25613,23357,27704,-16802,-13299,27156,15472,2050,29346,-2804,-24886,-2647,31999,-7293,4957,-22469,28324,25570,-6158,-31989,-25953,-29410,13704,2712,-15612,-21211,-30343,2979,6128,20620,-3811,-23801,-8536,3032,-12358,23685,-29142,5654,-26944,18859,10323,-23442,29969,2858,15876,-14708,1413,14887,-26814,-30680,31645,-21865,-1164,-30316,-5614,30956,9339,3595,-30155,24175,9597,6559,-28327,-27073,32714,-24097,-20795,29896,29266,1673,-19436,29906,30241,30143,10288,14496,-21592,-25545,-24782,22077,31595,4485,27032,9414,12871,18898,-14720,-12431,-31889,-16799,2573,-11827,19560,14199,-19902,2799,-28759,25581,-8661,21504,-9475,-24338,-25147,-4272,28887,28,21861,-27059,5298,277,25332,-2031,32189,26397,-7881,-3715,-6204,-12032,26033,-27064,27712,1384,4670,24341,19909,-30373,-3330,-648,25116,9932,31512,-6525,-11502,0,5438,-9492,26484,-2379,-29550,25324,-596,24166,-32401,6035,-13167,17606,-447,29875,2339,-18096,-7903,9611,-4315,985,-14648,-6878,14489,-30069,-20282,-12181,-27368,-18914,-20932,31496,-23164,7142,3658,9662,28100,-17238,-28176,-25942,10983,-32535,-2718,1941,16831,-10634,-12191,22957,3151,8060,16164,23185,24305,-22297,6023,31863,-10521,-29034,-4743,9135,-2463,14948,25980,14077,6600,3221,-6527,5760,29447,-18438,25427,-11745,-12983,22679,-30607,30775,-13619,8103,1372,-7384,-1052,5344,-15614,4998,-30713,-5170,-8275,-14989,-8288,-14291,15216,25774,9929,28501,10680,-4799,17546,-23206,21071,-2251,6247,1574,1897,6005,-3691,10297,-28085,18398,-7956,25114,26928,601,-234,24232,-2021,5570,2928,682,17479,-28715,-13823,10574,4586,-9936,-7771,-6800,4947,-26356,-1585,-30793,23037,-20383,23762,-15436,11863,-15850,-20496,20045,18325,-14427,28374,-19647,907
0,28168,-22802,10522,880,-3708,4603,-24499,11117,23376,24222,11478,2880,-13784,24296,-5217,23268,-7019,-15966,-4970,-6439,-18404,20822,1512,15396,-16302,15799,2069,27960,-16394,-24812,-30730,-17591,-1809,-16628,-17691,-15317,28582,-13293,13621,-1350,23026,-23982,20033,21588,23381,26818,19599,-20603,7421,31434,-1393,-20844,7021,24668,25048,25667,-17706,14027,-5367,30789,29357,-23312,22187,-22072,26723,-4064,8335,-26109,-24874,12126,-10324,3619,-13279,26707,-2760,-4870,-24844,6322,10506,-16823,-24838,31885,-17646,-13457,18276,-8167,-22994,17464,7350,-6435,25901,-26116,9250,-31955,-3777,-13349,-416,-22771,20588,20361,-2556,27582,-10813,15481,17497,-25295,8771,-1722,-20715,-32693,27853,6904,-18921,22366,6176,29491,30731,-26900,20256,8428,-29055,-24272,-20009,7770,142,-13951,-21770,1332,9553,19715,-26724,-5424,-5508,673,28825,1427,25566,27736,11409,-12458,6836,-11809,12193,28311,-4584,-9070,-23250,29058,-28063,12316,-13285,-21159,15495,4745,-5658,-31484,11416,-26376,-30251,-22836,-11493,3616,12601,19502,30474,-16178,-29349,4214,6151,29280,21400,-26644,-24518,-20821,-13239,-30581,-9457,-32332,-16530,4139,-22739,-15455,20619,10595,-31959,-6053,-10112,-4694,6572,18934,6062,8366,27040,15833,264,2939,-26292,3523,19202,24784,-5085,-15639,18059,28847,-25494,31327,6190,-3855,-25376,5654,8051,24469,25460,26062,-4195,-9756,28426,-5785,9063,-3798,-12195,9049,-24028,-15929,10415,18232,4304,-16185,31899,20992,13184,-19217,-30011,18235,15091,-23520,4624,-27784,-18349,24513,-10187,11208,-3895,27386,-5716,-28468,-30021,679,-21453,-14745,26963,14414,25951,4634,7570,-15034,-4547,6107,-19099,-7966,-6317,-31246,-21477,15068,15568,-27729,1976,-9,-28054,-3741,211,-21446,-31982,-32133,13237,-24632,-13088,21936,-19321,-19299,29293,-13038,-17760,-31369,-16172,16907,16895,5669,-30979,19864,-10550,18517,-31461,-31737,-27183,-25936,-21442,23436,2323,-23728,-31581,25338,-25097,13960,-20629,-21573,-277,2832,24549,-21474,-29719,3226,-5032,-27531,-2970,8295,102,4142,12268,27121,19932,-23078,-7965,-3164
1,-26222,19,-30595,6863,14371,3455,-6371,1992,-10286,-24711,-18869,-13675,6578,-29663,27894,-1548,-11580,-24198,-9515,-24676,11775,6010,-15738,-18093,-5111,-19094,10397,25861,-8088,5187,-8940,-8761,-28794,-30980,-8151,-28944,22505,-29287,26033,-2436,19043,6607,-23111,-31318,-8573,-28370,-19985,21790,-16547,-19104,-28428,23938,-25661,-11675,-32223,-6231,-20547,-20216,13034,4311,24731,3114,-31664,28022,20342,30005,-11113,-31845,-899,-22739,-18361,13183,-29905,-9039,32467,-30610,30645,5177,8870,-18903,30045,14312,-5215,-14717,-28607,-18225,-11539,-16722,-19294,-21277,-12570,-8883,16533,7041,10401,30061,-3716,23568,30796,20212,30973,3896,-28344,29504,26529,-5983,16300,23900,26353,-32041,14524,15688,-5088,-1621,-7553,-3073,-21771,28355,31399,-8232,8392,-12978,28616,-17864,-9268,18600,22062,4396,12630,-23104,-12595,-22256,-14970,8311,28362,-19720,-11829,-30086,11979,-25281,3064,-24167,32368,-16859,-22425,-12877,28706,-8141,21030,-11474,11515,8221,969,20340,15913,-17110,17511,-26926,26330,-8479,-26826,-10988,-15938,-267,11532,19939,-27881,-3474,13638,15077,20387,2364,-25804,-1157,31728,-6428,26583,7848,5793,-31773,-7515,27612,2818,24204,-32339,17444,-10054,-128,23408,-2401,2708,-28259,-16829,19328,16157,32253,3377,-15800,-27852,-15867,-10882,10267,-6078,-11487,-22086,-23336,18919,-5464,5665,-6489,-20513,-2514,-20703,19140,26018,-6295,30749,-11,-9795,17955,-18606,-31372,-2480,17487,-8227,19743,-4340,-25397,5222,-30780,-29064,-1262,-14594,30562,135,20618,-16133,-12013,8531,18837,-6739,-8169,-14666,6683,7618,14097,12399,-11446,4781,-17569,-23222,-743,-18137,28368,24611,-22777,18680,-13317,3027,-16649,25707,-25753,-8380,27481,13882,-6944,-11296,-13670,7717,-27828,-29429,-11747,30073,-10866,-11122,32198,16327,20808,-7882,-430,-30426,-6770,-17678,26569,-32343,14386,-32198,-24806,17725,28741,-944,-6529,-23184,-17482,-19116,6380,-22716,-21955,-28488,10382,-25492,-30258,29113,-13329,30743,3988,4874,-21181,1671,21349,-23237,-13497,-14747,-18728,-23164,-26181,6612,-8834,-13720,22974,
-18533,8959,-17192,3095,-13520,-21560,3033,4485,-17380,-23086,4027,-13759,-15994,-29862,-18967,14458,-3729,25923,-2735,25288,17661,-25505,27133,22293,5264,10606,-10343,19251,8010,31123,16733,22993,29519,14349,-12285,13622,25623,-18063,10390,-23492,17977,5923,29224,-31653,19058,8680,-24635,32538,-4212,-28075,15237,-1047,3916,-16028,23476,-17505,4048,-29676,9450,-4968,2221,14047,14725,8786,-18784,-30966,28632,30036,28566,28316,-15014,-28443,-12655,-11696,6064,16423,-15439,-9902,-24723,1666,26985,14157,-9355,24526,14168,-17257,-8425,11710,-8133,17084,23880,-14293,10430,23499,8449,24231,22576,-31479,-20411,19385,4886,21922,6536,7034,-11608,-9008,6975,-16072,14909,28156,26291,25755,-11234,19674,5189,-11991,701,29210,28598,24262,-15501,-25320,-29807,-27631,-9537,-3151,14729,-31451,15515,13618,-27499,26915,-27311,25156,-19939,-12862,-16346,15098,-7551,-26876,22450,25974,-3501,26593,15318,30886,-103,-1081,-13172,1631,-32105,3443,32567,23425,-30952,-11093,31068,-21388,-15515,-8116,10526,26421,-22717,-8341,-26761,4957,23613,-15243,-24349,-26592,669,23122,13027,13116,27982,24151,-19219,-27434,28373,-20456,30513,-2650,-24063,30153,-17185,17142,10657,-18468,22120,-30406,-10510,26885,-25198,-18941,-1372,-24164,-11163,20365,-10105,-21078,12990,6175,-11850,14712,-5494,14179,12484,-7479,6262,-865,19155,-5157,1690,15647,-18379,-10838,-26568,5737,1829,-5156,20043,27402,-16385,-4911,12969,24343,8418,13127,-21788,16064,-10183,-21882,-30759,22368,29640,25001,-7009,-20962,-9098,25551,25769,-15785,10488,-30670,-21350,27245,3725,31316,-25071,29009,-8849,-18398,-32625,-16716,-3234,19168,2505,18193,11098,-16852,-30184,26308,15100,29046,15359,18013,16324,30318,-24317,-4991,-27729,-9404,5431,-18543,-686,10464,-26729,12771,31434,-9858,2090,10237,-804,2916,10396,-4540,-7724,25210,-29497,25786,24120,25948,-28675,2266,7476,11209,21426,-19999,20146,3974,5715,-9708,-24210,-32504,29758,17743,-27857,-21559,-21656,-9110,8417,16033,15038,-2580,18826,-7440,-6882,24031,-31129,10936,12385,1462,495,17235,-18
723,-13768,-24648,4975,-32202,-20718,-4581,-16039,30108,-30830,30055,13837,-9923,17825,-14277,29911,32143,-19324,-24305,-9698,-1647,19752,-20122,-13915,24558,15008,14942,4937,-6762,20062,2098,-22932,-10514,18692,-7722,-1686,31989,-32666,2012,-4046,-19227,-28629,-29838,-7159,10700,-20265,-30148,-28583,-15653,28014,-625,-30607,30330,-3148,-6537,-22924,27014,-32049,-17304,20910,26329,27736,-9293,28613,-14676,18345,10765,22197,-27028,-1322,10658,26873,-16314,-31672,23714,-9455,14022,-30909,29359,2032,15239,6653,-19759,32224,9010,-15657,1335,10243,21614,-17165,-28278,-4276,6878,15549,25470,29924,-18125,25584,-5690,21666,-9424,8468,-21047,32266,-32362,-14596,25550,-1057,15418,13581,15810,-8816,-26471,-5864,-31222,3871,21317,22521,-3169,-30917,-9263,-12013,-23360,14078,-27774,25965,-46,-1617,-17367,20336,-4230,-17020,13770,20334,2031,-4577,-6069,18292,-14485,16236,27278,-23372,200,10056,-21219,2037,13173,21318,23021,12439,-20079,-26854,-17772,18561,11681,-23031,-26263,-18649,15345,11347,27045,17734,-10986,-1257,27435,-6511,-12747,16652,19468,12990,1780,28298,-27826,-3253,-7022,26959,-13542,9733,-18913,15561,16390,-24033,13189,9243,7817,-1280,23230,-3120,965,14607,16776,21232,-6784,-17962,-27002,-26150,21431,12702,-108,23982,-21453,-241,13501,-20639,-9857,17221,8252,4351,30320,5344,18718,-19777,25421,26601,20099,-3749,-21043,-12649,30154,12662,-6547,31222,16280,-20121,-6218,-21060,-10390,13914,27928,21150,2694,26661,15011,-24478,-11528,3821,-29207,8601,22498,-30329,-26529,-6788,-7883,17087,-21125,-31143,15092,15366,17150,-26211,2471,-25774,-14501,-22935,4688,16294,7623,14967,4142,-32397,25479,-28125,19362,-8074,22984,-16124,-32015,-5618,-31913,13370,-9395,29307,26421,27945,11811,-14509,28840,14097,12883,21603,22048,-8441,-23282,27280,-9907,31733,-7569,15219,-20012,12423,-25153,-30288,-3302,4674,-26480,-20423,17180,10134,24613,6030,-18875,-5931,-12167,-12248,-20811,-20184,-19391,21762,-22162,-26433,15808,5177,-32665,-26200,28926,10549,-11484,19672,-23871,-13707,-8737,-6376,-
32558,-6965,27645,3111,-24795,-7919,-32170,1720,-11009,-7990,-9196,11076,-9299,-5104,4779,-31648,10789,-11279,835,9513,-27425,11000,-20192,28613,-14217,-17322,-19497,-2980,-6214,-27226,-18923,4471,11518,-30728,-15840,25983,32210,28842,3096,-5997,-1831,8833,4447,-6731,22346,-5560,2513,-2862,-17414,-26679,13861,8432,16822,4763,30469,-30642,29763,30699,22339,12329,-11898,-17250,-24380,28333,11161,17016,-18231,-15711,-1025,26331,8676,-6679,-8425,-13382,1679,-12684,-20407,-7888,-25312,3321,8266,-2913,21086,-6781,-25666,-5823,9606,-25060,-29987,28042,21303,25136,-31329,32509,-1228,13170,17389,1687,-30720,4534,4282,-6501,-20865,-14500,-18050,21819,-28099,13859,-7778,-28565,17453,-18578,-15800,15103,25339,24433,-23494,4870,21764,-8366,14290,-16593,919,-8696,26080,21821,-11085,11822,-9455,28501,-26565,23508,14112,19503,-18175,-29082,28008,14563,14315,62,32273,31231,-4045,5039,-6134,-30756,5730,8860,-17460,-2523,11582,31606,32660,5446,18710,-21201,-5328,490,-15429,-24164,-17242,20291,3792,-3353,-30472,-22892,7290,-14089,-10407,18685,-24586,-11660,11993,-16004,-30419,13871,-22978,32009,10761,-24281,-15358,-23096,-2901,-26363,6508,19674,12292,-20019,-27118,-30328,-24574,13554,11269,-21323,6016,25423,1412,-17245,-2380,4760,-6105,-12345,20770,-26117,-4275,-19434,-30455,5657,6990,-7214,11926,11665,-11289,27882,2666,17809,6825,20455,10804,13652,-23111,-18214,-20908,-20325,-30182,20938,-27752,-24351,28756,-14988,-23102,-13505,-22584,-23057,20484,23166,17636,-6846,-17609,-25917,-17307,-32035,-6944,2039,-3842,12464,-10274,-31785,-11013,21302,25886,23521,9387,6501,20853,-3712,18906,-19440,12032,23145,29814,8196,-12835,10311,-25692,-6090,-627,17454,-4917,6106,-30331,7695,-14019,-11643,-29893,-19548,-2303,17616,-15681,-23143,-16677,18785,-4289,8985,9710,30609,-9633,-14837,-14294,789,22308,26111,-25495,5376,20439,8994,-22114,-25003,22566,-15357,-8909,-28122,32546,-26798,-5957,3386,-32459,23887,-14427,-17617,-14183,-7618,-27476,2272,-19536,-23615,-6677,-18283,12856,-30827,-3623,14686,17327
,16585,-9813,15595,18865,6514,23519,-3785,22189,30533,31361,-5773,12443,-25016,-20806,-21332,13396,28358,27224,-3487,-4484,-32504,21571,-18048,1095,-10611,-16444,-28933,-954,-20875,29420,-28428,-29057,6733,505,-23820,-28961,32754,7074,7737,-26124,20718,-22222,-24381,24532,-27658,-32023,-23159,-28452,4886,-25934,-32250,29424,-11171,-20606,29005,9623,-22678,28187,12143,-31646,12883,-23133,-32068,21397,23110,1341,-31694,11812,-20346,-3743,32558,1627,-76,-27818,3316,26695,7763,18348,1734,-7753,7454,6923,-22498,-23726,-22917,1753,-18278,13371,-14893,-20772,-2878,13578,-12466,-20661,-12219,23470,2943,-14575,12169,1602,29878,11357,-26039,-27311,9284,8553,18072,26419,-23798,7686,26837,9725,-11558,32029,31242,-9122,8349,26254,1534,18339,-26730,27656,24830,-1161,-14034,20116,31461,-23472,-4120,953,30549,-672,6033,-13967,-22839,22104,14676,-27375,-18176,25938,11033,14690,12016,-21083,-6012,-1882,-2310,12240,-26,-1388,-21228,-16537,-19463,-22614,11691,4326,-4999,-19551,11856,24961,-2468,-24572,-29654,-13077,12562,21590,-23626,20446,-30672,-25239,21328,5637,4355,-30319,-22193,-8011,20702,13356,-6087,27616,-26074,-16592,20369,-5883,22057,-21687,-28554,-28548,10056,5834,-6589,-16601,19026,11654,-14887,-24981,-31265,26743,-9464,20993,12610,-739,-4096,25197,5901,-3040,20029,-5632,-21208,24326,18274,-20373,-31266,-26658,16781,-21885,5708,-702,-30101,20198,9282,30448,-22230,-15783,-23191,24925,-13284,-22234,6818,-12285,-23667,31552,-5745,-25185,2807,27891,2415,20068,-2070,-5239,-5360,-10049,-5557,23126,-4008,-3541,-13720,-21120,-5033,-10247,-15431,27182,2304,9923,1771,-23189,25603,8286,897,-32417,-10291,-24029,-2480,-13284,-15768,-28363,18734,-20663,-16976,28486,-22996,21187,14917,-16917,15066,-13362,30656,-14323,16744,-23451,-18658,18110,-27406,-6083,-23406,-5994,29913,-19745,-21977,18353,12850,-3607,14993,29048,32286,-6708,-5502,-22092,31256,-12154,-29994,-15346,-31999,5912,27895,5362,9487,5272,-1855,26380,16013,31387,-18921,-12908,16200,5734,8896,-30851,-9528,-15849,23751,15387,-10
847,-5251,10103,1323,15348,-26277,29218,8883,-17270,-19160,-26757,-25,15201,-11972,-2487,-13302,3845,12003,-31161,17910,-29484,27169,32462,-20666,-25244,-13548,26837,-10478,-23072,11522,-7099,4622,-9700,31656,-1345,20287,-5877,-28358,19175,21443,-26941,-28316,-6228,-20930,-15718,4050,16442,26946,-21619,-29907,-30839,-24119,-31086,17732,-16372,-19540,10467,-16060,11620,13699,1708,4027,21191,-22305,649,15560,-24747,-21371,-4341,21924,30070,-31876,-8780,2027,-1247,3867,-6600,-27804,-19350,-921,-15612,8317,-14843,-24953,-28168,-22964,23673,19447,19998,-15501,6169,10219,6237,23018,2307,-25422,26467,1373,-32097,26940,-7839,-26385,-3860,24014,30548,-21631,-15461,4676,31747,25100,30911,8966,-15808,-20834,-10941,26153,-19559,-18365,24993,4590,16544,2620,-14112,-28965,30905,-23799,18956,-18067,-6209,5579,20387,1268,-4586,-21994,25787,-2006,3720,25252,-29670,26610,-14084,-2714,-25241,30353,2092,-12048,-5433,-16594,28939,18324,-7209,-6918,-18632,15278,-15124,-10885,-30194,10361,-25862,-12838,-7801,-19497,-19721,-14457,-18647,-4742,5552,-161,-5523,19023,-11213,-29352,-29121,32603,17892,6871,28651,-14496,-5090,2848,4668,-18224,-2612,-6417,-28753,19803,-26051,12735,4810,-6355,16113,13990,25173,-6184,4733,17794,-29881,31014,-10828,-29587,-27488,32448,25791,-29675,22277,12797,-27981,-11268,-26148,2329,23007,1693,29869,24229,-23036,16376,-14426,-22293,31030,-10354,16343,-7231,4872,-25059,-19372,16097,-16587,22856,14182,28800,26686,-8845,2220,29533,10849,-2135,-5080,-7125,16318,23731,-20975,25688,28986,26164,-10449,29403,27853,-27686,1562,-3968,-2997,6790,-13389,21290,-27181,31207,16870,11619,-27808,3365,9702,-10777,18582,-7897,27754,29517,-28245,-22423,-25028,20261,4250,-9155,-29834,-15241,-26863,-17089,-29450,23406,-22353,-5499,9265,-5168,10470,-9085,-8818,-9463,-19557,21447,30732,29328,32605,-12898,17291,-31293,-24049,21328,23942,-3823,-29538,-25051,23629,-23070,9540,13548,15914,-16812,-26009,-11682,-18831,-5477,-6768,23499,-19243,15135,28944,25073,15622,-19774,4601,1846,31932,-293
28,-9141,-15282,-14302,-27384,6583,2754,-3747,-17107,25158,-14971,-1960,16784,27335,15017,18002,-5536,-23496,-14406,-6305,10727,19271,32427,5715,14300,24261,-22756,24705,30036,6410,-1490,29932,27923,24910,-22161,9402,18131,10112,-9908,17885,-22111,32599,15111,-8774,6329,-2303,-1991,-16672,-20662,-7227,22501,66,25014,8389,-2117,22102,-9104,11482,15221,-3761,2119,4083,30243,-1946,27617,11317,-2345,-21466,24809,-3566,-20854,14884,27852,-15498,-10937,3159,-7435,-6488,-27422,-29679,-14849,-17389,-8693,3541,29935,-4935,-23855,-18089,13837,-16519,-81,17685,17485,-27960,20079,19920,11078,20446,-31348,-27859,27724,-8720,-18683,-29242,18470,12332,-29836,11990,32487,-12833,21889,15265,-11284,25322,31788,7235,10372,-15048,-24569,14962,26902,-26777,-25318,-13810,6647,-20210,27606,13895,25234,13565,-13199,-17598,-30658,-1612,30895,-16887,8204,27924,-28078,-25824,13613,-32302,910,-9900,31626,-20122,24298,5149,8623,13023,19532,-20973,20888,3213,28505,17551,29789,17107,-10610,-9095,-23634,-17924,3339,17435,-6354,-31333,-20046,24928,-8683,6080,26891,10316,19851,19052,-11252,-17414,-25556,-20738,2499,-1953,9666,-5591,26391,-5386,31161,-19569,-21357,-18933,16829,-31048,18632,2054,-15251,-8046,-14763,14690,6680,24978,-31029,8316,-30006,4647,-1455,-2439,-16987,30778,-15405,29683,-3173,-21256,10055,-18332,11522,-26170,19915,4429,-27065,-19142,-11301,-27106,-31299,-19857,7551,-22754,-15410,-24821,27686,21376,-19963,5181,-13282,-6036,-1252,-24305,-4775,-21048,-1445,9365,567,-30848,25693,14641,-21023,14441,-17524,2575,-20879,-13008,-1917,-3920,-9036,-8344,22293,-20138,-29575,8226,32597,-23000,4138,-20896,29166,-5264,15240,-15278,-17316,-9584,17991,19070,8542,-25401,-17921,15688,26934,8367,8589,18888,7452,-14460,13808,13008,-30907,5699,-29621,-22205,-15550,-27841,-32604,-13805,-14104,8408,-14063,-7434,-5760,-12097,21565,27676,1750,-24865,7959,2326,12890,5788,-7146,10743,4347,16723,-25136,1903,3381,-22844,-28634,-32590,917,27370,-26979,6179,13579,-6423,-18498,27089,31934,8729,-9993,-18726,-128
18,-28192,29628,23042,-12417,-9055,-13929,-4251,1947,24921,-28205,29178,14289,6758,-22764,-379,2365,15294,-5265,474,-15069,-24963,-9401,78,-24155,-1213,-31520,31490,29702,3619,691,-1087,-31425,3543,-19614,-7418,-14143,-4454,13310,-28513,752,-27779,-9732,4288,-20098,-23800,4235,23663,-3844,19309,4008,-28373,-27668,-31524,32381,-7283,-12992,12143,32671,16960,24884,22591,29553,3058,-31102,-3206,-6232,-30585,-23172,10862,25666,-31921,-1599,32005,15214,-7034,21807,-30187,-3633,828,6043,9137,30375,-14585,13567,-20867,-17343,-1367,-3321,-17335,10961,-25356,-16778,-10788,11508,-3454,23965,-3427,10190,1713,-22252,30554,94,-9771,13660,6426,8144,9850,-6391,18549,-26388,-10783,31298,-12283,-15934,10118,4094,-22497,925,-31653,-9570,13085,2352,14087,29029,-31042,4289,14598,-30432,-5426,-8970,8250,-13380,-7114,24678,-14196,24068,18599,-11250,-27589,-18040,3803,19361,-26423,7841,-29038,-25904,19078,29488,23333,20790,28801,-28209,6809,15507,-14883,-20134,19730,2244,16597,-5972,-17801,9507,19126,-28355,28807,14561,29579,-7728,8983,-670,2403,-16390,-13073,2861,22163,-20065,-14234,1363,32068,20040,31269,29658,-21744,-6951,22732,-12517,-8373,-22808,-5794,-18084,16527,-20900,-15401,-17088,2751,-18922,20897,-20616,-11412,-32724,-5350,-30767,25173,-14899,-18415,26600,-19695,26720,-30209,-15156,-18444,26201,-6649,5268,-26300,-3405,-18638,-13109,24571,-30827,-30802,31667,29244,-10491,23077,18264,-3955,27993,8000,-1696,449,-20026,-20573,24024,-3007,9345,-29948,-28681,-2193,19584,14484,-5568,5063,10955,-23515,7235,19638,-13193,22752,-25048,18308,-15187,9953,19062,7779,12342,22706,-354,4612,21871,26871,-12488,-17851,22373,-24358,9525,-12322,-20983,-27803,-12233,9016,-5983,25717,-1321,-31063,-12272,17358,20497,21305,2202,5197,20644,-19801,1321,-23217,-13754,6214,-11897,450,-27409,-29720,24927,-20631,-28,-25477,24604,2648,2258,-3196,16304,26086,896,-8020,25181,26697,-32708,14127,-21668,-25690,24671,7331,26851,-31825,6432,-3922,-20188,-13909,13437,19748,22797,23088,8513,13908,-7787,-6317,-259,2408
2,-18459,25505,-9014,-18481,-14089,8036,12654,-31829,31548,-9268,-30791,-28999,24342,5947,-6838,9163,-10028,-22153,6295,6743,-7279,26723,-22907,-17438,14336,-20172,23770,12284,14363,23118,18395,-5247,-14157,-9022,-32265,-29154,24232,20949,28152,-2171,25653,-25520,-4009,8333,25493,9485,-13860,-27255,-1272,26389,-13794,-21736,2762,-27633,26943,29054,-13729,24103,-28027,-5460,-14924,894,24236,-2067,10673,7306,-7716,-8123,-25527,-30334,30067,3837,-11712,-29478,-4986,-31440,-883,30867,-26684,25749,-13980,-10151,2367,6816,-6847,-25912,16648,-17879,-6869,-17523,30210,-12197,-23947,-17189,27506,25947,-4633,1122,28498,-1636,32592,15016,10638,23259,20525,8344,21985,-6162,22121,-1593,23528,5021,-9386,-4754,11996,14738,15215,28857,3253,18871,-4541,26718,32188,-11586,5699,18829,-14004,-22417,30876,24035,-4569,25104,21696,22418,31019,28833,-4652,-29874,-18659,-26322,5704,30530,29205,-135,-9721,6335,13045,27421,31639,7746,5984,22739,6659,656,16204,24435,-13955,31910,-4757,11283,1854,27887,25019,26735,-5414,-29068,24242,-5035,-15336,7620,-30788,16876,17472,17460,-15826,-16380,32537,-6823,-5183,-650,-8716,15465,-29516,7391,-30752,-7,20931,-3179,19504,-14390,9256,2924,21463,30755,4405,25325,-29322,-32040,-19770,31764,9135,-13432,-6710,-5511,24577,-17419,-16424,22890,30131,10509,-21026,18338,-9655,10982,15167,-2431,30328,-8530,22778,-126,27281,25485,-24477,-20543,29342,-24503,22600,15728,-13659,-32591,20946,31335,12351,-25405,29274,6959,30765,23973,11632,-186,-7821,-14792,-5942,20308,-24584,18964,22448,-14248,22905,27207,29719,-31727,23280,2936,-18516,-18287,-16640,-20316,-11624,24787,28826,26630,-5644,28505,6462,30545,7298,25723,-5781,8795,9906,-2121,8514,1320,-5532,6966,-31398,-2629,-2791,-28316,-13305,6281,13939,-14758,-11482,14880,-18874,-21593,12628,-11559,9973,-31934,-27038,21030,-8781,-5188,826,25113,31090,-27251,6960,-14287,29827,10032,22934,1602,10056,-14836,11650,-10065,-13294,-5440,10369,7878,20652,29556,-21268,-1156,-24558,20821,15333,-2455,-12490,-19815,19926,-22229,30456
,2274,-30588,-21236,6144,-9413,9635,-19857,-3899,-24706,28516,-849,14556,10765,206,13447,-24250,22551,13212,20130,1278,6980,7679,-11483,-6908,-10017,25203,7020,14092,-19256,-5782,1307,-28131,1356,-9248,14695,7701,2970,6791,21844,-3129,-28449,-5499,27937,16828,-23255,25405,25248,-6323,10659,-2154,-19894,4185,-6913,464,31574,-4790,13831,-11896,24433,16624,-9678,-14700,23373,2494,11717,-9286,-6840,-12323,8254,32529,-29879,24405,20372,-21773,20505,-5273,5232,21636,-29044,-1808,-15468,13497,5569,-9177,-2795,7701,-19786,17735,444,346,-19667,4523,27679,8977,-29321,10834,10630,-28028,10640,17700,-12385,-27400,-18536,9809,25335,25516,8472,18245,14103,-2722,6460,-31515,-8001,-1465,29369,15161,-25512,19142,-22461,7870,-1352,-17916,-17384,-12969,2967,9745,4312,13051,19128,8537,-28791,16473,32373,1181,-1153,-1755,21386,-24871,4474,-31000,12620,18866,-12916,-20001,5839,21925,-1772,32261,-2323,18562,-23465,18045,-22970,-3562,27133,-26129,-15038,-14846,11656,-15443,31473,-9754,9083,-21299,17960,226,5439,-15765,23819,18741,-18896,-273,5822,28083,-22223,27026,-5562,26252,-7481,-21022,-476,-32101,28815,-19744,-4269,-10647,-19114,23819,-28021,-14269,15535,23666,-4751,-12846,24773,-19426,13534,22758,-24743,-6460,-3759,4842,11419,-13075,-9883,2835,24148,24707,10085,-477,-28019,-24205,-27029,4256,17732,-8831,3106,-16936,-2091,25745,5935,29663,31210,-31594,-27893,-32228,-13644,-10566,10996,21989,-9034,-20587,5796,17538,-16917,7829,-11619,18158,-17320,-14087,-21388,-26703,-3998,6416,11051,-3660,-9762,-12183,-17774,-20642,-25377,-22079,-31971,-2603,29354,14540,-13218,14803,26175,-16962,31481,9712,11948,23034,-15729,-11767,-5042,-20608,-14683,30470,441,-9397,-13470,2297,-4575,4142,4060,28095,6025,25167,-7738,-10087,13946,-30565,-9245,-28680,-21856,-23818,-1117,12665,32574,8184,-16601,29267,141,-6515,10172,-10326,6815,-32183,-7244,12179,-7594,28393,-12289,-28008,11841,-22485,-6440,-6055,14008,-14755,22490,-14081,11330,30242,9939,-11053,1871,-12326,-25574,-5838,30088,8212,-13573,-3480,17349,-20
353,-8001,-24284,7092,30109,-8839,-14262,-21858,14765,1199,16865,8575,20906,14700,28551,20449,15953,-8267,24569,32612,7551,4069,18995,-30084,2691,-29800,12514,6135,32176,31811,-2723,-26737,-23582,28781,-20392,12086,12781,31428,15950,8982,-15976,-31754,8261,9381,15951,-2892,-18228,8204,14279,-12615,-9569,10723,7921,-25646,3093,-11249,-30764,-7582,-6430,-16628,-28507,30310,26044,1391,-434,8412,-13615,30393,19748,2366,-6520,1593,-14298,2617,-21673,388,-20528,-22525,9729,13079,-68,28116,-7541,-18389,3256,-21032,-11685,-979,-30488,15896,28767,-15524,24953,22588,-13812,11922,11614,26239,-17860,-29803,26284,13537,-24470,-30322,-4377,22295,-18847,9817,18054,-6540,3702,8261,4299,4558,-21827,-30224,13613,-2696,11030,-30993,19426,24000,4257,-16201,-14027,528,25912,-15900,623,-16221,-18516,-23491,8565,-10076,7011,12763,12033,-12861,-7078,-31589,-31649,-18106,-25954,27341,18678,-30881,15371,-2839,3006,24170,-8196,14210,-24607,7654,16710,31735,-17875,-236,-18347,21718,15758,-27501,-23967,13854,-20860,2368,-22650,-18860,2839,-14465,22449,-11242,23598,-1419,-13455,-2553,14510,24149,2833,-9870,-15755,28896,-14515,-16440,5368,-83,12559,-11595,-3639,26724,25968,28194,-9572,5969,8040,-29140,13740,6912,-17925,1389,-23335,-29481,31466,-14759,17317,30714,-30094,12335,-6040,-12965,12988,4615,26891,11475,-26116,30346,-24775,-3201,1179,20052,32438,-16796,-29376,11003,27595,18102,-13226,-4586,-24491,29329,18161,-6355,20156,7203,-23062,1272,20761,27619,27707,15941,-11989,-31246,18670,31222,29285,31156,27918,-26038,-808,10851,-2764,-14322,-13495,9665,-21540,23052,-6819,-2053,31734,7288,-7539,-1201,-29124,-7057,2713,4210,-20903,11760,-866,32403,-788,-27205,-12760,-21061,5508,9907,-18642,27987,9680,14780,5267,15209,6869,-25666,23832,28827,-19797,7233,-15747,-27686,20017,-13124,30847,-29840,-8512,-32506,13558,6796,23240,-22720,-6833,-25046,-6002,-12155,-21974,31488,1658,15096,-2005,23860,5314,-22280,18309,-11863,-13271,9156,15362,-12742,20273,-20891,-23965,21777,26853,2826,12907,32061,2100,-18002,
8714,-28767,9559,-19042,23381,26496,19307,1473,-18075,-10320,9940,24142,28745,-24943,-26334,-17236,8968,1402,-13380,853,-728,23798,14124,31543,-6416,24840,6394,-1782,-8202,30429,-13116,-20664,-9645,30425,-14742,-8283,16600,20229,1290,5358,-24476,6915,-17938,-10392,-7013,26222,-9410,19906,18343,8383,-18608,-30180,24777,23593,8924,-2260,24497,-26846,16389,-19210,-31202,-10332,13886,-32610,28748,1114,18044,11651,32629,-19434,21662,-32614,-5807,5593,-14835,-2735,-10040,-16341,-4342,23796,-4249,22152,-22380,-1474,8231,-4306,-1058,1547,13044,-21507,23387,18245,28559,26049,11822,1488,32221,-12768,22018,20129,8706,-12531,22537,-13731,27283,-14011,17327,-15169,26188,5741,-12347,22670,-17716,9600,12945,16923,4723,-17075,5956,20346,-3002,18318,-32521,-17948,-8281,-17787,19430,-5337,3360,-2059,22251,23898,32144,3851,23625,-14469,32713,3592,-16702,-16827,28097,-15028,29127,18065,30620,-14101,-32588,3726,-6788,6385,-30299,-19796,26826,32425,30490,16550,-5978,-25955,-20556,-1202,-30148,-11110,-24358,31724,32164,-18260,8557,-24064,30968,16680,-6429,-10348,-5180,24750,26482,-9968,-27047,-1360,-27972,1671,20040,3546,17251,-25275,10853,-5208,10583,-9703,29317,17287,-27479,-7239,-30799,23820,21498,20859,16642,5300,-3688,7092,17714,-32624,-4930,2620,20326,17932,262,17129,5968,30780,-15984,-25981,3329,30373,17525,-27951,-8560,8816,-17023,5828,5089,9171,-2078,22020,10606,1814,27676,-22776,22081,-32471,-19902,29523,-11713,-20427,-15332,8797,-22496,-24407,-11,-5051,-19801,-13536,8100,9879,-31169,2308,5080,4579,2258,9293,5941,-23008,-23336,-14600,7450,-30948,27584,24232,-25412,-18674,24805,-17561,25116,-26044,14318,20816,20315,-31195,-8783,-29140,9611,-32636,15491,27220,30555,26612,1494,31385,-26877,16442,13100,17259,20572,10453,25715,-27043,-13241,-23702,13377,-27102,-28061,-20073,129,-7554,-30559,-16175,16340,-13947,17204,-3330,5280,2554,10231,1828,7107,-646,-17737,550,12566,9481,-25585,-3314,-28729,-5874,14435,19035,5209,-24272,5891,-16695,-10498,-31272,-8240,-11894,-13613,-8166,13588,-26
036,-10899,9029,-28729,-3861,-21456,13428,5331,22060,-22587,-14808,1744,-30167,7956,-13957,-15617,-4736,-10498,-13530,-21804,-3356,13395,12148,-7651,31976,14091,5362,-18539,-22370,20753,29709,22853,-26968,-18202,-25034,-24747,20639,-16468,29898,30151,6193,5920,30168,11626,-28739,-19499,24855,20150,-25914,-5761,-17352,32686,4619,-10677,14071,864,-15682,-28194,7509,-6488,-21762,32343,19841,-12148,10632,-4581,533,-11695,19435,-8158,203,-27511,-25828,-23824,-2232,-3887,17921,-5187,-28132,-24113,30351,21470,13028,21399,15844,1204,-20972,-21624,30593,-2484,10784,30930,24000,-9447,11976,30621,10715,26071,-3309,-17698,-24056,20447,3435,-5495,-32540,17590,5658,18817,-6604,-3644,18012,-32594,-25731,4799,20993,3019,27684,-4973,9008,20035,-20076,-1274,25273,21626,3091,-13518,-26346,-27423,32716,18981,-11825,4983,-14542,-13070,-27392,9520,-21689,-2589,-8148,-20887,19642,-25969,-12602,-28810,28834,30566,-16419,7514,-31113,13283,31208,2430,-31475,12145,-18559,-13322,-17688,4947,2034,13691,-29089,-11573,-22243,-30691,-15585,23288,-21997,-1317,14669,16079,8944,14611,-22419,-27702,-7748,-7983,-21501,-29722,-5059,-11565,-8101,26919,30521,20987,-31929,-14311,-19627,-13024,-10401,20824,-13479,-31468,8744,-16298,-22690,639,25667,-7670,-30248,-28847,24250,12998,-29271,18668,24451,-15362,19707,28803,-30556,-1537,-16510,-14505,-24680,-24836,-15410,-17584,31432,-7592,-11774,-28870,-26374,17445,-3933,4776,30863,-4548,1949,20116,-27619,24384,-3666,12111,1166,-3785,-12892,-30658,-14715,-25416,26150,-28969,-30605,32036,-30015,17669,-12797,19659,8752,-4137,9870,-19042,16929,24916,15758,22023,19397,-21955,-31730,-32252,-19173,32236,19708,3117,-14092,13079,21745,-15206,6058,-26891,-3582,-7681,25600,-13785,-26857,-22512,-8537,-2655,-739,-10468,-24141,26801,-14363,5119,-6310,-31355,-12978,-8644,19492,32525,21817,-543,9810,3257,16039,29202,3713,-6262,18776,-7664,19212,26259,2610,-25398,-4237,24273,-14477,19824,-24786,-24123,-11263,-27372,13205,-12957,18682,27130,10919,-5103,19739,20803,23102,13288,202
23,13397,585,11159,26136,29250,13139,23505,-21340,-4311,25008,-4195,-29344,-23782,29215,18425,21496,-23540,4415,-8817,3660,-32573,-9119,-16085,6581,-10573,-29253,-5212,8705,31649,-22692,15555,-17424,7708,12303,-19984,-4427,-25330,-24610,28997,-9315,-3822,26535,26630,20150,21108,11097,-20475,19799,7650,23129,22463,6661,-21734,-28434,-17058,2536,-20456,-10684,11766,26680,-13496,-30216,4500,-22568,-5332,330,-32521,22090,15816,18009,-19920,-14682,-23066,23205,-25615,-1662,-7811,-6695,-24127,-26891,27279,25258,2489,11601,7921,-10635,24463,-6118,18702,14941,-6048,-10386,-29729,31747,-9357,1045,-5500,30460,4239,-31688,13576,-9840,6893,25444,16972,28744,18770,-31811,-32538,-2758,9914,5864,-11480,7210,-21589,-5029,16195,-14653,-28364,-2107,8034,-13824,-3952,-7963,-21469,-24776,15853,20478,-20479,-4938,-24738,29018,-15341,-3965,-32284,-29669,-18852,-13466,2423,26398,-21265,14526,-9216,6457,-23679,-22281,19207,-12935,32396,-18842,23008,14314,5837,30592,26412,8062,1442,-31279,-7989,-7482,1983,-29331,-5914,17254,21646,8142,7564,24374,-18478,-7455,27983,5484,-6151,16509,-2235,-32494,30564,-10927,-21654,28840,-19284,17531,-25653,5411,5534,-29544,-30374,-21983,-25240,25406,20714,-19400,-30769,-13420,23115,-8997,-18409,-4582,-8244,-15876,14737,-20399,-1645,13608,-3602,-16076,9252,-20567,-19422,-10760,22137,-18340,11648,-30365,13894,9379,15050,6743,-13426,-3735,32540,24682,-30444,5436,11886,27196,-21746,25716,12000,5775,2547,30874,-9387,-28856,18274,-5157,6381,-25194,-13688,-30975,22232,-22163,-8722,26557,25329,12072,-21514,23305,-5115,-32498,23779,-15890,3376,16565,26106,-30661,-8489,-11428,-19908,295,-20736,12140,23265,-10395,1387,26594,9868,-1107,18215,-7666,-23789,-4502,-3663,32072,-24837,3253,-32449,-22180,-24826,-22191,29751,5016,10865,-28352,6922,-10747,-31972,-7860,25642,-20227,13588,12484,-31026,8437,-17276,-6678,14483,-17383,-18302,1722,-19422,240,-9931,-3096,-23878,-20026,-20235,10233,-25980,27456,-1912,17999,-29995,-23619,30554,10763,31559,20264,-12006,11182,-31107,10688,
-7840,-5228,-6965,-11169,-29668,-4884,26188,-30769,-22469,-12444,-17961,16203,5026,21963,22523,28421,-16170,32142,-20781,31209,22500,29372,23939,-32382,-29865,23524,32628,-17456,4390,-32177,-14787,12738,30767,-14058,11116,24,-25958,-552,8249,28603,32033,-24379,-32179,-30992,-426,-12924,-12603,-22274,-13117,29126,30698,-845,29967,27407,-18002,29760,-22273,-13581,1520,9693,-15394,16978,29696,30812,-12446,-10096,21654,3834,8138,-990,27510,-14679,-23212,-9643,2305,-26837,-8897,-14443,-10091,-16159,-5042,26751,19577,30023,-16943,21863,14187,18637,-15935,-5925,-13638,-21870,31744,16064,14415,-14074,-27383,15546,29588,-26779,7826,8072,-24787,-17278,-8090,-18012,-21246,6585,9057,-25185,4383,11726,18347,6779,-19031,-1904,20849,20380,3474,272,-18056,24932,-29476,18114,11996,-12692,3225,25990,-21673,-8494,2321,868,-23562,-9759,32426,-29043,-9279,-30760,10186,4411,3043,19466,21909,27299,-17532,21182,6502,-4637,26572,-18971,25975,20311,7812,-8211,-27879,-18007,13618,-15423,-6917,-11488,-19555,-19248,9678,-16008,-15492,-23487,30283,-12959,-28529,-4987,-27750,-5859,20901,25232,12432,-12215,13115,-19552,-5810,-3800,-29219,32207,25159,4172,5938,-10611,7763,-24438,-16516,-25480,-24360,-2176,27866,-9593,-14370,-5907,-25590,25716,5040,14111,-22931,23997,-5941,16631,2420,15018,4966,-10453,17486,-8279,29419,21267,7393,-7400,27475,2480,-4023,-10016,-17919,3877,-28628,-28071,-7578,-3582,6907,21092,4966,20763,-8593,17893,-25872,-3434,-26117,-25056,3149,29879,-21344,4298,-25093,10243,-15795,10704,-7464,-4538,21554,-23346,20844,-7088,15624,20327,-15357,10339,-12473,5223,30855,21,15566,29030,-23707,-22270,12759,11059,-26175,29332,-2890,-5875,19452,-29399,-17067,15144,29117,-18568,-12740,-10084,25867,29580,-8834,-22552,-8981,-22641,3425,-30277,28898,-32269,-6181,-16659,27642,-13912,-29312,12160,5874,-15849,-20626,10658,-6316,8025,-19177,-12891,31510,-7496,-26511,21259,-3525,-32534,18560,-2021,18014,-30604,21997,-24421,28739,-17398,-2969,25922,-15647,-32536,17388,10559,-16530,7834,-10495,-3941,3
1939,-28158,3838,-20415,14842,28336,8871,26718,-11779,4117,32160,-6810,-13060,19173,-20232,-27261,-24006,8133,18581,-32074,13574,-20803,31447,16604,-25978,-9227,23836,-4249,-4825,22872,-16382,-12395,18476,17302,-3777,-19756,17117,26920,10773,-27005,27735,23447,-4257,-1709,18161,-4188,7889,2748,-6266,32655,10431,22874,-16559,-11982,-8206,-1626,-25139,-10753,-14672,19833,18106,-10066,24173,-30647,8992,-31073,27667,-12750,-31385,6720,691,-31776,1718,32012,32358,9628,-4923,7347,-23390,-14315,-1804,-11889,-9330,-3032,23751,-22758,-22940,-28752,14741,-16589,11273,2294,-4729,16190,-27371,-12166,-24759,-22789,-14252,27721,-28393,9409,-31334,-16894,-16058,7933,-29830,-17027,18522,-21094,10161,5052,-25637,32158,4219,978,30386,-2430,-24958,-25785,5906,-25637,-9478,-16281,2687,16016,7643,5510,-16196,-18085,31926,-3296,-8779,-16675,-6958,14405,-17764,-9811,13548,-21678,-8449,28657,32377,-16071,-25632,-21684,-31214,-4512,28425,-22086,6884,-27827,-14523,16116,30469,-18505,-27189,3316,-9233,-7086,-22558,-29146,-12024,-5973,-22336,13515,6404,24013,-19190,-10689,-14757,19213,19040,32161,-1156,-11207,8283,-30895,27820,17960,7408,31120,25532,-10280,12989,-21554,-15544,17563,3311,-32601,-32080,31433,16876,26449,-16471,-23550,-5902,-268,23064,2621,-23296,-10165,3187,3352,11491,-24107,-31599,-1919,23517,-6781,18823,2904,19977,25018,-19868,25931,4469,32459,2377,1329,4479,-6777,1625,16973,-28099,14917,-17223,-27178,2394,9116,12250,-19048,-19109,11923,4238,29666,-9556,4731,10875,9110,-4223,26368,1949,21847,1350,32484,-14958,7744,-8152,-20781,27605,-23742,-23212,-21055,-15623,-14870,20668,3475,-28745,8225,29075,-7527,3263,9296,20915,-32696,-12599,-19156,10577,11634,23017,17564,-927,-28064,9120,-25241,-12265,-9835,15330,-26789,-29382,11029,26598,24397,13730,29866,-11835,5506,4690,19699,7521,-13326,30510,-856,-19332,-8409,29744,-30615,-24981,1960,-30559,1510,31243,-23709,-1449,-31440,21352,-13115,19020,-24807,-10057,-22065,12485,599,16000,-379,1863,-6501,13432,-25830,7817,-200,-9327,6155,26433,
-1595,6043,11992,327,21355,-10974,17808,-14342,7324,-5831,5108,10036,-7372,16957,-22154,-17915,15502,-4775,180,15549,29111,-9977,-13347,-17357,24186,-14885,13643,-26120,-10346,-23218,-19740,4798,30199,-29183,-25132,-20602,-29762,22571,-19184,9742,11022,15194,1931,13094,8706,-23275,-18896,154,19181,2726,-10035,-5750,-29959,-10178,-24513,9382,6502,1352,-3771,11236,-18772,22641,29100,23680,21573,-29418,17256,-30203,14745,-23057,11382,32491,14040,-31672,-18077,-6616,-2925,30618,-8813,17534,26984,611,-31808,-16995,24640,3780,1439,23915,-26,9026,-4587,17827,2508,65,23700,-12286,-23546,27206,5307,-6874,12350,11808,-9559,31176,-32683,20388,14914,16705,-27982,1856,294,-5037,-3863,-12375,-18221,-18700,-23259,-13972,28079,16916,24379,5488,2644,-5495,-23111,-9642,21918,-16530,25106,-1408,-5024,10453,-1175,-23615,18062,-25248,18376,16360,24721,15885,16034,32760,-13458,11671,9939,14278,18728,4480,11229,22977,-28768,9464,-871,27818,-7525,-26207,-20918,-26514,7682,-28620,8304,-773,-20149,22528,1113,-22913,-10821,9585,7027,-16909,-11460,-28567,-5041,-26536,-618,21676,13634,12670,4469,7477,-32704,-21188,277,-29055,28009,23675,-23721,16529,30722,20020,26665,30725,24819,-28489,-6521,-5760,-32023,-13480,7344,-7977,-23944,-28699,746,17256,10574,6681,-9234,-9103,53,6694,25896,25357,9087,27739,27850,-1465,12696,24200,3943,-5986,-18946,-24979,17015,-28598,9452,31237,-24557,11903,-30432,-25254,3747,9938,239,-15423,-28891,6124,32181,-14553,28698,8972,-1306,-14714,26499,-9755,29228,26529,-4314,1847,-21584,16870,-31672,-31756,-9242,3176,-11158,-19387,-1563,-4860,17072,4186,3757,19668,2059,24824,1124,-21965,-13098,-23472,-26817,-17705,-47,-27840,7283,28687,23117,19547,4330,27933,-26048,-24564,-26390,-2695,-30707,-12416,20854,10099,-1862,-1745,-3111,-27608,15889,-10196,-21194,-15842,2323,262,-10756,-24548,-21516,14874,12934,-24682,-11071,2602,-19672,-21830,27421,2258,1197,3693,-27139,-12605,-7267,13075,-5753,-19756,-17782,20533,17437,19689,-32033,23912,4896,-22618,-14406,26570,-7937,-3366,32074,1
1269,22073,17948,16111,-27575,23624,31683,-28898,6700,17960,16151,-31778,11690,4040,-14599,14583,-5112,-14976,19384,22726,4660,23571,17224,19496,-1347,-16780,-4426,-19402,-30183,31487,14442,-24922,11499,14986,10221,9343,-29056,-13858,27894,-3625,23509,-30484,30943,32661,17877,25054,-15979,11356,-13468,8506,-14586,13634,-16273,-27758,-30663,7604,9313,17844,19086,26594,18225,-3500,27823,-23217,-19551,9959,29979,1773,182,29480,-211,12252,17622,5952,23789,28278,24859,15066,-20159,-20767,13826,-15214,838,32559,-10422,-6617,21749,-31482,-2011,30091,31771,-22110,27245,15355,-29480,2338,17759,16127,15880,10550,-24565,21525,-5818,18839,3987,-29299,28282,-17800,-19497,-27458,2843,-19164,8171,-21227,16682,-27220,-9796,-24159,30416,5523,-13777,-7057,25919,-10091,30992,8646,31020,-1404,-210,5034,-23535,-21892,32412,12627,-9113,17935,32570,-4286,22922,-26527,-6030,32004,-17287,32489,31711,14878,-26692,23893,-11819,-11572,-8289,-23317,-7405,-9092,11080,4963,-25856,4241,-8605,2689,-4593,22286,6750,-20163,13112,-15027,-2796,17690,-8328,20049,-23288,15856,22831,25851,16054,-16469,17542,-5300,-2780,12031,3216,4017,30196,18872,2676,-28675,27101,13481,-28102,-11095,-4467,-11999,-17626,-9117,-16711,6754,-27087,24091,12726,12941,20157,-17922,14890,-31332,19345,-15660,20276,12881,17960,-32507,-18437,20243,14701,1497,-14246,-10824,22160,24136,930,8971,-10426,18384,-29532,8862,-23557,23962,8670,-14126,9494,909,-2,21327,29368,5733,-19788,29260,-3357,-26073,11139,-5611,2378,30009,-27823,-18265,-10850,31546,-31875,4381,6823,3611,25404,10636,-16501,32531,-11070,-20135,-8456,25171,-30248,-17672,13595,-12206,-29980,-8430,-22394,-23118,-5644,16536,-3583,-12105,-18860,-6066,-9650,13151,10969,-13681,28231,31548,-11577,-9268,8238,-7810,25754,28787,-29958,-3045,20499,-25180,15373,-16102,-28523,7047,-3352,-14089,360,-22230,-20143,2991,-8698,-22621,-21444,-27713,-29957,7315,-3851,5809,-11715,4808,-12480,24548,14154,-24426,-13294,27081,718,13996,25948,28256,-16979,392,9095,-17029,23357,1724,-16163,-25299,
-11198,-28970,-27712,13795,-26514,9088,18756,747,-18385,19620,-32700,13613,-12314,29852,14975,4291,24390,31738,-7808,-25034,1961,28371,204,-11601,-1405,-8280,16769,-24148,-5838,6965,28034,8762,8604,-14657,5060,-1239,-5978,-9858,-27075,10521,-2508,30213,8677,-17481,-22705,-17573,-15383,18679,22341,-31271,-25191,-31314,-96,21828,-3806,-863,22840,-6872,-28888,29785,-1709,-10209,20068,13243,-11767,-28804,-6370,-31523,19083,-10741,20039,-22853,19486,-20274,-17541,2781,-31046,-9791,12627,-22906,3812,12337,-30161,-28187,-23092,-18526,28812,-16849,-14242,-3428,-246,30389,6845,-29706,-177,9119,-2129,17890,-6911,-30964,11673,-27271,-10006,-405,-30658,-17057,1202,-12787,-27305,2510,31423,27465,-4598,11454,-21764,-13900,30445,-29569,-26953,8355,-22648,-32479,-9209,-14537,-13542,-28447,28506,-31034,-14689,-6029,-9036,-9079,-26971,17726,-14398,24655,-10886,31877,-13514,-29293,22712,1711,13278,26503,14060,-29162,15882,-25403,-10859,-29172,23921,-14157,-27660,19774,-12009,13513,9817,12693,7192,10769,6540,-11530,-3039,-26401,11083,-14970,30015,-29379,2736,-3140,-3260,16312,-18955,26995,420,-24555,-6004,24956,24650,-21839,14636,-24418,22527,32345,-12056,-23611,-5970,-30252,-16468,32262,2582,-8359,23238,-12326,2695,-7539,-19616,16076,12924,10652,-6804,-11615,12027,20836,-15091,4021,-13215,29866,27258,18620,-1045,-17676,-32261,-29474,-289,25423,-4568,3723,19806,16230,16858,-18386,24459,-4483,-13677,-26415,-7032,13322,15330,-29839,-7560,1613,4305,-9147,28752,27673,-8752,8366,-16981,27872,-26383,-29822,-28532,-3741,-31407,-4779,19887,-25234,-10957,-6604,5256,21967,13310,-22578,32039,-10965,31552,31323,31762,18915,1906,29577,-6660,20976,-24566,-23860,4819,-31102,13657,-5515,-2147,980,-13241,12344,-32731,19940,23694,2676,-32049,-17147,-5394,21918,6753,-13706,3275,-17685,-8052,-1974,22152,-27147,22116,16134,12668,20976,24170,-1497,11495,16610,10838,-6850,-3068,-924,-31967,-1281,14401,10013,12092,16890,-3827,-26416,-23507,-3637,-28119,-19117,-14852,-11803,-9081,-19826,-24894,-11481,-1886,169
64,-14708,3316,20529,19398,30540,-26298,29884,19234,-9080,23834,-26239,14221,-3152,-23157,239,23949,32525,16133,-8006,6304,-2502,-28724,-9383,8995,29641,31148,12773,-32398,17475,32548,13673,-26139,725,-21400,19259,-27823,-12391,22080,13572,-30336,-14329,-26805,-14925,-19763,18032,-8271,-2984,-11679,-15917,7034,31783,28821,-18753,29805,1766,12192,-24942,30569,11880,2980,3848,20337,-26462,-12871,4599,7564,-18920,10907,-26680,-19690,-4836,28847,-25092,-17848,-27515,-934,-15690,-14105,32761,29447,17696,3745,3994,-20421,1898,-15236,-3061,-9595,10749,-11127,6657,23494,-26493,-14577,4684,-22956,20882,27468,12554,-11602,9936,-21467,23797,7917,16239,-8969,-29792,-4945,31142,10242,-316,-12097,28434,-23938,-20470,-28967,-21324,9598,-23787,11796,25247,-22830,-25416,16536,2750,22986,-22421,27000,13979,-31048,27919,-22343,1409,5718,1352,5573,22037,-9698,18616,18762,-11004,-11881,-32271,-32525,-8775,7394,7115,32120,-16542,9285,-32337,20431,-2208,7567,-11544,-14150,27409,-14581,-30272,-24378,12728,25491,-17622,-21103,5814,18700,-7812,-24993,-28390,-18448,2915,-16674,-4006,6598,-31197,-32527,-8841,19554,-28073,8417,-10780,16698,12927,-977,5196,-15789,12874,21650,1928,-25323,25753,18878,-2048,-13490,28900,-30029,24161,-14697,16752,-5049,1114,-28711,-26107,-28722,-14756,-339,26072,19746,20553,21866,-27517,25063,28258,13105,-2421,29928,-24013,10412,18385,3727,23424,17154,10384,24759,-773,10769,12727,3348,-17958,-4827,-2163,372,23078,14974,-9285,-1771,16265,-7505,-857,23603,-20481,30749,-16274,-21720,23641,13156,-21280,12870,8469,-18775,22728,18669,11196,-22297,22070,-29063,-1027,30457,6520,-6101,-12176,-17877,30609,-2490,18513,-20924,-12325,-725,24663,27217,29913,-1453,15446,-16888,3289,5398,8540,17517,-20880,-10365,11362,12807,1987,21178,-124,-3920,24144,-19941,14,11341,6396,2886,26367,7520,28706,22720,784,-5678,9027,-4064,-29923,-5747,-26809,-13987,-3312,-14026,-3643,31083,22215,-28177,-27200,-1558,9545,16452,-29358,504,7721,23939,10558,-23619,23724,4205,17606,-24483,-4726,13619,1939
2,4486,7456,22900,27867,-30410,16875,-32099,-30064,10868,-23755,27062,3015,-12048,27270,14581,22572,12747,-30811,12955,-15603,-2340,-26919,13572,-15662,16981,-4880,22456,13557,27367,-27561,-2842,-13419,-5164,4210,14995,-3566,19758,25760,15492,10673,-24471,-17821,-29389,32047,-25114,7184,16063,27295,-17824,-22953,-28779,-24947,19410,19354,-25280,4125,-31612,481,28908,5327,17570,11477,18589,-17082,4959,-15382,-16092,-17612,17841,4911,-10582,21594,-12394,-2387,3768,-5105,11099,4038,-24515,-13703,-1494,30442,-23124,17703,-28985,12379,-27633,-14678,25444,20024,26093,-28634,12208,-13087,24046,-22689,6130,1371,5641,14051,-15907,15101,18648,20841,-25420,26459,21815,-19177,-32735,24757,13234,-25110,-17924,6986,21817,6026,-12022,29232,26602,23005,10279,3325,14602,20313,32443,-1771,10507,1283,3478,-12880,-31546,-30071,-20433,-27241,-257,-11506,12809,19030,-26457,9653,1107,-13654,12332,-1052,-4040,15835,22902,23404,-27530,-31160,-28999,20370,-21991,20796,4756,16838,9953,-4250,-22082,18964,-3606,-16202,17025,-18580,19794,-18552,21879,-9981,-7299,-21231,-214,-11870,3870,-10563,24577,21559,-13064,23165,-4902,-11634,-19478,129,30804,-31797,-12316,26124,-30997,-21670,5071,27270,6328,31621,-15955,-13818,30201,-19963,25797,23679,-10918,-9356,-17606,15787,19968,8495,19757,-19731,-3918,3593,22025,-6583,17225,-4777,-31284,-23004,-1114,-12037,10666,-24598,10382,-24245,-3532,-15465,-12685,-25669,-8418,24295,-12382,6532,-2353,17025,-22358,-11403,-16444,7032,-17109,-1572,-17569,-23544,25213,22484,30118,-10892,1366,-15921,30458,8938,-28401,9067,8670,-7234,-13481,-27629,10074,8908,-13393,24903,-21494,-27784,2732,-20796,19236,-6717,22869,31457,14988,27147,-4329,22404,20863,12250,30315,-25467,32035,-25037,-24876,-8751,15165,12430,10852,-9056,17344,9769,-11944,12237,-26423,-7048,22153,-26284,4054,-28334,-416,-26235,4001,-27578,-6093,5356,16205,23495,2105,24360,27456,19592,-15361,16960,-24568,-31366,-14697,-4769,-4806,2270,-24181,-10982,-14351,-24902,23332,-1602,27055,-30297,-155,-21948,26123,2229
0,-26683,20456,2726,-7677,15877,3808,-29910,24977,-23467,14890,18715,-11476,-22264,-13832,6047,17358,13713,-5668,29717,-1530,15722,28712,-3606,-18908,8863,28267,12020,-25093,-19991,-15166,-23879,3546,-10849,-13713,-22445,9210,6885,17320,17645,26145,17855,4516,-27367,-14300,17180,-25954,-12929,30250,-13675,-31666,32551,19891,-23985,-933,10865,4853,17872,-3946,3227,9698,8079,-7310,-12611,-27255,-14518,-14005,-951,5046,8341,21455,1905,26587,-22243,24309,-11494,29195,11008,18859,6774,-6559,12748,-13840,-31924,24939,-11225,20109,-28050,22843,20827,28994,-31767,16600,-9832,1614,-12290,24838,-26277,-14803,10176,6383,9883,-9615,13674,16622,-23408,-16591,13905,-15740,29168,-29059,20250,-11637,7254,-19835,-22735,-16897,-31821,27795,4048,-23403,-14833,-2601,-23718,-30068,7117,-15672,21875,-12437,8412,-19417,11355,-26873,10889,-20216,-1600,-30878,-9921,-23051,3269,-2380,16200,1303,-24768,-11695,29388,-31771,-5867,-13256,-12622,-6263,-3668,-14828,20769,-24033,29781,25544,-24195,-1344,14221,31057,-18900,-28080,6891,21523,20096,-5430,17744,-26555,-30826,25752,17152,-14789,-32671,-26057,12379,31798,7255,-17382,-16260,32684,-30509,16129,-6769,25071,22576,-12705,131,-24954,-9497,-14770,601,12394,-13856,-23407,-17911,23531,-2659,27669,-21093,-28365,-22512,26034,-24987,5406,-17782,-17445,-14689,26013,12537,15093,-19987,17543,-29002,-8901,-486,7814,-32283,-22785,9925,8730,15119,10782,23649,-19617,-2682,16686,7911,18453,17302,-22833,19615,2866,21680,5889,29175,19069,11282,2339,30528,8638,-13713,13184,-16661,17994,-18916,6323,-25971,8349,4525,7737,-32692,24117,28190,9322,-19007,-17865,5177,-3016,-8753,3866,-17362,-22063,-13147,-30046,2890,6677,-5029,30956,-19399,-28682,-31084,20282,-14005,-527,-24248,26646,12857,24952,-19538,-25879,8280,-14025,-15460,-15877,-10601,-21750,10065,26171,7575,-12597,-32333,18004,-19557,11768,-5244,-21447,-26597,-18884,-14002,-32512,-18269,-20991,-32066,-5814,20836,-4720,685,16552,6340,361,-13902,-4110,-13589,-31160,-6010,-30862,10830,9848,-29683,26516,25284,-2
3781,2406,-29444,813,-4491,12240,-30822,22105,10646,-30257,-21001,-27694,27394,5118,7757,28458,-25533,22584,19798,11994,20100,-29393,21880,23139,12835,17771,-28770,29028,-27915,-16820,4424,-11860,-23797,-12314,-25629,11913,-13853,-9953,-13714,-26614,-11201,-29858,-6121,-30018,-21279,-16128,-24022,-23546,-19991,28557,-19054,29493,11586,-17617,9745,-243,5078,-455,-12014,2025,-16890,30706,948,26283,-2919,-2270,-5748,-2741,32417,-30513,9539,31967,-22899,27508,24519,29875,-24848,25966,-28065,-1451,25921,-7470,11254,6888,27826,22015,18643,-26683,-13440,26877,18188,10948,23339,-19760,-17942,-24208,8232,-6987,30882,22266,-14428,12779,-31976,-8198,12567,14029,10095,-31946,8994,-7231,23201,2106,31277,-19083,-23547,-14502,-11277,15290,30958,-18292,-13554,12841,32243,-13619,-28926,1682,9134,16316,-6567,10234,26609,-2210,-29081,-15906,-23700,3206,-5288,32668,22649,-23207,17881,-19519,-10506,2582,-30835,21801,-7818,6084,-24761,18430,-6603,8469,-31193,28378,16888,17412,13091,-2848,3785,-11111,7573,-15133,-19914,7360,-23018,-27865,-5826,-7027,26335,-11444,-17018,-23230,-12844,32066,-11070,6919,-1984,19328,9163,-27036,7254,29298,2726,248,29119,-7282,28533,-22197,31724,29756,-25727,14384,-6782,21772,-13779,-12574,-7732,-28973,24163,-26400,-23252,-12750,29837,-19196,8962,-8279,-5432,-22575,-24334,21822,-12358,24478,598,24519,20876,1438,12061,-12545,28931,3110,-29245,-32135,2310,-21417,-15618,19189,-509,31357,-3079,-19141,29789,-11261,-6826,19810,19652,-31608,-1705,-3413,21435,29080,2837,27053,-942,106,8499,20953,31967,-16002,23038,-3728,7187,-23698,17853,-8005,31672,-29448,-25201,-16422,-30510,-29389,-10739,14787,-13792,-29897,-8312,-1492,-28393,-6303,-23812,-17301,-18864,-14821,18785,22281,-12423,-16197,30964,30493,-8547,-26250,-9737,-4428,-30726,-30584,-28310,7495,10125,26843,-12439,13406,-16801,-25651,20153,22832,23800,27208,-10066,-31807,6409,31869,-24269,23306,-20187,4770,-182,4273,-8156,-1262,22028,-8160,11985,29583,-24498,-10386,13451,-16018,-24637,28202,5376,-1,-1517,10925,267
28,-17599,31521,-20524,20316,-26821,27220,-10993,-16622,-5953,-26282,-9677,-13270,29986,11569,-23878,30726,6768,-2995,-9291,-30396,-4009,1735,13486,-7704,19743,-21308,23831,-16380,-18162,32211,-18670,-31537,20822,-30367,1353,1064,-13072,12717,31901,-4804,20770,13427,-11732,-17208,8725,30320,-9344,11780,-29332,-1199,-7308 diff --git a/tensorflow/lite/micro/integration_tests/seanet/conv/conv2_golden_int16.csv b/tensorflow/lite/micro/integration_tests/seanet/conv/conv2_golden_int16.csv new file mode 100644 index 0000000..a10d98e --- /dev/null +++ b/tensorflow/lite/micro/integration_tests/seanet/conv/conv2_golden_int16.csv @@ -0,0 +1 @@ +-22418,-2670,2401,32544,-17278,3102,-4520,-12541,31333,-32723,32767,32767,32767,6740,-12598,2269,3102,-32768,-32768,-3782,18207,21441,-25371,32767,-8854,-25418,-32768,-14477,-32768,4238,-32768,-18802,13721,23994,-4697,-24567,-3886,-13461,-21163,14878,22875,7725,-32768,-26410,-32768,24297,-32768,-24885,-5017,-32768,-4478,11551,32767,-30114,-12245,12234,13544,20244,32767,32767,334,9951,32767,23595,-12221,-16223,23103,32767,32767,15475,32767,32767,-25713,30092,12877,6514,-32768,31472,32767,6333,2545,32767,17916,32767,32767,5083,14507,-32768,7290,32767,-32768,18829,32767,9062,32767,32767,32767,-32768,-11829,32767,-21656,1420,32767,8468,-9356,32767,21390,6609,9428,16771,32767,32767,25894,-32768,-31267,32767,5386,-12631,-32768,-2161,7536,16689,-32768,20653,-32768,-6716,28195,-2094,-32768,-32768,-32768,32767,-32768,21670,-32768,12478,2608,32767,-32768,-26132,-32768,21409,32767,14006,-25452,-32768,32767,32767,12668,-32768,32767,32767,-32768,5670,-32768,10173,-32768,23908,32767,32767,6407,18581,-7681,32767,11869,-4525,32767,3932,-6864,32767,32701,-32768,32767,-8958,-10924,-17013,32767,31454,1825,32767,32767,28077,-26810,31174,-22967,32767,-7498,-201,32767,2721,32767,-32768,-31983,-15811,-8973,32767,-32768,25732,32767,-20251,32767,32767,-16068,-14265,-32768,-22000,-32768,-32768,12744,-32768,12253,-20676,-7226,27390,16085,-1399,3738,-8337,-20621,-
32768,10467,32767,32767,-32768,32767,25342,6272,-32768,-32768,8024,-32768,-32768,10930,20477,-26414,-2415,-32768,32767,32767,10598,23725,-13281,32767,15356,32767,107,20370,32767,-4910,32767,-32768,-32768,-32768,20046,32767,32767,29794,23862,15765,32767,-32768,-28767,32767,20490,1041,32516,32767,-32768,-11847,-9200,8752,-10224,32767,32767,32767,7909,-12989,26162,-25815,27713,-7742,-19170,-32768,32767,-13147,828,-25813,17106,16465,32767,-1884,32767,32767,-8178,32767,16235,32767,32767,32767,-14201,19567,22101,32767,32767,28127,-23122,30024,-32768,24030,32767,32767,-32768,32767,-28848,32767,-32768,-28808,1408,811,-32768,-32768,-1614,21023,11308,-28437,-28691,-32768,30418,32767,32767,29888,32767,32767,32767,32767,26782,32767,32767,32767,32767,-31686,25027,32767,-18652,-8871,32767,8132,-32768,28362,32767,28087,-1570,6677,-8679,-27357,32767,7202,32767,32767,30234,10074,17554,32767,-32768,-30277,-3547,-32768,-32768,-22642,32767,31840,-22542,-24927,11757,-16889,4393,-22199,24261,14568,15002,-5669,-32768,-32768,-32768,-32768,32526,15820,-32768,32767,-10878,-3401,-24274,-24167,18000,32767,890,-11187,32767,32767,-29202,32767,-32768,32767,-32768,3844,19605,-5773,32767,-22792,-24454,-12060,-32768,-7484,-32104,-32768,3518,-29000,-5616,-32768,4908,-16145,516,32767,-32768,-262,-6170,32767,32767,15368,-32768,-32768,32767,-32768,32767,29777,18044,-11161,856,-32768,3096,32767,5424,-32331,-32768,32767,22876,-32768,-6271,32767,32767,32767,-19705,32767,-32768,-25118,23570,16372,-23078,10165,32767,32767,-2651,14786,5793,-32768,-28095,-32768,-14311,-20305,-32768,-15779,18835,32767,25596,32767,29813,32767,-32768,24075,-21707,-32768,-15262,-17215,-32768,23484,-3909,32767,-10311,-32768,24325,-32768,-15136,-21936,-17765,12822,-7379,-6041,-32768,20585,32767,32767,32767,23176,4032,8291,32767,9638,-32768,-26725,-32768,32767,622,2010,20285,32767,-13847,-32768,21593,32767,-2254,-12350,-9599,-32768,32767,-32768,32767,-32768,9519,-32768,-32768,-32768,-32768,32767,23389,32767,-9094,-32768,-32768,-3143,
22306,-23709,-32768,32767,32767,28737,32767,-14916,-32768,-15061,9596,17898,13309,-6075,-32768,-17352,32767,2922,-17447,31196,18633,-32768,15882,-32768,-32768,3705,20794,32767,-32768,17837,6397,32767,-18972,-3367,-27640,32767,-32768,32767,32767,-18727,32767,11454,-14820,-8141,32767,32767,32767,12703,1418,32767,-24550,-28671,32767,-32768,-16170,-23196,-32768,24508,-7067,-32768,-32768,32767,-32382,32767,-32768,30615,-32768,-32768,-32768,-32768,12267,27514,-32768,24157,-24179,-32768,-32768,-4541,-6416,-6111,17109,22143,1929,-32768,-32768,-32768,-22196,6761,4324,3658,13883,-4337,32767,32296,32767,32767,-32768,-32768,-32768,-7550,-21965,-9891,-32768,32767,32767,-22421,-32768,17924,11935,-27304,-32768,-32768,-21861,-32768,32767,-32768,-20546,-4836,-6776,32767,27555,-32768,-24062,27921,32767,-18206,-32768,32767,32767,4059,-3119,32767,32767,4292,32767,-13907,-32768,5399,32305,-32768,29931,3404,32767,-32768,-32768,-32768,22432,-32768,-14784,10063,32767,32767,32390,-31754,-13179,26959,-20831,1048,3281,32767,-32768,-14460,32767,11333,-29784,-27476,-32768,-8175,-12169,-19680,19747,-9215,-32768,-26677,-24723,24042,-7555,32767,-32768,-32768,-32768,-27516,-32768,-32768,-6086,-32768,-6225,-32768,32767,-27708,32767,20309,28774,-22661,-32768,-23801,23306,32767,18553,31734,-32768,12774,31953,-18288,-30800,-16845,-16157,28104,-2957,30255,-5582,32767,-32768,32767,10450,-3871,22973,32767,21625,32194,32767,32767,32767,-18632,32767,-32768,7217,32767,32767,32767,11549,32767,-32768,4879,-21177,32767,29048,18339,32767,7188,32767,20257,18030,32767,32767,-19425,-22044,-5977,-17287,26638,-32584,-32768,23086,-12114,2427,30047,27195,12551,4065,32767,4746,32767,9716,32767,-32768,29684,-20603,-32768,-26233,23942,-32768,-29384,32767,-21153,32767,32767,23859,3862,-32768,-9944,8061,-29966,-11169,30971,-13844,-5881,-32768,10037,-11458,-32768,32767,-32768,15493,-32768,-32768,32767,32767,-17732,27385,17719,32767,-1013,-32768,-1752,32767,21891,32767,-5564,32767,-14400,-32768,32767,32767,32767,-29363,-18839
,-32768,10524,-32768,-22162,22347,32767,32767,-32768,26039,3615,-8664,32767,31805,-4774,-6431,-25223,-32768,-32768,4298,9197,-17267,21182,-19691,16175,-14953,21835,-32768,32767,-10178,32767,28189,-23376,-11924,29594,-26758,32767,32767,29828,32767,32767,-32768,-28684,-32768,32767,32767,-32768,-21269,9165,14123,-32768,-32768,-11151,2830,-24319,22304,20940,32767,-32768,31898,31971,-31622,-32768,-4604,32767,-32768,24983,32767,32767,-21477,32767,-17334,-32768,-5592,32767,32767,26502,-14893,30211,1053,32767,32767,22451,32767,-20027,32767,32767,14636,32767,20848,-32768,-32768,32767,-12895,-1037,-14002,32767,-12335,-32768,-32768,-32768,32767,32767,21142,32767,32767,32767,32767,11264,-32768,32767,-10091,-23231,-7815,23633,-32768,11931,-9417,32767,23985,32767,32767,32767,11913,24069,-32768,28661,-926,10346,-1010,-4033,-7924,-32768,14573,-20242,32767,32767,-10089,-17708,9374,-21067,7759,32767,-13443,-721,32767,-474,-24618,-24563,32767,9478,-32768,32767,32767,-32768,32767,23771,-32768,11313,-11871,32767,23359,-470,-32768,2780,28115,-14020,32767,32767,32767,32767,32767,32767,32767,-28481,32767,32767,32767,-11874,-20369,-2248,1171,12055,32767,-23981,32767,16982,25111,14685,5583,32767,-8169,32227,-1727,32767,-10368,-538,32767,24160,17188,-16981,32767,-17060,-32768,-29252,-2835,32767,-13243,-32768,30348,-1976,12338,1977,-10562,-4816,9005,32767,-4515,32767,10806,32767,15993,-32768,-10966,22695,32767,-32768,-32768,-32768,32767,-32768,32767,27337,32767,32767,-32768,-12729,-1525,-32768,32767,17544,13656,-31629,-32768,-32227,-7669,-32768,-32768,-32768,31038,-12078,32767,-32768,-32768,-32768,32767,-32768,-28782,28330,-32768,-32768,13711,19942,-25986,-32768,-504,1267,32767,-28323,-32768,-32768,31780,1553,6187,-20482,-32768,-32768,2466,-13855,-32768,-32768,-2071,-12303,-32768,-32768,-12271,32767,-20091,12712,32767,9903,32767,-32768,31643,9775,-25192,32767,32767,32767,32767,32767,-32214,-21333,-17799,1042,16154,-21904,-32768,32767,-32768,-25104,-3246,32767,18528,-473,26744,-25513,-32768,-11
933,18654,-29413,-32768,-25870,-4928,32767,32767,-7659,-31746,-32768,-3537,-32768,32767,22275,32767,-32768,32767,32767,-32768,-1768,32767,32767,-24559,-2553,15169,-32768,10244,32767,32767,32767,-4869,32767,16204,7941,15589,7628,10578,-32768,32767,32767,32767,24711,-6756,10171,-32768,-32768,32767,32767,6319,32767,-8378,-7456,32767,9614,32767,32767,-32768,32767,-32768,-32768,32767,-19686,-29017,32767,28981,-32768,-19255,5609,2995,32767,-28727,32767,-12547,-14200,-4544,-606,32767,21217,7011,18275,-9516,-32768,-22267,-24625,7076,-9151,-16037,13558,-6333,-32768,-32768,16517,-32768,-2538,13450,-14057,-32768,-32768,-18031,-11140,-704,32767,8068,32767,10054,32767,32767,-29082,-25883,-14600,3601,-32768,-26987,-32768,-20119,32767,-32768,32767,-25198,-1750,-22878,-32768,-32768,5060,-13931,7723,-12274,-16472,-13908,32767,-7418,7641,-10768,8045,-32768,-32768,1349,-32768,-32768,13882,32767,-5660,-32768,-32768,-32768,29016,28254,20481,32767,32767,15843,-27344,3553,-32768,-23526,17490,22299,11357,7420,28917,32767,8130,32767,9969,32767,12046,-32768,-32768,-32005,32767,-32768,-3941,32767,32767,32767,27155,-27257,29219,24715,32767,32767,32767,32767,12179,10652,32767,-6638,9506,32767,27306,15568,32767,-9902,32767,32767,4348,32767,28543,-4609,32767,22259,30899,32767,-9458,32767,21461,-7225,-28987,5813,416,1904,-32768,32767,32767,-32768,-32768,-11737,29152,-32768,32767,-32768,28286,23848,-32768,17511,32767,-2630,17742,-32768,-24634,-32768,-32768,30760,-28737,13402,6940,32767,32767,3079,32767,-8717,-32768,-5922,10512,-32768,32767,2621,32767,-5740,-25410,22316,32767,-24110,24738,32767,32767,9561,4935,-16596,22228,32767,31764,28700,-5794,-32768,32767,32767,11615,15761,32767,32767,24600,32767,32767,1890,32767,32767,32767,-32768,32767,32767,32767,14453,32767,32767,32767,32767,-32768,19374,32767,-21725,-32768,32767,-32768,6669,-15411,15312,-6741,32767,32767,32767,32767,24708,-1433,30688,32767,6006,-29925,-32768,11597,12556,-32768,32767,-21197,32767,-3815,32767,-12970,-32428,32767,-2132,7811,-4
272,-21282,32767,-16992,14092,32753,-32768,-32768,-32768,-18559,32767,31657,-3153,11081,32767,7246,22640,32767,32767,-32768,-19470,-29322,-17849,-32768,3205,-16836,32767,32767,32767,32767,6851,32767,21334,-6764,32767,32767,-13591,32767,-32768,32767,-16766,-3178,-5773,-31550,6040,32767,-28685,-32768,4128,32767,32767,32767,29655,-29208,31409,5713,-27531,30244,-11400,9375,32767,27065,-16774,1140,-5211,30384,-13885,-14553,-9272,30013,16723,-32768,-8995,27735,32767,24836,315,-14091,-15831,-17012,11034,18190,32767,32767,12184,1715,32767,27214,32767,-6451,-32768,-32768,32767,30856,-20049,7480,-1426,13140,32767,-32768,-32768,-32768,2997,-32768,-32768,32767,-10343,-11613,32767,31772,26154,25761,32767,32767,32767,22569,-32768,-5534,24966,-30670,32767,16391,-32768,677,32767,32767,3332,32767,-27437,32767,32767,32767,-19261,21208,32767,-5003,-32768,32767,-32768,28093,-27391,32767,4616,-32768,7123,13332,5044,-32768,-29344,-32768,32085,32767,-18603,-14586,-32768,32767,32767,-32768,3451,870,-32768,23392,-32768,4555,-15486,-32768,257,-32768,-32768,32767,-18686,-15974,-32768,-32768,-105,-30667,-13703,22282,32767,24723,-32768,24978,-11218,-32768,-32768,736,9608,2929,-17987,-10633,563,397,32767,-2189,7196,-32768,-32643,-32768,1109,7525,-6489,-32768,-32539,32767,12107,9120,-32768,11222,32767,3501,32767,32767,12590,12719,5561,32767,32767,8121,26092,-32768,-7874,30109,1290,12165,-32768,23910,-32768,30871,-17509,10014,-10888,-32768,-32768,30838,-32768,-32768,-32768,27975,32767,-29147,-9442,-10396,-19883,-32768,3923,32767,23259,24745,-32768,-32768,-10961,16222,-1364,5998,25414,227,-21464,6172,-32768,-32768,32767,17925,10769,32767,1613,-32768,32767,27711,5962,-26192,32767,32767,-32768,-32768,-31449,7207,-25155,-17755,-8273,-32768,-13803,32767,32767,2585,32767,26484,32767,-25411,-32402,24076,32767,32767,21320,32767,-21204,-32768,-9160,-20274,32767,19944,-19908,30624,-32768,-29505,32767,24705,13786,-8473,14807,-32768,3669,6011,969,32767,-32768,-25865,-32768,2603,1740,30277,15400,-32768,-581,15
450,2196,-26946,-17224,-32768,32767,-32768,-32768,16516,-32768,19463,-11306,22045,26266,-3783,-22782,-32768,32767,-32768,-32768,32767,32767,-32768,-32768,-32768,32767,32767,-16106,32767,32767,32767,18615,28738,-28424,-32768,21973,11157,32767,7994,32767,3851,-2870,12642,32767,32767,-32768,-23769,32767,32767,6901,32767,32767,-11666,25070,1414,32767,-32768,-32768,-32768,32636,-29485,-20232,-32768,-12932,-32768,3641,-32768,-32768,-32768,-32768,-32768,-32768,-32768,-32768,-28429,-10568,4451,-7793,-5253,-32768,-32768,16177,-32768,-11840,-32768,-32768,-5341,-28504,-32768,-32768,-32768,23739,-32768,18285,-32768,-26263,-32768,-32768,-32768,-32768,-32768,-32768,17984,-32768,-11391,32767,-9030,-32768,25560,32767,31929,32767,-32768,-6941,-32768,-4271,-22914,-32768,-32768,-32768,14884,-5132,-780,-32768,-32768,-3716,22957,19381,-20260,25402,32686,2663,32767,5193,32767,32767,32767,32767,-24322,-25532,-32768,32767,22400,3919,-4548,-32768,32767,-16961,-32768,31406,-30990,-3993,15275,-17654,32767,-14626,32767,-3605,32767,32767,32767,7154,32767,32767,32767,-32768,20386,-3713,-32768,-32768,-32768,-32768,-32768,-32768,1297,20563,32767,-32768,-32768,32767,-32768,-32768,32767,-32768,-19164,-24047,-32768,-15205,200,5048,7597,-11709,-32768,1936,-32768,32767,30682,-4114,-32768,24174,-32768,11694,11180,3077,-32768,5538,24524,-32768,-32768,26075,32767,-32768,-32768,-24479,961,32767,32767,32767,32767,-32768,32426,32767,-27838,32767,31646,32767,32767,32767,-29905,32767,-32768,32767,3006,-16837,28968,-24096,32767,-32768,-16550,32767,32767,23393,32336,9081,20563,21385,4514,15504,-16196,32767,7797,32767,-2518,-6302,-6201,-3609,32767,-32768,-22178,-26574,-32768,-29346,-32768,32767,-30049,-32768,-19377,-19793,32767,-1766,7294,-946,-9992,32767,-22175,-32768,-2086,735,-7572,-26200,-7550,-31443,32767,32767,32767,11133,17650,32767,32767,44,-13901,-28470,32767,-17162,2583,-32768,-8533,-29982,1673,4190,-32768,-16410,28569,4883,3427,2951,32767,-17152,11196,-32768,27206,10423,-32768,32767,32767,2008,-4983,29
593,11770,21010,-7642,-24615,-32768,5839,32767,-20599,32767,-1075,-30844,-32768,7338,32767,32767,-9035,32767,-15096,-32768,31348,23672,-32768,-22302,-5836,644,32767,32767,-32768,32767,-535,-25855,19382,32767,-24781,-3591,-4405,32767,32767,32767,32767,-31414,-10708,1923,-28892,10933,-32768,32767,-32768,15,19962,11320,4502,32767,-32768,13587,-32768,32767,32767,-32768,-32768,-7591,4140,-21890,32767,771,-15561,-32768,-32768,-32768,32767,-32768,32767,30412,-15518,-32768,32767,-31989,30066,29256,32767,7148,32767,32767,32767,32767,32767,32767,32767,14834,32767,32767,-3463,23098,-14424,32767,-3620,32767,530,-32768,32767,-32768,32767,32767,-32768,-32768,32767,-32768,-32768,32767,28311,29917,-26668,10633,-21145,-32768,32767,32767,2555,704,32767,-32768,29134,-17150,-4320,-29122,28531,32767,-20493,20579,-31792,-3302,32767,32767,-32768,15378,-32768,-18509,32767,32767,-4505,32767,14564,32767,-12514,20812,32767,18953,22217,32767,-32152,32767,-2382,-28503,-11823,-32768,13594,32767,-24530,17225,14,-26506,32767,32767,24916,-32768,-25478,16315,24856,25943,32767,-26865,2396,-4290,-32768,-32768,32767,32767,15275,-7624,32767,-27844,27644,-27091,32767,32767,8956,-32768,-32768,-32768,-32768,32767,-32768,-32768,-32768,-12694,-14923,-32768,32767,18772,-32768,-32768,-32768,-14447,32767,-27862,6276,-18690,-6978,4789,1947,-1836,-9463,-14379,30879,-32768,10460,32767,12529,7146,-4743,-31469,-24738,-32768,-10397,-15072,32767,18791,-32296,32767,-32768,-7802,32767,32767,28833,24404,-11863,31566,-32768,-32768,11048,28127,-32768,22588,2448,-2489,-10743,7836,-25023,-32768,700,29645,-17315,-5144,-32768,-23657,9572,-15527,-32768,6563,17696,10008,2988,19386,31277,6033,-19078,-32768,-2517,2668,-32768,6672,32767,4923,-32768,-14845,32767,-13764,-3557,-32768,-6607,27914,32767,-31622,-3080,-766,32767,32767,20694,32767,17520,-99,10051,32767,32631,-32768,10481,-14420,32767,-32768,31940,32767,-16534,-32768,-32768,21557,32767,32767,28560,32767,-4817,32767,22085,9099,32767,-32686,17395,32767,30463,-32768,-28462,-23
189,-11181,-32768,32767,32767,-12635,-32768,-563,-32768,27533,-1601,32767,31116,16744,5340,-25700,10676,-3511,-19394,32767,-4553,-26741,-19776,13935,32767,32767,-32768,32767,23599,355,845,32767,17160,32767,-25942,-25183,12157,32767,-13815,32767,21406,32767,32767,7063,15802,10709,2482,-4968,-10657,-11091,26765,20876,-32768,32767,-32768,20036,-25233,4738,32767,32767,-27837,-24154,32767,4997,-12110,28421,8192,29974,-32768,32767,-32768,-32768,-16944,15052,-28132,32767,-22060,-32768,24735,32767,-17124,32767,-17271,30041,32016,32767,-32768,27817,32767,14848,-28943,32767,-6625,3778,-27922,-23775,-32768,-3182,1421,-16460,-2369,26227,32767,32767 diff --git a/tensorflow/lite/micro/integration_tests/seanet/conv/conv2_input0_int16.csv b/tensorflow/lite/micro/integration_tests/seanet/conv/conv2_input0_int16.csv new file mode 100644 index 0000000..c8e3fd3 --- /dev/null +++ b/tensorflow/lite/micro/integration_tests/seanet/conv/conv2_input0_int16.csv @@ -0,0 +1 @@ +4937,-2615,-18657,-21881,28250,-3001,31602,-8784,-30056,15731,-12097,-20960,-29530,5583,-7924,21461,-13761,-19182,6136,10865,3179,10037,5413,4596,30541,-27366,-22689,-16051,6427,-16878,8754,-32496,21465,-7160,14750,6426,3494,8063,-8378,-7545,-26456,9084,2319,-4755,1154,-16682,12699,14420,12145,-29106,-30761,5929,8894,-16146,32515,24872,-1693,-13167,12609,21544,4950,4332,-14284,24683,-13889,-2388,895,-6727,7022,16998,22627,8524,17749,9566,-7928,4970,-6626,-3217,-4419,-17952,15285,25141,833,-22612,-5266,-18706,585,-8949,-28991,-7913,24321,-28940,-16008,-12017,-20168,20272,14679,28632,27010,-26134,-26059,13523,8226,-17314,-28716,-3881,-2605,22831,21564,-2484,-26886,147,-14195,-15533,-30335,-11499,3461,-30833,18349,9882,11455,16613,-23942,-2173,18223,-24066,-21247,18590,14719,-7147,-3192,-10200,24403,28171,21480,26775,-23643,-32678,24242,16267,20291,23548,10273,-16972,17168,-14281,11125,-28614,29299,-12559,9763,11479,-13076,12311,7146,32115,8835,-31697,7429,-22007,-22070,28623,-4592,-21425,32081,-29789,26858,-31025,-7810,307
59,-5087,21197,22842,31099,-20466,23547,-10894,24445,5759,-21689,-24288,18727,-10161,7062,-12837,30007,-12217,22394,25531,-24834,17770,-8704,-23768,-23499,28490,-13356,-30933,-15057,-32148,-17740,18411,19490,4052,24138,-16513,8410,12950,-26000,24982,-23812,-3702,-20989,31091,-27587,-1716,-7838,19805,26378,-8781,-2017,30519,1513,-11549,-11262,15362,27055,-1574,9460,22827,-29817,-25687,21863,-23362,5832,16928,-20025,-30559,-29469,27552,-22629,-23476,17973,12879,-8824,6268,19012,-6185,12723,31164,-31616,14589,25414,30420,-10480,-13573,25413,-27895,-19125,-4926,-25138,-16528,16528,-26813,4998,-12526,-7786,-14774,-6610,25965,18627,-19226,25939,26785,-1967,-31962,-13375,-21487,14337,14695,14529,-22159,-7051,-25381,-25702,32537,-4358,19007,-6335,7741,-29399,-25237,29179,-25490,-9153,31904,-7444,-525,-21356,14584,23978,17424,-11989,-5111,14815,1407,-15707,-14669,10192,18804,-2603,-6553,8697,7696,19586,-27443,-25331,-28200,327,-15277,9468,19061,7577,-26223,27067,13648,-25806,24025,-1342,-16547,22996,-23139,-30402,3829,1830,4326,5811,-26632,-2782,-9233,-24463,-20455,-24986,-6896,-24353,14169,-47,17249,-1885,29313,-24295,8542,-28507,-23516,-19280,-32719,-18208,28486,31548,-12371,-15231,23285,17336,-29020,-3276,-13844,31011,-24413,-11627,5665,-8050,-18962,32547,-9913,-28823,-9892,3750,3667,16406,30360,-15530,-9165,17564,5815,-13753,-30420,-32450,772,6711,-10680,-6835,8220,-15124,-19528,-12091,18111,-3884,-21010,-11410,-27451,-19941,-22123,23771,4694,-23092,2998,-1309,-23175,29696,18090,30801,29133,13396,-11458,28098,26172,10594,3978,2489,-12244,30053,23415,9213,-25481,-6321,-6130,-22835,-4137,-3085,-14294,-1390,28437,-29992,-21123,26939,29879,-10494,-7427,-17057,-13723,-23098,17539,9308,-6652,25250,-16000,-12019,18516,-7066,-9702,20202,4812,6219,-32607,-7727,-1806,2985,-5051,6899,415,4000,25808,-1572,-21577,19623,7961,5702,5585,24841,-26940,4276,-24775,-12614,12689,2291,21280,6964,-29569,-21846,-6740,3262,18469,605,-9854,-3141,6225,16339,31931,-24273,15142,31475,-32766,-19156,-
3044,10956,-29417,-11144,-1976,15971,19257,7178,-1619,-20373,-21672,22432,22980,30402,-26276,15833,-13490,-8102,-3090,-5523,-14887,4108,25578,5582,-3417,-6161,-9858,20423,-446,6029,-14239,-6372,-3830,-12284,6463,1418,293,-17413,-438,21270,17671,-27957,7456,22181,19271,-17537,-31130,11578,17347,18525,-13933,-9314,-29589,20678,28400,24631,-19768,-31946,20380,20109,32251,-30469,-23643,22571,-22042,-25859,-14167,24213,30004,-16413,-27810,-12352,21008,25792,17097,-2486,7317,1036,23909,15574,-2665,-31452,9168,-28538,24717,-23039,-13628,3658,-16116,-10282,5814,-9171,-15974,23983,29632,-10468,-22797,2819,3477,30608,18301,6184,-29929,11004,10328,-18936,1650,15918,7527,-21669,24455,16815,-17388,-23146,13365,2769,29991,-1776,1736,-23249,-31623,-28032,31420,-26379,3561,15328,21321,11258,24511,27841,15622,27273,-18885,-31666,-42,12991,25499,14980,-9835,-17375,27653,-14277,-11618,8623,-30765,-20783,4442,-30017,-31947,-21238,25642,-28452,10631,20016,-12582,21573,8696,29846,-17130,19477,29157,10640,-15178,20698,9396,-16035,9823,-17949,28395,-19591,-2240,18683,-1258,-13082,-19744,-6950,27589,9780,10635,9329,-29154,-13627,17376,-14961,29349,3076,-15372,-19854,31387,7087,23565,10427,15664,-28739,10646,-26979,-27493,-15602,-19071,-6934,-17299,-7064,-29766,20914,9431,-2232,-14978,23346,-29684,-12834,13043,4418,22006,-15583,-12818,-25054,25134,-22468,14090,-16884,-3819,26022,30518,12196,604,29170,17095,31222,20995,22085,-2524,21470,13801,12031,30958,7246,1840,-26744,9423,27311,-18729,5200,-10513,4032,-21163,-18738,-28475,8652,12445,9795,-15227,28678,16824,-17845,23725,-30853,16576,-10207,28335,10383,5153,3101,-6832,19513,-2851,19237,-4438,-14639,2322,19329,-10784,-15752,-24478,-31193,-116,17622,24826,-10758,-27390,21063,10204,-24134,-16605,27527,-9981,-7005,10894,-2277,-2984,5064,7862,-26432,3120,-29629,-30470,20638,4941,12918,-22935,-21594,8149,30255,-5652,4310,17008,6121,16049,-12042,-30594,-28465,25684,4036,-3822,-1191,-28418,32603,-9740,-19883,-11349,-4454,13462,-28333,16443,7909,-30
115,30921,4766,23241,831,16672,17976,-8142,-986,-10570,-25503,15795,19546,4321,24122,-26230,-3495,525,11835,-15823,-4989,-22685,2240,31871,-25439,22199,-17751,18718,1507,-1077,-5474,-10368,8715,-15710,-3031,-26483,-5033,18844,-31679,11727,14190,-24300,28129,-26736,20146,2004,-25672,30099,15886,13498,4099,-13471,-9889,-30808,-15359,-12504,-11990,-8531,26908,-7230,29317,-14816,31972,3128,2564,-7986,-27306,18065,-8108,-383,9417,-5535,-16823,-29080,-24003,8540,9331,5884,24034,7959,-22601,16970,23458,-17443,-5953,-4556,3651,-17529,10643,22160,-5331,-24789,12425,31330,-15169,-20280,-15978,3255,29811,-1698,-27699,-26719,-21989,28453,17410,-13271,4690,19143,1526,-2740,-8775,32279,18494,-16194,-29180,-11099,-20233,12659,-30515,-26797,-669,22907,-13814,9385,-4734,-14392,-19415,-20046,8228,-706,9285,23446,-8878,-10944,3606,19292,-20038,-32671,23707,-11841,2331,-23898,-22774,-31358,-8072,445,24094,3142,2199,-2159,-26816,-16794,-18173,25992,-7823,-7167,15843,-14884,-9923,27434,-31816,-30322,11370,7614,-14232,-16419,25554,324,9562,27985,-1773,10027,-4322,-6410,30555,-1562,-27873,4905,-31321,23012,19553,7102,5755,9940,-29584,29183,-13320,20430,-27402,-31853,-5697,21101,-1054,23113,-22899,-18500,13878,-31052,-11792,551,9572,3798,10843,978,-1996,-31598,-30249,7839,2493,27469,9573,-9297,19330,23669,-8609,31666,24464,-29642,21413,-7558,13512,18,-28559,-25960,-231,-27701,4355,26460,3271,3156,169,-31421,-25209,18568,-26503,-21960,-28845,937,18,-24800,-14741,24510,-16721,-17837,-7996,2812,-27699,-26234,22323,-15634,-32287,-8864,6046,-23808,13606,18537,-10747,22857,1329,13398,17278,8801,-12885,29062,14165,2189,-26409,21290,3392,20778,-6320,17238,-21938,-11054,6958,9683,28667,-21804,12672,29132,2550,26711,-3776,-12703,31620,25608,-24862,110,28458,31225,-22461,29617,-5890,7039,-3830,10921,5925,27391,25340,32200,-13871,-8442,-11724,-19008,-29395,22367,31929,21678,-14314,28206,9896,-9688,10650,-9844,14417,-31298,-9716,7342,-12393,-16219,-24368,-19211,29608,9607,-16282,-5981,26154,6171,-780,42
42,14275,-5178,-9588,4554,-11924,-22763,-17613,-20348,-19844,9681,21490,-21580,-27909,8109,-11545,-27843,26031,15429,26317,-27096,6490,27213,31985,-26582,-27347,30084,5869,-1300,22082,28921,-11050,-25776,10758,8813,-6510,29867,-8841,18927,1025,7561,21554,30231,12176,-9557,11895,30464,5369,-19232,8778,20122,-13333,17080,-13029,-532,-6311,21622,19791,-13672,4960,-28066,2071,12371,-28843,-14697,-9237,-28385,-4649,-26451,-20456,18540,-9808,-31273,3316,-9440,-7572,-1105,24734,-18063,2959,-979,1237,5848,19052,-11539,-21468,26490,32223,1200,-18409,32026,-5292,-25002,2311,-6480,18214,-26174,-7182,-16047,-22410,-14434,13760,9258,4124,14954,-4825,-28894,-17415,24270,21790,27335,15640,27547,11048,-27367,-30375,-27118,30134,11151,31773,20105,20251,-14577,-22450,-10076,7343,4403,9693,-18183,20225,19428,15290,9431,31219,21107,-17655,20384,22728,-19841,-23897,-31952,-10344,742,12955,-27331,1489,-2464,-11339,-623,10307,27613,-6999,-19200,-21134,27810,8571,-7517,-29912,30113,21625,-30850,-11982,14291,-11837,-5956,13174,3423,-30509,-22918,30452,-14816,554,-26978,20376,10471,31294,14896,24382,11940,15871,-1666,26875,3995,13418,8526,-16909,13941,16272,-550,25188,-17053,2299,26747,-13323,7253,-4682,27475,12364,-14565,23987,-23120,-10611,12805,-23310,17688,-18023,10518,22414,-13959,-10699,-24511,28205,-11100,20824,-32472,-383,-7627,1821,-28711,-20463,15719,-13263,-12287,-20749,32361,-20619,-21536,14149,22326,-27612,-16825,-26014,30244,-8390,1757,-27003,-28328,-11141,-28440,14834,-17956,20219,-9443,-27177,-5728,-24722,-19630,-1075,20034,-19519,28542,3585,21822,-16706,10117,-2297,-5646,-22439,-27816,-21180,-4583,7226,-23790,-32605,-6056,27320,-18562,-10824,6620,948,-14495,5113,19929,32456,-12030,4584,22823,-6376,-22376,-5804,3822,5625,-2240,-30,-20733,-5824,103,-31606,12584,16821,388,-3751,22939,-16888,-3760,-605,-1642,26475,-29727,-30007,521,-1764,12768,-6475,-7390,11770,29251,-19836,-30181,21853,13116,-28955,-1910,-25618,26157,29008,-19746,-25735,9753,-27055,4737,-89,-13975,-17665,17118,
11239,-29650,-15420,13894,32465,-5485,-11582,-30195,-16948,-5705,2160,30589,-19911,-17238,7857,-1402,-32196,-28668,-32160,7952,29046,9779,-17269,-10208,-27452,22916,-30149,31425,7458,-25120,25774,-31186,-9062,-2452,-13067,7785,9670,-18831,-30813,4484,22847,11815,3338,-8351,19545,-15568,-26707,29766,21066,23108,32751,-12043,24511,17283,-31926,26740,-31661,16877,1849,-17140,-10008,-2222,-31540,-15904,-24620,26470,27909,21322,-12269,-11353,-20775,15940,19328,15714,16295,15056,-20852,23997,25183,-26263,27248,20167,29031,-32171,-28746,-25150,-15492,11779,-25179,-29567,-27307,29649,-8172,4067,-27803,7628,-18042,-26283,-20723,31472,13042,-27243,-12432,-22783,-14525,19639,-666,-25774,9195,-379,-31071,29337,15115,13995,11541,20845,-30482,-18395,-15357,-14690,29118,-31466,-4622,10108,-3492,-14365,14582,-29789,19976,-9129,-10127,-25285,-15255,-14260,-29544,15865,-11118,18742,21892,10948,24514,-31461,12792,-594,29475,24335,15812,14976,-9309,-15445,-3081,5430,-2929,-14759,-8437,21638,-30433,7722,27279,-1884,-27978,29461,-31516,-11274,-5173,6620,-9369,-27424,7289,26460,-1837,-22624,-32276,-3820,-26717,25559,-30453,-8849,-15293,4304,-22108,-19777,-8094,-30214,-2829,2182,-13052,-2269,10827,-8084,20965,-21679,15574,-18608,-4782,12597,20078,29019,20047,7802,-10860,5659,-14163,-28911,3311,16280,16768,-15025,4906,22267,-11111,14115,30212,3903,16157,-4973,-29071,-12282,-19651,18664,8456,24480,5829,-478,-2131,32138,16355,3753,3629,14937,21202,18807,-17069,27557,27611,29208,-6459,-1933,-4038,22891,-21604,-28227,29119,-7612,-24973,-12696,-28289,-7262,3867,-10770,22221,11916,-21224,-27308,11845,21660,12594,-8954,-23132,4008,24348,-22240,16675,-521,15087,-12019,4685,17403,-19275,16677,3769,-32169,-11114,29352,23350,-6517,-2446,-7226,5154,5947,32392,13387,-2720,6041,-28459,-30835,-17168,-10225,-3616,21899,19276,-26817,1269,16092,-22155,-18609,-17198,-19175,-10128,11352,-24943,-1981,9960,10397,-11171,-12533,-17847,31811,21877,-18407,31623,18268,-30486,-14211,5902,-19723,-26702,27385,-4250,5796
,11837,-11818,874,-6445,3226,23502,-9189,14650,-8342,5214,9063,-5647,-19734,-14353,-26393,976,17240,-356,-6460,-15144,21739,-13826,-31575,-4484,-13348,-9620,-22994,-5487,17583,-31481,-13062,-4658,8703,5442,-6430,2322,-10304,22774,12222,26196,9858,6471,23340,2221,-3097,-5486,30098,-11381,-3845,-718,6524,26195,17090,31079,30488,25948,-21773,16892,3168,-21982,-28864,-24834,-28790,-25998,-5748,-23016,-8799,3482,-25103,31869,-10592,29050,-29544,3988,24745,23380,-10721,-29574,25309,-17237,-23126,-28703,22702,19022,-9706,-29812,20191,-18499,14764,12825,23351,6583,-26381,-26193,17356,-1859,-17881,11378,9451,-15929,-23356,-18183,17529,-21504,29842,-6963,-4683,12221,20563,-29884,-9105,-15059,-2287,-1453,10759,-12322,31546,-8822,29284,1539,29616,12197,5263,-28896,13244,32252,-32509,3323,14532,-18808,12003,10878,30977,8010,31161,15237,-4280,6576,21825,24241,-14427,-24565,22474,-22321,9502,-25994,-22912,-31687,24784,-16657,-8674,18722,17643,-22768,-12770,-9263,8918,14726,12195,-4230,4383,-21972,-179,-19531,11249,-13806,19441,-28937,5366,-12358,-28122,-11669,-14107,28895,-5951,-2163,-25000,-12529,-29160,18868,24418,15830,23834,-20996,-15346,-16910,31902,-15640,10647,-3421,-3659,-5304,6255,26522,9723,-28332,-5504,-25875,-28866,20934,-5019,-5475,-31984,-31893,-18537,25547,22427,5427,-18451,21431,3953,8436,-1802,27622,-19790,-14864,-2629,5315,14589,-5836,1845,8176,-17844,-16556,-17910,-24155,-8638,-3141,-19460,29391,-31995,-4103,-2992,25078,-28705,20400,-6140,14725,25250,14619,32406,-21807,-815,-1038,31180,30123,16147,-54,-10682,-20833,-6217,-12304,19305,-18910,-6546,24169,-27515,18975,3868,-12913,10073,-17485,-24489,7032,1931,-22255,-1698,10634,22503,-3200,19037,32524,-836,505,16939,-13375,-12330,-15908,-11460,574,-19579,-31050,-14885,3188,3204,23100,17363,-20803,-25648,25979,-24835,24298,-32515,18649,-11959,4464,-26056,-21575,-10141,8352,-29737,9252,-19761,92,-18650,-8655,11630,-23286,29539,10640,22552,-2746,-12952,-5848,-1954,-24484,9229,-11028,-32212,8037,-7511,20438,16055,-2046
5,-30039,5933,17482,-17996,21835,23891,28544,-3781,-23575,-16676,-5898,-16827,-23536,-20853,17461,-3533,-19357,6440,6831,-30614,-26714,4985,27080,1755,6050,-30220,28282,870,-11262,24412,-32174,-21149,20941,13924,-2284,-30450,26165,-6498,23017,-29024,-3627,7535,-21824,-24078,-2579,-841,-26926,-14233,-29836,26685,-28766,-23100,-272,29032,-23162,22108,-5562,6927,-29982,12912,8763,-9099,27166,6677,-20796,19663,1631,-8538,-31718,-243,-17379,-15516,31397,-25843,349,-30129,9057,6814,8221,-15391,8646,-21075,-32035,4868,-20105,-28981,10140,-15978,-9548,-14045,25727,28994,-16605,31901,20238,-12070,-7160,14124,-4757,18198,-30873,4980,-25396,24538,11117,10017,-2767,8825,-19881,-11149,-20406,-12249,239,32153,4814,1126,32252,22664,-4525,-23504,-30374,5960,-28551,23156,-6584,19780,-13828,-23938,-25818,-27464,-10566,-8336,-19804,-14603,-7880,-13452,29393,-18377,6961,-25733,-10159,-4149,15058,-30996,-30675,15566,24751,14228,26683,-2400,10678,3796,10296,-5316,-30342,-22222,-8956,21441,13127,-4450,15020,-9557,303,25703,22497,13313,12679,-20038,-26160,1875,3297,-13722,-24098,30642,30095,9148,-31736,-12960,-17312,-6008,-17575,-15957,17701,13334,22934,-7119,31299,-29777,-24877,-11581,11040,-9205,-20295,4424,20392,-6923,-5222,-9627,23140,11134,2328,12802,-4820,-6630,-8584,-18499,-29905,-29454,3294,24655,8363,-4267,-14883,25453,-4072,-4466,-26325,-12963,-21180,-24940,3617,16664,-29071,-24523,-18621,-3271,-27468,-294,31566,-4429,7343,19685,-15547,-6438,26312,-24236,-25021,17387,-5910,26621,-30050,-2417,7772,18469,21902,-21016,-29384,28090,-29627,1921,4462,24150,21512,23534,4433,-2195,-9497,-28335,16522,5620,-7743,19745,-30660,-13595,4813,-13543,11482,7813,-27487,26600,-6341,-31803,2851,31954,-9625,-15414,-24941,1494,22666,14306,4109,12215,11967,-13513,-17322,28375,-6550,7524,12227,-13369,-10333,-13570,23482,-21445,10777,-17432,-20703,18717,19319,-7669,10823,-8129,2271,12681,-24530,-28004,22772,30020,28299,14555,26060,-28046,-21638,29455,339,29342,9644,22211,20373,-9248,-25814,21604,7941,272
31,13575,-24538,5807,-1874,-10295,401,10946,25060,22964,23638,26906,-28150,-19850,-28850,17688,21350,-27786,-25335,5611,-15158,20626,28297,-14175,26541,-27703,-20116,-23000,-12584,-7318,28903,-23175,-26936,-10961,9763,-19126,-13869,21862,23768,-19009,-16449,9417,31785,24964,-27389,25188,-23934,1671,18625,11153,10105,-28755,-4766,-29905,6204,-21998,-23464,30371,9609,-28824,18911,-8734,-5428,14654,1199,22539,15624,4283,3011,24062,-3007,-31047,-14735,-30231,5646,-18805,7041,26284,22277,-6731,8765,-11945,-19721,9081,11703,-7041,-22517,-14614,31731,7483,18961,-14629,3310,-20360,-27934,-7620,-12920,-10855,-25098,15925,5964,-19214,6681,25438,-8374,16661,-23923,-29793,23473,-11985,-1444,560,9961,8736,-27268,5228,-8443,13235,10291,6527,14391,-23661,2288,12088,17089,-28709,8867,-18385,-25942,-31659,5572,-15453,-13014,22737,11419,-1699,-4559,-6282,3765,-9876,18224,-24696,-15247,12815,6285,29508,31256,-2051,31671,13358,-21620,-13542,-2629,390,22878,20942,13690,-6176,-4288,22799,-12811,12950,13779,10553,-26152,5218,-23560,18310,-12170,-15953,10147,-15835,-4772,27946,-16181,31348,-15439,18221,19531,7464,10867,2760,24664,11,-30484,-4627,23334,25301,-16038,32021,-12753,14918,-9215,-27125,-10899,-21736,-29983,-18815,24710,-304,-32432,31990,-12203,17334,31101,-7095,31652,6407,24478,-27261,26953,5808,11882,-9715,-29977,12017,31240,6854,13461,-8366,-2187,-13935,13122,-26019,-869,20236,-15384,-26548,-27857,29245,21517,3088,-30736,-26664,-32426,-30797,28716,25974,-22240,-23878,-32431,-30781,-24703,-14824,9417,4541,26097,-18219,2482,21142,909,-13740,-27021,1097,-25310,25134,-14542,-11449,-16024,-3635,10982,19528,-10563,-31207,29989,-3662,-4144,11724,12463,14319,-19249,19830,-22102,-30413,23944,26417,11845,27440,19833,4268,-24284,-14501,-5649,5333,28743,-8969,-13405,1354,23738,32705,-14850,19589,-5336,23198,6784,-32428,-32119,-29225,-1388,20058,6796,-4398,16779,-31692,-32687,6244,-21725,-10240,-9226,17137,10401,-8812,22941,31073,-24066,-21908,10607,-4192,3202,-31021,-19318,4508,-31622,2174
,2904,-28419,11774,26886,8068,13934,-13355,-24644,24876,20402,-25368,-26563,-19492,11526,9093,-25053,-16156,8002,-29288,-941,19535,-851,14580,-10891,-11531,11972,-30943,23298,3109,-26241,19753,-29263,7442,18001,-1977,-16238,-3262,-19121,-1405,3470,24415,-13729,14066,-4361,-5097,4554,-3777,32126,19159,-4269,-22767,-19050,300,5531,18545,8109,12302,24625,-3219,-9699,-4764,-22665,16309,-3676,21486,-3762,8683,4516,-32524,8558,6960,-26208,-29598,-27171,-24332,-12023,-7233,-14208,4210,-2905,-7798,-13493,-20381,-430,-2867,-19389,-29256,-2240,14674,-6035,29106,-23712,-30543,8335,6334,-16062,18633,30528,21104,17061,-2909,14321,-29055,32018,22082,-23207,4323,-4351,-20969,-7488,14983,19556,16057,-4977,-16704,-13563,263,18395,-21731,1464,-19879,-7969,-13770,14340,-12996,-13821,-13845,1080,26459,-8987,28019,-19677,-11241,14429,29835,-3199,3201,-2184,-28141,-20774,-21650,14238,-10075,7286,22219,655,8063,23019,-32162,4605,-27510,29476,24839,-2397,7430,14902,-6987,21950,30546,1407,31364,-25248,-11988,11165,1475,30247,-6204,-11120,-19561,-16398,15069,-2372,9275,9550,806,-2550,27613,1931,-25749,-1577,-4648,25104,-24060,-30266,13515,-20637,8869,29305,-8528,23266,12649,24557,16963,29161,-25067,22130,32047,14906,21914,4237,-17569,3281,-16680,-7732,-7589,16989,291,-28726,12835,7011,-10106,24479,25367,1017,-6870,-12036,-31570,132,-30473,-4344,31620,-5280,-26304,-5183,21722,14728,-25856,4025,17450,11363,6683,-22473,-18680,1861,-25127,-6366,3940,-1445,-4469,3222,-26409,20447,25239,16646,18374,11090,-5921,7635,-31433,-18629,28265,-29285,4830,-26118,19491,-13547,25789,-26715,12088,25542,5044,13346,-14177,-16144,2553,-23663,16801,23882,-22868,30392,-1020,31441,3854,-820,-23493,176,-32423,-13805,14998,-19167,28486,12058,308,-1538,-5023,4174,-30869,1202,-19157,-21241,-21548,-31167,32085,23488,20440,28276,21130,9539,21263,21631,22601,30508,-15762,13987,-27114,-1141,16249,-11252,31668,-9075,22156,-30039,-32255,31498,-5186,-7567,25302,-15973,14703,10324,-30036,-28384,-15496,13095,-876,24386,-1448,17
493,16867,19624,17930,-28113,-28913,-3954,3435,-8752,-1220,30643,-7840,-30957,-7907,-3536,21443,23913,13480,-14732,-20795,18831,-11704,27495,-21891,-29700,4599,-2584,-17530,10022,10435,4446,-31944,-21609,-23701,-25680,28434,6996,-17614,-10181,-3170,-9758,-2495,-13338,-22078,-9497,31332,9176,18686,21806,-31380,-30558,-22628,-12391,-29199,32537,-25103,26672,3008,29683,27432,2017,17914,10759,-27896,-13546,5713,-14619,24634,2659,24868,-14789,9177,23907,17531,-6574,-3006,-19125,-24411,-16379,-25784,9351,-25342,-18819,-15150,28629,-22341,-2063,23566,-6795,-16190,-21110,13643,-29424,8090,19317,26834,4512,24202,29890,-28840,-21510,24447,3105,5521,28068,14104,-5432,-22951,10348,-14294,-8446,-10853,21156,-4577,6658,-27811,20339,26503,3728,999,31403,25353,29927,-19428,3559,-22396,13237,27037,-9739,32496,15361,595,29596,14586,-9283,-29341,6058,29907,10905,7588,12179,27584,-24630,8735,32511,13631,27670,-5664,-28245,-2483,16001,-18256,10695,13760,-23240,18017,19435,378,32239,-31786,31706,3445,-28651,12483,-11966,-25211,-32423,-13242,17195,18069,22132,8462,24501,6690,12507,2108,-3211,23346,2767,-16706,-8705,414,10069,6280,349,18993,8492,31850,-18098,-10190,-11156,-23146,-4362,9568,-21343,262,845,7258,-25749,-21286,-9585,29053,4405,-19290,29106,-17731,15764,2746,-32728,-20830,5407,206,-15686,24548,-29814,22530,-506,14609,25521,-23571,29413,32695,-1748,3810,13671,-15188,15750,-9144,13331,-15014,31527,17689,-8057,-3953,-4407,6377,19542,17347,-6718,22664,-31097,25939,-29470,25010,4918,23075,-2185,4054,7879,-2866,5868,-31954,-23264,-15596,26853,-18313,-6068,-29992,-19387,-32228,31741,-2481,-20,7335,11339,-105,-24535,-1170,-31997,-22196,5030,-4666,15525,-29132,13972,3311,-12959,-19002,1784,-15668,-14716,-11481,-8609,23774,-31543,-8215,366,-16492,21819,8247,31721,16505,26203,-6685,7753,32269,16670,-5899,-17163,-19622,-21332,-7390,2705,-30298,23392,17518,28800,-24563,20027,32653,-3400,-21327,819,-18181,18806,-15372,-7876,-13424,11666,-27633,27496,12480,-29354,6718,14744,31814,715,17253,17
138,20347,13954,-12103,15988,-5905,-27588,-32024,-1288,4614,-23511,3334,147,-9451,9234,-24648,14886,-22123,30096,-12166,15946,21825,-13983,-28748,11399,-5566,14676,-14427,32606,9024,-21736,-23225,-27340,-19993,18185,-12879,17672,6092,7998,20484,-661,11748,20299,25006,32380,18705,20868,-12260,28495,12272,17247,-19055,31847,22466,-30616,-18455,-11396,5993,-29186,-12579,1103,18580,29988,-28109,32606,-16728,23738,2215,2271,16042,11421,14329,11187,-28430,-32233,14895,2438,-7184,14941,-31398,-15599,-25440,26388,-13545,886,15081,-6213,-1032,10572,-18463,21030,-24198,24842,23764,-15201,15109,26830,30676,1393,11293,17161,24630,21284,-20021,-12469,-6320,4002,21344,-14955,-3319,31846,5833,-12030,14228,-28950,-25525,2027,32751,-4367,8159,969,-23717,7175,20112,-31525,29019,-3195,15301,32732,-12855,17644,-31406,-21953,-15025,-5932,8268,-20965,-26573,-26068,30957,-14525,-21972,-11968,-31908,9201,-18745,-24691,-27598,-30652,-26914,26032,32455,-1781,-3657,-18648,26630,-10202,23253,15944,17733,15844,-26949,-9262,-10035,-22985,-28575,20953,-20906,-2973,20714,-25395,30411,7585,-14344,7810,14294,-19779,-30457,-16776,-32180,-12333,12970,-24378,7987,26677,-13715,-1022,10207,-32659,9836,-1863,-7615,-8797,9806,-31543,12380,24294,-23699,20339,-15864,29502,12180,28533,8597,24939,-17696,-20397,-421,-27415,-26447,-4313,20103,28312,-6853,15216,-19136,-5148,-11297,-28660,-5184,-4244,13405,-7626,23824,-9758,-21479,-21646,669,6876,30597,11008,7406,20240,31068,-19082,30559,-6896,32156,-7018,18740,9324,25359,-13139,-17897,-1381,-2216,-23227,24776,25146,-22662,-8971,22432,14267,-26374,7250,-20502,-10027,-28816,17170,29994,-11284,-13000,-5948,-29801,-10697,21855,-20508,-19653,21981,9080,4108,7163,-31490,-32743,9079,32364,23997,-14335,873,-11189,9285,20728,-24787,-23098,-1963,-30377,-8734,-26621,-7809,17966,6532,7691,16206,-13287,7778,-12660,25310,-13532,-26625,27833,31262,-22463,-3891,30857,-24675,10865,-22161,-23976,16463,-23231,2899,15014,16510,-5976,-29006,3208,9912,-21157,11557,9187,8408,8069,-3221
6,3808,-3886,-22117,-542,21714,20933,-5948,-26174,-16811,-2443,24457,27723,-18245,-6990,-24153,5280,-29499,23906,27728,-27335,798,7235,19161,31364,-23712,-877,5356,2716,27215,-27048,23242,-20077,13684,11399,9921,-22912,-15536,-6311,-26840,5405,-28078,-3738,19410,20837,-16847,2202,8379,17657,26631,29422,15583,-4323,-17856,-23202,14257,-11070,-31571,22954,-15180,26552,-26797,-14591,7765,-7623,-29256,29905,16681,30909,18042,-30243,-19332,29648,18221,-25182,-6470,14298,25369,-28919,31351,17410,1258,-23778,-28702,-3592,32661,-28744,-14514,17734,-10866,668,-5670,23950,-10779,-238,15915,-1549,17323,-5689,-13391,-3852,4953,-28478,-13340,24502,-13906,23234,16394,-30642,-15250,-15142,-30343,-20511,6645,18143,27299,26036,-7677,-4018,-13017,2951,9294,-11843,15311,-15783,20332,-8859,-5809,-30024,-11901,-17675,25842,-4222,18841,-4359,-1431,16481,22803,-16746,11205,-14762,18139,-27963,27639,-7459,32257,-14529,-24120,-16773,8753,20156,10634,28397,-21988,16299,23102,4325,17309,26389,-4370,-8937,11431,1001,2629,-11089,24755,10913,28816,21757,-822,-15993,13864,-24741,-12785,-2488,15837,-32740,6765,-9876,-29567,8153,10968,-26475,26593,-2970,-11237,-1999,-32530,4836,-4769,17698,-721,-9694,24692,20362,15431,28336,-3164,-21474,6161,-3434,-30868,22591,-2488,25842,28446,-13173,-32331,15461,18427,4688,-18463,21061,-17090,29672,27768,3232,-24695,-6489,21034,3258,19550,-25182,18869,-528,11986,14228,28008,26732,-18957,-25735,24323,6148,3274,-13221,-3476,21887,-20166,-23780,31424,-12850,29359,3294,-32561,24900,859,1258,-18136,17053,5968,-15285,24904,9446,-10465,852,-6583,-27230,-14980,11236,-1192,-11553,-12629,21078,-14663,12307,-25029,-24148,-22871,3022,-20718,13058,31863,29560,-10966,-2360,4717,27700,-20164,13880,30419,-18492,-31928,-11231,-11188,-10423,18059,2991,-11402,15531,5728,12375,18459,-10156,17385,-29612,-30231,28076,-13031,-11549,17365,124,-2725,19531,-15278,3996,30091,2739,747,-20677,3977,1221,-28326,-6595,-9774,-209,11192,-6082,5844,31988,-470,-12179,6359,-32184,5295,3759,-505,-196
01,-24521,-7616,-15783,-8809,2732,13599,27310,-27979,20735,-25066,8193,32286,16140,28282,-23895,23593,3160,-31580,14343,-6283,-8887,6901,-8650,19416,-9931,8261,12091,29055,-29110,-20955,-10961,16458,-11685,-3284,-10532,-21043,-66,16474,-30797,-19177,9570,-31160,8129,-3889,-2857,-27141,-7309,2279,18686,-31587,5997,7109,-7415,9910,-6663,-25309,8581,9715,5749,27932,-20020,24467,-22985,25482,16877,-2064,-14116,-22198,-26245,-24847,-16706,-18347,18089,-6340,-32078,28915,12508,13426,-15767,14028,28255,19574,17966,-21963,1364,22900,26257,-13075,-23230,-7475,9679,-22995,2435,28165,2062,-30013,18423,26596,11936,31873,-2089,22966,-30919,-5925,8573,-23502,-24533,14627,955,11830,-22219,18576,-15825,-28694,-26958,1092,2804,-15882,625,9752,32129,-4365,15632,-28412,16992,-22122,-30891,-32533,-25251,-7127,25822,2770,-10910,8630,22485,23864,9574,20453,-9544,-19055,-16306,8079,9190,9236,-20520,-24141,16291,-17136,-22603,-15596,13056,-31133,23902,31587,-26725,-26563,29579,-4071,14176,-22690,3860,26957,-3269,-2699,26342,-5882,25786,-1089,17263,18151,-29034,17304,-12248,32072,-14054,-16890,12447,-24498,-9842,6047,-5447,23399,-1776,21264,1810,-1038,-29155,-10307,-12147,2335,-30447,28844,-82,1404,-3643,-835,-3683,9922,-8586,16470,4835,29431,9119,1419,13936,10537,-24086,-26815,7958,-10689,-27740,10063,-28130,16643,2208,26740,7652,-13998,8495,-17264,31700,4417,16289,-16093,26950,22560,12256,29812,-18831,4156,11247,11752,-17104,-19743,-31057,-1293,-5899,-23149,28659,-1080,-16523,-19222,-31826,-18437,1654,-9755,11205,-20351,-18945,14747,31408,16679,-7186,16885,11869,-31207,17869,30603,30117,-10229,-8933,22830,21231,16044,22490,12147,25458,-11276,18426,27333,-14673,2137,22881,-7576,-22935,32149,24097,-7573,-8498,29024,26878,6479,-480,-16681,-18732,-10345,23109,99,-30722,-16119,31705,28831,-15487,-2763,20673,4267,-25981,27247,-27226,29855,-30952,15909,-24591,-20251,-4767,13903,-8677,-4349,-7891,-16099,-20990,-22356,-19937,571,-31970,-5418,7294,190,-18764,20251,-22980,-6200,24964,25562,-31008,93
86,14948,-13404,16598,-14574,-23065,-353,19206,-19146,28000,-4987,-12405,23803,-13541,-11114,16105,31324,12917,18431,1650,-2434,23725,-6775,26392,-31983,13301,-25880,15651,-31415,7913,-18943,-5497,-4063,-8725,-26452,-8229,-15442,28246,26925,25587,-9996,-9371,8908,25001,20267,18292,-4264,-21344,23972,16084,20928,-24642,29419,-991,32751,7894,-28932,-7764,32610,1749,8512,-1080,-3371,-22923,19429,-28542,15330,-24001,29287,29997,-22141,-6270,12024,1551,-22439,-27260,4250,-14523,-12346,-5570,12596,-774,-20337,30084,-7116,-8740,-13788,-12926,19293,-27910,-22382,-26242,-20157,25659,-18154,-32072,16010,10715,-14530,-4591,-6989,30975,374,6470,-27406,4283,25436,20198,21612,9206,25305,2485,11438,1742,-26882,20914,-11746,-10234,18810,14152,8291,13831,-2462,-5592,-15780,32718,-8010,-6487,2688,28971,-25811,11651,29486,-1560,29577,19431,-11882,29998,-22808,14904,3589,25813,20586,-20655,-7899,-6710,955,23908,17701,16176,-13462,-16102,13986,-17442,-2162,27498,10546,-14600,-10786,-15404,-6049,-28533,31402,25680,-15555,-29880,-30903,-4110,-212,277,9250,613,-2518,24779,18018,-10027,-23796,-17894,14822,103,14578,26044,-29647,23810,17369,30410,-12526,1994,11420,8087,29010,13876,18956,5151,28500,29733,-12336,3544,17058,-3583,28548,27452,12846,10331,28511,-20075,29316,30750,23985,22482,-17615,16880,-7755,27124,6102,6837,11033,12773,-24039,32742,20116,28259,6823,13584,-11123,2674,14511,-7843,-20942,10367,-6529,13102,-30059,12558,11219,32576,21336,-24009,22504,-25860,13099,27808,-16666,-28217,2866,15332,-28967,-8388,-21123,8903,-375,-9253,-12170,8509,24195,15402,-2192,-30452,-890,-18826,-25111,-5351,12272,31691,-21331,26789,-5493,7318,977,16506,2015,24999,19565,-25277,-25000,29809,30548,315,-10090,-3579,-24386,-29733,15317,-1136,32685,-26780,14649,-2754,-24131,-7623,31991,-7985,25161,15904,-4612,26187,-25085,11955,31394,20188,28773,14017,-4958,2849,-20012,-21144,-31124,23016,-22622,-12900,17253,30821,-23652,11811,-9541,-14982,-5536,-9556,6347,17991,-3022,661,-1016,15203,-5447,11683,-8597,-230
50,-6416,-15562,10722,-6276,28214,-27054,18997,15340,16131,-304,-14981,26038,-4829,21857,-15981,-5340,18015,-682,31284,8591,24804,4055,-1923,15201,4676,18573,-14705,-4097,5797,-2167,-24307,-29305,-6581,28373,-2849,23204,-22993,-32518,-5017,-24647,-5132,-6413,-32603,11608,25405,-3519,24072,-8993,19541,-22091,3921,-23186,-26372,-7891,16107,-8023,31914,25835,19645,12201,6485,-18366,-31136,-17445,23641,2071,-12916,-15889,21489,13300,-19328,29010,8907,-9903,-3971,-10932,-21142,3845,2803,-4346,-12200,11056,-22578,-9461,-2372,-13080,23750,-27127,-2893,18495,4489,22781,710,25088,-8981,-26721,-14248,21889,95,-31177,-9263,-3068,21090,-2822,-31679,22593,-536,2631,29198,30276,-30179,29338,-31298,22502,-9110,20390,23863,22882,3778,7850,1284,20125,-7956,25536,-8324,5806,-13255,-27587,-1280,-31684,-11822,29994,24920,-15152,5098,-26816,1757,-5201,-20052,-21283,10156,-5254,-24306,-15112,-7397,8817,26889,-16557,-13271,19774,17296,11131,13910,-4270,736,16193,-6256,-16312,31101,-7684,-26175,-7571,7755,-21218,-3638,30617,-25608,4017,28507,17179,31285,-31614,-28777,-32320,-13577,-25142,32464,1649,-4038,-14556,18358,-17313,-25752,18762,23432,-8698,-30030,-4013,-31206,32464,-16324,10030,-2959,-1378,9948,24826,17106,31209,8247,-19870,14109,-30047,-25776,4601,9286,12893,-21578,-7245,-32484,16428,12598,-4735,12476,3011,-24594,24531,-9268,-1839,4562,28289,-6045,14384,-15229,7690,-20863,-25831,9194,18438,-28678,-10377,30969,-2736,-22682,24259,25016,-9329,26392,21252,-14671,-24299,12006,-27686,-31994,-15901,28103,5766,26677,-18265,5994,-31480,18828,14169,-18906,-4879,-2704,25147,8755,31863,12090,17050,7053,782,-19637,-2619,-6105,-5295,20636,-12911,-14744,-18893,1601,27347,-15690,28574,-13485,-697,11407,-13661,1537,3517,-22010,-15656,25741,-25855,23582,2846,31122,8038,-30712,-31181,16606,24922,13860,27697,11422,-22333,1776,11000,9809,-29795,-16132,32704,31651,-19160,15767,-14457,-11982,-19062,-8775,3591,19139,-9385,-13548,-18531,-3325,27674,-11390,22469,-14240,-29039,-7130,-1397,5757,-24130,-2737
,-4449,20058,6587,-23963,15452,-898,-16997,-21125,-25130,-21575,-5230,-21117,-12098,-3480,25745,6375,10857,30227,6865,31728,-4248,-28266,-17091,4628,-20464,32240,-22849,-9557,-7233,6869,-21618,18411,13198,20357,-15453,19039,-25242,-26942,-18580,-18981,18278,30889,-25141,-20912,25705,-4062,19515,-3238,15539,10577,-32520,26988,-29370,-8025,12391,-31273,24212,-17237,15661,-15873,22459,-433,-19285,-8488,2039,-11429,-2437,-9310,13691,-19038,29701,-25757,225,-26522,-27996,8564,-29947,25312,19332,22857,15030,22079,24094,-17964,-27675,-27520,21611,-25718,29006,5595,26956,-2890,-29872,2209,16214,10735,5480,25643,-31723,-9036,-8961,-2597,-1465,-22605,16560,-3373,11860,-10500,13217,-32088,-30422,25958,-17038,11868,-12142,-12318,-16316,-23656,-7580,30743,24715,-4414,-7032,1220,32650,13740,-3274,-15993,10323,19187,21343,-10791,16961,23798,11821,30059,16772,-23554,-4687,-17269,-11929 diff --git a/tensorflow/lite/micro/integration_tests/seanet/conv/conv3.tflite b/tensorflow/lite/micro/integration_tests/seanet/conv/conv3.tflite new file mode 100644 index 0000000..3f2ae86 Binary files /dev/null and b/tensorflow/lite/micro/integration_tests/seanet/conv/conv3.tflite differ diff --git a/tensorflow/lite/micro/integration_tests/seanet/conv/conv3_golden_int16.csv b/tensorflow/lite/micro/integration_tests/seanet/conv/conv3_golden_int16.csv new file mode 100644 index 0000000..4858be2 --- /dev/null +++ b/tensorflow/lite/micro/integration_tests/seanet/conv/conv3_golden_int16.csv @@ -0,0 +1 @@ 
+-30345,-6638,32767,-11546,32767,11153,4903,-251,-32768,14596,-23569,-32384,-3975,-22413,32767,8580,32767,-16895,32767,32767,-32768,-32768,32767,-12894,-32768,2032,32767,1685,32767,-32768,32767,1106,-6714,13266,19977,32767,32767,-25887,32767,-32768,15766,25708,12976,-1110,-32768,-32768,32767,32767,32767,-7975,-32768,32767,26218,20479,-21583,-22610,32767,-22886,19066,32767,17344,32466,-18131,20539,32767,-20548,32637,-32768,-32768,-24733,-32768,-21670,24796,20974,-32768,-32768,-32768,20783,26660,1413,-3051,-4183,-26711,-32768,-9073,-15177,-32768,8123,32767,-29660,-32768,-27838,-32768,-32768,21674,-31047,32767,18891,21965,-32768,32767,32767,-32768,32767,10170,12365,32767,32767,-32768,-32768,32767,15034,32767,18101,32767,-24752,-32768,10587,-32768,-21295,32767,12002,32767,32767,-32768,11427,19044,-29601,-28898,-3390,18894,32767,-6917,-32768,-13261,-6301,-15956,32767,-32768,32767,-19761,-4564,32767,-32768,32767,16220,29090,32767,-32768,32767,32767,-2822,32767,21775,32767,-2295,27406,-10166,32767,32767,32767,21619,32767,32767,32767,32767,28843,8700,-30063,32767,32767,32767,-16312,10733,32767,32767,-14165,26551,32767,32767,32767,-265,32767,31996,-14626,1942,32767,32767,32767,-19089,32767,-12233,4596,7767,32767,32767,23137,24983,32767,-13570,32767,2348,32767,-32768,-30251,5647,14470,-32768,28243,19736,32767,-22181,32767,-32768,32767,32767,32767,-7090,32767,-17066,-31483,-15451,-32768,9345,-32768,-4634,32767,32767,32767,31322,-32768,-32768,-32768,-22692,32767,4119,12343,31593,21314,8238,32767,-8093,-8999,-32768,32767,32767,-22364,32767,25664,-9906,-32768,32767,32767,9213,11897,-21968,32767,17108,15679,2343,-6698,-25536,32767,30343,-25593,32767,32767,21971,-32768,-32768,32767,11203,-5969,-9854,26830,-32768,18069,13479,19020,32767,32767,32767,32767,-32768,-32294,32767,32767,-22562,-32768,-622,32767,-32768,32767,32767,32767,-32768,-20905,-9212,32767,32767,32767,-22398,-15833,-20578,15707,-3431,-20480,-28064,802,-13546,-32768,7149,-32768,32562,-32768,32767,-32768,2169,32767,3276
7,-32768,-16812,32614,-32768,32767,-32768,-16013,-20559,32767,-32768,-32768,9457,-32768,32767,-32768,-14905,-32768,5744,-531,32767,-32768,-6276,32767,-25370,-32768,-31698,32767,12088,32767,19726,-32768,-32768,-32768,-6192,32767,-16803,32767,-20171,32767,5472,23357,-7996,-32768,-32768,25919,8565,-32768,1632,-12014,6513,32767,-15109,-32768,2045,32767,-23651,-32768,-32768,-17720,-6970,32767,19282,-32768,27785,29069,-4479,32767,32767,32767,-20103,32767,32767,-29036,-1044,-29815,6337,-376,-32768,22540,32767,-32768,-27366,-32768,-32768,-32768,-32768,-32768,32767,32767,28370,-32768,-32714,-32768,-9573,-32768,-32768,21094,-3321,-32768,-32768,32767,-32768,-22535,-6797,18574,8288,-32768,-32768,1405,-19366,10997,-8025,15467,-27614,-24435,-32768,-30896,-5990,-32768,32767,-32768,-8147,-32768,-32768,-27443,14926,-32768,32767,32767,-540,-15643,-32768,-32768,-12620,-3140,12439,-17157,-32768,32767,-32768,32767,-4714,-32768,-32768,-32768,-10398,32767,32767,-264,32767,32767,-32768,-5301,-24715,32767,-32768,2736,-17109,-32768,9262,32767,-13453,-32768,29607,-18711,-16115,-32768,-32768,-32768,-18741,-32768,4585,32767,-16636,22528,30388,-32768,-1997,-30440,-13458,-13124,-6646,32767,13914,-32768,-30305,-12772,19179,-32768,-32768,-27026,-32768,-32768,9060,-32768,32767,-32768,15726,-3919,15711,14916,-32768,10257,-32768,-32768,27870,-32768,18123,-3070,2968,32767,32767,32767,32767,32767,13954,-8665,15010,-27324,32767,1378,-25675,-32768,-14591,32767,-12661,32767,-32768,-22752,-9101,32767,-32768,16471,-29805,32767,-8854,-32768,16036,-26389,26923,-63,8961,-32768,-32768,32767,-32768,-32768,22324,-32768,707,-32768,11581,14860,-25956,-32768,-7027,-32768,-11855,32767,-32768,32767,10989,-9799,23631,-32768,-6329,32767,-31317,32767,21147,-32768,32767,32767,-6497,32767,32767,-32768,-19026,26387,-18265,-32768,17728,32767,-17554,-3695,32767,32767,-9972,32767,3559,32767,29757,-32768,32767,15514,32767,9618,6945,32767,-9908,32767,11000,-13513,-6204,-18389,16627,32767,32767,-4638,-23307,-26086,2684,8770,22847,
32767,29669,4583,32767,32767,17693,-23063,563,-32768,-21,32767,32767,32767,32767,32767,32767,32767,32767,23465,5223,32767,-6614,32767,-17411,32767,6740,32767,-32768,-28674,32767,-32768,27280,-32768,-20891,32767,32767,-11688,28781,32767,-1966,32767,-9799,32767,-32768,12007,19333,-32768,-18441,32767,-11037,-9081,-32768,-8465,16770,-32768,4977,32767,5873,-24513,-32768,31223,29813,32767,32767,26279,-2962,-32768,32767,32767,-3237,32767,8527,-4819,32767,20607,27602,32767,-6114,-32768,-5754,21786,-32768,-25997,-32768,-32768,3039,-4689,-6265,-11653,32767,32767,32767,-32768,7128,-32768,-25430,5022,-6705,3355,-3750,11311,28610,32767,-1970,31543,23355,-32768,31263,-32768,-3467,-32768,-6651,-14308,12681,-19963,-6903,-22862,24544,32767,5182,-646,25058,-32768,6155,-32768,-22638,32767,32767,-32768,32767,-27978,32767,12825,20675,-32768,-22974,6425,-32768,-32768,32767,-32768,-9266,-20949,32767,32767,-10367,-32768,27654,-32768,-32768,29353,6160,-32292,-32768,-26358,-32768,-32768,32767,-4376,11120,-32768,32767,-11634,-24160,-9924,-19207,23827,-1832,32767,29623,32767,32767,-32768,32767,32767,30764,-32768,-32768,-32768,32767,-32768,14944,-32768,9574,-32768,-32768,-32768,-32768,-32768,9700,32767,-4870,32767,12027,-32768,32767,28490,-18234,-32590,3561,32767,32767,32767,5473,32767,32767,-32768,20595,-23498,11263,-568,-11011,-32768,25089,5210,-4545,13390,32767,-32768,-32768,-20232,32767,32767,-17735,-32768,-29148,32767,-25121,-32768,4422,-17730,-23891,-32768,-32768,32767,17194,-340,32767,5676,12150,-25138,19235,-32768,-10865,18438,1577,-25551,-10054,-32768,2123,20052,-32768,-9192,32767,-22758,-6807,-32768,-32768,32767,3546,1793,13978,-30910,662,32767,-32768,-13834,4510,32767,16840,2947,5521,-24639,-32768,32767,7201,-32768,-32768,32767,10159,20689,-32768,32767,-23843,3963,31361,-32768,32767,-32768,-29109,-32768,32767,-23649,32767,32767,-1427,32767,32767,-32768,-32768,-32768,32767,-28107,12580,-32768,11073,16439,27298,32767,32767,19237,-11336,29817,11144,32767,-21536,-32768,32767,-7026,-11106
,-32768,-32527,-1208,-32768,-7090,11713,-32768,-2787,-32768,-32768,9039,9241,32767,6818,-32768,9005,-12753,-12953,14205,-12151,32767,-1658,32767,4078,32767,-2760,-14213,-13953,22064,17949,2377,9956,-19504,-32768,27296,-32768,-32768,32767,31206,-15027,-32768,-32768,-32768,18061,32767,-17769,17351,-8898,12742,31573,19259,8539,32767,25530,-32768,5389,32767,-2550,32767,-24921,-22109,-7567,-32768,-10570,-32768,-32768,-32768,21304,-20034,29903,-32768,-5795,-32768,-32768,-32768,-15291,32767,32767,-32768,16276,-32465,8781,-4708,-29380,32767,-10808,32767,6173,32767,14316,-32768,-24210,32767,5293,32767,13543,32767,-32768,-3478,-18115,32767,32767,-32768,10425,-32768,32767,15000,32767,32767,32767,32767,32767,-32768,-22944,32767,-32768,32767,-25915,-32768,-4888,32767,-32768,32767,-32768,-32768,15856,-32768,-32768,-32768,19524,-32768,-15641,-32768,-32768,-9690,-7342,-32768,32767,32767,32767,32767,-32768,-32768,-2394,-32768,32767,-23022,-31482,32767,-11616,-3478,5102,-32768,32767,-32768,-28029,32767,32767,-24010,5277,32767,-7270,13532,-25982,32767,1309,-25604,6855,32767,32767,32767,32767,32767,-12621,32767,-32768,15438,32767,32767,-32768,32767,-27009,32767,-1059,-32768,32767,18948,10077,-11537,-24341,-32768,30405,15237,-32768,-32768,530,-32768,2392,-32768,-6031,-32768,32767,32767,5605,32767,-32768,32767,-8746,-22757,32767,-31283,19109,32346,-32768,32767,-12909,-32768,32767,-32768,32767,32767,-880,32767,28096,-32768,-32768,-32768,-32768,-31056,-6568,-15091,1745,-24846,-12121,32767,28391,-27181,-13639,-27019,-20972,-28114,17549,32767,-32768,3797,-21591,-32768,-1674,-22737,-32768,24239,28974,32767,-32768,32767,-4377,32767,22749,-32768,-32768,32767,-32768,-32768,-30697,32767,-32768,-7621,-32768,-32768,32767,-12345,9404,-32768,32767,-32768,11333,-29022,-32768,-32768,-28496,-32768,-32768,-32768,4430,-32768,4124,-16686,-7053,-32768,23771,-32768,-11656,-25325,-32768,-25266,-20316,50,32748,32767,8866,32767,-2321,12740,10498,-32768,-32768,12924,-26716,-12812,-25876,32767,2798,-32768,-32768,
32767,-30278,32767,32767,-14508,15314,32767,32767,-32768,-32768,-4768,15352,-32768,4707,10912,-32768,-25555,-23175,32767,28592,32767,5434,-32768,-15137,32767,8102,-32768,11109,28882,12281,32767,32767,32767,-32768,32767,-32768,-3597,6366,32767,32767,32767,-32768,32767,24382,15346,4249,11077,-32768,-13626,32767,25583,29070,-1027,10115,32767,-32768,-6027,21347,29053,18403,32767,-1490,14855,-32768,32767,-32768,-32768,11502,-32768,14882,-32768,8467,32767,-32768,24823,6215,-10131,12330,32767,-32768,-32768,4493,-32768,-32768,-16900,-12717,22230,-32768,-31482,8212,-8958,7909,32767,-2974,-32768,-32768,6632,-32768,-32768,-32768,-23653,32767,-32768,-8676,14208,-32768,32767,-22444,32767,20100,-23728,-32768,-32768,-32768,-32768,-32768,32767,29381,-32768,-32768,-30353,32767,-32768,32767,-32768,-4994,-32768,31123,12215,24074,-32768,-32768,2425,29650,4329,10229,7024,32767,32767,18036,-22119,-5497,31792,27722,3850,32767,32767,32767,32767,32767,22301,-32768,28918,27885,-22595,-9195,32767,32767,-17602,25538,-32768,32767,21839,-32768,31079,-10300,32767,32767,32767,14097,-9847,21933,31570,13271,-32768,-32768,32767,-12454,32767,-27401,23234,24726,20142,6608,15121,30144,-11340,-7776,-32768,12528,10962,28916,32767,32767,32767,-4395,-25048,9051,30894,32767,32767,-32768,-19910,-6320,32767,12103,27123,32767,32767,14634,32767,32767,-888,-13423,-9334,16407,-32768,32767,31269,29176,-20393,-9654,32767,32767,32767,-32768,-31152,2999,23723,2362,32767,-32768,11128,-32768,32767,-32768,-32768,-424,-32768,-32768,32767,14198,-32768,32767,-15546,-16658,32767,15301,-2438,-9404,-32597,-9308,-32768,-12561,-30089,-32768,22944,-32768,-32768,-17298,-32768,-32768,32767,14836,-22082,-11218,32767,2610,-11880,-4328,19844,-32768,-32768,3224,-32768,-32768,32767,-3454,-32768,-32768,-23579,-21525,24574,-32768,32767,-19226,-32768,17084,8739,22239,-32768,-4175,32767,-30706,-32768,-6120,-32768,-16537,-22383,-30937,27074,-3948,-22400,-32768,32767,-32768,-32768,32767,32767,-2840,-6294,-3991,-19998,32767,-32768,-122,-17904,
-6790,-32768,-32768,-32768,1428,-5205,-32768,-6195,17519,32767,32767,-14573,32767,-32768,-30190,-32768,6416,32767,-14999,-32768,27438,19962,13783,-32768,-7615,32767,-32768,32767,32767,4416,-6680,32767,2777,-14327,-32768,16143,32767,32767,-25686,8635,-18387,32767,32767,32767,14422,-32768,32767,-32768,3017,32767,32767,-4440,-32768,32767,32767,-25220,10416,27542,-11564,21530,12952,-27175,-32768,7859,-83,-32768,-32768,32767,-3303,-32768,32767,-32768,-32768,32767,-9582,-24499,-32768,13003,26314,32767,-7614,-10214,-23965,-32768,32767,-32768,-32768,32767,11220,-32768,20006,-32768,25304,16905,-13197,-22868,-18609,-4062,32767,-29143,28238,32767,-32768,-32768,-24675,-32768,32767,32767,-32768,32767,-32768,-13949,-15998,-30279,-14375,-32768,9868,-32768,32767,-32768,-30484,-17292,-32768,-9441,-4818,-24402,1977,-32768,-9624,-32768,-28187,-27299,-32768,-2326,8608,-32768,-32768,-32768,-20120,-24921,-32768,-32768,-32768,-32768,-14583,-287,-32768,5183,-32768,-32768,-12788,32767,-24125,-32768,-18682,-32768,32767,22474,-32768,11128,392,9245,32767,32767,-25537,-32768,32767,-5456,18170,32767,22727,-32768,-13662,-32768,31670,-32768,-29285,-32768,-32768,32767,32767,-32768,-32768,29793,32767,15423,-32768,32767,32767,32767,15312,-2602,12141,32767,32767,-31287,19205,32767,-6898,4353,-9768,-32768,7026,3151,-32768,32767,20863,32767,22259,-3020,-32768,32767,-32768,32767,24211,32767,4360,29660,32767,8586,-14908,14105,-12381,-32768,14225,20927,-32768,32767,-32768,32767,23037,-32768,18724,-32768,-32768,-22441,25501,-32768,32763,32767,-3037,31640,-32768,32767,-32768,-32768,32767,32767,32767,26459,-32768,109,32767,-17765,278,8181,13601,-32768,-32768,-32768,32767,29866,-30598,-32768,-22104,-24878,-32768,18603,-32768,32706,32767,-32768,19652,32767,-32768,-11836,-6156,-32768,-19906,-32768,-5038,32767,-12809,-32768,32767,25600,-8700,-32768,-4822,32767,32767,-32768,-32007,-32768,32767,-32768,-32768,-3258,32767,32767,32767,-23703,-29117,-32768,-13680,32767,-11238,32767,-9204,32767,-32768,-31195,-12097,3276
7,27651,32767,3149,-5638,29696,-32768,-17333,-32768,-32768,-32768,10326,32767,-30536,-32768,32603,13835,-32768,-12703,32767,-27021,-32768,24339,13792,-26826,-5620,31977,-17268,32767,-16423,-18535,-32768,32767,-12078,32767,-14239,-32768,19083,-32768,-19378,17790,-32768,-2271,32767,32767,-21669,-32768,-3125,-32768,-32768,32767,32767,-32768,5093,3220,-10337,32767,32767,-32768,29001,-32768,-32768,-32768,-23304,-32768,32767,-2083,-32768,32767,16242,-5906,-24347,-32768,32767,-32768,-32768,-32768,-27387,-32768,-32768,-32768,-32768,-32768,-31684,-32236,-30713,18969,-17068,-32768,32767,-32768,32767,-32768,-5451,-22553,-21017,-32768,-32768,27081,-18628,-32768,-32768,-32768,-26871,-9262,-6081,-32768,-8379,-32768,11519,-32768,-25820,7662,20317,1259,-6132,804,-30839,-32768,-9822,-32768,-32768,-32768,-32768,32767,32104,-14685,-30564,32767,-32768,10724,32767,7916,-32768,-32768,-9838,32767,-21014,-4068,-1349,17784,-32768,-32768,-3625,6489,-13985,10086,-15934,-32768,-7504,32767,-48,32767,-32768,-123,-18678,32767,-32768,31796,21766,-32768,32767,-32768,28564,32767,32767,-32768,-7811,32767,-32768,-32482,-32768,10877,4967,12496,18928,-13559,-32407,12234,12083,-30608,7998,32767,13128,3963,32767,32767,32767,32767,-32768,14096,-20203,14642,-2355,-4846,31172,-6047,32767,10433,3371,-32768,10,-32768,-32768,32767,-32768,27658,-3739,32767,32767,-4843,32767,-28973,13581,-32768,-19738,-32768,32767,32767,158,23490,32767,32767,1553,-15240,-32768,-1430,32767,-32768,-1578,28711,-7371,-30139,32767,-26635,-32768,32767,23608,-15633,-11955,32767,8812,6473,-32768,-32768,-11627,1734,-32768,-32768,32767,-32768,-25089,32767,-32768,-32768,23711,-32768,-9645,-32768,8068,32767,17641,32767,-18081,32767,-10951,-32768,20341,-32768,-12581,-10756,-18154,-9206,-5942,32767,-32768,4384,24920,32767,1142,-32768,-32768,-32768,22416,-32768,-8836,871,20114,32767,32767,-32768,-29532,8716,-32768,-32768,9451,22415,-32768,5724,32767,-30762,-32768,32767,23430,-32768,2803,32767,29568,-11593,-29703,32767,1686,32767,32767,32767,327
67,-27324,-29313,18007,-9018,32767,1121,32767,-14464,31394,32767,32767,5255,-27427,32767,-32768,32767,29621,-32768,31567,29866,32767,-24285,32767,1737,32767,32767,-4828,-2962,23890,-32768,-32768,-32768,-32768,3405,32767,1061,32767,32767,-19636,-6520,-32768,32767,32767,17874,32767,32767,32767,32767,-32768,-32768,32767,-32768,8758,-32768,3081,6582,32767,27252,-32768,21492,-32768,-32768,-32768,-32768,-32768,-32768,32767,-32768,-6895,-32768,-32768,1601,-15263,-32768,12079,-4213,8456,32767,-32768,32767,-32768,20699,-32768,8933,-32768,-32768,-32768,32767,-32768,3337,25358,719,27546,-6461,-32768,-32768,32767,-32768,21444,32767,25756,32767,-32768,-17081,11591,-20579,32767,12372,29265,-32768,-32768,32767,6096,-32768,32767,32767,-874,-13532,-32768,32767,-14449,12336,-31424,5420,25972,-32768,-32768,32767,-32768,-17817,-22444,18611,-17656,32767,-32768,29217,-13323,-7024,32767,-12581,-32768,-32768,-32768,-32768,-32768,30119,-6355,1992,-32768,-18710,5113,32767,-13129,12937,32767,-27326,-32768,-32768,-32768,32767,32767,-32768,2533,-21565,32767,7560,32767,-32768,-15541,-25935,32767,-8372,-28349,-32768,-32768,-17914,-5511,32767,32767,29984,-32768,88,32767,-17118,-6257,-32768,32767,-7724,32767,-31500,32767,-9259,-23846,32767,-15235,5741,-32768,29130,32767,-32768,5635,32767,32767,-7421,32767,-23759,32767,18797,-32768,-32768,-32768,-2172,23928,32767,-32768,-13288,32767,9755,-32768,-1254,32767,-32768,-32768,-32768,32767,32767,-32768,32767,32767,32767,6753,7261,-32768,-7838,32767,430,-32768,-32768,-867,-4586,-32768,-32768,-19521,32767,-32768,-26500,15478,-32768,11101,32767,-32768,-11729,32767,-32768,-3663,-17470,-26441,-32768,-27198,32767,-32768,-32768,2183,32767,-7799,-32768,32767,24812,16046,-32768,-32768,7528,-32768,-32768,-10275,-32768,-32768,31743,-27332,-32768,-28860,-11421,-32768,9772,-32768,-32768,-13767,-32768,-32768,-32768,-25592,21662,-10536,-32768,-32768,4678,-32768,-15991,19236,-4013,-11419,32767,-32768,3744,-32768,-32768,-32768,-23453,-3668,32767,-10855,-19182,-32768,-29477
,32767,-21924,-1418,-32768,-25921,16382,-32768,-32768,32767,32767,-32768,-32768,15412,32767,-32768,11462,12134,-9911,-32768,-5358,32767,19535,-26430,-203,-8852,17085,2940,-9850,15304,-32768,32767,-3000 diff --git a/tensorflow/lite/micro/integration_tests/seanet/conv/conv3_input0_int16.csv b/tensorflow/lite/micro/integration_tests/seanet/conv/conv3_input0_int16.csv new file mode 100644 index 0000000..781240c --- /dev/null +++ b/tensorflow/lite/micro/integration_tests/seanet/conv/conv3_input0_int16.csv @@ -0,0 +1 @@ +-17110,14046,13156,-9338,21583,-16770,-6551,12086,-15579,-30492,-21569,-16868,-13196,-8833,24518,-18413,20803,-5712,29968,-30324,-11596,-6287,-13190,-2678,24733,10141,32599,22977,-17535,10273,-8804,25832,32196,-13877,28269,31945,26563,1166,-23489,-15702,-32683,-11932,-11354,10565,-2629,20874,9100,9775,4371,-26934,18792,-17426,5738,-18753,-10978,1295,-17401,-23056,18695,-30195,13225,-16874,-23175,-23669,11968,31637,25499,18830,-27321,15705,-27234,-14256,-31427,-887,-7757,5243,16670,-1958,31069,15760,-30501,26494,18496,8306,23106,-19652,14259,5474,11893,-2347,26570,-23844,-5091,-9399,26450,23280,-29496,-28022,-8489,-22854,184,23465,7768,32759,18658,-14099,1945,-17422,27454,-5152,32145,20790,-832,-17969,-17950,-22063,-28031,-17876,-12329,-16958,9895,12984,2452,1624,-20432,13381,-19643,-6501,-22633,-10181,1902,-15834,-10189,26041,-15763,-8748,17585,5491,29272,6880,7814,1173,-3647,-4195,-28220,15479,-16235,16657,22846,-8633,23566,24324,-26764,-21423,9485,8602,17111,-20272,-22031,-18902,20018,3141,-14493,-4643,12706,23538,1833,-20015,-32033,23179,-18150,-17328,20397,26363,8648,-21203,-23832,-17232,-24978,-30132,-31550,-11545,-16353,-11408,29919,-28123,5985,19173,-13975,-14001,-11396,-19828,-22194,-8235,21616,22145,-8105,-20034,5291,18989,-18841,6825,10875,-24279,10673,-26379,27365,7887,12435,-32721,-31207,25250,-21162,-12094,-29089,5517,-23285,2280,4901,-15713,8116,30368,-26712,17331,-9974,8944,-18830,-32300,988,-19737,12963,636,2705,-18806,8234,-17562,13888,-4
239,-25091,17832,27085,-27404,16895,-9774,6514,25728,16465,-5917,18473,-1549,-11004,-17472,-27946,-14794,-23452,-23286,-30709,9091,-11792,-9177,13425,24324,-25371,-21924,1191,7061,30645,618,24406,23121,-17217,4904,-9296,-9711,-8239,-25967,-2741,30868,-1184,-29457,13149,-11016,-24261,-32533,24946,20082,9639,-24973,20398,-21875,-25443,8559,9714,9794,7881,30894,-16076,-19276,-25102,-4208,8544,-27788,-28856,19794,9135,2828,30496,-9997,13857,-12556,-32710,-25555,29175,-1642,21491,11015,6327,4770,-6064,-2897,-9817,-21352,-13678,10671,-22755,20344,11825,14889,2783,-27761,-985,-24567,30619,20321,26420,-10344,-6852,19000,-22241,10167,10204,9744,-757,31432,17357,9474,32585,-7848,26449,8235,-11420,9489,-28534,-6766,15741,-13202,18110,-2723,17342,13861,12952,7980,-32592,8419,12221,6395,19266,-7464,26218,27716,17226,-8896,-13916,-11813,-9954,-3788,15654,25669,4805,-15716,4806,-22993,-18350,6708,23478,29809,31361,17058,-26746,-24859,30479,22671,-19026,-11183,-27043,-13148,-15836,-17954,-17202,23926,-32503,27523,-13608,5243,-25558,4755,-7098,-26543,21144,-19911,-6788,-12120,-14779,9747,-5937,6445,26600,-14045,-10005,-7058,13606,-17472,-24531,-1780,-31743,-19003,-14474,6829,28081,27578,19873,14587,-4206,15374,13383,8206,-1205,21675,16866,-25474,28883,18910,12596,26322,-1573,31597,-11050,17057,-9107,12011,4577,-10706,3309,-7492,-7275,-22013,16435,15400,-32103,32445,-3358,1989,4183,-8990,27947,-29386,13755,-6120,24450,166,11206,11361,28610,-8120,-2144,3262,-6192,-18980,28969,-213,11078,27206,11821,-30768,2367,11217,-25821,7904,-26011,-17733,9180,-19596,-1495,-10612,23653,13035,18382,9775,-1264,-11322,23381,-20375,-16537,29027,15077,-26565,32354,-3030,-11653,8884,24929,15045,-27492,25632,-7675,20022,20545,-1850,18481,13996,13578,-844,17310,-20257,5599,9377,7612,-24102,30240,-29791,-11405,-28838,14079,23823,-2110,26311,5066,27612,-13478,21745,13483,-6715,2966,31169,-13169,-9646,-19559,26155,-22672,-30848,590,-23580,-19702,30193,1597,-22285,24096,-24952,-890,-30576,20424,29335,28157,-28
512,-18171,-26247,7790,-20107,31600,-5760,-13954,-17280,18896,23579,13726,18318,32388,1604,-20224,-31847,-22243,30025,-7861,13300,12980,-25627,-25482,-11415,9573,-26517,-18439,-1093,15743,11577,32076,-8219,8078,9256,-14131,-11371,24623,8716,-15423,29382,-674,10955,-3372,-2949,-18052,-3998,6504,-2182,1341,10464,29622,-13528,19343,28567,16301,28580,-323,2129,-18445,-7830,1578,-13149,-21755,-31756,32626,3794,14632,-26804,-17836,3231,-10625,8176,17255,-27552,-23086,-16651,-18688,11408,28091,-7779,22592,-15511,-22719,-15499,-22868,-18535,-6348,26451,-6307,-18108,18267,-8662,-25013,-31329,-17559,7351,-4101,18970,-30557,-6573,13285,-26651,-14324,-32389,-14601,3272,7365,24886,17519,9352,13879,-9112,-17523,-29178,14808,4170,11535,16762,11592,10719,-13062,13713,31270,-8366,19471,-17283,9135,-12958,-6924,12938,-3771,-27400,-18598,24311,-17949,-26674,-21053,-28367,4594,-5050,22334,-3496,-26957,13747,14241,-6561,-14149,20596,-2946,-18843,17885,-4970,-14957,13224,24728,-18891,-31743,-18175,-8562,-27555,-15090,-31823,-11664,-21825,27908,-4624,-19457,10316,-31439,9245,-7658,16275,-11678,-12954,4501,-19712,-3346,561,-23497,2403,23889,-22022,29334,-8151,-10382,4638,11200,19818,8135,2543,29643,29225,32722,10498,-5786,802,24825,-21732,-32162,-28175,-12293,-3548,2694,-6504,-25423,-1604,-29269,18023,26580,29840,17802,-22131,4129,-28222,3889,705,-8890,-1112,29793,-17396,1698,27261,5777,-15146,-13174,23082,4056,23203,-13519,-17954,-31186,4171,-2472,-6255,26370,-28489,-7536,-32481,3535,22333,-1996,31417,17640,32507,-29647,-10031,-8296,-20193,22574,16575,4642,28459,-24382,31754,2222,1922,1202,11438,13091,7558,-11118,22651,-22411,9924,5689,29146,-31654,-9595,-18782,25967,-1933,123,12068,23757,7903,11615,31175,-23502,-1371,-1952,17314,30628,24727,19545,-31637,32357,5477,29783,-28202,-15860,79,-10269,-17511,-17908,-25558,756,-1247,-31514,-17891,30261,-20422,-14147,-25468,-4421,-18991,264,-10119,-12792,29500,11418,7641,-1277,-29592,-10419,-10827,30613,6073,20756,27565,-18064,-4154,1223,-727,2770
0,13655,-21389,-10754,-20470,22281,24668,-12556,14432,-11205,-27113,1150,-5619,6212,16047,-5858,24348,-7986,-6887,12296,-8094,-9849,12630,24563,2587,12439,26491,18848,-24162,9580,21317,-7194,-12234,6038,26699,-20758,-16319,-10281,19498,28943,22243,20334,8813,28454,666,-25830,-9273,-22401,3264,17279,24476,-15053,-28992,-30786,26885,-21400,19605,19667,-6604,-25990,26316,3265,-28826,20733,6601,-16701,-17339,898,19775,10881,-9120,30309,6877,-20001,31391,-20994,-17193,6285,-15260,6267,-8976,31098,-23630,-308,-19655,26288,-28924,-18436,32140,22425,-4938,24244,30547,-19215,23175,-17256,-1762,9747,6778,-4137,3354,17079,4891,1254,24804,-12800,29734,-18065,-28446,-7109,-17275,-15535,-26901,21332,-9243,319,3226,-23528,-21197,-27635,-22214,30426,-21244,-24780,8614,30856,28420,7555,21291,28982,5438,9558,-32292,19060,-23234,17006,2083,27469,-26713,-24729,-8451,22499,-15830,25971,-23029,16384,4015,29854,28347,21674,-2004,7308,-30657,21412,-30310,-26861,16327,-12199,-22792,18588,-10142,-29294,28503,-16371,22222,-32649,-11556,12062,-7704,15930,24164,28356,18556,886,-23882,-27170,5716,-18132,-28676,-28384,21030,-21540,15754,-14546,31015,32473,1281,12932,-11287,-11887,26237,9403,-4945,-7681,20999,15141,-27810,436,3831,27332,-3237,-18008,14842,-19525,-1732,3655,-4537,-21727,18718,-5909,-13509,-27078,-22262,5946,29587,581,24383,17657,-10826,-3981,21681,9592,15938,19345,9414,-21978,2967,19694,9425,-5746,-19009,7993,-23767,15472,-16014,-9532,-20626,-20465,25965,5313,24487,-24168,-1071,-24862,28963,-23703,-17916,23708,-28328,-5187,-20163,19538,27883,26293,12562,-3067,-20926,-6564,-31346,16853,2745,-3245,-23277,23343,19835,-15915,-4897,18011,-8279,-20351,24021,1400,-16101,-18165,-25163,13524,31086,-32693,24498,-15017,9262,25151,32113,27538,21286,-10974,31013,-1692,11254,-16378,7916,1225,-20713,16843,-27080,-7664,-23163,21973,-4599,29537,-9463,-15596,12773,1757,20143,-21547,4500,2347,26386,31764,-19956,-19836,31075,10976,-21094,25211,10853,-1884,-27409,-548,5398,10635,8980,7341,20595,-21839,
-18062,-31233,-30234,-8624,1982,24208,-6564,24360,31940,14679,-13309,-23897,-26088,-5423,6755,-28802,-30924,10779,-13589,-14899,-4842,12945,-25412,-22897,-988,21118,30673,4280,21604,28586,13708,32332,19057,6817,-24957,-32578,17409,7472,30903,9374,-24177,17769,-8729,1681,567,-6702,19795,3031,30373,-24118,24162,3941,-17878,-27662,-26339,19960,-23950,-14273,27181,19699,30366,-10758,19358,9079,13211,-27667,31182,4186,-19515,-19372,-16246,-31572,-23179,23012,-24245,8703,-7514,31446,-9804,-8603,-17221,-7614,-26816,3144,-5420,7193,12007,12309,26316,-9345,-25663,-26522,21330,14726,27187,998,-15273,-16613,30824,16680,28712,-22293,31691,-1780,-2203,14321,29870,-1121,-29784,-25230,7575,22192,-17029,-3107,7219,-26632,4632,-12275,3778,24114,19067,-30651,-30362,-22993,-3674,23574,31000,-6064,31757,-15507,21355,21082,-23476,-31922,4068,-30572,4749,9165,-21121,-1296,-19752,459,32030,-1993,23552,-28675,-9579,-3226,7415,15099,-11371,-24694,-1900,25098,-18403,14898,15824,990,2026,-25584,19470,-4108,-1038,-17681,-10085,-9949,13668,-10286,8834,892,-7718,10506,21482,-13977,-25776,-2585,-8758,-8266,9,28724,-5508,26135,22013,24853,-17189,-5845,10518,-17664,6899,17086,18524,-23586,15255,-7826,-4462,-19276,15822,-15419,29931,1490,17053,-6533,-4912,23757,18209,-1760,15861,13603,27315,1344,-13103,-8206,31544,-8538,-17237,27201,-19237,-17109,-3887,-5757,12349,26032,-25610,215,-19016,-21331,32502,-23934,32457,-27753,-5427,-15109,3282,21051,24659,-22159,28893,-20151,26483,30876,-19085,29526,-7,-11658,22778,-2237,-2927,-25073,-18836,9368,23404,-16068,-30298,12441,8057,-8585,22131,12200,14891,6267,-24985,5784,30817,-5928,-6233,-28522,-28535,-3197,17288,10664,-24294,-32006,8999,-10741,11076,-19525,-29923,3080,-30720,3360,-12677,-24359,15814,13748,-23657,23721,5544,-11342,-25165,1455,28022,26266,12385,29158,-2339,-16303,22293,-8783,-10546,25894,28328,23267,-32701,-23418,5855,-24737,-4740,23625,8297,7363,17420,-31421,1537,15275,-16627,-4611,27916,-20520,-9083,-13099,-17822,-24562,4095,-23145,17944,279
59,-14788,-15686,-23485,-26189,-20314,-26287,18617,-15526,-3729,-24666,-2248,-1482,-6508,-14167,20569,5037,-16652,11315,3234,21299,-14405,-24229,-31405,24542,15332,26020,-12094,-30789,6627,-15030,-19790,4660,25844,-7888,27001,3608,30093,-24069,23548,30744,3765,-30554,19640,3187,1086,-9348,-15973,7394,23997,-1672,-6753,29395,20920,19565,13610,-282,7657,-18403,8007,-26329,-29252,13265,-1947,18312,-24401,-24279,-12786,7272,13994,12227,-515,8047,-3478,986,-30598,26313,-402,-29888,26669,-19444,-19922,18725,6503,-31695,23078,2344,-16632,18031,-14455,11578,17684,12943,-17508,21601,-10012,-7329,-29929,23995,-653,-3617,-29355,15886,-28578,12057,-24749,25795,-15630,-27179,-21920,3025,8275,-7107,9654,-4516,-13952,6431,-25234,14802,-3988,-10170,-9021,26929,24491,21425,-10854,29489,-25120,30489,15194,30551,3260,3667,9554,-24218,-28865,-18395,-13205,28380,5940,-20415,16803,30772,30808,7417,-19221,-25631,-24354,-28220,-25480,-24191,5336,-18358,23573,-15936,-30764,7453,-28805,3298,-11653,15328,13895,-3638,29701,-23954,-24525,4353,20793,-16260,1825,23164,18543,-18476,10440,5920,-17977,-9138,-24141,29564,-15609,12295,30722,-30242,24499,16590,25831,-7437,11005,-25329,4327,-12499,11888,30102,14764,-30299,31448,-21806,13168,24856,8093,-11552,-24239,32015,-11908,-2320,-22766,8880,1773,18519,-29202,3142,-25044,13988,20116,5702,-24246,15793,29681,-21598,16946,-1819,7797,-25920,-28385,29931,-2942,20094,-19987,241,-31743,7426,4670,-12004,3309,4825,2459,-13663,13287,29641,30228,6022,23550,-21469,22793,18300,-7594,31860,9705,8924,25936,15640,-11809,20085,8926,-7825,3035,13195,-25754,-28775,17110,-16114,-31757,-28782,-3920,-20652,-17269,5974,8277,-19566,-15955,-4002,26822,-10738,7285,27095,-10917,-11183,21003,7187,-28024,28435,4446,-4666,12272,-3113,25674,1452,-12600,11784,27635,25638,-21264,30331,18256,-27144,-6540,29789,7490,-14537,13784,4116,32725,11654,-5955,26312,11608,-25222,-14770,14435,-391,9042,-12792,-18094,-27105,-14697,-8362,10482,11988,9021,28988,25762,-22453,-9864,-18381,19661,243
17,-16807,-12784,-31038,-10609,-16406,-2873,-10526,18328,21249,26534,-31228,-29562,21575,-32199,1325,-11871,28713,-19532,13675,-19570,-6086,17007,4525,-26135,5376,13226,1928,-299,24578,-18575,-19928,32208,-14967,-17839,-29423,-669,9841,16858,-21416,11014,-6989,6452,11266,-22287,-18574,31237,-15519,12730,29874,14834,2900,14510,-18638,-30224,-8253,-4671,11406,-30596,7101,24676,-3300,-7093,-12353,32755,-28349,-8589,-5358,-5995,22170,-2951,-23771,-15851,-28702,31737,27033,7458,-7624,-8068,-1854,-27376,12890,6505,4085,-15337,-15790,-16081,-3899,-11226,30181,-15103,4697,-3981,-2684,-22608,-26501,24360,11497,-12528,-23717,-1841,30262,14331,-15328,16178,-17365,-17069,-28683,-30736,-1356,-14005,-4886,11058,-21220,398,-7642,-24353,-13221,2346,-32451,-4229,-18401,27698,19252,25378,15651,-28521,-29945,-2812,-32131,14895,-25554,19705,13192,-4518,-5575,8006,21559,-3239,26130,32234,-28737,1518,22884,-24521,21886,-8485,26779,-13596,26616,4823,18034,-24721,-7624,-31207,10381,-24188,-4716,6824,-20939,-6417,-24916,2198,21815,-12517,-9254,-7322,-28087,3434,25524,-13489,17001,-14688,25752,-4010,32636,-14286,28666,-19326,1373,32368,7620,-9827,-1640,28752,-7961,16586,-9678,-8728,5676,4414,23201,-18039,-18631,-27947,23508,29947,-20131,12717,20684,-446,-13972,-5153,-4018,23868,11552,-17587,5859,24505,-9608,-13195,-13143,-13842,15431,-4856,-15150,-6639,15192,13224,4123,17306,-31036,-32349,6582,19757,12014,5261,2160,-27189,-18628,26110,5064,-2171,-13184,12758,-5695,29814,-1427,24011,22542,5913,-8717,-21341,-2645,17005,1842,12255,19492,-23624,14324,20062,6786,-22205,-7362,-22921,-16004,-23612,12212,-25608,-31494,-15748,32736,13849,20819,-22217,22362,-25008,-3134,-30304,-32239,8360,31757,-32244,20781,-15295,-20808,-17017,-21642,21353,-19381,937,-21864,2691,18832,-20918,-10765,5460,-10355,-13099,16397,-26160,-23054,7490,11454,-27495,23882,-6495,-358,14391,14309,-17065,-10279,9259,32225,-26386,-21304,-18054,31930,24743,15259,-17095,-3070,18191,28351,-15840,7238,-22276,15368,4163,25599,-20179,-183
71,-11229,-24177,-31830,18274,32297,32238,-28422,19705,-30080,21124,24929,-7180,19512,-16114,14771,-29452,-17416,9799,23578,-8249,-26434,5028,-29420,24666,22509,20621,31872,12458,-19487,16928,-11628,-4837,30022,-18751,15828,-12166,-27337,27629,-6934,-31738,-21661,-28862,11092,-15082,8069,-1394,567,-12628,-7598,-20865,14527,4510,-26781,-4488,24557,-25550,26209,16318,19028,11151,3780,-30995,28335,-24617,-22016,-30054,2391,-5700,-8516,24708,-3589,5957,-19886,-8266,4448,5311,-12662,18498,-31646,-11349,-7773,-18568,-12361,14426,17643,-11755,-11304,7169,18817,10993,2522,-14811,20038,-19112,-22065,20036,4487,-4903,28267,22049,-21222,23924,-3866,11398,-18228,-18427,7647,-29866,32496,-28805,-14137,13687,16090,-16881,-24040,-12152,571,7455,1912,-987,26999,9971,20308,10057,-19538,6898,-20316,7128,16368,15879,-12396,-25942,4781,-10594,-27111,-25289,-10623,5494,-30550,28184,19281,-24375,29816,26063,-9368,-23567,-26126,11420,-9113,8898,436,19771,-729,16878,6289,-16925,10554,-16683,-12876,-6054,23625,-3416,31847,-10416,21504,8570,19344,-19243,4841,-23163,-4354,-14734,-6419,-25418,-382,-28122,18901,2969,8554,24364,7274,5152,-14368,31559,-12836,17718,1865,22283,-22350,-14014,1431,28764,32146,-31132,-30085,31887,-27262,-25814,-24810,-22725,967,-8183,-24591,-11510,-6013,1361,32108,13190,5517,30754,-5318,-3312,-12644,30310,2939,-30561,19423,11613,-30984,22402,21830,2846,28935,22529,-13553,-19621,-2944,24923,25143,29826,-23971,-29399,11442,-22091,31779,18963,-6934,1289,-2425,-4587,20303,30581,-6431,11425,3582,21615,-20499,22850,-23285,-2371,-29620,1962,-5687,-518,-11066,12817,-32406,17724,-31036,-252,-17003,17111,-22356,-30227,15638,-2585,7130,6678,10988,23402,-30030,-26777,-10147,22689,8297,869,16701,-14585,2660,24521,7843,16293,8849,-20764,-16743,12993,31130,25707,-24920,6172,-18485,24730,-29680,-25184,14775,-27644,17295,-26749,9999,-4598,-2985,-19580,-19222,10104,-27018,21137,13559,-5504,9409,17940,-27808,24262,-27135,7366,-23478,-14931,24960,9478,3956,31969,28072,-4132,-5119,-31973,
-2845,-26823,30217,-31962,2392,32483,-19270,12404,30958,-14344,26405,23356,-279,28246,-4719,-17660,28734,3013,-28459,-5456,28518,-9896,5012,22673,-32143,-784,23683,-22263,6273,-25432,-9055,17036,31969,-17177,20853,-31462,28448,30030,-29604,-15831,3102,9632,11030,27896,25735,-191,19131,11079,22845,12501,-27302,29051,-5092,24791,31175,1321,16441,27450,14989,-19508,-4265,-31558,14494,28669,-7839,28827,-2119,9661,-16321,21559,-20168,21648,-16893,-16493,23263,-25404,-24296,-26940,23421,25770,28123,-22042,1498,19304,2447,12651,-11152,-30218,25439,23960,10537,7953,3414,-30343,19987,-15138,-18930,-8369,-24721,-19398,17784,17953,6842,-30009,-6772,1909,-6623,-19764,-25444,-18650,27378,-1261,13328,-8263,14602,-26672,734,-25877,-7879,-14076,-30976,-30797,-1836,26730,-11222,-4232,-10100,-21905,-9210,-4429,30799,-1306,32251,26556,-10447,8015,-1376,20026,23505,-14349,-24120,9900,-4051,-15997,3088,-9132,-30220,-20646,-3440,-19507,-7590,-23769,10144,-12236,-13986,-9634,11895,23847,18668,28901,5198,-32731,14279,-26090,-841,28707,11711,-20881,10207,-19807,11203,-18958,-15858,3922,18880,20053,-19873,4014,-5064,29620,31194,-19279,-8604,-14607,-9296,-21066,-26151,-27981,-1462,-21661,9973,-17494,-18490,23782,31602,15403,-8100,-25996,-23881,13654,16693,-18540,14235,22820,-7515,-8,-10431,-8404,-1633,-22834,-17195,-23140,2,5243,-3541,-26179,13622,-30354,11994,-4033,-28135,30895,21259,17768,6044,-28008,29708,28729,21726,22506,29807,-10728,14112,-17615,19889,25897,-29490,-12390,18267,2679,1589,2308,-29385,-26533,26575,-15355,31235,13041,10949,-28746,-25056,-3913,-12125,-15185,-28843,-13455,-21619,-3931,-22112,15842,23424,7635,1485,705,-12361,32188,6901,24831,2905,19282,-24607,-25205,-23884,-18131,17639,-5333,-3094,13079,-15813,-17334,-26748,-31546,31532,29391,-16845,-30352,-24666,-4909,20874,22225,26650,-21899,14131,-970,29871,-58,-7720,716,3473,-17934,-9619,-12671,-19959,-29315,-5108,-7429,-145,-10655,-11075,-25438,-15731,23837,9157,-1002,-20556,5907,-26477,11606,-18732,126,23224,24249,5910,-
13191,-23247,2094,17695,-314,-20257,23835,-9428,-993,-25163,-8700,29188,-3730,-29657,18886,12858,12542,8488,14164,19964,-16888,-8607,16507,-17100,-4377,-20513,31945,16354,9543,26822,24217,9694,25675,-29717,-3674,-29893,-22821,27501,3463,-27399,13190,-16650,13201,-13025,-16536,21236,26692,-10338,16909,14097,14052,-11519,-13670,-19665,-252,-29691,83,17262,18928,-12451,16952,-31374,17781,2901,-14485,4126,14659,15829,14704,16056,18218,22975,16177,-10132,13077,13680,5189,27810,-137,20877,24027,-16097,21185,-8353,-12336,-8140,-16110,412,15148,-31508,-32703,813,22699,9284,-13567,-10141,22140,-17431,4885,-19582,-27873,-769,25459,-2791,28722,24662,-7445,-3400,24506,31381,-13790,26596,-31282,-19352,-30187,10534,5567,-16852,5302,26491,12319,13664,-1403,-53,-18852,-13580,5874,29538,19535,-6136,7362,31229,-12412,-28685,-15532,5072,-8207,-19426,-27701,8843,6734,21905,-29685,3954,-30346,-6449,-14975,30478,-15984,-6469,9579,-21771,-27889,28292,31515,-28935,32385,-13814,13583,-4721,-22035,-17427,-20029,-18845,29783,-21104,-302,-22486,-11110,-13163,19876,30547,-12523,20274,11916,636,1838,28343,-13070,-14622,-19730,-4266,-17868,-7649,-11447,-2832,-14348,7473,33,24437,6024,-3079,-16746,25565,-11414,-14593,25601,31488,30903,19757,-27085,23325,5452,22920,-8743,-9104,-24910,27606,19476,23997,13572,28987,27610,31951,-2286,15421,-21520,-30480,-6929,16280,-27238,16339,-2336,-29591,19693,10597,10001,-16079,29080,-18794,10460,25509,31494,15534,23325,18871,10288,-29761,15279,-10742,24453,5526,-2273,-14314,11134,5996,26630,21864,-30355,-29639,12529,8305,3961,-6852,-13973,7804,3683,1272,-28678,12931,-21267,7913,16422,29308,4368,29710,-25186,17476,29917,29122,2840,-29785,5618,1297,7832,-21868,20143,18172,13137,-9952,-22044,6170,-7611,25016,-27199,5251,-26052,6702,-2963,6057,-26176,22602,-3181,19377,-7550,9070,17648,3554,5788,21586,-12600,-2067,-25786,-5879,4085,-25854,7337,-24265,19771,-22313,22109,-18334,9631,32589,-32457,28580,-25999,23286,17567,-31710,-29609,-27265,-22492,-13251,8998,-8852,-256
82,22769,24909,-22902,-30605,-13285,-13500,11160,10274,27293,-3872,2154,25750,-14999,12643,-15900,-16152,-26039,6413,-14405,-15138,1686,-19364,18551,-10527,10879,-8150,-22373,-27446,22398,-14941,21999,26354,29397,-21579,18674,-25538,32684,12437,14748,-31713,-7073,-1420,7246,-14755,-1176,-21410,8986,-25503,-21380,328,30182,23588,-19675,-756,26386,7829,-1577,7423,9760,-20419,9300,-9242,-20768,31835,-20328,20813,10163,1997,-26972,-2534,25258,-13461,25867,-28473,-18582,-7476,32387,-16386,13604,17192,-1014,-10687,6465,-10441,31822,7644,12722,-22830,7813,-26409,3049,4174,28497,1765,15934,26408,18390,-19483,-25177,18258,16402,20093,-14303,-31655,2203,32175,-29107,76,11664,8029,-7959,9585,13771,23004,11156,6466,14270,-16571,21469,-18447,7893,14244,15260,25750,15616,-25722,-978,21529,-31197,27138,-1747,-29158,4812,3485,-27091,9327,24058,-26215,-18718,18600,-17953,-31166,-6482,-11794,18035,-2482,26177,-30501,23655,30386,-21334,-5694,21083,-3200,22755,27688,11451,1416,-9308,24923,-17083,3621,20405,27098,-14009,16341,-7111,-2991,27113,16957,16215,-13560,32523,26352,-26480,4372,31947,-20570,-20978,3230,8605,3694,-2819,-10798,-2905,32571,27083,18451,-2617,-16016,8403,32559,4027,15805,-16024,-5623,28628,-5756,-588,31349,9915,-1565,-4851,-21852,24931,-31801,13652,30489,31361,24120,8560,16190,-7871,-25688,-24977,30673,-12155,8342,-27646,30710,32326,-18473,21542,-19487,6372,-4704,23525,-17212,-29201,-7010,-7443,4787,-24826,32241,24050,27291,-5794,26861,-10838,-5181,29100,-32304,-24201,-18203,-14218,11247,-23300,-25619,11398,-10946,-10911,-3487,8576,-9571,12876,32148,-952,-6232,3698,906,-31902,4859,-24593,-23883,2657,-14425,-20791,-2973,-13034,6773,15843,16973,9545,32716,27861,10913,23469,-5999,-5223,-10310,12770,31030,-11567,-26034,24549,14866,14363,28713,-28722,16992,-12727,2398,2661,-18432,-3168,-25633,12004,-29240,-9825,1689,17316,28184,-22489,-1927,9307,31219,-16216,-20212,-22377,13659,14376,-29541,5409,17826,7521,-20939,21962,-24643,-7128,24265,11255,29494,5932,20504,-28465,-239
79,-26975,27254,17245,-11167,-20145,-32315,-30083,-20838,-15651,-2108,6775,-19981,29819,27991,2527,-11249,-4455,-20623,-937,25922,32212,22790,-15428,-772,25774,29802,19161,-20730,-17081,-9249,27977,-6684,-4954,-21368,-8364,8021,20121,30923,-26823,11739,1960,-27059,17406,7043,6115,5972,-17910,27862,434,6977,-7739,-448,22096,13068,-24772,4516,1493,22522,5663,-30985,31399,12829,-31963,3755,28874,23497,-2370,16595,26648,20687,5327,25776,19377,-25352,14423,9357,-1276,-3652,18352,-3623,25904,-9078,-24918,-10731,-13760,-29021,20919,-15039,2933,-6077,5176,6654,17293,28477,19201,-28749,-18955,-30448,-4987,17751,17357,-19311,5329,-10397,13037,-11812,7476,-2928,-21582,30486,-11069,-27175,21096,-32541,11616,-22268,-32483,30532,4274,9975,7598,9555,-23863,32372,-15137,21325,-9554,-16517,4117,865,-12710,26600,24510,-7047,-30174,-23565,12949,8717,-18294,-5927,4000,-3012,5473,10537,14006,-16340,28328,-4229,18380,-32255,29811,-31855,2475,14185,-17060,2903,24053,-3274,31188,-31326,-6428,13082,16031,-15872,15762,-28273,32372,-2429,-15690,13380,29451,-15579,16766,4672,-16690,-25849,-30230,-426,5553,-7851,24217,-14161,-18737,-7819,-26874,16291,-31749,32449,-30913,14511,-16545,13315,-4106,-30526,-7794,19081,-4373,-32604,1735,-16342,-9574,-12943,26151,24570,21351,-10608,-15239,15744,25427,5879,-23571,-28379,21287,-24327,-11174,-28649,-20449,-17589,-23972,3175,-26458,-31667,28872,8112,-1911,24617,-15532,2720,-29187,-24291,29989,6759,6922,30302,9993,-16588,7596,-13649,-19653,-2485,23481,-12698,23474,22605,23076,26289,17715,19298,29376,-22954,2489,12624,25703,24527,-9622,4179,-10606,12497,14281,-28962,-4492,-29333,32329,-30127,-4461,-28675,25446,8675,29805,-11883,29586,-29755,7566,19888,-4482,-20594,25515,-11686,-20047,25085,16327,2830,-18986,-6116,-7153,-29808,15348,6226,-7258,22815,20622,-14059,-11995,6587,7613,5312,-21490,-27009,-12019,10258,25697,3739,9072,22614,-20592,-25100,-14112,29426,-6753,27084,-29141,-2578,-28769,-14558,29468,24225,1782,-26763,6128,-27047,-26815,-26845,-24882,-962,
31075,-23904,18766,-11601,8161,-18100,31655,16444,-8676,-14189,-30870,-12579,-19846,1279,-14804,28526,-7693,-11423,20047,-25286,19809,-15994,-4511,-4541,-13404,-6707,8915,22673,24864,22316,-8000,-8788,-18526,5825,-5934,19072,-26807,6099,-13838,15650,-7577,161,13090,-16607,21035,13859,17749,-1853,3826,-28500,-24676,-30598,32518,-22528,10575,24355,18584,15659,-15714,10231,1329,-26252,31948,-19785,26591,1329,-27316,926,28391,7312,3569,-21810,-27114,-27300,18620,-1538,-13626,-17452,26500,-3598,-24063,-7834,-22392,-421,-2850,13891,28935,-25436,17191,14605,-2454,-21267,-19287,-12418,16107,-32093,-2815,17575,1668,-22153,-31869,-31179,971,10053,3821,17563,24517,-216,-7280,31459,8376,-867,14700,-4285,-12421,-9216,-7223,-7918,24933,-28860,-30648,24549,-23857,-4236,12644,-28814,-5476,-10450,-16542,23490,4323,32653,-10732,10992,-22673,12907,-26758,-23878,19946,32166,-28499,3715,-24345,-3580,32,-18411,-27936,-11866,5323,-15978,26783,-9090,8718,-6154,7222,-19818,29149,7881,-20380,-7763,-20155,11067,6942,-25520,-30331,-22459,19113,-21772,-24033,19513,17983,7796,31241,-3763,26082,-11196,4779,2707,-16636,-32738,18690,24711,-22948,-31257,-1014,-31670,-2151,-32682,-25282,27124,9720,22623,-28928,-4007,-1150,-17555,1910,-8265,-13388,20449,-15962,26175,1420,32394,21539,-1976,-14031,-12737,13787,20883,-7614,-32398,-29843,15968,20164,-17162,-2414,-21930,-17562,25718,8388,-16534,12891,-875,21392,3296,-16950,-22153,-25600,-4086,-20674,31598,29243,9931,8457,26095,5745,4227,1423,23426,9358,-5711,-9048,-12883,-20945,-21739,25518,11960,9569,16088,10221,-6518,30700,-21936,21712,-552,-1542,5365,-19978,26677,-4,13298,-12387,28718,-21345,10663,-3262,-14768,-479,-15288,-10326,28807,-27556,21643,506,-7774,-20688,-1139,-14889,-20979,1096,-4998,-7954,16228,6137,19055,-9090,-23486,-28846,-20601,-10123,-5802,-21787,-3592,32118,-29952,809,-14823,18482,7736,-11483,-3831,8328,-4235,-13109,3432,-6058,13652,6603,18757,24715,-13442,31490,-20470,-28589,-11495,-17295,11629,570,-1705,-9994,31107,-11918,130,-17017,
14724,-15412,-22550,-8811,6518,2053,18441,7301,14441,28657,-5754,30569,-26495,22989,29348,16396,-26523,1651,5201,-20913,-29735,25853,-31205,-32750,-20759,7590,-9668,-16379,26389,-8026,-5171,-29176,-20601,-20629,-13107,21588,5409,30564,21585,-11999,-30156,-21628,10858,12258,25316,12612,1492,15377,-3134,-27021,21444,-22164,9226,-12424,4468,13472,-8682,18931,20068,-30596,-17303,13108,-28768,-3874,21027,13608,17752,25449,25113,12886,-9282,17469,-26080,-22561,-9511,31742,4898,-26911,2960,-26178,17507,-13610,15195,-32154,9516,1522,16690,-27256,-19101,-24482,22282,-20930,31427,-17733,15441,6460,-4146,-17132,-1360,17882,27832,5865,-24962,-25354,15577,14917,31841,5640,24934,1590,32152,9670,-25502,-6240,26975,18849,7653,-11737,23383,-10037,20650,4600,-9416,-8621,6007,10155,4096,-3035,4213,-2941,-14849,-3995,22072,-27066,8659,31028,-8077,-19742,-22297,-31945,30135,-393,-16795,20413,10070,8671,-5347,-12318,-31526,-1430,-5271,-13741,31186,-28652,-20581,3643,22902,-1208,-732,28121,27574,-21130,26742,1308,-4611,6243,-4998,16034,-7913,-5160,17864,-14312,-4794,22223,9260,-13019,29254,31899,-2077,-2714,-27073,20873,-12359,20209,10433,-32047,19857,-10175,23306,-12660,-23560,30791,3223,26777,-6645,19839,22779,6428,-13758,10871,32348,6850,24528,30265,-271,17010,-15934,18348,-24953,-11024,-973,-31807,-26415,-9556,23360,-3379,-30812,-23187,-28318,-24694,-31689,-1567,13239,17368,-7763,-31490,22741,24602,3837,7530,27452,-21340,-14931,8512,-12438,-22399,-12386,402,-30865,-14818,-8057,1557,-151,18626,16117,32123,1329,-2080,-25453,24702,25659,-10918,-25927,15209,15640,921,5035,-8721,27524,29254,-18379,75,13512,17315,-13918,27215,21692,16550,5242,-21978,10215,-26250,20664,26921,23049,-9862,19826,21496,4231,3280,-4363,-16425,26879,3979,23248,-10796,18950,18571,-13963,13765,-19639,5638,12378,-3026,8130,6729,-6229,4702,7547,-11235,-21490,-32174,-9344,-6020,-18058,-27536,19847,-19969,-11581,-12910,-20502,-24071,4419,-22927,-19462,-24746,28154,-6248,-12613,-19172,-14086,20946,1039,-4607,32111,968,28
41,30927,-16849,-12716,10851,9900,-16832,-2927,25730,-1337,-10671,-12025,2237,-6607,-22892,14742,28315,7518,10140,24871,-18694,16939,17296,-21197,14380,-8141,25078,22332,11613,-12426,20178,-31534,-12520,-2121,12845,-11445,23317,18016,-29301,-11757,-29796,-15250,11824,19067,-7257,19548,26,-10984,-8480,23201,-480,-20616,-10344,-24168,24568,31721,-30918,10072,10515,-22397,22452,-3928,22630,-12557,21540,24937,28706,23851,5309,18053,-4485,21143,21025,13359,27424,-31115,26764,-31306,-10215,6870,-3753,22942,-20825,-28767,-11977,-26045,-20477,8595,5313,16644,-16494,-886,-18120,27179,-594,-28106,3070,31684,21163,24308,19761,-4962,-3042,-5800,12920,-3240,17569,-10229,-26959,29479,-13779,-1381,4177,28330,-27282,28084,-4200,-28503,24076,-14165,-9443,-24140,16204,30967,-30963,5048,-17350,-18235,22076,-13570,8927,-32048,21869,-23159,-24363,10623,7990,24361,-6198,29334,-12786,21514,28202,-16666,15210,32506,30724,12158,12644,24561,31241,-5543,-13362,32367,22466,10771,5692,23032,-27392,-21634,-9280,7346,15709,6687,-28852,15419,-26775,23384,-9013,3018,14677,20873,-29889,-8620,22199,31219,-12830,13844,14682,-27890,29797,-21666,-2984,2361,16531,27026,-31066,-17044,22424,27686,-4334,-6220,-32617,-28859,16191,14623,32681,29076,16266,27605,-3334,18362,7878,-32051,-3534,4899,17513,-9858,-5699,-9993,30311,-4454,31083,-25440,-15708,29896,-7822,-7166,26695,-289,22181,9360,23332,-15838,25221,-5992,-27881,-4249,-27066,-28292,26334,16990,15868,2918,-12608,-29336,-4839,10055,-18504,-3663,26289,24324,20589,-6126,17250,-19239,25886,11755,-24922,27668,4458,8334,-8747,-9614,13602,-30961,9677,30690,4095,-4207,-14984,32751,14166,6477,-5610,4667,-4549,16572,-23357,-20240,20038,-19000,-4437,-28053,32154,6043,-27321,-17009,-30388,-12615,-21423,-19029,7176,-17235,-4109,-24803,-639,-31688,7042,15435,-672,1806,6548,28208,-21063,-6912,6497,21433,-3204,-31386,-29237,17861,-13154,-32174,-6167,-9031,-9548,16379,13394,-22730,-18981,3611,15512,11474,29377,-47,23255,11350,-5856,30800,27282,-27704,18446,-7193,1519,1
8378,-4993,-10490,30502,31735,4790,12346,-5036,-5014,23514,3802,-609,-19874,17201,-30139,12695,24654,-25769,2600,-5282,17272,10242,8116,-28818,1061,-10952,2280,29154,1702,-17027,12180,-21040,8770,-31979,22331,-28850,21847,27802,-8072,-23511,13458,810,5269,-19610,-30252,23296,-27091,12059,27053,3239,-23761,-28046,-22568,-17094,27779,-28192,-24218,31392,-25347,21505,-22735,-31150,21658,-23047,-20054,-6792,-14466,23307,6697,14608,-16716,-2878,4143,9447,-32596,-15083,15446,-10583,-26157,16929,-11691,5251,330,-19187,12137,3216,-18091,26916,-30230,-16360,-18210,-32071,27642,19024,-26027,209,-30689,-461,-21128,-26940,-28257,-2552,9093,9387,-21403,4904,13519,-32202,8239,-7539,-6598,-14313,-5914,-25697,-25997,-8683,3111,3121,-24150,21033,20529,19377,13289,-13014,-26685,-21894,-30415,32497,-14675,-4316,-16266,-3331,897,28689,-7585,-1838,-23283,-24424,5748,-104,30304,-26624,-19040,28001,-16024,-31736,-13523,26211,10767,-16239,-17435,-31360,-17298,18345,5544,-28985,-11295,14991,27331,-10886,-19575,-18082,21417,20999,-8104,-6864,-18097,31892,-15177,-10433,29409,30629,14804,18532,-22547,31867,7442,26525,-18888,18828,23283,-8425,25625,3617,1131,13395,20149,15640,-29768,10896,7601,-13159,18423,22790,26693,-12342,-25861,8405,5011,9563,27802,18681,9123,15802,4220,245,-12844,-30647,6721,1686,-12174,1495,15628,-6803,-10244,-10008,-22052,7839,-26832,-4048,-13988,27997,29047,-22982,9006,-1428,9620,11731,23779,30880,6851,-23280,699,26274,-28089,-20177,-19092,-31854,-25547,10103,-6566,-209,-1296,8912,-31945,-25307,12142,7327,18794,6347,-4433,-24490,-30173,-13571,-19593,-27060,-10572,4165,-5330,-21652,25586,25938,-28851,29195,21296,5758,2366,-10058,15941,-28213,-25355,-17619,-4806,-8468,-10929,-24566,25868,5549,20643,-8370,31217,-8496,28050,25334,-24536,17626,-17566,1225,-8259,30989,-22208,-30455,26992,-31817,21635,-30718,6176,-23290,31822,8014,17374,-27825,23324,13044,-31723,26493,20686,-6033,-20831,9961,-13250,-17180,-28134,24853,-28934,-7040,6204,-10650,32519,30588,16061,-2095,3626,-1829
6,-3996,32432,11003,3833,28929,2192,-28570,13928,-21510,9673,6734,-7227,28719,-8862,-30240,9159,30438,9656,29305,26609,2397,28625,-26059,-19898,29335,25829,14417,-4673,-8222,-6560,3313,-28062,-22127,12466,-18572,-14779,-13511,-14640,-7333,31957,16564,-8130,6714,-17609,3970,25565,23485,10465,21897,-28713,31218,3363,23790,5014,1642,24351,-1597,-2751,7085,-28308,-1523,-24364,6242,11912,20222,-30453,652,27703,-9920,-29332,-21368,-27895,-13601,28609,-29135,10618,29160,23757,-9532,-12770,15580,24829,25640,-11457,17652,-11670,-4487,-6146,-5118,-3275,19484,-9279,16739,-24072,-22389,-12248,8552,15046,32642,-24387,9191,-17566,-22667,-29857,-28600,-22598,-26913,-5404,-3006,29958,6757,-21728,-429,26609,2938,9334,1475,8283,-17132,-29272,29240,611,7293,-3278,18622,32622,-2428,18240,7423,-3494,-25357,2774,-9479,-7804,-5269,17182,30094,9792,12578 diff --git a/tensorflow/lite/micro/integration_tests/seanet/conv/conv4.tflite b/tensorflow/lite/micro/integration_tests/seanet/conv/conv4.tflite new file mode 100644 index 0000000..7bbdfaf Binary files /dev/null and b/tensorflow/lite/micro/integration_tests/seanet/conv/conv4.tflite differ diff --git a/tensorflow/lite/micro/integration_tests/seanet/conv/conv4_golden_int16.csv b/tensorflow/lite/micro/integration_tests/seanet/conv/conv4_golden_int16.csv new file mode 100644 index 0000000..b4d072a --- /dev/null +++ b/tensorflow/lite/micro/integration_tests/seanet/conv/conv4_golden_int16.csv @@ -0,0 +1 @@ 
+-32768,-17914,-25034,-22288,-19067,5789,-6361,-20881,-32768,-3393,4789,-32768,-17755,-32768,-11231,-26349,-32768,-32768,-21945,-32768,-32768,-32768,-32768,6824,-16624,-17083,-32768,11656,-10368,3982,-32768,-21649,32767,17558,-32768,4775,-4604,4118,-14383,31943,26046,-12150,10453,32767,32767,6029,-14757,1004,32728,-26443,8035,25080,32767,-5346,9900,-32768,32767,4040,8380,5979,-2730,14221,1969,-21806,-875,32767,24800,17400,21051,-29398,4151,-10229,32767,4724,32767,-14899,10686,32767,32767,32767,32767,1396,-7245,8535,29573,32767,32767,11821,-32768,10291,32767,32767,32767,-8751,32767,15963,-6811,32767,32767,9220,-5505,32767,21170,31499,32767,32767,16182,4026,14435,17389,12022,7008,-32768,1828,-10967,-30751,-10480,32767,32767,6127,32767,32767,23895,32767,1713,-31235,10663,32767,12165,-7560,22964,-779,-13854,5424,-1318,-9151,-20384,19168,-14131,32767,31934,18155,-4147,-19665,-32768,-32327,22940,12020,15227,32767,5492,8062,-26872,-14891,32767,-1307,-9318,-1253,32767,-13086,-22602,-15371,7131,1465,6138,-18579,8528,-25719,-31539,20047,6810,-5178,-32768,-26468,18381,19474,-1343,-26661,32767,20900,-17093,-32768,-19603,-32768,32767,-32768,32767,-32768,4644,-7768,-2863,-8083,28066,32767,28626,-4672,-18885,3638,-30913,13302,32767,-32768,-5464,32767,10591,-14387,-18682,21523,-22658,3197,23256,-32768,13111,17913,-32768,-29227,13498,32767,-32768,10391,-32768,-32768,-2867,32767,32767,5182,18621,21791,11508,-12822,15672,-7008,9562,-7428,-11296,-23256,-32768,-32768,32767,-25824,-27954,-26609,18488,8815,19095,-5127,20196,14758,-32768,32767,32767,-13094,1412,-32768,-3786,13819,8372,27201,32767,25397,2224,5890,32767,3282,32767,32767,32767,-18372,-17854,32767,5238,12657,-8621,32767,28656,27654,32767,32767,32767,32767,-343,16681,19211,-23544,-3972,14721,3790,32767,-7224,24857,-25162,410,3982,-6270,3879,29472,-2669,-29594,3096,27092,-9907,32767,-32768,17602,10545,-12530,-3347,2381,-32768,703,30281,4268,1506,-19364,18077,-2004,-30586,32767,-2304,-14494,-32768,12030,-13968,-8984,-8135,-4673,-
32768,32767,-20600,10805,-32768,32767,-7475,-32768,-32768,32767,32767,3730,-23741,1545,-1152,-32768,-32768,28107,-32768,-32768,-25705,7314,-20951,-32768,-11308,-30229,16536,32767,-27959,21616,14427,-6859,-3897,20770,32767,5052,-5434,6546,15503,-27689,-27826,-4171,21222,-13069,-7768,12405,30337,-32768,13746,32346,6395,11248,-3700,7095,-5781,-31524,4162,32767,-32768,32767,32767,-940,9890,-16955,-13844,17585,25145,22521,-11983,32767,-14804,32767,-14385,6621,32767,32767,32767,-32768,7858,7466,2204,-2991,17179,-32768,32767,22319,8341,-8492,22068,-19718,-12652,-12404,-32768,13727,-1871,1791,7416,32767,32767,32767,4587,32767,28462,-32768,-18354,-27825,-4078,-21740,-13491,-25228,10594,-23717,-13098,32767,-15272,32767,29349,32767,1911,32767,9528,-32768,32767,-32768,32155,7917,3945,1628,-32768,-15003,-30299,-22667,19729,22403,-32768,1189,32767,5739,18653,2504,-21310,32767,32767,32767,-32768,3929,4265,-32768,32767,-12256,13258,-18442,8575,32767,-1704,32767,-1017,-14430,-7171,-10820,13040,32767,32767,32767,12881,-5740,25267,3014,24915,-32768,-20881,30536,5366,32767,-32768,-5908,-23495,32767,-711,-29168,-32768,-32768,-12390,15877,-22267,-32768,25581,-4861,20811,17646,387,32072,-14193,7793,32767,5474,-2056,-32489,-32768,32767,32767,-9245,215,-32768,16905,-32740,32767,25551,15771,-29394,-26560,-4774,9884,-18314,-28371,-32768,22916,-28410,13012,22146,13076,23940,7413,-8366,8646,-4410,-18175,32767,32767,-32768,32767,32767,-4763,-14084,-32768,-32768,17531,10657,22409,-32768,17826,-4563,-32768,-32768,-16490,23006,2885,-5808,12376,-32768,6093,32767,20901,7034,6975,-3139,22653,8846,32767,12700,32767,-7833,30972,-28231,10949,-8237,28568,7739,4160,-24829,-32768,32767,26752,32767,-32768,32767,-32768,14447,-8157,-5593,-20324,25020,32767,-7466,-32768,22062,-3882,27982,5772,22055,-14569,-26457,5581,-18447,32767,-29032,32767,-15726,-19843,-8837,-25876,32767,-14448,15704,32767,-13790,32767,32767,-19683,-32768,-32768,21927,-20510,-32768,-24996,10805,-31486,27260,5414,20312,-10311,10812,-9894,-19
915,-20187,32767,19031,32767,32767,26469,-23260,-16999,-32768,-18583,32767,32767,17003,-32768,4281,32767,7613,-31157,-16044,-32768,26908,-13255,-217,26108,-20158,10969,3989,-17676,-32768,11367,-5235,32767,-5637,-27451,-6984,32633,32767,-8119,20483,19858,21183,24715,5225,-15134,21918,-19545,-30327,32767,32767,32767,27120,-2130,-730,969,-32768,3751,-25157,32767,13556,9667,-25282,14540,32767,19194,27382,32767,-8121,32767,-3866,-14503,-7848,32767,32767,32767,-32768,32767,20488,2314,32767,-32768,32767,32767,21354,-32768,14894,18613,26792,32767,4391,8193,-3012,-21918,1053,-18859,1695,32767,27140,-12897,-32768,17760,32767,-13698,-1579,-1784,-22385,32767,12434,32767,-29961,-28588,-32768,20324,-21371,-11660,18940,-7810,968,16881,-12707,22337,-32768,1411,-9869,578,4622,19048,-25827,32767,-32768,1102,2935,-29630,-32768,-11357,-13172,-22537,-32768,-20889,-32768,31160,-31597,-32768,-32768,-32768,-32768,7347,-10033,-32768,-12641,-32768,32767,12015,6398,27717,-4168,-14851,-10727,-7810,3453,25728,-10625,-32768,-3952,19109,-3740,-32768,-32768,-32768,-15271,-12959,-32768,-32768,-18415,-32215,32767,32767,-28403,23585,-23069,22289,-24541,-32768,-32768,32767,3502,32767,-4926,-8997,-11072,-2845,7949,32767,-19,-29252,4776,22002,-7310,22995,-19920,30262,4808,18516,32767,29114,32767,16200,-22119,-15639,21744,-11668,2044,2375,671,11151,-32768,32767,-27217,2389,-6782,-28390,-6805,4558,22621,-32768,32767,-32768,-20480,25180,-10970,-32768,-15947,32767,8917,-32768,24457,-32768,-9612,-5835,-32768,-19197,-4456,-32768,12599,-6585,-5821,25467,-32768,-7350,29982,4789,-11141,27625,7693,-28050,10326,-32768,-32768,29234,-32768,32767,10917,2574,-32768,-32768,-17065,32767,-3702,-28630,-25755,-25485,-26049,-9407,-2860,11553,-21884,-32768,2277,-17827,-717,7756,4253,12577,-25499,6632,-3573,-32768,2205,-32768,32767,30177,-16021,6328,-32768,-13940,-23780,-16721,-32768,32767,25490,32767,32767,-2230,24253,32767,32767,30696,17300,6935,-9438,-31545,3787,7586,18491,23542,-4913,3971,8545,14214,9970,32767,25020,5666,
32767,16468,-16090,11022,32767,-27856,8966,-32768,32767,32767,-2401,32767,22134,32767,11144,1062,29149,6710,28563,19534,32767,-1839,32767,6573,7888,2107,-32768,6444,-29831,-8533,-23658,-7189,32767,32767,32767,15985,-3665,-4777,2701,-24344,-1324,-32768,-32768,25415,32767,-8824,32767,-32768,573,-24670,32767,4858,32432,8891,32767,13193,1910,24877,15788,18061,28937,30389,32767,15077,32767,32767,11536,28548,29205,3592,11550,28569,32767,32767,-6515,32767,-31540,14467,32767,32767,15856,-8730,15209,-8793,32767,-5620,-2290,27960,32767,7118,17747,32767,-32768,5345,6823,-16649,-13537,32767,-9525,-28670,5707,12014,-12386,29408,25248,32767,32767,13457,16984,14279,32767,9193,-32768,32767,-14029,27292,-32768,-7800,14759,-26718,-3929,-11422,9141,24572,9850,-32768,-32768,32767,1429,32767,10616,9756,-1724,-32768,23263,32767,-2247,32767,32767,-12382,32767,32767,-32768,12595,-22407,32767,-3334,21486,15185,-32768,32767,-24106,1529,-234,5158,-9389,-25063,14347,-7888,32767,-10312,32767,-15109,21821,-30530,-21775,5634,687,-3530,4339,12248,-16449,-19914,-32768,-32768,-22813,12528,-12126,7857,-32768,-6867,23047,18322,7648,-1552,12341,26459,15024,32767,-32768,31764,-26643,32767,-32768,2658,-24076,15335,5561,-18336,32767,-31497,17250,26380,32767,-32768,32767,-32768,-2316,-21628,32767,22565,32767,2997,31626,32767,1079,7594,-14380,1396,27290,19950,32767,-11168,10216,22626,32767,-2541,13105,-31086,21154,30835,32767,32767,32767,32767,-10068,-2950,-21959,32767,-11545,-419,32767,-12380,-24563,-32768,4331,30224,-3286,6702,4946,16553,-13992,-6758,32767,6454,25393,-32768,16760,3625,-10999,-2376,-8246,29791,-1568,32767,-32768,-32768,21329,-26558,-1298,-32768,-16707,29556,17535,-25511,32767,-11590,3127,25887,22404,-7056,10276,3662,830,32767,5337,-12873,-32768,-22203,32767,32767,2541,28958,32767,-1599,11781,-16374,13919,21415,-21690,-32768,-22104,32767,32767,-11238,7031,-12618,-26454 diff --git a/tensorflow/lite/micro/integration_tests/seanet/conv/conv4_input0_int16.csv 
b/tensorflow/lite/micro/integration_tests/seanet/conv/conv4_input0_int16.csv new file mode 100644 index 0000000..f0f4e37 --- /dev/null +++ b/tensorflow/lite/micro/integration_tests/seanet/conv/conv4_input0_int16.csv @@ -0,0 +1 @@ +-18827,306,9834,3532,-8879,28246,11911,8991,-19897,29907,23787,-30549,18828,7328,29137,-1720,12197,-22852,31487,22951,-17268,25328,-104,31687,28456,-778,9539,-2103,-23488,4328,-32672,-15276,2285,-20820,-30136,-14717,-3961,-8290,27488,17063,4590,-24770,32604,24708,-13711,26443,-14715,14513,-27957,-31168,-223,-24065,14987,8999,12323,2306,24328,-23560,-15135,30534,31352,-5665,-17002,21555,-31595,-6830,-12899,13280,-11343,3519,-3044,32176,8239,19990,18476,-18337,-19358,3839,23460,14536,26838,-11735,17808,-28506,1299,-10353,-31252,16627,-6100,-9081,-18803,-7510,-15088,9476,18615,-15047,4570,-29932,-30673,-589,-2403,3634,139,24320,16414,-9096,-16220,18212,20965,-14420,7885,2312,20456,-1673,-22442,22360,-12210,12554,-14375,10838,-20676,-5363,-28385,-7131,-21509,-24553,-27377,-20820,-23508,-30694,-31929,-21250,24177,23726,-14132,12739,3566,27992,3655,8680,-21624,20623,-664,-2480,15386,423,-12797,29374,-3242,20308,-20726,-3349,13806,-7514,-16431,-26341,-16792,18222,-9845,29106,15542,17306,26385,27803,-4222,-23091,-9800,-887,-9308,-19957,31169,-8240,-13948,27390,9763,13638,19558,1544,-12745,-15706,28008,-4662,-13202,-8170,22406,-5652,31125,14875,-26904,29104,-18532,6485,-32401,-10857,-29249,-19103,15406,8554,-293,-5080,-18156,25157,19497,-21581,-600,-28319,5276,9061,-7205,-8940,11303,-32549,11421,3545,30799,-17832,16829,-14506,28487,-29165,816,-1429,9501,-5216,23687,8748,8624,-32758,-18519,-28977,-28741,16835,28717,22270,-26507,-13055,-28883,22267,-1193,21419,29198,-22676,14864,32463,21244,3904,13050,17667,18099,-27895,-509,7331,15492,-10820,6611,-27603,9672,-1285,18872,-7827,1628,3331,-23388,-27660,6776,5227,-30410,26817,4200,-8928,-21853,-30409,12149,23891,-32360,-19324,2289,25000,7529,11767,31746,-13825,23888,-16658,22629,-15807,11240,6818,-29505
,-3613,27815,2041,12558,9704,14086,-20291,-31187,10052,19079,9603,29422,2784,20284,25024,-4645,31018,-17292,-23296,-27705,-14745,-16998,10103,-27867,22884,-6598,-23936,31025,-15309,-19275,11827,-22787,-13147,-2626,-397,-10047,-19954,-10807,914,-24994,-13207,23925,20407,-16552,6404,-30524,10036,-28507,19224,26505,12766,12157,12871,18106,-13968,7863,-11138,-14535,21820,-17031,-5467,8502,-20781,-18319,-16537,27328,-8352,-15853,-15959,-7512,-1189,30225,30915,-19174,8809,-690,7533,-11198,-3586,16608,26181,-22040,-17685,10710,-5641,10965,2685,-2830,15752,24530,-1276,-31508,-2038,32384,-27580,11622,-22684,3602,29516,-32397,-4774,12435,11622,1551,29605,15826,-9299,-23793,15491,-28732,-5326,22728,-8397,-3142,19557,9786,16326,19090,7846,17071,2027,13526,-19501,15412,-3218,3241,-28531,-27700,-16508,-25764,20777,-5462,15283,7682,-24722,16708,16588,3336,17489,-13715,23127,12014,7805,-9331,-13220,13145,18660,12872,-14841,22354,-21811,1651,-18756,-16712,8246,15658,-14452,15583,-12925,-23116,1938,-9948,15290,-4375,14783,-7476,16110,12568,-449,-23285,-28082,-21087,-27692,17182,8962,-23989,20706,32346,-11846,-1250,18007,21976,-30449,-18384,26461,24527,-4797,-16933,19780,-13324,4967,-7382,-21545,17404,-4519,9022,1401,-18677,-1774,16829,-14914,27046,27632,17577,-28072,31522,-7147,-20243,-5282,-8633,1439,-15801,-7744,-25773,-20158,-3609,-7074,19217,8442,24552,-21549,863,-25765,-17425,7773,-30203,-15145,-10994,22033,-19492,14665,-24463,-27456,13502,16252,23139,10909,-27142,-14471,-1816,6387,-7710,-11891,-19151,25317,-26984,-13065,-17126,32497,27741,-13100,23147,-23696,-4187,8136,5434,-17480,25304,9729,-18625,-24737,6037,29290,-12280,-13206,-964,-13911,-4753,24984,1785,32202,-9425,-19046,-5727,-30524,-10775,-23587,9023,15357,11518,-4115,13051,-13810,-12360,-11050,32425,-14273,-22261,-4324,-15182,-13359,6278,27499,-11664,30126,14862,-20788,-1355,4254,-27459,-6189,-15744,28671,25789,18482,19862,27916,-30659,29400,-20777,-4690,6351,-21362,-26170,14524,25235,25887,-13927,-29110,-17490,3069,11
820,15252,-31240,-5673,-26274,-31847,12968,21187,-13494,11063,27745,-12712,27954,27350,-8574,25478,876,22794,-24940,31477,-30007,-11577,4506,11909,-29337,7966,4236,-21368,-12823,-26094,-11604,11474,122,-975,-11452,-4916,5800,6029,-30869,1230,2139,-6890,22125,7171,26412,3097,-16079,25132,-17166,1569,11102,16547,-16553,-23050,-18176,4878,3335,-24169,-32458,-28648,14610,6764,-12382,12687,-4078,-7901,-32675,24899,-18755,8673,3456,912,13843,3910,-4320,16660,23499,19540,-6032,24710,24403,-9876,28226,23534,-15448,10343,-6141,-19160,17001,-24586,17700,-4184,-24243,-15270,10210,-31763,17871,22742,2122,25048,-25682,471,-16503,12650,-18439,7130,-17110,7135,-16584,30574,3731,-11938,-24246,5275,26581,16865,4559,-32444,30938,-18750,31467,-26236,9061,17434,5357,12076,-14424,712,29332,-12841,28446,-9957,-13777,23277,-21675,-19703,-13720,12625,-13786,-29857,-4818,-8328,25810,-31705,10978,25576,23712,28410,-28785,28201,-21873,25851,4335,27722,-32038,10181,26961,-30289,-20066,20623,26139,13251,-4048,1643,-26906,11598,32325,7889,-23100,-1320,-3817,4899,-4346,-3023,11315,-12333,4092,6945,-1094,10175,9621,25187,13002,-4088,-24200,-17049,-7454,31570,-15052,30813,30903,-10088,16240,-32604,-5943,-23339,1936,-1234,-12250,-29221,30326,9869,10229,1594,-2761,27645,1449,-8688,-32078,20623,-27379,-26165,15248,7327,-20403,16967,23536,-12215,11311,-1370,21812,-8078,-32085,-7622,3959,12362,-2706,-23095,16447,4209,-22443,1645,-5287,-4530,26267,26542,-27484,11273,-5442,7540,-158,-7662,-1649,27237,-11833,7068,28323,-8003,9878,-5769,6405,-3739,-17796,-564,-32616,3949,-32341,28605,4623,-71,-17695,-25714,22542,12826,5550,9482,-9106,19031,18485,-7295,17043,30180,31799,-31368,25610,-17382,-19293,-14169,12830,-2070,-25354,-25209,6564,-7150,-16094,31873,-27475,25666,5035,-14697,-22890,10805,-30586,-28876,17638,16678,-32341,-28121,-19506,18862,32081,5768,8150,25452,-8656,14734,22216,30321,20911,27984,13074,27537,7738,992,11162,15063,29623,21776,-13485,-11851,17866,-11071,28792,391,-11183,28686,8572,-7459,26598
,-16319,25329,6534,-18551,15964,-19004,-19181,-7603,22989,29606,-22809,-3220,23052,14918,17812,14712,15291,-12966,10366,-4189,-25552,10820,-7544,-12861,11548,17785,-29756,-7058,-10231,-6059,-7857,-9641,-7526,1427,3982,22297,-10328,-31196,-23493,-26015,-13185,-19693,-20325,-23512,-24055,-12903,25703,-25812,-13096,-16858,-14067,11511,13815,15968,9013,2951,9705,-8630,-21678,-7425,-31820,25096,-16153,-26953,15024,29241,5118,528,-16948,22052,5116,4468,-14557,-8508,-20195,-15562,15781,9289,-14333,-15233,-5780,-19537,26498,18151,23370,-29217,-19555,-7471,-6801,19588,-25739,-10522,2082,23487,-11994,29244,1816,-23546,21778,18338,16062,-22815,-3374,-730,-2402,-12991,-13858,12142,-21005,-11672,-24912,30710,-15173,2413,-22359,-6335,-23468,26511,25060,2448,14979,19501,-8558,-18320,-24122,-21375,7023,-6144,32716,-1331,-25646,-20074,24180,-5179,-25934,12448,3829,-12784,-25486,5119,23875,-13594,-29907,-22896,-2924,5900,15334,-30365,23427,24457,-7905,-5017,-8279,12549,-20149,15938,-10102,-31576,23563,-11942,26283,9481,30818,-20114,27481,-9362,-30896,-31154,20874,29540,-23038,3534,1440,-1640,2968,-21007,-8170,24038,11938,17178,-32006,24097,16697,-9556,-19316,-20844,20539,18824,1128,-23335,24977,-13078,30845,24264,25815,-11901,5429,12552,-26444,-21075,10248,-21278,22538,23502,1130,-4915,-21222,3945,-18766,5362,7497,11727,-28271,-3240,-15532,1832,-30023,12272,-5156,17304,30260,-16256,30933,-31235,13016,-29958,5366,-17847,2960,-18947,-9646,29053,-3384,-781,-29934,-25858,-14846,-23122,19862,-17554,-31117,-3481,4735,-17789,-16255,2752,-17967,30250,29300,-1560,2800,5645,-10371,21380,31579,16602,27125,-5150,28607,31834,10502,555,-27158,-281,-9539,-8075,-14760,-15698,18783,21222,-31584,-13273,-23974,2996,-16417,-12716,-4993,22615,-21582,10471,21081,31507,-4613,-6669,22544,-31871,-18255,-18956,21814,-4942,-20542,-15279,-19300,-9561,-31285,11980,23473,-8845,-7154,-2352,-883,-15863,-29621,-32226,-14121,-17037,16591,-327,2450,-22016,2997,-5856,24016,8605,-22918,27087,9656,14543,14332,-22835,-677
0,108,14552,-31508,-28443,-25032,-30567,-9634,-25135,10265,31894,21782,-5690,28432,-22939,26514,29796,31013,-15968,-16799,14968,-18182,-29774,28938,8433,462,4140,10270,-28534,-6020,-32070,-4668,22019,4957,-1908,-1369,-18415,-15598,-23813,-18010,32221,15527,7911,-10583,-19259,-25931,-18641,9931,20587,20019,-6277,10051,-16962,24984,-11642,32522,3008,14811,16533,24535,-24031,18628,30070,11579,11508,-17118,-1617,26374,14246,4865,-29243,-12434,-32109,-22804,-7356,-18395,-7132,19653,-16898,22339,30878,-687,30906,-832,10378,-30341,29457,-28569,-21224,663,-29978,26178,21721,-21161,28571,-9606,24474,15324,26980,2137,-20329,1824,26158,17955,14119,5849,9776,-1901,-7545,-26560,29508,18790,-5104,22436,-5067,10998,25172,9594,-29167,-10317,8161,-9032,-27564,-32029,10757,10596,-24832,19048,-24395,-11348,23043,-12018,-5904,-20319,-30432,-31446,20939,22049,28573,-30769,-8013,-13198,-12232,-3194,-12539,-16425,-15316,-22118,5984,-11445,-25575,-18182,5947,-21432,-9841,-11358,-29348,21702,30020,1608,-11657,-14908,14916,-12764,18240,-9801,-14810,-28748,-29696,-19406,17908,-20896,-5166,-15298,-8959,-20695,4472,-28992,15900,19742,19564,-12985,-22882,12790,-126,28629,-23585,12,-30295,-265,14497,-1349,28243,-10349,-16726,-24942,6295,-26521,-25100,-10329,-1712,-31394,18787,-10696,-31112,11405,-153,-23420,-27539,2773,17893,27654,1316,15486,-21206,-7770,182,-26558,7497,-17350,27950,15086,9976,-13015,-1050,-15028,-17961,31357,-2937,13416,25384,26074,-16100,9670,22396,-19299,-25627,6103,19469,15485,26845,-3774,15603,29045,4650,16082,30154,-23382,16781,-2604,32690,-4405,22579,2293,2621,26001,22050,351,-3418,-25349,7907,11049,3647,-21338,-20406,-18779,28597,-3455,-31875,-17774,-18995,10279,-8123,-14705,1831,-23251,1962,-15004,26065,30146,-3019,12396,26863,-3524,-22251,22866,-21583,-23293,17113,-1282,13764,-12632,-15830,-30237,23078,25911,32515,6549,10043,26643,8568,-31928,2724,-23145,-26444,-15501,-20441,-5819,-23968,3418,-27395,22386,-4491,-7118,1061,-24882,7182,13303,23761,-9153,14759,-704,16765,2
0147,15066,10163,-7742,-13358,-11419,-25268,27076,-10789,-5746,-22810,-26677,-20355,-7949,21069,-24506,13017,6681,13572,-19148,25622,-8690,-32466,-21269,-5610,-15864,3591,16416,32632,-8718,-16396,-2085,-28971,5958,-28076,9726,-10739,30298,22907,-10804,-3505,-7240,-21407,-15687,-12103,-5660,-16391,-19394,9760,26870,14219,-15294,9620,-30242,16978,-31881,-2989,-12208,-4276,-3940,28953,-27030,30991,16740,2402,-8210,4085,-24024,-3251,23958,23466,-8269,-16873,30386,14964,-25930,31857,-14927,8366,21067,16902,23161,-6732,6430,-18091,-17705,-12554,-29626,31854,11961,-5014,11350,15650,-22039,-25945,4999,25714,29038,16510,-19205,27124,-17335,30780,-24600,-3111,-7286,2361,-9201,31882,11869,-24416,-26604,-6542,17670,7576,-12675,-16239,-6802,21314,-31051,-25967,-10502,-27512,30787,32716,-9833,17521,-27906,4767,16729,10465,26739,-18081,18945,12917,-2378,-17836,10433,-4512,-31182,-12858,8665,27040,13665,-18502,16497,-21390,-30414,19548,9635,-8944,8738,13755,-26570,-32534,18146,24451,24165,12665,-15490,-5255,-5925,-83,-27455,-15229,-3652,-29830,-11680,6310,-18195,-16490,1089,25810,-27604,-31672,-24457,3329,-24305,-11414,-32626,7552,-16508,15837,16382,11750,26878,-30345,20691,8004,-21947,5062,-5365,7477,7743,-2362,-22270,-23270,22392,30476,3299,-19310,-20284,-14068,1483,14688,23102,21130,23568,28061,-9419,9806,12095,24416,29101,5018,4571,10022,2919,-15038,9732,-32052,-2627,24270,-14594,19468,-11711,32461,-8383,-899,-9251,22958,31212,-24749,-4887,1015,4228,9385,24552,3445,19027,17191,-5532,13253,-18122,16480,-2374,31101,24909,-10800,20453,-12592,21832,-7570,-7752,-29188,18082,22239,-627,14374,-11994,11974,-28520,32422,-9203,28814,8033,-767,-16459,-10039,14229,27039,7107,-8050,-10929,-27694,3783,17793,5996,25914,-22894,-16358,-26549,30691,-4271,-17427,4084,4835,30065,-25511,11736,13700,20763,17736,-9946,24995,-19027,3685,24090,-22741,-9411,-18336,-6685,-8330,23632,19629,-13991,-15709,-17768,17375,14506,9594,7501,9535,20202,10338,-21012,2855,25320,-8634,23253,-30538,-24367,-14275,-18022
,-16469,2947,29799,-22477,4339,-28154,16988,-14725,-3770,-23709,-11599,-21940,-3344,16,-14762,-14155,22513,-28344,-2864,7927,-8040,-19106,-7973,-4923,-10096,17864,14944,9948,-6629,29096,-5298,-3990,-17592,-15946,-22330,23562,-23033,29731,32015,-17054,17604,-22293,-23540,27156,-20887,30151,4009,-19846,30144,12197,-28647,-19665,-19327,32004,-7454,-591,5352,-26118,4099,-28429,-27020,-11114,-12460,-28178,23903,24024,-30581,-1508,-12972,19001,-15487,30658,5767,32223,12547,-32737,11645,25442,24357,-13399,-23741,26499,-1699,20068,14871,-16336,13663,23327,-29038,29686,9742,-22248,-6324,-13442,11093,6806,12479,-28197,-24121,-14408,-32221,13920,27043,28585,13570,13588,1320,9414,31598,-11194,18668,-9329,3603,-31502,-31393,23648,-32259,25731,13633,25183,4868,9281,11801,14662,-21026,7419,-22940,-7289,-17968,30640,7033,-30769,27474,20436,-24730,10707,-20408,21434,-3,16757,1622,-29682,-23606,415,-18656,521,3884,3192,6494,-22566,2564,-9784,13831,-9571,-10837,13899,-29133,-27506,-886,-20967,22213,-32029,-32766,30545,7647,-12696,-26935,23979,-7180,-30247,-13959,-19149,-19056,13367,-7570,-32400,5067,-5939,-9113,-5458,13075,22168,13442,-4591,-22052,-18807,6239,-18129,-17073,16468,-10884,31509,29555,-21092,30533,-5840,12618,25832,12340,27367,9805,9139,-8835,13829,-6610,28248,1608,18683,21831,-22006,-26840,8675,28989,30476,-220,16073,22910,31891,-25037,21348,12829,7545,-6113,-19480,24032,-30298,12913,-19630,-27911,4106,21912,14578,-15450,-31258,-28686,-18425,25860,641,-27163,-30209,6142,-12712,-18267,-5170,17693,25143,12596,3005,7821,-26685,8258,-21897,-27217,-18492,-17655,-21501,-7208,-28943,29341,26535,-27969,25828,15866,-19916,-27962,-9948,-4776,-30187,-8078,25376,1392,20725,27163,24473,22473,5695,5468,23838,-12356,13262,25189,-27662,11820,-12201,3556,-31783,6622,-29623,13821,-6283,-20461,27052,-11568,-27535,-29980,9259,-7051,26070,-4409,-15855,11596,-26173,-25835,30492,-29964,-27243,-27918,24090,-28840,17263,12102,-32643,21247,-16491,31039,-12930,-20421,-9069,-16379,-19523,-12950,232
49,-20035,22345,26336,-32615,-18145,9562,-16037,26742,15199,3815,15230,-15272,15052,12624,19542,6236,28599,1467,-13853,-15128,21228,-23381,23926,16869,-13538,5988,29931,817,-22019,-18138,-8142,-25848,-12922,-9260,26537,16999,-14650,-22152,21389,-32002,-3255,2537,-3943,-4274,8663,27170,1616,30288,-26046,-10997,6569,31082,-27327,-23827,4430,-24316,-20053,-29935,13931,17193,-12717,-28337,-8469,-32110,30909,19207,-20362,-13624,2994,19934,2976,-25409,18347,19292,1595,20375,31530,-14728,-21119,-6001,-1520,24116,5729,24254,-14096,-21655,-21423,-24726,-5115,-22751,5857,20618,7154,15539,31951,23515,-27526,13871,-17161,16782,-12507,-6180,20166,3793,3840,29358,-1741,-16412,-20749,6291,-23513,18845,-16120,-26071,-921,-11782,-10341,18521,-10945,11869,1003,10223,-6624,-14667,5088,16812,-20586,-29601,-8185,-678,19850,15006,4944,29346,8170,-21349,13404,9202,-32362,-3444,-31538,-8868,17862,20647,20233,-7691,-22755,5317,9063,-31012,-32707,8862,-32083,18226,12477,13105,-25636,24741,13309,-10901,20977,-9587,-27116,-14057,-9316,622,4155,30910,-12962,-970,-8189,24174,8618,-6791,-17302,-6525,-8724,-11526,-19420,-27609,-12154,32268,-4708,23991,24067,-27024,-25416,-23800,4098,5837,5451,-12611,1620,-24113,-13575,-5683,1657,-7296,15251,27535,-23069,-13436,15334,11063,25927,4274,21170,-5008,-18258,-7800,-9446,21490,27898,-29910,-19680,6061,-28883,32456,3877,27990,-4149,13093,-26492,-31021,-27240,-14854,-3174,19728,-11089,-4899,29217,17319,24315,11415,2495,31148,8594,-1135,-12149,22418,11976,21043,26122,2923,11394,16069,12636,21966,11052,32269,-15425,23309,6086,29228,2885,21717,-22267,-1452,19155,16494,9515,-30965,-9630,-9950,-30579,31029,-510,-29251,-31843,29765,25315,-19667,31479,-8502,17416,-11822,-7624,-28875,28545,-32646,22093,-26086,31461,24916,-5444,12327,-2802,-20827,23596,-4071,-2136,7076,12568,-21629,-24102,-6230,-6502,8358,18347,12074,32302,31454,-10978,-13788,9688,19530,-7255,-31285,26915,-23872,10366,784,-23138,-14616,15053,-20144,-5654,-16428,-20745,17331,-27204,-28273,8085,2127,2
7929,-2638,15638,24451,-28051,-14069,-1537,17311,-16073,-23926,21063,-4336,-18256,14522,-15711,-2997,-4882,4998,-17743,17715,-15516,6547,-14717,-8816,-19085,-28368,32046,17230,23693,-16030,-8372,23507,-30589,16325,-27386,2645,27727,-4048,-24004,-17889,-21999,-19336,2157,-11006,2756,14927,11045,28318,21738,-29390,30629,-31518,-16616,-2343,-18566,6647,-25362,-21606,13857,22608,8979,-24749,19278,24602,-26123,-24447,23079,10296,-9894,-29514,3198,-25858,14605,21916,-18680,29964,-23553,2807,24070,-15258,29387,10357,-22021,175,30988,18488,-9244,26238,29439,-18932,-25554,1490,8789,13565,-242,-31019,21312,12073,-28747,7821,-16179,-31101,-4435,-29263,-10361,-3757,29399,25860,-11140,-817,-29917,-3095,-27361,-12364,13885,-25688,4975,28031,-15996,-14290,31850,-22358,7410,-26667,-28636,-24121,4429,9824,29191,5037,13607,-27255,-10037,21531,32312,1314,25909,-6457,21927,3175,-11808,3309,-19672,-8067,-24519,-29508,27677,2656,-18625,11475,13838,6935,9274,17459,-8851,29571,7151,-18836,-706,-29273,-2491,-15412,-24252,-4446,28210,14742,-4144,22344,7074,8789,5547,-957,25549,2266,-827,15853,23471,30223,-27093,-26887,-29384,-20076,-3423,-31189,11364,-21609,-3195,12955,19083,-6843,-8431,7371,-13279,-11765,-4567,-9303,-24967,-20984,5373,-26717,-20801,24657,-24694,-9588,18574,-10518,27109,-361,5262,26789,-14198,22960,3782,-9086,4560,30885,-27475,-30222,-14599,870,20824,-22026,3295,15330,26985,3595,27339,17509,-22247,22714,19154,6178,28255,20239,3877,-6991,30892,-18328,-8985,-27495,-18175,30283,23237,2085,7339,-26198,22902,14879,-7553,25748,4594,-30439,-28509,22816,26574,-19498,9262,750,-1881,9736,-9653,-23036,8287,7792,-9657,17650,11640,11059,10293,14271,-27637,-32277,-1490,7593,30420,24219,-11604,-30967,14476,-10761,-22744,-10503,-4274,3013,32443,9304,-30368,11610,21515,-29161,11960,-14575,-23927,-19975,-24176,23502,7575,-17372,19882,-52,-3389,29765,21749,-13542,-12104,-32319,-20534,2368,-12946,8967,30068,27062,-25241,23148,626,32676,30079,-23183,18692,6509,-7693,7418,-13639,21682,29224,-3434
,-24479,28914,-11515,-20425,-29742,-23431,29,-15530,-8865,-24541,-2524,-16118,-17612,11085,-21750,-16509,-30989,-28716,-27104,11262,-24854,-19843,16533,26595,30782,4455,3447,-25052,-18872,997,12378,15136,-32440,20390,-20761,-16516,7934,-12209,26214,-913,16468,1399,-29377,19157,18759,-23644,-6539,20193,-26797,2818,-6090,7083,-4084,32293,-30432,5126,13391,-22694,-22069,-6698,-18465,-5769,-7625,-21570,5461,-30503,24380,-5912,27386,12854,27986,11396,-2920,14405,1357,-29981,7933,2428,30505,19681,-15609,3606,21401,25986,22416,4831,12833,21418,-17114,21909,102,101,-23619,30637,7531,6629,-7298,-8894,9433,3534,28055,8836,-27409,-6541,-13050,-11268,7296,11880,16016,-15324,12199,-28561,-2630,12447,501,-3867,14353,14389,6579,20916,-21357,17432,-32551,27065,-109,-24132,-31891,13096,-7902,-2070,-32202,13286,15127,16259,2563,-22533,-15530,18350,2314,-22104,-7593,4107,10188,-22902,-10763,-30492,-25808,9918,-4191,25544,-10480,-13928,3264,-12030,-25408,-5875,-23306,14516,-21917,31976,-13142,24647,-512,-18820,-2716,8004,-13136,10742,29314,6551,1727,-22743,-13916,3337,5838,-5842,16297,12834,9517,-13071,24870,16813,14037,-2405,20142,11794,15912,-29232,-31351,7160,-24222,-26384,-16397,-15480,16807,15270,-20588,-22794,-29356,-3324,-29871,-17203,31639,-6571,19340,-11594,-3672,23975,9958,-14563,-11298,24057,4087,12474,-18480,31524,8091,22368,22092,-7679,19982,-22168,-13238,-24488,-6627,31130,12804,12248,-27293,-24827,-19030,14491,-14335,-19197,20002,-18704,23260,-20944,-7653,-25004,-1275,-7298,-3105,-508,-3458,-3925,10600,11902,24799,2194,205,-8129,-22839,-14155,-13106,-24934,18134,-8734,-13693,-28460,13880,19077,-10453,-9151,23371,-14323,-23463,-4991,22046,-19983,1773,-19347,19578,14114,-18242,-24197,25076,26664,20750,-8720,-17500,24526,-14949,8258,7075,9496,8481,-1771,-23341,-30907,29702,27337,-30920,-8604,-19806,-16868,13819,-9278,-18731,5466,-26110,-13714,-2871,22591,-5821,21040,-1941,-9039,-10697,9762,2516,-9503,30234,-3645,-3782,1726,-22807,15866,-13334,11315,-9336,15811,10435,-8879,5
450,26977,28849,-12013,29908,29922,-7735,-28500,-26192,3438,-14444,-14889,-18056,-17658,30337,-32279,14977,9605,-13725,15327,17184,-28926,-19727,9600,-11343,-15272,-3601,-27571,1601,15123,3134,-25391,18515,-9924,8582,29252,9785,-21373,5274,2451,-7731,-28583,32400,-20411,-20594,-14442,26966,-17441,-734,-29703,7870,18761,-5558,29943,18733,7656,7030,-5534,-4820,-6523,-6688,16074,318,11285,-12878,23149,18980,-14847,-27202,-7267,-14415,-8132,-19713,18723,-7578,-1861,-27368,-29602,18045,-24506,-11254,32675,20723,-7610,-26170,67,8506,-14210,28193,3159,32650,31871,-32378,-1415,15322,19194,-12732,26795,-10791,3003,16972,-29273,25089,-2811,-29287,-25223,-15325,-9404,4281,27612,31491,924,-1444,1863,-14597,11046,29177,-30639,-12910,288,9038,-27142,14345,-21434,13759,-4471,-28809,-2723,-27297,29687,-11991,-29253,-30606,-12093,8813,15480,-1831,6252,-18007,31567,-16261,-21727,3054,22827,-24608,14137,-14945,-7546,32538,-24818,-25984,-13647,10316,-31653,-16180,-21254,3334,-11221,27681,-14868,-3155,6751,-6265,-17561,8300,-19843,-3268,-1550,-29745,-22472,-10362,-19525,32400,32135,-31491,32378,-23443,7883,-26986,4835,3958,-32570,10318,-26444,28000,-2591,28067,-17788,19974,-14379,2436,11756,9594,-3837,-18182,21828,-4875,21370,3263,15871,-30398,-16776,-4706,15072,24707,-22035,19835,-18806,5798,32018,31272,28288,-31849,-6798,18985,27172,748,20224,-32508,-22933,-25070,-8658,-9445,3817,-15757,-14581,17601,-15954,19319,-19736,-15122,3702,-23878,-20623,16943,-6274,14469,6848,-5402,-4676,-11391,20768,8986,390,2709,10654,17039,26700,-25664,26622,10524,12382,22368,-548,-1844,-6201,-8521,31400,7284,32439,-6540,30841,24665,-20568,20144,502,-110,14501,-32048,-748,-10347,-19557,-24186,20607,-4999,12414,-28261,-4733,-26824,12576,-1305,-849,19831,-7412,-14877,12465,6649,-29649,-24525,-13252,-5228,7181,31878,-12822,-25943,-22454,-13087,-19905,28355,13049,-17726,-1904,11994,-1994,-29264,-27627,17276,2610,-4695,-19724,-18701,-8263,-5214,-28125,16020,31343,12348,21566,17545,-1708,-28851,27496,23736,10086,
18228,-17095,-16595,20751,-24581,27970,23844,-13991,-5094,11360,21131,-13004,-13195,7948,3923,-8128,-26779,31645,5369,6073,19174,17440,24541,-16671,-1292,25773,17502,6216,30497,-26902,3061,7811,21828,16811,-9935,-1446,31398,-30158,-18685,20366,27074,-15381,25276,16356,-15455,20346,-6831,5894,-6636,-10610,16578,-27971,14488,11753,31070,16640,28257,18694,1690,9595,-322,3961,-27542,-31994,-15058,13662,18817,-17386,1269,-10472,-31851,16750,-23557,-18913,-15811,-18052,-24157,21457,-30917,11355,-20173,10428,21385,6499,-5976,-2292,-4310,-32097,28891,-27049,-22090,27026,27268,22605,-3791,-23119,12528,-4941,-23333,-27939,14462,18314,-11478,-30364,23090,-27603,-27936,-10611,18414,18437,-20734,-14416,30137,-12334,17389,27717,-10458,-31831,8112,26712,-10428,20362,-28724,17884,-17685,10384,-14064,-6690,20290,6236,26245,14235,18279,-20876,25691,27690,-27355,19545,-18753,3518,-7506,-16556,14750,-25720,-8432,24248,-20319,32742,-27968,13653,5571,254,28038,27822,5398,-16708,920,-29393,-23464,-8290,-21696,-18323,-10727,-12670,-9241,-18214,-31826,30491,-8535,-23207,15470,-6986,28642,6721,27638,-11028,24742,-14840,-8708,6197,22430,1360,11710,-31317,5971,14808,-16915,2293,-7602,-352,5574,-22541,-26656,-15861,-26398,-24461,-19744,11086,21860,32270,-6946,-487,24278,-2713,-30549,-21331,3839,-31884,-22070,13899,4065,-129,10903,-3213,17223,30110,30932,1016,27632,-19868,-32587,-13778,5761,10170,-8503,26717,-15757,-28599,-17333,20326,5759,-6135,-5262,26787,-2260,-1849,10217,-2504,-25085,26513,-15832,24771,3902,-14453,933,-15076,3846,19054,22798,-27068,25820,5021,18707,-30789,24378,-30961,31581,-24330,-9995,-6615,1674,-13735,2297,-1032,4297,-21033,31575,-18077,26892,19813,-20757,-18102,-20889,-28736,19100,-32473,-24847,32069,24940,12736,15834,-31532,-15851,-6114,13831,-26163,-791,-24592,24292,6628,-10640,-17674,2067,-12490,29169,12484,17790,-24416,-18435,-18936,-29849,3221,18931,-27041,-17533,-5565,-29797,-30702,12037,-32244,20415,-17232,21072,-17967,26680,32731,-30596,-19882,-14964,8157,-13907,
28963,-27039,-28560,3496,25425,2684,27728,12968,-5710,-9443,17005,-10487,-29929,26696,11344,-17462,6887,-20527,-29511,31988,13852,-14774,-22905,-10341,13087,14974,31452,-19225,25671,18116,-7827,-13565,31747,7331,32746,-24587,27743,19188,-31981,13872,6864,-6884,-26274,-2851,16482,-29409,732,214,13930,-22236,-22338,16176,17361,11811,29923,-31887,-21741,-16454,-2245,766,-25747,30614,21907,12912,26614,-27398,25905,-408,-26970,-29112,30871,6976,-4275,5755,-32016,-20725,-13746,-12427,-4309,-8200,-28973,-32585,9237,-15844,16983,18310,11827,-17938,-23977,2567,25681,3892,2449,-2868,16455,30516,26420,-7621,23575,-20845,-6623,-24333,-8535,11405,27830,2171,26191,-7533,3701,1196,10884,4003,1725,-28356,30719,-4942,-18557,-5225,7178,2111,-2586,7556,27852,-10327,31561,-12347,-17755,-18576,-8470,-23683,-17242,-497,12070,25272,15227,20031,-9916,25353,-31795,-11278,-30793,-19892,21336,5705,6492,-20205,3381,21494,-13792,-5478,28270,16848,6254,5413,-30596,32759,-20445,11242,-29610,31653,-14130,31678,-16551,14946,3345,1857,29288,16354,18576,-26158,-16762,21803,-32447,12497,17643,30226,-12628,-22597,-4207,-19350,-9596,-22706,-10159,-9458,-11595,5224,26142,12187,16761,25072,-12057,17423,24621,4638,-30624,5875,-22330,19888,-30267,-10105,18845,30437,-9962,-22564,-354,19787,15501,32743,1817,4830,-13237,7144,28582,20514,-19109,-12105,25931,-23073,11423,-29847,-24829,-12103,-12738,-20469,-23350,-15407,3580,3889,-25236,-22313,-22834,4176,-19492,-442,-3305,4893,22214,-16803,8922,-1477,2520,7893,29493,12893,-26589,8773,24037,-6561,7317,-22726,13931,5926,21123,20139,17012,4186,23060,7167,28881,11483,-24916,23635,23337,-6377,-14579,-22749,4727,28562,19823,-6190,-9441,23044,8941,21041,-15835,-22462,20935,13993,16577,-16597,-21768,1253,-21416,30315,28621,25398,32485,944,20200,9974,2431,-10419,-4808,15879,-19367,-10190,-10094,32661,-1799,-13982,7894,-15710,-8290,20738,-19717,5027,-24064,524,-9182,28202,-18309,17122,17176,-31805,9468,20808,9388,-21487,-26685,-17726,-17286,7443,29612,12958,-32467,-2382,1
6230,2770,-24311,-30394,2808,27154,31383,-12262,18234,25870,-12935,-27882,20091,6797,-11507,9276,-13225,-15731,5093,-28995,19730,-11109,47,-14570,18656,5337,24847,15921,-16803,-5668,-15378,10076,-26583,27700,-10111,5307,-21016,30813,7700,17975,3635,16320,27784,31690,-18114,2863,22277,12943,14628,15844,-24140,1459,24875,11304,31254,3458,1668,-29911,-31398,-26826,-7112,1504,23096,26305,9625,-26329,19423,-8309,-27034,11381,-7051,-23321,1635,14926,-21937,-10206,-14591,8745,-22742,25777,-10228,19263,-25289,18699,31809,14368,29487,12830,27582,-15206,-27422,32675,-19502,19801,14025,6804,25535,-5997,13124,20624,-12130,6510,28245,2042,7091,3207,21990,24811,-20170,5957,5943,-5051,14859,1066,5070,-6186,25612,-22681,-15763,-32347,14539,-792,-14480,13686,30129,20967,1886,7673,-3563,18362,10812,18591,17713,-19157,13547,13187,1355,-8958,29866,-19046,-20298,-13498,-4622,-13772,4669,26898,23054,30170,-9,-30231,-20960,-27123,2726,3717,14865,-27575,-1602,-4802,-32250,-28398,-29150,-21946,-12994,-24531,-31346,7323,22943,-26995,23173,7482,8285,32569,-6532,22417,-7814,10281,-10655,19485,-22373,17331,25532,-13862,22557,-19459,30597,-24328,-25842,5533,13377,2051,-6440,26441,9893,26286,30715,-25122,-14861,7713,7930,17816,25304,-15957,11829,-1331,17850,21722,8803,-9887,-28181,30337,-32121,6593,13585,-12160,16735,-8085,17396,-13074,-4089,-4379,22547,-29885,2737,-25514,-4354,26570,-13806,-30472,15986,3079,-14662,-1978,-23279,29768,20116,-31872,-29968,32474,-29518,10740,-32064,-30446,27403,11466,-7418,-20222,-22249,11388,-24065,-31240,31774,-8317,23504,-17355,25548,4963,-6370,29656,-6211,30853,-22441,-16040,-4576,-29426,25431,28475,1839,-29635,-859,-14993,-705,-32154,20497,15073,19154,-3320,1534,16037,-32134,-18700,-6653,-28683,-24611,23061,12670,28128,3482,1520,28649,21381,-11162,27528,-10148,-32743,12834,-25028,-1266,13133,26623,-21567,-29309,-24531,6880,31230,-4047,11326,-10861,14944,8114,21950,-5221,30461,-20918,11454,19357,7180,17078,29648,10562,19324,-15820,27261,-11640,2821,-24190,20485,
6125,14993,-26581,-13306,27666,-18413,25726,-14317,-14858,19168,5538,-4642,-10433,1119,4877,-10896,-19624,9583,31131,-7458,14822,7149,-5249,-25022,6246,-16783,13884,-30205,-11668,-15282,-31401,-13422,-9953,6818,26781,-13961,-4967,-28382,-4014,-21935,-19481,8825,18146,9298,-4125,-18421,16295,182,-9630,-26307,-27792,-11942,1100,-23568,-11436,30691,-3918,23900,31889,1278,24348,-935,28348,-20617,10491,-32511,-30549,-8973,-2943,-8592,-23647,10771,-12529,-26986,-9826,12966,4380,-17859,-17319,21027,-7102,11360,-25261,-736,-2163,-18575,16646,26429,18201,25852,8345,-18969,-25999,17135,-1555,-22701,2130,23028,10780,185,25909,30207,-5099,23377,-30574,27975,8914,2540,25480,9347,-29874,-24164,543,-24955,6240,-3271,32151,2663,2704,-21862,3611,12975,28461,-28235,-32300,9725,-9926,-22041,21833,-6023,-22770,-11201,-16168,28931,-19516,-14057,-6392,-11944,-12091,105,25695,21733,-3,22619,1439,9833,-14843,4,-3979,-10635,-11730,18979,-30370,14263,-25504,24233,-4980,-2055,2749,-1820,-20226,20359,8410,-28671,-21659,31585,21693,10724,25446,-32244,-13138,-5941,9592,-133,4148,-27853,-7772,-13001,-698,25461,-14380,-5849,23269,-6824,-18646,13578,-24166,-2037,-17698,21550,-27760,-29857,13105,16116,12883,22241,-31792,15072,19807,-25321,26804,-16693,-5551,12839,6255,-11107,32228,-3480,-27173,-7407,-4263,-19236,14940,31580,-6585,-9848,21904,26199,25816,-18472,-10279,31389,-31027,-9040,-14138,-20407,25596,-2783,5535,5979,3132,-8466,13743,15430,22362,-18526,11255,28765,-9940,-18500,-28955,-11996,14635,11477,3922,16291,18339,22863,10906,13945,6110,25258,7279,1434,10212,-3192,11543,-10937,-7484,22029,-17049,-29408,32183,-1865,-20859,6861,-30762,12145,-4220,18775,-13124,-2750,-7130,31355,-24453,21072,17444,30400,28150,4034,-32388,-17422,14062,19504,-30478,-17121,-23569,-30235,122,32746,19082,-288,24240,14456,-9268,16937,19932,-8291,-1410,-10442,-27650,-10792,19276,-10116,-25953,-16061,29119,-11973,-11017,16859,-151,24060,29541,4918,-27067,-10905,15623,13410,160,-29978,-11235,8987,-20159,-30695,-21997,-2
0103,26206,-21564,2736,20827,-24429,-25720,3587,-18109,20165,-4178,16512,-19877,-6855,-31091,5132,11723,28720,-26593,954,-21986,5656,-2707,-13065,20805,-24828,-1215,-9505,-5764,-30764,-15691,-1856,-22801,12779,-4205,30074,12021,-8743,9736,-17326,-29745,-2357,-32721,23,10703,27589,22601,16630,-19432,14917,29690,-29183,10474,-14554,-10741,-6831,3606,10143,2172,4285,10540,-31243,27230,-19483,-9619,-17334,-20416,31522,-27441,-11628,-19134,22317,-4608,13854,-25124,26935,-5147,23628,30448,26585,-25303,12953,-20622,-4168,7728,10157,-11957,-19838,-11320,-30231,-10280,30869,21268,-20131,6457,23593,18224,17583,-32518,8172,27752,-27855,9125,-700,21328,-31309,6187,10011,-4021,15857,25665,-8889,-10164,29653,13412,-4646,15001,-24869,-11925,-21622,-7426,-29412,11137,-1703,23970,-13379,2776,30001,12184,-14835,-981,3945,15055,13658,-32252,13989,-17938,8141,-22214,-28483,5534,23290,-18861,3265,3865,3262,-25403,30936,-11558,2659,17052,23426,-25291,-14433,-3050,8676,-14487,-5550,-16151,-11118,6887,13150,-29117,-19841,-29695,27415,-27630,-13504,-26950,21045,6471,13252,-21843,-13408,880,-16501,-22462,-24144,31951,-5821,7956,-12863,-25303,14895,3339,-30499,-22779,-17096,-27953,-7189,6497,5094,10783,-20550,-26589,-32229,-32287,12020,-8277,-15716,7057,-18214,-17214,-5864,-11126,23112,-24902,3743,19011,-4900,27842,13526,-17587,14438,226,-3498,28444,21267,-22580,2319,-12849,14078,19370,18806,23525,6600,25807,20683,22214,8724,-29140,-2981,21237,-4146,-14628,-17742,17817,-18843,10517,18830,-16800,-25517,17171,16543,12343,-6105,15436,20931,-10921,-18804,-8840,11449,26369,-30175,-31835,4419,-20109,-4588,-29709,-17662,9305,-25333,-5139,-20000,7471,-30180,32719,23919,-10797,-17281,21586,11480,1192,15757,-12813,16282,31798,-27337,-4337,13292,24803,6371,-14416,-9246,27349,-24130,-27740,-8175,12009,-5109,30066,25340,-17979,-1046,-12432,-26292,7876,1837,28795,-20657,-32554,-17618,-14329,23157,9269,-27013,-13618,5094,2537,-14163,-32039,-3958,694,-20163,-17881,13145,-6710,-11456,-3804,-26744,-7059,341,-2
0390,-16930,19868,-25239,-30564,-19385,20580,-4045,-24483,-19765,575,-13801,11581,30219,32606,17061,-18753,26908,-22783,12841,-5667,-27671,27404,8598,-13230,21168,6786,-25045,-30629,-21327,-28228,19906,-24870,22144,6563,13920,-18043,16992,-24749,27870,20003,5971,10451,-4124,-20361,17914,-19314,10814,-14538,-29708,-7143,-2554,-26056,21020,23422,-3226,30497,-13401,32095,28293,-17026,-8463,3300,16386,12109,-11295,-26746,-32030,10013,-1525,-804,-13320,-5463,-24419,1461,-14591,-26972,-5377,19409,9923,-28455,-3011,7825,-24067,-14507,-25004,19750,-4875,-31223,24118,-12573,-24410,519,11568,-23869,22559,9093,6197,14736,-25920,-5729,-11007 diff --git a/tensorflow/lite/micro/integration_tests/seanet/conv/conv5.tflite b/tensorflow/lite/micro/integration_tests/seanet/conv/conv5.tflite new file mode 100644 index 0000000..6a0d9e2 Binary files /dev/null and b/tensorflow/lite/micro/integration_tests/seanet/conv/conv5.tflite differ diff --git a/tensorflow/lite/micro/integration_tests/seanet/conv/conv5_golden_int16.csv b/tensorflow/lite/micro/integration_tests/seanet/conv/conv5_golden_int16.csv new file mode 100644 index 0000000..bdd9c65 --- /dev/null +++ b/tensorflow/lite/micro/integration_tests/seanet/conv/conv5_golden_int16.csv @@ -0,0 +1 @@ 
+32767,-16259,-14654,-11761,32767,31094,32767,32767,32767,-3490,32767,32767,32767,-32768,-32768,-32768,-32768,-22772,-32768,-32768,-32768,7957,20180,-32768,-32768,-29366,-32768,-8601,32767,-32768,13780,32767,32767,11578,32767,-32768,32767,32767,-5090,13439,-32768,-4227,32767,-19874,-32768,32767,32767,32767,4548,20366,19181,-32768,32767,32767,9222,-32768,-32768,32767,-24128,-32768,-32768,-31634,-32768,-27046,32767,6197,-32768,32767,32767,32767,15956,30121,32767,-32768,-21318,32767,-3281,-32768,-9213,-3777,32767,-32768,-4641,32767,-32768,32767,32767,24082,-32768,-14048,32767,-32768,-13365,23902,-32768,-10639,-32768,32767,-32768,-1085,-32768,32767,32767,-32768,-32768,-14088,32767,32767,-15069,-32768,-32363,32767,-32768,-32768,13827,-32768,4280,-32768,-32768,-5828,32767,-32768,-16765,-32768,-32768,32767,32767,-32768,32767,-13733,-32768,32767,-32768,32767,5026,32767,32767,-32768,-32768,32767,-7678,-32768,-32768,7099,-32768,-32768,-8771,-32768,1008,-32768,-32768,32767,-32768,966,32767,-32768,-32768,-32768,-32768,-32768,32767,-7262,-32768,18261,-32768,-32768,32767,-32768,11671,-17829,32767,27236,32767,-27791,32767,-32768,-32768,32767,32767,-32768,23727,-22870,32767,25534,32767,-10650,32767,-32768,10483,-32768,-32768,-32768,-4684,6180,-32768,6011,-32768,5591,-11147,-18724,17425,-585,32767,-32768,-32768,-32768,-32232,32767,-32768,-32768,9824,32767,-417,-32768,-7942,29633,-32768,13004,-32768,-32632,-32768,-32768,24145,32767,-32768,-32768,-32768,-32768,-32768,-25041,32767,32767,32767,32767,-28306,9687,-29696,32767,-32768,26386,16300,-21084,-13743,32767,-32768,-32768,32767,24569,32767,20418,32767,-32768,-32768,32767,-32768,-26172,-32768,-32768,-32768,16307,-16068,1369,23853,-32768,32767,-12798,32767,-16436,32767,-32768,-32768,-32768,-17188,32767,27716,-32768,11544,-32768,32767,-32768,-32768,14151,-32768,-32768,-32768,-32768,-32768,32767,-32768,-32768,3631,32767,-32768,32767,26905,-6747,25262,-7820,32767,-25325,119,1912,7470,32767,-32768,-25224,-32768,32767,-8625,12705,-12457,-3
2768,-32768,15734,32767,-32768,5952,32767,-32768,5637,32767,32767,4916,-32768,-32768,-29525,-32768,4503,-32768,-28427,-8425,-18906,32767,-32768,-32768,-32768,32767,26108,-11166,32767,-32768,-32768,-32463,-32768,-32768,-32768,-32768,-32768,-923,-14568,-21317,-32768,32767,-32768,-139,-21688,-32768,21902,-32768,-32768,32767,32767,1133,17465,2068,1208,393,-32768,-9203,-32768,-32768,-32768,-32768,32767,-20275,-32768,-32768,15179,-32768,-5173,-21017,-32768,436,32767,-32768,-32768,-2289,-29680,-32768,-17786,2907,-32768,32767,-32768,-27072,-32768,-18287,9275,-32768,-24204,-32768,32767,32767,-32768,-7394,-32768,7326,-32768,32767,-32768,-32768,-32768,-19366,32767,-32768,-32768,-32768,-32768,32767,-32768,-32768,-32768,6486,-32768,-32768,-32768,-32768,-27578,-32768,-32768,-9930,-1053,-32768,-32768,-26274,-32768,32767,-32768,-32768,-32768,-32768,-15671,-32768,32767,1096,32767,-32768,-32768,32767,-4374,19429,-32768,32767,-32768,-32768,-3326,32767,5457,32767,32767,682,32767,-32768,14018,-32768,-32768,-32768,-12582,-5740,9228,-32768,30064,-32768,32767,-21338,-1382,-32768,-10462,-13670,29706,12817,-15765,32767,-32768,-7924,32767,32767,-32768,32767,32767,17402,25956,32767,-32768,-32768,32767,-32768,-32768,-31731,-32768,23560,32767,-18750,-32768,-32768,32767,-32768,32677,-17138,17508,32767,32767,32767,-10111,-32768,-17800,32767,-32768,30873,-22318,32767,32767,-3310,-32768,-32768,-428,-32768,616,32767,-220,-26179,-27337,-32768,32767,32767,1098,32767,-19447,32767,5574,-32768,-21442,-32768,30122,32767,-20147,28443,-24223,32767,-32768,32767,32767,32767,16872,32767,31288,32767,-23871,-32768,10830,32767,32767,27968,32767,-14494,-24734,27553,12963,-32768,-28173,-8028,-25264,-26654,21043,-32768,32767,27591,-32768,23630,32767,-32768,32767,8519,6569,-32768,32767,-3518,30192,32767,32767,32767,-32768,32767,-32768,2740,32767,32767,25565,19820,-32768,32767,32767,-32011,7811,32767,32767,-30812,32767,32767,-17083,32767,32767,-29486,-7265,-32768,-32768,32767,32767,4146,9784,32767,32767,-20591,-15811,3
2767,-32768,32767,32767,32767,32767,32767,32767,21262,32767,32767,32767,32767,-32768,32767,-32365,-32768,32027,28074,-14100,32767,32767,-32768,32767,12620,8775,28846,4066,32767,-32768,32767,7695,-1729,32767,-11796,32767,-22288,32767,-32768,16361,7630,14013,-18564,-16843,32767,-636,32767,-5155,32767,29783,-32768,32767,32767,32767,32767,32767,32767,32767,-21619,-13850,32767,32767,29891,32767,13796,-25413,-27845,32767,21609,32767,23652,32767,32767,32767,32767,32767,32767,-1964,-32405,32767,32767,32767,-14399,-2325,-12411,-32768,32767,32767,-32768,-32768,-32768,32767,18875,12493,32767,32767,32767,22191,-32768,21938,31739,32767,32767,32767,32767,32767,-32768,32767,-32768,32767,1833,-32768,32767,12000,-9631,-32768,32767,-32768,32767,32767,32767,32767,32767,-21195,-12406,32767,12860,-32768,32767,32767,-11425,32767,23648,-32768,32767,-32768,-32768,28009,-32768,-32768,14512,-32768,28278,-32768,-32768,32767,32767,32767,32767,-32768,-32768,32767,32767,32767,-32768,32767,32767,-32768,32767,-32768,-32768,-32768,-14213,-6387,32767,32767,19685,-32768,32767,-19233,-32768,-9783,-32768,-32768,-32768,-32768,-32768,32767,32767,-7451,32767,32767,-32768,32767,9793,10368,31963,32767,-32768,32767,-32768,-32768,814,-9231,19679,-32768,32767,13349,-32768,32767,-32768,508,-32768,14899,32767,-32768,32767,16468,32767,-32768,32767,16817,32767,32767,32767,-15615,32767,-14007,32767,19471,-32768,32767,-32768,32767,32767,30618,32767,-32768,5025,32767,9012,-9016,-32768,-32768,-32768,-32768,-32768,-27528,9480,32767,32767,-32768,32767,-32768,30121,-11663,32767,-30260,32767,32767,9370,-7566,-32768,-32768,28811,32767,32767,32767,14603,-32768,-32768,-32768,32767,-32768,32767,4403,10856,-23042,-32768,32767,-5621,32767,-32768,-15257,-32768,-32768,-25648,-32768,32767,17859,-32768,20919,-32768,-32768,-32768,-32768,-32768,-32768,32767,29170,-32768,-32768,-32768,-32768,32767,25635,-32768,-3936,3976,-32768,32767,-13891,19724,8267,-32768,32767,-32768,-32768,-32768,-32768,32767,-32768,32767,-27563,32767,-32768,3276
7,-17319,1833,-32768,32767,-29654,-32768,32767,-16689,-28699,32767,-25832,-32768,-32768,-32768,-32768,-32768,7194,-32763,32767,32767,-4308,-32768,31878,-30791,-32768,22681,-32768,10465,-32768,-15613,-32768,-32768,-18187,27711,-32768,15385,-32768,32767,32767,-32768,-32768,-32768,-23813,32767,-32768,-32768,-32768,32767,-32768,-32768,32767,32767,13490,-32768,32767,32767,-32768,-32768,-26313,-32768,-15953,-10426,-16687,-13671,-32768,-32768,-32768,-32768,-32768,32767,32767,32767,-4412,1770,22353,-32768,-32768,-32768,-32768,16547,-32768,11331,32767,-32768,-20518,19901,-32768,-24271,-32768,-32768,-28007,-14017,32764,-32768,-17340,-32768,-32768,-21560,-27118,-32768,-32768,-30299,-32768,-18822,-32768,-25735,-32768,-32768,-32768,-32768,-4944,-5054,-32768,32767,-32768,32767,-30173,32767,32767,32767,29756,-32768,32767,-32768,854,-3495,32767,-32768,-12154,32767,-32768,22621,32767,-3289,32767,-23008,23089,729,-31346,32767,-13915,-11937,-32768,-1976,32767,-32768,-32768,-32768,32767,32767,-16984,-32768,3861,-32768,-32768,-32768,-13896,28643,32767,32767,32767,20983,2038,-27040,24667,22194,-21376,32767,32767,-32768,32767,-32768,-24274,730,28485,32767,9830,10300,-32768,32767,27745,32767,32767,-32768,18675,10521,18878,32767,-32768,2263,32767,-32768,32767,32767,32767,32767,-28710,16900,32767,32767,32767,32767,-32768,32767,-2176,32767,9414,20375,32767,32767,7074,-32768,-32768,32767,-7479,32767,-32768,32767,32767,32767,6367,-32768,-39,-22908,-32768,-32768,21276,11804,-32768,547,32767,-32768,-32768,-30225,32767,32767,-30919,901,-30021,32767,32767,-32768,26487,-32768,-32768,32767,-32768,-9449,-1695,-24595,-32768,-32768,-32768,-32768,-6031,16264,32767,-32768,27825,-32768,-32768,-32768,32767,-32768,-15769,6067,32767,-32768,-29399,-22770,32767,-32768,-32768,-32768,-14409,32767,-22190,-32768,-32768,-32768,-32768,-830,-32768,32767,-32768,32767,31155,-17824,31929,14612,-32768,-32768,28543,-32768,-30997,-32768,-29778,-32768,31511,6428,-28994,-32768,-22847,-32768,-1111,23524,-2322,-32768,32767,-327
68,32767,-32768,-32768,-32768,32767,-32768,26538,-10138,15722,-32768,-24738,-32768,-32768,32767,-32768,-14264,-32768,-32768,-32768,-29631,25804,32767,-32768,-32768,-12272,-32768,-5885,32767,32767 diff --git a/tensorflow/lite/micro/integration_tests/seanet/conv/conv5_input0_int16.csv b/tensorflow/lite/micro/integration_tests/seanet/conv/conv5_input0_int16.csv new file mode 100644 index 0000000..4ef1a9d --- /dev/null +++ b/tensorflow/lite/micro/integration_tests/seanet/conv/conv5_input0_int16.csv @@ -0,0 +1 @@ +-5913,30232,-26917,-1638,11195,29121,-13531,-4785,-28807,-30749,-4432,-5435,-7906,-13844,-18197,-28311,-3984,-25439,21248,-23697,-1877,28741,-23702,-3265,-19447,10551,643,21170,-28884,24484,21521,-26107,14089,783,5803,16615,-27281,-25423,12216,26115,27663,-8026,10951,22985,710,12512,-18505,11684,-18620,15651,16821,7270,13844,-9135,-3380,-10568,32375,23075,30210,13510,16322,-17450,7919,15386,-17375,22214,30428,3347,-30842,-21442,-23363,11356,-14225,21443,-19928,30644,14385,-8357,6533,18998,210,-21509,-2286,-23756,4794,-10455,-2315,-28639,16339,9870,17257,26557,-29980,-15877,-30096,7258,-8309,20517,-8827,15825,-13252,-22422,-24191,20905,17622,32472,-10139,857,10844,31205,19207,-9377,24477,-11115,7318,3896,-9102,318,-12995,-28056,-32672,-1993,18223,-22362,-27765,24047,25171,-16955,-5790,-6245,-22297,16144,-20917,-561,29349,32531,13983,-31027,17541,6633,-32061,-26554,-29956,-29594,168,-19386,-28922,29194,-19673,30951,-1357,31086,4653,12769,24566,25506,25637,3724,-13992,-22061,-22059,19285,-659,6417,-11384,-12499,23564,28304,20982,17190,-22887,-28140,-17965,-3386,-18981,5475,26105,582,23824,30660,32634,17821,9808,-5350,-7706,31134,25371,-19255,-22780,-22806,-22850,-25106,-2039,18259,1714,-10079,31881,-24039,15348,8770,-31691,18023,1434,25553,11723,-20373,-30435,23118,28320,-12465,-25926,8972,14257,11948,3016,32219,28228,993,-868,-23254,14673,-1740,16971,20798,-3497,11153,-644,10180,7076,25721,-10651,-2465,-20888,-23699,-25198,-27840,-27414,-2727,28721,-23759,13340,2
139,19081,650,5064,-8349,-28329,-9223,18564,-10801,27304,-22237,24420,-23156,-22021,-20860,-27091,-3692,-4154,-3118,-1703,2250,25053,4802,-2294,-30183,24130,15321,-7066,-7197,-1507,13257,22937,18790,-29455,20243,-6762,-6544,-20974,9428,-13680,-15571,-3489,7637,16191,-22849,32697,26802,19535,15881,-9424,10369,-23844,31567,10339,2109,-3393,-4629,19492,-21160,25924,-30743,22430,-31532,-21201,18670,27130,-7295,-4118,-12247,-11591,-16192,9610,-27560,11070,7489,24868,-12009,13458,-23668,-28498,17771,30273,23959,-3694,-2333,-2372,-15962,-5449,32036,16974,21340,-26878,-10896,27667,-19843,11622,11454,-30417,-20498,14986,-8547,25338,23362,-20837,-18121,26463,-27786,-11009,27318,14229,-18133,-23596,3192,6733,-24781,28027,-30325,-22168,24600,23281,6965,-9701,-10425,-28537,26176,12833,-26311,-26916,-29522,-22831,-5834,-21006,9279,-12998,22574,5955,-31629,12463,19087,23669,12362,19356,18763,-13109,-8883,2127,18175,1294,30354,-5663,13298,25908,-9481,-20866,-12636,-24852,21258,7868,-743,3899,-6988,5934,-9506,-31782,-8369,-8173,32140,19773,11833,30189,23238,-17484,10079,-23791,16870,-8264,-24135,26502,-30235,-10572,27508,23648,14784,-6161,23050,20770,9237,21779,-30898,-23310,-4244,-18884,2571,7939,-13787,11883,22349,-22919,29602,-7436,3236,30528,18670,-22986,11641,-23050,5925,-29498,16233,3173,13017,23157,2221,28607,-28472,-4007,30876,-293,23127,-23996,20977,-24150,-12142,10233,-11547,-10885,28294,-18796,23255,-24362,-1187,-29645,-27987,-17621,4742,-7390,-30833,-31329,20597,26781,24926,28356,-28419,-19261,-2819,-21503,16969,-10163,7515,32248,-21206,-27585,30599,-20083,8257,-28074,-22926,10001,-26452,-12672,9597,3073,-31990,10056,21326,14404,-17918,30658,-23647,25777,-20185,-11762,-7010,-23525,25505,766,-3566,-13504,-4606,-32092,10849,4557,29130,32669,-18229,1593,29612,-31774,2835,-11296,13332,25617,-25375,26802,9749,31124,29823,-7544,9287,-29564,-3691,8132,15922,-2778,-7686,-14990,-23199,-28001,24180,-26507,-31594,1390,-25660,11229,-31970,20061,27788,-14769,22644,8570,11444,388,9975
,-17629,-13461,15872,15094,-20168,6689,-622,-2467,-17684,494,23144,22851,-28567,-31234,5842,10977,-4641,371,-22597,-7524,17644,20936,19108,9669,-22207,-31369,3463,4771,2264,-6895,21580,-32069,6695,18659,-2172,18178,-28665,18916,19613,11213,-7011,26059,-29973,7037,22817,-17170,19758,-12645,-22321,-18928,12952,25024,-273,17412,-19909,27151,12056,16706,28923,-27785,12129,-18017,56,12602,-24764,-18929,-30843,-10379,32596,2685,-27372,-30729,-22627,-5934,30283,-23778,14261,29998,-5416,19311,-25249,-29059,14310,24118,27918,-27975,-2260,-24585,-6306,3291,17822,-17308,-11397,-10299,-7421,8205,-580,14429,32674,1277,-24923,-23542,-17297,-11843,-21234,9677,-14080,2875,20666,1057,-4142,13521,13619,-5668,-6671,2323,-5845,13759,-9627,10368,-12213,-18531,11459,-24718,17252,-19268,-16812,20039,7946,9972,-17123,2099,8851,-8556,-29254,14614,-30079,-516,-13140,-23679,18181,-19828,-1166,-31909,20207,-28569,-23543,-4935,-5494,-9415,12820,12059,3599,-25872,22808,-7668,25972,19200,11185,12021,18297,8407,1789,2141,26792,15485,11326,5844,-19302,-2449,-24223,-18746,28421,-31306,2423,-19866,-24813,8254,-32672,-11998,-6607,23124,-20396,-28559,25215,-12535,26306,21244,3490,2815,-6254,-19333,-7497,-18728,-3915,-13765,-28812,-30314,-3227,-23452,23978,20617,-24448,4519,13613,32408,-23055,-15477,-8291,19749,-25621,-14841,32163,30758,-1521,13572,-24184,1388,1492,20322,9159,-2673,27831,-19440,-2421,23457,29512,2578,28362,-11806,-22564,32485,-4639,-11110,-4571,-2343,6064,-5288,-6060,4965,21520,-5098,-30243,-13191,-30917,-29280,30622,-17033,-2429,-26401,9114,-27635,-15538,-17932,-6598,29701,-27002,7069,28530,-14099,10847,-773,14118,2705,-18256,-21710,28307,-11792,28256,2220,-29859,-20543,-982,939,-7044,6940,-6523,-22509,30990,2464,-17314,-21366,-2374,-5917,-30455,11187,-25508,-30246,10858,-28086,-30617,-1554,-13275,845,7984,-8991,-1860,22794,-7425,18633,-8246,-28659,-10842,-9649,-11494,9506,30012,16313,-22748,6553,-4035,8213,29400,-24648,20549,-17855,-28479,-22467,19366,10525,-31449,-10416,-12869,-17941
,4879,26374,7964,-4025,6418,-29156,-16950,-4014,-19961,977,17075,-23129,14032,-15438,24008,8012,-7171,9383,-12994,26935,-8440,-8220,14630,19460,-13610,31165,-1030,8106,29225,29176,7668,4289,-21886,24160,27234,23620,13933,31103,31860,-15415,5020,-18814,-14854,16551,-3522,11765,9242,-489,28173,-8194,-6883,15356,13876,-13911,-16643,30185,8071,19804,9316,-4750,-3064,-26533,25223,-30206,-13815,-7634,9642,-7485,357,-26140,20948,29984,-15716,-25013,-32311,18667,-18098,3871,16471,-26959,23756,-7362,20555,20324,-19485,-28554,3969,-10928,-1094,-11540,8134,-2211,24033,-3500,7958,29063,21484,-6754,15635,1092,20519,3013,14002,-4433,-1666,-26896,-18090,-27818,-12405,-3031,-9641,14947,-7220,-20350,17373,26551,-30095,23782,-28128,-4987,6153,-8232,9587,13433,11892,18062,13460,18925,23251,2758,-22000,-28898,-30721,4660,30316,24090,12179,8823,23808,8009,-17774,13715,26859,-7110,-10006,-19512,25076,6738,9817,32735,-13247,5327,-23494,-27741,31151,8971,10277,199,16191,-10590,18400,19924,21790,19331,-5491,29300,-32703,-25360,-29062,-27243,9821,-309,25140,-21046,-26894,-31416,-312,-11415,-27199,30067,-6763,6542,-1459,4965,7314,27142,-30820,10562,32428,-14876,3911,-25878,-21800,30456,-1733,-20511,4690,3879,17415,30817,-20162,18256,-11790,5585,23982,-27071,21021,-24003,13484,14000,12927,13064,29610,15597,26022,15243,25852,26369,30441,26711,-22398,-16554,16701,-27128,-2498,-21920,-31895,14965,-11740,-10618,2649,3815,-14242,783,-30656,14797,-23498,-31248,11004,-10165,8104,-13454,-29103,-3632,5512,-11481,-6113,2232,-23599,-14441,26519,-12802,-475,-20928,14265,-30051,6961,21932,5240,3476,-21547,2094,16099,672,5228,24720,-30395,13500,12931,19777,9363,-32482,-5152,10467,23257,-32370,-18279,-18799,8970,-12339,14374,44,20874,-29744,28686,-7522,23748,11352,-17759,7012,12725,8339,-26604,-24091,-24435,25860,-21862,-21334,28037,-31902,-6916,9123,29990,20539,-28964,12667,-11404,29331,19851,-9772,13396,-4439,-6136,-20555,31151,-29039,4687,17001,18100,-17555,30645,-18804,17131,31475,-23900,-19610,-21554,-6
856,-16699,-31524,27473,939,-16247,20369,-5841,11089,17920,-32286,15041,5002,-20983,20493,-28695,-29128,19723,23353,20399,-22720,11273,31104,20262,-346,7069,-18803,-5109,28643,14516,-8582,-14443,27257,24244,-5608,-17982,-16987,-20562,29500,-24540,10507,27788,26887,9245,12265,15265,19947,22340,29420,-11221,-10474,17231,29849,3883,10662,-28412,-8845,30505,-20942,32564,-8965,27349,-16780,17537,-205,-13562,9526,-1345,4525,-27482,9557,13977,-23290,21055,-14814,31280,-6885,-13190,21689,-31125,19094,5748,-8942,-11600,21,-30134,-18457,-19671,28906,22986,-12352,10164,28764,-19536,5219,17524,-3057,25299,17787,5177,-23000,23187,16650,30976,-6885,22548,-7383,20146,27468,-26248,-29281,19301,-26425,19655,11879,-5108,-26104,-10320,13676,-4031,30430,-26240,4744,-4218,-28732,-21133,1541,-14768,26494,21503,524,-938,-7065,18962,26598,-24416,-8305,-15332,32125,-8464,-4487,6404,13050,8304,4067,9232,-18918,-1520,1788,23078,29771,20928,-13251,7997,17821,-14845,6323,-24637,-8103,-6155,23443,-6997,-24829,10375,1411,-15476,18410,21943,-20142,-6845,9625,-10974,27170,25230,-28017,27258,-7085,18014,223,-14634,1565,-21106,20982,7538,-17916,224,-22690,9638,-32053,27683,18413,9057,29070,-420,-26039,17501,-25691,-22788,-27657,4720,-606,-27386,19436,15163,31531,300,-7041,-22672,-19910,-1623,7433,27733,-6249,28638,-17662,26827,29710,8173,-8316,-29774,5441,-20721,-7602,-31739,31872,13427,-14377,16919,31039,-6260,-16968,6474,-9862,3841,22229,-24607,-3577,20427,-16344,-17783,-30380,-18133,19642,16156,-2495,18739,24501,7622,-19272,-17174,14998,11607,4153,29407,-18229,13321,26351,23438,-20390,30074,12116,17547,-23228,23888,-15814,15769,-4693,-24954,22431,26328,3370,-526,23706,14920,1013,9720,15453,-4338,-7548,13199,411,28343,2957,-26397,-16905,4049,940,-23208,-14717,-12764,-22999,28605,26180,-32249,-1488,-27301,-15569,-7297,-16598,2384,14915,-498,-18110,9672,-561,29982,15717,-10936,8428,-26542,12886,-21557,-12074,-23093,32111,18744,-19536,7550,19494,1147,20205,7844,-15427,19814,-26164,20880,3011,17871,542
,10769,21406,-19363,20208,-4374,-30064,19096,-7096,-17315,-1100,-23147,29651,-23784,-4028,8574,-23504,-21396,31900,-10540,6378,13883,-15376,6854,-20888,11429,8975,16601,26405,1192,-21111,28914,-5785,332,-19224,26122,-30054,-1419,-8332,-32743,11522,-29527,11162,-25917,4434,19312,23158,-4595,12910,31688,-1659,3713,-18606,24302,24152,10629,23784,24338,20248,13825,12365,18439,-25107,14647,19618,-32676,6250,-21701,-17495,15987,12388,-31947,-10407,19420,10402,1419,15907,22518,23388,-7880,-24600,-22153,26686,9806,-17326,15956,-14687,-24863,-1933,31321,-9755,-3033,-15746,-5668,24440,-4856,470,-1651,-21816,11220,14458,8899,-21636,-13160,580,16236,-31257,-14334,26339,-25164,-11538,4545,-18699,-26854,-19496,-20061,2590,-13326,-25215,23964,12962,6673,-3901,23442,15177,14367,2939,12670,-22321,-25318,-29238,-21150,-28226,30268,-8854,7951,12165,-30570,17346,-14441,9250,26128,22744,5052,-2210,21366,-21037,-14064,-26826,2790,3070,15699,10923,-6908,3164,3903,-3124,-12658,-25586,-30941,13214,9305,7509,-607,29271,5475,17452,14678,19914,3033,-24756,-25265,30480,2949,-22065,21119,20342,4774,-29144,-20714,11729,31125,-25039,1237,597,-28605,-14381,24215,-25182,-10600,-25231,-7491,-17462,20764,-19101,22470,-21143,-11498,5377,-9530,17782,28323,-14559,-9973,-13083,3509,27214,-3251,10470,20698,-2798,-8999,25818,23570,-4617,9455,3077,17742,-7051,13848,6419,-18286,13105,-1662,19524,15278,29706,6277,-10199,10763,3731,-21545,18537,-27168,-13334,2552,-22321,13387,1079,20117,-2017,9447,16277,24314,20614,-16567,27247,-204,747,29739,23334,4352,-5400,20135,29476,-22795,-27844,7599,29329,31905,8661,27838,2166,-31748,-24240,-12303,7723,-11906,12769,-18051,10890,10221,29609,26201,13011,21291,29267,-23847,-6102,-19050,-29128,498,7585,26000,-28398,28935,16197,-27508,10402,20158,23275,21965,23254,-12022,22264,-27772,7323,-12326,7968,26126,4758,22430,-9397,-25312,2815,-3096,11549,31164,-8736,-23674,11709,-32095,17738,-11757,24844,-1220,-14558,-17849,-18973,-19597,-10456,-6995,-18156,-20774,-12804,6687,18295,6
604,15562,-16976,6862,-22218,9881,-4771,-17637,18837,9581,9817,-11693,12154,-7231,8590,10242,28480,-308,-6720,-13653,-5824,21461,7782,-23556,-28492,10121,834,-26129,-19029,18642,20307,4883,-27866,10591,-26616,11908,20133,5345,21983,-25547,-11517,-5034,18503,11786,-6799,4934,-8068,21454,-9912,-22435,-31578,-11216,29902,-17914,-3170,-15653,-9731,29980,-22940,4994,-17052,23969,-17504,-12829,19249,26783,19398,23575,-864,27522,7771,32029,3160,-30758,29662,21563,-29231,-19449,-19553,-12990,31911,29312,-26530,-18066,14841,-4665,17480,-25716,-23349,-7111,-8768,19991,-5999,29198,-27390,21117,12200,-15816,12910,-30841,29916,2194,-4574,31153,11187,-21863,-1339,-28980,-30953,-13980,17466,-19564,13643,-10599,28341,-16294,27988,-2380,3371,25596,4596,-1098,6752,-18333,-6488,346,2279,26437,32003,-28215,-13360,-19207,24285,16489,13085,8206,-14848,-26143,-24352,-6402,10063,-19958,-20778,-3517,10310,12037,-3659,10604,-17028,-7355,30637,14390,-1624,1867,9160,1022,-2023,22732,-12772,12256,-8695,4276,18052,-1850,-19058,-10629,18609,15645,2643,8754,-12923,-13148,-26672,-13473,-13908,3561,18120,29002,-24665,28627,31757,-8452,-16655,30654,-24268,14595,-1608,-17364,-9939,-29649,-13381,-9568,4036,23113,26935,-18707,22106,17445,-14742,-21358,-5601,9327,22752,-18516,30541,-14008,-21595,10051,-14241,-18678,16772,20996,-31957,19166,-28544,30593,8411,16073,1168,-31414,-32110,-5960,-25791,-10542,2531,18872,10696,11869,-28225,-25000,998,21153,27529,-21319,4752,7353,-16429,12428,27998,-2761,-30435,26739,20433,-16196,10340,-8103,-20141,-6600,-11631,-28089,4519,-14646,9196,-18214,-1189,26864,678,167,2487,-24536,-20198,-14799,11472,29535,-14875,-6766,25719,10424,-15948,-19642,28710,-4982,8128,-25400,26168,28195,24549,10122,-20926,21756,29523,5288,8831,3596,-13753,28618,-11284,-11281,-16771,-21065,2645,20611,-20480,-12504,17796,23598,22025,-16326,21963,18883,-6008,1950,20729,20294,-21203,11609,16724,11390,16438,23582,22149,32270,31770,8821,-16273,-14687,6233,-18232,-4489,-8760,-7882,-22196,18269,-24202,-
31349,-6034,18067,9737,-32333,2369,29029,1386,-4256,-20986,-17372,22649,21004,10097,-11736,7900,12147,16709,-27823,10761,-11010,-875,1388,-32283,-6820,-8713,-26301,-20501,-27043,-13855,-10389,29363,-6054,-23525,-5363,-20536,14284,-32664,29545,30863,-629,4248,-9503,25342,-14610,-9968,10283,-3129,2240,4171,-21043,-19788,5221,-7740,429,14618,5759,10965,22179,-16028,14330,-19654,19128,22861,-29925,-21655,-22392,13176,-17884,23482,22115,31774,-12717,-15660,-10809,-28685,6086,-22122,22038,5158,-30911,-3455,-28411,-25924,-15700,-562,-29654,-29668,6323,23022,22427,-31717,1648,-16559,1374,2661,5555,16436,1222,22170,17857,-25024,924,-6139,-25487,7666,-4347,14738,10688,-15232,-13342,70,-25630,22691,-9898,-23082,23202,-31250,26167,-2252,5144,10326,-2640,-20438,17975,-18052,8148,-15495,15756,21394,14732,-9424,9506,25964,-13543,23942,32645,-32692,18081,12197,13722,32395,-24423,-19648,-11452,25941,-28065,11668,11440,-30642,29648,25801,-14474,28572,-2201,16232,31581,-23857,-16814,24553,8741,16229,-8529,-24314,22305,6216,25074,21363,9697,-10553,-25243,13096,12715,22693,31556,-27404,25947,6881,-276,-972,-30437,-29650,28578,22271,-22118,15471,-14991,8782,2157,-1076,-7665,10718,3475,-24991,-268,-15436,-13973,3376,12861,-22,24910,17653,24695,-14096,-3572,7878,-20255,27153,-12701,16177,-15003,23813,26439,5819,18788,10438,30061,-10938,-19743,-1724,-26273,-14030,15847,-24052,7375,-12225,6363,8784,9471,-27341,12233,5058,31852,-2734,25657,25384,22219,-26446,30418,21741,3405,20293,-17740,19252,-1051,21023,-30457,26715,-11695,2766,-25655,31421,-24,-29560,3109,-9096,15983,-1813,24367,15235,18683,3935,24831,-15768,-3964,-11595,7941,19922,499,-17807,28030,27550,-32644,7701,5282,32070,26166,-8522,-6110,10618,2967,-21523,-2872,-13052,26279,-3602,-28329,-31223,18936,-24441,-30228,-5826,-18735,9197,-30182,-24087,25171,-5438,10663,-32301,-21566,-4132,13801,3565,26387,9712,14276,-28013,23045,-28223,-8815,8259,17618,-8827,-29821,13090,31609,-30767,13632,8265,-11052,-20581,32329,-27157,-3479,21240,-25602
,-24414,482,27911,18101,-6819,-32129,-28832,-871,-28751,17581,-27769,-1164,-11241,23662,-27200,11788,20142,-19050,4181,-964,-18452,15233,19311,1349,10127,-32463,-10670,-31956,-17788,31154,-13885,-27851,-32716,-19630,7456,-5560,-4527,21811,-2806,24242,-6071,-12580,-21829,28835,2055,28644,17138,30645,-26913,2520,-22744,18574,13459,20503,3475,10226,-1091,-9057,24884,-23981,-17480,-21303,14513,25060,-253,-32268,11307,-18319,-5154,23943,23939,9837,8636,-21705,20798,11722,-21440,22024,-12736,30885,880,-19145,-8099,-13811,30401,10882,-5930,-23983,24527,-23876,-17952,26057,504,2814,2492,-10273,25135,28354,-328,30332,10216,26688,-12515,-32491,-32765,-22684,22265,-24256,-13270,-8203,27569,14495,-15379,25158,-28289,9013,-29902,6698,-32154,10463,12373,-28727,7125,-13154,19210,-16964,-17735,-2401,12081,12463,-16274,29525,-21509,317,-22862,-9245,6214,6524,2074,-17039,-14959,22156,25488,-32497,-32172,-24362,12887,-27013,22399,30986,-1140,-20613,-1233,-27181,18839,29686,25081,29497,-23867,-25039,-26794,-28121,-20390,30664,21299,-15452,30389,-6525,-32466,-11777,32064,-3552,2,11200,26661,-14894,17088,19136,-31451,-6272,-8498,11420,7380,-1099,23780,-18455,28657,-17101,3305,-17067,12352,9213,-13501,31747,-626,-8199,12428,16967,5055,-30598,16429,1597,-9080,6109,-7100,-32343,29759,30822,-17956,25652,634,-13762,-25044,23757,-12311,-7715,-12661,706,6432,-22251,17428,15078,27915,-14979,-17870,-4455,-14667,9580,-12254,8120,-6929,10575,5238,4528,-21385,4334,-4548,3614,-20741,5742,-7918,6216,16949,-22793,13007,25614,-9862,-32381,8953,591,5247,-703,-30593,29552,-32273,-641,23248,31692,31196,-24997,9994,13911,30506,-20845,-28134,1451,-3989,10961,-30412,-8666,-32176,29004,-10606,31570,16920,13612,-13526,-25249,32432,-14220,-10896,17283,18127,-12841,3878,10332,-18592,9271,8433,20108,-16864,-17561,-30002,-17064,20245,21870,-23556,293,-24642,-6170,-2108,8377,-27153,-14382,-22342,20466,5691,-11022,31801,-23762,-17804,-31936,-727,-2833,31470,16272,14102,27693,-1276,4414,16253,-2802,20142,-28296,26371,
18679,6505,-21929,20149,-11333,-4807,-25622,14944,32529,2682,5045,21193,32552,-22592,1643,-30232,-19899,-22899,-31724,-14427,-30192,31170,-25127,-11090,20351,16031,10195,15608,-26634,-19169,24394,23096,-1943,-19198,19670,20169,20483,3526,-8832,-28863,11399,27844,13011,30888,-5986,18341,1093,-24408,-16590,3200,1323,1619,9161,23174,7617,18645,-24886,4408,18268,7789,-4465,-2523,21912,-21317,-32741,-4935,-16465,4542,-16086,16877,20364,25906,32081,30705,-4757,32081,-14866,-13923,20169,-22606,-16229,-25264,-2609,-12908,21808,-26226,-9429,-21204,22672,4882,-32719,7740,1116,19432,2651,-28876,-21495,9859,24522,-195,-17246,5880,-6913,16544,28705,19953,17992,17361,21885,12453,2758,-7545,-29659,31879,28141,15213,10292,18448,25654,-25686,11483,16508,-31069,-9820,-24118,-31019,-12635,-915,-29238,-26311,-11470,-18530,-19855,20932,30911,11993,9977,6843,29985,11691,21352,-12598,6704,18573,-17019,7516,5508,13945,1648,17333,17792,30083,12313,-63,12559,-4271,16221,8862,9791,2831,-25435,-32521,28471,6156,20690,31364,31151,-16034,-2579,-18102,-992,11891,14904,17170,-32699,-29971,-4264,-17287,23992,-6376,-1839,-26271,18880,3279,30709,-1406,31803,12342,29450,10107,-31737,-2128,-13457,26575,26511,17814,-15682,-32195,25070,-9647,18632,3867,26150,6353,18403,-17759,-12908,16965,-6356,24227,18396,-10242,-17014,-10143,-28182,-19335,32665,-10187,6622,-17386,-16782,6292,1782,17836,-4449,3388,-30137,11811,26498,24218,28010,767,17700,25805,25411,-23119,-32180,15731,-27929,-15973,-10549,-4676,2359,-25876,22776,-2119,-9597,-5890,17113,23351,11707,-8527,-18247,-16799,8953,-31121,13439,-2762,88,8792,-17085,19167,525,-10750,-12161,998,19675,207,-7386,31855,1951,-17376,2253,17790,-12370,19376,32362,8974,-30510,15219,-18663,-4328,14650,5927,20359,291,-10270,-2456,18597,16851,22344,11151,-10538,-16802,-959,-1879,22477,-155,19965,16467,-28590,7363,6616,30339,-16740,-5718,-14763,30026,-16046,26375,40,10280,-2448,-5768,-3653,22010,25375,-28832,24482,-13789,-16484,28340,4700,30574,-12356,-15634,-30132,7618,2599
4,9154,17251,-29492,29774,-16740,15948,32645,26141,12178,27970,-5341,27160,18575,-28229,4680,-10755,5773,27691,9728,19086,-26173,-30077,-331,-13194,14100,8259,14542,28316,22964,10811,22863,-19505,26622,1167,17068,16074,21936,-16258,-26093,-20183,7184,-18645,-2193,14256,-28338,9332,23196,-5772,1480,-23238,-20461,25253,25717,-23081,-86,-31469,25895,12449,-15206,-27637,-19999,8704,-22201,-27740,27408,15748,27435,22295,32612,-20672,4365,-1154,-27257,-28632,30079,-9417,1629,-9226,-15933,27615,-21689,-31192,6110,-31648,24145,-20243,-10837,-24229,12731,18710,-25804,-32621,30303,7737,102,21941,4606,5631,29314,-31392,21013,-13481,-16583,-17033,-14778,11770,-25827,-20713,-155,-10698,31912,-29238,-8616,563,11408,24947,6674,-6508,-7184,-29030,-9656,9205,9473,-7247,-17383,21986,21232,-18410,-9769,-26377,1890,-10586,9846,-8022,20537,-7762,-10306,10224,-29663,-24316,-25107,-23232,11700,32336,21124,-28881,12291,4718,7066,10111,167,-7307,5196,-14130,16069,31780,-18331,-23132,26819,26024,8884,22597,-22107,-8695,-5745,-4902,19084,-10774,14323,-13113,23441,-21010,11929,28704,-10619,17754,-28723,-23421,-14997,4667,20797,17824,-25711,22117,4980,-10040,26874,-22890,12575,-7333,-26467,-6381,-6554,1925,-30783,8563,-17661,-17678,11107,-9935,-26048,-27246,2663,-25569,19428,6443,5812,-31299,16878,-4892,21553,5690,7749,-11445,-21825,14286,-13661,-22856,-429,-23149,-22838,9543,2924,-31076,14521,258,25399,12730,3226,-24196,29350,-28723,-5073,-5427,7894,31730,-15287,2487,13540,15955,-9017,19394,-19948,-19192,27686,-24232,-30332,3549,-18706,18454,11233,8245,-14586,-21441,-6715,10587,-17737,-8125,-10536,-152,5033,-21042,20617,20131,23970,-14610,25300,-28048,9903,-26648,-15093,20879,12585,2412,-10490,-15741,20473,31410,-22552,-30296,-23339,-29316,-2466,15712,5106,-28117,11100,-10994,28611,-6052,-8705,715,28056,17157,-12381,-11065,18502,-10382,24706,22066,12523,-13911,-25072,4510,23090,19766,-29311,-21525,17402,-26956,10741,-17255,-30455,31089,-12265,-3551,3562,-32556,17674,-30649,-27834,-11461,10906,
4398,26279,-24654,17854,-16530,-6363,21320,-86,4527,14627,-5118,-22132,-28113,25565,32725,-8201,9784,29764,-25754,1707,-7230,28799,-20426,8801,-32634,-31715,-23308,-14292,-684,18742,-7727,-28832,-19710,-15513,7478,-6423,9104,-1195,20718,-8285,-28370,-13186,-23584,27824,18966,-6794,-15672,14660,20493,-3040,14973,10341,26807,28260,23030,-23862,5856,21836,8841,18527,14098,-1404,2573,15320,32364,26353,-18392,21098,-12503,4881,1188,-14957,-1795,2573,-17475,5225,2091,-30606,-28331,-23560,-9182,-22780,-25709,11364,-2338,-7166,15112,-15592,25458,12667,5391,18750,31390,-16842,6220,14641,-8515,-2270,-173,23747,30774,28938,-4885,21753,-14629,5238,-23595,-12822,-29508,-30827,-6105,-22697,22914,2781,-26467,-29971,27518,26169,7178,-15657,8280,-26871,30172,5810,3018,25897,-24521,9702,1954,18072,9643,-26503,19841,22453,23989,-9407,9141,17745,23738,-18879,27756,-3307,-19292,24985,-12136,12624,18170,-28212,-8227,6163,-20575,-5854,1909,2342,13811,13777,10586,3840,-3605,-31435,-14680,22340,-7339,-24399,15030,-19325,-6389,-7371,-22259,-26558,4519,-4531,27212,28839,-15355,-1844,-19695,-26423,-26395,1344,-7431,7402,22952,13407,-22902,4910,-23111,20077,-315,2603,-28583,-23698,-8652,30040,-21048,-22218,-3675,-29698,-18097,21420,10721,-20715,27759,15259,-9825,22122,-22949,-12392,18359,19459,6038,-2405,-25232,-15219,21527,8575,-29107,3022,-25163,12852,-30450,16011,-1604,-24289,24337,-13612,-535,-15343,-24931,11171,3961,-22334,-21605,14414,14925,-15110,19481,11638,-11384,-24610,32470,-8305,-23762,-20317,-9070,-5838,-11698,-2018,32094,-30183,-18689,10000,10301,-19193,17613,-19057,-7301,20518,21100,-32732,14350,-5088,-6187,15098,-23847,18869,4265,-25560,-21666,6674,-19221,-14029,-1050,24527,-15143,-13878,29152,-16172,-10476,-13592,-19804,31274,-3619,-1325,8278,-10802,21785,27056,11464,-17189,-7535,32485,-19117,-7503,11543,31896,27562,7282,-25041,22599,-32290,31111,-28268,-6843,-21425,-28544,13418,10990,22488,8519,-28830,-8516,24301,-23355,20152,-24909,-13568,-27398,-10525,-7441,-24958,21387,-234
09,-22316,-17421,-28384,-3959,-19862,27644,22023,29085,22585,-28448,-27966,13530,19214,5999,-4950,-26204,-10653,25636,15378,-16594,-24125,-28421,15307,9481,-25969,16335,-28838,18696,28390,6432,30126,14082,-632,-11946,-21728,-1210,-4154,26659,-16752,8991,19040,-3093,-23103,18525,-29652,-11081,11468,31561,28037,23875,10783,8423,-13266,-28664,27217,9034,677,-30637,7740,-2433,-15587,-21370,-10800,26943,-17086,9912,1698,15055,25420,25032,-25332,-16532,-4422,1586,2050,-2622,19725,14390,-21980,-5284,-28831,-16824,22356,104,5509,-15475,-29121,5516,15792,-12060,4360,20497,10388,19066,-22190,-1843,30237,-6304,26443,12595,3633,15506,29642,-26251,23737,24746,14605,-25368,-15417,24417,18232,-26676,15978,-19407,12585,-29624,10025,-27774,-18068,9915,-31981,-3616,12208,-19031,-12035,-29529,10145,-24949,-474,12613,-5284,25011,31967,-4479,26951,-254,284,-11112,10162,7106,-18860,21515,10553,-27712,23573,-10323,-9,-15403,1002,6178,-30971,27944,-27493,-13682,-19037,-4244,4320,3784,-25710,-17211,13114,-19616,-31873,3673,-22472,-2477,15994,24619,2006,-8557,21907,-16629,-5212,-20354,9216,32276,28254,-31404,28049,-32523,-22632,8260,-18623,-4194,-9855,-4073,28222,13674,-25910,-27054,-13381,-29866,-5614,4839,-5588,7374,-22531,27647,-19062,27755,-1266,21703,20617,7842,32288,-12741,-17344,-15985,-1537,-28357,25450 diff --git a/tensorflow/lite/micro/integration_tests/seanet/conv/conv6.tflite b/tensorflow/lite/micro/integration_tests/seanet/conv/conv6.tflite new file mode 100644 index 0000000..8df0203 Binary files /dev/null and b/tensorflow/lite/micro/integration_tests/seanet/conv/conv6.tflite differ diff --git a/tensorflow/lite/micro/integration_tests/seanet/conv/conv6_golden_int16.csv b/tensorflow/lite/micro/integration_tests/seanet/conv/conv6_golden_int16.csv new file mode 100644 index 0000000..5213e53 --- /dev/null +++ b/tensorflow/lite/micro/integration_tests/seanet/conv/conv6_golden_int16.csv @@ -0,0 +1 @@ 
+-32768,-32768,-32768,32767,-32768,17968,29320,-18318,-32768,-32768,-32768,5344,23756,-32768,-32768,20904,-32768,-32768,-13508,11455,-32768,-32768,-32768,3642,-32768,-32768,-32768,-32768,-23406,-12775,-21525,32767,-32768,-22486,-14644,-32768,-32768,-29938,-16779,-476,-29037,-28959,-32768,-32768,-32768,-32768,-32768,-32768,12222,20420,20187,32767,32767,-32768,-11451,-32768,-19203,32767,-32768,-32768,-32768,32767,-32768,9422,22234,32767,-28742,32767,32767,-32768,13068,-32768,7825,-14947,-32768,22754,-31240,24730,-37,32767,32767,-17573,32767,-20486,-32768,-17145,32767,32767,25182,29524,-32768,25914,32767,32767,32767,32767,15975,32767,32767,-11224,3284,-32768,32767,32767,32767,-21711,32767,32767,15507,-32768,-32768,32767,-32768,10190,32767,32767,-32768,32767,-32768,-14512,-32768,32767,32767,-5480,-9480,-17612,11889,-13911,-19855,8726,32767,14743,32767,6559,-31966,-28069,32767,32767,-32768,-32768,-32768,-32768,-32768,-32768,32767,-32768,32767,-32768,-32768,-13319,32767,32767,-32768,32767,-32768,-32768,32767,32767,32767,32767,-13513,32767,-32768,27761,-21921,-29006,11002,32767,-32768,-32768,-7282,32767,-32768,32767,-32768,7487,-14132,32767,-32768,-28232,-32768,32767,-3988,32767,32767,18407,32767,32767,-9235,32767,32767,27833,32767,11972,10666,32767,32767,-21392,27545,27637,32767,25650,24412,32767,32767,32767,32767,32767,-32768,14277,20270,32767,32767,-32768,4006,32767,-32768,32767,32767,32767,32767,-32768,-32768,32767,-30675,-32768,-32768,-13062,-1963,-17178,-14030,15018,-29133,-32768,13320,27108,-32768,-32768,-32768,-32768,32767,13148,-283,32767,32767,-32768,1668,-32768,-32768,-32768,4511,-32768,-32768,-32768,32767,-7846,-27237,28525,-32768,32767,-22085,10242,-6229,32767,-32768,32767,32767,6835,16947,-32768,20922,-32768,-32768,-24562,-32768,32767,17956,32767,-32768,14669,23507,32767,-7327,32767,-32768,32767,-32768,32767,32767,-32768,32767,27264,5311,690,32767,32767,32767,32767,-32768,32767,-32768,-32768,-32768,32767,32767,-32768,8554,15508,-16039,11911,32767,6968,-10572,
-26381,16518,32767,15958,32767,-14222,4950,-32768,-26757,-32768,23520,-32768,32767,32767,-32768,-28359,5643,-32768,-32768,-32768,-32768,32767,-4215,-32768,32767,5995,-3062,-32768,-14025,32767,-32768,29442,32767,4995,-32768,-32768,-32768,-32768,-32768,-32046,32767,32767,32767,11494,-12880,32767,-25853,-15215,32767,-28645,30233,32767,32767,32767,-29006,32767,-20319,-18278,32767,32767,20462,32767,19289,32767,32767,32767,32767,32767,32767,32767,32767,32767,14093,32767,4676,-21028,-2292,29393,32767,-17862,23064,32767,32767,-1813,32767,32767,-10887,-32768,10081,2180,-32768,-25190,32767,32767,19276,-32768,32767,32767,32767,32767,32767,32767,-7617,-32768,-32768,-32768,-32768,-32768,-32768,32767,-32768,-32768,32767,-32768,32767,-32768,-27090,32767,32767,-22346,-20721,32767,-8188,-32768,2520,-32768,9838,12065,-32768,-32768,32767,32767,2949,-32768,25666,-32768,-32768,32767,-25572,-32768,-32768,-18077,9626,32767,-32768,-32768,-32768,-32768,21088,32767,-32768,-5938,-32768,32039,-17306,-32768,-32768,23186,-11046,-32768,32767,32767,-32768,-32768,32767,12879,20894,17276,-32768,-32768,-32768,32767,-32768,-32768,32767,-10483,-32768,-3369,-32768,-7499,-32768,-32768,16300,32767,32767,-19743,1225,32767,-32768,20557,-32768,822,4654,-32768,32767,-32768,-32768,-23158,-32768,32767,-32768,32767,32767,32042,32767,-31933,32767,32767,32767,32767,32767,32767,-32768,32767,32767,32767,25806,32767,32767,27934,-32768,-32768,12050,32767,31369,32767,32767,32767,32767,32767,32767,32767,32767,19406,21869,32767,3618,10018,-32768,32767,32767,32767,-32768,-32768,-32768,-4675,-3011,32767,-32768,9205,-5893,22053,32767,-4071,32767,-32768,32767,32767,20115,32767,32767,-28042,-32768,6811,25178,-32768,32767,32307,-21351,-32768,-32768,-32768,-22479,-32768,32767,-1688,-18361,32767,32767,12339,-32768,-32219,31884,2257,-32768,-11530,-32768,3064,32767,16984,32767,-32768,28669,-32768,14856,9513,-3280,-32768,-20429,32767,-15333,-16670,-32768,32767,-32768,1273,32682,32767,32277,-13194,32767,32767,-3762,-32768,-22890,243
6,-32768,32767,32767,32767,32767,4662,-32768,-19294,14997,-32768,-32768 diff --git a/tensorflow/lite/micro/integration_tests/seanet/conv/conv6_input0_int16.csv b/tensorflow/lite/micro/integration_tests/seanet/conv/conv6_input0_int16.csv new file mode 100644 index 0000000..dec4528 --- /dev/null +++ b/tensorflow/lite/micro/integration_tests/seanet/conv/conv6_input0_int16.csv @@ -0,0 +1 @@ +31444,16063,-12996,-19487,7034,2259,-24416,-11954,24986,31088,-16191,-7443,24042,5378,23664,-19565,-13315,9062,8481,-17904,30236,-31061,-12096,27236,-4717,16223,6183,-12115,-10121,-30297,-9643,-15169,-2616,8517,30289,2007,-15532,20246,30792,15397,5344,-20428,-26068,-30071,23942,21629,-1045,9639,6896,-3208,-26976,-3880,-1671,-12119,-25110,-14595,286,26551,-215,-9858,-30893,-22606,25552,-579,-17370,12900,-3856,1063,-4703,28157,14375,32311,18594,3018,30601,-23271,-15258,10813,296,6338,13422,-15942,-6524,21520,5062,21975,-28768,13634,-3725,-22276,6080,-8102,-9290,-22215,21152,14483,21635,-14527,12642,20478,-12049,19933,-11796,10394,29078,2739,-9147,22082,12129,13242,-22639,22293,-4872,-2885,-573,6100,-5746,6745,-23527,-2724,-11792,-20642,-29872,-21495,3221,-32471,-6494,15357,-25949,3057,-21732,31765,-31312,22259,-4175,-9062,17654,-28880,26071,-21037,7473,-3123,13231,-19678,-31650,-30872,-8264,-29289,2132,-30454,-18800,-7362,-7519,-10261,5749,-3785,-16713,-12897,-3319,19958,10244,-23812,-26996,-26490,31241,8423,11177,18171,5123,-8989,-32050,24893,-6961,-6971,3210,29147,20806,-13929,-8200,-3349,-22645,15002,14710,-2361,3049,23178,-6302,22139,28791,-6723,24691,9948,15728,7346,24718,32646,20111,-16413,6782,-32553,-1567,1163,2840,-11549,-3225,11145,-21310,-4886,-12879,16644,2097,-14938,11679,-16557,-15933,-13954,-29347,3966,-29319,-16958,-16369,-32043,31854,-24093,-4611,-18004,-3150,17240,12241,285,20633,-17880,6612,23003,-3961,-27216,-22956,-3584,-32158,19990,5414,-32503,26650,15461,-26206,8142,-26645,-18036,31350,9378,-4184,9500,10238,16256,-31167,-25710,17757,-15321,-14624,25248,1426,1637
1,-23860,4424,15568,-9487,13037,-4303,30239,-10187,732,18161,596,-30593,31574,21524,28953,27957,-23208,-5879,28879,14989,15271,21179,-30035,2937,-20284,27348,-25896,-28360,-14437,20581,29540,-22391,-8674,17557,23501,3942,-24879,-3896,-15554,3265,4556,-27665,14437,6769,32213,-9328,-14919,30348,-13427,17771,-25648,-17876,-16380,25338,-16296,9448,32276,-24339,15316,7011,-2054,-10271,-28657,-30394,12425,-17673,-31279,-12528,-25412,19064,12377,10238,-16258,-31607,3243,15941,-28230,-13062,10203,15935,15411,9715,18507,27927,-29693,-4717,-23075,-29582,-362,-14515,-6780,-30198,-112,-4631,19288,18596,12240,13210,-18953,31066,-10690,31222,3197,13071,6109,32004,-12001,-6589,12201,6607,235,-11333,-27538,12804,24628,-32711,-28925,11368,7613,-4253,6558,-6360,-7991,-27110,-2617,-21901,-15401,18152,20423,-18905,-1939,-14898,-6196,-26288,13579,-2973,-18351,29734,-3201,28219,-11362,17670,6281,-26012,14680,-9112,-7605,21699,-27142,-9011,-28873,-7323,-6783,-4755,13083,-17325,-19231,7515,-31470,18664,-1928,2535,11125,10954,-21578,-7181,10859,-9296,30910,23900,22358,-25013,-5505,6160,5643,28563,8716,28076,384,20001,25061,3148,10321,-8893,2432,26276,-31289,1393,-1138,-14979,709,19319,-146,24779,26763,26503,-2676,9743,4014,1610,17173,2951,-26564,-32386,30469,-1385,10703,-2986,20220,29435,-14692,-18675,-28400,22167,1482,-28378,11491,-23796,-10420,-27895,-30397,-28362,-5059,15496,-7608,-1285,-20245,7156,25584,21942,-19423,-6294,-19310,-11643,11707,-3254,-2444,26793,-4465,4236,1270,20047,-24340,-12028,-11467,-6837,-16729,31707,4875,23215,-21760,12946,18961,21280,14401,31003,3143,29593,26494,16973,-28784,-17065,-6340,32511,31219,8793,-8610,-17990,-4542,15348,-17495,17153,14881,19709,17209,32466,-20736,-415,20002,-29534,4293,6107,-2533,13901,-283,-15752,27710,-8734,-31571,1954,-28464,9185,-9087,12693,24044,-332,-13979,-16839,22896,3590,-25225,12768,-23916,-17804,-14082,2869,14357,14439,21840,11176,-10838,26338,-15187,-13882,-15972,-26945,-4931,-16526,14781,28083,21292,-11228,-7430,25293,-43,-612
3,-30937,-8064,-28341,13523,-30422,-3891,18098,25292,30863,12844,16676,-16046,-25189,-28566,-7589,30462,15693,-8058,6111,21577,-3722,-16033,-24557,11316,23320,3796,32345,-26061,1114,14273,-13776,-9332,20831,6026,22274,14870,-18279,-6012,-27774,-2322,-18791,-22190,-32301,-26773,-3046,-16286,11150,-22309,-21620,-28986,-11598,7227,7539,31986,20886,7041,12231,-2875,-26896,31100,-31008,-11464,-28698,28060,-4171,-19893,1515,-29136,12535,19661,-20605,-23957,-2065,27634,28361,-31333,-13199,-15521,-15772,-3320,-26062,-6234,31605,-16338,29954,-20941,3771,26843,5564,30834,-16307,1806,12283,12544,-2699,-20651,31617,3558,-28172,-24357,10418,375,-29120,27403,-19052,-12821,2830,26145,-12380,21648,12914,-30690,-11504,-11791,7181,18049,-30583,-14291,-22715,-25241,19633,27225,-5693,-15038,-11669,13223,-16242,30236,4707,-17989,23684,24806,25700,-1796,-18448,-22676,30056,18614,-6318,-5066,-22872,-7781,-6010,-17709,32532,27265,15012,-11943,-2196,25493,-28402,32522,-2353,32201,-22478,8064,-1907,-4691,-21445,22764,-22026,29083,-506,4166,-28959,1387,-10942,-24406,12221,4028,14988,-9755,31193,-11093,-9181,264,5542,-5953,30697,-18073,14217,-25201,9515,1562,-21342,-8330,-26323,-172,29519,-25963,27272,-25318,11325,12251,-11317,10718,-15879,-22346,-23137,27305,18232,28632,30041,-13861,-969,-19960,2060,25205,19047,-2787,30937,16064,3220,21909,-22608,-29883,8844,-25819,6698,6322,26691,-27542,1071,21400,26724,-11870,30989,-21479,-30293,1580,-957,-8052,-4604,20944,1525,-31732,24269,-13807,-28233,-31610,-31684,29579,-23936,-23403,-8682,-7121,27902,-1394,4241,29491,-16606,-17110,-31714,1310,-25299,6734,20135,29273,-24877,31194,-14406,2415,16740,-3924,25635,-14916,32371,-32371,9396,-3822,-31522,14861,311,-13760,-20045,4594,-24469,29939,29918,15054,-24517,31923,27608,-13001,-15754,-12043,17657,26806,-27528,5612,-11798,-1733,7993,-20526,-5583,-1359,26972,1593,18627,25202,9635,-4572,31986,-32228,-22814,-7242,11599,30276,-17688,-3876,23603,22442,-28656,32634,18363,-5469,32492,8470,27145,-23987,28773,-1703
3,12320,-17822,10932,15012,-1985,23583,-22892,-27218,-2024,24471,-17751,4683,5123,29218,-10607,-10005,11771,-24064,17061,-24040,32145,8760,31974,24423,7789,10106,22695,-17208,-24087,-7644,-17905,21288,-26245,30245,8083,12073,21204,-29570,20510,-24808,10894,-20384,19190,-567,-4602,8065,-10454,4004,-16791,-3455,20507,-14783,-11889,18957,-19908,-8055,14742,19862,14811,16623,-19311,24247,15322,-4640,-8824,-29339,-803,-20433,-13510,-15709,14179,-11858,31571,18370,-29636,-5629,28181,-12669,-15942,24861,15467,-20793,21518,31008,19906,-5674,5696,-15739,-4706,29618,19702,-17464,-25550,-30846,-27597,15362,4602,-6386,23115,20724,-14070,-13488,-16035,-31870,-3088,-3953,-31994,-5,-12736,25989,32180,8204,-11712,-20339,-16089,463,-19977,9371,-8936,1388,23688,11870,-32654,28074,-8503,18992,-10656,-10171,14179,2729,8322,-9039,828,27848,20982,7073,15556,-4255,7849,-8406,-4738,18834,-17354,-10626,-28397,-12335,-26964,3350,25235,12174,-20167,29637,26043,4911,13841,-21876,-7361,-29779,11115,-4156,-5718,9032,-15115,10389,-19604,-4500,-726,29824,23748,-4998,11581,8764,22193,-30304,-4868,23930,10787,28283,32747,27987,3131,29916,-28716,-7654,25387,-21049,-14234,-22400,21088,10629,-18425,-9087,12640,18468,-8876,-5822,4051,-4751,18507,-15659,-21862,-22149,-1589,-22220,-24859,-12298,15718,-14050,-21019,-9414,26324,-7217,-848,-6036,-27663,30804,-8855,-25903,-26346,-15084,-6452,32036,-28182,7122,-4495,9557,20474,28815,-3092,-23580,-24245,-13360,23949,2461,8392,32339,-25085,17689,-16701,29411,-8846,-31765,21818,32654,-10722,11026,-13673,-1579,19842,2350,-28052,-28295,2566,28802,1048,-7635,-13188,10196,21669,24339,17058,-32458,-9021,-17865,14872,31395,27133,-6445,-1662,30613,23073,-9893,-14864,3495,-18263,-116,30032,-16216,10039,-25602,-16609,-32526,1070,19191,8949,-9453,-17003,19525,29328,20307,-6018,-16542,-31792,-29507,-4324,-24089,4052,-674,20610,-22926,-14737,30164,32009,-8524,13364,25145,-22230,20256,-2446,25104,9099,-4130,217,-2178,31747,-29979,14426,10260,12538,31222,-8921,-18345,14416,365
0,-26044,-6581,4590,-31357,-4622,30926,-3440,6081,28218,-12010,15852,-9780,-22456,-4316,-11878,17316,-13342,-14441,-24610,-4836,-1563,-21255,10503,29549,18379,23918,6877,31585,-11083,-12065,-13869,19204,3893,16557,-16638,-6990,-6550,11397,-13452,30062,-18396,-437,12614,-6382,-25564,27525,8189,-18716,-19781,22964,-1344,-208,20618,-8287,-14986,-5199,-939,19,-5823,-27002,8018,5972,26702,6602,-17526,-20661,-6003,14380,-2902,-27923,11205,-316,-27491,30749,31458,2195,-8944,21722,19017,-11614,-27394,-6437,-11398,25254,-16299,28840,25632,-26994,-3864,-10744,-29877,17517,-29719,-16760,-6689,-25079,-17978,-30636,-13801,9006,24262,6839,5578,-26156,22560,-15074,-23951,-19024,-24133,-22560,-1332,-8199,30108,-1668,19457,-14613,4700,-14727,6844,-13508,-21872,-27290,-11470,-14292,24391,-14543,22781,-4112,-26457,-28595,20220,30095,-7984,21855,31236,1623,-26141,29674,-30810,-19983,-4925,-18053,28452,29355,-26297,-10612,-2064,31013,24835,-28428,-28596,27403,-2006,-29231,14084,6928,4868,7501,-15372,-11313,4400,-22307,-18556,18021,-7440,-6384,20075,-28716,-22936,-12959,-6527,8593,-9397,-25492,-5848,1891,-4817,9972,-32402,-1900,-9305,24004,-21172,-26586,-29972,23315,-15690,13550,32686,10587,-17737,-2163,26747,5182,-1427,18776,-7979,31128,25568,12339,28409,8401,-29178,-28616,16193,4714,31208,-14165,28934,-10751,7302,-28735,8366,3072,9158,-25694,7441,3309,-29040,24699,12197,15716,19631,-3488,26178,29267,22706,-3820,-23497,8998,-13680,16425,26819,5772,-15978,-28132,4557,-20257,20083,-11609,-15669,21725,-30187,-257,-27433,-401,-6102,-13509,-4481,26971,14337,-18252,6249,17167,29276,-2718,8632,-93,20952,9045,1862,-19683,15924,29239,-770,-871,780,13321,-12438,25740,12646,-12217,17093,8044,-19032,-14848,17740,29261,-1687,-19676,22891,171,11059,31464,-8123,12363,-25395,15297,2948,-1546,-7,17173,-1616,20607,9811,9603,-30075,23805,-19443,-103,14,15839,-29914,-25862,-31874,730,-14403,-19081,13344,-22953,-3227,-17849,-2191,-27724,-13710,-27187,2711,-4170,32387,-661,17250,-31226,-27493,32102,17413,834
3,-378,15989,10419,-3597,32714,-18588,31113,-27098,-4766,-27959,-8898,28647,28426,-26386,-30057,-16691,-962,-27630,28933,13295,5651,24960,-10442,-9019,-8259,6632,-28992,30453,28246,-14251,-2589,30965,-8969,-31901,22561,30686,16872,13991,23614,17799,15635,-7539,-17498,25751,-4445,31939,12267,-23927,5661,28092,-910,-32744,1451,-4941,7967,12971,30921,12076,8173,30166,8253,-10966,-16670,1147,18212,19998,11505,-27849,28892,1306,-12729,8320,-7044,30973,17224,-17114,5293,-30131,-13718,-3908,15485,-18644,5219,-16210,-4714,-28571,18898,-14141,7749,-17555,-7720,4433,2662,-19955,16232,-16689,-7927,5740,10779,31780,22177,-7655,25757,-9161,-7416,-23293,-7506,-17569,25350,24279,27916,19987,-11211,-25922,-17600,-22566,6784,24778,-32632,7489,5812,-1958,21857,-31133,-8579,24818,-13734,-23942,-3865,16168,16518,27143,22930,15675,-21372,24706,-15438,-7274,1355,7748,-31884,-13237,25441,18591,21652,-31405,-15710,-3946,-22558,-18904,-15997,22431,-12309,3759,31631,17822,-10085,5061,268,10693,-29290,13899,-28329,24033,-17837,13347,-7452,-22764,16027,-21108,-23515,-2428,-233,-17024,-12873,-28476,-2268,-2062,-31575,11875,16058,-16096,-2297,-24156,-16620,-26993,-10674,23523,-18041,1994,26480,26985,-11442,10930,21689,6493,-28249,-18243,-20217,27555,7740,-21748,23632,19568,13811,-18666,13532,-18678,-1294,20294,25882,2339,24164,20389,29480,23247,13562,9496,21593,-12297,15906,28057,-31835,-1029,10249,15960,6998,-21734,-15271,-22098,2401,-30385,-24339,23803,-25582,-846,8295,31295,23716,5643,14456,-25088,-8431,-26098,-21392,-12259,-28659,12350,-5315,30415,8080,30355,-3861,7909,19121,-9219,-27236,-5673,17244,-30424,-11857,-5863,10519,9307,-8355,20179,31129,7596,21043,16087,10167,28752,-28534,-16579,-29579,11838,6973,-23851,19742,-23183,21207,3586,32271,-27849,-4393,28103,-23533,32336,-17061,6046,-26232,-12145,-3471,-3926,-23312,18171,12764,-14677,-31735,32744,-5458,-21899,-27905,19534,9350,-16429,-6428,-17724,8382,4235,24803,3791,29030,19821,-5870,-22812,-1053,-20098,30944,25075,-1191,-21561,3936,115
71,-6901,25791,2279,8115,-2257,-8348,17842,11527,7843,-7233,25288,-22922,20989,15855,-1618,31277,-27925,31742,-24455,22229,-11787,-8116,5050,18643,20699,28133,11585,16307,20010,-14148,8146,-18397,29470,-30119,4641,23027,26192,5631,13120,-1248,-28353,-23762,18717,-1173,-17001,-24118,1752,-9955,-21185,-17041,10049,-32693,-27633,16057,-21705,24496,278,-28455,26436,-11264,-20926,134,20600,-26069,17516,-27447,-24594,16987,-27117,22064,-4338,-12468,-9932,-8237,-25749,1910,3200,23912,31991,24166,-1401,9276,17836,12963,-27029,-18728,-3005,6533,-15777,-26721,2853,-8193,31388,-26924,-16738,11463,-11431,282,11783,-12316,-22866,30155,32421,29506,32584,3425,-29612,-3110,-32413,16046,-12025,409,-11202,20516,12908,-11756,28824,996,24795,8426,-9584,-31933,29878,10879,24817,4488,-5882,-16348,5822,31805,-11624,22781,30214,-22657,-15806,-27233,18962,-23112,-24903,340,-7864,-31839,14792,-9644,10999,11538,-23959,-20762,-18739,-22620,-25613,15544,28448,21953,-23,19665,-26837,13404,-85,483,-1147,3773,-24432,9528,-16515,16748,31310,147,-10051,31906,-12363,30337,-13135,-26533,22225,13366,7705,22386,-15836,6207,-4585,24910,15470,-12475,-25252,-28305,-24089,4614,-13019,-8961,-19938,14872,-23676,22157,19104,13843,-24437,-14782,-25221,15932,-1738,-619,25082,-24463,-7985,-22687,-585,31557,9351,31335,8290,14516,2635,-14957,-11381,-14710,28554,-5980,17658,20594,22192,19784,-7999,19963,-22939,-7360,-16283,11093,-20432,4215,-30197,14751,32248,2220,16483,18233,-31247,3979,6060,28025,-27501,-27867,29413,14401,10325,6071,-1793,-21872,-8112,-22115,-19117,31925,9043,21314,-200,7930,-17560,-28403,4772,-1604,-30442,-25645,-15800,-24854,1762,-8131,-23136,-2163,-7460,-10450,11433,-17074,-8646,9639,-273,-7005,-10828,22424,-6957,-30190,-16050,-5260,-20058,11723,-12912,-25902,-23421,5335,-14206,-7378,-8098,-18899,19816,17671,-5515,224,12493,-6414,-3615,32618,12095,-25333,12148,-3680,-28103,61,-6260,22981,27099,11803,-11366,24375,-31401,-19511,9837,-9118,535,32584,-28148,864,-9138,-32511,-21447,605,-23814,586,-1
1068,-17879,15827,27441,31259,-25226,-19911,-14418,21256,-12354,-22443,10638,-25845,-21973,-21378,2452,32639,-29222,695,10996,24805,-18117,7706,32182,4382,695,4133,2124,-25538,10540,-2194,-10573,249,-10260,32153,-30134,15862,-583,-9740,653,31819,-12312,10486,26162,22720,-18931,31457,14678,-1805,13417,-24602,-10736,-24705,15123,1116,966,-9564,11190,-4927,7673,-31866,29234,5090,-428,1055,-11910,14376,6553,19243,11295,21503,-22081,-10496,23545,-19708,18444,-8717,5795,-5261,-15303,-26341,21104,4953,-11405,28223,3173,-1303,31167,-19740,28506,-8916,-21178,-3483,8629,20864,18281,-5555,7917,5314,29904,16707,-12365,-19661,-7796,-22132,21963,14849,18529,-27787,28095,-3319,-13411,6911,31643,25970,17726,-17085,-28232,-23473,27424,25355,-27970,9020,19932,-14345,-19932,-12903,5611,-25392,-138,-9201,-12634,-2678,6013,-29878,26599,-8010,-7516,-23974,5835,-865,22344,-15175,-2741,-22671,-16564,5274,209,19876,-23264,25899,-26458,27652,25048,18629,31988,32214,2343,-2888,-20742,-7802,10967,-2342,10404,19598,32035,-31994,-9962,14549,10653,17421,12359,14223,16153,-17533,6783,365,2429,-30249,10183,7744,32063,19888,24103,-19264,-20364,32315,-8248,15126,-31680,32462,2884,-3,12619,23397,-25663,-12966,28759,-32331,16364,-8057,17948,6389,-27077,30934,-298,5840,5156,29650,-7178,31339,8697,21309,-32239,-17282,921,-13414,-30142,-20944,-26728,12248,-12462,-19095,-17425,-1948,11901,-17198,3280,-22298,26603,23518,27368,15200,-11793,-6690,-14755,-15772,23550,31414,-9822,8720,-23722,-20776,-10268,30052,-20438,7225,14166,-24684,-6630,-30791,19765,31418,-20638,-13082,15435,28432,-24963,-12468,5025,18618,18396,29276,-3201,13215,22912,-16461,17468,11624,4485,-5547,17804,15768,-32107,4434,-11858,-22673,31435,-11051,988,24391,-11168,-27579,-31645,5273,24149,20008,-2845,-15325,-6121,-6253,-9561,13435,-27953,2634,-20064,927,-8925,29079,-29426,31009,-232,9726,-14960,10984,-4374,16003,-19657,-12067,-11976,-14527,-9579,-7415,-17604,7165,-3355,28413,26163,25584,-2264,-8032,-27644,-6917,25531,28329,-11268,368,10163
,-15300,12412,11267,-6760,-22201,10555,-25987,-32261,-9355,-9337,-25420,-17714,-25202,-11445,27418,-7174,-20343,-7386,29062,29644,-3234,21268,1054,-31724,10313,2674,-9826,17222,-30444,13536,-2389,25547,-5668,31900,-29587,-5710,-525,-21731,18324,2365,20063,-17272,-20420,8432,-1011,911,-3561,-2820,-29009,-9894,19444,16520,-10144,22504,-9824,4126,11421,18094,-28258,12496,-9705,13363,-24790,-2573,-19116,18608,-20167,28376,-10264,1278,30235,4040,30259,30141,-14123,-15006,29137,24621,-19705,3699,-10644,10093,-4604,-10660,-30434,-815,-22996,-7262,-13157,1053,11436,11453,-11406,-4495,15351,-6569,-12505,-20483,2428,8881,-2441,-11995,16604,-21718,6460,-19182,32733,-20531,-17327,8233,-9791,31004,-14475,5805,11343,7242,26374,-8248,-27200,1910,-26899,22041,23069,-32697,-8902,28354,-32386,-7755,25382,-13571,-2387,-2977,16929,7525,-20874,-32375,21468,-19973,32018,-5874,7982,-16626,25833,32316,-4390,-9611,24884,-9528,-17764,-1495,481,23393,-17345,-3893,29651,25314,-2825,-19317,-3797,20208,-5841,13198,29729,12062,-31750,32216,26231,-28271,19256,6117,24614,-17815,-1872,22308,12658,-17014,11458,21637,-9600,-20419,-15210,26459,6856,12797,14583,-15242,26125,-18810,-20961,6250,-29822,-6561,28194,2538,-22295,20546,16831,15029,-13989,23313,-9586,-13286,-30338,1968,11274,-4690,-24125,-3193,15145,32139,-22748,20668,25575,-24471,22261,30214,-8904,-15034,18646,28612,-2926,-12988,19154,-20718,-7241,-10701,7486,-23346,5840,-22663,16449,-725,3670,26211,23059,-2949,17567,-18270,9100,516,28992,29703,-2553,18046,5917,30844,6746,-3989,29860,13146,18950,-13432,-4500,-16144,12943,24598,-9446,-26411,-2310,19018,-2110,21073,-29903,-21889,2629,30229,-1315,-7386,9769,20029,-18186,-17685,28132,-28145,-992,-12778,24893,-8581,11467,-17075,-27835,9394,30515,23007,17022,-8014,-28391,-3984,-24500,-8934,-6265,-10744,-16316,24721,-8592,8434,22674,11416,11721,17051,-14364,-24166,1166,863,31819,-6090,-18076,-5101,6674,-20758,7337,6103,19065,-5182,32643,19619,-17865,25955,12909,-8505,22382,-15164,25012,-28281,-28403,
-9974,-22316,-19869,-2054,-7476,19302,-20789,24438,-1797,28973,-9899,-19046,31279,-23904,14991,-14724,-14030,-24748,-16261,-18671,-17340,-14845,21650,25452,25523,-24065,130,25733,-18031,-8175,17623,-19991,29963,-2106,-5010,4079,1618,-7603,-17206,31721,-14344,7108,31776,18704,29579,9125,-14038,29301,20434,-14194,-14684,-23069,2845,-25186,3809,10281,-12953,31759,365,-25543,-22256,-31437,-4536,-9444,-20449,2404,16314,-19987,28869,4077,-9703,-26652,-22509,-9309,26997,5014,-18911,18159,9380,-16385,-12584,6645,22814,-4063,1560,-21545,18403,-13376,-30526,16098,-5064,14220,-1234,-26532,-20839,25589,-9478,25658,13926,-15090,-14914,4932,-24494,32633,-16733,-29353,-25781,9969,31341,-22931,31928,8933,-32048,-29035,-1467,-14389,-16137,-4528,9389,-28123,24808,-9667,-15308,-23951,-10635,-5614,3306,24559,19076,25762,-15000,-16117,-16011,15073,-11,-15582,-2752,11321,1384,22825,5034,-26215,-6418,7893,27193,-26963,-7891,-88,22591,15591,23785,-19717,15330,-28423,13335,-22972,14096,-3173,5068,-13101,1360,26384,27878,6646,-3753,-8774,-26967,-27411,2499,-28481,8462,28875,-10130,22662,-19002,-10508,20210,-15019,-14642,-27359,-6643,-17760,23937,-2791,11528,32556,10575,6771,-26487,10831,-9615,3598,-25987,17591,-172,-21131,-18839,364,-24461,-20865,4233,32256,-16036,-14176,26435,-20042,-8016,12615,-16480,-19151,-6659,-27711,2403,-29867,-7749,25901,5167,24688,24417,-22348,-25649,-627,-13778,-3959,-21724,272,-24412,-1043,-6725,-21146,-29330,22302,29018,10924,5664,30230,-30890,-25803,-31742,11286,-26452,-20246,-13257,-26997,-31684,7767,-25258,6010,10617,31666,-19477,28873,-7416,-26166,-378,29702,1326,-31969,-664,-30345,20557,-24910,31249,29936,-22334,-20858,10443,6365,-18595,-28511,23589,30375,235,-1510,29367,-16947,-7754,27903,-2774,5443,10178,-20988,-14773,27013,18322,15941,29144,30117,31778,23046,14902,4530,-26319,31226,-19072,9512,-24092,13770,18623,-14615,16965,12303,-6048,-19552,-26724,6758,21767,-28622,23837,-27392,-25432,-29179,14630,-9526,31421,1564,17199,1344,10419,-25373,-18242,-32114,
-31541,-2428,22161,5777,-14716,-3060,20872,-14353,-19620,-133,24126,-11682,14861,-3885,-6150,11299,-29414,-6342,22624,-25725,-10318,-31154,-15237,-12227,398,-6287,-23699,-27706,-11463,7726,-24999,21570,-28767,852,-18638,16089,-17969,-23319,26108,2575,11902,-7326,13732,-17371,-32112,9626,-21311,377,-15093,15854,-15880,10505,873,7943,30862,21257,-1271,-32412,-15288,-21606,-1030,20681,-22324,-31661,5426,22870,121,-4387,32347,-12388,8943,28886,-5820,-30289,6569,27157,-14939,14928,7874,29451,-26658,-1052,9215,-16959,24949,9481,-21470,31938,17456,21179,-102,-10999,27624,4753,-3778,-17092,-27549,1626,-18978,28031,6342,-90,-6189,-20998,5535,18097,10248,-25293,3961,24226,21410,21564,-13798,-5419,30975,29799,13104,-22963,19977,19570,-12202,22703,-15719,-12741,-29917,-8978,-9882,-4408,18977,30722,6004,-15732,-1088,10803,8138,-12680,-21002,10590,-1981,-630,-26301,7135,-31832,27012,-12645,-6972,27038,-30403,-27426,-17458,16923,-24206,20512,9657,-1978,193,13978,1062,1720,-25714,13771,-3006,-8739,-476,19955,-14322,14986,5598,-21555,-8458,1613,11070,-19229,-28744,-24438,-2763,-20539,-14110,-8779,-12094,855,-22673,-3294,-30322,17512,8260,-29889,-13604,-30120,-1982,-17624,23355,-10357,15946,24769,-27256,-17003,-13743,19020,28775,5316,-11953,32197,4682,-20200,-6214,12785,24713,952,29170,-2770,15611,29004,-22204,-18477,-3520,-17813,-31083,-26005,3147,7778,13487,22440,-8328,5749,-16092,28509,9767,31039,8980,-26476,8880,-4568,-5239,30674,18677,-17635,17609,-7070,-1179,2177,30152,-15128,-886,-32452,30181,-19841,-456,21762,22685,-21002,-17982,19624,-10729,-4093,-16285,5300,10417,12333,27956,5632,19881,24527,-1024,-31504,27196,22457,-22770,-28790,-14136,-22694,3635,-6022,-3967,-29036,2790,-31111,3490,19857,-17256,-19919,9404,4075,29204,6751,-27695,-18557,127,-31800,-31464,-25644,-9031,-4216,-22962,-30377,-27832,-29752,21919,-20107,19739,-23849,-8100,-20448,-31101,31855,-26786,-23914,-11866,-11347,252,-2617,-18047,-17681,-22110,16764,-25585,-18959,3007,-6551,15210,-31974,-2814,-31580,-11384,
-17392,-2409,3753,20267,19365,-14511,6392,12130,15821,-10304,-9066,-29247,-27834,-27180,24192,-13314,-20194,365,6060,22808,6138,8495,-26501,-1499,19384,-12918,-5774,-12639,24468,-31057,31113,22410,-29851,18416,-7923,-23570,-23088,-29677,20800,4,-13307,7517,3703,27589,-14128,28269,-24150,-2815,32428,3742,26208,-2274,-26447,6260,20943,21215,10495,18754,2259,16503,-21018,6646,-18901,10337,-23575,20172,-23284,29054,-29180,-1806,135,-4302,29200,13812,-2108,23505,14455,-8292,22941,-5407,1112,-25750,-3017,-16891,-30293,-16452,22519,-21726,-8966,31524,16924,-29873,27353,-12578,31977,-11687,-9428,18029,-28771,-23871,-9410,23579,15862,23706,-17865,17762,29201,-6091,-30345,-12335,-30501,-10437,29019,-29891,-21301,-28831,-7664,28905,-12492,18060,-5488,8168,16397,28445,-14742,17463,7686,-15791,21071,24319,-20354,-10003,-13428,1354,7034,-24609,-29493,18175,-13050,-169,-29707,18411,-15630,15886,-1776,2475,423,-21953,-12682,262,29777,-21729,23800,-11498,15248,13902,-15382,27192,1512,-27849,15437,-5134,-10157,32301,-11583,16414,-15492,-6993,6398,5482,-7192,-14493,9329,14972,5916,8932,-11738,-10378,28377,28685,28262,-17754,25671,-2394,28954,11105,5399,-4084,4448,4444,-28027,-27479,30901,-30103,-22016,29229,-19536,-16606,-3733,2451,-4303,-22772,-31292,30771,2500,-20869,-22698,-12821,-23920,-21167,12467,-7752,13938,-29995,5212,-29775,-31278,-15725,26686,-9480,4591,4769,-25828,-305,9980,26348,25666,-27572,-5346,-21126,-6294,-32498,2639,-8915,13765,9210,29263,32205,23798,-23367,6749,14337,21674,2283,-29013,-11357,-14163,-7415,-3628,-6474,-10923,-20724,31127,8256,4177,-16291,12686,-11058,-20267,4252,-22399,-29740,-16116,30026,26856,24307,29835,31360,23929,28500,-239,27028,-6103,-17765,18397,2020,-15491,21532,15028,-29657,20361,-15594,15248,-2646,-18654,2605,-16860,-3902,-27758,-17759,-19777,5832,4691,11200,8365,-16900,29044,-8094,-20180,-21133,-30477,-23377,-13796,32540,23396,-19473,-21447,-15661,11143,7678,-12415,31443,24505,6033,-23390,-4338,-27447,-10750,-14046,27708,-19188,4470,239,60
06,-31550,-17373,14998,26388,-7351,-15065,18607,10580,-15134,-19465,-17507,-9236,-5957,1987,-26341,7451,14458,13373,16459,22625,-25394,9253,31752,-4817,-23779,30157,20064,-23660,19184,-22100,-21961,6349,-16236,27514,-22417,24997,-14045,1096,-13246,9101,6395,-7501,7683,14470,14246,-16374,-26915,-7856,-12486,-30113,-25361,-2999,18096,21831,12279,3050,-18213,-26660,28584,-23100,-28196,32053,-572,21628,-31359,-17774,-8699,1577,25638,-14969,-24618,23346,25458,19182,-30702,-3667,3547,-7333,-770,-28613,-21655,6183,-721,25425,6536,-14578,16741,21198,-25021,-24744,-6677,-3418,30341,-12549,15410,-5943,20026,-7247,31397,3219,31758,12343,-9916,-732,-10623,-21146,979,-4764,30541,21927,-2059,13199,11119,-14204,-4375,-14109,32135,-32243,22236,18810,24220,-25231,-29611,17892,-19211,-28970,27886,-28835,26683,28182,-8073,15800,-26474,26418,-7031,12464,9102,3718,-14075,3975,32167,2215,14324,-6678,22219,-16031,-11375,16451,-30185,26758,27802,21696,23764,-569,-26858,463,642,-10466,13322,-27927,-18909,-10720,32252,-10298,814,21333,-28429,29635,-32010,5877,-10610,-20122,26426,26259,27044,15977,-5144,-20496,-18318,-18322,-31297,27590,-10554,-22741,1327,-2180,-19093,-13226,-5174,-28143,12866,-14530,-27510,23371,28786,19288,-7336,16407,20998 diff --git a/tensorflow/lite/micro/integration_tests/seanet/conv/conv7.tflite b/tensorflow/lite/micro/integration_tests/seanet/conv/conv7.tflite new file mode 100644 index 0000000..f448045 Binary files /dev/null and b/tensorflow/lite/micro/integration_tests/seanet/conv/conv7.tflite differ diff --git a/tensorflow/lite/micro/integration_tests/seanet/conv/conv7_golden_int16.csv b/tensorflow/lite/micro/integration_tests/seanet/conv/conv7_golden_int16.csv new file mode 100644 index 0000000..d2f5494 --- /dev/null +++ b/tensorflow/lite/micro/integration_tests/seanet/conv/conv7_golden_int16.csv @@ -0,0 +1 @@ 
+32767,-32768,22348,32767,32767,-32768,32767,3136,32767,-2745,16365,-32768,-32768,32767,32767,32767,32767,-32768,32767,32767,27073,32767,32767,32767,1428,18780,32767,6393,32767,32767,32767,32767,32767,-32768,32767,-32768,32767,32767,-5479,32767,23599,-32768,-8271,-13112,9681,32767,-32768,32767,-32768,32767,32767,32767,713,30419,-32768,21671,-32768,16116,14310,20268,-32768,-32768,-10617,23035,-32768,32767,-32768,15821,21444,-18306,28760,17198,32767,-32768,-21719,32767,4664,32767,25734,26654,31178,32767,-32768,22220,-23741,-32768,-32768,32767,-32768,32767,-32768,-32768,32630,-32768,32767,19301,-32768,-27368,-32768,32767,-12841,-32768,32767,-25060,30538,11428,4460,32140,-32768,-8897,-32768,32767,-32619,-6401,-32768,32767,-13954,-32768,-32768,-32768,-29331,-31923,-11843,32767,-19558,-32768,-32768,-32768,-32768,-32768,-32768,32767,32767,-32768,-32768,-19417,-32768,-8555,13385,-32768,20549,32767,-32768,-32768,-32768,11779,-32768,-32768,-381,-32768,14361,-16334,27065,-32768,-17080,32767,-27663,-32768,-6994,-18881,-32768,32767,6441,-32768,31856,-32768,-32768,-32213,-370,32319,-32768,1948,-32768,13449,-27345,-32768,3892,-32768,32767,-4066,-29466,-32768,8485,32767,-32768,-31966,-10534,-32768,-32768,32767,-32768,-32768,-32768,-32768,31676,32767,-9479,32767,21050,-19109,32767,32767,-32768,32767,-10713,-32768,-25430,-32768,32767,14468,-32768,-32768,-32768,-2998,-32768,-32768,8157,-6403,-32768,-32768,16285,28139,-10367,32767,-8488,-32768,-32768,-299,-24767,2165,23636,-32768,-32768,32767,-22401,-32768,32767,-32768,-11561,-32768,-32768,-32768,20529,-1992,24216,-3146,-23761,-32768,5268,-32768,-32768,-5580,-8892,-32768,32767,6568,-32768,-32768,6770,-32768,-1665,8826,-32768,-20627,-32768,-32768,822,-32768,-32768,-32768,-32768,-32768,32767,-20861,-28528,-29902,-32768,-32768,-32768,-32768,-32768,15633,-32768,-32768,-32768,-32768,-32768,-20646,-32768,-32768,19925,21920,31344,-32768,-32768,-12677,19995,-32768,-32768,-32768,-32768,-24946,-32768,32767,-32768,-32768,-32768,22751,32767,5400,-
32768,27011,-32768,-27992,31688,-7934,-32768,-32768,10656,-32768,32767,-32768,-32768,32767,15974,-32768,-32768,-3054,-32768,31619,-32768,-32768,-32768,-32768,32767,2394,-32768,32767,-32768,-32768,-32768,-32768,-32768,-27880,13309,-32768,-29619,-32768,-32768,-32768,-15370,-32768,-32768,-12128,-32768,-32768,-32768,-32768,-32768,1330,-32768,-13080,1358,-32768,-32768,-32768,-32768,-24539,-32768,-32768,-32768,-32768,-32768,-32768,-32768,-32768,-32768,-32768,25317,-24498,-32768,-32768,-12754,-32768,-12530,-12177,-32768,-32768,-5381,-32768,-32768,1223,32767,-9781,-9371,11452,-32768,23034,-10548,32767,-32768,-32768,16,-12180,-20641,30355,-32768,-22752,32767,-32768,25478,-32768,-32768,-32768,-11705,30604,32767,-32768,-32768,-32768,-32768,-32768,15360,-638,32767,-23495,-32768,-32768,32767,-32768,32767,23290,23432,5335,-32768,-28680,14629,31428,6903,-31866,-32768,-32768,-20457,1490,15078,-32768,-17491,-32441,-32768,-23909,-32768,32767,-1712,32767,-32768,32767,12345,10540,-32768,32767,122,32767,141,12538,-6459,32767,-5952,30530,32767,32767,-6181,32767,-12118,32767,28705,22274,32767,-32768,32767,-8730,32767,-32768,32767,32202,3675,32767,31103,18974,25237,-32768,-21103,32767,-32768,-936,-32768,10440,-32768,-32768,32767,31726,-32768,32767,-32768,26643,-32768,32767,-32768,26607,16069,-32768,-31132,28802,-32768,32602,-27224,-32768,32767,32767,32767,12899,-32768,-17252,10214,-32768,25063,32767,-19385,32767,32767,-31460,1165,32767,32767,32767,27141,2604,32767,32767,-20940,32767,32767,-4367,32767,32767,32767,32767,13274,32767,-32768,-13883,32767,22744,32767,-32768,32767,32767,16994,-3856,12750,1744,32767,32767,32767,32767,-1075,32767,32767,-32768,32767,32767,32767,-32768,32767,32767,32767,32649,-32768,32767,32767,32767,1909,32767,32767,-2859,7800,-16021,32767,-1114,32767,-32768,-21307,32767,-23505,2130,-32768,14568,32767,24789,32767,15188,-32768,32767,32767,32767,32767,32767,32767,32687,-32768,32767,4938,-32768,14575,8636,13265,-28559,-28300,20251,-32768,32767,-32768,-32768,-32768,3276
7,-32768,32767,13534,-18298,7926,23389,-32768,16989,-32768,31279,-3939,-32768,21880,-32768,-2551,5579 diff --git a/tensorflow/lite/micro/integration_tests/seanet/conv/conv7_input0_int16.csv b/tensorflow/lite/micro/integration_tests/seanet/conv/conv7_input0_int16.csv new file mode 100644 index 0000000..027ad4f --- /dev/null +++ b/tensorflow/lite/micro/integration_tests/seanet/conv/conv7_input0_int16.csv @@ -0,0 +1 @@ +-25251,21724,-24077,7391,14215,21162,1634,-9810,-1681,-30165,4116,21114,-27278,18909,27171,31853,-844,-24206,-23383,13052,1930,26828,-5704,-25414,32212,-31475,21579,27659,29545,-16145,6819,-4732,19679,-5323,1539,17406,-4786,24865,26195,31427,-31483,-12386,14420,-1847,-9035,14094,27475,-28825,-24243,-28694,-3306,-7898,21856,30506,-21449,21361,3822,-16404,26433,-7474,25106,-16229,32722,29128,-24772,-30828,-32124,5847,-17198,-15250,19140,-15869,2567,-20739,-23693,4686,-20235,30820,-12071,31825,-18283,-10070,23949,-11496,-21073,22210,16441,-3613,-29294,7265,785,-6192,-12181,-20906,-16715,-29642,19478,-31908,23472,-3863,-29378,-19013,-29531,24533,15188,-3495,10598,12099,-22378,-471,31425,-13842,8000,2319,26653,29487,-25888,-15077,25414,-31498,19083,-20197,-28597,28641,21979,1006,8890,-7007,23955,20964,-18292,28930,4787,21977,-28231,17221,29964,18004,-10502,-11626,-32190,-6929,18578,2280,-31232,-17002,7963,1708,16143,17805,10162,-11976,1309,-17676,-7126,21733,-18885,-5205,-21631,-1713,-15050,-15849,-3331,1018,16458,30365,-3551,-12323,28533,-31846,-28508,19194,4163,-9161,16000,-6334,9320,2917,-12094,22619,7533,-30056,20749,-27751,-8862,-18931,-18179,8840,-31373,25473,31224,31805,27208,7826,31453,32357,-16678,25143,-5001,-2590,-20894,28690,15547,3782,21086,11635,9716,20685,24992,19916,-12240,13513,-13104,-13714,3782,-2445,-6687,-4940,2834,2994,-17734,31808,-19710,32438,15876,-26922,7374,-18681,1733,-14960,25826,8527,-5437,25774,-71,-15867,-9704,-10262,29597,-23863,27611,27253,7719,14700,16674,-11407,-12482,-28175,-25084,-2919,-26204,-5467,-22140,15505,2980,-183
64,2605,23018,-19448,29883,-4614,-31635,-28613,31904,26691,15599,6744,-15155,-7035,5155,7646,-4182,16625,-8063,-1090,29009,-23457,-17317,19482,31043,32724,-24126,-11952,16598,13363,-11469,-26873,-7903,30559,-19,23382,-7491,-1502,-27740,-18699,16971,-18319,-17504,-19558,-26883,14118,-21558,-15147,-1442,1495,25802,689,8463,22118,-18976,-14950,-4489,27149,24908,-27397,-11190,13491,15013,-30669,-11149,13943,5053,9530,-10129,18672,-3007,9321,26024,13778,10228,7634,1751,15580,-18337,19548,29273,-14052,-22774,-3429,10445,29306,-1007,-14048,19603,-6846,28850,-10602,23600,-4383,20805,18809,29332,24919,-12455,15753,28174,21925,1387,-4425,9412,-2162,14768,-25829,6613,-13185,30184,27898,-17236,-30492,25523,22819,5268,29370,19079,-2625,-14526,-9874,22012,-6894,-24283,28030,7398,4370,-6069,31078,21371,11880,-11902,28688,-17253,27193,12614,-12871,-169,22115,30154,-13546,-29299,322,30483,7845,2753,24130,-13199,27732,-26353,1008,-22940,-12847,12966,26499,25132,22521,5611,-31276,20765,24519,-28925,-3727,25132,28989,13013,19432,-11036,14713,16589,28741,28070,1645,15369,24436,14319,-257,29053,27700,6016,-445,30936,-29367,32644,-11425,15091,12064,19255,-20186,5359,-3631,-30364,-20201,18183,-3763,19370,19966,1000,-15005,-18027,28734,22561,-29692,-6327,-20677,28185,32671,4592,-23375,12924,22958,6968,-16545,16595,-19703,19851,-30952,-3180,27041,1982,7448,-16162,15474,-26001,-31542,6968,-28410,-10648,27145,-3537,22830,-7826,5429,16540,-26923,9131,11756,-24701,1275,-25556,-6981,27914,-7690,4766,6747,-32571,-8841,11805,21941,-15125,-3277,-2371,15414,-12091,-31891,-7246,-5103,-31655,-9701,-6194,-28050,-21595,14355,6595,31513,-7759,-12642,-19810,32076,-16817,16715,11880,-18971,-23381,-16844,-28773,-4510,-9154,-14711,12240,8118,-22961,-20163,21530,26841,-12024,-25406,-9663,-11295,28530,-11897,-19469,-32228,32038,15100,0,-19996,-9265,-21278,-28923,19100,-9978,-17410,26889,-14908,-4448,18721,-19898,-29120,10133,27599,5453,32072,26773,-28119,-1772,-6517,23504,8055,-30139,-30774,-8993,-1269,-12416,-
21415,-9660,-23433,1656,32476,11861,1481,-9561,25918,-22162,21528,23440,-9752,-15091,-8864,-25191,-32050,18235,18944,9246,-18327,28953,-29234,-7243,12635,-29132,-24211,6622,26778,-9357,-11966,-10467,8462,10770,-30781,-18605,25985,-9962,23132,6754,14678,28847,-12923,-26477,2537,-22230,4692,-11875,-15639,-18269,-1977,28296,-13659,26687,-4022,-32670,31052,-10178,12112,2382,-13816,-11592,16532,-25165,-14603,252,-9059,30226,16625,-12010,-25547,7257,30285,-25052,-15419,18961,2842,15891,6277,9076,-10659,14556,2343,8619,19069,12379,2263,8845,-16965,-1380,-16554,2864,29972,12769,7232,-32422,16775,3885,-22967,-31797,31556,13444,12437,-7184,-7763,-18160,16784,-26404,17427,6993,30011,25589,-11313,-1866,14242,-29979,24854,-7925,-11619,-20269,5179,9321,-22202,-24697,6876,-361,-23175,5593,-23688,9516,3760,17102,-10544,-18142,4522,-23351,26666,7597,25170,-14351,-14873,-23488,-4661,3072,7288,-23700,-12426,-6375,-30330,-25492,21217,-2175,27732,7345,12693,-25881,-10565,28563,17441,-21240,14859,-12487,-13233,22852,-1748,-27596,7810,-31083,-13431,-12777,14665,23790,-407,-16243,-19846,-14564,-18154,-11247,-13871,-14653,-13758,-7070,9222,-30132,-8068,-28343,20072,29242,3675,-24351,6123,-32623,23432,-22603,-12561,8633,5266,-27420,18129,-30685,-31689,17470,3351,21756,-8638,21615,9617,-9608,25452,18054,-17162,-7587,-27663,6887,-13546,24043,20346,-10795,21768,-25088,-14423,-4833,-22132,-21474,13590,15809,-2525,3415,32733,-6152,29333,-2671,2077,24335,-4658,5991,27132,-10880,15206,-6259,3122,25533,-25804,17133,-29380,-28415,30368,15590,26334,-24823,21904,-11563,9540,23731,19823,-1441,-24390,-22025,6556,-26022,4560,-13015,-6732,10110,15281,7829,-21119,-31303,-26267,28059,32452,30018,29149,-3924,-9111,-14645,21440,10577,-9901,4553,-8622,-26443,-434,-3927,31373,28600,2084,26635,3253,-6884,-15736,32349,555,-15006,-27944,28964,5690,-21942,19705,28581,19975,16424,-28300,15396,21659,20666,558,16898,-16173,4537,-13187,-23293,20821,-11791,-18477,-10725,-14239,-28832,8918,-30766,-27314,22461,31324,15396,
-15543,-6413,27847,-3377,-11625,-29910,-23341,-24037,14185,21510,8801,-30088,17699,-25648,10189,-10711,19760,10902,-18713,-16215,-26166,24444,-5859,-17148,3488,32484,-28781,-28088,10001,21820,-21008,922,10140,-15197,31993,-1691,20110,-9514,-20809,20625,-2849,-6836,-16606,24446,-8256,-4579,8523,12792,-4633,-5221,7769,-26672,-17144,20093,14600,-791,27930,-16673,22804,13109,-20847,-4843,17354,-4938,-3666,7983,-6355,-2992,13370,-1914,17429,-29835,-6439,6668,-25349,-31800,-7885,28028,29998,9980,10347,-23571,-7672,884,21911,-2807,29579,3619,-17800,23817,10414,-2866,18383,-15188,29134,31416,32672,-26072,-27925,20822,28590,31918,31787,11027,18142,-8807,27723,-29674,9246,31895,-11227,-13125,-9455,-25008,15064,-32112,22014,-27431,24041,9896,-29954,-10726,-14783,3697,29158,30180,-10502,12125,-5631,-11088,4601,959,3250,-968,-2338,-4570,10558,-23570,-22121,-1262,-7731,11102,20101,-22766,11747,-22084,-10630,31043,-31049,-808,-7701,971,-24486,27362,15481,-20962,16236,26578,22905,-9,2016,8905,4867,3715,-31537,10796,18930,5679,9841,-19585,-22736,24128,-12263,23687,27465,-11654,-22845,8194,29471,-605,7140,-13422,-18948,-2023,-1522,-15056,-17379,21891,5492,2690,5943,-32673,-23911,27471,-28897,-13923,-10589,17592,-9631,25838,21348,6741,-9896,1841,30461,19293,-22423,3558,-5221,9768,15503,-26382,-12547,-25341,29590,-2517,26256,-6074,-12868,-26350,-22689,-2084,-10365,-4225,-23941,4972,-25960,3858,31071,-14788,10034,-4921,22887,16218,5422,29201,8976,-22992,11973,-15993,26186,-11994,9359,-30798,-30449,4025,-564,-14488,-7489,-21413,16632,-19411,26056,28427,-18000,5519,-20734,-1681,-26721,-12656,26923,468,31748,-4116,-32494,-16672,-26531,7361,-193,-5704,-15150,19478,14522,14035,-3221,10303,-2610,-19204,-2399,-31915,26654,14295,28109,-14292,-14797,2542,-26299,-2563,-29347,9969,8756,-18068,578,13078,2471,-20012,-8775,11611,8382,-28112,-1393,-6421,13627,12806,11797,-6598,6011,21400,21331,-7398,15728,-653,133,18670,-10781,-2458,9074,-29274,-17326,26566,25087,-231,-25381,18936,32155,-19806,-29593,
-25613,-2279,-25267,-24650,9074,14263,-15745,-4936,4031,5197,1188,27154,16995,4578,31748,-32627,26051,22531,-10187,-16517,-7250,-839,-13072,-3748,-3802,-3088,3835,-11989,26174,-25135,28106,31981,-7447,29008,-856,15833,-15761,-12567,-12075,31405,9166,-17237,-6358,29335,22697,21085,-3096,27534,-18961,2438,22916,-29585,-26176,-18417,-18834,-15786,23067,26419,11461,-235,17317,12585,16060,-11037,-338,-23284,9250,27569,11714,4162,8291,-23011,1872,-18885,3708,-25861,-29604,-10366,-12761,-19015,-30430,6966,-6338,-26221,-25312,-14546,-20348,-5367,-25894,19185,-29540,-15121,26762,9896,29766,-8204,-29369,-19898,-23862,-5132,-20629,29857,-23967,4284,-18187,-9476,17103,-11205,18145,28315,15271,-24548,18949,27895,10815,-3863,1952,22656,19959,32221,-11244,-1328,21670,32708,-30804,-2143,15746,26135,-30066,17494,31234,15131,-25516,-9847,24708,-3958,-31982,-8995,5358,-18482,-12544,-25275,-9937,-21433,-13383,18209,-24818,-7489,-4858,-4433,-11207,-25329,-3763,19827,-24751,8185,9290,3413,-24197,22394,-3853,6513,-24027,24751,20396,22469,29575,3385,-8013,28836,20544,-28904,-3847,-14092,11939,11075,26988,6103,-26899,-14779,26807,-1597,-5526,-28933,-22437,-18087,-20205,7178,28647,-28721,-26844,-10891,-6906,24421,3288,-8528,31519,-379,-22047,25318,-13774,23678,9030,-15617,-8371,-31035,5055,16109,-31185,-24890,27616,1827,12624,16002,24347,-18693,11721,2953,17479,-10852,32231,13130,16640,-2167,6413,16679,-6560,-20070,11980,6751,-27823,21734,-21280,26997,15632,26909,-7729,11120,23188,-11534,-67,18223,-31371,29660,529,-22915,24729,6105,-20366,-11980,-5902,-7940,6508,-22654,-4411,-16804,13420,-29683,13747,-16782,27333,2965,-12404,-6482,-17942,-18449,21710,-32145,23985,-19014,8354,-28124,-21463,-6558,-10166,7174,11996,-22813,-11571,-5830,-6986,31781,3997,-15358,3186,6975,-12665,2675,-24320,-6010,-29373,-19992,15364,25965,22497,3771,23327,-4552,23978,13233,-6949,-29922,-30023,-30592,-18080,-6029,26337,-9209,16301,4464,-32283,-2753,-31292,14932,-31252,-13844,-32145,14059,-3670,-13870,-9559,18288,204
20,-16371,-16833,-19918,-21623,-5229,24081,19427,31275,29329,10750,-32152,28059,-13365,18301,-18242,20341,32133,6947,26684,8581,-13557,-27568,-15291,29725,-19923,-11868,13437,17171,31458,29587,-2573,-2359,10769,29404,28586,27530,-10338,-31417,-17710,-13838,17927,25131,-4714,-22953,19273,-11251,23155,29269,-24186,26182,-22705,-23445,-21598,-9751,-26201,-12916,6583,-1538,-31798,27333,14480,-14538,1788,8246,23521,-29460,28393,-29820,16392,-22897,25287,-227,16532,4543,29105,-28775,32326,21367,27931,28875,-31529,-4655,25341,27601,3848,15986,-23751,-7688,26747,-10969,28306,-10833,-17428,17465,-27894,-20181,-14703,25462,-21185,-31933,-21644,17564,-6689,27779,-9109,-12711,-4662,8676,-6708,-2480,27032,-13856,15587,13293,27587,19231,-27805,22498,4803,3481,-24422,14297,-28688,7799,-3128,-28025,-31742,27959,23929,17603,32291,-15722,15112,13774,-6934,-14851,-16445,24213,-17702,12581,29781,1613,12872,-31929,7379,-26933,-23482,13253,-22399,-24496,30566,15991,21293,28377,-9977,-31604,-4998,18062,5084,-23033,-28956,-30226,5169,-2197,-13381,218,-27237,-7972,11829,-17442,-21452,28027,-17330,-23369,32347,-2400,20295,-16992,25008,-5379,11719,-28506,14576,-32574,-18883,-29797,2306,-10866,-5651,-29591,28451,3577,-3108,25532,-21251,22035,-32754,27409,-10909,-24653,8809,27365,237,-76,-32561,-8828,-15908,-30957,-20223,14081,-21280,26278,23839,-26284,32209,27772,-3565,-31767,-21589,7408,7703,-19267,906,-9031,29753,10714,21277,-15297,20531,-23980,9395,31781,-12940,-1500,-21106,2715,1923,1902,-12185,19265,13296,-7773,-2331,-12262,-24654,15877,1486,11896,-4089,22132,23297,-21190,3290,370,23351,12035,1547,-28319,18525,28600,14292,-29962,-15837,20810,-14881,17953,-31157,-12036,-17729,-29509,18937,-12243,24907,-13920,1944,23346,-11387,-23028,25137,-22019,14260,-8518,12716,10015,4855,1586,28273,1346,-14574,-4506,-18770,25482,18237,8559,32048,-6735,-11181,-25769,-20526,8719,-31595,14858,1589,22598,-2873,19320,17275,8069,-6373,9072,25959,-29550,-30613,-31209,-19845,-18113,-30627,20806,13009,16074,-103
75,30808,-21992,-1413,3041,18616,10446,17765,-4261,-3300,-23844,1784,24608,-13967,-20680,17031,-24155,-32343,-32532,30781,6577,15985,5315,-335,-14710,-12905,-20427,16470,1951,-508,21305,742,4144,24124,-20392,-11018,-29535,-83,26680,31919,23171,-15846,13264,-26354,-7693,-13869,1862,30349,-12739,20283,-31485,-29909,-149,30857,-13132,6980,7307,24756,-9879,30682,-13051,20380,-16865,27210,-929,-1932,3922,2402,-12665,-13290,2898,8726,32232,-26595,-30823,4701,-14846,15790,5575,-31732,-26543,12394,14702,-21725,-10207,117,3000,-31695,7484,25137,28353,-17913,17737,1218,11221,14495,-18435,8942,27181,26687,-1970,-14758,9618,29422,15014,-25832,31669,-28582,15636,7332,5947,2869,-3977,9393,-30606,22677,-9305,-3855,17048,-9060,31676,-13334,-19806,-2731,23256,-13201,-27880,-25660,26335,-20869,-16908,3797,-2488,10934,24545,-13181,-3276,5355,32635,23386,-31854,-19413,-2186,22271,-17846,-26113,20082,-25191,22987,-24624,-26552,21670,15620,12054,-27832,-23284,-13143,1339,-28805,-14879,26159,-5439,-14179,5037,-1467,-18523,-22724,-9550,25313,14492,22068,-3112,636,29846,29883,-4404,2247,25052,31937,-31772,8005,2505,-10715,17167,2868,-17349,-3110,-5630,-9384,-21685,-11386,-28367,29712,-30442,-496,-29397,3469,15918,18838,26369,-14389,-1739,-13494,-21300,-29939,7396,-15360,21532,-4074,-25719,20274,-21840,32456,-620,18257,-12997,24768,6699,28670,-10670,-10479,-23686,32440,-25341,29295,-29770,28199,-9310,-9403,-24037,-15788,32263,6405,5206,14679,-21287,9391,23968 diff --git a/tensorflow/lite/micro/integration_tests/seanet/conv/conv8.tflite b/tensorflow/lite/micro/integration_tests/seanet/conv/conv8.tflite new file mode 100644 index 0000000..36f513b Binary files /dev/null and b/tensorflow/lite/micro/integration_tests/seanet/conv/conv8.tflite differ diff --git a/tensorflow/lite/micro/integration_tests/seanet/conv/conv8_golden_int16.csv b/tensorflow/lite/micro/integration_tests/seanet/conv/conv8_golden_int16.csv new file mode 100644 index 0000000..e9663d5 --- /dev/null +++ 
b/tensorflow/lite/micro/integration_tests/seanet/conv/conv8_golden_int16.csv @@ -0,0 +1 @@ +32767,-9585,32767,12850,32767,1535,32767,32767,32767,-32768,13038,32767,6688,32767,32767,32767,32767,32767,18572,-32768,32767,256,-32768,32767,17397,32767,-7426,-20024,-9064,13984,32767,32767,30186,-32768,-8064,-21900,32767,16149,16273,-16595,32767,32767,32767,-32768,-27467,-16331,32767,32767,32767,-32310,32767,5670,32767,-26772,32767,32767,32767,-32768,32767,-30034,32767,14302,32767,32767,32767,-18107,4108,28014,32767,-23267,-262,32767,32767,10215,20293,32767,-30052,23559,26255,32767,18226,10102,-32768,-32768,-32768,32767,21678,18318,32767,-22158,32767,-10620,32767,10659,32767,-32768,-3845,16521,32767,12826,32767,-13125,32767,9858,-32768,-4812,-32768,32767,32767,19752,32767,-3258,32767,32767,-17224,32767,3093,11902,-5057,32767,32440,32767,32767,-32768,-16894,-20449,32767,32767,-32768,3263,23643,-29966,12037,-3696,32767,32767,-32768,7800,-1790,-30460,-32768,14261,-32768,-9832,-31767,-32768,-32768,-32768,-6436,-32768,-32768,25678,32767,32767,-7953,30244,-29935,-24255,10372,32767,13308,27135,-7914,-27949,-14423,-22895,3999,-32768,-32516,-32768,-32768,4837,-16287,-10023,27816,-7053,-32768,-21748,-32768,32767,32767,32767,-32768,-20931,32767,16648,-4529,-8747,-3285,32767,-32768,28216,-2535,-22383,-32768,-8703,-32768,29590,-4215,-25959,8462,-1460,-32768,-32768,32767,-3412,-16598,-32768,32767,3597,-32768,-11384,-4209,-22525,-32768,-32768,-32768,32767,17008,-32768,-32768,11146,-32768,32767,24247,-5032,-10777,-23878,13152,-11587,-28417,17727,-28401,32767,-32768,-32768,-32768,-6789,10739,-32768,32767,-9551,-22042,-32768,-32768,-32768,-32768,-27820,32767,-32768,-32768,-5090,-32768,7612,-32768,-32768,32557,-32768,-32768,32767,28884,32767,-28592,32767,32767,3480,-18397,7148,-16603,32767,32767,18693,-32768,-32768,-12833,12257,32767,6833,-30883,32767,-32768,-32768,24274,-4541,-32768,21771,13812,-19024,-16955,32767,-22907,10536,5050,-32768,30159,32767,27764,5559,-32768,32767,-13693,29811,-32
768,9328,1161,32767,-5462,32767,-32768,-2645,26204,32767,-26914,-5973,9179,-32768,32767,-32768,32767,32767,32767,-26297,1778,32767,32767,32767,-20656,25619,9798,15249,32767,-30,-8096,32767,5098,-32768,32767,-12219,5011,24557,-22909,32767,-26417,32767,32767,24778,32767,17174,-10446,-32768,-32768,-22040,-24802,-4129,32767,29306,24801,21198,32767,32767,32767,7276,32767,8763,-32768,26870,6764,27495,17871,4166,-12174,32767,32767,-28461,-12476,32767,-4190,32767,-3567,32767,-12489,32767,32767,-32768,-22168,-21428,32767,30701,24365,32767,-10570,29769,24626,-32768,15587,32767,-32768,-32768,-32768,32767,32210,3980,-3560,-25605,32767,-32768,32767,32767,-32768,-19993,-7137,4708,15130,32767,-5694,9134,24119,29357,32767,10618,32767,32767,16783,-14686,7475,32767,-32768,32767,-17286,13992,-32768,-12219,32767,-26160,32767,-32768,-5541,-32768,32767,-3788,-32768,-20727,-12651,32767,-19724,32767,-32768,32767,-32768,-13875,-27599,-32768,17689,-2550,-32768,32767,-32768,-32768,20843,32767,-18759,-32768,16837,13100,-32768,-32768,-32768,-32768,32767,-27415,32767,-32768,6794,-32768,-32768,-28545,15989,-32768,-28717,-32768,-19633,29795,4395,5951,32767,-9232,5308,-11083,-32768,-15590,-32768,26205,32767,11264,-32768,-32768,-32768,-25612,-32768,4680,-32768,-29438,7841,-10534,32767,32767,-6697,17609,-18208,16338,1491,32767,-32768,-9690,-6905,-32768,-6693,-18440,-5602,32767,6810,-14061,-31170,32767,32767,-32768,-10224,28286,16787,467,32767,4084,-2447,-7129,32767,-32768,-32768,7534,-3109,11718,30905,-757,-8739,-27455,31100,32767,-32768,-3396,32767,-15443,15945,-32768,3139,-25026,32767,-7730,540,32767,-32768,-2054,-32768,14448,-16662,-32768,32767,32767,32767,32767,31539,-32768,-23741,2834,32767,30488,-31732,-10575,32767,32767,-11418,-557,-10125,32767,-3953,-32768,32767,32767,32767,-11640,13935,32767,5874,-32768,-15145,32767,-11905,26104,23959,-15236,8857,32767,16789,24702,-28567,16556,-827,8340,32767,12078,32767,32767,32767,23679,32767,-32768,20716,-24816,3823,32767,-32768,32767,11288,7895,32767,266
2,-5286,-11618,-18210,-17132,32767,32767,32767,32767,-24661,32767,27913,-3368,32767,-7427,32767,32767 diff --git a/tensorflow/lite/micro/integration_tests/seanet/conv/conv8_input0_int16.csv b/tensorflow/lite/micro/integration_tests/seanet/conv/conv8_input0_int16.csv new file mode 100644 index 0000000..408d8a3 --- /dev/null +++ b/tensorflow/lite/micro/integration_tests/seanet/conv/conv8_input0_int16.csv @@ -0,0 +1 @@ +4929,-20291,681,4265,-3721,-25434,27632,5541,-600,12230,-11370,26175,11858,23812,17288,12718,-27481,28372,-5286,30903,22471,-27456,10980,10966,16778,23888,-14591,3406,20330,8393,-24169,30331,17020,-26357,3959,-30765,-26212,-25627,-18111,28553,11569,-28530,25664,4930,4047,-20286,6010,-9516,-6942,-11713,6703,3494,-6994,-27788,15329,-8999,4383,-357,-3971,-1451,-4176,2125,-12282,922,-29034,-1161,-13913,-30328,7595,31274,24030,-2821,-28677,-9710,-11998,32487,16617,22303,-26949,13264,-30971,11139,19230,-14506,8747,22845,31836,31267,-590,18527,1698,-2750,-29315,30110,-2204,-23854,-27738,5995,1030,1684,48,23955,-21177,10642,18106,-1608,-7744,-27373,-26601,-16154,31784,-18130,8902,11400,-24332,-7851,30023,-20003,-12038,16475,-21412,-12862,30013,-13005,30562,18738,9842,2672,3374,-12207,7749,-9893,-11580,-1641,8773,1720,9528,16893,-32746,-20516,27153,8789,30988,-25817,4146,6939,-8911,12431,12566,-20880,17203,-3304,-17406,11417,-31264,14556,-30954,14173,19347,27742,-2914,18619,-83,-18087,-22354,-12671,6843,-14722,-14386,10138,-1430,27300,-28263,-23600,-23326,-7908,-31388,-648,19645,22382,-16333,22404,-21014,-27374,22043,22549,-515,31940,-25623,16654,-21233,10157,52,1554,-2685,-10630,23627,20600,-29410,30796,8884,20721,-25998,13839,2633,-16439,22085,-6559,-27905,27911,18514,-20504,-23092,20396,-8732,-30274,14159,-10386,17774,135,-4096,714,-23565,-22480,1586,-20158,31407,-30277,5565,17505,14726,27370,-21625,-18698,-3791,30032,-23846,4685,22296,15896,19821,-30944,-22215,-9449,31240,3705,-25511,32609,24900,-8821,-22105,16471,-29527,25232,7316,22046,15220,8757,-12193,-2
6811,19799,-18437,11829,32259,21948,21154,-4689,10299,-4000,-5497,29068,26338,-28406,-22554,-21658,32477,1347,-18484,12780,26534,1160,-6198,23603,9475,-10314,26365,-12957,-14207,4399,-26942,-21715,19389,-19635,-10480,-13405,20015,17893,-22295,9124,1206,11416,16201,-10047,9124,7866,-23851,-7437,-15288,7069,-14532,20508,-20864,-19968,-18699,18821,26239,-31435,-390,19461,26946,13778,-24763,-19484,27818,29513,19073,25267,-24334,30495,-11285,13647,-23734,-11999,847,11156,-22697,9336,-12642,22790,10413,28976,-7494,-20959,4885,30305,-16710,21396,24664,-8716,-20167,12310,-24543,-14542,9734,-11437,28138,1901,-9545,11458,30045,-16593,-3040,-18390,16571,-9597,-27449,-13890,-6961,-16959,30192,-11177,-32252,4102,15915,-31465,-7526,24058,26533,18386,17778,12266,-6173,-23539,6476,-12833,3680,13924,-22381,28723,-3454,1369,-25857,15226,-9632,-28099,886,-6936,-1840,31868,4538,-22304,11246,-17399,-7728,31711,-28978,26459,-17245,-32548,-6561,-21590,24972,10753,19923,-19863,-15081,11079,-25315,-22112,-10162,-10909,2522,1543,-14844,6359,21364,4658,26045,-3661,17358,-28839,-23498,-13936,10957,-30876,22157,29947,14181,9323,30980,-14800,-10188,11000,-26898,-28304,-27602,-31698,4892,-8965,14399,22352,1496,21332,28210,10410,6844,-1442,7880,-10624,1291,-5788,-31811,4874,16771,-22668,1740,39,-1915,-21996,-21538,-15611,30439,-2953,-26336,-17545,-11033,-17501,24561,-29604,-26885,18777,16905,-3315,29613,-5190,24447,-11305,25713,28303,23406,31244,-20479,8007,-8228,19177,11328,16277,523,-21907,4159,2633,2180,21594,20532,13080,-6297,-10094,9919,18660,6584,-19823,17987,-19365,-4303,-3961,-2060,-8594,14758,25913,-6268,25735,26362,-18627,22887,-4963,-7815,-5336,-12405,-197,9224,10057,28400,-6492,6563,-10668,21422,21356,-3084,-4170,-527,-30054,-14802,-11457,-6249,16906,10772,-9027,9519,-20001,-32319,-21565,23776,21598,11828,-853,-5931,21812,12561,-10241,-15827,-21433,-15930,-10954,15804,165,-13873,-22047,-25747,-25887,-11651,-4713,-19254,-19368,-24776,29186,-24608,-29219,-9605,14373,30595,-15033,-17226,1
3667,-32397,-22427,-20205,-11589,-897,6645,-9015,24062,9370,32433,-27337,-18836,28087,-27904,3983,-27089,27432,-28049,-9202,-6909,-14934,-19895,14793,-30044,24000,5699,-32544,4733,-29369,19423,8164,-23262,-26536,14755,24073,-5162,23842,23041,9497,-5184,24900,-25305,-6095,8230,30438,19760,5849,-22340,-25694,27698,26993,-19695,12843,-16892,-25646,-19485,2611,7955,-967,-15689,17907,20685,-3412,-31562,27038,16921,-31479,-32349,-24348,7700,-10713,5218,-8416,22112,4016,4950,15035,18077,-2850,13690,18067,-27718,-28090,28812,-17970,-8597,23090,6022,-7687,28681,-31769,21878,13312,25238,22044,-2328,-2480,25284,-10020,-28026,22953,9604,-32457,13397,29851,2061,-29531,10766,-28481,-7614,-15265,2010,-3318,1519,27316,23392,-21792,11835,22208,20897,-16644,2189,-18587,-32366,22240,-2388,-24346,12076,-26890,-9975,-17770,-20662,2046,-10504,5947,1065,20563,-20237,-10748,-1575,9970,2003,39,23729,13171,-19985,-17980,-20424,10535,6264,27205,-2589,2473,-18709,31848,-16680,8136,-303,-1828,2930,11839,-29358,7634,-26543,-20557,-13498,-13821,-2796,4843,-30741,-23783,-18868,-9938,-28001,-4178,23355,7158,8142,-30133,10482,27658,-20984,-12059,5413,10544,-21898,-2571,-29073,-18649,3110,-17178,-31948,-24459,-4598,-31152,6857,16215,-15973,-9324,20244,6243,5859,-27294,5830,8205,6267,-28849,17156,-13129,-31234,-20042,-12890,-605,-22336,-22493,-25341,20951,18050,20757,-16427,-9582,-2958,-17757,26769,-8509,13598,-30203,30431,-896,15274,29193,29978,25960,-29422,-10001,-27104,-697,-20795,-22828,-16427,-10580,-21686,23962,-17168,-19499,8687,7413,26963,-27627,1275,-12494,-20208,-17818,31848,31190,15705,3725,-5081,937,-25638,-25528,-23001,-22383,-2631,212,-19980,-32146,32000,-4185,5965,28516,10639,-15316,-13721,-17425,20827,15660,25380,29119,-29916,32091,15359,21154,19117,-5287,-32258,-16745,-29641,-7047,645,20079,-17652,27047,-25620,4935,9185,-22736,31631,-6611,40,18001,2626,1996,21279,-7443,-17477,4470,-26903,-11254,-21634,-26328,26171,-14295,24092,26843,13834,-18574,-20227,13508,-31743,4168,14651,28035,17
050,10021,-19800,25798,-4621,-26606,17546,25498,8215,12374,-21137,-1411,1144,3976,24914,-20566,-18657,-19322,18947,7019,-17035,6754,10538,23200,-18129,-3324,-6530,-18382,3061,17435,-19947,28411,19310,-4460,32162,22412,-16799,-29126,-29868,-23867,7358,1116,21613,-8829,8933,27178,-3393,6653,-14141,1888,22850,21328,-28297,10047,13315,-31592,26115,-27508,29739,-17535,25887,15548,-16986,-28347,11398,8659,-23917,14846,-26119,-1690,-20086,-1618,3718,25768,26890,32719,366,16869,-30696,-3910,-11130,23471,-15447,-14476,-6670,14561,-3628,-26295,-18472,15671,-25044,2900,-4433,-15838,8182,-8737,-4817,-5298,-26609,-4152,-24346,28724,32041,-29147,-3452,16605,1075,-4955,5617,819,26366,-11136,-29259,-13204,-24622,-31105,-28744,-1009,4445,17279,-3176,-13349,-24106,6633,9442,-28840,-5986,-1615,2641,-2678,-26469,17801,-20709,-13336,-1808,11564,17533,1315,-20420,-2300,6522,25115,-8313,-9455,-28199,31948,-28090,-12377,-18720,3036,-8882,-1698,24160,-14340,-24195,-7149,-3529,30174,-30590,23763,-25032,-15517,-21576,-16908,-12824,-10726,16323,-8489,21266,22850,-19269,9645,-24385,3839,21802,26412,31232,-8415,-12387,13559,-5255,-13071,-30067,32718,-8213,-30715,8500,-31466,-11638,27446,-27456,-19864,-23613,-16873,-13511,28243,3944,7181,-15121,-12968,-14468,12736,-21886,29236,-29973,-17430,-17375,-13694,28019,-30943,30335,-13314,-3170,-9698,-19866,-11312,28549,13863,10404,-11166,21650,-29196,16854,-3570,-20962,19665,30192,10933,-11282,897,-11603,6049,26196,9936,-15,-11292,7735,23017,22926,-24188,-1895,6505,-18628,-16991,21334,-24092,14030,-31721,23457,21412,-13327,-30236,29773,-92,-1616,-9493,-8292,-19662,4648,-29937,-20404,-2653,32392,-20791,-24499,13861,493,22847,8876,14718,-17733,29943,-28555,23582,28834,11406,-28292,-6760,-9698,20701,26837,24705,-23760,-13651,-12305,-6511,26171,17969,29925,1436,-13786,2682,-27611,16573,2204,-32758,-10345,17623,-28246,-15843,29375,-16156,19891,991,-26119,-19516,17598,2443,-30544,-6245,30238,23412,28462,-25497,6533,-533,-19957,-486,18547,25026,25191,-27851,-22
40,-30405,22007,27223,-17822,-29733,27664,-29183,-18358,8606,4424,28737,18543,12936,-5858,-12517,32259,22566,-6680,15380,15008,28607,-20020,30381,-18544,8844,32311,-12686,13373,-20477,-8314,-19369,-23985,7257,25950,-30943,31663,-15991,-32528,6315,709,27575,-13308,-6390,23225,163,-12333,-5861,-11882,-31852,25355,-15646,19451,29182,393,4995,-10197,30641,-10442,-27388,-7279,-7869,-26670,-3355,-10280,-19151,-22440,17064,25625,751,-30495,8303,15576,11263,-21644,-28358,4890,16184,-15031,-24784,-15416,14806,31576,-25468,-2840,11340,-23437,-8571,31721,-8274,27255,-11908,-16381,4052,9910,-4119,22994,21845,15939,-10598,-10057,4574,16691,13552,8576,-21957,-20166,30385,-4384,28081,10959,-2097,-13528,-1867,-16153,-20007,-32006,-5120,1871,4932,-5376,1191,10880,-8448,32556,-29755,-20346,884,3265,25000,16158,-32036,-13910,23461,7940,4118,31204,-4714,13446,-1552,6414,-17164,-23902,26901,23829,-31263,-2082,-2580,-28052,-2230,97,-8315,-25915,-8074,-31702,815,-13716,18632,13878,-2023,21883,-11892,-17979,-29324,-29053,2311,-8749,29817,21265,-23498,17623,-27008,29335,-17323,-2514,16399,29004,16772,-24905,-15973,-9114,-28633,32233,-6972,-22526,-3933,-11476,-20144,30888,11430,-3763,11493,-27832,-4978,-18811,-17473,10982,-21345,1213,-12857,-24528,-5860,27032,20441,10096,-32021,8706,-22854,25417,29077,20937,12728,-5788,-1761,17003,-28432,-17095,-18218,12362,12833,23240,-26732,-6923,-29341,28490,3853,-7050,-10589,-8892,3506,30737,-20008,11590,-20602,12523,26631,-22747,20192,1418,-1756,-24771,23003,2719,5059,-21976,-18500,-23262,21713,22652,-15296,-14911,14943,2670,1263,5759,2783,27007,-8685,19520,-31835,-224,-21578,-2526,-28827,-20333,-22801,14190,-9960,28543,32105,24064,-2102,10703,25066,28926,12295,29934,-15089,-27867,16221,4121,-31152,-12995,28346,-18215,-3239,-3379,27509,-22884,-31976,-29060,1486,-2066,18878,3954,-9573,-27280,8562,3646,20948,9884,-15432,-10740,-22714,22812,-27147,-3360,-27873,-20472,1669,-16077,16991,-21856,20195,13226,21485,-21927,23262,29390,-9395,-15892,-6404,20419,300
75,15768,5276,19328,32564,999,11594,10854,15621,-8339,-2412,-20382,4067,-6016,3054,24790,24152,17341,-5241,-4771,17830,-31693,-10572,-10397,-20437,27329,19725,-7987,-7135,-23178,-12313,19355,-7292,-71,7217,17115,-19673,31600,20528,11141,4792,-25824,-11487,-12738,20619,6987,32513,-20986,-15130,12478,7553,-7808,20981,602,-31016,-13853,-12599,11448,-12563,-921,4094,16146,9189,8629,-9311,6274,-31294,-23283,-21375,10498,-12321,18562,30944,-15146,-12459,8013,2237,-1577,21258,30248,2067,-20413,-20126,-15006,25742,38,16702,7193,-23520,18597,25730,27299,-26499,27202,-30939,21436,1818,21222,8297,-28889,-30042,28211,-29950,3242,13408,-29432,18489,-13077,9684,-22231,-13082,20852,32228,-13614,-19699,12526,-23581,32009,-12969,11439,27556,-1478,19342,-28332,18699,-28090,10116,-16599,-9748,-6835,-21230,28067,29427,-6922,-4832,27479,-28845,-24836,-29421,25833,-8766,12055,-22033,12695,-28251,25414,30993,13066,28081,-24205,-18575,-17687,25703,-30248,-4513,-32046,29367,-14390,3411,-397,-31085,-2975,29921,21777,-18751,1497,6498,-20117,-4925,10683,-22757,26285,24719,-15558,32530,10414,15726,9679,14502,-23481,23615,-12738,-8032,2579,-27914,-5565,21682,24801,24798,2473,2223,16742,-15439,-7494,3239,28888,19654,14464,17216,-16905,29142,-29657,12586,19386,-22175,20314,-31101,6336,3416,-22049,-31332,8227,-31661,15493,-21952,10284,264,-32029,30504,4759,-31243,-421,-19877,7537,12875,23443,22422,12806,-5446,15126,12720,-5946,24122,-11598,22880,18472,-21574,16092,3740,30367,-28809,15236,-1494,-9295,-21589,-5537,20943,24556,25051,-29949,31051,-21831,-10874,-3142,16643,21308,30185,-21858,20132,-1050,20122,-5955,-20444,32395,-4644,-9137,-21419,-7367,8936,11476,-32436,3000,22156,12752,27221,-17620,-16649,32592,2728,-10666,23492,2040,-21618,17711,-15023,13784,-22415,20693,8660,-11162,-5544,-15935,-32349,30006,-10412,-7018,-12407,14364,-16461,10254,-27109,-10714,10184,12971,25425,-28458,-30574,12223,-15355,5451,-14043,10425,-487,19641,3770,-3274,-5927,17453,3665,23745,17777,-531,12889,-22418,574,19670,3
966,11725,12233,3746,-25429,26673,7644,18481,22918,5912,-31915,5257,-29404,-12449,27175,-22377,-21178,-30433,30816,-10999,6507,-7810,-24875,-10813,21622,-32300,-15910,27769,9376,-19814,14399,-6318,20648,-28307,-20415,14560,14560,-15965,-5,17985,2170,26306,22687,-17280,-12604,31763,-16538,26092,3316,2097,-4071,-1341,-11349,-14370,-20389,22742,-20508,17420,23049,-6122,3642,-23297,-9287,-22294,25113,3619,1561,-15917,8977,-24610,12237,31459,17681,11038,26465,-17417,32613,8694,-22234,12425,30202,-11348,1658,-30615,31237,-14128,13592,9606,6446,-2701,-745,-15037,-3963,-6484,19272,30988,-8746,31274,-15692,-26485,16542,-26014,27855,-12863,-4040,31586,-10590,-21134,23272,31660,14586,7864,5765,-22063,5039,11698,-7645,21627,-16383,-9126,-5775,31208,-20671,-24272,1692,17646,-16069,-19326,-3884,-18220,28438,-22992,19250,21706,-7751,8376,-30289,1024,30262,-26171,23830,-10258,26472,28454,-31761,-10929,-21417,-32516,-10177,-14613,1323,27333,-28503,-20625,6887,-15102,7859,-3517,20805,23965,-4120,32723,18587,11422,15496,4498,11022,-6729,3740,22903,30962,-19213,6236,-27191,25278,-3192,-18246,-32286,-24370,5332,30077,-28985,-25860,-32608,13127,5243,-4201,-12546,29532,-31777,-17644,-9758,19751,-8341,-11493,3569,-5510,-96,19185,18138,-26102,2106,12462,28015,-16860,-19704,26956,-31919,4611,-12686,-16358,12399,7183,-32607,-26588,19023,-11677,1065,-11412,-16955,26934,-8819,-16361,4221,-745,-10242,10547,16107,-2927,-3815,-30145,27493,-7010,-16438,19103 diff --git a/tensorflow/lite/micro/integration_tests/seanet/conv/conv9.tflite b/tensorflow/lite/micro/integration_tests/seanet/conv/conv9.tflite new file mode 100644 index 0000000..601bb30 Binary files /dev/null and b/tensorflow/lite/micro/integration_tests/seanet/conv/conv9.tflite differ diff --git a/tensorflow/lite/micro/integration_tests/seanet/conv/conv9_golden_int16.csv b/tensorflow/lite/micro/integration_tests/seanet/conv/conv9_golden_int16.csv new file mode 100644 index 0000000..6f2446d --- /dev/null +++ 
b/tensorflow/lite/micro/integration_tests/seanet/conv/conv9_golden_int16.csv @@ -0,0 +1 @@ +32767,32767,-9914,-32768,-32768,32767,-32768,-32768,32767,-12466,-4114,3303,11024,-16308,-13112,26384,-11414,-32768,7604,32767,32767,-32768,-13154,-32768,-32768,-32768,-32768,32767,19675,-30362,20779,-32768,32767,650,27799,-22791,32767,-32768,9432,32767,32767,10567,32767,20155,24642,-10179,9497,32767,-32768,32767,-20052,32767,32767,-32768,-32768,32767,19594,-31453,-7966,32767,-32768,32767,32767,32767,26457,-32768,-15004,-32768,-32768,-32768,-23569,2126,28329,13166,-17264,24753,-32768,11082,32767,-32768,-1746,32767,-15205,32767,-32768,-32768,-27261,-32768,-32768,14997,-32768,-1707,13423,17180,-32768,-32768,-32768,3839,-21135,32767,-32768,-32768,15154,-32768,-32768,32767,-32768,32767,32767,-32768,32767,-32768,-32768,-16593,32767,9449,-10348,-32768,32767,-32768,32767,32767,-32768,23351,17782,32767,32501,16130,32767,18975,32767,32767,3092,-32768,20462,-32768,32767,-21866,-21084,-32768,-32768,13748,-32768,32767,-32768,-19852,32767,-29401,-32768,32767,-32768,-32768,10380,-32768,32767,-24427,-32768,19281,-32768,-25552,32767,-32768,-32768,32767,20773,-6852,32767,-170,6915,-32768,32767,-5392,32767,6720,-32768,-32768,-32768,-32768,-26426,-22857,13347,-32768,8273,-32768,-32768,-32768,-32768,-32768,1031,-32768,32767,32767,32767,32767,-17318,-10595,26444,8353,-32768,32767,15570,-21824,1303,-19779,32767,30251,32767,-32768,32767,-32768,-2067,28301,-24838,-26516,13167,-32768,32767,-32768,32767,-32768,-32768,32767,-32768,-32768,32767,32767,-13967,-22342,-28032,32767,-3362,32298,32767,32767,-12264,32767,7362,-26048,-17191,8310,32767,32767,-23225,5494,-25963,9917,5952,32767,32767,-3022,32767,-32768,20461,20038,11878,32767,-12995,22400,32660,-9318,32767,-24493,32767,-32768,32767,18143,-32768,5600,-32768,8584,-30449,-32768,-32768,-32768,-10280,-32768,7142,1232,-7858,22911,-32768,-32768,-32768,32767,22352,-26967,13775,-2953,-11928,32767,10052,32767,32767,32767,10996,32767,32767,-8636,-32768,13931,
5406,32767,-22735,25165,-8565,-32066,32767,23900,32767,-32768,32767,-12967,32767,-13449,32767,32767,-32768,-5404,32767,-843,32767,19065,32225,-32768,-20063,-32768,32767,-32768,32767,29538,-24468,-2900,-32768,10756,-32768,-32768,-32768,-32768,-11060,-27198,14164,-32768,-24113,7054,11765,-27370,-22579,-24708,-32768,32767,-13306,32767,-8551,-9705,-14950,32767,-32768,17787,32767,-32768,-32768,32767,32767,2070,29982,-32768,-11563,156,26562,-11988,-32768,32767,-20704,-4742,-32768,-32768,32767,-26107,32767,32767,-32768,2287,32767,-12544,12114,32767,17677,32767,-32768,-31686,-19839,-12392,31249,4900,-22112,-16467,28038,5202,23057,-16639,-32768,-32768,2760,-6639,32767,-32768,11332,225,15920,32767,-32768,-24447,32767,11923,22083,32767,-6406,-32768,22556,2064,9112,32767,-13200,-18013,-12356,30102,29887,-28002,32767,-32768,-24167,32767,20284,32767,32767,7900,-31395,32714,32767,32767,-30760,10768,-17239,-12512,-10506,-29275,-32768,-22071,-13682,16814,13271,32767,-32768,-32768,29754,-27253,-32768,19517,9374,-32768,32767,10938,-14229,-32768,-32768,27444,2071,3088,32767,10614,32767,-8175,-32768,32767,-32768,-27772,-32768,-32768,9923,32767,32767,-32768,5515,-32768,-23823,-32768,-32768,-22656,21646,32767,-32768,32767,17801,6854,-19376,-32768,-4861,-32768,-32768,13728,-6768,-32768,9349,32767,32767,-32768,-32768,-32768,-32768,-32768,-14694,-32768,-15272,6265,-32768,27231,28408,-30070,-32768,-32768,-32768,32767,18167,13376,15186,-22158,-32768,-32768,-32768,32767,-24581,-1105,-32768,-32768,32767,-32768,32767,-32768,-32768,-32768,-11670,28063,-32768,-32768,-29487,-32768,-2928,32767,-32768,32767,-32768,13057,29729,-24802,-9681,-32768,-32768,-8133,32767,32767,-10877,-32768,32767,-16913,-6219,-32768,-32768,32767,32767,-32768,-32768,4154,-32768,32767,-32768,4318,7419,-32768,-32768,-13640,-12301,-32768,-32768,15214,-32768,-32768,-25105,-32768,-8890,-32768,-32768,-10496,32767,-32768,-12749,-14874,-32768,30294,-32768,-32768,18667,-13699,-32768,-16154,1596,-32768,15984,13669,-32768,638,-24796,-32
768,31300,31930,24665,-32768,-6033,-32768,29112,32767,-32768,-32768,-32178,-32768,-32768,-32768,32767,32767,-23831,26476,-32768,-32768,-31198,-32768,-32768,-32768,-32768,-32768,-32768,-32768 diff --git a/tensorflow/lite/micro/integration_tests/seanet/conv/conv9_input0_int16.csv b/tensorflow/lite/micro/integration_tests/seanet/conv/conv9_input0_int16.csv new file mode 100644 index 0000000..edea4ad --- /dev/null +++ b/tensorflow/lite/micro/integration_tests/seanet/conv/conv9_input0_int16.csv @@ -0,0 +1 @@ +9764,-8932,12001,7283,-27925,-1848,-25787,-11192,13346,574,27062,500,22163,29199,51,25505,10119,17726,-20316,-13716,-10166,8718,16576,-18483,109,-24290,23910,12332,26863,-29196,-32544,18961,-26123,-8901,-18871,26153,66,-31275,21258,15027,-25826,-14267,-14964,8892,8771,-3322,7389,29622,-14751,31358,5170,-10927,11638,17044,-17557,11706,-17919,7642,-15939,-32703,-12871,19793,378,-18151,-15160,4579,8732,3146,12041,28722,-961,28465,-7514,-23251,-18905,20804,-21579,23959,-14705,-12178,19772,-29190,25691,-5800,2664,14423,23981,-26247,-3836,24487,-7012,11064,3873,31743,24326,-6208,28661,-8673,28661,-6258,-30676,-16465,21077,28505,-10025,-25498,-12980,-14949,17470,7683,13728,-15934,-9730,13871,-13481,-4161,-30885,-20532,29486,-5362,28363,-4749,-4119,-6272,-8629,-31553,1216,27045,14659,7961,11587,18448,28280,-18523,-11105,22087,-29681,-4439,23359,-19567,-25877,31284,24471,30299,-3832,22531,-18796,11432,5253,-7854,32732,32566,16268,-28344,-21860,5151,-16210,1855,32407,-6606,-12049,7243,-8847,-4991,-15954,20890,29288,11894,24853,7761,22586,-7979,-32687,6426,32349,-9876,8086,-31238,-23005,-25533,9827,6611,13621,16018,10783,-15320,-30611,1638,-4111,23866,19162,25496,16043,17586,23165,-26793,16660,29484,-18170,-19557,20641,-28519,-1167,10337,-9579,-19120,-17695,-3786,10477,23063,-27395,-22863,31632,-29935,29906,-1214,15724,19876,-6926,-13651,29471,13357,-24133,26902,-31458,-3916,-406,25646,15279,9733,11512,3670,-19230,-12424,-10037,-695,30517,13084,31903,29796,-17978,11822,14904,5
538,-27194,2227,-16721,26332,-20200,-30362,-25969,-4385,2351,-8355,5838,24444,4687,12369,-20646,13899,-20047,-26453,-18816,-30356,-29044,27980,-15331,-8055,-4206,-29048,-2885,-27419,10458,9902,3432,12044,29749,23121,-2555,-19799,11823,3711,-551,18265,-8708,3359,18118,5074,17580,-30286,-13889,-12714,9146,14008,27929,-1846,17437,-14293,-6419,28349,-15738,27071,-13540,-1406,6138,-27079,30954,-2818,-17580,-20626,482,27407,-17435,651,-31787,-10860,-29463,20137,-15254,7021,16589,-11622,13826,28418,363,1077,14906,-24667,-19486,26083,-5396,-3199,465,-1607,18505,-3343,-12086,-17022,-12761,21241,1893,5754,8005,25024,-386,21582,-18352,-28925,21437,-18914,4423,335,30280,23034,14503,-14840,-22271,-14065,-4476,-7401,-6365,-25043,-17806,15011,20106,32646,-7986,-5894,20000,26917,-23320,30816,8000,-12055,-32589,-31506,25191,13414,-884,-21346,-29627,9021,20574,24369,-31840,-25589,-25944,32644,16408,-6887,16386,3810,-4005,6134,10664,2954,-28752,-27764,31531,3389,-11092,14786,-24508,-19039,-24017,3692,-405,12,23003,27948,18902,2358,5751,30249,13932,28420,-27356,7703,-27887,16811,-11682,24566,-24309,14647,-27367,20987,15860,-13892,-12159,30768,-8301,-30536,2542,3823,-22243,-11163,-17074,4000,-6769,-23115,-4507,-28738,21172,6219,-19495,-20385,26370,24513,-16109,-21866,-9661,-20700,-7873,-4837,26366,-4197,-28238,-8284,26838,-12751,23464,-20463,-14657,-23316,-16101,-22152,6021,29550,-26383,-28794,721,-21928,-9210,30649,-1104,-10294,-12098,32279,29197,-22168,25046,5075,-12081,22565,26244,-17372,8036,27078,-16931,-17457,31024,10825,19973,4319,21147,-11307,-11806,-31739,27799,3157,-32537,-3697,-32007,-2184,16469,-30937,602,-2344,-16694,-13449,11492,-17897,-26969,9740,-14476,-19498,-16827,27942,-15127,-18472,3362,-31241,7609,10432,15740,-9686,20825,-3756,-14674,17118,14780,-8020,11473,23004,4233,-10483,10688,-23735,-8982,4131,11057,23181,28805,3711,-25623,-12450,-32558,3686,277,20862,27143,28628,-1836,31715,-25794,50,14985,-29733,28676,15785,17844,3553,22769,-4398,-13456,-6051,21040,-27518,-13
200,121,-14241,23714,-19664,25118,22181,5291,-18043,-6755,25755,22722,-1101,28655,4397,-23048,4987,3983,25525,19878,-11853,8812,18021,20085,8045,-21228,-15750,29246,-26474,-25206,15743,193,16621,-11025,11649,-15886,27649,-8517,22144,15863,-7312,20645,-12772,-13524,-20032,23490,-27045,-12268,5736,26998,-7542,1754,30242,-11735,1641,11579,22938,11140,20474,30183,26530,-9024,-3613,14695,-17648,-25238,-8949,-18628,6547,6097,-24050,2614,-10354,-22743,5265,-15484,19003,-25031,-23087,-6585,-10338,8443,22411,14308,2784,-4040,9210,-10250,26413,-16709,-18675,23914,12814,12345,30522,15279,-6954,-29531,30394,-19618,6270,22427,-11998,-306,641,5163,-20628,26578,4961,32220,20896,10423,70,-15963,-2330,9634,8786,-28191,14419,-20762,21295,-21915,14695,28222,9052,-3328,28566,26587,-8460,13223,30041,-18403,28196,-24745,-11961,-17531,32360,-23540,-30395,16109,-5473,-1557,-24641,13973,-29348,24063,-9314,3886,-20142,17106,-2509,24316,-4445,-28301,15305,24632,-15961,-573,-15420,29956,21308,-7359,-20734,31897,23005,-28270,30537,-14467,-7110,-21751,-4459,9058,3674,295,1730,10281,17801,-31592,17679,-19043,6688,-13648,-19003,-25717,20399,13038,10852,-13601,-23284,7606,-13181,-10533,15800,-21419,-354,-18580,-10598,-17737,27615,-909,-31482,-2215,12026,4625,15813,5078,32379,-12726,638,-31060,25906,5218,19225,-12836,-22402,24399,-32637,-14559,-24173,-30863,30857,-16235,-6542,19018,-28085,32374,8400,-4977,-20967,-22386,-32520,-6573,17118,789,-30252,22274,30026,-1036,-22890,-28908,-4420,-12728,7188,6193,-22399,-1113,-7193,-12242,4120,-22985,7144,25457,5075,-2423,-24333,22810,18007,20874,22287,-30168,-11391,254,1396,17675,-6977,-4166,10148,-3354,-29883,-15170,184,29685,9972,1288,-3962,30813,18794,-10549,-8634,-18153,-218,-15888,19664,-29398,2055,4893,-2256,22381,21472,16429,-16757,29442,10099,19361,31754,2980,-24107,-12375,12541,22060,-20225,-25805,-2057,-13179,11198,-28260,18878,-6797,-3324,1336,18811,-20396,-10096,-4136,-16630,-31042,-26474,5547,-9163,-22148,11216,-16747,21584,-15313,3414,-19751,-12
217,-27066,-8102,9887,-42,31050,-3486,10681,-23644,-232,24732,-32370,-32558,-24816,4826,27369,3624,24071,3827,-22538,29050,18349,26132,-29196,-3046,22184,-30131,4837,-24320,-24138,27404,-3956,-9284,9549,-10313,17220,23671,-10901,-2550,-7103,-29262,6626,-17154,18256,29260,-12330,-2140,20553,-12859,-318,-22325,-6143,-16386,-129,19551,-26818,-1092,-31508,8471,-20909,-10785,-1182,-3782,-10432,3357,13481,17163,-8552,-22543,-6547,-7363,13851,-30492,-10699,11002,24090,-6527,-8038,26735,423,-2250,21858,-8971,-28767,-32739,-10161,-28137,26245,-19989,21772,-19683,4411,-23323,1920,6383,16461,-16008,-31893,12384,26998,-30388,-8321,2218,25241,-26868,-18972,25229,21784,23272,-27171,-8542,29563,4531,-28789,27277,-7400,-4956,-9895,-20483,-1744,-12477,-19154,26613,-13745,17835,10935,6288,-32322,12828,17260,-1817,-2357,-8571,21828,11390,5834,24434,19428,11028,-17073,-3733,21297,16889,26605,-15264,23908,8447,14721,-24225,14124,18170,-10539,17752,14448,-20828,-549,-4595,22618,-14920,10007,-10748,13580,16314,-21367,-14436,-1718,17577,-16615,16755,-23046,-14155,-11460,-21133,10321,-21407,3782,24392,19141,-25378,24461,-5993,-5084,23700,27043,-28064,16707,-12589,-17384,4393,-7292,1912,134,-3819,-12794,9647,24823,10214,-32021,32641,-184,-27560,28804,-19058,20466,18200,-6097,-205,14980,-14265,-9604,-20252,8578,-20063,26090,-22825,17512,21508,-30905,29211,-9583,32626,-7852,2875,-11204,21135,-2188,29533,-25113,-26577,-19985,-9387,-21952,-23773,-13610,-23258,24617,22326,-19542,31093,2740,4165,-26746,-14625,-18144,1736,26084,-23694,102,-32714,28382,-5568,8169,-15695,25107,-17958,7636,-2758,16798,24681,1206,-11886,-5321,27925,-23432,-6737,-27145,31070,4288,-4665,2931,-21285,27534,-12727,26576,-17148,24135,-15290,-689,-31599,12217,967,-15683,3821,4873,-7367,-5223,-14649,27805,21385,8286,8120,-10342,13153,26156,2259,31668,29762,-17667,-15563,-2798,32129,12941,-19320,26685,26932,29502,-3491,13766,26371,-25266,-28432,-6110,28523,30754,-6095,19888,-15348,18695,17759,32175,-26191,19097,26467,17419,1224
9,16666,-25235,-17546,16281,22527,-9782,13988,-13739,10040,8888,28320,-25762,14360,-19556,21963,15522,26726,25850,9272,13158,2088,-17014,-11492,14789,25378,2995,662,23465,-29409,-20285,-13767,-28379,7372,21539,11191,1325,4227,-23911,10914,30400,25818,-16974,-27627,24757,20184,11793,9050,-31542,-5608,-25550,21305,-27846,-12851,-9347,13273,-6309,13482,14465,14363,14648,-10394,31197,14529,21309,-18591,-24831,29073,-8632,-15268,29618,-9222,-13230,10215,16829,1922,-2093,4943,-22933,4611,-20500,29008,2425,5572,19071,-29603,29294,-31572,-19561,25990,-26390,-26791,-1361,-27536,22803,-17319,-3339,-5226,-27192,20552,-26012,-14701,-5165,7516,5896,-4786,-2981,-3297,20316,-9069,-7627,26467,16221,-30298,-6854,-14922,27644,-13646,25727,28907,4657,-30071,-18137,18137,11899,32763,8405,-5099,-14470,-11545,18299,6832,27045,11736,-16728,-19518,-21406,30178,-27439,-18067,7852,12675,-3138,5876,-15657,-14678,-651,28812,31119,-11507,-29984,-28174,17785,-28753,-3180,16871,-26622,14069,-29507,-4403,-3401,16450,-7765,23592,9338,-31175,-4227,-6665,-15825,6682,16531,26727,18462,6849,6555,-4619,-21689,27494,26988,10338,20726,-915,-26137,15881,29638,8126,9248,-3442,4769,-20652,-32184,-21063,21097,9402,1740,-8955,19753,16183,19254,21102,7948,18644,-14890,13552,-19077,-22294,14181,22127,-25823,23475,18273,-6854,-10377,17026,11242,-31396,17148,-2448,-12599,31229,31508,31798,-2473,1854,-356,8219,-7495,-11639,-10010,25955,18420,-22961,-20032,15510,28222,-4783,26495,19238,-16495,-5221,-28471,-18085,31012,-1072,5237,-32271,-27406,20628,-17133,20358,-17183,-17830,4397,-28597,-2480,-8257,-8995,-27947,-6112,-31389,27579,-27419,-4095,9283,117,-7422,28143,-31585,-8577,-9122,2290,-28975,21512,-5925,27098,-24423,26071,-30046,-31145,-13224,18118,20210,17714,-5382,-25422,-12526,23118,19655,1625,21670,1011,3393,-28853,1749,19521,-9919,-28085,22996,-31626,-12056,155,21824,-2793,10135,-32429,19305,1272,-23683,-2944,179,-21307,-18604,5691,-10163,5458,-311,30027,19166,11520,-31371,-30718,24674,-10847,6531,15412,26768
,8424,13168,3456,-3398,16339,-27401,29048,3267,-18012,-9902,10893,24389,31576,-16063,26274,-30444,17000,25227,22839,1646,9808,-4270,-6485,-26973,31367,12039,-7212,20815,15535,12919,1407,-26753,2973,5950,-24192,7244,-30930,-11476,-4615,24066,-15638,232,-13032,-18222,15530,-8365,28972,16403,-8049,7131,2493,28680,-6000,-4277,30391,-28802,-530,26660,-16890,25350,-21450,23818,8988,15639,-23366,-28411,-27322,4548,14157,-28997,26858,2789,11152,-32022,8290,15772,-30786,-15585,-17464,-21109,-18050,-14856,-12477,-24896,32277,15862,22637,-18581,27847,2772,-22984,24313,20603,23267,17007,-21112,-14495,-18532,20958,-9692,23991,-25401,9126,963,17869,-1855,-16304,3126,-27863,4797,17181,-3901,-31183,-30572,-7708,23470,-21709,572,31883,23323,-3235,29841,-7774,21307,-30358,1118,-26597,7207,-8098,26133,19853,30154,-20751,-14077,-2370,24571,22619,4873,8387,-1021,8323,-4952,-12588,-1538,-9270,-17782,24068,9083,-9484,-636,-13894,8880,-23024,26695,-23731,-23395,14743,-17677,30113,21792,-6339,13629,8793,-21471,-20460,-30135,12922,28546,17657,-21374,1799,3527,-5075,-496,11188,17209,-26268,551,26332,-10764,5579,-23423,-24506,8791,-27638,-22714,-13525,30093,15399,7216,31471,8560,-1355,5125,17982,-3320,-2059,-8880,-11811,-28544,-29595,28440,-14880,-21506,22869,25919,-8178,18826,22185,6840,20359,-27593,24141,11377,-3757,-30922,-13247,3295,-4118,17731,24550,20812,30291,-25662,11335,-21911,-26109,-12662,-21882,-25854,-8347,-19785,12357,9301,16841,-7363,-13056,-25227,-22878,28732,-30256,16469,18869,-8530,13679,20974,16632,-15221,28334,-670,20475,-8167,2347,-19619,32439,26850,22136,-30801,26926,28989,11349,-18464,31024,-23493,-29838,-21974,25211,9738,16281,2934,19474,22171,-18487,6972,32376,2547,-10135,-19319,-16366,19675,-7826,-13183,-22777,-31201,2407,-14880,-25527,2313,20128,-28541,-17482,-6644,-22583,-27897,-30527,29525,5267,-25307,7861,-9451,531,-26035,-23908,31815,-26268,-21232,5876,21371,-5613,-28415,-30478,20680,-26501,2752,-22771,-27268,11933,3355,14847,-1238,-731,32469,3187,-14450,-12895,-
16295,-25524,20311,9259,5992,-31338,18132,-30947,14624,12125,3096,27648,-18533,32218,-15042,-11576,22137,19845,-20148,5887,-8546,-23361,11113,-12962,-25185,-12130,-27010,-18108,-18017,25203,20315,-20955,21857,19640,3167,-30285,2806,31903,-12560,14825,-30984,18471,-29252,2256,-25152,-196,10371,7305,11808,7040,8521,-16696,-549,-6200,-26413,18615,20764,-11197,-31349,-20842,25319,-833,-5012,22310,-7354,-31893,-18015,-26612,12818,26438,17664,-5900,-24684,24325,-2800,11285,-715,-12369,-25684,-29458,14091,18391,-27991,-12423,-11279,13951,-479,13564,6172,10618,629,15223,-31306,22628,-31023,-10024,24450,-3347,-32497,-20225,-27767,5912,30718,-13991,-8346,-28443,-26973,16208,-18644,-10723,14269,7415,-1472,12881,-7946,4689,-32643,-7168,-2908,-19538,-14120,18002,-31136,-25049,-25343,-17225,10432,3660,13595,-15426,14629,-9023,-27365,-13283,-5333,26870,14518,-30762,15846,31936,9722,-10140,19165,27861,24426,11466,27782,31295,-17002,10265,-11241,-784,17692,29115,20828,17117,8729,25471,-25622,-11800,-8325,11044,8553,5597,19109,28842,30821,-11928,-6777,19742,-6221,-31121,-15156,13461,-31272,-30864,-12359,-1963,31394,-3880,-21359,-11808,-5402,-7417,5439,-18266,-24229,-29461,-26201,86,-8103,-22101,18742,-4293,-13298,-8107,-7482,-16341,-16703,16888,-18747,-30660,2069,12961,-10087,-12857,-17531,22219,18411,-15486,427,11602,-24421,-17623,10996,26387,-24003,-23228,-16238,16180,-18711,19841,4511,4754,-26532,-27161,28978,23045,-1921,-17102,-30893,23564,20122,-791,-18297,1615,-30984,2307,16929,-13280,18388,-11358,22658,-8253,-8227,-14633,16366,2247,-16221,13421,13254,3023,-18102,-22980,24466,21590,-27413,25710,-15553,-23026,-10583,25237,30569,6238,32650,27472,14313,19198,22631,-23506,7644,-29684,-28880,-2138,-18203,7239,11647,-21459,-14372,10964,-7959,-9687,11869,-8651,18768,-9038,7845,7801,-714,5650,-21320,-29,19179,-32044,27291,-22766,23653,9790,-9392,26147,16015,1055,17056,20954,32121,10503,15939,4707,25973,-12071,31999,1229,28875,-8347,-19796,7046,-28563,-14356,-15759,17390,-27492,31688,14
181,-6171,-27353,-4871,-19903,-13347,2701,20318,13811,23889,-29421,23952,24063,-11725,9012,4008,-3355,-16337,-19389,10306,3869,5927,-15050,9721,-19998,3192,17506,21633,-16389,-17072,53,-13698,6582,-15023,-13061,16658,-11201,12400,-3434,15440,-17277,-4077,-389,-6308,-15141,25510,17769,4897,-8242,-6040,30719,-3214,11651,-21673,-263,25980,16062,-5480,-8792,28785,-19478,31343,-9468,-8888,-9764,7497,-3488,26720,-2732,27189,27708,-15846,22115,13820,-2755,-27764,-22279,-4638,-14605,26587,17628,-7329,11233,11804,-4128,-8129,5243,12762,479,32750,18934,-13817,14948,-9798,18871,25954,24723,-21996,11693,16364,-28148,-27869,-29098,-8194,308,-14804,-12447,-26130,-10175,-10930,-28030,-16496,-25897 diff --git a/tensorflow/lite/micro/integration_tests/seanet/conv/integration_tests.cc b/tensorflow/lite/micro/integration_tests/seanet/conv/integration_tests.cc new file mode 100644 index 0000000..0fa2898 --- /dev/null +++ b/tensorflow/lite/micro/integration_tests/seanet/conv/integration_tests.cc @@ -0,0 +1,290 @@ +/* Copyright 2022 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/ + +#include + +#include "python/tflite_micro/python_ops_resolver.h" +#include "tensorflow/lite/c/common.h" +#include "tensorflow/lite/micro/integration_tests/seanet/conv/conv0_golden_int16_test_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/conv/conv0_input0_int16_test_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/conv/conv0_model_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/conv/conv10_golden_int16_test_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/conv/conv10_input0_int16_test_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/conv/conv10_model_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/conv/conv11_golden_int16_test_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/conv/conv11_input0_int16_test_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/conv/conv11_model_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/conv/conv12_golden_int16_test_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/conv/conv12_input0_int16_test_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/conv/conv12_model_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/conv/conv13_golden_int16_test_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/conv/conv13_input0_int16_test_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/conv/conv13_model_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/conv/conv14_golden_int16_test_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/conv/conv14_input0_int16_test_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/conv/conv14_model_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/conv/conv15_golden_int16_test_data.h" +#include 
"tensorflow/lite/micro/integration_tests/seanet/conv/conv15_input0_int16_test_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/conv/conv15_model_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/conv/conv16_golden_int16_test_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/conv/conv16_input0_int16_test_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/conv/conv16_model_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/conv/conv17_golden_int16_test_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/conv/conv17_input0_int16_test_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/conv/conv17_model_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/conv/conv18_golden_int16_test_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/conv/conv18_input0_int16_test_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/conv/conv18_model_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/conv/conv19_golden_int16_test_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/conv/conv19_input0_int16_test_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/conv/conv19_model_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/conv/conv1_golden_int16_test_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/conv/conv1_input0_int16_test_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/conv/conv1_model_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/conv/conv20_golden_int16_test_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/conv/conv20_input0_int16_test_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/conv/conv20_model_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/conv/conv21_golden_int16_test_data.h" +#include 
"tensorflow/lite/micro/integration_tests/seanet/conv/conv21_input0_int16_test_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/conv/conv21_model_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/conv/conv2_golden_int16_test_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/conv/conv2_input0_int16_test_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/conv/conv2_model_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/conv/conv3_golden_int16_test_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/conv/conv3_input0_int16_test_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/conv/conv3_model_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/conv/conv4_golden_int16_test_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/conv/conv4_input0_int16_test_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/conv/conv4_model_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/conv/conv5_golden_int16_test_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/conv/conv5_input0_int16_test_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/conv/conv5_model_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/conv/conv6_golden_int16_test_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/conv/conv6_input0_int16_test_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/conv/conv6_model_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/conv/conv7_golden_int16_test_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/conv/conv7_input0_int16_test_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/conv/conv7_model_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/conv/conv8_golden_int16_test_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/conv/conv8_input0_int16_test_data.h" 
+#include "tensorflow/lite/micro/integration_tests/seanet/conv/conv8_model_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/conv/conv9_golden_int16_test_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/conv/conv9_input0_int16_test_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/conv/conv9_model_data.h" +#include "tensorflow/lite/micro/micro_log.h" +#include "tensorflow/lite/micro/micro_profiler.h" +#include "tensorflow/lite/micro/recording_micro_allocator.h" +#include "tensorflow/lite/micro/recording_micro_interpreter.h" +#include "tensorflow/lite/micro/system_setup.h" +#include "tensorflow/lite/micro/testing/micro_test.h" + +constexpr size_t kTensorArenaSize = 1024 * 100; +uint8_t tensor_arena[kTensorArenaSize]; + +namespace tflite { +namespace micro { +namespace { + +void RunModel(const uint8_t* model, const int16_t* input0, + const uint32_t input0_size, const int16_t* golden, + const uint32_t golden_size, const char* name) { + InitializeTarget(); + MicroProfiler profiler; + PythonOpsResolver op_resolver; + + MicroInterpreter interpreter(GetModel(model), op_resolver, tensor_arena, + kTensorArenaSize, nullptr, &profiler); + interpreter.AllocateTensors(); + TfLiteTensor* input_tensor0 = interpreter.input(0); + TF_LITE_MICRO_EXPECT_EQ(input_tensor0->bytes, input0_size * sizeof(int16_t)); + memcpy(interpreter.input(0)->data.raw, input0, input_tensor0->bytes); + if (kTfLiteOk != interpreter.Invoke()) { + TF_LITE_MICRO_EXPECT(false); + return; + } + profiler.Log(); + MicroPrintf(""); + + TfLiteTensor* output_tensor = interpreter.output(0); + TF_LITE_MICRO_EXPECT_EQ(output_tensor->bytes, golden_size * sizeof(int16_t)); + int16_t* output = ::tflite::GetTensorData(output_tensor); + for (uint32_t i = 0; i < golden_size; i++) { + // TODO(b/205046520): Better understand why TfLite and TFLM can sometimes be + // off by 1. 
+ TF_LITE_MICRO_EXPECT_NEAR(golden[i], output[i], 1); + } +} + +} // namespace +} // namespace micro +} // namespace tflite + +TF_LITE_MICRO_TESTS_BEGIN + +TF_LITE_MICRO_TEST(conv0_test) { + tflite::micro::RunModel(g_conv0_model_data, g_conv0_input0_int16_test_data, + g_conv0_input0_int16_test_data_size, + g_conv0_golden_int16_test_data, + g_conv0_golden_int16_test_data_size, "conv0 test"); +} + +TF_LITE_MICRO_TEST(conv1_test) { + tflite::micro::RunModel(g_conv1_model_data, g_conv1_input0_int16_test_data, + g_conv1_input0_int16_test_data_size, + g_conv1_golden_int16_test_data, + g_conv1_golden_int16_test_data_size, "conv1 test"); +} + +TF_LITE_MICRO_TEST(conv2_test) { + tflite::micro::RunModel(g_conv2_model_data, g_conv2_input0_int16_test_data, + g_conv2_input0_int16_test_data_size, + g_conv2_golden_int16_test_data, + g_conv2_golden_int16_test_data_size, "conv2 test"); +} + +TF_LITE_MICRO_TEST(conv3_test) { + tflite::micro::RunModel(g_conv3_model_data, g_conv3_input0_int16_test_data, + g_conv3_input0_int16_test_data_size, + g_conv3_golden_int16_test_data, + g_conv3_golden_int16_test_data_size, "conv3 test"); +} + +TF_LITE_MICRO_TEST(conv4_test) { + tflite::micro::RunModel(g_conv4_model_data, g_conv4_input0_int16_test_data, + g_conv4_input0_int16_test_data_size, + g_conv4_golden_int16_test_data, + g_conv4_golden_int16_test_data_size, "conv4 test"); +} + +TF_LITE_MICRO_TEST(conv5_test) { + tflite::micro::RunModel(g_conv5_model_data, g_conv5_input0_int16_test_data, + g_conv5_input0_int16_test_data_size, + g_conv5_golden_int16_test_data, + g_conv5_golden_int16_test_data_size, "conv5 test"); +} + +TF_LITE_MICRO_TEST(conv6_test) { + tflite::micro::RunModel(g_conv6_model_data, g_conv6_input0_int16_test_data, + g_conv6_input0_int16_test_data_size, + g_conv6_golden_int16_test_data, + g_conv6_golden_int16_test_data_size, "conv6 test"); +} + +TF_LITE_MICRO_TEST(conv7_test) { + tflite::micro::RunModel(g_conv7_model_data, g_conv7_input0_int16_test_data, + 
g_conv7_input0_int16_test_data_size, + g_conv7_golden_int16_test_data, + g_conv7_golden_int16_test_data_size, "conv7 test"); +} + +TF_LITE_MICRO_TEST(conv8_test) { + tflite::micro::RunModel(g_conv8_model_data, g_conv8_input0_int16_test_data, + g_conv8_input0_int16_test_data_size, + g_conv8_golden_int16_test_data, + g_conv8_golden_int16_test_data_size, "conv8 test"); +} + +TF_LITE_MICRO_TEST(conv9_test) { + tflite::micro::RunModel(g_conv9_model_data, g_conv9_input0_int16_test_data, + g_conv9_input0_int16_test_data_size, + g_conv9_golden_int16_test_data, + g_conv9_golden_int16_test_data_size, "conv9 test"); +} + +TF_LITE_MICRO_TEST(conv10_test) { + tflite::micro::RunModel(g_conv10_model_data, g_conv10_input0_int16_test_data, + g_conv10_input0_int16_test_data_size, + g_conv10_golden_int16_test_data, + g_conv10_golden_int16_test_data_size, "conv10 test"); +} + +TF_LITE_MICRO_TEST(conv11_test) { + tflite::micro::RunModel(g_conv11_model_data, g_conv11_input0_int16_test_data, + g_conv11_input0_int16_test_data_size, + g_conv11_golden_int16_test_data, + g_conv11_golden_int16_test_data_size, "conv11 test"); +} + +TF_LITE_MICRO_TEST(conv12_test) { + tflite::micro::RunModel(g_conv12_model_data, g_conv12_input0_int16_test_data, + g_conv12_input0_int16_test_data_size, + g_conv12_golden_int16_test_data, + g_conv12_golden_int16_test_data_size, "conv12 test"); +} + +TF_LITE_MICRO_TEST(conv13_test) { + tflite::micro::RunModel(g_conv13_model_data, g_conv13_input0_int16_test_data, + g_conv13_input0_int16_test_data_size, + g_conv13_golden_int16_test_data, + g_conv13_golden_int16_test_data_size, "conv13 test"); +} + +TF_LITE_MICRO_TEST(conv14_test) { + tflite::micro::RunModel(g_conv14_model_data, g_conv14_input0_int16_test_data, + g_conv14_input0_int16_test_data_size, + g_conv14_golden_int16_test_data, + g_conv14_golden_int16_test_data_size, "conv14 test"); +} + +TF_LITE_MICRO_TEST(conv15_test) { + tflite::micro::RunModel(g_conv15_model_data, g_conv15_input0_int16_test_data, + 
g_conv15_input0_int16_test_data_size, + g_conv15_golden_int16_test_data, + g_conv15_golden_int16_test_data_size, "conv15 test"); +} + +TF_LITE_MICRO_TEST(conv16_test) { + tflite::micro::RunModel(g_conv16_model_data, g_conv16_input0_int16_test_data, + g_conv16_input0_int16_test_data_size, + g_conv16_golden_int16_test_data, + g_conv16_golden_int16_test_data_size, "conv16 test"); +} + +TF_LITE_MICRO_TEST(conv17_test) { + tflite::micro::RunModel(g_conv17_model_data, g_conv17_input0_int16_test_data, + g_conv17_input0_int16_test_data_size, + g_conv17_golden_int16_test_data, + g_conv17_golden_int16_test_data_size, "conv17 test"); +} + +TF_LITE_MICRO_TEST(conv18_test) { + tflite::micro::RunModel(g_conv18_model_data, g_conv18_input0_int16_test_data, + g_conv18_input0_int16_test_data_size, + g_conv18_golden_int16_test_data, + g_conv18_golden_int16_test_data_size, "conv18 test"); +} + +TF_LITE_MICRO_TEST(conv19_test) { + tflite::micro::RunModel(g_conv19_model_data, g_conv19_input0_int16_test_data, + g_conv19_input0_int16_test_data_size, + g_conv19_golden_int16_test_data, + g_conv19_golden_int16_test_data_size, "conv19 test"); +} + +TF_LITE_MICRO_TEST(conv20_test) { + tflite::micro::RunModel(g_conv20_model_data, g_conv20_input0_int16_test_data, + g_conv20_input0_int16_test_data_size, + g_conv20_golden_int16_test_data, + g_conv20_golden_int16_test_data_size, "conv20 test"); +} + +TF_LITE_MICRO_TEST(conv21_test) { + tflite::micro::RunModel(g_conv21_model_data, g_conv21_input0_int16_test_data, + g_conv21_input0_int16_test_data_size, + g_conv21_golden_int16_test_data, + g_conv21_golden_int16_test_data_size, "conv21 test"); +} + +TF_LITE_MICRO_TESTS_END diff --git a/tensorflow/lite/micro/integration_tests/seanet/leaky_relu/BUILD b/tensorflow/lite/micro/integration_tests/seanet/leaky_relu/BUILD new file mode 100644 index 0000000..597aa7e --- /dev/null +++ b/tensorflow/lite/micro/integration_tests/seanet/leaky_relu/BUILD @@ -0,0 +1,1007 @@ +# Description: +# generated integration 
test for one specific kernel in a model. +load( + "//tensorflow/lite/micro:build_def.bzl", + "generate_cc_arrays", + "micro_copts", +) + +package( + default_visibility = ["//visibility:public"], + # Disabling layering_check because of http://b/177257332 + features = ["-layering_check"], + licenses = ["notice"], +) + +generate_cc_arrays( + name = "generated_leaky_relu0_model_data_cc", + src = "leaky_relu0.tflite", + out = "leaky_relu0_model_data.cc", +) + +generate_cc_arrays( + name = "generated_leaky_relu0_model_data_hdr", + src = "leaky_relu0.tflite", + out = "leaky_relu0_model_data.h", +) + +generate_cc_arrays( + name = "generated_leaky_relu1_model_data_cc", + src = "leaky_relu1.tflite", + out = "leaky_relu1_model_data.cc", +) + +generate_cc_arrays( + name = "generated_leaky_relu1_model_data_hdr", + src = "leaky_relu1.tflite", + out = "leaky_relu1_model_data.h", +) + +generate_cc_arrays( + name = "generated_leaky_relu2_model_data_cc", + src = "leaky_relu2.tflite", + out = "leaky_relu2_model_data.cc", +) + +generate_cc_arrays( + name = "generated_leaky_relu2_model_data_hdr", + src = "leaky_relu2.tflite", + out = "leaky_relu2_model_data.h", +) + +generate_cc_arrays( + name = "generated_leaky_relu3_model_data_cc", + src = "leaky_relu3.tflite", + out = "leaky_relu3_model_data.cc", +) + +generate_cc_arrays( + name = "generated_leaky_relu3_model_data_hdr", + src = "leaky_relu3.tflite", + out = "leaky_relu3_model_data.h", +) + +generate_cc_arrays( + name = "generated_leaky_relu4_model_data_cc", + src = "leaky_relu4.tflite", + out = "leaky_relu4_model_data.cc", +) + +generate_cc_arrays( + name = "generated_leaky_relu4_model_data_hdr", + src = "leaky_relu4.tflite", + out = "leaky_relu4_model_data.h", +) + +generate_cc_arrays( + name = "generated_leaky_relu5_model_data_cc", + src = "leaky_relu5.tflite", + out = "leaky_relu5_model_data.cc", +) + +generate_cc_arrays( + name = "generated_leaky_relu5_model_data_hdr", + src = "leaky_relu5.tflite", + out = 
"leaky_relu5_model_data.h", +) + +generate_cc_arrays( + name = "generated_leaky_relu6_model_data_cc", + src = "leaky_relu6.tflite", + out = "leaky_relu6_model_data.cc", +) + +generate_cc_arrays( + name = "generated_leaky_relu6_model_data_hdr", + src = "leaky_relu6.tflite", + out = "leaky_relu6_model_data.h", +) + +generate_cc_arrays( + name = "generated_leaky_relu7_model_data_cc", + src = "leaky_relu7.tflite", + out = "leaky_relu7_model_data.cc", +) + +generate_cc_arrays( + name = "generated_leaky_relu7_model_data_hdr", + src = "leaky_relu7.tflite", + out = "leaky_relu7_model_data.h", +) + +generate_cc_arrays( + name = "generated_leaky_relu8_model_data_cc", + src = "leaky_relu8.tflite", + out = "leaky_relu8_model_data.cc", +) + +generate_cc_arrays( + name = "generated_leaky_relu8_model_data_hdr", + src = "leaky_relu8.tflite", + out = "leaky_relu8_model_data.h", +) + +generate_cc_arrays( + name = "generated_leaky_relu9_model_data_cc", + src = "leaky_relu9.tflite", + out = "leaky_relu9_model_data.cc", +) + +generate_cc_arrays( + name = "generated_leaky_relu9_model_data_hdr", + src = "leaky_relu9.tflite", + out = "leaky_relu9_model_data.h", +) + +generate_cc_arrays( + name = "generated_leaky_relu10_model_data_cc", + src = "leaky_relu10.tflite", + out = "leaky_relu10_model_data.cc", +) + +generate_cc_arrays( + name = "generated_leaky_relu10_model_data_hdr", + src = "leaky_relu10.tflite", + out = "leaky_relu10_model_data.h", +) + +generate_cc_arrays( + name = "generated_leaky_relu11_model_data_cc", + src = "leaky_relu11.tflite", + out = "leaky_relu11_model_data.cc", +) + +generate_cc_arrays( + name = "generated_leaky_relu11_model_data_hdr", + src = "leaky_relu11.tflite", + out = "leaky_relu11_model_data.h", +) + +generate_cc_arrays( + name = "generated_leaky_relu12_model_data_cc", + src = "leaky_relu12.tflite", + out = "leaky_relu12_model_data.cc", +) + +generate_cc_arrays( + name = "generated_leaky_relu12_model_data_hdr", + src = "leaky_relu12.tflite", + out = 
"leaky_relu12_model_data.h", +) + +generate_cc_arrays( + name = "generated_leaky_relu13_model_data_cc", + src = "leaky_relu13.tflite", + out = "leaky_relu13_model_data.cc", +) + +generate_cc_arrays( + name = "generated_leaky_relu13_model_data_hdr", + src = "leaky_relu13.tflite", + out = "leaky_relu13_model_data.h", +) + +generate_cc_arrays( + name = "generated_leaky_relu14_model_data_cc", + src = "leaky_relu14.tflite", + out = "leaky_relu14_model_data.cc", +) + +generate_cc_arrays( + name = "generated_leaky_relu14_model_data_hdr", + src = "leaky_relu14.tflite", + out = "leaky_relu14_model_data.h", +) + +generate_cc_arrays( + name = "generated_leaky_relu15_model_data_cc", + src = "leaky_relu15.tflite", + out = "leaky_relu15_model_data.cc", +) + +generate_cc_arrays( + name = "generated_leaky_relu15_model_data_hdr", + src = "leaky_relu15.tflite", + out = "leaky_relu15_model_data.h", +) + +generate_cc_arrays( + name = "generated_leaky_relu16_model_data_cc", + src = "leaky_relu16.tflite", + out = "leaky_relu16_model_data.cc", +) + +generate_cc_arrays( + name = "generated_leaky_relu16_model_data_hdr", + src = "leaky_relu16.tflite", + out = "leaky_relu16_model_data.h", +) + +generate_cc_arrays( + name = "generated_leaky_relu17_model_data_cc", + src = "leaky_relu17.tflite", + out = "leaky_relu17_model_data.cc", +) + +generate_cc_arrays( + name = "generated_leaky_relu17_model_data_hdr", + src = "leaky_relu17.tflite", + out = "leaky_relu17_model_data.h", +) + +generate_cc_arrays( + name = "generated_leaky_relu18_model_data_cc", + src = "leaky_relu18.tflite", + out = "leaky_relu18_model_data.cc", +) + +generate_cc_arrays( + name = "generated_leaky_relu18_model_data_hdr", + src = "leaky_relu18.tflite", + out = "leaky_relu18_model_data.h", +) + +generate_cc_arrays( + name = "generated_leaky_relu19_model_data_cc", + src = "leaky_relu19.tflite", + out = "leaky_relu19_model_data.cc", +) + +generate_cc_arrays( + name = "generated_leaky_relu19_model_data_hdr", + src = 
"leaky_relu19.tflite", + out = "leaky_relu19_model_data.h", +) + +generate_cc_arrays( + name = "generated_leaky_relu20_model_data_cc", + src = "leaky_relu20.tflite", + out = "leaky_relu20_model_data.cc", +) + +generate_cc_arrays( + name = "generated_leaky_relu20_model_data_hdr", + src = "leaky_relu20.tflite", + out = "leaky_relu20_model_data.h", +) + +generate_cc_arrays( + name = "generated_leaky_relu21_model_data_cc", + src = "leaky_relu21.tflite", + out = "leaky_relu21_model_data.cc", +) + +generate_cc_arrays( + name = "generated_leaky_relu21_model_data_hdr", + src = "leaky_relu21.tflite", + out = "leaky_relu21_model_data.h", +) + +generate_cc_arrays( + name = "generated_leaky_relu22_model_data_cc", + src = "leaky_relu22.tflite", + out = "leaky_relu22_model_data.cc", +) + +generate_cc_arrays( + name = "generated_leaky_relu22_model_data_hdr", + src = "leaky_relu22.tflite", + out = "leaky_relu22_model_data.h", +) + +generate_cc_arrays( + name = "generated_leaky_relu0_input0_int16_test_data_cc", + src = "leaky_relu0_input0_int16.csv", + out = "leaky_relu0_input0_int16_test_data.cc", +) + +generate_cc_arrays( + name = "generated_leaky_relu0_input0_int16_test_data_hdr", + src = "leaky_relu0_input0_int16.csv", + out = "leaky_relu0_input0_int16_test_data.h", +) + +generate_cc_arrays( + name = "generated_leaky_relu0_golden_int16_test_data_cc", + src = "leaky_relu0_golden_int16.csv", + out = "leaky_relu0_golden_int16_test_data.cc", +) + +generate_cc_arrays( + name = "generated_leaky_relu0_golden_int16_test_data_hdr", + src = "leaky_relu0_golden_int16.csv", + out = "leaky_relu0_golden_int16_test_data.h", +) + +generate_cc_arrays( + name = "generated_leaky_relu1_input0_int16_test_data_cc", + src = "leaky_relu1_input0_int16.csv", + out = "leaky_relu1_input0_int16_test_data.cc", +) + +generate_cc_arrays( + name = "generated_leaky_relu1_input0_int16_test_data_hdr", + src = "leaky_relu1_input0_int16.csv", + out = "leaky_relu1_input0_int16_test_data.h", +) + +generate_cc_arrays( 
+ name = "generated_leaky_relu1_golden_int16_test_data_cc", + src = "leaky_relu1_golden_int16.csv", + out = "leaky_relu1_golden_int16_test_data.cc", +) + +generate_cc_arrays( + name = "generated_leaky_relu1_golden_int16_test_data_hdr", + src = "leaky_relu1_golden_int16.csv", + out = "leaky_relu1_golden_int16_test_data.h", +) + +generate_cc_arrays( + name = "generated_leaky_relu2_input0_int16_test_data_cc", + src = "leaky_relu2_input0_int16.csv", + out = "leaky_relu2_input0_int16_test_data.cc", +) + +generate_cc_arrays( + name = "generated_leaky_relu2_input0_int16_test_data_hdr", + src = "leaky_relu2_input0_int16.csv", + out = "leaky_relu2_input0_int16_test_data.h", +) + +generate_cc_arrays( + name = "generated_leaky_relu2_golden_int16_test_data_cc", + src = "leaky_relu2_golden_int16.csv", + out = "leaky_relu2_golden_int16_test_data.cc", +) + +generate_cc_arrays( + name = "generated_leaky_relu2_golden_int16_test_data_hdr", + src = "leaky_relu2_golden_int16.csv", + out = "leaky_relu2_golden_int16_test_data.h", +) + +generate_cc_arrays( + name = "generated_leaky_relu3_input0_int16_test_data_cc", + src = "leaky_relu3_input0_int16.csv", + out = "leaky_relu3_input0_int16_test_data.cc", +) + +generate_cc_arrays( + name = "generated_leaky_relu3_input0_int16_test_data_hdr", + src = "leaky_relu3_input0_int16.csv", + out = "leaky_relu3_input0_int16_test_data.h", +) + +generate_cc_arrays( + name = "generated_leaky_relu3_golden_int16_test_data_cc", + src = "leaky_relu3_golden_int16.csv", + out = "leaky_relu3_golden_int16_test_data.cc", +) + +generate_cc_arrays( + name = "generated_leaky_relu3_golden_int16_test_data_hdr", + src = "leaky_relu3_golden_int16.csv", + out = "leaky_relu3_golden_int16_test_data.h", +) + +generate_cc_arrays( + name = "generated_leaky_relu4_input0_int16_test_data_cc", + src = "leaky_relu4_input0_int16.csv", + out = "leaky_relu4_input0_int16_test_data.cc", +) + +generate_cc_arrays( + name = "generated_leaky_relu4_input0_int16_test_data_hdr", + src = 
"leaky_relu4_input0_int16.csv", + out = "leaky_relu4_input0_int16_test_data.h", +) + +generate_cc_arrays( + name = "generated_leaky_relu4_golden_int16_test_data_cc", + src = "leaky_relu4_golden_int16.csv", + out = "leaky_relu4_golden_int16_test_data.cc", +) + +generate_cc_arrays( + name = "generated_leaky_relu4_golden_int16_test_data_hdr", + src = "leaky_relu4_golden_int16.csv", + out = "leaky_relu4_golden_int16_test_data.h", +) + +generate_cc_arrays( + name = "generated_leaky_relu5_input0_int16_test_data_cc", + src = "leaky_relu5_input0_int16.csv", + out = "leaky_relu5_input0_int16_test_data.cc", +) + +generate_cc_arrays( + name = "generated_leaky_relu5_input0_int16_test_data_hdr", + src = "leaky_relu5_input0_int16.csv", + out = "leaky_relu5_input0_int16_test_data.h", +) + +generate_cc_arrays( + name = "generated_leaky_relu5_golden_int16_test_data_cc", + src = "leaky_relu5_golden_int16.csv", + out = "leaky_relu5_golden_int16_test_data.cc", +) + +generate_cc_arrays( + name = "generated_leaky_relu5_golden_int16_test_data_hdr", + src = "leaky_relu5_golden_int16.csv", + out = "leaky_relu5_golden_int16_test_data.h", +) + +generate_cc_arrays( + name = "generated_leaky_relu6_input0_int16_test_data_cc", + src = "leaky_relu6_input0_int16.csv", + out = "leaky_relu6_input0_int16_test_data.cc", +) + +generate_cc_arrays( + name = "generated_leaky_relu6_input0_int16_test_data_hdr", + src = "leaky_relu6_input0_int16.csv", + out = "leaky_relu6_input0_int16_test_data.h", +) + +generate_cc_arrays( + name = "generated_leaky_relu6_golden_int16_test_data_cc", + src = "leaky_relu6_golden_int16.csv", + out = "leaky_relu6_golden_int16_test_data.cc", +) + +generate_cc_arrays( + name = "generated_leaky_relu6_golden_int16_test_data_hdr", + src = "leaky_relu6_golden_int16.csv", + out = "leaky_relu6_golden_int16_test_data.h", +) + +generate_cc_arrays( + name = "generated_leaky_relu7_input0_int16_test_data_cc", + src = "leaky_relu7_input0_int16.csv", + out = 
"leaky_relu7_input0_int16_test_data.cc", +) + +generate_cc_arrays( + name = "generated_leaky_relu7_input0_int16_test_data_hdr", + src = "leaky_relu7_input0_int16.csv", + out = "leaky_relu7_input0_int16_test_data.h", +) + +generate_cc_arrays( + name = "generated_leaky_relu7_golden_int16_test_data_cc", + src = "leaky_relu7_golden_int16.csv", + out = "leaky_relu7_golden_int16_test_data.cc", +) + +generate_cc_arrays( + name = "generated_leaky_relu7_golden_int16_test_data_hdr", + src = "leaky_relu7_golden_int16.csv", + out = "leaky_relu7_golden_int16_test_data.h", +) + +generate_cc_arrays( + name = "generated_leaky_relu8_input0_int16_test_data_cc", + src = "leaky_relu8_input0_int16.csv", + out = "leaky_relu8_input0_int16_test_data.cc", +) + +generate_cc_arrays( + name = "generated_leaky_relu8_input0_int16_test_data_hdr", + src = "leaky_relu8_input0_int16.csv", + out = "leaky_relu8_input0_int16_test_data.h", +) + +generate_cc_arrays( + name = "generated_leaky_relu8_golden_int16_test_data_cc", + src = "leaky_relu8_golden_int16.csv", + out = "leaky_relu8_golden_int16_test_data.cc", +) + +generate_cc_arrays( + name = "generated_leaky_relu8_golden_int16_test_data_hdr", + src = "leaky_relu8_golden_int16.csv", + out = "leaky_relu8_golden_int16_test_data.h", +) + +generate_cc_arrays( + name = "generated_leaky_relu9_input0_int16_test_data_cc", + src = "leaky_relu9_input0_int16.csv", + out = "leaky_relu9_input0_int16_test_data.cc", +) + +generate_cc_arrays( + name = "generated_leaky_relu9_input0_int16_test_data_hdr", + src = "leaky_relu9_input0_int16.csv", + out = "leaky_relu9_input0_int16_test_data.h", +) + +generate_cc_arrays( + name = "generated_leaky_relu9_golden_int16_test_data_cc", + src = "leaky_relu9_golden_int16.csv", + out = "leaky_relu9_golden_int16_test_data.cc", +) + +generate_cc_arrays( + name = "generated_leaky_relu9_golden_int16_test_data_hdr", + src = "leaky_relu9_golden_int16.csv", + out = "leaky_relu9_golden_int16_test_data.h", +) + +generate_cc_arrays( + name 
= "generated_leaky_relu10_input0_int16_test_data_cc", + src = "leaky_relu10_input0_int16.csv", + out = "leaky_relu10_input0_int16_test_data.cc", +) + +generate_cc_arrays( + name = "generated_leaky_relu10_input0_int16_test_data_hdr", + src = "leaky_relu10_input0_int16.csv", + out = "leaky_relu10_input0_int16_test_data.h", +) + +generate_cc_arrays( + name = "generated_leaky_relu10_golden_int16_test_data_cc", + src = "leaky_relu10_golden_int16.csv", + out = "leaky_relu10_golden_int16_test_data.cc", +) + +generate_cc_arrays( + name = "generated_leaky_relu10_golden_int16_test_data_hdr", + src = "leaky_relu10_golden_int16.csv", + out = "leaky_relu10_golden_int16_test_data.h", +) + +generate_cc_arrays( + name = "generated_leaky_relu11_input0_int16_test_data_cc", + src = "leaky_relu11_input0_int16.csv", + out = "leaky_relu11_input0_int16_test_data.cc", +) + +generate_cc_arrays( + name = "generated_leaky_relu11_input0_int16_test_data_hdr", + src = "leaky_relu11_input0_int16.csv", + out = "leaky_relu11_input0_int16_test_data.h", +) + +generate_cc_arrays( + name = "generated_leaky_relu11_golden_int16_test_data_cc", + src = "leaky_relu11_golden_int16.csv", + out = "leaky_relu11_golden_int16_test_data.cc", +) + +generate_cc_arrays( + name = "generated_leaky_relu11_golden_int16_test_data_hdr", + src = "leaky_relu11_golden_int16.csv", + out = "leaky_relu11_golden_int16_test_data.h", +) + +generate_cc_arrays( + name = "generated_leaky_relu12_input0_int16_test_data_cc", + src = "leaky_relu12_input0_int16.csv", + out = "leaky_relu12_input0_int16_test_data.cc", +) + +generate_cc_arrays( + name = "generated_leaky_relu12_input0_int16_test_data_hdr", + src = "leaky_relu12_input0_int16.csv", + out = "leaky_relu12_input0_int16_test_data.h", +) + +generate_cc_arrays( + name = "generated_leaky_relu12_golden_int16_test_data_cc", + src = "leaky_relu12_golden_int16.csv", + out = "leaky_relu12_golden_int16_test_data.cc", +) + +generate_cc_arrays( + name = 
"generated_leaky_relu12_golden_int16_test_data_hdr", + src = "leaky_relu12_golden_int16.csv", + out = "leaky_relu12_golden_int16_test_data.h", +) + +generate_cc_arrays( + name = "generated_leaky_relu13_input0_int16_test_data_cc", + src = "leaky_relu13_input0_int16.csv", + out = "leaky_relu13_input0_int16_test_data.cc", +) + +generate_cc_arrays( + name = "generated_leaky_relu13_input0_int16_test_data_hdr", + src = "leaky_relu13_input0_int16.csv", + out = "leaky_relu13_input0_int16_test_data.h", +) + +generate_cc_arrays( + name = "generated_leaky_relu13_golden_int16_test_data_cc", + src = "leaky_relu13_golden_int16.csv", + out = "leaky_relu13_golden_int16_test_data.cc", +) + +generate_cc_arrays( + name = "generated_leaky_relu13_golden_int16_test_data_hdr", + src = "leaky_relu13_golden_int16.csv", + out = "leaky_relu13_golden_int16_test_data.h", +) + +generate_cc_arrays( + name = "generated_leaky_relu14_input0_int16_test_data_cc", + src = "leaky_relu14_input0_int16.csv", + out = "leaky_relu14_input0_int16_test_data.cc", +) + +generate_cc_arrays( + name = "generated_leaky_relu14_input0_int16_test_data_hdr", + src = "leaky_relu14_input0_int16.csv", + out = "leaky_relu14_input0_int16_test_data.h", +) + +generate_cc_arrays( + name = "generated_leaky_relu14_golden_int16_test_data_cc", + src = "leaky_relu14_golden_int16.csv", + out = "leaky_relu14_golden_int16_test_data.cc", +) + +generate_cc_arrays( + name = "generated_leaky_relu14_golden_int16_test_data_hdr", + src = "leaky_relu14_golden_int16.csv", + out = "leaky_relu14_golden_int16_test_data.h", +) + +generate_cc_arrays( + name = "generated_leaky_relu15_input0_int16_test_data_cc", + src = "leaky_relu15_input0_int16.csv", + out = "leaky_relu15_input0_int16_test_data.cc", +) + +generate_cc_arrays( + name = "generated_leaky_relu15_input0_int16_test_data_hdr", + src = "leaky_relu15_input0_int16.csv", + out = "leaky_relu15_input0_int16_test_data.h", +) + +generate_cc_arrays( + name = 
"generated_leaky_relu15_golden_int16_test_data_cc", + src = "leaky_relu15_golden_int16.csv", + out = "leaky_relu15_golden_int16_test_data.cc", +) + +generate_cc_arrays( + name = "generated_leaky_relu15_golden_int16_test_data_hdr", + src = "leaky_relu15_golden_int16.csv", + out = "leaky_relu15_golden_int16_test_data.h", +) + +generate_cc_arrays( + name = "generated_leaky_relu16_input0_int16_test_data_cc", + src = "leaky_relu16_input0_int16.csv", + out = "leaky_relu16_input0_int16_test_data.cc", +) + +generate_cc_arrays( + name = "generated_leaky_relu16_input0_int16_test_data_hdr", + src = "leaky_relu16_input0_int16.csv", + out = "leaky_relu16_input0_int16_test_data.h", +) + +generate_cc_arrays( + name = "generated_leaky_relu16_golden_int16_test_data_cc", + src = "leaky_relu16_golden_int16.csv", + out = "leaky_relu16_golden_int16_test_data.cc", +) + +generate_cc_arrays( + name = "generated_leaky_relu16_golden_int16_test_data_hdr", + src = "leaky_relu16_golden_int16.csv", + out = "leaky_relu16_golden_int16_test_data.h", +) + +generate_cc_arrays( + name = "generated_leaky_relu17_input0_int16_test_data_cc", + src = "leaky_relu17_input0_int16.csv", + out = "leaky_relu17_input0_int16_test_data.cc", +) + +generate_cc_arrays( + name = "generated_leaky_relu17_input0_int16_test_data_hdr", + src = "leaky_relu17_input0_int16.csv", + out = "leaky_relu17_input0_int16_test_data.h", +) + +generate_cc_arrays( + name = "generated_leaky_relu17_golden_int16_test_data_cc", + src = "leaky_relu17_golden_int16.csv", + out = "leaky_relu17_golden_int16_test_data.cc", +) + +generate_cc_arrays( + name = "generated_leaky_relu17_golden_int16_test_data_hdr", + src = "leaky_relu17_golden_int16.csv", + out = "leaky_relu17_golden_int16_test_data.h", +) + +generate_cc_arrays( + name = "generated_leaky_relu18_input0_int16_test_data_cc", + src = "leaky_relu18_input0_int16.csv", + out = "leaky_relu18_input0_int16_test_data.cc", +) + +generate_cc_arrays( + name = 
"generated_leaky_relu18_input0_int16_test_data_hdr", + src = "leaky_relu18_input0_int16.csv", + out = "leaky_relu18_input0_int16_test_data.h", +) + +generate_cc_arrays( + name = "generated_leaky_relu18_golden_int16_test_data_cc", + src = "leaky_relu18_golden_int16.csv", + out = "leaky_relu18_golden_int16_test_data.cc", +) + +generate_cc_arrays( + name = "generated_leaky_relu18_golden_int16_test_data_hdr", + src = "leaky_relu18_golden_int16.csv", + out = "leaky_relu18_golden_int16_test_data.h", +) + +generate_cc_arrays( + name = "generated_leaky_relu19_input0_int16_test_data_cc", + src = "leaky_relu19_input0_int16.csv", + out = "leaky_relu19_input0_int16_test_data.cc", +) + +generate_cc_arrays( + name = "generated_leaky_relu19_input0_int16_test_data_hdr", + src = "leaky_relu19_input0_int16.csv", + out = "leaky_relu19_input0_int16_test_data.h", +) + +generate_cc_arrays( + name = "generated_leaky_relu19_golden_int16_test_data_cc", + src = "leaky_relu19_golden_int16.csv", + out = "leaky_relu19_golden_int16_test_data.cc", +) + +generate_cc_arrays( + name = "generated_leaky_relu19_golden_int16_test_data_hdr", + src = "leaky_relu19_golden_int16.csv", + out = "leaky_relu19_golden_int16_test_data.h", +) + +generate_cc_arrays( + name = "generated_leaky_relu20_input0_int16_test_data_cc", + src = "leaky_relu20_input0_int16.csv", + out = "leaky_relu20_input0_int16_test_data.cc", +) + +generate_cc_arrays( + name = "generated_leaky_relu20_input0_int16_test_data_hdr", + src = "leaky_relu20_input0_int16.csv", + out = "leaky_relu20_input0_int16_test_data.h", +) + +generate_cc_arrays( + name = "generated_leaky_relu20_golden_int16_test_data_cc", + src = "leaky_relu20_golden_int16.csv", + out = "leaky_relu20_golden_int16_test_data.cc", +) + +generate_cc_arrays( + name = "generated_leaky_relu20_golden_int16_test_data_hdr", + src = "leaky_relu20_golden_int16.csv", + out = "leaky_relu20_golden_int16_test_data.h", +) + +generate_cc_arrays( + name = 
"generated_leaky_relu21_input0_int16_test_data_cc", + src = "leaky_relu21_input0_int16.csv", + out = "leaky_relu21_input0_int16_test_data.cc", +) + +generate_cc_arrays( + name = "generated_leaky_relu21_input0_int16_test_data_hdr", + src = "leaky_relu21_input0_int16.csv", + out = "leaky_relu21_input0_int16_test_data.h", +) + +generate_cc_arrays( + name = "generated_leaky_relu21_golden_int16_test_data_cc", + src = "leaky_relu21_golden_int16.csv", + out = "leaky_relu21_golden_int16_test_data.cc", +) + +generate_cc_arrays( + name = "generated_leaky_relu21_golden_int16_test_data_hdr", + src = "leaky_relu21_golden_int16.csv", + out = "leaky_relu21_golden_int16_test_data.h", +) + +generate_cc_arrays( + name = "generated_leaky_relu22_input0_int16_test_data_cc", + src = "leaky_relu22_input0_int16.csv", + out = "leaky_relu22_input0_int16_test_data.cc", +) + +generate_cc_arrays( + name = "generated_leaky_relu22_input0_int16_test_data_hdr", + src = "leaky_relu22_input0_int16.csv", + out = "leaky_relu22_input0_int16_test_data.h", +) + +generate_cc_arrays( + name = "generated_leaky_relu22_golden_int16_test_data_cc", + src = "leaky_relu22_golden_int16.csv", + out = "leaky_relu22_golden_int16_test_data.cc", +) + +generate_cc_arrays( + name = "generated_leaky_relu22_golden_int16_test_data_hdr", + src = "leaky_relu22_golden_int16.csv", + out = "leaky_relu22_golden_int16_test_data.h", +) + +cc_library( + name = "models_and_testdata", + srcs = [ + "generated_leaky_relu0_golden_int16_test_data_cc", + "generated_leaky_relu0_input0_int16_test_data_cc", + "generated_leaky_relu0_model_data_cc", + "generated_leaky_relu10_golden_int16_test_data_cc", + "generated_leaky_relu10_input0_int16_test_data_cc", + "generated_leaky_relu10_model_data_cc", + "generated_leaky_relu11_golden_int16_test_data_cc", + "generated_leaky_relu11_input0_int16_test_data_cc", + "generated_leaky_relu11_model_data_cc", + "generated_leaky_relu12_golden_int16_test_data_cc", + 
"generated_leaky_relu12_input0_int16_test_data_cc", + "generated_leaky_relu12_model_data_cc", + "generated_leaky_relu13_golden_int16_test_data_cc", + "generated_leaky_relu13_input0_int16_test_data_cc", + "generated_leaky_relu13_model_data_cc", + "generated_leaky_relu14_golden_int16_test_data_cc", + "generated_leaky_relu14_input0_int16_test_data_cc", + "generated_leaky_relu14_model_data_cc", + "generated_leaky_relu15_golden_int16_test_data_cc", + "generated_leaky_relu15_input0_int16_test_data_cc", + "generated_leaky_relu15_model_data_cc", + "generated_leaky_relu16_golden_int16_test_data_cc", + "generated_leaky_relu16_input0_int16_test_data_cc", + "generated_leaky_relu16_model_data_cc", + "generated_leaky_relu17_golden_int16_test_data_cc", + "generated_leaky_relu17_input0_int16_test_data_cc", + "generated_leaky_relu17_model_data_cc", + "generated_leaky_relu18_golden_int16_test_data_cc", + "generated_leaky_relu18_input0_int16_test_data_cc", + "generated_leaky_relu18_model_data_cc", + "generated_leaky_relu19_golden_int16_test_data_cc", + "generated_leaky_relu19_input0_int16_test_data_cc", + "generated_leaky_relu19_model_data_cc", + "generated_leaky_relu1_golden_int16_test_data_cc", + "generated_leaky_relu1_input0_int16_test_data_cc", + "generated_leaky_relu1_model_data_cc", + "generated_leaky_relu20_golden_int16_test_data_cc", + "generated_leaky_relu20_input0_int16_test_data_cc", + "generated_leaky_relu20_model_data_cc", + "generated_leaky_relu21_golden_int16_test_data_cc", + "generated_leaky_relu21_input0_int16_test_data_cc", + "generated_leaky_relu21_model_data_cc", + "generated_leaky_relu22_golden_int16_test_data_cc", + "generated_leaky_relu22_input0_int16_test_data_cc", + "generated_leaky_relu22_model_data_cc", + "generated_leaky_relu2_golden_int16_test_data_cc", + "generated_leaky_relu2_input0_int16_test_data_cc", + "generated_leaky_relu2_model_data_cc", + "generated_leaky_relu3_golden_int16_test_data_cc", + "generated_leaky_relu3_input0_int16_test_data_cc", + 
"generated_leaky_relu3_model_data_cc", + "generated_leaky_relu4_golden_int16_test_data_cc", + "generated_leaky_relu4_input0_int16_test_data_cc", + "generated_leaky_relu4_model_data_cc", + "generated_leaky_relu5_golden_int16_test_data_cc", + "generated_leaky_relu5_input0_int16_test_data_cc", + "generated_leaky_relu5_model_data_cc", + "generated_leaky_relu6_golden_int16_test_data_cc", + "generated_leaky_relu6_input0_int16_test_data_cc", + "generated_leaky_relu6_model_data_cc", + "generated_leaky_relu7_golden_int16_test_data_cc", + "generated_leaky_relu7_input0_int16_test_data_cc", + "generated_leaky_relu7_model_data_cc", + "generated_leaky_relu8_golden_int16_test_data_cc", + "generated_leaky_relu8_input0_int16_test_data_cc", + "generated_leaky_relu8_model_data_cc", + "generated_leaky_relu9_golden_int16_test_data_cc", + "generated_leaky_relu9_input0_int16_test_data_cc", + "generated_leaky_relu9_model_data_cc", + ], + hdrs = [ + "generated_leaky_relu0_golden_int16_test_data_hdr", + "generated_leaky_relu0_input0_int16_test_data_hdr", + "generated_leaky_relu0_model_data_hdr", + "generated_leaky_relu10_golden_int16_test_data_hdr", + "generated_leaky_relu10_input0_int16_test_data_hdr", + "generated_leaky_relu10_model_data_hdr", + "generated_leaky_relu11_golden_int16_test_data_hdr", + "generated_leaky_relu11_input0_int16_test_data_hdr", + "generated_leaky_relu11_model_data_hdr", + "generated_leaky_relu12_golden_int16_test_data_hdr", + "generated_leaky_relu12_input0_int16_test_data_hdr", + "generated_leaky_relu12_model_data_hdr", + "generated_leaky_relu13_golden_int16_test_data_hdr", + "generated_leaky_relu13_input0_int16_test_data_hdr", + "generated_leaky_relu13_model_data_hdr", + "generated_leaky_relu14_golden_int16_test_data_hdr", + "generated_leaky_relu14_input0_int16_test_data_hdr", + "generated_leaky_relu14_model_data_hdr", + "generated_leaky_relu15_golden_int16_test_data_hdr", + "generated_leaky_relu15_input0_int16_test_data_hdr", + 
"generated_leaky_relu15_model_data_hdr", + "generated_leaky_relu16_golden_int16_test_data_hdr", + "generated_leaky_relu16_input0_int16_test_data_hdr", + "generated_leaky_relu16_model_data_hdr", + "generated_leaky_relu17_golden_int16_test_data_hdr", + "generated_leaky_relu17_input0_int16_test_data_hdr", + "generated_leaky_relu17_model_data_hdr", + "generated_leaky_relu18_golden_int16_test_data_hdr", + "generated_leaky_relu18_input0_int16_test_data_hdr", + "generated_leaky_relu18_model_data_hdr", + "generated_leaky_relu19_golden_int16_test_data_hdr", + "generated_leaky_relu19_input0_int16_test_data_hdr", + "generated_leaky_relu19_model_data_hdr", + "generated_leaky_relu1_golden_int16_test_data_hdr", + "generated_leaky_relu1_input0_int16_test_data_hdr", + "generated_leaky_relu1_model_data_hdr", + "generated_leaky_relu20_golden_int16_test_data_hdr", + "generated_leaky_relu20_input0_int16_test_data_hdr", + "generated_leaky_relu20_model_data_hdr", + "generated_leaky_relu21_golden_int16_test_data_hdr", + "generated_leaky_relu21_input0_int16_test_data_hdr", + "generated_leaky_relu21_model_data_hdr", + "generated_leaky_relu22_golden_int16_test_data_hdr", + "generated_leaky_relu22_input0_int16_test_data_hdr", + "generated_leaky_relu22_model_data_hdr", + "generated_leaky_relu2_golden_int16_test_data_hdr", + "generated_leaky_relu2_input0_int16_test_data_hdr", + "generated_leaky_relu2_model_data_hdr", + "generated_leaky_relu3_golden_int16_test_data_hdr", + "generated_leaky_relu3_input0_int16_test_data_hdr", + "generated_leaky_relu3_model_data_hdr", + "generated_leaky_relu4_golden_int16_test_data_hdr", + "generated_leaky_relu4_input0_int16_test_data_hdr", + "generated_leaky_relu4_model_data_hdr", + "generated_leaky_relu5_golden_int16_test_data_hdr", + "generated_leaky_relu5_input0_int16_test_data_hdr", + "generated_leaky_relu5_model_data_hdr", + "generated_leaky_relu6_golden_int16_test_data_hdr", + "generated_leaky_relu6_input0_int16_test_data_hdr", + 
"generated_leaky_relu6_model_data_hdr", + "generated_leaky_relu7_golden_int16_test_data_hdr", + "generated_leaky_relu7_input0_int16_test_data_hdr", + "generated_leaky_relu7_model_data_hdr", + "generated_leaky_relu8_golden_int16_test_data_hdr", + "generated_leaky_relu8_input0_int16_test_data_hdr", + "generated_leaky_relu8_model_data_hdr", + "generated_leaky_relu9_golden_int16_test_data_hdr", + "generated_leaky_relu9_input0_int16_test_data_hdr", + "generated_leaky_relu9_model_data_hdr", + ], + copts = micro_copts(), +) + +cc_test( + name = "integration_test", + srcs = [ + "integration_tests.cc", + ], + copts = micro_copts(), + deps = [ + ":models_and_testdata", + "//python/tflite_micro:python_ops_resolver", + "//tensorflow/lite/micro:micro_framework", + "//tensorflow/lite/micro:micro_log", + "//tensorflow/lite/micro:micro_resource_variable", + "//tensorflow/lite/micro:op_resolvers", + "//tensorflow/lite/micro:recording_allocators", + "//tensorflow/lite/micro/testing:micro_test", + ], +) diff --git a/tensorflow/lite/micro/integration_tests/seanet/leaky_relu/Makefile.inc b/tensorflow/lite/micro/integration_tests/seanet/leaky_relu/Makefile.inc new file mode 100644 index 0000000..6a1459b --- /dev/null +++ b/tensorflow/lite/micro/integration_tests/seanet/leaky_relu/Makefile.inc @@ -0,0 +1,80 @@ +integration_tests_seanet_leaky_relu_GENERATOR_INPUTS := \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu0.tflite \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu1.tflite \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu2.tflite \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu3.tflite \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu4.tflite \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu5.tflite \ 
+$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu6.tflite \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu7.tflite \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu8.tflite \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu9.tflite \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu10.tflite \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu11.tflite \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu12.tflite \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu13.tflite \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu14.tflite \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu15.tflite \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu16.tflite \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu17.tflite \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu18.tflite \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu19.tflite \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu20.tflite \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu21.tflite \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu22.tflite \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu0_input0_int16.csv \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu0_golden_int16.csv \ 
+$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu1_input0_int16.csv \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu1_golden_int16.csv \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu2_input0_int16.csv \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu2_golden_int16.csv \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu3_input0_int16.csv \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu3_golden_int16.csv \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu4_input0_int16.csv \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu4_golden_int16.csv \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu5_input0_int16.csv \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu5_golden_int16.csv \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu6_input0_int16.csv \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu6_golden_int16.csv \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu7_input0_int16.csv \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu7_golden_int16.csv \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu8_input0_int16.csv \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu8_golden_int16.csv \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu9_input0_int16.csv \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu9_golden_int16.csv \ 
+$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu10_input0_int16.csv \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu10_golden_int16.csv \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu11_input0_int16.csv \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu11_golden_int16.csv \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu12_input0_int16.csv \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu12_golden_int16.csv \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu13_input0_int16.csv \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu13_golden_int16.csv \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu14_input0_int16.csv \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu14_golden_int16.csv \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu15_input0_int16.csv \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu15_golden_int16.csv \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu16_input0_int16.csv \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu16_golden_int16.csv \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu17_input0_int16.csv \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu17_golden_int16.csv \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu18_input0_int16.csv \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu18_golden_int16.csv \ 
+$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu19_input0_int16.csv \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu19_golden_int16.csv \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu20_input0_int16.csv \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu20_golden_int16.csv \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu21_input0_int16.csv \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu21_golden_int16.csv \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu22_input0_int16.csv \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu22_golden_int16.csv \ + +integration_tests_seanet_leaky_relu_SRCS := \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/leaky_relu/integration_tests.cc \ +$(TENSORFLOW_ROOT)python/tflite_micro/python_ops_resolver.cc \ + +integration_tests_seanet_leaky_relu_HDR := \ +$(TENSORFLOW_ROOT)python/tflite_micro/python_ops_resolver.h \ + +$(eval $(call microlite_test,integration_tests_seanet_leaky_relu_test,\ +$(integration_tests_seanet_leaky_relu_SRCS),$(integration_tests_seanet_leaky_relu_HDR),$(integration_tests_seanet_leaky_relu_GENERATOR_INPUTS))) diff --git a/tensorflow/lite/micro/integration_tests/seanet/leaky_relu/integration_tests.cc b/tensorflow/lite/micro/integration_tests/seanet/leaky_relu/integration_tests.cc new file mode 100644 index 0000000..d95dca1 --- /dev/null +++ b/tensorflow/lite/micro/integration_tests/seanet/leaky_relu/integration_tests.cc @@ -0,0 +1,323 @@ +/* Copyright 2022 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. 
+You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#include + +#include "python/tflite_micro/python_ops_resolver.h" +#include "tensorflow/lite/c/common.h" +#include "tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu0_golden_int16_test_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu0_input0_int16_test_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu0_model_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu10_golden_int16_test_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu10_input0_int16_test_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu10_model_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu11_golden_int16_test_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu11_input0_int16_test_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu11_model_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu12_golden_int16_test_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu12_input0_int16_test_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu12_model_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu13_golden_int16_test_data.h" +#include 
"tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu13_input0_int16_test_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu13_model_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu14_golden_int16_test_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu14_input0_int16_test_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu14_model_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu15_golden_int16_test_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu15_input0_int16_test_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu15_model_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu16_golden_int16_test_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu16_input0_int16_test_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu16_model_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu17_golden_int16_test_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu17_input0_int16_test_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu17_model_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu18_golden_int16_test_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu18_input0_int16_test_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu18_model_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu19_golden_int16_test_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu19_input0_int16_test_data.h" +#include 
"tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu19_model_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu1_golden_int16_test_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu1_input0_int16_test_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu1_model_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu20_golden_int16_test_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu20_input0_int16_test_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu20_model_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu21_golden_int16_test_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu21_input0_int16_test_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu21_model_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu22_golden_int16_test_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu22_input0_int16_test_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu22_model_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu2_golden_int16_test_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu2_input0_int16_test_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu2_model_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu3_golden_int16_test_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu3_input0_int16_test_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu3_model_data.h" +#include 
"tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu4_golden_int16_test_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu4_input0_int16_test_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu4_model_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu5_golden_int16_test_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu5_input0_int16_test_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu5_model_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu6_golden_int16_test_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu6_input0_int16_test_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu6_model_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu7_golden_int16_test_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu7_input0_int16_test_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu7_model_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu8_golden_int16_test_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu8_input0_int16_test_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu8_model_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu9_golden_int16_test_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu9_input0_int16_test_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu9_model_data.h" +#include "tensorflow/lite/micro/micro_log.h" +#include "tensorflow/lite/micro/micro_profiler.h" +#include 
"tensorflow/lite/micro/recording_micro_allocator.h" +#include "tensorflow/lite/micro/recording_micro_interpreter.h" +#include "tensorflow/lite/micro/system_setup.h" +#include "tensorflow/lite/micro/testing/micro_test.h" + +constexpr size_t kTensorArenaSize = 1024 * 100; +uint8_t tensor_arena[kTensorArenaSize]; + +namespace tflite { +namespace micro { +namespace { + +void RunModel(const uint8_t* model, const int16_t* input0, + const uint32_t input0_size, const int16_t* golden, + const uint32_t golden_size, const char* name) { + InitializeTarget(); + MicroProfiler profiler; + PythonOpsResolver op_resolver; + + MicroInterpreter interpreter(GetModel(model), op_resolver, tensor_arena, + kTensorArenaSize, nullptr, &profiler); + interpreter.AllocateTensors(); + TfLiteTensor* input_tensor0 = interpreter.input(0); + TF_LITE_MICRO_EXPECT_EQ(input_tensor0->bytes, input0_size * sizeof(int16_t)); + memcpy(interpreter.input(0)->data.raw, input0, input_tensor0->bytes); + if (kTfLiteOk != interpreter.Invoke()) { + TF_LITE_MICRO_EXPECT(false); + return; + } + profiler.Log(); + MicroPrintf(""); + + TfLiteTensor* output_tensor = interpreter.output(0); + TF_LITE_MICRO_EXPECT_EQ(output_tensor->bytes, golden_size * sizeof(int16_t)); + int16_t* output = ::tflite::GetTensorData(output_tensor); + for (uint32_t i = 0; i < golden_size; i++) { + // TODO(b/205046520): Better understand why TfLite and TFLM can sometimes be + // off by 1. 
+ TF_LITE_MICRO_EXPECT_NEAR(golden[i], output[i], 1); + } +} + +} // namespace +} // namespace micro +} // namespace tflite + +TF_LITE_MICRO_TESTS_BEGIN + +TF_LITE_MICRO_TEST(leaky_relu0_test) { + tflite::micro::RunModel( + g_leaky_relu0_model_data, g_leaky_relu0_input0_int16_test_data, + g_leaky_relu0_input0_int16_test_data_size, + g_leaky_relu0_golden_int16_test_data, + g_leaky_relu0_golden_int16_test_data_size, "leaky_relu0 test"); +} + +TF_LITE_MICRO_TEST(leaky_relu1_test) { + tflite::micro::RunModel( + g_leaky_relu1_model_data, g_leaky_relu1_input0_int16_test_data, + g_leaky_relu1_input0_int16_test_data_size, + g_leaky_relu1_golden_int16_test_data, + g_leaky_relu1_golden_int16_test_data_size, "leaky_relu1 test"); +} + +TF_LITE_MICRO_TEST(leaky_relu2_test) { + tflite::micro::RunModel( + g_leaky_relu2_model_data, g_leaky_relu2_input0_int16_test_data, + g_leaky_relu2_input0_int16_test_data_size, + g_leaky_relu2_golden_int16_test_data, + g_leaky_relu2_golden_int16_test_data_size, "leaky_relu2 test"); +} + +TF_LITE_MICRO_TEST(leaky_relu3_test) { + tflite::micro::RunModel( + g_leaky_relu3_model_data, g_leaky_relu3_input0_int16_test_data, + g_leaky_relu3_input0_int16_test_data_size, + g_leaky_relu3_golden_int16_test_data, + g_leaky_relu3_golden_int16_test_data_size, "leaky_relu3 test"); +} + +TF_LITE_MICRO_TEST(leaky_relu4_test) { + tflite::micro::RunModel( + g_leaky_relu4_model_data, g_leaky_relu4_input0_int16_test_data, + g_leaky_relu4_input0_int16_test_data_size, + g_leaky_relu4_golden_int16_test_data, + g_leaky_relu4_golden_int16_test_data_size, "leaky_relu4 test"); +} + +TF_LITE_MICRO_TEST(leaky_relu5_test) { + tflite::micro::RunModel( + g_leaky_relu5_model_data, g_leaky_relu5_input0_int16_test_data, + g_leaky_relu5_input0_int16_test_data_size, + g_leaky_relu5_golden_int16_test_data, + g_leaky_relu5_golden_int16_test_data_size, "leaky_relu5 test"); +} + +TF_LITE_MICRO_TEST(leaky_relu6_test) { + tflite::micro::RunModel( + g_leaky_relu6_model_data, 
g_leaky_relu6_input0_int16_test_data, + g_leaky_relu6_input0_int16_test_data_size, + g_leaky_relu6_golden_int16_test_data, + g_leaky_relu6_golden_int16_test_data_size, "leaky_relu6 test"); +} + +TF_LITE_MICRO_TEST(leaky_relu7_test) { + tflite::micro::RunModel( + g_leaky_relu7_model_data, g_leaky_relu7_input0_int16_test_data, + g_leaky_relu7_input0_int16_test_data_size, + g_leaky_relu7_golden_int16_test_data, + g_leaky_relu7_golden_int16_test_data_size, "leaky_relu7 test"); +} + +TF_LITE_MICRO_TEST(leaky_relu8_test) { + tflite::micro::RunModel( + g_leaky_relu8_model_data, g_leaky_relu8_input0_int16_test_data, + g_leaky_relu8_input0_int16_test_data_size, + g_leaky_relu8_golden_int16_test_data, + g_leaky_relu8_golden_int16_test_data_size, "leaky_relu8 test"); +} + +TF_LITE_MICRO_TEST(leaky_relu9_test) { + tflite::micro::RunModel( + g_leaky_relu9_model_data, g_leaky_relu9_input0_int16_test_data, + g_leaky_relu9_input0_int16_test_data_size, + g_leaky_relu9_golden_int16_test_data, + g_leaky_relu9_golden_int16_test_data_size, "leaky_relu9 test"); +} + +TF_LITE_MICRO_TEST(leaky_relu10_test) { + tflite::micro::RunModel( + g_leaky_relu10_model_data, g_leaky_relu10_input0_int16_test_data, + g_leaky_relu10_input0_int16_test_data_size, + g_leaky_relu10_golden_int16_test_data, + g_leaky_relu10_golden_int16_test_data_size, "leaky_relu10 test"); +} + +TF_LITE_MICRO_TEST(leaky_relu11_test) { + tflite::micro::RunModel( + g_leaky_relu11_model_data, g_leaky_relu11_input0_int16_test_data, + g_leaky_relu11_input0_int16_test_data_size, + g_leaky_relu11_golden_int16_test_data, + g_leaky_relu11_golden_int16_test_data_size, "leaky_relu11 test"); +} + +TF_LITE_MICRO_TEST(leaky_relu12_test) { + tflite::micro::RunModel( + g_leaky_relu12_model_data, g_leaky_relu12_input0_int16_test_data, + g_leaky_relu12_input0_int16_test_data_size, + g_leaky_relu12_golden_int16_test_data, + g_leaky_relu12_golden_int16_test_data_size, "leaky_relu12 test"); +} + +TF_LITE_MICRO_TEST(leaky_relu13_test) { + 
tflite::micro::RunModel( + g_leaky_relu13_model_data, g_leaky_relu13_input0_int16_test_data, + g_leaky_relu13_input0_int16_test_data_size, + g_leaky_relu13_golden_int16_test_data, + g_leaky_relu13_golden_int16_test_data_size, "leaky_relu13 test"); +} + +TF_LITE_MICRO_TEST(leaky_relu14_test) { + tflite::micro::RunModel( + g_leaky_relu14_model_data, g_leaky_relu14_input0_int16_test_data, + g_leaky_relu14_input0_int16_test_data_size, + g_leaky_relu14_golden_int16_test_data, + g_leaky_relu14_golden_int16_test_data_size, "leaky_relu14 test"); +} + +TF_LITE_MICRO_TEST(leaky_relu15_test) { + tflite::micro::RunModel( + g_leaky_relu15_model_data, g_leaky_relu15_input0_int16_test_data, + g_leaky_relu15_input0_int16_test_data_size, + g_leaky_relu15_golden_int16_test_data, + g_leaky_relu15_golden_int16_test_data_size, "leaky_relu15 test"); +} + +TF_LITE_MICRO_TEST(leaky_relu16_test) { + tflite::micro::RunModel( + g_leaky_relu16_model_data, g_leaky_relu16_input0_int16_test_data, + g_leaky_relu16_input0_int16_test_data_size, + g_leaky_relu16_golden_int16_test_data, + g_leaky_relu16_golden_int16_test_data_size, "leaky_relu16 test"); +} + +TF_LITE_MICRO_TEST(leaky_relu17_test) { + tflite::micro::RunModel( + g_leaky_relu17_model_data, g_leaky_relu17_input0_int16_test_data, + g_leaky_relu17_input0_int16_test_data_size, + g_leaky_relu17_golden_int16_test_data, + g_leaky_relu17_golden_int16_test_data_size, "leaky_relu17 test"); +} + +TF_LITE_MICRO_TEST(leaky_relu18_test) { + tflite::micro::RunModel( + g_leaky_relu18_model_data, g_leaky_relu18_input0_int16_test_data, + g_leaky_relu18_input0_int16_test_data_size, + g_leaky_relu18_golden_int16_test_data, + g_leaky_relu18_golden_int16_test_data_size, "leaky_relu18 test"); +} + +TF_LITE_MICRO_TEST(leaky_relu19_test) { + tflite::micro::RunModel( + g_leaky_relu19_model_data, g_leaky_relu19_input0_int16_test_data, + g_leaky_relu19_input0_int16_test_data_size, + g_leaky_relu19_golden_int16_test_data, + 
g_leaky_relu19_golden_int16_test_data_size, "leaky_relu19 test"); +} + +TF_LITE_MICRO_TEST(leaky_relu20_test) { + tflite::micro::RunModel( + g_leaky_relu20_model_data, g_leaky_relu20_input0_int16_test_data, + g_leaky_relu20_input0_int16_test_data_size, + g_leaky_relu20_golden_int16_test_data, + g_leaky_relu20_golden_int16_test_data_size, "leaky_relu20 test"); +} + +TF_LITE_MICRO_TEST(leaky_relu21_test) { + tflite::micro::RunModel( + g_leaky_relu21_model_data, g_leaky_relu21_input0_int16_test_data, + g_leaky_relu21_input0_int16_test_data_size, + g_leaky_relu21_golden_int16_test_data, + g_leaky_relu21_golden_int16_test_data_size, "leaky_relu21 test"); +} + +TF_LITE_MICRO_TEST(leaky_relu22_test) { + tflite::micro::RunModel( + g_leaky_relu22_model_data, g_leaky_relu22_input0_int16_test_data, + g_leaky_relu22_input0_int16_test_data_size, + g_leaky_relu22_golden_int16_test_data, + g_leaky_relu22_golden_int16_test_data_size, "leaky_relu22 test"); +} + +TF_LITE_MICRO_TESTS_END diff --git a/tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu0.tflite b/tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu0.tflite new file mode 100644 index 0000000..1962676 Binary files /dev/null and b/tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu0.tflite differ diff --git a/tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu0_golden_int16.csv b/tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu0_golden_int16.csv new file mode 100644 index 0000000..828fffa --- /dev/null +++ b/tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu0_golden_int16.csv @@ -0,0 +1 @@ 
+-4702,-6775,31876,23565,2780,28947,-4124,1043,13458,14533,32594,1510,-5021,18179,3744,-6010,13919,11555,27852,2414,-354,12085,-8452,28246,26310,28210,20627,-8353,24756,-7032,19359,-1273,-6043,-5510,14373,-1477,-9761,-2896,19452,28704,2398,-5843,-2130,-3692,632,24592,1877,16578,-1308,-4928,-5709,18564,24846,5770,21491,4879,30968,22936,6404,21827,17791,-6067,-64,5341,-3099,3868,-7530,31826,3981,-4305,20694,14012,-8925,19135,14833,-5670,29529,25130,-6278,-6311,28698,-7456,-7012,11821,-3233,-6412,-5257,12238,28055,11557,10153,12409,-6550,-2935,-8739,-1934,-9291,-6071,3580,12903,1603,32173,3143,-6315,13051,30476,-3786,18178,30359,26185,-8919,-8302,-8716,-2504,22664,-4952,-8441,2290,-283,-829,-9580,-9159,6168,-3167,-1025,-2293,13951,21874,11671,-8743,-1626,11322,20247,3032,-3048,2287,-5733,-7969,-5591,-4882,24627,-5579,-9403,-704,-2570,13185,-5508,-2978,-3148,29021,27839,-4632,3522,8192,24736,18443,17248,16236,-6204,14186,-5493,-5046,8649,12752,7720,-4048,-3870,3237,32164,-3375,-2024,29769,-9487,8531,-3157,1457,-6965,-7167,-6769,25772,17509,-603,-6358,18241,733,-3813,25429,31675,-737,-4374,21726,-9795,-2157,-1600,30150,20096,8528,114,-4734,25891,-7025,-8679,-8137,18116,-7993,-7957,9491,8572,21327,-916,-1577,294,-566,23784,-1170,16106,-2170,-2107,-683,-3530,-1717,5357,-7101,15797,-5203,-7652,-4134,21314,11077,-7336,-2451,27216,-5068,14160,-7346,19178,14250,20173,18103,8004,15794,2848,-6433,-5511,6490,-7843,553,15696,-1736,-8020,-541,-5098,19609,-138,18712,6455,-3110,-2433,-1009,27023,-436,-273,-7209,-1491,27180,30899,31058,-3187,28302,-74,15669,24265,13896,7069,-4762,-5299,30400,-7846,-5215,17676,3233,8784,2098,-8157,17406,-1505,4644,-4060,4,-8286,-1479,-4147,19370,4611,31247,24577,30105,-1867,-9411,11055,-1376,-5614,-1324,6736,32101,29189,-5069,-5787,-8953,25252,-1648,-6100,20255,2077,-2213,-4903,6055,8135,30037,19178,18237,-1196,-48,-8435,19744,-6469,-8880,-7354,14142,18703,-8221,13675,14053,17396,-6124,-4981,-3828,26838,15281,-400,-7315,-1029,-2300,2777,-3370,8486,5802
,27278,11985,-5920,13243,20062,-9693,27573,-4962,1833,-632,32110,17329,25790,12316,12154,20909,-4051,-7543,17948,-351,-4154,-3239,12627,-4378,-861,11893,21725,4286,27003,20579,-6447,-4023,26799,-2343,6915,14303,-2584,-2036,828,10916,-2753,-2173,-3130,18019,-2397,8054,13211,-8828,31682,-9674,1442,14016,29584,19803,-4404,15268,31161,-7910,21311,32174,4872,3736,30340,-2153,-5690,6022,-4328,-9066,-5107,4744,21345,400,-2300,-9361,3826,27696,30150,-361,15711,-3790,-9427,-1480,5720,22657,-6514,-1485,-4146,24095,25886,29442,4274,-8095,31230,18376,11793,76,-6381,10293,18461,13228,-4263,-2746,-7793,20447,-9577,20214,-6813,2878,13104,19020,-2097,-5761,12726,-2591,24041,9648,21411,22818,4044,12749,-638,-8161,8258,-2166,-3220,12525,31894,8442,1481,-6184,11108,4428,-9814,-6319,-1094,-5050,18291,961,-2385,-4654,-2706,25866,-7797,29750,13569,22376,3871,-7311,28136,-409,11901,-514,14892,2044,-6832,460,-593,18515,19379,-2090,-3932,19169,1192,26672,970,13588,-7090,1616,13754,19788,11382,4229,-5683,25558,-3182,7182,24913,27383,-6243,31846,-4006,9033,-1742,-7407,14747,31287,-5482,1806,12555,32067,-9668,21744,10361,12289,-9485,23507,-7399,-5295,9125,2126,-3872,14541,-2026,-7135,5763,-2785,932,-4749,-9112,10427,-7562,9745,-5746,-7077,-5176,-6159,-5096,30819,-2725,-9744,31818,481,10285,12203,-4189,-6505,5663,-761,-9690,-2353,2064,-1000,-5427,20735,-4344,18167,20037,-3418,-3244,18133,22137,-6184,-1618,-5584,2412,22051,-3334,-4842,23025,-6123,-7614,6037,-8546,2475,24292,-4769,23347,-6380,-9778,-8817,-1570,1414,16038,-6864,-9409,-6656,-3287,-2413,20610,29872,-1445,18379,22882,-9830,-8618,-7963,-6181,-9275,-3592,-1750,-5494,29380,29704,32147,-6333,-276,-8225,-2683,-9229,17413,13007,-4692,-5360,-1760,-9379,-2672,2388,-8740,-731,-2595,32291,-4587,19515,9282,22979,-7063,11700,-1257,-5382,-7402,30547,18468,-214,12478,-3973,-7801,-7482,16021,-5119,-2209,29421,23929,15305,-5227,27685,-2356,5770,1462,-403,10710,18014,10992,-6411,3189,-1781,-2183,27410,16050,-6170,17514,-4986,19753,18542,-9707,26287,-
2251,26884,17417,32697,-762,17223,2661,5321,-5788,12721,11641,-6387,-5004,-2914,14531,3404,1879,26160,5369,5087,-7068,-6673,3575,28930,26788,-2473,-1690,22705,7234,-4978,8990,2153,-3980,-7580,20505,31144,28183,18954,-5508,-4742,-735,-8452,-1000,-4062,-1549,32744,-8927,-1199,-2868,-1103,12825,-4733,27844,-348,18110,5609,29873,-6561,6194,20780,-4733,-3346,3819,-9719,-7999,6569,24416,-3531,7619,-2015,10290,28282,-4643,11307,10336,-8266,-7127,-1810,15646,11878,-6251,-5704,-6759,-4718,-9398,14049,-7258,-8426,-9043,-5919,-3407,-5925,19088,-6763,20318,-3259,-8875,-9639,19608,-4600,32763,-9656,11730,-3923,25312,2971,14765,-9571,-2171,3691,-6031,-5566,-537,7276,-8823,-5475,6198,-2332,28991,19184,6878,-2641,10698,15999,-5614,10543,-6896,10993,3305,-2472,13996,-749,-1981,-5977,15960,12874,28728,19015,-564,-7951,9705,-5,31263,25255,31217,-398,19203,-878,-5722,29180,-3377,-66,10144,25629,26320,-5440,30577,-876,-5927,-3590,28564,30256,-1619,30181,31590,6299,-8149,-3788,7774,9811,4848,-2290,-8194,32693,-3247,11352,8862,1747,-5782,-4938,18284,-4810,17037,-9131,-9631,20752,-849,-2341,-2274,30971,-5929,-1175,13637,2955,30116,-9788,13430,20587,-4130,-8710,27458,-1424,-8863,-8752,13231,2462,-9598,-3235,17853,29213,18453,30323,-543,-162,-962,-3794,-1573,-1426,30918,7933,-5827,30910,-1164,-9751,11335,-4532,27933,-3598,6135,-9177,11093,-2574,23553,-2384,11919,-4333,31828,-173,-8076,18933,18583,29578,29831,-5615,-4229,-6030,-8551,-8371,18537,20580,2594,7991,11522,-1295,-5356,-6688,6091,11687,14910,2748,-4800,762,10207,-1248,9970,-1109,30305,25097,30480,18728,28677,2867,-8617,3714,7467,-3455,25381,-4832,-265,-2457,-5091,20419,-3841,7291,20592,-3277,-4859,5791,-5815,-3279,-3412,14239,-2201,-6681,24590,30211,24948,3322,-5647,11099,22091,24911,-1421,-1856,1575,-7726,-8584,27992,-8962,3520,-7048,5157,12027,2061,2994,-1915,-7475,7881,-6325,11673,-670,-4373,8820,-7855,-4257,31763,4294,-7456,10098,20534,26520,-2897,-7975,603,-803,21239,-5735,6744,-7459,13389,-7581,18647,26960,10891,20169,23722,-19
7,23335,-2560,15323,16718,27487,-3346,-4703,10395,-4556,-7766,-8865,13968,-74,30547,27427,-3523,-1105,-3121,-6403,24027,21359,-7153,18487,30070,6141,-8734,-5039,-9791,-9767,22282,22217,31934,-9010,-1092,13576,32685,-5,20977,24733,-7380,31454,30061,-8863,-4749,-2796,-3822,-9778,-9196,10600,-8049,-5494,-3604,3786,-2930,18674,28503,2181,15298,-8751,-2980,-4418,-5899,25262,17657,-2821,17753,19347,3164,-7528,-3011,8445,-4689,11456,-1443,5795,-8605,2261,10975,-5615,24030,-2048,6887,-8628,-3188,9344,14561,-8342,19693,9504,10864,3336,-7337,-319,-7352,22585,17272,13445,17709,-9270,-9604,5374,-7454,1231,-5026,618,9923,-8194,-289,14529,7202,26354,7688,23686,26274,2146,32394,-6402,-763,-266,-9285,28836,23109,32122,-3412,18452,-2158,-1223,-6476,-7664,5960,5491,-5695,-9164,-7795,17462,-6490,12375,-7424,16196,-6112,-4489,-8096,17894,-3973,-1336,18727,-9160,-6221,-6278,6976,13016,-250,25937,-5608,-2477,22702,21083,6769,14188,-8423,-3418,2672,-4490,1587,-3026,-5543,-955,-3176,-3099,-8583,-8161,-5635,23215,12187,-114,-5263,1369,12449,-1441,-690,-5084,1775,26333,-445,-5116,-2025,-9346,25591,-7964,3275,-2436,-1385,-7505,6931,27462,-3464,2489,21053,-4380,1788,7561,4764,27199,-9069,12121,19090,-1663,28962,-4190,16682,-5366,28167,-8597,-3539,13803,12582,21059,3207,2568,6462,21818,28104,-1360,-7477,22212,9900,-7483,21640,-6091,7171,-2797,16725,23123,-2703,15827,29149,3445,10947,13904,-5877,-2953,29295,1434,-4101,32573,4666,31994,27562,-5102,15236,15797,-27,-5651,24005,9282,-4228,-4698,27054,-3569,-9473,-8581,-286,3344,12190,-7672,20158,11335,32210,6742,25017,-570,23923,2061,22808,-8856,21267,24455,6802,-4275,-7683,1790,-8229,13090,-122,-2108,-6546,-1301,-1709,13483,-6965,-5445,-3653,-5879,14588,-3650,-4277,-2558,5131,19392,19356,6210,16395,12050,-1986,15703,22635,24595,71,17941,-5341,-1317,-4457,-6478,-3303,-9682,3216,6277,32684,29313,5502,11024,-5935,-3423,5957,3870,-6300,-7509,-719,5224,-9246,-5958,-1100,5155,25438,19823,18035,1960,-5320,-7425,32396,-3687,-5237,1080,16675,-1796,-7330,-64
62,-8035,26288,-1666,-2953,-8541,-9571,16281,-862,21829,29274,23782,28545,-2642,-1082,-5469,-1174,28012,11599,1566,7673,-722,-9417,8824,-3138,-8814,-2421,13324,-3153,30938,-3271,-439,-7037,13250,-9763,-3853,32492,-6072,10172,-4532,-4134,15109,-3613,-7795,-3879,32674,-8945,-6217,29963,9950,-7258,21796,-8380,20422,-7673,9457,-8664,-9497,-8912,-6812,-2877,-754,15369,27215,-3203,-2910,15465,-8871,17101,-4528,27427,2639,24247,-7349,26824,30053,-3327,-5605,-6631,18613,27112,-7973,8919,30358,-1856,3717,17659,-767,23357,15213,-5684,-6670,-9631,-1789,12678,1447,-9544,24527,10498,22024,-1994,32352,-2634,16814,-226,-711,-7282,9944,-3752,13273,823,-566,-6998,-4047,3179,-9447,32403,5044,-786,-5907,-3962,-4272,20060,-6738,-1456,10212,-4435,3491,14697,6612,642,-5223,16338,22844,-2003,20872,-1412,19747,-4983,11446,288,-6311,-5521,25948,-3790,-4918,3701,-7070,14913,-8017,-2808,19968,-7347,-6895,-9073,-7818,13982,-3317,-7832,-1619,-2579,16946,-8590,32754,-9009,19804,26266,-1995,3584,8612,-2587,-7240,30266,8821,18484,-8248,-100,-4221,1132,1282,-8680,32695,-3922,-6550,23590,-2687,-2301,-5303,-4683,31867,-4990,-4544,-1356,-9066,7723,29065,-34,2710,-1100,13021,30798,-3444,-2736,8333,23554,-7030,-493,-2839,-6604,8174,29518,8599,3955,7768,29257,-5743,21409,10222,3578,18595,25878,-5015,-4255,24919,-7361,8777,-1973,-6191,-4663,-5026,6688,1611,19752,27464,-6907,-9795,-3416,-981,1881,31735,20489,17509,-1141,17858,2250,12892,-1908,-9664,-3550,8185,-102,-2290,28609,5847,21882,14273,-9017,27253,-3473,22010,-6976,-5781,19228,8644,11072,-375,11592,20707,23201,-2372,-1658,12531,-2641,-8783,-7014,-9087,25988,-7134,4276,-7751,19798,28217,4943,-9309,-8596,29404,3034,-560,-5389,29108,3562,-2837,3404,19701,31310,21973,-2576,438,24034,-1320,-6904,9751,22250,-3631,24619,29010,6149,-4688,10477,19772,-8191,-5613,18757,-9281,9177,-5010,7720,16930,28864,25942,7284,-2159,-4030,-6407,22078,16851,-7683,-4112,-2354,1404,4801,-1196,-8191,4540,-7584,-293,-8505,19313,10516,-6628,20335,-3007,22605,-5984,2147,-3146,224
42,28419,1511,23497,-1216,-5438,8277,-1000,12708,-3346,-8233,2539,12616,25513,-556,30958,-3709,30262,21370,18716,-4601,22245,-4415,10894,-8026,-8134,31907,12137,-1018,-9586,-3304,13236,-6374,-260,12479,-6073,-5718,18081,18140,10198,-6303,-3093,-477,-2084,16674,5382,19426,-8705,21669,-7911,-6139,13264,-2720,-160,19356,20521,21044,-5083,456,-2605,9083,24071,-2805,-7357,7066,-8239,7541,-3164,-6218,-827,12857,-1420,-1647,-2827,21262,32045,-8807,28359,9137,-7004,-3176,3982,3251,6044,-1065,-1360,13293,-804,-900,15943,-1689,9934,14269,-4446,5812,27225,18486,15928,16236,-7896,27805,-4570,7772,-2150,-3613,-521,25081,1888,-5804,12754,-8496,5054,-6781,7058,-5342,27095,-4620,23018,22676,-2588,-105,23264,18056,-2441,24607,-4831,-979,-3832,250,-1671,27543,26361,15600,32705,-8713,-2189,32547,9674,-5737,21709,-7470,-5058,2586,-2543,27555,10390,3817,-2744,-2590,20952,29035,-7960,-902,24743,-9192,12947,-5710,31187,16783,22664,-835,-9340,20213,4124,9776,-8242,-8953,3108,22732,29683,-1643,9810,-9526,8842,-4547,19868,28729,10084,6931,-3218,23117,31757,-7235,8099,-3794,12727,-4638,30560,26704,13969,-6731,-1307,8924,8796,-2056,-1524,3108,-6515,17279,2709,14051,-7260,-941,13710,-8641,-4842,-8409,-9643,8746,10140,32686,-2703,-4831,-7297,9404,24893,-995,-5251,32618,2210,13683,4440,-8118,15464,-1893,5532,-2584,6828,23006,-3847,3096,19328,7826,-3936,1908,8162,22142,24383,18944,27327,-7821,14633,19033,-4164,30104,12277,-2234,-4005,16274,-3339,32009,22694,13853,-8621,-2384,26371,12588,1855,-9152,-2011,-6449,6630,29993,16967,15984,-8197,13742,11358,-7423,16956,28250,31855,132,24892,-7153,9265,-5089,9561,20222,-4206,20746,19670,2675,14648,-4282,32090,-6605,25141,9922,-4533,-877,-4849,25503,5939,5109,12779,4236,10541,16502,-9082,27655,-7629,-7984,-7462,24438,-4138,2664,11740,-7940,-4337,17936,-2093,-4052,29646,30087,25127,27127,-3719,4020,-4615,-568,-219,-4006,-127,10389,-3050,8781,7841,16969,22654,-9055,11360,-2647,27168,2500,27571,9737,-5621,22823,-3561,-6925,6034,-5920,-8722,6825,-4459,5658,2889
6,-4274,24181,-63,-4814,4354,-6083,-5905,8127,25019,28661,26909,-3121,-837,-1071,-9434,19680,26035,2745,19671,4733,3841,18704,-9789,27125,-7056,-6613,-456,-4322,-6581,1626,-8563,-9825,-41,-6670,-1461,-1962,-6484,-4530,11129,-8805,2333,-5123,-7675,-8055,-1383,-6949,-3711,-8287,13810,-3285,8713,21697,14912,-1406,4945,31594,21877,4874,-4604,-5105,-7660,32061,-5726,11428,9963,-4011,10664,-6675,4618,11390,-6190,15215,-1048,20497,16294,-2312,-9275,8870,-1841,24432,26608,14223,-5052,6911,-9416,-8247,15683,31356,24029,2796,-4747,18861,-7755,27855,8574,-8949,-384,8315,-5199,3967,-7637,29895,11328,20860,32434,-8668,-6613,10170,-6153,12294,14061,-1622,16775,26383,-1109,12349,-2553,-8613,23695,-3734,-1789,20275,-7778,13976,-5920,21332,1753,22194,8045,-4826,-2650,-6675,15737,9896,9232,10227,-4817,7868,-3286,-6289,-916,11182,28234,19985,19287,-1518,23147,-3857,-8201,-6232,-5766,28233,-2003,6047,3431,8232,-4378,31292,12638,161,29317,-4796,32702,-2345,-9474,-9554,24989,21871,-9324,-5122,-9040,22528,-4424,-6361,4709,12412,17178,-8125,-7609,-5384,-4728,-8924,3308,20042,-8947,-5001,26084,-3637,-861,-7471,-5710,-9323,232,11117,-5165,-5695,-781,-2616,19631,-4728,-164,812,-3907,-3493,18548,26559,-6764,32650,-4593,24066,-2552,25928,22971,-8523,11950,-6752,-3085,24515,-4627,26274,-8323,-1918,-7289,13135,27597,27406,-488,12956,16888,-4597,1382,17243,-170,21364,-4597,2938,-7683,-2413,-7507,17966,20512,-3436,32702,-8476,24901,-4270,-6333,-7774,-2215,14279,-1107,-3288,9072,-2846,6054,8018,23317,17621,5891,6518,-2469,31819,14082,1116,1586,-509,-4444,11437,-5771,-7334,13629,12453,-4821,-4920,-7079,-1974,-6178,-9821,24163,19074,-5533,30741,27326,-1480,18847,22254,-9322,30992,675,19791,-6407,-7544,-1379,17106,20689,-3908,13088,27705,21458,23046,-5000,3359,-9425,-4711,-7789,-9146,24738,30314,-8521,15144,-7542,19954,29113,15742,-227,6136,25346,-3579,1259,-1661,13641,-1465,3081,17048,-391,23785,-6819,4801,-4830,23341,-6726,7818,15866,-9757,-4148,-113,-2107,-4076,-3538,-4998,7380,-4393,-6612,29642,789
9,29201,26794,-5379,13532,15719,27118,-1516,12058,23543,-6910,-2712,-4594,-7248,-5236,-8449,-8851,-6914,-3478,10513,-213,-8671,-1583,-2820,3733,-2507,-6598,770,24575,30760,3098,-9389,2825,-644,12115,10698,-3422,20277,-3085,6397,2482,-5822,-772,-3195,20027,17408,13177,2946,-2754,-649,24557,32721,13739,13770,-3442,13134,902,16446,-2156,-641,-1593,-1538,25865,29209,-2313,-1686,-6663,30122,1387,31324,9028,18254,30794,9560,-7806,-2951,13516,-2611,9219,-7730,-9407,30834,27449,-3278,-9708,28706,-9255,-1204,-4904,32167,-2834,-9194,14592,19394,-5480,-7190,27524,-5560,29619,-6610,-273,-1535,-2817,-3043,-70,21220,-3213,-1074,-8005,24075,10575,-6377,32244,-7691,-577,1442,-8663,6640,-8752,-988,9690,-4290,14732,19093,-8725,-7984,-679,24956,23226,-3917,22110,22159,-8156,7597,-6257,-9271,-8148 diff --git a/tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu0_input0_int16.csv b/tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu0_input0_int16.csv new file mode 100644 index 0000000..31ca3ef --- /dev/null +++ b/tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu0_input0_int16.csv @@ -0,0 +1 @@ 
+-15671,-22583,31876,23565,2780,28947,-13747,1043,13458,14533,32594,1510,-16737,18179,3744,-20031,13919,11555,27852,2414,-1179,12085,-28173,28246,26310,28210,20627,-27842,24756,-23439,19359,-4243,-20144,-18366,14373,-4923,-32536,-9651,19452,28704,2398,-19475,-7099,-12306,632,24592,1877,16578,-4358,-16426,-19029,18564,24846,5770,21491,4879,30968,22936,6404,21827,17791,-20222,-213,5341,-10329,3868,-25099,31826,3981,-14348,20694,14012,-29749,19135,14833,-18900,29529,25130,-20926,-21037,28698,-24853,-23374,11821,-10775,-21373,-17521,12238,28055,11557,10153,12409,-21834,-9781,-29129,-6447,-30969,-20235,3580,12903,1603,32173,3143,-21048,13051,30476,-12619,18178,30359,26185,-29728,-27673,-29053,-8347,22664,-16505,-28136,2290,-941,-2763,-31933,-30530,6168,-10557,-3417,-7644,13951,21874,11671,-29144,-5418,11322,20247,3032,-10159,2287,-19108,-26561,-18637,-16273,24627,-18596,-31343,-2347,-8565,13185,-18359,-9927,-10494,29021,27839,-15440,3522,8192,24736,18443,17248,16236,-20680,14186,-18310,-16819,8649,12752,7720,-13494,-12900,3237,32164,-11249,-6747,29769,-31622,8531,-10523,1457,-23215,-23889,-22563,25772,17509,-2010,-21192,18241,733,-12710,25429,31675,-2457,-14580,21726,-32650,-7189,-5332,30150,20096,8528,114,-15778,25891,-23417,-28928,-27122,18116,-26643,-26523,9491,8572,21327,-3054,-5255,294,-1886,23784,-3900,16106,-7232,-7024,-2275,-11767,-5721,5357,-23668,15797,-17343,-25505,-13778,21314,11077,-24454,-8170,27216,-16894,14160,-24485,19178,14250,20173,18103,8004,15794,2848,-21441,-18370,6490,-26143,553,15696,-5787,-26734,-1803,-16994,19609,-458,18712,6455,-10366,-8110,-3361,27023,-1454,-910,-24030,-4970,27180,30899,31058,-10624,28302,-247,15669,24265,13896,7069,-15873,-17661,30400,-26153,-17382,17676,3233,8784,2098,-27190,17406,-5017,4644,-13533,4,-27620,-4929,-13821,19370,4611,31247,24577,30105,-6222,-31370,11055,-4585,-18712,-4412,6736,32101,29189,-16897,-19288,-29843,25252,-5492,-20334,20255,2077,-7375,-16342,6055,8135,30037,19178,18237,-3987,-160,-28115,19744,-21562,-
29600,-24514,14142,18703,-27404,13675,14053,17396,-20412,-16601,-12758,26838,15281,-1333,-24382,-3429,-7667,2777,-11233,8486,5802,27278,11985,-19732,13243,20062,-32309,27573,-16539,1833,-2106,32110,17329,25790,12316,12154,20909,-13504,-25143,17948,-1169,-13845,-10797,12627,-14591,-2870,11893,21725,4286,27003,20579,-21490,-13408,26799,-7808,6915,14303,-8613,-6787,828,10916,-9176,-7243,-10433,18019,-7989,8054,13211,-29427,31682,-32246,1442,14016,29584,19803,-14679,15268,31161,-26365,21311,32174,4872,3736,30340,-7175,-18966,6022,-14427,-30218,-17023,4744,21345,400,-7666,-31204,3826,27696,30150,-1203,15711,-12632,-31423,-4934,5720,22657,-21711,-4948,-13819,24095,25886,29442,4274,-26982,31230,18376,11793,76,-21270,10293,18461,13228,-14210,-9152,-25977,20447,-31922,20214,-22710,2878,13104,19020,-6990,-19203,12726,-8637,24041,9648,21411,22818,4044,12749,-2125,-27203,8258,-7220,-10731,12525,31894,8442,1481,-20611,11108,4428,-32714,-21061,-3645,-16831,18291,961,-7948,-15511,-9018,25866,-25990,29750,13569,22376,3871,-24370,28136,-1363,11901,-1713,14892,2044,-22773,460,-1977,18515,19379,-6965,-13105,19169,1192,26672,970,13588,-23633,1616,13754,19788,11382,4229,-18941,25558,-10607,7182,24913,27383,-20810,31846,-13354,9033,-5807,-24689,14747,31287,-18271,1806,12555,32067,-32225,21744,10361,12289,-31617,23507,-24662,-17650,9125,2126,-12906,14541,-6754,-23782,5763,-9281,932,-15829,-30372,10427,-25207,9745,-19152,-23588,-17253,-20529,-16987,30819,-9082,-32478,31818,481,10285,12203,-13962,-21684,5663,-2535,-32300,-7841,2064,-3333,-18088,20735,-14478,18167,20037,-11391,-10812,18133,22137,-20613,-5394,-18611,2412,22051,-11113,-16140,23025,-20408,-25379,6037,-28487,2475,24292,-15897,23347,-21266,-32591,-29388,-5232,1414,16038,-22878,-31364,-22187,-10956,-8043,20610,29872,-4817,18379,22882,-32766,-28727,-26542,-20602,-30915,-11974,-5832,-18312,29380,29704,32147,-21108,-920,-27417,-8941,-30761,17413,13007,-15638,-17866,-5866,-31263,-8905,2388,-29132,-2435,-8649,32291,-15289,19515,9282,22
979,-23543,11700,-4188,-17938,-24672,30547,18468,-714,12478,-13244,-26001,-24939,16021,-17062,-7364,29421,23929,15305,-17424,27685,-7851,5770,1462,-1341,10710,18014,10992,-21370,3189,-5937,-7276,27410,16050,-20565,17514,-16619,19753,18542,-32356,26287,-7504,26884,17417,32697,-2539,17223,2661,5321,-19293,12721,11641,-21290,-16680,-9712,14531,3404,1879,26160,5369,5087,-23558,-22242,3575,28930,26788,-8242,-5634,22705,7234,-16594,8990,2153,-13267,-25267,20505,31144,28183,18954,-18358,-15807,-2450,-28174,-3334,-13539,-5163,32744,-29756,-3995,-9559,-3676,12825,-15776,27844,-1160,18110,5609,29873,-21869,6194,20780,-15775,-11151,3819,-32395,-26664,6569,24416,-11768,7619,-6715,10290,28282,-15477,11307,10336,-27553,-23757,-6032,15646,11878,-20835,-19014,-22528,-15727,-31325,14049,-24191,-28085,-30143,-19729,-11357,-19748,19088,-22543,20318,-10863,-29583,-32128,19608,-15332,32763,-32186,11730,-13077,25312,2971,14765,-31904,-7235,3691,-20104,-18554,-1788,7276,-29409,-18249,6198,-7774,28991,19184,6878,-8803,10698,15999,-18714,10543,-22986,10993,3305,-8240,13996,-2496,-6604,-19924,15960,12874,28728,19015,-1879,-26504,9705,-15,31263,25255,31217,-1325,19203,-2926,-19074,29180,-11255,-220,10144,25629,26320,-18133,30577,-2920,-19755,-11967,28564,30256,-5397,30181,31590,6299,-27162,-12625,7774,9811,4848,-7633,-27311,32693,-10823,11352,8862,1747,-19272,-16460,18284,-16034,17037,-30437,-32101,20752,-2828,-7804,-7579,30971,-19764,-3917,13637,2955,30116,-32627,13430,20587,-13765,-29031,27458,-4746,-29541,-29174,13231,2462,-31993,-10783,17853,29213,18453,30323,-1810,-540,-3206,-12646,-5243,-4751,30918,7933,-19424,30910,-3878,-32502,11335,-15107,27933,-11994,6135,-30589,11093,-8579,23553,-7946,11919,-14443,31828,-575,-26920,18933,18583,29578,29831,-18715,-14096,-20098,-28501,-27903,18537,20580,2594,7991,11522,-4316,-17852,-22291,6091,11687,14910,2748,-15998,762,10207,-4159,9970,-3696,30305,25097,30480,18728,28677,2867,-28723,3714,7467,-11516,25381,-16107,-884,-8188,-16969,20419,-12803,7291,
20592,-10924,-16195,5791,-19383,-10929,-11373,14239,-7335,-22270,24590,30211,24948,3322,-18824,11099,22091,24911,-4737,-6187,1575,-25754,-28612,27992,-29872,3520,-23494,5157,12027,2061,2994,-6382,-24916,7881,-21082,11673,-2234,-14575,8820,-26183,-14188,31763,4294,-24852,10098,20534,26520,-9657,-26582,603,-2676,21239,-19115,6744,-24861,13389,-25268,18647,26960,10891,20169,23722,-657,23335,-8533,15323,16718,27487,-11151,-15675,10395,-15185,-25886,-29548,13968,-247,30547,27427,-11742,-3683,-10404,-21341,24027,21359,-23841,18487,30070,6141,-29112,-16796,-32636,-32557,22282,22217,31934,-30032,-3638,13576,32685,-17,20977,24733,-24599,31454,30061,-29541,-15828,-9320,-12738,-32592,-30651,10600,-26828,-18312,-12013,3786,-9765,18674,28503,2181,15298,-29170,-9933,-14727,-19664,25262,17657,-9404,17753,19347,3164,-25093,-10035,8445,-15630,11456,-4809,5795,-28681,2261,10975,-18716,24030,-6826,6887,-28758,-10625,9344,14561,-27806,19693,9504,10864,3336,-24455,-1061,-24507,22585,17272,13445,17709,-30900,-32014,5374,-24846,1231,-16754,618,9923,-27313,-961,14529,7202,26354,7688,23686,26274,2146,32394,-21339,-2543,-885,-30948,28836,23109,32122,-11372,18452,-7192,-4075,-21587,-25547,5960,5491,-18981,-30546,-25983,17462,-21631,12375,-24745,16196,-20372,-14961,-26986,17894,-13242,-4451,18727,-30532,-20736,-20925,6976,13016,-834,25937,-18694,-8255,22702,21083,6769,14188,-28076,-11391,2672,-14966,1587,-10086,-18476,-3184,-10587,-10330,-28609,-27201,-18781,23215,12187,-378,-17543,1369,12449,-4802,-2298,-16945,1775,26333,-1483,-17054,-6749,-31153,25591,-26547,3275,-8118,-4617,-25016,6931,27462,-11546,2489,21053,-14598,1788,7561,4764,27199,-30230,12121,19090,-5542,28962,-13965,16682,-17887,28167,-28655,-11797,13803,12582,21059,3207,2568,6462,21818,28104,-4533,-24924,22212,9900,-24944,21640,-20304,7171,-9321,16725,23123,-9008,15827,29149,3445,10947,13904,-19588,-9841,29295,1434,-13669,32573,4666,31994,27562,-17007,15236,15797,-88,-18837,24005,9282,-14093,-15658,27054,-11895,-31575,-28602,-954,3
344,12190,-25573,20158,11335,32210,6742,25017,-1900,23923,2061,22808,-29519,21267,24455,6802,-14249,-25608,1790,-27430,13090,-407,-7025,-21820,-4337,-5695,13483,-23215,-18149,-12177,-19596,14588,-12167,-14255,-8525,5131,19392,19356,6210,16395,12050,-6620,15703,22635,24595,71,17941,-17803,-4389,-14857,-21591,-11010,-32273,3216,6277,32684,29313,5502,11024,-19782,-11410,5957,3870,-21000,-25030,-2397,5224,-30818,-19858,-3666,5155,25438,19823,18035,1960,-17733,-24749,32396,-12288,-17455,1080,16675,-5987,-24433,-21539,-26782,26288,-5553,-9844,-28469,-31903,16281,-2874,21829,29274,23782,28545,-8807,-3605,-18230,-3913,28012,11599,1566,7673,-2407,-31389,8824,-10458,-29379,-8068,13324,-10509,30938,-10901,-1462,-23456,13250,-32544,-12843,32492,-20238,10172,-15107,-13779,15109,-12044,-25983,-12930,32674,-29815,-20724,29963,9950,-24193,21796,-27933,20422,-25575,9457,-28878,-31656,-29707,-22706,-9590,-2512,15369,27215,-10675,-9698,15465,-29569,17101,-15091,27427,2639,24247,-24497,26824,30053,-11088,-18682,-22102,18613,27112,-26576,8919,30358,-6186,3717,17659,-2556,23357,15213,-18945,-22233,-32102,-5962,12678,1447,-31811,24527,10498,22024,-6647,32352,-8778,16814,-752,-2370,-24272,9944,-12505,13273,823,-1887,-23325,-13488,3179,-31490,32403,5044,-2619,-19690,-13207,-14238,20060,-22459,-4851,10212,-14782,3491,14697,6612,642,-17410,16338,22844,-6677,20872,-4707,19747,-16608,11446,288,-21035,-18403,25948,-12632,-16392,3701,-23567,14913,-26721,-9358,19968,-24490,-22983,-30244,-26058,13982,-11055,-26106,-5395,-8595,16946,-28632,32754,-30029,19804,26266,-6649,3584,8612,-8622,-24133,30266,8821,18484,-27493,-331,-14069,1132,1282,-28933,32695,-13074,-21833,23590,-8956,-7668,-17676,-15609,31867,-16633,-15145,-4519,-30218,7723,29065,-113,2710,-3666,13021,30798,-11480,-9120,8333,23554,-23431,-1641,-9463,-22013,8174,29518,8599,3955,7768,29257,-19143,21409,10222,3578,18595,25878,-16716,-14182,24919,-24536,8777,-6575,-20636,-15543,-16752,6688,1611,19752,27464,-23023,-32648,-11387,-3269,1881,31735,
20489,17509,-3801,17858,2250,12892,-6359,-32214,-11832,8185,-339,-7632,28609,5847,21882,14273,-30056,27253,-11575,22010,-23251,-19268,19228,8644,11072,-1250,11592,20707,23201,-7907,-5525,12531,-8803,-29275,-23380,-30289,25988,-23780,4276,-25836,19798,28217,4943,-31029,-28654,29404,3034,-1867,-17962,29108,3562,-9455,3404,19701,31310,21973,-8586,438,24034,-4400,-23012,9751,22250,-12103,24619,29010,6149,-15627,10477,19772,-27303,-18709,18757,-30935,9177,-16700,7720,16930,28864,25942,7284,-7195,-13434,-21355,22078,16851,-25608,-13706,-7847,1404,4801,-3986,-27303,4540,-25279,-975,-28348,19313,10516,-22093,20335,-10021,22605,-19946,2147,-10486,22442,28419,1511,23497,-4054,-18125,8277,-3334,12708,-11154,-27443,2539,12616,25513,-1853,30958,-12361,30262,21370,18716,-15336,22245,-14715,10894,-26751,-27112,31907,12137,-3392,-31953,-11013,13236,-21245,-867,12479,-20242,-19059,18081,18140,10198,-21008,-10308,-1589,-6947,16674,5382,19426,-29016,21669,-26368,-20462,13264,-9065,-533,19356,20521,21044,-16941,456,-8682,9083,24071,-9349,-24523,7066,-27464,7541,-10545,-20727,-2757,12857,-4731,-5490,-9424,21262,32045,-29356,28359,9137,-23347,-10585,3982,3251,6044,-3550,-4533,13293,-2680,-3000,15943,-5628,9934,14269,-14819,5812,27225,18486,15928,16236,-26320,27805,-15234,7772,-7166,-12041,-1736,25081,1888,-19345,12754,-28320,5054,-22601,7058,-17807,27095,-15398,23018,22676,-8625,-349,23264,18056,-8137,24607,-16104,-3261,-12773,250,-5570,27543,26361,15600,32705,-29042,-7296,32547,9674,-19124,21709,-24898,-16858,2586,-8475,27555,10390,3817,-9146,-8633,20952,29035,-26532,-3007,24743,-30639,12947,-19032,31187,16783,22664,-2782,-31134,20213,4124,9776,-27474,-29841,3108,22732,29683,-5476,9810,-31754,8842,-15155,19868,28729,10084,6931,-10727,23117,31757,-24115,8099,-12645,12727,-15460,30560,26704,13969,-22435,-4355,8924,8796,-6852,-5080,3108,-21716,17279,2709,14051,-24199,-3136,13710,-28804,-16140,-28030,-32143,8746,10140,32686,-9010,-16102,-24321,9404,24893,-3317,-17503,32618,2210,13683,4440,-
27060,15464,-6310,5532,-8612,6828,23006,-12824,3096,19328,7826,-13120,1908,8162,22142,24383,18944,27327,-26069,14633,19033,-13878,30104,12277,-7447,-13350,16274,-11129,32009,22694,13853,-28737,-7947,26371,12588,1855,-30507,-6701,-21495,6630,29993,16967,15984,-27321,13742,11358,-24744,16956,28250,31855,132,24892,-23844,9265,-16962,9561,20222,-14018,20746,19670,2675,14648,-14271,32090,-22015,25141,9922,-15109,-2923,-16163,25503,5939,5109,12779,4236,10541,16502,-30273,27655,-25430,-26614,-24874,24438,-13791,2664,11740,-26465,-14455,17936,-6975,-13506,29646,30087,25127,27127,-12395,4020,-15382,-1891,-730,-13354,-421,10389,-10165,8781,7841,16969,22654,-30184,11360,-8821,27168,2500,27571,9737,-18737,22823,-11870,-23082,6034,-19732,-29074,6825,-14864,5658,28896,-14247,24181,-210,-16046,4354,-20277,-19682,8127,25019,28661,26909,-10404,-2789,-3570,-31447,19680,26035,2745,19671,4733,3841,18704,-32630,27125,-23520,-22041,-1519,-14406,-21936,1626,-28542,-32748,-136,-22233,-4869,-6538,-21614,-15099,11129,-29349,2333,-17077,-25582,-26849,-4609,-23163,-12368,-27624,13810,-10948,8713,21697,14912,-4687,4945,31594,21877,4874,-15346,-17016,-25534,32061,-19087,11428,9963,-13370,10664,-22248,4618,11390,-20633,15215,-3491,20497,16294,-7707,-30917,8870,-6137,24432,26608,14223,-16840,6911,-31386,-27490,15683,31356,24029,2796,-15822,18861,-25848,27855,8574,-29829,-1278,8315,-17328,3967,-25456,29895,11328,20860,32434,-28891,-22043,10170,-20509,12294,14061,-5406,16775,26383,-3697,12349,-8508,-28709,23695,-12445,-5964,20275,-25925,13976,-19731,21332,1753,22194,8045,-16086,-8832,-22248,15737,9896,9232,10227,-16055,7868,-10952,-20964,-3054,11182,28234,19985,19287,-5059,23147,-12857,-27337,-20771,-19219,28233,-6675,6047,3431,8232,-14592,31292,12638,161,29317,-15986,32702,-7817,-31578,-31846,24989,21871,-31079,-17071,-30133,22528,-14747,-21201,4709,12412,17178,-27084,-25363,-17945,-15759,-29746,3308,20042,-29821,-16669,26084,-12122,-2870,-24902,-19031,-31075,232,11117,-17217,-18981,-2601,-8718,196
31,-15758,-546,812,-13024,-11642,18548,26559,-22547,32650,-15308,24066,-8506,25928,22971,-28408,11950,-22507,-10282,24515,-15422,26274,-27743,-6394,-24297,13135,27597,27406,-1626,12956,16888,-15324,1382,17243,-566,21364,-15321,2938,-25609,-8043,-25023,17966,20512,-11451,32702,-28253,24901,-14234,-21109,-25914,-7384,14279,-3688,-10960,9072,-9487,6054,8018,23317,17621,5891,6518,-8229,31819,14082,1116,1586,-1697,-14811,11437,-19237,-24446,13629,12453,-16069,-16398,-23597,-6578,-20594,-32737,24163,19074,-18444,30741,27326,-4931,18847,22254,-31071,30992,675,19791,-21356,-25146,-4596,17106,20689,-13026,13088,27705,21458,23046,-16665,3359,-31416,-15703,-25964,-30486,24738,30314,-28403,15144,-25138,19954,29113,15742,-757,6136,25346,-11930,1259,-5537,13641,-4884,3081,17048,-1303,23785,-22730,4801,-16099,23341,-22420,7818,15866,-32524,-13826,-375,-7021,-13586,-11794,-16658,7380,-14643,-22040,29642,7899,29201,26794,-17930,13532,15719,27118,-5052,12058,23543,-23033,-9038,-15312,-24159,-17451,-28162,-29502,-23047,-11593,10513,-709,-28902,-5277,-9399,3733,-8355,-21993,770,24575,30760,3098,-31296,2825,-2146,12115,10698,-11405,20277,-10282,6397,2482,-19406,-2571,-10648,20027,17408,13177,2946,-9179,-2164,24557,32721,13739,13770,-11473,13134,902,16446,-7185,-2135,-5308,-5126,25865,29209,-7709,-5620,-22208,30122,1387,31324,9028,18254,30794,9560,-26020,-9836,13516,-8703,9219,-25766,-31357,30834,27449,-10927,-32360,28706,-30849,-4011,-16347,32167,-9446,-30645,14592,19394,-18267,-23966,27524,-18531,29619,-22031,-910,-5115,-9389,-10143,-234,21220,-10710,-3579,-26684,24075,10575,-21257,32244,-25635,-1924,1442,-28875,6640,-29173,-3294,9690,-14299,14732,19093,-29084,-26614,-2264,24956,23226,-13056,22110,22159,-27185,7597,-20857,-30902,-27159 diff --git a/tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu1.tflite b/tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu1.tflite new file mode 100644 index 0000000..5eb7597 Binary files /dev/null and 
b/tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu1.tflite differ diff --git a/tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu10.tflite b/tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu10.tflite new file mode 100644 index 0000000..bfd7f38 Binary files /dev/null and b/tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu10.tflite differ diff --git a/tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu10_golden_int16.csv b/tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu10_golden_int16.csv new file mode 100644 index 0000000..8723714 --- /dev/null +++ b/tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu10_golden_int16.csv @@ -0,0 +1 @@ +-3083,32767,7193,32644,10957,-7103,-9763,12886,-4214,29569,-10808,15246,-7328,-6231,-1802,-7582,32767,-4369,21045,20632,-6547,32767,18959,10857,-3661,647,-2038,-5775,237,-3880,27002,15973,16378,-5564,-1894,-11666,22795,6533,14207,10748,-2385,7354,-9670,32767,20523,22467,-11991,4353,32767,-7131,-2944,-694,7562,21601,-50,-6361,-5285,-2155,3690,8458,9730,-8533,-7086,-1236,-1911,-581,14746,10231,-5194,-10191,28267,32767,21911,-3331,29791,28751,-7438,-9802,4439,2305,-3514,-10886,-252,-11507,32767,-1403,-10862,32767,-9451,25809,-329,-467,18098,26592,32767,-10140,-7306,-10462,-7475,-8147,2880,-1774,12644,-1415,26956,-8642,-9647,-11201,32767,32767,32767,30544,-7138,-11877,-7051,6140,-3787,-6203,-9657,-4108,-9475,-8463,-1725,-2491,-5463,-1236,-4603,-11000,-7514,-6350,-1755,-6678,28279,-4392,-5977,14195,-1437,-5629,-10732,-4251,11084,3856,-193,31639,15928,32767,11459,27662,20317,15471,32767,12946,23373,32767,2784,26549,17979,-1869,-1307,851,-382,-11967,12564,-114,-2629,-8029,26642,32767,-11988,-5019,-5465,-2095,7277,22053,9224,-5437,-10611,7094,32767,30028,-5943,13776,32767,25510,-8450,5176,-830,29866,28282,28489,-3692,-3457,9410,-2755,24794,32767,26528,19263,11261,-3151,-6795,-4770,30654,-2670,23860,-3837,-5172,-4849
,34,14636,-1959,6010,8782,-9873,18292,-7517,2420,-7973,22570,-5512,-4248,29019,-8110,14727,23947,-9208,21835,-5975,-6204,-1079,-7512,31,-2964,5320,-4545,-1065,-7579,-6443,-2610,32767,28794,32767,-3973,-2414,21718,-228,6590,32767,-947,32767,-12047,-1433,-3998,8853,-8016,-4119,-10580,-433,29062,-11897,15829,-2363,-8825,-3016,-5193,-11083,-11149,-1238,480,-5889,-752,21407,-6554,-7666,4237,17199,9220,-9327,-5116,-7051,-2055,-9590,-11173,32767,-10028,12328,-10955,32767,32767,-1215,7013,19402,-1738,24191,16513,-6568,-8639,5367,-11203,15217,-4753,21240,-4160,26006,32767,-3114,-4953,7372,-6608,-11007,-336,-9544,17611,5427,32767,-10031,7900,32767,8932,17675 diff --git a/tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu10_input0_int16.csv b/tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu10_input0_int16.csv new file mode 100644 index 0000000..fd8cfc9 --- /dev/null +++ b/tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu10_input0_int16.csv @@ -0,0 +1 @@ 
+-8369,31738,5857,26582,8922,-19279,-26499,10493,-11438,24078,-29335,12415,-19891,-16912,-4890,-20580,29941,-11859,17137,16801,-17769,27965,15438,8841,-9936,527,-5531,-15674,193,-10530,21988,13007,13337,-15103,-5139,-31666,18562,5320,11569,8752,-6472,5988,-26246,27786,16712,18295,-32548,3545,29838,-19355,-7991,-1884,6158,17590,-135,-17264,-14345,-5850,3005,6887,7923,-23161,-19234,-3354,-5187,-1576,12008,8331,-14097,-27661,23018,28708,17842,-9041,24259,23412,-20189,-26606,3615,1877,-9538,-29547,-684,-31233,28930,-3807,-29482,29049,-25652,21016,-893,-1267,14737,21654,29799,-27523,-19831,-28398,-20290,-22114,2345,-4815,10296,-3839,21950,-23458,-26184,-30404,29020,28500,32737,24872,-19374,-32237,-19137,5000,-10278,-16836,-26211,-11151,-25717,-22971,-4681,-6761,-14828,-3354,-12493,-29857,-20396,-17236,-4764,-18125,23028,-11922,-16222,11559,-3900,-15278,-29129,-11538,9026,3140,-524,25764,12970,32581,9331,22525,16544,12598,30144,10542,19033,32634,2267,21619,14640,-5073,-3546,693,-1035,-32482,10231,-308,-7134,-21792,21695,28316,-32539,-13623,-14834,-5685,5926,17958,7511,-14758,-28800,5777,32011,24452,-16130,11218,28175,20773,-22935,4215,-2253,24320,23030,23199,-10022,-9382,7663,-7478,20190,28580,21602,15686,9170,-8553,-18442,-12947,24962,-7246,19429,-10415,-14038,-13162,28,11918,-5316,4894,7151,-26798,14895,-20402,1971,-21642,18379,-14960,-11531,23630,-22014,11992,19500,-24992,17780,-16218,-16839,-2929,-20390,25,-8045,4332,-12336,-2890,-20571,-17487,-7084,28250,23447,29838,-10784,-6551,17685,-619,5366,28765,-2571,31339,-32700,-3888,-10850,7209,-21757,-11179,-28717,-1174,23665,-32291,12890,-6413,-23953,-8185,-14095,-30083,-30262,-3361,391,-15984,-2040,17432,-17788,-20808,3450,14005,7508,-25316,-13887,-19137,-5577,-26031,-30327,28101,-27218,10039,-29734,31001,28920,-3297,5711,15799,-4718,19699,13447,-17827,-23448,4370,-30407,12391,-12900,17296,-11292,21177,26876,-8452,-13444,6003,-17937,-29876,-912,-25905,14341,4419,32247,-27226,6433,31437,7273,14393 diff --git 
a/tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu11.tflite b/tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu11.tflite new file mode 100644 index 0000000..71fa19e Binary files /dev/null and b/tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu11.tflite differ diff --git a/tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu11_golden_int16.csv b/tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu11_golden_int16.csv new file mode 100644 index 0000000..37eb3fa --- /dev/null +++ b/tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu11_golden_int16.csv @@ -0,0 +1 @@ +19021,-6743,-9822,8840,-9486,14865,-8535,23202,-3247,32767,15231,24964,19388,29557,21283,-3519,14976,17814,13667,1124,-1025,-455,-2163,-9680,29130,-5972,-6730,121,-589,-384,-9954,-5825,3402,-3862,32767,23426,-5044,-9115,27119,-10095,-1692,-2425,15652,-813,-2327,2768,27809,16510,31531,-2915,28063,-9070,-2260,-9740,-1825,-1861,3344,-4544,-5115,32767,-3178,-5733,4766,-3154,32767,12105,23332,-233,-696,-8518,-8700,23941,18737,-5838,-8005,-8768,14058,16426,6593,32350,27481,-1436,22341,-10029,-3456,26628,40,27736,13843,-10888,15581,-9623,3021,-2507,10876,7563,-4588,-616,28219,16209,-9960,2905,-628,20104,24852,30719,-5099,22057,-6025,8227,-67,32767,2467,-1485,-5437,-7842,32767,28071,9466,-378,-2171,-4662,-271,-2050,-698,24030,13477,2501,18953,-9104,-9199,-670,16328,-8934,-9164,10043,-3987,12555,12435,-6383,-4690,10293,21541,-10414,-1208,13532,8122,-4839,-3522,-5663,24768,-9602,18943,-4735,-76,-1049,-1489,30973,20533,30005 diff --git a/tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu11_input0_int16.csv b/tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu11_input0_int16.csv new file mode 100644 index 0000000..fb1f898 --- /dev/null +++ b/tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu11_input0_int16.csv @@ -0,0 +1 @@ 
+17088,-20192,-29412,7942,-28406,13354,-25558,20844,-9723,32069,13683,22427,17418,26553,19120,-10536,13454,16004,12278,1010,-3070,-1363,-6476,-28987,26170,-17882,-20152,109,-1764,-1148,-29808,-17443,3056,-11564,31225,21045,-15103,-27294,24363,-30230,-5065,-7260,14061,-2434,-6968,2487,24983,14832,28327,-8728,25211,-27160,-6766,-29165,-5464,-5573,3004,-13606,-15318,32270,-9516,-17168,4282,-9445,29537,10875,20961,-698,-2083,-25507,-26051,21508,16833,-17483,-23970,-26256,12629,14757,5923,29062,24688,-4300,20071,-30031,-10350,23922,36,24917,12436,-32604,13998,-28816,2714,-7507,9771,6794,-13737,-1844,25351,14562,-29825,2610,-1879,18061,22326,27597,-15269,19815,-18043,7391,-200,32172,2216,-4447,-16280,-23484,30920,25218,8504,-1130,-6500,-13959,-812,-6139,-2089,21588,12107,2247,17027,-27262,-27545,-2006,14669,-26753,-27442,9022,-11939,11279,11171,-19115,-14045,9247,19352,-31186,-3617,12157,7297,-14490,-10545,-16958,22251,-28754,17018,-14178,-228,-3140,-4458,27825,18446,26956 diff --git a/tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu12.tflite b/tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu12.tflite new file mode 100644 index 0000000..df26a0a Binary files /dev/null and b/tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu12.tflite differ diff --git a/tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu12_golden_int16.csv b/tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu12_golden_int16.csv new file mode 100644 index 0000000..8d681ee --- /dev/null +++ b/tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu12_golden_int16.csv @@ -0,0 +1 @@ 
+11237,-8099,-3843,-4337,-405,-1512,-330,-4654,18671,30544,22430,-6736,-2210,-10960,10951,25718,15478,28516,-10851,19,21823,13371,18227,-9513,1243,25339,-6061,-6382,-3516,-2616,32767,15357,-2047,-5114,32767,-89,28763,32767,31999,11141,30228,30607,21527,-5704,-1914,547,11812,1333,-6610,27152,-7777,6595,-7965,4157,-7062,-7826,-296,3259,-4218,-3932,-6401,32767,14115,24056,-9976,22945,-2348,-9183,-5942,-1696,19522,8049,20001,7678,-3409,2434,14710,7148,20031,29918,-9362,17041,2413,13334,14090,3984,31641,30208,8219,-10703,28440,-3572,-6498,-6048,13279,-2167,-220,19000,21221,-6307,-9098,18212,-34,-818,-328,-10487,7818,29013,-9740,-10927,5944,12607,-9463,-2276,-3749,8291,-8829,-9538,-158,32767,-2599,32767,-10614,-3911,-4857,-4518,25565,20255,32767,15909,-915,17805,28895,5014,10355,-5924,-9432,-7672,32767,9163,31706,-1573,32767,2290,-2979,-10291,32767,-6865,15823,25458,-7060,-3834,934,21556,24523,-5064,2267,18610,16947,-7254,-3984,25928,-9849,-7328,-388,4362,-2551,32767,-8361,-1824,-6640,31810,-5664,32767,-9583,-5097,17402,21748,-3379,8866,-1483,-10933,32767,-212,-6604,-10022,-5151,28430,2688,12401,14224,19127,-3573,15925,28305,-10353,-2596,2843,-303,-1974,2671,-9342,3127,15719,29310,2235,-1126,-8860,-9786,-5487,-1507,18055,-314,28816,32767,9451,26022,318,54,-6838,-995,18240,-5193,21934,27015,-715,9024,26075,-3237,31058,-10081,-7089,10941,-1762,-1074,27115,-5064,-1258,9508,-10517,23107,17284,-564,-646,27285,-3901,-4409,27840,7698,11759,32767,-8276,24112,19231,-961,24350,-7062,-7812,1667,32767,-10207,-3743,-2478,13220,765,6869,-2676,-7214,-4078,30645,-3732,-10182,-4709,-7959,32355,3356,-9024,-2513,-9713,-2457,6865,14429,30313,7664,-2005,6147,-3883,-1835,14471,2237,-8486,-6466,11401,-10776,23538,-9185,12573,-8413,-10350,7843,-5891,32767,26916,25831,28713,32767,32767,-3146,32767,29023,26436,24182,-9066,28437,24912,9944,-3989,18307,27032,108 diff --git a/tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu12_input0_int16.csv 
b/tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu12_input0_int16.csv new file mode 100644 index 0000000..eb36390 --- /dev/null +++ b/tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu12_input0_int16.csv @@ -0,0 +1 @@ +9929,-23853,-11318,-12772,-1193,-4453,-972,-13707,16497,26988,19819,-19839,-6507,-32278,9676,22724,13676,25196,-31957,17,19282,11814,16105,-28017,1098,22389,-17851,-18796,-10356,-7705,30990,13569,-6028,-15060,31751,-262,25414,29461,28274,9844,26709,27044,19021,-16799,-5636,483,10437,1178,-19467,23991,-22904,5827,-23457,3673,-20798,-23048,-871,2880,-12423,-11580,-18852,30369,12472,21255,-29382,20274,-6916,-27045,-17500,-4995,17249,7112,17672,6784,-10040,2151,12997,6316,17699,26435,-27573,15057,2132,11782,12450,3520,27957,26691,7262,-31522,25129,-10521,-19139,-17812,11733,-6382,-648,16788,18750,-18575,-26795,16092,-98,-2408,-964,-30885,6908,25635,-28686,-32181,5252,11139,-27871,-6702,-11041,7326,-26004,-28092,-464,30882,-7653,31838,-31260,-11518,-14305,-13305,22589,17897,30397,14057,-2695,15732,25531,4430,9149,-17448,-27780,-22595,32581,8096,28015,-4633,30284,2023,-8773,-30309,28967,-20219,13981,22494,-20793,-11291,825,19046,21668,-14913,2003,16443,14974,-21363,-11732,22909,-29006,-21581,-1141,3854,-7513,30904,-24624,-5371,-19555,28107,-16680,32466,-28223,-15011,15376,19216,-9952,7834,-4366,-32201,31411,-625,-19451,-29517,-15170,25120,2375,10957,12568,16900,-10522,14071,25010,-30491,-7645,2512,-893,-5814,2360,-27515,2763,13889,25898,1975,-3316,-26095,-28822,-16161,-4438,15953,-925,25461,29167,8351,22992,281,48,-20139,-2930,16116,-15294,19380,23870,-2106,7973,23039,-9534,27442,-29689,-20879,9667,-5188,-3162,23958,-14913,-3704,8401,-30974,20417,15272,-1661,-1902,24108,-11488,-12984,24599,6802,10390,29919,-24374,21305,16992,-2831,21515,-20798,-23007,1473,31374,-30061,-11022,-7297,11681,676,6069,-7882,-21246,-12011,27077,-10992,-29987,-13867,-23441,28588,2965,-26576,-7402,-28606,-7236,6066,12749,26784,6772,-5905,5431,-1
1436,-5403,12786,1977,-24993,-19043,10074,-31738,20798,-27050,11109,-24778,-30482,6930,-17351,32092,23782,22824,25370,31826,32111,-9265,30635,25644,23358,21367,-26700,25126,22012,8786,-11749,16176,23885,95 diff --git a/tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu13.tflite b/tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu13.tflite new file mode 100644 index 0000000..0f97942 Binary files /dev/null and b/tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu13.tflite differ diff --git a/tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu13_golden_int16.csv b/tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu13_golden_int16.csv new file mode 100644 index 0000000..1b08e61 --- /dev/null +++ b/tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu13_golden_int16.csv @@ -0,0 +1 @@ +17380,-8145,-5965,-80,-10748,-1445,17258,-6652,28390,1225,-8934,19345,5168,-10995,-9180,-9746,8274,-6569,-1621,30031,32767,-9702,7,11230,-2622,29121,-7007,-226,14,-4692,5440,32767,31928,17275,-5256,-10387,32767,-6755,18547,6523,5884,-6039,18880,-7104,-6221,-11003,27116,23639,-10415,5864,-7165,32767,-9667,32767,-6330,-10372,-2458,32767,23541,-991,-8170,9145,9752,-949,-10977,29140,-568,-7820,-148,-2916,25507,32767,24371,24862,-3215,-5561,32767,-2344,-1006,-10720,-10480,-1452,32767,-4273,13948,-340,32276,-6328,-8849,4892,6815,-10652,2614,-7269,-9736,32767,-11243,25276,32767,-8319,1090,31669,-6699,1513,-1142,-5480,-6736,-11072,1386,9872,9522,24880,-8036,7550,5026,-6487,16117,32767,32554,10094,31555,-3604,32767,-2573,4544,4946,1884,22842,11054,22736,-7539,-3153,2937,-347,783,27616,-4345,-2188,20906,-711,-4723,31966,25847,32767,32767,32767,12185,31744,4441,-4085,27022,-9318,-10633,-5403,25361,8209,-4253,21991,32767,389,41,-9473,-11433,24026,23129,-5632,-1514,1088,643,-6772,14183,-9125,-3825,-736,-8447,27854,32767,-10865,-7563,-583,10381,-373,2695,17079,-9174,16430,-6150,-6674,14104,19689,32767,99
49,-379,5138,1352,-3667,-8094,-6508,-3840,-2516,-2479,26324,13463,7800,-9856,-4369,22826,9804,8802,-6667,-3467,-5429,-972,32767,-11609,21829,19757,32767,5349,-10448,-8016,17408,22639,28227,-361,5782,1884,-7845,-5466,1713,-3618,7001,32767,-6459,-11511,20082,27398,-4453,-7473,11940,32767,-4370,-7093,-2014,13916,7005,-9015,20875,-8062,-8351,30307,28249,29083,-9647,-3124,32767,-6646,-4053,-6663,-3745,-7621,20384,11064,18443,24916,32767,20809,32767,-5781,32767,32410,7773,-9282,28369,32767,-2910,16904,-8199,-9204,27333,-2526,-11739,-5369,-8364,-3935,14320,2592,-6213,-3540,2045,-5534,20808,2006,29691,32767,13076,32534,28390,-3490,15682,25391,-9765,23439,-6748,32767,-9270,28185,-3778,5812,-3587,31396,-5172,32767,-1489,32767,-592,17637,26653,-11722,-7022,6917,7673,9195,12116,22318,11618,-1146,1962,-7266,-8554,-9643,22850,-3302,21991,-5870,-10534,-11305,-4881,28685,-5369,-1325,-3855,6898,-4635,24436,26634,-10638,-6628,18524,27778,24037,12486,-1246,-8602,23214,-9401,-3970,1137,-5436,32261,-2872,-2135,950,13842,-5371,3382,-5745,-10880,-9888,-4834,-8657,-6433,-1704,14558,-190,32767,32767,18020,-9731,-11320,-668,-7037,6913,29391,16398,-108,32767,-11205,-4396,-10987,-7978,-10833,8219,32767,16762,-4791,3872,-6210,-1524,4949,-2959,-8798,10440,30993,-10027,-4992,8802,19816,1489,15139,28417,-494,-5268,-6705,9550,32767,26773,12423,8383,-9032,12540,32266,-4283,32767,17674,12057,-11195,3092,-11267,-7790,23288,-4127,14525,-2647,-6595,-2146,32767,6690,32767,32767,32767,-2305,-1844,4249,5029,32065,5232,-2950,26113,32767,32767,4972,19412,-369,11089,23855,-11500,32767,17904,-7801,-9261,10400,27776,7967,32075,5770,8723,18490,-8260,-2317,3507,32767,14901,32767,-10146,17970,28638,10273,-8838,30011,26290,5782,-4657,-5246,11699,-9842,7900,-9482,-184,32767,26762,-1275,-4528,18046,-10063,28272,6743,4514,-7168,19874,-9086,17418,-616,-5850,12852,19930,-4826,-1141,26917,-3084,22050,2290,26423,1284,-313,13936,668,13179,24089,-1125,-10128,-4622,29110,-181,7084,-7594,26168,-4721,-8604,14773,1236,-9849,245
98,-95,14722,20218,-2779,18123,32767,789,13666,32767,-8626,-4337,-3944,-1309,18917,-1400,32767,-9873,-2603,25713,-1009,-8465,29176,-11593,-7974,-4309,-7091,13209,-748,-986,3882,31021,-7011,-532,-1646,29869,32403,14958,13388,-10271,24676,-2625,3176,-2589,-6067,2157,2335,-10920,13999,1689,25996,-9326,25326,-4792,32554,-8701,13104,11769,-10131,24682,-1649,-770,32767,2964,-9114,-6354,-7587,-10027,-9303,-7502,11790,-4238,-4295,7843,-2376,12616,1306,32767,18135,-898,-7100,-6839,-8646,-2742,9693,-1276,9369,-86,-10073,-7437,32767,22301,27781,17280,-953,-8720,-10325,25350,3743,30300,-8047,9304,-9818,11675,11350,7531,-7952,18788 diff --git a/tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu13_input0_int16.csv b/tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu13_input0_int16.csv new file mode 100644 index 0000000..60f7ead --- /dev/null +++ b/tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu13_input0_int16.csv @@ -0,0 +1 @@ +14509,-22663,-16597,-222,-29907,-4019,14407,-18509,23700,1023,-24859,16149,4314,-30594,-25545,-27120,6907,-18278,-4510,25070,30926,-26996,6,9375,-7296,24310,-19496,-628,12,-13055,4541,29971,26653,14421,-14626,-28902,28202,-18795,15483,5445,4912,-16804,15761,-19768,-17309,-30618,22636,19734,-28980,4895,-19937,28973,-26900,28187,-17613,-28860,-6839,28787,19652,-2758,-22734,7634,8141,-2640,-30543,24326,-1579,-21759,-410,-8114,21293,29979,20345,20755,-8945,-15474,27641,-6521,-2799,-29829,-29160,-4041,31305,-11889,11644,-945,26944,-17607,-24623,4084,5689,-29640,2182,-20226,-27090,27421,-31284,21100,29155,-23149,910,26437,-18640,1263,-3178,-15248,-18743,-30808,1157,8241,7949,20770,-22361,6303,4196,-18049,13454,31900,27176,8426,26342,-10027,32603,-7160,3793,4129,1573,19068,9228,18980,-20979,-8774,2452,-966,654,23054,-12089,-6089,17452,-1978,-13141,26685,21577,28989,32191,27377,10172,26500,3707,-11365,22558,-25927,-29588,-15035,21171,6853,-11835,18358,31021,325,34,-26358,-31812,20057,19308,-15672,-4213,908,5
37,-18843,11840,-25391,-10642,-2046,-23504,23252,32652,-30233,-21044,-1621,8666,-1037,2250,14257,-25526,13716,-17112,-18571,11774,16436,30768,8305,-1055,4289,1129,-10202,-22523,-18110,-10686,-7001,-6898,21975,11239,6511,-27424,-12158,19055,8184,7348,-18550,-9646,-15106,-2703,32067,-32302,18223,16493,27799,4465,-29071,-22305,14532,18899,23564,-1005,4827,1573,-21828,-15209,1430,-10067,5844,30691,-17973,-32031,16764,22872,-12391,-20795,9967,30771,-12159,-19736,-5604,11617,5848,-25085,17426,-22433,-23238,25300,23582,24278,-26844,-8691,30260,-18493,-11276,-18540,-10420,-21205,17016,9236,15396,20800,27417,17371,31589,-16087,29051,27056,6489,-25828,23682,31992,-8098,14111,-22815,-25611,22817,-7027,-32666,-14940,-23273,-10948,11954,2164,-17287,-9849,1707,-15398,17370,1675,24786,31615,10916,27159,23700,-9711,13091,21196,-27171,19567,-18776,30701,-25793,23529,-10512,4852,-9982,26209,-14390,29613,-4142,28410,-1647,14723,22250,-32618,-19539,5774,6405,7676,10114,18631,9699,-3189,1638,-20217,-23801,-26832,19075,-9188,18358,-16333,-29312,-31456,-13582,23946,-14939,-3685,-10727,5758,-12898,20399,22234,-29601,-18444,15464,23189,20066,10423,-3467,-23935,19379,-26159,-11046,949,-15127,26931,-7991,-5941,793,11555,-14944,2823,-15985,-30274,-27514,-13451,-24088,-17901,-4740,12153,-528,28267,29055,15043,-27076,-31499,-1859,-19581,5771,24535,13689,-301,30101,-31179,-12231,-30571,-22199,-30144,6861,30071,13993,-13330,3232,-17280,-4239,4131,-8234,-24481,8715,25873,-27901,-13889,7348,16542,1243,12638,23722,-1373,-14659,-18658,7972,31353,22350,10371,6998,-25131,10468,26935,-11918,27926,14754,10065,-31150,2581,-31350,-21675,19441,-11483,12125,-7364,-18350,-5970,31659,5585,29416,32049,28597,-6412,-5130,3547,4198,26768,4368,-8208,21799,27414,29601,4151,16205,-1026,9257,19914,-32000,28231,14946,-21708,-25768,8682,23187,6651,26776,4817,7282,15435,-22984,-6446,2928,27636,12439,28341,-28231,15001,23907,8576,-24593,25053,21947,4827,-12958,-14596,9766,-27386,6595,-26385,-512,30931,22341,-3548,-12599,15
065,-28000,23601,5629,3768,-19944,16591,-25283,14540,-1714,-16277,10729,16637,-13428,-3173,22470,-8581,18407,1912,22058,1072,-870,11634,558,11002,20109,-3131,-28181,-12860,24301,-503,5914,-21132,21845,-13136,-23941,12332,1032,-27406,20534,-265,12290,16878,-7732,15129,31121,659,11408,27564,-24003,-12068,-10973,-3642,15792,-3896,28665,-27471,-7243,21465,-2806,-23555,24356,-32259,-22188,-11989,-19732,11027,-2081,-2743,3241,25896,-19509,-1480,-4579,24934,27050,12487,11176,-28580,20599,-7303,2651,-7204,-16881,1801,1949,-30387,11686,1410,21701,-25951,21142,-13335,27176,-24212,10939,9825,-28191,20604,-4589,-2143,28337,2474,-25359,-17681,-21111,-27902,-25885,-20874,9842,-11793,-11951,6547,-6611,10532,1090,30088,15139,-2497,-19757,-19030,-24057,-7629,8092,-3549,7821,-238,-28030,-20693,29941,18617,23191,14425,-2651,-24263,-28731,21162,3125,25294,-22390,7767,-27318,9746,9475,6287,-22128,15684 diff --git a/tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu14.tflite b/tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu14.tflite new file mode 100644 index 0000000..a7b4706 Binary files /dev/null and b/tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu14.tflite differ diff --git a/tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu14_golden_int16.csv b/tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu14_golden_int16.csv new file mode 100644 index 0000000..bd99548 --- /dev/null +++ b/tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu14_golden_int16.csv @@ -0,0 +1 @@ 
+32767,-6422,-2260,27326,32767,-8500,-13930,-6481,-550,-3112,-12853,32767,-6140,-12129,16368,26177,-11859,23899,21714,-7962,-6157,13826,9151,-3741,-10383,-10135,-13032,-9791,-1603,18422,-9103,-9609,-6753,-2620,26431,-1940,-1068,-4486,-6861,-5413,17193,-3687,-3429,24731,29029,32767,-94,-8032,13873,20462,27253,6547,32767,11223,32767,-448,-11379,3457,32767,16221,9148,18423,-4973,-2070,-10897,8954,22032,-756,-9069,-283,8912,-10555,23743,-4957,-9389,11287,-10627,8351,17262,-4920,-11661,20240,-7568,20994,19890,23236,17551,3956,-4740,-4758,-10043,32767,-10592,-3500,14152,-2843,-13153,-9447,-13503,14125,-1561,23146,-4890,3516,-2267,-3613,-11146,-5516,-12276,32767,-10905,-2070,-12843,32709,10387,-2030,-2774,-9281,24145,-1232,6888,-11611,-10431,-5558,27689,-9721,6859,32767,-10268,-10603,-4319,-10,28332,32767,4616,-7643,-2720,-4295,-2298,-8184,-103,-12645,-11069,-10810,-9036,-7053,-7881,26743,14746,32767,6618,12268,-8951,-568,-7843,27702,-6443,32767,-8925,32767,-2490,-11353,9228,-10545,-8133,-717,21561,-2008,24077,-11821,28483,-6910,-4985,-5301,-69,17569,20770,-12478,32767,31677,27704,32767,-12492,-10620,15596,7378,-13314,-6112,21261,3026,-2275,-11479,21895,30759,32767,-9001,-7965,-2813,32767,-8905,-9438,-2589,-8022,28445,32767,-9527,-6172,-11446,15189,20595,7139,-6147,-300,-12680,-7293,32767,-4962,-2373,-8682,-13216,32767,32767,-5161,-798,-13656,-369,32767,22424,13784,31003,-6888,-1807,9080,15370,32767,-11253,14073,-8311,-13409,-1518,21724,-5730,-1094,-13393,-9563,12390,-11325,8722,988,-10990,32767,32767,-908,268,28275,-13693,-5732,32767,11713,8537,9145,2538,8052,-12221,-10984,4298,-11632,-8274,32767,32767,10186,-7671,5397,18654,-1615,5091,27922,-2235,-5186,-4962,31700,-10189,-7139,-5968,32767,-3777,12199,-4602,32767,-325,-422,7469,22795,10166,17465,16462,29648,32767,-5009,-7598,-6344,-3493,-4828,-10107,32767,-3078,-8856,17179,-9769,-1969,32767,-1006,18097,-11807,25023,-8260,1310,-6396,32767,-3597,10495,5081,1774,32767,-7291,29387,4217,-3188,-11320,19975,17635,-10375,-13278,-
1004,-6752,28908,32767,32767,19090,21855,19329,-8599,24848,-13890,-13182,-7865,-9595,-373,10898,32767,-5454,5642,32767,-4491,25526,-2476,32767,32767,5303,-9074,-10318,13233,-1490,32767,20458,-7919,32767,-769,32767,13663,-10664,-483,7340,-7490,19223,32767,-7895,-893,17748,32767,30812,13414,17360,14232,-1626,-12076,-7752,-13907,5226,2753,-7030,28037,23407,32767,-3804,-7756,32767,32767,-13757,10216,-1915,-1635,2927,-3019,-10715,-31,-9690,32767,10357,-6495,-11066,31975,-1491,-9190,-3198,15674,13422,27463,-6166,16674,11770,-419,26008,3886,-9833,24597,-12974,-1292,-3239,16341,19431,-2761,32260,16595,32767,996,-682,22626,32767,17857,30831,-2053,13703,-6871,32767,16113,10781,-1568,1596,21899,-1172,-436,30058,23254,32767,15090,32767,19302,-2035,-12680,4927,32767,-8077,9127,-11606,32767,32170,-6351,31389,-3275,-13969,-11729,-4336,-10117,32767,31966,20559,27312,-12582,32767,-5684,13203,-6336,-7787,32767,18506,865,32767,-2952,-6337,2597,-3825,-772,8109,9073,-7092,-792,-6658,-2602,-6641,32767,27493,-11739,-11398,29018,-8980,-873,10717,32767,30024,15523,29494,-5623,-8162,1283,-10700,-3240,32767,26165,32767,8408,-10712,4321,-4704,-5689,26353,-10999,-12630,-1147,-13104,-4024,32767,17501,-9297,-6670,20779,-2043,-494,-6765,32767,-426,-5897,9486,-3191,27742,-12197,-2349,17006,-13200,32767,-4800,22170,25589,-3996,14844,-2463,-967,25235,32767,-4580,1439,-1593,-9423,-11326,32767,32767,9192,12147,-10529,-10512,12730,-13653,-9081,-7115,-5274,-4698,32767,-7883,-4395,10694,-7412,6618,-2474,27424,4793,-13765,-3527,5806,32767,9740,-9787,32767,-8294,-3529,-12604,32767,1170,25663,-6835,-2728,-7423,-343,-13234,14438,-8648,891,12658,32481,-7584,32767,-461,-3956,7987,-4263,-2861,-2978,-1262,32767,7324,27127,-8761,-2261,23033,7217,28369,-1459,-481,32767,32767,-12383,13794,-3064,-9165,26970,-1627,-1767,-8024,5259,7780,18510 diff --git a/tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu14_input0_int16.csv 
b/tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu14_input0_int16.csv new file mode 100644 index 0000000..a4ea507 --- /dev/null +++ b/tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu14_input0_int16.csv @@ -0,0 +1 @@ +25522,-15019,-5286,19174,28168,-19880,-32580,-15159,-1286,-7277,-30062,27483,-14361,-28369,11485,18368,-27736,16769,15236,-18622,-14401,9701,6421,-8750,-24285,-23705,-30479,-22899,-3748,12926,-21290,-22473,-15794,-6128,18546,-4537,-2497,-10491,-16046,-12659,12064,-8622,-8019,17353,20369,25220,-219,-18786,9734,14358,19123,4594,27902,7875,31703,-1048,-26615,2426,27337,11382,6419,12927,-11632,-4841,-25486,6283,15459,-1767,-21212,-661,6253,-24687,16660,-11594,-21960,7920,-24854,5860,12112,-11507,-27273,14202,-17700,14731,13956,16304,12315,2776,-11087,-11128,-23490,29088,-24773,-8185,9930,-6650,-30763,-22095,-31581,9911,-3650,16241,-11437,2467,-5302,-8450,-26068,-12901,-28711,29864,-25505,-4841,-30039,22951,7288,-4748,-6487,-21707,16942,-2882,4833,-27156,-24397,-13000,19429,-22736,4813,30528,-24015,-24798,-10102,-22,19880,29586,3239,-17875,-6362,-10046,-5374,-19140,-241,-29575,-25890,-25282,-21135,-16496,-18433,18765,10347,25579,4644,8608,-20935,-1329,-18344,19438,-15070,27937,-20875,32540,-5824,-26554,6475,-24664,-19022,-1677,15129,-4695,16894,-27648,19986,-16161,-11660,-12399,-161,12328,14574,-29185,25694,22227,19439,26226,-29216,-24838,10943,5177,-31140,-14295,14918,2123,-5320,-26847,15363,21583,30665,-21051,-18630,-6579,23227,-20827,-22074,-6054,-18762,19959,29068,-22282,-14436,-26771,10658,14451,5009,-14376,-702,-29656,-17057,32256,-11605,-5550,-20307,-30910,23297,24155,-12070,-1866,-31940,-862,31063,15734,9672,21754,-16109,-4226,6371,10785,25205,-26319,9875,-19437,-31362,-3551,15243,-13402,-2558,-31324,-22366,8694,-26487,6120,693,-25703,30618,25235,-2122,188,19840,-32027,-13407,32093,8219,5990,6417,1781,5650,-28583,-25691,3016,-27206,-19352,24860,24123,7147,-17941,3787,13089,-3776,3572,19592,-5228,-12128,-11604,2
2243,-23831,-16698,-13959,27324,-8833,8560,-10764,25714,-759,-987,5241,15995,7133,12255,11551,20803,30444,-11716,-17771,-14838,-8169,-11291,-23638,32298,-7198,-20713,12054,-22848,-4605,25509,-2352,12698,-27614,17558,-19320,919,-14958,24384,-8412,7364,3565,1245,31540,-17052,20620,2959,-7457,-26475,14016,12374,-24265,-31055,-2347,-15792,20284,26148,32379,13395,15335,13563,-20111,17435,-32487,-30832,-18395,-22441,-872,7647,23118,-12756,3959,29175,-10503,17911,-5791,25268,24571,3721,-21223,-24133,9285,-3485,30753,14355,-18522,25460,-1798,29357,9587,-24942,-1129,5150,-17517,13488,30211,-18466,-2089,12453,29674,21620,9412,12181,9986,-3803,-28244,-18131,-32526,3667,1932,-16441,19673,16424,29014,-8897,-18139,32003,28756,-32177,7168,-4478,-3824,2054,-7060,-25060,-71,-22664,27669,7267,-15190,-25883,22436,-3487,-21494,-7479,10998,9418,19270,-14420,11700,8259,-979,18249,2727,-22999,17259,-30345,-3021,-7574,11466,13634,-6458,22636,11644,30106,699,-1595,15876,26733,12530,21633,-4802,9615,-16070,26411,11306,7565,-3667,1120,15366,-2740,-1019,21091,16317,27441,10588,26990,13544,-4760,-29657,3457,26386,-18891,6404,-27144,28157,22573,-14853,22025,-7660,-32672,-27433,-10142,-23662,30683,22430,14426,19164,-29427,25421,-13294,9264,-14818,-18213,30740,12985,607,24932,-6904,-14821,1822,-8945,-1806,5690,6366,-16587,-1853,-15573,-6085,-15533,23728,19291,-27457,-26659,20361,-21004,-2042,7520,25384,21067,10892,20695,-13150,-19089,900,-25025,-7578,26588,18359,30945,5900,-25054,3032,-11001,-13306,18491,-25726,-29539,-2683,-30648,-9412,28791,12280,-21744,-15601,14580,-4777,-1155,-15822,24079,-996,-13791,6656,-7464,19466,-28526,-5494,11933,-30874,26521,-11227,15556,17955,-9346,10416,-5760,-2261,17707,30872,-10712,1010,-3725,-22040,-26490,25496,29557,6450,8523,-24625,-24586,8932,-31932,-21240,-16641,-12335,-10987,31647,-18438,-10279,7504,-17336,4644,-5785,19243,3363,-32194,-8248,4074,30934,6834,-22891,24592,-19398,-8253,-29480,32357,821,18007,-15986,-6380,-17361,-802,-30953,10131,-20227,625,8882,22
791,-17737,25239,-1077,-9251,5604,-9971,-6690,-6964,-2951,26914,5139,19034,-20491,-5288,16162,5064,19906,-3412,-1125,30708,27698,-28963,9679,-7167,-21435,18924,-3805,-4133,-18767,3690,5459,12988 diff --git a/tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu15.tflite b/tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu15.tflite new file mode 100644 index 0000000..bd6baa5 Binary files /dev/null and b/tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu15.tflite differ diff --git a/tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu15_golden_int16.csv b/tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu15_golden_int16.csv new file mode 100644 index 0000000..e07bf80 --- /dev/null +++ b/tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu15_golden_int16.csv @@ -0,0 +1 @@ +-2740,14904,-8024,32592,24797,-6691,19915,-8822,32605,20177,-6108,-792,24871,14212,3476,-6296,-4737,-9528,29092,-4482,14711,-6595,8137,-3432,-3077,-7214,903,25751,-41,21208,22972,-7543,-7534,5208,31123,-1385,5302,-3163,5993,-1341,9402,13594,-4907,30469,-3934,9645,20772,-1752,27203,4763,-2740,-2704,-4887,-431,2851,21855,9007,6490,7148,-9708,-886,-4159,30790,-4456,30385,-4525,-1536,-7911,-2156,-8272,-3249,16901,-7083,-9796,6141,12400,12728,-7567,14936,4413,-8690,950,-9469,-6461,-6016,-3383,1987,-1191,25610,4171,25039,3869,-5212,-5238,8549,21754,-1693,1142,-2079,3617,-5475,2773,-2542,16624,18550,-8493,18698,-2884,2057,12653,-4434,10236,-7993,17512,-2825,-2323,5033,-6147,25463,19656,-4916,-3412,-8562,-3127,-539,23885,27553,-8597,4596,-734,-625,8230,-4594,6511,24098,-3969,-5859,-9796,25227,-8849,-2155,13835,5814,-9781,15200,6532,4262,-709,29997,-114,-3593,12919,23712,-2926,-329,-8469,27299,-1649,20589,2419,-4913,6435,6202,-3345,831,18158,-3916,30430,16143,-548,-9522,20893,8379,-2462,-8460,-8492,22406,-4202,29159,-1224,16020,11118,1177,7701,-5548,19249,14957,-1728,17880,-7526,9595,10505,22499,30182,-602,-50
1,19844,17782,24691,14039,-5549,26681,-2540,26119,-8488,-2769,-5056,-4726,-1470,-4191,15648,13943,-3151,-8001,24102,-5720,14169,-2326,-7840,-4818,10714,17592,-6221,11138,-7049,12123,-9677,27673,-1513,-8306,-5590,21889,31839,14972,11912,-7390,16301,-4465,28520,24370,12307,-1663,629,-694,285,3302,27073,-5616,14154,-7057,-3288,-2385,12385,26929,-720,12512,-7572,-5698,2847,-3186,-2069,13101,-1567,24229,31851,25640,12348,32656,18114,22553,-3234,1679,27899,-8072,-6821,9625,21918,16042,9003,-2293,21357,-4080,-1783,-2052,4012,17950,14197,14673,11854,19954,30048,-2501,23249,-2516,2221,-9581,-1477,-7548,-4178,2891,-7068,-1882,-8912,-4388,14689,-8981,-5391,6828,-3274,18393,16913,6263,-4971,16345,28266,-9404,-8863,-6300,6226,1812,16971,10459,14520,13891,18490,22370,21563,20988,-4328,3864,-79,28488,-3767,28425,-6213,-7235,8288,5146,22304,3047,-892,-9145,21324,-1540,7588,-4525,22633,7627,-8362,6509,1931,16535,11253,289,29957,24190,25199,1494,-35,-9700,31435,8217,18140,-5888,8702,-8882,19466,18558,-5921,29051,11656,18813,-3774,4158,-7943,7376,20995,-1138,-1738,-1711,-9757,-5596,-1672,-9688,6271,-6813,21784,23276,-3871,-4968,18105,16552,25833,-3745,-8720,17990,-5627,-1301,-4055,12899,13546,17334,-8001,19461,11811,32131,10880,7198,-4444,4533,-7282,-8764,-8391,17397,-9328,11159,-3300,12694,-7897,21594,6811,17430,15454,6418,-5440,-8139,-9402,-9089,13683,15010,14521,-1961,21614,6216,15529,2069,-2788,-5939,2748,-7525,11538,-4537,-5668,-2851,-2134,15571,3506,-6670,-2065,21402,28274,-7868,21817,-6624,-8247,-587,-9263,28481,-411,-708,22713,25236,-1684,-1119,9344,263,-325,26932,-1511,-6169,-5887,-668,-7213,18355,15893,12860,17075,-2781,-431,-2731,25330,-9154,29671,-873,23195,5256,-6067,12494,6801,20747,-9635,9359,-8821,-6188,-2311,6549,10458,-5583,22884,-1302,4808,6003,27999,-4312,24626,10787,21019,14686,-4435,23534,26249,-3733,12066,-8322,-3846,-460,-7196,-4887,-6338,9439,-5576,-3845,-953,-1493,-6219,-1414,12262,24810,13563,-5785,28889,1908,-3351,5256,-4256,-3104,-9336,-4260,25811,13915,27
496,-4649,32425,21883,-2989,8682,-307,25812,3809,-148,-1220,-3522,-5078,24051,10676,-1566,-7141,2747,-7728,18894,-4739,26849,31277,-4124,-5384,19183,577,4865,7791,4493,4878,-7130,-5757,-5158,22011,31970,-7844,20057,18704,-9703,5684,17771,-5947,27087,10330,-4260,-1488,-7611,-9616,24922,-9575,-89,21483,-9468,-7036,-5413,25176,-5217,15612,-2770,12093,-5591,-3090,21354,3095,24426,31233,-8253,-9070,-9199,15988,-1621,-9068,-4117,20225,-673,21181,-8621,10760,-8271,13773,24474,12667,5049,8705,-7064,23824,26613,6160,7841,22532,912,-8962,18960,31172,-3789,-8330,23786,31513,-2461 diff --git a/tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu15_input0_int16.csv b/tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu15_input0_int16.csv new file mode 100644 index 0000000..9e23d86 --- /dev/null +++ b/tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu15_input0_int16.csv @@ -0,0 +1 @@ +-9132,14904,-26745,32592,24797,-22303,19915,-29406,32605,20177,-20360,-2640,24871,14212,3476,-20985,-15788,-31759,29092,-14938,14711,-21982,8137,-11438,-10257,-24046,903,25751,-135,21208,22972,-25144,-25112,5208,31123,-4615,5302,-10543,5993,-4468,9402,13594,-16357,30469,-13111,9645,20772,-5840,27203,4763,-9133,-9011,-16288,-1437,2851,21855,9007,6490,7148,-32358,-2952,-13861,30790,-14854,30385,-15082,-5120,-26370,-7186,-27574,-10830,16901,-23609,-32651,6141,12400,12728,-25221,14936,4413,-28967,950,-31561,-21536,-20052,-11275,1987,-3968,25610,4171,25039,3869,-17371,-17458,8549,21754,-5644,1142,-6929,3617,-18249,2773,-8471,16624,18550,-28310,18698,-9611,2057,12653,-14778,10236,-26643,17512,-9416,-7742,5033,-20488,25463,19656,-16386,-11373,-28538,-10424,-1796,23885,27553,-28657,4596,-2445,-2084,8230,-15313,6511,24098,-13230,-19530,-32653,25227,-29495,-7183,13835,5814,-32604,15200,6532,4262,-2361,29997,-379,-11975,12919,23712,-9753,-1095,-28230,27299,-5497,20589,2419,-16375,6435,6202,-11150,831,18158,-13053,30430,16143,-1826,-31740,20893,8379,-8205,-28198
,-28307,22406,-14006,29159,-4078,16020,11118,1177,7701,-18492,19249,14957,-5760,17880,-25085,9595,10505,22499,30182,-2006,-1668,19844,17782,24691,14039,-18496,26681,-8466,26119,-28294,-9228,-16854,-15752,-4899,-13968,15648,13943,-10504,-26670,24102,-19066,14169,-7753,-26132,-16060,10714,17592,-20737,11138,-23495,12123,-32257,27673,-5044,-27685,-18632,21889,31839,14972,11912,-24632,16301,-14881,28520,24370,12307,-5542,629,-2311,285,3302,27073,-18718,14154,-23523,-10960,-7949,12385,26929,-2400,12512,-25238,-18991,2847,-10620,-6897,13101,-5224,24229,31851,25640,12348,32656,18114,22553,-10778,1679,27899,-26905,-22736,9625,21918,16042,9003,-7643,21357,-13599,-5941,-6840,4012,17950,14197,14673,11854,19954,30048,-8337,23249,-8387,2221,-31936,-4921,-25159,-13927,2891,-23559,-6273,-29705,-14625,14689,-29936,-17968,6828,-10912,18393,16913,6263,-16568,16345,28266,-31347,-29544,-21000,6226,1812,16971,10459,14520,13891,18490,22370,21563,20988,-14427,3864,-262,28488,-12556,28425,-20710,-24116,8288,5146,22304,3047,-2974,-30481,21324,-5134,7588,-15083,22633,7627,-27873,6509,1931,16535,11253,289,29957,24190,25199,1494,-117,-32331,31435,8217,18140,-19626,8702,-29605,19466,18558,-19735,29051,11656,18813,-12579,4158,-26475,7376,20995,-3793,-5793,-5701,-32523,-18653,-5574,-32293,6271,-22709,21784,23276,-12901,-16559,18105,16552,25833,-12481,-29066,17990,-18756,-4336,-13516,12899,13546,17334,-26668,19461,11811,32131,10880,7198,-14811,4533,-24272,-29214,-27968,17397,-31092,11159,-10999,12694,-26322,21594,6811,17430,15454,6418,-18134,-27130,-31338,-30297,13683,15010,14521,-6535,21614,6216,15529,2069,-9292,-19796,2748,-25081,11538,-15121,-18891,-9502,-7113,15571,3506,-22232,-6883,21402,28274,-26225,21817,-22078,-27489,-1957,-30875,28481,-1368,-2360,22713,25236,-5614,-3730,9344,263,-1083,26932,-5036,-20563,-19623,-2225,-24044,18355,15893,12860,17075,-9270,-1436,-9101,25330,-30513,29671,-2908,23195,5256,-20224,12494,6801,20747,-32117,9359,-29404,-20625,-7704,6549,10458,-18610,22884,-4340,4808
,6003,27999,-14374,24626,10787,21019,14686,-14781,23534,26249,-12441,12066,-27738,-12819,-1533,-23986,-16290,-21127,9439,-18585,-12815,-3175,-4977,-20728,-4711,12262,24810,13563,-19284,28889,1908,-11170,5256,-14187,-10346,-31118,-14199,25811,13915,27496,-15497,32425,21883,-9963,8682,-1024,25812,3809,-492,-4065,-11740,-16926,24051,10676,-5219,-23804,2747,-25758,18894,-15797,26849,31277,-13747,-17946,19183,577,4865,7791,4493,4878,-23765,-19189,-17194,22011,31970,-26147,20057,18704,-32343,5684,17771,-19823,27087,10330,-14200,-4960,-25369,-32051,24922,-31915,-296,21483,-31560,-23451,-18043,25176,-17390,15612,-9233,12093,-18635,-10298,21354,3095,24426,31233,-27509,-30232,-30661,15988,-5402,-30225,-13722,20225,-2241,21181,-28735,10760,-27569,13773,24474,12667,5049,8705,-23546,23824,26613,6160,7841,22532,912,-29874,18960,31172,-12629,-27765,23786,31513,-8204 diff --git a/tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu16.tflite b/tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu16.tflite new file mode 100644 index 0000000..113ddcb Binary files /dev/null and b/tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu16.tflite differ diff --git a/tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu16_golden_int16.csv b/tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu16_golden_int16.csv new file mode 100644 index 0000000..0ab47b7 --- /dev/null +++ b/tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu16_golden_int16.csv @@ -0,0 +1 @@ 
+29198,23020,25010,27507,-6538,-4648,-2115,11207,32621,-8305,31107,19208,-7990,-2849,-903,-864,-7430,-448,3257,31846,-8542,5805,10752,9357,13221,23497,24661,-7182,21103,5933,26479,-3616,-5569,-3733,23637,-3274,-4540,-1777,-6042,-2413,13889,30925,16642,-3197,-1139,31958,32014,28523,16887,-1810,-305,-3938,-4605,-6778,18324,-7036,-2740,-5074,-4056,6177,29592,25107,12037,-5341,-730,25844,-8340,15323,-704,13674,-6096,-8750,2007,-2452,-444,22813,14726,2794,-3874,6721,7054,31251,9212,9487,-4178,18554,27038,15941,-4426,-5981,-4452,141,-2809,-7590,13150,28892,22725,-8701,5524,-9186,-1830,25710,-6642,-936,-7465,11623,2361,-6406,-5624,1510,-560,-7655,9064,4052,-7853,27289,2435,-6551,-7622,-111,-5221,15289,-7331,23428,-1375,27674,7655,-1238,-4483,25548,-7560,3365,4975,-3907,-650,-3302,15543,27152,22373,27673,-4015,2418,29747,-6806,31271,15849,-8105,6876,-9035,30326,28242,22423,-4753,8638,15016,-3116,-4102,-7276,-5257,24468,17312,28344,5000,26465,-8369,-7164,29371,-3401,24138,-1310,31054,-6206,32597,-4819,-1542,-2612,8648,-3933,-3556,-7008,16828,11291,27811,31242,25723,-516,12776,8900,26091,22243,-8827,-891,28452,2919,-181,-8179,9781,-8191,9854,-8776,17607,12743,-6217,8192,-6885,1743,-9602,12945,2633,3659,20344,2969,19467,-1047,14721,-569,-4356,-988,30775,-7690,21159,-9613,18988,15998,-5315,11145,-8688,-5913,-9648,19725,19232,-5397,16849,-940,1224,22360,18084,-7528,-9255,-197,-7222,2392,-6869,27094,-3737,26608,29498,-5104,24361,6656,27294,-1325,27848,-1629,26506,32202,-8134,3591,-1180,-8700,4244,4516,20369,-9409,10637,31588,-1071,3586,-1182,22861,-1747,-4266,5554,-4599,22741,3200,22053,14775,-7059,-6347,-3893,-8259,-9298,21408,1553,-1894,15077,27658,-6097,14623,1623,7639,-9417,25383,10748,27582,-5189,10697,-8275,7649,-2499,32045,29406,-7035,-8930,-4745,32203,15004,-2919,-103,2423,26349,-2236,-4010,8399,-3843,-5288,13535,-5434,18047,-829,15797,-6833,25460,-1239,-6864,27981,31849,27532,6225,17820,-8312,-9409,15440,11668,24798,-9098,-6908,20018,-6258,23353,-4545,27596,27138,-2335,-
3005,15682,-2893,21340,13373,-1041,-5833,-2953,29987,20422,-5017,-3706,-598,24390,24697,12490,-9287,-1858,-7691,29250,1002,28759,7565,13434,11166,-8220,29068,14169,13531,-1952,8198,-619,-1072,-3310,-1294,3328,-4076,19957,-2395,-424,-4043,-1878,-7566,3155,25766,-3697,7583,6512,20219,18724,-339,-3322,-2887,9207,-5887,18242,-6373,-7918,-2362,-4875,-6755,25307,6628,-6547,-6639,11493,-4943,24320,-2077,15587,20364,30818,-278,-4962,-1405,-9319,-3088,444,8421,-7056,-5033,5423,16841,31704,28848,-4063,-5889,29259,-2323,20107,-6482,-313,-1973,-777,-9084,3664,-7791,24125,30412,-8401,-8275,-763,25429,-2156,5716,-6702,-124,3372,29997,6302,-4179,32318,-5694,16237,3694,-6267,-1556,-2002,-6700,-1200,-436,24913,-5323,-419,-482,-4702,-1089,32559,23169,8364,-4125,-1006,19296,7671,17749,18949,-3299,6803,-8372,-316,22208,691,-1807,25460,20685,-8742,27343,-1636,-3807,-6360,-5529,-8660,-9691,-4869,-4574,10780,-2821,-5427,28153,10815,19969,32206,2559,14211,-3124,23680,24028,-1087,-9682,-4347,19693,31506,2049,-7426,-8156,3291,-6943,-7849,-6000,32221,9956,32116,-4387,18698,-2579,30828,9196,-8656,18131,7512,25390,22915,15768,-7773,8823,-2804,-3310,-7121,11133,8324,11147,6510,-6103,7407,2405,31280,-5359,1157,1605,17814,18774,28969,-1815,27694,25470,-3470,25786,19771,12125,-3734,12436,-6398,13224,-4292,22049,-7394,13203,22187,-8772,9518,18742,27307,-7566,16762,-1088,12932,-3786,-3093,1747,11926,-2758,25281,-4695,7219,-4194,-735,22521,-6600,9765,17254,-1225,22632,32181,29908,3160,3780,-574,-3287,-9631,17596,31702,640,16729,-2799,-9443,6003,26151,10033,-1978,-5931,21450,-6251,-9206,-2379,13616,31282,15761,-2176,15394,250,-3881,17373,9118,5642,-270,24015,24832,8128,19027,12382,15116,-3917,-3516,-7527,-3877 diff --git a/tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu16_input0_int16.csv b/tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu16_input0_int16.csv new file mode 100644 index 0000000..89b43bb --- /dev/null +++ 
b/tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu16_input0_int16.csv @@ -0,0 +1 @@ +29198,23020,25010,27507,-21793,-15493,-7048,11207,32621,-27683,31107,19208,-26633,-9497,-3009,-2880,-24765,-1491,3257,31846,-28471,5805,10752,9357,13221,23497,24661,-23940,21103,5933,26479,-12053,-18561,-12442,23637,-10913,-15131,-5921,-20138,-8044,13889,30925,16642,-10656,-3795,31958,32014,28523,16887,-6033,-1015,-13126,-15349,-22592,18324,-23454,-9131,-16911,-13519,6177,29592,25107,12037,-17804,-2433,25844,-27798,15323,-2346,13674,-20320,-29167,2007,-8172,-1478,22813,14726,2794,-12911,6721,7054,31251,9212,9487,-13925,18554,27038,15941,-14752,-19936,-14838,141,-9362,-25299,13150,28892,22725,-29002,5524,-30618,-6100,25710,-22140,-3119,-24883,11623,2361,-21352,-18747,1510,-1866,-25516,9064,4052,-26175,27289,2435,-21837,-25405,-369,-17404,15289,-24437,23428,-4582,27674,7655,-4125,-14941,25548,-25199,3365,4975,-13021,-2165,-11007,15543,27152,22373,27673,-13381,2418,29747,-22687,31271,15849,-27016,6876,-30117,30326,28242,22423,-15843,8638,15016,-10387,-13671,-24252,-17524,24468,17312,28344,5000,26465,-27895,-23879,29371,-11336,24138,-4366,31054,-20685,32597,-16062,-5139,-8707,8648,-13109,-11851,-23358,16828,11291,27811,31242,25723,-1719,12776,8900,26091,22243,-29422,-2970,28452,2919,-602,-27261,9781,-27304,9854,-29252,17607,12743,-20723,8192,-22949,1743,-32006,12945,2633,3659,20344,2969,19467,-3490,14721,-1895,-14518,-3292,30775,-25631,21159,-32043,18988,15998,-17717,11145,-28958,-19709,-32159,19725,19232,-17990,16849,-3133,1224,22360,18084,-25092,-30849,-655,-24073,2392,-22895,27094,-12457,26608,29498,-17011,24361,6656,27294,-4415,27848,-5428,26506,32202,-27112,3591,-3934,-28999,4244,4516,20369,-31363,10637,31588,-3570,3586,-3939,22861,-5823,-14220,5554,-15328,22741,3200,22053,14775,-23530,-21155,-12977,-27529,-30994,21408,1553,-6312,15077,27658,-20324,14623,1623,7639,-31388,25383,10748,27582,-17295,10697,-27581,7649,-8329,32045,29406,-23449,-29766,-15815,32203,1500
4,-9728,-341,2423,26349,-7452,-13365,8399,-12809,-17626,13535,-18111,18047,-2762,15797,-22775,25460,-4129,-22878,27981,31849,27532,6225,17820,-27706,-31361,15440,11668,24798,-30325,-23026,20018,-20858,23353,-15150,27596,27138,-7782,-10015,15682,-9642,21340,13373,-3468,-19442,-9844,29987,20422,-16721,-12351,-1991,24390,24697,12490,-30957,-6191,-25636,29250,1002,28759,7565,13434,11166,-27399,29068,14169,13531,-6506,8198,-2061,-3573,-11034,-4312,3328,-13587,19957,-7982,-1414,-13476,-6260,-25219,3155,25766,-12324,7583,6512,20219,18724,-1128,-11071,-9622,9207,-19622,18242,-21241,-26391,-7871,-16248,-22515,25307,6628,-21824,-22128,11493,-16475,24320,-6921,15587,20364,30818,-925,-16538,-4681,-31061,-10293,444,8421,-23520,-16777,5423,16841,31704,28848,-13541,-19628,29259,-7741,20107,-21605,-1044,-6577,-2588,-30279,3664,-25969,24125,30412,-28001,-27581,-2544,25429,-7185,5716,-22340,-411,3372,29997,6302,-13929,32318,-18978,16237,3694,-20890,-5186,-6674,-22334,-3998,-1454,24913,-17743,-1396,-1607,-15674,-3630,32559,23169,8364,-13748,-3353,19296,7671,17749,18949,-10996,6803,-27907,-1054,22208,691,-6023,25460,20685,-29140,27343,-5453,-12690,-21200,-18429,-28866,-32301,-16230,-15245,10780,-9401,-18090,28153,10815,19969,32206,2559,14211,-10413,23680,24028,-3622,-32271,-14489,19693,31506,2049,-24753,-27186,3291,-23144,-26163,-20000,32221,9956,32116,-14623,18698,-8597,30828,9196,-28852,18131,7512,25390,22915,15768,-25910,8823,-9345,-11032,-23737,11133,8324,11147,6510,-20342,7407,2405,31280,-17862,1157,1605,17814,18774,28969,-6048,27694,25470,-11567,25786,19771,12125,-12445,12436,-21326,13224,-14305,22049,-24645,13203,22187,-29240,9518,18742,27307,-25219,16762,-3627,12932,-12618,-10308,1747,11926,-9194,25281,-15649,7219,-13980,-2448,22521,-22000,9765,17254,-4081,22632,32181,29908,3160,3780,-1914,-10955,-32101,17596,31702,640,16729,-9328,-31476,6003,26151,10033,-6592,-19768,21450,-20837,-30687,-7928,13616,31282,15761,-7254,15394,250,-12935,17373,9118,5642,-900,24015,24832,8128,19027,1
2382,15116,-13055,-11718,-25090,-12923 diff --git a/tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu17.tflite b/tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu17.tflite new file mode 100644 index 0000000..084605a Binary files /dev/null and b/tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu17.tflite differ diff --git a/tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu17_golden_int16.csv b/tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu17_golden_int16.csv new file mode 100644 index 0000000..dda5cc2 --- /dev/null +++ b/tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu17_golden_int16.csv @@ -0,0 +1 @@ +3019,16310,14534,30580,-2309,-7644,24540,12157,8306,-4040,14695,-3346,-2262,15753,16428,-8638,21282,9852,-1984,8135,28226,-9170,14705,14682,24122,-344,16301,-7067,13071,12498,669,-9008,-8916,25294,27579,24413,30856,-3020,18231,18895,-9514,-8741,21385,-9725,-7118,-5889,-7185,16384,-1562,25708,-6761,-9595,30944,748,-8798,32350,25592,-586,-4591,-468,437,-8908,17535,-9252,25685,-1765,-3264,14828,22897,3725,-4143,-4762,32242,5076,7847,26345,12093,-1710,-3318,-2330,24346,10724,29114,-1279,-7704,26161,16018,31152,-4802,25014,-6162,26218,12916,-9096,15135,13796,14465,5523,21005,21221,12553,-1242,31675,-3622,24185,-2902,16047,-2157,25462,31657,-5449,-9276,-3801,-6228,1699,19300,18310,31592,18255,13660,-3090,28457,14639,-8597,-1255,-6687,-8876,10565,6568,5157,-1700,-7512,-9473,-2096,13302,481,-1925,-5956,17148,-5251,26182,12128,23148,-9794,-8087,12930,-1475,29982,-7348,1529,23339,-1950,-1545,-6615,-5359,-8261,-1402,-2710,29428,-9271,15574,-2003,9657,18200,-9364,27961,15474,16196,28207,17906,21512,-4132,-5726,29925,8234,14481,-54,-5456,-2902,21765,-9019,-4745,29203,3445,-1458,-6144,27121,-8943,11824,-4261,14103,-9119,-6856,30613,31473,9081,-3559,-4522,-9671,20322,-4853,8447,3228,-5143,-7371,-5567,25561,-896,-3395,-1484,24649,15655,19262,10639,-6305,-5054,23381,2909
9,116,6822,-8416,-6515,-132,-7939,28733,18360,-8137,-8044,-7302,14040,-6533,-9156,-7528,-6727,-8745,-9420,25987,-4247,6883,-3226,31368,-2392,-3642,18629,19736,32284,15060,16433,-2799,22684,-5605,2746,-6010,24244,6158,-4027,17428,-5129,-1401,-6128,29669,-4580,-1446,-8742,-8684,10141,-5923,-3046,-8965,-1039,-7498,13258,21778,27238,1869,4159,-3906,-8384,17179,-7312,-4153,18261,-7499,-9585,12240,29824,22799,-5925,29302,-2587,10724,27532,-6391,20485,-4922,31831,-7506,32478,15887,-7596,16504,19591,20734,-1063,-1981,9535,30051,-8862,24089,739,-9466,-9539,-6664,23271,1343,7271,-3088,19444,25960,1025,10237,30212,-4808,-3854,-9533,14701,16315,22764,28623,-6261,-5949,-8932,5455,-9696,-5408,23234,4442,-379,19976,-1376,4918,8619,19467,17369,30028,5418,31571,14805,-6691,9552,7922,29475,27964,19662,24098,-9677,-745,-9719,-9405,30693,20397,-976,-5425,-2421,-6663,-7632,-8246,-7608,-2538,14449,1962,-9308,-6149,9517,29895,30832,-9663,17083,31812,3443,10064,-6917,19473,-9378,-9173,1840,24721,-8286,-6964,15310,-5959,-1149,-6046,18915,-3063,21942,27708,3486,-9600,-6178,-9691,-3087,-8423,-7613,17921,14661,-5465,-2345,-8469,-6586,18479,31881,-8975,22863,31477,-560,8295,5874,-6287,-384,-6747,-13,26539,14731,-8670,-8717,18912,-9121,-5635,28669,31737,-8205,19982,-2526,-5355,-4591,-9633,13032,2547,27531,-678,4683,25915,22841,-6924,11175,8435,25403,30847,21,15847,-8989,7783,8099,15838,-5193,-1234,-2726,-9604,16311,-5653,902,11251,11888,23619,-2252,-7743,-921,6501,13041,-8714,30635,-6274,-9614,30538,-5085,23734,16834,-6148,32008,9365,-2928,13007,-9648,18941,-5181,590,-7418,-3913,-6839,-1969,-6364,20003,-8129,-6844,-6876,-7851,23413,-597,18434,8049,11210,20241,-8745,-780,-4223,-101,-2938,12560,28476,12096,-3736,-8738,-1343,5519,-7822,-9694,-7728,9335,16278,-6861,5569,22499,-4861,26421,-2386,6698,-2884,13070,-2297,773,-3705,684,-909,22665,4758,-1894,18271,-7737,10133,24793,-8133,-7129,10839,-942,-7193,26488,-7346,21255,13158,-7812,-3733,-8683,-1908,5910,4511,24841,-2393,1224,20597,30067,-4171,-974
6,10632,-7689,-9012,-1635,-2536,-6698,30171,30959,-5811,-2653,243,9422,21199,-7557,23421,-7955,14092,-8395,25740,-8948,14564,-5182,21375,7197,29719,-9636,-444,14135,-746,31326,-7309,-4240,23078,11822,16563,-2607,-7222,-4342,15407,23697,-2787,-3626,-7222,-5115,-8738,10589,2349,-7058,23048,-2005,18194,25567,-6616,2032,-7854,-7933,1564,-5582,5112,21998,15427,20786,6558,32070,23795,32600,-5743,27713,10604,21666,10726,-4106,-2748,-208,933,-2629,-7660,-1237,973,21655,6299,19305,-2969,26245,5525,15009,6413,-242,-237,24241,25690,-6440,-8152,29992,8854,-4776,-3067,-10,-4267,7398,-3123,-8021,-9125,12465,4270,-6918,19740,2814,-1574,3456,-8032,11971,16300,9240,-5865,31043,-8610,145,-7315,-8908,-9290,-6311,-8104,10944,15496,-6450,31366,7776,-3288,-1233,2749,17202,29671,-9352,10780,-3961,26249,-2489,32754,2669,-7612,32704,22167,-2374,2374,13985,16107,-9437,17982,22315,-7559,15656,25619,-6862,31869,12407,-7612,-5743,13267,-820,-133,21091,25861,12194,4693,7578,13749,-8310,22351,-6388,-9461,-5628,4261,219,21656,13559,-1654,-2202,-2675,-1681,21937,-2473,-9146,-3904,25415,36,7101,7388,2831,-2610,28297,11307,21573,10574,-4790,1248,23977,8838,11210,23647,20073,-5952,-7137,13721,-8960,-293,-233,-2326,-3124,-648,871,11115,14768,-2065,28181,23987,20208,29370,21227,-5847,18890,-8679,-3807,-412,22786,6593,-8911,8769,14088,-6904,-550,-4360,574,24310,-6511,-4840,11534,-771,13559,-8744,7044,-1830,9822,24320,-5648,-4125,3463,10063,10922,-2838,21109,-3113,-8551,113,18935,-3772,14088,8114,-2996,28386,6414,-2542,22885,-7944,7076,-9213,9732,-221,27376,-997,5690,-2136,-1827,18173,25290,-4636,26084,-9580,14922,26175,-5841,-639,8318,6933,-8299,15018,22385,-4923,-2957,27412,9840,30447,-8887,-3827,31846,3260,3143,15057,-191,24797,5615,-4975,17100,-2790,1936,16771,7125,-844,27890,9653,-6952,14193,-4219,16206,-565,9982,-4353,-6537,-3457,-8395,6659,20186,-8986,-2487,23544,-6595,-1871,-5295,-6447,625,8527,-9238,-1559,14980,4773,-9011,-7603,-932,-3721,-2260,-4560,-3317,-357,-7730,-65,-9080,-2724,-3256,25093,-
6558,23752,-6033,25516,-2512,22807,8680,-5604,7231,15653,-7041,31498,-1288,25588,9797,-4508,-4825,-2856,-5509,-5165,24328,-3893,29502,-2968,1003,8491,-7528,3681,-1279,30290,-4269,-5877,-197,22965,-597,2682,-6380,26825,28562,-3733,-2848,-4614,-729,-7087,14040,24812,-5926,-8781,-1395,-7018,-1503,-127,22338,-2092,15398,17133,-8728,17415,-4798,-8674,-7598,16493,6106,-6366,32322,-6987,26891,24302,-1848,-1066,-1818,-6714,-5559,-1012,-9773,-1224,-8950,-2232,-4500,15298,-4690,-9312,25547,-9628,-4833,5617,-4465,-5113,-9633,3912,20438,6039,26482,-8454,18859,-2359,32133,-2572,15293,18747,26869,-4011,-9490,-5082,3169,24179,26634,31860,1953,8576,-1543,-8563,12102,29890,9607,5258,-8731,-6241,26786,-1392,11252,26325,25265,-9039,-2349,-2981,-7241,5615,16284,-4691,13071,4050,15601,-2352,19945,18888,-2936,24321,9655,5100,21320,19561,757,-9692,-7389,7572,-793,-1956,-6108,-9091,3882,-5577,12276,-1268,-4475,-6265,-2152,-784,-166,4079,-713,16844,-586,20947,-5676,-1703,7581,-1682,5837,8608,31672,-8466,21312,31499,-8232,-9407,11018,16997,-3079,26523,9310,28284,26053,17098,15894,-7870,15572,-3227,-8046,-6982,-2262,9518,8364,-6741,-7748,20090,-3139,2231,2952,-9116,28242,-1807,13665,28964,1011,3055,30949,29096,31849,-4123,-1588,-9604,3384,-854,2924,-5942,-8909,-6,-7808,-3421,26647,-9675,7963,-6841,7115,15166,-3979,-4208,8157,-6079,-2995,9819,-9547,4572,2597,-8500,30002,32240,-8033,18597,-331,-3116,-3373,-3725,-7188,13997,15783,21511,13322,-7015,-4699,-2656,8897,21634,-4776,13230,-4400,-7086,-246,-8185,4461,18493,-8197,2851,-2703,-5161,-6731,8810,-2043,-7248,25506,-5560,-1189,-5966,27797,13243,-4575,25291,-3184,14530,4742,-5800,-8441,26364,15740,2340,12813,10716,31602,-2311,26868,-6850,14628,-1120,-9272,20299,-400,-5531,-9728,372,-3556,-5275,-2228,-7861,-8750,-6709,-1605,-7894,-1353,-8609,-5950,-2988,11413,27208,16404,8658,9158,8259,30183,-6243,-9385,-7501,-7749,9853,-8089,4115,2550,-2698,31200,10516,-5198,-269,22347,-5205,-499,-1775,-2226,3056,29686,13542,7567,13272,-4465,-1915,-6094,8029,149
45,4273,17522,6439,-5465,1966,5464,24939,-2799,-5406,-2554,16125,26651,15080,-5950,-6220,-3868,18677,-9608,-7877,-7069,6814 diff --git a/tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu17_input0_int16.csv b/tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu17_input0_int16.csv new file mode 100644 index 0000000..012ea63 --- /dev/null +++ b/tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu17_input0_int16.csv @@ -0,0 +1 @@ +3019,16310,14534,30580,-7696,-25478,24540,12157,8306,-13465,14695,-11151,-7538,15753,16428,-28794,21282,9852,-6612,8135,28226,-30567,14705,14682,24122,-1146,16301,-23555,13071,12498,669,-30027,-29718,25294,27579,24413,30856,-10065,18231,18895,-31714,-29135,21385,-32415,-23727,-19629,-23950,16384,-5205,25708,-22537,-31982,30944,748,-29325,32350,25592,-1952,-15302,-1558,437,-29692,17535,-30839,25685,-5881,-10879,14828,22897,3725,-13809,-15871,32242,5076,7847,26345,12093,-5699,-11060,-7766,24346,10724,29114,-4263,-25678,26161,16018,31152,-16005,25014,-20539,26218,12916,-30320,15135,13796,14465,5523,21005,21221,12553,-4140,31675,-12074,24185,-9672,16047,-7188,25462,31657,-18163,-30919,-12669,-20758,1699,19300,18310,31592,18255,13660,-10300,28457,14639,-28656,-4184,-22288,-29586,10565,6568,5157,-5665,-25039,-31575,-6987,13302,481,-6415,-19851,17148,-17502,26182,12128,23148,-32647,-26955,12930,-4917,29982,-24493,1529,23339,-6498,-5148,-22048,-17862,-27537,-4674,-9032,29428,-30901,15574,-6675,9657,18200,-31213,27961,15474,16196,28207,17906,21512,-13771,-19086,29925,8234,14481,-180,-18187,-9674,21765,-30064,-15816,29203,3445,-4860,-20479,27121,-29808,11824,-14203,14103,-30395,-22852,30613,31473,9081,-11862,-15074,-32235,20322,-16175,8447,3228,-17144,-24568,-18555,25561,-2987,-11316,-4946,24649,15655,19262,10639,-21015,-16846,23381,29099,116,6822,-28052,-21715,-438,-26462,28733,18360,-27121,-26814,-24338,14040,-21775,-30520,-25091,-22423,-29148,-31399,25987,-14156,6883,-10754,31368,-7971,-12138,18629,1
9736,32284,15060,16433,-9329,22684,-18684,2746,-20034,24244,6158,-13424,17428,-17096,-4668,-20427,29669,-15267,-4820,-29139,-28946,10141,-19742,-10151,-29883,-3462,-24992,13258,21778,27238,1869,4159,-13020,-27945,17179,-24373,-13844,18261,-24996,-31948,12240,29824,22799,-19748,29302,-8622,10724,27532,-21304,20485,-16405,31831,-25018,32478,15887,-25319,16504,19591,20734,-3543,-6603,9535,30051,-29539,24089,739,-31552,-31796,-22213,23271,1343,7271,-10293,19444,25960,1025,10237,30212,-16025,-12845,-31777,14701,16315,22764,28623,-20869,-19829,-29772,5455,-32319,-18026,23234,4442,-1263,19976,-4587,4918,8619,19467,17369,30028,5418,31571,14805,-22303,9552,7922,29475,27964,19662,24098,-32255,-2484,-32395,-31348,30693,20397,-3254,-18084,-8068,-22208,-25438,-27486,-25360,-8459,14449,1962,-31026,-20496,9517,29895,30832,-32209,17083,31812,3443,10064,-23055,19473,-31258,-30577,1840,24721,-27620,-23212,15310,-19862,-3829,-20152,18915,-10208,21942,27708,3486,-32000,-20592,-32301,-10290,-28076,-25376,17921,14661,-18215,-7817,-28230,-21952,18479,31881,-29916,22863,31477,-1865,8295,5874,-20956,-1278,-22489,-42,26539,14731,-28898,-29056,18912,-30401,-18783,28669,31737,-27350,19982,-8419,-17849,-15302,-32108,13032,2547,27531,-2260,4683,25915,22841,-23078,11175,8435,25403,30847,21,15847,-29961,7783,8099,15838,-17310,-4113,-9087,-32011,16311,-18844,902,11251,11888,23619,-7506,-25809,-3070,6501,13041,-29046,30635,-20913,-32045,30538,-16950,23734,16834,-20491,32008,9365,-9758,13007,-32159,18941,-17268,590,-24726,-13044,-22796,-6564,-21214,20003,-27097,-22812,-22919,-26169,23413,-1989,18434,8049,11210,20241,-29148,-2600,-14077,-335,-9792,12560,28476,12096,-12452,-29125,-4475,5519,-26073,-32313,-25759,9335,16278,-22870,5569,22499,-16201,26421,-7953,6698,-9611,13070,-7657,773,-12350,684,-3030,22665,4758,-6311,18271,-25788,10133,24793,-27109,-23761,10839,-3138,-23977,26488,-24485,21255,13158,-26039,-12442,-28942,-6360,5910,4511,24841,-7976,1224,20597,30067,-13902,-32487,10632,-25629,-30038,-545
0,-8453,-22325,30171,30959,-19370,-8843,243,9422,21199,-25189,23421,-26516,14092,-27982,25740,-29826,14564,-17272,21375,7197,29719,-32118,-1480,14135,-2485,31326,-24362,-14133,23078,11822,16563,-8690,-24072,-14471,15407,23697,-9289,-12087,-24072,-17048,-29125,10589,2349,-23525,23048,-6682,18194,25567,-22054,2032,-26178,-26444,1564,-18605,5112,21998,15427,20786,6558,32070,23795,32600,-19144,27713,10604,21666,10726,-13685,-9160,-691,933,-8761,-25533,-4121,973,21655,6299,19305,-9895,26245,5525,15009,6413,-805,-789,24241,25690,-21466,-27172,29992,8854,-15920,-10221,-33,-14224,7398,-10410,-26736,-30415,12465,4270,-23058,19740,2814,-5245,3456,-26773,11971,16300,9240,-19549,31043,-28700,145,-24381,-29692,-30966,-21035,-27011,10944,15496,-21499,31366,7776,-10958,-4108,2749,17202,29671,-31171,10780,-13201,26249,-8295,32754,2669,-25374,32704,22167,-7911,2374,13985,16107,-31457,17982,22315,-25197,15656,25619,-22871,31869,12407,-25371,-19142,13267,-2732,-443,21091,25861,12194,4693,7578,13749,-27700,22351,-21291,-31537,-18760,4261,219,21656,13559,-5513,-7338,-8916,-5602,21937,-8244,-30486,-13013,25415,36,7101,7388,2831,-8699,28297,11307,21573,10574,-15966,1248,23977,8838,11210,23647,20073,-19838,-23788,13721,-29865,-976,-775,-7751,-10412,-2158,871,11115,14768,-6882,28181,23987,20208,29370,21227,-19490,18890,-28929,-12690,-1374,22786,6593,-29704,8769,14088,-23011,-1834,-14534,574,24310,-21702,-16133,11534,-2568,13559,-29147,7044,-6100,9822,24320,-18827,-13748,3463,10063,10922,-9458,21109,-10376,-28503,113,18935,-12572,14088,8114,-9987,28386,6414,-8473,22885,-26478,7076,-30710,9732,-736,27376,-3321,5690,-7120,-6089,18173,25290,-15453,26084,-31934,14922,26175,-19469,-2129,8318,6933,-27664,15018,22385,-16409,-9855,27412,9840,30447,-29621,-12757,31846,3260,3143,15057,-637,24797,5615,-16584,17100,-9299,1936,16771,7125,-2812,27890,9653,-23171,14193,-14063,16206,-1882,9982,-14509,-21789,-11521,-27981,6659,20186,-29951,-8290,23544,-21982,-6237,-17650,-21489,625,8527,-30793,-5195,14980,47
73,-30036,-25344,-3106,-12401,-7533,-15199,-11055,-1189,-25765,-216,-30265,-9078,-10851,25093,-21860,23752,-20110,25516,-8372,22807,8680,-18679,7231,15653,-23469,31498,-4293,25588,9797,-15025,-16081,-9520,-18364,-17215,24328,-12975,29502,-9893,1003,8491,-25094,3681,-4264,30290,-14229,-19589,-656,22965,-1989,2682,-21266,26825,28562,-12443,-9493,-15378,-2428,-23622,14040,24812,-19752,-29268,-4650,-23391,-5010,-423,22338,-6973,15398,17133,-29091,17415,-15993,-28912,-25325,16493,6106,-21218,32322,-23289,26891,24302,-6158,-3554,-6058,-22380,-18530,-3374,-32577,-4078,-29831,-7440,-15000,15298,-15631,-31040,25547,-32092,-16110,5617,-14882,-17042,-32108,3912,20438,6039,26482,-28178,18859,-7863,32133,-8572,15293,18747,26869,-13369,-31631,-16940,3169,24179,26634,31860,1953,8576,-5143,-28543,12102,29890,9607,5258,-29104,-20802,26786,-4639,11252,26325,25265,-30130,-7829,-9937,-24136,5615,16284,-15636,13071,4050,15601,-7840,19945,18888,-9787,24321,9655,5100,21320,19561,757,-32306,-24629,7572,-2641,-6520,-20358,-30303,3882,-18590,12276,-4225,-14915,-20883,-7171,-2613,-554,4079,-2377,16844,-1951,20947,-18918,-5677,7581,-5606,5837,8608,31672,-28219,21312,31499,-27440,-31356,11018,16997,-10261,26523,9310,28284,26053,17098,15894,-26234,15572,-10756,-26818,-23272,-7540,9518,8364,-22469,-25825,20090,-10463,2231,2952,-30386,28242,-6022,13665,28964,1011,3055,30949,29096,31849,-13742,-5292,-32011,3384,-2845,2924,-19806,-29696,-18,-26026,-11401,26647,-32248,7963,-22801,7115,15166,-13262,-14027,8157,-20261,-9984,9819,-31824,4572,2597,-28331,30002,32240,-26775,18597,-1104,-10387,-11241,-12416,-23960,13997,15783,21511,13322,-23382,-15664,-8854,8897,21634,-15918,13230,-14667,-23620,-818,-27282,4461,18493,-27324,2851,-9008,-17202,-22435,8810,-6809,-24159,25506,-18533,-3963,-19887,27797,13243,-15250,25291,-10611,14530,4742,-19334,-28137,26364,15740,2340,12813,10716,31602,-7701,26868,-22831,14628,-3733,-30905,20299,-1332,-18436,-32426,372,-11851,-17584,-7427,-26203,-29167,-22364,-5350,-26314,-451
0,-28695,-19832,-9960,11413,27208,16404,8658,9158,8259,30183,-20808,-31282,-25002,-25828,9853,-26961,4115,2550,-8992,31200,10516,-17327,-895,22347,-17350,-1662,-5916,-7420,3056,29686,13542,7567,13272,-14883,-6381,-20313,8029,14945,4273,17522,6439,-18216,1966,5464,24939,-9329,-18019,-8514,16125,26651,15080,-19833,-20731,-12892,18677,-32027,-26256,-23563,6814 diff --git a/tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu18.tflite b/tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu18.tflite new file mode 100644 index 0000000..39e40cd Binary files /dev/null and b/tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu18.tflite differ diff --git a/tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu18_golden_int16.csv b/tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu18_golden_int16.csv new file mode 100644 index 0000000..545741d --- /dev/null +++ b/tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu18_golden_int16.csv @@ -0,0 +1 @@ 
+21009,-5794,-11055,-6072,-8831,5132,-1164,-3417,-7476,31601,-8896,24668,-4601,4665,-10025,8744,-3410,-2483,32767,6250,1266,32767,-986,9792,-7276,31268,-3541,4759,-2071,31757,-916,13946,-5964,-9004,-4070,32767,13831,-5003,19803,-3592,23526,-10565,-512,-4613,-3702,-3058,7934,24117,-5039,-6211,-6055,-10822,-9249,-8917,12524,28723,-8011,15164,5039,-11092,32767,-2239,-1818,-1926,21838,8483,-10249,-2043,23676,-5319,-7040,-10149,9047,-11148,28279,25129,-7117,-2079,-6508,23730,12812,32767,-7998,-5093,-7111,14775,-3082,3455,-7310,-5018,-11258,32767,-8848,-5725,1275,24326,14507,11729,5032,10612,-6445,10399,31756,-5280,10200,3632,-10027,-3738,-11368,-4283,9412,-6977,16793,-1892,17718,-11287,23746,-701,-9808,-6940,-610,-11060,-7422,4092,9007,-8879,-7494,10586,23887,-6810,-3480,32767,32767,-4486,4644,6296,16217,9044,150,18322,2169,-2833,25471,17722,3668,-9846,30586,-3428,32767,-5788,-6457,20596,32767,29800,16283,28125,-11048,29138,32116,29806,25288,-8334,-7711,-10298,25671,1570,-6744,18404,-9499,-2869,-1480,-5407,-526,13950,22163,-10062,7208,-11161,421,22371,-5960,-5152,12509,-8302,32767,25894,6692,32767,32767,-9049,3473,-9838,-863,9730,27663,2051,-42,-3976,2400,-1070,-4133,-7744,30373,8402,-11233,-9287,2096,32767,9249,-4668,-9861,-2105,28112,14267,24157,1052,-10896,2347,-462,15487,9231,-1223,3499,-10774,-3389,32767,-9752,32767,30193,-7131,-10587,-2963,19166,-10317,9849,-2331,16134,-8182,17517,30877,32767,468,8793,-10184,-7969,511,-7940,5959,3846,-453,9716,15216,4799,-5057,17812,-6054,-7401,19049,17060,-829,15970,-773,-10421,-8208,28796,2734,32767,20864,23126,26966,15969,4265,-4008,-5494,20885,18469,-4919,16562,2350,-3116,-6072,-2392,32767,18632,-3581,4905,-3528,11547,-10001,-4023,1041,-11089,-6766,-10967,-8680,26757,-1899,-11229,-6571,-10773,24683,-1785,8856,14361,1367,1914,6792,4438,-5601,24766,32767,9938,-8970,23309,-9652,-288,2833,5408,9377,3808,-3139,-3836,-3479,-7125,-3708,-8597,-4750,20018,-6733,-4569,20717,-5643,32767,-444,32353,5733,-11317,23458,-8906,-9974,-6268,6481,
19602,-3372,23768,-768,-4533,19339,-8526,11438,-10012,8954,27462,-384,-9368,26415,7854,32767,-2599,-9232,8536,-1709,-3506,27976,-1989,14370,-3810,-846,11495,-7235,-3312,7068,-10366,6209,6925,20413,7160,27158,2290,-3974,-8096,-10459,13747,-1982,29059,24681,-10607,30180,-10001,-3806,-11443,18954,-8073,21135,9500,1592,12866,10965,26959,6643,1733,-8463,8674,-8452,11059,23641,32767,10306,-5620,21552,24350,-8485,-5196,31524,30223,-4563,3384,3608,4142,-741,-10875,-6790,-10168,27811,32767,18517,5391,11233,-7221,-7570,13791,26247,-5259,5948,14058,-3237,32767,28303,-5765,-2421,-8759,32767,28601,32767,20927,-9655,895,18142,15530,8847,12632,16068,-10106,14813,-9754,1155,16521,-1830,8148,18447,14778,-653,31865,18168,-6386,-2322,949,3597,-3077,27792,-786,25482,27860,13460,-8161,-433,-11082,-10260,-6174,-3820,32767,-8135,14640,-9614,-2879,1136,23917,-460,-9222,23399,-8041,5223,-6822,18679,10178,17748,32767,-3256,-5388,4779,-9798,-489,19281,-10214,9214,21475,-1904,8416,10878,-7315,-9396,-1379,-9753,15257,-8875,15522,32767,25604,3409,3250,-708,-109,-11105,-2317,-6009,-3411,-3917,-7965,17357,29433,-3580,-10994,31377,-4622,14710,24954,-6097,-7516,-8873,-1615,1357,-6235,32767,-8905,-2102,-9419,16077,4471,32311,32767,25906,13311,-8558,-6864,32482,9664,-9104,32767,21136,-9235,29923,-9709,-7480,2753,-2372,-3975,21825,-3527,1709,-4357,28098,17182,9686,-2635,-7513,-9105,-8628,-753,4916,14302,-7541,-4294,27157,-10391,-9780,-8653,-5157,26692,16310,-7929,-1637,9441,-4236,-8942,27105,30013,-1684,5117,32767,-4205,-2770,13141,-9963,9157,7925,5597,-7460,-11267,-2634,-3332,-98,-3969,4967,-6140,-9333,32767,-10711,-664,-6759,14655,14663,-208,22761,24925,-415,-5927,-6309,-6118,-10101,32767,5768,1223,-9753,-1872,-9708,32767,-4962,5679,-5800,519,9869,-10252,-10893,-4063,-10754,-6811,4675,-781,10924,17465,27770,22632,-2853,-2097,9615,-6164,-11390,-1554,23598,-2501,-8972,-6048,16477,-11289,28173,32767,31134,-4059,-8100,21708,30419,-894,27286,11125,20926,30188,-2844,-10974,-327,-5322,29975,-10668,26656,-99
31,-2293,8438,32767,-5625,-10110,-4742,29766,24468,30741,-2474,-9142,-4647,-6475,-231,-8949,8851,13255,3139,8795,-8428,-3702,-3774,-571,21888,6984,-9359,17520,32767,-4387,14884,-2696,-7796,29276,-1941,-7466,-3747,-1371,28393,-7138,-7344,29462,-2569,-7822,21689,-3778,-1982,-2688,9625,31709,6165,-11111,-10142,-11104,13013,16474,-85,4188,-8136,-3141,8318,-11035,-9794,-9449,-3931,-770,26408,-6159,-8956,-8433,-8575,-11084,29524,-1932,-8012,12211,-2186,32767,29891,-10790,-4005,977,-9257,16309,4452,-6519,-2394,-9384,-10247,3077,-5948,4582,-1755,16238,11341,9943,-10319,17418,32767,-9135,22688,15609,-2214,-4609,15634,-4880,29118,-2934,1782,-6058,-8676,-4002,-120,16260,8367,25753,18405,32767,-1477,2617,-2013,32767,-5091,-1964,-206,28789,4924,-6142,-7952,-1686,-601,-6958,18536,-5516,16350,-9205,-891,-8058,-29,-10270,-11051,32767,-528,5371,6736,-6827,-383,23393,8696,14221,20941,-4693,-3128,7605,-9009,19488,18969,-4358,-3627,20362,32767,-4434,9327,14639,17913,32767,17910,11830,-4859,6240,-8701,6651,9511,26245,1626,-3252,-5444,26303,-5312,6083,-11068,10259,-9866,-10370,32767,-11155,-11277,27393,2660,4770,-9295,21153,-6125,-6647,12476,-2124,-10310,-9271,27365,23464,32767,4705,-1303,32767,-1609,-3732,23671,7142,-3900,-340,-5076,3838,-4535,32767,-7892,-5883,-6382,19402,-4705,31034,23308,-7752,11796,-7179,-3825,-5291,-6929,-5606,14399,3253,-1839,-10001,-10738,26167,-7982,-9945,-5415,-26,15945,-6064,-8093,-7441,31644,18066,-5674,24969,14648,29004,-838,24398,32767,32767,-3567,-281,-7051,18930,-3494,-4123,-10970,-2670,-8811,9615,24110,-5707,-1146,-3801,-11075,14962,-9276,9966,-5321,16683,27208,-1227,25059,30048,-2193,-174,-1960,30898,26540,-6982,-307,15904,-335,19268,10488,26400,-6464,26017,-199,29415,14647,-5027,-5377,32767,15059,-7596,21472,-9215,-1838,-4433,-3367,-6039,-4018,6864,404,3794,32767,-3990,-784,19329,-3582,-2686,-2875,17594,18772,-6639,-6322,-9097,32050,18764,-9729,24950,-4235,-4260,-4036,6328,32767,22141,20870,-2902,11300,22620,-11042,-2532,4578,12273,-3244,6413,26518,327
67,-4211,-5884,-1600,-5378,-4536,21446,28258,-9100,-6025,18369,32750,16249,32767,-10188,-8141,-9639,21569,-9968,440,1542,32767,32767,31204,-4767,-9004,28601,-11137,8725,28931,21556,-7382,4053,15483,883,4158,-6295,-10689,-9834,-9966,-1157,-9641,-2874,-8647,1880,-6537,26091,32767,-6235,26727,-983,1022,32767,-111,26211,-836,26515,27030,-1901,-1481,32767,17756,-2438,-9054,24526,-5660,-4233,1575,32767,-3319,-10545,9786,12727,32767,9338,19467,-4841,29691,10472,22240,-4182,14357,-7943,-2349,-8673,16321,10345,17748,-8568,15647,-3966,23498,-5298,20524,9888,-1572,10929,7501,-9932,-9344,-10912,-4395,-7727,-2664,7947,18497,-787,-9339,-8665,10169,-7245,17502,13705,-8143,32767,8687,30318,1680,18272,-9480,-5779,15299,-6233,-4577,-9348,17549,-5432,5919,-7378,-2765,25342,-9313,13464,-7918,2552,26220,21814,12412,18181,-6619,31014,32767,-8284,6008,-9453,-8492,11091,-8622,-5377,19536,-6136,13691,-4440,-9546,-3942,13556,-5199,-925,-1547,18338,-10364,-9113,27635,-4116,20636,23819,-5599,-10463,32767,12029,26954,29090,-11229,27597,-8135,-5407,-4049,-1006,19829,-4876,11360,19488,-7387,5231,-9519,-5018,-6534,-8415,-6076,6290,5124,-9243,-3442,24609,3742,15685,-10977,-1970,-10736,13425,32767,-4378,2532,-9334,32767,-10427,-8255,12320,11320,-11361,-2936,-11146,7883,-6096,4727,31114,4972,-7760,9533,25846,-7049,-2372,14664,-244,-8725,4320,-1734,-1266,15331,15662,-10485,-6183,6629,-10886,16744,-1288,10885,20183,-4678,32767,-8695,-3768,-4183,30153,-10031,16437,2035,32767,-2915,3440,19121,31800,-1223,29078,9053,7629 diff --git a/tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu18_input0_int16.csv b/tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu18_input0_int16.csv new file mode 100644 index 0000000..a78915c --- /dev/null +++ b/tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu18_input0_int16.csv @@ -0,0 +1 @@ 
+18049,-16591,-31658,-17386,-25287,4409,-3333,-9783,-21409,27148,-25473,21192,-13175,4008,-28706,7512,-9763,-7111,30506,5369,1088,28526,-2824,8412,-20836,26862,-10139,4088,-5929,27282,-2622,11981,-17077,-25784,-11654,30859,11882,-14325,17013,-10286,20211,-30255,-1466,-13209,-10600,-8756,6816,20719,-14430,-17786,-17340,-30989,-26484,-25535,10759,24676,-22940,13027,4329,-31763,30503,-6412,-5204,-5514,18761,7288,-29348,-5851,20340,-15230,-20159,-29063,7772,-31924,24294,21588,-20381,-5952,-18637,20386,11007,30313,-22903,-14583,-20363,12693,-8826,2968,-20933,-14370,-32239,29514,-25336,-16394,1095,20898,12463,10076,4323,9117,-18456,8934,27281,-15119,8763,3120,-28714,-10703,-32553,-12263,8086,-19979,14427,-5416,15221,-32321,20400,-2006,-28085,-19872,-1746,-31672,-21254,3515,7738,-25426,-21458,9094,20521,-19500,-9964,28775,30367,-12847,3990,5409,13932,7770,129,15740,1863,-8111,21882,15225,3151,-28195,26276,-9815,32547,-16574,-18490,17694,30586,25601,13989,24162,-31637,25032,27591,25606,21725,-23865,-22080,-29490,22054,1349,-19313,15811,-27202,-8214,-4238,-15482,-1506,11984,19040,-28813,6192,-31959,362,19219,-17067,-14754,10746,-23773,31332,22245,5749,29357,31822,-25912,2984,-28173,-2472,8359,23765,1762,-119,-11385,2062,-3064,-11836,-22176,26093,7218,-32166,-26594,1801,29305,7946,-13366,-28238,-6028,24151,12257,20753,904,-31201,2016,-1321,13305,7930,-3502,3006,-30852,-9703,28614,-27925,32003,25939,-20420,-30316,-8483,16465,-29543,8461,-6673,13861,-23429,15049,26526,28515,402,7554,-29162,-22819,439,-22738,5119,3304,-1296,8347,13072,4123,-14480,15302,-17335,-21192,16365,14656,-2372,13720,-2214,-29842,-23503,24738,2349,30437,17924,19867,23166,13719,3664,-11477,-15733,17942,15867,-14085,14228,2019,-8923,-17388,-6849,28927,16007,-10254,4214,-10101,9920,-28639,-11519,894,-31755,-19374,-31405,-24857,22987,-5437,-32154,-18816,-30850,21205,-5112,7608,12337,1174,1644,5835,3813,-16039,21276,29905,8538,-25685,20025,-27638,-823,2434,4646,8056,3271,-8988,-10985,-9963,-20402,-10618,-24617,
-13601,17197,-19281,-13082,17798,-16159,31275,-1270,27794,4925,-32408,20153,-25504,-28560,-17948,5568,16840,-9655,20419,-2199,-12979,16614,-24416,9826,-28671,7692,23592,-1098,-26827,22693,6747,30987,-7442,-26437,7333,-4893,-10039,24034,-5695,12345,-10909,-2422,9875,-20718,-9485,6072,-29683,5334,5949,17537,6151,23331,1967,-11380,-23184,-29949,11810,-5674,24964,21203,-30375,25927,-28638,-10899,-32767,16283,-23118,18157,8161,1368,11053,9420,23160,5707,1489,-24235,7452,-24204,9501,20310,28321,8854,-16092,18515,20919,-24298,-14878,27082,25964,-13065,2907,3100,3558,-2121,-31141,-19444,-29117,23892,30339,15908,4631,9650,-20678,-21678,11848,22549,-15058,5110,12077,-9268,28477,24315,-16508,-6932,-25081,28622,24571,28992,17978,-27647,769,15586,13342,7600,10852,13804,-28939,12726,-27932,992,14193,-5241,7000,15848,12696,-1869,27375,15608,-18287,-6649,815,3090,-8811,23876,-2250,21891,23934,11563,-23369,-1240,-31733,-29379,-17678,-10937,31448,-23294,12577,-27529,-8244,976,20547,-1316,-26409,20102,-23026,4487,-19535,16047,8744,15247,32713,-9324,-15430,4106,-28058,-1400,16564,-29248,7916,18449,-5451,7230,9345,-20946,-26907,-3947,-27927,13107,-25415,13335,29131,21996,2929,2792,-2026,-311,-31801,-6634,-17206,-9767,-11216,-22809,14911,25286,-10250,-31481,26956,-13235,12637,21438,-17459,-21522,-25407,-4623,1166,-17854,31862,-25501,-6019,-26971,13812,3841,27758,31881,22256,11435,-24506,-19654,27905,8302,-26070,30295,18158,-26445,25707,-27801,-21418,2365,-6792,-11382,18750,-10098,1468,-12476,24139,14761,8321,-7546,-21513,-26074,-24708,-2157,4223,12287,-21595,-12295,23330,-29754,-28007,-24777,-14768,22931,14012,-22704,-4686,8111,-12131,-25605,23286,25784,-4821,4396,30635,-12040,-7933,11289,-28529,7867,6808,4808,-21362,-32264,-7541,-9541,-280,-11364,4267,-17581,-26725,29723,-30673,-1900,-19354,12590,12597,-596,19554,21413,-1187,-16971,-18065,-17518,-28926,28345,4955,1051,-27927,-5361,-27800,29432,-14210,4879,-16609,446,8478,-29357,-31193,-11633,-30796,-19504,4016,-2236,9385,15004,23857,194
43,-8170,-6005,8260,-17651,-32615,-4449,20273,-7162,-25692,-17320,14155,-32326,24203,31241,26747,-11623,-23194,18649,26133,-2560,23441,9557,17977,25934,-8144,-31424,-936,-15239,25751,-30549,22900,-28438,-6567,7249,31516,-16108,-28951,-13578,25572,21020,26409,-7084,-26179,-13307,-18541,-660,-25625,7604,11387,2697,7556,-24135,-10600,-10807,-1635,18804,6000,-26800,15051,30952,-12562,12787,-7720,-22324,25151,-5559,-21380,-10728,-3925,24392,-20440,-21029,25311,-7357,-22399,18633,-10817,-5676,-7698,8269,27241,5296,-31818,-29043,-31797,11179,14153,-243,3598,-23298,-8993,7146,-31599,-28047,-27059,-11255,-2204,22687,-17636,-25646,-24149,-24555,-31740,25364,-5532,-22944,10490,-6259,29435,25679,-30897,-11467,839,-26509,14011,3825,-18668,-6856,-26873,-29342,2643,-17033,3936,-5025,13950,9743,8542,-29549,14964,29989,-26159,19491,13410,-6340,-13198,13431,-13975,25015,-8402,1531,-17348,-24844,-11461,-342,13969,7188,22124,15812,29940,-4230,2248,-5765,31461,-14577,-5624,-590,24732,4230,-17589,-22771,-4826,-1719,-19925,15924,-15796,14046,-26359,-2551,-23075,-83,-29409,-31646,32525,-1512,4614,5787,-19549,-1097,20097,7471,12217,17990,-13438,-8957,6533,-25799,16742,16296,-12480,-10387,17493,29784,-12696,8013,12576,15389,29217,15386,10163,-13915,5361,-24915,5714,8171,22547,1397,-9313,-15589,22597,-15211,5226,-31694,8813,-28251,-29694,28183,-31942,-32293,23533,2285,4098,-26617,18172,-17539,-19034,10718,-6081,-29524,-26549,23509,20158,32339,4042,-3732,32360,-4607,-10685,20336,6136,-11168,-973,-14535,3297,-12987,31158,-22598,-16845,-18274,16668,-13472,26661,20024,-22197,10134,-20557,-10954,-15152,-19841,-16054,12370,2795,-5266,-28638,-30750,22480,-22857,-28477,-15506,-73,13698,-17365,-23176,-21309,27185,15520,-16247,21451,12584,24917,-2398,20960,30006,31353,-10215,-804,-20192,16263,-10004,-11807,-31413,-7646,-25230,8260,20713,-16341,-3281,-10884,-31714,12854,-26561,8562,-15238,14332,23374,-3513,21528,25814,-6279,-498,-5612,26544,22800,-19994,-879,13663,-959,16553,9010,22680,-18509,22351,-569
,25270,12583,-14396,-15398,31260,12937,-21752,18446,-26388,-5264,-12694,-9640,-17294,-11504,5897,347,3259,29181,-11424,-2243,16605,-10256,-7690,-8233,15115,16127,-19010,-18103,-26049,27534,16120,-27861,21434,-12128,-12199,-11556,5436,30584,19021,17929,-8309,9708,19433,-31619,-7251,3933,10544,-9288,5509,22781,29534,-12058,-16849,-4582,-15401,-12989,18424,24276,-26059,-17254,15781,28135,13959,28973,-29174,-23312,-27601,18530,-28543,378,1325,30145,28437,26807,-13650,-25784,24571,-31893,7496,24854,18519,-21139,3482,13301,759,3572,-18025,-30608,-28161,-28537,-3312,-27608,-8229,-24762,1615,-18720,22415,29011,-17855,22961,-2813,878,30487,-316,22518,-2394,22779,23221,-5443,-4239,32575,15254,-6982,-25927,21070,-16208,-12122,1353,28259,-9503,-30197,8407,10934,32478,8022,16724,-13863,25507,8996,19106,-11975,12334,-22746,-6727,-24835,14021,8887,15247,-24536,13442,-11357,20187,-15171,17632,8495,-4502,9389,6444,-28442,-26758,-31248,-12584,-22128,-7627,6827,15891,-2254,-26743,-24814,8736,-20746,15036,11774,-23318,28469,7463,26046,1443,15697,-27146,-16548,13143,-17848,-13105,-26770,15076,-15554,5085,-21126,-7918,21771,-26668,11567,-22673,2192,22525,18740,10663,15619,-18955,26644,29024,-23723,5161,-27070,-24317,9528,-24690,-15398,16783,-17572,11762,-12715,-27335,-11287,11646,-14886,-2647,-4428,15754,-29678,-26095,23741,-11787,17728,20463,-16033,-29962,28333,10334,23156,24991,-32154,23708,-23295,-15482,-11593,-2880,17035,-13962,9759,16742,-21153,4494,-27257,-14369,-18711,-24098,-17398,5404,4402,-26469,-9856,21141,3215,13475,-31434,-5642,-30744,11533,31227,-12535,2175,-26729,30457,-29858,-23638,10584,9725,-32534,-8407,-31917,6772,-17456,4061,26730,4271,-22221,8190,22204,-20185,-6792,12598,-697,-24985,3711,-4966,-3625,13171,13455,-30024,-17706,5695,-31172,14385,-3687,9351,17339,-13395,28390,-24899,-10789,-11979,25904,-28725,14121,1748,32727,-8348,2955,16427,27319,-3501,24981,7777,6554 diff --git a/tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu19.tflite 
b/tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu19.tflite new file mode 100644 index 0000000..3e5b762 Binary files /dev/null and b/tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu19.tflite differ diff --git a/tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu19_golden_int16.csv b/tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu19_golden_int16.csv new file mode 100644 index 0000000..63a9334 --- /dev/null +++ b/tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu19_golden_int16.csv @@ -0,0 +1 @@ +17628,-10352,-8565,9643,-7110,32767,-4126,22797,-2484,10424,-7820,14988,-8182,7084,-10310,-91,27200,11671,-3518,-9465,-6569,9463,3344,22065,9890,-7682,-7258,20635,22841,18826,-2301,-6132,-2944,11946,-1997,-9936,31279,32767,25195,22053,-6218,14228,17628,25101,31024,-909,-10203,6796,23546,880,-1023,21584,32767,251,3755,-2352,30141,-4714,-3498,-7846,-1621,14587,-1518,-2446,14775,31233,19080,-8773,-7823,22569,-9805,-5270,-843,-7383,-2383,16625,8365,-10091,6118,-6724,-2067,-5166,3553,-3812,-3993,26061,2302,10133,25565,14250,23107,3900,-9106,-9261,29872,20620,24906,31962,-3220,18752,-1670,7683,32767,23844,-7314,15153,13624,18184,8117,17423,13622,-6932,10561,-3915,20875,14836,-1514,-6811,3132,18457,-2986,11310,16930,-8755,-9260,25898,-5175,-8091,4178,-7833,29396,14622,-5707,-3627,11295,-7657,-3494,11589,-4463,8305,32767,-6036,-9842,-87,14172,-9002,-6335,-7284,-1145,3353,21834,23808,-10346,24564,2104,-6519,-554,-1099,25960,-3866,24322,3890,6180,-2149,261,30077,19652,-1528,2662,6371,3342,-2458,-5895,29462,19392,-4179,-1059,-583,25422,-9846,25757,31816,24394,-9623,-10273,11268,-3856,7157,32767,270,624,-10322,-3396,-842,-4461,-9770,23874,-8195,15947,-3720,32317,-4231,-10356,24190,32767,-9611,-7798,21086,17940,10890,32139,-8238,-5754,32767,15942,-3288,-4676,-8587,-1538,-4833,-8514,14265,-10034,-6410,16976,10980,26801,-9746,-3262,-6088,-2072,32767,8222,-8647,-6070,20081,-49,-9945,860,-3678,2523
8,8967,23927,5292,22261,-6429,-7547,-1187,-6806,-2989,-2281,32733,12386,21309,-4061,28530,-5824,-869,26542,32767,29823,-328,26695,-9907,-6305,14160,-8840,-5279,-266,32644,10366,-10110,30268,-3920,25378,7294,32767,-9134,32546,25549,29755,-1806,-10382,-8618,13651,-8731,-9987,19046,29639,21089,2660,8897,32767,11794,26469,-3532,5282,-10075,-5068,32237,29535,-6307,18844,-5893,-5008,1559,20697,-8520,-356,-4686,-2640,24006,16612,-2949,14603,27715,-7704,25275,-2409,-9844,25873,3961,-9812,-875,-5858,-7512,-945,-4361,11468,-10048,-4901,-3906,-4366,-3126,32370,25464,24627,17420,7991,-4681,-7357,-10117,-7380,9447,16528,-8275,-518,591,-250,-9371,-7668,-2244,-9606,32767,3815,8969,-3837,-7066,15894,-1835,3764,-2762,-4346,15050,-10199,-4110,17014,-7616,24553,-9602,-4943,-6052,-4622,-4928,-47,-5140,32767,32362,-8720,-6455,32767,31028,3952,-458,-6878,9341,7475,-8425,32767,21305,9891,17475,4878,-4841,-2293,-2632,22596,-9592,-1812,20519,-3150,9688,-10031,-1142,20048,-6526,21189,-9064,3304,32767,-5724,-10368,-7828,-8779,3455,-9321,9249,15258,21203,-8344,-5055,7376,8272,-4791,27620,-4248,-5713,-873,32767,-2107,-4757,-8246,-9735,-9853,-2144,989,26874,8399,24476,8672,-149,24075,23627,8344,-4492,16182,2041,20112,24153,193,-8290,3905,-8535,115,-4986,-182,-3477,-8224,-85,24096,-3777,12866,13766,-4393,9263,-1787,-8675,-291,-6659,5,-7185,-8053,-8719,-7635,32767,26251,-7722,13354,-9842,-842,-7756,-3408,14389,-596,-3977,6386,9189,-6571,4140,1809,6848,26786,-9576,-6392,-77,-9947,-3403,-8060,32767,-6594,30811,15630,-3646,5856,-3668,-7265,7509,-9516,-7629,-4083,-8573,12860,9358,24437,31587,-5964,32767,-9005,13537,-6169,-9884,24453,-5217,-123,-8005,-5105,16660,-886,26481,-5476,-7087,32752,15944,14797,-6999,16017,14950,12310,30247,-2323,-6302,-5937,3137,-7621,7826,-9475,-141,-5092,24269,32246,-7986,5390,30666,13921,-965,32767,-331,1493,5448,-6443,-3103,-6578,-9887,-9598,-4535,-4879,-8802,2953,29759,11395,-5273,-5592,-1635,15570,-6667,-2206,14663,-2037,-8587,-7603,-6334,17531,16118,26537,26014,30081,84
87,6057,6334,-171,18036,-6556,-5640,-5481,2175,-2356,-10277,499,25615,25961,3446,-10415,-7691,-899,-1043,-5094,-7947,31305,32767,-7038,-10044,22370,2020,32053,-1182,-1718,-6171,-899,17496,-520,18994,-3550,6410,16122,26639,-4854,-5877,-747,-6121,-8113,-1151,-2572,27888,-8891,25303,22205,-7979,9044,32767,-1614,-2299,20369,-935,-2700,-2263,-10072,7227,16931,-2749,10993,-5305,32706,12696,-9936,26944,-1143,32767,-6360,2035,-6113,14569,-5655,-8799,-6920,32767,20489,-8567,-2779,19750,-10203,2991,-4804,18784,27864,-2215,32767,3408,-5401,-9464,-6972,6459,-1849,11172,-8303,-7721,-2280,17538,-9006,31571,29821,-1990,-5211,-9532,-990,-3658,6003,-8194,-619,23945,-5012,-1598,3699,21356,-7572,12498,-8121,2994,29120,4616,4881,8196,-1836,20109,18087,31093,-8885,-3495,-8664,31513,-2984,32767,26627,-5719,25607,19117,-7434,11795,-87,28658,15923,-4609,-5532,-2531,-6586,-5754,-2474,-8329,3596,30726,-5208,19904,-7004,8347,-8009,-2232,-7542,17015,-4294,10424,-8265,-4751,-955,6387,2773,-9894,-182,-1457,-5964,-7302,-2530,-5610,-8544,613,-4890,75,5734,14369,148,-3533,21837,-3887,16544,-6245,20998,942,-5024,-752,-10389,-9002,-5082,-5707,26354,29139,11945,20439,-617,-5379,4507,-7933,13202,21488,-3381,32767,-4090,-7866,9029,7173,-3943,23702,11103,29884,10668,20338,-7084,-4895,14559,7793,-10178,-137,-10028,1299,7026,-2992,-895,9216,31468,-7003,24261,10550,11048,-8601,-5042,-3980,29267,1323,-1577,-9813,27578,-3349,11419,-5284,14861,25192,-626,21257,-5890,17513,-6286,-7056,-3660,7263,-344,11326,30458,-8996,199,32767,-2726,-5407,14089,-7509,11698,-8090,-2520,31352,512,-2226,-8644,28745,-3509,10594,-1419,30699,15841,-7712,-8515,-10275,-9157,-1134,-5999,-5179,18349,26523,-7417,-9498,13925,18432,17333,-3731,22079,-3974,-2663,-5281,-6933,10647,23104,-6408,-9058,-8226,26341,3420,8189,-8863,-2533,6281,23794,-5156,-8311,30515,30409,-3674,13158,-9490,30022,28457,32458,-260,28409,32767,-1937,-2170,21854,18808,-1308,15807,-9183,12793,13081,-10047,8971,-8267,27473,13900,30024,4242,-4673,-7067,-7874,-7556,6689,1
1946,-8303,-6210,-2490,-2831,5459,-8153,-8000,15321,29470,1316,-10047,-7576,32767,5168,1134,-4614,14203,13185,-3018,8927,23495,25234,-2608,-2342,-4334,1122,-1592,-7067,-7269,7479,-10346,4778,29847,28051,31904,5097,25514,-8898,-1533,22120,11814,-20,-4828,-7804,-6588,1977,-487,-4261,32767,7308,32285,31648,-2754,-2864,4281,10638,-1871,6473,15506,3937,-2382,1982,11365,-172,32767,20010,-9189,-8605,21072,10365,32767,-8718,-9018,9114,-9162,26085,-9474,5360,-10259,-4986,4826,-7556,-813,-3647,28367,-3732,-8906,15039,-7598,-2228,256,-2650,19398,-10030,-8636,-5497,5165,-8494,-6939,16787,15121,-7159,-1722,-7202,32198,-7018,-6537,-388,17388,29975,-7061,-5852,26691,11588,21655,-8127,-567,-316,13200,32767,-4049,-1383,20330,28916,-148,19770,19123,10269,15292,-1694,-10309,26911,26943,32767,13067,16167,-10111,6215,-9876,14381,28942,6524,-1154,29561,2056,15745,-8746,-3746,18517,6888,6841,9679,32767,-9249,25749,-5063,-5738,-2705,4717,17786,26303,32767,20559,-7267,-4607,-7779,-1492,28921,6143,-2570,22176,-9739,-294,-5801,-3765,13508,-1018,9643,-7276,22113,31615,8823,-2623,31419,-9807,-10072,12559,3540,-3893,-3310,-9424,-6294,-3372,30874,-2030,13750,-142,-511,20967,-10289,13224,-3413,22731,27805,18618,28843,-10339,32482,28358,-8225,-6715,-5565,23618,4122,30617,5791,-9434,26356,-2715,12224,-7649,29187,2190,-4002,3279,2302,-2804,26170,-7925,4181,32767,-7585,8750,-7639,-4596,12707,20001,9016,-6874,24851,-9091,-4094,21312,14549,27395,25548,32767,5000,849,-9179,22816,-10213,-1801,23428,-8102,32480,24587,-1317,-8722,29993,-6114,-204,32589,-4969,7667,-6233,-8334,14380,-5598,-3681,-6488,24927,-4766,4709,-2754,3122,32767,-4138,23519,9948,28499,-9742,-6015,-8169,-4315,18218,5834,10241,-5591,5641,3654,5742,-8071,-3631,-422,-10316,6971,-476,-9914,781,1200,-9065,-2031,-3409,21174,10233,-8976,-9281,-9097,21397,-6592,-833,28956,9406,-8529,31768,29490,3114,-5346,-1069,-5159,-9861,-5340,-9526,-8813,-9026,17460,9805,22174,-5114,21605,-8827,23908,32767,9910,-974,31438,-3851,-4644,14466,16089,-10026,2397,26
687,-237,23691,14028,19299,-9945,-7180,-7154,30553,-4,-3349,18239,9314,-2928,32767,28794,-9846,-3557,-6358,-9678,-5480,25015,29898,11474,3234,-5224,-9668,-804,9606,-2351,-613,23678,-500,-10419,2031,-8194,24157,14,12961,20606,-6422,-8849,23010,-22,-4491,29853,12189,4783,-9423,31091,-2127,-5280,-7227,-6077,13581,15107,28650,3438,-9448,1904,-478,-229,10177,21434,-10293,-9543,2165,23527,1630,-5729,31027,-10066,23438,-2782,26137,-5469,-6708,28605,31070,12316,32767,10714,11677,20468,-5600,5726,-6836,32767,12080,18146,-9809,12170,-6915,-10183,-6132,-6036,32366,13131,-1318,21850,-7608,22968,20290,29333,3069,29703,16588,29353,-1440,27782,18974,-7599,-2754,28926,13112,23279,15078,32767,-4359,18909,843,-3229,-4027,-4986,-4165,-8800,-9315,13845,-5023,-4413,-9917,-2702,19627,-6724,29799,20724,15472,12076,6933,-2434,2046,29558,-10304,32342,-7868,-1449,6609,24137,16963,-4828,32767,4242,-3652,-8840,25605,8933,23136,22455,-174,-637,-2387,29067,5637,24251,-587,32767,18063,24023,-3282,-1473,-7125,-7308,32767,5206,30320,21368,4773,20936,-4456,19396,-854,30670,-10062,13741,23787,4085,-5669,-5390,-3893,15837,30191,-1844,32113,-3901,-5297,4215,-7024,-4905,28607,-5715,-8328,9177,-6480,-4107,15910,28958,-407,-10270,-4735,-2810,-6364,-5331,-8849,3677,-5857,22806,-3325,12992,-2647,32767,16667,31227,23704,-6170,-7169,4873,14782,-2701,-7256,-6507,15403,-5585,-9333,27813,12110,14069,26609,-8787,25931,-9410,-8852,-8641,32767,25639,21382,-6743,32644,-8404,-8963,-6133,-2482,-2644,-7250,13495,-40,-8791,-7642,29247,-3676,30411,32767,-2033,-10226,20211,-6296,-967,-8207,8212,15675,21361,-290,8096,-315,16016,-5325,28008,7443,31772,31596,-1383,-5849,1252,5268,-8601,-4825,-2167,-8828,31897,-9541,3155,-8911,-7857,-8407,-2034,-9358,-4044,-3966,-3363,18957,7237,-7132,5826,-4143,-3550,11660,26346,8273,-6656,-9587,-1981,32767,2693,-3355,8487,-1599,-4653,27708,27296,23163,10010,-3167,8292,-3863,7965,-10066,4439,-1081,7330,-1242,-9721,-2262,5825,2943,29016,8592,30978,-8349,15531,1208,-3609,15171,3594,4101,-6644,
-1958,-8039,29606,-4065,-2017,5930,-5401,11804,-1488,18201,-4967,2624,-592,-7433,-5614,16826,12708,-9426,-41,-3552,-4656,23890,21006,29810,-4097,-332,-8989,12799,-2774,-615,32767,-2064,1604,-6461,24897,10651,-2291,13639,-4609,-10048,7949,2656,-7194,1330,-3610,32517,-8812,1250,30382,-4667,32767,14299,28200,-2608,-5697,3938,17598,9754,-813,-7316,3367,-3398,29395,27453,28770,9233,-9868,-5374,2724,-8258,-7282,21311,15809,5317,-4074,9681,-1619,-3799,-1259,-328,-10291,5674,1415,8748,19665,4454,24536,-6171,-3444,-8671,6190,18086,-77,11817,-4355,20026,-9077,30402,32767,32299,19938,16166,28814,-7402,-555,21573,-1378,31366,-7204,32767,-6406,-8921,3016,-10155,302,-4412,15044,-10327,32127,-6441,-9697,-2182,32767,-2570,-556,26411,-3991,18266,26228,19518,-1392,32767,27889,12334,29900,12270,1579,-3493,14878,32767,32767,-3116,-9089,21846,-8934,32326,32041,-670,-6866,11506,-2863,17770,-4943,3657,23352,-7590,-7080,12589,7396,21770,3951,-122,-19,21706,32767,32767,28574,19228,1419,19439,-2074,7787,-5441,31106,-3704,-2878,244,-9829,-5813,-835,-638,-2445,6178,6788,32767,-66,28822,27534,26681,-1801,-6122,-2654,21731,-3367,-6422,24693,9005,6022,2248,-10093,-7020,32735,-4245,-4349,-9765,11145,-4856,27315,-5800,-4499,1695,22529,23664,-2416,-1147,-7552,20556,-10181,482,-5664,-589,-479,-6811,-4738,3457,-9826,8434,-5488,14363,5073,-6752,10862,10472,17774,6084,-2895,-519,-2303,29029,-5848,20943,31287,-9440,12195,-7007,2886,-3099,-10212,-2157,22223,-1642,16617,-1546,17838,-9641,-9844,24646,-715,-6482,-5500,-9543,-9474,18936,24003,22157,-7444,17336,-5233,8713,11180,18890,23054,-2190,-857,-9304,-7788,32767,23240,32767,-8273,-8438,3172,19601,105,-4742,30415,-3461,3789,-7519,-4416,27008,32767,30226,4920,-7056,18874,-5598,7789,6038,-3590,7246,21789,11803,18879,-5247,28458,19348,-6708,30147,-6824,17423,30017,-1261,-4129,-2794,-5577,-1256,6503,-7044,-8974,159,-3849,23419,14106,13473,29156,10842,24147,-1367,31271,18528,32767,13351,-5235,17141,-4966,3995,-7647,23784,-8239,-4888,5687,-8962,-8190,-9323,2749
6,3772,11463,-9940,-1352,23568,6792,-9302,4005,-6938,-8907,-9825,2414,-3348,4185,-9781,13939,-2794,-6597,4728,-1851,31289,3856,12249,32767,-3326,-3196,20331,10002,16242,16597,1774,-5533,23937,-8051,-7169,-4405,-9728,-6899,1309,15597,9826,-8901,19724,-6375,27273,-2059,-3728,3017,-6786,16938,23133,-483,-763,31084,16133,-7065,30909,22389,-193,-2359,-3764,28425,-4714,-2463,14405,27691,16580,-5437,-3919,5135,-2580,-3689,-3424,6790,32767,22861,26533,-4052,-8558,29058,-5977,28322,-5919,15006,32767,-707,-3651,20265,17283,-2229,-6721,-85,-2023,-777,6265,-10270,6421,10099,-4956,-5489,7368,4261,-6368,29726,19880,-4400,-4479,-8006,22324,-7383,32767,-9236,7148,-6103,-3438,-4287,-4019,32767,-2177,-1874,-5911,31155,5438,-1578,-9508,2059,29232,4575,7905,-618,-3927,30602,-64,31203,28853,32708,28026,19345,24138,-7518,30634,-4498,-7321,3507,28046,-8269,16178,32767,-9038,3393,7180,-2629,16108,-2586,-957,19745,4475,-1142,-808,7795,-5414,19970,-10114,17192,-7809,24405,-2598,32767,16029,22860,-6408,-6433,26327,-5119,23825,-288,-6595,-3255,5883,-2945,-1562,15183,6314,5876,588,-9124,-5230,-9402,25849,22404,-6274,30858,-2330,-5784,9216,-3903,-7577,7955,39,15558,-9881,-8736,-6415,-3473,13996,-7055,-1218,11744,-9188,-4070,-5894,-3988,-2422,9748,29483,-8361,9369,12080,31134,27128,-9680,-2938,24575,4164,-864,-3392,-4715,-5061,-5633,-5595,-2100,-6823,-2376,27191,28464,20572,8147,-3226,10322,-9924,-5124,-2440,3383,31499,-4852,-2514,-4110,-9019,-10119,-1599,8864,-7608,9776,-3981,-8286,-7087,5849,-5938,-1173,2147,4307,1904,18338,29057,-9711,32767,-6823,-10300,26686,4945,-10035,-8718,532,8604,-603,28735,-3639,3009,2254,-5188,-3788,-5390,16949,-6953,-2818,-6346,-2837,-2235,21503,25905,30619,18840,-7152,6309,21873,18598,32767,7843,23753,12491,-713,-1272,-1832,-1026,500,28693,-4358,11268,12908,-2041,-4683,4400,-401,-1189,-329,25911,23951,3339,12390,-9194,-3763,2337,-9654,16354,24785,32066,-2449,-7942,-4520,20653,21934,31978,-1136,-5242,9385,4572,31631,11153,28919,22471,23263,4167,2219,-4756,-4365,-1605,
-4693,19761,8214,-651,-8130,706,-5238,21250,27433,24104,32767,10452,-7904,-2053,19872,26481,32767,23947,31170,-1608,88,-9267,-7388,-4007,-3138,-4509,22295,29575,-5784,-2072,-10217,-9582,-9506,-9713,-2203,-4435,27999,26630,-3657,-1367,19960,15983,-2539,31865,-4448,9309,-8670,-9138,-5362,-1901,4051,28417,-9304,-4079,-1240,-3152,-9032,-4465,32767,25974,-4715,-6472,20159,-6693,-5814,28769,-2576,-3993,-2470,-10198,-9314,2863,32767,4333,4562,13062,10929,24542,-3393,31253,4471,-9052,6256,8765,26642,-1772,-8457,-10145,11421,32695,10043,23431,-1578,32515,-1055,-1467,20326,30812,-9377,28838,18943,15184,28159,-2874,-1058,19323,18297,10036,-8334,5479,-3318,-8012,11820,13010,-9112,-1476,-4670,19142,7999,14317,-6033,-9970,24736,4451,-8104,-4136,14210,2433,-7884,20460,9594,12416,-3063,18953,31934,24860,13384,-166,-8698,-6298,-528,-8999,-206,-1954,18409,13899,6659,-6512,402,-9364,6136,17000,-85,23106,-927,-7706,30943,29261,31563,8374,21259,-9457,-3118,19644,-5366,15946,-4497,-10147,7373,-5950,4193,11994,4102,4274,-5131,-7934,-5569,-9518,32208,9941,8570,25332,-1989,-4394,31760,-7263,-4860,30785,-720,2283,8550,12158,-119,-3753,-10193,28721,26532,-34,30096,-6792,-8950,13684,32767,7116,3590,-8446,31659,17572,27738,-5994,26533,-8580,-5158,-5625,-7662,23860,-9727,19051,-996,4984,12691,-9199,32762,19057,1836,19605,-1045,5510,-8912,31812,-4373,14320,-4735,942,27655,26198,8457,-8076,19900,-5722,-9808,-5268,-3802,27532,4312,-9060,-4258,-7834,-4681,7006 diff --git a/tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu19_input0_int16.csv b/tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu19_input0_int16.csv new file mode 100644 index 0000000..8c3bb76 --- /dev/null +++ b/tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu19_input0_int16.csv @@ -0,0 +1 @@ 
+16626,-32545,-26926,9095,-22351,32457,-12972,21501,-7810,9832,-24583,14136,-25721,6681,-32413,-284,25654,11008,-11059,-29755,-20650,8925,3154,20811,9328,-24152,-22817,19462,21543,17756,-7232,-19279,-9254,11267,-6276,-31236,29501,32500,23763,20800,-19548,13419,16626,23674,29261,-2856,-32077,6410,22208,830,-3215,20357,31190,237,3542,-7394,28428,-14820,-10997,-24667,-5095,13758,-4771,-7688,13935,29458,17996,-27581,-24593,21286,-30824,-16568,-2651,-23209,-7492,15680,7890,-31723,5770,-21140,-6498,-16240,3351,-11983,-12554,24580,2171,9557,24112,13440,21794,3678,-28626,-29116,28174,19448,23491,30146,-10122,17686,-5250,7246,32266,22489,-22994,14292,12850,17151,7656,16433,12848,-21792,9961,-12306,19689,13993,-4759,-21411,2954,17408,-9388,10667,15968,-27525,-29111,24426,-16270,-25438,3941,-24625,27725,13791,-17942,-11401,10653,-24073,-10984,10930,-14031,7833,31648,-18976,-30940,-272,13367,-28300,-19915,-22899,-3600,3162,20593,22455,-32527,23168,1984,-20494,-1741,-3455,24485,-12155,22940,3669,5829,-6755,246,28368,18535,-4803,2511,6009,3152,-7726,-18532,27788,18290,-13139,-3330,-1833,23977,-30955,24293,30008,23008,-30254,-32297,10628,-12122,6750,32502,255,589,-32450,-10676,-2647,-14024,-30714,22517,-25762,15041,-11696,30480,-13302,-32556,22815,31268,-30214,-24516,19888,16920,10271,30312,-25899,-18088,31292,15036,-10336,-14701,-26995,-4834,-15193,-26767,13454,-31546,-20151,16011,10356,25278,-30640,-10254,-19139,-6513,31603,7755,-27185,-19082,18940,-152,-31265,811,-11563,23804,8457,22567,4991,20996,-20210,-23726,-3731,-21395,-9395,-7170,30873,11682,20098,-12767,26909,-18308,-2732,25034,31081,28128,-1029,25178,-31147,-19823,13355,-27790,-16595,-836,30789,9777,-31783,28548,-12323,23936,6879,32250,-28716,30696,24097,28064,-5677,-32638,-27092,12875,-27447,-31398,17964,27955,19890,2509,8391,32043,11124,24965,-11102,4982,-31673,-15933,30405,27856,-19828,17773,-18527,-15743,1470,19521,-26784,-1119,-14733,-8300,22642,15668,-9269,13773,26140,-24221,23839,-7572,-30947,24403,3736,-30846,-2
750,-18417,-23617,-2971,-13711,10816,-31589,-15407,-12278,-13727,-9827,30530,24017,23227,16430,7537,-14715,-23128,-31805,-23201,8910,15589,-26015,-1629,557,-784,-29460,-24106,-7053,-30199,32714,3598,8459,-12062,-22213,14991,-5769,3550,-8682,-13663,14195,-32065,-12922,16047,-23944,23158,-30187,-15538,-19025,-14531,-15492,-146,-16158,32097,30523,-27415,-20294,31424,29265,3727,-1439,-21624,8810,7050,-26485,32128,20094,9329,16482,4601,-15220,-7209,-8274,21312,-30156,-5697,19353,-9904,9137,-31535,-3591,18909,-20516,19985,-28496,3116,32308,-17994,-32596,-24610,-27598,3259,-29305,8723,14391,19998,-26232,-15893,6957,7802,-15062,26050,-13356,-17960,-2743,32356,-6623,-14954,-25925,-30604,-30977,-6741,933,25347,7922,23085,8179,-469,22707,22284,7870,-14121,15262,1925,18969,22780,182,-26062,3683,-26833,108,-15674,-572,-10931,-25854,-266,22727,-11874,12135,12984,-13810,8737,-5618,-27272,-914,-20935,5,-22588,-25318,-27410,-24004,32128,24759,-24276,12595,-30942,-2646,-24384,-10713,13571,-1874,-12504,6023,8667,-20658,3905,1706,6459,25264,-30105,-20095,-241,-31271,-10698,-25340,31004,-20730,29060,14742,-11463,5523,-11530,-22841,7082,-29917,-23983,-12836,-26951,12129,8826,23048,29792,-18749,31508,-28309,12768,-19395,-31075,23063,-16401,-385,-25167,-16050,15713,-2784,24976,-17216,-22279,30891,15038,13956,-22004,15107,14100,11610,28528,-7302,-19811,-18666,2959,-23958,7381,-29788,-443,-16007,22890,30413,-25106,5084,28923,13130,-3032,32708,-1040,1408,5138,-20255,-9754,-20680,-31083,-30174,-14258,-15338,-27672,2785,28068,10747,-16577,-17581,-5140,14685,-20961,-6936,13830,-6402,-26995,-23901,-19912,16535,15202,25029,24536,28371,8005,5713,5974,-536,17011,-20612,-17730,-17232,2051,-7406,-32310,471,24159,24486,3250,-32743,-24180,-2827,-3279,-16015,-24984,29526,31571,-22125,-31575,21099,1905,30231,-3714,-5401,-19399,-2825,16502,-1635,17915,-11161,6046,15206,25125,-15259,-18477,-2348,-19243,-25506,-3619,-8084,26303,-27951,23865,20943,-25085,8530,30943,-5073,-7226,19211,-2938,-8488,-7115,-31663,6
816,15969,-8642,10368,-16677,30847,11974,-31236,25413,-3594,31954,-19994,1919,-19217,13741,-17779,-27661,-21756,31943,19325,-26934,-8735,18628,-32077,2821,-15103,17716,26280,-6963,31543,3214,-16978,-29753,-21919,6092,-5812,10537,-26102,-24274,-7167,16541,-28313,29777,28126,-6255,-16382,-29966,-3113,-11499,5662,-25761,-1946,22584,-15755,-5022,3489,20142,-23806,11788,-25531,2824,27465,4354,4604,7730,-5771,18966,17059,29326,-27933,-10988,-27239,29722,-9382,32598,25114,-17979,24152,18031,-23372,11125,-273,27029,15018,-14490,-17391,-7957,-20704,-18089,-7778,-26186,3392,28980,-16372,18773,-22018,7873,-25179,-7015,-23710,16048,-13500,9832,-25982,-14935,-3003,6024,2615,-31104,-570,-4579,-18750,-22955,-7954,-17635,-26860,578,-15373,71,5408,13552,140,-11108,20596,-12221,15604,-19634,19805,888,-15794,-2363,-32661,-28299,-15975,-17940,24856,27483,11266,19277,-1938,-16910,4251,-24939,12452,20267,-10630,31032,-12857,-24729,8516,6765,-12397,22355,10472,28186,10062,19182,-22270,-15387,13732,7350,-31998,-430,-31526,1225,6627,-9407,-2814,8692,29680,-22015,22882,9950,10420,-27040,-15851,-12511,27604,1248,-4956,-30851,26011,-10527,10770,-16611,14016,23760,-1968,20049,-18518,16518,-19761,-22183,-11506,6850,-1080,10682,28727,-28283,188,30957,-8570,-16998,13288,-23606,11033,-25433,-7922,29570,483,-6999,-27175,27111,-11032,9992,-4461,28954,14941,-24244,-26770,-32302,-28789,-3565,-18858,-16282,17306,25016,-23316,-29859,13134,17384,16348,-11728,20824,-12492,-8372,-16603,-21795,10042,21791,-20144,-28477,-25861,24844,3226,7724,-27863,-7963,5924,22442,-16209,-26129,28781,28681,-11550,12410,-29835,28316,26840,30613,-818,26794,32204,-6089,-6822,20612,17739,-4110,14909,-28869,12066,12338,-31586,8461,-25990,25912,13110,28318,4001,-14692,-22217,-24754,-23753,6309,11267,-26103,-19523,-7826,-8900,5149,-25630,-25150,14450,27795,1241,-31587,-23816,30988,4874,1070,-14505,13396,12436,-9486,8420,22160,23800,-8199,-7361,-13626,1058,-5004,-22218,-22851,7054,-32525,4506,28151,26457,30091,4807,24064,-27972,-48
18,20863,11143,-62,-15178,-24535,-20712,1865,-1530,-13395,31878,6893,30450,29849,-8658,-9002,4038,10033,-5881,6105,14625,3713,-7488,1869,10719,-540,31193,18873,-28887,-27052,19874,9776,30999,-27409,-28352,8596,-28802,24603,-29786,5055,-32253,-15675,4552,-23753,-2554,-11466,26755,-11732,-27998,14184,-23885,-7005,241,-8329,18296,-31534,-27151,-17281,4871,-26703,-21815,15833,14262,-22506,-5414,-22642,30368,-22064,-20550,-1218,16400,28271,-22198,-18397,25174,10929,20424,-25550,-1782,-993,12450,31498,-12729,-4347,19175,27273,-463,18646,18036,9685,14423,-5326,-32409,25382,25412,32306,12324,15248,-31787,5862,-31048,13564,27297,6153,-3628,27881,1939,14850,-27495,-11777,17465,6497,6452,9129,31360,-29077,24286,-15917,-18039,-8505,4449,16775,24808,31164,19391,-22847,-14484,-24454,-4689,27277,5794,-8080,20916,-30619,-924,-18238,-11836,12740,-3201,9095,-22874,20856,29818,8322,-8247,29633,-30832,-31666,11845,3339,-12239,-10404,-29626,-19788,-10599,29119,-6381,12969,-447,-1607,19775,-32348,12472,-10730,21439,26225,17560,27204,-32503,30636,26746,-25857,-21111,-17494,22276,3888,28877,5462,-29659,24858,-8535,11529,-24047,27528,2066,-12581,3093,2171,-8815,24683,-24914,3943,30965,-23845,8253,-24014,-14447,11985,18864,8504,-21611,23439,-28580,-12870,20101,13722,25838,24096,32727,4716,801,-28857,21519,-32107,-5661,22097,-25471,30634,23190,-4139,-27419,28288,-19222,-642,30737,-15621,7231,-19595,-26199,13563,-17599,-11573,-20398,23510,-14982,4441,-8658,2945,31219,-13008,22182,9383,26879,-30628,-18911,-25683,-13566,17183,5502,9659,-17578,5320,3446,5416,-25375,-11416,-1326,-32433,6575,-1497,-31169,737,1132,-28500,-6385,-10717,19971,9651,-28220,-29177,-28600,20181,-20725,-2619,27310,8871,-26812,29963,27814,2937,-16807,-3359,-16217,-31001,-16788,-29949,-27706,-28375,16468,9248,20914,-16076,20377,-27750,22549,30918,9347,-3060,29651,-12107,-14601,13644,15175,-31520,2261,25170,-744,22345,13231,18202,-31266,-22573,-22491,28817,-11,-10528,17202,8785,-9203,31688,27158,-30955,-11182,-19989,-30425,-17
228,23593,28199,10822,3050,-16423,-30395,-2526,9060,-7389,-1925,22332,-1572,-32756,1916,-25761,22784,13,12224,19435,-20189,-27820,21702,-69,-14119,28156,11496,4511,-29625,29324,-6686,-16598,-22720,-19105,12809,14248,27022,3243,-29703,1796,-1501,-720,9599,20216,-32360,-30001,2042,22190,1537,-18010,29264,-31647,22106,-8746,24652,-17194,-21087,26979,29304,11616,30928,10105,11013,19305,-17605,5401,-21492,32513,11393,17115,-30837,11478,-21740,-32014,-19276,-18975,30527,12385,-4144,20608,-23919,21663,19137,27666,2895,28015,15645,27685,-4526,26203,17896,-23891,-8658,27282,12367,21956,14221,31631,-13705,17834,795,-10151,-12659,-15675,-13093,-27667,-29284,13058,-15790,-13874,-31178,-8495,18512,-21138,28105,19546,14593,11390,6539,-7653,1930,27878,-32394,30504,-24736,-4555,6233,22765,15999,-15177,31241,4001,-11482,-27790,24150,8425,21821,21179,-545,-2003,-7504,27415,5317,22873,-1846,31454,17036,22658,-10317,-4631,-22398,-22976,32632,4910,28597,20154,4502,19746,-14007,18294,-2683,28927,-31632,12960,22435,3853,-17822,-16946,-12239,14937,28475,-5796,30288,-12264,-16652,3975,-22082,-15419,26981,-17965,-26181,8655,-20371,-12912,15006,27312,-1278,-32287,-14886,-8834,-20007,-16759,-27819,3468,-18414,21510,-10454,12254,-8321,31523,15720,29452,22357,-19396,-22538,4596,13942,-8491,-22811,-20456,14528,-17558,-29340,26232,11422,13269,25097,-27624,24457,-29584,-27828,-27165,31616,24182,20167,-21197,30789,-26422,-28179,-19282,-7802,-8311,-22792,12728,-126,-27638,-24024,27585,-11557,28683,31745,-6390,-32150,19062,-19793,-3039,-25801,7745,14784,20147,-911,7636,-989,15106,-16739,26416,7020,29966,29800,-4348,-18387,1181,4969,-27040,-15168,-6811,-27754,30084,-29996,2976,-28014,-24700,-26430,-6395,-29421,-12712,-12469,-10571,17880,6826,-22422,5495,-13023,-11160,10997,24849,7803,-20925,-30139,-6226,32153,2540,-10547,8005,-5026,-14629,26133,25745,21847,9441,-9957,7821,-12143,7512,-31647,4187,-3399,6913,-3903,-30561,-7111,5494,2776,27367,8104,29217,-26249,14648,1139,-11346,14309,3390,3868,-20887,-61
54,-25273,27923,-12778,-6340,5593,-16979,11133,-4677,17167,-15616,2475,-1861,-23367,-17649,15870,11986,-29632,-127,-11166,-14636,22532,19812,28116,-12879,-1042,-28260,12072,-8719,-1933,31177,-6488,1513,-20312,23482,10046,-7201,12864,-14488,-31588,7497,2505,-22615,1254,-11348,30669,-27703,1179,28655,-14673,31383,13486,26597,-8199,-17909,3714,16598,9200,-2554,-23000,3176,-10681,27724,25893,27135,8708,-31022,-16893,2569,-25963,-22893,20100,14911,5015,-12809,9131,-5088,-11943,-3956,-1030,-32353,5352,1335,8251,18547,4201,23142,-19401,-10826,-27261,5838,17058,-241,11145,-13690,18888,-28537,28674,32168,30463,18805,15247,27176,-23271,-1745,20347,-4333,29583,-22648,31681,-20139,-28046,2845,-31927,285,-13869,14189,-32467,30301,-20248,-30485,-6860,31611,-8080,-1748,24910,-12548,17228,24737,18409,-4377,31226,26304,11633,28201,11573,1489,-10982,14032,31510,30959,-9797,-28575,20604,-28086,30489,30220,-2105,-21585,10852,-8999,16760,-15538,3449,22025,-23861,-22258,11874,6976,20533,3726,-382,-58,20472,32635,30951,26950,18135,1338,18334,-6519,7344,-17104,29338,-11643,-9046,230,-30901,-18275,-2623,-2005,-7687,5827,6402,31229,-208,27184,25969,25165,-5660,-19245,-8342,20496,-10584,-20190,23290,8493,5680,2120,-31731,-22068,30875,-13345,-13673,-30700,10512,-15267,25763,-18233,-14143,1599,21249,22319,-7595,-3604,-23743,19388,-32006,455,-17807,-1852,-1506,-21411,-14894,3261,-30891,7955,-17253,13547,4785,-21228,10245,9877,16764,5738,-9100,-1632,-7239,27379,-18385,19753,29509,-29677,11502,-22027,2722,-9741,-32106,-6780,20960,-5161,15673,-4859,16824,-30311,-30948,23245,-2246,-20379,-17291,-30002,-29783,17860,22639,20898,-23403,16351,-16450,8218,10545,17816,21744,-6883,-2695,-29249,-24483,32324,21919,31538,-26009,-26526,2992,18487,99,-14908,28686,-10879,3574,-23637,-13884,25473,32172,28508,4640,-22181,17801,-17600,7346,5695,-11286,6834,20551,11132,17806,-16495,26841,18248,-21088,28434,-21454,16433,28311,-3964,-12980,-8784,-17534,-3948,6133,-22145,-28213,150,-12101,22088,13304,12707,27499,10226,
22775,-4298,29494,17475,32124,12592,-16458,16167,-15612,3768,-24042,22432,-25901,-15368,5364,-28174,-25749,-29311,25933,3558,10812,-31251,-4250,22229,6406,-29243,3777,-21813,-28003,-30888,2277,-10524,3947,-30749,13147,-8784,-20741,4459,-5818,29511,3637,11553,32403,-10457,-10046,19176,9434,15319,15654,1673,-17393,22577,-25312,-22539,-13847,-30582,-21689,1235,14711,9268,-27982,18603,-20040,25723,-6471,-11720,2846,-21335,15975,21818,-1519,-2398,29317,15216,-22210,29152,21117,-606,-7417,-11832,26810,-14818,-7743,13586,26117,15638,-17091,-12319,4843,-8112,-11597,-10763,6404,31425,21562,25025,-12739,-26906,27407,-18790,26712,-18608,14153,32754,-2222,-11477,19113,16301,-7006,-21130,-265,-6358,-2443,5909,-32288,6056,9525,-15581,-17255,6949,4019,-20021,28037,18750,-13831,-14080,-25170,21055,-23212,31297,-29036,6742,-19185,-10809,-13476,-12636,31494,-6842,-5892,-18582,29384,5129,-4959,-29892,1942,27571,4315,7456,-1941,-12345,28863,-200,29430,27213,30849,26433,18246,22766,-23634,28893,-14141,-23016,3308,26452,-25995,15259,31833,-28413,3200,6772,-8263,15193,-8129,-3007,18623,4221,-3591,-2540,7352,-17019,18835,-31795,16215,-24551,23018,-8168,31373,15118,21561,-20144,-20224,24831,-16092,22471,-905,-20733,-10234,5549,-9258,-4911,14320,5955,5542,555,-28684,-16441,-29558,24380,21131,-19723,29104,-7325,-18184,8692,-12271,-23819,7503,37,14674,-31065,-27465,-20168,-10919,13201,-22180,-3828,11077,-28884,-12796,-18528,-12538,-7614,9194,27807,-26284,8837,11393,29365,25586,-30433,-9235,23178,3927,-2716,-10662,-14824,-15912,-17708,-17588,-6601,-21450,-7468,25646,26846,19403,7684,-10141,9735,-31200,-16110,-7669,3191,29709,-15252,-7902,-12921,-28355,-31812,-5027,8360,-23917,9220,-12515,-26051,-22280,5517,-18668,-3687,2025,4062,1796,17296,27406,-30529,31873,-21449,-32382,25169,4664,-31547,-27407,502,8115,-1895,27102,-11439,2838,2126,-16310,-11909,-16946,15986,-21858,-8858,-19951,-8917,-7026,20281,24433,28879,17769,-22484,5950,20630,17541,31567,7397,22403,11781,-2240,-3998,-5760,-3224,472,27062
,-13699,10628,12174,-6417,-14721,4150,-1261,-3738,-1032,24438,22590,3149,11686,-28903,-11831,2204,-30350,15425,23376,30244,-7699,-24968,-14210,19479,20687,30161,-3571,-16480,8852,4312,29833,10519,27275,21194,21941,3930,2093,-14950,-13722,-5044,-14753,18638,7747,-2045,-25558,666,-16468,20042,25874,22734,32666,9858,-24848,-6455,18743,24976,32143,22586,29399,-5056,83,-29134,-23226,-12596,-9864,-14176,21028,27894,-18182,-6513,-32121,-30124,-29886,-30535,-6925,-13941,26408,25117,-11495,-4297,18826,15075,-7981,30054,-13982,8780,-27258,-28728,-16857,-5975,3821,26802,-29249,-12823,-3899,-9909,-28395,-14037,31881,24498,-14823,-20347,19013,-21042,-18277,27134,-8097,-12553,-7764,-32061,-29281,2700,31139,4087,4303,12320,10308,23147,-10667,29477,4217,-28459,5900,8267,25128,-5569,-26587,-31894,10772,30837,9472,22099,-4959,30667,-3317,-4610,19171,29061,-29480,27199,17866,14321,26559,-9036,-3325,18225,17257,9466,-26200,5168,-10430,-25189,11148,12271,-28647,-4641,-14682,18054,7544,13503,-18965,-31344,23330,4198,-25476,-13003,13402,2295,-24785,19297,9049,11710,-9628,17876,30119,23447,12623,-520,-27346,-19801,-1658,-28292,-648,-6142,17363,13109,6281,-20471,379,-29440,5787,16034,-268,21793,-2913,-24226,29184,27598,29769,7898,20051,-29732,-9803,18528,-16869,15040,-14137,-31899,6954,-18706,3955,11312,3869,4031,-16130,-24943,-17506,-29924,30378,9376,8083,23892,-6252,-13812,29955,-22833,-15278,29035,-2264,2153,8064,11467,-373,-11798,-32045,27089,25024,-107,28386,-21352,-28137,12906,31958,6712,3386,-26551,29860,16573,26162,-18845,25025,-26975,-16214,-17684,-24089,22504,-30581,17968,-3131,4701,11970,-28921,30900,17974,1732,18491,-3286,5197,-28019,30004,-13746,13506,-14884,888,26083,24709,7976,-25389,18769,-17987,-30836,-16561,-11952,25967,4067,-28482,-13386,-24630,-14716,6608 diff --git a/tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu1_golden_int16.csv b/tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu1_golden_int16.csv new file mode 100644 index 
0000000..2b5ace4 --- /dev/null +++ b/tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu1_golden_int16.csv @@ -0,0 +1 @@ +-5390,-507,-3488,-1508,32767,-4031,30366,2005,1755,-8203,-1521,25660,11272,29190,20706,-8377,32767,29839,-11322,-9632,-11086,-4779,32767,-751,-10356,15607,9875,8555,32498,9585,13490,30087,-5112,-7481,21426,9781,-3948,-5869,-6328,2083,-10816,-6871,-10825,-8969,1213,-3331,22021,-5839,22648,-8500,-1278,-3258,-4755,-2855,-10594,-1641,26230,-1445,22243,30373,28086,17730,2787,1246,17352,18994,-6802,21759,-1928,-12176,-11395,20178,5572,32767,-7264,-8787,-5234,-7789,-3543,9372,1829,-8588,-7637,-255,-3743,19017,-5474,30333,-3483,23997,-1334,-3403,-5873,10987,32767,3435,-7849,-10907,-149,20997,20543,-7262,29115,26227,-10267,29076,-1283,-8917,26413,-7580,-11772,32767,-12280,10710,-8884,-3131,2052,-11857,32767,13256,3956,-4731,9068,23196,-6623,-3005,-3848,-9477,32767,-454,-3902,27426,5311,-2390,-10186,29944,-235,24498,-1736,20388,27572,32767,14406,27603,-6899,-4587,7877,-10474,-7658,3816,14190,-11986,-317,-2497,23840,32009,-3093,-12243,17621,-11780,-2654,4916,15265,32767,-11880,-3879,-8960,-3935,-9170,23999,25395,9627,-4306,-1676,32767,19275,-1110,-3138,345,-10491,-3665,30903,32767,-5059,-1369,-9912,-7323,-5049,31724,-8767,8819,-6141,1245,-11905,8354,18930,-10432,-8800,1189,18187,-1405,9507,7492,17975,28032,20380,32767,-6067,28002,3610,-9526,-11727,21874,-11607,28189,11004,-391,20906,-4060,32767,-1546,-941,27173,32767,13137,-10885,-8086,-7407,17498,3714,-10313,-3437,25951,22193,15733,-367,-10339,3274,-7387,31328,-6449,-407,-11060,5068,32767,-8206,-9680,22643,-5717,-2545,32767,29134,-3830,-1099,19374,-838,-6650,-9666,-5606,14078,3842,-5268,32767,3919,-1696,29946,32767,145,-7513,-10096,-3303,9889,-8742,-7884,10613,7976,-8102,6859,32767,26428,32767,5833,-4889,-12020,-12116,-5044,-4909,32767,30953,12044,14860,-8381,-3962,4167,-4148,-1913,-4732,-2143,32767,-4496,-4404,-7090,-6793,-9621,10785,-10295,487,-1153,-7488,21426,-9802,14286,-8270,-10874,-
5741,-9363,-6162,5180,-8193,12187,26335,-5514,12989,8926,12356,6659,-6600,-32,9050,32767,-11012,-3883,25462,30104,-6391,-7704,-3457,-10036,-8390,13453,1764,-11750,-10182,1070,-5105,-8761,-12066,5862,18876,9284,6956,32184,8945,18509,24432,-11758,-12166,-7221,-8062,-6691,-9921,5491,32767,32767,-7782,-10002,32718,-11690,20437,-9604,-9186,-5026,-576,32767,14388,-2452,-7778,-9202,-8767,9177,-10999,-2781,32767,24983,-9667,-7252,-4128,-6321,5311,-5280,32767,-9654,-11732,-7859,32767,-10210,-6690,13305,19998,-8974,32767,-8316,22124,-3639,6054,-12015,29526,-4281,-10841,-4566,-4541,-12306,-3075,13558,-4632,-9380,21146,14676,1764,-10780,-102,-8319,21006,-5227,1701,-7536,-8208,-11091,32767,16993,15588,-6517,-11667,-4634,24959,-6739,32767,-2007,24816,-2321,1335,-3736,-9314,-3244,-7711,32767,32767,32767,-7080,-10733,22516,-11430,-6677,23360,15064,-4996,32767,-4023,24774,13793,28753,-8679,13521,-7806,-5862,-9141,28052,-8866,-7666,27860,-4248,32767,6109,-5720,-2866,32767,-5876,19362,32767,1718,19734,-5050,32767,4118,-4654,-484,-4691,32767,10990,27356,32216,-882,-278,-11595,-6237,309,32767,32767,-439,-5794,-9982,-2351,-2154,14526,148,844,32767,-6502,5596,27328,-4651,-8314,-3771,-10001,-10755,19790,32767,758,-9999,32767,17019,10667,-4595,-2911,-8246,621,6416,-9622,24110,-6745,-10459,-6219,12515,-1757,-5105,25949,32767,32767,11585,5698,-7194,20674,-4711,17736,17041,-1963,31158,30615,31169,-7940,2424,-2687,-12071,9857,32767,-8939,32767,12186,-10109,-5319,-10716,17199,-4316,-11697,21657,-6887,-1869,-9591,-11347,31588,2263,11533,-8239,32767,-7242,2981,-3028,-8827,-7877,-3340,-3628,32767,-6173,-2759,32767,30170,-5157,29821,16799,-2256,-10673,-302,-11181,-10471,3001,-1581,-12184,-9024,-9080,32767,-964,-7006,-11961,-5097,293,-3208,-141,22136,-10275,-11698,-3039,-9865,27494,17404,12845,22560,-9605,26103,-4774,-11963,-3816,-11608,-10542,28761,-7987,-3037,-9333,8225,32767,8305,-10821,-1363,25294,32767,-4837,-5754,-1331,32767,25838,10507,-11065,-4268,19349,28457,21408,-7987,9556,-5517,-10022,-10
361,5461,-7437,-6276,20155,914,32767,-3699,31355,-3205,18088,-11005,4616,-3418,-11276,-7411,-6125,32767,-9181,12758,-335,11736,-10432,1075,-8101,-8296,-6484,18473,3776,10472,7184,-11954,-9836,-3289,-8856,32008,-11656,-8075,-7386,-8942,8234,-11356,-9261,27861,-9179,-5613,-4307,11321,32767,-8424,32767,5417,9117,-6531,32767,31955,-730,17365,12777,32767,32767,2794,-1134,13496,12065,20536,8980,13768,-9435,-334,-12002,-946,-1878,-6520,24533,-10720,29511,32767,-8422,1344,31390,-10125,7333,-8191,-7814,-7912,-165,-4897,13012,-5659,28597,32767,32469,-4874,-9038,10795,14790,-7168,-5604,-12152,-10670,32767,8244,32767,-1661,-10229,32767,-10572,-4031,-4136,-11808,-8935,-2077,-8428,-9342,32767,-9462,32767,-8435,-7911,-1803,10896,-6321,14350,12085,8075,-8331,23394,24300,-1315,-2488,-207,-3440,23590,-11262,2024,-7795,-9174,5365,-2590,32767,-4346,32767,-1947,-9391,32767,23173,2239,-2121,24488,24681,-8565,-5398,-6087,32767,17899,-1073,11619,32767,-3160,-8086,26445,14614,-2808,-10259,32767,-2277,-1777,28038,-12180,101,30623,6019,-11011,-11719,26665,-7341,14762,15275,28954,32767,16479,-2719,-771,-2948,-2899,31483,-4169,27888,-7141,4090,-7539,8457,12167,9992,9278,32767,-3159,18256,-8406,25330,-8967,-8150,31193,-10429,-3539,20526,32767,32767,22140,-5921,14883,9585,20986,2076,-9164,-11908,-4552,443,-8079,31558,32767,-196,-2862,7168,26697,7617,15010,-10287,23432,20635,-8411,25216,-2035,-5096,28580,-1301,-2391,-12037,32767,-5442,-11501,18703,-2362,25238,1254,13126,-775,30913,-11591,7838,17589,9832,-9683,-6074,19505,8417,19487,5326,2106,1915,-1176,32767,-2039,-9112,-9377,8067,25776,-4809,3204,11664,-8440,-11178,-775,1540,-367,-287,20557,32767,-5807,32767,-3761,1078,-10711,4841,-3538,32767,-1372,-9566,-5489,13252,27292,-680,-2520,-1659,26672,-3751,-5692,21802,20757,-5409,15304,-6828,31423,-1027,-10176,32767,3474,-9302,-5725,8731,12118,17758,21034,-4867,24583,-4915,-7493,32767,20703,-614,-8295,-10080,4681,-8273,8718,20149,32767,-1,27857,-1275,-8001,-8345,-11263,-5722,27116,12613,-2263,-5260,-95
92,-293,-8558,10090,20967,32767,-5895,-9072,32767,-1372,9593,32767,-338,16140,-11312,25395,23820,5239,-9207,-1875,32767,23545,-9272,493,-11207,-6712,32767,-11374,27744,-3687,-1572,-7678,-12100,17376,-5401,32767,10533,32767,-11033,22284,22015,756,32767,-5609,-1893,-4600,-69,-2227,17039,5858,21283,-11583,21082,-3834,1821,6410,-7448,3699,17986,9852,-9902,-6128,7000,8908,13137,28750,-6205,32767,31758,-4858,-3373,-8989,30559,-11816,-4111,32767,-7140,3502,-3112,-10419,11178,-3161,32767,23556,-2934,12932,25178,10397,-4538,32767,32767,-10267,28448,20034,15488,-8624,26513,-11172,-9707,29279,17049,-6722,-5390,6058,32767,32767,32767,-10036,2088,12233,-11401,-1180,-6051,30627,-9592,15114,20634,-10625,-11831,32767,5053,-4051,-6784,-5809,3798,26357,26875,16921,23636,-11899,32767,3988,15475,-9019,-4641,28129,9967,-9813,-1856,11843,-12235,-81,-2624,-10335,-8749,6502,-9429,-3373,-1932,17266,-7656,4237,32371,-3447,13716,-5704,1762,-1516,-1897,32767,20704,-1093,10075,29628,-11526,-10074,13412,-8187,-8654,32767,-814,-3400,-2446,-316,12839,-9905,17372,404,-40,-11582,25866,4931,23693,26860,-2438,-11349,9043,-11814,32767,5805,6108,-8638,22160,-7499,-1136,-1111,25467,-1811,-12348,18357,-1122,-5711,-931,-9230,32767,-3978,3391,32767,27092,25878,13983,16049,28839,-1669,6635,11427,-8372,1491,-9609,-6321,20659,32767,17274,-1704,32767,31878,-9467,-717,-4496,32044,32767,32767,6717,-7712,-2256,19979,1201,-11398,1441,-10242,17826,-1472,32767,292,-3148,-5774,32767,31968,-11089,-7491,1193,30446,18153,12484,13942,32767,-8961,-3313,-3177,32767,-328,-6477,19731,-285,11655,7627,-70,-4123,-10076,-1258,11303,-8041,32767,-9198,-3123,-2309,-6880,-4982,-7906,-3288,22404,-8932,-9919,32767,-4965,-5565,-4473,13389,27236,-2191,28593,-1396,-10164,-11402,-810,-5241,-3275,4999,-3278,32767,30177,32767,18131,-2229,-10951,-10535,12699,-347,31317,6004,-6536,-11079,18933,20533,-2108,-6317,32767,-9975,32167,20377,20266,-7748,-3461,27857,-5638,-944,11906,-9449,-9806,-9086,21754,-2327,-8875,13311,-7985,32767,-7110,27191,-11
217,24381,-4036,-6864,-5565,-8254,-75,13347,-297,10247,-9853,24563,-2940,-7440,-11092,5839,75,-8547,32767,16501,210,28042,-2667,23345,15119,-1402,32767,10399,31051,9705,-9345,19839,-2589,-323,-6576,32767,-1571,30676,10975,-7750,-1454,32767,-12208,24992,30738,-7521,-12285,18762,-3477,-7747,-9071,7686,-3212,17239,-12029,25743,6023,-2956,1759,11303,22755,-682,16626,7989,21426,-7835,21094,24694,-3480,12082,8854,15368,20203,-10513,32767,8694,-4448,29775,-2318,13882,-2422,32767,32767,26398,-10914,-351,24703,-7634,-1469,32443,2501,-5605,-9581,2622,22480,31922,14855,-2636,-388,-5055,-1617,18431,32767,3915,-5077,-7267,28691,-5505,6699,29537,-7301,29185,-5627,-5997,6134,29723,-11596,-2754,19932,-6537,6337,30678,-7019,15740,-9073,32767,20781,-9286,14648,-8084,-9409,-4213,21014,-2095,32767,19036,-7917,-10629,21166,-5389,6774,-6145,-11742,-8644,32767,32546,32767,31190,-6066,12226,12098,-8403,-7529,32767,-3501,32767,32767,-11945,9690,8830,19201,3471,-12178,4304,-1600,-5901,-4660,-6359,-2259,-7272,27879,-2254,24518,3436,-1260,32767,-5390,-7791,12529,23813,32767,-8893,-10730,24982,32767,32767,20333,-9256,2098,32767,32767,-2149,23049,10998,22941,-1771,138,-4573,24578,22941,-2971,-10292,-11064,122,29454,-10823,-2281,-245,-4771,-11664,30401,16023,11760,32767,25253,-5419,7621,8423,25362,-11005,-1631,-9052,-3415,10252,-11491,27158,17645,32214,-7670,8545,15386,-7474,32767,-5656,-9004,-1229,-89,20228,-1143,-1166,-12129,-5918,32767,-4216,-6743,-3332,-3806,-7558,32767,24049,17624,26883,19286,-3693,20782,32767,-4878,9870,32767,-5911,-11779,-9051,6976,24913,26027,-7842,1510,22881,27622,15899,32767,20596,32767,-7797,-11652,-7245,13707,-11962,-620,32767,32767,-7366,1629,16419,13839,-8107,-682,32767,-5569,4755,-9901,-8858,30040,3279,-6392,14000,-6721,-6852,-3167,9011,-9905,-3835,-9957,30715,-11054,-236,-2230,-4913,-5992,-8944,-7718,11121,14235,-3225,-6428,-11528,-4435,-9965,-7225,-8747,20940,-1863,-8261,20542,12178,-10645,9504,-8724,29856,-11504,8848,-2053,524,-10400,15221,-6272,12137,-2352,-503
9,11674,5759,26325,26985,2371,21884,5092,14717,-5368,16734,9718,-7411,19372,31136,1836,4588,19657,32767,-10401,-7724,32767,32767,2715,-10760,11552,-9342,2432,-8162,1631,-9210,-8608,32767,15163,-3157,10139,9924,-11292,-4405,32767,-9113,26081,-4765,25560,-11164,-4700,24114,572,21997,15059,13197,-1810,16079,31931,3270,-5956,-4180,26247,15953,-3782,4431,5854,32767,-3295,11953,-9044,6548,-6792,26393,-11805,-5760,-8168,7998,32767,32767,8701,31102,13780,-697,16885,-5574,-2567,16492,-1450,-9541,-2750,-7693,22667,32767,5720,3126,-6393,32767,12166,32486,4979,20028,20789,32767,-8404,26818,-8700,23178,-11082,13033,14844,-11901,-8309,32767,9935,-8869,6919,-7376,-2345,-8117,23199,-1435,-3527,30343,32223,-43,26102,-4305,20605,19600,32767,-7752,-10960,24611,-8400,13595,32767,-6480,20347,-4382,31179,-3695,5995,13079,1147,21532,-8416,7355,28305,18143,-56,-5910,-2357,-6804,-3345,7144,-4567,23250,-1964,-2870,-8574,-3556,-1348,-7699,4440,-9833,-2980,19710,22707,-1752,-11611,-4721,20072,22656,-5050,-11847,25128,-6539,1960,5214,-7179,8743,-9957,27390,32767,20198,32767,6284,-9058,32767,-3167,10593,20488,-11339,18733,-2280,-11302,-3773,21576,25970,3344,-1002,-7067,15719,5948,-3047,28800,22802,-637,-10698,-377,-1364,7628,9944,7719,17682,14244,31101,32767,-3632,-9875,-11569,-6926,-1428,32767,20224,32767,-5352,32767,11967,28994,10018,17097,-4062,24105,-4297,-838,-5325,-8579,10227,15663,9386,-11635,1563,3617,26679,-3273,26903,23774,15588,-7502,9062,12895,11115,-7463,22110,-2802,32767,7404,32767,32767,27685,12593,17828,-3913,27652,9497,-5707,-352,31339,-1744,15518,24816,-583,-4328,-769,10364,-8953,12479,-347,9254,-318,1929,625,2881,21427,32767,32767,-10176,-4171,9891,14,-4791,16147,30109,-10683,-4739,28665,32767,-10385,-2427,23714,7950,32767,2609,-8526,-5464,11687,27239,32767,32767,11183,17756,-8016,32767,-3155,-2669,-788,13981,27584,700,-6478,1031,32767,-1417,-4795,18996,-2661,-1890,-1648,-4873,29189,-10386,15660,-4506,24786,-11720,-9506,-7244,32767,5023,-2729,26543,-2446,-6865,1569,-11065,-555
0,-7384,-6577,-3319,32767,32767,-6522,-5563,6261,24537,-2150,9551,18940,31193,18192,32767,22792,25701,-6760,17316,-5816,23970,-7967,913,-11507,-8893,-521,-1127,-3761,23996,-5515,-2234,-4498,30280,-6429,-7796,32767,15968,32325,17204,17057,32767,32767,-6081,32767,-1494,16627,7006,20284,13555,-8577,15497,32767,-6630,514,-5893,-3534,-650,26653,-4890,32767,23670,22548,-5601,32767,-168,32767,-946,26799,-3690,3558,32767,-5442,32767,4636,-2672,13242,-514,-6377,-3083,-10582,-113,-700,-10669,-8427,16028,-7948,-11359,-2426,2273,15124,-489,17014,2720,-4282,-1594,32767,5766,-599,13975,-10337,-3966,32767,-1677,12070,31924,-2615,-7695,-6653,-10729,-9763,14374,-158,32767,-5379,10108,-6573,26195,-3295,-10809,-444,-4074,13430,11716,3464,-2428,32767,-11226,32767,-4822,8761,-7407,-1772,-1048,20883,-6444,2571,-5453,2007,6945,-8310,13909,-11885,695,-3266,-9633,-8034,20867,26571,-3889,32767,-10732,17839,-5007,-137,30532,21573,-7645,-11757,519,-8146,25802,25607,-4946,-99,-1555,-8595,-9334,-3852,32767,-981,32767,-134,18627,-7329,25169,32767,22859,31901,30094,-10786,31509,18388,10974,10209,12634,5977,9634,-3447,-731,-4252,4191,14887,-5059,6078,-10482,2727,4479,-8203,-9282,10007,22974,3356,-1064,3709,4563,-7490,10204,-3956,-2015,5129,-4563,-3198,-3842,27480,32767,-9680,-2621,13340,32767,-12217,-1634,20254,-6594,-7935,-10100,5815,-8257,-10533,-6037,18752,-10111,28863,786,-9032,17234,-4083,3062,28965,-2213,9045,-9606,-12278,7528,25201,18551,19611,14462,-2985,-9682,-8549,2551,-7583,-11446,-7163,-2705,18698,32767,-2282,-748,13642,30346,8424,32767,-2038,-7807,30246,-4627,-5152,29589,-1020,-8134,10336,-11481,-8026,-2294,7816,1758,-10997,-7880,9893,17770,32767,-5251,-8034,-4301,23209,-7385,16595,14154,-4438,23567,-12203,32767,-3709,-6610,11838,-2972,-1965,8352,-6887,-3290,4435,23629,32767,1056,32767,-11415,-4624,-8400,32767,32767,-5592,7650,31457,-3123,-5711,11139,-9549,27537,11326,32767,4050,1547,18124,-8137,-4141,32767,-7152,-10830,-2379,-645,-10441,20418,-2438,-3189,32767,2419,-5860,-396,21710,-1
855,12815,-3747,-12012,-2319,5399,8352,-8091,6031,-5078,26261,-3028,30071,8956,22998,-6809,-11731,9602,27811,21069,-180,-12147,-525,32737,-303,-8347,1626,18650,-797,-11360,-4016,12395,32767,-4875,-10935,25988,-7689,32767,32767,-10865,-6757,17259,-10619,-11728,-2994,-10313,-4276,-7231,-6515,-11793,29302,32767,-2068,-3490,31187,-4247,-3048,11879,19531,-5165,-4809,-8868,-2288,7763,14678,20217,-1428,30700,11771,-2028,-5978,-9136,-5832,-11137,19570,-320,30740,30747,-1510,2300,-4098,-10141,16329,-7503,26626,-10079,31378,10904,-6319,-283,32415,2474,-5310,32767,32767,26381,-10156,-6891,21054,-2702,16648,3382,6866,-7383,-653,707,-2006,-7959,-8813,-11618,27319,-590,-1651,-707,-5057,17802,32767,32767,11843,14855,-6501,-8427,-3146,21753,32767,-42,11878,-8166,16989,-1254,-7875,-9912,-5352,32767,27563,-3244,32767,32767,1628,23553,-4910,12497,32767,-8441,-6420,3449,7463,5091,-1462,1880,-12217,-8601,-6575,32767,10756,7787,25012,30957,-10440,-3512,-3350,-1480,-6866,-8046,-4955,-5000,-3653,-12307,-7786,-3605,23044,-4034,-7079,32767,-11623,-4612,-10382,-6812,104,24481,-392,32767,31348,32767,-10930,20542,-894,20394,32767,32767,29172,-10062,32767,32767,26556,29210,-845,-3856,-11067,-38,-9507,32767,23955,-2701,-2781,17766,32767,26654,2550,-3385,15210,10634,-6425 diff --git a/tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu1_input0_int16.csv b/tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu1_input0_int16.csv new file mode 100644 index 0000000..ade8a6e --- /dev/null +++ b/tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu1_input0_int16.csv @@ -0,0 +1 @@ 
+-14290,-1344,-9247,-3998,29680,-10685,24151,1595,1396,-21747,-4033,20408,8965,23216,16468,-22208,27869,23732,-30015,-25535,-29389,-12669,31044,-1990,-27455,12413,7854,6804,25847,7623,10729,23929,-13551,-19833,17041,7779,-10466,-15560,-16775,1657,-28675,-18215,-28698,-23778,965,-8829,17514,-15479,18013,-22533,-3387,-8638,-12606,-7569,-28086,-4349,20862,-3831,17691,24157,22338,14101,2217,991,13801,15107,-18032,17306,-5110,-32280,-30209,16048,4432,32691,-19256,-23294,-13876,-20650,-9393,7454,1455,-22768,-20247,-676,-9922,15125,-14512,24125,-9232,19086,-3536,-9020,-15569,8738,31918,2732,-20808,-28916,-395,16700,16339,-19251,23156,20859,-27218,23125,-3400,-23640,21007,-20096,-31209,28027,-32555,8518,-23553,-8299,1632,-31434,26699,10543,3146,-12542,7212,18449,-17558,-7965,-10202,-25124,28768,-1204,-10343,21813,4224,-6336,-27004,23816,-622,19484,-4601,16215,21929,27180,11458,21954,-18290,-12159,6265,-27766,-20301,3035,11286,-31777,-839,-6620,18961,25458,-8199,-32456,14015,-31230,-7035,3910,12141,30161,-31495,-10282,-23754,-10431,-24310,19087,20198,7657,-11414,-4442,26985,15330,-2942,-8318,274,-27811,-9715,24578,29329,-13412,-3628,-26276,-19414,-13386,25231,-23241,7014,-16280,990,-31560,6644,15056,-27656,-23330,946,14465,-3725,7561,5959,14296,22295,16209,31688,-16083,22271,2871,-25255,-31090,17397,-30771,22420,8752,-1035,16627,-10764,29990,-4099,-2495,21612,26142,10448,-28856,-21437,-19635,13917,2954,-27340,-9110,20640,17651,12513,-973,-27409,2604,-19584,24916,-17096,-1079,-29320,4031,32278,-21755,-25662,18009,-15156,-6746,28537,23171,-10153,-2913,15409,-2222,-17629,-25624,-14861,11197,3056,-13965,28616,3117,-4496,23817,27654,115,-19916,-26766,-8755,7865,-23175,-20900,8441,6344,-21480,5455,27840,21019,27248,4639,-12961,-31865,-32120,-13371,-13013,28415,24618,9579,11819,-22218,-10504,3314,-10996,-5070,-12545,-5682,26218,-11918,-11674,-18796,-18008,-25505,8578,-27292,387,-3056,-19850,17041,-25986,11362,-21924,-28829,-15219,-24823,-16335,4120,-21719,9693,20945,-14618,10331,70
99,9827,5296,-17497,-83,7198,31759,-29193,-10294,20251,23943,-16944,-20424,-9164,-26607,-22243,10700,1403,-31149,-26993,851,-13534,-23225,-31987,4662,15013,7384,5532,25597,7114,14721,19432,-31170,-32253,-19144,-21374,-17738,-26302,4367,30748,29069,-20630,-26516,26022,-30991,16254,-25461,-24353,-13325,-1527,29157,11443,-6501,-20619,-24395,-23242,7299,-29159,-7371,32230,19870,-25627,-19224,-10944,-16756,4224,-13996,29995,-25594,-31102,-20834,28628,-27067,-17735,10582,15905,-23790,32425,-22047,17596,-9648,4815,-31852,23483,-11350,-28740,-12105,-12037,-32624,-8151,10783,-12280,-24868,16818,11672,1403,-28578,-269,-22053,16707,-13858,1353,-19977,-21761,-29404,30061,13515,12398,-17277,-30929,-12284,19851,-17866,32093,-5321,19737,-6153,1062,-9904,-24693,-8600,-20441,31809,27762,28343,-18768,-28455,17908,-30301,-17701,18579,11981,-13244,31216,-10665,19704,10970,22868,-23009,10754,-20695,-15539,-24233,22311,-23504,-20323,22158,-11262,28918,4859,-15163,-7598,26949,-15577,15399,27047,1366,15695,-13387,30031,3275,-12338,-1282,-12435,28731,8741,21757,25623,-2338,-736,-30739,-16534,246,31968,27084,-1164,-15360,-26463,-6232,-5709,11553,118,671,28633,-17237,4451,21735,-12329,-22040,-9996,-26514,-28512,15740,26954,603,-26508,32386,13536,8484,-12180,-7718,-21860,494,5103,-25509,19176,-17881,-27727,-16488,9954,-4658,-13534,20638,28082,27900,9214,4532,-19072,16443,-12488,14106,13553,-5203,24781,24349,24790,-21049,1928,-7122,-32001,7840,27191,-23699,30316,9692,-26799,-14102,-28410,13679,-11442,-31009,17225,-18257,-4953,-25426,-30083,25123,1800,9173,-21843,31074,-19198,2371,-8028,-23401,-20882,-8853,-9617,30682,-16366,-7314,30776,23995,-13670,23718,13361,-5979,-28296,-799,-29642,-27760,2387,-4190,-32300,-23924,-24071,32389,-2556,-18572,-31709,-13511,233,-8504,-373,17606,-27241,-31013,-8056,-26152,21867,13842,10216,17943,-25463,20761,-12657,-31716,-10116,-30773,-27947,22875,-21173,-8051,-24741,6542,26534,6605,-28686,-3614,20117,29848,-12822,-15253,-3529,29944,20550,8357,-29333,-11314,15389
,22633,17027,-21175,7600,-14625,-26570,-27467,4343,-19716,-16638,16030,727,27201,-9807,24938,-8496,14386,-29175,3671,-9061,-29894,-19647,-16238,31702,-24339,10147,-887,9334,-27655,855,-21475,-21994,-17188,14692,3003,8329,5714,-31691,-26077,-8718,-23479,25457,-30901,-21408,-19581,-23706,6549,-30105,-24552,22159,-24333,-14880,-11417,9004,27954,-22333,28358,4308,7251,-17314,28541,25415,-1935,13811,10162,32068,26068,2222,-3005,10734,9596,16333,7142,10950,-25012,-885,-31818,-2507,-4979,-17285,19512,-28420,23471,27603,-22326,1069,24966,-26841,5832,-21714,-20714,-20975,-436,-12982,10349,-15002,22744,30739,25824,-12922,-23959,8586,11763,-19002,-14856,-32216,-28286,30666,6557,29076,-4403,-27119,30049,-28028,-10685,-10965,-31305,-23686,-5505,-22343,-24767,31261,-25085,31675,-22362,-20973,-4780,8666,-16757,11413,9612,6422,-22085,18606,19327,-3486,-6595,-549,-9118,18762,-29856,1610,-20665,-24320,4267,-6867,26421,-11521,31710,-5162,-24897,26797,18430,1781,-5622,19476,19630,-22705,-14311,-16136,31002,14236,-2845,9241,30035,-8377,-21437,21033,11623,-7444,-27196,29765,-6036,-4711,22300,-32290,80,24356,4787,-29191,-31069,21208,-19461,11741,12149,23028,31388,13106,-7209,-2043,-7816,-7686,25040,-11052,22180,-18930,3253,-19986,6726,9677,7947,7379,29439,-8373,14520,-22285,20146,-23772,-21605,24809,-27647,-9383,16325,30640,28607,17609,-15697,11837,7623,16691,1651,-24295,-31570,-12067,352,-21419,25099,30541,-520,-7588,5701,21233,6058,11938,-27272,18636,16412,-22299,20055,-5395,-13510,22731,-3448,-6337,-31911,29303,-14427,-30489,14875,-6262,20073,997,10440,-2055,24586,-30729,6234,13989,7820,-25671,-16101,15513,6694,15499,4236,1675,1523,-3118,27793,-5405,-24157,-24860,6416,20501,-12748,2548,9277,-22375,-29633,-2055,1225,-972,-759,16350,28048,-15394,31179,-9969,857,-28396,3850,-9379,28538,-3638,-25361,-14552,10540,21706,-1803,-6679,-4398,21213,-9945,-15090,17340,16509,-14339,12172,-18102,24992,-2723,-26977,32215,2763,-24661,-15176,6944,9638,14124,16729,-12903,19552,-13029,-19864,30423,16466,
-1627,-21991,-26722,3723,-21932,6934,16025,30710,-2,22156,-3380,-21211,-22123,-29858,-15169,21566,10032,-5999,-13945,-25428,-776,-22687,8025,16676,32024,-15629,-24051,28049,-3638,7630,27532,-896,12837,-29988,20198,18945,4167,-24409,-4971,28053,18726,-24581,392,-29710,-17793,30217,-30154,22066,-9773,-4168,-20355,-32079,13820,-14318,27024,8377,30005,-29250,17723,17509,601,30992,-14870,-5017,-12195,-182,-5904,13552,4659,16927,-30708,16767,-10163,1448,5098,-19744,2942,14305,7836,-26252,-16245,5567,7085,10448,22866,-16449,32590,25258,-12879,-8942,-23831,24305,-31324,-10899,29097,-18928,2785,-8250,-27622,8890,-8380,26412,18735,-7778,10285,20025,8269,-12029,29725,27677,-27219,22626,15934,12318,-22862,21087,-29617,-25735,23287,13560,-17819,-14289,4818,30620,27109,28622,-26606,1661,9729,-30225,-3127,-16040,24359,-25428,12021,16411,-28168,-31365,30997,4019,-10738,-17984,-15399,3021,20963,21375,13458,18799,-31544,29895,3172,12308,-23910,-12302,22372,7927,-26015,-4920,9419,-32435,-213,-6955,-27398,-23193,5171,-24996,-8941,-5122,13732,-20297,3370,25746,-9137,10909,-15122,1401,-4018,-5029,27397,16467,-2897,8013,23564,-30557,-26707,10667,-21705,-22943,26802,-2158,-9014,-6484,-837,10211,-26260,13817,321,-105,-30704,20572,3922,18844,21363,-6463,-30086,7192,-31321,32143,4617,4858,-22901,17625,-19880,-3011,-2946,20255,-4800,-32736,14600,-2975,-15140,-2468,-24469,29937,-10546,2697,27989,21547,20582,11121,12764,22937,-4425,5277,9088,-22195,1186,-25475,-16758,16431,28774,13739,-4516,29471,25354,-25098,-1901,-11920,25486,31321,29319,5342,-20444,-5981,15890,955,-30217,1146,-27152,14178,-3903,30952,232,-8346,-15308,30142,25425,-29399,-19858,949,24215,14438,9929,11089,30431,-23755,-8783,-8422,30386,-868,-17172,15693,-756,9270,6066,-184,-10929,-26712,-3334,8990,-21317,30079,-24384,-8278,-6121,-18238,-13207,-20960,-8716,17819,-23679,-26296,30402,-13161,-14752,-11859,10649,21662,-5808,22741,-3701,-26945,-30228,-2148,-13893,-8682,3976,-8691,27883,24001,27480,14420,-5908,-29033,-27930,10100,-919,
24908,4775,-17327,-29372,15058,16331,-5587,-16746,30633,-26444,25584,16207,16118,-20539,-9176,22156,-14946,-2502,9469,-25051,-25995,-24088,17302,-6168,-23529,10587,-21168,29484,-18848,21626,-29737,19391,-10698,-18196,-14752,-21882,-199,10615,-788,8150,-26121,19536,-7794,-19723,-29406,4644,60,-22659,29796,13124,167,22303,-7069,18567,12025,-3716,29970,8271,24696,7719,-24774,15779,-6863,-855,-17433,30474,-4165,24398,8729,-20546,-3854,29936,-32364,19877,24447,-19938,-32568,14922,-9218,-20537,-24048,6113,-8515,13711,-31890,20474,4790,-7835,1399,8990,18098,-1807,13223,6354,17041,-20772,16777,19640,-9225,9609,7042,12223,16068,-27871,28238,6915,-11792,23681,-6144,11041,-6421,26620,28710,20995,-28935,-930,19647,-20239,-3893,25803,1989,-14859,-25401,2085,17879,25389,11815,-6988,-1028,-13402,-4287,14659,31233,3114,-13458,-19265,22819,-14594,5328,23492,-19356,23212,-14918,-15897,4879,23640,-30742,-7300,15853,-17329,5040,24399,-18607,12519,-24052,32056,16528,-24617,11650,-21430,-24945,-11169,16713,-5553,31630,15140,-20989,-28179,16834,-14285,5388,-16290,-31129,-22916,27648,25885,28630,24807,-16082,9724,9622,-22277,-19960,26825,-9280,29502,26578,-31666,7707,7023,15271,2761,-32285,3423,-4242,-15644,-12353,-16857,-5989,-19278,22173,-5974,19500,2733,-3339,31011,-14290,-20655,9965,18939,26338,-23577,-28445,19869,32681,27900,16172,-24538,1669,27557,30361,-5697,18332,8747,18246,-4695,110,-12122,19548,18246,-7875,-27286,-29331,97,23426,-28693,-6047,-649,-12648,-30923,24179,12744,9353,30879,20085,-14365,6061,6699,20171,-29175,-4323,-23998,-9053,8154,-30463,21600,14034,25621,-20334,6796,12237,-19814,30133,-14995,-23871,-3257,-236,16088,-3029,-3091,-32155,-15689,29725,-11177,-17876,-8834,-10090,-20037,27544,19127,14017,21381,15339,-9789,16529,30005,-12932,7850,31783,-15670,-31228,-23996,5548,19814,20700,-20789,1201,18198,21969,12645,28513,16381,30118,-20671,-30889,-19207,10902,-31712,-1642,28953,32614,-19528,1296,13059,11007,-21491,-1808,28241,-14764,3782,-26248,-23483,23892,2608,-16945,11
135,-17817,-18164,-8396,7167,-26258,-10166,-26397,24429,-29304,-625,-5912,-13024,-15884,-23712,-20460,8845,11322,-8548,-17042,-30562,-11757,-26418,-19154,-23189,16654,-4939,-21899,16338,9686,-28220,7559,-23129,23746,-30499,7037,-5443,417,-27570,12106,-16626,9653,-6235,-13358,9285,4580,20937,21462,1886,17405,4050,11705,-14230,13309,7729,-19646,15407,24764,1460,3649,15634,27841,-27575,-20476,28306,27776,2159,-28526,9188,-24765,1934,-21637,1297,-24416,-22820,28326,12060,-8369,8064,7893,-29936,-11678,28667,-24159,20743,-12631,20329,-29597,-12460,19179,455,17495,11977,10496,-4798,12788,25396,2601,-15789,-11081,20875,12688,-10026,3524,4656,29990,-8736,9507,-23975,5208,-18005,20991,-31295,-15270,-21655,6361,29385,29083,6920,24737,10960,-1846,13429,-14777,-6804,13117,-3843,-25294,-7291,-20394,18028,30664,4549,2486,-16949,29220,9676,25837,3960,15929,16534,31646,-22280,21329,-23065,18434,-29379,10366,11806,-31551,-22028,32430,7902,-23513,5503,-19555,-6217,-21518,18451,-3804,-9349,24133,25628,-114,20760,-11413,16388,15589,31017,-20552,-29057,19574,-22269,10813,32276,-17178,16183,-11616,24798,-9794,4768,10402,912,17125,-22311,5850,22512,14430,-149,-15668,-6247,-18038,-8867,5682,-12106,18492,-5205,-7608,-22731,-9428,-3574,-20410,3531,-26069,-7900,15676,18060,-4645,-30782,-12515,15964,18019,-13387,-31407,19985,-17334,1559,4147,-19032,6954,-26396,21784,26563,16064,26127,4998,-24012,30574,-8396,8425,16295,-30060,14899,-6045,-29962,-10003,17160,20655,2660,-2657,-18735,12502,4731,-8076,22906,18135,-1688,-28361,-999,-3616,6067,7909,6139,14063,11329,24736,31677,-9629,-26178,-30671,-18361,-3786,27331,16085,30094,-14187,30256,9518,23060,7968,13598,-10768,19172,-11391,-2220,-14117,-22744,8134,12457,7465,-30845,1243,2877,21219,-8677,21397,18908,12398,-19888,7207,10256,8840,-19785,17585,-7427,32764,5889,28046,27780,22019,10016,14179,-10373,21993,7553,-15129,-933,24925,-4624,12342,19737,-1544,-11473,-2037,8243,-23735,9925,-919,7360,-842,1534,497,2291,17042,29908,28273,-26978,-11056,7867,11,-
12701,12842,23947,-28322,-12563,22798,30381,-27531,-6434,18861,6323,30497,2075,-22602,-14485,9295,21664,32575,26278,8894,14122,-21251,30793,-8364,-7074,-2088,11120,21939,557,-17173,820,30409,-3757,-12712,15108,-7054,-5009,-4368,-12917,23215,-27535,12455,-11945,19713,-31070,-25202,-19204,26421,3995,-7235,21111,-6484,-18199,1248,-29334,-14713,-19575,-17436,-8799,30536,27539,-17289,-14747,4980,19515,-5700,7596,15064,24809,14469,32710,18127,20441,-17921,13772,-15417,19064,-21120,726,-30505,-23576,-1381,-2986,-9969,19085,-14619,-5921,-11923,24083,-17043,-20667,31081,12700,25709,13683,13566,30877,31510,-16121,30119,-3960,13224,5572,16133,10781,-22737,12325,26817,-17575,409,-15623,-9369,-1723,21198,-12963,27842,18826,17933,-14847,26986,-445,29724,-2507,21314,-9782,2830,27684,-14427,27151,3687,-7082,10532,-1363,-16905,-8174,-28054,-298,-1854,-28285,-22341,12748,-21071,-30114,-6430,1808,12029,-1295,13532,2163,-11352,-4225,29204,4586,-1588,11115,-27405,-10513,27011,-4446,9600,25390,-6931,-20400,-17637,-28442,-25882,11432,-418,28105,-14261,8039,-17426,20834,-8735,-28655,-1176,-10801,10681,9318,2755,-6436,32195,-29761,29575,-12782,6968,-19637,-4696,-2779,16609,-17084,2045,-14456,1596,5524,-22031,11062,-31509,553,-8657,-25538,-21298,16596,21133,-10309,29776,-28452,14188,-13274,-363,24283,17158,-20267,-31168,413,-21596,20521,20366,-13111,-261,-4123,-22785,-24744,-10212,26970,-2600,27460,-355,14815,-19430,20018,28165,18181,25372,23935,-28595,25060,14625,8728,8120,10048,4754,7662,-9137,-1938,-11271,3333,11840,-13411,4834,-27789,2169,3562,-21747,-24608,7959,18272,2669,-2819,2950,3629,-19856,8116,-10487,-5342,4079,-12096,-8477,-10185,21856,29938,-25661,-6948,10610,26978,-32388,-4332,16109,-17482,-21037,-26775,4625,-21890,-27923,-16005,14914,-26806,22956,625,-23943,13707,-10825,2435,23037,-5865,7194,-25466,-32549,5987,20043,14754,15597,11502,-7913,-25668,-22664,2029,-20104,-30345,-18990,-7171,14871,26084,-6048,-1983,10850,24135,6700,29219,-5403,-20698,24056,-12266,-13659,23533,-2704,-
21564,8221,-30436,-21278,-6082,6216,1398,-29155,-20889,7868,14133,30162,-13921,-21299,-11403,18459,-19577,13199,11257,-11764,18744,-32352,30626,-9833,-17523,9415,-7878,-5208,6643,-18257,-8721,3527,18793,27305,840,27940,-30261,-12258,-22269,28564,30618,-14824,6084,25019,-8279,-15140,8859,-25315,21901,9008,27253,3221,1230,14415,-21572,-10977,28385,-18961,-28712,-6306,-1708,-27681,16239,-6462,-8453,28093,1924,-15536,-1049,17267,-4917,10192,-9934,-31844,-6147,4294,6643,-21449,4797,-13462,20886,-8028,23917,7123,18291,-18052,-31100,7637,22119,16757,-476,-32202,-1390,26037,-803,-22127,1293,14833,-2111,-30117,-10647,9858,26335,-12924,-28989,20669,-20385,27462,29217,-28805,-17913,13727,-28152,-31091,-7936,-27341,-11335,-19170,-17272,-31263,23305,26351,-5481,-9252,24804,-11260,-8079,9448,15534,-13692,-12748,-23510,-6065,6174,11674,16079,-3785,24417,9362,-5375,-15848,-24219,-15461,-29524,15565,-848,24449,24454,-4002,1829,-10863,-26884,12987,-19891,21177,-26721,24956,8672,-16752,-749,25781,1968,-14076,27955,32464,20982,-26925,-18269,16745,-7162,13241,2690,5461,-19572,-1730,562,-5317,-21099,-23365,-30800,21728,-1564,-4377,-1875,-13405,14159,31247,28331,9419,11815,-17233,-22341,-8340,17301,28806,-111,9447,-21649,13512,-3324,-20876,-26276,-14188,31236,21922,-8599,31765,32614,1295,18733,-13017,9939,32307,-22378,-17020,2743,5936,4049,-3875,1495,-32389,-22801,-17431,26119,8555,6193,19893,24621,-27676,-9309,-8880,-3924,-18201,-21329,-13136,-13254,-9685,-32627,-20641,-9556,18328,-10693,-18767,26404,-30813,-12226,-27524,-18059,83,19471,-1038,26201,24932,28750,-28977,16338,-2369,16220,31127,27088,23202,-26674,31402,31944,21121,23232,-2239,-10223,-29340,-100,-25204,29232,19052,-7161,-7371,14130,27049,21199,2028,-8974,12097,8458,-17033 diff --git a/tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu2.tflite b/tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu2.tflite new file mode 100644 index 0000000..853fde1 Binary files /dev/null and 
b/tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu2.tflite differ diff --git a/tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu20.tflite b/tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu20.tflite new file mode 100644 index 0000000..5d11bb7 Binary files /dev/null and b/tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu20.tflite differ diff --git a/tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu20_golden_int16.csv b/tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu20_golden_int16.csv new file mode 100644 index 0000000..cb9547b --- /dev/null +++ b/tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu20_golden_int16.csv @@ -0,0 +1 @@ +-2721,-12057,21844,20381,31579,-1311,-11102,-10663,-6755,32295,-2395,11008,-10418,-9728,-1476,32767,25160,13471,32767,32767,21056,-14903,11161,-15430,8629,6513,-11484,-7938,21575,-13253,25699,32767,32767,-1999,32767,32767,27100,32767,-5447,27636,30906,-13118,-15306,-8226,12478,-6417,32619,-11229,-9271,-8267,9307,-4893,-10770,29704,-13012,13332,27773,32767,-5333,-13911,-14995,-1995,6468,-283,-6471,-928,29750,-99,32767,-16394,32767,3801,-10980,-12420,-7994,-7124,14317,5681,-7229,-15981,-1153,9540,32767,22353,31053,30948,-5918,536,-6334,6565,24353,-10641,32767,-8280,32767,13959,-16289,32767,-8283,-6924,28187,3737,-4134,28505,32767,25741,32767,6980,31396,32767,32767,-12951,-1949,32767,10940,32767,32767,28450,2823,32767,-1008,-8228,-8246,-11959,-8397,-9977,-1722,31475,10638,25823,-13722,30981,24103,-8997,-9651,-8400,-15159,6685,12473,-8709,19782,-12396,-6196,32767,25376,-1519,17863,26578,-1185,-15528,32767,-612,11199,-392,-15389,32767,32767,-7951,32767,-7949,26338,1378,-13242,-13252,-10845,-12770,-7317,-8197,11278,-13675,32767,-10996,-6492,-6049,17681,27673,3262,-15008,10948,-4376,-9891,-1343,-7036,-14681,32767,3625,-16301,25988,-11728,-7442,32767,-15504,11132,32767,-10595,-10473,-14468,-4991,25057,12211,29049,-1
1383,32767,32767,32767,32767,-15917,-6286,-14095,-7688,15392,-9311,32767,-5993,28371,-11679,-1797,-3922,6796,-11868,32767,-7020,-9745,-9768,23122,32767,7834,32767,-7476,-12370,1972,-15941,-14776,-1030,10665,32767,16673,-16419,16581,32767,32767,-61,32767,7119,-16087,-8559,4793,-11655,24815,-6037,7377,7623,-14243,-16344,17754,-6334,643,-10436,-7833,-11974,-8329,-15197,-14897,-6767,11415,-5396,5204,-11095,8320,7847,21613,-14548,-240,32767,-13764,14513,-6927,-6356,24140,-5057,-8807,32767,30936,-1102,-11027,-6411,32767,-15390,4226,8154,939,32767,-5744,12306,-13197,-8047,32767,32767,32767,32767,-7227,-12944,32254,-602,1237,-13343,23800,-1097,-11167,-8735,32767,-9954,-9389,23450,1373,32767,-3071,-5201,32767,8671,21844,32767,146,-15921,-9830,17438,18258,-10845,11566,32767,-7952,-11483,-12654,-1223,-11246,32767,19040,-13330,-547,16813,-10080,-1794,32767,6950,-12593,-13445,-4724,-2340,2347,32767,-12686,-11614,3556,29339,-5515,32767,11621,-10069,32767,32767,17769,-1505,1822,-11586,-6201,-6240,-11191,-1123,-15369,10165,-2160,3946,-7855,32767,31036,17249,-6048,-12391,-1056,31018,21151,16688,32767,-4306,-133,-3911,29096,-10203,32767,-6133,-11642,-2436,-16248,-1952,32767,4907,22074,18483,29235,14796,-11723,-3998,-15874,7732,-1582,-9134,-2429,-283,-10929,-3563,32767,-11022,-13570,22939,-620,-16118,32767,-13757,16403,-14570,601,17920,32767,32767,-15877,-1224,32767,-6120,32767,32767,15715,29238,26477,-9043,23782,15906,-11724,-4456,-7686,3935,32767,-12270,9788,28518,-15984,16554,25555,-11631,-7164,32767,27684,7591,32767,-16045,-5832,-12607,32767,-4150,-14998,32767,-9121,32767,32767,1058,-6832,-16066,-14838,-13669,-13095,29528,-7536,14926,20430,5468,32767,15618,-9418,-624,32767,-8150,-447,-6967,2351,27311,-14063,-2277,20793,16388,-292,-8170,32767,32767,-12983,-8391,26504,-15193,32767,32767,32767,-7867,131,-10063,4229,-13533,5899,14066,-12308,-3950,-12953,5825,-11925,-92,32767,32767,27617,-6340,32767,1391,14946,-16009,32767,-13150,26978,10868,-8703,32767,21096,32767,27544,-9764,24614,-1
0654,-5657,32767,-1041,-4656,27098,-13276,28227,-12925,20786,1616,-2374,-2143,-14560,32767,-92,18839,7089,-6767,26122,25459,-6351,32767,-8211,-13493,-8353,-13361,-15939,-7161,-9005,32767,-12126,-1858,16343,14144,-10071,-8378,25685,20681,1912,32767,4360,28195,-13644,21637,-7321,16982,32767,28500,32767,-5239,-15945,-15319,-10492,32767,12298,4402,22288,32767,-9198,-4139,32767,-919,29878,32767,-5306,32767,-14034,-5801,-3503,990,32767,-12494,-11834,-12622,32767,-4912,-10585,-14255,-4098,-339,17974,5041,-13979,-5017,14662,-10063,-6259,-785,12031,-3121,18389,-4465,32767,17630,32767,18352,32767,-1923,32767,32767,32767,-6868,-10418,32767,32767,32767,32767,-12905,7243,-13432,-7091,-13705,-11201,14884,13311,16371,26586,32767,32767,32767,20629,26268,-1397,32767,5535,32767,27112,31445,28768,32767,-13883,32767,10851,-2671,21623,32767,32767,13877,-3812,-13595,-301,31912,18901,-15971,-9170,-13417,-13984,2168,-14728,-12174,-765,20351,16750,21868,24922,-450,8387,-7884,-8492,32767,27532,26268,19000,-6002,32767,-14153,-8328,17706,-13871,-1015,-13580,32767,-6093,-13471,20395,3320,17311,-9467,32767,-592,32767,-4331,-14524,9435,-12660,32767,24992,2218,32767,26499,-13796,4927,-6354,-1991,4445,19094,16239,10608,11594,28317,31627,-395,1997,8472,-14985,24008,-5718,32767,7745,32287,-10276,23258,20451,15896,-234,-3512,12130,-7179,32767,-5774,-4147,5816,32767,-5841,27619,-11280,17630,-3610,25568,-6147,-7805,-551,-799,5441,32767,32767,-14469,3203,30124,-481,-1209,-7542,-4477,-9571,-13224,32767,-1216,32767,16820,-6204,-9949,7220,-3741,26275,-2144,136,18099,6423,15126,32767,-8725,-14392,-5579,-10065,32767,-5716,32767,17128,32767,-9994,-13736,16704,10610,32767,20359,9165,-8390,28247,17404,32767,-3532,-2536,9451,-7405,32767,16840,-7150,-13268,15832,-982,5885,1271,-2533,7874,32767,7235,-11961,-16215,-15672,-3683,-13524,-15118,6471,4393,-11027,-10033,32767,15464,-14307,-11726,-7522,32767,-6314,-14717,27489,32767,-6422,-9132,3216,18503,18255,-3605,32767,-2249,32767,32767,14895,-9740,8793,-3178,31460,126
68,-3234,31393,-7155,19865,32767,594,32767,-11275,32767,-14091,8114,32767,-9045,-3729,-14469,23380,-6389,531,-4937,-1395,-976,-11395,-3802,-7819,11544,-4719,32767,-14404,32767,-5908,-11180,-8869,-6703,32767,11340,32767,14595,-7845,30368,29051,32767,32767,-2630,-2950,32767,6965,19839,8276,-15954,32767,32767,32767,32767,-15761,-14867,28835,-9888,32767,32767,-8125,-7257,32088,-12019,11350,15923,13264,-12460,-4069,8261,-16160,-738,-7439,27132,32767,32767,-6059,32275,4303,32767,-14713,-6512,-6129,32767,-8393,18817,32767,24552,-2552,-567,14851,-2999,2354,6602,32767,-1908,4318,-10864,32767,1967,-10876,32767,32767,15010,32767,32767,32767,-427,13527,32767,-1350,703,-3507,21575,8631,32767,-6524,-8803,-8591,-1717,-9722,32767,14188,26424,-12215,32767,-14841,32767,32767,32767,7400,28421,-12739,12401,-1641,-9134,17557,-7273,23162,3183,-8906,6783,-1977,-13351,-12698,-690,32767,32767,32767,32767,-8583,-10670,-11725,-15087,5708,-1620,24786,18955,32767,-1064,9875,13073,-338,-7995,17655,-13289,15730,32767,31674,7310,32767,-1236,18213,-2776,-15460,4112,-11811,-11909,8050,-234,32767,-2180,11193,15330,-4367,-8180,2496,-9411,32767,-14523,14233,-6664,-5888,22501,32767,32767,32767,32767,-12279,32767,-15985,-5401,-11160,-11951,13336,31457,32767,-15730,29335,-7242,32767,-16337,-5526,13175,22961,-472,-7217,30469,32767,32349,25875,-9741,-4445,-8386,14216,32767,21081,32767,31490,30177,-5826,31000,11931,23095,-15444,9865,-613,-9764,31214,-16215,-1759,32767,29859,-15541,32767,25653,-14284,-5904,32767,-13269,-2664,32767,-4207,-15186,32767,-3076,23346,-896,-537,-8367,-9012,3570,7960,32767,-11720,-13884,32767,5410,-16000,25513,12996,-14214,-10224,-10511,-5532,32767,32767,32767,-3474,-608,32555,18529,-5995,32767,5957,-5382,11970,-12516,18196,-6649,2769,32767,-2027,10128,10056,-14975,3963,-1553,14211,-15259,-9706,-6908,-5010,-2665,22132,32767,-7328,32767,10926,32767,-11353,31313,-7100,21014,32767,32767,-6127,6890,32767,-11583,-2966,20520,32767,-11220,-12041,7827,-2500,21228,-15748,26203,-16063,-1150,-1
5745,-8814,32767,-13149,32767,32767,-12767,-7420,18059,7568,32767,-994,-2123,-13517,32767,32767,32767,30624,-10332,23999,31215,32767,32767,-3378,32767,32767,-2129,18722,-15256,8144,23525,32767,-8168,32767,32767,21339,-12116,17660,3079,-16133,-3936,-5112,17250,-12007,25139,-5367,23823,5152,31949,24004,5997,3501,-5579,-8808,32767,18163,32767,-4264,16646,-8635,17495,32767,-13126,32767,-12060,-5981,-5622,-13830,-10037,-885,32767,3002,13786,21424,26836,-15661,-10851,32767,-12231,32767,32767,-7234,-3944,-15921,32767,-15850,-319,30772,32767,22956,-15653,-923,-6863,32767,32767,-12531,24068,32038,11119,10863,32767,-14527,32767,-7642,20008,-15541,-8670,-2789,22432,32767,32767,-15586,32767,-6585,-10053,4090,-1339,8435,-9443,-14138,32767,22590,1056,-10299,32767,26759,14749,-16123,-5177,-9734,11710,-8606,32767,15993,-8481,6488,-2179,4884,-10088,24475,32767,9073,32767,-7503,3236,618,-13221,32767,-15050,24294,28584,32767,21980,-10595,32767,32767,-6510,32767,28322,14273,861,-6992,-9580,-9949,-16127,27157,2578,-2835,-6611,6438,32767,-12331,-14712,7420,12149,32767,28828,21761,-984,3246,-11716,-1389,-2167,32767,22320,32767,-3318,-2031,-2664,32767,-3086,-5758,-7709,14601,-7915,32767,-11270,-2098,32767,27376,20701,-13589,-1397,14645,16537,32767,-7870,-6449,31976,-7086,29022,29290,32767,2002,-8868,-1242,-9522,-5972,32767,8155,9398,-12284,-5706,32767,4040,-128,8010,22020,32767,-8370,32767,-3550,31544,32767,10399,-7571,-13053,32767,-4531,18148,2958,-6759,12192,-3698,-11097,32767,2913,23792,3149,19609,32767,-14934,-7359,21506,-1460,12291,-2732,-4916,-1689,30367,-15901,27055,32767,-15225,-2526,-8163,32767,-6211,-16057,32767,-14067,24517,-4895,32767,10185,-8011,22819,32767,32767,-9512,7136,32767,32767,15281,-7193,-6945,18325,-7420,-4015,-2899,32394,32767,-13032,28954,10588,-7647,-7702,-8562,32767,-13241,2607,-16289,-15,-10920,-10613,17275,-13951,-15304,2505,24247,5428,9743,-10628,32767,25389,28520,-4927,12435,-6957,-8036,32767,32767,16505,20820,-489,-12956,-15450,-13736,9785,-7583,-2692,16529
,32767,9895,-5210,21895,-14084,32767,-10132,32767,32767,30604,22735,32767,25366,32767,32767,32767,-9258,-16071,32767,32767,30782,32767,32767,-10225,17781,-11393,-2810,32767,-11498,32767,21421,1997,23318,-7995,-12356,-10277,32767,8741,30621,32767,-13759,-13417,32767,-8468,-6042,32535,32767,-9986,6942,-14887,-9568,-2852,-8011,32767,32767,-14297,-14115,-1954,32767,32767,-15172,32767,-11084,32767,-9596,1401,-777,-9914,-15961,32767,10794,-15500,28701,14797,32767,-11503,32767,32767,11722,32767,-13671,13550,-10621,-11730,32767,8489,32767,12639,32767,-13377,32767,-5063,32767,-16002,21546,-698,3216,-6590,32767,32767,29372,32767,-16076,17821,-3420,5162,32767,-407,32767,-1595,-10745,-9040,-556,-8411,-5806,-16395,32767,-7700,13188,16746,-1409,12247,-9031,32767,-7672,23311,-2051,-10228,-6875,-5186,-16083,6824,171,6630,27668,-15072,-5322,-5189,32767,-4383,-15443,-1358,20249,32767,-3607,-14497,32767,-7387,-8321,-14879,11373,18061,-14375,11365,10076,-6563,32767,4797,-1187,-8859,-1578,32767,28292,-13665,3911,32767,26546,-3265,32767,-8864,-1024,32767,-7824,-12179,-4563,-6434,-16111,-3685,-160,-13854,32767,32767,26616,7412,-1104,4636,32767,32767,21853,32767,32767,32767,-6914,-7525,32767,-5778,-10274,8813,32767,-15376,20336,14298,-11821,32767,32767,32767,-3049,-15468,-4913,-2013,32767,-9747,-14533,-14521,-12376,32767,10794,-8667,-3987,-13909,-7572,-1584,-11663,-1176,-3119,-8681,32767,7241,21769,-454,2069,12057,21848,12711,-5616,-2593,-13837,23092,9051,5294,-14395,32767,32767,22918,-4031,19777,32767,-10050,27864,-5625,-6620,32767,-6757,22286,-6587,32767,21776,-15668,-8221,-3691,-9491,-15167,-14000,32767,32767,-10829,-12170,-4842,15517,2644,-7234,-15987,26775,1902,32767,32767,32767,-9151,-7348,-11695,32767,-887,-7075,-14401,19814,-9135,-7205,-4272,-14439,2979,32767,32767,-10862,-10166,11538,32767,-1548,14677,-529,16827,15601,32767,6088,13252,32767,-8758,32767,32767,27271,-2405,7077,-8536,32767,17362,-9556,-9883,32767,-10191,14245,-14166,1999,-325,25314,-6280,-15174,16365,-8468,-9313,-813
,28835,19792,-8293,31810,29461,32767,4931,-4605,7603,19290,-15617,-15762,-2613,32767,30353,32767,-16135,-4598,-3175,-7051,17605,-11140,15321,26034,-3029,15958,32767,-4666,32767,-1795,-169,-9306,10816,-15760,-6630,28676,32767,-4769,32767,-4962,32767,-8257,32767,4865,-15769,-2579,-7560,-13211,-12912,-771,4256,-12815,-8378,-13484,11154,-5906,-2503,29292,-8374,-5603,-6984,28672,14253,10176,-5298,32767,32767,-7616,-12178,2138,10694,11169,-8800,-16013,6059,-149,32767,15842,6361,-6142,-8897,32767,32767,22829,23777,32767,32767,26449,12594,32767,13200,32767,-13002,-8450,-1763,-6799,-9092,-4135,28717,32767,-7378,32767,32767,-10133,32767,-8179,32767,-6277,5266,-15745,-2291,32767,30109,-4325,18948,32767,89,3565,32767,32767,-4497,22839,-1926,-9911,-10462,12085,-3328,20440,-949,32767,-8096,32767,32767,-14408,-5961,-7617,6043,-9625,-7029,-3219,-1672,-3696,-9444,-2033,-13336,15589,29973,21826,32767,1743,-1281,-15569,-14327,32767,32767,32767,28411,-15933,-11341,-10809,18911,-1213,3796,-3060,-7679,-5581,12527,19452,-8582,32767,10345,-8230,-12461,12917,-15561,-3761,22394,32767,29457,21970,-9090,12127,12944,-6621,-3049,-4150,32767,16673,-13589,24276,-2912,32767,26752,-4636,31318,-13639,19711,32767,-10055,22268,-8046,14675,32767,-10506,21639,-3050,1025,31010,32767,32208,-786,-6796,-15804,32767,-929,-4436,-2186,13078,31996,2480,32131,-8934,-11023,-5038,32767,17863,-11923,23187,-9096,-6591,-15524,-13596,-15253,32767,32767,32767,-5052,-6013,-4133,-9942,11896,-11494,-1284,-15783,32767,-6834,5617,25079,5081,-15436,-13505,26908,-5764,13240,4432,-12469,12274,-13001,-15830,-9171,12755,32767,-5532,-5003,-1931,10891,-807,12462,-9987,-5871,-8188,-7837,-2381,-6925,-11140,4102,7675,-9083,-99,-10671,-16052,511,28893,-15549,24160,13753,5845,-7567,32767,-2178,3643,32767,6444,-10177,-2215,-15926,-9982,-5739,32317,-15264,-7613,32767,32173,32767,15720,-1523,-938,12728,24004,10350,-15611,-3410,32767,-2023,32767,32767,-4055,32767,-9823,21925,-4812,-7052,-7163,19787,32767,-11186,7127,-10039,28203,424,-15308,
-1929,-10855,17068,-5196,-6410,-12985,-3073,-4078,32767,28029,8618,4328,32767,13004,-7223,19864,-13699,20307,1309,2945,32767,6721,-7798,16100,19025,16599,-9230,22166,32767,-5966,-5023,32767,-10564,32767,32767,-3653,-149,-14340,-3253,-377,-4673,-14193,-4308,7425,32148,1791,32767,-452,-6307,22452,32767,-9170,-10732,6603,-10618,-2547,18682,32767,-11257,-9639,-297,30852,-16045,-13911,-7634,-4600,4646,-8158,-7829,-3947,-11740,9533,-9652,-9158,-4954,3678,18104,-5510,-14077,-8260,-6828,-2251,-13160,32767,-6743,-13762,32767,-11201,-7126,-14906,9975,-1154,-14608,23917,-10713,-7096,31046,-6733,16880,13692,-1698,32767,10896,-5114,174,-287,32767,19037,16723,-24,-10166,32767,-6053,20483,-2366,27969,-9937,-9004,-8737,10947,-6821,27149,31594,-8534,-5034,-3807,-12201,-7172,-3839,-13392,32767,-2784,32767,5741,32767,-6582,-7677,12947,-6613,-6495,-2548,28133,-11137,-12794,3856,32767,-15652,6531,-8594,21669,25813,32767,32767,32767,11183,32767,-4259,32767,29856,-5201,32767,6446,13510,-5723,-11310,-5342,-9032,29029,25091,32767,32767,32767,32767,32767,32767,32767,-8039,-13117,323,-10108,3474,-15530,-7435,-13646,-13087,-11046,32767,27083,-12760,877,31706,-6630,-13091,25774,3315,-9302,32767,-11749,17212,-16224,-12805,-13139,-711,-8084,32767,-15396,18752,-8687,32767,32767,-9281,-9056,-11682,5639,15373,1480,-12629,11199,-384,-7985,18925,32767,27494,25804,32767,-8625,-15031,13418,32767,-12808,-8031,31994,-6586,32767,-5018,-13189,-2364,24150,32767,29601,-9629,-15946,-6432,-10694,-14050,14022,-8054,-2279,24110,-16186,-8810,24085,9180,-7438,-631,-11930,32767,-9621,28245,32767,-1839,21059,-7135,32767,-10200,-15166,-16312,-8662,-5464,-14277,32767,-15810,-14400,26484,-9729,-15095,-11689,-8080,28970,-13909,-3352,-6517,-1115,7645,-13131,32767,-5071,1298,32767,-792,17359,-9255,32767,-10506,1234,10448,-5192,-1812,29901,-447,11628,-3164,-9859,-11482,15154,32767,26335,32767,21120,-1713,-10325,-3016,-6429,-13526,-15990,-1767,-12877,-9,32767,32767,14752,32767,-15554,-11412,-1363,32767,-5575,32767,-9198,3276
7,-592,-13469,-15124,-8266,16043,-8185,-12338,-13938,-7074,-2478,17120,19147,22866,-5109,-4714,32767,-5234,-2008,14573,-3985,-14512,32767,9034,-9440,-5070,-7368,32767,-4617,32767,32767,-6346,-10182,29595,-4963,32767,-8149,32767,29722,-15451,-10605,14596,-9134,13838 diff --git a/tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu20_input0_int16.csv b/tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu20_input0_int16.csv new file mode 100644 index 0000000..fcdf2d2 --- /dev/null +++ b/tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu20_input0_int16.csv @@ -0,0 +1 @@ +-5418,-24005,13047,12173,18861,-2610,-22103,-21229,-13449,19289,-4768,6575,-20742,-19368,-2939,27640,15027,8046,32214,31112,12576,-29671,6666,-30719,5154,3890,-22864,-15803,12886,-26386,15349,24251,23952,-3979,20960,22876,16186,31324,-10845,16506,18459,-26116,-30472,-16377,7453,-12775,19482,-22355,-18458,-16459,5559,-9741,-21442,17741,-25905,7963,16588,32414,-10618,-27695,-29853,-3972,3863,-564,-12884,-1848,17769,-197,32419,-32638,32494,2270,-21860,-24726,-15915,-14183,8551,3393,-14393,-31817,-2295,5698,28645,13351,18547,18484,-11783,320,-12610,3921,14545,-21186,21646,-16484,23191,8337,-32430,31853,-16491,-13784,16835,2232,-8230,17025,32253,15374,29448,4169,18752,23729,20712,-25784,-3881,24229,6534,23238,21746,16992,1686,25768,-2007,-16382,-16416,-23810,-16717,-19864,-3429,18799,6354,15423,-27320,18504,14396,-17912,-19214,-16723,-30180,3993,7450,-17339,11815,-24680,-12335,31103,15156,-3024,10669,15874,-2359,-30914,26067,-1218,6689,-780,-30638,32697,21670,-15829,27675,-15825,15731,823,-26364,-26383,-21591,-25424,-14568,-16319,6736,-27225,20008,-21892,-12925,-12043,10560,16528,1948,-29879,6539,-8712,-19692,-2673,-14007,-29229,20514,2165,-32453,15522,-23349,-14816,30458,-30866,6649,26264,-21094,-20851,-28805,-9936,14966,7293,17350,-22663,30947,32327,27016,32647,-31690,-12514,-28062,-15306,9193,-18537,26547,-11931,16945,-23251,-3577,-7808,4059,-23628,31737,-
13977,-19401,-19447,13810,30174,4679,19591,-14884,-24627,1178,-31737,-29417,-2050,6370,26951,9958,-32688,9903,27532,22786,-122,28290,4252,-32027,-17041,2863,-23204,14821,-12020,4406,4553,-28356,-32540,10604,-12611,384,-20776,-15594,-23838,-16583,-30256,-29658,-13473,6818,-10743,3108,-22088,4969,4687,12909,-28964,-477,24547,-27402,8668,-13790,-12655,14418,-10068,-17534,22531,18477,-2193,-21954,-12763,32319,-30640,2524,4870,561,22743,-11436,7350,-26274,-16021,19953,30940,27257,27682,-14389,-25771,19264,-1198,739,-26564,14215,-2184,-22233,-17390,25730,-19817,-18692,14006,820,27798,-6114,-10354,27921,5179,13047,23395,87,-31696,-19570,10415,10905,-21592,6908,26245,-15831,-22861,-25193,-2434,-22389,29717,11372,-26539,-1089,10042,-20068,-3572,31268,4151,-25071,-26767,-9405,-4659,1402,30475,-25257,-23123,2124,17523,-10979,32487,6941,-20047,22158,27754,10613,-2996,1088,-23067,-12346,-12423,-22281,-2236,-30599,6071,-4301,2357,-15638,27831,18537,10302,-12041,-24670,-2102,18526,12633,9967,25499,-8573,-264,-7786,17378,-20313,29598,-12210,-23177,-4849,-32348,-3886,23773,2931,13184,11039,17461,8837,-23339,-7960,-31603,4618,-3150,-18185,-4836,-563,-21758,-7094,20136,-21943,-27017,13701,-1235,-32089,24438,-27389,9797,-29007,359,10703,31261,29653,-31609,-2437,21843,-12185,23587,22105,9386,17463,15814,-18004,14204,9500,-23341,-8872,-15301,2350,23987,-24429,5846,17033,-31823,9887,15263,-23157,-14262,21761,16535,4534,30828,-31944,-11610,-25100,28603,-8262,-29860,28797,-18159,31967,32729,632,-13601,-31985,-29540,-27214,-26070,17636,-15003,8915,12202,3266,29352,9328,-18751,-1242,19678,-16226,-890,-13870,1404,16312,-27997,-4534,12419,9788,-581,-16265,21376,24394,-25848,-16705,15830,-30247,32335,30344,30945,-15663,78,-20035,2526,-26943,3523,8401,-24504,-7865,-25788,3479,-23741,-184,26492,30422,16495,-12623,28351,831,8927,-31873,31177,-26180,16113,6491,-17326,22670,12600,32654,16451,-19439,14701,-21210,-11262,32265,-2072,-9270,16185,-26431,16859,-25733,12415,965,-4726,-4267,-28988,26264,-183
,11252,4234,-13473,15602,15206,-12644,23029,-16347,-26864,-16630,-26601,-31732,-14256,-17928,25515,-24141,-3699,9761,8448,-20050,-16680,15341,12352,1142,20499,2604,16840,-27163,12923,-14576,10143,26722,17022,22593,-10430,-31745,-30498,-20889,22507,7345,2629,13312,23758,-18313,-8241,24656,-1829,17845,27140,-10564,24416,-27940,-11550,-6975,591,30168,-24875,-23560,-25129,31648,-9780,-21074,-28381,-8158,-674,10735,3011,-27830,-9989,8757,-20035,-12460,-1563,7186,-6213,10983,-8890,24042,10530,30360,10961,27026,-3828,29947,30572,26989,-13673,-20742,24590,27166,32016,23205,-25692,4326,-26741,-14118,-27285,-22300,8890,7950,9778,15879,23391,21288,21129,12321,15689,-2781,23776,3306,21167,16193,18781,17182,30844,-27639,21697,6481,-5318,12915,31070,27263,8288,-7590,-27067,-599,19060,11289,-31797,-18256,-26711,-27840,1295,-29321,-24238,-1523,12155,10004,13061,14885,-895,5009,-15696,-16907,26634,16444,15689,11348,-11950,27489,-28178,-16580,10575,-27615,-2021,-27036,30732,-12131,-26820,12181,1983,10339,-18847,22684,-1178,27277,-8623,-28915,5635,-25204,28002,14927,1325,31497,15827,-27466,2943,-12651,-3963,2655,11404,9699,6336,6925,16913,18890,-787,1193,5060,-29834,14339,-11384,24821,4626,19284,-20458,13891,12215,9494,-466,-6992,7245,-14292,32365,-11496,-8256,3474,26650,-11629,16496,-22457,10530,-7188,15271,-12239,-15539,-1097,-1590,3250,30560,24619,-28806,1913,17992,-957,-2407,-15015,-8914,-19055,-26328,31686,-2421,24041,10046,-12352,-19808,4312,-7447,15693,-4268,81,10810,3836,9034,20786,-17371,-28652,-11108,-20039,23685,-11379,22184,10230,28396,-19897,-27347,9977,6337,22827,12160,5474,-16704,16871,10395,21581,-7031,-5049,5645,-14743,21055,10058,-14235,-26416,9456,-1955,3515,759,-5043,4703,21687,4321,-23814,-32282,-31201,-7333,-26925,-30099,3865,2624,-21954,-19974,24535,9236,-28483,-23346,-14976,21162,-12570,-29300,16418,23428,-12786,-18181,1921,11051,10903,-7177,24421,-4478,26116,31383,8896,-19391,5252,-6327,18790,7566,-6439,18750,-14245,11865,23537,355,29593,-22447,32085,-28054,48
46,21518,-18007,-7425,-28807,13964,-12719,317,-9830,-2778,-1943,-22687,-7569,-15566,6895,-9395,30899,-28677,30537,-11763,-22259,-17658,-13345,25917,6773,21467,8717,-15618,18138,17351,30815,20882,-5236,-5873,24890,4160,11849,4943,-31763,29468,26583,32762,32247,-31378,-29598,17222,-19685,19577,27818,-16175,-14447,19165,-23929,6779,9510,7922,-24806,-8101,4934,-32172,-1469,-14811,16205,24431,30779,-12063,19277,2570,26043,-29291,-12964,-12203,26802,-16710,11239,32318,14664,-5081,-1128,8870,-5971,1406,3943,22558,-3799,2579,-21630,30856,1175,-21653,23985,27887,8965,24908,29720,30344,-851,8079,24984,-2688,420,-6982,12886,5155,22620,-12989,-17526,-17104,-3419,-19355,29107,8474,15782,-24319,29738,-29546,19867,23948,24369,4420,16975,-25362,7407,-3268,-18185,10486,-14480,13834,1901,-17731,4051,-3935,-26580,-25280,-1373,25771,22950,26148,27009,-17088,-21243,-23344,-30037,3409,-3225,14804,11321,23297,-2118,5898,7808,-673,-15918,10545,-26456,9395,25478,18918,4366,27316,-2460,10878,-5527,-30780,2456,-23515,-23709,4808,-466,31160,-4340,6685,9156,-8694,-16286,1491,-18737,20903,-28913,8501,-13267,-11723,13439,26910,26953,22106,23937,-24446,26747,-31824,-10753,-22219,-23793,7965,18788,21247,-31316,17521,-14419,25752,-32525,-11002,7869,13714,-939,-14368,18198,28546,19321,15454,-19393,-8850,-16696,8491,26331,12591,20030,18808,18024,-11599,18515,7126,13794,-30748,5892,-1221,-19439,18643,-32282,-3502,25010,17834,-30940,25877,15322,-28438,-11754,22915,-26418,-5304,21105,-8375,-30234,19808,-6123,13944,-1784,-1070,-16658,-17942,2132,4754,23248,-23333,-27642,29104,3231,-31854,15238,7762,-28298,-20355,-20927,-11013,22989,30352,23421,-6917,-1211,19444,11067,-11935,31822,3558,-10715,7149,-24918,10868,-13237,1654,30558,-4036,6049,6006,-29813,2367,-3092,8488,-30379,-19324,-13754,-9974,-5305,13219,20916,-14590,30630,6526,31237,-22603,18702,-14135,12551,24337,19616,-12199,4115,23034,-23060,-5905,12256,23782,-22337,-23972,4675,-4977,12679,-31352,15650,-31980,-2289,-31346,-17547,25557,-26179,19627,2809
1,-25418,-14773,10786,4520,19580,-1978,-4227,-26910,29044,20742,20646,18291,-20569,14334,18644,24274,21710,-6725,24071,27418,-4239,11182,-30373,4864,14051,19623,-16262,20282,24802,12745,-24122,10548,1839,-32119,-7837,-10177,10303,-23905,15015,-10685,14229,3077,19082,14337,3582,2091,-11107,-17536,32455,10848,30631,-8490,9942,-17191,10449,32685,-26132,20208,-24010,-11907,-11193,-27535,-19982,-1761,29715,1793,8234,12796,16028,-31180,-21603,30009,-24350,24885,25235,-14402,-7852,-31697,24811,-31556,-635,18379,26206,13711,-31164,-1838,-13664,23330,20346,-24947,14375,19135,6641,6488,28750,-28921,26884,-15215,11950,-30941,-17261,-5552,13398,31035,20992,-31031,23891,-13110,-20014,2443,-2666,5038,-18800,-28147,22093,13492,631,-20505,29000,15982,8809,-32099,-10306,-19380,6994,-17134,29280,9552,-16885,3875,-4339,2917,-20085,14618,25799,5419,25595,-14938,1933,369,-26322,29868,-29962,14510,17072,26542,13128,-21093,20623,24702,-12960,25666,16916,8525,514,-13921,-19072,-19807,-32107,16220,1540,-5645,-13161,3845,23812,-24549,-29289,4432,7256,32040,17218,12997,-1959,1939,-23325,-2765,-4314,31668,13331,29817,-6605,-4043,-5304,29897,-6143,-11463,-15347,8721,-15757,23964,-22437,-4177,29390,16351,12364,-27055,-2782,8747,9877,21560,-15668,-12840,19098,-14108,17334,17494,20706,1196,-17656,-2473,-18958,-11889,29628,4871,5613,-24456,-11361,20679,2413,-255,4784,13152,29394,-16663,32372,-7067,18840,25485,6211,-15074,-25988,28660,-9020,10839,1767,-13456,7282,-7362,-22092,32320,1740,14210,1881,11712,21015,-29731,-14650,12845,-2906,7341,-5440,-9787,-3363,18137,-31657,16159,22090,-30312,-5029,-16251,20023,-12366,-31968,27282,-28005,14643,-9746,28699,6083,-15950,13629,27198,30846,-18937,4262,28038,26573,9127,-14320,-13826,10945,-14772,-7993,-5771,19348,22203,-25945,17293,6324,-15224,-15333,-17046,22072,-26361,1557,-32429,-30,-21740,-21130,10318,-27775,-30469,1496,14482,3242,5819,-21159,25943,15164,17034,-9809,7427,-13850,-15998,19833,27994,9858,12435,-974,-25794,-30759,-27347,5844,-15096,-5359,9872
,31083,5910,-10372,13077,-28040,25212,-20171,31831,19902,18279,13579,24218,15150,28551,29213,19699,-18432,-31995,31570,21366,18385,21714,22266,-20357,10620,-22682,-5594,24856,-22891,27871,12794,1193,13927,-15917,-24600,-20460,25613,5221,18289,27787,-27392,-26711,19642,-16859,-12029,19432,24399,-19881,4146,-29638,-19049,-5679,-15950,22885,21047,-28463,-28101,-3891,29212,28125,-30205,20558,-22068,27265,-19104,837,-1547,-19737,-31776,26861,6447,-30858,17142,8838,21161,-22902,21398,21602,7001,32327,-27218,8093,-21145,-23353,29404,5070,28995,7549,21572,-26633,30678,-10079,29501,-31859,12869,-1390,1921,-13120,32635,25457,17543,22550,-32005,10644,-6809,3083,23367,-810,30163,-3176,-21392,-17997,-1106,-16745,-11560,-32641,20979,-15330,7877,10002,-2805,7315,-17979,28050,-15275,13923,-4084,-20363,-13688,-10324,-32019,4076,102,3960,16525,-30006,-10595,-10330,24822,-8727,-30745,-2704,12094,20120,-7182,-28862,24659,-14706,-16566,-29623,6793,10787,-28620,6788,6018,-13067,22768,2865,-2363,-17638,-3141,25707,16898,-27206,2336,26483,15855,-6500,23000,-17647,-2038,24421,-15576,-24247,-9084,-12810,-32075,-7336,-319,-27582,29679,29249,15897,4427,-2197,2769,20817,19667,13052,20334,22317,24849,-13765,-14982,32650,-11504,-20454,5264,31963,-30611,12146,8540,-23535,21926,24432,32456,-6071,-30795,-9782,-4008,30608,-19405,-28934,-28909,-24639,30209,6447,-17256,-7937,-27692,-15075,-3153,-23219,-2341,-6210,-17282,30980,4325,13002,-904,1236,7201,13049,7592,-11181,-5162,-27548,13792,5406,3162,-28659,21829,23843,13688,-8026,11812,28118,-20009,16642,-11199,-13180,21893,-13452,13311,-13114,28652,13006,-31194,-16367,-7348,-18896,-30196,-27872,21339,30463,-21559,-24230,-9639,9268,1579,-14403,-31828,15992,1136,22256,22824,30559,-18219,-14629,-23283,22285,-1765,-14086,-28671,11834,-18187,-14345,-8505,-28746,1779,25202,31242,-21625,-20240,6891,21833,-3081,8766,-1054,10050,9318,24002,3636,7915,25212,-17436,26899,28484,16288,-4788,4227,-16995,22431,10370,-19025,-19675,30029,-20290,8508,-28202,1194,-647,1511
9,-12503,-30209,9774,-16859,-18541,-1618,17222,11821,-16510,18999,17596,30853,2945,-9169,4541,11521,-31091,-31381,-5203,28430,18129,20825,-32123,-9154,-6321,-14038,10515,-22178,9151,15549,-6031,9531,27997,-9289,30060,-3574,-337,-18528,6460,-31377,-13200,17127,20833,-9494,27784,-9878,24490,-16438,23971,2906,-31395,-5134,-15052,-26301,-25706,-1535,2542,-25514,-16680,-26845,6662,-11758,-4983,17495,-16672,-11154,-13905,17125,8513,6078,-10547,23745,23486,-15162,-24246,1277,6387,6671,-17520,-31880,3619,-297,32277,9462,3799,-12229,-17713,27175,22775,13635,14201,28076,31168,15797,7522,28269,7884,27630,-25885,-16824,-3510,-13536,-18101,-8232,17152,21354,-14689,30146,22641,-20173,19643,-16284,32318,-12496,3145,-31347,-4562,22698,17983,-8610,11317,26482,53,2129,28018,24426,-8953,13641,-3834,-19732,-20829,7218,-6626,12208,-1890,24125,-16119,28489,30414,-28685,-11867,-15165,3609,-19162,-13993,-6409,-3329,-7359,-18801,-4048,-26550,9311,17902,13036,25285,1041,-2551,-30997,-28524,31716,27584,32328,16969,-31721,-22578,-21520,11295,-2414,2267,-6093,-15288,-11112,7482,11618,-17085,31580,6179,-16386,-24808,7715,-30981,-7488,13375,26760,17594,13122,-18098,7243,7731,-13181,-6070,-8263,23422,9958,-27054,14499,-5798,24691,15978,-9230,18705,-27153,11773,30798,-20019,13300,-16019,8765,30627,-20917,12924,-6072,612,18521,27800,19237,-1564,-13530,-31464,27100,-1850,-8832,-4352,7811,19110,1481,19191,-17786,-21945,-10030,28351,10669,-23737,13849,-18110,-13122,-30907,-27068,-30368,25438,30667,30226,-10058,-11972,-8229,-19794,7105,-22884,-2557,-31423,23492,-13605,3355,14979,3035,-30732,-26886,16071,-11476,7908,2647,-24824,7331,-25884,-31516,-18258,7618,31910,-11014,-9960,-3844,6505,-1607,7443,-19883,-11689,-16301,-15603,-4740,-13787,-22178,2450,4584,-18083,-198,-21245,-31958,305,17257,-30956,14430,8214,3491,-15065,31664,-4337,2176,21828,3849,-20261,-4409,-31707,-19873,-11426,19302,-30389,-15157,29779,19216,22162,9389,-3033,-1868,7602,14337,6182,-31080,-6788,29435,-4028,25886,26501,-8073,22162,-1955
6,13095,-9581,-14039,-14260,11818,27796,-22270,4257,-19986,16845,253,-30476,-3840,-21611,10194,-10344,-12761,-25851,-6118,-8118,27254,16741,5147,2585,30911,7767,-14381,11864,-27273,12129,782,1759,19943,4014,-15525,9616,11363,9914,-18376,13239,19801,-11878,-10001,22957,-21032,22588,29314,-7273,-297,-28550,-6476,-751,-9304,-28256,-8576,4435,19201,1070,21810,-900,-12557,13410,22790,-18256,-21367,3944,-21139,-5071,11158,23836,-22412,-19190,-591,18427,-31944,-27695,-15199,-9159,2775,-16241,-15587,-7859,-23373,5694,-19216,-18232,-9863,2197,10813,-10969,-28026,-16444,-13594,-4481,-26201,27886,-13425,-27398,20856,-22300,-14188,-29677,5958,-2297,-29083,14285,-21329,-14127,18543,-13404,10082,8178,-3380,22231,6508,-10182,104,-571,30835,11370,9988,-48,-20240,20459,-12050,12234,-4710,16705,-19783,-17926,-17395,6538,-13580,16215,18870,-16990,-10023,-7579,-24290,-14279,-7644,-26662,29731,-5543,27040,3429,26251,-13104,-15285,7733,-13166,-12931,-5072,16803,-22173,-25472,2303,29577,-31161,3901,-17110,12942,15417,24118,27596,24152,6679,28946,-8479,31174,17832,-10355,26642,3850,8069,-11394,-22516,-10635,-17982,17338,14986,21898,25943,27740,24631,20204,32061,20184,-16005,-26114,193,-20124,2075,-30918,-14802,-27168,-26054,-21991,31506,16176,-25404,524,18937,-13200,-26062,15394,1980,-18519,26437,-23391,10280,-32301,-25493,-26158,-1416,-16094,19826,-30651,11200,-17294,30271,27778,-18478,-18030,-23258,3368,9182,884,-25143,6689,-764,-15897,11303,22761,16421,15412,30275,-17172,-29925,8014,31576,-25500,-15988,19109,-13112,28324,-9991,-26258,-4706,14424,25742,17680,-19171,-31747,-12806,-21290,-27972,8375,-16035,-4538,14400,-32225,-17539,14385,5483,-14808,-1257,-23752,24361,-19154,16870,20471,-3662,12578,-14205,28630,-20307,-30193,-32475,-17246,-10878,-28423,24265,-31475,-28668,15818,-19370,-30053,-23271,-16086,17303,-27692,-6674,-12974,-2220,4566,-26142,26351,-10096,775,28806,-1577,10368,-18426,23955,-20916,737,6240,-10336,-3607,17859,-890,6945,-6300,-19629,-22859,9051,30848,15729,32173,12614,-
3410,-20555,-6005,-12800,-26928,-31835,-3517,-25636,-18,25294,21771,8811,25853,-30967,-22721,-2713,24157,-11100,23965,-18312,26955,-1178,-26816,-30111,-16457,9582,-16295,-24564,-27750,-14083,-4933,10225,11436,13657,-10171,-9386,21845,-10421,-3997,8704,-7933,-28892,27017,5396,-18795,-10093,-14669,28829,-9192,24588,31379,-12634,-20271,17676,-9881,20821,-16224,31954,17752,-30761,-21114,8718,-18184,8265 diff --git a/tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu21.tflite b/tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu21.tflite new file mode 100644 index 0000000..c011d9d Binary files /dev/null and b/tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu21.tflite differ diff --git a/tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu21_golden_int16.csv b/tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu21_golden_int16.csv new file mode 100644 index 0000000..fc98e07 --- /dev/null +++ b/tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu21_golden_int16.csv @@ -0,0 +1 @@ 
+21780,-7038,1697,-4857,-10992,-7087,-3442,23909,-1265,19568,11507,17556,-10826,23090,6208,7894,-2198,-4887,-5851,-4499,544,32767,32767,-3764,-2611,32767,7135,-10479,-10967,32767,1430,30507,32767,-1144,-1701,-607,-2606,-1452,19830,453,17994,-2219,12109,-2859,32767,-5354,17359,-820,-5282,-8405,24377,11458,-11225,32767,-10849,28302,21296,20657,-4204,-9068,25361,-1419,23185,740,26165,-12634,-7445,-546,-12084,27973,-463,-12026,32767,30138,32767,-6682,-12431,-4793,-1568,11181,-9883,23068,-366,-17,32767,-2378,10537,12569,32767,-12248,27555,14077,-10182,-2037,-6750,32767,-8353,-9812,-7065,-7562,20229,-2235,-9802,-1155,-4853,-918,-7635,14580,-195,-2677,32767,12085,-10731,-2819,19885,18542,23478,-6969,-6169,8928,-12729,-11831,-11519,7373,24192,15185,13941,15337,-15,-1810,-8753,-3327,-12569,7455,-5625,-12692,-7705,-5418,31071,1604,2754,22344,13851,14197,12952,-7647,16944,29092,31375,99,-2796,4276,7765,-8031,-5135,-10610,31375,-11889,-10238,-8685,-6750,-5751,2234,32767,32767,-4417,-12468,-3614,-1643,-5407,-2635,-10531,9875,-10515,-1784,-1061,-5447,17682,-2162,28406,-10650,14569,32767,-8948,-5514,-6170,18309,-8408,22607,-4821,-2925,22053,-4152,-2373,5705,5666,-6834,-10994,-7545,-2689,-7146,32767,-6607,32767,32767,-8939,-10484,-4364,-5934,11142,32767,-1649,-6268,32767,-9719,19199,17372,10202,-2774,24035,8439,23910,-11571,-1769,18832,-364,-797,4821,32767,-1850,26368,-3174,-9583,-4359,-6427,32767,22878,8104,-4384,-6818,-4406,-5516,-8002,32767,21988,-11237,-4936,-875,-10454,-1917,11440,-10648,-7182,-1152,32733,-12639,-2257,-1761,-11956,29345,32767,-1271,-10620,4225,4009,32767,2427,-1048,32767,32182,-5715,-4278,22141,-8637,-7607,32767,21820,32767,6925,-2803,-6975,-12712,32767,32767,21670,17487,32246,-12619,26043,13900,-6727,-8950,32767,14269,-6296,-102,19681,20208,32767,-10604,-9237,-11076,23194,32767,-8301,-10043,28952,32767,19902,32767,-6434,-2051,-11849,18404,28611,32767,-8928,32767,11082,-1357,-8070,20357,28677,-6950,-3581,26618,-5388,-5224,29666,4800,-11530,17636,12574,6276,115
65,-8128,27865,32767,9654,-12060,29008,-12735,-5707,15563,31066,26683,5557,23346,32767,27274,-1491,7095,5283,6456,-3085,8215,4025,11595,-4314,32767,-3227,6283,-11568,-11101,-7451,12008,-12776,4129,16221,3597,4178,26965,-8116,-2357,-2518,-2177,-7166,-9612,17237,-3543,-3142,13550,32767,31008,27732,8009,21703,-4075,-9467,3782,-10705,32767,32767,-368,-46,-11254,10253,11304,13664,21795,-6346,-12223,32767,8030,19158,32767,24749,-3018,-944,-2275,19687,7421,21303,3970,-12744,-11209,32767,-7559,32767,32767,26136,-9856,26829,-2128,32767,25012,-2321,27901,20611,-959,-8185,-11975,-3101,12989,-310,-9043,28510,-12464,17362,12758,-781,20866,10164,-4100,-446,-602,-5523,-8297,32767,-3939,-2072,21071,29932,32767,32767,32767,11421,-3847,-6473,-971,-11493,5883,-8021,-8805,-1883,-12629,-411,32767,14215,-8976,26818,14431,-5836,-5120,1301,-7272,23206,3732,22757,-3187,26653,16044,-2601,1848,10593,31957,31248,-7106,-1405,32767,-2413,9766,32767,-11570,-12466,8876,-8765,-994,-8965,-10785,-12253,-65,26084,32767,-6710,32767,-3720,8909,-6086,32767,-7554,-528,31481,6025,18606,12573,-3919,8907,32767,-681,5681,8251,-9899,-9624,-11189,9726,-2310,16051,-868,-10217,32767,32767,19731,-5107,-6701,-12513,12486,29301,7311,32767,28492,-8113,32767,14617,-7421,14161,-7438,-3157,25736,7795,-7234,-8819,-8084,-3141,18105,1662,-7723,-190,29440,28868,17889,3101,4165,32767,-7236,-2193,-8468,32767,-8421,16308,32767,13169,32767,-5418,-10226,32767,10382,-3110,-9449,1222,-9195,32767,32767,22376,24475,1388,-1427,-5774,-3457,31196,27712,27491,2003,2410,11877,-3238,32109,-10967,17031,-9754,29511,-3743,-7553,25788,32767,-8667,13252,-11353,-7500,-2558,-5991,6788,-10876,-3780,-282,-1245,-11562,-615,-8181,30506,-9959,-10409,26684,32767,-12317,-11766,-11850,-10105,9162,-8432,-3186,-528,-11392,19747,-2063,2847,-8112,-1493,-2126,11826,4673,32767,12469,-5815,-1823,18510,29096,-9330,8971,7948,-830,-5120,32767,11411,-8171,12922,32767,25654,-10131,-6746,13300,29314,32767,-614,17440,26568,32767,-2675,-3689,23678,-3082,23215,32767,-2
204,32767,28103,-4806,10678,5805,4471,32677,12630,5101,32767,-7088,19929,22940,-1513,30275,-8644,-1746,32767,-2578,-515,14918,-3933,18650,-8923,-296,-7882,-9629,-3648,-10958,-8761,-1121,-6634,-1867,32767,-3744,19091,-11743,-1302,-6804,32767,-5401,-9402,30139,-10290,32767,8890,32767,-8511,-4245,-1177,32767,-4343,-9135,32767,5839,6287,9479,28137,246,1170,3197,-3552,-11655,32767,32767,32767,13881,-8170,-6496,-3518,-7731,-12549,10357,-11336,-2971,17368,-94,21691,9764,5326,15955,32767,8863,32767,11527,-10853,5357,12089,30890,19955,32767,14099,-7746,-4752,19038,-5285,-10976,321,1386,-7351,18538,31909,14002,8531,12019,6267,-8834,-8449,4899,6087,32767,-4589,-9781,9416,-8841,-4985,9101,32767,5783,28476,-7357,-8369,43,32767,16921,-5595,-97,-79,8266,-7069,15091,148,-11621,3232,-10296,32657,-10761,-4964,-10010,11265,-2583,14468,7558,-11744,13687,31279,18573,24381,-4676,-8384,-6464,-2006,5317,-5392,4966,-11049,-10568,21715,3580,6051,29036,23721,-9564,19452,29563,-489,-3051,-1586,30269,21836,-4841,31541,-3854,-6515,-6698,-1892,-7021,-4322,32501,9224,31399,-10764,28345,31336,32767,-393,-8308,-11114,-7256,32767,-6705,31038,-7300,32767,6898,32767,-1994,-6054,-6975,-6625,32767,-11183,344,-3127,-3447,13105,-2188,-6994,3822,27354,28293,20517,31607,-2915,-6467,15025,23510,32767,-7544,-2256,25917,-8832,2169,32767,-9288,32767,-1520,32767,32767,21245,32767,-11953,-2220,-8282,-1952,-6043,-10118,14033,5003,-5648,-12310,19614,9420,20033,-4845,3949,-1264,3891,-10963,-7210,30703,-6997,9060,4336,-603,27915,32767,-382,4467,-9288,-10310,32767,32767,20966,32767,-431,-5342,1811,25637,-8413,-3039,16077,12040,27775,32176,-6009,16084,-2579,11790,32767,-5247,29234,-11386,2944,-6728,-10799,10863,-10965,29067,-1318,4476,32605,-11961,-4141,32767,32767,19509,32767,32767,19789,32767,-5149,-6878,-4737,-8028,32022,-2049,7304,-9899,17517,-6710,3428,-11809,-9677,-5854,-5062,-11113,-9807,-4495,-10356,-7573,28462,25406,26699,27202,8078,32767,-1923,-4115,-147,27190,32767,32767,-12481,6711,-9427,-11188,-8,-9599,2136
6,-8837,-9082,-8248,-7473,-9333,10336,13420,-4499,15906,-10000,-5092,32767,24745,-6421,31467,10488,32767,22065,14390,-9042,-7448,24643,7561,-8579,-7090,-9008,5879,-3861,2532,-7111,32767,-12225,-7757,7654,14604,-11149,26460,-4919,-5434,16202,32767,32767,-5004,-2025,32767,-5458,-7551,32767,-12040,32767,-1987,-4211,29587,-3359,32767,-3883,24387,-12070,-5643,-2739,6868,-8448,-10487,-8518,32767,13688,32767,31771,-1343,-1783,-3480,20256,32767,-1105,-4389,-8137,-12787,10892,32713,16813,-5302,29150,-6815,8121,-9353,750,23557,18018,17833,17751,-503,31312,22218,-2110,29482,14660,-10939,32767,32767,32767,-298,25147,32767,32767,-5019,13480,-12559,-11406,-10946,-1793,-9531,-4860,2099,-2463,21003,-3484,-11265,-88,9037,-11152,-4774,-8669,-11545,-4569,-2756,24523,2147,32767,-8042,-1428,-10688,-9767,28915,30190,-6142,17009,-1691,32483,-1864,-6172,28963,4596,-10167,487,-10428,-10925,17495,5101,-12668,-12048,-6484,4463,-5454,9209,-2323,-6582,19257,-4522,16617,-7204,23690,32767,-7743,-6112,-1204,-1281,-8602,3765,-2748,-869,32767,-5077,25646,-518,14638,2910,-3603,32767,-2609,-11580,32767,-1974,16048,-1328,28122,-8137,-4109,29442,-5190,-1563,3659,32767,8068,-4400,-2504,-3222,3230,29793,-4888,32767,-7988,28558,-11707,19484,30759,32767,24905,-7334,-2916,-973,-5930,2485,-7492,872,-10848,-8230,-278,25080,-3168,-7304,20247,700,-5939,-4525,-535,-1670,-11741,-7230,11660,2052,-8251,32767,-4092,-12481,-8232,-507,-4598,826,-10642,-1800,79,26004,29077,-2170,-4449,-10256,27058,-9693,-2740,-9177,-655,9082,-3758,-10929,-3488,-6675,-12538,-9140,-7423,-678,32767,21851,28788,13014,-10684,-1421,28264,7200,12565,14142,26290,-11090,-10989,-11776,-1868,847,-8118,14598,22361,-9422,-5850,-3030,-3494,13902,-4897,19493,-8024,19474,936,-12269,-8150,6102,30337,28717,4573,-4431,-8781,-4688,27997,6093,-3354,-3671,-9020,32767,9905,-6106,-1460,-1985,-275,-247,-5032,21542,-3609,32767,32767,15781,32767,-5666,13284,-2772,24909,-6079,-12547,32767,-7459,15949,15181,-9242,-11948,32767,-3610,-6755,32767,-5274,2348,-8054,7373
,31906,7879,13605,-7610,-798,-3374,23677,4456,-10115,-612,23690,-11049,-5852,-649,26291,-10296,-3991,4855,11545,32767,-1177,28725,-1629,-11968,29559,5495,-4325,-9622,30966,-9154,-3724,32767,-117,32767,-12396,-1632,-3897,27568,-7479,-8177,20041,-10858,8875,-8412,24122,-4604,18313,12880,-7620,21319,8326,-11152,31295,9808,-964,-8902,-12719,-1942,-4663,32767,32767,-994,26942,20503,-6520,32767,-2767,-7052,22628,-5253,-5204,-10918,-11618,-4106,2986,28820,-5167,32767,14867,32767,-6116,-8425,6089,-832,12003,22351,9892,32767,-12650,-7291,-7543,11414,-874,-9954,17826,-3422,32767,-11268,3555,21366,-1618,32767,-8873,-10568,-11622,29133,14478,32767,-10576,-6734,-5441,11736,32767,32767,16679,29237,-10127,3562,15588,32767,-6599,5499,-388,-2090,9497,-3865,32767,-11164,-7728,11723,7373,25091,8181,-632,29518,20823,28963,31562,-7550,-5599,10010,9101,22588,4828,-4520,-1139,17131,18,31083,26002,-8803,-7852,12112,8520,-6688,-12510,-5219,-1448,-10429,-11812,-8188,18343,24074,7407,16981,-3444,32767,13988,-6701,-3949,20775,32767,-1561,-10378,-3586,-776,-5554,3209,-3082,-7193,-11322,32767,-5074,-9199,-6369,20671,-10810,-1199,32767,8894,1424,-1431,-9575,29198,-3175,-1520,-3556,7372,-8396,-9596,-4424,-7165,-5479,-2835,-937,-7753,32767,29498,18732,-12602,19916,-8114,-6572,-1062,25319,-8827,23989,-468,-10897,32767,20049,32767,32767,-10620,30134,8997,-5981,2826,32767,11906,-777,-7673,12633,24622,-7893,-8937,-11238,-5189,15915,24378,-10740,20800,-10395,-8821,32767,2954,4553,-8109,26714,9123,-8152,32767,-2154,-12365,32767,-9815,5385,-9550,28337,11286,-8188,29015,-9203,11255,-3969,-9115,32767,295,-4993,21220,-5432,-9405,22468,32767,32767,10102,-12203,31580,-2912,-11800,-1259,-1830,10255,-10708,-2411,-3974,-4175,29796,14663,2562,-2128,-4314,-6544,25845,-5078,10509,-6772,20158,9472,-469,32767,-3164,-8843,26757,-441,-5749,32767,32767,14161,24702,13031,-9677,-10328,-4436,11716,-6964,4685,24242,-1234,-3293,-5958,-12087,30055,-8909,-7077,-12314,23270,38,-11712,-5833,-10966,-5904,-4877,-6657,-518,-7343,-39
46,32767,13657,30942,-5455,-1463,-5679,23676,2238,-11204,28999,-5658,13477,32767,20413,12327,32767,32767,32767,-6252,32767,-7732,-5720,32601,32767,-1213,32767,32713,32767,-11319,-2497,32767,1102,25745,-3697,5192,22648,-4908,-11058,-5647,32767,-6055,-2287,32767,-11981,-4177,28984,32767,-9383,17984,32767,-2460,-8315,-11609,-1776,-811,23237,-12661,30704,-3604,28597,-1216,-8070,32767,-3390,32334,27140,17596,23629,-4590,-4111,-12598,-4453,14308,32767,-7752,-1196,-2651,-118,32091,4348,16493,-1160,8287,-10874,7348,32767,18413,29334,32767,18491,-577,21798,16614,-10720,1327,16379,-2943,-6801,-4965,1542,-4594,5364,18025,-4206,-1478,-10969,24365,5009,32767,26700,8374,9536,24218,-2229,-506,666,7611,-8135,1291,32767,-3862,-6303,11083,-4681,27747,32767,187,27279,-11476,28025,30937,-6698,-407,32767,-10131,28605,-6941,32767,6553,4233,7627,18426,-6419,-9918,-9046,14676,-8380,32767,32767,-11393,-11900,-4198,-5107,-10080,-8668,11554,1001,32767,-9418,21892,29492,-9033,-2668,-1012,32767,14258,-8916,-376,-7044,-6260,-8532,32259,10263,-5717,-461,4786,-2751,-10260,-610,-1022,-10946,2718,22944,27854,-9501,-6565,-11836,-4080,1735,-11226,30458,-12348,-4471,-5554,6414,19544,-1129,-11195,26520,8254,-1325,-5863,-7590,5465,5222,25477,19085,-6079,32767,-9774,-6341,-12707,29189,-9173,-1627,-7990,28831,-4029,-3174,16813,-10552,16260,7917,-9680,13274,32767,-2419,-4991,-3629,-12593,-6241,-1095,32767,12677,-12714,-8867,22503,23141,-4368,-8134,-10220,-4583,14663,-114,-11847,-1777,-1459,-7240,15640,28859,-2477,-5552,20243,-1327,-7588,-6753,-10873,-11073,10669,25507,23192,21738,19281,32767,-11494,-6334,7,21889,32767,-9155,-8389,-1016,-9115,-9794,-8202,-11956,14244,-4421,-7052,8543,-10739,-6738,26848,15189,-8357,-1828,-10978,-4982,29843,-4949,-9516,-1335,-8191,15341,-3550,-9869,6793,21981,-8115,-745,7771,27464,-8827,10164,-9787,-11967,-7183,8900,7955,-6948,-8910,32767,13008,28937,24191,32767,23889,5298,32767,2427,-228,-2140,22421,-2138,-6893,5908,-10577,32767,-5032,3009,5300,32767,32767,18188,-110,-5450,-8
630,8468,11230,6734,23397,32133,4258,22157,-12134,-7059,-7579,23160,-2395,5525,-12479,-9402,19316,-11064,22019,19273,-5605,-11781,-6364,323,-12543,-8608,-5020,-11717,-2902,21693,32767,32767,-9006,17927,19184,-7902,-8791,-4181,18390,28286,-8695,-7101,833,11725,13254,-12313,20491,32767,-11408,-1716,24156,21704,-4802,13825,12878,32767,124,-9335,-11833,26199,5615,-5455,-3959,32767,32767,7139,-10850,-8831,11575,21903,-5511,30101,21175,-6696,-7052,-294,19931,-8566,32767,-9414,-2357,10301,-10857,-5449,30956,-12743,-9932,32767,399,6988,9238,-9567,14954,-10827,6346,751,31778,21313,8043,32767,-11854,29191,6735,-4865,32767,-4536,-11667,32767,22930,32767,-2386,-7059,-3246,19789,-7726,-7923,17126,-5504,32767,-5295,8492,5490,-8015,-9865,-5978,-1880,-11680,21493,31071,3144,-12116,-3611,24597,-48,-2964,2001,-1522,12343,32767,-4812,19115,17830,27978,-1461,-9346,-10139,29779,-12017,32767,23717,-6126,-7001,-699,-1520,14789,23919,-6205,28077,-8292,32767,-8357,21151,29720,-11074,32767,26231,9805,8527,-12615,32767,-8233,-552,16459,32767,5705,15496,24499,-8866,-10796,-2069,-12280,-6104,3251,32767,16215,-10750,-8859,-9540,-10663,-10253,10547,-11526,-4587,-3902,-11043,29116,-6565,-2050,32767,-8798,30920,-5770,32767,31374,12969,-10146,-3885,18465,32767,1774,-2352,32767,32767,-9625,-7797,-2524,31546,-2495,32767,19341,-6275,-3358,-2696,32767,-5457,18029,-8645,13485,-10855,20826,-2039,-12045,14368,13299,-728,23805,-625,-4576,25835,-4375,-548,8573,32767,-8118,-12188,-3465,32767,27076,32767,32767,-1447,-2815,30336,32767,-4680,-11551,21665,-11177,19240,11891,32767,32767,30908,22706,-1722,13678,32767,16809,28857,-3908,-2788,-10597,-8526,2659,-11953,32767,-2951,-8494,30739,32767,-3474,32767,2498,15916,-3313,32767,-5351,32767,-1936,27151,3442,-5717,25547,-12285,-4789,-4361,-7852,-9721,13307,-12148,32159,-11727,-12593,-208,-5768,7308,-8464,19301,453,-2074,32767,-4417,-11542,32767,32767,27011,29682,17759,-2182,5693,-5710,-5259,-4520,30074,-4079,32767,-6395,1533,-5197,31260,5841,-6694,9658,32767,12196,-
697,22134,-6816,-6257,-1790,32767,24708,-4729,-11259,-2079,23279,-7411,20434,5628,-6291,-1915,1457,13805,-6305,1770,-2568,-11949,-668,22805,22825,-3938,20972,-1222,32767,23513,13561,-3084,16418,-3561,30648,32767,32767,-6318,32767,-11142,-7435,-3635,-7049,11384,1027,6236,-10772,24627,10880,-7220,32767,-478,-12705,-7681,-11480,-5244,-9346,32767,2931,-10732,29789,-7203,-11779,15976,-2580,28334,-776,-3745,-5400,32767,-9334,13073,26653,32767,-1945,6333,-114,-10324,2370,-4218,-12066,-6252,32767,2688,-2891,9211,29153,31232,32767,-450,23634,-2934,-8098,32767,32767,-10328,-4433,22532,-1444,-2512,27189,32767,32767,3102,32767,6123,-5438,-3402,-624,-11658,-2488,7108,23453,20007,-8751,22981,-3214,-8876,32767,32767,23033,-8691,-4328,1482,-2893,-2909,-9045,32767,-12053,-9700,22912,341,-3062,-8534,-10405,-6315,-10894,-10077,23538,-12697,-1360,-9083,-7842,-11199,-8542,9307,30971,32767,20069,-4537,-7346,29889,-11620,-6831,14705,14508,900,-4394,28566,-3840,-571,12413,26730,32767,-5472,32767,-2192,15742,1054,5691,-4301,32767,12029,18269,32236,-4077,-8234,4305,19914,21626,24143,13069,-8912,-9883,21448,12364,32767,-11407,7845,22585,-2506,-7036,-4068,29780,10271,-12471,7951,4267,11497,21419,20061 diff --git a/tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu21_input0_int16.csv b/tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu21_input0_int16.csv new file mode 100644 index 0000000..db5e254 --- /dev/null +++ b/tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu21_input0_int16.csv @@ -0,0 +1 @@ 
+16737,-18028,1304,-12441,-28157,-18154,-8815,18373,-3239,15037,8843,13491,-27731,17744,4771,6066,-5630,-12518,-14987,-11524,418,26526,27338,-9641,-6687,25538,5483,-26842,-28091,26076,1099,23444,26417,-2929,-4356,-1553,-6674,-3719,15239,348,13828,-5684,9305,-7322,25943,-13714,13340,-2100,-13530,-21529,18733,8805,-28754,29844,-27789,21749,16365,15874,-10768,-23228,19489,-3633,17817,569,20107,-32361,-19070,-1397,-30952,21496,-1185,-30804,30917,23160,25810,-17115,-31843,-12277,-4015,8592,-25315,17727,-936,-43,31323,-6090,8097,9659,27901,-31373,21175,10818,-26080,-5216,-17291,26787,-21395,-25133,-18098,-19371,15545,-5725,-25109,-2959,-12430,-2352,-19558,11204,-500,-6856,26179,9287,-27487,-7221,15281,14249,18042,-17850,-15802,6861,-32606,-30305,-29506,5666,18591,11669,10713,11786,-38,-4635,-22420,-8522,-32196,5729,-14408,-32510,-19736,-13877,23877,1233,2116,17171,10644,10910,9953,-19587,13021,22356,24111,76,-7161,3286,5967,-20571,-13153,-27177,24111,-30453,-26224,-22247,-17289,-14732,1717,26468,31471,-11313,-31937,-9258,-4209,-13849,-6750,-26974,7589,-26934,-4569,-2718,-13951,13588,-5537,21829,-27281,11196,32388,-22921,-14124,-15805,14070,-21536,17373,-12349,-7492,16947,-10634,-6079,4384,4354,-17504,-28162,-19326,-6888,-18304,32613,-16923,28076,25974,-22896,-26854,-11177,-15200,8562,28628,-4224,-16055,25424,-24894,14754,13350,7840,-7104,18470,6485,18374,-29638,-4531,14472,-933,-2042,3705,26257,-4738,20263,-8130,-24547,-11164,-16463,28236,17581,6228,-11229,-17465,-11286,-14128,-20497,26999,16897,-28783,-12642,-2242,-26777,-4909,8791,-27276,-18397,-2950,25154,-32375,-5780,-4511,-30625,22551,25876,-3255,-27204,3247,3081,28355,1865,-2685,27265,24731,-14638,-10958,17015,-22124,-19486,27874,16768,29814,5322,-7180,-17867,-32562,29718,30244,16653,13438,24780,-32323,20013,10682,-17232,-22925,31665,10965,-16126,-260,15124,15529,26399,-27163,-23661,-28370,17824,25348,-21262,-25724,22249,26934,15294,31065,-16481,-5253,-30352,14143,21987,29073,-22870,25395,8516,-3476,-20671,15644,220
37,-17802,-9172,20455,-13802,-13380,22797,3689,-29534,13553,9663,4823,8887,-20819,21413,25424,7419,-30891,22292,-32621,-14617,11960,23873,20505,4270,17941,27459,20959,-3819,5452,4060,4961,-7901,6313,3093,8910,-11050,31212,-8266,4828,-29632,-28435,-19085,9228,-32727,3173,12465,2764,3211,20722,-20790,-6036,-6449,-5576,-18356,-24621,13246,-9074,-8048,10413,32757,23829,21311,6155,16678,-10437,-24249,2906,-27420,31336,31747,-943,-118,-28827,7879,8687,10500,16749,-16254,-31309,25186,6171,14722,32687,19019,-7729,-2418,-5827,15129,5703,16371,3051,-32644,-28713,32185,-19361,31789,28053,20085,-25246,20617,-5450,29594,19221,-5946,21441,15839,-2457,-20965,-30675,-7942,9982,-793,-23163,21909,-31926,13342,9804,-2000,16035,7811,-10502,-1142,-1541,-14147,-21253,29743,-10090,-5306,16192,23002,25969,30471,29747,8777,-9854,-16580,-2487,-29439,4521,-20545,-22553,-4822,-32350,-1052,25578,10924,-22992,20609,11090,-14949,-13114,1000,-18627,17833,2868,17488,-8163,20482,12329,-6662,1420,8140,24558,24013,-18202,-3599,28162,-6180,7505,29809,-29636,-31931,6821,-22451,-2545,-22963,-27625,-31387,-166,20045,28850,-17188,30582,-9528,6846,-15590,26398,-19350,-1353,24192,4630,14298,9662,-10037,6845,27963,-1743,4366,6341,-25356,-24651,-28660,7474,-5916,12335,-2223,-26172,27493,26156,15163,-13081,-17164,-32053,9595,22517,5618,30712,21895,-20782,30518,11233,-19008,10882,-19051,-8086,19777,5990,-18530,-22589,-20707,-8046,13913,1277,-19781,-485,22624,22184,13747,2383,3201,30881,-18535,-5617,-21690,31756,-21570,12532,27774,10120,27565,-13877,-26194,28484,7978,-7965,-24204,939,-23552,27630,32012,17195,18808,1067,-3654,-14790,-8854,23973,21296,21126,1539,1852,9127,-8293,24675,-28091,13088,-24986,22678,-9587,-19347,19817,29693,-22201,10184,-29081,-19212,-6553,-15345,5216,-27860,-9681,-721,-3189,-29615,-1574,-20955,23443,-25511,-26662,20506,28989,-31551,-30139,-30354,-25883,7041,-21599,-8161,-1351,-29180,15175,-5285,2188,-20779,-3824,-5446,9088,3591,30097,9582,-14895,-4669,14224,22359,-23898,6894,6108,-2126,-
13115,32031,8769,-20929,9930,29235,19714,-25950,-17280,10221,22527,32736,-1573,13402,20417,30860,-6851,-9450,18196,-7894,17840,26009,-5644,32483,21596,-12311,8206,4461,3436,25111,9706,3920,27650,-18156,15315,17629,-3874,23265,-22142,-4471,30276,-6604,-1319,11464,-10074,14332,-22856,-757,-20189,-24664,-9344,-28069,-22442,-2871,-16992,-4783,25967,-9589,14671,-30080,-3334,-17429,28029,-13835,-24082,23161,-26358,29531,6832,31786,-21800,-10873,-3015,25844,-11123,-23400,26979,4487,4831,7284,21622,189,899,2457,-9097,-29854,28961,25797,25216,10667,-20927,-16639,-9010,-19802,-32145,7959,-29038,-7611,13347,-239,16669,7503,4093,12261,27660,6811,26279,8858,-27801,4117,9290,23738,15335,30821,10835,-19840,-12173,14630,-13536,-28115,247,1065,-18829,14246,24521,10760,6556,9236,4816,-22628,-21641,3765,4678,31346,-11755,-25054,7236,-22647,-12768,6994,29377,4444,21883,-18845,-21437,33,28332,13003,-14331,-248,-202,6352,-18108,11597,114,-29766,2484,-26372,25096,-27565,-12715,-25640,8657,-6615,11118,5808,-30083,10518,24037,14273,18736,-11978,-21475,-16557,-5138,4086,-13812,3816,-28303,-27069,16687,2751,4650,22313,18229,-24498,14948,22718,-1252,-7814,-4063,23261,16780,-12401,24238,-9871,-16687,-17157,-4846,-17983,-11071,24976,7088,24129,-27572,21782,24081,26436,-1006,-21280,-28469,-18586,31496,-17175,23852,-18698,25456,5301,25185,-5108,-15507,-17866,-16969,29787,-28645,264,-8010,-8830,10071,-5604,-17915,2937,21021,21742,15767,24289,-7467,-16565,11546,18067,29207,-19323,-5778,19916,-22624,1667,32462,-23790,32496,-3894,30912,29474,16326,28267,-30617,-5687,-21214,-5000,-15478,-25917,10784,3845,-14468,-31533,15073,7239,15395,-12411,3035,-3236,2990,-28081,-18468,23594,-17923,6962,3332,-1545,21452,29615,-978,3433,-23791,-26408,32015,30233,16112,32212,-1103,-13684,1392,19701,-21551,-7783,12355,9252,21344,24726,-15393,12360,-6606,9060,31340,-13439,22465,-29166,2262,-17233,-27661,8348,-28087,22337,-3376,3440,25056,-30638,-10606,31910,27388,14992,30222,27726,15207,32103,-13190,-17617,-12133,-20563,
24608,-5247,5613,-25357,13461,-17187,2634,-30250,-24788,-14994,-12965,-28467,-25121,-11514,-26526,-19397,21872,19524,20517,20904,6208,26542,-4925,-10541,-375,20895,31616,31837,-31970,5157,-24147,-28658,-20,-24589,16419,-22635,-23264,-21128,-19143,-23906,7943,10313,-11525,12223,-25614,-13043,31720,19016,-16448,24181,8060,31435,16956,11058,-23161,-19079,18937,5810,-21974,-18162,-23074,4518,-9890,1946,-18215,28123,-31314,-19869,5882,11223,-28558,20334,-12599,-13920,12451,29218,27710,-12817,-5187,29914,-13981,-19342,25768,-30841,27752,-5090,-10786,22737,-8603,27832,-9945,18741,-30917,-14454,-7015,5278,-21640,-26862,-21818,25926,10519,26046,24415,-3439,-4566,-8913,15566,28559,-2830,-11241,-20842,-32755,8370,25139,12920,-13581,22401,-17456,6241,-23957,576,18103,13846,13704,13641,-1289,24062,17074,-5404,22656,11266,-28020,27438,31473,31329,-763,19325,29708,30619,-12857,10359,-32170,-29217,-28039,-4593,-24414,-12448,1613,-6309,16140,-8925,-28855,-224,6945,-28567,-12228,-22205,-29572,-11703,-7059,18845,1650,27621,-20599,-3658,-27377,-25017,22220,23200,-15732,13071,-4331,24962,-4773,-15809,22257,3532,-26044,374,-26712,-27984,13444,3920,-32449,-30862,-16609,3430,-13970,7077,-5949,-16860,14798,-11584,12770,-18452,18205,29760,-19833,-15655,-3083,-3280,-22035,2893,-7038,-2225,26465,-13004,19708,-1326,11249,2236,-9228,26819,-6683,-29661,29474,-5057,12332,-3402,21611,-20843,-10525,22625,-13293,-4004,2812,27929,6200,-11269,-6414,-8253,2482,22895,-12521,26846,-20460,21946,-29987,14973,23637,27064,19139,-18786,-7469,-2491,-15189,1910,-19190,670,-27788,-21080,-711,19273,-8114,-18710,15559,538,-15213,-11590,-1369,-4276,-30074,-18519,8960,1577,-21136,27037,-10482,-31971,-21087,-1297,-11778,635,-27259,-4611,61,19983,22345,-5559,-11396,-26271,20793,-24829,-7017,-23506,-1677,6979,-9625,-27995,-8935,-17099,-32115,-23411,-19015,-1735,27890,16792,22123,10001,-27366,-3640,21720,5533,9656,10868,20203,-28408,-28148,-30164,-4785,651,-20795,11218,17184,-24135,-14984,-7760,-8950,10683,-12544,14980,-
20554,14965,719,-31427,-20875,4689,23313,22068,3514,-11350,-22493,-12007,21515,4682,-8592,-9402,-23104,28921,7612,-15639,-3738,-5085,-704,-631,-12889,16554,-9243,25977,30293,12127,27994,-14512,10208,-7101,19142,-15570,-32140,29760,-19106,12256,11666,-23674,-30604,25586,-9247,-17303,31544,-13509,1804,-20630,5666,24519,6055,10455,-19494,-2044,-8642,18195,3424,-25909,-1566,18205,-28301,-14989,-1661,20204,-26372,-10222,3731,8872,30325,-3014,22074,-4171,-30655,22715,4223,-11078,-24646,23796,-23447,-9538,26236,-299,31592,-31753,-4179,-9982,21185,-19157,-20944,15401,-27812,6820,-21548,18537,-11793,14073,9898,-19518,16383,6398,-28565,24049,7537,-2468,-22803,-32581,-4975,-11944,25864,27782,-2546,20704,15756,-16702,25188,-7087,-18063,17389,-13454,-13330,-27966,-29760,-10517,2295,22147,-13234,30701,11425,31273,-15666,-21581,4679,-2131,9224,17176,7602,28985,-32402,-18675,-19321,8771,-2238,-25496,13699,-8764,30009,-28864,2732,16419,-4143,25274,-22727,-27070,-29770,22388,11126,30239,-27090,-17248,-13938,9019,32290,32744,12817,22468,-25940,2737,11979,25289,-16902,4226,-993,-5352,7298,-9899,31023,-28596,-19796,9009,5666,19282,6287,-1618,22684,16002,22257,24254,-19340,-14341,7692,6994,17358,3710,-11577,-2916,13165,14,23886,19982,-22550,-20113,9308,6547,-17130,-32045,-13369,-3708,-26714,-30257,-20974,14096,18500,5692,13049,-8821,26326,10749,-17165,-10115,15965,32395,-3997,-26583,-9184,-1987,-14226,2466,-7893,-18425,-29002,25745,-12996,-23563,-16313,15885,-27689,-3070,30209,6835,1094,-3665,-24526,22438,-8133,-3892,-9109,5665,-21506,-24579,-11331,-18352,-14034,-7262,-2399,-19860,25652,22668,14395,-32280,15305,-20785,-16833,-2721,19457,-22610,18435,-1197,-27913,26335,15407,31232,31941,-27204,23157,6914,-15319,2172,29619,9149,-1989,-19654,9708,18921,-20218,-22892,-28787,-13291,12230,18734,-27510,15984,-26626,-22595,32569,2270,3499,-20772,20529,7011,-20880,28593,-5518,-31673,29230,-25140,4138,-24461,21776,8673,-20973,22297,-23574,8649,-10165,-23348,26399,227,-12789,16307,-13914,-24092,172
66,25610,26457,7763,-31258,24268,-7459,-30226,-3224,-4686,7881,-27429,-6175,-10180,-10693,22897,11268,1969,-5451,-11050,-16761,19861,-13008,8076,-17345,15491,7279,-1202,30605,-8103,-22650,20562,-1129,-14726,29573,26313,10882,18983,10014,-24787,-26454,-11363,9003,-17837,3600,18629,-3161,-8435,-15261,-30961,23096,-22821,-18127,-31542,17882,29,-30001,-14942,-28090,-15124,-12492,-17052,-1325,-18808,-10107,25935,10495,23778,-13973,-3746,-14547,18194,1720,-28700,22285,-14492,10357,27110,15687,9473,30140,28718,29395,-16013,26876,-19806,-14652,25053,30869,-3106,30693,25139,29456,-28993,-6396,28104,847,19784,-9469,3990,17404,-12572,-28326,-14464,27202,-15510,-5858,26766,-30689,-10698,22273,32538,-24034,13820,30030,-6301,-21298,-29736,-4549,-2078,17857,-32431,23595,-9230,21976,-3114,-20672,25488,-8682,24848,20856,13522,18158,-11758,-10529,-32271,-11405,10995,26295,-19856,-3063,-6789,-301,24661,3341,12674,-2970,6368,-27854,5647,25190,14150,22542,32329,14210,-1477,16751,12767,-27458,1020,12587,-7537,-17420,-12717,1185,-11766,4122,13852,-10772,-3786,-28097,18724,3849,25816,20518,6435,7328,18611,-5708,-1296,512,5849,-20838,992,27288,-9892,-16145,8517,-11989,21323,32329,144,20963,-29395,21536,23774,-17156,-1043,29393,-25950,21982,-17779,31361,5036,3253,5861,14160,-16442,-25406,-23172,11278,-21466,28737,30722,-29183,-30481,-10754,-13080,-25821,-22203,8879,769,30686,-24123,16823,22664,-23139,-6834,-2591,26445,10957,-22839,-963,-18044,-16035,-21854,24790,7887,-14643,-1180,3678,-7046,-26280,-1562,-2616,-28037,2089,17632,21405,-24336,-16816,-30319,-10450,1333,-28756,23406,-31629,-11452,-14227,4929,15019,-2891,-28675,20380,6343,-3394,-15017,-19442,4200,4013,19578,14666,-15572,25550,-25035,-16242,-32550,22431,-23497,-4166,-20465,22156,-10320,-8130,12920,-27030,12495,6084,-24794,10201,29716,-6197,-12785,-9294,-32256,-15986,-2803,32122,9742,-32567,-22712,17293,17783,-11189,-20834,-26179,-11738,11268,-291,-30346,-4550,-3736,-18545,12019,22177,-6345,-14220,15556,-3398,-19437,-17297,-27851,-2
8364,8199,19601,17822,16705,14817,28573,-29442,-16224,5,16821,26440,-23450,-21488,-2602,-23347,-25088,-21009,-30626,10946,-11324,-18063,6565,-27507,-17258,20632,11672,-21407,-4681,-28119,-12761,22933,-12676,-24374,-3418,-20982,11789,-9094,-25279,5220,16892,-20787,-1907,5972,21105,-22609,7811,-25070,-30653,-18399,6839,6113,-17796,-22823,29553,9996,22237,18590,32269,18358,4071,26897,1865,-583,-5480,17230,-5477,-17656,4540,-27093,27474,-12889,2312,4073,32565,27376,13977,-282,-13959,-22105,6507,8630,5175,17980,24693,3272,17027,-31081,-18081,-19414,17798,-6134,4246,-31965,-24084,14844,-28341,16921,14811,-14357,-30176,-16302,248,-32128,-22050,-12858,-30013,-7434,16670,26914,28295,-23070,13776,14742,-20241,-22518,-10708,14132,21737,-22273,-18189,640,9010,10185,-31540,15747,30635,-29222,-4396,18563,16679,-12301,10624,9896,32737,95,-23912,-30311,20133,4315,-13972,-10141,31990,32602,5486,-27793,-22621,8895,16832,-14115,23132,16272,-17152,-18064,-753,15316,-21941,26448,-24113,-6038,7916,-27811,-13957,23789,-32641,-25442,26094,307,5370,7099,-24505,11492,-27734,4877,577,24420,16378,6181,27095,-30365,22432,5176,-12461,31370,-11618,-29884,30603,17621,32672,-6111,-18082,-8313,15207,-19789,-20295,13161,-14097,26246,-13562,6526,4219,-20529,-25269,-15313,-4816,-29919,16517,23877,2416,-31035,-9250,18902,-123,-7591,1538,-3898,9485,29149,-12325,14689,13702,21500,-3741,-23940,-25972,22884,-30782,27956,18226,-15691,-17932,-1790,-3894,11365,18381,-15893,21576,-21239,26041,-21406,16254,22839,-28365,27346,20158,7535,6553,-32314,25903,-21089,-1413,12648,29410,4384,11908,18827,-22711,-27655,-5300,-31456,-15636,2498,26845,12461,-27536,-22691,-24437,-27313,-26263,8105,-29524,-11748,-9995,-28286,22375,-16815,-5250,30557,-22536,23761,-14779,30976,24110,9966,-25990,-9952,14190,28850,1363,-6023,26981,27111,-24655,-19972,-6465,24242,-6391,26931,14863,-16072,-8602,-6905,27351,-13978,13855,-22145,10363,-27806,16004,-5223,-30854,11041,10220,-1863,18293,-1600,-11721,19853,-11207,-1404,6588,32761,-20794,-3
1220,-8874,31283,20807,29234,32634,-3706,-7210,23312,32662,-11987,-29589,16649,-28631,14785,9138,32189,26718,23752,17449,-4410,10511,29787,12917,22176,-10009,-7141,-27143,-21839,2043,-30617,27367,-7559,-21756,23622,30784,-8897,27191,1920,12231,-8486,29052,-13706,26489,-4958,20865,2645,-14644,19632,-31469,-12267,-11171,-20112,-24901,10226,-31118,24713,-30038,-32257,-533,-14774,5616,-21680,14832,348,-5311,25321,-11315,-29566,30452,32146,20757,22810,13647,-5589,4375,-14627,-13470,-11577,23111,-10449,30803,-16380,1178,-13311,24022,4489,-17147,7422,30463,9372,-1785,17009,-17458,-16027,-4585,28947,18987,-12114,-28841,-5326,17889,-18983,15703,4325,-16115,-4905,1120,10609,-16149,1360,-6577,-30607,-1710,17525,17540,-10086,16116,-3130,30334,18069,10421,-7899,12617,-9122,23552,30344,27711,-16184,30569,-28539,-19044,-9311,-18056,8748,789,4792,-27592,18925,8361,-18493,30986,-1224,-32543,-19674,-29407,-13433,-23940,28896,2252,-27491,22892,-18451,-30173,12277,-6609,21774,-1987,-9592,-13831,30103,-23909,10046,20482,26756,-4981,4867,-291,-26446,1821,-10804,-30907,-16015,31765,2066,-7404,7078,22403,24001,25824,-1151,18162,-7514,-20742,32360,29136,-26454,-11354,17315,-3698,-6434,20894,26624,28059,2384,29767,4705,-13930,-8714,-1599,-29861,-6372,5462,18023,15375,-22416,17660,-8231,-22736,25433,31722,17700,-22263,-11085,1139,-7411,-7452,-23168,28738,-30875,-24847,17607,262,-7843,-21859,-26653,-16176,-27906,-25813,18088,-32524,-3483,-23265,-20087,-28686,-21880,7152,23800,26347,15422,-11622,-18816,22969,-29765,-17497,11300,11149,692,-11255,21952,-9837,-1463,9539,20541,26253,-14015,31077,-5614,12097,810,4373,-11016,31226,9244,14039,24772,-10442,-21091,3308,15303,16619,18553,10043,-22827,-25314,16482,9501,25646,-29219,6029,17356,-6418,-18022,-10421,22885,7893,-31944,6110,3279,8835,16460,15416 diff --git a/tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu22.tflite b/tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu22.tflite new file mode 100644 index 
0000000..d684ecc Binary files /dev/null and b/tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu22.tflite differ diff --git a/tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu22_golden_int16.csv b/tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu22_golden_int16.csv new file mode 100644 index 0000000..a4523da --- /dev/null +++ b/tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu22_golden_int16.csv @@ -0,0 +1 @@ +-1927,29305,8955,2689,15239,-7417,-5775,32047,1814,6813,-3790,-4584,24375,18078,-7015,24401,31790,1603,9961,-2457,-8974,29097,11011,-7116,-7522,3709,18550,-3195,-4465,-7195,-8330,13545,12068,-3105,-7055,-8586,-9405,-7425,-9033,-7816,26349,1856,-4946,20741,-4822,28352,-5070,-947,-2853,-1712,-4237,-3464,-8283,4153,3916,-217,-7336,7795,-5391,-501,15837,7682,-5566,-5262,27622,11021,-4260,20165,12071,16486,28062,-7774,3511,307,-9370,-3353,3228,5661,6886,28655,-718,-9197,3282,4712,-640,-7396,-9269,-216,-7520,18199,-774,917,15440,-5812,-1697,-5832,5682,-8020,19501,14233,-3907,8627,-6492,-7429,-3064,-4531,21239,30354,22782,17788,28723,-7434,22947,1549,-2469,30630,-2379,-7350,18976,-937,-2453,25772,-36,-6433,24639,17544,17739,4624,-6380,-7807,-565,-7814,27593,-5944,3387,4273,-8239,30527,11452,7961,-6647,3353,-4184,3068,-7233,-3308,-3080,-3949,-5978,-6312,7202,-595,4879,-8828,16221,9685,-8488,3643,13244,19273,18390,24801,-1955,23330,2239,5175,6364,-3614,23321,1217,16695,-1090,-8626,-2870,16493,-6009,-5953,-3832,15504,7763,16192,31906,9091,7861,-5561,-8858,15609,4610,27580,26580,12640,-2579,-4593,1349,-5794,2459,-1327,597,-6304,-3275,-5220,-4161,-4987,19354,2978,-5285,-1413,-8689,-7867,26939,-2747,-4698,7536,4845,-9546,-405,-6040,5162,-7846,21418,1090,15207,-4547,-1217,-320,-8785,4558,7565,-6745,7101,30125,-9147,25682,18919,31898,16728,23941,28359,24355,-5883,24256,839,-2325,-4786,-8111,25432,29287,-1796,-6311,11342,-6633,25884,-836,-2644,32533,-4567,-6369,24410,18203,-5743,1828,21427,-8425,17445,
26031,-4562,-1114,-5993,-6489,12240,3164,12527,29767,31041,10367,27646,5136,-392,-8463,-2300,24606,-7140,-5797,-2693,3993,-84,31131,2810,29594,-2554,25271,-594,25842,-7952,13054,-8072,13689,-268,-8434,-9552,-6037,30502,-3994,6560,18330,2566,10561,-2794,324,11716,14250,-559,31514,-8631,-9392,30443,8721,-4543,24746,-602,16893,1407,214,-9080,-1189,4422,19605,-5425,14208,17410,-22,-2083,-8755,114,12165,-679,25093,-3393,6655,-3604,-9819,26793,31173,26674,23173,7297,-5886,-9355,19575,-2343,26344,-3132,25142,10951,-534,-632,32242,19153,26744,31570,2049,-1653,18614,13637,-282,-8476,-954,-3025,-2265,14211,-4561,-6280,-9784,-6585,-385,-4471,18547,-9504,-5052,-8411,-5283,-7048,11392,-485,23075,12888,-8299,10942,22459,32628,21147,18179,-9315,-1615,14644,25442,1488,22896,-1161,-6658,-441,7440,-7693,8714,-4180,17982,6173,-1487,20928,-283,-6270,-3557,22311,-7301,-1288,-2703,-9451,-1959,23370,10632,-5026,19814,-1939,-6520,14840,11573,-2436,9376,-1511,-4856,30693,-5929,-9076,17388,7831,-7183,20092,-9542,15676,21775,31236,13117,-7703,-8022,4750,-389,-8815,-1309,7047,25890,-6590,10503,-1153,27043,4320,3523,-8503,-7977,6979,-5832,648,-73,19603,14260,29043,-1149,29038,22136,730,5516,-1520,30432,-6899,13678,17225,27447,-8989,-4616,-5309,-6477,9581,-5417,48,16176,23550,-3983,-6465,32611,12854,12559,-7636,-9375,-328,-2170,9901,13338,-8885,29067,-8499,30371,-8112,3500,-2577,-6587,25022,-5815,7941,28763,-8013,-2665,10381,-5664,8384,-4060,31765,18787,7885,30065,-7550,28280,-9331,11110,-7348,22495,28905,-6865,10651,-8275,-9321,24631,-6075,16300,9464,28094,3957,-9373,-6512,10136,21205,-5248,-7962,12781,12920,16742,26541,8531,2865,-3842,6668,16410,32312,-7913,24850,25326,2517,-9492,-8011,-448,-7135,12370,7964,7135,-830,8835,31444,14318,-4390,-8018,-8088,-3823,2884,13906,15465,16954,10864,-2716,22828,21152,-3111,11010,3296,-8295,31912,-9500,-2131,25479,-320,-9103,-5310,13657,12467,6728,13748,-693,27364,18945,25497,7716,-3243,27735,432,4648,17392,-588,-2767,-795,12589,10459,-566,13947,22570,28343,
22230,-6517,14359,-4097,5540,-5250,-3934,-3376,25024,-1632,3145,-730,7183,-8700,29130,27213,-4780,25636,23554,-6106,7787,-7963,29255,29419,-8597,-106,3396,-9247,-7385,-2740,-1061,26625,-8120,-2593,10693,27502,-6623,-2531,-788,4188,21675,-8655,-7792,-1027,27355,-3554,-3708,-3981,-5725,2770,30517,19480,30822,-3623,-1563,32177,-7577,-7258,-9192,-713,-397,13598,-8522,19318,-4709,23763,-8021,-6890,27173,7833,27853,-6326,4230,6958,-2928,-3535,16444,-3994,-258,-7215,-8248,2759,-5928,-8468,3868,14247,20994,27816,-8836,20896,-8609,22139,25043,-7687,-463,-7658,-5967,26567,-5310,25704,-8186,21159,11334,-4938,5747,-877,-9367,-7180,-2775,8313,21811,-7091,24945,-9178,-6036,27492,12624,-6517,2529,-8810,-732,11362,32331,-9216,24633,-9077,-2764,17681,-9408,5961,-8411,3693,-7621,17535,-3358,-7891,8277,-2246,11927,28826,-6558,-2321,21933,5980,27235,30287,-69,-6919,8587,23333,27998,-4918,24583,-5317,6373,-2249,20816,-2203,-2438,1917,-6984,7584,5511,6106,12553,388,-9207,30442,-6330,13062,-5248,4904,-8436,1901,15697,-6749,25547,-2095,19970,-6021,-7583,32632,-3746,2666,238,-1153,-5582,-2770,-4864,10508,20362,13439,22991,-3642,1142,-6911,21267,-8629,-721,28947,7100,9891,4638,-2028,-7689,17258,-5248,-6133,-8646,5156,17154,-7598,-7894,22092,-2929,27609,18866,22127,964,18603,-3009,1145,27046,-4796,-6950,-1988,-6386,-5436,-2985,-1100,-9243,25116,-1072,5889,1771,-6491,10987,-5435,3548,-9009,21494,30686,-2242,29581,30144,-6035,18399,-4363,-5134,-3224,-5954,-8132,-4329,-5297,15219,-572,-9580,-8491,7859,12013,10345,-5757,3252,13122,644,-5149,9581,-8077,-4477,-5061,30233,-960,-6235,2523,-5548,-7459,-1874,-4653,19939,-3032,7405,-575,-8572,-9298,30681,-5101,25422,5838,-9641,16553,-519,5835,31380,-6646,-5885,-5020,-8462,-1492,-5690,32681,-3371,9831,-4183,-6430,-9279,6296,9850,915,13087,-5482,22428,-7293,23670,-3388,19213,-117,16322,3225,-7827,-290,4650,10821,-1668,-5113,28221,-211,-8821,-5105,19124,4997,-7597,15369,19791,-7320,11239,13338,2426,-8720,-6191,-6874,-2459,-3092,-6270,-6313,13514,-3886,-494
7,-3919,26489,10639,-1264,-1666,-7029,23766,17277,-5126,21665,-3520,10604,28974,19498,20994,29610,18167,-9605,-9252,-6225,15400,-1431,27653,14127,20444,18591,-4014,-3998,10645,-5189,4224,27351,-5717,-3075,31713,20128,-7848,28909,25233,15308,-626,-9623,-5754,-896,-3889,-7414,31196,4921,974,17603,-4169,1420,-1041,31457,11118,8620,-359,-4178,-4575,17925,17785,-2146,-3043,-6132,22397,-1163,-8550,16768,-446,-404,16085,-9341,-4669,21315,8982,-7740,7395,3292,-6333,19689,-9347,-2639,5832,-7497,-4294,-2118,-4222,-3147,15393,9456,20691,-514,-7652,-5313,-1831,-3392,-2642,-345,19784,22168,17498,-1760,8268,20645,14137,-49,-1785,771,-94,-6717,-2863,19952,7166,26784,-7903,18417,-5925,-7394,-755,-2808,-6826,-2577,-263,26620,23126,-9615,-510,24652,12329,29148,243,3611,2353,32273,-8268,16444,-207,8271,19599,24187,335,15929,19477,-1164,-8098,-3461,26216,-5096,-5011,-1364,-2247,-8989,-3831,-9254,-4545,12791,3933,12652,8760,-551,-9709,648,3273,6701,-6485,17399,26388,-6993,24753,-620,11800,-2820,-2693,-6616,-142,2791,-1065,-625,-3223,-1963,-9204,3524,499,-5168,2687,-2805,31808,-5184,29236,-6103,15175,9323,17582,19261,15453,-6444,-8025,-3526,-4738,-3643,-130,15544,-2052,-6848,-6307,-2036,-5607,-5884,21488,-8168,20092,-5976,-8326,-4234,-7659,20897,5036,-7314,20876,-2519,31973,9270,8820,-5420,18142,11529,-7409,-741,-5144,16460,7414,3223,-1936,-6126,13506,-311,23675,30931,-273,28100,-4073,-7420,31110,16633,5662,-3720,-4628,15696,-327,25828,31085,-9759,-7563,-5819,30092,-4426,-4713,-1311,-9595,-6960,-8494,-2662,6690,-4123,-1411,29193,-5358,1898,4172,-5838,-500,-8729,-2204,22572,23550,-7658,-6960,-5707,-6271,-1835,16527,9392,-8114,1615,-4337,7746,16333,8679,20251,-2249,15049,19908,-1773,-7742,11610,24740,-2818,2353,-6996,12407,15191,-872,-5628,14043,16604,-5401,-3686,15632,17584,-1137,11942,-6924,5048,29693,-7717,11455,-8002,11878,-6085,391,21779,-4762,27915,22961,-4622,6587,-2722,-1330,-1924,-8859,-8431,22958,12426,32246,-6633,-1771,-9792,25245,-292,21263,-3275,-7825,10890,13011,-2287,23114,7
670,2677,-1198,12571,6260,8149,27476,20327,-806,-8924,1498,-9314,-5587,-5345,12743,-2447,9471,6806,14091,-7859,-2021,29611,-4883,32343,-1816,-3131,-7474,14332,28564,4992,27489,23122,16028,-5111,-2398,-6496,18074,7641,-8686,-3466,9241,-3108,22915,-4925,17073,-1566,24351,21006,-2580,-6739,6661,16090,17795,16933,-3775,27970,-2275,20308,20643,-5453,-1881,-9407,24951,-4579,30697,17482,30574,3401,11202,-3130,-2992,-4987,7538,-2685,32469,-3490,23899,-1082,-8460,-4862,-6812,24877,-8211,-6582,7044,-7540,28427,20451,-6744,-5285,-1140,-8577,27227,-5549,11797,8879,18511,-5478,22990,-2800,17626,-7188,27922,-5319,6862,-8439,-4943,13040,21893,23379,-2971,-5000,-9580,-6052,26843,14212,-2257,26623,-499,-9001,-3868,10592,-6760,-8952,-8927,-2961,26556,-2504,15263,-7722,-3550,-1006,-1904,-6484,28246,-6074,19250,-1738,10302,-4697,22491,29136,-4030,-1810,14628,18276,-3655,23676,17256,22353,-1218,-9375,18314,14416,-3233,32502,7622,23247,-4213,-5748,3001,-4920,31003,-8520,20011,4305,13313,-8592,21942,22409,-4030,-9783,-1341,15114,-6087,-2256,32267,-2382,-414,24858,-2685,20902,4228,11521,-7181,2293,-3433,19384,24239,5016,12689,28561,-7433,15660,-3930,-8325,17666,-2843,3154,-5460,-2809,-1359,18676,-607,-7061,7026,9885,-5755,12915,-4454,27633,-5283,16063,-1572,-1413,30298,-8703,23299,8394,7859,-9722,-5170,-3948,29127,19281,-3290,24367,7672,-353,26617,13040,-1001,1065,-8128,23849,17679,3243,-4510,-9598,-4072,-6449,19277,-274,-3595,8288,-2297,18943,-159,22099,-7432,31831,-4538,-8499,30900,7847,-4612,-1867,18763,-5056,-3590,-192,26767,7077,578,-5616,-3573,2545,14795,-4728,-8026,-9769,-7138,1024,-5113,869,10566,-7315,-6284,21750,-3120,17700,15242,18017,7459,27557,-7184,3014,-9505,-2337,-9159,-4192,-9150,7058,7831,-156,-2264,-2152,13743,-664,27957,26766,16866,28730,-713,1263,3562,11969,12277,-487,-2781,1346,-6767,28727,-7778,-8898,-5263,30120,-507,-2947,-3980,-1263,-7254,-5426,28745,16626,7294,11810,-8954,-9052,22517,23477,-3459,-6748,-8952,15998,-1833,8998,22882,-7731,-3278,-8020,-2454,18697,1640
1,21884,-9518,-5036,-4492,22927,7816,25279,-7068,-8960,23020,12415,-4695,8501,6297,25403,-5330,-5647,3754,1887,-9213,29221,7055,18054,-2245,-290,2303,1109,32189,-8395,4128,24777,27951,5192,-5363,-8447,28662,-9661,-6838,20024,-232,7742,-4995,31502,-6954,2563,31937,-845,30477,-7047,-9536,18619,-6859,-6956,27528,-6867,-541,-9635,-8465,-65,18288,-1162,-8438,-7084,29505,20079,-3223,17011,27802,3269,-4217,29746,-8135,-7399,-4864,-3006,23870,28783,10424,18128,15778,30814,-1504,-2527,27489,-9239,6027,-215,-3766,4099,-1386,2094,13028,-717,-2858,23538,-3040,-7496,-3073,-5092,12845,-2216,22446,131,-3192,5517,-3241,12310,15642,550,-9081,14279,3579,-2233,23122,-5657,3737,-4514,-5608,-3881,-9739,29514,-2920,11235,-1432,14116,-8011,-936,-8503,30537,22862,-7745,347,6075,18567,26556,-277,-1803,-9518,28019,22288,-2736,-316,22187,-9380,-7311,11965,21226,-4908,4072,-630,8956,-9826,20853,-1139,-3000,19721,2418,-3940,-5814,-8177,4141,9160,14485,4158,14026,-1785,7228,3923,-3908,4968,26061,-1884,-8211,-944,-638,-1804,11242,11775,23256,31394,-6839,1048,-1838,15928,531,1964,-6853,-8257,-9001,30361,-472,25085,6068,-2818,18338,13900,20438,22882,-9043,-4342,2266,-2210,-1384,-3857,13849,-394,14071,15887,-9580,-9725,174,3918,-1472,-8995,10279,5430,1614,6143,18266,16898,18872,32684,-3913,-291,20036,-3915,-1754,-8189,-6012,-8187,8685,-2869,-9823,25158,-9335,8708,12578,-2212,-7960,6767,-862,27785,2053,31907,22419,10031,-1756,-6832,4664,9548,16991,-3373,29910,18634,9977,-2626,-5848,-4354,5758,-3726,-89,5736,22289,-5022,9288,-1237,-8464,-1857,-5717,3908,-9212,-382,-359,-7044,8292,16415,-8975,-4317,-4359,-986,-5890,604,6458,-76,11453,29552,-6512,-7766,2693,16139,-5050,-3493,17527,-3481,27870,-2682,9151,-9493,20679,30263,29156,-2664,15212,-8814,-8582,32519,-3145,18211,28312,-5770,15632,-1297,10727,-9219,20028,13353,9381,29942,4711,-4735,4385,-1162,-2082,-9258,30727,-7508,7205,-5856,-6941,-6431,17486,18131,7931,209,-8712,14122,-3942,-7378,24720,-8710,4799,12465,1976,25462,28491,-8898,15560,-6572,11209,-8
888,-2375,-1927,-8817,30562,-1939,-4582,-2687,30578,4257,-1952,-28,-4487,-6537,-2673,7700,11504,6540,23243,13111,1824,1242,6947,17652,2252,-2608,27767,-1944,-7691,-4092,-4281,9736,-5728,7245,21540,19670,-5306,11154,22272,20618,-2526,-5845,31291,-5307,27541,-2929,-9818,-5854,30490,-4075,-9239,28758,-9271,23579,-3336,-1524,-4,-8529,-7888,379,-8007,-964,-7024,-3202,-6688,7124,-1871,-4015,8470,29163,-7530,-3439,27213,-1768,-6265,-1378,-4923,27562,-5601,12466,7178,6529,-4381,-4372,-7770,-4788,10330,-6251,31070,-9187,5687,-9379,8428,-8843,-7813,-9609,-1769,-7969,-8368,-9123,17582,-3053,23785,-7084,24806,13972,-6314,29446,3716,22264,22236,22805,-5073,22161,18494,28052,10981,7696,29478,-1003,26621,32657,-8701,30400,15691,-8808,-261,19321,-4235,28987,9304,18204,-3457,5117,-1815,-4387,-165,-6281,14029,-3379,-6864,-1846,-1189,-2116,-6687,2930,15963,-3350,12633,27667,7832,5208,-6253,-2327,17761,16351,-9474,-3778,-2383,7769,-3092,5508,-1335,-4805,10873,4328,-9022,3913,-2702,15680,29684,-5206,-155,2912,-7542,7232,30259,-4685,8215,17926,-6588,-1379,21005,-835,-5255,26462,-7171,29332,-8607,7257,-6182,27954,13513,-4597,-6805,20131,23593,16212,-1752,-7636,7368,24563,-8130,22412,14157,10629,14469,4186,-8075,-4137,-677,4852,24770,-8384,6776,4434,-4131,24459,-6973,-8319,21310,-2385,19219,10734,12815,27009,26166,29717,-6220,9913,-374,-7977,9447,12342,22929,-7807,7224,-8167,23099,-3598,-3177,17322,-8953,16387,-6745,28928,-648,19517,-3209,1708,-3757,-7737,-9758,-4196,-4309,18379,-8511,19440,73,-1584,-4442,-2252,5637,1095,24194,22567,-8769,-5123,-1672,28660,-1526,9840,-4870,-4996,17640,-3307,-591,5882,19663,521,-2836,-8246,-5645,20259,27805,-4419,9267,-9808,1981,552,26673,12639,-9184,13015,12599,16960,-9821,30028,14759,-5793,-1213,-8244,-1506,-1982,-1711,29504,-3148,29727,-7312,-9428,26325,24329,6970,-1351,-2699,32452,12463,-4140,19916,12452,26774,-4071,-4546,3915,-4878,29737,-31,22952,10585,-3739,-2645,-1795,31524,-865,-9140,-2756,25960,15584,-9728,-9400,28024,10490,-2370,20174,-7843,-5518
,-7960,1816,7450,-6593,-5747,-1539,-85,-4810,-9117,362,-8631,12530,10821,27936,2180,-6624,-4182,31855,-3562,21663,-9430,23869,6286,19824,-8184,6924,18396,-6839,-7512,9323,9364,2260,19713,-5104,20882,-789,-6671,13892,-566,3551,-3454,12477,-5824,-5046,2659,-9289,30180,18786,-1546,15786,-5868,31389,-2769,8993,-463,3966,-5459,-3286,27505,1509,-4347,8088,949,951,-2103,4370,-8506,-9240,-1214,-5560,5283,-1363,7767,4626,-1308,-8027,20664,11209,-3710,10958,-9317,-6444,-2727,11903,24472,-8899,-7107,-9471,-3927,-6832,17116,12580,13172,-3741,-7892,-7469,3448,-3219,25768,26560,972,18106,9892,15881,-6345,1330,32647,14225,-9799,14053,15629,24797,-5016,-2483,-8539,19098,-7669,-9426,-8581,-7859,9267,-2685,21593,-4290,31150,-2726,-6116,-1968,9562,15040,-3676,4265,16265,-6599,8342,23443,19755,10332,-6806,5344,-1039,-3436,14858,-4530,-7657,-5271,1486,7415,-2693,27726,12300,-2912,-7043,27489,27644,19835,15347,-2248,30261,13658,-6439,15349,8320,20348,-7480,-266,17202,-8006,11951,28562,25408,14965,-147,29773,30036,32324,16505,12518,27642,-7938,-4993,-8545,-1685,27299,-3391,-2919,-4097,19307,17612,1983,18914,18973,22412,-3768,29630,-1455,13268,30155,17072,26872,-8176,-3624,-6379,3651,9725,11668,17081,-3095,16264,-76,-5928,26112,-9012,-9790,1518,-6936 diff --git a/tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu22_input0_int16.csv b/tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu22_input0_int16.csv new file mode 100644 index 0000000..1232c3f --- /dev/null +++ b/tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu22_input0_int16.csv @@ -0,0 +1 @@ 
+-6424,29305,8955,2689,15239,-24722,-19250,32047,1814,6813,-12631,-15278,24375,18078,-23383,24401,31790,1603,9961,-8188,-29911,29097,11011,-23720,-25071,3709,18550,-10648,-14881,-23984,-27766,13545,12068,-10348,-23517,-28618,-31348,-24748,-30109,-26052,26349,1856,-16487,20741,-16074,28352,-16899,-3155,-9510,-5706,-14122,-11546,-27608,4153,3916,-722,-24451,7795,-17968,-1670,15837,7682,-18551,-17539,27622,11021,-14198,20165,12071,16486,28062,-25912,3511,307,-31231,-11175,3228,5661,6886,28655,-2391,-30657,3282,4712,-2132,-24652,-30897,-718,-25065,18199,-2578,917,15440,-19372,-5657,-19440,5682,-26734,19501,14233,-13022,8627,-21638,-24762,-10212,-15102,21239,30354,22782,17788,28723,-24780,22947,1549,-8228,30630,-7929,-24499,18976,-3123,-8176,25772,-118,-21442,24639,17544,17739,4624,-21265,-26024,-1881,-26046,27593,-19811,3387,4273,-27464,30527,11452,7961,-22156,3353,-13945,3068,-24110,-11026,-10265,-13164,-19926,-21039,7202,-1981,4879,-29427,16221,9685,-28293,3643,13244,19273,18390,24801,-6516,23330,2239,5175,6364,-12046,23321,1217,16695,-3631,-28754,-9565,16493,-20028,-19841,-12771,15504,7763,16192,31906,9091,7861,-18536,-29525,15609,4610,27580,26580,12640,-8597,-15310,1349,-19312,2459,-4424,597,-21011,-10917,-17400,-13869,-16621,19354,2978,-17616,-4708,-28963,-26221,26939,-9155,-15659,7536,4845,-31820,-1348,-20134,5162,-26154,21418,1090,15207,-15156,-4055,-1066,-29284,4558,7565,-22481,7101,30125,-30490,25682,18919,31898,16728,23941,28359,24355,-19608,24256,839,-7748,-15952,-27035,25432,29287,-5987,-21036,11342,-22109,25884,-2787,-8814,32533,-15224,-21230,24410,18203,-19142,1828,21427,-28084,17445,26031,-15205,-3713,-19977,-21629,12240,3164,12527,29767,31041,10367,27646,5136,-1307,-28210,-7666,24606,-23799,-19321,-8975,3993,-280,31131,2810,29594,-8514,25271,-1978,25842,-26505,13054,-26906,13689,-892,-28114,-31840,-20123,30502,-13313,6560,18330,2566,10561,-9313,324,11716,14250,-1862,31514,-28770,-31305,30443,8721,-15141,24746,-2006,16893,1407,214,-30265,-3962,4422,19605,
-18083,14208,17410,-74,-6941,-29181,114,12165,-2264,25093,-11308,6655,-12012,-32729,26793,31173,26674,23173,7297,-19620,-31183,19575,-7810,26344,-10440,25142,10951,-1780,-2105,32242,19153,26744,31570,2049,-5509,18614,13637,-938,-28254,-3178,-10081,-7549,14211,-15203,-20931,-32611,-21949,-1282,-14903,18547,-31679,-16838,-28037,-17609,-23493,11392,-1615,23075,12888,-27662,10942,22459,32628,21147,18179,-31050,-5381,14644,25442,1488,22896,-3870,-22192,-1469,7440,-25641,8714,-13933,17982,6173,-4955,20928,-942,-20900,-11856,22311,-24337,-4291,-9008,-31503,-6528,23370,10632,-16753,19814,-6462,-21733,14840,11573,-8119,9376,-5036,-16186,30693,-19763,-30253,17388,7831,-23944,20092,-31807,15676,21775,31236,13117,-25677,-26739,4750,-1297,-29382,-4363,7047,25890,-21966,10503,-3844,27043,4320,3523,-28341,-26590,6979,-19438,648,-242,19603,14260,29043,-3829,29038,22136,730,5516,-5066,30432,-22997,13678,17225,27447,-29963,-15386,-17697,-21590,9581,-18055,48,16176,23550,-13277,-21548,32611,12854,12559,-25453,-31250,-1093,-7231,9901,13338,-29617,29067,-28328,30371,-27040,3500,-8589,-21956,25022,-19383,7941,28763,-26710,-8881,10381,-18880,8384,-13533,31765,18787,7885,30065,-25167,28280,-31102,11110,-24492,22495,28905,-22883,10651,-27582,-31068,24631,-20249,16300,9464,28094,3957,-31244,-21705,10136,21205,-17491,-26538,12781,12920,16742,26541,8531,2865,-12807,6668,16410,32312,-26375,24850,25326,2517,-31638,-26704,-1492,-23781,12370,7964,7135,-2765,8835,31444,14318,-14632,-26726,-26959,-12742,2884,13906,15465,16954,10864,-9053,22828,21152,-10368,11010,3296,-27648,31912,-31665,-7103,25479,-1067,-30343,-17698,13657,12467,6728,13748,-2308,27364,18945,25497,7716,-10810,27735,432,4648,17392,-1959,-9221,-2649,12589,10459,-1885,13947,22570,28343,22230,-21722,14359,-13656,5540,-17500,-13112,-11252,25024,-5438,3145,-2434,7183,-28998,29130,27213,-15932,25636,23554,-20351,7787,-26541,29255,29419,-28655,-353,3396,-30823,-24617,-9134,-3535,26625,-27065,-8642,10693,27502,-22077,-8437,-2625,4188,21675,-
28850,-25972,-3423,27355,-11846,-12358,-13269,-19081,2770,30517,19480,30822,-12076,-5209,32177,-25257,-24192,-30640,-2376,-1321,13598,-28405,19318,-15695,23763,-26737,-22965,27173,7833,27853,-21087,4230,6958,-9759,-11781,16444,-13311,-858,-24049,-27493,2759,-19758,-28227,3868,14247,20994,27816,-29451,20896,-28695,22139,25043,-25621,-1541,-25525,-19890,26567,-17699,25704,-27287,21159,11334,-16458,5747,-2921,-31221,-23932,-9250,8313,21811,-23636,24945,-30593,-20120,27492,12624,-21722,2529,-29365,-2439,11362,32331,-30719,24633,-30256,-9211,17681,-31359,5961,-28035,3693,-25402,17535,-11194,-26304,8277,-7487,11927,28826,-21859,-7735,21933,5980,27235,30287,-229,-23064,8587,23333,27998,-16392,24583,-17723,6373,-7496,20816,-7344,-8126,1917,-23279,7584,5511,6106,12553,388,-30689,30442,-21098,13062,-17491,4904,-28120,1901,15697,-22496,25547,-6982,19970,-20069,-25276,32632,-12485,2666,238,-3844,-18605,-9231,-16212,10508,20362,13439,22991,-12138,1142,-23036,21267,-28763,-2404,28947,7100,9891,4638,-6760,-25630,17258,-17494,-20443,-28818,5156,17154,-25326,-26311,22092,-9764,27609,18866,22127,964,18603,-10029,1145,27046,-15987,-23167,-6626,-21285,-18120,-9949,-3665,-30808,25116,-3573,5889,1771,-21637,10987,-18115,3548,-30030,21494,30686,-7474,29581,30144,-20116,18399,-14543,-17114,-10746,-19846,-27106,-14429,-17657,15219,-1906,-31931,-28301,7859,12013,10345,-19190,3252,13122,644,-17163,9581,-26921,-14922,-16868,30233,-3199,-20781,2523,-18494,-24862,-6247,-15508,19939,-10107,7405,-1917,-28572,-30992,30681,-17002,25422,5838,-32136,16553,-1730,5835,31380,-22151,-19615,-16734,-28206,-4974,-18965,32681,-11235,9831,-13942,-21433,-30928,6296,9850,915,13087,-18271,22428,-24309,23670,-11294,19213,-388,16322,3225,-26088,-965,4650,10821,-5560,-17044,28221,-701,-29402,-17017,19124,4997,-25322,15369,19791,-24398,11239,13338,2426,-29066,-20635,-22912,-8195,-10306,-20900,-21042,13514,-12952,-16490,-13063,26489,10639,-4214,-5551,-23428,23766,17277,-17086,21665,-11731,10604,28974,19498,20994,29610
,18167,-32017,-30838,-20748,15400,-4768,27653,14127,20444,18591,-13380,-13327,10645,-17297,4224,27351,-19056,-10249,31713,20128,-26159,28909,25233,15308,-2085,-32077,-19178,-2986,-12963,-24712,31196,4921,974,17603,-13897,1420,-3468,31457,11118,8620,-1196,-13925,-15250,17925,17785,-7152,-10142,-20440,22397,-3877,-28500,16768,-1485,-1347,16085,-31136,-15563,21315,8982,-25800,7395,3292,-21108,19689,-31156,-8796,5832,-24990,-14311,-7059,-14074,-10488,15393,9456,20691,-1714,-25506,-17708,-6102,-11305,-8805,-1149,19784,22168,17498,-5866,8268,20645,14137,-162,-5949,771,-312,-22389,-9542,19952,7166,26784,-26342,18417,-19750,-24646,-2517,-9359,-22751,-8589,-875,26620,23126,-32049,-1700,24652,12329,29148,243,3611,2353,32273,-27559,16444,-688,8271,19599,24187,335,15929,19477,-3879,-26992,-11536,26216,-16985,-16703,-4547,-7489,-29964,-12770,-30845,-15150,12791,3933,12652,8760,-1837,-32361,648,3273,6701,-21616,17399,26388,-23308,24753,-2066,11800,-9399,-8976,-22051,-474,2791,-3548,-2082,-10742,-6541,-30680,3524,499,-17226,2687,-9350,31808,-17280,29236,-20342,15175,9323,17582,19261,15453,-21479,-26749,-11752,-15792,-12141,-431,15544,-6839,-22826,-21024,-6787,-18688,-19614,21488,-27225,20092,-19918,-27753,-14114,-25529,20897,5036,-24378,20876,-8395,31973,9270,8820,-18067,18142,11529,-24695,-2470,-17146,16460,7414,3223,-6454,-20419,13506,-1037,23675,30931,-910,28100,-13576,-24731,31110,16633,5662,-12398,-15425,15696,-1088,25828,31085,-32528,-25209,-19396,30092,-14751,-15708,-4368,-31983,-23199,-28311,-8871,6690,-13742,-4702,29193,-17859,1898,4172,-19460,-1665,-29097,-7346,22572,23550,-25525,-23199,-19023,-20903,-6115,16527,9392,-27047,1615,-14456,7746,16333,8679,20251,-7497,15049,19908,-5908,-25806,11610,24740,-9394,2353,-23319,12407,15191,-2905,-18759,14043,16604,-18004,-12285,15632,17584,-3790,11942,-23080,5048,29693,-25723,11455,-26671,11878,-20281,391,21779,-15873,27915,22961,-15407,6587,-9074,-4434,-6412,-29529,-28103,22958,12426,32246,-22109,-5901,-32639,25245,-973,21263,-109
16,-26083,10890,13011,-7621,23114,7670,2677,-3992,12571,6260,8149,27476,20327,-2686,-29746,1498,-31045,-18623,-17817,12743,-8156,9471,6806,14091,-26195,-6737,29611,-16277,32343,-6052,-10436,-24912,14332,28564,4992,27489,23122,16028,-17035,-7992,-21651,18074,7641,-28953,-11551,9241,-10358,22915,-16415,17073,-5220,24351,21006,-8599,-22461,6661,16090,17795,16933,-12582,27970,-7581,20308,20643,-18176,-6268,-31356,24951,-15261,30697,17482,30574,3401,11202,-10431,-9972,-16624,7538,-8948,32469,-11633,23899,-3607,-28199,-16207,-22706,24877,-27368,-21938,7044,-25134,28427,20451,-22478,-17616,-3798,-28590,27227,-18496,11797,8879,18511,-18258,22990,-9331,17626,-23960,27922,-17730,6862,-28128,-16475,13040,21893,23379,-9902,-16665,-31932,-20171,26843,14212,-7521,26623,-1664,-30004,-12892,10592,-22533,-29839,-29756,-9870,26556,-8347,15263,-25740,-11833,-3352,-6347,-21612,28246,-20246,19250,-5793,10302,-15656,22491,29136,-13433,-6034,14628,18276,-12184,23676,17256,22353,-4058,-31250,18314,14416,-10776,32502,7622,23247,-14042,-19160,3001,-16399,31003,-28400,20011,4305,13313,-28640,21942,22409,-13434,-32609,-4469,15114,-20289,-7519,32267,-7940,-1380,24858,-8950,20902,4228,11521,-23936,2293,-11442,19384,24239,5016,12689,28561,-24775,15660,-13100,-27749,17666,-9477,3154,-18200,-9364,-4530,18676,-2023,-23537,7026,9885,-19181,12915,-14846,27633,-17609,16063,-5238,-4709,30298,-29009,23299,8394,7859,-32406,-17234,-13159,29127,19281,-10966,24367,7672,-1176,26617,13040,-3335,1065,-27092,23849,17679,3243,-15033,-31992,-13572,-21497,19277,-911,-11982,8288,-7657,18943,-529,22099,-24774,31831,-15126,-28330,30900,7847,-15372,-6224,18763,-16853,-11965,-639,26767,7077,578,-18720,-11908,2545,14795,-15759,-26752,-32561,-23794,1024,-17043,869,10566,-24381,-20947,21750,-10399,17700,15242,18017,7459,27557,-23946,3014,-31684,-7789,-30529,-13973,-30498,7058,7831,-520,-7547,-7171,13743,-2212,27957,26766,16866,28730,-2376,1263,3562,11969,12277,-1622,-9269,1346,-22557,28727,-25925,-29659,-17541,30120,-1688,
-9823,-13265,-4210,-24180,-18087,28745,16626,7294,11810,-29845,-30171,22517,23477,-11529,-22492,-29840,15998,-6108,8998,22882,-25769,-10925,-26734,-8180,18697,16401,21884,-31725,-16787,-14973,22927,7816,25279,-23558,-29865,23020,12415,-15648,8501,6297,25403,-17765,-18821,3754,1887,-30710,29221,7055,18054,-7483,-965,2303,1109,32189,-27981,4128,24777,27951,5192,-17876,-28157,28662,-32202,-22792,20024,-771,7742,-16649,31502,-23180,2563,31937,-2815,30477,-23488,-31787,18619,-22862,-23186,27528,-22889,-1801,-32117,-28216,-217,18288,-3871,-28125,-23614,29505,20079,-10743,17011,27802,3269,-14056,29746,-27116,-24664,-16214,-10018,23870,28783,10424,18128,15778,30814,-5014,-8423,27489,-30796,6027,-717,-12552,4099,-4618,2094,13028,-2388,-9526,23538,-10131,-24987,-10242,-16974,12845,-7387,22446,131,-10638,5517,-10801,12310,15642,550,-30270,14279,3579,-7443,23122,-18857,3737,-15047,-18691,-12936,-32462,29514,-9731,11235,-4774,14116,-26702,-3119,-28343,30537,22862,-25816,347,6075,18567,26556,-921,-6010,-31725,28019,22288,-9118,-1053,22187,-31267,-24368,11965,21226,-16359,4072,-2100,8956,-32752,20853,-3796,-9999,19721,2418,-13134,-19380,-27255,4141,9160,14485,4158,14026,-5949,7228,3923,-13026,4968,26061,-6279,-27369,-3145,-2126,-6014,11242,11775,23256,31394,-22795,1048,-6125,15928,531,1964,-22841,-27524,-30003,30361,-1571,25085,6068,-9393,18338,13900,20438,22882,-30142,-14474,2266,-7366,-4613,-12857,13849,-1313,14071,15887,-31932,-32417,174,3918,-4906,-29982,10279,5430,1614,6143,18266,16898,18872,32684,-13044,-969,20036,-13050,-5847,-27296,-20040,-27288,8685,-9562,-32743,25158,-31116,8708,12578,-7372,-26532,6767,-2871,27785,2053,31907,22419,10031,-5851,-22774,4664,9548,16991,-11242,29910,18634,9977,-8752,-19493,-14511,5758,-12418,-295,5736,22289,-16739,9288,-4122,-28212,-6188,-19056,3908,-30707,-1271,-1195,-23480,8292,16415,-29916,-14390,-14529,-3287,-19633,604,6458,-251,11453,29552,-21706,-25887,2693,16139,-16834,-11644,17527,-11604,27870,-8940,9151,-31643,20679,30263,29156,-8880
,15212,-29379,-28605,32519,-10482,18211,28312,-19234,15632,-4321,10727,-30729,20028,13353,9381,29942,4711,-15782,4385,-3873,-6938,-30859,30727,-25027,7205,-19520,-23135,-21437,17486,18131,7931,209,-29040,14122,-13139,-24593,24720,-29031,4799,12465,1976,25462,28491,-29659,15560,-21905,11209,-29626,-7915,-6424,-29388,30562,-6461,-15272,-8957,30578,4257,-6505,-93,-14956,-21788,-8908,7700,11504,6540,23243,13111,1824,1242,6947,17652,2252,-8694,27767,-6478,-25637,-13640,-14270,9736,-19091,7245,21540,19670,-17687,11154,22272,20618,-8420,-19484,31291,-17690,27541,-9763,-32725,-19512,30490,-13583,-30797,28758,-30903,23579,-11119,-5078,-13,-28428,-26294,379,-26688,-3214,-23414,-10673,-22294,7124,-6235,-13381,8470,29163,-25098,-11462,27213,-5892,-20883,-4594,-16410,27562,-18670,12466,7178,6529,-14603,-14573,-25899,-15959,10330,-20837,31070,-30622,5687,-31264,8428,-29476,-26044,-32029,-5895,-26562,-27893,-30409,17582,-10176,23785,-23612,24806,13972,-21047,29446,3716,22264,22236,22805,-16909,22161,18494,28052,10981,7696,29478,-3342,26621,32657,-29002,30400,15691,-29359,-868,19321,-14115,28987,9304,18204,-11524,5117,-6050,-14621,-549,-20935,14029,-11261,-22880,-6154,-3961,-7052,-22288,2930,15963,-11165,12633,27667,7832,5208,-20843,-7756,17761,16351,-31578,-12592,-7941,7769,-10305,5508,-4450,-16017,10873,4328,-30073,3913,-9007,15680,29684,-17351,-515,2912,-25139,7232,30259,-15617,8215,17926,-21958,-4597,21005,-2782,-17516,26462,-23901,29332,-28688,7257,-20607,27954,13513,-15323,-22683,20131,23593,16212,-5840,-25453,7368,24563,-27098,22412,14157,10629,14469,4186,-26915,-13789,-2255,4852,24770,-27947,6776,4434,-13769,24459,-23242,-27730,21310,-7950,19219,10734,12815,27009,26166,29717,-20732,9913,-1246,-26589,9447,12342,22929,-26024,7224,-27224,23099,-11992,-10590,17322,-29844,16387,-22484,28928,-2158,19517,-10695,1708,-12522,-25788,-32525,-13985,-14364,18379,-28369,19440,73,-5278,-14806,-7506,5637,1095,24194,22567,-29229,-17075,-5572,28660,-5086,9840,-16231,-16653,17640,-11024,-1969
,5882,19663,521,-9453,-27486,-18815,20259,27805,-14730,9267,-32693,1981,552,26673,12639,-30612,13015,12599,16960,-32735,30028,14759,-19308,-4043,-27479,-5018,-6605,-5704,29504,-10492,29727,-24372,-31426,26325,24329,6970,-4501,-8995,32452,12463,-13800,19916,12452,26774,-13568,-15151,3915,-16258,29737,-103,22952,10585,-12461,-8816,-5982,31524,-2881,-30466,-9187,25960,15584,-32426,-31333,28024,10490,-7900,20174,-26142,-18393,-26534,1816,7450,-21975,-19156,-5129,-282,-16034,-30390,362,-28769,12530,10821,27936,2180,-22080,-13940,31855,-11874,21663,-31432,23869,6286,19824,-27278,6924,18396,-22797,-25039,9323,9364,2260,19713,-17012,20882,-2630,-22237,13892,-1885,3551,-11512,12477,-19412,-16819,2659,-30964,30180,18786,-5151,15786,-19558,31389,-9229,8993,-1544,3966,-18196,-10953,27505,1509,-14489,8088,949,951,-7010,4370,-28354,-30800,-4046,-18531,5283,-4542,7767,4626,-4360,-26755,20664,11209,-12365,10958,-31057,-21478,-9090,11903,24472,-29661,-23688,-31568,-13089,-22773,17116,12580,13172,-12468,-26306,-24896,3448,-10730,25768,26560,972,18106,9892,15881,-21148,1330,32647,14225,-32662,14053,15629,24797,-16719,-8276,-28463,19098,-25562,-31420,-28602,-26197,9267,-8948,21593,-14300,31150,-9087,-20387,-6558,9562,15040,-12251,4265,16265,-21996,8342,23443,19755,10332,-22686,5344,-3464,-11454,14858,-15100,-25524,-17569,1486,7415,-8977,27726,12300,-9706,-23476,27489,27644,19835,15347,-7493,30261,13658,-21462,15349,8320,20348,-24934,-885,17202,-26687,11951,28562,25408,14965,-489,29773,30036,32324,16505,12518,27642,-26460,-16644,-28483,-5615,27299,-11304,-9730,-13656,19307,17612,1983,18914,18973,22412,-12558,29630,-4848,13268,30155,17072,26872,-27254,-12079,-21261,3651,9725,11668,17081,-10316,16264,-251,-19758,26112,-30040,-32631,1518,-23119 diff --git a/tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu2_golden_int16.csv b/tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu2_golden_int16.csv new file mode 100644 index 0000000..4dac128 --- /dev/null 
+++ b/tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu2_golden_int16.csv @@ -0,0 +1 @@ +4806,-245,28973,12039,20167,7913,-5324,16888,-4539,24348,10571,-4318,-3208,-2837,2494,30861,20574,29188,-1616,-3097,17105,16701,11352,1316,9785,26999,-719,-9576,-4816,30182,30925,-5456,19731,32010,-2101,-4697,23509,29799,28209,-2802,16362,-8777,16325,-2749,24292,-604,25271,-9418,32436,-5564,25963,29148,-7727,29858,18326,-6592,-7338,16321,-6791,-9178,1577,-4838,-6352,-6756,-4420,26968,-8533,-5678,10837,-3484,-2014,-4888,-981,28027,23508,-7678,20179,28688,10638,-5724,2681,-529,5858,19945,10430,-2581,-2072,-3330,-7771,-6560,-8727,27434,24752,23156,10285,548,32057,7476,-5959,11607,-4561,-4017,-4605,20499,-7241,7500,-3555,11146,-1612,30488,27122,3996,13472,25114,29848,-4760,-981,24476,-4960,24837,-3257,22982,13191,-3955,-5233,29437,22937,23918,11185,-440,-2995,-9084,-8772,-601,20145,-3650,17549,24778,-2909,-8807,32254,-4438,-5521,9829,15986,-4161,-855,13450,28629,-9503,-3147,-1024,-8448,-7554,15296,24930,18529,-8250,-9634,-3803,-3276,-756,20527,-1202,27634,-9136,-2111,10719,22020,-1168,-2403,6270,32685,1488,5118,-2063,22048,-8258,4149,24745,23820,-4486,-3052,-9482,-9571,-2233,14050,-6049,-4589,-9738,-85,-8170,-8754,21288,-786,-7612,31680,-9674,7678,24262,-5508,-8023,-406,-5263,10614,-8507,26855,22459,30971,-6126,22044,28578,-1270,-7551,28134,5137,13872,10712,-6958,15164,-2554,-3082,20708,4106,-3188,-9821,32705,32643,-4327,-3175,28657,-9794,4499,-6653,32732,-3990,-9709,2877,-5490,2357,-8846,6670,-1583,6928,21718,-8215,-7179,-5158,-8629,-7394,-9016,721,-6388,-2758,24605,16255,16369,24690,29977,-499,-2049,27088,28830,5232,-1906,-4994,9624,6800,6836,31099,-658,-1339,73,-780,16543,-3120,-4000,3521,28149,30215,-2401,8992,-1019,-7855,20481,12767,-3442,27456,-6508,1223,-8848,28990,-6687,-2392,25011,2811,-6523,-78,8440,-7757,18158,1137,6098,-1960,2997,19588,8999,-1415,22930,29461,6181,-5060,-2578,21809,21020,10365,21772,-4942,2232,-316,26459,-2289,-1402,-8831,21565,-277,-11
48,17577,-1622,20472,24293,-1880,-2847,-6510,-1570,15110,-7766,-8917,-8431,29180,288,28616,30469,26568,-3828,-5776,17015,-6587,-5997,-6386,-3981,-6668,20355,-1664,-7110,-4789,26009,28742,27207,27452,8952,788,572,-8842,27724,7488,10547,-1517,5274,19386,-8685,-1340,32427,-2958,-8201,12189,-3810,-2350,-1917,-4313,-4280,22657,-3886,-1382,30209,17568,29136,21390,5927,-9747,6533,28375,-7390,908,-2552,7713,1499,10255,-1920,13715,21734,-9696,-1510,26691,21850,11038,29196,-5656,16395,-8436,3752,20541,25371,-6938,-3517,15752,-3915,-5849,-8645,11133,8198,1011,-9319,18800,-9071,17136,-9761,24167,8776,-7042,5942,2819,8520,-253,-5508,6183,-2165,193,-9150,3333,14817,19952,-4948,18546,-2292,-4386,-6900,28890,18562,11672,3126,-615,-9023,16586,1050,-3585,-6918,-9803,-5619,-9159,-5995,19284,-5306,-3159,-3778,-7663,12243,350,2754,-463,1043,29858,-8039,-9370,-6053,-6384,32670,-2432,-1786,-1766,10451,13529,-4599,-6127,21344,-3346,-1155,13202,22723,12515,-6744,31069,-9166,29832,-7882,-2718,4443,-6817,-3214,-3404,-1902,21870,26487,-766,29935,-6076,15843,-8383,25722,25299,-5668,19,8822,-4539,-3583,-5933,-7537,10567,-3783,12372,12804,1340,420,21724,-1071,29879,3484,27616,17942,-9220,8547,22128,10579,794,19160,6623,21586,3087,-7750,18673,15009,-257,-7953,-3122,699,-2462,15169,13249,15554,27694,-6175,-8850,-901,-9682,20865,12969,30748,3155,-127,-3468,19614,5101,7370,-3407,-7735,11335,22452,-8050,31511,-5525,9929,-1529,7113,-2648,-2161,-9484,-4933,11112,-1813,13199,18142,-1778,-5112,-1900,-5392,-2733,-8731,-2769,-5254,23753,12664,-8760,-4376,-2997,-1060,10198,1854,16514,-3238,-7511,10074,-4568,29512,9068,-6676,-9775,19386,4857,9278,15301,-6062,-8197,17101,2543,29970,30565,-2478,-5457,-9409,23133,-9361,-3564,13059,-3378,19244,-9483,3800,14997,28872,15939,29119,-3511,10942,-8974,-8020,32537,-5041,32232,4510,-2188,24491,8694,4800,14202,21519,-5946,-2835,-8874,-7315,-8554,3863,3242,-698,25208,6219,22920,1760,30164,32510,-3003,30077,28244,15313,669,-1323,22144,8829,-2230,-8188,13863,14033,-428,-7598
,-8593,1980,-2858,-2639,-6649,8921,-1738,-4126,17623,16265,29550,-6558,20323,11205,-7729,13757,-652,-6792,11860,9415,2554,10029,26861,24672,2138,29021,-1547,22340,22831,18250,9095,28000,31485,18451,24776,-888,13759,514,15013,2169,16305,31003,-2248,18931,21661,-8236,14384,-435,5877,28947,-1736,26231,1570,-9797,11333,24757,-320,-9285,18497,20069,-1704,25679,12620,6405,-5803,-3267,3502,-7716,-5722,-5978,1025,28534,-6989,7264,-5544,29499,-6638,-2315,4824,28537,-5509,-2655,-4884,-455,20112,30096,8983,-863,23901,-2005,12521,5933,2902,8407,-6881,-586,-1735,2572,-364,20406,-6499,-2323,-7138,-1684,23515,1365,-4797,-6804,-5724,14417,-7013,-6822,-1448,-8014,-6388,21271,18077,-6078,-4447,-5219,-2565,-6089,-5230,28283,-335,-916,-244,6216,-7487,6971,-835,-2944,1103,19059,-2029,4234,-1040,21131,12258,2988,-729,28608,-8086,-3,31805,12205,-7547,-8840,27958,-8952,-478,12592,-8514,-5744,-8809,14394,30131,7003,30473,-8000,14022,-9170,-6830,27707,-6447,24447,-6477,9344,15916,29719,2583,-7262,30178,-6542,-5857,-8491,22706,18307,-7448,1998,12284,25036,3382,-2218,-4263,33,-7555,-9400,-5246,-1764,31765,-4926,-3208,-1630,20176,32692,-1229,7987,-8559,-5197,-9559,14788,24758,12241,-5729,31651,-3372,17225,-3278,24275,21110,1192,19985,2732,7293,22948,8119,-4128,-9054,-1957,-6222,31842,29309,22746,87,-8421,14618,3183,22118,28836,-6317,-9198,-7378,-798,14348,1588,10240,28225,-31,29964,-3186,-1504,23847,-3674,-469,27871,-748,28976,6544,-1005,-7114,-7524,-2560,27551,-316,30487,3284,11437,-3778,-2185,16566,-1975,-4325,22417,-4254,21345,11716,24347,-1923,8587,-8688,18346,1143,-5018,1786,22940,-1646,25088,-7098,30058,5696,-125,-8875,9379,7939,-5542,4065,-6306,31856,4108,19723,-1026,1265,2543,25174,7679,12836,25276,23501,-6183,30317,-4971,-4117,-5779,-2352,-1056,23343,32186,15805,31236,-828,-1613,5556,-701,-6778,-906,-3735,20865,462,-6105,14382,-7104,-1067,6293,-1993,-9075,31464,27159,-4839,20919,3871,693,-4031,-2555,17055,12340,19373,12591,23365,28354,-9300,-7177,966,-5778,-2152,8154,-4585,28612,-8919,
-4204,21871,-5529,-1602,13859,-6610,19490,-7150,31555,-7130,-1222,30423,-8161,1856,-4222,-5801,20190,-2511,2308,18322,-4910,-4013,10523,24997,-693,-7899,5225,-3753,-1403,-148,-6479,11809,10174,27696,19694,-13,3651,-2592,-6140,13609,-1605,31187,-1703,10243,-1735,3857,19152,-4445,-7708,-8279,-8942,-4513,22705,2366,-2659,16434,-7777,-8257,-4783,4673,23426,776,19546,-4115,-9202,-692,6420,-3901,30901,26602,26974,13202,7543,-5928,10918,-4910,30578,-6355,23470,-1075,13360,26238,-4888,20871,-2110,12012,13882,29641,-1543,760,29774,17024,4510,-8729,-5209,29609,22373,31379,-5518,-4452,8827,21536,5768,8680,1074,944,13973,-7235,24382,-4611,-7694,31905,16987,27698,-9688,-4338,24994,-6881,-6897,-3155,1195,-3165,7808,-2791,-7627,-487,-5958,-1656,13820,-8200,-238,-7744,1945,1179,28707,-5738,-39,15406,32675,26545,-78,-23,30891,14321,-1749,5444,-5940,27304,-6477,-4555,-8175,-5487,26561,-9441,13505,-4396,22259,27076,5580,-5263,18,9855,-2650,-5972,20961,-8958,15986,3622,27090,27268,-2289,15985,4273,29005,8586,25516,-2150,-8886,-4314,-7551,-2951,-4716,-1407,-3197,18277,17544,-6916,10590,-2889,18868,-1071,-5017,-6805,-6155,-9348,31116,11975,2083,20728,-9792,1269,-9825,-7727,-2685,30128,-1955,-5242,26,-8766,12915,-1467,-5788,2123,-115,-5296,-8427,-1742,-4048,21344,14471,21524,-4612,-9003,-1171,22869,-673,21057,25062,5693,26721,13796,27534,-5568,13789,6919,25473,27996,10108,-3347,-1856,-5578,-5626,-1899,17908,5947,-7160,29731,29211,5070,-8588,-1030,8336,-5955,23827,-2461,-5667,-5089,-4639,4423,26025,-4767,-7757,18786,7756,32356,27913,-1057,-2757,14479,6683,-441,6275,-1840,26769,31341,-8158,30360,-1153,-8597,22413,-5955,6326,11895,-1095,5144,-590,-7399,-444,-2774,-634,-358,-6473,-7086,32496,-3031,21912,3109,-1453,-7336,2963,11530,2132,28369,-3790,25840,-45,-6379,-1278,-4398,-7627,26292,26139,6986,13714,18967,32078,28090,-2227,29500,20340,-1894,29069,-3054,4541,-5812,-1269,-2990,429,17048,-9329,2398,-7018,-3684,7004,13588,-5350,8571,20865,-4304,-176,2276,-3688,27615,6047,-4781,-749,3069,-6975
,15299,-5414,-2473,28832,6220,-7141,-7970,22976,-632,7955,433,-4838,16294,-2425,-2845,-1901,5259,-5587,4633,-5839,-3334,20114,3621,7982,-1988,20033,-9712,-1720,11690,-1948,-6246,-3545,-9116,8823,-3364,2686,22557,-2440,-4804,3181,-196,-3762,3815,1528,-7349,15590,3455,5524,-4976,-2607,-8435,31252,23036,-1886,-6415,-6764,-8183,12876,-2047,-624,-6077,16766,23679,-1948,-4024,26942,8140,5917,-2923,26643,-8636,-9422,-7458,-9390,-1216,16833,-8520,-5907,-2677,-3426,-9014,17831,12749,-7251,-2389,-7218,24282,7489,-2691,9626,540,17373,28150,16938,-6415,19822,-4848,15053,14370,13213,24133,18230,10046,21434,4878,2772,-2896,-7255,-5252,-8458,-8096,-1172,31298,-5380,7156,582,6589,-9696,-4856,27106,9507,26342,-4700,21325,-7598,-4429,19277,-8227,-3135,15575,-5901,-9311,-3466,-1689,30585,-5917,-3006,31634,739,-3667,18698,15961,2500,-1065,21920,2116,-7777,28919,740,23686,-2276,20033,-2618,24264,22453,28803,-8573,-3849,16184,-492,6622,-2755,3328,-6419,-6988,12899,-7300,22768,-8346,-4456,8513,20513,19752,-9313,10114,26438,-2863,-4767,11262,-2128,28025,14918,-6769,23675,5522,-9640,26522,-1174,11300,-9593,-3326,-4289,-8893,7710,-7893,9274,-6012,-6694,-6954,240,6045,28449,-4693,-9150,2677,32102,-9800,22098,1595,-2197,28483,25322,-6565,-1099,-9170,21452,13563,12932,718,-9482,-8547,17185,8024,15473,-3639,-605,-7528,24396,-3739,30187,11631,-9141,26542,-2818,-5361,-2596,31547,24578,20835,3165,31624,6620,-9388,4734,10565,1217,-4301,-7759,25747,14303,16941,5308,29233,-6944,-6071,-5666,-4337,16131,-8591,17516,-2331,-1753,-8233,3448,-731,-6194,11607,-5977,-9781,7648,-9614,-1071,-8143,12215,11979,-3607,-9143,-6577,-9428,19172,-44,-6825,-5080,-3439,-8049,22069,-4033,-8731,5526,4891,-3934,-8329,2641,12305,-4539,2812,7672,-1037,17172,30679,-406,-6684,1131,-1284,13199,-2626,17845,19929,-4633,10218,-4113,-2149,-8422,920,-9075,13633,-8229,21586,20457,-453,-7801,-5621,8670,8038,-7541,29909,15894,31127,-6019,22784,26574,16106,19970,32059,24471,-9668,22936,31029,-1074,11125,3072,-5513,-7835,15319,7275,23223,
-7207,8720,-4871,21971,-3415,4155,12278,-9485,-5247,-7118,-8731,-1812,26428,-1593,-9121,-8378,-8259,20380,7933,-8251,31597,-5473,-1765,9191,2856,-5563,6933,-2644,-7256,-5473,14658,-9566,-1432,-6750,175,-3043,11018,29752,-4920,-4709,-3765,9461,-468,-3072,4406,30501,-9734,-5017,-4635,-1113,9063,31689,12176,24901,12706,-2898,-3298,-3201,32463,-4249,-5432,21182,-6628,-9813,-6286,10839,-4484,8298,-4412,14340,12078,-2590,-5194,17754,30004,27726,3637,9849,25279,18411,8599,-4396,-227,-2609,10957,-3972,19891,-2058,-6470,17689,15799,6100,1843,-846,25580,-9358,-6474,31852,6731,-6814,31934,30507,3039,11982,1737,-8700,19609,1927,-7499,-7069,-1772,-7311,-4305,-2411,23132,-9150,4909,19654,-9489,-1669,11134,-9360,11463,11910,17388,6739,16255,32351,10137,-6904,6937,-9750,-621,-6326,-5453,14622,-5935,16618,28193,-9716,24552,1354,3873,4420,-9523,17114,-1714,22587,12096,26,-4223,-7777,-372,7949,-2630,18036,-6147,32231,1382,-4183,17161,-2151,-7526,8138,15032,2263,-6172,31053,29818,25295,27225,1938,-4165,2156,27929,-3755,7762,5813,-5932,23743,-4415,2396,16180,16749,-3189,21481,-1223,1419,28968,15623,-7422,13088,-9718,12731,-2125,29828,22403,-1773,-8237,-8797,9190,26879,5310,-2652,-8206,7382,-5011,-6929,18225,-9764,-3073,4678,-164,-6137,12663,31219,-5768,-2141,-4174,2496,19848,31442,-5004,-27,-9663,-4620,17700,-2946,-1175,4309,-6300,-25,-1887,-9304,7494,26966,-9068,1683,-5334,20827,-1910,4740,-2956,21289,17078,-2126,-4639,-614,30436,29811,10457,32419,-771,7564,-5054,6715,7412,-4445,651,29073,-1591,-4957,9422,-9500,-4362,5235,-3488,-760,-1556,16302,25702,-8524,-6759,-1545,26533,18471,12385,-154,-7362,28687,-3227,22556,19070,10050,-7083,-4088,21437,9764,-243,4970,25700,18361,-2674,26160,31147,4992,27247,22664,471,2204,28247,27178,26193,24684,-5731,14997,-3343,-3982,31268,4494,12457,-643,-596,30356,20797,23261,17147,3686,25317,29751,-2518,14328,-3667,-4459,10448,30900,2508,-8717,30921,4755,32400,-6094,-3059,28883,15251,148,-4708,-1048,3993,-2982,-9812,4559,-1379,-6848,-8491,-540,-3571,-519,2
9312,31324,-4508,23557,-3583,20410,-6243,-3780,25262,-6845,-2455,-4875,-6404,-9594,-8046,14801,30495,32192,32588,-3258,10549,-7265,-7483,-6059,-2272,-8218,-2269,24723,25063,13492,15538,-4372,29125,32204,-6371,25174,-4285,11606,-34,-4658,16318,16085,-9665,6456,9780,14894,10892,23060,-8906,25002,-7377,-531,17293,25703,2854,11400,9978,31835,-9304,-6913,26499,29050,24806,17478,32024,29967,-2023,-9219,25002,-4332,13064,15485,32628,14011,-2698,-9070,-9004,-8761,-6783,-1392,-9474,8387,-4287,26645,-709,14671,24576,24397,7581,7406,-4805,-7576,-4045,-6437,11989,-7119,-7773,26413,-8099,-7078,9045,8432,-6574,3882,20691,31752,13082,955,-5037,30663,27442,11702,-5849,-7604,12415,-2730,1425,-2992,-8426,22238,-1881,-4282,18315,11046,26055,-8670,23009,-7544,29722,-6638,-8090,30703,-5521,-5812,-3581,10449,6495,23002,-6788,13299,27823,13575,934,380,11917,-9227,7794,21473,1994,25946,7058,18778,25543,23336,21809,7401,20520,-5197,938,-7711,-5420,29035,7509,1937,28305,1346,-3600,-151,-5230,-4527,20936,-610,-1407,-8503,-412,30113,-6254,-798,3281,-8319,-1234,22692,31397,-3104,-9597,21976,7246,-8513,-7812,-3068,-2457,-5698,-3403,25793,2094,25691,9991,-8685,1538,-7811,3713,28418,24769,28146,21044,-5174,-849,22168,6911,-8538,-5357,-5873,-2823,-4989,-5427,21920,-1256,12833,26480,31651,8181,-286,31691,32413,15331,21444,-5479,-7866,2326,28072,-7992,-6121,582,24266,20912,-9155,20625,9288,31840,-5634,4173,-351,-9057,-9072,-9207,21598,-9486,-4937,-8439,21735,15123,-2517,-8369,-164,-5509,-5090,-9165,-7381,-9718,-4177,-9762,11937,-6865,-2071,-1304,-9549,9056,-7461,-7973,32330,-1181,-1999,-7930,-2354,-5008,9309,-6961,3621,608,-1097,11646,5319,-5608,7245,-5420,20043,23562,-4992,-9538,-4089,-841,24200,-8936,10728,-408,10649,6196,24499,6452,5638,-5209,-3473,-730,18836,31054,-5871,-8709,1994,-6366,-1120,-7557,-1156,-6247,14568,10473,-3577,-2579,4536,-4906,21918,-1370,-2076,-9309,6551,-970,20753,-2257,-2344,-7211,-2911,-181,-4205,1097,-8720,7048,14660,17669,9764,-9775,18246,888,-5858,-9041,5813,20421,21598,2
1932,-6984,-4432,-3161,19188,22108,20754,16674,30521,8773,-586,-868,-1308,-4123,14391,3084,8041,-1502,-5520,-3904,-3478,10430,29199,6659,4561,21041,3977,-9625,-3244,-6748,24134,-8033,-6916,12296,6821,-2226,3718,-5689,-164,9860,-2986,-5281,-1846,-1234,7153,-4585,22927,24572,-2612,7312,11173,-1940,12212,-6515,29652,12741,20412,-1588,18636,21521,31569,-9717,-5044,19268,7381,-5385,-2047,24327,-8266,10980,-7210,-1101,24794,-6216,-6550,17098,-3177,25735,1549,-8734,29874,17445,-954,1205,32415,-4457,1970,7956,19271,17831,-4030,9775,20989,30554,-7160,-5308,-2802,-4258,26545,10312,-7519,28728,1800,-1617,-6631,-388,22117,-8899,26512,10618,6596,5704,3663,13117,-6862,-6058,-6048,7174,4340,-5140,-3617,-2600,11411,31524,-4389,-8094,-5960,23585,16543,-8742,-1500,10955,-2367,29269,-5535,-4554,-5379,27670,-3051,-285,12704,-7424,-1063,3804,31849,3653,26418,-4995,29978,-4439 diff --git a/tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu2_input0_int16.csv b/tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu2_input0_int16.csv new file mode 100644 index 0000000..072b83c --- /dev/null +++ b/tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu2_input0_int16.csv @@ -0,0 +1 @@ 
+4806,-816,28973,12039,20167,7913,-17746,16888,-15128,24348,10571,-14393,-10694,-9457,2494,30861,20574,29188,-5387,-10324,17105,16701,11352,1316,9785,26999,-2396,-31920,-16053,30182,30925,-18186,19731,32010,-7004,-15657,23509,29799,28209,-9338,16362,-29255,16325,-9164,24292,-2012,25271,-31393,32436,-18546,25963,29148,-25757,29858,18326,-21972,-24458,16321,-22635,-30594,1577,-16125,-21174,-22519,-14734,26968,-28443,-18927,10837,-11613,-6714,-16291,-3270,28027,23508,-25592,20179,28688,10638,-19078,2681,-1763,5858,19945,10430,-8604,-6905,-11100,-25904,-21867,-29089,27434,24752,23156,10285,548,32057,7476,-19864,11607,-15201,-13389,-15350,20499,-24135,7500,-11848,11146,-5374,30488,27122,3996,13472,25114,29848,-15867,-3270,24476,-16532,24837,-10856,22982,13191,-13183,-17442,29437,22937,23918,11185,-1467,-9983,-30280,-29238,-2002,20145,-12166,17549,24778,-9697,-29355,32254,-14793,-18401,9829,15986,-13868,-2850,13450,28629,-31676,-10489,-3414,-28159,-25180,15296,24930,18529,-27500,-32111,-12676,-10920,-2520,20527,-4005,27634,-30452,-7036,10719,22020,-3891,-8009,6270,32685,1488,5118,-6876,22048,-27525,4149,24745,23820,-14951,-10171,-31606,-31903,-7441,14050,-20162,-15295,-32459,-283,-27234,-29178,21288,-2618,-25371,31680,-32247,7678,24262,-18359,-26743,-1352,-17543,10614,-28355,26855,22459,30971,-20418,22044,28578,-4234,-25168,28134,5137,13872,10712,-23194,15164,-8513,-10272,20708,4106,-10625,-32736,32705,32643,-14423,-10581,28657,-32646,4499,-22176,32732,-13298,-32362,2877,-18298,2357,-29485,6670,-5276,6928,21718,-27382,-23929,-17194,-28761,-24647,-30054,721,-21292,-9191,24605,16255,16369,24690,29977,-1663,-6830,27088,28830,5232,-6351,-16646,9624,6800,6836,31099,-2193,-4461,73,-2598,16543,-10398,-13331,3521,28149,30215,-8001,8992,-3396,-26181,20481,12767,-11472,27456,-21694,1223,-29491,28990,-22288,-7972,25011,2811,-21744,-259,8440,-25856,18158,1137,6098,-6531,2997,19588,8999,-4717,22930,29461,6181,-16867,-8591,21809,21020,10365,21772,-16474,2232,-1052,26459,-7630,-4671,-29
435,21565,-921,-3826,17577,-5405,20472,24293,-6265,-9489,-21700,-5233,15110,-25887,-29721,-28104,29180,288,28616,30469,26568,-12759,-19251,17015,-21956,-19990,-21287,-13269,-22227,20355,-5546,-23700,-15964,26009,28742,27207,27452,8952,788,572,-29471,27724,7488,10547,-5056,5274,19386,-28950,-4465,32427,-9859,-27337,12189,-12698,-7832,-6388,-14376,-14266,22657,-12954,-4606,30209,17568,29136,21390,5927,-32488,6533,28375,-24634,908,-8505,7713,1499,10255,-6398,13715,21734,-32318,-5031,26691,21850,11038,29196,-18853,16395,-28118,3752,20541,25371,-23125,-11724,15752,-13049,-19496,-28816,11133,8198,1011,-31063,18800,-30236,17136,-32537,24167,8776,-23473,5942,2819,8520,-842,-18360,6183,-7217,193,-30500,3333,14817,19952,-16491,18546,-7638,-14619,-22999,28890,18562,11672,3126,-2048,-30077,16586,1050,-11949,-23060,-32675,-18729,-30529,-19984,19284,-17687,-10530,-12593,-25543,12243,350,2754,-1541,1043,29858,-26796,-31234,-20175,-21280,32670,-8105,-5953,-5885,10451,13529,-15328,-20422,21344,-11154,-3849,13202,22723,12515,-22478,31069,-30552,29832,-26271,-9059,4443,-22723,-10712,-11347,-6339,21870,26487,-2551,29935,-20254,15843,-27943,25722,25299,-18893,19,8822,-15128,-11942,-19777,-25124,10567,-12609,12372,12804,1340,420,21724,-3568,29879,3484,27616,17942,-30732,8547,22128,10579,794,19160,6623,21586,3087,-25833,18673,15009,-857,-26508,-10406,699,-8206,15169,13249,15554,27694,-20583,-29498,-3001,-32272,20865,12969,30748,3155,-424,-11560,19614,5101,7370,-11355,-25781,11335,22452,-26834,31511,-18416,9929,-5095,7113,-8826,-7203,-31612,-16441,11112,-6042,13199,18142,-5925,-17039,-6334,-17974,-9110,-29104,-9230,-17513,23753,12664,-29199,-14587,-9989,-3534,10198,1854,16514,-10792,-25036,10074,-15225,29512,9068,-22252,-32583,19386,4857,9278,15301,-20207,-27324,17101,2543,29970,30565,-8259,-18189,-31361,23133,-31201,-11880,13059,-11258,19244,-31609,3800,14997,28872,15939,29119,-11702,10942,-29911,-26733,32537,-16803,32232,4510,-7294,24491,8694,4800,14202,21519,-19820,-9448,-29580,-24382,-
28514,3863,3242,-2325,25208,6219,22920,1760,30164,32510,-10009,30077,28244,15313,669,-4408,22144,8829,-7434,-27291,13863,14033,-1426,-25325,-28642,1980,-9525,-8796,-22164,8921,-5793,-13751,17623,16265,29550,-21858,20323,11205,-25764,13757,-2174,-22639,11860,9415,2554,10029,26861,24672,2138,29021,-5156,22340,22831,18250,9095,28000,31485,18451,24776,-2959,13759,514,15013,2169,16305,31003,-7493,18931,21661,-27452,14384,-1449,5877,28947,-5787,26231,1570,-32656,11333,24757,-1067,-30948,18497,20069,-5680,25679,12620,6405,-19341,-10888,3502,-25720,-19071,-19927,1025,28534,-23297,7264,-18479,29499,-22125,-7716,4824,28537,-18361,-8850,-16280,-1516,20112,30096,8983,-2875,23901,-6681,12521,5933,2902,8407,-22935,-1954,-5782,2572,-1213,20406,-21664,-7742,-23791,-5614,23515,1365,-15989,-22679,-19080,14417,-23376,-22739,-4826,-26714,-21293,21271,18077,-20260,-14824,-17395,-8548,-20297,-17434,28283,-1115,-3054,-813,6216,-24955,6971,-2781,-9812,1103,19059,-6764,4234,-3467,21131,12258,2988,-2430,28608,-26953,-8,31805,12205,-25157,-29466,27958,-29840,-1593,12592,-28379,-19146,-29362,14394,30131,7003,30473,-26667,14022,-30565,-22766,27707,-21488,24447,-21588,9344,15916,29719,2583,-24205,30178,-21806,-19524,-28301,22706,18307,-24825,1998,12284,25036,3382,-7394,-14210,33,-25182,-31334,-17487,-5879,31765,-16420,-10694,-5433,20176,32692,-4095,7987,-28529,-17321,-31864,14788,24758,12241,-19097,31651,-11240,17225,-10926,24275,21110,1192,19985,2732,7293,22948,8119,-13758,-30178,-6523,-20738,31842,29309,22746,87,-28069,14618,3183,22118,28836,-21057,-30659,-24592,-2660,14348,1588,10240,28225,-101,29964,-10620,-5012,23847,-12246,-1561,27871,-2494,28976,6544,-3350,-23713,-25080,-8532,27551,-1051,30487,3284,11437,-12593,-7281,16566,-6584,-14416,22417,-14180,21345,11716,24347,-6409,8587,-28958,18346,1143,-16725,1786,22940,-5487,25088,-23658,30058,5696,-415,-29583,9379,7939,-18472,4065,-21018,31856,4108,19723,-3418,1265,2543,25174,7679,12836,25276,23501,-20608,30317,-16568,-13722,-19261,-7840,-3518,
23343,32186,15805,31236,-2760,-5377,5556,-2337,-22591,-3019,-12449,20865,462,-20349,14382,-23679,-3556,6293,-6641,-30250,31464,27159,-16128,20919,3871,693,-13437,-8515,17055,12340,19373,12591,23365,28354,-30999,-23922,966,-19259,-7172,8154,-15284,28612,-29730,-14012,21871,-18428,-5339,13859,-22031,19490,-23834,31555,-23767,-4071,30423,-27201,1856,-14073,-19335,20190,-8369,2308,18322,-16366,-13375,10523,24997,-2308,-26330,5225,-12508,-4676,-494,-21597,11809,10174,27696,19694,-41,3651,-8639,-20466,13609,-5349,31187,-5677,10243,-5784,3857,19152,-14817,-25694,-27595,-29805,-15043,22705,2366,-8863,16434,-25924,-27522,-15942,4673,23426,776,19546,-13716,-30672,-2307,6420,-13001,30901,26602,26974,13202,7543,-19760,10918,-16365,30578,-21184,23470,-3581,13360,26238,-16291,20871,-7032,12012,13882,29641,-5142,760,29774,17024,4510,-29096,-17363,29609,22373,31379,-18391,-14838,8827,21536,5768,8680,1074,944,13973,-24115,24382,-15370,-25646,31905,16987,27698,-32293,-14460,24994,-22935,-22988,-10515,1195,-10550,7808,-9302,-25423,-1624,-19860,-5519,13820,-27332,-794,-25814,1945,1179,28707,-19125,-128,15406,32675,26545,-258,-77,30891,14321,-5829,5444,-19799,27304,-21590,-15183,-27248,-18289,26561,-31469,13505,-14654,22259,27076,5580,-17542,18,9855,-8832,-19907,20961,-29859,15986,3622,27090,27268,-7628,15985,4273,29005,8586,25516,-7166,-29620,-14379,-25168,-9835,-15718,-4688,-10656,18277,17544,-23053,10590,-9628,18868,-3570,-16723,-22683,-20515,-31159,31116,11975,2083,20728,-32639,1269,-32750,-25755,-8950,30128,-6515,-17471,26,-29220,12915,-4888,-19291,2123,-383,-17654,-28088,-5805,-13493,21344,14471,21524,-15374,-30008,-3901,22869,-2243,21057,25062,5693,26721,13796,27534,-18560,13789,6919,25473,27996,10108,-11155,-6185,-18594,-18753,-6328,17908,5947,-23866,29731,29211,5070,-28627,-3434,8336,-19850,23827,-8201,-18888,-16962,-15461,4423,26025,-15888,-25855,18786,7756,32356,27913,-3523,-9190,14479,6683,-1468,6275,-6131,26769,31341,-27194,30360,-3844,-28656,22413,-19848,6326,11895,-3648,5
144,-1966,-24661,-1478,-9245,-2111,-1193,-21576,-23619,32496,-10101,21912,3109,-4844,-24452,2963,11530,2132,28369,-12634,25840,-149,-21262,-4258,-14658,-25423,26292,26139,6986,13714,18967,32078,28090,-7422,29500,20340,-6311,29069,-10179,4541,-19371,-4228,-9967,429,17048,-31095,2398,-23394,-12278,7004,13588,-17834,8571,20865,-14345,-587,2276,-12293,27615,6047,-15936,-2496,3069,-23250,15299,-18047,-8241,28832,6220,-23803,-26566,22976,-2105,7955,433,-16125,16294,-8082,-9484,-6337,5259,-18621,4633,-19461,-11112,20114,3621,7982,-6627,20033,-32374,-5733,11690,-6491,-20820,-11817,-30386,8823,-11212,2686,22557,-8134,-16013,3181,-653,-12540,3815,1528,-24495,15590,3455,5524,-16587,-8689,-28115,31252,23036,-6285,-21381,-22546,-27276,12876,-6822,-2080,-20256,16766,23679,-6492,-13412,26942,8140,5917,-9744,26643,-28787,-31407,-24859,-31298,-4052,16833,-28399,-19689,-8923,-11418,-30045,17831,12749,-24169,-7963,-24059,24282,7489,-8969,9626,540,17373,28150,16938,-21384,19822,-16159,15053,14370,13213,24133,18230,10046,21434,4878,2772,-9654,-24183,-17505,-28193,-26986,-3906,31298,-17933,7156,582,6589,-32319,-16185,27106,9507,26342,-15666,21325,-25327,-14764,19277,-27422,-10450,15575,-19668,-31036,-11553,-5629,30585,-19724,-10020,31634,739,-12224,18698,15961,2500,-3548,21920,2116,-25922,28919,740,23686,-7585,20033,-8725,24264,22453,28803,-28577,-12830,16184,-1640,6622,-9182,3328,-21397,-23294,12899,-24332,22768,-27820,-14852,8513,20513,19752,-31041,10114,26438,-9544,-15890,11262,-7094,28025,14918,-22563,23675,5522,-32133,26522,-3913,11300,-31976,-11086,-14297,-29644,7710,-26310,9274,-20038,-22311,-23180,240,6045,28449,-15644,-30498,2677,32102,-32667,22098,1595,-7322,28483,25322,-21882,-3664,-30566,21452,13563,12932,718,-31607,-28490,17185,8024,15473,-12128,-2015,-25094,24396,-12462,30187,11631,-30468,26542,-9393,-17869,-8651,31547,24578,20835,3165,31624,6620,-31291,4734,10565,1217,-14335,-25861,25747,14303,16941,5308,29233,-23146,-20235,-18887,-14455,16131,-28636,17516,-7769,-5843,-274
44,3448,-2435,-20647,11607,-19921,-32603,7648,-32046,-3570,-27142,12215,11979,-12023,-30476,-21923,-31425,19172,-146,-22749,-16933,-11464,-26828,22069,-13443,-29104,5526,4891,-13114,-27764,2641,12305,-15128,2812,7672,-3456,17172,30679,-1352,-22278,1131,-4278,13199,-8753,17845,19929,-15441,10218,-13710,-7164,-28073,920,-30249,13633,-27429,21586,20457,-1510,-26004,-18735,8670,8038,-25137,29909,15894,31127,-20064,22784,26574,16106,19970,32059,24471,-32227,22936,31029,-3579,11125,3072,-18376,-26115,15319,7275,23223,-24024,8720,-16236,21971,-11383,4155,12278,-31617,-17490,-23727,-29101,-6038,26428,-5310,-30402,-27925,-27530,20380,7933,-27501,31597,-18242,-5884,9191,2856,-18541,6933,-8813,-24185,-18243,14658,-31886,-4771,-22499,175,-10144,11018,29752,-16399,-15696,-12550,9461,-1560,-10238,4406,30501,-32447,-16721,-15450,-3709,9063,31689,12176,24901,12706,-9660,-10993,-10670,32463,-14162,-18106,21182,-22091,-32709,-20952,10839,-14946,8298,-14705,14340,12078,-8633,-17312,17754,30004,27726,3637,9849,25279,18411,8599,-14653,-755,-8695,10957,-13238,19891,-6858,-21566,17689,15799,6100,1843,-2820,25580,-31194,-21578,31852,6731,-22714,31934,30507,3039,11982,1737,-29000,19609,1927,-24996,-23561,-5906,-24368,-14350,-8037,23132,-30500,4909,19654,-31630,-5564,11134,-31200,11463,11910,17388,6739,16255,32351,10137,-23014,6937,-32498,-2068,-21087,-18177,14622,-19781,16618,28193,-32386,24552,1354,3873,4420,-31744,17114,-5713,22587,12096,26,-14077,-25921,-1238,7949,-8767,18036,-20488,32231,1382,-13941,17161,-7169,-25087,8138,15032,2263,-20574,31053,29818,25295,27225,1938,-13881,2156,27929,-12515,7762,5813,-19772,23743,-14717,2396,16180,16749,-10630,21481,-4076,1419,28968,15623,-24739,13088,-32392,12731,-7082,29828,22403,-5908,-27456,-29323,9190,26879,5310,-8839,-27351,7382,-16703,-23095,18225,-32547,-10241,4678,-547,-20456,12663,31219,-19226,-7137,-13913,2496,19848,31442,-16678,-88,-32208,-15400,17700,-9819,-3917,4309,-21000,-84,-6290,-31014,7494,26966,-30225,1683,-17779,20827,-6366,4740,
-9852,21289,17078,-7086,-15463,-2047,30436,29811,10457,32419,-2569,7564,-16846,6715,7412,-14815,651,29073,-5301,-16522,9422,-31665,-14539,5235,-11627,-2531,-5187,16302,25702,-28412,-22530,-5148,26533,18471,12385,-511,-24539,28687,-10756,22556,19070,10050,-23608,-13627,21437,9764,-808,4970,25700,18361,-8914,26160,31147,4992,27247,22664,471,2204,28247,27178,26193,24684,-19101,14997,-11143,-13271,31268,4494,12457,-2142,-1986,30356,20797,23261,17147,3686,25317,29751,-8391,14328,-12224,-14864,10448,30900,2508,-29056,30921,4755,32400,-20314,-10196,28883,15251,148,-15692,-3492,3993,-9938,-32706,4559,-4595,-22827,-28301,-1798,-11904,-1730,29312,31324,-15026,23557,-11941,20410,-20808,-12598,25262,-22815,-8181,-16248,-21347,-31978,-26819,14801,30495,32192,32588,-10858,10549,-24215,-24944,-20196,-7573,-27391,-7564,24723,25063,13492,15538,-14571,29125,32204,-21235,25174,-14282,11606,-112,-15525,16318,16085,-32215,6456,9780,14894,10892,23060,-29686,25002,-24589,-1770,17293,25703,2854,11400,9978,31835,-31014,-23044,26499,29050,24806,17478,32024,29967,-6741,-30728,25002,-14438,13064,15485,32628,14011,-8992,-30232,-30013,-29202,-22608,-4638,-31578,8387,-14288,26645,-2363,14671,24576,24397,7581,7406,-16015,-25254,-13484,-21457,11989,-23730,-25908,26413,-26995,-23593,9045,8432,-21912,3882,20691,31752,13082,955,-16789,30663,27442,11702,-19495,-25345,12415,-9099,1425,-9972,-28087,22238,-6269,-14273,18315,11046,26055,-28898,23009,-25145,29722,-22126,-26965,30703,-18401,-19372,-11935,10449,6495,23002,-22626,13299,27823,13575,934,380,11917,-30756,7794,21473,1994,25946,7058,18778,25543,23336,21809,7401,20520,-17324,938,-25701,-18067,29035,7509,1937,28305,1346,-11998,-502,-17434,-15089,20936,-2032,-4689,-28343,-1374,30113,-20846,-2659,3281,-27728,-4111,22692,31397,-10346,-31989,21976,7246,-28376,-26040,-10225,-8188,-18991,-11343,25793,2094,25691,9991,-28950,1538,-26036,3713,28418,24769,28146,21044,-17247,-2828,22168,6911,-28458,-17855,-19576,-9410,-16629,-18090,21920,-4187,12833,26480,31651
,8181,-953,31691,32413,15331,21444,-18262,-26218,2326,28072,-26638,-20402,582,24266,20912,-30515,20625,9288,31840,-18778,4173,-1168,-30190,-30238,-30690,21598,-31618,-16457,-28129,21735,15123,-8390,-27896,-547,-18362,-16965,-30550,-24601,-32392,-13921,-32539,11937,-22881,-6904,-4345,-31830,9056,-24869,-26577,32330,-3937,-6662,-26432,-7847,-16694,9309,-23204,3621,608,-3657,11646,5319,-18693,7245,-18066,20043,23562,-16638,-31793,-13630,-2801,24200,-29786,10728,-1359,10649,6196,24499,6452,5638,-17364,-11576,-2431,18836,31054,-19570,-29029,1994,-21220,-3733,-25189,-3852,-20824,14568,10473,-11922,-8597,4536,-16351,21918,-4567,-6919,-31030,6551,-3233,20753,-7522,-7814,-24037,-9704,-601,-14015,1097,-29065,7048,14660,17669,9764,-32584,18246,888,-19527,-30136,5813,20421,21598,21932,-23279,-14771,-10535,19188,22108,20754,16674,30521,8773,-1953,-2893,-4360,-13743,14391,3084,8041,-5006,-18398,-13011,-11593,10430,29199,6659,4561,21041,3977,-32084,-10813,-22493,24134,-26777,-23051,12296,6821,-7419,3718,-18963,-546,9860,-9952,-17602,-6152,-4112,7153,-15282,22927,24572,-8707,7312,11173,-6466,12212,-21717,29652,12741,20412,-5293,18636,21521,31569,-32388,-16814,19268,7381,-17948,-6821,24327,-27554,10980,-24034,-3670,24794,-20719,-21832,17098,-10589,25735,1549,-29114,29874,17445,-3179,1205,32415,-14855,1970,7956,19271,17831,-13431,9775,20989,30554,-23865,-17691,-9339,-14194,26545,10312,-25063,28728,1800,-5389,-22102,-1293,22117,-29663,26512,10618,6596,5704,3663,13117,-22872,-20194,-20159,7174,4340,-17132,-12055,-8666,11411,31524,-14628,-26978,-19867,23585,16543,-29140,-5000,10955,-7888,29269,-18450,-15178,-17930,27670,-10169,-948,12704,-24747,-3543,3804,31849,3653,26418,-16649,29978,-14795 diff --git a/tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu3.tflite b/tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu3.tflite new file mode 100644 index 0000000..64ddfd5 Binary files /dev/null and 
b/tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu3.tflite differ diff --git a/tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu3_golden_int16.csv b/tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu3_golden_int16.csv new file mode 100644 index 0000000..dd6364b --- /dev/null +++ b/tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu3_golden_int16.csv @@ -0,0 +1 @@ +32767,-21293,-14505,-21562,9055,-22403,-7472,-9292,32767,32767,-11743,32767,-12571,32767,32767,32767,-14208,31371,9031,8010,13881,-7432,-21512,32767,-5490,928,-6346,-23147,26633,-20614,5236,13826,32767,29128,-18238,1815,23594,32767,31778,32767,32767,32767,-4639,9117,14537,32767,-18368,24031,32767,-10213,-601,-21474,5768,-7797,18117,-18142,32767,32767,32767,-12674,32767,7731,32767,-23334,32767,-5052,17289,-15061,-15951,8308,-19743,32767,32767,32767,-7571,-8941,-847,4890,-5056,32767,32767,3454,-19844,-22465,32767,32767,-7622,2975,-16516,-64,32767,-20332,32767,32767,32767,-16600,-16169,32767,32767,-18322,32767,32767,-22437,32767,-18534,-16476,20195,4528,12318,-22943,28143,-16225,19429,21764,-22224,-5804,-678,4499,32767,12144,32767,-8620,-14121,-22764,-5474,32767,6662,32767,20930,-21391,-18765,-11700,15283,12588,32767,-18487,-10627,1586,32767,-1840,32767,25686,-4103,-6655,32767,-4150,-18691,-22495,32767,32767,32767,-9118,-12129,32767,-6398,6214,31301,9992,-6350,-22926,-13560,32767,-2412,-12557,-2339,-22841,-8255,-11410,-19795,-17934,32767,-2196,19107,-12587,-10676,32767,-6783,32767,-13131,17032,-20768,32767,-12732,32767,-3044,-2814,-3638,-6509,32767,32767,-76,-8951,32767,24789,-10741,32767,-21500,32096,-5029,-20006,32767,-21936,-3464,-6714,-15815,23484,-22213,-13533,-3977,-9745,-6484,32767,-10787,-4108,-15586,1388,-5885,32767,-15309,-7346,-17073,19751,-3536,32767,4346,-16038,-1295,32767,-3560,32767,30576,-2826,-16688,-18629,-7279,32767,2128,-20138,32767,32767,-6648,-8377,-4538,-17164,32767,-14682,22239,-4004,32767,-8429,-21784,32767,-110
74,32767,-17929,32767,-7609,-16373,32767,-13594,32767,32767,-1744,-11613,32767,32767,32767,-11154,32767,-4435,32767,-1372,-11791,-12488,-16920,32767,32767,32767,-22,-8342,23243,-16181,5673,15386,-19742,-15664,-18200,28262,32767,32767,-9607,32767,-6548,32767,32767,32767,-11923,32767,-14147,29820,-14833,31442,-5363,32767,-7916,32767,-10254,21962,-23416,-21481,-15070,32767,-2636,25204,-17202,32767,18857,15830,32767,-16714,-5488,-11418,-9481,-6189,32767,-11728,32767,-15834,-12301,8065,22747,5336,32767,22187,3981,17948,-6947,-8445,32767,-16203,-14778,32767,-1707,-809,32767,-8771,-3324,-2527,1207,32767,32767,-5903,-588,-9712,-1778,8413,22175,-5182,32767,-16318,-21275,32767,32767,32767,-13869,-4104,11350,-1359,32767,32767,-18775,-13316,32767,-6645,32767,-9921,-22785,-6357,-8993,32767,32767,32767,27077,-23096,-11725,26893,-11977,-7976,-7889,-9735,32767,-7566,-906,18415,-18653,32767,24214,-15355,-15488,-16287,9785,32767,32767,32767,32767,32767,-10958,8623,32767,-500,32767,-8002,32767,32767,-5024,-15763,5992,-4503,-4654,-2934,-3170,32767,20600,-8665,-3497,-20891,32096,-21999,32048,-13579,32767,-1227,-13117,-6260,21533,-21514,32767,-16357,-2495,-1005,-3885,-15508,-10208,32767,32767,-19421,-4250,18678,-2525,-10093,-18027,27988,-7298,32767,16018,-14851,-21504,-17936,-6136,2433,-17324,25409,-6618,16311,-14167,32767,-9719,-19266,-11554,22852,-19799,-5452,-4686,-4035,32767,3693,1291,17626,-20989,-22455,32767,9148,-5008,32767,32767,-9289,32767,32767,9437,32767,-16478,-3225,32767,-19941,32767,-4715,32767,32767,-5643,-3127,-12607,9859,-11398,32767,32767,-13271,-5469,-11580,32767,-8775,32767,32767,32767,-12855,-20588,-7463,-7965,32767,-4634,20906,13246,-7962,-7966,32767,-23422,32767,2667,32767,20891,-632,-17711,32767,27780,32767,-3874,14854,32767,32767,-22421,1946,-21245,-10334,16760,19148,32767,-15847,32767,32767,32767,32767,32767,5758,-23367,-2381,5861,-6843,-17654,-7183,-10364,-16519,-18404,-10438,32767,-2671,32767,-9272,-8394,19703,-15109,32767,20047,-15083,-19001,6207,-8084,-6690,
32767,21657,-19092,-7566,-10728,16617,29493,-6093,-9807,21643,-16544,-16985,-8386,-5669,11149,1050,32723,-20473,32767,20526,-21822,16870,-12419,-19650,-16809,17857,-20258,-21780,-2510,-9418,32767,-20322,-6182,8688,32767,-9191,32767,-19470,-7504,4415,-15117,8475,-7365,32767,-17727,32767,32767,-2502,32767,32767,32767,-8274,-19959,31914,32767,27237,32767,13399,-15115,-2598,-20865,32767,32767,-13328,32767,-696,32767,-13240,-10867,-3942,-21802,32767,-2347,32767,-7127,32767,-20908,-12138,-15122,-877,32767,1794,-3811,14553,-14524,19687,-3557,-20775,-18787,5269,32767,32767,-14314,-13276,-4068,32767,32767,-11980,12542,32767,-13616,32767,32767,18873,-15732,32767,-22866,32767,222,32767,32767,5606,32767,-10234,32767,-6879,13764,-14560,32267,32767,-5000,-7699,32767,-18369,-2992,24350,26292,-3763,-9906,-21580,23289,7557,-18020,32767,-15092,30870,32767,32767,32767,32767,32767,-7378,-4728,-6893,-11469,-19691,-14972,11510,-9705,-23088,-5857,-18044,3750,-5217,-22048,19925,-14783,32767,32767,28565,12571,-3081,32767,26552,-20453,-732,-8815,-18648,18625,32767,32767,32767,-11353,32767,32767,30020,-2956,30555,-13138,-17939,-21374,-19703,32767,32767,27132,-16462,32767,-1800,-14465,-15019,32767,24012,-11343,-18004,-17790,-12434,32767,32767,-229,-21444,-13751,-20982,14429,24990,-3111,28336,4747,-17762,32767,-18845,25662,-19564,-2707,32767,10126,32767,-13121,32767,32767,32767,-22828,-19478,32767,-16441,32767,32767,32767,-13781,14926,4661,-13191,32767,-13846,32767,7605,-17292,-18108,3394,-18271,19458,32767,-5906,-2758,32767,10159,6286,7671,-20883,32767,-5985,-19414,-22639,-17285,1844,-3595,-1417,-21504,32767,3311,-3980,32767,32767,32767,-8392,-19265,32767,-12958,32767,-5699,32767,-1729,-12372,26590,-6182,-884,32767,-9756,-20251,-3018,32767,32767,32767,-16041,32767,32767,-13054,-1486,29789,13754,-13260,-19617,-15095,-21698,-16781,6615,-8561,11638,-2197,-15618,-21176,3976,31929,32767,-2686,-4584,-10149,-12177,-14819,32767,-2430,32767,31194,-4581,-4281,32767,32767,7702,32767,-398,-6629,-3107,3276
7,-11116,-7452,32767,-16992,32485,-14884,32767,26127,28914,-5991,-20514,-10148,32767,32767,-5628,-5555,-21779,32767,-1081,-17575,-6804,-9015,-10244,-10473,14627,-5863,-20603,-21422,10491,23265,2359,7259,32767,18093,32767,-17490,32767,32767,32767,2314,-2800,-15646,-18475,-20399,-17289,26511,-803,-17688,-912,20305,-8581,-19613,-22649,9420,-8480,-14931,-2778,-10834,-20492,-6881,-3764,8983,-2865,-10369,14952,32767,-21343,-12163,32767,32767,9787,-21603,29679,312,32587,-23218,212,-20692,3676,32767,-4231,-4328,32767,-5643,3778,-2292,-8621,-18256,16264,32767,-6586,32767,10207,32767,-11791,-18867,17588,26080,26774,-21079,32767,-22232,-6272,-6840,8483,-21345,-14787,32767,-16723,-1337,19384,32767,32767,-3749,32767,32767,4599,32767,-13019,-12673,-7695,32767,-17715,-16303,32652,-11796,16846,32767,32767,-7674,1522,22051,32767,-9807,32767,-13439,-15294,5026,-6076,32767,-21864,-403,-22308,-21031,-21499,12688,-21751,-18497,-17558,32767,-19503,-21522,32767,-22685,32767,1073,-21527,32767,24830,-22536,-11064,32767,9773,6147,-3383,32767,-2331,32767,32767,2705,-18517,-18288,32767,32767,32767,32767,-19524,14322,-16122,-15790,32767,-19061,-8336,-10442,32767,32767,-20963,32767,21120,-20536,-10025,-5335,4559,-10018,32767,-13768,-798,-12036,-19381,27614,-3119,-11184,32767,-17181,-2425,-9985,-12245,-20135,-15422,32767,30569,15887,-12848,32767,-14673,-1224,32767,32767,-11330,32767,32767,32767,1632,32767,32767,26969,-2248,-12063,-12433,3051,-17674,32767,32767,19723,32767,12547,32767,29460,-392,32767,10016,-5118,-7251,32767,30347,-4778,-3530,32767,32767,-21399,-16220,21891,32767,-12272,16443,-512,32767,-15405,32767,32767,-7490,-3667,32767,-20419,-2683,-14763,-17695,-5207,-22723,32767,18704,-132,32767,-5481,32767,-10854,-2973,-14437,30404,-7947,-13801,32767,32767,6345,-19867,32767,-19222,-4931,7929,7612,-19,32767,-12432,32767,26643,26791,-19223,32767,27988,-19623,32767,-18477,32767,-10737,-16522,-1468,-3199,32767,-23009,-15708,-19920,-17218,32767,32767,32767,32767,-324,-13884,14651,32767,-11569,19
529,-8548,-16071,-11303,23298,-16324,-9153,-5615,-15817,26311,32767,-15619,-960,20689,-22159,32767,-22755,-7402,32767,29250,-15359,32767,14382,22647,-6279,-9517,-2065,32027,-12225,-7244,-18930,32767,32767,32767,-10088,3786,-5726,-7829,-393,-3409,32767,32767,-14361,23045,32767,-6161,-5486,32767,-4209,-14265,32767,-3624,-10791,9000,-15812,-18366,-5751,-2030,32767,-23210,-11494,-2224,-781,32767,-12101,32767,24959,-2266,-12380,-20310,32767,1966,-18412,32767,-20098,5701,-18268,13165,31559,-15981,32767,-17996,29930,48,-6450,-21359,32767,-7221,-15804,32767,-4,-6033,13218,32767,-15758,-17240,7788,32767,-20035,32767,-13616,12032,-20865,32767,-5242,-17111,-14197,1512,12096,-12372,2834,-11471,-21092,32767,25939,-23203,32767,32767,-2150,32767,-19068,30259,-1553,32767,32767,1639,10064,-9595,32767,-15569,32767,-7367,32767,-14612,-6688,-17090,-18172,32767,32767,32767,32767,-16848,-3980,7760,-10578,-6914,32767,-22999,32767,-17213,-18508,-4367,-23299,-15476,27229,32767,32767,-11175,7996,-23004,-14311,-6188,32767,-4323,32767,-1310,32767,-16337,24682,-21545,24928,-12913,24672,19458,32767,-20903,-6674,-15549,-2108,-15666,-12826,-11451,32767,32767,-4413,32767,-9030,32767,32767,32767,-20062,32767,-20388,-4658,-15556,-15174,32767,-10410,-7098,24885,-22778,32767,13585,-17725,32767,-3021,-13451,32767,32767,32767,-22755,-16482,-21453,32767,32767,-10203,32767,24980,32767,32767,32767,-4169,-22828,32767,-22672,-7687,-2040,32767,1212,32767,32767,-16298,9062,32767,-21411,-15503,32767,32767,8616,32767,32767,-18378,32767,-5628,-15722,-15568,-3698,32767,2373,-8965,-22049,-9471,-16851,32767,-10515,-20019,-8678,25276,-2814,-861,-21700,-17848,-16184,10694,12886,-5670,-9997,-10168,4776,21326,18260,8401,-22643,32767,-7227,-8451,32767,-20755,-15565,32767,7774,32767,32767,-12730,-21390,32767,32767,32767,-22962,-17538,32767,-1574,-5039,32767,-248,32767,32767,32767,2218,-9038,-10789,20705,32767,32767,32767,-11783,-18048,31235,-2326,32767,7674,32767,-19391,32767,32767,-10528,-19221,-5203,-22985,1190,-19677,-9
591,-14109,-17645,-11948,32767,-17248,-12966,-22704,-1151,-8128,-12520,32767,32767,-22004,32767,8719,-23302,-17569,10312,2173,-19169,-14688,-17264,-5271,-2530,32767,-20123,-21509,10429,32767,29050,-19157,-18989,32767,-2131,25927,14036,-256,-8728,-18516,-17056,-4340,-11220,13122,-13410,24343,-19113,-1048,32767,17249,32767,32767,-3048,-15327,-8457,32767,-9492,-8457,32767,32767,32767,13170,-22521,-14088,-8039,-8555,32767,25335,32767,32767,-21049,32767,-9451,-1203,-13789,25064,-22632,32767,-12212,32767,32767,7910,-23142,32767,-15334,32767,32767,28374,-19323,24670,-19545,-10289,-21304,32767,-8215,-16801,32767,32767,-12484,32767,32767,7457,20953,-4292,1691,-3119,32767,-1481,-5577,32767,23401,32767,-16275,-13953,18005,31270,-17498,-1013,32767,32767,17483,32767,-11095,32767,-1713,32767,32767,32767,-12058,16118,-6602,-17838,32767,27315,-17662,32767,25796,32767,32767,32767,32767,-17837,-5495,7552,-7090,-1340,-17126,-5930,-22063,-23280,-7601,-14300,-19137,-4684,32767,-1294,-22748,-8902,32767,-9935,-6629,32767,-17498,-2246,-16225,20786,17907,32767,32767,-6889,32767,-8987,-22780,6968,32767,32767,-22702,-10377,-3826,-6380,12261,32767,-3051,21824,-11144,32767,32767,32767,17058,-19076,32767,-19677,-9149,32767,32767,-4523,-15729,32767,6949,-15915,15195,-18517,-14260,-12061,-155,32767,32767,329,-21302,-5344,-14311,-10474,32767,-9373,-17523,32767,21524,-23241,-7522,-16241,32767,32767,-12664,-4526,-10929,32767,30929,14324,-6100,-10921,-2774,-15515,-13376,-5546,-12206,-6889,32767,-4958,32767,32767,-23281,-10427,17719,32767,32767,32767,32767,-21522,-20901,-371,-9152,-11290,13094,32767,32767,18723,-13771,10975,-18439,-959,10484,32767,2037,32767,32767,-14912,32767,32767,-3739,32767,32767,32767,32767,-21565,-10207,30490,-14672,-15709,-5386,29166,-214,-19846,32767,-6120,-22759,-18402,32353,32767,1450,-22748,32767,32767,32767,-20286,-3384,-7158,-21516,-2148,-4393,-3646,-1468,-4026,32767,-19809,-1242,-9957,32767,-17329,-17603,32767,-15382,-17929,-12775,-18925,32767,-15973,-20900,32767,-10446,3
2767,10920,-22044,-9980,-11446,17010,-4697,-18768,-2349,-14720,32767,-8899,32767,-467,20553,29145,32767,32767,32767,-7677,-9610,25412,-9208,-7522,32767,-8629,8032,-17687,32767,-4896,-19934,-9515,-21246,32767,17261,13709,12278,1951,-5101,-7785,-1305,32767,-12403,7132,32767,-20,32767,-7362,9334,32767,32767,-22707,32767,-1475,32767,-7571,32767,32767,-18181,-8097,-16673,-4818,32767,-20729,-10351,-10490,-8384,32767,-230,32411,31855,-10782,-9448,32767,32767,-6847,-19486,-17889,7664,32767,-286,32767,-1048,-22187,32767,13048,-17123,32767,-23226,21385,-19657,-719,12428,32767,-1,32767,6646,32767,-13834,-6855,32767,-21529,32767,32767,32263,959,-15594,-12551,14184,10319,9985,-12458,27652,-12589,-12410,-20710,-7242,32767,-17460,32767,-9030,1670,32767,-2065,32767,32767,32767,2121,-8367,13690,-17355,-10759,-21920,-9853,32767,32767,32767,-18588,1751,-19179,-11396,32767,32767,32767,-11524,-14448,4465,32767,24252,-14915,-17902,8599,32767,-11655,-11675,32767,32767,32767,24,4005,-12211,30490,-21720,-13996,-1554,-11547,-19270,19198,32767,3714,-4522,-4246,-16987,-570,-13643,-9476,-15727,10412,-13351,-5224,32767,8902,32767,-23314,28487,-17380,32767,-1138,19851,32767,32767,30240,9823,32767,31132,26273,32767,-12210,-18281,32767,-20095,32767,32046,32767,32767,32767,32767,28909,-17411,32767,32767,30898,-18794,-19562,-16683,32767,-7196,32767,-18827,-8680,32767,32767,29834,32767,1293,32767,27349,-19176,-1786,-8564,-11511,7722,-4604,32767,32767,32767,-10626,32767,-20445,-2327,1364,-661,-1456,-20814,-18690,32767,-2500,32767,27315,32767,10016,5389,-20698,17242,32767,-12255,32767,-9446,-20166,-11748,-2508,-15318,-313,30796,32767,32767,-20481,32767,-21381,32767,32767,-18233,32767,-9214,-16954,32767,26328,-12913,-16151,-20168,-22292,32767,32767,24608,27449,32767,-9400,-6767,-12156,-7964,-18529,-4078,32767,-3799,-8486,138,-3451,29698,32767,-21454,14255,32767,-14046,1531,-4033,32767,792,-18821,23659,-19136,32767,-20029,-19786,32767,32767,-20136,-20851,32767,6216,32767,-12838,32767,-1169,-9870,32767,-28
34,18847,32767,18091,20114,-21953,32767,-15615,32767,32767,-22494,-8691,32767,32767,27955,-1514,32767,-15158,-4892,29517,32767,32170,-7246,-15341,-4015,-21983,32767,-12177,17638,32767,-8828,32767,-20298,-20829,32767,-4233,-2598,-6829,26771,32767,13652,-1725,2932,32767,32767,-15280,32767,32767,32767,-22491,32767,-18316,-6136,5761,-12050,25013,-13842,-6046,-15540,-22347,32767,32767,32767,32767,31163,-19237,-3668,-379,32767,-17746,-8555,5699,32767,32767,6262,32767,32767,-2421,-18938,-7280,10384,32767,16824,29706,-1093,-8805,-12342,21955,11245,32767,-9636,-5391,-3998,4943,32767,8423,32767,-5984,32767,19281,32767,27222,134,-261,4077,32767,-21828,-14612,32767,1426,-20969,9496,1472,11474,26724,16583,32767,-20495,-225,32767,17690,32767,32767,-22767,-3561,-9780,32767,32767,-15256,-7220,20233,32767,-13007,32767,-1059,-7518,-17961,31526,-7419,-11439,-10889,-12723,25762,-12784,-6128,-10231,-8084,-6895,32767,4735,23088,32767,32767,-15591,-422,-17408,-1720,32767,-15472,20729,-14632,10004,32767,-12075,32767,11467,-17393,32767,32767,-7733,-7327,-18778,32767,32767,32767,-13863,32767,7893,26392,20178,32767,9883,32767,32767,-21029,-20536,-8658,32767,22129,32767,19365,-19154,-9103,32767,-12797,-18817,-4974,32767,-14200,496,-3107,-19580,32767,-7376,-1429,-6713,25820,-14134,-10125,-21142,26330,32767,-3998,-5537,32767,-19496,29073,4320,-19168,-21533,32767,-4516,15209,-12363,-22297,546,16874,30452,-11081,17850,-612,-19448,-16450,5401,-18463,32767,-4454,-17207,-2129,-14911,-8350,32767,23983,-7161,-11814,-16426,32767,-7203,-2981,-4022,-7013,-20672,19522,32767,7123,27012,-4931,-9445,-7257,-15070,-17968,-8726,32767,32767,-7933,32767,-13450,-3416,-12865,17721,-7817,-15826,-16874,-15747,-12453,1834,-12266,32767,-11720,-15091,-9427,9365,-14109,-964,32767,11841,-8733,-12589,32767,11168,-6552,-12684,-9644,29147,-8591,32767,-18952,32767,12700,13916,-1184,18845,32767,-7135,-17103,3247,-21876,-22519,-13838,-16947,-9878,-7797,32767,-5261,32767,32767,32767,32767,-12722,-3349,-7245,-10178,16426,32767,153
79,32767,32767,32767,32767,-7414,32767,-6857,32767,-11083,-13158,-909,-6662,32767,-9982,32767,32767,-10238,32767,-16963,-21493,8160,32767,-1936,-21898,15727,-20657,20164,4881,-18143,-22761,-307,32767,-16248,463,-10849,32767,-3207,28255,-14783,19897,-16142,-22290,-679,-8559,32767,31397,5940,-8290,32767,32767,-6618 diff --git a/tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu3_input0_int16.csv b/tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu3_input0_int16.csv new file mode 100644 index 0000000..beb7dfb --- /dev/null +++ b/tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu3_input0_int16.csv @@ -0,0 +1 @@ +23967,-29755,-20269,-30130,3796,-31306,-10441,-12984,19079,28892,-16410,18973,-17566,14799,22053,29805,-19854,13151,3786,3358,5819,-10386,-30061,20622,-7671,389,-8868,-32345,11165,-28806,2195,5796,22394,12211,-25486,761,9891,24026,13322,18087,14868,19737,-6483,3822,6094,27750,-25667,10074,23866,-14271,-840,-30007,2418,-10896,7595,-25352,15550,17247,28142,-17711,25486,3241,26716,-32606,22375,-7059,7248,-21046,-22290,3483,-27588,25403,31529,17853,-10579,-12494,-1184,2050,-7065,29299,29415,1448,-27730,-31392,18374,23418,-10651,1247,-23079,-89,22423,-28412,18298,14015,23052,-23197,-22594,23575,21661,-25603,17518,24717,-31353,15591,-25899,-23023,8466,1898,5164,-32060,11798,-22673,8145,9124,-31055,-8111,-947,1886,25581,5091,17572,-12046,-19733,-31810,-7649,21553,2793,19841,8774,-29891,-26222,-16349,6407,5277,14727,-25834,-14850,665,28785,-2571,15807,10768,-5734,-9299,31962,-5799,-26118,-31434,16188,21228,19223,-12741,-16949,15723,-8940,2605,13122,4189,-8873,-32036,-18949,15435,-3371,-17547,-3269,-31918,-11535,-15944,-27661,-25061,14406,-3068,8010,-17589,-14919,22575,-9478,29593,-18349,7140,-29021,25363,-17792,27269,-4254,-3932,-5083,-9096,15598,23424,-106,-12508,18105,10392,-15009,30899,-30044,13455,-7027,-27956,26795,-30653,-4841,-9382,-22100,9845,-31040,-18911,-5557,-13617,-9061,21660,-15073,-5741,-21779,582,-822
4,30801,-21392,-10265,-23857,8280,-4941,24810,1822,-22411,-1809,17405,-4975,24297,12818,-3949,-23320,-26032,-10171,31641,892,-28140,30340,19597,-9290,-11706,-6342,-23984,14028,-20516,9323,-5595,23170,-11778,-30440,30584,-15474,27481,-25054,22620,-10633,-22879,17421,-18996,15999,28945,-2437,-16228,15836,16436,17268,-15587,28392,-6197,28047,-1917,-16477,-17451,-23644,20602,17835,30027,-31,-11657,9744,-22611,2378,6450,-27587,-21888,-25433,11848,23321,17475,-13425,23570,-9150,17982,27157,24488,-16661,29431,-19769,12501,-20727,13181,-7494,19420,-11061,22085,-14329,9207,-32721,-30017,-21059,14952,-3683,10566,-24038,26840,7905,6636,27980,-23356,-7669,-15956,-13248,-8648,25728,-16388,25987,-22126,-17189,3381,9536,2237,20271,9301,1669,7524,-9708,-11801,31309,-22642,-20651,18131,-2386,-1130,20288,-12257,-4645,-3531,506,24812,32618,-8249,-822,-13572,-2485,3527,9296,-7241,21890,-22802,-29730,18209,29740,23602,-19381,-5735,4758,-1899,16053,25604,-26236,-18607,15463,-9285,26684,-13863,-31839,-8883,-12567,22656,29913,24217,11351,-32274,-16384,11274,-16737,-11145,-11024,-13603,19226,-10573,-1266,7720,-26066,21540,10151,-21457,-21643,-22759,4102,29966,23081,28531,23163,27249,-15312,3615,26483,-698,20493,-11182,18077,17631,-7020,-22027,2512,-6292,-6504,-4100,-4430,20574,8636,-12109,-4886,-29193,13455,-30741,13435,-18975,25834,-1715,-18329,-8747,9027,-30063,18394,-22857,-3487,-1405,-5429,-21671,-14264,25048,31998,-27138,-5939,7830,-3529,-14104,-25191,11733,-10198,23774,6715,-20752,-30049,-25064,-8574,1020,-24208,10652,-9248,6838,-19797,19065,-13581,-26922,-16146,9580,-27667,-7618,-6548,-5638,27128,1548,541,7389,-29329,-31378,26217,3835,-6998,29930,24487,-12980,20077,28497,3956,17464,-23026,-4507,24011,-27865,23749,-6588,24607,24779,-7885,-4370,-17617,4133,-15928,19809,28561,-18544,-7642,-16182,29030,-12262,24660,27027,27074,-17963,-28770,-10429,-11130,30009,-6476,8764,5553,-11126,-11132,24540,-32729,23449,1118,22455,8758,-883,-24749,21488,11646,26501,-5413,6227,19730,14276,-31331,816,
-29688,-14441,7026,8027,20494,-22144,27589,23343,21657,21587,29066,2414,-32652,-3327,2457,-9562,-24670,-10038,-14483,-23083,-25717,-14586,19957,-3733,29869,-12956,-11729,8260,-21113,19276,8404,-21077,-26552,2602,-11296,-9348,20534,9079,-26679,-10573,-14991,6966,12364,-8514,-13704,9073,-23118,-23735,-11718,-7922,4674,440,13718,-28608,14622,8605,-30494,7072,-17354,-27458,-23489,7486,-28308,-30435,-3508,-13161,24466,-28398,-8639,3642,29546,-12843,28144,-27207,-10486,1851,-21124,3553,-10292,32616,-24771,30247,26932,-3496,16810,25044,15822,-11562,-27891,13379,18169,11418,21334,5617,-21121,-3630,-29156,26066,23137,-18625,30618,-973,26337,-18502,-15186,-5508,-30466,20964,-3280,24090,-9959,18607,-29216,-16962,-21131,-1225,31267,752,-5325,6101,-20295,8253,-4971,-29031,-26253,2209,31781,26666,-20002,-18551,-5685,32339,15029,-16741,5258,20838,-19027,24303,28736,7912,-21983,17326,-31952,21659,93,22771,25803,2350,20984,-14301,15844,-9613,5770,-20346,13527,20897,-6987,-10758,25779,-25669,-4181,10208,11022,-5258,-13842,-30156,9763,3168,-25181,32411,-21089,12941,21877,27567,24383,27687,24823,-10310,-6607,-9632,-16026,-27516,-20922,4825,-13562,-32263,-8184,-25214,1572,-7290,-30809,8353,-20658,17592,26383,11975,5270,-4305,26331,11131,-28580,-1023,-12318,-26059,7808,30556,24817,14927,-15865,29275,14494,12585,-4130,12809,-18359,-25067,-29868,-27533,27331,28234,11374,-23004,18542,-2515,-20213,-20987,31398,10066,-15850,-25159,-24859,-17375,16669,26415,-320,-29965,-19215,-29320,6049,10476,-4347,11879,1990,-24821,28651,-26334,10758,-27338,-3783,23322,4245,22773,-18335,21972,16018,29332,-31900,-27218,20711,-22974,30771,17884,14835,-19257,6257,1954,-18433,25813,-19348,16146,3188,-24163,-25304,1423,-25531,8157,21434,-8253,-3854,24549,4259,2635,3216,-29182,23809,-8364,-27129,-31635,-24154,773,-5023,-1980,-30050,19728,1388,-5562,32172,28547,24084,-11727,-26921,24091,-18107,32400,-7964,26210,-2416,-17288,11147,-8639,-1235,31986,-13633,-28298,-4218,20619,15213,27820,-22415,14190,26942,-18242,-207
7,12488,5766,-18529,-27412,-21094,-30321,-23449,2773,-11963,4879,-3070,-21825,-29591,1667,13385,17460,-3754,-6405,-14182,-17016,-20708,31669,-3395,26954,13077,-6402,-5982,14673,27746,3229,25254,-556,-9263,-4341,32485,-15534,-10413,14092,-23745,13618,-20798,17819,10953,12121,-8372,-28666,-14180,22366,21616,-7865,-7763,-30434,22183,-1511,-24559,-9508,-12598,-14315,-14635,6132,-8193,-28790,-29935,4398,9753,989,3043,23214,7585,30463,-24440,24297,29954,24873,970,-3913,-21863,-25816,-28505,-24160,11114,-1122,-24717,-1275,8512,-11991,-27407,-31649,3949,-11850,-20864,-3882,-15139,-28635,-9615,-5260,3766,-4004,-14490,6268,31917,-29824,-16997,16766,27995,4103,-30188,12442,131,13661,-32445,89,-28914,1541,31331,-5913,-6048,31823,-7886,1584,-3203,-12047,-25511,6818,18675,-9203,24984,4279,19826,-16476,-26365,7373,10933,11224,-29455,21661,-31067,-8765,-9558,3556,-29827,-20663,15411,-23368,-1869,8126,26147,30612,-5239,26119,24545,1928,22775,-18192,-17709,-10753,26483,-24754,-22781,13688,-16483,7062,28479,14703,-10723,638,9244,19163,-13704,32568,-18779,-21371,2107,-8491,21608,-30552,-563,-31173,-29388,-30042,5319,-30395,-25847,-24535,18703,-27253,-30074,19335,-31700,22354,450,-30082,27232,10409,-31491,-15460,31356,4097,2577,-4728,21050,-3257,21283,21959,1134,-25876,-25555,25657,20952,19155,27978,-27283,6004,-22529,-22064,25388,-26635,-11648,-14591,16076,22765,-29294,28323,8854,-28696,-14009,-7455,1911,-13999,16739,-19239,-1115,-16819,-27083,11576,-4359,-15629,26162,-24009,-3388,-13953,-17111,-28136,-21550,18868,12815,6660,-17953,23118,-20504,-1711,19628,26553,-15833,21856,24896,18033,684,22672,19822,11306,-3142,-16857,-17373,1279,-24697,19610,29239,8268,15150,5260,29811,12350,-548,26613,4199,-7152,-10133,22860,12722,-6677,-4933,28314,16866,-29903,-22666,9177,22553,-17149,6893,-715,16973,-21526,15482,18107,-10467,-5124,24911,-28533,-3749,-20629,-24726,-7276,-31753,27754,7841,-185,24601,-7659,14040,-15167,-4155,-20174,12746,-11105,-19285,16152,27201,2660,-27762,22502,-26860,-6890,3324
,3191,-27,25375,-17372,25370,11169,11231,-26862,22911,11733,-27421,21261,-25819,21625,-15004,-23087,-2051,-4470,25024,-32153,-21950,-27836,-24060,28246,26496,29057,28609,-453,-19401,6142,26286,-16167,8187,-11945,-22458,-15794,9767,-22811,-12790,-7847,-22102,11030,17378,-21826,-1341,8673,-30965,26509,-31797,-10343,18332,12262,-21463,19485,6029,9494,-8774,-13299,-2886,13426,-17083,-10123,-26452,28353,22636,20542,-14097,1587,-8002,-10940,-549,-4763,26833,15026,-20068,9661,24606,-8609,-7666,27379,-5881,-19933,27428,-5064,-15079,3773,-22095,-25665,-8037,-2836,21153,-32433,-16062,-3108,-1091,18168,-16910,21078,10463,-3167,-17299,-28381,29299,824,-25729,28670,-28084,2390,-25527,5519,13230,-22331,22713,-25148,12547,20,-9013,-29847,31857,-10090,-22084,30761,-6,-8431,5541,22925,-22020,-24091,3265,30080,-27997,14304,-19027,5044,-29156,14775,-7325,-23911,-19838,634,5071,-17289,1188,-16029,-29473,18591,10874,-32424,18256,25212,-3004,22403,-26646,12685,-2170,19544,20557,687,4219,-13408,17355,-21756,14026,-10294,16230,-20419,-9346,-23881,-25393,20791,24939,19960,21815,-23543,-5561,3253,-14782,-9661,31532,-32138,23640,-24053,-25863,-6103,-32558,-21626,11415,16118,23883,-15616,3352,-32145,-19998,-8647,29671,-6041,16315,-1830,19048,-22829,10347,-30107,10450,-18044,10343,8157,17530,-29210,-9326,-21728,-2946,-21892,-17923,-16002,30134,29445,-6166,14224,-12618,28835,32263,23418,-28035,14314,-28490,-6509,-21737,-21204,14953,-14547,-9919,10432,-31830,24503,5695,-24769,28365,-4221,-18796,28948,20108,32369,-31797,-23032,-29978,17354,21637,-14258,30222,10472,18336,20118,32477,-5826,-31900,14183,-31682,-10742,-2850,24582,508,26216,30370,-22775,3799,22289,-29920,-21663,21748,30050,3612,28343,30268,-25681,25623,-7865,-21970,-21754,-5168,24619,995,-12527,-30811,-13235,-23548,23949,-14693,-27974,-12126,10596,-3932,-1203,-30323,-24941,-22615,4483,5402,-7923,-13969,-14209,2002,8940,7655,3522,-31641,26121,-10099,-11809,14940,-29002,-21750,17413,3259,19985,16801,-17789,-29890,20190,30133,20252,-32087
,-24508,23091,-2200,-7042,18355,-346,25316,26775,18937,930,-12629,-15077,8680,18855,20572,13854,-16466,-25220,13094,-3251,20702,3217,30534,-27096,15930,14532,-14712,-26859,-7271,-32119,499,-27496,-13403,-19716,-24657,-16696,31717,-24102,-18118,-31726,-1608,-11358,-17495,24281,29505,-30748,14631,3655,-32562,-24550,4323,911,-26787,-20525,-24124,-7365,-3536,18441,-28119,-30057,4372,24774,12178,-26770,-26535,20476,-2978,10869,5884,-358,-12196,-25874,-23834,-6064,-15679,5501,-18739,10205,-26708,-1465,31638,7231,30084,14105,-4259,-21417,-11818,19768,-13264,-11818,13811,30580,21790,5521,-31471,-19686,-11233,-11955,25149,10621,24055,29507,-29413,31332,-13207,-1681,-19268,10507,-31626,22104,-17065,27220,30325,3316,-32338,22685,-21427,14064,17731,11895,-27002,10342,-27312,-14377,-29770,17230,-11479,-23477,23408,32624,-17445,27425,29375,3126,8784,-5997,709,-4358,15182,-2069,-7793,24772,9810,18709,-22742,-19497,7548,13109,-24452,-1416,31732,31087,7329,28042,-15504,16834,-2394,19626,23092,20829,-16850,6757,-9226,-24926,27635,11451,-24680,14171,10814,17303,20677,23214,26024,-24925,-7678,3166,-9908,-1873,-23931,-8286,-30830,-32531,-10622,-19983,-26742,-6545,26244,-1808,-31788,-12439,17621,-13883,-9263,29411,-24451,-3138,-22673,8714,7507,31862,18275,-9626,29908,-12558,-31832,2921,30730,23829,-31723,-14500,-5346,-8915,5140,29039,-4263,9149,-15572,21793,23954,27782,7151,-26657,19282,-27497,-12784,29545,25107,-6320,-21980,25263,2913,-22239,6370,-25875,-19926,-16854,-217,19253,22549,138,-29767,-7467,-19998,-14636,25757,-13098,-24487,28086,9023,-32477,-10511,-22695,31083,23766,-17696,-6324,-15272,22517,12966,6005,-8524,-15261,-3877,-21681,-18692,-7750,-17056,-9627,25668,-6928,22007,20741,-32533,-14571,7428,30345,17798,20940,25183,-30075,-29207,-519,-12789,-15776,5489,29977,28425,7849,-19244,4601,-25766,-1340,4395,29345,854,16774,20665,-20838,21988,20392,-5225,26932,23072,30428,19128,-30134,-14263,12782,-20502,-21952,-7527,12227,-299,-27733,22438,-8552,-31803,-25715,13563,30526,608,-3178
8,18868,18049,17654,-28348,-4729,-10002,-30066,-3002,-6139,-5095,-2051,-5626,26218,-27681,-1736,-13914,31521,-24215,-24598,17363,-21494,-25054,-17852,-26445,19534,-22321,-29205,19309,-14597,19493,4578,-30804,-13946,-15994,7131,-6563,-26226,-3282,-20570,30981,-12435,23804,-652,8616,12218,27158,27480,26312,-10728,-13429,10653,-12867,-10511,31293,-12058,3367,-24716,24915,-6842,-27856,-13296,-29689,18867,7236,5747,5147,818,-7128,-10878,-1824,28671,-17332,2990,25477,-28,25545,-10288,3913,32570,26878,-31730,28186,-2061,27991,-10579,18755,29203,-25406,-11315,-23298,-6733,20995,-28966,-14464,-14658,-11716,28257,-322,13587,13354,-15066,-13203,21284,28685,-9568,-27229,-24998,3213,31766,-399,14981,-1465,-31004,30837,5470,-23928,26509,-32455,8965,-27468,-1005,5210,22153,-1,14794,2786,30925,-19331,-9579,28069,-30084,26951,28792,13525,402,-21791,-17538,5946,4326,4186,-17409,11592,-17591,-17342,-28940,-10120,30393,-24398,26917,-12619,700,20350,-2886,22201,24431,32417,889,-11692,5739,-24252,-15034,-30630,-13769,17741,18441,20591,-25975,734,-26801,-15925,27090,31787,27186,-16103,-20189,1872,20582,10167,-20842,-25016,3605,27746,-16287,-16315,21907,24091,18622,10,1679,-17063,12782,-30351,-19558,-2172,-16135,-26927,8048,20066,1557,-6319,-5933,-23738,-797,-19065,-13241,-21976,4365,-18657,-7300,32560,3732,30959,-32578,11942,-24286,30024,-1590,8322,15451,20619,12677,4118,17980,13051,11014,31394,-17062,-25545,27197,-28080,16748,13434,32056,28621,19632,29150,12119,-24330,29937,29499,12953,-26262,-27335,-23312,28508,-10056,16823,-26308,-12129,25614,20566,12507,28222,542,28379,11465,-26796,-2496,-11967,-16085,3237,-6434,13797,29665,28489,-14849,14885,-28570,-3252,572,-923,-2035,-29085,-26117,25300,-3494,17008,11451,27648,4199,2259,-28923,7228,23384,-17125,31820,-13200,-28180,-16417,-3504,-21405,-438,12910,27975,22890,-28620,27937,-29877,31588,23059,-25479,18109,-12876,-23691,17196,11037,-18044,-22569,-28183,-31151,32681,23888,10316,11507,17169,-13135,-9456,-16987,-11129,-25892,-5699,19640,-53
09,-11858,58,-4822,12450,25441,-29980,5976,26697,-19627,642,-5636,14860,332,-26300,9918,-26740,24602,-27988,-27648,15629,21555,-28138,-29137,27230,2606,18296,-17939,16704,-1633,-13792,21873,-3960,7901,14946,7584,8432,-30677,13983,-21820,26784,21900,-31433,-12144,31433,22189,11719,-2115,20126,-21181,-6836,12374,26649,13486,-10125,-21437,-5610,-30718,17071,-17016,7394,23366,-12336,29723,-28364,-29106,20570,-5915,-3631,-9543,11223,30165,5723,-2411,1229,14172,29800,-21352,24974,28210,17535,-31428,18835,-25594,-8575,2415,-16838,10486,-19343,-8449,-21715,-31227,21640,13831,19696,25806,13064,-26882,-5126,-530,14953,-24798,-11955,2389,25135,15786,2625,17568,14959,-3383,-26463,-10173,4353,29752,7053,12453,-1528,-12304,-17246,9204,4714,21983,-13465,-7533,-5587,2072,23388,3531,19501,-8362,16726,8083,22869,11412,56,-365,1709,15466,-30502,-20418,16931,598,-29302,3981,617,4810,11203,6952,22144,-28640,-315,22373,7416,29129,25732,-31814,-4976,-13666,27894,18181,-21319,-10089,8482,19782,-18176,25603,-1480,-10505,-25099,13216,-10367,-15985,-15216,-17779,10800,-17864,-8563,-14297,-11296,-9635,14837,1985,9679,32047,16534,-21787,-590,-24325,-2403,19903,-21621,8690,-20446,4194,20209,-16873,30208,4807,-24304,28120,19102,-10806,-10238,-26240,22689,29985,16349,-19372,32024,3309,11064,8459,18157,4143,23183,25744,-29386,-28697,-12099,23839,9277,21766,8118,-26765,-12721,21917,-17883,-26295,-6951,17793,-19843,208,-4341,-27361,16852,-10307,-1997,-9381,10824,-19750,-14149,-29544,11038,30222,-5587,-7737,26368,-27243,12188,1811,-26785,-30090,19435,-6311,6376,-17276,-31157,229,7074,12766,-15484,7483,-855,-27177,-22987,2264,-25800,28569,-6224,-24045,-2975,-20836,-11668,22751,10054,-10007,-16508,-22954,23510,-10065,-4165,-5620,-9800,-28887,8184,17747,2986,11324,-6890,-13198,-10141,-21058,-25108,-12194,15147,32344,-11085,29914,-18795,-4774,-17977,7429,-10924,-22115,-23580,-22005,-17402,769,-17140,17009,-16378,-21088,-13173,3926,-19716,-1347,30630,4964,-12204,-17591,29714,4682,-9155,-17724,-13477,12219,
-12005,26427,-26483,26730,5324,5834,-1654,7900,28350,-9970,-23899,1361,-30569,-31467,-19337,-23681,-13803,-10895,15969,-7351,27031,22166,29908,27109,-17778,-4680,-10124,-14222,6886,31032,6447,15219,20146,24330,31451,-10360,17955,-9582,24669,-15487,-18387,-1270,-9309,25879,-13949,17018,19339,-14306,20102,-23704,-30034,3421,19354,-2706,-30600,6593,-28866,8453,2046,-25353,-31806,-429,21813,-22705,194,-15160,19569,-4481,11845,-20658,8341,-22556,-31148,-949,-11960,24286,13162,2490,-11584,15008,32195,-9248 diff --git a/tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu4.tflite b/tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu4.tflite new file mode 100644 index 0000000..9cce01b Binary files /dev/null and b/tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu4.tflite differ diff --git a/tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu4_golden_int16.csv b/tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu4_golden_int16.csv new file mode 100644 index 0000000..9440ddc --- /dev/null +++ b/tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu4_golden_int16.csv @@ -0,0 +1 @@ 
+4007,7785,-4786,25742,11871,13830,-7098,25533,9726,-5375,-3506,1075,-5535,-4023,1639,28608,32598,-2752,3789,-8935,20035,-2162,-9697,-37,-4180,17636,30580,8586,28276,-553,-7537,8177,26956,-4886,18196,1224,-2557,-4834,21233,9462,421,6267,-5637,28757,-362,-8128,-8159,-8322,-2947,16679,-4463,-6583,1459,30179,15465,-4101,-338,7565,-9527,-4537,-2710,-3447,-9592,-5775,26944,-6934,-6799,14432,24201,12866,26101,-2458,-4303,-2193,21911,15202,-1199,-2806,-6196,32554,-8752,-6357,12183,28361,-7012,10602,31726,29585,-3245,12722,-2035,-9534,23110,-4850,-4540,24166,-957,19396,8585,-4194,11000,-3148,3587,20399,-1951,2600,4794,-965,17091,-8809,-5405,-3815,13767,-3918,10589,18062,-301,1941,-2580,22671,31108,27874,-7125,-8068,-4508,-5034,9406,-5917,2954,-7934,31435,-1375,-1156,25124,-6401,-4137,1414,-9691,24583,7819,18945,6067,29501,24289,-255,14028,21100,-8811,-5451,-5232,12979,-1364,5071,-6981,-5862,-5514,-1113,-5967,-980,-4330,-3072,-877,15514,12036,-8775,26366,202,-8177,-5547,15519,-292,-5689,12701,30424,2582,-8854,-8562,4718,-5814,23202,29075,-9317,7765,9058,-726,28948,-5155,-871,19015,28934,-5908,836,-7902,-523,11168,-2758,13036,6410,-9432,-2306,24062,-4612,-2245,-6077,21411,13740,237,20420,-2548,20232,-5329,27291,22122,19798,-1601,-6741,26254,28941,-6235,-6099,7642,29484,-3169,-4246,-9727,-3985,-6577,-9793,7069,12638,-6298,-5049,5362,5794,22602,10599,-2735,9302,-3455,-4180,30137,-9409,28875,-3036,31241,25023,-511,649,-1786,32320,-486,17273,18571,-7896,-5410,-5174,-5937,-9250,-4632,-7280,-615,12240,25117,9947,7235,-1148,-922,-2703,-3356,-2331,1530,30525,-9046,-6499,-1893,-5636,-9481,31929,-7110,31027,-5791,658,5927,18340,31977,-2611,11115,-8512,-6700,26867,-7888,-5077,-7564,-8806,-8363,-8575,-2876,22580,15046,-2406,-7984,29942,-4533,-9065,32471,27020,14812,19438,564,433,11702,275,-5989,-8143,-4049,32719,-7561,28745,30911,13692,27087,-6097,-2689,18151,28802,-5992,14459,-1129,-1261,1616,-3107,-1205,-564,6484,32358,-1083,-4591,14038,25152,11858,-9363,-3010,1855,-1226,-3267,-7706,-6
536,23808,25766,-2654,10929,-3274,-5902,21850,12096,3372,1446,185,11863,20638,-2290,-3817,23526,-8451,-6948,-4383,-8455,16811,-9822,-2655,-2870,21682,23441,-7993,-8457,13987,24440,11970,-9081,2018,-3874,16871,-2654,-9122,18367,-6068,18493,32134,-3066,30288,-9729,12408,-4270,-9307,25942,32282,19172,-3080,-4657,-4832,-9103,28369,29177,29101,-4071,25491,32515,595,20282,-8391,-3856,-228,-2971,-9786,14271,-512,9939,-8230,-588,8386,25499,-6791,10820,-6381,1096,15441,-8878,-6061,-3627,-9398,-2826,-8137,-4497,-7068,30597,23219,14628,-5761,18463,-6100,-9742,17805,28441,32031,-9307,-5973,-9024,-364,27917,-9427,-2761,-8375,-3088,-6284,7865,13420,27557,14515,5852,6957,18732,-1255,8787,11842,-8247,16235,26121,-9187,7327,-5661,-9255,-56,10399,28729,5890,-2317,-6825,-4010,7222,-836,17288,5403,-2746,-5892,14535,-5651,1447,32495,-7724,-1121,-1262,9403,7775,-2740,-6136,-456,19443,2297,-9242,-3073,21627,14872,25706,-2778,-1410,11708,-7284,-6994,25140,13295,2985,-1152,22223,-2206,29672,11119,-7497,28711,-2707,-9767,6207,-3445,-3928,5651,21707,5339,18062,7545,32299,4239,-9516,3365,10079,19186,23497,-5985,22380,22562,-2892,-7436,-8362,-6657,4150,26634,-516,-5207,-532,-5404,18486,-1478,-4684,-7653,-6450,26100,-2866,21225,11424,26503,-8490,27049,-3997,24089,10557,25952,-751,-2541,-3025,-116,24734,-8422,1945,10743,-183,-1522,-379,29317,28115,-7186,17770,-8255,28678,9946,-5072,4116,-4033,22681,-8981,-6755,-1632,9992,11533,-1309,13205,-1239,31796,-4883,16148,-8499,-6028,26643,15353,-4700,-5840,-2964,-384,-2902,11145,64,2622,1836,12339,-858,9809,-2259,-8111,27700,-7120,18751,5108,26267,-4363,-770,28140,-2475,23787,29597,5338,-1630,-9376,-5863,-9126,-5284,16349,-584,7067,17117,-7099,-5656,27383,-2145,-7182,-4131,-1348,-3037,-6011,27428,13124,-8218,21240,5064,-8122,-5064,-9686,32720,14506,-3743,11358,-2767,28192,31249,-4348,-3143,26944,11345,19916,-4703,29448,22809,2175,-5880,-9145,19972,-6420,29092,32694,-852,16596,6806,21957,3241,22629,-7165,15656,-1693,30719,27963,11540,-2944,28656,8771,8514,
-3455,-8993,2826,11314,-5733,30831,5616,12048,-6135,-1684,19390,-7200,12490,26026,29147,-1966,-625,-8849,-4438,22991,26104,28483,-151,32630,29554,-6189,-9505,5030,-405,-667,1762,6975,10237,6287,2955,-6304,10059,-5495,2835,-4964,-2834,13980,14853,29784,-2426,-4229,-4407,21353,25123,-2308,25395,667,25151,-2261,-6232,-2359,-3336,-8508,890,21388,-8634,-3477,14505,27400,-7410,30380,16155,-5366,-8412,-2131,-1819,-2496,23996,31632,7859,21443,-9049,31518,18369,-1995,8610,-6551,-463,30385,-7885,2105,30971,14419,16548,-1200,6980,-9184,-3549,-8164,7209,27468,-921,13276,5917,-9394,-8755,12331,-5653,-7426,-1708,17827,-6492,26643,-8848,23300,-4015,-9111,11352,31176,7440,2972,-899,32036,-5768,4043,29888,-6686,-1926,19838,2253,-7660,727,29074,-203,-1689,19424,9592,-2988,27088,19591,3842,8539,-3916,26337,19605,8488,-4502,14755,8569,13058,1628,5774,-2605,23852,-9397,22562,11228,-2802,-4343,17562,16335,1396,2836,-9530,-148,-5943,-4722,-4560,-192,24622,-1916,-921,5462,-8895,-7382,-5539,-1873,-7246,-6315,14595,27785,20145,-3214,-773,-4050,15349,14236,29415,20858,-8696,20433,25826,-2371,1114,2250,18333,-8462,10265,10078,10225,28108,19609,-2348,6025,7253,-7883,-6849,20093,19636,-2397,-4962,-5578,6576,32241,-942,31037,-5587,-5294,-2106,21115,-4543,-8707,-3417,-8612,-4657,31398,16227,-162,-9492,14089,-6373,-1213,-9260,-4090,14323,-4863,26952,8068,-8884,31972,-1643,-483,-5119,-5384,13258,-1584,-3910,19519,-53,-1750,5245,11930,-1825,-394,2538,-1474,-6926,21535,-3994,8913,3461,-2562,12642,-3412,8495,31894,-4395,19488,-934,-3199,30956,22131,32509,-5230,20453,-365,31474,25505,-290,-529,-5389,19954,26140,22269,-1689,13657,26104,-3760,3160,-1933,-1643,-3408,12073,-9198,-515,24485,-1209,25273,-5514,26251,-9278,-5262,-5420,-5321,-6507,27550,-9454,20227,-8710,-8229,-5600,-469,23675,-4166,-7164,245,-801,6728,12402,-8941,13845,-6171,-1736,-6965,-8009,-6411,9685,4471,26008,-410,-3082,-2227,-2026,28847,12125,-8565,21283,15062,-756,-9820,-4126,8085,17347,-5835,-8036,1818,-4801,29237,31053,-4708,-9586,-539
7,4732,12904,29238,-4374,30230,-113,21093,-5624,16045,-8495,-3778,32524,7804,25119,-7464,12555,-4927,-9152,-8257,12188,-99,-7827,-1543,22299,51,-6418,28248,22449,-2328,26466,-5464,13124,-401,-1636,12585,-7086,30215,-9071,5311,-9718,-1376,28537,20660,-4663,-2596,14666,-8772,-2743,13652,-4006,23112,-6445,27502,-9458,22849,17502,-2274,-7249,13548,-2650,-2772,-7618,3735,4306,21684,-9678,-3566,25150,-9010,-1397,23296,30816,-6098,-5890,9766,6615,-2880,23970,2528,5390,10260,-8616,8175,31044,14027,-2007,-982,-4526,30328,-7156,7843,-3483,14361,-7885,-3745,3615,11408,-6803,28506,-5474,-3763,13762,-4421,31012,25628,27674,-5444,19967,-8425,-5563,-8464,-1899,861,29060,-9622,-8823,-1950,10337,-817,666,11139,26750,-1383,-567,1672,7006,16001,10573,-8674,30798,-5068,14181,30020,-6086,-9662,27957,-4097,-3461,-814,24321,29364,12049,-5469,22886,-9601,-4115,2375,-2748,-4805,24044,-1921,23578,-3806,10243,-1560,15664,-5839,25305,-2592,-4411,-1444,-58,-7129,31498,32402,27055,16930,-2155,-6299,-5886,-325,-1332,27403,32415,449,-1198,9375,-4161,22151,-4912,-4249,-8731,19301,1133,25023,15077,25882,17220,-8146,3587,16853,-8053,-9202,12549,388,31446,-2007,31637,17844,-8438,20102,-7439,27601,-3649,-282,-2091,18299,-5760,14041,7667,22084,27823,-7193,-6130,25828,-6239,32100,-940,-5967,-7060,6319,-3341,13886,28395,-1820,12498,-3595,-6111,15448,30986,-4132,-127,27832,-4354,-9084,-2383,-5106,12902,-9460,3051,16409,19721 diff --git a/tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu4_input0_int16.csv b/tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu4_input0_int16.csv new file mode 100644 index 0000000..18a14f7 --- /dev/null +++ b/tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu4_input0_int16.csv @@ -0,0 +1 @@ 
+4007,7785,-15954,25742,11871,13830,-23659,25533,9726,-17916,-11685,1075,-18450,-13408,1639,28608,32598,-9174,3789,-29783,20035,-7206,-32322,-123,-13933,17636,30580,8586,28276,-1842,-25124,8177,26956,-16286,18196,1224,-8524,-16111,21233,9462,421,6267,-18790,28757,-1206,-27093,-27196,-27740,-9822,16679,-14875,-21944,1459,30179,15465,-13669,-1127,7565,-31755,-15121,-9033,-11489,-31974,-19249,26944,-23111,-22663,14432,24201,12866,26101,-8192,-14341,-7310,21911,15202,-3995,-9352,-20652,32554,-29171,-21188,12183,28361,-23371,10602,31726,29585,-10816,12722,-6782,-31780,23110,-16165,-15131,24166,-3189,19396,8585,-13979,11000,-10494,3587,20399,-6501,2600,4794,-3216,17091,-29364,-18015,-12715,13767,-13058,10589,18062,-1002,1941,-8600,22671,31108,27874,-23750,-26891,-15025,-16780,9406,-19723,2954,-26447,31435,-4584,-3854,25124,-21337,-13788,1414,-32301,24583,7819,18945,6067,29501,24289,-850,14028,21100,-29370,-18170,-17439,12979,-4546,5071,-23270,-19540,-18379,-3709,-19888,-3267,-14433,-10239,-2921,15514,12036,-29250,26366,202,-27256,-18490,15519,-974,-18963,12701,30424,2582,-29513,-28540,4718,-19379,23202,29075,-31057,7765,9058,-2420,28948,-17184,-2904,19015,28934,-19693,836,-26338,-1741,11168,-9194,13036,6410,-31440,-7685,24062,-15372,-7481,-20255,21411,13740,237,20420,-8491,20232,-17762,27291,22122,19798,-5337,-22469,26254,28941,-20781,-20329,7642,29484,-10562,-14152,-32422,-13284,-21922,-32644,7069,12638,-20993,-16829,5362,5794,22602,10599,-9116,9302,-11517,-13932,30137,-31364,28875,-10120,31241,25023,-1702,649,-5954,32320,-1618,17273,18571,-26319,-18032,-17246,-19789,-30831,-15439,-24267,-2050,12240,25117,9947,7235,-3826,-3073,-9009,-11185,-7768,1530,30525,-30152,-21662,-6309,-18785,-31601,31929,-23699,31027,-19302,658,5927,18340,31977,-8701,11115,-28372,-22333,26867,-26294,-16921,-25214,-29351,-27875,-28581,-9586,22580,15046,-8019,-26613,29942,-15108,-30215,32471,27020,14812,19438,564,433,11702,275,-19962,-27142,-13496,32719,-25201,28745,30911,13692,27087,-20323,-8962,1
8151,28802,-19972,14459,-3764,-4203,1616,-10357,-4017,-1878,6484,32358,-3608,-15301,14038,25152,11858,-31210,-10034,1855,-4085,-10889,-25685,-21785,23808,25766,-8845,10929,-10912,-19672,21850,12096,3372,1446,185,11863,20638,-7634,-12722,23526,-28170,-23158,-14609,-28182,16811,-32739,-8850,-9565,21682,23441,-26644,-28190,13987,24440,11970,-30268,2018,-12911,16871,-8847,-30405,18367,-20226,18493,32134,-10220,30288,-32430,12408,-14232,-31021,25942,32282,19172,-10266,-15523,-16107,-30341,28369,29177,29101,-13570,25491,32515,595,20282,-27970,-12851,-760,-9901,-32619,14271,-1706,9939,-27434,-1959,8386,25499,-22637,10820,-21269,1096,15441,-29592,-20202,-12089,-31326,-9419,-27121,-14989,-23559,30597,23219,14628,-19201,18463,-20332,-32474,17805,28441,32031,-31022,-19909,-30080,-1214,27917,-31424,-9202,-27916,-10291,-20946,7865,13420,27557,14515,5852,6957,18732,-4183,8787,11842,-27489,16235,26121,-30621,7327,-18870,-30850,-186,10399,28729,5890,-7724,-22748,-13365,7222,-2785,17288,5403,-9152,-19638,14535,-18836,1447,32495,-25746,-3735,-4207,9403,7775,-9133,-20453,-1518,19443,2297,-30806,-10244,21627,14872,25706,-9260,-4699,11708,-24280,-23311,25140,13295,2985,-3838,22223,-7352,29672,11119,-24989,28711,-9023,-32555,6207,-11481,-13093,5651,21707,5339,18062,7545,32299,4239,-31718,3365,10079,19186,23497,-19950,22380,22562,-9639,-24786,-27872,-22189,4150,26634,-1720,-17355,-1771,-18014,18486,-4927,-15614,-25508,-21498,26100,-9551,21225,11424,26503,-28300,27049,-13322,24089,10557,25952,-2502,-8469,-10082,-386,24734,-28072,1945,10743,-610,-5071,-1263,29317,28115,-23952,17770,-27517,28678,9946,-16906,4116,-13441,22681,-29937,-22517,-5438,9992,11533,-4363,13205,-4128,31796,-16275,16148,-28330,-20093,26643,15353,-15667,-19467,-9880,-1278,-9672,11145,64,2622,1836,12339,-2859,9809,-7530,-27035,27700,-23734,18751,5108,26267,-14542,-2565,28140,-8249,23787,29597,5338,-5431,-31251,-19541,-30420,-17611,16349,-1946,7067,17117,-23664,-18853,27383,-7148,-23938,-13769,-4494,-10124,-20036,27428,131
24,-27391,21240,5064,-27073,-16880,-32286,32720,14506,-12475,11358,-9221,28192,31249,-14491,-10475,26944,11345,19916,-15677,29448,22809,2175,-19598,-30481,19972,-21399,29092,32694,-2840,16596,6806,21957,3241,22629,-23883,15656,-5642,30719,27963,11540,-9812,28656,8771,8514,-11516,-29976,2826,11314,-19109,30831,5616,12048,-20450,-5612,19390,-23998,12490,26026,29147,-6551,-2083,-29496,-14794,22991,26104,28483,-503,32630,29554,-20628,-31681,5030,-1350,-2222,1762,6975,10237,6287,2955,-21012,10059,-18317,2835,-16545,-9445,13980,14853,29784,-8087,-14097,-14688,21353,25123,-7694,25395,667,25151,-7536,-20771,-7863,-11118,-28359,890,21388,-28779,-11589,14505,27400,-24700,30380,16155,-17887,-28040,-7101,-6061,-8318,23996,31632,7859,21443,-30163,31518,18369,-6648,8610,-21835,-1544,30385,-26281,2105,30971,14419,16548,-3998,6980,-30612,-11828,-27213,7209,27468,-3069,13276,5917,-31313,-29183,12331,-18841,-24751,-5692,17827,-21640,26643,-29494,23300,-13383,-30368,11352,31176,7440,2972,-2996,32036,-19225,4043,29888,-22286,-6419,19838,2253,-25531,727,29074,-675,-5628,19424,9592,-9959,27088,19591,3842,8539,-13053,26337,19605,8488,-15007,14755,8569,13058,1628,5774,-8682,23852,-31323,22562,11228,-9338,-14477,17562,16335,1396,2836,-31765,-493,-19808,-15739,-15198,-639,24622,-6387,-3069,5462,-29648,-24607,-18463,-6244,-24151,-21049,14595,27785,20145,-10712,-2575,-13500,15349,14236,29415,20858,-28987,20433,25826,-7902,1114,2250,18333,-28207,10265,10078,10225,28108,19609,-7826,6025,7253,-26276,-22828,20093,19636,-7988,-16538,-18593,6576,32241,-3139,31037,-18621,-17647,-7019,21115,-15144,-29024,-11388,-28706,-15521,31398,16227,-539,-31638,14089,-21242,-4042,-30867,-13631,14323,-16210,26952,8068,-29611,31972,-5477,-1609,-17064,-17947,13258,-5278,-13034,19519,-175,-5834,5245,11930,-6083,-1312,2538,-4914,-23087,21535,-13313,8913,3461,-8539,12642,-11371,8495,31894,-14650,19488,-3112,-10664,30956,22131,32509,-17434,20453,-1216,31474,25505,-967,-1764,-17964,19954,26140,22269,-5628,13657,26104,-125
33,3160,-6443,-5475,-11359,12073,-30659,-1715,24485,-4030,25273,-18379,26251,-30927,-17539,-18066,-17737,-21690,27550,-31513,20227,-29031,-27429,-18666,-1561,23675,-13887,-23879,245,-2668,6728,12402,-29804,13845,-20570,-5785,-23215,-26695,-21369,9685,4471,26008,-1367,-10274,-7421,-6752,28847,12125,-28550,21283,15062,-2518,-32733,-13753,8085,17347,-19450,-26787,1818,-16003,29237,31053,-15694,-31954,-17989,4732,12904,29238,-14578,30230,-376,21093,-18745,16045,-28317,-12591,32524,7804,25119,-24880,12555,-16424,-30507,-27524,12188,-329,-26090,-5142,22299,51,-21393,28248,22449,-7760,26466,-18211,13124,-1337,-5452,12585,-23618,30215,-30235,5311,-32393,-4587,28537,20660,-15541,-8654,14666,-29240,-9143,13652,-13354,23112,-21482,27502,-31526,22849,17502,-7580,-24162,13548,-8831,-9238,-25394,3735,4306,21684,-32260,-11886,25150,-30031,-4655,23296,30816,-20327,-19632,9766,6615,-9600,23970,2528,5390,10260,-28719,8175,31044,14027,-6689,-3272,-15085,30328,-23854,7843,-11610,14361,-26281,-12481,3615,11408,-22677,28506,-18247,-12542,13762,-14737,31012,25628,27674,-18145,19967,-28081,-18541,-28213,-6329,861,29060,-32071,-29410,-6500,10337,-2724,666,11139,26750,-4609,-1890,1672,7006,16001,10573,-28912,30798,-16891,14181,30020,-20287,-32206,27957,-13657,-11535,-2711,24321,29364,12049,-18230,22886,-32004,-13716,2375,-9159,-16016,24044,-6403,23578,-12685,10243,-5200,15664,-19464,25305,-8639,-14701,-4812,-193,-23764,31498,32402,27055,16930,-7183,-20996,-19620,-1084,-4439,27403,32415,449,-3994,9375,-13869,22151,-16372,-14164,-29103,19301,1133,25023,15077,25882,17220,-27151,3587,16853,-26841,-30671,12549,388,31446,-6688,31637,17844,-28126,20102,-24796,27601,-12161,-940,-6968,18299,-19200,14041,7667,22084,27823,-23977,-20431,25828,-20795,32100,-3132,-19888,-23533,6319,-11135,13886,28395,-6066,12498,-11981,-20369,15448,30986,-13772,-424,27832,-14512,-30279,-7944,-17018,12902,-31532,3051,16409,19721 diff --git a/tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu5.tflite 
b/tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu5.tflite new file mode 100644 index 0000000..b56f0f3 Binary files /dev/null and b/tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu5.tflite differ diff --git a/tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu5_golden_int16.csv b/tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu5_golden_int16.csv new file mode 100644 index 0000000..195c782 --- /dev/null +++ b/tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu5_golden_int16.csv @@ -0,0 +1 @@ +-18001,-7740,32767,-9675,-13977,-12803,-16081,-16752,-18422,-9862,-12749,-7304,32767,-9835,-14203,32767,-13620,-17729,32767,-12371,8810,-21228,-16156,32767,32767,-18329,-21193,32767,-12529,-20327,32767,32767,-18452,10723,-7610,-4513,32767,4975,13093,-19115,-16740,7010,18924,32767,24174,32767,32767,-9697,-18640,29112,32767,-12357,32767,5456,32767,32767,-1547,-20753,-5037,-9524,-8637,32767,32767,32767,-17292,-7166,32767,32767,-8939,32767,32767,32767,32767,-18516,-3927,8949,20543,-23466,-20047,32767,-18453,31961,-11707,32767,32767,-10803,32767,-18711,-9271,9499,32767,-9550,-13037,-13710,-19211,-60,-23084,-5648,-14738,-20439,28126,-3775,15407,-8481,-19891,-11621,-22931,-18384,-10210,-3529,32767,32767,-4981,-6072,-11714,32767,32767,30737,32767,32767,15887,-23502,-12328,-6354,32767,-16679,19051,-22412,32767,32767,32767,-13109,-20530,-243,10410,-22544,-12180,32767,32767,27306,32767,-3977,32767,32767,-8854,-11269,-22380,-20405,-12625,3945,25236,32767,-13421,32767,-15778,-1315,-14030,-21958,32767,32767,13731,-1577,13772,-5976,32767,-16962,32767,-13952,23959,-18944,-9086,20951,-12320,32767,-7481,-18691,32767,24791,17592,32767,32767,-13103,-11240,-14992,-8124,32767,-5902,32767,-17603,-17577,25922,-14403,29167,-19082,16619,-3669,-9931,-2538,-2817,-15678,20722,-10511,15493,-10712,-10185,-18270,-22044,32767,-1008,10,32767,-7972,-1130,-6052,-6367,-16182,-15238,32767,32767,32767,-5252,32767,-8685,-5499
,-1115,-17695,-13729,22458,6814,-3046,-972,-19697,32767,32767,-586,32767,-3580,-6646,32767,32767,32767,-12577,-19282,-21910,16734,-1521,32767,-17402,-11032,-23331,-20908,-6458,21585,32767,29996,32767,32767,32069,-9152,32767,-11491,-13563,-11892,-13427,-21611,19904,-6361,32767,32767,32767,-20994,32767,25143,-14072,-9867,14188,-2644,-618,32767,-16193,-181,-13894,32767,32767,-18726,32767,32767,19008,-1673,-16798,32767,15261,-14543,-8492,-5179,-11395,28483,-7574,-7998,-20056,-10666,-4129,-18792,-9054,-1432,-21405,-2650,32767,-18475,-762,-12981,32767,32767,-20435,32767,-5078,32767,-5394,32767,1243,-10844,32767,-1617,-5402,-20439,29346,25819,-15405,-11230,13016,32767,-13134,4409,32767,32767,-20291,-12708,32767,32767,28024,-6567,32767,-10953,-1558,32767,29224,-12581,-10470,32767,23648,20791,32767,32767,-16636,-957,16158,-6090,-16834,8081,-5985,-22544,25341,13026,-3523,32767,32767,-22407,-2262,4033,-12072,32767,-3209,32767,-8859,32767,32767,32767,-19399,32767,-16260,32767,-7579,-7006,-10453,32767,-16258,-1110,31388,32767,13992,-704,32767,8306,32767,31660,32767,-5579,30022,-12592,-16799,-2669,-3961,-12077,-19891,-10717,-12260,32767,32767,198,17078,15957,-1257,-20007,-10910,-11304,28401,-908,32767,-20241,32767,6625,24858,-3913,13116,-13628,32767,32767,29243,32767,-9337,-20596,-13833,-6381,-18709,32662,-10926,-11331,-20822,32284,18666,-3137,-20743,23017,-499,9105,-4232,-22975,-19990,-14025,32767,32767,32767,-14482,-4073,-12647,20789,-4425,16485,-9941,-7710,32767,32767,11981,32767,-18591,-16008,-8119,32767,32767,-3859,32767,-12237,-22117,30522,-4353,-9134,-12494,32767,-7054,-19973,-3771,-22968,8433,32767,32767,-6705,32767,32767,25439,18769,32767,-21801,-20725,-3421,-22722,32767,25776,32767,-405,18467,-14246,11644,-16637,32767,14317,32767,-21580,32767,-3652,-13437,-22520,32767,32767,-6265,-16504,32767,32767,29080,-4866,10013,6147,-3601,-13173,13707,32767,32767,32767,32767,-14451,-16877,-1993,-20221,-1793,-15758,2400,-10149,-13087,32767,32767,-22815,32767,32767,-2399,-32,-14300,3
2767,32767,32767,20303,31440,32767,32767,-13140,18197,32767,6668,-5613,-3353,17303,19534,-15159,4992,20158,-15374,32767,-16058,32767,32767,32767,-15240,4605,-14501,32767,32767,-1695,32767,32767,-17739,27794,-22225,-20706,-15707,32767,-15309,32767,32767,30329,-21889,6243,-6265,32767,32767,-13290,32767,-5209,9987,-17955,24284,-1422,-18544,8323,-9430,32767,32767,-11203,-14076,32767,8227,32767,-15567,-6600,-7731,-17328,32112,-22377,8208,-22189,-9453,32767,3077,18998,-18311,29996,32767,-1014,-17320,-552,32767,-3029,-605,-7997,32767,-15536,32767,-12869,-7108,32767,32767,-10978,22312,-16203,-6204,21147,-11340,-9976,-13856,26307,-12887,8029,28947,-7830,32767,-2250,24021,32767,-22806,4210,-8186,27318,26532,-3272,-14075,29841,32767,-14128,19567,32767,32767,-7423,-5874,-6131,-21140,32767,-14038,-17830,32767,32767,29669,28786,32767,-14883,32767,-13886,-15121,20280,-13620,32767,-12440,29427,-17597,32767,15110,22955,-2514,10463,-8180,-13069,23911,-2005,-3763,-662,11295,-21935,-7183,-9205,29838,14934,32767,27562,4550,32767,-1860,5054,28198,32767,-2961,-14071,-19903,-3041,5796,32767,-20709,11950,-3510,-12049,-7800,32767,-13189,23500,32767,-171,19476,-2029,32767,-10565,32767,-19432,-4744,32767,13420,-12104,1310,31835,32767,-11618,-21429,32767,32767,32767,32767,-14993,-23147,12987,32767,-8317,32767,-4312,-2112,-21122,-10997,32767,5270,32767,-214,32767,32767,-6961,32767,-17077,-17875,17367,-15766,-8832,-10472,7369,19990,32767,32767,32767,-4301,32767,32767,-12719,24516,-23193,-20425,-2329,-20970,-9267,12600,-18516,32767,17336,31440,-18489,-9949,-16430,-5459,-13342,-21320,32767,-7819,4712,-6036,32767,-7906,32767,-2784,-10184,17181,4516,-15048,-15120,-16323,-1303,32767,13078,32767,32767,32767,32767,32767,-22962,-6097,12684,32767,-10842,32767,-19522,32767,-14148,-1060,-10791,32767,-21776,4488,-7923,32767,24968,-7688,-9375,32767,-9908,-9610,26285,-8415,-12765,32767,24877,32767,-2656,-18511,32767,32767,-12505,-19080,-2106,-20680,24552,-5448,-2412,-12447,32767,-1564,-4672,-22988,32767,32767,
-707,-1823,32767,32767,-2815,32767,8344,32767,-15030,6642,-22852,-20150,-21077,-9467,-11588,-5610,-19002,-11304,12514,32767,-14033,32767,32767,-12769,32767,32767,3830,-3872,23887,-13382,-21524,-19575,-7359,15409,32767,20932,-17091,32767,25614,-15489,-2874,-14946,16851,-3162,-3355,1772,-2967,-4178,-15872,-171,26372,14202,30369,32767,-14012,24777,-6328,-19551,-19518,32767,32705,27479,-12305,-3752,24017,-21680,-19371,-16828,32767,24445,-18645,20210,31617,32767,-19764,-5792,2604,19366,32767,399,13714,32767,-9000,-5624,-1579,-19741,32767,-12173,-1941,32767,7381,-16886,32767,8134,-1230,2097,-20508,24127,-2520,32767,28571,-22255,6714,32767,-6165,32767,-20490,-1739,2415,32767,-11467,-19281,32767,32767,-16072,18283,-7777,-1685,3830,-15522,-1079,14384,7405,32767,-3424,-17191,16980,32767,-9659,32767,32767,2104,-18293,-20297,10231,-994,-7767,32767,-16745,-6306,32767,18759,-18992,32299,-9053,32767,-5636,-10103,-7596,32767,9652,-20489,-21552,32767,-9833,32767,-10095,17303,14188,-747,32767,-15320,-13093,-19881,4029,-1133,32767,-17765,26161,32767,32767,-7772,32767,32767,-809,-8715,32767,-21493,-20812,11055,9150,32767,-10811,-1216,26099,-3673,-5007,32767,32767,-10966,-15631,-19545,15108,-10383,10121,-10559,-5448,-10972,-9210,-2235,19742,26510,-5002,2498,32767,32767,-1715,-13614,-10393,32767,32767,32767,-14490,29912,32767,32767,-12610,32767,-17257,-22253,-9086,-12729,-9461,32767,32767,-18912,11947,32767,-6902,15431,32767,32767,32767,-4571,-21439,-23295,32760,17807,3634,32767,-20796,32767,-13716,32767,26670,32767,32767,-7491,-6557,-1378,26625,-6406,32767,-14288,32767,-1798,-1473,-17905,-3603,-19107,-6544,-962,-22511,-3762,32767,-16663,32767,15491,32767,14039,-2172,32767,17671,26874,-17045,32767,32767,6467,32767,-19084,29035,-3469,32767,-22546,-10561,32767,-12919,-7800,-425,32767,27318,-18011,-8900,32767,25956,-13770,-1158,-20798,32767,27782,-16932,-19531,-18509,2879,-11870,-16063,-1009,-15931,22223,32767,32767,32767,32767,32767,-22395,-18405,28313,-565,27615,-18414,32767,-21598,32767,
2025,25614,26259,32767,32767,-21235,-18076,25389,-13578,-16849,32767,28583,-13138,-13495,-13404,32767,14740,32767,-14269,-13976,-12015,-716,32767,-6141,-20720,-11584,22386,20705,32767,12729,-5641,-18059,12064,32767,9007,-19107,32767,-6389,-9638,-11873,16225,21635,32767,9157,32767,-12319,-14761,4502,-1052,-16819,32767,-12671,-1798,-4073,-6916,32767,20423,15118,32767,32767,6063,32767,32767,-18242,1533,32767,10895,-13610,31902,22888,-9532,-10957,-9461,-2150,-4543,24987,-6600,-8870,32767,-18646,-19870,32767,32767,32767,32767,32767,32767,3859,32767 diff --git a/tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu5_input0_int16.csv b/tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu5_input0_int16.csv new file mode 100644 index 0000000..7bfe264 --- /dev/null +++ b/tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu5_input0_int16.csv @@ -0,0 +1 @@ +-25096,-10791,32016,-13489,-19487,-17849,-22420,-23355,-25684,-13749,-17775,-10183,20318,-13712,-19802,25052,-18989,-24718,13784,-17247,3685,-29595,-22525,17475,19799,-25554,-29547,18724,-17468,-28340,15731,31654,-25726,4485,-10610,-6292,15117,2081,5476,-26650,-23338,2932,7915,32174,10111,26882,19400,-13519,-25987,12176,26483,-17228,16651,2282,20480,29679,-2157,-28934,-7022,-13278,-12041,32680,21970,24744,-24108,-9990,28399,30149,-12463,29833,22404,22736,32437,-25814,-5475,3743,8592,-32716,-27949,13968,-25727,13368,-16322,23797,26713,-15061,27330,-26087,-12925,3973,26452,-13315,-18176,-19114,-26784,-83,-32183,-7874,-20547,-28496,11764,-5263,6444,-11824,-27731,-16202,-31970,-25630,-14235,-4920,18153,21489,-6944,-8466,-16332,21067,28815,12856,19862,23105,6645,-32766,-17188,-8859,18404,-23254,7968,-31246,14756,22329,30999,-18276,-28622,-339,4354,-31430,-16981,23895,29381,11421,14432,-5545,23595,16671,-12344,-15711,-31202,-28448,-17602,1650,10555,23694,-18711,15246,-21998,-1833,-19561,-30613,27529,29918,5743,-2198,5760,-8332,18030,-23648,26926,-19452,10021,-26412,-12667,8763,-17176,
27793,-10430,-26058,30010,10369,7358,17882,15944,-18268,-15670,-20901,-11327,23470,-8229,32586,-24542,-24506,10842,-20081,12199,-26604,6951,-5115,-13846,-3538,-3928,-21858,8667,-14654,6480,-14935,-14200,-25471,-30733,26562,-1405,4,24790,-11115,-1576,-8438,-8877,-22560,-21244,28706,20058,18835,-7322,28185,-12109,-7666,-1555,-24670,-19141,9393,2850,-4246,-1355,-27461,17376,25296,-817,14020,-4991,-9266,20712,19632,24310,-17534,-26882,-30547,6999,-2121,13847,-24262,-15380,-32527,-29149,-9004,9028,29018,12546,16968,31392,13413,-12760,28485,-16020,-18909,-16579,-18719,-30129,8325,-8869,32127,17410,14082,-29270,20693,10516,-19619,-13756,5934,-3686,-861,29212,-22576,-252,-19371,20770,21494,-26108,21222,30765,7950,-2333,-23419,27046,6383,-20275,-11840,-7221,-15887,11913,-10559,-11150,-27962,-14870,-5756,-26200,-12623,-1996,-29842,-3694,29153,-25758,-1063,-18098,21201,29701,-28490,25677,-7079,16390,-7520,24852,520,-15119,30335,-2254,-7532,-28495,12274,10799,-21477,-15656,5444,28092,-18311,1844,19588,30957,-28289,-17717,26475,18453,11721,-9155,23370,-15271,-2172,19333,12223,-17540,-14597,13804,9891,8696,20323,32391,-23193,-1334,6758,-8491,-23469,3380,-8344,-31430,10599,5448,-4911,14042,31893,-31239,-3154,1687,-16831,27081,-4474,19200,-12351,21304,27733,27112,-27046,30576,-22669,19897,-10566,-9767,-14573,19717,-22666,-1547,13128,19242,5852,-982,26627,3474,27348,13242,25421,-7778,12557,-17555,-23421,-3721,-5523,-16838,-27732,-14941,-17093,31485,26526,83,7143,6674,-1753,-27893,-15210,-15760,11879,-1266,31684,-28219,17491,2771,10397,-5456,5486,-19000,19525,18702,12231,26365,-13017,-28715,-19286,-8896,-26084,13661,-15233,-15797,-29030,13503,7807,-4374,-28920,9627,-696,3808,-5900,-32031,-27869,-19553,14631,19154,22639,-20191,-5679,-17632,8695,-6169,6895,-13860,-10749,22772,31164,5011,31788,-25919,-22318,-11320,28148,14760,-5380,31733,-17061,-30835,12766,-6069,-12735,-17419,17738,-9835,-27846,-5258,-32022,3527,28567,21366,-9348,32040,25793,10640,7850,27555,-30394,-28894,-4770,-31679,
26654,10781,26359,-564,7724,-19862,4870,-23195,30008,5988,21965,-30086,25540,-5092,-18734,-31397,14710,29525,-8735,-23010,20774,16722,12163,-6784,4188,2571,-5020,-18365,5733,23363,32468,14823,23463,-20147,-23530,-2779,-28191,-2500,-21969,1004,-14149,-18246,21514,15203,-31808,23021,15584,-3344,-44,-19937,23620,23182,27946,8492,13150,28215,31604,-18320,7611,29713,2789,-7825,-4674,7237,8170,-21134,2088,8431,-21434,27004,-22388,17476,18666,29112,-21247,1926,-20217,21905,15471,-2363,19212,15199,-24732,11625,-30986,-28868,-21899,15305,-21344,24568,28832,12685,-30517,2611,-8735,17746,31400,-18528,17883,-7262,4177,-25033,10157,-1982,-25853,3481,-13147,15748,18068,-15619,-19625,16367,3441,28247,-21703,-9202,-10778,-24159,13431,-31197,3433,-30936,-13179,30028,1287,7946,-25529,12546,16428,-1413,-24147,-770,15532,-4223,-844,-11149,25439,-21660,22063,-17941,-9910,28033,22157,-15305,9332,-22590,-8649,8845,-15810,-13908,-19318,11003,-17967,3358,12107,-10916,32178,-3137,10047,24161,-31796,1761,-11413,11426,11097,-4562,-19623,12481,21773,-19697,8184,14723,18868,-10349,-8189,-8548,-29473,32219,-19571,-24858,26513,19839,12409,12040,25782,-20750,28352,-19360,-21081,8482,-18989,26545,-17343,12308,-24534,14376,6320,9601,-3505,4376,-11405,-18220,10001,-2796,-5246,-923,4724,-30581,-10014,-12833,12480,6246,19595,11528,1903,17108,-2593,2114,11794,17254,-4128,-19617,-27748,-4239,2424,23223,-28872,4998,-4894,-16799,-10874,29802,-18388,9829,18004,-239,8146,-2829,24318,-14729,30561,-27092,-6614,21546,5613,-16875,548,13315,17417,-16198,-29876,27405,30081,15622,16607,-20903,-32271,5432,16246,-11595,24297,-6011,-2945,-29448,-15332,28692,2204,21910,-299,30083,17281,-9705,22824,-23808,-24921,7264,-21981,-12314,-14600,3082,8361,17962,25895,18828,-5996,20212,29129,-17733,10254,-32335,-28476,-3247,-29236,-12920,5270,-25814,28969,7251,13150,-25777,-13871,-22906,-7611,-18601,-29724,24653,-10901,1971,-8415,18162,-11022,15229,-3881,-14199,7186,1889,-20980,-21080,-22757,-1816,31446,5470,15101,18356,15221,145
75,23494,-32013,-8501,5305,26098,-15115,17701,-27217,24550,-19725,-1478,-15044,29960,-30360,1877,-11046,23626,10443,-10719,-13070,21733,-13814,-13398,10994,-11732,-17797,25129,10405,31417,-3703,-25808,22425,18565,-17434,-26601,-2936,-28832,10269,-7595,-3363,-17354,31525,-2181,-6514,-32050,28108,22126,-985,-2541,31220,21305,-3924,16151,3490,26302,-20954,2778,-31860,-28092,-29385,-13199,-16156,-7821,-26492,-15760,5234,29760,-19565,13872,23183,-17802,30311,24534,1602,-5398,9991,-18657,-30008,-27291,-10260,6445,27454,8755,-23828,17849,10713,-21595,-4007,-20838,7048,-4409,-4678,741,-4137,-5825,-22128,-238,11030,5940,12702,15126,-19535,10363,-8823,-27258,-27212,28243,13679,11493,-17156,-5231,10045,-30226,-27006,-23461,15806,10224,-25995,8453,13224,17725,-27555,-8075,1089,8100,14862,167,5736,21857,-12548,-7841,-2202,-27522,21223,-16971,-2706,14167,3087,-23542,24014,3402,-1715,877,-28592,10091,-3513,22690,11950,-31028,2808,29444,-8595,28688,-28567,-2424,1010,20880,-15987,-26881,21016,25882,-22407,7647,-10843,-2349,1602,-21641,-1504,6016,3097,18627,-4774,-23968,7102,27177,-13467,26549,26410,880,-25504,-28298,4279,-1386,-10829,23436,-23346,-8791,26843,7846,-26478,13509,-12622,21724,-7857,-14086,-10590,19333,4037,-28566,-30048,28067,-13709,31550,-14074,7237,5934,-1042,17078,-21359,-18254,-27717,1685,-1579,23614,-24767,10942,19370,23512,-10836,15485,29907,-1128,-12150,22056,-29965,-29016,4624,3827,28915,-15072,-1696,10916,-5121,-6981,23184,31772,-15289,-21792,-27249,6319,-14476,4233,-14721,-7596,-15297,-12840,-3116,8257,11088,-6974,1045,24880,26457,-2391,-18980,-14490,28928,15189,30386,-20202,12511,20079,21614,-17581,15674,-24060,-31024,-12667,-17747,-13190,24985,28741,-26367,4997,19481,-9623,6454,29144,15403,27173,-6373,-29890,-32478,13702,7448,1520,32105,-28994,14752,-19122,26902,11155,32493,30648,-10444,-9142,-1921,11136,-8931,21865,-19920,20778,-2507,-2054,-24963,-5023,-26639,-9123,-1341,-31384,-5245,26420,-23231,20479,6479,15119,5872,-3028,25622,7391,11240,-23764,18586,228
65,2705,15519,-26606,12144,-4836,28473,-31433,-14724,18740,-18012,-10875,-593,25264,11426,-25110,-12408,17324,10856,-19198,-1615,-28996,32744,11620,-23606,-27229,-25805,1204,-16549,-22394,-1407,-22211,9295,27579,27402,16060,21625,25490,-31222,-25660,11842,-788,11550,-25673,24372,-30112,16409,847,10713,10983,20306,23298,-29606,-25201,10619,-18930,-23491,31304,11955,-18317,-18814,-18688,18382,6165,22503,-19893,-19485,-16751,-998,21755,-8562,-28888,-16150,9363,8660,19539,5324,-7864,-25177,5046,26379,3767,-26639,27785,-8908,-13437,-16553,6786,9049,22005,3830,30853,-17175,-20579,1883,-1466,-23449,25642,-17666,-2507,-5679,-9642,26717,8542,6323,23204,24277,2536,27302,27547,-25432,641,29390,4557,-18975,13343,9573,-13289,-15276,-13190,-2998,-6334,10451,-9202,-12366,21438,-25996,-27702,29107,19152,27476,32007,24465,27475,1614,26573 diff --git a/tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu6.tflite b/tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu6.tflite new file mode 100644 index 0000000..eab31c7 Binary files /dev/null and b/tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu6.tflite differ diff --git a/tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu6_golden_int16.csv b/tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu6_golden_int16.csv new file mode 100644 index 0000000..bc89910 --- /dev/null +++ b/tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu6_golden_int16.csv @@ -0,0 +1 @@ 
+23325,9227,4517,-556,10188,-3698,-959,8418,25111,15501,-4754,-9047,31501,31253,-6546,-5066,25346,-5218,3484,14581,31310,-4299,14798,29261,18391,27226,9377,-3735,-8175,2629,-9514,-4844,-7990,11514,-8119,27817,29668,-6826,-8511,23476,25371,-7959,-3136,3076,9354,-6207,-387,31842,-639,-2185,4145,-8323,-6697,-697,-2761,25470,-8682,17474,-8285,19790,20532,8811,23019,-539,-1521,29599,3387,-6619,8285,14372,-7886,-7648,-578,-5958,-5104,-4837,-6297,18414,-160,3082,-6761,-2734,-5140,-7168,-6826,-1492,6413,-4612,2803,12198,-4699,26948,22447,-8800,7482,-6877,-5188,-4222,-733,24937,-3735,24935,28329,-834,13966,-8324,15810,-9226,-6214,-6456,9968,-4825,24454,16803,-296,-5422,11973,29443,-833,-5130,-7243,-7290,11877,-9338,-5949,3069,-3000,2124,25194,-6741,-8099,-7081,-6438,7926,-4956,-9787,-5928,14127,-7717,-8101,-7173,-55,22336,-7957,-3155,-3849,17321,-3388,24363,3386,19876,-9783,-3936,-2241,-5908,22384,-5428,-1135,-2045,-4569,-958,23060,25243,27528,9215,-9502,-8599,-3305,12769,-5836,16041,-8614,-7985,21980,-8508,-8365,-9463,13192,12548,21056,32023,26046,14265,17718,-6189,28772,30155,-1768,11101,5620,-2692,7127,-6005,-4345,-2355,5002,-4707,-2380,-790,19474,-8241,29766,26691,-9706,25384,-6491,22611,13537,-4471,-9506,5196,23090,13630,-4645,-1997,19079,-8287,19119,20209,-4479,-8972,-8052,26762,8770,-4217,1641,-6069,-54,-8447,-5732,-3674,22084,-1438,7742,4958,-521,-1707,18298,-9436,1094,22570,10535,-5571,6383,13088,6504,-1790,13439,15779,20589,-3356,21980,9265,21246,21375,-9447,-3487,-6334,22224,-7188,-8961,-6854,-7305,-8495,7057,-9095,-4819,-5823,26111,-3620,-4047,-3729,30183,38,-6559,31426,-7687,32419,25000,-8759,16737,5052,-3212,19635,17339,-1015,-491,-7217,-4180,-5946,29210,-6796,22103,-7664,-2332,-445,-4809,-3511,27011,20373,-4893,14623,14685,-470,8632,22798,30164,23583,14362,-4480,-3499,4575,-7219,-150,11823,-4184,-8211,-4833,-29,-6687,-239,-6799,9496,32538,-1739,6727,-4034,-8157,26889,-16,-6241,7978,-7872,15460,-7703,-3425,17314,-8066,-222,-9155,30480,12026,-2918,-2729,-5739,-6
060,-4168,21556,18316,22579,20588,-2265,1698,-9422,4843,-5999,9321,-5214,16151,28919,22561,-1403,23066,1350,5861,-6451,7878,-8572,-7725,31333,-3761,13630,7610,-1134,28192,-5114,16171,28803,-6819,-3415,-7116,14492,-1051,-8810,-843,-2419,-2815,10559,-5115,-277,-9281,12810,1610,13810,23485,-3168,-9718,4622,-1721,-8500,-7082,-2167,-8737,24959,-4144,-7639,-5471,-8221,23931,547,1846,27005,25697,-8691,26391,-9502,23473,-4025,-4673,-6817,-8705,18736,16009,16849,4848,-9763,8203,-7442,-3138,-2033,-7783,-6020,-6814,-7771,812,-9515,-3901,30352,18717,-2243,-910,2896,-3998,28912,-169,25753,-1053,913,-8118,-4367,-9100,-3962,10233,11270,-2995,22311,-3828,-1059,15226,24809,11263,11961,-3188,27238,-2905,-7879,20162,-7105,-9743,-9617,-6592,19331,-4909,15848,15274,-6391,29905,5693,-8221,19331,-1616,20623,19156,4170,13485,-4673,28873,-3285,-7500,-1192,5782,3384,-1323,-1761,-7320,-961,12477,-6914,31623,18513,-8728,6768,-7805,-58,8979,15119,11922,542,21976,19968,8327,-7011,31272,-6776,26257,-8599,12386,7433,-629,24850,-2827,-256,-3984,10486,-6028,32593,29359,-7152,29411,2151,30856,22874,23004,21269,29604,-9145,-4010,31867,-6083,-8451,5689,28971,-6172,4304,19245,-8125,27223,1519,19614,-2446,21990,11714,-8377,-742,-6224,19314,18691,27772,-5308,31109,-1495,-340,-8416,-6105,-7860,17230,-9769,-5998,-7822,-7980,-6808,-1752,1741,30912,20895,-5406,-7566,31425,-3238,28398,2916,-160,3030,-999,-8359,-3881,-9142,31676,29914,30762,3371,15589,-2883,25574,26044,-947,10687,19446,-6794,1927,2231,-8454,32537,19089,7659,14839,-7010,-6027,-1101,15247,30231,-8139,-4272,27581,5602,18976,-7854,13912,27371,12753,9414,32198,-4057,-4280,14751,-5011,14037,-2814,-10,-896,27113,-9001,11153,-4164,24845,-3930,-2705,28835,8846,-8170 diff --git a/tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu6_input0_int16.csv b/tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu6_input0_int16.csv new file mode 100644 index 0000000..0988aa7 --- /dev/null +++ 
b/tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu6_input0_int16.csv @@ -0,0 +1 @@ +23325,9227,4517,-1851,10188,-12325,-3197,8418,25111,15501,-15846,-30156,31501,31253,-21819,-16887,25346,-17392,3484,14581,31310,-14328,14798,29261,18391,27226,9377,-12450,-27250,2629,-31714,-16147,-26631,11514,-27062,27817,29668,-22753,-28369,23476,25371,-26528,-10452,3076,9354,-20690,-1290,31842,-2129,-7284,4145,-27743,-22323,-2324,-9204,25470,-28940,17474,-27615,19790,20532,8811,23019,-1797,-5068,29599,3387,-22063,8285,14372,-26285,-25493,-1927,-19859,-17011,-16124,-20989,18414,-531,3082,-22535,-9111,-17132,-23892,-22753,-4972,6413,-15372,2803,12198,-15662,26948,22447,-29334,7482,-22921,-17291,-14071,-2441,24937,-12449,24935,28329,-2778,13966,-27747,15810,-30753,-20713,-21520,9968,-16083,24454,16803,-987,-18071,11973,29443,-2775,-17098,-24143,-24299,11877,-31126,-19829,3069,-10000,2124,25194,-22470,-26997,-23604,-21458,7926,-16519,-32621,-19759,14127,-25724,-27003,-23910,-184,22336,-26523,-10515,-12828,17321,-11291,24363,3386,19876,-32609,-13118,-7470,-19694,22384,-18091,-3781,-6816,-15229,-3191,23060,25243,27528,9215,-31673,-28663,-11015,12769,-19452,16041,-28713,-26615,21980,-28359,-27882,-31543,13192,12548,21056,32023,26046,14265,17718,-20628,28772,30155,-5894,11101,5620,-8972,7127,-20017,-14483,-7850,5002,-15688,-7934,-2631,19474,-27470,29766,26691,-32352,25384,-21635,22611,13537,-14902,-31686,5196,23090,13630,-15481,-6656,19079,-27622,19119,20209,-14928,-29906,-26840,26762,8770,-14057,1641,-20230,-179,-28157,-19106,-12245,22084,-4793,7742,4958,-1737,-5689,18298,-31454,1094,22570,10535,-18569,6383,13088,6504,-5967,13439,15779,20589,-11185,21980,9265,21246,21375,-31489,-11621,-21114,22224,-23960,-29870,-22846,-24350,-28316,7057,-30315,-16062,-19410,26111,-12067,-13489,-12430,30183,38,-21863,31426,-25622,32419,25000,-29197,16737,5052,-10707,19635,17339,-3384,-1635,-24055,-13934,-19818,29210,-22654,22103,-25545,-7771,-1483,-16028,-11703,27011,20373,-16310,14623
,14685,-1566,8632,22798,30164,23583,14362,-14933,-11663,4575,-24062,-498,11823,-13947,-27370,-16108,-95,-22289,-795,-22664,9496,32538,-5797,6727,-13445,-27190,26889,-54,-20801,7978,-26239,15460,-25677,-11416,17314,-26887,-740,-30515,30480,12026,-9725,-9095,-19129,-20199,-13892,21556,18316,22579,20588,-7548,1698,-31407,4843,-19997,9321,-17379,16151,28919,22561,-4676,23066,1350,5861,-21504,7878,-28571,-25750,31333,-12535,13630,7610,-3779,28192,-17045,16171,28803,-22729,-11382,-23720,14492,-3503,-29365,-2810,-8061,-9384,10559,-17048,-923,-30935,12810,1610,13810,23485,-10559,-32394,4622,-5735,-28331,-23607,-7223,-29123,24959,-13812,-25462,-18236,-27404,23931,547,1846,27005,25697,-28968,26391,-31674,23473,-13416,-15576,-22722,-29016,18736,16009,16849,4848,-32543,8203,-24807,-10458,-6775,-25943,-20066,-22712,-25902,812,-31716,-13002,30352,18717,-7476,-3031,2896,-13326,28912,-563,25753,-3508,913,-27059,-14556,-30334,-13206,10233,11270,-9982,22311,-12759,-3529,15226,24809,11263,11961,-10627,27238,-9682,-26263,20162,-23682,-32477,-32057,-21971,19331,-16361,15848,15274,-21303,29905,5693,-27401,19331,-5385,20623,19156,4170,13485,-15575,28873,-10949,-24998,-3974,5782,3384,-4408,-5869,-24399,-3202,12477,-23047,31623,18513,-29094,6768,-26017,-193,8979,15119,11922,542,21976,19968,8327,-23370,31272,-22587,26257,-28662,12386,7433,-2095,24850,-9423,-853,-13280,10486,-20094,32593,29359,-23840,29411,2151,30856,22874,23004,21269,29604,-30481,-13365,31867,-20277,-28170,5689,28971,-20572,4304,19245,-27081,27223,1519,19614,-8153,21990,11714,-27922,-2474,-20746,19314,18691,27772,-17693,31109,-4982,-1134,-28051,-20348,-26199,17230,-32561,-19993,-26071,-26599,-22691,-5838,1741,30912,20895,-18019,-25219,31425,-10792,28398,2916,-532,3030,-3329,-27863,-12935,-30472,31676,29914,30762,3371,15589,-9610,25574,26044,-3156,10687,19446,-22645,1927,2231,-28178,32537,19089,7659,14839,-23367,-20089,-3668,15247,30231,-27128,-14239,27581,5602,18976,-26178,13912,27371,12753,9414,32198,-13524,-14267,14751,-16
703,14037,-9380,-31,-2985,27113,-30001,11153,-13879,24845,-13098,-9015,28835,8846,-27234 diff --git a/tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu7.tflite b/tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu7.tflite new file mode 100644 index 0000000..c1c29af Binary files /dev/null and b/tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu7.tflite differ diff --git a/tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu7_golden_int16.csv b/tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu7_golden_int16.csv new file mode 100644 index 0000000..b6786bf --- /dev/null +++ b/tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu7_golden_int16.csv @@ -0,0 +1 @@ +-8420,-8071,-7103,32767,32767,29061,1154,17163,5891,15044,18042,14068,8752,3208,17024,-4626,-5227,32767,-547,-8782,-424,32767,18505,32767,26607,5037,-2197,-8541,-159,13041,21103,9537,17147,32767,32767,8549,-3736,32767,14914,32767,9212,13509,812,32767,32767,-12220,-6399,-2793,-12047,32767,32767,22384,-4761,27589,32767,32767,-9405,-12371,-12649,32767,4337,32767,23303,7925,29115,32767,20552,-13199,20517,-177,-2371,12700,32767,22178,-5188,27614,-1408,32767,32767,7972,17943,16901,-7329,-4638,32767,14947,-5105,-8904,-8026,32767,-252,-3838,32767,-1539,-207,-12835,-360,32767,32767,-10586,1956,20562,32767,-1593,25169,-860,-9414,21305,10628,-6740,-12452,32767,-8377,-7098,2389,-13240,-4558,32767,32767,-5778,32767,-1455,29311,-4400,-10729,-384,4701,21915,2822,-4223,-12644,-4573,-11301,32767,30982,-7242,31371,32767,32767,26722,-4965,-9899,-4704,250,-2741,32053,-6269,23335,24494,22704,32767,1949,-4746,20705,-7662,32767,-1833,-9709,29750,-2810,28829,32767,11262,-6570,-413,-2222,-1922,-10935,4227,-10591,27110,-11366,-12457,-3399,30238,16533,-6124,1494,23120,-3500,15517,7760,8356,-9770,2992,-5694,25662,-11862,22998,-2154,12498,32767,15562,-5400,32767,14553,-9667,14147,3669,23392,29546,-8827,24598,-6526,-5151,-12526,32767,20059,
3351,-5669,-7097,-2825,-2535,-10959,-6768,19871,21592,23608,-4081,-6682,10607,-11443,-7240,5358,-43,-12880,-7115,14500,10488,5290,26970,32767,32767,32266,19895,23935,-3889,32767,13382,10994,930,20990,-5927,-10281,13663,32295,-10102,32767,-3950,-975,-781,-1483,-2094,3668,21455,24352,23308,3598,14684,32767,-3475,25515,12796,16934,15283,8411,-4985,-11407,32767,-93,11737,-273,-3533,32767,31000,-6463,-12807,-1405,-317,8385,-13,32767,17810,-5210,27919,1626,30493,32119,7727,32767,32767,-11402,25326,895,4659,28225,-3846,-3648,-5277,30484,-5454,14955,27718,10711,32767,22154,13212,6325,25662,26139,23953,12482,-8692,10608,-4166,22008,-12569,21614,32767,9260,-9622,32759,3183,4720,6156,8892,-7665,23365,-1570,-11262,32767,-12882,-5818,3999,13731,-7851,-4207,32767,-2469,-2421,2254,32767,2392,-12820,32767,20269,32767,15780,-13252,25062,-9812,-6064,-12947,195,-10631,23957,-5307,-3258,25860,8558,2884,-12592,-4560,-8358,-32,32767,7028,17412,-2847,3451,-11580,21632,32767,-6743,-6732,22045,-1005,-2725,32767,11345,32767,18101,-6130,-2002,-10998,12480,19689,-3051,9423,-4450,-10254,7214,-13240,19353,27692,-2187,-3042,-10384,-9486,9504,386,14285,-3975,25112,32767,-10316,-3591,-8064,30813,-7470,-5551,14206,-1633,-8769,-9873,-10454,-3914,11689,20015,-11624,32767,-5299,2925,29915,-7537,-6344,32767,22115,-8820,-10771,5129,-3971,32767,-462,-5758,3499,-7569,-10855,3936,14414,-10239,11058,32767,-6128,28856,5126,-2342,23671,26062,-12340,28023,19890,-5226,-3342,-5561,-2193,32767,-8588,-13195,2528,32767,-7238,22969,32767,32767,16014,25405,-13191,31304,28715,-10474,-2477,32767,-6020,32767,-8366,349,32767,-12552,5401,-10825,16680,24814,-10696,-5420,4171,32767,15948,32767,32767,8774,-5814,-12768,11292,32767,-12147,-4649,5516,13429,-141,-12932,-11824,-12585,16092,15370,-3173,24579,-12263,-8973,9151,-11628,-6227,-1107,-2787,-5471,21115,-12714,676,20820,-2504,12016,-13225,-756,29197,27929,29229,32767,24017,-703,-9712,32767,32767,-6396,32767,32767,20398,12089,2841,-12427,32767,32767,-2161,32767,5624,22438,1
9386,-739,22165,29275,30499,-3739,-13009,18020,-13276,-6130,32767,-4113,-334,32767,19187,22619,-11646,-2229,32767,32265,-11026,-1660,32767,2035,32767,-7539,32767,-6535,-2749,-6471,32767,12434,-5808,32767,-4518,32767,-12283,6607,32767,21735,-4608,27426,20061,-3896,4975,-6349,1261,32237,28336,-6794,21264,1513,-8201,-2742,31700,-5603,6967,-6428,30087,-4433,-10612,-2601,-5316,-751,-12580,-377,-2595,4257,1354,8936,-12617,7065,-12672,-11549,4739,-3890,-10589,-11632,32767,17837,31585,20970,-4279,-6575,-8640,7689,14886,-9122,17583,-1983,-2171,-9144,-4617,12578,31500 diff --git a/tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu7_input0_int16.csv b/tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu7_input0_int16.csv new file mode 100644 index 0000000..c53a794 --- /dev/null +++ b/tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu7_input0_int16.csv @@ -0,0 +1 @@ +-20438,-19590,-17240,27104,31152,21162,840,12498,4290,10955,13138,10244,6373,2336,12397,-11229,-12687,29764,-1327,-21316,-1029,30153,13475,29630,19375,3668,-5333,-20732,-386,9496,15367,6945,12486,31425,31321,6225,-9068,28287,10860,24366,6708,9837,591,32315,31917,-29660,-15531,-6780,-29242,28942,30523,16300,-11555,20090,32223,24849,-22829,-30027,-30703,25105,3158,30652,16969,5771,21201,31750,14966,-32037,14940,-428,-5754,9248,30384,16150,-12592,20108,-3418,29720,30829,5805,13066,12307,-17788,-11258,29764,10884,-12390,-21613,-19482,32093,-611,-9316,32224,-3736,-503,-31153,-873,32429,25293,-25694,1424,14973,24191,-3865,18328,-2088,-22849,15514,7739,-16359,-30224,31515,-20333,-17228,1740,-32136,-11064,24950,31991,-14024,25418,-3530,21344,-10679,-26042,-932,3423,15958,2055,-10250,-30691,-11100,-27431,24251,22561,-17579,22844,29774,30589,19459,-12050,-24028,-11418,182,-6653,23341,-15217,16992,17836,16533,24796,1419,-11519,15077,-18598,26715,-4449,-23565,21664,-6819,20993,24249,8201,-15946,-1001,-5394,-4665,-26543,3078,-25708,19741,-27587,-30237,-8249,22019,12039,-14864,
1088,16836,-8495,11299,5651,6085,-23714,2179,-13820,18687,-28792,16747,-5227,9101,31585,11332,-13107,24375,10597,-23464,10302,2672,17034,21515,-21426,17912,-15839,-12502,-30403,30697,14607,2440,-13759,-17225,-6856,-6152,-26601,-16427,14470,15723,17191,-9906,-16219,7724,-27776,-17572,3902,-103,-31263,-17269,10559,7637,3852,19639,26531,24973,23496,14487,17429,-9438,26126,9745,8006,677,15285,-14385,-24955,9949,23517,-24520,27303,-9588,-2366,-1894,-3599,-5083,2671,15623,17733,16973,2620,10693,27920,-8434,18580,9318,12331,11129,6125,-12099,-27687,25900,-225,8547,-662,-8575,25673,22574,-15686,-31085,-3410,-770,6106,-31,24294,12969,-12646,20330,1184,22205,23389,5627,25991,31915,-27675,18442,652,3393,20553,-9335,-8853,-12809,22198,-13239,10890,20184,7800,25033,16132,9621,4606,18687,19034,17442,9089,-21097,7725,-10112,16026,-30509,15739,26916,6743,-23356,23855,2318,3437,4483,6475,-18605,17014,-3811,-27336,29694,-31269,-14122,2912,9999,-19055,-10211,25955,-5993,-5876,1641,26137,1742,-31118,28827,14760,31628,11491,-32166,18250,-23817,-14719,-31425,142,-25805,17445,-12880,-7907,18831,6232,2100,-30565,-11068,-20286,-77,32725,5118,12679,-6909,2513,-28108,15752,24025,-16367,-16339,16053,-2438,-6614,28038,8261,31215,13181,-14879,-4858,-26695,9088,14337,-7404,6862,-10800,-24888,5253,-32138,14093,20165,-5309,-7383,-25204,-23025,6921,281,10402,-9647,18286,31394,-25040,-8715,-19572,22438,-18132,-13473,10345,-3964,-21285,-23965,-25375,-9501,8512,14575,-28215,27433,-12861,2130,21784,-18294,-15397,31749,16104,-21408,-26143,3735,-9637,30955,-1120,-13976,2548,-18371,-26348,2866,10496,-24852,8052,26599,-14875,21013,3733,-5683,17237,18978,-29952,20406,14484,-12685,-8111,-13498,-5323,25264,-20845,-32028,1841,26939,-17567,16726,25341,31972,11661,18500,-32019,22795,20910,-25422,-6012,27777,-14612,26672,-20307,254,24325,-30468,3933,-26275,12146,18069,-25961,-13155,3037,25168,11613,31879,28024,6389,-14111,-30991,8223,24067,-29485,-11285,4017,9779,-341,-31389,-28700,-30547,11718,11192,-7701,17898,-
29765,-21779,6664,-28225,-15113,-2687,-6765,-13278,15376,-30859,492,15161,-6078,8750,-32100,-1834,21261,20338,21284,27894,17489,-1705,-23573,27227,30285,-15524,31487,29756,14854,8803,2069,-30163,25962,30069,-5245,27177,4095,16339,14117,-1793,16140,21318,22209,-9076,-31576,13122,-32225,-14879,28691,-9983,-810,24720,13972,16471,-28267,-5409,31638,23495,-26763,-4028,24626,1482,26273,-18300,27342,-15863,-6672,-15706,28309,9054,-14098,26683,-10967,27493,-29813,4811,29082,15827,-11184,19971,14608,-9455,3623,-15410,918,23475,20634,-16490,15484,1102,-19905,-6656,23084,-13600,5073,-15602,21909,-10760,-25758,-6312,-12902,-1823,-30535,-914,-6299,3100,986,6507,-30625,5145,-30759,-28033,3451,-9441,-25701,-28233,26232,12989,23000,15270,-10385,-15959,-20972,5599,10840,-22141,12804,-4813,-5268,-22194,-11207,9159,22938 diff --git a/tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu8.tflite b/tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu8.tflite new file mode 100644 index 0000000..b5ceb60 Binary files /dev/null and b/tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu8.tflite differ diff --git a/tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu8_golden_int16.csv b/tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu8_golden_int16.csv new file mode 100644 index 0000000..fd2ce4a --- /dev/null +++ b/tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu8_golden_int16.csv @@ -0,0 +1 @@ 
+29374,-3085,-14409,-10684,-10057,-11833,2426,17839,-11509,-6001,-9604,-12343,32767,-14953,-2764,-13434,-11650,-4920,25767,-288,-14907,15936,-8944,-1048,11683,4305,-2491,-10377,-5439,1189,-11840,19637,32767,-9929,-11229,32767,-2461,-2993,32767,-13946,-5998,32767,-10637,32767,-9259,7101,6705,-6593,32767,-10415,11221,32767,-4980,32767,3960,28206,-3602,-1400,-11936,23745,-13799,-15020,32767,-4671,-15348,32767,-4430,32767,-1815,13097,32767,32767,21850,-14291,32767,-15282,-4223,-13802,-14690,-12716,32767,29639,32767,-634,-2251,3994,6129,-14761,30163,-10646,-183,6713,-4098,2209,32767,-11295,13865,-7380,1609,18076,-12789,-59,4417,32767,-819,11477,8505,30842,-12397,32767,-14371,-13831,-14215,-2287,-7863,-36,10624,-15061,-9184,-218,25491,-9211,26009,-3657,-3454,9322,-2718,-14970,-11214,-12540,32767,32767,-13261,28187,-3695,-5373,-11613,32767,32767,353,22214,-6253,-759,27462,32767,5970,-8044,14000,-13873,-9113,-11854,-11244,-1580,17166,32767,-8307,32767,32767,729,-298,-7153,-8504,-7956,-313,-8995,13278,-467,-12922,-7523,32767,32767,-7320,-3210,-11123,-5328,-158,32454,32767,32767,32767,-14298,-5100,32767,-8741,32767,-6977,32767,9081,32767,-12596,32767,-8256,32767,-4267,10517,-4875,-11585,-210,-12962,32767,-11777,-303,-3824,29783,-8766,31767,-12450,5162,32767,14860,20611,-5423,32767,-1219,32340,-12346,30834,-8032,22411,20627,-2305,-6858,22617,27192,-9158,21253,-3950,-1289,32767,-3524,27862,-12042,25163,-5715,-3140,-5081,24107,15743,32767,-12765,32767,-5510,6456,32767,-5773,-5976,-8440,-3044,-1087,-2676,30095,25099,21994,-740,19381,-6067,32767,-1334,-7377,-8104,-3274,-269,-10552,32767,31197,-8434,32767,32767,-5635,32767,32767,-7820,31481,-3729,23671,-14813,19377,10128,-2834,32287,32767,32767,-1023,32767,28932,-7427,20946,-471,16608,32767,6905,6940,32767,-283,18596,23935,2569,32767,-5000,-11777,-1664,-6907,27173,32767,18601,11738,23849,-11611,32767,32767,20515,32767,-4943,-11509,-4928,-974,3778,-10197,-11577,-6344,-12454,-5453,-1449,11174,-5652,-9483,-2932,32767,7589,15904,32767,
-5706,-7223,32767,772,32767,-4421,32767,-1538,-5189,1782,1423,-12768,-7585,67,-2262,-9010,29549,-12609,-5712,8188,32767,-8833,-9035,-5306,-7120,-5079,-5691,24811,32068,-178,32767,1334,16161,-12,27087,4644,-1776,-10871,-10323,32767,-376,3788,-12026,19273,-13703,-14617,2607,-10100,-12133,30338,6983,1014,24794,29190,4481,-2871,-5716,7852,26581,-4574,-6775,-5070,16663,18340,11513,-3502,6078,32767,32767,-6040,-6965,7193,1431,-5361,27467,-9505,32767,-1174,20550,-13410,3616,-8096,32767,32767,32767,22611,30203,-9911,-13320,-13250,-4632,-9904,-14086,-12871,32767,20893,32767,-1312,32526,-10641,-14578,-7503,32767,15974,-834,-4720,32767,-14263,-4592,-5553,-13661,32767,-11577,26886,-18,4128,-5168,32767,-6464,-13417,-6323,-9376,-4098,-8387,24957,32767,-10325,-3565,-11886,4487,2751,13497,-687,10457,-5951,-9906,32767,-4996,-10954,1720,32767,13728,-465,-2685,7958,-11124,32767,32767,32767,-9656,32767,32767,-2596,19554,32767,9009,32767,-13005,-3616,26364,3163,-4335,16493,18098,-11223,13714,32767,14676,1359,29008,3683,23645,-97,4411,-3047,6546,-4874,-12346,-4883,11518,24697,22698,9668,32767,32767,-2842,-2800,13250,27082,-1817,-3970,32767,13728,3763,16182,18025,32767,-15065,32767,-13387,-11689,-3510,-1513,-4860,198,14706,-2910,-15044,-9169,-11295,32767,32767,26225,21806,32767,-10532,32767,-561,32767,-9426,32767,-2338,13259,-2830,31226,21264,-11906,10003,4462,6301,1029,-15078,32767,13500,27888,14724,-5625,-3921,-15297,-10248,-9194,13095,19734,-2682,-13048,32767,-7604,-13502,-13379,-1441,-7045,32767,32767,32767,-13835,-14467,12136,-2424,-10250,32767,-8011,-4434,3478,-9466,-3801,25847,14493,-1544,3711,3685,32767,10724,31553,-14270,-4317,17698,-14337,32767,27198,-13532,32767,12939,32491,8563,-739,-13933,-7066,24332,1276,-13536,-11224,32767,-577,-2108,-14118,-14440,-1918,25891,-9314,4049,-780,-10168,28662,-4959,20349,32767,32767,-11472,32767 diff --git a/tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu8_input0_int16.csv 
b/tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu8_input0_int16.csv new file mode 100644 index 0000000..356df64 --- /dev/null +++ b/tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu8_input0_int16.csv @@ -0,0 +1 @@ +18806,-6583,-30749,-22800,-21462,-25252,1553,11421,-24561,-12807,-20495,-26341,24947,-31910,-5898,-28668,-24861,-10499,16497,-615,-31812,10203,-19086,-2236,7480,2756,-5316,-22146,-11607,761,-25268,12572,28751,-21188,-23963,32554,-5251,-6386,32511,-29761,-12799,31886,-22699,32730,-19759,4546,4293,-14069,21702,-22226,7184,26005,-10628,26677,2535,18058,-7686,-2987,-25471,15202,-29447,-32054,27555,-9967,-32753,32729,-9454,27546,-3873,8385,27176,31581,13989,-30497,29297,-32613,-9011,-29455,-31350,-27137,25148,18976,22862,-1352,-4803,2557,3924,-31500,19311,-22720,-391,4298,-8744,1414,30806,-24105,8877,-15750,1030,11573,-27292,-125,2828,26441,-1747,7348,5445,19746,-26456,29791,-30668,-29516,-30336,-4881,-16780,-76,6802,-32142,-19599,-465,16320,-19657,16652,-7804,-7370,5968,-5801,-31946,-23932,-26760,28954,31105,-28299,18046,-7886,-11465,-24783,31529,23766,226,14222,-13345,-1619,17582,30457,3822,-17166,8963,-29606,-19448,-25298,-23996,-3371,10990,32335,-17727,28858,32644,467,-636,-15264,-18148,-16979,-667,-19196,8501,-996,-27577,-16054,24215,27763,-15620,-6850,-23736,-11371,-336,20778,30684,32093,26835,-30512,-10884,21927,-18654,21479,-14888,22018,5814,23547,-26881,21565,-17619,30737,-9106,6733,-10403,-24723,-448,-27662,26429,-25133,-647,-8160,19068,-18708,20338,-26569,3305,29104,9514,13196,-11573,22255,-2601,20705,-26347,19741,-17141,14348,13206,-4918,-14636,14480,17409,-19543,13607,-8429,-2750,30247,-7520,17838,-25699,16110,-12196,-6700,-10843,15434,10079,27431,-27242,25967,-11759,4133,25200,-12320,-12753,-18011,-6496,-2320,-5710,19268,16069,14081,-1578,12408,-12947,27355,-2846,-15743,-17294,-6987,-574,-22519,24129,19973,-17998,30966,25437,-12026,27005,29085,-16688,20155,-7958,15155,-31611,12406,6484,-6047,20671,25829,235
07,-2183,25831,18523,-15849,13410,-1004,10633,22007,4421,4443,26191,-603,11906,15324,1645,32413,-10670,-25133,-3551,-14740,17397,23982,11909,7515,15269,-24778,30125,30839,13134,32295,-10548,-24560,-10517,-2078,2419,-21760,-24706,-13538,-26578,-11636,-3091,7154,-12062,-20237,-6257,27956,4859,10182,29161,-12176,-15413,26935,494,24669,-9434,24131,-3282,-11073,1141,911,-27248,-16187,43,-4827,-19227,18918,-26908,-12189,5242,31000,-18850,-19281,-11324,-15195,-10839,-12145,15885,20531,-379,27352,854,10347,-26,17342,2973,-3790,-23199,-22029,25437,-801,2425,-25664,12339,-29243,-31194,1669,-21554,-25893,19423,4471,649,15874,18688,2869,-6127,-12197,5027,17018,-9760,-14458,-10820,10668,11742,7371,-7473,3891,31256,31876,-12889,-14863,4605,916,-11440,17585,-20284,28824,-2505,13157,-28618,2315,-17278,24659,26468,31725,14476,19337,-21151,-28425,-28277,-9884,-21135,-30060,-27467,22446,13376,26157,-2799,20824,-22709,-31110,-16011,22633,10227,-1780,-10073,25477,-30438,-9800,-11850,-29153,28892,-24705,17213,-37,2643,-11028,30405,-13795,-28632,-13493,-20009,-8746,-17899,15978,32641,-22033,-7608,-25365,2873,1761,8641,-1465,6695,-12699,-21140,29070,-10662,-23376,1101,30692,8789,-991,-5729,5095,-23739,24269,29319,22445,-20607,28079,27408,-5539,12519,22717,5768,30770,-27754,-7716,16879,2025,-9251,10559,11587,-23951,8780,29991,9396,870,18572,2358,15138,-207,2824,-6501,4191,-10402,-26346,-10421,7374,15812,14532,6190,24065,30468,-6065,-5976,8483,17339,-3877,-8471,22845,8789,2409,10360,11540,25799,-32149,27981,-28568,-24944,-7490,-3228,-10372,127,9415,-6209,-32104,-19566,-24104,28980,26421,16790,13961,23016,-22475,27917,-1197,25104,-20116,31379,-4989,8489,-6038,19992,13614,-25408,6404,2857,4034,659,-32177,27462,8643,17855,9427,-12004,-8367,-32644,-21870,-19620,8384,12634,-5724,-27845,30144,-16227,-28814,-28552,-3074,-15034,23159,27075,32662,-29525,-30873,7770,-5173,-21875,23650,-17096,-9462,2227,-20201,-8111,16548,9279,-3295,2376,2359,30019,6866,20201,-30453,-9213,11331,-30597,30358,17413,-2887
9,23736,8284,20802,5482,-1577,-29733,-15079,15578,817,-28886,-23952,23770,-1230,-4498,-30128,-30815,-4093,16576,-19877,2592,-1664,-21698,18350,-10582,13028,26136,24166,-24481,25047 diff --git a/tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu9.tflite b/tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu9.tflite new file mode 100644 index 0000000..383e10d Binary files /dev/null and b/tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu9.tflite differ diff --git a/tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu9_golden_int16.csv b/tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu9_golden_int16.csv new file mode 100644 index 0000000..429e5e2 --- /dev/null +++ b/tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu9_golden_int16.csv @@ -0,0 +1 @@ +29448,-6595,-6240,3840,-4947,23790,7605,24454,19622,-2202,-8431,-8578,28979,14711,-3295,-5501,-7585,-2483,-619,19079,3023,-2824,-2513,-7357,-5699,-4923,29317,13837,-4881,30600,18646,-7256,11967,-9063,4680,25449,-661,10727,-9055,-8291,-7936,-2284,3902,14726,-2224,31872,29887,-8285,27920,-492,-8083,153,-7441,-5414,-3661,11921,-7041,17023,-3437,-3850,-6896,-7297,6460,-2076,-66,-3139,24889,9484,13911,17016,4617,-4496,21081,-6757,5735,-7378,31552,19209,13331,-7942,-4693,-9784,-2953,-9259,-4086,-1631,-8792,437,-1800,-5689,26448,-7494,-8642,19268,-8872,25953,23576,-8435,27820,-5760,-4632,31411,24303,-1906,2876,20576,29162,-846,-6015,14296,23944,9660,-1017,28838,31281,3134,15364,-4747,7968,-4486,-2484,20888,-4027,-6200,-7812,14038,19802,8725,-8716,-8405,2386,-4695,28800,-1210,2961,-4948,-1378,-5298,30112,3038,21692,-7950,15499,-5547,25960,2995,-7755,27498,9015,-4369,17811,18356,-1375,-7115,-8865,4366,-790,-397,23864,17827,-1225,-9547,-9582,19627,28727,20363,-9700,-4515,29623,-7409,1148,-1838,-8176,-1268,16968,-4926,28036,-6619,6261,27400,-8874,27731,12258,15034,-9088,8910,28905,-8901,-4375,9771,1252,-8501,-2993,-1428,-3014,25097
,30343,-9322,28253,-3707,23631,-9829,9246,30925,-1187,-7242,-132,-4010,-8511,2512,-665,3618,4483,-7071,20330,8602,-8474,11525,-5542,-6685,27740,19541,31917,20396,-4909,28035,-3373,-5275,21649,-8159,10184,-252,10159,-3659,-9771,24486,15681,20992,-6469,9057,26807,1266,215,-3142,-4910,-6678,-1737,14468,-2500,-8972,28960,3640,-6129,-4075,10302,-7140,-6598,22259,-629,-9132,20226,13609,-3015,-5427,19914,19281,-4969,18787,-9436,22053,-1169,24000,-2211,30916,-5203,-7685,-2227,-1628,29251,15083,-3141,20506,-3421,-6206,-8469,-8729,-169,-7193,-3094,16369,-3875,-8721,5008,6767,27826,4064,-8175,30000,1818,21846,-1435,8328,22266,-930,-7795,3797,-8039,22247,-82,-2056,-7456,7802,-2395,1946,13263,811,11360,18099,26292,3665,26873,1685,26963,-4095,4940,-3841,9161,10010,-5730,-4120,-6246,24708,27011,23584,13705,-5968,-1875,-7274,21422,-2998,-1994,6079,-173,-5070,-4584,1912,-7227,12665,-3945,29278,21340,-8112,4144,10654,17350,285,22197,-4579,12181,726,27452,23553,-14,-421,9441,26686,-451,-7618,16598,-1471,-4527,-3285,28585,26034,-271,20353,32544,-7876,12438,4267,-1386,27696,5011,12343,28966,-6013,11219,27010,2218,11279,-3676,29687,18955,16582,26445,-3401,21246,-6163,-3700,22404,5884,20024,8442,-2487,23648,-8769,27991,18304,29047,-7762,1153,13038,10880,-3841,26486,-4954,13086,14847,5346,-2426,-2401,7065,6095,21100,31875,-912,31353,-4900,-8504,31675,15679,-5643,-4613,6424,19823,17538,25242,7188,-3767,-4891,-9021,-4098,15372,-3902,12838,-5383,24470,-7054,11405,27163,-2957,7890,-1017,27533,-8861,-2528,20987,-7108,13520,-2910,23649,159,14831,1115,-1139,11037,22431,15601,-5078,-9214,4436,19337,18859,14819,1391,7778,18283,17342,-3893,-5836,9088,-8423,26421,-641,21478,11799,18794,21176,-5040,-2544,3111,21940,-3659,-1076,-5510,-1030,-9442,32424,10202,-690,-4154,-5824,-9256,14073,-2218,18161,-4845,22859,-9514,28478,20000,-2650,-7432,26188,22278,24531,22048,-4472,23207,25516,30987,-9358,-2661,793,8970,11792,-8933,30136,6975,12518,-8410,16837,-3535,-9023,-983,2103,-2614,9667,22182,-8783,29573,-9160
,-3874,-6902,-3512,16852,-1608,23501,4244,15288,9059,31054,-875,-9258,22610,29710,-954,25261,-985,12069,-8177,4221,-8715,-3775,24406,-8846,24691,-1051,15754,18467,-5208,4495,5755,16008,-5460,11048,-167,-2526,12061,3446,-3493,8085,-3881,-9475,-3185,32499,20876,-1564,-1930,-6288,28336,-6896,27442,-8810,-9580,8955,-1170,27719,10363,14329,-4217,11516,31804,-3181,20880,7673,13601,-9029,30426,26294,22731,-5378,8903,-9594,4430,-8500,-5141,-9421,-8830,2080,21500,30684,-6795,-8760,-8881,-2328,357,-6581,25515,18966,26776,-4631,31833,-2881,625,10082,-6030,-847,21871,-1000 diff --git a/tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu9_input0_int16.csv b/tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu9_input0_int16.csv new file mode 100644 index 0000000..0fc26b6 --- /dev/null +++ b/tensorflow/lite/micro/integration_tests/seanet/leaky_relu/leaky_relu9_input0_int16.csv @@ -0,0 +1 @@ +29448,-21982,-20799,3840,-16489,23790,7605,24454,19622,-7339,-28101,-28591,28979,14711,-10984,-18337,-25284,-8276,-2064,19079,3023,-9411,-8376,-24523,-18995,-16410,29317,13837,-16269,30600,18646,-24186,11967,-30210,4680,25449,-2201,10727,-30184,-27637,-26454,-7614,3902,14726,-7414,31872,29887,-27615,27920,-1638,-26944,153,-24803,-18046,-12201,11921,-23470,17023,-11455,-12833,-22986,-24322,6460,-6919,-218,-10462,24889,9484,13911,17016,4617,-14987,21081,-22522,5735,-24591,31552,19209,13331,-26473,-15641,-32614,-9841,-30863,-13619,-5435,-29306,437,-6000,-18963,26448,-24978,-28806,19268,-29571,25953,23576,-28115,27820,-19200,-15439,31411,24303,-6351,2876,20576,29162,-2820,-20049,14296,23944,9660,-3388,28838,31281,3134,15364,-15823,7968,-14952,-8280,20888,-13424,-20666,-26039,14038,19802,8725,-29051,-28016,2386,-15650,28800,-4032,2961,-16493,-4592,-17659,30112,3038,21692,-26499,15499,-18490,25960,2995,-25850,27498,9015,-14564,17811,18356,-4584,-23716,-29550,4366,-2633,-1323,23864,17827,-4082,-31822,-31939,19627,28727,20363,-32332,-15050,29623,-24695,1148,-6126,-2725
3,-4226,16968,-16418,28036,-22061,6261,27400,-29578,27731,12258,15034,-30294,8910,28905,-29669,-14584,9771,1252,-28337,-9975,-4758,-10046,25097,30343,-31071,28253,-12356,23631,-32761,9246,30925,-3956,-24139,-440,-13365,-28369,2512,-2217,3618,4483,-23570,20330,8602,-28246,11525,-18473,-22282,27740,19541,31917,20396,-16364,28035,-11241,-17581,21649,-27196,10184,-838,10159,-12197,-32569,24486,15681,20992,-21562,9057,26807,1266,215,-10474,-16366,-22259,-5790,14468,-8331,-29905,28960,3640,-20428,-13583,10302,-23798,-21994,22259,-2096,-30440,20226,13609,-10050,-18090,19914,19281,-16561,18787,-31451,22053,-3897,24000,-7370,30916,-17344,-25615,-7422,-5427,29251,15083,-10470,20506,-11403,-20685,-28230,-29095,-561,-23977,-10311,16369,-12917,-29069,5008,6767,27826,4064,-27249,30000,1818,21846,-4781,8328,22266,-3100,-25981,3797,-26795,22247,-271,-6853,-24854,7802,-7983,1946,13263,811,11360,18099,26292,3665,26873,1685,26963,-13648,4940,-12802,9161,10010,-19100,-13731,-20818,24708,27011,23584,13705,-19892,-6248,-24247,21422,-9992,-6646,6079,-577,-16898,-15280,1912,-24088,12665,-13148,29278,21340,-27039,4144,10654,17350,285,22197,-15263,12181,726,27452,23553,-45,-1402,9441,26686,-1502,-25394,16598,-4902,-15089,-10948,28585,26034,-903,20353,32544,-26253,12438,4267,-4619,27696,5011,12343,28966,-20041,11219,27010,2218,11279,-12252,29687,18955,16582,26445,-11336,21246,-20541,-12331,22404,5884,20024,8442,-8290,23648,-29228,27991,18304,29047,-25874,1153,13038,10880,-12804,26486,-16511,13086,14847,5346,-8086,-8002,7065,6095,21100,31875,-3040,31353,-16331,-28347,31675,15679,-18810,-15377,6424,19823,17538,25242,7188,-12557,-16304,-30068,-13658,15372,-13005,12838,-17941,24470,-23512,11405,27163,-9856,7890,-3389,27533,-29536,-8427,20987,-23693,13520,-9698,23649,159,14831,1115,-3795,11037,22431,15601,-16925,-30711,4436,19337,18859,14819,1391,7778,18283,17342,-12977,-19453,9088,-28076,26421,-2136,21478,11799,18794,21176,-16800,-8480,3111,21940,-12195,-3586,-18365,-3431,-31471,32424,10202,-2299
,-13846,-19414,-30853,14073,-7391,18161,-16148,22859,-31714,28478,20000,-8831,-24773,26188,22278,24531,22048,-14906,23207,25516,30987,-31191,-8868,793,8970,11792,-29777,30136,6975,12518,-28034,16837,-11781,-30077,-3276,2103,-8712,9667,22182,-29275,29573,-30531,-12911,-23005,-11706,16852,-5358,23501,4244,15288,9059,31054,-2917,-30858,22610,29710,-3180,25261,-3282,12069,-27257,4221,-29050,-12582,24406,-29486,24691,-3503,15754,18467,-17358,4495,5755,16008,-18198,11048,-556,-8418,12061,3446,-11644,8085,-12937,-31582,-10615,32499,20876,-5212,-6432,-20958,28336,-22986,27442,-29367,-31934,8955,-3898,27719,10363,14329,-14057,11516,31804,-10603,20880,7673,13601,-30095,30426,26294,22731,-17925,8903,-31979,4430,-28334,-17136,-31402,-29434,2080,21500,30684,-22649,-29200,-29601,-7759,357,-21935,25515,18966,26776,-15437,31833,-9601,625,10082,-20098,-2821,21871,-3331 diff --git a/tensorflow/lite/micro/integration_tests/seanet/pad/BUILD b/tensorflow/lite/micro/integration_tests/seanet/pad/BUILD new file mode 100644 index 0000000..1aadbd5 --- /dev/null +++ b/tensorflow/lite/micro/integration_tests/seanet/pad/BUILD @@ -0,0 +1,839 @@ +# Description: +# generated integration test for one specific kernel in a model. 
+load( + "//tensorflow/lite/micro:build_def.bzl", + "generate_cc_arrays", + "micro_copts", +) + +package( + default_visibility = ["//visibility:public"], + # Disabling layering_check because of http://b/177257332 + features = ["-layering_check"], + licenses = ["notice"], +) + +generate_cc_arrays( + name = "generated_pad0_model_data_cc", + src = "pad0.tflite", + out = "pad0_model_data.cc", +) + +generate_cc_arrays( + name = "generated_pad0_model_data_hdr", + src = "pad0.tflite", + out = "pad0_model_data.h", +) + +generate_cc_arrays( + name = "generated_pad1_model_data_cc", + src = "pad1.tflite", + out = "pad1_model_data.cc", +) + +generate_cc_arrays( + name = "generated_pad1_model_data_hdr", + src = "pad1.tflite", + out = "pad1_model_data.h", +) + +generate_cc_arrays( + name = "generated_pad2_model_data_cc", + src = "pad2.tflite", + out = "pad2_model_data.cc", +) + +generate_cc_arrays( + name = "generated_pad2_model_data_hdr", + src = "pad2.tflite", + out = "pad2_model_data.h", +) + +generate_cc_arrays( + name = "generated_pad3_model_data_cc", + src = "pad3.tflite", + out = "pad3_model_data.cc", +) + +generate_cc_arrays( + name = "generated_pad3_model_data_hdr", + src = "pad3.tflite", + out = "pad3_model_data.h", +) + +generate_cc_arrays( + name = "generated_pad4_model_data_cc", + src = "pad4.tflite", + out = "pad4_model_data.cc", +) + +generate_cc_arrays( + name = "generated_pad4_model_data_hdr", + src = "pad4.tflite", + out = "pad4_model_data.h", +) + +generate_cc_arrays( + name = "generated_pad5_model_data_cc", + src = "pad5.tflite", + out = "pad5_model_data.cc", +) + +generate_cc_arrays( + name = "generated_pad5_model_data_hdr", + src = "pad5.tflite", + out = "pad5_model_data.h", +) + +generate_cc_arrays( + name = "generated_pad6_model_data_cc", + src = "pad6.tflite", + out = "pad6_model_data.cc", +) + +generate_cc_arrays( + name = "generated_pad6_model_data_hdr", + src = "pad6.tflite", + out = "pad6_model_data.h", +) + +generate_cc_arrays( + name = 
"generated_pad7_model_data_cc", + src = "pad7.tflite", + out = "pad7_model_data.cc", +) + +generate_cc_arrays( + name = "generated_pad7_model_data_hdr", + src = "pad7.tflite", + out = "pad7_model_data.h", +) + +generate_cc_arrays( + name = "generated_pad8_model_data_cc", + src = "pad8.tflite", + out = "pad8_model_data.cc", +) + +generate_cc_arrays( + name = "generated_pad8_model_data_hdr", + src = "pad8.tflite", + out = "pad8_model_data.h", +) + +generate_cc_arrays( + name = "generated_pad9_model_data_cc", + src = "pad9.tflite", + out = "pad9_model_data.cc", +) + +generate_cc_arrays( + name = "generated_pad9_model_data_hdr", + src = "pad9.tflite", + out = "pad9_model_data.h", +) + +generate_cc_arrays( + name = "generated_pad10_model_data_cc", + src = "pad10.tflite", + out = "pad10_model_data.cc", +) + +generate_cc_arrays( + name = "generated_pad10_model_data_hdr", + src = "pad10.tflite", + out = "pad10_model_data.h", +) + +generate_cc_arrays( + name = "generated_pad11_model_data_cc", + src = "pad11.tflite", + out = "pad11_model_data.cc", +) + +generate_cc_arrays( + name = "generated_pad11_model_data_hdr", + src = "pad11.tflite", + out = "pad11_model_data.h", +) + +generate_cc_arrays( + name = "generated_pad12_model_data_cc", + src = "pad12.tflite", + out = "pad12_model_data.cc", +) + +generate_cc_arrays( + name = "generated_pad12_model_data_hdr", + src = "pad12.tflite", + out = "pad12_model_data.h", +) + +generate_cc_arrays( + name = "generated_pad13_model_data_cc", + src = "pad13.tflite", + out = "pad13_model_data.cc", +) + +generate_cc_arrays( + name = "generated_pad13_model_data_hdr", + src = "pad13.tflite", + out = "pad13_model_data.h", +) + +generate_cc_arrays( + name = "generated_pad14_model_data_cc", + src = "pad14.tflite", + out = "pad14_model_data.cc", +) + +generate_cc_arrays( + name = "generated_pad14_model_data_hdr", + src = "pad14.tflite", + out = "pad14_model_data.h", +) + +generate_cc_arrays( + name = "generated_pad15_model_data_cc", + src = 
"pad15.tflite", + out = "pad15_model_data.cc", +) + +generate_cc_arrays( + name = "generated_pad15_model_data_hdr", + src = "pad15.tflite", + out = "pad15_model_data.h", +) + +generate_cc_arrays( + name = "generated_pad16_model_data_cc", + src = "pad16.tflite", + out = "pad16_model_data.cc", +) + +generate_cc_arrays( + name = "generated_pad16_model_data_hdr", + src = "pad16.tflite", + out = "pad16_model_data.h", +) + +generate_cc_arrays( + name = "generated_pad17_model_data_cc", + src = "pad17.tflite", + out = "pad17_model_data.cc", +) + +generate_cc_arrays( + name = "generated_pad17_model_data_hdr", + src = "pad17.tflite", + out = "pad17_model_data.h", +) + +generate_cc_arrays( + name = "generated_pad18_model_data_cc", + src = "pad18.tflite", + out = "pad18_model_data.cc", +) + +generate_cc_arrays( + name = "generated_pad18_model_data_hdr", + src = "pad18.tflite", + out = "pad18_model_data.h", +) + +generate_cc_arrays( + name = "generated_pad0_input0_int16_test_data_cc", + src = "pad0_input0_int16.csv", + out = "pad0_input0_int16_test_data.cc", +) + +generate_cc_arrays( + name = "generated_pad0_input0_int16_test_data_hdr", + src = "pad0_input0_int16.csv", + out = "pad0_input0_int16_test_data.h", +) + +generate_cc_arrays( + name = "generated_pad0_golden_int16_test_data_cc", + src = "pad0_golden_int16.csv", + out = "pad0_golden_int16_test_data.cc", +) + +generate_cc_arrays( + name = "generated_pad0_golden_int16_test_data_hdr", + src = "pad0_golden_int16.csv", + out = "pad0_golden_int16_test_data.h", +) + +generate_cc_arrays( + name = "generated_pad1_input0_int16_test_data_cc", + src = "pad1_input0_int16.csv", + out = "pad1_input0_int16_test_data.cc", +) + +generate_cc_arrays( + name = "generated_pad1_input0_int16_test_data_hdr", + src = "pad1_input0_int16.csv", + out = "pad1_input0_int16_test_data.h", +) + +generate_cc_arrays( + name = "generated_pad1_golden_int16_test_data_cc", + src = "pad1_golden_int16.csv", + out = "pad1_golden_int16_test_data.cc", +) + 
+generate_cc_arrays( + name = "generated_pad1_golden_int16_test_data_hdr", + src = "pad1_golden_int16.csv", + out = "pad1_golden_int16_test_data.h", +) + +generate_cc_arrays( + name = "generated_pad2_input0_int16_test_data_cc", + src = "pad2_input0_int16.csv", + out = "pad2_input0_int16_test_data.cc", +) + +generate_cc_arrays( + name = "generated_pad2_input0_int16_test_data_hdr", + src = "pad2_input0_int16.csv", + out = "pad2_input0_int16_test_data.h", +) + +generate_cc_arrays( + name = "generated_pad2_golden_int16_test_data_cc", + src = "pad2_golden_int16.csv", + out = "pad2_golden_int16_test_data.cc", +) + +generate_cc_arrays( + name = "generated_pad2_golden_int16_test_data_hdr", + src = "pad2_golden_int16.csv", + out = "pad2_golden_int16_test_data.h", +) + +generate_cc_arrays( + name = "generated_pad3_input0_int16_test_data_cc", + src = "pad3_input0_int16.csv", + out = "pad3_input0_int16_test_data.cc", +) + +generate_cc_arrays( + name = "generated_pad3_input0_int16_test_data_hdr", + src = "pad3_input0_int16.csv", + out = "pad3_input0_int16_test_data.h", +) + +generate_cc_arrays( + name = "generated_pad3_golden_int16_test_data_cc", + src = "pad3_golden_int16.csv", + out = "pad3_golden_int16_test_data.cc", +) + +generate_cc_arrays( + name = "generated_pad3_golden_int16_test_data_hdr", + src = "pad3_golden_int16.csv", + out = "pad3_golden_int16_test_data.h", +) + +generate_cc_arrays( + name = "generated_pad4_input0_int16_test_data_cc", + src = "pad4_input0_int16.csv", + out = "pad4_input0_int16_test_data.cc", +) + +generate_cc_arrays( + name = "generated_pad4_input0_int16_test_data_hdr", + src = "pad4_input0_int16.csv", + out = "pad4_input0_int16_test_data.h", +) + +generate_cc_arrays( + name = "generated_pad4_golden_int16_test_data_cc", + src = "pad4_golden_int16.csv", + out = "pad4_golden_int16_test_data.cc", +) + +generate_cc_arrays( + name = "generated_pad4_golden_int16_test_data_hdr", + src = "pad4_golden_int16.csv", + out = "pad4_golden_int16_test_data.h", +) 
+ +generate_cc_arrays( + name = "generated_pad5_input0_int16_test_data_cc", + src = "pad5_input0_int16.csv", + out = "pad5_input0_int16_test_data.cc", +) + +generate_cc_arrays( + name = "generated_pad5_input0_int16_test_data_hdr", + src = "pad5_input0_int16.csv", + out = "pad5_input0_int16_test_data.h", +) + +generate_cc_arrays( + name = "generated_pad5_golden_int16_test_data_cc", + src = "pad5_golden_int16.csv", + out = "pad5_golden_int16_test_data.cc", +) + +generate_cc_arrays( + name = "generated_pad5_golden_int16_test_data_hdr", + src = "pad5_golden_int16.csv", + out = "pad5_golden_int16_test_data.h", +) + +generate_cc_arrays( + name = "generated_pad6_input0_int16_test_data_cc", + src = "pad6_input0_int16.csv", + out = "pad6_input0_int16_test_data.cc", +) + +generate_cc_arrays( + name = "generated_pad6_input0_int16_test_data_hdr", + src = "pad6_input0_int16.csv", + out = "pad6_input0_int16_test_data.h", +) + +generate_cc_arrays( + name = "generated_pad6_golden_int16_test_data_cc", + src = "pad6_golden_int16.csv", + out = "pad6_golden_int16_test_data.cc", +) + +generate_cc_arrays( + name = "generated_pad6_golden_int16_test_data_hdr", + src = "pad6_golden_int16.csv", + out = "pad6_golden_int16_test_data.h", +) + +generate_cc_arrays( + name = "generated_pad7_input0_int16_test_data_cc", + src = "pad7_input0_int16.csv", + out = "pad7_input0_int16_test_data.cc", +) + +generate_cc_arrays( + name = "generated_pad7_input0_int16_test_data_hdr", + src = "pad7_input0_int16.csv", + out = "pad7_input0_int16_test_data.h", +) + +generate_cc_arrays( + name = "generated_pad7_golden_int16_test_data_cc", + src = "pad7_golden_int16.csv", + out = "pad7_golden_int16_test_data.cc", +) + +generate_cc_arrays( + name = "generated_pad7_golden_int16_test_data_hdr", + src = "pad7_golden_int16.csv", + out = "pad7_golden_int16_test_data.h", +) + +generate_cc_arrays( + name = "generated_pad8_input0_int16_test_data_cc", + src = "pad8_input0_int16.csv", + out = "pad8_input0_int16_test_data.cc", 
+) + +generate_cc_arrays( + name = "generated_pad8_input0_int16_test_data_hdr", + src = "pad8_input0_int16.csv", + out = "pad8_input0_int16_test_data.h", +) + +generate_cc_arrays( + name = "generated_pad8_golden_int16_test_data_cc", + src = "pad8_golden_int16.csv", + out = "pad8_golden_int16_test_data.cc", +) + +generate_cc_arrays( + name = "generated_pad8_golden_int16_test_data_hdr", + src = "pad8_golden_int16.csv", + out = "pad8_golden_int16_test_data.h", +) + +generate_cc_arrays( + name = "generated_pad9_input0_int16_test_data_cc", + src = "pad9_input0_int16.csv", + out = "pad9_input0_int16_test_data.cc", +) + +generate_cc_arrays( + name = "generated_pad9_input0_int16_test_data_hdr", + src = "pad9_input0_int16.csv", + out = "pad9_input0_int16_test_data.h", +) + +generate_cc_arrays( + name = "generated_pad9_golden_int16_test_data_cc", + src = "pad9_golden_int16.csv", + out = "pad9_golden_int16_test_data.cc", +) + +generate_cc_arrays( + name = "generated_pad9_golden_int16_test_data_hdr", + src = "pad9_golden_int16.csv", + out = "pad9_golden_int16_test_data.h", +) + +generate_cc_arrays( + name = "generated_pad10_input0_int16_test_data_cc", + src = "pad10_input0_int16.csv", + out = "pad10_input0_int16_test_data.cc", +) + +generate_cc_arrays( + name = "generated_pad10_input0_int16_test_data_hdr", + src = "pad10_input0_int16.csv", + out = "pad10_input0_int16_test_data.h", +) + +generate_cc_arrays( + name = "generated_pad10_golden_int16_test_data_cc", + src = "pad10_golden_int16.csv", + out = "pad10_golden_int16_test_data.cc", +) + +generate_cc_arrays( + name = "generated_pad10_golden_int16_test_data_hdr", + src = "pad10_golden_int16.csv", + out = "pad10_golden_int16_test_data.h", +) + +generate_cc_arrays( + name = "generated_pad11_input0_int16_test_data_cc", + src = "pad11_input0_int16.csv", + out = "pad11_input0_int16_test_data.cc", +) + +generate_cc_arrays( + name = "generated_pad11_input0_int16_test_data_hdr", + src = "pad11_input0_int16.csv", + out = 
"pad11_input0_int16_test_data.h", +) + +generate_cc_arrays( + name = "generated_pad11_golden_int16_test_data_cc", + src = "pad11_golden_int16.csv", + out = "pad11_golden_int16_test_data.cc", +) + +generate_cc_arrays( + name = "generated_pad11_golden_int16_test_data_hdr", + src = "pad11_golden_int16.csv", + out = "pad11_golden_int16_test_data.h", +) + +generate_cc_arrays( + name = "generated_pad12_input0_int16_test_data_cc", + src = "pad12_input0_int16.csv", + out = "pad12_input0_int16_test_data.cc", +) + +generate_cc_arrays( + name = "generated_pad12_input0_int16_test_data_hdr", + src = "pad12_input0_int16.csv", + out = "pad12_input0_int16_test_data.h", +) + +generate_cc_arrays( + name = "generated_pad12_golden_int16_test_data_cc", + src = "pad12_golden_int16.csv", + out = "pad12_golden_int16_test_data.cc", +) + +generate_cc_arrays( + name = "generated_pad12_golden_int16_test_data_hdr", + src = "pad12_golden_int16.csv", + out = "pad12_golden_int16_test_data.h", +) + +generate_cc_arrays( + name = "generated_pad13_input0_int16_test_data_cc", + src = "pad13_input0_int16.csv", + out = "pad13_input0_int16_test_data.cc", +) + +generate_cc_arrays( + name = "generated_pad13_input0_int16_test_data_hdr", + src = "pad13_input0_int16.csv", + out = "pad13_input0_int16_test_data.h", +) + +generate_cc_arrays( + name = "generated_pad13_golden_int16_test_data_cc", + src = "pad13_golden_int16.csv", + out = "pad13_golden_int16_test_data.cc", +) + +generate_cc_arrays( + name = "generated_pad13_golden_int16_test_data_hdr", + src = "pad13_golden_int16.csv", + out = "pad13_golden_int16_test_data.h", +) + +generate_cc_arrays( + name = "generated_pad14_input0_int16_test_data_cc", + src = "pad14_input0_int16.csv", + out = "pad14_input0_int16_test_data.cc", +) + +generate_cc_arrays( + name = "generated_pad14_input0_int16_test_data_hdr", + src = "pad14_input0_int16.csv", + out = "pad14_input0_int16_test_data.h", +) + +generate_cc_arrays( + name = "generated_pad14_golden_int16_test_data_cc", + 
src = "pad14_golden_int16.csv", + out = "pad14_golden_int16_test_data.cc", +) + +generate_cc_arrays( + name = "generated_pad14_golden_int16_test_data_hdr", + src = "pad14_golden_int16.csv", + out = "pad14_golden_int16_test_data.h", +) + +generate_cc_arrays( + name = "generated_pad15_input0_int16_test_data_cc", + src = "pad15_input0_int16.csv", + out = "pad15_input0_int16_test_data.cc", +) + +generate_cc_arrays( + name = "generated_pad15_input0_int16_test_data_hdr", + src = "pad15_input0_int16.csv", + out = "pad15_input0_int16_test_data.h", +) + +generate_cc_arrays( + name = "generated_pad15_golden_int16_test_data_cc", + src = "pad15_golden_int16.csv", + out = "pad15_golden_int16_test_data.cc", +) + +generate_cc_arrays( + name = "generated_pad15_golden_int16_test_data_hdr", + src = "pad15_golden_int16.csv", + out = "pad15_golden_int16_test_data.h", +) + +generate_cc_arrays( + name = "generated_pad16_input0_int16_test_data_cc", + src = "pad16_input0_int16.csv", + out = "pad16_input0_int16_test_data.cc", +) + +generate_cc_arrays( + name = "generated_pad16_input0_int16_test_data_hdr", + src = "pad16_input0_int16.csv", + out = "pad16_input0_int16_test_data.h", +) + +generate_cc_arrays( + name = "generated_pad16_golden_int16_test_data_cc", + src = "pad16_golden_int16.csv", + out = "pad16_golden_int16_test_data.cc", +) + +generate_cc_arrays( + name = "generated_pad16_golden_int16_test_data_hdr", + src = "pad16_golden_int16.csv", + out = "pad16_golden_int16_test_data.h", +) + +generate_cc_arrays( + name = "generated_pad17_input0_int16_test_data_cc", + src = "pad17_input0_int16.csv", + out = "pad17_input0_int16_test_data.cc", +) + +generate_cc_arrays( + name = "generated_pad17_input0_int16_test_data_hdr", + src = "pad17_input0_int16.csv", + out = "pad17_input0_int16_test_data.h", +) + +generate_cc_arrays( + name = "generated_pad17_golden_int16_test_data_cc", + src = "pad17_golden_int16.csv", + out = "pad17_golden_int16_test_data.cc", +) + +generate_cc_arrays( + name = 
"generated_pad17_golden_int16_test_data_hdr", + src = "pad17_golden_int16.csv", + out = "pad17_golden_int16_test_data.h", +) + +generate_cc_arrays( + name = "generated_pad18_input0_int16_test_data_cc", + src = "pad18_input0_int16.csv", + out = "pad18_input0_int16_test_data.cc", +) + +generate_cc_arrays( + name = "generated_pad18_input0_int16_test_data_hdr", + src = "pad18_input0_int16.csv", + out = "pad18_input0_int16_test_data.h", +) + +generate_cc_arrays( + name = "generated_pad18_golden_int16_test_data_cc", + src = "pad18_golden_int16.csv", + out = "pad18_golden_int16_test_data.cc", +) + +generate_cc_arrays( + name = "generated_pad18_golden_int16_test_data_hdr", + src = "pad18_golden_int16.csv", + out = "pad18_golden_int16_test_data.h", +) + +cc_library( + name = "models_and_testdata", + srcs = [ + "generated_pad0_golden_int16_test_data_cc", + "generated_pad0_input0_int16_test_data_cc", + "generated_pad0_model_data_cc", + "generated_pad10_golden_int16_test_data_cc", + "generated_pad10_input0_int16_test_data_cc", + "generated_pad10_model_data_cc", + "generated_pad11_golden_int16_test_data_cc", + "generated_pad11_input0_int16_test_data_cc", + "generated_pad11_model_data_cc", + "generated_pad12_golden_int16_test_data_cc", + "generated_pad12_input0_int16_test_data_cc", + "generated_pad12_model_data_cc", + "generated_pad13_golden_int16_test_data_cc", + "generated_pad13_input0_int16_test_data_cc", + "generated_pad13_model_data_cc", + "generated_pad14_golden_int16_test_data_cc", + "generated_pad14_input0_int16_test_data_cc", + "generated_pad14_model_data_cc", + "generated_pad15_golden_int16_test_data_cc", + "generated_pad15_input0_int16_test_data_cc", + "generated_pad15_model_data_cc", + "generated_pad16_golden_int16_test_data_cc", + "generated_pad16_input0_int16_test_data_cc", + "generated_pad16_model_data_cc", + "generated_pad17_golden_int16_test_data_cc", + "generated_pad17_input0_int16_test_data_cc", + "generated_pad17_model_data_cc", + 
"generated_pad18_golden_int16_test_data_cc", + "generated_pad18_input0_int16_test_data_cc", + "generated_pad18_model_data_cc", + "generated_pad1_golden_int16_test_data_cc", + "generated_pad1_input0_int16_test_data_cc", + "generated_pad1_model_data_cc", + "generated_pad2_golden_int16_test_data_cc", + "generated_pad2_input0_int16_test_data_cc", + "generated_pad2_model_data_cc", + "generated_pad3_golden_int16_test_data_cc", + "generated_pad3_input0_int16_test_data_cc", + "generated_pad3_model_data_cc", + "generated_pad4_golden_int16_test_data_cc", + "generated_pad4_input0_int16_test_data_cc", + "generated_pad4_model_data_cc", + "generated_pad5_golden_int16_test_data_cc", + "generated_pad5_input0_int16_test_data_cc", + "generated_pad5_model_data_cc", + "generated_pad6_golden_int16_test_data_cc", + "generated_pad6_input0_int16_test_data_cc", + "generated_pad6_model_data_cc", + "generated_pad7_golden_int16_test_data_cc", + "generated_pad7_input0_int16_test_data_cc", + "generated_pad7_model_data_cc", + "generated_pad8_golden_int16_test_data_cc", + "generated_pad8_input0_int16_test_data_cc", + "generated_pad8_model_data_cc", + "generated_pad9_golden_int16_test_data_cc", + "generated_pad9_input0_int16_test_data_cc", + "generated_pad9_model_data_cc", + ], + hdrs = [ + "generated_pad0_golden_int16_test_data_hdr", + "generated_pad0_input0_int16_test_data_hdr", + "generated_pad0_model_data_hdr", + "generated_pad10_golden_int16_test_data_hdr", + "generated_pad10_input0_int16_test_data_hdr", + "generated_pad10_model_data_hdr", + "generated_pad11_golden_int16_test_data_hdr", + "generated_pad11_input0_int16_test_data_hdr", + "generated_pad11_model_data_hdr", + "generated_pad12_golden_int16_test_data_hdr", + "generated_pad12_input0_int16_test_data_hdr", + "generated_pad12_model_data_hdr", + "generated_pad13_golden_int16_test_data_hdr", + "generated_pad13_input0_int16_test_data_hdr", + "generated_pad13_model_data_hdr", + "generated_pad14_golden_int16_test_data_hdr", + 
"generated_pad14_input0_int16_test_data_hdr", + "generated_pad14_model_data_hdr", + "generated_pad15_golden_int16_test_data_hdr", + "generated_pad15_input0_int16_test_data_hdr", + "generated_pad15_model_data_hdr", + "generated_pad16_golden_int16_test_data_hdr", + "generated_pad16_input0_int16_test_data_hdr", + "generated_pad16_model_data_hdr", + "generated_pad17_golden_int16_test_data_hdr", + "generated_pad17_input0_int16_test_data_hdr", + "generated_pad17_model_data_hdr", + "generated_pad18_golden_int16_test_data_hdr", + "generated_pad18_input0_int16_test_data_hdr", + "generated_pad18_model_data_hdr", + "generated_pad1_golden_int16_test_data_hdr", + "generated_pad1_input0_int16_test_data_hdr", + "generated_pad1_model_data_hdr", + "generated_pad2_golden_int16_test_data_hdr", + "generated_pad2_input0_int16_test_data_hdr", + "generated_pad2_model_data_hdr", + "generated_pad3_golden_int16_test_data_hdr", + "generated_pad3_input0_int16_test_data_hdr", + "generated_pad3_model_data_hdr", + "generated_pad4_golden_int16_test_data_hdr", + "generated_pad4_input0_int16_test_data_hdr", + "generated_pad4_model_data_hdr", + "generated_pad5_golden_int16_test_data_hdr", + "generated_pad5_input0_int16_test_data_hdr", + "generated_pad5_model_data_hdr", + "generated_pad6_golden_int16_test_data_hdr", + "generated_pad6_input0_int16_test_data_hdr", + "generated_pad6_model_data_hdr", + "generated_pad7_golden_int16_test_data_hdr", + "generated_pad7_input0_int16_test_data_hdr", + "generated_pad7_model_data_hdr", + "generated_pad8_golden_int16_test_data_hdr", + "generated_pad8_input0_int16_test_data_hdr", + "generated_pad8_model_data_hdr", + "generated_pad9_golden_int16_test_data_hdr", + "generated_pad9_input0_int16_test_data_hdr", + "generated_pad9_model_data_hdr", + ], + copts = micro_copts(), +) + +cc_test( + name = "integration_test", + srcs = [ + "integration_tests.cc", + ], + copts = micro_copts(), + deps = [ + ":models_and_testdata", + "//python/tflite_micro:python_ops_resolver", + 
"//tensorflow/lite/micro:micro_framework", + "//tensorflow/lite/micro:micro_log", + "//tensorflow/lite/micro:micro_resource_variable", + "//tensorflow/lite/micro:op_resolvers", + "//tensorflow/lite/micro:recording_allocators", + "//tensorflow/lite/micro/testing:micro_test", + ], +) diff --git a/tensorflow/lite/micro/integration_tests/seanet/pad/Makefile.inc b/tensorflow/lite/micro/integration_tests/seanet/pad/Makefile.inc new file mode 100644 index 0000000..d6a3f56 --- /dev/null +++ b/tensorflow/lite/micro/integration_tests/seanet/pad/Makefile.inc @@ -0,0 +1,68 @@ +integration_tests_seanet_pad_GENERATOR_INPUTS := \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/pad/pad0.tflite \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/pad/pad1.tflite \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/pad/pad2.tflite \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/pad/pad3.tflite \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/pad/pad4.tflite \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/pad/pad5.tflite \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/pad/pad6.tflite \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/pad/pad7.tflite \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/pad/pad8.tflite \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/pad/pad9.tflite \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/pad/pad10.tflite \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/pad/pad11.tflite \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/pad/pad12.tflite \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/pad/pad13.tflite \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/pad/pad14.tflite \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/pad/pad15.tflite \ 
+$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/pad/pad16.tflite \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/pad/pad17.tflite \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/pad/pad18.tflite \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/pad/pad0_input0_int16.csv \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/pad/pad0_golden_int16.csv \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/pad/pad1_input0_int16.csv \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/pad/pad1_golden_int16.csv \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/pad/pad2_input0_int16.csv \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/pad/pad2_golden_int16.csv \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/pad/pad3_input0_int16.csv \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/pad/pad3_golden_int16.csv \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/pad/pad4_input0_int16.csv \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/pad/pad4_golden_int16.csv \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/pad/pad5_input0_int16.csv \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/pad/pad5_golden_int16.csv \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/pad/pad6_input0_int16.csv \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/pad/pad6_golden_int16.csv \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/pad/pad7_input0_int16.csv \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/pad/pad7_golden_int16.csv \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/pad/pad8_input0_int16.csv \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/pad/pad8_golden_int16.csv \ 
+$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/pad/pad9_input0_int16.csv \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/pad/pad9_golden_int16.csv \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/pad/pad10_input0_int16.csv \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/pad/pad10_golden_int16.csv \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/pad/pad11_input0_int16.csv \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/pad/pad11_golden_int16.csv \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/pad/pad12_input0_int16.csv \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/pad/pad12_golden_int16.csv \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/pad/pad13_input0_int16.csv \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/pad/pad13_golden_int16.csv \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/pad/pad14_input0_int16.csv \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/pad/pad14_golden_int16.csv \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/pad/pad15_input0_int16.csv \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/pad/pad15_golden_int16.csv \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/pad/pad16_input0_int16.csv \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/pad/pad16_golden_int16.csv \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/pad/pad17_input0_int16.csv \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/pad/pad17_golden_int16.csv \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/pad/pad18_input0_int16.csv \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/pad/pad18_golden_int16.csv \ + +integration_tests_seanet_pad_SRCS := \ 
+$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/pad/integration_tests.cc \ +$(TENSORFLOW_ROOT)python/tflite_micro/python_ops_resolver.cc \ + +integration_tests_seanet_pad_HDR := \ +$(TENSORFLOW_ROOT)python/tflite_micro/python_ops_resolver.h \ + +$(eval $(call microlite_test,integration_tests_seanet_pad_test,\ +$(integration_tests_seanet_pad_SRCS),$(integration_tests_seanet_pad_HDR),$(integration_tests_seanet_pad_GENERATOR_INPUTS))) diff --git a/tensorflow/lite/micro/integration_tests/seanet/pad/integration_tests.cc b/tensorflow/lite/micro/integration_tests/seanet/pad/integration_tests.cc new file mode 100644 index 0000000..4082c88 --- /dev/null +++ b/tensorflow/lite/micro/integration_tests/seanet/pad/integration_tests.cc @@ -0,0 +1,260 @@ +/* Copyright 2022 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/ + +#include + +#include "python/tflite_micro/python_ops_resolver.h" +#include "tensorflow/lite/c/common.h" +#include "tensorflow/lite/micro/integration_tests/seanet/pad/pad0_golden_int16_test_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/pad/pad0_input0_int16_test_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/pad/pad0_model_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/pad/pad10_golden_int16_test_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/pad/pad10_input0_int16_test_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/pad/pad10_model_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/pad/pad11_golden_int16_test_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/pad/pad11_input0_int16_test_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/pad/pad11_model_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/pad/pad12_golden_int16_test_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/pad/pad12_input0_int16_test_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/pad/pad12_model_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/pad/pad13_golden_int16_test_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/pad/pad13_input0_int16_test_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/pad/pad13_model_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/pad/pad14_golden_int16_test_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/pad/pad14_input0_int16_test_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/pad/pad14_model_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/pad/pad15_golden_int16_test_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/pad/pad15_input0_int16_test_data.h" 
+#include "tensorflow/lite/micro/integration_tests/seanet/pad/pad15_model_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/pad/pad16_golden_int16_test_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/pad/pad16_input0_int16_test_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/pad/pad16_model_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/pad/pad17_golden_int16_test_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/pad/pad17_input0_int16_test_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/pad/pad17_model_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/pad/pad18_golden_int16_test_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/pad/pad18_input0_int16_test_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/pad/pad18_model_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/pad/pad1_golden_int16_test_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/pad/pad1_input0_int16_test_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/pad/pad1_model_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/pad/pad2_golden_int16_test_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/pad/pad2_input0_int16_test_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/pad/pad2_model_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/pad/pad3_golden_int16_test_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/pad/pad3_input0_int16_test_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/pad/pad3_model_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/pad/pad4_golden_int16_test_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/pad/pad4_input0_int16_test_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/pad/pad4_model_data.h" +#include 
"tensorflow/lite/micro/integration_tests/seanet/pad/pad5_golden_int16_test_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/pad/pad5_input0_int16_test_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/pad/pad5_model_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/pad/pad6_golden_int16_test_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/pad/pad6_input0_int16_test_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/pad/pad6_model_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/pad/pad7_golden_int16_test_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/pad/pad7_input0_int16_test_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/pad/pad7_model_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/pad/pad8_golden_int16_test_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/pad/pad8_input0_int16_test_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/pad/pad8_model_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/pad/pad9_golden_int16_test_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/pad/pad9_input0_int16_test_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/pad/pad9_model_data.h" +#include "tensorflow/lite/micro/micro_log.h" +#include "tensorflow/lite/micro/micro_profiler.h" +#include "tensorflow/lite/micro/recording_micro_allocator.h" +#include "tensorflow/lite/micro/recording_micro_interpreter.h" +#include "tensorflow/lite/micro/system_setup.h" +#include "tensorflow/lite/micro/testing/micro_test.h" + +constexpr size_t kTensorArenaSize = 1024 * 100; +uint8_t tensor_arena[kTensorArenaSize]; + +namespace tflite { +namespace micro { +namespace { + +void RunModel(const uint8_t* model, const int16_t* input0, + const uint32_t input0_size, const int16_t* golden, + const uint32_t golden_size, const char* name) { + InitializeTarget(); + 
MicroProfiler profiler; + PythonOpsResolver op_resolver; + + MicroInterpreter interpreter(GetModel(model), op_resolver, tensor_arena, + kTensorArenaSize, nullptr, &profiler); + interpreter.AllocateTensors(); + TfLiteTensor* input_tensor0 = interpreter.input(0); + TF_LITE_MICRO_EXPECT_EQ(input_tensor0->bytes, input0_size * sizeof(int16_t)); + memcpy(interpreter.input(0)->data.raw, input0, input_tensor0->bytes); + if (kTfLiteOk != interpreter.Invoke()) { + TF_LITE_MICRO_EXPECT(false); + return; + } + profiler.Log(); + MicroPrintf(""); + + TfLiteTensor* output_tensor = interpreter.output(0); + TF_LITE_MICRO_EXPECT_EQ(output_tensor->bytes, golden_size * sizeof(int16_t)); + int16_t* output = ::tflite::GetTensorData(output_tensor); + for (uint32_t i = 0; i < golden_size; i++) { + // TODO(b/205046520): Better understand why TfLite and TFLM can sometimes be + // off by 1. + TF_LITE_MICRO_EXPECT_NEAR(golden[i], output[i], 1); + } +} + +} // namespace +} // namespace micro +} // namespace tflite + +TF_LITE_MICRO_TESTS_BEGIN + +TF_LITE_MICRO_TEST(pad0_test) { + tflite::micro::RunModel(g_pad0_model_data, g_pad0_input0_int16_test_data, + g_pad0_input0_int16_test_data_size, + g_pad0_golden_int16_test_data, + g_pad0_golden_int16_test_data_size, "pad0 test"); +} + +TF_LITE_MICRO_TEST(pad1_test) { + tflite::micro::RunModel(g_pad1_model_data, g_pad1_input0_int16_test_data, + g_pad1_input0_int16_test_data_size, + g_pad1_golden_int16_test_data, + g_pad1_golden_int16_test_data_size, "pad1 test"); +} + +TF_LITE_MICRO_TEST(pad2_test) { + tflite::micro::RunModel(g_pad2_model_data, g_pad2_input0_int16_test_data, + g_pad2_input0_int16_test_data_size, + g_pad2_golden_int16_test_data, + g_pad2_golden_int16_test_data_size, "pad2 test"); +} + +TF_LITE_MICRO_TEST(pad3_test) { + tflite::micro::RunModel(g_pad3_model_data, g_pad3_input0_int16_test_data, + g_pad3_input0_int16_test_data_size, + g_pad3_golden_int16_test_data, + g_pad3_golden_int16_test_data_size, "pad3 test"); +} + 
+TF_LITE_MICRO_TEST(pad4_test) { + tflite::micro::RunModel(g_pad4_model_data, g_pad4_input0_int16_test_data, + g_pad4_input0_int16_test_data_size, + g_pad4_golden_int16_test_data, + g_pad4_golden_int16_test_data_size, "pad4 test"); +} + +TF_LITE_MICRO_TEST(pad5_test) { + tflite::micro::RunModel(g_pad5_model_data, g_pad5_input0_int16_test_data, + g_pad5_input0_int16_test_data_size, + g_pad5_golden_int16_test_data, + g_pad5_golden_int16_test_data_size, "pad5 test"); +} + +TF_LITE_MICRO_TEST(pad6_test) { + tflite::micro::RunModel(g_pad6_model_data, g_pad6_input0_int16_test_data, + g_pad6_input0_int16_test_data_size, + g_pad6_golden_int16_test_data, + g_pad6_golden_int16_test_data_size, "pad6 test"); +} + +TF_LITE_MICRO_TEST(pad7_test) { + tflite::micro::RunModel(g_pad7_model_data, g_pad7_input0_int16_test_data, + g_pad7_input0_int16_test_data_size, + g_pad7_golden_int16_test_data, + g_pad7_golden_int16_test_data_size, "pad7 test"); +} + +TF_LITE_MICRO_TEST(pad8_test) { + tflite::micro::RunModel(g_pad8_model_data, g_pad8_input0_int16_test_data, + g_pad8_input0_int16_test_data_size, + g_pad8_golden_int16_test_data, + g_pad8_golden_int16_test_data_size, "pad8 test"); +} + +TF_LITE_MICRO_TEST(pad9_test) { + tflite::micro::RunModel(g_pad9_model_data, g_pad9_input0_int16_test_data, + g_pad9_input0_int16_test_data_size, + g_pad9_golden_int16_test_data, + g_pad9_golden_int16_test_data_size, "pad9 test"); +} + +TF_LITE_MICRO_TEST(pad10_test) { + tflite::micro::RunModel(g_pad10_model_data, g_pad10_input0_int16_test_data, + g_pad10_input0_int16_test_data_size, + g_pad10_golden_int16_test_data, + g_pad10_golden_int16_test_data_size, "pad10 test"); +} + +TF_LITE_MICRO_TEST(pad11_test) { + tflite::micro::RunModel(g_pad11_model_data, g_pad11_input0_int16_test_data, + g_pad11_input0_int16_test_data_size, + g_pad11_golden_int16_test_data, + g_pad11_golden_int16_test_data_size, "pad11 test"); +} + +TF_LITE_MICRO_TEST(pad12_test) { + tflite::micro::RunModel(g_pad12_model_data, 
g_pad12_input0_int16_test_data, + g_pad12_input0_int16_test_data_size, + g_pad12_golden_int16_test_data, + g_pad12_golden_int16_test_data_size, "pad12 test"); +} + +TF_LITE_MICRO_TEST(pad13_test) { + tflite::micro::RunModel(g_pad13_model_data, g_pad13_input0_int16_test_data, + g_pad13_input0_int16_test_data_size, + g_pad13_golden_int16_test_data, + g_pad13_golden_int16_test_data_size, "pad13 test"); +} + +TF_LITE_MICRO_TEST(pad14_test) { + tflite::micro::RunModel(g_pad14_model_data, g_pad14_input0_int16_test_data, + g_pad14_input0_int16_test_data_size, + g_pad14_golden_int16_test_data, + g_pad14_golden_int16_test_data_size, "pad14 test"); +} + +TF_LITE_MICRO_TEST(pad15_test) { + tflite::micro::RunModel(g_pad15_model_data, g_pad15_input0_int16_test_data, + g_pad15_input0_int16_test_data_size, + g_pad15_golden_int16_test_data, + g_pad15_golden_int16_test_data_size, "pad15 test"); +} + +TF_LITE_MICRO_TEST(pad16_test) { + tflite::micro::RunModel(g_pad16_model_data, g_pad16_input0_int16_test_data, + g_pad16_input0_int16_test_data_size, + g_pad16_golden_int16_test_data, + g_pad16_golden_int16_test_data_size, "pad16 test"); +} + +TF_LITE_MICRO_TEST(pad17_test) { + tflite::micro::RunModel(g_pad17_model_data, g_pad17_input0_int16_test_data, + g_pad17_input0_int16_test_data_size, + g_pad17_golden_int16_test_data, + g_pad17_golden_int16_test_data_size, "pad17 test"); +} + +TF_LITE_MICRO_TEST(pad18_test) { + tflite::micro::RunModel(g_pad18_model_data, g_pad18_input0_int16_test_data, + g_pad18_input0_int16_test_data_size, + g_pad18_golden_int16_test_data, + g_pad18_golden_int16_test_data_size, "pad18 test"); +} + +TF_LITE_MICRO_TESTS_END diff --git a/tensorflow/lite/micro/integration_tests/seanet/pad/pad0.tflite b/tensorflow/lite/micro/integration_tests/seanet/pad/pad0.tflite new file mode 100644 index 0000000..e2f94fe Binary files /dev/null and b/tensorflow/lite/micro/integration_tests/seanet/pad/pad0.tflite differ diff --git 
a/tensorflow/lite/micro/integration_tests/seanet/pad/pad0_golden_int16.csv b/tensorflow/lite/micro/integration_tests/seanet/pad/pad0_golden_int16.csv new file mode 100644 index 0000000..c7533de --- /dev/null +++ b/tensorflow/lite/micro/integration_tests/seanet/pad/pad0_golden_int16.csv @@ -0,0 +1 @@ +0,0,0,0,0,0,-5106,6707,-6350,-901,17496,4636,5023,-30974,21226,-21804,31527,26015,-2741,-5280,15808,-17291,-30996,-27283,23922,-21443,-13770,4860,-3013,12331,21323,14589,10923,24546,29914,17086,11380,9017,-19981,-802,14989,15023,-4795,-27835,30095,23507,28519,14337,-20060,27426,16961,14950,21177,-7116,531,-31541,-24955,-12044,17070,-7774,28677,-17097,-24725,-18689,-252,-3179,-30868,-5599,20786,4069,-28868,-4463,-27248,7320,25457,-16603,-3301,1610,9365,-29091,-24732,16264,490,-24121,-2552,17505,6340,-22228,-19671,-25356,-27561,17014,31595,-31391,14047,4449,-9342,-13182,29845,8287,19886,-27890,-20677,-16781,47,-19811,-28627,25052,-10132,-26351,25126,-21443,-29321,9397,-18897,13514,11743,-12354,-24748,18295,-30015,4670,-1565,-28863,5013,10691,21477,-2158,21216,-28722,9783,17426,-5095,8914,-12905,-6460,17885,-6481,-23116,-10463,-17747,-1504,8325,-3192,-22249,4880,-30844,-30979,-29422,19162,14197,23513,-27540,-12411,-25585,-24042,6334,23186,17289,-3381,-7695,11382,29625,20716,-17614,1327,14825,-16516,25472,-4749,16789,-30675,-2806,7695,-24295,13654,-31507,24167,2179,3123,18941,27064,-32134,-23690,-10912,-12307,20218,28485,-26502,-28806,-23337,-9947,2039,-3785,24828,6794,-13452,-23443,20105,-9600,-13978,13251,-30000,-22059,21512,-31984,21983,26515,-781,29722,13049,-23333,32611,-4964,29777,24879,-4579,-9974,31671,21190,6472,-12007,-3705,1298,-32096,-8226,-2170,15092,-14915,-7702,18920,26572,3233,8692,-10989,18873,16577,13207,9859,4437,-14212,18773,-2203,6277,-21677,32762,-12230,20360,30860,-21159,-11686,-25419,1455,-4736,3728,-27084,-16219,24899,-8943,-32498,4509,29298,-16064,-10913,22385,20528,-29968,-31066,17403,5887,-6096,12555,-9305,-1174,19270,22762,32259,-23145,8585,-551
5,-24803,-12198,32742,-6566,-20451,-20642,21977,25574,1693,-4829,-15606,-21547,20245,-7215,21744,31121,-28753,-13202,-15207,22640,15478,22379,-18875,-14342,-1167,-27305,14502,-8438,2583,2821,-5879,17107,-30599,-17002,-15997,22492,-7598,6651,-62,-12132,-30466,29323,18182,-5896,22007,-21557,0,0,0,0,0,0,0,0,0,0,0,0,-16661,17253,-31435,-27921,-24612,653,-20163,-19050,16417,7901,31100,-10345,20357,-494,9007,-215,-9274,15157,14922,-23574,15613,-2719,-30036,20710,31967,-4600,19040,-26328,18878,-13915,10309,-30818,2637,27952,25945,-7826,11544,-27995,-5001,-25103,-25542,12874,31436,10864,5363,-19895,-9448,30656,17989,-27520,16409,-6715,591,-10248,-29479,23885,29228,-2507,17870,-20363,24479,9856,-31613,-30019,-18846,20497,7071,-11639,30835,-16835,2064,3893,-20815,6214,23829,23451,18146,7757,-13521,14740,-9226,-15243,4937,13009,28424,6279,-755,-14271,3146,-29161,-14558,71,11312,14109,-14325,22505,-32539,-10108,28518,-23173,29806,20839,31893,25766,-25916,-18804,-16550,-20092,26087,21407,-12528,3243,10387,1086,-27210,-11680,27402,-10409,15115,16674,7431,-23659,-29053,-26625,-18206,-29465,-31771,-27261,29560,10528,12283,-25485,9705,9523,24993,-13720,20826,25830,31495,-26971,-11646,-1180,-19131,27972,291,5877,-18367,17089,558,19162,2265,32345,6019,-16662,-22356,30398,-12734,31477,13245,12835,-23954,-19726,-16630,-2471,27972,-10081,27644,28245,17351,23070,-4579,24617,-3129,18685,5100,19125,14704,27604,-25546,-17536,-9142,19367,-21628,-13172,21902,-32281,-5003,28266,32010,-12555,-2662,9131,10376,-10275,-2634,-10446,-6325,-22590,29624,26691,-12724,-7480,-32695,-12621,-13281,-9536,-19531,12480,-29441,13816,-6798,17802,11940,26561,-18327,30499,32561,16607,25403,19976,-23249,26610,3881,12239,19766,11310,-9033,-29868,27976,10217,-15244,5766,26827,-13919,-4287,8099,-387,-11610,31465,-28416,27224,20222,10557,5752,-14001,23408,-9803,27692,25007,23673,890,-26489,8756,-22566,25250,-12664,30878,9110,10642,22469,-7507,-31747,-27876,-8161,3590,-21509,-27761,-29252,-8955,32682,22926,18064,2952,18
021,25475,-22495,18820,11829,-24019,1254,-6771,-26270,-22248,29399,-14665,-30220,-14906,-2722,13673,4472,16984,15774,-2190,15536,11023,-1875,-24536,18078,4819,-15535,-16626,2826,-23356,-16688,-22560,-16125,12681,27117,22517,27846,26414,-7999,-30323,-4012,-16341,-1367,21371,14445,1501,-23993,0,0,0,0,0,0 diff --git a/tensorflow/lite/micro/integration_tests/seanet/pad/pad0_input0_int16.csv b/tensorflow/lite/micro/integration_tests/seanet/pad/pad0_input0_int16.csv new file mode 100644 index 0000000..30035eb --- /dev/null +++ b/tensorflow/lite/micro/integration_tests/seanet/pad/pad0_input0_int16.csv @@ -0,0 +1 @@ +-5106,6707,-6350,-901,17496,4636,5023,-30974,21226,-21804,31527,26015,-2741,-5280,15808,-17291,-30996,-27283,23922,-21443,-13770,4860,-3013,12331,21323,14589,10923,24546,29914,17086,11380,9017,-19981,-802,14989,15023,-4795,-27835,30095,23507,28519,14337,-20060,27426,16961,14950,21177,-7116,531,-31541,-24955,-12044,17070,-7774,28677,-17097,-24725,-18689,-252,-3179,-30868,-5599,20786,4069,-28868,-4463,-27248,7320,25457,-16603,-3301,1610,9365,-29091,-24732,16264,490,-24121,-2552,17505,6340,-22228,-19671,-25356,-27561,17014,31595,-31391,14047,4449,-9342,-13182,29845,8287,19886,-27890,-20677,-16781,47,-19811,-28627,25052,-10132,-26351,25126,-21443,-29321,9397,-18897,13514,11743,-12354,-24748,18295,-30015,4670,-1565,-28863,5013,10691,21477,-2158,21216,-28722,9783,17426,-5095,8914,-12905,-6460,17885,-6481,-23116,-10463,-17747,-1504,8325,-3192,-22249,4880,-30844,-30979,-29422,19162,14197,23513,-27540,-12411,-25585,-24042,6334,23186,17289,-3381,-7695,11382,29625,20716,-17614,1327,14825,-16516,25472,-4749,16789,-30675,-2806,7695,-24295,13654,-31507,24167,2179,3123,18941,27064,-32134,-23690,-10912,-12307,20218,28485,-26502,-28806,-23337,-9947,2039,-3785,24828,6794,-13452,-23443,20105,-9600,-13978,13251,-30000,-22059,21512,-31984,21983,26515,-781,29722,13049,-23333,32611,-4964,29777,24879,-4579,-9974,31671,21190,6472,-12007,-3705,1298,-32096,-8226,-2170,15092,-14915,-7702,
18920,26572,3233,8692,-10989,18873,16577,13207,9859,4437,-14212,18773,-2203,6277,-21677,32762,-12230,20360,30860,-21159,-11686,-25419,1455,-4736,3728,-27084,-16219,24899,-8943,-32498,4509,29298,-16064,-10913,22385,20528,-29968,-31066,17403,5887,-6096,12555,-9305,-1174,19270,22762,32259,-23145,8585,-5515,-24803,-12198,32742,-6566,-20451,-20642,21977,25574,1693,-4829,-15606,-21547,20245,-7215,21744,31121,-28753,-13202,-15207,22640,15478,22379,-18875,-14342,-1167,-27305,14502,-8438,2583,2821,-5879,17107,-30599,-17002,-15997,22492,-7598,6651,-62,-12132,-30466,29323,18182,-5896,22007,-21557,-16661,17253,-31435,-27921,-24612,653,-20163,-19050,16417,7901,31100,-10345,20357,-494,9007,-215,-9274,15157,14922,-23574,15613,-2719,-30036,20710,31967,-4600,19040,-26328,18878,-13915,10309,-30818,2637,27952,25945,-7826,11544,-27995,-5001,-25103,-25542,12874,31436,10864,5363,-19895,-9448,30656,17989,-27520,16409,-6715,591,-10248,-29479,23885,29228,-2507,17870,-20363,24479,9856,-31613,-30019,-18846,20497,7071,-11639,30835,-16835,2064,3893,-20815,6214,23829,23451,18146,7757,-13521,14740,-9226,-15243,4937,13009,28424,6279,-755,-14271,3146,-29161,-14558,71,11312,14109,-14325,22505,-32539,-10108,28518,-23173,29806,20839,31893,25766,-25916,-18804,-16550,-20092,26087,21407,-12528,3243,10387,1086,-27210,-11680,27402,-10409,15115,16674,7431,-23659,-29053,-26625,-18206,-29465,-31771,-27261,29560,10528,12283,-25485,9705,9523,24993,-13720,20826,25830,31495,-26971,-11646,-1180,-19131,27972,291,5877,-18367,17089,558,19162,2265,32345,6019,-16662,-22356,30398,-12734,31477,13245,12835,-23954,-19726,-16630,-2471,27972,-10081,27644,28245,17351,23070,-4579,24617,-3129,18685,5100,19125,14704,27604,-25546,-17536,-9142,19367,-21628,-13172,21902,-32281,-5003,28266,32010,-12555,-2662,9131,10376,-10275,-2634,-10446,-6325,-22590,29624,26691,-12724,-7480,-32695,-12621,-13281,-9536,-19531,12480,-29441,13816,-6798,17802,11940,26561,-18327,30499,32561,16607,25403,19976,-23249,26610,3881,12239,19766,11310,-9033,-29
868,27976,10217,-15244,5766,26827,-13919,-4287,8099,-387,-11610,31465,-28416,27224,20222,10557,5752,-14001,23408,-9803,27692,25007,23673,890,-26489,8756,-22566,25250,-12664,30878,9110,10642,22469,-7507,-31747,-27876,-8161,3590,-21509,-27761,-29252,-8955,32682,22926,18064,2952,18021,25475,-22495,18820,11829,-24019,1254,-6771,-26270,-22248,29399,-14665,-30220,-14906,-2722,13673,4472,16984,15774,-2190,15536,11023,-1875,-24536,18078,4819,-15535,-16626,2826,-23356,-16688,-22560,-16125,12681,27117,22517,27846,26414,-7999,-30323,-4012,-16341,-1367,21371,14445,1501,-23993 diff --git a/tensorflow/lite/micro/integration_tests/seanet/pad/pad1.tflite b/tensorflow/lite/micro/integration_tests/seanet/pad/pad1.tflite new file mode 100644 index 0000000..0f197a6 Binary files /dev/null and b/tensorflow/lite/micro/integration_tests/seanet/pad/pad1.tflite differ diff --git a/tensorflow/lite/micro/integration_tests/seanet/pad/pad10.tflite b/tensorflow/lite/micro/integration_tests/seanet/pad/pad10.tflite new file mode 100644 index 0000000..b4c2aec Binary files /dev/null and b/tensorflow/lite/micro/integration_tests/seanet/pad/pad10.tflite differ diff --git a/tensorflow/lite/micro/integration_tests/seanet/pad/pad10_golden_int16.csv b/tensorflow/lite/micro/integration_tests/seanet/pad/pad10_golden_int16.csv new file mode 100644 index 0000000..2220e57 --- /dev/null +++ b/tensorflow/lite/micro/integration_tests/seanet/pad/pad10_golden_int16.csv @@ -0,0 +1 @@ 
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,8757,27708,9885,-7054,-10074,-25213,-8111,-15971,-28195,-32492,21808,18950,-9791,-4266,23818,-28794,-31495,30879,15147,29703,14538,4095,26583,-13998,26655,22908,-6448,7783,27419,32410,-8818,13604,-16361,22827,-25466,8907,11255,-31638,7633,24543,19414,-15765,-11004,-1978,-13721,20930,20409,29005,9829,-27341,24771,-5546,28499,14119,1749,-3857,-20091,9122,8084,23932,-15238,342,-14336,10968,-7833,404,-23117,10830,16979,31261,-26200,10326,16157,14160,-30767,4178,685,19380,22284,157,31600,7943,-12530,27391,1983,-31726,32401,-24532,-15366,-16893,17612,32376,13086,-20113,13902,32071,-12675,28914,2519,11296,-4449,-12137,-21399,29498,2317,-1379,17870,26711,-26589,-14119,-30966,-23829,-7795,-5607,-5678,7201,-22412,-12591,23003,-7212,-11705,21889,30345,-11040,-4183,8245,-15709,-12628,24672,5195,10438,-19495,18310,-28653,16282,22426,-31821,32469,25995,20976,4427,29840,-7095,-23248,16378,1573,-14751,-11713,1928,-18292,-29316,-27936,-15152,2821,-32026,-23601,-25027,-21740,11373,30172,18276,-19943,1843,-29529,11881,11988,10669,10114,24900,8013,5250,742,-8423,7252,-3443,-20792,-3625,-25320,-15698,31505,-32724,31538,10708,-21547,-7386,18605,30423,-5316,-18629,-21418,-17119,22583,8072,23645,29143,23891,-609,-30693,-4977,5804,-27002,8123,2342,-23892,16478,14727,32216,-13941,119,2040,-20512,24600,-28544,30003,-12473,-10887,-9706,-21991,18301,29231,19587,10438,-22588,8659,16919,10537,21233,2291,636,-15578,11196,3438,32057,21636,7490,-19273,-5390,-18689,-16116,18658,-7536,-21184,-11482,-8695,25245,18275,-21533,-22838,19276,-22403,12735,-21156,-27314,-20088,21350,-27516,24869,27670,-13170,24712,14285,-9803,32133,-16816,20563,-4166,-14161,-25101,21133,23550,-17384,7523,27316,14365,-28028,20141,18214,-15463,-17186,-4398,-32712,30254,11324,-32548,20170,7357,-29889,-27444,30096,-21557,3026,8950,-7710,-12973,-26162,-5796,-20176,10094,30883,23166,29434,-5022,19425,-232
05,10337,20656,-17427,13217,23243,-823,29134,-30750,-13294,4013,32419,-12483,-5973,-10239,31927,-30543,4405,7512,26026,16295,7378,-1525,-27999,-29809,17078,-17590,25385,3969,-4263,-10286,-7626,5868,20804,10363,5417,30792,-10829,13101,3449,1499,-26299,-32033,-2433,28214,-3559,-31304,-19386,10871,6044,-266,-16324,-23598,19444,7925,-6731,-13090,-17973,10142,-7303,15692,27800,25785,20684,-20194,-18096,20843,8279,-27288,19807,32379,11468,12170,-15471,20876,-20543,12076,32540,-19256,-26904,-22270,3353,-27389,278,19240,-12116,-18383,-10927,30851,-13621,-27852,7068,6295,-15148,27848,-17805,16445,13551,5380,-27013,-17735,-23626,-13021,20842,26876,2300,-5837,-18125,8262,27646,-13636,10450,-756,22969,955,-462,-10278,-17547,26180,28563,-13078,-32072,3015,23573,24659,25916,25062,-90,31286,22404,11128,-29626,-18153,29952,30722,2586,-30948,-1995,-32008,7549,-6717,32101,8798,-28191,-23442,-27249,30589,13600,21039,21068,32468,-22648,-23679,25341,7350,-18727,22599,-4267,29831,-10556,-15737,17766,-27179,27089,4535,-32610,-16024,-23841,20193,-1308,937,-10996,-17429,-16061,-32142,8673,30087,28566,-23973,23181,13930,7025,20832,-6962,20416,-23610,3250,18563,12626,-29810,23003,-17099,-29812,17193,-31436,7088,-17974,-8904,20576,9603,8722,-10927,188,-20819,-16318,-3108,-21386,-14221,-1742,10047,-23455,16509,7255,-1968,-24197,-25113,25931,32618,-3487,11181,-24431,-10476,-21224,20405,26712,-22087,-6681,-9186,-26025,-23765,17459,22491,-1495,24877,-24015,-5780,1303,9060,15183,-12882,-19183,30585,-15606,20090,-25524,27648,-21650,14178,24021,29244,4358,3324,-19258,21483,-26244,-22224,11833,22181,23477,-19912,20384,22728,-15345,3931,-29860,2338,28007,-14198,-21099,-14314,22026,27449,9923,31001,19614,-7173,18677,32211,18560,-15333,-224,-8013,-27474,27318,-17484,7264,5527,4966,-4209,13682,3380,24018,29133,-17084,-5515,-10573,-1530,-8492,29687,-23868,4715,28924,5458,-7314,5632,-13461,11010,14410,18567,-21695,-689,8975,-27306,18646,-1960,-15049,18031,-29932,12755,23210,7640,30489,-18669,5896,23836,-296
18,20546,8659,-18522,-17899,-1288,-23373,9749,-5458,-6587,-25418,-10553,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 diff --git a/tensorflow/lite/micro/integration_tests/seanet/pad/pad10_input0_int16.csv b/tensorflow/lite/micro/integration_tests/seanet/pad/pad10_input0_int16.csv new file mode 100644 index 0000000..f012b8c --- /dev/null +++ b/tensorflow/lite/micro/integration_tests/seanet/pad/pad10_input0_int16.csv @@ -0,0 +1 @@ +8757,27708,9885,-7054,-10074,-25213,-8111,-15971,-28195,-32492,21808,18950,-9791,-4266,23818,-28794,-31495,30879,15147,29703,14538,4095,26583,-13998,26655,22908,-6448,7783,27419,32410,-8818,13604,-16361,22827,-25466,8907,11255,-31638,7633,24543,19414,-15765,-11004,-1978,-13721,20930,20409,29005,9829,-27341,24771,-5546,28499,14119,1749,-3857,-20091,9122,8084,23932,-15238,342,-14336,10968,-7833,404,-23117,10830,16979,31261,-26200,10326,16157,14160,-30767,4178,685,19380,22284,157,31600,7943,-12530,27391,1983,-31726,32401,-24532,-15366,-16893,17612,32376,13086,-20113,13902,32071,-12675,28914,2519,11296,-4449,-12137,-21399,29498,2317,-1379,17870,26711,-26589,-14119,-30966,-23829,-7795,-5607,-5678,7201,-22412,-12591,23003,-7212,-11705,21889,30345,-11040,-4183,8245,-15709,-12628,24672,5195,10438,-19495,18310,-28653,16282,22426,-31821,32469,25995,20976,4427,29840,-7095,-23248,16378,1573,-14751,-11713,1928,-18292,-29316,-27936,-15152,2821,-32026,-23601,-25027,-21740,11373,30172,18276,-19943,1843,-29529,11881,11988,10669,10114,24900,8013,5250,742,-8423,7252,-3443,-20792,-3625,-25320,-15698,31505,-32724,31538,10708,-21547,-7386,18605,30423,-5316,-18629,-21418,-17119,22583,8072,23645,29143,23891,-609,-30693,-4977,5804,-27002,8123,2342,-23892,16478,14727,32216,-13941,119,2040,-20512,24600,-28544,30003,-12473,-10887,-9706,-21991,18301,29231,19587,10438,-22588,8659,16919,10537,21233,2291,636,-15578,11196,3438,32057,21636,7490,-19273,-5390,-18689,-16116,18658,-7536,-21184,-11
482,-8695,25245,18275,-21533,-22838,19276,-22403,12735,-21156,-27314,-20088,21350,-27516,24869,27670,-13170,24712,14285,-9803,32133,-16816,20563,-4166,-14161,-25101,21133,23550,-17384,7523,27316,14365,-28028,20141,18214,-15463,-17186,-4398,-32712,30254,11324,-32548,20170,7357,-29889,-27444,30096,-21557,3026,8950,-7710,-12973,-26162,-5796,-20176,10094,30883,23166,29434,-5022,19425,-23205,10337,20656,-17427,13217,23243,-823,29134,-30750,-13294,4013,32419,-12483,-5973,-10239,31927,-30543,4405,7512,26026,16295,7378,-1525,-27999,-29809,17078,-17590,25385,3969,-4263,-10286,-7626,5868,20804,10363,5417,30792,-10829,13101,3449,1499,-26299,-32033,-2433,28214,-3559,-31304,-19386,10871,6044,-266,-16324,-23598,19444,7925,-6731,-13090,-17973,10142,-7303,15692,27800,25785,20684,-20194,-18096,20843,8279,-27288,19807,32379,11468,12170,-15471,20876,-20543,12076,32540,-19256,-26904,-22270,3353,-27389,278,19240,-12116,-18383,-10927,30851,-13621,-27852,7068,6295,-15148,27848,-17805,16445,13551,5380,-27013,-17735,-23626,-13021,20842,26876,2300,-5837,-18125,8262,27646,-13636,10450,-756,22969,955,-462,-10278,-17547,26180,28563,-13078,-32072,3015,23573,24659,25916,25062,-90,31286,22404,11128,-29626,-18153,29952,30722,2586,-30948,-1995,-32008,7549,-6717,32101,8798,-28191,-23442,-27249,30589,13600,21039,21068,32468,-22648,-23679,25341,7350,-18727,22599,-4267,29831,-10556,-15737,17766,-27179,27089,4535,-32610,-16024,-23841,20193,-1308,937,-10996,-17429,-16061,-32142,8673,30087,28566,-23973,23181,13930,7025,20832,-6962,20416,-23610,3250,18563,12626,-29810,23003,-17099,-29812,17193,-31436,7088,-17974,-8904,20576,9603,8722,-10927,188,-20819,-16318,-3108,-21386,-14221,-1742,10047,-23455,16509,7255,-1968,-24197,-25113,25931,32618,-3487,11181,-24431,-10476,-21224,20405,26712,-22087,-6681,-9186,-26025,-23765,17459,22491,-1495,24877,-24015,-5780,1303,9060,15183,-12882,-19183,30585,-15606,20090,-25524,27648,-21650,14178,24021,29244,4358,3324,-19258,21483,-26244,-22224,11833,22181,23477,-19912,20384,227
28,-15345,3931,-29860,2338,28007,-14198,-21099,-14314,22026,27449,9923,31001,19614,-7173,18677,32211,18560,-15333,-224,-8013,-27474,27318,-17484,7264,5527,4966,-4209,13682,3380,24018,29133,-17084,-5515,-10573,-1530,-8492,29687,-23868,4715,28924,5458,-7314,5632,-13461,11010,14410,18567,-21695,-689,8975,-27306,18646,-1960,-15049,18031,-29932,12755,23210,7640,30489,-18669,5896,23836,-29618,20546,8659,-18522,-17899,-1288,-23373,9749,-5458,-6587,-25418,-10553 diff --git a/tensorflow/lite/micro/integration_tests/seanet/pad/pad11.tflite b/tensorflow/lite/micro/integration_tests/seanet/pad/pad11.tflite new file mode 100644 index 0000000..2a11b3b Binary files /dev/null and b/tensorflow/lite/micro/integration_tests/seanet/pad/pad11.tflite differ diff --git a/tensorflow/lite/micro/integration_tests/seanet/pad/pad11_golden_int16.csv b/tensorflow/lite/micro/integration_tests/seanet/pad/pad11_golden_int16.csv new file mode 100644 index 0000000..b26eb58 --- /dev/null +++ b/tensorflow/lite/micro/integration_tests/seanet/pad/pad11_golden_int16.csv @@ -0,0 +1 @@ 
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,25135,4842,-16341,-22166,-32509,-24893,2374,-32284,24283,21039,-27560,-14847,-27788,-7480,-5084,20520,-8981,-24644,13087,-16575,21228,-27491,11592,10328,23837,12648,-15281,3917,31296,-19584,-9638,19632,29967,-5318,-22818,-13364,-30308,13359,21785,3531,26438,20420,-16312,14765,-5539,31776,-11077,-25454,4761,27420,13936,14564,-5054,14625,-28945,2429,32151,13298,-31676,30667,-15563,672,2910,24210,9509,22218,-24905,32672,-22408,14306,-19027,20422,-1456,-8213,-30368,-30212,23823,23011,-19095,-19541,-25271,-10305,13135,21729,-29129,15232,17087,3000,802,-31561,-24032,-1527,30713,21710,-26655,12558,-19502,5620,3364,24684,26283,-23291,-9746,-12327,-30274,27235,20327,31578,-23715,-3766,32580,26197,-28947,-30880,-29265,-22995,-2521,-4997,-5958,-4577,-8198,-5936,-9047,4995,-29485,-19823,30652,6674,-2881,-26287,-7710,19964,-9363,-10130,8901,-25780,3588,28215,-3782,-4801,11517,14386,5246,9839,-11700,20227,21964,18145,11438,-17362,7129,8256,25803,15521,-5739,-1685,-20014,6992,-12805,8084,-14728,-21372,12680,-2344,-22952,734,13817,-11669,23820,21884,3207,29485,28729,21047,-1874,-14485,-25714,20377,-10293,20364,-2298,-15644,24432,8255,-22195,20128,20037,25607,23918,21928,-31458,-27633,-18242,-13551,2907,763,-6188,-22465,-6679,17167,3053,10708,-1637,-1508,-15513,-8449,-23917,3755,-5923,-13939,8991,-23982,-8444,-5026,30461,4771,-23646,-18059,31961,-5259,-6465,-23159,13402,30082,29889,-32349,29322,27508,-23587,16335,626,2650,-2484,336,26655,-11738,-18715,6543,2463,16066,-13305,-12619,10269,27807,-22746,28856,21781,-29939,31112,-32304,24974,28999,23983,-15192,19297,-3403,-3997,26060,-16821,-20488,-21686,14747,-6749,-5569,-28175,-24655,-18805,223,22799,-2329,-8412,-5654,23978,13735,3366,-11188,14136,-21160,-6698,22991,24765,26857,8963,10601,22069,-26388,-16272,-19745,-29381,2260,23879,20539,-534,-18355,-20004,-13363,-11354,-19504,-16917,-2638,18955,-25427,-3088
7,-7374,-29257,-26606,-10906,28704,19087,3308,-20057,9206,-7920,19008,-5541,20181,-23164,21651,7283,-13257,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 diff --git a/tensorflow/lite/micro/integration_tests/seanet/pad/pad11_input0_int16.csv b/tensorflow/lite/micro/integration_tests/seanet/pad/pad11_input0_int16.csv new file mode 100644 index 0000000..cded281 --- /dev/null +++ b/tensorflow/lite/micro/integration_tests/seanet/pad/pad11_input0_int16.csv @@ -0,0 +1 @@ +25135,4842,-16341,-22166,-32509,-24893,2374,-32284,24283,21039,-27560,-14847,-27788,-7480,-5084,20520,-8981,-24644,13087,-16575,21228,-27491,11592,10328,23837,12648,-15281,3917,31296,-19584,-9638,19632,29967,-5318,-22818,-13364,-30308,13359,21785,3531,26438,20420,-16312,14765,-5539,31776,-11077,-25454,4761,27420,13936,14564,-5054,14625,-28945,2429,32151,13298,-31676,30667,-15563,672,2910,24210,9509,22218,-24905,32672,-22408,14306,-19027,20422,-1456,-8213,-30368,-30212,23823,23011,-19095,-19541,-25271,-10305,13135,21729,-29129,15232,17087,3000,802,-31561,-24032,-1527,30713,21710,-26655,12558,-19502,5620,3364,24684,26283,-23291,-9746,-12327,-30274,27235,20327,31578,-23715,-3766,32580,26197,-28947,-30880,-29265,-22995,-2521,-4997,-5958,-4577,-8198,-5936,-9047,4995,-29485,-19823,30652,6674,-2881,-26287,-7710,19964,-9363,-10130,8901,-25780,3588,28215,-3782,-4801,11517,14386,5246,9839,-11700,20227,21964,18145,11438,-17362,7129,8256,25803,15521,-5739,-1685,-20014,6992,-12805,8084,-14728,-21372,12680,-2344,-22952,734,13817,-11669,23820,21884,3207,29485,28729,21047,-1874,-14485,-25714,20377,-10293,20364,-2298,-15644,24432,8255,-22195,20128,20037,25607,23918,21928,-31458,-27633,-18242,-13551,2907,763,-6188,-22465,-6679,17167,3053,10708,-1637,-1508,-15513,-8449,-23917,3755,-5923,-13939,8991,-23982,-8444,-5026,30461,4771,-23646,-18059,31961,-5259,-6465,-23159,13402,30082,29889,-32349,29322,27508,-23587,16335,626,2650,-2484,336,26655,-117
38,-18715,6543,2463,16066,-13305,-12619,10269,27807,-22746,28856,21781,-29939,31112,-32304,24974,28999,23983,-15192,19297,-3403,-3997,26060,-16821,-20488,-21686,14747,-6749,-5569,-28175,-24655,-18805,223,22799,-2329,-8412,-5654,23978,13735,3366,-11188,14136,-21160,-6698,22991,24765,26857,8963,10601,22069,-26388,-16272,-19745,-29381,2260,23879,20539,-534,-18355,-20004,-13363,-11354,-19504,-16917,-2638,18955,-25427,-30887,-7374,-29257,-26606,-10906,28704,19087,3308,-20057,9206,-7920,19008,-5541,20181,-23164,21651,7283,-13257 diff --git a/tensorflow/lite/micro/integration_tests/seanet/pad/pad12.tflite b/tensorflow/lite/micro/integration_tests/seanet/pad/pad12.tflite new file mode 100644 index 0000000..6e93ae3 Binary files /dev/null and b/tensorflow/lite/micro/integration_tests/seanet/pad/pad12.tflite differ diff --git a/tensorflow/lite/micro/integration_tests/seanet/pad/pad12_golden_int16.csv b/tensorflow/lite/micro/integration_tests/seanet/pad/pad12_golden_int16.csv new file mode 100644 index 0000000..ab64166 --- /dev/null +++ b/tensorflow/lite/micro/integration_tests/seanet/pad/pad12_golden_int16.csv @@ -0,0 +1 @@ 
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,-10790,9447,30411,-3512,15605,-28534,29786,10839,-15967,19296,-26529,29494,-29150,12981,31175,-25902,29479,-28394,-28103,11913,28227,-16626,-14375,-483,-23676,1065,7100,-29134,-19918,-13436,8766,-31851,-23317,17708,-21994,2768,7264,17691,-11136,9229,5447,-24677,2811,7887,6552,-14858,-11514,-13998,4858,23354,-3364,-25588,4876,12173,30287,-31709,2071,-8850,12579,-20027,-315,8732,-8420,8960,-1290,23320,26381,11550,4098,11094,-27185,14837,-10000,30489,10771,-1865,-27759,4992,27175,29093,24632,18395,32578,-8241,8271,-16561,2625,-13866,18081,23354,28905,-14736,-21362,-21928,28600,-291,30242,19910,13803,-1491,-818,-4192,27296,7883,25202,7489,3396,31895,26876,32658,13842,-28246,5342,-1806,-351,12320,-12149,-1363,-9006,1649,21270,13715,31457,-2703,-6144,-7936,4331,-1249,3524,14131,10839,4168,-11950,8607,15804,-17598,31299,5967,28276,5955,16456,22975,-11735,-14345,-32767,-28936,14007,-4768,29054,5075,-8946,23039,16786,971,27661,-31555,21438,-26009,864,6275,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 diff --git a/tensorflow/lite/micro/integration_tests/seanet/pad/pad12_input0_int16.csv b/tensorflow/lite/micro/integration_tests/seanet/pad/pad12_input0_int16.csv new file mode 100644 index 0000000..0ff1a8f --- /dev/null +++ b/tensorflow/lite/micro/integration_tests/seanet/pad/pad12_input0_int16.csv @@ -0,0 +1 @@ 
+-10790,9447,30411,-3512,15605,-28534,29786,10839,-15967,19296,-26529,29494,-29150,12981,31175,-25902,29479,-28394,-28103,11913,28227,-16626,-14375,-483,-23676,1065,7100,-29134,-19918,-13436,8766,-31851,-23317,17708,-21994,2768,7264,17691,-11136,9229,5447,-24677,2811,7887,6552,-14858,-11514,-13998,4858,23354,-3364,-25588,4876,12173,30287,-31709,2071,-8850,12579,-20027,-315,8732,-8420,8960,-1290,23320,26381,11550,4098,11094,-27185,14837,-10000,30489,10771,-1865,-27759,4992,27175,29093,24632,18395,32578,-8241,8271,-16561,2625,-13866,18081,23354,28905,-14736,-21362,-21928,28600,-291,30242,19910,13803,-1491,-818,-4192,27296,7883,25202,7489,3396,31895,26876,32658,13842,-28246,5342,-1806,-351,12320,-12149,-1363,-9006,1649,21270,13715,31457,-2703,-6144,-7936,4331,-1249,3524,14131,10839,4168,-11950,8607,15804,-17598,31299,5967,28276,5955,16456,22975,-11735,-14345,-32767,-28936,14007,-4768,29054,5075,-8946,23039,16786,971,27661,-31555,21438,-26009,864,6275 diff --git a/tensorflow/lite/micro/integration_tests/seanet/pad/pad13.tflite b/tensorflow/lite/micro/integration_tests/seanet/pad/pad13.tflite new file mode 100644 index 0000000..d47357d Binary files /dev/null and b/tensorflow/lite/micro/integration_tests/seanet/pad/pad13.tflite differ diff --git a/tensorflow/lite/micro/integration_tests/seanet/pad/pad13_golden_int16.csv b/tensorflow/lite/micro/integration_tests/seanet/pad/pad13_golden_int16.csv new file mode 100644 index 0000000..81a148d --- /dev/null +++ b/tensorflow/lite/micro/integration_tests/seanet/pad/pad13_golden_int16.csv @@ -0,0 +1 @@ 
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,-27379,7634,25195,14017,23345,29910,-19504,14329,-18119,27137,18952,-32149,-8518,-23149,-20236,-51,17618,12505,8930,15041,21364,4120,30286,9430,-19259,4841,27512,-12736,-25001,29784,6145,-15969,-21759,-22335,-21675,23254,-16114,16796,1605,14300,24735,29786,-25247,15107,-5261,29978,11774,-16181,-24220,14297,30429,-4333,-13003,-6848,15761,-27277,23075,11991,12729,8987,-869,13342,18391,27363,18982,4856,10504,22138,-26670,3364,-20859,-15988,-27068,-15065,27971,-14309,-13155,-9394,-11948,-15841,-17360,30039,28071,1006,-8506,-17600,-10386,-27616,-8519,2115,-25989,-9203,-22539,11714,576,5116,-11165,-26549,19587,-17558,-8627,22470,15283,30132,-10887,-6145,-18125,13991,14803,-18921,-23998,17151,14077,31738,-13557,-19911,-10369,-14548,11674,6089,-26144,-10806,-4969,2124,-7697,-17312,-29722,-10938,-7813,-23344,22523,-1414,22276,-24057,23671,8877,15415,15943,21683,3199,-29914,-23835,-497,31436,-14040,19398,1757,24552,12255,10125,-13169,-4323,3779,26687,-29010,-18372,16553,-1580,-7413,2855,8393,-10376,4597,22302,29774,8963,13945,663,-22570,-6464,12216,-18554,26149,18921,24989,12898,-17081,19392,-5613,14655,-20381,-15472,-19845,29903,27847,-28762,-6325,25148,27108,7594,-8505,-13081,6690,20184,533,6792,-27253,-2661,-7647,-5228,-15386,-1685,-15807,-26307,17728,3811,-8705,29402,18265,-8070,-4727,19018,-25814,24071,-16991,-18435,9764,-3875,-10199,25131,7651,30926,20052,31521,5238,-19430,-6757,25357,28719,6593,-21731,-12219,1051,-5619,-8784,13951,-14041,-23957,28391,5887,5497,-16223,32277,-30678,6457,230,31029,-25679,-12343,31927,7114,24828,-22397,-12663,11358,-2393,5893,-11210,1704,-30022,19448,-22238,15344,-21003,-18629,30721,-29677,-16266,-4870,-8098,12804,-1348,-4339,-22317,-22861,19262,4421,14830,-2382,31747,-20834,19407,-31504,-17979,17510,-28189,16754,-16882,15867,2658,-17099,-5652,-24219,-22320,-4349,139,14071,10703,8383,5563,-16351,-5149,-12665,-297
36,-12432,-8069,-28072,-8546,-1119,29704,29976,-1066,4593,-17218,18990,28902,-3513,20250,23507,-23736,7503,20188,-22274,7072,-17517,1615,32330,11641,-23993,11663,-6154,-16321,23903,28166,-15086,-13832,-28423,-5587,-8970,20436,-17784,8063,20884,12933,21969,468,24254,26146,31276,20459,-5803,24029,10001,6322,-26790,-27638,-32718,10612,32585,-26381,31164,-15069,-8805,5800,2306,-12914,32284,-22004,24678,-22259,14647,-9584,20401,29276,-5627,16009,-6731,29924,-24444,-225,-16890,9826,-7951,796,-26742,17282,-5566,1805,5683,28113,-32512,26131,-9343,-28894,21273,-16580,-26036,-21512,-21697,-32237,3390,-13049,7043,-30519,-22815,-21612,-29227,-32356,17485,21923,-21434,-5513,-12913,23324,-14176,-14452,31606,21235,25544,-25268,-21589,21770,8038,12142,23529,32456,5811,23291,15061,16813,7357,28972,20904,5661,13380,9816,32285,-2964,-30615,21647,17990,-15014,1307,18742,-146,27502,1364,-24809,-18400,-31860,-29698,-20538,-26432,5252,-454,19908,-19973,-18415,21489,-30836,5353,29729,15526,-10038,-26321,32422,-4406,-19580,-29734,-26292,21539,-6177,24164,-24260,-11963,23574,-13354,-17405,14601,-15937,-31072,32148,-24245,-15201,3105,-488,-28860,15759,-23747,-16094,-18371,-4838,-27271,-27618,-20473,11821,-23868,2702,-3242,-7128,10850,-2329,16283,1252,24623,-16691,29193,-28982,7113,-27328,-25792,-5400,-7397,-28915,19845,-4828,-17316,-23764,-24263,29102,23945,-25816,11022,-19780,-2413,-27638,29513,-32405,7705,13567,-24619,21685,8004,-21295,-26107,11717,-4574,22682,-3086,11312,-5756,4522,27228,-391,870,-300,-28287,5593,27247,27411,28763,-30408,-32287,-14365,7381,30473,-25604,-32520,-3420,-12227,-15032,5483,-11055,-16069,18082,-14623,15675,-22796,4249,20841,-5572,27591,-11911,29335,13092,32378,25591,5531,-9778,-2540,-23732,7195,1290,15248,-8263,6881,-5557,-30767,8795,-6416,-5475,30665,-6856,-6869,-30361,20072,6326,9701,-29918,-23338,5198,365,-28755,10206,14555,-30900,26696,26933,-29295,-12820,-20175,-27199,24279,12557,-11078,-20410,-16849,9213,28234,-27193,28926,-20186,-6002,15211,2518,-14405,678
1,19147,23264,17048,-14726,15307,-11604,-27719,-21149,1809,26013,25868,27304,14450,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 diff --git a/tensorflow/lite/micro/integration_tests/seanet/pad/pad13_input0_int16.csv b/tensorflow/lite/micro/integration_tests/seanet/pad/pad13_input0_int16.csv new file mode 100644 index 0000000..f7da710 --- /dev/null +++ b/tensorflow/lite/micro/integration_tests/seanet/pad/pad13_input0_int16.csv @@ -0,0 +1 @@ +-27379,7634,25195,14017,23345,29910,-19504,14329,-18119,27137,18952,-32149,-8518,-23149,-20236,-51,17618,12505,8930,15041,21364,4120,30286,9430,-19259,4841,27512,-12736,-25001,29784,6145,-15969,-21759,-22335,-21675,23254,-16114,16796,1605,14300,24735,29786,-25247,15107,-5261,29978,11774,-16181,-24220,14297,30429,-4333,-13003,-6848,15761,-27277,23075,11991,12729,8987,-869,13342,18391,27363,18982,4856,10504,22138,-26670,3364,-20859,-15988,-27068,-15065,27971,-14309,-13155,-9394,-11948,-15841,-17360,30039,28071,1006,-8506,-17600,-10386,-27616,-8519,2115,-25989,-9203,-22539,11714,576,5116,-11165,-26549,19587,-17558,-8627,22470,15283,30132,-10887,-6145,-18125,13991,14803,-18921,-23998,17151,14077,31738,-13557,-19911,-10369,-14548,11674,6089,-26144,-10806,-4969,2124,-7697,-17312,-29722,-10938,-7813,-23344,22523,-1414,22276,-24057,23671,8877,15415,15943,21683,3199,-29914,-23835,-497,31436,-14040,19398,1757,24552,12255,10125,-13169,-4323,3779,26687,-29010,-18372,16553,-1580,-7413,2855,8393,-10376,4597,22302,29774,8963,13945,663,-22570,-6464,12216,-18554,26149,18921,24989,12898,-17081,19392,-5613,14655,-20381,-15472,-19845,29903,27847,-28762,-6325,25148,27108,7594,-8505,-13081,6690,20184,533,6792,-27253,-2661,-7647,-5228,-15386,-1685,-15807,-26307,17728,3811,-8705,29402,18265,-8070,-4727,19018,-25814,24071,-16991,-18435,9764,-3875,-10199,25131,7651,30926,20052,31521,5238,-19430,-6757,25357,28719,6593,-21731,-12219,1051,-5619,-8784,13951,-14041,-23957,28391,5
887,5497,-16223,32277,-30678,6457,230,31029,-25679,-12343,31927,7114,24828,-22397,-12663,11358,-2393,5893,-11210,1704,-30022,19448,-22238,15344,-21003,-18629,30721,-29677,-16266,-4870,-8098,12804,-1348,-4339,-22317,-22861,19262,4421,14830,-2382,31747,-20834,19407,-31504,-17979,17510,-28189,16754,-16882,15867,2658,-17099,-5652,-24219,-22320,-4349,139,14071,10703,8383,5563,-16351,-5149,-12665,-29736,-12432,-8069,-28072,-8546,-1119,29704,29976,-1066,4593,-17218,18990,28902,-3513,20250,23507,-23736,7503,20188,-22274,7072,-17517,1615,32330,11641,-23993,11663,-6154,-16321,23903,28166,-15086,-13832,-28423,-5587,-8970,20436,-17784,8063,20884,12933,21969,468,24254,26146,31276,20459,-5803,24029,10001,6322,-26790,-27638,-32718,10612,32585,-26381,31164,-15069,-8805,5800,2306,-12914,32284,-22004,24678,-22259,14647,-9584,20401,29276,-5627,16009,-6731,29924,-24444,-225,-16890,9826,-7951,796,-26742,17282,-5566,1805,5683,28113,-32512,26131,-9343,-28894,21273,-16580,-26036,-21512,-21697,-32237,3390,-13049,7043,-30519,-22815,-21612,-29227,-32356,17485,21923,-21434,-5513,-12913,23324,-14176,-14452,31606,21235,25544,-25268,-21589,21770,8038,12142,23529,32456,5811,23291,15061,16813,7357,28972,20904,5661,13380,9816,32285,-2964,-30615,21647,17990,-15014,1307,18742,-146,27502,1364,-24809,-18400,-31860,-29698,-20538,-26432,5252,-454,19908,-19973,-18415,21489,-30836,5353,29729,15526,-10038,-26321,32422,-4406,-19580,-29734,-26292,21539,-6177,24164,-24260,-11963,23574,-13354,-17405,14601,-15937,-31072,32148,-24245,-15201,3105,-488,-28860,15759,-23747,-16094,-18371,-4838,-27271,-27618,-20473,11821,-23868,2702,-3242,-7128,10850,-2329,16283,1252,24623,-16691,29193,-28982,7113,-27328,-25792,-5400,-7397,-28915,19845,-4828,-17316,-23764,-24263,29102,23945,-25816,11022,-19780,-2413,-27638,29513,-32405,7705,13567,-24619,21685,8004,-21295,-26107,11717,-4574,22682,-3086,11312,-5756,4522,27228,-391,870,-300,-28287,5593,27247,27411,28763,-30408,-32287,-14365,7381,30473,-25604,-32520,-3420,-12227,-15032,548
3,-11055,-16069,18082,-14623,15675,-22796,4249,20841,-5572,27591,-11911,29335,13092,32378,25591,5531,-9778,-2540,-23732,7195,1290,15248,-8263,6881,-5557,-30767,8795,-6416,-5475,30665,-6856,-6869,-30361,20072,6326,9701,-29918,-23338,5198,365,-28755,10206,14555,-30900,26696,26933,-29295,-12820,-20175,-27199,24279,12557,-11078,-20410,-16849,9213,28234,-27193,28926,-20186,-6002,15211,2518,-14405,6781,19147,23264,17048,-14726,15307,-11604,-27719,-21149,1809,26013,25868,27304,14450 diff --git a/tensorflow/lite/micro/integration_tests/seanet/pad/pad14.tflite b/tensorflow/lite/micro/integration_tests/seanet/pad/pad14.tflite new file mode 100644 index 0000000..f20d89d Binary files /dev/null and b/tensorflow/lite/micro/integration_tests/seanet/pad/pad14.tflite differ diff --git a/tensorflow/lite/micro/integration_tests/seanet/pad/pad14_golden_int16.csv b/tensorflow/lite/micro/integration_tests/seanet/pad/pad14_golden_int16.csv new file mode 100644 index 0000000..095d73c --- /dev/null +++ b/tensorflow/lite/micro/integration_tests/seanet/pad/pad14_golden_int16.csv @@ -0,0 +1 @@ 
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,-21195,-20432,11478,-17831,-27343,-13712,-19194,-14420,-30271,29640,26554,-32034,2113,-8329,18388,18464,26738,-30748,1741,-10564,27675,-2328,16037,-24337,17821,-9397,4075,8559,-13376,22666,15277,31372,14322,-1104,-20230,1850,-31238,-15690,-29585,-10077,875,-5264,31131,8314,-716,28347,-9659,5095,30159,4356,-23141,-27351,-30110,20181,-13931,9446,-22258,13465,17371,1092,29539,25567,16852,-19560,-2963,-17425,18961,-768,-26078,-24874,-23911,3154,-18108,15259,-474,-30382,15437,-3538,15155,-31111,-12782,32535,-14979,3543,-1274,10742,31621,-24672,-8500,-23207,3163,18119,14257,18165,-14315,12726,21227,5756,10331,3607,-1502,6366,24464,-4530,8198,-12166,-25949,15406,4409,-32001,4848,19534,-28535,2011,-10315,-27891,19067,-22662,13068,12024,-28802,-11444,10405,26983,23643,-24847,-30099,8129,-24972,4426,-18989,14856,-23549,-21422,2273,-13854,-23567,-18165,12955,-22841,-24928,28073,10968,-5923,3883,24020,-12289,-14320,13185,7726,-22611,4848,9690,-4001,5956,-20440,-2430,24341,-17378,27980,21013,-26155,-8871,-29679,15329,6360,-27061,5778,10670,4254,13489,-11786,6571,-19641,-12345,20309,23235,-20609,-21771,-15290,7434,-26093,27925,-8940,1798,-19939,8612,6193,-18529,-15594,27064,10203,14372,23813,-7658,-8925,874,-32531,-30732,11786,23947,3123,30689,-25008,-20768,-845,22683,13318,-20381,-14023,-25330,-24483,12741,-21124,6079,28986,-19249,-4417,-13523,23347,-16359,25577,-20592,-23121,-26613,-6596,32394,-21796,16466,24694,25591,9628,-21008,8938,3613,28651,6575,-16945,-6538,30456,-18138,-14108,7369,-6263,4207,17465,143,7964,-16400,14619,15437,24550,-23974,28210,6761,26133,-55,18274,-23315,-5746,-25343,-2741,4290,18967,2944,-28714,-2593,-11239,30181,31856,-18343,-29530,32756,-8618,5276,6079,-15834,20861,-11344,-2757,-12974,-16289,5581,-25542,-29851,3549,10201,3192,-23912,-739,10732,2834,-31221,-25486,9745,-22061,30210,-17120,12976,27665,22516,-1627,-1002,-10117,-25124,-27956,9929,-2170,-6998,12352,17030,-29078,31130,26002,-30
217,-1633,29341,-3871,11023,-7780,-10362,10812,8050,23102,19584,15807,-10134,-19973,6735,26679,8006,22263,-16837,-5136,-6594,13961,12403,-29857,-862,-22551,25010,-4600,-9389,3370,-22247,4777,3000,8381,24191,18813,8036,21971,22274,-21126,25754,7054,13123,16333,-2327,-15873,-11457,28685,16143,-13126,7080,-12888,22074,-31269,-21150,-23134,14979,26161,-17005,-16279,995,23835,24225,1095,-11491,22833,27111,-13512,26031,1937,31166,11905,9662,-28717,11517,-29087,5350,9665,20388,14022,26426,12973,-20607,6183,4184,-30750,20347,-15521,29919,-24001,-2506,-11768,12530,12,-14011,-10123,-22308,6005,15462,-22710,23809,31820,11748,-17047,15503,21961,-31120,12426,-1385,-686,-22212,-19463,13362,30206,26065,2791,13217,25733,-13821,-6885,-21034,-2104,-17490,5982,29908,5299,12902,-5539,16354,758,-27308,19112,25789,-11976,14497,140,-11974,-15335,-31053,29314,-10138,29324,-14230,-22871,1272,-12469,30258,-27716,-23378,19123,32531,-10901,-146,5615,3457,-11538,5632,-22506,-21228,11420,21785,70,-8215,13161,-1590,-2958,-6062,23018,-25114,10619,221,14959,-8015,-23811,2159,-1,-15402,-20786,8247,-5055,21693,-14272,7336,2057,-11442,-27479,-30988,7560,20140,-31216,5521,32165,-19500,18059,16828,27356,26460,-3948,-8753,-3162,3729,-18757,22466,31619,14850,32318,-24543,3006,3265,3997,20630,12017,1704,-24137,2656,-18389,27105,14977,26298,-3019,23190,32764,-14272,3786,5614,-25791,-20863,-21329,-13594,-21418,19530,9572,28354,-13452,16824,-24795,14370,16443,3129,20347,-2511,-26973,14576,26431,-29965,-9906,-17153,8462,20116,5420,-15219,-22235,-25967,-2949,-31956,28215,27647,-32278,6653,-6077,23284,-14987,-19988,-32529,-31087,-19791,23351,3414,-16358,4852,-537,31925,-24335,-24077,-5346,5556,7174,9101,14738,-18884,10863,-26752,14930,-10189,17698,-32234,-23159,-13141,-17690,14076,11831,15897,14329,11554,-17114,-10986,21402,22104,2276,-23301,-16512,6641,-3520,25205,8617,23907,-21879,27056,15467,10086,-6501,-11272,4494,3895,14391,-30649,-5580,-17570,14757,-12813,32487,-28121,16705,20399,-7997,-8599,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 diff --git a/tensorflow/lite/micro/integration_tests/seanet/pad/pad14_input0_int16.csv b/tensorflow/lite/micro/integration_tests/seanet/pad/pad14_input0_int16.csv new file mode 100644 index 0000000..c527af9 --- /dev/null +++ b/tensorflow/lite/micro/integration_tests/seanet/pad/pad14_input0_int16.csv @@ -0,0 +1 @@ +-21195,-20432,11478,-17831,-27343,-13712,-19194,-14420,-30271,29640,26554,-32034,2113,-8329,18388,18464,26738,-30748,1741,-10564,27675,-2328,16037,-24337,17821,-9397,4075,8559,-13376,22666,15277,31372,14322,-1104,-20230,1850,-31238,-15690,-29585,-10077,875,-5264,31131,8314,-716,28347,-9659,5095,30159,4356,-23141,-27351,-30110,20181,-13931,9446,-22258,13465,17371,1092,29539,25567,16852,-19560,-2963,-17425,18961,-768,-26078,-24874,-23911,3154,-18108,15259,-474,-30382,15437,-3538,15155,-31111,-12782,32535,-14979,3543,-1274,10742,31621,-24672,-8500,-23207,3163,18119,14257,18165,-14315,12726,21227,5756,10331,3607,-1502,6366,24464,-4530,8198,-12166,-25949,15406,4409,-32001,4848,19534,-28535,2011,-10315,-27891,19067,-22662,13068,12024,-28802,-11444,10405,26983,23643,-24847,-30099,8129,-24972,4426,-18989,14856,-23549,-21422,2273,-13854,-23567,-18165,12955,-22841,-24928,28073,10968,-5923,3883,24020,-12289,-14320,13185,7726,-22611,4848,9690,-4001,5956,-20440,-2430,24341,-17378,27980,21013,-26155,-8871,-29679,15329,6360,-27061,5778,10670,4254,13489,-11786,6571,-19641,-12345,20309,23235,-20609,-21771,-15290,7434,-26093,27925,-8940,1798,-19939,8612,6193,-18529,-15594,27064,10203,14372,23813,-7658,-8925,874,-32531,-30732,11786,23947,3123,30689,-25008,-20768,-845,22683,13318,-20381,-14023,-25330,-24483,12741,-21124,6079,28986,-19249,-4417,-13523,23347,-16359,25577,-20592,-23121,-26613,-6596,32394,-21796,16466,24694,25591,9628,-21008,8938,3613,28651,6575,-16945,-6538,30456,-18138,-14108,7369,-6263,4207,17465,143,7964,-16400,14619,15437,24550,-23974,28210,6761,26133,-55,18274,-23315,-5746,-25343,-2741,4290,18967,2944,-28714
,-2593,-11239,30181,31856,-18343,-29530,32756,-8618,5276,6079,-15834,20861,-11344,-2757,-12974,-16289,5581,-25542,-29851,3549,10201,3192,-23912,-739,10732,2834,-31221,-25486,9745,-22061,30210,-17120,12976,27665,22516,-1627,-1002,-10117,-25124,-27956,9929,-2170,-6998,12352,17030,-29078,31130,26002,-30217,-1633,29341,-3871,11023,-7780,-10362,10812,8050,23102,19584,15807,-10134,-19973,6735,26679,8006,22263,-16837,-5136,-6594,13961,12403,-29857,-862,-22551,25010,-4600,-9389,3370,-22247,4777,3000,8381,24191,18813,8036,21971,22274,-21126,25754,7054,13123,16333,-2327,-15873,-11457,28685,16143,-13126,7080,-12888,22074,-31269,-21150,-23134,14979,26161,-17005,-16279,995,23835,24225,1095,-11491,22833,27111,-13512,26031,1937,31166,11905,9662,-28717,11517,-29087,5350,9665,20388,14022,26426,12973,-20607,6183,4184,-30750,20347,-15521,29919,-24001,-2506,-11768,12530,12,-14011,-10123,-22308,6005,15462,-22710,23809,31820,11748,-17047,15503,21961,-31120,12426,-1385,-686,-22212,-19463,13362,30206,26065,2791,13217,25733,-13821,-6885,-21034,-2104,-17490,5982,29908,5299,12902,-5539,16354,758,-27308,19112,25789,-11976,14497,140,-11974,-15335,-31053,29314,-10138,29324,-14230,-22871,1272,-12469,30258,-27716,-23378,19123,32531,-10901,-146,5615,3457,-11538,5632,-22506,-21228,11420,21785,70,-8215,13161,-1590,-2958,-6062,23018,-25114,10619,221,14959,-8015,-23811,2159,-1,-15402,-20786,8247,-5055,21693,-14272,7336,2057,-11442,-27479,-30988,7560,20140,-31216,5521,32165,-19500,18059,16828,27356,26460,-3948,-8753,-3162,3729,-18757,22466,31619,14850,32318,-24543,3006,3265,3997,20630,12017,1704,-24137,2656,-18389,27105,14977,26298,-3019,23190,32764,-14272,3786,5614,-25791,-20863,-21329,-13594,-21418,19530,9572,28354,-13452,16824,-24795,14370,16443,3129,20347,-2511,-26973,14576,26431,-29965,-9906,-17153,8462,20116,5420,-15219,-22235,-25967,-2949,-31956,28215,27647,-32278,6653,-6077,23284,-14987,-19988,-32529,-31087,-19791,23351,3414,-16358,4852,-537,31925,-24335,-24077,-5346,5556,7174,9101,14738,-18884,
10863,-26752,14930,-10189,17698,-32234,-23159,-13141,-17690,14076,11831,15897,14329,11554,-17114,-10986,21402,22104,2276,-23301,-16512,6641,-3520,25205,8617,23907,-21879,27056,15467,10086,-6501,-11272,4494,3895,14391,-30649,-5580,-17570,14757,-12813,32487,-28121,16705,20399,-7997,-8599 diff --git a/tensorflow/lite/micro/integration_tests/seanet/pad/pad15.tflite b/tensorflow/lite/micro/integration_tests/seanet/pad/pad15.tflite new file mode 100644 index 0000000..6f76c8c Binary files /dev/null and b/tensorflow/lite/micro/integration_tests/seanet/pad/pad15.tflite differ diff --git a/tensorflow/lite/micro/integration_tests/seanet/pad/pad15_golden_int16.csv b/tensorflow/lite/micro/integration_tests/seanet/pad/pad15_golden_int16.csv new file mode 100644 index 0000000..d817d4a --- /dev/null +++ b/tensorflow/lite/micro/integration_tests/seanet/pad/pad15_golden_int16.csv @@ -0,0 +1 @@ +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,-29492,9079,6198,11079,-27037,26868,1293,5042,10958,-18430,-11170,-28558,-7487,20579,4872,-6082,-14748,4983,-23752,-31206,-30799,31796,5761,10143,4653,22831,-30515,11073,32576,-26049,2355,346,16690,-30394,-6103,-18145,-31282,-26854,-20319,-25943,-11261,15218,21992,-15358,-31264,28007,17486,25560,9889,931,-9803,24237,-3243,-8007,6166,-27529,3289,-26594,9151,-26765,-27376,26168,7074,-590,31781,3063,9723,18153,-24757,-1019,-30047,-10007,-11481,-10058,21523,-4179,17766,6490,-25042,8014,14742,28468,-22401,26196,16796,7008,-21481,20442,-847,-1243,-1204,7738,-1478,22911,2003,-22301,-10250,-18885,-11281,-32550,14223,9660,11383,18415,-27369,-19548,-23507,20978,12445,-14696,6841,-22147,1666,31124,-17262,27718,1109,-10267,-19339,-13099,11414,-328,23633,-4302,29205,22662,25249,-2989,-140,23028,-18038,-7261,14842,1749,-5699,-14305,-26705,-17954,-26231,5670,14946,29686,24951,-21320,-26520,-26891,-20270,-27047,8675,30572,-5502,19936,-31475,15068,5232,8734,-13275,26665,24737,-22971,28897,-19532,12350,-11900,8190,25138,19508,-25601,-11860,16713,-9
854,-22216,-20300,-9446,197,4940,-20650,-2610,17387,-31786,13077,29439,-18495,32045,-25493,8148,-20731,7570,14436,-27879,-30691,18803,-15879,-21194,-18839,-12611,-16413,5304,-25996,3225,-32492,23077,4909,-28034,-21254,768,8251,1597,2633,-6049,-25161,-23810,19307,4283,11035,562,-18857,-12333,8435,24158,-30740,-14182,-19475,14022,-5456,4297,-2473,-5688,22741,23072,-15698,-24265,8182,-6607,26127,26763,18155,9169,27033,-13796,10570,23780,24202,-20489,-8959,-158,-26364,8696,12262,-5368,-18674,-6459,-16028,31669,-16183,-22527,-31098,-17698,1933,-11447,-22754,-7945,2320,11879,18950,-3512,9422,8133,11082,-32007,-2488,-15398,24493,-8479,29246,11720,-21884,-16189,9628,21739,-25408,-8071,27980,-11301,8867,23962,9368,-3461,-24869,28742,17920,24780,-24178,32364,-11585,-11983,31602,11736,-20305,-19138,31506,21391,21740,-16848,31889,23537,15594,10174,-4420,28461,4098,30186,-84,-8371,-13315,14629,-15856,10916,-9766,-20778,-24678,-31998,-22689,6058,6077,25053,7280,-12112,14941,9449,6319,-27658,29275,-24219,24410,-30949,-3673,-7420,-24580,-31183,-15498,6786,-10596,-21531,-20139,-6700,1092,-14607,29625,27951,-19467,-13109,8943,-31646,-3350,5547,19848,17636,22378,14455,4191,24237,16385,2725,1194,-31925,-11109,-6641,32497,-18495,-3220,16481,-13306,27646,-15777,-13855,1696,24172,29920,-29914,-4079,12287,-9271,1299,-11486,-4721,-2002,26363,-7736,8408,-1663,-17681,23396,125,-28918,-289,31999,936,-7803,5957,28266,10014,1184,15972,11054,-28001,-4880,2226,5401,21238,-20826,24750,-26839,-8679,26285,-22632,-21779,-3634,-16911,16102,-18022,6173,-29074,17902,2877,-32030,32254,6006,11610,-14700,12999,-448,-3214,9342,-14610,-15812,18101,-10607,28189,-21615,15432,13484,24585,-7628,-29212,17909,-17199,-17523,-2336,-18719,20995,746,-22983,30696,4699,-24604,-17679,16521,-27796,10806,-4030,30103,-9482,-13813,10012,-30490,-20243,28995,-20801,-11135,-31010,3969,25447,564,-234,-19814,19580,-16993,19783,24594,13921,-4787,21367,21708,-17296,-5676,24684,31160,-1106,-26303,-16022,29611,6014,28912,-18789,-23429
,-3251,-1178,-25817,7100,16993,14504,-4294,-15134,-169,-32038,-31441,17551,-32600,-8437,19129,-5318,23880,10085,27027,5729,-29782,-26212,-8976,14738,32672,-20593,903,12180,6447,30855,-16936,-7096,-13370,3781,4102,-23095,-15641,-31899,15204,15199,-12191,14225,-31287,1420,604,2214,2093,-19084,6805,-30737,30958,6330,12354,-14472,-10172,-20778,13333,-179,13872,13199,-13781,-26814,-11120,27256,-17195,13033,32378,-29508,16159,-21301,-472,26135,28955,4594,14192,26977,-16587,-27143,-13588,-4701,-16369,14282,29011,29074,30016,14722,18348,18230,-3080,-1479,-31716,27142,-20175,10144,-23165,9274,29895,-19072,-18014,-17570,7950,20840,13992,-15503,-3691,-2003,30078,19109,-26633,-31396,3966,-19129,3197,5005,19673,5287,18024,4538,2006,-18932,10507,-438,23639,-28023,-18061,9341,-13823,-23,-1604,27910,23185,-6935,-14176,-13850,1826,13698,-3079,-17155,26253,15704,-27889,-7670,25189,-2316,-15527,-17782,-20544,25856,11550,12017,-15267,-16439,31790,5262,-402,22686,-2223,-23470,18701,2681,-28503,-29490,29970,16397,10843,19175,-22900,29467,-10040,-20332,4375,12100,-7870,-10729,-6730,-18852,-6818,26221,-10746,-23778,4199,-5167,-13886,17763,-15055,-20809,-1687,3705,1271,-6136,-10944,30791,15483,-26513,11289,-9746,6419,5615,-8329,-18140,13245,-10279,-10177,-18778,-19157,10679,-10192,-1124,10326,23065,4984,19402,-16583,23750,11906,14296,-31076,18809,14196,1105,-9676,5911,-27657,112,-9021,12062,-16129,-9406,23029,-21649,2202,-28572,-30261,5094,831,-18720,-8605,-2772,-29558,-3297,10274,8641,-28549,2803,21641,5644,-22796,-21062,-10057,20037,-30612,1367,-11153,-26231,7230,-27051,7139,-31265,4714,-9096,8191,1372,-6564,1629,-2242,11606,-16255,-14479,25033,31069,-10490,-8192,18194,25378,-15885,18823,-18875,4214,32001,18008,-13021,15296,17085,14349,-21244,-188,20863,27516,-27115,28657,-3864,6839,-11469,18232,-23544,-2401,-7932,-30810,-19318,-9787,-5168,-12946,3979,-6811,-12870,23989,-10239,5229,22418,9886,5616,-2249,-23087,23726,19962,-18541,-16774,-20441,30852,18879,-26815,-4969,-32271,9268,5131,-267
41,28250,-24949,-9158,13021,20502,-30344,19950,-13870,-29561,-23332,5614,21093,19438,-23880,-29133,22491,20548,-603,-31556,-12968,15034,-19286,32604,-31954,-6644,25031,-9698,22268,12138,-17637,-6070,16107,27587,-3599,-2993,2967,25086,23665,-20820,-2242,-11088,3158,-17618,-12910,6750,14432,17822,-515,4012,-11259,4881,5590,-9937,32315,3341,7076,-22845,20988,14817,-15414,-28406,10790,-27612,3634,-7081,21061,-21510,4329,-16234,-29639,-15025,7539,4081,-31790,-24903,31616,-6252,31001,-30309,-31160,14865,24724,-9442,-32267,-13654,5818,-28938,14659,14567,-14293,31118,-32056,354,-14522,11574,-10732,-24628,18813,-23017,-16209,18549,-18462,-27695,23919,14244,10792,10976,-26979,-5176,-31855,15931,17645,-13634,7350,-28571,32369,30811,-23854,-16774,731,14451,22399,-14318,-17956,-22778,8277,-26159,30816,-8906,16331,13672,-3409,-30894,-11228,4707,30117,-19589,24272,10085,-4749,-24606,32356,-3530,21032,19966,-9501,7224,14734,-8150,-7142,-22931,27737,31457,-3938,23990,-31894,-978,-18799,17099,21580,-17189,-14089,2725,-32318,-7972,23771,5317,-25752,15457,13740,-26368,11546,31554,562,22119,29780,27372,5921,3288,7013,-1042,31449,-8114,-12865,19622,21502,5918,19121,13655,20220,-30998,29554,26416,-19507,28094,3467,5899,-11277,32153,-5166,-17986,-16357,-24586,-21118,29456,-2323,20663,22893,15503,19534,-20982,-29111,7912,11478,-17542,8558,-15499,3746,26523,-2700,19759,20238,-7194,15224,-28617,1696,30208,14669,-16875,14814,18721,-20559,-30383,-22471,-3036,-1100,-23781,-20021,5057,19248,2350,4198,51,32381,10676,7793,-23373,5533,15492,-5950,11501,9326,-16568,15588,-28177,11609,6351,651,-32673,12517,8327,15390,11004,-25258,-8588,-26982,-29165,16465,22107,-4650,-12183,-5927,23435,26337,32423,115,16501,7989,22889,13927,-17298,-13608,18765,19675,21848,-21380,-28357,-32300,29220,-23798,-11317,-22139,-5135,27772,-23125,30839,-32165,32406,31116,26995,-22735,27323,-7083,-16438,2206,18388,-30704,-2516,-29774,20691,-10470,13228,6907,-17590,-26561,-28282,31505,-32038,-14269,10137,-27954,21115,19801,6580,
4865,-32297,25175,-6044,6785,10767,16937,-6940,-27376,-32354,-2073,9661,-16776,6767,-2227,11567,-8752,14139,-1607,-22702,27170,13276,-10427,20751,23418,19366,-29333,10043,-17967,-16848,-5212,-23516,15687,3672,-20122,-15932,-22806,-12565,-21454,-21585,-30138,14205,26999,-8317,20391,-21148,619,5801,27605,-31592,17783,12157,26719,9721,18533,2123,-10607,-13848,12185,-2937,-26785,-1686,29918,-18150,-22235,-5036,20463,-555,-7719,-15089,29036,-25093,-1460,-14596,-2905,-936,-4673,2351,-6872,-11475,-25271,-1905,26177,-19905,-10580,21557,16552,-15623,32423,-7292,-19657,7612,28196,-29818,-13943,14536,-3249,-11080,27112,-337,26053,19841,-21809,-2248,-7758,-5541,2541,-8644,30405,27378,11252,27675,3889,-4732,13576,-7717,-6558,-3274,-21259,-27148,-18833,21770,2322,-15860,26543,481,13840,10889,30961,-14888,-19197,4354,-4455,-25194,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 diff --git a/tensorflow/lite/micro/integration_tests/seanet/pad/pad15_input0_int16.csv b/tensorflow/lite/micro/integration_tests/seanet/pad/pad15_input0_int16.csv new file mode 100644 index 0000000..a442aa2 --- /dev/null +++ b/tensorflow/lite/micro/integration_tests/seanet/pad/pad15_input0_int16.csv @@ -0,0 +1 @@ 
+-29492,9079,6198,11079,-27037,26868,1293,5042,10958,-18430,-11170,-28558,-7487,20579,4872,-6082,-14748,4983,-23752,-31206,-30799,31796,5761,10143,4653,22831,-30515,11073,32576,-26049,2355,346,16690,-30394,-6103,-18145,-31282,-26854,-20319,-25943,-11261,15218,21992,-15358,-31264,28007,17486,25560,9889,931,-9803,24237,-3243,-8007,6166,-27529,3289,-26594,9151,-26765,-27376,26168,7074,-590,31781,3063,9723,18153,-24757,-1019,-30047,-10007,-11481,-10058,21523,-4179,17766,6490,-25042,8014,14742,28468,-22401,26196,16796,7008,-21481,20442,-847,-1243,-1204,7738,-1478,22911,2003,-22301,-10250,-18885,-11281,-32550,14223,9660,11383,18415,-27369,-19548,-23507,20978,12445,-14696,6841,-22147,1666,31124,-17262,27718,1109,-10267,-19339,-13099,11414,-328,23633,-4302,29205,22662,25249,-2989,-140,23028,-18038,-7261,14842,1749,-5699,-14305,-26705,-17954,-26231,5670,14946,29686,24951,-21320,-26520,-26891,-20270,-27047,8675,30572,-5502,19936,-31475,15068,5232,8734,-13275,26665,24737,-22971,28897,-19532,12350,-11900,8190,25138,19508,-25601,-11860,16713,-9854,-22216,-20300,-9446,197,4940,-20650,-2610,17387,-31786,13077,29439,-18495,32045,-25493,8148,-20731,7570,14436,-27879,-30691,18803,-15879,-21194,-18839,-12611,-16413,5304,-25996,3225,-32492,23077,4909,-28034,-21254,768,8251,1597,2633,-6049,-25161,-23810,19307,4283,11035,562,-18857,-12333,8435,24158,-30740,-14182,-19475,14022,-5456,4297,-2473,-5688,22741,23072,-15698,-24265,8182,-6607,26127,26763,18155,9169,27033,-13796,10570,23780,24202,-20489,-8959,-158,-26364,8696,12262,-5368,-18674,-6459,-16028,31669,-16183,-22527,-31098,-17698,1933,-11447,-22754,-7945,2320,11879,18950,-3512,9422,8133,11082,-32007,-2488,-15398,24493,-8479,29246,11720,-21884,-16189,9628,21739,-25408,-8071,27980,-11301,8867,23962,9368,-3461,-24869,28742,17920,24780,-24178,32364,-11585,-11983,31602,11736,-20305,-19138,31506,21391,21740,-16848,31889,23537,15594,10174,-4420,28461,4098,30186,-84,-8371,-13315,14629,-15856,10916,-9766,-20778,-24678,-31998,-22689,6058,6077,25
053,7280,-12112,14941,9449,6319,-27658,29275,-24219,24410,-30949,-3673,-7420,-24580,-31183,-15498,6786,-10596,-21531,-20139,-6700,1092,-14607,29625,27951,-19467,-13109,8943,-31646,-3350,5547,19848,17636,22378,14455,4191,24237,16385,2725,1194,-31925,-11109,-6641,32497,-18495,-3220,16481,-13306,27646,-15777,-13855,1696,24172,29920,-29914,-4079,12287,-9271,1299,-11486,-4721,-2002,26363,-7736,8408,-1663,-17681,23396,125,-28918,-289,31999,936,-7803,5957,28266,10014,1184,15972,11054,-28001,-4880,2226,5401,21238,-20826,24750,-26839,-8679,26285,-22632,-21779,-3634,-16911,16102,-18022,6173,-29074,17902,2877,-32030,32254,6006,11610,-14700,12999,-448,-3214,9342,-14610,-15812,18101,-10607,28189,-21615,15432,13484,24585,-7628,-29212,17909,-17199,-17523,-2336,-18719,20995,746,-22983,30696,4699,-24604,-17679,16521,-27796,10806,-4030,30103,-9482,-13813,10012,-30490,-20243,28995,-20801,-11135,-31010,3969,25447,564,-234,-19814,19580,-16993,19783,24594,13921,-4787,21367,21708,-17296,-5676,24684,31160,-1106,-26303,-16022,29611,6014,28912,-18789,-23429,-3251,-1178,-25817,7100,16993,14504,-4294,-15134,-169,-32038,-31441,17551,-32600,-8437,19129,-5318,23880,10085,27027,5729,-29782,-26212,-8976,14738,32672,-20593,903,12180,6447,30855,-16936,-7096,-13370,3781,4102,-23095,-15641,-31899,15204,15199,-12191,14225,-31287,1420,604,2214,2093,-19084,6805,-30737,30958,6330,12354,-14472,-10172,-20778,13333,-179,13872,13199,-13781,-26814,-11120,27256,-17195,13033,32378,-29508,16159,-21301,-472,26135,28955,4594,14192,26977,-16587,-27143,-13588,-4701,-16369,14282,29011,29074,30016,14722,18348,18230,-3080,-1479,-31716,27142,-20175,10144,-23165,9274,29895,-19072,-18014,-17570,7950,20840,13992,-15503,-3691,-2003,30078,19109,-26633,-31396,3966,-19129,3197,5005,19673,5287,18024,4538,2006,-18932,10507,-438,23639,-28023,-18061,9341,-13823,-23,-1604,27910,23185,-6935,-14176,-13850,1826,13698,-3079,-17155,26253,15704,-27889,-7670,25189,-2316,-15527,-17782,-20544,25856,11550,12017,-15267,-16439,31790,5262,-402,22
686,-2223,-23470,18701,2681,-28503,-29490,29970,16397,10843,19175,-22900,29467,-10040,-20332,4375,12100,-7870,-10729,-6730,-18852,-6818,26221,-10746,-23778,4199,-5167,-13886,17763,-15055,-20809,-1687,3705,1271,-6136,-10944,30791,15483,-26513,11289,-9746,6419,5615,-8329,-18140,13245,-10279,-10177,-18778,-19157,10679,-10192,-1124,10326,23065,4984,19402,-16583,23750,11906,14296,-31076,18809,14196,1105,-9676,5911,-27657,112,-9021,12062,-16129,-9406,23029,-21649,2202,-28572,-30261,5094,831,-18720,-8605,-2772,-29558,-3297,10274,8641,-28549,2803,21641,5644,-22796,-21062,-10057,20037,-30612,1367,-11153,-26231,7230,-27051,7139,-31265,4714,-9096,8191,1372,-6564,1629,-2242,11606,-16255,-14479,25033,31069,-10490,-8192,18194,25378,-15885,18823,-18875,4214,32001,18008,-13021,15296,17085,14349,-21244,-188,20863,27516,-27115,28657,-3864,6839,-11469,18232,-23544,-2401,-7932,-30810,-19318,-9787,-5168,-12946,3979,-6811,-12870,23989,-10239,5229,22418,9886,5616,-2249,-23087,23726,19962,-18541,-16774,-20441,30852,18879,-26815,-4969,-32271,9268,5131,-26741,28250,-24949,-9158,13021,20502,-30344,19950,-13870,-29561,-23332,5614,21093,19438,-23880,-29133,22491,20548,-603,-31556,-12968,15034,-19286,32604,-31954,-6644,25031,-9698,22268,12138,-17637,-6070,16107,27587,-3599,-2993,2967,25086,23665,-20820,-2242,-11088,3158,-17618,-12910,6750,14432,17822,-515,4012,-11259,4881,5590,-9937,32315,3341,7076,-22845,20988,14817,-15414,-28406,10790,-27612,3634,-7081,21061,-21510,4329,-16234,-29639,-15025,7539,4081,-31790,-24903,31616,-6252,31001,-30309,-31160,14865,24724,-9442,-32267,-13654,5818,-28938,14659,14567,-14293,31118,-32056,354,-14522,11574,-10732,-24628,18813,-23017,-16209,18549,-18462,-27695,23919,14244,10792,10976,-26979,-5176,-31855,15931,17645,-13634,7350,-28571,32369,30811,-23854,-16774,731,14451,22399,-14318,-17956,-22778,8277,-26159,30816,-8906,16331,13672,-3409,-30894,-11228,4707,30117,-19589,24272,10085,-4749,-24606,32356,-3530,21032,19966,-9501,7224,14734,-8150,-7142,-22931,27737,31457,
-3938,23990,-31894,-978,-18799,17099,21580,-17189,-14089,2725,-32318,-7972,23771,5317,-25752,15457,13740,-26368,11546,31554,562,22119,29780,27372,5921,3288,7013,-1042,31449,-8114,-12865,19622,21502,5918,19121,13655,20220,-30998,29554,26416,-19507,28094,3467,5899,-11277,32153,-5166,-17986,-16357,-24586,-21118,29456,-2323,20663,22893,15503,19534,-20982,-29111,7912,11478,-17542,8558,-15499,3746,26523,-2700,19759,20238,-7194,15224,-28617,1696,30208,14669,-16875,14814,18721,-20559,-30383,-22471,-3036,-1100,-23781,-20021,5057,19248,2350,4198,51,32381,10676,7793,-23373,5533,15492,-5950,11501,9326,-16568,15588,-28177,11609,6351,651,-32673,12517,8327,15390,11004,-25258,-8588,-26982,-29165,16465,22107,-4650,-12183,-5927,23435,26337,32423,115,16501,7989,22889,13927,-17298,-13608,18765,19675,21848,-21380,-28357,-32300,29220,-23798,-11317,-22139,-5135,27772,-23125,30839,-32165,32406,31116,26995,-22735,27323,-7083,-16438,2206,18388,-30704,-2516,-29774,20691,-10470,13228,6907,-17590,-26561,-28282,31505,-32038,-14269,10137,-27954,21115,19801,6580,4865,-32297,25175,-6044,6785,10767,16937,-6940,-27376,-32354,-2073,9661,-16776,6767,-2227,11567,-8752,14139,-1607,-22702,27170,13276,-10427,20751,23418,19366,-29333,10043,-17967,-16848,-5212,-23516,15687,3672,-20122,-15932,-22806,-12565,-21454,-21585,-30138,14205,26999,-8317,20391,-21148,619,5801,27605,-31592,17783,12157,26719,9721,18533,2123,-10607,-13848,12185,-2937,-26785,-1686,29918,-18150,-22235,-5036,20463,-555,-7719,-15089,29036,-25093,-1460,-14596,-2905,-936,-4673,2351,-6872,-11475,-25271,-1905,26177,-19905,-10580,21557,16552,-15623,32423,-7292,-19657,7612,28196,-29818,-13943,14536,-3249,-11080,27112,-337,26053,19841,-21809,-2248,-7758,-5541,2541,-8644,30405,27378,11252,27675,3889,-4732,13576,-7717,-6558,-3274,-21259,-27148,-18833,21770,2322,-15860,26543,481,13840,10889,30961,-14888,-19197,4354,-4455,-25194 diff --git a/tensorflow/lite/micro/integration_tests/seanet/pad/pad16.tflite 
b/tensorflow/lite/micro/integration_tests/seanet/pad/pad16.tflite new file mode 100644 index 0000000..45af253 Binary files /dev/null and b/tensorflow/lite/micro/integration_tests/seanet/pad/pad16.tflite differ diff --git a/tensorflow/lite/micro/integration_tests/seanet/pad/pad16_golden_int16.csv b/tensorflow/lite/micro/integration_tests/seanet/pad/pad16_golden_int16.csv new file mode 100644 index 0000000..790a68e --- /dev/null +++ b/tensorflow/lite/micro/integration_tests/seanet/pad/pad16_golden_int16.csv @@ -0,0 +1 @@ +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,-10060,302,24299,-31273,-13139,7751,7954,-9429,-8170,12718,4968,28358,-3442,27352,-20946,-90,20057,-1988,-22619,-15044,-17193,-6503,-20859,18220,-7479,8786,-3747,-27256,-9720,-21402,-7472,27005,-27571,25405,8859,-31372,30137,-23996,-12791,-28391,-30826,8368,-21652,-6797,25744,32269,-23959,21159,-24948,-24223,-221,3250,-11424,-9805,-20548,11852,-16554,-5935,-10057,5265,30156,27902,-7990,-20995,26658,-29313,-28658,-27209,19010,32545,-2195,6445,-2331,-12932,27940,-20249,-997,-16525,20065,-26653,30391,-16402,11480,13984,13681,-23082,18828,5296,13293,-26814,27987,27626,3385,-14141,2881,13248,-27520,-6925,-32036,-17439,-15231,-23607,-31692,-25765,16194,-865,-25195,-31412,-26034,23236,25114,-9948,26588,-22688,-2110,11339,-31545,-20098,30503,-2725,3619,-8438,-5569,-22110,23403,-15137,-28360,-13615,254,12,14629,-7080,16983,19701,-8216,15034,901,14662,-14497,-18071,-15189,-2881,13367,22776,1550,-18029,-11009,20067,-15265,-26254,-10318,8032,-7926,13250,-20307,18066,-14435,-15029,6002,-10250,-9126,30315,-14446,-3192,5862,-5583,29377,-2011,30497,-20775,-18585,-28923,12762,-26187,12329,-13512,25593,26033,18513,30281,-25612,23487,5281,31023,-10200,-6931,4110,-27568,27813,-2783,14160,11282,-564,20611,4410,20709,-14213,-23315,24532,-11266,-29441,5131,24439,30837,-1806,-5984,6942,-16166,-26732,21704,27833,-12,30154,-28275,21218,28377,-5839,-19189,-3532,-10489,-9997,-23547,-18616,-14293,1996,12039,18102,6543,-19371,19741,-28506,20629,425
9,-12716,22607,-27708,2419,27246,12416,-4931,15013,-23378,-2832,-16164,-14722,-31642,-31408,23847,-11853,-27816,-16676,-227,-901,-6571,-24692,-14274,-22606,14985,-16368,-13592,18678,12918,14410,26651,10912,-31237,-19989,-10985,-17315,31324,16587,-8897,-25406,-26684,22090,547,-19258,-15733,8292,-25947,27907,26460,25810,-7385,21561,30880,22619,-16811,-12105,-20212,-12851,4444,4100,9846,-7172,-16002,-9761,-1114,3684,-7926,-15726,18142,7995,19306,-7839,-7590,28788,-6497,16665,-9172,-24809,-666,-12554,-29002,16300,-25664,-7776,-13208,-19919,-32289,14512,21813,12116,5128,32417,-5564,12333,-15816,-27268,-20389,-6985,17707,-26690,25185,21501,-10001,-5747,16769,7225,-1914,-11553,-10395,-10294,-32187,-11124,27658,-26379,30136,-713,2197,17879,19561,-1671,3457,14453,-10928,-31117,-14881,-12553,28776,-13498,29216,5761,3440,13134,-10420,9272,9663,-32412,-25589,1452,10023,18572,-4337,20749,-30047,-6744,11542,-18283,-28950,-26541,-10036,26263,2816,23187,4920,24901,14435,22247,-22536,19922,32009,18343,30514,24821,-2358,5935,15335,3551,29710,27216,15374,9916,-11316,11429,1249,-27010,3392,15649,-26475,974,-18840,-8289,6924,4092,-30802,27271,13006,108,11321,23868,10019,-8254,12429,22144,29045,-2882,5692,2999,12023,12246,-27687,-17876,5514,9056,-29341,4828,23987,29882,-15303,1059,-24964,23865,-11600,-30822,-617,-25029,-7615,-1174,5656,-25539,31985,20639,-29009,-108,-14986,-23242,4512,-13368,5973,-139,8381,23029,-13267,25527,5674,-26906,-7954,-30009,-28155,1116,20203,1552,12274,24563,9948,15054,5323,-2938,-10645,-8542,25669,27321,25884,4205,-30842,-27189,7045,-20052,-20917,-11370,29414,-11847,25677,-2744,-26366,4390,19120,8856,-32395,-3962,21595,-18466,-20731,2229,27803,21903,8898,25103,14149,1124,21701,-26976,20814,21416,8791,30121,31669,12754,11797,-19831,11305,3242,-30602,-461,-6582,18075,-13479,31698,-15210,11578,13999,-24167,-32167,24000,24574,-27670,-4038,29905,16871,28348,-1441,21253,-17896,17731,-6460,-6119,-24793,-28847,31588,-22585,-12249,30997,27098,5308,-10584,18675,-7814,-91
27,-3685,-3741,-21122,-17278,-15521,31657,-11738,-24113,-2543,25450,-22972,-30992,13536,-29210,-17151,-1336,-13413,22642,17271,16225,-26885,-2916,-25112,-26572,18994,13287,24220,2944,31317,-15717,-15707,12340,1912,-31635,17618,-11308,19954,-21326,31052,28804,2321,-4756,26777,19422,-8117,9263,-21629,19767,16102,18733,22709,10246,-11165,-3966,-1943,-8601,18823,-4345,77,-10494,-32500,-4912,17479,8778,20034,-4341,-24400,13485,2699,19251,1248,17873,12746,-9694,-9937,11113,14149,-10538,21652,23753,3040,-3187,-19462,-32633,17295,27364,27858,27325,1151,-18669,2734,31469,29561,17275,-3132,32362,-13666,9281,-32152,14483,-3595,21680,27165,27478,10324,-10575,32733,32387,-28414,18533,-8598,8159,-31983,-11778,-26310,-7667,24556,18540,4981,-27362,-12341,22760,30992,-27060,7871,9001,-12454,24742,-7094,16770,-84,32347,25527,17695,8389,19697,5531,-31235,7911,-21429,-29984,-5475,21166,-939,9027,-649,-31206,-1887,5419,-2229,-24043,18943,29680,-4627,-13494,7469,4388,-31033,-32133,10264,31508,20667,31641,4744,18815,22357,6902,30209,12858,-30453,2206,-10337,13301,-16931,8553,16953,-2557,24894,-11113,-26666,22152,1612,18470,30103,-7153,4800,533,-5791,-22930,9901,-2467,4895,-5516,31580,2375,-2426,9847,4331,30033,-1779,10032,7430,12422,17229,10089,24325,648,-22760,-8572,-4871,16032,-20478,-15480,25828,32755,-13813,7433,-5674,-26564,1861,-12524,3383,-27230,-12656,-2556,-766,-31226,6937,4652,-12175,-2772,-21712,-1463,-25200,20373,14199,14805,-31418,29463,28646,13389,-27762,-24876,4614,-31013,-9859,1432,-22405,-31651,-6813,2830,23622,4096,-30276,29083,-20217,25956,32190,5107,-11070,13981,26914,-25459,-9963,-22593,-1341,-27297,-11264,19935,-2411,-29780,8801,30082,6732,32292,29786,-17368,26541,19094,-14947,13037,-2114,-28722,29927,-25551,17086,-11838,-25852,-28226,-8732,9132,-20289,16337,-26452,17997,24461,25055,336,3691,-30349,-15204,18244,-30843,-9194,-10018,-12449,-6774,9336,12453,28607,14025,24116,-7967,-32463,-22944,-3717,29043,-3011,-1396,27978,22566,17104,-30524,-25453,25892,-12311,23297,1
542,-17064,-7792,23964,-22520,32244,-19044,-30247,-26353,29505,17845,6140,13581,10699,7925,31867,10001,18694,10056,17118,5984,15854,-18139,13773,-21697,16463,-5390,19390,24697,1850,-7612,865,8093,-23776,18548,14391,-11582,-1246,-11115,5749,1443,-17046,18901,-12878,-6672,1133,26631,21729,32301,1257,-2330,-26851,14946,-19605,4647,23196,-25125,-16051,-13001,15441,-5795,-18321,-30217,-31467,17365,16854,-6770,-12040,-25861,20114,-24460,-11845,1157,32471,29212,-13040,19619,10513,32304,21241,-21140,15961,23286,-30536,30912,23669,-26428,-293,-7809,-12366,32005,-23784,-14793,17460,-13995,24805,-29906,20188,27552,2316,-16643,-11247,-27984,-32435,30274,-29551,-31795,25539,-17433,-31000,-21559,-30921,18343,29505,12514,-26794,-10678,-26549,-23856,20675,-6874,32700,19145,-18780,15751,-24872,-28711,-13402,-10284,289,30228,-281,-29377,-14105,-32484,-3641,-28008,-24517,-26377,-23032,21734,19263,5708,-15936,8515,10701,-6351,-13335,-14746,-11124,-13997,-2362,13284,-23682,12193,15155,21541,-9169,-6979,-16250,6482,3811,-4301,8559,-21652,7269,8647,10682,-22625,-4739,16781,30043,28966,-5825,-16599,-32262,-20983,19026,17875,18786,1239,12510,3763,-11710,-4684,22609,25754,25030,29105,123,17194,-10228,14134,-12016,3917,-23852,22069,-23373,13443,31102,27717,22290,2662,-13728,-32190,-2422,-18444,25575,-16420,-17784,-23090,31362,-8502,28589,15214,-13726,26387,28981,29011,-29233,-19434,-14279,11714,-12872,-9714,-13286,22548,29732,1221,-25260,-19220,13499,-5208,11673,-10158,-32113,-7460,18846,-25959,-15156,-19200,-7703,32658,-8276,31890,-15019,14564,-2641,-17348,10830,20072,-5331,-17418,-16274,-27993,-16176,24222,25479,9849,28573,5819,-17542,24011,16117,13199,-9316,-29233,2021,-7553,-12753,-20459,28092,24962,-1202,27312,-20818,-16036,21054,-8806,292,-12536,13847,-3307,5687,-20416,-10189,-19201,-13659,15479,-21094,-2251,-8498,-28367,12368,15160,8398,-25492,-2560,-25884,-23811,-11263,24699,-19635,22587,-19788,6777,31680,-32249,-6884,-10502,-25523,-23259,7271,-9341,-23826,733,2229,13967,-24955,5555,-
10391,4288,14733,25276,6729,9593,5035,1505,-26167,-26211,-32398,-545,-5371,-9877,10493,-8764,-13992,12006,-11821,15470,-12455,-519,14696,22526,32268,16739,-26950,32407,-9583,-17347,-29188,30270,7901,-32228,2167,-29797,27498,-21022,-2025,-28371,18075,-3998,27165,12390,21879,-11861,-9654,8526,-18394,23948,24337,-19914,-9027,-19734,-11690,18535,-24302,-5019,-22172,7801,-26114,-8620,13947,21806,-24105,30410,11283,-8317,2309,12082,23779,1658,22521,-1357,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,-13157,-20932,-8699,-3140,-32203,-22707,18968,-11774,12451,-20854,-9026,-31746,-18005,31377,24565,-10421,24324,3235,-30872,18562,-28658,4809,-13133,6931,-30315,26232,-6083,27372,-29659,-13034,31236,-27164,-26580,7424,13019,28106,4854,26385,-18892,31056,-17151,635,17952,25886,-3869,-8818,-32250,7204,-12258,-31225,-26321,13004,590,-25228,8331,-10032,30066,6614,-24969,-2227,-4599,13859,-15087,13765,1446,-2297,-15581,28132,12457,12828,22996,-26987,-3147,-26239,16201,-11146,-21143,3687,26600,-26539,25732,-12550,17605,5723,-18453,17371,4271,-23250,-28677,-1095,6558,11441,-4273,-7311,12746,-5580,-23281,-10382,-28980,-21792,18147,5350,-16802,-14063,-3023,325,-23686,19997,440,32685,-2761,-16471,-3985,-19821,8440,-26907,25658,-3782,21989,25545,29377,41,-6549,-31522,-11662,20595,13835,-25235,12697,28895,-29711,20574,-4697,-15409,-21607,17707,8736,2421,-6708,25958,-5976,2948,29923,-14880,15312,-21098,23433,15066,14299,8951,-17024,-20438,7431,17895,24067,-17564,3331,28645,-18708,3046,14590,1307,-26713,692,-32004,15899,-20809,-23050,-4178,8765,18437,19092,32258,-23674,8372,13635,820,14879,6897,29205,20506,11751,11726,17530,-12110,-28442,-16875,2135,5440,1012,-26658,-23337,27459,-14915,26341,29189,15441,20761,31379,19152,-9604,11021,12881,-10649,30077,23631,12089,24560,5100,22796,-22436,6964,19911,-1050,17594,-10090,32624,-2710,7834,1477,-20222,-7551,-5307,26546,-30914,-21895,24004,-3487,26960,-26919,902,-22034,3075,-22574,-23781,13988,-798,32723,-4085,19009,1070,18840,-251
04,-19578,2244,-15094,474,-30355,-32413,-22522,2751,-17542,-27383,12403,2077,-3459,-7964,27620,8866,17355,-11925,13002,30590,-31122,-4792,16622,-15329,-4351,-28575,16063,-2877,-17832,-32533,-15701,31421,29448,-31292,-5680,7193,29010,-23607,-19970,14283,-3352,-17400,9131,11683,-26330,-24744,-7000,16157,-18886,-25492,-28467,13788,25696,10152,-32074,26134,20159,4748,12887,17142,-22080,5671,31666,691,-15909,30984,17192,16148,30659,-13033,17374,-20665,-12717,21149,-20939,-17788,14683,-6431,-11145,22910,21260,-15722,-13818,7130,8971,7888,22285,13341,28800,-925,16742,-11856,10338,14950,798,-29496,28885,26570,5974,4481,12696,4240,-8438,-26666,15041,-18807,-29464,-30027,373,-10860,15449,-14484,-21621,-18710,31775,-11059,-29298,14125,-31646,-12669,24456,12048,-29580,-24641,8059,-32098,-5267,6110,-31406,-17562,24317,-744,23083,27417,-3948,4963,-11851,-23448,22504,-17713,-32007,24651,-21360,15226,-4502,30630,-22813,19381,22138,31415,-9754,4464,11456,31225,-26080,23961,8447,-8338,-26509,26831,-16604,6206,-3099,-20434,22460,4710,19693,-26,-21107,25140,2763,15944,2748,-25806,-21630,28983,10183,27773,-14485,2007,-11277,3580,-20583,4069,-29884,19085,-2851,-4723,5145,-7707,3890,-29111,14257,778,-4663,28964,-27260,15527,-18788,18389,3403,-25057,2805,6705,-20918,-16090,2918,-19809,19895,-20596,24369,28414,26070,28432,7059,-4129,13974,8024,24851,-8952,-30699,-14820,-17190,26819,9232,-15475,18838,-8145,-31702,28730,-29293,-16204,19643,18318,-32692,21520,29612,-21332,28407,-11503,13928,-31694,-29039,-22756,-31519,22909,-5826,-20113,21011,22387,-28203,9971,-25992,-32612,28621,3643,-7181,6451,18091,-24697,17321,24549,183,-12694,-4429,-5263,6681,-14802,-13723,26320,6768,-19665,-24151,-12000,-28232,15060,28426,-14874,25309,-23912,-21593,13351,17154,-15005,-32095,27267,23505,24063,26457,-10661,-6897,15526,2182,-11171,-26994,-14360,30141,31121,-23889,19702,465,-3489,-24309,-32743,24548,5887,-27757,-4556,-19684,4099,12023,-12295,-22808,-23280,26109,-25056,4784,-27528,-3260,-6523,-3396,-15333,193
90,14607,31623,-17547,-25783,22577,20143,13774,-24453,4919,11478,22995,5632,-31786,3830,21856,-25622,-9699,21734,5276,5967,1057,-24746,-31413,4054,-26341,-9741,-2616,-9395,17413,-17663,24767,25815,-22047,-25469,-19910,8548,22831,11713,1921,16491,-29794,18243,-2398,7466,9042,-23296,-4882,908,-26442,19055,18312,32108,26109,-13780,10952,32181,17393,-9006,-17064,12351,-28925,3311,16005,3073,-8072,9122,-16792,977,30429,32261,-28932,25988,26504,1608,-8758,-9532,-5583,5712,10045,-29053,-14821,19619,-17925,3770,-21606,19950,-27827,-27,-31435,3065,2913,23790,-11090,10078,31770,-32139,24220,14689,-19486,23338,-25646,31993,5266,24857,-31224,-12978,2648,-9411,17151,5352,-6942,12330,15831,23940,-26442,-23481,-2026,3696,26057,18537,-3049,20025,-31488,-4799,-7320,552,20499,11544,-18395,-24885,18345,20006,13598,19272,32461,-20891,-18199,24638,3285,-28101,11629,10574,-2571,18677,-7196,-27832,11819,16920,24804,-20350,-25463,-4719,-28000,-8847,-14333,-22685,7879,26712,-5495,9200,22328,3470,-30580,31203,25276,-1704,13660,-31692,13720,-5571,-9942,-17013,4695,15588,-15260,9640,11600,-2273,-23913,15415,-630,-12053,-24455,20721,-15941,-18984,16527,14847,-20465,17672,-19344,-17764,-27480,-10137,-31472,-4269,20031,-17706,6286,19559,-4731,23264,4707,-21086,23922,-3393,19195,-12674,28628,-30009,-1809,14048,-11045,32672,8458,-28938,-21647,28942,-5188,6605,3898,-12030,-18067,-29242,9792,19849,25593,-2561,25623,-31011,14365,10475,12468,20753,24282,-4748,-22287,24347,27631,28752,-12503,31543,28043,-31549,-16187,-12481,-21639,-20853,-17183,-27798,-11418,-32595,10943,10898,4426,6488,25410,-5240,-12536,-10364,28994,-8666,12447,3153,-20869,-22205,-2158,-31791,29247,4013,-20617,2343,22553,32138,10265,-13425,31585,-10558,29290,-15229,-75,30731,-16950,21177,-26547,20386,21715,18293,-4039,-64,7758,-16693,-15700,5241,-17828,14318,-17842,15356,-25231,-17656,14098,11169,13225,18331,30089,7351,-15530,22609,6224,15447,-10326,-32673,14374,24009,2988,11788,-9372,-24000,2018,4907,-14502,-742,26992,25701,10538,-34
60,3350,447,-28860,-22460,12575,27026,-6741,-17283,29648,30150,-14791,1407,25035,-8810,15287,-27656,17201,7389,-7205,20475,-13847,29671,-24002,14319,-5248,5494,-26672,-22390,10699,-23154,21956,27271,28868,-32281,20729,-25553,11955,24170,26216,16876,-8967,-22532,-6572,9381,29940,25413,-32625,-19033,-32420,-5180,28457,-26390,-3999,-30115,26673,-16607,1519,30264,19830,-634,15074,17218,28829,-3086,12602,30096,30216,12638,2226,335,-22951,11158,-21750,10813,19614,28198,-15103,-9081,31588,-2711,-15963,9081,-6322,1727,3512,-30878,-21116,31432,-5588,4483,-2257,-9415,-18606,-2591,-1507,-8472,-31593,19308,17805,21248,-10211,12829,-16366,10749,-13092,10009,-6219,2832,5830,12852,11436,12568,11925,8650,28563,25160,24661,-18426,31369,13188,-32301,-20596,-25813,30204,-19558,7694,6774,20862,16418,29588,19396,-3270,-6776,-28774,-26239,-4658,4184,3408,-24862,-19466,8823,27796,-10903,22958,-7583,-2189,31077,31358,17187,21029,19450,27495,-26853,-10985,-15255,-7621,-14474,-26114,-6281,-4248,32139,10767,13296,-19308,7904,-4802,7091,-356,-15967,31554,32027,13879,219,5441,7744,14845,-1268,28957,32545,3756,-13899,10044,21421,16762,-1858,-2342,20855,-30049,-24156,8529,-1267,-5210,-17126,17552,-2112,-20297,31564,-9674,6272,-9209,-29415,-31494,-17782,26164,18862,1185,-27329,2709,10897,-24078,18395,31868,-6327,19578,25771,25470,-22161,-11192,-28408,-26505,27313,24527,30030,24813,22355,13455,3732,-16225,-31384,17628,-20187,-14826,20686,-4912,-4329,18961,8044,-9725,19484,28241,-18369,-30146,-8453,-10905,16724,-7200,-9241,-21890,15919,-13089,-22302,7623,30490,-9844,20593,-30329,14899,9325,-9844,-16124,29185,19757,30449,4663,-10483,-32547,-25379,-6758,8409,23771,12819,31078,-7541,-11811,-17127,-9374,-1465,-31200,-29361,-5763,-15339,-20459,-7137,14165,-30532,-16231,-13215,-9261,-20608,30259,-2369,27904,24995,-29251,-7926,31570,-6538,21601,12101,3201,-16981,-31655,-25565,19096,-6725,7193,26365,-5981,-14537,20999,25052,-28507,29538,17711,28539,-10453,2409,10130,-7598,7050,-15661,14220,11166,-30650,1414
5,7473,9032,-10676,-26030,24117,-9436,-4445,25566,12035,-11742,-21884,9127,6456,-18258,-29387,7644,-3761,7900,18381,32464,-20633,-30872,-1187,20378,-10477,22681,20683,-27115,-13449,15859,6140,-17823,-12812,12663,-22103,20889,-1898,27813,-11381,32290,10481,21570,21214,-11387,28654,22473,-6986,-29805,-14090,11315,28378,4873,3876,-16328,11969,15225,20101,-16748,-21050,-28633,30914,30962,18320,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 diff --git a/tensorflow/lite/micro/integration_tests/seanet/pad/pad16_input0_int16.csv b/tensorflow/lite/micro/integration_tests/seanet/pad/pad16_input0_int16.csv new file mode 100644 index 0000000..696760a --- /dev/null +++ b/tensorflow/lite/micro/integration_tests/seanet/pad/pad16_input0_int16.csv @@ -0,0 +1 @@ +-10060,302,24299,-31273,-13139,7751,7954,-9429,-8170,12718,4968,28358,-3442,27352,-20946,-90,20057,-1988,-22619,-15044,-17193,-6503,-20859,18220,-7479,8786,-3747,-27256,-9720,-21402,-7472,27005,-27571,25405,8859,-31372,30137,-23996,-12791,-28391,-30826,8368,-21652,-6797,25744,32269,-23959,21159,-24948,-24223,-221,3250,-11424,-9805,-20548,11852,-16554,-5935,-10057,5265,30156,27902,-7990,-20995,26658,-29313,-28658,-27209,19010,32545,-2195,6445,-2331,-12932,27940,-20249,-997,-16525,20065,-26653,30391,-16402,11480,13984,13681,-23082,18828,5296,13293,-26814,27987,27626,3385,-14141,2881,13248,-27520,-6925,-32036,-17439,-15231,-23607,-31692,-25765,16194,-865,-25195,-31412,-26034,23236,25114,-9948,26588,-22688,-2110,11339,-31545,-20098,30503,-2725,3619,-8438,-5569,-22110,23403,-15137,-28360,-13615,254,12,14629,-7080,16983,19701,-8216,15034,901,14662,-14497,-18071,-15189,-2881,13367,22776,1550,-18029,-11009,20067,-15265,-26254,-10318,8032,-7926,13250,-20307,18066,-14435,-15029,6002,-10250,-9126,30315,-14446,-3192,5862,-5583,29377,-2011,30497,-20775,-18585,-28923,12762,-26187,12329,-13512,25593,26033,18513,30281,-25612,23487,5281,31023,-10200,-6931,4110,-27568,27813,-2783,14160,11282,-564,20611,4410,20709,-14213,-23315,24532,-11266,-29441,5131,24439
,30837,-1806,-5984,6942,-16166,-26732,21704,27833,-12,30154,-28275,21218,28377,-5839,-19189,-3532,-10489,-9997,-23547,-18616,-14293,1996,12039,18102,6543,-19371,19741,-28506,20629,4259,-12716,22607,-27708,2419,27246,12416,-4931,15013,-23378,-2832,-16164,-14722,-31642,-31408,23847,-11853,-27816,-16676,-227,-901,-6571,-24692,-14274,-22606,14985,-16368,-13592,18678,12918,14410,26651,10912,-31237,-19989,-10985,-17315,31324,16587,-8897,-25406,-26684,22090,547,-19258,-15733,8292,-25947,27907,26460,25810,-7385,21561,30880,22619,-16811,-12105,-20212,-12851,4444,4100,9846,-7172,-16002,-9761,-1114,3684,-7926,-15726,18142,7995,19306,-7839,-7590,28788,-6497,16665,-9172,-24809,-666,-12554,-29002,16300,-25664,-7776,-13208,-19919,-32289,14512,21813,12116,5128,32417,-5564,12333,-15816,-27268,-20389,-6985,17707,-26690,25185,21501,-10001,-5747,16769,7225,-1914,-11553,-10395,-10294,-32187,-11124,27658,-26379,30136,-713,2197,17879,19561,-1671,3457,14453,-10928,-31117,-14881,-12553,28776,-13498,29216,5761,3440,13134,-10420,9272,9663,-32412,-25589,1452,10023,18572,-4337,20749,-30047,-6744,11542,-18283,-28950,-26541,-10036,26263,2816,23187,4920,24901,14435,22247,-22536,19922,32009,18343,30514,24821,-2358,5935,15335,3551,29710,27216,15374,9916,-11316,11429,1249,-27010,3392,15649,-26475,974,-18840,-8289,6924,4092,-30802,27271,13006,108,11321,23868,10019,-8254,12429,22144,29045,-2882,5692,2999,12023,12246,-27687,-17876,5514,9056,-29341,4828,23987,29882,-15303,1059,-24964,23865,-11600,-30822,-617,-25029,-7615,-1174,5656,-25539,31985,20639,-29009,-108,-14986,-23242,4512,-13368,5973,-139,8381,23029,-13267,25527,5674,-26906,-7954,-30009,-28155,1116,20203,1552,12274,24563,9948,15054,5323,-2938,-10645,-8542,25669,27321,25884,4205,-30842,-27189,7045,-20052,-20917,-11370,29414,-11847,25677,-2744,-26366,4390,19120,8856,-32395,-3962,21595,-18466,-20731,2229,27803,21903,8898,25103,14149,1124,21701,-26976,20814,21416,8791,30121,31669,12754,11797,-19831,11305,3242,-30602,-461,-6582,18075,-13479,31698,-15
210,11578,13999,-24167,-32167,24000,24574,-27670,-4038,29905,16871,28348,-1441,21253,-17896,17731,-6460,-6119,-24793,-28847,31588,-22585,-12249,30997,27098,5308,-10584,18675,-7814,-9127,-3685,-3741,-21122,-17278,-15521,31657,-11738,-24113,-2543,25450,-22972,-30992,13536,-29210,-17151,-1336,-13413,22642,17271,16225,-26885,-2916,-25112,-26572,18994,13287,24220,2944,31317,-15717,-15707,12340,1912,-31635,17618,-11308,19954,-21326,31052,28804,2321,-4756,26777,19422,-8117,9263,-21629,19767,16102,18733,22709,10246,-11165,-3966,-1943,-8601,18823,-4345,77,-10494,-32500,-4912,17479,8778,20034,-4341,-24400,13485,2699,19251,1248,17873,12746,-9694,-9937,11113,14149,-10538,21652,23753,3040,-3187,-19462,-32633,17295,27364,27858,27325,1151,-18669,2734,31469,29561,17275,-3132,32362,-13666,9281,-32152,14483,-3595,21680,27165,27478,10324,-10575,32733,32387,-28414,18533,-8598,8159,-31983,-11778,-26310,-7667,24556,18540,4981,-27362,-12341,22760,30992,-27060,7871,9001,-12454,24742,-7094,16770,-84,32347,25527,17695,8389,19697,5531,-31235,7911,-21429,-29984,-5475,21166,-939,9027,-649,-31206,-1887,5419,-2229,-24043,18943,29680,-4627,-13494,7469,4388,-31033,-32133,10264,31508,20667,31641,4744,18815,22357,6902,30209,12858,-30453,2206,-10337,13301,-16931,8553,16953,-2557,24894,-11113,-26666,22152,1612,18470,30103,-7153,4800,533,-5791,-22930,9901,-2467,4895,-5516,31580,2375,-2426,9847,4331,30033,-1779,10032,7430,12422,17229,10089,24325,648,-22760,-8572,-4871,16032,-20478,-15480,25828,32755,-13813,7433,-5674,-26564,1861,-12524,3383,-27230,-12656,-2556,-766,-31226,6937,4652,-12175,-2772,-21712,-1463,-25200,20373,14199,14805,-31418,29463,28646,13389,-27762,-24876,4614,-31013,-9859,1432,-22405,-31651,-6813,2830,23622,4096,-30276,29083,-20217,25956,32190,5107,-11070,13981,26914,-25459,-9963,-22593,-1341,-27297,-11264,19935,-2411,-29780,8801,30082,6732,32292,29786,-17368,26541,19094,-14947,13037,-2114,-28722,29927,-25551,17086,-11838,-25852,-28226,-8732,9132,-20289,16337,-26452,17997,24461,25055,336,
3691,-30349,-15204,18244,-30843,-9194,-10018,-12449,-6774,9336,12453,28607,14025,24116,-7967,-32463,-22944,-3717,29043,-3011,-1396,27978,22566,17104,-30524,-25453,25892,-12311,23297,1542,-17064,-7792,23964,-22520,32244,-19044,-30247,-26353,29505,17845,6140,13581,10699,7925,31867,10001,18694,10056,17118,5984,15854,-18139,13773,-21697,16463,-5390,19390,24697,1850,-7612,865,8093,-23776,18548,14391,-11582,-1246,-11115,5749,1443,-17046,18901,-12878,-6672,1133,26631,21729,32301,1257,-2330,-26851,14946,-19605,4647,23196,-25125,-16051,-13001,15441,-5795,-18321,-30217,-31467,17365,16854,-6770,-12040,-25861,20114,-24460,-11845,1157,32471,29212,-13040,19619,10513,32304,21241,-21140,15961,23286,-30536,30912,23669,-26428,-293,-7809,-12366,32005,-23784,-14793,17460,-13995,24805,-29906,20188,27552,2316,-16643,-11247,-27984,-32435,30274,-29551,-31795,25539,-17433,-31000,-21559,-30921,18343,29505,12514,-26794,-10678,-26549,-23856,20675,-6874,32700,19145,-18780,15751,-24872,-28711,-13402,-10284,289,30228,-281,-29377,-14105,-32484,-3641,-28008,-24517,-26377,-23032,21734,19263,5708,-15936,8515,10701,-6351,-13335,-14746,-11124,-13997,-2362,13284,-23682,12193,15155,21541,-9169,-6979,-16250,6482,3811,-4301,8559,-21652,7269,8647,10682,-22625,-4739,16781,30043,28966,-5825,-16599,-32262,-20983,19026,17875,18786,1239,12510,3763,-11710,-4684,22609,25754,25030,29105,123,17194,-10228,14134,-12016,3917,-23852,22069,-23373,13443,31102,27717,22290,2662,-13728,-32190,-2422,-18444,25575,-16420,-17784,-23090,31362,-8502,28589,15214,-13726,26387,28981,29011,-29233,-19434,-14279,11714,-12872,-9714,-13286,22548,29732,1221,-25260,-19220,13499,-5208,11673,-10158,-32113,-7460,18846,-25959,-15156,-19200,-7703,32658,-8276,31890,-15019,14564,-2641,-17348,10830,20072,-5331,-17418,-16274,-27993,-16176,24222,25479,9849,28573,5819,-17542,24011,16117,13199,-9316,-29233,2021,-7553,-12753,-20459,28092,24962,-1202,27312,-20818,-16036,21054,-8806,292,-12536,13847,-3307,5687,-20416,-10189,-19201,-13659,15479,-21094,-225
1,-8498,-28367,12368,15160,8398,-25492,-2560,-25884,-23811,-11263,24699,-19635,22587,-19788,6777,31680,-32249,-6884,-10502,-25523,-23259,7271,-9341,-23826,733,2229,13967,-24955,5555,-10391,4288,14733,25276,6729,9593,5035,1505,-26167,-26211,-32398,-545,-5371,-9877,10493,-8764,-13992,12006,-11821,15470,-12455,-519,14696,22526,32268,16739,-26950,32407,-9583,-17347,-29188,30270,7901,-32228,2167,-29797,27498,-21022,-2025,-28371,18075,-3998,27165,12390,21879,-11861,-9654,8526,-18394,23948,24337,-19914,-9027,-19734,-11690,18535,-24302,-5019,-22172,7801,-26114,-8620,13947,21806,-24105,30410,11283,-8317,2309,12082,23779,1658,22521,-1357,-13157,-20932,-8699,-3140,-32203,-22707,18968,-11774,12451,-20854,-9026,-31746,-18005,31377,24565,-10421,24324,3235,-30872,18562,-28658,4809,-13133,6931,-30315,26232,-6083,27372,-29659,-13034,31236,-27164,-26580,7424,13019,28106,4854,26385,-18892,31056,-17151,635,17952,25886,-3869,-8818,-32250,7204,-12258,-31225,-26321,13004,590,-25228,8331,-10032,30066,6614,-24969,-2227,-4599,13859,-15087,13765,1446,-2297,-15581,28132,12457,12828,22996,-26987,-3147,-26239,16201,-11146,-21143,3687,26600,-26539,25732,-12550,17605,5723,-18453,17371,4271,-23250,-28677,-1095,6558,11441,-4273,-7311,12746,-5580,-23281,-10382,-28980,-21792,18147,5350,-16802,-14063,-3023,325,-23686,19997,440,32685,-2761,-16471,-3985,-19821,8440,-26907,25658,-3782,21989,25545,29377,41,-6549,-31522,-11662,20595,13835,-25235,12697,28895,-29711,20574,-4697,-15409,-21607,17707,8736,2421,-6708,25958,-5976,2948,29923,-14880,15312,-21098,23433,15066,14299,8951,-17024,-20438,7431,17895,24067,-17564,3331,28645,-18708,3046,14590,1307,-26713,692,-32004,15899,-20809,-23050,-4178,8765,18437,19092,32258,-23674,8372,13635,820,14879,6897,29205,20506,11751,11726,17530,-12110,-28442,-16875,2135,5440,1012,-26658,-23337,27459,-14915,26341,29189,15441,20761,31379,19152,-9604,11021,12881,-10649,30077,23631,12089,24560,5100,22796,-22436,6964,19911,-1050,17594,-10090,32624,-2710,7834,1477,-20222,-7551,-5307,
26546,-30914,-21895,24004,-3487,26960,-26919,902,-22034,3075,-22574,-23781,13988,-798,32723,-4085,19009,1070,18840,-25104,-19578,2244,-15094,474,-30355,-32413,-22522,2751,-17542,-27383,12403,2077,-3459,-7964,27620,8866,17355,-11925,13002,30590,-31122,-4792,16622,-15329,-4351,-28575,16063,-2877,-17832,-32533,-15701,31421,29448,-31292,-5680,7193,29010,-23607,-19970,14283,-3352,-17400,9131,11683,-26330,-24744,-7000,16157,-18886,-25492,-28467,13788,25696,10152,-32074,26134,20159,4748,12887,17142,-22080,5671,31666,691,-15909,30984,17192,16148,30659,-13033,17374,-20665,-12717,21149,-20939,-17788,14683,-6431,-11145,22910,21260,-15722,-13818,7130,8971,7888,22285,13341,28800,-925,16742,-11856,10338,14950,798,-29496,28885,26570,5974,4481,12696,4240,-8438,-26666,15041,-18807,-29464,-30027,373,-10860,15449,-14484,-21621,-18710,31775,-11059,-29298,14125,-31646,-12669,24456,12048,-29580,-24641,8059,-32098,-5267,6110,-31406,-17562,24317,-744,23083,27417,-3948,4963,-11851,-23448,22504,-17713,-32007,24651,-21360,15226,-4502,30630,-22813,19381,22138,31415,-9754,4464,11456,31225,-26080,23961,8447,-8338,-26509,26831,-16604,6206,-3099,-20434,22460,4710,19693,-26,-21107,25140,2763,15944,2748,-25806,-21630,28983,10183,27773,-14485,2007,-11277,3580,-20583,4069,-29884,19085,-2851,-4723,5145,-7707,3890,-29111,14257,778,-4663,28964,-27260,15527,-18788,18389,3403,-25057,2805,6705,-20918,-16090,2918,-19809,19895,-20596,24369,28414,26070,28432,7059,-4129,13974,8024,24851,-8952,-30699,-14820,-17190,26819,9232,-15475,18838,-8145,-31702,28730,-29293,-16204,19643,18318,-32692,21520,29612,-21332,28407,-11503,13928,-31694,-29039,-22756,-31519,22909,-5826,-20113,21011,22387,-28203,9971,-25992,-32612,28621,3643,-7181,6451,18091,-24697,17321,24549,183,-12694,-4429,-5263,6681,-14802,-13723,26320,6768,-19665,-24151,-12000,-28232,15060,28426,-14874,25309,-23912,-21593,13351,17154,-15005,-32095,27267,23505,24063,26457,-10661,-6897,15526,2182,-11171,-26994,-14360,30141,31121,-23889,19702,465,-3489,-24309,-327
43,24548,5887,-27757,-4556,-19684,4099,12023,-12295,-22808,-23280,26109,-25056,4784,-27528,-3260,-6523,-3396,-15333,19390,14607,31623,-17547,-25783,22577,20143,13774,-24453,4919,11478,22995,5632,-31786,3830,21856,-25622,-9699,21734,5276,5967,1057,-24746,-31413,4054,-26341,-9741,-2616,-9395,17413,-17663,24767,25815,-22047,-25469,-19910,8548,22831,11713,1921,16491,-29794,18243,-2398,7466,9042,-23296,-4882,908,-26442,19055,18312,32108,26109,-13780,10952,32181,17393,-9006,-17064,12351,-28925,3311,16005,3073,-8072,9122,-16792,977,30429,32261,-28932,25988,26504,1608,-8758,-9532,-5583,5712,10045,-29053,-14821,19619,-17925,3770,-21606,19950,-27827,-27,-31435,3065,2913,23790,-11090,10078,31770,-32139,24220,14689,-19486,23338,-25646,31993,5266,24857,-31224,-12978,2648,-9411,17151,5352,-6942,12330,15831,23940,-26442,-23481,-2026,3696,26057,18537,-3049,20025,-31488,-4799,-7320,552,20499,11544,-18395,-24885,18345,20006,13598,19272,32461,-20891,-18199,24638,3285,-28101,11629,10574,-2571,18677,-7196,-27832,11819,16920,24804,-20350,-25463,-4719,-28000,-8847,-14333,-22685,7879,26712,-5495,9200,22328,3470,-30580,31203,25276,-1704,13660,-31692,13720,-5571,-9942,-17013,4695,15588,-15260,9640,11600,-2273,-23913,15415,-630,-12053,-24455,20721,-15941,-18984,16527,14847,-20465,17672,-19344,-17764,-27480,-10137,-31472,-4269,20031,-17706,6286,19559,-4731,23264,4707,-21086,23922,-3393,19195,-12674,28628,-30009,-1809,14048,-11045,32672,8458,-28938,-21647,28942,-5188,6605,3898,-12030,-18067,-29242,9792,19849,25593,-2561,25623,-31011,14365,10475,12468,20753,24282,-4748,-22287,24347,27631,28752,-12503,31543,28043,-31549,-16187,-12481,-21639,-20853,-17183,-27798,-11418,-32595,10943,10898,4426,6488,25410,-5240,-12536,-10364,28994,-8666,12447,3153,-20869,-22205,-2158,-31791,29247,4013,-20617,2343,22553,32138,10265,-13425,31585,-10558,29290,-15229,-75,30731,-16950,21177,-26547,20386,21715,18293,-4039,-64,7758,-16693,-15700,5241,-17828,14318,-17842,15356,-25231,-17656,14098,11169,13225,18331,30089,735
1,-15530,22609,6224,15447,-10326,-32673,14374,24009,2988,11788,-9372,-24000,2018,4907,-14502,-742,26992,25701,10538,-3460,3350,447,-28860,-22460,12575,27026,-6741,-17283,29648,30150,-14791,1407,25035,-8810,15287,-27656,17201,7389,-7205,20475,-13847,29671,-24002,14319,-5248,5494,-26672,-22390,10699,-23154,21956,27271,28868,-32281,20729,-25553,11955,24170,26216,16876,-8967,-22532,-6572,9381,29940,25413,-32625,-19033,-32420,-5180,28457,-26390,-3999,-30115,26673,-16607,1519,30264,19830,-634,15074,17218,28829,-3086,12602,30096,30216,12638,2226,335,-22951,11158,-21750,10813,19614,28198,-15103,-9081,31588,-2711,-15963,9081,-6322,1727,3512,-30878,-21116,31432,-5588,4483,-2257,-9415,-18606,-2591,-1507,-8472,-31593,19308,17805,21248,-10211,12829,-16366,10749,-13092,10009,-6219,2832,5830,12852,11436,12568,11925,8650,28563,25160,24661,-18426,31369,13188,-32301,-20596,-25813,30204,-19558,7694,6774,20862,16418,29588,19396,-3270,-6776,-28774,-26239,-4658,4184,3408,-24862,-19466,8823,27796,-10903,22958,-7583,-2189,31077,31358,17187,21029,19450,27495,-26853,-10985,-15255,-7621,-14474,-26114,-6281,-4248,32139,10767,13296,-19308,7904,-4802,7091,-356,-15967,31554,32027,13879,219,5441,7744,14845,-1268,28957,32545,3756,-13899,10044,21421,16762,-1858,-2342,20855,-30049,-24156,8529,-1267,-5210,-17126,17552,-2112,-20297,31564,-9674,6272,-9209,-29415,-31494,-17782,26164,18862,1185,-27329,2709,10897,-24078,18395,31868,-6327,19578,25771,25470,-22161,-11192,-28408,-26505,27313,24527,30030,24813,22355,13455,3732,-16225,-31384,17628,-20187,-14826,20686,-4912,-4329,18961,8044,-9725,19484,28241,-18369,-30146,-8453,-10905,16724,-7200,-9241,-21890,15919,-13089,-22302,7623,30490,-9844,20593,-30329,14899,9325,-9844,-16124,29185,19757,30449,4663,-10483,-32547,-25379,-6758,8409,23771,12819,31078,-7541,-11811,-17127,-9374,-1465,-31200,-29361,-5763,-15339,-20459,-7137,14165,-30532,-16231,-13215,-9261,-20608,30259,-2369,27904,24995,-29251,-7926,31570,-6538,21601,12101,3201,-16981,-31655,-25565,19096,-6725,7
193,26365,-5981,-14537,20999,25052,-28507,29538,17711,28539,-10453,2409,10130,-7598,7050,-15661,14220,11166,-30650,14145,7473,9032,-10676,-26030,24117,-9436,-4445,25566,12035,-11742,-21884,9127,6456,-18258,-29387,7644,-3761,7900,18381,32464,-20633,-30872,-1187,20378,-10477,22681,20683,-27115,-13449,15859,6140,-17823,-12812,12663,-22103,20889,-1898,27813,-11381,32290,10481,21570,21214,-11387,28654,22473,-6986,-29805,-14090,11315,28378,4873,3876,-16328,11969,15225,20101,-16748,-21050,-28633,30914,30962,18320 diff --git a/tensorflow/lite/micro/integration_tests/seanet/pad/pad17.tflite b/tensorflow/lite/micro/integration_tests/seanet/pad/pad17.tflite new file mode 100644 index 0000000..c4c2cd8 Binary files /dev/null and b/tensorflow/lite/micro/integration_tests/seanet/pad/pad17.tflite differ diff --git a/tensorflow/lite/micro/integration_tests/seanet/pad/pad17_golden_int16.csv b/tensorflow/lite/micro/integration_tests/seanet/pad/pad17_golden_int16.csv new file mode 100644 index 0000000..9be6dd9 --- /dev/null +++ b/tensorflow/lite/micro/integration_tests/seanet/pad/pad17_golden_int16.csv @@ -0,0 +1 @@ 
+0,0,0,0,0,0,0,0,22605,18505,26065,26664,-17759,-29975,9323,-8181,21591,-28735,1792,-19745,-9950,-7842,-791,11377,14715,8593,31314,32069,23053,-7759,-16567,31163,30264,21821,-21573,15572,18997,-22736,-14493,3507,-6166,9173,12476,22292,31855,27288,31204,-26635,26909,21301,16862,13953,3286,6679,15367,11335,-30773,21406,13432,-27601,2007,30149,2936,-12462,-14768,21715,4214,-7048,-2205,-21389,-31264,-20057,-93,-32362,13908,-1792,-8810,-995,23469,24255,32351,-30890,-27055,19558,-27775,-23037,4974,-1870,-31033,-4678,-11345,-28824,-17060,11759,8991,16043,5824,11146,-32733,17409,-16,28559,23327,-31067,-1298,6852,-29776,-15713,7394,-24637,-2836,-15635,26083,-7192,-4177,16180,3294,926,13247,10755,-29712,-15065,7177,-14756,-14853,-30832,5657,15681,1441,31447,-4229,17310,26405,-5664,-22600,-19111,8540,-10016,11929,30412,2541,17245,8329,-9274,-14245,-14626,-4929,-17874,13516,9603,-25340,-32303,-25771,19782,-27716,11263,27810,-27253,-13709,26494,9774,27745,11427,227,26488,-15184,-30765,-19813,-625,17742,24354,-23913,-5584,-28772,31740,27622,20510,-6562,-10702,-1192,-23639,28651,6626,25339,-11609,-28621,-10331,83,2361,19166,-12545,-20004,-10924,-28636,20238,26046,-22873,1949,-21849,-27489,16567,-17948,21172,21284,23972,-13246,3988,3617,-24429,-22814,10736,12027,-4844,-16579,20522,-30352,19546,-9370,-13579,-8147,7140,-23713,-13492,21115,4553,-5526,-25516,18982,12721,-24995,14827,6036,-12447,28342,11475,-14884,-16633,23205,-19365,23312,15501,-8196,13677,4590,-30441,-28021,18470,-32237,-31765,24971,22223,7252,5123,10172,29826,28587,-19927,13743,-32152,-31904,21458,13790,-32487,-8895,-18446,-20865,-9515,26934,25734,29955,-22440,2123,-24850,24524,-16994,-272,13517,7424,-4703,-11475,-21014,-9106,3399,28460,-32011,-30452,28550,8608,-14671,-15951,-11326,-13829,11125,-30170,-18070,857,-31592,7403,-29625,15062,-29552,-16706,5597,-15572,-2202,31329,29192,-12867,27145,23581,-23471,30609,7517,27273,-12637,17264,10437,17527,26533,9345,5342,-32171,-18082,-9128,-358,-23344,4324,25685,-25603,-1775
1,7554,-26282,22846,-24651,8826,-10839,1328,-12559,19557,25923,-10934,1826,27964,-13343,18824,14367,30771,-3187,28524,-26735,-12988,-32681,7526,-24122,-30148,14741,-25965,7584,-21309,24762,-22024,-5002,21230,27061,25585,12872,-3098,-29166,-17286,14864,-5629,27223,-26887,-22721,30057,26550,-31080,-6021,-3880,21053,8544,-17323,22089,-32700,-8927,-28838,-27062,-10621,-2845,-19807,-27487,-3024,-6633,10293,29657,12266,-25143,-11185,-7068,20369,5553,9153,11909,-28641,-30894,22602,-31648,-15987,4814,-13443,-2007,14322,8026,27209,10693,17193,12718,24878,8623,23800,24814,12327,-2790,-10038,27077,12667,-9987,-10894,-4842,-21160,6794,10674,7469,-4554,-7972,26554,-16733,-24438,11416,-11698,23285,-21361,22663,5394,-1454,24757,15219,-17462,14844,6644,-11417,-16000,28107,28382,-6902,-30614,-32318,-20856,-24371,30560,3858,-23170,17806,-29918,-26018,-319,-11350,-9235,18974,-1018,23267,29163,14721,30576,-4812,9234,13164,-22082,-16535,1608,23985,-9630,25412,-17059,-8275,12643,-4880,-20573,31530,-13029,15729,-26947,-6224,-25901,-11466,-29150,-29291,593,14606,4560,-28469,-1059,31147,2853,-31447,19469,1136,-7271,-16952,17488,4528,2731,24508,-25579,-18014,23654,63,-5009,23708,24281,24884,-20814,-3249,24273,-28699,12117,-26053,-31827,13304,-24524,-29980,11055,25137,18900,19947,31190,28841,21749,26379,14566,16947,319,26725,2941,16454,-31239,-7760,-24357,23742,-19926,23472,13804,-16964,4522,-973,21942,5557,17989,27740,-6766,-28985,9385,8698,-28451,-15175,-20656,-28204,26114,-2745,-11976,23327,14504,22921,-13441,18389,30544,-2811,20945,-19482,30454,-22812,-13269,-26890,8573,17255,-7953,17872,32557,-8407,-3230,21453,-20480,11448,-6489,-26487,2182,-25479,-28247,-15570,19331,25055,-6177,23778,-4585,-30255,-11354,-1152,12814,-13837,-11939,8597,-9364,18310,-25671,-19276,5850,32055,-776,-18619,-10116,-17087,21649,-2091,-29300,24581,3786,-31369,-13618,23186,-18451,5589,-9273,21160,-18458,31728,-3966,-7378,-21438,-17612,7214,29186,15126,-7371,28485,-18546,-21073,-13625,-27294,-26277,-7356,-28932,-104
62,-4903,11786,30650,13497,-14007,23777,16123,-5955,-5742,2057,4178,2372,-24526,14608,-13162,6814,-27901,-4614,4320,25037,-9282,28312,25377,-21561,4650,-31478,-8100,-4215,20789,4859,-15037,519,-27422,3834,24152,26414,23745,6174,30667,4940,23108,7317,10333,2350,-23391,7001,-31984,-13442,-6097,27815,2560,-8241,-11923,19501,-24244,7685,-3949,-18750,2522,14066,-28775,-22407,-15080,-6194,30080,30987,20422,-14438,-13291,1138,-15748,866,-29109,19612,-21276,-4318,5361,-13838,-15296,-22338,-20174,-16347,-2462,7907,24296,8839,4545,-31248,-10391,-27584,-10498,12698,21276,7225,11686,18338,-14972,-26098,-6596,-5162,-2885,-17344,2122,-30049,17558,-26009,7588,3572,-22206,-20155,25822,1913,5995,-19651,511,13866,23530,32022,-14153,18163,24086,28298,-12855,16161,18835,-27134,660,-8901,3316,18168,26573,-24546,-7852,-12775,-13255,-9956,-751,15957,2093,-13255,-10519,-13983,11464,-12748,24157,21799,-20754,9328,12388,23622,5341,2150,30031,-29906,15831,-280,-6642,-21551,-32201,1643,23066,-11508,21507,-25489,8185,-11958,-2248,-6663,6467,-28438,-25807,-31536,27235,19642,-4896,-10653,4411,26732,3800,7811,661,-6547,-26982,16084,-21111,8705,-19904,9914,16340,-18163,-14875,-8190,-21013,17781,-15292,23770,-7331,-14736,-16993,-26038,25148,7258,-28329,-32223,29118,-16614,1739,14463,23684,21248,-13252,-3737,8529,22878,-17028,18720,21728,26646,16350,-17127,25119,26230,5600,22622,-13284,-16069,-1666,28142,4996,4643,-7106,4816,-19577,-30514,11344,-31546,10795,5870,-22068,13710,17549,-17411,-1320,-26545,-14451,21234,-7367,-31676,24336,8838,-8025,-17684,-19128,26138,-11891,26350,9249,-6337,-26212,-22626,26294,-27840,20150,-20041,-31155,-28626,6837,-4478,29365,-7839,4455,32,-6641,-7086,32009,17207,26172,25814,-30970,25486,6584,-21609,-20088,23111,18437,-17119,1698,21332,2794,7106,-6708,12992,9586,23019,-1569,-26690,19135,1221,-2911,-25022,-27469,-7570,-5865,-28400,-23694,-15743,144,11455,20448,12694,24943,19312,-2798,-5475,11847,30184,-5561,30476,-890,-14657,-2071,-8758,10825,13522,24609,-23142,6577,18926
,-31560,-10591,-26221,-11241,-5972,28381,-19273,25227,-21551,-21326,-26771,10225,-14,-3212,-5990,16433,29051,-15600,32630,-11291,-3313,7616,-28490,-19148,21812,-17214,18928,-2791,-17933,-31055,-2200,1867,17375,9833,-2520,-8348,-18753,-29554,18695,-13546,-15568,-22917,-28898,15183,18026,-28377,-13472,11976,-6782,-31870,-8313,-10361,-13450,-30563,14457,-4965,-30874,1332,630,-1640,-28371,-1114,-10733,-12485,7567,-19940,12407,10093,31428,24688,8494,-1085,-16488,-25633,31916,9430,-13470,-15622,28707,20278,-5470,-20103,10205,-22973,19289,15226,-18864,-18808,15243,-498,-30158,30691,-21079,7133,2885,-1666,31702,-8914,21970,-22982,-6131,-19506,-25204,29513,-10890,25185,-8884,28915,7280,21580,-13754,-10204,-29087,-28104,16611,20269,16295,-22167,22164,6422,-13761,27523,4427,13534,5680,-9470,-13589,29603,26387,5759,9999,-22651,-20541,11145,32236,5056,-13992,24905,20632,-25235,1473,-29727,-8068,29555,-17212,21219,32160,13942,-19724,15465,14295,-9297,-20725,-5472,6279,10696,-30030,26454,7342,13873,6652,28477,-15534,-31766,25155,11881,4647,-6114,15974,21873,226,10595,1037,30496,19983,-11201,-29330,31663,-9264,12901,4510,-7724,-13678,-13508,-6666,31625,7685,-23950,5997,-23754,-1326,-3014,29297,-14133,-11495,31755,26519,10789,-28292,16436,-16496,13238,26471,17330,814,-9920,14640,23402,-1763,30467,28200,4537,-16424,22021,-9801,-2398,-28440,31229,31010,26584,-8443,27149,-3241,11154,-11169,25529,-26421,23737,27220,-20835,-3820,10108,-9591,5437,-26658,-852,-26780,13254,-29107,7734,23118,27895,15387,-23317,20419,-18408,29625,-3250,12067,-31993,22705,14736,-17666,-8866,6211,-19752,6027,6350,32655,29966,10668,12391,23231,21576,3541,-9748,2194,28989,-22458,-30751,-16745,18438,24612,-12720,12919,658,-7540,26803,4518,-20837,-10482,-13217,-28131,-22793,13766,-1597,296,-14290,17236,20221,25653,-8608,17291,15923,8545,16517,-24947,25163,14730,10198,-9821,-29584,804,-10528,16693,15116,1693,-21929,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,21095,-31755,-11974,7261,-28075,-25863,-19367,-871,-6279,-28246,20221,
20104,-26544,16771,-32230,310,-9742,-12482,16559,4220,9763,-24692,30812,20349,2298,29289,168,31081,-14328,-22911,22200,-16522,11121,24,-4570,5932,8152,-19933,-22098,24704,-22268,-2950,29818,-27232,4317,-14654,-20767,-22333,-3160,6497,8477,18920,8082,12931,-20001,30302,-19995,-16055,4392,-23703,10820,-9902,1669,27720,15252,18396,-13928,23681,24386,23255,17052,-30881,-10228,-24020,31971,27974,5436,4192,12299,-26398,-22903,-13999,24143,-17860,18073,30855,-13490,25765,19251,16968,4920,18962,3239,25463,-30604,-32232,20414,4127,-16489,29535,8843,24479,-16082,-25742,6936,-10794,-31576,-17008,6541,-30426,16378,26563,-6629,3084,-2802,-5758,-26830,21332,6141,-7367,11063,-27737,-12093,-19220,-14884,-9627,-26462,29697,-10294,14826,30521,13243,2705,-7750,-12306,29625,4665,23727,-16668,20350,30015,2709,-11828,-4997,2658,-29212,28990,-23422,-26012,-8487,29353,9301,1396,-669,25154,-15236,32146,21260,21141,18514,29285,18193,-986,-30167,26075,-24499,32476,-28932,-30246,-14529,-10399,8992,-6892,-16061,-13398,-1584,-22937,17839,-4462,1594,-7911,-1248,28170,32474,-21398,829,-12708,-29533,6239,3441,-17764,6722,1121,-26462,8031,-8789,-14927,-21150,-7125,4365,-11942,10526,-18262,-32439,-12777,-1189,-27300,-13994,-14873,-20131,840,22233,18422,21149,17292,-21262,15381,14927,-5739,7075,-5468,-28314,-31023,15561,-20210,20374,29585,-24517,-27453,15532,-11065,-10468,32097,-22225,3439,1927,-19624,22456,-31177,-20156,-20468,-22426,-26595,-20406,-21843,26428,19544,-9693,17540,8209,-12117,-11539,-12520,-5121,-8830,-18751,-8040,-15173,14000,-27355,30422,-29738,7523,4769,4582,29750,-30709,-30103,14340,-15698,-22387,6411,2821,10947,1786,10061,11831,20041,17138,29915,17857,-4122,-32307,2624,-12887,-13495,11654,-32137,-3764,22303,-17056,-4112,-15850,-18802,29096,-26624,-30962,18339,28024,-8882,22581,17836,1026,-27256,16154,-2972,-694,-22281,-3841,21464,25679,-29833,30994,-30625,-22118,22352,14633,-32105,-3529,-1499,24386,5672,25824,-2698,-361,9660,-8430,23870,28520,27642,-11819,-2482,17555,-10974,28211,-
21401,-21565,-17674,28770,15433,29916,19082,17080,29815,-26933,-4582,4531,-18260,-3309,28958,27550,-23405,-28150,-19998,-29974,11853,24190,-21818,-7584,-12736,2243,25719,1165,24527,362,30937,1647,-24953,28354,-2487,25396,13701,9717,7518,-25073,23582,32750,30099,-1434,15722,9433,-31802,-15905,-23496,-6241,829,-18769,1748,-18684,23164,-6563,-6652,-1588,4058,10010,-12318,-15443,-3118,-15847,7755,-14630,9500,19357,20046,-23880,16116,3216,-25961,-15312,31326,-28941,22697,-24868,-23805,30729,-6344,3352,31798,8515,598,3852,16370,11775,-18270,-10638,-5527,4372,-17362,-26971,15617,31371,6478,16277,-30021,20508,-3368,29784,-11998,-22953,-13513,11508,26834,-29864,-3376,-2795,-13413,-15628,209,-821,-28959,25125,9315,-10095,-28612,20551,1360,-4444,-21805,8945,28688,-10756,-1042,20477,32236,-29296,-5101,23255,-11517,1209,28559,-16313,31933,20935,31657,-23658,-25840,-2930,-30621,20417,11190,18558,-2590,-20649,5816,-14040,9366,-1632,7314,-30132,13565,-18464,-23697,-23320,9382,10385,-4859,25408,26925,-5781,-28448,-18066,-31572,-21334,-32194,1361,-9999,18658,-1237,4927,1647,-10940,-18845,-29016,19698,9658,-8250,8774,9525,-5891,8353,-17845,-14223,-15312,6627,-27271,-30394,30401,3887,-26423,-2144,-26809,-9952,-17529,-26889,31562,-12867,-16539,-23546,27513,6140,31421,-199,-6803,6536,-3125,-20758,-25680,-25363,16824,2708,11553,13415,-21057,-4434,-30304,20501,-11731,-22587,-8167,-5757,-2289,20453,-28056,-1564,-19100,13723,-13178,-5098,8882,-13622,703,-6224,-17413,9731,30244,-23813,26652,4609,16634,15158,30333,-15051,15697,30413,4076,15205,-27632,-11156,-29952,5996,-8216,20845,17489,-4308,18975,4203,-11807,25523,-29703,8227,5184,12692,-14427,-8142,16055,-25581,16455,26809,-1485,-3339,-5471,1874,-17013,-11007,8364,21744,-1550,22005,-17319,-16025,4331,-11994,27900,27759,-13365,13545,11420,22462,5385,-16008,-22017,-21505,-29363,-8256,-13340,5637,-17987,24953,-15960,2311,-12565,-11553,-24541,-31970,-21753,-32759,19379,-27546,-3111,-29530,27867,4874,20050,-4126,26975,15079,14390,-19840,10100,-2
95,10157,19063,16517,1562,-23386,-32262,-10269,7849,-3657,8181,6511,-2419,-3035,-18506,19222,16937,13874,13857,5768,-8262,19719,15147,10442,-30858,-32553,-13052,-811,-3392,12541,-30189,-22790,-23199,22527,-30767,-20356,-12014,5776,-13536,-16486,26128,24718,3144,-28907,-2959,28588,1352,19204,-20570,-13520,21656,-25575,-7447,10075,-2186,-31387,17924,30384,-28323,-12718,-11328,-9978,4474,-27081,-5845,-14881,425,19844,-8551,9021,-27551,-12941,-5866,6586,2524,8243,29449,-12258,-7778,-10423,-26529,14096,-2630,-20719,19688,19015,-23370,18516,-16755,-15198,-9874,-37,18864,25298,22639,5720,-5353,21736,11204,9477,26610,-18750,12125,3030,-32431,1706,-24262,2402,14158,-24251,17235,-17969,6296,-28811,-5063,24910,-9649,-12504,-16167,10933,-5152,11488,30426,29747,28940,13056,-23,23300,-8898,6042,15639,10065,-18269,-2473,-23321,24816,-15347,7588,-9767,13104,3545,-17789,-24703,-23785,-5308,-14363,-11075,28038,-16779,8623,9572,-31374,15236,8530,-18891,-23843,5595,2337,-17936,-32376,-17667,-25773,-20547,-22404,-28264,-11739,-29043,-29297,20825,-26165,-6375,-26305,-28939,3915,-29673,27468,530,-9024,-11954,30135,8581,-32664,-8484,-9814,14254,11893,10306,2576,6828,7686,1484,4084,9298,27337,1950,-27651,9953,-29845,-797,-21865,-16341,2694,19190,-1878,22440,-30718,-23295,14776,-23729,-24098,1716,-29261,-30352,-3729,-27567,7768,14380,-10586,-26679,1006,3266,-3115,-6192,8637,-9871,-467,21598,-12593,9701,-4711,19235,3333,-27957,26484,20616,19730,-3596,-8170,-13305,-31911,22684,-22763,29102,-22028,-22229,-29181,2493,-26384,-27262,3572,9469,-14177,2670,24169,-29738,-2252,-28595,9832,-4381,-24250,-25279,-24103,-392,12819,2665,-10645,-18572,-10432,8089,19191,17861,1833,-15891,22728,-29590,17494,-12084,20503,11018,13943,-5174,29780,-23442,25163,-24105,6556,-26699,-22229,10066,-23252,-18477,16361,31446,18687,12703,-27066,-13199,3207,-11625,5061,-8868,-14004,-8128,26697,23485,-2111,30269,9860,19453,-26891,3471,2509,-12363,18696,16560,4437,-12780,-9343,4770,-11337,-3065,25418,2750,-25302,-28084,-19602
,23072,-4750,26086,4435,-15822,24735,-26546,8468,-1325,-30282,-30364,31347,6389,-27277,-4222,32660,-11579,26487,-19526,32562,-16811,-3556,-14958,-8338,-2124,-3527,-6290,11462,13803,25337,11936,21157,-12759,14844,15874,-12054,20763,-3452,4921,18762,19842,-11116,-11348,-17597,15090,-19175,9896,-18801,-28686,-14497,-26462,-16389,-14856,723,12391,-18197,9754,-25009,10920,-379,13087,-27301,20455,18476,26426,13287,-13229,3078,-1651,25726,-17742,-26424,11557,-16873,10287,12425,-22663,29152,31103,-23992,7892,23458,19647,-4481,-11167,-6983,-8514,-28766,-22352,1149,13901,4296,9502,26662,1069,27172,314,-20150,13454,-13628,12905,-25401,4368,30630,-28297,-5662,26689,19268,26489,-29178,20645,7404,12390,-32246,-31108,3696,-23493,27071,10124,-6386,22840,-15878,-2764,19831,6167,-12962,1164,18099,-25885,-8398,-13823,-27475,31016,-20615,-8622,-12247,25656,-7833,-21575,-30302,-9041,-1099,5815,-11784,26382,-8590,-27484,-25355,-17031,11645,25561,-16481,15591,8982,-15139,-6576,13505,-13857,26433,21306,-9583,-546,-3091,-29992,-17366,-28679,27444,18846,-3045,-4573,31953,-16251,-24680,29080,26032,-8834,8974,-27524,-27411,-16488,769,-18844,876,-30095,-30528,-23700,-96,1432,-13018,-16929,-8138,-5059,-28030,31809,-10698,-2715,-2468,15898,9844,-10927,13643,-26081,-17779,18732,28636,-29109,-7525,20167,-2135,15088,4425,6296,-31119,350,-7265,29263,-21318,14576,-5028,15187,26411,27520,-12929,2094,7651,30585,-3360,28367,15999,7916,-24922,10625,26921,-30642,-31847,6830,-17896,-2115,-21254,17289,8672,-18075,-7288,-16450,14065,2391,-31189,-24371,-24446,-32493,6698,13193,-18406,14220,28582,8147,30555,13075,-31010,-12169,31786,3831,29009,-3155,-16350,29816,24922,-693,-27190,14624,-28401,-25552,14951,-29118,-14491,-23982,18348,5365,-11916,-28,-22338,-11159,-19332,9125,-31681,14906,10363,-18713,-17024,-31375,-24254,27556,-25577,-11904,25078,0,0,0,0,0,0,0,0 diff --git a/tensorflow/lite/micro/integration_tests/seanet/pad/pad17_input0_int16.csv 
b/tensorflow/lite/micro/integration_tests/seanet/pad/pad17_input0_int16.csv new file mode 100644 index 0000000..c95113a --- /dev/null +++ b/tensorflow/lite/micro/integration_tests/seanet/pad/pad17_input0_int16.csv @@ -0,0 +1 @@ +22605,18505,26065,26664,-17759,-29975,9323,-8181,21591,-28735,1792,-19745,-9950,-7842,-791,11377,14715,8593,31314,32069,23053,-7759,-16567,31163,30264,21821,-21573,15572,18997,-22736,-14493,3507,-6166,9173,12476,22292,31855,27288,31204,-26635,26909,21301,16862,13953,3286,6679,15367,11335,-30773,21406,13432,-27601,2007,30149,2936,-12462,-14768,21715,4214,-7048,-2205,-21389,-31264,-20057,-93,-32362,13908,-1792,-8810,-995,23469,24255,32351,-30890,-27055,19558,-27775,-23037,4974,-1870,-31033,-4678,-11345,-28824,-17060,11759,8991,16043,5824,11146,-32733,17409,-16,28559,23327,-31067,-1298,6852,-29776,-15713,7394,-24637,-2836,-15635,26083,-7192,-4177,16180,3294,926,13247,10755,-29712,-15065,7177,-14756,-14853,-30832,5657,15681,1441,31447,-4229,17310,26405,-5664,-22600,-19111,8540,-10016,11929,30412,2541,17245,8329,-9274,-14245,-14626,-4929,-17874,13516,9603,-25340,-32303,-25771,19782,-27716,11263,27810,-27253,-13709,26494,9774,27745,11427,227,26488,-15184,-30765,-19813,-625,17742,24354,-23913,-5584,-28772,31740,27622,20510,-6562,-10702,-1192,-23639,28651,6626,25339,-11609,-28621,-10331,83,2361,19166,-12545,-20004,-10924,-28636,20238,26046,-22873,1949,-21849,-27489,16567,-17948,21172,21284,23972,-13246,3988,3617,-24429,-22814,10736,12027,-4844,-16579,20522,-30352,19546,-9370,-13579,-8147,7140,-23713,-13492,21115,4553,-5526,-25516,18982,12721,-24995,14827,6036,-12447,28342,11475,-14884,-16633,23205,-19365,23312,15501,-8196,13677,4590,-30441,-28021,18470,-32237,-31765,24971,22223,7252,5123,10172,29826,28587,-19927,13743,-32152,-31904,21458,13790,-32487,-8895,-18446,-20865,-9515,26934,25734,29955,-22440,2123,-24850,24524,-16994,-272,13517,7424,-4703,-11475,-21014,-9106,3399,28460,-32011,-30452,28550,8608,-14671,-15951,-11326,-13829,11125,-30170,-18070,
857,-31592,7403,-29625,15062,-29552,-16706,5597,-15572,-2202,31329,29192,-12867,27145,23581,-23471,30609,7517,27273,-12637,17264,10437,17527,26533,9345,5342,-32171,-18082,-9128,-358,-23344,4324,25685,-25603,-17751,7554,-26282,22846,-24651,8826,-10839,1328,-12559,19557,25923,-10934,1826,27964,-13343,18824,14367,30771,-3187,28524,-26735,-12988,-32681,7526,-24122,-30148,14741,-25965,7584,-21309,24762,-22024,-5002,21230,27061,25585,12872,-3098,-29166,-17286,14864,-5629,27223,-26887,-22721,30057,26550,-31080,-6021,-3880,21053,8544,-17323,22089,-32700,-8927,-28838,-27062,-10621,-2845,-19807,-27487,-3024,-6633,10293,29657,12266,-25143,-11185,-7068,20369,5553,9153,11909,-28641,-30894,22602,-31648,-15987,4814,-13443,-2007,14322,8026,27209,10693,17193,12718,24878,8623,23800,24814,12327,-2790,-10038,27077,12667,-9987,-10894,-4842,-21160,6794,10674,7469,-4554,-7972,26554,-16733,-24438,11416,-11698,23285,-21361,22663,5394,-1454,24757,15219,-17462,14844,6644,-11417,-16000,28107,28382,-6902,-30614,-32318,-20856,-24371,30560,3858,-23170,17806,-29918,-26018,-319,-11350,-9235,18974,-1018,23267,29163,14721,30576,-4812,9234,13164,-22082,-16535,1608,23985,-9630,25412,-17059,-8275,12643,-4880,-20573,31530,-13029,15729,-26947,-6224,-25901,-11466,-29150,-29291,593,14606,4560,-28469,-1059,31147,2853,-31447,19469,1136,-7271,-16952,17488,4528,2731,24508,-25579,-18014,23654,63,-5009,23708,24281,24884,-20814,-3249,24273,-28699,12117,-26053,-31827,13304,-24524,-29980,11055,25137,18900,19947,31190,28841,21749,26379,14566,16947,319,26725,2941,16454,-31239,-7760,-24357,23742,-19926,23472,13804,-16964,4522,-973,21942,5557,17989,27740,-6766,-28985,9385,8698,-28451,-15175,-20656,-28204,26114,-2745,-11976,23327,14504,22921,-13441,18389,30544,-2811,20945,-19482,30454,-22812,-13269,-26890,8573,17255,-7953,17872,32557,-8407,-3230,21453,-20480,11448,-6489,-26487,2182,-25479,-28247,-15570,19331,25055,-6177,23778,-4585,-30255,-11354,-1152,12814,-13837,-11939,8597,-9364,18310,-25671,-19276,5850,32055,-776,-18
619,-10116,-17087,21649,-2091,-29300,24581,3786,-31369,-13618,23186,-18451,5589,-9273,21160,-18458,31728,-3966,-7378,-21438,-17612,7214,29186,15126,-7371,28485,-18546,-21073,-13625,-27294,-26277,-7356,-28932,-10462,-4903,11786,30650,13497,-14007,23777,16123,-5955,-5742,2057,4178,2372,-24526,14608,-13162,6814,-27901,-4614,4320,25037,-9282,28312,25377,-21561,4650,-31478,-8100,-4215,20789,4859,-15037,519,-27422,3834,24152,26414,23745,6174,30667,4940,23108,7317,10333,2350,-23391,7001,-31984,-13442,-6097,27815,2560,-8241,-11923,19501,-24244,7685,-3949,-18750,2522,14066,-28775,-22407,-15080,-6194,30080,30987,20422,-14438,-13291,1138,-15748,866,-29109,19612,-21276,-4318,5361,-13838,-15296,-22338,-20174,-16347,-2462,7907,24296,8839,4545,-31248,-10391,-27584,-10498,12698,21276,7225,11686,18338,-14972,-26098,-6596,-5162,-2885,-17344,2122,-30049,17558,-26009,7588,3572,-22206,-20155,25822,1913,5995,-19651,511,13866,23530,32022,-14153,18163,24086,28298,-12855,16161,18835,-27134,660,-8901,3316,18168,26573,-24546,-7852,-12775,-13255,-9956,-751,15957,2093,-13255,-10519,-13983,11464,-12748,24157,21799,-20754,9328,12388,23622,5341,2150,30031,-29906,15831,-280,-6642,-21551,-32201,1643,23066,-11508,21507,-25489,8185,-11958,-2248,-6663,6467,-28438,-25807,-31536,27235,19642,-4896,-10653,4411,26732,3800,7811,661,-6547,-26982,16084,-21111,8705,-19904,9914,16340,-18163,-14875,-8190,-21013,17781,-15292,23770,-7331,-14736,-16993,-26038,25148,7258,-28329,-32223,29118,-16614,1739,14463,23684,21248,-13252,-3737,8529,22878,-17028,18720,21728,26646,16350,-17127,25119,26230,5600,22622,-13284,-16069,-1666,28142,4996,4643,-7106,4816,-19577,-30514,11344,-31546,10795,5870,-22068,13710,17549,-17411,-1320,-26545,-14451,21234,-7367,-31676,24336,8838,-8025,-17684,-19128,26138,-11891,26350,9249,-6337,-26212,-22626,26294,-27840,20150,-20041,-31155,-28626,6837,-4478,29365,-7839,4455,32,-6641,-7086,32009,17207,26172,25814,-30970,25486,6584,-21609,-20088,23111,18437,-17119,1698,21332,2794,7106,-6708,12992,9586,
23019,-1569,-26690,19135,1221,-2911,-25022,-27469,-7570,-5865,-28400,-23694,-15743,144,11455,20448,12694,24943,19312,-2798,-5475,11847,30184,-5561,30476,-890,-14657,-2071,-8758,10825,13522,24609,-23142,6577,18926,-31560,-10591,-26221,-11241,-5972,28381,-19273,25227,-21551,-21326,-26771,10225,-14,-3212,-5990,16433,29051,-15600,32630,-11291,-3313,7616,-28490,-19148,21812,-17214,18928,-2791,-17933,-31055,-2200,1867,17375,9833,-2520,-8348,-18753,-29554,18695,-13546,-15568,-22917,-28898,15183,18026,-28377,-13472,11976,-6782,-31870,-8313,-10361,-13450,-30563,14457,-4965,-30874,1332,630,-1640,-28371,-1114,-10733,-12485,7567,-19940,12407,10093,31428,24688,8494,-1085,-16488,-25633,31916,9430,-13470,-15622,28707,20278,-5470,-20103,10205,-22973,19289,15226,-18864,-18808,15243,-498,-30158,30691,-21079,7133,2885,-1666,31702,-8914,21970,-22982,-6131,-19506,-25204,29513,-10890,25185,-8884,28915,7280,21580,-13754,-10204,-29087,-28104,16611,20269,16295,-22167,22164,6422,-13761,27523,4427,13534,5680,-9470,-13589,29603,26387,5759,9999,-22651,-20541,11145,32236,5056,-13992,24905,20632,-25235,1473,-29727,-8068,29555,-17212,21219,32160,13942,-19724,15465,14295,-9297,-20725,-5472,6279,10696,-30030,26454,7342,13873,6652,28477,-15534,-31766,25155,11881,4647,-6114,15974,21873,226,10595,1037,30496,19983,-11201,-29330,31663,-9264,12901,4510,-7724,-13678,-13508,-6666,31625,7685,-23950,5997,-23754,-1326,-3014,29297,-14133,-11495,31755,26519,10789,-28292,16436,-16496,13238,26471,17330,814,-9920,14640,23402,-1763,30467,28200,4537,-16424,22021,-9801,-2398,-28440,31229,31010,26584,-8443,27149,-3241,11154,-11169,25529,-26421,23737,27220,-20835,-3820,10108,-9591,5437,-26658,-852,-26780,13254,-29107,7734,23118,27895,15387,-23317,20419,-18408,29625,-3250,12067,-31993,22705,14736,-17666,-8866,6211,-19752,6027,6350,32655,29966,10668,12391,23231,21576,3541,-9748,2194,28989,-22458,-30751,-16745,18438,24612,-12720,12919,658,-7540,26803,4518,-20837,-10482,-13217,-28131,-22793,13766,-1597,296,-14290,17236,2022
1,25653,-8608,17291,15923,8545,16517,-24947,25163,14730,10198,-9821,-29584,804,-10528,16693,15116,1693,-21929,21095,-31755,-11974,7261,-28075,-25863,-19367,-871,-6279,-28246,20221,20104,-26544,16771,-32230,310,-9742,-12482,16559,4220,9763,-24692,30812,20349,2298,29289,168,31081,-14328,-22911,22200,-16522,11121,24,-4570,5932,8152,-19933,-22098,24704,-22268,-2950,29818,-27232,4317,-14654,-20767,-22333,-3160,6497,8477,18920,8082,12931,-20001,30302,-19995,-16055,4392,-23703,10820,-9902,1669,27720,15252,18396,-13928,23681,24386,23255,17052,-30881,-10228,-24020,31971,27974,5436,4192,12299,-26398,-22903,-13999,24143,-17860,18073,30855,-13490,25765,19251,16968,4920,18962,3239,25463,-30604,-32232,20414,4127,-16489,29535,8843,24479,-16082,-25742,6936,-10794,-31576,-17008,6541,-30426,16378,26563,-6629,3084,-2802,-5758,-26830,21332,6141,-7367,11063,-27737,-12093,-19220,-14884,-9627,-26462,29697,-10294,14826,30521,13243,2705,-7750,-12306,29625,4665,23727,-16668,20350,30015,2709,-11828,-4997,2658,-29212,28990,-23422,-26012,-8487,29353,9301,1396,-669,25154,-15236,32146,21260,21141,18514,29285,18193,-986,-30167,26075,-24499,32476,-28932,-30246,-14529,-10399,8992,-6892,-16061,-13398,-1584,-22937,17839,-4462,1594,-7911,-1248,28170,32474,-21398,829,-12708,-29533,6239,3441,-17764,6722,1121,-26462,8031,-8789,-14927,-21150,-7125,4365,-11942,10526,-18262,-32439,-12777,-1189,-27300,-13994,-14873,-20131,840,22233,18422,21149,17292,-21262,15381,14927,-5739,7075,-5468,-28314,-31023,15561,-20210,20374,29585,-24517,-27453,15532,-11065,-10468,32097,-22225,3439,1927,-19624,22456,-31177,-20156,-20468,-22426,-26595,-20406,-21843,26428,19544,-9693,17540,8209,-12117,-11539,-12520,-5121,-8830,-18751,-8040,-15173,14000,-27355,30422,-29738,7523,4769,4582,29750,-30709,-30103,14340,-15698,-22387,6411,2821,10947,1786,10061,11831,20041,17138,29915,17857,-4122,-32307,2624,-12887,-13495,11654,-32137,-3764,22303,-17056,-4112,-15850,-18802,29096,-26624,-30962,18339,28024,-8882,22581,17836,1026,-27256,16154,-297
2,-694,-22281,-3841,21464,25679,-29833,30994,-30625,-22118,22352,14633,-32105,-3529,-1499,24386,5672,25824,-2698,-361,9660,-8430,23870,28520,27642,-11819,-2482,17555,-10974,28211,-21401,-21565,-17674,28770,15433,29916,19082,17080,29815,-26933,-4582,4531,-18260,-3309,28958,27550,-23405,-28150,-19998,-29974,11853,24190,-21818,-7584,-12736,2243,25719,1165,24527,362,30937,1647,-24953,28354,-2487,25396,13701,9717,7518,-25073,23582,32750,30099,-1434,15722,9433,-31802,-15905,-23496,-6241,829,-18769,1748,-18684,23164,-6563,-6652,-1588,4058,10010,-12318,-15443,-3118,-15847,7755,-14630,9500,19357,20046,-23880,16116,3216,-25961,-15312,31326,-28941,22697,-24868,-23805,30729,-6344,3352,31798,8515,598,3852,16370,11775,-18270,-10638,-5527,4372,-17362,-26971,15617,31371,6478,16277,-30021,20508,-3368,29784,-11998,-22953,-13513,11508,26834,-29864,-3376,-2795,-13413,-15628,209,-821,-28959,25125,9315,-10095,-28612,20551,1360,-4444,-21805,8945,28688,-10756,-1042,20477,32236,-29296,-5101,23255,-11517,1209,28559,-16313,31933,20935,31657,-23658,-25840,-2930,-30621,20417,11190,18558,-2590,-20649,5816,-14040,9366,-1632,7314,-30132,13565,-18464,-23697,-23320,9382,10385,-4859,25408,26925,-5781,-28448,-18066,-31572,-21334,-32194,1361,-9999,18658,-1237,4927,1647,-10940,-18845,-29016,19698,9658,-8250,8774,9525,-5891,8353,-17845,-14223,-15312,6627,-27271,-30394,30401,3887,-26423,-2144,-26809,-9952,-17529,-26889,31562,-12867,-16539,-23546,27513,6140,31421,-199,-6803,6536,-3125,-20758,-25680,-25363,16824,2708,11553,13415,-21057,-4434,-30304,20501,-11731,-22587,-8167,-5757,-2289,20453,-28056,-1564,-19100,13723,-13178,-5098,8882,-13622,703,-6224,-17413,9731,30244,-23813,26652,4609,16634,15158,30333,-15051,15697,30413,4076,15205,-27632,-11156,-29952,5996,-8216,20845,17489,-4308,18975,4203,-11807,25523,-29703,8227,5184,12692,-14427,-8142,16055,-25581,16455,26809,-1485,-3339,-5471,1874,-17013,-11007,8364,21744,-1550,22005,-17319,-16025,4331,-11994,27900,27759,-13365,13545,11420,22462,5385,-16008,-22017,-
21505,-29363,-8256,-13340,5637,-17987,24953,-15960,2311,-12565,-11553,-24541,-31970,-21753,-32759,19379,-27546,-3111,-29530,27867,4874,20050,-4126,26975,15079,14390,-19840,10100,-295,10157,19063,16517,1562,-23386,-32262,-10269,7849,-3657,8181,6511,-2419,-3035,-18506,19222,16937,13874,13857,5768,-8262,19719,15147,10442,-30858,-32553,-13052,-811,-3392,12541,-30189,-22790,-23199,22527,-30767,-20356,-12014,5776,-13536,-16486,26128,24718,3144,-28907,-2959,28588,1352,19204,-20570,-13520,21656,-25575,-7447,10075,-2186,-31387,17924,30384,-28323,-12718,-11328,-9978,4474,-27081,-5845,-14881,425,19844,-8551,9021,-27551,-12941,-5866,6586,2524,8243,29449,-12258,-7778,-10423,-26529,14096,-2630,-20719,19688,19015,-23370,18516,-16755,-15198,-9874,-37,18864,25298,22639,5720,-5353,21736,11204,9477,26610,-18750,12125,3030,-32431,1706,-24262,2402,14158,-24251,17235,-17969,6296,-28811,-5063,24910,-9649,-12504,-16167,10933,-5152,11488,30426,29747,28940,13056,-23,23300,-8898,6042,15639,10065,-18269,-2473,-23321,24816,-15347,7588,-9767,13104,3545,-17789,-24703,-23785,-5308,-14363,-11075,28038,-16779,8623,9572,-31374,15236,8530,-18891,-23843,5595,2337,-17936,-32376,-17667,-25773,-20547,-22404,-28264,-11739,-29043,-29297,20825,-26165,-6375,-26305,-28939,3915,-29673,27468,530,-9024,-11954,30135,8581,-32664,-8484,-9814,14254,11893,10306,2576,6828,7686,1484,4084,9298,27337,1950,-27651,9953,-29845,-797,-21865,-16341,2694,19190,-1878,22440,-30718,-23295,14776,-23729,-24098,1716,-29261,-30352,-3729,-27567,7768,14380,-10586,-26679,1006,3266,-3115,-6192,8637,-9871,-467,21598,-12593,9701,-4711,19235,3333,-27957,26484,20616,19730,-3596,-8170,-13305,-31911,22684,-22763,29102,-22028,-22229,-29181,2493,-26384,-27262,3572,9469,-14177,2670,24169,-29738,-2252,-28595,9832,-4381,-24250,-25279,-24103,-392,12819,2665,-10645,-18572,-10432,8089,19191,17861,1833,-15891,22728,-29590,17494,-12084,20503,11018,13943,-5174,29780,-23442,25163,-24105,6556,-26699,-22229,10066,-23252,-18477,16361,31446,18687,12703,-27066,-
13199,3207,-11625,5061,-8868,-14004,-8128,26697,23485,-2111,30269,9860,19453,-26891,3471,2509,-12363,18696,16560,4437,-12780,-9343,4770,-11337,-3065,25418,2750,-25302,-28084,-19602,23072,-4750,26086,4435,-15822,24735,-26546,8468,-1325,-30282,-30364,31347,6389,-27277,-4222,32660,-11579,26487,-19526,32562,-16811,-3556,-14958,-8338,-2124,-3527,-6290,11462,13803,25337,11936,21157,-12759,14844,15874,-12054,20763,-3452,4921,18762,19842,-11116,-11348,-17597,15090,-19175,9896,-18801,-28686,-14497,-26462,-16389,-14856,723,12391,-18197,9754,-25009,10920,-379,13087,-27301,20455,18476,26426,13287,-13229,3078,-1651,25726,-17742,-26424,11557,-16873,10287,12425,-22663,29152,31103,-23992,7892,23458,19647,-4481,-11167,-6983,-8514,-28766,-22352,1149,13901,4296,9502,26662,1069,27172,314,-20150,13454,-13628,12905,-25401,4368,30630,-28297,-5662,26689,19268,26489,-29178,20645,7404,12390,-32246,-31108,3696,-23493,27071,10124,-6386,22840,-15878,-2764,19831,6167,-12962,1164,18099,-25885,-8398,-13823,-27475,31016,-20615,-8622,-12247,25656,-7833,-21575,-30302,-9041,-1099,5815,-11784,26382,-8590,-27484,-25355,-17031,11645,25561,-16481,15591,8982,-15139,-6576,13505,-13857,26433,21306,-9583,-546,-3091,-29992,-17366,-28679,27444,18846,-3045,-4573,31953,-16251,-24680,29080,26032,-8834,8974,-27524,-27411,-16488,769,-18844,876,-30095,-30528,-23700,-96,1432,-13018,-16929,-8138,-5059,-28030,31809,-10698,-2715,-2468,15898,9844,-10927,13643,-26081,-17779,18732,28636,-29109,-7525,20167,-2135,15088,4425,6296,-31119,350,-7265,29263,-21318,14576,-5028,15187,26411,27520,-12929,2094,7651,30585,-3360,28367,15999,7916,-24922,10625,26921,-30642,-31847,6830,-17896,-2115,-21254,17289,8672,-18075,-7288,-16450,14065,2391,-31189,-24371,-24446,-32493,6698,13193,-18406,14220,28582,8147,30555,13075,-31010,-12169,31786,3831,29009,-3155,-16350,29816,24922,-693,-27190,14624,-28401,-25552,14951,-29118,-14491,-23982,18348,5365,-11916,-28,-22338,-11159,-19332,9125,-31681,14906,10363,-18713,-17024,-31375,-24254,27556,-25577,-1
1904,25078 diff --git a/tensorflow/lite/micro/integration_tests/seanet/pad/pad18.tflite b/tensorflow/lite/micro/integration_tests/seanet/pad/pad18.tflite new file mode 100644 index 0000000..28fa98f Binary files /dev/null and b/tensorflow/lite/micro/integration_tests/seanet/pad/pad18.tflite differ diff --git a/tensorflow/lite/micro/integration_tests/seanet/pad/pad18_golden_int16.csv b/tensorflow/lite/micro/integration_tests/seanet/pad/pad18_golden_int16.csv new file mode 100644 index 0000000..8876328 --- /dev/null +++ b/tensorflow/lite/micro/integration_tests/seanet/pad/pad18_golden_int16.csv @@ -0,0 +1 @@ +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,894,-12957,6412,-27996,11098,-18597,-3500,22393,19438,28862,-11520,2454,3264,-30241,27447,10304,-8582,-5853,-22905,24559,13343,-17919,6977,12698,31579,25135,15744,12953,-28756,15242,12947,-17952,4560,24645,-31534,-10422,29596,16103,-23398,-20114,10042,-7136,30630,19514,-311,-25603,-29490,2891,-26415,5805,15115,24588,16693,20806,-7734,31214,-11864,-17542,-1365,2454,-11864,-17505,-30963,21029,10176,10875,-2919,20966,11836,169,18648,-15424,-8445,-1425,-31371,2982,-18145,-20043,-10997,-81,11695,-21241,-21349,-11095,30590,22668,-3451,-20006,30706,-29310,-4604,-21288,-28185,-7258,12985,-23108,19796,17148,-30318,-26004,7857,-8611,-13294,1654,-19582,-4333,13354,22515,5285,-32384,-30761,-24551,-18759,23531,31829,6379,8240,-411,-14836,-1617,-27453,-7351,-28244,9843,-30167,14576,-12279,-3065,13277,838,18500,-28884,-6625,18135,22596,328,-1191,-25464,-10493,3738,30856,20080,27878,-12327,-29363,680,-12441,-23927,14131,16628,-26094,32128,-750,-2722,14997,-12829,-27450,-30234,6300,25950,7792,3204,11621,-12743,-8818,-29480,25114,-27181,-15614,30561,12538,-30857,-7239,-17479,22272,1746,-17916,23947,12204,-24920,24663,-1798,4240,27458,9616,-11728,8635,-22297,-20469,21934,17754,-2302,30127,-17833,-16571,30839,-19167,-28753,-3642,25573,13527,-29252,16668,15812,-30448,-23487,30466,15817,-8217,21854,-14081,23854,-11618,5360,18921,-17648,-1
3903,-18347,-6949,-15474,-4767,26700,29751,-26851,-18052,-3891,28117,13702,15473,-16577,14309,-413,3611,-5059,-22315,-6798,14386,20878,-5307,6575,5437,-9036,22758,4174,-23986,-21553,-12113,22773,32028,16496,11041,-11669,7988,17527,-3330,-6075,-22217,-29629,21966,-20210,23997,3727,13169,23744,32650,13203,-19347,19015,-28913,13889,30207,21292,27038,12738,1679,-28969,-30229,-1541,-30207,-13525,-3291,16599,-8699,-23865,31644,-20231,10512,4605,-4377,-13523,5635,31045,-20271,-23222,-8532,25344,-9543,-26035,-27298,24554,-19074,-28437,32326,-20433,20492,25534,-540,-15423,-21691,-24207,-8676,25969,8712,28565,19984,2638,-9969,1619,20814,-25178,14446,22239,6271,19727,-4286,-1480,-4024,-22787,-18894,22860,22971,-11813,5877,26813,-17026,16521,22317,-12288,-6764,-13194,10605,30846,-24936,-30378,4564,-481,-27982,-21254,25041,1419,1463,-5624,-10901,7055,31184,-32359,16765,-4015,-3070,13936,-12252,6564,-8843,1068,-17515,-22319,-14005,30343,28098,-17507,-20626,-27769,-9349,10851,28201,3950,17838,27825,-24482,13095,24528,-26446,-27807,-11054,27055,23574,5230,25882,352,-4038,22364,-16728,-10759,24185,7704,-5332,17399,16708,-9538,-29621,5655,12337,-6339,-31524,25389,-32301,-15341,-8581,-14351,-13701,-4996,31531,-23554,15342,16203,-20998,19846,2775,-13978,14163,2317,8093,-26444,-7872,-15249,-17206,25899,-20383,18446,12984,-4037,26180,21266,-18165,-6503,11538,15356,-22873,16717,10561,-30287,-9288,-25765,7403,2932,793,20490,-8417,17149,-29017,-27716,27722,-4678,-22508,-10675,9289,10917,-15278,-25174,25405,-23115,-5512,-14634,30026,16456,6199,51,3994,17926,25036,23516,15458,28184,-3646,-2460,29351,18470,19611,-9844,21374,19235,20354,10001,3178,-20446,26509,30220,11709,-702,-8004,-14612,-20397,8772,513,-24204,895,-12673,31588,22907,19264,32545,-20034,-20735,-16089,-24503,17198,14300,-6623,25728,14588,16000,21278,22507,-31102,12371,14959,-32168,8741,9980,22177,-13073,-17584,6029,-12045,-3785,8591,32349,248,26474,24541,-1869,-12251,-17548,-5548,-28366,27221,-23973,2575,16218,-1330,-4022,-32522
,-27011,1669,-23422,-4692,2454,31744,-6457,6018,-5971,8819,-4146,-32246,7848,-12147,12731,5302,-16901,-1085,14689,28402,-7369,2258,25154,172,-6187,-13550,-6907,-18721,26292,-3787,6887,-24623,11840,16986,6193,14026,-5160,-1915,-8872,31331,-25670,15559,-16056,-3299,30960,-15605,13280,12849,-25185,-4521,-2384,18867,22739,4785,19479,24377,-31200,12135,26453,15240,5083,-6473,23336,-8261,27776,21422,-8956,-23734,19139,8953,-21541,-15824,-11879,30692,31585,-26854,-4523,19169,-7737,-3223,26210,18667,6522,-14378,-21654,-10319,-19754,-31320,-25544,23754,31411,-16797,8308,17351,12623,29826,26472,-5567,-21581,-7172,11013,-17184,27814,13374,-1144,83,19377,15276,1659,-22833,-6290,7308,15338,29086,31699,32409,17143,-23351,29021,23748,-31900,32738,-3485,-21149,-5224,-7215,-5147,-27088,32410,23073,-17729,24260,8514,-6200,6415,23710,4617,-1230,-30267,32042,467,-24993,-2056,-17290,28477,-15850,15401,23416,-9198,16231,-9673,7942,-1169,-29515,3880,-10472,-30831,3626,-4007,-14616,24496,-21711,9310,19982,-23703,-15654,5068,-23812,-21974,25824,-10509,17685,-16787,27,-23827,-7176,25195,-1629,-27378,-26924,14494,-12270,6300,-20909,16596,28388,20574,5554,-14888,12541,6909,-23137,-11264,-17595,20422,-4546,6237,-20178,27582,-2856,5476,-29410,26581,7044,26230,-8611,21718,-209,-15166,7536,-12572,27287,-31506,9520,-1661,3272,-29743,-27626,18697,-32621,-3666,23132,-13326,355,-8481,1295,-13207,-7424,-19831,-25862,-20010,18672,-29246,12965,-25999,21556,-6890,-30478,-13456,13167,-26321,16001,29638,-31643,22973,-27199,30015,32412,31753,3256,16026,-32004,28384,-18828,-1547,-2367,7783,-13464,-25292,-14991,32334,-1582,-32515,-28019,16618,24595,-17719,-3751,2219,15041,20725,-25857,1158,-27166,-19925,5269,16609,-24454,-28963,-29581,-13694,-8904,-15582,6058,20459,-4495,-2476,-7873,-29889,5745,6398,20592,-17850,-22485,-10272,-16887,-13301,-11918,-31311,-15129,7768,11092,13022,-16612,7295,22532,17514,-9981,6548,-29507,3842,-23596,-5382,-21466,17806,10878,27530,21474,-7872,3816,-6491,27426,-22205,29480,7531,-11
167,8994,30725,10539,-3683,-21133,3108,5449,12604,-28835,28396,25044,21049,-25437,5902,11443,24264,-26265,-13250,-17276,-5637,-2692,6065,-25794,29157,15129,-11653,-14282,-1650,-14214,31214,-14705,1141,10667,31824,-2627,-22998,-9435,4644,13764,-24233,14485,32085,-10076,-14922,19818,998,-1950,18864,2151,19475,31226,-3349,2735,10361,25107,-18694,6121,-26736,-12521,-10656,6736,-9926,8060,-27063,12305,12028,29130,3119,8211,20741,22790,16497,-14451,27452,-26656,5479,25228,2751,12487,-20793,-9238,-5257,31544,5778,24240,32635,23995,28522,-9746,21377,-9911,11594,20767,3568,-25883,16373,-4372,18862,8199,32604,-2205,-19013,-26439,-9527,28567,-14852,-11578,17906,-20283,11569,21425,7407,21928,-4978,334,-18633,-21852,-16255,-3451,-2577,12198,32401,5036,20131,-14931,13258,16444,18677,28548,-16864,-80,6273,-19932,-19965,-1102,14941,-7460,-16123,23281,-15351,11335,6136,-26353,20705,11304,21448,-14679,-15056,-4903,20648,17441,3532,-24167,-10875,10219,-9371,-9186,10151,16466,1421,-22649,-12998,-31312,27410,-4285,27606,-16101,20359,-11631,-6075,20905,26787,12532,993,-8232,-23574,11398,7530,22512,28483,27602,1892,13902,31175,29877,-15854,5917,15224,-10959,-22277,-21029,29507,-7915,-18777,29759,18027,-735,-17795,2714,16328,-22711,30751,6112,-14434,10265,-30565,-25553,-2805,26702,23970,-13669,-29881,28712,-27681,28660,-26936,-19474,-2232,1077,12596,487,18110,23386,-18976,313,-30189,-18050,-13379,28333,-12300,18728,-3211,-24758,-4504,-4736,9254,-6018,-9319,1664,28150,-21227,19839,9715,19971,-28904,-3010,-2976,-15182,-7589,-23179,-9821,28277,4186,14836,18329,7971,2325,-7690,-11411,10230,-6936,5359,12846,-13425,-8434,-32650,20501,-24881,6059,15028,-23017,-30871,-3263,32614,-10698,25113,-1200,-18953,4978,19665,13980,-14767,23772,-16664,-1921,14798,24438,-26171,-14529,-32299,23897,-916,-25175,-3141,-32184,-11545,-12214,-9674,-5506,-23326,26985,10723,32359,-31263,-30818,-14133,10338,27594,15910,23664,19322,26191,17740,-28916,-9714,22630,23795,-6909,24162,-16461,-6046,-20278,32517,-29744,23223,-
373,15263,-14465,2114,-6117,-28489,-22543,7638,28396,-26354,-7818,-18968,9232,1763,-29821,-10621,-21042,-32679,-16294,-21100,7777,13107,-30157,-24020,-22006,4312,31135,25139,28538,-19723,2052,26675,-9178,-15946,-24830,-452,23567,-4739,8339,-22089,32411,-8,21311,-19596,-14718,-4797,-8359,-11187,19557,29159,-16230,5350,2556,2417,3643,-21048,-3654,4030,-6411,-9705,-10308,-19993,-10050,-14334,29199,31726,21677,29739,-27046,3395,-27372,4901,22308,25541,24871,-7756,-25279,-13815,-2407,-8389,-9377,-3717,30327,-21733,16778,17034,906,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,-5298,3011,-13026,17528,-22120,-29588,-28714,6033,795,12585,30481,-1120,22605,-14256,-18714,21608,6997,14706,-10042,-13167,32247,20549,14923,-5783,1977,-31851,-13980,-15408,-676,17554,-12841,18340,24364,-8007,-14231,-2748,-21817,-13576,28649,31411,-28784,4496,-13316,-15461,-22394,-32024,-25476,14049,10979,-10031,17477,16400,16427,23330,14855,-23652,-27062,6255,5002,-22863,-21297,2426,-14656,16789,2463,-3754,-20005,-12994,-20349,-14522,-29785,13619,17069,-16047,25915,8857,-2381,-20073,30221,25857,6776,6494,-21875,27493,21970,-22622,7650,-25149,31317,271,12214,3710,26486,18413,-15097,25060,-18999,11432,-4004,1043,-27288,22963,12077,1848,12049,-25675,408,27379,-13962,253,-17983,24485,-27525,-16628,15200,-14928,-4488,-13341,27813,14225,-17299,-21835,5916,-26634,-19222,31850,19911,-26361,32181,-23389,5582,15188,-19754,32461,15339,-9039,5158,-15945,6049,-24267,24614,9904,-3201,-29662,14041,24673,-8776,17844,-24860,25081,-21436,10508,9014,24273,7885,-21543,22096,-14216,1550,-18852,-4758,-29241,19046,4897,9522,-7039,13869,30623,-7468,-20565,5346,1695,2940,2899,32146,-17251,-30492,-17299,-29429,13085,-28317,-27585,-5033,15603,-28345,-22094,-26719,13357,-27875,-10971,7473,6807,-7098,-5091,-14600,-13224,3738,-29060,22488,13972,-11138,25099,-8975,-5821,-20982,11393,-20204,1704,-27377,-27841,27679,30213,-1369,-25667,9782,-15637,28091,9745,14172,-9612,-15727,-29670
,11155,-24567,-413,-17844,25592,8253,22761,-13016,19722,2251,2242,7901,-26009,23669,24335,23744,-8548,22869,21128,13008,-19013,23768,28404,1150,1664,-3589,3012,-19507,-15726,-17514,-32157,93,4723,-27699,-23314,-18428,-12226,2063,1957,-26707,-19219,2242,23515,22188,11336,19576,-26407,14748,-6989,110,-26595,31114,24687,-14900,-16026,19481,-31338,28615,22526,-3573,623,4768,-17918,28590,11276,13023,-31398,14085,18270,17375,12378,19836,9287,2114,-15200,-12963,-11592,1270,-1731,-27350,12312,12625,-4890,2459,9135,31538,31985,20935,-13892,-29363,15756,2872,11996,-17545,-12064,-3542,-12255,10759,-22199,-7854,28550,-21300,-6998,12072,8003,-9854,-27657,28699,-7599,778,6826,-17072,-6484,10691,-32341,-10160,-11547,1384,9399,-23804,-26132,-27407,5597,28674,-28715,-20963,13109,-8386,25345,14265,-22389,-14593,-29654,-23699,-29069,20093,-8225,-13698,765,-26775,-12727,26921,19740,457,-22685,-4371,-12037,-3618,-23427,20310,-23170,-30742,-8932,8216,-10137,-23911,13087,-2589,21035,1483,-29611,-300,12113,1823,21700,17653,9701,13474,5268,26231,28474,-19882,-14374,-15175,9564,13011,12731,1410,26361,-26205,-13311,28790,-22018,9347,-7622,-18160,-19169,-19385,9371,31832,20384,-25009,20190,-3965,-18325,17619,30182,30083,13800,-32569,-24775,31773,15233,-13083,17525,9797,5898,25895,-14368,4394,17044,-23571,-16243,6156,25613,7588,-22613,21738,4407,12112,-28633,-2007,16850,-31369,16496,16851,-30771,10170,-22996,29425,-300,-21706,23361,-20221,-24659,15968,-19376,28642,-19336,-29607,7164,5626,31994,8282,27489,-23426,-31896,-5658,-11165,-3190,-9201,-32199,10146,-25748,30327,2581,-32358,-24149,-5691,-5662,-22699,32049,20642,16601,29898,30805,-25312,-15578,-19691,7743,-8143,20274,-26868,-27908,27004,8943,16216,-21899,22207,28064,25101,32765,-20378,-6296,21823,6300,730,-20876,7021,26372,-28872,31347,12592,2797,20471,21544,-26318,31839,-28303,3237,4677,-22953,31470,13570,-11440,-31942,3152,-31184,16950,21366,-6808,1683,31021,-23992,-16450,-4482,7122,25507,-30885,18862,-22957,19509,-361,-12280,-23310,2009
2,-24242,-12304,-27107,19138,-4681,-15105,-14978,5655,32444,6732,31052,-13730,-6742,-2620,-22444,30688,20474,-21064,22570,32512,10202,-32621,8348,-20483,13535,1452,-1578,-15883,-8204,-31313,-9072,31789,27789,-23313,-7969,24162,-25174,-14817,-22758,5089,9435,3559,26923,9119,-31672,5534,-12052,25542,-27823,22669,-30408,-10576,-14388,23881,-9153,-27317,-15201,-11083,-28415,-7759,28992,-5996,16485,13055,14605,15437,-11985,-7067,10321,4427,-5461,-13735,5946,-1575,-232,-4011,-30179,-20504,-31822,-27241,-20255,12620,-29368,-16755,-31889,-8845,-26543,-29403,-14167,30730,-3640,14227,9129,28744,15900,18519,-17927,-32742,-14562,2913,-1513,8777,10504,-31492,26533,-18415,-31621,16624,-15000,32407,-2520,16869,-7333,-22473,-881,-6432,-32114,16979,23993,-3546,-21983,11342,25975,32667,-20419,-23957,8774,-24699,-25095,6306,-10193,3892,-10568,10986,-24220,-26339,-29945,27640,-11022,10572,-6883,27306,13637,-31010,9232,-2661,31128,18165,29577,-3875,81,24314,-28721,12391,-32605,-23835,-17467,24804,-7682,-7367,-29935,11301,-26807,-16141,-17297,28334,-23321,6557,-9003,-1761,997,19184,26723,-23150,-17131,10958,-3084,-27430,6257,20320,15483,-29970,-14261,18653,29712,-15641,31172,-15693,8414,18184,-15947,25766,4439,13122,10191,-15729,-12979,-11551,6642,21164,-24888,-30621,-2182,-6739,10658,5125,-31007,28522,15110,1126,18059,11329,8340,17445,-16055,-21842,-21938,12030,-25450,-2174,-27416,-2827,30479,13097,2278,24261,-13990,28164,-30514,-4392,-11136,21552,15349,-16730,21232,23576,-26540,3552,-10260,-11573,-18203,12156,19004,-20806,15177,-971,-21202,-13739,5739,-11093,-21992,25079,916,-17535,19837,-24065,-30631,-22448,3069,-7628,6954,-14767,-22125,15053,18391,-8828,-13258,10768,-3586,-15194,-26654,18425,-2143,7426,5819,-3368,-22585,-29351,-24953,8186,-16338,2314,-28466,-21971,15833,30778,-11420,-26189,4296,-24005,-4649,14036,-29164,3743,15127,-16219,-16861,13343,-13529,26052,13921,-8080,-1949,26585,-32122,30339,13216,-22336,11272,31197,15938,8829,16236,-7995,-23808,-32456,-13179,-32506,-632,6581
,20401,-17839,-14671,-6942,-7833,19878,-23836,4981,9131,2531,9793,29632,-18882,10321,-32121,-22828,-18081,13424,25324,1866,-2336,15330,-2919,-21558,-27522,10657,-28088,31157,-28831,969,-27806,3021,-25677,-13732,24607,-15788,5554,31935,31592,20581,5300,-28797,-17985,-12216,7434,-13722,19772,23591,21313,7732,2226,129,7636,21355,-21787,-14614,-4364,28387,-25512,-31583,6719,16909,-29933,5294,21746,20108,29974,-5196,-6989,-18280,-12217,3002,3666,-6336,-23706,22795,32125,19592,28991,-16328,-30539,-23750,-1056,-26220,15501,-9734,-20362,-11136,-30479,-28667,13743,-24363,-26967,29638,25174,-6051,-9730,20074,10308,16073,-12949,-2870,-10028,-1889,-9805,-12390,-19821,-24225,6075,2535,-12031,11311,-23734,-30309,-12467,7557,-18923,19067,-30779,16626,13167,-29183,17548,-25000,25244,-24354,-28240,14995,2950,-23380,-22451,-27122,-11606,-10804,-14622,5479,-4351,11670,-9495,26809,24648,23469,-17098,-32007,-22440,30285,27445,-29301,-24286,-24477,-15321,-20854,27188,-30722,15966,-30022,-9659,-6772,-17604,22360,-17341,11470,4861,-7948,10105,-4994,20021,4020,24831,-32565,30681,-29523,-24123,-17822,-12306,27738,-8558,12270,-31035,-3021,-18872,27609,-24281,-1746,15751,29083,-30955,32446,29410,-2724,29730,9273,-25573,-14626,18969,-20656,15512,19143,4519,18245,-2173,-2362,-21529,29345,-9142,7973,-27070,-32084,-13966,-23745,17226,27837,-10778,16991,31322,-13395,-14878,15229,8048,9061,-24166,11919,4583,2758,185,-15791,142,31060,-11285,-1263,4195,24176,23396,29639,52,-5137,14693,-25866,-23294,-4453,4267,32466,29346,-5915,-3865,2419,-25167,18600,10769,10610,25723,9402,4534,-24016,-26170,31331,-7317,-2998,-29069,-13320,26405,-31795,-18848,31816,20042,-31989,32485,-25409,-9630,18717,22822,-21468,-23196,23093,-5483,29948,14064,23038,26336,29764,-18176,24222,-10235,-6520,19966,6269,17463,-11775,-16730,-12269,-10981,-28269,10649,-10716,19480,20617,-17494,-10290,-56,-13847,-7337,-10502,-6099,-2268,15238,30046,-13339,13644,-30067,4914,24638,-13335,-19751,-20036,13009,18200,-16136,21834,25581,19401,-3260
9,-22765,-684,-22414,-1061,30706,-12680,27266,62,21469,-22883,-27225,-6529,-29196,32051,-15616,17238,-10954,13487,-20547,-30730,-5705,-17062,13315,16556,17156,-21854,-874,9096,-29012,-3956,10758,4647,6130,5061,66,25298,-5645,-1480,5858,17551,6778,10038,-21052,19586,1824,-22619,-6609,-30998,-31699,-30129,26483,-17531,18820,-5619,-23761,9751,-17721,9657,-2132,-22041,8563,4349,-26377,-1586,7662,-25066,16939,-22436,-15698,10330,14537,17987,10037,14382,-24240,30634,14319,1234,-3718,12894,656,23979,12105,4343,11688,14788,-26302,-7703,-19254,9551,2941,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 diff --git a/tensorflow/lite/micro/integration_tests/seanet/pad/pad18_input0_int16.csv b/tensorflow/lite/micro/integration_tests/seanet/pad/pad18_input0_int16.csv new file mode 100644 index 0000000..73c809a --- /dev/null +++ b/tensorflow/lite/micro/integration_tests/seanet/pad/pad18_input0_int16.csv @@ -0,0 +1 @@ +894,-12957,6412,-27996,11098,-18597,-3500,22393,19438,28862,-11520,2454,3264,-30241,27447,10304,-8582,-5853,-22905,24559,13343,-17919,6977,12698,31579,25135,15744,12953,-28756,15242,12947,-17952,4560,24645,-31534,-10422,29596,16103,-23398,-20114,10042,-7136,30630,19514,-311,-25603,-29490,2891,-26415,5805,15115,24588,16693,20806,-7734,31214,-11864,-17542,-1365,2454,-11864,-17505,-30963,21029,10176,10875,-2919,20966,11836,169,18648,-15424,-8445,-1425,-31371,2982,-18145,-20043,-10997,-81,11695,-21241,-21349,-11095,30590,22668,-3451,-20006,30706,-29310,-4604,-21288,-28185,-7258,12985,-23108,19796,17148,-30318,-26004,7857,-8611,-13294,1654,-19582,-4333,13354,22515,5285,-32384,-30761,-24551,-18759,23531,31829,6379,8240,-411,-14836,-1617,-27453,-7351,-28244,9843,-30167,14576,-12279,-3065,13277,838,18500,-28884,-6625,18135,22596,328,-1191,-25464,-10493,3738,30856,20080,27878,-12327,-29363,680,-12441,-23927,14131,16628,-26094,32128,-750,-2722,14997,-12829,-27450,-30234,6300,25950,7792,3204,11621,-12743,-8818,-29480,25114,-27181,-15614,30561,12538,-30857,-7239,-17479,22272,1746,
-17916,23947,12204,-24920,24663,-1798,4240,27458,9616,-11728,8635,-22297,-20469,21934,17754,-2302,30127,-17833,-16571,30839,-19167,-28753,-3642,25573,13527,-29252,16668,15812,-30448,-23487,30466,15817,-8217,21854,-14081,23854,-11618,5360,18921,-17648,-13903,-18347,-6949,-15474,-4767,26700,29751,-26851,-18052,-3891,28117,13702,15473,-16577,14309,-413,3611,-5059,-22315,-6798,14386,20878,-5307,6575,5437,-9036,22758,4174,-23986,-21553,-12113,22773,32028,16496,11041,-11669,7988,17527,-3330,-6075,-22217,-29629,21966,-20210,23997,3727,13169,23744,32650,13203,-19347,19015,-28913,13889,30207,21292,27038,12738,1679,-28969,-30229,-1541,-30207,-13525,-3291,16599,-8699,-23865,31644,-20231,10512,4605,-4377,-13523,5635,31045,-20271,-23222,-8532,25344,-9543,-26035,-27298,24554,-19074,-28437,32326,-20433,20492,25534,-540,-15423,-21691,-24207,-8676,25969,8712,28565,19984,2638,-9969,1619,20814,-25178,14446,22239,6271,19727,-4286,-1480,-4024,-22787,-18894,22860,22971,-11813,5877,26813,-17026,16521,22317,-12288,-6764,-13194,10605,30846,-24936,-30378,4564,-481,-27982,-21254,25041,1419,1463,-5624,-10901,7055,31184,-32359,16765,-4015,-3070,13936,-12252,6564,-8843,1068,-17515,-22319,-14005,30343,28098,-17507,-20626,-27769,-9349,10851,28201,3950,17838,27825,-24482,13095,24528,-26446,-27807,-11054,27055,23574,5230,25882,352,-4038,22364,-16728,-10759,24185,7704,-5332,17399,16708,-9538,-29621,5655,12337,-6339,-31524,25389,-32301,-15341,-8581,-14351,-13701,-4996,31531,-23554,15342,16203,-20998,19846,2775,-13978,14163,2317,8093,-26444,-7872,-15249,-17206,25899,-20383,18446,12984,-4037,26180,21266,-18165,-6503,11538,15356,-22873,16717,10561,-30287,-9288,-25765,7403,2932,793,20490,-8417,17149,-29017,-27716,27722,-4678,-22508,-10675,9289,10917,-15278,-25174,25405,-23115,-5512,-14634,30026,16456,6199,51,3994,17926,25036,23516,15458,28184,-3646,-2460,29351,18470,19611,-9844,21374,19235,20354,10001,3178,-20446,26509,30220,11709,-702,-8004,-14612,-20397,8772,513,-24204,895,-12673,31588,22907,19264,32545
,-20034,-20735,-16089,-24503,17198,14300,-6623,25728,14588,16000,21278,22507,-31102,12371,14959,-32168,8741,9980,22177,-13073,-17584,6029,-12045,-3785,8591,32349,248,26474,24541,-1869,-12251,-17548,-5548,-28366,27221,-23973,2575,16218,-1330,-4022,-32522,-27011,1669,-23422,-4692,2454,31744,-6457,6018,-5971,8819,-4146,-32246,7848,-12147,12731,5302,-16901,-1085,14689,28402,-7369,2258,25154,172,-6187,-13550,-6907,-18721,26292,-3787,6887,-24623,11840,16986,6193,14026,-5160,-1915,-8872,31331,-25670,15559,-16056,-3299,30960,-15605,13280,12849,-25185,-4521,-2384,18867,22739,4785,19479,24377,-31200,12135,26453,15240,5083,-6473,23336,-8261,27776,21422,-8956,-23734,19139,8953,-21541,-15824,-11879,30692,31585,-26854,-4523,19169,-7737,-3223,26210,18667,6522,-14378,-21654,-10319,-19754,-31320,-25544,23754,31411,-16797,8308,17351,12623,29826,26472,-5567,-21581,-7172,11013,-17184,27814,13374,-1144,83,19377,15276,1659,-22833,-6290,7308,15338,29086,31699,32409,17143,-23351,29021,23748,-31900,32738,-3485,-21149,-5224,-7215,-5147,-27088,32410,23073,-17729,24260,8514,-6200,6415,23710,4617,-1230,-30267,32042,467,-24993,-2056,-17290,28477,-15850,15401,23416,-9198,16231,-9673,7942,-1169,-29515,3880,-10472,-30831,3626,-4007,-14616,24496,-21711,9310,19982,-23703,-15654,5068,-23812,-21974,25824,-10509,17685,-16787,27,-23827,-7176,25195,-1629,-27378,-26924,14494,-12270,6300,-20909,16596,28388,20574,5554,-14888,12541,6909,-23137,-11264,-17595,20422,-4546,6237,-20178,27582,-2856,5476,-29410,26581,7044,26230,-8611,21718,-209,-15166,7536,-12572,27287,-31506,9520,-1661,3272,-29743,-27626,18697,-32621,-3666,23132,-13326,355,-8481,1295,-13207,-7424,-19831,-25862,-20010,18672,-29246,12965,-25999,21556,-6890,-30478,-13456,13167,-26321,16001,29638,-31643,22973,-27199,30015,32412,31753,3256,16026,-32004,28384,-18828,-1547,-2367,7783,-13464,-25292,-14991,32334,-1582,-32515,-28019,16618,24595,-17719,-3751,2219,15041,20725,-25857,1158,-27166,-19925,5269,16609,-24454,-28963,-29581,-13694,-8904,-15582,6058,20
459,-4495,-2476,-7873,-29889,5745,6398,20592,-17850,-22485,-10272,-16887,-13301,-11918,-31311,-15129,7768,11092,13022,-16612,7295,22532,17514,-9981,6548,-29507,3842,-23596,-5382,-21466,17806,10878,27530,21474,-7872,3816,-6491,27426,-22205,29480,7531,-11167,8994,30725,10539,-3683,-21133,3108,5449,12604,-28835,28396,25044,21049,-25437,5902,11443,24264,-26265,-13250,-17276,-5637,-2692,6065,-25794,29157,15129,-11653,-14282,-1650,-14214,31214,-14705,1141,10667,31824,-2627,-22998,-9435,4644,13764,-24233,14485,32085,-10076,-14922,19818,998,-1950,18864,2151,19475,31226,-3349,2735,10361,25107,-18694,6121,-26736,-12521,-10656,6736,-9926,8060,-27063,12305,12028,29130,3119,8211,20741,22790,16497,-14451,27452,-26656,5479,25228,2751,12487,-20793,-9238,-5257,31544,5778,24240,32635,23995,28522,-9746,21377,-9911,11594,20767,3568,-25883,16373,-4372,18862,8199,32604,-2205,-19013,-26439,-9527,28567,-14852,-11578,17906,-20283,11569,21425,7407,21928,-4978,334,-18633,-21852,-16255,-3451,-2577,12198,32401,5036,20131,-14931,13258,16444,18677,28548,-16864,-80,6273,-19932,-19965,-1102,14941,-7460,-16123,23281,-15351,11335,6136,-26353,20705,11304,21448,-14679,-15056,-4903,20648,17441,3532,-24167,-10875,10219,-9371,-9186,10151,16466,1421,-22649,-12998,-31312,27410,-4285,27606,-16101,20359,-11631,-6075,20905,26787,12532,993,-8232,-23574,11398,7530,22512,28483,27602,1892,13902,31175,29877,-15854,5917,15224,-10959,-22277,-21029,29507,-7915,-18777,29759,18027,-735,-17795,2714,16328,-22711,30751,6112,-14434,10265,-30565,-25553,-2805,26702,23970,-13669,-29881,28712,-27681,28660,-26936,-19474,-2232,1077,12596,487,18110,23386,-18976,313,-30189,-18050,-13379,28333,-12300,18728,-3211,-24758,-4504,-4736,9254,-6018,-9319,1664,28150,-21227,19839,9715,19971,-28904,-3010,-2976,-15182,-7589,-23179,-9821,28277,4186,14836,18329,7971,2325,-7690,-11411,10230,-6936,5359,12846,-13425,-8434,-32650,20501,-24881,6059,15028,-23017,-30871,-3263,32614,-10698,25113,-1200,-18953,4978,19665,13980,-14767,23772,-16664,-1921,14
798,24438,-26171,-14529,-32299,23897,-916,-25175,-3141,-32184,-11545,-12214,-9674,-5506,-23326,26985,10723,32359,-31263,-30818,-14133,10338,27594,15910,23664,19322,26191,17740,-28916,-9714,22630,23795,-6909,24162,-16461,-6046,-20278,32517,-29744,23223,-373,15263,-14465,2114,-6117,-28489,-22543,7638,28396,-26354,-7818,-18968,9232,1763,-29821,-10621,-21042,-32679,-16294,-21100,7777,13107,-30157,-24020,-22006,4312,31135,25139,28538,-19723,2052,26675,-9178,-15946,-24830,-452,23567,-4739,8339,-22089,32411,-8,21311,-19596,-14718,-4797,-8359,-11187,19557,29159,-16230,5350,2556,2417,3643,-21048,-3654,4030,-6411,-9705,-10308,-19993,-10050,-14334,29199,31726,21677,29739,-27046,3395,-27372,4901,22308,25541,24871,-7756,-25279,-13815,-2407,-8389,-9377,-3717,30327,-21733,16778,17034,906,-5298,3011,-13026,17528,-22120,-29588,-28714,6033,795,12585,30481,-1120,22605,-14256,-18714,21608,6997,14706,-10042,-13167,32247,20549,14923,-5783,1977,-31851,-13980,-15408,-676,17554,-12841,18340,24364,-8007,-14231,-2748,-21817,-13576,28649,31411,-28784,4496,-13316,-15461,-22394,-32024,-25476,14049,10979,-10031,17477,16400,16427,23330,14855,-23652,-27062,6255,5002,-22863,-21297,2426,-14656,16789,2463,-3754,-20005,-12994,-20349,-14522,-29785,13619,17069,-16047,25915,8857,-2381,-20073,30221,25857,6776,6494,-21875,27493,21970,-22622,7650,-25149,31317,271,12214,3710,26486,18413,-15097,25060,-18999,11432,-4004,1043,-27288,22963,12077,1848,12049,-25675,408,27379,-13962,253,-17983,24485,-27525,-16628,15200,-14928,-4488,-13341,27813,14225,-17299,-21835,5916,-26634,-19222,31850,19911,-26361,32181,-23389,5582,15188,-19754,32461,15339,-9039,5158,-15945,6049,-24267,24614,9904,-3201,-29662,14041,24673,-8776,17844,-24860,25081,-21436,10508,9014,24273,7885,-21543,22096,-14216,1550,-18852,-4758,-29241,19046,4897,9522,-7039,13869,30623,-7468,-20565,5346,1695,2940,2899,32146,-17251,-30492,-17299,-29429,13085,-28317,-27585,-5033,15603,-28345,-22094,-26719,13357,-27875,-10971,7473,6807,-7098,-5091,-14600,-13224,3738
,-29060,22488,13972,-11138,25099,-8975,-5821,-20982,11393,-20204,1704,-27377,-27841,27679,30213,-1369,-25667,9782,-15637,28091,9745,14172,-9612,-15727,-29670,11155,-24567,-413,-17844,25592,8253,22761,-13016,19722,2251,2242,7901,-26009,23669,24335,23744,-8548,22869,21128,13008,-19013,23768,28404,1150,1664,-3589,3012,-19507,-15726,-17514,-32157,93,4723,-27699,-23314,-18428,-12226,2063,1957,-26707,-19219,2242,23515,22188,11336,19576,-26407,14748,-6989,110,-26595,31114,24687,-14900,-16026,19481,-31338,28615,22526,-3573,623,4768,-17918,28590,11276,13023,-31398,14085,18270,17375,12378,19836,9287,2114,-15200,-12963,-11592,1270,-1731,-27350,12312,12625,-4890,2459,9135,31538,31985,20935,-13892,-29363,15756,2872,11996,-17545,-12064,-3542,-12255,10759,-22199,-7854,28550,-21300,-6998,12072,8003,-9854,-27657,28699,-7599,778,6826,-17072,-6484,10691,-32341,-10160,-11547,1384,9399,-23804,-26132,-27407,5597,28674,-28715,-20963,13109,-8386,25345,14265,-22389,-14593,-29654,-23699,-29069,20093,-8225,-13698,765,-26775,-12727,26921,19740,457,-22685,-4371,-12037,-3618,-23427,20310,-23170,-30742,-8932,8216,-10137,-23911,13087,-2589,21035,1483,-29611,-300,12113,1823,21700,17653,9701,13474,5268,26231,28474,-19882,-14374,-15175,9564,13011,12731,1410,26361,-26205,-13311,28790,-22018,9347,-7622,-18160,-19169,-19385,9371,31832,20384,-25009,20190,-3965,-18325,17619,30182,30083,13800,-32569,-24775,31773,15233,-13083,17525,9797,5898,25895,-14368,4394,17044,-23571,-16243,6156,25613,7588,-22613,21738,4407,12112,-28633,-2007,16850,-31369,16496,16851,-30771,10170,-22996,29425,-300,-21706,23361,-20221,-24659,15968,-19376,28642,-19336,-29607,7164,5626,31994,8282,27489,-23426,-31896,-5658,-11165,-3190,-9201,-32199,10146,-25748,30327,2581,-32358,-24149,-5691,-5662,-22699,32049,20642,16601,29898,30805,-25312,-15578,-19691,7743,-8143,20274,-26868,-27908,27004,8943,16216,-21899,22207,28064,25101,32765,-20378,-6296,21823,6300,730,-20876,7021,26372,-28872,31347,12592,2797,20471,21544,-26318,31839,-28303,3237,46
77,-22953,31470,13570,-11440,-31942,3152,-31184,16950,21366,-6808,1683,31021,-23992,-16450,-4482,7122,25507,-30885,18862,-22957,19509,-361,-12280,-23310,20092,-24242,-12304,-27107,19138,-4681,-15105,-14978,5655,32444,6732,31052,-13730,-6742,-2620,-22444,30688,20474,-21064,22570,32512,10202,-32621,8348,-20483,13535,1452,-1578,-15883,-8204,-31313,-9072,31789,27789,-23313,-7969,24162,-25174,-14817,-22758,5089,9435,3559,26923,9119,-31672,5534,-12052,25542,-27823,22669,-30408,-10576,-14388,23881,-9153,-27317,-15201,-11083,-28415,-7759,28992,-5996,16485,13055,14605,15437,-11985,-7067,10321,4427,-5461,-13735,5946,-1575,-232,-4011,-30179,-20504,-31822,-27241,-20255,12620,-29368,-16755,-31889,-8845,-26543,-29403,-14167,30730,-3640,14227,9129,28744,15900,18519,-17927,-32742,-14562,2913,-1513,8777,10504,-31492,26533,-18415,-31621,16624,-15000,32407,-2520,16869,-7333,-22473,-881,-6432,-32114,16979,23993,-3546,-21983,11342,25975,32667,-20419,-23957,8774,-24699,-25095,6306,-10193,3892,-10568,10986,-24220,-26339,-29945,27640,-11022,10572,-6883,27306,13637,-31010,9232,-2661,31128,18165,29577,-3875,81,24314,-28721,12391,-32605,-23835,-17467,24804,-7682,-7367,-29935,11301,-26807,-16141,-17297,28334,-23321,6557,-9003,-1761,997,19184,26723,-23150,-17131,10958,-3084,-27430,6257,20320,15483,-29970,-14261,18653,29712,-15641,31172,-15693,8414,18184,-15947,25766,4439,13122,10191,-15729,-12979,-11551,6642,21164,-24888,-30621,-2182,-6739,10658,5125,-31007,28522,15110,1126,18059,11329,8340,17445,-16055,-21842,-21938,12030,-25450,-2174,-27416,-2827,30479,13097,2278,24261,-13990,28164,-30514,-4392,-11136,21552,15349,-16730,21232,23576,-26540,3552,-10260,-11573,-18203,12156,19004,-20806,15177,-971,-21202,-13739,5739,-11093,-21992,25079,916,-17535,19837,-24065,-30631,-22448,3069,-7628,6954,-14767,-22125,15053,18391,-8828,-13258,10768,-3586,-15194,-26654,18425,-2143,7426,5819,-3368,-22585,-29351,-24953,8186,-16338,2314,-28466,-21971,15833,30778,-11420,-26189,4296,-24005,-4649,14036,-29164,3743,1512
7,-16219,-16861,13343,-13529,26052,13921,-8080,-1949,26585,-32122,30339,13216,-22336,11272,31197,15938,8829,16236,-7995,-23808,-32456,-13179,-32506,-632,6581,20401,-17839,-14671,-6942,-7833,19878,-23836,4981,9131,2531,9793,29632,-18882,10321,-32121,-22828,-18081,13424,25324,1866,-2336,15330,-2919,-21558,-27522,10657,-28088,31157,-28831,969,-27806,3021,-25677,-13732,24607,-15788,5554,31935,31592,20581,5300,-28797,-17985,-12216,7434,-13722,19772,23591,21313,7732,2226,129,7636,21355,-21787,-14614,-4364,28387,-25512,-31583,6719,16909,-29933,5294,21746,20108,29974,-5196,-6989,-18280,-12217,3002,3666,-6336,-23706,22795,32125,19592,28991,-16328,-30539,-23750,-1056,-26220,15501,-9734,-20362,-11136,-30479,-28667,13743,-24363,-26967,29638,25174,-6051,-9730,20074,10308,16073,-12949,-2870,-10028,-1889,-9805,-12390,-19821,-24225,6075,2535,-12031,11311,-23734,-30309,-12467,7557,-18923,19067,-30779,16626,13167,-29183,17548,-25000,25244,-24354,-28240,14995,2950,-23380,-22451,-27122,-11606,-10804,-14622,5479,-4351,11670,-9495,26809,24648,23469,-17098,-32007,-22440,30285,27445,-29301,-24286,-24477,-15321,-20854,27188,-30722,15966,-30022,-9659,-6772,-17604,22360,-17341,11470,4861,-7948,10105,-4994,20021,4020,24831,-32565,30681,-29523,-24123,-17822,-12306,27738,-8558,12270,-31035,-3021,-18872,27609,-24281,-1746,15751,29083,-30955,32446,29410,-2724,29730,9273,-25573,-14626,18969,-20656,15512,19143,4519,18245,-2173,-2362,-21529,29345,-9142,7973,-27070,-32084,-13966,-23745,17226,27837,-10778,16991,31322,-13395,-14878,15229,8048,9061,-24166,11919,4583,2758,185,-15791,142,31060,-11285,-1263,4195,24176,23396,29639,52,-5137,14693,-25866,-23294,-4453,4267,32466,29346,-5915,-3865,2419,-25167,18600,10769,10610,25723,9402,4534,-24016,-26170,31331,-7317,-2998,-29069,-13320,26405,-31795,-18848,31816,20042,-31989,32485,-25409,-9630,18717,22822,-21468,-23196,23093,-5483,29948,14064,23038,26336,29764,-18176,24222,-10235,-6520,19966,6269,17463,-11775,-16730,-12269,-10981,-28269,10649,-10716,19480,20617
,-17494,-10290,-56,-13847,-7337,-10502,-6099,-2268,15238,30046,-13339,13644,-30067,4914,24638,-13335,-19751,-20036,13009,18200,-16136,21834,25581,19401,-32609,-22765,-684,-22414,-1061,30706,-12680,27266,62,21469,-22883,-27225,-6529,-29196,32051,-15616,17238,-10954,13487,-20547,-30730,-5705,-17062,13315,16556,17156,-21854,-874,9096,-29012,-3956,10758,4647,6130,5061,66,25298,-5645,-1480,5858,17551,6778,10038,-21052,19586,1824,-22619,-6609,-30998,-31699,-30129,26483,-17531,18820,-5619,-23761,9751,-17721,9657,-2132,-22041,8563,4349,-26377,-1586,7662,-25066,16939,-22436,-15698,10330,14537,17987,10037,14382,-24240,30634,14319,1234,-3718,12894,656,23979,12105,4343,11688,14788,-26302,-7703,-19254,9551,2941 diff --git a/tensorflow/lite/micro/integration_tests/seanet/pad/pad1_golden_int16.csv b/tensorflow/lite/micro/integration_tests/seanet/pad/pad1_golden_int16.csv new file mode 100644 index 0000000..145c803 --- /dev/null +++ b/tensorflow/lite/micro/integration_tests/seanet/pad/pad1_golden_int16.csv @@ -0,0 +1 @@ 
+0,0,0,0,0,0,0,0,31476,17068,-22738,22758,24211,-26866,-30146,30915,24376,19637,-32048,16208,10391,-27230,26851,2510,9696,-10184,28801,-16325,-15667,-273,21897,-26605,-7526,26474,8801,-7925,22883,5458,-7925,-7433,-16397,-13350,14050,15111,32681,11580,-30267,-12392,-29391,-32039,-11203,18877,15482,-15701,15210,22722,11107,27606,-3390,-6398,30754,31432,19440,-17418,1975,10277,9581,19327,-2316,-29487,18748,12550,-15466,1254,-2402,-30679,-30417,-10410,-29583,19261,-19707,-12567,8456,17644,-25851,16065,-11541,24616,-23265,-9749,-13237,-14009,19174,-9406,-787,6358,14379,-22017,-31949,29112,12091,10850,15158,-30608,6170,-12819,13071,-15582,15677,-11501,-18510,22734,-6986,-10723,-11715,26359,17770,31137,-11112,13136,7993,-23479,3986,-30056,-12235,14890,-12516,18995,-8380,-796,-13576,-29424,-9070,-23361,-19014,4761,-16710,6983,13304,22596,-24238,-8445,9065,-17970,-405,-22223,10941,-29056,19770,-1469,-11139,-22257,-10482,24759,19934,-10923,-7980,18881,4070,12730,28887,10598,-11975,-27450,-3753,-616,-29787,15532,-375,-31135,14374,-14538,31131,-29975,18892,24934,-3064,-12259,-9631,6288,11754,14573,-25649,12360,-6186,-11525,23274,-26720,-22545,3979,-32076,27077,11275,12054,12056,-17467,13891,4813,22105,27076,-11159,3791,-12401,16177,27279,-11648,-26364,14819,-12746,11442,-14627,-17000,-21439,-131,19805,-6436,-19460,13608,-28921,30920,-6129,-21658,-1861,-11515,-19962,6962,24192,22823,-6315,21839,4231,-6007,12146,-10827,-1817,-32512,-30685,-27945,31971,-2012,-4941,-5511,-31529,11395,10366,27223,-22908,-26694,18336,-25232,32758,-30204,-20274,-208,-21793,-3994,31367,-22035,-18426,-28431,32001,-8118,-15549,21930,24249,-25696,-26318,26363,-19783,-16091,20074,-11164,-32704,-4521,1356,6929,32330,7370,-17265,8208,-31139,-21366,-6280,5458,16801,28724,7177,-13473,-27214,-8557,-9608,4581,-9720,21256,-11493,-11884,1233,15084,-31992,-1490,8822,29877,-8102,-12184,-25314,-27945,18424,26999,-23029,-7359,-22389,30095,-4662,13998,-6283,-24211,21446,-4919,-10560,29749,-730,-26657,2305,3758,25119,-2
7906,27725,27253,-10223,-15240,-20070,11280,-5125,-17253,-27314,-27225,-23307,-26026,-20034,-26373,-10923,-6171,3756,-5213,-8843,4974,12954,-13678,13745,30157,30480,30325,-12032,-23725,-14738,-14098,-21443,15752,31179,-987,19255,26806,24760,-7111,-23944,-19906,-18508,-8543,-1138,11774,4063,27868,20047,-2256,-17879,30614,-8223,30410,2558,-29406,-20510,10661,-28198,7179,32187,20207,-13454,-4590,18526,28605,26113,27063,-3765,-30589,-28168,27733,24443,-3292,-1618,12373,-30545,20377,18518,-16326,7674,8013,4767,-18978,-5816,-15917,-1147,22651,-2023,12287,3750,-26471,-15249,13253,-16535,-7838,-1701,8752,-24675,22990,9965,-3,-12362,15402,5183,21390,28473,-20530,-27230,-20633,-27813,-25631,-20826,26325,29380,-6191,9507,19409,30530,24864,-8238,26009,-13960,14571,26834,-204,-4061,-15914,-6433,14825,2627,-29331,6641,-30292,-3441,20997,-15801,3004,-14521,-12563,-26705,-23040,-3720,21672,-25016,-25866,1668,-31679,29970,-11236,-15179,-20735,-3548,9755,-6574,6236,-24117,-31118,21765,7829,14558,-8675,-27924,-31164,-19291,8552,-4049,26501,31724,-5055,12743,-32064,30771,4089,28135,-1564,-16187,-17764,22475,29867,-13620,25193,18988,6043,-2087,121,-32758,-15199,-8198,-5377,16111,9243,21491,15719,17525,4678,26974,19835,9581,-6896,27077,8975,-16978,-16086,3339,28276,3782,4728,-2590,-19877,-30504,-2239,-22795,27062,27296,-9781,-8565,-1527,-24914,20998,-32493,1302,-31524,-16134,8448,6217,31822,16538,2229,-30673,16540,-11549,5378,-28670,-14869,28451,-14078,27329,-24632,17545,-22075,21721,-32216,-13761,-6760,-22223,13243,7136,-13943,-27207,23654,-9939,-5294,30450,-19193,-25060,32646,6986,-1020,-20575,12208,-21876,26687,9663,19917,-29934,15015,-4393,10453,2043,669,-13058,-18443,7510,-9734,-13765,-30945,-30917,-4159,29453,-21666,14931,20797,17670,4937,3958,16203,-24925,10962,-10771,-2262,8597,30980,-8088,-30802,-20285,16187,18511,19339,24819,-7335,28233,6179,-16337,22913,17870,-1911,-30895,-2689,-22476,3965,28321,10737,-1581,19068,-5446,-16402,-16592,3527,-28698,-30921,-28262,-8272,11605,-5340,
-23173,4228,-25633,-13529,27081,-28818,21049,20241,6836,-3934,-6494,-19367,-20482,23377,-8593,7742,11843,-4252,-32665,4957,30058,15268,-6686,20489,-24014,15607,10061,-12648,-1992,-4549,-30767,-19397,30081,14508,17371,-10552,131,22668,4972,11045,31778,9447,8690,20878,-31081,-27391,11950,25836,-31389,13289,-28772,-28732,8443,18291,-5866,19279,27559,-27432,330,-30176,1090,11660,-14678,23765,-25939,25798,-31262,-18017,30639,-14176,-20332,15000,-3355,15015,5952,-9152,-31350,15808,17478,-1360,-21670,13894,11963,13671,4474,-1145,18145,-3277,30041,-29655,-29098,-13243,29456,24897,-10697,-14137,-16174,21521,32661,15975,-9206,23951,-16293,14198,13911,-26399,8607,32465,9257,-15078,-22556,-24188,-1380,18398,-26075,10243,27839,-23781,21946,651,4714,30282,30719,21886,-16580,29157,-21605,8716,-11769,13482,-29646,-14426,25034,-4099,18256,-22303,3633,-25948,31966,-16409,-13529,-25422,18694,-8131,5606,16882,-977,-18029,-3790,-18835,6422,-24556,1503,17970,24957,-9864,-21494,-22002,24121,2276,-20502,-5327,20692,-1055,-13261,-21981,-30831,6446,-2935,9204,-31600,-9015,-34,-1355,-2560,2208,22174,31530,-17752,-15237,-14362,-10914,-16717,25499,-9960,15324,11534,-9032,-10995,7702,-19603,-22105,-1795,26808,-32005,29219,-28592,9567,2940,-6870,-16534,-26097,127,3905,-19818,1815,13294,-13245,-6694,11560,-11059,-21258,25538,-11633,-21002,17018,15213,30538,-20687,-3414,-6515,29182,-9553,32564,-31032,-27772,20221,-31648,-5225,-26430,-604,-27194,10770,1459,-25788,-6970,2300,3693,-23950,11087,-17535,1558,11731,-1574,11035,11361,-27496,9411,5289,15816,2316,-27814,-13436,-27287,18675,55,-9783,6323,30071,28336,-804,-24828,-17622,25873,-11211,13712,-17651,13147,-26320,17350,29150,-26395,-5914,31008,-12646,25893,-3142,-16045,-7434,21801,-10807,-2902,-18863,-24977,-27003,9773,-18546,-28767,-3813,-3419,32268,29949,25494,8038,-10421,-11816,32079,-15830,32639,13886,-11064,16750,-10564,4069,-23411,-26987,-12110,29940,-28545,1860,6871,-5778,30178,-30915,30307,-19554,-1100,2814,2248,1789,15446,7634,12964,29296,-
13725,19887,8174,-27160,-32125,22236,23438,-21016,-21022,-15601,24714,-21114,19880,7016,17204,-29050,19978,-6455,-15839,-15600,-16465,20915,-14157,13551,-11431,12592,-22697,7388,2945,25558,24013,6953,-30056,-593,-18399,4303,25199,-19718,15360,-29462,-15439,27621,6025,8879,-17426,6891,-21712,-18047,14983,-26653,28006,-2151,30223,19840,-3726,-22418,-31761,9986,-1250,-18505,15754,-22518,29204,11395,-30752,-29912,-23548,-1495,-4682,-3246,-11411,32246,19651,5625,29811,-18694,27672,21447,10018,-8255,1872,-15196,5430,2915,-31875,19348,-25138,-2649,-8346,-16170,-27057,-7802,-11431,11882,-20896,-12191,-5683,-7363,-4498,25085,29394,-9593,-16340,20906,94,9693,-21658,-22008,26721,11889,27455,24955,18692,31805,22888,-18319,20145,32097,26699,-6385,-11236,16712,-24792,-23537,-1537,9455,2879,8196,-28073,-6487,14796,27703,1817,333,13203,-2950,26828,-24507,-3896,9930,6536,-13330,-1472,-26075,-10027,27938,27642,30094,30260,-10739,-30137,2530,-19650,12092,-31172,3801,23808,-10304,29487,-22357,-1051,-26426,-25208,-8480,-26589,-997,14736,6620,11303,-29570,-2455,2041,5547,1849,-3131,32223,-22633,-3254,17399,28529,28044,-21526,-9500,-31406,31269,-8749,7562,-24297,2226,21655,17914,29209,11860,18890,19888,8548,30238,-28152,-10149,25980,-29039,10434,28481,26537,22846,-19848,-11956,-2579,17066,-9778,-4123,25192,-3437,-2075,-9262,-2598,-25195,20459,-32535,-25136,18257,-23768,3365,-23430,18414,4129,23227,-26378,-10004,10331,9766,-2886,-15362,2577,29644,-14054,-7358,26293,21900,30211,11405,8257,14855,31190,14712,7362,-3027,-20457,-18005,2528,-21845,21599,-27945,-22594,-14428,-2392,-1820,20194,17825,-9955,-17949,-18675,23804,-29218,-5826,-18479,-5993,-8295,-10007,15057,-26800,8378,20761,-17932,-13840,6919,32478,8608,2878,15081,30110,-4361,32238,15376,15579,5744,9132,-3187,782,-14927,-10345,-31461,-18825,9653,-13126,13725,-22161,-2716,-28325,-13952,-5284,11034,-4994,22743,-8816,29577,19641,-20188,17879,23581,-17353,-9202,-29768,-5848,31225,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,-10157,12407,25298,13901,73
8,22523,22055,6898,-3471,30585,-21383,24731,13011,7675,-4754,11092,1654,-3909,21075,-9218,2693,2748,32079,-13161,-21909,21670,19206,-10366,3476,-17593,-26845,-9872,4450,8095,940,-17337,-997,9798,835,21582,-1121,32247,-8763,-31667,17239,29205,25261,29814,-22943,-17863,-29494,9890,-14178,-30424,3165,16964,31562,29464,13787,26237,13920,-21302,25708,-19214,2239,-9124,-6327,27676,3574,32171,2689,-2732,9321,-31687,10423,17889,-18996,-23407,-22893,21865,-642,-7175,-432,12869,-21727,-18079,-18170,6801,2391,10301,-16893,344,1615,-14145,3630,16167,9592,-12564,-6249,-9369,15714,-28025,-6744,17237,-10188,4979,-2160,27511,30529,-22388,3119,30678,12497,1671,-29858,8193,-23659,-12442,-22256,22882,17473,26326,22238,30133,-8348,-8081,-15675,-7861,25840,-25435,-2507,-4227,-8237,-143,16711,12422,5472,11751,4458,17806,-2257,-26000,-25335,29425,-29669,-9805,-2987,4905,25245,-11701,32729,-26515,22216,577,19424,-29072,-22743,15857,12575,18980,2376,-516,24176,8826,10173,-19780,25008,9732,-4597,-24600,-7404,-25773,-23535,-27377,12780,28044,-6338,-17287,-22122,17903,13443,-27198,-10690,-10472,15971,-858,-7885,-20634,-30642,30398,9917,-11870,-20095,-32226,-3412,13036,-25971,32723,2924,-30785,-24522,12077,6306,17556,-14182,-29159,-27974,14851,-8720,25608,-11091,-8050,4330,-13391,21434,-4893,-6953,-20204,-8262,24473,8521,-25868,30516,4995,-10783,8450,-20902,20567,10838,11404,8141,18653,18534,-23449,-14231,19230,-14240,26894,9636,-29112,6705,14219,21050,20807,10178,15745,1250,-15899,27491,22352,28007,26338,5925,3575,-14655,-32091,-1037,7969,30889,18224,-17779,-29285,31544,-27274,-24457,-14569,-22566,-23655,-338,-14556,-24802,-167,-8159,20094,-588,32568,-26425,-10882,-26188,17617,-24424,-24371,-2742,2090,-22457,972,-5574,18652,-12451,19512,-30416,11470,26305,29601,-5203,-18124,13363,5090,-7131,-24157,19770,-22800,19660,-12056,-29779,-28364,6910,-23028,25584,-10168,3836,818,13231,23607,-12616,-4463,-6767,16954,28757,-7893,3429,1463,26619,-28854,16584,15839,23571,31078,2814,-19060,27752,-3180,-1226
0,-19371,14951,3511,-4352,-28907,-24745,28203,27438,-30617,-17960,-17157,25364,-27081,30405,-26520,-180,-1006,26003,-17011,22161,28379,13658,-44,30861,-28640,16833,-32633,7744,19729,-24857,11757,-16028,-27303,27639,-11630,-9114,32485,4044,28648,-1916,10394,-19560,6992,7012,23977,5233,20968,-23139,-12916,-12101,-792,-4580,7946,193,-17810,27214,12164,16215,-23515,-27353,20643,2156,-16686,-12081,-9353,-29007,-13607,19164,25225,-2480,11080,-20299,11429,3013,-23167,-4773,2801,-20773,-9002,9372,-16800,16298,18782,-17037,-3475,-5588,767,18269,-31043,16147,-21624,3419,-27228,-11454,-15142,31348,-19542,1653,-17469,-11028,25141,-4617,-8832,-14899,5748,-14318,31416,8500,-11101,-1611,-10328,2106,29070,-23379,23820,-28976,-23906,10683,26773,-16234,19180,27406,3992,20402,-3342,-14344,-6032,32522,24482,-22292,-1401,4191,-29292,11533,-8407,15865,26956,-11733,-4539,-13434,-3461,9826,-18606,4757,22069,28328,-3159,766,10544,-31734,6449,-12902,3166,20528,-7688,30150,24925,9103,-8452,8193,-22927,-9846,23698,-3574,17905,638,8838,26318,-26970,-23653,-17537,-19302,3111,-30101,-13313,-31048,-15324,-30875,8408,9815,-3806,1241,17963,-9941,7849,-219,22352,25907,-9694,-18545,-8535,-18769,930,-5524,-18554,7990,29281,-3974,6561,17834,-5932,-6609,-15007,2419,24300,-22003,23419,-21086,5887,-9476,-5488,12401,-5581,11715,31132,29416,-14017,32258,18292,-6481,24256,12276,22940,-12194,9024,31186,-14262,11428,-23647,-18160,22621,32156,1504,28178,-30559,-19547,-31731,1148,3730,32149,29419,-21008,16648,21466,-19012,17555,-18974,-24677,4472,-3454,14331,-5728,-2977,20942,-20025,27308,-4682,-23516,12596,-2153,-7469,-23847,-3449,13546,-3073,-22402,2659,-21683,17717,-11123,-30579,14305,24929,9409,10679,-30689,24171,24441,-5352,-7580,-28662,2433,-26207,-15495,-11410,-23487,12583,24708,11366,17378,-6896,-1431,-32648,4463,-12578,18651,6376,17522,-12188,-1591,24575,-712,-11996,7679,24707,-9431,-22472,16094,-28059,-3924,-12864,-23474,-24348,1747,-30935,-23378,24787,-19364,-27929,8080,2842,4449,-27367,17100,19251,148
55,-1640,31440,17120,-32695,645,-961,-22061,30361,13342,-2840,31426,-5234,7082,-214,-24411,-19609,-20826,17000,19192,9761,832,23544,6679,-21522,-6388,18380,24696,-22810,-12916,21742,-7471,-11266,-27641,20312,21472,29254,-17733,-13649,-12980,8748,-13767,15886,21224,25661,-22321,-7316,32039,10554,16961,28771,-11746,15288,18632,14537,-2919,11248,13531,22249,-9994,-15389,-27645,-11070,28529,27397,-27694,31258,-6266,-23797,-24585,30582,-2472,13777,-20383,8188,10486,26236,-11449,583,18549,14483,15889,7784,-11807,14571,24891,12341,-14886,16812,-14482,1763,30670,-31323,19991,-8169,10429,14536,28036,32474,6371,23825,10755,10883,11014,26043,6033,-2121,23836,-2872,1200,-23642,-7968,-21917,-25585,21296,10519,-12230,31954,-4760,25985,-3721,7802,-13850,252,13394,-19920,-32751,31831,-16807,9286,-16717,22784,-11704,-15220,-11320,27241,26755,13056,-24705,5083,-22200,16501,-20251,22246,-18172,-16801,20218,-24789,-22781,-10153,-29700,24941,-32217,-13211,-18240,-32132,27648,-25735,-32640,-7582,14785,3302,-1179,-31720,13087,15169,-2245,-3269,3665,-23330,-14176,-18115,-30272,14609,-26908,-22326,-9813,20603,-22400,12370,26467,21334,-12626,-19917,-26356,19289,-21012,-14577,14224,-18910,-28897,-25639,1479,15727,-32611,26598,-8513,20949,16330,25619,-14143,-765,-14186,13157,6926,14892,13238,1212,-14658,-30784,-27336,21421,28150,17474,32185,-1777,-30015,6343,3479,-20487,14214,-20630,-17248,10332,-21506,4146,-25912,-13876,6369,437,437,-22281,9380,25165,-10384,-26964,-7778,-26937,20381,-6558,-22201,10604,-29417,4009,12290,-30781,-14143,25493,4013,24894,6589,11734,17658,16762,-2919,4485,17130,32722,-10877,26879,-5453,-29393,-31301,11707,16865,12488,27964,-11611,9202,-19107,-14790,-3357,-10419,24708,11346,30953,-3096,-30727,8179,21644,-829,5147,5719,-21698,19849,22970,-11755,16378,-26674,21088,-19006,-27371,-26799,28495,-22896,8450,-22730,24630,21345,27743,-16495,15703,-29462,2199,27922,2996,23404,-28495,-12775,21682,24507,-7595,-27422,17061,8143,-12297,13647,-26758,10696,30175,19215,-1183,13708,3
0292,24545,-24017,-4903,7317,-2060,25988,8994,-19718,-5393,-7919,-29872,-4080,25042,-9234,3136,16405,16387,-21135,12360,-27515,24847,-30710,-17058,32462,-24737,22267,20591,-11026,4947,4984,15212,24673,26174,32663,-7380,-12703,-32746,5137,-23333,-5105,-19441,29986,9950,15870,-16334,595,27066,21067,-31968,-10016,26648,5847,20167,-10411,-444,24299,-27063,28412,17837,10770,23308,-16576,11084,10876,-9590,16699,11024,-22458,105,-14879,4900,-17112,4201,-21180,11920,19642,10926,6489,-8288,-11616,-15761,-14226,979,-18748,12341,-11783,-13606,19973,22198,28709,7181,22989,-13033,-28150,20114,-21656,-31626,27392,-25792,-9384,-26966,-25775,-7597,29603,7944,-3090,14824,-26767,29589,-19798,-23674,-11825,-29820,5854,11868,-20262,30849,-7310,-3230,-27100,17571,-18750,-14360,-384,-13760,-31187,13207,-1144,1321,-4479,-16951,6691,27404,29699,27233,15201,21231,-3391,-23784,-25108,3018,-7052,-27207,-20423,27140,-1025,-8068,-22063,28667,-19421,28725,14867,-24400,-6178,-17148,21490,-1628,-32397,10130,-1229,-31624,-26349,-30993,-27806,-13334,-27817,-74,17369,25349,14319,25554,31012,-20745,28200,-17633,-24997,-4707,7802,-626,-27631,31390,-2976,18406,13942,29502,-17712,-745,19407,8840,-14827,1511,20882,-2709,-14651,-1116,-27708,-394,32716,15747,-24254,-23573,-6149,-10121,-6031,14181,19122,-13537,7133,-4693,-17395,-16216,-17988,16430,-26918,-3284,-7729,-9627,-8996,-22106,-13974,5909,-18232,6589,-17342,-19853,-26546,-16118,15989,-31626,11882,1600,24669,23389,-18155,-14724,-23650,-20027,-23478,25719,-19738,-9351,-15945,-5240,-6762,11174,10212,-6189,9780,-22855,15210,-27946,30196,10996,30197,-7104,21557,6948,-16096,10552,-12319,-17794,6134,-11737,26810,-19143,2580,2252,-18277,-4597,-1772,14848,-21945,27651,-25311,-15389,-168,7389,-29496,14981,18206,-27518,11521,-10450,-19173,15540,-19233,25909,21599,9577,23652,30640,28396,29569,25602,31513,-4444,14504,-23171,0,0,0,0,0,0,0,0 diff --git a/tensorflow/lite/micro/integration_tests/seanet/pad/pad1_input0_int16.csv 
b/tensorflow/lite/micro/integration_tests/seanet/pad/pad1_input0_int16.csv new file mode 100644 index 0000000..a466f5b --- /dev/null +++ b/tensorflow/lite/micro/integration_tests/seanet/pad/pad1_input0_int16.csv @@ -0,0 +1 @@ +31476,17068,-22738,22758,24211,-26866,-30146,30915,24376,19637,-32048,16208,10391,-27230,26851,2510,9696,-10184,28801,-16325,-15667,-273,21897,-26605,-7526,26474,8801,-7925,22883,5458,-7925,-7433,-16397,-13350,14050,15111,32681,11580,-30267,-12392,-29391,-32039,-11203,18877,15482,-15701,15210,22722,11107,27606,-3390,-6398,30754,31432,19440,-17418,1975,10277,9581,19327,-2316,-29487,18748,12550,-15466,1254,-2402,-30679,-30417,-10410,-29583,19261,-19707,-12567,8456,17644,-25851,16065,-11541,24616,-23265,-9749,-13237,-14009,19174,-9406,-787,6358,14379,-22017,-31949,29112,12091,10850,15158,-30608,6170,-12819,13071,-15582,15677,-11501,-18510,22734,-6986,-10723,-11715,26359,17770,31137,-11112,13136,7993,-23479,3986,-30056,-12235,14890,-12516,18995,-8380,-796,-13576,-29424,-9070,-23361,-19014,4761,-16710,6983,13304,22596,-24238,-8445,9065,-17970,-405,-22223,10941,-29056,19770,-1469,-11139,-22257,-10482,24759,19934,-10923,-7980,18881,4070,12730,28887,10598,-11975,-27450,-3753,-616,-29787,15532,-375,-31135,14374,-14538,31131,-29975,18892,24934,-3064,-12259,-9631,6288,11754,14573,-25649,12360,-6186,-11525,23274,-26720,-22545,3979,-32076,27077,11275,12054,12056,-17467,13891,4813,22105,27076,-11159,3791,-12401,16177,27279,-11648,-26364,14819,-12746,11442,-14627,-17000,-21439,-131,19805,-6436,-19460,13608,-28921,30920,-6129,-21658,-1861,-11515,-19962,6962,24192,22823,-6315,21839,4231,-6007,12146,-10827,-1817,-32512,-30685,-27945,31971,-2012,-4941,-5511,-31529,11395,10366,27223,-22908,-26694,18336,-25232,32758,-30204,-20274,-208,-21793,-3994,31367,-22035,-18426,-28431,32001,-8118,-15549,21930,24249,-25696,-26318,26363,-19783,-16091,20074,-11164,-32704,-4521,1356,6929,32330,7370,-17265,8208,-31139,-21366,-6280,5458,16801,28724,7177,-13473,-27214,-8557,-9608,4
581,-9720,21256,-11493,-11884,1233,15084,-31992,-1490,8822,29877,-8102,-12184,-25314,-27945,18424,26999,-23029,-7359,-22389,30095,-4662,13998,-6283,-24211,21446,-4919,-10560,29749,-730,-26657,2305,3758,25119,-27906,27725,27253,-10223,-15240,-20070,11280,-5125,-17253,-27314,-27225,-23307,-26026,-20034,-26373,-10923,-6171,3756,-5213,-8843,4974,12954,-13678,13745,30157,30480,30325,-12032,-23725,-14738,-14098,-21443,15752,31179,-987,19255,26806,24760,-7111,-23944,-19906,-18508,-8543,-1138,11774,4063,27868,20047,-2256,-17879,30614,-8223,30410,2558,-29406,-20510,10661,-28198,7179,32187,20207,-13454,-4590,18526,28605,26113,27063,-3765,-30589,-28168,27733,24443,-3292,-1618,12373,-30545,20377,18518,-16326,7674,8013,4767,-18978,-5816,-15917,-1147,22651,-2023,12287,3750,-26471,-15249,13253,-16535,-7838,-1701,8752,-24675,22990,9965,-3,-12362,15402,5183,21390,28473,-20530,-27230,-20633,-27813,-25631,-20826,26325,29380,-6191,9507,19409,30530,24864,-8238,26009,-13960,14571,26834,-204,-4061,-15914,-6433,14825,2627,-29331,6641,-30292,-3441,20997,-15801,3004,-14521,-12563,-26705,-23040,-3720,21672,-25016,-25866,1668,-31679,29970,-11236,-15179,-20735,-3548,9755,-6574,6236,-24117,-31118,21765,7829,14558,-8675,-27924,-31164,-19291,8552,-4049,26501,31724,-5055,12743,-32064,30771,4089,28135,-1564,-16187,-17764,22475,29867,-13620,25193,18988,6043,-2087,121,-32758,-15199,-8198,-5377,16111,9243,21491,15719,17525,4678,26974,19835,9581,-6896,27077,8975,-16978,-16086,3339,28276,3782,4728,-2590,-19877,-30504,-2239,-22795,27062,27296,-9781,-8565,-1527,-24914,20998,-32493,1302,-31524,-16134,8448,6217,31822,16538,2229,-30673,16540,-11549,5378,-28670,-14869,28451,-14078,27329,-24632,17545,-22075,21721,-32216,-13761,-6760,-22223,13243,7136,-13943,-27207,23654,-9939,-5294,30450,-19193,-25060,32646,6986,-1020,-20575,12208,-21876,26687,9663,19917,-29934,15015,-4393,10453,2043,669,-13058,-18443,7510,-9734,-13765,-30945,-30917,-4159,29453,-21666,14931,20797,17670,4937,3958,16203,-24925,10962,-10771,-2262,
8597,30980,-8088,-30802,-20285,16187,18511,19339,24819,-7335,28233,6179,-16337,22913,17870,-1911,-30895,-2689,-22476,3965,28321,10737,-1581,19068,-5446,-16402,-16592,3527,-28698,-30921,-28262,-8272,11605,-5340,-23173,4228,-25633,-13529,27081,-28818,21049,20241,6836,-3934,-6494,-19367,-20482,23377,-8593,7742,11843,-4252,-32665,4957,30058,15268,-6686,20489,-24014,15607,10061,-12648,-1992,-4549,-30767,-19397,30081,14508,17371,-10552,131,22668,4972,11045,31778,9447,8690,20878,-31081,-27391,11950,25836,-31389,13289,-28772,-28732,8443,18291,-5866,19279,27559,-27432,330,-30176,1090,11660,-14678,23765,-25939,25798,-31262,-18017,30639,-14176,-20332,15000,-3355,15015,5952,-9152,-31350,15808,17478,-1360,-21670,13894,11963,13671,4474,-1145,18145,-3277,30041,-29655,-29098,-13243,29456,24897,-10697,-14137,-16174,21521,32661,15975,-9206,23951,-16293,14198,13911,-26399,8607,32465,9257,-15078,-22556,-24188,-1380,18398,-26075,10243,27839,-23781,21946,651,4714,30282,30719,21886,-16580,29157,-21605,8716,-11769,13482,-29646,-14426,25034,-4099,18256,-22303,3633,-25948,31966,-16409,-13529,-25422,18694,-8131,5606,16882,-977,-18029,-3790,-18835,6422,-24556,1503,17970,24957,-9864,-21494,-22002,24121,2276,-20502,-5327,20692,-1055,-13261,-21981,-30831,6446,-2935,9204,-31600,-9015,-34,-1355,-2560,2208,22174,31530,-17752,-15237,-14362,-10914,-16717,25499,-9960,15324,11534,-9032,-10995,7702,-19603,-22105,-1795,26808,-32005,29219,-28592,9567,2940,-6870,-16534,-26097,127,3905,-19818,1815,13294,-13245,-6694,11560,-11059,-21258,25538,-11633,-21002,17018,15213,30538,-20687,-3414,-6515,29182,-9553,32564,-31032,-27772,20221,-31648,-5225,-26430,-604,-27194,10770,1459,-25788,-6970,2300,3693,-23950,11087,-17535,1558,11731,-1574,11035,11361,-27496,9411,5289,15816,2316,-27814,-13436,-27287,18675,55,-9783,6323,30071,28336,-804,-24828,-17622,25873,-11211,13712,-17651,13147,-26320,17350,29150,-26395,-5914,31008,-12646,25893,-3142,-16045,-7434,21801,-10807,-2902,-18863,-24977,-27003,9773,-18546,-28767,-3813,-341
9,32268,29949,25494,8038,-10421,-11816,32079,-15830,32639,13886,-11064,16750,-10564,4069,-23411,-26987,-12110,29940,-28545,1860,6871,-5778,30178,-30915,30307,-19554,-1100,2814,2248,1789,15446,7634,12964,29296,-13725,19887,8174,-27160,-32125,22236,23438,-21016,-21022,-15601,24714,-21114,19880,7016,17204,-29050,19978,-6455,-15839,-15600,-16465,20915,-14157,13551,-11431,12592,-22697,7388,2945,25558,24013,6953,-30056,-593,-18399,4303,25199,-19718,15360,-29462,-15439,27621,6025,8879,-17426,6891,-21712,-18047,14983,-26653,28006,-2151,30223,19840,-3726,-22418,-31761,9986,-1250,-18505,15754,-22518,29204,11395,-30752,-29912,-23548,-1495,-4682,-3246,-11411,32246,19651,5625,29811,-18694,27672,21447,10018,-8255,1872,-15196,5430,2915,-31875,19348,-25138,-2649,-8346,-16170,-27057,-7802,-11431,11882,-20896,-12191,-5683,-7363,-4498,25085,29394,-9593,-16340,20906,94,9693,-21658,-22008,26721,11889,27455,24955,18692,31805,22888,-18319,20145,32097,26699,-6385,-11236,16712,-24792,-23537,-1537,9455,2879,8196,-28073,-6487,14796,27703,1817,333,13203,-2950,26828,-24507,-3896,9930,6536,-13330,-1472,-26075,-10027,27938,27642,30094,30260,-10739,-30137,2530,-19650,12092,-31172,3801,23808,-10304,29487,-22357,-1051,-26426,-25208,-8480,-26589,-997,14736,6620,11303,-29570,-2455,2041,5547,1849,-3131,32223,-22633,-3254,17399,28529,28044,-21526,-9500,-31406,31269,-8749,7562,-24297,2226,21655,17914,29209,11860,18890,19888,8548,30238,-28152,-10149,25980,-29039,10434,28481,26537,22846,-19848,-11956,-2579,17066,-9778,-4123,25192,-3437,-2075,-9262,-2598,-25195,20459,-32535,-25136,18257,-23768,3365,-23430,18414,4129,23227,-26378,-10004,10331,9766,-2886,-15362,2577,29644,-14054,-7358,26293,21900,30211,11405,8257,14855,31190,14712,7362,-3027,-20457,-18005,2528,-21845,21599,-27945,-22594,-14428,-2392,-1820,20194,17825,-9955,-17949,-18675,23804,-29218,-5826,-18479,-5993,-8295,-10007,15057,-26800,8378,20761,-17932,-13840,6919,32478,8608,2878,15081,30110,-4361,32238,15376,15579,5744,9132,-3187,782,-14927,-10345,-
31461,-18825,9653,-13126,13725,-22161,-2716,-28325,-13952,-5284,11034,-4994,22743,-8816,29577,19641,-20188,17879,23581,-17353,-9202,-29768,-5848,31225,-10157,12407,25298,13901,738,22523,22055,6898,-3471,30585,-21383,24731,13011,7675,-4754,11092,1654,-3909,21075,-9218,2693,2748,32079,-13161,-21909,21670,19206,-10366,3476,-17593,-26845,-9872,4450,8095,940,-17337,-997,9798,835,21582,-1121,32247,-8763,-31667,17239,29205,25261,29814,-22943,-17863,-29494,9890,-14178,-30424,3165,16964,31562,29464,13787,26237,13920,-21302,25708,-19214,2239,-9124,-6327,27676,3574,32171,2689,-2732,9321,-31687,10423,17889,-18996,-23407,-22893,21865,-642,-7175,-432,12869,-21727,-18079,-18170,6801,2391,10301,-16893,344,1615,-14145,3630,16167,9592,-12564,-6249,-9369,15714,-28025,-6744,17237,-10188,4979,-2160,27511,30529,-22388,3119,30678,12497,1671,-29858,8193,-23659,-12442,-22256,22882,17473,26326,22238,30133,-8348,-8081,-15675,-7861,25840,-25435,-2507,-4227,-8237,-143,16711,12422,5472,11751,4458,17806,-2257,-26000,-25335,29425,-29669,-9805,-2987,4905,25245,-11701,32729,-26515,22216,577,19424,-29072,-22743,15857,12575,18980,2376,-516,24176,8826,10173,-19780,25008,9732,-4597,-24600,-7404,-25773,-23535,-27377,12780,28044,-6338,-17287,-22122,17903,13443,-27198,-10690,-10472,15971,-858,-7885,-20634,-30642,30398,9917,-11870,-20095,-32226,-3412,13036,-25971,32723,2924,-30785,-24522,12077,6306,17556,-14182,-29159,-27974,14851,-8720,25608,-11091,-8050,4330,-13391,21434,-4893,-6953,-20204,-8262,24473,8521,-25868,30516,4995,-10783,8450,-20902,20567,10838,11404,8141,18653,18534,-23449,-14231,19230,-14240,26894,9636,-29112,6705,14219,21050,20807,10178,15745,1250,-15899,27491,22352,28007,26338,5925,3575,-14655,-32091,-1037,7969,30889,18224,-17779,-29285,31544,-27274,-24457,-14569,-22566,-23655,-338,-14556,-24802,-167,-8159,20094,-588,32568,-26425,-10882,-26188,17617,-24424,-24371,-2742,2090,-22457,972,-5574,18652,-12451,19512,-30416,11470,26305,29601,-5203,-18124,13363,5090,-7131,-24157,19770,-22800,19660,-1
2056,-29779,-28364,6910,-23028,25584,-10168,3836,818,13231,23607,-12616,-4463,-6767,16954,28757,-7893,3429,1463,26619,-28854,16584,15839,23571,31078,2814,-19060,27752,-3180,-12260,-19371,14951,3511,-4352,-28907,-24745,28203,27438,-30617,-17960,-17157,25364,-27081,30405,-26520,-180,-1006,26003,-17011,22161,28379,13658,-44,30861,-28640,16833,-32633,7744,19729,-24857,11757,-16028,-27303,27639,-11630,-9114,32485,4044,28648,-1916,10394,-19560,6992,7012,23977,5233,20968,-23139,-12916,-12101,-792,-4580,7946,193,-17810,27214,12164,16215,-23515,-27353,20643,2156,-16686,-12081,-9353,-29007,-13607,19164,25225,-2480,11080,-20299,11429,3013,-23167,-4773,2801,-20773,-9002,9372,-16800,16298,18782,-17037,-3475,-5588,767,18269,-31043,16147,-21624,3419,-27228,-11454,-15142,31348,-19542,1653,-17469,-11028,25141,-4617,-8832,-14899,5748,-14318,31416,8500,-11101,-1611,-10328,2106,29070,-23379,23820,-28976,-23906,10683,26773,-16234,19180,27406,3992,20402,-3342,-14344,-6032,32522,24482,-22292,-1401,4191,-29292,11533,-8407,15865,26956,-11733,-4539,-13434,-3461,9826,-18606,4757,22069,28328,-3159,766,10544,-31734,6449,-12902,3166,20528,-7688,30150,24925,9103,-8452,8193,-22927,-9846,23698,-3574,17905,638,8838,26318,-26970,-23653,-17537,-19302,3111,-30101,-13313,-31048,-15324,-30875,8408,9815,-3806,1241,17963,-9941,7849,-219,22352,25907,-9694,-18545,-8535,-18769,930,-5524,-18554,7990,29281,-3974,6561,17834,-5932,-6609,-15007,2419,24300,-22003,23419,-21086,5887,-9476,-5488,12401,-5581,11715,31132,29416,-14017,32258,18292,-6481,24256,12276,22940,-12194,9024,31186,-14262,11428,-23647,-18160,22621,32156,1504,28178,-30559,-19547,-31731,1148,3730,32149,29419,-21008,16648,21466,-19012,17555,-18974,-24677,4472,-3454,14331,-5728,-2977,20942,-20025,27308,-4682,-23516,12596,-2153,-7469,-23847,-3449,13546,-3073,-22402,2659,-21683,17717,-11123,-30579,14305,24929,9409,10679,-30689,24171,24441,-5352,-7580,-28662,2433,-26207,-15495,-11410,-23487,12583,24708,11366,17378,-6896,-1431,-32648,4463,-12578,18651,6376
,17522,-12188,-1591,24575,-712,-11996,7679,24707,-9431,-22472,16094,-28059,-3924,-12864,-23474,-24348,1747,-30935,-23378,24787,-19364,-27929,8080,2842,4449,-27367,17100,19251,14855,-1640,31440,17120,-32695,645,-961,-22061,30361,13342,-2840,31426,-5234,7082,-214,-24411,-19609,-20826,17000,19192,9761,832,23544,6679,-21522,-6388,18380,24696,-22810,-12916,21742,-7471,-11266,-27641,20312,21472,29254,-17733,-13649,-12980,8748,-13767,15886,21224,25661,-22321,-7316,32039,10554,16961,28771,-11746,15288,18632,14537,-2919,11248,13531,22249,-9994,-15389,-27645,-11070,28529,27397,-27694,31258,-6266,-23797,-24585,30582,-2472,13777,-20383,8188,10486,26236,-11449,583,18549,14483,15889,7784,-11807,14571,24891,12341,-14886,16812,-14482,1763,30670,-31323,19991,-8169,10429,14536,28036,32474,6371,23825,10755,10883,11014,26043,6033,-2121,23836,-2872,1200,-23642,-7968,-21917,-25585,21296,10519,-12230,31954,-4760,25985,-3721,7802,-13850,252,13394,-19920,-32751,31831,-16807,9286,-16717,22784,-11704,-15220,-11320,27241,26755,13056,-24705,5083,-22200,16501,-20251,22246,-18172,-16801,20218,-24789,-22781,-10153,-29700,24941,-32217,-13211,-18240,-32132,27648,-25735,-32640,-7582,14785,3302,-1179,-31720,13087,15169,-2245,-3269,3665,-23330,-14176,-18115,-30272,14609,-26908,-22326,-9813,20603,-22400,12370,26467,21334,-12626,-19917,-26356,19289,-21012,-14577,14224,-18910,-28897,-25639,1479,15727,-32611,26598,-8513,20949,16330,25619,-14143,-765,-14186,13157,6926,14892,13238,1212,-14658,-30784,-27336,21421,28150,17474,32185,-1777,-30015,6343,3479,-20487,14214,-20630,-17248,10332,-21506,4146,-25912,-13876,6369,437,437,-22281,9380,25165,-10384,-26964,-7778,-26937,20381,-6558,-22201,10604,-29417,4009,12290,-30781,-14143,25493,4013,24894,6589,11734,17658,16762,-2919,4485,17130,32722,-10877,26879,-5453,-29393,-31301,11707,16865,12488,27964,-11611,9202,-19107,-14790,-3357,-10419,24708,11346,30953,-3096,-30727,8179,21644,-829,5147,5719,-21698,19849,22970,-11755,16378,-26674,21088,-19006,-27371,-26799,28495,-2
2896,8450,-22730,24630,21345,27743,-16495,15703,-29462,2199,27922,2996,23404,-28495,-12775,21682,24507,-7595,-27422,17061,8143,-12297,13647,-26758,10696,30175,19215,-1183,13708,30292,24545,-24017,-4903,7317,-2060,25988,8994,-19718,-5393,-7919,-29872,-4080,25042,-9234,3136,16405,16387,-21135,12360,-27515,24847,-30710,-17058,32462,-24737,22267,20591,-11026,4947,4984,15212,24673,26174,32663,-7380,-12703,-32746,5137,-23333,-5105,-19441,29986,9950,15870,-16334,595,27066,21067,-31968,-10016,26648,5847,20167,-10411,-444,24299,-27063,28412,17837,10770,23308,-16576,11084,10876,-9590,16699,11024,-22458,105,-14879,4900,-17112,4201,-21180,11920,19642,10926,6489,-8288,-11616,-15761,-14226,979,-18748,12341,-11783,-13606,19973,22198,28709,7181,22989,-13033,-28150,20114,-21656,-31626,27392,-25792,-9384,-26966,-25775,-7597,29603,7944,-3090,14824,-26767,29589,-19798,-23674,-11825,-29820,5854,11868,-20262,30849,-7310,-3230,-27100,17571,-18750,-14360,-384,-13760,-31187,13207,-1144,1321,-4479,-16951,6691,27404,29699,27233,15201,21231,-3391,-23784,-25108,3018,-7052,-27207,-20423,27140,-1025,-8068,-22063,28667,-19421,28725,14867,-24400,-6178,-17148,21490,-1628,-32397,10130,-1229,-31624,-26349,-30993,-27806,-13334,-27817,-74,17369,25349,14319,25554,31012,-20745,28200,-17633,-24997,-4707,7802,-626,-27631,31390,-2976,18406,13942,29502,-17712,-745,19407,8840,-14827,1511,20882,-2709,-14651,-1116,-27708,-394,32716,15747,-24254,-23573,-6149,-10121,-6031,14181,19122,-13537,7133,-4693,-17395,-16216,-17988,16430,-26918,-3284,-7729,-9627,-8996,-22106,-13974,5909,-18232,6589,-17342,-19853,-26546,-16118,15989,-31626,11882,1600,24669,23389,-18155,-14724,-23650,-20027,-23478,25719,-19738,-9351,-15945,-5240,-6762,11174,10212,-6189,9780,-22855,15210,-27946,30196,10996,30197,-7104,21557,6948,-16096,10552,-12319,-17794,6134,-11737,26810,-19143,2580,2252,-18277,-4597,-1772,14848,-21945,27651,-25311,-15389,-168,7389,-29496,14981,18206,-27518,11521,-10450,-19173,15540,-19233,25909,21599,9577,23652,30640,28396,
29569,25602,31513,-4444,14504,-23171 diff --git a/tensorflow/lite/micro/integration_tests/seanet/pad/pad2.tflite b/tensorflow/lite/micro/integration_tests/seanet/pad/pad2.tflite new file mode 100644 index 0000000..9d6a4b7 Binary files /dev/null and b/tensorflow/lite/micro/integration_tests/seanet/pad/pad2.tflite differ diff --git a/tensorflow/lite/micro/integration_tests/seanet/pad/pad2_golden_int16.csv b/tensorflow/lite/micro/integration_tests/seanet/pad/pad2_golden_int16.csv new file mode 100644 index 0000000..1b88bfc --- /dev/null +++ b/tensorflow/lite/micro/integration_tests/seanet/pad/pad2_golden_int16.csv @@ -0,0 +1 @@ +0,0,0,0,0,0,0,0,-3420,31602,-17513,23798,9060,-1531,1361,-29223,-16708,-2387,13498,27934,-25224,-7812,-21846,16859,-421,-320,-24600,20148,-29802,-20860,16790,-61,-5728,-12166,-27690,621,-16282,-22510,-16195,21432,1887,26817,-14490,-22342,8201,11675,31410,-10170,27061,20339,978,-28201,24409,-17079,-17012,-19749,-6005,8266,-14334,605,-12325,-11432,13312,7278,27796,-20202,20999,-24013,-21818,-4633,-18057,12382,1463,6072,-20641,5042,1419,22888,5185,-14874,-32191,-28725,30791,5898,7072,-26372,22336,20338,-6573,-10629,-10693,25203,4048,4929,7668,-4354,5564,-27703,15424,-18931,23534,-5515,10758,1533,30157,-29557,-3513,-22444,-8829,-23182,-11191,825,30714,30316,-28812,28978,-12087,10291,-22581,30594,21238,6898,-11101,-13326,13593,-8359,-10286,-16015,-973,6511,-12739,-5531,23909,-31849,-9559,19081,-2690,8906,-31286,10224,-14848,-1421,-9632,-4003,-32590,-18763,-19049,15354,22545,19368,-2464,-19077,2689,28657,-13549,-9915,-23691,-4795,3905,-4244,-17408,17839,17667,-16725,7644,-25997,-24597,-19373,12317,1066,-4543,15495,-11835,20123,-24020,25150,-22081,-1168,6886,-12911,7297,-16343,-5817,-24966,23331,32678,25779,7031,-30562,20447,17693,-15870,22119,-10700,16271,-14330,10345,24644,28956,2501,-9501,-9858,19603,-4125,-15278,18565,-23274,18208,-4484,-6089,-22186,17307,4357,-16578,-9103,-1066,-29024,-1762,4337,22393,-25849,13525,2057,-26899,-14876,28539,2843,-1
4668,30318,7801,-16710,-2664,2741,-26278,4596,-3176,-29071,-26165,9786,-5550,13358,23557,-19033,17265,24110,30970,25466,9502,9400,30009,20794,-109,4814,-6688,-26919,201,-22122,-4465,-25392,4654,23429,29938,-16897,27138,-27644,22254,3449,-9639,-28422,-19773,-25811,11223,-10777,14072,-15991,22270,3551,7546,27908,17018,-22177,8918,-10360,3144,-10662,-15262,-10075,32212,1371,1144,-27776,5071,-8198,-15629,-27651,-20539,5083,-498,-6094,-9647,-2380,-20451,-20956,-19673,28474,-27729,-28967,-19570,9377,-23463,-25252,-27393,-8071,-26989,6327,-22448,9568,31570,31595,-27011,18749,-24435,15442,5282,19250,-32293,-2829,29563,-26025,-22530,31386,-443,15339,25267,3412,-14320,19979,4835,-13228,-21452,-16163,-31011,-8178,10121,-5107,-23194,-26512,6732,-34,-19730,-14255,-27153,24238,-2663,-15272,-26186,-24134,2947,-28143,28846,-29457,-1251,6642,-27431,27099,-2684,-15684,4966,11702,-433,-16927,2853,-27253,-18779,15641,-8827,-23246,27787,8763,1706,9031,6173,26903,13153,-26006,19919,1209,-26023,-29945,4966,8127,-6285,8351,15339,8275,-11568,21653,-25387,30628,5053,31789,13861,-14896,6753,10595,7637,31804,-32122,-11988,31428,6942,4229,26135,13708,25895,-10744,5887,-30106,28508,-27439,-19328,-5304,-1141,11236,-9000,26998,-13908,18381,-9540,-14894,-4729,-10100,16723,20120,32501,-13740,8829,22628,-12587,-8202,9509,3200,13551,-11296,-26974,20946,-2844,-11995,-11620,-18378,-7178,4449,-6493,-2616,7194,8994,-16108,-21285,31608,14136,25516,8178,24832,-16446,6901,-11205,-21337,-31950,13230,11643,-25918,-15713,-20648,21773,20939,-22695,26567,27367,11439,-30409,-6756,22379,-6079,16314,-3747,-7058,-25228,-8172,-6613,14619,-27631,20715,-15326,-23258,9203,11780,-29685,13664,-3132,-9212,5608,-20684,-15240,6426,-25734,19795,29721,-18316,27336,-2522,26264,-3919,27453,-7162,-19773,-2265,11195,-19918,30243,-20969,-17714,-10373,18692,-31583,-21149,-22349,19313,6557,18968,-3155,-3760,-22468,-12380,-5930,29787,23354,29301,-3096,-24384,32144,24890,10699,-25805,-19053,9742,26673,-24416,-29731,23249,31500,-25606,30
623,14463,22054,15657,17378,-15006,29335,4172,-28218,31639,872,-12173,12337,-4641,5297,29933,21072,-14057,12648,-7619,29676,-15770,18399,15422,-16139,-21229,29514,-25942,20455,2390,569,12820,23545,-11114,15540,20963,6979,-31931,1740,-32326,-867,-13212,17534,-4121,-8233,-19901,17310,23291,-30375,-13563,5121,4474,-11578,-20607,-28375,-17840,1753,-31282,-15919,1467,27329,-29333,-18514,-17187,-24463,13470,27494,-12864,8393,-9629,26724,31771,-13790,-22871,20170,10392,-11674,-18130,-11172,24437,-16295,-19131,-28678,23399,11107,-25087,-18537,-15090,10375,25230,8895,-19123,31776,-12976,18651,23820,-15431,13122,19630,26702,22389,-14685,-258,22198,-6190,-26548,10467,5129,29032,-1681,22735,788,-26847,16342,-15515,-13819,-11941,15858,25609,4946,-25592,13599,22178,553,-23519,-10133,3449,31024,-32413,26108,-18087,-9007,-13208,28828,2353,-31935,-28073,-10694,10028,-14362,-5571,-29259,-21845,-7848,-15902,-24318,-16634,26324,12261,-12542,11024,-22097,8576,-11504,-14935,-4484,26764,-20369,3948,-31808,8262,-6232,5060,-31366,2860,32234,19998,-32414,-32013,29204,28675,-26590,-2174,12957,-16044,-30777,-28881,-10195,5692,6062,29278,-791,-1041,-28,15037,-4702,-6534,10404,7007,27173,-21190,-19973,25312,-21121,-30763,18315,-23329,-11948,32127,-32599,7126,1481,-2997,-30136,-16035,-10854,-1440,-28528,5903,9657,9658,-13425,12064,10310,28774,9288,1777,-24382,22311,-7753,7709,20070,-3897,-8190,4076,27655,32502,1870,18287,-14739,-26614,-30881,-14666,28790,-27475,-654,-30131,8489,12156,20328,14172,26045,-25786,-12163,-11365,21459,-27550,-10031,-32293,21324,2901,16464,-23031,-14255,3009,-18097,-12140,2594,20020,-24235,-27369,-24111,-20800,9868,-2860,12805,-5228,-15077,-26152,30131,-434,-13967,8065,-19831,-23910,-3609,12765,19568,23186,-31027,32204,-12363,7827,-26914,-15898,23442,-26754,-11408,23957,-18468,-13705,26880,-29732,-25256,-6650,24734,23359,29682,24722,-21166,-27111,11724,22183,-31736,-26119,-23598,28558,14622,32341,-22904,7503,-23940,30130,15229,19267,27634,-2898,-102,-11358,16601,-1034,-1
776,-29955,-7890,-16340,22205,3801,26620,16813,480,-16867,-26940,20897,-28328,26296,12152,-3359,20797,-26294,8633,26923,11879,32463,27965,29037,15754,32466,-15211,-22016,16223,-30848,-20969,10298,30330,-22353,-10834,10251,18711,-12260,-20758,-27005,29913,-21999,10338,-26117,-21476,20059,18049,15676,5311,9944,-29099,-17256,-5958,-31395,-24835,24122,13927,-19300,-31926,24127,-32375,-10203,-21313,15452,947,11688,-26421,-5741,-14117,-2622,-28156,5343,-10920,-29826,13182,21043,-31655,-27222,-24592,12757,-30854,-22666,-23217,-17985,23732,-18691,27229,-5578,-20286,30230,-16496,15707,-10127,-12991,8342,-20477,-32574,28784,-13227,-28669,8853,-6057,-14262,-15458,-21307,1434,25162,-24086,-8716,30538,-18514,-28936,10519,-2530,-29961,-9644,13085,-3152,-1688,-29963,20184,-25132,-25926,26746,-1150,-7463,1283,-10684,8065,4117,-12638,-20333,2501,20596,-30505,10974,-22571,-29642,-11369,6554,-19045,-432,20393,-21309,-16458,-3428,15622,21076,-1728,30062,-11341,-26373,19270,25888,31488,-22081,13763,-7823,-21524,-6712,-21868,13258,-20370,9708,-2392,-14140,-29851,15804,-25371,-23345,32443,-12160,-20838,-2233,15163,-17409,24495,-12806,11304,-22651,21405,26542,24980,26189,31632,31412,-14023,-12135,22537,-20867,29681,21830,3722,3963,-32145,-15245,22660,-29865,-23132,23676,-23255,-26923,-5620,9998,-30561,19506,18487,20743,3400,-15211,-19350,20135,25549,23436,20741,-5767,-30193,-14629,-26408,16804,-11828,25879,32404,-1775,15575,32192,-8711,23351,27159,-12283,-32390,20572,-23996,-13896,-5902,-24834,-3850,-24967,6927,-27845,5020,14824,-17165,16166,-25525,-9691,9558,6665,-3921,18344,-13841,27391,12877,9903,-11633,13809,-17989,-16201,30606,-13570,-27616,30467,-1759,-18117,-8434,-30183,-10950,-4741,28450,4074,-2955,-32513,-31102,22929,-24268,30890,27638,-3860,15231,-22404,-369,-25179,14046,-24094,-16365,13181,17721,-1319,-9234,-2987,-29884,-29385,-2575,-14089,16206,-10,-17140,-3643,21283,-24159,-15417,16579,-2044,-29026,31834,-23030,-21361,-7069,31216,10775,19189,-26767,-24745,-16569,-7046,26409,27
207,-10749,463,-29476,13439,20125,-6400,-9451,-20980,-13773,-28201,-2641,12549,-21160,10758,4013,18093,-23258,28600,8667,-13284,-23752,-11372,-12060,21862,10027,2831,32711,-16438,-15823,6071,-8527,-31305,20305,-20036,-22483,-3418,-2874,28585,21247,31707,29982,-17895,32450,7024,28488,15355,28656,20800,9036,7849,-20955,-19594,-30851,4385,14396,-4613,-17195,-14957,21965,25461,-8545,-9486,-27485,-29704,-29513,21819,-1414,-24823,13599,-26803,15455,18235,-32247,261,-6496,25413,-23928,-9477,-4313,28312,24601,-27942,-25317,-13399,16025,-18685,5830,13126,11667,16140,-29481,1354,16626,-17957,12254,27692,-18229,-27862,-23529,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4389,20926,-6040,-22200,-24483,27492,-32126,8187,8757,-9631,21358,-566,18247,-16119,-25226,8084,2846,-13568,-26227,-7777,-27019,25914,-4035,-20335,11143,-10495,-24962,-50,5067,-21366,-26811,16180,-14584,13830,6770,10117,-20418,22334,-14681,4451,16327,20645,22398,9704,11965,14737,13579,-26098,13029,15211,10744,-11718,30718,24430,16341,4944,-31989,-15515,-27639,-6511,12381,-18886,-4262,-16562,-5855,-16780,-13875,-2477,14063,-23081,-5085,-7892,-23049,-16610,6963,32033,-27143,-4712,-10448,-9721,13570,-18489,-25234,-9175,-28496,27877,-28608,-7691,9217,6275,24490,25116,9465,-6852,-30690,26565,-21636,-28536,-14004,-29908,16286,2871,-12291,-28243,3848,13419,25194,-21289,28547,8893,29077,-28749,-797,-27453,10979,31499,-5051,17463,31692,-21381,-19132,6375,-13134,-32666,4223,1381,32351,-21399,28500,-25464,-18801,1115,-14570,28001,-3895,15768,32242,-14439,13163,-8034,-8828,6165,10760,16414,20929,-10177,-14349,-8800,-30457,-13724,-1318,-30245,-29756,900,-30898,10025,17326,8758,-28244,-26533,11849,-2461,-21674,-5304,-4423,-11722,31474,-14051,15204,-6936,22193,-11727,-19835,-15716,-16449,-21680,19571,-29865,-1883,-15895,1108,32628,8947,18943,16796,9041,20854,-31146,32335,-5739,1198,8263,-27443,19338,23587,-32559,10665,26277,-9677,1326,22570,32008,21630,-14694,-15501,15516,3438,9996,-20459,-24186,22980,-8585,29601,-15772,11949,-29249,1655,-
21172,-12650,-17708,-7632,-15354,-5113,1457,24021,-11274,-6330,10710,-32184,8209,-13484,27798,18096,23622,15819,19048,25083,12560,-11590,-22194,-11631,-16818,744,-948,11692,-10039,31529,-28520,27935,-18796,-26938,22318,-13255,-22128,3943,-12563,-14089,3952,-29017,-29704,-15250,12835,31939,-20970,-10586,16444,-170,-5778,-5556,31334,24277,-23228,22421,12289,3151,-17712,5205,-3427,-24675,-25133,28431,25862,13116,-19085,28067,22944,-29656,13031,24184,32181,20136,16784,4224,1608,17419,-12272,-8053,-25473,-21194,9898,10796,-32044,32513,-15960,-16038,-24001,365,-19172,4521,-20518,-1543,-18742,20695,-9365,-23464,-25887,-10264,8562,20856,9771,8845,22530,20483,32108,23749,-19229,665,20675,-16872,4912,-21174,-4744,5733,10893,-12134,16025,23389,-27016,31150,-7195,-26121,-28440,20900,-30748,21325,-18378,-5152,-16692,-25772,15920,-25640,406,26479,12439,-7089,-5791,20009,-31755,16839,-13807,-14526,-30294,27556,-10562,4864,18412,25374,12045,-28524,11416,-16953,-192,5871,206,25205,373,11700,-25418,24565,961,-17549,15350,19814,-27220,32156,1159,7471,12165,-11017,28232,-24758,-11361,6239,13799,-12735,-17766,-9193,29867,-27840,-27738,21457,6261,32300,3408,-29163,2190,25785,9543,-11134,2029,-26841,-3833,24775,11807,-5621,19744,16433,-19995,14189,-3227,-14796,-12670,-5138,-8365,-30358,-236,27493,-22603,-31232,2940,-12036,-13480,-5063,17459,-9724,32405,27067,-12454,-24259,10082,-15457,27882,3036,22903,-29601,-4258,-30403,1268,11365,-25997,10255,-11886,1700,-14868,28277,15706,-15994,-32133,-3355,-18227,-21815,19627,-20038,-7677,7150,12227,-31643,-18651,29179,-4917,25647,-22594,-32272,-30375,20153,-21951,27038,-29871,-8831,16698,-21348,-10950,9760,-22627,-11378,-19182,11411,18060,27339,-15140,13359,-22402,-26129,-917,29099,-19702,8784,27163,-23154,-15410,17803,-2427,14932,30350,-26561,-31095,2121,31636,32675,-24939,-17277,-12119,-19396,11494,4220,-10475,-20845,9488,1100,11644,22693,-17396,25715,22630,-26530,16680,-1794,-15197,-10661,26155,-27253,20308,-6883,-25456,-27889,6229,-27742,13211,3
100,-25113,29596,-31407,-23877,-27848,-3806,-31399,6404,-24281,-7258,-19397,4883,11750,26890,-6127,-16321,478,3652,786,-25462,9064,-17463,-10261,20213,-6444,25871,22835,11097,21343,28046,-12658,13810,-2353,11910,-31500,12827,-29904,10827,8806,-9417,-12396,-8821,-18375,9269,2105,4035,19789,11240,25294,-18937,3649,-23803,-18665,17923,-19355,-4604,16632,-8515,-5226,-25364,-9782,-8365,28331,20441,18898,26254,-32435,13285,22516,15038,30925,7192,-1322,-32503,1608,16416,2568,14883,30749,11369,5070,17235,-23801,20989,-15690,16238,20429,-25323,-12261,-16225,12144,12047,12491,22847,-3901,1420,28248,-20667,-4620,15900,-736,4418,23056,19438,8086,-5405,17113,4465,-31043,-30640,-30003,10536,-22342,25429,30672,-26689,2302,7282,-29484,18054,29666,26145,-12627,712,-25594,-18875,19179,8155,-7961,17060,15162,27490,20138,-28768,-16274,-23569,-17758,8905,-5323,32734,-31017,19973,4216,-5376,163,22807,10624,6334,32058,-161,-3474,12950,-92,-9868,-19769,203,32377,9769,-10870,-2326,-16093,-14674,-7643,12944,18075,-30962,27729,-21169,9850,-2948,28444,15712,-17509,-3974,-31082,-30859,-21943,21330,-814,19751,13745,5293,5160,-21102,-25952,-13580,2601,29806,17373,-19373,-6462,-14883,-14573,10175,-8562,30781,11449,3286,26582,-25911,-18701,32278,28895,9572,-1191,-18977,12940,-31507,-7882,-11516,11124,2071,-13838,12471,-13669,-20961,10666,-12459,-26954,-22187,-23274,-16492,3934,-29939,12552,32480,-14499,-22088,16301,8979,-8210,8298,8095,21949,29519,30217,-17454,12430,-25471,15408,-15498,19010,-5462,-21464,-30546,-22564,-18137,25660,-15805,28365,22431,-12764,-32065,-24297,-408,11770,27747,12659,3252,-31611,27797,-12747,-30347,-18671,-10345,-31790,18746,24805,28433,21417,-19630,-32093,25621,830,-6593,-5428,-16653,19735,5955,1363,6960,-15187,-13627,11464,-5198,-12837,-31869,-27845,-6562,6967,31797,-14816,-7493,-11508,8777,11189,4602,-3441,-11737,-22608,-26630,25199,26363,-6906,3683,112,2295,8953,-26973,-31038,-30378,17859,-7087,-658,-17674,-8447,19566,24039,-29865,4103,28574,-17287,-30127,3924,16660,32
344,4001,-26833,3768,-8670,-9164,-21738,13506,-16265,26423,-7320,-19518,-16130,26254,18327,27349,-7948,11570,22353,-1823,-21577,-30829,1262,-31544,24457,17451,24415,-4369,-14738,7548,26857,-27481,-25981,15959,5728,23248,25480,29511,-24167,20650,18665,-1336,-21743,-26758,25065,13318,-9407,-11477,-1652,-18929,7497,-19523,2587,26627,30843,31771,14353,-24199,22191,5118,-18669,9312,24544,2945,-13776,-561,20892,-12417,15078,7641,5546,23682,26858,30934,15404,-18191,-8405,-23189,-10423,-24880,16326,-8930,31630,3626,-31406,-15801,12474,-26646,-27649,-190,21537,5987,27906,-26384,6265,29848,29772,-8789,-25368,-4818,20326,28377,28053,-31018,27864,-16540,26726,-18180,6609,-27106,-7529,-10559,22177,-27894,-32753,15195,-10521,-30728,-27228,-29750,-6499,-11275,25005,-15761,-28250,-7732,29364,-30680,-18686,-31855,-28183,-32368,-27365,1966,12591,30915,-25886,15790,2463,-18104,-22552,20864,5368,-10374,23145,25108,-23461,23717,-18572,-4422,28070,-7157,-27848,-17185,-3752,14656,32323,10868,-3422,-17260,-6224,27170,8003,29542,-10244,24573,-28848,25393,-31665,29418,22931,-26279,765,18642,-3479,26172,-12443,-17283,-19713,-28162,-8270,21850,-31123,-19538,717,-14693,-16743,-4560,-21047,23001,7044,2205,27118,26177,27636,2991,18076,3531,8652,22353,-21713,26805,-31910,-11021,2411,1152,-1963,-18131,30243,28305,-13334,22119,4799,18736,-31977,-21369,-24853,15463,20111,-14596,-6571,-26299,-16417,-20881,10720,-23413,16825,-6713,11600,-4885,-10547,32574,-9449,-23368,26047,25405,20499,31877,-669,-29461,-20881,-14145,-24316,-32472,29990,-10881,-7507,25189,-4879,21591,14606,-1361,-17278,-10959,-13439,5674,-29857,9190,-23436,-21536,-11878,-32246,20579,2201,13350,-28656,10681,830,-1204,5629,-30939,23150,-1968,25832,3498,32185,30432,-20937,28810,27188,-6944,-21218,-24945,16514,32427,29195,31999,13383,7396,-12400,22084,22670,-23914,-21509,1407,-26860,-15200,-24008,-14966,-11508,-27188,20663,2819,26097,4509,3733,-28095,-23100,-23401,3215,6262,27372,-30536,-14309,25851,15825,30085,20795,-2433,9822,20276,19611
,-19041,-9709,-4233,3815,23288,-9174,-3094,-27278,32227,-24785,-17798,-29341,-4798,16781,25803,27860,28771,-23177,-30956,-3012,16016,-25465,27869,20245,-19540,29917,-18452,28088,12448,4368,12762,-1823,-9734,-6843,3732,-7583,-1591,20846,-8736,22992,31822,-17767,19557,-18307,31142,-24037,2764,-28703,16887,-31970,-23827,12268,28332,-10964,-16739,-582,29898,-18280,32517,-10905,-24387,-4009,-4745,-13052,-1475,-14942,-16695,-4486,-8509,-11309,-1399,28109,-6441,-23808,-18581,554,31921,-5491,-24009,21173,28273,-16780,16787,31421,-30520,-17488,-30614,31977,18500,-875,8358,11034,6047,27557,0,0,0,0,0,0,0,0 diff --git a/tensorflow/lite/micro/integration_tests/seanet/pad/pad2_input0_int16.csv b/tensorflow/lite/micro/integration_tests/seanet/pad/pad2_input0_int16.csv new file mode 100644 index 0000000..08ac897 --- /dev/null +++ b/tensorflow/lite/micro/integration_tests/seanet/pad/pad2_input0_int16.csv @@ -0,0 +1 @@ +-3420,31602,-17513,23798,9060,-1531,1361,-29223,-16708,-2387,13498,27934,-25224,-7812,-21846,16859,-421,-320,-24600,20148,-29802,-20860,16790,-61,-5728,-12166,-27690,621,-16282,-22510,-16195,21432,1887,26817,-14490,-22342,8201,11675,31410,-10170,27061,20339,978,-28201,24409,-17079,-17012,-19749,-6005,8266,-14334,605,-12325,-11432,13312,7278,27796,-20202,20999,-24013,-21818,-4633,-18057,12382,1463,6072,-20641,5042,1419,22888,5185,-14874,-32191,-28725,30791,5898,7072,-26372,22336,20338,-6573,-10629,-10693,25203,4048,4929,7668,-4354,5564,-27703,15424,-18931,23534,-5515,10758,1533,30157,-29557,-3513,-22444,-8829,-23182,-11191,825,30714,30316,-28812,28978,-12087,10291,-22581,30594,21238,6898,-11101,-13326,13593,-8359,-10286,-16015,-973,6511,-12739,-5531,23909,-31849,-9559,19081,-2690,8906,-31286,10224,-14848,-1421,-9632,-4003,-32590,-18763,-19049,15354,22545,19368,-2464,-19077,2689,28657,-13549,-9915,-23691,-4795,3905,-4244,-17408,17839,17667,-16725,7644,-25997,-24597,-19373,12317,1066,-4543,15495,-11835,20123,-24020,25150,-22081,-1168,6886,-12911,7297,-16343,-5817,-24966,
23331,32678,25779,7031,-30562,20447,17693,-15870,22119,-10700,16271,-14330,10345,24644,28956,2501,-9501,-9858,19603,-4125,-15278,18565,-23274,18208,-4484,-6089,-22186,17307,4357,-16578,-9103,-1066,-29024,-1762,4337,22393,-25849,13525,2057,-26899,-14876,28539,2843,-14668,30318,7801,-16710,-2664,2741,-26278,4596,-3176,-29071,-26165,9786,-5550,13358,23557,-19033,17265,24110,30970,25466,9502,9400,30009,20794,-109,4814,-6688,-26919,201,-22122,-4465,-25392,4654,23429,29938,-16897,27138,-27644,22254,3449,-9639,-28422,-19773,-25811,11223,-10777,14072,-15991,22270,3551,7546,27908,17018,-22177,8918,-10360,3144,-10662,-15262,-10075,32212,1371,1144,-27776,5071,-8198,-15629,-27651,-20539,5083,-498,-6094,-9647,-2380,-20451,-20956,-19673,28474,-27729,-28967,-19570,9377,-23463,-25252,-27393,-8071,-26989,6327,-22448,9568,31570,31595,-27011,18749,-24435,15442,5282,19250,-32293,-2829,29563,-26025,-22530,31386,-443,15339,25267,3412,-14320,19979,4835,-13228,-21452,-16163,-31011,-8178,10121,-5107,-23194,-26512,6732,-34,-19730,-14255,-27153,24238,-2663,-15272,-26186,-24134,2947,-28143,28846,-29457,-1251,6642,-27431,27099,-2684,-15684,4966,11702,-433,-16927,2853,-27253,-18779,15641,-8827,-23246,27787,8763,1706,9031,6173,26903,13153,-26006,19919,1209,-26023,-29945,4966,8127,-6285,8351,15339,8275,-11568,21653,-25387,30628,5053,31789,13861,-14896,6753,10595,7637,31804,-32122,-11988,31428,6942,4229,26135,13708,25895,-10744,5887,-30106,28508,-27439,-19328,-5304,-1141,11236,-9000,26998,-13908,18381,-9540,-14894,-4729,-10100,16723,20120,32501,-13740,8829,22628,-12587,-8202,9509,3200,13551,-11296,-26974,20946,-2844,-11995,-11620,-18378,-7178,4449,-6493,-2616,7194,8994,-16108,-21285,31608,14136,25516,8178,24832,-16446,6901,-11205,-21337,-31950,13230,11643,-25918,-15713,-20648,21773,20939,-22695,26567,27367,11439,-30409,-6756,22379,-6079,16314,-3747,-7058,-25228,-8172,-6613,14619,-27631,20715,-15326,-23258,9203,11780,-29685,13664,-3132,-9212,5608,-20684,-15240,6426,-25734,19795,29721,-18316,27336,-2
522,26264,-3919,27453,-7162,-19773,-2265,11195,-19918,30243,-20969,-17714,-10373,18692,-31583,-21149,-22349,19313,6557,18968,-3155,-3760,-22468,-12380,-5930,29787,23354,29301,-3096,-24384,32144,24890,10699,-25805,-19053,9742,26673,-24416,-29731,23249,31500,-25606,30623,14463,22054,15657,17378,-15006,29335,4172,-28218,31639,872,-12173,12337,-4641,5297,29933,21072,-14057,12648,-7619,29676,-15770,18399,15422,-16139,-21229,29514,-25942,20455,2390,569,12820,23545,-11114,15540,20963,6979,-31931,1740,-32326,-867,-13212,17534,-4121,-8233,-19901,17310,23291,-30375,-13563,5121,4474,-11578,-20607,-28375,-17840,1753,-31282,-15919,1467,27329,-29333,-18514,-17187,-24463,13470,27494,-12864,8393,-9629,26724,31771,-13790,-22871,20170,10392,-11674,-18130,-11172,24437,-16295,-19131,-28678,23399,11107,-25087,-18537,-15090,10375,25230,8895,-19123,31776,-12976,18651,23820,-15431,13122,19630,26702,22389,-14685,-258,22198,-6190,-26548,10467,5129,29032,-1681,22735,788,-26847,16342,-15515,-13819,-11941,15858,25609,4946,-25592,13599,22178,553,-23519,-10133,3449,31024,-32413,26108,-18087,-9007,-13208,28828,2353,-31935,-28073,-10694,10028,-14362,-5571,-29259,-21845,-7848,-15902,-24318,-16634,26324,12261,-12542,11024,-22097,8576,-11504,-14935,-4484,26764,-20369,3948,-31808,8262,-6232,5060,-31366,2860,32234,19998,-32414,-32013,29204,28675,-26590,-2174,12957,-16044,-30777,-28881,-10195,5692,6062,29278,-791,-1041,-28,15037,-4702,-6534,10404,7007,27173,-21190,-19973,25312,-21121,-30763,18315,-23329,-11948,32127,-32599,7126,1481,-2997,-30136,-16035,-10854,-1440,-28528,5903,9657,9658,-13425,12064,10310,28774,9288,1777,-24382,22311,-7753,7709,20070,-3897,-8190,4076,27655,32502,1870,18287,-14739,-26614,-30881,-14666,28790,-27475,-654,-30131,8489,12156,20328,14172,26045,-25786,-12163,-11365,21459,-27550,-10031,-32293,21324,2901,16464,-23031,-14255,3009,-18097,-12140,2594,20020,-24235,-27369,-24111,-20800,9868,-2860,12805,-5228,-15077,-26152,30131,-434,-13967,8065,-19831,-23910,-3609,12765,19568,23186,-31
027,32204,-12363,7827,-26914,-15898,23442,-26754,-11408,23957,-18468,-13705,26880,-29732,-25256,-6650,24734,23359,29682,24722,-21166,-27111,11724,22183,-31736,-26119,-23598,28558,14622,32341,-22904,7503,-23940,30130,15229,19267,27634,-2898,-102,-11358,16601,-1034,-1776,-29955,-7890,-16340,22205,3801,26620,16813,480,-16867,-26940,20897,-28328,26296,12152,-3359,20797,-26294,8633,26923,11879,32463,27965,29037,15754,32466,-15211,-22016,16223,-30848,-20969,10298,30330,-22353,-10834,10251,18711,-12260,-20758,-27005,29913,-21999,10338,-26117,-21476,20059,18049,15676,5311,9944,-29099,-17256,-5958,-31395,-24835,24122,13927,-19300,-31926,24127,-32375,-10203,-21313,15452,947,11688,-26421,-5741,-14117,-2622,-28156,5343,-10920,-29826,13182,21043,-31655,-27222,-24592,12757,-30854,-22666,-23217,-17985,23732,-18691,27229,-5578,-20286,30230,-16496,15707,-10127,-12991,8342,-20477,-32574,28784,-13227,-28669,8853,-6057,-14262,-15458,-21307,1434,25162,-24086,-8716,30538,-18514,-28936,10519,-2530,-29961,-9644,13085,-3152,-1688,-29963,20184,-25132,-25926,26746,-1150,-7463,1283,-10684,8065,4117,-12638,-20333,2501,20596,-30505,10974,-22571,-29642,-11369,6554,-19045,-432,20393,-21309,-16458,-3428,15622,21076,-1728,30062,-11341,-26373,19270,25888,31488,-22081,13763,-7823,-21524,-6712,-21868,13258,-20370,9708,-2392,-14140,-29851,15804,-25371,-23345,32443,-12160,-20838,-2233,15163,-17409,24495,-12806,11304,-22651,21405,26542,24980,26189,31632,31412,-14023,-12135,22537,-20867,29681,21830,3722,3963,-32145,-15245,22660,-29865,-23132,23676,-23255,-26923,-5620,9998,-30561,19506,18487,20743,3400,-15211,-19350,20135,25549,23436,20741,-5767,-30193,-14629,-26408,16804,-11828,25879,32404,-1775,15575,32192,-8711,23351,27159,-12283,-32390,20572,-23996,-13896,-5902,-24834,-3850,-24967,6927,-27845,5020,14824,-17165,16166,-25525,-9691,9558,6665,-3921,18344,-13841,27391,12877,9903,-11633,13809,-17989,-16201,30606,-13570,-27616,30467,-1759,-18117,-8434,-30183,-10950,-4741,28450,4074,-2955,-32513,-31102,22929,-2
4268,30890,27638,-3860,15231,-22404,-369,-25179,14046,-24094,-16365,13181,17721,-1319,-9234,-2987,-29884,-29385,-2575,-14089,16206,-10,-17140,-3643,21283,-24159,-15417,16579,-2044,-29026,31834,-23030,-21361,-7069,31216,10775,19189,-26767,-24745,-16569,-7046,26409,27207,-10749,463,-29476,13439,20125,-6400,-9451,-20980,-13773,-28201,-2641,12549,-21160,10758,4013,18093,-23258,28600,8667,-13284,-23752,-11372,-12060,21862,10027,2831,32711,-16438,-15823,6071,-8527,-31305,20305,-20036,-22483,-3418,-2874,28585,21247,31707,29982,-17895,32450,7024,28488,15355,28656,20800,9036,7849,-20955,-19594,-30851,4385,14396,-4613,-17195,-14957,21965,25461,-8545,-9486,-27485,-29704,-29513,21819,-1414,-24823,13599,-26803,15455,18235,-32247,261,-6496,25413,-23928,-9477,-4313,28312,24601,-27942,-25317,-13399,16025,-18685,5830,13126,11667,16140,-29481,1354,16626,-17957,12254,27692,-18229,-27862,-23529,4389,20926,-6040,-22200,-24483,27492,-32126,8187,8757,-9631,21358,-566,18247,-16119,-25226,8084,2846,-13568,-26227,-7777,-27019,25914,-4035,-20335,11143,-10495,-24962,-50,5067,-21366,-26811,16180,-14584,13830,6770,10117,-20418,22334,-14681,4451,16327,20645,22398,9704,11965,14737,13579,-26098,13029,15211,10744,-11718,30718,24430,16341,4944,-31989,-15515,-27639,-6511,12381,-18886,-4262,-16562,-5855,-16780,-13875,-2477,14063,-23081,-5085,-7892,-23049,-16610,6963,32033,-27143,-4712,-10448,-9721,13570,-18489,-25234,-9175,-28496,27877,-28608,-7691,9217,6275,24490,25116,9465,-6852,-30690,26565,-21636,-28536,-14004,-29908,16286,2871,-12291,-28243,3848,13419,25194,-21289,28547,8893,29077,-28749,-797,-27453,10979,31499,-5051,17463,31692,-21381,-19132,6375,-13134,-32666,4223,1381,32351,-21399,28500,-25464,-18801,1115,-14570,28001,-3895,15768,32242,-14439,13163,-8034,-8828,6165,10760,16414,20929,-10177,-14349,-8800,-30457,-13724,-1318,-30245,-29756,900,-30898,10025,17326,8758,-28244,-26533,11849,-2461,-21674,-5304,-4423,-11722,31474,-14051,15204,-6936,22193,-11727,-19835,-15716,-16449,-21680,19571,-29865,-1
883,-15895,1108,32628,8947,18943,16796,9041,20854,-31146,32335,-5739,1198,8263,-27443,19338,23587,-32559,10665,26277,-9677,1326,22570,32008,21630,-14694,-15501,15516,3438,9996,-20459,-24186,22980,-8585,29601,-15772,11949,-29249,1655,-21172,-12650,-17708,-7632,-15354,-5113,1457,24021,-11274,-6330,10710,-32184,8209,-13484,27798,18096,23622,15819,19048,25083,12560,-11590,-22194,-11631,-16818,744,-948,11692,-10039,31529,-28520,27935,-18796,-26938,22318,-13255,-22128,3943,-12563,-14089,3952,-29017,-29704,-15250,12835,31939,-20970,-10586,16444,-170,-5778,-5556,31334,24277,-23228,22421,12289,3151,-17712,5205,-3427,-24675,-25133,28431,25862,13116,-19085,28067,22944,-29656,13031,24184,32181,20136,16784,4224,1608,17419,-12272,-8053,-25473,-21194,9898,10796,-32044,32513,-15960,-16038,-24001,365,-19172,4521,-20518,-1543,-18742,20695,-9365,-23464,-25887,-10264,8562,20856,9771,8845,22530,20483,32108,23749,-19229,665,20675,-16872,4912,-21174,-4744,5733,10893,-12134,16025,23389,-27016,31150,-7195,-26121,-28440,20900,-30748,21325,-18378,-5152,-16692,-25772,15920,-25640,406,26479,12439,-7089,-5791,20009,-31755,16839,-13807,-14526,-30294,27556,-10562,4864,18412,25374,12045,-28524,11416,-16953,-192,5871,206,25205,373,11700,-25418,24565,961,-17549,15350,19814,-27220,32156,1159,7471,12165,-11017,28232,-24758,-11361,6239,13799,-12735,-17766,-9193,29867,-27840,-27738,21457,6261,32300,3408,-29163,2190,25785,9543,-11134,2029,-26841,-3833,24775,11807,-5621,19744,16433,-19995,14189,-3227,-14796,-12670,-5138,-8365,-30358,-236,27493,-22603,-31232,2940,-12036,-13480,-5063,17459,-9724,32405,27067,-12454,-24259,10082,-15457,27882,3036,22903,-29601,-4258,-30403,1268,11365,-25997,10255,-11886,1700,-14868,28277,15706,-15994,-32133,-3355,-18227,-21815,19627,-20038,-7677,7150,12227,-31643,-18651,29179,-4917,25647,-22594,-32272,-30375,20153,-21951,27038,-29871,-8831,16698,-21348,-10950,9760,-22627,-11378,-19182,11411,18060,27339,-15140,13359,-22402,-26129,-917,29099,-19702,8784,27163,-23154,-15410,17803,
-2427,14932,30350,-26561,-31095,2121,31636,32675,-24939,-17277,-12119,-19396,11494,4220,-10475,-20845,9488,1100,11644,22693,-17396,25715,22630,-26530,16680,-1794,-15197,-10661,26155,-27253,20308,-6883,-25456,-27889,6229,-27742,13211,3100,-25113,29596,-31407,-23877,-27848,-3806,-31399,6404,-24281,-7258,-19397,4883,11750,26890,-6127,-16321,478,3652,786,-25462,9064,-17463,-10261,20213,-6444,25871,22835,11097,21343,28046,-12658,13810,-2353,11910,-31500,12827,-29904,10827,8806,-9417,-12396,-8821,-18375,9269,2105,4035,19789,11240,25294,-18937,3649,-23803,-18665,17923,-19355,-4604,16632,-8515,-5226,-25364,-9782,-8365,28331,20441,18898,26254,-32435,13285,22516,15038,30925,7192,-1322,-32503,1608,16416,2568,14883,30749,11369,5070,17235,-23801,20989,-15690,16238,20429,-25323,-12261,-16225,12144,12047,12491,22847,-3901,1420,28248,-20667,-4620,15900,-736,4418,23056,19438,8086,-5405,17113,4465,-31043,-30640,-30003,10536,-22342,25429,30672,-26689,2302,7282,-29484,18054,29666,26145,-12627,712,-25594,-18875,19179,8155,-7961,17060,15162,27490,20138,-28768,-16274,-23569,-17758,8905,-5323,32734,-31017,19973,4216,-5376,163,22807,10624,6334,32058,-161,-3474,12950,-92,-9868,-19769,203,32377,9769,-10870,-2326,-16093,-14674,-7643,12944,18075,-30962,27729,-21169,9850,-2948,28444,15712,-17509,-3974,-31082,-30859,-21943,21330,-814,19751,13745,5293,5160,-21102,-25952,-13580,2601,29806,17373,-19373,-6462,-14883,-14573,10175,-8562,30781,11449,3286,26582,-25911,-18701,32278,28895,9572,-1191,-18977,12940,-31507,-7882,-11516,11124,2071,-13838,12471,-13669,-20961,10666,-12459,-26954,-22187,-23274,-16492,3934,-29939,12552,32480,-14499,-22088,16301,8979,-8210,8298,8095,21949,29519,30217,-17454,12430,-25471,15408,-15498,19010,-5462,-21464,-30546,-22564,-18137,25660,-15805,28365,22431,-12764,-32065,-24297,-408,11770,27747,12659,3252,-31611,27797,-12747,-30347,-18671,-10345,-31790,18746,24805,28433,21417,-19630,-32093,25621,830,-6593,-5428,-16653,19735,5955,1363,6960,-15187,-13627,11464,-5198,-12837,-3186
9,-27845,-6562,6967,31797,-14816,-7493,-11508,8777,11189,4602,-3441,-11737,-22608,-26630,25199,26363,-6906,3683,112,2295,8953,-26973,-31038,-30378,17859,-7087,-658,-17674,-8447,19566,24039,-29865,4103,28574,-17287,-30127,3924,16660,32344,4001,-26833,3768,-8670,-9164,-21738,13506,-16265,26423,-7320,-19518,-16130,26254,18327,27349,-7948,11570,22353,-1823,-21577,-30829,1262,-31544,24457,17451,24415,-4369,-14738,7548,26857,-27481,-25981,15959,5728,23248,25480,29511,-24167,20650,18665,-1336,-21743,-26758,25065,13318,-9407,-11477,-1652,-18929,7497,-19523,2587,26627,30843,31771,14353,-24199,22191,5118,-18669,9312,24544,2945,-13776,-561,20892,-12417,15078,7641,5546,23682,26858,30934,15404,-18191,-8405,-23189,-10423,-24880,16326,-8930,31630,3626,-31406,-15801,12474,-26646,-27649,-190,21537,5987,27906,-26384,6265,29848,29772,-8789,-25368,-4818,20326,28377,28053,-31018,27864,-16540,26726,-18180,6609,-27106,-7529,-10559,22177,-27894,-32753,15195,-10521,-30728,-27228,-29750,-6499,-11275,25005,-15761,-28250,-7732,29364,-30680,-18686,-31855,-28183,-32368,-27365,1966,12591,30915,-25886,15790,2463,-18104,-22552,20864,5368,-10374,23145,25108,-23461,23717,-18572,-4422,28070,-7157,-27848,-17185,-3752,14656,32323,10868,-3422,-17260,-6224,27170,8003,29542,-10244,24573,-28848,25393,-31665,29418,22931,-26279,765,18642,-3479,26172,-12443,-17283,-19713,-28162,-8270,21850,-31123,-19538,717,-14693,-16743,-4560,-21047,23001,7044,2205,27118,26177,27636,2991,18076,3531,8652,22353,-21713,26805,-31910,-11021,2411,1152,-1963,-18131,30243,28305,-13334,22119,4799,18736,-31977,-21369,-24853,15463,20111,-14596,-6571,-26299,-16417,-20881,10720,-23413,16825,-6713,11600,-4885,-10547,32574,-9449,-23368,26047,25405,20499,31877,-669,-29461,-20881,-14145,-24316,-32472,29990,-10881,-7507,25189,-4879,21591,14606,-1361,-17278,-10959,-13439,5674,-29857,9190,-23436,-21536,-11878,-32246,20579,2201,13350,-28656,10681,830,-1204,5629,-30939,23150,-1968,25832,3498,32185,30432,-20937,28810,27188,-6944,-21218,-24945,16514
,32427,29195,31999,13383,7396,-12400,22084,22670,-23914,-21509,1407,-26860,-15200,-24008,-14966,-11508,-27188,20663,2819,26097,4509,3733,-28095,-23100,-23401,3215,6262,27372,-30536,-14309,25851,15825,30085,20795,-2433,9822,20276,19611,-19041,-9709,-4233,3815,23288,-9174,-3094,-27278,32227,-24785,-17798,-29341,-4798,16781,25803,27860,28771,-23177,-30956,-3012,16016,-25465,27869,20245,-19540,29917,-18452,28088,12448,4368,12762,-1823,-9734,-6843,3732,-7583,-1591,20846,-8736,22992,31822,-17767,19557,-18307,31142,-24037,2764,-28703,16887,-31970,-23827,12268,28332,-10964,-16739,-582,29898,-18280,32517,-10905,-24387,-4009,-4745,-13052,-1475,-14942,-16695,-4486,-8509,-11309,-1399,28109,-6441,-23808,-18581,554,31921,-5491,-24009,21173,28273,-16780,16787,31421,-30520,-17488,-30614,31977,18500,-875,8358,11034,6047,27557 diff --git a/tensorflow/lite/micro/integration_tests/seanet/pad/pad3.tflite b/tensorflow/lite/micro/integration_tests/seanet/pad/pad3.tflite new file mode 100644 index 0000000..9c56ab3 Binary files /dev/null and b/tensorflow/lite/micro/integration_tests/seanet/pad/pad3.tflite differ diff --git a/tensorflow/lite/micro/integration_tests/seanet/pad/pad3_golden_int16.csv b/tensorflow/lite/micro/integration_tests/seanet/pad/pad3_golden_int16.csv new file mode 100644 index 0000000..f618d12 --- /dev/null +++ b/tensorflow/lite/micro/integration_tests/seanet/pad/pad3_golden_int16.csv @@ -0,0 +1 @@ 
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,9619,18648,24963,-5114,-31198,1384,23227,-2744,21316,2613,13906,-28189,26877,6394,30734,-10970,7919,-18418,25511,-23839,-13711,-10586,22938,400,16609,-25691,13981,-5958,5548,-3090,25468,2112,-19776,-25293,-3307,4083,1473,-7278,-26132,-24521,-16955,19395,12267,17416,17569,14461,5767,15890,-17037,-4542,4780,31190,11614,12719,-16138,23900,-31456,-28839,-30114,-12966,15063,11732,-696,-27977,-9909,13205,22110,-19431,-28018,31766,-27312,537,24960,-10805,-7865,-11744,-26092,-440,-25049,343,-25248,-27944,25281,-26864,14230,21458,4297,-16159,9772,-7044,24336,11591,-4578,2566,-25590,-15651,31407,-14104,1422,20619,-8908,-12887,-5016,-605,-21713,25100,9222,-14978,24592,14710,8662,16543,6730,-27811,19867,7089,-16798,14849,18658,-3385,-283,-15731,-31745,-21066,-8815,-16845,-3500,6760,15238,20214,-25833,-10773,8491,8350,-7221,-15315,-23465,27532,28150,-32033,6108,-6618,19517,21147,-6677,2167,12665,17070,714,-3409,-8607,8856,3564,-25801,2012,10562,31040,22054,15779,374,16843,14847,26042,4542,-11226,-15946,-16549,3109,24411,-18994,20043,-19616,18648,-3260,25767,-27275,6456,-7358,17371,25113,-16488,-7297,19544,1177,-30208,-14705,12067,-29278,-29193,7321,-21188,-2402,23780,-3086,13707,16616,13383,-7228,6434,1806,-16057,-28173,6202,-26179,-30557,-21460,-15183,-14057,32372,20017,24786,29814,-3803,4627,23701,-10772,2452,-5521,12483,-7168,-16907,-6832,-25078,-27146,29027,19789,1670,-15573,14745,10343,26865,-29073,7871,-28263,-30085,-26662,13158,14066,-11252,9013,4354,-16832,-10156,-23656,19699,15206,13976,-31005,3612,23932,-14892,-19155,-1724,16591,-15950,17167,2137,-7105,16352,-9582,8750,-21615,-11316,5905,-29279,7608,20165,9871,-4715,-20600,-199,-22641,-2499,16966,-4012,19200,-12241,4686,26540,-6485,11172,3688,-6185,-32522,10210,29886,-26601,-21545,11584,9511,-5463,3650,-1535,-13988,-18658,13129,23228,32206,7998,1815,25031,20821,19998,-31114,-22411,4513,-17153,30122,-13892,5553,9396,-18210,7627,10139,-20245,23180,-10975,-5177,-28091,-2504,-3625,-15382,28
882,22369,14504,5560,31265,-299,9516,-5573,11691,-2850,6171,29466,-16587,-30454,-1732,-3335,-13794,-27067,6489,-8724,-28920,26071,-8737,-19056,-12016,30605,31565,-3303,-20417,-3240,21652,725,-6896,32131,-2274,17573,7949,5398,-18111,4007,-14029,-19032,14130,25937,30592,-21695,29818,29625,26797,-652,-27888,-27085,-17717,3878,17196,19872,-11723,8990,25989,-32427,-20855,-24875,-2831,-8217,4915,-27445,-17566,-2961,2487,4874,-15194,-32768,-11613,-9947,909,19213,11101,-29618,-27966,-5397,-13634,-6164,17296,15881,13487,-24025,-8199,-32473,-4628,-17157,27696,-28836,30156,-3059,2097,-12125,-30488,10059,-27009,2586,-5569,5938,-30536,30012,-18565,-12974,-5565,13443,-149,22878,41,-28767,31600,-22827,18182,8144,29267,-19864,-20057,-6020,-12862,-8337,13464,-31927,-14757,-6774,7402,-7289,14939,16249,-4557,-11694,-1189,25894,12479,-7017,-12592,2927,30596,23143,-28391,-6929,22000,-32623,19720,18525,30231,-27220,8661,14330,-11415,-603,-14254,32000,13183,-28815,-779,-31270,-20510,-18636,18876,-9469,-28565,23583,17708,7915,16766,21935,-15295,-2914,22760,-24183,-28375,9207,-4399,3960,-18750,-11866,3709,28907,-30932,26771,22944,-3319,-8110,8207,14208,27722,-5286,-2177,-10530,3559,-20180,32619,-30405,20910,17040,-17180,-21493,-30471,-11576,-24563,29050,-202,3519,-8329,14111,12517,-13305,-26970,-9839,-3148,-15345,-20932,26424,17490,1604,-1810,18709,-28356,21630,-1389,29842,-16149,10647,-18901,-17951,-16606,16677,-25913,10759,6936,-12030,-27525,26822,29338,5235,6984,11921,24916,32235,-27095,-7951,-28134,-30386,26556,19731,-14389,-22898,-27603,-5513,12678,-10090,637,8191,-22325,4450,-29972,29240,10683,-4131,13265,826,25421,-21071,6099,-12070,-9526,20451,-13979,-16439,19204,-27409,-21941,-5353,32075,16588,-28169,28838,11236,-11003,-24378,-4634,-1491,-7371,-775,25745,-157,94,-31022,15586,-22806,-4549,-3845,31510,11010,-22392,-20660,-8216,26090,-31534,30528,53,2150,15286,717,27555,-7178,26573,13133,-4544,14770,2456,-11121,2763,28486,-14231,-15522,18274,-20184,-23305,-26185,-24187,-29048,-2349,18
751,3907,18620,-3461,-16386,-8650,-3688,-24064,8037,1260,18200,-32677,13409,-22859,-16501,-25040,16275,7027,-24432,20384,-3254,-3612,20548,-18046,16385,-5407,-32498,14648,3895,-6688,-12683,9635,-21773,-6236,15212,27212,-26115,20758,16192,13241,25036,4303,-13221,29927,-9426,-11118,-5032,27153,27339,-28553,-4350,10629,-9596,18737,6212,4041,-27793,15191,-27447,3943,-17152,-20679,26978,-20711,27361,4282,-325,25507,3861,25557,21893,-26247,-12578,-21311,1616,-2730,-22155,-12944,16008,1261,10148,26410,-13758,-27672,8472,12041,20158,25483,-22777,31234,-584,-17216,-21270,-13324,24737,31037,-5975,-17389,26738,24108,19742,-28034,31180,30805,-6811,-11712,32695,10545,-19457,9180,-1460,-30026,-7498,20918,3915,-18530,-24698,-3592,12974,-32745,-22469,-14110,24165,21705,20088,10047,13458,1030,2675,-18659,-26352,3000,-30530,20878,-16465,-2285,13385,9491,-31229,25766,27890,-3463,-2958,3874,1014,12547,-25010,-5541,27538,7584,5225,-2642,8268,-12614,21938,-10756,-31789,31865,-27141,-250,19006,30696,11615,4680,20583,30834,17004,26101,13950,14661,-25889,-17450,-18623,20215,-31576,-4172,-28444,17800,-479,9316,21304,-19666,-14446,-13887,28022,-8216,31850,7812,22540,31964,-22294,13233,-31854,-31806,-13029,-15682,13479,-18208,3602,-24075,12863,-20179,29160,-23709,20242,5997,-23753,-31522,14779,2675,-30446,1574,23475,-22279,-14082,-3161,6486,-31909,-27762,10291,-23098,9723,-21324,23390,-7509,-25320,27157,-12949,8091,-10993,8898,-6046,-13203,-2201,7498,-11491,30697,-21741,9825,-13889,3684,-4314,5641,-24009,24869,14467,13837,28175,-165,5686,-2248,20160,-27604,24724,-4988,-20348,-602,7338,-31302,28359,-29597,8604,-28700,-19888,-5619,28053,26454,25851,10811,24450,-24283,-25707,21372,3409,-28945,26121,13739,-5478,-11684,32499,-21631,-27746,15991,-2739,-20967,-24429,3075,-32122,17830,10438,17241,12372,-6296,-22601,-8758,-4166,-931,24621,27037,3608,-14983,10790,-7616,-24777,6405,-13817,-23557,26853,-21470,6987,6997,32081,-17369,-31442,-27674,-8308,190,-32676,-30850,-27195,27544,21647,-2471,7407,-2871,
-26952,-25297,4945,365,1972,-24293,27263,6293,6946,-25236,20955,16846,-25553,26644,-19421,15043,24046,24722,-8533,32074,14290,-5247,-7685,29058,25205,-29535,19102,15409,-25768,-30820,13009,-6572,7051,-32334,30796,-2587,-1517,-10948,-28263,-3317,891,-11164,26405,-10378,-10356,-19013,716,6801,-2000,-22745,19543,16641,2765,8221,19915,-32086,-3371,-14746,-11462,-17447,21651,-32061,5969,12169,20962,12562,31313,20425,-4391,16561,-28586,-5602,14647,24958,30147,-3381,-22875,-4861,-28265,1639,8365,27102,23438,20966,-10025,-19656,-13096,9511,27785,18219,-25865,18092,23392,30189,19864,-9663,-29051,2711,6125,-2744,-16040,3217,10642,-29199,10243,17517,10889,-24455,-20205,16915,-18050,15645,17535,-7534,-22127,-14408,-7616,-2093,31459,7639,-12548,-21084,5867,-19128,31998,-14248,-1250,-16729,-23812,-13059,-30125,-5873,-9404,-4180,20844,-11480,-99,31260,-20596,18419,23434,29788,-25052,11705,5887,22897,-2139,22765,-7210,12411,32141,20787,2116,24904,7881,-17706,-16559,8230,2732,20870,-6083,3958,4321,-19958,13295,-1775,26718,13357,-31572,-6108,-17108,-1879,-5556,12701,17766,27421,-728,15577,-28802,11302,14225,8902,12956,-949,5318,-26956,6555,17557,1172,5050,17652,-27119,19576,-987,-1438,18689,-8977,13913,17736,26821,-12740,-31604,-2034,-20869,18715,22133,-11304,-8402,-26281,-13918,1407,-21513,-14171,-17571,13925,22064,-8902,449,-2857,-12064,-24119,-30663,-14281,-13188,-11712,-30948,10302,7853,28216,16173,-9317,23157,-31303,-10433,-21129,-19570,19414,10680,-10061,-26808,-9624,11704,-30988,-13047,-7647,-13616,-26773,13646,-10199,-16758,5823,22247,29873,-31561,19298,-13529,-1661,8769,-4747,6817,6146,-7795,10470,-15023,-9483,-21608,9033,-2836,15458,5383,17326,4810,23194,11484,-10012,26300,-30294,-27821,-27492,23374,5145,7527,24542,-19792,-9277,-22435,3889,-31140,-12549,-28602,-555,-27306,30510,-11801,4641,-31599,19497,-27407,-8903,-30915,4423,8773,-23248,-17601,1832,-32406,27114,-2666,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,26670,-2074,-28448,-10272,13085,-15163,-13
373,-11637,-29603,6813,32307,-27380,716,-18591,20205,-32187,-29111,4042,-4082,-12313,3308,7486,11426,-967,-13716,-9923,15314,-28531,-19974,11972,8098,-14802,19816,-15074,5608,12680,-13314,4280,18984,-21081,29070,28591,29165,29153,-8834,-2866,5657,-2407,2477,4781,12818,-28844,-15191,23174,7490,11677,7485,26346,25295,-9718,27185,-21467,-20364,-20630,-520,-29594,32608,-18919,-22883,15122,-25405,2704,29998,-16345,-1958,-31997,846,24685,-2804,2843,-1645,-3876,23263,-3061,20632,6794,7199,28372,29755,-5602,2695,-28370,-23193,-14551,-31805,17141,3592,14674,6411,24359,5298,29216,-26184,-24698,-24144,-19637,-8814,12719,-26621,-26462,-10038,-1401,-1549,17780,27708,11815,-3756,11624,-14795,18108,-964,-30764,-25010,-16810,28031,18686,-32083,-16612,21789,-29336,-4796,-8743,16001,-29468,10433,-2770,25362,12741,-14854,-18188,5895,20838,27639,-6040,-31543,-18454,25920,-9407,24462,-13367,11552,18985,7186,23222,26172,14481,21822,8301,3422,-5946,28873,17964,-3404,26544,29750,-27944,-3392,-14660,6953,29539,-5946,21528,7363,27016,609,-30582,11406,25524,-11659,11275,-17608,-10033,25381,4346,32646,3798,-8795,-28037,-7749,22981,-14308,10942,-15387,30381,32026,5260,-1837,10180,15807,-13248,4124,-4116,17981,-20650,-27874,22254,-15829,8738,-13298,1206,-10421,-26426,15833,19087,16103,-6934,-4913,-13472,2058,-27327,20181,22615,11506,-20239,19683,12314,-2772,-21428,-2898,374,4529,-12008,32326,18321,-18180,30773,-9538,11544,23766,17007,-21801,883,-25157,6933,2091,9593,-29549,7618,28483,-17566,-27074,-26330,-29314,-1950,-21197,-4441,5031,-8147,19688,18019,-23812,19632,6534,-16014,26529,13069,26318,13386,3090,17065,-13055,-32097,-17400,-19241,5099,259,-5255,15266,-11039,18759,23195,-20378,-2744,19406,4888,-17390,12611,-18400,-11139,-9746,13695,11006,16500,-10110,26420,-332,7677,9673,17770,18760,21734,8068,28117,9010,-15809,-5161,-26613,-3606,-28798,22140,4383,-8027,10085,-2302,13084,-1296,-21383,-31441,14862,14807,10213,28885,-27176,-3759,2814,23440,-30881,29871,-280,-16116,-16121,-17537,-16618,2969
9,-9240,27489,24929,8103,7793,22558,21721,-26929,-30473,-15696,20715,14764,29757,30506,15625,-9186,17321,-4215,12079,-32269,15617,-6903,13321,-15931,10496,18339,28610,3677,17766,17454,-19453,-15060,8969,-7222,-26774,-11645,-13639,31737,829,-17974,-23185,7440,27860,-28436,-26761,9418,-29335,-4020,-17501,16735,-2728,1462,-1924,-17127,16058,-27151,-22556,-16085,-27936,-13250,-7786,24724,32239,24031,-9241,32623,18864,13763,-32111,-531,-32488,6023,-25102,17434,16712,5119,-4465,1475,24583,-2393,6151,23203,3117,22504,16959,-25594,-15298,-31757,2946,23238,-9430,27556,-24014,-11535,-5406,21509,-19242,16007,27714,14218,-15107,10917,18991,-23346,4464,-31304,19869,1605,21930,-6754,-20995,-17366,-14578,-13031,-1617,22174,-4289,3052,-26417,25134,29142,-18421,7962,27107,1365,27606,-2666,-9904,6281,501,23080,25399,-8180,-24595,32053,25105,13319,3638,21316,878,-26569,-28590,26063,25760,-687,-25440,26327,23120,4650,-8702,2668,11078,2564,25826,17918,-11677,22360,-8853,-31921,-8450,-12647,-31810,6038,-8879,-31821,28929,23956,-14419,-17997,4010,-24223,20547,12560,4593,-16052,6674,-14060,24351,27333,29245,-30965,-32581,14341,-27260,17559,25727,2538,-31442,-5040,-10009,1750,29603,4256,-16492,2599,23349,-3925,-15054,-12767,-22547,12732,2211,-23022,17716,17622,-11876,18872,7295,15114,-4763,-14821,-32565,-18966,13972,25775,-2027,-16873,-4098,29465,30658,-939,26698,-14860,-19739,3320,18780,-26772,-31094,16362,27278,-9371,6527,24023,-7298,8731,-26413,-14197,-25368,-24687,3613,-25232,24656,-21481,-26905,-20215,22192,6096,-29728,-29237,-12613,-24105,31267,9166,-22787,21754,30148,-10418,-10808,-24703,14134,-2683,16119,-11058,-27471,-16881,-1878,21201,23127,-21570,15897,-1279,16630,-16063,-13169,3220,4129,-31290,-10777,22768,-2461,-29620,-58,20301,-17797,22570,-2552,-10521,22965,6919,15247,-8559,16869,-15826,-26734,16312,-25247,-15100,24854,2550,-15419,14889,30535,-27848,16776,16462,-31053,-16330,-3592,24202,-17158,-26020,-89,32200,-16701,27785,27779,32742,21944,-6695,11627,19638,-24878,866,23152,
15987,-21130,-11847,-17656,24418,15439,2927,-6344,10959,-5993,-24134,-25157,-2556,27808,-17397,-18641,16175,21630,472,23358,13115,-28553,6817,-2889,-15765,-27090,-2915,-15597,-10163,17713,-19541,32090,883,2223,-29116,777,-30159,-9826,-11437,-3606,25241,4349,-32186,-6876,17932,-20345,-607,-21841,-24797,-10360,-7784,-10239,2328,461,18246,20750,26281,-10790,25678,-23186,30403,-8710,-13726,2916,-5236,13419,5953,2447,-31087,-29860,-67,29314,-30474,-9157,16781,-26172,28949,18061,-15625,-29537,17658,25773,-7115,13005,22554,3813,-15684,-23303,-30887,-25090,1617,6917,-5115,-18420,-23450,-19096,-14444,15521,30583,16646,-29349,11108,-28389,17366,-31703,-4479,31685,-14924,27841,-1988,7146,-23010,-11759,23718,-26675,16877,-2650,-18523,32081,-12586,22721,-7236,-4261,9834,-29677,-11742,-28656,-16375,28100,-24361,-31940,-9688,23941,3346,-17413,6213,-28066,24161,-28868,-21338,26406,-10883,-6463,16368,24151,23210,-17168,-2974,28942,2392,6351,-4294,-19977,-11824,9573,19429,-6611,-20119,31021,-19634,10897,8229,-21621,9377,-4370,-27651,-14798,-16537,-13039,26577,-4155,28255,-10696,-28408,17530,15710,-4477,-2999,-1471,24600,4127,-13679,-26677,-30984,18240,-16793,-29384,14476,-27342,-14818,30084,-30189,12818,12568,-2770,13830,-14487,16364,-14795,-1881,1043,10665,-13892,-3404,18644,-11651,14357,-8680,-29340,-11163,-25085,11596,7198,-5970,-13269,10472,-18306,-11464,18589,313,30176,27944,31462,-10165,32631,-4409,-6834,25715,23538,17336,28737,-29294,-30834,-16780,13886,13955,10698,14651,5180,2167,21965,18639,-31819,-8405,7429,-7148,10221,-22372,29072,-18691,811,-7549,4924,-1336,-29311,12297,-29038,-6611,2541,5551,7786,-11287,-14886,-21907,1193,15483,-5246,-15215,-11258,8175,7779,96,-10697,-416,-31449,-32706,10805,16333,18610,-18436,-7285,-9155,30424,-22877,-6831,-17096,31446,-2880,15933,-18463,-7850,24529,13201,8987,7451,11654,-20099,10427,7335,31788,-10563,-19854,-18690,-3239,-32237,22000,-30556,21980,-31714,-25077,-28053,-14414,-13924,22025,-12360,14248,-26808,8248,14953,18868,-32161,31334,
19089,-15709,-17509,-13822,10247,-28846,14036,-10550,-6898,13636,-17268,-515,25883,11627,-3304,8514,10099,-18883,9384,322,-25025,-31103,20398,2399,10153,14656,8470,-24014,3323,-17548,5545,-15259,26716,-21240,-29960,-17609,31487,19939,26538,22737,20965,31578,-11235,11484,12618,-24113,14189,-11158,13990,-26474,22142,8309,12105,-26206,20702,-17730,-1428,12888,10302,-4738,5629,-26423,-14286,9428,-8649,-2266,32627,-24366,-29037,6906,27626,-10319,-30958,-25823,31476,-7026,-6766,32418,-32528,12569,-12427,-3539,-31737,23192,13942,-15031,31736,4066,11652,18629,31537,-12844,-25888,-31414,-28482,-12057,-22752,-30736,23295,-8895,9710,8198,1401,16646,10063,-763,15707,7109,21543,18651,4079,4574,-12691,-16414,-28483,-12953,-15214,-18139,12804,31777,12166,11324,14786,5342,6958,-13292,4137,-18491,-22480,19655,16506,11361,30186,2165,-24428,-26458,-16508,21790,13711,761,21083,-32002,17387,-32754,12572,-26691,-7366,-1762,-20904,-7358,32643,-24425,26800,6458,-3881,20127,-24918,-16387,-21744,-19430,8163,11432,-21800,12988,-11840,3171,-6630,783,-1322,-11744,-27718,22927,20265,-16147,21967,918,-17771,-20199,4127,7386,-5380,-26972,-28380,-17666,20392,17411,-14771,-23100,-20238,20667,-9887,1852,-16135,-30945,9124,-30165,24507,-5550,21987,-6390,-28774,26426,14094,-3776,-22238,7962,1317,10097,15966,28296,-25248,-10018,2579,-25989,1319,-30815,23425,-26859,-12451,10136,-12655,11935,-16236,26130,-27590,22036,10921,7953,5056,-7442,-31216,6118,1685,20717,18045,29360,-30189,22007,14287,-31176,24627,7952,-4928,3357,30705,9548,17700,10427,9994,-9933,-5377,-32320,13041,-17070,-20696,30122,11540,6611,-8248,4067,25995,-3024,27005,23278,6662,-768,27831,-21252,10243,-2046,-27948,-4486,-27289,17338,27056,-23104,-29777,-8563,1119,7413,-18121,-16059,30864,1585,-27741,-17079,-4337,17199,-29356,-29965,-12215,3482,31959,8954,24462,22022,-22509,13656,-21060,3587,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 diff --git a/tensorflow/lite/micro/integration_tests/seanet/pad/pad3_input0_int16.csv 
b/tensorflow/lite/micro/integration_tests/seanet/pad/pad3_input0_int16.csv new file mode 100644 index 0000000..fa9d4c6 --- /dev/null +++ b/tensorflow/lite/micro/integration_tests/seanet/pad/pad3_input0_int16.csv @@ -0,0 +1 @@ +9619,18648,24963,-5114,-31198,1384,23227,-2744,21316,2613,13906,-28189,26877,6394,30734,-10970,7919,-18418,25511,-23839,-13711,-10586,22938,400,16609,-25691,13981,-5958,5548,-3090,25468,2112,-19776,-25293,-3307,4083,1473,-7278,-26132,-24521,-16955,19395,12267,17416,17569,14461,5767,15890,-17037,-4542,4780,31190,11614,12719,-16138,23900,-31456,-28839,-30114,-12966,15063,11732,-696,-27977,-9909,13205,22110,-19431,-28018,31766,-27312,537,24960,-10805,-7865,-11744,-26092,-440,-25049,343,-25248,-27944,25281,-26864,14230,21458,4297,-16159,9772,-7044,24336,11591,-4578,2566,-25590,-15651,31407,-14104,1422,20619,-8908,-12887,-5016,-605,-21713,25100,9222,-14978,24592,14710,8662,16543,6730,-27811,19867,7089,-16798,14849,18658,-3385,-283,-15731,-31745,-21066,-8815,-16845,-3500,6760,15238,20214,-25833,-10773,8491,8350,-7221,-15315,-23465,27532,28150,-32033,6108,-6618,19517,21147,-6677,2167,12665,17070,714,-3409,-8607,8856,3564,-25801,2012,10562,31040,22054,15779,374,16843,14847,26042,4542,-11226,-15946,-16549,3109,24411,-18994,20043,-19616,18648,-3260,25767,-27275,6456,-7358,17371,25113,-16488,-7297,19544,1177,-30208,-14705,12067,-29278,-29193,7321,-21188,-2402,23780,-3086,13707,16616,13383,-7228,6434,1806,-16057,-28173,6202,-26179,-30557,-21460,-15183,-14057,32372,20017,24786,29814,-3803,4627,23701,-10772,2452,-5521,12483,-7168,-16907,-6832,-25078,-27146,29027,19789,1670,-15573,14745,10343,26865,-29073,7871,-28263,-30085,-26662,13158,14066,-11252,9013,4354,-16832,-10156,-23656,19699,15206,13976,-31005,3612,23932,-14892,-19155,-1724,16591,-15950,17167,2137,-7105,16352,-9582,8750,-21615,-11316,5905,-29279,7608,20165,9871,-4715,-20600,-199,-22641,-2499,16966,-4012,19200,-12241,4686,26540,-6485,11172,3688,-6185,-32522,10210,29886,-26601,-21545,11584,9511,-546
3,3650,-1535,-13988,-18658,13129,23228,32206,7998,1815,25031,20821,19998,-31114,-22411,4513,-17153,30122,-13892,5553,9396,-18210,7627,10139,-20245,23180,-10975,-5177,-28091,-2504,-3625,-15382,28882,22369,14504,5560,31265,-299,9516,-5573,11691,-2850,6171,29466,-16587,-30454,-1732,-3335,-13794,-27067,6489,-8724,-28920,26071,-8737,-19056,-12016,30605,31565,-3303,-20417,-3240,21652,725,-6896,32131,-2274,17573,7949,5398,-18111,4007,-14029,-19032,14130,25937,30592,-21695,29818,29625,26797,-652,-27888,-27085,-17717,3878,17196,19872,-11723,8990,25989,-32427,-20855,-24875,-2831,-8217,4915,-27445,-17566,-2961,2487,4874,-15194,-32768,-11613,-9947,909,19213,11101,-29618,-27966,-5397,-13634,-6164,17296,15881,13487,-24025,-8199,-32473,-4628,-17157,27696,-28836,30156,-3059,2097,-12125,-30488,10059,-27009,2586,-5569,5938,-30536,30012,-18565,-12974,-5565,13443,-149,22878,41,-28767,31600,-22827,18182,8144,29267,-19864,-20057,-6020,-12862,-8337,13464,-31927,-14757,-6774,7402,-7289,14939,16249,-4557,-11694,-1189,25894,12479,-7017,-12592,2927,30596,23143,-28391,-6929,22000,-32623,19720,18525,30231,-27220,8661,14330,-11415,-603,-14254,32000,13183,-28815,-779,-31270,-20510,-18636,18876,-9469,-28565,23583,17708,7915,16766,21935,-15295,-2914,22760,-24183,-28375,9207,-4399,3960,-18750,-11866,3709,28907,-30932,26771,22944,-3319,-8110,8207,14208,27722,-5286,-2177,-10530,3559,-20180,32619,-30405,20910,17040,-17180,-21493,-30471,-11576,-24563,29050,-202,3519,-8329,14111,12517,-13305,-26970,-9839,-3148,-15345,-20932,26424,17490,1604,-1810,18709,-28356,21630,-1389,29842,-16149,10647,-18901,-17951,-16606,16677,-25913,10759,6936,-12030,-27525,26822,29338,5235,6984,11921,24916,32235,-27095,-7951,-28134,-30386,26556,19731,-14389,-22898,-27603,-5513,12678,-10090,637,8191,-22325,4450,-29972,29240,10683,-4131,13265,826,25421,-21071,6099,-12070,-9526,20451,-13979,-16439,19204,-27409,-21941,-5353,32075,16588,-28169,28838,11236,-11003,-24378,-4634,-1491,-7371,-775,25745,-157,94,-31022,15586,-22806,-4549,-38
45,31510,11010,-22392,-20660,-8216,26090,-31534,30528,53,2150,15286,717,27555,-7178,26573,13133,-4544,14770,2456,-11121,2763,28486,-14231,-15522,18274,-20184,-23305,-26185,-24187,-29048,-2349,18751,3907,18620,-3461,-16386,-8650,-3688,-24064,8037,1260,18200,-32677,13409,-22859,-16501,-25040,16275,7027,-24432,20384,-3254,-3612,20548,-18046,16385,-5407,-32498,14648,3895,-6688,-12683,9635,-21773,-6236,15212,27212,-26115,20758,16192,13241,25036,4303,-13221,29927,-9426,-11118,-5032,27153,27339,-28553,-4350,10629,-9596,18737,6212,4041,-27793,15191,-27447,3943,-17152,-20679,26978,-20711,27361,4282,-325,25507,3861,25557,21893,-26247,-12578,-21311,1616,-2730,-22155,-12944,16008,1261,10148,26410,-13758,-27672,8472,12041,20158,25483,-22777,31234,-584,-17216,-21270,-13324,24737,31037,-5975,-17389,26738,24108,19742,-28034,31180,30805,-6811,-11712,32695,10545,-19457,9180,-1460,-30026,-7498,20918,3915,-18530,-24698,-3592,12974,-32745,-22469,-14110,24165,21705,20088,10047,13458,1030,2675,-18659,-26352,3000,-30530,20878,-16465,-2285,13385,9491,-31229,25766,27890,-3463,-2958,3874,1014,12547,-25010,-5541,27538,7584,5225,-2642,8268,-12614,21938,-10756,-31789,31865,-27141,-250,19006,30696,11615,4680,20583,30834,17004,26101,13950,14661,-25889,-17450,-18623,20215,-31576,-4172,-28444,17800,-479,9316,21304,-19666,-14446,-13887,28022,-8216,31850,7812,22540,31964,-22294,13233,-31854,-31806,-13029,-15682,13479,-18208,3602,-24075,12863,-20179,29160,-23709,20242,5997,-23753,-31522,14779,2675,-30446,1574,23475,-22279,-14082,-3161,6486,-31909,-27762,10291,-23098,9723,-21324,23390,-7509,-25320,27157,-12949,8091,-10993,8898,-6046,-13203,-2201,7498,-11491,30697,-21741,9825,-13889,3684,-4314,5641,-24009,24869,14467,13837,28175,-165,5686,-2248,20160,-27604,24724,-4988,-20348,-602,7338,-31302,28359,-29597,8604,-28700,-19888,-5619,28053,26454,25851,10811,24450,-24283,-25707,21372,3409,-28945,26121,13739,-5478,-11684,32499,-21631,-27746,15991,-2739,-20967,-24429,3075,-32122,17830,10438,17241,12372,-6296,-2
2601,-8758,-4166,-931,24621,27037,3608,-14983,10790,-7616,-24777,6405,-13817,-23557,26853,-21470,6987,6997,32081,-17369,-31442,-27674,-8308,190,-32676,-30850,-27195,27544,21647,-2471,7407,-2871,-26952,-25297,4945,365,1972,-24293,27263,6293,6946,-25236,20955,16846,-25553,26644,-19421,15043,24046,24722,-8533,32074,14290,-5247,-7685,29058,25205,-29535,19102,15409,-25768,-30820,13009,-6572,7051,-32334,30796,-2587,-1517,-10948,-28263,-3317,891,-11164,26405,-10378,-10356,-19013,716,6801,-2000,-22745,19543,16641,2765,8221,19915,-32086,-3371,-14746,-11462,-17447,21651,-32061,5969,12169,20962,12562,31313,20425,-4391,16561,-28586,-5602,14647,24958,30147,-3381,-22875,-4861,-28265,1639,8365,27102,23438,20966,-10025,-19656,-13096,9511,27785,18219,-25865,18092,23392,30189,19864,-9663,-29051,2711,6125,-2744,-16040,3217,10642,-29199,10243,17517,10889,-24455,-20205,16915,-18050,15645,17535,-7534,-22127,-14408,-7616,-2093,31459,7639,-12548,-21084,5867,-19128,31998,-14248,-1250,-16729,-23812,-13059,-30125,-5873,-9404,-4180,20844,-11480,-99,31260,-20596,18419,23434,29788,-25052,11705,5887,22897,-2139,22765,-7210,12411,32141,20787,2116,24904,7881,-17706,-16559,8230,2732,20870,-6083,3958,4321,-19958,13295,-1775,26718,13357,-31572,-6108,-17108,-1879,-5556,12701,17766,27421,-728,15577,-28802,11302,14225,8902,12956,-949,5318,-26956,6555,17557,1172,5050,17652,-27119,19576,-987,-1438,18689,-8977,13913,17736,26821,-12740,-31604,-2034,-20869,18715,22133,-11304,-8402,-26281,-13918,1407,-21513,-14171,-17571,13925,22064,-8902,449,-2857,-12064,-24119,-30663,-14281,-13188,-11712,-30948,10302,7853,28216,16173,-9317,23157,-31303,-10433,-21129,-19570,19414,10680,-10061,-26808,-9624,11704,-30988,-13047,-7647,-13616,-26773,13646,-10199,-16758,5823,22247,29873,-31561,19298,-13529,-1661,8769,-4747,6817,6146,-7795,10470,-15023,-9483,-21608,9033,-2836,15458,5383,17326,4810,23194,11484,-10012,26300,-30294,-27821,-27492,23374,5145,7527,24542,-19792,-9277,-22435,3889,-31140,-12549,-28602,-555,-27306,30510,-1180
1,4641,-31599,19497,-27407,-8903,-30915,4423,8773,-23248,-17601,1832,-32406,27114,-2666,26670,-2074,-28448,-10272,13085,-15163,-13373,-11637,-29603,6813,32307,-27380,716,-18591,20205,-32187,-29111,4042,-4082,-12313,3308,7486,11426,-967,-13716,-9923,15314,-28531,-19974,11972,8098,-14802,19816,-15074,5608,12680,-13314,4280,18984,-21081,29070,28591,29165,29153,-8834,-2866,5657,-2407,2477,4781,12818,-28844,-15191,23174,7490,11677,7485,26346,25295,-9718,27185,-21467,-20364,-20630,-520,-29594,32608,-18919,-22883,15122,-25405,2704,29998,-16345,-1958,-31997,846,24685,-2804,2843,-1645,-3876,23263,-3061,20632,6794,7199,28372,29755,-5602,2695,-28370,-23193,-14551,-31805,17141,3592,14674,6411,24359,5298,29216,-26184,-24698,-24144,-19637,-8814,12719,-26621,-26462,-10038,-1401,-1549,17780,27708,11815,-3756,11624,-14795,18108,-964,-30764,-25010,-16810,28031,18686,-32083,-16612,21789,-29336,-4796,-8743,16001,-29468,10433,-2770,25362,12741,-14854,-18188,5895,20838,27639,-6040,-31543,-18454,25920,-9407,24462,-13367,11552,18985,7186,23222,26172,14481,21822,8301,3422,-5946,28873,17964,-3404,26544,29750,-27944,-3392,-14660,6953,29539,-5946,21528,7363,27016,609,-30582,11406,25524,-11659,11275,-17608,-10033,25381,4346,32646,3798,-8795,-28037,-7749,22981,-14308,10942,-15387,30381,32026,5260,-1837,10180,15807,-13248,4124,-4116,17981,-20650,-27874,22254,-15829,8738,-13298,1206,-10421,-26426,15833,19087,16103,-6934,-4913,-13472,2058,-27327,20181,22615,11506,-20239,19683,12314,-2772,-21428,-2898,374,4529,-12008,32326,18321,-18180,30773,-9538,11544,23766,17007,-21801,883,-25157,6933,2091,9593,-29549,7618,28483,-17566,-27074,-26330,-29314,-1950,-21197,-4441,5031,-8147,19688,18019,-23812,19632,6534,-16014,26529,13069,26318,13386,3090,17065,-13055,-32097,-17400,-19241,5099,259,-5255,15266,-11039,18759,23195,-20378,-2744,19406,4888,-17390,12611,-18400,-11139,-9746,13695,11006,16500,-10110,26420,-332,7677,9673,17770,18760,21734,8068,28117,9010,-15809,-5161,-26613,-3606,-28798,22140,4383,-8027,10085,
-2302,13084,-1296,-21383,-31441,14862,14807,10213,28885,-27176,-3759,2814,23440,-30881,29871,-280,-16116,-16121,-17537,-16618,29699,-9240,27489,24929,8103,7793,22558,21721,-26929,-30473,-15696,20715,14764,29757,30506,15625,-9186,17321,-4215,12079,-32269,15617,-6903,13321,-15931,10496,18339,28610,3677,17766,17454,-19453,-15060,8969,-7222,-26774,-11645,-13639,31737,829,-17974,-23185,7440,27860,-28436,-26761,9418,-29335,-4020,-17501,16735,-2728,1462,-1924,-17127,16058,-27151,-22556,-16085,-27936,-13250,-7786,24724,32239,24031,-9241,32623,18864,13763,-32111,-531,-32488,6023,-25102,17434,16712,5119,-4465,1475,24583,-2393,6151,23203,3117,22504,16959,-25594,-15298,-31757,2946,23238,-9430,27556,-24014,-11535,-5406,21509,-19242,16007,27714,14218,-15107,10917,18991,-23346,4464,-31304,19869,1605,21930,-6754,-20995,-17366,-14578,-13031,-1617,22174,-4289,3052,-26417,25134,29142,-18421,7962,27107,1365,27606,-2666,-9904,6281,501,23080,25399,-8180,-24595,32053,25105,13319,3638,21316,878,-26569,-28590,26063,25760,-687,-25440,26327,23120,4650,-8702,2668,11078,2564,25826,17918,-11677,22360,-8853,-31921,-8450,-12647,-31810,6038,-8879,-31821,28929,23956,-14419,-17997,4010,-24223,20547,12560,4593,-16052,6674,-14060,24351,27333,29245,-30965,-32581,14341,-27260,17559,25727,2538,-31442,-5040,-10009,1750,29603,4256,-16492,2599,23349,-3925,-15054,-12767,-22547,12732,2211,-23022,17716,17622,-11876,18872,7295,15114,-4763,-14821,-32565,-18966,13972,25775,-2027,-16873,-4098,29465,30658,-939,26698,-14860,-19739,3320,18780,-26772,-31094,16362,27278,-9371,6527,24023,-7298,8731,-26413,-14197,-25368,-24687,3613,-25232,24656,-21481,-26905,-20215,22192,6096,-29728,-29237,-12613,-24105,31267,9166,-22787,21754,30148,-10418,-10808,-24703,14134,-2683,16119,-11058,-27471,-16881,-1878,21201,23127,-21570,15897,-1279,16630,-16063,-13169,3220,4129,-31290,-10777,22768,-2461,-29620,-58,20301,-17797,22570,-2552,-10521,22965,6919,15247,-8559,16869,-15826,-26734,16312,-25247,-15100,24854,2550,-15419,14889,30535,-2784
8,16776,16462,-31053,-16330,-3592,24202,-17158,-26020,-89,32200,-16701,27785,27779,32742,21944,-6695,11627,19638,-24878,866,23152,15987,-21130,-11847,-17656,24418,15439,2927,-6344,10959,-5993,-24134,-25157,-2556,27808,-17397,-18641,16175,21630,472,23358,13115,-28553,6817,-2889,-15765,-27090,-2915,-15597,-10163,17713,-19541,32090,883,2223,-29116,777,-30159,-9826,-11437,-3606,25241,4349,-32186,-6876,17932,-20345,-607,-21841,-24797,-10360,-7784,-10239,2328,461,18246,20750,26281,-10790,25678,-23186,30403,-8710,-13726,2916,-5236,13419,5953,2447,-31087,-29860,-67,29314,-30474,-9157,16781,-26172,28949,18061,-15625,-29537,17658,25773,-7115,13005,22554,3813,-15684,-23303,-30887,-25090,1617,6917,-5115,-18420,-23450,-19096,-14444,15521,30583,16646,-29349,11108,-28389,17366,-31703,-4479,31685,-14924,27841,-1988,7146,-23010,-11759,23718,-26675,16877,-2650,-18523,32081,-12586,22721,-7236,-4261,9834,-29677,-11742,-28656,-16375,28100,-24361,-31940,-9688,23941,3346,-17413,6213,-28066,24161,-28868,-21338,26406,-10883,-6463,16368,24151,23210,-17168,-2974,28942,2392,6351,-4294,-19977,-11824,9573,19429,-6611,-20119,31021,-19634,10897,8229,-21621,9377,-4370,-27651,-14798,-16537,-13039,26577,-4155,28255,-10696,-28408,17530,15710,-4477,-2999,-1471,24600,4127,-13679,-26677,-30984,18240,-16793,-29384,14476,-27342,-14818,30084,-30189,12818,12568,-2770,13830,-14487,16364,-14795,-1881,1043,10665,-13892,-3404,18644,-11651,14357,-8680,-29340,-11163,-25085,11596,7198,-5970,-13269,10472,-18306,-11464,18589,313,30176,27944,31462,-10165,32631,-4409,-6834,25715,23538,17336,28737,-29294,-30834,-16780,13886,13955,10698,14651,5180,2167,21965,18639,-31819,-8405,7429,-7148,10221,-22372,29072,-18691,811,-7549,4924,-1336,-29311,12297,-29038,-6611,2541,5551,7786,-11287,-14886,-21907,1193,15483,-5246,-15215,-11258,8175,7779,96,-10697,-416,-31449,-32706,10805,16333,18610,-18436,-7285,-9155,30424,-22877,-6831,-17096,31446,-2880,15933,-18463,-7850,24529,13201,8987,7451,11654,-20099,10427,7335,31788,-10563,-19854,
-18690,-3239,-32237,22000,-30556,21980,-31714,-25077,-28053,-14414,-13924,22025,-12360,14248,-26808,8248,14953,18868,-32161,31334,19089,-15709,-17509,-13822,10247,-28846,14036,-10550,-6898,13636,-17268,-515,25883,11627,-3304,8514,10099,-18883,9384,322,-25025,-31103,20398,2399,10153,14656,8470,-24014,3323,-17548,5545,-15259,26716,-21240,-29960,-17609,31487,19939,26538,22737,20965,31578,-11235,11484,12618,-24113,14189,-11158,13990,-26474,22142,8309,12105,-26206,20702,-17730,-1428,12888,10302,-4738,5629,-26423,-14286,9428,-8649,-2266,32627,-24366,-29037,6906,27626,-10319,-30958,-25823,31476,-7026,-6766,32418,-32528,12569,-12427,-3539,-31737,23192,13942,-15031,31736,4066,11652,18629,31537,-12844,-25888,-31414,-28482,-12057,-22752,-30736,23295,-8895,9710,8198,1401,16646,10063,-763,15707,7109,21543,18651,4079,4574,-12691,-16414,-28483,-12953,-15214,-18139,12804,31777,12166,11324,14786,5342,6958,-13292,4137,-18491,-22480,19655,16506,11361,30186,2165,-24428,-26458,-16508,21790,13711,761,21083,-32002,17387,-32754,12572,-26691,-7366,-1762,-20904,-7358,32643,-24425,26800,6458,-3881,20127,-24918,-16387,-21744,-19430,8163,11432,-21800,12988,-11840,3171,-6630,783,-1322,-11744,-27718,22927,20265,-16147,21967,918,-17771,-20199,4127,7386,-5380,-26972,-28380,-17666,20392,17411,-14771,-23100,-20238,20667,-9887,1852,-16135,-30945,9124,-30165,24507,-5550,21987,-6390,-28774,26426,14094,-3776,-22238,7962,1317,10097,15966,28296,-25248,-10018,2579,-25989,1319,-30815,23425,-26859,-12451,10136,-12655,11935,-16236,26130,-27590,22036,10921,7953,5056,-7442,-31216,6118,1685,20717,18045,29360,-30189,22007,14287,-31176,24627,7952,-4928,3357,30705,9548,17700,10427,9994,-9933,-5377,-32320,13041,-17070,-20696,30122,11540,6611,-8248,4067,25995,-3024,27005,23278,6662,-768,27831,-21252,10243,-2046,-27948,-4486,-27289,17338,27056,-23104,-29777,-8563,1119,7413,-18121,-16059,30864,1585,-27741,-17079,-4337,17199,-29356,-29965,-12215,3482,31959,8954,24462,22022,-22509,13656,-21060,3587 diff --git 
a/tensorflow/lite/micro/integration_tests/seanet/pad/pad4.tflite b/tensorflow/lite/micro/integration_tests/seanet/pad/pad4.tflite new file mode 100644 index 0000000..60fd6f4 Binary files /dev/null and b/tensorflow/lite/micro/integration_tests/seanet/pad/pad4.tflite differ diff --git a/tensorflow/lite/micro/integration_tests/seanet/pad/pad4_golden_int16.csv b/tensorflow/lite/micro/integration_tests/seanet/pad/pad4_golden_int16.csv new file mode 100644 index 0000000..5ee85c6 --- /dev/null +++ b/tensorflow/lite/micro/integration_tests/seanet/pad/pad4_golden_int16.csv @@ -0,0 +1 @@ +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,-6031,839,-31119,-14547,-26902,-31516,-19497,-28126,24560,-16466,-6370,20734,24748,32005,-15637,-9390,-17227,8219,-18232,2911,-29711,8574,-11644,3384,-29263,-18645,21051,22560,26243,15421,-6745,-5586,14431,-29069,-5097,-23125,24099,4718,28244,-16523,6130,-8950,-635,21978,6030,-420,-31064,28079,11062,17568,6700,-27256,23035,12306,12058,-6461,15570,14795,18549,-25402,29166,-20976,-9150,31745,24255,17717,-18160,15722,9011,-146,-3818,-23528,-24727,-3924,-22976,-10328,22014,-760,-608,-23598,-11349,-31621,6935,12396,-23343,3437,-4589,1233,9632,28533,9598,22882,-18944,24498,-29556,-6984,-16932,8137,2283,-10398,1084,-7369,22110,2167,-29930,23145,27761,24831,-11868,631,-12275,8512,18986,3481,-4206,11632,18133,-20866,-2686,-309,10050,1800,-32019,-9389,13250,-5239,6947,-2592,6222,8162,29116,-22208,29155,23538,-6681,-11013,-5064,-26914,-5753,-17136,11710,-20951,5214,16775,4117,13763,-14594,-24958,-25759,3497,-23747,29350,-19568,-5878,-23468,5153,-32344,-11095,18381,-8256,28754,-12894,21341,14697,-6143,-10858,32455,13014,15368,13300,-8646,28276,-2707,-6858,-8632,-488,-1840,20855,30162,18814,-5976,-13483,-14365,-22244,-21833,28843,28794,-22218,19541,9758,5610,21887,-30655,-1753,-29855,-24950,-8298,32285,-20679,-6843,-28392,12636,-31396,4521,25427,-17282,30008,12886,28860,28396,32692,-27951,20301,17525,-15737,16435,5878,-8947,29908,18753,441,-3606,772,-24577,-6031,21310,1677
,15322,-27668,-11138,-29902,26049,29274,-4820,8827,-28825,21309,15164,-13675,-7745,-14913,-19797,29909,-18528,-5590,-2853,-15649,-13647,12683,-17053,8553,-6199,23298,26403,-2742,-19009,-500,1652,25688,2545,-27864,5918,29607,-4639,-3589,25305,8426,12758,21776,-27195,-18151,27067,5350,7752,-4050,-6526,32314,31348,-29614,-27511,-16243,23344,-32047,-9727,-18686,10870,13299,-23914,-30312,-342,6030,-13785,16877,-31042,-17057,-27235,17389,-25984,-21852,-5263,-11483,-25051,-23516,-28026,-14740,13536,14862,-14798,-7559,-6255,-6689,12073,19669,-25913,-22029,-3117,23766,-26350,-31612,-32269,-11736,3428,-7371,-10119,30750,9932,17519,-2693,-16006,2495,-14314,29063,1128,18929,-2727,30850,13480,835,-17756,6282,5890,4747,-5618,-29290,11282,15965,32560,32234,8756,15012,-26790,-32357,-25848,210,-7553,13318,6508,-11085,-6232,16876,-29859,-13358,14884,12150,-15144,5987,-13333,-27622,11743,-22350,-7578,18554,-14263,-27102,-26898,-6614,5164,-9204,6089,1156,4660,-11748,2245,-21931,-26960,-5864,-23760,-29188,-21715,23474,-12520,-16035,-19318,24625,23666,26615,12510,-17271,-30180,-23394,11571,21865,-6946,4478,-27108,17521,-2445,29507,3253,29281,13201,20779,19209,-19337,-30892,-19846,-1404,8100,10115,3998,-14147,6014,22616,17548,27060,-30308,-11466,24442,21407,-25166,24093,10383,16042,6889,744,-27559,-27319,18789,31642,-18252,13388,-8125,3785,3951,-10720,4556,7652,-14926,-22269,28736,10408,5865,20665,-696,-31780,15384,-22771,-27137,-26592,26159,30016,14261,-18769,-9898,-720,24967,-31363,-31300,-5675,-28135,24290,29350,-12826,-15335,2259,30528,-18275,24286,32335,-27067,31284,1651,-32665,-5053,-32160,23300,-21980,-23815,-18189,-18739,18589,24527,31946,3716,-201,-6710,-31314,4538,-13817,-10802,-12865,-22532,-21500,-2832,29760,-17318,-31791,-23818,-15486,-14587,-22531,30758,24726,-14121,-18325,-8368,-28057,-4282,2263,-8122,-31670,-31858,37,29064,-21128,-28675,18637,4231,18615,-3392,-16453,-25495,4363,-12726,7578,21475,20191,-14034,-2330,32443,24519,-18321,2619,29026,4144,-24557,21326,-27139,1275
1,-14210,21440,19975,11053,-22339,12503,-8049,-17710,24345,-19084,-31877,-13740,31770,-3147,-24880,7810,-21741,22720,-24676,30604,30072,12080,-21614,31572,852,-3441,-27385,-6247,3546,22202,-15969,-27904,-28111,24561,-31588,-27131,18899,5092,-14881,-22930,50,-23212,-9319,31110,-6746,8394,24348,23356,18318,21342,-10043,23759,2485,-12754,19489,-9276,-23136,-24435,24044,-17870,31441,23727,-24865,24360,32130,-10704,-9605,7468,5567,20177,-12079,16738,22597,14830,-18031,3159,16057,-16542,-32516,11728,8603,-32347,-20253,18415,22394,-27980,-4182,21387,5056,-19662,-8165,1604,21935,-30358,-24853,16739,6706,-3367,-12934,5072,-14678,-29035,30343,32029,-297,-18650,-15010,14796,-3750,-3281,9857,-16899,10953,-15719,-13300,-19670,-29722,1656,-11844,3115,19419,-10350,13450,-18909,17208,1490,-13743,23032,4348,3604,-889,-10496,-26507,17742,-27399,18456,-13770,32246,12776,-26874,16831,-13595,5147,5217,-32313,18732,-14600,-28031,-7933,-5907,-31684,-4888,4133,9591,-6121,21823,-14058,10222,-10567,28305,3735,-10012,-8715,-32534,-6064,-22485,32535,29681,11958,-32437,-14750,-29655,-6239,20054,16563,-10902,5489,19799,-8878,-15330,23927,10266,22436,332,-10679,31866,15377,-5636,-29498,23481,25077,-17358,-22001,12670,29149,26224,-21632,-17598,-2652,16707,4861,-12428,-17728,-6513,13337,31176,-549,12718,11530,-7519,11047,-5187,3443,7084,-14136,14853,17843,-3816,8111,4307,-6514,832,17563,18466,4348,3151,-22818,27640,-6546,-2135,-23765,132,-28641,-13219,8413,-14537,-29418,-29451,19905,-20863,2021,-16243,8962,-9332,17860,32123,5359,-13371,-9138,-10261,-2106,26960,-30867,-3158,27254,30662,13727,-30040,-24645,890,28835,-29749,7742,-6760,30022,3635,12395,-24822,-31166,11549,29452,298,-22243,29774,16361,11426,-8552,28282,24811,-17422,31977,-1959,29913,24592,19262,-24806,-30431,-24183,-7384,-18460,12648,-27198,-16554,-24296,-23321,-17288,13195,-17418,9484,-27077,16733,-31604,-30874,18180,2018,-9835,-28258,3488,25221,1460,-21521,-27428,10192,-5755,11234,-30650,-30839,8072,27816,-5730,336,-2780,-18256,12221,
9500,-16714,-20894,-24002,25900,-20436,-20488,-15786,-3857,-28323,18870,32641,32506,3433,-8382,-30835,-7773,10834,-6586,23064,-1695,-26418,-30566,24716,548,-31341,30035,-32088,29965,-26990,-15969,16587,-28855,8660,-7368,-23897,-1333,-16522,-8738,-7152,-7871,-6029,19517,12378,9004,-16762,29096,17398,31287,6766,-5495,-22670,-11134,26777,9655,31822,11497,-12140,-10658,20417,-27038,7929,-27317,-5040,-6265,-27349,-28532,4798,11577,11811,-21341,-28064,-8560,-20373,14707,-25459,-21995,-25186,30173,7995,-2859,709,-30578,-29259,5109,20105,24538,-15877,23757,14846,-28916,-13620,13587,27836,-31385,7961,-6039,-30990,-8224,13161,-12407,13443,14274,-14412,-10578,-23927,24580,2682,-3562,-1666,-24086,5567,324,-10964,-6506,17602,-5417,11963,-8472,-31441,627,12966,28652,562,7077,5432,18288,8264,5124,10016,-13258,21363,-13560,28847,-3313,24158,8100,10652,14072,-27979,13811,10647,21355,-13584,-11028,15802,-23712,30991,-30706,-22884,-7576,-27627,-22423,20889,-21320,-8636,25195,-32700,-21056,15016,-15916,4097,-27556,4967,8781,13670,5151,20719,-27942,-20431,-16137,3199,-15510,-18515,-2831,9693,4403,16685,3312,14281,-29974,-28489,25972,15698,5063,32497,-3461,23103,21742,-7030,-27742,13897,32379,11307,-18020,-19280,7315,26789,28879,620,23730,29554,29219,-30321,-13101,24409,-183,-31679,5130,-4220,11113,14895,-14933,12576,-12916,23439,-1614,-32106,19794,-11378,27923,902,-20554,-8265,-9738,14856,-25504,2133,-11863,-2558,-22967,-21330,-23481,1774,14731,22195,-4670,-28603,25295,30541,-5190,19055,11033,4911,-28569,10403,18918,6691,26759,-24262,-16069,-1997,17759,-13412,-16787,24953,29896,-27378,32375,26741,-12422,-22534,-11518,21158,-25596,-30381,31588,-8228,-8932,-30239,6015,-21188,-19908,5901,-19233,31664,3127,-18308,-19651,10044,-27766,2796,8939,-17444,1584,-28202,-14680,-11539,19496,24371,26033,-7849,-15027,29911,-15158,14570,-29929,11063,14801,1747,10201,23576,6071,-6568,-27777,7222,-3234,-8062,-25725,13797,-32185,-22435,-20961,1666,12700,26792,-23949,30200,-18763,3323,-18995,9869,-7041,8925
,-4786,19754,22690,-24732,953,-7901,-6722,24181,13991,23730,32709,-26328,21232,10037,15339,-9350,-31911,26156,-32491,19956,-16065,24092,-24634,-24126,23879,24490,-19884,32731,-14570,-24405,-31211,17382,27494,-9182,29156,15110,-5601,11884,-32437,26791,-22706,-23562,22467,-2936,-215,-11186,9040,-24418,-8685,-17934,-22438,25648,-18030,17605,-13873,18779,24989,-8897,12481,-28537,-7807,12152,-10087,-26644,-2601,-31527,29770,-4748,29401,-27684,19720,-19387,-21248,-8951,10396,-23241,32231,-21589,5120,-1385,-29710,-22337,-1429,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,25404,16172,1134,-27953,-29989,10724,-14176,3512,-29721,14386,4525,-2129,-32090,-30270,7726,-6146,19834,14551,-23374,-13217,26509,-25675,20982,-9432,-29798,25014,-1919,14855,15138,-24634,-22930,-8103,-2784,21005,13903,24938,14629,-9113,-9056,-28521,-20456,17051,-28958,-16933,27711,14163,26091,-14069,18393,29922,26952,-14878,-29223,13380,-7707,-7427,28086,-4239,-9853,21517,-23329,-23047,5233,-17396,-11020,-16226,22537,-4893,21467,-28855,21733,31164,-17415,14146,19611,7858,16699,-4185,9150,-16095,12487,-5911,-21583,1542,2640,-4827,31193,20034,24736,-2560,24863,16733,-7926,-12244,-28088,-24442,-13218,-2891,-19782,-25954,299,26345,9580,-25475,-11127,18606,-22889,19283,8389,-14366,-30056,-18870,-9181,24592,-17190,26559,22418,-7857,-11433,23531,23682,-9106,-3543,-4442,-15373,6416,1519,27632,31519,18382,9853,30522,-15237,-2094,27538,-2907,-25837,-16944,5424,-6267,-11474,27541,-27043,20833,31507,-2215,20560,6067,30586,1903,29252,14678,-7731,11896,-13745,-16830,-10554,15943,-28611,15199,3402,-30546,4836,-18870,32715,-15651,6873,-27375,-14567,-12931,17013,-31199,6786,-4160,-9310,-16506,-19658,27244,-32296,11524,-7472,23782,-7002,-15825,-6410,601,-26149,24780,11402,26831,-11469,-17252,-17299,21228,-27602,5962,-8709,31831,-31119,-23847,-7214,-31533,-15541,8115,-16293,-20628,-16300,11534,6562,-2645,13177,3370,29878,5532,-25189,-30291,22230,14685,15074,19372,8936,-2899,17046,8767,20579,-19290,8743,-103
56,22579,-15909,1155,-2497,2743,11615,-25665,4908,-7009,14141,-10273,-16403,20100,26174,15701,-14225,17059,-4334,-1020,-8779,11955,-22112,-8265,-18417,-32265,-3974,-17346,-5229,-18927,-27372,23717,-25340,19234,12499,-29025,-18612,-14871,5497,-30472,22711,14014,16220,-28604,25745,6395,-15701,3274,29897,17646,25810,29034,9119,-4767,31419,15659,25733,11626,28546,10968,-29172,-22043,-5019,-16374,3983,29779,27746,10835,-20662,-2324,-11094,-15392,-30947,-23983,32426,31530,4178,-25438,-6199,16316,568,-14385,25091,10179,7868,16217,13566,17577,-5674,30511,-2497,-7399,-14126,-20949,-4708,24450,-28325,2651,-11744,29432,-10551,-1039,-7861,-22408,-8314,26017,-17783,-15117,10421,-21623,12943,21681,-25231,-10452,25803,-13874,-17409,-30919,20764,18394,7579,16733,25382,-10403,-18300,6392,26617,28754,-2201,535,28981,5830,-13444,23969,-15319,-16192,-30680,-12691,-18255,3648,15316,-9566,-15897,15677,-30231,-5329,30449,26416,10160,29274,-2833,30194,21942,-12344,-19811,-7320,-7127,28574,30093,-24688,10292,31515,-11203,-25065,24368,31727,8667,-14726,6466,-26085,31470,-32697,6468,1672,-6300,22639,3655,-6406,18155,-12149,18577,-27983,14874,26278,-22292,-10206,8576,-17954,29525,22977,7498,-16618,23781,-18047,8736,9515,-8642,-29139,-32200,16401,-31285,-9257,-26474,-16814,21585,2069,24331,23445,-12907,13758,-7322,20559,-8201,-18199,2264,-8967,13221,-20207,-5018,-14621,-20285,-23743,-23711,-6995,-17576,11122,8811,22761,-20515,-24228,-24869,-27866,5060,-6439,-16940,11888,30920,21385,13783,23891,27571,-20479,22549,27431,269,-15430,8871,15024,-8736,12453,-26228,-16757,9874,-28049,-1600,-29632,-19963,-16269,-24669,-9619,-14486,22209,23773,-29247,22010,8228,24415,-11239,-622,6837,-17211,-9355,-21651,-420,-26864,9237,-10502,30323,16058,6555,4854,-26620,-30020,21161,16960,15661,-9257,-20131,25682,8789,12344,19256,-88,-18235,-13765,22557,-1187,-25565,-27663,-14362,32667,22403,7122,-29374,-20273,2692,31876,-27844,30084,32610,-24635,6974,7837,29512,6335,-31170,-10365,-21467,-21233,-12374,-27502,-9309,-49
85,32315,-31552,-23275,-30942,6190,27704,7861,-3238,-24516,15524,21584,32371,-31998,-24731,4205,-20415,14806,-22400,-18699,-14741,-30508,-14295,-19250,7264,-16123,9599,20627,-15745,-26615,-2455,-24049,-11476,-19339,-13853,-4589,-31159,25041,31900,5087,-21086,15547,5504,-30488,28283,-20963,30482,31590,-5193,13965,-27584,-12971,29246,27556,27107,18128,-9141,-21891,-27455,-23687,-32226,-6333,20623,3521,-8828,19161,-8169,-3505,26252,-20761,-7362,10477,-8158,-7158,-26620,4178,10629,-9240,1013,31771,17779,7412,-20252,11287,31792,8228,32652,-12026,20940,-15635,-1300,-8252,17690,7330,5346,-29830,8181,23470,-1317,-27926,-22115,26238,26444,-18998,19244,-7205,32259,-13207,-13605,-3604,1204,24445,10155,23590,-7649,30357,11102,13282,-21205,-7691,8663,32756,24866,-10211,-6960,32609,31174,-26773,-1029,-18971,-19354,10850,27233,-5816,-19032,-6093,25469,-11415,8359,-28857,-25828,-29999,3407,23202,-6087,-23367,-5299,-11801,26468,-31408,3939,28772,15560,3681,-17808,32553,14844,13509,30929,-11551,-24872,-3094,-13350,-20036,-27887,-1534,-13400,-14803,-11763,-15218,-11312,32367,-1641,20685,30538,-15074,-27633,-28023,7072,-6297,2661,-9728,-5959,-6565,-24132,2125,-9779,-2895,16417,-31330,-11188,14936,14335,-3039,-25422,16108,-5680,-24543,14253,24327,-31253,-12670,-12299,3858,-12578,30546,17494,24078,-28328,-26261,20500,-3537,-15115,2810,27740,2369,8833,-4891,-22502,-15773,-32075,24516,-27403,-10541,5715,-9855,19124,-20437,26192,3643,13105,-21555,22348,6693,-25589,32269,9930,-2502,1852,-19077,-18215,19836,-29065,23372,6947,22553,-28273,1648,-5926,-16316,-16810,23912,19154,2061,-2080,25564,-16680,-1792,5650,5588,-15079,22202,-14957,13108,19679,8528,18961,-22237,-4082,-27252,-4403,17457,23541,-25584,25866,31541,7538,-22719,7451,5329,-25652,-28107,-15848,-8372,16685,28273,12488,31890,14580,-13895,18368,15831,-18349,11329,19132,12466,5413,14984,10424,-17993,-28651,-1824,-14722,-9454,28771,6538,-27519,-20838,-23501,-15594,19708,-15552,-11666,15635,-11074,-2985,-10842,-10366,22259,6460,-30665,-26
43,-17462,31499,31104,-7783,-1459,-14440,-12076,7395,29058,27777,-24491,22999,-6767,17841,3875,-17961,-28170,19080,6495,4788,18923,14732,23094,-5331,-14330,-17023,-16574,7158,22382,30959,23716,25540,31510,-18630,-10474,-28056,17889,3752,27371,7310,-3267,-1848,25141,30751,-5410,21019,-879,21344,24907,-9941,-14150,19355,-23315,-19594,12432,30287,-10015,22592,-26791,1349,21562,15665,-26958,189,15244,6533,-28638,16087,10023,2303,6626,-29050,23512,-18101,12994,32362,31897,22892,31593,8444,-10178,11043,-22781,-13910,22019,-10326,-16861,30961,10736,-11991,-23904,22862,-27358,179,-22559,-8791,-14478,4941,29807,12075,-18528,17647,10507,27219,4393,-5209,19158,23955,-21598,-26963,-28662,7271,-14122,31977,11136,-15487,-81,-18782,-26125,11090,8509,-23415,14089,-4973,31855,-2467,13412,-11022,20202,13343,4942,-25095,8993,21706,-17440,-13365,17899,28152,-21923,16426,-27284,15459,2554,-4513,-31634,-10913,-27855,7028,1472,-21699,2015,10668,-21686,32534,13565,-23937,-25428,-4490,14966,-3066,5253,-364,27782,-21610,4935,20564,26185,-6257,-30024,-12327,-22197,-21661,18725,7405,17387,10322,14385,-29466,30052,-12579,-20783,-23059,2913,-3098,24631,9171,-5702,-4433,28192,-12158,-29464,14518,-5744,-12276,-6255,-3020,-19153,28925,12930,-1779,25309,23094,-6816,3074,-21735,-25861,-28405,-7506,20245,5339,-8343,15899,58,-23364,-7338,-15618,-31341,-6760,32660,15929,7160,26417,25235,17998,3459,-32420,9535,-18124,666,7968,-17258,-21715,-15666,2249,12422,25685,-9661,32088,27251,-22290,10401,657,-12491,21998,-9032,-9341,-22803,-27120,28113,11457,-27092,8628,4438,26844,18811,-32585,8885,18957,-7444,-4502,-20781,16390,-26619,-4693,-23015,29199,4400,-26696,13117,-19602,29556,-3655,-2099,9220,-31745,-14379,13088,-10930,26460,-10342,-16896,4541,14649,20977,7123,-15148,-19187,-15920,16650,29274,-9553,4757,16179,4589,-7450,16786,-20801,13339,4723,-28968,-26812,25423,18770,-14376,-31417,-1755,-31628,-6203,22815,-30789,14237,-20796,-3653,-11478,6857,-30481,28780,4803,-23926,25295,-31038,18393,-18161,-6276,21300
,-413,-23028,-16947,-22697,19502,-4625,-23209,-28826,-24153,3146,13038,-1905,-27648,-2289,-20048,21397,-6852,8832,27190,28956,-3733,-26250,-24863,-21894,2154,24379,-24876,-11391,7821,-8119,27020,-4322,-19689,-12165,22537,-22692,-28,30295,-13719,10967,29367,-2532,-5404,2034,13403,26057,23342,18301,-6203,-4645,-4247,28797,18339,-4358,10561,-18600,-21234,6881,7585,24867,10511,30787,21680,-19260,13018,13054,-25965,-1637,19879,-18806,-23236,-18883,10340,-11080,-811,-18632,7210,25686,11603,-11048,9764,-32472,-24519,-12028,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 diff --git a/tensorflow/lite/micro/integration_tests/seanet/pad/pad4_input0_int16.csv b/tensorflow/lite/micro/integration_tests/seanet/pad/pad4_input0_int16.csv new file mode 100644 index 0000000..f8a19fa --- /dev/null +++ b/tensorflow/lite/micro/integration_tests/seanet/pad/pad4_input0_int16.csv @@ -0,0 +1 @@ +-6031,839,-31119,-14547,-26902,-31516,-19497,-28126,24560,-16466,-6370,20734,24748,32005,-15637,-9390,-17227,8219,-18232,2911,-29711,8574,-11644,3384,-29263,-18645,21051,22560,26243,15421,-6745,-5586,14431,-29069,-5097,-23125,24099,4718,28244,-16523,6130,-8950,-635,21978,6030,-420,-31064,28079,11062,17568,6700,-27256,23035,12306,12058,-6461,15570,14795,18549,-25402,29166,-20976,-9150,31745,24255,17717,-18160,15722,9011,-146,-3818,-23528,-24727,-3924,-22976,-10328,22014,-760,-608,-23598,-11349,-31621,6935,12396,-23343,3437,-4589,1233,9632,28533,9598,22882,-18944,24498,-29556,-6984,-16932,8137,2283,-10398,1084,-7369,22110,2167,-29930,23145,27761,24831,-11868,631,-12275,8512,18986,3481,-4206,11632,18133,-20866,-2686,-309,10050,1800,-32019,-9389,13250,-5239,6947,-2592,6222,8162,29116,-22208,29155,23538,-6681,-11013,-5064,-26914,-5753,-17136,11710,-20951,5214,16775,4117,13763,-14594,-24958,-25759,3497,-23747,29350,-19568,-5878,-23468,5153,-32344,-11095,18381,-8256,28754,-12894,21341,14697,-6143,-10858,32455,13014,15368,13300,-8646,28276,-2707,-6858,-8632,-488,-1840,20855,30162,18814,-5976,-13483,-14365,-22244,-21833,2884
3,28794,-22218,19541,9758,5610,21887,-30655,-1753,-29855,-24950,-8298,32285,-20679,-6843,-28392,12636,-31396,4521,25427,-17282,30008,12886,28860,28396,32692,-27951,20301,17525,-15737,16435,5878,-8947,29908,18753,441,-3606,772,-24577,-6031,21310,1677,15322,-27668,-11138,-29902,26049,29274,-4820,8827,-28825,21309,15164,-13675,-7745,-14913,-19797,29909,-18528,-5590,-2853,-15649,-13647,12683,-17053,8553,-6199,23298,26403,-2742,-19009,-500,1652,25688,2545,-27864,5918,29607,-4639,-3589,25305,8426,12758,21776,-27195,-18151,27067,5350,7752,-4050,-6526,32314,31348,-29614,-27511,-16243,23344,-32047,-9727,-18686,10870,13299,-23914,-30312,-342,6030,-13785,16877,-31042,-17057,-27235,17389,-25984,-21852,-5263,-11483,-25051,-23516,-28026,-14740,13536,14862,-14798,-7559,-6255,-6689,12073,19669,-25913,-22029,-3117,23766,-26350,-31612,-32269,-11736,3428,-7371,-10119,30750,9932,17519,-2693,-16006,2495,-14314,29063,1128,18929,-2727,30850,13480,835,-17756,6282,5890,4747,-5618,-29290,11282,15965,32560,32234,8756,15012,-26790,-32357,-25848,210,-7553,13318,6508,-11085,-6232,16876,-29859,-13358,14884,12150,-15144,5987,-13333,-27622,11743,-22350,-7578,18554,-14263,-27102,-26898,-6614,5164,-9204,6089,1156,4660,-11748,2245,-21931,-26960,-5864,-23760,-29188,-21715,23474,-12520,-16035,-19318,24625,23666,26615,12510,-17271,-30180,-23394,11571,21865,-6946,4478,-27108,17521,-2445,29507,3253,29281,13201,20779,19209,-19337,-30892,-19846,-1404,8100,10115,3998,-14147,6014,22616,17548,27060,-30308,-11466,24442,21407,-25166,24093,10383,16042,6889,744,-27559,-27319,18789,31642,-18252,13388,-8125,3785,3951,-10720,4556,7652,-14926,-22269,28736,10408,5865,20665,-696,-31780,15384,-22771,-27137,-26592,26159,30016,14261,-18769,-9898,-720,24967,-31363,-31300,-5675,-28135,24290,29350,-12826,-15335,2259,30528,-18275,24286,32335,-27067,31284,1651,-32665,-5053,-32160,23300,-21980,-23815,-18189,-18739,18589,24527,31946,3716,-201,-6710,-31314,4538,-13817,-10802,-12865,-22532,-21500,-2832,29760,-17318,-31791,-23818,-15
486,-14587,-22531,30758,24726,-14121,-18325,-8368,-28057,-4282,2263,-8122,-31670,-31858,37,29064,-21128,-28675,18637,4231,18615,-3392,-16453,-25495,4363,-12726,7578,21475,20191,-14034,-2330,32443,24519,-18321,2619,29026,4144,-24557,21326,-27139,12751,-14210,21440,19975,11053,-22339,12503,-8049,-17710,24345,-19084,-31877,-13740,31770,-3147,-24880,7810,-21741,22720,-24676,30604,30072,12080,-21614,31572,852,-3441,-27385,-6247,3546,22202,-15969,-27904,-28111,24561,-31588,-27131,18899,5092,-14881,-22930,50,-23212,-9319,31110,-6746,8394,24348,23356,18318,21342,-10043,23759,2485,-12754,19489,-9276,-23136,-24435,24044,-17870,31441,23727,-24865,24360,32130,-10704,-9605,7468,5567,20177,-12079,16738,22597,14830,-18031,3159,16057,-16542,-32516,11728,8603,-32347,-20253,18415,22394,-27980,-4182,21387,5056,-19662,-8165,1604,21935,-30358,-24853,16739,6706,-3367,-12934,5072,-14678,-29035,30343,32029,-297,-18650,-15010,14796,-3750,-3281,9857,-16899,10953,-15719,-13300,-19670,-29722,1656,-11844,3115,19419,-10350,13450,-18909,17208,1490,-13743,23032,4348,3604,-889,-10496,-26507,17742,-27399,18456,-13770,32246,12776,-26874,16831,-13595,5147,5217,-32313,18732,-14600,-28031,-7933,-5907,-31684,-4888,4133,9591,-6121,21823,-14058,10222,-10567,28305,3735,-10012,-8715,-32534,-6064,-22485,32535,29681,11958,-32437,-14750,-29655,-6239,20054,16563,-10902,5489,19799,-8878,-15330,23927,10266,22436,332,-10679,31866,15377,-5636,-29498,23481,25077,-17358,-22001,12670,29149,26224,-21632,-17598,-2652,16707,4861,-12428,-17728,-6513,13337,31176,-549,12718,11530,-7519,11047,-5187,3443,7084,-14136,14853,17843,-3816,8111,4307,-6514,832,17563,18466,4348,3151,-22818,27640,-6546,-2135,-23765,132,-28641,-13219,8413,-14537,-29418,-29451,19905,-20863,2021,-16243,8962,-9332,17860,32123,5359,-13371,-9138,-10261,-2106,26960,-30867,-3158,27254,30662,13727,-30040,-24645,890,28835,-29749,7742,-6760,30022,3635,12395,-24822,-31166,11549,29452,298,-22243,29774,16361,11426,-8552,28282,24811,-17422,31977,-1959,29913,24592,192
62,-24806,-30431,-24183,-7384,-18460,12648,-27198,-16554,-24296,-23321,-17288,13195,-17418,9484,-27077,16733,-31604,-30874,18180,2018,-9835,-28258,3488,25221,1460,-21521,-27428,10192,-5755,11234,-30650,-30839,8072,27816,-5730,336,-2780,-18256,12221,9500,-16714,-20894,-24002,25900,-20436,-20488,-15786,-3857,-28323,18870,32641,32506,3433,-8382,-30835,-7773,10834,-6586,23064,-1695,-26418,-30566,24716,548,-31341,30035,-32088,29965,-26990,-15969,16587,-28855,8660,-7368,-23897,-1333,-16522,-8738,-7152,-7871,-6029,19517,12378,9004,-16762,29096,17398,31287,6766,-5495,-22670,-11134,26777,9655,31822,11497,-12140,-10658,20417,-27038,7929,-27317,-5040,-6265,-27349,-28532,4798,11577,11811,-21341,-28064,-8560,-20373,14707,-25459,-21995,-25186,30173,7995,-2859,709,-30578,-29259,5109,20105,24538,-15877,23757,14846,-28916,-13620,13587,27836,-31385,7961,-6039,-30990,-8224,13161,-12407,13443,14274,-14412,-10578,-23927,24580,2682,-3562,-1666,-24086,5567,324,-10964,-6506,17602,-5417,11963,-8472,-31441,627,12966,28652,562,7077,5432,18288,8264,5124,10016,-13258,21363,-13560,28847,-3313,24158,8100,10652,14072,-27979,13811,10647,21355,-13584,-11028,15802,-23712,30991,-30706,-22884,-7576,-27627,-22423,20889,-21320,-8636,25195,-32700,-21056,15016,-15916,4097,-27556,4967,8781,13670,5151,20719,-27942,-20431,-16137,3199,-15510,-18515,-2831,9693,4403,16685,3312,14281,-29974,-28489,25972,15698,5063,32497,-3461,23103,21742,-7030,-27742,13897,32379,11307,-18020,-19280,7315,26789,28879,620,23730,29554,29219,-30321,-13101,24409,-183,-31679,5130,-4220,11113,14895,-14933,12576,-12916,23439,-1614,-32106,19794,-11378,27923,902,-20554,-8265,-9738,14856,-25504,2133,-11863,-2558,-22967,-21330,-23481,1774,14731,22195,-4670,-28603,25295,30541,-5190,19055,11033,4911,-28569,10403,18918,6691,26759,-24262,-16069,-1997,17759,-13412,-16787,24953,29896,-27378,32375,26741,-12422,-22534,-11518,21158,-25596,-30381,31588,-8228,-8932,-30239,6015,-21188,-19908,5901,-19233,31664,3127,-18308,-19651,10044,-27766,2796,8939,-17
444,1584,-28202,-14680,-11539,19496,24371,26033,-7849,-15027,29911,-15158,14570,-29929,11063,14801,1747,10201,23576,6071,-6568,-27777,7222,-3234,-8062,-25725,13797,-32185,-22435,-20961,1666,12700,26792,-23949,30200,-18763,3323,-18995,9869,-7041,8925,-4786,19754,22690,-24732,953,-7901,-6722,24181,13991,23730,32709,-26328,21232,10037,15339,-9350,-31911,26156,-32491,19956,-16065,24092,-24634,-24126,23879,24490,-19884,32731,-14570,-24405,-31211,17382,27494,-9182,29156,15110,-5601,11884,-32437,26791,-22706,-23562,22467,-2936,-215,-11186,9040,-24418,-8685,-17934,-22438,25648,-18030,17605,-13873,18779,24989,-8897,12481,-28537,-7807,12152,-10087,-26644,-2601,-31527,29770,-4748,29401,-27684,19720,-19387,-21248,-8951,10396,-23241,32231,-21589,5120,-1385,-29710,-22337,-1429,25404,16172,1134,-27953,-29989,10724,-14176,3512,-29721,14386,4525,-2129,-32090,-30270,7726,-6146,19834,14551,-23374,-13217,26509,-25675,20982,-9432,-29798,25014,-1919,14855,15138,-24634,-22930,-8103,-2784,21005,13903,24938,14629,-9113,-9056,-28521,-20456,17051,-28958,-16933,27711,14163,26091,-14069,18393,29922,26952,-14878,-29223,13380,-7707,-7427,28086,-4239,-9853,21517,-23329,-23047,5233,-17396,-11020,-16226,22537,-4893,21467,-28855,21733,31164,-17415,14146,19611,7858,16699,-4185,9150,-16095,12487,-5911,-21583,1542,2640,-4827,31193,20034,24736,-2560,24863,16733,-7926,-12244,-28088,-24442,-13218,-2891,-19782,-25954,299,26345,9580,-25475,-11127,18606,-22889,19283,8389,-14366,-30056,-18870,-9181,24592,-17190,26559,22418,-7857,-11433,23531,23682,-9106,-3543,-4442,-15373,6416,1519,27632,31519,18382,9853,30522,-15237,-2094,27538,-2907,-25837,-16944,5424,-6267,-11474,27541,-27043,20833,31507,-2215,20560,6067,30586,1903,29252,14678,-7731,11896,-13745,-16830,-10554,15943,-28611,15199,3402,-30546,4836,-18870,32715,-15651,6873,-27375,-14567,-12931,17013,-31199,6786,-4160,-9310,-16506,-19658,27244,-32296,11524,-7472,23782,-7002,-15825,-6410,601,-26149,24780,11402,26831,-11469,-17252,-17299,21228,-27602,5962,-8709,31
831,-31119,-23847,-7214,-31533,-15541,8115,-16293,-20628,-16300,11534,6562,-2645,13177,3370,29878,5532,-25189,-30291,22230,14685,15074,19372,8936,-2899,17046,8767,20579,-19290,8743,-10356,22579,-15909,1155,-2497,2743,11615,-25665,4908,-7009,14141,-10273,-16403,20100,26174,15701,-14225,17059,-4334,-1020,-8779,11955,-22112,-8265,-18417,-32265,-3974,-17346,-5229,-18927,-27372,23717,-25340,19234,12499,-29025,-18612,-14871,5497,-30472,22711,14014,16220,-28604,25745,6395,-15701,3274,29897,17646,25810,29034,9119,-4767,31419,15659,25733,11626,28546,10968,-29172,-22043,-5019,-16374,3983,29779,27746,10835,-20662,-2324,-11094,-15392,-30947,-23983,32426,31530,4178,-25438,-6199,16316,568,-14385,25091,10179,7868,16217,13566,17577,-5674,30511,-2497,-7399,-14126,-20949,-4708,24450,-28325,2651,-11744,29432,-10551,-1039,-7861,-22408,-8314,26017,-17783,-15117,10421,-21623,12943,21681,-25231,-10452,25803,-13874,-17409,-30919,20764,18394,7579,16733,25382,-10403,-18300,6392,26617,28754,-2201,535,28981,5830,-13444,23969,-15319,-16192,-30680,-12691,-18255,3648,15316,-9566,-15897,15677,-30231,-5329,30449,26416,10160,29274,-2833,30194,21942,-12344,-19811,-7320,-7127,28574,30093,-24688,10292,31515,-11203,-25065,24368,31727,8667,-14726,6466,-26085,31470,-32697,6468,1672,-6300,22639,3655,-6406,18155,-12149,18577,-27983,14874,26278,-22292,-10206,8576,-17954,29525,22977,7498,-16618,23781,-18047,8736,9515,-8642,-29139,-32200,16401,-31285,-9257,-26474,-16814,21585,2069,24331,23445,-12907,13758,-7322,20559,-8201,-18199,2264,-8967,13221,-20207,-5018,-14621,-20285,-23743,-23711,-6995,-17576,11122,8811,22761,-20515,-24228,-24869,-27866,5060,-6439,-16940,11888,30920,21385,13783,23891,27571,-20479,22549,27431,269,-15430,8871,15024,-8736,12453,-26228,-16757,9874,-28049,-1600,-29632,-19963,-16269,-24669,-9619,-14486,22209,23773,-29247,22010,8228,24415,-11239,-622,6837,-17211,-9355,-21651,-420,-26864,9237,-10502,30323,16058,6555,4854,-26620,-30020,21161,16960,15661,-9257,-20131,25682,8789,12344,19256,-88,-1
8235,-13765,22557,-1187,-25565,-27663,-14362,32667,22403,7122,-29374,-20273,2692,31876,-27844,30084,32610,-24635,6974,7837,29512,6335,-31170,-10365,-21467,-21233,-12374,-27502,-9309,-4985,32315,-31552,-23275,-30942,6190,27704,7861,-3238,-24516,15524,21584,32371,-31998,-24731,4205,-20415,14806,-22400,-18699,-14741,-30508,-14295,-19250,7264,-16123,9599,20627,-15745,-26615,-2455,-24049,-11476,-19339,-13853,-4589,-31159,25041,31900,5087,-21086,15547,5504,-30488,28283,-20963,30482,31590,-5193,13965,-27584,-12971,29246,27556,27107,18128,-9141,-21891,-27455,-23687,-32226,-6333,20623,3521,-8828,19161,-8169,-3505,26252,-20761,-7362,10477,-8158,-7158,-26620,4178,10629,-9240,1013,31771,17779,7412,-20252,11287,31792,8228,32652,-12026,20940,-15635,-1300,-8252,17690,7330,5346,-29830,8181,23470,-1317,-27926,-22115,26238,26444,-18998,19244,-7205,32259,-13207,-13605,-3604,1204,24445,10155,23590,-7649,30357,11102,13282,-21205,-7691,8663,32756,24866,-10211,-6960,32609,31174,-26773,-1029,-18971,-19354,10850,27233,-5816,-19032,-6093,25469,-11415,8359,-28857,-25828,-29999,3407,23202,-6087,-23367,-5299,-11801,26468,-31408,3939,28772,15560,3681,-17808,32553,14844,13509,30929,-11551,-24872,-3094,-13350,-20036,-27887,-1534,-13400,-14803,-11763,-15218,-11312,32367,-1641,20685,30538,-15074,-27633,-28023,7072,-6297,2661,-9728,-5959,-6565,-24132,2125,-9779,-2895,16417,-31330,-11188,14936,14335,-3039,-25422,16108,-5680,-24543,14253,24327,-31253,-12670,-12299,3858,-12578,30546,17494,24078,-28328,-26261,20500,-3537,-15115,2810,27740,2369,8833,-4891,-22502,-15773,-32075,24516,-27403,-10541,5715,-9855,19124,-20437,26192,3643,13105,-21555,22348,6693,-25589,32269,9930,-2502,1852,-19077,-18215,19836,-29065,23372,6947,22553,-28273,1648,-5926,-16316,-16810,23912,19154,2061,-2080,25564,-16680,-1792,5650,5588,-15079,22202,-14957,13108,19679,8528,18961,-22237,-4082,-27252,-4403,17457,23541,-25584,25866,31541,7538,-22719,7451,5329,-25652,-28107,-15848,-8372,16685,28273,12488,31890,14580,-13895,18368,15831,-18
349,11329,19132,12466,5413,14984,10424,-17993,-28651,-1824,-14722,-9454,28771,6538,-27519,-20838,-23501,-15594,19708,-15552,-11666,15635,-11074,-2985,-10842,-10366,22259,6460,-30665,-2643,-17462,31499,31104,-7783,-1459,-14440,-12076,7395,29058,27777,-24491,22999,-6767,17841,3875,-17961,-28170,19080,6495,4788,18923,14732,23094,-5331,-14330,-17023,-16574,7158,22382,30959,23716,25540,31510,-18630,-10474,-28056,17889,3752,27371,7310,-3267,-1848,25141,30751,-5410,21019,-879,21344,24907,-9941,-14150,19355,-23315,-19594,12432,30287,-10015,22592,-26791,1349,21562,15665,-26958,189,15244,6533,-28638,16087,10023,2303,6626,-29050,23512,-18101,12994,32362,31897,22892,31593,8444,-10178,11043,-22781,-13910,22019,-10326,-16861,30961,10736,-11991,-23904,22862,-27358,179,-22559,-8791,-14478,4941,29807,12075,-18528,17647,10507,27219,4393,-5209,19158,23955,-21598,-26963,-28662,7271,-14122,31977,11136,-15487,-81,-18782,-26125,11090,8509,-23415,14089,-4973,31855,-2467,13412,-11022,20202,13343,4942,-25095,8993,21706,-17440,-13365,17899,28152,-21923,16426,-27284,15459,2554,-4513,-31634,-10913,-27855,7028,1472,-21699,2015,10668,-21686,32534,13565,-23937,-25428,-4490,14966,-3066,5253,-364,27782,-21610,4935,20564,26185,-6257,-30024,-12327,-22197,-21661,18725,7405,17387,10322,14385,-29466,30052,-12579,-20783,-23059,2913,-3098,24631,9171,-5702,-4433,28192,-12158,-29464,14518,-5744,-12276,-6255,-3020,-19153,28925,12930,-1779,25309,23094,-6816,3074,-21735,-25861,-28405,-7506,20245,5339,-8343,15899,58,-23364,-7338,-15618,-31341,-6760,32660,15929,7160,26417,25235,17998,3459,-32420,9535,-18124,666,7968,-17258,-21715,-15666,2249,12422,25685,-9661,32088,27251,-22290,10401,657,-12491,21998,-9032,-9341,-22803,-27120,28113,11457,-27092,8628,4438,26844,18811,-32585,8885,18957,-7444,-4502,-20781,16390,-26619,-4693,-23015,29199,4400,-26696,13117,-19602,29556,-3655,-2099,9220,-31745,-14379,13088,-10930,26460,-10342,-16896,4541,14649,20977,7123,-15148,-19187,-15920,16650,29274,-9553,4757,16179,4589,-7450,1678
6,-20801,13339,4723,-28968,-26812,25423,18770,-14376,-31417,-1755,-31628,-6203,22815,-30789,14237,-20796,-3653,-11478,6857,-30481,28780,4803,-23926,25295,-31038,18393,-18161,-6276,21300,-413,-23028,-16947,-22697,19502,-4625,-23209,-28826,-24153,3146,13038,-1905,-27648,-2289,-20048,21397,-6852,8832,27190,28956,-3733,-26250,-24863,-21894,2154,24379,-24876,-11391,7821,-8119,27020,-4322,-19689,-12165,22537,-22692,-28,30295,-13719,10967,29367,-2532,-5404,2034,13403,26057,23342,18301,-6203,-4645,-4247,28797,18339,-4358,10561,-18600,-21234,6881,7585,24867,10511,30787,21680,-19260,13018,13054,-25965,-1637,19879,-18806,-23236,-18883,10340,-11080,-811,-18632,7210,25686,11603,-11048,9764,-32472,-24519,-12028 diff --git a/tensorflow/lite/micro/integration_tests/seanet/pad/pad5.tflite b/tensorflow/lite/micro/integration_tests/seanet/pad/pad5.tflite new file mode 100644 index 0000000..380ce73 Binary files /dev/null and b/tensorflow/lite/micro/integration_tests/seanet/pad/pad5.tflite differ diff --git a/tensorflow/lite/micro/integration_tests/seanet/pad/pad5_golden_int16.csv b/tensorflow/lite/micro/integration_tests/seanet/pad/pad5_golden_int16.csv new file mode 100644 index 0000000..abdd9de --- /dev/null +++ b/tensorflow/lite/micro/integration_tests/seanet/pad/pad5_golden_int16.csv @@ -0,0 +1 @@ 
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1701,18054,-7382,11352,-14745,29311,-19245,3338,-27603,20112,14165,-24038,26858,3293,29215,5040,26120,-23557,5970,2871,-2515,-17768,-17314,23618,18345,25178,14967,-28747,-4155,23678,-32163,-12920,27259,-8663,11175,20706,25649,-6366,-6771,8095,-7308,-7099,-8432,-2270,-29047,-27760,-29015,-28685,5004,17247,-31469,15597,-22135,-23394,-24463,7408,6330,22416,-16168,9378,-21545,3111,25688,-22818,-10272,-2906,-8294,-23988,4228,-28348,18385,27746,-28725,-5089,27424,-15136,-3818,26988,19861,1984,-31922,-5324,-17785,25814,6373,23022,-4178,-18020,10787,-3793,13629,-25500,-28188,-11430,20520,-14263,-19718,4137,-715,-17022,-21037,26855,27421,-27168,10224,-15511,2006,-18275,-25387,-29915,14884,-15608,23538,-17940,9226,29522,14625,-1726,29906,5671,-5476,-22822,17680,31250,23154,-1804,4072,-13330,-11485,29897,23680,14450,5938,-5664,-25953,14288,-4495,-17625,-15225,-27918,7590,-1554,23262,25722,-10590,16531,-14075,-32425,3340,-27608,-18124,-18051,8304,-4,-18841,23158,-8299,15888,-16724,-20450,-29191,20919,9575,-2759,-25742,-17614,10328,10113,17994,14663,24854,11119,-29143,25770,9414,-13156,9044,8151,28950,-8654,1801,-18173,-28459,-20759,7381,30592,27433,15729,11583,-13089,2300,18447,-4659,8584,-24734,-10738,31083,-29847,-1495,-31806,8527,-12382,-12846,9057,22381,3424,15808,23014,2372,-32621,26303,-6101,11279,-27800,-18285,28375,-30147,-29168,24574,5492,19932,25755,-5201,-9493,-7537,19536,-10833,18274,20933,-249,20630,-30748,-20394,26699,14271,-26901,3002,16965,14940,-28814,18198,-5545,-28459,-12622,-31886,26654,-30918,29132,-7411,7475,13014,-13427,-5283,-5779,-29276,11118,-8336,9029,20097,17170,9947,545,19514,-29257,22711,-2136,-244,8119,20454,-23715,-20512,-28600,23007,-10121,-16201,-1196,-7226,-31303,-19471,-13425,-116,-30280,-17169,6638,-5566,25929,19590,-9742,14880,21773,30419,10284,-8996,-24750,3115,-23322,-19379,32164,-10988,-5283,6037,30080,-16775,8043,-30502,-23571,-13019,26295,3567,6250,5376,-25101,-20136,-304
94,-25119,-20896,17212,-20985,23692,28917,16456,-2751,-9360,9271,-16970,-28798,15445,25064,-18139,-21968,-16562,20418,25119,-4230,-5068,26018,19321,-19413,20297,29557,9353,29544,18354,-18895,-4010,-1214,18837,-10558,-32556,23190,12717,30383,-23996,12576,-32041,-16605,12922,-10542,-21098,815,27208,19842,5538,22093,-25773,28602,-27588,19484,-20266,22855,-13100,-10378,16278,19558,31427,-1739,-15101,-15857,-26462,19162,-15401,-19075,-1067,-22916,-13887,20523,23444,11799,-13143,32525,265,13215,11537,1151,17080,15766,28463,17634,26968,17044,-21689,-31278,30296,11692,-3875,14699,-14901,-940,22098,-21991,23903,7597,27306,-16275,-13520,-8617,-28731,26112,16664,-28290,-19220,-1167,-8207,8295,-18777,-23224,12868,17847,-22545,-16456,9158,4024,-850,-4928,-21606,24565,-3358,-79,-26869,-26337,19340,-15800,-16628,30640,26826,15167,-8540,-19338,-29575,4338,-5342,10501,-4814,17734,-17260,-25683,-5581,20034,-30166,19083,-580,19276,8371,13283,-4107,-14907,2253,-5261,-30519,-4710,-30664,-22362,7461,32423,11739,-27085,-9469,-19386,7988,-11592,-12719,-9319,-18382,-13292,14910,647,-14737,-1554,-3655,30476,-519,-6467,-17418,-15529,-13106,20289,6676,4305,-25143,-28453,-23888,-18898,-8470,-27421,-24349,-27078,-4871,-22089,-31875,13403,-422,-6946,-10571,28168,-4848,16667,-17679,-2997,-15167,-23398,-11899,-912,24864,-20463,14858,4135,-15827,-25286,5910,15273,-1791,-11796,-21627,23370,31874,-16936,15602,-24899,23839,-16469,27353,12021,6184,-6082,7948,31139,28481,-10910,-13004,29985,19071,11327,-23986,-29821,5857,-10394,-31617,7673,14553,-1530,19443,-328,14994,-11730,-2673,-9542,-1137,23688,-21573,14894,-31237,27567,26436,-26941,20979,-20545,-28585,-5683,12694,-15836,10057,19959,-30386,28324,28753,-14491,20004,-30352,26022,-9979,6149,-23149,-4642,408,17151,-29661,-8824,18590,2010,-6816,7211,-1979,16398,-24629,-16553,20060,11498,-28013,19565,1614,31218,-1663,-14854,-28080,-9622,28357,13278,13700,4861,18219,-25788,-32698,-3583,725,23150,9741,30690,31146,-3087,14339,31648,-31102,-22719,22077,53,-721
1,20640,-16030,16473,9952,-31846,22713,-13663,21645,-15347,-21740,10153,-9236,-31406,21642,-7432,-24322,-14889,-24676,27490,29718,-1686,30115,18576,746,-23604,16130,12128,-386,-2439,26146,2912,-6533,6990,-16259,24600,-30964,-30051,9242,11261,-32334,18385,-6132,11920,26332,-3930,-7532,12327,12023,-18584,-24625,27103,-13192,-9268,16190,29626,-7592,-7760,15397,-22722,5896,4099,27575,20790,-25536,19143,19525,29784,-2025,-1345,-25835,-29265,12236,13304,-7135,-26943,-9039,-20275,25526,15837,29412,22406,-1123,1756,-28084,-30947,-19331,13932,-24121,2539,24990,21619,28614,4068,-27164,21156,25329,-25475,-9350,-26933,14876,18826,2186,6491,-30559,-13204,32036,26606,1610,-22497,25073,-5783,25531,-28665,18712,24592,-7797,-30716,17910,11136,25448,26403,-31038,7838,-31241,-12232,-29423,26669,-3604,20431,14446,23715,24979,17866,-15102,11084,-19183,20710,31831,25049,-17146,-27470,6052,28688,14877,27670,-31126,-4268,-17993,-10383,4452,-23163,-12151,-16753,14315,-6297,2215,1905,-32322,-18345,21361,26355,1361,8354,-7777,3575,-32562,10663,10274,-4515,28122,-205,13662,-13661,17710,-30350,25092,5928,15293,-23410,-16386,-31217,26658,-26944,-16188,17519,-27130,-30902,2876,-10337,-1272,-24667,25914,-16307,8786,-17251,-12279,-6280,4841,19108,6096,26526,-23607,6576,11548,-20082,31339,25029,7617,30856,-26722,-1648,2896,-19578,19210,32562,27712,-4701,-8950,32438,23722,-24972,2688,-7049,-2878,19925,24403,31223,-24276,-11162,-27338,27981,-31108,-2502,4478,-16991,-25234,26691,14101,-17811,-4489,-5932,26710,-10707,14284,28779,10328,15931,-2665,-21493,-11029,-21093,-17634,25109,23691,28516,-20440,24823,28930,5395,-16324,1790,26314,-35,22142,20941,-10680,18359,21751,31216,23155,-25603,-15244,-31850,26439,31555,1616,3025,-29698,17177,-14155,30844,20024,14102,-32628,26688,-6601,-15098,7622,11048,-11816,-18479,12882,-28690,-2610,-20888,-6821,-7121,1561,9804,28993,-13705,900,6339,9230,13071,8540,-26401,29361,11239,-18921,-21352,-25421,-32314,13764,-17658,-4946,-5743,-12727,3356,-16606,-21024,-24873,30963,1
0220,1946,9000,-11563,-32654,17961,-8540,-24932,-21706,-4155,-15352,-4917,10949,19209,24537,30627,19767,18108,21891,-28870,14586,-25436,32150,-12186,-27059,-31887,18471,-6263,-5688,-23615,-23281,-5197,-14613,355,-12048,-22160,-26719,3413,-4702,17515,-21931,32400,7821,-19550,-12335,-6892,-20492,-19856,5705,14444,10273,-27814,-9372,-5461,-5773,-12235,-28424,-4484,-5562,14140,27423,-13650,14846,21471,-14109,7570,21089,12110,-24165,-14767,13236,31705,13351,-5313,-21124,32092,18457,26067,-4698,-31496,-24517,29775,-910,7006,31665,5484,-31546,27519,-25572,6493,19329,14364,4664,4144,-1318,1126,-10104,7552,-24708,-1274,-24555,-17997,11179,-9418,-26617,7445,-13510,16682,28989,27501,7874,-24078,-32320,12253,17924,7114,-13834,-30204,29803,-10963,28090,-10887,-7327,-26405,22014,-23446,-14495,25495,214,-1887,15969,23250,-30070,7615,18027,-12723,31882,5613,13313,-5254,18656,29840,-20618,4579,15597,3571,-25281,-26183,-15808,-15638,18594,-4619,18447,443,25981,-25756,5522,-12588,-28737,-23080,29031,-30606,-32453,12055,-16365,-7129,-19470,20245,-29991,-16680,32405,29838,25560,-10512,-23004,-7881,-10245,-28253,27344,-25740,7818,9649,9460,-5991,12826,25164,-11872,-25408,-25477,-11060,19122,-11427,7679,30990,-24245,14346,21170,20614,23187,21922,9812,-9316,-7838,-30737,28312,-16152,-22271,2419,-284,7626,14758,-20144,1418,4583,-30945,-18039,19343,2783,-9849,26364,-11527,-15985,12531,-21816,-29667,26465,27161,16046,-29828,-4340,-22452,30899,-31884,-23669,4433,15169,28289,25081,27561,10444,5982,24054,-23427,16701,-18761,6341,-31508,-885,31167,-4398,-11584,-12676,-5671,9233,-5629,31980,18012,-3662,-25446,22393,-20738,-14383,9866,-9742,3294,7378,-9879,-12917,-31476,-14727,24430,7525,24791,12859,1522,-17584,-18749,28867,-7176,-9317,-8270,5454,-11447,-6841,29206,-9480,-6615,3787,-12964,-19129,-15542,5703,-26793,25122,-10818,23019,-24914,-7982,29814,24877,-27997,-30613,14042,-5320,-17013,6440,24402,-2802,5140,-2799,-22244,31180,-29833,4396,15936,-25832,-8888,10269,30266,-29621,19357,0,0,0,0,0,0,0
,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 diff --git a/tensorflow/lite/micro/integration_tests/seanet/pad/pad5_input0_int16.csv b/tensorflow/lite/micro/integration_tests/seanet/pad/pad5_input0_int16.csv new file mode 100644 index 0000000..944e2d8 --- /dev/null +++ b/tensorflow/lite/micro/integration_tests/seanet/pad/pad5_input0_int16.csv @@ -0,0 +1 @@ +1701,18054,-7382,11352,-14745,29311,-19245,3338,-27603,20112,14165,-24038,26858,3293,29215,5040,26120,-23557,5970,2871,-2515,-17768,-17314,23618,18345,25178,14967,-28747,-4155,23678,-32163,-12920,27259,-8663,11175,20706,25649,-6366,-6771,8095,-7308,-7099,-8432,-2270,-29047,-27760,-29015,-28685,5004,17247,-31469,15597,-22135,-23394,-24463,7408,6330,22416,-16168,9378,-21545,3111,25688,-22818,-10272,-2906,-8294,-23988,4228,-28348,18385,27746,-28725,-5089,27424,-15136,-3818,26988,19861,1984,-31922,-5324,-17785,25814,6373,23022,-4178,-18020,10787,-3793,13629,-25500,-28188,-11430,20520,-14263,-19718,4137,-715,-17022,-21037,26855,27421,-27168,10224,-15511,2006,-18275,-25387,-29915,14884,-15608,23538,-17940,9226,29522,14625,-1726,29906,5671,-5476,-22822,17680,31250,23154,-1804,4072,-13330,-11485,29897,23680,14450,5938,-5664,-25953,14288,-4495,-17625,-15225,-27918,7590,-1554,23262,25722,-10590,16531,-14075,-32425,3340,-27608,-18124,-18051,8304,-4,-18841,23158,-8299,15888,-16724,-20450,-29191,20919,9575,-2759,-25742,-17614,10328,10113,17994,14663,24854,11119,-29143,25770,9414,-13156,9044,8151,28950,-8654,1801,-18173,-28459,-20759,7381,30592,27433,15729,11583,-13089,2300,18447,-4659,8584,-24734,-10738,31083,-29847,-1495,-31806,8527,-12382,-12846,9057,22381,3424,15808,23014,2372,-32621,26303,-6101,11279,-27800,-18285,28375,-30147,-29168,24574,5492,19932,25755,-5201,-9493,-7537,19536,-10833,18274,20933,-249,20630,-30748,-20394,26699,14271,-26901,3002,16965,14940,-28814,18198,-5545,-28459,-12622,-31886,26654,-30918,29132,-7411,7475,13014,-13427,-5283,-5779,-29276,11118,-8336,9029,20097,17170,9947,545,19514,-29257,22711,-21
36,-244,8119,20454,-23715,-20512,-28600,23007,-10121,-16201,-1196,-7226,-31303,-19471,-13425,-116,-30280,-17169,6638,-5566,25929,19590,-9742,14880,21773,30419,10284,-8996,-24750,3115,-23322,-19379,32164,-10988,-5283,6037,30080,-16775,8043,-30502,-23571,-13019,26295,3567,6250,5376,-25101,-20136,-30494,-25119,-20896,17212,-20985,23692,28917,16456,-2751,-9360,9271,-16970,-28798,15445,25064,-18139,-21968,-16562,20418,25119,-4230,-5068,26018,19321,-19413,20297,29557,9353,29544,18354,-18895,-4010,-1214,18837,-10558,-32556,23190,12717,30383,-23996,12576,-32041,-16605,12922,-10542,-21098,815,27208,19842,5538,22093,-25773,28602,-27588,19484,-20266,22855,-13100,-10378,16278,19558,31427,-1739,-15101,-15857,-26462,19162,-15401,-19075,-1067,-22916,-13887,20523,23444,11799,-13143,32525,265,13215,11537,1151,17080,15766,28463,17634,26968,17044,-21689,-31278,30296,11692,-3875,14699,-14901,-940,22098,-21991,23903,7597,27306,-16275,-13520,-8617,-28731,26112,16664,-28290,-19220,-1167,-8207,8295,-18777,-23224,12868,17847,-22545,-16456,9158,4024,-850,-4928,-21606,24565,-3358,-79,-26869,-26337,19340,-15800,-16628,30640,26826,15167,-8540,-19338,-29575,4338,-5342,10501,-4814,17734,-17260,-25683,-5581,20034,-30166,19083,-580,19276,8371,13283,-4107,-14907,2253,-5261,-30519,-4710,-30664,-22362,7461,32423,11739,-27085,-9469,-19386,7988,-11592,-12719,-9319,-18382,-13292,14910,647,-14737,-1554,-3655,30476,-519,-6467,-17418,-15529,-13106,20289,6676,4305,-25143,-28453,-23888,-18898,-8470,-27421,-24349,-27078,-4871,-22089,-31875,13403,-422,-6946,-10571,28168,-4848,16667,-17679,-2997,-15167,-23398,-11899,-912,24864,-20463,14858,4135,-15827,-25286,5910,15273,-1791,-11796,-21627,23370,31874,-16936,15602,-24899,23839,-16469,27353,12021,6184,-6082,7948,31139,28481,-10910,-13004,29985,19071,11327,-23986,-29821,5857,-10394,-31617,7673,14553,-1530,19443,-328,14994,-11730,-2673,-9542,-1137,23688,-21573,14894,-31237,27567,26436,-26941,20979,-20545,-28585,-5683,12694,-15836,10057,19959,-30386,28324,28753,-1449
1,20004,-30352,26022,-9979,6149,-23149,-4642,408,17151,-29661,-8824,18590,2010,-6816,7211,-1979,16398,-24629,-16553,20060,11498,-28013,19565,1614,31218,-1663,-14854,-28080,-9622,28357,13278,13700,4861,18219,-25788,-32698,-3583,725,23150,9741,30690,31146,-3087,14339,31648,-31102,-22719,22077,53,-7211,20640,-16030,16473,9952,-31846,22713,-13663,21645,-15347,-21740,10153,-9236,-31406,21642,-7432,-24322,-14889,-24676,27490,29718,-1686,30115,18576,746,-23604,16130,12128,-386,-2439,26146,2912,-6533,6990,-16259,24600,-30964,-30051,9242,11261,-32334,18385,-6132,11920,26332,-3930,-7532,12327,12023,-18584,-24625,27103,-13192,-9268,16190,29626,-7592,-7760,15397,-22722,5896,4099,27575,20790,-25536,19143,19525,29784,-2025,-1345,-25835,-29265,12236,13304,-7135,-26943,-9039,-20275,25526,15837,29412,22406,-1123,1756,-28084,-30947,-19331,13932,-24121,2539,24990,21619,28614,4068,-27164,21156,25329,-25475,-9350,-26933,14876,18826,2186,6491,-30559,-13204,32036,26606,1610,-22497,25073,-5783,25531,-28665,18712,24592,-7797,-30716,17910,11136,25448,26403,-31038,7838,-31241,-12232,-29423,26669,-3604,20431,14446,23715,24979,17866,-15102,11084,-19183,20710,31831,25049,-17146,-27470,6052,28688,14877,27670,-31126,-4268,-17993,-10383,4452,-23163,-12151,-16753,14315,-6297,2215,1905,-32322,-18345,21361,26355,1361,8354,-7777,3575,-32562,10663,10274,-4515,28122,-205,13662,-13661,17710,-30350,25092,5928,15293,-23410,-16386,-31217,26658,-26944,-16188,17519,-27130,-30902,2876,-10337,-1272,-24667,25914,-16307,8786,-17251,-12279,-6280,4841,19108,6096,26526,-23607,6576,11548,-20082,31339,25029,7617,30856,-26722,-1648,2896,-19578,19210,32562,27712,-4701,-8950,32438,23722,-24972,2688,-7049,-2878,19925,24403,31223,-24276,-11162,-27338,27981,-31108,-2502,4478,-16991,-25234,26691,14101,-17811,-4489,-5932,26710,-10707,14284,28779,10328,15931,-2665,-21493,-11029,-21093,-17634,25109,23691,28516,-20440,24823,28930,5395,-16324,1790,26314,-35,22142,20941,-10680,18359,21751,31216,23155,-25603,-15244,-31850,26439,3155
5,1616,3025,-29698,17177,-14155,30844,20024,14102,-32628,26688,-6601,-15098,7622,11048,-11816,-18479,12882,-28690,-2610,-20888,-6821,-7121,1561,9804,28993,-13705,900,6339,9230,13071,8540,-26401,29361,11239,-18921,-21352,-25421,-32314,13764,-17658,-4946,-5743,-12727,3356,-16606,-21024,-24873,30963,10220,1946,9000,-11563,-32654,17961,-8540,-24932,-21706,-4155,-15352,-4917,10949,19209,24537,30627,19767,18108,21891,-28870,14586,-25436,32150,-12186,-27059,-31887,18471,-6263,-5688,-23615,-23281,-5197,-14613,355,-12048,-22160,-26719,3413,-4702,17515,-21931,32400,7821,-19550,-12335,-6892,-20492,-19856,5705,14444,10273,-27814,-9372,-5461,-5773,-12235,-28424,-4484,-5562,14140,27423,-13650,14846,21471,-14109,7570,21089,12110,-24165,-14767,13236,31705,13351,-5313,-21124,32092,18457,26067,-4698,-31496,-24517,29775,-910,7006,31665,5484,-31546,27519,-25572,6493,19329,14364,4664,4144,-1318,1126,-10104,7552,-24708,-1274,-24555,-17997,11179,-9418,-26617,7445,-13510,16682,28989,27501,7874,-24078,-32320,12253,17924,7114,-13834,-30204,29803,-10963,28090,-10887,-7327,-26405,22014,-23446,-14495,25495,214,-1887,15969,23250,-30070,7615,18027,-12723,31882,5613,13313,-5254,18656,29840,-20618,4579,15597,3571,-25281,-26183,-15808,-15638,18594,-4619,18447,443,25981,-25756,5522,-12588,-28737,-23080,29031,-30606,-32453,12055,-16365,-7129,-19470,20245,-29991,-16680,32405,29838,25560,-10512,-23004,-7881,-10245,-28253,27344,-25740,7818,9649,9460,-5991,12826,25164,-11872,-25408,-25477,-11060,19122,-11427,7679,30990,-24245,14346,21170,20614,23187,21922,9812,-9316,-7838,-30737,28312,-16152,-22271,2419,-284,7626,14758,-20144,1418,4583,-30945,-18039,19343,2783,-9849,26364,-11527,-15985,12531,-21816,-29667,26465,27161,16046,-29828,-4340,-22452,30899,-31884,-23669,4433,15169,28289,25081,27561,10444,5982,24054,-23427,16701,-18761,6341,-31508,-885,31167,-4398,-11584,-12676,-5671,9233,-5629,31980,18012,-3662,-25446,22393,-20738,-14383,9866,-9742,3294,7378,-9879,-12917,-31476,-14727,24430,7525,24791,12859,1522,
-17584,-18749,28867,-7176,-9317,-8270,5454,-11447,-6841,29206,-9480,-6615,3787,-12964,-19129,-15542,5703,-26793,25122,-10818,23019,-24914,-7982,29814,24877,-27997,-30613,14042,-5320,-17013,6440,24402,-2802,5140,-2799,-22244,31180,-29833,4396,15936,-25832,-8888,10269,30266,-29621,19357 diff --git a/tensorflow/lite/micro/integration_tests/seanet/pad/pad6.tflite b/tensorflow/lite/micro/integration_tests/seanet/pad/pad6.tflite new file mode 100644 index 0000000..a292b54 Binary files /dev/null and b/tensorflow/lite/micro/integration_tests/seanet/pad/pad6.tflite differ diff --git a/tensorflow/lite/micro/integration_tests/seanet/pad/pad6_golden_int16.csv b/tensorflow/lite/micro/integration_tests/seanet/pad/pad6_golden_int16.csv new file mode 100644 index 0000000..2f8248c --- /dev/null +++ b/tensorflow/lite/micro/integration_tests/seanet/pad/pad6_golden_int16.csv @@ -0,0 +1 @@ +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,-22907,-22140,11614,18890,4333,-30946,-23444,23127,-22231,-26254,18974,-7150,3759,-5139,437,12080,-1010,7869,-18148,9143,-30802,14385,26668,31802,2871,-24649,19253,-32530,22572,31262,27972,18422,-23816,-15892,-5768,-7061,7929,-31353,-16575,-32489,3794,-5070,32479,13396,14351,-7366,-176,3706,-7469,14824,18877,29673,-22220,18746,-16942,4719,-24573,-26597,12068,-19179,-19906,9945,2955,29236,-25741,1275,-31326,10625,-19463,14200,30896,-31103,-31461,-30047,4360,-22462,10097,25632,-32095,18434,13731,-12395,-20629,27474,319,-13520,-31097,4052,32498,23603,10143,32299,-15503,-21230,11324,-4158,-30425,23674,7979,22850,-12536,23778,-751,6758,-20070,-22042,3699,-22284,1667,-2564,-26512,21939,-15644,-29133,-20931,5820,20755,-10153,77,24738,-30444,-1676,22433,18527,-26072,11841,1344,-26075,-2717,-28788,11210,20004,6006,32687,6506,20218,24058,28246,-12251,16054,-22147,31344,-5087,-18692,30943,28807,-15389,-20510,25457,-25977,-31627,-791,21989,11887,23891,1627,-2489,22802,-2805,15762,3262,25842,11633,22957,-10552,-26700,28212,12256,-20831,5590,18472,-23
233,32424,26408,13110,4081,-27340,-13116,-18670,-7353,-24153,11388,-22571,8592,-7567,-13940,-13478,8293,-8019,26773,32074,17293,8218,2155,-9074,-21423,22606,-23688,-27813,14799,-3331,13919,-20484,5491,-18433,-23909,29888,27269,-5889,-17341,271,-1730,4495,5968,2119,4340,5972,-5163,8696,-6210,7642,-138,2565,-25212,31732,9234,4076,3881,-18174,25780,-11972,-22766,-19200,7659,10955,22825,17269,2506,26022,8532,27623,1229,2602,-21591,30457,-29237,20801,-23118,-13572,31009,-30642,-25020,17329,9498,19127,24989,-7055,-12570,-11616,-21926,-26857,-24618,27113,7215,-16634,-16034,-31336,21307,4169,5341,-29564,-3865,-23380,19340,26566,-16188,14948,-26834,-25709,-26791,-21042,-12692,-6723,9377,-3238,-26634,-25198,-16405,-6703,-21605,15890,13102,-14740,20714,-7190,26891,-2077,22938,-14066,-18987,4714,413,-25613,-16133,-6039,25455,32071,-16775,-3683,25099,30108,-3067,15738,-12577,-15470,29884,-13671,-3791,-30590,14244,24798,-7875,-2811,17418,10931,13823,-31766,-29265,-6058,-7520,26500,-3357,-7673,20625,-30557,-3401,9161,-26570,-20591,2809,15724,13472,7373,17310,-29086,-18072,20266,-19428,19993,26238,3117,12287,21489,8905,975,-17115,8120,-7167,-31341,-51,-10055,-14145,26574,-2739,-24458,29630,22129,29945,182,-30663,-981,-22801,-3300,20100,-20194,31350,28275,-24895,-6187,-17204,17533,1583,4634,-8882,-6712,2695,-27356,19125,-18628,26319,-16408,-20711,13659,-11262,-14688,-3508,-24485,-13782,18910,-11164,-21378,-30670,-5264,25202,-13118,16593,6880,1697,16564,-5708,23264,5977,32712,-29270,-8774,12340,-8918,-22717,15741,30431,-21228,-15907,-10782,-11879,-19330,-4448,-6782,-6204,30114,-24262,-30130,-3544,-27354,-2130,-14281,14326,365,-11045,15218,16903,14044,-25358,10123,2164,32705,5749,20597,-6036,-12076,4885,16221,23585,-17703,13821,28386,14283,-19851,-21559,4236,9976,27806,-28256,23401,3120,-28157,-16232,18882,6956,4055,9784,26551,-30042,-3227,1630,-16376,-13223,1633,-4396,25468,21492,27942,-24277,32322,-26044,-14730,17111,-21515,27114,-19256,14791,-7296,-12442,843,19933,-19106,-31239,138
34,-16396,-17130,20439,-12272,-24429,16677,5513,-26281,-8023,-2952,-28698,9110,-25322,12079,-32157,-23549,-22115,15713,-28836,-24999,-26721,-4156,25692,25621,-13303,19970,1940,4215,14638,6277,-11193,23404,19683,-28616,-29854,19910,-25374,21919,29958,-22309,20116,-5188,20692,-19331,12579,-9253,-1826,-1051,-28709,-15664,-7461,13313,-29191,15887,-15416,17974,-10812,19535,-11983,-26223,7808,-15362,19204,-22277,-14792,-24297,-14633,12581,6854,-124,6370,11737,25300,-26920,19420,3438,6039,30141,29451,4007,8260,4205,-18769,11204,-6748,24177,31902,20007,32219,-30113,-17020,-26207,23741,-8181,6985,-3466,-23319,-16956,-18608,27889,5272,13839,-641,-5231,-21875,4160,10162,-32547,23691,19659,-16231,-23911,-31625,-2293,434,13674,22935,12854,-29133,2161,24353,-13084,19302,-47,30365,5792,32727,14302,-22352,-27518,-31043,-19815,19092,8907,-22276,27989,22951,-4959,-29981,-22004,-31166,-29308,8813,3819,9637,-13226,-19747,32540,-15462,3993,-6813,-32124,9323,15983,7404,-7246,20201,13125,3743,-31443,19564,-15932,-31968,-18820,-15189,15496,24131,-29724,3576,18065,15266,-15678,15192,18864,20374,-4928,-24894,-4767,-26504,-6163,19709,-28196,-5676,-5084,-19483,5513,-23208,-23282,-19387,8862,21911,2364,27370,-31928,-17661,-17471,20714,2198,-30416,-10761,4335,24959,-6764,-24440,808,-999,-10778,13052,-20118,-9879,22550,-8895,-27750,20136,-22970,8168,15222,17726,2005,19186,18307,79,-5460,16621,26492,-17855,14335,59,-24866,28277,-19951,-5021,1496,13794,-16740,21556,10057,5301,21938,851,-3319,23516,-26357,-13665,-20754,22705,-17782,2467,4832,13609,-26685,-4915,29635,30129,-15449,15508,10224,-8376,-6403,22516,15481,-14744,-15531,-8010,-11349,-17316,-22427,23361,11998,-13623,-25924,-15503,22380,-29307,-7148,18195,-25587,5479,8200,-28251,-14740,18203,-7435,2980,24145,-6041,7530,-22271,-7886,10734,20428,-2674,18440,25157,-9795,-20999,11660,7589,14135,23476,19810,25284,20084,30391,-9014,-13094,-10327,26726,-21444,-31915,27251,388,22855,1990,-26587,-7267,-27266,-21798,18363,-13659,-16510,21989,25622,17419
,14891,8129,22606,24014,16850,-9038,-12264,28680,31858,-5284,-8691,19212,20779,21698,1305,-5159,-6000,-23593,19155,28029,6194,19988,13259,1611,15631,-22327,-18794,17542,-5526,-24937,-22690,8752,26982,-2207,-21499,-6116,-12513,27232,272,7266,658,24547,-20878,26168,12988,4279,28651,10577,-31601,29308,-7999,-3613,-7104,-15801,-26054,-21804,-12306,-31378,7868,26049,28835,20173,10592,23846,8644,2530,8740,-10745,31960,7650,-29096,-27440,23074,-8658,23079,10658,15755,-25128,13377,-23720,1251,-11274,23728,-10826,297,-22379,6781,9396,-4016,20478,29474,-6739,4374,29604,-31963,-26782,-29599,-8523,-12191,-2555,24767,29228,1371,-30418,-14345,5901,-13189,9248,-4785,-14503,-25583,-28421,12652,-24138,31939,15452,14829,2599,-660,-24070,31535,-12005,-14316,20561,4877,28721,-31050,-27252,-29315,-15597,31075,-4339,-28851,18124,11494,11632,-31511,-19845,-20060,17229,-26112,-31875,-11547,11301,-20335,2960,5813,20045,102,-24481,-11433,1250,1280,-27863,20528,-6951,-3623,22195,-7691,12303,23931,-25904,-27666,-11653,-16452,-31920,27454,232,-28367,6538,-9274,28529,11697,-22242,9686,-1350,21982,15570,26338,-17393,-32714,31131,-2844,-23026,19490,-25660,-3444,2457,27178,-11960,-16290,25954,11427,-12808,25960,-8884,-25443,-23899,-25873,-24643,-1905,-17430,-29566,-20825,-26111,1276,-27831,-26537,-13873,-16080,904,-27344,-17390,23701,-2982,13334,5126,-15252,26985,-27829,-584,-29380,9170,-83,31318,3398,-7573,2411,-12413,-30245,22868,5046,20188,8768,-27975,-25432,-32298,-25662,-26470,-7119,13194,-20799,-2837,-26398,20112,-12954,-10472,1518,-22076,-6547,17961,32274,26315,13426,11955,-2079,-19649,5227,-6518,24894,31081,17856,-16763,-13875,14053,7969,-7894,17559,-14564,-26270,-14003,16024,22107,7470,-14227,-22370,-14065,9959,-5242,25024,-24143,2990,-27445,682,16470,31680,-6369,-8874,-824,-5627,14469,-9353,23925,-20290,29538,26046,-23045,-12192,-32649,10210,288,5309,-29471,-10282,11332,-16858,-26926,-12334,-21722,28177,24159,-2191,-25354,-15247,22262,12690,10065,25621,-20374,5958,-23803,-15068,-10762,-17
852,-21360,22971,12645,2822,-14646,-16451,12335,-629,19075,-5923,32579,-30308,-27345,6094,10296,10763,-2722,26020,21800,-22115,-6528,9275,18895,-24573,-26112,-17493,4669,32652,8525,20304,-12466,-10683,30940,3070,-30514,-29194,206,10749,16303,-28160,3007,32749,-22582,11296,1902,5712,26416,32721,20655,-28967,-335,13316,18394,12226,27368,-20049,2074,16777,-2417,25551,-2639,-25738,12097,-26927,-20196,-26211,-3783,1572,14459,24208,8252,6792,-31214,-25130,19007,31484,-3122,-25306,-11604,13941,-14884,-2847,6234,-20468,12049,3066,21453,-19680,17448,-27945,-14825,-28412,-13600,5374,-30062,626,-17622,-14093,-31271,-5204,10048,-27849,27517,16032,-24199,17400,11269,1527,-11412,-10008,-25733,-16088,21588,26390,-18320,-8512,-17963,9379,3772,-9757,-19886,1812,-2532,-29966,19294,7499,11151,26883,1938,1003,-13316,16582,-21469,-19002,-11565,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 diff --git a/tensorflow/lite/micro/integration_tests/seanet/pad/pad6_input0_int16.csv b/tensorflow/lite/micro/integration_tests/seanet/pad/pad6_input0_int16.csv new file mode 100644 index 0000000..8b9094c --- /dev/null +++ b/tensorflow/lite/micro/integration_tests/seanet/pad/pad6_input0_int16.csv @@ -0,0 +1 @@ 
+-22907,-22140,11614,18890,4333,-30946,-23444,23127,-22231,-26254,18974,-7150,3759,-5139,437,12080,-1010,7869,-18148,9143,-30802,14385,26668,31802,2871,-24649,19253,-32530,22572,31262,27972,18422,-23816,-15892,-5768,-7061,7929,-31353,-16575,-32489,3794,-5070,32479,13396,14351,-7366,-176,3706,-7469,14824,18877,29673,-22220,18746,-16942,4719,-24573,-26597,12068,-19179,-19906,9945,2955,29236,-25741,1275,-31326,10625,-19463,14200,30896,-31103,-31461,-30047,4360,-22462,10097,25632,-32095,18434,13731,-12395,-20629,27474,319,-13520,-31097,4052,32498,23603,10143,32299,-15503,-21230,11324,-4158,-30425,23674,7979,22850,-12536,23778,-751,6758,-20070,-22042,3699,-22284,1667,-2564,-26512,21939,-15644,-29133,-20931,5820,20755,-10153,77,24738,-30444,-1676,22433,18527,-26072,11841,1344,-26075,-2717,-28788,11210,20004,6006,32687,6506,20218,24058,28246,-12251,16054,-22147,31344,-5087,-18692,30943,28807,-15389,-20510,25457,-25977,-31627,-791,21989,11887,23891,1627,-2489,22802,-2805,15762,3262,25842,11633,22957,-10552,-26700,28212,12256,-20831,5590,18472,-23233,32424,26408,13110,4081,-27340,-13116,-18670,-7353,-24153,11388,-22571,8592,-7567,-13940,-13478,8293,-8019,26773,32074,17293,8218,2155,-9074,-21423,22606,-23688,-27813,14799,-3331,13919,-20484,5491,-18433,-23909,29888,27269,-5889,-17341,271,-1730,4495,5968,2119,4340,5972,-5163,8696,-6210,7642,-138,2565,-25212,31732,9234,4076,3881,-18174,25780,-11972,-22766,-19200,7659,10955,22825,17269,2506,26022,8532,27623,1229,2602,-21591,30457,-29237,20801,-23118,-13572,31009,-30642,-25020,17329,9498,19127,24989,-7055,-12570,-11616,-21926,-26857,-24618,27113,7215,-16634,-16034,-31336,21307,4169,5341,-29564,-3865,-23380,19340,26566,-16188,14948,-26834,-25709,-26791,-21042,-12692,-6723,9377,-3238,-26634,-25198,-16405,-6703,-21605,15890,13102,-14740,20714,-7190,26891,-2077,22938,-14066,-18987,4714,413,-25613,-16133,-6039,25455,32071,-16775,-3683,25099,30108,-3067,15738,-12577,-15470,29884,-13671,-3791,-30590,14244,24798,-7875,-2811,17418,10931,13
823,-31766,-29265,-6058,-7520,26500,-3357,-7673,20625,-30557,-3401,9161,-26570,-20591,2809,15724,13472,7373,17310,-29086,-18072,20266,-19428,19993,26238,3117,12287,21489,8905,975,-17115,8120,-7167,-31341,-51,-10055,-14145,26574,-2739,-24458,29630,22129,29945,182,-30663,-981,-22801,-3300,20100,-20194,31350,28275,-24895,-6187,-17204,17533,1583,4634,-8882,-6712,2695,-27356,19125,-18628,26319,-16408,-20711,13659,-11262,-14688,-3508,-24485,-13782,18910,-11164,-21378,-30670,-5264,25202,-13118,16593,6880,1697,16564,-5708,23264,5977,32712,-29270,-8774,12340,-8918,-22717,15741,30431,-21228,-15907,-10782,-11879,-19330,-4448,-6782,-6204,30114,-24262,-30130,-3544,-27354,-2130,-14281,14326,365,-11045,15218,16903,14044,-25358,10123,2164,32705,5749,20597,-6036,-12076,4885,16221,23585,-17703,13821,28386,14283,-19851,-21559,4236,9976,27806,-28256,23401,3120,-28157,-16232,18882,6956,4055,9784,26551,-30042,-3227,1630,-16376,-13223,1633,-4396,25468,21492,27942,-24277,32322,-26044,-14730,17111,-21515,27114,-19256,14791,-7296,-12442,843,19933,-19106,-31239,13834,-16396,-17130,20439,-12272,-24429,16677,5513,-26281,-8023,-2952,-28698,9110,-25322,12079,-32157,-23549,-22115,15713,-28836,-24999,-26721,-4156,25692,25621,-13303,19970,1940,4215,14638,6277,-11193,23404,19683,-28616,-29854,19910,-25374,21919,29958,-22309,20116,-5188,20692,-19331,12579,-9253,-1826,-1051,-28709,-15664,-7461,13313,-29191,15887,-15416,17974,-10812,19535,-11983,-26223,7808,-15362,19204,-22277,-14792,-24297,-14633,12581,6854,-124,6370,11737,25300,-26920,19420,3438,6039,30141,29451,4007,8260,4205,-18769,11204,-6748,24177,31902,20007,32219,-30113,-17020,-26207,23741,-8181,6985,-3466,-23319,-16956,-18608,27889,5272,13839,-641,-5231,-21875,4160,10162,-32547,23691,19659,-16231,-23911,-31625,-2293,434,13674,22935,12854,-29133,2161,24353,-13084,19302,-47,30365,5792,32727,14302,-22352,-27518,-31043,-19815,19092,8907,-22276,27989,22951,-4959,-29981,-22004,-31166,-29308,8813,3819,9637,-13226,-19747,32540,-15462,3993,-6813,-32124,
9323,15983,7404,-7246,20201,13125,3743,-31443,19564,-15932,-31968,-18820,-15189,15496,24131,-29724,3576,18065,15266,-15678,15192,18864,20374,-4928,-24894,-4767,-26504,-6163,19709,-28196,-5676,-5084,-19483,5513,-23208,-23282,-19387,8862,21911,2364,27370,-31928,-17661,-17471,20714,2198,-30416,-10761,4335,24959,-6764,-24440,808,-999,-10778,13052,-20118,-9879,22550,-8895,-27750,20136,-22970,8168,15222,17726,2005,19186,18307,79,-5460,16621,26492,-17855,14335,59,-24866,28277,-19951,-5021,1496,13794,-16740,21556,10057,5301,21938,851,-3319,23516,-26357,-13665,-20754,22705,-17782,2467,4832,13609,-26685,-4915,29635,30129,-15449,15508,10224,-8376,-6403,22516,15481,-14744,-15531,-8010,-11349,-17316,-22427,23361,11998,-13623,-25924,-15503,22380,-29307,-7148,18195,-25587,5479,8200,-28251,-14740,18203,-7435,2980,24145,-6041,7530,-22271,-7886,10734,20428,-2674,18440,25157,-9795,-20999,11660,7589,14135,23476,19810,25284,20084,30391,-9014,-13094,-10327,26726,-21444,-31915,27251,388,22855,1990,-26587,-7267,-27266,-21798,18363,-13659,-16510,21989,25622,17419,14891,8129,22606,24014,16850,-9038,-12264,28680,31858,-5284,-8691,19212,20779,21698,1305,-5159,-6000,-23593,19155,28029,6194,19988,13259,1611,15631,-22327,-18794,17542,-5526,-24937,-22690,8752,26982,-2207,-21499,-6116,-12513,27232,272,7266,658,24547,-20878,26168,12988,4279,28651,10577,-31601,29308,-7999,-3613,-7104,-15801,-26054,-21804,-12306,-31378,7868,26049,28835,20173,10592,23846,8644,2530,8740,-10745,31960,7650,-29096,-27440,23074,-8658,23079,10658,15755,-25128,13377,-23720,1251,-11274,23728,-10826,297,-22379,6781,9396,-4016,20478,29474,-6739,4374,29604,-31963,-26782,-29599,-8523,-12191,-2555,24767,29228,1371,-30418,-14345,5901,-13189,9248,-4785,-14503,-25583,-28421,12652,-24138,31939,15452,14829,2599,-660,-24070,31535,-12005,-14316,20561,4877,28721,-31050,-27252,-29315,-15597,31075,-4339,-28851,18124,11494,11632,-31511,-19845,-20060,17229,-26112,-31875,-11547,11301,-20335,2960,5813,20045,102,-24481,-11433,1250,1280,-27863,205
28,-6951,-3623,22195,-7691,12303,23931,-25904,-27666,-11653,-16452,-31920,27454,232,-28367,6538,-9274,28529,11697,-22242,9686,-1350,21982,15570,26338,-17393,-32714,31131,-2844,-23026,19490,-25660,-3444,2457,27178,-11960,-16290,25954,11427,-12808,25960,-8884,-25443,-23899,-25873,-24643,-1905,-17430,-29566,-20825,-26111,1276,-27831,-26537,-13873,-16080,904,-27344,-17390,23701,-2982,13334,5126,-15252,26985,-27829,-584,-29380,9170,-83,31318,3398,-7573,2411,-12413,-30245,22868,5046,20188,8768,-27975,-25432,-32298,-25662,-26470,-7119,13194,-20799,-2837,-26398,20112,-12954,-10472,1518,-22076,-6547,17961,32274,26315,13426,11955,-2079,-19649,5227,-6518,24894,31081,17856,-16763,-13875,14053,7969,-7894,17559,-14564,-26270,-14003,16024,22107,7470,-14227,-22370,-14065,9959,-5242,25024,-24143,2990,-27445,682,16470,31680,-6369,-8874,-824,-5627,14469,-9353,23925,-20290,29538,26046,-23045,-12192,-32649,10210,288,5309,-29471,-10282,11332,-16858,-26926,-12334,-21722,28177,24159,-2191,-25354,-15247,22262,12690,10065,25621,-20374,5958,-23803,-15068,-10762,-17852,-21360,22971,12645,2822,-14646,-16451,12335,-629,19075,-5923,32579,-30308,-27345,6094,10296,10763,-2722,26020,21800,-22115,-6528,9275,18895,-24573,-26112,-17493,4669,32652,8525,20304,-12466,-10683,30940,3070,-30514,-29194,206,10749,16303,-28160,3007,32749,-22582,11296,1902,5712,26416,32721,20655,-28967,-335,13316,18394,12226,27368,-20049,2074,16777,-2417,25551,-2639,-25738,12097,-26927,-20196,-26211,-3783,1572,14459,24208,8252,6792,-31214,-25130,19007,31484,-3122,-25306,-11604,13941,-14884,-2847,6234,-20468,12049,3066,21453,-19680,17448,-27945,-14825,-28412,-13600,5374,-30062,626,-17622,-14093,-31271,-5204,10048,-27849,27517,16032,-24199,17400,11269,1527,-11412,-10008,-25733,-16088,21588,26390,-18320,-8512,-17963,9379,3772,-9757,-19886,1812,-2532,-29966,19294,7499,11151,26883,1938,1003,-13316,16582,-21469,-19002,-11565 diff --git a/tensorflow/lite/micro/integration_tests/seanet/pad/pad7.tflite 
b/tensorflow/lite/micro/integration_tests/seanet/pad/pad7.tflite new file mode 100644 index 0000000..db88220 Binary files /dev/null and b/tensorflow/lite/micro/integration_tests/seanet/pad/pad7.tflite differ diff --git a/tensorflow/lite/micro/integration_tests/seanet/pad/pad7_golden_int16.csv b/tensorflow/lite/micro/integration_tests/seanet/pad/pad7_golden_int16.csv new file mode 100644 index 0000000..85bf507 --- /dev/null +++ b/tensorflow/lite/micro/integration_tests/seanet/pad/pad7_golden_int16.csv @@ -0,0 +1 @@ +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,-21548,-9630,-14575,3879,-6992,-12654,-21378,25720,17853,9587,-23748,-13435,20521,-20590,28762,-11012,-3312,-5546,-24692,19395,-23574,15532,15503,-27039,-29851,115,2511,9391,-9730,32374,-21969,9070,-23187,-24414,-3226,28906,12826,-20845,-23442,1920,12314,-13460,29988,14505,19526,22632,2792,739,27431,30433,-26342,21554,11538,-11635,-5111,12925,-26449,-10814,21949,9922,14104,29268,541,-19228,-5788,-22384,-25167,-16443,30624,-25047,20629,-12910,14229,-31027,19632,11677,-13695,8817,8738,15784,-6378,-24235,24365,-15587,9972,17088,-20767,-11340,-9721,22727,23993,29821,29269,9587,1042,-28474,9367,-21453,-25766,-24776,-23247,-12282,25255,-9075,-19293,11496,15587,-25877,-705,23512,9580,30708,-6837,5836,-2483,27511,-2277,-2900,-18852,453,-25924,-28664,20913,-13397,-14490,15209,-15770,-11292,14017,-25151,-30214,16557,-26079,24405,-16036,1227,-1473,1796,5615,-17539,-12193,-26083,28881,29469,21292,542,-15600,-5473,6692,3905,-14410,-29117,-25716,-12145,-20335,27155,-8291,27053,-8912,-11338,30158,-29582,27980,5761,-14649,-861,-23173,-9908,-8990,4674,15778,-6643,-5766,19842,-26720,-11407,-30502,6726,-8364,4862,-2242,27536,-32015,32202,-30775,32118,16177,-14623,12446,-17634,17657,-1393,-19353,-5489,29060,-19949,-21283,-28243,-7112,-19537,3957,-12216,-27755,-23605,-12234,-20620,-26322,-26396,30184,-15095,31282,-28252,-28508,12051,-7344,13722,-2558,-30825,31148,-27237,10032,11062,26617,23128,-29570,-13906,9153,
19655,-4860,-12180,4258,27512,29757,5066,-31134,-3298,-12666,13482,29326,22936,1150,16048,-3070,25359,-31156,27137,-27682,11637,26831,-31413,15132,8098,32686,22632,-24345,5987,2346,-1634,1312,-7462,-4228,13069,9866,-13984,12852,8163,7949,25651,-13763,-14661,13651,-21156,4655,-9935,-28417,24691,11275,-25791,-8779,9394,-20504,10977,28245,-14227,26110,-1128,-10445,-9886,29858,14649,-1471,4509,25663,20744,-10882,-30326,-32558,7975,-32533,10963,15175,-21176,14526,-21445,22243,21999,-25779,18575,23043,76,-14962,-28042,-3034,-31526,4024,-22029,24263,10205,-3638,-7671,-16952,16292,9841,-25791,5110,-10813,30060,27187,10429,-24838,17120,-14903,24130,19120,14713,22721,-32422,-16909,-29602,-14857,17742,-12258,11180,9274,28215,9895,4655,-248,30897,-9383,-9811,32136,-12764,-11852,1872,12362,-25509,-22678,9854,7381,-21632,-23491,15844,29919,25833,21337,-28685,-5145,-759,29225,16848,-13758,24788,13573,15390,-28855,-13532,5204,9819,-18266,-15436,-26586,12863,31057,25879,-16250,11798,21584,17927,4174,18666,17973,31146,-31476,16527,1496,29923,-14058,-8032,-24777,-29632,-3255,17285,6784,28510,-3937,-18326,-5439,-21179,-23184,31145,-30483,17944,13271,25589,4403,4304,8446,1692,6863,-11641,-20805,-23552,-16906,26725,-15849,32361,6907,-18090,2120,10881,3742,31871,-30012,18122,4458,7239,12562,-13654,-22383,8361,-29198,27691,27316,-13667,-28686,-5380,-22858,19485,22144,15663,-24537,4968,14672,7451,1327,-11344,27664,-6670,-13626,-7233,17947,19179,7366,1997,9860,6696,22235,24188,14030,-11310,-13440,8432,3351,-3312,26083,-16981,-1377,-1142,-20959,-19962,21953,5679,-16306,17470,12459,-14382,1620,4063,28644,-20572,-15052,-12443,29091,-9919,-5462,18198,13728,-5384,-2999,21590,32679,-11531,-31629,-16054,-29854,-25973,-30333,-31026,-2373,-1920,25470,-12381,-25484,-29305,469,24795,5895,-16025,26899,24204,-32713,17647,-24969,-5929,11458,18415,-16178,-2361,1798,-4234,3719,-31065,-543,11292,-27007,19349,32418,7455,-21922,1860,4318,-1277,9656,-5573,22521,6645,-13400,13013,-8332,-29075,13434,-9500,-23105,
3775,-23261,10819,30346,26012,-9585,-1298,-22568,7996,16988,-26814,-16915,13268,-17194,-1719,-32707,1740,11020,-24301,-11003,28830,-1142,-14550,-29396,-28735,-12852,-32026,21942,23510,13000,31429,-28658,11934,32582,-18419,13056,-7803,-8182,9606,-24411,-10990,-17582,-10531,7139,7813,-7195,-30643,-8193,-17823,6216,6819,10024,-5695,-32358,-11688,21714,-6880,-20818,-7182,4376,28893,3111,18672,-6440,-16970,-7088,-22461,31564,15947,-26706,16646,-28028,8183,-28902,333,-28938,20827,-5549,-27531,8073,-3816,-16605,-2238,-30034,-2612,4993,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 diff --git a/tensorflow/lite/micro/integration_tests/seanet/pad/pad7_input0_int16.csv b/tensorflow/lite/micro/integration_tests/seanet/pad/pad7_input0_int16.csv new file mode 100644 index 0000000..e0d9bbc --- /dev/null +++ b/tensorflow/lite/micro/integration_tests/seanet/pad/pad7_input0_int16.csv @@ -0,0 +1 @@ +-21548,-9630,-14575,3879,-6992,-12654,-21378,25720,17853,9587,-23748,-13435,20521,-20590,28762,-11012,-3312,-5546,-24692,19395,-23574,15532,15503,-27039,-29851,115,2511,9391,-9730,32374,-21969,9070,-23187,-24414,-3226,28906,12826,-20845,-23442,1920,12314,-13460,29988,14505,19526,22632,2792,739,27431,30433,-26342,21554,11538,-11635,-5111,12925,-26449,-10814,21949,9922,14104,29268,541,-19228,-5788,-22384,-25167,-16443,30624,-25047,20629,-12910,14229,-31027,19632,11677,-13695,8817,8738,15784,-6378,-24235,24365,-15587,9972,17088,-20767,-11340,-9721,22727,23993,29821,29269,9587,1042,-28474,9367,-21453,-25766,-24776,-23247,-12282,25255,-9075,-19293,11496,15587,-25877,-705,23512,9580,30708,-6837,5836,-2483,27511,-2277,-2900,-18852,453,-25924,-28664,20913,-13397,-14490,15209,-15770,-11292,14017,-25151,-30214,16557,-26079,24405,-16036,1227,-1473,1796,5615,-17539,-12193,-26083,28881,29469,21292,542,-15600,-5473,6692,3905,-14410,-29117,-25716,-12145,-20335,27155,-8291,27053,-8912,-11338,30158,-29582,27980,5761,-14649,-861,-23173,-9908,-8990,4674,15778,-6643,-5766,19842,-26720,-11407,
-30502,6726,-8364,4862,-2242,27536,-32015,32202,-30775,32118,16177,-14623,12446,-17634,17657,-1393,-19353,-5489,29060,-19949,-21283,-28243,-7112,-19537,3957,-12216,-27755,-23605,-12234,-20620,-26322,-26396,30184,-15095,31282,-28252,-28508,12051,-7344,13722,-2558,-30825,31148,-27237,10032,11062,26617,23128,-29570,-13906,9153,19655,-4860,-12180,4258,27512,29757,5066,-31134,-3298,-12666,13482,29326,22936,1150,16048,-3070,25359,-31156,27137,-27682,11637,26831,-31413,15132,8098,32686,22632,-24345,5987,2346,-1634,1312,-7462,-4228,13069,9866,-13984,12852,8163,7949,25651,-13763,-14661,13651,-21156,4655,-9935,-28417,24691,11275,-25791,-8779,9394,-20504,10977,28245,-14227,26110,-1128,-10445,-9886,29858,14649,-1471,4509,25663,20744,-10882,-30326,-32558,7975,-32533,10963,15175,-21176,14526,-21445,22243,21999,-25779,18575,23043,76,-14962,-28042,-3034,-31526,4024,-22029,24263,10205,-3638,-7671,-16952,16292,9841,-25791,5110,-10813,30060,27187,10429,-24838,17120,-14903,24130,19120,14713,22721,-32422,-16909,-29602,-14857,17742,-12258,11180,9274,28215,9895,4655,-248,30897,-9383,-9811,32136,-12764,-11852,1872,12362,-25509,-22678,9854,7381,-21632,-23491,15844,29919,25833,21337,-28685,-5145,-759,29225,16848,-13758,24788,13573,15390,-28855,-13532,5204,9819,-18266,-15436,-26586,12863,31057,25879,-16250,11798,21584,17927,4174,18666,17973,31146,-31476,16527,1496,29923,-14058,-8032,-24777,-29632,-3255,17285,6784,28510,-3937,-18326,-5439,-21179,-23184,31145,-30483,17944,13271,25589,4403,4304,8446,1692,6863,-11641,-20805,-23552,-16906,26725,-15849,32361,6907,-18090,2120,10881,3742,31871,-30012,18122,4458,7239,12562,-13654,-22383,8361,-29198,27691,27316,-13667,-28686,-5380,-22858,19485,22144,15663,-24537,4968,14672,7451,1327,-11344,27664,-6670,-13626,-7233,17947,19179,7366,1997,9860,6696,22235,24188,14030,-11310,-13440,8432,3351,-3312,26083,-16981,-1377,-1142,-20959,-19962,21953,5679,-16306,17470,12459,-14382,1620,4063,28644,-20572,-15052,-12443,29091,-9919,-5462,18198,13728,-5384,-2999,21590,3
2679,-11531,-31629,-16054,-29854,-25973,-30333,-31026,-2373,-1920,25470,-12381,-25484,-29305,469,24795,5895,-16025,26899,24204,-32713,17647,-24969,-5929,11458,18415,-16178,-2361,1798,-4234,3719,-31065,-543,11292,-27007,19349,32418,7455,-21922,1860,4318,-1277,9656,-5573,22521,6645,-13400,13013,-8332,-29075,13434,-9500,-23105,3775,-23261,10819,30346,26012,-9585,-1298,-22568,7996,16988,-26814,-16915,13268,-17194,-1719,-32707,1740,11020,-24301,-11003,28830,-1142,-14550,-29396,-28735,-12852,-32026,21942,23510,13000,31429,-28658,11934,32582,-18419,13056,-7803,-8182,9606,-24411,-10990,-17582,-10531,7139,7813,-7195,-30643,-8193,-17823,6216,6819,10024,-5695,-32358,-11688,21714,-6880,-20818,-7182,4376,28893,3111,18672,-6440,-16970,-7088,-22461,31564,15947,-26706,16646,-28028,8183,-28902,333,-28938,20827,-5549,-27531,8073,-3816,-16605,-2238,-30034,-2612,4993 diff --git a/tensorflow/lite/micro/integration_tests/seanet/pad/pad8.tflite b/tensorflow/lite/micro/integration_tests/seanet/pad/pad8.tflite new file mode 100644 index 0000000..c25392c Binary files /dev/null and b/tensorflow/lite/micro/integration_tests/seanet/pad/pad8.tflite differ diff --git a/tensorflow/lite/micro/integration_tests/seanet/pad/pad8_golden_int16.csv b/tensorflow/lite/micro/integration_tests/seanet/pad/pad8_golden_int16.csv new file mode 100644 index 0000000..9eef2cc --- /dev/null +++ b/tensorflow/lite/micro/integration_tests/seanet/pad/pad8_golden_int16.csv @@ -0,0 +1 @@ 
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,-18626,-28155,-28367,17924,3487,-5817,-6060,23519,15635,-4368,-14431,-20921,26372,-7256,351,2540,20255,19184,-25937,-5657,-398,-15838,-24007,-16937,-7236,16527,-19470,-7985,-21254,-17933,20454,-4498,-9454,-3940,-32190,32663,21702,-22057,-28379,27560,15974,-6678,-18322,13508,32515,1077,25556,-29686,-29676,-6595,7516,-17935,-16252,-23392,-6204,-28993,14450,-11544,28793,-20443,-20440,2827,29022,-25919,-18968,-12183,5145,9770,26197,-9142,12303,8571,5664,17851,-29335,-23299,23055,-12072,-29788,29018,29351,-6341,13394,17478,-23468,-23661,-23687,10837,9040,22872,23011,15529,2856,-24246,24383,-26699,-11320,-26824,-24414,-5108,-21931,-24689,-2347,-18512,27187,-17628,25862,24130,-9433,5912,-19792,7807,-23395,-27738,20722,-6272,6805,-17556,10690,23991,21428,-3689,-12575,-1113,6496,15221,10937,12924,-30298,-19376,-140,14474,-2662,-1081,18453,-30142,28434,5952,-14180,32654,5878,-26271,17189,4274,-19136,21517,-7512,-12094,10927,30688,23371,-16690,10972,20901,2456,20977,14036,7331,350,31136,27418,14544,19492,12210,25649,-5197,8203,-4630,-18066,-8328,-21296,25272,8471,-12948,-26697,-18909,24679,-16335,17615,-7255,-26776,-298,-29497,-7488,-23698,2705,9119,31956,-11363,17923,-2110,18432,-14473,-12085,-24040,-5363,-29438,21883,-23600,15515,-10507,-4169,14069,32725,-25013,5056,-21680,-12606,-18974,26603,-18279,8163,-26934,10796,23740,9791,-15229,24791,-9718,-14611,-19530,-23330,7098,-10615,-27139,760,4529,-23109,18072,13742,-40,-26748,16571,20597,8470,-20286,16018,-7177,-9557,6175,-10785,-3496,11171,-22249,-20323,12647,21974,9167,1196,-29143,6671,8403,-26032,31146,12604,9962,16686,13186,-6167,-28491,-24133,-29137,-8953,-11813,6187,-14350,-7079,19841,-10376,-2523,-11195,10535,-9329,-23410,29022,26878,-2014,23166,2770,2345,-19401,-17186,-22832,6034,-7805,15184,-21234,10594,-6240,-23781,5240,8527,11340,-3953,-10419,-27332,10005,-21833,14060,22382,-1334,-27667,-13965,-26429,5650,27661,9122,9201,-13800,12960,-13231,31367,-3157
0,17224,-9295,14177,-21775,-4971,29696,-32654,432,-15297,10215,14975,21872,14456,27706,-4326,-7961,3054,-26657,26111,18331,-28719,25921,696,-11627,-6602,-25354,32265,27666,30577,22026,-31111,-24158,-30168,4036,13783,16954,-2094,30961,10327,3076,-30897,8378,27886,16015,1964,-19224,-7897,-574,-4209,18965,12940,3473,-5561,-9608,16717,4474,-17570,-10838,-12840,-31705,-11835,26692,-30109,-17638,-14059,1459,5326,-8636,-12236,18801,19651,-8966,32344,16297,30889,-25980,30821,13752,-20294,8546,-25344,8723,-29762,-17499,-16148,22507,-29168,5312,-28825,27603,12220,-10406,-26667,-19678,10552,15930,-23150,16147,-10171,16407,-24373,-30907,-16903,-2961,-18217,-16666,-21129,27916,-17072,-25235,20357,12049,20880,-20533,23489,31818,-710,13993,-17278,-2391,18178,29606,-31089,-9995,-10261,13156,6362,-4768,15518,13598,-14454,-6931,26684,5854,-26518,26053,-14091,-13569,28364,-12415,12874,-13371,-16027,26495,-12947,6962,-31036,-14843,-7296,-7629,31805,16910,15131,-258,13740,-21613,831,30896,8866,21,19832,-9657,-22538,28322,2315,-10492,28220,-17169,-7903,-27372,18757,20320,3154,-15871,4398,8413,-27549,11037,-20484,-22152,14525,-20350,-16683,24902,12414,-27559,12682,32631,22728,-19924,-5792,30384,-10270,26176,25765,-514,15047,-4276,-25817,11385,-32762,6225,2726,-17199,12888,29305,-9428,-17469,14879,-4252,12814,-31032,23556,-6771,25581,-9405,-31390,22290,26367,-27531,31277,-938,4142,-20318,-21139,25231,-13428,-1734,-2987,15469,-25962,-9878,25261,14374,2495,-16427,22843,-3887,19801,20205,10886,18415,-8560,22686,-18448,19815,32060,10689,9103,-24726,27550,15901,22243,12190,19281,-23259,10237,-16705,-30633,985,-16590,16989,-6272,25450,5944,-1952,-29962,26331,15657,-5751,-29371,-13514,17545,-15142,-16354,-21142,-15936,7414,-24715,-25781,-24577,5981,-1140,21221,-18967,-31803,-18723,2788,23313,21629,18012,-1927,-2426,13670,3178,12848,10636,11528,-26640,13301,-26662,27185,-7929,7619,-14420,25588,-7530,11092,-22002,28918,415,-1610,-20631,7156,-27814,24423,-30694,27223,-11684,-618,-14490,11155,-2772,2
1189,-18350,22188,23559,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 diff --git a/tensorflow/lite/micro/integration_tests/seanet/pad/pad8_input0_int16.csv b/tensorflow/lite/micro/integration_tests/seanet/pad/pad8_input0_int16.csv new file mode 100644 index 0000000..e6ad472 --- /dev/null +++ b/tensorflow/lite/micro/integration_tests/seanet/pad/pad8_input0_int16.csv @@ -0,0 +1 @@ +-18626,-28155,-28367,17924,3487,-5817,-6060,23519,15635,-4368,-14431,-20921,26372,-7256,351,2540,20255,19184,-25937,-5657,-398,-15838,-24007,-16937,-7236,16527,-19470,-7985,-21254,-17933,20454,-4498,-9454,-3940,-32190,32663,21702,-22057,-28379,27560,15974,-6678,-18322,13508,32515,1077,25556,-29686,-29676,-6595,7516,-17935,-16252,-23392,-6204,-28993,14450,-11544,28793,-20443,-20440,2827,29022,-25919,-18968,-12183,5145,9770,26197,-9142,12303,8571,5664,17851,-29335,-23299,23055,-12072,-29788,29018,29351,-6341,13394,17478,-23468,-23661,-23687,10837,9040,22872,23011,15529,2856,-24246,24383,-26699,-11320,-26824,-24414,-5108,-21931,-24689,-2347,-18512,27187,-17628,25862,24130,-9433,5912,-19792,7807,-23395,-27738,20722,-6272,6805,-17556,10690,23991,21428,-3689,-12575,-1113,6496,15221,10937,12924,-30298,-19376,-140,14474,-2662,-1081,18453,-30142,28434,5952,-14180,32654,5878,-26271,17189,4274,-19136,21517,-7512,-12094,10927,30688,23371,-16690,10972,20901,2456,20977,14036,7331,350,31136,27418,14544,19492,12210,25649,-5197,8203,-4630,-18066,-8328,-21296,25272,8471,-12948,-26697,-18909,24679,-16335,17615,-7255,-26776,-298,-29497,-7488,-23698,2705,9119,31956,-11363,17923,-2110,18432,-14473,-12085,-24040,-5363,-29438,21883,-23600,15515,-10507,-4169,14069,32725,-25013,5056,-21680,-12606,-18974,26603,-18279,8163,-26934,10796,23740,9791,-15229,24791,-9718,-14611,-19530,-23330,7098,-10615,-27139,760,4529,-23109,18072,13742,-40,-26748,16571,20597,8470,-20286,16018,-7177,-9557,6175,-10785,-3496,11171,-22249,-20323,12647,21974,9167,1196,-29143,6671,8403,-26032,31146,12604,9962,16686,13186,-6167
,-28491,-24133,-29137,-8953,-11813,6187,-14350,-7079,19841,-10376,-2523,-11195,10535,-9329,-23410,29022,26878,-2014,23166,2770,2345,-19401,-17186,-22832,6034,-7805,15184,-21234,10594,-6240,-23781,5240,8527,11340,-3953,-10419,-27332,10005,-21833,14060,22382,-1334,-27667,-13965,-26429,5650,27661,9122,9201,-13800,12960,-13231,31367,-31570,17224,-9295,14177,-21775,-4971,29696,-32654,432,-15297,10215,14975,21872,14456,27706,-4326,-7961,3054,-26657,26111,18331,-28719,25921,696,-11627,-6602,-25354,32265,27666,30577,22026,-31111,-24158,-30168,4036,13783,16954,-2094,30961,10327,3076,-30897,8378,27886,16015,1964,-19224,-7897,-574,-4209,18965,12940,3473,-5561,-9608,16717,4474,-17570,-10838,-12840,-31705,-11835,26692,-30109,-17638,-14059,1459,5326,-8636,-12236,18801,19651,-8966,32344,16297,30889,-25980,30821,13752,-20294,8546,-25344,8723,-29762,-17499,-16148,22507,-29168,5312,-28825,27603,12220,-10406,-26667,-19678,10552,15930,-23150,16147,-10171,16407,-24373,-30907,-16903,-2961,-18217,-16666,-21129,27916,-17072,-25235,20357,12049,20880,-20533,23489,31818,-710,13993,-17278,-2391,18178,29606,-31089,-9995,-10261,13156,6362,-4768,15518,13598,-14454,-6931,26684,5854,-26518,26053,-14091,-13569,28364,-12415,12874,-13371,-16027,26495,-12947,6962,-31036,-14843,-7296,-7629,31805,16910,15131,-258,13740,-21613,831,30896,8866,21,19832,-9657,-22538,28322,2315,-10492,28220,-17169,-7903,-27372,18757,20320,3154,-15871,4398,8413,-27549,11037,-20484,-22152,14525,-20350,-16683,24902,12414,-27559,12682,32631,22728,-19924,-5792,30384,-10270,26176,25765,-514,15047,-4276,-25817,11385,-32762,6225,2726,-17199,12888,29305,-9428,-17469,14879,-4252,12814,-31032,23556,-6771,25581,-9405,-31390,22290,26367,-27531,31277,-938,4142,-20318,-21139,25231,-13428,-1734,-2987,15469,-25962,-9878,25261,14374,2495,-16427,22843,-3887,19801,20205,10886,18415,-8560,22686,-18448,19815,32060,10689,9103,-24726,27550,15901,22243,12190,19281,-23259,10237,-16705,-30633,985,-16590,16989,-6272,25450,5944,-1952,-29962,26331,15657,-
5751,-29371,-13514,17545,-15142,-16354,-21142,-15936,7414,-24715,-25781,-24577,5981,-1140,21221,-18967,-31803,-18723,2788,23313,21629,18012,-1927,-2426,13670,3178,12848,10636,11528,-26640,13301,-26662,27185,-7929,7619,-14420,25588,-7530,11092,-22002,28918,415,-1610,-20631,7156,-27814,24423,-30694,27223,-11684,-618,-14490,11155,-2772,21189,-18350,22188,23559 diff --git a/tensorflow/lite/micro/integration_tests/seanet/pad/pad9.tflite b/tensorflow/lite/micro/integration_tests/seanet/pad/pad9.tflite new file mode 100644 index 0000000..e95d4dc Binary files /dev/null and b/tensorflow/lite/micro/integration_tests/seanet/pad/pad9.tflite differ diff --git a/tensorflow/lite/micro/integration_tests/seanet/pad/pad9_golden_int16.csv b/tensorflow/lite/micro/integration_tests/seanet/pad/pad9_golden_int16.csv new file mode 100644 index 0000000..edbf918 --- /dev/null +++ b/tensorflow/lite/micro/integration_tests/seanet/pad/pad9_golden_int16.csv @@ -0,0 +1 @@ +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,-2324,15875,-3324,-1052,-9375,-9386,-21207,-20312,-22109,9985,-28186,-19193,625,-15801,16295,28092,8435,-2668,-27619,-23797,-21012,30180,-15309,8132,3439,-12573,16040,13541,20361,-6949,12835,-24449,4707,9178,-21519,-11801,-13835,-2203,-22217,9762,13946,22693,-6949,-19015,-30925,19140,-13261,11738,16092,15172,3497,18157,-30039,29035,4948,-14116,-7623,-495,28242,-30139,20763,-10516,-27829,-15700,21674,6615,2133,-13076,16498,-2692,9970,17085,-12058,30309,-1574,8947,-5643,-30367,-30829,-17957,-27035,2479,20815,-16202,7654,-31756,-13309,-11272,17194,25902,-20098,18890,32356,28228,-30204,2623,9313,-11135,27108,5157,29220,-13499,6631,-1560,21511,-11783,-15466,-16349,-20770,3274,-31442,22979,5461,8426,-15380,29212,-26029,28969,-3870,-11980,906,9020,-30597,-19294,8657,-10669,-29076,-25862,11282,18116,16964,-9167,-23692,3919,-13810,-23738,-14951,23032,6917,-14832,1071,-31103,29050,-5798,8571,7229,-10991,30278,-1
2899,24078,11244,-2164,13291,-2517,28060,-29140,10470,-32170,-6424,5088,-18268,-28517,-17443,-24753,-15841,8652,-23713,13331,8811,-12209,-30718,30298,-5395,-4967,-32243,-11351,82,-31475,6518,-5643,-21774,14152,30522,-8275,10127,5796,-16351,27145,-30279,7514,1554,-6068,-26701,-7254,-32276,12010,-1147,-19879,-17555,28178,-14484,21487,-8050,27363,-17710,-29340,-255,10161,7188,22678,23385,19259,12306,10994,-2036,31637,-5562,-19714,26129,-14002,-13009,4315,24731,-11458,10916,15340,-2279,25513,3847,-12273,-20783,32472,-16454,-10440,-3853,-11288,30240,30566,-7721,23568,-18025,-3164,-11059,26948,-25829,26442,-19978,5867,29715,-24176,-20670,-20459,-1783,15351,30534,-10988,17776,-4133,14948,-6778,-16998,31253,-23781,-19490,8610,1808,22236,-9253,28727,-2812,-5161,-8159,-694,-13283,-21388,-13116,-3483,-11160,-22815,18078,30572,11222,15538,16689,-8274,9280,24172,-23696,32443,-32614,-20129,18839,-24912,-2305,31042,-15940,16244,-22405,25932,-4916,-16909,-22041,-25260,-15453,27103,-30912,-30157,-30504,-8685,-20921,-2873,-21183,12196,26919,29952,-28783,-1806,17768,-1742,-13945,-1778,-2956,-26844,-26282,14527,-8186,-27991,21869,-15031,-27632,8831,20437,7414,9391,-5449,7778,25933,202,8673,-859,-19504,-23568,26,13616,12874,30939,22400,-7149,4568,27412,-22978,31685,21772,1591,26104,28436,1321,25185,-18301,-18615,-27824,-5632,26215,29532,-12698,-3787,-2206,2577,-31814,28386,16024,-141,26651,-23443,24230,23501,-19404,-3987,-7362,18075,6923,-3439,-1201,11049,21748,25094,14329,-26706,14992,1771,-9415,11978,-2017,-8204,-7619,-2894,7622,-6272,13775,21731,30637,-14972,-29021,29504,30766,16808,4948,23513,22156,-16383,4824,23941,27896,-3796,-2434,-21105,19811,-25858,-10418,-12352,15353,-30441,17223,-21597,-6686,-17429,28720,-22261,25029,29493,-24605,-24879,7088,14965,-7312,-6806,-32572,-12624,11335,-7265,26415,-13143,715,-23785,-7390,5540,24116,9094,-4174,-22096,-25813,-12089,-8226,-22671,10202,13100,-11784,17963,-23427,-15373,3281,-16240,22917,-5555,-2154,-26212,10185,-14511,26427,6889,30882,-2
7872,-20874,18755,-30881,-30570,213,32262,9439,-27424,-18438,-30310,-22134,10640,14189,23649,19717,-126,8712,17384,-3935,27023,-8416,22128,27986,-20594,2387,26406,12856,31123,30908,-2119,10613,-13581,3638,-650,16966,17961,-10169,7707,-11490,-17486,-31778,9024,1882,14861,22471,29835,-4090,-3189,10900,2345,7081,-25015,-24658,1242,-13078,31233,-24174,-26748,-22401,-26744,-21169,-3327,-14561,-27814,-3049,28528,-26646,22041,7032,-19727,-19525,31017,-139,7665,-26385,11142,-2348,-15629,11600,5595,21352,-28020,-3402,26660,30682,26726,-13542,-30428,25496,1363,-20357,23426,-16807,27741,-6114,19207,13791,30759,5857,-7544,17094,-6565,-1592,16646,10215,25756,19760,17242,19300,16240,10453,-16740,25514,-6485,-29705,31234,-9575,19797,-27059,-24629,25798,-13057,-31882,19323,-20199,-26373,-29538,-31222,-24996,-11871,8765,26327,-7382,-31509,-9520,-2430,-3993,4190,-24144,14667,27037,4430,9809,17198,-26289,3288,3860,8844,-28092,10297,-26301,-5311,-21888,-11355,5779,1017,32684,-27373,-7871,6080,-21323,-523,-7574,18642,-12066,11889,3002,-20709,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 diff --git a/tensorflow/lite/micro/integration_tests/seanet/pad/pad9_input0_int16.csv b/tensorflow/lite/micro/integration_tests/seanet/pad/pad9_input0_int16.csv new file mode 100644 index 0000000..c7e7bd6 --- /dev/null +++ b/tensorflow/lite/micro/integration_tests/seanet/pad/pad9_input0_int16.csv @@ -0,0 +1 @@ 
+-2324,15875,-3324,-1052,-9375,-9386,-21207,-20312,-22109,9985,-28186,-19193,625,-15801,16295,28092,8435,-2668,-27619,-23797,-21012,30180,-15309,8132,3439,-12573,16040,13541,20361,-6949,12835,-24449,4707,9178,-21519,-11801,-13835,-2203,-22217,9762,13946,22693,-6949,-19015,-30925,19140,-13261,11738,16092,15172,3497,18157,-30039,29035,4948,-14116,-7623,-495,28242,-30139,20763,-10516,-27829,-15700,21674,6615,2133,-13076,16498,-2692,9970,17085,-12058,30309,-1574,8947,-5643,-30367,-30829,-17957,-27035,2479,20815,-16202,7654,-31756,-13309,-11272,17194,25902,-20098,18890,32356,28228,-30204,2623,9313,-11135,27108,5157,29220,-13499,6631,-1560,21511,-11783,-15466,-16349,-20770,3274,-31442,22979,5461,8426,-15380,29212,-26029,28969,-3870,-11980,906,9020,-30597,-19294,8657,-10669,-29076,-25862,11282,18116,16964,-9167,-23692,3919,-13810,-23738,-14951,23032,6917,-14832,1071,-31103,29050,-5798,8571,7229,-10991,30278,-12899,24078,11244,-2164,13291,-2517,28060,-29140,10470,-32170,-6424,5088,-18268,-28517,-17443,-24753,-15841,8652,-23713,13331,8811,-12209,-30718,30298,-5395,-4967,-32243,-11351,82,-31475,6518,-5643,-21774,14152,30522,-8275,10127,5796,-16351,27145,-30279,7514,1554,-6068,-26701,-7254,-32276,12010,-1147,-19879,-17555,28178,-14484,21487,-8050,27363,-17710,-29340,-255,10161,7188,22678,23385,19259,12306,10994,-2036,31637,-5562,-19714,26129,-14002,-13009,4315,24731,-11458,10916,15340,-2279,25513,3847,-12273,-20783,32472,-16454,-10440,-3853,-11288,30240,30566,-7721,23568,-18025,-3164,-11059,26948,-25829,26442,-19978,5867,29715,-24176,-20670,-20459,-1783,15351,30534,-10988,17776,-4133,14948,-6778,-16998,31253,-23781,-19490,8610,1808,22236,-9253,28727,-2812,-5161,-8159,-694,-13283,-21388,-13116,-3483,-11160,-22815,18078,30572,11222,15538,16689,-8274,9280,24172,-23696,32443,-32614,-20129,18839,-24912,-2305,31042,-15940,16244,-22405,25932,-4916,-16909,-22041,-25260,-15453,27103,-30912,-30157,-30504,-8685,-20921,-2873,-21183,12196,26919,29952,-28783,-1806,17768,-1742,-13945,-1778,-
2956,-26844,-26282,14527,-8186,-27991,21869,-15031,-27632,8831,20437,7414,9391,-5449,7778,25933,202,8673,-859,-19504,-23568,26,13616,12874,30939,22400,-7149,4568,27412,-22978,31685,21772,1591,26104,28436,1321,25185,-18301,-18615,-27824,-5632,26215,29532,-12698,-3787,-2206,2577,-31814,28386,16024,-141,26651,-23443,24230,23501,-19404,-3987,-7362,18075,6923,-3439,-1201,11049,21748,25094,14329,-26706,14992,1771,-9415,11978,-2017,-8204,-7619,-2894,7622,-6272,13775,21731,30637,-14972,-29021,29504,30766,16808,4948,23513,22156,-16383,4824,23941,27896,-3796,-2434,-21105,19811,-25858,-10418,-12352,15353,-30441,17223,-21597,-6686,-17429,28720,-22261,25029,29493,-24605,-24879,7088,14965,-7312,-6806,-32572,-12624,11335,-7265,26415,-13143,715,-23785,-7390,5540,24116,9094,-4174,-22096,-25813,-12089,-8226,-22671,10202,13100,-11784,17963,-23427,-15373,3281,-16240,22917,-5555,-2154,-26212,10185,-14511,26427,6889,30882,-27872,-20874,18755,-30881,-30570,213,32262,9439,-27424,-18438,-30310,-22134,10640,14189,23649,19717,-126,8712,17384,-3935,27023,-8416,22128,27986,-20594,2387,26406,12856,31123,30908,-2119,10613,-13581,3638,-650,16966,17961,-10169,7707,-11490,-17486,-31778,9024,1882,14861,22471,29835,-4090,-3189,10900,2345,7081,-25015,-24658,1242,-13078,31233,-24174,-26748,-22401,-26744,-21169,-3327,-14561,-27814,-3049,28528,-26646,22041,7032,-19727,-19525,31017,-139,7665,-26385,11142,-2348,-15629,11600,5595,21352,-28020,-3402,26660,30682,26726,-13542,-30428,25496,1363,-20357,23426,-16807,27741,-6114,19207,13791,30759,5857,-7544,17094,-6565,-1592,16646,10215,25756,19760,17242,19300,16240,10453,-16740,25514,-6485,-29705,31234,-9575,19797,-27059,-24629,25798,-13057,-31882,19323,-20199,-26373,-29538,-31222,-24996,-11871,8765,26327,-7382,-31509,-9520,-2430,-3993,4190,-24144,14667,27037,4430,9809,17198,-26289,3288,3860,8844,-28092,10297,-26301,-5311,-21888,-11355,5779,1017,32684,-27373,-7871,6080,-21323,-523,-7574,18642,-12066,11889,3002,-20709 diff --git 
a/tensorflow/lite/micro/integration_tests/seanet/quantize/BUILD b/tensorflow/lite/micro/integration_tests/seanet/quantize/BUILD new file mode 100644 index 0000000..58217b0 --- /dev/null +++ b/tensorflow/lite/micro/integration_tests/seanet/quantize/BUILD @@ -0,0 +1,125 @@ +# Description: +# generated integration test for one specific kernel in a model. +load( + "//tensorflow/lite/micro:build_def.bzl", + "generate_cc_arrays", + "micro_copts", +) + +package( + default_visibility = ["//visibility:public"], + # Disabling layering_check because of http://b/177257332 + features = ["-layering_check"], + licenses = ["notice"], +) + +generate_cc_arrays( + name = "generated_quantize0_model_data_cc", + src = "quantize0.tflite", + out = "quantize0_model_data.cc", +) + +generate_cc_arrays( + name = "generated_quantize0_model_data_hdr", + src = "quantize0.tflite", + out = "quantize0_model_data.h", +) + +generate_cc_arrays( + name = "generated_quantize1_model_data_cc", + src = "quantize1.tflite", + out = "quantize1_model_data.cc", +) + +generate_cc_arrays( + name = "generated_quantize1_model_data_hdr", + src = "quantize1.tflite", + out = "quantize1_model_data.h", +) + +generate_cc_arrays( + name = "generated_quantize0_input0_int32_test_data_cc", + src = "quantize0_input0_int32.csv", + out = "quantize0_input0_int32_test_data.cc", +) + +generate_cc_arrays( + name = "generated_quantize0_input0_int32_test_data_hdr", + src = "quantize0_input0_int32.csv", + out = "quantize0_input0_int32_test_data.h", +) + +generate_cc_arrays( + name = "generated_quantize0_golden_int16_test_data_cc", + src = "quantize0_golden_int16.csv", + out = "quantize0_golden_int16_test_data.cc", +) + +generate_cc_arrays( + name = "generated_quantize0_golden_int16_test_data_hdr", + src = "quantize0_golden_int16.csv", + out = "quantize0_golden_int16_test_data.h", +) + +generate_cc_arrays( + name = "generated_quantize1_input0_int16_test_data_cc", + src = "quantize1_input0_int16.csv", + out = 
"quantize1_input0_int16_test_data.cc", +) + +generate_cc_arrays( + name = "generated_quantize1_input0_int16_test_data_hdr", + src = "quantize1_input0_int16.csv", + out = "quantize1_input0_int16_test_data.h", +) + +generate_cc_arrays( + name = "generated_quantize1_golden_int32_test_data_cc", + src = "quantize1_golden_int32.csv", + out = "quantize1_golden_int32_test_data.cc", +) + +generate_cc_arrays( + name = "generated_quantize1_golden_int32_test_data_hdr", + src = "quantize1_golden_int32.csv", + out = "quantize1_golden_int32_test_data.h", +) + +cc_library( + name = "models_and_testdata", + srcs = [ + "generated_quantize0_golden_int16_test_data_cc", + "generated_quantize0_input0_int32_test_data_cc", + "generated_quantize0_model_data_cc", + "generated_quantize1_golden_int32_test_data_cc", + "generated_quantize1_input0_int16_test_data_cc", + "generated_quantize1_model_data_cc", + ], + hdrs = [ + "generated_quantize0_golden_int16_test_data_hdr", + "generated_quantize0_input0_int32_test_data_hdr", + "generated_quantize0_model_data_hdr", + "generated_quantize1_golden_int32_test_data_hdr", + "generated_quantize1_input0_int16_test_data_hdr", + "generated_quantize1_model_data_hdr", + ], + copts = micro_copts(), +) + +cc_test( + name = "integration_test", + srcs = [ + "integration_tests.cc", + ], + copts = micro_copts(), + deps = [ + ":models_and_testdata", + "//python/tflite_micro:python_ops_resolver", + "//tensorflow/lite/micro:micro_framework", + "//tensorflow/lite/micro:micro_log", + "//tensorflow/lite/micro:micro_resource_variable", + "//tensorflow/lite/micro:op_resolvers", + "//tensorflow/lite/micro:recording_allocators", + "//tensorflow/lite/micro/testing:micro_test", + ], +) diff --git a/tensorflow/lite/micro/integration_tests/seanet/quantize/Makefile.inc b/tensorflow/lite/micro/integration_tests/seanet/quantize/Makefile.inc new file mode 100644 index 0000000..7f45baf --- /dev/null +++ b/tensorflow/lite/micro/integration_tests/seanet/quantize/Makefile.inc @@ -0,0 
+1,17 @@ +integration_tests_seanet_quantize_GENERATOR_INPUTS := \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/quantize/quantize0.tflite \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/quantize/quantize1.tflite \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/quantize/quantize0_input0_int32.csv \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/quantize/quantize0_golden_int16.csv \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/quantize/quantize1_input0_int16.csv \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/quantize/quantize1_golden_int32.csv + +integration_tests_seanet_quantize_SRCS := \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/quantize/integration_tests.cc \ +$(TENSORFLOW_ROOT)python/tflite_micro/python_ops_resolver.cc \ + +integration_tests_seanet_quantize_HDR := \ +$(TENSORFLOW_ROOT)python/tflite_micro/python_ops_resolver.h \ + +$(eval $(call microlite_test,integration_tests_seanet_quantize_test,\ +$(integration_tests_seanet_quantize_SRCS),$(integration_tests_seanet_quantize_HDR),$(integration_tests_seanet_quantize_GENERATOR_INPUTS))) diff --git a/tensorflow/lite/micro/integration_tests/seanet/quantize/integration_tests.cc b/tensorflow/lite/micro/integration_tests/seanet/quantize/integration_tests.cc new file mode 100644 index 0000000..c774905 --- /dev/null +++ b/tensorflow/lite/micro/integration_tests/seanet/quantize/integration_tests.cc @@ -0,0 +1,93 @@ +/* Copyright 2022 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. 
+You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#include + +#include "python/tflite_micro/python_ops_resolver.h" +#include "tensorflow/lite/c/common.h" +#include "tensorflow/lite/micro/integration_tests/seanet/quantize/quantize0_golden_int16_test_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/quantize/quantize0_input0_int32_test_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/quantize/quantize0_model_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/quantize/quantize1_golden_int32_test_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/quantize/quantize1_input0_int16_test_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/quantize/quantize1_model_data.h" +#include "tensorflow/lite/micro/micro_log.h" +#include "tensorflow/lite/micro/micro_profiler.h" +#include "tensorflow/lite/micro/recording_micro_allocator.h" +#include "tensorflow/lite/micro/recording_micro_interpreter.h" +#include "tensorflow/lite/micro/system_setup.h" +#include "tensorflow/lite/micro/testing/micro_test.h" + +constexpr size_t kTensorArenaSize = 1024 * 100; +uint8_t tensor_arena[kTensorArenaSize]; + +namespace tflite { +namespace micro { +namespace { + +template +void RunModel(const uint8_t* model, const inputT* input0, + const uint32_t input0_size, const outputT* golden, + const uint32_t golden_size, const char* name) { + InitializeTarget(); + MicroProfiler profiler; + PythonOpsResolver op_resolver; + + MicroInterpreter interpreter(GetModel(model), op_resolver, tensor_arena, + 
kTensorArenaSize, nullptr, &profiler); + interpreter.AllocateTensors(); + TfLiteTensor* input_tensor0 = interpreter.input(0); + TF_LITE_MICRO_EXPECT_EQ(input_tensor0->bytes, input0_size * sizeof(inputT)); + memcpy(interpreter.input(0)->data.raw, input0, input_tensor0->bytes); + if (kTfLiteOk != interpreter.Invoke()) { + TF_LITE_MICRO_EXPECT(false); + return; + } + profiler.Log(); + MicroPrintf(""); + + TfLiteTensor* output_tensor = interpreter.output(0); + TF_LITE_MICRO_EXPECT_EQ(output_tensor->bytes, golden_size * sizeof(outputT)); + outputT* output = ::tflite::GetTensorData(output_tensor); + for (uint32_t i = 0; i < golden_size; i++) { + // TODO(b/205046520): Better understand why TfLite and TFLM can sometimes be + // off by 1. + TF_LITE_MICRO_EXPECT_NEAR(golden[i], output[i], 1); + } +} + +} // namespace +} // namespace micro +} // namespace tflite + +TF_LITE_MICRO_TESTS_BEGIN + +TF_LITE_MICRO_TEST(quantize0_test) { + tflite::micro::RunModel( + g_quantize0_model_data, g_quantize0_input0_int32_test_data, + g_quantize0_input0_int32_test_data_size, + g_quantize0_golden_int16_test_data, + g_quantize0_golden_int16_test_data_size, "quantize0 test"); +} + +TF_LITE_MICRO_TEST(quantize1_test) { + tflite::micro::RunModel( + g_quantize1_model_data, g_quantize1_input0_int16_test_data, + g_quantize1_input0_int16_test_data_size, + g_quantize1_golden_int32_test_data, + g_quantize1_golden_int32_test_data_size, "quantize1 test"); +} + +TF_LITE_MICRO_TESTS_END diff --git a/tensorflow/lite/micro/integration_tests/seanet/quantize/quantize0.tflite b/tensorflow/lite/micro/integration_tests/seanet/quantize/quantize0.tflite new file mode 100644 index 0000000..8ac54e1 Binary files /dev/null and b/tensorflow/lite/micro/integration_tests/seanet/quantize/quantize0.tflite differ diff --git a/tensorflow/lite/micro/integration_tests/seanet/quantize/quantize0_golden_int16.csv b/tensorflow/lite/micro/integration_tests/seanet/quantize/quantize0_golden_int16.csv new file mode 100644 index 
0000000..ed1d972 --- /dev/null +++ b/tensorflow/lite/micro/integration_tests/seanet/quantize/quantize0_golden_int16.csv @@ -0,0 +1 @@ +-32768,32767,1266,-32768,32767,-32768,-32768,-32768,32767,-32768,-27386,-32768,-32768,-32768,-32768,-32768,-32768,-32768,-32768,-32768,-21583,-32768,-32768,32767,32767,-32768,32767,-32768,32767,-32768,-32768,32767,27164,-32768,-32768,32767,-32768,-32768,32767,32767,-32768,32767,32767,32767,-13400,32767,32767,-32768,-32768,-32768,-32768,-32768,-32768,32767,-32768,-32768,32767,11576,-14742,32767,32767,32767,-6012,32767,-32768,32767,32767,32767,-32768,32767,-32768,-32768,-23534,32767,-32768,32767,-32768,-32768,-24942,32767,-32768,32767,32767,32767,-32768,-32768,32767,32767,-32768,-32768,-5195,32767,32536,-25551,32767,32767,-32768,-7505,32767,-32768,32767,32767,32767,-32768,32767,32767,-32768,-32768,-32768,-32768,32767,-32768,32767,32767,32767,-32768,-32768,-32768,-32768,-17149,-32768,24976,-32768,32767,-32768,32767,32767,-32768,32767,-32768,-32768,-32768,-32768,32767,32767,-32355,32767,-32768,32767,32767,-32768,32767,32767,32767,32767,32767,-32768,32767,32767,6950,32767,32767,32767,-32768,7304,32767,32767,-32768,-32768,-32768,32767,32767,32767,32767,32767,32767,32767,32767,32767,-32768,-32768,32767,-32768,32767,-21919,32767,-10546,-32768,32248,32767,-32768,-32768,-32768,-32768,32767,-32768,-32768,-32768,32767,32767,32767,32767,32767,32767,26590,-32768,32767,32767,-10268,32767,-19858,-32768,-32768,-32768,32767,32767,32767,32767,-32768,32767,32767,-28905,-32768,-32768,32767,32767,-32768,21832,31529,-32768,-32768,-32768,-32768,32767,-32768,32767,-32768,32767,-32768,-32768,-32768,-32768,-32768,-32768,32767,32767,-32768,-32768,32767,-32768,-32768,32767,-32768,32767,-32768,-32768,-32768,27557,32767,-32768,32767,-32768,-32768,32767,-32768,15851,32767,32767,-32768,32767,32767,32767,32767,-32768,32767,32767,-14742,32767,32767,32767,-32768,-32768,32767,32767,32767,-11192,32767,32767,32767,32767,-3155,-32768,32767,32767,32767,32767,32767,32767,-66
98,-32768,-32768,-32768,-21295,-32768,-32768,32767,-32768,-32768,32767,-32768,32767,32767,32767,-9576,-23954,32767,-32768,-32768,-32768,32767,32767,-32768,-32768,32767,-32768,-32768,-32768,-32768,-32768,32767,32767,-32768,32767,-32768,32767,32767,32767,-32768,32767,-32768,-32768,32767,-32768,32767,-32768,32767,32767,32767,-32768,32767,32767,32767,32767,32767,-32768,-14080,32767,32767,32767,-32768,32767,-32768,32767,-32768,-32768,-32768,-32768,-32768,-9999,-32768,32767,-32768,32767,32767,32767,32767,32767,-32768,32767,32767,-32768,-32768,-32768,32767,-32768,32767,32767,-32768,7179,-32768,-32768,-32768,-32768,-32768,32767,32767,32767,-32768,32767,-32768,32767,-32768,-32768,-32768,-32768,32767,32767,-32768,32767,32767,-32768,-32768,-32768,-32768,32767,-32768,-32768,-32768,-32768,32767,20561,32767,-32768,-32768,-32768,-32768,32767,-32768,-17622,-32768,32767,-32768,-32768,-32768,32767,-32768,-32768,32767,-32768,-32768,32767,32767,32767,32767,-32768,32767,-32768,-32768,-32768,32767,-32768,-32768,32767,-32768,-32768,-32768,-32768,32767,32767,32767,-32768,32767,-32768,-32768,32767,-32768,-32768,-32768,32767,32767,-32768,-32768,32767,-32768,-32768,32767,-32768,-32768,32767,-32768,32767,-32768,-32768,32767,32767,32767,32767,32767,-32768,-32768,32767,-32768,-32768,32767,32767,32767,32767,32767,32767,-32768,32767,32767,-32768,-32768,-9386,-32768,32767,32767,-32768,-32768,32767,32767,-32768,-32768,-32768,-32768,-32768,-32768,-32768,32767,-32768,-13688,-32768,32767,-32768,32767,32767,-32768,-32768,-32768,32767,-32768,32767,32767,32767,32767,-6834,-32768,-32768,32767,-32768,-32768,32767,32767,-32768,32767,32767,-32768,-32034,19220,32767,-19702,32767,32767,-32768,-32768,32767,-32768,32767,32767,-32768,32767,32767,32767,32767,32767,32767,32767,-32768,-32768,-32768,32767,-28751,32767,-21036,-32768,32767,32767,-32768,-32768,32767,32767,32767,-32768,-32768,-32768,-32768,32767,-32768,32767,25114,32767,-32768,32767,32767,-32768,-32768,32767,32767,32767,-32768,-17977,32767,-32768,-32768,-
32768,-32768,-32768,-32768,-32768,32767,-32768,32767,-32768,32767,32767,32767,-32768,-32768,-32768,-32768,-32768,-32768,-32768,-32768,-32768,32767,-32768,-32768,-32768,32767,32767,-32768,-14698,-32768,32767,-32768,32767,32767,-32768,-25991,-32768,32767,32767,-32768,32767,-32768,-17060,-18181,32767 diff --git a/tensorflow/lite/micro/integration_tests/seanet/quantize/quantize0_input0_int32.csv b/tensorflow/lite/micro/integration_tests/seanet/quantize/quantize0_input0_int32.csv new file mode 100644 index 0000000..bf3f0f2 --- /dev/null +++ b/tensorflow/lite/micro/integration_tests/seanet/quantize/quantize0_input0_int32.csv @@ -0,0 +1 @@ +-503238142,1390891343,7831066,-473509042,1973713823,-1540297669,-833974130,-347617228,714614853,-1803267904,-169446166,-478790262,-1776099184,-1851713959,-1287500291,-1868406927,-1259364534,-1499204946,-454575451,-1088326942,-133537607,-1462292917,-1120657954,1242534360,1661049091,-679721664,1274444378,-419859447,641311774,-1374104551,-1382404849,399919150,168072384,-282017910,-742753412,929382970,-967728937,-1006766358,1922483699,415377296,-1158724948,1681031217,1930682836,571474873,-82907821,1563582646,1203030294,-863023586,-1289437058,-1644476987,-1195212319,-1390487113,-1061634208,1078155688,-1531491957,-1954565939,261727075,71621778,-91212613,2013191175,863011468,317871452,-37195075,894925674,-807473189,435991254,1879714348,901588815,-1633214130,1194611513,-1254415835,-1909560639,-145612821,1391592139,-1398819340,1477401063,-315484387,-463821176,-154320514,1014376129,-1603012536,303222505,1547019523,691560226,-581798397,-1536238723,302474920,304807042,-686161447,-418287467,-32144051,1400924695,201308537,-158088547,367450185,1325859047,-1155720038,-46436280,1260686276,-412610602,1005562808,629935460,1999027626,-1603196215,1503137060,977398177,-656189003,-883691284,-383047721,-1049651559,1552651123,-1019858823,1144087988,1114558628,337663240,-1954387550,-1345128811,-1064081065,-1971177787,-106102935,-1623682777,154533117,-1003551684,
1099870455,-1444939152,1041952432,746552286,-1184949669,896693327,-1565503707,-1230652404,-593934272,-1151682517,1470486955,1295303062,-200188908,210140678,-1484038642,713982597,609633143,-619763025,474439851,1416456276,664158481,1405740703,1800634484,-1433490935,1644634120,1068868287,43003508,631890787,1094878517,531631977,-1544246898,45192465,733995039,934816319,-259931349,-1409938069,-2032493300,525857028,415626995,1370904813,1233226291,726248488,1060762602,769654240,1609475489,1754540587,-918225281,-971704203,903576656,-917154930,868443557,-135619227,1949773873,-65251484,-401077433,199528850,1221279985,-210092178,-1905449088,-1750087895,-2038797456,1066077554,-1175565120,-1301614296,-741106574,543278657,1187972167,657489377,1984071877,1396088790,1697769835,164516101,-702564588,578861661,2064859555,-63529162,266309759,-122864677,-1922910180,-1829736715,-479160957,232222409,1146092687,506829218,1665263596,-1379417187,278732766,1671369573,-178840354,-582614797,-591066246,1044007028,992972354,-1415808361,135078173,195078373,-619258044,-1919318471,-1567643218,-1128530544,866212030,-1126498558,1590916976,-1219855293,1405184423,-427242315,-1324538476,-323766273,-1514566350,-1291866375,-1359467517,541529081,2038774213,-424176551,-1441552329,1381991786,-1158665919,-1593096785,1753704141,-638541023,1147417333,-1154120454,-777621574,-463370496,170500517,981846548,-236782515,2057679545,-1208189671,-711580494,1898276750,-1646956249,98072727,1828452702,1826217975,-645413388,1381199096,977374145,1767874884,915599373,-1624061899,1309067381,2099494587,-91209382,1331576844,348388474,2126376705,-455571707,-1666302170,1702833011,1387709072,1466163218,-69248506,971980474,1498243008,644422157,1263759749,-19522516,-360460306,887923603,442171149,252791027,469226140,2108285699,642801150,-41439378,-955789663,-947885773,-688446910,-131757519,-1552730976,-1636525179,1747907544,-1191213506,-1629328522,1863346230,-924152691,1049083246,446135237,811385357,-59249712,-148206820,248562665,-10372
88098,-231192221,-2079178070,849877210,1127893074,-1159705239,-2037350302,1903105443,-1747893180,-892712977,-1809110385,-226642001,-1382202260,1656767381,963915096,-1213676393,926733572,-926629695,496458060,296878352,1339057426,-1330482616,1720536767,-1236895254,-1179960147,682564897,-213888767,448773448,-1110652328,716998715,2146298121,529946369,-540976255,460342546,1465073300,1244268965,1779483146,409911518,-681533788,-87118779,970270593,606181605,602874719,-1276699429,1884005195,-1126160240,632394049,-1999041454,-269965868,-1981030039,-772431724,-2026756481,-61868304,-983663330,2058504905,-826557235,318268006,509488914,655242400,1784734058,482268096,-1700754152,787140409,560572502,-257578980,-1247420043,-1870049769,1596450697,-845003137,930849758,1362708168,-1745892571,44417767,-628034575,-898830018,-801716211,-753669577,-2126435263,1713336676,426501466,631919229,-2031764307,1813083575,-414158723,681703404,-406902315,-982729225,-2078005834,-2019949278,1299108020,317643003,-1059133320,1738288516,1837688570,-1210985938,-1476449129,-1100144316,-1597040162,709223264,-1296228588,-1922947230,-1547437947,-1249795354,1265774005,127216352,227863858,-1745788367,-937205439,-1840991414,-1038942273,1353029094,-616976344,-109029465,-1888450535,1915856257,-1330532625,-323730328,-1690386861,461059278,-569240960,-2139314476,461098177,-942808944,-421660307,287717033,1373411867,1677035786,1491848454,-810426017,1812542545,-1600291435,-1371466460,-1286067062,1000655891,-537724066,-1075530559,1985101412,-950446576,-1381129090,-2079264408,-254372360,539169938,1872122063,727453711,-296458994,1235168525,-1453591595,-681581198,1773038435,-830152149,-1634682020,-1980645326,349408810,1592902638,-1002773736,-859372277,1543587049,-495571180,-282701321,470476398,-420750430,-826878933,881560644,-385497553,1085152759,-1717199138,-1481918994,1855546096,1218584353,885254545,1042190209,387137462,-1600478716,-1664092785,2037697720,-1814952796,-1598318626,751727711,1831103829,1760520227,2049332296,13
09350952,1501695252,-1755057449,1477001226,823026176,-223888873,-703200983,-58072285,-2146668752,983785152,569830144,-1093773401,-1403157855,527799019,1980484388,-362693075,-1150797407,-429077580,-275369902,-332807325,-1258249352,-587881307,1213063049,-1061893483,-84690350,-1674400718,1974365317,-1266158331,2122721242,1747660879,-1631670705,-2079461581,-204218358,523527214,-355029452,2002463961,777955752,2122769738,963922546,-42284751,-1456020416,-428313940,1735905223,-637505495,-491522338,2064519972,1128618671,-1098646941,317108826,703515197,-2083582778,-198201197,118916082,1408444981,-121902142,1873222299,1804952814,-840694144,-2006252980,370433664,-408008534,1800107211,1920465618,-1202357235,1240846759,487759176,729616131,1720842575,240696736,920814087,471175270,-1342791421,-1957459189,-1763432067,1419699954,-177886545,1718492117,-130156409,-2078241065,1379034934,247204924,-2028891968,-1287328617,860500657,2068302781,996092638,-1942244416,-416992639,-352780001,-655418099,1709987151,-1794719722,367911447,155386186,2043048541,-1834532183,330694557,1753142802,-1955572266,-1395486051,323732413,222942595,1394005772,-1561970387,-111225935,2106102598,-337920944,-777898876,-691976603,-2038505722,-614594542,-484556584,-2091782964,819311509,-1342263612,1302216640,-830320155,1183792374,978444985,1565153266,-379618315,-427007194,-836453201,-2066042871,-893310254,-629488183,-1408776715,-1618179734,-773457427,478136512,-1246524020,-1190653464,-1484269963,1901352348,1183323562,-1623287248,-90941660,-1673514972,1610315799,-1380647190,2129734612,1930810303,-1770325370,-160814836,-413021800,365905016,225408872,-1515348363,1160554561,-1155440208,-105554754,-112490007,1413130171 diff --git a/tensorflow/lite/micro/integration_tests/seanet/quantize/quantize1.tflite b/tensorflow/lite/micro/integration_tests/seanet/quantize/quantize1.tflite new file mode 100644 index 0000000..e2dab48 Binary files /dev/null and b/tensorflow/lite/micro/integration_tests/seanet/quantize/quantize1.tflite 
differ diff --git a/tensorflow/lite/micro/integration_tests/seanet/quantize/quantize1_golden_int32.csv b/tensorflow/lite/micro/integration_tests/seanet/quantize/quantize1_golden_int32.csv new file mode 100644 index 0000000..20d486f --- /dev/null +++ b/tensorflow/lite/micro/integration_tests/seanet/quantize/quantize1_golden_int32.csv @@ -0,0 +1 @@ +-84477715,-86547695,-93630076,-45294068,105051452,80938183,-53789608,-82557013,85187612,-26325891,-46856504,-65847901,73796091,44252443,-91026015,-61193765,-7656270,44069993,-49915031,81299766,-8253379,74084694,12326992,103130750,108027048,-90130351,-24962491,89772085,14360482,-24262546,-12260647,104799339,19697977,68932966,-50163827,-90770585,-550668,101999560,-91513655,-62268562,-67533076,-10031438,-107031865,-57236256,-21048107,-106312017,-106272209,6535031,-64690172,-11122822,-40858871,-52678321,49712677,2318775,-86647213,101422354,-88275995,3416793,24693792,37581404,27374150,14068562,-92160523,-99388865,301872,-78506621,9902065,-48243125,10847488,55806511,38676105,-34612443,-91616490,11292003,-105837646,4647502,-84079642,-70210117,-75036752,-2899298,98878004,91022698,-48050723,22666937,92170475,-47559766,90710874,-37345878,26073778,-13454866,90156889,20633448,-11749787,-106474563,26408823,-20042973,25735416,-2846222,-19057743,-23973944,-27709195,56251026,65741748,-61475733,-100775485,-50814012,-71951686,-87748548,-34967392,756339,22099683,-28027653,94466029,-35133255,-70856986,-1114604,-11865891,14466635,21890695,-28302987,18195251,11000083,8120688,-14440096,-895664,-3556118,10529030,-40719546,-49964790,-43545864,-39392636,65287282,27049057,-58377399,22749869,45061858,8100785,41197233,97249222,59150324,99246222,-44166194,30047873,-54446428,8412608,-16662670,13726882,-83018114,93261858,18825533,-11092966,56237756,-51424391,80264776,-17405740,51178913,14871342,-54947337,103137385,77846483,-69991177,-18845437,65652182,-66133187,47101982,-84769635,57859904,-87748548,-63446194,-51835733,75268961,44351962,61893710,-53610475
,-76194480,-71002946,-45231039,67138321,-1190902,97378596,-64262244,38991246,62285148,-35027103,75932416,-104504102,72485767,102168741,-44348644,-61406070,81817261,-105867502,34506290,-24100000,87121583,-94489250,-74224019,95534191,-1924019,-90926497,21326758,-79690888,-56960922,64355127,-89002478,106258940,-8180399,51586937,-29138940,78241239,70027667,-60029401,-11059794,102006194,36237908,56214536,34244226,60420840,-28906731,5317591,90266359,75524391,-20215471,53839367,-62908796,62086112,-40580220,-27888328,93288397,2302189,62378032,58407254,78453544,-21867474,-105127750,-57090296,-94844198,-18102368,-19668121,-100586401,-15564652,-107247488,-3851356,-86859518,36380551,-49178596,37704143,-5529897,79116999,-5918018,17644584,66043620,52927116,-36961074,84019931,41817564,103983290,52754618,22733283,-33388369,-99631026,-61452512,-10900564,5782010,-75816311,-35571136,10167447,73782822,-42149291,98264308,13252512,-102626525,94721459,-89218101,57767020,5095334,-96781487,70206800,69735747,107778252,30399505,-76990626,15733833,68465231,-90452127,98071907,42713228,80072375,-43904129,12947323,52960289,-98884639,-18215155,-68850035,68992677,-101813792,75710158,83774453,26770406,-41797660,28714329,-69709209,78559697,66418472,105058087,-67961005,-42411356,-36337426,-86786538,-101803840,7390888,84338390,90050737,-57969374,30034604,47493421,64680220,25702244,-55425024,21293586,-75756600,93998293,-55557715,70190213,-14891246,-77295816,-23701927,-44245809,85214150,104049635,103698004,94734728,-80692705,102450709,-27865107,-13013668,6515127,65662134,2660454,-101233269,77909511,55252526,62918748,55196132,42965341,-100520055,-30986662,-40059408,107585850,75444776,99640977,63811094,59064074,64089745,53255527,102978156,-99757082,-44487970,72107598,43990379,-5782010,-20749553,24667254,-93567048,98921129,-5340812,103419353,-10605327,-87509704,-72781005,10237109,-94306800,-102974838,-66236022,40304886,48425575,83263593,-48614659,103230269,-95116215,60812278,24723648,-101727543,-63320138,74
476132,-56320688,27593090,73454412,-7171948,-97226002,-92873737,-69745699,59372581,-104019780,-71639862,-106378362,103668149,-45997330,55010365,80357660,-73112732,10442780,91994659,-86252457,-63880757,29228507,13318858,-100762216,102201913,-12864391,-72830764,67961005,-44381817,93447626,66975774,-84009979,48598073,-66876256,-105243854,54934068,-35209553,-50409305,99176559,-20387970,-5168314,8084198,80490351,97434990,90886690,28024336,76718610,2640551,-26432044,-40139023,-85393283,-2215939,76721927,73076242,105678417,22089732,34393503,3960826,70740881,-38586538,-79806993,3337178,-44242491,-59870172,99942849,281968,28415774,-23307171,106414852,-30409456,97779986,-101949801,41993379,-61064391,-87260909,-5675857,-86584185,195719,-69321087,45297385,-101382547,-49115568,95846015,-11918968,-89997660,-58257977,-57654233,38039188,-7384253,43980427,-94369828,32363331,85137853,553985,69934783,-93825795,-37969525,97942533,-69463730,-72873889,92386098,-31547282,-72936917,52811012,-61253476,-73155857,11749787,106444708,92286580,-14234425,-107104845,42209002,38516875,107688686,-93829112,15282684,242161,-16593008,46547997,19094233,-18155444,5466869,-99806841,23108135,-64278830,44723496,-17010984,-82643262,-71032801,-22822849,-96582450,-53607158,-1506043,-93245272,73670035,-43927350,-13945822,-88421955,45396903,-106627158,-18118954,-84832664,38264763,-72940234,36377233,67728796,40434260,-101349374,-108411851,-51440977,-64942285,18576738,32313572,-89241322,-105509236,29726098,-61926882,106049952,1336862,53119518,617013,49181913,21874109,-31640165,42185781,26694109,-20497440,2202670,90435540,-26342478,-92711191,78237922,-67410337,-35846469,-64295416,-9819133,55766703,103289979,66448328,84573916,82324804,53756435,-89188245,70601555,-84305217,-66245974,-63811094,-50684639,29002932,-47075444,-88133352,-89025699,66182946,-18065878,97634026,-94721459,63144322,5201487,107781569,74539160,-53544129,15050475,52101115,71732746,-70176944,88992526,-43512691,-63648548,73311769,27337660,-101943166,
44325423,108023730,-67078610,8415926,-24388603,-60752567,75733379,-92598403,-7062478,77982491,-26521610,-86753366,-61863854,-4236160,21837619,-66680537,-31318390,30412774,85575733,52708176,-4624281,48107116,77690571,9679807,33673655,-72562065,-13637316,88796807,67635912,102032732,-69752333,-51543813,-43406538,98002244,-90909911,-59704308,62474233,-70800592,54031769,-108169690,-34459849,-50163827,84991893,-102553545,-79578101 diff --git a/tensorflow/lite/micro/integration_tests/seanet/quantize/quantize1_input0_int16.csv b/tensorflow/lite/micro/integration_tests/seanet/quantize/quantize1_input0_int16.csv new file mode 100644 index 0000000..6f4e19d --- /dev/null +++ b/tensorflow/lite/micro/integration_tests/seanet/quantize/quantize1_input0_int16.csv @@ -0,0 +1 @@ +-25466,-26090,-28225,-13654,31668,24399,-16215,-24887,25680,-7936,-14125,-19850,22246,13340,-27440,-18447,-2308,13285,-15047,24508,-2488,22333,3716,31089,32565,-27170,-7525,27062,4329,-7314,-3696,31592,5938,20780,-15122,-27363,-166,30748,-27587,-18771,-20358,-3024,-32265,-17254,-6345,-32048,-32036,1970,-19501,-3353,-12317,-15880,14986,699,-26120,30574,-26611,1030,7444,11329,8252,4241,-27782,-29961,91,-23666,2985,-14543,3270,16823,11659,-10434,-27618,3404,-31905,1401,-25346,-21165,-22620,-874,29807,27439,-14485,6833,27785,-14337,27345,-11258,7860,-4056,27178,6220,-3542,-32097,7961,-6042,7758,-858,-5745,-7227,-8353,16957,19818,-18532,-30379,-15318,-21690,-26452,-10541,228,6662,-8449,28477,-10591,-21360,-336,-3577,4361,6599,-8532,5485,3316,2448,-4353,-270,-1072,3174,-12275,-15062,-13127,-11875,19681,8154,-17598,6858,13584,2442,12419,29316,17831,29918,-13314,9058,-16413,2536,-5023,4138,-25026,28114,5675,-3344,16953,-15502,24196,-5247,15428,4483,-16564,31091,23467,-21099,-5681,19791,-19936,14199,-25554,17442,-26452,-19126,-15626,22690,13370,18658,-16161,-22969,-21404,-13635,20239,-359,29355,-19372,11754,18776,-10559,22890,-31503,21851,30799,-13369,-18511,24664,-31914,10402,-7265,26263,-28484,-22375,28799,-580,-274
10,6429,-24023,-17171,19400,-26830,32032,-2466,15551,-8784,23586,21110,-18096,-3334,30750,10924,16946,10323,18214,-8714,1603,27211,22767,-6094,16230,-18964,18716,-12233,-8407,28122,694,18804,17607,23650,-6592,-31691,-17210,-28591,-5457,-5929,-30322,-4692,-32330,-1161,-26184,10967,-14825,11366,-1667,23850,-1784,5319,19909,15955,-11142,25328,12606,31346,15903,6853,-10065,-30034,-18525,-3286,1743,-22855,-10723,3065,22242,-12706,29622,3995,-30937,28554,-26895,17414,1536,-29175,21164,21022,32490,9164,-23209,4743,20639,-27267,29564,12876,24138,-13235,3903,15965,-29809,-5491,-20755,20798,-30692,22823,25254,8070,-12600,8656,-21014,23682,20022,31670,-20487,-12785,-10954,-26162,-30689,2228,25424,27146,-17475,9054,14317,19498,7748,-16708,6419,-22837,28336,-16748,21159,-4489,-23301,-7145,-13338,25688,31366,31260,28558,-24325,30884,-8400,-3923,1964,19794,802,-30517,23486,16656,18967,16639,12952,-30302,-9341,-12076,32432,22743,30037,19236,17805,19320,16054,31043,-30072,-13411,21737,13261,-1743,-6255,7436,-28206,29820,-1610,31176,-3197,-26380,-21940,3086,-28429,-31042,-19967,12150,14598,25100,-14655,31119,-28673,18332,7453,-30666,-19088,22451,-16978,8318,22143,-2162,-29309,-27997,-21025,17898,-31357,-21596,-32068,31251,-13866,16583,24224,-22040,3148,27732,-26001,-19257,8811,4015,-30375,30809,-3878,-21955,20487,-13379,28170,20190,-25325,14650,-20160,-31726,16560,-10614,-15196,29897,-6146,-1558,2437,24264,29372,27398,8448,23127,796,-7968,-12100,-25742,-668,23128,22029,31857,6659,10368,1194,21325,-11632,-24058,1006,-13337,-18048,30128,85,8566,-7026,32079,-9167,29476,-30733,12659,-18408,-26305,-1711,-26101,59,-20897,13655,-30562,-14806,28893,-3593,-27130,-17562,-17380,11467,-2226,13258,-28448,9756,25665,167,21082,-28284,-11446,29525,-20940,-21968,27850,-9510,-21987,15920,-18465,-22053,3542,32088,27820,-4291,-32287,12724,11611,32463,-28285,4607,73,-5002,14032,5756,-5473,1648,-30087,6966,-19377,13482,-5128,-24913,-21413,-6880,-29115,-16160,-454,-28109,22208,-13242,-4204,-26655,13685,-32
143,-5462,-25573,11535,-21988,10966,20417,12189,-30552,-32681,-15507,-19577,5600,9741,-26902,-31806,8961,-18668,31969,403,16013,186,14826,6594,-9538,12717,8047,-6179,664,27262,-7941,-27948,23585,-20321,-10806,-19382,-2960,16811,31137,20031,25495,24817,16205,-26886,21283,-25414,-19970,-19236,-15279,8743,-14191,-26568,-26837,19951,-5446,29432,-28554,19035,1568,32491,22470,-16141,4537,15706,21624,-21155,26827,-13117,-19187,22100,8241,-30731,13362,32564,-20221,2537,-7352,-18314,22830,-27914,-2129,23508,-7995,-26152,-18649,-1277,6583,-20101,-9441,9168,25797,15889,-1394,14502,23420,2918,10151,-21874,-4111,26768,20389,30758,-21027,-15538,-13085,29543,-27405,-17998,18833,-21343,16288,-32608,-10388,-15122,25621,-30915,-23989 diff --git a/tensorflow/lite/micro/integration_tests/seanet/strided_slice/BUILD b/tensorflow/lite/micro/integration_tests/seanet/strided_slice/BUILD new file mode 100644 index 0000000..a70568c --- /dev/null +++ b/tensorflow/lite/micro/integration_tests/seanet/strided_slice/BUILD @@ -0,0 +1,1469 @@ +# Description: +# generated integration test for one specific kernel in a model. 
+load( + "//tensorflow/lite/micro:build_def.bzl", + "generate_cc_arrays", + "micro_copts", +) + +package( + default_visibility = ["//visibility:public"], + # Disabling layering_check because of http://b/177257332 + features = ["-layering_check"], + licenses = ["notice"], +) + +generate_cc_arrays( + name = "generated_strided_slice0_model_data_cc", + src = "strided_slice0.tflite", + out = "strided_slice0_model_data.cc", +) + +generate_cc_arrays( + name = "generated_strided_slice0_model_data_hdr", + src = "strided_slice0.tflite", + out = "strided_slice0_model_data.h", +) + +generate_cc_arrays( + name = "generated_strided_slice1_model_data_cc", + src = "strided_slice1.tflite", + out = "strided_slice1_model_data.cc", +) + +generate_cc_arrays( + name = "generated_strided_slice1_model_data_hdr", + src = "strided_slice1.tflite", + out = "strided_slice1_model_data.h", +) + +generate_cc_arrays( + name = "generated_strided_slice2_model_data_cc", + src = "strided_slice2.tflite", + out = "strided_slice2_model_data.cc", +) + +generate_cc_arrays( + name = "generated_strided_slice2_model_data_hdr", + src = "strided_slice2.tflite", + out = "strided_slice2_model_data.h", +) + +generate_cc_arrays( + name = "generated_strided_slice3_model_data_cc", + src = "strided_slice3.tflite", + out = "strided_slice3_model_data.cc", +) + +generate_cc_arrays( + name = "generated_strided_slice3_model_data_hdr", + src = "strided_slice3.tflite", + out = "strided_slice3_model_data.h", +) + +generate_cc_arrays( + name = "generated_strided_slice4_model_data_cc", + src = "strided_slice4.tflite", + out = "strided_slice4_model_data.cc", +) + +generate_cc_arrays( + name = "generated_strided_slice4_model_data_hdr", + src = "strided_slice4.tflite", + out = "strided_slice4_model_data.h", +) + +generate_cc_arrays( + name = "generated_strided_slice5_model_data_cc", + src = "strided_slice5.tflite", + out = "strided_slice5_model_data.cc", +) + +generate_cc_arrays( + name = "generated_strided_slice5_model_data_hdr", 
+ src = "strided_slice5.tflite", + out = "strided_slice5_model_data.h", +) + +generate_cc_arrays( + name = "generated_strided_slice6_model_data_cc", + src = "strided_slice6.tflite", + out = "strided_slice6_model_data.cc", +) + +generate_cc_arrays( + name = "generated_strided_slice6_model_data_hdr", + src = "strided_slice6.tflite", + out = "strided_slice6_model_data.h", +) + +generate_cc_arrays( + name = "generated_strided_slice7_model_data_cc", + src = "strided_slice7.tflite", + out = "strided_slice7_model_data.cc", +) + +generate_cc_arrays( + name = "generated_strided_slice7_model_data_hdr", + src = "strided_slice7.tflite", + out = "strided_slice7_model_data.h", +) + +generate_cc_arrays( + name = "generated_strided_slice8_model_data_cc", + src = "strided_slice8.tflite", + out = "strided_slice8_model_data.cc", +) + +generate_cc_arrays( + name = "generated_strided_slice8_model_data_hdr", + src = "strided_slice8.tflite", + out = "strided_slice8_model_data.h", +) + +generate_cc_arrays( + name = "generated_strided_slice9_model_data_cc", + src = "strided_slice9.tflite", + out = "strided_slice9_model_data.cc", +) + +generate_cc_arrays( + name = "generated_strided_slice9_model_data_hdr", + src = "strided_slice9.tflite", + out = "strided_slice9_model_data.h", +) + +generate_cc_arrays( + name = "generated_strided_slice10_model_data_cc", + src = "strided_slice10.tflite", + out = "strided_slice10_model_data.cc", +) + +generate_cc_arrays( + name = "generated_strided_slice10_model_data_hdr", + src = "strided_slice10.tflite", + out = "strided_slice10_model_data.h", +) + +generate_cc_arrays( + name = "generated_strided_slice11_model_data_cc", + src = "strided_slice11.tflite", + out = "strided_slice11_model_data.cc", +) + +generate_cc_arrays( + name = "generated_strided_slice11_model_data_hdr", + src = "strided_slice11.tflite", + out = "strided_slice11_model_data.h", +) + +generate_cc_arrays( + name = "generated_strided_slice12_model_data_cc", + src = "strided_slice12.tflite", + 
out = "strided_slice12_model_data.cc", +) + +generate_cc_arrays( + name = "generated_strided_slice12_model_data_hdr", + src = "strided_slice12.tflite", + out = "strided_slice12_model_data.h", +) + +generate_cc_arrays( + name = "generated_strided_slice13_model_data_cc", + src = "strided_slice13.tflite", + out = "strided_slice13_model_data.cc", +) + +generate_cc_arrays( + name = "generated_strided_slice13_model_data_hdr", + src = "strided_slice13.tflite", + out = "strided_slice13_model_data.h", +) + +generate_cc_arrays( + name = "generated_strided_slice14_model_data_cc", + src = "strided_slice14.tflite", + out = "strided_slice14_model_data.cc", +) + +generate_cc_arrays( + name = "generated_strided_slice14_model_data_hdr", + src = "strided_slice14.tflite", + out = "strided_slice14_model_data.h", +) + +generate_cc_arrays( + name = "generated_strided_slice15_model_data_cc", + src = "strided_slice15.tflite", + out = "strided_slice15_model_data.cc", +) + +generate_cc_arrays( + name = "generated_strided_slice15_model_data_hdr", + src = "strided_slice15.tflite", + out = "strided_slice15_model_data.h", +) + +generate_cc_arrays( + name = "generated_strided_slice16_model_data_cc", + src = "strided_slice16.tflite", + out = "strided_slice16_model_data.cc", +) + +generate_cc_arrays( + name = "generated_strided_slice16_model_data_hdr", + src = "strided_slice16.tflite", + out = "strided_slice16_model_data.h", +) + +generate_cc_arrays( + name = "generated_strided_slice17_model_data_cc", + src = "strided_slice17.tflite", + out = "strided_slice17_model_data.cc", +) + +generate_cc_arrays( + name = "generated_strided_slice17_model_data_hdr", + src = "strided_slice17.tflite", + out = "strided_slice17_model_data.h", +) + +generate_cc_arrays( + name = "generated_strided_slice18_model_data_cc", + src = "strided_slice18.tflite", + out = "strided_slice18_model_data.cc", +) + +generate_cc_arrays( + name = "generated_strided_slice18_model_data_hdr", + src = "strided_slice18.tflite", + out = 
"strided_slice18_model_data.h", +) + +generate_cc_arrays( + name = "generated_strided_slice19_model_data_cc", + src = "strided_slice19.tflite", + out = "strided_slice19_model_data.cc", +) + +generate_cc_arrays( + name = "generated_strided_slice19_model_data_hdr", + src = "strided_slice19.tflite", + out = "strided_slice19_model_data.h", +) + +generate_cc_arrays( + name = "generated_strided_slice20_model_data_cc", + src = "strided_slice20.tflite", + out = "strided_slice20_model_data.cc", +) + +generate_cc_arrays( + name = "generated_strided_slice20_model_data_hdr", + src = "strided_slice20.tflite", + out = "strided_slice20_model_data.h", +) + +generate_cc_arrays( + name = "generated_strided_slice21_model_data_cc", + src = "strided_slice21.tflite", + out = "strided_slice21_model_data.cc", +) + +generate_cc_arrays( + name = "generated_strided_slice21_model_data_hdr", + src = "strided_slice21.tflite", + out = "strided_slice21_model_data.h", +) + +generate_cc_arrays( + name = "generated_strided_slice22_model_data_cc", + src = "strided_slice22.tflite", + out = "strided_slice22_model_data.cc", +) + +generate_cc_arrays( + name = "generated_strided_slice22_model_data_hdr", + src = "strided_slice22.tflite", + out = "strided_slice22_model_data.h", +) + +generate_cc_arrays( + name = "generated_strided_slice23_model_data_cc", + src = "strided_slice23.tflite", + out = "strided_slice23_model_data.cc", +) + +generate_cc_arrays( + name = "generated_strided_slice23_model_data_hdr", + src = "strided_slice23.tflite", + out = "strided_slice23_model_data.h", +) + +generate_cc_arrays( + name = "generated_strided_slice24_model_data_cc", + src = "strided_slice24.tflite", + out = "strided_slice24_model_data.cc", +) + +generate_cc_arrays( + name = "generated_strided_slice24_model_data_hdr", + src = "strided_slice24.tflite", + out = "strided_slice24_model_data.h", +) + +generate_cc_arrays( + name = "generated_strided_slice25_model_data_cc", + src = "strided_slice25.tflite", + out = 
"strided_slice25_model_data.cc", +) + +generate_cc_arrays( + name = "generated_strided_slice25_model_data_hdr", + src = "strided_slice25.tflite", + out = "strided_slice25_model_data.h", +) + +generate_cc_arrays( + name = "generated_strided_slice26_model_data_cc", + src = "strided_slice26.tflite", + out = "strided_slice26_model_data.cc", +) + +generate_cc_arrays( + name = "generated_strided_slice26_model_data_hdr", + src = "strided_slice26.tflite", + out = "strided_slice26_model_data.h", +) + +generate_cc_arrays( + name = "generated_strided_slice27_model_data_cc", + src = "strided_slice27.tflite", + out = "strided_slice27_model_data.cc", +) + +generate_cc_arrays( + name = "generated_strided_slice27_model_data_hdr", + src = "strided_slice27.tflite", + out = "strided_slice27_model_data.h", +) + +generate_cc_arrays( + name = "generated_strided_slice28_model_data_cc", + src = "strided_slice28.tflite", + out = "strided_slice28_model_data.cc", +) + +generate_cc_arrays( + name = "generated_strided_slice28_model_data_hdr", + src = "strided_slice28.tflite", + out = "strided_slice28_model_data.h", +) + +generate_cc_arrays( + name = "generated_strided_slice29_model_data_cc", + src = "strided_slice29.tflite", + out = "strided_slice29_model_data.cc", +) + +generate_cc_arrays( + name = "generated_strided_slice29_model_data_hdr", + src = "strided_slice29.tflite", + out = "strided_slice29_model_data.h", +) + +generate_cc_arrays( + name = "generated_strided_slice30_model_data_cc", + src = "strided_slice30.tflite", + out = "strided_slice30_model_data.cc", +) + +generate_cc_arrays( + name = "generated_strided_slice30_model_data_hdr", + src = "strided_slice30.tflite", + out = "strided_slice30_model_data.h", +) + +generate_cc_arrays( + name = "generated_strided_slice31_model_data_cc", + src = "strided_slice31.tflite", + out = "strided_slice31_model_data.cc", +) + +generate_cc_arrays( + name = "generated_strided_slice31_model_data_hdr", + src = "strided_slice31.tflite", + out = 
"strided_slice31_model_data.h", +) + +generate_cc_arrays( + name = "generated_strided_slice32_model_data_cc", + src = "strided_slice32.tflite", + out = "strided_slice32_model_data.cc", +) + +generate_cc_arrays( + name = "generated_strided_slice32_model_data_hdr", + src = "strided_slice32.tflite", + out = "strided_slice32_model_data.h", +) + +generate_cc_arrays( + name = "generated_strided_slice33_model_data_cc", + src = "strided_slice33.tflite", + out = "strided_slice33_model_data.cc", +) + +generate_cc_arrays( + name = "generated_strided_slice33_model_data_hdr", + src = "strided_slice33.tflite", + out = "strided_slice33_model_data.h", +) + +generate_cc_arrays( + name = "generated_strided_slice0_input0_int16_test_data_cc", + src = "strided_slice0_input0_int16.csv", + out = "strided_slice0_input0_int16_test_data.cc", +) + +generate_cc_arrays( + name = "generated_strided_slice0_input0_int16_test_data_hdr", + src = "strided_slice0_input0_int16.csv", + out = "strided_slice0_input0_int16_test_data.h", +) + +generate_cc_arrays( + name = "generated_strided_slice0_golden_int16_test_data_cc", + src = "strided_slice0_golden_int16.csv", + out = "strided_slice0_golden_int16_test_data.cc", +) + +generate_cc_arrays( + name = "generated_strided_slice0_golden_int16_test_data_hdr", + src = "strided_slice0_golden_int16.csv", + out = "strided_slice0_golden_int16_test_data.h", +) + +generate_cc_arrays( + name = "generated_strided_slice1_input0_int16_test_data_cc", + src = "strided_slice1_input0_int16.csv", + out = "strided_slice1_input0_int16_test_data.cc", +) + +generate_cc_arrays( + name = "generated_strided_slice1_input0_int16_test_data_hdr", + src = "strided_slice1_input0_int16.csv", + out = "strided_slice1_input0_int16_test_data.h", +) + +generate_cc_arrays( + name = "generated_strided_slice1_golden_int16_test_data_cc", + src = "strided_slice1_golden_int16.csv", + out = "strided_slice1_golden_int16_test_data.cc", +) + +generate_cc_arrays( + name = 
"generated_strided_slice1_golden_int16_test_data_hdr", + src = "strided_slice1_golden_int16.csv", + out = "strided_slice1_golden_int16_test_data.h", +) + +generate_cc_arrays( + name = "generated_strided_slice2_input0_int16_test_data_cc", + src = "strided_slice2_input0_int16.csv", + out = "strided_slice2_input0_int16_test_data.cc", +) + +generate_cc_arrays( + name = "generated_strided_slice2_input0_int16_test_data_hdr", + src = "strided_slice2_input0_int16.csv", + out = "strided_slice2_input0_int16_test_data.h", +) + +generate_cc_arrays( + name = "generated_strided_slice2_golden_int16_test_data_cc", + src = "strided_slice2_golden_int16.csv", + out = "strided_slice2_golden_int16_test_data.cc", +) + +generate_cc_arrays( + name = "generated_strided_slice2_golden_int16_test_data_hdr", + src = "strided_slice2_golden_int16.csv", + out = "strided_slice2_golden_int16_test_data.h", +) + +generate_cc_arrays( + name = "generated_strided_slice3_input0_int16_test_data_cc", + src = "strided_slice3_input0_int16.csv", + out = "strided_slice3_input0_int16_test_data.cc", +) + +generate_cc_arrays( + name = "generated_strided_slice3_input0_int16_test_data_hdr", + src = "strided_slice3_input0_int16.csv", + out = "strided_slice3_input0_int16_test_data.h", +) + +generate_cc_arrays( + name = "generated_strided_slice3_golden_int16_test_data_cc", + src = "strided_slice3_golden_int16.csv", + out = "strided_slice3_golden_int16_test_data.cc", +) + +generate_cc_arrays( + name = "generated_strided_slice3_golden_int16_test_data_hdr", + src = "strided_slice3_golden_int16.csv", + out = "strided_slice3_golden_int16_test_data.h", +) + +generate_cc_arrays( + name = "generated_strided_slice4_input0_int16_test_data_cc", + src = "strided_slice4_input0_int16.csv", + out = "strided_slice4_input0_int16_test_data.cc", +) + +generate_cc_arrays( + name = "generated_strided_slice4_input0_int16_test_data_hdr", + src = "strided_slice4_input0_int16.csv", + out = "strided_slice4_input0_int16_test_data.h", +) + 
+generate_cc_arrays( + name = "generated_strided_slice4_golden_int16_test_data_cc", + src = "strided_slice4_golden_int16.csv", + out = "strided_slice4_golden_int16_test_data.cc", +) + +generate_cc_arrays( + name = "generated_strided_slice4_golden_int16_test_data_hdr", + src = "strided_slice4_golden_int16.csv", + out = "strided_slice4_golden_int16_test_data.h", +) + +generate_cc_arrays( + name = "generated_strided_slice5_input0_int16_test_data_cc", + src = "strided_slice5_input0_int16.csv", + out = "strided_slice5_input0_int16_test_data.cc", +) + +generate_cc_arrays( + name = "generated_strided_slice5_input0_int16_test_data_hdr", + src = "strided_slice5_input0_int16.csv", + out = "strided_slice5_input0_int16_test_data.h", +) + +generate_cc_arrays( + name = "generated_strided_slice5_golden_int16_test_data_cc", + src = "strided_slice5_golden_int16.csv", + out = "strided_slice5_golden_int16_test_data.cc", +) + +generate_cc_arrays( + name = "generated_strided_slice5_golden_int16_test_data_hdr", + src = "strided_slice5_golden_int16.csv", + out = "strided_slice5_golden_int16_test_data.h", +) + +generate_cc_arrays( + name = "generated_strided_slice6_input0_int16_test_data_cc", + src = "strided_slice6_input0_int16.csv", + out = "strided_slice6_input0_int16_test_data.cc", +) + +generate_cc_arrays( + name = "generated_strided_slice6_input0_int16_test_data_hdr", + src = "strided_slice6_input0_int16.csv", + out = "strided_slice6_input0_int16_test_data.h", +) + +generate_cc_arrays( + name = "generated_strided_slice6_golden_int16_test_data_cc", + src = "strided_slice6_golden_int16.csv", + out = "strided_slice6_golden_int16_test_data.cc", +) + +generate_cc_arrays( + name = "generated_strided_slice6_golden_int16_test_data_hdr", + src = "strided_slice6_golden_int16.csv", + out = "strided_slice6_golden_int16_test_data.h", +) + +generate_cc_arrays( + name = "generated_strided_slice7_input0_int16_test_data_cc", + src = "strided_slice7_input0_int16.csv", + out = 
"strided_slice7_input0_int16_test_data.cc", +) + +generate_cc_arrays( + name = "generated_strided_slice7_input0_int16_test_data_hdr", + src = "strided_slice7_input0_int16.csv", + out = "strided_slice7_input0_int16_test_data.h", +) + +generate_cc_arrays( + name = "generated_strided_slice7_golden_int16_test_data_cc", + src = "strided_slice7_golden_int16.csv", + out = "strided_slice7_golden_int16_test_data.cc", +) + +generate_cc_arrays( + name = "generated_strided_slice7_golden_int16_test_data_hdr", + src = "strided_slice7_golden_int16.csv", + out = "strided_slice7_golden_int16_test_data.h", +) + +generate_cc_arrays( + name = "generated_strided_slice8_input0_int16_test_data_cc", + src = "strided_slice8_input0_int16.csv", + out = "strided_slice8_input0_int16_test_data.cc", +) + +generate_cc_arrays( + name = "generated_strided_slice8_input0_int16_test_data_hdr", + src = "strided_slice8_input0_int16.csv", + out = "strided_slice8_input0_int16_test_data.h", +) + +generate_cc_arrays( + name = "generated_strided_slice8_golden_int16_test_data_cc", + src = "strided_slice8_golden_int16.csv", + out = "strided_slice8_golden_int16_test_data.cc", +) + +generate_cc_arrays( + name = "generated_strided_slice8_golden_int16_test_data_hdr", + src = "strided_slice8_golden_int16.csv", + out = "strided_slice8_golden_int16_test_data.h", +) + +generate_cc_arrays( + name = "generated_strided_slice9_input0_int16_test_data_cc", + src = "strided_slice9_input0_int16.csv", + out = "strided_slice9_input0_int16_test_data.cc", +) + +generate_cc_arrays( + name = "generated_strided_slice9_input0_int16_test_data_hdr", + src = "strided_slice9_input0_int16.csv", + out = "strided_slice9_input0_int16_test_data.h", +) + +generate_cc_arrays( + name = "generated_strided_slice9_golden_int16_test_data_cc", + src = "strided_slice9_golden_int16.csv", + out = "strided_slice9_golden_int16_test_data.cc", +) + +generate_cc_arrays( + name = "generated_strided_slice9_golden_int16_test_data_hdr", + src = 
"strided_slice9_golden_int16.csv", + out = "strided_slice9_golden_int16_test_data.h", +) + +generate_cc_arrays( + name = "generated_strided_slice10_input0_int16_test_data_cc", + src = "strided_slice10_input0_int16.csv", + out = "strided_slice10_input0_int16_test_data.cc", +) + +generate_cc_arrays( + name = "generated_strided_slice10_input0_int16_test_data_hdr", + src = "strided_slice10_input0_int16.csv", + out = "strided_slice10_input0_int16_test_data.h", +) + +generate_cc_arrays( + name = "generated_strided_slice10_golden_int16_test_data_cc", + src = "strided_slice10_golden_int16.csv", + out = "strided_slice10_golden_int16_test_data.cc", +) + +generate_cc_arrays( + name = "generated_strided_slice10_golden_int16_test_data_hdr", + src = "strided_slice10_golden_int16.csv", + out = "strided_slice10_golden_int16_test_data.h", +) + +generate_cc_arrays( + name = "generated_strided_slice11_input0_int16_test_data_cc", + src = "strided_slice11_input0_int16.csv", + out = "strided_slice11_input0_int16_test_data.cc", +) + +generate_cc_arrays( + name = "generated_strided_slice11_input0_int16_test_data_hdr", + src = "strided_slice11_input0_int16.csv", + out = "strided_slice11_input0_int16_test_data.h", +) + +generate_cc_arrays( + name = "generated_strided_slice11_golden_int16_test_data_cc", + src = "strided_slice11_golden_int16.csv", + out = "strided_slice11_golden_int16_test_data.cc", +) + +generate_cc_arrays( + name = "generated_strided_slice11_golden_int16_test_data_hdr", + src = "strided_slice11_golden_int16.csv", + out = "strided_slice11_golden_int16_test_data.h", +) + +generate_cc_arrays( + name = "generated_strided_slice12_input0_int16_test_data_cc", + src = "strided_slice12_input0_int16.csv", + out = "strided_slice12_input0_int16_test_data.cc", +) + +generate_cc_arrays( + name = "generated_strided_slice12_input0_int16_test_data_hdr", + src = "strided_slice12_input0_int16.csv", + out = "strided_slice12_input0_int16_test_data.h", +) + +generate_cc_arrays( + name = 
"generated_strided_slice12_golden_int16_test_data_cc", + src = "strided_slice12_golden_int16.csv", + out = "strided_slice12_golden_int16_test_data.cc", +) + +generate_cc_arrays( + name = "generated_strided_slice12_golden_int16_test_data_hdr", + src = "strided_slice12_golden_int16.csv", + out = "strided_slice12_golden_int16_test_data.h", +) + +generate_cc_arrays( + name = "generated_strided_slice13_input0_int16_test_data_cc", + src = "strided_slice13_input0_int16.csv", + out = "strided_slice13_input0_int16_test_data.cc", +) + +generate_cc_arrays( + name = "generated_strided_slice13_input0_int16_test_data_hdr", + src = "strided_slice13_input0_int16.csv", + out = "strided_slice13_input0_int16_test_data.h", +) + +generate_cc_arrays( + name = "generated_strided_slice13_golden_int16_test_data_cc", + src = "strided_slice13_golden_int16.csv", + out = "strided_slice13_golden_int16_test_data.cc", +) + +generate_cc_arrays( + name = "generated_strided_slice13_golden_int16_test_data_hdr", + src = "strided_slice13_golden_int16.csv", + out = "strided_slice13_golden_int16_test_data.h", +) + +generate_cc_arrays( + name = "generated_strided_slice14_input0_int16_test_data_cc", + src = "strided_slice14_input0_int16.csv", + out = "strided_slice14_input0_int16_test_data.cc", +) + +generate_cc_arrays( + name = "generated_strided_slice14_input0_int16_test_data_hdr", + src = "strided_slice14_input0_int16.csv", + out = "strided_slice14_input0_int16_test_data.h", +) + +generate_cc_arrays( + name = "generated_strided_slice14_golden_int16_test_data_cc", + src = "strided_slice14_golden_int16.csv", + out = "strided_slice14_golden_int16_test_data.cc", +) + +generate_cc_arrays( + name = "generated_strided_slice14_golden_int16_test_data_hdr", + src = "strided_slice14_golden_int16.csv", + out = "strided_slice14_golden_int16_test_data.h", +) + +generate_cc_arrays( + name = "generated_strided_slice15_input0_int16_test_data_cc", + src = "strided_slice15_input0_int16.csv", + out = 
"strided_slice15_input0_int16_test_data.cc", +) + +generate_cc_arrays( + name = "generated_strided_slice15_input0_int16_test_data_hdr", + src = "strided_slice15_input0_int16.csv", + out = "strided_slice15_input0_int16_test_data.h", +) + +generate_cc_arrays( + name = "generated_strided_slice15_golden_int16_test_data_cc", + src = "strided_slice15_golden_int16.csv", + out = "strided_slice15_golden_int16_test_data.cc", +) + +generate_cc_arrays( + name = "generated_strided_slice15_golden_int16_test_data_hdr", + src = "strided_slice15_golden_int16.csv", + out = "strided_slice15_golden_int16_test_data.h", +) + +generate_cc_arrays( + name = "generated_strided_slice16_input0_int16_test_data_cc", + src = "strided_slice16_input0_int16.csv", + out = "strided_slice16_input0_int16_test_data.cc", +) + +generate_cc_arrays( + name = "generated_strided_slice16_input0_int16_test_data_hdr", + src = "strided_slice16_input0_int16.csv", + out = "strided_slice16_input0_int16_test_data.h", +) + +generate_cc_arrays( + name = "generated_strided_slice16_golden_int16_test_data_cc", + src = "strided_slice16_golden_int16.csv", + out = "strided_slice16_golden_int16_test_data.cc", +) + +generate_cc_arrays( + name = "generated_strided_slice16_golden_int16_test_data_hdr", + src = "strided_slice16_golden_int16.csv", + out = "strided_slice16_golden_int16_test_data.h", +) + +generate_cc_arrays( + name = "generated_strided_slice17_input0_int16_test_data_cc", + src = "strided_slice17_input0_int16.csv", + out = "strided_slice17_input0_int16_test_data.cc", +) + +generate_cc_arrays( + name = "generated_strided_slice17_input0_int16_test_data_hdr", + src = "strided_slice17_input0_int16.csv", + out = "strided_slice17_input0_int16_test_data.h", +) + +generate_cc_arrays( + name = "generated_strided_slice17_golden_int16_test_data_cc", + src = "strided_slice17_golden_int16.csv", + out = "strided_slice17_golden_int16_test_data.cc", +) + +generate_cc_arrays( + name = 
"generated_strided_slice17_golden_int16_test_data_hdr", + src = "strided_slice17_golden_int16.csv", + out = "strided_slice17_golden_int16_test_data.h", +) + +generate_cc_arrays( + name = "generated_strided_slice18_input0_int16_test_data_cc", + src = "strided_slice18_input0_int16.csv", + out = "strided_slice18_input0_int16_test_data.cc", +) + +generate_cc_arrays( + name = "generated_strided_slice18_input0_int16_test_data_hdr", + src = "strided_slice18_input0_int16.csv", + out = "strided_slice18_input0_int16_test_data.h", +) + +generate_cc_arrays( + name = "generated_strided_slice18_golden_int16_test_data_cc", + src = "strided_slice18_golden_int16.csv", + out = "strided_slice18_golden_int16_test_data.cc", +) + +generate_cc_arrays( + name = "generated_strided_slice18_golden_int16_test_data_hdr", + src = "strided_slice18_golden_int16.csv", + out = "strided_slice18_golden_int16_test_data.h", +) + +generate_cc_arrays( + name = "generated_strided_slice19_input0_int16_test_data_cc", + src = "strided_slice19_input0_int16.csv", + out = "strided_slice19_input0_int16_test_data.cc", +) + +generate_cc_arrays( + name = "generated_strided_slice19_input0_int16_test_data_hdr", + src = "strided_slice19_input0_int16.csv", + out = "strided_slice19_input0_int16_test_data.h", +) + +generate_cc_arrays( + name = "generated_strided_slice19_golden_int16_test_data_cc", + src = "strided_slice19_golden_int16.csv", + out = "strided_slice19_golden_int16_test_data.cc", +) + +generate_cc_arrays( + name = "generated_strided_slice19_golden_int16_test_data_hdr", + src = "strided_slice19_golden_int16.csv", + out = "strided_slice19_golden_int16_test_data.h", +) + +generate_cc_arrays( + name = "generated_strided_slice20_input0_int16_test_data_cc", + src = "strided_slice20_input0_int16.csv", + out = "strided_slice20_input0_int16_test_data.cc", +) + +generate_cc_arrays( + name = "generated_strided_slice20_input0_int16_test_data_hdr", + src = "strided_slice20_input0_int16.csv", + out = 
"strided_slice20_input0_int16_test_data.h", +) + +generate_cc_arrays( + name = "generated_strided_slice20_golden_int16_test_data_cc", + src = "strided_slice20_golden_int16.csv", + out = "strided_slice20_golden_int16_test_data.cc", +) + +generate_cc_arrays( + name = "generated_strided_slice20_golden_int16_test_data_hdr", + src = "strided_slice20_golden_int16.csv", + out = "strided_slice20_golden_int16_test_data.h", +) + +generate_cc_arrays( + name = "generated_strided_slice21_input0_int16_test_data_cc", + src = "strided_slice21_input0_int16.csv", + out = "strided_slice21_input0_int16_test_data.cc", +) + +generate_cc_arrays( + name = "generated_strided_slice21_input0_int16_test_data_hdr", + src = "strided_slice21_input0_int16.csv", + out = "strided_slice21_input0_int16_test_data.h", +) + +generate_cc_arrays( + name = "generated_strided_slice21_golden_int16_test_data_cc", + src = "strided_slice21_golden_int16.csv", + out = "strided_slice21_golden_int16_test_data.cc", +) + +generate_cc_arrays( + name = "generated_strided_slice21_golden_int16_test_data_hdr", + src = "strided_slice21_golden_int16.csv", + out = "strided_slice21_golden_int16_test_data.h", +) + +generate_cc_arrays( + name = "generated_strided_slice22_input0_int16_test_data_cc", + src = "strided_slice22_input0_int16.csv", + out = "strided_slice22_input0_int16_test_data.cc", +) + +generate_cc_arrays( + name = "generated_strided_slice22_input0_int16_test_data_hdr", + src = "strided_slice22_input0_int16.csv", + out = "strided_slice22_input0_int16_test_data.h", +) + +generate_cc_arrays( + name = "generated_strided_slice22_golden_int16_test_data_cc", + src = "strided_slice22_golden_int16.csv", + out = "strided_slice22_golden_int16_test_data.cc", +) + +generate_cc_arrays( + name = "generated_strided_slice22_golden_int16_test_data_hdr", + src = "strided_slice22_golden_int16.csv", + out = "strided_slice22_golden_int16_test_data.h", +) + +generate_cc_arrays( + name = 
"generated_strided_slice23_input0_int16_test_data_cc", + src = "strided_slice23_input0_int16.csv", + out = "strided_slice23_input0_int16_test_data.cc", +) + +generate_cc_arrays( + name = "generated_strided_slice23_input0_int16_test_data_hdr", + src = "strided_slice23_input0_int16.csv", + out = "strided_slice23_input0_int16_test_data.h", +) + +generate_cc_arrays( + name = "generated_strided_slice23_golden_int16_test_data_cc", + src = "strided_slice23_golden_int16.csv", + out = "strided_slice23_golden_int16_test_data.cc", +) + +generate_cc_arrays( + name = "generated_strided_slice23_golden_int16_test_data_hdr", + src = "strided_slice23_golden_int16.csv", + out = "strided_slice23_golden_int16_test_data.h", +) + +generate_cc_arrays( + name = "generated_strided_slice24_input0_int16_test_data_cc", + src = "strided_slice24_input0_int16.csv", + out = "strided_slice24_input0_int16_test_data.cc", +) + +generate_cc_arrays( + name = "generated_strided_slice24_input0_int16_test_data_hdr", + src = "strided_slice24_input0_int16.csv", + out = "strided_slice24_input0_int16_test_data.h", +) + +generate_cc_arrays( + name = "generated_strided_slice24_golden_int16_test_data_cc", + src = "strided_slice24_golden_int16.csv", + out = "strided_slice24_golden_int16_test_data.cc", +) + +generate_cc_arrays( + name = "generated_strided_slice24_golden_int16_test_data_hdr", + src = "strided_slice24_golden_int16.csv", + out = "strided_slice24_golden_int16_test_data.h", +) + +generate_cc_arrays( + name = "generated_strided_slice25_input0_int16_test_data_cc", + src = "strided_slice25_input0_int16.csv", + out = "strided_slice25_input0_int16_test_data.cc", +) + +generate_cc_arrays( + name = "generated_strided_slice25_input0_int16_test_data_hdr", + src = "strided_slice25_input0_int16.csv", + out = "strided_slice25_input0_int16_test_data.h", +) + +generate_cc_arrays( + name = "generated_strided_slice25_golden_int16_test_data_cc", + src = "strided_slice25_golden_int16.csv", + out = 
"strided_slice25_golden_int16_test_data.cc", +) + +generate_cc_arrays( + name = "generated_strided_slice25_golden_int16_test_data_hdr", + src = "strided_slice25_golden_int16.csv", + out = "strided_slice25_golden_int16_test_data.h", +) + +generate_cc_arrays( + name = "generated_strided_slice26_input0_int16_test_data_cc", + src = "strided_slice26_input0_int16.csv", + out = "strided_slice26_input0_int16_test_data.cc", +) + +generate_cc_arrays( + name = "generated_strided_slice26_input0_int16_test_data_hdr", + src = "strided_slice26_input0_int16.csv", + out = "strided_slice26_input0_int16_test_data.h", +) + +generate_cc_arrays( + name = "generated_strided_slice26_golden_int16_test_data_cc", + src = "strided_slice26_golden_int16.csv", + out = "strided_slice26_golden_int16_test_data.cc", +) + +generate_cc_arrays( + name = "generated_strided_slice26_golden_int16_test_data_hdr", + src = "strided_slice26_golden_int16.csv", + out = "strided_slice26_golden_int16_test_data.h", +) + +generate_cc_arrays( + name = "generated_strided_slice27_input0_int16_test_data_cc", + src = "strided_slice27_input0_int16.csv", + out = "strided_slice27_input0_int16_test_data.cc", +) + +generate_cc_arrays( + name = "generated_strided_slice27_input0_int16_test_data_hdr", + src = "strided_slice27_input0_int16.csv", + out = "strided_slice27_input0_int16_test_data.h", +) + +generate_cc_arrays( + name = "generated_strided_slice27_golden_int16_test_data_cc", + src = "strided_slice27_golden_int16.csv", + out = "strided_slice27_golden_int16_test_data.cc", +) + +generate_cc_arrays( + name = "generated_strided_slice27_golden_int16_test_data_hdr", + src = "strided_slice27_golden_int16.csv", + out = "strided_slice27_golden_int16_test_data.h", +) + +generate_cc_arrays( + name = "generated_strided_slice28_input0_int16_test_data_cc", + src = "strided_slice28_input0_int16.csv", + out = "strided_slice28_input0_int16_test_data.cc", +) + +generate_cc_arrays( + name = 
"generated_strided_slice28_input0_int16_test_data_hdr", + src = "strided_slice28_input0_int16.csv", + out = "strided_slice28_input0_int16_test_data.h", +) + +generate_cc_arrays( + name = "generated_strided_slice28_golden_int16_test_data_cc", + src = "strided_slice28_golden_int16.csv", + out = "strided_slice28_golden_int16_test_data.cc", +) + +generate_cc_arrays( + name = "generated_strided_slice28_golden_int16_test_data_hdr", + src = "strided_slice28_golden_int16.csv", + out = "strided_slice28_golden_int16_test_data.h", +) + +generate_cc_arrays( + name = "generated_strided_slice29_input0_int16_test_data_cc", + src = "strided_slice29_input0_int16.csv", + out = "strided_slice29_input0_int16_test_data.cc", +) + +generate_cc_arrays( + name = "generated_strided_slice29_input0_int16_test_data_hdr", + src = "strided_slice29_input0_int16.csv", + out = "strided_slice29_input0_int16_test_data.h", +) + +generate_cc_arrays( + name = "generated_strided_slice29_golden_int16_test_data_cc", + src = "strided_slice29_golden_int16.csv", + out = "strided_slice29_golden_int16_test_data.cc", +) + +generate_cc_arrays( + name = "generated_strided_slice29_golden_int16_test_data_hdr", + src = "strided_slice29_golden_int16.csv", + out = "strided_slice29_golden_int16_test_data.h", +) + +generate_cc_arrays( + name = "generated_strided_slice30_input0_int16_test_data_cc", + src = "strided_slice30_input0_int16.csv", + out = "strided_slice30_input0_int16_test_data.cc", +) + +generate_cc_arrays( + name = "generated_strided_slice30_input0_int16_test_data_hdr", + src = "strided_slice30_input0_int16.csv", + out = "strided_slice30_input0_int16_test_data.h", +) + +generate_cc_arrays( + name = "generated_strided_slice30_golden_int16_test_data_cc", + src = "strided_slice30_golden_int16.csv", + out = "strided_slice30_golden_int16_test_data.cc", +) + +generate_cc_arrays( + name = "generated_strided_slice30_golden_int16_test_data_hdr", + src = "strided_slice30_golden_int16.csv", + out = 
"strided_slice30_golden_int16_test_data.h", +) + +generate_cc_arrays( + name = "generated_strided_slice31_input0_int16_test_data_cc", + src = "strided_slice31_input0_int16.csv", + out = "strided_slice31_input0_int16_test_data.cc", +) + +generate_cc_arrays( + name = "generated_strided_slice31_input0_int16_test_data_hdr", + src = "strided_slice31_input0_int16.csv", + out = "strided_slice31_input0_int16_test_data.h", +) + +generate_cc_arrays( + name = "generated_strided_slice31_golden_int16_test_data_cc", + src = "strided_slice31_golden_int16.csv", + out = "strided_slice31_golden_int16_test_data.cc", +) + +generate_cc_arrays( + name = "generated_strided_slice31_golden_int16_test_data_hdr", + src = "strided_slice31_golden_int16.csv", + out = "strided_slice31_golden_int16_test_data.h", +) + +generate_cc_arrays( + name = "generated_strided_slice32_input0_int16_test_data_cc", + src = "strided_slice32_input0_int16.csv", + out = "strided_slice32_input0_int16_test_data.cc", +) + +generate_cc_arrays( + name = "generated_strided_slice32_input0_int16_test_data_hdr", + src = "strided_slice32_input0_int16.csv", + out = "strided_slice32_input0_int16_test_data.h", +) + +generate_cc_arrays( + name = "generated_strided_slice32_golden_int16_test_data_cc", + src = "strided_slice32_golden_int16.csv", + out = "strided_slice32_golden_int16_test_data.cc", +) + +generate_cc_arrays( + name = "generated_strided_slice32_golden_int16_test_data_hdr", + src = "strided_slice32_golden_int16.csv", + out = "strided_slice32_golden_int16_test_data.h", +) + +generate_cc_arrays( + name = "generated_strided_slice33_input0_int16_test_data_cc", + src = "strided_slice33_input0_int16.csv", + out = "strided_slice33_input0_int16_test_data.cc", +) + +generate_cc_arrays( + name = "generated_strided_slice33_input0_int16_test_data_hdr", + src = "strided_slice33_input0_int16.csv", + out = "strided_slice33_input0_int16_test_data.h", +) + +generate_cc_arrays( + name = 
"generated_strided_slice33_golden_int16_test_data_cc", + src = "strided_slice33_golden_int16.csv", + out = "strided_slice33_golden_int16_test_data.cc", +) + +generate_cc_arrays( + name = "generated_strided_slice33_golden_int16_test_data_hdr", + src = "strided_slice33_golden_int16.csv", + out = "strided_slice33_golden_int16_test_data.h", +) + +cc_library( + name = "models_and_testdata", + srcs = [ + "generated_strided_slice0_golden_int16_test_data_cc", + "generated_strided_slice0_input0_int16_test_data_cc", + "generated_strided_slice0_model_data_cc", + "generated_strided_slice10_golden_int16_test_data_cc", + "generated_strided_slice10_input0_int16_test_data_cc", + "generated_strided_slice10_model_data_cc", + "generated_strided_slice11_golden_int16_test_data_cc", + "generated_strided_slice11_input0_int16_test_data_cc", + "generated_strided_slice11_model_data_cc", + "generated_strided_slice12_golden_int16_test_data_cc", + "generated_strided_slice12_input0_int16_test_data_cc", + "generated_strided_slice12_model_data_cc", + "generated_strided_slice13_golden_int16_test_data_cc", + "generated_strided_slice13_input0_int16_test_data_cc", + "generated_strided_slice13_model_data_cc", + "generated_strided_slice14_golden_int16_test_data_cc", + "generated_strided_slice14_input0_int16_test_data_cc", + "generated_strided_slice14_model_data_cc", + "generated_strided_slice15_golden_int16_test_data_cc", + "generated_strided_slice15_input0_int16_test_data_cc", + "generated_strided_slice15_model_data_cc", + "generated_strided_slice16_golden_int16_test_data_cc", + "generated_strided_slice16_input0_int16_test_data_cc", + "generated_strided_slice16_model_data_cc", + "generated_strided_slice17_golden_int16_test_data_cc", + "generated_strided_slice17_input0_int16_test_data_cc", + "generated_strided_slice17_model_data_cc", + "generated_strided_slice18_golden_int16_test_data_cc", + "generated_strided_slice18_input0_int16_test_data_cc", + "generated_strided_slice18_model_data_cc", + 
"generated_strided_slice19_golden_int16_test_data_cc", + "generated_strided_slice19_input0_int16_test_data_cc", + "generated_strided_slice19_model_data_cc", + "generated_strided_slice1_golden_int16_test_data_cc", + "generated_strided_slice1_input0_int16_test_data_cc", + "generated_strided_slice1_model_data_cc", + "generated_strided_slice20_golden_int16_test_data_cc", + "generated_strided_slice20_input0_int16_test_data_cc", + "generated_strided_slice20_model_data_cc", + "generated_strided_slice21_golden_int16_test_data_cc", + "generated_strided_slice21_input0_int16_test_data_cc", + "generated_strided_slice21_model_data_cc", + "generated_strided_slice22_golden_int16_test_data_cc", + "generated_strided_slice22_input0_int16_test_data_cc", + "generated_strided_slice22_model_data_cc", + "generated_strided_slice23_golden_int16_test_data_cc", + "generated_strided_slice23_input0_int16_test_data_cc", + "generated_strided_slice23_model_data_cc", + "generated_strided_slice24_golden_int16_test_data_cc", + "generated_strided_slice24_input0_int16_test_data_cc", + "generated_strided_slice24_model_data_cc", + "generated_strided_slice25_golden_int16_test_data_cc", + "generated_strided_slice25_input0_int16_test_data_cc", + "generated_strided_slice25_model_data_cc", + "generated_strided_slice26_golden_int16_test_data_cc", + "generated_strided_slice26_input0_int16_test_data_cc", + "generated_strided_slice26_model_data_cc", + "generated_strided_slice27_golden_int16_test_data_cc", + "generated_strided_slice27_input0_int16_test_data_cc", + "generated_strided_slice27_model_data_cc", + "generated_strided_slice28_golden_int16_test_data_cc", + "generated_strided_slice28_input0_int16_test_data_cc", + "generated_strided_slice28_model_data_cc", + "generated_strided_slice29_golden_int16_test_data_cc", + "generated_strided_slice29_input0_int16_test_data_cc", + "generated_strided_slice29_model_data_cc", + "generated_strided_slice2_golden_int16_test_data_cc", + 
"generated_strided_slice2_input0_int16_test_data_cc", + "generated_strided_slice2_model_data_cc", + "generated_strided_slice30_golden_int16_test_data_cc", + "generated_strided_slice30_input0_int16_test_data_cc", + "generated_strided_slice30_model_data_cc", + "generated_strided_slice31_golden_int16_test_data_cc", + "generated_strided_slice31_input0_int16_test_data_cc", + "generated_strided_slice31_model_data_cc", + "generated_strided_slice32_golden_int16_test_data_cc", + "generated_strided_slice32_input0_int16_test_data_cc", + "generated_strided_slice32_model_data_cc", + "generated_strided_slice33_golden_int16_test_data_cc", + "generated_strided_slice33_input0_int16_test_data_cc", + "generated_strided_slice33_model_data_cc", + "generated_strided_slice3_golden_int16_test_data_cc", + "generated_strided_slice3_input0_int16_test_data_cc", + "generated_strided_slice3_model_data_cc", + "generated_strided_slice4_golden_int16_test_data_cc", + "generated_strided_slice4_input0_int16_test_data_cc", + "generated_strided_slice4_model_data_cc", + "generated_strided_slice5_golden_int16_test_data_cc", + "generated_strided_slice5_input0_int16_test_data_cc", + "generated_strided_slice5_model_data_cc", + "generated_strided_slice6_golden_int16_test_data_cc", + "generated_strided_slice6_input0_int16_test_data_cc", + "generated_strided_slice6_model_data_cc", + "generated_strided_slice7_golden_int16_test_data_cc", + "generated_strided_slice7_input0_int16_test_data_cc", + "generated_strided_slice7_model_data_cc", + "generated_strided_slice8_golden_int16_test_data_cc", + "generated_strided_slice8_input0_int16_test_data_cc", + "generated_strided_slice8_model_data_cc", + "generated_strided_slice9_golden_int16_test_data_cc", + "generated_strided_slice9_input0_int16_test_data_cc", + "generated_strided_slice9_model_data_cc", + ], + hdrs = [ + "generated_strided_slice0_golden_int16_test_data_hdr", + "generated_strided_slice0_input0_int16_test_data_hdr", + 
"generated_strided_slice0_model_data_hdr", + "generated_strided_slice10_golden_int16_test_data_hdr", + "generated_strided_slice10_input0_int16_test_data_hdr", + "generated_strided_slice10_model_data_hdr", + "generated_strided_slice11_golden_int16_test_data_hdr", + "generated_strided_slice11_input0_int16_test_data_hdr", + "generated_strided_slice11_model_data_hdr", + "generated_strided_slice12_golden_int16_test_data_hdr", + "generated_strided_slice12_input0_int16_test_data_hdr", + "generated_strided_slice12_model_data_hdr", + "generated_strided_slice13_golden_int16_test_data_hdr", + "generated_strided_slice13_input0_int16_test_data_hdr", + "generated_strided_slice13_model_data_hdr", + "generated_strided_slice14_golden_int16_test_data_hdr", + "generated_strided_slice14_input0_int16_test_data_hdr", + "generated_strided_slice14_model_data_hdr", + "generated_strided_slice15_golden_int16_test_data_hdr", + "generated_strided_slice15_input0_int16_test_data_hdr", + "generated_strided_slice15_model_data_hdr", + "generated_strided_slice16_golden_int16_test_data_hdr", + "generated_strided_slice16_input0_int16_test_data_hdr", + "generated_strided_slice16_model_data_hdr", + "generated_strided_slice17_golden_int16_test_data_hdr", + "generated_strided_slice17_input0_int16_test_data_hdr", + "generated_strided_slice17_model_data_hdr", + "generated_strided_slice18_golden_int16_test_data_hdr", + "generated_strided_slice18_input0_int16_test_data_hdr", + "generated_strided_slice18_model_data_hdr", + "generated_strided_slice19_golden_int16_test_data_hdr", + "generated_strided_slice19_input0_int16_test_data_hdr", + "generated_strided_slice19_model_data_hdr", + "generated_strided_slice1_golden_int16_test_data_hdr", + "generated_strided_slice1_input0_int16_test_data_hdr", + "generated_strided_slice1_model_data_hdr", + "generated_strided_slice20_golden_int16_test_data_hdr", + "generated_strided_slice20_input0_int16_test_data_hdr", + "generated_strided_slice20_model_data_hdr", + 
"generated_strided_slice21_golden_int16_test_data_hdr", + "generated_strided_slice21_input0_int16_test_data_hdr", + "generated_strided_slice21_model_data_hdr", + "generated_strided_slice22_golden_int16_test_data_hdr", + "generated_strided_slice22_input0_int16_test_data_hdr", + "generated_strided_slice22_model_data_hdr", + "generated_strided_slice23_golden_int16_test_data_hdr", + "generated_strided_slice23_input0_int16_test_data_hdr", + "generated_strided_slice23_model_data_hdr", + "generated_strided_slice24_golden_int16_test_data_hdr", + "generated_strided_slice24_input0_int16_test_data_hdr", + "generated_strided_slice24_model_data_hdr", + "generated_strided_slice25_golden_int16_test_data_hdr", + "generated_strided_slice25_input0_int16_test_data_hdr", + "generated_strided_slice25_model_data_hdr", + "generated_strided_slice26_golden_int16_test_data_hdr", + "generated_strided_slice26_input0_int16_test_data_hdr", + "generated_strided_slice26_model_data_hdr", + "generated_strided_slice27_golden_int16_test_data_hdr", + "generated_strided_slice27_input0_int16_test_data_hdr", + "generated_strided_slice27_model_data_hdr", + "generated_strided_slice28_golden_int16_test_data_hdr", + "generated_strided_slice28_input0_int16_test_data_hdr", + "generated_strided_slice28_model_data_hdr", + "generated_strided_slice29_golden_int16_test_data_hdr", + "generated_strided_slice29_input0_int16_test_data_hdr", + "generated_strided_slice29_model_data_hdr", + "generated_strided_slice2_golden_int16_test_data_hdr", + "generated_strided_slice2_input0_int16_test_data_hdr", + "generated_strided_slice2_model_data_hdr", + "generated_strided_slice30_golden_int16_test_data_hdr", + "generated_strided_slice30_input0_int16_test_data_hdr", + "generated_strided_slice30_model_data_hdr", + "generated_strided_slice31_golden_int16_test_data_hdr", + "generated_strided_slice31_input0_int16_test_data_hdr", + "generated_strided_slice31_model_data_hdr", + "generated_strided_slice32_golden_int16_test_data_hdr", + 
"generated_strided_slice32_input0_int16_test_data_hdr", + "generated_strided_slice32_model_data_hdr", + "generated_strided_slice33_golden_int16_test_data_hdr", + "generated_strided_slice33_input0_int16_test_data_hdr", + "generated_strided_slice33_model_data_hdr", + "generated_strided_slice3_golden_int16_test_data_hdr", + "generated_strided_slice3_input0_int16_test_data_hdr", + "generated_strided_slice3_model_data_hdr", + "generated_strided_slice4_golden_int16_test_data_hdr", + "generated_strided_slice4_input0_int16_test_data_hdr", + "generated_strided_slice4_model_data_hdr", + "generated_strided_slice5_golden_int16_test_data_hdr", + "generated_strided_slice5_input0_int16_test_data_hdr", + "generated_strided_slice5_model_data_hdr", + "generated_strided_slice6_golden_int16_test_data_hdr", + "generated_strided_slice6_input0_int16_test_data_hdr", + "generated_strided_slice6_model_data_hdr", + "generated_strided_slice7_golden_int16_test_data_hdr", + "generated_strided_slice7_input0_int16_test_data_hdr", + "generated_strided_slice7_model_data_hdr", + "generated_strided_slice8_golden_int16_test_data_hdr", + "generated_strided_slice8_input0_int16_test_data_hdr", + "generated_strided_slice8_model_data_hdr", + "generated_strided_slice9_golden_int16_test_data_hdr", + "generated_strided_slice9_input0_int16_test_data_hdr", + "generated_strided_slice9_model_data_hdr", + ], + copts = micro_copts(), +) + +cc_test( + name = "integration_test", + srcs = [ + "integration_tests.cc", + ], + copts = micro_copts(), + deps = [ + ":models_and_testdata", + "//python/tflite_micro:python_ops_resolver", + "//tensorflow/lite/micro:micro_framework", + "//tensorflow/lite/micro:micro_log", + "//tensorflow/lite/micro:micro_resource_variable", + "//tensorflow/lite/micro:op_resolvers", + "//tensorflow/lite/micro:recording_allocators", + "//tensorflow/lite/micro/testing:micro_test", + ], +) diff --git a/tensorflow/lite/micro/integration_tests/seanet/strided_slice/Makefile.inc 
b/tensorflow/lite/micro/integration_tests/seanet/strided_slice/Makefile.inc new file mode 100644 index 0000000..c8ddcae --- /dev/null +++ b/tensorflow/lite/micro/integration_tests/seanet/strided_slice/Makefile.inc @@ -0,0 +1,113 @@ +integration_tests_seanet_strided_slice_GENERATOR_INPUTS := \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice0.tflite \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice1.tflite \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice2.tflite \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice3.tflite \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice4.tflite \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice5.tflite \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice6.tflite \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice7.tflite \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice8.tflite \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice9.tflite \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice10.tflite \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice11.tflite \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice12.tflite \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice13.tflite \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice14.tflite \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice15.tflite \ 
+$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice16.tflite \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice17.tflite \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice18.tflite \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice19.tflite \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice20.tflite \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice21.tflite \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice22.tflite \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice23.tflite \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice24.tflite \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice25.tflite \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice26.tflite \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice27.tflite \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice28.tflite \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice29.tflite \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice30.tflite \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice31.tflite \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice32.tflite \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice33.tflite \ 
+$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice0_input0_int16.csv \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice0_golden_int16.csv \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice1_input0_int16.csv \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice1_golden_int16.csv \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice2_input0_int16.csv \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice2_golden_int16.csv \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice3_input0_int16.csv \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice3_golden_int16.csv \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice4_input0_int16.csv \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice4_golden_int16.csv \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice5_input0_int16.csv \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice5_golden_int16.csv \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice6_input0_int16.csv \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice6_golden_int16.csv \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice7_input0_int16.csv \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice7_golden_int16.csv \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice8_input0_int16.csv \ 
+$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice8_golden_int16.csv \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice9_input0_int16.csv \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice9_golden_int16.csv \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice10_input0_int16.csv \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice10_golden_int16.csv \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice11_input0_int16.csv \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice11_golden_int16.csv \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice12_input0_int16.csv \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice12_golden_int16.csv \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice13_input0_int16.csv \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice13_golden_int16.csv \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice14_input0_int16.csv \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice14_golden_int16.csv \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice15_input0_int16.csv \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice15_golden_int16.csv \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice16_input0_int16.csv \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice16_golden_int16.csv \ 
+$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice17_input0_int16.csv \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice17_golden_int16.csv \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice18_input0_int16.csv \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice18_golden_int16.csv \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice19_input0_int16.csv \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice19_golden_int16.csv \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice20_input0_int16.csv \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice20_golden_int16.csv \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice21_input0_int16.csv \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice21_golden_int16.csv \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice22_input0_int16.csv \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice22_golden_int16.csv \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice23_input0_int16.csv \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice23_golden_int16.csv \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice24_input0_int16.csv \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice24_golden_int16.csv \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice25_input0_int16.csv \ 
+$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice25_golden_int16.csv \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice26_input0_int16.csv \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice26_golden_int16.csv \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice27_input0_int16.csv \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice27_golden_int16.csv \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice28_input0_int16.csv \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice28_golden_int16.csv \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice29_input0_int16.csv \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice29_golden_int16.csv \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice30_input0_int16.csv \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice30_golden_int16.csv \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice31_input0_int16.csv \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice31_golden_int16.csv \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice32_input0_int16.csv \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice32_golden_int16.csv \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice33_input0_int16.csv \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice33_golden_int16.csv \ + 
+integration_tests_seanet_strided_slice_SRCS := \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/strided_slice/integration_tests.cc \ +$(TENSORFLOW_ROOT)python/tflite_micro/python_ops_resolver.cc \ + +integration_tests_seanet_strided_slice_HDR := \ +$(TENSORFLOW_ROOT)python/tflite_micro/python_ops_resolver.h \ + +$(eval $(call microlite_test,integration_tests_seanet_strided_slice_test,\ +$(integration_tests_seanet_strided_slice_SRCS),$(integration_tests_seanet_strided_slice_HDR),$(integration_tests_seanet_strided_slice_GENERATOR_INPUTS))) diff --git a/tensorflow/lite/micro/integration_tests/seanet/strided_slice/integration_tests.cc b/tensorflow/lite/micro/integration_tests/seanet/strided_slice/integration_tests.cc new file mode 100644 index 0000000..d648ee0 --- /dev/null +++ b/tensorflow/lite/micro/integration_tests/seanet/strided_slice/integration_tests.cc @@ -0,0 +1,444 @@ +/* Copyright 2022 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/ + +#include + +#include "python/tflite_micro/python_ops_resolver.h" +#include "tensorflow/lite/c/common.h" +#include "tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice0_golden_int16_test_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice0_input0_int16_test_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice0_model_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice10_golden_int16_test_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice10_input0_int16_test_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice10_model_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice11_golden_int16_test_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice11_input0_int16_test_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice11_model_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice12_golden_int16_test_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice12_input0_int16_test_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice12_model_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice13_golden_int16_test_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice13_input0_int16_test_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice13_model_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice14_golden_int16_test_data.h" +#include 
"tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice14_input0_int16_test_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice14_model_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice15_golden_int16_test_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice15_input0_int16_test_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice15_model_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice16_golden_int16_test_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice16_input0_int16_test_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice16_model_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice17_golden_int16_test_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice17_input0_int16_test_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice17_model_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice18_golden_int16_test_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice18_input0_int16_test_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice18_model_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice19_golden_int16_test_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice19_input0_int16_test_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice19_model_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice1_golden_int16_test_data.h" +#include 
"tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice1_input0_int16_test_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice1_model_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice20_golden_int16_test_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice20_input0_int16_test_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice20_model_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice21_golden_int16_test_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice21_input0_int16_test_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice21_model_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice22_golden_int16_test_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice22_input0_int16_test_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice22_model_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice23_golden_int16_test_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice23_input0_int16_test_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice23_model_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice24_golden_int16_test_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice24_input0_int16_test_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice24_model_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice25_golden_int16_test_data.h" +#include 
"tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice25_input0_int16_test_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice25_model_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice26_golden_int16_test_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice26_input0_int16_test_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice26_model_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice27_golden_int16_test_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice27_input0_int16_test_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice27_model_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice28_golden_int16_test_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice28_input0_int16_test_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice28_model_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice29_golden_int16_test_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice29_input0_int16_test_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice29_model_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice2_golden_int16_test_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice2_input0_int16_test_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice2_model_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice30_golden_int16_test_data.h" +#include 
"tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice30_input0_int16_test_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice30_model_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice31_golden_int16_test_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice31_input0_int16_test_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice31_model_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice32_golden_int16_test_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice32_input0_int16_test_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice32_model_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice33_golden_int16_test_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice33_input0_int16_test_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice33_model_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice3_golden_int16_test_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice3_input0_int16_test_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice3_model_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice4_golden_int16_test_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice4_input0_int16_test_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice4_model_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice5_golden_int16_test_data.h" +#include 
"tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice5_input0_int16_test_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice5_model_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice6_golden_int16_test_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice6_input0_int16_test_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice6_model_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice7_golden_int16_test_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice7_input0_int16_test_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice7_model_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice8_golden_int16_test_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice8_input0_int16_test_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice8_model_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice9_golden_int16_test_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice9_input0_int16_test_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice9_model_data.h" +#include "tensorflow/lite/micro/micro_log.h" +#include "tensorflow/lite/micro/micro_profiler.h" +#include "tensorflow/lite/micro/recording_micro_allocator.h" +#include "tensorflow/lite/micro/recording_micro_interpreter.h" +#include "tensorflow/lite/micro/system_setup.h" +#include "tensorflow/lite/micro/testing/micro_test.h" + +constexpr size_t kTensorArenaSize = 1024 * 100; +uint8_t tensor_arena[kTensorArenaSize]; + +namespace tflite { +namespace micro { +namespace { + +void 
RunModel(const uint8_t* model, const int16_t* input0, + const uint32_t input0_size, const int16_t* golden, + const uint32_t golden_size, const char* name) { + InitializeTarget(); + MicroProfiler profiler; + PythonOpsResolver op_resolver; + + MicroInterpreter interpreter(GetModel(model), op_resolver, tensor_arena, + kTensorArenaSize, nullptr, &profiler); + interpreter.AllocateTensors(); + TfLiteTensor* input_tensor0 = interpreter.input(0); + TF_LITE_MICRO_EXPECT_EQ(input_tensor0->bytes, input0_size * sizeof(int16_t)); + memcpy(interpreter.input(0)->data.raw, input0, input_tensor0->bytes); + if (kTfLiteOk != interpreter.Invoke()) { + TF_LITE_MICRO_EXPECT(false); + return; + } + profiler.Log(); + MicroPrintf(""); + + TfLiteTensor* output_tensor = interpreter.output(0); + TF_LITE_MICRO_EXPECT_EQ(output_tensor->bytes, golden_size * sizeof(int16_t)); + int16_t* output = ::tflite::GetTensorData(output_tensor); + for (uint32_t i = 0; i < golden_size; i++) { + // TODO(b/205046520): Better understand why TfLite and TFLM can sometimes be + // off by 1. 
+ TF_LITE_MICRO_EXPECT_NEAR(golden[i], output[i], 1); + } +} + +} // namespace +} // namespace micro +} // namespace tflite + +TF_LITE_MICRO_TESTS_BEGIN + +TF_LITE_MICRO_TEST(strided_slice0_test) { + tflite::micro::RunModel( + g_strided_slice0_model_data, g_strided_slice0_input0_int16_test_data, + g_strided_slice0_input0_int16_test_data_size, + g_strided_slice0_golden_int16_test_data, + g_strided_slice0_golden_int16_test_data_size, "strided_slice0 test"); +} + +TF_LITE_MICRO_TEST(strided_slice1_test) { + tflite::micro::RunModel( + g_strided_slice1_model_data, g_strided_slice1_input0_int16_test_data, + g_strided_slice1_input0_int16_test_data_size, + g_strided_slice1_golden_int16_test_data, + g_strided_slice1_golden_int16_test_data_size, "strided_slice1 test"); +} + +TF_LITE_MICRO_TEST(strided_slice2_test) { + tflite::micro::RunModel( + g_strided_slice2_model_data, g_strided_slice2_input0_int16_test_data, + g_strided_slice2_input0_int16_test_data_size, + g_strided_slice2_golden_int16_test_data, + g_strided_slice2_golden_int16_test_data_size, "strided_slice2 test"); +} + +TF_LITE_MICRO_TEST(strided_slice3_test) { + tflite::micro::RunModel( + g_strided_slice3_model_data, g_strided_slice3_input0_int16_test_data, + g_strided_slice3_input0_int16_test_data_size, + g_strided_slice3_golden_int16_test_data, + g_strided_slice3_golden_int16_test_data_size, "strided_slice3 test"); +} + +TF_LITE_MICRO_TEST(strided_slice4_test) { + tflite::micro::RunModel( + g_strided_slice4_model_data, g_strided_slice4_input0_int16_test_data, + g_strided_slice4_input0_int16_test_data_size, + g_strided_slice4_golden_int16_test_data, + g_strided_slice4_golden_int16_test_data_size, "strided_slice4 test"); +} + +TF_LITE_MICRO_TEST(strided_slice5_test) { + tflite::micro::RunModel( + g_strided_slice5_model_data, g_strided_slice5_input0_int16_test_data, + g_strided_slice5_input0_int16_test_data_size, + g_strided_slice5_golden_int16_test_data, + g_strided_slice5_golden_int16_test_data_size, 
"strided_slice5 test"); +} + +TF_LITE_MICRO_TEST(strided_slice6_test) { + tflite::micro::RunModel( + g_strided_slice6_model_data, g_strided_slice6_input0_int16_test_data, + g_strided_slice6_input0_int16_test_data_size, + g_strided_slice6_golden_int16_test_data, + g_strided_slice6_golden_int16_test_data_size, "strided_slice6 test"); +} + +TF_LITE_MICRO_TEST(strided_slice7_test) { + tflite::micro::RunModel( + g_strided_slice7_model_data, g_strided_slice7_input0_int16_test_data, + g_strided_slice7_input0_int16_test_data_size, + g_strided_slice7_golden_int16_test_data, + g_strided_slice7_golden_int16_test_data_size, "strided_slice7 test"); +} + +TF_LITE_MICRO_TEST(strided_slice8_test) { + tflite::micro::RunModel( + g_strided_slice8_model_data, g_strided_slice8_input0_int16_test_data, + g_strided_slice8_input0_int16_test_data_size, + g_strided_slice8_golden_int16_test_data, + g_strided_slice8_golden_int16_test_data_size, "strided_slice8 test"); +} + +TF_LITE_MICRO_TEST(strided_slice9_test) { + tflite::micro::RunModel( + g_strided_slice9_model_data, g_strided_slice9_input0_int16_test_data, + g_strided_slice9_input0_int16_test_data_size, + g_strided_slice9_golden_int16_test_data, + g_strided_slice9_golden_int16_test_data_size, "strided_slice9 test"); +} + +TF_LITE_MICRO_TEST(strided_slice10_test) { + tflite::micro::RunModel( + g_strided_slice10_model_data, g_strided_slice10_input0_int16_test_data, + g_strided_slice10_input0_int16_test_data_size, + g_strided_slice10_golden_int16_test_data, + g_strided_slice10_golden_int16_test_data_size, "strided_slice10 test"); +} + +TF_LITE_MICRO_TEST(strided_slice11_test) { + tflite::micro::RunModel( + g_strided_slice11_model_data, g_strided_slice11_input0_int16_test_data, + g_strided_slice11_input0_int16_test_data_size, + g_strided_slice11_golden_int16_test_data, + g_strided_slice11_golden_int16_test_data_size, "strided_slice11 test"); +} + +TF_LITE_MICRO_TEST(strided_slice12_test) { + tflite::micro::RunModel( + 
g_strided_slice12_model_data, g_strided_slice12_input0_int16_test_data, + g_strided_slice12_input0_int16_test_data_size, + g_strided_slice12_golden_int16_test_data, + g_strided_slice12_golden_int16_test_data_size, "strided_slice12 test"); +} + +TF_LITE_MICRO_TEST(strided_slice13_test) { + tflite::micro::RunModel( + g_strided_slice13_model_data, g_strided_slice13_input0_int16_test_data, + g_strided_slice13_input0_int16_test_data_size, + g_strided_slice13_golden_int16_test_data, + g_strided_slice13_golden_int16_test_data_size, "strided_slice13 test"); +} + +TF_LITE_MICRO_TEST(strided_slice14_test) { + tflite::micro::RunModel( + g_strided_slice14_model_data, g_strided_slice14_input0_int16_test_data, + g_strided_slice14_input0_int16_test_data_size, + g_strided_slice14_golden_int16_test_data, + g_strided_slice14_golden_int16_test_data_size, "strided_slice14 test"); +} + +TF_LITE_MICRO_TEST(strided_slice15_test) { + tflite::micro::RunModel( + g_strided_slice15_model_data, g_strided_slice15_input0_int16_test_data, + g_strided_slice15_input0_int16_test_data_size, + g_strided_slice15_golden_int16_test_data, + g_strided_slice15_golden_int16_test_data_size, "strided_slice15 test"); +} + +TF_LITE_MICRO_TEST(strided_slice16_test) { + tflite::micro::RunModel( + g_strided_slice16_model_data, g_strided_slice16_input0_int16_test_data, + g_strided_slice16_input0_int16_test_data_size, + g_strided_slice16_golden_int16_test_data, + g_strided_slice16_golden_int16_test_data_size, "strided_slice16 test"); +} + +TF_LITE_MICRO_TEST(strided_slice17_test) { + tflite::micro::RunModel( + g_strided_slice17_model_data, g_strided_slice17_input0_int16_test_data, + g_strided_slice17_input0_int16_test_data_size, + g_strided_slice17_golden_int16_test_data, + g_strided_slice17_golden_int16_test_data_size, "strided_slice17 test"); +} + +TF_LITE_MICRO_TEST(strided_slice18_test) { + tflite::micro::RunModel( + g_strided_slice18_model_data, g_strided_slice18_input0_int16_test_data, + 
g_strided_slice18_input0_int16_test_data_size, + g_strided_slice18_golden_int16_test_data, + g_strided_slice18_golden_int16_test_data_size, "strided_slice18 test"); +} + +TF_LITE_MICRO_TEST(strided_slice19_test) { + tflite::micro::RunModel( + g_strided_slice19_model_data, g_strided_slice19_input0_int16_test_data, + g_strided_slice19_input0_int16_test_data_size, + g_strided_slice19_golden_int16_test_data, + g_strided_slice19_golden_int16_test_data_size, "strided_slice19 test"); +} + +TF_LITE_MICRO_TEST(strided_slice20_test) { + tflite::micro::RunModel( + g_strided_slice20_model_data, g_strided_slice20_input0_int16_test_data, + g_strided_slice20_input0_int16_test_data_size, + g_strided_slice20_golden_int16_test_data, + g_strided_slice20_golden_int16_test_data_size, "strided_slice20 test"); +} + +TF_LITE_MICRO_TEST(strided_slice21_test) { + tflite::micro::RunModel( + g_strided_slice21_model_data, g_strided_slice21_input0_int16_test_data, + g_strided_slice21_input0_int16_test_data_size, + g_strided_slice21_golden_int16_test_data, + g_strided_slice21_golden_int16_test_data_size, "strided_slice21 test"); +} + +TF_LITE_MICRO_TEST(strided_slice22_test) { + tflite::micro::RunModel( + g_strided_slice22_model_data, g_strided_slice22_input0_int16_test_data, + g_strided_slice22_input0_int16_test_data_size, + g_strided_slice22_golden_int16_test_data, + g_strided_slice22_golden_int16_test_data_size, "strided_slice22 test"); +} + +TF_LITE_MICRO_TEST(strided_slice23_test) { + tflite::micro::RunModel( + g_strided_slice23_model_data, g_strided_slice23_input0_int16_test_data, + g_strided_slice23_input0_int16_test_data_size, + g_strided_slice23_golden_int16_test_data, + g_strided_slice23_golden_int16_test_data_size, "strided_slice23 test"); +} + +TF_LITE_MICRO_TEST(strided_slice24_test) { + tflite::micro::RunModel( + g_strided_slice24_model_data, g_strided_slice24_input0_int16_test_data, + g_strided_slice24_input0_int16_test_data_size, + g_strided_slice24_golden_int16_test_data, + 
g_strided_slice24_golden_int16_test_data_size, "strided_slice24 test"); +} + +TF_LITE_MICRO_TEST(strided_slice25_test) { + tflite::micro::RunModel( + g_strided_slice25_model_data, g_strided_slice25_input0_int16_test_data, + g_strided_slice25_input0_int16_test_data_size, + g_strided_slice25_golden_int16_test_data, + g_strided_slice25_golden_int16_test_data_size, "strided_slice25 test"); +} + +TF_LITE_MICRO_TEST(strided_slice26_test) { + tflite::micro::RunModel( + g_strided_slice26_model_data, g_strided_slice26_input0_int16_test_data, + g_strided_slice26_input0_int16_test_data_size, + g_strided_slice26_golden_int16_test_data, + g_strided_slice26_golden_int16_test_data_size, "strided_slice26 test"); +} + +TF_LITE_MICRO_TEST(strided_slice27_test) { + tflite::micro::RunModel( + g_strided_slice27_model_data, g_strided_slice27_input0_int16_test_data, + g_strided_slice27_input0_int16_test_data_size, + g_strided_slice27_golden_int16_test_data, + g_strided_slice27_golden_int16_test_data_size, "strided_slice27 test"); +} + +TF_LITE_MICRO_TEST(strided_slice28_test) { + tflite::micro::RunModel( + g_strided_slice28_model_data, g_strided_slice28_input0_int16_test_data, + g_strided_slice28_input0_int16_test_data_size, + g_strided_slice28_golden_int16_test_data, + g_strided_slice28_golden_int16_test_data_size, "strided_slice28 test"); +} + +TF_LITE_MICRO_TEST(strided_slice29_test) { + tflite::micro::RunModel( + g_strided_slice29_model_data, g_strided_slice29_input0_int16_test_data, + g_strided_slice29_input0_int16_test_data_size, + g_strided_slice29_golden_int16_test_data, + g_strided_slice29_golden_int16_test_data_size, "strided_slice29 test"); +} + +TF_LITE_MICRO_TEST(strided_slice30_test) { + tflite::micro::RunModel( + g_strided_slice30_model_data, g_strided_slice30_input0_int16_test_data, + g_strided_slice30_input0_int16_test_data_size, + g_strided_slice30_golden_int16_test_data, + g_strided_slice30_golden_int16_test_data_size, "strided_slice30 test"); +} + 
+TF_LITE_MICRO_TEST(strided_slice31_test) { + tflite::micro::RunModel( + g_strided_slice31_model_data, g_strided_slice31_input0_int16_test_data, + g_strided_slice31_input0_int16_test_data_size, + g_strided_slice31_golden_int16_test_data, + g_strided_slice31_golden_int16_test_data_size, "strided_slice31 test"); +} + +TF_LITE_MICRO_TEST(strided_slice32_test) { + tflite::micro::RunModel( + g_strided_slice32_model_data, g_strided_slice32_input0_int16_test_data, + g_strided_slice32_input0_int16_test_data_size, + g_strided_slice32_golden_int16_test_data, + g_strided_slice32_golden_int16_test_data_size, "strided_slice32 test"); +} + +TF_LITE_MICRO_TEST(strided_slice33_test) { + tflite::micro::RunModel( + g_strided_slice33_model_data, g_strided_slice33_input0_int16_test_data, + g_strided_slice33_input0_int16_test_data_size, + g_strided_slice33_golden_int16_test_data, + g_strided_slice33_golden_int16_test_data_size, "strided_slice33 test"); +} + +TF_LITE_MICRO_TESTS_END diff --git a/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice0.tflite b/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice0.tflite new file mode 100644 index 0000000..2abcc1f Binary files /dev/null and b/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice0.tflite differ diff --git a/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice0_golden_int16.csv b/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice0_golden_int16.csv new file mode 100644 index 0000000..c480d56 --- /dev/null +++ b/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice0_golden_int16.csv @@ -0,0 +1 @@ 
+-5197,16022,-11131,24553,12482,-7814,18879,-10292,15379,-30377,19290,-15189,-13893,25085,-8937,18420,-26785,15575,20198,-5215,10005,-17642,-28893,12744,-32113,-9632,11279,13763,26631,-12215,-30950,10434,-27479,-29918,1205,16882,-26973,31478,6342,-10674,-15506,-23497,11396,-1834,22152,14714,22763,28903,4284,-1190,-26802,-5872,20944,-17818,-14805,-19618,458,11807,-12003,-29049,-8479,5784,-330,5568,29803,-12989,18828,-9433,-26464,23843,-15445,29738,-11778,-16750,-23782,11861,-30344,-7107,-20484,10303,26109,32141,-20766,6631,4871,13091,-3887,-13512,18302,4496,-7484,-13476,-31270,27605,-18488,31248,27170,-28233,-17024,-17754,-5378,-25285,5366,8520,22299,7786,-8203,9686,21358,-18288,-23280,-5496,11963,-1507,9400,10734,-13035,3005,8596,-13609,24432,3371,7016,-10486,22544,-21279,29761,-5469,-27370,-18704,-3559,20614,16173,13651,-23111,12620,-17728,32406,2362,-19890,10717,-9592,26740,-11364,-6042,-8061,26188,-28386,-14118,28042,27870,10933,23798,24520,23625,-26460,1056,19119,-20415,-28399,-15161,-16173,-23556,26661,16830,-1099,21539,18514,-8758,25033,-15475,19987,16970,29942,16243,-22449,-7391,6796,-27029,-25943,4685,-7354,-26343,30310,-17608,8786,-14581,13507,-23943,27272,8384,9813,4251,-12782,27749,-27465,-23124,22373,22803,-30586,-12759,-7029,27599,-27336,-23811,16494,-1654,30603,-17785,-8816,12413,5266,-22495,27301,-1946,3813,-26100,-10049,-26168,-14507,-6632,-28546,15871,2169,-18445,-27518,-29072,-32064,-29862,13749,19862,23831,18721,5567,13684,25761,32543,13806,-7511,-31069,6302,-21749,31483,-4230,-17103,-619,208,17871,-5394,25934,-25553,-18743,-13731,-20917,15958,-11334,-1326,22878,-7343,4834,-25134,-14966,-4648,9208,30978,-1970,-23271,-22834,-13175,-23332,-551,19940,2249,22773,20610,-30272,-4006,-443,-9028,30386,29995,13094,3617,-20446,19640,7134,-5649,-6404,-14959,-10230,6865,10235,-27410,15505,-26169,10157,32155,11849,-3433,-24539,2104,-14961,31505,27439,25502,30788,-20659,-7906,5436,-13810,-16777,-5680,30139,12643,12621,588,-23450,-32378,1056,25089,25486,20686,23
654,17825,27836,22568,-25794,13240,-10534,20390,31285,20692,19773,-27841,-3004,21344,-7466,1673,-13685,4637,32246,-18074,-21877,5740,14468,-14934,27105,26359,10483,-25100,-27940,17355,23789,-15092,-21857,29475,-26058,9087,30507,-15929,-9931,17388,-24985,24465,28255,20080,3467,6312,-19271,26681,-4375,25567,-343,217,-1702,2046,6874,-12623,26254,-17275,-14192,15370,-27649,-31652,-31920,26124,-20436,28716,-21074,18856,1906,-27183,-1431,-14994,-6353,-16573,-31508,25910,30282,18403,-16049,26940,21921,21660,-17144,-4947,-29650,6411,-268,-5069,-13452,-5224,-20559,-27694,8282,31228,-4598,-31,-28385,-24946,14897,31096,-27386,-11925,26407,-19594,28917,29943,21211,12165,-4320,-16542,-165,20002,11312,26044,-9793,-7105,6819,15470,10416,28289,-18714,11576,17197,7951,-6349,6990,1656,-14460,28357,-26244,26909,20056,-13105,30948,-30317,20207,24770,5994,25951,3939,26305,-4116,-25790,17615,24495,-23128,20863,-25657,6908,-32227,15799,-31176,-7661,-30447,5289,6431,16492,-18328,-23194,13856,-30485,10216,-4198,-18312,16179,-21372,4023,-823,16164,-9452,11140,18620,10240,-12491,-25243,10914,9165,-14702,21591,-18250,-13045,-7943,12950,1757,-10797,31679,-11651,-7886,-10227,21010,-8358,-17345,397,-3299,20551,18492,4276,22717,12800,19512,23177,-20890,-15536,-22627,-24282,13477,24666,13957,-6767,20978,-758,26684,-18281,-3863,-32471,-14662,-30702,-24640,-975,108,-27635,-4576,-11893,20973,-28113,4920,18311,-6996,-13794,10982,23347,-6270,-16060,17940,-26001,-9643,6045,9920,-13598,19006,25263,25997,3985,7063,-5916,-25504,-12333,8880,23026,20790,-8683,-14204,3036,-7517,-6106,15724,-15625,228,-13847,29161,22659,-30503,-10851,-9516,22465,-5154,-15641,-21958,-20416,-10974,-31349,5164,-9749,23702,12831,23949,2692,-15102,-18737,7613,12985,22572,-4656,-17937,2473,-21892,-17652,4448,-21552,-8382,-13351,-17712,-14262,5073,-31851,-10922,-1333,12722,11839,-9689,4059,468,16302,-12410,-10916,6771,5059,-7028,-27481,-6021,11740,1264,14024,25563,-8341,-28793,29459,20976,24173,-30640,-27633,-10593,-31102,-28149,18937
,-15981,-3298,-19502,26992,22008,16951,30402,6701,-31685,-6039,5873,-11000,19839,-13559,-10920,17974,-20656,-2310,-4655,-14949,9449,-5736,-6114,20144,-2672,6140,-7988,-16719,-22671,-20755,-11646,3474,10942,-9341,8125,-5355,10095,-19361,-30444,5174,7263,15357,2905,-17808,-10468,-10303,-16573,24580,-28700,27684,5491,-27518,-3377,-9933,-11742,-11397,-17567,25941,12675,-12203,-29251,-20159,-15126,22996,3972,-20318,-23055,-9638,30616,-31294,-31654,15670,-12009,8812,20288,17832,-23767,25085,1931,259,-25276,8368,16285,-18865,19708,212,32370,-16491,-3441,-576,-4869,-14174,4042,-9797,-9536,-4952,2092,-903,22337,14598,2262,18882,-3820,13154,10342,18939,18422,17318,22900,7129,32165,-8762,-27972,29465,-25350,26639,-20341,28517,13827,-19218,16913,-13064,-10994,25900,10915,11056,-17572,28603,-1398,13577,29839,18603,-825,-8971,22056,-15814,-14298,-21836,-20148,5383,-27106,-16364,-11580,21207,14974,-12164,-19349,-2997,27581,16392,-5980,-26920,10709,-15426,-6193,-941,-7026,23290,-16160,710,7094,-29339,-17671,-4868,32406,-8707,-22523,16015,26272,2368,-23631,25222,4847,11283,6937,2669,-26728,-29056,18372,18163,-3882,-10505,30525,-10798,6483,31779,-24506,10449,30869,26411,30291,760,-4044,-18592,23962,26489,-17681,-28174,8671,-24165,-1501,-29597,5662,-26433,-27371,22444,7773,-31854,1777,-9371,18422,11213,25517,-3288,8990,-26621,-7469,10462,7544,3683,-11371,-2078,22102,20909,-5897,-5549,-11842,-16310,-27900,-20174,-15821,-14811,14789,26827,22347,18304,2902,-19421,-2174,18902,-22644,-15588,-11824,-20315,10338,12849,-25627,4670,-20627,21741,-8948,-12254,-10584,-9340,18372,29981,-18378,19498,28000,-11778,-5540,21337,6495,8636,-23016,7893,-5943,-21345,25534,3570,-26196,-32724,781,31617,359,23913,-18139,-6251,4602,14777,26988,-7506,7700,-13723,-10010,16544,22486,15387,11572,-1607,-17186,-31163,14438,14163,-31693,4723,-23349,-8384,11062,10147,-24515,-4652,-21720,15872,18396,-11633,-12419,7519,-22382,8809,24021,6851,-11493,25230,12598,-4539,-28152,13400,29119,-15322,2615,16619,30956,23314,10826
,-31009,-16896,24725,-27877,13792,-7048,-2856,-28483,-7787,-19926,5467,11844,3381,1140,-14351,-12249,-20486,5226,8319,11271,26829,-8018,-6852,28871,-13761,-32248,-8598,32305,7072,11650,-24925,7003,13935,-13374,25606,2235,-28861,-1310,-8610,-23034,-32412,-17022,-12659,2229,-6333,18444,-11026,-9720,24948,30568,-32687,28702,-20251,-25666,-22806,13627,-3547,1517,25386,15212,22666,23218,8041,-108,-18867,-5361,-10682,19904,-17454,-28734,9592,-27711,10871,-23628,-21087,3383,18878,30267,-21331,29640,10056,6120,-25959,-32495,-16203,-8998,-4284,-11655,2286,15702,-12587,495,6309,5094,25901,-25530,15844,-30452,-5721,14600,-18143,-5211,-24454,12026,-20986,-31804,26380,-16813,-16467,-8930,-983,-10197,14030,14266,-30940,22678,14078,13142,6352,-6865,25087,-6255,6146,23191,-21467,17087,-18612,-31167,-15714,-3701,-5471,-2897,553,6833,20386,-1991,29253,-23557,-544,-16308,10350,32481,-28996,-31742,27560,2145,-17249,-10213,18498,-5019,-23336,-12827,25734,-21448,4099,25822,19963,5239,-3922,17746,397,2837,20563,-23714,504,-25717,12470,16347,-13946,252,20416,6767,5594,1924,3516,-7824,9875,-21748,6335,26684,-4951,6703,16528,4776,27630,12909,8559,-23151,-746,14364,-10274,31572,-18696,-30220,-23605,-3694,-10956,-122,-18506,-11827,15404,-32612,30674,29829,-5821,15745,3336,-28714,-12573,23424,14210,-29504,-4416,-32367,7247,25796,23269,2871,6248,-27888,21256,12265,-745,16392,-25005,15567,5990,-19973,26577,-27927,9088,4907,6156,5632,21283,20360,6027,12780,11741,-18744,-22134,-24819,-26532,22442,-7175,-31386,-25165,20748,8714,17832,-12153,3132,-19243,4354,28614,30162,-10756,-29558,10625,-19753,22231,-29888,-30758,6814,-4902,29782,-22727,8602,-5182,-13082,-4787,-28753,-4396,-28887,-2606,-17155,14874,25348,-14942,28177,-12215,-12536,14735,13703,-14209,-23046,-19857,17727,-5125,-11855,14906,30032,11987,-10185,-6190,-15592,20571,19024,6610,24276,-10692,13819,5237,-15731,24274,-17101,29790,13402,-281,27952,-299,-28279,6450,-20388,-11290,26775,-21794,29165,-14352,-30794,-8196,-24701,-12154,-17195,-350,-
28494,22620,11729,29323,-32068,23594,-14686,-10378,-4500,5196,8274,2175,3565,22251,3259,-24095,12437,6870,15157,22676,27115,-13958,8312,-24716,-32300,-31195,4994,-27544,-30972,3580,27711,25088,6169,-12497,22526,13872,-117,16683,9471,24817,-6732,-30028,-32348,-7460,2072,12981,3304,14061,-23754,-3673,-290,-26513,3653,-11544,28734,-14030,-8213,9923,8041,1352,-19452,-27522,22603,-32302,-2967,-32632,31727,-30886,-1341,316,2109,-4707,-10015,19288,20595,-18428,2649,-31880,9458,31810,-32262,13230,-27417,-19625,-6617,30340,-2991,5492,-9258,20883,-25883,-23144,-3013,22716,-25018,2549,5270,-16453,-23099,13419,5738,-1033,27617,-17051,19138,6196,4799,32264,-46,-32641,2943,5510,-26568,-18321,22633,32555,16343,-29022,-1290,-27699,20510,-5400,22452,-19303,-26833,-7475,-21419,-6545,-16578,12786,-18773,32375,8592,30736,20704,-19902,-18299,14622,-6249,17434,-1586,-3415,15903,-22973,-28327,1961,24781,-14553,24613,22663,22964,18867,19422,-3779,-21042,24011,-26524,-26601,20159,21342,-32199,19741,-10689,-27990,12982,15281,-28049,-30511,11300,-32422,-396,29682,-23669,-8893,-26967,2676,10437,12450,-24968,-22399,8412,-25625,-1011,-28363,-3109,32170,-20932,22256,-478,-23764,-15214,12414,9344,7188,21184,-24592,32376,13606,29519,3336,20767,2955,-18945,-14035,31348,20537,-29649,4423,16278,4430,26259,14095,-12765,-15587,-29351,7640,-18821,24243,22877,5981,4008,7080,-15588,13949,-31006,-13993,-14508,-25250,14541,-24314,-9476,5416,3012,8846,-2209,-25390,-19282,-3816,21487,30591,13359,-15963,24448,4167,16705,-5831,24922,-22976,31826,-1058,28320,24789,-17061,-32514,19918,-5286,-26098,-10127,-23991,-7348,6175,-25022,-17446,30737,-11446,-30674,-12162,436,-6050,-15584,7300,-14851,8254,37,-23238,9059,19820,-13712,-14099,-1844,8629,-7125,-12439,3048,7755,-16617,11566,-26431,27088,-11412,-10776,7447,-26514,10254,-30323,2181,32394,7623,-8766,5042,23772,7036,65,17898,28323,22924,1668,21755,-28484,18055,-3539,23509,31389,17109,28167,-13498,11856,-31297,-23056,-28813,-2311,2289,-9233,-23670,25822,-19348,19246,
-9205,-8637,-29391,19690,21921,14491,13150,9846,11722,-19565,-18568,18023,11087,-22306,-19007,2544,-27099,-5230,-7145,-103,-19716,1061,-10427,-25094,32635,23440,-3667,14678,25535,1890,22316,16331,-24115,32284,-22244,5357,-10962,-25646,-7787,-1936,-6511,22437,-13810,-320,-23984,-17403,2679,21388,-7867,-21406,21204,4995,-12418,16937,18345,17266,-17043,-26650,-24796,-15442,5111,-32644,-4220,22572,-10201,-456,-8688,-14969,-7224,20893,-3651,-25816,21266,24768,-16500,31839,-9492,5098,-24525,24561,8768,16015,-8964,8951,3122,11015,-22569,-5220,-12290,23538,28276,-4461,-5669,-736,24391,-10475,-17306,-4286,24023,-21863,26553,19863,-5485,12440,19015,26981,10135,7982,7198,-4228,-7536,28098,16235,-3946,-7412,7639,-20086,-31079,-5690,22293,17876,24528,25670,-7495,578,-5972,25447,24804,27580,21774,21398,9131,-18004,-8818,-20332,20841,26044,-13791,25810,15307,-17056,-863,-28485,-3616,-12271,-2259,7823,-31027,31103,32625,-15582,19807,6189,-1955,15491,31364,28755,-23555,18284,-21595,21142,-30213,12414,31975,-8757,6227,-26371,15974,-24983,-8688,-18211,31045,-15766,31997,23809,-9693,-7797,7916,-16279,-12146,32132,16882,-3871,11647,-25388,-31572,19527,-7177,11241,-10037,3398,-21789,24872,17460,19387,12297,-19591,-27711,-16561,-6062,28347,-15264,-29629,30278,18236,-19063,-12828,-19433,3187,-14337,-17084,-20552,13764,-19648,-13679,12391,28062,29005,10782,-19240,25114,-30892,7344,-26649,1374,-24560,3731,19706,5526,-18643,-5214,31605,11070,-22892,2150,18867,-10961,25703,10146,-17109,-21002,-31679,-24061,-29669,16631,28632,-858,29230,11758,27693,-15490,-17037,25975,-24882,16809,-29501,-3155,-27223,-16200,-7751,29315,4977,10321,-13284,11481,18175,-10503,-18877,-20258,-15138,8600,-14651,29325,2626,-6136,9367,-14929,-18349,-27126,-1135,24328,3764,-23312,16912,30384,-10046,27989,29229,-11812,23929,-6117,10316,-1788,-16843,-30403,29449,23044,6357,29792,3161,5771,6100,29087,3735,-28144,22318,-27587,-21478,5714,3482,-7559,-1382,-2011,-31414,13277,7826,31109,18922,-24509,-31097,17756,-15450,15738,23
04,-10235,24453,-24951,14449,-7628,-30859,-10054,-28707,5914,-11830,12135,14222,-28346,18669,8899,17595,22907,17700,10283,-2354,85,-28892,3572,10007,-28002,-5051,-18182,30035,19293,-8007,-24162,-2722,22973,18558,-23629,-5548,19128,-11996,13105,17171,1406,26900,-21107,-4683 diff --git a/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice0_input0_int16.csv b/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice0_input0_int16.csv new file mode 100644 index 0000000..9830bd2 --- /dev/null +++ b/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice0_input0_int16.csv @@ -0,0 +1 @@ +-1741,-27526,-24803,-26896,-7062,-14384,28147,-17511,28292,4915,18653,11837,7240,-17747,20935,10283,-24718,9622,17965,-21527,12967,22144,25016,-14742,29410,-15846,29857,14186,-24889,-31931,12671,28350,12080,-9560,567,14194,29758,-8865,28770,16329,24473,26782,31943,-26323,-25316,24525,3330,-15291,-29014,3952,10523,16743,12315,-4740,2256,-15013,23847,23683,-18747,-7423,2662,8267,19760,-22419,-6534,18871,18632,4218,-26320,-6314,-4496,-9620,794,19753,-2514,-29100,22675,-31802,26648,-19565,-16310,-31455,29192,19666,-20512,-6521,-13558,24673,13687,-6098,16339,-31342,16382,-3357,29823,-9057,16644,-31901,-26733,26465,21022,-31237,14942,-23176,-22537,-14611,21094,-21466,7843,23059,-691,-4176,32713,30254,19695,6425,2526,29268,1062,-25580,-7674,-24925,-5027,-9602,-16204,-11396,-2470,883,16532,-2095,-13473,-27968,26122,24648,-20018,3809,10963,23571,-4045,-7557,5034,26112,-21510,-19435,28168,29968,27691,-10807,16190,19307,9469,3966,-1137,30420,15471,6917,19325,17728,28284,9061,-20746,4696,-26314,-18784,-31813,28451,17204,3465,1113,-21313,21940,11868,-30898,-2955,25200,1241,26340,-18777,-5195,8178,-11926,-28360,-20311,-111,11805,-4035,-7845,-15803,27052,30377,19217,26744,10313,6219,6893,-13089,-29956,24726,27215,-499,-6820,-16524,27329,28276,28020,13888,1174,-10352,-804,26775,-3190,-3787,3306,-1606,-3320,-20510,311,-21362,30004,-13290,-7662,-185
01,-8020,27036,13777,-27869,7026,25516,-4787,-1358,8414,-32768,3279,25201,32292,-26641,22452,-5856,-1853,9957,-28817,21599,-9277,-31169,6524,11632,-5471,-12182,-28392,-11803,-5501,11945,28324,-18454,-15715,-31539,-15463,10071,8125,6648,28854,-1303,-24233,22792,-31980,-21666,-8816,7379,28707,-24545,-29108,-20685,25239,-6301,-29494,-29733,15245,20050,6431,-5585,-8389,29917,12944,-3881,-30947,7789,21029,24579,-10199,14328,-31557,22695,-14861,4841,20751,-15003,23353,-10274,-30873,-11937,6078,8139,-19355,-2994,21963,14875,-2157,3516,-21178,-15498,-13835,27734,-15299,344,17388,-1592,-4568,13786,-23815,-24829,18033,-9844,22861,-18320,1453,8172,24043,10351,-28850,26541,11811,21823,9227,-14882,12449,-2095,13706,27354,3006,-24554,8319,-29554,-19322,-9057,-10536,-14859,-25948,8562,-8559,18389,32353,11422,5757,-14799,24429,-24319,-3651,27184,16951,-17230,10658,-2333,1649,22250,19959,3044,7987,-9702,-10236,-13885,-4502,17507,-30411,-14116,-20765,-24891,-27073,-19496,-19200,-26631,22915,-23055,-19953,2950,21277,7629,-5949,22492,16825,15008,7818,-31217,-14466,23897,-23923,19782,-23907,-9616,5008,2185,-14998,-14302,24026,-28402,-8063,16878,32154,-20919,-17311,-14848,20279,18169,16385,-21641,1142,22552,-29546,-10002,8255,-18447,18717,31472,8688,-14847,-23433,21583,20293,24258,-25597,-21210,4598,28382,-21481,15993,6303,-19429,11674,-32028,-15162,-12860,-30558,-8256,20405,10550,-9852,5853,29598,27492,9615,25181,12118,29778,-23931,16550,-26423,12976,-10628,-32510,3073,-11030,19561,-2336,15806,619,-9551,-18967,9478,-23134,25323,-23814,-29214,14961,6176,-11997,-13418,32154,24604,1532,-13516,-20804,-31047,-208,-131,7957,5952,-23671,-7148,25296,-15877,1675,-17429,26836,-13784,30590,10535,-14822,91,26523,7008,-24876,8057,-2051,-9718,-21332,-1038,-30578,3229,22238,-32429,10693,-26619,-9290,-15637,-1257,-341,-26639,1798,22780,1621,-21998,-11333,4074,31369,-32132,18837,31500,-19984,-23221,-22095,8639,32480,-3699,28190,14694,-30061,19603,-27333,-23982,16233,26739,8339,-13380,-32409,1400,-10821,
-32391,9206,-8825,18671,-20478,29135,-23918,20177,-20408,-10677,-18547,-12185,27910,-228,17660,8179,-17332,19325,-8742,28303,-27725,1948,18200,-28291,-17998,-17501,12306,-9749,-20571,22395,16731,4500,-21523,16542,-24213,5121,-10293,-19107,-7947,11805,16821,11750,-1324,26128,25802,-26787,6905,-8610,3332,25586,20929,-23856,31259,-18324,-10947,-14298,30717,-1811,-3235,-7080,-31079,6054,27759,7385,27125,17023,4258,-3916,-19351,25205,30087,16433,23810,16459,-21083,13257,29445,-20832,-11974,-12208,2430,-11386,7693,-10705,-28608,1518,8433,2603,8737,-27237,-15357,-21121,-23272,-5407,13726,20483,23958,-5079,96,-21179,-13896,-18974,26551,-14118,-23654,-6219,-6027,30517,-26487,24534,4796,29060,-5168,-24591,20511,17423,19492,-22871,21594,-5197,16022,-11131,24553,12482,-7814,18879,-10292,15379,-30377,19290,-15189,-13893,25085,-8937,18420,-26785,15575,20198,-5215,10005,-17642,-28893,12744,-32113,-9632,11279,13763,26631,-12215,-30950,10434,-27479,-29918,1205,16882,-26973,31478,6342,-10674,-15506,-23497,11396,-1834,22152,14714,22763,28903,4284,-1190,-26802,-5872,20944,-17818,-14805,-19618,458,11807,-12003,-29049,-8479,5784,-330,5568,29803,-12989,18828,-9433,-26464,23843,-15445,29738,-11778,-16750,-23782,11861,-30344,-7107,-20484,10303,26109,32141,-20766,6631,4871,13091,-3887,-13512,18302,4496,-7484,-13476,-31270,27605,-18488,31248,27170,-28233,-17024,-17754,-5378,-25285,5366,8520,22299,7786,-8203,9686,21358,-18288,-23280,-5496,11963,-1507,9400,10734,-13035,3005,8596,-13609,24432,3371,7016,-10486,22544,-21279,29761,-5469,-27370,-18704,-3559,20614,16173,13651,-23111,12620,-17728,32406,2362,-19890,10717,-9592,26740,-11364,-6042,-8061,26188,-28386,-14118,28042,27870,10933,23798,24520,23625,-26460,1056,19119,-20415,-28399,-15161,-16173,-23556,26661,16830,-1099,21539,18514,-8758,25033,-15475,19987,16970,29942,16243,-22449,-7391,6796,-27029,-25943,4685,-7354,-26343,30310,-17608,8786,-14581,13507,-23943,27272,8384,9813,4251,-12782,27749,-27465,-23124,22373,22803,-30586,-12759,-7029,27599,-
27336,-23811,16494,-1654,30603,-17785,-8816,12413,5266,-22495,27301,-1946,3813,-26100,-10049,-26168,-14507,-6632,-28546,15871,2169,-18445,-27518,-29072,-32064,-29862,13749,19862,23831,18721,5567,13684,25761,32543,13806,-7511,-31069,6302,-21749,31483,-4230,-17103,-619,208,17871,-5394,25934,-25553,-18743,-13731,-20917,15958,-11334,-1326,22878,-7343,4834,-25134,-14966,-4648,9208,30978,-1970,-23271,-22834,-13175,-23332,-551,19940,2249,22773,20610,-30272,-4006,-443,-9028,30386,29995,13094,3617,-20446,19640,7134,-5649,-6404,-14959,-10230,6865,10235,-27410,15505,-26169,10157,32155,11849,-3433,-24539,2104,-14961,31505,27439,25502,30788,-20659,-7906,5436,-13810,-16777,-5680,30139,12643,12621,588,-23450,-32378,1056,25089,25486,20686,23654,17825,27836,22568,-25794,13240,-10534,20390,31285,20692,19773,-27841,-3004,21344,-7466,1673,-13685,4637,32246,-18074,-21877,5740,14468,-14934,27105,26359,10483,-25100,-27940,17355,23789,-15092,-21857,29475,-26058,9087,30507,-15929,-9931,17388,-24985,24465,28255,20080,3467,6312,-19271,26681,-4375,25567,-343,217,-1702,2046,6874,-12623,26254,-17275,-14192,15370,-27649,-31652,-31920,26124,-20436,28716,-21074,18856,1906,-27183,-1431,-14994,-6353,-16573,-31508,25910,30282,18403,-16049,26940,21921,21660,-17144,-4947,-29650,6411,-268,-5069,-13452,-5224,-20559,-27694,8282,31228,-4598,-31,-28385,-24946,14897,31096,-27386,-11925,26407,-19594,28917,29943,21211,12165,-4320,-16542,-165,20002,11312,26044,-9793,-7105,6819,15470,10416,28289,-18714,11576,17197,7951,-6349,6990,1656,-14460,28357,-26244,26909,20056,-13105,30948,-30317,20207,24770,5994,25951,3939,26305,-4116,-25790,17615,24495,-23128,20863,-25657,6908,-32227,15799,-31176,-7661,-30447,5289,6431,16492,-18328,-23194,13856,-30485,10216,-4198,-18312,16179,-21372,4023,-823,16164,-9452,11140,18620,10240,-12491,-25243,10914,9165,-14702,21591,-18250,-13045,-7943,12950,1757,-10797,31679,-11651,-7886,-10227,21010,-8358,-17345,397,-3299,20551,18492,4276,22717,12800,19512,23177,-20890,-15536,-22627,-24282,134
77,24666,13957,-6767,20978,-758,26684,-18281,-3863,-32471,-14662,-30702,-24640,-975,108,-27635,-4576,-11893,20973,-28113,4920,18311,-6996,-13794,10982,23347,-6270,-16060,17940,-26001,-9643,6045,9920,-13598,19006,25263,25997,3985,7063,-5916,-25504,-12333,8880,23026,20790,-8683,-14204,3036,-7517,-6106,15724,-15625,228,-13847,29161,22659,-30503,-10851,-9516,22465,-5154,-15641,-21958,-20416,-10974,-31349,5164,-9749,23702,12831,23949,2692,-15102,-18737,7613,12985,22572,-4656,-17937,2473,-21892,-17652,4448,-21552,-8382,-13351,-17712,-14262,5073,-31851,-10922,-1333,12722,11839,-9689,4059,468,16302,-12410,-10916,6771,5059,-7028,-27481,-6021,11740,1264,14024,25563,-8341,-28793,29459,20976,24173,-30640,-27633,-10593,-31102,-28149,18937,-15981,-3298,-19502,26992,22008,16951,30402,6701,-31685,-6039,5873,-11000,19839,-13559,-10920,17974,-20656,-2310,-4655,-14949,9449,-5736,-6114,20144,-2672,6140,-7988,-16719,-22671,-20755,-11646,3474,10942,-9341,8125,-5355,10095,-19361,-30444,5174,7263,15357,2905,-17808,-10468,-10303,-16573,24580,-28700,27684,5491,-27518,-3377,-9933,-11742,-11397,-17567,25941,12675,-12203,-29251,-20159,-15126,22996,3972,-20318,-23055,-9638,30616,-31294,-31654,15670,-12009,8812,20288,17832,-23767,25085,1931,259,-25276,8368,16285,-18865,19708,212,32370,-16491,-3441,-576,-4869,-14174,4042,-9797,-9536,-4952,2092,-903,22337,14598,2262,18882,-3820,13154,10342,18939,18422,17318,22900,7129,32165,-8762,-27972,29465,-25350,26639,-20341,28517,13827,-19218,16913,-13064,-10994,25900,10915,11056,-17572,28603,-1398,13577,29839,18603,-825,-8971,22056,-15814,-14298,-21836,-20148,5383,-27106,-16364,-11580,21207,14974,-12164,-19349,-2997,27581,16392,-5980,-26920,10709,-15426,-6193,-941,-7026,23290,-16160,710,7094,-29339,-17671,-4868,32406,-8707,-22523,16015,26272,2368,-23631,25222,4847,11283,6937,2669,-26728,-29056,18372,18163,-3882,-10505,30525,-10798,6483,31779,-24506,10449,30869,26411,30291,760,-4044,-18592,23962,26489,-17681,-28174,8671,-24165,-1501,-29597,5662,-26433,-27371,2
2444,7773,-31854,1777,-9371,18422,11213,25517,-3288,8990,-26621,-7469,10462,7544,3683,-11371,-2078,22102,20909,-5897,-5549,-11842,-16310,-27900,-20174,-15821,-14811,14789,26827,22347,18304,2902,-19421,-2174,18902,-22644,-15588,-11824,-20315,10338,12849,-25627,4670,-20627,21741,-8948,-12254,-10584,-9340,18372,29981,-18378,19498,28000,-11778,-5540,21337,6495,8636,-23016,7893,-5943,-21345,25534,3570,-26196,-32724,781,31617,359,23913,-18139,-6251,4602,14777,26988,-7506,7700,-13723,-10010,16544,22486,15387,11572,-1607,-17186,-31163,14438,14163,-31693,4723,-23349,-8384,11062,10147,-24515,-4652,-21720,15872,18396,-11633,-12419,7519,-22382,8809,24021,6851,-11493,25230,12598,-4539,-28152,13400,29119,-15322,2615,16619,30956,23314,10826,-31009,-16896,24725,-27877,13792,-7048,-2856,-28483,-7787,-19926,5467,11844,3381,1140,-14351,-12249,-20486,5226,8319,11271,26829,-8018,-6852,28871,-13761,-32248,-8598,32305,7072,11650,-24925,7003,13935,-13374,25606,2235,-28861,-1310,-8610,-23034,-32412,-17022,-12659,2229,-6333,18444,-11026,-9720,24948,30568,-32687,28702,-20251,-25666,-22806,13627,-3547,1517,25386,15212,22666,23218,8041,-108,-18867,-5361,-10682,19904,-17454,-28734,9592,-27711,10871,-23628,-21087,3383,18878,30267,-21331,29640,10056,6120,-25959,-32495,-16203,-8998,-4284,-11655,2286,15702,-12587,495,6309,5094,25901,-25530,15844,-30452,-5721,14600,-18143,-5211,-24454,12026,-20986,-31804,26380,-16813,-16467,-8930,-983,-10197,14030,14266,-30940,22678,14078,13142,6352,-6865,25087,-6255,6146,23191,-21467,17087,-18612,-31167,-15714,-3701,-5471,-2897,553,6833,20386,-1991,29253,-23557,-544,-16308,10350,32481,-28996,-31742,27560,2145,-17249,-10213,18498,-5019,-23336,-12827,25734,-21448,4099,25822,19963,5239,-3922,17746,397,2837,20563,-23714,504,-25717,12470,16347,-13946,252,20416,6767,5594,1924,3516,-7824,9875,-21748,6335,26684,-4951,6703,16528,4776,27630,12909,8559,-23151,-746,14364,-10274,31572,-18696,-30220,-23605,-3694,-10956,-122,-18506,-11827,15404,-32612,30674,29829,-5821,15745,3336,
-28714,-12573,23424,14210,-29504,-4416,-32367,7247,25796,23269,2871,6248,-27888,21256,12265,-745,16392,-25005,15567,5990,-19973,26577,-27927,9088,4907,6156,5632,21283,20360,6027,12780,11741,-18744,-22134,-24819,-26532,22442,-7175,-31386,-25165,20748,8714,17832,-12153,3132,-19243,4354,28614,30162,-10756,-29558,10625,-19753,22231,-29888,-30758,6814,-4902,29782,-22727,8602,-5182,-13082,-4787,-28753,-4396,-28887,-2606,-17155,14874,25348,-14942,28177,-12215,-12536,14735,13703,-14209,-23046,-19857,17727,-5125,-11855,14906,30032,11987,-10185,-6190,-15592,20571,19024,6610,24276,-10692,13819,5237,-15731,24274,-17101,29790,13402,-281,27952,-299,-28279,6450,-20388,-11290,26775,-21794,29165,-14352,-30794,-8196,-24701,-12154,-17195,-350,-28494,22620,11729,29323,-32068,23594,-14686,-10378,-4500,5196,8274,2175,3565,22251,3259,-24095,12437,6870,15157,22676,27115,-13958,8312,-24716,-32300,-31195,4994,-27544,-30972,3580,27711,25088,6169,-12497,22526,13872,-117,16683,9471,24817,-6732,-30028,-32348,-7460,2072,12981,3304,14061,-23754,-3673,-290,-26513,3653,-11544,28734,-14030,-8213,9923,8041,1352,-19452,-27522,22603,-32302,-2967,-32632,31727,-30886,-1341,316,2109,-4707,-10015,19288,20595,-18428,2649,-31880,9458,31810,-32262,13230,-27417,-19625,-6617,30340,-2991,5492,-9258,20883,-25883,-23144,-3013,22716,-25018,2549,5270,-16453,-23099,13419,5738,-1033,27617,-17051,19138,6196,4799,32264,-46,-32641,2943,5510,-26568,-18321,22633,32555,16343,-29022,-1290,-27699,20510,-5400,22452,-19303,-26833,-7475,-21419,-6545,-16578,12786,-18773,32375,8592,30736,20704,-19902,-18299,14622,-6249,17434,-1586,-3415,15903,-22973,-28327,1961,24781,-14553,24613,22663,22964,18867,19422,-3779,-21042,24011,-26524,-26601,20159,21342,-32199,19741,-10689,-27990,12982,15281,-28049,-30511,11300,-32422,-396,29682,-23669,-8893,-26967,2676,10437,12450,-24968,-22399,8412,-25625,-1011,-28363,-3109,32170,-20932,22256,-478,-23764,-15214,12414,9344,7188,21184,-24592,32376,13606,29519,3336,20767,2955,-18945,-14035,31348,20537,-29
649,4423,16278,4430,26259,14095,-12765,-15587,-29351,7640,-18821,24243,22877,5981,4008,7080,-15588,13949,-31006,-13993,-14508,-25250,14541,-24314,-9476,5416,3012,8846,-2209,-25390,-19282,-3816,21487,30591,13359,-15963,24448,4167,16705,-5831,24922,-22976,31826,-1058,28320,24789,-17061,-32514,19918,-5286,-26098,-10127,-23991,-7348,6175,-25022,-17446,30737,-11446,-30674,-12162,436,-6050,-15584,7300,-14851,8254,37,-23238,9059,19820,-13712,-14099,-1844,8629,-7125,-12439,3048,7755,-16617,11566,-26431,27088,-11412,-10776,7447,-26514,10254,-30323,2181,32394,7623,-8766,5042,23772,7036,65,17898,28323,22924,1668,21755,-28484,18055,-3539,23509,31389,17109,28167,-13498,11856,-31297,-23056,-28813,-2311,2289,-9233,-23670,25822,-19348,19246,-9205,-8637,-29391,19690,21921,14491,13150,9846,11722,-19565,-18568,18023,11087,-22306,-19007,2544,-27099,-5230,-7145,-103,-19716,1061,-10427,-25094,32635,23440,-3667,14678,25535,1890,22316,16331,-24115,32284,-22244,5357,-10962,-25646,-7787,-1936,-6511,22437,-13810,-320,-23984,-17403,2679,21388,-7867,-21406,21204,4995,-12418,16937,18345,17266,-17043,-26650,-24796,-15442,5111,-32644,-4220,22572,-10201,-456,-8688,-14969,-7224,20893,-3651,-25816,21266,24768,-16500,31839,-9492,5098,-24525,24561,8768,16015,-8964,8951,3122,11015,-22569,-5220,-12290,23538,28276,-4461,-5669,-736,24391,-10475,-17306,-4286,24023,-21863,26553,19863,-5485,12440,19015,26981,10135,7982,7198,-4228,-7536,28098,16235,-3946,-7412,7639,-20086,-31079,-5690,22293,17876,24528,25670,-7495,578,-5972,25447,24804,27580,21774,21398,9131,-18004,-8818,-20332,20841,26044,-13791,25810,15307,-17056,-863,-28485,-3616,-12271,-2259,7823,-31027,31103,32625,-15582,19807,6189,-1955,15491,31364,28755,-23555,18284,-21595,21142,-30213,12414,31975,-8757,6227,-26371,15974,-24983,-8688,-18211,31045,-15766,31997,23809,-9693,-7797,7916,-16279,-12146,32132,16882,-3871,11647,-25388,-31572,19527,-7177,11241,-10037,3398,-21789,24872,17460,19387,12297,-19591,-27711,-16561,-6062,28347,-15264,-29629,30278,18236,-1
9063,-12828,-19433,3187,-14337,-17084,-20552,13764,-19648,-13679,12391,28062,29005,10782,-19240,25114,-30892,7344,-26649,1374,-24560,3731,19706,5526,-18643,-5214,31605,11070,-22892,2150,18867,-10961,25703,10146,-17109,-21002,-31679,-24061,-29669,16631,28632,-858,29230,11758,27693,-15490,-17037,25975,-24882,16809,-29501,-3155,-27223,-16200,-7751,29315,4977,10321,-13284,11481,18175,-10503,-18877,-20258,-15138,8600,-14651,29325,2626,-6136,9367,-14929,-18349,-27126,-1135,24328,3764,-23312,16912,30384,-10046,27989,29229,-11812,23929,-6117,10316,-1788,-16843,-30403,29449,23044,6357,29792,3161,5771,6100,29087,3735,-28144,22318,-27587,-21478,5714,3482,-7559,-1382,-2011,-31414,13277,7826,31109,18922,-24509,-31097,17756,-15450,15738,2304,-10235,24453,-24951,14449,-7628,-30859,-10054,-28707,5914,-11830,12135,14222,-28346,18669,8899,17595,22907,17700,10283,-2354,85,-28892,3572,10007,-28002,-5051,-18182,30035,19293,-8007,-24162,-2722,22973,18558,-23629,-5548,19128,-11996,13105,17171,1406,26900,-21107,-4683 diff --git a/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice1.tflite b/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice1.tflite new file mode 100644 index 0000000..fda8438 Binary files /dev/null and b/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice1.tflite differ diff --git a/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice10.tflite b/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice10.tflite new file mode 100644 index 0000000..e2fc5db Binary files /dev/null and b/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice10.tflite differ diff --git a/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice10_golden_int16.csv b/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice10_golden_int16.csv new file mode 100644 index 0000000..732f25e --- /dev/null +++ 
b/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice10_golden_int16.csv @@ -0,0 +1 @@ +31955,-16366,-7670,9680,30673,-2631,28825,-6419,-29319,-30360,-5982,24051,272,-24655,-7366,20257,16869,26202,-12527,-18180,17101,17042,14718,-29462,30582,21071,25943,-1734,30970,-28894,-26131,28509,-26571,-1763,-25161,30970,23804,7454,-17315,-13526,25846,7860,-5336,19020,18630,23309,-26764,-11622,-22515,-18262,24099,-27875,-1931,25583,-26557,-30783,-21705,-26296,18203,15640,-1892,8,26837,26478,-8740,-1175,27514,-23443,6858,23987,-18463,10459,-7775,-30654,-12894,31132,-12292,-10346,2309,15147,-22500,26973,-15812,6456,31314,15696,-24935,-26795,27685,-29234,-28413,11166,21479,22952,9070,-16566,8987,15364,-7060,21635,-15898,-30500,-14674,-26422,-12255,-4454,-10641,32442,29370,-3983,-12570,-17849,-30126,13106,-26800,15479,-12966,12811,-5218,13603,-27226,-15207,719,-26513,-6645,-19843,10205,-29679,-20578,-3500,-10273,-29754,10801,24775,-5143,9955,19508,-30626,3607,32520,13552,23685,-20259,-6084,-12294,20721,20460,25113,-11247,12438,29128,-5609,-27800,1856,-2527,4332,-6347,-22693,22129,-121,5672,17105,-32652,9225,25851,30522,-18444,6367,-9838,23362,25897,-7506,13548,-8814,-23211,-22085,2387,380,-11450,-13198,-9168,4902,7371,-29619,-20918,24318,-31244,-29451,16199,-9482,-31590,-11912,-11030,-18528,-15089,-22996,-17262,-26995,31276,-22465,19995,-1718,-30554,26999,26069,-28358,-24224,23204,-15419,29898,-14092,-12163,18553,9699,12661,13943,-32078,-4096,-12008,-16181,27660,-26271,-5901,27721,-30877,25945,-14351,22710,24609,30738,-20518,12178,23442,7596,20694,12230,14711,-15973,-27604,13821,-10921,24290,18440,20493,3519,27616,22809,-4362,-10501,29832,32234,-2271,3115,-30468,1463,15523,16521,32308,1962,16049,27125,-32621,-12148,-29470,-8326,-29346,-16100,21229,14499,16175,-23128,6290,24333,15472,-14543,28881,-32163,3685,4160,-3604,-31080,13514,13607,29271,-32651,-10897,3418,-7992,-8708,17851,-30789,15393,-29488,27407,-12746,-24853,29370,-3370,21335,-3686,-20426,25603,-12
534,28649,-27141,-4905,588,-6255,-11594,1217,-4673,-31090,-23940,3216,5113,30655,16902,-22407,-29519,366,22268,27279,5591,-18485,-3549,-3298,-17650,26331,-7182,25248,-14204,16547,30060,21784,32425,21945,1924,-10916,-31321,30755,-8035,28783,-27794,27693,-1590,16870,22622,-3213,-6196,24283,20197,-3404,-19608,10091,9919,6155,25393,-2303,-2752,-5835,-914,-31056,-801,-14521,-13974,-28427,18275,-32391,-24766,-11109,20474,-25411,-9199,17766,6364,25802,-15238,8697,29296,887,-19302,11555,-17673,-17465,-8944,29300,15022,-12697,10064,-24659,-237,-23957,3892,-30661,-21825,31772,-4405,-15646,-3988,2017,29538,-23446,15755,12438,3705,-2457,-14518,25270,-23141,2099,-8405,-31848,29718,-28541,-23977,10452,27886,10837,3374,-8711,14367,-13601,18606,-13804,-17465,-23580,1894,-5724,1906,25866,29475,14442,29429,23787,24120,6233,-23166,-9888,-26165,-26213,-23674,-27545,-26326,-11852,14469,-19960,27611,19421,26532,4012,13381,-19811,-11507,-30988,-17488,13562,14290,-8179,-14378,-889,-12054,-14398,-7270,8925,-2369,-20925,-59,-30969,10120,-19321,31450,-19136,-24151,-20865,26308,-20369,-8707,12022,32038,10319,13122,7746,-13476,4372,17320,32601,-9078,-6928,-10145,-19351,32416,-24624,-20008,-8137,26653,19980,-19956,4795,-14742,-31330,7167,7226,10082,-15798,-14473,3679,-18230,9052,18457,20206,27536,-5871,5843,-20086,-28402,20840,-28015,-28648,-6345,28277,27324,17932,-23565,12329,10451,3619,-23946,21082,26881,-29109,-1257,22083,-15314,21112,-7527,20594,-1020,-27635,-3369,17724,-6032,21641,10539,-18359,7499,15371,-4454,8131,5660,14080,1141,-21741,16703,-3464,-21932,8453,14701,27838,25592,7681,18935,-16761,20117,30699,-10694,13978,14562,18476,-22904,9928,-32284,2536,-10747,16334,29494,12724,-16079,20454,-18153,14401,-20397,-25789,-14142,-15864,31502,-5771,29686,27300,28881,-22978,-11783,15687,21200,-20868,21655,6956,11767,-3709,6073,15230,21092,-7874,17422,23694,-7832,12362,-19690,2113,-28446,-26636,-5376,-21727,11612,3870,-24697,-24329,22160,16701,-27570,-15647,-31921,20355,-7913,16311,-2975,21111,-
11237,27739,-21412,-18625,17581,-3233,14647,-30496,-23783,-9919,4701,-31829,-13782,16271,-23841,14720,10162,-27586,14229,-15684,32311,21160,-3314,16532,7398,-16934,773,-28802,-10758,24894,-29715,8350,-4945,-11794,11247,-27635,20639,-8551,15966,-6,21556,-12297,-6078,32435,8187,10351,910,19871,-12475,-32722,16063,6621,-1142,20271,-4023,17322,11400,16449,21493,26304,21256,-2483,-21168,9132,17393,-2266,21731,1998,21043,14957,-20499,11892,-18213,21311,4568,14019,27186,-1124,-20508,6062,-17203,-31549,-19077,-31774,17930,10538,-21749,1884,23668,-9317,5370,3737,-6287,7623,-30830,9136,-22113,-26058,21459,429,11644,-5007,-4572,-429,32188,-14073,9974,16995,28324,-10581,7992,1059,1498,-6166,-31397,-22318,26516,5437,-26554,7664,27465,2350,23071,-6753,-29906,29655,-14845,-19593,-27760,-31408,-19968,20236,-32192,4999,1106,-29244,22074,16416,31343,21895,-27673,7344,31389,-19000,32600,-17964,27889,24702,-20883,-30133,4852,20193,25733,29472,17298,30024,2817,-7059,4239,21253,24165,-28952,-17646,29408,-30964,8105,10485,-18767,-6315,22404,-23720,-19177,-17226,12086,-15226,11125,6022,-24270,31817,12004,-16733,-7467,-101,12915,-19922,6454,8184,24616,-26673,5461,17849,2796,1856,-14381,15035,-5337,26288,-28037,29355,-13407,12310,-31716,19867,18515,-18950,-28162,30155,15198,-11061,-23719,-12230,-30719,30010,-19636,25914,-27825,9563,-3468,-28624,8867,-22274,7917,-5422,10906,-23119,25842,-29490,4286,-10253,-17334,20651,17647,11033,24000,-9805,-8132,-30616,-18459,-15559,-10811,-26996,9631,21461,10259,-7402,-21280,-28819,29330,-20857,-22229,-15854,-21030,-25871,27545,-28794,4160,20394,-10310,-17621,30152,9164,6772,-23087,19218,-7610,18535,29960,11374,30296,-26285,-29270,-32587,-13708,23207,-10266,12661,15235,334,13657,-28772,-27512,-17599,-10586,-29550,28832,-28916,27310,21964,-426,-25820,11644,8102,-31188,1449,32269,26028,-8555,-18314,27804,21521,-14570,-730,-28393,1475,12796,27981,-14985,1335,14512,12121,-8749,-19,-8193,-4822,23841,-30776,24876,29773,-9036,-17757,21971,-23664,-5590,-12156,-102
24,-28113,-21947,13361,-13195,-18710,-21904,-15770,-24500,-6588,20554,-7102,13924,-21517,11775,-20526,16178,-22968,-4358,24315,-14402,-12761,1309,28325,-8196,-21651,-29082,-15457,-9292,12292,1414,-6735,28383,25453,24584,-9965,28524,22059,-11930,25637,-13361,-4564,4207,12514,26104,-11904,9222,9674,-18625,-16487,4834,-9976,-21547,-1408,842,-30879,30819,-29231,29987,4329,23773,25057,17632,-21355,-22534,-24643,-29311,-17175,19716,-6144,21785,-31145,-17047,-12603,-2490,-8241,-23329,27175,3917,-20610,12322,-1673,-31071,-20142,-18224,17818,8445,12441,24202,-8576,12399,22657,1852,-29163,11597,22992,-16159,-17362,12680,24115,-9821,-11610,-32638,-30245,18352,-28687,-5235,-3400,-25996,-24696,19601,-7927,-3512,-16513,18034,445,-19204,-6682,16743,2399,24560,-6630,10320,-29319,13268,-17948,29608,-2619,13547,17451,24850,30819,7096,-18886,6242,-5722,-17769,-20735,-8274,-16285,-27073,16685,5912,6833,-15160,-23382,-32523,16021,10932,-467,27060,-14947,-32271,-18183,28663,-8672,20112,18007,-2693,-18909,513,-19313,-30265,-5181,14364,-3571,6772,9026,-25703,17793,-882,-32232,24302,-22155,-31715,4167,32042,-29989,3546,-19409,-20062,-21361,26180,-3000,19204,13407,-8338,22673,16339,-25706,4896,-3130,-10161,-8184,6101,-21286,18641,-32252,-2266,7776,17112,-31131,-5110,-553,-6222,32239,-20177,5713,17013,1363,4495,-18978,27251,3700,406,11314,-8220,-10170,14540,22747,-17926,27158,18606,14525,27017,1213,15216,-3437,21514,-30209,-719,7260,-20694,21850,4271,23888,-2011,23480,5242,26342,7335,22829,1201,-21540,18499,-1198,20806,-2137,-22707,-22984,12920,854,-1038,-6428,27576,-20126,-31599,29818,-8650,-15235,-944,24989,29033,-26148,16562,27888,-1025,-29112,-21474,-32532,12785,-14504,-29339,-4384,20313,10871,-29994,14436,-24485,-23475,-4167,26353,-27225,-23202,-7304,18533,-13479,22794,-1339,-27100,-12385,-3934,15928,-10855,26785,-30044,-30393,-5136,-20283,-5829,-29685,-16637,13926,-3022,31808,8204,4738,-30277,23656,702,-1079,-3650,18748,-16849,7900,23430,-22184,-5538,-12774,17035,13002,24781,12032,480,-
171,28127,15035,-16122,-12062,15459,-16757,-21154,-20548,20893,17027,21267,-22779,28919,-28224,14081,8979,15788,-45,6250,-28636,-1700,-3640,-3599,10288,-14915,16580,-16797,24995,-4686,-14731,-4519,-19301,-10539,12489,-11877,19901,-6755,-2130,2782,21410,13622,32163,3175,31338,-27697,-9901,-18328,21768,5718,27556,-31052,11703,7788,-22898,13291,3529,27548,19327,24376,-31701,-18856,647,28870,-26274,12805,-22992,-18983,15081,26849,7830,23999,-7144,-5205,25956,-16229,10088,-4460,20995,-801,-1273,-21351,-32191,-28248,-1565,13167,-11928,19582,-6258,3748,10680,-6681,7967,24367,10324,7814,22430,-8160,25921,-28499,14228,-22818,20258,10195,26242,3405,17411,-23341,13337,-1127,15926,25804,21902,4209,65,19257,-9062,-24991,27733,-20650,-26782,16267,-14880,-2843,-13363,12855,-32456,4909,29710,5843,20107,-13470,30612,-26036,-11278,-18114,-26330,2132,-21792,-1708,1399,11313,3904,13926,-15613,-31212,-13331,-26557,-11109,-7834,8836,-8867,-9146,-8673,-23374,-830,-472,17487,-32536,17601,-15300,6027,28316,-30744,-12857,-29938,23651,-30946,2152,-15491,-11763,-24764,25930,-31257,14142,-9912,10585,-16253,-5817,-22293,7103,30354,8625,2500,-17378,14640,20563,-24089,27943,-97,32331,7883,-26839,25370,-20575,11574,-8485,28109,-1733,23137,10275,27637,-18827,-9015,23652,-21872,30112,11184,-25127,9741,14479,12096,31706,-19643,23118,-19210,11132,-18672,16356,31346,-21325,-1948,-14633,27645,27217,19849,-5465,559,-11090,25989,-7360,14850,-7375,-12141,3677,-17619,13948,-13053,-23032,-2288,-3224,20542,26142,14360,7869,5025,-22776,-29670,-18427,27484,-7135,-2291,-32178,-31375,30238,23182,31820,-27860,-4254,13816,-5865,4944 diff --git a/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice10_input0_int16.csv b/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice10_input0_int16.csv new file mode 100644 index 0000000..05788a9 --- /dev/null +++ b/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice10_input0_int16.csv @@ -0,0 +1 @@ 
+21157,11284,18430,16877,6591,-5209,21003,14861,-1477,29898,-11211,-13575,-14753,8194,-27856,28827,-17331,15703,-11648,22657,-26183,-32532,26622,26855,-22527,27892,-16757,1094,5839,5028,-23694,-10488,32385,-10304,-25313,-16104,-14511,-10915,3643,-8155,977,-12256,-10839,-2021,12572,14987,754,8105,-15432,-4573,-27411,-8886,3281,32176,-24869,8410,-12664,-18860,29440,25006,16088,-22637,16021,8354,-1089,-31872,-11684,-20912,-11912,-23914,-22693,-22489,-14511,-21929,-8124,-16257,-12364,-20238,-18453,-21232,-27077,-14502,-12647,-29987,-17065,13147,-8276,931,19890,-27332,1801,-5736,12995,-8845,-8321,-31490,30870,-11643,14349,16028,8302,12137,-26399,-26852,-2836,-21599,22391,5752,-580,-16376,27087,-11451,-16998,20602,-3643,27590,1054,-18855,1163,-16312,-24253,-28657,3693,21946,17331,15946,16893,31309,18531,21628,-17226,19700,8345,-10434,-12606,8991,-27336,23448,1786,17057,26574,-735,-24456,-22396,4986,-20895,-8524,30062,2843,-13241,24879,24503,11761,29812,-3739,22386,-10452,-643,3569,-783,7328,32557,-25111,-7585,25499,23122,24831,-9660,-30008,23007,-12341,-8163,-13567,32015,10045,32574,14664,27329,994,-15870,3245,-10112,2652,26644,4538,8345,-3243,1858,2821,-18352,14383,12939,-24418,4845,3025,12123,-27485,7698,-27346,11253,20469,16959,11163,18838,10788,-9862,-3301,24568,-24643,-6176,-26840,-14838,-5133,12418,-3798,10929,31212,-26231,-14261,698,26186,506,-31283,17234,-18672,-31711,-27047,-23996,-7032,-23260,-8224,-5332,-8173,-26004,29030,-19198,4347,-10254,-9448,-6808,18383,21533,5061,-3695,-8628,23722,21423,-28897,-21316,19249,-32753,4500,-25100,24605,19096,16078,-12397,13604,10828,4969,-23736,28501,20312,-4336,1514,-15387,-3294,22669,-5793,-26121,8556,7774,-264,-2749,-7541,-29871,8678,-25021,-17017,-24629,23591,-27735,-8032,18071,12120,21191,-10328,-29135,-2977,-19994,9729,-17019,5370,21005,17805,21521,-15460,-30914,21443,11791,-32568,7202,26588,-27912,20791,-29477,-6458,29707,-24291,-23661,-26566,-21556,20456,-5621,24548,-16349,-19440,-5680,-9622,14444,-25516,-21665,-7387,-
20458,-22059,-25897,26209,23055,18207,-1192,-10831,12120,-16350,-25059,20179,4277,17664,-28990,-27762,-16805,12303,-24728,-7567,-27663,14906,191,22767,-4854,-26730,-10534,-2450,11446,20298,-9875,-11203,-10443,25425,15674,19988,27572,-26897,5846,-2309,29982,2383,1689,-16654,17732,18418,24627,-2103,-11964,-252,3497,-25543,-178,-27721,12890,19534,-9524,-13812,32305,22578,-4007,-9443,30729,-26172,-20678,32263,24817,22316,-24787,15496,18063,-9112,-9877,13754,12170,26906,-28141,32052,-14383,-20799,25990,940,28436,7723,12258,-6053,-15401,-2179,-14291,19812,-20546,-18431,-31243,16216,-30721,23509,-5294,-29490,4518,-14077,-333,25991,2729,-11798,-10934,-10403,14902,20884,28795,-21644,580,-11831,-9670,-10044,10556,-848,4940,-620,280,-12510,29138,-6461,19459,-2796,28356,-3798,-599,-13793,4594,13357,7117,9334,11240,32266,2501,18418,12358,-8790,-28482,24566,-20239,-30453,-29776,-30730,-6757,-3020,-3186,-686,8165,-24690,18932,4773,17657,21087,-13685,12566,18577,-24047,1454,-13199,30719,11645,-6624,29548,-25268,29050,24928,11150,757,-11596,-11879,24203,-32033,-25769,-101,-23290,-3177,-11782,1746,-4644,-15506,-9520,17402,-18224,-29921,-23143,3285,29278,-26525,32260,-19897,8330,-11689,27441,-4898,2100,-10566,7660,-7883,-21795,-7830,-3574,5422,24481,-25131,31412,-27292,20562,-2206,24818,13503,-10564,-4781,-7080,-23756,-24523,-12683,32626,31501,-20936,-15483,11359,-19352,-25118,19152,-18877,26689,6122,-22097,23853,20802,22760,-15312,22011,-5562,9689,-8775,246,9750,13810,27982,26845,-12793,18836,-12581,-16275,16086,23774,-16399,16104,30468,9172,6030,30860,-25336,-146,-1152,-7493,12168,3345,21154,-10741,-16790,-23682,-1766,-30671,23672,-7909,-30149,-1490,-13838,7075,29513,-13816,17534,31879,-249,-5624,-6010,13392,20788,-15948,-16897,-8837,-8094,-24663,16424,1429,-20566,-29523,6827,-18564,-27566,-15080,-1400,-15252,22751,18073,-32763,-13533,-25933,-27499,-18119,3293,16906,26170,1631,-4602,-26580,3,17739,11256,-776,1709,-29851,2950,-5959,19449,-27861,-25576,10351,1216,24998,8944,-30482,-24
52,-7140,29859,-13250,3984,-30711,30117,-32211,7916,-32648,-23808,-5993,-2375,3454,10600,-29761,-30123,-15703,31422,4646,-8493,-24825,-11726,-7555,-1500,6992,-24759,-22406,1478,-18342,6065,30133,31020,-30779,11091,-20692,-32465,-15403,-4754,27498,-27923,29944,25063,-31016,28348,6121,-24664,17126,-11050,468,24414,-19639,-6345,-25861,25404,-6006,-1304,-25719,12962,188,4930,-29145,23503,32594,2326,-4877,-31060,4041,-25080,22954,-11227,24880,-13900,-12516,7684,-29842,10717,28322,-22773,29173,-23720,-11286,15636,-19595,-11040,15910,13152,11385,4032,29223,7890,-5636,-18702,25310,4368,20334,13842,5524,-4768,13518,-20391,-14302,-20835,-9782,-7138,638,31689,8834,-32505,-3752,-27934,15308,13184,-25071,-8089,-11555,10504,-32377,-28954,-13662,31955,-16366,-7670,9680,30673,-2631,28825,-6419,-29319,-30360,-5982,24051,272,-24655,-7366,20257,16869,26202,-12527,-18180,17101,17042,14718,-29462,30582,21071,25943,-1734,30970,-28894,-26131,28509,-26571,-1763,-25161,30970,23804,7454,-17315,-13526,25846,7860,-5336,19020,18630,23309,-26764,-11622,-22515,-18262,24099,-27875,-1931,25583,-26557,-30783,-21705,-26296,18203,15640,-1892,8,26837,26478,-8740,-1175,27514,-23443,6858,23987,-18463,10459,-7775,-30654,-12894,31132,-12292,-10346,2309,15147,-22500,26973,-15812,6456,31314,15696,-24935,-26795,27685,-29234,-28413,11166,21479,22952,9070,-16566,8987,15364,-7060,21635,-15898,-30500,-14674,-26422,-12255,-4454,-10641,32442,29370,-3983,-12570,-17849,-30126,13106,-26800,15479,-12966,12811,-5218,13603,-27226,-15207,719,-26513,-6645,-19843,10205,-29679,-20578,-3500,-10273,-29754,10801,24775,-5143,9955,19508,-30626,3607,32520,13552,23685,-20259,-6084,-12294,20721,20460,25113,-11247,12438,29128,-5609,-27800,1856,-2527,4332,-6347,-22693,22129,-121,5672,17105,-32652,9225,25851,30522,-18444,6367,-9838,23362,25897,-7506,13548,-8814,-23211,-22085,2387,380,-11450,-13198,-9168,4902,7371,-29619,-20918,24318,-31244,-29451,16199,-9482,-31590,-11912,-11030,-18528,-15089,-22996,-17262,-26995,31276,-22465,19995,-17
18,-30554,26999,26069,-28358,-24224,23204,-15419,29898,-14092,-12163,18553,9699,12661,13943,-32078,-4096,-12008,-16181,27660,-26271,-5901,27721,-30877,25945,-14351,22710,24609,30738,-20518,12178,23442,7596,20694,12230,14711,-15973,-27604,13821,-10921,24290,18440,20493,3519,27616,22809,-4362,-10501,29832,32234,-2271,3115,-30468,1463,15523,16521,32308,1962,16049,27125,-32621,-12148,-29470,-8326,-29346,-16100,21229,14499,16175,-23128,6290,24333,15472,-14543,28881,-32163,3685,4160,-3604,-31080,13514,13607,29271,-32651,-10897,3418,-7992,-8708,17851,-30789,15393,-29488,27407,-12746,-24853,29370,-3370,21335,-3686,-20426,25603,-12534,28649,-27141,-4905,588,-6255,-11594,1217,-4673,-31090,-23940,3216,5113,30655,16902,-22407,-29519,366,22268,27279,5591,-18485,-3549,-3298,-17650,26331,-7182,25248,-14204,16547,30060,21784,32425,21945,1924,-10916,-31321,30755,-8035,28783,-27794,27693,-1590,16870,22622,-3213,-6196,24283,20197,-3404,-19608,10091,9919,6155,25393,-2303,-2752,-5835,-914,-31056,-801,-14521,-13974,-28427,18275,-32391,-24766,-11109,20474,-25411,-9199,17766,6364,25802,-15238,8697,29296,887,-19302,11555,-17673,-17465,-8944,29300,15022,-12697,10064,-24659,-237,-23957,3892,-30661,-21825,31772,-4405,-15646,-3988,2017,29538,-23446,15755,12438,3705,-2457,-14518,25270,-23141,2099,-8405,-31848,29718,-28541,-23977,10452,27886,10837,3374,-8711,14367,-13601,18606,-13804,-17465,-23580,1894,-5724,1906,25866,29475,14442,29429,23787,24120,6233,-23166,-9888,-26165,-26213,-23674,-27545,-26326,-11852,14469,-19960,27611,19421,26532,4012,13381,-19811,-11507,-30988,-17488,13562,14290,-8179,-14378,-889,-12054,-14398,-7270,8925,-2369,-20925,-59,-30969,10120,-19321,31450,-19136,-24151,-20865,26308,-20369,-8707,12022,32038,10319,13122,7746,-13476,4372,17320,32601,-9078,-6928,-10145,-19351,32416,-24624,-20008,-8137,26653,19980,-19956,4795,-14742,-31330,7167,7226,10082,-15798,-14473,3679,-18230,9052,18457,20206,27536,-5871,5843,-20086,-28402,20840,-28015,-28648,-6345,28277,27324,17932,-23565,12329,
10451,3619,-23946,21082,26881,-29109,-1257,22083,-15314,21112,-7527,20594,-1020,-27635,-3369,17724,-6032,21641,10539,-18359,7499,15371,-4454,8131,5660,14080,1141,-21741,16703,-3464,-21932,8453,14701,27838,25592,7681,18935,-16761,20117,30699,-10694,13978,14562,18476,-22904,9928,-32284,2536,-10747,16334,29494,12724,-16079,20454,-18153,14401,-20397,-25789,-14142,-15864,31502,-5771,29686,27300,28881,-22978,-11783,15687,21200,-20868,21655,6956,11767,-3709,6073,15230,21092,-7874,17422,23694,-7832,12362,-19690,2113,-28446,-26636,-5376,-21727,11612,3870,-24697,-24329,22160,16701,-27570,-15647,-31921,20355,-7913,16311,-2975,21111,-11237,27739,-21412,-18625,17581,-3233,14647,-30496,-23783,-9919,4701,-31829,-13782,16271,-23841,14720,10162,-27586,14229,-15684,32311,21160,-3314,16532,7398,-16934,773,-28802,-10758,24894,-29715,8350,-4945,-11794,11247,-27635,20639,-8551,15966,-6,21556,-12297,-6078,32435,8187,10351,910,19871,-12475,-32722,16063,6621,-1142,20271,-4023,17322,11400,16449,21493,26304,21256,-2483,-21168,9132,17393,-2266,21731,1998,21043,14957,-20499,11892,-18213,21311,4568,14019,27186,-1124,-20508,6062,-17203,-31549,-19077,-31774,17930,10538,-21749,1884,23668,-9317,5370,3737,-6287,7623,-30830,9136,-22113,-26058,21459,429,11644,-5007,-4572,-429,32188,-14073,9974,16995,28324,-10581,7992,1059,1498,-6166,-31397,-22318,26516,5437,-26554,7664,27465,2350,23071,-6753,-29906,29655,-14845,-19593,-27760,-31408,-19968,20236,-32192,4999,1106,-29244,22074,16416,31343,21895,-27673,7344,31389,-19000,32600,-17964,27889,24702,-20883,-30133,4852,20193,25733,29472,17298,30024,2817,-7059,4239,21253,24165,-28952,-17646,29408,-30964,8105,10485,-18767,-6315,22404,-23720,-19177,-17226,12086,-15226,11125,6022,-24270,31817,12004,-16733,-7467,-101,12915,-19922,6454,8184,24616,-26673,5461,17849,2796,1856,-14381,15035,-5337,26288,-28037,29355,-13407,12310,-31716,19867,18515,-18950,-28162,30155,15198,-11061,-23719,-12230,-30719,30010,-19636,25914,-27825,9563,-3468,-28624,8867,-22274,7917,-5422,10906,
-23119,25842,-29490,4286,-10253,-17334,20651,17647,11033,24000,-9805,-8132,-30616,-18459,-15559,-10811,-26996,9631,21461,10259,-7402,-21280,-28819,29330,-20857,-22229,-15854,-21030,-25871,27545,-28794,4160,20394,-10310,-17621,30152,9164,6772,-23087,19218,-7610,18535,29960,11374,30296,-26285,-29270,-32587,-13708,23207,-10266,12661,15235,334,13657,-28772,-27512,-17599,-10586,-29550,28832,-28916,27310,21964,-426,-25820,11644,8102,-31188,1449,32269,26028,-8555,-18314,27804,21521,-14570,-730,-28393,1475,12796,27981,-14985,1335,14512,12121,-8749,-19,-8193,-4822,23841,-30776,24876,29773,-9036,-17757,21971,-23664,-5590,-12156,-10224,-28113,-21947,13361,-13195,-18710,-21904,-15770,-24500,-6588,20554,-7102,13924,-21517,11775,-20526,16178,-22968,-4358,24315,-14402,-12761,1309,28325,-8196,-21651,-29082,-15457,-9292,12292,1414,-6735,28383,25453,24584,-9965,28524,22059,-11930,25637,-13361,-4564,4207,12514,26104,-11904,9222,9674,-18625,-16487,4834,-9976,-21547,-1408,842,-30879,30819,-29231,29987,4329,23773,25057,17632,-21355,-22534,-24643,-29311,-17175,19716,-6144,21785,-31145,-17047,-12603,-2490,-8241,-23329,27175,3917,-20610,12322,-1673,-31071,-20142,-18224,17818,8445,12441,24202,-8576,12399,22657,1852,-29163,11597,22992,-16159,-17362,12680,24115,-9821,-11610,-32638,-30245,18352,-28687,-5235,-3400,-25996,-24696,19601,-7927,-3512,-16513,18034,445,-19204,-6682,16743,2399,24560,-6630,10320,-29319,13268,-17948,29608,-2619,13547,17451,24850,30819,7096,-18886,6242,-5722,-17769,-20735,-8274,-16285,-27073,16685,5912,6833,-15160,-23382,-32523,16021,10932,-467,27060,-14947,-32271,-18183,28663,-8672,20112,18007,-2693,-18909,513,-19313,-30265,-5181,14364,-3571,6772,9026,-25703,17793,-882,-32232,24302,-22155,-31715,4167,32042,-29989,3546,-19409,-20062,-21361,26180,-3000,19204,13407,-8338,22673,16339,-25706,4896,-3130,-10161,-8184,6101,-21286,18641,-32252,-2266,7776,17112,-31131,-5110,-553,-6222,32239,-20177,5713,17013,1363,4495,-18978,27251,3700,406,11314,-8220,-10170,14540,22747,-17926,2715
8,18606,14525,27017,1213,15216,-3437,21514,-30209,-719,7260,-20694,21850,4271,23888,-2011,23480,5242,26342,7335,22829,1201,-21540,18499,-1198,20806,-2137,-22707,-22984,12920,854,-1038,-6428,27576,-20126,-31599,29818,-8650,-15235,-944,24989,29033,-26148,16562,27888,-1025,-29112,-21474,-32532,12785,-14504,-29339,-4384,20313,10871,-29994,14436,-24485,-23475,-4167,26353,-27225,-23202,-7304,18533,-13479,22794,-1339,-27100,-12385,-3934,15928,-10855,26785,-30044,-30393,-5136,-20283,-5829,-29685,-16637,13926,-3022,31808,8204,4738,-30277,23656,702,-1079,-3650,18748,-16849,7900,23430,-22184,-5538,-12774,17035,13002,24781,12032,480,-171,28127,15035,-16122,-12062,15459,-16757,-21154,-20548,20893,17027,21267,-22779,28919,-28224,14081,8979,15788,-45,6250,-28636,-1700,-3640,-3599,10288,-14915,16580,-16797,24995,-4686,-14731,-4519,-19301,-10539,12489,-11877,19901,-6755,-2130,2782,21410,13622,32163,3175,31338,-27697,-9901,-18328,21768,5718,27556,-31052,11703,7788,-22898,13291,3529,27548,19327,24376,-31701,-18856,647,28870,-26274,12805,-22992,-18983,15081,26849,7830,23999,-7144,-5205,25956,-16229,10088,-4460,20995,-801,-1273,-21351,-32191,-28248,-1565,13167,-11928,19582,-6258,3748,10680,-6681,7967,24367,10324,7814,22430,-8160,25921,-28499,14228,-22818,20258,10195,26242,3405,17411,-23341,13337,-1127,15926,25804,21902,4209,65,19257,-9062,-24991,27733,-20650,-26782,16267,-14880,-2843,-13363,12855,-32456,4909,29710,5843,20107,-13470,30612,-26036,-11278,-18114,-26330,2132,-21792,-1708,1399,11313,3904,13926,-15613,-31212,-13331,-26557,-11109,-7834,8836,-8867,-9146,-8673,-23374,-830,-472,17487,-32536,17601,-15300,6027,28316,-30744,-12857,-29938,23651,-30946,2152,-15491,-11763,-24764,25930,-31257,14142,-9912,10585,-16253,-5817,-22293,7103,30354,8625,2500,-17378,14640,20563,-24089,27943,-97,32331,7883,-26839,25370,-20575,11574,-8485,28109,-1733,23137,10275,27637,-18827,-9015,23652,-21872,30112,11184,-25127,9741,14479,12096,31706,-19643,23118,-19210,11132,-18672,16356,31346,-21325,-1948,-14633
,27645,27217,19849,-5465,559,-11090,25989,-7360,14850,-7375,-12141,3677,-17619,13948,-13053,-23032,-2288,-3224,20542,26142,14360,7869,5025,-22776,-29670,-18427,27484,-7135,-2291,-32178,-31375,30238,23182,31820,-27860,-4254,13816,-5865,4944 diff --git a/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice11.tflite b/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice11.tflite new file mode 100644 index 0000000..a437db2 Binary files /dev/null and b/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice11.tflite differ diff --git a/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice11_golden_int16.csv b/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice11_golden_int16.csv new file mode 100644 index 0000000..f3e3d74 --- /dev/null +++ b/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice11_golden_int16.csv @@ -0,0 +1 @@ +4937,14203,20565,26077,7434,5522,1824,-16531,-32601,-6768,9576,25651,27943,28140,-11095,29325,14936,16241,15710,7060,23954,-22031,-22109,-14235,-12683,-14864,9346,30993,7649,28252,-22387,7378,11667,4842,-20991,-23133,-14003,-14167,27243,10242,24261,32213,-19308,9313,25465,12100,10587,11364,-2542,-26992,28499,-5181,-2541,-30112,-12930,-27631,-9007,12411,23806,5958,8374,-28860,-21520,-25972,28970,427,-27306,28444,7836,21686,25744,9844,10392,10173,25469,-19669,1823,-24781,-12219,-6386,28192,-5233,5611,21173,19690,21212,22340,15749,-17774,-842,27589,21760,8974,-9995,-28350,-16461,-5036,16110,-18396,-4268,-20925,-22605,32091,27396,-10849,11320,16212,20135,18871,-13618,18775,-23347,-2318,-30180,7547,3218,21880,-18232,15338,-18772,17456,-5815,-17338,26019,-5219,13291,15665,-5151,-16612,31852,-6773,30034,3077,1304,29458,7846,-12537,-13174,19218,-23570,4002,-10130,9409,-30075,11416,3605,30179,-28900,2192,30538,-9289,17840,18583,-14985,22857,-30642,-9251,8848,-23080,-14782,9682,-24429,-18934,-2251,-22157,-15572,17113,15233,-8021,-9
380,11750,-20918,16167,-8747,-2911,9433,31696,-19230,-13360,-12995,-1339,10323,9752,-20834,-14429,115,-8116,21961,-23293,-20182,-16,-19058,-13438,-24814,8318,-13568,184,20815,-4997,-24798,-6688,-28931,-3143,26308,19973,23221,926,-2170,21146,19863,-29601,-31818,-25137,8781,-31774,31458,-3317,-6283,-25952,22652,564,12667,5000,24273,-24849,31417,25541,18865,15054,-32591,-11544,-3555,23002,1459,-8167,-11241,3463,-4536,971,9825,-17182,5823,-16982,-25896,4777,-21365,14549,-27724,-16514,-10382,30296,-17630,26109,-20575,4129,-23392,-26449,-29714,8921,-17398,26456,25241,-11588,4413,16376,-17524,-4434,-10339,25545,1673,2369,-22029,494,-21158,-25087,31854,-5229,16445,11328,-27894,-8711,9851,-25008,-24242,-11608,-18529,-25661,-29216,-14627,14131,32590,10488,1572,31758,-21958,-8361,18923,-17335,-14562,-8510,15883,6048,29991,-26402,29733,-21086,-31258,-6586,-27488,-27653,-2138,8290,14208,11555,19856,16237,13221,32381,-12263,11385,-6396,15021,5285,-15335,13253,-30262,18612,-16752,-23700,31204,-3624,-17073,22696,-3067,-18770,6534,7959,13965,25172,21496,9038,-26267,24757,-17690,22303,-24128,-24815,-8309,-16161,4227,1420,28398,-15695,20041,24886,12522,9004,-14002,-21741,-32566,6694,-24274,18196,-32035,8696,22940,21532,627,-15754,9008,-24616,800,-4530,1370,4319,-14472,-471,4789,12169,3413,29393,-10544,33,-179,16874,13497,-21252,25101,-842,11249,-16163,-7726,-765,-7633,23427,-30333,6872,10383,6209,-27687,-14505,19878,-27898,22413,-3302,6544,32504,10520,31734,6183,17701,21427,1651,-20923,-25688,15537,20181,22260,-32179,-20380,25281,4088,3216,17569,-26613,-10937,2901,4346,18171,-8552,12357,8372,2319,14341,-30561,12201,-14358,14861,11401,-1993,-6346,26375,6505,-14501,28165,-17483,7738,26028,6124,-21389,-15743,15345,-28782,5155,30217,-1802,-12689,-3522,1905,-26875,-5262,17796,6935,-8659,6211,-26871,32089,981,-25317,8283,16301,17106,16779,-32120,-32297,30205,27652,-1343,12708,31304,-6617,-9237,31429,5505,29658,23568,-18595,30296,16476,18287,5280,-21107,-12939,-25229,-32297,-25253,30248,2798
7,15780,15623,-28143,12018,-27486,-29636,9786,-25601,20451,-4969,22314,-17390,-26587,8347,-27581,28967,-8109,-4832,14502,-5771,28001,-8072,4474,-27641,-26004,31519,-25541,32438,31193,-30783,-6715,-9102,-16837,14032,6051,-25498,19113,5977,22619,-25835,2370,-7287,30176,-4155,-1670,19628,24044,-17353,7641,23810,25377,28750,-29288,-27397,15754,-15301,-58,4916,-31504,14187,-2850,-27792,-18861,-12538,-2182,-1216,-15844,28248,5078,-14774,10954,-7378,-19538,19916,12745,-20303,8966,20130,5839,124,-14381,-10111,-3421,14744,-14153,-12976,18052,22643,28637,-8344,10625,17587,30627,20208,32748,14409,-24586,1147,25375,-19578,-30718,566,-32363,25211,-6184,-9499,-1298,-15021,7315,-7503,-6466,30837,26065,29221,10240,-26468,-22402,14411,-12633,486,23081,-3017,29153,14389,-7124,-1362,-5923,-16178,8375,-10703,14857,-25647,-27019,15801,-12978,-8598,19705,20597,-30398,-26220,4353,16727,-32026,20708,-5801,-1997,-10687,25746,-6121,-26035,-20184,-132,-14840,10642,11104,14152,-9046,9430,20324,25779,-4102,15865,-13540,23309,-32225,14181,1833,-16613,6109,13445,-27984,23205,17459,-7990,7818,-10089,-3293,-917,-12818,-30529,10048,-8815,-26510,-3771,-937,29743,13811,24787,3687,14022,-4296,27445,-20914,-16052,13058,-25321,16601,21987,-22111,-30097,1337,18948,-22404,7743,-20243,-2442,-16707,-24869,1776,23598,-9155,-4169,19604,-22276,-19558,31477,-18600,4449,25266,23319,5476,-10626,-6608,-31935,-24036,-27768,9310,27947,-21332,-9567,11139,-14433,2305,29320,-15067,24012,13432,15417,28116,-25375,5574,12202,11423,26679,10351,8775,-6735,31154,12798,21211,11407,-22379,17143,-12834,21259,9921,-14212,-5718,18914,-21348,-13176,-26810,343,-2588,-2181,-1560,-127,-14012,19528,-26240,19847,4414,11952,-14303,27726,-23715,-30580,14102,-28593,-5288,-17623,32623,-23835,-18817,-32104,-20440,-5627,1437,-17760,28129,-31626,-27675,18685,-8865,-7094,-5974,-13482,-997,6818,-30148,6026,-22659,-16370,29152,-24204,1439,26129,18666,19885,-10170,-28160,5338,-12847,8874,-21083,-2580,21624,-28762,-28468,22968,5320,-8627,6600,16263
,-17538,29087,-16418,-28448,18032,-9571,-11588,-16571,-29386,29634,-17035,-17362,-29562,20706,-13976,7856,-17785,16327,-3130,-13327,-12049,1116,4711,24446,-29112,4029,11684,-27776,-16403,-24897,-29309,16464,7436,-23514,4109,30669,29002,-10716,2617,-9735,8688,15520,6974,-2971,-24775,-19899,21578,28420,-7372,30956,-2791,3106,6168,18530,-21515,28978,22802,23070,2681,-4333,16509,-30843,30096,-19440,-32630,21072,9681,5221,-32617,20256,6975,-6401,-18868,-20190 diff --git a/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice11_input0_int16.csv b/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice11_input0_int16.csv new file mode 100644 index 0000000..4ecdab1 --- /dev/null +++ b/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice11_input0_int16.csv @@ -0,0 +1 @@ +30994,3027,29651,-4766,-17720,-5056,32677,20084,-29123,3654,12316,-2072,23364,-15494,-3558,31634,-16492,15871,-24841,31132,24229,23734,12554,-2204,31545,-28680,375,-6623,-10421,3925,16896,-13469,1207,-16180,6991,7782,11251,12632,19726,-2679,-12154,-14961,-7305,12219,4740,-20021,-23577,12122,-24987,5866,10332,-23037,-9225,-15782,-32138,-29548,-10420,13039,17887,12264,-26191,866,22896,17422,-32211,-25204,-7977,-26527,-27016,-28676,-21913,-8731,555,14231,-28633,-29957,-10931,25290,32418,11399,16423,13994,26362,11027,-7300,-28678,-30667,18228,-31158,16811,-22244,21458,6254,-2040,-24486,3335,12364,-22356,1224,-30258,-31389,18248,31202,-31436,22425,11037,-14910,31430,-12904,11000,16431,15703,-30244,11607,-6882,-7932,16376,2311,-14130,-3678,21959,-3700,-25716,-22562,-30038,15954,1944,-8909,10254,21575,27884,20414,14717,-25448,24291,8032,-26701,-11451,-30769,1764,27505,10468,9211,324,20182,-453,-8377,-4727,-934,26112,-14095,-8417,-27591,18668,32555,13032,24294,-28431,20731,-26742,-1780,23181,22766,24314,-27551,13428,-5757,-10332,-13709,13608,-22136,20120,18273,-21770,12481,2254,3776,-6863,-4797,-29984,27393,23503,-19397,-28042,23437,-23626,11244,11511,-26
512,-11953,24854,-15843,-32390,-9645,-2318,32477,-24772,15990,26854,-25757,-29277,32586,30826,-5320,-16806,13494,-492,21043,-22742,29557,20963,-658,-26958,31973,19995,10110,5520,-31226,-22250,6473,9092,-30816,24273,-8819,4083,28923,10714,-28678,14610,31058,-3221,6327,19326,28051,-14667,9608,25087,12378,12132,-22893,-705,11396,17080,29604,-11527,7803,18270,-15962,-23739,-27719,32508,13493,817,21824,14048,20972,32039,21302,-31687,-25018,11038,22137,15245,21393,-24743,-15709,2986,15652,-8997,-1880,-19067,-18327,19575,28805,-2313,8266,16510,-2836,-27843,26972,-18269,-23763,-22291,-20360,11551,-1381,15716,28629,-22975,-29103,9726,-22157,-12451,-8097,-8514,28677,29792,888,-20488,-1372,22625,16911,-10710,-7162,12104,19410,-29367,2869,-3204,13621,2525,3619,30817,-27142,-19871,-15850,-11080,16073,-11874,-4982,25384,7420,-15752,-23620,-29592,-5941,15725,-26115,3230,-24231,1279,1908,-17915,-16884,-16805,-30736,-14078,-24531,6008,-17872,10544,-10945,-13649,-9783,332,423,21615,11738,13009,-712,25264,-18695,-29434,17177,25165,-14118,23925,7386,-5286,11147,3004,31500,-19400,-11225,-11340,-6832,-27368,-32416,-11191,17484,246,-18442,31127,-27547,7608,6817,-11636,6348,32100,2496,-15556,-13264,-27796,-21884,9233,8769,-18346,-10659,17660,21712,-9759,31564,396,12636,5483,5149,-19337,-31512,12881,3074,15906,28855,-10499,14732,18159,19430,-21225,17495,18652,24735,-9694,-2631,-721,703,-16109,-6159,-32548,-5817,24974,5033,22699,-26571,10854,-30204,14842,-17256,-17941,24396,20300,-793,11198,-17392,25464,2428,536,-21931,4069,3878,25521,-12897,-14062,20703,-11442,13642,-3764,-5056,23685,-5722,4937,14203,20565,26077,7434,5522,1824,-16531,-32601,-6768,9576,25651,27943,28140,-11095,29325,14936,16241,15710,7060,23954,-22031,-22109,-14235,-12683,-14864,9346,30993,7649,28252,-22387,7378,11667,4842,-20991,-23133,-14003,-14167,27243,10242,24261,32213,-19308,9313,25465,12100,10587,11364,-2542,-26992,28499,-5181,-2541,-30112,-12930,-27631,-9007,12411,23806,5958,8374,-28860,-21520,-25972,28970,427,-27306
,28444,7836,21686,25744,9844,10392,10173,25469,-19669,1823,-24781,-12219,-6386,28192,-5233,5611,21173,19690,21212,22340,15749,-17774,-842,27589,21760,8974,-9995,-28350,-16461,-5036,16110,-18396,-4268,-20925,-22605,32091,27396,-10849,11320,16212,20135,18871,-13618,18775,-23347,-2318,-30180,7547,3218,21880,-18232,15338,-18772,17456,-5815,-17338,26019,-5219,13291,15665,-5151,-16612,31852,-6773,30034,3077,1304,29458,7846,-12537,-13174,19218,-23570,4002,-10130,9409,-30075,11416,3605,30179,-28900,2192,30538,-9289,17840,18583,-14985,22857,-30642,-9251,8848,-23080,-14782,9682,-24429,-18934,-2251,-22157,-15572,17113,15233,-8021,-9380,11750,-20918,16167,-8747,-2911,9433,31696,-19230,-13360,-12995,-1339,10323,9752,-20834,-14429,115,-8116,21961,-23293,-20182,-16,-19058,-13438,-24814,8318,-13568,184,20815,-4997,-24798,-6688,-28931,-3143,26308,19973,23221,926,-2170,21146,19863,-29601,-31818,-25137,8781,-31774,31458,-3317,-6283,-25952,22652,564,12667,5000,24273,-24849,31417,25541,18865,15054,-32591,-11544,-3555,23002,1459,-8167,-11241,3463,-4536,971,9825,-17182,5823,-16982,-25896,4777,-21365,14549,-27724,-16514,-10382,30296,-17630,26109,-20575,4129,-23392,-26449,-29714,8921,-17398,26456,25241,-11588,4413,16376,-17524,-4434,-10339,25545,1673,2369,-22029,494,-21158,-25087,31854,-5229,16445,11328,-27894,-8711,9851,-25008,-24242,-11608,-18529,-25661,-29216,-14627,14131,32590,10488,1572,31758,-21958,-8361,18923,-17335,-14562,-8510,15883,6048,29991,-26402,29733,-21086,-31258,-6586,-27488,-27653,-2138,8290,14208,11555,19856,16237,13221,32381,-12263,11385,-6396,15021,5285,-15335,13253,-30262,18612,-16752,-23700,31204,-3624,-17073,22696,-3067,-18770,6534,7959,13965,25172,21496,9038,-26267,24757,-17690,22303,-24128,-24815,-8309,-16161,4227,1420,28398,-15695,20041,24886,12522,9004,-14002,-21741,-32566,6694,-24274,18196,-32035,8696,22940,21532,627,-15754,9008,-24616,800,-4530,1370,4319,-14472,-471,4789,12169,3413,29393,-10544,33,-179,16874,13497,-21252,25101,-842,11249,-16163,-7726,-765,-7633
,23427,-30333,6872,10383,6209,-27687,-14505,19878,-27898,22413,-3302,6544,32504,10520,31734,6183,17701,21427,1651,-20923,-25688,15537,20181,22260,-32179,-20380,25281,4088,3216,17569,-26613,-10937,2901,4346,18171,-8552,12357,8372,2319,14341,-30561,12201,-14358,14861,11401,-1993,-6346,26375,6505,-14501,28165,-17483,7738,26028,6124,-21389,-15743,15345,-28782,5155,30217,-1802,-12689,-3522,1905,-26875,-5262,17796,6935,-8659,6211,-26871,32089,981,-25317,8283,16301,17106,16779,-32120,-32297,30205,27652,-1343,12708,31304,-6617,-9237,31429,5505,29658,23568,-18595,30296,16476,18287,5280,-21107,-12939,-25229,-32297,-25253,30248,27987,15780,15623,-28143,12018,-27486,-29636,9786,-25601,20451,-4969,22314,-17390,-26587,8347,-27581,28967,-8109,-4832,14502,-5771,28001,-8072,4474,-27641,-26004,31519,-25541,32438,31193,-30783,-6715,-9102,-16837,14032,6051,-25498,19113,5977,22619,-25835,2370,-7287,30176,-4155,-1670,19628,24044,-17353,7641,23810,25377,28750,-29288,-27397,15754,-15301,-58,4916,-31504,14187,-2850,-27792,-18861,-12538,-2182,-1216,-15844,28248,5078,-14774,10954,-7378,-19538,19916,12745,-20303,8966,20130,5839,124,-14381,-10111,-3421,14744,-14153,-12976,18052,22643,28637,-8344,10625,17587,30627,20208,32748,14409,-24586,1147,25375,-19578,-30718,566,-32363,25211,-6184,-9499,-1298,-15021,7315,-7503,-6466,30837,26065,29221,10240,-26468,-22402,14411,-12633,486,23081,-3017,29153,14389,-7124,-1362,-5923,-16178,8375,-10703,14857,-25647,-27019,15801,-12978,-8598,19705,20597,-30398,-26220,4353,16727,-32026,20708,-5801,-1997,-10687,25746,-6121,-26035,-20184,-132,-14840,10642,11104,14152,-9046,9430,20324,25779,-4102,15865,-13540,23309,-32225,14181,1833,-16613,6109,13445,-27984,23205,17459,-7990,7818,-10089,-3293,-917,-12818,-30529,10048,-8815,-26510,-3771,-937,29743,13811,24787,3687,14022,-4296,27445,-20914,-16052,13058,-25321,16601,21987,-22111,-30097,1337,18948,-22404,7743,-20243,-2442,-16707,-24869,1776,23598,-9155,-4169,19604,-22276,-19558,31477,-18600,4449,25266,23319,5476,-10626,-6
608,-31935,-24036,-27768,9310,27947,-21332,-9567,11139,-14433,2305,29320,-15067,24012,13432,15417,28116,-25375,5574,12202,11423,26679,10351,8775,-6735,31154,12798,21211,11407,-22379,17143,-12834,21259,9921,-14212,-5718,18914,-21348,-13176,-26810,343,-2588,-2181,-1560,-127,-14012,19528,-26240,19847,4414,11952,-14303,27726,-23715,-30580,14102,-28593,-5288,-17623,32623,-23835,-18817,-32104,-20440,-5627,1437,-17760,28129,-31626,-27675,18685,-8865,-7094,-5974,-13482,-997,6818,-30148,6026,-22659,-16370,29152,-24204,1439,26129,18666,19885,-10170,-28160,5338,-12847,8874,-21083,-2580,21624,-28762,-28468,22968,5320,-8627,6600,16263,-17538,29087,-16418,-28448,18032,-9571,-11588,-16571,-29386,29634,-17035,-17362,-29562,20706,-13976,7856,-17785,16327,-3130,-13327,-12049,1116,4711,24446,-29112,4029,11684,-27776,-16403,-24897,-29309,16464,7436,-23514,4109,30669,29002,-10716,2617,-9735,8688,15520,6974,-2971,-24775,-19899,21578,28420,-7372,30956,-2791,3106,6168,18530,-21515,28978,22802,23070,2681,-4333,16509,-30843,30096,-19440,-32630,21072,9681,5221,-32617,20256,6975,-6401,-18868,-20190 diff --git a/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice12.tflite b/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice12.tflite new file mode 100644 index 0000000..04c91f5 Binary files /dev/null and b/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice12.tflite differ diff --git a/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice12_golden_int16.csv b/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice12_golden_int16.csv new file mode 100644 index 0000000..abf063f --- /dev/null +++ b/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice12_golden_int16.csv @@ -0,0 +1 @@ 
+-10030,-8269,-5095,-29805,-12130,-10662,-13565,18211,3306,15918,29916,-11169,19191,13058,-17697,11716,-10962,5664,-810,-13381,-22017,17437,-18468,-17720,16764,18320,-13483,25939,-13411,-24340,17260,-25895,31140,-21627,15504,12079,-25785,16008,21586,-577,12325,8510,8387,15530,-11402,-5933,4037,17944,16497,503,17351,-2823,24093,-14177,11847,10693,-32285,-18849,3141,23847,4476,3679,-6007,-11339,-8990,16260,-9319,1196,-19149,-28588,20611,24841,-6405,-6267,23458,7002,-15164,13933,-3918,-1145,7643,-31374,-23575,15876,-11954,-18113,-25966,-28078,15275,-5009,-16801,18793,29405,-18227,-32623,-15644,24754,18437,10822,-31467,-59,13406,-9389,-22800,10886,-2935,-19135,-28021,11626,3805,-20375,20111,31271,28494,5393,16592,-17185,11197,-6526,15854,-23134,-1581,-26539,25554,-10509,-26214,10973,-18216,25056,-22905,-22669,16001,-1269,30605,30123,-32043,-19898,16570,-541,-30018,11075,-15155,1704,-5773,2433,25273,13502,-10147,23834,14973,5594,10197,-12065,15774,-31345,8531,-5551,6992,26517,6072,23098,20803,12552,24666,3285,17084,12374,-1088,-31316,-24970,24314,9359,542,-12798,17464,4163,-17570,14330,-11106,-1111,-14196,-23684,28060,24795,27076,-14752,-22332,29827,25889,-13002,-26956,-28760,-32188,795,-13064,-3653,-3746,13350,21379,30297,-11426,16016,-31308,31321,-28114,-22136,12991,-18371,-25801,16465,5093,-7700,-17158,-26377,30155,28069,-12553,20609,-2550,25374,9367,22906,-28951,3452,866,9009,-9434,-28985,19996,6259,23100,6234,-22666,-21791,-20924,-27354,23194,-11811,-22935,-20385,31588,-11985,-7485,-3139,1296,13907,-31701,26902,29072,23814,2586,-22082,24278,-7773,20976,-7799,361,-24970,10934,12215,29217,11852,-32646,21254,-796,-23293,-16240,-22344,11240,-3596,12508,-9964,-7233,17018,21930,9584,12208,-9729,15482,21049,25654,-12199,2423,8263,31848,8900,20998,-12954,-26578,28981,2158,21098,-13589,-21325,-18361,29048,-26513,-5584,-27562,-9216,-5542,3899,18403,14665,-24574,25184,19989,8137,-17695,-754,-10608,18754,-12551,-8154,9053,-18164,-14680,24866,28505,-651,-9209,-9522,-2459,18875,1
2660,18229,-31434,-13496,-32224,-20047,-268,-5702,7338,-6350,7352,-10218,-185,11898,2737,-6416,-32508,24153,13896,7120,18298,1834,8849,-239,6969,-16396,18542,-30404,10992,28513,31006,-9272,-1767,-24576,2741,-24956,12861,23792,-23863,7225,-5221,-10856,-26707,-19607,-28562,10820,-20603,-5106,-15935,-7665,-3865,11328,-21056,14680,24492,-5202,-26614,30871,4475,-22459,-8776,-26294,-30352,3344,6327,-5696,28455,-1445,3998,-12506,-18677,-32272,17752,32593,-21590,-28325,-26881,-28016,-23065,19753,30997,-17786,-14222,27229,-21530,-10880,28550,-25266,19894,22470,15035,-26911,23777,-9785,-26940,-23756,7660,-13392,8704,-5594,-24383,-32396,21240,-17107,3735,-8871,32442,31384,-12117,-10700,-2819,-2261,-1597,23252,-7228,-25544,31887,25090,-9118,-29656,-28751,31702,2104,-18586 diff --git a/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice12_input0_int16.csv b/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice12_input0_int16.csv new file mode 100644 index 0000000..f812720 --- /dev/null +++ b/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice12_input0_int16.csv @@ -0,0 +1 @@ 
+-17006,16359,-30703,11938,-23887,5370,28563,-19435,28413,-9533,8484,5009,-11295,-15453,-28153,-27990,23919,-11584,-24637,-14314,-9178,-26494,9122,20420,19048,2278,12369,-17368,9386,-29119,14045,-22581,-21671,-7488,9703,29435,-3488,24841,-7872,-2154,-21731,15657,-18814,-8507,-30796,4266,-808,18382,27821,13783,26605,15232,-28682,30868,-20088,26866,-12264,26864,-19169,-32677,24218,-13622,4300,-21155,20217,26204,11483,21807,23260,-7028,6165,27176,5739,29368,-9447,32022,15350,28099,20528,28433,-1008,19993,-15900,5384,26938,771,-8595,-3055,-22656,21944,23683,24196,-11099,-25269,-21736,-24635,8725,19579,-25926,18377,4788,2538,25432,-11514,-24576,-25080,-7721,-161,5691,-188,-1724,-26642,17466,-31598,21553,9339,-205,-18610,602,-3290,11313,-13081,-25836,10913,-26451,-13630,24575,16453,-10197,-27758,-5866,-12698,-2875,-736,-11889,-4279,17402,5549,-15048,7463,-3891,-3042,31118,7780,16074,-18796,-7795,26959,-16679,10065,-18218,18677,11917,14133,16329,19742,15695,-27629,-29578,9250,-27286,-18562,-6965,21986,-23653,-7869,-7723,24093,7188,-16393,6306,-20748,24039,-29168,2371,-22311,24316,16297,-32392,-12033,-18078,-16574,23822,5968,-7306,-18698,-16729,-8424,-18520,-3128,-9911,-14856,1948,-10879,-28746,15348,-32381,-3808,-7931,-16363,22850,-8478,705,1367,-18640,-9340,30917,661,3744,-12229,-30179,-16420,3305,-7056,-31037,31286,4296,13788,7126,-5069,-27015,18311,21030,-3064,-10030,-8269,-5095,-29805,-12130,-10662,-13565,18211,3306,15918,29916,-11169,19191,13058,-17697,11716,-10962,5664,-810,-13381,-22017,17437,-18468,-17720,16764,18320,-13483,25939,-13411,-24340,17260,-25895,31140,-21627,15504,12079,-25785,16008,21586,-577,12325,8510,8387,15530,-11402,-5933,4037,17944,16497,503,17351,-2823,24093,-14177,11847,10693,-32285,-18849,3141,23847,4476,3679,-6007,-11339,-8990,16260,-9319,1196,-19149,-28588,20611,24841,-6405,-6267,23458,7002,-15164,13933,-3918,-1145,7643,-31374,-23575,15876,-11954,-18113,-25966,-28078,15275,-5009,-16801,18793,29405,-18227,-32623,-15644,24754,18437,10822,-31467
,-59,13406,-9389,-22800,10886,-2935,-19135,-28021,11626,3805,-20375,20111,31271,28494,5393,16592,-17185,11197,-6526,15854,-23134,-1581,-26539,25554,-10509,-26214,10973,-18216,25056,-22905,-22669,16001,-1269,30605,30123,-32043,-19898,16570,-541,-30018,11075,-15155,1704,-5773,2433,25273,13502,-10147,23834,14973,5594,10197,-12065,15774,-31345,8531,-5551,6992,26517,6072,23098,20803,12552,24666,3285,17084,12374,-1088,-31316,-24970,24314,9359,542,-12798,17464,4163,-17570,14330,-11106,-1111,-14196,-23684,28060,24795,27076,-14752,-22332,29827,25889,-13002,-26956,-28760,-32188,795,-13064,-3653,-3746,13350,21379,30297,-11426,16016,-31308,31321,-28114,-22136,12991,-18371,-25801,16465,5093,-7700,-17158,-26377,30155,28069,-12553,20609,-2550,25374,9367,22906,-28951,3452,866,9009,-9434,-28985,19996,6259,23100,6234,-22666,-21791,-20924,-27354,23194,-11811,-22935,-20385,31588,-11985,-7485,-3139,1296,13907,-31701,26902,29072,23814,2586,-22082,24278,-7773,20976,-7799,361,-24970,10934,12215,29217,11852,-32646,21254,-796,-23293,-16240,-22344,11240,-3596,12508,-9964,-7233,17018,21930,9584,12208,-9729,15482,21049,25654,-12199,2423,8263,31848,8900,20998,-12954,-26578,28981,2158,21098,-13589,-21325,-18361,29048,-26513,-5584,-27562,-9216,-5542,3899,18403,14665,-24574,25184,19989,8137,-17695,-754,-10608,18754,-12551,-8154,9053,-18164,-14680,24866,28505,-651,-9209,-9522,-2459,18875,12660,18229,-31434,-13496,-32224,-20047,-268,-5702,7338,-6350,7352,-10218,-185,11898,2737,-6416,-32508,24153,13896,7120,18298,1834,8849,-239,6969,-16396,18542,-30404,10992,28513,31006,-9272,-1767,-24576,2741,-24956,12861,23792,-23863,7225,-5221,-10856,-26707,-19607,-28562,10820,-20603,-5106,-15935,-7665,-3865,11328,-21056,14680,24492,-5202,-26614,30871,4475,-22459,-8776,-26294,-30352,3344,6327,-5696,28455,-1445,3998,-12506,-18677,-32272,17752,32593,-21590,-28325,-26881,-28016,-23065,19753,30997,-17786,-14222,27229,-21530,-10880,28550,-25266,19894,22470,15035,-26911,23777,-9785,-26940,-23756,7660,-13392,8704,-5594,-2
4383,-32396,21240,-17107,3735,-8871,32442,31384,-12117,-10700,-2819,-2261,-1597,23252,-7228,-25544,31887,25090,-9118,-29656,-28751,31702,2104,-18586 diff --git a/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice13.tflite b/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice13.tflite new file mode 100644 index 0000000..7aa777b Binary files /dev/null and b/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice13.tflite differ diff --git a/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice13_golden_int16.csv b/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice13_golden_int16.csv new file mode 100644 index 0000000..80b9d38 --- /dev/null +++ b/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice13_golden_int16.csv @@ -0,0 +1 @@ +-31463,14901,10336,-9026,-12551,28092,-30377,17254,5807,-13858,17703,18945,-12216,17656,-1715,4024,12099,26971,-16343,32227,-18303,-27617,17014,21190,6823,-32754,1466,-1376,16252,-7677,-12630,-30148,-31496,14433,9680,-6494,8688,7625,-32070,-20211,28986,18926,1450,-11218,-2260,13832,30707,9388,14192,31870,-20669,22786,-16942,-22152,-3948,-12988,28288,24156,-21921,-1400,-9046,9365,4038,-28165,-22607,-5475,-22309,27288,11268,-18176,-2911,20639,32764,-14785,25414,19271,-25167,-12670,-21174,32738,-7267,-15533,-9065,-5095,-19430,26558,-12316,-8066,30550,-4261,-272,-19729,-7468,-12739,11450,4152,-31125,-6254,6622,10317,-18357,14957,-10651,-12628,30814,-7028,-15322,-13327,25490,-25549,-23328,18972,-12405,5546,-13602,21852,27178,22176,-19007,25222,2469,-14026,-3229,4496,23420,-19904,-19870,32020,-9319,22591,24089,-610,-25737,31113,18582,-30492,24295,-19618,20034,14467,-31268,10461,-12641,-22183,-23628,28636,-14222,-2951,7135,-5748,-21679,-19443,16909,25563,22462,11112,27545,4104,-31781,9149,-2440,17816,26898,28243,-16257,10536,30861,7672,-2494,-14831,-31993,23943,25696,-11749,15091,3074,-11501,-117,-22325,29643,-23575,-
25677,23637,-32712,19236,31141,5269,-32105,-17056,-26856,-22152,-14688,7145,-25752,8213,19249,-7504,21805,-28706,-31904,-7096,15874,-26852,6650,27076,20958,7138,295,-24199,10905,-1056,-14476,-10040,-14607,21993,-15662,-12088,-26486,1309,-14700,-4705,28418,-29731,-12720,10294,7022,5336,-25925,-904,21906,20649,-24860,-6927,3717,24208,-22571,-29023,25438,3233,-32734,-16442,16033,-3901,-4873,-18794,14233,4996,4688,-13287,5819,-2204,-11854,-11186,-30303,-1528,30016,-13520,13545,2919,-16213,8905,-23474,-8359,28569,-7692,25744,21090,17606,22661,23055,-21131,22350,1409,16008,-18312,22646,29323,608,-16301,-17178,-5984,15396,-31821,30134,-23557,-4864,-28594,-27715,-16171,-30896,21043,-24004,-7815,2742,-15137,-24325,19978,-24348,-26188,24526,-7907,-21809,-14358,20737,26050,-26975,13590,1076,14187,-6611,21668,-16016,32502,-9943,2383,6360,-25928,-12104,-7624,3376,-12676,21398,-9741,3163,15195,15068,-5652,8288,30613,7823,-25308,18373,-2541,-13017,-17386,-24432,-9396,-27018,-32173,4636,-32121,-23874,23597,14157,14462,-23243,4891,-3802,-3933,-791,-5313,2967,7921,-20956,3636,14274,-19313,-16129,524,24582,-7692,-11913,2433,7013,18824,24958,29847,-15224,4422,-27069,23491,-21017,-15018,-13482,24490,3779,14239,-19555,-9820,19232,-6016,-7914,-28267,3321,2040,-26283,15329,10738,12555,19894,29951,-15585,22350,15688,-30668,-12428,15099,-3532,-13544,32014,-30469,-3361,30208,-5813,2581,-25852,15169,-4345,-14760,-6397,-17619,10103,-20672,-12966,-2546,12412,-2235,17040,192,16608,-8574,-30404,-3498,-19239,-26843,-18331,18122,4983,13868,-30827,-16324,15221,-12891,-9846,12803,31422,30382,8929,-16898,15807,14187,-20178,25120,21965,-27562,26343,20727,2736,-5331,-11551,18859,-14044,-11988,-32213,-31756,-2886,24101,-10007,17330,10770,-6329,28447,23468,20893,-17304,-13741,-29890,-15923,8987,14419,-28846,-19676,25456,31602,10581,-24350,-24794,6242,-27979,-2349,-12602,-28701,-21462,-28891,-23641,-17066,28959,26665,-7495,-6886,-6495,-23980,8968,5127,29951,23009,8294,-16190,29247,6193,-17326,-22467,4215,13
014,-4251,-23248,-17569,12179,15113,-18811,-18514,-26901,-27960,-31641,9646,-9981,14218,-20931,-30600,1212,15376,-20893,-25103,-1015,9566,30249,-17698,-3866,-21361,-19749,14015,2457,25765,-27675,-5033,-28958,-28438,-28546,-15568,31002,7359,30746,5002,10363,26314,-18385,-12492,-18084,-4644,12184,-10636,21112,16787,-7063,9869,4325,-17769,-17335,-27874,-7933,-534,28675,-32147,-22716,-23308,-25719,13153,27545,-5089,5928,-23788,16996,10991,4148,-5855,-31396,8667,-9163,-3175,-2789,-19105,-2339,-1959,-26777,11898,-519,-14974,25181,-5172,14574,-13323,-22602,-7409,-22297,18187,27173,4947,-27546,-20633,14311,18945,15241,23461,-25616,11462,-2913,29227,-14227,2397,-26767,-30519,655,-8641,10747,826,26163,-30175,-8739,5217,20693,18312,9864,-25432,25887,30314,-17058,-9857,900,-11977,-25262,5798,-29934,-4065,2086,-9302,8917,29924,12134,3183,-29265,20270,-11460,-22302,-18258,-8460,15569,28842,-27730,-32237,-17981,-29798,26459,-20409,2331,13975,22952,12547,23729,15232,-5199,-29604,6474,16884,-24011,961,-15383,13145,15091,17074,31903,25819,-3946,823,-14806,25366,-12186,-16715,31419,-31336,-4107,-29989,-13025,9916,-19019,3365,-2328,-2536,14556,-486,-12836,3357,11427,-12759,-1794,19752,-29163,-1720,30038,-12622,-70,-13749,20613,-23540,-23554,-31373,-152,-13380,11541,31810,12720,-8245,-13435,-5146,-5469,15148,22331,11818,29130,16285,-14315,7839,-26128,-18363,18177,25442,-12452,6633,-23143,29135,32086,-30443,2554,7845,-3991,-13422,7284,2217,21876,16723,15847,-7636,2657,7338,-19443,2863,26032,14085,-15954,30694,-21619,-27285,-5567,25860,25087,24746,-18497,-15427,4359,-18820,-3216,-20308,10859,-8718,19084,-15055,8004,4029,-31231,-370,-28214,24425,-1092,-13973,758,7112,6828,8795,-22339,-19764,-27609,-2847,28875,8243,-26699,3026,-402,2361,-6537,-11795,-10199,-20990,12753,-26150,-12290,9210,-5105,-19932,-10654,-3075,-32151,-2318,16769,23622,-20893,-20027,12870,26446,16432,-23725,15518,-5873,15006,-5743,12677,32535,-20152,12512,-30495,29176,-7023,-4797,-17258,6135,-13242,5157,11858,16632,-21854
,28651,61,-8747,-16841,17986,-22331,-13735,-17871,-29057,8325,12032,19506,23988,20179,13326,-932,11386,8795,-18387,-27323,2880,-27381,-17370,-18916,-21567,-29045,-24426,14699,24524,-30920,-1639,-20309,-911,25046,-27206,3418,25,1209,-4072,19797,-21727,30498,19701,-19286,14719,1746,19046,-4316,18440,-32312,-3432,-2544,27436,10608,-17669,20135,-10292,6099,-17060,-3534,-1021,23863,-25782,-4772,7275,-28482,-256,-14953,4327,23647,-13939,13654,-265,-8583,25206,-2718,8169,4656,12975,-21065,-17514,13875,-8494,2375,-23335,21084,-31311,24246,-9015,30827,15265,-6354,23419,26229,-9715,29222,23920,6523,-28695,-3495,-23250,13327,18259,3081,19571,-8890,-9939,-31729,27096,-12593,17572,-3662,-8567,9329,31007,-26555,-639,22197,30787,-16394,11942,8814,5705,-27945,11284,-14777,11380,-18443,-7392,22857,6551,-19337,3653,-18545,27151,6916,-12201,-3673,20993,19647,-11688,30372,11337,-27151,18074,8962,-27222,4228,6387,-19481,6364,-11193,21128,9113,6600,11279,-27502,-10128,-10743,-23768,1101,14152,-3678,21191,-5573,9432,27544,1898,31284,-30370,8136,7888,29873,15891,-787,17140,8553,-5013,28706,-9106,-18632,26515,14234,-18876,-19621,22483,1215,30963,31033,457,15895,-2499,6012,-3887,10968,-26677,32068,-8539,10087,20263,32012,11921,7030,7109,18097,-29848,10071,-23225,812,-3341,-25853,6876,-30680,-28345,-12236,-4923,957,17150,23202,16205,-6048,18825,16528,-16143,714,-820,-10042,-13129,-26526,-28700,-2313,17297,8905,8881,24577,20192,-26878,-25767,31664,-31463,-9069,32333,2159,-6557,23019,32492,-29718,7570,-10443,-14196,7861,-12878,-12361,-16915,-15653,15519,-5957,6390,-14340,12638,-4597,-10066,-20418,9763,26628,-28062,24918,31786,5757,32610,23588,25805,-12747,450,-4873,-19147,-8032,25660,3052,-1138,24285,-16783,27851,-14545,-4782,-29778,-263,22391,16399,25243,6529,-8350,23330,1575,1364,31516,-4997,-21614,26681,6746,-5265,-3002,26117,-25915,-5145,21439,-18492,19675,-6996,-23011,8899,11795,982,-17854,24010,32555,-31586,11874,21352,-10387,-20930,-17937,-11187,20869,6386,28643,-11506,-5280,-15516,12921
,-28825,6809,23152,22305,-19681,8481,22331,-9987,21659,30785,-10990,2497,27595,5493,-20697,-19984,-23389,-25399,-14224,29910,-9951,1634,-6256,-31471,-17606,11121,23355,-24513,29723,5550,2781,3814,-31976,30328,12407,-13573,23085,21009,-6048,-1541,-18774,3092,2140,-16979,1419,-281,-28732,29114,6926,-30578,13015,22829,18546,4923,637,6249,13509,12761,22647,12336,17944,-10797,-23529,7025,-21571,3212,11149,-24662,-395,-23851,20115,-22212,-1268,-28062,11451,3785,-18373,-21168,-32033,-24082,-18652,-14812,20861,8928,-983,-29880,-26552,28426,28889,-22309,12150,26531,4335,30022,22470,-19778,-29824,28154,-9213,26426,5914,18672,-30438,14849,11551,-17786,-12332,-9354,14959,-31887,-7948,12653,20501,-25987,-23280,-14800,-22156,17045,15840,22867,21883,-13361,-10921,-24118,-20987,-30591,-25361,-31992,25973,25498,-23506,942,-25103,32226,17210,-28310,-15522,-26752,27547,13071,18909,24542,1925,-17335,-1353,8925,-28672,6781,24266,-18760,-14018,28063,23019,-22866,10271,29293,-29831,-17025,-2692,-9109,-2146,-2752,-27934,10164,-20386,-26627,26477,-29265,-8173,17651,-26528,14620,-1536,29223,27771,1100,28043,16328,13681,22296,-11239,31766,-7482,10545,22690,-23926,-15349,-6172,-18559,-15692,30150,-2505,-14609,26275,22855,2506,-7519,-23238,-32514,-7766,-5396,10176,26828,14891,-22575,31273,19838,26641,-18835,-12451,15038,-8544,-6345,-9142,21446,-1025,-22514,-6515,-6594,30228,17506,-2243,-14410,24952,11486,-11637,-29364,-31056,-20729,27714,-23808,-3828,-27590,-24604,18613,23048,14607,22581,-14236,17167,1138,14222,25426,19833,-9135,12451,15049,6041,24575,-28202,-9009,22037,-14178,-26918,1862,981,1554,-13703,-15490,-20466,-12103,-24332,-26713,20455,-16494,31816,-24565,15752,18148,-18219,-5671,-29954,27207,907,-4225,4776,25190,14190,7483,-24991,21269,6701,9472,7100,-15672,-3670,-25953,-6596,-24009,21371,-56,968,20259,-10905,20327,23838,17466,-5721,1264,-26914,-9654,-6644,25983,-10590,20168,-25951,-18819,32141,19915,-31992,-4997,32598,9728,1525,-13376,29581,-26103,5656,-6865,17170,-31239,-7437,-25819
,-2588,-26303,-3988,-21974,-19519,-11390,-26247,1059,23628,1482,-1362,25126,-10929,-23817,-29838,-31518,31116,-23540,22135,-7539,21606,-27326,9778,-179,-13281,-6731,-20062,22901,13318,-28717,-20350,-15363,-32226,13145,-5158,-27287,22827,-12708,2407,-31868,-14042,-20209,-231,31538,-10302,7287,-14642,-30002,-19778,14207,24309,-26088,-10428,-22881,6733 diff --git a/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice13_input0_int16.csv b/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice13_input0_int16.csv new file mode 100644 index 0000000..53c9367 --- /dev/null +++ b/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice13_input0_int16.csv @@ -0,0 +1 @@ +-19372,31359,29572,-11976,28401,-26252,19149,28097,21321,-30522,19468,3648,-14802,5385,970,-31006,14756,-4373,-4017,1229,-9749,30988,13973,-27596,-32117,1295,9261,-12887,9481,28631,-23690,-28932,-15520,-9315,-1292,-3914,-5587,-25159,-6985,19252,24674,-23162,-27926,8195,-17924,8710,7813,-824,-29824,-21853,-22154,-28960,2660,-11364,-12305,3718,18730,18387,30655,17886,25132,-32598,16596,-20728,32258,7907,18077,-5528,-6773,30702,-11739,6828,-16894,5920,20908,4319,-20430,-4702,-4507,7043,-4311,-7747,-20708,17904,18931,21721,28155,-9175,6621,29765,-25971,15056,-13788,26788,11405,9767,-21256,19292,30374,-25508,5212,23202,-22588,7528,-3842,1797,7645,-24928,29397,1311,-19303,-15126,-20953,6621,-13665,-23847,-17625,-10342,15661,-4947,-6700,-23110,-22349,3832,3710,21659,-17825,6883,-12358,17586,21253,18805,24566,-30692,-26669,28335,-27040,6792,-4509,1289,-18329,14556,-14197,-15189,-21021,-32576,-5400,11641,-5507,-32717,-10334,27714,21621,-10490,-32357,6416,11601,-4714,-18035,31049,-11513,-19425,20227,-31614,24163,-22794,15948,29106,28516,12361,18529,12312,19129,18778,2455,16480,-28918,9093,30846,29144,7075,25716,-23795,1819,-9231,-836,21579,4889,2198,17105,15097,-29274,-3522,31325,24399,18271,-31585,-9479,-24982,-653,28212,4316,20176,19991,24265,18180,15231,32642
,10335,-18225,-15158,24943,31068,-5271,29641,-14188,26668,-10065,-25809,8100,31285,6141,2599,-30974,29163,28046,440,16803,-13771,-923,-9663,-25929,-22315,29727,-25900,15108,-22101,24869,3534,-29695,14605,-5005,-8222,-8180,8902,-18612,-2365,22423,9905,-7191,25112,-7777,8882,8457,12082,-26068,-4180,-4222,-24985,19147,13109,-16805,-7862,28619,-31444,-19465,16493,-23039,-4430,-2277,20673,16490,-29397,22176,11924,-12808,-6007,-17541,-2193,-19190,-18620,-21238,14994,-17113,1090,22659,-9547,-18415,27454,7924,694,8945,5047,-2467,17642,23742,-32620,-9272,16475,-8790,16961,15947,27541,26403,-9988,25339,2973,-31524,-19826,3531,28576,26725,20103,23919,-12780,-26729,-17129,-23341,12477,30382,1449,19022,-18392,-24344,10038,-12057,-5944,8991,553,29046,22739,24269,10864,-8375,24702,-27560,19585,9575,11991,-10404,12231,-9920,-5683,22443,-9832,11181,-15241,-14464,-24681,-12152,28387,-15819,24333,13621,7372,11142,-20028,11229,-21793,-15205,-25584,-26294,22608,31146,241,9786,-3284,-31137,32135,-8929,-14134,-23947,-9653,7063,22223,21685,25968,19855,-13206,-4767,-26448,-22531,-18554,-2960,32039,29045,-28143,23167,-20213,-11532,-1331,-4876,-23118,-12993,14393,24881,-16376,24887,-32500,-10066,17590,-24098,368,4348,-14896,3487,-28296,-13981,-22987,2597,-31426,-21086,-29061,26545,-17824,-2204,-15069,15015,16610,-30570,-4039,19223,-13638,14134,-13761,-12434,12115,-17603,3867,32073,25361,18498,26353,-8411,23010,16390,-13133,-23510,-25384,-1627,20012,6730,25700,29391,29570,15445,21420,-13025,189,13862,-28609,-30469,-20490,-2326,-30408,-29699,-3731,8571,-24590,29069,23388,-23389,18993,19653,-20845,22412,-20703,-7667,26922,25872,-885,13235,22337,24660,-17100,32553,1395,-6366,-20519,16040,19404,-19411,28473,19522,20143,28615,17213,-10334,30659,-22344,-5664,9522,-7252,-5765,-29322,-21579,849,-19623,-27240,3042,5555,6660,16223,-17097,19803,-5893,2772,-9310,-28344,-11882,18977,22785,-19153,-8761,22482,20185,29962,29113,19416,27864,3811,-10142,-5345,-18739,-14283,23430,23414,-16036,9220,-24099,-5784,-
6939,19363,29878,-26220,-15469,-14545,28602,16541,-9271,11908,7271,-5796,7438,21791,-31798,4473,-29469,-24727,-32371,19963,-22698,7360,25031,16653,4503,32454,-6545,17763,-14583,27532,1528,8030,-10728,31969,-12784,20171,23551,19887,23171,18270,-23683,-8697,17704,17899,-7535,-4208,25605,-25104,18328,-20181,12603,-15422,5615,1919,-31174,29128,25298,-27782,29779,-19582,18879,-20676,-8711,30958,-8751,-30011,15925,9936,-1910,-4195,-27365,-28643,16608,-28324,-1085,-13761,-551,-11415,-7092,25179,13023,26797,-19290,-30030,22593,-31017,21172,-8792,11713,-17794,3336,-11631,-27602,18397,6643,5593,1148,-24880,-5327,-25406,-25362,-29227,19557,23744,-15957,10106,13043,9855,-10372,-9343,-28912,5037,-8510,25698,-23049,18021,-4451,16998,-20671,31186,-22335,4657,-4108,-29465,-27969,-16554,-19867,-13081,28235,6437,-15331,-8401,-3083,14201,3776,31261,25510,-307,-15276,-2920,2922,31730,-30142,4761,4817,5358,533,-2791,-6555,-28041,-3426,14738,-27126,-932,-13121,-19785,-32341,-10150,14661,-21798,26914,-26882,-25661,-24065,-3670,-13749,13558,-10855,-16851,-27008,-17823,-3530,6732,25005,7511,-30350,2357,23386,24336,10326,11336,-12746,27467,25385,-6210,29368,24073,20857,13915,24894,6102,15704,5034,-14414,27506,12583,-8174,25604,30884,13904,29576,-27483,1758,-32360,29202,-31898,-24651,-4873,-12608,30984,-14269,7610,-10480,10042,1534,17374,-6081,-11642,-2329,13815,-1431,-29103,2060,16847,-4330,-401,18010,-8711,-6022,17610,2626,-16181,-16279,-23047,13129,28000,22061,-31463,14901,10336,-9026,-12551,28092,-30377,17254,5807,-13858,17703,18945,-12216,17656,-1715,4024,12099,26971,-16343,32227,-18303,-27617,17014,21190,6823,-32754,1466,-1376,16252,-7677,-12630,-30148,-31496,14433,9680,-6494,8688,7625,-32070,-20211,28986,18926,1450,-11218,-2260,13832,30707,9388,14192,31870,-20669,22786,-16942,-22152,-3948,-12988,28288,24156,-21921,-1400,-9046,9365,4038,-28165,-22607,-5475,-22309,27288,11268,-18176,-2911,20639,32764,-14785,25414,19271,-25167,-12670,-21174,32738,-7267,-15533,-9065,-5095,-19430,26558,-123
16,-8066,30550,-4261,-272,-19729,-7468,-12739,11450,4152,-31125,-6254,6622,10317,-18357,14957,-10651,-12628,30814,-7028,-15322,-13327,25490,-25549,-23328,18972,-12405,5546,-13602,21852,27178,22176,-19007,25222,2469,-14026,-3229,4496,23420,-19904,-19870,32020,-9319,22591,24089,-610,-25737,31113,18582,-30492,24295,-19618,20034,14467,-31268,10461,-12641,-22183,-23628,28636,-14222,-2951,7135,-5748,-21679,-19443,16909,25563,22462,11112,27545,4104,-31781,9149,-2440,17816,26898,28243,-16257,10536,30861,7672,-2494,-14831,-31993,23943,25696,-11749,15091,3074,-11501,-117,-22325,29643,-23575,-25677,23637,-32712,19236,31141,5269,-32105,-17056,-26856,-22152,-14688,7145,-25752,8213,19249,-7504,21805,-28706,-31904,-7096,15874,-26852,6650,27076,20958,7138,295,-24199,10905,-1056,-14476,-10040,-14607,21993,-15662,-12088,-26486,1309,-14700,-4705,28418,-29731,-12720,10294,7022,5336,-25925,-904,21906,20649,-24860,-6927,3717,24208,-22571,-29023,25438,3233,-32734,-16442,16033,-3901,-4873,-18794,14233,4996,4688,-13287,5819,-2204,-11854,-11186,-30303,-1528,30016,-13520,13545,2919,-16213,8905,-23474,-8359,28569,-7692,25744,21090,17606,22661,23055,-21131,22350,1409,16008,-18312,22646,29323,608,-16301,-17178,-5984,15396,-31821,30134,-23557,-4864,-28594,-27715,-16171,-30896,21043,-24004,-7815,2742,-15137,-24325,19978,-24348,-26188,24526,-7907,-21809,-14358,20737,26050,-26975,13590,1076,14187,-6611,21668,-16016,32502,-9943,2383,6360,-25928,-12104,-7624,3376,-12676,21398,-9741,3163,15195,15068,-5652,8288,30613,7823,-25308,18373,-2541,-13017,-17386,-24432,-9396,-27018,-32173,4636,-32121,-23874,23597,14157,14462,-23243,4891,-3802,-3933,-791,-5313,2967,7921,-20956,3636,14274,-19313,-16129,524,24582,-7692,-11913,2433,7013,18824,24958,29847,-15224,4422,-27069,23491,-21017,-15018,-13482,24490,3779,14239,-19555,-9820,19232,-6016,-7914,-28267,3321,2040,-26283,15329,10738,12555,19894,29951,-15585,22350,15688,-30668,-12428,15099,-3532,-13544,32014,-30469,-3361,30208,-5813,2581,-25852,15169,-4345,-14760,-63
97,-17619,10103,-20672,-12966,-2546,12412,-2235,17040,192,16608,-8574,-30404,-3498,-19239,-26843,-18331,18122,4983,13868,-30827,-16324,15221,-12891,-9846,12803,31422,30382,8929,-16898,15807,14187,-20178,25120,21965,-27562,26343,20727,2736,-5331,-11551,18859,-14044,-11988,-32213,-31756,-2886,24101,-10007,17330,10770,-6329,28447,23468,20893,-17304,-13741,-29890,-15923,8987,14419,-28846,-19676,25456,31602,10581,-24350,-24794,6242,-27979,-2349,-12602,-28701,-21462,-28891,-23641,-17066,28959,26665,-7495,-6886,-6495,-23980,8968,5127,29951,23009,8294,-16190,29247,6193,-17326,-22467,4215,13014,-4251,-23248,-17569,12179,15113,-18811,-18514,-26901,-27960,-31641,9646,-9981,14218,-20931,-30600,1212,15376,-20893,-25103,-1015,9566,30249,-17698,-3866,-21361,-19749,14015,2457,25765,-27675,-5033,-28958,-28438,-28546,-15568,31002,7359,30746,5002,10363,26314,-18385,-12492,-18084,-4644,12184,-10636,21112,16787,-7063,9869,4325,-17769,-17335,-27874,-7933,-534,28675,-32147,-22716,-23308,-25719,13153,27545,-5089,5928,-23788,16996,10991,4148,-5855,-31396,8667,-9163,-3175,-2789,-19105,-2339,-1959,-26777,11898,-519,-14974,25181,-5172,14574,-13323,-22602,-7409,-22297,18187,27173,4947,-27546,-20633,14311,18945,15241,23461,-25616,11462,-2913,29227,-14227,2397,-26767,-30519,655,-8641,10747,826,26163,-30175,-8739,5217,20693,18312,9864,-25432,25887,30314,-17058,-9857,900,-11977,-25262,5798,-29934,-4065,2086,-9302,8917,29924,12134,3183,-29265,20270,-11460,-22302,-18258,-8460,15569,28842,-27730,-32237,-17981,-29798,26459,-20409,2331,13975,22952,12547,23729,15232,-5199,-29604,6474,16884,-24011,961,-15383,13145,15091,17074,31903,25819,-3946,823,-14806,25366,-12186,-16715,31419,-31336,-4107,-29989,-13025,9916,-19019,3365,-2328,-2536,14556,-486,-12836,3357,11427,-12759,-1794,19752,-29163,-1720,30038,-12622,-70,-13749,20613,-23540,-23554,-31373,-152,-13380,11541,31810,12720,-8245,-13435,-5146,-5469,15148,22331,11818,29130,16285,-14315,7839,-26128,-18363,18177,25442,-12452,6633,-23143,29135,32086,-30443,25
54,7845,-3991,-13422,7284,2217,21876,16723,15847,-7636,2657,7338,-19443,2863,26032,14085,-15954,30694,-21619,-27285,-5567,25860,25087,24746,-18497,-15427,4359,-18820,-3216,-20308,10859,-8718,19084,-15055,8004,4029,-31231,-370,-28214,24425,-1092,-13973,758,7112,6828,8795,-22339,-19764,-27609,-2847,28875,8243,-26699,3026,-402,2361,-6537,-11795,-10199,-20990,12753,-26150,-12290,9210,-5105,-19932,-10654,-3075,-32151,-2318,16769,23622,-20893,-20027,12870,26446,16432,-23725,15518,-5873,15006,-5743,12677,32535,-20152,12512,-30495,29176,-7023,-4797,-17258,6135,-13242,5157,11858,16632,-21854,28651,61,-8747,-16841,17986,-22331,-13735,-17871,-29057,8325,12032,19506,23988,20179,13326,-932,11386,8795,-18387,-27323,2880,-27381,-17370,-18916,-21567,-29045,-24426,14699,24524,-30920,-1639,-20309,-911,25046,-27206,3418,25,1209,-4072,19797,-21727,30498,19701,-19286,14719,1746,19046,-4316,18440,-32312,-3432,-2544,27436,10608,-17669,20135,-10292,6099,-17060,-3534,-1021,23863,-25782,-4772,7275,-28482,-256,-14953,4327,23647,-13939,13654,-265,-8583,25206,-2718,8169,4656,12975,-21065,-17514,13875,-8494,2375,-23335,21084,-31311,24246,-9015,30827,15265,-6354,23419,26229,-9715,29222,23920,6523,-28695,-3495,-23250,13327,18259,3081,19571,-8890,-9939,-31729,27096,-12593,17572,-3662,-8567,9329,31007,-26555,-639,22197,30787,-16394,11942,8814,5705,-27945,11284,-14777,11380,-18443,-7392,22857,6551,-19337,3653,-18545,27151,6916,-12201,-3673,20993,19647,-11688,30372,11337,-27151,18074,8962,-27222,4228,6387,-19481,6364,-11193,21128,9113,6600,11279,-27502,-10128,-10743,-23768,1101,14152,-3678,21191,-5573,9432,27544,1898,31284,-30370,8136,7888,29873,15891,-787,17140,8553,-5013,28706,-9106,-18632,26515,14234,-18876,-19621,22483,1215,30963,31033,457,15895,-2499,6012,-3887,10968,-26677,32068,-8539,10087,20263,32012,11921,7030,7109,18097,-29848,10071,-23225,812,-3341,-25853,6876,-30680,-28345,-12236,-4923,957,17150,23202,16205,-6048,18825,16528,-16143,714,-820,-10042,-13129,-26526,-28700,-2313,17297,8905,8881
,24577,20192,-26878,-25767,31664,-31463,-9069,32333,2159,-6557,23019,32492,-29718,7570,-10443,-14196,7861,-12878,-12361,-16915,-15653,15519,-5957,6390,-14340,12638,-4597,-10066,-20418,9763,26628,-28062,24918,31786,5757,32610,23588,25805,-12747,450,-4873,-19147,-8032,25660,3052,-1138,24285,-16783,27851,-14545,-4782,-29778,-263,22391,16399,25243,6529,-8350,23330,1575,1364,31516,-4997,-21614,26681,6746,-5265,-3002,26117,-25915,-5145,21439,-18492,19675,-6996,-23011,8899,11795,982,-17854,24010,32555,-31586,11874,21352,-10387,-20930,-17937,-11187,20869,6386,28643,-11506,-5280,-15516,12921,-28825,6809,23152,22305,-19681,8481,22331,-9987,21659,30785,-10990,2497,27595,5493,-20697,-19984,-23389,-25399,-14224,29910,-9951,1634,-6256,-31471,-17606,11121,23355,-24513,29723,5550,2781,3814,-31976,30328,12407,-13573,23085,21009,-6048,-1541,-18774,3092,2140,-16979,1419,-281,-28732,29114,6926,-30578,13015,22829,18546,4923,637,6249,13509,12761,22647,12336,17944,-10797,-23529,7025,-21571,3212,11149,-24662,-395,-23851,20115,-22212,-1268,-28062,11451,3785,-18373,-21168,-32033,-24082,-18652,-14812,20861,8928,-983,-29880,-26552,28426,28889,-22309,12150,26531,4335,30022,22470,-19778,-29824,28154,-9213,26426,5914,18672,-30438,14849,11551,-17786,-12332,-9354,14959,-31887,-7948,12653,20501,-25987,-23280,-14800,-22156,17045,15840,22867,21883,-13361,-10921,-24118,-20987,-30591,-25361,-31992,25973,25498,-23506,942,-25103,32226,17210,-28310,-15522,-26752,27547,13071,18909,24542,1925,-17335,-1353,8925,-28672,6781,24266,-18760,-14018,28063,23019,-22866,10271,29293,-29831,-17025,-2692,-9109,-2146,-2752,-27934,10164,-20386,-26627,26477,-29265,-8173,17651,-26528,14620,-1536,29223,27771,1100,28043,16328,13681,22296,-11239,31766,-7482,10545,22690,-23926,-15349,-6172,-18559,-15692,30150,-2505,-14609,26275,22855,2506,-7519,-23238,-32514,-7766,-5396,10176,26828,14891,-22575,31273,19838,26641,-18835,-12451,15038,-8544,-6345,-9142,21446,-1025,-22514,-6515,-6594,30228,17506,-2243,-14410,24952,11486,-11637,-2936
4,-31056,-20729,27714,-23808,-3828,-27590,-24604,18613,23048,14607,22581,-14236,17167,1138,14222,25426,19833,-9135,12451,15049,6041,24575,-28202,-9009,22037,-14178,-26918,1862,981,1554,-13703,-15490,-20466,-12103,-24332,-26713,20455,-16494,31816,-24565,15752,18148,-18219,-5671,-29954,27207,907,-4225,4776,25190,14190,7483,-24991,21269,6701,9472,7100,-15672,-3670,-25953,-6596,-24009,21371,-56,968,20259,-10905,20327,23838,17466,-5721,1264,-26914,-9654,-6644,25983,-10590,20168,-25951,-18819,32141,19915,-31992,-4997,32598,9728,1525,-13376,29581,-26103,5656,-6865,17170,-31239,-7437,-25819,-2588,-26303,-3988,-21974,-19519,-11390,-26247,1059,23628,1482,-1362,25126,-10929,-23817,-29838,-31518,31116,-23540,22135,-7539,21606,-27326,9778,-179,-13281,-6731,-20062,22901,13318,-28717,-20350,-15363,-32226,13145,-5158,-27287,22827,-12708,2407,-31868,-14042,-20209,-231,31538,-10302,7287,-14642,-30002,-19778,14207,24309,-26088,-10428,-22881,6733 diff --git a/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice14.tflite b/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice14.tflite new file mode 100644 index 0000000..8a76389 Binary files /dev/null and b/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice14.tflite differ diff --git a/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice14_golden_int16.csv b/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice14_golden_int16.csv new file mode 100644 index 0000000..b4f9334 --- /dev/null +++ b/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice14_golden_int16.csv @@ -0,0 +1 @@ 
+-27007,27790,32002,18591,27327,31889,-31339,18864,25470,-23160,30110,-16271,19349,-14507,-14144,-22718,-5986,22580,10628,8902,-25370,3201,4440,-14681,-2710,-1213,29276,-30886,16080,31146,10157,4420,20396,745,20004,-27156,28542,29559,-30699,-9067,-16883,-25903,7751,-1067,18467,-16298,-2908,26571,-29299,-1943,6186,-515,10561,9008,-23200,9580,-8551,-14599,-12086,14336,-27092,4358,-25489,5972,23638,26653,6804,-28124,-22811,27197,5256,-16001,24071,-11788,1147,-12404,-19735,4808,-23194,21717,22479,-24298,-7360,-4047,-29199,-2082,-28294,19382,-1935,-17311,-29749,-31030,-8801,27466,-32352,8426,8734,-30335,-19345,-22493,27370,-27582,5907,29860,4619,-9410,27515,-19823,27913,-24404,18886,5506,-18990,26442,-20499,10854,-18814,-26480,-6565,10882,-22798,-22816,-21737,-29480,-4314,10281,19129,22101,9431,12383,-14882,-20645,-5080,-13137,-23772,-9467,-23994,6021,12993,28347,1903,772,-17393,25611,-26094,-29167,-32485,15667,17496,23313,-4536,1635,1584,10458,22734,-2698,13608,-21369,-24858,-2393,-1426,-14448,-29944,29281,21020,-13192,16209,29991,-6105,10031,-2737,411,-2900,22777,-31500,-1989,14189,28562,-121,10316,17135,4464,8352,11483,6763,-23367,-17322,1947,32647,-4591,18372,-7906,-28203,-13838,-5534,-1639,-6915,3836,23064,18735,-19630,5556,-30741,14124,20784,26020,7752,-30275,-30885,22793,4922,15862,28861,-18770,-18105,16392,-8885,8179,4075,1412,-18385,3506,-25437,2933,4615,20855,4279,-2382,4096,1199,-21005,-30004,8154,-26345,5396,-18336,10279,-2995,11776,9549,22658,19517,27228,30967,-28090,13689,21970,1561,20867,1506,6277,-23611,12359,-1291,3102,30466,-32495,18928,-8835,27049,28480,23254,12676,-32310,31104,19419,22827,32429,-5185,-14726,20006,24764,32054,11576,5141,-8149,-27488,-24616,-1504,-7657,5503,-14419,1567,-17839,-16662,19372,8298,23416,-32280,-26449,15051,-17604,-20380,-22269,-19227,3670,21388,-28021,27265,31853,6529,-27708,31356,-16868,14226,30482,5757,8008,1716,18146,-19103,1948,-12168,-15492,27303,-13632,2374,-23013,-22786,17355,-21975,-29007,-19274,2020,-27369,31000,71
8,17084,-13943,-11510,-32526,-18828,-578,2071,9356,15769,-27683,-14247,-27444,3470,-8581,-20183,-30223,10318,-31518,-15886,3308,16501,12084,22888,194,12219,25341,14737,-12597,-3551,-10874,31941,24787,-24048,-30327,-22255,-5834,-10050,-15363,-24406,-2885,-12162,-11477,-31842,20091,2928,-2103,17819,-31603,606,16580,-11522,301,-795,214,-23259,-8507,-12523,26426,16850,-21128,-17080,20838,21635,14144,9909,-5510,-31661,23616,-14924,23819,-15551,16070,2198,-17842,-12257,29605,14068,-6532,-23553,15460,-24470,22644,31470,57,-26944,-25464,22635,-20589,-16358,7583,201,-20368,-26161,-27619,-12739,9930,3743,14295,2029,-10393,11736,3032,27550,-30942,-15452,6557,998,1075,-25329,13882,-12574,12540,24276,6815,-20055,25503,-11732,16162,-27257,16883,17540,-13144,20476,4871,-14767,11530,13899,2468,-7495,18232,-30551,-12540,-27276,23035,21758,3241,1542,-6111,-22267,21644,-10772,13020,-4265,22843,-25394,32316,29574,-19786,-28669,-9525,16784,2536,23958,-25004,26450,-18256,12449,6935,31336,18916,27143,-17688,27734,-6479,-26900,-5129,-25723,-632,32086,-11585,19839,-19959,-16333,-23592,7665,30442,-3686,2584,-7965,22334,8628,27852,529,-26089,-24894,21784,22260,-15123,13482,-28458,-14028,-30544,586,-31273,-27300,-16504,8524,8777,14834,-19662,-12144,-1788,16331,-26308,8018,-6677,-18129,27516,7609,27199,17481,2052,-25661,15543,2097,-4507,-9486,6184,5615,20403,14661,3174,32463,-88,9050,28553,21932,-26703,-29450,-13628,3329,15421,10438,4834,5815,-11937,-7629,8397,15635,13316,-29901,17630,24271,21298,-10371,-6806,-20578,-9651,-2986,22619,627,-15204,-13808,29558,26069,31256,14836,-8135,-17256,-27085,-14795,2561,-8051,31281,-11874,6755,-9807,22914,-1562,8418,-11975,-32759,25542,-29189,-25720,-3159,-23174,23592,-2517,-21850,9569,23969,-1671,12797,-9946,-25503,9445,-11114,-4165,3689,-10656,-1500,21165,-27066,11134,5943,-5929,-16204,-22963,-32549,19111,-15972,-13492,-2176,20457,2823,-6559,-4503,29494,-15165,27818,-20006,-16603,-23208,12068,30976,24648,3530,2653,-18705,6120,-29806,4042,-5606,-26002,-821,
10878,-1094,-10390,-30469,21010,16014,25126,-9271,20035,2993,5289,17580,29851,23894,-29503,21690,23787,-23181,2938,26940,-16059,-11489,23081,13085,20947,-27512,-20340,32626,-1046,-22372,15695,-1922,-17170,7543,-4973,-23974,15296,10129,3437,-14572,-22646,21271,-7820,-18136,-24954,-29007,666,-20174,-29506,11550,-2929,32311,-4214,-16693,-31193,19313,28229,15074,-23210,-2674,709,19623,1930,16252,-15015,-11855,8869,-4330,1130,-32316,4333,-20917,-26865,4252,-16607,27383,-26186,4374,-8302,22774,5400,2229,21883,1012,30596,-27712,-22669,-26751,-1831,-10198,-27533,-12582,12660,-10373,-12421,5253,5528,5971,15649,24887,5275,-951,13764,-3546,-31594,31896,-18405,-5515,-5583,12496,-10878,-28208,13482,-24110,8672 diff --git a/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice14_input0_int16.csv b/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice14_input0_int16.csv new file mode 100644 index 0000000..4ae1573 --- /dev/null +++ b/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice14_input0_int16.csv @@ -0,0 +1 @@ 
+-27007,27790,32002,18591,27327,31889,-31339,18864,25470,-23160,30110,-16271,19349,-14507,-14144,-22718,-5986,22580,10628,8902,-25370,3201,4440,-14681,-2710,-1213,29276,-30886,16080,31146,10157,4420,20396,745,20004,-27156,28542,29559,-30699,-9067,-16883,-25903,7751,-1067,18467,-16298,-2908,26571,-29299,-1943,6186,-515,10561,9008,-23200,9580,-8551,-14599,-12086,14336,-27092,4358,-25489,5972,23638,26653,6804,-28124,-22811,27197,5256,-16001,24071,-11788,1147,-12404,-19735,4808,-23194,21717,22479,-24298,-7360,-4047,-29199,-2082,-28294,19382,-1935,-17311,-29749,-31030,-8801,27466,-32352,8426,8734,-30335,-19345,-22493,27370,-27582,5907,29860,4619,-9410,27515,-19823,27913,-24404,18886,5506,-18990,26442,-20499,10854,-18814,-26480,-6565,10882,-22798,-22816,-21737,-29480,-4314,10281,19129,22101,9431,12383,-14882,-20645,-5080,-13137,-23772,-9467,-23994,6021,12993,28347,1903,772,-17393,25611,-26094,-29167,-32485,15667,17496,23313,-4536,1635,1584,10458,22734,-2698,13608,-21369,-24858,-2393,-1426,-14448,-29944,29281,21020,-13192,16209,29991,-6105,10031,-2737,411,-2900,22777,-31500,-1989,14189,28562,-121,10316,17135,4464,8352,11483,6763,-23367,-17322,1947,32647,-4591,18372,-7906,-28203,-13838,-5534,-1639,-6915,3836,23064,18735,-19630,5556,-30741,14124,20784,26020,7752,-30275,-30885,22793,4922,15862,28861,-18770,-18105,16392,-8885,8179,4075,1412,-18385,3506,-25437,2933,4615,20855,4279,-2382,4096,1199,-21005,-30004,8154,-26345,5396,-18336,10279,-2995,11776,9549,22658,19517,27228,30967,-28090,13689,21970,1561,20867,1506,6277,-23611,12359,-1291,3102,30466,-32495,18928,-8835,27049,28480,23254,12676,-32310,31104,19419,22827,32429,-5185,-14726,20006,24764,32054,11576,5141,-8149,-27488,-24616,-1504,-7657,5503,-14419,1567,-17839,-16662,19372,8298,23416,-32280,-26449,15051,-17604,-20380,-22269,-19227,3670,21388,-28021,27265,31853,6529,-27708,31356,-16868,14226,30482,5757,8008,1716,18146,-19103,1948,-12168,-15492,27303,-13632,2374,-23013,-22786,17355,-21975,-29007,-19274,2020,-27369,31000,71
8,17084,-13943,-11510,-32526,-18828,-578,2071,9356,15769,-27683,-14247,-27444,3470,-8581,-20183,-30223,10318,-31518,-15886,3308,16501,12084,22888,194,12219,25341,14737,-12597,-3551,-10874,31941,24787,-24048,-30327,-22255,-5834,-10050,-15363,-24406,-2885,-12162,-11477,-31842,20091,2928,-2103,17819,-31603,606,16580,-11522,301,-795,214,-23259,-8507,-12523,26426,16850,-21128,-17080,20838,21635,14144,9909,-5510,-31661,23616,-14924,23819,-15551,16070,2198,-17842,-12257,29605,14068,-6532,-23553,15460,-24470,22644,31470,57,-26944,-25464,22635,-20589,-16358,7583,201,-20368,-26161,-27619,-12739,9930,3743,14295,2029,-10393,11736,3032,27550,-30942,-15452,6557,998,1075,-25329,13882,-12574,12540,24276,6815,-20055,25503,-11732,16162,-27257,16883,17540,-13144,20476,4871,-14767,11530,13899,2468,-7495,18232,-30551,-12540,-27276,23035,21758,3241,1542,-6111,-22267,21644,-10772,13020,-4265,22843,-25394,32316,29574,-19786,-28669,-9525,16784,2536,23958,-25004,26450,-18256,12449,6935,31336,18916,27143,-17688,27734,-6479,-26900,-5129,-25723,-632,32086,-11585,19839,-19959,-16333,-23592,7665,30442,-3686,2584,-7965,22334,8628,27852,529,-26089,-24894,21784,22260,-15123,13482,-28458,-14028,-30544,586,-31273,-27300,-16504,8524,8777,14834,-19662,-12144,-1788,16331,-26308,8018,-6677,-18129,27516,7609,27199,17481,2052,-25661,15543,2097,-4507,-9486,6184,5615,20403,14661,3174,32463,-88,9050,28553,21932,-26703,-29450,-13628,3329,15421,10438,4834,5815,-11937,-7629,8397,15635,13316,-29901,17630,24271,21298,-10371,-6806,-20578,-9651,-2986,22619,627,-15204,-13808,29558,26069,31256,14836,-8135,-17256,-27085,-14795,2561,-8051,31281,-11874,6755,-9807,22914,-1562,8418,-11975,-32759,25542,-29189,-25720,-3159,-23174,23592,-2517,-21850,9569,23969,-1671,12797,-9946,-25503,9445,-11114,-4165,3689,-10656,-1500,21165,-27066,11134,5943,-5929,-16204,-22963,-32549,19111,-15972,-13492,-2176,20457,2823,-6559,-4503,29494,-15165,27818,-20006,-16603,-23208,12068,30976,24648,3530,2653,-18705,6120,-29806,4042,-5606,-26002,-821,
10878,-1094,-10390,-30469,21010,16014,25126,-9271,20035,2993,5289,17580,29851,23894,-29503,21690,23787,-23181,2938,26940,-16059,-11489,23081,13085,20947,-27512,-20340,32626,-1046,-22372,15695,-1922,-17170,7543,-4973,-23974,15296,10129,3437,-14572,-22646,21271,-7820,-18136,-24954,-29007,666,-20174,-29506,11550,-2929,32311,-4214,-16693,-31193,19313,28229,15074,-23210,-2674,709,19623,1930,16252,-15015,-11855,8869,-4330,1130,-32316,4333,-20917,-26865,4252,-16607,27383,-26186,4374,-8302,22774,5400,2229,21883,1012,30596,-27712,-22669,-26751,-1831,-10198,-27533,-12582,12660,-10373,-12421,5253,5528,5971,15649,24887,5275,-951,13764,-3546,-31594,31896,-18405,-5515,-5583,12496,-10878,-28208,13482,-24110,8672,7009,18395,-7506,-1936,19863,11576,20263,-4832,21695,-31697,-17802,22457,23140,8671,23261,22495,-27975,8262,-24845,6412,-13607,-25767,12940,2182,28744,-20378,5490,-11482,-8783,-29413,30639,-30304,-14964,-8037,-28039,-19391,-18136,30094,23393,-22101,20032,-21214,2386,17208,23261,28281,9001,-15637,11986,16151,30299,5554,-31204,-19004,30913,-26784,-32607,-32176,-2106,-16173,8268,2510,23821,9896,-11718,-32326,8406,-231,-18899,-10517,-31404,1971,30011,-22225,25375,30289,-31841,3628,16318,-21993,-14433,32582,17176,-6269,-10353,27059,-2319,-17683,26858,-20343,17863,-22375,-18236,22193,12569,-12696,-22263,8840,-22577,-12890,-20668,-31517,-26716,18263,30568,25600,-606,-17427,-13485,28397,5926,24010,-6019,-28742,30046,1074,-24108,23419,29957,-25076,8782,-15224,19093,28880,-8195,10013,-22673,-11261,31741,24903,3814,25695,-22195,18516,-4387,-26722,3899,8148,21665,32764,22315,-27430,7655,3997,5626,-1815,-3447,26948,8578,9714,-1373,11025,-24812,-26890,-8388,-2278,-24888,-32412,-7696,16997,-5830,27084,-32330,16343,3563,-13657,-3538,26996,12866,14059,-24141,-26524,-2841,-24570,-1387,-22633,-16838,29981,14682,13662,-14128,-16275,-2169,28436,-27198,-11799,15214,-4325,-32016,-19668,5818,31856,-23726,18527,28807,9452,-19672,-21054,12983,18128,13206,-12244,-27158,21975,26212,7491,12659,8698,23
726,-5198,-28755,12351,28584,-29202,813,32625,-29663,31905,-19054,3718,-8863,-13602,-5158,2480,-19326,-27504,14852,-28762,2820,-19850,7317,19345,-26227,28811,-10951,-24491,10290,4968,-3991,-2913,-9949,26859,15511,-11678,19148,551,18007,27515,-5962,-14498,32607,9167,24221,-15916,-32363,-6424,4496,-22884,-17010,15333,8182,-23550,15166,32506,-6699,20561,-18498,-18181,-21855,32332,15603,20952,2911,16278,7794,19345,4989,350,18570,-23432,-11385,18157,-20453,-32309,-1932,8755,9739,10379,-25623,28193,-32143,-25382,30177,25075,-16955,28324,-21571,21983,-17167,911,18379,26065,-5071,-27252,9883,-15897,164,-5118,-5604,-6899,6494,-10003,-30560,11779,-27080,-5238,9295,18062,-22551,-31606,-18638,-10972,22654,21675,-20110,-5032,-29392,30534,32372,-15692,21918,-10615,11270,19949,-2991,-17495,-23319,-6015,-30613,31637,6412,19124,-23568,3840,25323,22372,22257,-7172,-11566,4375,28147,5204,6463,10038,11108,-29998,30526,-4265,6944,25293,1063,26343,21265,-25990,-21555,-13160,15411,2364,22827,15155,-21468,23882,15262,-28070,12550,-7876,5639,24909,8475,10736,7776,-848,-17286,-5359,-30801,14757,-16324,24228,32065,-21499,1094,-30152,26405,10090,-1918,9455,-5372,-29333,-5942,-19877,6732,-25932,26934,10973,-3337,2648,-7657,24948,-29973,10051,-9010,23621,-24111,-32274,21483,26023,-30682,-27030,6621,-9887,7937,-24314,-6389,-5660,7711,-25186,10185,-22963,2594,-23574,9326,-26399,-6542,-9058,-24385,-8550,14364,25416,-24862,-13592,-10102,-15924,-29941,23304,20997,-16911,-21941,9595,-19445,5491,-14971,-1772,-15168,-16051,27050,30720,-9998,25950,32313,21474,4487,-24689,6805,-9699,-9001,-31608,-13853,9571,-27476,-6988,26134,18974,22424,-218,-25123,-26151,20397,-21186,7604,-301,-20839,1986,21881,5647,5612,7772,28112,-11017,9764,11313,28270,-9194,-14262,8705,-23490,-39,3263,-7194,-18190,24394,-8028,-17348,3376,-32561,24709,11231,15085,-24494,31760,30687,2216,27227,9379,-26147,26795,-1230,22376,21627,29685,18164,-7390,17327,-8703,-4041,25073,22921,29768,-13847,-11003,30098,-29891,10941,-12299,-27288,-11347
,17382,31333,15841,7953,-24778,21623,-23386,32036,-20684,7298,25780,-9718,-11654,13824,-9851,-9769,1796,26523,9955,-27611,-26925,14210,13417,30205,21313,-1176,20345,4791,31316,29856,-31748,1093,12426,-24142,-6314,-6213,18783,-12156,-25576,-8552,301,-14058,-6894,12759,-6299,6609,7592,-4366,-21034,-31156,-8630,-22251,-24206,-28596,4756,8240,-17413,-1062,-5667,-18408,-17796,-1319,11445,23315,-8229,13290,-14163,-11433,16639,27827,-22264,-19403,-16684,23669,26157,22316,-9231,1128,-9522,3073,-10897,6952,-24463,-3418,-29717,-12447,18039,-14698,16532,13321,17698,-9626,15900,-4938,-6753,-13478,-365,-6426,-31058,8680,-26212,-14098,1545,-7569,23115,-11801,-17889,22540,3583,-4935,-32689,-16507,2558,21614,-5826,10469,-10798,-21648,-6597,-22868,-13840,-15646,14044,-10454,29496,30266,-5929,8251,-20664,16873,-32180,19807,6161,-11561,-11845,8079,16751,1957,10835,24229,23663,5677,-567,31634,26248,-5859,-7515,21013,-10366,-5393,3380,-29109,-30332,25140,-26934,-3649,-13225,-22706,32648,23993,-18195,27830,19954,3565,-5340,-3753,-17018,-13470,926,27513,-1440,12047,-27668,-9234,-28284,31778,24911,-25972,-27146,29506,7582,-18781,-27352,-30466,25539,30906,-13016,22531,-9482,-6505,-31519,-20411,5164,8725,-17471,7527,-15387,-21090,-12109,-26871,-10484,-24936,14285,-719,-3633,6782,19635,86,-8141,-22584,4635,-28558,-2085,9840,14405,-151,-9601,29446,-19603,-5018,-4369,31373,12086,5775,-13893,-7301,29032,-11089,-31039,21870,28996,18286,23996,-1771,15157,19953,-6885,9366,-32078,7050,-4200,-6865,-26407,-20679,-8300,-19237,2076,-5846,-30651,-18850,18675,-13480,-4182,4307,-11383,31655,26321,12621,-21243,229,-17994,-8445,-32269,16921,-10368,764,24151,-23458,19589,-8460,2221,25355,-18831,-26905,-11228,30514,13877,-16654,83,11327,-11468,16141,25172,-14499,16031,26933,-1821,20135,-9665,18067,-138,2846,1157,26787,-24363,10322,17676,31680,28735,-24964,7853,-1014,-4738,32097,-28668,-25084,18805,-5908,28472,371,-1114,-140,28819,-24405,-17916,26974,11871,13011,-10961,25813,15143,-22359,1160,-20127,21786,-5473
,-29581,31204,22212,15950,-6858,31964,31574,23719,21865,32045,-3432,4981,-13160,-17455,-1296,23924,4090,-23908,-3581,23380,-2039,-28830,26863,-22453,-7250,26066,3495,16975,2669,23115,30740,2414,-21547,17543,-4189,13447,19007,19002,5388,-7762,-20462,-22394,14907,859,-8599,15679,-22808,13411,25522,-4074,-12282,-9271,-19775,302,5166,15781,-30385,7129,-16295,-11293,-18616,19267,11405,-13870,-27761,-14479,-16376,-29981,-31198,-17329,-3193,-20034,28399,9624,-19486,-16492,-21204,-26787,23607,-23680,-15679,-24848,27555,-29229,-3286,-31516,-26993,-9779,-19452,21830,21688,-16264,13097,7539,-5096,-27256,-12528,9347,-2874,19476,-26719,-24331,8053,21406,20590,-31406,15812,-13559,-7029,25448,7537,-19408,-9091,3605,-11088,-24945,26144,-10095,4416,11423,-1105,-14641,27612,-18311,-13815,-11151,30413,-25058,26980,-7037,-2283,-24649,23559,19783,-10672,26254,-19493,16295,26914,20859,-10705,-31513,-5548,-28835,1281,-7776,26083,31609,-7337,-10659,-15276,19312,30324,-19119,12769,4926,-16195,31046,-4299,13080,19789,-4553,9664,29746,9917,-28168,-411,23893,-31806,-31294,-32378,10913,10151,-2055,-3581,28584,-10877,-8787,19831,-19351,16642,-6236,-31232,1338,-10395,18525,9243,-31701,-2228,8494,-12714,-15748,12787,-5391,6735,-4002,28907,20030,24863,-7629,-21753,5816,1062,26438,-2559,-14030,-5664,-21942,-27704,-6457,-14126,14206,13659,-9414,-28827,4497,-14242,28595,-21256,-1267,14686,-26114,17629,2518,1374,-28211,-40,-11875,-2555,-11215,6880,-16019,28055,-8112,14881,16364,23877,-12237,6742,14756,2431,28168,24780,-18949,-18611,-3732,-4140,-1835,8135,-6116,-1474,666,29077,-4263,18095,-25087,-18300,-18080,14703,-26706,-18730,-18733,-24779,32061,30862,3596,15412,23878,-29726,-11197,23036,32299,-13137,-12009,-29325,-14592,24063,-26618,-28156,-27732,-6903,-9520,1101,1480,-14524,4787,-4227,-2874,25198,8910,14060,-9427,28173,20341,-21793,14288,-18907,31266,-995,10965,-26307,-16399,10866,28168,-23707,18771,13563,-19556,-10896,-17838,-13054,-17495,31243,-22424,-22698,26860,-11676,12807,29372,-23991,-9151,4
201,-4967,4316,-25429,17510,30353,26279,5373,24844,-11038,-6198,-31319,-1192,-18819,14631,-19744,-24089,-27971,-20699,25439,-8519,3184,-19838,-11681,8206,-19336,21686,-22675,20663,-13124,-31833,-31284,-18994,-15723,28012,16887,-17483,-4537,-25076,-19366,4616,105,4300,26752,11142,-16842,-2050,-17026,-12495,31911,16246,23037,10518,20232,-13575,8466,12265,-16673,3065,17256,12676,-8926,-3906,-20796,-9394,18215,-12283,-7049,-25395,4731,-7940,18158,22316,17242,16490,21351,13567,-10330,-613,12388,32313,-2262,21062,25221,-7904,13603,31001,-31754,-30290,-15266,-8937,26560,702,-14074,32036,-18242,-27283,-22055,12170,7200,-16979,27229,25526,7854,16782,4886,-24496,294,22588,31778,6504,8482,-324,2972,6484,-4717,-20971,-8000,6067,-4710,-26434,-4295,-23802,15066,19080,-30915,8455,-7646,-28143,7872,-11217,-1241,-2660,-2700,6861,30262,28432,31624,-30715,5668,23337,8058,8033,-2056,4556,28896,30204,-30793,19809,-23986,14384,10791,21775,-25134,-2709,11054,-29371,22627,-2846,23108,13139,-4535,-16962,32169,-22547,14627,-10384,7580,-25775,23698,-18134,-4354,18575,5444,2258,-1308,-28108,2773,21551,-2035,9111,17744,21160,27862,-1500,14252,-20018,20169,-26164,-14691,18996,20758,19133,-3734,13311,-31303,-26827,-29021,22662,3718,5700,13828,-30014,-22258,-384,25391,4751,18073,-2751,1245,26172,-29404,14104,8824,-2147,-8963,-29839,-525,32749,22649,-7198,28824,-18690,-26230,-24665,11207,29618,-25154,-15447,-16842,-13909,30373,10492,-24875,-3536,-18121,24661,-32626,31231,22931,-5825,-13681,15275,-11417,6086,-24271,-14164,16887,24995,10720,-11858,-32310,4592,-24217,13608,1064,22260,-26697,5791,12075,-18519,-4341,-7932,25613,-29377,19484,11047,19657,11133,26456,-7780,-3085,2194,32526,-16607,-19156,-6018,4904,15543,6493,27907,2184,11085,-1452,-7019,-10901,-15259,-12194,-19187,-14309,-21878,-7174,-7787,19565,7538,17718,-25010,-7407,27424,-29860,-14172,30963,-17554,-19827,-9967,-16810,17568,-17058,-24922,51,2530,-7080,6972,-17744,31915,-7834,-24744,-27110,-28883,-1117,-4034,-24459,-8708,-4879,27998,944,
-18744,-7254,-2412,-9116,25955,27215,19804,23546,26945,-4151,-15438,-13439,5214,21375,32290,-11496,-13695,28387,27604,-7505,20371,23568,26798,10356,-31270,-17393,-27439,29385 diff --git a/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice15.tflite b/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice15.tflite new file mode 100644 index 0000000..be5a637 Binary files /dev/null and b/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice15.tflite differ diff --git a/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice15_golden_int16.csv b/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice15_golden_int16.csv new file mode 100644 index 0000000..51130f0 --- /dev/null +++ b/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice15_golden_int16.csv @@ -0,0 +1 @@ +-16118,30493,12911,-15841,31521,20561,-29401,-29122,-32347,26788,-10880,30868,-16226,2867,-26954,-8067,-5616,15640,19695,22250,26716,9047,31124,-2567,22317,-9194,-28973,-5766,-10396,-2756,-18779,16077,27668,-3877,26917,23923,-28832,2058,31035,-11831,25510,11973,-14448,5791,15328,30413,20034,11886,-30551,-21860,7068,31180,-32072,-5803,23015,19746,6339,-27355,-7724,32336,-13749,19049,-12314,12400,-15015,-32497,-26675,-17689,-7332,4153,-20764,22529,26762,-21489,-4998,-22477,7112,24803,-24136,-12323,-24686,-15248,-30298,-31869,-15549,1501,2492,25125,-25801,-6087,-28962,12123,-11351,-8969,-5365,13580,30924,-28798,-30673,-23679,5287,8432,16574,5622,-6667,8728,9173,640,29715,-28507,-15648,-15741,16479,-23948,29033,19595,14743,20913,2915,18352,16276,23461,-8495,8758,-18254,20744,30289,24620,15967,31826,12047,26596,28321,-11727,26366,22149,-25054,-2636,11431,29785,28785,13919,2971,-27229,8231,-24051,-24246,26841,-16458,-15466,-2263,-19838,13614,-12806,29921,-11973,-29443,-18066,25874,-19679,-5941,20698,15714,29821,12809,3861,16080,-18872,24250,-9738,-31136,-18266,16837,27551,-16603,-1177,28550,
18887,5445,27959,-20719,19481,-2875,-17302,4238,12362,-11752,15132,13019,-29104,-4744,10045,27335,-26010,-638,-29919,-6207,3851,-17243,5029,-25734,-21383,-15398,-16607,4133,22043,-14805,24754,8140,12812,30427,15743,-1622,4963,-23828,-7702,-10526,-8354,-14178,31789,-25895,20443,-22544,-7020,22527,-22452,-14365,32027,-5994,-7953,32422,17761,-8295,12162,23784,6124,7558,-10315,-17523,-8370,15096,19943,4804,17785,2030,12971,-3511,-8590,5243,23552,-17405,20069,12299,-19908,16168,13237,21495,-7316,2135,16574,13020,-354,13379,-15052,-23309,-15679,-17122,-1874,9642,30187,10678,18825,7400,-1864,-27893,18656,-26617,1204,-28714,23598,6240,-10314,-15823,-30593,12631,-7185,-2793,-26424,21217,-543,18272,-7302,-15067,17441,16832,20246,-17158,-22428,-26961,31866,-29852,-4912,-16903,18751,29088,16359,-7467,-2836,14543,-30552,16932,-6935,30600,26950,-26253,12860,5391,13879,16478,-6052,-28070,15884,-20672,-5038,-8117,10595,-32221,17170,-8454,19257,22813,26062,23509,-26098,25864,-1296,32577,-16336,-31675,-9350,23071,-26776,-18905,24562,-18150,-27512,-8048,-8485,-20851,-5285,-22593,21201,22454,17260,4394,28293,8324,-25730,-31151,-13148,-3143,8257,-2029,-8682,-16777,-13101,32255,12253,-7614,-26151,30456,11351,-32510,-21026,-4908,-29284,5928,-20694,7731,-692,-25440,-21042,-27255,-20050,21934,2516,-12686,-14545,-30291,12457,11812,5797,-28166,8697,-19529,-17908,-26574,2752,5264,-12833,15597,-16779,18075,-27123,114,17467,28630,15597,-10029,-11002,-1849,-23228,-12968,1957,-16222,-13874,-587,3798,-32158,10022,-30537,-8407,23665,1714,-25559,9558,-28878,6495,15895,-1546,14882,10090,-16090,20606,28255,-15762,-5986,8723,-21500,9190,-9323,-7135,-4366,24158,12109,-5254,22320,-16622,5100,-27905,6494,-11515,22876,-17925,26507,23009,12047,24773,5450,26388,-11729,29178,-19548,16274,19031,9277,32428,-26397,-31003,25671,-3289,-21160,-26883,-8737,-28834,-6032,20848,-2743,-12101,2768,-23974,10964,-23232,9734,30711,-926,6271,-544,14407,-25409,-26836,-15345,-21210,22300,-27825,-14830,-8651,-7745,29250,-9647,30
505,7901,11083,24613,-8138,27919,18761,5782,-14288,4848,-14668,29913,-16688,1875,21727,-26942,-9370,-23964,20690,20896,-8287,13454,24762,13713,-15771,-10472,-12611,-6018,4654,-5424,-9235,-3053,24486,-30254,24566,-12213,-4065,-5128,-28597,2284,-25337,26903,28894,19342,-21686,-26885,-28604,-21996,20138,-20583,20687,4649,3870,-31125,-24440,-17547,-28310,22703,12176,21628,-28030,32689,5274,-12726,29788,18668,-16575,1609,-30330,4085,-12899,-21768,-32282,25959,22675,12910,-19420,15259,-12697,20083,24693,2687,12494,30889,-28352,-29061,30476,-18494,-12443,-22312,7810,4386,3599,28240,-27149,-26389,31862,-5433,-16576,-19209,10960,-5505,-30928,26903,6323,-26456,-23259,-5587,23285,4638,28792,-11177,-19709,18694,-21869,3016,15157,-31633,-59,-9862,3095,-1498,-3897,-984,-24668,26420,-17693,-26643,-28153,-30817,12301,17466,2443,23212,-31606,28154,-3714,19244,-23320,1726 diff --git a/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice15_input0_int16.csv b/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice15_input0_int16.csv new file mode 100644 index 0000000..62cbbdf --- /dev/null +++ b/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice15_input0_int16.csv @@ -0,0 +1 @@ 
+-14414,-3999,-25663,-15560,-19025,1675,-29613,11151,-27242,25470,10717,1320,29747,-16850,-11179,31846,-2809,7628,15895,29483,12037,-13802,25355,-30004,-5939,17127,10168,-1906,-304,-20581,-12744,-24116,-28443,25554,-17670,7075,23868,-517,1791,10757,21971,-22708,13021,-9776,28951,21257,4118,-10473,-17274,17616,-16767,-4768,-530,3831,2246,-21594,-15838,24713,3192,24695,18645,28339,-29746,14748,-16118,30493,12911,-15841,31521,20561,-29401,-29122,-32347,26788,-10880,30868,-16226,2867,-26954,-8067,-5616,15640,19695,22250,26716,9047,31124,-2567,22317,-9194,-28973,-5766,-10396,-2756,-18779,16077,27668,-3877,26917,23923,-28832,2058,31035,-11831,25510,11973,-14448,5791,15328,30413,20034,11886,-30551,-21860,7068,31180,-32072,-5803,23015,19746,6339,-27355,-7724,32336,-13749,19049,-12314,12400,-15015,-32497,-26675,-17689,-7332,4153,-20764,22529,26762,-21489,-4998,-22477,7112,24803,-24136,-12323,-24686,-15248,-30298,-31869,-15549,1501,2492,25125,-25801,-6087,-28962,12123,-11351,-8969,-5365,13580,30924,-28798,-30673,-23679,5287,8432,16574,5622,-6667,8728,9173,640,29715,-28507,-15648,-15741,16479,-23948,29033,19595,14743,20913,2915,18352,16276,23461,-8495,8758,-18254,20744,30289,24620,15967,31826,12047,26596,28321,-11727,26366,22149,-25054,-2636,11431,29785,28785,13919,2971,-27229,8231,-24051,-24246,26841,-16458,-15466,-2263,-19838,13614,-12806,29921,-11973,-29443,-18066,25874,-19679,-5941,20698,15714,29821,12809,3861,16080,-18872,24250,-9738,-31136,-18266,16837,27551,-16603,-1177,28550,18887,5445,27959,-20719,19481,-2875,-17302,4238,12362,-11752,15132,13019,-29104,-4744,10045,27335,-26010,-638,-29919,-6207,3851,-17243,5029,-25734,-21383,-15398,-16607,4133,22043,-14805,24754,8140,12812,30427,15743,-1622,4963,-23828,-7702,-10526,-8354,-14178,31789,-25895,20443,-22544,-7020,22527,-22452,-14365,32027,-5994,-7953,32422,17761,-8295,12162,23784,6124,7558,-10315,-17523,-8370,15096,19943,4804,17785,2030,12971,-3511,-8590,5243,23552,-17405,20069,12299,-19908,16168,13237,21495,-7316,2135,16
574,13020,-354,13379,-15052,-23309,-15679,-17122,-1874,9642,30187,10678,18825,7400,-1864,-27893,18656,-26617,1204,-28714,23598,6240,-10314,-15823,-30593,12631,-7185,-2793,-26424,21217,-543,18272,-7302,-15067,17441,16832,20246,-17158,-22428,-26961,31866,-29852,-4912,-16903,18751,29088,16359,-7467,-2836,14543,-30552,16932,-6935,30600,26950,-26253,12860,5391,13879,16478,-6052,-28070,15884,-20672,-5038,-8117,10595,-32221,17170,-8454,19257,22813,26062,23509,-26098,25864,-1296,32577,-16336,-31675,-9350,23071,-26776,-18905,24562,-18150,-27512,-8048,-8485,-20851,-5285,-22593,21201,22454,17260,4394,28293,8324,-25730,-31151,-13148,-3143,8257,-2029,-8682,-16777,-13101,32255,12253,-7614,-26151,30456,11351,-32510,-21026,-4908,-29284,5928,-20694,7731,-692,-25440,-21042,-27255,-20050,21934,2516,-12686,-14545,-30291,12457,11812,5797,-28166,8697,-19529,-17908,-26574,2752,5264,-12833,15597,-16779,18075,-27123,114,17467,28630,15597,-10029,-11002,-1849,-23228,-12968,1957,-16222,-13874,-587,3798,-32158,10022,-30537,-8407,23665,1714,-25559,9558,-28878,6495,15895,-1546,14882,10090,-16090,20606,28255,-15762,-5986,8723,-21500,9190,-9323,-7135,-4366,24158,12109,-5254,22320,-16622,5100,-27905,6494,-11515,22876,-17925,26507,23009,12047,24773,5450,26388,-11729,29178,-19548,16274,19031,9277,32428,-26397,-31003,25671,-3289,-21160,-26883,-8737,-28834,-6032,20848,-2743,-12101,2768,-23974,10964,-23232,9734,30711,-926,6271,-544,14407,-25409,-26836,-15345,-21210,22300,-27825,-14830,-8651,-7745,29250,-9647,30505,7901,11083,24613,-8138,27919,18761,5782,-14288,4848,-14668,29913,-16688,1875,21727,-26942,-9370,-23964,20690,20896,-8287,13454,24762,13713,-15771,-10472,-12611,-6018,4654,-5424,-9235,-3053,24486,-30254,24566,-12213,-4065,-5128,-28597,2284,-25337,26903,28894,19342,-21686,-26885,-28604,-21996,20138,-20583,20687,4649,3870,-31125,-24440,-17547,-28310,22703,12176,21628,-28030,32689,5274,-12726,29788,18668,-16575,1609,-30330,4085,-12899,-21768,-32282,25959,22675,12910,-19420,15259,-12697,20083,24693,
2687,12494,30889,-28352,-29061,30476,-18494,-12443,-22312,7810,4386,3599,28240,-27149,-26389,31862,-5433,-16576,-19209,10960,-5505,-30928,26903,6323,-26456,-23259,-5587,23285,4638,28792,-11177,-19709,18694,-21869,3016,15157,-31633,-59,-9862,3095,-1498,-3897,-984,-24668,26420,-17693,-26643,-28153,-30817,12301,17466,2443,23212,-31606,28154,-3714,19244,-23320,1726,-31186,-30366,-13261,-12029,-11109,-21395,26487,-32670,-6686,-23483,22440,1940,-6734,17261,12646,26451,26652,-29812,-23753,18351,-15573,-19769,20912,-13831,-18004,-14667,1658,12213,-4315,-24765,-21872,-10872,-27576,-22638,-20139,32673,-3797,-26689,-9851,-10441,3452,28820,3810,19628,693,-12821,1765,-18940,20670,-12547,7783,-12336,-4874,-14800,-27316,26080,-18738,-10751,-23116,26560,27781,24455,-30926,-4626 diff --git a/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice16.tflite b/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice16.tflite new file mode 100644 index 0000000..9b57068 Binary files /dev/null and b/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice16.tflite differ diff --git a/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice16_golden_int16.csv b/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice16_golden_int16.csv new file mode 100644 index 0000000..91350e8 --- /dev/null +++ b/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice16_golden_int16.csv @@ -0,0 +1 @@ 
+31260,-672,24270,-28957,13462,-19635,-25683,-32042,24988,-32320,28738,1360,-30248,-15660,10407,-8766,-18587,12668,16292,-30364,-3426,-24512,23540,2265,17091,9493,-12129,31047,13048,8161,-2049,-9975,11428,23622,-24865,-3337,30295,26822,-6525,-21200,14580,22015,-14233,8470,25938,721,-17757,-8321,-27849,-7774,-2600,4271,31735,-9699,-27956,-25737,-13716,-20462,-17822,-20363,-23341,-16871,-32114,-4550,2150,-18426,16270,92,13263,10299,-9309,-3736,14802,15602,-25723,-14374,26399,21702,24422,-2126,-21523,-30836,7798,-13554,32365,27792,24381,-15846,19675,-21262,-164,-27652,-5770,5409,3429,28875,-6033,-3688,5142,5459,21199,11322,-12932,-26114,12799,-1874,9918,-31096,23078,11403,13702,2631,-4831,-20962,-15634,26906,-31843,-20882,26223,11418,20945,-14087,20195,-15442,-10029,29114,14695,-25675,4195,20959,-21038,7756,27281,-20465,23384,9028,-2865,23779,8633,10334,-14344,2734,977,-2145,-28908,-3907,-25664,-19372,5324,12038,-4053,-22757,-11027,-4902,-22882,-19841,-10219,-9077,27604,-14387,-15915,-17413,13097,16576,17197,8103,-23579,-30706,-32352,31958,-6138,-15019,-13348,-4701,-30935,-29716,-2930,605,18186,2644,-29208,-18908,-11716,1325,21889,-28854,30924,9945,-4539,16014,-17579,16207,-272,17954,-19390,-3112,16968,22244,-5708,-1573,-32708,179,10897,-7398,7194,15446,-802,26441,-22121,27483,24612,24895,1021,11878,-21634,21823,-3067,-15246,9283,27992,-19864,18431,-27322,9200,-23101,22164,14649,-11310,11085,32254,-12738,25311,-21553,-7190,14815,16433,-10598,-28215,32428,16307,20713,4941,7787,19891,-6121,-12863,16281,-8939,-7643,-26565,-9723,-25949,15568,6996,24419,-17645,15432,12462,-15137,-13936,5510,656,7827,15112,-21860,-10566,-5359,-26537,-31703,-20419,-1976,-11870,14097,29322,14720,31136,13845,-16510,30155,-11717,4165,-29518,1280,179,31934,-16042,-15376,31338,-31839,15943,-21254,17659,-8608,400,10607,17708,11262,16807,16562,-27893,9622,11321,26789,-25154,-8248,-13227,-3437,7306,-30595,31167,-29334,-21129,22478,15604,23100,30206,29158,-32338,21633,16429,23226,-9698,-19792,7244,159
86,-24138,-8649,22077,-8354,-8419,-15480,352,-25092,16157,-29835,-11998,20872,4423,-11166,-29495,14768,27798,26899,-18016,24328,8093,4689,4883,23772,-18827,-27956,32676,23426,-402,-8193,30003,-15822,1083,-25748,2560,-29701,-19521,-4153,-10483,24105,-6190,29096,21600,30899,-13027,30856,25057,-3295,-7847,31323,28150,-28189,-19863,-10972,-14477,-9218,-20779,29149,227,-4411,-13746,-17314,-10394,1783,12567,-19032,-28089,-28012,-5451,13397,-9995,-31453,-20850,12754,-11617,-30783,-25956,14882,-4452,24542,11494,-19154,24079,-30524,-17706,-11593,15879,29401,9550,13099,-10750,13146,-20883,-4551,-11899,-29351,-194,-5805,26293,32337,25803,8260,17526,-1408,9987,-30749,2132,-18837,12392,23871,26942,7392,30011,-14804,-3720,-30317,-27759,29171,-30142,9146,16018,9809,9994,551,-10470,24836,-28332,1395,-654,-22015,23425,-28847,-22744,17810,18150,24763,18784,-11957,17822,29282,1526,5466,27917,14656,-2507,30160,15157,6846,24967,-30434,-13905,-31579,-9941,-27049,27418,18254,-17414,-19246,-28714,8324,-13189,-20705,28291,-23032,-23472,-22056,6179,-9526,27066,1598,1501,-11440,31433,-30037,-11006,10376,13232,22906,16542,-12508,22062,27764,-25433,-24739,-26457,-30377,1848,5840,-8067,-2848,-186,16752,-17686,14063,15865,-12979,17495,-4949,-17602,16894,-42,-13218,-17507,15086,14549,23084,30581,-5510,-26721,-21573,-25378,23826,-14732,20584,28419,1274,6845,-25815,20957,-8155,-30938,-16118,24180,5516,-11051,-4720,-8351,-10235,-23383,-9733,21741,-20959,-11974,11110,17281,15440,23127,20977,15356,-3662,-13133,-24631,-29759,-2844,13776,11023,-6703,23434,-25533,17893,-27226,6593,-14483,-1968,-29261,-16830,22852,-26862,3042,-22032,16788,12005,7104,-15448,-4074,20032,-8108,27070,-20776,-12733,-14593,-9824,-681,25961,3384,12186,-6102,-27533,20048,28196,-3961,16588,29304,14935,10051,26481,26491,-4681,28612,30262,-2096,20673,1220,23307,6475,6054,-6945,5481,7358,18662,-13727,19436,8380,-12325,22941,20349,-30065,-27125,1882,-32214,10126,-8358,-30829,-19389,14041,21285,16562,32493,22796,558,-14260,28665,-6033,-
15596,-25255,20301,-6795,31936,21737,-557,-11760,17369,2768,17677,7443,-19574,-31619,29507,-3875,7002,24521,3608,12728,8228,-1966,-4661,-20542,-23187,5415,-7223,-1459,27511,-20173,29448,-22382,-28989,-11780,17453,-1048,-6274,2930,28665,-9231,-12868,3689,-28934,-13868,-19611,-28208,-1996,18797,-26373,16384,25475,-25990,2771,21644,-16286,19501,-15534,-6474,-10475,2081,-24734,-28,-14808,15292,-2307,21441,-14363,-14355,-12998,-19350,-19383,12308,-205,8958,-23846,26134,25958,5517,5171,10857,10967,-10914,-10957,23560,-8009,-28987,18883,-16590,-10182,-29183,-4156,7932,4008,3498,8372,22173,-4731,29814,9094,5693,26718,-19987,-14501,9816,-25716,-4723,-15839,26457,-2077,15410,32753,-22264,22533,-16221,-29966,-723,-3644,-30134,-15640,7738,-772,-3715,-19526,-14218,-25322,-9414,5778,5234,23070,-7547,12514,-4625,-32737,18190,-28414,-27115,-28142,30899,22322,30068,4274,-6387,3540,25295,18602,25300,7199,10617,5048,7117,12693,-28496,21885,-6308,10120,25571,-18777,6540,-28181,30308,-25910,-13814,-8982,-7087,-28875,31641,3319,3908,13601,-12105,14347,-10174,5152,31495,2567,16025,20801,6983,5052,-3786,-6428,16586,-29590,27664,-12323,-2906,17404,-32003,-16206,28258,-86,7544,-2535,5748,18065,-18734,8990,26772,2865,-32069,21970,-14176,-21847,19815,25027,25302,20470,7294,-4988,-6171,-19016,-6825,22344,26919,-1806,-29653,10238,460,-5719,-9046,13219,20938,29366,6018,14695,-25669,5878,14649,-8581,29628,-13562,-9029,-29810,1242,28847,25465,3114,26994,30735,-19525,30058,-23654,27338,-3791,-10941,15395,8776,29563,-28718,30353,-5745,-3398,31616,31557,-15108,-13389,15670,-166,-20457,-22796,-21708,12177,2359,-16760,-14278,-9975,15155,-2387,-10016,-30527,30321,2882,19162,-30019,-2659,-17895,30585,8939,10302,4797,-30559,3342,2514,20190,21073,-9410,-21046,7203,23223,31321,-27568,3813,-25474,409,-13012,761,-17241,16061,4267,-7681,-28777,18130,-6693,25443,-23541,16216,19232,23041,-2420,5714,30643,5773,13285,-11156,32247,14236,-15858,-11326,-2049,28039,-19892,18171,21342,-27888,1002,-22576,17477,24877,-298
06,29901,2016,14585,-6844,19451,26183,-17539,-30588,-15687,9192,18240,-26440,472,21775,-26264,-30008,17138,-28279,26115,-29631,10173,-25825,-25489,-24920,21034,-3691,3672,-23053,-787,-20891,-16809,17107,20745,30162,-27013,32013,-28717,13067,25785,-18765,24464,-14825,-20468,-4766,-17204,-15530,-23935,5335,2631,25981,32604,360,-8011,25251,29211,-24381,-20849,-8278,24463,-9154,11775,22812,22320,-9111,-28333,-10625,-24042,-16808,-6222,-5867,29040,-23901,-11925,-27095,-20119,2040,7579,27939,-31850,14739,-9080,-12010,-2927,9487,-10896,17164,9491,-14457,11282,24708,26694,26445,10133,-32349,5424,-13540,8974,-3096,6304,8027,-13418,22559,3669,28879,9371,14544,20182,6825,-3111,27320,-21014,-19457,296,19352,31576,8026,5643,27,-21519,-25329,3441,-22706,11464,-15812,-11251,3509,30418,-10701,21387,-15563,-32261,-20414,-3516,27656,-19482,-2605,-32062,-20676,27275,-9219,25451,-15312,-2782,-12547,-167,-23440,-6187,4223,17742,4179,23935,-9210,637,14822,-15566,5069,11775,-4855,31597,4510,10622,-6676,6154,-16235,-25000,-4506,12718,14879,29193,-28988,22489,-2588,688,-20483,5404,12070,12201,-26361,-23035,-4182,-15784,86,332,31035,-4182,22118,29413,22707,24257,7548,-18336,2943,2945,31705,-17735,-6759,12141,-5722,30374,-22765,15984,-22167,28924,-32757,-21575,18925,25037,-13998,-5660,-29975,-2464,22093,23766,29540,-32563,29635,27667,-3234,-27346,31944,17330,32098,-27098,30302,-18945,-12033,-32491,-13532,6594,-8813,-22172,87,23509,-7417,-255,17396,2525,17231,-13533,-3542,-16398,28169,512,-27909,1778,-25828,24318,23988,-16159,-30137,4245,20429,21405,-8464,19068,-4790,-18395,31573,-25935,-26868,-18462,3291,17211,-11845,7449,28175,19498,9941,32335,-26812,-14654,-19449,17911,5713,-4921,25888,-7083,29041,20661,-13862,15265,16035,-17566,-22263,-17086,-11861,-7174,-14663,-31772,23436,3909,30515,-31499,-24880,11902,32336,-28779,27194,-952,-10377,-17120,17136,22507,30797,27922,-22541,-17830,7451,-8084,23447,-16997,22971,2470,29707,-11957,13082,320,-32521,3186,-30014,22393,25990,-29389,-32142,7828,3225
8,-29903,7885,1800,-29011,23749,-963,-19338,18535,-13696,-30724,631,7009,32295,23872,-20172,29155,-28060,13488,6670,-23682,-14905,-10725,-10492,-10961,11681,-21568,-19282,5545,16963,14851,-7681,28793,22253,14685,-28427,23272,-14046,-14398,-1757,-21327,-1855,10005,-19473,16110,30003,-22680,10201,-25078,-17831,-15207,7387,-9795,5561,30336,-17555,-9296,23339,-25910,23763,-24888,-4499,5352,-28057,-21080,-1349,-32062,-1953,-9436,31657,-10858,31178,23404,-15000,7774,-27214,24364,-30273,-15944,1936,-25897,-7907,-11756,-14915,-11278,-28366,-25739,-26220,21254,-25396,25685,-7299,-21324,14761,-4551,19792,-13268,12326,24300,22725,25262,-9384,-16249,-3975,-16387,14576,27193,18773,-12906,-23913,-5807,-25108,1118,25256,20427,-29713,-20059,23841,-29772,17677,15816,16857,-24224,-11536,-2954,-12987,22054,27753,-6357,-18036,30645,31870,31600,962,-13807,-23253,11829,-7255,-4888,-13556,21239,-7391,-29329,-10899,11639,-29649,10832,28652,-17102,879,-22952,-19891,29957,6943,4029,-23251,-13915,31101,1727,-28578,1650,6332,-23152,10623,25452,-19618,-23461,-23128,-20862,31131,-15637,-17050,23505,10560,-17975,-3639,17239,-30929,2415,-9579,-31505,-19591,-11685,21370,-21306,4554,19233,-12115,-11447,21707,-31211,-20819,-22233,31257,-30465,4459,11794,17076,30288,-30680,7511,-18341,-26277,4821,8099,13491,-8885,-22863,-24126,-23412,7866,5161,25701,-19534,-26078,-4913,9936,-10226,-32404,21777,-18936,12620,-13248,28691,11260,12920,21378,-2224,31794,-793,6353,-27206,-9410,-7362 diff --git a/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice16_input0_int16.csv b/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice16_input0_int16.csv new file mode 100644 index 0000000..0a59d12 --- /dev/null +++ b/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice16_input0_int16.csv @@ -0,0 +1 @@ 
+-30311,-12071,-17239,-23727,14295,20010,-28410,-9433,-29324,-6783,-7526,2003,-2639,3390,17124,20174,11869,26470,1300,30237,-23803,-1413,-27209,28756,7804,28076,-4340,-30567,21899,-12533,27527,-16076,13387,-8032,23785,-3039,25526,-28674,-20460,21068,20867,-3144,28484,-22192,14879,-16612,6597,27848,-29546,-12114,30506,-30816,-20536,-36,17168,-21763,23490,24405,16015,-28182,-16797,-30590,-22780,-9884,23691,20963,9006,32397,-26952,9697,26406,-18523,20746,-20442,-21732,-29927,10392,-29077,16523,9475,-30038,16440,-12899,12529,-27204,-29532,3560,-19546,6996,-11801,-16975,4431,24417,17444,-15110,20848,22616,29820,29034,12663,-14776,-4255,-28661,-26964,27124,16933,11755,26439,27600,22392,-1415,-30015,24997,-21042,-30215,-4455,-2740,11852,28691,-30637,957,17427,22819,30442,32700,4515,26141,-6347,-23470,9550,1776,-11332,12366,-20483,5807,15799,-21332,-19457,16449,15875,-6571,15859,16789,-16990,-9241,-15097,12884,31346,11996,28365,32720,-18305,-18387,11968,1772,-28696,28299,-15000,-17950,12228,-20463,-18575,20929,5331,-8109,26889,-22187,-4979,12211,3666,-29276,6948,-10036,-7051,-10766,-15347,-5762,25028,1094,-7990,13953,22143,10536,28711,-10219,-14343,20799,-8019,-14159,13849,3954,13708,-17587,-478,17539,8652,27863,28027,12802,-26105,11608,22323,3017,-11607,24032,1192,9620,-21181,20626,-8832,4463,15893,-30118,-10213,-19858,16179,13698,-7515,19811,31343,-13782,-20099,-6676,-32099,-21899,5517,-5330,21548,1642,3364,23675,5700,-21901,-8030,18795,17491,5170,-22688,-13884,28370,-20896,-6026,15920,713,26769,24458,25013,-7685,-1954,-23636,-24012,27536,-25881,-17934,20505,-8577,30665,31766,-708,-14269,3395,24518,-6756,-19078,12796,-29536,31401,25154,-30819,-2480,-9062,5222,17787,-9358,1694,-12153,-17919,28027,26673,7661,-12346,21026,31556,28092,-22561,8301,-30943,-19570,-16903,-8887,24384,16899,24913,-24488,27160,-26946,26556,11630,9503,-12645,177,26356,12971,-27199,2559,11530,-19559,5467,26716,-23597,-1464,-1721,-27926,9310,10671,12993,3719,24250,17336,21293,18222,-17396,-21923,-25913
,-19043,-7688,-31505,-1868,-29511,24507,4304,31388,28247,-30459,27067,1883,-24902,-718,23409,-25650,-24676,-18366,-23828,15674,-12728,-29300,-16886,17192,-27764,28238,30446,9789,20910,-9728,-17872,-1886,-2119,27370,-5298,-9974,6189,-15379,-6041,-634,20709,-32472,-30885,22834,26928,16312,-30420,20075,-13503,-10618,-9062,7284,4923,8924,-7797,17387,22768,-31270,-16791,10894,1704,-8539,21548,-5783,26330,5921,-17954,-31058,23153,-9352,5427,12302,-27521,-22842,-7328,-18277,32348,-29393,-1149,-17586,4847,-10699,16829,-26950,-18897,-7564,13570,-4701,19436,14523,28452,-9894,-19672,-9758,-16605,12327,-1403,20463,-24435,-12181,14188,6772,212,31448,-16155,-25667,11355,5348,-20977,4175,22851,30682,-10096,8663,5912,-5246,1012,23074,5514,17736,-657,-15601,3898,31825,-22838,17246,-18435,6407,10714,-14990,-11289,-23577,18329,20465,4942,24333,19272,17083,-20715,31009,-23349,12124,10066,5571,3099,12779,-27409,16038,-32198,-30236,-6097,-14523,-1625,9973,8913,-8343,-30022,-28834,5526,14144,3503,-3632,-30518,-9198,-15494,-9883,28622,-1681,-11432,19940,-20567,-28964,-31890,18752,8761,9349,15041,-27734,1621,16202,-23253,-1354,21269,-12092,30300,12379,-29959,-28739,31981,-6302,-26579,-32232,-19634,-3178,9838,631,-21714,25594,116,-24456,-15330,25570,-8072,-30289,12859,-24558,31600,-1207,-6536,18614,-14829,-11271,1677,-28293,-5487,12998,32581,4928,24627,-10811,-23986,-25017,15638,-19532,-12539,-15395,1606,-227,-11384,7569,26160,-2012,13146,10168,5739,24820,-805,20746,24692,-21284,2913,25973,9885,4477,21844,-6894,-7572,17816,-23726,-7814,13671,2073,31158,-5014,-15856,-29965,14502,-11412,-25802,-29996,21991,-18007,-12509,-29201,-7673,8323,-22040,-23312,-22861,-9165,23809,-31622,-12804,11135,23248,30927,122,20040,-17546,-26425,14840,-27349,17206,23195,-85,11292,-19024,29984,8412,-24841,11954,3054,-8631,-28641,30864,-16924,7263,-25798,-5366,-11634,-30613,3505,-17340,-11495,-29361,9397,-5236,-29605,5669,-18832,-28738,-20567,10793,-20303,14771,-14820,-11342,-21500,-23503,-4561,-9865,-20424,-1370,-2
8568,-30264,-19922,-25151,31573,-31274,-24106,-24567,25812,4009,-11877,6420,10921,-29543,11855,-1707,-25243,16160,16275,-25138,28585,375,26820,9233,25980,5297,-14648,562,-16314,-26972,27429,5470,32153,-31790,-20325,25230,-27041,30955,-16444,17510,-21057,-21553,-29640,22172,8317,17232,-6218,3890,9400,-12300,9093,17939,18124,-8401,6693,-6316,4990,30280,-12389,-14455,32502,-26619,-28756,8032,2110,-984,7554,20595,-21916,12067,10364,-19128,21148,18573,-23580,9472,-11509,-20601,30892,-8073,-2209,-3999,-3871,-29070,6442,-7278,-29426,-20518,23790,25213,-7959,-5215,20407,-8097,4040,-3032,-5149,1044,-4014,-12365,29589,20285,31012,-21910,-4466,18878,7728,-9709,-15205,-4530,20572,26413,-24280,18779,1144,18472,-32134,789,25021,25659,-2588,31260,-672,24270,-28957,13462,-19635,-25683,-32042,24988,-32320,28738,1360,-30248,-15660,10407,-8766,-18587,12668,16292,-30364,-3426,-24512,23540,2265,17091,9493,-12129,31047,13048,8161,-2049,-9975,11428,23622,-24865,-3337,30295,26822,-6525,-21200,14580,22015,-14233,8470,25938,721,-17757,-8321,-27849,-7774,-2600,4271,31735,-9699,-27956,-25737,-13716,-20462,-17822,-20363,-23341,-16871,-32114,-4550,2150,-18426,16270,92,13263,10299,-9309,-3736,14802,15602,-25723,-14374,26399,21702,24422,-2126,-21523,-30836,7798,-13554,32365,27792,24381,-15846,19675,-21262,-164,-27652,-5770,5409,3429,28875,-6033,-3688,5142,5459,21199,11322,-12932,-26114,12799,-1874,9918,-31096,23078,11403,13702,2631,-4831,-20962,-15634,26906,-31843,-20882,26223,11418,20945,-14087,20195,-15442,-10029,29114,14695,-25675,4195,20959,-21038,7756,27281,-20465,23384,9028,-2865,23779,8633,10334,-14344,2734,977,-2145,-28908,-3907,-25664,-19372,5324,12038,-4053,-22757,-11027,-4902,-22882,-19841,-10219,-9077,27604,-14387,-15915,-17413,13097,16576,17197,8103,-23579,-30706,-32352,31958,-6138,-15019,-13348,-4701,-30935,-29716,-2930,605,18186,2644,-29208,-18908,-11716,1325,21889,-28854,30924,9945,-4539,16014,-17579,16207,-272,17954,-19390,-3112,16968,22244,-5708,-1573,-32708,179,10897,-7398,7194,
15446,-802,26441,-22121,27483,24612,24895,1021,11878,-21634,21823,-3067,-15246,9283,27992,-19864,18431,-27322,9200,-23101,22164,14649,-11310,11085,32254,-12738,25311,-21553,-7190,14815,16433,-10598,-28215,32428,16307,20713,4941,7787,19891,-6121,-12863,16281,-8939,-7643,-26565,-9723,-25949,15568,6996,24419,-17645,15432,12462,-15137,-13936,5510,656,7827,15112,-21860,-10566,-5359,-26537,-31703,-20419,-1976,-11870,14097,29322,14720,31136,13845,-16510,30155,-11717,4165,-29518,1280,179,31934,-16042,-15376,31338,-31839,15943,-21254,17659,-8608,400,10607,17708,11262,16807,16562,-27893,9622,11321,26789,-25154,-8248,-13227,-3437,7306,-30595,31167,-29334,-21129,22478,15604,23100,30206,29158,-32338,21633,16429,23226,-9698,-19792,7244,15986,-24138,-8649,22077,-8354,-8419,-15480,352,-25092,16157,-29835,-11998,20872,4423,-11166,-29495,14768,27798,26899,-18016,24328,8093,4689,4883,23772,-18827,-27956,32676,23426,-402,-8193,30003,-15822,1083,-25748,2560,-29701,-19521,-4153,-10483,24105,-6190,29096,21600,30899,-13027,30856,25057,-3295,-7847,31323,28150,-28189,-19863,-10972,-14477,-9218,-20779,29149,227,-4411,-13746,-17314,-10394,1783,12567,-19032,-28089,-28012,-5451,13397,-9995,-31453,-20850,12754,-11617,-30783,-25956,14882,-4452,24542,11494,-19154,24079,-30524,-17706,-11593,15879,29401,9550,13099,-10750,13146,-20883,-4551,-11899,-29351,-194,-5805,26293,32337,25803,8260,17526,-1408,9987,-30749,2132,-18837,12392,23871,26942,7392,30011,-14804,-3720,-30317,-27759,29171,-30142,9146,16018,9809,9994,551,-10470,24836,-28332,1395,-654,-22015,23425,-28847,-22744,17810,18150,24763,18784,-11957,17822,29282,1526,5466,27917,14656,-2507,30160,15157,6846,24967,-30434,-13905,-31579,-9941,-27049,27418,18254,-17414,-19246,-28714,8324,-13189,-20705,28291,-23032,-23472,-22056,6179,-9526,27066,1598,1501,-11440,31433,-30037,-11006,10376,13232,22906,16542,-12508,22062,27764,-25433,-24739,-26457,-30377,1848,5840,-8067,-2848,-186,16752,-17686,14063,15865,-12979,17495,-4949,-17602,16894,-42,-13218,-17507,1508
6,14549,23084,30581,-5510,-26721,-21573,-25378,23826,-14732,20584,28419,1274,6845,-25815,20957,-8155,-30938,-16118,24180,5516,-11051,-4720,-8351,-10235,-23383,-9733,21741,-20959,-11974,11110,17281,15440,23127,20977,15356,-3662,-13133,-24631,-29759,-2844,13776,11023,-6703,23434,-25533,17893,-27226,6593,-14483,-1968,-29261,-16830,22852,-26862,3042,-22032,16788,12005,7104,-15448,-4074,20032,-8108,27070,-20776,-12733,-14593,-9824,-681,25961,3384,12186,-6102,-27533,20048,28196,-3961,16588,29304,14935,10051,26481,26491,-4681,28612,30262,-2096,20673,1220,23307,6475,6054,-6945,5481,7358,18662,-13727,19436,8380,-12325,22941,20349,-30065,-27125,1882,-32214,10126,-8358,-30829,-19389,14041,21285,16562,32493,22796,558,-14260,28665,-6033,-15596,-25255,20301,-6795,31936,21737,-557,-11760,17369,2768,17677,7443,-19574,-31619,29507,-3875,7002,24521,3608,12728,8228,-1966,-4661,-20542,-23187,5415,-7223,-1459,27511,-20173,29448,-22382,-28989,-11780,17453,-1048,-6274,2930,28665,-9231,-12868,3689,-28934,-13868,-19611,-28208,-1996,18797,-26373,16384,25475,-25990,2771,21644,-16286,19501,-15534,-6474,-10475,2081,-24734,-28,-14808,15292,-2307,21441,-14363,-14355,-12998,-19350,-19383,12308,-205,8958,-23846,26134,25958,5517,5171,10857,10967,-10914,-10957,23560,-8009,-28987,18883,-16590,-10182,-29183,-4156,7932,4008,3498,8372,22173,-4731,29814,9094,5693,26718,-19987,-14501,9816,-25716,-4723,-15839,26457,-2077,15410,32753,-22264,22533,-16221,-29966,-723,-3644,-30134,-15640,7738,-772,-3715,-19526,-14218,-25322,-9414,5778,5234,23070,-7547,12514,-4625,-32737,18190,-28414,-27115,-28142,30899,22322,30068,4274,-6387,3540,25295,18602,25300,7199,10617,5048,7117,12693,-28496,21885,-6308,10120,25571,-18777,6540,-28181,30308,-25910,-13814,-8982,-7087,-28875,31641,3319,3908,13601,-12105,14347,-10174,5152,31495,2567,16025,20801,6983,5052,-3786,-6428,16586,-29590,27664,-12323,-2906,17404,-32003,-16206,28258,-86,7544,-2535,5748,18065,-18734,8990,26772,2865,-32069,21970,-14176,-21847,19815,25027,25302,20470,7294
,-4988,-6171,-19016,-6825,22344,26919,-1806,-29653,10238,460,-5719,-9046,13219,20938,29366,6018,14695,-25669,5878,14649,-8581,29628,-13562,-9029,-29810,1242,28847,25465,3114,26994,30735,-19525,30058,-23654,27338,-3791,-10941,15395,8776,29563,-28718,30353,-5745,-3398,31616,31557,-15108,-13389,15670,-166,-20457,-22796,-21708,12177,2359,-16760,-14278,-9975,15155,-2387,-10016,-30527,30321,2882,19162,-30019,-2659,-17895,30585,8939,10302,4797,-30559,3342,2514,20190,21073,-9410,-21046,7203,23223,31321,-27568,3813,-25474,409,-13012,761,-17241,16061,4267,-7681,-28777,18130,-6693,25443,-23541,16216,19232,23041,-2420,5714,30643,5773,13285,-11156,32247,14236,-15858,-11326,-2049,28039,-19892,18171,21342,-27888,1002,-22576,17477,24877,-29806,29901,2016,14585,-6844,19451,26183,-17539,-30588,-15687,9192,18240,-26440,472,21775,-26264,-30008,17138,-28279,26115,-29631,10173,-25825,-25489,-24920,21034,-3691,3672,-23053,-787,-20891,-16809,17107,20745,30162,-27013,32013,-28717,13067,25785,-18765,24464,-14825,-20468,-4766,-17204,-15530,-23935,5335,2631,25981,32604,360,-8011,25251,29211,-24381,-20849,-8278,24463,-9154,11775,22812,22320,-9111,-28333,-10625,-24042,-16808,-6222,-5867,29040,-23901,-11925,-27095,-20119,2040,7579,27939,-31850,14739,-9080,-12010,-2927,9487,-10896,17164,9491,-14457,11282,24708,26694,26445,10133,-32349,5424,-13540,8974,-3096,6304,8027,-13418,22559,3669,28879,9371,14544,20182,6825,-3111,27320,-21014,-19457,296,19352,31576,8026,5643,27,-21519,-25329,3441,-22706,11464,-15812,-11251,3509,30418,-10701,21387,-15563,-32261,-20414,-3516,27656,-19482,-2605,-32062,-20676,27275,-9219,25451,-15312,-2782,-12547,-167,-23440,-6187,4223,17742,4179,23935,-9210,637,14822,-15566,5069,11775,-4855,31597,4510,10622,-6676,6154,-16235,-25000,-4506,12718,14879,29193,-28988,22489,-2588,688,-20483,5404,12070,12201,-26361,-23035,-4182,-15784,86,332,31035,-4182,22118,29413,22707,24257,7548,-18336,2943,2945,31705,-17735,-6759,12141,-5722,30374,-22765,15984,-22167,28924,-32757,-21575,18925,25037
,-13998,-5660,-29975,-2464,22093,23766,29540,-32563,29635,27667,-3234,-27346,31944,17330,32098,-27098,30302,-18945,-12033,-32491,-13532,6594,-8813,-22172,87,23509,-7417,-255,17396,2525,17231,-13533,-3542,-16398,28169,512,-27909,1778,-25828,24318,23988,-16159,-30137,4245,20429,21405,-8464,19068,-4790,-18395,31573,-25935,-26868,-18462,3291,17211,-11845,7449,28175,19498,9941,32335,-26812,-14654,-19449,17911,5713,-4921,25888,-7083,29041,20661,-13862,15265,16035,-17566,-22263,-17086,-11861,-7174,-14663,-31772,23436,3909,30515,-31499,-24880,11902,32336,-28779,27194,-952,-10377,-17120,17136,22507,30797,27922,-22541,-17830,7451,-8084,23447,-16997,22971,2470,29707,-11957,13082,320,-32521,3186,-30014,22393,25990,-29389,-32142,7828,32258,-29903,7885,1800,-29011,23749,-963,-19338,18535,-13696,-30724,631,7009,32295,23872,-20172,29155,-28060,13488,6670,-23682,-14905,-10725,-10492,-10961,11681,-21568,-19282,5545,16963,14851,-7681,28793,22253,14685,-28427,23272,-14046,-14398,-1757,-21327,-1855,10005,-19473,16110,30003,-22680,10201,-25078,-17831,-15207,7387,-9795,5561,30336,-17555,-9296,23339,-25910,23763,-24888,-4499,5352,-28057,-21080,-1349,-32062,-1953,-9436,31657,-10858,31178,23404,-15000,7774,-27214,24364,-30273,-15944,1936,-25897,-7907,-11756,-14915,-11278,-28366,-25739,-26220,21254,-25396,25685,-7299,-21324,14761,-4551,19792,-13268,12326,24300,22725,25262,-9384,-16249,-3975,-16387,14576,27193,18773,-12906,-23913,-5807,-25108,1118,25256,20427,-29713,-20059,23841,-29772,17677,15816,16857,-24224,-11536,-2954,-12987,22054,27753,-6357,-18036,30645,31870,31600,962,-13807,-23253,11829,-7255,-4888,-13556,21239,-7391,-29329,-10899,11639,-29649,10832,28652,-17102,879,-22952,-19891,29957,6943,4029,-23251,-13915,31101,1727,-28578,1650,6332,-23152,10623,25452,-19618,-23461,-23128,-20862,31131,-15637,-17050,23505,10560,-17975,-3639,17239,-30929,2415,-9579,-31505,-19591,-11685,21370,-21306,4554,19233,-12115,-11447,21707,-31211,-20819,-22233,31257,-30465,4459,11794,17076,30288,-30680,7511,-1
8341,-26277,4821,8099,13491,-8885,-22863,-24126,-23412,7866,5161,25701,-19534,-26078,-4913,9936,-10226,-32404,21777,-18936,12620,-13248,28691,11260,12920,21378,-2224,31794,-793,6353,-27206,-9410,-7362 diff --git a/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice17.tflite b/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice17.tflite new file mode 100644 index 0000000..d0b7ecf Binary files /dev/null and b/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice17.tflite differ diff --git a/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice17_golden_int16.csv b/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice17_golden_int16.csv new file mode 100644 index 0000000..d8eed1e --- /dev/null +++ b/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice17_golden_int16.csv @@ -0,0 +1 @@ +-25281,26768,-24980,24188,6820,20964,23892,23482,-11343,-5442,29348,-11088,-10572,-26894,-21541,-28320,-18329,14724,-7796,31165,-6056,19996,17531,6710,-5586,6848,-13107,-13016,-6801,-19027,18095,26540,-22672,24676,-24594,-27625,-19710,9249,-2582,13354,-29831,-14876,-14554,4630,-12212,-10410,-17658,-26298,20202,-17838,19475,28137,4577,12494,-25662,-6405,-17143,16232,-925,-8919,-21006,30315,-20067,2466,-16675,-14932,-9393,29864,15901,28574,22513,7002,24259,-26903,-22338,6157,28497,25034,-153,-30855,-16783,-3094,24906,30986,-1002,-22953,-984,26727,-29832,-22613,31308,-13880,13388,-6054,11153,-7418,9036,19205,-10997,-28076,28924,419,30762,-15498,-30099,23899,-26728,16514,-6996,-27458,-11130,-30911,344,-27403,26613,-22228,-16389,-1187,-13719,-30864,-2057,14822,14119,-10190,-22357,724,17886,31528,-19018,28176,-9251,8712,25257,3820,8787,-2266,11408,-29786,22393,-29880,-12556,11490,6783,-1066,30124,-8733,31957,22031,27738,-8141,28550,14585,25887,13612,-27498,15584,-30008,1964,-6376,16905,-11960,-29167,-14616,23979,15136,-29497,27789,-2441,19570,-23284,-22308,-23044,-1
2163,8378,5802,-4457,28359,-10963,-15600,-12034,-18874,-6406,10734,29245,13052,13843,-32024,12624,-5336,8984,6597,-21655,-17520,-6757,25772,-18729,7981,-3787,-16748,17157,-17517,-20680,5363,-9882,-23507,24910,-32391,-5146,19654,24166,-30199,-31286,17057,-21484,-10266,-2464,-21335,27901,32075,3485,-32175,-19581,3905,-731,-20192,4950,-27318,-11829,-17559,-24536,19176,-31325,24015,-14693,-5556,-20055,4428,5807,-162,18798,-8915,-13458,-595,24012,16811,-20118,18181,25844,-6985,-9857,-30387,16775,28861,-15602,-18775,10261,6775,7441,28,-4430,8464,-15590,8996,-15975,9563,-16054,30704,-5165,30453,22081,4418,-31224,7077,5021,-11086,7334,12330,19483,-14496,-4175,-20064,357,24650,-15176,21384,-11604,25965,-19327,22203,-172,-31639,-268,-13723,-9836,-11842,-956,12547,16955,-22769,-20013,4311,-28431,32553,-17406,-21359,-888,29626,-5502,-28832,-30979,27869,17486,4325,-27914,-12616,-9595,-29049,-25582,-20399,14628,26027,29357,22733,-30817,-6228,22527,21721,28516,-8031,-9375,17828,-31580,26888,13841,18349,4373,18800,7802,4725,-1054,4782,-1283,-10871,-11330,3172,-16263,-18350,32362,-9252,-23001,18862,8379,11597,-31333,23660,-20157,21348,-26524,-20527,-23406,11720,14932,24998,22814,698,-17075,8307,-8700,32424,12726,9783,-9527,-6414,-25403,17085,3455,15619,-4840,-15123,-31288,27726,-1440,24780,-23877,22318,24533,1086,21240,-16832,11869,30981,6863,27685,-276,-25418,-9527,-31362,-963,22490,16231,-3971,14380,-18215,13188,23803,-28762,15063,23301,-24287,-24730,-4456,32217,16350,-32013,1366,15138,1082,-26712,-18198,21204,-15208,15591,32239,-5780,15912,-21079,12254,-16081,-26796,1242,-26060,-3256,18006,-17642,-26703,-20813,-12222,11443,12070,-23436,-3346,21875,15139,8022,21814,-29970,23632,32564,7806,18717,31525,7911,-19708,-11631,21419,1969,29533,-20375,-22126,-29784,14024,-14998,-14892,7650,-30739,7266,-25393,6733,1857,-7276,20887,-14632,-3569,-9276,-21006,-19498,6010,-14627,20748,-30198,5391,7431,-7044,-25422,22987,24097,7313,29300,-4368,-23510,26159,-4678,9354,-32281,-1831,-7593,21425,327
03,795,14199,27487,-10654,-3905,16204,-9753,25955,19348,-7250,6663,1646,-25706,28142,9344,-9031,14473,-4790,22234,-3751,5061,-18770,20725,-8332,-5335,-22419,13736,-24423,-6857,12649,27459,-23190,-12843,20306,2029,23730,-7697,18237,-10839,-4207,10851,-22822,-27110,13900,-5323,-2522,20478,-31730,-28133,-10735,30159,9655,-11990,-7566,14218,16251,-17255,-1004,-24327,6884,6034,7020,-1390,-11322,-6615,28958,23781,-15842,-10197,1608,-3306,20527,-13404,17660,8945,-18099,-28150,28062,26345,12105,14736,-18857,-23078,-32575,-3834,15404,-28404,28442,-3652,6940,2200,29654,22387,30107,-22878,1366,5717,-4467,-14594,32136,30169,1023,26715,-10000,-31998,29167,10554,-3416,1965,-28238,-16434,5167,-22943,31415,-27490,7098,-7666,-24394,-2546,-24157,-23481,-21049,-1138,-8916,-1222,12897,-22956,-24679,6728,-8173,-32756,-8324,9286,-21955,-10157,25243,9256,15019,22863,-11536,-14898,-5545,21096,-3060,30490,20082,14289,4822,-29713,25433,-27143,-12302,2959,21622,20292,1673,-19898,1382,-24012,8646,-19092,-20301,1171,-3879,26833,-15383,-19352,18174,-3668,-20579,-18675,22509,-23987,-3032,29354,-31973,-678,-30231,15591,-29484,-25529,-3545,21488,14829,-17964,-22453,7765,-20178,-8315,19235,5659,4024,22255,25999,32529,7430,25067,5187,14423,24643,496,-7031,-7668,-21908,9692,23341,-20259,9844,-19710,17384,7562,-2441,7555,-30552,-4035,-28789,-5418,22165,-7155,-315,24619,-6682,-13796,8606,13640,-25179,-19113,8615,24995,-26385,32531,20050,-18780,-7912,-24334,16535,-22033,-25172,5017,-6601,15723,-14291,-14054,8530,-30906,2233,23942,-26499,6085,-11201,-2702,6532,17131,-17930,-30329,-16858,-11283,2102,15003,-10931,-16112,-2079,32383,-4357,914,-28666,19104,-1075,-25822,21232,14291,2068,-19714,3690,18255,28641,-31212,14311,-16071,9067,12150,-2180,14136,-11925,6773,746,-14218,-4396,603,-7218,28554,628,18343,12797,28009,-23799,-30700,-20503,29562,2536,-29897,-21289,30037,-13843,28163,27409,13489,3584,-31473,-11360,-16305,29726,9314,27330,-7307,-22211,-4831,15740,30178,11958,2936,18587,-16594,25003,-24237,10503,2
2275,8662,-5546,24902,32192,19741,9888,-27996,-2611,19620,31666,25393,-14230,11236,-29161,-22096,-28196,-24201,-21243,10430,-19487,-25578,-1203,-32162,-11783,-19180,3180,15630,-31996,-16118,-19158,-6369,15546,-6969,-28078,28988,14346,11357,-28566,6292,25079,-10877,31642,9238,-3717,-14660,-26802,31062,-13458,-20057,16582,18198,-13560,7112,16329,29840,-15567,20708,10781,-25252,-27146,30226,5273,-30566,28695,-29458,-9704,-11495,15042,19627,-5930,-27878,-5204,-16060,18900,13880,-18969,-22163,25432,11128,-6729,-25720,25396,-29095,-26180,-13740,29639,12461,32293,-28060,28602,8787,-709,-14710,14621,1285,9069,-2139,-23020,-26174,31705,25212,22515,-31683,-7480,-7050,17763,-8754,-6668,22662,-25508,2076,-23871,-17520,26748,20212,24068,-5231,-29912,-30169,15582,29158,30863,29159,-9103,-31992,17632,-24140,-26700,32582,3459,24363,8841,25365,-24843,24920,26639,-6696,1167,-12995,-23488,18918,-19841,-11052,-18638,14040,-352,17034,21331,30015,14045,-32742,5600,27518,28658,16764,-6757,-1347,27917,4064,6130,-29744,31496,16347,-17384,-21004,-537,9430,24043,-29344,-9580,20912,-4093,17400,8020,-7786,-23455,19531,-29998,3140,28766,22022,6775,-9007,-29667,-14888,32696,8206,-19099,28117,14988,-18227,22281,19183,21082,-21390,16314,12776,-25303,2280,-24915,11514,9613,-9656,-11584,13857,-31413,-18789,-29519,-28202,-7648,-3881,5446,-22913,8691,6891,7654,29218,-6934,3330,1159,-32191,-27333,24999,-21478,5778,6871,16461,9511,27090,-12188,29624,-8297,374,23486,-13207,-17683,30746,-14410,27834,-14111,13914,-25136,21288,-1888,-29507,-18906,22426,14987,19354,-9657,-8872,-7306,-6515,7358,-12880,13423,-17661,-30455,-23690,-9239,12529,1696,20698,28755,-32378,-21343,2368,-29980,-31244,6009,-14560,16328,-20476,-11184,29106,-27910,-19074,3020,28982,-18309,-6084,-11504,19547,4323,31494,-1028,6162,-30174,25045,-15908,-13395,-28588,-28268,-11064,-5284,-14009,12904,-3072,-22868,-26888,21794,3353,30829,-21881,4573,23463,-25340,13380,7880,24086,22491,9789,157,31929,-606,-4908,-20900,26386,19683,-21241,-26295,-1192
0,23029,14339,26016,5639,-18318,-4923,-25562,-5917,-31364,-9085,-12696,1913,12141,-23261,-2505,-2177,-2946,19297,13701,-25310,23361,16406,11111,-11266,-6043,-17385,25740,-22854,-29626,-10465,3752,-11971,26743,13695,-10702,30714,16627,-11579,25768,-31263,-17837,10825,-8378,3371,31968,27404,15890,-22916,-18745,-29731,-14109,18,1119,28483,19737,-29337,-18128,31795,9344,15407,30165,-18179,20980,20879,-9079,15901,7238,17927,-7067,-18298,922,23631,-14695,-20830,24600,6431,13331,11918,-26072,21768,27385,-3588,32321,21598,31644,-26906,-25527,19412,-13955,-422,-32237,29005,-2161,10253,25946,-1132,14887,-23313,-12458,-14833,-31102,-7301,8881,10921,26141,-27986,-20540,-8300,-19470,-18691,4830,-21279,-25482,-1416,29306,-13012,27549,3348,18169,29610,24539,30721,3893,-17664,20262,-28820,-18973,-16141,8177,21732,4213,-5805,21089,-24073,19504,7869,-23592,1727,17179,-2831,31828,2589,16842,29519,2148,8589,-4269,13242,-29900,6790,-1694,32202,22584,-18783,-28623,714,-9637,-10977,12176,-3855,6952,-243,-25237,20939,-18322,26971,-7690,-8483,7121,-18198,-40,9011,-13242,3488,-32352,-2133,-12103,12976,8924,11392,15661,-26719,-3650,-7727,3468,-21252,1319,-25380,-4782,31012,14279,-8341,21201,7224,-25411,-653,-24731,-26618,-4514,7537,-19864,-17356,26238,-2429,10664,-9575,-11942,-625,18634,23650,32282,6805,-30005,30365,-19072,-17491,29036,1790,17954,-15544,2027,-25351,23341,-5827,-28497,-5423,-711,-12661,15228,2311,16660,-29508,31465,23597,18032,-18001,9243,-28377,20909,-24604,-16546,-16171,-3793,30902,-15388,5701,10139,-30572,3965,3112,-23209,-31054,2520,-9546,32123,-25917,27780,25049,23875,-6604,-29192 diff --git a/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice17_input0_int16.csv b/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice17_input0_int16.csv new file mode 100644 index 0000000..34a3e74 --- /dev/null +++ b/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice17_input0_int16.csv @@ -0,0 +1 @@ 
+3631,16263,-14157,5789,-4759,-3136,-11233,-5213,-32402,-22431,-8812,-28671,7227,-14620,8292,13412,-13462,1593,16755,-22762,-31819,-2628,-23421,7087,-10477,-8988,-32043,-8702,32692,-7385,-20441,-18326,11410,14674,30933,16850,11746,13594,-20815,-21634,-5059,-11967,27320,11267,20748,-11399,-31159,-31531,6078,-11819,24244,2097,8951,11334,3296,-24708,12199,18702,25969,27210,-23098,-8089,17335,-29286,7876,-7748,1562,16176,-10425,24623,-2376,21499,-15860,28423,12791,13491,24705,-17552,-12061,27427,-28639,6681,24286,13245,-9535,22525,2853,-27588,-13790,1188,23094,-19897,-17214,-20263,-31418,6536,-8770,-2132,22727,12268,-20004,21114,23583,7584,-22873,13997,20592,-9949,9420,13472,13043,3100,-3635,-9879,-14979,32399,-9315,11648,30747,28936,-11905,7774,1108,17456,-21708,7597,-1862,-30495,-20816,14410,-16437,7976,8721,19587,28664,-23169,29766,-26481,28701,27627,-4816,15265,-30872,15188,-23241,5046,3237,-4538,-3279,15262,11796,22706,25428,-144,14342,20691,-9044,23527,-6582,-29668,20998,-22612,-19586,-4454,-2398,19647,-24012,783,28514,17244,-3769,9770,22256,10987,-13296,7607,-7421,210,20715,22097,17224,-29873,2967,-23070,-18332,17165,-5287,-18559,23868,15562,-30728,8049,5861,-14726,22592,29897,-13244,29511,8394,17717,18930,-12803,-8008,9871,-15290,19789,28453,-16577,-12984,15799,24022,-10567,-27295,23954,27776,18698,-1513,-3889,-24429,-31009,-19700,-21988,-10313,30845,10578,-3713,14683,-22178,-8557,-19598,9533,8642,80,-19927,-10647,-2142,-1535,2462,9464,29887,-2685,30198,-10265,20459,14578,4842,-7713,719,-29121,23368,13983,5891,-23236,2884,-23799,-29864,-19682,-15886,-5463,10714,11007,21867,9449,-5019,27765,29688,-30706,2463,10223,-4926,14801,-18061,-9319,-6860,5070,10318,-12419,-10678,12829,-30326,25426,-28687,12914,30239,1428,2678,28665,20103,-8635,-1680,268,-607,4121,24937,-30461,-17464,-15553,10751,31404,9407,-9220,-25574,28651,-23690,-4780,30069,-2458,-32747,-6524,13486,-19759,5995,15419,1663,-10897,-27611,-22249,3692,4441,4922,-25246,-16842,16356,-19875,29400,19653,25240,30
771,-22762,30211,14305,-5363,24082,10518,-23385,-7008,-29369,10799,27154,-31508,-19323,10577,7458,19559,16935,22455,-15184,11599,15107,-28729,-17695,23320,3286,-5625,-27829,21816,-21063,-7074,-31334,-3737,27378,-5584,-1040,15296,27535,-28064,14900,2469,-24464,9937,-3334,-14903,-10197,1358,5114,30788,-19536,-23612,23878,-19495,15910,-31079,12230,9325,-28961,-21210,-9803,31298,31684,-20482,8745,-5584,15058,20451,-12466,656,13773,32619,-27547,21746,-32118,-27408,-32338,-7282,-7774,672,-19349,23124,-9055,13395,-32133,12533,-26822,-12136,28039,4944,5239,-32450,-22700,-29911,-14863,6505,4901,14900,29354,28559,14620,-15733,25544,-29414,23563,13815,15037,10606,-30731,-13377,-6647,-30920,17613,-5709,23740,-17445,-13277,-6985,-28341,-31073,-21882,5686,-20689,-31168,27507,-16203,-25449,4507,10436,-27368,-11694,-23252,13132,-6625,-15748,10932,12912,710,15387,-26926,-10278,27289,10315,-13124,2944,12491,-23487,10668,11662,-22309,-11967,24683,-29411,957,13278,11079,2576,-3580,-28390,-8830,-9142,18509,-12411,-9565,-29010,23223,4904,29461,-24213,-8384,-7209,28485,-8552,3785,13469,-15797,17456,-5934,-31499,-13131,-26889,21968,9899,27680,-31961,-27267,-10916,-7139,17205,-13561,21201,-7450,-1043,28844,-9292,13429,10628,-21625,-10277,-20934,17076,11261,-19357,-18189,23709,10982,17312,19567,13280,-5570,-16755,-30409,-1144,-22155,19648,-31883,10697,-28731,-7982,7249,-12753,-20678,18998,13444,-29830,-28745,-32309,11697,-31410,326,7064,22181,28785,17702,26983,-21575,13074,-20684,26924,-11343,590,2206,26121,-12409,24147,25017,-32693,-26693,8366,-31194,-10030,-25336,22120,-7333,-13474,-16419,-2161,30594,24245,23458,-15991,-14609,15667,8999,-31296,-4413,27951,11238,31218,10082,17033,-23535,-23392,27376,12604,9736,19938,-2235,12457,-9137,-1874,-32139,-24371,11673,20453,19809,25875,15705,13497,-26479,4933,-5627,-26021,31708,-10516,-14266,2964,18769,-6857,-1,12837,-30406,24994,30233,15781,8099,-20362,-20205,-14784,25576,13605,557,-1654,-3728,18866,-1854,-1461,-5361,5859,-29542,-12044,-18562,26828
,-4852,-25068,1810,5993,-30792,-16733,13995,-3034,-7048,14293,2401,28182,-10285,-28668,2620,-9468,-26957,6572,-29264,-22636,-11912,-28944,-26452,-4443,3621,-22414,-25576,26675,-30383,-10677,-1794,-10551,32517,-24707,-18699,15151,-3408,17917,-16904,-19742,-19178,6490,-26233,15990,2704,-23745,26743,17718,17912,-12324,11498,-15215,-7074,-19782,-22090,-25281,26768,-24980,24188,6820,20964,23892,23482,-11343,-5442,29348,-11088,-10572,-26894,-21541,-28320,-18329,14724,-7796,31165,-6056,19996,17531,6710,-5586,6848,-13107,-13016,-6801,-19027,18095,26540,-22672,24676,-24594,-27625,-19710,9249,-2582,13354,-29831,-14876,-14554,4630,-12212,-10410,-17658,-26298,20202,-17838,19475,28137,4577,12494,-25662,-6405,-17143,16232,-925,-8919,-21006,30315,-20067,2466,-16675,-14932,-9393,29864,15901,28574,22513,7002,24259,-26903,-22338,6157,28497,25034,-153,-30855,-16783,-3094,24906,30986,-1002,-22953,-984,26727,-29832,-22613,31308,-13880,13388,-6054,11153,-7418,9036,19205,-10997,-28076,28924,419,30762,-15498,-30099,23899,-26728,16514,-6996,-27458,-11130,-30911,344,-27403,26613,-22228,-16389,-1187,-13719,-30864,-2057,14822,14119,-10190,-22357,724,17886,31528,-19018,28176,-9251,8712,25257,3820,8787,-2266,11408,-29786,22393,-29880,-12556,11490,6783,-1066,30124,-8733,31957,22031,27738,-8141,28550,14585,25887,13612,-27498,15584,-30008,1964,-6376,16905,-11960,-29167,-14616,23979,15136,-29497,27789,-2441,19570,-23284,-22308,-23044,-12163,8378,5802,-4457,28359,-10963,-15600,-12034,-18874,-6406,10734,29245,13052,13843,-32024,12624,-5336,8984,6597,-21655,-17520,-6757,25772,-18729,7981,-3787,-16748,17157,-17517,-20680,5363,-9882,-23507,24910,-32391,-5146,19654,24166,-30199,-31286,17057,-21484,-10266,-2464,-21335,27901,32075,3485,-32175,-19581,3905,-731,-20192,4950,-27318,-11829,-17559,-24536,19176,-31325,24015,-14693,-5556,-20055,4428,5807,-162,18798,-8915,-13458,-595,24012,16811,-20118,18181,25844,-6985,-9857,-30387,16775,28861,-15602,-18775,10261,6775,7441,28,-4430,8464,-15590,8996,-15975,9563,-160
54,30704,-5165,30453,22081,4418,-31224,7077,5021,-11086,7334,12330,19483,-14496,-4175,-20064,357,24650,-15176,21384,-11604,25965,-19327,22203,-172,-31639,-268,-13723,-9836,-11842,-956,12547,16955,-22769,-20013,4311,-28431,32553,-17406,-21359,-888,29626,-5502,-28832,-30979,27869,17486,4325,-27914,-12616,-9595,-29049,-25582,-20399,14628,26027,29357,22733,-30817,-6228,22527,21721,28516,-8031,-9375,17828,-31580,26888,13841,18349,4373,18800,7802,4725,-1054,4782,-1283,-10871,-11330,3172,-16263,-18350,32362,-9252,-23001,18862,8379,11597,-31333,23660,-20157,21348,-26524,-20527,-23406,11720,14932,24998,22814,698,-17075,8307,-8700,32424,12726,9783,-9527,-6414,-25403,17085,3455,15619,-4840,-15123,-31288,27726,-1440,24780,-23877,22318,24533,1086,21240,-16832,11869,30981,6863,27685,-276,-25418,-9527,-31362,-963,22490,16231,-3971,14380,-18215,13188,23803,-28762,15063,23301,-24287,-24730,-4456,32217,16350,-32013,1366,15138,1082,-26712,-18198,21204,-15208,15591,32239,-5780,15912,-21079,12254,-16081,-26796,1242,-26060,-3256,18006,-17642,-26703,-20813,-12222,11443,12070,-23436,-3346,21875,15139,8022,21814,-29970,23632,32564,7806,18717,31525,7911,-19708,-11631,21419,1969,29533,-20375,-22126,-29784,14024,-14998,-14892,7650,-30739,7266,-25393,6733,1857,-7276,20887,-14632,-3569,-9276,-21006,-19498,6010,-14627,20748,-30198,5391,7431,-7044,-25422,22987,24097,7313,29300,-4368,-23510,26159,-4678,9354,-32281,-1831,-7593,21425,32703,795,14199,27487,-10654,-3905,16204,-9753,25955,19348,-7250,6663,1646,-25706,28142,9344,-9031,14473,-4790,22234,-3751,5061,-18770,20725,-8332,-5335,-22419,13736,-24423,-6857,12649,27459,-23190,-12843,20306,2029,23730,-7697,18237,-10839,-4207,10851,-22822,-27110,13900,-5323,-2522,20478,-31730,-28133,-10735,30159,9655,-11990,-7566,14218,16251,-17255,-1004,-24327,6884,6034,7020,-1390,-11322,-6615,28958,23781,-15842,-10197,1608,-3306,20527,-13404,17660,8945,-18099,-28150,28062,26345,12105,14736,-18857,-23078,-32575,-3834,15404,-28404,28442,-3652,6940,2200,29654,22387,30
107,-22878,1366,5717,-4467,-14594,32136,30169,1023,26715,-10000,-31998,29167,10554,-3416,1965,-28238,-16434,5167,-22943,31415,-27490,7098,-7666,-24394,-2546,-24157,-23481,-21049,-1138,-8916,-1222,12897,-22956,-24679,6728,-8173,-32756,-8324,9286,-21955,-10157,25243,9256,15019,22863,-11536,-14898,-5545,21096,-3060,30490,20082,14289,4822,-29713,25433,-27143,-12302,2959,21622,20292,1673,-19898,1382,-24012,8646,-19092,-20301,1171,-3879,26833,-15383,-19352,18174,-3668,-20579,-18675,22509,-23987,-3032,29354,-31973,-678,-30231,15591,-29484,-25529,-3545,21488,14829,-17964,-22453,7765,-20178,-8315,19235,5659,4024,22255,25999,32529,7430,25067,5187,14423,24643,496,-7031,-7668,-21908,9692,23341,-20259,9844,-19710,17384,7562,-2441,7555,-30552,-4035,-28789,-5418,22165,-7155,-315,24619,-6682,-13796,8606,13640,-25179,-19113,8615,24995,-26385,32531,20050,-18780,-7912,-24334,16535,-22033,-25172,5017,-6601,15723,-14291,-14054,8530,-30906,2233,23942,-26499,6085,-11201,-2702,6532,17131,-17930,-30329,-16858,-11283,2102,15003,-10931,-16112,-2079,32383,-4357,914,-28666,19104,-1075,-25822,21232,14291,2068,-19714,3690,18255,28641,-31212,14311,-16071,9067,12150,-2180,14136,-11925,6773,746,-14218,-4396,603,-7218,28554,628,18343,12797,28009,-23799,-30700,-20503,29562,2536,-29897,-21289,30037,-13843,28163,27409,13489,3584,-31473,-11360,-16305,29726,9314,27330,-7307,-22211,-4831,15740,30178,11958,2936,18587,-16594,25003,-24237,10503,22275,8662,-5546,24902,32192,19741,9888,-27996,-2611,19620,31666,25393,-14230,11236,-29161,-22096,-28196,-24201,-21243,10430,-19487,-25578,-1203,-32162,-11783,-19180,3180,15630,-31996,-16118,-19158,-6369,15546,-6969,-28078,28988,14346,11357,-28566,6292,25079,-10877,31642,9238,-3717,-14660,-26802,31062,-13458,-20057,16582,18198,-13560,7112,16329,29840,-15567,20708,10781,-25252,-27146,30226,5273,-30566,28695,-29458,-9704,-11495,15042,19627,-5930,-27878,-5204,-16060,18900,13880,-18969,-22163,25432,11128,-6729,-25720,25396,-29095,-26180,-13740,29639,12461,32293,-28060,2860
2,8787,-709,-14710,14621,1285,9069,-2139,-23020,-26174,31705,25212,22515,-31683,-7480,-7050,17763,-8754,-6668,22662,-25508,2076,-23871,-17520,26748,20212,24068,-5231,-29912,-30169,15582,29158,30863,29159,-9103,-31992,17632,-24140,-26700,32582,3459,24363,8841,25365,-24843,24920,26639,-6696,1167,-12995,-23488,18918,-19841,-11052,-18638,14040,-352,17034,21331,30015,14045,-32742,5600,27518,28658,16764,-6757,-1347,27917,4064,6130,-29744,31496,16347,-17384,-21004,-537,9430,24043,-29344,-9580,20912,-4093,17400,8020,-7786,-23455,19531,-29998,3140,28766,22022,6775,-9007,-29667,-14888,32696,8206,-19099,28117,14988,-18227,22281,19183,21082,-21390,16314,12776,-25303,2280,-24915,11514,9613,-9656,-11584,13857,-31413,-18789,-29519,-28202,-7648,-3881,5446,-22913,8691,6891,7654,29218,-6934,3330,1159,-32191,-27333,24999,-21478,5778,6871,16461,9511,27090,-12188,29624,-8297,374,23486,-13207,-17683,30746,-14410,27834,-14111,13914,-25136,21288,-1888,-29507,-18906,22426,14987,19354,-9657,-8872,-7306,-6515,7358,-12880,13423,-17661,-30455,-23690,-9239,12529,1696,20698,28755,-32378,-21343,2368,-29980,-31244,6009,-14560,16328,-20476,-11184,29106,-27910,-19074,3020,28982,-18309,-6084,-11504,19547,4323,31494,-1028,6162,-30174,25045,-15908,-13395,-28588,-28268,-11064,-5284,-14009,12904,-3072,-22868,-26888,21794,3353,30829,-21881,4573,23463,-25340,13380,7880,24086,22491,9789,157,31929,-606,-4908,-20900,26386,19683,-21241,-26295,-11920,23029,14339,26016,5639,-18318,-4923,-25562,-5917,-31364,-9085,-12696,1913,12141,-23261,-2505,-2177,-2946,19297,13701,-25310,23361,16406,11111,-11266,-6043,-17385,25740,-22854,-29626,-10465,3752,-11971,26743,13695,-10702,30714,16627,-11579,25768,-31263,-17837,10825,-8378,3371,31968,27404,15890,-22916,-18745,-29731,-14109,18,1119,28483,19737,-29337,-18128,31795,9344,15407,30165,-18179,20980,20879,-9079,15901,7238,17927,-7067,-18298,922,23631,-14695,-20830,24600,6431,13331,11918,-26072,21768,27385,-3588,32321,21598,31644,-26906,-25527,19412,-13955,-422,-32237,29005,-21
61,10253,25946,-1132,14887,-23313,-12458,-14833,-31102,-7301,8881,10921,26141,-27986,-20540,-8300,-19470,-18691,4830,-21279,-25482,-1416,29306,-13012,27549,3348,18169,29610,24539,30721,3893,-17664,20262,-28820,-18973,-16141,8177,21732,4213,-5805,21089,-24073,19504,7869,-23592,1727,17179,-2831,31828,2589,16842,29519,2148,8589,-4269,13242,-29900,6790,-1694,32202,22584,-18783,-28623,714,-9637,-10977,12176,-3855,6952,-243,-25237,20939,-18322,26971,-7690,-8483,7121,-18198,-40,9011,-13242,3488,-32352,-2133,-12103,12976,8924,11392,15661,-26719,-3650,-7727,3468,-21252,1319,-25380,-4782,31012,14279,-8341,21201,7224,-25411,-653,-24731,-26618,-4514,7537,-19864,-17356,26238,-2429,10664,-9575,-11942,-625,18634,23650,32282,6805,-30005,30365,-19072,-17491,29036,1790,17954,-15544,2027,-25351,23341,-5827,-28497,-5423,-711,-12661,15228,2311,16660,-29508,31465,23597,18032,-18001,9243,-28377,20909,-24604,-16546,-16171,-3793,30902,-15388,5701,10139,-30572,3965,3112,-23209,-31054,2520,-9546,32123,-25917,27780,25049,23875,-6604,-29192 diff --git a/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice18.tflite b/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice18.tflite new file mode 100644 index 0000000..15c0c75 Binary files /dev/null and b/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice18.tflite differ diff --git a/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice18_golden_int16.csv b/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice18_golden_int16.csv new file mode 100644 index 0000000..86a523d --- /dev/null +++ b/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice18_golden_int16.csv @@ -0,0 +1 @@ 
+8099,9097,1946,17247,-9168,-12984,-23471,-17691,-22121,19516,-25265,-29059,404,-31132,2577,-20147,-24998,-14276,-6607,23139,31562,10403,20955,24646,-17819,-21636,22587,14535,-9998,26913,18923,5503,3099,-2687,12612,-442,25960,-28861,-29109,9086,-26405,-37,-9754,-21872,-25430,3144,29783,30728,14157,19176,16782,-23025,15891,-20546,30484,-28461,26175,-29844,-2422,6957,-4700,-15233,-7204,-28042,17136,-30324,-22092,-18125,12607,-20557,32546,-11499,-29123,-24891,1620,27327,2536,-12744,15090,29713,13763,-17983,9782,-32420,32190,-22609,-24041,-30667,-13888,-17707,21734,30011,12835,24734,13886,-18588,10814,1162,-14474,29,22892,29468,-3562,23651,-1240,-28613,13151,15643,29695,2361,-18619,-24215,-14220,-3697,28893,-28911,5150,-7583,3966,-25701,-12564,2365,5574,17452,-1472,17363,3438,-21542,7900,19116,12721,29878,13153,15747,-14990,21274,-27587,7136,-32015,-17837,-25301,-1506,31323,-3540,7518,-16230,8915,16231,-14146,21573,14540,-14476,29773,-4065,-32268,31354,11641,15121,-4331,4654,-27864,17679,1459,-20049,-2902,-19244,17556,6356,-30826,-6929,-26160,12732,25763,-21392,2145,-2884,4863,-23426,14258,-13033,4646,-31787,-9322,-17209,14972,10535,-17931,-8619,8841,19625,20575,-26754,-10171,-17282,9850,17343,10628,32406,-15892,-1223,-13674,-15765,32478,15820,-12807,-12884,-13869,-23171,-13112,11148,24424,-20480,14476,-5058,-15177,-6835,17403,24939,-1117,18929,-20018,13942,-6351,2433,-28380,-30445,12202,-27410,4389,-30780,-8506,9627,8056,-16324,-26959,-5818,11856,-10450,-19895,-15156,-18797,-4855,26355,-31399,910,20773,25598,-15842,32113,15541,-14556,-11237,13747,-28005,23049,23715,-19199,-11221,-1089,-881,23345,-5162,7690,-20178,7085,-6626,21206,-13782,7833,-15930,-19906,16362,-5449,14655,-2169,-23980,6162,-18982,24104,-32236,-15458,29342,12875,19818,20896,-23040,-5697,-29982,-9079,-8628,-24561,26289,-3730,11692,-30301,-6791,-20467,32600,-22913,-24155,-11372,-7510,17292,-22827,8775,-31145,-802,-3157,-16979,-30503,11400,-138,11239,-1055,27280,22733,24730,21877,17875,-17023,-30102,-2375
6,-29287,27143,19455,-13843,2328,-30882,4480,-23377,12967,-837,-27817,13169,2262,-28291,30874,-5733,16406,-11343,30755,32649,22308,27825,-20201,28565,20137,-14834,-4729,5778,-17689,1034,10986,13014,-1522,-12965,-5697,22085,-17597,21219,-10256,-26076,3242,-11321,10500,-14746,32552,7857,14349,-14843,21924,-25075,-20303,-12885,11063,23165,8813,17651,-13063,-3293,27651,8819,-9582,1216,16255,-13636,-8595,-29012,32755,-6868,28968,11852,-19620,-29504,16612,-29541,-3836,-27610,5452,-31508,14851,-12031,-2240,32112,21967,30161,31899,-11387,-25246,3172,9980,26697,-21909,5311,-7878,-12176,4699,-28611,10594,23073,-11512,29801,13501,8299,15453,-20036,9485,12074,28989,18297,3873,1148,2009,12871,-17117,25036,14538,7037,-24292,-19433,-27684,-18293,-13860,7296,-17914,-17015,-22315,-28328,3869,-12700,26293,21522,-10175,873,25532,7196,-2294,-5773,3031,23418,32086,-11331,-28084,1811,21251,-2091,-19097,31785,13916,2510,3740,16332,4177,5348,17528,15501,22115,17972,-27447,27672,1542,20287,7836,26484,-28901,-9777,9091,-15639,-22054,9788,-29926,3396,-4193,5392,21577,18748,-12196,-5156,26211,-3289,29065,26881,-30418,-17683,-4662,8331,-29752,-29891,3750,-12396,-19834,22864,11233,-20285,5548,-8996,-28497,-13053,-10850,11296,19938,-30233,31532,-19007,-21376,-32249,-19085,27924,-18493,-2053,-16477,-23918,11102,-25229,-12522,-27367,-14951,-5050,24423,-5402,-6392,30785,-20690,21485,6826,-17413,-549,-14256,-6189,-17499,-4321,-24398,4428,5393,25421,-29242,-4162,-30106,15801,-5969,14619,-31634,30665,16570,11703,-27246,-19895,-30859,26451,28399,-26658,-25649,15621,-29826,-13977,16926,16533,31367,32215,-29644,-113,19086,-22374,13700,6025,32486,-29787,-16750,24538,25521,-32434,17458,4483,-24193,-7346,-14019,3637,12129,25707,-1048,11051,1184,3063,8903,-32340,-16660,-16481,30687,5573,14673,-2613,-26698,7349,-4490,26122,8512,-27866,-1394,-24480,-27106,10332,-26262,13919,-28054,3061,-18082,-19245,-20038,26655,10195,-18467,-10078,-11932,-18542,-9861,22918,-32751,-31237,30884,-24769,32160,8588,-6160,-27894,102
4,-32049,24296,18812,-26288,1431,12423,-20535,13189,-27323,-26785,-12589,3253,-31239,10702,6136,-8539,-22105,-11402,-8808,19315,-15518,15828,13940,28485,-9552,-22215,-2417,17575,17692,23816,1647,1347,11930,28962,15114,30872,-7055,-4267,-22807,29175,-5683,6989,-8815,18839,8615,25952,-25758,-12774,23679,-10032,18741,22749,-19240,-27066,-19138,-24205,13970,-21633,-25951 diff --git a/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice18_input0_int16.csv b/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice18_input0_int16.csv new file mode 100644 index 0000000..96d7fd5 --- /dev/null +++ b/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice18_input0_int16.csv @@ -0,0 +1 @@ +8099,9097,1946,17247,-9168,-12984,-23471,-17691,-22121,19516,-25265,-29059,404,-31132,2577,-20147,-24998,-14276,-6607,23139,31562,10403,20955,24646,-17819,-21636,22587,14535,-9998,26913,18923,5503,3099,-2687,12612,-442,25960,-28861,-29109,9086,-26405,-37,-9754,-21872,-25430,3144,29783,30728,14157,19176,16782,-23025,15891,-20546,30484,-28461,26175,-29844,-2422,6957,-4700,-15233,-7204,-28042,17136,-30324,-22092,-18125,12607,-20557,32546,-11499,-29123,-24891,1620,27327,2536,-12744,15090,29713,13763,-17983,9782,-32420,32190,-22609,-24041,-30667,-13888,-17707,21734,30011,12835,24734,13886,-18588,10814,1162,-14474,29,22892,29468,-3562,23651,-1240,-28613,13151,15643,29695,2361,-18619,-24215,-14220,-3697,28893,-28911,5150,-7583,3966,-25701,-12564,2365,5574,17452,-1472,17363,3438,-21542,7900,19116,12721,29878,13153,15747,-14990,21274,-27587,7136,-32015,-17837,-25301,-1506,31323,-3540,7518,-16230,8915,16231,-14146,21573,14540,-14476,29773,-4065,-32268,31354,11641,15121,-4331,4654,-27864,17679,1459,-20049,-2902,-19244,17556,6356,-30826,-6929,-26160,12732,25763,-21392,2145,-2884,4863,-23426,14258,-13033,4646,-31787,-9322,-17209,14972,10535,-17931,-8619,8841,19625,20575,-26754,-10171,-17282,9850,17343,10628,32406,-15892,-1223,-13674,-15765,32478,1
5820,-12807,-12884,-13869,-23171,-13112,11148,24424,-20480,14476,-5058,-15177,-6835,17403,24939,-1117,18929,-20018,13942,-6351,2433,-28380,-30445,12202,-27410,4389,-30780,-8506,9627,8056,-16324,-26959,-5818,11856,-10450,-19895,-15156,-18797,-4855,26355,-31399,910,20773,25598,-15842,32113,15541,-14556,-11237,13747,-28005,23049,23715,-19199,-11221,-1089,-881,23345,-5162,7690,-20178,7085,-6626,21206,-13782,7833,-15930,-19906,16362,-5449,14655,-2169,-23980,6162,-18982,24104,-32236,-15458,29342,12875,19818,20896,-23040,-5697,-29982,-9079,-8628,-24561,26289,-3730,11692,-30301,-6791,-20467,32600,-22913,-24155,-11372,-7510,17292,-22827,8775,-31145,-802,-3157,-16979,-30503,11400,-138,11239,-1055,27280,22733,24730,21877,17875,-17023,-30102,-23756,-29287,27143,19455,-13843,2328,-30882,4480,-23377,12967,-837,-27817,13169,2262,-28291,30874,-5733,16406,-11343,30755,32649,22308,27825,-20201,28565,20137,-14834,-4729,5778,-17689,1034,10986,13014,-1522,-12965,-5697,22085,-17597,21219,-10256,-26076,3242,-11321,10500,-14746,32552,7857,14349,-14843,21924,-25075,-20303,-12885,11063,23165,8813,17651,-13063,-3293,27651,8819,-9582,1216,16255,-13636,-8595,-29012,32755,-6868,28968,11852,-19620,-29504,16612,-29541,-3836,-27610,5452,-31508,14851,-12031,-2240,32112,21967,30161,31899,-11387,-25246,3172,9980,26697,-21909,5311,-7878,-12176,4699,-28611,10594,23073,-11512,29801,13501,8299,15453,-20036,9485,12074,28989,18297,3873,1148,2009,12871,-17117,25036,14538,7037,-24292,-19433,-27684,-18293,-13860,7296,-17914,-17015,-22315,-28328,3869,-12700,26293,21522,-10175,873,25532,7196,-2294,-5773,3031,23418,32086,-11331,-28084,1811,21251,-2091,-19097,31785,13916,2510,3740,16332,4177,5348,17528,15501,22115,17972,-27447,27672,1542,20287,7836,26484,-28901,-9777,9091,-15639,-22054,9788,-29926,3396,-4193,5392,21577,18748,-12196,-5156,26211,-3289,29065,26881,-30418,-17683,-4662,8331,-29752,-29891,3750,-12396,-19834,22864,11233,-20285,5548,-8996,-28497,-13053,-10850,11296,19938,-30233,31532,-19007,-21376,-32249,
-19085,27924,-18493,-2053,-16477,-23918,11102,-25229,-12522,-27367,-14951,-5050,24423,-5402,-6392,30785,-20690,21485,6826,-17413,-549,-14256,-6189,-17499,-4321,-24398,4428,5393,25421,-29242,-4162,-30106,15801,-5969,14619,-31634,30665,16570,11703,-27246,-19895,-30859,26451,28399,-26658,-25649,15621,-29826,-13977,16926,16533,31367,32215,-29644,-113,19086,-22374,13700,6025,32486,-29787,-16750,24538,25521,-32434,17458,4483,-24193,-7346,-14019,3637,12129,25707,-1048,11051,1184,3063,8903,-32340,-16660,-16481,30687,5573,14673,-2613,-26698,7349,-4490,26122,8512,-27866,-1394,-24480,-27106,10332,-26262,13919,-28054,3061,-18082,-19245,-20038,26655,10195,-18467,-10078,-11932,-18542,-9861,22918,-32751,-31237,30884,-24769,32160,8588,-6160,-27894,1024,-32049,24296,18812,-26288,1431,12423,-20535,13189,-27323,-26785,-12589,3253,-31239,10702,6136,-8539,-22105,-11402,-8808,19315,-15518,15828,13940,28485,-9552,-22215,-2417,17575,17692,23816,1647,1347,11930,28962,15114,30872,-7055,-4267,-22807,29175,-5683,6989,-8815,18839,8615,25952,-25758,-12774,23679,-10032,18741,22749,-19240,-27066,-19138,-24205,13970,-21633,-25951,-20708,-14651,-11349,19063,-3723,25921,-18454,-31826,7946,23163,-7104,27331,28268,4207,-17571,-8469,31360,29670,-7553,17822,-11839,-21910,11006,-20634,19472,1975,-27819,27413,-19510,-22376,-30779,-7925,18046,1802,18162,25878,3757,22146,28157,23826,24071,-555,17112,259,4994,-10716,3888,-26638,-1488,11746,12682,-2106,21413,-10257,-7049,26526,6296,4956,29169,11265,5020,4090,21484,-18075,-10720,19209,3151,5259,20458,15057,-28276,12775,-26781,-2193,26001,19634,13192,2680,-17101,25866,-10989,7557,18974,-30444,-28596,14891,-27318,-16844,-18501,-18282,16451,-17481,-26264,28386,-24549,14421,14406,-22761,-10432,24829,13476,25783,-1325,-27106,3014,-16037,-10089,4090,-4066,16674,1233,27387,16085,-32670,8736,32184,12969,-16290,-3072,-28901,-27693,-10195,-13637,-20920,9791,5626,25997,-10987,-4678,23517,-23453,-24019,2809,-20696,-12360,-11195,-27940,-21268,22697,-23302,2147,18369,6378,-2
0759,31437,-1976,23093,13506,15413,10832,-27658,4018,-11245,1202,24852,16701,-11274,-10704,9891,-12702,7973,32272,-19357,-30934,-18661,-22263,-8765,12018,29951,4186,16138,-21296,-17688,-10578,5078,11084,-25106,-19244,-24029,-27604,13155,-22282,16491,-30223,-26080,14702,-5624,-5899,-17612,-30815,14977,30029,-11601,4384,-22328,-25500,-16987,-21077,19996,-2164,-4832,28503,-2975,5843,-4627,-18434,-24541,16397,-32485,11855,13262,7251,-26812,-8555,23592,-13312,-498,-31193,14601,-9452,31483,7548,17159,-29733,31779,-20221,3738,9184,-10700,23436,1058,24893,-10883,-1619,-6514,26067,14535,-15878,-30428,-27345,-13496,-24793,30264,22766,3614,27539,17880,3683,19954,6443,-24632,7232,23607,-20333,23432,12829,6202,18822,-17383,-22738,-11970,-6511,23233,-13180,17135,-8675,-21222,-5574,-19173,-13104,-21915,2509,27031,27020,-3336,-2463,-18966,-20478,-27735,-20257,32091,-7575,-4179,14924,28422,-7351,-25967,-31746,-11725,-4632,20198,-31252,-16920,-18794,-31035,7451,-4544,13053,9261,30815,-8515,-26885,-15119,-11911,-21266,18311,-23180,-24795,-31120,15099,15833,19215,-17167,-26085,28665,-20913,1540,8141,-11261,18007,-21275,-15890,-13142,-3116,-17107,25618,-12385,6809,-30781,30460,14667,1876,5695,2074,-15072,2377,1774,31693,-27916,15114,-84,-17877,-6625,-19672,4751,-22944,7379,21217,-13643,-5971,-14748,-19336,23672,-11506,20637,9798,-20669,-25923,-17500,-32263,-22798,-6374,-24881,8005,-10164,26895,20314,23703,6430,-15374,-19766,-10100,-3061,-7066,5324,13327,-25706,26291,-3403,17451,-15130,-17390,-15318,30498,19511,3305,6207,21977,-4124,20937,-25982,-29004,32515,1561,1670,-7397,-24416,7370,-25056,-27098,13777,13070,-23483,-17956,30235,20085,24654,29466,-11739,-20151,-10498,19742,14723,-28966,20625,-1506,17430,16104,23259,-15330,12069,-14319,-10437,-12907,-20324,4535,7942,-18662,-18932,-6278,9132,22301,-13712,25078,-15187,31996,9005,5518,-27477,-29050,-24987,15653,-20671,-21606,8312,-28397,-31392,21317,-14202,-13749,11433,3812,12272,-24579,-22584,15879,944,-14318,26397,-9525,12024,-23696,2755
8,22871,543,-28162,25239,22732,-12961,-30222,28635,-28921,1431,23169,17282,16971,29041,-15575,-2092,-21597,17425,9206,14020,-15920,32164,-31366,23630,-19116,2264,-2108,-13742,13201,22373,29979,-21662,-99,10321,21192,5318,-11304,-31571,-12573,-24399,29861,5453,-559,-22062,-15858,15513,-14934,-4113,-21725,6865,-18517,12175,5588,-3437,15560,5683,17886,10760,7920,-1287,-15967,13985,-4119,-12131,-7689,17955,713,-18447,-7066,-24858,26583,7200,4760,-13873,29361,-24856,-1651,9450,-23289,-1078,29685,-288,-5471,-15196,-6892,-12015,7306,7232,-28097,-27258,-11422,20512,29628,-7313,-5297,9134,-21166,-25521,-13844,27143,-5757,-9440,20877,11642,7592,14317,-28228,23527,18229,-3656,-24140,-14035,14086,-30984,-26644,-8470,-29729,21701,-14049,1523,20897,-6524,19765,-22451,-16467,-18536,30062,21701,-25278,-21308,24662,-8799,-9708,27298,-2897,1170,11519,7326,6859,-32678,4505,-1478,-22783,-20228,1320,28495,-28025,-7882,28336,29576,32548,2971,-4394,-23276,-25308,4697,15337,-26429,-27080,-2033,26620,24605,32078,-28816,26691,-27188,24850,28431,1211,6768,1587,-28362,25741,19108,-19968,27128,-24301,-3106,-6808,-27306,-10625,146,17466,-3167,6257,21898,-16289,29,-14037,21767,10232,-19769,31426,-16918,4840,-12840,-766,7867,14686,-2209,22588,28141,26692,-14111,-11188,22520,31857,-21443,24838,28982,30416,-27931,1031,-8839,-3988,-25317,16461,19112,-25659,26209,27813,-18528,-6224,13975,18897,18973,-12406,-9468,-27989,25953,-19908,-4354,-12723,4850,-10479,-30557,17182,16041,-7525,13950,1515,1908,-17151,7677,21382,-8790,-25616,-23719,-17326,-7384,-30827,-3910,-30466,17515,-9549,-23590,-10193,24176,6524,-20105,-287,-15874,31944,25320,-31683,29314,-13495,8702,-8170,-18458,2784,-6752,10261,15665,18454,-14074,-23763,-27169,30631,22366,18502,-20829,-10364,-29169,22062,13990,-9844,3354,-31260,3285,7535,-565,24356,-5172,26467,15029,-14168,29434,1442,30350,18305,16238,20308,17985,13850,7759,11446,31187,5434,-3292,-16211,-31738,-29809,5810,14162,19416,-28972,32068,-20444,-28817,3017,30360,-30858,-29720,22473,1
5962,-22923,-13477,-16990,3408,-15393,-32650,-6324,31921,-23853,25047,-718,24009,24212,-6067,9415,24993,-25656,2324,-24343,2715,-25882,12293,20307,12857,3360,-14612,19382,-5479,8761,6145,29652,-3164,25105,27376,17211,-23678,3725,19065,-10037,-32689,25130,-14073,-22333,-828,14478,-5871,15047,-31460,29220,2792,23331,26621,-18981,19957,11476,-25181,-7577,-15531,25318,-27828,16475,16429,-3384,-28341,6176,-17729,-3189,27995,2016,13230,-7927,29916,-11757,-13560,-2126,12051,-11111,27282,-24400,14125,12914,18952,16866,14214,32350,-4908,-17469,9168,1577,-29732,-26520,11186,27417,4313,-19794,1244,18408,18790,-30134,11369,5790,10462,2309,9628,19895,-6035,16059,32611,27041,-21389,16226,32314,-11558,23123,-30774,9135,-31634,19795,29001,30878,30292,-19982,-10749,23097,30771,-12519,12353,13589,-4628,-30396,19251,5860,-12986,-15995,29057,-8284,20122,8751,20417,13870,-5770,-1307,-15060,-18731,10707,-30189,18944,26218,-27702,-18134,-20609,11639,-20897,-15033,24126,-11145,-12482,-22955,21421,-30946,-17986,-15077,15667,5698,21296,-28460,-28504,10326,-12717,-13399,-278,6903,-11444,-23606,30400,3203,14557,18982,-23023,-32555,-22763,-12491,32179,23544,6287,-23412,12913,25797,-18732,231,-13569,26550,-2966,-29582,-6541,-20716,-20983,21351,24346,14928,-11938,29993,26887,-30633,13584,-17771,16077,-22719,-17489,14608,31183,13519,-30068,-23597,24789,4198,4241,-29320,-29104,-12073,-32325,-23491,31065,316,-32059,19329,-14047,-8032,-14677,2799,27209,-28504,-22696,-26226,675,31141,16831,469,13009,23265,-20666,24572,29552,5405,25264,-28999,13182,27019,10830,-7370,-4695,24718,-13324,-12,-10389,18101,-25714,-27739,-23772,12561,-551,18349,-18153,2766,-12757,28259,-25927,13489,-32476,9301,-16799,-16920,23127,-16247,-24150,-3325,-13318,-2130,-18755,-6305,8844,-12292,-32039,-25582,-16658,19350,-32405,-22739,-29967,-25129,-7600,-25614,26613,27213,15652,-22957,17250,12130,-1036,-3579,28894,24694,-21477,-10392,29457,6320,8384,4147,25270,-20783,-10915,-17308,-20635,16173,6850,2063,431,-9981,16488,21676,4165,-
17843,130,977,17020,-2144,26016,-23740,7057,27878,-3801,-19751,4888,-14665,-11726,-13653,-31832,8665,21773,31933,24897,16395,32201,-21364,-23404,1888,7802,17192,-12380,-5420,-5730,26149,-4293,12052,-13144,-30485,-30530,-24692,-30846,-31233,21437,-30959,-6465,-25166,-12012,-4427,-9825,25188,-25803,-18207,2170,22903,28000,-20738,3246,8954,-12431,-10716,-1310,2902,30227,7358,3093,-6049,-16173,30250,-32489,11601,5995,10050,996,-9344,31816,26310,29407,20531,-11449,-19655,30428,-28287,5884,-28802,17474,-30044,31593,26970,15384,7461,24152,681,16186,-15583,19188,28848,-1687,1446,16180,15512,18444,-18963,-25042,5830,21694,22266,-10364,-17529,32435,-17537,-26016,-30205,-2291,32048,-31715,-17151,-6521,2313,10664,-20557,-19233,20588,25461,27940,-17965,18955,-279,19141,19471,-4250,7923,-27124,-26411,-13843,-5940,-10442,6220,-13295,-1790,-24106,10654,-19270,-27339,9831,-22041,-9238,-20510,9925,8361,-21656,12439,17152,10119,-29696,-30089,27056,27577,31135,-8440,-29116,-13244,-11089,-6156,-23974,-1302,-13532,-928,-20785,-32666,28230,15562,19634,-25582,-23141,-1528,-29195,-22568,26864,-437,-16650,-27257,-6800,2901,5869,-6191,-23110,25302,-9233,-3547,-2207,14422,-26590,31738,-16953,-19569,8306,26137,23996,5449,12115,-18526,5004,-9938,1341,-10085,-27419,-13313,9624,22169,10021,18926,25249,4615,9471,380,-8932,-1092,-28518,-30868,-27302,25544,-17728,-24537,-29176,2759,28111,10126,23440,20141,-13942,-4930,15559,12837,-12871,-31957,-20614,31445,3203,-6997,22528,-2831,30287,-17988,7563,4101,2870,-8062,17664,-5986,20395,7891,23847,18132,-21193,-9887,8189,6310,-26460,4080,25393,4742,-17850,-14739,10470,23969,-19988,-8922,12503,29492,28410,30125,9577,16877,-30211,29913,19482,21867,9258,24347,7541,-6283,-10495,7631,-7819,17452,-24872,23487,-32575,16864,-9395,21539,22177,-13251,-10646,-18496,-23248,-12302,-14987,-6658,30226,-19920 diff --git a/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice19.tflite 
b/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice19.tflite new file mode 100644 index 0000000..db7c526 Binary files /dev/null and b/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice19.tflite differ diff --git a/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice19_golden_int16.csv b/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice19_golden_int16.csv new file mode 100644 index 0000000..4318f00 --- /dev/null +++ b/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice19_golden_int16.csv @@ -0,0 +1 @@ +15050,-2290,-28360,22114,21746,16020,-8316,-6255,-28380,13190,32172,22299,-24383,7881,31808,10374,19907,8992,-23476,-24325,31756,-24331,-12480,21196,-23299,-7744,-3539,1263,4875,3279,-5721,-19371,-17836,-11450,-20947,-13341,29203,12558,16482,29609,10821,-31679,14435,17725,-11814,-19404,27451,22286,-2913,-10015,23839,14028,-9448,-11081,-16170,11751,12019,26594,17337,-5700,-9385,-17196,31027,23268,-5326,-6165,-18614,15117,24961,-10511,-27832,-11246,-23653,3926,-26511,10094,29738,29963,11109,-21330,3860,-5481,-22250,-30081,-8005,-15394,-14953,1885,-11333,135,6445,18776,-20753,31852,9157,-32356,21563,20165,21167,24623,-15750,3985,-19052,-14014,-25703,17874,-5272,-7375,31800,24986,-144,25254,14114,-15638,-15218,-29068,26326,9276,-29758,-29323,-4691,27936,10577,-31111,31543,22596,-1742,5099,-2266,4103,28314,-12129,-30486,613,-18311,14689,21803,28248,17781,12422,7346,18786,30377,-1777,-17042,-27884,25710,17232,-9332,-18362,-26596,22396,-11505,-6141,15498,-6241,23089,-822,4108,7280,-29394,15122,26386,28624,-25294,32418,13126,30089,10753,-20622,988,3052,26487,29150,-29600,-25124,-29999,-19264,22474,-11372,-17890,11185,-30354,2182,-7973,19965,-30634,13616,13511,-25756,23406,-15957,23677,8621,-8249,6492,6931,4310,30040,-21561,-18178,28772,-914,15461,16040,-16276,26523,-17769,24937,31408,-21766,-29653,29358,-14795,-16564,-23160,3362,-2734,-22108,7025,-20607,-9373,
23304,966,5322,7990,-15770,24246,783,16940,10650,-30441,6218,3727,-16347,-22327,-21403,14822,7889,6098,3088,2640,9152,-6309,7747,22616,-22725,3186,6813,24867,4147,-24609,-26407,-23356,18761,19062,-9749,11544,-14567,22296,4118,16143,28201,15670,18481,-32316,22768,-24860,-28319,14044,7905,-8361,8073,22760,8305,21459,22222,2387,9957,9898,-13370,31050,23296,10556,-24069,28467,28450,1159,-1366,11410,-15413,-11218,20302,-23852,-27461,-18982,-9045,-13078,-25352,22828,3149,13152,-21944,28019,-28693,-6514,20363,28986,1018,14657,-4252,-22505,-8148,14435,18774,7179,2843,-3114,12165,5631,-17233,19073,20297,12408,-9950,-2230,28234,-9457,25989,-14360,28484,-19205,11945,1729,16197,-6509,1252,29866,21724,19283,-610,14295,-32487,5024,5412,25697,870,-20392,-19740,-11758,25276,-14090,-26827,4618,9262,12085,-17402,5278,-30755,-14264,-5142,-3869,28285,-13525,-17657,31973,3218,-10629,26391,12841,-16791,-15265,-3011,-19527,25593,21112,14268,6865,14299,16770,-31263,-3781,-2770,32459,22750,-6818,-32494,17662,-17985,9888,-18572,-1785,7295,6621,-20407,-15220,-31247,-27192,-8563,-26621,-25139,763,31028,19208,20766,-40,28221,-14223,7981,16937,-14107,23120,-6994,-15285,28861,7488,-32462,30924,-25863,-15794,-2677,-4307,4334,7821,11554,10871,-29090,3409,-13556,29364,1802,5638,28917,2918,-3163,14624,-22344,7283,8377,11819,-10900,-15542,2364,16341,10579,14628,28585,9856,-22386,31877,-8281,-10594,-29396,26490,21382,-26333,-29529,-31083,9780,32126,1980,15800,-2800,-15884,-23099,22185,-10696,16631,3240,14724,18868,26887,10315,-29660,24796,-19623,-6403,-22334,856,12661,-2595,-12862,-27251,-23754,2536,28537,-27803,27413,-14019,-2028,-19665,30786,23189,10134,-15534,-20028,-2853,28291,-11590,-29591,26408,11708,18815,5900,-25856,28511,-8802,20628,-25555,8484,15086,18138,-30398,-11953,-21675,-25485,-31209,28361,23528,-23528,25559,-9132,-20030,30065,18958,-29783,15703,30236,12560,-26507,-29901,-6701,-21881,3049,10676,-19056,25396,-2267,27511,-14988,-8857,-29709,5094,32444,-2208,14071,-4001,13799,4496,17750,-1
6234,32210,2694,-3197,-21333,-20386,1963,-20643,-12949,-26079,2807,-22930,5909,10273,27159,9250,-5888,-9632,18583,22221,13810,17112,27448,-13516,-7312,24211,-30031,-3868,30233,30916,25881,2957,-14203,-443,-13550,26177,20846,5833,16775,-28510,22477,20042,-15726,16684,16739,17294,-32485,18821,11190,-11208,9373,22229,5948,-6258,5356,-8276,19401,28389,9575,9206,22074,-11966,19122,2034,-7079,-23413,6941,-25494,22541,-4967,-4740,12613,17778,21684,17049,-19693,16781,2387,-21941,-11735,-1800,-30719,31458,18820,29949,29072,18462,15402,-11131,24139 diff --git a/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice19_input0_int16.csv b/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice19_input0_int16.csv new file mode 100644 index 0000000..1d7a7ea --- /dev/null +++ b/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice19_input0_int16.csv @@ -0,0 +1 @@ +-947,26781,-228,14537,-13957,-4514,-23104,-17818,7674,-31398,22659,-2728,-26771,28947,25365,3972,-17372,-8283,19120,30995,20960,-2836,16672,-17737,-17822,30080,-9437,-22651,4071,13697,-29027,-23840,15050,-2290,-28360,22114,21746,16020,-8316,-6255,-28380,13190,32172,22299,-24383,7881,31808,10374,19907,8992,-23476,-24325,31756,-24331,-12480,21196,-23299,-7744,-3539,1263,4875,3279,-5721,-19371,-17836,-11450,-20947,-13341,29203,12558,16482,29609,10821,-31679,14435,17725,-11814,-19404,27451,22286,-2913,-10015,23839,14028,-9448,-11081,-16170,11751,12019,26594,17337,-5700,-9385,-17196,31027,23268,-5326,-6165,-18614,15117,24961,-10511,-27832,-11246,-23653,3926,-26511,10094,29738,29963,11109,-21330,3860,-5481,-22250,-30081,-8005,-15394,-14953,1885,-11333,135,6445,18776,-20753,31852,9157,-32356,21563,20165,21167,24623,-15750,3985,-19052,-14014,-25703,17874,-5272,-7375,31800,24986,-144,25254,14114,-15638,-15218,-29068,26326,9276,-29758,-29323,-4691,27936,10577,-31111,31543,22596,-1742,5099,-2266,4103,28314,-12129,-30486,613,-18311,14689,21803,28248,17781,12422,7346,18786,30
377,-1777,-17042,-27884,25710,17232,-9332,-18362,-26596,22396,-11505,-6141,15498,-6241,23089,-822,4108,7280,-29394,15122,26386,28624,-25294,32418,13126,30089,10753,-20622,988,3052,26487,29150,-29600,-25124,-29999,-19264,22474,-11372,-17890,11185,-30354,2182,-7973,19965,-30634,13616,13511,-25756,23406,-15957,23677,8621,-8249,6492,6931,4310,30040,-21561,-18178,28772,-914,15461,16040,-16276,26523,-17769,24937,31408,-21766,-29653,29358,-14795,-16564,-23160,3362,-2734,-22108,7025,-20607,-9373,23304,966,5322,7990,-15770,24246,783,16940,10650,-30441,6218,3727,-16347,-22327,-21403,14822,7889,6098,3088,2640,9152,-6309,7747,22616,-22725,3186,6813,24867,4147,-24609,-26407,-23356,18761,19062,-9749,11544,-14567,22296,4118,16143,28201,15670,18481,-32316,22768,-24860,-28319,14044,7905,-8361,8073,22760,8305,21459,22222,2387,9957,9898,-13370,31050,23296,10556,-24069,28467,28450,1159,-1366,11410,-15413,-11218,20302,-23852,-27461,-18982,-9045,-13078,-25352,22828,3149,13152,-21944,28019,-28693,-6514,20363,28986,1018,14657,-4252,-22505,-8148,14435,18774,7179,2843,-3114,12165,5631,-17233,19073,20297,12408,-9950,-2230,28234,-9457,25989,-14360,28484,-19205,11945,1729,16197,-6509,1252,29866,21724,19283,-610,14295,-32487,5024,5412,25697,870,-20392,-19740,-11758,25276,-14090,-26827,4618,9262,12085,-17402,5278,-30755,-14264,-5142,-3869,28285,-13525,-17657,31973,3218,-10629,26391,12841,-16791,-15265,-3011,-19527,25593,21112,14268,6865,14299,16770,-31263,-3781,-2770,32459,22750,-6818,-32494,17662,-17985,9888,-18572,-1785,7295,6621,-20407,-15220,-31247,-27192,-8563,-26621,-25139,763,31028,19208,20766,-40,28221,-14223,7981,16937,-14107,23120,-6994,-15285,28861,7488,-32462,30924,-25863,-15794,-2677,-4307,4334,7821,11554,10871,-29090,3409,-13556,29364,1802,5638,28917,2918,-3163,14624,-22344,7283,8377,11819,-10900,-15542,2364,16341,10579,14628,28585,9856,-22386,31877,-8281,-10594,-29396,26490,21382,-26333,-29529,-31083,9780,32126,1980,15800,-2800,-15884,-23099,22185,-10696,16631,3240,14724,18868,2688
7,10315,-29660,24796,-19623,-6403,-22334,856,12661,-2595,-12862,-27251,-23754,2536,28537,-27803,27413,-14019,-2028,-19665,30786,23189,10134,-15534,-20028,-2853,28291,-11590,-29591,26408,11708,18815,5900,-25856,28511,-8802,20628,-25555,8484,15086,18138,-30398,-11953,-21675,-25485,-31209,28361,23528,-23528,25559,-9132,-20030,30065,18958,-29783,15703,30236,12560,-26507,-29901,-6701,-21881,3049,10676,-19056,25396,-2267,27511,-14988,-8857,-29709,5094,32444,-2208,14071,-4001,13799,4496,17750,-16234,32210,2694,-3197,-21333,-20386,1963,-20643,-12949,-26079,2807,-22930,5909,10273,27159,9250,-5888,-9632,18583,22221,13810,17112,27448,-13516,-7312,24211,-30031,-3868,30233,30916,25881,2957,-14203,-443,-13550,26177,20846,5833,16775,-28510,22477,20042,-15726,16684,16739,17294,-32485,18821,11190,-11208,9373,22229,5948,-6258,5356,-8276,19401,28389,9575,9206,22074,-11966,19122,2034,-7079,-23413,6941,-25494,22541,-4967,-4740,12613,17778,21684,17049,-19693,16781,2387,-21941,-11735,-1800,-30719,31458,18820,29949,29072,18462,15402,-11131,24139,31030,-8205,-30738,25806,24476,-8441,-26123,-7782,-17025,-25026,3743,11190,-1420,-11896,-23737,24338,-17361,-27810,-31307,-11077,13325,14416,7122,10778,14123,18612,-30673,-17280,18956,27751,-14938,5673 diff --git a/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice1_golden_int16.csv b/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice1_golden_int16.csv new file mode 100644 index 0000000..c010988 --- /dev/null +++ b/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice1_golden_int16.csv @@ -0,0 +1 @@ 
+-11041,-10344,15525,11727,-27139,-26356,-13996,-22195,15370,-13741,32131,8129,24642,-10958,28911,687,7037,29019,16780,-5690,-15899,-10389,-21037,-10163,-2651,21485,12538,18846,-28430,21949,-10439,-2274,-7734,17435,18015,12372,4215,2241,-790,16738,-14203,-8436,-9316,25623,29383,398,-31840,-19519,21037,-9954,8186,28079,25865,2492,27851,7132,28644,-26962,26039,-26152,-17952,3005,-2787,16162,-24608,-18938,3311,22749,-16889,24025,-27035,-1939,-12048,7228,3921,2871,-14670,-21672,26543,26398,-19324,20750,-2217,-26205,15176,15160,-3828,14639,-3583,28313,-1840,24761,20695,19204,19303,-14929,-6332,24029,14831,-3865,30504,20538,-7078,-14027,-544,-20000,28007,27851,4432,-16478,25242,1943,-24758,-8603,5276,-20766,-3154,-1180,-2793,-27467,-18460,2363,20617,21331,23253,-21309,-31713,-6277,-9113,21094,-1797,-19867,-1538,-6904,-23135,-21125,31763,28041,20785,23750,-13665,-31560,25031,-22132,27087,18676,25222,-21890,32375,-32022,6240,-32378,-3351,4479,-12815,12623,30300,31421,-13434,6042,462,-10989,-3899,-18955,-9633,30703,-12079,-14427,11926,3438,-24202,-24154,5034,9378,-919,-3054,5854,-24250,-24021,-6234,14360,1736,10630,10159,-18296,2874,-30041,20766,24011,-29942,26734,15381,11417,-13388,-10274,-30063,-17494,-11501,-4402,-11564,28578,30069,-12773,-20236,-32173,-3239,-10319,22456,27240,30801,8615,30592,22150,10470,-22662,5565,-2537,-26268,-26831,-27768,27345,-2726,10952,30820,4663,29841,-11225,32304,19069,26660,-8132,19381,16422,30857,-8724,-14249,21082,15421,-2626,-7228,-17861,-14028,18701,10581,12858,-32202,-10606,16774,-18698,4529,18669,-4865,-26160,-28985,6046,21929,14208,8599,-28416,-18909,-2315,-23417,23145,-25309,13391,-10166,-13855,18275,-4029,31817,-2437,11332,26096,3210,5649,-17634,-14839,19139,13633,8999,27188,28784,14587,9794,27926,15436,4912,25978,31312,12040,-1896,18825,12618,-29982,17185,-30204,-32619,-4699,-9914,13660,25442,-27049,-27458,29161,-28404,5249,31730,27177,-23320,-16192,19488,25184,-22950,-32701,21753,13564,27397,29451,25656,-32667,-15176,-24305,-32332,-
12154,-10507,-18278,-26694,18381,-25148,3348,6855,2142,8975,-30245,-30580,9898,3921,5708,-15713,-9355,-10166,28670,7825,12301,-13238,29527,9047,31634,-6133,19913,-20045,-27686,28708,24755,13473,-30452,12313,16163,-2426,30975,27658,2063,10327,32096,20109,-895,-11834,20517,-28418,-27856,-20712,-23362,12879,-1692,10620,24840,-1904,-21550,-13594,-28658,-25367,29427,-32089,-22035,-8250,15522,15696,-27253,1517,-27605,-17229,-28324,-14213,-1645,-7823,15655,32563,3113,4746,-13463,-21920,14562,25833,-22721,28111,-30004,20007,-10683,-30345,-13951,-22211,-4036,-24884,23338,4662,19880,3230,24779,6640,679,-18971,-24317,21165,11185,6739,-1111,-25134,-24095,-30541,2214,-31725,29272,1398,26360,-15616,25933,-9664,-16144,-30310,27916,-14144,25045,20250,-24133,-371,13735,-28482,-30145,12292,19639,372,-20194,17962,-6186,8060,-2978,-30425,24093,6353,2820,18191,25097,-25779,-25136,-22129,5682,-19675,-27259,11998,-7613,-21887,-19392,-9584,-1085,-783,-1985,6043,11980,24506,7673,-20486,30084,25370,2861,-14360,32025,6474,-24181,-4662,-22572,18552,-6410,-11306,-2123,-11974,-9901,-22810,11265,-1171,6608,2752,239,-16625,-3226,-4515,4789,30565,2447,-25575,-9303,-3806,-25538,6662,-3325,24139,18031,-23992,1797,19169,-18412,-3001,-27465,20543,-2384,-17276,21590,825,16656,-22470,28243,14498,16788,-27113,-19527,1126,5905,-3172,-28980,-8943,31764,-13634,-10114,31805,-6297,-6460,19806,12255,28597,-6247,-17469,8098,-29905,3902,26345,-15925,22921,-10095,4276,5089,7656,25144,16605,11239,-29980,-4938,-2282,-8816,30101,5314,-26944,-30154,-11585,14112,17409,-1570,6262,28952,29737,-7102,26756,7886,-2217,-20692,-29251,-27611,-23832,-17724,9084,-2841,11048,-23660,-18937,-19652,10422,-32450,-8337,-17481,30445,-13857,-6470,-31326,6702,-9496,-12679,-32714,-8521,-11598,-16154,-18963,22621,12781,-9536,1814,-11111,-7865,15725,-25112,-165,-21519,29963,4743,32045,-25424,-7880,5211,15237,-21990,-25204,2679,31548,27866,-26543,-7525,17437,-3460,-711,-20000,-880,-17036,-2782,8699,14176,12185,-25012,-16178,-26024,10122,2084
5,6120,-16496,1324,20827,30842,-15077,-16378,11052,-8297,-29583,7837,29957,9390,-21390,24156,12925,2374,26685,10570,23795,-27157,16036,11640,9014,28710,24386,20154,28332,-24061,6242,-23273,-1620,22905,-2330,-23092,-24237,-1913,-8371,-29283,-22601,-26090,-26182,-12556,-31483,-9016,2214,22114,15943,18063,-7573,-29759,98,25418,23650,25343,24639,-23123,11893,-22079,-17323,-20790,13082,-8992,26810,20952,-14537,30706,-7857,-14960,435,6690,8560,-18359,29573,5477,24882,-10419,-14227,2280,-13318,-8719,-31993,-11363,-13038,10450,27825,-18909,24522,17078,12489,24940,-19590,258,6767,-5092,22873,-26358,7918,-9422,29997,-26290,10545,-7284,21876,-14340,8249,-18201,-27957,-12701,-31521,12648,-25561,13476,-13397,14887,18549,-2903,-483,23657,-30749,18246,6185,-28952,-24586,23708,-16857,-7302,10645,-6675,-24596,18723,-24709,-29224,31622,13155,-7126,-14861,27589,-21821,19852,-12492,7736,-13133,-19627,-13805,-20439,-17374,-16218,21618,28527,-3438,30127,-11727,-32120,-23925,-20528,3038,22550,13801,4997,1109,23378,13520,1865,2409,10441,-13621,25572,653,-7320,-17328,13942,304,11008,5150,15252,-23462,-17415,-29907,-19179,-18537,-30684,-4937,4486,31155,-16999,-9814,-2787,6608,-13883,17274,22207,-29716,-27687,27667,4921,24294,-18421,32414,-15416,22212,-21183,6922,-4529,4077,30135,4280,-4932,-10758,12906,26291,-8879,27171,-20707,24261,24699,-23062,5594,-12280,-11404,17554,27748,-10326,11843,2213,6941,-11093,22178,-9652,23521,3382,21339,21565,-14590,28179,12932,-14192,1219,24039,26359,15522,-25433,19567,27915,-8194,-9558,20454,11797,-13268,1880,-19793,11663,-591,-15988,-17156,4780,11127,18735,24732,29958,-25294,4540,-9621,26104,23282,-28072,24953,210,-26907,22591,15376,3109,-30850,29546,7607,9532,-11545,-16937,20495,-99,-8388,11967,12250,-29377,15138,9967,9007,6736,-1579,8551,25278,30007,28319,-22222,-18261,-5126,10479,-28216,15986,-31849,-9804,-22021,238,27629,-247,-25002,-6503,-24719,-2298,-30180,-5018,16275,24537,23310,9544,-16176,-9690,27689,-32145,-2547,2434,-24766,11219,-5299,-26357,-2314
9,23859,-15138,-18415,-29775,25920,-7864,7975,-16967,-4813,-22294,-23533,-4201,-29280,3856,-6568,-28343,32143,710,-12367,19213,-18069,-19929,-19020,12375,-17376,-31259,5425,-19144,29826,-16391,28783,-30257,7947,-29930,15541,22545,3833,12022,-13636,5420,5667,500,-26947,-1131,17950,10675,11105,-2041,-1319,-31196,24589,-32533,-4072,-20225,-29884,19785,30804,-28207,-32238,31018,-6475,-8553,10140,-25803,-27084,31994,-14692,1201,-26485,-22268,-7086,29920,14744,-19496,14726,-19358,4595,26806,23221,12846,-3037,30145,26265,-10758,4874,-14389,13728,-26739,-31684,20997,12348,18467,-27339,-20770,-2033,-4559,-4321,31853,32670,-28895,-31080,-22604,-9176,331,-26105,-21120,-31756,-31448,14178,10788,-26994,-4423,5884,-31112,-31114,-6552,11759,28393,8894,22993,6358,28057,16234,8360,17048,582,5530,-22351,-5779,-3156,-19481,-16312,6412,-29843,-337,-19640,-28253,-18928,14580,19801,24759,2365,-3262,-30878,4131,-5613,-11299,-125,-4771,-29161,-20411,-19487,-15490,-18080,-27493,32308,-3652,11105,21646,26698,8125,26957,16952,8013,6227,-30875,-1165,7411,-13457,22399,-3507,3311,5045,27227,-27262,24411,-11221,7893,-21119,-31797,17874,6389,17833,10193,31521,13601,21567,13653,18055,-8868,-23715,24981,-18634,1773,-11305,-13975,10239,-2280,11648,-198,-12515,27387,25346,13833,-8437,-19387,-29781,10440,-7875,-1126,-14020,-24605,30676,19662,-11251,-4324,-2024,-13940,-15096,14032,-30600,-7765,-25368,21560,6264,14748,3124,-19911,27075,-20816,23480,-21693,-24779,-16323,-12733,17614,-9018,17797,7968,-11409,-21945,27479,15840,12271,-8187,3297,10441,-5167,-8659,9813,-15617,28513,-14577,-29090,26164,9504,-20368,28311,19217,15811,6823,-25580,-6303,-20036,6501,27847,-25467,-3256,4249,16167,12779,7550,23313,-28042,11271,31132,-2561,11523,7722,-14784,-11793,5097,-22081,-28223,5507,-12930,-5136,-28821,-20649,6671,6065,18485,-27350,18105,-8512,-11921,3366,23644,-2994,1588,-11520,15867,8699,-28676,-12997,11970,-5924,-21435,27378,24125,213,-8626,-10103,-27429,17289,18850,-19523,13340,-26517,29785,-5052,-6226,-24115,
31842,27326,-32348,5174,-14063,13582,12789,-12551,27172,-1141,5426,-2057,-15347,6848,-24986,32749,18622,31817,16496,-4444,-14590,-14257,15281,27719,-4752,19597,-20269,24957,31977,5599,10363,-2081,-9431,12690,3251,-1214,-3321,17999,28578,14030,29482,-23898,32308,18134,-4340,-19823,-18675,-12817,-2395,-7724,-32178,9396,-21163,-26219,8844,17397,-28337,-21316,2251,-7002,22301,-31863,-22413,32442,6100,3032,13022,31477,13814,24466,23466,-17004,28382,-14585,-29608,-15305,-27078,23386,28961,21232,-14399,6973,-17695,-4186,11207,-30446,23744,-16702,20603,-24308,-17134,462,-7523,5581,-32316,-8648,-27847,5314,-1828,-6795,11651,-26270,19602,-8336,26328,-2269,-23452,-7513,2861,-8076,7568,-2338,-16326,15427,-13246,13579,-6059,25394,-2502,23494,-44,-27259,-23134,13079,16658,25565,3106,18426,-4500,28415,-16846,26992,-23249,32073,-18056,23743,-25739,15528,3222,-19280,-2537,13368,-3339,29978,13580,-16045,24732,-21225,4725,-1831,4469,19932,-26733,1429,-14120,23687,16867,5414,15546,14157,20298,12571,16995,29535,-438,-12986,27749,32568,-25258,15054,27087,-29016,20061,441,27299,-6979,590,26256,23336,31946,27338,28092,6915,7361,-4758,2978,24952,-7945,-17433,18800,10088,-19376,22166,21936,491,-24128,7629,-26666,-23670,14222,24759,-17102,-28282,4156,-3706,28616,-7499,-29217,-24299,-21276,-16190,27965,2810,7569,-22430,25591,4203,14446,-17872,-13099,-22155,-24935,-1995,-8952,27014,-30136,-21893,26713,-9628,-9948,-13122,-17063,-30363,-17879,-26556,-12507,-13354,22819,-30089,-19401,16518,-17636,-13036,-9152,-9491,17649,12173,6095,-181,12628,-1616,27051,26817,-17236,25577,-8786,-29586,5025,2541,-19012,-9832,-8492,12086,-15092,-25657,20697,-7652,31031,32002,28537,-21313,6214,-11053,1515,-5562,-16838,-8219,27,-26180,24439,-19911,8559,17592,-23158,18172,10443,19927,-18357,-19684,-7702,6807,2126,-27520,15630,16524,-15835,-19459,-15399,-4978,-20526,27139,-11105,21139,-20643,17178,-9151,17363,22213,-19172,28206,-28570,27877,5428,-11541,-896,-29922,31042,16721,31369,-23088,-439,-6045,832,-27053,-26876,1
1351,-7546,-11506,-30413,-23791,20694,-23827,19877,-24841,-27866,-15180,-12807,22445,15900,7172,-12400,-26786,-30608,13541,-18084,13279,31014,-1608,-13685,14389,-12366,-14754,28760,-24896,-2463,25748,-24986,-15336,13222,6798,15330,-29376,-3190,-17817,17356,5096,-198,27923,8544,2070,-31526,22169,-22099,-1974,311,16176,-1695,16365,28788,-8660,21082,19372,-22888,-22222,17449,29969,-30385,-19084,20021,15888,26763,-24955,30324,-11871,31825,18417,-10541,-379,-30404,13662,350,2379,5954,2899,13481,-31208,3232,30064,7736,27909,-23104,15970,29064,-17202,24322,-29229,-31563,-11511,4916,16178,20636,27569,1491,-889,-31020,-29755,-8917,3159,14644,-3480,10394,19592,-28295,-26345,15123,29044,-8785,-15896,30698,-25722,10304,-28243,-19238,-6004,-5836,-18778,24785,10660,8813,24632,-19488,-20804,32224,2280,-17130,-4880,13458,19407,-31436,-1647,8180,-8542,-18973,-18589,31764,2715,14255,-20834,30706,-20102,-3531,-19793,-22807,-29681,20367,10160,17043,-18012,15264,-12832,-18739,15315,1924,27158,25404,-3005,-28154,15581,-5724,-15375,-30326,-12879,28560,-18817,-25362,26640,20106,28180,10992,12012,7464,6260,18880,8895,16818,13879,-21984,3226,22947,-19663,28992,-24298,15070,22560,-7739,4430,22404,-21202,23623,21892,12984,-26387,-28202,15617,11619,-25045,16163,-1290,9304,29485,-8347,-13158,-19035,31929,20698,1546,30987,-10217,-5032,12095,3420,-1223,-6986,-16171,10139,17019,-20107,-7763,21634,21859,3105,-4383,-2881,7317,-2508,28457,18062,13209,8330,-6312,32305,15397,25925,21648,17977,-17737,-26939,-20414,-7645,-28532,-7728,-16933,10052,-25924,15688,-11251,2162,-18747,-9713,31692,10974,16592,-27432,20090,-32409,17614,-30116,27794,-30100,-13648,11560,19774,-26406,27004,2224,17042,3032,-10870,-10640,-26414,17226,-5783,-20272,13240,-1713,-10423,18385,27056,-16688,-27959,-16229,9584,3651,29004,-23045,-12345,-3075,-23978,-9779,-13349,18555,25752,-30150,-8716,-9438,-16738,5552,-29537,2533,-29087,-8003,28725,-24664,-17268,-7813,-15087,-16044,-2811,22446,4729,-1702,11283,16785,-7370,3733,-26288,-24933,1
5801,-22660,11574,-32361,31112,24339,-21157,10115,7197,-29551,9913,11146,-17620,-4194,-24970,-20548,123,-29441,28784,5393,-31947,-27352,14769,-31482,4402,-7149,-11448,24201,-1132,20015,-5701,25612,-31703,17230,1161,-4610,26546,12640,-32722,28821,25344,-16224,-29909,-12038,-583,-4471,-31013,-14505,-2904,-25839,6901,-23491,-17196,26268,-3734,18489,30316,7645,-20291,-14799,-22824,-11555,5766,-21934,-10647,15545,-19760,24914,22725,-25263,19297,9450,-7004,-7705,13758,10539,-26667,19250,-14711,10471,-1121,-1172,13535,30245,16580,12051,-18536,-4735,16770,21580,-25591,-15482,32298,-23109,-2387,-16237,-9137,14102,-27311,7853,10502,-21638,-8352,28877,22949,30118,18530,-30383,26593,-17031,1147,-8540,-24225,-13688,-1298,-28668,-28944,-28577,-7086,-32416,24007,-3187,-25919,-26148,-10079,20122,-2467,5137,18295,-11326,7403,5628,11319,-23382,10768,-10782,1162,8801,12229,20293,31469,-17436,2515,-2989,19727,-15096,12857,26507,-16353,28079,23594,24684,4565,-19430,-11450,-11945,-7594,-3276,-21179,20215,-7519,-10737,2859,21110,-16349,-5928,15638,12474,31781,-16494,-22984,-12636,-27128,8909,19253,-11749,-18851,27092,6936,-22555,-23077,-20073,521,-3203,-20773,22253,-10656,21774,27532,-5752,-3139,3879,11874,-1284,2818,9312,3605,-10626,13156,-17119,13976,5933,13746,5755,-10580,-12747,20322,-17679,-4658,16073,18767,-25760,-6804,-11687,8901,-26585,30611,-30720,21058,14328,16075,-2010,9838,-21460,28475,12073,1302,-15346,-3850,5919,25894,6775,-27483,-18012,16331,-27056,7552,-1431,-29238,-19605,-14895,-11831,846,-28702,-13094,-11638,-14158,10411,-18807,5953,7084,16417,-16656,-2681,-4443,3702,-17612,-10151,-12749,5598,15757,14496,8975,-24964,-19636,11494,8900,-27399,2393,12758,-11813,-22445,23254,16830,-7613,-10627,-433,21225,-13184,2953,-28174,-12195,12737,-20512,-6753,1117,16084,-3087,-4561,15768,8322,-9633,-12551,-2926,12414,1771,-7852,-8194,14081,-26863,-25722,-10851,15445,-10143,-12198,20787,10906,-22035,14469,14232,3848,-31189,-14643,12901,-27019,-12331,4951,-24768,-6555,12933,18492,24181,-
31340,3631,939,5588,-29703,15392,-16257,-1547,-29966,-27520,22863,-1295,5298,-7972,-29934,-12870,-18121,-23148,2449,-11155,2465,28128,6560,22581,24448,8054,10305,-3924,-25631,-19337,25589,-18703,-27064,20840,31373,-22760,12853,20996,-1291,-26043,24459,29351,-17842,21421,9071,24197,-12819,18503,-28087,-26279,-14446,-27779,-27458,-30117,-10875,7709,8058,10039,12167,31054,13892,26380,20488,19656,23335,-14825,-22950,5907,-7133,-22785,-32268,26397,-20837,27100,-24910,-8345,30851,8366,15192,1331,-6134,10455,20744,-944,7942,15316,27326,-8415,12782,-7698,-14580,-26230,12272,18353,-18433,29846,16168,4118,32387,-594,24136,-26437,4369,6496,32050,-8284,5233,-2607,25412,13544,17412,11539,3605,8144,12585,-20414,12049,7881,-11489,-17660,-32460,-1159,-20361,23072,1604,10343,27033,-3111,-23750,19242,22750,-5732,416,-21544,16487,-16085,25723,28739,-23249,7844,-22818,-2231,12091,32288,-27475,717,-12727,-28036,16370,-24091,15810,-6485,-7153,11100,4783,-19428,-12173,-1003,-27722,-16800,19364,-30216,24502,17832,-717,-21621,-21701,-5921,9966,27012,-31439,17181,-28317,14704,14884,9955,-23023,13798,5972,12147,-16375,-20538,-22048,7827,-1654,-7150,-4017,13822,5480,7980,-22674,12311,11897,-27476,-22696,-8691,2534,-590,22481,-14801,-18898,-2172,-30978,19940,-9049,387,5906,-17752,23852,-8438,-12618,8084,14079,-6355,15384,-13187,-2100,30217,-28627,14040,27927,12725,-15488,-14812,10938,28305,3113,17596,-28284,17656,-1987,29764,-6040,22163,10582,-10436,-31326,6437,1156,25459,-14163,-3554,-9729,28444,-7314,26640,-29415,-10250,47,11542,29538,19943,11518,-8209,8182,1022,30074,-4902,7314,9247,25787,-29874,-21605,-30779,-14007,3323,-6487,27512,5160,-7640,16068,-31691,-30883,-3363,28047,-22147,27586,13974,-26145,8089,13725,-15037,3868,-18313,12142,-10732,14163,-4438,-28790,10376,16612,18149,-6555,-25446,-18326,-30722,18939,85,-28049,25370,17565,8262,-27721,-562,-16446,-21683,-9736,-22612,-25313,-14638,4424,18686,29638,9700,-9868,6112,12218,5700,-22873,3910,20524,10273,-13048,-29584,-24893,-8457 diff 
--git a/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice1_input0_int16.csv b/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice1_input0_int16.csv new file mode 100644 index 0000000..5f40865 --- /dev/null +++ b/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice1_input0_int16.csv @@ -0,0 +1 @@ +14007,21386,11440,7826,6489,13320,-3709,-10926,4534,2769,19856,-23542,-5985,-16486,-26452,6810,-9806,-25286,15898,-29297,3687,-28262,-11972,-23892,4826,-24412,-8368,-5594,13752,-7950,-13203,24844,-29503,-21603,31816,5183,2482,-32347,4064,-32019,28704,27489,-766,7311,-6384,-2653,21385,-26562,28993,25621,16475,-2018,-18326,-27580,16788,32369,25097,-26599,-19725,-9351,-17523,15861,22063,8228,-4483,-1068,26208,21516,-19600,-9829,4445,-23869,-28801,25045,-2910,-15284,30519,9953,538,6554,-22956,18384,-28456,6849,29119,15726,18059,15107,-26510,-27350,-28861,-11923,-27891,-17676,10058,-24673,-15846,-10419,-5666,-11118,27579,-25281,-15302,5158,14871,20077,22257,-14489,-19918,16220,-30002,-25262,25873,27625,-15621,1929,7480,29904,22410,27941,19003,11132,-8082,15676,-17908,-15342,561,22650,-5131,-3270,-27942,-8586,-29216,-13959,-19675,-25610,23743,13701,25401,-18795,-7897,-18170,1355,-4983,-25697,-11976,-5527,-12394,25945,9617,2093,-13720,25895,30572,2213,-30489,3056,-16102,-17418,2707,16031,23268,-24046,18731,9240,30732,3350,-18371,-6960,-25948,-1756,-21432,15131,-16781,-31823,22616,-14964,-14543,17987,-3406,24909,-13573,653,26783,24194,19317,27330,18385,-1209,3943,28707,-5141,13654,20626,14822,-27129,-29077,31882,23079,-29764,18329,29914,-841,27216,-15979,-24908,26682,28653,-18632,10131,28819,18676,-24920,3794,-7199,-40,-24762,-10429,20223,-22610,10491,-8574,-4186,-25906,30585,5734,-16296,-12802,-25882,-27114,19482,32217,-26242,4070,8774,24235,17205,-22608,24703,24901,8766,676,-27090,-25875,15174,28374,-6804,1274,428,-9840,7925,-7689,-21444,-25208,11763,-6946,-10937,8160,25730,-5050,-19604,-12346,23293,-2330,-2438
9,-21640,-22600,25647,23597,-6385,-16333,7602,26998,-26646,-21331,-28765,-11599,-20812,19432,-32477,-21088,6959,-24944,-32259,-25571,-28678,-2452,2133,3103,-22774,-5276,-9227,23486,17461,3946,-26148,13758,20288,-30716,-14767,17069,-5809,-23700,-27932,3933,-10647,15531,19188,18779,-2601,-31980,16410,7110,26943,-4866,31709,-22306,22782,26119,-16696,-24783,-20046,14179,-8131,16447,-13930,-27002,-17612,572,-30134,27181,-4515,-14739,-29418,12230,-10408,25562,18596,-27626,6190,-3471,-2681,32642,26471,29987,10210,-32347,-17185,460,17667,-20937,-2478,22544,13406,4024,28860,5278,10989,-31525,-24813,-23305,17685,10106,-2704,-9819,28105,-11094,-9839,-7551,-3922,6603,4971,10911,31761,18982,18215,-7533,6409,-11835,16931,-31849,16329,31525,-326,3788,-26852,-16571,-18818,-2184,29988,16151,-249,13068,20061,28958,29402,-31794,10452,30329,12907,9989,-31182,-15317,-7467,11317,5914,5200,25844,18287,6944,27257,-4706,31968,25283,20178,22248,17195,-6742,-6912,8334,-19820,19771,26333,782,14974,29155,1523,-13296,10233,31301,16970,-709,21841,11643,-23555,23247,-5076,-657,-14601,-20646,17870,3526,20813,-4408,-7293,30671,20737,-5887,26041,5656,5211,31332,3989,17382,-31547,-16490,-31222,-1809,7131,-16101,-18220,28814,-11110,-13146,14304,-5609,19676,-19706,-2049,19128,-6490,1065,16462,-10433,7656,-26266,30875,13829,6617,-18304,-20094,17931,-7215,-26675,-20438,-17571,-2064,26708,-12116,17778,-30129,16360,17289,31479,27902,-25876,24862,6067,-12031,-19201,-10770,4475,7709,-28717,14258,-22318,-1966,4272,26633,17148,8148,27756,19366,6254,-22015,-14643,-22650,-9350,6625,-32232,11790,-28771,9255,21373,-21754,-23227,19607,-9486,-14685,-31040,3907,-22742,4633,-21755,-6085,-10696,-1087,8685,22696,5753,28010,20773,-135,21610,30388,31404,-6342,-25803,-428,30595,-8333,-11785,-17072,5597,7216,-31912,-7203,-27971,16288,-23448,3582,-31768,6277,-14455,2527,16286,-29713,3992,-11357,-4872,-9086,20964,-1571,10135,-7255,26387,-14572,-24484,-6800,-19536,-13186,32673,7915,-22193,28168,23992,16141,-31894,17821,-31426,2
5476,-20912,-16892,-10241,3196,2809,-8593,-3805,45,-18794,-6753,15471,30685,-17573,-7349,-30693,11642,32428,-27957,29295,27226,-27102,-14779,-30304,18070,5749,-16051,7829,16359,-27904,16761,10688,13328,3055,30054,10186,22952,-14658,-27027,8098,-27520,20347,21449,-29805,-1639,10775,27104,-10450,-18442,-17023,10361,-27572,23542,27277,-31047,32764,16393,3151,-22745,17045,26833,-7772,-29108,-8435,5762,31260,-1680,16990,-270,21712,-11413,26602,27475,-23544,11727,-32440,29582,-12812,-3001,5758,4464,14123,14153,31359,-58,-19759,-13081,16733,13926,24923,-10522,-5486,7914,-5083,6558,-27749,6537,-26626,-6342,31148,-16260,-21918,26289,-19706,-23976,-11755,-4029,-17590,9709,-26368,-32385,-21662,7259,7364,9887,-19773,21414,21347,-11499,-1993,-26491,2671,-18883,-31329,-30385,-9146,-3747,1075,14258,13769,-27571,-11914,5591,29667,-20816,8779,-22206,4785,27993,-30931,-9556,14767,699,7299,19968,-13016,5409,-19627,20390,30402,-11238,-6756,-11947,-7238,30020,793,-19848,-28457,-24038,19188,-25980,17972,21284,21307,1266,-20655,15247,25871,18762,4464,-14280,11119,-27346,-13809,4488,18942,6524,20604,-7093,14823,-10079,-14481,18767,-3531,26369,11997,-4804,-4254,-29868,26593,10912,16633,-505,4403,2091,-25389,-17809,12269,6936,-17984,27046,-2917,19176,8056,14605,3930,10542,30731,11360,9284,-22563,-11310,-20592,8308,7884,643,-5250,9840,30256,383,8612,-7499,-26665,17995,-565,-17566,-7706,13126,-5875,-17620,2262,-19388,-19386,-32169,2597,-21244,-30517,-20460,9824,-12148,18425,3299,-10125,-20676,28504,-23038,21005,9888,-10741,-11954,30856,26176,-7013,2224,-3556,31474,-4132,-24202,13132,23036,13177,-3292,-12339,-14752,7010,-25586,-26905,-836,-17817,28382,6323,-19898,-5416,5402,6352,-11083,22618,-7281,28309,-29326,15734,10592,1154,-26477,-16520,31475,24913,14192,-9210,27700,-12431,19496,31859,-20156,2453,-23358,-7988,-24749,-5883,18635,9166,-23245,29552,13848,-28136,24378,28071,-12635,6518,-12560,30482,25144,-21313,-7400,-19318,-7178,30000,-13634,-31604,-13269,28626,29756,18543,26017,-1862,1155,-25
954,2724,-6609,31705,10368,16139,14864,-2396,-24296,-3080,-20767,14622,-4305,-30190,-29841,-3522,8164,-17508,21239,31696,18581,14182,-29865,-28293,16960,24494,-17231,23382,22308,9637,-19661,12352,-481,-25182,19147,19617,-32304,-20309,-17,21329,4080,-2595,27534,-2624,22158,-32189,-30813,-15693,-20016,-13509,9775,24141,-5386,-21609,-19020,28054,-6192,28120,18304,-12087,17257,-15230,-23531,-20076,29971,-31605,-15,-16240,31649,8216,-4998,18732,30197,-22556,-4658,31055,-14515,4126,-4243,7520,28360,10666,-10820,26603,11419,-21405,-27141,2287,-16353,19706,7383,3098,32314,-9043,8413,-12026,-27466,-14419,-29523,23757,-3200,-8226,-31185,1230,-20440,-17918,1136,-1536,25230,-23635,-3112,-2848,32537,57,-13835,-3959,-32151,2450,-7813,-13365,32501,-7655,-26461,-14651,16930,-1155,-23929,-17944,-12170,15989,3094,15435,-24290,13930,-1931,5061,19493,17794,13169,-29038,16479,-28732,1092,10995,3613,-22677,9686,-18495,-31416,12573,20160,-31997,-6464,28369,18237,22329,-17217,24592,30883,-2657,17280,19482,30125,-1888,8820,-26519,-17278,9094,6622,-25920,1656,14184,8841,-9180,30793,-2143,13460,-742,19230,-21908,-32163,14166,20622,-3432,6673,-15719,24703,16996,23672,27994,29450,-12372,-7168,-4629,-28738,-12464,-7657,1140,17993,-8946,-19290,27151,26756,32396,29319,23551,13530,13871,-13905,9258,-4548,22368,17935,-4174,-17474,-25165,7419,2617,32748,-7113,-18195,5014,-6330,4934,-13484,-22272,26478,9264,-7282,10078,-2728,-29866,32218,27983,-29652,32718,3815,18261,21335,18357,-22206,16255,-30541,-31971,28511,-914,-20627,18646,-5166,-6229,2507,13635,18218,13543,24337,20753,-15433,-1760,-25496,-4699,21398,-6355,20226,-7845,22704,17166,3242,7254,-19129,2932,16313,-18717,-32593,3085,-18834,28320,-7669,19616,-5201,-1098,-8220,11678,21069,343,25392,-29530,20114,-15587,-14233,11446,32028,6190,14401,31523,-4988,2773,-3599,10161,350,-17140,5101,-27685,-25276,-12915,716,-14961,14088,-19721,-5478,-23609,-18024,9588,-17918,-2163,-28507,6597,-29775,23780,-1502,8851,-18100,22610,-22180,-32568,4502,-3359,4026,978
8,2242,-22034,-5070,159,-5806,6443,-30093,7148,-8398,8839,-1040,-28091,5893,-29662,-32308,5205,12150,7520,21954,-14515,-25980,-31292,-16312,-29388,25316,14036,18742,-31803,-22456,25739,-7550,-26823,-29340,-22400,-23145,14723,2327,-15640,16964,21903,120,-30092,-11890,-18403,-9159,32455,8910,13432,7379,-32249,23754,30438,19895,4511,17616,32017,3226,-12164,3772,-19450,-19196,1929,24006,22446,-27641,12016,-18605,7038,21921,-16089,-328,30278,16660,-4880,2707,-23416,-5272,-32766,-20390,19537,20624,-24064,26534,-25789,-12102,-25588,-4670,2515,6843,25943,27781,-25428,-24047,-4356,-24976,9489,3339,19477,-4694,-7742,14874,-13932,-25844,-17920,2394,5746,12934,28703,30796,-20116,3521,-10307,15883,30168,4438,-22448,29858,-12575,6070,-28163,24367,30827,4400,9494,-29235,-4577,5716,15032,-18421,-13291,-23515,1485,24011,-23482,-18023,19876,-5284,-27559,-18929,-29174,27698,9843,-24401,-21669,-25698,-31834,-10817,22603,-2933,6419,-30297,23881,-3018,31525,-7144,-1653,-18070,-12997,-9454,-30472,25135,-9551,-11387,-12366,22535,-17344,27105,-13651,15268,30589,-27061,-7806,28232,-7595,-27455,25684,-29117,-6505,20148,-11745,1935,8835,-21395,-32018,-26192,31580,17117,-26424,5163,-3837,13221,-5048,5765,-7693,18099,-23371,6517,732,-16120,-32090,-27745,-3685,-21515,28832,-6042,11784,17925,14060,-23477,5010,-31702,-6371,-3936,-16676,-31640,13084,-11814,550,3850,-12700,8141,-18580,-25548,18450,7921,1970,-22946,-12955,-10447,16888,-4433,7643,31069,-9910,19787,28341,-20495,7867,-15702,26464,4374,1368,-15515,8077,-31733,16135,-14837,9761,17996,13051,-18724,-28110,24025,-13949,20501,27233,23179,-13387,31566,-10128,-16189,7480,-26218,4691,-113,8903,-26904,12788,25999,-2403,26711,1722,-22766,-4022,11453,-24844,-24144,-23223,31793,-2549,-7637,-3038,22745,1117,-4879,-502,16308,-8179,-16296,30342,15960,-18847,16957,13130,4152,3611,-3282,12334,-17182,-25998,19432,-12512,1245,12005,-12849,-348,18273,20777,12226,1413,15854,-31204,7876,-13139,-4485,-6331,-15051,12866,-1403,-13506,-20934,-20693,26948,-28761,13
916,29348,-2556,-27088,25043,-22460,10863,-24829,-27811,-1723,-16833,20537,-25836,-6174,29173,-8004,32188,8614,5512,-19213,-26794,6504,-3959,-10063,-22075,-3256,-10568,28303,-14239,-30354,-10946,-28528,-9833,18501,17077,7325,-18278,26893,10029,11324,15920,-24474,-12637,26481,-9520,-9173,30053,-13357,-15413,17482,22028,10471,30338,3484,-16084,-2977,20799,15126,-19306,-12984,-15666,23848,19950,3033,25759,-1896,23375,27026,10680,19095,-23812,4745,-1798,-14949,-14895,14593,17252,27727,-26709,-10703,-19825,-6868,8481,-7350,-30971,-16759,21459,-21657,24721,-28881,-1450,27124,27385,-11928,20222,-11172,-17674,27224,-10070,1615,25231,-22410,4228,-16044,-14868,-2624,-22920,1423,16813,-31922,-5852,-31994,-25286,-29115,27045,-22273,2320,4469,14415,15429,-26157,20392,13994,-21717,16340,-29334,-24387,21527,-20499,26051,2656,-17986,21788,-30911,-30500,19285,-2799,-20103,-1695,-15067,-31486,-10774,-21223,-9318,7335,8676,-31708,-16721,32055,6887,901,19395,-1187,26369,-14538,6570,30553,9353,8233,17678,-19888,28432,10360,13321,-10077,13210,24936,20725,-13500,23233,-18927,-25160,31959,18903,-9749,18253,-18503,-4136,-7155,12830,-3674,-26235,-22794,5298,-26624,-27255,-21789,7122,-21784,32494,-23694,-20439,25783,8745,13771,-12714,-30531,12553,15434,2428,12972,-26652,-15230,26887,-20532,29572,-23190,-23250,-15525,-29628,-29989,10908,-8614,-6145,7617,-17258,26187,-5224,-26154,31143,-23963,-31284,-1584,-31035,26736,18942,3558,10734,6885,-21278,21256,-20554,9644,-13720,-1565,26736,25475,-5249,17017,18355,-24035,-25342,21581,-24397,1639,9606,32060,27810,8554,-6023,-5532,15668,-16764,-13796,3577,-22804,23141,19783,-26047,24887,18037,5630,-18817,-29826,-21970,11222,21903,24981,-7934,4636,-18933,27234,-17111,6114,-3531,21345,-13875,-19822,-26368,-5360,-3489,-26983,-11555,-19914,-32474,6609,44,9346,-3508,8231,-30879,14327,31510,10472,15303,19381,17548,-20906,-9282,-2784,-29217,-27614,-8460,22696,-659,-10776,-1377,21487,30699,31480,20655,11388,30854,25464,-3208,-29911,-13317,23423,9423,2631,19643,2
1180,18507,-20922,13286,-3855,3412,-21895,24591,751,-19507,4585,-15752,925,-19680,-18468,-13240,7024,-28973,32209,10790,8106,-2042,-11649,4054,-1499,-28590,30645,-13307,2351,28976,-25581,-21460,18865,22631,429,25732,-5522,12378,21492,22218,-27511,10327,-32074,2870,14765,-6225,23651,6034,5997,-32371,-18702,25346,-22870,-29051,-1764,-7553,-10840,14281,-15199,-17109,-22214,31236,-3954,2005,-10054,16648,31407,-10883,640,1772,14643,-20542,-24129,-20564,18125,-31589,-10083,-23132,-19659,3884,15151,-6636,-28880,-4350,28119,28813,-21863,-7717,12077,31045,-15953,-17495,9260,-17912,14112,-27594,-5874,-26412,31567,-18077,24541,15329,-22503,-25120,-28447,4183,-4446,-5210,4210,6246,-1558,-23554,-30170,-12992,26661,32153,30438,27788,-1187,-3565,-27726,-1571,13568,7671,-14954,19109,-21913,416,-14497,-9695,17382,26681,-4071,11754,5556,22754,-4879,19064,-3856,-32226,18768,-2597,-16572,20165,9466,19006,-17219,19690,10470,28565,-31379,24189,-624,8747,-14652,-2636,14247,-13043,-22727,25300,-8189,-6719,29203,-357,22976,14583,-31205,-13845,-15627,5783,22705,26427,22849,-32292,10141,18577,8977,16327,20959,12307,-20085,-3055,-535,-29157,-11773,-4160,-26059,-13222,7201,-7197,-18886,11319,-6310,4640,-26116,-21982,5501,-24838,-10725,26079,10687,-2384,23251,12498,-16107,-7869,6681,-150,15501,-26257,14575,-7948,-11636,-10702,-23480,21119,-623,3402,-14123,11014,16653,5398,25325,2716,6831,20695,-23741,-20879,2842,-20881,-12713,7226,-11385,9825,10197,-25669,-2663,-5499,17298,10611,-20798,-2455,11569,-12983,-26204,-22809,13688,-16458,10164,32635,-31946,-23790,-26182,-19536,18718,-28055,-17770,2396,23665,28937,-8688,30643,-400,12047,-3012,28936,-5318,-9742,-170,-23150,-15521,2183,9098,-22760,-25873,11713,14408,-23841,-27519,-30097,-1551,-14837,16127,-15643,-17366,17498,13933,28291,9626,-25639,25256,-32730,-13549,30168,-21875,15323,-20303,-13321,-18787,16784,-23587,1372,23352,498,-30123,13033,29889,-26890,-16680,21612,-30341,-2873,359,-4830,-25411,-5536,15639,-25260,28610,-17686,-22579,19697,-18762,1
120,-17638,6419,-11591,-10730,22692,27875,-5607,-16615,4436,-20644,-12142,8293,-28836,-27200,-26638,11761,22734,-12323,15889,-19033,-9716,-27028,15166,2087,3091,10457,21831,18573,19591,5725,11086,21112,-18709,7715,-30320,-4489,2089,26634,8223,-20692,9378,-20230,-9200,20550,26361,23665,-25045,28830,14389,-22804,-20184,-10345,-24378,4445,-23491,9667,-13636,-4763,-5748,9644,27377,14611,-657,-6562,-2851,-32101,-16768,-25026,-15363,31416,-29687,17213,24146,28986,-29825,18693,16198,7589,31828,-15034,-20359,26278,-19981,-20375,12863,-7563,9208,-3621,31956,-2206,-23758,-2506,-9906,-6474,-26054,-9059,8201,11004,-26630,-16767,6483,-16463,-2913,-2932,-8067,25160,3890,-27165,-26887,13379,16505,13514,30588,-13727,29218,1156,13667,23710,-28685,-21589,24589,-25366,19693,23706,-8479,-14268,-12252,-2520,-27913,-5099,23090,-14356,32175,3345,31845,17081,-11774,-26845,25380,-7346,-3289,9421,5566,-21907,-30658,24121,-31867,-32437,-7258,-26349,27190,8880,-6961,13014,-23567,13776,540,-7492,22403,-9336,12319,22131,11565,-7099,23170,-22540,28229,-14584,8831,-32592,-24489,-26587,-31113,22043,-9424,20904,12190,-5480,-3117,7715,30583,29413,14338,7940,-9599,30485,-13212,15133,-10848,-14833,2231,6962,17816,-25862,23979,7391,26434,31731,27849,11990,-26935,-24384,-28747,6317,-26270,2291,28901,-26502,-24984,-11028,5526,-31274,-20553,-17887,6846,29033,-12939,-31001,3819,-32649,-16282,-31202,25740,-4381,-18173,15117,385,-16265,-20156,-31443,17634,3588,-8551,5247,-19831,-6699,20739,-24031,-19719,3731,-31982,23292,25151,-21329,-26658,-2782,28977,2092,-7964,29402,11683,20395,22421,-30237,-27240,-2368,6555,-25185,-28556,3723,8418,-24526,-265,-32521,6251,-19176,-2258,3261,-25137,-15335,8191,-31257,19307,-7404,-20511,-1918,-9727,-8314,1026,-13841,4525,18719,31500,-4558,31383,13458,-12520,-1189,13446,-15850,-25804,-17664,17538,-27590,-15077,-2275,-24431,-19614,27192,-32417,20925,-31890,-11630,11153,23076,-16443,-16748,-3518,-4782,28065,-29238,-8561,25176,-29870,-11908,-19658,-2684,31754,-6003,-9788,-19055,-
5119,-24431,-6162,-11199,-22498,8719,-27356,-14204,-8409,-15874,23193,-30372,18399,10536,-2436,29855,-32742,23463,-5722,-29037,-16980,-5222,-27465,2374,12513,18717,21494,-27274,21120,13127,414,1709,-20128,5803,24006,-32067,3224,30139,-11281,-9828,20520,32189,11159,27716,9936,-24085,-8683,-8049,-20021,19544,12242,4532,-3208,20058,-5943,13429,-476,-31231,-11041,-10344,15525,11727,-27139,-26356,-13996,-22195,15370,-13741,32131,8129,24642,-10958,28911,687,7037,29019,16780,-5690,-15899,-10389,-21037,-10163,-2651,21485,12538,18846,-28430,21949,-10439,-2274,-7734,17435,18015,12372,4215,2241,-790,16738,-14203,-8436,-9316,25623,29383,398,-31840,-19519,21037,-9954,8186,28079,25865,2492,27851,7132,28644,-26962,26039,-26152,-17952,3005,-2787,16162,-24608,-18938,3311,22749,-16889,24025,-27035,-1939,-12048,7228,3921,2871,-14670,-21672,26543,26398,-19324,20750,-2217,-26205,15176,15160,-3828,14639,-3583,28313,-1840,24761,20695,19204,19303,-14929,-6332,24029,14831,-3865,30504,20538,-7078,-14027,-544,-20000,28007,27851,4432,-16478,25242,1943,-24758,-8603,5276,-20766,-3154,-1180,-2793,-27467,-18460,2363,20617,21331,23253,-21309,-31713,-6277,-9113,21094,-1797,-19867,-1538,-6904,-23135,-21125,31763,28041,20785,23750,-13665,-31560,25031,-22132,27087,18676,25222,-21890,32375,-32022,6240,-32378,-3351,4479,-12815,12623,30300,31421,-13434,6042,462,-10989,-3899,-18955,-9633,30703,-12079,-14427,11926,3438,-24202,-24154,5034,9378,-919,-3054,5854,-24250,-24021,-6234,14360,1736,10630,10159,-18296,2874,-30041,20766,24011,-29942,26734,15381,11417,-13388,-10274,-30063,-17494,-11501,-4402,-11564,28578,30069,-12773,-20236,-32173,-3239,-10319,22456,27240,30801,8615,30592,22150,10470,-22662,5565,-2537,-26268,-26831,-27768,27345,-2726,10952,30820,4663,29841,-11225,32304,19069,26660,-8132,19381,16422,30857,-8724,-14249,21082,15421,-2626,-7228,-17861,-14028,18701,10581,12858,-32202,-10606,16774,-18698,4529,18669,-4865,-26160,-28985,6046,21929,14208,8599,-28416,-18909,-2315,-23417,23145,-25309,13391,-10166,
-13855,18275,-4029,31817,-2437,11332,26096,3210,5649,-17634,-14839,19139,13633,8999,27188,28784,14587,9794,27926,15436,4912,25978,31312,12040,-1896,18825,12618,-29982,17185,-30204,-32619,-4699,-9914,13660,25442,-27049,-27458,29161,-28404,5249,31730,27177,-23320,-16192,19488,25184,-22950,-32701,21753,13564,27397,29451,25656,-32667,-15176,-24305,-32332,-12154,-10507,-18278,-26694,18381,-25148,3348,6855,2142,8975,-30245,-30580,9898,3921,5708,-15713,-9355,-10166,28670,7825,12301,-13238,29527,9047,31634,-6133,19913,-20045,-27686,28708,24755,13473,-30452,12313,16163,-2426,30975,27658,2063,10327,32096,20109,-895,-11834,20517,-28418,-27856,-20712,-23362,12879,-1692,10620,24840,-1904,-21550,-13594,-28658,-25367,29427,-32089,-22035,-8250,15522,15696,-27253,1517,-27605,-17229,-28324,-14213,-1645,-7823,15655,32563,3113,4746,-13463,-21920,14562,25833,-22721,28111,-30004,20007,-10683,-30345,-13951,-22211,-4036,-24884,23338,4662,19880,3230,24779,6640,679,-18971,-24317,21165,11185,6739,-1111,-25134,-24095,-30541,2214,-31725,29272,1398,26360,-15616,25933,-9664,-16144,-30310,27916,-14144,25045,20250,-24133,-371,13735,-28482,-30145,12292,19639,372,-20194,17962,-6186,8060,-2978,-30425,24093,6353,2820,18191,25097,-25779,-25136,-22129,5682,-19675,-27259,11998,-7613,-21887,-19392,-9584,-1085,-783,-1985,6043,11980,24506,7673,-20486,30084,25370,2861,-14360,32025,6474,-24181,-4662,-22572,18552,-6410,-11306,-2123,-11974,-9901,-22810,11265,-1171,6608,2752,239,-16625,-3226,-4515,4789,30565,2447,-25575,-9303,-3806,-25538,6662,-3325,24139,18031,-23992,1797,19169,-18412,-3001,-27465,20543,-2384,-17276,21590,825,16656,-22470,28243,14498,16788,-27113,-19527,1126,5905,-3172,-28980,-8943,31764,-13634,-10114,31805,-6297,-6460,19806,12255,28597,-6247,-17469,8098,-29905,3902,26345,-15925,22921,-10095,4276,5089,7656,25144,16605,11239,-29980,-4938,-2282,-8816,30101,5314,-26944,-30154,-11585,14112,17409,-1570,6262,28952,29737,-7102,26756,7886,-2217,-20692,-29251,-27611,-23832,-17724,9084,-2841,11048,-23660,
-18937,-19652,10422,-32450,-8337,-17481,30445,-13857,-6470,-31326,6702,-9496,-12679,-32714,-8521,-11598,-16154,-18963,22621,12781,-9536,1814,-11111,-7865,15725,-25112,-165,-21519,29963,4743,32045,-25424,-7880,5211,15237,-21990,-25204,2679,31548,27866,-26543,-7525,17437,-3460,-711,-20000,-880,-17036,-2782,8699,14176,12185,-25012,-16178,-26024,10122,20845,6120,-16496,1324,20827,30842,-15077,-16378,11052,-8297,-29583,7837,29957,9390,-21390,24156,12925,2374,26685,10570,23795,-27157,16036,11640,9014,28710,24386,20154,28332,-24061,6242,-23273,-1620,22905,-2330,-23092,-24237,-1913,-8371,-29283,-22601,-26090,-26182,-12556,-31483,-9016,2214,22114,15943,18063,-7573,-29759,98,25418,23650,25343,24639,-23123,11893,-22079,-17323,-20790,13082,-8992,26810,20952,-14537,30706,-7857,-14960,435,6690,8560,-18359,29573,5477,24882,-10419,-14227,2280,-13318,-8719,-31993,-11363,-13038,10450,27825,-18909,24522,17078,12489,24940,-19590,258,6767,-5092,22873,-26358,7918,-9422,29997,-26290,10545,-7284,21876,-14340,8249,-18201,-27957,-12701,-31521,12648,-25561,13476,-13397,14887,18549,-2903,-483,23657,-30749,18246,6185,-28952,-24586,23708,-16857,-7302,10645,-6675,-24596,18723,-24709,-29224,31622,13155,-7126,-14861,27589,-21821,19852,-12492,7736,-13133,-19627,-13805,-20439,-17374,-16218,21618,28527,-3438,30127,-11727,-32120,-23925,-20528,3038,22550,13801,4997,1109,23378,13520,1865,2409,10441,-13621,25572,653,-7320,-17328,13942,304,11008,5150,15252,-23462,-17415,-29907,-19179,-18537,-30684,-4937,4486,31155,-16999,-9814,-2787,6608,-13883,17274,22207,-29716,-27687,27667,4921,24294,-18421,32414,-15416,22212,-21183,6922,-4529,4077,30135,4280,-4932,-10758,12906,26291,-8879,27171,-20707,24261,24699,-23062,5594,-12280,-11404,17554,27748,-10326,11843,2213,6941,-11093,22178,-9652,23521,3382,21339,21565,-14590,28179,12932,-14192,1219,24039,26359,15522,-25433,19567,27915,-8194,-9558,20454,11797,-13268,1880,-19793,11663,-591,-15988,-17156,4780,11127,18735,24732,29958,-25294,4540,-9621,26104,23282,-28072,24953,
210,-26907,22591,15376,3109,-30850,29546,7607,9532,-11545,-16937,20495,-99,-8388,11967,12250,-29377,15138,9967,9007,6736,-1579,8551,25278,30007,28319,-22222,-18261,-5126,10479,-28216,15986,-31849,-9804,-22021,238,27629,-247,-25002,-6503,-24719,-2298,-30180,-5018,16275,24537,23310,9544,-16176,-9690,27689,-32145,-2547,2434,-24766,11219,-5299,-26357,-23149,23859,-15138,-18415,-29775,25920,-7864,7975,-16967,-4813,-22294,-23533,-4201,-29280,3856,-6568,-28343,32143,710,-12367,19213,-18069,-19929,-19020,12375,-17376,-31259,5425,-19144,29826,-16391,28783,-30257,7947,-29930,15541,22545,3833,12022,-13636,5420,5667,500,-26947,-1131,17950,10675,11105,-2041,-1319,-31196,24589,-32533,-4072,-20225,-29884,19785,30804,-28207,-32238,31018,-6475,-8553,10140,-25803,-27084,31994,-14692,1201,-26485,-22268,-7086,29920,14744,-19496,14726,-19358,4595,26806,23221,12846,-3037,30145,26265,-10758,4874,-14389,13728,-26739,-31684,20997,12348,18467,-27339,-20770,-2033,-4559,-4321,31853,32670,-28895,-31080,-22604,-9176,331,-26105,-21120,-31756,-31448,14178,10788,-26994,-4423,5884,-31112,-31114,-6552,11759,28393,8894,22993,6358,28057,16234,8360,17048,582,5530,-22351,-5779,-3156,-19481,-16312,6412,-29843,-337,-19640,-28253,-18928,14580,19801,24759,2365,-3262,-30878,4131,-5613,-11299,-125,-4771,-29161,-20411,-19487,-15490,-18080,-27493,32308,-3652,11105,21646,26698,8125,26957,16952,8013,6227,-30875,-1165,7411,-13457,22399,-3507,3311,5045,27227,-27262,24411,-11221,7893,-21119,-31797,17874,6389,17833,10193,31521,13601,21567,13653,18055,-8868,-23715,24981,-18634,1773,-11305,-13975,10239,-2280,11648,-198,-12515,27387,25346,13833,-8437,-19387,-29781,10440,-7875,-1126,-14020,-24605,30676,19662,-11251,-4324,-2024,-13940,-15096,14032,-30600,-7765,-25368,21560,6264,14748,3124,-19911,27075,-20816,23480,-21693,-24779,-16323,-12733,17614,-9018,17797,7968,-11409,-21945,27479,15840,12271,-8187,3297,10441,-5167,-8659,9813,-15617,28513,-14577,-29090,26164,9504,-20368,28311,19217,15811,6823,-25580,-6303,-20036,6501,27
847,-25467,-3256,4249,16167,12779,7550,23313,-28042,11271,31132,-2561,11523,7722,-14784,-11793,5097,-22081,-28223,5507,-12930,-5136,-28821,-20649,6671,6065,18485,-27350,18105,-8512,-11921,3366,23644,-2994,1588,-11520,15867,8699,-28676,-12997,11970,-5924,-21435,27378,24125,213,-8626,-10103,-27429,17289,18850,-19523,13340,-26517,29785,-5052,-6226,-24115,31842,27326,-32348,5174,-14063,13582,12789,-12551,27172,-1141,5426,-2057,-15347,6848,-24986,32749,18622,31817,16496,-4444,-14590,-14257,15281,27719,-4752,19597,-20269,24957,31977,5599,10363,-2081,-9431,12690,3251,-1214,-3321,17999,28578,14030,29482,-23898,32308,18134,-4340,-19823,-18675,-12817,-2395,-7724,-32178,9396,-21163,-26219,8844,17397,-28337,-21316,2251,-7002,22301,-31863,-22413,32442,6100,3032,13022,31477,13814,24466,23466,-17004,28382,-14585,-29608,-15305,-27078,23386,28961,21232,-14399,6973,-17695,-4186,11207,-30446,23744,-16702,20603,-24308,-17134,462,-7523,5581,-32316,-8648,-27847,5314,-1828,-6795,11651,-26270,19602,-8336,26328,-2269,-23452,-7513,2861,-8076,7568,-2338,-16326,15427,-13246,13579,-6059,25394,-2502,23494,-44,-27259,-23134,13079,16658,25565,3106,18426,-4500,28415,-16846,26992,-23249,32073,-18056,23743,-25739,15528,3222,-19280,-2537,13368,-3339,29978,13580,-16045,24732,-21225,4725,-1831,4469,19932,-26733,1429,-14120,23687,16867,5414,15546,14157,20298,12571,16995,29535,-438,-12986,27749,32568,-25258,15054,27087,-29016,20061,441,27299,-6979,590,26256,23336,31946,27338,28092,6915,7361,-4758,2978,24952,-7945,-17433,18800,10088,-19376,22166,21936,491,-24128,7629,-26666,-23670,14222,24759,-17102,-28282,4156,-3706,28616,-7499,-29217,-24299,-21276,-16190,27965,2810,7569,-22430,25591,4203,14446,-17872,-13099,-22155,-24935,-1995,-8952,27014,-30136,-21893,26713,-9628,-9948,-13122,-17063,-30363,-17879,-26556,-12507,-13354,22819,-30089,-19401,16518,-17636,-13036,-9152,-9491,17649,12173,6095,-181,12628,-1616,27051,26817,-17236,25577,-8786,-29586,5025,2541,-19012,-9832,-8492,12086,-15092,-25657,20697,-7652,3103
1,32002,28537,-21313,6214,-11053,1515,-5562,-16838,-8219,27,-26180,24439,-19911,8559,17592,-23158,18172,10443,19927,-18357,-19684,-7702,6807,2126,-27520,15630,16524,-15835,-19459,-15399,-4978,-20526,27139,-11105,21139,-20643,17178,-9151,17363,22213,-19172,28206,-28570,27877,5428,-11541,-896,-29922,31042,16721,31369,-23088,-439,-6045,832,-27053,-26876,11351,-7546,-11506,-30413,-23791,20694,-23827,19877,-24841,-27866,-15180,-12807,22445,15900,7172,-12400,-26786,-30608,13541,-18084,13279,31014,-1608,-13685,14389,-12366,-14754,28760,-24896,-2463,25748,-24986,-15336,13222,6798,15330,-29376,-3190,-17817,17356,5096,-198,27923,8544,2070,-31526,22169,-22099,-1974,311,16176,-1695,16365,28788,-8660,21082,19372,-22888,-22222,17449,29969,-30385,-19084,20021,15888,26763,-24955,30324,-11871,31825,18417,-10541,-379,-30404,13662,350,2379,5954,2899,13481,-31208,3232,30064,7736,27909,-23104,15970,29064,-17202,24322,-29229,-31563,-11511,4916,16178,20636,27569,1491,-889,-31020,-29755,-8917,3159,14644,-3480,10394,19592,-28295,-26345,15123,29044,-8785,-15896,30698,-25722,10304,-28243,-19238,-6004,-5836,-18778,24785,10660,8813,24632,-19488,-20804,32224,2280,-17130,-4880,13458,19407,-31436,-1647,8180,-8542,-18973,-18589,31764,2715,14255,-20834,30706,-20102,-3531,-19793,-22807,-29681,20367,10160,17043,-18012,15264,-12832,-18739,15315,1924,27158,25404,-3005,-28154,15581,-5724,-15375,-30326,-12879,28560,-18817,-25362,26640,20106,28180,10992,12012,7464,6260,18880,8895,16818,13879,-21984,3226,22947,-19663,28992,-24298,15070,22560,-7739,4430,22404,-21202,23623,21892,12984,-26387,-28202,15617,11619,-25045,16163,-1290,9304,29485,-8347,-13158,-19035,31929,20698,1546,30987,-10217,-5032,12095,3420,-1223,-6986,-16171,10139,17019,-20107,-7763,21634,21859,3105,-4383,-2881,7317,-2508,28457,18062,13209,8330,-6312,32305,15397,25925,21648,17977,-17737,-26939,-20414,-7645,-28532,-7728,-16933,10052,-25924,15688,-11251,2162,-18747,-9713,31692,10974,16592,-27432,20090,-32409,17614,-30116,27794,-30100,-13648,1156
0,19774,-26406,27004,2224,17042,3032,-10870,-10640,-26414,17226,-5783,-20272,13240,-1713,-10423,18385,27056,-16688,-27959,-16229,9584,3651,29004,-23045,-12345,-3075,-23978,-9779,-13349,18555,25752,-30150,-8716,-9438,-16738,5552,-29537,2533,-29087,-8003,28725,-24664,-17268,-7813,-15087,-16044,-2811,22446,4729,-1702,11283,16785,-7370,3733,-26288,-24933,15801,-22660,11574,-32361,31112,24339,-21157,10115,7197,-29551,9913,11146,-17620,-4194,-24970,-20548,123,-29441,28784,5393,-31947,-27352,14769,-31482,4402,-7149,-11448,24201,-1132,20015,-5701,25612,-31703,17230,1161,-4610,26546,12640,-32722,28821,25344,-16224,-29909,-12038,-583,-4471,-31013,-14505,-2904,-25839,6901,-23491,-17196,26268,-3734,18489,30316,7645,-20291,-14799,-22824,-11555,5766,-21934,-10647,15545,-19760,24914,22725,-25263,19297,9450,-7004,-7705,13758,10539,-26667,19250,-14711,10471,-1121,-1172,13535,30245,16580,12051,-18536,-4735,16770,21580,-25591,-15482,32298,-23109,-2387,-16237,-9137,14102,-27311,7853,10502,-21638,-8352,28877,22949,30118,18530,-30383,26593,-17031,1147,-8540,-24225,-13688,-1298,-28668,-28944,-28577,-7086,-32416,24007,-3187,-25919,-26148,-10079,20122,-2467,5137,18295,-11326,7403,5628,11319,-23382,10768,-10782,1162,8801,12229,20293,31469,-17436,2515,-2989,19727,-15096,12857,26507,-16353,28079,23594,24684,4565,-19430,-11450,-11945,-7594,-3276,-21179,20215,-7519,-10737,2859,21110,-16349,-5928,15638,12474,31781,-16494,-22984,-12636,-27128,8909,19253,-11749,-18851,27092,6936,-22555,-23077,-20073,521,-3203,-20773,22253,-10656,21774,27532,-5752,-3139,3879,11874,-1284,2818,9312,3605,-10626,13156,-17119,13976,5933,13746,5755,-10580,-12747,20322,-17679,-4658,16073,18767,-25760,-6804,-11687,8901,-26585,30611,-30720,21058,14328,16075,-2010,9838,-21460,28475,12073,1302,-15346,-3850,5919,25894,6775,-27483,-18012,16331,-27056,7552,-1431,-29238,-19605,-14895,-11831,846,-28702,-13094,-11638,-14158,10411,-18807,5953,7084,16417,-16656,-2681,-4443,3702,-17612,-10151,-12749,5598,15757,14496,8975,-24964,-19636,
11494,8900,-27399,2393,12758,-11813,-22445,23254,16830,-7613,-10627,-433,21225,-13184,2953,-28174,-12195,12737,-20512,-6753,1117,16084,-3087,-4561,15768,8322,-9633,-12551,-2926,12414,1771,-7852,-8194,14081,-26863,-25722,-10851,15445,-10143,-12198,20787,10906,-22035,14469,14232,3848,-31189,-14643,12901,-27019,-12331,4951,-24768,-6555,12933,18492,24181,-31340,3631,939,5588,-29703,15392,-16257,-1547,-29966,-27520,22863,-1295,5298,-7972,-29934,-12870,-18121,-23148,2449,-11155,2465,28128,6560,22581,24448,8054,10305,-3924,-25631,-19337,25589,-18703,-27064,20840,31373,-22760,12853,20996,-1291,-26043,24459,29351,-17842,21421,9071,24197,-12819,18503,-28087,-26279,-14446,-27779,-27458,-30117,-10875,7709,8058,10039,12167,31054,13892,26380,20488,19656,23335,-14825,-22950,5907,-7133,-22785,-32268,26397,-20837,27100,-24910,-8345,30851,8366,15192,1331,-6134,10455,20744,-944,7942,15316,27326,-8415,12782,-7698,-14580,-26230,12272,18353,-18433,29846,16168,4118,32387,-594,24136,-26437,4369,6496,32050,-8284,5233,-2607,25412,13544,17412,11539,3605,8144,12585,-20414,12049,7881,-11489,-17660,-32460,-1159,-20361,23072,1604,10343,27033,-3111,-23750,19242,22750,-5732,416,-21544,16487,-16085,25723,28739,-23249,7844,-22818,-2231,12091,32288,-27475,717,-12727,-28036,16370,-24091,15810,-6485,-7153,11100,4783,-19428,-12173,-1003,-27722,-16800,19364,-30216,24502,17832,-717,-21621,-21701,-5921,9966,27012,-31439,17181,-28317,14704,14884,9955,-23023,13798,5972,12147,-16375,-20538,-22048,7827,-1654,-7150,-4017,13822,5480,7980,-22674,12311,11897,-27476,-22696,-8691,2534,-590,22481,-14801,-18898,-2172,-30978,19940,-9049,387,5906,-17752,23852,-8438,-12618,8084,14079,-6355,15384,-13187,-2100,30217,-28627,14040,27927,12725,-15488,-14812,10938,28305,3113,17596,-28284,17656,-1987,29764,-6040,22163,10582,-10436,-31326,6437,1156,25459,-14163,-3554,-9729,28444,-7314,26640,-29415,-10250,47,11542,29538,19943,11518,-8209,8182,1022,30074,-4902,7314,9247,25787,-29874,-21605,-30779,-14007,3323,-6487,27512,5160,-7640,
16068,-31691,-30883,-3363,28047,-22147,27586,13974,-26145,8089,13725,-15037,3868,-18313,12142,-10732,14163,-4438,-28790,10376,16612,18149,-6555,-25446,-18326,-30722,18939,85,-28049,25370,17565,8262,-27721,-562,-16446,-21683,-9736,-22612,-25313,-14638,4424,18686,29638,9700,-9868,6112,12218,5700,-22873,3910,20524,10273,-13048,-29584,-24893,-8457 diff --git a/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice2.tflite b/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice2.tflite new file mode 100644 index 0000000..2405faf Binary files /dev/null and b/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice2.tflite differ diff --git a/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice20.tflite b/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice20.tflite new file mode 100644 index 0000000..56f9a7b Binary files /dev/null and b/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice20.tflite differ diff --git a/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice20_golden_int16.csv b/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice20_golden_int16.csv new file mode 100644 index 0000000..a1fc0f9 --- /dev/null +++ b/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice20_golden_int16.csv @@ -0,0 +1 @@ 
+4334,9124,-15491,29374,24486,12490,-12022,-8664,1233,-22913,-23346,-21556,7501,-6743,18926,-29412,8704,-19969,29671,-18948,-12679,7426,28045,-14925,26330,-7769,25805,15845,7905,15588,-19017,-4782,7329,-17582,9474,-20586,9963,-15275,-16974,-20450,16555,24097,13042,-28075,23528,11440,-20255,-11573,3661,10931,-2493,-15254,8228,-22394,711,-23683,26439,8265,-27840,-18527,-17283,30971,-25906,13657,14936,9654,9571,-12730,2261,-4809,26064,2618,-772,22312,18926,25126,-9261,32442,26465,7690,-19325,5279,-23075,25937,17643,-2223,5323,14564,-30025,16907,19473,12878,5200,22733,-11359,-22517,-5486,-12624,17206,15078,21152,30533,17978,29867,-8172,-287,30093,-994,-22609,12818,27790,24510,22001,19043,7318,-17149,-26025,17246,4371,12825,-29284,24020,-25748,11886,27412,-8188,11777,30996,30735,16366,-13730,24401,-4575,-29184,-28320,24642,15196,-21287,-25898,-21,-16640,-3393,19813,15806,-7379,-12135,-28088,-2233,24215,23143,-24370,-13773,17034,13953,14999,-6998,16971,26112,5882,-1473,-30390,-15034,13772,13422,30685,-4912,12585,25575,-9859,-27897,16864,16346,29447,27476,-5522,25672,-6856,32548,14929,11188,-1644,13726,-21872,-30120,10335,739,9730,17459,-27052,-13672,22060,30645,14726,21343,22951,-17859,30955,-26222,10339,13360,-18310,28660,23663,1883,-32597,7316,-14621,14832,26438,3241,-31752,31322,23563,-29666,-29302,10886,-2953,-19277,23961,159,-12957,-18563,-10267,-1921,18473,15099,31934,-16499,7465,30734,22723,-14272,-2733,-26759,-29385,-1548,3741,-28125,30800,-4979,-27197,-27400,30040,-30916,16639,-22859,28578,6230,-19348,29696,-20901,15198,1028,-896,-27341,29538,-802,3496,-16884,2216,32118,-17047,15600,14234,17587,-12019,13551,-24948,4081,31847,-4040,17902,-15348,14609,7344,8129,3706,19502,13948,-15392,13893,4057,-14139,-12282,-28329,17959,-8608,-25718,2356,12269,4295,-22884,31925,10098,6643,3558,3433,714,26492,446,-17566,5191,-18995,-22762,14666,16911,8659,-22825,-24951,-3785,-9505,28831,-22118,22380,6146,16413,-25887,6088,3206,-20226,-1478,-17000,27580,29036,-24013,-9695,27451,181
86,11237,-18695,30459,2168,-6834,13796,3620,-6835,-31871,5419,7464,11987,20317,-30518,14379,17898,25340,29508,11971,27720,27745,-19553,14358,31723,-13390,18364,4920,32428,-27795,20222,30940,30234,17894,-30030,28552,-15259,-31737,-8456,-5733,-30434,3001,-28870,-27507,-1749,21971,-17920,26723,-23530,-5163,-6620,-28640,-15408,14364,-992,-717,15301,-14014,-27756,-26605,-4845,-14919,-1658,31751,9376,-13443,7084,17055,-15001,10268,23571,7121,-23260,21649,28264,13074,-6703,16374,-2666,26687,31179,27124,410,5760,-6631,-25812,-6236,-24057,-13834,-4460,12893,-19196,15985,-7905,-1995,28107,18697,-12729,16811,23459,-20126,22631,6214,5248,-25440,-6061,-5649,17515,-28027,22830,-29260,22184,-15015,22636,4232,-12245,16569,-12289,11265,-4883,-21971,-29673,-2001,13431,25532,-1596,6640,-23683,-29972,10761,-5417,20051,-9406,-25003,-792,12631,-15147,17649,7444,-9775,-7377,5680,16150,28328,-11222,-16092,24939,1789,-5318,20040,-28830,-31758,-22251,-12736,9251,518,-8218,30698,-12847,-16769,-9725,22128,4653,-12139,15094,13372,-2882,-31988,6407,20607,-9225,12585,-4591,8470,-28443,-30460,-3916,29204,35,16132,3630,-6540,24733,25728,26085,-19172,1641,20506,-29473,-307,-19088,7624,15895,20181,23161,-13100,25147,20491,-2221,9322,-7770,-1267,-25697,23412,-29645,17953,-21086,-22580,21932,8882,-23337,-12602,-22321,-18084,27361,21675,-11865,23681,16526,10318,-30448,-1931,3414,-26733,-12623,25195,-11584,-31959,22409,18305,-542,18543,-23615,-12989,-31602,11619,25100,23668,-14190,-1379,30833,-5342,30731,-2306,19667,11513,32515,25213,25954,29114,-15367,24532,-24920,-2759,10813,9961,1867,-21259,31738,252,-50,21900,9301,4456,7184,4366,-10933,-27459,17833,21268,-26723,6719,3631,-28204,-12107,-24524,-9572,14223,29275,-9644,8139,-29512,-2434,-1358,-27580,-22216,-12622,19711,-4105,-22657,9397,30608,-31195,-2111,-31894,-32210,-4864,-21960,-15007,-12883,-15101,-256,-28547,17066,26029,19016,-26639,-31458,-20249,17825,17909,17125,26620,-12091,-954,12189,14175,11718,27011,-22883,31987,-13243,23483,9966,17571,-24275
,-17758,30969,11972,23969,6764,32080,-10668,-16094,-11155,9512,11552,-25491,-32052,-14140,4125,14494,-32373,-9946,4513,-22867,-4825,-5888,5172,-16280,4462,30079,-27081,-29401,3021,-18746,-28811,-22646,479,18223,-20295,6114,16960,-29334,18459,-5013,7796,-11162,1645,-3993,28989,-10330,-8022,4457,3233,21369,4689,-28864,19691,20816,8283,-10909,27572,-19729,-4889,5228,-6231,18285,-1913,-17339,-17491,22737,-4481,-20916,27510,30971,13708,-19994,23328,5272,-10427,-5360,20037,4890,-19946,7341,-15486,-10314,10939,2864,-17949,24939,15067,-22592,-11839,6905,27788,12077,-31157,-6960,-22985,-492,-10563,13450,6413,7082,12034,-2103,25916,-237,32671,-20385,32133,-24706,-24911,-16739,22440,-32083,16190,17685,24866,-5156,-771,9722,785,-20240,-4634,20253,-1995,11076,-17980,26630,2677,7533,17825,27832,8564,-2315,18796,32633,14632,24044,-7096,-28003,1982,-30730,12040,8440,1471,10348,24958,-16276,5690,-21065,-7450,9222,-17917,-14827,-27292,7176,-1090,-15112,21227,10860,-2747,14121,2047,28968,-9758,-11965,28276,-31019,13147,-2217,-16328,30257,2981,18704,-17178,31623,31892,23481,16912,30198,3598,12940,-3930,-12205,22772,29654,6392,-22334,-7488,-17716,-18508,-32639,-11119,-2108,-24498,-15627,27163,12582,15380,17539,-3179,-32464,-874,-6000,-14228,-30415,-31690,-27760,12778,-18466,8050,-12645,32329,18733,12818,322,30919,2775,-9760,-19798,5300,20396,10063,-6740,23259,-21525,13310,-16434,-22871,-12458,8356,-28961,2765,7081,-3911,27991,-8053,19420,19531,-21332,2705,2700,22245,-9721,11263,24775,-3756,5038,28637,-8670,20823,27481,-23833,-11490,25566,11716,29466,15738,30320,2342,-2255,-11278,31732,27310,31013,-29225,-8775,-29481,12072,24228,-9394,761,-13855,-21543,-20500,21014,-28121,-6577,-29095,3263,-24444,31987,-19333,-18877,-23797,20270,31175,30762,1145,-25449,22870,-28005,23186,-16154,19476,-23892,-25834,-11679,29570,-22118,-19792,-18514,16205,28910,17509,24930,-24171,-1122,-2148,-16008,-7136,15725,20597,-3108,2998,-5559,-4437,-28695,-28272,-23606,-15607,-24155,19987,-12446,6584,3749,-14896,163
72,-22920,-10958,26477,24961,14468,-17124,24769,-18034,-15207,23857,-30981,9793,-2276,24635,-15365,19348,-10596,26081,11989,13243,-30827,5727,16688,23622,-10836,-21040,-20267,-16664,-4697,243,-475,-1926,15415,15934,-32439,-4220,116,-30628,-25334,-26956,-25428,-28473,-12400,13819,-17895,7643,-12577,-22597,21,-30440,-11056,19561,-1797,6214,-2202,17406,5326,-11231,-29170,21251,-8490,27149,-817,5504,-32027,6262,21003,20443,12369,29362,-18569,-15316,-3310,12057,28285,-24797,24908,-21510,-11255,29147,30502,-23879,18993,-21623,30047,-7517,-23707,9784,12396,19449,-17893,-18139,20578,4388,6641,5545,23512,-2311,17467,32011,-327,16134,22142,17183,14096,7535,-30409,7227,-29536,-3219,18580,-8924,6336,13303,-13735,31041,-24584,7708,-15342,-10548,-32140,-27159,-3302,14260,-16333,22228,-7881,6111,16249,11525,26671,-12745,30571,18797,-26232,31604,-14070,15216,-19352,-4787,-20213,-10545,21722,-26910,324,4494,30082,-6599,7363,-975,32199,-17832,-3079,2595,-4626,21403,29261,-24273,8578,-5486,10067,8313,-1057,-28275,-4417,18873,22808,4343,8864,-19854,9272,22974,241,8375,-26028,11412,-5114,-15088,31156,24705,-19537,-17931,24996,25483,31611,-18856,11781,-21872,-1199,1369,7009,27841,6257,-26601,12310,-1827,32658,24533,22050,12486,-6644,23551,12308,5655,5856,21805,-29462,13071,-11167,-98,18282,17382,-14048,13597,9427,-21094,-4052,-10748,-12770,17658,-29628,25162,5750,12909,-1687,-1919,-27578,-26016,14759,-19093,24753,-19776,9783,-9330,32688,-28520,-9096,-19329,23779,10585,-13212,-31751,-12146,32721,-14490,-2291,26661,-15865,-15968,398,-1839,13942,18903,30935,6523,8696,15678,-32070,17889,734,8581,-7306,-19195,19392,-16250,-21734,-9642,-547,12457,23933,-8690,11550,1950,9103,-15983,12455,-13766,-14838,-29075,-13693,23894,-30187,17085,-2519,26484,-21838,-12758,17012,-19976,29732,54,-14665,-21786,-8356,9009,-15328,-16810,26064,31462,3247,31730,-8296,-31997,-4647,623,14342,28580,24301,22849,5556,-15193,-7090,13878,12357,7033,16650,28781,-10623,-27872,-31302,-18242,9183,-5501,18018,-120,-3795,-1830
5,6264,24115,11064,14844,19294,19399,-12972,-10979,-16003,10218,-2162,17057,-13080,-11186,-9245,17066,-2703,20391,-9236,21353,23533,-23514,22152,-20903,-14549,-12122,9042,1562,17704,-14282,-12377,19498,-4835,-29265,20053,-22045,-10388,-20525,-32127,28217,28025,-19466,-18151,5251,27945,-14192,31256,-25072,-17021,27268,27636,-2158,26700,-24249,25556,-4143,-1775,-6150,16834,-4179,-13258,4148,-20145,21864,18616,-4599,-1763,-25461,20674,-18116,13088,8884,9385,15689,15301,6024,-8875,13762,11866,20591,-27255,29133,-6449,10028,19142,22100,-29273,-9092,20514,-16994,972,14667,-25519,-26695,-21180,-10546,8444,22599,27912,17680,18512,13991 diff --git a/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice20_input0_int16.csv b/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice20_input0_int16.csv new file mode 100644 index 0000000..3a8c7d8 --- /dev/null +++ b/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice20_input0_int16.csv @@ -0,0 +1 @@ 
+10704,15611,-24396,-7724,737,-8063,-27256,-12744,-23228,30545,-3010,-14244,-21052,4295,-30056,4910,23582,-14361,25341,6892,-11665,5095,1360,-1332,11346,-29175,-14548,24839,30835,-16937,16986,24076,-21284,7441,2952,-4675,29124,15873,-18725,24754,23546,18324,1153,25669,-13727,-26893,17834,-24601,-27524,-5180,-14543,21825,-6931,-16268,-13770,25945,11091,-19273,-23162,19283,17039,-26984,17346,-22632,9642,-23863,8891,31344,-19618,17014,-17412,31995,-30311,-18261,-32621,12387,27870,9466,18901,-3258,-8797,-26088,18725,-15071,14838,8229,6640,10456,-26926,-24956,17739,-15579,-8511,21926,-28321,-14874,17056,32037,-15459,8357,29034,7712,-21848,17818,4636,8634,-31993,25164,-12849,21817,25320,30908,18032,12735,-30404,-25368,-13440,-1957,31519,332,1803,-5042,23590,16156,-14449,-24050,-19274,28202,4498,-23840,18106,-3521,-9848,-13855,-6950,-10899,3511,3515,17455,-8514,28929,-16444,32686,12674,14553,4938,-14283,-7407,18810,26222,-30287,26788,16915,-24498,-16390,-10150,26400,16501,-28386,-11828,26707,-11961,18320,16315,688,5076,-17473,22431,20560,30239,27579,-21084,-8828,-23717,24609,-189,13319,10218,-15018,16890,32755,6661,-28107,-11277,-10240,-17399,1533,-28679,25380,15785,-18660,11832,17293,25963,-11629,-21920,-28501,15096,9202,4115,9838,17682,-32273,21828,-5114,12731,-30673,-4809,-4133,-29192,-835,-23689,26167,23414,-9256,25805,-13361,22147,28743,1213,-11721,-15093,-4007,-1541,25092,24817,-3987,11911,375,31992,-7200,-27135,-5953,-20780,-28871,24690,28787,12493,-21914,31870,-29158,-13357,15809,21776,-11440,-30418,16985,-17055,-32,-17915,618,-789,-19405,22853,-12578,-1942,-32678,29459,-10968,12820,14415,13112,-4375,6380,-13777,-30869,30820,-15589,-8800,-6147,-30163,5334,-18184,-31367,8991,3150,24751,9576,-10494,12399,15538,14772,-30186,-5274,-7416,13740,3564,1317,28669,-17731,26587,9902,10343,19968,-29,26774,15533,-10212,-1252,-29809,-30019,-20143,23591,-14058,-2208,30378,3133,-10859,8534,22753,-2358,-27382,26285,15098,-31268,-4703,7760,31430,-31393,1358,-26277,-15362,5338,-22945
,14571,-14250,-24563,8345,-31043,14038,28947,-24260,-7812,18356,20509,-4396,-19825,-24001,23654,30653,22616,-27138,10609,10936,10516,-2314,-19747,1819,-27111,-10122,13746,-16746,9299,-16647,-16775,-11939,-30478,19711,17413,15063,-15572,27930,-22678,-7647,-22186,-17777,-27460,-10306,-9028,5133,12259,16290,-2802,21556,-21377,-29913,14458,-8031,24220,5919,-20134,-13106,-8231,-20694,-6538,27802,-10432,13854,27799,-9115,89,10675,21680,14988,-14311,1382,19166,-4566,26265,25537,-8570,19515,-30402,26857,-18595,20716,-28382,13473,-18119,13576,24873,23662,-5552,-7217,14056,-24930,18003,12332,10164,23392,-5399,-27920,21998,-27649,-12615,-25721,-13671,25384,16982,26585,-12182,-4990,14305,22518,1911,25550,57,5377,-19187,-2770,-10067,-10578,4859,-4346,32669,23348,30214,1808,22328,-10591,-30391,18666,6426,-14182,25323,-18091,-1753,29566,21930,24111,6085,27001,7628,-19998,19473,16529,-10739,21433,27723,-5354,29045,-3583,-16506,2463,10114,-21068,13145,4944,-20815,-28716,-27251,-16163,-6348,17836,-3125,26972,-23044,2493,15728,12049,27268,16664,437,-23799,29273,9696,9050,23681,-16848,24837,-28097,-29769,21588,-5667,28246,18882,-15437,13638,27880,-24270,-28501,-4747,-2939,25182,3333,-10493,-8799,27799,19383,-29630,-10875,4058,-12347,30682,-24109,-15688,2432,24483,25750,-23264,-10920,-9368,10747,2950,21731,-12535,27219,-7839,7096,4872,17770,14735,5730,29330,24701,-31097,-2860,-17368,246,20948,-4329,900,3959,-26493,-18376,-13253,3983,-3638,15082,-14884,-28398,25303,-31064,-19951,12720,-1309,8915,-20681,-9191,-1315,15265,31442,24983,-15835,-1170,-30738,24086,-8547,-15057,-30429,1264,17394,-20732,13079,19772,31703,15295,-17191,20286,25113,-4574,-24612,-8326,-26355,-28274,25754,10451,-5998,10605,10625,-10312,30764,20792,562,14009,-4126,19793,-15026,11719,-17404,-2456,-17197,21039,-23526,3602,-24881,7,17049,-10642,-1242,-12223,-19050,16303,32393,10932,-4925,26729,23943,-31116,-27061,16194,-22549,-1125,-8349,-31904,30632,30864,12628,18931,-8620,6590,-26174,-25491,-25100,-29478,-30280,-29154,1
6542,-21353,-27865,-6532,18000,26490,1197,-28603,-30755,20172,16345,6033,11866,-16178,4886,-3245,-8248,15002,19258,9743,-29571,1792,-22580,20034,3489,-23758,14369,-6209,4913,-19523,32227,26144,16571,-13251,-13911,23702,-10816,-19529,2542,-5045,25141,-16275,16388,-26893,12634,7068,-16981,-23678,-19078,-6085,5223,-297,-31873,18948,-19807,-13523,15704,4334,9124,-15491,29374,24486,12490,-12022,-8664,1233,-22913,-23346,-21556,7501,-6743,18926,-29412,8704,-19969,29671,-18948,-12679,7426,28045,-14925,26330,-7769,25805,15845,7905,15588,-19017,-4782,7329,-17582,9474,-20586,9963,-15275,-16974,-20450,16555,24097,13042,-28075,23528,11440,-20255,-11573,3661,10931,-2493,-15254,8228,-22394,711,-23683,26439,8265,-27840,-18527,-17283,30971,-25906,13657,14936,9654,9571,-12730,2261,-4809,26064,2618,-772,22312,18926,25126,-9261,32442,26465,7690,-19325,5279,-23075,25937,17643,-2223,5323,14564,-30025,16907,19473,12878,5200,22733,-11359,-22517,-5486,-12624,17206,15078,21152,30533,17978,29867,-8172,-287,30093,-994,-22609,12818,27790,24510,22001,19043,7318,-17149,-26025,17246,4371,12825,-29284,24020,-25748,11886,27412,-8188,11777,30996,30735,16366,-13730,24401,-4575,-29184,-28320,24642,15196,-21287,-25898,-21,-16640,-3393,19813,15806,-7379,-12135,-28088,-2233,24215,23143,-24370,-13773,17034,13953,14999,-6998,16971,26112,5882,-1473,-30390,-15034,13772,13422,30685,-4912,12585,25575,-9859,-27897,16864,16346,29447,27476,-5522,25672,-6856,32548,14929,11188,-1644,13726,-21872,-30120,10335,739,9730,17459,-27052,-13672,22060,30645,14726,21343,22951,-17859,30955,-26222,10339,13360,-18310,28660,23663,1883,-32597,7316,-14621,14832,26438,3241,-31752,31322,23563,-29666,-29302,10886,-2953,-19277,23961,159,-12957,-18563,-10267,-1921,18473,15099,31934,-16499,7465,30734,22723,-14272,-2733,-26759,-29385,-1548,3741,-28125,30800,-4979,-27197,-27400,30040,-30916,16639,-22859,28578,6230,-19348,29696,-20901,15198,1028,-896,-27341,29538,-802,3496,-16884,2216,32118,-17047,15600,14234,17587,-12019,13551,-24948,4081,
31847,-4040,17902,-15348,14609,7344,8129,3706,19502,13948,-15392,13893,4057,-14139,-12282,-28329,17959,-8608,-25718,2356,12269,4295,-22884,31925,10098,6643,3558,3433,714,26492,446,-17566,5191,-18995,-22762,14666,16911,8659,-22825,-24951,-3785,-9505,28831,-22118,22380,6146,16413,-25887,6088,3206,-20226,-1478,-17000,27580,29036,-24013,-9695,27451,18186,11237,-18695,30459,2168,-6834,13796,3620,-6835,-31871,5419,7464,11987,20317,-30518,14379,17898,25340,29508,11971,27720,27745,-19553,14358,31723,-13390,18364,4920,32428,-27795,20222,30940,30234,17894,-30030,28552,-15259,-31737,-8456,-5733,-30434,3001,-28870,-27507,-1749,21971,-17920,26723,-23530,-5163,-6620,-28640,-15408,14364,-992,-717,15301,-14014,-27756,-26605,-4845,-14919,-1658,31751,9376,-13443,7084,17055,-15001,10268,23571,7121,-23260,21649,28264,13074,-6703,16374,-2666,26687,31179,27124,410,5760,-6631,-25812,-6236,-24057,-13834,-4460,12893,-19196,15985,-7905,-1995,28107,18697,-12729,16811,23459,-20126,22631,6214,5248,-25440,-6061,-5649,17515,-28027,22830,-29260,22184,-15015,22636,4232,-12245,16569,-12289,11265,-4883,-21971,-29673,-2001,13431,25532,-1596,6640,-23683,-29972,10761,-5417,20051,-9406,-25003,-792,12631,-15147,17649,7444,-9775,-7377,5680,16150,28328,-11222,-16092,24939,1789,-5318,20040,-28830,-31758,-22251,-12736,9251,518,-8218,30698,-12847,-16769,-9725,22128,4653,-12139,15094,13372,-2882,-31988,6407,20607,-9225,12585,-4591,8470,-28443,-30460,-3916,29204,35,16132,3630,-6540,24733,25728,26085,-19172,1641,20506,-29473,-307,-19088,7624,15895,20181,23161,-13100,25147,20491,-2221,9322,-7770,-1267,-25697,23412,-29645,17953,-21086,-22580,21932,8882,-23337,-12602,-22321,-18084,27361,21675,-11865,23681,16526,10318,-30448,-1931,3414,-26733,-12623,25195,-11584,-31959,22409,18305,-542,18543,-23615,-12989,-31602,11619,25100,23668,-14190,-1379,30833,-5342,30731,-2306,19667,11513,32515,25213,25954,29114,-15367,24532,-24920,-2759,10813,9961,1867,-21259,31738,252,-50,21900,9301,4456,7184,4366,-10933,-27459,17833,21268,-2
6723,6719,3631,-28204,-12107,-24524,-9572,14223,29275,-9644,8139,-29512,-2434,-1358,-27580,-22216,-12622,19711,-4105,-22657,9397,30608,-31195,-2111,-31894,-32210,-4864,-21960,-15007,-12883,-15101,-256,-28547,17066,26029,19016,-26639,-31458,-20249,17825,17909,17125,26620,-12091,-954,12189,14175,11718,27011,-22883,31987,-13243,23483,9966,17571,-24275,-17758,30969,11972,23969,6764,32080,-10668,-16094,-11155,9512,11552,-25491,-32052,-14140,4125,14494,-32373,-9946,4513,-22867,-4825,-5888,5172,-16280,4462,30079,-27081,-29401,3021,-18746,-28811,-22646,479,18223,-20295,6114,16960,-29334,18459,-5013,7796,-11162,1645,-3993,28989,-10330,-8022,4457,3233,21369,4689,-28864,19691,20816,8283,-10909,27572,-19729,-4889,5228,-6231,18285,-1913,-17339,-17491,22737,-4481,-20916,27510,30971,13708,-19994,23328,5272,-10427,-5360,20037,4890,-19946,7341,-15486,-10314,10939,2864,-17949,24939,15067,-22592,-11839,6905,27788,12077,-31157,-6960,-22985,-492,-10563,13450,6413,7082,12034,-2103,25916,-237,32671,-20385,32133,-24706,-24911,-16739,22440,-32083,16190,17685,24866,-5156,-771,9722,785,-20240,-4634,20253,-1995,11076,-17980,26630,2677,7533,17825,27832,8564,-2315,18796,32633,14632,24044,-7096,-28003,1982,-30730,12040,8440,1471,10348,24958,-16276,5690,-21065,-7450,9222,-17917,-14827,-27292,7176,-1090,-15112,21227,10860,-2747,14121,2047,28968,-9758,-11965,28276,-31019,13147,-2217,-16328,30257,2981,18704,-17178,31623,31892,23481,16912,30198,3598,12940,-3930,-12205,22772,29654,6392,-22334,-7488,-17716,-18508,-32639,-11119,-2108,-24498,-15627,27163,12582,15380,17539,-3179,-32464,-874,-6000,-14228,-30415,-31690,-27760,12778,-18466,8050,-12645,32329,18733,12818,322,30919,2775,-9760,-19798,5300,20396,10063,-6740,23259,-21525,13310,-16434,-22871,-12458,8356,-28961,2765,7081,-3911,27991,-8053,19420,19531,-21332,2705,2700,22245,-9721,11263,24775,-3756,5038,28637,-8670,20823,27481,-23833,-11490,25566,11716,29466,15738,30320,2342,-2255,-11278,31732,27310,31013,-29225,-8775,-29481,12072,24228,-9394,761,-1385
5,-21543,-20500,21014,-28121,-6577,-29095,3263,-24444,31987,-19333,-18877,-23797,20270,31175,30762,1145,-25449,22870,-28005,23186,-16154,19476,-23892,-25834,-11679,29570,-22118,-19792,-18514,16205,28910,17509,24930,-24171,-1122,-2148,-16008,-7136,15725,20597,-3108,2998,-5559,-4437,-28695,-28272,-23606,-15607,-24155,19987,-12446,6584,3749,-14896,16372,-22920,-10958,26477,24961,14468,-17124,24769,-18034,-15207,23857,-30981,9793,-2276,24635,-15365,19348,-10596,26081,11989,13243,-30827,5727,16688,23622,-10836,-21040,-20267,-16664,-4697,243,-475,-1926,15415,15934,-32439,-4220,116,-30628,-25334,-26956,-25428,-28473,-12400,13819,-17895,7643,-12577,-22597,21,-30440,-11056,19561,-1797,6214,-2202,17406,5326,-11231,-29170,21251,-8490,27149,-817,5504,-32027,6262,21003,20443,12369,29362,-18569,-15316,-3310,12057,28285,-24797,24908,-21510,-11255,29147,30502,-23879,18993,-21623,30047,-7517,-23707,9784,12396,19449,-17893,-18139,20578,4388,6641,5545,23512,-2311,17467,32011,-327,16134,22142,17183,14096,7535,-30409,7227,-29536,-3219,18580,-8924,6336,13303,-13735,31041,-24584,7708,-15342,-10548,-32140,-27159,-3302,14260,-16333,22228,-7881,6111,16249,11525,26671,-12745,30571,18797,-26232,31604,-14070,15216,-19352,-4787,-20213,-10545,21722,-26910,324,4494,30082,-6599,7363,-975,32199,-17832,-3079,2595,-4626,21403,29261,-24273,8578,-5486,10067,8313,-1057,-28275,-4417,18873,22808,4343,8864,-19854,9272,22974,241,8375,-26028,11412,-5114,-15088,31156,24705,-19537,-17931,24996,25483,31611,-18856,11781,-21872,-1199,1369,7009,27841,6257,-26601,12310,-1827,32658,24533,22050,12486,-6644,23551,12308,5655,5856,21805,-29462,13071,-11167,-98,18282,17382,-14048,13597,9427,-21094,-4052,-10748,-12770,17658,-29628,25162,5750,12909,-1687,-1919,-27578,-26016,14759,-19093,24753,-19776,9783,-9330,32688,-28520,-9096,-19329,23779,10585,-13212,-31751,-12146,32721,-14490,-2291,26661,-15865,-15968,398,-1839,13942,18903,30935,6523,8696,15678,-32070,17889,734,8581,-7306,-19195,19392,-16250,-21734,-9642,-547,12457,239
33,-8690,11550,1950,9103,-15983,12455,-13766,-14838,-29075,-13693,23894,-30187,17085,-2519,26484,-21838,-12758,17012,-19976,29732,54,-14665,-21786,-8356,9009,-15328,-16810,26064,31462,3247,31730,-8296,-31997,-4647,623,14342,28580,24301,22849,5556,-15193,-7090,13878,12357,7033,16650,28781,-10623,-27872,-31302,-18242,9183,-5501,18018,-120,-3795,-18305,6264,24115,11064,14844,19294,19399,-12972,-10979,-16003,10218,-2162,17057,-13080,-11186,-9245,17066,-2703,20391,-9236,21353,23533,-23514,22152,-20903,-14549,-12122,9042,1562,17704,-14282,-12377,19498,-4835,-29265,20053,-22045,-10388,-20525,-32127,28217,28025,-19466,-18151,5251,27945,-14192,31256,-25072,-17021,27268,27636,-2158,26700,-24249,25556,-4143,-1775,-6150,16834,-4179,-13258,4148,-20145,21864,18616,-4599,-1763,-25461,20674,-18116,13088,8884,9385,15689,15301,6024,-8875,13762,11866,20591,-27255,29133,-6449,10028,19142,22100,-29273,-9092,20514,-16994,972,14667,-25519,-26695,-21180,-10546,8444,22599,27912,17680,18512,13991 diff --git a/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice21.tflite b/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice21.tflite new file mode 100644 index 0000000..a7045c9 Binary files /dev/null and b/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice21.tflite differ diff --git a/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice21_golden_int16.csv b/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice21_golden_int16.csv new file mode 100644 index 0000000..59e610a --- /dev/null +++ b/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice21_golden_int16.csv @@ -0,0 +1 @@ 
+17643,24851,16368,-2364,28466,-9795,10596,-23660,27438,-2364,-17676,6564,2722,32135,-18120,6814,24503,28876,13404,-10207,-12851,30029,26696,23190,6114,-2171,-8098,-2573,-299,14870,99,21009,30194,-28846,14294,-1935,-9938,25292,18962,-15410,-15704,6783,11252,-27789,17192,25774,-30314,31317,1892,14741,-31655,27305,9641,90,-2710,-23327,3533,-25703,13901,-25231,-21328,12120,31975,-29573,-3281,31952,28133,23976,8373,3844,27812,29647,-20755,-23179,17862,-30842,-3948,-6929,-20669,-2496,-17188,29081,10685,26623,-23936,32756,1248,13544,-19351,4120,-17806,-21074,-5569,-8124,-10196,26432,17885,25561,-6893,21883,-7685,16785,-28462,9838,-23108,8068,16756,27682,15498,-19730,-17982,-8149,-25918,19361,8073,-95,13662,6875,-10368,-17307,-27132,18620,-21233,6186,12010,-6359,-26479,-6777,12465,-5132,19819,-24164,-17900,-17434,14186,-1235,-26835,24110,-28045,-11387,14350,-23814,-30178,14794,10286,13284,2811,-20524,-26726,29244,16353,-24294,30270,26271,-21022,-27709,-28989,-22302,25878,-25004,-516,11773,-6535,28790,771,-3463,23120,-26790,23780,-5701,-32656,-15824,6171,9618,1900,-25794,15986,-14710,-32690,-9799,2514,12671,-32051,18625,565,18711,-2954,23488,-29923,25342,-5884,21403,2203,-10221,-18607,29513,-30764,3964,31847,4189,30040,17711,28312,-5579,10931,-15151,15365,27619,-5464,19662,2246,20838,9450,17400,-10641,-8904,-10177,-11929,13719,22551,27146,12997,17456,-25943,25318,11788,4056,-1531,-3221,-30127,-19405,26982,-3438,-29556,24904,-25396,-13119,30,20092,20221,-24562,-22967,-4256,-22170,25408,12330,-23480,-30221,25176,7169,-21736,-25716,9791,-12908,-23908,-11685,-741,-16566,29498,5217,9371,22729,-22991,24969,-30519,7955,16238,-31815,-22601,23579,-23216,30246,-3090,-25500,-21558,-25645,29138,28914,13826,-28437,-18570,22421,-12672,18559,1537,-14116,-10351,31111,-982,-28411,-1441,14310,-325,-21634,15932,-6110,10210,7865,15849,-28164,-11043,1219,29369,-15763,-20800,14820,21348,-6333,14321,-11345,-12687,-30539,-27258,16963,26540,10077,22399,-6479,-512,15578,31685,-31049,-225,-2234,28178
,-18457,21656,10927,14889,6072,-19048,-26411,26097,26007,-1026,-31484,30676,-1835,20212,19954,-19853,-24221,13208,23905,-11315,25329,14868,5112,-21953,2870,-8558,29700,-29522,-7888,24084,-31925,-20230,28148,-9863,3026,-13760,5749,28978,-18504,-13974,9045,11281,-17368,-6167,-12117,-25379,-26606,6275,-25055,28663,-13654,-16315,-1016,10850,-22374,23635,22237,7413,14902,-14156,-10924,4409,10508,-20320,5126,-22094,30841,-23935,-12595,17687,15597,-10399,-12990,28924,10539,1552,31668,10245,-2803,21395,1199,21585,15680,7476,20646,23475,6243,4511,4706,23202,-31248,24405,17473,-18569,4552,3747,-10731,7140,11804,-12824,15990,-1672,7312,-129,-76,-15833,8891,15590,-5207,5452,20983,18371,-28519,19914,25479,9375,26693,-1019,-23963,-16734,16686,-23473,-9989,11204,-23400,-28318,30834,18703,-12359,-19007,29939,23526,-25515,-3275,15340,-15794,17879,23383,-3697,1915,22218,16951,31028,28625,16408,-4607,-31082,27109,31440,2706,-4212,25687,19787,15321,15252,27723,5114,-24807,-6234,-17896,25988,-25776,-13170,6064,22616,8629,29986,7755,13773,9897,20347,6311,-20069,23468,-30981,-10980,-12952,-14875,-18103,12958,2398,-4001,2075,20213,3305,-13258,24918,944,253,-7485,-1310,20008,-6321,11566,-24313,-23302,30849,-8281,5559,-301,-25812,-3995,-21336,-31730,-11075,-29818,-32199,11364,-7000,-30189,-22949,15547,-24828,15142,9880,16488,7999,16457,20274,-31774,-31042,27567,-26047,-27416,29336,21026,-4687,-25515,-26901,-11685,-3080,22243,-5381,-2662,15480,23234,-8264,-13683,-12683,-3961,23756,-19966,-272,16612,-22457,24411,27346,23777,-25543,-7564,4247,5566,4752,-28809,5555,-26688,29501,23512,6491,10216,-31067,-961,-29437,-520,18223,-23736,7244,-22754,-18988,30844,-15247,8408,28702,-3235,11333,29228,2306,30348,-25902,10929,7818,-29164,13106,-6280,-24707,12821,-31082,27713,-8347,-32547,3729,-27284,-7886,27320,-9775,26191,-7391,-20240,31303,-13254,21557,24451,11813,-28929,-27764,24662,-30667,-10790,4916,-30726,4044,32005,2459,-28823,32468,4436,-15817,-4134,26482,-8753,-4959,12997,-7074,-20602,12542,-5407,-
13641,2558,8690,23289,31696,-28817,27588,-6859,-24381,22957,13970,-19869,-25403,-12824,-13567,-15972,28180,11811,24898,-558,653,-27512,-18900,4159,-27670,-4969,-27284,31736,9231,27644,-31061,-12375,10940,5894,24920,-27778,-11219,29204,31242,19397,-5247,27073,-17825,11350,-142,-27011,-8022,3128,25097,26570,6282,24011,-6858,8307,13150,22028,-3588,-29354,8335,-3484,15764,-16154,-508,15679,-29509,-8236,-5570,-31971,-32600,-16436,-27345,25790,-3611,32275,-11444,-15344,20115,25946,10866,8589,-29391,32156,-22873,19388,-24509,-30760,24182,-32016,153,-26879,-31594,6183,-17692,-32045,-32744,2273,13250,-24090,6881,-14498,-2780,7223,9562,-28894,24812,-9234,-30545,19383,-7993,-30149,-28505,-7134,-20865,20368,-7607,15515,26464,-4844,-27660,9487,-7589,-7591,-31576,-31163,-6752,-25099,9819,11077,8121,5030,25911,-31052,16903,-12281,2140,-31723,11697,-28832,7813,27981,-8442,-2083,-24125,-8274,-29736,-24202,13469,-22425,22474,28962,24142,-27155,-1557,-19422,-9632,1823,-7991,-18395,-5465,-4108,10172,24677,4868,-2089,-7204,19974,-7236,26610,-28254,31182,-19785,-20260,-21103,24447,-29657,-32665,22331,-9893,-4521,-25041,-23214,-9752,-31829,25525,-5401,16780,-27341,-5123,17669,1944,-22753,-8229,2854,6954,-14265,17917,-29969,5560,-25190,14151,3428,2757,14228,-24015,28557,2758,2151,2876,18515,-30474,-9552,30599,6412,-24046,10463,-9523,-17869,-2879,2118,-3835,17001,19426,2956,31492,1975,-25455,1520,13327,-12168,-10001,-6377,6669,2698,-18240,-13602,-29066,20126,-24607,11188,-6877,-8642,8501,-3645,-26291,1811,-12215,18135,-15695,-25316,-12366,-8132,13717,-23070,-10042,8196,-18286,-520,-8441,-14800,-7650,18210,-3354,-27908,28182,-24970,17304,-19774,-31471,6865,13504,16705,-17602,-12744,-18062,24681,1802,-4408,-12246,19606,-5123,-16549,21155,4538,-10410,4288,24041,-3099,-9524,-28588,7935,-11643,-8378,-16334,10529,-5676,-7712,25144,-21331,-1399,27332,-30596,-17754,-19501,6444,16187,-32074,30782,9186,6944,-19366,25336,9644,3568,3714,-20430,-13649,1696,6313,1813,-19776,12390,-26005,22255,270,22150,1
2317,32358,5842,22457,21579,10585,-13957,20646,27953,-429,-7508,-12881,-22810,-8306,-3233,-19784,24329,27550,30404,12610,28688,10110,31872,-22495,-10923,11093,13066,-19816,17046,8286,-30931,6025,275,11309,-8355,20575,-8226,-18648,26514,-19314,-25961,-13369,-26377,-29944,23076,9685,-15380,-15497,-29629,31169,-27509,338,30920,11957,25297,20646,31813,2362,-26432,12261,-10353,19028,7578,-8200,18253,-20901,-15854,-23957,5814,12146,-599,-13544,9876,2162,22049,-7876,31122,9606,24449,-28101,31336,-32046,-17529,-6661,-18209,13009,-20642,-18374,-16762,23526,7576,-28611,-21951,-2256,12203,1085,26467,-10343,-4052,-32100,29819,3421,26593,20881,12676,17217,-21190,23626,20909,4445,-8547,17264,12833,-25810,-12301,-11632,-15281,-24221,-19682,16707,27184,6234,-8353,26217,-20214,-27823,20236,-25149,17731,-29012,19882,-19504,-24197,6455,30678,20532,23228,30289,13628,-359,-13726,15226,-9914,-15045,-28118,1631,19702,-27937,-6387,-8964,9186,6796,31806,20096,-21258,-23054,31513,25085,-13161,32218,-26228,-16667,-20116,693,-28764,18464,19467,-31464,-20051,18035,23506,-23526,22680,-4358,18305,-31407,31488,3029,6463,-7294,-5420,-14008,-6760,15645,28223,24742,5632,-18704,1001,-13539,15259,8392,-8025,25875,63,-31387,-23019,-13910,-4723,17388,-27212,17486,-22407,6712,10148,8674,30535,-25050,-3057,15933,-30778,23908,6997,-7001,-30958,2927,6389,28025,-9903,29872,-12073,-2610,13562,-11431,-8221,-26698,32104,-31664,-1207,-21441,28397,-29361,15247,-21885,-7344,-23274,-27245,-28526,-11653,-23506,12079,19639,9235,-26949,-11139,-14724,-3010,-16298,8131,28691,24418,-27862,29594,21790,20070,27632,17508,-13913,-23942,-29596,13856,-24735,2480,31305,26412,-26564,19606,-11484,15614,15839,-14926,-7418,-31656,24962,3940,-21652,-32041,-16632,-9076,18414,29296,7809,15427,-11421,-22442,8438,-30402,28096,81,-30335,-11110,-18489,25403,15763,-27607,7172,-27605,-31103,21955,-3552,19773,21720,-11050,15848,-9996,-3642,21869,23955,-6720,-25041,18317,-31407,-15899,7668,14304,15238,4384,17271,-16796,-27665,-16768,-10869,-14
557,-11976,-13409,6593,-28030,13348,-30932,16360,-28447,25326,7207,6438,-8724,2782,-18444,23627,-24859,-4073,-26642,-4346,29227,-26791,28410,4230,2613,1994,10387,18354,1597,-9565,17100,-10441,9402,32642,-8188,21701,13089,-9515,-23745,18518,19877,-30670,-15575,19483,-6983,-13659,29834,-766,-4777,29814,6126,-9431,4030,-17001,16456,17189,-17446,-7937,7796,-9008,-27301,12545,-30860,-18809,30284,-23347,22737,3118,6366,-31635,4151,-2568,-14747,-5258,-20264,27441,27090,9616,-32507,-22321,22524,-31828,20499,-27189,11948,1255,-22411,31610,12675,19547,1493,9011,-15103,-25651,10695,3392,27511,-14452,-2106,10878,21751,-14950,11360,5151,20121,23947,-15307,-24423,-9430,-12606,8244,2963,16372,-22771,-10028,-25647,-32588,-5262,-2889,-14014,-8866,16281,-13143,16127,8183,-27186,2661,-8526,-12404,6734,17935,2908,-8722,-9911,6513,27192,9145,13270,16259,-11647,29281,-1014,4380,5993,-27814,-15768,4705,-2400,28656,-6534,30831,9186,-8359,-5547,18253,27338,-26564,-16700,16602,5092,-13692,26904,13238,-19582,-27078,-29115,-22769,28951,-14112,129,27317,-15871,-29268,-7966,-1352,-23391,22624,-4833,-10619,-6138,-3579,21179,-1930,-29360,20370,-5039,-14277,17290,31137,-29514,-14317,6106,1021,-28116,20337,-18149,-8786,8039,28011,25180,10546,-5676,-24327,-31382,-2341,-18184,31948,-28181,22707,-3979,-13426,11681,-27286,-6720,7028,-26315,4717,-30600,17457,8208,29824,-18378,-13364,-23277,32630,-29931,13613,28531,-12861,27455,29779,32092,-11188,-14647,-30720,-12812,-24061,7817,-6673,30288,-29446,-16916,-16726,-16444,-3302,-9381,9726,-13650,-3869,-22484,-25744,21402,-29963,-485,-15006,31440,9547,-27038,285,15385,28255,11223,27529,4495,-22979,-24658,-2163,-28653,20700,-25049,3595,4571,-27919,-22330,14577,3202,19010,19031,20382,-30717,212,28336,-13586,10726,-11758,17648,20418,1814,1592,5897,1629,-24718,-24699,-11260,15774,-17524,-24314,4429,11359,-12178,17288,4100,27076,17131,10303,15467,5585,15883,-18771,-18472,29733,31313,-6574,23048,14049,-3862,-23243,-6133,-11799,-26432,-18444,-22913,13707,14130,-16619
,25172,17572,-27775,-10288,27728,-16804,6235,-10590,-27946,24530,-27321,31710,23113,-31918,-15061,-654,7816,24065,-13791,7556,-14718,21405,23101,-18288,-17101,21641,28511,-12845,-32374,-12362,7244,-298,-11174,-14521,-19269,-8968,-8201,-23871,-30361,7873,29463,-23121,7651,-5708,9572,27771,-30001,5370,-28151,-4028,31355,6005,-13219,-21459,8494,-3462,-28946,-10517,18029,-3267,-19125,5271,7236,23955,16449,3955,17787,-13131,23747,-27662,-31072,-2636,8383,13718,8154,6163,12174,20537,-11824,17404,-18954,-31798,-20114,29827,13103,20938,-14180,-13565,-29957,-28695,-15848,31557,-11982,-13164,32,3048,-10043,11568,-10326,-13958,-16481,-17330,-23581,-13399,23779,-21812,-22824,-12904,-2919,-10741,-10887,-12104,13755,-14918,1810,-31998,1650,-6362,-9711,7126,7609,-8629,15117,-11690,-12027,32137,15412,11841,-23749,-27069,-19410,28849,27110,-29397,16711,-10680,28480,22301,9166,25384,27850,-24087,563,5654,15477,12620,31014,28844,-14567,1737,30030,30359,-22697,3224,24139,9849,20615,306,-28240,-31962,-32436,-30783,585,17030,13955,10721,21469,19398,32527,5447,-11294,8867,-30325,-30246,-367,-30504,22676,-1930,-26439,8474,6244,-9513,-29665,-19765,25173,25499,9179,18174,-31341,12329,7313,31841,-17861,-18887,23778,11801,21238,24747,27965,9521,20948,22441,27505,16430,31220,12640,28513,-30447,20538,-22382,30698,-2404,9362,15583,-26979,32002,-13227,-29327,3409,-29596,15980,24427,1788,20415,20600,-3024,16326,-4319,-16187,-26114,12584,27308,-17337,-28774,-9277,-31816,-29510,-2897,-28159,-28532,-23017,1220,9073,-7413,-9339,-4668,-2476,4603,6431,9603,14075,9593,-13151,13417,31015,-20235,5370,-5195,17336,-12617,29819,-4521,-24042,-25798,12711,-4466,-2015,-29896,-5472,-29606,-11164,-26408,21293,-1760,-30145,7208,3713,390,4795,12359,-387,-13299,-21208,-14625,20660,-32208,-17547,18886,-799,-5813,-31980,20442,-25129,23442,-3323,-6753,-13741,30833,-1174,-1318,-16125,20389,15921,-15275,-30564,226,17176,-2993,28044,-20525,-30417,-23411,-3694,9428,4903,-30006,-31289,17662,-25377,-15208,-4520,5592,17643,-273
68,-6583,22869,22178,-360,-14849,-26751,8667,-26599,-29657,-27863,5168,2671,4778,-20095,-18018,17529,7917,11103,-1698,30854,12758,-10908,8913,-11180,8483,23501,2893,11647,13174,-21776,-9843,2980,-25318,19967,18583,29467,-14858,-16571,-21401,-21790,20022,-1401,-14426,-10289,-4487,-9369,15033,-20762,-3218,-12186,30086,7691,-20805,-17225,26190,-27131,-14879,860,-1615,-16480,-13606,-11017,-17686,21669,-27686,24270,8215,10639,-21689,7003,5,23580,24212,9146,-14108,-29305,-27602,-20369,19919,14076,22340,7507,-26018,-30028,15790,-10343,-25482,-27656,2073,-22698,-7944,-31936,-4284,26474,30963,-28255,2338,-10721,-9146,-26812,-1632,26185,3167,10641,12322,3623,-5948,6422,5534,-15110,-14994,-30769,3615,-8382,-4572,2274,17823,12342,906,29778,-22842,20438,-31499,-4473,-333,16751,13890,-24930,-9992,-15819,19233,20040,23825,19940,-3432,-31146,-6955,32661,14195,-21597,-28865,-11160,22931,-7020,-20895,-16868,-29031,-12988,8622,-17328,10954,21813,11999,25493,12419,-27173,27819,28261,23809,10160,-30779,13497,-12470,-32316,20184,-25198,-6407,-17462,-28440,5426,-25151,-28778,-6649,-6916,-28238,12921,-5092,32176,-6655,2053,15144,-24851,-5820,21206,11273,26180,-24176,-23653,-586,12778,-21910,29261,28033,15667,-6155,-2180,-27971,29016,-31809,-3994,-28021,-27106,-19763,10729,-14167,29974,12355,-31350,19501,-14426,-32747,-24966,-1508,23870,17870,16552,13127,-7763,5933,20494,9380,31767,11544,-4735,-19581,-28796,5408,-28613,14271,26164,27122,-7111,27303,31352,20045,-15589,-14719,-11279,24973,-6975,2507,23105,-26641,911,30794,3592,-19550,-22264,21651,28449,19665,-30393,3263,-6120,-7457,-25577,-22758,12008,-30261,27854,-2314,9696,-7934,5490,-2536,-25459,5467,12525,-16,32099,31063,-29923,29541,18785,4087,-5240,-9191,-1012,-30752,12747,-27302,-7821,1349,11914,17260,-8559,6057,-19831,-4751,5482,953,29465,-22064,-7317,-15046,12127,-16967,19880,24192,15243,17141,15776,11996,-8331,28716,3668,16441,-15272,30998,-6208,-26727,-2625,-32362,5387,19475,-16160,-9989,26498,-32504,-20430,11103,-22474,-1020,6991,
15966,-5365,11557,43,-1583,-28650,-29284,3185,15110,-28791,-28646,-20642,-25976,-13932,8933,30915,-999,-30509,-25190,574,30158,1796,7570,-163,19696,4678,-5858,7319,-9237,-4987,-4722,29641,21695,-23385,-28917,14853,29968,34,1246,24962,-11912,-8503,-30604,-28921,5816,31327,-19438,-7117,16548,-1597,21171,-17253,27732,-25649,-27706,23846,-10271,3910,28686,-25300,-14907,17749,-29985,21629,25192,-27937,25716,-19539,-28681,-18519,-27056,-23960,30199,-5449,-20174,22530,14521,24689,21875,19311,31734,-8626,-31101,-2614,-1051,1529,-973,-30349,935,8148,-3154,-25110,-18075,14895,-2447,-807,-11775,28839,25733,-29393,11952,13580,5818,19327,15981,-26787,-16886,11035,6433,31774,-17834,-27229,-28809,883,-11266,-8689,-28412,-17088,-14019,-27547,24706,10862,9176,-19742,-20028,-8333,3194,-17763,16150,-23628,26693,-28661,20978,-28633,-5991,-16287,-15243,23920,20447,967,21463,-564,-24751,-26494,-26510,2512,6561,-8690,-6463,14549,-905,8168,13192,4253,3535,-17066,4028,-17408,1075,23037,8889,-14992,-31406,-24706,29289,25236,-9090,-9663,2308,17573,-24588,32042,-19587,-5868,9323,10764,6190,-12833,-3215,30772,29248,16608,-4788,-10208,25705,-27316,-30227,18924,-14563,3924,-25908,-5962,9420,-7153,9994,4650,31297,14816,-32340,-26296,2245,16909,12675,27922,11006,-13979,-878,-21413,16674,17365,8742,-31868,29426,-7528,-31619,14459,6047,-14806,14572,-1369,-7160,-7190,9225,-28405,-17373,18860,78,-25999,25301,14237,527,2972,-27252,-7999,22357,1146,8807,-11721,20128,-11663,32517,-19234,-14679,11686,27146,16630,9117,-6343,30626,20841,25532,-3972,23660,27016,-6925,-23593,25999,-30482,-28402,-20917,-22423,30183,14906,28573,9806,-19126,-25692,14730,19666,5034,3367,8148,-15708,31339,-15517,24207,-20470,20723,32321,29175,-12578,19636,19054,-12654,-15516,3570,29853,-8671,31709,19648,-25887,1914,-16614,-7368,2806,10036,-24589,-30750,28870,5636,21735,31979,8155,-23844,-3306,-15247,1452,-22479,-20626,-24752,-18089,4142,-30920,-24820,-4075,782,9923,896,-32264,4639,12329,-25284,26613,-22578,-21944,722,5810,-23558,10
549,-10673,32194,-30868,-23161,-13167,-17281,11947,19256,-3623,-15941,-2115,26060,10327,21537,-24569,16287,-28259,11205,-26642,-25811,-15718,3262,-16986,2992,9352,-1105,5221,12645,14988,-26805,-20264,26928,-11657,-2416,-6273,-3742,9152,12299,-23807,-24478,-18437,9226,-11027,-27703,9617,31251,-21820,-10914,-11937,24607,-9048,-30253,32263,-30801,-4021,789,-22884,-15490,-25916,-15047,25524,-24627,-10197,-5691,24186,-9617,-16704,6502,-23076,20251,-21153,5651,2436,26906,7015,24157,-230,-2035,-3675,-32327,16255,-2798,31918,-30857,8962,19595,5771,-26548 diff --git a/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice21_input0_int16.csv b/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice21_input0_int16.csv new file mode 100644 index 0000000..2648c5d --- /dev/null +++ b/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice21_input0_int16.csv @@ -0,0 +1 @@ +11387,20637,13896,17903,-18621,21102,6447,-30204,19296,17007,15264,16069,21205,-29947,26708,-24925,15499,32337,-32046,6440,-12936,18399,-3656,20330,-31601,25480,7915,-6454,6205,7635,-11938,20303,209,-20553,-5922,-386,-1408,15586,-12430,11255,-14652,10671,-3644,-7559,13205,16658,-5200,23125,2440,-25014,21589,-17922,4535,-5973,-7367,20666,20549,-11849,-24316,-14118,-799,31026,-31663,8757,-1330,19416,-21269,23415,23439,-14993,19088,8909,1246,-17360,23547,7798,15286,18246,-14528,-30048,3911,-10235,-19027,-25155,25639,-22998,-5803,-11693,4865,7675,-12080,14335,1338,11010,10646,-12148,10646,3730,384,16743,-17310,-10140,-29682,20252,29350,22601,32101,-19310,-13915,-17304,-20659,21864,-27478,-13146,27051,-18254,-8161,-637,30011,-15154,13153,-17040,-31359,19266,29379,-3426,288,-1698,-31974,12515,8479,13094,-18086,-17642,-9942,-5825,15567,-10388,32735,24635,-21591,-1682,30711,25608,14879,2036,-14547,11431,23710,1103,-29007,11268,-9966,-11964,-24434,-6654,24441,22279,15541,-4015,-23982,12410,-20368,-15844,27739,9351,9122,7898,2487,-6658,-11242,4520,21172,5912,26825,
8370,17323,-4851,-15205,-9,-20915,5009,13239,10140,14556,16876,16085,-27834,30690,-24869,19939,27081,-24490,-217,-29127,18099,20269,-32248,15544,9981,-5865,5674,18932,2419,14373,-11641,-7402,-4666,5145,-23910,-27096,-13725,-25635,-25941,8628,14446,21340,13228,-29806,9259,6172,-26352,-3644,1804,14420,16591,14154,-6799,-6302,-29874,11786,11611,-21200,-13037,-25513,-19622,-15129,13617,-12317,-14566,22601,5551,-21810,27409,-19761,15624,19267,-19595,31385,-26735,-3553,-331,18878,12559,819,-16681,-10589,18350,-5512,363,-6955,-6535,7614,-23875,-941,4510,-11529,7770,-17605,1721,-31892,10453,-14004,-1475,19011,-24479,-16525,5182,-1133,-45,26049,15860,10714,22172,-30594,-24118,25054,-28769,8638,-8114,11679,-26089,-31427,22182,-31010,26104,24321,32229,31539,18108,8122,431,2527,-26763,16348,6173,23040,20774,-15857,-7509,24469,-25016,7605,17819,-20211,19094,-9095,-20332,13592,1200,14524,17919,-17034,18243,16775,15499,6519,3657,-4363,-11634,15591,-17147,31969,3020,-1927,31892,-13834,-23126,-30830,10618,-4576,4784,12260,-14708,13668,4349,-27980,16523,-10329,29292,31295,15162,-28257,31477,-19203,-6673,32698,27882,-16118,-32044,22114,-16093,17916,18391,-17245,-4505,9757,-21369,-14200,11671,-14700,32026,29715,6545,-9181,-11015,-24309,-7701,-9102,6483,18019,-11334,-29865,9391,20812,32730,843,3034,-21796,-19639,2253,-21350,-5771,-29358,-7876,-19228,29669,28858,-18063,15954,30234,-9723,-4915,29426,7074,-19496,-27933,-24244,11414,-1933,2303,2319,14284,-17999,-1296,-22470,-14557,17341,5658,10746,-15479,18789,236,19089,-7289,-12854,-3324,-9028,-25431,15083,15962,6640,17932,9076,466,26046,-11012,-5676,26374,3082,21027,-13380,19336,-13745,-17169,7464,-466,7720,31609,-5609,27387,-17418,11709,22579,25147,29214,1061,-23474,-15470,-993,-28346,-21928,-26052,4573,19700,27240,22952,-27925,10921,30448,25346,32313,-24633,-29417,4009,28960,-4201,-11294,3496,22939,9713,6418,29060,26460,8762,-28516,22369,29191,-24975,-26519,24528,-22897,8812,11322,-8245,-30233,-25816,-8896,-25019,-4320,26624,32112,-5617
,-21812,15239,-14059,-6689,-17167,-1968,-11098,-30915,5023,-24444,-11325,-7452,31297,-22413,-11139,-13636,21662,23645,24729,21583,-10830,-10865,18867,-7080,22507,17922,9839,-27146,-13684,-6192,-15556,24796,-29922,-14378,20299,26289,23765,-28927,-27395,-29680,27464,28804,545,29777,-18509,13872,32415,-20748,-29163,22326,-18432,-6224,8532,-7754,27717,-17849,5833,-22921,-18737,-24314,14009,11085,-30982,1518,14774,-32135,27361,-29403,-27407,20139,-27238,17021,26294,15953,28344,-31060,-3006,-8665,16851,23549,14593,-1066,22458,-30047,-14935,5423,-16967,31202,-16115,25633,5326,-7897,-3173,20965,-32486,-22659,-28198,-20620,27692,-10348,17797,-19374,-6094,-22004,-15651,1960,18151,11305,-13226,-27786,-641,-31491,21933,25956,4216,-28447,2916,-20332,-5138,7765,-27351,8605,-31801,1809,15985,13686,17065,-27267,-28903,19798,-13075,-3130,-2342,-22059,13046,-29735,-19263,-1248,22428,-15358,-14617,11349,-29217,-1678,27669,14530,17956,-23604,-13790,-3219,-11729,6657,-29464,5700,-2669,-24223,-768,-11037,7815,18555,-447,6485,-30218,-27574,-14452,-30072,-26541,-12522,28651,20801,-20321,13879,27244,10072,6847,-21288,-30566,-7358,-17640,-23299,11678,-22845,3480,-643,4963,-25756,11388,-12006,-13331,-4811,-17834,-21666,-22356,10452,-21623,22270,-9490,-18674,8529,28073,8448,-22227,19135,1029,6588,18044,-31772,7180,22252,-457,-20155,17327,13468,-17701,14304,16839,-29930,31194,-29129,15295,3236,-12480,21162,-7426,13900,14331,8116,-15596,31724,28603,-16768,-801,26511,2798,3803,-8078,30513,-3950,-4756,23581,-24557,8903,-30862,-8042,10183,-30859,-601,-14716,5067,-31246,29559,27638,-32310,-21667,-27805,-23271,16834,28754,-29367,5087,-28022,-19863,-27770,3443,-14060,11435,-2538,-25526,25511,25597,30208,-23598,7673,13614,-8120,-19300,10821,-15005,-11198,-25062,-27148,-8517,-14207,-29421,17696,-29915,-12919,-27702,23940,-6498,3202,20811,-17903,-12282,20945,-6927,-15508,7947,5271,29702,-17748,-24729,11935,4301,14465,20314,28003,15842,-18964,29395,-12660,32307,3781,-4942,-18148,6964,-14101,5727,21199,-17
52,-28567,30045,17260,-24705,2623,-3947,-26836,-28770,-22000,-28317,8819,-28132,19183,22289,26376,-6190,12193,14434,-12249,-18862,-19811,-14191,-6167,-27205,26553,-6946,-20571,-20110,25149,-2498,-29346,-2711,-30000,-28724,-13623,-22149,-30497,11603,-9097,-751,-14665,4435,-31769,15938,-31790,10943,-25942,11679,-24492,-22035,2971,16909,5909,16796,28735,-29744,-6705,22537,-5126,31095,121,20173,3606,13313,-22597,2542,5277,934,7091,-3144,13623,-8237,29070,-5962,-26187,-1126,-16206,28117,-21894,-6403,15091,-43,3373,16790,-16316,-11232,20527,20242,-7419,-7975,-8050,23877,25272,28415,8856,5974,209,-14692,13738,20040,-21482,24704,-8937,-1931,-11955,20092,-27553,25378,17142,19423,-30152,-8776,-23774,28129,27498,30778,19906,1328,-28395,-26032,20987,31199,-25962,-29354,-18228,-2160,-140,-27708,-6161,23404,19149,-3570,20155,-10123,29471,-25951,-20901,11097,-29272,-21085,15540,32003,-1930,-25068,-17881,29780,-13364,32039,-22855,26813,650,13884,7299,28766,-12195,-8789,-14568,-28734,-25887,-25113,-27613,-1966,-9309,29446,-30792,6591,19123,29497,-22634,19416,-27813,-1016,23328,-17856,24058,-12513,15953,209,24382,3323,-21980,-2608,-25813,1078,-6361,8725,-2181,-1202,32130,15844,25107,-14218,3953,-32343,9100,-1969,-32221,-2151,-6896,-618,-20879,30717,-23364,25216,-580,-26448,24056,8797,-11524,-13336,-19600,-25573,-16092,-19400,16356,14888,-8757,-32154,23805,-18008,12707,11738,11737,-31742,18893,13283,19219,24630,-4974,-14803,-25193,19389,16004,-30798,12701,13437,16414,-2424,27868,3014,-17295,-5802,-12464,-694,-25122,31610,8374,-25495,18527,640,15787,22741,-4242,7751,-24005,-17417,-32510,-1159,19511,27585,17513,23628,32560,11673,11309,29700,-11260,11648,10102,842,9979,-5178,-9471,21270,-7620,-23678,30199,31617,22807,-32426,-18656,-3096,5284,24315,14339,14510,-26552,15629,29360,-7650,21140,16335,-20287,16337,-26804,13904,-29066,22928,10337,-32295,-10808,2599,-24748,-21640,17473,-19198,-7398,4487,-21808,-6022,-23531,13422,-10465,1191,-1031,9043,-25050,-14836,-12891,25669,7325,32065,-11925
,-14411,-8743,14407,12436,-20691,-26807,16766,21216,26845,-2480,14690,-19751,-1986,6496,16632,16988,-25123,1295,-20106,-2071,31084,22,3150,-20705,699,21231,-29547,-8141,-25077,-26610,24554,25147,8640,-14862,-19825,2972,-12947,243,-5430,-25698,-12590,-19881,-2941,15844,14819,-21036,-16671,8507,-22971,18961,-28974,-16875,-6045,32160,2894,31580,27164,23891,-13556,18079,-17392,2518,-4235,1970,3360,16873,10130,-22832,-29010,4487,-30760,-22944,-2153,7985,6587,-8324,1798,-2044,-30657,-14688,-10512,27930,16518,-6102,-2722,12767,-27929,-14303,-24356,-9561,-17913,21720,-6688,8961,-29469,24349,193,19365,4741,-7605,-634,20860,-1198,-20718,-19062,4178,-8784,-18635,-21748,16510,-30245,-2822,26178,32288,-28218,5975,-2046,8510,-15320,-8493,-11565,-20336,-18763,-24769,-6351,2763,8046,1586,19777,-11261,-29305,-10264,10930,19343,-7632,-12160,-17348,-16647,-29641,15757,-7597,17864,22121,-9331,30008,-18163,-1628,7191,19741,3052,-3066,11672,-23867,6564,6074,950,4178,-10810,14765,16387,-11808,-19842,30350,2293,-25044,11579,5951,29603,2139,23768,963,23225,14697,-6487,19201,-14318,-31756,-26259,-31207,12475,-4166,13165,6123,-19924,-30201,-2770,21460,16220,-30668,13554,18881,-13606,23781,-5311,-13977,18797,-17872,-22363,17643,24851,16368,-2364,28466,-9795,10596,-23660,27438,-2364,-17676,6564,2722,32135,-18120,6814,24503,28876,13404,-10207,-12851,30029,26696,23190,6114,-2171,-8098,-2573,-299,14870,99,21009,30194,-28846,14294,-1935,-9938,25292,18962,-15410,-15704,6783,11252,-27789,17192,25774,-30314,31317,1892,14741,-31655,27305,9641,90,-2710,-23327,3533,-25703,13901,-25231,-21328,12120,31975,-29573,-3281,31952,28133,23976,8373,3844,27812,29647,-20755,-23179,17862,-30842,-3948,-6929,-20669,-2496,-17188,29081,10685,26623,-23936,32756,1248,13544,-19351,4120,-17806,-21074,-5569,-8124,-10196,26432,17885,25561,-6893,21883,-7685,16785,-28462,9838,-23108,8068,16756,27682,15498,-19730,-17982,-8149,-25918,19361,8073,-95,13662,6875,-10368,-17307,-27132,18620,-21233,6186,12010,-6359,-26479,-6777,12465,-5
132,19819,-24164,-17900,-17434,14186,-1235,-26835,24110,-28045,-11387,14350,-23814,-30178,14794,10286,13284,2811,-20524,-26726,29244,16353,-24294,30270,26271,-21022,-27709,-28989,-22302,25878,-25004,-516,11773,-6535,28790,771,-3463,23120,-26790,23780,-5701,-32656,-15824,6171,9618,1900,-25794,15986,-14710,-32690,-9799,2514,12671,-32051,18625,565,18711,-2954,23488,-29923,25342,-5884,21403,2203,-10221,-18607,29513,-30764,3964,31847,4189,30040,17711,28312,-5579,10931,-15151,15365,27619,-5464,19662,2246,20838,9450,17400,-10641,-8904,-10177,-11929,13719,22551,27146,12997,17456,-25943,25318,11788,4056,-1531,-3221,-30127,-19405,26982,-3438,-29556,24904,-25396,-13119,30,20092,20221,-24562,-22967,-4256,-22170,25408,12330,-23480,-30221,25176,7169,-21736,-25716,9791,-12908,-23908,-11685,-741,-16566,29498,5217,9371,22729,-22991,24969,-30519,7955,16238,-31815,-22601,23579,-23216,30246,-3090,-25500,-21558,-25645,29138,28914,13826,-28437,-18570,22421,-12672,18559,1537,-14116,-10351,31111,-982,-28411,-1441,14310,-325,-21634,15932,-6110,10210,7865,15849,-28164,-11043,1219,29369,-15763,-20800,14820,21348,-6333,14321,-11345,-12687,-30539,-27258,16963,26540,10077,22399,-6479,-512,15578,31685,-31049,-225,-2234,28178,-18457,21656,10927,14889,6072,-19048,-26411,26097,26007,-1026,-31484,30676,-1835,20212,19954,-19853,-24221,13208,23905,-11315,25329,14868,5112,-21953,2870,-8558,29700,-29522,-7888,24084,-31925,-20230,28148,-9863,3026,-13760,5749,28978,-18504,-13974,9045,11281,-17368,-6167,-12117,-25379,-26606,6275,-25055,28663,-13654,-16315,-1016,10850,-22374,23635,22237,7413,14902,-14156,-10924,4409,10508,-20320,5126,-22094,30841,-23935,-12595,17687,15597,-10399,-12990,28924,10539,1552,31668,10245,-2803,21395,1199,21585,15680,7476,20646,23475,6243,4511,4706,23202,-31248,24405,17473,-18569,4552,3747,-10731,7140,11804,-12824,15990,-1672,7312,-129,-76,-15833,8891,15590,-5207,5452,20983,18371,-28519,19914,25479,9375,26693,-1019,-23963,-16734,16686,-23473,-9989,11204,-23400,-28318,30834,18703,-12
359,-19007,29939,23526,-25515,-3275,15340,-15794,17879,23383,-3697,1915,22218,16951,31028,28625,16408,-4607,-31082,27109,31440,2706,-4212,25687,19787,15321,15252,27723,5114,-24807,-6234,-17896,25988,-25776,-13170,6064,22616,8629,29986,7755,13773,9897,20347,6311,-20069,23468,-30981,-10980,-12952,-14875,-18103,12958,2398,-4001,2075,20213,3305,-13258,24918,944,253,-7485,-1310,20008,-6321,11566,-24313,-23302,30849,-8281,5559,-301,-25812,-3995,-21336,-31730,-11075,-29818,-32199,11364,-7000,-30189,-22949,15547,-24828,15142,9880,16488,7999,16457,20274,-31774,-31042,27567,-26047,-27416,29336,21026,-4687,-25515,-26901,-11685,-3080,22243,-5381,-2662,15480,23234,-8264,-13683,-12683,-3961,23756,-19966,-272,16612,-22457,24411,27346,23777,-25543,-7564,4247,5566,4752,-28809,5555,-26688,29501,23512,6491,10216,-31067,-961,-29437,-520,18223,-23736,7244,-22754,-18988,30844,-15247,8408,28702,-3235,11333,29228,2306,30348,-25902,10929,7818,-29164,13106,-6280,-24707,12821,-31082,27713,-8347,-32547,3729,-27284,-7886,27320,-9775,26191,-7391,-20240,31303,-13254,21557,24451,11813,-28929,-27764,24662,-30667,-10790,4916,-30726,4044,32005,2459,-28823,32468,4436,-15817,-4134,26482,-8753,-4959,12997,-7074,-20602,12542,-5407,-13641,2558,8690,23289,31696,-28817,27588,-6859,-24381,22957,13970,-19869,-25403,-12824,-13567,-15972,28180,11811,24898,-558,653,-27512,-18900,4159,-27670,-4969,-27284,31736,9231,27644,-31061,-12375,10940,5894,24920,-27778,-11219,29204,31242,19397,-5247,27073,-17825,11350,-142,-27011,-8022,3128,25097,26570,6282,24011,-6858,8307,13150,22028,-3588,-29354,8335,-3484,15764,-16154,-508,15679,-29509,-8236,-5570,-31971,-32600,-16436,-27345,25790,-3611,32275,-11444,-15344,20115,25946,10866,8589,-29391,32156,-22873,19388,-24509,-30760,24182,-32016,153,-26879,-31594,6183,-17692,-32045,-32744,2273,13250,-24090,6881,-14498,-2780,7223,9562,-28894,24812,-9234,-30545,19383,-7993,-30149,-28505,-7134,-20865,20368,-7607,15515,26464,-4844,-27660,9487,-7589,-7591,-31576,-31163,-6752,-25099,9819,11
077,8121,5030,25911,-31052,16903,-12281,2140,-31723,11697,-28832,7813,27981,-8442,-2083,-24125,-8274,-29736,-24202,13469,-22425,22474,28962,24142,-27155,-1557,-19422,-9632,1823,-7991,-18395,-5465,-4108,10172,24677,4868,-2089,-7204,19974,-7236,26610,-28254,31182,-19785,-20260,-21103,24447,-29657,-32665,22331,-9893,-4521,-25041,-23214,-9752,-31829,25525,-5401,16780,-27341,-5123,17669,1944,-22753,-8229,2854,6954,-14265,17917,-29969,5560,-25190,14151,3428,2757,14228,-24015,28557,2758,2151,2876,18515,-30474,-9552,30599,6412,-24046,10463,-9523,-17869,-2879,2118,-3835,17001,19426,2956,31492,1975,-25455,1520,13327,-12168,-10001,-6377,6669,2698,-18240,-13602,-29066,20126,-24607,11188,-6877,-8642,8501,-3645,-26291,1811,-12215,18135,-15695,-25316,-12366,-8132,13717,-23070,-10042,8196,-18286,-520,-8441,-14800,-7650,18210,-3354,-27908,28182,-24970,17304,-19774,-31471,6865,13504,16705,-17602,-12744,-18062,24681,1802,-4408,-12246,19606,-5123,-16549,21155,4538,-10410,4288,24041,-3099,-9524,-28588,7935,-11643,-8378,-16334,10529,-5676,-7712,25144,-21331,-1399,27332,-30596,-17754,-19501,6444,16187,-32074,30782,9186,6944,-19366,25336,9644,3568,3714,-20430,-13649,1696,6313,1813,-19776,12390,-26005,22255,270,22150,12317,32358,5842,22457,21579,10585,-13957,20646,27953,-429,-7508,-12881,-22810,-8306,-3233,-19784,24329,27550,30404,12610,28688,10110,31872,-22495,-10923,11093,13066,-19816,17046,8286,-30931,6025,275,11309,-8355,20575,-8226,-18648,26514,-19314,-25961,-13369,-26377,-29944,23076,9685,-15380,-15497,-29629,31169,-27509,338,30920,11957,25297,20646,31813,2362,-26432,12261,-10353,19028,7578,-8200,18253,-20901,-15854,-23957,5814,12146,-599,-13544,9876,2162,22049,-7876,31122,9606,24449,-28101,31336,-32046,-17529,-6661,-18209,13009,-20642,-18374,-16762,23526,7576,-28611,-21951,-2256,12203,1085,26467,-10343,-4052,-32100,29819,3421,26593,20881,12676,17217,-21190,23626,20909,4445,-8547,17264,12833,-25810,-12301,-11632,-15281,-24221,-19682,16707,27184,6234,-8353,26217,-20214,-27823,20236,-25
149,17731,-29012,19882,-19504,-24197,6455,30678,20532,23228,30289,13628,-359,-13726,15226,-9914,-15045,-28118,1631,19702,-27937,-6387,-8964,9186,6796,31806,20096,-21258,-23054,31513,25085,-13161,32218,-26228,-16667,-20116,693,-28764,18464,19467,-31464,-20051,18035,23506,-23526,22680,-4358,18305,-31407,31488,3029,6463,-7294,-5420,-14008,-6760,15645,28223,24742,5632,-18704,1001,-13539,15259,8392,-8025,25875,63,-31387,-23019,-13910,-4723,17388,-27212,17486,-22407,6712,10148,8674,30535,-25050,-3057,15933,-30778,23908,6997,-7001,-30958,2927,6389,28025,-9903,29872,-12073,-2610,13562,-11431,-8221,-26698,32104,-31664,-1207,-21441,28397,-29361,15247,-21885,-7344,-23274,-27245,-28526,-11653,-23506,12079,19639,9235,-26949,-11139,-14724,-3010,-16298,8131,28691,24418,-27862,29594,21790,20070,27632,17508,-13913,-23942,-29596,13856,-24735,2480,31305,26412,-26564,19606,-11484,15614,15839,-14926,-7418,-31656,24962,3940,-21652,-32041,-16632,-9076,18414,29296,7809,15427,-11421,-22442,8438,-30402,28096,81,-30335,-11110,-18489,25403,15763,-27607,7172,-27605,-31103,21955,-3552,19773,21720,-11050,15848,-9996,-3642,21869,23955,-6720,-25041,18317,-31407,-15899,7668,14304,15238,4384,17271,-16796,-27665,-16768,-10869,-14557,-11976,-13409,6593,-28030,13348,-30932,16360,-28447,25326,7207,6438,-8724,2782,-18444,23627,-24859,-4073,-26642,-4346,29227,-26791,28410,4230,2613,1994,10387,18354,1597,-9565,17100,-10441,9402,32642,-8188,21701,13089,-9515,-23745,18518,19877,-30670,-15575,19483,-6983,-13659,29834,-766,-4777,29814,6126,-9431,4030,-17001,16456,17189,-17446,-7937,7796,-9008,-27301,12545,-30860,-18809,30284,-23347,22737,3118,6366,-31635,4151,-2568,-14747,-5258,-20264,27441,27090,9616,-32507,-22321,22524,-31828,20499,-27189,11948,1255,-22411,31610,12675,19547,1493,9011,-15103,-25651,10695,3392,27511,-14452,-2106,10878,21751,-14950,11360,5151,20121,23947,-15307,-24423,-9430,-12606,8244,2963,16372,-22771,-10028,-25647,-32588,-5262,-2889,-14014,-8866,16281,-13143,16127,8183,-27186,2661,-8526,-1240
4,6734,17935,2908,-8722,-9911,6513,27192,9145,13270,16259,-11647,29281,-1014,4380,5993,-27814,-15768,4705,-2400,28656,-6534,30831,9186,-8359,-5547,18253,27338,-26564,-16700,16602,5092,-13692,26904,13238,-19582,-27078,-29115,-22769,28951,-14112,129,27317,-15871,-29268,-7966,-1352,-23391,22624,-4833,-10619,-6138,-3579,21179,-1930,-29360,20370,-5039,-14277,17290,31137,-29514,-14317,6106,1021,-28116,20337,-18149,-8786,8039,28011,25180,10546,-5676,-24327,-31382,-2341,-18184,31948,-28181,22707,-3979,-13426,11681,-27286,-6720,7028,-26315,4717,-30600,17457,8208,29824,-18378,-13364,-23277,32630,-29931,13613,28531,-12861,27455,29779,32092,-11188,-14647,-30720,-12812,-24061,7817,-6673,30288,-29446,-16916,-16726,-16444,-3302,-9381,9726,-13650,-3869,-22484,-25744,21402,-29963,-485,-15006,31440,9547,-27038,285,15385,28255,11223,27529,4495,-22979,-24658,-2163,-28653,20700,-25049,3595,4571,-27919,-22330,14577,3202,19010,19031,20382,-30717,212,28336,-13586,10726,-11758,17648,20418,1814,1592,5897,1629,-24718,-24699,-11260,15774,-17524,-24314,4429,11359,-12178,17288,4100,27076,17131,10303,15467,5585,15883,-18771,-18472,29733,31313,-6574,23048,14049,-3862,-23243,-6133,-11799,-26432,-18444,-22913,13707,14130,-16619,25172,17572,-27775,-10288,27728,-16804,6235,-10590,-27946,24530,-27321,31710,23113,-31918,-15061,-654,7816,24065,-13791,7556,-14718,21405,23101,-18288,-17101,21641,28511,-12845,-32374,-12362,7244,-298,-11174,-14521,-19269,-8968,-8201,-23871,-30361,7873,29463,-23121,7651,-5708,9572,27771,-30001,5370,-28151,-4028,31355,6005,-13219,-21459,8494,-3462,-28946,-10517,18029,-3267,-19125,5271,7236,23955,16449,3955,17787,-13131,23747,-27662,-31072,-2636,8383,13718,8154,6163,12174,20537,-11824,17404,-18954,-31798,-20114,29827,13103,20938,-14180,-13565,-29957,-28695,-15848,31557,-11982,-13164,32,3048,-10043,11568,-10326,-13958,-16481,-17330,-23581,-13399,23779,-21812,-22824,-12904,-2919,-10741,-10887,-12104,13755,-14918,1810,-31998,1650,-6362,-9711,7126,7609,-8629,15117,-11690,-12027,321
37,15412,11841,-23749,-27069,-19410,28849,27110,-29397,16711,-10680,28480,22301,9166,25384,27850,-24087,563,5654,15477,12620,31014,28844,-14567,1737,30030,30359,-22697,3224,24139,9849,20615,306,-28240,-31962,-32436,-30783,585,17030,13955,10721,21469,19398,32527,5447,-11294,8867,-30325,-30246,-367,-30504,22676,-1930,-26439,8474,6244,-9513,-29665,-19765,25173,25499,9179,18174,-31341,12329,7313,31841,-17861,-18887,23778,11801,21238,24747,27965,9521,20948,22441,27505,16430,31220,12640,28513,-30447,20538,-22382,30698,-2404,9362,15583,-26979,32002,-13227,-29327,3409,-29596,15980,24427,1788,20415,20600,-3024,16326,-4319,-16187,-26114,12584,27308,-17337,-28774,-9277,-31816,-29510,-2897,-28159,-28532,-23017,1220,9073,-7413,-9339,-4668,-2476,4603,6431,9603,14075,9593,-13151,13417,31015,-20235,5370,-5195,17336,-12617,29819,-4521,-24042,-25798,12711,-4466,-2015,-29896,-5472,-29606,-11164,-26408,21293,-1760,-30145,7208,3713,390,4795,12359,-387,-13299,-21208,-14625,20660,-32208,-17547,18886,-799,-5813,-31980,20442,-25129,23442,-3323,-6753,-13741,30833,-1174,-1318,-16125,20389,15921,-15275,-30564,226,17176,-2993,28044,-20525,-30417,-23411,-3694,9428,4903,-30006,-31289,17662,-25377,-15208,-4520,5592,17643,-27368,-6583,22869,22178,-360,-14849,-26751,8667,-26599,-29657,-27863,5168,2671,4778,-20095,-18018,17529,7917,11103,-1698,30854,12758,-10908,8913,-11180,8483,23501,2893,11647,13174,-21776,-9843,2980,-25318,19967,18583,29467,-14858,-16571,-21401,-21790,20022,-1401,-14426,-10289,-4487,-9369,15033,-20762,-3218,-12186,30086,7691,-20805,-17225,26190,-27131,-14879,860,-1615,-16480,-13606,-11017,-17686,21669,-27686,24270,8215,10639,-21689,7003,5,23580,24212,9146,-14108,-29305,-27602,-20369,19919,14076,22340,7507,-26018,-30028,15790,-10343,-25482,-27656,2073,-22698,-7944,-31936,-4284,26474,30963,-28255,2338,-10721,-9146,-26812,-1632,26185,3167,10641,12322,3623,-5948,6422,5534,-15110,-14994,-30769,3615,-8382,-4572,2274,17823,12342,906,29778,-22842,20438,-31499,-4473,-333,16751,13890,-24930
,-9992,-15819,19233,20040,23825,19940,-3432,-31146,-6955,32661,14195,-21597,-28865,-11160,22931,-7020,-20895,-16868,-29031,-12988,8622,-17328,10954,21813,11999,25493,12419,-27173,27819,28261,23809,10160,-30779,13497,-12470,-32316,20184,-25198,-6407,-17462,-28440,5426,-25151,-28778,-6649,-6916,-28238,12921,-5092,32176,-6655,2053,15144,-24851,-5820,21206,11273,26180,-24176,-23653,-586,12778,-21910,29261,28033,15667,-6155,-2180,-27971,29016,-31809,-3994,-28021,-27106,-19763,10729,-14167,29974,12355,-31350,19501,-14426,-32747,-24966,-1508,23870,17870,16552,13127,-7763,5933,20494,9380,31767,11544,-4735,-19581,-28796,5408,-28613,14271,26164,27122,-7111,27303,31352,20045,-15589,-14719,-11279,24973,-6975,2507,23105,-26641,911,30794,3592,-19550,-22264,21651,28449,19665,-30393,3263,-6120,-7457,-25577,-22758,12008,-30261,27854,-2314,9696,-7934,5490,-2536,-25459,5467,12525,-16,32099,31063,-29923,29541,18785,4087,-5240,-9191,-1012,-30752,12747,-27302,-7821,1349,11914,17260,-8559,6057,-19831,-4751,5482,953,29465,-22064,-7317,-15046,12127,-16967,19880,24192,15243,17141,15776,11996,-8331,28716,3668,16441,-15272,30998,-6208,-26727,-2625,-32362,5387,19475,-16160,-9989,26498,-32504,-20430,11103,-22474,-1020,6991,15966,-5365,11557,43,-1583,-28650,-29284,3185,15110,-28791,-28646,-20642,-25976,-13932,8933,30915,-999,-30509,-25190,574,30158,1796,7570,-163,19696,4678,-5858,7319,-9237,-4987,-4722,29641,21695,-23385,-28917,14853,29968,34,1246,24962,-11912,-8503,-30604,-28921,5816,31327,-19438,-7117,16548,-1597,21171,-17253,27732,-25649,-27706,23846,-10271,3910,28686,-25300,-14907,17749,-29985,21629,25192,-27937,25716,-19539,-28681,-18519,-27056,-23960,30199,-5449,-20174,22530,14521,24689,21875,19311,31734,-8626,-31101,-2614,-1051,1529,-973,-30349,935,8148,-3154,-25110,-18075,14895,-2447,-807,-11775,28839,25733,-29393,11952,13580,5818,19327,15981,-26787,-16886,11035,6433,31774,-17834,-27229,-28809,883,-11266,-8689,-28412,-17088,-14019,-27547,24706,10862,9176,-19742,-20028,-8333,3194,-17763,16
150,-23628,26693,-28661,20978,-28633,-5991,-16287,-15243,23920,20447,967,21463,-564,-24751,-26494,-26510,2512,6561,-8690,-6463,14549,-905,8168,13192,4253,3535,-17066,4028,-17408,1075,23037,8889,-14992,-31406,-24706,29289,25236,-9090,-9663,2308,17573,-24588,32042,-19587,-5868,9323,10764,6190,-12833,-3215,30772,29248,16608,-4788,-10208,25705,-27316,-30227,18924,-14563,3924,-25908,-5962,9420,-7153,9994,4650,31297,14816,-32340,-26296,2245,16909,12675,27922,11006,-13979,-878,-21413,16674,17365,8742,-31868,29426,-7528,-31619,14459,6047,-14806,14572,-1369,-7160,-7190,9225,-28405,-17373,18860,78,-25999,25301,14237,527,2972,-27252,-7999,22357,1146,8807,-11721,20128,-11663,32517,-19234,-14679,11686,27146,16630,9117,-6343,30626,20841,25532,-3972,23660,27016,-6925,-23593,25999,-30482,-28402,-20917,-22423,30183,14906,28573,9806,-19126,-25692,14730,19666,5034,3367,8148,-15708,31339,-15517,24207,-20470,20723,32321,29175,-12578,19636,19054,-12654,-15516,3570,29853,-8671,31709,19648,-25887,1914,-16614,-7368,2806,10036,-24589,-30750,28870,5636,21735,31979,8155,-23844,-3306,-15247,1452,-22479,-20626,-24752,-18089,4142,-30920,-24820,-4075,782,9923,896,-32264,4639,12329,-25284,26613,-22578,-21944,722,5810,-23558,10549,-10673,32194,-30868,-23161,-13167,-17281,11947,19256,-3623,-15941,-2115,26060,10327,21537,-24569,16287,-28259,11205,-26642,-25811,-15718,3262,-16986,2992,9352,-1105,5221,12645,14988,-26805,-20264,26928,-11657,-2416,-6273,-3742,9152,12299,-23807,-24478,-18437,9226,-11027,-27703,9617,31251,-21820,-10914,-11937,24607,-9048,-30253,32263,-30801,-4021,789,-22884,-15490,-25916,-15047,25524,-24627,-10197,-5691,24186,-9617,-16704,6502,-23076,20251,-21153,5651,2436,26906,7015,24157,-230,-2035,-3675,-32327,16255,-2798,31918,-30857,8962,19595,5771,-26548 diff --git a/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice22.tflite b/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice22.tflite new file mode 100644 index 0000000..8a12507 Binary 
files /dev/null and b/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice22.tflite differ diff --git a/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice22_golden_int16.csv b/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice22_golden_int16.csv new file mode 100644 index 0000000..993472e --- /dev/null +++ b/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice22_golden_int16.csv @@ -0,0 +1 @@ +-8022,25213,-1802,-16772,-6541,9091,21831,-383,-12293,28850,28667,23346,-23117,17705,26327,10310,12353,-17825,-8040,-31104,-2701,-7989,-2853,-1465,28807,-31500,-9256,13876,-26087,3127,20025,1507,-15532,-27986,-27810,3244,-29711,-11533,-27418,1751,-11534,-25160,23306,21624,17219,30422,-340,-5564,-26375,15611,-19271,-18918,8331,-13326,-17305,4273,2747,-2475,-28820,2384,-1604,-15425,11581,1723,-13090,-19056,-17484,-15456,22639,-4079,11666,8367,18670,27992,22577,2553,-3190,15463,-28503,-7742,-13049,-31749,-24854,18981,7414,18858,-1434,-12754,21732,-20395,9892,-3916,-26515,-6003,-9970,16383,10295,17036,981,8553,-28392,30820,22113,-26623,-24042,-9013,-11794,-14750,-25847,7345,18995,18346,-7112,-28717,-18912,-15615,-24462,-811,22904,-6258,17863,-1192,-11119,24450,-3622,-12402,2770,21355,28112,-8917,-2406,-2271,14605,25597,-13477,-31157,21280,1369,-5720,-6365,-1074,18899,-10426,-6806,17086,24422,-25156,27640,-24723,13638,-15843,4250,9355,-23361,-443,-3679,-28697,-16232,-22479,-4518,9107,24392,12454,5257,4976,26675,29089,-31629,24391,18501,-4442,5162,1100,7599,-7065,20,28647,10661,-19145,19251,-188,-30616,-4724,19544,11074,31581,-28599,11408,8456,-1242,-9726,-28234,14903,-13149,6218,-24278,24973,-22821,-11814,6863,8890,-5100,-15995,-30692,-7032,7359,-7652,-29268,-2942,27289,-3967,-18826,-23711,32574,24606,12710,30148,943,-10449,25411,9512,-12221,-1522,13166,-18412,-20223,1895,-3402,7554,-24340,-19078,30798,7590,31632,5124,22770,6069,-15211,18003,-13506,9746,18439,-14216,6633,15622,-9136,24686,83
75,27506,8992,-12880,20073,13424,-9567,26791,-27842,15653,14532,-18178,1472,-2669,-19902,5152,30815,-29262,-26513,11580,-30128,-8206,25166,-23453,-25598,5808,13916,-11532,151,30854,-16791,-15501,-17494,-25228,12664,-3604,14489,16832,1125,-12368,30987,-20089,6618,-29236,9802,22514,-26443,-19514,-15534,-18251,8365,32256,453,-18063,-19316,-7543,-3939,23134,-29302,-26816,27299,-12346,-14096,-3900,-13719,-13733,-7188,-30955,10540,-13644,15815,-22362,-26915,-26055,13311,9486,25986,-28200,-8182,19464,713,-29712,11750,5319,25880,-19391,28322,3860,9993,-3311,-19456,25387,15462,-3896,18605,12102,13524,-2592,30019,13243,9942,14601,7049,19442,-15690,-6467,-20338,-18116,12789,-7148,-18981,-1426,808,-21537,5898,-7920,-1509,10702,17240,-24826,32283,-3442,14877,-26651,12177,-1875,-28164,-18783,24410,21113,14003,-29824,-30540,26555,4803,-10836,16845,6348,-8922,-4927,17799,-6309,-29318,-21043,-16051,19074,17802,31556,-14404,-31762,-17438,-32603,4532,24570,30382,-18662,8173,-32166,-3252,-15601,347,7546,23477,-20337,-23515,-10471,-30345,290,28988,1831,9700,4142,11633,169,29580,26487,-29472,-20171,13836,16725,9707,-18403,-6336,28346,-5838,-32033,413,-16978,20405,1139,-28911,30477,-2424,8316,8519,13990,-27523,-5279,553,24780,-17932,-9261,-892,-20073,-23940,-17102,-628,-10774,12806,22886,-14546,21978,-30858,6861,6718,32067,-22792,-26079,-12645,3873,-27720,-18331,2419,32316,-10570,6977,-8264,-26291,22706,3110,29862,-9755,-26098,24972,-26541,-26451,12944,22084,-25871,-30289,-757,1747,-18970,26485,-21550,16420,4139,-4453,-21503,-17708,-2187,9270,30902,-23434,14643,27297,24158,-2875,31195,-17750,23211,30677,-31051,-6974,18271,-27473,5466,12418,31529,-27066,-22790,-32736,8100,1240,15254,-18428,20105,-20214,-8945,25081,-18559,15058,15208,16433,5814,-18848,-9311,-28214,-12340,-6964,22906,-30982,3119,29049,5563,395,-29580,24929,14972,10934,-14822,-16634,-17574,-13176,31456,7873,-31677,-6439,-1428,-13178,-555,4801,25850,10731,-8817,-14974,14808,7696,23304,14029,-8006,-1271,-31744,24419,-20986,3946
,-1441,11869,9379,13942,26441,249,15907,10749,19151,22058,30174,-9944,-5454,-14482,15582,-18696,27769,23812,6339,-9371,-4915,-29209,-28763,-7505,4846,-12,20192,-4026,-9535,5946,-15069,17363,29918,17150,-19214,-3540,-9676,8899,7936,-9771,-24533,-16468,-3888,-32031,-11002,16143,4953,4444,9384,-233,27018,-8069,-18533,-8783,-31601,-12678,-17390,1701,8787,26842,-10014,17105,-5435,-28726,26236,13100,-1439,-16806,7231,-4760,-20681,-25505,-5827,-26779,-17933,-12284,23049,-25316,-25111,23140,3150,13996,29577,-10685,-730,-25975,7158,-30028,22216,18360,-20166,1621,-8028,-17277,-21149,-19839,14361,11681,6981,29803,13462,4933,14932,-19554,-1241,26157,-12829,-15890,-26296,-6059,-28720,-15000,-19813,16093,19207,-24146,-22031,-20297,-3592,20131,26386,16014,11412,30102,-9974,-7413,11259,-22835,-29027,-6807,-12510,-16569,-742,-18441,-15132,2863,31935,-28974,17174,7475,-266,13918,4400,214,24444,6505,-31812,15023,-22326,20976,-19319,-25922,12859,-13924,24683,-1222,-11959,27464,24778,13251,-10558,-830,20275,22235,-30364,-13464,-8687,21280,-25281,30436,16754,-8897,-28988,4301,-9189,-29472,20976,15097,-26245,14666,21904,-32762,10588,-23386,-30249,15298,-26493,-3424,-30846,-14869,32340,-7272,-26882,-4401,-21761,-2453,2367,10635,-28276,28851,27788,-13671,-27594,-1972,-17663,-29339,-31007,-13738,25774,4049,26542,-32657,7847,-2240,-27193,-32656,-24034,7304,3986,-26906,-4615,-29035,3063,-5012,31360,91,-18496,12599,-30559,-31809,20580,-10578,-26709,-22414,12445,-6511,-66,-4663,-5826,-1183,26156,6059,1175,19864,9387,2843,22071,11533,-12917,13089,26118,13588,-2915,24653,32308,-95,-15639,-4054,32121,-27738,22196,-3495,-14450,16927,-16571,-18018,7511,-25631,16124,12627,-19967,2154,-8872,-10836,18047,-25445,-18817,-1594,29571,20071,1625,1847,-31589,-25389,5172,8076,-9987,-13323,7258,5539,-31170,23853,-30710,-985,-3419,6434,11909,-3932,-15514,-11511,152,-4143,-22786,25849,14574,27302,9630,10118,14264,-7031,-25236,18646,25114,22460,-23876,-1197,-13089,-20183,14270,8359,5871,-21663,-12912,17661,14485,-
13520,21012,21105,-29425,-3531,-6679,16986,-23279,3104,13980,-1362,-2511,-31237,-4988,21250,-21419,10622,-16065,-27771,-9003,31578,-20690,16163,4849,-14001,-7016,-15983,10672,-17790,19897,-26622,5002,-32392,-26692,8474,21561,-23883,-22738,4671,-11906,-10797,10141,-7519,24298,16204,-1920,-27448,-15716,-19312,7142,-7027,6611,-5210,10125,2718,-21454,26819,17178,2510,-22191,5463,-5670,-21670,-23929,-12478,-3605,-14181,30183,32097,26734,31220,22449,3751,8524,29488,11773,-28107,-28848,18136,-29564,5446,-15444,17623,18221,-27736,8030,-21379,-303,-17874,-32348,-12413,-11669,1342,10205,26697,5418,-23253,-1597,-15933,10355,-26798,24869,32714,20518,-3818,20513,-30224,-4084,-3716,7091,-31165,21837,-4829,-219,4970,-14587,-11304,-21460,15468,-13004,2664,32293,-31500,-1166,26638,17313,21261,8687,-2330,14684,10642,-5816,-9576,-4821,-14088,-7846,-11965,-28527,30128,20625,10947,-3455,-19443,-16607,-32186,11177,-1828,8347,13535,17954,-15368,21904,26960,18624,22341,21283,-10585,-23239,-17842,-3720,-11226,4916,-6195,8164,-11033,-21996,-10098,-13071,-16685,-6130,28829,-22668,27919,21591,14708,253,-3474,-5824,12616,-7822,-3361,-22522,-26849,-14344,4125,-28442,-20895,43,-21653,-16551,-14925,28397,-32047,1475,-20944,20776,-15515,-21371,26612,29470,26680,14950,-29054,13718,13958,9830,-3459,-32651,14827,-5485,-23209,-4671,24783,10605,25916,-12399,31948,-17533,-14223,-26191,-12488,8674,-2407,8305,1245,12868,-12707,5412,2913,-5093,-21976,11418,-1044,-21262,28774,31230,-30653,26570,-29455,-18281,9830,-26199,-13664,3092,-21118,19316,-27543,-11047,18742,-30533,-16116,15287,236,20807,-26129,-12592,-19875,-7135,-351,-11960,2175,4515,9319,5052,-16035,21609,-11767,-4687,-7518,-2850,-16332,-14814,-21982,6419,-12730,8385,-2525,-30685,20591,24729,-676,23513,26547,-3738,4354,-4500,-29775,-22697,-22852,-21632,-7071,-21356,-13805,25541,24971,29172,-844,-280,-14481,5450,9797,31480,-24662,-10566,773,-12978,29152,-9926,4974,10902,-10345,19333,11079,-21109,22801,25079,-2982,13293,9365,28346,856,6769,-16861,2735
8,1840,15506,960,6289,-10708,-14139,-19293,31566,3030,-31285,-26691,4422,-7050,-9669,-4449,16898,15233,21325,-18816,-12795,26713,763,25162,17832,-7908,25183,-11476,29377,-32380,5664,6656,31044,18999,-11251,-23429,-13798,32380,27667,9797,746,-8088,6865,-16702,22370,-15466,-13484,-21454,31846,18378,23701,-13286,9919,5324,6528,18425,-62,21187,-27841,-25077,18400,-175,28564,9196,-29754,11512,-15918,-30319,-8059,-3997,10366,-30327,-24464,-27509,32511,31994,16859,16464,28252,31179,1141,-14672,-31829,-25487,-28883,-25784,21312,3060,-5993,9609,-27377,-23239,26037,-11722,17829,-28021,-24339,23065,-15446,-1271,-30555,-20454,8256,-8421,15657,5989,-1061,18672,-32322,14503,-32596,-9131,7597,-14541,30092,19381,171,-2064,-9187,14584,-10648,-28721,-17530 diff --git a/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice22_input0_int16.csv b/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice22_input0_int16.csv new file mode 100644 index 0000000..f00bd91 --- /dev/null +++ b/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice22_input0_int16.csv @@ -0,0 +1 @@ 
+-8022,25213,-1802,-16772,-6541,9091,21831,-383,-12293,28850,28667,23346,-23117,17705,26327,10310,12353,-17825,-8040,-31104,-2701,-7989,-2853,-1465,28807,-31500,-9256,13876,-26087,3127,20025,1507,-15532,-27986,-27810,3244,-29711,-11533,-27418,1751,-11534,-25160,23306,21624,17219,30422,-340,-5564,-26375,15611,-19271,-18918,8331,-13326,-17305,4273,2747,-2475,-28820,2384,-1604,-15425,11581,1723,-13090,-19056,-17484,-15456,22639,-4079,11666,8367,18670,27992,22577,2553,-3190,15463,-28503,-7742,-13049,-31749,-24854,18981,7414,18858,-1434,-12754,21732,-20395,9892,-3916,-26515,-6003,-9970,16383,10295,17036,981,8553,-28392,30820,22113,-26623,-24042,-9013,-11794,-14750,-25847,7345,18995,18346,-7112,-28717,-18912,-15615,-24462,-811,22904,-6258,17863,-1192,-11119,24450,-3622,-12402,2770,21355,28112,-8917,-2406,-2271,14605,25597,-13477,-31157,21280,1369,-5720,-6365,-1074,18899,-10426,-6806,17086,24422,-25156,27640,-24723,13638,-15843,4250,9355,-23361,-443,-3679,-28697,-16232,-22479,-4518,9107,24392,12454,5257,4976,26675,29089,-31629,24391,18501,-4442,5162,1100,7599,-7065,20,28647,10661,-19145,19251,-188,-30616,-4724,19544,11074,31581,-28599,11408,8456,-1242,-9726,-28234,14903,-13149,6218,-24278,24973,-22821,-11814,6863,8890,-5100,-15995,-30692,-7032,7359,-7652,-29268,-2942,27289,-3967,-18826,-23711,32574,24606,12710,30148,943,-10449,25411,9512,-12221,-1522,13166,-18412,-20223,1895,-3402,7554,-24340,-19078,30798,7590,31632,5124,22770,6069,-15211,18003,-13506,9746,18439,-14216,6633,15622,-9136,24686,8375,27506,8992,-12880,20073,13424,-9567,26791,-27842,15653,14532,-18178,1472,-2669,-19902,5152,30815,-29262,-26513,11580,-30128,-8206,25166,-23453,-25598,5808,13916,-11532,151,30854,-16791,-15501,-17494,-25228,12664,-3604,14489,16832,1125,-12368,30987,-20089,6618,-29236,9802,22514,-26443,-19514,-15534,-18251,8365,32256,453,-18063,-19316,-7543,-3939,23134,-29302,-26816,27299,-12346,-14096,-3900,-13719,-13733,-7188,-30955,10540,-13644,15815,-22362,-26915,-26055,13311,9486,25986,-28200,-
8182,19464,713,-29712,11750,5319,25880,-19391,28322,3860,9993,-3311,-19456,25387,15462,-3896,18605,12102,13524,-2592,30019,13243,9942,14601,7049,19442,-15690,-6467,-20338,-18116,12789,-7148,-18981,-1426,808,-21537,5898,-7920,-1509,10702,17240,-24826,32283,-3442,14877,-26651,12177,-1875,-28164,-18783,24410,21113,14003,-29824,-30540,26555,4803,-10836,16845,6348,-8922,-4927,17799,-6309,-29318,-21043,-16051,19074,17802,31556,-14404,-31762,-17438,-32603,4532,24570,30382,-18662,8173,-32166,-3252,-15601,347,7546,23477,-20337,-23515,-10471,-30345,290,28988,1831,9700,4142,11633,169,29580,26487,-29472,-20171,13836,16725,9707,-18403,-6336,28346,-5838,-32033,413,-16978,20405,1139,-28911,30477,-2424,8316,8519,13990,-27523,-5279,553,24780,-17932,-9261,-892,-20073,-23940,-17102,-628,-10774,12806,22886,-14546,21978,-30858,6861,6718,32067,-22792,-26079,-12645,3873,-27720,-18331,2419,32316,-10570,6977,-8264,-26291,22706,3110,29862,-9755,-26098,24972,-26541,-26451,12944,22084,-25871,-30289,-757,1747,-18970,26485,-21550,16420,4139,-4453,-21503,-17708,-2187,9270,30902,-23434,14643,27297,24158,-2875,31195,-17750,23211,30677,-31051,-6974,18271,-27473,5466,12418,31529,-27066,-22790,-32736,8100,1240,15254,-18428,20105,-20214,-8945,25081,-18559,15058,15208,16433,5814,-18848,-9311,-28214,-12340,-6964,22906,-30982,3119,29049,5563,395,-29580,24929,14972,10934,-14822,-16634,-17574,-13176,31456,7873,-31677,-6439,-1428,-13178,-555,4801,25850,10731,-8817,-14974,14808,7696,23304,14029,-8006,-1271,-31744,24419,-20986,3946,-1441,11869,9379,13942,26441,249,15907,10749,19151,22058,30174,-9944,-5454,-14482,15582,-18696,27769,23812,6339,-9371,-4915,-29209,-28763,-7505,4846,-12,20192,-4026,-9535,5946,-15069,17363,29918,17150,-19214,-3540,-9676,8899,7936,-9771,-24533,-16468,-3888,-32031,-11002,16143,4953,4444,9384,-233,27018,-8069,-18533,-8783,-31601,-12678,-17390,1701,8787,26842,-10014,17105,-5435,-28726,26236,13100,-1439,-16806,7231,-4760,-20681,-25505,-5827,-26779,-17933,-12284,23049,-25316,-25111,23140,
3150,13996,29577,-10685,-730,-25975,7158,-30028,22216,18360,-20166,1621,-8028,-17277,-21149,-19839,14361,11681,6981,29803,13462,4933,14932,-19554,-1241,26157,-12829,-15890,-26296,-6059,-28720,-15000,-19813,16093,19207,-24146,-22031,-20297,-3592,20131,26386,16014,11412,30102,-9974,-7413,11259,-22835,-29027,-6807,-12510,-16569,-742,-18441,-15132,2863,31935,-28974,17174,7475,-266,13918,4400,214,24444,6505,-31812,15023,-22326,20976,-19319,-25922,12859,-13924,24683,-1222,-11959,27464,24778,13251,-10558,-830,20275,22235,-30364,-13464,-8687,21280,-25281,30436,16754,-8897,-28988,4301,-9189,-29472,20976,15097,-26245,14666,21904,-32762,10588,-23386,-30249,15298,-26493,-3424,-30846,-14869,32340,-7272,-26882,-4401,-21761,-2453,2367,10635,-28276,28851,27788,-13671,-27594,-1972,-17663,-29339,-31007,-13738,25774,4049,26542,-32657,7847,-2240,-27193,-32656,-24034,7304,3986,-26906,-4615,-29035,3063,-5012,31360,91,-18496,12599,-30559,-31809,20580,-10578,-26709,-22414,12445,-6511,-66,-4663,-5826,-1183,26156,6059,1175,19864,9387,2843,22071,11533,-12917,13089,26118,13588,-2915,24653,32308,-95,-15639,-4054,32121,-27738,22196,-3495,-14450,16927,-16571,-18018,7511,-25631,16124,12627,-19967,2154,-8872,-10836,18047,-25445,-18817,-1594,29571,20071,1625,1847,-31589,-25389,5172,8076,-9987,-13323,7258,5539,-31170,23853,-30710,-985,-3419,6434,11909,-3932,-15514,-11511,152,-4143,-22786,25849,14574,27302,9630,10118,14264,-7031,-25236,18646,25114,22460,-23876,-1197,-13089,-20183,14270,8359,5871,-21663,-12912,17661,14485,-13520,21012,21105,-29425,-3531,-6679,16986,-23279,3104,13980,-1362,-2511,-31237,-4988,21250,-21419,10622,-16065,-27771,-9003,31578,-20690,16163,4849,-14001,-7016,-15983,10672,-17790,19897,-26622,5002,-32392,-26692,8474,21561,-23883,-22738,4671,-11906,-10797,10141,-7519,24298,16204,-1920,-27448,-15716,-19312,7142,-7027,6611,-5210,10125,2718,-21454,26819,17178,2510,-22191,5463,-5670,-21670,-23929,-12478,-3605,-14181,30183,32097,26734,31220,22449,3751,8524,29488,11773,-28107,-28848,1813
6,-29564,5446,-15444,17623,18221,-27736,8030,-21379,-303,-17874,-32348,-12413,-11669,1342,10205,26697,5418,-23253,-1597,-15933,10355,-26798,24869,32714,20518,-3818,20513,-30224,-4084,-3716,7091,-31165,21837,-4829,-219,4970,-14587,-11304,-21460,15468,-13004,2664,32293,-31500,-1166,26638,17313,21261,8687,-2330,14684,10642,-5816,-9576,-4821,-14088,-7846,-11965,-28527,30128,20625,10947,-3455,-19443,-16607,-32186,11177,-1828,8347,13535,17954,-15368,21904,26960,18624,22341,21283,-10585,-23239,-17842,-3720,-11226,4916,-6195,8164,-11033,-21996,-10098,-13071,-16685,-6130,28829,-22668,27919,21591,14708,253,-3474,-5824,12616,-7822,-3361,-22522,-26849,-14344,4125,-28442,-20895,43,-21653,-16551,-14925,28397,-32047,1475,-20944,20776,-15515,-21371,26612,29470,26680,14950,-29054,13718,13958,9830,-3459,-32651,14827,-5485,-23209,-4671,24783,10605,25916,-12399,31948,-17533,-14223,-26191,-12488,8674,-2407,8305,1245,12868,-12707,5412,2913,-5093,-21976,11418,-1044,-21262,28774,31230,-30653,26570,-29455,-18281,9830,-26199,-13664,3092,-21118,19316,-27543,-11047,18742,-30533,-16116,15287,236,20807,-26129,-12592,-19875,-7135,-351,-11960,2175,4515,9319,5052,-16035,21609,-11767,-4687,-7518,-2850,-16332,-14814,-21982,6419,-12730,8385,-2525,-30685,20591,24729,-676,23513,26547,-3738,4354,-4500,-29775,-22697,-22852,-21632,-7071,-21356,-13805,25541,24971,29172,-844,-280,-14481,5450,9797,31480,-24662,-10566,773,-12978,29152,-9926,4974,10902,-10345,19333,11079,-21109,22801,25079,-2982,13293,9365,28346,856,6769,-16861,27358,1840,15506,960,6289,-10708,-14139,-19293,31566,3030,-31285,-26691,4422,-7050,-9669,-4449,16898,15233,21325,-18816,-12795,26713,763,25162,17832,-7908,25183,-11476,29377,-32380,5664,6656,31044,18999,-11251,-23429,-13798,32380,27667,9797,746,-8088,6865,-16702,22370,-15466,-13484,-21454,31846,18378,23701,-13286,9919,5324,6528,18425,-62,21187,-27841,-25077,18400,-175,28564,9196,-29754,11512,-15918,-30319,-8059,-3997,10366,-30327,-24464,-27509,32511,31994,16859,16464,28252,31179,1141,-14
672,-31829,-25487,-28883,-25784,21312,3060,-5993,9609,-27377,-23239,26037,-11722,17829,-28021,-24339,23065,-15446,-1271,-30555,-20454,8256,-8421,15657,5989,-1061,18672,-32322,14503,-32596,-9131,7597,-14541,30092,19381,171,-2064,-9187,14584,-10648,-28721,-17530,6803,-14007,-29309,10328,-31497,26415,22968,22254,9884,18287,-2831,-13756,8229,24125,-25554,-10407,16466,-77,-723,1175,-848,-25697,-26748,-13240,-30535,9445,27493,7937,-30695,7729,-12550,-19806,-3653,-10551,23106,-18662,2499,21550,-18463,27596,10378,-20299,9898,16041,-21453,5388,5082,-9436,31985,-9546,3416,-7085,-8291,20886,171,27736,-4386,18080,30360,31266,26408,222,27673,-8274,13946,-15408,28595,17272,7374,20193,2534,-23334,-26309,-25771,-32017,-6050,24836,-3268,7289,-26179,-6521,-711,389,19350,20228,-32379,5140,-21320,23590,30199,-28067,30371,-20197,-12722,3867,-18825,-14559,21913,-5422,29055,-975,-13262,-20247,26352,5981,24634,9019,989,3489,27169,803,20206,-31183,25221,-23492,-10144,-29814,-24697,-3273,-10012,1865,-397,22777,-4577,1692,21681,-19099,-10823,9901,14500,17816,-20073,-8583,14475,25924,-945,32099,13854,2403,-23927,30718,12462,-14944,14906,17963,4919,10346,25055,-9204,31288,28297,-19102,16264,711,-9779,25672,-26866,9360,4045,584,-10529,22212,-686,-8353,-13276,31102,23999,22765,7122,-31087,-26346,-13493,27726,-18350,3686,4923,12932,28145,6399,-22046,-9419,21419,-30096,-9826,4354,6502,14977,3011,-5552,-28936,8793,6438,-794,26769,15183,-24532,4949,24227,-24040,4697,-26122,29080,-31013,-14302,18809,-8280,-20454,-5,-22986,-18035,406,5399,24797,-18288,-13414,25259,-25508,9685,-30769,-17396,29751,-7030,-14116,-11361,-4577,-1226,-25822,5450,-1599,-15273,10981,17589,-13358,349,22651,26184,10128,-28331,-32349,-4204,-31034,-3521,-11620,5328,-6785,9436,-10876,-22265,19139,6433,-21608,-32507,-27968,-10304,-28468,-31375,-25812,11089,752,-31616,5137,5112,-18329,-15926,27820,-2357,-11569,17090,19132,-29047,-22494,7096,26923,3251,1192,-13898,-21883,8622,-24639,18043,12678,5373,17441,3144,-4154,9775,-25723,30,1077
2,7608,22270,31066,599,6946,-16880,26128,6756,-1133,-21019,-31062,13729,16824,-14925,-24176,-22947,-32599,-10424,27078,-9501,-24623,-8024,16312,-10914,24879,23396,-14201,12915,12603,-31651,3600,-19957,-13219,-3213,-22580,-18567,12540,13712,-23101,8934,-16656,201,-15672,-30800,29785,21198,-772,31279,-22707,1611,-30589,-28566,19523,-32575,-21303,-27768,1686,-15587,22011,29090,-26719,28252,-31820,7888,-21809,-24008,-31515,-20901,20118,31829,11866,-9647,22138,-29646,20014,-6273,-22285,-20801,-24541,-25520,-7909,18814,28242,10879,-24181,13662,4714,-20588,-15201,27571,-11685,-14368,-11818,22079,4378,-12048,-19129,-18662,28094,-5956,12827,26362,17494,-3121,-30737,-4864,24106,-7000,14502,31421,-7385,-22843,-10797,-13073,-14664,-23194,13607,-91,-5440,1801,15900,-25940,5403,6258,-14455,2529,-24901,-21905,-24983,-17130,-22069,26833,-20739,-5766,-19007,15338,21402,-16380,12901,-26578,-32188,22724,31398,17487,-1271,-22133,29534,5701,18985,27311,16359,-5261,-2785,-8324,31490,-27743,-19221,-27974,22605,7726,26657,23508,-5767,31968,-2435,-25904,-22810,-24709,-11972,-30137,-9170,17833,1620,-15345,12338,31767,-24816,28666,-7449,12310,3161,23062,-7247,29568,8387,-22648,-2910,-5077,-29637,-5846,19653,-4300,-24154,8733,-14659,12840,6005,28395,-602,-31176,-28060,2957,-2562,3980,18450,22787,-2791,31589,1711,25325,-7303,3876,29767,-12981,10163,-10114,-12754,22586,997,-2616,-20108,-9146,25730,-18999,-21691,-31235,32566,3328,-29446,-22390,25204,-8201,-6950,-28073,10061,-24992,-12275,-25467,5808,6158,-22273,5324,-12064,16104,-21571,17908,14869,-13597,-18738,22886,24998,24619,-14840,-17170,4479,16197,10889,7029,31700,9760,-7025,-26665,2155,18956,-24376,7702,5221,-32181,-21568,-21658,21988,9948,6313,30251,-1018,1932,15804,12606,-23705,-25558,20906,-836,10359,18412,28008,-9746,9556,-30800,28092,-7386,20469,2840,5093,26513,1837,1854,19789,14071,-12499,17711,9468,31495,-241,12786,22623,-28999,-30375,-201,-10192,-17875,-23136,-5568,-28771,7928,22846,26451,24139,-9949,3149,-3160,17987,20977,22383,12
115,27420,-30222,-22162,-23836,-26717,-7679,12024,28061,11624,-21558,26606,30397,7745,-8400,10699,-15086,23527,-4323,-23225,-4022,-29980,-30167,-22466,-27831,28739,32573,-30052,-31681,-21280,-6316,-24559,24650,11291,15821,519,-17381,-3607,-1850,16627,21588,-27353,13989,-22612,23667,29189,9471,8752,2061,-31136,-15678,-29476,16180,28002,27555,-474,-21772,2775,2703,-3373,-27060,16848,24941,-18750,-24261,32555,-17891,-26880,14937,-9187,22117,15386,21174,-1138,-8715,-31384,-4742,-6353,-2243,-8150,25635,13929,-21930,-24982,26676,-14687,7023,-6110,-15111,-11099,14123,19099,-14183,-15282,18099,25732,-14052,27325,8831,8134,-31744,-23703,-14123,21017,-24726,-7225,24511,5317,9461,28934,-31676,11959,-9194,32297,-25346,14467,3852,-26458,-32406,-27133,28691,5080,-3731,-29344,-3624,-1028,-17419,32345,1394,1367,-743,10772,11562,-5237,5739,16809,3061,-13597,5029,-20550,17714,-30278,-27883,30090,-14167,24062,-3863,32708,19677,10230,-7259,-16370,-23855,14854,-334,-10303,11523,803,-9742,-1735,-20372,-5330,14193,-12849,9921,3837,14809,2270,15205,22676,-20029,-11375,23380,-18150,-7308,22504,20555,-9268,-8288,-31303,10337,-12377,-7172,-14074,-26889,282,17463,24026,27302,27737,-1689,9426,26106,-5740,27660,-19289,-17606,-23326,17434,3030,28023,-31386,-8729,15016,3808,-31225,25584,941,-15177,-2016,3600,-30684,-18251,-17015,-31724,-4101,-493,-14203,-20615,24296,23444,3304,5094,-16921,-14088,8157,28009,31431,-13286,6384,-31267,-16726,18580,-9417,6547,-27545,-14034,2881,-31187,4964,514,5495,20819,-12407,-17244,12958,7382,18131,10958,21768,-15521,7091,-19578,29413,-13883,-30574,-9342,32487,-21260,22800,-14464,-1416,26685,-28681,30918,-6954,16065,-16956,-4377,-22606,4554,23535,31207,25044,20503,-9762,-23352,-2593,14217,-16627,-22350,12035,-3217,-29592,-272,3490,27453,-10715,17926,-14455,-7983,4735,-17910,-3524,6787,14103,-31319,-20220,-21129,14857,-8542,-8885,-24244,-19562,80,-26931,25071,6064,-52,-22275,15950,8554,-31293,-13526,-12615,21707,-31306,-966,-16156,-26489,-7887,26667,-2256,-14614,6018
,2091,28319,10151,27066,14849,-17576,72,-23392,-17391,18421,28452,-26883,17002,-27076,-24619,21769,-5589,-28768,-19377,-4550,8578,7348,-11263,23543,-29177,12677,16652,4941,-11443,-27719,17092,16661,-24558,7616,32122,27376,-13911,-31169,10057,-31241,-24654,-29651,-8429,-32493,18260,-7643,31156,-26042,-19547,-28116,-6278,1316,4107,31356,-30558,16648,13176,-32178,23629,-15946,26033,15686,10208,-18141,3048,30043,-30291,20167,-28624,4248,7037,25009,15938,-15206,16108,-19986,27594,2797,-16134,-23005,31870,-31881,10091,-19352,28057,8938,4769,-17512,8318,-3671,9074,-8756,-22729,-9260,6192,-8453,-30432,-17172,-27613,-8395,-7429,22853,-20436,-10484,3288,-5793,13906,-13121,-25227,-4013,-29850,-1567,-30931,-11085,-25146,-20908,-743,3598,-29644,-9647,-14287,20510,9625,6945,-13461,20209,26386,-15985,14618,-12073,-7481,-15540,27070,15865,-22136,-22898,5933,11719,-15414,-14971,-16224,-21952,31783,-12566,3794,2263,11265,-27747,21367,-24162,-1959,22298,-31378,5779,-22395,-14354,28182,32005,20674,27062,3747,31836,11579,20798,-3267,26818,-6240,20689,-15578,-27208,-20756,-9780,32698,-30865,-23910,8670,-31013,24517,10941,17699,792,10371,-16695,-7415,21255,-28536,32269,-23536,22946,-9786,-31466,-4424,-27475,-20277,23562,-21380,10985,16809,-20473,13273,-5572,17674,-29120,23212,-11096,-6190,21418,11431,30860,16776,9861,18020,-24905,29127,-16527,10479,27195,-30346,31547,27091,-4555,-11633,-19218,-1312,28473,28641,-8558,-32382,-13941,-15536,-17890,-19956,-9989,-13177,10532,-27508,-16682,-10923,18924,14984,16889,30190,-16899,25009,26021,21906,-8648,22631,-25989,31242,-4986,24155,-26235,-463,541,-25437,9209,-30831,19925,1535,-21532,11291,-22670,11157,1756,7789,-20983,-31990,13235,-17451,-174,6866,-14205,30320,-11207,-12811,23812,12974,-6220,-2045,-5618,-4745,-11047,26469,861,15725,-14852,-14865,-17573,-10802,6433,-20366,-11804,-16928,-20126,29388,-11885,3444,5514,26673,17886,2965,28423,26001,-1389,23021,-14958,-18059,15027,-2390,-7159,20888,3992,-22525,-7787,-16001,18136,-14764,13855,12599,-905
,10768,3661,24184,-13406,11808,-1548,1156,-7016,-15780,6758,-7240,23801,-3600,-18107,21546,13737,-15908,-3456,15192,-29791,13288,-9747,31739,28264,-24940,-3320,-5096,23368,-28221,19451,17047,-16634,29580,-14902,-23689,-28994,31073,6275,-24971,-5707,13107,4698,11911,2636,26360,10705,5730,-30884,11375,-18947,21486,-16232,-2993,-9681,-11735,29651,-27534,-30953,-28533,-31422,-31191,10403,15409,-13420,2426,-10060,-21441,11574,-10655,23113,27029,-28780,-1652,-26057,-1348,-2719,19853,15277,8993,-4210,21190,2115,-31122,820,-1692,-2619,23650,-4708,-10805,-20067,31600,16024,-10013,6217,-11996,-550,6452,8502,-31003,31644,6687,-24760,-8847,15793,2650,29981,9622,-32587,10791,27637,15887,15972,19319,14358,12965,-13141,-18018,9404,28747,-21855,-11998,-452,-7538,-28004,6665,-28117,31381,13251,11973,-8076,18874,-26181,1545,14574,-26043,16885,-20920,14314,24341,21242,-16089,-4005,-24480,13034,19460,8959,29675,-11247,-15204,22919,5863,347,-11047,-8711,22277,-789,29609,14519,3086,12685,-21765,-2553,31406,5067,5566,17515,-21727,-11819,28793,32276,-30864,-29351,-19317,-6155,-32724,31699,11087,-28964,-18464,13955,-13247,-3814,-22589,4685,-11091,27351,-5588,17151,-752,19743,-27944,25776,23281,-3156,11979,-30919,16255,-26831,-19295,-27422,-6144,-3335,-6288,4796,-22053,-11080,-16207,-1063,-28196,-23512,-18896,14587,24121,-1386,-30843,14400,-15667,-6270,-26276,3049,25023,-25877,638,7129,-25368,-3061,9211,-8410,28604,-18003,-11286,26475,-12724,32655,25223,-3957,-5545,30341,-12556,-1175,32560,14350,1080,-5654,8378,29105,5538,6917,5794,19771,-13322,3033,-22124,-22978,25800,-15043,19726,17103,23916,-3843,4512,27304,-19423,6549,4956,-24825,30978,25853,-22302,25544,6284,27522,-20888,29609,-5828,-31534,1815,5367,-26780,30393,-12153,291,3317,-31749,29920,-10592,8213,-4861,-11476,29059,-1233,-11806,15649,-19915,-9673,-30015,23346,-23803,-13268,-1616,-20449,3894,1890,-19956,7578,-6248,-12854,-26946,10548,-22929,-30054,24550,12301,-22739,8641,22309,-1181,14873,-13455,-15843,-14470,21090,-15516,1406,2961
2,-3322,-19906,-5844,-16554,1939,-31024,-14075,-13673,-25735,-8972,-6531,-18655,-14328,30039,-24327,-4877,-19941,-20846,24047,12854,-32253,-25611,-20965,22971,14325,6058,6941,18736,8473,32555,30738,16761,-24719,10158,-1925,-26242,11876,-15653,23482,-15151,6349,-20454,-10947,5309,22815,23949,10842,-14690,11232,29060,16829,-616,-20620,-17419,-1443,13321,-20850,27698,-31887,-22897,-17421,15104,-22989,-31804,23843,-8218,22411,-16774,-3412,19466,30751,20037,18049,-6393,-10634,1652,-23191,23641,-17668,-5266,8344,26551,-13110,-31761,-28524,20267,-29037,6064,7167,32506,22228,30571,17688,-31221,21657,-9120,14427,24646,-7012,20288,9137,-4826,31731,-4846,30337,-28849,16185,-2918,6469,21100,13718,-24162,-31518,-9899,-14746,9054,26592,-29530,-13334,-11958,31646,11694,-9058,30536,14168,-3359,-18484,-9913,-21450,-27435,-10018,12439,825,13846,-17409,18251,-17952,-8458,-17011,911,-22673,-30182,9470,-15939,11624,-22039,-11755,32547,-27263,-2815,-2455,-17791,6885,-2905,-31709,-15238,467,-20124,-11831,-17195,13714,26509,-30833,24138,-21728,-14918,1443,15947,-8574,-1160,-32270,28206,-4415,11759,-22207,-14651,-21520,-13604,15991,15445,21514,-16227,-16824,466,-23143,-25159,1557,9581,-23266,-9681,-29719,-1765,19039,19737,15569,-26055,20230,30160,-3219,-13969,-30728,41,27802,-5841,31101,-365,-2124,-29743,131,29752,22713,11906,23937,-4021,-21818,-43,-31718,-16590,-16829,22158,-21738,1835,-26253,-3151,-10940,-77,7768,13087,-13417,-23950,27184,6248,27761,17094,-29175,-26787,20945,-6559,-8772,-10462,-7862,13013,30991,-18455,-6309,22731,32606,-28902,-9666,32208,13199,-23018,18652,15750,21690,15266,2290,-19647,-5957,1264,-12827,-22217,29368,-30889,-27433,169,-16603,20790,-8240,29361,-1437,-4289,-4416,26030,15143,-27031,-22507,23521,22279,-13065,8963,-23418,-18182,-13197,3737,24425,7892,-27046,24452,-22981,14501,26167,-20385,31226,10438,20727,-692,27807,11202,-25377,14705,-23226,-13835,1355,-23961,6862,-27811,-3193,-24414,-19807,8485,5256,32417,-26471,27863,-32127,-32170,13430,1341,-25892,8801,974
5,-5174,14156,-6386,-27009,-28419,15933,-30867,-19603,21786,-16766,-3630,26017,31269,2616,-1878,-7793,-5666,-2454,8279,-16240,-82,-4964,-12866,-4923,20069,-15764,10881,21019,-13788,-29970,-7686,-2747,15313,-24275,30220,-5675,27316,22771,11316,7019,-25608,17756,18786,28000,25665,19886,-28109,21496,-6646,-14805,13760,6451,6795,23507,-17328,-32037,14201,-11562,9801,-21515,-30336,3607,-5335,20586,23721,30400,22611,17870,-8821,-32183,30589,30329,-19103,-15327,2463,-29033,-21643,-26530,17568,7492,21725,9401,23944,-24962,8015,-2439,2171,-3184,5464,27917,-22874,7884,-29305,14817,20587,-10349,-14191,27820,-3227,18832,-4415,13659,-30699,-3466,4732,11,-11094,26258,-15996,21757,-3648,12102,17078,4586,-16043,3864,8703,-12017,6250,-2693,13838,-619,10706,-13512,17183,-28,-22420,-7400,-5897,-32064,16028,-28,-17991,6376,-19099,-14011,2469,-30782,-28372,-7962,5436,18598,-23467,-29341,21157,-23597,18419,-26957,26998,28371,28617,-18525,27566,-10448,25427,-8026,-23233,-31946,-18772,3071,27188,-25663,15586,764,6229,-3409,-27438,-19804,2556,-21077,22841,-7691,-293,14380,-2147,-5889,-31189,22797,13408,23305,10767,27869,-14698,-3773,32758,19604,19048,8792,6648,21456,2081,-26412,-3212,-23682,24181,22442,-18181,2798,-2677,1129,8743,30059,9856,-30699,-22620,-11075,-6972,-17450,-4091,-151,19787,-11047,23334,6498,-27743,-8131,23930,-22255,-3141,16151,27137,-553,16978,-5087,-10413,-27733,4927,17641,-7269,-19045,-2156,24242,970,12236,1214,-10010,-19272,-9186,22862,3975,-17320,2049,17100,-6098,14747,-7206,25681,-18587,8066,-1860,-6062,1082,23735,11000,-2302,-1110,11174,-25929,11125,26246,3345,-24254,-15549,-23307,16318,-31169,-32403,-24597,12151,-11008,5926,-32559,25245,31603,-7123,-929,-27147,-6240,21767,-13421,-16465,16643,-26091,-1857,26251,12603,24856,26531,6121,-30738,-24531,-18449,11983,7891,-21545,7759,-14220,-7728,19134,-24492,405,-10416,-23911,-31916,-13439,-22306,9402,23543,-13299,-7171,-26160,21826,9066,4852,-27430,-3195,22046,16795,-19313,8189,19626,-1286,-20686,5411,22280,-798,-3742,40
28,-22062,-6918,-11383,20434,27777,-15728,16118,463,25926,31443,-15102,-9945,-11226,5728,-18581,-18592,-7061,32528,5147,12807,6365,11438,-26537,-9105,-23819,26623,6169,-28172,15095,-16959,4584,-30260,20609,-25928,-15328,-13435,27969,-676,1352,964,4472,-18487,-6344,-22845,3452,-13735,17534,24049,-9999,29057,836,-21468,-17831,-27795,1576,20459,28826,5935,-8451,13745,22441,-13559,-28952,29139,31982,28940,25200,-30580,12991,-20928,16698,984,17341,-6064,-3824,64,23923,-2455,-16497,5428,21421,15669,-25326,-5862,-14205,-22613,8930,595,-21420,11687,23477,-31886,-19056,-9095,23827,-13811,4122,26552,2666,25048,-27707,13368,9437,24195,-17563,-26111,12092,-7154,21822,13764,-1940,21908,-31541,-6635,-13262,1589,-3071,-7842,7160,18279,32749,13860,-7632,3439,30489,4472,17304,-29264,-31704,-27222,12640,-30543,-24003,-19537,27814,19664,-6770,18762,-25224,-5045,-31164,-7456,30049,-24155,12794,21680,25763,32168,-19905,860,-24181,-28488,32485,18207,-30917,-6046,-581,23996,27673,24417,-404,16515,3455,32037,-32387,3870,23242,26982,29660,-30698,17190,-8660,-32140,-17570,-7985,-23439,-30430,13490,15977,-12438,30527,-5630,-17569,16355,-13275,11002,24554,-25401,7441,30869,18845,-28944,23731,-27817,-6561,-1841,3420,-20435,24770,1202,16291,-31991,19038,11231,-32311,-32110,-27076,-24769,15963,-20453,-16682,874,-31332,-30016,-14756,-15317,20195,-5321,-7259,8813,-2214,4367,12550,-2359,10764,24803,2660,13522,7093,1649,-11708,32092,17747,-6797,-3226,10706,20312,958,-31998,7161,-8909,503,-11947,-26350,-5265,-8941,-20520,17976,-11153,-2916,18691,3655,23114,21113,28185,9406,25168,6519,23876,-3913,27467,-22186,6932,14867,24743,7867,6655,19613,25915,19328,14204,3977,-6497,-658,-12511,10380,-15602,-20299,-4432,6366,-31418,25826,-25822,23680,5684,18248,-16563,13126,1314,-25195,-22893,-16604,-20481,-15085,-18720,-23184,10488,11149,-2678,-7320,27125,14695,22976,2490,17728,-28301,-14477,-57,-2691,19192,-30424,-20735,-25773,17426,-29447,28480,-19875,23562,-20672,-11028,-30920,-20621,18728,19149,-24150,-17262,1
8801,-20629,21230,5125,-3466,29837,-17145,-7095,2000,-10997,3438,-31433,24697,25057,-27026,18196,12313,18999,34,-2111,25232,20173,12021,-26619,-19386,19323,18387,22982,-1695,11946,-14725,-22553,-18254,-14879,-17581,22945,-25810,18530,-31652,8672,-13469,-1128,19068,8736,13952,-10859,-15370,-3792,8904,-22350,1654,-19166,-8031,-1666,19116,-14214,20937,-16653,642,-7708,21637,-19324,-31687,-29016,21803,27726,-14889,27136,-16210,-10664,29030,22257,29653,14612,20483,18403,12463,-29027,-27754,19954,-15781,8986,-22764,-15935,30500,8039,-31188,-25662,30973,-24203,-27375,22028,926,-20112,-31327,12721,32458,-15218,10924,-25535,-31848,30246,6217,-18447,-5655,22930,-643,-24561,-13481,8964,30588,-30255,31631,11659,735,-27071,-17820,-21683,13114,28968,3946,18804,-27171,30431,-1700,5340,9137,7408,-6297 diff --git a/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice23.tflite b/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice23.tflite new file mode 100644 index 0000000..fb665aa Binary files /dev/null and b/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice23.tflite differ diff --git a/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice23_golden_int16.csv b/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice23_golden_int16.csv new file mode 100644 index 0000000..5ff8dc2 --- /dev/null +++ b/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice23_golden_int16.csv @@ -0,0 +1 @@ 
+-66,17250,-12645,-687,26475,9884,-7873,21380,-31313,-26030,-10171,-21245,20754,-16899,-5149,-6950,-23335,-28038,-25553,-2280,-25869,11416,6955,-3872,-15648,-12927,24663,32438,14723,-21649,112,27267,1493,19435,22449,-25527,26257,-7058,-17878,18554,24050,-5664,-32403,2145,16205,-26066,-16922,-28562,-2824,-31204,6224,14544,30901,15261,26621,6708,17889,2190,-27448,15211,-13530,13087,21667,-716,-12603,2306,18738,-419,8964,-1347,-7516,-6061,-19105,24057,28230,8343,30654,25592,-4717,-11560,28059,14060,10149,-26943,31040,-9472,-26504,8310,22306,6865,13802,-7995,5374,26361,14311,-24170,12346,4781,18649,-29809,27858,-8293,-24591,-5913,8013,-16223,-15480,20031,-4196,31561,-23427,-31885,-15516,-19308,-8245,913,28219,6306,23568,9619,-6985,-18952,-18195,-3726,6602,7438,-21886,31537,26947,11316,13766,9267,14449,31966,14481,-6404,7313,-23081,15983,29443,-25400,-31751,-13779,-23018,-9351,-10579,12170,-784,29874,-6567,12293,14641,895,6958,-17350,-7216,25484,-25802,13798,20541,-21479,13036,-9717,-1107,-17315,-29609,5463,4815,-15595,-17170,6743,-3114,-4240,-26229,-4924,29850,-25041,4772,14569,24493,-6646,-17768,-10989,32052,31893,-31374,29201,-2715,27260,6746,18584,27239,-11162,16577,14431,-7496,-32681,28482,-27819,-1587,22816,32340,701,29850,23255,8557,3858,-25249,20347,-10981,17784,-4238,-25235,-6045,-6569,5587,-23709,-23218,6619,31531,26760,25233,-2672,9815,-25888,-27280,-6843,21561,14387,-3585,-20216,28157,-16670,-4477,15354,21635,20759,-17481,7711,18176,-10170,-234,-5595,-9861,19557,27057,-9688,14517,-27124,4146,16041,32218,19208,15490,11548,14260,18204,-1231,10107,3253,16528,-22372,27807,-15592,20650,-32211,-9563,-30773,18148,-19027,-6462,21118,9386,28212,-27953,4063,-21695,27196,-14247,-3015,-5930,17108,1480,13607,-17977,17441,14752,14731,-28956,2778,-10179,-22958,26044,-30558,-2664,8316,-18777,22412,-9451,-6196,-4160,28619,10255,-26585,-6074,30284,15101,-467,17068,-22821,24799,23196,-8449,19885,8209,9594,-26840,-1226,-28422,-17414,-1967,-20741,-11238,32361,26334,18390,25030,-6
006,9438,5447,-5878,-3056,1522,30441,5677,-17212,29174,-25336,-77,-21359,-22557,7754,1629,31555,22794,-18607,-23080,-9173,-19188,19186,30252,-20782,-17781,-30880,32034,11708,6946,28684,6643,22371,25997,30680,-31555,-29791,-25221,-21020,-12222,16517,1975,19146,-11633,-9746,7109,-22169,23082,26675,-28984,1156,5945,-29920,24749,11899,5336,12905,15764,-23552,-26432,-30201,18389,-29423,8844,752,-21093,-25732,-11715,-1861,-12537,31303,9330,24062,26936,7114,-13471,26456,32582,-12355,25084,22751,-7782,22059,27579,-4716,1622,19984,24432,13786,-2290,31585,-23614,10777,24208,-12926,-30805,29094,8436,13105,2675,-26493,10988,8079,27662,6791,-7244,-5542,32433,14027,19475,-7589,2599,26056,27806,490,-22826,11020,-10900,-28044,-6642,-21852,16822,21306,24169,-28000,27864,23352,10139,-14001,24485,-21454,-3623,14933,-7705,-6715,20354,13444,-11121,374,1661,14003,10343,-20974,-27153,30857,28353,-31857,-22496,27155,17004,28828,23245,7909,-15105,-17273,25687,-6341,-28381,-27547,12754,5595,-18786,-21890,-3572,-23274,24482,-23837,-9911,30092,22702,15045,3748,2549,-30415,-200,-31198,20241,-25036,-21554,4994,-30480,-6675,25532,22768,-30199,-29305,10979,14003,-27885,-6931,29632,-12139,7119,-70,7184,29358,-2198,26663,-21848,23369,22988,-19047,23040,20884,10807,31655,-19785,-24446,-25731,-4702,24763,9756,14486,-10430,-30587,-29473,31959,29474,-25923,-26930,-14345,16515,1838,9710,1036,-9325,23873,22301,-6788,6962,12851,9807,9926,-27844,-20190,-16296,-3990,-2204,-32686,-5394,-19006,-32288,32047,12036,-18932,568,-18318,12122,11224,9648,1882,-4095,20697,15649,12029,-7522,3318,4442,-10066,738,29028,-7187,-10734,15671,13225,-14902,-12223,-29032,-18612,21289,27412,11934,9888,9273,-10800,15894,-22935,-20157,13503,-2399,-5188,-2315,-19003,3924,7270,-14197,31327,22189,-22937,-24609,5858,2821,10845,31506,-12980,-5575,-28340,27550,11027,-26966,3310,-29125,345,20925,-27347,-17692,-2008,27619,12256,31211,-31510,-18331,-29155,6970,12185,21239,11349,-1987,4581,-30968,16572,-5012,-20856,14223,-21489,7392,-13383,-
28769,-23998,-9334,6796,-29349,16451,-7200,23885,76,21828,-2432,2928,4280,-7808,532,-16288,6927,505,-22110,11404,-9605,26060,-22389,29827,-19270,2064,-10511,-72,32383,19654,-27745,-17968,6123,32263,29151,11238,552,-5144,-8191,20061,27939,31632,-29290,31323,8717,-9384,-8122,29086,-4069,-9308,-9315,24056,-22863,23074,-23326,-11121,-9872,31346,25798,24169,19968,-12093,29645,22955,-23440,1269,-2828,4449,-8208,-17083,-8257,-4893,22932,-12184,2618,26834,-640,-10883,13608,18450,-7395,-20554,-22044,9041,23229,-21472,-29250,30404,17418,8054,-1295,-16153,26888,13429,14774,14773,13060,6793,-27553,3720,-26351,-27569,7557,15738,-10756,19253,-17849,3317,21178,-22464,-10021,-28429,20993,-13234,-10079,22202,27681,-919,-15213,14862,27600,-12792,-12285,-26364,28026,9219,-30432,11623,-18384,7513,23666,-11520,15170,26271,23828,-28586,-32740,-14981,-31838,-1097,-14208,16968,10341,32188,22079,22193,-21206,9452,-13472,11385,2996,-6082,14901,22275,-28607,16309,-28113,-23716,7453,17942,23132,10756,23149,25027,-21550,-3629,-12839,-27505,-17456,-27744,2084,-6841,28377,-15497,-31146,30458,-11433,-2384,12564,-4163,8458,2040,-11244,16460,-16417,-18029,-9481,1848,31363,-5863,-27776,7872,-31958,30005,-31400,-31588,8193,-26281,-1745,19440,31566,3333,13039,31746,-928,-22409,15762,-16035,-11352,14706,-7989,12524,-32611,25496,26883,-4949,246,-21555,7,-29925,-9977,20661,-11764,2170,7018,-4777,32741,-19575,-11303,-17653,-23159,-17279,-653,14896,-1065,-2158,-11174,-19200,10062,18272,24129,21233,-14788,8335,24828,6311,-997,-26459,7839,26409,-13678,-30081,16090,857,18656,506,17235,21965,9521,-19116,11186,9865,21731,29523,-20180,-7156,-24801,16513,-19670,19410,27432,488,-18151,13290,26492,9464,26037,25496,-19962,7699,4622,20245,-13434,20013,3386,9291,28432,-22690,25915,24382,-13543,31553,25653,28062,6674,7940,-22791,25635,20045,-29961,14195,29735,21821,-11408,-1350,26726,23865,18210,21314,-13477,-496,-8299,-7588,-32519,-18772,7956,24184,19880,21838,-18391,-8772,15626,16450,22594,24544,14787,-32422,22587,255
43,-21301,-2666,6638,26237,903,-5890,32591,546,18088,-26189,-15784,-26909,-16886,-17415,-12109,27667,-24780,2017,2573,27662,-22866,-8635,30542,31159,-12138,340,-20281,30896,-26479,25032,-14078,-13061,-11160,-31749,-31573,-16397,-31005,19954,28849,24658,-12912,14993,247,-22269,-21445,20289,-20469,-1460,29428,21740,-18398,31195,-30071,-23549,-11856,1029,-17970,28653,8973,-7387,-24021,-4602,-679,-29677,-10951,-6567,-10226,814,17560,12361,-4182,30867,-25,18144,20333,-22694,7893,14819,2748,29010,28072,21817,1705,17505,7365,3712,-6731,29942,-17172,21166,-19569,16329,7080,-4113,-1120,1534,6871,-15020,-14236,-7255,5469,6022,889,14608,17790,-24,-21542,-1457,-17950,160,-31145,-12618,-28627,32070,7396,24092,26289,-11087,-24743,-23922,-2434,19437,9804,21799,864,16,-2189,6465,-7234,6377,4022,26334,991,-31573,872,-1347,19913,-3592,-4750,-13824,-11522,25594,-7813,2002,-10906,-2257,20382,6921,1407,15124,21129,27760,-2617,25744,10827,6785,2598,-32304,-8455,30739,-7311,-20138,-13669,-8591,2990,-20727,-6401,-2458,-30263,18741,8429,13284,16176,6511,-20867,-28400,11457,7586,-24065,22307,10845,28005,29932,-8524,-8133,-31800,19156,8890,-14930,-26956,-23495,-32537,28010,-25216,14930,20804,-22721,-354,-31136,9791,-24177,9302,-1715,2520,31388,5053,13600,-22523,-18071,-4603,-13961,-9425,16948,-18671,-28803,32223,-3545,-24951,24294,20584,-27960,24609,504,-30127,14483,12114,18804,8908,-8933,24168,-12746,16576,8677,3127,-25726,-2145,5231,-22152,-6813,30156,-18410,-28729,25130,12372,-7966,-2886,-13347,16869,20776,11315,30233,5336,25364,22997,-11203,1066,20551,-15045,15590,-15653,18449,23684,23792,-4599,-18987,-1475,14598,18448,-23055,-11307,27009,-8374,22553,-3764,-19738,12891,892,-2619,-29364,-31143,-8103,-5582,8696,26797,-22913,24214,-23544,30843,-24644,-25206,25926,-23971,18699,19343,-2177,-21264,-16565 diff --git a/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice23_input0_int16.csv 
b/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice23_input0_int16.csv new file mode 100644 index 0000000..d9667ce --- /dev/null +++ b/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice23_input0_int16.csv @@ -0,0 +1 @@ +31049,26996,-1351,7944,28649,-16194,18926,-6615,1588,-22360,21964,15489,6434,-26334,14839,-1759,-16183,-19454,20320,15870,-9564,6579,-31671,-4923,32078,-27278,7774,12865,15760,20657,-2683,-6258,-66,17250,-12645,-687,26475,9884,-7873,21380,-31313,-26030,-10171,-21245,20754,-16899,-5149,-6950,-23335,-28038,-25553,-2280,-25869,11416,6955,-3872,-15648,-12927,24663,32438,14723,-21649,112,27267,1493,19435,22449,-25527,26257,-7058,-17878,18554,24050,-5664,-32403,2145,16205,-26066,-16922,-28562,-2824,-31204,6224,14544,30901,15261,26621,6708,17889,2190,-27448,15211,-13530,13087,21667,-716,-12603,2306,18738,-419,8964,-1347,-7516,-6061,-19105,24057,28230,8343,30654,25592,-4717,-11560,28059,14060,10149,-26943,31040,-9472,-26504,8310,22306,6865,13802,-7995,5374,26361,14311,-24170,12346,4781,18649,-29809,27858,-8293,-24591,-5913,8013,-16223,-15480,20031,-4196,31561,-23427,-31885,-15516,-19308,-8245,913,28219,6306,23568,9619,-6985,-18952,-18195,-3726,6602,7438,-21886,31537,26947,11316,13766,9267,14449,31966,14481,-6404,7313,-23081,15983,29443,-25400,-31751,-13779,-23018,-9351,-10579,12170,-784,29874,-6567,12293,14641,895,6958,-17350,-7216,25484,-25802,13798,20541,-21479,13036,-9717,-1107,-17315,-29609,5463,4815,-15595,-17170,6743,-3114,-4240,-26229,-4924,29850,-25041,4772,14569,24493,-6646,-17768,-10989,32052,31893,-31374,29201,-2715,27260,6746,18584,27239,-11162,16577,14431,-7496,-32681,28482,-27819,-1587,22816,32340,701,29850,23255,8557,3858,-25249,20347,-10981,17784,-4238,-25235,-6045,-6569,5587,-23709,-23218,6619,31531,26760,25233,-2672,9815,-25888,-27280,-6843,21561,14387,-3585,-20216,28157,-16670,-4477,15354,21635,20759,-17481,7711,18176,-10170,-234,-5595,-9861,19557,27057,-9688,14517,-27124,4146,16041,32218,19
208,15490,11548,14260,18204,-1231,10107,3253,16528,-22372,27807,-15592,20650,-32211,-9563,-30773,18148,-19027,-6462,21118,9386,28212,-27953,4063,-21695,27196,-14247,-3015,-5930,17108,1480,13607,-17977,17441,14752,14731,-28956,2778,-10179,-22958,26044,-30558,-2664,8316,-18777,22412,-9451,-6196,-4160,28619,10255,-26585,-6074,30284,15101,-467,17068,-22821,24799,23196,-8449,19885,8209,9594,-26840,-1226,-28422,-17414,-1967,-20741,-11238,32361,26334,18390,25030,-6006,9438,5447,-5878,-3056,1522,30441,5677,-17212,29174,-25336,-77,-21359,-22557,7754,1629,31555,22794,-18607,-23080,-9173,-19188,19186,30252,-20782,-17781,-30880,32034,11708,6946,28684,6643,22371,25997,30680,-31555,-29791,-25221,-21020,-12222,16517,1975,19146,-11633,-9746,7109,-22169,23082,26675,-28984,1156,5945,-29920,24749,11899,5336,12905,15764,-23552,-26432,-30201,18389,-29423,8844,752,-21093,-25732,-11715,-1861,-12537,31303,9330,24062,26936,7114,-13471,26456,32582,-12355,25084,22751,-7782,22059,27579,-4716,1622,19984,24432,13786,-2290,31585,-23614,10777,24208,-12926,-30805,29094,8436,13105,2675,-26493,10988,8079,27662,6791,-7244,-5542,32433,14027,19475,-7589,2599,26056,27806,490,-22826,11020,-10900,-28044,-6642,-21852,16822,21306,24169,-28000,27864,23352,10139,-14001,24485,-21454,-3623,14933,-7705,-6715,20354,13444,-11121,374,1661,14003,10343,-20974,-27153,30857,28353,-31857,-22496,27155,17004,28828,23245,7909,-15105,-17273,25687,-6341,-28381,-27547,12754,5595,-18786,-21890,-3572,-23274,24482,-23837,-9911,30092,22702,15045,3748,2549,-30415,-200,-31198,20241,-25036,-21554,4994,-30480,-6675,25532,22768,-30199,-29305,10979,14003,-27885,-6931,29632,-12139,7119,-70,7184,29358,-2198,26663,-21848,23369,22988,-19047,23040,20884,10807,31655,-19785,-24446,-25731,-4702,24763,9756,14486,-10430,-30587,-29473,31959,29474,-25923,-26930,-14345,16515,1838,9710,1036,-9325,23873,22301,-6788,6962,12851,9807,9926,-27844,-20190,-16296,-3990,-2204,-32686,-5394,-19006,-32288,32047,12036,-18932,568,-18318,12122,11224,9648,1882,-4095
,20697,15649,12029,-7522,3318,4442,-10066,738,29028,-7187,-10734,15671,13225,-14902,-12223,-29032,-18612,21289,27412,11934,9888,9273,-10800,15894,-22935,-20157,13503,-2399,-5188,-2315,-19003,3924,7270,-14197,31327,22189,-22937,-24609,5858,2821,10845,31506,-12980,-5575,-28340,27550,11027,-26966,3310,-29125,345,20925,-27347,-17692,-2008,27619,12256,31211,-31510,-18331,-29155,6970,12185,21239,11349,-1987,4581,-30968,16572,-5012,-20856,14223,-21489,7392,-13383,-28769,-23998,-9334,6796,-29349,16451,-7200,23885,76,21828,-2432,2928,4280,-7808,532,-16288,6927,505,-22110,11404,-9605,26060,-22389,29827,-19270,2064,-10511,-72,32383,19654,-27745,-17968,6123,32263,29151,11238,552,-5144,-8191,20061,27939,31632,-29290,31323,8717,-9384,-8122,29086,-4069,-9308,-9315,24056,-22863,23074,-23326,-11121,-9872,31346,25798,24169,19968,-12093,29645,22955,-23440,1269,-2828,4449,-8208,-17083,-8257,-4893,22932,-12184,2618,26834,-640,-10883,13608,18450,-7395,-20554,-22044,9041,23229,-21472,-29250,30404,17418,8054,-1295,-16153,26888,13429,14774,14773,13060,6793,-27553,3720,-26351,-27569,7557,15738,-10756,19253,-17849,3317,21178,-22464,-10021,-28429,20993,-13234,-10079,22202,27681,-919,-15213,14862,27600,-12792,-12285,-26364,28026,9219,-30432,11623,-18384,7513,23666,-11520,15170,26271,23828,-28586,-32740,-14981,-31838,-1097,-14208,16968,10341,32188,22079,22193,-21206,9452,-13472,11385,2996,-6082,14901,22275,-28607,16309,-28113,-23716,7453,17942,23132,10756,23149,25027,-21550,-3629,-12839,-27505,-17456,-27744,2084,-6841,28377,-15497,-31146,30458,-11433,-2384,12564,-4163,8458,2040,-11244,16460,-16417,-18029,-9481,1848,31363,-5863,-27776,7872,-31958,30005,-31400,-31588,8193,-26281,-1745,19440,31566,3333,13039,31746,-928,-22409,15762,-16035,-11352,14706,-7989,12524,-32611,25496,26883,-4949,246,-21555,7,-29925,-9977,20661,-11764,2170,7018,-4777,32741,-19575,-11303,-17653,-23159,-17279,-653,14896,-1065,-2158,-11174,-19200,10062,18272,24129,21233,-14788,8335,24828,6311,-997,-26459,7839,26409,-13678,-300
81,16090,857,18656,506,17235,21965,9521,-19116,11186,9865,21731,29523,-20180,-7156,-24801,16513,-19670,19410,27432,488,-18151,13290,26492,9464,26037,25496,-19962,7699,4622,20245,-13434,20013,3386,9291,28432,-22690,25915,24382,-13543,31553,25653,28062,6674,7940,-22791,25635,20045,-29961,14195,29735,21821,-11408,-1350,26726,23865,18210,21314,-13477,-496,-8299,-7588,-32519,-18772,7956,24184,19880,21838,-18391,-8772,15626,16450,22594,24544,14787,-32422,22587,25543,-21301,-2666,6638,26237,903,-5890,32591,546,18088,-26189,-15784,-26909,-16886,-17415,-12109,27667,-24780,2017,2573,27662,-22866,-8635,30542,31159,-12138,340,-20281,30896,-26479,25032,-14078,-13061,-11160,-31749,-31573,-16397,-31005,19954,28849,24658,-12912,14993,247,-22269,-21445,20289,-20469,-1460,29428,21740,-18398,31195,-30071,-23549,-11856,1029,-17970,28653,8973,-7387,-24021,-4602,-679,-29677,-10951,-6567,-10226,814,17560,12361,-4182,30867,-25,18144,20333,-22694,7893,14819,2748,29010,28072,21817,1705,17505,7365,3712,-6731,29942,-17172,21166,-19569,16329,7080,-4113,-1120,1534,6871,-15020,-14236,-7255,5469,6022,889,14608,17790,-24,-21542,-1457,-17950,160,-31145,-12618,-28627,32070,7396,24092,26289,-11087,-24743,-23922,-2434,19437,9804,21799,864,16,-2189,6465,-7234,6377,4022,26334,991,-31573,872,-1347,19913,-3592,-4750,-13824,-11522,25594,-7813,2002,-10906,-2257,20382,6921,1407,15124,21129,27760,-2617,25744,10827,6785,2598,-32304,-8455,30739,-7311,-20138,-13669,-8591,2990,-20727,-6401,-2458,-30263,18741,8429,13284,16176,6511,-20867,-28400,11457,7586,-24065,22307,10845,28005,29932,-8524,-8133,-31800,19156,8890,-14930,-26956,-23495,-32537,28010,-25216,14930,20804,-22721,-354,-31136,9791,-24177,9302,-1715,2520,31388,5053,13600,-22523,-18071,-4603,-13961,-9425,16948,-18671,-28803,32223,-3545,-24951,24294,20584,-27960,24609,504,-30127,14483,12114,18804,8908,-8933,24168,-12746,16576,8677,3127,-25726,-2145,5231,-22152,-6813,30156,-18410,-28729,25130,12372,-7966,-2886,-13347,16869,20776,11315,30233,5336,25364,22997,-
11203,1066,20551,-15045,15590,-15653,18449,23684,23792,-4599,-18987,-1475,14598,18448,-23055,-11307,27009,-8374,22553,-3764,-19738,12891,892,-2619,-29364,-31143,-8103,-5582,8696,26797,-22913,24214,-23544,30843,-24644,-25206,25926,-23971,18699,19343,-2177,-21264,-16565,29627,24988,24578,-14440,-10335,-16126,575,11781,-3589,-2091,-10748,-640,-2809,-27637,23478,19172,-16307,24527,27781,7282,-28005,-16109,30006,7556,-18410,-24788,15762,-2105,21327,-8996,-8883,19367 diff --git a/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice24.tflite b/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice24.tflite new file mode 100644 index 0000000..5c55d64 Binary files /dev/null and b/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice24.tflite differ diff --git a/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice24_golden_int16.csv b/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice24_golden_int16.csv new file mode 100644 index 0000000..d7b1437 --- /dev/null +++ b/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice24_golden_int16.csv @@ -0,0 +1 @@ 
+-142,14933,5969,-30223,-7143,-18827,5114,-1488,6766,14759,28499,309,29481,25665,3635,15777,24746,3966,-7809,3028,-3348,-23946,290,6505,-32128,680,22590,24135,5058,-4866,6734,-30940,-11589,-15595,-32117,-26441,-20241,-2013,-28499,-10546,14418,-508,-17675,20326,-18952,-29668,-17933,-18505,1476,21715,-10025,-21182,13138,10700,-12020,-5106,-11171,-20719,15782,30526,-7187,-11155,-14092,-23976,9303,-502,-16147,-16179,31250,-1155,1664,25337,5651,-15669,-25755,28306,-22475,-11951,-17033,7081,348,-15795,16465,-25250,-9446,-2600,30180,-28834,-6984,29360,23598,24173,5288,-24957,5645,-26909,20793,-1347,-21133,28774,17209,-4929,-11807,5464,-21643,16282,-29068,26320,-6678,-7845,-4490,-30116,-30572,-14622,-17379,-27178,26689,-20637,30820,14023,26493,-13979,20643,4697,-11830,-344,5458,-26011,-10732,-19181,-3953,19854,-30696,-22439,2273,2181,11544,-24327,9606,27586,-5344,-2013,19642,28901,21654,22579,16170,-31266,-3676,-3419,-16286,-28761,17573,-23166,9944,675,-25173,-10068,-18811,-17774,-12385,21307,8361,-30170,12300,20360,-4557,-20750,5750,-6493,4761,32267,-2868,19070,5182,-459,-32159,-12528,21747,-4026,-15662,4557,-27871,-5594,-1981,15967,30578,4111,-17086,-9423,10509,-16577,14886,-2411,-18235,29977,20606,-16980,7287,-25554,27313,17709,-7464,5618,-12970,-17219,32033,3943,3664,-21044,-25976,15127,-18569,-16253,26699,-3796,-26902,15515,27608,-23220,22750,18704,20985,2672,-1892,-21158,-10926,-10014,30239,20397,-30609,2251,31798,607,-11579,3922,-1202,-25428,20303,32265,5924,-26601,-10734,-18035,203,17500,-18292,5370,19140,13256,15864,18335,12997,-14873,19461,-27508,-17661,5359,-1092,-18456,816,-5968,-21798,20929,-17954,-18431,14607,-13349,-20356,14715,22339,-24262,2431,-11261,7101,-3071,24622,-27505,-17265,-2132,-9240,32292,-6381,-21380,12017,10577,-4657,-9734,-19460,-30307,-18118,16330,-18327,-25453,21203,3080,2869,9281,-14587,-20334,13191,14287,21498,-175,29172,-3814,13622,20653,-30728,-5574,-25966,29000,13658,-9003,15852,12240,-31347,32096,-2071,28063,20013,26679,1317,2500,13802,
3825,31982,12832,11704,-14689,-1779,-18610,-17749,15959,-574,4363,-18688,10099,30613,-31272,9218,-28817,25792,3819,-16678,31117,-4981,-2099,-22816,-3254,0,-20572,21475,27498,-12261,-30588,12156,23517,-19757,-29095,-8254,2574,8560,-5684,31834,31593,-27704,3635,-28574,-28136,15640,-12358,-5078,-1104,31138,21075,6286,15679,10033,-13419,-23522,-16133,-27983,-5027,-10695,-19195,18049,-7750,-5815,-30391,-19807,4057,20430,-2832,-12842,31856,-27197,-14354,15212,-14025,-12291,22368,-14150,-16713,6855,-9385,-26666,7171,2211,15178,-16210,-25133,27677,2651,-10321,-18925,-14677,28217,18968,10012,-15395,16437,15568,28283,-15102,26597,12904,4090,-31232,-26468,-25874,24809,8748,14107,-5568,11334,1666,20868,604,-7017,-24568,-3001,-23005,16833,8862,21883,-1122,-13011,-12005,1315,-21447,-23789,-17139,-9891,-3739,10036,-27366,4140,-28439,-4334,28546,551,31901,3495,21695,26216,-28406,-23809,-924,-27852,-11743,-15671,-26224,21292,-11336,3596,-16392,-21211,-27236,21880,30366,-17457,28639,-31327,7030,28663,4560,18025,32517,25894,11536,-5224,25490,15860,20822,-2965,13499,-2509,-28020,22058,-16716,-26818,-893,16104,19506,2509,9933,12047,-24018,-9590,28676,-11017,-7459,-21749,32230,-3615,-19018,-14554,24816,-9285,26233,-20518,-14042,4458,3163,-11917,-24789,-29931,-27599,26973,-23173,8676,-21094,18736,-8975,11475,-19100,26832,-17071,-22804,26416,-16641,-25427,-18617,-16735,-8195,17472,-8790,20452,-1813,-8389,-12956,5941,87,19567,26442,10562,-542,1203,27957,-25454,-21164,8090,-26079,-2844,-127,55,-7792,-30470,16825,28616,-12677,-6549,32397,-14598,-27491,12351,-8861,21845,30461,-28787,-20218,25626,2825,28746,24410,-24947,-22940,-6207,-14378,-288,9975,2540,27212,-18556,-20849,-4924,25430,22845,2646,13509,-15522,4577,1624,-16711,12994,7610,-23768,23707,22903,-14253,31352,-19940,-11274,1256,-30223,-31445,18569,21544,24490,8740,-23444,25772,5515,29823,-20049,-25777,21013,-26602,-9019,24507,15569,31141,23612,29404,10522,5114,-28244,29493,1057,-10279,17962,26727,-29472,12355,26343,-19502,-17549,-3015,
-9886,19688,7675,32660,-32726,-11746,28000,-32176,-9924,11855,-2455,-27587,12670,14167,-8971,16670,-8603,-6406,30815,8684,-6994,-17155,14695,27430,27951,-27728,-836,-19074,4644,27010,15737,19430,7304,3352,4669,-8737,-19153,11350,-23998,9312,-25948,-20724,6595,-2215,6223,-14644,-9141,-3801,-23446,-21052,-31408,-14997,30171,-18324,19566,-12193,7523,24800,-1217,-28359,-17192,7098,-1363,7950,-18380,31404,32350,30240,19136,13747,17606,23852,3523,12704,-6424,-1593,-1709,-20939,-15956,-5440,-29235,14440,-2093,19565,-24812,2631,-9540,13166,26060,14568,-12503,5293,7171,-32218,24360,22697,13754,-22548,29668,-3828,-27004,-3890,-17199,3395,13107,10594,-32049,12830,26513,-6450,30816,18515,-29683,20250,-22706,-22275,-25,26892,-17986,-11206,26822,-20371,21184,20667,-2872,-68,-16412,21738,13263,23300,17826,16424,-23582,2248,-20011,14127,-32393,843,-11119,26719,-5205,25517,-19841,10438,3112,-5823,-7588,21043,5230,-18217,-13310,-16031,-10776,-10708,28074,-18285,16689,-6025,-13243,-21895,-18586,-21937,-21873,20689,-4621,-22873,-6961,-16734,30350,-17830,-13936,15047,-5255,30626,-9182,-12858,-25981,-18428,8142,4413,24099,15738,24514,-30482,-15749,3865,-3496,-26183,-19524,-32307,-31291,-13752,-9711,-29002,-14369,-684,15281,-22584,-31163,-30524,-29262,20166,19905,-26693,-11072,-31542,-16581,15595,-13830,-17929,-16722,12426,20967,-20556,574,30985,2864,-31484,14249,-1189,9572,18655,-30534,-30011,11186,-8985,17050,14758,8902,-25153,-13431,1775,6427,-14471,28625,-12131,-9566,-21342,12109,30128,31470,8627,30134,-29957,-16718,27084,1142,5147,18352,31391,17579,-31739,-1060,6561,32013,11279,-9226,-30668,-5627,-32048,5648,-2457,-15955,18091,29583,22522,6367,-17373,-3235,-31675,-6253,13912,-2966,-9111,-27755,18122,-11947,-21618,23718,-1499,2279,26958,-27275,24050,-26837,-11805,-752,-16036,-2720,-28828,-9558,3395,27711,7404,9952,-6084,15381,-22493,-28690,-32641,-12763,473,24958,24415,24405,-2908,-25982,-19806,-30593,-879,28263,-16204,30304,29854,-11528,-6724,17356,18350,11592,6525,-18022,-6604,-1690
0,16774,-3478,-3872,-10399,-11092,27998,16263,2363,14348,15062,-7326,-21710,-15302,24536,30150,31695,16062,-19624,14458,-9362,-13682,10617,-19387,-6436,26249,-30535,18861,-27066,27934,-12394,-7890,15968,-26608,-7193,-19891,-7446,27504,24099,5497,25891,-13907,-20134,5322,-4459,-24901,9791,1805,-6530,-4825,-32455,-25812,-9859,-22359,9216,20946,-4123,28392,-13745,-7415,-7564,27826,15236,356,9194,-30205,-20136,3357,13882,25025,-15392,28140,-28943,20171,-8791,25630,-18504,-4252,6881,4037,-26336,16270,18608,-26322,6764,18578,6377,24319,13356,14895,21203,20975,29201,-27408,-13783,21729,-15827,-24950,-9348,-9394,-15067,-22151,-16478,-1651,6551,-10199,-18977,-27918,1018,1948,2837,27858,6506,-12659,-23330,31707,-7654,-9012,25363,-23587,12043,-27009,-3111,3484,15381,13159,-25294,-25741,21183,3205,453,-8032,-27091,-30987,1698,19547,32303,29978,30792,-26507,-8851,-9998,24557,31379,22812,-25024,10833,15716,27637,19873,27833,12892,27114,13357,-8310,1245,15276,-8407,19931,658,-15934,20741,9637,-10430,17055,-1639,10957,-21041,12045,-26851,-23784,-2697,25694,-23027,8046,-7946,-1428,17968,-23596,-14509,19383,24913,-1453,-4552,-27332,-363,-32280,-16348,6774,15171,-20411,6590,-26007,-22961,-1860,26598,-905,-17603,-21174,-28786,-15027,-18525,26743,22662,-28953,11118,13709,-1779,-10453,-6108,23344,18234,7755,8884,-8358,-638,19559,-23422,15333,-12877,25108,-23954,-25421,18681,17384,24487,17053,-28563,20947,-26686,16658,11584,27554,11577,-24425,20675,-5534,-16126,8054,14675,28158,22985,-23978,30782,25783,5200,9347,5466,15321,8838,-15546,14257,-23927,-30188,7618,1366,19591,-32422,-19924,-8793,1197,3801,-14417,15788,-23437,29273,29595,13238,-28599,-3242,-2569,-24753,32671,14221,-8060,10765,-26900,18149,-11371,25819,-12832,27677,-22814,-2684,-17323,-28108,-24368,-26974,-30304,-22115,3966,-29563,24444,-10648,24325,-20566,16601,-751,-7744,-31696,-1390,4465,-25248,31877,-26999,-25541,5996,32524,-23724,-12589,16123,24222,3910,-27023,-17673,-29226,-13763,30593,9824,-25698,-24097,2695,-10541,5135,13
036,5199,22800,15325,-6158,-21636,-22647,-31669,17563,-15979,-15239,7830,-5221,24778,-26408,-883,-16401,2970,165,939,17631,22381,-21723,24718,5194,-25122,21568,-30549,9793,29763,3542,-1451,-10166,16223,4435,-6783,-4188,-1802,458,-8680,-29749,27561,16120,17187,-19893,-5973,30758,-15937,-23553,-21676,19418,-28236,8325,-24305,-21456,8536,-30567,1065,4595,-29129,-14007,13268,27673,19557,19460,-7336,21860,-14661,-31574,-17508,24012,3253,-16229,-24096,-25122,29038,28556,-8366,-17794,7643,-5662,28136,25452,3450,-22353,19448,18686,-32291,-8595,30966,-25733,-20986,-27990,-9404,24749,-14456,-5086,11186,-19091,-5485,-21402,-7135,16835,6820,32216,21860,1555,-7388,-11191,8637,12717,-28621,17734,-16872,-11215,26268,27773,28378,-16784,24762,16358,-3537,267,-13973,-20681,1934,23691,23557,-21965,30867,-7806,13327,-25301,-25186,-11832,868,18900,27897,17927,-9933,28469,-9858,-26142,18363,7642,-31490,-18645,12104,-8279,-9532,17993,22789,8865,-12157,24204,4109,29308,18226,-19220,32682,11264,-22688,-8219,-19150,-25628,-7900,18252,-29655,29917,-29649,-27830,-27949,-25708,3196,-28796,-4593,-16006,-23234,-8384,-22011,-18563,-14958,30488,27478,-19523,5843,-32576,32653,-28142,1448,-9111,3554,26784,-32468,25386,-30751,12651,-29110,-6362,-12208,-21778,-26705,-5951,-3698,-11321,-25492,-18662,27270,280,-6045,-2030,-5645,25176,-463,19553,22092,-28917,-10599,13127,-20893,7588,26148,-27097,24368,10967,-28354,-4824,-13413,-27387,-31671,-24590,17121,27943,-9790,17586,-897,24041,-14792,-26959,-10949,12735,29219,-28865,7007,18444,24439,-15862,1690,-5374,-14849,-32576,21909,-19303,-8833,-15930,15852,-12679,-6325,24643,-23962,-23680,-5019,10425,16390,-16719,-2040,-14732,17365,-31777,9211,11337,8330,30286,-30172,-11188,-25152,3035,19855,28308,-21184,1,12257,8352,4775,3323,-3500,-11637,-26264,-6078,-20766,-21535,1419,15369,-18253,21641,28223,17447,-31389,19048,-29028,-31877,14617,30225,21497,-278,-11794,-22406,-5858,12152,8830,30858,722,20446,15026,26423,1080,28252,30818,-8218,-14832,-8729,28231,-4509,19118
,15762,-12600,-2911,10842,-14763,-4163,-20723,-15904,-21475,27566,-24019,10329,-21882,23511,-11334,-13891,-12987,-19395,10234,32576,-146,-4559,9535,27950,28673,17129,-21443,17532,151,32633,-11807,-23553,-17960,-11817,-19024,-9604,-13813,-6290,7855,-189,30568,-14362,-24159,13525,-26783,138,6693,28546,6655,962,-26884,-16434,15,89,-9479,-15620,-20282,-16693,5832,3508,-363,28629,-19160,-12661,-29778,19236,-26288,7956,16013,-17304,23751,-10192,13758,20954,-14217,-20282,-25291,376,15291,-7974,-18068,6901,17500,-9983,2560,15689,6386,-10396,22035,23915,21505,16525,-16296,27468,30832,17997,13641,-4442,5363,-18889,22477,30283,8460,18532,27766,-20768,-11690,2168,-17989,32529,10129,-18018,-26936,26962,-21089,-26753,-28463,-23955,12352,-1620,-1186,22225,31419,-5277,-20346,-10008,-7157,24936,-870,28237,-13164,2160,24907,18989,26889,26347,10730,2850,588,25352,2355,-17868,11310,-17398,257,3978,-30257,10192,28051,21149,-20739,20112,12024,-22331,-5957,6996,29861,-2172,-15623,2387,4153,-15827,-14843,24110,-9675,-15817,-1366,15706,-16026,-14079,-11150,-17368,25665,21631,27502,29736,32662,-15050,19231,-9142,-29498,20296,-12246,5435,30642,-31914,-30372,-21612,-27985,-14385,-25688,11534,-28579,15329,29106,30184,-5489,13602,-7072,17379,-12384,-15993,9960,11649,1430,32248,15143,19796,25819,13179,23541,28078,31052,-28525,-20473,-25581,-26081,16590,-24165,19679,-9696,8114,-29898,-5881,-28542,9880,27156,-22331,-18511,-30020,-17609,-12581,20282,4976,104,30069,-16132,2541,-32045,-1035,8553,30400,18339,-25243,-7182,-24748,-9217,-26821,-4690,626,-23427,-8111,-1540,14608,-3249,1341,-17875,28194,-11883,3219,3798,22610,3944,-21024,11794,6299,25206,-21496,3905,2997,29109,-25276,15832,-28574,16032,-23038,6426,22817,4612,-5313,-5989,2524,-2742,3856,-13608,24839,-11814,28702,31385,21495,-5499,-30697,-25327,2529,-17296,20172,-25785,14907,11743,-21318,483,24702,2618,17880,-517,10381,-32053,-4083,-27129,-13132,15539,-23174,-27467,3275,-23995,29091,6546,-21974,-23948,15964,-31223,-10755,32096,27698,-14577,31
778,16994,-3600,10707,22013,2193,29938,3257,16070,-19013,19146,954,12472,-22555,13334,32231,-15908,-1133,-19294,32525,11619,-5968,3360,-2791,-11224,-18754,27220,12005,-18793,-3119,26115,-11848,26696,-14991,-23617,14202,14342,24427,31636,6644,-18369,28040,23116,-23239,27146,6021,26243,28979,-25856,-16514,19645,16761,1032,-18021,28142,25752,-9147,-28296,22501,3672,-6210,-27207,-23677,-7964,6369,-12248,-8939,-11692,-17450,10513,6731,17738,-30844,-25851,10082,-23489,11990,27317,-21482,30866,5890,-15058,-7020,-8580,-24949,-22927,20195,-4760,4720,-28323,10268,27587,3969,24451,-23291,22502,22640,32330,30952,-12655,-11901,-4258,-20030,-23592,13074,-17174,31973,29305,-31146,14491,-26820,23665,-29957,20497,-28942,30944,13760,11673,-18775,-31361,-13208,31812,-18674,13158,23901,-31071,21822,-22930,4439,6810,7221,-14531,-16451,21497,24747,-11338,9488,19175,6179,4278,-19024,26713,-4999,28825,-5522,-5969,-23118,9105,-13213,-20912,-20739,-28324,5104,-27855,-23998,32162,17685,16926,5933,14491,-21595,-24566,-4659,27160,-26223,26826,11136,-30021,16735,14535,497,19164,-27407,-22987,-10848,-1285,21585,-4304,1966,12898,28502,-30252,-5108,3068,18062,23829,21770,-10877,-1694,-11729,-31013,20770,-29531,27818,-9280,21067,18711,-23008,-26700,-12454,-25802,-12565,-18517,-15504,-29908,-30596,-32346,-5631,27721,-25781,26142,-21159,25782,13511,8604,-30192,-26095,-28550,-4425,11930,19819,-18268,23832,-19033,-10061,31656,-14344,22163,29926,6114,-8386,8416,16500,21045,-19750,30698,-11655,16087,20345,23098,-7792,878,-24613,24353,19436,-30445,-17188,8401,27454,25472,20063,-25525,11223,4733,-9605,-6028,27804,11074,5019,14993,9714,3191,6257,-478,-8890,29241,29486,-28701,-11600,-18673,24667,-21120,3566,-32222,13286,-10683,8715,-10840,14836,-20564,9695,-21392,8908,-30261,30943,-28713,-22063,-26572,23989,13806,19028,-18922,10355,32512,18299,-454,-22135,28393,8339,-25481,-11117,27349,-3031,12772,-22902,19286,-18374,-29989,-5801,31324,-19119,29117,-7766,19605,7761,20626,8943,20126,7921,-23238,-28333,-25813,-
14288,-31452,20935,-14608,30204,-15032,18686,-32345,-16218,31934,-25118,-3564,-7594,18206,-22081,26478,1085,24388,10449,16021,-22790,15059,32552,-26031,7363,5834,27153,25338,10597,17142,14411,-26994,11529,-746,-15847,22184,-25829,-10786,7947,20150,-1260,-20529,-5928,-28770,-29500,-6013,7963,22535,-20178,-14187,9490,4387,-22527,23178,18581,-13113,15037,-4392,13732,-25833,31997,10862,-25574,-24955,10333,-2786,-15429,29929,-16048,-23695,14585,-696,-19604,-9800,30725,2887,-32170,-14261,26768,24880,-28215,-16190,-30098,-27429,-13294,-30633,-25827,19095,11229,-5284,-10246,-28371,19728,27569,11092,23064,-9768,32656,-7798,28784,-4953,-15499,2842,-17839,7455,-20013,6691,-4915,12791,5767,3877,4554,1967,-67,2688,-10503,-5248,28946,541,306,-22798,13104,16444,10031,-2351,1078,-951,-15709,-32605,-3951,21324,12974,-12242,18648,-26691,-28406,26345,24094,-27293,9129,28754,-6550,-3105,-603,-15885,10030,-4398,-27646,-7818,-281,9710,21124,6307,-14357,24502,-5350,-13379,5334,11266,-1004,-22385,-10653,-17668,-15595,-18490,-6485,7491,17087,23358,24920,12058,-26717,25018,793,-14778,-3280,8377,-18834,-8944,17998,9393,-8815,-19305,31655,-21443,18293,-14312,-7728,5717,30369,-4728,-28015,3993,-26952,-949,-9276,32206,6992,-28718,7988,-5592,-8889,-16500,16240,-6548,8815,4632,-16610,-28708,-3814,8333,14311,24121,-18304,1509,-32039,-32450,10236,-6980,-28835,27399,24307,25209,-2525,12929,20698,23800,3160,-28980,-32584,-26947,-4442,2951,15393,6479,-6708,16330,-17528,-25852,21326,-31318,25070,-2001,-15223,20993,-11861,-28860,-32732,23432,-16519,-23076,27009,-26402,-23604,16214,18249,-2014,-10822,-26605,31055,-6628,-8082,-3924,7650,-9081,32686,-26169,-9802,-32651,-22682,7400,4206,-13792,20057,25495,-8337,31033,14272,-14414,20176,-28180,24182,-26990,-21985,19528,6882,21152,-14645,7345,7543,9842,14382,-10409,25217,7103,17130,12839,-9893,4774,28955,-19008,10243,-14797,-23392,28995,-588,598,-4214,11372,-4108,-11322,-21963,-29527,26117,-22570,10852,14209,-31228,-15300,16184,3619,21835,31904,27215,4767,-122
54,-15683,1201,27398,-1694,13123,27771,30925,-11089,-6952,1616,28945,-23762,18548,679,23909,29433,6188,-13392,29410,4938,3097,17156,18358,-5649,28008,-6952,-2238,-7235,-18069,23817,-13604,11122,-31610,-11793,19692,17222,-32020,26624,-26973,-10950,-21771,-6644,-12057,16471,19799,27521,-6169,25415,-3032,-19974,-8545,-23170,-17127,-16799,29504,24222,7094,17573,-28258,14068,23806,-30848,30399,21698,-25341,-18187,-7953,-13911,-9926,-3156,-20451,-11350,9229,-23290,-15233,31820,-16385,32513,23890,29239,27489,17142,-31450,-5152,585,-19846,19360,-30118,18466,17579,11286,25853,9542,31703,16839,14187 diff --git a/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice24_input0_int16.csv b/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice24_input0_int16.csv new file mode 100644 index 0000000..55be32d --- /dev/null +++ b/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice24_input0_int16.csv @@ -0,0 +1 @@ +-12397,-17705,16878,15838,-32001,26730,22304,18944,-23958,-11612,22802,-3475,-20911,4925,-19094,-21558,10522,16872,-10772,18680,14962,-4867,-4121,-31060,30015,10279,8766,-18209,-13900,5161,12873,-5688,-15449,6566,18598,28557,-21865,19002,27156,27620,15716,23790,-1299,12999,23717,21159,-4768,18487,-14355,-16678,16070,-9659,-8970,-19023,23460,-8814,-15551,-22525,-6968,-25911,5949,4821,-13024,-13598,20477,17177,-23674,16121,18469,-14750,-1454,-24694,-26958,-29224,1457,-12640,-21154,-16351,16547,11549,21994,-28570,-2555,15681,-20491,-30316,4781,-18468,27638,-8945,-8942,23968,11082,12349,16543,4857,19152,-16818,-30989,-23847,29173,-15728,21575,-9082,5663,29032,-13901,-27623,27880,-20789,-28761,2101,12794,-17789,19779,-21178,11660,10832,25326,26721,-12380,-6589,58,29515,-23263,-11696,-24228,-32436,4755,-7969,-27923,-6387,-12520,19981,13768,-8456,30498,30,16784,22540,30616,29111,-9929,28719,-9924,15883,4758,-16406,-29970,14519,11078,31982,222,-3281,-30272,-4741,-21443,16223,488,21179,-16337,20256,24237,25712,2413,-8282
,-9136,-27786,18123,-9044,-8005,16918,14076,7596,4465,8785,7126,23661,-16555,-31521,25981,1026,17993,-7780,2094,3996,-27540,-864,-3684,21479,19350,3216,-14648,3503,-30790,-14548,16149,-14259,18897,10188,-16427,18528,22635,18854,17874,-12782,22194,-28216,30155,-8903,-25008,5709,20839,546,-30981,-7112,8353,25390,-20678,-1204,23926,-16299,-3113,6351,24949,-13989,-12265,4914,20226,8507,-20613,-21731,-6113,-27795,24859,-12295,-15385,-21950,-14332,25184,-6343,-16694,26712,-5942,-14098,-31988,-29799,26805,-32577,-10823,-27006,-28972,1374,-21560,10771,4517,27127,15710,-18626,-16277,-1090,23055,-28533,14727,-5677,-19537,-31733,20013,9359,32299,23355,-3200,-24051,8002,768,17208,-29250,1765,-25660,-22362,29374,18520,-8581,7988,-19510,-3583,3946,-15638,27972,-1082,23418,-29354,-8435,-32235,-8506,27120,23699,3682,-9874,-10920,-4401,-23204,8213,-18389,29837,-9969,-13152,17602,15775,-29989,3018,299,15333,-1486,-1995,28285,32337,-23244,2979,-13616,-4160,-22354,14515,1780,-23396,-13756,28187,-15632,-32395,21263,-59,-3602,-31150,-23761,12103,-15153,3756,-27790,-18088,6958,14649,22543,4067,-23697,-13368,-24880,-24461,-5069,3259,-27632,5218,-19140,771,22923,22339,-32391,3883,-21556,15018,15523,1433,6096,5168,-29345,20755,-13295,-4757,14557,-18698,-8337,-5063,20734,23611,-27143,-16656,5624,5108,-3443,-1280,-10068,-2403,12785,-11121,-7993,25991,29892,-16293,-10108,21728,-2590,-29493,27279,-26054,6080,-25773,31123,-19809,2315,-1294,-22599,21032,-11980,-2940,-21363,-25720,-12127,-14336,594,7039,-17064,10808,18724,-8048,29433,16782,21738,-14977,27287,227,-27234,27837,13111,-27522,21166,5542,9881,-16799,-28912,15,-13165,6775,8922,9431,15996,-9466,30450,30679,9098,14081,-18090,28945,-161,1815,31232,22655,15201,23904,-3192,4184,-31196,-20844,23468,-26977,-17658,-5033,12984,27734,8660,-2710,30247,19931,-13928,3791,-24025,-9229,-24872,11454,-2922,13678,12897,15439,28291,-25649,-12027,15789,-4256,10548,6199,-2072,-12960,-1812,28400,-7079,8462,32764,-19513,23538,-7564,54,-27831,-4457,-10655,-23512
,-4145,-22281,5487,-14025,1832,-13986,-12659,-27796,-5829,-23607,9528,32596,-25086,-20222,15915,23887,26983,25807,3536,29083,-3864,-8774,-31754,25535,-31234,-26095,-16787,-8986,-29427,-24801,-15748,32584,-677,24460,-10092,19883,11102,-23286,-10584,29557,-9688,19322,19903,29514,-15516,-2470,-6441,-2489,-4146,26090,-15535,21624,-18249,29252,-1768,-4506,9077,-23707,-8589,3494,24235,-10408,7299,18831,5490,-18003,7957,30369,18261,15453,227,-19644,-3727,-5446,11641,15785,32218,24796,-16461,-21824,-8931,-9966,23557,30271,-4164,2747,-28495,15136,29908,-31215,-4499,24398,11417,-13410,4909,24241,-1536,15717,29266,16807,-19360,5401,13554,-3490,8880,16341,3121,30799,-10948,5570,12601,32034,-2117,135,-2487,5525,3866,28096,12542,-9980,-175,-27772,1203,19308,-4120,25466,12076,-3021,21768,24216,11692,19734,16931,-345,3567,11444,13768,14803,-6305,4526,20640,30910,27215,-18319,29629,2174,-19430,20605,10418,4039,-30712,964,1536,-32374,-18631,23637,11507,-18937,16457,-19283,7591,6373,-11783,115,-861,13343,-11763,-21116,13608,-8896,19502,4367,-21108,27161,-14974,-30114,-2227,16060,31773,-18783,2889,32253,-23194,4875,24118,8843,-31211,4378,2605,-2038,-32339,-9981,-21865,-28101,24836,-6111,-12012,17402,-13916,-5355,-15263,10571,-14346,-14727,5695,19261,21134,29832,20977,-15459,-31697,1547,-26236,-8482,21710,-27574,8000,30626,-21876,23420,-29802,30281,-10208,19139,2152,-26053,24425,30602,-7965,-2047,27189,20329,-26774,23018,3394,4117,-4790,-20204,-10149,3724,10658,25646,-29894,-17040,28866,-30751,13018,31679,11891,-24024,7471,-18738,-21679,-31900,14976,-18769,31983,-27122,29998,7995,1344,22104,9461,-28526,24930,-11726,-27623,10592,11210,-23050,10635,-14512,-30574,12432,-23499,-27083,9108,30283,-13741,-6539,11773,-32264,22332,-3020,-20383,19507,26047,-14504,-975,18272,-7805,-2878,-28545,-16168,32161,-14598,21798,-9659,-5841,6793,-29296,-20688,7842,-25294,-19738,7114,-7649,-17176,7593,3198,-30862,6715,18550,-14880,1670,30330,-22290,-1290,-9286,-12318,24317,28509,-30486,-6993,26132,10227,1405
2,-27443,3768,31234,-14595,29805,-15169,2827,29204,18839,26783,-19877,16141,11483,24426,-22423,-9886,-21151,-13839,11138,3311,10814,18699,-1843,22351,-21440,29126,-3032,-25791,27521,-20741,-21319,17370,-1806,22756,-21902,-9949,-23956,8849,-24125,-725,19736,3024,23455,24670,13051,17294,-8639,30111,-7077,-15576,22462,21642,13304,25732,-2579,-30242,876,2603,5704,-10602,32168,14774,-4295,3528,12366,-28166,8764,6422,-10838,32656,-32413,20157,-13170,-24566,-9916,-23601,-22891,-5344,18646,-30197,7734,3379,-23898,-9431,20891,14993,-1894,-23362,5278,-26809,-3548,-19554,16827,-10686,-20009,768,-6261,-23587,23864,4617,463,3407,-7695,27463,-13836,2648,6396,16293,21880,-24585,9869,31070,-592,-6723,31684,19094,-12168,8128,2599,13701,22810,-24335,31900,-13554,13708,-11482,25794,-8623,-14332,17505,27553,-9775,-22467,-24143,-11606,-28999,-24065,-627,28820,25981,-4661,-9936,8057,28957,-22262,-27903,8335,-6798,28958,13972,-31804,-10520,-15830,-17299,20110,27890,-22627,-31475,28192,24444,-26814,7191,24240,-10875,-6187,-17980,-30963,10920,11747,11819,-2333,-29514,-31421,6475,6930,32036,-478,23844,-22095,-19017,10069,28377,32356,10662,-23528,28958,-15653,6855,-14174,-3539,-1082,-4996,1964,25222,12235,18790,-18798,20512,-9310,14718,31714,-5470,-22844,-18059,17288,28047,10941,27523,-18392,11699,-919,-12226,30810,29042,-7638,20785,14173,-28325,-8887,-15523,15350,6999,3827,4533,-8545,3523,-29547,-22649,-226,-20035,-14130,28944,-553,-12122,-31563,697,14717,2489,209,1562,-26126,-8019,-10682,7394,-24451,9382,17627,8514,15892,-11792,-30630,32055,-10692,-8446,-20123,24490,24043,-3110,20222,-20667,-10268,-15402,-27443,3906,32393,21305,25798,5176,-15446,-13462,31619,-22121,-29002,-1160,2723,26711,19899,-25674,-22139,3228,-1977,14943,11380,31628,-24837,-10982,32616,-16443,20479,-12309,-29058,-7272,10215,31455,-5985,-9909,-16166,26585,12164,-3186,-32316,-22994,12525,16461,-29911,12788,1222,27236,4722,28190,-30689,-25095,-23269,3323,23679,24024,-23953,-27763,-20600,9990,-3942,847,10618,15426,5766,8294
,11563,30582,-15570,-29166,18621,15014,-11112,3344,9722,351,14899,-32479,-32222,30623,-26018,29204,12245,-1881,17476,-22199,-8924,-31585,15274,8853,-20232,-21712,17233,-5706,30625,13898,31987,-12480,-22074,16259,24581,3362,1966,28508,-9154,-8435,14983,19138,24035,-20558,-15301,6918,-4216,12917,25348,14256,-964,-13369,23549,-30173,-13663,-18220,24201,-19847,-23374,-11017,-4247,-29800,6319,-718,-22442,11254,-1772,2531,8188,-19965,20377,-11559,-16786,-32085,-31912,-29080,3358,-28379,-12189,-3809,-32646,-31250,27735,-7750,13993,-7317,17302,-8936,23852,-14499,-14964,-13125,31404,-10663,9007,20922,27454,20451,5705,-23977,9946,-29259,-13086,24027,-27163,-31376,-1740,22400,-22073,26515,18143,29379,-17202,20639,27414,1866,29757,-3280,-26469,20394,24341,-25926,-30844,-6608,-27165,32367,15276,-13394,7950,20171,-3435,21232,-6166,-11750,31164,-12700,31035,2844,8955,-5007,-21958,-11659,-5676,29195,-24653,-31866,-13808,-11905,-14891,-8137,-737,-28019,-27634,-2835,-21606,-17398,-31743,17272,-9882,30581,30952,-16482,16165,-2453,9801,32627,14721,-19375,18927,-24970,8669,-15819,17070,27987,21370,-22172,-15159,14568,-6417,-24057,-12076,28148,-5632,-15130,24347,-20265,5189,-19946,23768,9529,9161,-12839,25993,22602,-5831,23618,-5128,-12449,-142,14933,5969,-30223,-7143,-18827,5114,-1488,6766,14759,28499,309,29481,25665,3635,15777,24746,3966,-7809,3028,-3348,-23946,290,6505,-32128,680,22590,24135,5058,-4866,6734,-30940,-11589,-15595,-32117,-26441,-20241,-2013,-28499,-10546,14418,-508,-17675,20326,-18952,-29668,-17933,-18505,1476,21715,-10025,-21182,13138,10700,-12020,-5106,-11171,-20719,15782,30526,-7187,-11155,-14092,-23976,9303,-502,-16147,-16179,31250,-1155,1664,25337,5651,-15669,-25755,28306,-22475,-11951,-17033,7081,348,-15795,16465,-25250,-9446,-2600,30180,-28834,-6984,29360,23598,24173,5288,-24957,5645,-26909,20793,-1347,-21133,28774,17209,-4929,-11807,5464,-21643,16282,-29068,26320,-6678,-7845,-4490,-30116,-30572,-14622,-17379,-27178,26689,-20637,30820,14023,26493,-13979,20643,4697
,-11830,-344,5458,-26011,-10732,-19181,-3953,19854,-30696,-22439,2273,2181,11544,-24327,9606,27586,-5344,-2013,19642,28901,21654,22579,16170,-31266,-3676,-3419,-16286,-28761,17573,-23166,9944,675,-25173,-10068,-18811,-17774,-12385,21307,8361,-30170,12300,20360,-4557,-20750,5750,-6493,4761,32267,-2868,19070,5182,-459,-32159,-12528,21747,-4026,-15662,4557,-27871,-5594,-1981,15967,30578,4111,-17086,-9423,10509,-16577,14886,-2411,-18235,29977,20606,-16980,7287,-25554,27313,17709,-7464,5618,-12970,-17219,32033,3943,3664,-21044,-25976,15127,-18569,-16253,26699,-3796,-26902,15515,27608,-23220,22750,18704,20985,2672,-1892,-21158,-10926,-10014,30239,20397,-30609,2251,31798,607,-11579,3922,-1202,-25428,20303,32265,5924,-26601,-10734,-18035,203,17500,-18292,5370,19140,13256,15864,18335,12997,-14873,19461,-27508,-17661,5359,-1092,-18456,816,-5968,-21798,20929,-17954,-18431,14607,-13349,-20356,14715,22339,-24262,2431,-11261,7101,-3071,24622,-27505,-17265,-2132,-9240,32292,-6381,-21380,12017,10577,-4657,-9734,-19460,-30307,-18118,16330,-18327,-25453,21203,3080,2869,9281,-14587,-20334,13191,14287,21498,-175,29172,-3814,13622,20653,-30728,-5574,-25966,29000,13658,-9003,15852,12240,-31347,32096,-2071,28063,20013,26679,1317,2500,13802,3825,31982,12832,11704,-14689,-1779,-18610,-17749,15959,-574,4363,-18688,10099,30613,-31272,9218,-28817,25792,3819,-16678,31117,-4981,-2099,-22816,-3254,0,-20572,21475,27498,-12261,-30588,12156,23517,-19757,-29095,-8254,2574,8560,-5684,31834,31593,-27704,3635,-28574,-28136,15640,-12358,-5078,-1104,31138,21075,6286,15679,10033,-13419,-23522,-16133,-27983,-5027,-10695,-19195,18049,-7750,-5815,-30391,-19807,4057,20430,-2832,-12842,31856,-27197,-14354,15212,-14025,-12291,22368,-14150,-16713,6855,-9385,-26666,7171,2211,15178,-16210,-25133,27677,2651,-10321,-18925,-14677,28217,18968,10012,-15395,16437,15568,28283,-15102,26597,12904,4090,-31232,-26468,-25874,24809,8748,14107,-5568,11334,1666,20868,604,-7017,-24568,-3001,-23005,16833,8862,21883,-1122,-13011,-12
005,1315,-21447,-23789,-17139,-9891,-3739,10036,-27366,4140,-28439,-4334,28546,551,31901,3495,21695,26216,-28406,-23809,-924,-27852,-11743,-15671,-26224,21292,-11336,3596,-16392,-21211,-27236,21880,30366,-17457,28639,-31327,7030,28663,4560,18025,32517,25894,11536,-5224,25490,15860,20822,-2965,13499,-2509,-28020,22058,-16716,-26818,-893,16104,19506,2509,9933,12047,-24018,-9590,28676,-11017,-7459,-21749,32230,-3615,-19018,-14554,24816,-9285,26233,-20518,-14042,4458,3163,-11917,-24789,-29931,-27599,26973,-23173,8676,-21094,18736,-8975,11475,-19100,26832,-17071,-22804,26416,-16641,-25427,-18617,-16735,-8195,17472,-8790,20452,-1813,-8389,-12956,5941,87,19567,26442,10562,-542,1203,27957,-25454,-21164,8090,-26079,-2844,-127,55,-7792,-30470,16825,28616,-12677,-6549,32397,-14598,-27491,12351,-8861,21845,30461,-28787,-20218,25626,2825,28746,24410,-24947,-22940,-6207,-14378,-288,9975,2540,27212,-18556,-20849,-4924,25430,22845,2646,13509,-15522,4577,1624,-16711,12994,7610,-23768,23707,22903,-14253,31352,-19940,-11274,1256,-30223,-31445,18569,21544,24490,8740,-23444,25772,5515,29823,-20049,-25777,21013,-26602,-9019,24507,15569,31141,23612,29404,10522,5114,-28244,29493,1057,-10279,17962,26727,-29472,12355,26343,-19502,-17549,-3015,-9886,19688,7675,32660,-32726,-11746,28000,-32176,-9924,11855,-2455,-27587,12670,14167,-8971,16670,-8603,-6406,30815,8684,-6994,-17155,14695,27430,27951,-27728,-836,-19074,4644,27010,15737,19430,7304,3352,4669,-8737,-19153,11350,-23998,9312,-25948,-20724,6595,-2215,6223,-14644,-9141,-3801,-23446,-21052,-31408,-14997,30171,-18324,19566,-12193,7523,24800,-1217,-28359,-17192,7098,-1363,7950,-18380,31404,32350,30240,19136,13747,17606,23852,3523,12704,-6424,-1593,-1709,-20939,-15956,-5440,-29235,14440,-2093,19565,-24812,2631,-9540,13166,26060,14568,-12503,5293,7171,-32218,24360,22697,13754,-22548,29668,-3828,-27004,-3890,-17199,3395,13107,10594,-32049,12830,26513,-6450,30816,18515,-29683,20250,-22706,-22275,-25,26892,-17986,-11206,26822,-20371,21184,20667,-2
872,-68,-16412,21738,13263,23300,17826,16424,-23582,2248,-20011,14127,-32393,843,-11119,26719,-5205,25517,-19841,10438,3112,-5823,-7588,21043,5230,-18217,-13310,-16031,-10776,-10708,28074,-18285,16689,-6025,-13243,-21895,-18586,-21937,-21873,20689,-4621,-22873,-6961,-16734,30350,-17830,-13936,15047,-5255,30626,-9182,-12858,-25981,-18428,8142,4413,24099,15738,24514,-30482,-15749,3865,-3496,-26183,-19524,-32307,-31291,-13752,-9711,-29002,-14369,-684,15281,-22584,-31163,-30524,-29262,20166,19905,-26693,-11072,-31542,-16581,15595,-13830,-17929,-16722,12426,20967,-20556,574,30985,2864,-31484,14249,-1189,9572,18655,-30534,-30011,11186,-8985,17050,14758,8902,-25153,-13431,1775,6427,-14471,28625,-12131,-9566,-21342,12109,30128,31470,8627,30134,-29957,-16718,27084,1142,5147,18352,31391,17579,-31739,-1060,6561,32013,11279,-9226,-30668,-5627,-32048,5648,-2457,-15955,18091,29583,22522,6367,-17373,-3235,-31675,-6253,13912,-2966,-9111,-27755,18122,-11947,-21618,23718,-1499,2279,26958,-27275,24050,-26837,-11805,-752,-16036,-2720,-28828,-9558,3395,27711,7404,9952,-6084,15381,-22493,-28690,-32641,-12763,473,24958,24415,24405,-2908,-25982,-19806,-30593,-879,28263,-16204,30304,29854,-11528,-6724,17356,18350,11592,6525,-18022,-6604,-16900,16774,-3478,-3872,-10399,-11092,27998,16263,2363,14348,15062,-7326,-21710,-15302,24536,30150,31695,16062,-19624,14458,-9362,-13682,10617,-19387,-6436,26249,-30535,18861,-27066,27934,-12394,-7890,15968,-26608,-7193,-19891,-7446,27504,24099,5497,25891,-13907,-20134,5322,-4459,-24901,9791,1805,-6530,-4825,-32455,-25812,-9859,-22359,9216,20946,-4123,28392,-13745,-7415,-7564,27826,15236,356,9194,-30205,-20136,3357,13882,25025,-15392,28140,-28943,20171,-8791,25630,-18504,-4252,6881,4037,-26336,16270,18608,-26322,6764,18578,6377,24319,13356,14895,21203,20975,29201,-27408,-13783,21729,-15827,-24950,-9348,-9394,-15067,-22151,-16478,-1651,6551,-10199,-18977,-27918,1018,1948,2837,27858,6506,-12659,-23330,31707,-7654,-9012,25363,-23587,12043,-27009,-3111,3484,153
81,13159,-25294,-25741,21183,3205,453,-8032,-27091,-30987,1698,19547,32303,29978,30792,-26507,-8851,-9998,24557,31379,22812,-25024,10833,15716,27637,19873,27833,12892,27114,13357,-8310,1245,15276,-8407,19931,658,-15934,20741,9637,-10430,17055,-1639,10957,-21041,12045,-26851,-23784,-2697,25694,-23027,8046,-7946,-1428,17968,-23596,-14509,19383,24913,-1453,-4552,-27332,-363,-32280,-16348,6774,15171,-20411,6590,-26007,-22961,-1860,26598,-905,-17603,-21174,-28786,-15027,-18525,26743,22662,-28953,11118,13709,-1779,-10453,-6108,23344,18234,7755,8884,-8358,-638,19559,-23422,15333,-12877,25108,-23954,-25421,18681,17384,24487,17053,-28563,20947,-26686,16658,11584,27554,11577,-24425,20675,-5534,-16126,8054,14675,28158,22985,-23978,30782,25783,5200,9347,5466,15321,8838,-15546,14257,-23927,-30188,7618,1366,19591,-32422,-19924,-8793,1197,3801,-14417,15788,-23437,29273,29595,13238,-28599,-3242,-2569,-24753,32671,14221,-8060,10765,-26900,18149,-11371,25819,-12832,27677,-22814,-2684,-17323,-28108,-24368,-26974,-30304,-22115,3966,-29563,24444,-10648,24325,-20566,16601,-751,-7744,-31696,-1390,4465,-25248,31877,-26999,-25541,5996,32524,-23724,-12589,16123,24222,3910,-27023,-17673,-29226,-13763,30593,9824,-25698,-24097,2695,-10541,5135,13036,5199,22800,15325,-6158,-21636,-22647,-31669,17563,-15979,-15239,7830,-5221,24778,-26408,-883,-16401,2970,165,939,17631,22381,-21723,24718,5194,-25122,21568,-30549,9793,29763,3542,-1451,-10166,16223,4435,-6783,-4188,-1802,458,-8680,-29749,27561,16120,17187,-19893,-5973,30758,-15937,-23553,-21676,19418,-28236,8325,-24305,-21456,8536,-30567,1065,4595,-29129,-14007,13268,27673,19557,19460,-7336,21860,-14661,-31574,-17508,24012,3253,-16229,-24096,-25122,29038,28556,-8366,-17794,7643,-5662,28136,25452,3450,-22353,19448,18686,-32291,-8595,30966,-25733,-20986,-27990,-9404,24749,-14456,-5086,11186,-19091,-5485,-21402,-7135,16835,6820,32216,21860,1555,-7388,-11191,8637,12717,-28621,17734,-16872,-11215,26268,27773,28378,-16784,24762,16358,-3537,267,-13973,-206
81,1934,23691,23557,-21965,30867,-7806,13327,-25301,-25186,-11832,868,18900,27897,17927,-9933,28469,-9858,-26142,18363,7642,-31490,-18645,12104,-8279,-9532,17993,22789,8865,-12157,24204,4109,29308,18226,-19220,32682,11264,-22688,-8219,-19150,-25628,-7900,18252,-29655,29917,-29649,-27830,-27949,-25708,3196,-28796,-4593,-16006,-23234,-8384,-22011,-18563,-14958,30488,27478,-19523,5843,-32576,32653,-28142,1448,-9111,3554,26784,-32468,25386,-30751,12651,-29110,-6362,-12208,-21778,-26705,-5951,-3698,-11321,-25492,-18662,27270,280,-6045,-2030,-5645,25176,-463,19553,22092,-28917,-10599,13127,-20893,7588,26148,-27097,24368,10967,-28354,-4824,-13413,-27387,-31671,-24590,17121,27943,-9790,17586,-897,24041,-14792,-26959,-10949,12735,29219,-28865,7007,18444,24439,-15862,1690,-5374,-14849,-32576,21909,-19303,-8833,-15930,15852,-12679,-6325,24643,-23962,-23680,-5019,10425,16390,-16719,-2040,-14732,17365,-31777,9211,11337,8330,30286,-30172,-11188,-25152,3035,19855,28308,-21184,1,12257,8352,4775,3323,-3500,-11637,-26264,-6078,-20766,-21535,1419,15369,-18253,21641,28223,17447,-31389,19048,-29028,-31877,14617,30225,21497,-278,-11794,-22406,-5858,12152,8830,30858,722,20446,15026,26423,1080,28252,30818,-8218,-14832,-8729,28231,-4509,19118,15762,-12600,-2911,10842,-14763,-4163,-20723,-15904,-21475,27566,-24019,10329,-21882,23511,-11334,-13891,-12987,-19395,10234,32576,-146,-4559,9535,27950,28673,17129,-21443,17532,151,32633,-11807,-23553,-17960,-11817,-19024,-9604,-13813,-6290,7855,-189,30568,-14362,-24159,13525,-26783,138,6693,28546,6655,962,-26884,-16434,15,89,-9479,-15620,-20282,-16693,5832,3508,-363,28629,-19160,-12661,-29778,19236,-26288,7956,16013,-17304,23751,-10192,13758,20954,-14217,-20282,-25291,376,15291,-7974,-18068,6901,17500,-9983,2560,15689,6386,-10396,22035,23915,21505,16525,-16296,27468,30832,17997,13641,-4442,5363,-18889,22477,30283,8460,18532,27766,-20768,-11690,2168,-17989,32529,10129,-18018,-26936,26962,-21089,-26753,-28463,-23955,12352,-1620,-1186,22225,31419,-5277,
-20346,-10008,-7157,24936,-870,28237,-13164,2160,24907,18989,26889,26347,10730,2850,588,25352,2355,-17868,11310,-17398,257,3978,-30257,10192,28051,21149,-20739,20112,12024,-22331,-5957,6996,29861,-2172,-15623,2387,4153,-15827,-14843,24110,-9675,-15817,-1366,15706,-16026,-14079,-11150,-17368,25665,21631,27502,29736,32662,-15050,19231,-9142,-29498,20296,-12246,5435,30642,-31914,-30372,-21612,-27985,-14385,-25688,11534,-28579,15329,29106,30184,-5489,13602,-7072,17379,-12384,-15993,9960,11649,1430,32248,15143,19796,25819,13179,23541,28078,31052,-28525,-20473,-25581,-26081,16590,-24165,19679,-9696,8114,-29898,-5881,-28542,9880,27156,-22331,-18511,-30020,-17609,-12581,20282,4976,104,30069,-16132,2541,-32045,-1035,8553,30400,18339,-25243,-7182,-24748,-9217,-26821,-4690,626,-23427,-8111,-1540,14608,-3249,1341,-17875,28194,-11883,3219,3798,22610,3944,-21024,11794,6299,25206,-21496,3905,2997,29109,-25276,15832,-28574,16032,-23038,6426,22817,4612,-5313,-5989,2524,-2742,3856,-13608,24839,-11814,28702,31385,21495,-5499,-30697,-25327,2529,-17296,20172,-25785,14907,11743,-21318,483,24702,2618,17880,-517,10381,-32053,-4083,-27129,-13132,15539,-23174,-27467,3275,-23995,29091,6546,-21974,-23948,15964,-31223,-10755,32096,27698,-14577,31778,16994,-3600,10707,22013,2193,29938,3257,16070,-19013,19146,954,12472,-22555,13334,32231,-15908,-1133,-19294,32525,11619,-5968,3360,-2791,-11224,-18754,27220,12005,-18793,-3119,26115,-11848,26696,-14991,-23617,14202,14342,24427,31636,6644,-18369,28040,23116,-23239,27146,6021,26243,28979,-25856,-16514,19645,16761,1032,-18021,28142,25752,-9147,-28296,22501,3672,-6210,-27207,-23677,-7964,6369,-12248,-8939,-11692,-17450,10513,6731,17738,-30844,-25851,10082,-23489,11990,27317,-21482,30866,5890,-15058,-7020,-8580,-24949,-22927,20195,-4760,4720,-28323,10268,27587,3969,24451,-23291,22502,22640,32330,30952,-12655,-11901,-4258,-20030,-23592,13074,-17174,31973,29305,-31146,14491,-26820,23665,-29957,20497,-28942,30944,13760,11673,-18775,-31361,-13208,31812,-1867
4,13158,23901,-31071,21822,-22930,4439,6810,7221,-14531,-16451,21497,24747,-11338,9488,19175,6179,4278,-19024,26713,-4999,28825,-5522,-5969,-23118,9105,-13213,-20912,-20739,-28324,5104,-27855,-23998,32162,17685,16926,5933,14491,-21595,-24566,-4659,27160,-26223,26826,11136,-30021,16735,14535,497,19164,-27407,-22987,-10848,-1285,21585,-4304,1966,12898,28502,-30252,-5108,3068,18062,23829,21770,-10877,-1694,-11729,-31013,20770,-29531,27818,-9280,21067,18711,-23008,-26700,-12454,-25802,-12565,-18517,-15504,-29908,-30596,-32346,-5631,27721,-25781,26142,-21159,25782,13511,8604,-30192,-26095,-28550,-4425,11930,19819,-18268,23832,-19033,-10061,31656,-14344,22163,29926,6114,-8386,8416,16500,21045,-19750,30698,-11655,16087,20345,23098,-7792,878,-24613,24353,19436,-30445,-17188,8401,27454,25472,20063,-25525,11223,4733,-9605,-6028,27804,11074,5019,14993,9714,3191,6257,-478,-8890,29241,29486,-28701,-11600,-18673,24667,-21120,3566,-32222,13286,-10683,8715,-10840,14836,-20564,9695,-21392,8908,-30261,30943,-28713,-22063,-26572,23989,13806,19028,-18922,10355,32512,18299,-454,-22135,28393,8339,-25481,-11117,27349,-3031,12772,-22902,19286,-18374,-29989,-5801,31324,-19119,29117,-7766,19605,7761,20626,8943,20126,7921,-23238,-28333,-25813,-14288,-31452,20935,-14608,30204,-15032,18686,-32345,-16218,31934,-25118,-3564,-7594,18206,-22081,26478,1085,24388,10449,16021,-22790,15059,32552,-26031,7363,5834,27153,25338,10597,17142,14411,-26994,11529,-746,-15847,22184,-25829,-10786,7947,20150,-1260,-20529,-5928,-28770,-29500,-6013,7963,22535,-20178,-14187,9490,4387,-22527,23178,18581,-13113,15037,-4392,13732,-25833,31997,10862,-25574,-24955,10333,-2786,-15429,29929,-16048,-23695,14585,-696,-19604,-9800,30725,2887,-32170,-14261,26768,24880,-28215,-16190,-30098,-27429,-13294,-30633,-25827,19095,11229,-5284,-10246,-28371,19728,27569,11092,23064,-9768,32656,-7798,28784,-4953,-15499,2842,-17839,7455,-20013,6691,-4915,12791,5767,3877,4554,1967,-67,2688,-10503,-5248,28946,541,306,-22798,13104,16444,10031,
-2351,1078,-951,-15709,-32605,-3951,21324,12974,-12242,18648,-26691,-28406,26345,24094,-27293,9129,28754,-6550,-3105,-603,-15885,10030,-4398,-27646,-7818,-281,9710,21124,6307,-14357,24502,-5350,-13379,5334,11266,-1004,-22385,-10653,-17668,-15595,-18490,-6485,7491,17087,23358,24920,12058,-26717,25018,793,-14778,-3280,8377,-18834,-8944,17998,9393,-8815,-19305,31655,-21443,18293,-14312,-7728,5717,30369,-4728,-28015,3993,-26952,-949,-9276,32206,6992,-28718,7988,-5592,-8889,-16500,16240,-6548,8815,4632,-16610,-28708,-3814,8333,14311,24121,-18304,1509,-32039,-32450,10236,-6980,-28835,27399,24307,25209,-2525,12929,20698,23800,3160,-28980,-32584,-26947,-4442,2951,15393,6479,-6708,16330,-17528,-25852,21326,-31318,25070,-2001,-15223,20993,-11861,-28860,-32732,23432,-16519,-23076,27009,-26402,-23604,16214,18249,-2014,-10822,-26605,31055,-6628,-8082,-3924,7650,-9081,32686,-26169,-9802,-32651,-22682,7400,4206,-13792,20057,25495,-8337,31033,14272,-14414,20176,-28180,24182,-26990,-21985,19528,6882,21152,-14645,7345,7543,9842,14382,-10409,25217,7103,17130,12839,-9893,4774,28955,-19008,10243,-14797,-23392,28995,-588,598,-4214,11372,-4108,-11322,-21963,-29527,26117,-22570,10852,14209,-31228,-15300,16184,3619,21835,31904,27215,4767,-12254,-15683,1201,27398,-1694,13123,27771,30925,-11089,-6952,1616,28945,-23762,18548,679,23909,29433,6188,-13392,29410,4938,3097,17156,18358,-5649,28008,-6952,-2238,-7235,-18069,23817,-13604,11122,-31610,-11793,19692,17222,-32020,26624,-26973,-10950,-21771,-6644,-12057,16471,19799,27521,-6169,25415,-3032,-19974,-8545,-23170,-17127,-16799,29504,24222,7094,17573,-28258,14068,23806,-30848,30399,21698,-25341,-18187,-7953,-13911,-9926,-3156,-20451,-11350,9229,-23290,-15233,31820,-16385,32513,23890,29239,27489,17142,-31450,-5152,585,-19846,19360,-30118,18466,17579,11286,25853,9542,31703,16839,14187 diff --git a/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice25.tflite 
b/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice25.tflite new file mode 100644 index 0000000..13b11f5 Binary files /dev/null and b/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice25.tflite differ diff --git a/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice25_golden_int16.csv b/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice25_golden_int16.csv new file mode 100644 index 0000000..13193d2 --- /dev/null +++ b/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice25_golden_int16.csv @@ -0,0 +1 @@ +-22209,-28654,28273,27119,14416,7567,28766,-19466,21969,22318,-9296,-10572,-13757,-12119,12616,-97,-26252,979,16533,9741,10389,11150,-20224,10069,-13482,3492,-3299,7003,10930,22050,30651,-27630,26081,-18809,-13882,-27533,-2366,-32573,-28985,4977,27422,-17,-9736,-26944,-32173,12587,-12348,15938,-9865,18005,17424,-29396,7639,-20669,-18571,-20472,-15386,12078,1877,10403,-3931,-9271,-20150,-14962,-31610,-25592,30453,19166,-4599,-26642,10766,15421,-25855,9789,27152,4404,-2222,29036,-18756,24515,-9086,-20081,-16497,25367,11500,13329,22768,-6926,19044,-10523,30716,3575,14821,1366,-1831,25159,-12384,6675,-28219,7100,-32066,13160,6627,-11837,4499,-11269,-26888,15871,-3172,17752,-20822,-20689,-9088,30383,28193,19630,32317,-10470,15796,-1660,12159,27097,-3858,29581,21178,18812,30241,-570,-16959,-6055,28199,-17505,16424,-7282,-28188,10001,-14607,1462,-2577,7192,-11282,19033,-11806,30261,-10277,-3442,25647,546,7150,-23278,2404,19930,8483,30068,-14378,18161,-24112,-26848,-18843,-23672,21611,-7361,5245,-8071,-23169,-10852,-24628,23024,-23889,-18513,-20872,-19408,29057,-17044,28034,4643,1220,-32751,5508,-13582,829,-15563,9232,8556,16549,23400,22722,-27669,29281,-14138,-29389,26633,-14720,8077,9663,26652,19680,5613,-9294,-19885,-27119,-15245,30716,10022,-6295,26638,19286,30749,-1684,-13334,-9065,5426,28063,2037,-16548,8467,-25326,28763,7331,20720,11678,12055,-19214,-13
262,1579,-7003,32354,7945,-2732,-10033,-7935,-29866,-2010,7803,-20716,18029,-18363,-21169,-24757,28742,1859,-14198,31687,13538,24492,-22099,3628,-7649,-23718,-8537,-1168,12444,-5608,-32084,-24119,-7386,27465,-16321,-14681,-21633,-28375,10575,-26426,-5090,31955,-1288,-14086,1239,-20626,-24694,20805,26543,29963,-31128,7919,832,-1993,15554,-20332,-15856,-28431,-22181,11817,-18982,-15963,13222,-18779,2459,3857,-19936,-4093,11494,-962,4443,-13066,23736,-3258,32681,-16611,16016,-5747,-2171,12906,27039,-31575,31010,-1068,-2120,-6265,-6727,6577,-6505,-14227,-25111,-11061,-18495,-30377,-26284,-18759,-23073,-8134,-28096,25762,-23972,-6681,-27351,-22952,-9092,17992,23262,27129,1651,-13133,21969,29043,-20842,31619,23898,22773,11003,-8571,-946,-10039,19755,-1302,9652,-5365,-28894,32667,-10248,-19754,-25797,21323,9306,2732,-32556,7464,-10743,-9249,12795,17038,-27625,21565,28491,-24408,-7608,20687,-25672,6057,-11960,19604,22804,-27333,-10472,21812,14780,-14355,3786,-27721,-22662,-14804,14583,-10980,11661,9138,-29381,17174,161,5983,29842,9803,24189,-4585,22581,21842,-9853,-19591,-6371,-23859,19474,7455,-30768,10598,-21497,-17258,17493,-12290,-18055,-17558,-22962,-1584,-2595,25962,-11114,-25563,-21700,13576,-21198,-27344,-5396,8292,8353,4770,-4761,16761,-14760,30919,1672,-32059,24987,5694,-23939,308,-5573,21740,17499,-22491,-27614,29132,9422,14552,-27093,-13400,-22151,-12508,-20110,-4702,13629,13890,-15368,-27208,-28085,-2979,9670,-15361,-11461,-10742,21652,-31549,30661,-23718,6805,16320,-21179,-24456,-13005,-31650,-5029,-19119,9694,-17078,-32176,-5432,14549,-24884,-26550,-8968,26295,-30811,9566,-4064,-25305,30076,20483,27017,-29390,-18032,9042,26073,-9904,4328,7366,-26693,-6901,-607,28344,2752,-1678,3317,-2941,-28830,-19047,26039,-23834,-15731,-25590,-5021,-21361,-9634,863,14994,11839,-26267,-17063,-11113,25909,18398,-29784,15612,-17261,-24016,27210,20627,3026,-284,-2827,-7673,5169,15699,-28777,30390,-15095,6009,17416,20818,17985,3491,17665,-19956,-15523,16974,-289,-5222,2574,10938
,9499,-11865,18224,25244,15758,9884,11225,-28219,29527,26096,-10839,-10998,17516,-13206,7286,-6242,-21691,26510,10500,15797,23322,17962,-30843,-1115,-15699,16417,-32745,8997,155,7820,19141,-18225,28077,-12409,-20173,-31890,-436,-1275,24898,-5851,20783,-9969,29534,15712,11109,12057,-29635,-18726,-8672,4028,19438,-27003,25880,4427,-23963,32051,-1393,27227,-28630,13271,5071,-22944,14295,23807,-11522,-25022,-15665,-14267,-6010,11318,18130,-18547,23975,-29666,-12221,2497,14308,-32160,-19356,11560,8995,-17664,-18049,-768,2626,-15502,-29061,-16899,-3637,21552,15211,31108,1515,24881,25443,31347,-20277,17610,-7203,-16327,12335,17032,-19025,8148,12758,-10771,-26023,13010,-14343,-26241,28685,31939,18329,-16114,-5530,-11786,-3485,-7144,-1382,-14989,-22860,25388,-2159,-13924,22067,13906,22283,-18830,14825,19929,21522,-17176,-13984,-31530,-25746,23249,253,-25970,29671,26286,-15165,-11062,1005,7705,24009,21464,-17641,18706,-21549,-11242,-8279,-22724,-1192,23436,8362,-29447,-20853,11693,-167,-27511,-8567,-31452,-21743,-16621,-9076,-21752,-27546,26788,-24110,-30011,7806,-18178,-6911,-18253,-14476,-21065,-25972,25533,-3119,21351,15131,5539,-2641,13877,15960,14911,-8562,-5196,-6585,20052,27455,-1541,-6537,1071,1848,-9404,30418,-9540,-23096,18042,-4578,-31105,26126,-19129,854,4284,-31104,-28551,7401,21291,-26538,14662,905,-9485,21731,11765,-25562,-29697,3681,-17483,3813,22415,-6450,-9434,9270,-28399,8326,-1527,-450,-19378,12413,-14632,28191,9641,26432,28274,-27854,7325,29415,-11382,-25077,2761,-31897,8573,-6148,-99,-30865,-147,-26631,15604,7419,7058,-27129,-5808,3454,4877,17798,-25005,21114,20278,26091,27965,-27436,10733,23575,-2434,29400,28748,-29626,20091,4515,1430,29195,958,-32052,-26658,17940,16837,11005,14072,-31842,30011,23387,-6033,-30664,-937,7325,-27518,-3059,-28442,-22351,701,17068,16471,-26728,-6589,10963,270,4272,-28419,17654,10133,-15104,8393,-6955,8105,27094,8452,-21750,-928,-21553,-15963,-24435,-31516,20248,20470,-14171,-17854,-13014,-9627,-29967,17035,11554,32078,-5476,
-16787,-27548,31034,8224,8833,-9799,17209,-26650,-23349,-24322,6445,22026,10357,31251,31033,-24081,-13430,-12160,-30841,17412,3318,26838,3661,-9847,-31391,-8690,-24991,-3356,12026,5277,17983,6418,-15806,-22851,2030,14455,-1300,4746,7790,-32374,16795,17017,-2634,-26268,-11744,13196,18254,32464,-31504,-1853,703,3699,15950,-12475,24349,-16801,5545,-9680,8353,-18606,18711,6199,-28952,-9101,-28889,-24245,26691,13182,31876,-15670,-1298,22978,-727,15778,-7043,13384,-30384,15788,16570,-804,17762,-1737,-31807,17477,32729,18300,13039,23104,-11676,-4360,-14347,-13910,5152,29632,14387,19166,-32311,-24089,28950,844,31397,11175,-17635,15586,-19349,15280,-24187,-14406,22564,18200,4475,12821,11517,-29283,-5420,-12777,2510,3037,10603,32518,-32079,-10568,-21471,-10637,8856,21624,26529,-6059,10491,-2277,-29759,-1668,6660,24091,6243,-16814,-22315,32570,-23824,21260,-24379,13555,12355,24835,-16930,-21914,-18896,-10092,13368,13740,7598,-17957,-18894,2522,-2750,-12178,22248,-26909,21333,32277,-7791,24771,2591,5813,6702,-1416,-11070,-26051,-11860,-30895,12074,-4328,569,23790,21439,19103,29384,25173,-20420,-11506,16315,806,-23762,23349,7426,-5346,-27268,-23203,-18111,14668,-9103,-25112,20898,11637,-300,8980,27981,29081,27051,29405,-32739,-14959,-13543,-26350,10676,23051,29666,-6997,-5871,1638,-19602,32524,-9927,22467,3424,-15122,11482,18868,10476,13980,11517,8805,-29421,-6594,9856,16922,24067,-17476,16134,-23696,10177,4765,31426,-11291,-30387,4603,-24410,-9359,-3583,14883,26227,-32719,21356,-4324,30305,28226,-12735,19904,3340,18840,-27862,-3547,30328,24510,2247,21928,28451,-17467,30278,18407,-28553,30161,25927,31848,-16765,25001,739,-14004,-22911,-9900,-3775,-13387,17346,15893,27165,28964,-32009,-7528,23470,-2975,14635,22956,-4731,20774,-15317,5257,29272,-27950,233,28636,324,838,-23265,1299,18472,-4494,-304,18590,-11743,17159,32086,-119,-8372,-6714,20458,26284,-5865,12944,-9437,-32165,-13877,18723,21814,-20623,7173,-30263,-10992,10201,-4503,10189,-3862,-27521,-25253,28143,13347,-31038,-9824
,8196,-9605,8095,28620,1687,-9686,-30628,18282,20387,24268,-26395,24421,14108,-5780,-5299,23981,-28531,5801,-16729,19532,14328,-30455,-13075,-11002,-19736,17749,14491,25608,9831,-25876,2540,-25768,-8727,20883,14471,-28210,15902,-14027,-13907,-30304,6576,-17617,-14587,5025,-8039,21369,31236,-1216,-5185,-29433,-18650,12446,-11677,-23739,-1986,-20597,-13225,27224,29413,-23800,-20945,-6697,-27715,16581,3151,13399,-29477,-18875,-7362,6783,22116,-12112,3622,15276,-4706,16226,-13298,23632,12668,9263,28457,15304,29432,6413,-2797,31543,-9539,-32115,30180,-30905,25332,-20834,-22077,-23311,5807,-18178,-19277,-21492,25868,-11469,20693,27240,1454,31162,15443,7960,28361,-30612,-26312,-25467,18083,1359,-1273,25734,-7633,14094,7952,-7641,3875,-9177,-30053,-25961,16129,5130,16031,-10871,2999,-24385,-877,-4905,-18601,-10250,16626,6501,18529,-18477,-21754,-27112,-32665,11457,-27161,-11392,-8371,-24786,6052,-17057,3081,22020,20296,12882,4414,5283,-28383,8582,-7520,-4226,26241,18857,24798,-4387,-5595,13488,-16001,10990,-16464,-8215,30278,30402,-29258,32261,7352,32469,-25279,-5637,13800,-28083,8342,14954,-24403,4947,12181,-10669,15573,24055,-467,-22882,-24141,26990,23811,-18380,-7980,15046,18706,-5211,-28880,6117,-16126,16385,24588,-25886,-15191,-16457,5533,-13951,-6885,18065,-17344,-31962,-12529,16598,30033,20885,20653,14035,14978,20771,21921,-2834,19049,26141,-30243,-17844,14769,-12198,8859,20380,26051,-21792,485,-11608,6387,7057,2795,-12980,28515,-30614,-29101,-22968,11101,6095,-5358,-6422,-30194,6998,-12595,-31602,2214,-31694,4575,31245,1499,24717,21828,-6001,22815,32567,-14674,5728,-25615,-19618,3811,-6736,-19494,27418,20023,-26571,-21705,-9785,-26940,6943,18147,3508,-30955,-2349,-19909,-29749,-9950,-10572,-19331,-8359,-16420,-7532,-10910,-7942,29878,32746,-5664,31979,21691,-16194,-23849,-18922,-15037,29058,-7246,15306,8350,-31042,21423,-16514,-31366,-19712,31520,-11424,-15405,-7478,32125,-19953,-8953,19017,-1444,32459,8889,-11489,-25336,16514,19164,-12950,3230,-23566,-27799,32120,-
32605,-11639,4305,3264,-12675,-16684,28309,23131,-12716,5661,13587,-2886,12916,25787,23115,-25243,-12016,-31667,-22938,-26151,18137,26154,17862,-26297,-12949,3064,-20991,1491,18917,-12775,30054,-30061,-22504,2491,-30736,22050,7396,-27661,25355,-21756,-15145,-19074,-10176,-26054,-31029,24383,-30278,23849,-17060,-7915,3036,22204,-13715,21430,-26881,8725,-2816,25538,10683,-11392,-29637,811,10313,14752,-19243,19639,8516,9212,29610,-27849,22105,22408,15599,26966,8616,23912,8189,-950,-18428,-8771,-9415,-15838,-6660,-26450,19255,10694,-32386,17462,31741,27585,5119,18847,16312,-20566,-30025,-2195,-1487,11516,-11496,18750,-24877,-685,25382,-15260,1455,-20091,19505,-32683,-29276,-12809,25893,-5177,-9969,22647,32630,-29031,-3172,-5674,-21670,20533,19994,19972,-20,19850,19260,-28892,2518,-14074,26189,13120,28071,12083,-7604,2188,30467,-31234,-4213,-27726,5460,-1205,-12898,-31442,28060,-8473,23737,-21778,-1711,17531,-2455,-22455,9682,-14328,-19182,-32032,-19326,-28496,-22704,-31096,11183,-11227,25508,4585,7181,-9042,15723,8898,13119,-9673,27770,12282,10108,9147,4618,9904,21922,18245,19056,30523,-16770,-20214,23759,-26763,-27480,-23001,-17153,21047,32146,23807,32120,30805,6100,25749,-7629,14808,-25326,30559,-13205,-25469,-10477,-7296,7670,-9782,-5591,-4733,-20377,-83,1880,2300,18562,-2941,25800,-21727,-26948,-23988,10055,-4215,-26563,-31284,19141,-1705,-23720,18281,15478,-357,21743,14826,5227,8141,10358,-27351,23940,-25783,-735,-28227,25721,-15685,9286,29778,8700,7177,22472,31381,-10409,5935,3310,6513,-2203,-7036,-26792,-31850,-30165,-12172,30786,-29462,16070,-8044,10123,-20033,-2655,3312,-22660,4332,17471,-14501,19879,24148,5940,9427,11767,-9228,-19226,24981,1640,7341,-27382,25273,22563,28811,16783,-23225,-14235,7189,-29135,10535,15891,-26450,29387,-307,26871,-29930,-7272,-14792,-21418,-25485,-20763,12934,23338,13077,25084,5314,-10860,20642,7802,23570,-8143,13702,-32631,-21263,-26977,30729,-1399,11097,17990,-23809,-2602,15720,14084,-29595,4437,3600,-12193,30968,2895,-18832,12907
,-26759,-14141,28147,11102,5125,15807,-4851,17528,16750,-4911,-19376,-15650,-2471,4993,-22176,-22819,-19113,20341,8017,-9392,-18964,96,-2778,-5920,19765,6098,7363,-9149,28347,-10285,23901,27779,-4019,-161,-27446,-6026,-4500,-22253,9998,-5554,31666,26184,21563,-11626,-23864,24835,17928,4940,-16460,18068,-12942,-31431,27113,-5938,-7683,-20067,-23757,24536,-13338,-14429,-24329,-10029,-31240,-11956,15063,27219,-26946,-10530,9380,-30184,5983,551,-11819,20145,22270,-26062,-32136,-11550,-6303,-27644,-3748,11549,-28643,-3968,25412,-10157,30717,25712,26352,-25200,-24380,-28481,-467,-15932,-8368,-17509,-2158,22713,8056,-23542,-27240,26545,6381,9360,18935,-1665,-24768,-2575,22364,5522,14468,22788,16197,-27833,-10498,24418,10200,17581,23429,6107,-30186,-12815,-3146,-28414,-16496,-21053,-11767,-9247,-20401,26094,-6013,-5953,11194,-25427,4396,-28609,-25850,12567,27000,15214,-7443,16872,16688,-3706,-29,14997,19131,1633,-1399,16481,8566,-7791,-29616,-20722,-12526,4141,-3179,22948,30334,945,31508,-3437,26148,-26508,5415,-3560,7145,30586,-9170,-20473,32243,-6140,7935,-32103,-7251,25563,7975,483,15178,20252,-15031,6508,10812,5033,10333,-4787,-413,-2258,1613,-8402,-17683,26994,-26664,-18503,-12002,-20026,23772,12186,19201,-26204,-22276,-30972,14458,17858,25257,-3997,-16301,-18977,-22163,-10135,10672,-27035,20469,5870,11657,-23560,-20176,-5950,15631,31690,25996,3311,27390,21878,25554,3129,-21743,25269,12139,-20360,-2716,30264,-11354,-13984,-25120,-17790,16184,-30842,-25872,19222,-8663,-19241,-7959,29664,-17402,20900,-18236,27793,5054,30879,8521,-28666,20473,-8886,-32048,-20600,-20507,22209,-13686,-24702,32593,-4224,23308,-32615,6289,-25341,7631,-13294,2132,-15294,-17713,15562,-24496,-23412,-31124,-9411,-30423,-12804,-12363,-2143,15660,-26291,15210,31187,28265,213,-1788,8255,-24456,-20602,-8296,-25631,24587,-19361,-4680,-26232,7189,-6638,14149,-28622,29737,23516,-22534,13912,24196,-30070,-11969,1675,28710,-2106,4998,-5543,11426,-3614,-6434,31694,-28707,-9645,-6131,101,7917,-21641,-28679,
-23341,-14485,16518,22207,-8972,9076,-6072,4699,16032,-18279,32029,-2598,-17335,-8822,-6478,-24447,10746,-9444,-18394,25838,26582,15633,-7429,8142,565,2374,-14003,31538,30467,27024,6559,-25531,14197,16447,-12877,-14653,-4655,526,-29524,-8584,-6319,30594,9255,-11517,30871,8750,22802,-18442,20855,-12847,-31860,2203,15749,4167,9084,16526,20665,-588,-380,5378,-11109,31805,-26597,32180,-6612,24948,-23749,-19711,4603,25082,15161,-16467,-10582,7179,26522,-24247,17500,10539,-20785,29776,-29759,-7985,-30468,-28633,-12556,5878,-4429,-9450,22652,-468,2252,20785,-14416,-25578,18965,28005,-1448,-7301,-9986,7233,22940,-7085,14801,21717,-4785,19319,2246,-25463,10102,-30476,8682,-31970,-18547,-8863,-5697,7878,21769,22436,12389,832,-21548,-14238,-8692,11927,18606,15123,4301,31135,12333,30074,10113,20596,-25551,28112,24920,26947,1748,25106,4357,-920,30968,26004,1843,990,-21096,-16334,22502,-10525,10937,820,11283,-13693,-19866,-24599,7829,-23450,-22771,-6195,29773,-24342,-30743,-11091,-11312,28462,15425,8472,24692,15272,25266,13652,-25910,-22664,-643,5827,-3206,7589,-29621,25948,9128,16012,8891,-7066,6970,30102,9586,-27885,12790,-16333,16108,-5464,11329,6713,-11112,-29457,8179,-24424,7466,23244,-20761,903,32759,4566,-29651,10394,-24335,-1986,12436,-11454,31571,-13790,-21207,32474,10137,8534,22908,3698,-535,24816,-29201,17974,10484,22013,23510,-26260,-23445,-13908,1481,13012,30412,-11383,-25495,-9808,8329,16022,-21685,3515,-7785,-27283,-17032,-32079,16683,30418,-4402,6898,5875,15623,19277,19918,-25583,24643,-9075,-22063,31639,23205,19947,-8252,-27401,7863,27507,-27270,20506,-10396,7634,13515,32673,19730,-21969,17579,21141,23244,15987,-26162,-27648,-28866,-6980,-28611,18351,-26515,8705,965,-10112,-5236,-26073,-27079,-1912,2737,-15800,27803,14333,27641,-3851,-13294,7062,7988,6930,8210,22440,-30980,1354,32074,26571,-8352,18351,-31460,26420,-13952,-21182,-16807,-16803,-19280,15761,-13174,-31759,7709,30391,16771,-11858,20394,-31785,22883,-6466,-23022,12670,18820,10903,11215,-14277,27077,-79
79,-1859,15078,-11734,-10681,10430,-24777,7865,1793,319,16925,12798,-21621,-31115,-22864,17065,-885,-9652,-28666,4199,28618,17833,-25659,593,10842,1804,-31327,15785,31289,28160,23355,15149,32214,26300,29288,19841,28466,4592,21009,-24567,21662,22331,11405,-12025,25837,30421,-27736,-23129,-22008,-28280,-9052,-7535,-18561,-11247,-20942,-30477,-22013,28276,-7614,12596,-25447,8163,-18626,-9876,-15929,-26335,27889,31810,-6293,14013,-30384,20253,-8679,21871,2176,-32486,6324,-28643,6105,-8664,-25491,-22362,-25295,-23910,-9577,-31296,11896,5506,22831,26455,725,16473,18600,30450,-10254,17304,10174,23440,-21195,-14122,-2102,-12558,30614,12193,7014,-18145,14578,-9809,10893,-13959,7274,28361,-15234,-13383,27505,9525,7136,-892,1008,7985,28648,5074,12862,-10937,-15319,-30420,20258,9429,2323 diff --git a/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice25_input0_int16.csv b/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice25_input0_int16.csv new file mode 100644 index 0000000..bf0f771 --- /dev/null +++ b/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice25_input0_int16.csv @@ -0,0 +1 @@ 
+-32090,18072,-10042,15739,-5891,-31109,-14452,14527,-18964,22408,28014,-13034,26816,7219,-1889,5655,12309,-9258,14518,-16880,-23878,3849,-13996,-29801,10859,-25067,-6466,196,-6215,14494,-21742,-3598,8621,-16666,-30683,20724,-14448,-3446,-15701,-19696,-16248,2837,25251,-26508,541,16001,-4777,-7936,-7756,-21225,21452,-282,-32532,29790,-22106,-13226,-16137,23524,-5193,-20464,32740,19824,-5532,-32551,27186,-22989,-10707,26637,-7391,19725,-23133,4231,20251,15040,26924,27156,-14826,30950,-15701,6923,698,18801,-18255,224,2579,-32365,19788,-4995,14318,-25160,4573,19709,29995,-327,-8633,-15092,-7595,-29024,4427,-32540,20298,-23415,-20876,16070,-16673,14389,12110,-13705,17657,-1007,20264,-14528,-23002,-13199,-12128,-6832,19459,-19131,-4652,22426,24613,13788,7495,-13557,-20716,7607,-23425,-17666,5579,1970,-26413,23362,-5461,-11040,13034,4865,20737,-22851,29916,14842,-23346,25013,24811,-24095,10310,16684,1342,-18684,-26804,24425,-16823,-17483,1996,-5809,-17357,-13526,30713,3638,31801,-11865,-19546,30981,-8394,15067,-10517,-27514,-2315,-20237,22501,-15757,-27803,-2172,-32710,25625,-18521,1502,14010,-22856,-11952,30957,31819,-17778,-21977,-11425,26723,-23807,9863,14770,23896,-20586,258,26471,-1545,-26755,-13480,32002,12671,31583,-10130,-20747,-14217,-22495,-25092,10742,22657,-21663,-18148,3197,19340,16552,8778,25719,-26698,30323,9614,-1207,18559,5994,881,-10901,-19789,544,-2625,26732,-17667,-23088,-18843,2695,30447,6251,-24766,17054,-4998,7319,6210,12957,-8617,8838,17255,10463,27731,-11584,13136,-28047,-29920,-17781,15002,-4685,3769,29049,-22263,11296,-29636,-20983,-10192,-10711,9250,-10666,-25864,-2885,12846,-12267,16472,9215,29555,-20713,-13664,-26202,-19083,30311,27457,15837,-11333,11698,-853,-31095,-19500,-27276,15041,-16633,27217,28654,20647,26786,31067,16372,-4581,-9580,-9617,213,-21251,-28998,4789,-10402,8241,-7350,-23262,-23347,31702,30220,-5838,-28931,6536,12572,25796,6768,-26095,-2432,30935,10010,-11182,21418,-3138,24349,-19006,16703,-7198,25282,-15965,19102,19016,2691
4,-18922,4943,7258,-15393,-27667,-3098,-9121,-9791,-21764,-26970,-16437,-4877,-16310,5143,-13913,-28257,10660,5694,195,-17877,3294,12104,22038,-32549,27867,32333,8760,-16457,31695,-4965,-6657,32132,-17644,-5508,-20379,17176,12553,27585,-5564,-20509,24430,24525,-30158,-2513,18835,-16110,13943,10401,30884,-13313,-1109,-2988,30756,8678,-795,23909,-5641,-6944,8146,30135,6611,29808,-3526,10687,14336,-10838,13474,21846,15474,-26342,-11448,-25346,31401,-18361,-11689,-23227,-28141,-14232,-19628,-19499,2838,-8260,-5393,-5872,-27203,-25750,-27821,-668,23319,30634,-25848,8719,24385,-15891,-23658,11182,-11662,5089,-4278,31744,16915,-2848,1725,19815,-18975,-6594,-26013,-24561,27227,-15874,26356,22285,23734,-15960,19745,19383,-19804,2007,-30604,-18848,-28144,-8137,22727,23891,16147,29434,-11575,-25406,20587,4853,20838,12647,-25546,26520,-13292,18469,-21275,14599,15036,1047,-24314,8129,-17950,-19884,12744,-5735,13187,-2111,-28406,29300,8341,-29053,-13471,-25640,-27109,30731,-25061,28435,-15666,-10387,-11412,22931,-28078,11983,13042,-3700,-2717,26468,-6142,17235,-16114,21768,8424,-21027,-2664,27943,1903,-9470,-27193,-9082,32250,-23937,27696,26427,5911,-27390,-10256,7056,-23906,16438,23378,-10519,4248,5671,-21313,16206,20885,-27653,7992,-17511,9826,-23365,18473,-8094,-2275,15347,8492,-1560,-21849,10435,22476,6761,-26159,-15626,23972,15440,17369,14374,22402,23957,-29160,-770,-24219,10777,10452,8374,-11106,18897,4930,24933,-9725,14082,32026,18992,8239,15133,20032,-29608,31079,-30409,10689,2166,-15526,26331,11880,-15688,22734,-32049,21147,11046,368,30494,25357,-21552,-10278,22695,-21042,-26610,-29113,1147,-27062,-28414,-4021,-25548,11323,30605,-12686,11830,-19161,-17046,-17738,-15990,23072,3022,31940,1191,206,-12599,20136,24288,-26198,-23463,-6933,-26794,20717,-4712,19546,31098,8927,12138,971,6920,-15252,-1547,-1773,475,12383,-27367,14686,6065,1296,-25003,-6631,17112,-17409,4801,-11812,122,7792,7025,27933,24621,22381,-26953,-12786,-4747,8293,-10131,-19966,16541,23730,17494,20417,32497,
-27890,19508,-8898,15634,-28801,11925,3618,21942,30022,18076,13955,-13958,-24682,-25795,14710,-830,2212,11332,-32302,-11612,874,5217,30744,-21400,-14162,-21262,-2881,32763,-23068,17570,8922,-19002,22270,-13632,-27061,-5551,-22032,18548,-19943,6422,18283,4165,-12869,-30804,-2010,21684,29425,-26462,5563,23932,-7609,23178,-5188,-20794,-2786,-5055,31072,28192,-3573,-1595,4881,9982,-7203,31989,-11789,-14555,377,45,-29311,-17755,32607,-30253,-32413,-30354,24180,-31504,-29666,22349,29832,17981,9485,-16387,32304,15453,20404,-31249,-23130,32689,-31058,17632,15530,-6443,18273,-21085,1790,-22327,-28457,27554,13575,10868,-18717,30037,-19439,31258,21888,17119,7423,-6123,29201,-17183,-3059,13536,-5366,-18185,1411,17847,27314,20855,-1463,-27337,-4293,31678,19942,-27133,2379,-31357,-11691,24624,-19413,-18105,26232,-5864,-6273,-2644,934,5330,-20958,3763,21743,-26218,4186,-31006,-21139,11058,-32049,-25695,12843,-22879,1193,-18457,-21848,10077,-13819,26611,-2522,-9049,-23961,17542,18398,-897,-10929,16634,-8501,-9923,-5498,18198,9500,19151,31693,19416,15465,29243,-28307,-1600,26202,26186,-14729,-10608,11967,-3997,-25133,7744,11272,7691,-6397,-7690,30820,2458,-26149,-8837,25943,12355,8937,31518,6680,-8687,-27005,-22727,-26433,23728,-19277,4430,-16629,30731,29063,-642,16936,15890,-17515,2816,23866,-22634,-25540,-4552,-22960,11270,-7198,-16148,12621,3371,-4777,-26097,15843,-10358,-5746,5852,-13347,-6804,17338,-24715,-32571,-19240,27961,-23272,-22879,27178,-13453,13995,-2891,9647,29452,20926,18083,-11829,-13328,-21263,-27160,-10021,19572,-24813,-8010,25259,19466,15080,21238,-2879,23340,-25656,-10934,8615,-29072,908,-12255,6639,9207,5336,28029,2646,-11188,10677,-5727,29571,-27107,-7154,13921,3908,-28557,9,10766,5264,-17204,29434,17232,31433,-1617,-32290,16411,27069,1805,5352,-13987,5360,9865,-2596,-18826,-8909,19308,24542,3334,-24549,-16759,23263,-20665,-16900,-30466,30043,30922,20158,22267,7581,-22202,29080,13655,21083,861,244,-5657,-13063,23801,-226,-13274,-7084,-11926,-23367,-10577,-1160
7,-7152,-141,-13185,17981,27568,1383,17757,12115,3911,-11603,-14371,-10748,-20807,-32590,-14923,-18691,-3586,4023,-6301,26772,-31514,-10451,-18182,18322,11272,-32185,-32162,-28189,7728,-31901,-8854,-19452,-22439,-16480,-27563,14721,29670,-217,-14967,15483,16325,15760,29641,243,16912,1117,7240,10432,11536,27502,30768,-14838,30912,-9060,-14503,19005,-13500,-8168,16139,-15752,6321,-21024,7208,9007,8881,9977,22886,-7900,7532,-23983,7420,18904,31185,1221,-11071,-23662,-15105,8185,-25276,-12705,2164,5095,-3143,-3669,23289,-6392,31531,-30454,28096,23420,7203,12338,31969,32051,-3812,-30897,-6088,29271,-31307,10458,4456,-17841,-9411,5446,13497,-8790,-11368,22768,164,13328,-19493,-11838,-11168,7450,3682,-27015,3336,24057,-18543,-18852,-26017,-11526,-16249,-30612,28823,-7030,17610,-21664,1018,-19885,10013,28340,-28756,-28614,-24434,16942,-16147,-7948,31106,13436,-9658,19074,26082,28515,15740,1772,31055,-18691,-3436,27256,-25555,-32385,15776,-30903,-24843,-9243,10743,-9324,13204,30612,-19163,28329,28858,1,32164,21366,1162,19424,8035,7271,-17666,32606,-27404,13624,-28518,11881,31879,-7264,7638,29422,6444,-29411,-7108,14420,-15178,-23372,20685,-5789,6122,5287,-10068,16558,-21190,-3694,23339,-17177,9009,-14045,6420,-18097,3484,17418,-10639,20350,-16801,12599,-7815,-21333,-1621,22595,2548,-7536,-635,21876,-22306,-8310,-9257,21468,32003,-3938,-2069,23660,-27134,13140,30227,30942,7014,13662,-2108,7754,-7239,5533,10134,25673,2521,-21609,-20762,16734,2049,15816,9997,24016,4349,21783,-1911,-1646,-30245,-32291,-178,1158,-25498,-25668,23474,23742,-25903,-24454,10229,25217,-19899,-28813,20895,-27980,17715,17448,-16755,-6047,13630,-17946,-28001,1700,15293,29719,-16469,30252,22648,27117,-15581,-10498,-31606,-1227,-7725,28105,-4736,-19422,-5270,16359,-1890,23269,-1255,1200,-28889,13134,-7738,13938,-11392,-7601,29156,-28614,-21238,-30102,23552,4940,14463,7138,23778,-7873,31936,-3657,-17534,-4300,-18981,-22521,-9980,25141,6030,-7538,-11751,-29428,15606,28059,-31617,-14004,27969,-3405,11232,2725
8,-8846,17265,-28901,-18037,18058,8503,8833,19714,-8395,13875,2672,18994,5180,-22571,-11758,32250,6292,5584,-14557,-30776,-9414,4484,-1657,20253,-12364,7341,-20969,29774,4630,1421,20008,-26321,31601,-17946,-25468,7242,12478,-31119,10827,5517,-6326,-4225,-5305,-68,13190,-19546,11234,10616,23809,6063,-9336,-11442,-18710,-25254,-2210,-227,15777,-8639,-809,7409,-8476,588,3975,13980,-28015,-26962,-6394,-12196,25328,-826,-21343,-31444,28684,-2874,24160,1372,-21354,-269,24640,19173,29894,-31850,32099,31250,-27776,11988,29789,-2954,5194,30618,-2963,6990,-29328,-20131,-31383,18822,6735,-17670,488,-6750,-23853,13171,29333,4987,-12714,-6830,-21343,12705,19631,-18752,-27064,31824,9189,27556,-21567,-25456,-16506,-29591,-2727,9992,16395,-24694,-8166,-22265,3856,-7587,-15237,-3830,22649,24099,-16721,21947,-23337,-9537,-11648,22487,31756,-30725,5103,-23431,2436,-15338,-3204,-27160,-27959,26909,-9689,15365,-15966,-6937,5952,-15778,-14525,23259,-16152,31000,14756,19580,-29346,16479,15794,-24997,3865,30218,16895,-14071,14605,16702,-11931,17731,-10512,-1415,-4598,-30974,23162,-30555,9966,-15015,-9961,11641,-19250,-21224,10234,-15238,13306,19497,-23659,12961,16908,21970,-29372,-13050,243,29651,3696,23991,2444,-22981,-15406,-6841,17532,7723,7616,-20768,2867,8576,-20119,-3488,8401,2818,15457,14471,-21391,31300,-22853,29436,1914,20843,3890,-26818,-17809,-27518,12297,-2039,10486,18522,9021,-9991,368,-1160,23414,-27821,21700,22289,-13364,32477,-22204,13604,-6128,13129,300,29130,22090,-7138,26160,27934,13215,-21784,10568,-12609,-11887,-8021,-8234,-10215,2441,-24564,-620,11757,-14285,-2470,-2210,24359,18311,-12272,-20503,-18365,14135,-4958,-30514,-8029,13871,5626,-5263,25216,14616,-17015,-20985,32332,-2634,-21317,29778,-13494,-26420,-25672,-6514,-16911,28664,-17039,30378,-17146,-19727,5651,25837,-4300,29191,8801,31858,-13396,28223,-29557,-19502,-30051,3494,20392,22893,22092,-19949,-21752,-411,26433,2188,20141,12740,28431,-28323,24234,-5364,2176,-1109,-25056,-17804,-10283,80,-23375,14052,-6103,
-7651,-22831,-9852,26132,29158,-30056,13729,31591,5933,6155,4007,-24134,-19638,7461,-13680,-10546,28201,-25307,3048,-23763,20622,-14406,15369,11684,12900,-16613,22778,302,22222,-15822,-9281,-19022,-28011,27093,-2582,18862,-28122,26655,-6006,-16618,-30878,28496,6142,31880,-13632,30098,31665,31416,28727,3478,22685,-8439,-3353,-3600,9151,-1854,-11639,-13151,-18297,9425,20528,-3836,5993,26293,-5988,29612,6619,-18146,-7815,-24699,952,-12818,-11685,-5088,-5169,16359,-14974,-12158,17962,-21533,-11016,20112,-18496,26323,25244,29631,-18548,-11888,26606,-10630,-8023,-28495,-5637,-7149,-29425,8985,-24659,24276,-628,24745,27452,-29170,-19996,-24585,-8273,20147,-31453,-27398,-1004,-5152,20998,14199,-1587,-21802,-4923,6568,-22165,22823,-27178,2998,3057,-18981,18956,18025,-7402,-15323,20965,-30027,23274,-1126,-282,-30634,30588,-12088,7110,-32533,-19784,11779,-32541,22403,-20530,456,-29641,-23052,23009,24505,-20516,13653,9882,13767,-3768,17550,-23440,-3914,-3328,-7784,31004,-8323,7033,-26357,30270,-30853,20307,-2599,21382,-28705,9871,1916,-30397,10152,23634,3132,31444,1789,28317,-28671,32735,-27144,9689,-10278,-28907,-18935,-30723,-17181,-5048,3962,-21954,936,-29436,-2493,1575,195,399,11604,18083,19843,25395,22840,25331,3203,-15955,30205,23242,-12960,-31039,29114,5169,13230,5040,-18213,3749,2548,19098,-24991,22633,-15092,30767,25771,-31803,19985,-6992,4483,-29582,-28728,-25643,10731,-16700,-18357,31007,-25230,-5000,-8736,14715,-1077,32038,-5750,12598,12451,-23923,-21079,24235,12427,24377,-15338,-26910,31957,165,12162,31054,-10714,-31666,6610,194,23913,31402,-18847,27587,-24465,17979,-12723,-26736,22651,-18325,2071,25889,7939,-10108,-25556,-14215,21955,-6762,22709,-12411,-11588,-2806,12589,-24571,-30023,-31794,-9735,5903,-1957,16903,11535,-3815,18884,268,-570,11890,-17768,-15513,-2351,15507,10652,4636,-4130,16023,16948,-11089,-28848,19525,-27260,-6396,13282,20764,18504,-2504,-4737,-5878,15321,-5682,24877,17273,-26831,-8502,32620,4501,-17795,-18551,-5382,10061,-22479,-22933,10180,-31
470,-23372,22272,7419,-658,31768,22986,-19921,-26017,-10682,9358,-27366,-10351,-4016,23514,-21989,14221,30432,-23795,-25930,-13955,24558,21376,-16930,-24814,-3147,-24823,16710,-2024,-11906,30736,-26628,31930,-2691,-12418,7756,-32226,5482,-12071,-26059,-16678,-30686,13736,-16966,13614,21896,-31211,-3477,-5429,9116,7321,-17375,26852,-14726,17656,-6333,15842,11687,11538,-5991,10107,-25459,-7841,-827,29892,29634,25830,-387,25136,15472,12252,-30020,15272,11214,-23200,-15458,4263,14550,24316,-708,17832,19950,29727,-10193,24523,-23317,-5488,31881,-27565,-29457,13073,-4108,-171,-10149,17904,15336,27308,5606,19466,-13716,14131,-31194,-21580,27174,-874,-29588,-19895,-23267,4956,2829,-13565,4657,11572,24148,-6072,-21647,-17575,-25753,22204,27546,-22806,-13380,-12824,20551,23143,-19144,-17824,-9882,28946,32525,16447,-12935,26051,-28926,795,16837,-27071,-16484,-31836,-15084,-12382,23093,12258,30340,-9689,21969,17380,-985,-18012,12939,21304,-18888,24262,16691,23308,-7163,-13035,-16687,14849,-17745,22895,23911,-28639,-27346,3690,-23546,-4487,18528,2594,18913,-18604,-29561,-5061,-941,-21319,-31600,-8230,-32306,31251,-16815,-14069,1504,5830,3272,-14737,22370,-1583,-16313,6711,-5240,-19143,-19300,-31694,3258,-27762,28409,25693,14950,-29636,9785,-12591,-16174,7948,-32711,5570,-9008,-31927,8254,-12545,-28400,3656,3750,934,-5621,25152,20073,15005,-18225,22553,-19772,-16581,10814,9992,-18770,29364,-1481,-7749,-5523,29532,11647,22736,-27536,-1987,19454,2159,32086,30812,-18980,220,26805,5511,-15546,30436,-25485,23536,-7424,24229,-23734,25413,6827,12796,11815,1054,23613,-31179,23273,14912,13191,-23200,30295,19162,-1004,31037,22962,30916,5509,-10331,7131,-2662,-10679,-21888,29261,19969,-22836,-28001,-31088,31508,-17818,-26627,9090,-12127,13201,31593,-28193,29070,31156,7102,14325,-25550,11661,28519,12387,-10767,-12728,-19679,17778,28798,9171,-18799,20500,-22565,-2839,11699,9473,-5326,18390,-22252,-20088,-31328,-27632,-9035,-1847,-14574,14480,22360,-24011,10045,32371,23440,-27229,7246,2048,-10
537,-25122,21434,29039,2053,4452,17271,-16200,15769,-18790,-22036,2879,-12247,-4574,-10698,19402,-17471,-15633,-22495,6613,29340,31390,24363,-15026,-32368,-15017,10908,-23088,13224,4603,-32001,27772,23900,21404,-26949,2660,-26614,27964,29497,12959,-26963,-6453,2746,-15293,25514,-23634,-1169,-27098,-7762,-32662,19137,2862,-23475,-23860,20193,31195,28632,30524,9333,-24500,27844,11581,8690,28125,-31332,7164,19299,-30432,-24169,-860,-26335,-25596,-20191,-946,-16708,-29974,20412,12809,-29025,32241,-25427,11099,8071,27444,18836,6266,1062,12101,13140,24313,-29237,-25528,-25227,-30983,-10774,-18500,14124,19958,22208,-5464,-1382,15198,-30428,-26841,-8423,-753,-23766,-17244,2145,-13780,-10012,-30159,-23799,-18742,18314,24692,4032,-24060,-20011,-21998,11166,-2597,14791,-27302,-30686,-22949,-17394,25404,-12089,-24115,22043,-31117,8632,-5361,-20543,32725,5035,-7797,-29428,25116,-16079,-28862,9736,28674,-22456,-13854,-14100,3917,29300,-29126,-4211,-14872,29601,-32071,-3185,14917,29496,-4246,-23046,-10198,3839,7797,-16208,27169,853,-2943,-6509,24498,-27872,8304,-12543,-14344,-8963,-19210,21046,-351,-15849,10983,-1348,3671,-7254,-5580,-21154,18759,29692,-22917,16549,19411,21739,22643,27815,16409,9105,10237,-8688,-7289,21713,-22454,-23503,-25578,-11488,24879,-13344,11767,-22947,-18511,-30329,22292,10295,17136,-13184,-18277,14027,-30356,-18158,2224,469,-29444,-6779,-22391,-19770,13049,-713,-28566,-401,-6775,-2589,2089,-10295,29744,-27826,-23734,22869,-3906,-2745,32640,-10087,7483,-7883,23303,5256,-8470,-2596,-18097,-8616,-12522,19276,-17051,-2802,159,6833,24992,25085,31674,10606,9733,-25444,31852,18800,-18285,-22820,-7641,27350,-8827,1754,-3120,20393,-18165,25106,13228,1314,22996,583,27770,15535,5064,-3615,23583,27321,-26888,-30970,-21886,-7665,-22891,4699,-2371,-13680,-10011,-31686,9399,-8735,-5174,-32161,-18400,-3027,10410,32707,-7419,-31278,30166,-17497,-1832,-15938,-28189,5335,27686,24641,2116,17419,23171,15928,-17506,-31096,20635,-18804,25032,13224,-25589,-15954,-18329,6657,2359
3,29641,-17271,7337,30276,-364,3561,4627,30281,-23793,10928,9828,-18736,-14783,-17465,30793,3774,-4176,-28755,-27907,-27078,13270,16991,20520,7655,-22653,25402,-13926,26777,29262,2794,-29134,28623,-22209,-28654,28273,27119,14416,7567,28766,-19466,21969,22318,-9296,-10572,-13757,-12119,12616,-97,-26252,979,16533,9741,10389,11150,-20224,10069,-13482,3492,-3299,7003,10930,22050,30651,-27630,26081,-18809,-13882,-27533,-2366,-32573,-28985,4977,27422,-17,-9736,-26944,-32173,12587,-12348,15938,-9865,18005,17424,-29396,7639,-20669,-18571,-20472,-15386,12078,1877,10403,-3931,-9271,-20150,-14962,-31610,-25592,30453,19166,-4599,-26642,10766,15421,-25855,9789,27152,4404,-2222,29036,-18756,24515,-9086,-20081,-16497,25367,11500,13329,22768,-6926,19044,-10523,30716,3575,14821,1366,-1831,25159,-12384,6675,-28219,7100,-32066,13160,6627,-11837,4499,-11269,-26888,15871,-3172,17752,-20822,-20689,-9088,30383,28193,19630,32317,-10470,15796,-1660,12159,27097,-3858,29581,21178,18812,30241,-570,-16959,-6055,28199,-17505,16424,-7282,-28188,10001,-14607,1462,-2577,7192,-11282,19033,-11806,30261,-10277,-3442,25647,546,7150,-23278,2404,19930,8483,30068,-14378,18161,-24112,-26848,-18843,-23672,21611,-7361,5245,-8071,-23169,-10852,-24628,23024,-23889,-18513,-20872,-19408,29057,-17044,28034,4643,1220,-32751,5508,-13582,829,-15563,9232,8556,16549,23400,22722,-27669,29281,-14138,-29389,26633,-14720,8077,9663,26652,19680,5613,-9294,-19885,-27119,-15245,30716,10022,-6295,26638,19286,30749,-1684,-13334,-9065,5426,28063,2037,-16548,8467,-25326,28763,7331,20720,11678,12055,-19214,-13262,1579,-7003,32354,7945,-2732,-10033,-7935,-29866,-2010,7803,-20716,18029,-18363,-21169,-24757,28742,1859,-14198,31687,13538,24492,-22099,3628,-7649,-23718,-8537,-1168,12444,-5608,-32084,-24119,-7386,27465,-16321,-14681,-21633,-28375,10575,-26426,-5090,31955,-1288,-14086,1239,-20626,-24694,20805,26543,29963,-31128,7919,832,-1993,15554,-20332,-15856,-28431,-22181,11817,-18982,-15963,13222,-18779,2459,3857,-19936,-4093,11494,
-962,4443,-13066,23736,-3258,32681,-16611,16016,-5747,-2171,12906,27039,-31575,31010,-1068,-2120,-6265,-6727,6577,-6505,-14227,-25111,-11061,-18495,-30377,-26284,-18759,-23073,-8134,-28096,25762,-23972,-6681,-27351,-22952,-9092,17992,23262,27129,1651,-13133,21969,29043,-20842,31619,23898,22773,11003,-8571,-946,-10039,19755,-1302,9652,-5365,-28894,32667,-10248,-19754,-25797,21323,9306,2732,-32556,7464,-10743,-9249,12795,17038,-27625,21565,28491,-24408,-7608,20687,-25672,6057,-11960,19604,22804,-27333,-10472,21812,14780,-14355,3786,-27721,-22662,-14804,14583,-10980,11661,9138,-29381,17174,161,5983,29842,9803,24189,-4585,22581,21842,-9853,-19591,-6371,-23859,19474,7455,-30768,10598,-21497,-17258,17493,-12290,-18055,-17558,-22962,-1584,-2595,25962,-11114,-25563,-21700,13576,-21198,-27344,-5396,8292,8353,4770,-4761,16761,-14760,30919,1672,-32059,24987,5694,-23939,308,-5573,21740,17499,-22491,-27614,29132,9422,14552,-27093,-13400,-22151,-12508,-20110,-4702,13629,13890,-15368,-27208,-28085,-2979,9670,-15361,-11461,-10742,21652,-31549,30661,-23718,6805,16320,-21179,-24456,-13005,-31650,-5029,-19119,9694,-17078,-32176,-5432,14549,-24884,-26550,-8968,26295,-30811,9566,-4064,-25305,30076,20483,27017,-29390,-18032,9042,26073,-9904,4328,7366,-26693,-6901,-607,28344,2752,-1678,3317,-2941,-28830,-19047,26039,-23834,-15731,-25590,-5021,-21361,-9634,863,14994,11839,-26267,-17063,-11113,25909,18398,-29784,15612,-17261,-24016,27210,20627,3026,-284,-2827,-7673,5169,15699,-28777,30390,-15095,6009,17416,20818,17985,3491,17665,-19956,-15523,16974,-289,-5222,2574,10938,9499,-11865,18224,25244,15758,9884,11225,-28219,29527,26096,-10839,-10998,17516,-13206,7286,-6242,-21691,26510,10500,15797,23322,17962,-30843,-1115,-15699,16417,-32745,8997,155,7820,19141,-18225,28077,-12409,-20173,-31890,-436,-1275,24898,-5851,20783,-9969,29534,15712,11109,12057,-29635,-18726,-8672,4028,19438,-27003,25880,4427,-23963,32051,-1393,27227,-28630,13271,5071,-22944,14295,23807,-11522,-25022,-15665,-14267,-6010,11
318,18130,-18547,23975,-29666,-12221,2497,14308,-32160,-19356,11560,8995,-17664,-18049,-768,2626,-15502,-29061,-16899,-3637,21552,15211,31108,1515,24881,25443,31347,-20277,17610,-7203,-16327,12335,17032,-19025,8148,12758,-10771,-26023,13010,-14343,-26241,28685,31939,18329,-16114,-5530,-11786,-3485,-7144,-1382,-14989,-22860,25388,-2159,-13924,22067,13906,22283,-18830,14825,19929,21522,-17176,-13984,-31530,-25746,23249,253,-25970,29671,26286,-15165,-11062,1005,7705,24009,21464,-17641,18706,-21549,-11242,-8279,-22724,-1192,23436,8362,-29447,-20853,11693,-167,-27511,-8567,-31452,-21743,-16621,-9076,-21752,-27546,26788,-24110,-30011,7806,-18178,-6911,-18253,-14476,-21065,-25972,25533,-3119,21351,15131,5539,-2641,13877,15960,14911,-8562,-5196,-6585,20052,27455,-1541,-6537,1071,1848,-9404,30418,-9540,-23096,18042,-4578,-31105,26126,-19129,854,4284,-31104,-28551,7401,21291,-26538,14662,905,-9485,21731,11765,-25562,-29697,3681,-17483,3813,22415,-6450,-9434,9270,-28399,8326,-1527,-450,-19378,12413,-14632,28191,9641,26432,28274,-27854,7325,29415,-11382,-25077,2761,-31897,8573,-6148,-99,-30865,-147,-26631,15604,7419,7058,-27129,-5808,3454,4877,17798,-25005,21114,20278,26091,27965,-27436,10733,23575,-2434,29400,28748,-29626,20091,4515,1430,29195,958,-32052,-26658,17940,16837,11005,14072,-31842,30011,23387,-6033,-30664,-937,7325,-27518,-3059,-28442,-22351,701,17068,16471,-26728,-6589,10963,270,4272,-28419,17654,10133,-15104,8393,-6955,8105,27094,8452,-21750,-928,-21553,-15963,-24435,-31516,20248,20470,-14171,-17854,-13014,-9627,-29967,17035,11554,32078,-5476,-16787,-27548,31034,8224,8833,-9799,17209,-26650,-23349,-24322,6445,22026,10357,31251,31033,-24081,-13430,-12160,-30841,17412,3318,26838,3661,-9847,-31391,-8690,-24991,-3356,12026,5277,17983,6418,-15806,-22851,2030,14455,-1300,4746,7790,-32374,16795,17017,-2634,-26268,-11744,13196,18254,32464,-31504,-1853,703,3699,15950,-12475,24349,-16801,5545,-9680,8353,-18606,18711,6199,-28952,-9101,-28889,-24245,26691,13182,31876,-15670,-
1298,22978,-727,15778,-7043,13384,-30384,15788,16570,-804,17762,-1737,-31807,17477,32729,18300,13039,23104,-11676,-4360,-14347,-13910,5152,29632,14387,19166,-32311,-24089,28950,844,31397,11175,-17635,15586,-19349,15280,-24187,-14406,22564,18200,4475,12821,11517,-29283,-5420,-12777,2510,3037,10603,32518,-32079,-10568,-21471,-10637,8856,21624,26529,-6059,10491,-2277,-29759,-1668,6660,24091,6243,-16814,-22315,32570,-23824,21260,-24379,13555,12355,24835,-16930,-21914,-18896,-10092,13368,13740,7598,-17957,-18894,2522,-2750,-12178,22248,-26909,21333,32277,-7791,24771,2591,5813,6702,-1416,-11070,-26051,-11860,-30895,12074,-4328,569,23790,21439,19103,29384,25173,-20420,-11506,16315,806,-23762,23349,7426,-5346,-27268,-23203,-18111,14668,-9103,-25112,20898,11637,-300,8980,27981,29081,27051,29405,-32739,-14959,-13543,-26350,10676,23051,29666,-6997,-5871,1638,-19602,32524,-9927,22467,3424,-15122,11482,18868,10476,13980,11517,8805,-29421,-6594,9856,16922,24067,-17476,16134,-23696,10177,4765,31426,-11291,-30387,4603,-24410,-9359,-3583,14883,26227,-32719,21356,-4324,30305,28226,-12735,19904,3340,18840,-27862,-3547,30328,24510,2247,21928,28451,-17467,30278,18407,-28553,30161,25927,31848,-16765,25001,739,-14004,-22911,-9900,-3775,-13387,17346,15893,27165,28964,-32009,-7528,23470,-2975,14635,22956,-4731,20774,-15317,5257,29272,-27950,233,28636,324,838,-23265,1299,18472,-4494,-304,18590,-11743,17159,32086,-119,-8372,-6714,20458,26284,-5865,12944,-9437,-32165,-13877,18723,21814,-20623,7173,-30263,-10992,10201,-4503,10189,-3862,-27521,-25253,28143,13347,-31038,-9824,8196,-9605,8095,28620,1687,-9686,-30628,18282,20387,24268,-26395,24421,14108,-5780,-5299,23981,-28531,5801,-16729,19532,14328,-30455,-13075,-11002,-19736,17749,14491,25608,9831,-25876,2540,-25768,-8727,20883,14471,-28210,15902,-14027,-13907,-30304,6576,-17617,-14587,5025,-8039,21369,31236,-1216,-5185,-29433,-18650,12446,-11677,-23739,-1986,-20597,-13225,27224,29413,-23800,-20945,-6697,-27715,16581,3151,13399,-29477,-18875,-7
362,6783,22116,-12112,3622,15276,-4706,16226,-13298,23632,12668,9263,28457,15304,29432,6413,-2797,31543,-9539,-32115,30180,-30905,25332,-20834,-22077,-23311,5807,-18178,-19277,-21492,25868,-11469,20693,27240,1454,31162,15443,7960,28361,-30612,-26312,-25467,18083,1359,-1273,25734,-7633,14094,7952,-7641,3875,-9177,-30053,-25961,16129,5130,16031,-10871,2999,-24385,-877,-4905,-18601,-10250,16626,6501,18529,-18477,-21754,-27112,-32665,11457,-27161,-11392,-8371,-24786,6052,-17057,3081,22020,20296,12882,4414,5283,-28383,8582,-7520,-4226,26241,18857,24798,-4387,-5595,13488,-16001,10990,-16464,-8215,30278,30402,-29258,32261,7352,32469,-25279,-5637,13800,-28083,8342,14954,-24403,4947,12181,-10669,15573,24055,-467,-22882,-24141,26990,23811,-18380,-7980,15046,18706,-5211,-28880,6117,-16126,16385,24588,-25886,-15191,-16457,5533,-13951,-6885,18065,-17344,-31962,-12529,16598,30033,20885,20653,14035,14978,20771,21921,-2834,19049,26141,-30243,-17844,14769,-12198,8859,20380,26051,-21792,485,-11608,6387,7057,2795,-12980,28515,-30614,-29101,-22968,11101,6095,-5358,-6422,-30194,6998,-12595,-31602,2214,-31694,4575,31245,1499,24717,21828,-6001,22815,32567,-14674,5728,-25615,-19618,3811,-6736,-19494,27418,20023,-26571,-21705,-9785,-26940,6943,18147,3508,-30955,-2349,-19909,-29749,-9950,-10572,-19331,-8359,-16420,-7532,-10910,-7942,29878,32746,-5664,31979,21691,-16194,-23849,-18922,-15037,29058,-7246,15306,8350,-31042,21423,-16514,-31366,-19712,31520,-11424,-15405,-7478,32125,-19953,-8953,19017,-1444,32459,8889,-11489,-25336,16514,19164,-12950,3230,-23566,-27799,32120,-32605,-11639,4305,3264,-12675,-16684,28309,23131,-12716,5661,13587,-2886,12916,25787,23115,-25243,-12016,-31667,-22938,-26151,18137,26154,17862,-26297,-12949,3064,-20991,1491,18917,-12775,30054,-30061,-22504,2491,-30736,22050,7396,-27661,25355,-21756,-15145,-19074,-10176,-26054,-31029,24383,-30278,23849,-17060,-7915,3036,22204,-13715,21430,-26881,8725,-2816,25538,10683,-11392,-29637,811,10313,14752,-19243,19639,8516,9212,2961
0,-27849,22105,22408,15599,26966,8616,23912,8189,-950,-18428,-8771,-9415,-15838,-6660,-26450,19255,10694,-32386,17462,31741,27585,5119,18847,16312,-20566,-30025,-2195,-1487,11516,-11496,18750,-24877,-685,25382,-15260,1455,-20091,19505,-32683,-29276,-12809,25893,-5177,-9969,22647,32630,-29031,-3172,-5674,-21670,20533,19994,19972,-20,19850,19260,-28892,2518,-14074,26189,13120,28071,12083,-7604,2188,30467,-31234,-4213,-27726,5460,-1205,-12898,-31442,28060,-8473,23737,-21778,-1711,17531,-2455,-22455,9682,-14328,-19182,-32032,-19326,-28496,-22704,-31096,11183,-11227,25508,4585,7181,-9042,15723,8898,13119,-9673,27770,12282,10108,9147,4618,9904,21922,18245,19056,30523,-16770,-20214,23759,-26763,-27480,-23001,-17153,21047,32146,23807,32120,30805,6100,25749,-7629,14808,-25326,30559,-13205,-25469,-10477,-7296,7670,-9782,-5591,-4733,-20377,-83,1880,2300,18562,-2941,25800,-21727,-26948,-23988,10055,-4215,-26563,-31284,19141,-1705,-23720,18281,15478,-357,21743,14826,5227,8141,10358,-27351,23940,-25783,-735,-28227,25721,-15685,9286,29778,8700,7177,22472,31381,-10409,5935,3310,6513,-2203,-7036,-26792,-31850,-30165,-12172,30786,-29462,16070,-8044,10123,-20033,-2655,3312,-22660,4332,17471,-14501,19879,24148,5940,9427,11767,-9228,-19226,24981,1640,7341,-27382,25273,22563,28811,16783,-23225,-14235,7189,-29135,10535,15891,-26450,29387,-307,26871,-29930,-7272,-14792,-21418,-25485,-20763,12934,23338,13077,25084,5314,-10860,20642,7802,23570,-8143,13702,-32631,-21263,-26977,30729,-1399,11097,17990,-23809,-2602,15720,14084,-29595,4437,3600,-12193,30968,2895,-18832,12907,-26759,-14141,28147,11102,5125,15807,-4851,17528,16750,-4911,-19376,-15650,-2471,4993,-22176,-22819,-19113,20341,8017,-9392,-18964,96,-2778,-5920,19765,6098,7363,-9149,28347,-10285,23901,27779,-4019,-161,-27446,-6026,-4500,-22253,9998,-5554,31666,26184,21563,-11626,-23864,24835,17928,4940,-16460,18068,-12942,-31431,27113,-5938,-7683,-20067,-23757,24536,-13338,-14429,-24329,-10029,-31240,-11956,15063,27219,-26946,-10530,9380,
-30184,5983,551,-11819,20145,22270,-26062,-32136,-11550,-6303,-27644,-3748,11549,-28643,-3968,25412,-10157,30717,25712,26352,-25200,-24380,-28481,-467,-15932,-8368,-17509,-2158,22713,8056,-23542,-27240,26545,6381,9360,18935,-1665,-24768,-2575,22364,5522,14468,22788,16197,-27833,-10498,24418,10200,17581,23429,6107,-30186,-12815,-3146,-28414,-16496,-21053,-11767,-9247,-20401,26094,-6013,-5953,11194,-25427,4396,-28609,-25850,12567,27000,15214,-7443,16872,16688,-3706,-29,14997,19131,1633,-1399,16481,8566,-7791,-29616,-20722,-12526,4141,-3179,22948,30334,945,31508,-3437,26148,-26508,5415,-3560,7145,30586,-9170,-20473,32243,-6140,7935,-32103,-7251,25563,7975,483,15178,20252,-15031,6508,10812,5033,10333,-4787,-413,-2258,1613,-8402,-17683,26994,-26664,-18503,-12002,-20026,23772,12186,19201,-26204,-22276,-30972,14458,17858,25257,-3997,-16301,-18977,-22163,-10135,10672,-27035,20469,5870,11657,-23560,-20176,-5950,15631,31690,25996,3311,27390,21878,25554,3129,-21743,25269,12139,-20360,-2716,30264,-11354,-13984,-25120,-17790,16184,-30842,-25872,19222,-8663,-19241,-7959,29664,-17402,20900,-18236,27793,5054,30879,8521,-28666,20473,-8886,-32048,-20600,-20507,22209,-13686,-24702,32593,-4224,23308,-32615,6289,-25341,7631,-13294,2132,-15294,-17713,15562,-24496,-23412,-31124,-9411,-30423,-12804,-12363,-2143,15660,-26291,15210,31187,28265,213,-1788,8255,-24456,-20602,-8296,-25631,24587,-19361,-4680,-26232,7189,-6638,14149,-28622,29737,23516,-22534,13912,24196,-30070,-11969,1675,28710,-2106,4998,-5543,11426,-3614,-6434,31694,-28707,-9645,-6131,101,7917,-21641,-28679,-23341,-14485,16518,22207,-8972,9076,-6072,4699,16032,-18279,32029,-2598,-17335,-8822,-6478,-24447,10746,-9444,-18394,25838,26582,15633,-7429,8142,565,2374,-14003,31538,30467,27024,6559,-25531,14197,16447,-12877,-14653,-4655,526,-29524,-8584,-6319,30594,9255,-11517,30871,8750,22802,-18442,20855,-12847,-31860,2203,15749,4167,9084,16526,20665,-588,-380,5378,-11109,31805,-26597,32180,-6612,24948,-23749,-19711,4603,25082,15161,-1
6467,-10582,7179,26522,-24247,17500,10539,-20785,29776,-29759,-7985,-30468,-28633,-12556,5878,-4429,-9450,22652,-468,2252,20785,-14416,-25578,18965,28005,-1448,-7301,-9986,7233,22940,-7085,14801,21717,-4785,19319,2246,-25463,10102,-30476,8682,-31970,-18547,-8863,-5697,7878,21769,22436,12389,832,-21548,-14238,-8692,11927,18606,15123,4301,31135,12333,30074,10113,20596,-25551,28112,24920,26947,1748,25106,4357,-920,30968,26004,1843,990,-21096,-16334,22502,-10525,10937,820,11283,-13693,-19866,-24599,7829,-23450,-22771,-6195,29773,-24342,-30743,-11091,-11312,28462,15425,8472,24692,15272,25266,13652,-25910,-22664,-643,5827,-3206,7589,-29621,25948,9128,16012,8891,-7066,6970,30102,9586,-27885,12790,-16333,16108,-5464,11329,6713,-11112,-29457,8179,-24424,7466,23244,-20761,903,32759,4566,-29651,10394,-24335,-1986,12436,-11454,31571,-13790,-21207,32474,10137,8534,22908,3698,-535,24816,-29201,17974,10484,22013,23510,-26260,-23445,-13908,1481,13012,30412,-11383,-25495,-9808,8329,16022,-21685,3515,-7785,-27283,-17032,-32079,16683,30418,-4402,6898,5875,15623,19277,19918,-25583,24643,-9075,-22063,31639,23205,19947,-8252,-27401,7863,27507,-27270,20506,-10396,7634,13515,32673,19730,-21969,17579,21141,23244,15987,-26162,-27648,-28866,-6980,-28611,18351,-26515,8705,965,-10112,-5236,-26073,-27079,-1912,2737,-15800,27803,14333,27641,-3851,-13294,7062,7988,6930,8210,22440,-30980,1354,32074,26571,-8352,18351,-31460,26420,-13952,-21182,-16807,-16803,-19280,15761,-13174,-31759,7709,30391,16771,-11858,20394,-31785,22883,-6466,-23022,12670,18820,10903,11215,-14277,27077,-7979,-1859,15078,-11734,-10681,10430,-24777,7865,1793,319,16925,12798,-21621,-31115,-22864,17065,-885,-9652,-28666,4199,28618,17833,-25659,593,10842,1804,-31327,15785,31289,28160,23355,15149,32214,26300,29288,19841,28466,4592,21009,-24567,21662,22331,11405,-12025,25837,30421,-27736,-23129,-22008,-28280,-9052,-7535,-18561,-11247,-20942,-30477,-22013,28276,-7614,12596,-25447,8163,-18626,-9876,-15929,-26335,27889,31810,-6293,14013
,-30384,20253,-8679,21871,2176,-32486,6324,-28643,6105,-8664,-25491,-22362,-25295,-23910,-9577,-31296,11896,5506,22831,26455,725,16473,18600,30450,-10254,17304,10174,23440,-21195,-14122,-2102,-12558,30614,12193,7014,-18145,14578,-9809,10893,-13959,7274,28361,-15234,-13383,27505,9525,7136,-892,1008,7985,28648,5074,12862,-10937,-15319,-30420,20258,9429,2323 diff --git a/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice26.tflite b/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice26.tflite new file mode 100644 index 0000000..6029254 Binary files /dev/null and b/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice26.tflite differ diff --git a/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice26_golden_int16.csv b/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice26_golden_int16.csv new file mode 100644 index 0000000..e99154c --- /dev/null +++ b/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice26_golden_int16.csv @@ -0,0 +1 @@ 
+-29694,8909,-10494,-13973,-17943,16271,-30082,31224,-12346,-27333,-27361,22497,-26479,-1944,-6900,27182,-1634,-19147,-12306,-363,-19816,21197,-20870,-19663,21630,22577,-14677,-7751,-9838,3080,13322,13631,-23659,-17554,5611,-29301,-24220,-18705,18430,6694,26151,-23481,-30934,-14088,19546,-18913,23592,-16296,-23836,-5807,20767,-23956,-11263,31661,-10800,-5171,-23617,82,32550,3424,-18355,-29380,-18531,-29610,-28804,-30143,-10425,25671,-4162,-31178,5964,13629,14182,24945,25043,-15337,-26075,-30932,29406,1908,-9589,28874,15712,-19488,-27059,-4668,-4648,-30223,-22900,27402,25731,16801,-10257,12746,16458,14618,-7673,13520,-6169,12382,14453,29783,-231,24821,-18065,-5686,-10549,-9541,-1763,6579,-16330,-16056,-5931,27836,23971,23203,32596,-16631,-9923,-14541,18457,2308,-13816,-14455,-28619,14508,-31281,-5685,10051,16210,30611,5306,-26207,-4670,15706,15694,-12675,-22220,18625,-30448,20195,26344,-30443,-476,-20368,3183,4277,-13102,13200,22160,6280,-23390,23813,235,15416,7880,-12578,-29573,21192,20683,13562,-19991,11766,11899,28032,-4593,-12319,238,14085,29151,-19515,-27450,14503,3178,-4621,-31812,10820,173,6712,1313,287,26384,1552,4096,-8468,-22620,-4452,22322,-24098,-17188,-5187,-6275,16988,-18059,-23939,24880,16258,9288,-28649,-17293,19022,22938,-24182,-8175,-3459,-10589,-32471,-20790,-30543,-31940,22891,-32529,-8639,32576,29846,27150,-17451,15701,-23194,-5365,25763,14001,19920,8871,-26635,21901,-1973,29008,4013,-13203,-28917,-15037,-21679,4856,26958,-28424,-23963,9630,-2597,-2629,4145,30671,-23173,26285,23138,-9576,-4983,-5195,-8710,-26304,10667,-29813,12557,20699,9662,-5632,-29275,24985,21445,-25488,7078,13936,-31957,4883,28452,-14560,-8617,-3431,-15314,17036,20016,20949,-23200,23961,5569,14889,28279,4949,4385,12155,19988,14659,6176,13724,24776,-16124,-7727,801,18601,-9288,27191,30822,31344,-3912,-5918,25265,9042,18784,17874,-26974,-20666,-11323,-6514,-13429,28967,-28595,17533,26557,-19789,6212,21996,-4040,-20963,-31360,-21127,-22003,-26328,2053,10435,2645,-1820,21092,-882
0,-18515,-32587,1828,-1889,13257,4607,27252,-9606,6720,-360,-4426,24308,-11253,-13639,19231,21122,8418,-19480,-28590,6189,24322,-867,-29419,-16832,28336,30218,23180,19906,1275,15973,-20972,-28017,-21697,-10253,-25693,-19299,-24297,-2934,15417,28267,2197,-24190,12500,24389,-28239,-7292,5442,32065,-18070,-15447,-4585,4812,-23566,32600,-8151,27740,1912,-19802,25505,-1264,10579,-23898,-1060,-1059,-29036,31831,-28504,30091,-14239,28060,6361,22973,-24345,30670,-29540,13895,-8280,-27972,-1165,11483,-15354,26608,-24214,21093,-15062,-13463,-28384,29752,21541,17746,7538,-27489,-13300,-21280,25133,28281,-32470,29510,-30370,13877,24756,12597,15875,-21718,-5655,-21844,30385,5239,3989,20735,26034,11612,24936,13410,-7606,-11031,25317,6726,-15773,10326,-8217,-8091,3205,21883,-19724,-15006,-10954,32700,31542,-18317,-9411,3567,-29365,-23052,737,13786,28451,-31900,-17477,-3732,11745,-12555,-17735,32009,-23143,10359,-24401,-18422,-3433,6037,-7816,17840,26843,17924,26912,-15649,-26939,-17044,19757,25509,27586,12962,14740,-8210,-27285,24687,-27740,-15654,-22380,-2130,-24639,8940,-11310,-29885,-27261,21865,-13631,24630,31372,6708,-7616,23609,-1844,31288,12846,20496,-16684,14197,-6551,-14823,10765,-32176,-6040,-7601,8770,-19783,18658,14690,-27161,13953,-9372,32103,31400,23947,-20121,-10085,29766,25010,-21771,1127,23171,-32191,32166,30164,32185,-8888,-24313,-29286,-9700,-23522,-29501,19447,22707,24146,12298,25859,325,-23452,-15654,-19555,-25228,-4510,11781,25217,15426,-27972,-12737,-10700,-26356,-4621,10325,-32399,-25951,-19274,25852,23637,32747,-243,-20162,-31696,-10663,6232,-25487,31718,3487,-1186,8975,10088,-11716,-12740,-15288,32006,25646,19000,-3216,-17834,28085,31129,-155,4454,16971,1637,25240,-4846,31301,-17408,8360,21233,6562,11723,-10404,-7570,-18854,-18494,19709,9929,7853,12153,-4504,3683,518,-26317,-32017,22648,-12990,531,19556,4715,12790,-28118,-12353,-26747,28613,31200,9008,-24408,-14430,-4293,29236,27825,9321,26900,-27323,-10282,16345,9820,-22314,-13611,-2426,-13220,-2483,1855
0,10607,-15165,8712,29878,-3661,28632,20504,10210,20947,-16784,31051,31609,-4453,-24914,12967,4344,-7819,-31710,27909,-24375,-13936,-17094,5589,21777,-1019,16678,6906,25705,20863,31596,-27323,23658,-14280,-16269,-26359,-304,-10543,31063,16774,23747,-21060,-29874,32205,22727,-13671,-20701,4120,-1232,2123,5963,-9778,-27309,-10789,12394,1861,-18215,-9100,22955,-3694,11190,26263,10704,-11086,-13529,11361,-26448,-15856,17095,27295,25270,-13018,-25691,31672,13486,15199,20864,-18300,-25801,-30108,-2387,7761,-11123,25165,22197,-2424,754,-25915,-23590,-4313,21974,8385,-30592,31939,5301,-20204,24575,-1569,32302,25353,5378,-24532,3733,-2058,-18496,22306,-11607,17973,24142,-29470,3901,16703,15108,28513,-177,26581,7992,3871,-27302,-25151,-21234,-7930,-2152,-20437,-20090,-22230,504,-24532,-22541,-15699,-10110,24400,31750,13223,-11503,-1535,14780,30522,4633,-15528,19195,-9107,-3349,-28098,462,-19108,-23496,23948,17423,31696,-26870,5909,-5377,-13941,-7776,30073,-9399,-26079,21755,32516,21241,6889,-24284,-18147,22447,-15443,-24283,31104,4900,-32137,-28150,20535,26541,1359,-10031,26707,-24982,26175,-19828,29152,-28865,-771,14414,-3097,-1990,9075,28225,20351,18695,15231,-5813,18112,-1153,22943,16311,29403,-27159,-8555,-14241,-2913,29858,-12126,-26071,-15871,-30186,-13821,-32213,7295,-18390,-13244,1333,-15328,-2324,29990,7214,-20130,24197,9636,-12212,-11412,-21151,4825,-22814,18049,16821,-10780,-19124,12671,14967,17444,11754,-4482,-21806,7455,-16164,-31811,-11209,-16092,-13038,-3254,-5634,14450,-7222,21195,-19702,16218,26037,-15490,-26273,6166,9795,-22789,-27489,18254,-10399,6025,5265,-30590,6116,5936,-4327,-5463,8228,30744,2062,-17242,14823,-1718,27380,-24861,11109,1535,-25970,-6043,217,-22209,-6148,4285,8717,-23478,7636,22309,7785,-13303,-18433,9837,-26847,-2811,-21577,-28137,3047,27313,-12906,-4459,9919,23716,9588,-19437,5559,9594,-9137,-1618,-21249,-15329,8822,-10563,-18794,13159,29903,-20733,-26984,16922,16072,29624,22439,22392,3390,27740,-7694,-21183,-15082,9611,-7708,-26474,2590
0,31709,1016,-11032,1452,-17914,17433,30935,-27716,17831,-22150,6572,32074,-17572,-8395,-1282,-8019,9739,-20822,-22947,27643,-11845,-24580,-23134,529,15150,19130,-22387,-25560,29714,-18272,30527,-16272,-11525,31204,-8757,-15109,14225,-10369,-24628,29709,18826,-22081,-31858,-171,31312,-26473,-3816,6571,21983,-7676,-1930,-29285,-23477,27864,-15248,-26487,-30793,19967,8996,-24135,27904,-21751,8389,31517,6813,23554,-24294,6985,-14838,-5911,-17072,-26063,-22210,4081,16279,-4650,27304,-11775,-7166,18745,26896,2667,17063,13947,-17406,-14049,-6103,-10112,-7057,-30261,-2232,-25543,7310,-8385,4464,1140,-32043,-29089,1583,-24943,-11602,-32036,-1830,15576,13438,-20031,32063,-12859,1007,3769,8382,-30214,5267,-24875,-16971,10137,-8597,15411,-28867,-28846,13612,-20042,11627,-8329,12797,-578,32395,11244,-13038,-30623,-13672,23635,-20825,9009,3563,-27051,-22312,17632,-16064,14991,31242,-23494,5928,-6616,-7461,17361,-13707,4097,11883,30365,28141,-29124,12445,-10829,21352,-1099,32580,3357,29434,14294,-28496,-139,24529,-32195,32001,-15020,16729,24135,-24596,10870,-2819,-1196,-21717,-32565,-15776,-1081,-18462,7810,-5859,-19816,17174,-4527,-21604,-30228,-25118,5162,20817,10655,15644,6410,4464,-20842,11731,24634,-31153,-9881,15879,26249,-10267,-30072,-2750,1580,-3455,26062,-9187,2712,-8916,25780,2636,-28172,-31889,-4234,-8258,-9822,7149,-22606,21748,15210,-12272,31969,29452,30145,-2691,-26557,11908,31751,11212,24071,-18450,-11185,6350,23363,10999,-31565,-28591,29834,-10198,-13643,-19768,-17116,-13416,-16017,5110,-26630,-6419,-14923,-28272,2937,15906,11048,-8809,-3295,-16322,-14083,-15997,-3285,-30341,16659,13261,3129,28983,-18278,9758,8295,8295,2171,30932,-18561,2419,28349,-6451,-27356,5564,2100,894,26291,19329,13931,-24827,-26368,-24405,18107,22156,10611,27410,27650,21022,11458,-5110,-27321,24354,12720,5734,-12289,6219,-5268,26133,-18500,-16575,-15290,-20659,-27377,31011,-31898,-21725,-31640,27413,4839,1988,-11385,-28031,-2151,-8022,27697,32227,-18455,18710,-8791,-32341,18609,-21978,2514
3,-5627,19902,5248,-16198,8092,9790,12085,-10630,27670,-21761,26938,4125,-11427,-7259,21688,5488,22543,15703,-13125,-14288,9386,-31816,17834,29611,-391,14644,-32376,5963,-10745,-26602,-8232,11003,-22271,-31685,-11867,-2447,-8854,11549,7826,-3913,23282,-5610,1785,11821,757,1477,6850,-30234,23080,20141,27242,11382,15083,-511,-24728,-6047,-7414,8933,19215,-7409,18998,-26230,7370,-13378,-17443,-2411,794,22022,3588,-24561,11683,11097,5104,24760,10738,-2853,14280,-8839,-30631,32555,16875,-6569,14717,-28562,-14117,17889,-21630,-13309,4607,17834,1558,-18298,-23573,10409,-12791,-20145,-14777,-7028,15105,730,5746,-5015,9434,16823,8054,-6597,15660,-28712,-20565,16539,-27202,285,21941,26241,19662,30232,122,-27161,30224,13660,26568,-7616,-10122,-19067,-5547,-2817,24735,-9178,17201,22198,-8512,-21870,-18696,29123,20417,-28900,-6313,4506,31483,6820,8171,12223,-20164,21933,9232,-18095,-20337,-5595,16396,-31981,-27173,12733,1411,-32659,13270,-32140,-18482,-27549,9974,26873,15870,-17203,24764,12437,8885,-1028,14090,31736,19610,-25507,-17812,-6103,-28648,24034,-30288,28892,20843,-10760,-21904,27490,6387,-1232,-31845,-29386,31925,9028,16528,-4357,9888,23957,-14226,8791,9114,-6745,-21712,479,15671,-974,-14214,25786,25677,25577,6555,15288,28523,-19510,22760,21871,12601,7467,-14862,-19917,-21060,-21918,28436,-30177,18113,7344,-27063,-18926,18428,6191,17464,-29344,-18959,-15869,302,-22700,11352,3169,-17599,-24748,-18471,30399,11176,8466,-27911,28323,-2017,-20633,26997,3132,27837,-5578,-8148,10654,-17440,23164,-19646,-25068,-7876,31053,-26525,-13920,16326,3981,12920,16927,3287,28636,22174,1358,-26880,19670,26613,11029,23741,-3042,-30193,18605,6309,-11233,5914,29248,3181,16055,-18311,10473,29821,17496,18932,-16048,-256,-23591,-19507,16122,-20291,23860,-12531,-31722,14136,16787,-25985,30699,28665,-8410,-28011,14156,-6460,-17342,14408,-30604,-14837,-18409,1653,-31385,-21491,15199,27495,32267,20127,-25796,-32364,17520,-21847,27619,9941,7981,-2313,31877,17689,-32030,27010,-29443,-553,16545,28950
,-16837,25096,13830,-28230,3257,28962,-6466,15592,1251,24337,5720,7555,-16436,-25813,28211,-22108,-30700,18381,-23842,32542,-8446,-10683,-17836,-3102,12226,-18606,26848,-6240,16383,-14153,17053,30737,-20319,9478,8747,-19163,15223,-27291,-2742,12784,-4557,9717,21017,7780,-1300,19017,-25494,2181,-19984,3558,16522,-9093,12460,-29637,3536,8688,12277,-25234,-27649,1418,3725,-5405,-20279,-29297,-20221,31003,5995,4474,157,13590,-28704,16100,26891,-19928,-28418,16235,24969,-27279,-4561,-16938,-3769,-728,-12929,23343,-29098,-6579,17091,7276,-6915,-15299,-7695,-14855,29637,-27003,-17099,-23536,-3936,1692,23936,16361,-13324,-18205,-3720,-30600,22566,14193,-21352,8839,9476,-30234,16765,19916,-22776,-1070,-16651,29444,32658,-9554,-26577,7745,-3323,-18116,21592,-28807,-6874,-23747,10474,19174,-7282,20814,-19418,27813,-23600,-5188,-31021,-20267,-18511,-31157,-7561,8234,30576,19672,712,29817,12045,18285,3048,32492,-31144,-5064,-7540,-144,7171,-10291,-23681,30576,-9245,-26341,19030,12175,25466,28205,24287,28836,-5148,8704,-14051,-15762,-28580,19688,8380,1482,22311,-21286,-10656,-28527,14493,16171,14971,-9862,-6399,21226,7502,-8336,2816,26296,-2680,-2966,17133,29598,2847,-12138,15524,30062,13783,-25776,-11395,21776,26923,-16705,-31810,8359,30269,18139,-1280,-2917,-20631,-19751,-7695,10026,-2204,-13140,-15004,-3669,-22753,-3002,18360,-5617,-16664,25590,-23105,19897,7171,6358,-10761,4044,13706,-16026,-31946,24983,1435,27647,27723,28216,-7906,15727,-3803,13184,-23265,-26018,11652,-8417,30979,-9696,-26251,-3573,-12125,-15099,-30514,6242,16954,29219,18238,-31886,-2664,25488,3906,-25711,696,21016,6625,4666,-20174,22337,-32329,-5619,-28984,-23891,18480,-2582,18353,-8744,30464,-5999,-4668,17930,28027,12236,6464,-14392,12075,16354,-9712,-24260,19818,-11154,7796,20963,23665,-10747,-3504,-19355,-15216,-29981,2463,10172,-29516,10447,-11301,8417,22983,23149,18447,-20871,20333,28104,14708,14572,5278,-28696,23246,-11746,-14773,-30000,20505,83,-16642,12288,25792,-30067,10571,2247,-12227,7659,19199,-
18938,12848,31540,-667,-21243,11739,-1413,-18110,-28472,6432,-29185,-18647,6791,18630,14116,-21437,-19850,-16589,1679,-32102,12882,-17274,-646,-5305,17151,6859,11047,11734,-21238,11207,-12247,8096,-2710,31155,-6205,-15963,5349,17863,18757,-25522,-4918,-8262,27617,-20637,-7470,-2675,-24002,14865,20172,-24080,20102,-31594,-28891,-26913,-15264,-12038,-27072,30278,-29546,-18086,13434,-1566,12560,14735,16351,8519,-32062,-25080,28784,425,14836,15426,30851,11614,-14371,-21190,20279,32673,13454,-4836,-23177,19377,-22187,-23881,2681,809,10167,26347,-8674,26791,-28277,-3856,-4976,-14538,8314,11451,-31665,20985,-29834,28210,12833,-8985,-24572,-31566,23222,23385,14878,19907,-23271,-5351,-26693,-5358,-12176,-8150,-27065,13293,14217,7100,31104,-6200,28317,11885,-9450,18941,28468,16408,29327,14480,-6921,-30911,-21335,-27231,22762,26628,-25651,18974,-28197,2843,-24057,6252,-1467,3323,-12042,-6459,-13593,32284,17694,12691,9240,-27600,-25608,12192,796,14298,-897,-884,1603,-21756,21253,-11285,-9562,-2855,-6891,-14457,32105,-10208,-11471,15713,22003,14162,11509,5185,27295,-15661,24247,19558,32111,-12502,28516,18022,-25404,29245,-8130,15710,7479,30548,-25493,-16070,9051,13283,-27507,25613,26114,22539,203,-17191,-14816,-31139,1260,8929,-11939,-8091,29076,-11433,11694,4147,6089,7804,-23298,25571,-807,8874,6162,20392,-12550,-107,30638,438,22308,-23999,-30671,-31970,-10555,16470,-31430,12469,-27951,-23065,6988,17950,30365,17669,325,13272,-18066,-19428,31189,17159,-6823,13387,-11435,18286,-26046,-21560,21044,15785,-13679,-21650,29455,1045,-4467,-12896,-10709,-31819,-24630,-21259,9092,-4557,15750,-3110,20760,-15222,17188,-12672,23755,-11268,13598,-32009,23527,3758,9507,-30133,-19775,32103,-7871,6546,-31652,32314,-8728,27643,-13818,22009,32517,27889,-4524,-30655,-10620,-20494,-29631,-22550,-30954,11881,3441,7897,6907,-30240,18991,28996,22714,27722,20554,17263,17667,16800,-24563,11632,30358,-7593,-779,-5706,19757,-32226,-25397,-26544,30823,24194,-12315,27887,-27850,10305,-31375,-11927,-436,2262
0,28746,13117,-16708,31905,-5256,-26980,-30656,-26684,-12054,-10903,11882,9780,-24871,-17954,-21730,22366,20468,22769,10401,-23370,-21476,-18517,32361,-1466,-21577,-11875,-30158,-11528,26084,9460,31813,15050,32145,-31596,6417,10952,5027,18729,17024,27549,9841,17860,-24347,-24631,27811,-15484,-4937,-32556,20014,18633,14859,16275,18540,1104,-20218,-23635,6147,-6021,-20369,24666,-17626,6985,3560,-6262,9685,-30785,-8005,27442,-16556,-10194,2196,-32359,11484,-10616,7986,3171,9155,-29116,10793,-20686,17666,-17443,-18751,-6254,-19871,17880,-29362,19479,8479,-6380,-9391,29879,-15114,7417,26521,29731,-13922,-23646,16065,17336,-29391,-2451,-8069,12075,27341,-18475,-2233,-30371,27618,-15141,-11502,-8785,9339,14156,-17021,-20926,21104,24382,16534,22564,188,-21426,-30364,30480,-24911,-8056,25408,13432,2146,-30622,-27078,-9785,2277,-6841,-20905,-28417,-31106,-21476,-5857,-5035,-19986,-1346,-25681,14177,-26799,-16348,-16337,-11238,21914,25608,6401,24357,-10396,16308,11440,24585,-5280,-8102,7780,-4033,2783,9502,25861,-20260,-19129,14308,-24551,1643,-6733,-24941,-2652,28075,29508,-9439,27185,-14161,1356,-2307,-18271,7266,-21200,-11141,-16389,1236,-25494,-2832,3429,5258,-26080,-15724,-32155,-20683,-7445,17562,6443,23706,19799,18994,-25344,32653,8506,30399,-23958,-3928,-22547,-11425,-10172,-19114,3000,22104,-21894,-28551,-24439,2174,-8475,-5024,5078,453,-98,-18163,8594,-17318,-4883,-934,-7351,23542,-13046,-21495,31971,30688,-23717,-1747,10048,14651,19901,-29968,-31204,29144,5993,-16465,19504,6275,-22290,28626,28021,-19407,15394,-11824,-9703,5369,13271,-1054,14452,3629,32010,-22178,-3425,11885,-7319,92,2461,14355,-30285,-9038,-19608,17816,611,30572,-6232,-10396,-32504,32055,20729,25911,-29773,5436,-11171,18973,-6180,-22490,31681,-11080,9450,13972,22355,-30233,13103,22850,14318,-25594,13607,8150,-21589,-13771,29677,4877,-29171,15262,-9025,-19461,-27468,-9553,9043,17951,16161,13548,-494,27435,-29132,32397,28797,30930,-22025,13345,-7988,25819,25023,20133,21211,31310,-4779,-20014,-3866,392
4,-767,-28011,-17067,32341,-7550,-28432,27961,14433,-30256,3402,7752,-15388,29207,3689,2213,-32304,-31269,1926,-18711,-3206,14623,28630,28887,9908,-14380,-9967,32757,8433,-13975,4332,-14780,3693,30386 diff --git a/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice26_input0_int16.csv b/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice26_input0_int16.csv new file mode 100644 index 0000000..116563f --- /dev/null +++ b/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice26_input0_int16.csv @@ -0,0 +1 @@ +-29694,8909,-10494,-13973,-17943,16271,-30082,31224,-12346,-27333,-27361,22497,-26479,-1944,-6900,27182,-1634,-19147,-12306,-363,-19816,21197,-20870,-19663,21630,22577,-14677,-7751,-9838,3080,13322,13631,-23659,-17554,5611,-29301,-24220,-18705,18430,6694,26151,-23481,-30934,-14088,19546,-18913,23592,-16296,-23836,-5807,20767,-23956,-11263,31661,-10800,-5171,-23617,82,32550,3424,-18355,-29380,-18531,-29610,-28804,-30143,-10425,25671,-4162,-31178,5964,13629,14182,24945,25043,-15337,-26075,-30932,29406,1908,-9589,28874,15712,-19488,-27059,-4668,-4648,-30223,-22900,27402,25731,16801,-10257,12746,16458,14618,-7673,13520,-6169,12382,14453,29783,-231,24821,-18065,-5686,-10549,-9541,-1763,6579,-16330,-16056,-5931,27836,23971,23203,32596,-16631,-9923,-14541,18457,2308,-13816,-14455,-28619,14508,-31281,-5685,10051,16210,30611,5306,-26207,-4670,15706,15694,-12675,-22220,18625,-30448,20195,26344,-30443,-476,-20368,3183,4277,-13102,13200,22160,6280,-23390,23813,235,15416,7880,-12578,-29573,21192,20683,13562,-19991,11766,11899,28032,-4593,-12319,238,14085,29151,-19515,-27450,14503,3178,-4621,-31812,10820,173,6712,1313,287,26384,1552,4096,-8468,-22620,-4452,22322,-24098,-17188,-5187,-6275,16988,-18059,-23939,24880,16258,9288,-28649,-17293,19022,22938,-24182,-8175,-3459,-10589,-32471,-20790,-30543,-31940,22891,-32529,-8639,32576,29846,27150,-17451,15701,-23194,-5365,25763,14001,19920,8871,-26635,21901,-1973,29008,40
13,-13203,-28917,-15037,-21679,4856,26958,-28424,-23963,9630,-2597,-2629,4145,30671,-23173,26285,23138,-9576,-4983,-5195,-8710,-26304,10667,-29813,12557,20699,9662,-5632,-29275,24985,21445,-25488,7078,13936,-31957,4883,28452,-14560,-8617,-3431,-15314,17036,20016,20949,-23200,23961,5569,14889,28279,4949,4385,12155,19988,14659,6176,13724,24776,-16124,-7727,801,18601,-9288,27191,30822,31344,-3912,-5918,25265,9042,18784,17874,-26974,-20666,-11323,-6514,-13429,28967,-28595,17533,26557,-19789,6212,21996,-4040,-20963,-31360,-21127,-22003,-26328,2053,10435,2645,-1820,21092,-8820,-18515,-32587,1828,-1889,13257,4607,27252,-9606,6720,-360,-4426,24308,-11253,-13639,19231,21122,8418,-19480,-28590,6189,24322,-867,-29419,-16832,28336,30218,23180,19906,1275,15973,-20972,-28017,-21697,-10253,-25693,-19299,-24297,-2934,15417,28267,2197,-24190,12500,24389,-28239,-7292,5442,32065,-18070,-15447,-4585,4812,-23566,32600,-8151,27740,1912,-19802,25505,-1264,10579,-23898,-1060,-1059,-29036,31831,-28504,30091,-14239,28060,6361,22973,-24345,30670,-29540,13895,-8280,-27972,-1165,11483,-15354,26608,-24214,21093,-15062,-13463,-28384,29752,21541,17746,7538,-27489,-13300,-21280,25133,28281,-32470,29510,-30370,13877,24756,12597,15875,-21718,-5655,-21844,30385,5239,3989,20735,26034,11612,24936,13410,-7606,-11031,25317,6726,-15773,10326,-8217,-8091,3205,21883,-19724,-15006,-10954,32700,31542,-18317,-9411,3567,-29365,-23052,737,13786,28451,-31900,-17477,-3732,11745,-12555,-17735,32009,-23143,10359,-24401,-18422,-3433,6037,-7816,17840,26843,17924,26912,-15649,-26939,-17044,19757,25509,27586,12962,14740,-8210,-27285,24687,-27740,-15654,-22380,-2130,-24639,8940,-11310,-29885,-27261,21865,-13631,24630,31372,6708,-7616,23609,-1844,31288,12846,20496,-16684,14197,-6551,-14823,10765,-32176,-6040,-7601,8770,-19783,18658,14690,-27161,13953,-9372,32103,31400,23947,-20121,-10085,29766,25010,-21771,1127,23171,-32191,32166,30164,32185,-8888,-24313,-29286,-9700,-23522,-29501,19447,22707,24146,12298,25859,325,-23452,-
15654,-19555,-25228,-4510,11781,25217,15426,-27972,-12737,-10700,-26356,-4621,10325,-32399,-25951,-19274,25852,23637,32747,-243,-20162,-31696,-10663,6232,-25487,31718,3487,-1186,8975,10088,-11716,-12740,-15288,32006,25646,19000,-3216,-17834,28085,31129,-155,4454,16971,1637,25240,-4846,31301,-17408,8360,21233,6562,11723,-10404,-7570,-18854,-18494,19709,9929,7853,12153,-4504,3683,518,-26317,-32017,22648,-12990,531,19556,4715,12790,-28118,-12353,-26747,28613,31200,9008,-24408,-14430,-4293,29236,27825,9321,26900,-27323,-10282,16345,9820,-22314,-13611,-2426,-13220,-2483,18550,10607,-15165,8712,29878,-3661,28632,20504,10210,20947,-16784,31051,31609,-4453,-24914,12967,4344,-7819,-31710,27909,-24375,-13936,-17094,5589,21777,-1019,16678,6906,25705,20863,31596,-27323,23658,-14280,-16269,-26359,-304,-10543,31063,16774,23747,-21060,-29874,32205,22727,-13671,-20701,4120,-1232,2123,5963,-9778,-27309,-10789,12394,1861,-18215,-9100,22955,-3694,11190,26263,10704,-11086,-13529,11361,-26448,-15856,17095,27295,25270,-13018,-25691,31672,13486,15199,20864,-18300,-25801,-30108,-2387,7761,-11123,25165,22197,-2424,754,-25915,-23590,-4313,21974,8385,-30592,31939,5301,-20204,24575,-1569,32302,25353,5378,-24532,3733,-2058,-18496,22306,-11607,17973,24142,-29470,3901,16703,15108,28513,-177,26581,7992,3871,-27302,-25151,-21234,-7930,-2152,-20437,-20090,-22230,504,-24532,-22541,-15699,-10110,24400,31750,13223,-11503,-1535,14780,30522,4633,-15528,19195,-9107,-3349,-28098,462,-19108,-23496,23948,17423,31696,-26870,5909,-5377,-13941,-7776,30073,-9399,-26079,21755,32516,21241,6889,-24284,-18147,22447,-15443,-24283,31104,4900,-32137,-28150,20535,26541,1359,-10031,26707,-24982,26175,-19828,29152,-28865,-771,14414,-3097,-1990,9075,28225,20351,18695,15231,-5813,18112,-1153,22943,16311,29403,-27159,-8555,-14241,-2913,29858,-12126,-26071,-15871,-30186,-13821,-32213,7295,-18390,-13244,1333,-15328,-2324,29990,7214,-20130,24197,9636,-12212,-11412,-21151,4825,-22814,18049,16821,-10780,-19124,12671,14967,17444,1
1754,-4482,-21806,7455,-16164,-31811,-11209,-16092,-13038,-3254,-5634,14450,-7222,21195,-19702,16218,26037,-15490,-26273,6166,9795,-22789,-27489,18254,-10399,6025,5265,-30590,6116,5936,-4327,-5463,8228,30744,2062,-17242,14823,-1718,27380,-24861,11109,1535,-25970,-6043,217,-22209,-6148,4285,8717,-23478,7636,22309,7785,-13303,-18433,9837,-26847,-2811,-21577,-28137,3047,27313,-12906,-4459,9919,23716,9588,-19437,5559,9594,-9137,-1618,-21249,-15329,8822,-10563,-18794,13159,29903,-20733,-26984,16922,16072,29624,22439,22392,3390,27740,-7694,-21183,-15082,9611,-7708,-26474,25900,31709,1016,-11032,1452,-17914,17433,30935,-27716,17831,-22150,6572,32074,-17572,-8395,-1282,-8019,9739,-20822,-22947,27643,-11845,-24580,-23134,529,15150,19130,-22387,-25560,29714,-18272,30527,-16272,-11525,31204,-8757,-15109,14225,-10369,-24628,29709,18826,-22081,-31858,-171,31312,-26473,-3816,6571,21983,-7676,-1930,-29285,-23477,27864,-15248,-26487,-30793,19967,8996,-24135,27904,-21751,8389,31517,6813,23554,-24294,6985,-14838,-5911,-17072,-26063,-22210,4081,16279,-4650,27304,-11775,-7166,18745,26896,2667,17063,13947,-17406,-14049,-6103,-10112,-7057,-30261,-2232,-25543,7310,-8385,4464,1140,-32043,-29089,1583,-24943,-11602,-32036,-1830,15576,13438,-20031,32063,-12859,1007,3769,8382,-30214,5267,-24875,-16971,10137,-8597,15411,-28867,-28846,13612,-20042,11627,-8329,12797,-578,32395,11244,-13038,-30623,-13672,23635,-20825,9009,3563,-27051,-22312,17632,-16064,14991,31242,-23494,5928,-6616,-7461,17361,-13707,4097,11883,30365,28141,-29124,12445,-10829,21352,-1099,32580,3357,29434,14294,-28496,-139,24529,-32195,32001,-15020,16729,24135,-24596,10870,-2819,-1196,-21717,-32565,-15776,-1081,-18462,7810,-5859,-19816,17174,-4527,-21604,-30228,-25118,5162,20817,10655,15644,6410,4464,-20842,11731,24634,-31153,-9881,15879,26249,-10267,-30072,-2750,1580,-3455,26062,-9187,2712,-8916,25780,2636,-28172,-31889,-4234,-8258,-9822,7149,-22606,21748,15210,-12272,31969,29452,30145,-2691,-26557,11908,31751,11212,24071,-18450,
-11185,6350,23363,10999,-31565,-28591,29834,-10198,-13643,-19768,-17116,-13416,-16017,5110,-26630,-6419,-14923,-28272,2937,15906,11048,-8809,-3295,-16322,-14083,-15997,-3285,-30341,16659,13261,3129,28983,-18278,9758,8295,8295,2171,30932,-18561,2419,28349,-6451,-27356,5564,2100,894,26291,19329,13931,-24827,-26368,-24405,18107,22156,10611,27410,27650,21022,11458,-5110,-27321,24354,12720,5734,-12289,6219,-5268,26133,-18500,-16575,-15290,-20659,-27377,31011,-31898,-21725,-31640,27413,4839,1988,-11385,-28031,-2151,-8022,27697,32227,-18455,18710,-8791,-32341,18609,-21978,25143,-5627,19902,5248,-16198,8092,9790,12085,-10630,27670,-21761,26938,4125,-11427,-7259,21688,5488,22543,15703,-13125,-14288,9386,-31816,17834,29611,-391,14644,-32376,5963,-10745,-26602,-8232,11003,-22271,-31685,-11867,-2447,-8854,11549,7826,-3913,23282,-5610,1785,11821,757,1477,6850,-30234,23080,20141,27242,11382,15083,-511,-24728,-6047,-7414,8933,19215,-7409,18998,-26230,7370,-13378,-17443,-2411,794,22022,3588,-24561,11683,11097,5104,24760,10738,-2853,14280,-8839,-30631,32555,16875,-6569,14717,-28562,-14117,17889,-21630,-13309,4607,17834,1558,-18298,-23573,10409,-12791,-20145,-14777,-7028,15105,730,5746,-5015,9434,16823,8054,-6597,15660,-28712,-20565,16539,-27202,285,21941,26241,19662,30232,122,-27161,30224,13660,26568,-7616,-10122,-19067,-5547,-2817,24735,-9178,17201,22198,-8512,-21870,-18696,29123,20417,-28900,-6313,4506,31483,6820,8171,12223,-20164,21933,9232,-18095,-20337,-5595,16396,-31981,-27173,12733,1411,-32659,13270,-32140,-18482,-27549,9974,26873,15870,-17203,24764,12437,8885,-1028,14090,31736,19610,-25507,-17812,-6103,-28648,24034,-30288,28892,20843,-10760,-21904,27490,6387,-1232,-31845,-29386,31925,9028,16528,-4357,9888,23957,-14226,8791,9114,-6745,-21712,479,15671,-974,-14214,25786,25677,25577,6555,15288,28523,-19510,22760,21871,12601,7467,-14862,-19917,-21060,-21918,28436,-30177,18113,7344,-27063,-18926,18428,6191,17464,-29344,-18959,-15869,302,-22700,11352,3169,-17599,-24748,-18471,3039
9,11176,8466,-27911,28323,-2017,-20633,26997,3132,27837,-5578,-8148,10654,-17440,23164,-19646,-25068,-7876,31053,-26525,-13920,16326,3981,12920,16927,3287,28636,22174,1358,-26880,19670,26613,11029,23741,-3042,-30193,18605,6309,-11233,5914,29248,3181,16055,-18311,10473,29821,17496,18932,-16048,-256,-23591,-19507,16122,-20291,23860,-12531,-31722,14136,16787,-25985,30699,28665,-8410,-28011,14156,-6460,-17342,14408,-30604,-14837,-18409,1653,-31385,-21491,15199,27495,32267,20127,-25796,-32364,17520,-21847,27619,9941,7981,-2313,31877,17689,-32030,27010,-29443,-553,16545,28950,-16837,25096,13830,-28230,3257,28962,-6466,15592,1251,24337,5720,7555,-16436,-25813,28211,-22108,-30700,18381,-23842,32542,-8446,-10683,-17836,-3102,12226,-18606,26848,-6240,16383,-14153,17053,30737,-20319,9478,8747,-19163,15223,-27291,-2742,12784,-4557,9717,21017,7780,-1300,19017,-25494,2181,-19984,3558,16522,-9093,12460,-29637,3536,8688,12277,-25234,-27649,1418,3725,-5405,-20279,-29297,-20221,31003,5995,4474,157,13590,-28704,16100,26891,-19928,-28418,16235,24969,-27279,-4561,-16938,-3769,-728,-12929,23343,-29098,-6579,17091,7276,-6915,-15299,-7695,-14855,29637,-27003,-17099,-23536,-3936,1692,23936,16361,-13324,-18205,-3720,-30600,22566,14193,-21352,8839,9476,-30234,16765,19916,-22776,-1070,-16651,29444,32658,-9554,-26577,7745,-3323,-18116,21592,-28807,-6874,-23747,10474,19174,-7282,20814,-19418,27813,-23600,-5188,-31021,-20267,-18511,-31157,-7561,8234,30576,19672,712,29817,12045,18285,3048,32492,-31144,-5064,-7540,-144,7171,-10291,-23681,30576,-9245,-26341,19030,12175,25466,28205,24287,28836,-5148,8704,-14051,-15762,-28580,19688,8380,1482,22311,-21286,-10656,-28527,14493,16171,14971,-9862,-6399,21226,7502,-8336,2816,26296,-2680,-2966,17133,29598,2847,-12138,15524,30062,13783,-25776,-11395,21776,26923,-16705,-31810,8359,30269,18139,-1280,-2917,-20631,-19751,-7695,10026,-2204,-13140,-15004,-3669,-22753,-3002,18360,-5617,-16664,25590,-23105,19897,7171,6358,-10761,4044,13706,-16026,-31946,24983,1435,27
647,27723,28216,-7906,15727,-3803,13184,-23265,-26018,11652,-8417,30979,-9696,-26251,-3573,-12125,-15099,-30514,6242,16954,29219,18238,-31886,-2664,25488,3906,-25711,696,21016,6625,4666,-20174,22337,-32329,-5619,-28984,-23891,18480,-2582,18353,-8744,30464,-5999,-4668,17930,28027,12236,6464,-14392,12075,16354,-9712,-24260,19818,-11154,7796,20963,23665,-10747,-3504,-19355,-15216,-29981,2463,10172,-29516,10447,-11301,8417,22983,23149,18447,-20871,20333,28104,14708,14572,5278,-28696,23246,-11746,-14773,-30000,20505,83,-16642,12288,25792,-30067,10571,2247,-12227,7659,19199,-18938,12848,31540,-667,-21243,11739,-1413,-18110,-28472,6432,-29185,-18647,6791,18630,14116,-21437,-19850,-16589,1679,-32102,12882,-17274,-646,-5305,17151,6859,11047,11734,-21238,11207,-12247,8096,-2710,31155,-6205,-15963,5349,17863,18757,-25522,-4918,-8262,27617,-20637,-7470,-2675,-24002,14865,20172,-24080,20102,-31594,-28891,-26913,-15264,-12038,-27072,30278,-29546,-18086,13434,-1566,12560,14735,16351,8519,-32062,-25080,28784,425,14836,15426,30851,11614,-14371,-21190,20279,32673,13454,-4836,-23177,19377,-22187,-23881,2681,809,10167,26347,-8674,26791,-28277,-3856,-4976,-14538,8314,11451,-31665,20985,-29834,28210,12833,-8985,-24572,-31566,23222,23385,14878,19907,-23271,-5351,-26693,-5358,-12176,-8150,-27065,13293,14217,7100,31104,-6200,28317,11885,-9450,18941,28468,16408,29327,14480,-6921,-30911,-21335,-27231,22762,26628,-25651,18974,-28197,2843,-24057,6252,-1467,3323,-12042,-6459,-13593,32284,17694,12691,9240,-27600,-25608,12192,796,14298,-897,-884,1603,-21756,21253,-11285,-9562,-2855,-6891,-14457,32105,-10208,-11471,15713,22003,14162,11509,5185,27295,-15661,24247,19558,32111,-12502,28516,18022,-25404,29245,-8130,15710,7479,30548,-25493,-16070,9051,13283,-27507,25613,26114,22539,203,-17191,-14816,-31139,1260,8929,-11939,-8091,29076,-11433,11694,4147,6089,7804,-23298,25571,-807,8874,6162,20392,-12550,-107,30638,438,22308,-23999,-30671,-31970,-10555,16470,-31430,12469,-27951,-23065,6988,17950,30365,176
69,325,13272,-18066,-19428,31189,17159,-6823,13387,-11435,18286,-26046,-21560,21044,15785,-13679,-21650,29455,1045,-4467,-12896,-10709,-31819,-24630,-21259,9092,-4557,15750,-3110,20760,-15222,17188,-12672,23755,-11268,13598,-32009,23527,3758,9507,-30133,-19775,32103,-7871,6546,-31652,32314,-8728,27643,-13818,22009,32517,27889,-4524,-30655,-10620,-20494,-29631,-22550,-30954,11881,3441,7897,6907,-30240,18991,28996,22714,27722,20554,17263,17667,16800,-24563,11632,30358,-7593,-779,-5706,19757,-32226,-25397,-26544,30823,24194,-12315,27887,-27850,10305,-31375,-11927,-436,22620,28746,13117,-16708,31905,-5256,-26980,-30656,-26684,-12054,-10903,11882,9780,-24871,-17954,-21730,22366,20468,22769,10401,-23370,-21476,-18517,32361,-1466,-21577,-11875,-30158,-11528,26084,9460,31813,15050,32145,-31596,6417,10952,5027,18729,17024,27549,9841,17860,-24347,-24631,27811,-15484,-4937,-32556,20014,18633,14859,16275,18540,1104,-20218,-23635,6147,-6021,-20369,24666,-17626,6985,3560,-6262,9685,-30785,-8005,27442,-16556,-10194,2196,-32359,11484,-10616,7986,3171,9155,-29116,10793,-20686,17666,-17443,-18751,-6254,-19871,17880,-29362,19479,8479,-6380,-9391,29879,-15114,7417,26521,29731,-13922,-23646,16065,17336,-29391,-2451,-8069,12075,27341,-18475,-2233,-30371,27618,-15141,-11502,-8785,9339,14156,-17021,-20926,21104,24382,16534,22564,188,-21426,-30364,30480,-24911,-8056,25408,13432,2146,-30622,-27078,-9785,2277,-6841,-20905,-28417,-31106,-21476,-5857,-5035,-19986,-1346,-25681,14177,-26799,-16348,-16337,-11238,21914,25608,6401,24357,-10396,16308,11440,24585,-5280,-8102,7780,-4033,2783,9502,25861,-20260,-19129,14308,-24551,1643,-6733,-24941,-2652,28075,29508,-9439,27185,-14161,1356,-2307,-18271,7266,-21200,-11141,-16389,1236,-25494,-2832,3429,5258,-26080,-15724,-32155,-20683,-7445,17562,6443,23706,19799,18994,-25344,32653,8506,30399,-23958,-3928,-22547,-11425,-10172,-19114,3000,22104,-21894,-28551,-24439,2174,-8475,-5024,5078,453,-98,-18163,8594,-17318,-4883,-934,-7351,23542,-13046,-21495,31971,3
0688,-23717,-1747,10048,14651,19901,-29968,-31204,29144,5993,-16465,19504,6275,-22290,28626,28021,-19407,15394,-11824,-9703,5369,13271,-1054,14452,3629,32010,-22178,-3425,11885,-7319,92,2461,14355,-30285,-9038,-19608,17816,611,30572,-6232,-10396,-32504,32055,20729,25911,-29773,5436,-11171,18973,-6180,-22490,31681,-11080,9450,13972,22355,-30233,13103,22850,14318,-25594,13607,8150,-21589,-13771,29677,4877,-29171,15262,-9025,-19461,-27468,-9553,9043,17951,16161,13548,-494,27435,-29132,32397,28797,30930,-22025,13345,-7988,25819,25023,20133,21211,31310,-4779,-20014,-3866,3924,-767,-28011,-17067,32341,-7550,-28432,27961,14433,-30256,3402,7752,-15388,29207,3689,2213,-32304,-31269,1926,-18711,-3206,14623,28630,28887,9908,-14380,-9967,32757,8433,-13975,4332,-14780,3693,30386,3841,5164,-8099,-10776,2763,32674,-19246,28688,10374,-25597,7337,7773,-15502,21018,16361,9698,6595,25064,-22563,5254,4273,-23018,-14251,-2752,8421,31709,-28666,-14239,3241,24031,-4908,-31268,19212,22584,-3637,-21606,3777,5103,24296,18687,1534,31765,-10619,-27483,686,-20949,-30625,709,22040,-31808,-3681,-26314,-4206,-2276,-30705,-5608,-10609,-29991,16408,-4345,19738,-16245,14343,4458,-2666,5685,-3348,13881,-12128,14115,-668,-28511,21830,-17036,-22471,22202,20429,-13074,2425,31950,-22502,-28033,-15034,7522,-26150,-29564,-28751,7932,-19492,30097,9557,28437,-23859,25599,8328,6246,764,-18213,12437,-2340,-13471,15611,24264,-6869,9376,27710,-27928,3512,-14866,-8556,-16927,17336,-11899,-12600,-16077,-15435,-25284,32214,-5951,-2015,26491,-29695,30425,-29269,19916,27306,25695,31398,-5091,30354,16022,-7841,-22425,9955,30514,18862,9728,20763,20833,30434,-21711,-6715,-15847,-29820,1634,-11342,8223,-2824,-21410,-6143,8717,14367,-20509,25706,-25962,-11302,-23316,-21086,-12174,23790,-18474,28157,29836,-22810,12371,-21411,-1955,-27397,-25472,-29284,-16553,3390,16436,21866,4825,-30105,11137,7832,-18336,-6298,19058,-549,-32116,-14057,11189,3366,29039,-3317,23424,27997,-20904,28914,15623,-23483,-22717,103,-31612,-4715,6291,
27432,5130,14723,-559,23781,-9516,-1285,-12085,-9378,-8052,12756,20713,3054,22542,-21817,-26373,-26004,30621,23147,-12525,10750,-3253,2771,28399,25988,18403,8638,19659,-13528,-11897,-20883,7990,-11600,-4539,-3785,-21741,1572,9093,10685,-27956,10159,12789,-20111,-23601,19282,-21595,-17529,-3727,3785,5220,28170,-32143,-30251,28401,-3671,-22946,-25406,13677,2906,27094,-748,15874,2075,32,-27055,-26531,-26016,-31441,31044,-13471,25033,-2364,24752,-26927,-20619,-17378,-1330,-12799,-16671,-25719,1116,4789,22614,-3266,31071,-22551,-20141,19639,1405,31947,-9177,-642,-14957,14356,20237,28188,-7455,-1939,-25407,31979,-1278,7939,-31703,-6165,-25884,30323,-9050,28288,-25998,-22320,-13924,8009,5003,-15583,25810,-12456,-24672,-23584,-31832,3220,-5563,-7623,-15394,-21920,18490,-21564,23675,20098,23905,-2972,24229,17585,2223,20316,6247,-25749,12770,4227,-32600,8560,-29455,-31678,-29523,1909,9432,19858,-19739,30012,22679,-6131,-13262,-18463,11522,1435,-27818,15444,-25626,7484,8044,-15523,11508,29192,-7117,-7386,-20074,28189,28059,18946,17938,-8719,31868,-30798,-26438,25155,-13092,10374,-13478,15496,-6596,-8608,28272,-23332,-31271,-8392,-27429,29247,9360,-5651,22829,-18105,2151,-18636,11595,24935,-3742,12987,-6547,-27320,-15083,22561,-8447,29449,32446,-17218,-18030,2141,-7899,-5035,23350,-128,-16043,7689,-29200,-16369,-28949,-11759,-16254,8851,8874,2351,-17537,-26747,24343,9097,-21724,-4499,13336,21027,28789,-27641,-27782,-6320,-4238,12587,-26382,6876,-16523,22659,-14144,15135,22688,-11728,-22212,786,6901,-23166,8512,-5936,-661,14712,-29638,-6944,19660,2693,-31701,6910,12748,17210,20737,9496,22980,1478,-14112,31932,-29049,-4104,28213,12136,-20494,-7289,-3585,-24922,26292,-17474,-28797,18480,-14147,28837,-17108,18886,6210,26170,-22764,-12016,-24054,12823,-32196,31723,-20667,6719,-9533,-8017,-6864,27461,-3186,-22565,20317,30064,-20930,27945,19018,-19247,27939,-6755,-23024,-8279,30368,-18292,4485,23559,1930,26735,-15629,1978,23267,-26654,194,27194,21644,20111,3088,23012,-25189,5141,879,3
2340,-30553,-29612,-30426,2,-27210,17801,-28855,31035,17584,-12260,17029,-29752,14451,30121,-22130,3871,-26883,-14990,27694,29946,22164,-32341,2040,-6493,7972,26673,20094,-15677,-27554,4549,13171,-24572,-26431,180,-31089,13748,-3011,13314,12456,-29919,24646,22640,-12094,28869,-13274,1727,-20398,-31453,17739,25400,11158,31501,19663,-29543,30662,30275,-23192,29481,-22685,10345,-8003,-5303,-16918,18966,-20504,-5857,3936,-4543,1645,-10114,18245,-32068,32485,-13759,-8005,-19642,-7001,13678,-2578,-23513,3944,-23996,-28843,10800,31945,2672,-17680,9730,24909,31719,-579,17020,-3650,-30200,22683,26904,19484,31223,17568,32310,12565,-2383,-10476,4059,-2117,-3009,427,28425,-16436,-25393,31137,26517,15286,-6504,25026,5991,29920,-8370,3141,10484,25537,-29153,4078,10782,-27581,20534,11989,8008,-3426,11027,3146,-26526,-13950,-21653,-15788,-29154,3708,752,28489,16847,11557,-18871,2355,30395,-31371,-25364,-24778,10294,14691,-20460,-1946,370,9199,28389,10486,-27165,5271,3954,7975,-23870,31421,-30953,-31442,-2784,-21878,5842,25523,-6518,-14902,-22143,11127,11592,28764,26354,9270,8018,19094,16946,-2238,-25878,-14747,-8106,27038,-3467,1459,12760,21563,-5518,-32120,1732,2206,1070,32676,17232,-7460,26446,24314,-13627,18679,-20027,-19405,17270,-18736,16144,21069,27035,23890,-25032,-15739,3436,-21048,5544,22731,18357,-21283,-13073,22375,-18259,638,-16298,25756,-28197,23411,-19124,10372,-1835,28058,29360,-24429,14682,-6680,10215,29161,28657,-20742,7529,-8588,1218,1084,-7700,12912,11643,12445,5752,27263,-10155,4827,11284,-31294,4687,-20819,3247,10621,-10129,15609,23167,-917,16908,19933,12153,30513,7653,3579,-32209,-14653,-235,-10256,-26452,3277,-5212,-24478,17366,-552,24204,-31080,14938,-14856,-15127,20263,19155,3829,9695,26554,25462,7399,-30554,-28129,768,10202,-7787,9002,-23764,30414,22961,31911,31161,-27435,-28515,-26261,14584,11148,-28936,145,10195,-7602,-3801,19410,-19513,-7211,-13229,-9458,-16536,15944,-1195,31829,-27548,-25856,-29036,17833,26970,-32479,23354,-22207,-8607,-29975,-6556,-11
675,11624,-6972,-20124,-13965,-7829,13450,12771,-15052,6547,6651,-15908,-11024,24451,27499,-6674,3860,-30469,20041,18186,-16284,-13870,8287,-6209,2593,11274,-4521,18209,7355,12311,-15242,1954,31372,-31919,-4758,-10198,1573,-21152,-17858,12808,-30264,22614,-4487,-5966,-4563,28301,23925,-18807,-7285,29148,22623,-20068,3518,27575,-12559,-3080,-10897,-28286,10107,10884,956,-32057,1263,-16761,18402,2700,-32697,24256,-2020,31925,6396,-17490,20956,-30268,21080,24320,-17842,-21066,23461,-16064,-27583,-9526,31923,1338,-4510,30433,-9897,10405,-19898,-26872,-11522,19960,1278,5585,7824,15602,20662,6767,8847,-1883,3883,-14989,479,-27317,-28593,22524,-27524,-14932,27040,18800,28687,3047,-382,25281,-6327,32296,26129,29924,-26854,-1328,-24835,-26926,-18062,-11118,29555,-3213,1620,-32428,22311,-26177,-26723,31247,-2664,2678,14399,-5957,2198,-11217,29319,-6367,-3817,-8261,-17676,-23851,29149,18778,19425,-21046,20559,-8449,-2066,-9023,-2095,-19613,-27218,-14489,-12293,23373,27207,28568,14523,15889,31813,9205,19851,-29960,-11610,-6082,5088,20004,-8888,10675,-3204,-23604,12922,-13132,-30329,-19382,-15065,-2381,22431,-23802,-13068,463,10529,2241,-453,-13279,-19187,21876,29120,-155,-27085,-18402,-4154,31260,-12616,17481,4746,26542,1296,10629,4531,11746,1009,-20891,8396,8989,-19631,22880,-6636,-15316,8928,-27648,-19986,17309,1725,29708,-13748,-31379,24560,-23786,15743,19906,1025,4776,-12328,-452,12326,-16001,13591,4699,-16067,30406,-27949,-28125,-2395,-18177,1508,-8818,-8265,-5775,10981,-29006,-30196,20036,-32198,-28730,-10848,8564,20980,-27279,4466,18153,-3711,-25220,-20180,-29091,3972,21067,20196,9100,-22312,-12794,11126,-15341,-7260,-4670,21887,31422,-17615,26563,-10981,-317,-2457,15272,-30664,26679,-2165,20756,-2300,30388,-19830,-9444,-22504,-9201,-30082,24404,-1795,853,7678,14564,19809,-7402,-20912,-6781,-14684,-21476,-24750,-9156,13938,-30333,3077,27996,7560,10966,4061,-6836,-20304,-29205,-26273,27865,-7242,-14440,11885,-13970,19246,-31672,-1789,-21658,-4444,11740,19274,-20065,27620,
26607,28939,-17316,29422,-13834,-32088,-29038,-16422,28263,-20326,-19720,19651,-12382,18552,-31538,2726,579,27277,11743,-20397,-6616,3879,4033,19942,-30296,-21050,-9019,-15228,1947,-25142,18487,30863,-17882,-18130,28470,27256,-29358,4191,9537,-28557,26253,13800,15243,-17773,-30552,5085,-20684,24538,11461,-8358,12742,-16595,-21989,-5967,-10521,24614,12787,-27151,8654,-31921,28120,23823,-24967,-25878,11363,-10096,-22508,-26156,-6975,-25055,13976,-18642,-17960,-23329,-16733,-27538,22202,25071,-19985,12834,18534,15718,-6739,-574,-31578,-29864,-27912,-632,23433,31358,-15075,27873,-21377,15243,29511,6470,2829,29005,14708,25466,-16341,9207,-9908,125,-7683,-4129,-5430,-20385,16038,-1158,-31803,28686,-12811,-29718,-2885,29289,-16952,-31525,-11990,-15384,8526,-28095,-27140,5941,27445,19821,22567,-30079,-19549,14063,28708,20128,1126,-23066,-20067,4419,13359,23525,16139,19567,-2044,-12244,-1195,-30176,17189,-4815,3650,20070,-4894,5236,24117,9163,10375,-20006,-16110,18916,18089,19590,-24332,10888,-2803,7440,6788,-29605,26147,-18683,-19004,22046,11736,-28980,-6059,12745,-27382,16623,-24063,8766,2957,-22607,-16489,21442,-6600,-12751,32294,-21544,-10474,26191,-30088,-13056,-16790,-876,-27889,11548,540,28139,14678,-14365,8926,19336,7958,1025,-29013,-9108,-30686,21595,-23266,23714,31756,32705,-30441,15790,-24450,-6202,-15917,-9965,-13494,-9792,-10808,-13929,1526,-23621,26218,-6780,-2075,-24682,7897,-23335,22133,-13843,-18052,2012,26512,-19965,29192,5900,-7203,30321,-7330,-16760,-1402,-19907,-1471,17676,-26293,-12134,23360,-14827,14776,7461,19303,32629,-8149,-26183,29246,-30384,-25138,22872,-16023,-850,-20732,-28935,-30225,12279,11714,-29209,-13369,-29450,-9525,17136,21899,-1240,-10520,-9144,-9959,-16870,13354,467,27238,-31774,-14924,29234,18470,1869,-29187,-22385,-10856,-10041,-1799,-7781,-15653,30282,8037,30798,-14466,-23649,-19479,28154,-17583,25438,-3552,-29343,15933,10354,13130,-27535,-13984,6627,4958,-22365,11425,-4453,2238,14406,27039,14676,-14989,-14727,-22101,-28538,27314,141
02,-24333,-18105,18801,-9138,-14673,19855,-32350,19572,13778,-19770,15616,10890,-17929,-4916,-28221,-22817,16857,14524,-28029,-17220,-2874,-25446,25514,20831,-29420,2532,22765,-24929,25882,26678,-16216,23236,28049,-21573,-10699,-16426,-18487,30981,26536,-17458,17471,1981,3057,29573,27439,23516,-27802,-10980,30253,17225,10174,-21301,11260,-9547,19520,6813,-12757,5678,-3031,5066,-6325,-10006,-21580,28920,-18789,16338,-6915,-7920,-32625,-21249,5672,24022,15080,-1225,-25312,14840,-30468,-19178,-30014,-18342,18884,26391,-29973,25704,19690,-22507,11766,-7969,20232,-9526,19519,-7597,30292,-10343,27292,29028,5837,-25459,31217,-32659,-10798,-19235,9359,-24179,-32631,724,-5424,-26961,-2794,8730,-11257,-712,8511,12673,21903,-405,-9409,14450,-20853,14080,-637,25793,10520,22433,-25146,15661,27646,-8205,-10977,10425,-5012,-23330,4023,8538,26153,21285,27056,15379,7844,-16548,15224,14012,-23997,-26541,3501,-14377,-24660,-12164,-11850,-2909,21367,4369,-9837,-31759,1871,-16752,7296,9683,32679,-173,2976,988,15773,-28003,-32078,13021,-10368,-21819,-698,-9125,5874,32348,-6303,-8364,-12859,-22469,-9147,-12330,32494,-4269,20696,-30633,12207,11230,-28378,-26705,-12595,-8817,-4361,11529,634,7208,27131,-25885,-13654,23622,21739,-8777,-30774,8701,2967,-16807,-27105,4192,-32183,22870,10522,31523,-30111,-32588,29454,11970,13466,19662,3829,12262,-31298,-24410,-20512,31894,3473,6173,2313,17383,12574,-24624,-19329,16982,-23239,7223,-6295,15623,28037,4505,24907,1634,-8120,25404,15376,-31115,29167,6442,-11624,22507,-8252,31030,16404,25525,-7206,-23654,-27227,32379,-12251,-28954,-3532,-12402,-674,-17341,-20612,-26716,-21337,-1498,-10838,-25623,1898,11837,14576,30862,18402,17806,-1745,20706,-14923,2313,8069,17360,-5672,18732,-17738,-19580,-9697,16550,-28572,-7869,-1674,19132,-7730,2902,-15818,30945,25548,14709,-21908,3353,8135,-18784,14868,-25678,9060,17103,32576,14453,22226,14670,-28362,27217,-2843,-5603,-15094,32283,12456,31862,1778,-3293,27303,-247,4210,-13813,11450,-8638,-3755,6115,1436,-30035,265
86,28119,-3097,3195,-28202,12823,-4902,18829,-4315,16430,31557,11611,-27915,17506,-18859,30295,20255,24834,-20736,-17055,23389,-233,-7003,-6427,-17484,3267,25693,-17377,-26688,31596,-5858,-2166,-3424,-29305,19893,-27574,267,22924,-31906,22483,25101,-18366,12877,14653,-18683,11880,-30775,4073,23238,-24411,-11089,-907,-3242,-13819,31276,-30742,27306,31202,13808,-18721,20502,15104,-18315,6058,31300,-12412,24683,-20930,-12324,30467,-23565,-2391,-24561,20540,24946,32316,23940,-21560,31967,16038,27683,-3264,7734,-7802,-20946,-17127,13985,-21762,29590,-1284,-17581,-26220,-4655,-12272,23317,-5316,-26903,-12646,-11434,3973,-5209,10105,23655,-25110,-6598,25349,-4558,-15237,-11066,8619,-19238,-8649,5858,23147,-18166,-23220,15456,11010,28557,-550,7217,28135,29798,-17649,14931,-30942,19743,31645,-22857,4066,-27203,-9028,24635,31751,-9126,17485,-20203,1059,5538,13988,25298,32517,23646,8130,-22977,-22046,6495,2885,-14876,-18954,-27872,28342,-8401,-22038,10956,-32128,6996,27365,353,-24371,27689,-22566,-1872,10762,-13807,-32411,26505,-919,29517,-5569,1109,1525,-536,25325,4751,26245,-13453,301,-18410,6799,-24585,30856,-31282,16454,7040,18945,21622,-13420,-1142,9181,-28166,-5940,4608,-16237,29783,2606,29931,15247,-19857,22965,-22391,28827,-7816,10389,27862,-16000,14251,10114,3615,-17704,-31047,19082,21098,26715,19370,-12274,12210,23837,-21435,14275,6788,-13369,-8045,-16160,-17878,-1331,15786,17242,-21277,6758,28901,-27428,31565,22502,29640,-32690,30695,23346,21263,19639,-16834,4194,9616,2305,29976,29731,-1331,-26748,-4397,2507,20302,19940,-28951,16862,-19963,-17248,-3076,15993,15981,-7915,-276,-13313,4062,-3318,4649,7808,-23456,-8387,-1679,-22025,28260,-2099,-6418,7956,-24126,-25379,-8515,4580,-31153,4853,21066,14003,-12346,-31946,-17596,-828,-24405,-12379,16449,18699,-18358,-10299,-9772,31176,-22043,23924,-5988,27539,-17,1362,3602,30102,-27271,14893,-7293,792,282,-20582,30577,14895,-20886,-27473,2696,14881,-3474,14929,29950,29328,23838,-23235,3980,10661,3922,14016,18714,-4783,6301,30
175,-19267,-17722,2040,-22060,-13389,-8784,14889,-25492,26564,-18545,28589,-23426,-2988,-28813,-2689,19298,21379,-25423,13530,1797,-8900,-1706,20768,17023,-11184,-11389,-14256,-25771,-14154,15605,-23672,32280,18156,29043,-21354,24767,-10850,-901,25627,5945,28570,22225,-4828,-20792,30012,-9504,-711,1663,-17612,-8469,-16021,-19853,-29105,13895,20257,24855,-22486,-25475,-31821,2871,8733,6283,-6094,-10125,-28272,-2689,14886,6984,12134,28204,-31143,-24229,29392,-5760,-15660,22338,-29628,28910,-26584,-19589,30807,-15625,-14043,-8038,-32702,-23833,20635,-17900,-618,-16935,24147,-1955,84,32118,-7169,11060,-2423,28969,-30879,-18101,-1977,6442,29907,31274,16160,29584,13361,-5641,-29196,28178,-17242,-12402,-7903,3198,1257,5870,-19826,-27387,4287,3536,-6651,-20670,-29888,-19696,32455,-1458,32165,-26613,8921,-26071,-21758,2609,-22463,24265,-17641,-21411,-25983,19085,-32253,-11855,-1435,-30877,-10108,19095,-27299,13683,12495,-3190,30668,-28238,-15496,-16377,-9291,20352,26829,-30512,24786,-12760,-27500,-3684,-30842,-23414,-1454,8762,18637,-28679,-16334,-16474,-4770,25718,-5180,-11814,29178,-13149,-23897,16571,14844,3444,-19726,29639,-31160,26193,30488,-13080,-11969,-18185,-357,24862,10432,8891,-28170,9950,23541,-5850,-17714,-804,-22130,19458,-17029,-28930,7660,19228,17882,11141,-13030,14842,14400,19893,9920,13240,25361,15796,-9677,-10458,32454,-31672,7199,-24420,2195,-8196,-17776,-29408,-24749,10722,-10028,-2878,17241,14889,-5692,-11544,22001,6944,-25877,-22741,4228,18597,14762,-30175,-19938,-2325,-9327,-9860,-3873,8159,21210,-15664,-26113,-18325,15440,-10090,-26354,-8878,-1982,22961,-8834,1697,2355,18785,31181,-4736,-20263,10795,-13467,-7232,-22507,18199,11192,15257,23989,16113,-17047,-860,22146,17660,-25573,5495,19544,8383,23101,11838,-24247,-24709,-31901,23199,-15501,27412,-15843,1366,14349,-10729,5814,25145,13112,-1249,-19467,540,-26692,-10745,13625,16445,24165,10980,18544,14551,32480,-1667,24814,23569,-26731,634,6945,-30166,-4485,-19432,-1061,-15754,-10897,25315,31789,-23875,
-21793,-10578,-19878,-28341,26752,-13552,-14017,29705,-15759,-24723,19667,31406,-21215,-25968,14120,-21226,-29891,3613,1095,-31904,-22390,-19351,15164,23954,-30092,2722,29950,8662,17219,-8714,17555,-25432,22505,14232,-29141,-6088,16990,-374,-30605,-19751,13099,2804,-16592,-3737,8301,-29357,2344,-9214,-13534,23232,619,-15241,-14461,-15383,-23279,29768,25685,-27381,11489,16363,-13085,-30043,24488,-5377,29233,27113,23434,4702,22127,-3669,27548,8845,-14596,1773,30083,1922,243,-13868,14378,-19751,-18645,8273,30351,-25315,27891,-4549,-23640,10528,-10676,-8543,17926,-9629,28423,-1740,19281,14934,8771,19581,1548,22394,-32137,-9335,-17296,-8994,-4113,-17671,-21154,-27296,-10976,16031,411,-19124,-673,18694,-28261,-28368,-15181,-13365,-18039,-15391,12630,-24559,-2974,-26093,-12058,-17008,-6228,29254,6700,-4060,27902,-13786,-17522,23930,-21820,-13972,18794,-21956,30678,-3028,-32648,8829,-9269,-26869,-25210,2405,24610,-13723,-14324,7383,8103,-18344,1892,-2315,21896,3521 diff --git a/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice27.tflite b/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice27.tflite new file mode 100644 index 0000000..def590a Binary files /dev/null and b/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice27.tflite differ diff --git a/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice27_golden_int16.csv b/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice27_golden_int16.csv new file mode 100644 index 0000000..c1b736d --- /dev/null +++ b/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice27_golden_int16.csv @@ -0,0 +1 @@ 
+8229,22741,17335,-25743,-12240,-17765,5038,23890,-525,-8593,-31216,6255,-31838,10835,-21089,21905,13987,-20550,-27378,17686,-3236,-3906,-22329,-3392,-22543,6309,-27494,25849,9941,28995,-7996,3026,-6048,17681,-1537,4815,18640,-4793,16810,-4876,-9381,230,1087,12375,4469,-4177,21942,-15387,-21631,8372,-10023,32615,1662,-5859,10471,25990,-3841,19631,31591,20616,7408,16475,19603,27795,21372,26639,3054,21468,-28944,-11057,3861,3874,18819,31443,-15011,18072,2212,8692,-2216,851,6022,-10525,-31625,-20533,-18288,5082,-31536,-9183,2207,23850,29849,32514,16323,26188,-27876,-469,-13108,-5425,-7768,32572,28431,18760,-21812,28581,-22784,27202,-31538,2434,-2903,-6838,-24009,700,12832,-8955,-14462,-16681,-13553,17200,29741,-19399,-5863,-26383,6049,-18704,-27890,2199,-16339,17031,-22186,-19580,-10456,-23422,-557,-2892,3303,21516,3323,-8339,-12458,17612,18035,-2735,-3078,-484,-4695,-10226,9349,20616,5312,12311,557,9941,-21661,17591,-3516,-7102,-16426,3926,-845,28599,32702,8824,27434,-31771,6622,-2431,31311,25687,-12769,2460,2502,32230,20712,26090,-10491,23443,-3095,-10898,22792,-29005,6879,19300,-28108,7060,13697,-16019,13815,-21871,9838,-11951,1055,12756,30132,17734,-8900,-26572,-26208,-11879,24116,-31583,-16294,31496,20150,22825,4060,24074,-19597,25896,27814,16634,19248,11829,-12851,-9780,22400,11014,-14212,1700,22366,31120,-12302,-23089,-9173,30368,9518,-27890,4540,-15960,-967,-24678,-23495,-26717,-6228,-29177,-21547,-13754,-24925,3954,24399,172,22258,-13084,-12316,24416,821,-15253,14284,-11943,-26478,19214,-27153,28321,18360,11842,5839,27707,13548,14729,3161,9851,-2287,31608,-30332,-24086,2699,9832,913,-32617,21761,-10774,7505,-21047,-12296,15264,-2304,-31585,5639,-18029,16520,20281,3657,-14721,-16711,12939,25674,-14689,-2439,-9106,7713,15661,12743,-4132,26983,-10594,-5238,-14180,20544,-1853,-3242,-17512,-11927,-16975,18829,-18059,14181,-28677,17309,1051,4392,7692,-28963,-4560,-12597,-6255,-23000,-29720,28053,32253,-15596,-23895,-26327,-27845,-20618,-17747,-6913,18589,16692,-3074
9,6187,29657,21725,5462,26581,-18439,-30405,23784,11311,-10446,-20925,24185,-26519,-27694,-14862,9385,32190,-25787,14269,-16950,31979,23053,-309,-7066,8405,-16903,-22843,-19149,8486,-15944,17604,11955,-14415,22971,-25654,-11893,18483,-15254,27112,-27719,-4767,19619,-18791,-19978,-13234,-21686,-30334,-9267,-24720,-28649,-20450,-3062,-3028,29042,20001,23962,11299,-17928,22453,-14330,-7736,-19953,30255,19644,-30536,7136,-11499,-27297,7946,15758,-32738,20620,-796,-3108,-32321,5134,-18067,24110,-7729,-9899,-21594,-22131,-17791,906,20937,5266,-28894,-9062,-3917,-1442,20506,28474,30515,-23569,-8432,-31070,-18444,-13377,21215,-21923,6160,-20613,3088,-23294,32553,-7369,-20882,-24666,-21421,-9848,5230,28370,-12028,22041,-32561,-22965,-25549,5004,-9620,-24192,22511,25450,-21764,4428,28329,27503,-30020,26274,-24387,24454,4558,8160,26818,1708,-8706,20617,7233,-16770,3745,1602,-16433,29682,2426,32481,15551,6326,9451,27837,-30489,515,21317,24282,23059,10836,28723,20862,-15058,32465,-27462,-25130,22096,-10902,-25128,6665,24767,23400,20176,-22798,-20223,18717,12336,17081,-3610,-12421,18738,10162,-10807,-25655,-23850,-17282,-29905,25647,9898,14123,9036,24088,-4899,-20840,-571,-20351,-3211,7079,-15847,28817,1374,25292,-22367,18333,-7215,-22933,11371,-27555,20040,-24169,13031,-12299,-18867,3221,-22333,-9838,-32284,-27684,-18286,-25445,-20379,26276,-12284,10916,17951,-9592,-22235,-27744,8698,28423,-22070,7265,5088,30568,27425,-22000,-4722,-26328,-26483,13881,-23574,22617,-1030,-26980,-17520,-11551,1523,-19606,14154,30861,10552,25687,-1468,21940,-15627,-7521,-3208,-2329,30255,-141,-8240,6752,19713,-29566,10396,26643,14602,5597,-17481,-10547,16005,24998,-14619,-6516,-16972,-1967,15566,-15308,17780,-3530,6226,-20216,32161,-31318,-9007,-29482,30390,-25069,-7246,31203,-28003,18729,-19144,23451,10591,-9055,-9868,-11167,-13015,-27310,-9240,-16522,-24543,-12164,-1642,-12722,15416,-19234,-21551,-1311,161,6016,-25028,11679,28083,19354,-16357,-19457,-1174,23443,8474,-8881,7610,13327,18805,1677,205
5,23467,-1776,24140,21322,-8285,-9000,28661,32661,-23549,-24139,-25631,18702,27980,-30333,-24548,-25143,-26730,-8416,-32714,13601,11013,-1549,5532,-2414,29150,25781,-20336,28217,-23390,-12343,-31095,31480,-19485,20369,-29700,16775,27913,-19259,-8799,-28463,5230,-30665,16178,-30169,5459,23042,18457,-26577,-23228,-28674,-31946,16456,25221,18856,27464,2581,-22865,-12689,4304,28060,-25502,329,21238,-7074,-23839,-5337,14722,21304,-23698,20731,9957,-7872,-11388,-5462,-73,10969,-610,1824,-9207,15395,11767,-19480,14465,6816,-14749,-31998,-19802,29034,-26042,-19962,-10321,-24901,25015,-21075,25740,9775,-25351,-1501,-19131,-23243,21924,32125,-1139,20142,-6737,-4649,16209,21107,-14188,-4211,22467,32018,-18877,-718,-8920,-15494,-22581,10763,7040,8965,25143,-29246,29436,-25014,15296,-2985,16656,-8577,-8240,-26608,-8345,-16046,7794,14656,28484,-28825,4230,25774,-21935,1561,30739,26852,-31686,4617,31224,26424,-823,-3845,-21600,26658,-29124,20469,5761,14753,-15076,3267,-16216,-5427,1063,9273,13321,11290,19206,-24146,-28699,-7828,10388,5477,12649,-7229,13622,-11952,-5739,-31472,-24132,-13521,-30441,-30107,-11644,2466,25698,-24155,-835,949,-6838,-12391,-1708,-12883,10120,-19783,-24219,-2871,-17684,29579,-317,16676,316,23760,-8074,31123,-13820,23742,-20225,-18059,16736,-14937,12234,-13889,29117,29207,-6808,8127,14349,-28417,22536,-13258,17118,-26752,-5393,28481,6294,9390,-32075,-17479,-20475,3486,-14351,27984,-28276,1558,-12870,5335,14100,1613,6284,-7155,-28567,-21360,-7883,-8373,28183,-3729,-30181,21783,-2612,8155,-20705,4717,-26238,-1321,11320,-22169,-28426,-27193,31148,-19891,29020,29424,8096,-2147,14036,-18405,-29778,-1487,-30753,30573,-16038,7376,28846,18128,-23429,-21911,3312,-17838,3585,-19315,-28291,15005,17084,-32622,-9912,-13221,1203,13348,2428,32726,17372,-13619,1509,13447,1136,22066,-25749,-22070,19934,23465,-6293,-32118,-19999,-8814,-31697,-28422,32239,-2995,-16414,-9074,-30833,-9914,2838,22668,22834,11898,15767,15161,-15601,-18815,-12711,18860,-26515,4499,9516,-24893,945
1,-16138,-28814,-18594,1599,27969,3045,-15519,-30891,-19450,9398,27932,7550,-29225,-21060,13720,111,20685,-2354,-4832,-12417,-32663,15237,238,30161,-32192,30020,22747,-16862,17124,-10613,-13728,-16548,12108,7016,-7405,-10115,-18300,-2729,8584,16453,-17684,9465,10198,11506,9467,-23789,1247,-30295,4501,9800,-5983,25394,10684,2007,-5550,-14685,14715,28100,-14580,-24654,889,12963,6814,17084,-10015,-1015,7073,-27793,-26022,28863,-17558,-14516,-27511,21344,23748,-28295,-18226,19142,-16157,-5808,-23799,-30905,14919,13121,-5054,26818,17002,-9762,-31334,-3831,13258,26358,4542,-2909,3789,24247,-3999,-8589,-13355,-9689,-418,30440,30345,9245,-20663,21700,10817,7248,-29210,6096,-1638,-29952,-14955,-10494,-23851,-26244,-9084,-17753,-3472,-10270,19909,-12387,12322,-21101,-32575,18359,2351,-7199,14142,17844,-16521,19762,-12580,14092,7782,-26612,6375,-22681,31129,-3127,-4952,-18885,27982,18787,21501,28215,7888,27779,30832,21506,-32379,-14956,-32431,-3363,13136,14630,-18363,9585,7174,27733,28678,-4992,8627,21884,-8962,-9362,28715,-21240,23877,17854,20300,31504,-6578,-7538,28747,-2708,-15147,11687,-14078,17151,21806,5455,20836,-1427,12289,-1191,30379,6792,-18313,15218,20190,9092,-3399,-22006,-8253,-5117,-3091,21792,892,-15902,28360,27532,20656,10349,29448,23956,-16019,16622,10079,3066,24227,220,6925,6050,-22634,-4192,-16108,-14765,-11949,30499,-31447,-554,-7899,-5554,-6968,6131,17924,8531,30648,-10890,10819,-27632,12923,-13191,-28696,31795,-12628,-11136,25794,-3941,19341,4701,-6922,-6370,21941,16935,4384,-14818,6819,23153,2764,-187,28092,-19389,31737,-13892,1872,-28420,23376,2599,-15367,-12621,25519,-26795,15374,-14029,7375,2065,-1623,-15302,22636,7593,-20428,-20137,6334,14881,-2823,-9703,28058,-2806,3328,-24255,-6038,15404,-21281,21491,-285,27718,-29515,-30186,5638,17263,-889,-10302,-1935,-14980,-5030,-31839,2046,-624,-8146,-28434,4760,-10561,17083,24150,-3657,-24549,-22911,3367,-7617,-17784,11640,-2167,20366,5458,-22351,-29473,-11202,-20225,-3350,-3907,-23953,-20474,-10481,-18497,28
266,-15446,1247,25609,-11563,20625,2026,22947,21321,17660,5601,-29729,18142,21437,-12309,11206,-22857,464,4439,18243,-18580,-3198,3360,19871,-11442,-12123,32317,5713,-16321,18033,9774,16019,16616,2721,9495,-20776,-14637,23099,-6626,28810,15695,25250,7810,-2802,-9127,-22433,32654,-4027,32017,-10795,10602,-25943,12901,1236,14814,-25015,31851,27688,24490,-4139,-20895,-15872,23189,-17803,25436,12156,-12811,20277,10116,12388,-2481,-8097,11489,-20543,25748,29929,12530,30329,16462,28715,10866,-18117,12666,32634,-13730,20688,-21945,-30290,32490,-1035,-23870,-26860,542,5247,31525,-24007,-25855,-18781,-20659,24273,-1258,-13553,29957,-3229,-26748,-22035,13131,22262,13125,-4583,31685,15995,-23743,-6799,-24101,911,-31025,19503,-5452,-9918,2939,8753,27815,-13664,-8,24303,8381,-8109,-19553,15486,-11207,16212,10907,28325,-3244,-16796,-7339,-25799,29215,-17454,4622,24093,-4998,-21577,-18406,30189,13140,7751,24164,-7990,-27880,5379,-17231,-24011,1995,21312,-4806,5662,8937,26395,13381,-12859,-16494,-10601,-23690,4526,8203,12615,-13061,-13573,8431,11889,30508,-14767,-23469,8763,-10529,-31616,22776,19521,-7606,-30782,6443,21143,-4061,21528,-7876,-23958,24007,5991,16787,32166,12764,2880,26127,-12862,-21087,-31329,9996,-32741,7304,5402,-26009,10536,1004,12078,-25616,26312,-24570,30927,3954,-21346,11209,-29840,-17264,-22784,-15885,-14770,-24793,20005,-26708,13585,-8364,27704,24824,21754,11440,-21335,-10299,3858,14575,-26228,-16321,-31791,-21240,-32526,-8726,31450,-19522,9240,7397,31426,-6104,-3333,27530,21108,-19381,29119,-27763,-25636,-9691,13527,20748,-14923,-10772,11590,26971,-23090,-31165,-15602,-22441,19760,15109,-18642,26256,2845,6480,16071,17652,5168,26285,-21056,-13042,10331,-23264,6531,-21813,24167,-18257,-7662,-482,-31407,2029,22200,10533,937,-30896,-16045,-2569,-20982,-11351,18284,2994,30644,-24900,-20993,31417,-9557,19224,29507,13588,-11171,5432,31735,-9335,-7881,22650,-28230,2835,19288,-2298,5696,27793,-23373,5365,-5657,-17206,18812,441,-16285,-14400,24607,-21126,-3683,-24607,
29337,3381,-8689,29627,18337,29431,-5790,-559,-7876,20908,9865,1944,18623,-14633,25321,31574,30139,-13821,23042,-29345,-7139,879,-31896,6436,6350,-1576,-16365,12156,-2770,-18754,25871,-19825,5448,17219,-31112,9114,5827,5439,-28728,7705,25783,17544,21604,-28047,-18557,25283,-24067,-21884,-9897,-7032,-8696,16235,6973,2974,-19385,-21046,-23015,4208,27726,-3163,-15565,-7249,14927,-23955,2895,17410,-12487,28703,17787,-21049,-17738,-17803,-24947,-21951,-29658,2393,17359,23420,16014,-31420,-15958,29585,3611,-29904,-17139,-28474,-31812,-16084,-3750,1987,20246,24384,-1464,-15249,-13165,-20198,31447,-10283,-14055,78,8813,12218,-18487,-11493,27475,10628,-32610,-29899,19239,32325,-7756,15436,12136,-16366,11493,2132,17043,13818,-16846,3917,13782,-15332,-29686,-21908,-26700,19017,23116,1848,-2038,1807,-232,-9221,-11277,-23125,23280,-8696,-17579,11331,11437,-30761,-30533,-17892,10176,-18497,20503,-7076,25610,13825,15442,-14239,7229,4190,-31676,13812,-27212,17217,-13800,2677,4363,-5289,-4861,-6079,23509,-8729,-14270,-20621,-15381,-16451,-23164,-20323,-13973,-19422,-151,-29897,18293,9636,-23122,17366,-12577,9486,13287,-26306,-3663,-27948,-29028,-9373,23970,-19714,-16955,26168,-20793,-31644,22119,7164,-2596,22589,-2139,-19414,28723,20265,-10846,2540,5957,-9215,-32752,7039,23766,-17924,7436,-25266,-26352,-15775,-31797,-9461,2286,18984,842,-17617,-5938,-32222,-3702,-3086,22559,-22315,13150,-32124,15399,-19649,17587,-28963,13516,-24566,-17855,29285,-27654,-10249,-27531,-3476,-4848,22730,-70,-29803,-10203,-17760,4097,31265,-11044,13083,18390,-10585,-8488,3563,32299,23835,1871,-811,-12148,-14289,-13850,26799,4812,8937,25156,-10639,-9672,27750,13040,-31484,28088,-26054,283,-14012,-31127,-21234,-8864,2431,4935,-23666,30011,-3122,16332,-9967,2112,6273,-10584,-8935,22462,-22783,29027,30541,27776,-25250,-26544,18835,31721,20186,28250,9587,15946,-25939,-15532,17263,-30537,-7736,24255,18000,-8681,26386,26412,25387,12994,27527,21093,-11350,-23915,7433,-9497,-26801,16131,8149,10535,-2396,-4814,-11
64,-24995,13046,-28239,-15326,-14989,-16157,20076,-85,28817,-13750,-3186,13226,5248,-32084,15270,10985,20041,20746,25700,12769,30798,26314,23982,-15295,-2798,-15003,27368,6127,11729,-21238,-27929,21321,4985,26492,-7673,-31351,-9467,-22887,-29006,-17038,4718,7548,13953,-3745,-16502,-5177,-1829,-4741,-5077,-6075,7245,420,-7256,32325,-15734,15942,16481,30158,26474,19995,-18350,-26535,-27696,-21073,2968,29868,-26206,-4748,-4287,9942,-16952,-30179,32262,-29768,31368,-16146,-27206,21342,27273,5344,19756,-2947,29487,-9682,-1970,-8443,6689,-4602,-11651,23992,15392,-20899,-16235,-2485,22899,-30705,-1407,-29250,-28528,-15018,-28519,765,23584,-31950,2385,32682,15938,715,-23841,-14759,-22489,-18466,23515,-18189,7525,2622,29603,19716,-10301,-18587,-31269,16854,-7807,833,-23290,7661,19991,-9650,-20719,-28907,2480,3138,9170,17314,31639,-22507,8453,-6156,6025,25617,31272,-12657,13861,-11738,20553,32310,15050,5151,6001,-6429,27698,-22854,17169,-28891,31857,-25559,3438,-10393,-27757,-2245,-26670,26827,16482,-8137,-3177,16508,27627,18943,-2947,21430,-24238,27773,-1660,-23994,-12945,24649,16261,-29020,-13258,-4410,-27749,13286,21628,32135,24276,-5250,-20368,-23507,12214,13393,15685,23990,29069,10454,21216,22087,27565,-31103,-19731,-758,9844,20026,9686,-18496,-13204,-10905,24100,-28795,11451,-25208,-23838,-24514,14146,-4538,-28140,-11956,30850,697,-24305,20040,-21680,-26713,-31220,22622,7541,-706,22468,-31109,20408,-21081,12348,-4307,26808,26266,5434,20616,-15074,-17517,-15767,-2908,15170,-31672,-19493,-17121,-23930,-28243,5148,24141,-30277,-28329,20252,8598,-3465,-11266,3292,25936,2772,-18411,-29118,-20361,-24,-16651,-1062,1056,26331,4978,-22977,-32115,32537,10262,-8457,31617,-3652,-22818,-20817,-8357,-14446,20842,-14613,10760,24637,-27793,9840,-9866,-14142,-26129,-26423,-24627,-6717,15307,-16500,-8097,-229,3319,-30219,7610,23550,23677,7778,-7314,-8925,-6494,20489,17563,-27391,-29811,20991,14770,-13438,1586,-9369,-27715,18973,-22965,10993,10077,7041,-17520,6674,-15855,-21552,-26760,299
81,-10236,29182,11489,6639,19816,30345,-2339,-31559,-11964,-1333,-20319,30070,6286,-17203,-17251,-19689,-11159,-8635,16887,-19314,-14482,15754,-24983,-14142,-28785,30824,11412,-17725,10679,4484,-21599,-2664,-13496,20330,-14993,-11550,-28721,-1049,-13427,29172,-32729,7336,-22947,29527,21762,-1495,27268,-25099,11462,16753,-4515,-27076,15749,-29931,-20915,7002,5959,15636,-30797,-155,17956,-24971,-18314,11462,3220,25806,-17201,-10327,7091,4038,-3338,-7403,-30655,4444,4376,-7954,32649,12991,-19941,-22834,28927,-12597,1184,13289,14652,-20351,13636,5075,6518,-15325,7851,1056,10266,-5942,25007,-21085,-24397,9858,2041,6705,-23294,-15614,-13719,2209,-1250,7308,3573,-20372,20674,-31372,24023,25670,26474,-3402,-32280,-29717,-15365,-4109,-29237,7425,32266,11497,-30172,26586,20332,8425,-17941,-29231,19948,19967,12098,-27299,-22828,-8136,-24552,-13685,-12010,26880,26220,966,-6425,-23947,-11193,1943,7736,-17381,1622,25914,-13191,-18847,24791,-4999,2068,-10803,8128,-28964,32380,19128,30709,-6440,1535,-12471,14185,-23113,19228,-5665,2466,21650,32421,-29926,-25057,-9481,2056,-32114,-21698,8600,-19530,-18032,-5153,7530,-12970,-15455,4223,-12427,-19519,22515,30272,-12975,14942,-27188,-17059,12071,-7720,4878,13423,2062,12873,-29263,29869,-19776,30689,16244,-10603,30145,18991,1426,-2974,2583,-11712,7736,23308,-31039,32139,16273,-18362,-11560,-1741,-8237,-18021,24640,-31307,-14995,-26041,-30791,6982,6308,4569,-23781,29378,5447,-28257,-16588,-23282,2140,11817,24858,30085,17138,1785,22734,-7888,1857,3887,23639,1878,-10881,-9714,-10495,-2190,1272,30260,-3918,4581,22481,-8757,371,2927,-30037,10224,-16139,-20464,21431,-32422,17698,17960,-5740,-1914,1903,-19295,-26765,-18504,23415,24482,10064,-22925,-994,8054,-15274,-28614,-17659,-28466,164,2223,7309,-6592,-19220,-28765,17155,25931,-10701 diff --git a/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice27_input0_int16.csv b/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice27_input0_int16.csv new 
file mode 100644 index 0000000..804a2e3 --- /dev/null +++ b/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice27_input0_int16.csv @@ -0,0 +1 @@ +-18606,-27171,-9490,-1257,17685,-8776,-3938,-6993,-14195,-23024,-29704,-753,-986,-22007,8086,32257,8229,22741,17335,-25743,-12240,-17765,5038,23890,-525,-8593,-31216,6255,-31838,10835,-21089,21905,13987,-20550,-27378,17686,-3236,-3906,-22329,-3392,-22543,6309,-27494,25849,9941,28995,-7996,3026,-6048,17681,-1537,4815,18640,-4793,16810,-4876,-9381,230,1087,12375,4469,-4177,21942,-15387,-21631,8372,-10023,32615,1662,-5859,10471,25990,-3841,19631,31591,20616,7408,16475,19603,27795,21372,26639,3054,21468,-28944,-11057,3861,3874,18819,31443,-15011,18072,2212,8692,-2216,851,6022,-10525,-31625,-20533,-18288,5082,-31536,-9183,2207,23850,29849,32514,16323,26188,-27876,-469,-13108,-5425,-7768,32572,28431,18760,-21812,28581,-22784,27202,-31538,2434,-2903,-6838,-24009,700,12832,-8955,-14462,-16681,-13553,17200,29741,-19399,-5863,-26383,6049,-18704,-27890,2199,-16339,17031,-22186,-19580,-10456,-23422,-557,-2892,3303,21516,3323,-8339,-12458,17612,18035,-2735,-3078,-484,-4695,-10226,9349,20616,5312,12311,557,9941,-21661,17591,-3516,-7102,-16426,3926,-845,28599,32702,8824,27434,-31771,6622,-2431,31311,25687,-12769,2460,2502,32230,20712,26090,-10491,23443,-3095,-10898,22792,-29005,6879,19300,-28108,7060,13697,-16019,13815,-21871,9838,-11951,1055,12756,30132,17734,-8900,-26572,-26208,-11879,24116,-31583,-16294,31496,20150,22825,4060,24074,-19597,25896,27814,16634,19248,11829,-12851,-9780,22400,11014,-14212,1700,22366,31120,-12302,-23089,-9173,30368,9518,-27890,4540,-15960,-967,-24678,-23495,-26717,-6228,-29177,-21547,-13754,-24925,3954,24399,172,22258,-13084,-12316,24416,821,-15253,14284,-11943,-26478,19214,-27153,28321,18360,11842,5839,27707,13548,14729,3161,9851,-2287,31608,-30332,-24086,2699,9832,913,-32617,21761,-10774,7505,-21047,-12296,15264,-2304,-31585,5639,-18029,16520,20281,3657,-14721,-16711,12939,25674,-146
89,-2439,-9106,7713,15661,12743,-4132,26983,-10594,-5238,-14180,20544,-1853,-3242,-17512,-11927,-16975,18829,-18059,14181,-28677,17309,1051,4392,7692,-28963,-4560,-12597,-6255,-23000,-29720,28053,32253,-15596,-23895,-26327,-27845,-20618,-17747,-6913,18589,16692,-30749,6187,29657,21725,5462,26581,-18439,-30405,23784,11311,-10446,-20925,24185,-26519,-27694,-14862,9385,32190,-25787,14269,-16950,31979,23053,-309,-7066,8405,-16903,-22843,-19149,8486,-15944,17604,11955,-14415,22971,-25654,-11893,18483,-15254,27112,-27719,-4767,19619,-18791,-19978,-13234,-21686,-30334,-9267,-24720,-28649,-20450,-3062,-3028,29042,20001,23962,11299,-17928,22453,-14330,-7736,-19953,30255,19644,-30536,7136,-11499,-27297,7946,15758,-32738,20620,-796,-3108,-32321,5134,-18067,24110,-7729,-9899,-21594,-22131,-17791,906,20937,5266,-28894,-9062,-3917,-1442,20506,28474,30515,-23569,-8432,-31070,-18444,-13377,21215,-21923,6160,-20613,3088,-23294,32553,-7369,-20882,-24666,-21421,-9848,5230,28370,-12028,22041,-32561,-22965,-25549,5004,-9620,-24192,22511,25450,-21764,4428,28329,27503,-30020,26274,-24387,24454,4558,8160,26818,1708,-8706,20617,7233,-16770,3745,1602,-16433,29682,2426,32481,15551,6326,9451,27837,-30489,515,21317,24282,23059,10836,28723,20862,-15058,32465,-27462,-25130,22096,-10902,-25128,6665,24767,23400,20176,-22798,-20223,18717,12336,17081,-3610,-12421,18738,10162,-10807,-25655,-23850,-17282,-29905,25647,9898,14123,9036,24088,-4899,-20840,-571,-20351,-3211,7079,-15847,28817,1374,25292,-22367,18333,-7215,-22933,11371,-27555,20040,-24169,13031,-12299,-18867,3221,-22333,-9838,-32284,-27684,-18286,-25445,-20379,26276,-12284,10916,17951,-9592,-22235,-27744,8698,28423,-22070,7265,5088,30568,27425,-22000,-4722,-26328,-26483,13881,-23574,22617,-1030,-26980,-17520,-11551,1523,-19606,14154,30861,10552,25687,-1468,21940,-15627,-7521,-3208,-2329,30255,-141,-8240,6752,19713,-29566,10396,26643,14602,5597,-17481,-10547,16005,24998,-14619,-6516,-16972,-1967,15566,-15308,17780,-3530,6226,-20216,32161,-3131
8,-9007,-29482,30390,-25069,-7246,31203,-28003,18729,-19144,23451,10591,-9055,-9868,-11167,-13015,-27310,-9240,-16522,-24543,-12164,-1642,-12722,15416,-19234,-21551,-1311,161,6016,-25028,11679,28083,19354,-16357,-19457,-1174,23443,8474,-8881,7610,13327,18805,1677,2055,23467,-1776,24140,21322,-8285,-9000,28661,32661,-23549,-24139,-25631,18702,27980,-30333,-24548,-25143,-26730,-8416,-32714,13601,11013,-1549,5532,-2414,29150,25781,-20336,28217,-23390,-12343,-31095,31480,-19485,20369,-29700,16775,27913,-19259,-8799,-28463,5230,-30665,16178,-30169,5459,23042,18457,-26577,-23228,-28674,-31946,16456,25221,18856,27464,2581,-22865,-12689,4304,28060,-25502,329,21238,-7074,-23839,-5337,14722,21304,-23698,20731,9957,-7872,-11388,-5462,-73,10969,-610,1824,-9207,15395,11767,-19480,14465,6816,-14749,-31998,-19802,29034,-26042,-19962,-10321,-24901,25015,-21075,25740,9775,-25351,-1501,-19131,-23243,21924,32125,-1139,20142,-6737,-4649,16209,21107,-14188,-4211,22467,32018,-18877,-718,-8920,-15494,-22581,10763,7040,8965,25143,-29246,29436,-25014,15296,-2985,16656,-8577,-8240,-26608,-8345,-16046,7794,14656,28484,-28825,4230,25774,-21935,1561,30739,26852,-31686,4617,31224,26424,-823,-3845,-21600,26658,-29124,20469,5761,14753,-15076,3267,-16216,-5427,1063,9273,13321,11290,19206,-24146,-28699,-7828,10388,5477,12649,-7229,13622,-11952,-5739,-31472,-24132,-13521,-30441,-30107,-11644,2466,25698,-24155,-835,949,-6838,-12391,-1708,-12883,10120,-19783,-24219,-2871,-17684,29579,-317,16676,316,23760,-8074,31123,-13820,23742,-20225,-18059,16736,-14937,12234,-13889,29117,29207,-6808,8127,14349,-28417,22536,-13258,17118,-26752,-5393,28481,6294,9390,-32075,-17479,-20475,3486,-14351,27984,-28276,1558,-12870,5335,14100,1613,6284,-7155,-28567,-21360,-7883,-8373,28183,-3729,-30181,21783,-2612,8155,-20705,4717,-26238,-1321,11320,-22169,-28426,-27193,31148,-19891,29020,29424,8096,-2147,14036,-18405,-29778,-1487,-30753,30573,-16038,7376,28846,18128,-23429,-21911,3312,-17838,3585,-19315,-28291,15005,17084,-32
622,-9912,-13221,1203,13348,2428,32726,17372,-13619,1509,13447,1136,22066,-25749,-22070,19934,23465,-6293,-32118,-19999,-8814,-31697,-28422,32239,-2995,-16414,-9074,-30833,-9914,2838,22668,22834,11898,15767,15161,-15601,-18815,-12711,18860,-26515,4499,9516,-24893,9451,-16138,-28814,-18594,1599,27969,3045,-15519,-30891,-19450,9398,27932,7550,-29225,-21060,13720,111,20685,-2354,-4832,-12417,-32663,15237,238,30161,-32192,30020,22747,-16862,17124,-10613,-13728,-16548,12108,7016,-7405,-10115,-18300,-2729,8584,16453,-17684,9465,10198,11506,9467,-23789,1247,-30295,4501,9800,-5983,25394,10684,2007,-5550,-14685,14715,28100,-14580,-24654,889,12963,6814,17084,-10015,-1015,7073,-27793,-26022,28863,-17558,-14516,-27511,21344,23748,-28295,-18226,19142,-16157,-5808,-23799,-30905,14919,13121,-5054,26818,17002,-9762,-31334,-3831,13258,26358,4542,-2909,3789,24247,-3999,-8589,-13355,-9689,-418,30440,30345,9245,-20663,21700,10817,7248,-29210,6096,-1638,-29952,-14955,-10494,-23851,-26244,-9084,-17753,-3472,-10270,19909,-12387,12322,-21101,-32575,18359,2351,-7199,14142,17844,-16521,19762,-12580,14092,7782,-26612,6375,-22681,31129,-3127,-4952,-18885,27982,18787,21501,28215,7888,27779,30832,21506,-32379,-14956,-32431,-3363,13136,14630,-18363,9585,7174,27733,28678,-4992,8627,21884,-8962,-9362,28715,-21240,23877,17854,20300,31504,-6578,-7538,28747,-2708,-15147,11687,-14078,17151,21806,5455,20836,-1427,12289,-1191,30379,6792,-18313,15218,20190,9092,-3399,-22006,-8253,-5117,-3091,21792,892,-15902,28360,27532,20656,10349,29448,23956,-16019,16622,10079,3066,24227,220,6925,6050,-22634,-4192,-16108,-14765,-11949,30499,-31447,-554,-7899,-5554,-6968,6131,17924,8531,30648,-10890,10819,-27632,12923,-13191,-28696,31795,-12628,-11136,25794,-3941,19341,4701,-6922,-6370,21941,16935,4384,-14818,6819,23153,2764,-187,28092,-19389,31737,-13892,1872,-28420,23376,2599,-15367,-12621,25519,-26795,15374,-14029,7375,2065,-1623,-15302,22636,7593,-20428,-20137,6334,14881,-2823,-9703,28058,-2806,3328,-24255,-6038,1540
4,-21281,21491,-285,27718,-29515,-30186,5638,17263,-889,-10302,-1935,-14980,-5030,-31839,2046,-624,-8146,-28434,4760,-10561,17083,24150,-3657,-24549,28158,-19856,31618,24518,-8027,23184,20140,17899,13052,32405,1250,-32376,954,10486,20329,-24048,20333,-27009,-29164,-19316,-24773,8179,21867,32617,24197,-7272,-27627,7689,17563,-3242,16696,30301,-22911,3367,-7617,-17784,11640,-2167,20366,5458,-22351,-29473,-11202,-20225,-3350,-3907,-23953,-20474,-10481,-18497,28266,-15446,1247,25609,-11563,20625,2026,22947,21321,17660,5601,-29729,18142,21437,-12309,11206,-22857,464,4439,18243,-18580,-3198,3360,19871,-11442,-12123,32317,5713,-16321,18033,9774,16019,16616,2721,9495,-20776,-14637,23099,-6626,28810,15695,25250,7810,-2802,-9127,-22433,32654,-4027,32017,-10795,10602,-25943,12901,1236,14814,-25015,31851,27688,24490,-4139,-20895,-15872,23189,-17803,25436,12156,-12811,20277,10116,12388,-2481,-8097,11489,-20543,25748,29929,12530,30329,16462,28715,10866,-18117,12666,32634,-13730,20688,-21945,-30290,32490,-1035,-23870,-26860,542,5247,31525,-24007,-25855,-18781,-20659,24273,-1258,-13553,29957,-3229,-26748,-22035,13131,22262,13125,-4583,31685,15995,-23743,-6799,-24101,911,-31025,19503,-5452,-9918,2939,8753,27815,-13664,-8,24303,8381,-8109,-19553,15486,-11207,16212,10907,28325,-3244,-16796,-7339,-25799,29215,-17454,4622,24093,-4998,-21577,-18406,30189,13140,7751,24164,-7990,-27880,5379,-17231,-24011,1995,21312,-4806,5662,8937,26395,13381,-12859,-16494,-10601,-23690,4526,8203,12615,-13061,-13573,8431,11889,30508,-14767,-23469,8763,-10529,-31616,22776,19521,-7606,-30782,6443,21143,-4061,21528,-7876,-23958,24007,5991,16787,32166,12764,2880,26127,-12862,-21087,-31329,9996,-32741,7304,5402,-26009,10536,1004,12078,-25616,26312,-24570,30927,3954,-21346,11209,-29840,-17264,-22784,-15885,-14770,-24793,20005,-26708,13585,-8364,27704,24824,21754,11440,-21335,-10299,3858,14575,-26228,-16321,-31791,-21240,-32526,-8726,31450,-19522,9240,7397,31426,-6104,-3333,27530,21108,-19381,29119,-27763,-25636,
-9691,13527,20748,-14923,-10772,11590,26971,-23090,-31165,-15602,-22441,19760,15109,-18642,26256,2845,6480,16071,17652,5168,26285,-21056,-13042,10331,-23264,6531,-21813,24167,-18257,-7662,-482,-31407,2029,22200,10533,937,-30896,-16045,-2569,-20982,-11351,18284,2994,30644,-24900,-20993,31417,-9557,19224,29507,13588,-11171,5432,31735,-9335,-7881,22650,-28230,2835,19288,-2298,5696,27793,-23373,5365,-5657,-17206,18812,441,-16285,-14400,24607,-21126,-3683,-24607,29337,3381,-8689,29627,18337,29431,-5790,-559,-7876,20908,9865,1944,18623,-14633,25321,31574,30139,-13821,23042,-29345,-7139,879,-31896,6436,6350,-1576,-16365,12156,-2770,-18754,25871,-19825,5448,17219,-31112,9114,5827,5439,-28728,7705,25783,17544,21604,-28047,-18557,25283,-24067,-21884,-9897,-7032,-8696,16235,6973,2974,-19385,-21046,-23015,4208,27726,-3163,-15565,-7249,14927,-23955,2895,17410,-12487,28703,17787,-21049,-17738,-17803,-24947,-21951,-29658,2393,17359,23420,16014,-31420,-15958,29585,3611,-29904,-17139,-28474,-31812,-16084,-3750,1987,20246,24384,-1464,-15249,-13165,-20198,31447,-10283,-14055,78,8813,12218,-18487,-11493,27475,10628,-32610,-29899,19239,32325,-7756,15436,12136,-16366,11493,2132,17043,13818,-16846,3917,13782,-15332,-29686,-21908,-26700,19017,23116,1848,-2038,1807,-232,-9221,-11277,-23125,23280,-8696,-17579,11331,11437,-30761,-30533,-17892,10176,-18497,20503,-7076,25610,13825,15442,-14239,7229,4190,-31676,13812,-27212,17217,-13800,2677,4363,-5289,-4861,-6079,23509,-8729,-14270,-20621,-15381,-16451,-23164,-20323,-13973,-19422,-151,-29897,18293,9636,-23122,17366,-12577,9486,13287,-26306,-3663,-27948,-29028,-9373,23970,-19714,-16955,26168,-20793,-31644,22119,7164,-2596,22589,-2139,-19414,28723,20265,-10846,2540,5957,-9215,-32752,7039,23766,-17924,7436,-25266,-26352,-15775,-31797,-9461,2286,18984,842,-17617,-5938,-32222,-3702,-3086,22559,-22315,13150,-32124,15399,-19649,17587,-28963,13516,-24566,-17855,29285,-27654,-10249,-27531,-3476,-4848,22730,-70,-29803,-10203,-17760,4097,31265,-11044,1308
3,18390,-10585,-8488,3563,32299,23835,1871,-811,-12148,-14289,-13850,26799,4812,8937,25156,-10639,-9672,27750,13040,-31484,28088,-26054,283,-14012,-31127,-21234,-8864,2431,4935,-23666,30011,-3122,16332,-9967,2112,6273,-10584,-8935,22462,-22783,29027,30541,27776,-25250,-26544,18835,31721,20186,28250,9587,15946,-25939,-15532,17263,-30537,-7736,24255,18000,-8681,26386,26412,25387,12994,27527,21093,-11350,-23915,7433,-9497,-26801,16131,8149,10535,-2396,-4814,-1164,-24995,13046,-28239,-15326,-14989,-16157,20076,-85,28817,-13750,-3186,13226,5248,-32084,15270,10985,20041,20746,25700,12769,30798,26314,23982,-15295,-2798,-15003,27368,6127,11729,-21238,-27929,21321,4985,26492,-7673,-31351,-9467,-22887,-29006,-17038,4718,7548,13953,-3745,-16502,-5177,-1829,-4741,-5077,-6075,7245,420,-7256,32325,-15734,15942,16481,30158,26474,19995,-18350,-26535,-27696,-21073,2968,29868,-26206,-4748,-4287,9942,-16952,-30179,32262,-29768,31368,-16146,-27206,21342,27273,5344,19756,-2947,29487,-9682,-1970,-8443,6689,-4602,-11651,23992,15392,-20899,-16235,-2485,22899,-30705,-1407,-29250,-28528,-15018,-28519,765,23584,-31950,2385,32682,15938,715,-23841,-14759,-22489,-18466,23515,-18189,7525,2622,29603,19716,-10301,-18587,-31269,16854,-7807,833,-23290,7661,19991,-9650,-20719,-28907,2480,3138,9170,17314,31639,-22507,8453,-6156,6025,25617,31272,-12657,13861,-11738,20553,32310,15050,5151,6001,-6429,27698,-22854,17169,-28891,31857,-25559,3438,-10393,-27757,-2245,-26670,26827,16482,-8137,-3177,16508,27627,18943,-2947,21430,-24238,27773,-1660,-23994,-12945,24649,16261,-29020,-13258,-4410,-27749,13286,21628,32135,24276,-5250,-20368,-23507,12214,13393,15685,23990,29069,10454,21216,22087,27565,-31103,-19731,-758,9844,20026,9686,-18496,-13204,-10905,24100,-28795,11451,-25208,-23838,-24514,14146,-4538,-28140,-11956,30850,697,-24305,20040,-21680,-26713,-31220,22622,7541,-706,22468,-31109,20408,-21081,12348,-4307,26808,26266,5434,20616,-15074,-17517,-15767,-2908,15170,-31672,-19493,-17121,-23930,-28243,5148,24141
,-30277,-28329,20252,8598,-3465,-11266,3292,25936,2772,-18411,-29118,-20361,-24,-16651,-1062,1056,26331,4978,-22977,-32115,32537,10262,-8457,31617,-3652,-22818,-20817,-8357,-14446,20842,-14613,10760,24637,-27793,9840,-9866,-14142,-26129,-26423,-24627,-6717,15307,-16500,-8097,-229,3319,-30219,7610,23550,23677,7778,-7314,-8925,-6494,20489,17563,-27391,-29811,20991,14770,-13438,1586,-9369,-27715,18973,-22965,10993,10077,7041,-17520,6674,-15855,-21552,-26760,29981,-10236,29182,11489,6639,19816,30345,-2339,-31559,-11964,-1333,-20319,30070,6286,-17203,-17251,-19689,-11159,-8635,16887,-19314,-14482,15754,-24983,-14142,-28785,30824,11412,-17725,10679,4484,-21599,-2664,-13496,20330,-14993,-11550,-28721,-1049,-13427,29172,-32729,7336,-22947,29527,21762,-1495,27268,-25099,11462,16753,-4515,-27076,15749,-29931,-20915,7002,5959,15636,-30797,-155,17956,-24971,-18314,11462,3220,25806,-17201,-10327,7091,4038,-3338,-7403,-30655,4444,4376,-7954,32649,12991,-19941,-22834,28927,-12597,1184,13289,14652,-20351,13636,5075,6518,-15325,7851,1056,10266,-5942,25007,-21085,-24397,9858,2041,6705,-23294,-15614,-13719,2209,-1250,7308,3573,-20372,20674,-31372,24023,25670,26474,-3402,-32280,-29717,-15365,-4109,-29237,7425,32266,11497,-30172,26586,20332,8425,-17941,-29231,19948,19967,12098,-27299,-22828,-8136,-24552,-13685,-12010,26880,26220,966,-6425,-23947,-11193,1943,7736,-17381,1622,25914,-13191,-18847,24791,-4999,2068,-10803,8128,-28964,32380,19128,30709,-6440,1535,-12471,14185,-23113,19228,-5665,2466,21650,32421,-29926,-25057,-9481,2056,-32114,-21698,8600,-19530,-18032,-5153,7530,-12970,-15455,4223,-12427,-19519,22515,30272,-12975,14942,-27188,-17059,12071,-7720,4878,13423,2062,12873,-29263,29869,-19776,30689,16244,-10603,30145,18991,1426,-2974,2583,-11712,7736,23308,-31039,32139,16273,-18362,-11560,-1741,-8237,-18021,24640,-31307,-14995,-26041,-30791,6982,6308,4569,-23781,29378,5447,-28257,-16588,-23282,2140,11817,24858,30085,17138,1785,22734,-7888,1857,3887,23639,1878,-10881,-9714,-10495,-21
90,1272,30260,-3918,4581,22481,-8757,371,2927,-30037,10224,-16139,-20464,21431,-32422,17698,17960,-5740,-1914,1903,-19295,-26765,-18504,23415,24482,10064,-22925,-994,8054,-15274,-28614,-17659,-28466,164,2223,7309,-6592,-19220,-28765,17155,25931,-10701,15444,-24115,31102,5533,-23123,14053,-26862,-29217,28070,23025,-9748,7784,20562,-25509,7399,-22788 diff --git a/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice28.tflite b/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice28.tflite new file mode 100644 index 0000000..7562bb3 Binary files /dev/null and b/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice28.tflite differ diff --git a/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice28_golden_int16.csv b/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice28_golden_int16.csv new file mode 100644 index 0000000..7cd42ec --- /dev/null +++ b/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice28_golden_int16.csv @@ -0,0 +1 @@ 
+27790,-28223,-5085,-26624,30770,19853,-9484,-32615,14522,17294,9637,25415,-31407,11020,30918,-29227,-31964,-6558,3664,-12840,18103,25617,31312,31686,-12779,-7983,-13035,-345,-10530,20204,32104,18455,7787,-7376,16932,17957,2458,-28588,11664,-17232,13922,32600,10452,-4242,-13727,22312,-29682,-29728,11636,21161,-1188,6270,-11848,13306,19916,28639,-8625,282,-2978,-16780,-13799,32491,5901,-2514,-29441,814,-22385,-30036,15499,-31537,8776,21593,-21544,7518,13620,-24979,-28649,-15868,-25740,-6798,-15158,-10432,-27671,-27150,-22744,31380,-203,-21615,-16568,-15292,-10763,-5908,29689,-28269,-4875,-28446,5916,-32333,-27578,22650,-20976,-16314,24704,-28067,-19305,12930,31896,-4836,20714,-11161,-3958,17033,8752,-19620,32330,21609,12009,12789,-31609,4264,20244,-25851,1372,-14010,-15221,-4532,23429,15472,4025,-4681,32591,4771,25048,5231,-8814,-21755,11450,13674,10604,-17349,2621,-13914,8461,-9933,-12321,16739,31556,23503,-7327,-2613,-20324,21443,2332,-8017,5285,-3600,-17011,3866,-30108,-17368,-961,2900,2880,17617,20631,1286,1422,-23389,25233,8549,2022,-28302,-17043,28234,-12709,20720,14602,25191,13623,31896,22883,-23659,11703,13942,-20225,-25928,1833,25995,7489,-24874,127,15376,-20935,-10648,8729,-3095,11691,-17728,16833,-26020,-7119,14952,-3228,-6696,30777,-19941,-16557,31818,-13739,-15278,4865,10021,-32740,-10928,-13465,5177,-14775,-21637,23818,-2851,-3281,21623,-28832,-8744,-2499,17521,13550,-26939,-23936,-10139,8745,-28393,-11344,-13867,-24475,14969,-25445,-12478,30524,29487,-16618,-24038,28257,-20082,22872,-19405,23759,3213,1041,21136,-14609,31338,745,-2202,-15411,12639,-24606,24938,-3855,-29719,-1630,-5109,1112,-12975,-25244,1860,-17501,21175,16954,28753,-20159,7082,28046,18416,16476,13658,-28616,3180,-4205,-24983,9734,2037,-20416,13085,-2643,5904,-29889,27128,23666,3638,-8748,-2562,-22049,-1481,-17458,20030,10738,-14121,30512,-28673,-27683,-14185,6114,22360,-22919,32747,-8754,-8431,-16541,20190,-1524,-26541,-30941,9295,-1173,18728,25148,29611,4043,-25376,14650,23393,21368,7
972,11513,-31393,-27525,-17263,26743,-32001,11367,21827,7132,26414,10267,23388,-2617,-8419,2002,11590,-12806,-26297,12897,-4790,26683,20759,32473,13971,-27716,-15088,13162,-21280,-6547,-31527,-29597,9589,-16058,27785,-17095,-20948,-13177,8672,-14373,-3397,-16622,-4508,624,13053,3708,13097,-10931,-17796,23547,25009,-21219,16810,-23293,-20251,15675,25118,-6739,-11465,-28208,-11926,-15399,-14045,26441,7159,-25186,29176,23422,-14729,3897,1569,15611,-13724,19644,18889,-2127,-1362,287,14948,23875,-20103,-9844,-32114,14862,15053,-31375,1280,-5152,-6139,-23314,-14481,3064,-8221,16556,-9109,-26893,-14353,19869,-11107,21955,25456,25576,26987,13169,30499,24687,15500,-20433,-14934,13531,6262,-10022,24517,29411,30823,17659,22021,-26614,6602,-24820,32633,23068,-3447,-18526,-354,-8451,-21864,15975,20023,-23623,-5782,32167,14462,1612,12242,-10920,15952,12744,-4659,20189,-7187,-29134,16095,11184,-28114,-23191,-16997,-23428,5161,-31008,25219,17047,15995,20262,-8634,-27336,3100,-27105,-24291,-20734,9176,13179,15031,14509,-17161,30102,4086,9298,-18230,5390,-32202,30617,-18292,-3796,-28198,18645,-17737,-779,14151,-8345,-27592,-10321,-1945,15248,25631,7202,32086,-25784,-22524,16539,-24357,-31233,13115,-26245,-20601,-10801,17866,-12185,-6195,26673,17342,-31297,-253,-32095,14295,10073,4679,-3694,-17466,29642,-6682,-8605,-13244,-31938,30298,-14291,14633,14543,23490,548,29732,-5671,32717,-4607,10399,-8053,-1021,18771,-7498,27233,-16783,13454,-2263,-3848,5626,-15910,-5773,-28601,-10575,17355,32517,-24753,-18484,17026,15494,2971,-26210,10986,-1518,-30387,-27477,-6197,4513,1194,-16297,-21744,17618,234,4777,5967,12835,6073,-27619,-24415,-32569,-5725,-11388,10085,4714,10574,9644,-16965,65,-17060,16441,23762,29920,4898,-16931,20099,-23760,-30944,-32344,3202,12354,11400,-25070,23664,28877,235,16810,-14166,-10979,-18423,31254,-4760,8431,-7901,32232,29572,-12814,-13483,-6844,27153,-11165,-7792,12587,9154,25707,17102,-11777,13640,31442,-8157,-6803,19284,26580,-31853,24960,-30064,28207,-499,15240,-2325
8,-16981,-27584,2827,-1289,22972,31297,6792,-16907,24787,-24869,23572,29932,-9006,-312,-20044,29922,-16089,7147,27910,-6982,-14469,-22025,-23887,-22620,1255,11192,31529,-23361,-6954,15729,9342,11197,-10692,-28341,22732,7423,-22869,-20903,-12662,32197,-19615,-22002,4192,-22142,-13471,8291,14056,9101,4868,8193,-3381,28470,7325,-30706,-18768,27676,25259,-16899,-15560,-8145,12959,18731,11711,31639,6720,32157,-9123,-11881,10112,-22558,-7373,-1017,-30760,23673,30984,-10697,-10,30976,-3943,3580,11171,-764,-27743,19549,-18705,10326,29224,-19737,-4916,32664,4053,-6727,27924,26786,-7957,28788,-7460,1522,-28070,-16553,-6463,-27794,9612,30389,-30536,30481,-3728,-22323,10177,10284,-25632,13678,-23,31243,30160,6056,-9585,15258,-32494,-32424,14910,21877,-28810,25579,17573,-16409,27254,11830,24153,-4370,18235,-21278,-23975,-5135,-17546,15536,24797,15267,17863,2111,-28535,-6535,-2664,29858,-2176,15209,12335,-25783,19639,10410,19503,-18918,-3439,-29735,12220,20438,15708,9496,13246,26872,10489,32410,26336,24687,27561,26851,-13079,-24975,-27014,-29575,4805,-26473,31396,18667,-16326,20712,13227,-3744,-10123,26724,20032,-24889,19038,3596,1104,31439,-29129,-28288,29088,20904,21025,-20534,6323,5082,-32471,1229,-5641,7064,-28072,28996,-18919,17360,-7749,22664,-15282,27953,4755,8252,12260,-16921,30456,-1984,-9671,30414,-25411,-6626,-8963,23685,28033,-3545,14879,-27036,-15074,28232,-4223,-8811,169,31767,-31158,31864,-24190,11787,23612,14615,-32418,-27405,4960,-24900,-25506,-4838,20829,-29376,23574,-16707,-4500,13279,2025,26770,13806,26636,-28924,27766,-13146,8392,-18966,32065,-19158,-12161,23644,-15804,-29056,11618,-18693,-8650,14651,23173,-16102,15162,5641,6361,-11131,-17625,5955,-1568,-20525,-27582,-12970,-8131,5778,-5123,-5149,-28596,-12274,10954,-30182,14210,-31783,10052,-31495,19443,-160,23706,6232,-96,-9141,13271,-16002,630,-26488,2119,-1160,14614,-32028,26107,19887,26967,-19970,-12635,-7169,-19076,737,-3748,902,3106,-22829,-10039,11947,1251,-18937,8114,12515,10485,8191,-4202,-26588,175
50,-24966,1556,-8157,26765,-10067,19866,-28228,-15395,10210,-14542,-29666,-28840,31044,-399,-31655,-13949,-7150,-11134,-22998,19986,-1812,30005,16826,3227,-3180,-13573,19592,-30600,-1300,15098,7833,14724,32470,-4242,-16901,10690,-18287,-14465,-16291,-30330,-22668,-12410,-1215,-27661,21439,9575,8410,10233,-23935,-24065,-27078,-15766,-4795,322,-20814,6357,-18925,18473,30139,-29670,-9664,-12010,9681,-30121,-23453,-835,30319,17632,3495,-29588,-13216,5504,31054,29117,-7455,4916,-29989,-12655,30714,3337,13300,17693,-9339,-25409,14733,-17976,24616,-30226,-28102,-23417,-2101,14892,8367,-29757,-12574,-13565,-6687,-20749,-10056,-30160,-21511,6368,-21029,21850,-15410,2635,21193,2505,-27052,-18122,-22518,31211,-27592,25720,10963,-24843,21829,-8025,17176,-30191,7084,-11358,3975,10558,419,3115,27712,21686,23319,-13825,-11935,-24424,4103,-9140,10070,22781,-10004,11390,-6895,18611,-9789,802,18785,-28758,11953,5471,3521,-28983,18222,-29900,-13605,-9207,-21381,-24534,-30657,-29585,-10148,20781,-2389,-24275,-10777,-26501,6164,-1374,-2164,-27189,7439,-124,31912,-4995,-1702,-7196,-24803,-30114,17484,-30794,-19355,-11808,-9846,564,-13565,27614,16837,9457,-6485,-11261,15742,-25118,-15912,20476,11176,-375,-30117,-14197,15118,1766,11663,23438,1704,-21669,-21985,10638,5196,-16976,-25265,28535,2693,19822,-27464,-1220,-20323,-25655,6406,-25606,-6625,-5853,-14168,-21805,-22462,8509,210,-14373,-1927,-27568,9645,23176,29016,14783,28652,-28880,-12062,26317,-521,-9589,-28620,28909,6680,-14747,7856,2102,3866,28236,-31000,27156,23378,-28278,23397,507,-2799,-27358,-1820,-29632,-8132,-28330,12459,30397,978,-14645,29311,-2338,-17237,9348,9814,13295,-8520,28668,16648,10937,-10750,12176,17368,28485,17497,11859,5663,-28086,8189,-16392,22215,15673,-17989,-3925,965,-8275,12888,19759,24134,-10292,4173,-30391,28145,29213,15918,1857,-5344,9233,-32098,9009,-23899,-14248,32197,9094,16965,-1546,-5109,-32251,16300,-9678,17731,-28008,26431,-22163,15754,-16683,27667,-25834,-17690,-13272,-21168,19964,11939,-20875,-281
18,-22403,30613,31410,-21890,-26277,18706,19351,-16972,8187,27910,21935,28041,32398,-14816,24277,-15489,3305,11763,13568,3539,-31089,29475,30801,-7987,-9983,-32257,17886,18942,-19202,-3025,27081,13514,-21660,-18807,-10104,20860,-8738,10696,-8742,-13571,15039,-22806,6501,30978,-8436,3487,359,12907,32531,-22206,-1569,-30769,-28855,-2643,28528,5530,24557,-25995,28902,22674,-6080,16766,-18249,-14927,29484,-29310,-4141,11540,-3203,-6066,6436,23568,9594,-15341,30555,-15171,7694,12016,-14665,-1534,-3393,-11445,-1015,-1566,-10935,14770,-27339,9771,-6202,-28433,-24743,4410,25794,28743,-31966,9258,-8113,18272,-31795,6143,24215,19841,18601,29629,17620,-30330,13350,18867,13083,31474,-32132,6049,8908,21168,23080,-8392,12013,-15860,12484,22012,-8243,-30770,-4222,13400,-9245,26447,17069,-3169,-28670,-7338,14977,5742,-18919,22375,-20517,11634,-20752,-23677,26932,6281,-11312,29280,-1444,-1774,13935,-17689,-15031,526,12369,-18194,16854,29098,19024,31240,-4302,-23732,-15692,-21451,7454,-29232,-6212,-8687,-21340,5601,18335,9197,-30119,-18221,13619,15382,-10488,13836,-5785,18154,26671,-27761,-30771,8141,27420,-17408,18497,6168,-430,-29175,-25047,-16920,-19840,-26633,15916,6244,13266,-28826,-32422,-23834,21974,9935,29466,21290,7211,15074,18646,-29115,9565,-19365,16460,15671,31272,-11814,25977,-16562,20510,30670,8471,13459,-21052,-20472,-26875,-27097,29551,-23967,-14049,19966,17334,3105,-20271,-1347,-23285,-26489,-8170,-6375,-2804,32472,20468,-25951,-30112,-21551,25490,22061,-14037,18439,20684,-1902,-32523,19687,19065,17238,894,5093,-3035,-5463,-21954,15812,26676,-15215,-3398,24500,32182,26885,-26938,-13736,29660,-11810,10410,23726,-27399,14016,-24942,-21762,29706,-6617,3478,26038,29977,18927,-7732,16210,-11851,-42,-23042,-20668,-10540,27922,31924,-22394,3213,-27107,328,10434,4150,-22334,25582,27326,26776,28632,-8404,-1425,-15646,18292,14073,-1653,-23277,-444,7233,31805,-23536,-11220,-3777,-21587,3917,10623,-10748,28697,-25840,-16105,-11442,10086,9257,-6325,-25819,31886,-14929,12903,27683
,4318,-30286,-3580,-14666,19643,-28838,-16475,19691,-18916,22552,23297,24883,23906,-13134,-19717,13229,-13196,6389,-13956,-21062,-1649,-14179,-27321,-16381,656,8464,4700,-16184,2860,25843,17376,-4700,-10463,16416,21156,-8274,23535,31436,-31280,-26721,-6187,10909,26735,-12024,-31520,-9392,12624,-11733,25626,-24294,-23640,8703,8382,-30019,20205,-15124,28403,3647,17983,-7077,-23586,-19259,-29036,24531,-26440,17579,-5367,-3080,-23360,-16737,-8802,14428,-25042,26313,-20268,-8902,9885,-22827,-24807,7841,-27890,163,-17748,31911,6685,18994,-6829,-5426,-32749,-11145,20445,-9742,14345,24520,-1873,30476,16467,32102,-13920,14945,30330,18190,-7128,31477,32610,-876,21097,-24710,6362,25480,-18028,23550,-30329,8502,19523,-17917,-30128,-549,-4368,-28968,-29863,-19512,6419,5403,28480,621,26807,26894,28166,-20986,-14796,-30209,-32298,-18741,-30284,-22539,580,-14567,14649,-7472,11760,-24075,24450,10338,32640,-23439,-32592,13885,-27475,22379,-7229,2855,-31107,-20064,-12895,28809,-8116,-14581,2632,-2324,6658,8303,-1354,23227,-1663,-17760,-22730,17320,-20919,-314,-25132,31913,26051,-32111,-18276,-8487,-23557,-14319,5994,200,-16912,-32324,9711,-1137,3001,22795,-22433,-32554,-29107,28081,-32003,-9238,7853,29038,20407,25862,15058,-19618,20988,31853,11200,-1377,30392,12464,-10142,28593,-18593,-12559,25996,-25652,-5561,-9446,-19577,27572,30261,22112,-10059,23384,14916,16515,-617,29022,-20112,28241,18314,-30437,11546,-3841,-17106,2552,8593,13909,-24035,6367,-7872,-28076,-11077,-1274,30389,17422,-14681,-25015,-4981,-4086,-14955,-27857,-23933,-7599,-14231,16743,-23935,12365,-13086,-25972,-13740,1168,21300,28421,-2645,-32084,24746,-13076,30983,17231,207,7833,31007,5326,6842,30951,-1067,-2358,8405,-27775,7131,16413,-9971,-647,-5122,-21109,16487,32680,-26636,7520,-23691,-12889,16041,-10532,15838,-5095,-23653,20371,-14115,-23119,-5542,29853,11126,-14706,-14702,-22666,11750,21957,-28406,25355,29859,-14202,2664,5381,16184,8544,16255,-11142,13604,28603,-16748,-30081,-6497,24274,521,12380,27414,6174,2734
1,28393,22093,-32252,30887,-1811,-32619,11384,-19995,12349,-31601,-31326,11958,-24523,15449,-19433,-32498,18185,13501,20932,30537,6840,-10410,-11576,-14630,14542,2904,-8862,7911,25345,2507,-13597,29533,28507,-27826,5589,15921,19478,-121,23198,4638,20958,-5385,-27019,-28036,-31456,-16940,-31417,-5325,-28943,-19575,-8982,17265,16842,-25469,24115,-25371,15625,-30555,20319,7993,-15861,-20255,-7004,-27712,26154,32392,-12414,31608,22438,-7800,-29935,-28727,-3487,-24077,-22565,32123,-3162,-10937,29093,21279,19556,26495,-23344,17132,19717,13931,-32678,25340,-6180,15280,-25372,-23299,30176,4758,-725,7407,-28680,-25795,7172,-2823,13249,16858,32700,-25776,-13987,8167,-17286,18082,32514,-22581,-211,-19968,24749,16315,-5150,9717,-23815,14621,-21946,-11825,-2420,-19504,-8044,-28047,-24298,29274,-16573,16950,6238,32344,-32708,-18834,-18448,941,-1221,-2883,-11025,13523,421,14582,-2238,3113,-32598,11976,-27084,15076,14208,1158,14497,28723,29896,-22591,19908,-32419,-11022,-11935,16835,16925,-9322,13533,30983,-6784,16542,30806,-5439,31714,3875,23193,-11192,-27383,14116,31864,-17673,-11865,-20864,25783,-10069,-22556,-3580,6828,-8887,-21803,-6882,20500,-21988,-280,-12029,-1509,24923,-5126,-3528,-10004,-27773,-24060,-12879,-6056,-24184,-308,14905,15408,26995,17808,2012,18834,-21964,16224,20428,23415,1280,-5192,-22811,16624,-15499,8268,-1063,-26860,-342,-19309,3670,-24831,19294,-10103,19290,14407,16369,-8892,9498,-28379,12757,24925,-11139,-22270,-18768,-18327,-2858,1619,-21070,22847,-28817,13149,12011,-30900,-10147,-23753,32371,1815,-22452,1151,-17431,-21871,13716,3966,-21293,12578,14827,-19324,-12794,-6545,645,27578,28214,10621,-11074,25256,3309,-1024,-31475,-9914,-21316,-19889,26495,-8615,6389,-22633,-30118,26234,29173,5542,10994,-25972,-26563,-31274,-7999,-26069,24834,-21692,-24481,-13479,-16799,3314,-15832,-31238,5328,-29114,-2256,-24423,-6813,-9880,-18331,19951,-18544,-4493,-267,-25818,28377,12829,-31039,-2041,-25090,-587,31668,-11002,-13401,-3735,-16294,32536,20127,9657,-28052,25903
,-17815,-30809,2456,-13855,-12707,15561,-31760,19916,-25452,-17785,9210,-22974,-31882,16282,25264,-5943,-27828,-22886,28588,29246,24657,-14929,-4260,-27658,-15712,7286,5013,-31350,32107,-22995,10683,23387,-16272,-11059,2089,-8266,-17448,20989,-28898,17526,32198,5621,-16831,5870,-27228,-1208,-17442,-24482,-5054,-8820,-32650,16791,1241,-11969,11348,-14991,9579,-17394,17172,-943,-19136,-29335,-5847,-23168,-607,6745,20008,11196,-3686,17979,-17060,-27689,-1036,777,-30926,1500,13006,25527,25537,-20442,6383,27260,-3390,-4735,-21617,-26333,-24450,-15154,32220,-3751,814,21991,-8408,7006,-17404,32401,12234,18842,-21000,-27491,22622,15498,-17502,-11452,9097,-9320,25367,-28290,13819,12906,-6231,-25865,18865,-23510,24645,-19790,-32099,19337,-379,-12574,-25763,-29394,-19617,6532,-19633,-16578,-18831,20978,11100,-5552,-27749,692,-19491,-2148,-11059,-6401,-19883,14636,26772,20780,-23683,9286,22233,-16985,7664,-27150,-17066,-27144,-14644,28795,22248,-86,5819,24986,-27811,30126,-2906,-25089,26634,17521,27328,-10582,-13655,-7482,16758,-10068,25210,-28709,26589,3328,13536,-18302,335,2632,-29023,-16215,-29755,26541,18881,30939,18938,4275,-11420,-8608,27921,-28367,10994,1648,5400,-23025,8278,-14636,-3363,-3134,-30667,6793,-12198,-6181,-21581,13348,22406,14284,-18826,-1170,-18349,6553,27145,4160,26819,-1117,-2880,-27902,2131,-9898,-9308,-26409,27604,30998,-17340,11625,911,-20209,-6989,16243,-29097,8514,17520,-29671,-2178,3898,5108,10405,2244,-21323,28917,-12868,10161,8175,-17039,26822,-10947,-18742,-5878,-5712,-14472,-18740,-4148,-11434,-35,22565,22389,-32744,10711,30146,32211,22940,2042,16539,6472,8717,-14849,-13303,23680,-12035,19539,13077,29257,-27419,5463,-27850,364,14462,-18632,-17413,27901,-13045,26619,11044,17305,7049,12639,-3601,24080,-20563,1552,-21089,-12942,-21164,23124,17833,16446,-30635,12513,5161,-23023,1997,997,31413,-8188,23865,-25262,-24365,-13744,6792,-13229,21848,25938,13399,-24582,-11632,23700,6346,-30394,10510,-7982,-11756,-21447,16337,-21153,-13046,-11470,4567,-31253
,-14507,29070,-12831,2003,-8770,-13758,-8687,10113,12976,-23089,17838,-2598,-8131,-24665,-5474,2587,-23170,-19149,-15728,15340,17817,-13409,20864,15145,-29900,-31853,-15648,25374,-5338,-3694,32493,-22468,-4630,25418,16739,1433,13194,-19099,-1791 diff --git a/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice28_input0_int16.csv b/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice28_input0_int16.csv new file mode 100644 index 0000000..9505d35 --- /dev/null +++ b/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice28_input0_int16.csv @@ -0,0 +1 @@ +-23501,-21779,-14547,-27056,-22558,-10348,-3552,-31175,25682,21836,-13665,29047,-11685,18903,23343,1445,-25680,-8868,-32345,26790,-20528,-20876,32358,27114,16281,5855,-22604,-9135,18198,18129,-11699,-16455,-9347,-4769,-9639,9457,-28801,-30593,16646,-23643,28147,21854,19983,-11345,-4039,11343,-18176,-10947,-19572,-12731,30217,-26763,-25664,-22654,22792,-15361,14147,24111,-14719,2430,-4565,-3951,13591,15405,17678,-3653,15653,5789,-30953,-26327,712,-8398,-28140,-3038,-2299,11706,14280,-4946,10277,-26619,7257,23276,27993,-5887,-31598,-10543,31787,-18353,-32692,18310,25211,6404,-6929,-8141,-11736,26204,-18290,4518,612,31417,28495,6052,23628,20202,-11201,16597,-10346,-20669,24243,-25148,14831,-15641,17604,25125,2607,16817,21310,-17132,-32488,-32008,-21405,-16159,17413,-30026,5042,-4340,26280,-16001,9758,12377,2739,-18837,-24958,-26273,-10932,-9174,-1012,-10124,-12721,14880,-2422,8621,-12228,-21185,-16541,-31003,-6808,-8280,-25894,4083,-24960,3447,-5163,-27117,27221,-10512,6490,15503,-5095,2147,-2236,29915,-25949,13843,22149,19065,-14071,-9373,-2658,17364,25138,-22064,-14971,-22772,-4206,-6792,23103,1003,219,-12035,8143,-28806,-31194,-6810,20030,27079,23104,-13923,10237,5650,-22494,-12257,31438,10389,7653,-14238,-1496,9967,-31380,-15391,16616,-5870,-9558,-22868,-16839,24435,-22687,-1697,2489,12713,15963,19294,-8604,21252,-11226,5474,-17656,-32517,-5523,10877,-231
33,-28111,67,28154,-9244,5759,24248,-5018,2049,-30715,-26758,-32200,13057,-27907,10786,14255,-2655,4520,9528,25815,10774,27628,-30691,19946,24423,-20105,-13708,-25993,25115,25584,29359,6426,-26064,-26228,6163,-26405,16922,-14338,-23624,32315,-9086,-3186,-20512,3691,15646,24384,13735,-11488,-32098,7615,10456,-350,25134,-31924,3799,-22484,20839,2392,13656,-12776,14011,24572,-32278,-32388,-13512,2449,15676,14816,-10473,-30690,-6191,27603,4976,23953,-10119,-24854,-13039,-20158,5848,-15845,-23927,30625,7014,16252,-12799,-10462,9904,-4304,-3909,-23000,-9288,-17965,-5584,5597,-2504,-12876,-27994,-30947,20058,-25511,-29977,3963,-22492,4655,3988,26246,29591,-1580,-9457,-7753,-32242,-17649,2726,-14331,-19217,24808,12449,-15052,-17389,7745,29171,31491,30973,-8622,-26502,-19916,27262,-26955,-16186,-4239,12477,3459,28081,-5860,336,28839,20041,-9886,2467,-3389,-8340,-2164,-105,-5813,11542,8938,16183,15686,-9855,12751,-3188,-25045,27028,-8272,4742,-13204,4771,14850,31702,-15771,-15569,22575,32434,19832,16564,-14446,8464,-40,10629,7625,-14054,-9049,-11472,-32690,-11275,-24230,13057,-32342,-16408,-3424,-11120,-14851,-25499,-5223,2140,16202,3845,17234,24029,25952,7472,25411,-16053,-10932,-19841,-19273,5171,7708,6433,-24704,-31893,292,10584,-6905,12759,13051,-5318,-6616,-14741,-1108,-17018,-22789,14080,-29923,-3871,32210,21957,-2981,-29023,-11392,-24129,-31596,-20827,-2241,-32413,12676,9503,2855,-3176,21301,-30123,7977,28755,13863,-3212,-21569,-29853,20358,23347,23235,14111,32345,1930,-29305,-4115,5738,-18960,5512,-10463,32294,16625,-3685,9427,-26214,23644,14560,-13707,15719,9688,9420,30161,-5672,-9426,-5088,-12680,-29520,23615,-21658,-28056,3769,-6412,19770,12600,5854,22756,14684,17549,-15188,-22402,22470,22411,12363,8151,7704,11560,14579,-16030,3947,-61,10755,-29925,-21422,15902,-15706,12413,30499,-285,19677,20616,-21348,18979,2183,-15899,9022,24380,13043,8959,-25444,25188,16360,-23914,7843,4462,-10399,-7970,1993,-953,12347,-22753,-28472,11920,4477,-2468,-29466,30604,-25026,-2836,-1
3175,-3749,-13187,-26066,31114,17278,-3352,5618,-18016,26731,26763,-20387,-32023,1770,-24254,13411,25433,21560,26403,25234,-9774,22026,-26274,-7019,-25029,24082,-22898,391,21844,27033,-31344,12549,16956,17571,-1081,25202,23602,23310,-31735,6851,-30837,4257,12321,-19051,-29997,-5798,-15740,31169,-9210,-19194,10046,9070,123,27272,22975,-31942,-7100,6044,-32454,15890,-4078,11032,8578,20331,18290,-28104,23290,-21314,8416,11077,27471,26565,9578,-2429,-21360,23044,15977,-19333,28905,-14258,7283,23037,-14601,31247,7843,27806,-2760,-21761,19897,17167,18688,23807,-24448,1472,-32617,31760,-12007,21766,20153,-21838,2304,29944,-16100,-18721,-13588,27690,107,-12755,30793,-30789,-32711,-21970,-4175,27215,20666,6105,-30737,19051,-11432,9425,29053,9600,-3518,14194,24314,14165,5154,-13783,-24525,18166,28722,-128,-25676,9129,-14416,-18405,15554,16330,27057,-14144,-8299,-16403,-13682,-16850,18336,-27127,-2507,-18888,15535,-23105,-26828,-1696,3591,-1408,25821,28089,-444,-6623,-20718,8579,30471,18450,-32504,-8884,-21110,2855,-10726,-23742,-29256,20428,31803,11681,853,10944,-8362,2778,-1027,-15532,12394,10571,13150,15028,3798,26158,7183,11036,22915,12340,21773,10287,-9907,32384,-9698,-22952,5462,-9243,-11744,-1982,25004,20925,-681,-24337,-31778,20664,-1430,-23535,23956,4580,10296,-22827,15009,9872,-19095,27592,30622,-25726,12228,-27895,-4428,16721,12704,19252,25465,12209,-24015,509,-20960,31016,25666,19563,29864,22852,-8351,-14795,1273,-14547,-18666,26675,23097,-12703,-9223,-3618,-31721,-13465,-8938,11767,26241,-10137,-28123,11030,12442,1188,-5991,10528,15785,28545,-21618,32133,-6430,-1263,-31332,-28020,-22148,13498,27761,-5950,2590,25981,14780,6755,-27245,755,-27424,-5150,8579,12486,-7637,27636,-27234,-995,-14638,-25781,17313,-336,18925,-13256,-3552,2310,-26754,6231,-13355,25511,17301,28041,-6753,4484,11088,-24084,342,18947,4855,16991,-25647,-10879,7461,-25384,-17666,-5007,-23476,-30256,32167,28075,23247,-26279,-28037,-24069,2953,-8402,28796,8515,-10903,-14492,-13079,-11668,-3371,-2079,
-25748,-6965,-4690,19113,12504,6617,20934,-21327,16397,-12513,-13405,17684,-731,-32583,2476,1887,-8238,-19891,-14957,22047,20001,27435,-155,-28463,-1849,-23089,-23086,21330,669,8358,30070,-15498,22266,13884,26569,-14758,-15341,-18910,28849,-23828,31504,11269,-32517,-30581,32719,-24870,-32047,2218,12372,-13130,-14152,-17729,18723,5846,18046,-23661,-18518,26315,-7824,-22667,6145,-8554,16329,-2648,-23821,19153,32105,19790,-10923,-17919,32015,-5252,3497,-4215,-3842,30931,-31415,-4004,-21193,-9322,-10438,-8730,10572,-6168,-30442,23282,-21965,8095,-2654,10748,-4501,2297,-32556,15378,-7,3761,16830,-4375,9848,-16965,11856,-6009,9116,-9057,6047,14899,-18131,-23674,1862,11082,-1178,405,-31299,-31445,11397,-9786,7608,-30734,-9397,-13381,18943,15695,23606,21515,8857,-9339,31950,-29355,-13000,-27818,-23097,17848,7867,-16609,-3033,19473,-4951,-9533,-15025,-18450,-11768,-3722,-25776,-16421,-12809,-10955,-7839,-11449,7454,-23101,21708,30274,-21666,-7915,-13837,10347,-16067,-21840,-32683,-24562,5990,24108,22098,7964,10413,1875,6067,-20590,-20883,-32361,25979,-20453,-28270,-970,29419,-31845,-11146,-8419,-32548,-11455,9580,18723,-27119,-9957,-4375,32079,10567,20214,-827,16949,12435,8110,-13844,14614,-27523,18749,22729,5189,833,26034,-32000,-22628,27326,-8138,809,15686,-26368,26600,3103,23240,-10685,-7960,24899,-29968,22420,-32543,21030,-4576,19246,5447,3722,-21999,-23271,13823,-17149,-7158,-16344,26051,-16995,30472,-7181,41,20707,12173,-3591,-17081,-20954,-9372,5415,2164,427,8593,10610,18141,27225,23338,2646,-10399,-19774,-21269,29352,-26699,25758,30740,-24024,2916,11129,731,-29557,31859,-10136,16948,2334,-10081,11452,-26565,-14743,-16387,-30952,28713,18261,-31039,-20085,14214,-17721,11564,10309,-25770,14540,-15407,-3527,8322,-4080,-10852,-31650,22680,197,-26299,23488,18965,-13727,-7821,-16213,-24906,9568,-1672,3152,-21892,-11912,26467,-18577,-25594,-25931,13363,15144,-32610,-25387,-30887,-5418,-14760,-16621,-24341,-24056,-12925,-26016,-7385,-23297,-30092,-16075,-27201,-20542,29442,-2
1488,-29922,-28063,-26476,-25272,-15396,-14295,-30991,27011,5414,-28277,-21475,20283,-9711,-28652,27311,-4989,-24912,-26077,-14786,-27767,425,-4726,-1053,22223,29870,-8045,-8027,5668,-32148,26006,-14438,10875,-17669,-12926,-17313,31543,-19086,18057,17762,3468,21250,10848,3741,-4080,-12478,21914,-30411,-10220,13681,2779,26572,-14758,14610,-2394,-7907,-20834,13570,14971,12803,29223,-74,-15147,9330,12871,-9868,-5214,-27506,-12359,19223,25561,-5266,-30411,-11059,13923,-5931,19735,-17881,-18704,14331,7843,13074,8218,5136,-25218,4209,29867,23508,15132,-18487,-12813,32143,29843,-6749,29730,-12227,32296,-8397,11864,-10614,-9048,-31805,20967,-23506,4639,-10768,-30249,17588,5321,-6536,-12929,-10447,-23712,-5923,-15390,-6315,16471,-22865,-14885,-16839,-29339,-18121,18097,16745,22141,9649,-25226,31646,20707,-6632,3691,19381,-30497,-23678,-9571,26526,-10898,13069,31682,-13313,-25745,-30785,21333,5169,30289,-1360,501,-12786,-3715,18409,-22379,6568,17328,-28782,4424,-3404,-4510,3440,-1632,-22027,-13150,-31382,-6422,9374,-2877,-11721,16483,-18978,16887,-14798,5564,-12570,13737,-17335,-17985,-8291,-15753,-16427,-14044,-12499,14938,19651,26173,8981,19235,10822,30386,16826,12041,20784,-18415,-13040,14450,13689,-10864,-23510,7784,-28653,9282,-22573,11119,22634,-9701,20021,-27346,-8219,-20580,29439,25901,-29540,5651,24385,8833,-9809,-23549,15155,-24242,-27557,32308,17920,-1528,26939,31736,-2109,-2154,16275,-10140,-14454,-10792,1088,16663,6173,17612,21325,20591,11256,-6755,-9269,2432,9657,-16532,-19011,24050,-2524,-26193,-17222,-24517,-10495,-14428,22859,7814,-27923,15869,-18444,1279,-22081,-23933,2948,30886,12376,8616,1649,14692,-25944,-14443,15255,10204,-31898,-28389,29868,3861,-16955,19166,10454,27533,-2956,-18023,5155,6749,-863,-7263,5791,6002,-1961,19061,23268,26788,18725,-23407,-26772,-24806,5163,-10738,-15112,-1557,32380,27773,-29243,-15046,-12277,-32058,-3482,25198,-17578,22274,-10371,5034,32478,-18801,-22308,1169,10444,-68,-6944,19434,24558,29573,-17042,-9927,-25070,-31306,25586
,-22162,-25200,9416,-18540,9961,18117,12361,-23344,3983,-25911,27968,14350,30650,-10559,2921,-6468,-16058,1412,16268,29676,14526,16345,16814,-10128,-7917,-1015,-7673,8885,9158,-2297,22600,4621,18492,-23555,18111,14391,-5376,7837,4999,-27332,1211,9615,-13615,-26884,24241,-25474,-18393,18119,-19152,1036,-31201,21463,-29935,-10924,-22480,-25571,31337,3216,14785,-16216,-13797,-31526,27208,25962,-25733,-4770,16749,-2616,25793,-10957,20495,-27570,14850,-4167,-20789,-9539,5618,25097,21758,-7596,31769,28452,23255,-28031,-23254,-18993,16333,1914,-3751,-5063,-12410,2377,-29549,-3471,-19278,-3563,-18152,17534,14027,-6809,-26203,30680,17362,20826,22988,-17771,-10077,-10781,27582,-20208,8425,28337,31381,8006,25693,9134,-8451,23946,-5259,19364,-30842,-9919,16871,-32697,14647,4469,-17083,-638,6253,-31801,20442,-1688,-29764,-23482,-6627,-24009,22887,20890,6175,2253,-1358,-1304,-15450,-31875,-29895,6307,7312,-13487,17118,-12971,-19983,-495,-10463,29762,-19246,15257,22368,18323,7527,-16661,25162,13692,-27303,-27065,-6363,25518,27249,-22133,-9755,27998,-24536,-27902,20699,-15411,25727,19851,28865,-14623,32154,-25558,-27280,-27989,-1018,31397,1772,6813,-29793,29234,14885,-8369,-31907,-76,11820,15775,-1352,-7045,-11168,-14066,-9445,-21713,-13772,5021,29081,18080,-5149,-5699,30942,-32343,-28109,-27047,-185,9848,-286,-19193,-23614,22013,-12028,3893,-1775,-6213,-30990,1097,30922,-12154,19446,7513,-12382,-29640,12832,24081,-13085,-25364,21650,-6638,-32340,16244,2566,-12659,-10111,30095,22329,22634,2684,-19006,21473,-10679,9020,5355,19413,-28946,-7171,13265,8119,10505,24904,-23811,5816,-6404,-14246,-16602,32383,13112,-6778,16496,21685,-8081,-7777,8981,9243,-10665,-17353,-19983,31901,12589,-16579,15857,-29205,-26313,23109,-3079,2617,-27654,-10635,15300,28470,-30877,570,-30676,22504,-30626,5960,20039,32257,14869,12533,18535,18667,-24869,1641,-30773,-30287,-8932,-6297,-29966,-24993,7726,-15873,12517,-21594,19003,-1991,-8853,-20546,28324,2717,-28966,27484,10168,19519,-29420,-11536,-5268,-31566,-
32302,5883,-7620,-23246,-550,23150,-19251,15569,28982,-19832,-27779,24261,1543,-6198,-1022,22257,3711,23875,-22850,29079,27008,30344,16506,30329,26916,25388,5992,-1136,-4015,24177,15980,-24063,3987,-25999,-11967,-19539,-28459,16845,-6814,-23784,16135,-20575,-31428,30993,16914,-17272,28555,-18870,130,-2040,-29040,-30415,-9822,-15279,-14191,-28952,7106,-29372,-19789,6405,11112,-29548,29920,12998,-29864,-8772,20489,19281,15650,20256,18947,-32553,-19718,-31513,8766,9051,-13509,30199,-3131,-18898,-12579,-3375,18332,-11950,-20812,-15040,-24617,-16093,-31464,-30704,22144,-4252,2167,-28228,-2420,-28481,6638,15922,24678,-12229,-11454,27279,-1036,14141,8782,-25261,-17804,-330,26167,-13501,27444,-4917,-23999,7420,19159,-3961,27918,29587,10755,18057,4911,29801,-1958,20384,-5579,-13824,26767,13490,22515,12153,14956,22408,-19440,7666,31950,-4697,15190,-19336,-5095,14198,-12512,-28116,-7086,3635,-20741,-17267,-30218,-140,28596,-4969,-26527,-7425,4619,-8688,8392,1767,-16983,23232,22140,-24055,-9501,31246,-16864,7539,-7601,-32570,-29881,30905,2889,-10447,-20170,8666,1902,10791,9780,-26555,-2489,-16739,10550,-5345,-20711,-12515,-19898,31730,-11367,-4328,29719,-26795,1262,28136,-7398,-26225,13237,4555,-6024,7556,12213,-31325,-8448,25963,25306,-12170,-27282,227,10301,4061,15477,-16314,24631,-2879,-19779,17093,-32299,14041,-25186,-1038,-3032,-11207,13563,28373,11764,6577,-29372,9641,13172,-22251,-2640,-2130,18267,31714,27782,9507,15928,-25087,16395,15849,13011,9007,1432,28181,-15438,-23856,20576,25522,-32309,-1297,14260,-11156,31745,19451,-11049,-25314,-5025,-21564,-30830,-9968,25043,3944,-4904,-24885,16327,16955,30569,-4149,3964,-4924,25633,15793,30348,18000,20697,-4736,14534,12846,-12938,6287,-19688,32460,31960,6571,-29542,-5748,29246,26759,-20060,3388,-19106,-6376,19661,-22058,24424,-31233,-495,31340,10166,30611,17346,22967,11473,4256,-24422,-29474,-9432,-10091,-12333,15984,-4059,5266,-12159,27887,-22021,28581,-6067,-25818,-23681,-26312,19184,-29421,-28504,-712,-10018,9033,-7876,8152
,-29598,26515,17683,31384,-5410,31081,-10715,3541,1048,-24546,23182,13992,-30385,4409,-16710,-13135,-28038,15397,-15671,-32244,-15339,-9543,31789,87,-14267,-21418,-26308,-20506,20596,31056,25579,29760,-236,8631,30776,31668,-9981,24551,-13068,-1368,12566,2880,31560,26650,-9051,-14390,-13991,9439,-2265,5265,-3345,-6935,-18580,-8635,1698,-30292,-31509,11051,9749,-24719,-9957,-17934,-20215,-30010,9732,-6470,-16875,-9118,30865,-16367,26393,-27581,-13094,1069,270,-12584,20644,-23463,-29730,7517,5780,24247,11788,27945,-14458,-6422,8858,-25191,2564,-7261,26297,21137,-26425,11948,-15463,23525,-10369,9864,8568,26774,6561,-27431,26530,8188,-19593,24891,-4339,-14719,15798,25118,-28765,-30091,-24132,20478,-27595,12655,-16928,5377,-29034,26626,-26948,-26495,11991,8826,-31205,-1504,25280,-22661,16797,10862,-32671,-17607,-17178,16910,-11731,902,-27738,18965,2587,-20645,-5493,21508,25732,-9106,16307,5398,7913,20914,-30763,22540,7466,-30696,18423,16707,19296,21208,16465,11793,-27537,-4521,-8069,-139,-10351,5287,21386,30258,-11546,7509,-16547,8397,5717,570,29438,10731,5457,-5812,23992,-17908,11232,17113,-29807,30005,12609,5780,-18280,12289,-5703,-17015,-9654,-1246,-1511,14313,-14734,-24106,-23753,28441,23457,14187,5496,-32519,21667,15915,-11675,-8136,-8735,26752,-7586,8006,21104,4832,23608,18481,-2990,27803,553,6081,3354,-4905,9714,2557,-21865,11642,10588,-9626,31897,-12528,19930,3458,564,-14024,29851,20112,-27943,7234,-13835,23040,-8861,-25115,-19735,26379,-26244,-2629,-10008,-25549,-17895,-30812,32760,24285,17798,28992,1173,-1446,-11468,-17314,-14197,28846,12792,4637,-12987,21620,12437,1309,12092,-1091,-32181,4233,12901,8550,28007,25247,22740,2509,19693,10687,26814,2848,15559,-30568,18699,-17853,5403,31679,1810,-21762,19319,-27361,15378,15012,20901,-31507,3328,-10100,4419,17635,-29864,29134,-6659,-14641,-27005,-31831,11232,-31197,-12405,21156,25314,9542,-21033,10188,-16099,-27207,13043,-21083,-6150,-4350,-32573,12023,-5182,-4060,-13040,32257,-19371,-3965,-4066,-26102,29583,10527,215
21,22168,25463,32549,5282,29665,-25703,23626,21751,2792,-3966,27791,-5838,3793,31478,4396,7016,6706,21979,-19418,-24693,-22190,-2530,5125,12381,7576,29701,-6260,-26096,9622,-7588,3740,-18219,-3369,22898,4973,-29699,-16222,-21484,-24172,32176,14316,-11415,-26748,13169,-22868,-6769,2374,27506,-13979,-26859,8335,-25635,2556,23464,-32591,-15897,-7671,15066,26941,-28193,-32234,-28660,-19639,-5531,5342,26057,7919,14956,21266,-25025,-12535,8354,18937,-22084,6139,17338,32260,-24793,15778,23596,-13543,16092,21913,-23247,-10932,-453,-21186,-20925,7258,16517,-29124,20588,-30045,-10512,-10796,25797,-7771,-18896,17449,-9612,24822,-15711,-5428,13681,6615,-28202,1335,-15070,9092,7770,-9997,15925,27323,-3090,-30840,31588,-19234,-31930,25788,-13754,-13993,-26625,-2470,19592,12383,9942,-24975,-5819,15414,-9386,-22818,28851,-16826,27790,-28223,-5085,-26624,30770,19853,-9484,-32615,14522,17294,9637,25415,-31407,11020,30918,-29227,-31964,-6558,3664,-12840,18103,25617,31312,31686,-12779,-7983,-13035,-345,-10530,20204,32104,18455,7787,-7376,16932,17957,2458,-28588,11664,-17232,13922,32600,10452,-4242,-13727,22312,-29682,-29728,11636,21161,-1188,6270,-11848,13306,19916,28639,-8625,282,-2978,-16780,-13799,32491,5901,-2514,-29441,814,-22385,-30036,15499,-31537,8776,21593,-21544,7518,13620,-24979,-28649,-15868,-25740,-6798,-15158,-10432,-27671,-27150,-22744,31380,-203,-21615,-16568,-15292,-10763,-5908,29689,-28269,-4875,-28446,5916,-32333,-27578,22650,-20976,-16314,24704,-28067,-19305,12930,31896,-4836,20714,-11161,-3958,17033,8752,-19620,32330,21609,12009,12789,-31609,4264,20244,-25851,1372,-14010,-15221,-4532,23429,15472,4025,-4681,32591,4771,25048,5231,-8814,-21755,11450,13674,10604,-17349,2621,-13914,8461,-9933,-12321,16739,31556,23503,-7327,-2613,-20324,21443,2332,-8017,5285,-3600,-17011,3866,-30108,-17368,-961,2900,2880,17617,20631,1286,1422,-23389,25233,8549,2022,-28302,-17043,28234,-12709,20720,14602,25191,13623,31896,22883,-23659,11703,13942,-20225,-25928,1833,25995,7489,-24874,127,1
5376,-20935,-10648,8729,-3095,11691,-17728,16833,-26020,-7119,14952,-3228,-6696,30777,-19941,-16557,31818,-13739,-15278,4865,10021,-32740,-10928,-13465,5177,-14775,-21637,23818,-2851,-3281,21623,-28832,-8744,-2499,17521,13550,-26939,-23936,-10139,8745,-28393,-11344,-13867,-24475,14969,-25445,-12478,30524,29487,-16618,-24038,28257,-20082,22872,-19405,23759,3213,1041,21136,-14609,31338,745,-2202,-15411,12639,-24606,24938,-3855,-29719,-1630,-5109,1112,-12975,-25244,1860,-17501,21175,16954,28753,-20159,7082,28046,18416,16476,13658,-28616,3180,-4205,-24983,9734,2037,-20416,13085,-2643,5904,-29889,27128,23666,3638,-8748,-2562,-22049,-1481,-17458,20030,10738,-14121,30512,-28673,-27683,-14185,6114,22360,-22919,32747,-8754,-8431,-16541,20190,-1524,-26541,-30941,9295,-1173,18728,25148,29611,4043,-25376,14650,23393,21368,7972,11513,-31393,-27525,-17263,26743,-32001,11367,21827,7132,26414,10267,23388,-2617,-8419,2002,11590,-12806,-26297,12897,-4790,26683,20759,32473,13971,-27716,-15088,13162,-21280,-6547,-31527,-29597,9589,-16058,27785,-17095,-20948,-13177,8672,-14373,-3397,-16622,-4508,624,13053,3708,13097,-10931,-17796,23547,25009,-21219,16810,-23293,-20251,15675,25118,-6739,-11465,-28208,-11926,-15399,-14045,26441,7159,-25186,29176,23422,-14729,3897,1569,15611,-13724,19644,18889,-2127,-1362,287,14948,23875,-20103,-9844,-32114,14862,15053,-31375,1280,-5152,-6139,-23314,-14481,3064,-8221,16556,-9109,-26893,-14353,19869,-11107,21955,25456,25576,26987,13169,30499,24687,15500,-20433,-14934,13531,6262,-10022,24517,29411,30823,17659,22021,-26614,6602,-24820,32633,23068,-3447,-18526,-354,-8451,-21864,15975,20023,-23623,-5782,32167,14462,1612,12242,-10920,15952,12744,-4659,20189,-7187,-29134,16095,11184,-28114,-23191,-16997,-23428,5161,-31008,25219,17047,15995,20262,-8634,-27336,3100,-27105,-24291,-20734,9176,13179,15031,14509,-17161,30102,4086,9298,-18230,5390,-32202,30617,-18292,-3796,-28198,18645,-17737,-779,14151,-8345,-27592,-10321,-1945,15248,25631,7202,32086,-25784,-22524,1653
9,-24357,-31233,13115,-26245,-20601,-10801,17866,-12185,-6195,26673,17342,-31297,-253,-32095,14295,10073,4679,-3694,-17466,29642,-6682,-8605,-13244,-31938,30298,-14291,14633,14543,23490,548,29732,-5671,32717,-4607,10399,-8053,-1021,18771,-7498,27233,-16783,13454,-2263,-3848,5626,-15910,-5773,-28601,-10575,17355,32517,-24753,-18484,17026,15494,2971,-26210,10986,-1518,-30387,-27477,-6197,4513,1194,-16297,-21744,17618,234,4777,5967,12835,6073,-27619,-24415,-32569,-5725,-11388,10085,4714,10574,9644,-16965,65,-17060,16441,23762,29920,4898,-16931,20099,-23760,-30944,-32344,3202,12354,11400,-25070,23664,28877,235,16810,-14166,-10979,-18423,31254,-4760,8431,-7901,32232,29572,-12814,-13483,-6844,27153,-11165,-7792,12587,9154,25707,17102,-11777,13640,31442,-8157,-6803,19284,26580,-31853,24960,-30064,28207,-499,15240,-23258,-16981,-27584,2827,-1289,22972,31297,6792,-16907,24787,-24869,23572,29932,-9006,-312,-20044,29922,-16089,7147,27910,-6982,-14469,-22025,-23887,-22620,1255,11192,31529,-23361,-6954,15729,9342,11197,-10692,-28341,22732,7423,-22869,-20903,-12662,32197,-19615,-22002,4192,-22142,-13471,8291,14056,9101,4868,8193,-3381,28470,7325,-30706,-18768,27676,25259,-16899,-15560,-8145,12959,18731,11711,31639,6720,32157,-9123,-11881,10112,-22558,-7373,-1017,-30760,23673,30984,-10697,-10,30976,-3943,3580,11171,-764,-27743,19549,-18705,10326,29224,-19737,-4916,32664,4053,-6727,27924,26786,-7957,28788,-7460,1522,-28070,-16553,-6463,-27794,9612,30389,-30536,30481,-3728,-22323,10177,10284,-25632,13678,-23,31243,30160,6056,-9585,15258,-32494,-32424,14910,21877,-28810,25579,17573,-16409,27254,11830,24153,-4370,18235,-21278,-23975,-5135,-17546,15536,24797,15267,17863,2111,-28535,-6535,-2664,29858,-2176,15209,12335,-25783,19639,10410,19503,-18918,-3439,-29735,12220,20438,15708,9496,13246,26872,10489,32410,26336,24687,27561,26851,-13079,-24975,-27014,-29575,4805,-26473,31396,18667,-16326,20712,13227,-3744,-10123,26724,20032,-24889,19038,3596,1104,31439,-29129,-28288,29088,20904,21025,
-20534,6323,5082,-32471,1229,-5641,7064,-28072,28996,-18919,17360,-7749,22664,-15282,27953,4755,8252,12260,-16921,30456,-1984,-9671,30414,-25411,-6626,-8963,23685,28033,-3545,14879,-27036,-15074,28232,-4223,-8811,169,31767,-31158,31864,-24190,11787,23612,14615,-32418,-27405,4960,-24900,-25506,-4838,20829,-29376,23574,-16707,-4500,13279,2025,26770,13806,26636,-28924,27766,-13146,8392,-18966,32065,-19158,-12161,23644,-15804,-29056,11618,-18693,-8650,14651,23173,-16102,15162,5641,6361,-11131,-17625,5955,-1568,-20525,-27582,-12970,-8131,5778,-5123,-5149,-28596,-12274,10954,-30182,14210,-31783,10052,-31495,19443,-160,23706,6232,-96,-9141,13271,-16002,630,-26488,2119,-1160,14614,-32028,26107,19887,26967,-19970,-12635,-7169,-19076,737,-3748,902,3106,-22829,-10039,11947,1251,-18937,8114,12515,10485,8191,-4202,-26588,17550,-24966,1556,-8157,26765,-10067,19866,-28228,-15395,10210,-14542,-29666,-28840,31044,-399,-31655,-13949,-7150,-11134,-22998,19986,-1812,30005,16826,3227,-3180,-13573,19592,-30600,-1300,15098,7833,14724,32470,-4242,-16901,10690,-18287,-14465,-16291,-30330,-22668,-12410,-1215,-27661,21439,9575,8410,10233,-23935,-24065,-27078,-15766,-4795,322,-20814,6357,-18925,18473,30139,-29670,-9664,-12010,9681,-30121,-23453,-835,30319,17632,3495,-29588,-13216,5504,31054,29117,-7455,4916,-29989,-12655,30714,3337,13300,17693,-9339,-25409,14733,-17976,24616,-30226,-28102,-23417,-2101,14892,8367,-29757,-12574,-13565,-6687,-20749,-10056,-30160,-21511,6368,-21029,21850,-15410,2635,21193,2505,-27052,-18122,-22518,31211,-27592,25720,10963,-24843,21829,-8025,17176,-30191,7084,-11358,3975,10558,419,3115,27712,21686,23319,-13825,-11935,-24424,4103,-9140,10070,22781,-10004,11390,-6895,18611,-9789,802,18785,-28758,11953,5471,3521,-28983,18222,-29900,-13605,-9207,-21381,-24534,-30657,-29585,-10148,20781,-2389,-24275,-10777,-26501,6164,-1374,-2164,-27189,7439,-124,31912,-4995,-1702,-7196,-24803,-30114,17484,-30794,-19355,-11808,-9846,564,-13565,27614,16837,9457,-6485,-11261,15742,-25118,
-15912,20476,11176,-375,-30117,-14197,15118,1766,11663,23438,1704,-21669,-21985,10638,5196,-16976,-25265,28535,2693,19822,-27464,-1220,-20323,-25655,6406,-25606,-6625,-5853,-14168,-21805,-22462,8509,210,-14373,-1927,-27568,9645,23176,29016,14783,28652,-28880,-12062,26317,-521,-9589,-28620,28909,6680,-14747,7856,2102,3866,28236,-31000,27156,23378,-28278,23397,507,-2799,-27358,-1820,-29632,-8132,-28330,12459,30397,978,-14645,29311,-2338,-17237,9348,9814,13295,-8520,28668,16648,10937,-10750,12176,17368,28485,17497,11859,5663,-28086,8189,-16392,22215,15673,-17989,-3925,965,-8275,12888,19759,24134,-10292,4173,-30391,28145,29213,15918,1857,-5344,9233,-32098,9009,-23899,-14248,32197,9094,16965,-1546,-5109,-32251,16300,-9678,17731,-28008,26431,-22163,15754,-16683,27667,-25834,-17690,-13272,-21168,19964,11939,-20875,-28118,-22403,30613,31410,-21890,-26277,18706,19351,-16972,8187,27910,21935,28041,32398,-14816,24277,-15489,3305,11763,13568,3539,-31089,29475,30801,-7987,-9983,-32257,17886,18942,-19202,-3025,27081,13514,-21660,-18807,-10104,20860,-8738,10696,-8742,-13571,15039,-22806,6501,30978,-8436,3487,359,12907,32531,-22206,-1569,-30769,-28855,-2643,28528,5530,24557,-25995,28902,22674,-6080,16766,-18249,-14927,29484,-29310,-4141,11540,-3203,-6066,6436,23568,9594,-15341,30555,-15171,7694,12016,-14665,-1534,-3393,-11445,-1015,-1566,-10935,14770,-27339,9771,-6202,-28433,-24743,4410,25794,28743,-31966,9258,-8113,18272,-31795,6143,24215,19841,18601,29629,17620,-30330,13350,18867,13083,31474,-32132,6049,8908,21168,23080,-8392,12013,-15860,12484,22012,-8243,-30770,-4222,13400,-9245,26447,17069,-3169,-28670,-7338,14977,5742,-18919,22375,-20517,11634,-20752,-23677,26932,6281,-11312,29280,-1444,-1774,13935,-17689,-15031,526,12369,-18194,16854,29098,19024,31240,-4302,-23732,-15692,-21451,7454,-29232,-6212,-8687,-21340,5601,18335,9197,-30119,-18221,13619,15382,-10488,13836,-5785,18154,26671,-27761,-30771,8141,27420,-17408,18497,6168,-430,-29175,-25047,-16920,-19840,-26633,15916,6244,13
266,-28826,-32422,-23834,21974,9935,29466,21290,7211,15074,18646,-29115,9565,-19365,16460,15671,31272,-11814,25977,-16562,20510,30670,8471,13459,-21052,-20472,-26875,-27097,29551,-23967,-14049,19966,17334,3105,-20271,-1347,-23285,-26489,-8170,-6375,-2804,32472,20468,-25951,-30112,-21551,25490,22061,-14037,18439,20684,-1902,-32523,19687,19065,17238,894,5093,-3035,-5463,-21954,15812,26676,-15215,-3398,24500,32182,26885,-26938,-13736,29660,-11810,10410,23726,-27399,14016,-24942,-21762,29706,-6617,3478,26038,29977,18927,-7732,16210,-11851,-42,-23042,-20668,-10540,27922,31924,-22394,3213,-27107,328,10434,4150,-22334,25582,27326,26776,28632,-8404,-1425,-15646,18292,14073,-1653,-23277,-444,7233,31805,-23536,-11220,-3777,-21587,3917,10623,-10748,28697,-25840,-16105,-11442,10086,9257,-6325,-25819,31886,-14929,12903,27683,4318,-30286,-3580,-14666,19643,-28838,-16475,19691,-18916,22552,23297,24883,23906,-13134,-19717,13229,-13196,6389,-13956,-21062,-1649,-14179,-27321,-16381,656,8464,4700,-16184,2860,25843,17376,-4700,-10463,16416,21156,-8274,23535,31436,-31280,-26721,-6187,10909,26735,-12024,-31520,-9392,12624,-11733,25626,-24294,-23640,8703,8382,-30019,20205,-15124,28403,3647,17983,-7077,-23586,-19259,-29036,24531,-26440,17579,-5367,-3080,-23360,-16737,-8802,14428,-25042,26313,-20268,-8902,9885,-22827,-24807,7841,-27890,163,-17748,31911,6685,18994,-6829,-5426,-32749,-11145,20445,-9742,14345,24520,-1873,30476,16467,32102,-13920,14945,30330,18190,-7128,31477,32610,-876,21097,-24710,6362,25480,-18028,23550,-30329,8502,19523,-17917,-30128,-549,-4368,-28968,-29863,-19512,6419,5403,28480,621,26807,26894,28166,-20986,-14796,-30209,-32298,-18741,-30284,-22539,580,-14567,14649,-7472,11760,-24075,24450,10338,32640,-23439,-32592,13885,-27475,22379,-7229,2855,-31107,-20064,-12895,28809,-8116,-14581,2632,-2324,6658,8303,-1354,23227,-1663,-17760,-22730,17320,-20919,-314,-25132,31913,26051,-32111,-18276,-8487,-23557,-14319,5994,200,-16912,-32324,9711,-1137,3001,22795,-22433,-32554,-29107,2
8081,-32003,-9238,7853,29038,20407,25862,15058,-19618,20988,31853,11200,-1377,30392,12464,-10142,28593,-18593,-12559,25996,-25652,-5561,-9446,-19577,27572,30261,22112,-10059,23384,14916,16515,-617,29022,-20112,28241,18314,-30437,11546,-3841,-17106,2552,8593,13909,-24035,6367,-7872,-28076,-11077,-1274,30389,17422,-14681,-25015,-4981,-4086,-14955,-27857,-23933,-7599,-14231,16743,-23935,12365,-13086,-25972,-13740,1168,21300,28421,-2645,-32084,24746,-13076,30983,17231,207,7833,31007,5326,6842,30951,-1067,-2358,8405,-27775,7131,16413,-9971,-647,-5122,-21109,16487,32680,-26636,7520,-23691,-12889,16041,-10532,15838,-5095,-23653,20371,-14115,-23119,-5542,29853,11126,-14706,-14702,-22666,11750,21957,-28406,25355,29859,-14202,2664,5381,16184,8544,16255,-11142,13604,28603,-16748,-30081,-6497,24274,521,12380,27414,6174,27341,28393,22093,-32252,30887,-1811,-32619,11384,-19995,12349,-31601,-31326,11958,-24523,15449,-19433,-32498,18185,13501,20932,30537,6840,-10410,-11576,-14630,14542,2904,-8862,7911,25345,2507,-13597,29533,28507,-27826,5589,15921,19478,-121,23198,4638,20958,-5385,-27019,-28036,-31456,-16940,-31417,-5325,-28943,-19575,-8982,17265,16842,-25469,24115,-25371,15625,-30555,20319,7993,-15861,-20255,-7004,-27712,26154,32392,-12414,31608,22438,-7800,-29935,-28727,-3487,-24077,-22565,32123,-3162,-10937,29093,21279,19556,26495,-23344,17132,19717,13931,-32678,25340,-6180,15280,-25372,-23299,30176,4758,-725,7407,-28680,-25795,7172,-2823,13249,16858,32700,-25776,-13987,8167,-17286,18082,32514,-22581,-211,-19968,24749,16315,-5150,9717,-23815,14621,-21946,-11825,-2420,-19504,-8044,-28047,-24298,29274,-16573,16950,6238,32344,-32708,-18834,-18448,941,-1221,-2883,-11025,13523,421,14582,-2238,3113,-32598,11976,-27084,15076,14208,1158,14497,28723,29896,-22591,19908,-32419,-11022,-11935,16835,16925,-9322,13533,30983,-6784,16542,30806,-5439,31714,3875,23193,-11192,-27383,14116,31864,-17673,-11865,-20864,25783,-10069,-22556,-3580,6828,-8887,-21803,-6882,20500,-21988,-280,-12029,-1509,24
923,-5126,-3528,-10004,-27773,-24060,-12879,-6056,-24184,-308,14905,15408,26995,17808,2012,18834,-21964,16224,20428,23415,1280,-5192,-22811,16624,-15499,8268,-1063,-26860,-342,-19309,3670,-24831,19294,-10103,19290,14407,16369,-8892,9498,-28379,12757,24925,-11139,-22270,-18768,-18327,-2858,1619,-21070,22847,-28817,13149,12011,-30900,-10147,-23753,32371,1815,-22452,1151,-17431,-21871,13716,3966,-21293,12578,14827,-19324,-12794,-6545,645,27578,28214,10621,-11074,25256,3309,-1024,-31475,-9914,-21316,-19889,26495,-8615,6389,-22633,-30118,26234,29173,5542,10994,-25972,-26563,-31274,-7999,-26069,24834,-21692,-24481,-13479,-16799,3314,-15832,-31238,5328,-29114,-2256,-24423,-6813,-9880,-18331,19951,-18544,-4493,-267,-25818,28377,12829,-31039,-2041,-25090,-587,31668,-11002,-13401,-3735,-16294,32536,20127,9657,-28052,25903,-17815,-30809,2456,-13855,-12707,15561,-31760,19916,-25452,-17785,9210,-22974,-31882,16282,25264,-5943,-27828,-22886,28588,29246,24657,-14929,-4260,-27658,-15712,7286,5013,-31350,32107,-22995,10683,23387,-16272,-11059,2089,-8266,-17448,20989,-28898,17526,32198,5621,-16831,5870,-27228,-1208,-17442,-24482,-5054,-8820,-32650,16791,1241,-11969,11348,-14991,9579,-17394,17172,-943,-19136,-29335,-5847,-23168,-607,6745,20008,11196,-3686,17979,-17060,-27689,-1036,777,-30926,1500,13006,25527,25537,-20442,6383,27260,-3390,-4735,-21617,-26333,-24450,-15154,32220,-3751,814,21991,-8408,7006,-17404,32401,12234,18842,-21000,-27491,22622,15498,-17502,-11452,9097,-9320,25367,-28290,13819,12906,-6231,-25865,18865,-23510,24645,-19790,-32099,19337,-379,-12574,-25763,-29394,-19617,6532,-19633,-16578,-18831,20978,11100,-5552,-27749,692,-19491,-2148,-11059,-6401,-19883,14636,26772,20780,-23683,9286,22233,-16985,7664,-27150,-17066,-27144,-14644,28795,22248,-86,5819,24986,-27811,30126,-2906,-25089,26634,17521,27328,-10582,-13655,-7482,16758,-10068,25210,-28709,26589,3328,13536,-18302,335,2632,-29023,-16215,-29755,26541,18881,30939,18938,4275,-11420,-8608,27921,-28367,10994,1648,5400,
-23025,8278,-14636,-3363,-3134,-30667,6793,-12198,-6181,-21581,13348,22406,14284,-18826,-1170,-18349,6553,27145,4160,26819,-1117,-2880,-27902,2131,-9898,-9308,-26409,27604,30998,-17340,11625,911,-20209,-6989,16243,-29097,8514,17520,-29671,-2178,3898,5108,10405,2244,-21323,28917,-12868,10161,8175,-17039,26822,-10947,-18742,-5878,-5712,-14472,-18740,-4148,-11434,-35,22565,22389,-32744,10711,30146,32211,22940,2042,16539,6472,8717,-14849,-13303,23680,-12035,19539,13077,29257,-27419,5463,-27850,364,14462,-18632,-17413,27901,-13045,26619,11044,17305,7049,12639,-3601,24080,-20563,1552,-21089,-12942,-21164,23124,17833,16446,-30635,12513,5161,-23023,1997,997,31413,-8188,23865,-25262,-24365,-13744,6792,-13229,21848,25938,13399,-24582,-11632,23700,6346,-30394,10510,-7982,-11756,-21447,16337,-21153,-13046,-11470,4567,-31253,-14507,29070,-12831,2003,-8770,-13758,-8687,10113,12976,-23089,17838,-2598,-8131,-24665,-5474,2587,-23170,-19149,-15728,15340,17817,-13409,20864,15145,-29900,-31853,-15648,25374,-5338,-3694,32493,-22468,-4630,25418,16739,1433,13194,-19099,-1791 diff --git a/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice29.tflite b/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice29.tflite new file mode 100644 index 0000000..77ed82d Binary files /dev/null and b/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice29.tflite differ diff --git a/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice29_golden_int16.csv b/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice29_golden_int16.csv new file mode 100644 index 0000000..94df4e8 --- /dev/null +++ b/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice29_golden_int16.csv @@ -0,0 +1 @@ 
+-19762,-4646,6654,-3288,-25019,27079,-3142,28388,12185,6369,27492,-24232,26194,11018,24351,10940,-16504,-8166,-6035,27889,-5144,31760,8363,4354,18288,-22248,-9581,28057,31717,32349,-30172,-14057,-19195,-7839,31440,-602,-31880,-25564,-7079,-18554,-31154,-24976,5838,3541,24853,28126,30502,30497,-14602,32258,-15114,-30611,8665,-7014,-17650,-21863,23567,20373,2154,-8537,-2329,-29146,2201,6884,19844,20989,27107,-12173,13959,-1746,-19919,-31920,23610,-31660,18305,-5401,-5031,-2397,23987,-22115,-31502,29970,9931,-10152,2728,936,29594,30034,-15180,32115,-1987,-13045,15883,-7927,-8544,30806,-25971,9265,-348,12527,-19911,9810,-8230,-13753,-23676,-14708,155,-8605,-5012,1176,2457,-16938,763,-24880,6678,24727,14657,22355,-11342,-1706,24726,-4404,-12267,19512,762,4231,-9174,11132,-26736,3758,28897,-14082,-10810,-19169,-22395,-820,-15898,18062,2387,1721,-15491,-18144,25751,-20618,-5471,27934,8914,23103,-13064,-19803,-29228,-32281,-14161,31153,29806,26156,1668,-15111,1387,-4578,-29701,-618,320,17694,11302,25037,-21267,27295,-3408,30708,-27150,11247,-5893,-6879,28113,15722,-12339,27725,-1029,-14692,-3397,-1122,12736,-18286,-3933,12049,-18820,10192,-3964,-359,-23978,187,20062,31544,-9168,27740,-14413,3470,-15686,20356,10316,11211,-1055,-21997,5167,-26198,28404,-18570,12385,18664,2418,23673,-734,-11261,-4727,-7945,-29268,-19382,3421,16364,15597,-6698,-5274,-29794,25153,-25645,29315,14718,25646,-5105,30601,13757,-26206,-17400,27997,9973,-19783,29291,21597,-10545,25254,32258,-24313,19063,21987,2474,-28104,-14187,32359,-32687,-6608,16619,-31263,-16,23059,22976,20742,-14678,3719,23832,3764,-30584,5310,-1918,16258,15593,-17168,-10602,-4021,25314,-26204,7598,14172,-5294,8468,9909,6559,12536,528,31984,19940,20464,31594,14142,26582,-13614,5918,4668,-31896,-19254,14011,-13258,-17342,-23582,28483,-11089,-29427,19533,-13144,-10105,-11025,-25564,-18218,-22222,32327,-7018,-30739,-4841,15040,-29402,-7494,7273,-17829,9322,419,17418,-10297,-24602,15997,16803,-22089,23316,-12514,-30954,-19129,30191,-
27555,-14440,22521,29199,-25415,-15756,28851,21935,-27881,13080,-27801,-32154,16910,20615,24845,16031,23725,-2292,4922,9687,32456,-3777,27870,19502,9554,-8507,-1598,-26475,-9309,-13169,28673,21165,29511,3136,28542,-12910,-14421,-13400,31338,-31037,-20184,-17714,32427,-28709,-9485,-9585,20754,26597,14792,-5657,-21691,24781,-19423,-13112,21176,2900,-17128,26750,4661,-29995,-9181,31654,2182,27550,-10945,-26433,29728,25483,-31307,8057,3902,-25670,-25692,4232,-6602,28604,-17624,-29079,-14223,-21052,-20975,-32501,-32439,-14585,11222,-3984,224,15720,-27735,-32528,7480,1403,-24697,-13234,-11477,-23514,-5331,19744,-19436,-2360,-2359,12684,16013,803,-24107,-296,2379,-2650,18023,32031,-17018,-6998,1905,11307,26356,27370,21145,22822,17153,-4688,-2137,26482,-14534,9549,-9342,-29685,-9591,27462,9479,8894,18901,272,5746,-31038,25692,-26649,709,28750,-16785,20700,20374,23787,-7872,-13935,-21882,26497,-18219,-3989,1427,32488,-20744,-8793,26093,-12451,4183,26391,23970,6778,-10346,4722,14110,10294,-5116,20663,-13935,-25810,-13642,-16411,-28136,26270,-6760,-3789,13607,-30324,2515,-4836,1091,21851,-5733,-22676,-10654,15294,9901,-5957,14101,1878,-9404,28242,-11438,-21409,6,-13454,-9587,5328,-16306,31557,-16598,-27642,-32389,24775,-11494,9332,4179,-19623,-25380,20196,-28885,4453,-10006,-25337,12009,19542,21812,-11841,-3128,13502,-31620,25384,18720,-31573,32047,22814,-15711,24443,8563,23368,18595,6544,-13242,-15340,8699,2542,16860,21667,5949,-23585,12370,24601,10070,-22889,24707,25711,-26796,-14083,-16975,28111,3114,31263,18673,-11356,-23697,9022,-18700,26757,23882,6028,-21631,-16259,18100,-26843,25268,-7779,-30725,3002,2122,1491,-21955,-20681,-21945,-24828,-8557,7745,-18947,24617,11744,2322,-19525,28445,-3891,23505,13254,-7801,-6786,-24607,-6699,18844,26671,32241,15891,31339,6241,-4764,31478,-16304,-13688,-14167,18326,-24623,27456,3239,-11819,-505,14859,15219,4434,30836,31708,-23444,10477,-17246,-3781,-29851,21825,6940,31536,13735,-21594,-9345,5603,1537,-11784,26691,-18232,9015,18904,-739
,8062,24052,-15400,-32073,-822,32449,11534,-11869,-26136,-31754,20235,3950,8293,-13735,8137,-2747,536,-25156,30043,2205,-3998,-9244,15696,26445,-16149,-10459,-16600,11853,9550,-6439,27328,25668,3763,-8553,-28276,9252,18223,30521,-23597,17903,-2117,-14521,-28576,-26728,-5828,-25153,13106,13498,29960,391,11512,-7445,-23028,29665,23787,758,-15723,-7280,-11027,-2523,-8294,9101,20648,23573,4209,3680,-11142,-27646,-13450,-10680,1773,18095,-31452,-3199,-28259,32408,-15973,31230,-29952,20447,25830,16918,2164,-20155,-30550,-22164,14058,29809,-17359,-29849,15043,14393,23007,-27212,-3173,14993,-13059,-23823,-13584,-2014,14838,-6282,9326,-24706,-30111,-21181,-12670,30392,-28676,30727,28547,-21032,-22678,-10679,-30944,-1112,30654,13768,14121,23703,-8546,5113,-28481,-4778,-20471,-23721,-25334,-21378,15738,15772,29143,-23168,12795,-24190,-19301,18081,7458,12439,-25990,-17590,14738,-11578,-6819,-6517,18957,-12693,-11531,-2380,19654,-26001,-27521,-4586,1004,-20394,14294,8508,31833,10577,21885,12758,-4671,-2574,-1392,24231,-31761,30822,-5514,-17349,-24802,-24014,-23353,-17083,24718,-15745,-5502,24394,28873,-30081,-30556,-14442,-3368,-29150,-14346,31389,8122,-18436,5085,2078,11161,29082,-7202,-15165,20633,-15024,-25907,-3151,-10289,27328,31030,-31191,-14012,-31042,22851,-11865,5671,-3291,9579,7821,-27724,1051,8458,29441,21965,-9162,-12026,23061,16991,-30466,-2781,-9676,-5548,-24841,-24574,-1991,25801,13861,-5255,-12811,30734,25766,-5590,-17591,-28639,879,15290,33,11092,-10932,-32217,-7700,16520,-23621,12543,12398,-17023,27910,-24059,-32474,-27789,-8073,-331,9565,-9132,22334,-8790,4045,5139,-4675,23616,20641,13403,19698,-24863,21447,1838,9518,-22437,2592,-31577,8909,-12553,-10198,28882,-6580,8174,17985,-24582,-19718,-21004,27086,6839,-4283,21176,-18358,30124,3847,17144,11493,-26799,-27341,-15976,-24383,-23423,-15893,25799,-21461,29964,-30534,6756,15206,-10998,31427,-30707,5378,-252,14393,-433,-27332,-24442,-20410,10189,1981,-5791,17109,20809,16322,-7204,7659,5073,-31650,1032,-11021,176
23,-6228,5463,21442,4544,-23048,-9106,8760,32634,4852,-13119,-32508,3458,26651,15809,-28554,-30457,-16495,-21700,25336,-11202,-15142,30954,-25186,31142,23993,7052,-4417,-16629,-10839,18706,-11572,16624,-13231,-962,-20717,-9538,-22616,-26829,-21683,-20571,5342,-25826,27787,19249,12002,12966,14632,1688,-26948,13218,30183,-15497,-26594,-20934,-14542,-10243,-28822,17752,4567,-922,-27595,-5312,10580,-29376,13097,23958,-10942,5223,26597,-32124,25537,-17137,-19696,-26085,-6173,7002,25125,30967,-27849,27694,-8833,-17830,-26756,-26109,18582,12816,-28273,18067,-12996,11389,19775,12323,30740,25224,28219,20746,7171,27220,26841,20213,16969,12693,10380,-11246,-20686,729,-24709,-9329,10002,-26383,-4893,19418,-16371,22903,-4089,-20514,5275,30614,-31528,-22040,-4058,15999,15746,-10685,-10909,18393,848,16129,397,-10595,9745,29629,-14919,5081,7462,-6512,-16199,11983,3952,5764,17334,28224,23242,-8350,27491,-401,6843,-18442,-19221,-18219,-18550,909,12651,-12808,-31654,-31228,16215,31988,-9049,-29823,-14997,27669,45,-24075,9281,-20608,-10216,-13190,-7974,-13835,19766,-15516,-3338,-21426,-8853,3017,4895,31180,-30513,11653,-10149,-5340,-7804,-19918,1721,-11949,-1643,-17550,32602,25259,15814,-14909,-10265,31572,19503,-27206,-2231,5862,-5184,-16797,-29456,6429,-5015,11019,-24543,-2114,-2373,-16647,1921,-14853,22505,-19805,-28190,-23849,-11949,3967,-27225,-6339,28263,-25269,-14955,-4152,16389,1662,8322,22671,19304,-30882,2855,4239,4216,-9223,29765,-4361,-24231,-21415,-19050,23117,18319,-10127,31557,14416,6191,2942,-5237,22123,-2565,6019,7249,-31065,-30114,-8284,-506,-14346,28247,12192,26699,-30762,-26573,-14078,-4226,22977,-7526,4221,9951,562,16935,31746,9463,11710,-32222,-25420,-4086,-13013,31034,1186,-8806,276,-237,-13432,-19179,19518,11857,26022,8285,-10762,-22535,-10737,7218,-28918,12202,2451,31812,-23305,19950,23524,-7934,13296,27850,-26787,11519,11187,20820,26919,15943,-15985,2317,-31588,-2066,-8733,-3589,-20510,11118,-32724,26788,21774,19433,-11428,31173,-28039,18387,-19195,8787,-7320,
22861,-17558,-7334,22936,28232,14493,-22233,-18557,-3716,645,-15617,-24464,-21875,-24083,-32661,-4847,27415,-9548,-14313,-24156,17896,22057,-13096,-20068,-10989,-24873,23400,-26030,-25727,11530,-21113,-1237,2572,28648,-12024,30098,4130,9799,-21757,-19130,20036,-3031,-24800,-1832,-644,-14620,5625,14590,20537,16206,536,27833,-16498,16182,22767,12733,-6402,-20001,-3793,-15433,-6865,-6928,18661,-13296,-32357,5093,-27911,-8908,-4320,9876,-19968,-10720,32673,17581,29748,31348,22481,-31178,-6000,4273,-32026,31857,4434,20933,-1365,-29838,-32246,18954,-32038,22799,2563,-9878,-21258,20112,-18984,-10174,-20998,9944,921,12424,16203,-27048,-12871,-7269,20370,26423,6130,11174,27242,253,-31509,6810,21482,23177,26803,14386,-9176,2300,4431,6977,23089,-1671,19504,29731,32168,-15227,3216,-14372,572,22609,3710,-13418,30985,-30562,10933,-14316,-9551,-11856,-8915,22401,-14333,20490,30580,-16225,10634,7015,-28032,29551,-20280,-14782,-8251,-1409,9061,27700,27810,28707,-26697,26424,-21058,8729,-9856,-23075,1935,-14441,-2357,-22842,-19193,-22558,-19654,14868,-15044,-25931,-15512,-9347,22349,-27109,-3964,-6578,17961,-14317,17220,6939,-28564,-12226,-10942,31329,23748,-26085,-23118,4442,-27150,-9321,1049,-1163,11233,17746,-4973,19815,-1833,19167,-18767,8946,-24059,4116,-16480,-1289,-31948,8058,-32212,5956,-19479,23004,-30700,-9828,-16062,392,-17144,-31723,-22369,-14821,27283,9028,16607,-18837,8912,22282,23128,7083,17175,15332,7751,-14036,6026,-3044,22824,4572,-25128,10441,16963,-30289,29422,31355,-14465,-12896,-13978,-11576,501,-2857,-25292,23698,-30756,1312,31801,27099,-19886,-28683,1238,-13484,-1127,-31530,-30532,7564,15676,-31,28319,11521,26583,-26901,-2525,-550,30044,-5118,30650,-31659,16331,-16638,-4387,-27608,2010,-15037,20294,-22173,-7850,-32138,27548,-27883,30058,-27520,16930,-19637,-15716,31322,-16470,19621,21263,19940,-4601,-8787,-2969,-8668,-8296,28447,21019,-26830,20202,30131,-25254,-6624,18543,-25602,16144,-31666,11452,-10849,-3016,-22271,25152,-8510,29724,-18890,-8858,-27158,-1944
9,30411,32667,-15233,28025,18416,-21949,-4903,20246,-15935,11803,-8626,10117,28863,14939,28033,-958,15513,-5658,11759,19206,-24787,-6888,31857,-24432,24901,-14320,-2834,-2752,30810,-12952,-19007,25049,1897,-16188,-25885,-12217,-764,7685,17546,28295,7443,28679,23897,-22119,-19328,16345,-7451,-15138,-17816,-20313,6466,-19409,-21629,-1071,-11887,-12552,-9480,-3739,-20011,-16220,15799,16172,-12187,15528,-6384,-6731,32321,32502,14091,3177,-12621,26423,21057,27420,-15957,-7344,-4173,20354,-30127,4303,7594,-21445,5551,29739,28528,-2578,25905,-3718,20808,7993,17955,-20940,4944,32552,4318,18363,-14154,27361,-20614,26517,-24050,24844,19553,13383,-32530,24983,-28765,-29315,-20000,-30422,24407,20130,31663,-3876,8269,-712,9064,17992,12091,7505,24459,12690,22485,27150,27804,25289,11191,22467,19426,3178,153,30646,-27550,28762,246,-6541,13716,-29534,-10588,21839,25715,-20846,-4101,23251,9047,24766,-6096,-22330,10105,4022,24983,25358,-16640,21623,472,-1299,22995,25500,-29321,3687,-10379,-8540,-24156,25760,-30818,-197,27761,-18239,3333,-16069,18001,8686,8392,-11944,845,18511,22733,-23819,16525,-14223,14167,-23169,31256,-21051,21680,24133,13619,-6838,-20117,-30659,-32261,-26103,14478,-12085,28852,-17268,-5358,12671,17389,26963,21261,-11483,-19075,-5285,-5536,19063,23301,26127,-821,9656,3666,2793,-1275,-31091,-10663,-32603,-31249,24250,437,28971,22200,4350,-1437,6426,23684,18114,-23376,28652,-19521,27509,21751,-29732,639,-8664,28486,13479,-8965,-31386,8161,-4544,-2518,15586,-20131,-25065,-10603,19333,2827,-22737,-7178,-13130,19424,-31430,-10141,-7213,-21512,-12674,30483,13664,-4150,-22292,-19178,-16732,8458,-6319,-21375,-28067,-6644,-17401,-9871,8394,16443,-3041,9764,24545,8228,4441,-30486,23191,31310,-19839,-7140,-2194,-26644,-11072,-15797,10065,-4759,-16871,24887,-2495,-10118,-23234,26914,-25413,28471,-29008,30276,31774,-27980,14659,-8521,-8202,-7660,28349,-12493,-9568,21136,16685,-18624,29466,3313,29154,11238,-28035,18809,13581,4964,800,13659,-8040,-31433,-13341,23883,25204,-25483,-
22251,23928,-3175,2882,29839,12565,-2251,2363,18627,5642,1078,-12651,6249,-18968,-15809,-11118,-4282,406,14793,6401,-12326,-26106,-1090,31051,-21908,-8064,8073,-21299,12308,7952,-32304,-11985,23596,-7649,2188,-9258,-16566,-20896,15986,-30330,26550,-9537,-9011,22839,-28961,24627,-9759,-4539,-8697,11741,17476,-20401,26814,2733,18328,-26425,9705,25767,14460,-20532,-17164,2591,-1360,21934,5505,-10024,2322,-25923,1348,9565,117,-17528,-22223,9766,-27163,-25761,8151,-7648,-2536,13515,6575,786,-29227,19658,20802,-12233,-2347,-6624,27763,-31065,-3703,25420,6252,-12227,-29937,16248,22142,-3319,369,22017,11520,5158,25850,-13446,-15926,5792,-31073,-24389,17810,-26782,-16965,-19696,1673,2633,-11590,9434,6987,24976,309,30785,-21694,-29039,26634,6190,7823,-4976,23901,-19681,-14246,2173,30402,-23498,24316,-21046,8459,-27382,18897,15350,8791,-6095,-22907,-856,2523,-28635,-26260,25557,27825,-18123,24758,23989,-22013,-22399,21666,8820,10662,171,-10359,22405,-7519,-13238,1463,-22278,-30946,6539,-12989,7707,24191,-7131,23007,-16778,-13845,-29219,20824,-9107,-20143,-27293,-25247,15696,22606,-5231,3850,9823,-2735,25662,-29773,26482,-32213,14669,19694,8037,-11043,-6861,18969,24101,-30322,-17149,-5286,14123,3187,378,10447,-17853,-31022,23248,7086,-29173,-19979,-4995,-18527,1256,11462,2558,-4957,-7263,-32117,16072,10070,-25454,15312,-6543,25429,-11864,-6891,-9583,10272,16346,-25170,13385,15606,14240,19101,15758,-15853,15982,-10777,-20237,27840,3290,-4286,2612,-207,21511,10249,17754,9910,17155,2137,4739,10228,-6474,-9157,29703,-10986,8700,-14125,-10015,14991,-12241,-16263,14241,-6790,20060,14052,9377,17825,30677,18901,27376,-29869,3695,-6804,-30790,-18280,-9569,23744,14853,-11911,-30240,26506,-29244,-29019,-24599,10078,14256,14925,10968,4511,21790,-25252,23980,-25852,-604,-20060,3723,1113,-32564,-23286,-7338,9701,22727,-21034,-26807,-17762,13058,4915,-73,31956,16391,-32117,16527,-30538,27107,27854,-17359,11843,-22691,-10102,10297,30787,-2258,-25466,-7119,18047,-16810,-13903,-30388,17124,13709
,14653,20865,-10663,9531,-13031,8641,29991,8996,-28256,-12583,31171,-21115,-30948,-31345,-6977,20186,-7614,-2190,-19366,13358,-26971,-7676,-10584,-14453,29160,5142,13900,-11307,-14223,24160,-158,23470,31864,-2981,11300,5935,-29631,26142,408,30903,-28111,6353,26907,-19065,6812,-15591,21341,598,822,-22716,-18496,-2064,-1930,15226,7429,-22970,-5328,10133,-15121,24718,26898,10326,11863,21569,32573,-673,-8218,16224,14764,11873,-5697,30941,23710,24656,20434,-8149,32343,26083,30743,-1239,6937,-27805,9416,17272,23185,27147,32421,23380,28717,-29315,15900,-24945,6990,10265,31978,-444,17207,-9421,-16020,16412,22810,350,-23821,-17627,-8730,27786,13043,20137,-5421,3378,-4583,17647,-23403,-15023,-28386,29288,-25544,18683,-31763,-1635,-20054,-4025,4888,-7910,5068,13702,27390,-12412,-12603,120,-16775,9157,16703,11438,29663,-6464,-9332,-29779,-13351,27385,-1593,29007,2272,27580,-24754,-27639,866,21460,24571,11774,26779,-23850,-30763,20214,6090,-16723,26148,9769,-5210,-25888,-12206,-7490,3553,-13768,-6275,-2915,-1888,-4805,-22166,-15139,-22802,-15469,1005,10195,-23439,-22091,30947,4153,-15180,30836,13723,-10040,21423,-9615,-19848,11326,-8578,-8182,-17071,8984,11590,-23254,-22313,-29629,32182,-2813,11337,25364,-17620,30657,-25445,14650,3878,-12200,-17558,-14005,18994,-11290,-274,6124,-9880,2911,-13054,23640,-26387,31855,9184,-20537,-14325,31140,31620,-1565,-1347,12774,-6290,-3959,32699,-27543,2194,18798,25594,-21409,-10895,16162,4052,30247,-15287,-2931,-2053,16412,20724,-11674,-18476,-12048,16917,-4129,-4097,16946,-10644,-15026,20577,-4303,1381,7625,-3095,1103,-31467,-7392,-1577,-20966,-8382,16142,-9688,2868,-13535,-7989,-28078,2817,-676,29760,4494,10637,-18775,3403,8829,19314,5522,5347,29620,99,-7510,19366,-22466,26854,32486,-443,-7418,-11743,-26316,3846,24778,2198,6518,-22808,-24963,31799,16595,10411,20454,-13958,8189,-1988,15181,14516,6792,10053,22600,2082,18316,-19862,-8853,15658,28043,-24111,-19530,-14386,-7355 diff --git 
a/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice29_input0_int16.csv b/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice29_input0_int16.csv new file mode 100644 index 0000000..d46f077 --- /dev/null +++ b/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice29_input0_int16.csv @@ -0,0 +1 @@ +690,-26336,-26373,-14256,-12993,19724,-4810,-31218,-9173,32373,24571,-27736,-4428,-22716,19275,3685,11786,28083,-6018,-641,27235,16950,31707,-29144,25914,22547,8278,-4546,-421,-22251,18921,2762,20978,-21598,20862,14930,-13518,-29139,-23010,-8330,-29834,15438,16261,7413,9393,-715,-19424,-9922,17612,-4745,-9326,-26481,17862,-22255,16666,16742,13993,10464,-15951,-11661,-32344,11239,-2234,-23863,-31328,-26826,-19870,-21756,-11349,19762,4784,-30664,12009,28786,-7860,4757,3788,-4961,4058,3099,-20529,-22547,-5769,9411,20527,29329,32020,-8853,-17047,18770,-31696,-10011,32414,-30237,27265,26333,-18146,15868,-11574,4779,24438,-24915,18107,-15646,27345,764,-8571,-14253,-4901,-17455,15739,18855,18878,11004,12828,-5008,29096,-3244,26314,21596,20565,-11163,-22795,14690,-26151,14551,-16759,10052,-30989,7049,29482,8129,-13098,287,-9343,30163,11526,-26321,-22539,17459,-5980,-5042,-12639,1297,-2706,29620,-15978,20955,-5096,-5229,1423,30113,1279,-8516,-15675,25125,-15060,31007,7662,6519,2380,1785,15598,-28320,-21707,23845,-2926,17508,22533,1284,14200,21959,-28593,16500,19084,-2986,18691,7828,30088,10854,-28923,20763,-16520,16906,-4729,23736,-10725,23503,24944,27013,-412,25357,19743,-6723,-24482,24629,-14964,-4618,9878,-13223,31878,-4172,-23717,-13300,30782,17637,15654,-2768,-29696,636,8825,10472,-20712,-17037,-2689,20603,-20178,-24150,23562,23101,-29340,29231,1320,-919,26313,-32667,-19588,30011,-2833,-24347,-25863,23344,-22235,-4342,-3778,10956,13072,24700,26676,9244,-23310,31915,-13110,23395,12517,-10508,15467,-1264,-14883,-16696,-24185,28663,12769,-2565,-18694,-15691,9957,-18558,-4808,20640,-26133,-15509,13610,-19179,15120,1
401,19925,-27567,22109,1004,6818,7747,6336,-27225,-20046,-17110,25929,-7458,-12295,-32759,-28539,-12625,-14866,-2246,15664,2353,5367,-10577,-28883,-5795,-5133,-26938,23646,-9459,32184,-24485,30528,-29834,-4559,-12351,-24726,3448,-20512,-3776,-5357,-14454,1593,-19599,-25522,-14389,-7194,-4080,-4804,30280,29818,2185,1543,-967,29219,-11946,18680,-3392,-22143,-30650,8695,-14050,11708,11635,-32148,-24820,26754,28680,-14559,-19331,29784,31807,-25857,3014,13583,13666,7986,-2494,-9390,23976,9221,28114,-15473,14067,21861,15485,-15388,-23741,-24743,22410,27469,22690,18121,32594,17908,25052,-22999,16593,12168,31185,-4761,-13295,26509,-31379,13066,-9474,-3730,-1690,-31054,-5527,-32441,18585,-26428,-5853,9839,1167,-11174,8173,-24755,6550,-7640,-22738,31554,-17889,-18528,-9330,25286,-9722,-20504,-26004,-25312,-26625,27946,-14673,-2250,-3376,-25880,512,2455,-3261,16910,-8550,18184,-25727,29170,-5493,-16029,-16994,-29651,-7619,-21060,-13980,-2414,-9507,24485,9457,5263,23401,-23003,8876,24456,-144,-16612,11320,29002,-27999,-26977,2931,11396,-25140,-15635,10138,12801,-14408,26498,22234,14158,-886,-14980,6377,-32212,23876,-25356,-14045,-8059,26206,-11366,-17348,28311,19006,13647,3713,-28819,-31052,1979,-26455,24519,5126,-8486,-29257,-22904,-20700,9456,19147,9245,-1963,19421,30961,4931,-16849,-4752,3392,-19326,-12359,-15899,25913,-32027,-5956,-5851,18463,24144,9430,24307,-3425,-15930,23334,-25605,19617,-28843,-13902,32091,12525,-19604,-12339,1509,11934,-27766,-19304,-5932,-25267,-31372,1226,16849,31700,4823,2707,9833,-7967,29131,-9882,31133,22239,1367,-28622,-11162,-26336,-23564,4446,25560,-1936,21037,-16112,21590,-29008,-17378,-18552,-23043,-345,30335,-24351,12495,22404,-24425,-62,5788,-7160,-28169,-23332,-24707,25870,18703,-3234,19672,-32576,-451,22428,29853,8588,-29788,13943,10934,-24686,21445,13710,28571,-4527,28132,21697,-16262,15401,-17200,-29697,-11368,-24772,-20574,-21736,-32616,-2410,-7955,14590,28720,-28947,-26475,10371,-22991,-1059,-30595,23550,18894,-5806,-24010,-2738,30761
,30520,-22673,-23975,-32246,-31830,268,-20942,-30596,-7582,11522,-30293,15307,-4974,20904,-15923,-31748,14642,29940,-28497,1241,6642,10620,22607,-16359,1275,26029,14968,-26388,-20970,7317,10878,3184,28709,-23822,5763,29251,14617,28674,12910,23926,-19170,24731,28440,-9751,-16734,30107,29089,12311,-8299,-9531,-7965,28482,25510,-16997,6662,-164,-1550,-28339,21241,28961,-8557,-24955,6210,-12949,-9565,-28975,-26112,30531,-4815,27887,-16271,-3316,-13300,-12403,-25871,-5705,-3597,-11004,-28585,-20024,21729,-31595,-17476,-6802,14605,-21848,-25260,-3844,17942,-21948,7262,2185,-387,7007,31434,-28603,-20990,-29382,-32562,-7993,21941,16951,2252,17229,30868,19677,14376,-7148,17279,-11893,-14730,-24771,-20033,-285,20871,260,14266,15712,-2821,19383,23185,-20886,-11028,24839,-23479,3274,-1232,13312,-16715,-21731,17901,-21455,31401,-3720,-16274,12020,30765,-32363,117,28603,-18850,-30349,18271,5942,2062,10251,-16427,-11508,-7427,23530,17430,-8680,-28725,-17253,-11618,-24719,9842,1756,31105,-4884,32212,15016,29014,32131,28503,-1990,17831,-23452,-21946,9695,-18273,29305,32584,-10620,27606,20081,17785,-30780,18359,10201,-11382,13228,-2040,29796,947,26549,24273,-31123,-7933,-5713,14387,-14928,-6884,4733,-6580,15632,-6189,13518,-25884,23412,-13122,-1334,-7664,-8317,-9545,28191,-25343,11503,12364,-28422,-2695,7862,15036,-879,-8876,-27299,-13329,-10406,-24629,-4880,-726,-7865,31206,25588,32548,19048,-21230,-1505,-8290,2266,-2388,24170,-364,29583,14968,23092,-4655,3389,25080,25668,-1859,-20175,-13416,11151,914,-20539,-14983,-3953,22862,8030,11257,-16599,28834,-907,-12578,-10602,-10506,4152,25455,-4012,-16850,-31500,29707,9441,-12108,22927,-3415,8103,-27977,7507,20432,18735,20246,952,-8655,-23503,-16965,19595,-10716,25436,-12634,22607,-13289,29534,15468,-16319,19783,-20772,20458,-21737,-26182,8238,27347,11668,-9992,-25974,-13075,29957,29210,24802,11953,9553,-25417,-9587,21161,8065,29339,31625,-1727,-10326,-25966,-16566,-28931,-17556,-32620,2437,-28098,16397,8727,3799,15743,-25444,656,-12972,-
9028,15138,13864,1560,-633,13680,19097,-29638,-412,-1850,606,-4344,32291,-21461,25949,-28302,9785,-23053,20387,10985,-14943,-28666,30197,-4475,-196,-3888,-12862,-2320,-956,-8256,-26715,4482,-24012,-13852,12238,29999,29933,23796,-7655,-2964,23874,-29425,-26338,24332,-11055,-4329,22281,-19863,20789,-20280,2293,2647,30466,-22215,-17308,-17424,-32564,27404,21810,26638,-32052,21722,-9532,29660,-5552,11507,16416,25165,-3337,17941,22758,-7787,-28098,-22083,21855,-3947,17290,-9223,9126,25180,12122,-12253,30736,-18902,-18267,-14704,-20273,-1497,-11766,2799,23238,19952,-23819,-26644,-8381,-10726,23792,9277,23743,31963,2286,29681,15465,13176,32478,11561,-19914,29059,22102,13325,31346,9745,13902,-28988,-16578,24354,-15197,-6784,14352,-26180,31861,-4414,-21871,17817,-22194,-14306,-19569,28487,-63,20859,12772,16309,25886,31162,-23398,-13679,24914,31202,2694,-31779,30159,23714,-4849,3644,2590,26038,-6268,15506,-9722,-30165,24677,18311,11880,26773,-21659,22356,-29595,14255,5281,8180,22309,3363,2683,27047,28214,-29917,-3868,15727,6354,19359,-29497,-31181,8169,29837,24572,17800,-14923,-28613,-10674,16614,-12166,28277,32008,-32419,2335,-25011,18362,25842,11812,23020,-14460,-19686,-15580,21320,31159,-27370,-11803,26268,24897,-9014,-2040,12150,-3678,-31336,-16069,31511,-8799,10497,24658,-24892,-159,16291,19648,-1266,-2413,-5191,6621,-2724,-31230,2413,-23828,-1810,30792,-9776,30773,1968,-21827,-22702,2567,-28365,31994,-7357,-21110,-4419,6061,-29814,-31991,-12414,-4305,-22644,-10833,22479,30311,13508,-32295,10376,28963,22023,-15298,-28103,-15894,-10925,21723,-8576,9245,-23529,-32590,-20107,18061,-23370,20494,-9752,6293,14653,-14804,-32364,-29319,24897,28387,7279,5817,18341,-28525,5313,7737,12408,-2561,21707,-8243,24917,-21439,25624,29267,1615,16295,2970,-7181,-8758,32593,-9452,-19062,15074,-26300,-29400,-31627,-22224,19159,17714,7411,-23043,30794,-29438,-213,-2934,20661,-17516,19072,-25974,-5887,16872,16717,-11609,27529,1188,-25442,-28843,27207,3345,4395,-16323,2482,-32529,30004,-6935,-12
905,22069,9008,-31092,9799,11650,-2460,-12935,-25301,1717,19748,-8323,-2470,3016,-8689,21710,14455,-13266,-13933,12383,-24829,-19927,28957,-20096,12136,-7512,-27959,17912,-22570,-19093,27163,13737,458,-26759,19364,-17608,-22184,-21236,16538,-8649,29351,-22313,30413,-10347,-1464,-19547,-4151,-11319,-19153,-25772,-19106,-22694,19151,-1690,-21827,-29247,-7556,30499,-22925,18038,-30477,-15138,16528,-21751,-27987,-14109,32117,-1119,-24195,-7143,-9204,23376,11091,7843,-14229,-117,29773,-31448,30825,13073,-17831,3666,-10643,-4368,23047,-32714,-22052,-10151,-31592,-23148,-5378,-30839,32555,10647,5450,-21702,5695,-8552,661,-26199,-2479,15700,-2425,27732,23991,15251,-31171,14168,6481,24396,-13972,7507,-14259,6316,23248,-22244,-28009,4519,-17046,14099,-11723,28911,11924,28506,-10167,6547,12909,23215,24706,15898,-18920,25578,-12620,3466,-6251,-5968,-18877,9107,27477,-30956,30993,13388,26759,-2111,-26637,-4559,14074,7132,9351,-19528,-1004,3333,-31568,-7241,9240,18773,-4363,9105,-24063,-20791,8030,5177,-2256,-2992,-20548,14949,-21381,-21934,11604,19944,-201,-20720,-6237,-6720,-21079,-29024,-14325,7954,14824,19368,1755,-14969,-12975,23457,12677,27965,12524,5277,-12229,-19377,-30258,-25618,21395,16221,13731,-2030,-11567,-2966,-29768,-8476,7170,30521,25281,30077,-323,22127,15302,-7216,4067,-1263,-24210,8404,-15555,-19708,-25425,-26439,-30655,-21010,-20486,10870,-26977,-11970,22380,-340,11244,16071,-25531,-24796,-11402,27382,-10892,-32455,-3177,-6091,5516,-8562,24840,3746,-11886,21519,-32528,13416,-27967,-18642,23130,15283,-6914,7160,1482,26195,-6264,14100,10874,-7159,-14039,13065,4166,15970,-9976,-19797,-26532,-19195,2347,-30789,4768,-16619,31178,28860,-780,1770,-27114,23326,1467,-13257,-27153,-24666,32091,-13188,-14020,-4523,-17710,-5969,-9553,2873,27299,26274,-14770,-12778,-21858,13753,18368,-2607,-13843,-21416,-17234,-5924,24772,-30659,-25824,21622,-334,-31876,-27568,25931,-12806,-5084,11865,23056,22015,-21902,-16216,31361,25267,-8160,23435,23813,-25762,-17300,15511,18826,31809,-
11727,23118,18572,4602,23780,2717,-12845,32623,25290,-4159,-19670,15717,13595,-28367,-31899,-16851,-11141,30843,-5592,142,-20468,3675,29899,27642,31340,7973,-8621,23586,-25925,24381,-25021,17849,9490,13928,-8973,23925,-7222,8743,26460,9138,9940,6588,388,-26599,-18170,5875,2147,-15690,-17504,23835,-6690,1818,-19003,21028,-27735,-32159,-31726,-26683,26611,15759,16891,14896,16731,17187,-20578,31857,-3433,-25286,7209,-1816,-4187,21188,-20969,-25102,7058,1155,-14906,21109,-31641,-24007,-5872,20718,-18290,23494,-26159,13799,31444,-13673,-6,-10532,-17536,19910,-21000,31991,-16237,-28255,-31136,9998,-32660,3054,-15335,-20601,-8820,6276,-92,2638,-10885,14492,-31802,18865,26122,-15710,-7484,-28016,14387,-14958,-20457,31057,8755,-27966,23708,-1824,10167,12351,-16045,1864,385,950,29759,7413,10638,-20147,-10835,-8915,14054,16263,219,-27039,20727,-23659,2711,31645,-17667,16658,15178,11355,15216,-16017,-8366,1063,-27651,-31405,419,-32328,-10199,17053,-20818,26738,9836,-2615,16316,-28383,31406,-21346,15319,-31632,31864,-12935,-1158,12065,-29792,10686,-6787,24301,-30763,-16113,7041,6834,2010,524,-19007,31374,-1765,-18671,-7977,-15586,-25371,16297,5605,-14002,17256,-4604,-21182,-978,2232,10915,-22291,18292,-10471,-15265,-11404,-1364,-2163,-12421,-24990,-31282,-1831,-10991,-5780,-21108,16004,-4167,-11055,20469,-23774,21338,6244,-30588,5528,15802,-31384,-31277,-27553,17274,-16394,-19832,-18812,-23057,-30239,5228,3715,-6562,-22234,19987,4720,12708,-172,15682,19286,157,29639,17616,-16133,19139,-8545,-25859,27262,-13612,23753,10974,8186,-19310,7173,-31320,10845,-13785,-20887,-2547,32320,-31768,15942,31838,-14533,18462,7882,-29947,-30167,-22159,-24358,30017,-12547,29903,-22016,-1932,-30113,24100,-6141,1078,10575,1520,-12773,-15491,-14760,-16745,4887,14640,-24848,-17680,2178,-12427,27516,1060,-8870,14961,-1228,-582,22066,4955,16907,-16072,31150,2024,-32565,-17414,15086,8879,30430,-9503,-28999,3258,-4006,-10131,-26751,19316,-29405,18416,-10529,16441,30393,15687,-15259,-21635,-3335,9943,-1001
8,-18542,25596,-7432,-441,18386,23992,24034,30978,13564,16424,-11450,-24949,-26259,20228,14912,-2635,-2089,-1026,25456,-4691,-22074,31844,11209,23681,-30055,-23099,-1773,10367,25353,13983,-1333,31045,-12888,-5840,-4022,26769,16123,15019,-17671,12490,4751,32014,-8734,-30595,22476,-1597,30697,-9879,-27963,-23363,8099,-17351,10871,-28024,-11970,-2818,30245,13802,15297,18444,27050,853,-26800,-11546,-7331,-32599,13601,-4193,22363,-28510,-9172,6649,654,15113,-25795,27866,-25248,16533,-21017,11760,-9859,30633,10586,14759,-11413,-19086,3267,5118,6785,4969,-2395,-17116,1962,4697,-27187,10574,-16973,-19415,-1921,11530,3813,9079,26601,-5370,25436,17277,-30849,-18168,5813,19093,30351,-8034,14779,26109,9440,18498,-27842,19486,6333,30170,30519,25384,-4006,24815,-11104,29091,30573,13828,-29183,-24487,2970,-31181,-21676,29604,19138,-13073,1961,-28200,649,10618,-12929,-11854,-20528,21900,32323,1405,-26471,7174,30206,-11660,5204,2762,-12213,17373,31612,-4600,31797,13551,-7214,-10317,-26750,14029,-27784,32651,29824,31332,561,-29426,9140,-24431,505,-29973,-18953,-4656,-12841,28944,10755,-30088,15395,-25544,-13453,27721,-17711,-22966,-28302,31664,5960,15115,-9784,19387,-17740,-24989,10263,-4129,28120,-10962,-2551,-17262,5170,-15157,-22842,4670,-23018,-22898,8917,23592,32111,-12708,-20606,14590,9619,13941,-25167,31318,15893,-10548,-30133,-17253,-24266,-14395,-8736,-14271,-28580,-29194,-19488,31541,-12749,32385,29156,13742,-29576,-28539,-18358,-26991,12072,29810,-27626,-22929,31761,13334,-26980,-32088,-18221,9853,20154,28197,-32280,-5441,-28014,-31292,22688,25270,-24189,8144,19902,-23979,-2135,4847,13278,18211,-15250,-27892,25525,-31480,-810,25833,-20167,-31071,-18433,-9997,25372,9778,-3779,-16023,-9854,13899,-19849,17641,-19780,5726,20526,31584,-6594,21570,8784,20418,-9916,23214,29415,70,25877,-13542,-4171,22391,-8990,3426,10804,-10238,24082,-12172,-6536,-30017,-31068,-1951,-29409,-22429,-16278,13831,28773,-27008,-23388,13953,3408,5552,-5018,2138,-30198,-27794,-12076,-24351,-15278,-6486,
26114,-30388,1336,-2578,14004,-14141,-15589,4806,-15103,-23889,-20617,5862,3706,-30885,-12448,-30631,20110,11878,16035,16919,2876,-24245,568,-25684,5849,24549,-13191,-11996,23057,19592,749,-14227,-25913,20596,31263,-28702,30511,-18496,-4010,10130,-15738,-8287,-21619,-21544,16846,-2076,-16946,10940,16483,29452,13429,24824,22342,-12195,10942,-29849,-20816,17524,-31690,-2151,-26187,-21489,6246,-3890,-30542,-20592,1162,-21826,-1138,-10248,-16350,25922,30254,-10351,-6412,10927,-3496,14767,-24267,4576,-5362,-29627,-15098,-5214,-3402,-18676,-10692,32713,9258,3344,26136,4510,18065,-4466,-2701,-23049,-27550,-579,24690,-16806,-26148,-31998,-9657,13210,-31046,-12947,-6980,26118,-21260,18601,-2071,-15255,-1346,-27095,-18364,7883,9992,-30411,-17530,109,32213,25477,19960,-2676,-20722,30662,28428,9106,-27296,2617,2332,-32273,30725,10478,-26815,9032,29604,-23643,-3679,-1424,4180,20936,-1742,-10379,-21620,11922,-22338,23545,12719,10017,-2605,-11377,32242,-26075,-23942,21955,-8275,-31016,-8724,-979,7614,2930,-17833,21770,16266,-27920,5484,13440,29901,3009,30699,-10781,-4428,18287,-7735,-28989,13517,6630,28102,18220,26348,19503,-6836,22696,13326,-21493,23153,-3109,-197,8259,28368,4929,14378,-29527,-21906,7230,23185,21598,-16999,-6253,-13363,-18675,-13181,1939,-17245,-18240,-20015,-30716,-6633,-26937,23293,31867,7641,28123,21752,14131,1669,-15623,13698,-24216,5965,-5842,-9398,-6659,875,-30483,31004,24926,26953,-9849,-14612,22796,15784,10299,3701,-6332,12905,-14482,25215,-8047,25914,6603,-17171,-6493,-1117,5853,-24060,3453,32652,-20372,-9347,28241,-29152,8876,-22892,10990,19474,-7432,-25522,-31647,13529,-7892,-26371,-12546,3377,-393,26629,-7539,-17882,28572,21523,-19699,9492,7075,-14574,613,-1885,1869,-20892,-32713,-8977,-16263,-15745,20438,14093,-944,-24017,9787,-27099,-253,32656,4331,29047,-5027,24477,13636,-3060,-1798,28789,-3034,-5906,-30144,-13301,-10618,20304,17898,9472,18209,-8902,25713,-22123,5797,28418,-4245,-6237,2832,26986,14462,7785,9036,-23752,-3187,6724,-9014,-4037,26497,2
9605,-718,-1666,-22981,24930,308,6056,-31711,-10254,22122,11067,22275,-12687,2694,-3527,-13338,5303,-3811,-9874,-27105,-24882,-20523,-26087,3729,3606,1143,-1189,22159,-426,23729,-19814,-14291,32348,-11352,-19504,-18379,-1035,23988,3113,-5778,-27349,-9632,-23508,5405,606,17277,6074,-3022,8180,-27984,-11017,-1292,28629,7476,30804,8775,-15907,21080,22309,21082,2216,19142,3567,-19762,-4646,6654,-3288,-25019,27079,-3142,28388,12185,6369,27492,-24232,26194,11018,24351,10940,-16504,-8166,-6035,27889,-5144,31760,8363,4354,18288,-22248,-9581,28057,31717,32349,-30172,-14057,-19195,-7839,31440,-602,-31880,-25564,-7079,-18554,-31154,-24976,5838,3541,24853,28126,30502,30497,-14602,32258,-15114,-30611,8665,-7014,-17650,-21863,23567,20373,2154,-8537,-2329,-29146,2201,6884,19844,20989,27107,-12173,13959,-1746,-19919,-31920,23610,-31660,18305,-5401,-5031,-2397,23987,-22115,-31502,29970,9931,-10152,2728,936,29594,30034,-15180,32115,-1987,-13045,15883,-7927,-8544,30806,-25971,9265,-348,12527,-19911,9810,-8230,-13753,-23676,-14708,155,-8605,-5012,1176,2457,-16938,763,-24880,6678,24727,14657,22355,-11342,-1706,24726,-4404,-12267,19512,762,4231,-9174,11132,-26736,3758,28897,-14082,-10810,-19169,-22395,-820,-15898,18062,2387,1721,-15491,-18144,25751,-20618,-5471,27934,8914,23103,-13064,-19803,-29228,-32281,-14161,31153,29806,26156,1668,-15111,1387,-4578,-29701,-618,320,17694,11302,25037,-21267,27295,-3408,30708,-27150,11247,-5893,-6879,28113,15722,-12339,27725,-1029,-14692,-3397,-1122,12736,-18286,-3933,12049,-18820,10192,-3964,-359,-23978,187,20062,31544,-9168,27740,-14413,3470,-15686,20356,10316,11211,-1055,-21997,5167,-26198,28404,-18570,12385,18664,2418,23673,-734,-11261,-4727,-7945,-29268,-19382,3421,16364,15597,-6698,-5274,-29794,25153,-25645,29315,14718,25646,-5105,30601,13757,-26206,-17400,27997,9973,-19783,29291,21597,-10545,25254,32258,-24313,19063,21987,2474,-28104,-14187,32359,-32687,-6608,16619,-31263,-16,23059,22976,20742,-14678,3719,23832,3764,-30584,5310,-1918,16258,15593,
-17168,-10602,-4021,25314,-26204,7598,14172,-5294,8468,9909,6559,12536,528,31984,19940,20464,31594,14142,26582,-13614,5918,4668,-31896,-19254,14011,-13258,-17342,-23582,28483,-11089,-29427,19533,-13144,-10105,-11025,-25564,-18218,-22222,32327,-7018,-30739,-4841,15040,-29402,-7494,7273,-17829,9322,419,17418,-10297,-24602,15997,16803,-22089,23316,-12514,-30954,-19129,30191,-27555,-14440,22521,29199,-25415,-15756,28851,21935,-27881,13080,-27801,-32154,16910,20615,24845,16031,23725,-2292,4922,9687,32456,-3777,27870,19502,9554,-8507,-1598,-26475,-9309,-13169,28673,21165,29511,3136,28542,-12910,-14421,-13400,31338,-31037,-20184,-17714,32427,-28709,-9485,-9585,20754,26597,14792,-5657,-21691,24781,-19423,-13112,21176,2900,-17128,26750,4661,-29995,-9181,31654,2182,27550,-10945,-26433,29728,25483,-31307,8057,3902,-25670,-25692,4232,-6602,28604,-17624,-29079,-14223,-21052,-20975,-32501,-32439,-14585,11222,-3984,224,15720,-27735,-32528,7480,1403,-24697,-13234,-11477,-23514,-5331,19744,-19436,-2360,-2359,12684,16013,803,-24107,-296,2379,-2650,18023,32031,-17018,-6998,1905,11307,26356,27370,21145,22822,17153,-4688,-2137,26482,-14534,9549,-9342,-29685,-9591,27462,9479,8894,18901,272,5746,-31038,25692,-26649,709,28750,-16785,20700,20374,23787,-7872,-13935,-21882,26497,-18219,-3989,1427,32488,-20744,-8793,26093,-12451,4183,26391,23970,6778,-10346,4722,14110,10294,-5116,20663,-13935,-25810,-13642,-16411,-28136,26270,-6760,-3789,13607,-30324,2515,-4836,1091,21851,-5733,-22676,-10654,15294,9901,-5957,14101,1878,-9404,28242,-11438,-21409,6,-13454,-9587,5328,-16306,31557,-16598,-27642,-32389,24775,-11494,9332,4179,-19623,-25380,20196,-28885,4453,-10006,-25337,12009,19542,21812,-11841,-3128,13502,-31620,25384,18720,-31573,32047,22814,-15711,24443,8563,23368,18595,6544,-13242,-15340,8699,2542,16860,21667,5949,-23585,12370,24601,10070,-22889,24707,25711,-26796,-14083,-16975,28111,3114,31263,18673,-11356,-23697,9022,-18700,26757,23882,6028,-21631,-16259,18100,-26843,25268,-7779,-30725,3002,2
122,1491,-21955,-20681,-21945,-24828,-8557,7745,-18947,24617,11744,2322,-19525,28445,-3891,23505,13254,-7801,-6786,-24607,-6699,18844,26671,32241,15891,31339,6241,-4764,31478,-16304,-13688,-14167,18326,-24623,27456,3239,-11819,-505,14859,15219,4434,30836,31708,-23444,10477,-17246,-3781,-29851,21825,6940,31536,13735,-21594,-9345,5603,1537,-11784,26691,-18232,9015,18904,-739,8062,24052,-15400,-32073,-822,32449,11534,-11869,-26136,-31754,20235,3950,8293,-13735,8137,-2747,536,-25156,30043,2205,-3998,-9244,15696,26445,-16149,-10459,-16600,11853,9550,-6439,27328,25668,3763,-8553,-28276,9252,18223,30521,-23597,17903,-2117,-14521,-28576,-26728,-5828,-25153,13106,13498,29960,391,11512,-7445,-23028,29665,23787,758,-15723,-7280,-11027,-2523,-8294,9101,20648,23573,4209,3680,-11142,-27646,-13450,-10680,1773,18095,-31452,-3199,-28259,32408,-15973,31230,-29952,20447,25830,16918,2164,-20155,-30550,-22164,14058,29809,-17359,-29849,15043,14393,23007,-27212,-3173,14993,-13059,-23823,-13584,-2014,14838,-6282,9326,-24706,-30111,-21181,-12670,30392,-28676,30727,28547,-21032,-22678,-10679,-30944,-1112,30654,13768,14121,23703,-8546,5113,-28481,-4778,-20471,-23721,-25334,-21378,15738,15772,29143,-23168,12795,-24190,-19301,18081,7458,12439,-25990,-17590,14738,-11578,-6819,-6517,18957,-12693,-11531,-2380,19654,-26001,-27521,-4586,1004,-20394,14294,8508,31833,10577,21885,12758,-4671,-2574,-1392,24231,-31761,30822,-5514,-17349,-24802,-24014,-23353,-17083,24718,-15745,-5502,24394,28873,-30081,-30556,-14442,-3368,-29150,-14346,31389,8122,-18436,5085,2078,11161,29082,-7202,-15165,20633,-15024,-25907,-3151,-10289,27328,31030,-31191,-14012,-31042,22851,-11865,5671,-3291,9579,7821,-27724,1051,8458,29441,21965,-9162,-12026,23061,16991,-30466,-2781,-9676,-5548,-24841,-24574,-1991,25801,13861,-5255,-12811,30734,25766,-5590,-17591,-28639,879,15290,33,11092,-10932,-32217,-7700,16520,-23621,12543,12398,-17023,27910,-24059,-32474,-27789,-8073,-331,9565,-9132,22334,-8790,4045,5139,-4675,23616,20641,13403,196
98,-24863,21447,1838,9518,-22437,2592,-31577,8909,-12553,-10198,28882,-6580,8174,17985,-24582,-19718,-21004,27086,6839,-4283,21176,-18358,30124,3847,17144,11493,-26799,-27341,-15976,-24383,-23423,-15893,25799,-21461,29964,-30534,6756,15206,-10998,31427,-30707,5378,-252,14393,-433,-27332,-24442,-20410,10189,1981,-5791,17109,20809,16322,-7204,7659,5073,-31650,1032,-11021,17623,-6228,5463,21442,4544,-23048,-9106,8760,32634,4852,-13119,-32508,3458,26651,15809,-28554,-30457,-16495,-21700,25336,-11202,-15142,30954,-25186,31142,23993,7052,-4417,-16629,-10839,18706,-11572,16624,-13231,-962,-20717,-9538,-22616,-26829,-21683,-20571,5342,-25826,27787,19249,12002,12966,14632,1688,-26948,13218,30183,-15497,-26594,-20934,-14542,-10243,-28822,17752,4567,-922,-27595,-5312,10580,-29376,13097,23958,-10942,5223,26597,-32124,25537,-17137,-19696,-26085,-6173,7002,25125,30967,-27849,27694,-8833,-17830,-26756,-26109,18582,12816,-28273,18067,-12996,11389,19775,12323,30740,25224,28219,20746,7171,27220,26841,20213,16969,12693,10380,-11246,-20686,729,-24709,-9329,10002,-26383,-4893,19418,-16371,22903,-4089,-20514,5275,30614,-31528,-22040,-4058,15999,15746,-10685,-10909,18393,848,16129,397,-10595,9745,29629,-14919,5081,7462,-6512,-16199,11983,3952,5764,17334,28224,23242,-8350,27491,-401,6843,-18442,-19221,-18219,-18550,909,12651,-12808,-31654,-31228,16215,31988,-9049,-29823,-14997,27669,45,-24075,9281,-20608,-10216,-13190,-7974,-13835,19766,-15516,-3338,-21426,-8853,3017,4895,31180,-30513,11653,-10149,-5340,-7804,-19918,1721,-11949,-1643,-17550,32602,25259,15814,-14909,-10265,31572,19503,-27206,-2231,5862,-5184,-16797,-29456,6429,-5015,11019,-24543,-2114,-2373,-16647,1921,-14853,22505,-19805,-28190,-23849,-11949,3967,-27225,-6339,28263,-25269,-14955,-4152,16389,1662,8322,22671,19304,-30882,2855,4239,4216,-9223,29765,-4361,-24231,-21415,-19050,23117,18319,-10127,31557,14416,6191,2942,-5237,22123,-2565,6019,7249,-31065,-30114,-8284,-506,-14346,28247,12192,26699,-30762,-26573,-14078,-4226,22977,-
7526,4221,9951,562,16935,31746,9463,11710,-32222,-25420,-4086,-13013,31034,1186,-8806,276,-237,-13432,-19179,19518,11857,26022,8285,-10762,-22535,-10737,7218,-28918,12202,2451,31812,-23305,19950,23524,-7934,13296,27850,-26787,11519,11187,20820,26919,15943,-15985,2317,-31588,-2066,-8733,-3589,-20510,11118,-32724,26788,21774,19433,-11428,31173,-28039,18387,-19195,8787,-7320,22861,-17558,-7334,22936,28232,14493,-22233,-18557,-3716,645,-15617,-24464,-21875,-24083,-32661,-4847,27415,-9548,-14313,-24156,17896,22057,-13096,-20068,-10989,-24873,23400,-26030,-25727,11530,-21113,-1237,2572,28648,-12024,30098,4130,9799,-21757,-19130,20036,-3031,-24800,-1832,-644,-14620,5625,14590,20537,16206,536,27833,-16498,16182,22767,12733,-6402,-20001,-3793,-15433,-6865,-6928,18661,-13296,-32357,5093,-27911,-8908,-4320,9876,-19968,-10720,32673,17581,29748,31348,22481,-31178,-6000,4273,-32026,31857,4434,20933,-1365,-29838,-32246,18954,-32038,22799,2563,-9878,-21258,20112,-18984,-10174,-20998,9944,921,12424,16203,-27048,-12871,-7269,20370,26423,6130,11174,27242,253,-31509,6810,21482,23177,26803,14386,-9176,2300,4431,6977,23089,-1671,19504,29731,32168,-15227,3216,-14372,572,22609,3710,-13418,30985,-30562,10933,-14316,-9551,-11856,-8915,22401,-14333,20490,30580,-16225,10634,7015,-28032,29551,-20280,-14782,-8251,-1409,9061,27700,27810,28707,-26697,26424,-21058,8729,-9856,-23075,1935,-14441,-2357,-22842,-19193,-22558,-19654,14868,-15044,-25931,-15512,-9347,22349,-27109,-3964,-6578,17961,-14317,17220,6939,-28564,-12226,-10942,31329,23748,-26085,-23118,4442,-27150,-9321,1049,-1163,11233,17746,-4973,19815,-1833,19167,-18767,8946,-24059,4116,-16480,-1289,-31948,8058,-32212,5956,-19479,23004,-30700,-9828,-16062,392,-17144,-31723,-22369,-14821,27283,9028,16607,-18837,8912,22282,23128,7083,17175,15332,7751,-14036,6026,-3044,22824,4572,-25128,10441,16963,-30289,29422,31355,-14465,-12896,-13978,-11576,501,-2857,-25292,23698,-30756,1312,31801,27099,-19886,-28683,1238,-13484,-1127,-31530,-30532,7564,15676,
-31,28319,11521,26583,-26901,-2525,-550,30044,-5118,30650,-31659,16331,-16638,-4387,-27608,2010,-15037,20294,-22173,-7850,-32138,27548,-27883,30058,-27520,16930,-19637,-15716,31322,-16470,19621,21263,19940,-4601,-8787,-2969,-8668,-8296,28447,21019,-26830,20202,30131,-25254,-6624,18543,-25602,16144,-31666,11452,-10849,-3016,-22271,25152,-8510,29724,-18890,-8858,-27158,-19449,30411,32667,-15233,28025,18416,-21949,-4903,20246,-15935,11803,-8626,10117,28863,14939,28033,-958,15513,-5658,11759,19206,-24787,-6888,31857,-24432,24901,-14320,-2834,-2752,30810,-12952,-19007,25049,1897,-16188,-25885,-12217,-764,7685,17546,28295,7443,28679,23897,-22119,-19328,16345,-7451,-15138,-17816,-20313,6466,-19409,-21629,-1071,-11887,-12552,-9480,-3739,-20011,-16220,15799,16172,-12187,15528,-6384,-6731,32321,32502,14091,3177,-12621,26423,21057,27420,-15957,-7344,-4173,20354,-30127,4303,7594,-21445,5551,29739,28528,-2578,25905,-3718,20808,7993,17955,-20940,4944,32552,4318,18363,-14154,27361,-20614,26517,-24050,24844,19553,13383,-32530,24983,-28765,-29315,-20000,-30422,24407,20130,31663,-3876,8269,-712,9064,17992,12091,7505,24459,12690,22485,27150,27804,25289,11191,22467,19426,3178,153,30646,-27550,28762,246,-6541,13716,-29534,-10588,21839,25715,-20846,-4101,23251,9047,24766,-6096,-22330,10105,4022,24983,25358,-16640,21623,472,-1299,22995,25500,-29321,3687,-10379,-8540,-24156,25760,-30818,-197,27761,-18239,3333,-16069,18001,8686,8392,-11944,845,18511,22733,-23819,16525,-14223,14167,-23169,31256,-21051,21680,24133,13619,-6838,-20117,-30659,-32261,-26103,14478,-12085,28852,-17268,-5358,12671,17389,26963,21261,-11483,-19075,-5285,-5536,19063,23301,26127,-821,9656,3666,2793,-1275,-31091,-10663,-32603,-31249,24250,437,28971,22200,4350,-1437,6426,23684,18114,-23376,28652,-19521,27509,21751,-29732,639,-8664,28486,13479,-8965,-31386,8161,-4544,-2518,15586,-20131,-25065,-10603,19333,2827,-22737,-7178,-13130,19424,-31430,-10141,-7213,-21512,-12674,30483,13664,-4150,-22292,-19178,-16732,8458,-6319,-213
75,-28067,-6644,-17401,-9871,8394,16443,-3041,9764,24545,8228,4441,-30486,23191,31310,-19839,-7140,-2194,-26644,-11072,-15797,10065,-4759,-16871,24887,-2495,-10118,-23234,26914,-25413,28471,-29008,30276,31774,-27980,14659,-8521,-8202,-7660,28349,-12493,-9568,21136,16685,-18624,29466,3313,29154,11238,-28035,18809,13581,4964,800,13659,-8040,-31433,-13341,23883,25204,-25483,-22251,23928,-3175,2882,29839,12565,-2251,2363,18627,5642,1078,-12651,6249,-18968,-15809,-11118,-4282,406,14793,6401,-12326,-26106,-1090,31051,-21908,-8064,8073,-21299,12308,7952,-32304,-11985,23596,-7649,2188,-9258,-16566,-20896,15986,-30330,26550,-9537,-9011,22839,-28961,24627,-9759,-4539,-8697,11741,17476,-20401,26814,2733,18328,-26425,9705,25767,14460,-20532,-17164,2591,-1360,21934,5505,-10024,2322,-25923,1348,9565,117,-17528,-22223,9766,-27163,-25761,8151,-7648,-2536,13515,6575,786,-29227,19658,20802,-12233,-2347,-6624,27763,-31065,-3703,25420,6252,-12227,-29937,16248,22142,-3319,369,22017,11520,5158,25850,-13446,-15926,5792,-31073,-24389,17810,-26782,-16965,-19696,1673,2633,-11590,9434,6987,24976,309,30785,-21694,-29039,26634,6190,7823,-4976,23901,-19681,-14246,2173,30402,-23498,24316,-21046,8459,-27382,18897,15350,8791,-6095,-22907,-856,2523,-28635,-26260,25557,27825,-18123,24758,23989,-22013,-22399,21666,8820,10662,171,-10359,22405,-7519,-13238,1463,-22278,-30946,6539,-12989,7707,24191,-7131,23007,-16778,-13845,-29219,20824,-9107,-20143,-27293,-25247,15696,22606,-5231,3850,9823,-2735,25662,-29773,26482,-32213,14669,19694,8037,-11043,-6861,18969,24101,-30322,-17149,-5286,14123,3187,378,10447,-17853,-31022,23248,7086,-29173,-19979,-4995,-18527,1256,11462,2558,-4957,-7263,-32117,16072,10070,-25454,15312,-6543,25429,-11864,-6891,-9583,10272,16346,-25170,13385,15606,14240,19101,15758,-15853,15982,-10777,-20237,27840,3290,-4286,2612,-207,21511,10249,17754,9910,17155,2137,4739,10228,-6474,-9157,29703,-10986,8700,-14125,-10015,14991,-12241,-16263,14241,-6790,20060,14052,9377,17825,30677,18901,27376,
-29869,3695,-6804,-30790,-18280,-9569,23744,14853,-11911,-30240,26506,-29244,-29019,-24599,10078,14256,14925,10968,4511,21790,-25252,23980,-25852,-604,-20060,3723,1113,-32564,-23286,-7338,9701,22727,-21034,-26807,-17762,13058,4915,-73,31956,16391,-32117,16527,-30538,27107,27854,-17359,11843,-22691,-10102,10297,30787,-2258,-25466,-7119,18047,-16810,-13903,-30388,17124,13709,14653,20865,-10663,9531,-13031,8641,29991,8996,-28256,-12583,31171,-21115,-30948,-31345,-6977,20186,-7614,-2190,-19366,13358,-26971,-7676,-10584,-14453,29160,5142,13900,-11307,-14223,24160,-158,23470,31864,-2981,11300,5935,-29631,26142,408,30903,-28111,6353,26907,-19065,6812,-15591,21341,598,822,-22716,-18496,-2064,-1930,15226,7429,-22970,-5328,10133,-15121,24718,26898,10326,11863,21569,32573,-673,-8218,16224,14764,11873,-5697,30941,23710,24656,20434,-8149,32343,26083,30743,-1239,6937,-27805,9416,17272,23185,27147,32421,23380,28717,-29315,15900,-24945,6990,10265,31978,-444,17207,-9421,-16020,16412,22810,350,-23821,-17627,-8730,27786,13043,20137,-5421,3378,-4583,17647,-23403,-15023,-28386,29288,-25544,18683,-31763,-1635,-20054,-4025,4888,-7910,5068,13702,27390,-12412,-12603,120,-16775,9157,16703,11438,29663,-6464,-9332,-29779,-13351,27385,-1593,29007,2272,27580,-24754,-27639,866,21460,24571,11774,26779,-23850,-30763,20214,6090,-16723,26148,9769,-5210,-25888,-12206,-7490,3553,-13768,-6275,-2915,-1888,-4805,-22166,-15139,-22802,-15469,1005,10195,-23439,-22091,30947,4153,-15180,30836,13723,-10040,21423,-9615,-19848,11326,-8578,-8182,-17071,8984,11590,-23254,-22313,-29629,32182,-2813,11337,25364,-17620,30657,-25445,14650,3878,-12200,-17558,-14005,18994,-11290,-274,6124,-9880,2911,-13054,23640,-26387,31855,9184,-20537,-14325,31140,31620,-1565,-1347,12774,-6290,-3959,32699,-27543,2194,18798,25594,-21409,-10895,16162,4052,30247,-15287,-2931,-2053,16412,20724,-11674,-18476,-12048,16917,-4129,-4097,16946,-10644,-15026,20577,-4303,1381,7625,-3095,1103,-31467,-7392,-1577,-20966,-8382,16142,-9688,2868,-13535,-
7989,-28078,2817,-676,29760,4494,10637,-18775,3403,8829,19314,5522,5347,29620,99,-7510,19366,-22466,26854,32486,-443,-7418,-11743,-26316,3846,24778,2198,6518,-22808,-24963,31799,16595,10411,20454,-13958,8189,-1988,15181,14516,6792,10053,22600,2082,18316,-19862,-8853,15658,28043,-24111,-19530,-14386,-7355 diff --git a/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice2_golden_int16.csv b/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice2_golden_int16.csv new file mode 100644 index 0000000..dc29f0e --- /dev/null +++ b/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice2_golden_int16.csv @@ -0,0 +1 @@ +-20782,-22449,-15984,245,27464,-32660,26939,-13220,-4753,17670,2648,-19735,3648,29684,31878,-22550,-10332,-25981,-7459,30209,29116,15537,-7600,-7211,5067,-26101,-21455,-29883,-23995,9873,-8124,-19433,-24910,-9723,25372,11509,-28099,-9147,-26285,8464,-13082,-11248,14726,-31175,1398,-12471,7200,19362,26453,2115,20499,-31268,8199,12660,26609,-4530,-11321,9273,26028,23234,6989,-16352,8577,19173,-13828,-10926,2639,-7207,22073,1894,-13748,-28139,-25463,12687,-790,-24139,17557,22791,20454,689,8472,-16875,-27996,14836,-5967,-28903,253,-6858,-8645,13638,-2551,3970,10733,8501,12650,30429,11661,4196,-22928,-2636,13225,-23874,11745,5864,-13875,-21623,1313,28485,28296,1604,31562,-21211,-31045,-26520,-24061,15827,-7639,25126,-535,-17684,-20518,17227,-6731,-3521,-15899,21860,598,4843,22025,-2629,20160,21108,-32538,-31665,21741,322,25317,12392,20594,2473,16099,30198,-30390,6150,14907,-18752,-15051,-23486,9691,14986,17691,-21828,-25628,-30813,-17435,4576,11667,21017,9056,-1162,-11300,-10719,10718,-11095,-30985,1357,-31354,19535,-27906,5372,25359,-5758,26694,16310,15697,-12197,-19949,-25343,12158,-841,-28966,-1725,-11331,28338,-10408,-31670,22514,14475,-7193,-23996,-4752,-2305,-5901,29674,-25661,9982,14172,-28114,32023,27278,-18147,-8251,-16929,-3435,24704,-21992,-8566,-26348,11767,31675,-25724,-32485,-3030,19201,1126
5,5543,5393,7897,-10657,-11771,11609,-7692,-30560,-28076,9236,1012,-17015,-14301,6485,3102,14263,3234,16451,-31899,23888,-3562,-14235,28793,7936,23119,9990,-11169,28415,-20578,-16160,14457,-30162,-13580,-19722,31721,29387,-23114,-24562,26663,7189,-9751,6531,-22373,24815,1258,-14076,-3762,19692,-4657,-27151,-28056,-16471,-22214,-7249,-11195,-26652,14309,-16647,-682,4488,-9946,-6817,7463,24930,14501,-24893,19601,-11652,3265,8529,-13272,31899,961,-28537,-8354,-26918,12789,31535,-14025,-18537,32627,14270,-17168,-16838,-3183,2108,19535,8888,24509,28011,-15845,-23086,-5533,-3101,17862,-12021,25594,-32108,32250,-920,-30144,-9226,-25372,28539,20710,13559,11864,22099,3987,1870,-32602,-7780,3900,16939,3433,-27369,4908,-22394,-15004,21515,-9699,-27185,-21674,21066,-350,-30514,30833,18426,30428,8448,24688,-31641,-20292,3853,-10671,12934,31184,-13503,3605,17681,-26254,-3678,-8597,8689,-11726,-6285,9417,28069,-26180,-31272,26522,-1436,-25934,9824,-7015,-20051,15420,-12874,16097,-31732,4361,-18617,-22384,8224,30193,19004,8371,-24727,22290,27260,-1788,-8606,4948,-12883,19736,-27851,-24940,-16107,-9770,29875,-28243,-7610,-16506,-5716,9652,-943,24694,28045,14406,-28752,5089,-23504,-14867,13230,11673,30367,-29388,17127,-18287,-14075,-5596,6798,-25645,-21838,16809,7228,24084,-10567,-21321,-16253,-21337,17934,-8406,8293,-7180,-23401,25351,6126,-32146,10263,-9751,-20674,-19646,-26478,14167,11907,-3253,30534,-19670,-13673,32181,25468,14921,-7703,-32497,-2179,17775,-27770,6978,-6574,-32154,7538,-822,28688,-27428,-2862,-25335,-22756,-25046,-12584,29987,3794,-6140,-27222,-7406,-13276,28452,26189,-29840,27025,1530,24721,633,-15672,-30433,-3124,30434,7648,4613,-31391,31276,-11391,-16837,-13955,-23877,11541,14004,20083,-17205,2391,-4106,17064,-30370,3371,21821,26570,1558,4578,-30101,9956,14113,-13049,-7426,-14284,24990,-30792,-17938,-29246,18912,-16418,-18528,-4278,26195,-19381,13039,-26705,-28869,-7693,-30143,4540,-3290,-21921,12017,24077,29928,-18351,-230,17550,-14867,31431,7918,13095,-31947,
-23539,27213,16494,-21444,-25649,24462,-24257,25313,2957,12049,-3354,-30303,-10033,6952,-11414,6627,-26549,1147,-17103,-24908,11883,-29282,3792,30570,3599,5506,-14708,-6191,3736,23398,-17417,11728,29005,22649,14204,13768,3548,-27310,31183,26360,21621,-18139,-32283,28732,2162,-4042,-7682,-4585,-28036,8598,11527,-24979,-14706,-7543,-16203,9221,-18865,-12219,10008,-28686,-27899,31096,-1247,-20174,1210,-24025,-13960,20866,-9070,24682,22515,-31522,-23224,-1256,-25064,12557,4087,-1964,25873,-16381,23494,-20451,28532,-10139,25338,1605,-24256,17614,30387,-6925,-18604,23435,-8538,-24724,9654,6860,14422,-26817,-13665,-20018,32588,19992,8297,-17617,-27948,24855,28122,13155,-6993,-20194,-4977,-18889,10827,5950,-13816,-26636,-15157,26778,30250,-26233,-27120,29870,5309,26284,7204,5722,-19417,29168,-10847,28925,-10549,-24433,-18430,2662,9463,1522,7028,-3383,-112,-26565,3173,3249,18305,783,-3918,-19914,10606,-17738,15779,30608,32231,-30204,31989,6596,22920,-17991,-20408,-18631,22078,3476,-17885,18083,16798,-2445,5556,-18060,-16961,-30350,15592,27986,25762,14746,12626,-5347,-16768,29235,14000,18013,-15734,-11942,-12265,17509,-30269,6023,-19689,12682,-6163,-22931,28093,16416,9153,27950,2255,-15053,22622,20661,-10360,26480,26332,-4919,-30839,291,9205,-10805,960,28887,23234,-31023,437,-115,10817,5186,-3352,13491,-19067,20319,-8330,-9511,-21070,23204,6370,-5572,-30960,7726,2218,22297,31025,-6208,-18062,-15988,-27482,-395,-18856,-6600,20135,21694,29928,-24496,24109,-21697,18765,22286,-6955,-7839,-21615,12688,-32553,-27072,-9188,21482,-28709,-13795,-15764,27488,-17522,16256,-23217,7693,26435,-838,-926,23973,29895,9203,12977,-28809,27231,-10429,-23601,12173,-23085,-9065,32606,30078,-15818,4194,21433,3703,-7441,-629,20074,18800,19999,26493,-32574,12006,2426,31868,-3534,-17807,-26081,-12710,-21186,4923,2260,18415,-9691,-9776,30809,26249,7571,-21926,15855,17339,-926,20775,30654,12279,-29544,-18036,10570,2368,-32207,20692,31404,-10498,23783,27296,3435,-23728,-14160,-16147,2928,11618,20516,-208
83,-18680,-14106,-31837,-1165,-22677,32083,30418,12100,-24724,13386,-3030,27653,15093,18499,-27526,10617,-14928,31384,26798,-30301,11522,-4341,-21067,-15173,30071,2723,-9284,-14654,-28593,2480,-14291,-32392,19842,-24438,29408,14884,3509,16747,-12893,25431,-15243,27297,2802,16844,7084,19030,-24671,17980,-12762,-21931,-23938,-14626,5401,22907,-26323,31214,-10940,-20668,27890,-31671,1807,-4586,-2436,-18433,12527,-11915,29085,30449,18656,29346,30103,-16332,-28291,856,4872,-6374,-10586,2480,-32026,-25014,-29728,10897,-27639,19620,-9969,22237,29725,10997,-6672,6695,-12006,24823,29392,27579,29242,-10562,23055,3214,-19900,16873,8647,-9814,27437,2195,2950,2494,28595,-2691,7382,-4077,-10303,-25487,31263,-18858,16198,-26633,-7439,984,-25583,25303,-8872,26790,-10198,14012,9208,-27752,-9413,25433,14082,-1592,12593,7563,32624,8301,-27964,12246,30332,-14181,21565,18505,-20091,1060,-17004,27561,20997,18683,19273,30757,28015,-11426,20006,21217,-31721,15623,-11423,30997,19690,-4966,23645,-9329,19847,2219,24772,-12411,19879,10091,26184,28368,-10717,16866,213,20451,-26815,4336,-25809,-13831,21111,-25977,10945,-18986,-31012,22360,-3462,-28005,22056,-20905,-18755,23287,20082,2221,25060,15220,5438,-21102,22744,10066,-16979,8390,-17741,-24018,-19968,12890,-6512,702,-26993,-17582,-1445,22513,-18745,29329,25283,3081,2693,21422,-22063,12246,-13237,-21225,-23503,9785,-966,9500,-4413,23864,-23011,3376,912,18917,20178,-450,25867,-18011,18473,-4989,22935,2932,-29681,28315,-1140,-22700,-11824,-23355,12186,27958,21175,-20227,23868,29148,28101,-20578,13082,24741,-31096,-31133,-15142,-3662,-28652,-15661,-1706,14331,30815,-11143,25675,-13544,16003,14312,-19391,-300,-3668,-4693,9944,-7719,-1043,-23485,2096,326,-4542,-11324,-27269,-17581,-7755,4176,29729,16199,17668,8213,-19684,-30906,-870,-16318,7732,12239,-79,20341,-12891,-13319,-14065,-26372,15320,-31099,-25471,-29687,-24606,8521,-4596,2089,-2039,-9113,26871,17176,22509,25071,12198,16157,-6192,-5775,-8449,8333,-6715,28864,-18297,-26117,-23546,21134,-
22587,-21970,2371,15866,-31876,14761,1404,-23563,14566,8788,19289,22885,-4004,6448,30010,-16415,-28175,-21238,-17961,-23178,-7605,-24466,-2127,14192,9194,-5475,-19518,14486,14283,939,-6283,-10806,5334,-18552,-17349,-21588,-4412,-12532,3249,-4766,-5169,-28406,16335,-28803,-23284,-7707,-4603,6120,4305,-1977,16648,-12506,26936,-6736,11662,-18190,16250,-19492,-10333,-11632,-30553,6865,-27440,25205,-19661,-20296,-27544,27188,-9207,-9645,-9904,7715,25101,-7334,25509,25415,-31629,-3471,16299,-13516,-4643,10158,-29750,-6231,6674,-3977,21081,13696,19840,111,-25868,-28384,-11878,20662,-21139,26862,5550,20849,19581,1165,-8446,-6023,-19612,-15564,22437,7953,-27347,-20677,3147,623,1508,-21461,-14500,3161,27147,-6671,30308,26271,27654,24541,1600,17257,-173,10180,-9352,-12646,13324,-8367,7650,4290,-12642,-1970,12234,-23336,-29780,-21822,9809,-23303,-31089,20184,-14223,-18983,-4418,14464,-27231,-21132,25079,14964,12676,10202,26332,13846,20993,30418,-31270,6681,-31103,20751,22733,3731,26348,-21267,26138,31376,26395,-19185,-4972,-24584,-4880,-12230,9356,-25566,8872,22637,912,16359,10222,18049,9707,15218,-18243,14464,3682,27116,28768,29954,18680,-22406,-11147,4811,15161,15619,23875,-28068,19146,-11490,-30103,27921,27558,28292,2436,-21011,31902,-29209,-15652,-14297,-7516,15947,-18968,26907,-9124,4444,-14928,8315,25417,6382,29454,30913,32667,-21726,15536,23476,-27929,44,11710,-31325,-16236,7495,-18885,17867,13714,9385,13141,-22054,9654,24061,22111,-2995,-30011,2890,-8770,-2898,-22101,17204,-30370,27700,-26985,29357,-6983,29646,-5887,-28461,1542,3025,103,11594,10943,-13564,18862,-4218,-13489,-15227,-3200,-27515,25713,22200,10180,27757,-31231,-2754,-9692,-28589,-3440,28751,30771,-2231,11409,-7715,-31129,25618,-22051,13635,9779,-30622,-1951,-8655,-8553,-10516,9783,3099,-21323,-24747,29129,23835,-3066,-9701,-6599,12081,1807,-3983,3207,-23400,-6714,-23500,-17695,27877,-4468,30064,12566,-29754,13997,9616,-14014,-10109,-12791,-13157,26177,-25976,9665,-9265,1035,16849,22992,24574,12916,-32466,9
825,5257,-5974,24148,-8238,25160,-8076,22445,13399,-24036,21872,-18743,-26800,-25307,18647,22503,-27426,-14255,-26968,-9021,17541,-31317,5510,-19800,-24961,18922,-18414,-32564,13688,-6523,22334,-8761,1820,23933,4260,-14447,5524,20874,-18612,-29553,-19323,-25708,-19546,8470,23114,18013,-11575,-16209,30800,-6660,20669,-19619,-21540,15647,-24762,3513,28159,-26794,3177,-11777,5322,-7605,24766,-19769,133,5036,21536,24200,19605,-11387,-25068,11847,29012,20975,-24893,25867,-15514,24660,-3537,-4979,7557,14570,10497,-2950,29725,21369,31875,24151,10716,14883,27813,13181,-18867,-7237,438,17491,2306,-28960,-7873,16386,-22263,30028,12048,19169,-11424,9399,11446,-5500,-23495,-18117,29456,-24904,-20666,-30211,-29347,11858,-29763,-22739,-11586,-348,15001,31460,14332,-11231,8327,25119,-32462,-23861,16246,24969,11931,25112,30110,31644,6937,-13989,-15556,-9298,-27241,-29749,7821,21478,-22454,-22880,-22278,-14713,3891,2266,1335,30569,16743,2458,-3045,-31218,17589,-13726,-10510,-19942,9662,-5047,-18353,-19973,-15571,27775,-14562,-19570,3994,11570,-21458,12114,-12366,-7417,17312,-12556,-8572,-22794,2927,-9618,7370,-28458,28826,-18616,17706,-6901,-27162,28711,15000,-16948,6746,16515,-28736,17274,-29379,27271,32695,18243,-8544,-3343,-16982,-2195,-12026,1679,13563,-19228,7804,5962,21653,-28398,15759,25083,10960,-6689,11665,-14703,-12825,-10056,2670,-30205,1990,-9482,10971,27927,2316,7487,-22147,-7748,8288,13230,24271,19604,-18795,-27362,-5804,-1428,-2725,11573,25949,-16053,29262,27120,6553,4557,685,6124,-13196,5774,28154,2624,-28132,27045,-24393,-13166,-24153,-18176,-11099,30769,8787,494,-11513,-17463,6037,13513,-12931,-5718,28157,389,-21119,11288,-20840,-372,-32245,29774,-31780,-31936,-29284,-23897,-5887,29121,18936,24113,708,2044,-20547,-13682,-29841,-13387,18339,-32346,23355,-17784,-20181,11627,-31385,3533,-13585,-18486,6195,-12860,-16958,7612,-11035,-2438,-23315,-73,22730,-30706,-30259,20446,-13679,-32304,10107,6358,13518,-402,-5806,-23541,19643,30214,13602,-8098,22377,12010,12261,-2998
6,31800,-11460,11134,10876,29358,-4979,-14811,-17510,16770,-25696,15620,-5202,8349,-31781,-10755,32457,-18389,-16483,3699,23833,13243,-26492,19380,-17832,24255,-626,5376,-3790,28898,-19923,25523,-3059,-17364,-16784,-20163,-4597,812,-30603,-30966,-14893,30730,-12782,-10591,17868,-2085,13536,-2785,27250,22494,-5775,-10504,25111,-24314,-20464,-15812,3574,-4425,26898,10724,4174,28150,15561,-18229,-6161,-8242,-8143,-16449,16652,-21926,30494,-2867,-19447,21675,10503,-12095,-21539,25117,28586,24715,-19556,-9034,31859,4297,-2265,31022,5041,-22449,11776,30687,27726,-8705,-27600,3634,-24440,14934,22131,-32694,22269,29968,-17811,-32255,-23749,-15881,5790,-11091,-22367,-8509,2320,27218,-27602,-2218,17608,32625,-20835,11452,-2388,-30477,-28723,-18931,15237,-9885,26136,-21753,18840,13991,-3367,29091,28426,26185,-328,30925,-23085,26443,-13507,1639,-8240,-23454,-2848,30444,-12848,-20090,3829,-26285,11650,13639,-5793,19058,-17312,-29108,-32648,-7802,6991,-20806,-24905,-32174,1748,27761,19396,-14364,22399,1505,-25285,7299,13732,19434,3997,-30395,-27086,-22977,-25740,-5450,-22364,4246,30833,14516,3717,-3359,-20388,15011,26527,27790,13440,5063,-31313,-32308,26993,11177,-3822,-22321,-16516,21065,-32082,15875,1470,-9825,5421,22240,28182,-17274,901,-18173,-826,12177,-16825,-11896,19965,4551,16324,-4135,-15099,18971,13679,-32166,-24312,5716,-17629,11513,-9866,18139,-31940,-25308,18946,-24813,-10322,8515,27834,29034,-24273,27504,18108,9009,-21122,22307,9190,-28915,24443,26707,-5373,20553,-29928,6826,-18389,28217,-28540,5990,-18870,-22246,-31047,21991,30915,-30312,-31307,13495,-2129,15762,24765,-29556,-28667,-11456,-4995,-6440,23318,-2663,9530,-15661,-14551,29434,-20830,5417,-17339,-18934,29063,-16151,-177,-20304,16246,24349,-26053,-536,-1994,-21298,-1819,2588,17222,-28051,-6712,21263,-603,-22819,21709,32234,-19231,-5532,12797,-22189,13738,-21610,17854,-20268,-13152,1711,21776,-23989,32665,9097,11428,19735,-26018,-17822,5800,30928,24489,10673,-21765,-15393,23747,-113,8134,-15178,5193,-13406,
-16973,16262,-11297,-14267,6407,20523,-31404,-22971,12536,-26415,20278,25352,20541,-5442,12359,-27851,29940,1573,-7370,27214,20278,-3163,-19888,23727,-30485,21726,9053,-14813,-23599,-27675,-24032,-11012,27766,-15943,11502,19904,-18055,25135,-2189,-29470,24086,-14009,23115,-12320,13440,17255,-31863,-19637,-4453,20998,22763,9710,-16200,29832,-6547,-18441,1410,30789,-16102,-31227,-12072,-15450,-28518,-6248,-9774,-7485,17883,-11473,-18332,-19597,18971,26908,4842,11236,-2070,-27996,-30543,-21017,1166,-26559,16260,-26667,31206,12668,7409,26984,-6621,-23749,28552,-15808,28337,18463,-23494,25334,5420,-20070,10807,-11437,12714,19987,-14371,-21749,-32155,-197,13585,7657,29122,27449,30959,14010,23674,-9564,-22034,9548,26746,21175,8142,218,-19936,10329,15339,-19830,9254,-23689,8038,20679,-28301,4102,24135,21868,-8812,-17993,-7515,6912,9027,-28211,20168,6012,12001,8213,10986,-20585,22947,-25650,19222,-13071,-32428,-26876,-13801,-17581,-6642,-14110,-18092,-11666,29065,-26390,16816,-31092,9485,-31369,18216,-13755,-26063,-15003,8366,28360,-32627,31404,18513,2945,-11367,8634,21967,-13109,-470,28804,21471,-29505,-29051,12913,21847,2987,31046,-27480,6497,-19053,22224,12947,-32485,-14542,-1356,-28862,23062,2184,-10337,-20676,-12284,-9974,-1353,-6239,-12493,1546,25402,-2620,-1237,19135,22073,28773,-30708,-11426,-23561,16320,-32747,-25195,-28442,7301,4583,-4881,-19845,12513,24374,25931,-22426,-32182,12091,-21462,-13298,14122,-23065,31501,-21569,-13590,-17694,-3748,31881,31805,-17738,5669,-10436,11738,-13500,1558,23703,25416,-6039,23580,7096,32190,7364,18834,4025,-20935,12916,4509,-25984,-3397,-7104,-15356,-2444,3539,-24664,11488,-14313,24572,-29198,20933,15161,26493,14116,-3059,29623,-6204,-31357,-25041,-19429,10285,-24127,27211,27671,32753,29540,5298,-3712,-18454,-28391,12696,6207,-25991,-9640,26706,14825,7241,27275,2614,16752,6529,-23283,-20322,-6479,12734,-15801,13000,-16061,-30125,-3754,4572,7765,-9094,-12559,14047,18465,16915,-1966,-28136,-10962,-18912,-25330,19616,13537,12806,19808
,-13166,-12553,21483,3200,23587,27069,-29426,-3136,18320,-8525,17303,-14083,1260,24400,25147,4846,-19446,4939,30363,18831,8905,-25429,-15120,-28935,15065,28343,30247,-25512,-17788,17002,-12742,-2457,22535,14180,-20088,-11348,-25564,-25242,-18275,3565,20629,-6566,-24762,-27497,25300,32065,-16001,-25891,-4486,-8658,-4597,-15372,24487,30085,26387,32562,26910,10744,-19348,-22650,2983,2595,-20364,-16765,-5645,31073,17542,23390,15826,17552,-18198,-13449,25502,-30482,4334,20434,-15968,-14941,23037,17664,-32022,29945,20450,25255,2086,11598,-9326,10759,-9786,-15800,-24080,25774,-13170,-23915,-14476,31723,19333,-16270,-29444,7863,-31624,8845,12243,-16670,-7261,20816,-31882,30548,29752,10687,-13180,-12480 diff --git a/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice2_input0_int16.csv b/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice2_input0_int16.csv new file mode 100644 index 0000000..8b3591e --- /dev/null +++ b/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice2_input0_int16.csv @@ -0,0 +1 @@ 
+25306,-31090,-17265,8918,30847,-15070,-32112,-4541,22588,28282,-13933,-23508,-31985,-29329,613,-23329,26717,-18698,-6986,-1502,-6323,31187,25837,27327,25730,-2600,22655,-12153,-20258,10952,5537,5734,19110,-32288,14614,27953,20351,-4779,-10489,9137,-14973,14020,-28949,25621,-19797,-11884,17428,-27342,30702,15755,32705,6092,12061,-3650,-25303,-19892,8042,-25267,165,-6136,-22760,-18782,20382,17015,705,-362,-17071,29677,27829,-5650,-7370,22120,-4990,-23524,-6847,308,-22830,-24353,-1779,-15819,3881,1815,-5280,8197,-4737,-680,17457,20885,-5669,-24393,10402,-10303,-6520,-14712,-18901,15416,-30016,-24423,-1470,-7603,17270,-23835,-2695,31262,-4489,-21590,23858,25011,-16896,-4095,26019,-12852,20745,4739,17870,24110,-25480,-4012,27285,-15366,-17121,800,11384,-1114,22932,10850,-30408,-24292,-14973,-825,-17971,21914,-2844,30601,10560,5147,-26010,17705,-1088,-21807,32474,10792,-3567,24412,15903,-28511,24196,-7304,-21305,-22731,28155,-8133,7662,-18581,30448,18384,-11549,-12653,18153,-4464,32697,3617,28965,5567,-12365,-8406,-26940,-27266,-15162,-14992,6188,-10441,17920,2869,27751,-19301,4562,16537,32208,4818,-21218,31694,13481,-6995,14511,-4077,-18224,6804,24648,27495,32352,-5306,25134,-23677,12901,-24293,-23000,14288,-20078,30360,31702,1981,-14052,24780,11946,29568,15584,-27717,19917,3709,-14105,23423,-32287,28252,20665,-28448,-20301,30464,19330,-31367,-5709,-16365,10535,26385,3748,-25901,-30396,12854,398,-24760,-30547,-22176,-7129,3927,4019,1970,14776,-29885,-19005,21043,11006,-28475,-8710,10998,4248,24500,-17606,259,-20703,216,18735,-14363,32084,-7563,-608,-9683,1591,13172,18990,-26792,-19550,21915,22231,-22341,-8898,6374,10046,28507,-2033,21964,-30717,-11194,-26367,-27109,26449,-10723,-17598,31212,-1165,7402,31241,3113,9465,-24728,6549,-29482,32310,26100,-6720,-18661,30578,6522,25184,-5276,13362,1720,9780,-6208,-30540,-7143,28534,-27984,2063,23261,-22180,2885,12150,-29471,5510,3132,6002,-20720,28922,5552,12957,-5448,-16502,-8243,-30083,-12849,3608,10091,21127,17045,-28239,-215
34,13933,14120,-29027,-7510,31037,-32584,-29395,-15937,30052,26149,1462,17602,20039,24738,-2866,-10868,-19913,-2092,21779,5406,28733,-10480,17293,-20407,-2749,-18885,18540,-20482,12003,10051,-2677,-20828,22200,-14198,-19549,1110,26605,-5680,6297,10480,-17998,30806,16911,-22982,-9687,-7315,4240,30562,-2995,-1342,23257,11767,8784,24345,-5998,28134,143,27794,-28406,16831,3391,27826,25144,31589,-5947,32656,-15132,4216,-11421,6354,24776,-5207,-26037,-23690,14831,17584,8541,32188,23124,-9580,11232,-10774,25064,-31853,6544,15006,-13626,13569,-5090,-24460,15918,-12037,-5131,-1213,-2833,-20616,-20140,5386,-22000,5153,3484,9982,8845,-11617,13012,11236,9694,-28546,-26058,23590,28072,29358,-24691,-8282,12225,-14883,5123,20146,-31989,-10451,-22948,2509,2790,27225,-8534,30647,-2895,10036,4468,16019,9769,21270,-18619,32301,9824,-10394,10719,-26697,4616,-25630,-2824,15596,17963,25532,-28819,-20665,14147,18912,2786,13022,-20736,17870,15985,-9772,-22360,11824,16396,6502,32651,10448,16999,-14512,31967,32021,9898,13009,-6018,3711,6104,7887,30666,15068,-19757,31852,18899,-1827,21194,-8798,23810,-28942,18811,22323,20472,-22440,-29809,-21646,30480,7440,10125,26827,-16937,-3234,-20644,-9355,25076,-19028,-25178,21139,-28876,27581,-7961,-5975,20305,-20486,-21041,-8868,-4361,-4080,10882,-1063,-16820,10503,17232,19923,-36,-9486,-6168,3688,-963,5688,25633,15810,25367,-21566,-32693,-9546,-23691,-26328,-13544,-17528,-29122,14940,16975,-31132,-9490,7199,30492,14768,-23560,5831,30979,29182,-1437,28113,30137,-7718,6756,-9710,2094,8377,20840,-13824,7465,-10628,7576,-26365,-12146,4174,4916,26075,30278,-14675,-3178,19898,13384,6120,726,-32410,-5284,-4586,21572,3558,-18827,-5407,-6814,-31419,-17215,27280,-11537,9758,-31187,22293,8787,-8469,-13811,-20548,25,-12446,2946,-13984,-13936,26882,7122,8308,-15687,17771,-1208,18433,29628,1924,-1413,-539,-29091,30758,3705,-23102,-27953,12901,21049,-22621,-22614,21429,3939,-5773,3596,16361,6121,-16400,19411,16563,-15042,-16614,30310,12742,13497,-11947,28193,-5475,-
4677,28479,-791,4447,-32368,19639,18266,22388,-21653,-24214,6671,-11022,1414,-11806,8626,17273,13004,-11708,-15560,27771,22288,-19319,23460,-28325,-20157,-16669,-6180,-26148,21870,20460,2924,3990,23951,17650,3929,-565,-21887,27951,3259,31682,-31729,7985,-19423,20299,10364,11157,30745,6185,14655,29,-28221,1594,-18959,9886,8100,-3396,22417,26389,-31445,25782,15906,4439,-21050,-9268,30609,5137,-723,-8867,-29843,-14006,-23870,24021,27680,-18508,-25267,-29597,22936,30875,-1453,1721,-21739,639,-11758,26861,-22208,-6514,29249,28547,-23283,-13900,8049,-22298,27046,5043,17582,-16982,-31229,22684,19492,-5250,-19922,23747,27564,19591,28280,-9326,-6342,12698,-9478,17856,15504,4841,22284,11497,31368,-20274,14871,-30771,-8002,-16509,14836,-16245,24549,26360,24737,-29986,6800,14934,28530,13270,-431,-21144,16981,-21339,-2211,-17877,-16984,-15066,-7565,-11423,12764,8694,-26015,24560,11180,20323,27872,-2789,30389,6860,26785,-27634,2722,280,8787,-25172,5862,13028,30736,-21865,-18497,-5118,-18909,-20300,-18204,32676,-30005,2683,-17833,-16454,26337,-1984,806,30718,-21049,32186,17989,-20962,-18556,-303,-19353,24353,13733,-196,-25264,-23116,16687,-5218,-23140,-4063,-20199,6390,1445,-7476,22643,-14375,-31461,10950,-10116,475,-11611,-28194,24546,5576,24786,12997,19746,-26688,-18107,29160,-25433,-27722,4652,28116,2959,-12626,-31218,-32467,4701,9816,16455,-25293,-8146,-17661,-30195,-20551,-19361,20903,-27446,657,14746,14251,-30809,3885,31192,10932,5371,25946,3476,28746,-9221,-29115,-9405,21307,-13714,-9378,-30764,19281,-28164,-10211,-1781,-25386,8515,-5089,-15482,-5540,-32310,19507,17459,14583,-23433,-23536,-22043,-14634,-20398,-13110,-21719,-12879,13291,12035,-23848,8563,-18584,-2232,17862,-15384,-1124,-16771,29930,-4175,18924,-17667,22596,19168,20859,32035,-1960,20027,3427,-27493,-22867,16978,-31420,-20415,-27515,-13759,19606,-24807,16522,-18933,-27679,-8026,-1925,-25814,-9860,963,-7744,4504,5497,-20686,-18496,-7946,31713,28683,-28740,30514,25820,-11031,-3315,-3257,-544,9210,18730,23245,-18
670,31887,11385,-26629,-25882,22645,30830,-17251,-12471,-19725,2070,27542,8545,-26921,28305,14092,-4771,13316,31207,8736,-12780,-21615,-553,26980,7964,-11307,16439,-24953,-8805,-17576,-13172,-7184,-5300,19272,19026,-9460,-11412,-606,18854,20480,26487,29199,29621,-10033,-4507,-28071,-15143,-2103,26302,-5290,26513,29882,5608,5028,10915,14998,-13043,17667,16276,-24992,25954,-11970,32302,28668,-11980,-16945,7467,27387,-18686,23531,25344,18848,9452,31540,-21378,-15774,-15849,18670,-16662,25804,3912,-31062,23362,20658,19869,29623,-26204,-13824,-4278,25764,17863,-14011,12057,-9225,-8078,-10653,-8662,-22409,-5368,-19253,18167,-13065,8148,32712,-10873,7925,-9201,21179,-19554,-31716,-4430,16053,-29181,15833,-24727,13571,-24711,-14075,29187,24201,26675,25351,32189,28608,2842,-13925,-12184,20522,545,-12036,-30691,-16415,11624,-23922,25049,-25584,-6995,-5602,12544,13044,-15526,28433,-17945,26076,-31941,-3979,22349,-20503,-10058,-32463,9014,6095,13484,11487,-2469,-25139,-21152,16134,626,-27612,23587,15004,-5991,24668,-12107,-3506,8709,31490,-5413,-13464,-19824,13881,9045,-5657,-6176,27263,-14881,-11738,-74,16111,25514,-6355,-17007,-29367,6364,6958,-9378,-11816,25447,32105,-7007,6825,20494,16347,5486,14786,5910,-8313,-16004,31124,5674,-4001,-13123,-13260,13778,6347,15331,-30786,15738,213,-8745,25137,-29922,26474,-31906,-12795,-12857,-29127,1338,30593,-17191,-5324,30886,3596,-8436,20513,1992,7235,-11134,-3022,6217,19240,-6507,20440,-1068,-9856,-31516,-10038,-28487,3126,-11997,-31407,-13874,24542,-26093,-17907,-12685,-21121,10173,-14962,3587,9472,26236,12658,8535,3963,-13746,20754,-8611,5401,17695,27033,28385,-25831,-11236,-16636,20794,5010,6427,23573,-954,22734,-31257,17542,-6394,20424,30286,-1306,-29285,-19871,171,13739,-24533,-4199,5023,29023,-2970,12221,30137,-13721,-26215,-200,32461,-15936,-25528,30351,-29660,16668,4290,4282,-1203,-26520,-9994,19978,-26360,29175,22260,9515,-21552,28133,-17310,-23603,-3456,15632,-27467,20625,-25862,-20016,-2079,16305,-31892,-14907,24494,7062,174
47,25310,949,30486,-8033,1844,-27140,-23400,-22970,-1865,-16668,-5301,16328,30710,-27372,-13835,-3736,-5503,28040,7519,-24178,24162,24960,19477,-2003,13888,27033,2222,-5103,-12625,22874,22094,-28761,-8562,6466,7191,19594,12223,-8908,-3542,6602,7190,29243,16351,-23879,7267,-18884,22693,-5547,19291,19848,22650,7124,32593,-3090,28151,29611,31751,-17056,-6082,-11652,18543,14102,-10829,-13367,16023,-8088,-16305,17711,-15679,2167,-29562,-1217,-8277,-14311,-20549,-20976,-11397,-17828,-14287,16665,-2072,-1771,30615,2056,-7015,12220,28210,-30380,-12849,-11510,-140,13481,13801,-24837,17427,-21603,14301,28124,-13041,-26134,-4266,24618,21106,-30755,-24011,32207,-19831,-6973,-22471,21897,20743,-25734,-15804,17507,-15172,29828,24957,11220,26233,-964,23548,26712,12339,32134,-8554,27042,-21514,-25779,30980,-23363,-1062,-13256,-19746,13599,19157,-30263,27039,7239,10636,-24989,-23330,-4219,-11253,-25997,-7692,-25727,-21973,3560,-27512,-10192,-12232,3756,10796,-31736,29684,22612,-538,27373,-25689,-19648,-2778,-6304,28516,17242,28261,10287,21098,-28897,19143,4031,-9284,-5163,-19247,11926,-19365,13619,19846,5144,-21904,-4251,16825,15953,11771,-180,30647,-25664,14265,-9925,-5616,25632,-14874,-5433,-8112,-4898,30957,21803,-27314,20290,-12353,-15709,11743,-13932,-3071,-14097,-13175,-13067,-4430,491,-17181,11679,-32510,8322,20811,4397,10578,-1424,11897,-15523,-19856,6305,1967,-9684,9111,29166,-24299,-13432,7099,17418,-14171,29238,21093,25004,1281,5182,-13458,-30790,-993,19372,-28982,3968,21713,-32472,-15050,-23359,-8683,-28971,6114,-16475,31240,-21744,28948,316,-3626,18678,18756,15978,-32728,17778,11990,-2893,-18034,-2650,-2646,-31,26997,12976,21548,-16944,-23808,14226,-1703,31154,-21821,-31556,-28849,28805,7531,-7572,-15449,25307,-137,-23333,13304,-20878,6962,9737,-9727,26900,-29418,28728,12288,16522,18479,-20074,-26753,-2966,-7234,27735,27709,32047,-31834,-27116,29425,-11442,11177,-8852,23256,29524,6153,-2907,-14728,32478,17233,-16493,-3596,-4292,6851,28974,-27465,32687,12763,13277,22685,
-21449,19798,-13491,-20581,-15565,-32187,-25434,28781,13029,-4716,-29156,-21100,8394,-10002,18744,-5007,-6143,13832,-27964,-19668,13015,3968,23068,-30186,-29509,-5054,9456,12672,-6594,-19852,3991,-25084,8927,-27595,-3811,10568,-3017,-16653,12303,23288,19532,7589,-22145,20618,-32034,7871,-26287,20446,8880,-17295,1433,1115,7890,-18337,27665,1602,-127,-26840,-18188,27325,13262,-9387,-10627,18818,-28038,-25312,-5145,32038,30837,-31602,-13289,-29823,-7607,17792,-12092,-10550,19096,5963,14617,24803,2361,-2976,26614,-31932,-13768,29964,29707,5039,-15812,24140,-22524,8777,-422,-16839,4583,-15691,20224,16286,1791,29953,-9826,17310,-1655,-29182,-15108,-30440,4551,-3325,-24476,-5037,19268,15096,-19033,24230,23616,12402,23136,10523,19147,21131,1692,21502,26945,862,23231,-11120,-27894,7956,24194,29435,10903,2491,23090,-6749,18090,-27746,12057,22568,19234,16167,9061,1621,4103,-12822,23253,-12729,-25927,-24455,-904,-24092,27208,-16019,19110,30313,-117,24854,3511,24619,-645,6760,10066,7341,18858,27001,-22483,3306,-29731,8295,-9845,21528,-8705,-28117,-392,-22903,7042,-11172,28805,30516,-7903,14116,23231,-13488,-10119,-6624,29814,-16892,-31893,5338,19712,15507,-11052,-12719,30144,10024,7254,8427,12530,-20272,-8894,20799,-25188,15513,2937,-18514,9721,3772,9589,21556,30354,-8225,24490,7232,21346,-30350,10443,-10458,885,-31124,8742,31900,32296,12442,-13341,29743,2182,-8687,-31125,21232,14267,-1179,1242,-20617,-32726,-7724,18021,4164,-4414,-25541,11158,-7979,31559,15325,-31948,9417,-3048,8652,22915,18496,10395,24559,-14548,14509,-15954,-17082,12371,23110,27277,5090,7619,-448,6060,-32141,19501,17030,28527,-2356,-31489,14516,-22431,-28897,23737,2669,-3927,-28013,14027,-29698,-6430,3580,-26677,-21378,22373,23108,23036,-32755,21340,-10524,28128,-310,5906,32677,-1047,20025,19594,5762,23523,-8256,19680,-9834,15303,-26800,-32413,21949,18827,-20315,-26900,20613,20801,13865,-12406,18285,-8329,13633,23602,994,401,-30649,-25273,1127,15558,27094,-26313,7836,-30465,16359,3831,246,-19954,12881,-19936,
-5626,-7672,29437,735,-3578,-10643,8266,21439,1492,-12700,13046,-24740,-4870,-16595,-82,16315,-9662,-10970,5562,-23502,-27839,23024,11952,-13658,-2461,26356,15695,-23697,-25520,19602,-6142,12636,-18263,616,-31533,-4416,7970,-2501,-18517,21019,-17508,13891,-2154,14113,-26942,-20663,-18130,-28597,-29531,-12616,-9402,10564,-4135,-22473,21963,-21852,-11686,-9679,-8405,-14289,-28430,-23598,8932,-6368,25131,23276,30959,26843,8756,401,-10995,23924,-16437,-30381,-17298,-7930,28851,2060,19601,-8429,-14382,-26101,29137,-7790,-16977,-26628,-4277,17650,8150,-27093,13568,3988,-8485,-16160,-17745,-22666,-19014,-21377,12833,23543,-14129,-4817,13628,17849,-5073,2182,32720,4035,14174,-9074,31069,-30197,-5043,222,-7904,-10908,-9526,-30196,-23886,23141,15818,18367,-30690,-31908,-1285,25116,21927,21419,-20496,2994,26653,-30893,26664,-5530,26803,2076,-5383,2563,24551,-6011,-882,17182,12806,3406,16147,10494,6275,-574,9579,-17308,477,-17442,23481,-25975,-555,-6054,9858,-5718,28534,30366,30251,-16808,3043,4595,-5730,14907,-25525,12368,4072,28930,-20213,3725,9920,-32761,14448,-2177,-26846,-23293,31680,-26214,-10590,-10596,19718,-31606,32329,-23065,20773,-13348,-20421,-24992,9529,-12959,-10215,-32048,4844,-8217,3645,-14292,-30277,22314,-16523,5293,-26316,-14082,30051,24471,1854,25125,-19287,23773,23934,-7909,-3484,9015,18743,-3352,-29228,29,-2700,-603,-18205,-16034,28128,30906,317,21489,14212,22086,17890,6138,-21913,-7662,32249,21881,-4691,20919,-13243,19910,-1655,-14651,-15537,11402,24886,11724,419,20736,-9697,-28227,31210,17223,-28339,10654,-29394,-6287,-14214,17502,-26521,-6860,-2690,19671,-20853,-26267,-31723,28833,-28748,-17650,-28958,-16617,-24678,-9031,30358,9909,1095,19600,18608,12505,-11876,25987,27954,-19789,17169,-1566,11477,-3858,-1894,9083,-10062,-18131,-17505,-13630,-21615,-14162,9396,29831,-26888,-17541,17458,-21833,-27092,-10067,21799,-14634,-8608,31265,-23005,-21375,-9595,-8326,-22685,-12050,28556,13763,-31884,-15825,-22839,-13052,-4768,-2615,27134,-26233,-7946,-13724,-4947,
-26391,18845,16120,-29549,17209,-7499,-13490,-23011,-5239,-8127,13846,-3228,9392,-217,-20980,-28869,7687,-12602,18765,15308,-2295,-7479,-20885,-7957,16689,21746,20694,16534,25880,12997,-32614,-19198,-24028,21475,21235,-20399,-8873,17827,-27772,-21556,-8619,15884,-13983,-5077,-11477,-12313,26705,14719,-7395,-28334,22435,8286,-26391,-7459,-13654,-6812,5303,-30258,-21431,-16613,29375,-11322,5823,-1688,-2968,2514,20203,30236,-1751,-31115,-16087,14818,31035,13984,23116,22047,15073,-31560,-13018,-21893,-7262,-6813,-24407,3590,30205,15984,-10902,-22269,17758,-13040,-15587,14292,14554,-13158,-8502,7284,29492,24881,-9593,-21441,14720,2361,-30085,23966,-19327,-5028,12627,-1451,24983,-11666,-4252,31385,25401,-4652,-32589,21955,3622,-13437,6791,19775,4971,-12359,-16339,-29094,13892,-8175,-3094,-7870,7260,-19041,-9935,-21138,-16479,-17909,13769,30210,-29757,1217,4215,20512,-12440,22264,28516,23284,-15214,32354,14317,3909,-18445,-28274,15485,13756,-12921,23123,-32535,30693,19426,-14972,-22836,11092,-8218,-11411,-20097,-6729,-11936,27461,-14481,-7755,-4448,-14706,26933,-12280,-8126,15907,26074,-14288,16017,-657,-11949,-16127,-27367,-9559,7433,-12991,10602,-10510,3430,6847,6683,-14379,-25083,9311,-28895,27221,20261,11430,-16675,12424,13067,-1737,14964,27567,-27253,14087,19502,-13140,21186,-20399,-16156,6270,20732,20464,25739,-7350,-16216,9821,28114,13116,-22383,21747,-16479,-12607,23878,13183,7163,22471,-16273,26234,-25652,-29871,304,26981,-6914,-1713,10085,-9157,-5773,30350,27362,19361,5968,30450,-27571,3729,-20553,-31621,28477,19238,15742,-30537,-30780,-2908,-1164,12876,-3054,29366,4424,-18958,33,-25005,-28127,24177,-21904,30649,25940,24928,-26385,6158,2284,26062,25563,-4624,-14973,29108,22356,-30755,12317,-28487,26455,-8982,-11319,-27614,-747,-3585,18375,26431,2534,-18464,3291,14350,-25340,-18616,28444,-4360,-18556,-17636,6852,15058,-15965,-31407,10251,19902,-15726,30611,23770,-3960,-24834,20427,9484,7079,6817,-18955,-17649,-11787,-20782,-22449,-15984,245,27464,-32660,26939,-132
20,-4753,17670,2648,-19735,3648,29684,31878,-22550,-10332,-25981,-7459,30209,29116,15537,-7600,-7211,5067,-26101,-21455,-29883,-23995,9873,-8124,-19433,-24910,-9723,25372,11509,-28099,-9147,-26285,8464,-13082,-11248,14726,-31175,1398,-12471,7200,19362,26453,2115,20499,-31268,8199,12660,26609,-4530,-11321,9273,26028,23234,6989,-16352,8577,19173,-13828,-10926,2639,-7207,22073,1894,-13748,-28139,-25463,12687,-790,-24139,17557,22791,20454,689,8472,-16875,-27996,14836,-5967,-28903,253,-6858,-8645,13638,-2551,3970,10733,8501,12650,30429,11661,4196,-22928,-2636,13225,-23874,11745,5864,-13875,-21623,1313,28485,28296,1604,31562,-21211,-31045,-26520,-24061,15827,-7639,25126,-535,-17684,-20518,17227,-6731,-3521,-15899,21860,598,4843,22025,-2629,20160,21108,-32538,-31665,21741,322,25317,12392,20594,2473,16099,30198,-30390,6150,14907,-18752,-15051,-23486,9691,14986,17691,-21828,-25628,-30813,-17435,4576,11667,21017,9056,-1162,-11300,-10719,10718,-11095,-30985,1357,-31354,19535,-27906,5372,25359,-5758,26694,16310,15697,-12197,-19949,-25343,12158,-841,-28966,-1725,-11331,28338,-10408,-31670,22514,14475,-7193,-23996,-4752,-2305,-5901,29674,-25661,9982,14172,-28114,32023,27278,-18147,-8251,-16929,-3435,24704,-21992,-8566,-26348,11767,31675,-25724,-32485,-3030,19201,11265,5543,5393,7897,-10657,-11771,11609,-7692,-30560,-28076,9236,1012,-17015,-14301,6485,3102,14263,3234,16451,-31899,23888,-3562,-14235,28793,7936,23119,9990,-11169,28415,-20578,-16160,14457,-30162,-13580,-19722,31721,29387,-23114,-24562,26663,7189,-9751,6531,-22373,24815,1258,-14076,-3762,19692,-4657,-27151,-28056,-16471,-22214,-7249,-11195,-26652,14309,-16647,-682,4488,-9946,-6817,7463,24930,14501,-24893,19601,-11652,3265,8529,-13272,31899,961,-28537,-8354,-26918,12789,31535,-14025,-18537,32627,14270,-17168,-16838,-3183,2108,19535,8888,24509,28011,-15845,-23086,-5533,-3101,17862,-12021,25594,-32108,32250,-920,-30144,-9226,-25372,28539,20710,13559,11864,22099,3987,1870,-32602,-7780,3900,16939,3433,-27369,4908,-22394,-1
5004,21515,-9699,-27185,-21674,21066,-350,-30514,30833,18426,30428,8448,24688,-31641,-20292,3853,-10671,12934,31184,-13503,3605,17681,-26254,-3678,-8597,8689,-11726,-6285,9417,28069,-26180,-31272,26522,-1436,-25934,9824,-7015,-20051,15420,-12874,16097,-31732,4361,-18617,-22384,8224,30193,19004,8371,-24727,22290,27260,-1788,-8606,4948,-12883,19736,-27851,-24940,-16107,-9770,29875,-28243,-7610,-16506,-5716,9652,-943,24694,28045,14406,-28752,5089,-23504,-14867,13230,11673,30367,-29388,17127,-18287,-14075,-5596,6798,-25645,-21838,16809,7228,24084,-10567,-21321,-16253,-21337,17934,-8406,8293,-7180,-23401,25351,6126,-32146,10263,-9751,-20674,-19646,-26478,14167,11907,-3253,30534,-19670,-13673,32181,25468,14921,-7703,-32497,-2179,17775,-27770,6978,-6574,-32154,7538,-822,28688,-27428,-2862,-25335,-22756,-25046,-12584,29987,3794,-6140,-27222,-7406,-13276,28452,26189,-29840,27025,1530,24721,633,-15672,-30433,-3124,30434,7648,4613,-31391,31276,-11391,-16837,-13955,-23877,11541,14004,20083,-17205,2391,-4106,17064,-30370,3371,21821,26570,1558,4578,-30101,9956,14113,-13049,-7426,-14284,24990,-30792,-17938,-29246,18912,-16418,-18528,-4278,26195,-19381,13039,-26705,-28869,-7693,-30143,4540,-3290,-21921,12017,24077,29928,-18351,-230,17550,-14867,31431,7918,13095,-31947,-23539,27213,16494,-21444,-25649,24462,-24257,25313,2957,12049,-3354,-30303,-10033,6952,-11414,6627,-26549,1147,-17103,-24908,11883,-29282,3792,30570,3599,5506,-14708,-6191,3736,23398,-17417,11728,29005,22649,14204,13768,3548,-27310,31183,26360,21621,-18139,-32283,28732,2162,-4042,-7682,-4585,-28036,8598,11527,-24979,-14706,-7543,-16203,9221,-18865,-12219,10008,-28686,-27899,31096,-1247,-20174,1210,-24025,-13960,20866,-9070,24682,22515,-31522,-23224,-1256,-25064,12557,4087,-1964,25873,-16381,23494,-20451,28532,-10139,25338,1605,-24256,17614,30387,-6925,-18604,23435,-8538,-24724,9654,6860,14422,-26817,-13665,-20018,32588,19992,8297,-17617,-27948,24855,28122,13155,-6993,-20194,-4977,-18889,10827,5950,-13816,-26636,-1515
7,26778,30250,-26233,-27120,29870,5309,26284,7204,5722,-19417,29168,-10847,28925,-10549,-24433,-18430,2662,9463,1522,7028,-3383,-112,-26565,3173,3249,18305,783,-3918,-19914,10606,-17738,15779,30608,32231,-30204,31989,6596,22920,-17991,-20408,-18631,22078,3476,-17885,18083,16798,-2445,5556,-18060,-16961,-30350,15592,27986,25762,14746,12626,-5347,-16768,29235,14000,18013,-15734,-11942,-12265,17509,-30269,6023,-19689,12682,-6163,-22931,28093,16416,9153,27950,2255,-15053,22622,20661,-10360,26480,26332,-4919,-30839,291,9205,-10805,960,28887,23234,-31023,437,-115,10817,5186,-3352,13491,-19067,20319,-8330,-9511,-21070,23204,6370,-5572,-30960,7726,2218,22297,31025,-6208,-18062,-15988,-27482,-395,-18856,-6600,20135,21694,29928,-24496,24109,-21697,18765,22286,-6955,-7839,-21615,12688,-32553,-27072,-9188,21482,-28709,-13795,-15764,27488,-17522,16256,-23217,7693,26435,-838,-926,23973,29895,9203,12977,-28809,27231,-10429,-23601,12173,-23085,-9065,32606,30078,-15818,4194,21433,3703,-7441,-629,20074,18800,19999,26493,-32574,12006,2426,31868,-3534,-17807,-26081,-12710,-21186,4923,2260,18415,-9691,-9776,30809,26249,7571,-21926,15855,17339,-926,20775,30654,12279,-29544,-18036,10570,2368,-32207,20692,31404,-10498,23783,27296,3435,-23728,-14160,-16147,2928,11618,20516,-20883,-18680,-14106,-31837,-1165,-22677,32083,30418,12100,-24724,13386,-3030,27653,15093,18499,-27526,10617,-14928,31384,26798,-30301,11522,-4341,-21067,-15173,30071,2723,-9284,-14654,-28593,2480,-14291,-32392,19842,-24438,29408,14884,3509,16747,-12893,25431,-15243,27297,2802,16844,7084,19030,-24671,17980,-12762,-21931,-23938,-14626,5401,22907,-26323,31214,-10940,-20668,27890,-31671,1807,-4586,-2436,-18433,12527,-11915,29085,30449,18656,29346,30103,-16332,-28291,856,4872,-6374,-10586,2480,-32026,-25014,-29728,10897,-27639,19620,-9969,22237,29725,10997,-6672,6695,-12006,24823,29392,27579,29242,-10562,23055,3214,-19900,16873,8647,-9814,27437,2195,2950,2494,28595,-2691,7382,-4077,-10303,-25487,31263,-18858,16198,-26633,-743
9,984,-25583,25303,-8872,26790,-10198,14012,9208,-27752,-9413,25433,14082,-1592,12593,7563,32624,8301,-27964,12246,30332,-14181,21565,18505,-20091,1060,-17004,27561,20997,18683,19273,30757,28015,-11426,20006,21217,-31721,15623,-11423,30997,19690,-4966,23645,-9329,19847,2219,24772,-12411,19879,10091,26184,28368,-10717,16866,213,20451,-26815,4336,-25809,-13831,21111,-25977,10945,-18986,-31012,22360,-3462,-28005,22056,-20905,-18755,23287,20082,2221,25060,15220,5438,-21102,22744,10066,-16979,8390,-17741,-24018,-19968,12890,-6512,702,-26993,-17582,-1445,22513,-18745,29329,25283,3081,2693,21422,-22063,12246,-13237,-21225,-23503,9785,-966,9500,-4413,23864,-23011,3376,912,18917,20178,-450,25867,-18011,18473,-4989,22935,2932,-29681,28315,-1140,-22700,-11824,-23355,12186,27958,21175,-20227,23868,29148,28101,-20578,13082,24741,-31096,-31133,-15142,-3662,-28652,-15661,-1706,14331,30815,-11143,25675,-13544,16003,14312,-19391,-300,-3668,-4693,9944,-7719,-1043,-23485,2096,326,-4542,-11324,-27269,-17581,-7755,4176,29729,16199,17668,8213,-19684,-30906,-870,-16318,7732,12239,-79,20341,-12891,-13319,-14065,-26372,15320,-31099,-25471,-29687,-24606,8521,-4596,2089,-2039,-9113,26871,17176,22509,25071,12198,16157,-6192,-5775,-8449,8333,-6715,28864,-18297,-26117,-23546,21134,-22587,-21970,2371,15866,-31876,14761,1404,-23563,14566,8788,19289,22885,-4004,6448,30010,-16415,-28175,-21238,-17961,-23178,-7605,-24466,-2127,14192,9194,-5475,-19518,14486,14283,939,-6283,-10806,5334,-18552,-17349,-21588,-4412,-12532,3249,-4766,-5169,-28406,16335,-28803,-23284,-7707,-4603,6120,4305,-1977,16648,-12506,26936,-6736,11662,-18190,16250,-19492,-10333,-11632,-30553,6865,-27440,25205,-19661,-20296,-27544,27188,-9207,-9645,-9904,7715,25101,-7334,25509,25415,-31629,-3471,16299,-13516,-4643,10158,-29750,-6231,6674,-3977,21081,13696,19840,111,-25868,-28384,-11878,20662,-21139,26862,5550,20849,19581,1165,-8446,-6023,-19612,-15564,22437,7953,-27347,-20677,3147,623,1508,-21461,-14500,3161,27147,-6671,30308,26271,27
654,24541,1600,17257,-173,10180,-9352,-12646,13324,-8367,7650,4290,-12642,-1970,12234,-23336,-29780,-21822,9809,-23303,-31089,20184,-14223,-18983,-4418,14464,-27231,-21132,25079,14964,12676,10202,26332,13846,20993,30418,-31270,6681,-31103,20751,22733,3731,26348,-21267,26138,31376,26395,-19185,-4972,-24584,-4880,-12230,9356,-25566,8872,22637,912,16359,10222,18049,9707,15218,-18243,14464,3682,27116,28768,29954,18680,-22406,-11147,4811,15161,15619,23875,-28068,19146,-11490,-30103,27921,27558,28292,2436,-21011,31902,-29209,-15652,-14297,-7516,15947,-18968,26907,-9124,4444,-14928,8315,25417,6382,29454,30913,32667,-21726,15536,23476,-27929,44,11710,-31325,-16236,7495,-18885,17867,13714,9385,13141,-22054,9654,24061,22111,-2995,-30011,2890,-8770,-2898,-22101,17204,-30370,27700,-26985,29357,-6983,29646,-5887,-28461,1542,3025,103,11594,10943,-13564,18862,-4218,-13489,-15227,-3200,-27515,25713,22200,10180,27757,-31231,-2754,-9692,-28589,-3440,28751,30771,-2231,11409,-7715,-31129,25618,-22051,13635,9779,-30622,-1951,-8655,-8553,-10516,9783,3099,-21323,-24747,29129,23835,-3066,-9701,-6599,12081,1807,-3983,3207,-23400,-6714,-23500,-17695,27877,-4468,30064,12566,-29754,13997,9616,-14014,-10109,-12791,-13157,26177,-25976,9665,-9265,1035,16849,22992,24574,12916,-32466,9825,5257,-5974,24148,-8238,25160,-8076,22445,13399,-24036,21872,-18743,-26800,-25307,18647,22503,-27426,-14255,-26968,-9021,17541,-31317,5510,-19800,-24961,18922,-18414,-32564,13688,-6523,22334,-8761,1820,23933,4260,-14447,5524,20874,-18612,-29553,-19323,-25708,-19546,8470,23114,18013,-11575,-16209,30800,-6660,20669,-19619,-21540,15647,-24762,3513,28159,-26794,3177,-11777,5322,-7605,24766,-19769,133,5036,21536,24200,19605,-11387,-25068,11847,29012,20975,-24893,25867,-15514,24660,-3537,-4979,7557,14570,10497,-2950,29725,21369,31875,24151,10716,14883,27813,13181,-18867,-7237,438,17491,2306,-28960,-7873,16386,-22263,30028,12048,19169,-11424,9399,11446,-5500,-23495,-18117,29456,-24904,-20666,-30211,-29347,11858,-29763,-22
739,-11586,-348,15001,31460,14332,-11231,8327,25119,-32462,-23861,16246,24969,11931,25112,30110,31644,6937,-13989,-15556,-9298,-27241,-29749,7821,21478,-22454,-22880,-22278,-14713,3891,2266,1335,30569,16743,2458,-3045,-31218,17589,-13726,-10510,-19942,9662,-5047,-18353,-19973,-15571,27775,-14562,-19570,3994,11570,-21458,12114,-12366,-7417,17312,-12556,-8572,-22794,2927,-9618,7370,-28458,28826,-18616,17706,-6901,-27162,28711,15000,-16948,6746,16515,-28736,17274,-29379,27271,32695,18243,-8544,-3343,-16982,-2195,-12026,1679,13563,-19228,7804,5962,21653,-28398,15759,25083,10960,-6689,11665,-14703,-12825,-10056,2670,-30205,1990,-9482,10971,27927,2316,7487,-22147,-7748,8288,13230,24271,19604,-18795,-27362,-5804,-1428,-2725,11573,25949,-16053,29262,27120,6553,4557,685,6124,-13196,5774,28154,2624,-28132,27045,-24393,-13166,-24153,-18176,-11099,30769,8787,494,-11513,-17463,6037,13513,-12931,-5718,28157,389,-21119,11288,-20840,-372,-32245,29774,-31780,-31936,-29284,-23897,-5887,29121,18936,24113,708,2044,-20547,-13682,-29841,-13387,18339,-32346,23355,-17784,-20181,11627,-31385,3533,-13585,-18486,6195,-12860,-16958,7612,-11035,-2438,-23315,-73,22730,-30706,-30259,20446,-13679,-32304,10107,6358,13518,-402,-5806,-23541,19643,30214,13602,-8098,22377,12010,12261,-29986,31800,-11460,11134,10876,29358,-4979,-14811,-17510,16770,-25696,15620,-5202,8349,-31781,-10755,32457,-18389,-16483,3699,23833,13243,-26492,19380,-17832,24255,-626,5376,-3790,28898,-19923,25523,-3059,-17364,-16784,-20163,-4597,812,-30603,-30966,-14893,30730,-12782,-10591,17868,-2085,13536,-2785,27250,22494,-5775,-10504,25111,-24314,-20464,-15812,3574,-4425,26898,10724,4174,28150,15561,-18229,-6161,-8242,-8143,-16449,16652,-21926,30494,-2867,-19447,21675,10503,-12095,-21539,25117,28586,24715,-19556,-9034,31859,4297,-2265,31022,5041,-22449,11776,30687,27726,-8705,-27600,3634,-24440,14934,22131,-32694,22269,29968,-17811,-32255,-23749,-15881,5790,-11091,-22367,-8509,2320,27218,-27602,-2218,17608,32625,-20835,11452,-2388,
-30477,-28723,-18931,15237,-9885,26136,-21753,18840,13991,-3367,29091,28426,26185,-328,30925,-23085,26443,-13507,1639,-8240,-23454,-2848,30444,-12848,-20090,3829,-26285,11650,13639,-5793,19058,-17312,-29108,-32648,-7802,6991,-20806,-24905,-32174,1748,27761,19396,-14364,22399,1505,-25285,7299,13732,19434,3997,-30395,-27086,-22977,-25740,-5450,-22364,4246,30833,14516,3717,-3359,-20388,15011,26527,27790,13440,5063,-31313,-32308,26993,11177,-3822,-22321,-16516,21065,-32082,15875,1470,-9825,5421,22240,28182,-17274,901,-18173,-826,12177,-16825,-11896,19965,4551,16324,-4135,-15099,18971,13679,-32166,-24312,5716,-17629,11513,-9866,18139,-31940,-25308,18946,-24813,-10322,8515,27834,29034,-24273,27504,18108,9009,-21122,22307,9190,-28915,24443,26707,-5373,20553,-29928,6826,-18389,28217,-28540,5990,-18870,-22246,-31047,21991,30915,-30312,-31307,13495,-2129,15762,24765,-29556,-28667,-11456,-4995,-6440,23318,-2663,9530,-15661,-14551,29434,-20830,5417,-17339,-18934,29063,-16151,-177,-20304,16246,24349,-26053,-536,-1994,-21298,-1819,2588,17222,-28051,-6712,21263,-603,-22819,21709,32234,-19231,-5532,12797,-22189,13738,-21610,17854,-20268,-13152,1711,21776,-23989,32665,9097,11428,19735,-26018,-17822,5800,30928,24489,10673,-21765,-15393,23747,-113,8134,-15178,5193,-13406,-16973,16262,-11297,-14267,6407,20523,-31404,-22971,12536,-26415,20278,25352,20541,-5442,12359,-27851,29940,1573,-7370,27214,20278,-3163,-19888,23727,-30485,21726,9053,-14813,-23599,-27675,-24032,-11012,27766,-15943,11502,19904,-18055,25135,-2189,-29470,24086,-14009,23115,-12320,13440,17255,-31863,-19637,-4453,20998,22763,9710,-16200,29832,-6547,-18441,1410,30789,-16102,-31227,-12072,-15450,-28518,-6248,-9774,-7485,17883,-11473,-18332,-19597,18971,26908,4842,11236,-2070,-27996,-30543,-21017,1166,-26559,16260,-26667,31206,12668,7409,26984,-6621,-23749,28552,-15808,28337,18463,-23494,25334,5420,-20070,10807,-11437,12714,19987,-14371,-21749,-32155,-197,13585,7657,29122,27449,30959,14010,23674,-9564,-22034,9548,26746,2117
5,8142,218,-19936,10329,15339,-19830,9254,-23689,8038,20679,-28301,4102,24135,21868,-8812,-17993,-7515,6912,9027,-28211,20168,6012,12001,8213,10986,-20585,22947,-25650,19222,-13071,-32428,-26876,-13801,-17581,-6642,-14110,-18092,-11666,29065,-26390,16816,-31092,9485,-31369,18216,-13755,-26063,-15003,8366,28360,-32627,31404,18513,2945,-11367,8634,21967,-13109,-470,28804,21471,-29505,-29051,12913,21847,2987,31046,-27480,6497,-19053,22224,12947,-32485,-14542,-1356,-28862,23062,2184,-10337,-20676,-12284,-9974,-1353,-6239,-12493,1546,25402,-2620,-1237,19135,22073,28773,-30708,-11426,-23561,16320,-32747,-25195,-28442,7301,4583,-4881,-19845,12513,24374,25931,-22426,-32182,12091,-21462,-13298,14122,-23065,31501,-21569,-13590,-17694,-3748,31881,31805,-17738,5669,-10436,11738,-13500,1558,23703,25416,-6039,23580,7096,32190,7364,18834,4025,-20935,12916,4509,-25984,-3397,-7104,-15356,-2444,3539,-24664,11488,-14313,24572,-29198,20933,15161,26493,14116,-3059,29623,-6204,-31357,-25041,-19429,10285,-24127,27211,27671,32753,29540,5298,-3712,-18454,-28391,12696,6207,-25991,-9640,26706,14825,7241,27275,2614,16752,6529,-23283,-20322,-6479,12734,-15801,13000,-16061,-30125,-3754,4572,7765,-9094,-12559,14047,18465,16915,-1966,-28136,-10962,-18912,-25330,19616,13537,12806,19808,-13166,-12553,21483,3200,23587,27069,-29426,-3136,18320,-8525,17303,-14083,1260,24400,25147,4846,-19446,4939,30363,18831,8905,-25429,-15120,-28935,15065,28343,30247,-25512,-17788,17002,-12742,-2457,22535,14180,-20088,-11348,-25564,-25242,-18275,3565,20629,-6566,-24762,-27497,25300,32065,-16001,-25891,-4486,-8658,-4597,-15372,24487,30085,26387,32562,26910,10744,-19348,-22650,2983,2595,-20364,-16765,-5645,31073,17542,23390,15826,17552,-18198,-13449,25502,-30482,4334,20434,-15968,-14941,23037,17664,-32022,29945,20450,25255,2086,11598,-9326,10759,-9786,-15800,-24080,25774,-13170,-23915,-14476,31723,19333,-16270,-29444,7863,-31624,8845,12243,-16670,-7261,20816,-31882,30548,29752,10687,-13180,-12480 diff --git 
a/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice3.tflite b/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice3.tflite new file mode 100644 index 0000000..70e56bc Binary files /dev/null and b/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice3.tflite differ diff --git a/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice30.tflite b/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice30.tflite new file mode 100644 index 0000000..4d77836 Binary files /dev/null and b/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice30.tflite differ diff --git a/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice30_golden_int16.csv b/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice30_golden_int16.csv new file mode 100644 index 0000000..38e5fb5 --- /dev/null +++ b/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice30_golden_int16.csv @@ -0,0 +1 @@ 
+8841,-17133,28550,-20336,-27934,-16279,32498,-5848,24220,1362,26753,754,-11168,24726,-4013,-3808,-25618,-11740,-22777,24270,-11581,-21367,19662,10811,-13104,-860,-25146,28096,-29467,-31968,7310,-20852,32143,-16020,-32220,10811,-17837,32064,16884,29742,-17468,-20634,17267,-31574,-17417,13182,11522,2543,-26210,30406,-21968,-5464,-2265,-31755,-29634,2240,-11631,9575,31093,12421,-17837,-20711,20510,7920,-22076,-15656,14221,-17735,-6179,-24609,-20336,-19576,28335,9765,20840,-19766,28201,-12379,20193,2324,9771,30301,4521,5283,2217,28999,-5674,17683,1349,-24109,6510,-15436,-27370,15060,10888,-25486,-31918,12721,-27423,-27752,4916,30072,13915,8792,4137,-6703,-733,-6647,8968,14931,-3028,14502,-31379,1624,29032,31190,130,-15118,26616,4161,-7202,25415,-25180,11683,32573,8178,29105,12313,2846,7957,-2064,14293,29865,-31506,-22069,-264,-27382,730,21052,4621,-23123,19338,-20934,4512,29324,26117,-15103,-23139,-28739,-28619,-6609,-13377,13186,16353,11220,24870,12195,13162,4038,-3341,12441,-18841,20687,-3728,2768,8178,-2198,23031,7292,-17242,-32626,-31694,16819,19816,-7167,-9213,17753,3894,6421,14642,-7484,8811,-12566,-930,2042,2584,-16673,769,-18870,29130,13981,-25075,926,22365,30310,-23220,1523,12823,20817,20231,-1185,-7340,11058,-29508,15622,14156,9014,-11905,-19225,-23344,19341,-21275,-10101,-8155,16311,14273,17071,13199,31236,23742,-17607,20050,-25658,26797,-11181,24270,28141,-13294,-10903,7578,20338,23802,-12770,-648,26143,13461,-30718,19557,-6580,-4094,-28155,12663,-5171,-9965,-25013,-22755,15536,-5126,30683,29864,21228,-1408,5922,22678,2175,29417,-7958,7527,31892,-2629,32505,30519,13864,15922,-29467,31873,-15274,31684,-24350,10916,31297,-21784,-26195,-31770,-10994,-18777,-11018,13985,27420,27593,-5208,26077,4334,-4127,-2554,-9411,-4089,-10464,-22385,-6459,-3907,20448,-10235,20807,-9526,21841,29964,-10991,17955,-2277,15548,20426,30623,-15120,-9256,-23324,6692,-3739,4544,24884,32755,32307,-4843,-28851,-19002,3613,-9852,-7556,-17133,-30567,5090,-11045,21557,3061,-18826,7173,138
83,-14978,29118,26474,-25319,7043,-29155,14676,-12003,-28204,13841,-13536,32216,9521,23121,-25929,-2744,-12860,6572,28390,-8062,-25554,20591,-2353,-3790,15773,18437,-26129,9823,-12114,16697,17823,-10246,25524,17830,20857,2183,-8326,-31309,29136,31279,-27741,-14750,-18765,-20250,17061,15302,17655,17387,4188,17181,-21143,-21801,-19756,-29357,5035,-21203,23785,-6320,-11370,23806,-12936,-14107,-15097,5747,8199,23086,11109,-16371,-10341,-32661,-6566,-6217,24903,12234,-31837,-1598,22751,-29249,-27792,-26037,-3475,10845,-25341,-25718,10670,-3919,30401,5216,-27535,-15229,11427,23749,-10997,-18998,6528,-9859,15077,23188,29677,-372,5485,23901,-26043,-1354,26598,17171,18984,27778,-20039,12197,-31715,-5024,-8410,22132,-27636,-8139,16392,-23202,3051,-9282,-31634,18969,20090,-11360,7147,-4213,-14444,10596,29600,-23993,-1147,3748,-19805,-10718,11562,-15675,-26414,-28172,-6675,-14090,-18407,-9891,-4795,22285,23714,18798,-21264,20690,26302,1252,5183,9620,9699,4699,-26804,-30213,-32399,25127,-9631,30798,15005,1214,25374,11192,-6642,-30581,-24204,-3839,-8789,-15241,12179,-26010,-22416,5165,825,8126,-14603,21759,24912,26366,15796,-16830,-24615,-7770,-8382,-13986,-17333,-12562,-31731,6142,-24233,-9557,-6160,-19070,3694,-4528,10575,-19306,4391,13037,25715,9928,-20671,-29361,-32554,-802,-7335,-25681,28552,-5163,-5623,5717,-2001,32007,20602,-14237,-15789,27686,14097,-4472,4083,-26659,-24929,-31817,7654,12706,-17144,7213,6118,27051,-5868,7440,140,5723,-29738,-29406,8230,11043,-25899,8863,-9251,18873,6063,-25694,14179,21154,-27476,-26364,16578,-1783,28546,16710,13133,-12263,17320,-2431,32735,-20434,-18013,-1377,-30464,-6567,14951,23467,-15832,-27098,-27621,-30003,23025,-25227,-7897,17528,-24454,30712,29211,-6596,30251,-7730,3295,-13361,22373,19419,16682,-10892,25631,-28976,8372,12343,19510,-9574,-12154,16561,-10086,-27431,-18987,11799,7641,-23062,9728,-18587,31476,16261,9894,19609,29437,16324,19005,-1460,-3024,2507,6645,18087,-1900,26886,-30059,-25293,-27120,1480,13183,-7267,-1851,-26386,104
68,-29897,-24135,-30280,3397,-6441,8418,15028,-13830,-25480,-15645,-15561,13502,-1863,-28724,23268,-25605,-15666,-6133,-30391,10951,1189,13314,16800,16512,12407,-3371,-2181,21547,-22759,-13655,20866,-4662,2812,-31743,-16056,20994,-8079,2279,-24408,-24525,-14667,21269,-1933,-10927,-8372,650,-28797,15079,31116,19368,2616,2648,26595,32379,7799,32511,24096,-32585,21403,27077,-28521,-16978,1903,-10220,-8447,-194,6721,-8544,4842,-11770,20991,-29594,-23341,16453,4762,9915,16441,5435,-28121,26898,9094,-28450,-1375,24072,-1203,-1921,8534,17627,19779,-3090,-3782,-10799,-500,-32176,2512,-23156,-12860,-27754,-17679,-14450,467,18466,-4633,6015,22571,22702,14122,30867,26064,26527,-5517,19829,21274,-29404,-31675,-10660,-31918,-19330,-9649,5049,24607,22538,-7897,-30625,-32683,-29833,-6509,22041,23630,13383,12610,-25795,9763,-26321,-28453,26121,-11351,9554,-27008,-4788,-1222,-18136,-21748,-7160,-5871,-996,19788,25179,-13756,-16854,-27011,-31388,14455,22081,-26655,-4700,7743,-6449,-12628,-23221,24558,12402,-13474,-18513,25066,-12181,16014,-17308,31878,-15634,12017,-29515,-16682,-2659,-25695,8073,-19290,27697,23398,12416,14911,-28215,-2748,-15960,-15627,-18120,-4402,2019,9899,-21176,463,14187,1516,-30680,-27705,-13028,-6940,-30876,-10219,-18682,-29001,-9348,-7862,-1484,-18736,16474,28742,-12216,6507,-3776,-4886,535,-7653,19447,10074,-20354,24219,-6869,-13580,6763,10259,-11857,1557,14704,-8231,19859,-4274,14520,23766,2272,25322,-25300,-19722,11184,-24612,13869,21666,27575,13381,-18633,13945,-20300,-17794,8645,-8590,-465,-29587,25718,-6647,24781,-6484,25921,1731,-22322,-24647,-26828,-6774,-4970,-7933,-2444,12446,-15743,31158,6984,-11588,25652,20272,28637,26820,-29572,16127,20996,3965,-18277,7513,19107,2307,-21587,-2944,626,-1539,-138,12778,-18235,-6561,-435,-32735,-21376,4491,-32345,-27280,-15930,-21801,17403,-27579,-23992,-21802,-18230,25862,-5704,24003,21817,-1618,-24142,5222,29724,5046,24930,19372,12141,4520,-26228,5396,-29950,4652,-29161,-28514,10171,6638,-27781,-13977,-30433,-20056
,19902,18582,-11359,-21494,30224,25237,-665,19641,11259,-2198,25277,-22414,-4300,9248,19455,-5672,-11837,-28766,27253,-27099,-22286,-18820,23549,1507,98,-30548,-13529,-2923,2674,-23595,19959,-22217,-5187,-24093,-21056,10251,20102,-21903,5881,18945,13910,31072,27483,-3137,-26934,-12690,-4816,3131,17909,27083,6946,-448,27101,15741,-8960,-1675,32691,-20418,-4162,27701,-30151,-3825,9403,9726,3867,-27554,-7123,16952,12808,-25340,17213,26241,-17843,21461,5079,-15362,-7727,-23528,-21357,-9899,27451,-26760,2637,-22831,11729,-30520,1071,-21387,14781,-16797,26233,744,11278,-17731,17809,-3349,11861,-2972,14995,-2247,27713,16474,-30113,6125,17731,-7396,-26802,28698,-32178,13072,-14600,13071,-22802,26996,-22148,703,-21711,-2546,-23700,20299,19148,-26709,-14995,-9266,-24027,25616,18643,-9794,-7236,19415,32415,-29527,-16130,-17299,-13254,17362,3283,-26334,15591,26032,-17863,-10550,13579,1451,-3269,-6486,-4491,-3966,-17120,-9000,15581,16457,24503,-18861,18134,-15645,-16823,23417,-2079,24611,-17311,-6153,-11849,-318,19558,-12629,-10013,-32479,10905,-25038,22712,25273,10287,-1962,-16792,6008,15606,24566,-3672,-11972,22839,-10802,3150,-31075,1852,353,29062,19959,27594,10969,-7584,3407,-20057,8950,15024,-19451,6764,27309,-20172,27007,23710,10010,-13674,31458,-7180,14150,20545,-8857,-25453,3486,-22894,-26311,29236,-4930,31995,32402,2995,4117,22236,-1444,-6039,-6802,-25795,16953,22796,-31450,-16332,-1998,-476,8209,13221,-20999,26499,20128,-26279,-2922,-12306,9117,-30692,28504,13313,7383,-29752,25614,-25169,23297,-23549,-14421,-18161,10540,27610,21842,-15586,-4407,-11454,548,23226,-25027,11097,-12383,1267,14527,18788,-7012,-2956,-20061,-13137,4052,6199,15079,26511,-350,-11438,-570,-2915,5678,12419,-1294,18771,20752,-211,28287,4056,-17488,10496,-22349,3428,13381,1041,-32755,26114,20738,-24920,-9377,28750,-24031,21446,20009,-22518,24056,-17285,8263,2870,23217,-17795,30345,-3673,6920,8670,2969,-1416,-25188,-11106,-1087,3718,19083,-26538,27769,22091,26729,11414,9078,21201,-5819,-25441,16845,-
18302,-6807,-9393,-10405,-6008,14656,-1525,14248,-10347,-18353,14178,5495,17514,-28495,1902,-16802,-73,7532,-5654,-23213,4117,-30736,-17385,17504,7099,29923,-8410,19115,-8746,4333,6466,1983,655,-4924,-5233,14987,748,26749,1215,-2161,-15579,-23315,12422,6289,14655,-6605,950,-8324,-29925,-18913,9984,-2728,4670,21462,24561,-24221,-24480,2357,-23010,31738,27115,14971,-12979,-11211,-18480,-19040,-20055,31367,29623,-23700,24523,14088,32702,11877,20400,4495,-20393,28585,24549,7550,13080,5283,17604,-30360,10922,12086,32293,-23706,20046,3583,17228,1389,-21847,22129,13733,29345,-1566,-3516,23827,18077,-23586,-18851,-1828,16504,-29775,12983,-5829,20042,1059,23830,-9222,4470,5176,-25558,25691,-9641,4349,-23805,-30852,-31329,300,-31088,19877,-18867,8698,4652,3749,13949,-28588,-14752,27556,-29892,-11215,-1747,14520,32575,-15984,29503,2006,24051,-27486,-11594,-28695,-23436,-26952,-22009,-795,-3054,25912,-6680,-12140,-23027,-20906,11739,-11992,-28035,-18060,24146,-29056,-8636,24656,22884,-21423,10076,-16795,-23861,3396,28279,-27692,-28471,-5340,-21252,-13307,23056,30012,-1325,-22418,8028,30180,31143,-15898,30019,3243,-4849,-25016,27730,31667,-6828,-31988,-6933,-9185,3129,22075,13712,15534,-9028,-1571,10384,7377,-21438,360,23620,21565,-14118,9217,-6707,-12523,22342,-7907,-26358,-23345,19563,-23120,-19395,9654,-16752,-23107,4152,6342,29725,-28302,-11734,9382,-1045,-1075,26296,13374,-26018,32325,-16266,-30016,-29664,-21787,-23935,7591,23714,12790,24590,-14071,-21592,18650,21990,18220,-4747,-27511,-25515,-29203,20013,6090,14814,-14374,5058,5104,-28083,26716,1458,-28494,21881,9980,-10795,-5212,-156,30907,12995,-9823,28846,20924,-17818,7369,2130,25337,9067,-5901,12851,-15364,-24015,806,-6063,-17107,-18261,-5750,1591,6323,-26264,-14123,31610,8163,-23832,-1355,-15598,16209,-24133,-8024,-19452,-948,-5735,-27930,1386,23665,-18234,-21713,-22766,-11374,-2341,3268,15962,15925,17326,143,12684,10194,-674,-9025,1964,-31461,-19332,-9096,27280,29180,-481,6674,8834,155,-21377,31020,-26504,385,22483,-
28294,23157,-14046,12261,25986,-2411,15720,2365,-9549,12656,3816,-31019,-12724,11910,-17202,6604,-12239,8090,25611,-19409,-1851,10848,19949,-5858,20849,16204,-3010,29427,6682,18812,-27438,-31153,-30850,3253,-27949,-187,18898,32295,16349,-5233,-2258,-863,8603,-28516,25656,-28902,5314,-17367,29113,20359,-20248,23132,3350,-5594,-20480,-9374,-16154,205,-11281,3564,2319,-1384,946,5663,-27344,-28270,-9181,1390,-14639,5294,13113,7775,-24740,3676,-16181,-30317,-28358,7626,-10837,-11191,9661,-20836,-24174,4690,-26475,-1873,26067,17155,3125,16015,-21403,-9916,15411,-7267,-19266,1441,-4847,31915,-13569,28115,-11323,-10848,-5915,27227,-3441,-28568,-22799,17528,-10769,-30124,32540,-29052,20305,-31713,-29940,28001,-24834,-32267,-6232,-16849,32586,9115,-25786,-17300,12909,-31733,31385,-6167,17155,81,31595,21735,-11992,14653,16959,13058,1539,18033,23007,-30059,-11571,14404,-4904,-27172,-17004,6662,-29790,3615,-11879,4148,8734,9124,7949,-29420,5012,14266,-7020,8322,-29216,13760,-9845,-19067,-1990,14226,20448,1710,-5813,12720,14085,6388,-18635,-23772,-30168,13985,-8480,31158,19804,-1009,-506,8271,7883,12138,9268,26975,-16452,19923,-4934,1268,-5901,-29831,-32462,-23371,23535,13414,11865,-31418,9138,-17172,-25081,11062,-6120,-8712,25959,11296,15976,-4608,3448,31305,-1078,-20043,-356,-14971,-11608,6539,-4712,-6554,-11438,23981,14183,13406,2240,-27322,26384,-32096,16215,-14865,17467,22265,-14008,-11597,-5256,11668,-21172,29672,-14628,-31224,19664,21849,-16728,-17178,9493,-32580,-8780,16394,9385,10764,10743,-12168,7775,-20047,-21150,-22832,1217,30303,7772,-24834,-19050,-9859,-20943,-27628,11583,-28079,10080,200,22529,24985,28532,4276,-8087,-19194,20967,9811,-1691,-5883,-5190,17000,-30684,-16207,-18430,-2615,10358,-14347,23488,-31201,17795,19467,23798,-17939,-26234,-10174,24944,17013,-18021,27702,21619,-13445,-23254,28073,-28363,-11300,25288,28072,5753,-29444,17874,4439,-3182,-9477,18177,26256,9430,18839,-15153,-17462,-8022,-13449,18239,-12851,-3069,-24330,-13490,-9272,-2864,-25073,17726,1
8424,-26984,11061,-4854,-31746,-15511,-2590,-12417,-32752,-20740,30724,3842,-2930,-812,-7376,14924,12199,2791,21744,7339,16022,-11371,-15857,-10251,5733,26289,12154,10864,-28447,-26311,-15205,-29550,-25950,-7266,-4060,-29559,-17597,-27033,15863,-29557,-1624,13211,-4669,-18758,-10961,16724,13889,-31284,-8328,-9288,20509,8687,-14127,28880,22178,-4163,-19128,30171,6148,-7506,10294,-28521,-24029,-31550,-18039,-26474,23604,16528,7010,7626,14390,-15001,-20297,-28099,-27184,13931,22792,-12527,11641,10815,-26478,-5740,25518,-14733,-14862,8285,23596,-8026,-27337,-8218,31941,-1608,1039,28355,4244,12640,22450,5765,6870,-17519,8063,-16687,17215,15066,-14305,-7631,-26792,8791,-15494,6206,-5450,27186,22104,18680,-18157,-4002,31134,-25198,-14378,32610,12624,-25992,4655,27298,-20544,-25748,-7156,7180,-20454,-3825,-18203,-5364,2161,-12000,-4175,-15883,-14671,22633,-2127,-15927,-7194,9723,-27125,29815,12631,8845,-21105,16885,-21381,21017,-21342,-24725,-3018,-25435,-23029,-4354,-9212,-6648,-25133,193,13925,-26249,-20103,-9411,-12855,-8652,-7539,-30272,-32286,-6201,-25598,16855,10377,-13676,-29335,-2228,27181,24248,8082,5684,-31305,26918,20448,-11255,20155,-7668,-26204,5460,-3014,7332,22433,-1902,-30260,25608,-21855,11827,-31930,25271,-225,-30669,-824,-25989,-14358,-9207,24069,-18145,989,30524,16564,-15090,-27010,-17998,16293,-21507,21477,-223,12782,17171,27628,-32211,-20177,12138,13908,2693,-6059,-7259,-9777,29659,22874,25439,-17226,-9281,8070,-20197,-6154,-3190,5913,22559,-22673,-23201,-27965,-27771,27134,-2179,19114,-32530,4084,-21746,-27568,25974,-3942,13243,29859,9918,-19953,-12769,15530,-32640,-25561,2272,25383,23020,-31327,-17876,6523,19585,22687,24809,-16849,-25603,-22285,24126,-15464,-29125,-13024,-9034,9354,-2973,18260,12257,7467,-13656,-23236,-17088,-7620,28659,-3815,14232,29706,-30334,18729,26456,-18270,-23832,15428,12274,-18316,-28346,12046,-30136,-23348,-11346,5714,-14215,-31020,-6069,-7444,-8544,-25394,-8797,13461,8912,15356,32116,-14378,-2013,11433,21632,6216,-3431,-274
6,-680,-17295,-28108,16350,-26028,-20913,28634,-24813,10703,13038,-6834,-10168,8289,-20465,6706,17215,21409,25706,27588,-18963,-30612,16000,4485,-1625,-27893,-16710,28430,5462,-12015,18086,5851,-28336,13621,17990,23721,21933,-5178,7321,-18910,239,2869,-25142,-17751,8144,24115,3561,-20451,-28221,1084,-28795,-13297,-29162,-15520,-16503,-13609,-19932,29810,3583,-14315,-24398,30751,-18275,-14670,-27340,12823,9591,206,12,-32579,-21942,-3164,8192,1614,-4693,22841,-9168,19098,19841,18928,18899,-1374,13534,-17638,-5106,-12661,-29800,8702,1158,-26023,22158,-13850,-30219,25185,11344,-8180,-30413,-28922,-20248,-8779,15136,-19199,16680,-8498,-14225,28765,12685,-9740,-11152,23656,6388,3004,6932,-18347,13361,25378,2061,7621,-25601,-3569,14836,11994,15990,5877,4897,24697,-4452,4995,20496,-6837,-27305,-11766,32505,-5696,19049,-13356,31383,3586,1059,-1965,18798,8926,-16625,-7745,-4226,10767,26930,13234,-7220,26050,29839,13688,4004,29809,-26857,534,-18119,344,-19624,22537,-10995,-6995,-24965,14472,-4281,-13660,-652,-20628,12447,26663,-7486,27285,-27556,-2474,-31892,9469,5774,-19842,-17384,2462,12462,-21320,-31661,-25394,-32344,-7382,11442,13804,-4439,-15363,-9847,-20430,7040,-25448,-10101,8626,-30270,-8461,-24825,-25468,-18465,-8408,-18584,5072,17884,27964,9352,2811,14453,9749,9608,-12527,18664,11343,-8905,-22378,-14793,21021,-14764,-29653,-5437,-25640,-22538,13275,5928,13413,4350,-22892,10861,16534,-15605,-15706,-31171,26830,25727,-5002,-10435,18927,29618,21746,10431,630,-11257,-18327,19444,-28344,-30826,19468,14184,7042,17974,-18358,13616,-32453,7465,-17822,2623,-22545,29403,-26542,11558,-17086,16066,-28556,6319,-17677,8811,-17454,17692,-31035,-31415,-17301,8031,-12916,13291,12472,-4505,-459,18991,6095,-21253,-28519,1865,-26662,-5323,11503,17328,5965,31391,-4919,229,-10105,28969,-6266,17688,25932,-21547,-22602,-3708,-13481,9925,-24453,3547,-9604,246,26685,23093,7693,-2009,9748,3904,24206,-10111,-32009,-20049,27870,26993,-24364,-20107,5037 diff --git 
a/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice30_input0_int16.csv b/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice30_input0_int16.csv new file mode 100644 index 0000000..b8f9f87 --- /dev/null +++ b/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice30_input0_int16.csv @@ -0,0 +1 @@ +8841,-17133,28550,-20336,-27934,-16279,32498,-5848,24220,1362,26753,754,-11168,24726,-4013,-3808,-25618,-11740,-22777,24270,-11581,-21367,19662,10811,-13104,-860,-25146,28096,-29467,-31968,7310,-20852,32143,-16020,-32220,10811,-17837,32064,16884,29742,-17468,-20634,17267,-31574,-17417,13182,11522,2543,-26210,30406,-21968,-5464,-2265,-31755,-29634,2240,-11631,9575,31093,12421,-17837,-20711,20510,7920,-22076,-15656,14221,-17735,-6179,-24609,-20336,-19576,28335,9765,20840,-19766,28201,-12379,20193,2324,9771,30301,4521,5283,2217,28999,-5674,17683,1349,-24109,6510,-15436,-27370,15060,10888,-25486,-31918,12721,-27423,-27752,4916,30072,13915,8792,4137,-6703,-733,-6647,8968,14931,-3028,14502,-31379,1624,29032,31190,130,-15118,26616,4161,-7202,25415,-25180,11683,32573,8178,29105,12313,2846,7957,-2064,14293,29865,-31506,-22069,-264,-27382,730,21052,4621,-23123,19338,-20934,4512,29324,26117,-15103,-23139,-28739,-28619,-6609,-13377,13186,16353,11220,24870,12195,13162,4038,-3341,12441,-18841,20687,-3728,2768,8178,-2198,23031,7292,-17242,-32626,-31694,16819,19816,-7167,-9213,17753,3894,6421,14642,-7484,8811,-12566,-930,2042,2584,-16673,769,-18870,29130,13981,-25075,926,22365,30310,-23220,1523,12823,20817,20231,-1185,-7340,11058,-29508,15622,14156,9014,-11905,-19225,-23344,19341,-21275,-10101,-8155,16311,14273,17071,13199,31236,23742,-17607,20050,-25658,26797,-11181,24270,28141,-13294,-10903,7578,20338,23802,-12770,-648,26143,13461,-30718,19557,-6580,-4094,-28155,12663,-5171,-9965,-25013,-22755,15536,-5126,30683,29864,21228,-1408,5922,22678,2175,29417,-7958,7527,31892,-2629,32505,30519,13864,15922,-29467,31873,-15274,31684
,-24350,10916,31297,-21784,-26195,-31770,-10994,-18777,-11018,13985,27420,27593,-5208,26077,4334,-4127,-2554,-9411,-4089,-10464,-22385,-6459,-3907,20448,-10235,20807,-9526,21841,29964,-10991,17955,-2277,15548,20426,30623,-15120,-9256,-23324,6692,-3739,4544,24884,32755,32307,-4843,-28851,-19002,3613,-9852,-7556,-17133,-30567,5090,-11045,21557,3061,-18826,7173,13883,-14978,29118,26474,-25319,7043,-29155,14676,-12003,-28204,13841,-13536,32216,9521,23121,-25929,-2744,-12860,6572,28390,-8062,-25554,20591,-2353,-3790,15773,18437,-26129,9823,-12114,16697,17823,-10246,25524,17830,20857,2183,-8326,-31309,29136,31279,-27741,-14750,-18765,-20250,17061,15302,17655,17387,4188,17181,-21143,-21801,-19756,-29357,5035,-21203,23785,-6320,-11370,23806,-12936,-14107,-15097,5747,8199,23086,11109,-16371,-10341,-32661,-6566,-6217,24903,12234,-31837,-1598,22751,-29249,-27792,-26037,-3475,10845,-25341,-25718,10670,-3919,30401,5216,-27535,-15229,11427,23749,-10997,-18998,6528,-9859,15077,23188,29677,-372,5485,23901,-26043,-1354,26598,17171,18984,27778,-20039,12197,-31715,-5024,-8410,22132,-27636,-8139,16392,-23202,3051,-9282,-31634,18969,20090,-11360,7147,-4213,-14444,10596,29600,-23993,-1147,3748,-19805,-10718,11562,-15675,-26414,-28172,-6675,-14090,-18407,-9891,-4795,22285,23714,18798,-21264,20690,26302,1252,5183,9620,9699,4699,-26804,-30213,-32399,25127,-9631,30798,15005,1214,25374,11192,-6642,-30581,-24204,-3839,-8789,-15241,12179,-26010,-22416,5165,825,8126,-14603,21759,24912,26366,15796,-16830,-24615,-7770,-8382,-13986,-17333,-12562,-31731,6142,-24233,-9557,-6160,-19070,3694,-4528,10575,-19306,4391,13037,25715,9928,-20671,-29361,-32554,-802,-7335,-25681,28552,-5163,-5623,5717,-2001,32007,20602,-14237,-15789,27686,14097,-4472,4083,-26659,-24929,-31817,7654,12706,-17144,7213,6118,27051,-5868,7440,140,5723,-29738,-29406,8230,11043,-25899,8863,-9251,18873,6063,-25694,14179,21154,-27476,-26364,16578,-1783,28546,16710,13133,-12263,17320,-2431,32735,-20434,-18013,-1377,-30464,-6567,14951,2346
7,-15832,-27098,-27621,-30003,23025,-25227,-7897,17528,-24454,30712,29211,-6596,30251,-7730,3295,-13361,22373,19419,16682,-10892,25631,-28976,8372,12343,19510,-9574,-12154,16561,-10086,-27431,-18987,11799,7641,-23062,9728,-18587,31476,16261,9894,19609,29437,16324,19005,-1460,-3024,2507,6645,18087,-1900,26886,-30059,-25293,-27120,1480,13183,-7267,-1851,-26386,10468,-29897,-24135,-30280,3397,-6441,8418,15028,-13830,-25480,-15645,-15561,13502,-1863,-28724,23268,-25605,-15666,-6133,-30391,10951,1189,13314,16800,16512,12407,-3371,-2181,21547,-22759,-13655,20866,-4662,2812,-31743,-16056,20994,-8079,2279,-24408,-24525,-14667,21269,-1933,-10927,-8372,650,-28797,15079,31116,19368,2616,2648,26595,32379,7799,32511,24096,-32585,21403,27077,-28521,-16978,1903,-10220,-8447,-194,6721,-8544,4842,-11770,20991,-29594,-23341,16453,4762,9915,16441,5435,-28121,26898,9094,-28450,-1375,24072,-1203,-1921,8534,17627,19779,-3090,-3782,-10799,-500,-32176,2512,-23156,-12860,-27754,-17679,-14450,467,18466,-4633,6015,22571,22702,14122,30867,26064,26527,-5517,19829,21274,-29404,-31675,-10660,-31918,-19330,-9649,5049,24607,22538,-7897,-30625,-32683,-29833,-6509,22041,23630,13383,12610,-25795,9763,-26321,-28453,26121,-11351,9554,-27008,-4788,-1222,-18136,-21748,-7160,-5871,-996,19788,25179,-13756,-16854,-27011,-31388,14455,22081,-26655,-4700,7743,-6449,-12628,-23221,24558,12402,-13474,-18513,25066,-12181,16014,-17308,31878,-15634,12017,-29515,-16682,-2659,-25695,8073,-19290,27697,23398,12416,14911,-28215,-2748,-15960,-15627,-18120,-4402,2019,9899,-21176,463,14187,1516,-30680,-27705,-13028,-6940,-30876,-10219,-18682,-29001,-9348,-7862,-1484,-18736,16474,28742,-12216,6507,-3776,-4886,535,-7653,19447,10074,-20354,24219,-6869,-13580,6763,10259,-11857,1557,14704,-8231,19859,-4274,14520,23766,2272,25322,-25300,-19722,11184,-24612,13869,21666,27575,13381,-18633,13945,-20300,-17794,8645,-8590,-465,-29587,25718,-6647,24781,-6484,25921,1731,-22322,-24647,-26828,-6774,-4970,-7933,-2444,12446,-15743,31158,6984
,-11588,25652,20272,28637,26820,-29572,16127,20996,3965,-18277,7513,19107,2307,-21587,-2944,626,-1539,-138,12778,-18235,-6561,-435,-32735,-21376,4491,-32345,-27280,-15930,-21801,17403,-27579,-23992,-21802,-18230,25862,-5704,24003,21817,-1618,-24142,5222,29724,5046,24930,19372,12141,4520,-26228,5396,-29950,4652,-29161,-28514,10171,6638,-27781,-13977,-30433,-20056,19902,18582,-11359,-21494,30224,25237,-665,19641,11259,-2198,25277,-22414,-4300,9248,19455,-5672,-11837,-28766,27253,-27099,-22286,-18820,23549,1507,98,-30548,-13529,-2923,2674,-23595,19959,-22217,-5187,-24093,-21056,10251,20102,-21903,5881,18945,13910,31072,27483,-3137,-26934,-12690,-4816,3131,17909,27083,6946,-448,27101,15741,-8960,-1675,32691,-20418,-4162,27701,-30151,-3825,9403,9726,3867,-27554,-7123,16952,12808,-25340,17213,26241,-17843,21461,5079,-15362,-7727,-23528,-21357,-9899,27451,-26760,2637,-22831,11729,-30520,1071,-21387,14781,-16797,26233,744,11278,-17731,17809,-3349,11861,-2972,14995,-2247,27713,16474,-30113,6125,17731,-7396,-26802,28698,-32178,13072,-14600,13071,-22802,26996,-22148,703,-21711,-2546,-23700,20299,19148,-26709,-14995,-9266,-24027,25616,18643,-9794,-7236,19415,32415,-29527,-16130,-17299,-13254,17362,3283,-26334,15591,26032,-17863,-10550,13579,1451,-3269,-6486,-4491,-3966,-17120,-9000,15581,16457,24503,-18861,18134,-15645,-16823,23417,-2079,24611,-17311,-6153,-11849,-318,19558,-12629,-10013,-32479,10905,-25038,22712,25273,10287,-1962,-16792,6008,15606,24566,-3672,-11972,22839,-10802,3150,-31075,1852,353,29062,19959,27594,10969,-7584,3407,-20057,8950,15024,-19451,6764,27309,-20172,27007,23710,10010,-13674,31458,-7180,14150,20545,-8857,-25453,3486,-22894,-26311,29236,-4930,31995,32402,2995,4117,22236,-1444,-6039,-6802,-25795,16953,22796,-31450,-16332,-1998,-476,8209,13221,-20999,26499,20128,-26279,-2922,-12306,9117,-30692,28504,13313,7383,-29752,25614,-25169,23297,-23549,-14421,-18161,10540,27610,21842,-15586,-4407,-11454,548,23226,-25027,11097,-12383,1267,14527,18788,-7012,-2956,-2
0061,-13137,4052,6199,15079,26511,-350,-11438,-570,-2915,5678,12419,-1294,18771,20752,-211,28287,4056,-17488,10496,-22349,3428,13381,1041,-32755,26114,20738,-24920,-9377,28750,-24031,21446,20009,-22518,24056,-17285,8263,2870,23217,-17795,30345,-3673,6920,8670,2969,-1416,-25188,-11106,-1087,3718,19083,-26538,27769,22091,26729,11414,9078,21201,-5819,-25441,16845,-18302,-6807,-9393,-10405,-6008,14656,-1525,14248,-10347,-18353,14178,5495,17514,-28495,1902,-16802,-73,7532,-5654,-23213,4117,-30736,-17385,17504,7099,29923,-8410,19115,-8746,4333,6466,1983,655,-4924,-5233,14987,748,26749,1215,-2161,-15579,-23315,12422,6289,14655,-6605,950,-8324,-29925,-18913,9984,-2728,4670,21462,24561,-24221,-24480,2357,-23010,31738,27115,14971,-12979,-11211,-18480,-19040,-20055,31367,29623,-23700,24523,14088,32702,11877,20400,4495,-20393,28585,24549,7550,13080,5283,17604,-30360,10922,12086,32293,-23706,20046,3583,17228,1389,-21847,22129,13733,29345,-1566,-3516,23827,18077,-23586,-18851,-1828,16504,-29775,12983,-5829,20042,1059,23830,-9222,4470,5176,-25558,25691,-9641,4349,-23805,-30852,-31329,300,-31088,19877,-18867,8698,4652,3749,13949,-28588,-14752,27556,-29892,-11215,-1747,14520,32575,-15984,29503,2006,24051,-27486,-11594,-28695,-23436,-26952,-22009,-795,-3054,25912,-6680,-12140,-23027,-20906,11739,-11992,-28035,-18060,24146,-29056,-8636,24656,22884,-21423,10076,-16795,-23861,3396,28279,-27692,-28471,-5340,-21252,-13307,23056,30012,-1325,-22418,8028,30180,31143,-15898,30019,3243,-4849,-25016,27730,31667,-6828,-31988,-6933,-9185,3129,22075,13712,15534,-9028,-1571,10384,7377,-21438,360,23620,21565,-14118,9217,-6707,-12523,22342,-7907,-26358,-23345,19563,-23120,-19395,9654,-16752,-23107,4152,6342,29725,-28302,-11734,9382,-1045,-1075,26296,13374,-26018,32325,-16266,-30016,-29664,-21787,-23935,7591,23714,12790,24590,-14071,-21592,18650,21990,18220,-4747,-27511,-25515,-29203,20013,6090,14814,-14374,5058,5104,-28083,26716,1458,-28494,21881,9980,-10795,-5212,-156,30907,12995,-9823,28846,20924,-
17818,7369,2130,25337,9067,-5901,12851,-15364,-24015,806,-6063,-17107,-18261,-5750,1591,6323,-26264,-14123,31610,8163,-23832,-1355,-15598,16209,-24133,-8024,-19452,-948,-5735,-27930,1386,23665,-18234,-21713,-22766,-11374,-2341,3268,15962,15925,17326,143,12684,10194,-674,-9025,1964,-31461,-19332,-9096,27280,29180,-481,6674,8834,155,-21377,31020,-26504,385,22483,-28294,23157,-14046,12261,25986,-2411,15720,2365,-9549,12656,3816,-31019,-12724,11910,-17202,6604,-12239,8090,25611,-19409,-1851,10848,19949,-5858,20849,16204,-3010,29427,6682,18812,-27438,-31153,-30850,3253,-27949,-187,18898,32295,16349,-5233,-2258,-863,8603,-28516,25656,-28902,5314,-17367,29113,20359,-20248,23132,3350,-5594,-20480,-9374,-16154,205,-11281,3564,2319,-1384,946,5663,-27344,-28270,-9181,1390,-14639,5294,13113,7775,-24740,3676,-16181,-30317,-28358,7626,-10837,-11191,9661,-20836,-24174,4690,-26475,-1873,26067,17155,3125,16015,-21403,-9916,15411,-7267,-19266,1441,-4847,31915,-13569,28115,-11323,-10848,-5915,27227,-3441,-28568,-22799,17528,-10769,-30124,32540,-29052,20305,-31713,-29940,28001,-24834,-32267,-6232,-16849,32586,9115,-25786,-17300,12909,-31733,31385,-6167,17155,81,31595,21735,-11992,14653,16959,13058,1539,18033,23007,-30059,-11571,14404,-4904,-27172,-17004,6662,-29790,3615,-11879,4148,8734,9124,7949,-29420,5012,14266,-7020,8322,-29216,13760,-9845,-19067,-1990,14226,20448,1710,-5813,12720,14085,6388,-18635,-23772,-30168,13985,-8480,31158,19804,-1009,-506,8271,7883,12138,9268,26975,-16452,19923,-4934,1268,-5901,-29831,-32462,-23371,23535,13414,11865,-31418,9138,-17172,-25081,11062,-6120,-8712,25959,11296,15976,-4608,3448,31305,-1078,-20043,-356,-14971,-11608,6539,-4712,-6554,-11438,23981,14183,13406,2240,-27322,26384,-32096,16215,-14865,17467,22265,-14008,-11597,-5256,11668,-21172,29672,-14628,-31224,19664,21849,-16728,-17178,9493,-32580,-8780,16394,9385,10764,10743,-12168,7775,-20047,-21150,-22832,1217,30303,7772,-24834,-19050,-9859,-20943,-27628,11583,-28079,10080,200,22529,24985,28532,42
76,-8087,-19194,20967,9811,-1691,-5883,-5190,17000,-30684,-16207,-18430,-2615,10358,-14347,23488,-31201,17795,19467,23798,-17939,-26234,-10174,24944,17013,-18021,27702,21619,-13445,-23254,28073,-28363,-11300,25288,28072,5753,-29444,17874,4439,-3182,-9477,18177,26256,9430,18839,-15153,-17462,-8022,-13449,18239,-12851,-3069,-24330,-13490,-9272,-2864,-25073,17726,18424,-26984,11061,-4854,-31746,-15511,-2590,-12417,-32752,-20740,30724,3842,-2930,-812,-7376,14924,12199,2791,21744,7339,16022,-11371,-15857,-10251,5733,26289,12154,10864,-28447,-26311,-15205,-29550,-25950,-7266,-4060,-29559,-17597,-27033,15863,-29557,-1624,13211,-4669,-18758,-10961,16724,13889,-31284,-8328,-9288,20509,8687,-14127,28880,22178,-4163,-19128,30171,6148,-7506,10294,-28521,-24029,-31550,-18039,-26474,23604,16528,7010,7626,14390,-15001,-20297,-28099,-27184,13931,22792,-12527,11641,10815,-26478,-5740,25518,-14733,-14862,8285,23596,-8026,-27337,-8218,31941,-1608,1039,28355,4244,12640,22450,5765,6870,-17519,8063,-16687,17215,15066,-14305,-7631,-26792,8791,-15494,6206,-5450,27186,22104,18680,-18157,-4002,31134,-25198,-14378,32610,12624,-25992,4655,27298,-20544,-25748,-7156,7180,-20454,-3825,-18203,-5364,2161,-12000,-4175,-15883,-14671,22633,-2127,-15927,-7194,9723,-27125,29815,12631,8845,-21105,16885,-21381,21017,-21342,-24725,-3018,-25435,-23029,-4354,-9212,-6648,-25133,193,13925,-26249,-20103,-9411,-12855,-8652,-7539,-30272,-32286,-6201,-25598,16855,10377,-13676,-29335,-2228,27181,24248,8082,5684,-31305,26918,20448,-11255,20155,-7668,-26204,5460,-3014,7332,22433,-1902,-30260,25608,-21855,11827,-31930,25271,-225,-30669,-824,-25989,-14358,-9207,24069,-18145,989,30524,16564,-15090,-27010,-17998,16293,-21507,21477,-223,12782,17171,27628,-32211,-20177,12138,13908,2693,-6059,-7259,-9777,29659,22874,25439,-17226,-9281,8070,-20197,-6154,-3190,5913,22559,-22673,-23201,-27965,-27771,27134,-2179,19114,-32530,4084,-21746,-27568,25974,-3942,13243,29859,9918,-19953,-12769,15530,-32640,-25561,2272,25383,23020,-3132
7,-17876,6523,19585,22687,24809,-16849,-25603,-22285,24126,-15464,-29125,-13024,-9034,9354,-2973,18260,12257,7467,-13656,-23236,-17088,-7620,28659,-3815,14232,29706,-30334,18729,26456,-18270,-23832,15428,12274,-18316,-28346,12046,-30136,-23348,-11346,5714,-14215,-31020,-6069,-7444,-8544,-25394,-8797,13461,8912,15356,32116,-14378,-2013,11433,21632,6216,-3431,-2746,-680,-17295,-28108,16350,-26028,-20913,28634,-24813,10703,13038,-6834,-10168,8289,-20465,6706,17215,21409,25706,27588,-18963,-30612,16000,4485,-1625,-27893,-16710,28430,5462,-12015,18086,5851,-28336,13621,17990,23721,21933,-5178,7321,-18910,239,2869,-25142,-17751,8144,24115,3561,-20451,-28221,1084,-28795,-13297,-29162,-15520,-16503,-13609,-19932,29810,3583,-14315,-24398,30751,-18275,-14670,-27340,12823,9591,206,12,-32579,-21942,-3164,8192,1614,-4693,22841,-9168,19098,19841,18928,18899,-1374,13534,-17638,-5106,-12661,-29800,8702,1158,-26023,22158,-13850,-30219,25185,11344,-8180,-30413,-28922,-20248,-8779,15136,-19199,16680,-8498,-14225,28765,12685,-9740,-11152,23656,6388,3004,6932,-18347,13361,25378,2061,7621,-25601,-3569,14836,11994,15990,5877,4897,24697,-4452,4995,20496,-6837,-27305,-11766,32505,-5696,19049,-13356,31383,3586,1059,-1965,18798,8926,-16625,-7745,-4226,10767,26930,13234,-7220,26050,29839,13688,4004,29809,-26857,534,-18119,344,-19624,22537,-10995,-6995,-24965,14472,-4281,-13660,-652,-20628,12447,26663,-7486,27285,-27556,-2474,-31892,9469,5774,-19842,-17384,2462,12462,-21320,-31661,-25394,-32344,-7382,11442,13804,-4439,-15363,-9847,-20430,7040,-25448,-10101,8626,-30270,-8461,-24825,-25468,-18465,-8408,-18584,5072,17884,27964,9352,2811,14453,9749,9608,-12527,18664,11343,-8905,-22378,-14793,21021,-14764,-29653,-5437,-25640,-22538,13275,5928,13413,4350,-22892,10861,16534,-15605,-15706,-31171,26830,25727,-5002,-10435,18927,29618,21746,10431,630,-11257,-18327,19444,-28344,-30826,19468,14184,7042,17974,-18358,13616,-32453,7465,-17822,2623,-22545,29403,-26542,11558,-17086,16066,-28556,6319,-17677,8811,
-17454,17692,-31035,-31415,-17301,8031,-12916,13291,12472,-4505,-459,18991,6095,-21253,-28519,1865,-26662,-5323,11503,17328,5965,31391,-4919,229,-10105,28969,-6266,17688,25932,-21547,-22602,-3708,-13481,9925,-24453,3547,-9604,246,26685,23093,7693,-2009,9748,3904,24206,-10111,-32009,-20049,27870,26993,-24364,-20107,5037,29603,-19393,32514,3548,818,24176,-28267,-17889,12329,-20286,1351,-25357,24384,2638,30399,-15881,-28034,30939,-18103,-27207,-16224,-27617,-7041,-5858,6372,-24527,-8517,16560,3387,30795,-3787,-24353,-29190,-25566,-23509,-23972,-23502,-27713,10143,-13582,7353,26574,31087,-16942,-5124,-29889,17695,26728,-29119,-8731,-7649,1542,-289,15690,-31427,-343,22191,20023,20988,-26526,20417,25210,26133,-29034,-31382,-885,25531,20865,10208,1395,2084,-15613,3403,-30756,-32348,-14062,23772,1432,-24851,13522,-7097,-15748,29823,8074,-1472,19895,18738,-9040,-5404,19984,31525,-4792,-2890,22521,1496,6874,5175,-16897,-12352,-11941,27643,-14452,12080,17393,-4564,25946,24096,4069,2170,-6326,-20210,-696,-12729,-14613,-21217,29989,-15170,-26816,-5204,-13155,31145,-20073,24134,-26972,2446,-30366,-15167,-13032,-4201,30900,27787,-19133,-2554,6970,-13407,7730,-30279,-31441,12138,-24865,-3206,-14559,-22752,-24578,31917,-13045,20703,20847,-7455,-15813,3165,-32699,16970,-1162,10197,-12008,3162,-24962,23749,-5487,6609,-4702,-28236,21482,-22617,-13579,11905,-2569,-30505,32274,10845,-1771,26495,30779,30346,-22620,-31274,21964,-11755,-25492,4850,-27191,-26782,23861,-2128,26062,-32234,19860,15043,15462,26477,-32305,-28936,13328,12007,31031,7862,22961,20302,17314,10900,-16388,17099,-1038,31516,-17498,10487,10022,15849,-10198,23026,14136,-4928,13226,-26603,32047,-15441,3210,-18723,27973,19560,20629,-6039,-21444,5224,13047,-3734,390,-4725,18356,18062,28100,-30697,15064,1256,-13261,31989,-16604,-13748,5873,-24257,9909,-30001,27708,-22450,-431,-24531,-16197,25177,-16189,2381,24335,-236,-10850,-32340,2929,17281,-22273,-30819,17958,21677,14685,4991,-22524,12827,27355,26933,3074,-4565,11071,-1435,
-28110,-15043,2875,24549,-29539,-31629,7204,-29009,-26530,-19439,-20479,-22627,20988,1693,-10952,19250,-27978,-11964,24670,-3497,-13452,9228,15601,16131,28632,7643,-4726,-1789,7074,21810,19529,30558,-31952,15385,-5761,8234,-2770,15250,-12342,28700,-3931,-7342,-5227,14646,20220,9534,26430,13434,18368,7441,23530,25095,8137,-17640,-1200,13119,18483,-20955,27040,25704,-28923,-22272,10153,-25128,-25223,11780,26354,30639,-4167,28192,3360,-15310,17092,-5196,22370,7090,-21707,10581,-32086,-12474,12860,15315,-25600,4823,8534,13240,31102,-11095,-29918,1826,28814,-4190,-16516,-8239,15110,-21829,24388,-23710,13853,26287,29320,-13322,6515,13898,5831,-3525,11637,-23922,-888,737,-30316,-6859,-9018,21974,-16610,10663,16875,-22865,30077,17420,-6395,-21288,-13572,-14648,-24986,-23699,-20397,11266,-12424,-20167,-10939,-32121,-15079,-4600,4775,26750,5236,11985,7718,7357,19274,6490,-446,4178,-15528,274,31892,-8163,-19610,-5398,-22424,-29366,21086,-1940,27636,9116,31372,-17062,16822,978,4328,-12250,-2249,-21528,-7416,-16391,3566,9568,19345,15759,12423,26227,24180,-20163,-26487,-225,-31662,22456,15714,11048,26597,11735,-11937,29566,25723,3487,13160,-29597,30477,-4609,25704,-18580,4926,-23099,14014,1678,-19058,27789,395,-7451,6621,-2481,-25064,-31176,5306,-24094,1597,-10314,3745,-13269,22672,18053,-5161,-3472,6335,-2963,-18751,2518,-17670,20885,24096,-25695,27248,32380,-29693,29031,-13673,-15575,-5722,-5372,-12290,-23746,-19681,-1850,-26707,21958,6639,14649,-3638,-16808,-13033,-566,30482,4844,1393,-8898,26980,15293,-27626,-30640,-2453,19637,-10619,17795,21242,-15520,20115,-27128,19035,28014,-10365,19977,-26494,-16024,13650,1017,-2651,26673,-12141,-16883,-20999,-30458,-18550,21867,-21305,-26892,11896,16378,4138,-701,-4655,7089,-3445,3909,-11871,-30110,-7588,10677,-32134,-15103,24046,20800,-6997,-25128,-32053,-387,-14737,-505,-15779,13350,-10694,10799,-15946,-498,16220,-30474,-21470,-20520,10227,-16967,-26585,1310,29951,-5449,903,9458,11690,9487,-15876,-29163,8663,7407,4260,16479,-1709,11506
,-2617,-13849,-837,24320,-20647,-2740,-6794,-24300,32455,12907,-5809,-23202,6655,8907,29524,-9771,-30805,18499,-25673,-22700,-1056,-12673,-30239,23237,-25437,1919,19598,30394,-5605,-19223,-28281,-23340,-19654,-6070,-22519,-10562,25357,-31078,-12748,-12144,-1407,4779,25871,17552,-18559,22897,18492,25563,-4454,27866,20484,-54,19836,-17574,9168,-25255,703,26865,-19641,-19463,-22026,-22316,11624,-29396,-4141,-16516,-3072,10242,-10586,3748,28959,-15501,-14958,-26617,-18062,4324,26788,818,1502,-21598,17387,-28110,29677,32560,2681,13069,27703,2473,-623,-20793,-11401,4332,8033,-26046,-25807,23196,3484,-18408,-32329,-197,23992,11576,-23561,13069,25735,29175,-21382,-14275,16555,-1707,-1239,-28713,-6002,-11695,23963,17819,29862,-12449,15448,11859,-12607,-17595,-19383,-3918,28979,-20779,10032,-1094,25424,-15293,-22690,-13650,-9576,9192,5723,6933,-25970,6203,-30440,-24259,10547,24424,15262,27342,31736,-26642,-4731,-12737,-7305,28474,16445,-4299,28460,-22667,-16165,-15819,27577,9130,-30731,-20050,-16893,13887,-6657,-20754,21617,-18586,-23504,-24979,20903,-6107,-15587,2107,9856,-334,-21344,-10273,10378,18171,-15808,3613,21866,7928,27054,14612,-3095,31051,-9927,3134,-12960,-1116,-6971,16506,-17353,16726,18376,18356,10037,-14784,-13151,3114,-24183,96,2696,9386,-23254,6048,14476,2682,20459,-24384,21218,28932,-17683,-5562,-1023,31152,1093,8653,16348,15234,-26372,-23820,-24061,24195,-19713,11247,13436,-2431,1021,-4792,-15519,-14367,-29568,-32138,-5091,22728,28385,27204,-18371,20043,-2929,16093,-24172,-25769,15654,8856,12703,-13224,-21980,15723,-32138,6335,-20744,-6563,8370,-7125,1941,8153,-6918,-13503,31692,13273,-28964,29753,-3236,30354,-31684,-13673,-20998,-8792,17942,-30274,-14949,-5648,22698,-9992,-7780,-30052,-8905,13758,-20911,26903,-18380,12460,1876,-13818,10438,1046,10116,14813,7166,-27148,11171,-29727,-1238,-10017,9909,19060,25022,-3940,-14830,29761,-22418,-2856,13375,-20982,21445,-14012,29769,-12437,-13882,17521,-24071,26343,13345,-12141,-21948,2020,32074,10950,-22029,14933,1
3723,-13346,-6800,-25355,-22690,-9221,18271,-13653,-22102,-24115,3735,23037,754,24150,26231,-4690,-32416,22865,-2305,-24385,-18940,-9968,11568,-31330,13175,9013,14996,-7030,3891,21648,9812,-22250,-19303,28799,-30166,27828,-16102,-6994,11865,22377,-15320,-15575,-15222,7424,-21610,30615,3117,2021,-1560,-119,-2523,-12911,31301,20785,-28962,28499,-25893,-8247,9337,-21640,18350,7879,22135,-12741,1252,-1594,18647,7639,-30027,-22015,31637,12388,11415,-20594,-10814,-17029,22080,-32069,-10440,-19658,28851,-18109,5960,14107,30848,-9404,-202,-14707,-29015,-18321,15389,24525,-4166,-30931,19955,-13147,14969,-5284,-22766,32036,-31006,18340,-12406,-16351,-8928,-7590,-14911,-8265,6934,17665,14448,-1238,-20857,-23975,-20971,15547,28124,24185,-425,27569,-21347,-20633,15914,-24254,-1487,21577,12893,-21181,-6730,-2700,22891,23763,-32485,-20896,-10374,-9098,29190,-27252,12510,4992,10656,-29200,26429,-32594,-21743,8969,-24043,8169,22735,-26970,-12576,17926,-32686,16251,17770,-18332,17018,9919,10540,-24659,-25801,8049,-26649,1264,-19630,7733,10561,-30144,-1121,18126,11251,-22551,15032,-1885,21079,-21014,-28347,-14656,24885,9181,-17389,10071,-21325,28558,-918,-21791,-20800,-17592,28153,-6570,-17857,2737,18742,24225,-7415,8047,3186,-1452,20136,18580,-13391,15133,8503,19875,-19412,21754,-13844,-10334,-2638,26059,11523,5365,1460,-19805,-24862,2849,23800,-16381,-30570,30278,-7171,-14167,-2328,22354,476,17053,9457,-25968,13792,26113,-4144,-6772,-22749,10968,24003,-20675,13809,-13724,4694,-557,-5873,-13354,-21024,-18735,-10945,-20614,-24021,3324,25471,-25741,6388,-10859,19889,-17913,-12950,15372,-18947,-11104,31406,17881,-31200,9690,25212,17751,9974,-27555,13108,-16431,28245,29385,109,31893,-8010,6446,27832,20330,5140,-1007,-12039,30391,30729,-28127,-25603,-7312,2761,-16145,44,-24593,19789,-22829,-2742,-4116,5170,-4483,-21711,-12469,24987,-26084,-21131,-10597,25582,24799,-29114,5104,15839,15313,-20849,332,31857,19996,-27819,15499,-12571,-23183,-13593,-20941,-1260,-14217,23083,-25191,3972,3570,-4
725,-18592,-19469,27423,1780,-23417,-3423,31846,15607,18023,18400,8053,4062,-23491,7010,-12610,-21759,7343,-17382,32621,9169,-4709,-12286,23796,-1658,-18084,-21573,-7559,-22438,8792,27491,24666,14259,-12622,13674,-5693,-13111,-5247,-28646,6780,-22545,26001,4602,-26811,29076,-32702,20857,8264,-4296,-26426,-12322,-8780,540,23732,8410,-23486,28398,17655,31655,17666,5338,-31594,13851,-14205,-12286,-14,20813,-16290,-19294,-28360,13212,-21101,2880,-21588,13424,-17180,30766,27163,-10348,25016,9392,-24255,12049,24899,6640,-13486,-27002,-29692,30063,-962,-16631,-2051,15492,-30348,26577,-17715,-9209,12135,7671,-7131,6629,12679,-13058,-16746,23295,29699,9426,11654,20682,31633,25588,-15788,-9590,-16381,25602,-22900,31078,15624,28547,28213,14865,-8554,31647,10817,-24182,-25077,-5479,-12097,-23076,-1128,30722,20715,14190,29664,16857,-1941,-3916,14460,-523,-15975,-1992,15916,-27914,-17557,-29678,668,14108,12585,-20251,15645,-30154,24370,1338,-24644,24955,17656,27254,17127,5104,-5071,23035,26719,-30752,-27822,25966,5300,11639,-11534,25017,-14848,-30224,-2930,-10162,29017,-6712,-4313,6968,-10749,-13922,6264,-26024,11270,-22155,22499,-5451,-12212,9258,5466,-16249,-7571,3180,9608,-18257,11539,24720,-22052,5729,17433,22901,15614,-10143,21583,283,13617,24717,-24806,-16828,2763,-11575,-17518,-21130,-32010,-31795,30953,28672,24751,-1495,-7984,-19661,9782,5371,-29932,13801,22017,692,30097,-19814,-25275,-8906,5131,32486,-3569,-12674,-23271,-12652,-3215,-25443,1781,28186,-20213,16137,30966,7639,-10837,28230,9177,11057,-16772,-28571,-18557,-7157,957,-17777,22115,-8213,21264,-14235,29618,-30124,-22664,-20835,-28885,-16308,22908,-4211,-26871,24589,2174,13308,30179,-31432,24371,14854,-5277,-14948,32540,-25555,-2582,17788,-7541,-10557,-16370,-24394,-22702,-3756,-31884,21620,-9994,1524,11938,32252,23791,19236,-32562,-16999,14915,2696,-25222,543,492,-799,4482,19122,890,8452,-11558,17108,17208,526,-10664,31996,4900,-7114,26612,-24836,-21045,-26601,-22791,4074,-14767,-29299,29766,-4167,14682,3982,209
55,-5578,30277,788,-16746,-11719,16152,-3645,-2713,-24479,-943,-25162,9546,-17953,-19588,-32542,26158,-16922,-9558,9546,18643,-10025,-11064,-25475,-4099,-4172,10030,9861,6046,-1043,6207,-17670,-15093,17704,11041,20742,23853,32684,15190,-15187,8999,-29378,-19155,26698,-16663,12865,-16804,-16409,-28989,-27558,-3636,-1968,-27051,27367,27204,-1969,20730,-7651,-23714,18886,1501,-7634,-6498,27288,-29022,14215,-8469,-29614,14958,-29077,10043,1272,25993,14990,-7402,20202,30964,-11699,-29185,-9342,22494,10893,-32413,19120,-18153,9607,-21954,9934,28750,1495,27198,-16120,-28748,-3371,-2494,28400,-9977,-16009,4132,-23619,-3199,-4831,-23225,-4333,27723,-19156,-15670,-16425,10887,2634,4959,-28493,6928,-32647,26344,-22859,-30030,28623,22867,-22613,-6932,7402,-17510,-6033,-15344,-14419,31626,-16904,-32606,-25553,-20709,-3174,3132,15454,30550,18726,22117,4978,32116,15817,-9048,-26300,-5453,9501,15680,17163,-7388,26057,22577,5464,19504,19848,3807,5050,-6669,-30421,-23774,7626,-17836,-2706,16669,-15952,4701,-29129,-18332,-24372,6392,-1211,-15597,-1627,-2754,24408,-1648,15801,-3011,23384,19173,5420,15384,-1280,-3703,12317,18028,12396,20385,-25029,6045,-20142,26016,8135,17437,-1265,-15470,31322,20739,15849,-28973,-22695,-6399,9737,25415,-9851,-489,-28270,-30452,-1802,8687,-30485,-12989,18528,17604,-12773,2733,-30949,-15165,17592,23807,-25693,2829,3823,14382,-26068,22581,-26054,18053,-15470,28943,31873,-5955,-5018,-24329,-3501,-15267,31185,-3345,-21447,-9134,-28074,-23717,-23380,13965,-13762,-4621,29746,20304,-31039,3168,-14648,2513,-17018,2061,-26747,-19072,-17108,12335,-2660,19927,29863,-25798,-30747,-25446,-15925,17517,7051,-31253,-28353,4640,-9290,2491,-26763,24104,27394,-7928,21011,14375,-18496,30348,-25775,-29758,-16933,-23645,-30509,30743,-7215,-8936,5566,-20423,-18183,11194,-8044,6625,27429,10057,-19844,-13849,25447,22712,-13587,-17599,14044,-32412,-31717,1869,290,-3015,-12771,-24523,-23182,-2225,12692,-25371,-4503,-6851,2100,25720,7410,24505,4686,32604,-12423,24419,-6970,-13798,
2414,-11944,-3701,-8433,-32066,598,-20760,10300,29066,-13534,5421,-17566,-27968,-32139,21022,-6234,23739,525,32144,23316,-12531,-18190,-27089,2736,7284,-8816,-25496,-19296,13452,-15971,6522,422,-7220,16949,-14779,-25446,16434,-23959,-14434,-22949,-31046,8487,-17060,-31899,7791,-4072,13323,-3893,-32196,1692,-2231,-27854,28021,-31774,9142,-11309,12006,14653,13808,636,6980,-11203,-19457,-20345,24253,6162,-9736,-28770,-5421,13696,7960,-28378,31009,-28987,12758,-9962,-29375,-5677,-29064,31328,19335,-1157,14411,21772,-25365,14390,20738,-8083,3809,-25745,-29492,-12250,-10041,3917,-17157,11234,-29470,-8599,-29573,-2737,8359,-31834,-23845,2734,22054,-11519,-17011,-306,23047,27859,26673,13074,22107,11185,-20653,16877,15529,6368,23527,-16583,-18887,-9679,17104,7153,-6130,-1699,8585,-470,-5606,-25292,17899,6462,-31114,-23015,-22835,-3239,-16606,-19644,28014,26442,-19490,-4716,-32147,20987,-4585,186,9750,14597,-31741,4473,10230,-29259,1951,8471,913,16695,-31169,29,394,-5774,9598,19594,19978,5222,3402,-24522,-19161,-19430,30717,-13622,-28003,27546,-32322,-16657,2363,-2018,23632,-30756,-25414,-11023,368,-24585,-12444,-24236,-30490,31736,30271,-18554,-17395,-16167,9245,3657,-27953,30231,-4497,8613,974,-6925,6668,15359,-18444,5784,7894,-19227,-22539,-7673,-4690,28923,-24686,-24676,-30026,-14036,28334,-14822,3634,5903,-12769,5918,-20300,17600,11732,-11595,-27438,-24896,-13731,24624,-10613,-27706,-30780,2183,22286,-10424,16771,28928,-8103,29661,5954,-14253,17985,12341,6124,-13461,-30295,9156,32620,18909,9596,9370,2845,-31637,17325,31041,18510,-14287,-2830,-17587,-27540,29815,13191,10430,31403,-2752,29052,30334,32099,20440,-7898,11436,23982,29739,-20815,23107,4257,8485,-2000,32635,-31835,-3119,281,-9769,20384,-13397,-23620,6502,-29217,28341,-16160,19866,-15986,-15515,-14310,-4611,2808,-11961,-4735,13316,-22506,1077,14536,-10298,28243,-8925,29708,1379,1785,12115,7374,-7953,-15004,26688,14984,-21726,20822,-20292,17758,-16095,-24872,-13993,-18360,32000,14221,29113,17795,-973,-24351,-10111
,-8221,-22221,-29700,-26007,-3929,-7992,24947,-17598,27468,26149,14160,-18697,-12226,-28472,-7608,-29734,5766,27123,5040,-23197,23252,-8671,26458,-13799,-9114,3273,15206,-2623,-23772,26817,9106,31015,29699,-14353,-4008,-17349,2416,-9945,29779,7258,22591,17826,-29708,4266,-11575,-24882,7238,1114,-11648,-5873,10144,30988,-524,-31441,-27511,280,8866,8576,14574,-2168,-740,-3266,27924,25561,-7216,-22205,-8908,-4877,-8663,-25586,-2909,10937,23818,-15356,6166,32691,-24588,-13029,-21503,14727,17467,18348,-22820,12449,-15346,-24539,12325,5224,4696,-3618,13684,-11526,-18003,27411,-15935,-22749,17204,20134,-16046,20745,18885,-15256,29359,13116,-180,22663,21831,32661,4011,-15837,4499,-23417,22412,-26403,-17514,32435,-5384,31781,25114,-22304,-15115,10073,9281,-12360,-23931,-25075,-32502,-32486,-30640,-1093,1939,8539,-12263,22066,-30128,25995,-11018,-27525,2819,-6518,12534,-4150,24858,-13120,-13376,-24725,-10552,39,-14786,-25381,-26786,-18089,-23221,27055,14561,4277,32253,-9740,11072,6233,20224,-22106,-14820,-12674,-3433,12104,18690,-29908,-28376,-11901,-27716,-9630,17239,-5829,7116,22273,-5290,23194,2177,-32329,-22134,-8530,-24789,-29321,-26110,-26393,30488,3875,-32039,-4271,21579,-26316,-8404,4907,28933,-23712,-19475,-4670,-19113,-11827,17936,6400,-11726,22172,25076,18248,-23054,26368,26898,20656,-25632,-23999,-5263,-17149,11044,-1972,-28859,-10199,-26504,2835,-15435,-23352,16059,25853,-2914,-19260,5394,-21893,32271,-16490,-23721,-27485,-11859,11683,1665,15391,8623,9991,-193,19283,-16036,1967,27368,-1890,8453,4875,27188,23731,-9325,-10150,-2115,-1586,-19125,855,-18515,16187,-839,27867,-20747,-14284,5838,15045,-17959,10317,31969,26604,-762,13552,8427,5239,15414,23625,12505,12235,11727,-21900,-21596,-17940,21245,11647,-3840,-11425,26539,-2091,-3179,-15865,15373,29944,-21077,30335,-23665,18745,-27871,5098,-12473,-12601,3235,-12489,1323,-30536,28623,-23093,-32369,28096,9798,-2835,30639,26911,29226,10983,-27123,-4298,-15289,-19350,-1652,1096,20562,-18219,-4813,12473,-26624,-7811,167
69,27820,32398,-21223,22177,20469,10541,11506,16485,8997,22970,-11069,8634,-20886,2806,29739,-28118,28954,14271,19571,24076,-27202,19578,3783,27312,21578,3362,-3351,7263,-5409,7946,7798,27327,-30980,-5551,-1985,11820,10559,3553,20666,8303,20330,-32729,20325,-17511,-4035,-30636,11683,4284,19901,17172,4027,-13929,32027,-30353 diff --git a/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice31.tflite b/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice31.tflite new file mode 100644 index 0000000..4d99fcf Binary files /dev/null and b/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice31.tflite differ diff --git a/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice31_golden_int16.csv b/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice31_golden_int16.csv new file mode 100644 index 0000000..c20ef6c --- /dev/null +++ b/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice31_golden_int16.csv @@ -0,0 +1 @@ 
+2628,-17844,15505,20159,-8242,25522,-17117,-8073,13276,30194,-26212,15116,-10716,2772,-6214,-15886,-25618,23818,24867,12452,21424,-28240,-8140,9891,22939,9342,-28839,10283,14272,-16107,26841,-15524,-4170,-2334,24418,-21852,-11028,5132,-26532,-15189,-5547,11405,26174,-6820,-29879,29328,31589,-10152,7530,8293,5388,-27938,-3526,19889,5456,-5723,30231,-2516,5095,20701,24781,14417,-11727,11452,26078,-1835,20981,-26367,4063,7469,-10512,-694,5207,10742,-18826,-26789,1741,15544,10493,11602,28621,31900,12900,13427,-23820,18322,17718,17996,-28032,-18066,-32052,18925,3872,-16461,2061,-20317,4764,19945,-12996,13677,12245,-14565,14642,21095,-7142,-3863,-29640,29465,28396,-4835,-12724,-18384,14811,12899,23522,18880,-15612,32698,-21917,31027,-25887,-484,15808,-13926,2013,-20399,17825,-32289,18053,-31443,-2532,-26647,-12614,-11913,-11819,-1150,-25740,2321,-1339,13972,25409,-16900,17860,7172,29936,-2121,-31534,-12097,24145,-10989,-20539,-27321,15001,-6691,3446,-26548,25631,8157,984,-1748,11171,-1561,-26940,20912,28341,-29664,-18224,17919,27774,-28836,25211,20169,11970,1382,10978,27847,-21269,25039,27044,4325,28910,-18054,4240,-24761,23499,9415,28792,-4126,-31911,2637,4217,7605,-8569,-14938,-18202,-4399,4362,-22278,-17531,-19910,17147,23823,-21207,-2144,11137,26075,22276,-16743,28329,28456,20904,-13884,13566,-16717,11877,27349,21121,-25165,-13231,-22912,-29349,11683,-116,-2254,23377,-7118,26372,-22737,-4903,-12917,-11110,9501,-12725,-856,1503,18134,-20843,-18,-9110,-5342,-25585,-2035,32426,15534,-25814,-22691,-22834,-3529,9375,27522,3720,32330,-2868,23613,14568,2941,-30000,16613,-1605,7310,-16258,-1874,-30294,16788,-5302,31674,7385,20117,571,27697,31835,-15872,-24153,-31597,-30996,32311,25099,-21090,7354,26422,-12033,-3260,-13486,31593,9624,6503,-6838,26796,1117,-27545,-4158,-19539,-8677,4351,-5777,17187,14465,25353,10383,-22514,10749,13730,-29859,18597,-2493,-20515,-19373,-30025,-30553,3641,4803,6379,-21508,-22216,23934,30912,25946,-459,-2464,14094,22094,-9402,22377,28567,-24789,19
686,-15643,3460,8265,5305,-13749,-5232,24704,3705,27239,-9202,-11995,28489,20913,14023,-30606,20070,22895,-6745,-17770,3451,24784,32276,-18682,6578,-6249,17313,-11506,-23543,-13703,15161,-2258,15348,-3654,24116,-16425,113,-6335,-7062,-22779,20168,10849,28162,28902,-31951,-6507,17897,-27287,-15835,-21432,-19521,25912,-2791,-13481,-16993,-10748,-17060,-32577,20232,31925,-17028,22459,-7304,13851,-1067,17754,4490,-17230,1003,14488,30319,-23331,-19757,3548,-19890,-2212,24061,8636,-26630,-15163,24128,31247,-3473,-21577,-10750,5121,-2843,8899,24001,14591,-19017,8299,17093,-31343,-31184,27201,26083,-27734,-20150,12628,-16090,-25904,11081,-10245,-10785,24805,21336,-10413,-4691,-7066,-13115,26617,-12382,-6018,15511,7819,19590,27032,-24581,7835,1818,17320,-15603,-22180,11515,-5285,-16543,9998,-23124,8832,-12384,3863,-22462,3399,-27369,15168,4543,-24290,-16093,27688,-29073,7447,27055,-14708,2035,13609,-28526,16046,-7580,-31822,5028,-959,-23270,1769,5420,30108,-16910,-21542,-12917,19035,13163,9370,-19496,27271,-26457,19872,-13228,-17128,20656,-16154,29090,24641,-1204,-12499,-13016,21609,5207,-19952,-1057,-11821,21800,-9456,26688,4794,-19410,25704,25663,-22041,-9704,-27669,27366,-15142,23368,-9268,-6551,-11160,19974,-11336,-27316,-31703,2124,-14421,-3671,-29738,-19058,13535,-1327,-17443,-5550,-24116,62,12561,-15113,-8743,-23449,-25097,-30796,-3867,20038,14943,-27023,-31328,27954,-1500,-19769,23499,7299,-9098,-5225,11876,21077,-3730,-1818,-3136,-11816,28038,-26672,-11640,31150,22802,15594,-6103,-14555,-12358,7147,-12746,-31652,3847,22682,-10203,-12316,17882,1298,-17840,-1637,24182,5011,31654,-10966,27592,-28634,-20007,-12799,-30064,18314,-21911,24496,-24425,-18852,-3245,-15160,13198,-7282,-16973,-791,-14637,-24480,22400,-20699,-2403,-27990,2550,17817,1145,18707,-5184,-11730,-5089,652,15387,25121,23003,9757,25440,31807,8414,7694,28009,3214,23823,-10336,-32030,-20792,-6599,-21426,1731,8191,31437,17800,18195,-4251,-14304,-1116,32096,26407,7168,-11807,11150,-371,-3707,24824,10152,-225
10,28882,27593,2566,2914,-24908,-16664,-10308,23166,-30913,-28685,-12320,20639,20693,-7801,-28358,-22245,-5374,15858,24977,-1718,13657,-20906,-22546,-2805,3754,32631,-25511,-29741,-1864,-7208,29629,-4007,28900,7635,5109,-15311,29806,1762,-26930,-16066,-22649,-16558,-30655,234,23223,4749,-28458,-4557,-29393,22926,23006,-23614,25440,18803,-4902,15455,-23313,-20977,11784,-16158,16603,-22827,-12725,-12641,2694,30392,3812,23480,-17661,-19091,-29180,19800,8688,-27400,21852,4227,-32530,13673,-19686,31925,-1815,-29668,-23858,-20733,1286,-2780,25014,3393,-10382,-13133,-2901,14235,27457,-27777,-5003,6364,-23317,5224,-10191,-5543,24247,-27576,7244,11273,1887,-28118,2264,23391,1031,-28949,-28646,-21453,-3948,-19031,-22445,7814,-7100,-22184,18108,-3479,-13533,-6757,11294,-3523,16213,-23716,17602,31977,-24738,23625,13370,6840,-32421,16506,3751,9117,-27428,11753,25245,-15704,5853,510,-8816,30512,-20679,-5173,18820,29078,-15182,-15671,4565,5177,23321,12051,21580,-20175,-25480,309,13301,14387,-13915,-5349,26112,4712,13583,-819,-18195,32742,-25518,-13651,-32587,8206,23293,21609,-21534,28919,31673,9659,12905,22148,6395,21052,7636,-5208,30141,-16353,22928,2475,-24216,10262,29163,-15974,15928,13749,17463,26122,-4090,-487,184,28424,-17966,18103,24190,-26795,21175,-31637,13258,-29341,-313,15015,27084,-32400,27328,-29817,11534,2648,-29867,-15050,18722,-32184,-23879,-25686,12670,-8077,31915,-32244,20660,-13183,-25870,15327,3316,15001,-2116,28502,17016,28184,7501,8185,30770,20781,31662,7378,-2913,-3499,-3463,12000,28467,19604,-23809,15503,-25351,30199,-22294,-4179,-31532,-15516,-13909,31158,30162,-13728,-32746,-15573,12494,-19989,32200,27337,17087,9853,-27915,-4628,26843,-20010,-5085,28357,-11134,11327,-12959,30670,-11343,31010,25994,-29514,9076,-638,-5464,-17928,-17584,-26906,-11169,-31486,5675,2232,14167,-17960,-10460,13368,-22294,-4836,-8410,-20170,-18681,14811,27890,29572,-18319,-4769,11156,-21694,7411,-9045,26143,-28456,-25227,-28159,19358,14108,5222,-25894,-2183,-1813,18136,29140,-2973
7,14807,-30131,-6490,-6745,-27319,-16533,27417,-11571,-1639,16080,13052,-23080,-31485,4140,8968,-22737,25092,9853,19392,-24771,8185,-30185,4545,-1156,4821,-18095,16376,-26021,18452,-16614,9654,32583,-9868,24940,31357,-7032,19677,-3777,-1715,-32157,714,18703,-8212,15080,1890,-22424,23197,-13054,-1084,13608,-24524,-15955,-3337,28406,-3605,-1305,-8146,30095,21668,2717,-2347,15774,1231,-4147,-8248,-28309,25745,-18284,-25056,199,7668,-22065,18715,13701,5577,9292,6999,18625,8363,28024,-17674,24798,-18809,-31246,-15081,-26199,-9291,-22355,26979,-12548,3334,13535,5265,-21430,28870,-19756,-11830,-13669,5004,-13917,-8630,-7159,1440,-21841,-28805,-25790,13851,-18586,8268,9534,-4027,-4797,-23473,14542,-14722,-14609,17190,31727,-28095,2783,24031,15185,-5290,-5555,-27693,-13590,10231,5728,20221,15117,26767,-27804,24349,-32364,-11371,-22361,-19543,15326,-22440,20383,4888,29166,-10849,-30277,11590,-19708,-24083,-26913,14190,2805,-30506,11270,-15255,261,-19122,-25452,-18704,-29063,28131,11540,-6982,-28414,27992,-25690,2472,1284,-13362,30881,-22179,27778,-22444,-13717,-16977,-21694,16289,20190,-14329,1499,-10464,16625,1998,425,32219,-28054,15916,-16343,26218,7063,31253,-10336,-31787,24572,25661,-20736,4624,13866,17699,14222,56,-2922,-6953,-10109,6447,6467,-23772,-515,-11368,30509,-17324,-15929,-5199,-1352,-6330,25484,-8654,20719,-2107,2739,-30451,15003,8643,9323,-14295,-32736,-18554,-709,4936,-15080,21477,-15307,902,27791,23368,11455,644,-27826,7325,11713,9022,-4150,31777,-9196,-27855,3790,21618,13357,14252,22270,9724,32595,-27779,7320,27446,-17197,-13108,-20047,6344,-25548,-29116,-31415,-23796,-1593,-3429,3443,-25242,-18428,18320,8252,-920,29918,-4096,22693,-17413,-889,-17535,10086,-16065,10585,-28743,-29343,2496,5657,8784,-16131,-22297,-199,-3971,20107,-6148,19472,-4565,-9604,22821,-32349,16855,-9862,25479,21596,-13800,-14593,-15085,-32510,-5552,-12515,18914,-9136,22562,-31494,28556,-24402,-20162,8972,26268,-25057,-17692,-17256,4262,-13624,23087,-32748,597,-26005,-25859,18878,29591
,-10765,-5147,-20952,23822,-22174,32648,-8572,-13273,-24137,-6307,20387,18979,-4274,-5179,-13389,18589,-13523,-5098,-10327,14482,-17915,30769,-5947,30727,1897,30996,14165,10022,20278,30034,-3212,-25993,19091,11690,13228,-14701,-11368,-31123,6964,3041,-9772,-23655,11466,14203,-18954,-14484,19000,-24961,-26210,28497,-22100,32276,22439,-2508,4712,-18298,19371,-1181,7003,-25146,-15878,18684,-29027,-1915,2966,20898,26050,1934,-30088,13676,17437,7599,18512,-24624,-27305,-22407,-1610,7669,25742,382,-24791,30794,-20010,-24330,-29307,-13645,9266,32675,9533,-76,4616,-13771,-20831,1995,4375,-3728,-6700,8757,-10494,18483,-21809,-26684,-17074,-4931,11851,6143,7990,-17489,-18601,-8749,-6910,18319,14210,18106,9141,22786,29249,9554,19389,-20554,12157,3453,-2464,-28246,9133,-6404,31765,-5584,-18013,22967,12179,28178,13223,5539,-6491,-21155,-10208,-16603,-30322,-1201,27019,-27630,21752,-26915,-22598,14613,7174,-427,5373,29867,31323,6661,-3322,15806,3003,32638,264,27747,3205,-12874,-15014,5407,12117,20624,-9085,32209,-11009,3880,32606,-22214,-743,9989,-10372,-1229,-28135,-17720,-1602,9223,-1957,-14963,2695,-7703,27893,26306,20254,-2227,-11011,21679,10761,-3063,-15197,1523,30028,-2281,17152,20567,5841,-6111,10058,31318,-1946,21905,18684,-12183,-23221,-29360,2659,-21913,16838,-29117,5404,-29073,6319,29363,-7134,-17583,24654,-12305,-14014,28095,-11519,-4592,17302,31048,-2292,-250,-29339,-16230,-6136,14803,20255,-10462,-31790,-4650,20300,9994,-3954,-21743,-9940,29345,24472,-22588,-742,-30847,-16950,-22413,3481,18402,21802,-28633,13896,27018,-8643,-27406,-12206,-28554,20845,4870,-16480,-9896,1801,25114,21217,20588,4285,-20264,22132,11664,30693,-12069,16374,2352,-2206,30051,-15231,12161,29679,11906,10383,-9386,17026,10872,1265,18765,-11939,-8787,-13011,17185,-11073,24558,21649,29356,-29869,18190,-3046,-10367,28247,-19384,1438,15356,2503,29076,4109,24373,20804,-15577,-18284,-3419,-28746,-15075,1635,5204,22744,-4724,30524,-27824,-30987,6946,-2379,-1170,-23274,-9644,-14224,12183,-1030,4159,-94
81,17229,-15971,15114,20181,31841,8969,-32235,-18601,-22625,7978,-27636,-19688,6253,6782,30551,-293,9040,-6202,21532,25187,29879,-12502,-16115,-1437,-29184,873,1715,31901,1636,-29990,-8458,-1660,-31979,2024,22151,-3834,-21709,16433,758,-15277,-11365,28435,23535,5709,-29786,-24085,14612,-15884,-5374,-4029,-11416,-30042,9890,31836,-25319,9262,-32734,4410,676,-27060,-22172,5996,967,12844,28513,-28524,14646,-25977,-30348,7617,-5957,31576,3484,-6743,29070,29087,-3119,4264,-16310,-11157,-4712,24346,-19818,-8350,-8501,2225,19530,7234,-8268,20959,-3175,-6793,9624,-9281,9795,-1859,-5602,11716,-30090,-17340,12141,-28277,-9418,14388,-49,-10174,-26200,-2341,25158,12384,-21505,-16824,4758,-16532,-32709,28109,12010,6356,-9126,-20801,21553,-12080,16864,25006,-15138,6433,-18847,-29143,10846,17222,29511,31369,7029,-9037,26462,-25097,11831,29310,24486,-28796,-5252,-23441,-18582,-6431,27227,20143,13577,23489,-32718,19772,18478,-28076,4454,14505,-32500,6885,-28909,-4983,10867,-15977,-24675,-9635,9655,19632,-22970,20687,23904,-32056,27919,-2404,-20031,14461,26678,-20956,7631,26277,318,9241,8871,21128,12747,-20899,24268,-14885,-14152,-18368,28614,2352,3315,-1426,4755,-13064,-30788,4933,7429,8239,-12816,-14513,31691,1947,-1284,-27458,-30179,-7843,-12927,561,-30027,-29794,-3072,28060,-22450,22486,-23116,-11762,18033,4672,-20961,31578,21224,-31641,-32342,-12469,-28124,6580,26382,10632,-4665,32545,20193,9832,2967,-24266,-30237,25606,5581,-14657,-7642,-16860,2122,-15906,31410,3200,-13467,17565,4784,-25367,23221,-5516,31147,-11387,-25022,19248,-6914,30821,1194,26758,-22175,-31413,6123,-25917,-16119,-20533,-5726,-27991,-30045,-29332,3436,-21310,-23458,29098,5192,18208,4262,12666,-2270,-31397,-8564,-14590,15931,29804,-18952,-29185,32308,-10260,-7095,-32115,2496,21515,28538,-852,27524,-13305,17005,-13520,31767,-22716,8146,6818,20281,-3260,-10531,-16467,7560,19623,-26660,3748,-12534,17718,7282,32664,-17205,26341,16222,-30702,27504,-30664,-30704,108,18711,29383,-22677,-9212,2934,16445,-9724,22809,-
21572,-26422,22716,-7233,-5219,-22241,1612,-16839,-11166,15594,27438,5413,-28750,18736,-15534,3051,26552,-1918,29283,15479,-12559,-22594,8439,26298,15228,18782,8585,-14515,8016,-3371,-16148,-10013,-24497,-17890,22606,22384,-5695,-29504,13686,-6596,-1048,-32181,-8841,-17778,29748,-18371,29065,-2151,15789,11575,17660,-15676,-13416,-1469,-6132,-22306,8339,-1216,-20695,11988,30854,31426,12990,-27526,18050,4963,13256,-2545,-21962,2045,-8196,4603,9661,-3416,8185,-3739,29394,15198,-3743,-16012,17934,-25813,-1948,501,-18312,29094,-27864,-21288,-9439,-4877,-20932,14144,-5412,27165,-23985,19297,25952,4121,29863,16121,12556,-24275,27379,4587,30004,-21777,-8409,25928,8032,1452,-27461,11730,-32019,11995,6608,-12475,30531,-6681,25778,-15132,-3022,-8213,-26836,1696,-28774,10289,17,5342,10808,21187,-28274,-10884,-31087,25880,19758,30983,28154,-28673,4486,18131,-9916,-16140,27710,12215,-17163,11747,3486,-17187,30730,8301,-19821,18342,18899,-26594,-6632,8872,14526,12421,-9600,7013,7733,-12934,20537,-6741,-904,-24316,-16316,8897,8227,-9829,-33,-24618,32734,-6565,-10922,23582,26528,14929,13464,28342,29160,18627,-704,12930,30601,-26757,26377,20119,5051,28516,17272,7872,17564,-11581,-26183,2262,13936,-23031,-14431,5,-28746,11105,30600,18972,-19742,5219,-24511,16944,19232,6121,31508,21343,-10446,7548,1235,14528,20845,-9809,-17854,26306,-27806,3188,-246,14295,21703,24185,1753,16075,13729,-23869,-32558,-19400,-22018,-1690,10321,9431,31193,28308,-15429,-2483,22482,-30543,19587,-15449,-8233,30016,-24661,-18467,29977,-29254,-7151,-30917,-18483,-24187,-15844,-4391,18512,-22023,-2078,223,7516,-21371,25689,-14568,-13923,1662,6265,-10033,12045,15421,-20708,18917,-6963,3422,-11227,-22285,-19862,-28062,27180,20678,18612,-15617,-6499,15129,-14121,24554,11035,-14112,30489,8836,31754,-24060,23824,-11099,20486,-24745,7440,-16420,-17541,-30963,5175,-26628,9702,-21676,-25381,-6910,18387,-21187,-17438,14116,18752,-16527,31077,20015,-21982,-1303,-11825,25103,-7032,17503,-26052,1459,-17493,8780,-22712,-10062
,2037,15026,20300,22888,8080,4139,-8204,-32027,-23527,8284,17766,24740,-27904,14213,8503,27059,5082,-25794,-9453,26122,-2977,-26701,9654,2095,30307,-13398,30514,-18784,-20608,24639,-15267,25611,-332,8748,6981,7145,27147,-24939,20234,29122,-7852,-5025,1784,20416,-25923,6650,-5798,-29915,-15729,30015,-5900,14787,26228,5693,-9902,-2983,4100,-4180,-8305,-26134,-5206,-6920,-7845,-25760,-26466,-17361,14089,30589,11579,-20445,28685,16979,3958,12911,-18542,-25400,-15028,6609,-15713,-19141,-18317,-7032,-24289,25637,12893,-28769,-8208,-17843,-5525,5588,-8291,4977,-9927,14468,-31858,21642,-29193,24471,30365,22707,-18162,-13966,-31330,881,2299,-29619,340,-5631,30496,-7641,-12335,7473,-2552,-6795,26936,27843,-31841,23507,31066,31392,12588,-5398,-21734,8233,485,-21240,-26987,-15312,-7543,25235,17897,-30830,14879,-13244,21328,18952,4592,24660,15877,21069,31251,-13286,23495,-3019,31008,-2671,21981,7266,-27070,13098,32001,16363,-29462,30002,2012,15186,-22623,18871,6648,20745,-8963,-16237,28303,22696,-9537,-7215,-16495,20031,14934,25141,-21489,-24820,6065,-17148,-25747,-29540,-2169,-21190,-30651,30543,-2221,25005,-361,-14708,-31854,-27462,-30251,11451,4060,4097,-20505,-8522,-24976,1319,12951,-26511,5233,-876,19949,27014,-27997,-18407,-20663,-10954,-11050,-29561,-19456,-22086,-25299,-6253,3627,10065,-26752,-26046,-19596,-5043,-20121,31372,-25943,-13460,-20542,-26147,-17032,-912,-19529,-2215,30162,-18103,-9276,11373,5406,-2934,12580,-14827,-31745,-20157,1203,19601,-17169,25156,-17653,-16098,-15389,-22874,17728,12216,-29317,27417,958,-3538,15175,-3981,14640,-20917,23178,12815,13875,-29048,-18972,26088,32580,15606,16307,6483,21540,-9929,9782,-18948,-30228,-31532,-21141,-2586,11670,353,29865,4230,-9818,-32444,28512,30898,15579,25164,26745,14874,-9691 diff --git a/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice31_input0_int16.csv b/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice31_input0_int16.csv new file mode 100644 index 
0000000..70d470b --- /dev/null +++ b/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice31_input0_int16.csv @@ -0,0 +1 @@ +20671,18612,-24309,-1237,3227,20902,20018,19291,2628,-17844,15505,20159,-8242,25522,-17117,-8073,13276,30194,-26212,15116,-10716,2772,-6214,-15886,-25618,23818,24867,12452,21424,-28240,-8140,9891,22939,9342,-28839,10283,14272,-16107,26841,-15524,-4170,-2334,24418,-21852,-11028,5132,-26532,-15189,-5547,11405,26174,-6820,-29879,29328,31589,-10152,7530,8293,5388,-27938,-3526,19889,5456,-5723,30231,-2516,5095,20701,24781,14417,-11727,11452,26078,-1835,20981,-26367,4063,7469,-10512,-694,5207,10742,-18826,-26789,1741,15544,10493,11602,28621,31900,12900,13427,-23820,18322,17718,17996,-28032,-18066,-32052,18925,3872,-16461,2061,-20317,4764,19945,-12996,13677,12245,-14565,14642,21095,-7142,-3863,-29640,29465,28396,-4835,-12724,-18384,14811,12899,23522,18880,-15612,32698,-21917,31027,-25887,-484,15808,-13926,2013,-20399,17825,-32289,18053,-31443,-2532,-26647,-12614,-11913,-11819,-1150,-25740,2321,-1339,13972,25409,-16900,17860,7172,29936,-2121,-31534,-12097,24145,-10989,-20539,-27321,15001,-6691,3446,-26548,25631,8157,984,-1748,11171,-1561,-26940,20912,28341,-29664,-18224,17919,27774,-28836,25211,20169,11970,1382,10978,27847,-21269,25039,27044,4325,28910,-18054,4240,-24761,23499,9415,28792,-4126,-31911,2637,4217,7605,-8569,-14938,-18202,-4399,4362,-22278,-17531,-19910,17147,23823,-21207,-2144,11137,26075,22276,-16743,28329,28456,20904,-13884,13566,-16717,11877,27349,21121,-25165,-13231,-22912,-29349,11683,-116,-2254,23377,-7118,26372,-22737,-4903,-12917,-11110,9501,-12725,-856,1503,18134,-20843,-18,-9110,-5342,-25585,-2035,32426,15534,-25814,-22691,-22834,-3529,9375,27522,3720,32330,-2868,23613,14568,2941,-30000,16613,-1605,7310,-16258,-1874,-30294,16788,-5302,31674,7385,20117,571,27697,31835,-15872,-24153,-31597,-30996,32311,25099,-21090,7354,26422,-12033,-3260,-13486,31593,9624,6503,-6838,26796,1117,-27545,-4158,-19539,-8677,4351,
-5777,17187,14465,25353,10383,-22514,10749,13730,-29859,18597,-2493,-20515,-19373,-30025,-30553,3641,4803,6379,-21508,-22216,23934,30912,25946,-459,-2464,14094,22094,-9402,22377,28567,-24789,19686,-15643,3460,8265,5305,-13749,-5232,24704,3705,27239,-9202,-11995,28489,20913,14023,-30606,20070,22895,-6745,-17770,3451,24784,32276,-18682,6578,-6249,17313,-11506,-23543,-13703,15161,-2258,15348,-3654,24116,-16425,113,-6335,-7062,-22779,20168,10849,28162,28902,-31951,-6507,17897,-27287,-15835,-21432,-19521,25912,-2791,-13481,-16993,-10748,-17060,-32577,20232,31925,-17028,22459,-7304,13851,-1067,17754,4490,-17230,1003,14488,30319,-23331,-19757,3548,-19890,-2212,24061,8636,-26630,-15163,24128,31247,-3473,-21577,-10750,5121,-2843,8899,24001,14591,-19017,8299,17093,-31343,-31184,27201,26083,-27734,-20150,12628,-16090,-25904,11081,-10245,-10785,24805,21336,-10413,-4691,-7066,-13115,26617,-12382,-6018,15511,7819,19590,27032,-24581,7835,1818,17320,-15603,-22180,11515,-5285,-16543,9998,-23124,8832,-12384,3863,-22462,3399,-27369,15168,4543,-24290,-16093,27688,-29073,7447,27055,-14708,2035,13609,-28526,16046,-7580,-31822,5028,-959,-23270,1769,5420,30108,-16910,-21542,-12917,19035,13163,9370,-19496,27271,-26457,19872,-13228,-17128,20656,-16154,29090,24641,-1204,-12499,-13016,21609,5207,-19952,-1057,-11821,21800,-9456,26688,4794,-19410,25704,25663,-22041,-9704,-27669,27366,-15142,23368,-9268,-6551,-11160,19974,-11336,-27316,-31703,2124,-14421,-3671,-29738,-19058,13535,-1327,-17443,-5550,-24116,62,12561,-15113,-8743,-23449,-25097,-30796,-3867,20038,14943,-27023,-31328,27954,-1500,-19769,23499,7299,-9098,-5225,11876,21077,-3730,-1818,-3136,-11816,28038,-26672,-11640,31150,22802,15594,-6103,-14555,-12358,7147,-12746,-31652,3847,22682,-10203,-12316,17882,1298,-17840,-1637,24182,5011,31654,-10966,27592,-28634,-20007,-12799,-30064,18314,-21911,24496,-24425,-18852,-3245,-15160,13198,-7282,-16973,-791,-14637,-24480,22400,-20699,-2403,-27990,2550,17817,1145,18707,-5184,-11730,-5089,652,15387,2
5121,23003,9757,25440,31807,8414,7694,28009,3214,23823,-10336,-32030,-20792,-6599,-21426,1731,8191,31437,17800,18195,-4251,-14304,-1116,32096,26407,7168,-11807,11150,-371,-3707,24824,10152,-22510,28882,27593,2566,2914,-24908,-16664,-10308,23166,-30913,-28685,-12320,20639,20693,-7801,-28358,-22245,-5374,15858,24977,-1718,13657,-20906,-22546,-2805,3754,32631,-25511,-29741,-1864,-7208,29629,-4007,28900,7635,5109,-15311,29806,1762,-26930,-16066,-22649,-16558,-30655,234,23223,4749,-28458,-4557,-29393,22926,23006,-23614,25440,18803,-4902,15455,-23313,-20977,11784,-16158,16603,-22827,-12725,-12641,2694,30392,3812,23480,-17661,-19091,-29180,19800,8688,-27400,21852,4227,-32530,13673,-19686,31925,-1815,-29668,-23858,-20733,1286,-2780,25014,3393,-10382,-13133,-2901,14235,27457,-27777,-5003,6364,-23317,5224,-10191,-5543,24247,-27576,7244,11273,1887,-28118,2264,23391,1031,-28949,-28646,-21453,-3948,-19031,-22445,7814,-7100,-22184,18108,-3479,-13533,-6757,11294,-3523,16213,-23716,17602,31977,-24738,23625,13370,6840,-32421,16506,3751,9117,-27428,11753,25245,-15704,5853,510,-8816,30512,-20679,-5173,18820,29078,-15182,-15671,4565,5177,23321,12051,21580,-20175,-25480,309,13301,14387,-13915,-5349,26112,4712,13583,-819,-18195,32742,-25518,-13651,-32587,8206,23293,21609,-21534,28919,31673,9659,12905,22148,6395,21052,7636,-5208,30141,-16353,22928,2475,-24216,10262,29163,-15974,15928,13749,17463,26122,-4090,-487,184,28424,-17966,18103,24190,-26795,21175,-31637,13258,-29341,-313,15015,27084,-32400,27328,-29817,11534,2648,-29867,-15050,18722,-32184,-23879,-25686,12670,-8077,31915,-32244,20660,-13183,-25870,15327,3316,15001,-2116,28502,17016,28184,7501,8185,30770,20781,31662,7378,-2913,-3499,-3463,12000,28467,19604,-23809,15503,-25351,30199,-22294,-4179,-31532,-15516,-13909,31158,30162,-13728,-32746,-15573,12494,-19989,32200,27337,17087,9853,-27915,-4628,26843,-20010,-5085,28357,-11134,11327,-12959,30670,-11343,31010,25994,-29514,9076,-638,-5464,-17928,-17584,-26906,-11169,-31486,5675,2232,1
4167,-17960,-10460,13368,-22294,-4836,-8410,-20170,-18681,14811,27890,29572,-18319,-4769,11156,-21694,7411,-9045,26143,-28456,-25227,-28159,19358,14108,5222,-25894,-2183,-1813,18136,29140,-29737,14807,-30131,-6490,-6745,-27319,-16533,27417,-11571,-1639,16080,13052,-23080,-31485,4140,8968,-22737,25092,9853,19392,-24771,8185,-30185,4545,-1156,4821,-18095,16376,-26021,18452,-16614,9654,32583,-9868,24940,31357,-7032,19677,-3777,-1715,-32157,714,18703,-8212,15080,1890,-22424,23197,-13054,-1084,13608,-24524,-15955,-3337,28406,-3605,-1305,-8146,30095,21668,2717,-2347,15774,1231,-4147,-8248,-28309,25745,-18284,-25056,199,7668,-22065,18715,13701,5577,9292,6999,18625,8363,28024,-17674,24798,-18809,-31246,-15081,-26199,-9291,-22355,26979,-12548,3334,13535,5265,-21430,28870,-19756,-11830,-13669,5004,-13917,-8630,-7159,1440,-21841,-28805,-25790,13851,-18586,8268,9534,-4027,-4797,-23473,14542,-14722,-14609,17190,31727,-28095,2783,24031,15185,-5290,-5555,-27693,-13590,10231,5728,20221,15117,26767,-27804,24349,-32364,-11371,-22361,-19543,15326,-22440,20383,4888,29166,-10849,-30277,11590,-19708,-24083,-26913,14190,2805,-30506,11270,-15255,261,-19122,-25452,-18704,-29063,28131,11540,-6982,-28414,27992,-25690,2472,1284,-13362,30881,-22179,27778,-22444,-13717,-16977,-21694,16289,20190,-14329,1499,-10464,16625,1998,425,32219,-28054,15916,-16343,26218,7063,31253,-10336,-31787,24572,25661,-20736,4624,13866,17699,14222,56,-2922,-6953,-10109,6447,6467,-23772,-515,-11368,30509,-17324,-15929,-5199,-1352,-6330,25484,-8654,20719,-2107,2739,-30451,15003,8643,9323,-14295,-32736,-18554,-709,4936,-15080,21477,-15307,902,27791,23368,11455,644,-27826,7325,11713,9022,-4150,31777,-9196,-27855,3790,21618,13357,14252,22270,9724,32595,-27779,7320,27446,-17197,-13108,-20047,6344,-25548,-29116,-31415,-23796,-1593,-3429,3443,-25242,-18428,18320,8252,-920,29918,-4096,22693,-17413,-889,-17535,10086,-16065,10585,-28743,-29343,2496,5657,8784,-16131,-22297,-199,-3971,20107,-6148,19472,-4565,-9604,22821,-32349,168
55,-9862,25479,21596,-13800,-14593,-15085,-32510,-5552,-12515,18914,-9136,22562,-31494,28556,5774,11662,21203,-445,15461,23717,-23870,26892,-32533,9904,-15932,-24449,-23892,-4025,-24174,-11525,-24402,-20162,8972,26268,-25057,-17692,-17256,4262,-13624,23087,-32748,597,-26005,-25859,18878,29591,-10765,-5147,-20952,23822,-22174,32648,-8572,-13273,-24137,-6307,20387,18979,-4274,-5179,-13389,18589,-13523,-5098,-10327,14482,-17915,30769,-5947,30727,1897,30996,14165,10022,20278,30034,-3212,-25993,19091,11690,13228,-14701,-11368,-31123,6964,3041,-9772,-23655,11466,14203,-18954,-14484,19000,-24961,-26210,28497,-22100,32276,22439,-2508,4712,-18298,19371,-1181,7003,-25146,-15878,18684,-29027,-1915,2966,20898,26050,1934,-30088,13676,17437,7599,18512,-24624,-27305,-22407,-1610,7669,25742,382,-24791,30794,-20010,-24330,-29307,-13645,9266,32675,9533,-76,4616,-13771,-20831,1995,4375,-3728,-6700,8757,-10494,18483,-21809,-26684,-17074,-4931,11851,6143,7990,-17489,-18601,-8749,-6910,18319,14210,18106,9141,22786,29249,9554,19389,-20554,12157,3453,-2464,-28246,9133,-6404,31765,-5584,-18013,22967,12179,28178,13223,5539,-6491,-21155,-10208,-16603,-30322,-1201,27019,-27630,21752,-26915,-22598,14613,7174,-427,5373,29867,31323,6661,-3322,15806,3003,32638,264,27747,3205,-12874,-15014,5407,12117,20624,-9085,32209,-11009,3880,32606,-22214,-743,9989,-10372,-1229,-28135,-17720,-1602,9223,-1957,-14963,2695,-7703,27893,26306,20254,-2227,-11011,21679,10761,-3063,-15197,1523,30028,-2281,17152,20567,5841,-6111,10058,31318,-1946,21905,18684,-12183,-23221,-29360,2659,-21913,16838,-29117,5404,-29073,6319,29363,-7134,-17583,24654,-12305,-14014,28095,-11519,-4592,17302,31048,-2292,-250,-29339,-16230,-6136,14803,20255,-10462,-31790,-4650,20300,9994,-3954,-21743,-9940,29345,24472,-22588,-742,-30847,-16950,-22413,3481,18402,21802,-28633,13896,27018,-8643,-27406,-12206,-28554,20845,4870,-16480,-9896,1801,25114,21217,20588,4285,-20264,22132,11664,30693,-12069,16374,2352,-2206,30051,-15231,12161,29679,11906,1038
3,-9386,17026,10872,1265,18765,-11939,-8787,-13011,17185,-11073,24558,21649,29356,-29869,18190,-3046,-10367,28247,-19384,1438,15356,2503,29076,4109,24373,20804,-15577,-18284,-3419,-28746,-15075,1635,5204,22744,-4724,30524,-27824,-30987,6946,-2379,-1170,-23274,-9644,-14224,12183,-1030,4159,-9481,17229,-15971,15114,20181,31841,8969,-32235,-18601,-22625,7978,-27636,-19688,6253,6782,30551,-293,9040,-6202,21532,25187,29879,-12502,-16115,-1437,-29184,873,1715,31901,1636,-29990,-8458,-1660,-31979,2024,22151,-3834,-21709,16433,758,-15277,-11365,28435,23535,5709,-29786,-24085,14612,-15884,-5374,-4029,-11416,-30042,9890,31836,-25319,9262,-32734,4410,676,-27060,-22172,5996,967,12844,28513,-28524,14646,-25977,-30348,7617,-5957,31576,3484,-6743,29070,29087,-3119,4264,-16310,-11157,-4712,24346,-19818,-8350,-8501,2225,19530,7234,-8268,20959,-3175,-6793,9624,-9281,9795,-1859,-5602,11716,-30090,-17340,12141,-28277,-9418,14388,-49,-10174,-26200,-2341,25158,12384,-21505,-16824,4758,-16532,-32709,28109,12010,6356,-9126,-20801,21553,-12080,16864,25006,-15138,6433,-18847,-29143,10846,17222,29511,31369,7029,-9037,26462,-25097,11831,29310,24486,-28796,-5252,-23441,-18582,-6431,27227,20143,13577,23489,-32718,19772,18478,-28076,4454,14505,-32500,6885,-28909,-4983,10867,-15977,-24675,-9635,9655,19632,-22970,20687,23904,-32056,27919,-2404,-20031,14461,26678,-20956,7631,26277,318,9241,8871,21128,12747,-20899,24268,-14885,-14152,-18368,28614,2352,3315,-1426,4755,-13064,-30788,4933,7429,8239,-12816,-14513,31691,1947,-1284,-27458,-30179,-7843,-12927,561,-30027,-29794,-3072,28060,-22450,22486,-23116,-11762,18033,4672,-20961,31578,21224,-31641,-32342,-12469,-28124,6580,26382,10632,-4665,32545,20193,9832,2967,-24266,-30237,25606,5581,-14657,-7642,-16860,2122,-15906,31410,3200,-13467,17565,4784,-25367,23221,-5516,31147,-11387,-25022,19248,-6914,30821,1194,26758,-22175,-31413,6123,-25917,-16119,-20533,-5726,-27991,-30045,-29332,3436,-21310,-23458,29098,5192,18208,4262,12666,-2270,-31397,-8564,-14590,15
931,29804,-18952,-29185,32308,-10260,-7095,-32115,2496,21515,28538,-852,27524,-13305,17005,-13520,31767,-22716,8146,6818,20281,-3260,-10531,-16467,7560,19623,-26660,3748,-12534,17718,7282,32664,-17205,26341,16222,-30702,27504,-30664,-30704,108,18711,29383,-22677,-9212,2934,16445,-9724,22809,-21572,-26422,22716,-7233,-5219,-22241,1612,-16839,-11166,15594,27438,5413,-28750,18736,-15534,3051,26552,-1918,29283,15479,-12559,-22594,8439,26298,15228,18782,8585,-14515,8016,-3371,-16148,-10013,-24497,-17890,22606,22384,-5695,-29504,13686,-6596,-1048,-32181,-8841,-17778,29748,-18371,29065,-2151,15789,11575,17660,-15676,-13416,-1469,-6132,-22306,8339,-1216,-20695,11988,30854,31426,12990,-27526,18050,4963,13256,-2545,-21962,2045,-8196,4603,9661,-3416,8185,-3739,29394,15198,-3743,-16012,17934,-25813,-1948,501,-18312,29094,-27864,-21288,-9439,-4877,-20932,14144,-5412,27165,-23985,19297,25952,4121,29863,16121,12556,-24275,27379,4587,30004,-21777,-8409,25928,8032,1452,-27461,11730,-32019,11995,6608,-12475,30531,-6681,25778,-15132,-3022,-8213,-26836,1696,-28774,10289,17,5342,10808,21187,-28274,-10884,-31087,25880,19758,30983,28154,-28673,4486,18131,-9916,-16140,27710,12215,-17163,11747,3486,-17187,30730,8301,-19821,18342,18899,-26594,-6632,8872,14526,12421,-9600,7013,7733,-12934,20537,-6741,-904,-24316,-16316,8897,8227,-9829,-33,-24618,32734,-6565,-10922,23582,26528,14929,13464,28342,29160,18627,-704,12930,30601,-26757,26377,20119,5051,28516,17272,7872,17564,-11581,-26183,2262,13936,-23031,-14431,5,-28746,11105,30600,18972,-19742,5219,-24511,16944,19232,6121,31508,21343,-10446,7548,1235,14528,20845,-9809,-17854,26306,-27806,3188,-246,14295,21703,24185,1753,16075,13729,-23869,-32558,-19400,-22018,-1690,10321,9431,31193,28308,-15429,-2483,22482,-30543,19587,-15449,-8233,30016,-24661,-18467,29977,-29254,-7151,-30917,-18483,-24187,-15844,-4391,18512,-22023,-2078,223,7516,-21371,25689,-14568,-13923,1662,6265,-10033,12045,15421,-20708,18917,-6963,3422,-11227,-22285,-19862,-28062,27180,206
78,18612,-15617,-6499,15129,-14121,24554,11035,-14112,30489,8836,31754,-24060,23824,-11099,20486,-24745,7440,-16420,-17541,-30963,5175,-26628,9702,-21676,-25381,-6910,18387,-21187,-17438,14116,18752,-16527,31077,20015,-21982,-1303,-11825,25103,-7032,17503,-26052,1459,-17493,8780,-22712,-10062,2037,15026,20300,22888,8080,4139,-8204,-32027,-23527,8284,17766,24740,-27904,14213,8503,27059,5082,-25794,-9453,26122,-2977,-26701,9654,2095,30307,-13398,30514,-18784,-20608,24639,-15267,25611,-332,8748,6981,7145,27147,-24939,20234,29122,-7852,-5025,1784,20416,-25923,6650,-5798,-29915,-15729,30015,-5900,14787,26228,5693,-9902,-2983,4100,-4180,-8305,-26134,-5206,-6920,-7845,-25760,-26466,-17361,14089,30589,11579,-20445,28685,16979,3958,12911,-18542,-25400,-15028,6609,-15713,-19141,-18317,-7032,-24289,25637,12893,-28769,-8208,-17843,-5525,5588,-8291,4977,-9927,14468,-31858,21642,-29193,24471,30365,22707,-18162,-13966,-31330,881,2299,-29619,340,-5631,30496,-7641,-12335,7473,-2552,-6795,26936,27843,-31841,23507,31066,31392,12588,-5398,-21734,8233,485,-21240,-26987,-15312,-7543,25235,17897,-30830,14879,-13244,21328,18952,4592,24660,15877,21069,31251,-13286,23495,-3019,31008,-2671,21981,7266,-27070,13098,32001,16363,-29462,30002,2012,15186,-22623,18871,6648,20745,-8963,-16237,28303,22696,-9537,-7215,-16495,20031,14934,25141,-21489,-24820,6065,-17148,-25747,-29540,-2169,-21190,-30651,30543,-2221,25005,-361,-14708,-31854,-27462,-30251,11451,4060,4097,-20505,-8522,-24976,1319,12951,-26511,5233,-876,19949,27014,-27997,-18407,-20663,-10954,-11050,-29561,-19456,-22086,-25299,-6253,3627,10065,-26752,-26046,-19596,-5043,-20121,31372,-25943,-13460,-20542,-26147,-17032,-912,-19529,-2215,30162,-18103,-9276,11373,5406,-2934,12580,-14827,-31745,-20157,1203,19601,-17169,25156,-17653,-16098,-15389,-22874,17728,12216,-29317,27417,958,-3538,15175,-3981,14640,-20917,23178,12815,13875,-29048,-18972,26088,32580,15606,16307,6483,21540,-9929,9782,-18948,-30228,-31532,-21141,-2586,11670,353,29865,4230,-981
8,-32444,28512,30898,15579,25164,26745,14874,-9691,-17837,21221,32141,22946,-32482,23812,11961,-11442 diff --git a/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice32.tflite b/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice32.tflite new file mode 100644 index 0000000..de0c6ea Binary files /dev/null and b/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice32.tflite differ diff --git a/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice32_golden_int16.csv b/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice32_golden_int16.csv new file mode 100644 index 0000000..54ca08f --- /dev/null +++ b/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice32_golden_int16.csv @@ -0,0 +1 @@ +-2383,-31570,26731,17265,5974,-29858,3552,-15574,24230,24426,-14689,-30055,-10688,2688,2361,13282,-9491,-22070,16800,-30262,-10092,9490,32327,25473,-14328,-25979,24390,-28787,19931,-22647,-2410,-12316,-31507,18576,-11838,-2958,25078,19850,15635,-26667,13489,-17936,2282,-15727,-6565,25766,-13095,17330,-20766,-6302,5608,-29338,-22378,-22651,18430,-7658,13504,-26459,-16913,29745,13074,-10745,-16244,-9283,-28169,28506,-22483,9662,23985,-22413,-18845,26987,-27820,19243,-9470,-2046,-17199,-9061,13877,31812,29288,-5763,13745,-15811,21429,-29214,5344,-13420,-9377,-12461,15917,-4187,31905,3284,28837,-10081,-26066,-25923,-21428,-30290,18631,-12356,18212,20924,-18419,10367,-10374,23722,-24402,-20139,-27754,25259,-16372,13619,21495,-2582,15293,14208,-15952,-12268,-12478,-10044,6494,31351,-25233,13403,-17452,-5640,29867,28831,-27299,-26501,1884,3199,26864,-21040,29628,-18159,2124,6643,4933,-597,-18696,10321,12954,-23868,-32401,13017,16367,-4531,27357,-20760,-10879,-17294,-17204,2707,3501,-32637,25497,8665,3556,-20670,1522,15408,-19661,26359,10635,30367,193,-16474,2971,-28282,-12626,-27640,12974,15408,-28757,-26043,-30900,9654,-24393,-17548,18760,26986,22741,-27847,-3136
2,29434,5232,28042,31591,-15348,-7916,23355,26591,-14246,20737,21697,1024,-17988,-26302,-7287,9437,-12106,-28630,542,3561,-13287,31631,-2802,-4574,14176,30046,-22238,9294,-12442,-4466,-19804,8245,-21857,20560,28483,-30572,-17782,20194,-12346,-25678,-24289,-1155,15203,9021,-20485,-14149,-8619,13143,11994,-4016,-10483,9186,-9520,13368,-29689,-8146,28513,4353,-7285,-9981,-22032,32295,-10429,-17688,23977,27805,24800,3636,-501,-31948,15841,-3914,-26425,24191,-9301,-28595,-9158,18379,-1548,25041,-2873,26056,28050,14920,4234,-7808,-19946,12815,-23540,7959,-25170,10267,30161,11409,26273,-7078,15064,-22484,23340,-10191,20132,21616,-7725,-22830,-8098,15285,26138,15803,-7504,-7851,-4945,20265,31541,18971,24646,7480,6668,-27800,8029,-13912,-7989,21419,838,-7902,-24815,24040,-21108,7790,24581,-25232,-18289,-8030,-32416,17745,28639,1514,5042,19215,6633,1360,25019,-15068,-26192,-14572,23468,23855,-22142,-32615,8178,-21051,30487,9959,-9229,-30203,14134,-30700,10107,17656,-17717,936,5743,21838,-26487,-3064,-2076,26008,-30155,18262,-771,1253,-19918,5817,15058,-12709,5161,-18302,626,-19355,-23932,-23167,-16444,-2513,-31834,6144,6194,-14265,8464,2247,-7336,8756,985,23732,-26042,22704,26806,-8797,17058,-3589,10217,28499,-6149,-1742,-20675,3879,-7277,-16430,25470,7115,13562,8382,28108,-18158,310,-10848,-15470,27830,-1636,19578,-7742,-6362,4759,1657,8295,12485,-26017,-20698,29216,-9045,-24146,27361,-29998,-10075,-32516,13945,-12012,1122,-7564,15032,11274,-28601,-23629,-14079,21263,-22025,-15550,10274,16920,-863,-18873,-5726,-3785,16062,4581,-30218,-9353,19650,-29165,-20842,8870,17684,-21106,20733,3259,-11639,-11148,1743,882,-46,3646,-4656,-4455,28713,12059,12224,-9883,-14692,-11317,-11039,-18553,6910,-16502,-2540,-13791,-21696,-16202,22595,-17355,14616,-12860,8050,28072,28807,6266,-25427,22019,22063,32726,-26123,239,435,-23834,-26542,26900,716,-23810,-11388,-29122,-18317,2032,3643,-19129,19597,8511,-27345,-6380,1115,-19705,17439,-23022,-26011,24777,-13589,25454,-22726,3380,8853,1886,4635,
-11985,-8817,28165,-30753,-18536,-25654,1587,-6634,15120,11815,-7301,-18378,-17845,23309,-13451,17350,-224,-20224,-4336,-30858,11583,-23234,-17258,944,15625,2959,-25973,-22010,-23315,25122,10162,-14820,4041,29701,-9283,-8077,32720,-10505,-11634,-12577,13969,-5796,-12126,19380,-18633,-7396,1304,-5550,18214,1535,20326,-11506,20709,-22343,-5570,-6642,7334,3168,-27228,-29422,26543,2611,-4472,21675,-1422,-4546,2740,29598,-5565,-26541,-30112,-15547,30174,-7358,6582,-12662,30111,-18262,25168,31259,17112,25456,29242,4233,-980,-22612,-7410,-31759,-15747,23976,19387,23667,-21313,10134,28002,-31171,28618,28358,18842,13469,28031,-22707,-3355,-31471,-8781,25152,4135,-24221,-26562,21000,-28756,25504,-2487,14341,3019,-5876,-3799,-7409,6547,6179,27880,-23219,9587,-17197,28373,7397,-10907,25569,21065,14918,3945,-27126,26500,-30213,22772,-9423,6289,27247,-28573,31462,17297,-30973,6578,-5695,27019,24291,20635,-7990,-32487,-26567,5858,8299,-16329,-32450,21979,-9490,-13047,-31029,27197,-21557,-13347,28022,-20726,-6715,-30975,18758,-20433,-20537,28320,-27772,16170,22677,-13007,-9707,-16675,22396,-10897,21639,5877,21799,-26068,13129,-21120,12242,792,32149,888,21036,-26710,28199,3131,-2917,-23248,28999,20180,18727,13028,3644,-8260,-25574,-26700,-1223,15247,-30081,3920,-574,-18671,-10786,31394,15685,-3724,24555,-20041,869,31618,6722,-9286,-63,-24296,-894,25138,-30502,-27762,14102,4135,-7373,-32432,-10181,12919,-32545,16764,-22270,30282,-17530,8888,-28350,-9693,26253,-14845,14939,-22311,10120,4787,-26840,9033,-27727,-5318,23673,-20523,6206,9054,-10264,8671,-5526,-17068,-14715,12920,4899,1203,-939,-20913,10027,25513,23603,-30985,-22069,13327,-10718,25868,14688,-16136,31203,8993,14480,7689,3986,29438,16839,21973,-29331,-23395,-14472,-3557,-30356,-22491,-25345,-22516,-3362,-4588,6787,11897,-17863,-25989,-14832,-13945,-5508,-31346,-5547,15700,9808,5982,-12440,16942,-170,16278,4507,5987,26647,-22919,-23774,-19176,-22586,14345,20283,28518,-2222,26711,23321,31390,-30850,-9382,20189,-31464,12102,-17
500,7314,-24666,1914,14233,32333,9612,-19491,-21682,-16421,-17494,23826,1594,-5963,24980,6422,30592,29953,23524,2436,28729,8224,13160,23137,-32325,-28827,1799,17756,8735,18837,23867,56,-464,-21779,-17947,12207,-27251,19777,-6671,-515,-19646,-7097,26557,-8974,4263,10068,-14688,-5836,-15530,19318,30888,-15226,-11909,12770,-13052,-15565,26444,15045,17960,30785,-1496,28826,-18464,20432,-32673,-13064,-24698,-27105,26830,14348,18687,-3278,-30362,-24576,15549,24697,14541,-27270,-18147,5415,-32340,-10815,-14980,14958,31651,19408,18092,22692,19139,7301,-12418,22284,8382,-24830,22155,7215,-563,27346,-8869,16736,15417,1869,-14096,-25102,-6872,19628,19108,5473,13902,26768,21728,30857,-24573,-13230,-16627,-22523,13231,-21992,27908,22643,21032,-2818,-30795,-19230,19018,-7954,-22534,32374,-13753,-3996,-18290,31617,-30614,26510,30936,-23356,-21632,9703,-2482,19580,-23636,-28290,-22506,-25192,-3732,-25825,18546,12628,27514,-28164,-23825,24881,22264,-24456,-7310,-23767,19393,-21273,-28271,23434,27751,29366,-5081,5166,-2438,1114,21054,25625,23553,488,-16988,12039,-1931,28478,-11129,11854,-6117,2959,-6885,-5264,28242,-10033,21654,-25680,7913,-8047,3189,-22728,6538,-28188,-27619,-4861,-7508,13511,-1206,11416,22739,-16206,26762,17780,18855,20030,2402,10977,25468,-10486,-7365,19523,14546,22118,30253,12536,11408,-15451,4929,-17459,644,-23183,7544,-4810,23278,19266,11008,-28275,-29668,-9761,-29324,9858,-6531,21072,18493,11477,9526,183,-28484,-15862,-3597,16407,30016,-14806,10156,3231,-21661,-21466,-10031,28643,8147,28371,23142,-24692,-29772,4052,-5735,30941,-16541,-1483,29969,-10714,16669,-14727,-8483,26370,12667,-12022,-15456,24740,-26281,31791,-26237,-20966,-2241,32699,-13721,-25979,-21871,874,-16576,-4067,26195,20064,-4595,11130,6413,22375,30001,5200,-30660,6338,30879,-3675,24304,8074,-6791,-11018,-32410,-4545,-24537,2679,-25635,5276,-1195,2810,10846,28882,-16947,-1413,24610,4178,-25837,9147,-23984,11000,-30969,18886,7921,25112,18766,18122,32216,20670,-10677,-32315,25885,13950,2389,-2990
8,27165,-5487,4248,11821,-600,-3991,11042,6071,-2762,24133,17904,-26894,-15116,3056,-28424,-19392,26461,-20413,-22613,-6478,-8652,28699,7230,-31617,21345,30086,24652,-8360,-1697,-16066,15480,22190,-4702,21306,-13264,4978,-1252,-27105,-15346,727,-20946,-25880,-9256,28991,8471,21391,-10704,744,21493,-13710,-9017,20157,2674,5297,-7335,-13332,-29440,-8841,30144,27998,19398,-27443,10251,3402,-26440,8490,14745,3719,30584,21188,-9179,31168,4402,-5152,23860,17970,26694,19447,13490,23347,6352,-440,-31751,29005,-11662,3615,-22365,17480,-11835,-26877,8454,-31746,18115,-7611,-28282,27691,-25195,4279,28672,-16355,31151,28888,12774,-7897,-24369,-30450,-19775,-15393,1244,13585,-5860,782,10178,-31266,632,30450,26176,-7793,-20039,-1119,-26255,8323,-23443,-17411,2798,-10965,29442,8843,-15878,-31874,-5820,-17725,-30663,29821,-4732,-28135,25003,26873,-7184,-9027,25154,525,-27309,27820,-17527,-18126,-13492,30432,-13010,16690,26719,601,7439,-22818,8688,13417,28366,27688,-16890,12202,30257,-22932,-18005,-28050,7926,-10031,526,19565,-22226,19732,20710,-19378,26875,-9495,20058,-9736,6726,10437,-25789,-13996,7551,-9567,-23681,10133,-31387,32092,15180,29659,17633,-12970,27172,19286,-23270,-13450,19994,-9728,-16216,-30669,9373,-27035,-32175,2846,-31738,16008,6825,1000,29348,31507,-10200,8054,-1352,-202,-29882,-21440,21355,29662,-23518,-20006,29168,-26301,30177,24414,-5979,22193,24393,-14611,-4140,-29538,371,6877,-10388,-26300,-11715,30465,1066,19401,17690,14536,24253,-4207,3260,31079,1686,-7319,-16928,-14191,8475,-27364,27598,16024,-15013,-27942,16223,-26957,19925,29687,25861,5728,234,21187,-12253,27271,29224,21683,-13474,-1580,-21068,-13247,17916,28497,18858,14025,-17978,-25086,29844,-7055,-16296,26421,171,14511,-15523,-32197,-25575,846,3737,-30184,20023,27074,-21825,-6851,-4426,25060,21670,-4380,17871,-27807,-21272,-3503,17704,-13144,-19988,5016,9275,13877,-19557,-9239,-10111,9602,1904,-9231,-31999,23068,2782,9240,-31220,-7119,17773,30547,-10521,11911,-31434,25560,11618,5178,-20845,-19090,-2
6086,12260,-19942,-15974,-26852,-5175,-294,18158,32197,12290,2461,-11268,31966,21873,1437,-3245,404,9075,-1826,27753,26962,-9712,-13680,8324,-25278,15741,13356,-26753,-5892,-477,-23609,-6052,-20014,-24762,9895,22942,14927,13761,2595,-32329,31786,-19207,-21552,-10293,23097,-28889,27741,-2648,-27459,-31299,32724,-9153,21155,29422,20381,-15360,-217,15572,32710,-25691,31817,15060,7516,-24316,-5965,-31891,-2858,11908,4033,25002,4463,-31367,-1485,26161,-24645,-21007,31018,14885,7792,-29699,-3297,413,-14176,18094,26829,12592,30866,-8687,-25213,32106,17911,30373,25331,-188,-27276,26164,-28454,4683,-12190,-5323,-14270,-12427,27542,-30829,-8925,11161,29185,-22064,8322,8861,21720,18981,-2565,-29530,-16830,-31000,9707,24552,-26448,22902,-16735,27263,6575,24900,-26192,-23752,29040,29489,-25526,-20675,-18680,-991,29212,32181,22589,32010,-28680,11878,-9109,-14879,-4963,6163,16907,5524,28918,6560,27810,-31299,-9966,-5517,14417,-16608,-13180,1722,18688,-21586,-7153,-19744,18338,-9725,8228,-22507,-23413,-12828,-17019,-31256,-8110,-25740,7384,32059,30768,-1272,22854,-19096,15295,30429,-3238,-22958,8890,-17926,4924,27268,23805,9940,101,25812,4901,5257,14801,-26730,-24029,-9883,17626,-13273,-224,9254,5518,4533,23624,-18286,-1501,-27885,-10988,-11717,-29497,2938,11693,2603,-31851,-7422,-25751,-206,29027,32657,-5278,-12885,-27527,-2601,15789,-28730,-28411,-3373,-4634,15108,-14025,29747,29128,29472,24004,-5580,-17759,-26411,7991,2566,22519,7789,-31410,-8805,8102,3556,-815,-30467,9953,-27102,25352,29048,16551,7549,29745,30178,-14858,-8138,21554,-5645,-8510,5698,-5878,24870,-7407,20053,-30863,-1638,18869,17127,-23785,-16865,4768,14433,-8046,-12951,-2976,-6659,21714,-18664,22166,8601,-2205,25954,-17735,4741,6057,107,29292,31106,20453,-15813,-4750,870,21642,7026,-23390,-31034,-29853,348,-12324,-5690,-32552,-25904,-883,-24849,-27679,15423,-1825,-15994,2383,29922,21456,-12216,-22107,-10575,-3092,-26574,-24524,30979,740,16770,23345,24364,-11554,-835,26079,5472,20269,-7763,31858,13800,-23656,-2243
9,-5711,-23042,10101,4944,-24599,-28950,12517,-23875,24979,-29433,26466,-11961,17428,-31913,8530,-797,-8017,18033,11654,30507,-20709,16149,-9774,20666,3739,-15425,26520,-388,21287,14043,29324,-12032,16004,-3435,-5952,-3622,2031,29228,18867,-24560,11259,9201,-7060,-4436,23610,-25008,-320,-22086,-32286,24329,27077,-26422,-10279,27070,32647,-28348,-21209,-4494,19503,28570,6581,23705,32096,3164,29926,-1279,-25848,-17000,28870,-1672,19690,32129,26362,15347,-30045,29566,28878,-20050,13804,15053,-19428,-20104,-28108,-30187,-9458,24198,-26614,28153,32766,-17282,-25724,1616,-20363,14121,9269,-910,27922,3628,-31865,-31709,-1426,-15298,15578,-27197,4108,-27225,-1641,29733,23377,15720,-31841,-18304,-5353,28025,12317,11363,28003,-19371,-1615,22581,-9553,-31961,-1822,26578,-20383,25632,-32722,-12006,25939,14800,19103,30896,-30611,-9518,-2883,17264,-19533,-4996,-8223,-31792,27290,21737,-29002,10810,-3734,6668,-16240,29413,-28855,-28302,-20653,-15686,-13275,-1258,11633,7133,7954,-30629,-29548,-1160,24146,25131,23175,11531,-16262,-9169,-4525,-15054,19761,17454,23267,-17146,17883,-2684,11564,-5698,-13944,-3700,31965,1516,-21078,5539,15474,-28998,-27594,-25024,-19700,25723,-32226,954,18768,11646,6510,-179,-24359,12876,9096,23044,-27178,26480,11396,28451,23532,23900,-22479,-3206,31032,22840,13952,14589,-20927,1051,-22399,-25196,-14364,6039,-26878,30336,-24259,5622,25818,-11630,2260,-6580,16768,-14615,-13858,233,-9926,-6417,22887,-29611,6664,16118,8788,20696,-28408,1450,-29957,11044,-145,18206,30490,-7218,-24118,27616,-6498,-22578,-31506,39,-29941,20487,-12192,-20706,15229,-10926,21875,-6250,5227,-29212,-20192,3675,-15891,-12495,-32730,-11248,-12538,8293,26150,-6866,29477,9112,3101,-3421,29046,26903,-19577,-6392,-16098,-4281,-13711,1116,-20211,-25759,-27052,-21511,23551,15868,19966,5166,-5709,15862,-1694,-4477,5782,30570,-21788,-5421,-27327,5843,26744,-18994,28303,32121,21359,-10970,-6380,-22161,18433,18174,30559,-11177,7671,30077,11456,-17438,-21174,30925,-19509,-30010,-32030,-7205,191
01,14219,5423,-15196,-31633,-21888,-21602,19365,12851,17573,5600,12102,27257,-12905,-11393,-26791,-12008,-28923,-28561,3785,18277,14957,-32708,-24995,-17934,-1622,-8387,10345,24232,2370,-31752,-18017,-18670,-12657,25848,-27607,29058,-17429,25856,20895,-17126,-25220,13847,6950,18465,-26474,-17235,31480,15415,4324,-16418,-32364,-11945,-16572,-24293,16342,-13405,13013,9336,17936,4795,-7807,18200,-15291,21220,-15720,-30276,-26407,-7639,-12019,27851,-13778,-26782,-3927,14572,-7606,-21959,20274,-8085,-7211,-11457,7864,20793,20782,-30934,24545,-13903,-14664,-22868,27372,-13810,-14060,-17038,14230,-18535,14372,-29743,-3872,-30399,-841,-3445,-7721,6064,-17677,-31599,26943,7556,5272,-21645,1900,-22206,-20319,18454,18923,-14429,-316,7933,-21467,13632,-26929,11924,-17205,14803,-14801,-13729,18815,1668,27367,11084,-14168,21891,-11210,18616,4628,-19941,-16971,14856,15690,27240,-18629,-3082,-21579,-26487,-31880,8949,-13258,-24040,23526,15331,-30946,29616,11507,-3177,28364,31048,17089,-23348,9774,-13771,23350,-15887,10741,-21143,-25929,-24000,10058,-21601,13934,-5362,-32459,-6156,-15568,6501,8686,-20856,14991,-15710,9696,-9195,10184,29380,-21260,-12647,2557,22690,-26611,29383,1158,-25591,-2220,31422,2957,31612,8473,20640,25037,3809,32135,-5608,-22483,3285,-23163,18427,-20187,12789,-26785,-1367,-7411,1505,-19659,-10684,-8030,-4546,24542,6662,6132,21711,8558,-19507,-1227,-9569,22556,-26593,-19852,-22493,-18663,-28795,10649,19944,-26187,29563,30050,6048,-9873,21678,32003,27301,30926,-26806,-5937,-11625,8763,13044,-24788,-2800,13418,-3701,-19508,-12897,30837,-5133,23365,14692,20298,27950,-14975,-8612,-25506,-12512,-19730,-30444,-3494,30443,14959,30830,27822,-8631,-31341,9065,8114,20623,16747,-24770,-24113,-21884,-7730,8158,19232,25557,-20384,-16632,14909,8151,5818,4253,3311,-328,16376,-30478,9149,9044,-5222,-22544,34,-15927,3725,-16309,2803,-20419,-8469,-8681,30091,-15879,-5114,-18506,10002,7226,-29038,20226,13408,30962,10505,4097,29869,11672,10315,21969,19515,971,20700,31776,-27230,15
700,596,3886,-15567,25714,3623,-27351,26655,-14874,8704,22269,-24544,16396,15791,9281,-4938,-20040,-5604,13451,7613,1115,-15863,31159,16025,17842,30553,-30065,-25619,15521,23269,18142,14653,27118,-14510,12211,2054,-7408,-22090,5604,-18036,21328,-32109,-13990,20433,9074,-5892,19152,30719,-25500,20270,-2298,-18262,20450,8854,6615,26373,-3880,19251,27280,-18437,-23902,-7034,24003,-31175,25383,-4489,22018,7051,27364,-9970,29855,27669,15449,24118,8650,25304,20969,-28766,6464,-18455,22917,-1657,-1153,30360,-14723,12195,-13218,-3338,-1090,21998,-156,-2220,-80,-2382,6104,-27777,20912,21601,-4635,-30021,-13360,-9173,-27529,174,14000,21402,-8329,13726,-5576,31867,-15887,15022,16015,-17462,-16863,-24596,26226,22049,-18618,-10694,22700,22206,26250,-7601,15102,-22845,17859,1209,18473 diff --git a/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice32_input0_int16.csv b/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice32_input0_int16.csv new file mode 100644 index 0000000..f0fa5b3 --- /dev/null +++ b/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice32_input0_int16.csv @@ -0,0 +1 @@ 
+-9269,-32192,-26870,9453,-4189,-1845,-25030,-13665,15847,-73,-3008,-7714,25761,-21020,-19511,-27536,7265,-4921,14141,-29650,10129,6381,3428,20420,-22502,-30987,-19551,4715,-18085,-12728,-17798,-23259,-2210,-26456,-23016,-8082,20300,10080,20194,10468,-32442,-30543,154,-21298,-19302,17974,-26535,-27322,8829,21090,-19961,-31515,16931,23030,11107,21651,25639,20115,-3983,-10996,-6443,478,27984,-8595,32455,-2871,-28109,-22190,5597,5644,-578,22477,-21789,7803,-32517,-6793,-6914,1370,-17717,-13272,-11974,30969,12802,-32480,5525,19566,-15214,-28367,-27970,20504,318,-32319,-12344,-16507,-1029,-13432,-24253,-19116,-23339,8333,28403,26237,2784,-16851,-18969,25215,-18940,29141,5627,-13710,18093,-14912,27968,27934,-997,28921,17243,-31342,12465,-10323,1812,2530,-20031,-15852,-5768,14414,-19163,22597,26236,-15729,32184,-3441,-28680,-23494,-977,-1886,-18727,26256,-25052,-21295,26320,18133,-26563,-27197,21583,-6023,29094,24922,24441,-2772,19019,-31431,26377,18375,17905,-26278,6435,-22941,14697,32352,-6693,-19752,24516,25255,27112,5131,583,4978,15438,9631,13081,5607,19632,18412,7566,17466,22643,-8654,-21618,-15634,-25204,-5712,-17791,5877,14845,-12798,-4371,31756,-10364,24024,2513,-7000,5542,11389,-7858,-10271,-4724,-9933,28369,20389,7620,-22325,-25136,27931,-2794,-14740,4585,-8358,9501,25684,4994,-28239,9395,2458,21435,-32104,26575,21930,15606,11083,14822,-22658,-24517,-22985,12818,21030,8319,9325,19492,-20543,16475,-21616,-7376,31282,-15115,16579,-853,31542,29023,23217,20130,-5870,-25501,-31951,31689,-15205,6297,9631,31885,-12631,27225,22071,26573,9596,24520,-9595,-25180,26121,-4244,3870,25799,-31950,24869,-3217,-22390,4164,-21943,31845,-197,18203,-13114,30684,-3746,-23213,3502,22069,12911,9098,-13803,-18344,4673,15092,-21098,-20691,-1745,53,-27341,-15857,-5396,24105,-27358,15218,-31428,-31109,20025,27525,-28515,-22130,16426,8242,-6864,22446,-27353,13965,-27595,2718,-24065,-28071,-29212,7483,21553,29757,-1039,13700,12433,10272,1069,28992,30808,15046,13482,-6198,12422,-3829,-4119,-1
5782,5637,26697,22783,-8353,-1216,16688,28428,-7119,-23310,3073,23589,-32524,-9470,5567,-2039,24099,2856,21109,18976,26959,-7919,-6620,-31160,29533,-15935,26096,-20203,24800,19192,-17158,-3746,-8575,-12392,-3589,-18421,11417,-10720,14512,-28535,28902,28170,18338,31178,27262,24257,-31809,17254,33,-3474,25169,-29130,21626,16047,14672,-25447,-5363,-32219,17126,-30138,32747,-16079,-1995,18836,32202,-23479,7932,-10704,-19730,24698,22815,-26840,27496,-22480,-2819,31441,-19385,-31959,-31823,-10144,-7183,-10765,-21214,29570,-16569,3373,-23177,14993,5959,-25759,-14398,31634,27563,21114,20108,22592,-10075,14934,17507,-24435,5704,-3382,-15211,-25385,-11714,16258,-22497,16303,-6117,-31310,-25592,-23730,2161,-23218,25446,1063,14137,1744,-25501,19509,6224,11264,-22160,-20123,-12865,-23431,-32268,12663,17772,-21815,-10852,15188,1750,27654,-4233,9524,-9659,-7378,21232,-15440,5469,-21377,918,-18693,-23158,16841,-8549,17768,12697,12698,-12214,-20888,-1788,29233,18232,-26182,-7977,14279,29890,-12582,-21172,21691,29816,25768,-17740,-3775,-24381,8527,-25731,19246,-13030,10322,-17846,29776,-31900,-28900,-8677,28297,-22041,-3386,-31809,22610,-4050,-6755,-32399,-27023,6898,19809,-1505,1227,-9086,12652,-28874,-24636,23072,31306,-12151,30944,-2180,18101,22824,-18136,4227,32686,-28655,13224,21528,-25266,1927,25778,-9934,-3450,23675,8822,-25204,32742,-23263,-29665,-28149,-32041,-21982,-21680,26699,28342,8320,-17182,-264,-7753,5659,27961,7371,-4366,15832,-11707,-9807,-15357,-1812,21734,-31165,5617,31894,-17587,-29856,32189,15852,9165,7866,-2137,-9791,17756,-5211,17295,-29703,22609,-22032,-32690,28111,23762,16081,6073,25683,22617,-25760,-12531,-16768,29729,29132,-12286,-21417,12805,7628,-18512,-25962,13050,4433,11255,-7201,-29543,-26348,16917,26337,-30513,-3786,-6408,-23028,-5985,9158,7716,17201,-11890,-16367,-25291,7231,18782,1703,24099,21759,9057,-2670,-19094,16503,-17149,-31256,-6323,-15656,31365,-23939,25633,15468,12250,-32116,5940,22597,17613,22099,9977,24241,23249,-9054,31243,-6958,-28083,
12103,14953,-22664,24439,12849,-1613,-7084,-6655,-15672,-18400,-9980,21006,3055,-3412,-25999,-22549,-2674,-24347,24368,-14181,-9326,16771,-23695,1552,-8496,32145,-23475,-17231,-3642,-11544,21867,11876,-24134,6298,18299,10328,-26397,5327,-12301,-16391,24060,27794,-606,21509,5404,26479,27731,26527,14051,425,-31944,-24143,18514,13364,16033,11922,19574,640,-6023,27338,-30834,-14238,-5780,-20596,-11649,-3469,-2629,12038,-25293,14041,5732,-31109,-30867,22903,-3197,-23550,32399,-18163,-24650,27995,5910,-16365,18146,17806,835,-2498,16799,29030,-23736,-32614,-12993,-24282,-28954,-5618,2212,-27185,1962,20289,-690,-2716,5320,-18522,19614,-30217,-32598,-30657,-23283,20602,-1277,15944,-15531,-7634,-411,2724,5557,-306,-23454,10675,17161,-21304,19046,-28744,1366,-3744,26466,11021,-4454,14463,-31198,11838,-14753,29833,-27009,-18261,5350,-10443,-21668,21026,22742,-24584,-16847,-26138,11165,22013,26623,-32379,-30584,-6937,11311,17967,-9324,27373,2872,25316,23753,18234,7775,216,-4770,-12276,-31459,-21344,-13159,-28229,30448,-23294,25286,26537,30690,10991,-27683,25047,-10124,-12313,19535,-29801,-327,-26481,-27577,-29140,-30723,-23794,2517,-10344,7050,1391,18577,2988,-3314,28917,-21800,-10340,-26970,2853,-5898,-24219,-11206,-849,25624,22978,3636,-25317,26832,30645,2770,11224,17346,-8092,-9513,25949,32170,11364,-8886,-10821,11713,-18069,-32607,14644,-20797,-17802,-2163,-24578,-6470,5978,30475,26694,-19442,10651,19672,28218,-8311,-13774,15363,-26152,2267,-21385,-5083,11672,2422,9284,18597,-31673,-2337,-30211,-21915,-32619,-32275,-6588,23726,-11593,1326,-21610,-3725,-4750,19221,-18120,17690,31406,-24737,16704,9062,-29793,-7341,-20176,22814,-10979,-15037,-2480,23448,-4454,-9938,-22224,-10787,22313,-8541,-24891,-6572,27515,16753,-2674,-18942,2188,19482,10034,-6485,-1141,15932,3488,-6384,-14926,-26920,-4594,22889,10970,9136,459,13610,-14708,11436,-16689,-6664,-2408,-911,-755,31534,23285,22858,32741,-17640,4543,11300,-7036,792,25942,9695,-14305,-28557,-9555,-31413,-5819,-27362,-8108,17491,-436
,23105,-6012,-20161,29038,22437,362,6175,28372,29280,86,-25890,9939,25232,12861,769,-26379,14519,13078,29520,-32391,7721,12736,6009,4039,-24729,7269,-22978,-20914,-17634,29566,-17921,32693,-18296,6388,27960,-19195,-23294,21522,2261,19860,-30756,-26862,-13588,-27733,-21451,674,8927,-8850,6195,26954,-14309,9792,16255,7510,-24120,11020,-32163,14241,10341,5042,-1338,20372,29325,-1576,16428,19773,3886,-7156,11366,6760,-12241,-7658,-5971,13160,-31566,-31682,27499,16537,23157,25524,16100,-25810,-16699,15919,-20073,8477,-21864,30448,-699,-2495,-6754,-15142,-13561,-28842,-29050,31821,-14365,-15825,10234,26070,25756,7768,-27543,21808,-25365,6992,4307,-14710,-20855,10816,-3134,-17810,9635,-18901,22509,-17756,29309,18065,-28853,29239,21447,30307,20831,-29366,26280,31624,14397,-8677,4957,16615,-3793,8712,-5875,1057,30666,24258,-16324,-9709,-20269,-24515,10210,23611,-9101,-5588,27447,32455,-30158,2572,-26113,-32332,-25479,2376,12566,-31270,17466,-20984,-13676,-26890,-9721,-1649,20612,-14412,-16179,13644,22805,9466,18106,26148,15335,-31124,-7887,10651,31568,6858,-16778,-13752,2506,25869,-2473,-1665,-26435,-1632,-32401,14926,30458,5305,-22563,31003,536,11568,-7775,-32410,-32406,-14747,443,12645,-26885,29993,29826,7981,15068,-23770,1825,-29009,-10304,11605,-20503,20911,22851,-4770,-21163,3123,4880,-31111,-21700,23180,-25472,14751,4695,12896,9088,29300,-25440,3129,-10828,-8063,2943,31367,-16193,-14373,-16424,1164,20170,-26467,-12675,31262,26749,4001,-4785,-31511,2580,26301,-17282,-30682,11144,27856,-19575,-15875,-7071,5698,-25242,-20990,20359,8193,-13884,-1181,5300,20044,13360,-3201,1863,-15118,31899,20178,12196,-3195,9887,3771,-32703,22902,996,18256,-16419,7897,23994,2141,-24711,-24887,23760,28927,-22574,7991,-31409,-16465,28658,-26489,23264,-11867,-3523,13903,29087,-15465,31681,-2596,-18174,5982,2784,5720,-10225,-4936,-308,-17998,9365,-7550,-4791,-25775,15840,-15577,27662,16440,5519,-10453,-32664,31568,26427,-10338,26497,10945,16619,-925,20617,-31150,-5233,-17769,13265,-24165,-1162
2,17391,29313,-25011,-5242,5758,-5823,11736,-25856,13384,4805,12890,9403,4375,19276,-7899,27303,-29876,24738,-8488,7891,13388,8702,-24485,24402,-9749,-10817,-21157,-10070,-29923,-7319,24768,20256,14278,29543,-31540,1377,28838,1396,-5922,-4293,27730,31275,21240,-11929,-30985,-19918,8328,-8100,-370,-6531,-21754,-10921,11173,-29801,-4033,12048,-12047,21717,31623,-8072,-7334,-31066,-6387,-24805,14693,20192,9715,4772,6289,-30580,-18704,24467,-22058,-22071,-22515,-18843,-24224,-17095,-4082,22888,-25260,14086,5767,11277,17829,-27835,10957,-16816,19433,-19329,9398,-5643,-15504,-27307,16082,11485,23865,19034,-18074,-15211,12203,-10813,25389,26395,-12844,22035,17428,-18803,-3148,-24910,7681,17013,-12549,10979,26746,15346,12401,-20681,-6891,15807,12161,15688,-28316,-21351,12662,-17150,21248,31038,18976,-32440,-29457,-21544,16633,-16471,-8108,22680,655,-23970,31146,19840,-10513,-9159,8777,-29398,-9160,-1941,-9166,-19374,-22379,-21567,26748,-28800,-5426,-6738,-8315,-14514,5178,7423,11086,30622,26830,-2696,20159,11764,15891,8345,25769,32327,-8050,-14865,20491,18393,31706,-9976,27481,-18906,-10789,25078,-24234,-11137,18535,-1418,18294,-13798,9858,-29145,-21304,-1078,-14183,29778,-20003,11072,17345,-11481,-19687,-20245,-23884,18960,30747,-5327,-21773,-18211,15442,-1781,-2855,-6856,-26798,526,5145,-25557,17570,888,67,9684,25211,-32532,-16931,30259,-19014,24037,1229,21245,-24506,25739,17814,23587,20878,-16648,-20141,-3941,11344,-18953,2239,-26100,-2286,-31059,652,-9958,3628,8306,15371,-11224,-32171,-724,25888,-15757,-6222,-22854,21126,12412,12503,26069,-26367,6169,15330,12137,-9813,25416,20643,-27909,-10372,21179,-4827,32437,31524,-6957,2457,6278,28487,6960,14217,-32727,-10706,30031,6269,18995,3493,4716,-23808,24330,17618,-21478,-31742,-13697,-12885,8003,18604,-29232,-1680,-3847,-27109,-25056,-9135,-18447,20711,-32293,849,-26632,9140,-26729,-21142,-32118,-4092,-21130,14999,-6427,10379,-24927,-8497,-13651,-13260,3630,-14018,28039,19950,-29685,7932,32059,-26220,11737,17034,-19750,30757
,-32470,5520,-24523,-19323,-27729,11001,32491,-30320,14854,-12517,-1623,23326,-21287,-27226,17785,26698,29467,-18209,-6223,18484,-26051,-16968,-502,-9810,24999,-7428,-3837,-11289,-6075,31866,28074,-22545,28239,-16371,-8702,13215,-16574,11133,32636,25351,2106,-20345,-7386,-32623,-30394,-27625,2822,2999,30034,13313,19952,6596,9306,-2908,872,7905,-7679,-17930,-15287,15321,-2533,32276,-3885,-17375,5651,-26547,27026,-10532,-29683,17657,-9462,-23906,32075,-7436,-21509,-18238,22713,-28284,4530,27495,-13620,-10237,4921,5605,-9686,29761,16379,-6237,13030,-12753,-88,-14361,8192,-25291,-6301,-2395,-19982,-28006,-23262,9335,3413,-12090,22018,-15689,-18931,-30855,-29056,-22830,-24857,15134,25577,-12674,-20703,5414,4350,-16188,-30448,-11086,1937,30283,25889,28238,-8362,27491,-20036,8419,-11652,26034,30374,-8972,14162,-2495,-22355,-17629,-24167,-10494,-25330,29430,-3247,413,-32444,25804,15008,-21479,-824,-9246,26702,-20121,-2703,-28649,-19694,6804,-2646,15644,32536,22277,4221,-10326,14357,16089,29346,-20248,5297,-3556,-4605,-21759,-23505,11310,9801,-4158,13557,27812,19537,-5538,32245,14891,1756,29276,-14518,-3196,-28571,-16139,32146,25833,-13616,-8583,4728,-31149,-30826,474,-30556,-19977,-21959,-25085,6316,-30317,13274,7313,13990,29972,-7722,4350,-19381,-13068,15384,-19997,18306,-2693,14527,4146,-24126,-6914,28076,14479,12404,10282,-6179,5585,13867,-17444,32099,-29785,-2116,13275,-9052,20141,-8699,18874,-9745,3880,-12298,-6820,63,8256,18267,10305,29630,-18816,-16113,-14267,9541,17542,15163,-945,5112,-14074,31802,29549,23117,11541,-8111,-15447,20797,2481,1566,453,-8993,29773,16510,26944,-23752,4603,14531,-12620,-19156,-18807,-24222,-15762,7694,29204,4612,-5558,-25422,-15550,3148,22164,17967,-335,22801,27302,10318,31912,24848,-12288,-12453,-31816,15101,4513,-126,-20268,-15487,26608,-32130,10953,-21324,3359,27616,-25421,-2775,-70,14524,-25786,6024,16018,-32195,-13570,9776,7197,31277,11214,17274,-21143,-21404,-24928,-4720,4444,-3623,-29720,31130,-13950,5719,30779,-17358,-5731,11576,6,
-8276,-29233,-13344,22432,-22587,-17986,-32072,9248,-29263,-3450,19526,25864,-32676,6604,-4913,-29976,2549,-3280,18266,-15689,-24592,-32552,-16063,-19184,-15732,30227,-24903,15645,-31595,4544,-15136,8732,30756,20557,26287,-6474,-4923,-14917,-22267,-12473,20660,-10143,10945,-22755,25526,-13504,-21874,-29394,-14141,-12194,1046,1070,24113,-18936,-29389,-10128,-28816,23113,-9900,26718,10798,-16779,-14948,-15497,7753,14737,20873,-11693,-17483,-32687,32083,17197,28675,-18267,5489,-28106,18564,-22023,11034,-31877,-9687,-17719,-15553,-13014,-18292,-11091,-18161,-22501,-14274,1387,-6586,11446,15861,6249,3909,7664,-30915,-23107,25351,-9338,-11790,13636,-303,10856,-32517,-32432,1747,6280,-2487,-8654,-423,20339,2340,25054,1645,2570,23270,-2617,-19467,2520,-15332,19932,-532,-19774,30874,-25787,-26302,-30089,-18028,28587,-10196,-11072,-5624,-6467,-11518,2718,21663,5784,-9240,-29623,-26991,18126,-10709,14097,-23513,-7900,-19520,8367,-12020,-28792,7627,-9878,29924,-26353,-2229,-30224,-31881,31153,15080,14700,-21803,13740,-18176,-4703,9906,-12979,-17001,23257,-19485,22543,-31691,21238,-25993,-1768,-20639,-24277,-880,-11347,11045,8186,-1957,13964,4919,13133,3641,-21898,5813,13371,24405,22468,24703,9975,-25964,14323,-14445,24896,5280,-22450,-15293,-25708,17449,2999,15662,-5296,18471,-17533,31647,-28258,-15171,27968,27388,-21175,29954,-26466,-4403,8175,9808,-27505,32642,11541,3017,9080,5586,-18068,-13634,29829,-24985,16261,-26742,504,-55,-9420,31754,-1119,12585,28257,27909,-21794,-16318,-30838,-17764,-22385,-5250,19122,2466,18162,26070,30881,23717,-4002,-19521,10669,17610,-3163,-29945,10106,-16661,-1632,26758,-14482,119,-14992,5877,14763,-6316,-17524,22939,8445,-19614,4751,-3280,2085,-6481,5059,-14651,-13742,-6312,7773,16260,-25999,-12326,24938,-15057,-31590,3832,-28655,-12691,-6552,-26425,14607,3529,-4021,-29627,23963,30843,-8506,28953,-4339,30682,1183,-15717,-5421,-29716,-22,-20085,27000,5712,8725,-25234,30217,-30591,-10810,28953,19248,-12271,-20605,-19239,8565,-13320,-7536,24,8592,-
7460,30814,-20608,-18556,-8506,493,-5019,-459,-15547,-19893,-15700,10300,-19075,-20522,31207,-31685,-20503,820,3393,-32123,-25307,24886,-7387,21710,-326,-27982,26103,409,-26758,24953,-716,-20096,1062,-28279,-30606,-19820,-13498,21030,28860,30768,19595,-8193,-26995,6785,-24735,-24195,1798,-3386,32764,27833,9382,-1908,18720,-9553,-6257,8632,15760,-30418,-22976,-13132,12806,-15203,7709,-16056,11036,-12176,3709,-12782,3305,-11388,9377,30284,32401,26174,9025,4799,-3050,987,10158,13016,31152,22965,23052,31162,-14205,-8353,-2842,1950,-9406,-23351,30742,11650,17110,31674,27769,-23485,12063,-4108,3386,-8209,-7663,-6733,8671,24401,-3105,-32616,26365,-14629,23713,11066,10677,-31750,-12267,203,30685,-1159,-15074,-4621,-6295,-27533,12097,10116,-10473,-28423,-2135,-3447,-12691,18159,-17179,13808,-9095,2692,2629,14411,24941,-29434,15937,877,21316,-28772,16414,-29334,-22076,25191,-3470,30512,17964,-26005,-17713,-23021,-23252,17625,13008,-26866,-24221,-14436,-6809,-11412,17491,-30341,9925,3043,769,11864,22127,-3846,-9114,-17598,11799,-2610,-10648,3516,26838,-19080,6460,16446,-30101,15107,3070,30591,11484,8192,-11926,-9815,10543,24367,-3512,-7546,19571,-27575,-15050,-19923,-26804,29442,548,19845,-3766,14925,-4146,-10170,-25085,9078,-20470,-28122,-25707,-32113,31577,10033,-24634,30569,29424,27687,19208,-6957,30773,13095,32494,-6654,-30802,18279,-15485,-7206,19915,-20086,21763,25156,-16118,-6887,16553,17804,-17653,-10716,15583,-9924,-3735,11650,29216,-30221,17705,4671,-27523,-29705,-13705,17482,-22515,18147,7177,30598,-13888,22663,12506,11196,15113,24434,32260,26137,-21863,-1972,23030,-6751,6572,27676,-28437,-21386,-2296,7470,31673,-32209,10201,18230,-16362,16340,-26847,-9528,11372,-29764,28428,10560,27181,-6264,-3121,19457,250,-10653,19733,-603,29230,-17985,-15936,-32311,-6128,28138,-1531,-5402,21234,26004,-10204,32436,3912,-2426,8087,7045,3582,30284,15669,-2890,16329,-14309,-29621,15940,17320,-12784,14882,-10172,24118,6505,-31169,-19390,-26879,2229,19098,8593,-8426,-2383,-31570,26731
,17265,5974,-29858,3552,-15574,24230,24426,-14689,-30055,-10688,2688,2361,13282,-9491,-22070,16800,-30262,-10092,9490,32327,25473,-14328,-25979,24390,-28787,19931,-22647,-2410,-12316,-31507,18576,-11838,-2958,25078,19850,15635,-26667,13489,-17936,2282,-15727,-6565,25766,-13095,17330,-20766,-6302,5608,-29338,-22378,-22651,18430,-7658,13504,-26459,-16913,29745,13074,-10745,-16244,-9283,-28169,28506,-22483,9662,23985,-22413,-18845,26987,-27820,19243,-9470,-2046,-17199,-9061,13877,31812,29288,-5763,13745,-15811,21429,-29214,5344,-13420,-9377,-12461,15917,-4187,31905,3284,28837,-10081,-26066,-25923,-21428,-30290,18631,-12356,18212,20924,-18419,10367,-10374,23722,-24402,-20139,-27754,25259,-16372,13619,21495,-2582,15293,14208,-15952,-12268,-12478,-10044,6494,31351,-25233,13403,-17452,-5640,29867,28831,-27299,-26501,1884,3199,26864,-21040,29628,-18159,2124,6643,4933,-597,-18696,10321,12954,-23868,-32401,13017,16367,-4531,27357,-20760,-10879,-17294,-17204,2707,3501,-32637,25497,8665,3556,-20670,1522,15408,-19661,26359,10635,30367,193,-16474,2971,-28282,-12626,-27640,12974,15408,-28757,-26043,-30900,9654,-24393,-17548,18760,26986,22741,-27847,-31362,29434,5232,28042,31591,-15348,-7916,23355,26591,-14246,20737,21697,1024,-17988,-26302,-7287,9437,-12106,-28630,542,3561,-13287,31631,-2802,-4574,14176,30046,-22238,9294,-12442,-4466,-19804,8245,-21857,20560,28483,-30572,-17782,20194,-12346,-25678,-24289,-1155,15203,9021,-20485,-14149,-8619,13143,11994,-4016,-10483,9186,-9520,13368,-29689,-8146,28513,4353,-7285,-9981,-22032,32295,-10429,-17688,23977,27805,24800,3636,-501,-31948,15841,-3914,-26425,24191,-9301,-28595,-9158,18379,-1548,25041,-2873,26056,28050,14920,4234,-7808,-19946,12815,-23540,7959,-25170,10267,30161,11409,26273,-7078,15064,-22484,23340,-10191,20132,21616,-7725,-22830,-8098,15285,26138,15803,-7504,-7851,-4945,20265,31541,18971,24646,7480,6668,-27800,8029,-13912,-7989,21419,838,-7902,-24815,24040,-21108,7790,24581,-25232,-18289,-8030,-32416,17745,28639,1514,5042,192
15,6633,1360,25019,-15068,-26192,-14572,23468,23855,-22142,-32615,8178,-21051,30487,9959,-9229,-30203,14134,-30700,10107,17656,-17717,936,5743,21838,-26487,-3064,-2076,26008,-30155,18262,-771,1253,-19918,5817,15058,-12709,5161,-18302,626,-19355,-23932,-23167,-16444,-2513,-31834,6144,6194,-14265,8464,2247,-7336,8756,985,23732,-26042,22704,26806,-8797,17058,-3589,10217,28499,-6149,-1742,-20675,3879,-7277,-16430,25470,7115,13562,8382,28108,-18158,310,-10848,-15470,27830,-1636,19578,-7742,-6362,4759,1657,8295,12485,-26017,-20698,29216,-9045,-24146,27361,-29998,-10075,-32516,13945,-12012,1122,-7564,15032,11274,-28601,-23629,-14079,21263,-22025,-15550,10274,16920,-863,-18873,-5726,-3785,16062,4581,-30218,-9353,19650,-29165,-20842,8870,17684,-21106,20733,3259,-11639,-11148,1743,882,-46,3646,-4656,-4455,28713,12059,12224,-9883,-14692,-11317,-11039,-18553,6910,-16502,-2540,-13791,-21696,-16202,22595,-17355,14616,-12860,8050,28072,28807,6266,-25427,22019,22063,32726,-26123,239,435,-23834,-26542,26900,716,-23810,-11388,-29122,-18317,2032,3643,-19129,19597,8511,-27345,-6380,1115,-19705,17439,-23022,-26011,24777,-13589,25454,-22726,3380,8853,1886,4635,-11985,-8817,28165,-30753,-18536,-25654,1587,-6634,15120,11815,-7301,-18378,-17845,23309,-13451,17350,-224,-20224,-4336,-30858,11583,-23234,-17258,944,15625,2959,-25973,-22010,-23315,25122,10162,-14820,4041,29701,-9283,-8077,32720,-10505,-11634,-12577,13969,-5796,-12126,19380,-18633,-7396,1304,-5550,18214,1535,20326,-11506,20709,-22343,-5570,-6642,7334,3168,-27228,-29422,26543,2611,-4472,21675,-1422,-4546,2740,29598,-5565,-26541,-30112,-15547,30174,-7358,6582,-12662,30111,-18262,25168,31259,17112,25456,29242,4233,-980,-22612,-7410,-31759,-15747,23976,19387,23667,-21313,10134,28002,-31171,28618,28358,18842,13469,28031,-22707,-3355,-31471,-8781,25152,4135,-24221,-26562,21000,-28756,25504,-2487,14341,3019,-5876,-3799,-7409,6547,6179,27880,-23219,9587,-17197,28373,7397,-10907,25569,21065,14918,3945,-27126,26500,-30213,22772,-9423,6289,
27247,-28573,31462,17297,-30973,6578,-5695,27019,24291,20635,-7990,-32487,-26567,5858,8299,-16329,-32450,21979,-9490,-13047,-31029,27197,-21557,-13347,28022,-20726,-6715,-30975,18758,-20433,-20537,28320,-27772,16170,22677,-13007,-9707,-16675,22396,-10897,21639,5877,21799,-26068,13129,-21120,12242,792,32149,888,21036,-26710,28199,3131,-2917,-23248,28999,20180,18727,13028,3644,-8260,-25574,-26700,-1223,15247,-30081,3920,-574,-18671,-10786,31394,15685,-3724,24555,-20041,869,31618,6722,-9286,-63,-24296,-894,25138,-30502,-27762,14102,4135,-7373,-32432,-10181,12919,-32545,16764,-22270,30282,-17530,8888,-28350,-9693,26253,-14845,14939,-22311,10120,4787,-26840,9033,-27727,-5318,23673,-20523,6206,9054,-10264,8671,-5526,-17068,-14715,12920,4899,1203,-939,-20913,10027,25513,23603,-30985,-22069,13327,-10718,25868,14688,-16136,31203,8993,14480,7689,3986,29438,16839,21973,-29331,-23395,-14472,-3557,-30356,-22491,-25345,-22516,-3362,-4588,6787,11897,-17863,-25989,-14832,-13945,-5508,-31346,-5547,15700,9808,5982,-12440,16942,-170,16278,4507,5987,26647,-22919,-23774,-19176,-22586,14345,20283,28518,-2222,26711,23321,31390,-30850,-9382,20189,-31464,12102,-17500,7314,-24666,1914,14233,32333,9612,-19491,-21682,-16421,-17494,23826,1594,-5963,24980,6422,30592,29953,23524,2436,28729,8224,13160,23137,-32325,-28827,1799,17756,8735,18837,23867,56,-464,-21779,-17947,12207,-27251,19777,-6671,-515,-19646,-7097,26557,-8974,4263,10068,-14688,-5836,-15530,19318,30888,-15226,-11909,12770,-13052,-15565,26444,15045,17960,30785,-1496,28826,-18464,20432,-32673,-13064,-24698,-27105,26830,14348,18687,-3278,-30362,-24576,15549,24697,14541,-27270,-18147,5415,-32340,-10815,-14980,14958,31651,19408,18092,22692,19139,7301,-12418,22284,8382,-24830,22155,7215,-563,27346,-8869,16736,15417,1869,-14096,-25102,-6872,19628,19108,5473,13902,26768,21728,30857,-24573,-13230,-16627,-22523,13231,-21992,27908,22643,21032,-2818,-30795,-19230,19018,-7954,-22534,32374,-13753,-3996,-18290,31617,-30614,26510,30936,-23356,-21632
,9703,-2482,19580,-23636,-28290,-22506,-25192,-3732,-25825,18546,12628,27514,-28164,-23825,24881,22264,-24456,-7310,-23767,19393,-21273,-28271,23434,27751,29366,-5081,5166,-2438,1114,21054,25625,23553,488,-16988,12039,-1931,28478,-11129,11854,-6117,2959,-6885,-5264,28242,-10033,21654,-25680,7913,-8047,3189,-22728,6538,-28188,-27619,-4861,-7508,13511,-1206,11416,22739,-16206,26762,17780,18855,20030,2402,10977,25468,-10486,-7365,19523,14546,22118,30253,12536,11408,-15451,4929,-17459,644,-23183,7544,-4810,23278,19266,11008,-28275,-29668,-9761,-29324,9858,-6531,21072,18493,11477,9526,183,-28484,-15862,-3597,16407,30016,-14806,10156,3231,-21661,-21466,-10031,28643,8147,28371,23142,-24692,-29772,4052,-5735,30941,-16541,-1483,29969,-10714,16669,-14727,-8483,26370,12667,-12022,-15456,24740,-26281,31791,-26237,-20966,-2241,32699,-13721,-25979,-21871,874,-16576,-4067,26195,20064,-4595,11130,6413,22375,30001,5200,-30660,6338,30879,-3675,24304,8074,-6791,-11018,-32410,-4545,-24537,2679,-25635,5276,-1195,2810,10846,28882,-16947,-1413,24610,4178,-25837,9147,-23984,11000,-30969,18886,7921,25112,18766,18122,32216,20670,-10677,-32315,25885,13950,2389,-29908,27165,-5487,4248,11821,-600,-3991,11042,6071,-2762,24133,17904,-26894,-15116,3056,-28424,-19392,26461,-20413,-22613,-6478,-8652,28699,7230,-31617,21345,30086,24652,-8360,-1697,-16066,15480,22190,-4702,21306,-13264,4978,-1252,-27105,-15346,727,-20946,-25880,-9256,28991,8471,21391,-10704,744,21493,-13710,-9017,20157,2674,5297,-7335,-13332,-29440,-8841,30144,27998,19398,-27443,10251,3402,-26440,8490,14745,3719,30584,21188,-9179,31168,4402,-5152,23860,17970,26694,19447,13490,23347,6352,-440,-31751,29005,-11662,3615,-22365,17480,-11835,-26877,8454,-31746,18115,-7611,-28282,27691,-25195,4279,28672,-16355,31151,28888,12774,-7897,-24369,-30450,-19775,-15393,1244,13585,-5860,782,10178,-31266,632,30450,26176,-7793,-20039,-1119,-26255,8323,-23443,-17411,2798,-10965,29442,8843,-15878,-31874,-5820,-17725,-30663,29821,-4732,-28135,25003,26873,
-7184,-9027,25154,525,-27309,27820,-17527,-18126,-13492,30432,-13010,16690,26719,601,7439,-22818,8688,13417,28366,27688,-16890,12202,30257,-22932,-18005,-28050,7926,-10031,526,19565,-22226,19732,20710,-19378,26875,-9495,20058,-9736,6726,10437,-25789,-13996,7551,-9567,-23681,10133,-31387,32092,15180,29659,17633,-12970,27172,19286,-23270,-13450,19994,-9728,-16216,-30669,9373,-27035,-32175,2846,-31738,16008,6825,1000,29348,31507,-10200,8054,-1352,-202,-29882,-21440,21355,29662,-23518,-20006,29168,-26301,30177,24414,-5979,22193,24393,-14611,-4140,-29538,371,6877,-10388,-26300,-11715,30465,1066,19401,17690,14536,24253,-4207,3260,31079,1686,-7319,-16928,-14191,8475,-27364,27598,16024,-15013,-27942,16223,-26957,19925,29687,25861,5728,234,21187,-12253,27271,29224,21683,-13474,-1580,-21068,-13247,17916,28497,18858,14025,-17978,-25086,29844,-7055,-16296,26421,171,14511,-15523,-32197,-25575,846,3737,-30184,20023,27074,-21825,-6851,-4426,25060,21670,-4380,17871,-27807,-21272,-3503,17704,-13144,-19988,5016,9275,13877,-19557,-9239,-10111,9602,1904,-9231,-31999,23068,2782,9240,-31220,-7119,17773,30547,-10521,11911,-31434,25560,11618,5178,-20845,-19090,-26086,12260,-19942,-15974,-26852,-5175,-294,18158,32197,12290,2461,-11268,31966,21873,1437,-3245,404,9075,-1826,27753,26962,-9712,-13680,8324,-25278,15741,13356,-26753,-5892,-477,-23609,-6052,-20014,-24762,9895,22942,14927,13761,2595,-32329,31786,-19207,-21552,-10293,23097,-28889,27741,-2648,-27459,-31299,32724,-9153,21155,29422,20381,-15360,-217,15572,32710,-25691,31817,15060,7516,-24316,-5965,-31891,-2858,11908,4033,25002,4463,-31367,-1485,26161,-24645,-21007,31018,14885,7792,-29699,-3297,413,-14176,18094,26829,12592,30866,-8687,-25213,32106,17911,30373,25331,-188,-27276,26164,-28454,4683,-12190,-5323,-14270,-12427,27542,-30829,-8925,11161,29185,-22064,8322,8861,21720,18981,-2565,-29530,-16830,-31000,9707,24552,-26448,22902,-16735,27263,6575,24900,-26192,-23752,29040,29489,-25526,-20675,-18680,-991,29212,32181,22589,32010,-28680,1
1878,-9109,-14879,-4963,6163,16907,5524,28918,6560,27810,-31299,-9966,-5517,14417,-16608,-13180,1722,18688,-21586,-7153,-19744,18338,-9725,8228,-22507,-23413,-12828,-17019,-31256,-8110,-25740,7384,32059,30768,-1272,22854,-19096,15295,30429,-3238,-22958,8890,-17926,4924,27268,23805,9940,101,25812,4901,5257,14801,-26730,-24029,-9883,17626,-13273,-224,9254,5518,4533,23624,-18286,-1501,-27885,-10988,-11717,-29497,2938,11693,2603,-31851,-7422,-25751,-206,29027,32657,-5278,-12885,-27527,-2601,15789,-28730,-28411,-3373,-4634,15108,-14025,29747,29128,29472,24004,-5580,-17759,-26411,7991,2566,22519,7789,-31410,-8805,8102,3556,-815,-30467,9953,-27102,25352,29048,16551,7549,29745,30178,-14858,-8138,21554,-5645,-8510,5698,-5878,24870,-7407,20053,-30863,-1638,18869,17127,-23785,-16865,4768,14433,-8046,-12951,-2976,-6659,21714,-18664,22166,8601,-2205,25954,-17735,4741,6057,107,29292,31106,20453,-15813,-4750,870,21642,7026,-23390,-31034,-29853,348,-12324,-5690,-32552,-25904,-883,-24849,-27679,15423,-1825,-15994,2383,29922,21456,-12216,-22107,-10575,-3092,-26574,-24524,30979,740,16770,23345,24364,-11554,-835,26079,5472,20269,-7763,31858,13800,-23656,-22439,-5711,-23042,10101,4944,-24599,-28950,12517,-23875,24979,-29433,26466,-11961,17428,-31913,8530,-797,-8017,18033,11654,30507,-20709,16149,-9774,20666,3739,-15425,26520,-388,21287,14043,29324,-12032,16004,-3435,-5952,-3622,2031,29228,18867,-24560,11259,9201,-7060,-4436,23610,-25008,-320,-22086,-32286,24329,27077,-26422,-10279,27070,32647,-28348,-21209,-4494,19503,28570,6581,23705,32096,3164,29926,-1279,-25848,-17000,28870,-1672,19690,32129,26362,15347,-30045,29566,28878,-20050,13804,15053,-19428,-20104,-28108,-30187,-9458,24198,-26614,28153,32766,-17282,-25724,1616,-20363,14121,9269,-910,27922,3628,-31865,-31709,-1426,-15298,15578,-27197,4108,-27225,-1641,29733,23377,15720,-31841,-18304,-5353,28025,12317,11363,28003,-19371,-1615,22581,-9553,-31961,-1822,26578,-20383,25632,-32722,-12006,25939,14800,19103,30896,-30611,-9518,-2883,172
64,-19533,-4996,-8223,-31792,27290,21737,-29002,10810,-3734,6668,-16240,29413,-28855,-28302,-20653,-15686,-13275,-1258,11633,7133,7954,-30629,-29548,-1160,24146,25131,23175,11531,-16262,-9169,-4525,-15054,19761,17454,23267,-17146,17883,-2684,11564,-5698,-13944,-3700,31965,1516,-21078,5539,15474,-28998,-27594,-25024,-19700,25723,-32226,954,18768,11646,6510,-179,-24359,12876,9096,23044,-27178,26480,11396,28451,23532,23900,-22479,-3206,31032,22840,13952,14589,-20927,1051,-22399,-25196,-14364,6039,-26878,30336,-24259,5622,25818,-11630,2260,-6580,16768,-14615,-13858,233,-9926,-6417,22887,-29611,6664,16118,8788,20696,-28408,1450,-29957,11044,-145,18206,30490,-7218,-24118,27616,-6498,-22578,-31506,39,-29941,20487,-12192,-20706,15229,-10926,21875,-6250,5227,-29212,-20192,3675,-15891,-12495,-32730,-11248,-12538,8293,26150,-6866,29477,9112,3101,-3421,29046,26903,-19577,-6392,-16098,-4281,-13711,1116,-20211,-25759,-27052,-21511,23551,15868,19966,5166,-5709,15862,-1694,-4477,5782,30570,-21788,-5421,-27327,5843,26744,-18994,28303,32121,21359,-10970,-6380,-22161,18433,18174,30559,-11177,7671,30077,11456,-17438,-21174,30925,-19509,-30010,-32030,-7205,19101,14219,5423,-15196,-31633,-21888,-21602,19365,12851,17573,5600,12102,27257,-12905,-11393,-26791,-12008,-28923,-28561,3785,18277,14957,-32708,-24995,-17934,-1622,-8387,10345,24232,2370,-31752,-18017,-18670,-12657,25848,-27607,29058,-17429,25856,20895,-17126,-25220,13847,6950,18465,-26474,-17235,31480,15415,4324,-16418,-32364,-11945,-16572,-24293,16342,-13405,13013,9336,17936,4795,-7807,18200,-15291,21220,-15720,-30276,-26407,-7639,-12019,27851,-13778,-26782,-3927,14572,-7606,-21959,20274,-8085,-7211,-11457,7864,20793,20782,-30934,24545,-13903,-14664,-22868,27372,-13810,-14060,-17038,14230,-18535,14372,-29743,-3872,-30399,-841,-3445,-7721,6064,-17677,-31599,26943,7556,5272,-21645,1900,-22206,-20319,18454,18923,-14429,-316,7933,-21467,13632,-26929,11924,-17205,14803,-14801,-13729,18815,1668,27367,11084,-14168,21891,-11210,18616,4628
,-19941,-16971,14856,15690,27240,-18629,-3082,-21579,-26487,-31880,8949,-13258,-24040,23526,15331,-30946,29616,11507,-3177,28364,31048,17089,-23348,9774,-13771,23350,-15887,10741,-21143,-25929,-24000,10058,-21601,13934,-5362,-32459,-6156,-15568,6501,8686,-20856,14991,-15710,9696,-9195,10184,29380,-21260,-12647,2557,22690,-26611,29383,1158,-25591,-2220,31422,2957,31612,8473,20640,25037,3809,32135,-5608,-22483,3285,-23163,18427,-20187,12789,-26785,-1367,-7411,1505,-19659,-10684,-8030,-4546,24542,6662,6132,21711,8558,-19507,-1227,-9569,22556,-26593,-19852,-22493,-18663,-28795,10649,19944,-26187,29563,30050,6048,-9873,21678,32003,27301,30926,-26806,-5937,-11625,8763,13044,-24788,-2800,13418,-3701,-19508,-12897,30837,-5133,23365,14692,20298,27950,-14975,-8612,-25506,-12512,-19730,-30444,-3494,30443,14959,30830,27822,-8631,-31341,9065,8114,20623,16747,-24770,-24113,-21884,-7730,8158,19232,25557,-20384,-16632,14909,8151,5818,4253,3311,-328,16376,-30478,9149,9044,-5222,-22544,34,-15927,3725,-16309,2803,-20419,-8469,-8681,30091,-15879,-5114,-18506,10002,7226,-29038,20226,13408,30962,10505,4097,29869,11672,10315,21969,19515,971,20700,31776,-27230,15700,596,3886,-15567,25714,3623,-27351,26655,-14874,8704,22269,-24544,16396,15791,9281,-4938,-20040,-5604,13451,7613,1115,-15863,31159,16025,17842,30553,-30065,-25619,15521,23269,18142,14653,27118,-14510,12211,2054,-7408,-22090,5604,-18036,21328,-32109,-13990,20433,9074,-5892,19152,30719,-25500,20270,-2298,-18262,20450,8854,6615,26373,-3880,19251,27280,-18437,-23902,-7034,24003,-31175,25383,-4489,22018,7051,27364,-9970,29855,27669,15449,24118,8650,25304,20969,-28766,6464,-18455,22917,-1657,-1153,30360,-14723,12195,-13218,-3338,-1090,21998,-156,-2220,-80,-2382,6104,-27777,20912,21601,-4635,-30021,-13360,-9173,-27529,174,14000,21402,-8329,13726,-5576,31867,-15887,15022,16015,-17462,-16863,-24596,26226,22049,-18618,-10694,22700,22206,26250,-7601,15102,-22845,17859,1209,18473 diff --git 
a/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice33.tflite b/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice33.tflite new file mode 100644 index 0000000..e803cd2 Binary files /dev/null and b/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice33.tflite differ diff --git a/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice33_golden_int16.csv b/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice33_golden_int16.csv new file mode 100644 index 0000000..3368fdd --- /dev/null +++ b/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice33_golden_int16.csv @@ -0,0 +1 @@ +2930,16590,21525,8150,3546,-28425,26886,-9722,-30248,12640,9121,4845,3656,-19510,23213,-23950,3503,7003,-27142,-20349,-12148,26139,13873,-1828,-3920,22745,28679,-13628,16280,-12065,-6429,26707,-4185,-17402,362,7645,30041,20843,29798,-8623,-27737,-27806,-32755,-18626,-3024,-17602,-2660,-1108,-31788,-31805,-26574,-2735,-31992,-13369,31923,-8227,2156,-5952,20812,-11714,-5239,19475,-9337,27618,-14873,21099,-12563,1809,-25060,-1139,-24877,1743,-21053,-14380,-7732,30106,-25181,5208,9184,-3765,17638,-31415,-30683,-23410,-2954,15432,-32030,-13255,21750,-7207,9264,18255,10726,-9835,26996,1620,32366,9140,1580,29912,-15420,-16842,20667,-3815,-29586,-25701,-5586,-21931,13244,-16766,16536,-17789,17567,-23448,30273,13756,-22194,20613,-6330,-1378,-14929,2144,-3841,6872,4639,32181,-3298,-11797,-10041,17453,23994,29011,12761,11678,27789,11089,30850,20552,9891,17955,32332,-5428,31273,12570,-8765,2134,-15718,-29127,9295,-24830,-5994,21549,-29571,17196,-15505,-21343,32368,18012,25413,25454,7393,25288,-5442,7944,-28796,-29830,-3392,13067,-19124,30633,-18119,-18196,16426,10364,21012,-17454,-11187,31383,20461,-2139,-19287,-14315,7691,-16862,8634,-11540,-13036,20509,15849,-29573,13422,8354,31303,-14147,-12708,6488,-25080,-17046,28405,29576,14280,9856,26891,10712,-19293,-14006,-5747,-138,-125
20,-1943,-11749,29431,2165,19087,-22543,-26603,27598,-19178,7922,-2249,9079,-21831,-25488,6079,9208,5992,15400,13300,25337,22547,28173,27147,2606,-2522,-10606,-14240,-11058,-14315,-22008,-4184,-14392,-25331,-13640,9226,-10553,-23754,-13400,22519,-4984,2724,11764,3512,28961,-5179,22512,-28878,-32037,24919,1986,32267,-16678,-3693,-9452,-19586,-31125,-3547,-27535,-25824,-31621,-30219,-26614,27954,2027,2506,-5114,-4434,15150,32176,-11072,20735,-28270,-7013,-4026,2256,9603,-859,-21592,-178,-24702,7110,-17909,23549,11973,3297,-4049,-31108,-24983,-20779,9836,-12719,8260,-19906,12490,18035,26685,16368,14350,-23936,-2489,32761,-23270,-16455,1590,-25641,-18929,21548,20725,29382,11104,5265,2911,-6110,-4476,-6501,-7973,-25419,1947,-11305,-31566,-5895,17381,-17671,29006,28186,13312,20702,3791,3792,-22852,31326,-16065,9975,-23446,-16619,-2201,17731,19415,-23458,-14349,5233,-3801,-10581,-29514,31273,4147,-21808,-20394,7932,31633,-5044,-32510,-23045,-10290,-29203,6640,-30499,-6482,13682,32633,13278,11348,-18241,11578,15848,30898,11194,-10243,5217,-8597,11709,18451,-27148,1776,-28995,-12269,6174,-21712,-25376,5363,5135,-20002,-9550,30778,-2191,-11820,-21139,11833,30905,-31093,13897,-26329,3110,-28258,-10215,18169,32456,8343,6938,5946,-12675,-20683,-14849,23756,1742,-13069,-6664,-8154,9532,-19335,-15585,-12447,-18256,13709,-12930,-15644,728,-21716,-12043,25481,-17108,-11590,28370,28365,5222,21923,-31858,22637,13667,23246,11714,-26821,-15750,-8709,14602,-24419,-19849,-28981,-26705,-32221,32077,13999,22824,27488,11355,-15574,15178,-32684,21007,11423,18799,-30178,-16969,-7300,3533,-26923,-29763,-2551,29602,20363,-31093,-15043,703,-17011,-29021,-28340,-30992,-3310,-21029,16871,29403,2638,-4149,-16076,-705,-29765,3820,-17384,-13624,10977,-17061,21725,-5608,14984,2809,-7679,-5926,-10215,7509,-7505,13675,13348,8038,-28067,14027,-11800,-9209,-4674,21509,1966,-18396,-22136,-17327,21969,10161,-11442,3564,31551,-25337,676,14240,-8613,28513,12527,4754,22683,24595,6730,-11360,13514,19636,12331,31
128,-25149,-25411,-3994,-13233,-8217,-22293,-16571,18869,-29560,154,-16045,29342,-20822,-22179,12497,21359,-17791,22842,-4873,-18324,1493,14759,-11719,-7147,18557,-25764,21090,5726,-20897,-23416,-9369,30677,27435,-3941,15191,-8872,-7501,27714,-3442,-13127,20381,-1060,-30082,28566,14914,-13748,31192,10690,28340,2323,-4187,-6952,22851,-22619,19122,-14529,26018,6837,-31486,30789,9600,4493,12557,7483,609,24750,-9381,18517,-7420,24408,-5246,28928,-3089,-31533,4125,27661,12243,13531,-12297,-23753,26078,-3958,27344,-5596,7009,25445,12501,14873,-1048,6558,-18850,-17087,29038,-16399,18014,14538,-25727,-11564,-16084,-28905,-2370,-1912,-30396,-8544,-25752,-8902,-3096,-5155,2109,16040,-21048,-28923,-11922,27676,1760,-5861,-21795,13083,-32135,-17976,3133,-23340,15582,-24999,-27929,3508,21492,15941,10854,-22200,-24212,-14579,14656,-21121,2308,23400,-13295,6003,25196,2309,2335,25372,-23709,-10539,-6682,9600,-10868,26546,4025,-1181,-5442,-1607,-14471,24171,-11998,18877,-10581,-30797,-31851,31448,-1258,4701,17588,1533,3476,-20923,27711,7641,-24627,32403,-16406,-25871,-27893,12952,29205,10547,-15049,18542,-25006,-25412,-17409,-8463,6826,-24504,28883,15813,2561,7886,2971,-4776,-24133,-28949,7958,13513,-28822,-9408,-2697,-828,4713,5635,-8652,12325,19381,29644,-24177,30044,-28760,-14594,15517,-26159,15086,-24109,-10904,-14290,30014,-25507,6788,-22468,-17017,-1121,-11614,-17863,20161,-18072,-29072,-25938,-767,25236,-11235,18939,6917,-17669,-8400,21678,-1996,21915,-25835,13947,15354,-19801,-15199,-7229,-25644,32597,29752,1358,-17399,-13364,5481,-19443,27809,15498,-420,-25472,12054,-28739,-16664,21568,-10376,25135,23681,29239,14723,-3941,10295,-3793,26924,-20173,16389,619,-2041,7866,-14898,8104,-26580,-14689,-4797,-32200,-12328,19387,-3692,-32522,16808,13602,20077,-23855,-28106,12079,22264,-16693,6599,-27380,1409,-3040,26291,-5041,-25997,12234,28027,29696,-9261,5382,4636,12708,9392,-20427,30697,25845,-27889,-22047,-27512,-20861,-19672,-12602,27161,-7772,25962,-9473,13361,-21963,6943,-8984,
5828,1024,-25638,3123,-24062,13748,26318,26674,-25000,18123,13661,6985,-6728,3045,-28505,-10840,21350,3678,29414,24214,25321,18924,-4442,17809,-17683,5802,-15237,-15076,-13823,16106,5584,1201,-19480,11625,-7385,4810,12381,-26895,-18739,15890,18154,-5744,11783,2311,21736,32626,-3734,-6790,-18376,3262,15286,1680,15712,14554,-9594,-28319,-1039,11415,-29517,-19466,9571,9594,18503,-32709,-2462,-27509,28166,9352,19763,13240,-19878,-18485,-9429,23380,-2757,8866,17085,-13912,4259,905,14886,13418,-14818,-26595,-23177,29595,-14668,20399,10386,6950,-15315,-2769,10574,-28200,31415,2356,19832,3065,21065,-1319,7271,-56,-1414,-17712,-31725,-31950,-31449,-26649,6091,25652,-31331,20626,-9883,8926,26700,8133,-27643,18490,-15068,-30598,29231,-27509,26424,-18627,-2273,14642,-14049,-5747,-14971,-11442,32405,-10652,-14211,21632,6176,-18850,-75,6837,-9731,-10642,719,-611,-32164,-32547,30949,12744,-18414,14582,17789,10677,-1864,-31782,17187,-25796,21277,-6049,3317,6299,20758,-22439,-13846,19499,-748,-32329,-7602,7901,149,1769,-492,-10883,126,-29107,7539,27682,19358,-19243,25622,11881,4061,-8923,-24705,8080,14253,1862,25367,-13888,-25739,31056,14342,-18493,-17388,31699,-26663,-13917,-28114,20442,30000,8178,-24517,-32088,-20664,32045,18261,-25605,24724,26937,32273,-22027,-12607,-1550,-29823,-15037,13454,-32134,-4074,-20485,-31066,25812,-15089,31890,10294,25058,-17125,32165,23784,-13015,14827,26408,9636,23887,18795,16655,5096,27796,27523,18074,13139,-27513,-1755,23644,-11264,-24041,-21080,-10689,-23773,25,19973,2403,3766,2692,-26463,-8896,-9097,20554,12013,18185,6702,12750,-27282,-15115,-8257,27835,5782,23487,12028,-24690,12129,16115,-30452,-24330,-12067,-7327,-17933,29794,-18098,28673,-4155,7806,10934,-3216,402,19625,19863,-30468,27444,5560,26159,-1987,-26788,3505,21510,-29339,-17913,22800,-5031,12194,12886,-8604,-22033,-2319,-12679,-1136,30986,12947,4,-11679,-5558,-20577,20629,15925,-25968,21227,20908,6548,7028,-29046,-652,31066,24340,20446,-30796,-24958,16647,-27715,-14446,4735,-10140,2871
1,21595,-17619,-1932,18480,31444,-26534,8731,-797,-20528,27676,-16461,11155,26196,-6245,22631,1948,-30459,-19350,-4673,26728,-18249,-26132,9100,-30443,-1695,-20975,-11178,-26242,15355,27986,26763,-15623,-3643,-28372,27887,-2618,-22405,-7346,-17388,13664,3593,-10923,31741,31979,7660,24744,15993,-2962,21020,5730,15628,-30570,-1522,12653,-18562,9726,27260,-1130,-23106,-9174,-7970,12281,-16838,-28396,29387,27741,18408,29956,19079,13366,-12189,-301,-19323,28876,31626,25835,-25601,19989,-13383,-26039,27855,21370,27495,17957,17116,-4876,-22611,-18080,29800,-4095,-21991,-23983,4956,14215,-3559,25161,-5380,5848,13416,20377,5166,-27542,-18913,4070,-4681,19260,-19599,-8721,16967,-1110,30840,27475,-9480,-26364,28406,-28158,18168,26643,25540,31799,14008,6188,-12008,13347,9980,1234,12745,-15546,24823,-20923,30840,-21243,-14846,12793,-6880,-3747,-14469,32253,-6363,2037,-7643,20014,-20197,-3102,9174,-18830,-28677,13394,30843,-834,-32107,31504,-31589,10641,-4679,15088,-5676,17779,-19598,6129,15835,-31327,-29783,-10409,-9977,32454,5294,6406,24447,18857,17523,-22103,16471,-10633,-12985,-6701,11594,14535,21225,4662,27114,-30836,26349,12251,30284,25787,-30743,-13015,17079,-16740,-17383,22341,15497,-3702,27033,16757,13058,26949,-18072,-31747,6489,-17690,12398,854,11548,-28293,-24733,6771,1416,-6575,-25180,18651,4804,-5386,17823,28161,-9744,-25118,4019,-6387,12954,-8804,-6207,6079,-22309,-31035,-1061,17570,6693,-10959,-10749,-1154,18599,-6197,5779,18110,-1925,-2623,20990,-28929,-15288,-20286,-8512,11224,-4716,-26333,-21950,-18182,-16796,-12409,-18523,23498,3607,28988,-12123,-8050,27607,-28052,19465,14905,19826,-7140,-18123,28142,-13721,8920,11152,-5446,12806,-17834,-16196,12728,-12112,30416,-27108,-420,-6966,8262,-18249,-28728,-23805,-9432,18044,3604,984,-4209,-22279,-28394,25413,14773,17831,-22209,-29813,24143,-24951,-32294,17379,13576,-8123,-8819,-20064,-31744,13679,3498,11879,32213,30174,24840,-14179,-31234,-29847,24272,24249,-29905,-18833,-5258,2055,-30436,1200,-13256,19055,-5655,7831
,-6084,-932,-15820,13719,-706,-14639,30058,-31328,-1000,10475,13528,-30788,-7643,-6039,-18474,-26547,20961,-29033,7565,19291,2078,-13294,-23733,29024,7491,-5671,-17013,8938,-6092,-1343,-3613,4096,23507,-23897,29605,20400,-29386,2246,-10994,24763,26631,-11648,28771,23782,-19825,13846,-1164,18152,8400,30503,18730,-10770,-7403,25514,22346,-29451,-25385,19899,10997,3041,5522,31671,-25608,18317,-4070,-17336,-30254,-12561,-30376,-23860,-21751,28388,-27340,-22398,-13263,-28537,-12827,23688,-25894,-6031,-3782,-11641,-11925,-19676,-15859,-2403,6444,29978,-10965,-21210,-2688,-11010,-27534,19235,32091,25525,-30786,-13492,-10729,-31566,13704,-28830,6592,18313,-23825,23252,20750,-9570,-13518,27514,-7072,3581,20481,29857,-30249,-21517,9195,7718,-6043,18202,5515,-18684,-22008,27995,-9511,319,14025,-3903,3020,19509,329,-12014,25457,-20133,-7646,17840,308,10599,27590,-28045,-31536,31872,28270,-24543,3083,616,-11620,5599,-14336,-19011,16881,28822,-16613,-26165,15429,-2620,8940,14436,17466,15533,12653,-21539,13496,20583,-10033,-18450,-24457,32155,31141,-28521,17546,6310,16566,26547,-992,23260,21512,-14347,-12102,5871,2247,18085,-17821,-13579,21962,-18530,15937,30511,-1189,31457,-11760,-13601,876,15246,7796,29263,-32117,2090,4099,23021,13027,-17178,24292,19372,-29341,-4666,1868,-9415,-20374,-3388,-25428,-13833,-1950,2238,-31465,-301,10625,-6348,-23331,-12546,25004,22809,-84,7589,7769,-20763,-18425,14135,25687,7059,-4490,-6325,26323,4776,16121,-22244,-9256,-9894,-29796,-19630,20587,-12129,-11613,31704,10062,-18791,-5593,18623,-26163,-955,20661,-22979,14702,-18820,-9873,-15828,23542,-8903,-30183,-26410,-23488,29479,-8218,-29666,6395,-5785,24158,-23983,-25414,22585,13362,3938,29996,5607,-10928,7252,-21630,-21392,2392,-23294,17951,-16615,-26583,-4125,-10147,-17980,-1574,3756,11644,-17674,-13612,19009,-25988,714,26826,-9183,5812,20055,-31192,-4758,-5978,-17698,7114,15480,17766,-29635,24552,16186,7084,-2678,28081,5941,-23957,3307,-28629,-4818,-28640,-31570,11318,18840,11087,32173,25141,27544
,22286,-10600,-3603,3924,19051,7806,26600,-24896,31074,27664,-22716,201,7284,-3303,11687,4878,19926,21428,-30470,31766,29251,-28738,1995,-16812,-7635,23385,-28170,-11233,-1243,2468,12909,-25108,-11728,-28461,-13665,-5007,27395,20776,-23587,-31944,31542,8311,18164,-31841,-9776,-5429,-28088,9387,25994,19380,1939,-2040,-30492,12200,-10001,32176,5618,-24619,22987,-16473,15565,-30755,10412,8499,20969,8518,16922,-25904,-19250,-8745,7550,31970,31796,-12090,6912,4697,-15533,-9966,24459,-18086,-7559,-27100,18396,32656,2450,-16869,20443,18872,-29570,23174,18452,23925,31715,-26826,-8250,23235,-6109,-8378,10184,-7446,8687,737,-3113,-17264,-3857,-9993,8199,7975,12649,-5930,4301,27929,-14782,5015,-17254,-10686,26186,5101,16689,8940,7138,31292,14263,-3526,-17646,-24749,4434,1815,16643,23224,-6345,-7225,26801,-11399,30800,10466,31563,-20400,14467,-14613,-21709,-9877,-3268,20776,8087,8307,-5994,-29196,-15884,-5497,15846,-29881,-7307,-2845,-27758,-9001,-18155,-6395,27136,-21535,14828,3339,-8054,4391,17732,5581,-20957,18959,-25998,13349,-29615,20938,-17676,-23946,-19862,-29320,25899,-15438,27934,-1716,4800,22313,27640,-23683,-6376,-17150,-2358,-11835,-12994,-11056,-18102,24991,11303,-26014,-1317,-27920,-2813,9123,25304,-18553,-25085,27127,6460,26127,-325,-8798,-8426,633,-4805,20431,2968,22433,23711,-22568,26480,-15225,-15260,12380,-2806,8784,-7809,-9837,25880,-13336,-20120,-20505,-642,-14895,10165,28322,-9008,-30180,13436,-26580,-18908,4418,8986,14060,-19514,24312,30372,18996,-31943,10075,16077,-3780,-7765,-22002,2630,-386,12554,18178,-10658,11537,10393,-27488,-308,9915,-15926,11148,-5022,-19191,-12268,-11185,4773,-27275,-10218,-12021,-2845,1282,3401,11379,9049,26385,25313,-31835,18390,-29988,437,-17147,569,27048,143,-28175,-22575,16396,27673,12371,-12215,14984,7812,-22265,-21496,1546,23257,3042,16263,-18125,28044,-20947,-17020,2182,-164,7850,-30946,10304,-26132,-10020,13650,-2613,-4860,15325,-10780,13549,-15894,32578,-13960,6333,17103,27517,23196,10863,-15666,-1101,24486,-9486,30706,
-10251,32341,-9755,30794,20539,-1115,4991,-4668,30493,-29417,-1177,-14748,-27565,-32184,-5575,-7283,-3804,26946,-11854,13168,30924,27489,-21747,-24148,22079,-3589,2595,5977,-17087,-9705,19736,22838,9730,15083,1126,10785,-5224,-11662,-20264,-32168,-22132,-15918,21147,26947,6809,27422,23719,-11447,-15057,-29017,30831,4569,11581,216,-21752,7197,-18431,-17918,15105,-28014,2336,-29347,-15326,2390,-8292,-29659,23705,-17640,-16098,-6358,-7063,12342,-48,32613,-12851,-12211,-1131,30268,-24109,-22118,-28333,11450,-17351,5929,5724,-717,-31841,-29999,16154,-614,-23498,-24569,-17078,-7139,-29359,17698,2967,20385,14430,-3955,8823,-9712,4662,6226,-1170,6979,-381,12319,20369,-30653,-5018,-3972,22836,-617,-30131,-31482,1883,3760,15085,-3611,-14309,23611,20918,17608,-11933,23246,19109,4401,-6330,-3230,23414,-18684,-16058,30999,21169,26095,4680,3359,18131,-26652,-1197,19159,17,16017,-418,-18495,-17363,-13235,-19682,-12592,577,-18241,-10819,-21196,-27114,3916,15846,11367,11832,2576,-21551,18370,9515,8383,-14874,18508,21558,9265,-8620,-21357,16294,-17396,8251,-14294,21294,-29148,3778,-1418,-10380,23063,-2310,21872,-26543,-5185,-2691,3333,19300,29185,8996,22603,16570,428,-3122,-17444,-16237,-10461,18172,-31458,-25726,-25044,19590,-28758,27132,10270,-16367,-17595,-1159,18180,-18699,614,754,23790,8843,1350,7785,-26173,5743,-10592,14307,14348,-5555,-24248,-26020,-30276,-5358,-30340,25506,-11784,17144,9844,377,17223,23204,27770,-4174,7131,-2829,9911,25563,-12244,-1215,1753,112,-25184,-11880,-1618,-13294,-30320,-11787,1208,-8991,23936,-32657,-26715,10702,-11942,1017,15775,25552,-5245,-455,-9468,31400,13693,-22548,18646,19485,28324,20385,24247,-8980,-28051,11514,28077,-18851,31108,-18837,23498,-3224,-27691,20302,-1199,7453,4714,15825,-31307,13362,-24093,13196,12633,-29341,20404,25248,13037,-30449,24702,14220,-10759,17762,27749,5939,1772,-32383,15217,-11539,15507,10611,14948,-28883,24527,26677,-5806,7881,-8108,-21554,8228,4799,15425,-8740,15454,15188,-11482,-6588,16131,-6226,-12745,-25991,16860
,-25492,6298,-2972,-15155,10560,32377,25870,-4701,-6714,2208,-14578,-2375,-23286,23037,3528,-6728,-2067,31294,-3591,-8567,-23738,30249,6397,-22895,-27181,17079,-13648,30452,21632,-9577,-25832,-1679,-31797,-6314,-3166,12908,-1989,12939,-29490,-11011,-28262,23405,-22870,20144,30701,-2086,-4043,-13883,-9627,-23788,24762,15708,11749,26053,-24903,-351,-17964,10447,-20788,-31828,-1720,-17713,15589,4409,10549,21876,-13454,2265,4084,3976,-31982,11303,2619,22228,-24379,-21232,2281,-18012,26628,22021,-20870,-11202,16222,-2034,8366,-19104,-20122,24487,28987,-4143,-31925,24715,25756,-14827,5094,8990,23582,-28201,19900,10514,19442,-1901,-19000,24404,16230,-29756,2255,-10610,-4459,14895,-8050,11334,-8492,16027,18156,-11347,-24138,29548,21656,31961,12152,-4210,-2449,533,-18268,5135,-14773,-6820,-31628,-7459,12775,13979,22671,-7365,-5902,27311,29256,15529,3260,20283,25784,-1610,-16139,29130,-28959,-1148,22209,7301,-19011,-1531,25593,-15537,10193,30582,-12401,-22748,768,31308,22764,-21810,5166,19726,22557,23526,30712,-834,-5142,17263,25212,14458,-30659,31993,12618,30142,-4462,18537,9979,22498,810,1417,-1496,22212,-23013,-4171,14086,12430,-5997,-7312,10477,-21934,4814,-6964,29435,973,-16235,8181,12425,22991,21048,7165,19839,14761,30936,-18846,582,14933,-9265,17880,17786,-20062,-724,11123,-20273,-17795,22618,13357,2276,22938,16310,16427,-8078,-21441,4150,-24281,-29609,-14780,6974,20000,-22505,9910,-20551,-9456,-5310,-8670,13021,-28837,-22629,-22927,-32004,-12252,10723,-4381,14040,17208,-13820,-15938,-8731,-28335,-25735,9936,31300,1788,11797,7200,-21396,-22533,-1846,8060,-9780,-16803,10869,-27188,-6271,-27265,-1770,-8294,22292,11675,-12169,-2200,-10725,-15064,31160,-32538,27574,-32250,-5509,20128,-31351,-6119,-28422,5953,11902,-7301,-3390,30663,-31715,-27494,6491,-32159,-18785,-6366,21223,27537,31669,3585,-8985,-2271,-15285,30462,31881,2576,2725,18935,-27301,23576,16802,-3831,-32035,10079,20333,-26914,19770,-28081,-23388,-10996,-8553,-18116,11528,1861,-22738,925,19435,-23696,14702,-271
48,-23590,4572,-20008,-23691,10889,-6068,18767,5797,28807,-28932,1387,21925,-26939,-26543,5432,-2928,30245,14882,25155,-22529,21004,26538,23011,-15981,-11542,18464,31622,-27242,31125,-5703,-19025,-30698,-11079,1618,-4800,23113,-25869,29612,-1940,28922,12363,-30760,28146,-32650,-32351,4260,18347,-28303,7202,26059,24309,-31901,-7460,10605,-22084,8325,25040,18194,-7389,25955,217,22901,21727,484,21523,-11389,16997,-18301,13263,25983,22123,28811,23901,13379,-628,14019,27626,1298,28407,2827,-24447,-30220,20587,-6079,32688,-14925,31752,-22721,-3828,2405,21867,29295,-17820,4714,-12825,-22535,2780,-30002,21847,-6863,30019,31457,-15962,12897,-31799,-20867,-22138,-10502,-28042,-14147,20220,-20189,-26294,-23714,21110,3819,11182,20140,-16988,-12767,-26197,-17037,7914,31875,15156,-22152,4637,2084,27580,2437,3579,-17769,9025,-25837,-1635,-3643,-7241,11789,7611,-4641,-24249,-26731,-14086,-29232,2384,-31086,26697,-17835,-15339,31239,8945,11822,-1138,22950,-10388,-4923,10755,-28961,-4445,29825,-4629,-20932,-12551,21658,-15795,-24607,-3420,-4820,-13714,-23119,-5185,-24799,31457,29908,-8998,22210,11297,10286,1131,-17378,13147,-17828,11335,10271,-17504,-6292,9618,-1739,-2534,420,2290,29266,29130,-4306,28639,22372,1737,-4368,12160,1483,-28205,10928,13411,29409,-1333,32175,-8337,-24710,-7150,20652,-13187,-7709,-30001,31197,-2504,25741,-17384,-4785,24078,4556,-20047,-24141,6891,2831,-36,24083,-20528,4028,-27478,7395,30636,-5382,23893,31914,13262,27321,9167,-18471,15381,28065,-28241,3307,-11761,-17457,-17001,29819,28721,11539,-20964,24512,-22036,1132,4254,-8611,15522,25733,-24330,22741,11767,-17243,18930,12118,-10995,-10420,12869,-19072,22639,-20316,23441,1740,18386,-13025,-18398,-22485,-1360,9944,28931,-14394,29366,7512,31842,17030,22843,-15582,-29722,-21983,2853,-5518,20579,7529,-27054,-13898,18011,17670,-20812,-21780,-4744,18986,-17326,28164,-28771,24183,15908,-4930,13732,-5247,-2063,7334,-11116,21110,1329,-5107,30726,-4545,-28993,724,-9864,-2702,11859,-19609,-8109,-4822,-10234,-12564,-1
366,-11150,11448,15075,32225,-5808,11626,-13505,-10129,11303,-23721,-7119,-27275,1598,9075,17224,-21461,14795,16009,-1375,-28341,-6399,-20705,-7260,-1253,32223,20382,31523,32523,6702,7437,6220,-25840,-16555,18254,4,9212,20710,20751,-17338,26376,3289,-527,12094,2634,30701,19783,5985,2009,-15901,-8663,-4981,27936,-20529,-17290,22273,14838,15726,-14319,29407,-22423,-5843,-1420,-32151,15830,-15401,21828,32615,10072,-8803,-27311,2518,11752,-3706,-30885,26752,24302,-28198,5883,-14524,18727,21809,9529,-32112,-15821,30804,-21736,-32210,-15550,-25508,18905,-11272,-26472,-13865,-2822,-17091,16882,11320,-23139,-7360,22547,2019,-27619,-11257,15850,-6487,-12498,-11704,25318,32410,891,4859,-29835,5697,3782,1796,2332,-18258,-23636,-5431,536,11012,10748,-32733,-9223,-5542,14691,-6108,-18577,-23939,-12312,1779,7961,-13419,-5257,13917,32739,13620,29491,11451,-7418,2553,-29588,28606,-17158,23838,1437,4173,-7734,-538,4830,17475,-28798,-8874,-5586,24927,-18690,24603,-9886,29003,-20865,-22282,32555,460,-260,-11114,-14930,-123,4830,5967,933,-21533,-18765,11323,-2467,-5383,-21744,-14146,18270,-25593,-27630,16149,28529,-32503,15090,-19623,-17279,-9651,13609,-19121,28669,-8728,-12263,15024,1106,7970,20388,28881,-8638,5018,-1361,-32578,32713,-15237,12386,-22508,-12476,29074,9594,24964,-32008,20159,-11025,-15808,-23896,4813,-6686,-22818,-25270,-7238,6142,5996,-29234,5904,-2100,5280,28473,24042,3295,11943,-28438,-28157,3748,30809,12862,5077,23362,29963,15655,-11379,15380,8295,-27851,-27893,-11060,17396,17357,15809,15524,15330,16100,-31998,18159,-6548,-2507,-22600,-6294,-10933,6061,-12433,-2445,17761,-17513,-5010,-5367,-15829,-21448,-2260,14880,10553,18054,-22606,-32309,18165,-5839,-12788,15376,26594,21335,17660,31451,-26156,-11287,4328,-13859,1078,182,25559,-10720,10509,-20550,-9168,-10071,25848,775,-30099,16545,-22492,-8724,15607,-29460,32653,26771,9492,-17065,27839,15710,1301,5107,24018,10883,-10731,12943,-10775,17172,31420,-11051,20553,-31088,-13475,-16325,-4791,24247,-8253,18549,1685,-82,15
496,-32525,5184,14933,-29258,-3780,-24729,-17963,3222,-26057,5032,6968,-25971,11352,2252,-8098,-1997,-13104,-29121,-21949,-9520,18635,32504,-21713,-15184,23960,-14017,-10060,-12704,26135,5838,3916,-16597,9729,4596,-16643,-10609,5076,32280,1061,7098,11704,-21400,11156,-29748,-26713,18460,6174,-8219,-21827,-18065,-8580,-27375,-5395,-11871,-16989,23027,-17477,-23165,26616,-32549,-5878,10345,-14007,-9199,10544,5745,380,15698,-8255,13060,-17336,17641,-11028,-24915,3003,-17323,-9039,-13657,-25944,3413,31385,5832,-32349,-30374,26466,32030,-16254,22481,-21803,23646,5201,-10181,2234,16297,-21663,10637,-5646,-17441,-14492,-23388,-7261,-16569,-26750,26462,32526,-2704,26909,-9249,-2622,19806,-6269,-12862,20964,6938,27068,27694,7414,27123,-30288,6065,24105,-12227,-1323,17727,30096,16071,-15208,-20816,-19936,27372,2071,10327,-28179,2046,19608,1213,22532,20397,-24610,21260,-22365,-3078,1391,435,-6024,-15754,32516,15750,24947,-30750,-586,-26684,19306,-29497,12578,-22738,-15940,-197,-24449,-29076,-31566,-26987,13344,731,20234,6201,21755,-30753,67,15152,8449,-17360,15051,-7165,-6566,-13166,-11258,-15854,32736,-2498,-6290,-319,-15528,32224,8102,17714,138,12015,-17499,10675,13758,29986,9893,-11418,28631,5050,-5500,12293,9072,-18007,-24615,-716,12356,8734,29633,-27642,-2136,9729,16965,19429,-23079,-15685,-19420,-24567,-4977,-29427,-22650,2558,15239,7587,-18941,-5268,-7236,-20842,-15590,12317,8243,-23170,20814,6725,-7709,-1218,17227,-12570,-32217,-26546,17205,-8955,2539,23161,-9902,7130,-22950,-6228,5911,-4772,9207,19402,-21570,-13064,-19404,-30123,-28230,-6158,31355,22301,4223,10952,-12392,24326,-13814,1553,7358,-25475,12003,-22804,31240,-31251,9108,-24177,-9953,27831,2818,15948,-6727,-1076,12753,29551,-32009,22555,9930,11125,27721,14585,-9143,18228,-20956,19323,22667,9577,-28832,-552,6009,26025,7293,-14246,31103,-23511,-8135,-8643,-12734,-29077,30758,-3426,-16322,-31897,15073,8276,-24512,-8631,-23170,7641,8983,-6383,5587,28392,-5160,17215,-1515,-10222,1937,7092,12047,24266,21381,-614,9
587,7449,-28275,11843,17773,-4369,-31898,20168,-2237,25184,-223,-18241,850,-11297,-29280,-2335,-21093,17862,11644,-25632,-11043,1051,26661,27043,-25647,-23,-28048,-237,4461,-20144,18919,-31689,24148,22748,-22669,15735,-16368,-5241,-21757,2823,-407,-13130,-23416,8288,-14713,-8106,31864,-5032,24108,-6985,31460,-12865,15361,-10980,16145,-17153,-19402,-22729,-22052,-32377,839,25301,3146,19466,7091,25734,20434,-25882,-14825,6807,-32324,-3944,-20212,32755,-5763,28493,-26307,4030,-5282,30524,3286,14190,17679,-7507,-12067,10786,31525,-3504,14423,27319,13731,-20285,-14154,-4480,-13167,26353,12027,6102,-25398,-31388,20586,-4114,9904,-21349,-4997,-4625,-20215,-12030,-25088,-26742,20772,1893,-2394,27725,29430,-9806,-15363,10086,-25689,-10430,32278,-6637,-14631,-24399,7197,2858,14776,-5127,10848,9222,-2,-19723,28913,8948,-29314,20890,31986,-23295,-3791,-25630,12902,13674,-20316,4760,11110,16603,-26093,-14686,-27800,2982,4443,2951,2227,-17534,18916,1860,-5001,-18817,-8060,305,-28512,-22986,13244,6093,24990,16022,9565,15078,27359,-21188,22185,-4238,23242,7100,5407,-26217,5753,10089,-25060,-31179,11746,27796,-5683,193,-29478,-20264,-24346,-12812,25118,13237,-9469,25179,29887,-20222,14144,13213,31115,18579,24711,-16987,-3610,13135,21937,-6691,-11283,24325,-17038,4745,1620,20701,14812,-15878,13081,-10064,-19566,26004,-27436,796,-25267,29753,-6860,-1287,-27576,-21324,1933,-6370,2295,-18967,-8654,1124,-9578,-30382,7858,-6928,-12787,23706,13869,-1097,15958,18467,14597,-20636,-1,31179,32309,-28370,-11641,-30146,-10799,2848,-15394,-26668,-27849,-23447,8230,12009,-18770,-10108,26084,2842,20004,24872,19676,-28657,-28928,-11955,2092,9220,19588,4590,16718,12668,15052,-22385,-23813,-12048,1443,-18036,-22425,-32073,-86,-326,5609,17637,19872,3999,-10870,-9584,-31303,12019,-23523,25264,-14857,19853,-20824,-18909,-18832,16633,-32603,-18474,18950,-9358,9112,15123,-14616,-5741,15753,-13553,8030,7341,-15739,-13944,-10181,-7142,-32586,-3812,3263,16101,-4306,-7694,-15509,-12918,20577,19577,-10902,-2528
3,-1497,13651,-3176,4528,6353,9510,-19513,-22212,-32175,31439,15545,27986,-20758,16637,-6521,17100,17660,9327,-422,-11835,311,21204,31660,-10940,-11291,-5446,23027,-6361,-4706,10572,13912,12004,-22210,24822,30369,20462,5920,8012,18053,6107,20134,2095,-27410,26058,2827,-3549,25128,-12863,-583,27143,28484,-31950,32536,-7519,-21109,26249,17975,12930,-12800,10136,15443,-156,6165,2752,23401,-3038,27904,-21593,30462,-2771,-14846,18494,22828,-10584,243,-6908,-2446,-4572,-16121,-6441,9680,-20146,-12672,-29975,-26805,-16719,19269,11487,-21075,28734,-6632,6801,-19501,-28755,-1481,-5091,627,-23882,-12704,-16417,1834,29704,-13727,-30651,-13121,14363,-7368,-6505,-5197,17469,-1656,-1266,19961,1836,20436,-14135,-28810,-8805,8066,-10303,-28793,29809,22412,-1666,17341,13076,-8688,8678,-25161,4094,8308,8310,-21880,-24664,-1479,8542,-7025,-26431,11784,-10430,9862,-20633,7707,16989,-12230,21217,12128,18586,14813,9559,-26647,11022,28708,-5999,3092,18632,-32638,25262,-30930,-24326,-17160,-14978,19129,-7926,16117,-28665,-25359,-5836,-4273,-21601,-17662,1433,4977,4535,10145,-18970,18791,-1733,-24732,21790,-17604,31678,-27636,-30068,26636,-4480,4464,28963,-22984,-13067,14534,-22063,16782,-8928,32762,16069,5204,-18086,13022,19408,7039,30282,-31958,-20695,31108,-745,-31933,27625,16018,-17092,8391,14540,-19953,-29134,-20626,-12259,-9362,-18180,-9796,-6510,-31705,32490,-9057,19518,-1679,-31018,23898,3770,-28990,20577,1627,-9010,27093,17360,-9802,16505,12447,-2717,3044,-669,-2075,12405,-4107,14454,1077,32550,11784,32515,-14613,3708,8774,2773,25081,10376,5930,717,-14959,25786,22042,-13224,-31572,28,-2530,12735,-22221,-19005,21685,5042,-4934,32486,-11385,-4694,1180,-27422,21695,3398,1823,9172,17263,11010,24647,-30796,16028,3011,18076,5010,-1577,-27765,-14908,-26244,884,-21727,27512,-1949,-10680,-30849,866,-14946,18739,10762,-32511,26543,-14425,23232,-2193,-25046,-24258,-13321,19844,-24383,13028,-7402,16124,-22825,-10288,-18069,-22379,28334,17387,29123,-22152,-28817,18444,26332,17790,-27398,-23128,
-5126,-2056,22682,-19235,-3220,-7642,-11747,16469,-21623,4806,3696,-24015,-27024,-30537,933,-30828,2613,29631,1202,-17976,-18568,-6193,20176,29908,15457,26330,-1932,-22877,5205,9876,-18129,1824,18681,-31256,28326,27121,7605,-1527,9393,-21306,28280,5253,7144,31506,12011,-21902,-20377,-3262,30493,-9769,13675,27034,-30703,30656,17689,31047,-5804,22774,-13390,31395,11463,8358,11356,21650,-12422,11766,-12340,-25615,-27636,-17010,24244,-8600,13820,-24712,769,14985,26701,1188,-11542,7944,5681,3921,-24633,-12004,-23711,-9664,-27352,19727,29036,7375,11428,260,886,-26539,-9492,-24630,-6572,-10199,16913,-118,26572,4118,19367,-1521,30645,2698,-27676,20528,-17262,23874,21252,8858,27923,-29428,4489,30372,24335,-3579,26834,1781,-10604,836,-30169,9407,-9968,2364,-23259,9837,31127,14543,-548,-9026,-7434,27370,2259,19038,23245,-19664,-3726,29963,-17246,11616,-28115,24984,26797,-37,30146,-20074,-20571,11067,-23081,30598,-12032,-16179,-1182,-16672,20687,5964,11417,-5824,14909,-23885,11956,-7028,20072,28653,-10103,-9386,18463,-11244,-15943,3766,21394,-12699,-5561,-21335,19792,20922,-22466,-21960,-2454,16598,-21149,202,-22302,-14705,-21102,-31602,-32438,6939,32708,-29139,-13130,19639,-31733,-10615,-20818,-17301,7704,-32138,32056,-16965,-19945,-18880,-30157,26128,-17397,23019,-10825,-939,29086,-2794,-29982,18268,21661,-29003,-10508,27742,-6063,-12732,30421,9096,-5546,23623,15422,14928,13635,17344,15486,-12932,-13018,2594,10722,-14200,29223,2296,-15658,7870,10205,-12277,-18356,6149,-6717,-1032,-4892,-16979,-20987,8033,-5209,-19407,24013,-28131,32614,387,-19524,2767,13532,2597,-8215,12627,8996,22977,3336,3405,22430,687,-4851,22519,24838,1846,5837,-12152,32365,-1967,-17426,29496,7321,31772,-32065,-3085,24682,2152,-15187,-19138,5735,12145,-5500,-3104,-9387,15445,30119,2251,14307,-1438,3626,-1539,-21705,18073,-30828,5393,1087,-9422,22616,15438,24190,11600,-7256,-32406,18880,21999,13964,8516,-22475,17553,-32699,-29898,5921,-20655,-22165,-21739,-25104,-10994,16507,-32589,23346,9122,-28232,971,25
421,17767,23500,-14706,17113,25347,17912,9399,-2075,25996,12779,-6347,4562,27497,11120,-28376,-26259,17123,16875,3367,29508,12865,31663,-15312,24840,31611,-30882,30619,-6576,28169,-15504,30215,-565,-17862,-30030,22645,-226,28701,-27267,15904,-5248,-30904,-27504,18098,-23347,26825,-31426,-11932,15950,-22315,32555,30325,-13724,32342,-11697,-10341,-31240,17930,-3215,-8613,30374,25793,29691,-31851,29772,14700,15273,14502,19160,-22903,14283,4417,11574,24440,-20621,2967,17405,382,32006,-1222,-5879,21772,15064,-28501,-20712,-27979,8767,-9567,29787,-11053,222,-12897,-5702,-10448,9730,31795,-7671,18434,-25168,21581,-960,-24825,-24594,17857,-6426,-13785,-9652,22227,15314,2120,23986,25449,-8327,-8382,6193,-9985,-11821,-3346,7858,-20876,23666,6768,15750,7431,-21428,-25263,11861,-22709,-2936,-29688,-7419,19633,31185,11584,-20919,-31502,28723,15247,-7905,-21613,27967,15031,10816,31891,27071,-1497,16703,-31759,16667,-30469,18597,-26450,-7698,-15943,-10605,-27279,-5823,-13695,21816,9358,12686,-3753,-10765,-23915,-5998,15721,6433,20155,-4123,6750,16050,-12821,14628,-21102,13838,-9544,13556,13119,1579,10069,-14992,19804,-24690,-19606,-7330,9847,-6428,31303,27693,-23619,-2267,25597,8083,3159,-112,-23842,-23770,10584,13215,-26114,5936,7731,17781,-12309,12386,-26928,-23447,-14074,-30270,29628,-383,-14802,10644,-11333,-25535,-18677,-17182,30139,-15565,3639,4104,29591,19192,22095,-6560,-26864,-5478,3837,-4674,19244,-14195,16485,26977,-30389,7069,-22412,-1274,24029,1761,-25442,16206,-12417,16495,23462,-27336,-542,-23564,-15793,13208,12106,22997,29714,15695,9741,-15536,30921,26640,-32006,11174,19013,12888,25275,-21584,400,22917,-18085,-12775,-3489,11717,-20472,20845,-3401,9255,5396,11671,30881,32146,-28574,-14928,1120,-11968,-16575,297,-25372,-18768,-13495,9287,-8104,27792,-20332,-4109,-16563,15684,-17678,-27567,639,-32386,-14100,3262,17907,-3999,28119,24461,19045,-19760,-22370,22230,-30680,8435,5129,-3379,20641,7951,-135,-23737,8498,-6522,6488,29235,-27887,8421,-32271,-11849,23183,7492,-12
657,-17974,16344,23906,14426,30113,4506,21262,4154,-19781,-23147,-27101,-27535,4889,20459,1739,17102,-14649,-6335,-23769,-13681,20450,15145,18715,-20923,-2051,24458,25701,12279,-18928,15480,663,-15841,-534,-14365,4299,-29711,-19559,7754,-29070,14188,-26579,22277,-7672,-9221,10527,19480,23664,-21354,-17160,-4661,6555,29405,13219,25460,-24821,-17747,-30305,-17609,15960,29756,7183,14851,1763,-12631,23171,14418,-29903,-21305,30246,23879,-20993,24786,-18677,-5959,10173,11293,-14627,-23913,-8647,7196,21379,30280,17121,18600,11997,-15614,7420,-29737,-32625,-22023,11770,19663,31929,-18434,1353,26855,20407,-29949,24941,16610,-8509,-24235,30026,30035,4569,10345,3572,-5703,-4605,28108,-27469,-28900,-26698,-7857,-11994,25231,-27667,20121,7417,-18464,-29376,18470,-17020,21124,27188,-2181,3359,15767,13332,-3380,13090,-1287,-30869,-13494,1449,7210,-6605,9364,17046,-17029,15047,15710,8230,11544,-3644,-28170,-18358,-13121,-364,3954,16955,-26308,24439,32303,508,-10122,384,16086,4749,-13763,-19221,620,21055,-2068,11427,6062,28035,10551,-20976,-2677,-18298,23828,-25142,-15340,-13548,-12343,-12147,-13127,-22940,-17009,-24509,-3622,2164,30200,20885,7072,2174,20209,23877,-21535,28312,5133,-30924,-9878,13479,25,-4805,-9511,-17799,4234,22779,-17549,-6036,30582,5773,-3023,-23068,18142,3315,-7751,2292,8326,4538,-26276,11968,-12469,23898,22490,32610,-9003,-11455,7801,8182,29178,21835,-32553,30305,-17674,-24560,139,-9796,-21708,30486,-3187,-30970,25476,5761,-18047,-11392,-11480,30373,24124,28664,29577,19413,26134,-12112,-13590,22658,16526,-16198,-12039,32286,-432,30086,-30137,-6078,-27778,-1290,-17397,20001,-14719,25505,-2254,17452,3518,5081,21447,-12703,23750,-27232,-30785,17729,-4182,-8546,14261,10960,2566,10328,-22331,15089,27652,-11833,-28974,-7707,-8667,10059,-32193,12685,4615,-29382,-5352,13513,-24157,27216,22731,-22937,22017,24250,-2989,-17565,998,12230,-12186,21218,-28288,11029,-25211,23163,-16543,25661,-17197,32162,-8230,-1740,28840,30203,18702,-11097,10325,-20165,7421,-8887,27070,-176
30,-30347,-1171,-5642,-11959,28129,-7460,-1874,-4100,-32324,-31405,-24138,-14954,19205,22524,6898,-15407,20155,14129,771,-5483,-28639,7204,-31599,12005,28787,-13534,10727,-18138,17476,-11002,-27649,3992,2684,3897,23798,-1471,24008,-18696,3226,-16052,7647,-8348,-22614,13298,-899,7203,15935,11778,6909,-15693,9452,-32294,1696,-6674,-5781,29838,-31967,-15122,25030,-106,21665,-2399,31320,2372,22988,11091,-29157,-24700,-4280,536,-17478,19641,-19373,-29143,-21526,-9819,26340,-27953,30050,-27613,-700,-18489,-6056,31225,14119,-10882,-9235,-23211,31009,5634,6194,-17689,5472,-27086,23392,21120,29745,19138,3806,545,2876,14692,-2020,31035,-7470,-31077,-19639,-15974,-19873,-15166,2275,-29069,10624,4041,782,3049,-15944,18604,-15235,6176,20551,-5369,27295,4729,11678,3986,13959,-31491,10519,-21544,30234,6453,-22429,19146,-118,-26233,-30567,1808,26820,-7651,19634,-28430,30817,19165,4100,13682,-27365,6677,14072,-29286,-25133,24429,3364,30818,24066,31507,6561,25177,30303,-4445,-20744,-20011,13472,1956,17838,-1316,-10145,-19049,-16145,-1276,6762,-29348,-12806,11536,-15778,30301,-8263,-46,-9131,-23501,-10421,23089,18187,9519,-27703,1120,-11099,-1220,-29217,-22467,-26826,8823,20362,-28847,25274,3927,-17649,25622,-31745,-13952,-26422,29914,-25873,26047,30165,-22066,-21469,-22585,32478,-17912,-31896,-25002,-3254,-1722,-25612,5713,-21435,7321,30911,-20282,-29416,26806,9446,-19762,-3307,-2804,7459,22325,28276,-24265,3269,-12883,-26859,28005,28795,-18465,15486,-13669,-26834,-17472,29025,28959,-11931,29117,-556,27824,12032,12187,7493,-31135,3154,-32509,6344,12171,9411,-1606,-12388,-14932,-9082,8454,20562,15344,29400,-16225,-3630,-23167,-32386,-20863,-20471,31136,-5186,-32112,16431,-24544,18259,-13073,12378,-14075,32232,13450,-5275,15855,-20275,16191,-13846,-29965,-27713,-17653,12848,23659,30592,-18158,21203,10014,23493,22062,19555,18163,21999,-32530,11442,26696,27048,-29825,10799,4893,18655,10963,-14109,-20897,-22542,26622,29756,17951,11238,26802,14901,19996,32159,-12236,-20134,7252,635,-26175,
-16521,12821,24148,6338,8351,-28014,-31485,-9117,-6076,-15314,7391,-8746,30666,8759,-4362,-30978,3021,32068,1989,-10618,-29251,9193,26438,11988,-15901,-20434,1221,-20127,-31647,3527,6167,-30290,15988,622,-24419,20251,6254,1877,-24897,1145,-10043,-5424,-11003,-13568,30651,-10342,21642,-26697,-1346,-1017,17321,28393,-19666,17264,31233,32210,-802,-14658,12727,6525,28051,-27425,16880,-16068,31400,-24176,15958,1594,16611,12557,9056,-17422,-2015,13854,20216,-30979,15953,13568,7718,32220,-15718,29024,-26238,28758,5385,945,7826,30888,5851,12828,4266,6339,-16147,-2629,20884,20905,-22478,10401,-5193,-27050,975,15175,23350,-28475,-18034,-18904,20197,26891,26195,30785,-3148,-13013,-1938,18752,9127,-25554,24457,30573,-31208,10191,-26083,-32379,14513,-1567,-14605,10051,-9319,8542,10851,19950,28419,-4152,9565,11057,-17508,-7980,18844,6515,22992,29867,-28354,-25734,2237,-18337,5842,-24382,-17672,24945,2670,-27175,-8747,28681,22086,-22287,-18511,22264,-14143,677,13354,-17534,-28374,-29484,26091,32200,-25507,-266,26559,-3789,-27422,9424,-6420,11513,-15431,26459,-10361,13718,12137,29592,23973,22997,10144,6919,23912,19020,-20870,-13374,26701,6336,-11935,14173,12598,30828,-20680,25232,21295,17010,-14410,19374,6910,9623,-30454,19214,11728,-4514,440,-6830,20395,-3899,-3321,-7454,12934,-15182,-6373,7437,-14806,1601,-22815,-1630,9405,-1739,8438,-14750,19924,870,-20489,9070,-15796,-14432,29472,-3768,2886,24550,-24655,483,-4419,22137,1041,5378,5524,-29892,17950,-14632,-13397,22437,13403,7999,-16521,-20573,-15054,31181,-7658,5458,22974,20030,13371,-2163,30169,19229,-14685,24265,-4427,112,30991,-30809,-17028,-20179,10426,-620,-7610,-8993,-15864,-28971,-6672,14465,-21848,-6539,-17356,14048,23715,-30864,-21130,30316,-30134,-12541,-10860,-23716,16844,-27836,-19327,-4406,-25284,-5928,28184,-25973,-28045,-30009,9402,7189,-23660,10103,512,-1049,-12592,-23175,-27307,-5730,-280,-11536,10952,23786,25059,18980,-32263,30726,-26347,-26147,-5970,-20681,6204,32583,20776,-7119,26109,8480,5306,-10404,30066,192
25,9526,-26840,6529,7478,24919,-14371,3170,11357,-13211,27675,-20598,15417,28504,-7218,25657,20407,-16187,-1835,-25735,15255,-32320,23881,27026,-32504,11693,18263,-21866,-13785,-10559,-23301,-8351,14882,17741,28065,342,-29949,12843,-14582,-25069,1491,-8648,16066,5933,32573,19147,12987,28628,2525,-8115,10360,30749,23074,-9221,-15240,-3373,7385,23000,218,12288,-24797,19563,14979,-19687,-31817,-10948,-3390,7949,26662,-3632,-10187,30706,-3118,-1467,-6660,-30232,9186,-18052,28928,14635,14269,-5463,14868,8397,19949,12485,-26569,6914,11347,-11951,-8276,13583,-13170,5980,-8910,-9905,5645,-22098,30871,17213,31153,27394,-10167,9499,-578,14021,-19971,31149,-1636,-21173,10975,-12310,-12390,-16381,15343,-13620,14887,-121,-14499,12782,-17957,8694,-21480,-17104,5628,28564,1867,-7472,-27322,5533,24509,11692,-20934,10498,-2108,24139,-7619,11960,-32466,19812,24935,-32428,-20777,8362,22231,10710,-13722,262,13903,-29581,10178,-22040,7397,11669,7097,-22989,22360,12219,19318,5393,2847,22374,7127,26887,14365,2656,19409,-20841,20612,2619,-6943,-21935,-19962,-30445,-9452,-1988,-23454,4733,19427,-31740,2780,24694,-17864,29663,11896,-9586,-20268,-2786,-18397,-22909,1522,-6050,-26,13593,32565,15486,4928,4840,31246,26391,-22968,11737,-10083,-26725,-6567,-20308,13003,15725,15675,364,-15646,-825,10499,-20265,-13508,-31990,9047,13632,-20967,3064,11950,6640,31931,29994,19108,7078,3303,8206,22115,-28603,14469,-7157,-4251,-8262,7770,3698,21252,-13999,30397,-18008,5883,-16704,-14101,-8131,-32008,-11676,-22908,20760,31736,-32114,20133,13181,-27143,27537,-32012,19458,20164,26156,-22881,-27370,14971,-20716,13722,-31159,13678,-4941,-27115,-15892,-24378,-13914,30231,25260,24752,31999,20565,10866,-23694,-5768,-20087,-3589,29431,20180,32104,4451,-31995,31614,-16738,-1710,-22073,17290,-4745,15601,8169,-6955,12688,-11147,30189,-2034,23395,10702,-19461,-20188,-21228,-30906,-27998,18976,21677,-18091,-31590,4201,-31850,14027,21117,4445,30850,-2725,-5726,25671,-29332,-18899,9145,27895,18552,-28829,31241,2567,-8661
,15197,-20399,17444,5995,-30864,-18749,-16552,21663,-13980,-22284,20474,18458,-24491,-17884,26428,-4158,-90,9001,-6683,-25904,5812,24647,17434,-18880,25450,17290,-7352,-22877,7702,-16010,9096,-11990,-16285,-11204,-15024,-17867,17544,-26865,4929,3678,15992,-10235,-14473,-10209,12700,-7771,-15583,12253,-4020,7566,-25847,-16648,30931,13092,-25928,3646,-5667,29582,-22592,11717,-3018,962,-11065,-1631,-1291,-12759,25571,19964,11827,9961,7207,-12787,-28451,17913,-15020,27141,-15710,19140,-13706,6032,13308,875,20706,-379,-22353,19179,-15115,5031,14425,4745,-8200,-17356,-14533,-4007,-14896,-27853,9143,-6873,23736,-18636,-189,1019,11430,20129,14320,9709,-15990,13477,-19414,2677,10022,-18273,24560,-7821,-2718,31308,27984,-8695,-8660,-26657,22284,3189,4577,-12088,19287,5714,19577,-30284,24924,-8017,10044,30975,14637,-26646,-25171,-31909,24966,-30070,15758,-11032,15879,-14201,9898,-29902,-9827,18338,-22047,22335,28741,19854,-11991,22514,19068,4750,-5800,10966,19193,6624,17194,-20157,18612,32167,5060,-1966,5365,-15595,18814,25253,-13767,-20609,8518,-15610,-30654,-18934,32725,23677,16643,-23091,803,29838,25774,-19476,-8169,4885,-14658,-29063,-16712,-1116,27348,-5823,12067,15996,-30858,26519,26448,23035,30640,23885,6972,-14882,-4531,-6592,-12841,26520,17329,15546,5657,-11395,8457,32700,3353,6136,-1488,-9217,-20837,-29591,-25911,-9698,25350,-16629,6377,9424,-25835,-15394,22567,32263,3736,-7097,-27647,-25116,-4862,27856,23911,3779,2955,-14794,24199,-11125,-4995,7027,-11414,-24382,-30940,-20089,8469,-13297,15386,11092,4585,-24969,3394,22267,-16661,17617,-4468,-12294,-57,15090,9389,19338,-10858,-17054,28304,-31730,-13480,-10114,-16278,17933,-13274,1640,1362,15260,-21313,11547,18233,-31358,-9660,-21900,-6549,7505,18381,-15609,13165,1263,9148,-7667,-4580,21366,-21538,22497,14074,11040,18242,-19917,22696,-16850,17113,-6431,9605,20858,-29495,-30176,-14108,14269,8512,-4084,-15581,-27383,24661,20380,-9356,-125,-24133,-11379,-4601,29603,-21254,-24177,31645,-11366,-18931,25925,19443,-9191,-325
11,31428,-30205,5402,-15378,13616,21351,15048,24226,11695,4464,-1402,5138,-18279,-1339,24855,25592,319,-23420,-3346,-3400,-9913,-24914,26156,24348,6995,-8152,-24651,-10367,4408,-32674,-30578,18634,28926,-23557,-18611,-18587,7197,-23366,-11962,-25790,-17191,-7433,-32367,4496,16171,-14372,27197,-8117,-14109,26673,22649,18101,-15299,9960,28283,2970,21894,6392,-29362,3372,796,-31874,10216,25886,6941,27185,-10041,27404,-27505,27076,2261,-31589,30088,5740,-10234,7551,20700,-24057,-28459,25729,20129,-31940,-5642,25750,2017,4534,-11352,12403,27406,-1158,23859,-30423,-21912,-31116,-27459,16619,7889,5302,-13507,19128,12115,-10827,-9303,20309,17625,14766,-2688,-970,-769,-32120,19837,-10065,15809,-18546,18851,24674,29946,-18249,26075,-23097,5979,18556,7076,-9371,2747,30127,8049,-9095,-29637,-14936,-2159,15766,17240,31837,-25063,10776,5004,-12844,-9002,23935,31897,-28755,1139,-6624,15623,-20111,7498,15066,-12185,-5437,30853,26611,22007,7991,19725,22620,-9953,11410,26068,-13440,-12291,7542,22975,16763,30836,-25419,11518,9105,-19842,11888,26547,5592,-26950,23584,-32165,-5842,-28716,8605,-93,-18661,2686,12621,-3317,1332,21633,23177,-6916,-19777,6546,-9334,7223,11680,-21770,2113,-29587,-27245,32100,-30975,10805,12014,-713,-977,-18899,26374,-19701,-17616,25717,-21915,23021,-26698,-27789,22150,13763,-12018,-5672,15886,-11240,-3763,-6465,-5386,-19419,-894,-7763,22659,4417,-25148,3320,12649,-8768,2444,29478,-14369,19200,-7840,13300,29063,24342,2846,27953,4989,5318,-30329,-16902,4801,884,4084,-3913,-31207,-11771,29972,-7700,-19118,20339,3289,-28410,-16146,11365,-25357,-2365,4308,9446,3481,-23420,-29998,-18903,23294,-4697,-13273,-6746,28733,2374,9572,-24022,-8833,-16984,-5624,-21943,-11602,-19537,-26430,-30863,-21438,-30923,29359,-847,-24820,-11921,-21564,-1698,27825,23301,1329,14736,-22846,25122,23634,-5646,29408,-28609,8992,8381,-11458,-21719,-9896,24078,-4577,-29803,-29706,21575,23693,-10760,9916,-13091,-26168,13103,7414,-31739,15788,-28471,12100,-17659,-26256,-4426,-18887,13367,4866,2
8080,22750,3845,-1470,-26489,24548,-17958,-2410,4977,-28137,-14856,7606,22902,31458,11763,-18084,-2391,-714,-15605,-31918,12067,18978,-26347,28304,-31628,-22612,24071,-22708,-8596,8970,-32272,6321,-27218,30973,-7176,24927,-8993,-31594,9164,-5643,-20764,6316,6950,19243,-19042,18712,-29450,-25644,-3117,8305,-18662,22701,14187,27254,-3651,-15968,-18870,-18565,-17062,-17517,-28257,-29813,22126,-30349,-355,12785,4461,-10083,-22071,-32225,-16634,-7594,23487,-17130,-13251,-17741,494,25227,19787,30024,813,5090,-8917,-7796,-21038,-12050,11277,-22321,-3065,-11293,-12268,1805,30532,-20643,2865,19736,20239,-6712,29280,24216,-31220,6562,-31378,4549,17875,21909,14923,23692,-1369,-32102,18833,-8506,-28675,13791,-14093,32299,16204,9624,-23190,3900,3330,-13833,17850,13841,27346,9226,-9575,31876,18637,-25038,-10127,-31420,-7944,5156,12823,22350,12385,-10634,11222,20308,-27855,20173,-7261,-28933,-7109,-13871,-18494,-20898,-29203,17705,20655,-9579,129,5293,16111,6592,-27303,-216,5137,-24915,-12498,-21617,-5395,-17206,-9825,-20345,12124,9462,4577,-27166,6838,-25064,3624,-6943,17002,25769,-22342,-22645,-3704,13209,8436,-22882,-7006,-17765,-31519,-25853,26663,23375,-11303,-28781,1570,24741,-8777,-10556,19829,-13403,13332,3422,15122,-24886,-992,-8697,14019,26894,-27090,3352,-16137,-28402,2635,-8290,-10790,-9827,-26003,15601,28440,20672,-3427,27361,13257,-23433,24472,-17973,-14726,5131,-5078,22704,-24635,-26617,18271,30825,31980,20847,15074,12799,-19810,7751,-25164,8565,-15778,26854,7339,19147,-25683,32475,13098,-26997,-32644,-20939,8183,-10166,-14624,-2736,-6090,14637,-2264,17049,2116,28829,20263,-22503,28721,-23838,30677,29109,29542,16480,-11236,-21932,6648,-30077,-14555,-13848,-26720,-19926,-4060,22221,23491,-294,27439,-26710,-28011,16197,-12384,10090,-18120,-16348,4480,12513,-3420,-1827,-25763,29836,-12252,-30159,6630,-25068,32496,-19353,-19944,-31184,-15480,-17873,23622,-21611,25855,-15584,-22992,-31892,-31500,-20485,25702,-4208,-6549,3391,-3205,-18370,-30742,18421,7226,-23732,-29142,-
31828,-15849,-29498,6360,25518,27469,-24392,-23635,14867,-28212,1068,17201,-3315,23290,11900,-7283,-28235,10667,25168,7700,13041,-23492,16531,-11672,-30988,-21651,29710,8550,25761,12475,-24684,-20682,29603,-3532,-27248,-6629,17608,-10868,-8279,-13400,-19113,13537,20277,-14906,27573,23003,21826,-3943,14909,-27262,-31974,-29445,5847,4564,-30024,23345,29272,2241,-27765,-31737,3134,-31302,-27481,26478,26742,-31015,8432,23029,31899,-12791,12879,-9640,9738,-13999,5888,-20627,-12428,-18137,-29080,-21818,-31686,-13807,-18585,3108,-25061,-21971,27941,23539,31499,-21515,-18089,17286,28082,-9682,-2676,23428,24188,-23411,17732,17098,-3697,23039,30569,11695,11514,-23115,20041,-20502,201,-2215,10385,896,-22484,21520,264,-21511,17504,-32667,3076,26092,8073,-21094,10803,-7405,-32728,-8664,25348,9253,-27564,-25796,32680,-26410,-10476,-15153,-8160,-12946,-27669,-22116,-16064,-11185,4261,-16581,-2993,-5745,12702,-121,9649,-4156,-25089,6164,3637,5084,13261,16431,2069,-20222,-27636,28664,-7879,5011,-18347,-17207,-14240,3883,-4619,-22216,-2966,26900,-9106,-15259,5279,29366,-20091,3871,26541,1188,-18718,14252,1500,16980,1644,15974,-500,-5125,-21330,-27698,2922,3696,-26075,-3557,-18733,-32224,-29767,24237,1504,17067,-46,-12740,4260,-10924,834,13040,11316,28498,-8258,-5597,-17418,2297,10747,9863,-15848,28138,-17300,-5029,-28222,-11600,-25154,-13542,25271,-24095,10083,-30517,-27584,-18899,-13902,-29765,28751,-3208,14749,-15020,20167,-11149,14918,-26433,-7941,-16087,-26733,-16134,1701,-15814,25843,-13629,23094,17047,-15614,26191,13331,-31981,-25393,14766,8856,-30871,-6812,13945,13516,-2949,12309,-7047,26348,-7337,205,-593,21522,-20877,-26100,31188,5723,10371,-8911,19281,22738,-2256,3549,5421,17222,-19448,-32027,31027,-3397,15562,12402,-11920,31954,2407,-30302,-2732,-15130,17703,-28106,30372,26284,-15220,-1927,-26943,18378,12104,-25352,15598,-173,12308,-16540,29634,14624,21830,-6413,10470,-25957,-4002,5006,-1995,25752,-8840,-6717,16221,23482,-10075,23569,-28437,21465,22297,-2743,-13149,21476,2
6881,-1131,29095,-23780,-28451,310,-2745,-7808,24520,-28835,17544,-14317,767,-19906,-20587,25832,-21152,-12588,-15208,22579,-17756,-11647,12604,14273,8676,26219,17943,15796,-26002,13949,-29792,11729,-4558,2190,-19522,15513,-13363,-851,-1907,-12989,26613,-16154,-6971,-8044,-9934,-10322,19922,-16047,11809,-22227,25907,19526,-9625,-1450,14833,-20898,-17350,13105,-1991,17931,14867,-24383,27009,-26864,6029,-12675,-15549,-25706,10299,9581,27869,-32407,23629,24553,-8991,-20666,-22255,1502,-22570,10202,31386,-26381,-21392,4390,32475,19763,-14351,-24768,10754,-17971,23745,4548,22063,29481,17252,-14595,17326,25800,31589,12164,14296,-20205,-5483,20270,-29366,22577,-31471,2865,18108,29136,-22215,28724,32008,-13824,20650,6798,5600,-18020,-28592,-5352,-763,-2203,28979,-28332,16103,-5453,25277,-16429,7508,31963,32485,-21505,-15193,-2576,-31863,-4934,13994,-31863,-11522,-4995,13632,4621,28525,7228,-3643,-22495,26311,-29033,3487,-30844,17687,29251,20669,-12803,21035,23825,-13355,-23118,-9717,-14230,-16140,5470,18543,-8949,-31100,25542,18166,-32310,-8962,26847,-12602,30378,15338,-2283,6550,-22949,-25422,-28797,-2938,31467,32048,-8771,5941,-12290,-13159,1743,-26731,-19226,12387,12045,-18210,-16119,9697,-32262,-28239,17038,1168,15330,-10041,16396,27754,13226,-19999,-20918,4938,-6481,10464,-3497,11935,-22982,3673,-26179,14546,-7487,10732,-2933,-25482,16515,21785,-4439,28996,-820,-19207,-7639,-10802,-3200,4306,-14846,24779,-10880,19482,25772,23515,8551,-31044,-29295,-8599,-12243,-30155,9652,29502,20372,-27879,31372,-19742,22231,-5617,-25683,27167,-17684,26097,-20918,25187,-7744,-1395,-17269,-29208,31915,-7072,-9351,23766,18087,22457,24688,-23935,-18393,-22950,-9471,-32649,27931,19715,22693,-14382,-9027,21696 diff --git a/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice33_input0_int16.csv b/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice33_input0_int16.csv new file mode 100644 index 0000000..a9df372 --- /dev/null +++ 
b/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice33_input0_int16.csv @@ -0,0 +1 @@ +-3121,-7905,21,19518,-483,-14769,-13837,720,-3596,32497,7715,-4086,23927,29515,-7108,-23328,14029,26684,-6068,9355,-4427,-27245,827,14284,4951,-25772,-22113,11720,1720,-23070,16910,27640,16561,21028,-2943,-24380,23762,-14876,799,-10706,-13005,-26968,-5601,29406,-7601,24811,22137,-19459,-17950,8560,17070,-12068,-23248,4796,3434,17814,8446,9790,13383,-13670,27734,11761,-25911,32218,16438,22376,-24975,744,-20978,-2211,-31618,14986,12936,28496,15942,26031,16654,-27166,6559,-29704,-10859,-2359,26638,16830,11677,18911,8411,-29565,18864,-26238,27093,-28850,-9195,-23166,-11394,-9449,30553,-1858,-9030,14681,-4204,7887,-13638,-8632,-6820,13971,-8044,-25616,23946,-23951,18060,4025,-14239,15706,25000,-17757,-7236,12012,12513,-24088,22174,15490,-18936,-8696,-28515,1854,19803,-5099,-23290,-20478,2525,30193,-12169,28361,-300,32449,-11121,-28768,-31232,18761,-24022,21452,5521,-12192,11776,23723,-31440,7952,5462,-12540,21169,323,4094,-19443,1424,-15625,21006,4309,-21722,-641,28626,32170,30629,19765,17002,6765,-29706,-15825,15277,17323,17929,31869,-3062,27016,31962,-9878,-2870,18286,-22316,-32496,-19899,-5733,23658,9978,31912,-27980,-87,11726,-19258,-27835,21432,-12876,32286,-18023,11411,-11372,21463,2193,16284,8592,28357,1190,-23575,-32625,-8992,-30427,-31331,-9455,-24815,-10011,-7720,25092,5694,11395,-22210,26506,21799,9775,-30373,-17713,17967,15036,-3356,-2135,-18095,348,-28037,14966,22219,-6924,-2298,26777,29726,-31180,29762,18731,-8902,7662,-29106,-26746,-15610,-5304,-27753,-8110,-24721,-14903,3347,-27672,-31462,18174,-3871,11068,-11170,-615,-28169,19167,-15263,15415,18573,-28889,-29895,2748,-26918,28239,-7204,7785,5265,6809,27328,10657,-11697,9938,19538,21578,24302,-11305,23785,11380,31559,20622,-10486,20401,14200,-23150,-21675,8698,8828,-10623,9349,11585,-29024,-8796,17098,-3975,29939,-19047,3318,11161,-20885,-29440,-32306,-9069,-24581,-9146,20124,-13389,-6763,12721,-
24082,-30783,12327,12043,7178,-7204,-607,9963,-8860,-27606,9449,626,-319,-13805,30810,14274,-9192,-27435,-30847,-28658,-5641,-11562,14184,12297,-24678,6921,3988,-25164,25293,7480,18466,12968,12007,10833,21498,-4999,-22649,-19834,-13103,6264,5214,7070,32517,12282,26670,8240,-31953,14918,7742,17424,-3662,-94,-27042,15899,7702,-13795,-18343,4398,5740,-16684,-9345,-7589,19331,-15479,2278,14754,1811,-8212,-20147,21352,-32090,-27166,22984,912,-31195,-29430,22284,-11042,7356,15010,-20378,1945,-18802,13154,5012,-20897,27494,13862,7289,26012,-28655,29686,-15363,-26535,16099,-12926,3461,11815,-13957,-29371,28985,15259,-15525,10486,25670,29137,19611,-13488,2538,-20226,-10921,-22474,-21261,17188,-8366,-11598,-5877,-26134,19445,6088,6168,26299,-5159,16018,17413,1244,1851,23108,16976,-27028,14552,-7912,12328,-20569,-18094,29292,-27850,-17772,21989,31442,-8115,-2693,8769,-27455,-32291,18681,-1087,6067,-5193,8430,10529,-12564,17516,-25551,-14379,8768,-8635,-18427,-26603,2197,-13243,-30361,-947,27396,-30645,8409,-17680,10980,-7483,15200,27607,15061,-4028,31536,-402,11553,28537,23714,-7102,16840,-32586,11436,-29013,4870,2946,15783,11771,-3220,26799,-30170,-11437,-32471,16068,-7098,10154,27015,-25716,15667,-19466,-16913,-29403,17897,-4156,-9641,21535,-4861,-4330,-7678,-4635,-20435,7314,-14792,14526,23924,-8341,-8932,26778,-22687,3648,-19735,-31166,13479,-27370,-29069,25449,-11798,7281,766,23054,3700,-19857,-9290,-19439,-22305,22138,12333,15148,-1066,31761,-31707,2949,-18876,26949,-5017,10150,11672,-26717,-7944,-4979,-22216,-17795,32541,19578,-8351,-17726,2123,30788,28711,-8173,-11780,11814,15607,-9882,-14277,-7053,9206,3877,-29571,10827,22945,17904,-30961,26336,-7362,-12564,13969,32597,31612,17622,-14158,23000,32644,25058,-16538,20201,25041,160,8701,24582,18061,-26620,-17717,11275,11618,-28733,17596,-24323,7719,-6824,-7058,-30327,-1623,24596,-367,9683,-26782,19018,320,-14850,-8769,-1016,-32112,5215,-5658,20740,12134,-23991,32027,-24263,-29961,14171,7773,32216,22268,20526,18949,-18036,
-18289,-17746,2023,18668,-21770,21863,7179,17762,9977,-14376,-2857,-21076,22763,17605,20855,-11764,-24183,26908,-8034,14951,5200,4003,17644,-6566,17867,8687,8708,-24808,23836,14823,-7017,-759,-25173,-13300,24605,-23518,-1795,-6102,30379,-32623,5843,30279,7045,15303,-4024,12872,-26963,-325,-14611,14804,-857,-28996,-15294,-15720,13512,-13056,-19780,3775,22758,15933,-29492,-17163,-10877,-17376,30667,28693,28092,7958,14752,3516,-30272,-19013,-18997,5668,15232,-23437,-3216,31488,2762,-13291,13247,12154,-16750,26744,18000,-16079,8660,-27469,-20315,-28843,19847,23481,7235,3421,3616,-9526,13792,-15721,17840,11437,-17043,-21909,10888,-270,-22075,9258,6145,16160,-15809,32703,4071,-29810,-5198,8037,29448,-30612,17736,20501,-9138,18045,17762,30504,-7228,-26670,-28025,-17895,20868,-31763,1133,3242,15459,-19053,-24365,-8509,967,6033,25592,32576,28749,24320,-19249,6328,-12248,1593,21426,20704,7253,-32052,31820,1049,27837,15360,-17955,3144,17317,-24560,18736,30450,-21738,-8945,13066,25783,-32279,-28860,-1837,15307,14073,19912,18352,-24770,27784,-12214,14864,28290,32355,-23829,29534,16067,-15782,6961,25991,-20012,14070,-21743,-8881,-22789,17729,11063,2778,-32437,6901,-5518,23904,-17929,-20677,-17859,-4293,-28085,-4094,-8946,-22244,368,-32595,617,-24526,-12098,4305,10941,-19937,23919,25828,8986,21443,-16524,2667,32709,-11723,-31829,18824,-15000,-23466,-11489,18648,5678,21872,-10710,1774,4206,14404,-16574,-4405,-14266,-744,-29126,-17449,-26363,-6455,-30516,1324,30252,-11153,2395,10617,25040,-21911,1965,-4716,17301,1451,-27753,-14218,-26363,-5782,2903,14897,24809,-5107,-21725,-1132,26620,-3848,19398,-10641,-29638,-19668,-9783,-10760,11499,-16535,4574,29895,24207,-15908,10766,25480,-1878,30793,31860,27729,-3019,4262,-20889,-15405,25723,18830,-9735,2200,19989,24939,12721,26471,-28649,-25191,25229,-7113,29222,-17242,19061,25597,22667,7335,-9533,-22883,-18545,11763,1340,-22409,-13996,-24001,-9093,-8800,-1278,-30327,19150,22661,-32718,6289,4054,-18053,-30269,-4916,12844,2277,-17917,14402,-9
059,11068,13278,-26758,1918,-9208,-19387,18361,21893,19954,-17506,6799,-3488,32059,-11463,25023,-4178,26505,-20565,-15320,-21399,-14537,-3477,12845,4719,9797,-31720,8623,-3362,30213,-31325,297,18824,11789,30918,-25165,-12785,-30196,-9987,7714,20185,31945,-23232,31929,27411,3188,20567,-24316,-24073,7188,-13143,32257,1481,-12300,-16807,24264,26615,21418,5937,-25554,-22018,15005,28920,4425,4360,16470,25726,22001,-4646,-12754,20278,-19420,26168,-6635,1183,-23248,25995,21799,31233,2812,18343,-16318,-4156,-14941,28688,27032,-26710,-20543,-21565,-20395,-17283,-12183,-8588,10788,10918,30080,-26008,14580,15565,18218,21760,-6283,-29334,9360,15936,-27449,-30742,25579,-8173,-12834,-5896,21716,-21647,14584,31283,-29424,15378,13704,19159,-20648,22252,-6396,-13649,800,19148,-900,-30951,-7885,-32518,30928,18585,-6907,19710,5730,26250,29252,1420,-12657,-20057,-29188,-5554,21339,-10861,27055,-25961,-15521,-2604,2220,-16117,20561,16437,-23828,13473,-26956,9536,15123,-22384,-32303,830,23501,3103,23950,18771,-31046,-32367,-1283,-23019,783,17019,-13542,24173,-3977,-15557,22319,-2253,15165,11031,-13814,24090,-21904,18749,-6773,-30871,28636,21566,-1331,16242,7031,-10441,15693,8679,-12619,5339,25220,30550,11821,22611,28166,-2957,24403,-18055,-12837,4572,-12046,-15780,27651,14177,-1648,-26063,12168,4077,814,-26737,-22067,-5507,30921,-11271,42,20271,7555,-16376,-154,-22603,12361,3898,18273,-17506,-25120,-23809,-32479,7858,1091,5004,4985,-21304,-12472,9014,25682,21685,26715,10970,10221,-17027,25383,-19660,-7911,-555,7428,25308,-18796,-869,31398,11220,-5476,15588,-15296,18954,-32439,-2168,21639,22002,11382,1460,8597,-5893,-31403,18443,-22711,4309,29094,-31667,-30344,-9728,19088,-19779,-30745,24894,-2175,-12409,-8744,-5022,-15213,12734,-705,22943,-3736,-31491,13371,-7620,-17423,24365,25530,10435,9831,-28617,17819,-4005,16794,-1011,21291,-18158,16763,9200,-28625,20795,22722,4058,-10822,-28284,-30748,-32725,22540,28216,-1003,29741,-31555,-18275,4279,-1122,10707,8279,27059,-19576,16019,8379,26926,2
7505,-15873,-2901,731,29850,17352,10027,21264,-26352,-23788,-28221,-18702,1836,6615,30353,-1293,-30490,14274,6276,25584,-464,18071,21723,-13375,-2364,-12179,2254,4198,-14840,9715,636,-3658,-21363,-6324,-21273,-28191,15342,-14516,31144,-6504,-3974,-31332,3265,28186,22029,-9005,-16370,6628,3476,-29997,1347,-13057,1170,-24915,21831,24597,-9430,-30194,-11396,9088,-4105,-30033,805,-1586,-14525,-31352,24152,851,-24451,-9694,-25807,-14139,29597,-31577,10969,-25696,-11922,-13278,-22645,26363,28936,7391,23169,15941,-5288,-6264,-28341,26609,31590,-31795,19031,27908,-11002,26416,-27113,13943,-6922,-7887,15321,-2855,-22715,-344,-24147,-20728,-30955,-27467,26358,-29479,28548,6328,-6266,-14191,8517,22947,23974,-25533,-17262,-8573,11462,12978,-24654,-17219,-27779,-17727,12656,-6064,31197,-2878,23159,-7910,-16697,18227,-5997,-13751,13862,19985,9153,-28735,-25285,16697,24820,-15316,-15185,79,-22992,13560,-23994,-30677,5533,-10296,-17104,-9238,-7121,28597,9382,-3939,20066,5137,29254,21610,27912,-14379,20194,-27245,-23649,-2757,-4390,25641,-31599,26553,-12371,-19267,13401,18501,-21047,-2077,24966,16520,2602,-29730,25692,6304,-31789,9688,1997,-6321,-13218,15225,19428,-29121,17619,11904,9616,27152,-25679,9948,14539,-1332,-14651,-2413,-10594,5633,26092,13563,12693,-9917,20170,-21758,-23188,20995,12302,32241,24660,-9251,-18339,17337,-18693,9034,-12805,-28194,-20107,21261,5763,19362,-7554,17158,9140,-17276,25166,20026,-2766,16647,8453,23376,-18872,-2191,-4186,-5919,2201,31423,-3582,10206,-5048,-5663,31756,-28507,14656,-28774,-13521,-9565,27035,-22457,29316,8656,-2037,-20873,20872,24888,7672,18903,6409,-9396,-30489,27906,13251,-30089,-20788,18309,-27898,-19103,-32562,15158,191,-32221,20023,-9195,2659,-504,14638,-17560,-16669,-24010,7325,-10958,-21134,-11225,-22930,7853,16848,-11721,15123,20043,-30669,-21536,50,-20924,23115,9182,-7344,6324,16023,23163,-1438,11925,-20216,-30947,19756,29751,2419,-20288,14512,8042,11573,-17243,-19722,-27332,5213,2484,-15641,-26331,-2213,20279,21232,-6734,-21854
,-9536,-12218,-21193,-18428,31705,-23659,19988,27445,22428,32620,6262,1442,6350,16577,-6622,18059,28321,16759,27658,-23548,-5975,13914,-30593,-39,-2555,-28925,-4033,2530,-17885,-15271,-30686,-19878,-24694,-14918,-32612,15872,6330,18149,27356,-6180,16352,-1700,27051,30429,-5528,-8732,21903,23626,-8569,29228,3160,-23834,-16911,26153,-22020,-13808,13023,17654,17910,-3648,5285,31366,28311,12581,8033,-3689,-9786,1866,-26928,18432,26163,-10939,-425,8216,-26058,-23526,-11938,3890,28270,-9326,24626,8752,14720,-10085,11127,-20269,18283,25541,18915,1151,12137,-21937,-26329,-6652,13581,-25477,4679,4401,3887,27071,-15743,-27542,-7184,15061,-14753,-2256,-24813,-13157,-25669,1126,6872,20427,-22844,-8846,-28535,26384,-14937,-24948,-23948,-13048,-22720,7766,29620,-20247,15445,17985,-30139,-2040,8501,-20056,-16898,21550,10148,24414,-28977,-250,31785,-4325,22154,-15801,1092,-20995,-12661,11192,29903,-18037,-31504,-18192,-21602,23788,-7700,22877,-29407,15978,-22071,10177,-12130,15895,9457,29763,-791,25848,12652,32496,27752,-24549,1933,30311,-23423,25944,15279,2833,-28290,-19926,-11665,25652,-11456,-8480,-15645,-8232,-5881,27451,-24802,-13662,21133,-16309,535,-31125,-20898,-22161,-25868,1341,28458,-12146,14061,5977,26422,-12003,-21722,-29803,-27928,-19970,21719,-19275,29479,-8171,-21958,23460,-9092,-28991,-13937,-27080,-26649,-5515,-29316,-8957,3469,-24236,28053,30959,-22798,25328,-19910,10846,-4346,24912,-12805,9542,9147,17284,27409,4136,-4265,9896,-30419,23182,-24420,17833,10709,12697,-5625,-9737,-10807,-20934,7927,10841,-13619,22673,8443,-4726,-19861,5165,5026,1148,24891,3784,4826,4816,25406,30231,-9875,12899,25219,-8307,3366,-29669,4675,32634,10850,-30136,9858,-5919,1532,-2833,10791,-24591,10776,7363,-21972,498,-1990,-2966,-9560,-10779,17570,9116,9233,-20771,8790,-30520,-14316,-2792,-15764,-17736,19153,1947,-7977,-28792,-13216,-5050,-4687,-30737,-28326,-6159,-7886,3070,23926,3323,-4420,-27664,10584,-17716,-27700,-4735,497,20584,18279,-20929,-14509,-20390,-15266,-30452,5661,5769,149
25,24469,-15381,18496,28332,-10662,26892,23507,-8438,-11976,-30126,-24973,22581,-24900,-22082,-2248,-4203,-13597,16786,-14711,3426,-16614,21137,11110,24554,-16216,-3415,-17755,14753,6830,-23878,27432,2173,-14134,18206,-22825,6108,-11329,18923,11145,-8711,-32296,3197,31949,16155,28929,-26051,-8457,25485,3717,-6752,-7409,-16142,-17949,15592,-31622,-7857,13809,31937,-7829,-21574,9775,-1061,-6558,31382,14429,5460,-17674,11254,13553,5759,26221,23766,8109,13346,300,-13861,-5682,31657,-8607,21288,-357,-16112,-18155,-4574,23770,15797,18582,11896,-12785,27003,-6793,-8955,3501,-12418,21864,-5395,24236,25203,8890,10499,31744,-13749,-19632,6104,26684,-22961,-6575,-30745,-20763,-145,-30076,-19586,-17865,-2884,12654,25643,1288,10190,-3162,30062,3401,26012,22085,5920,7720,-424,-16688,-9788,15002,-24411,-9907,-12184,-19137,23862,30703,-23021,12184,-8834,13916,-17184,-10043,23991,27154,-27911,-9056,15544,1276,-329,-12388,-18685,-30371,8055,5310,9710,7941,31663,-30055,31258,-11727,22907,-13542,-18115,-30619,-27280,-6023,31105,7530,26544,6076,-24669,21002,28663,-5095,24482,24296,-8098,5229,20507,15397,10181,-10031,13927,-2309,-5218,22079,13069,-12649,-20613,30772,-22557,11926,-14521,-8742,7777,-19998,22789,24959,-8264,-10824,2151,17507,-20709,-32197,-5614,16600,8951,13974,-19359,-6396,-7544,6091,16387,23587,-12553,20748,-309,-593,2204,-27312,-213,7889,30255,-28322,21378,-22716,19073,-24133,-23776,2863,-2830,15911,-16548,-8857,29451,12288,-3326,-6085,-31748,-16510,-32183,26848,18292,16090,31039,-5913,-22722,29739,-13789,22772,-17833,17098,-5214,19388,438,109,-26669,-5469,-21380,-22601,11896,-15834,9641,-16045,30186,-11094,1915,9984,-15411,-335,19641,-32026,-13247,-20847,22861,-11139,26577,30538,-17118,-15129,26646,-24393,-6671,2048,-26344,11268,10753,-16506,-13780,-18704,-192,-27168,10820,-689,16212,-6507,-13969,-14052,-3448,-30938,-1046,-568,24778,-21928,-24170,-1305,-2725,-1747,16590,-32249,-27247,13229,10421,-30466,23557,-31714,-31394,9521,-12228,22693,-2275,3973,22461,16463,14501,3
1661,-23504,419,-28735,-588,22555,-24152,-24806,20889,-4925,-15490,-3924,7019,10851,-17971,14366,5014,-6393,-31744,-21514,8113,6852,-17905,13913,-10286,18524,-30024,-8623,-26730,16764,-14101,-8993,-21502,27677,-17864,5198,31086,-22361,9565,6658,-30171,24218,27034,9394,5827,-5535,4592,14700,-18337,18942,24440,-13169,-24185,-21888,30347,2434,28637,763,32464,17942,9162,-23747,3024,21982,23222,26857,3485,-23006,-14390,24565,31108,27070,-19759,-2204,20935,-23034,-26302,-6683,-3092,-32027,963,10435,3221,13581,11694,-776,-22006,-9412,6855,22491,-18059,6964,-11288,3709,22494,-13967,19134,32462,-17566,11170,-30371,18629,-1824,-3796,-20619,-25919,28261,24749,18003,-10649,-412,-27837,22498,3153,-8214,-29368,-32645,-10028,-21605,20829,-29510,29654,31483,-22706,-9403,18223,-19131,-14942,-5748,31811,-20466,-4706,-6327,-14636,21573,-2631,-21702,14904,32096,17633,-22351,15011,25610,998,-11629,-29614,-30009,-14533,-4669,19339,-17155,-20848,15320,-13799,-23061,22273,17415,-4542,3528,26108,-634,15704,-27447,-23561,25786,2550,-26154,-28946,-13879,25782,-27265,-27648,-19664,-13817,-9563,18112,-26444,-3359,30132,24807,-16520,-8936,-11134,-24789,28820,-13136,-23152,-26105,-2986,5379,10,4750,-9085,-20421,14650,16184,-29362,17265,-12438,15570,-22602,-24202,-4955,15536,-32250,24864,-20607,-1975,-28778,19315,-6576,-32186,-13061,-16883,3799,-26375,19045,-17776,-9787,17176,-1499,-10272,-5983,29937,31142,-26313,32519,3446,2152,-1139,-23065,13945,-21509,180,1670,20482,-7759,5346,-13485,27687,-23274,-13392,20567,15715,22458,31954,-10719,15856,29485,19381,3950,8213,10447,27746,11311,-7233,-31417,31690,30113,-25067,23687,12102,4316,17371,-29478,2834,-3124,11097,23638,23989,-31811,-2634,-15018,-8203,21078,14936,-24815,11508,-9575,-23606,-8484,14296,-13114,-8489,-31620,-31240,-32555,29527,30367,-15672,17663,-7298,-28290,11795,11020,23297,32059,8556,26280,21855,-27541,-9063,-30648,25543,-13741,-9430,-19385,-26996,-20047,-23093,-19621,24146,32491,-5694,-27827,-9618,28856,25782,29856,-20900,-25418,12676,
-12612,24100,-13174,20208,7598,-7841,21103,-12211,980,-11524,-26807,25669,27251,-6921,-7271,-26566,-15002,18237,7708,32695,-25157,-9577,28111,2442,-31026,3194,-19501,-30741,24541,-20910,29987,-9976,-91,-31232,28960,-7393,-29423,-30107,-26541,-15195,-15727,-1796,11035,-31496,-7262,3498,4834,31935,22128,-15666,-13362,-4457,-12057,-2847,-1118,-16267,-6641,19003,21656,-31812,20745,30465,27405,-7124,-10515,2829,-31261,-27032,-29121,30488,32579,-18031,14044,-4040,2930,16590,21525,8150,3546,-28425,26886,-9722,-30248,12640,9121,4845,3656,-19510,23213,-23950,3503,7003,-27142,-20349,-12148,26139,13873,-1828,-3920,22745,28679,-13628,16280,-12065,-6429,26707,-4185,-17402,362,7645,30041,20843,29798,-8623,-27737,-27806,-32755,-18626,-3024,-17602,-2660,-1108,-31788,-31805,-26574,-2735,-31992,-13369,31923,-8227,2156,-5952,20812,-11714,-5239,19475,-9337,27618,-14873,21099,-12563,1809,-25060,-1139,-24877,1743,-21053,-14380,-7732,30106,-25181,5208,9184,-3765,17638,-31415,-30683,-23410,-2954,15432,-32030,-13255,21750,-7207,9264,18255,10726,-9835,26996,1620,32366,9140,1580,29912,-15420,-16842,20667,-3815,-29586,-25701,-5586,-21931,13244,-16766,16536,-17789,17567,-23448,30273,13756,-22194,20613,-6330,-1378,-14929,2144,-3841,6872,4639,32181,-3298,-11797,-10041,17453,23994,29011,12761,11678,27789,11089,30850,20552,9891,17955,32332,-5428,31273,12570,-8765,2134,-15718,-29127,9295,-24830,-5994,21549,-29571,17196,-15505,-21343,32368,18012,25413,25454,7393,25288,-5442,7944,-28796,-29830,-3392,13067,-19124,30633,-18119,-18196,16426,10364,21012,-17454,-11187,31383,20461,-2139,-19287,-14315,7691,-16862,8634,-11540,-13036,20509,15849,-29573,13422,8354,31303,-14147,-12708,6488,-25080,-17046,28405,29576,14280,9856,26891,10712,-19293,-14006,-5747,-138,-12520,-1943,-11749,29431,2165,19087,-22543,-26603,27598,-19178,7922,-2249,9079,-21831,-25488,6079,9208,5992,15400,13300,25337,22547,28173,27147,2606,-2522,-10606,-14240,-11058,-14315,-22008,-4184,-14392,-25331,-13640,9226,-10553,-23754,-13400,22519,-498
4,2724,11764,3512,28961,-5179,22512,-28878,-32037,24919,1986,32267,-16678,-3693,-9452,-19586,-31125,-3547,-27535,-25824,-31621,-30219,-26614,27954,2027,2506,-5114,-4434,15150,32176,-11072,20735,-28270,-7013,-4026,2256,9603,-859,-21592,-178,-24702,7110,-17909,23549,11973,3297,-4049,-31108,-24983,-20779,9836,-12719,8260,-19906,12490,18035,26685,16368,14350,-23936,-2489,32761,-23270,-16455,1590,-25641,-18929,21548,20725,29382,11104,5265,2911,-6110,-4476,-6501,-7973,-25419,1947,-11305,-31566,-5895,17381,-17671,29006,28186,13312,20702,3791,3792,-22852,31326,-16065,9975,-23446,-16619,-2201,17731,19415,-23458,-14349,5233,-3801,-10581,-29514,31273,4147,-21808,-20394,7932,31633,-5044,-32510,-23045,-10290,-29203,6640,-30499,-6482,13682,32633,13278,11348,-18241,11578,15848,30898,11194,-10243,5217,-8597,11709,18451,-27148,1776,-28995,-12269,6174,-21712,-25376,5363,5135,-20002,-9550,30778,-2191,-11820,-21139,11833,30905,-31093,13897,-26329,3110,-28258,-10215,18169,32456,8343,6938,5946,-12675,-20683,-14849,23756,1742,-13069,-6664,-8154,9532,-19335,-15585,-12447,-18256,13709,-12930,-15644,728,-21716,-12043,25481,-17108,-11590,28370,28365,5222,21923,-31858,22637,13667,23246,11714,-26821,-15750,-8709,14602,-24419,-19849,-28981,-26705,-32221,32077,13999,22824,27488,11355,-15574,15178,-32684,21007,11423,18799,-30178,-16969,-7300,3533,-26923,-29763,-2551,29602,20363,-31093,-15043,703,-17011,-29021,-28340,-30992,-3310,-21029,16871,29403,2638,-4149,-16076,-705,-29765,3820,-17384,-13624,10977,-17061,21725,-5608,14984,2809,-7679,-5926,-10215,7509,-7505,13675,13348,8038,-28067,14027,-11800,-9209,-4674,21509,1966,-18396,-22136,-17327,21969,10161,-11442,3564,31551,-25337,676,14240,-8613,28513,12527,4754,22683,24595,6730,-11360,13514,19636,12331,31128,-25149,-25411,-3994,-13233,-8217,-22293,-16571,18869,-29560,154,-16045,29342,-20822,-22179,12497,21359,-17791,22842,-4873,-18324,1493,14759,-11719,-7147,18557,-25764,21090,5726,-20897,-23416,-9369,30677,27435,-3941,15191,-8872,-7501,27714,-3442,-
13127,20381,-1060,-30082,28566,14914,-13748,31192,10690,28340,2323,-4187,-6952,22851,-22619,19122,-14529,26018,6837,-31486,30789,9600,4493,12557,7483,609,24750,-9381,18517,-7420,24408,-5246,28928,-3089,-31533,4125,27661,12243,13531,-12297,-23753,26078,-3958,27344,-5596,7009,25445,12501,14873,-1048,6558,-18850,-17087,29038,-16399,18014,14538,-25727,-11564,-16084,-28905,-2370,-1912,-30396,-8544,-25752,-8902,-3096,-5155,2109,16040,-21048,-28923,-11922,27676,1760,-5861,-21795,13083,-32135,-17976,3133,-23340,15582,-24999,-27929,3508,21492,15941,10854,-22200,-24212,-14579,14656,-21121,2308,23400,-13295,6003,25196,2309,2335,25372,-23709,-10539,-6682,9600,-10868,26546,4025,-1181,-5442,-1607,-14471,24171,-11998,18877,-10581,-30797,-31851,31448,-1258,4701,17588,1533,3476,-20923,27711,7641,-24627,32403,-16406,-25871,-27893,12952,29205,10547,-15049,18542,-25006,-25412,-17409,-8463,6826,-24504,28883,15813,2561,7886,2971,-4776,-24133,-28949,7958,13513,-28822,-9408,-2697,-828,4713,5635,-8652,12325,19381,29644,-24177,30044,-28760,-14594,15517,-26159,15086,-24109,-10904,-14290,30014,-25507,6788,-22468,-17017,-1121,-11614,-17863,20161,-18072,-29072,-25938,-767,25236,-11235,18939,6917,-17669,-8400,21678,-1996,21915,-25835,13947,15354,-19801,-15199,-7229,-25644,32597,29752,1358,-17399,-13364,5481,-19443,27809,15498,-420,-25472,12054,-28739,-16664,21568,-10376,25135,23681,29239,14723,-3941,10295,-3793,26924,-20173,16389,619,-2041,7866,-14898,8104,-26580,-14689,-4797,-32200,-12328,19387,-3692,-32522,16808,13602,20077,-23855,-28106,12079,22264,-16693,6599,-27380,1409,-3040,26291,-5041,-25997,12234,28027,29696,-9261,5382,4636,12708,9392,-20427,30697,25845,-27889,-22047,-27512,-20861,-19672,-12602,27161,-7772,25962,-9473,13361,-21963,6943,-8984,5828,1024,-25638,3123,-24062,13748,26318,26674,-25000,18123,13661,6985,-6728,3045,-28505,-10840,21350,3678,29414,24214,25321,18924,-4442,17809,-17683,5802,-15237,-15076,-13823,16106,5584,1201,-19480,11625,-7385,4810,12381,-26895,-18739,15890,18154,-5
744,11783,2311,21736,32626,-3734,-6790,-18376,3262,15286,1680,15712,14554,-9594,-28319,-1039,11415,-29517,-19466,9571,9594,18503,-32709,-2462,-27509,28166,9352,19763,13240,-19878,-18485,-9429,23380,-2757,8866,17085,-13912,4259,905,14886,13418,-14818,-26595,-23177,29595,-14668,20399,10386,6950,-15315,-2769,10574,-28200,31415,2356,19832,3065,21065,-1319,7271,-56,-1414,-17712,-31725,-31950,-31449,-26649,6091,25652,-31331,20626,-9883,8926,26700,8133,-27643,18490,-15068,-30598,29231,-27509,26424,-18627,-2273,14642,-14049,-5747,-14971,-11442,32405,-10652,-14211,21632,6176,-18850,-75,6837,-9731,-10642,719,-611,-32164,-32547,30949,12744,-18414,14582,17789,10677,-1864,-31782,17187,-25796,21277,-6049,3317,6299,20758,-22439,-13846,19499,-748,-32329,-7602,7901,149,1769,-492,-10883,126,-29107,7539,27682,19358,-19243,25622,11881,4061,-8923,-24705,8080,14253,1862,25367,-13888,-25739,31056,14342,-18493,-17388,31699,-26663,-13917,-28114,20442,30000,8178,-24517,-32088,-20664,32045,18261,-25605,24724,26937,32273,-22027,-12607,-1550,-29823,-15037,13454,-32134,-4074,-20485,-31066,25812,-15089,31890,10294,25058,-17125,32165,23784,-13015,14827,26408,9636,23887,18795,16655,5096,27796,27523,18074,13139,-27513,-1755,23644,-11264,-24041,-21080,-10689,-23773,25,19973,2403,3766,2692,-26463,-8896,-9097,20554,12013,18185,6702,12750,-27282,-15115,-8257,27835,5782,23487,12028,-24690,12129,16115,-30452,-24330,-12067,-7327,-17933,29794,-18098,28673,-4155,7806,10934,-3216,402,19625,19863,-30468,27444,5560,26159,-1987,-26788,3505,21510,-29339,-17913,22800,-5031,12194,12886,-8604,-22033,-2319,-12679,-1136,30986,12947,4,-11679,-5558,-20577,20629,15925,-25968,21227,20908,6548,7028,-29046,-652,31066,24340,20446,-30796,-24958,16647,-27715,-14446,4735,-10140,28711,21595,-17619,-1932,18480,31444,-26534,8731,-797,-20528,27676,-16461,11155,26196,-6245,22631,1948,-30459,-19350,-4673,26728,-18249,-26132,9100,-30443,-1695,-20975,-11178,-26242,15355,27986,26763,-15623,-3643,-28372,27887,-2618,-22405,-7346,-17388,13
664,3593,-10923,31741,31979,7660,24744,15993,-2962,21020,5730,15628,-30570,-1522,12653,-18562,9726,27260,-1130,-23106,-9174,-7970,12281,-16838,-28396,29387,27741,18408,29956,19079,13366,-12189,-301,-19323,28876,31626,25835,-25601,19989,-13383,-26039,27855,21370,27495,17957,17116,-4876,-22611,-18080,29800,-4095,-21991,-23983,4956,14215,-3559,25161,-5380,5848,13416,20377,5166,-27542,-18913,4070,-4681,19260,-19599,-8721,16967,-1110,30840,27475,-9480,-26364,28406,-28158,18168,26643,25540,31799,14008,6188,-12008,13347,9980,1234,12745,-15546,24823,-20923,30840,-21243,-14846,12793,-6880,-3747,-14469,32253,-6363,2037,-7643,20014,-20197,-3102,9174,-18830,-28677,13394,30843,-834,-32107,31504,-31589,10641,-4679,15088,-5676,17779,-19598,6129,15835,-31327,-29783,-10409,-9977,32454,5294,6406,24447,18857,17523,-22103,16471,-10633,-12985,-6701,11594,14535,21225,4662,27114,-30836,26349,12251,30284,25787,-30743,-13015,17079,-16740,-17383,22341,15497,-3702,27033,16757,13058,26949,-18072,-31747,6489,-17690,12398,854,11548,-28293,-24733,6771,1416,-6575,-25180,18651,4804,-5386,17823,28161,-9744,-25118,4019,-6387,12954,-8804,-6207,6079,-22309,-31035,-1061,17570,6693,-10959,-10749,-1154,18599,-6197,5779,18110,-1925,-2623,20990,-28929,-15288,-20286,-8512,11224,-4716,-26333,-21950,-18182,-16796,-12409,-18523,23498,3607,28988,-12123,-8050,27607,-28052,19465,14905,19826,-7140,-18123,28142,-13721,8920,11152,-5446,12806,-17834,-16196,12728,-12112,30416,-27108,-420,-6966,8262,-18249,-28728,-23805,-9432,18044,3604,984,-4209,-22279,-28394,25413,14773,17831,-22209,-29813,24143,-24951,-32294,17379,13576,-8123,-8819,-20064,-31744,13679,3498,11879,32213,30174,24840,-14179,-31234,-29847,24272,24249,-29905,-18833,-5258,2055,-30436,1200,-13256,19055,-5655,7831,-6084,-932,-15820,13719,-706,-14639,30058,-31328,-1000,10475,13528,-30788,-7643,-6039,-18474,-26547,20961,-29033,7565,19291,2078,-13294,-23733,29024,7491,-5671,-17013,8938,-6092,-1343,-3613,4096,23507,-23897,29605,20400,-29386,2246,-10994,24763,2663
1,-11648,28771,23782,-19825,13846,-1164,18152,8400,30503,18730,-10770,-7403,25514,22346,-29451,-25385,19899,10997,3041,5522,31671,-25608,18317,-4070,-17336,-30254,-12561,-30376,-23860,-21751,28388,-27340,-22398,-13263,-28537,-12827,23688,-25894,-6031,-3782,-11641,-11925,-19676,-15859,-2403,6444,29978,-10965,-21210,-2688,-11010,-27534,19235,32091,25525,-30786,-13492,-10729,-31566,13704,-28830,6592,18313,-23825,23252,20750,-9570,-13518,27514,-7072,3581,20481,29857,-30249,-21517,9195,7718,-6043,18202,5515,-18684,-22008,27995,-9511,319,14025,-3903,3020,19509,329,-12014,25457,-20133,-7646,17840,308,10599,27590,-28045,-31536,31872,28270,-24543,3083,616,-11620,5599,-14336,-19011,16881,28822,-16613,-26165,15429,-2620,8940,14436,17466,15533,12653,-21539,13496,20583,-10033,-18450,-24457,32155,31141,-28521,17546,6310,16566,26547,-992,23260,21512,-14347,-12102,5871,2247,18085,-17821,-13579,21962,-18530,15937,30511,-1189,31457,-11760,-13601,876,15246,7796,29263,-32117,2090,4099,23021,13027,-17178,24292,19372,-29341,-4666,1868,-9415,-20374,-3388,-25428,-13833,-1950,2238,-31465,-301,10625,-6348,-23331,-12546,25004,22809,-84,7589,7769,-20763,-18425,14135,25687,7059,-4490,-6325,26323,4776,16121,-22244,-9256,-9894,-29796,-19630,20587,-12129,-11613,31704,10062,-18791,-5593,18623,-26163,-955,20661,-22979,14702,-18820,-9873,-15828,23542,-8903,-30183,-26410,-23488,29479,-8218,-29666,6395,-5785,24158,-23983,-25414,22585,13362,3938,29996,5607,-10928,7252,-21630,-21392,2392,-23294,17951,-16615,-26583,-4125,-10147,-17980,-1574,3756,11644,-17674,-13612,19009,-25988,714,26826,-9183,5812,20055,-31192,-4758,-5978,-17698,7114,15480,17766,-29635,24552,16186,7084,-2678,28081,5941,-23957,3307,-28629,-4818,-28640,-31570,11318,18840,11087,32173,25141,27544,22286,-10600,-3603,3924,19051,7806,26600,-24896,31074,27664,-22716,201,7284,-3303,11687,4878,19926,21428,-30470,31766,29251,-28738,1995,-16812,-7635,23385,-28170,-11233,-1243,2468,12909,-25108,-11728,-28461,-13665,-5007,27395,20776,-23587,-31944,315
42,8311,18164,-31841,-9776,-5429,-28088,9387,25994,19380,1939,-2040,-30492,12200,-10001,32176,5618,-24619,22987,-16473,15565,-30755,10412,8499,20969,8518,16922,-25904,-19250,-8745,7550,31970,31796,-12090,6912,4697,-15533,-9966,24459,-18086,-7559,-27100,18396,32656,2450,-16869,20443,18872,-29570,23174,18452,23925,31715,-26826,-8250,23235,-6109,-8378,10184,-7446,8687,737,-3113,-17264,-3857,-9993,8199,7975,12649,-5930,4301,27929,-14782,5015,-17254,-10686,26186,5101,16689,8940,7138,31292,14263,-3526,-17646,-24749,4434,1815,16643,23224,-6345,-7225,26801,-11399,30800,10466,31563,-20400,14467,-14613,-21709,-9877,-3268,20776,8087,8307,-5994,-29196,-15884,-5497,15846,-29881,-7307,-2845,-27758,-9001,-18155,-6395,27136,-21535,14828,3339,-8054,4391,17732,5581,-20957,18959,-25998,13349,-29615,20938,-17676,-23946,-19862,-29320,25899,-15438,27934,-1716,4800,22313,27640,-23683,-6376,-17150,-2358,-11835,-12994,-11056,-18102,24991,11303,-26014,-1317,-27920,-2813,9123,25304,-18553,-25085,27127,6460,26127,-325,-8798,-8426,633,-4805,20431,2968,22433,23711,-22568,26480,-15225,-15260,12380,-2806,8784,-7809,-9837,25880,-13336,-20120,-20505,-642,-14895,10165,28322,-9008,-30180,13436,-26580,-18908,4418,8986,14060,-19514,24312,30372,18996,-31943,10075,16077,-3780,-7765,-22002,2630,-386,12554,18178,-10658,11537,10393,-27488,-308,9915,-15926,11148,-5022,-19191,-12268,-11185,4773,-27275,-10218,-12021,-2845,1282,3401,11379,9049,26385,25313,-31835,18390,-29988,437,-17147,569,27048,143,-28175,-22575,16396,27673,12371,-12215,14984,7812,-22265,-21496,1546,23257,3042,16263,-18125,28044,-20947,-17020,2182,-164,7850,-30946,10304,-26132,-10020,13650,-2613,-4860,15325,-10780,13549,-15894,32578,-13960,6333,17103,27517,23196,10863,-15666,-1101,24486,-9486,30706,-10251,32341,-9755,30794,20539,-1115,4991,-4668,30493,-29417,-1177,-14748,-27565,-32184,-5575,-7283,-3804,26946,-11854,13168,30924,27489,-21747,-24148,22079,-3589,2595,5977,-17087,-9705,19736,22838,9730,15083,1126,10785,-5224,-11662,-20264,-32168,-22
132,-15918,21147,26947,6809,27422,23719,-11447,-15057,-29017,30831,4569,11581,216,-21752,7197,-18431,-17918,15105,-28014,2336,-29347,-15326,2390,-8292,-29659,23705,-17640,-16098,-6358,-7063,12342,-48,32613,-12851,-12211,-1131,30268,-24109,-22118,-28333,11450,-17351,5929,5724,-717,-31841,-29999,16154,-614,-23498,-24569,-17078,-7139,-29359,17698,2967,20385,14430,-3955,8823,-9712,4662,6226,-1170,6979,-381,12319,20369,-30653,-5018,-3972,22836,-617,-30131,-31482,1883,3760,15085,-3611,-14309,23611,20918,17608,-11933,23246,19109,4401,-6330,-3230,23414,-18684,-16058,30999,21169,26095,4680,3359,18131,-26652,-1197,19159,17,16017,-418,-18495,-17363,-13235,-19682,-12592,577,-18241,-10819,-21196,-27114,3916,15846,11367,11832,2576,-21551,18370,9515,8383,-14874,18508,21558,9265,-8620,-21357,16294,-17396,8251,-14294,21294,-29148,3778,-1418,-10380,23063,-2310,21872,-26543,-5185,-2691,3333,19300,29185,8996,22603,16570,428,-3122,-17444,-16237,-10461,18172,-31458,-25726,-25044,19590,-28758,27132,10270,-16367,-17595,-1159,18180,-18699,614,754,23790,8843,1350,7785,-26173,5743,-10592,14307,14348,-5555,-24248,-26020,-30276,-5358,-30340,25506,-11784,17144,9844,377,17223,23204,27770,-4174,7131,-2829,9911,25563,-12244,-1215,1753,112,-25184,-11880,-1618,-13294,-30320,-11787,1208,-8991,23936,-32657,-26715,10702,-11942,1017,15775,25552,-5245,-455,-9468,31400,13693,-22548,18646,19485,28324,20385,24247,-8980,-28051,11514,28077,-18851,31108,-18837,23498,-3224,-27691,20302,-1199,7453,4714,15825,-31307,13362,-24093,13196,12633,-29341,20404,25248,13037,-30449,24702,14220,-10759,17762,27749,5939,1772,-32383,15217,-11539,15507,10611,14948,-28883,24527,26677,-5806,7881,-8108,-21554,8228,4799,15425,-8740,15454,15188,-11482,-6588,16131,-6226,-12745,-25991,16860,-25492,6298,-2972,-15155,10560,32377,25870,-4701,-6714,2208,-14578,-2375,-23286,23037,3528,-6728,-2067,31294,-3591,-8567,-23738,30249,6397,-22895,-27181,17079,-13648,30452,21632,-9577,-25832,-1679,-31797,-6314,-3166,12908,-1989,12939,-29490,-11011,-
28262,23405,-22870,20144,30701,-2086,-4043,-13883,-9627,-23788,24762,15708,11749,26053,-24903,-351,-17964,10447,-20788,-31828,-1720,-17713,15589,4409,10549,21876,-13454,2265,4084,3976,-31982,11303,2619,22228,-24379,-21232,2281,-18012,26628,22021,-20870,-11202,16222,-2034,8366,-19104,-20122,24487,28987,-4143,-31925,24715,25756,-14827,5094,8990,23582,-28201,19900,10514,19442,-1901,-19000,24404,16230,-29756,2255,-10610,-4459,14895,-8050,11334,-8492,16027,18156,-11347,-24138,29548,21656,31961,12152,-4210,-2449,533,-18268,5135,-14773,-6820,-31628,-7459,12775,13979,22671,-7365,-5902,27311,29256,15529,3260,20283,25784,-1610,-16139,29130,-28959,-1148,22209,7301,-19011,-1531,25593,-15537,10193,30582,-12401,-22748,768,31308,22764,-21810,5166,19726,22557,23526,30712,-834,-5142,17263,25212,14458,-30659,31993,12618,30142,-4462,18537,9979,22498,810,1417,-1496,22212,-23013,-4171,14086,12430,-5997,-7312,10477,-21934,4814,-6964,29435,973,-16235,8181,12425,22991,21048,7165,19839,14761,30936,-18846,582,14933,-9265,17880,17786,-20062,-724,11123,-20273,-17795,22618,13357,2276,22938,16310,16427,-8078,-21441,4150,-24281,-29609,-14780,6974,20000,-22505,9910,-20551,-9456,-5310,-8670,13021,-28837,-22629,-22927,-32004,-12252,10723,-4381,14040,17208,-13820,-15938,-8731,-28335,-25735,9936,31300,1788,11797,7200,-21396,-22533,-1846,8060,-9780,-16803,10869,-27188,-6271,-27265,-1770,-8294,22292,11675,-12169,-2200,-10725,-15064,31160,-32538,27574,-32250,-5509,20128,-31351,-6119,-28422,5953,11902,-7301,-3390,30663,-31715,-27494,6491,-32159,-18785,-6366,21223,27537,31669,3585,-8985,-2271,-15285,30462,31881,2576,2725,18935,-27301,23576,16802,-3831,-32035,10079,20333,-26914,19770,-28081,-23388,-10996,-8553,-18116,11528,1861,-22738,925,19435,-23696,14702,-27148,-23590,4572,-20008,-23691,10889,-6068,18767,5797,28807,-28932,1387,21925,-26939,-26543,5432,-2928,30245,14882,25155,-22529,21004,26538,23011,-15981,-11542,18464,31622,-27242,31125,-5703,-19025,-30698,-11079,1618,-4800,23113,-25869,29612,-1940,2892
2,12363,-30760,28146,-32650,-32351,4260,18347,-28303,7202,26059,24309,-31901,-7460,10605,-22084,8325,25040,18194,-7389,25955,217,22901,21727,484,21523,-11389,16997,-18301,13263,25983,22123,28811,23901,13379,-628,14019,27626,1298,28407,2827,-24447,-30220,20587,-6079,32688,-14925,31752,-22721,-3828,2405,21867,29295,-17820,4714,-12825,-22535,2780,-30002,21847,-6863,30019,31457,-15962,12897,-31799,-20867,-22138,-10502,-28042,-14147,20220,-20189,-26294,-23714,21110,3819,11182,20140,-16988,-12767,-26197,-17037,7914,31875,15156,-22152,4637,2084,27580,2437,3579,-17769,9025,-25837,-1635,-3643,-7241,11789,7611,-4641,-24249,-26731,-14086,-29232,2384,-31086,26697,-17835,-15339,31239,8945,11822,-1138,22950,-10388,-4923,10755,-28961,-4445,29825,-4629,-20932,-12551,21658,-15795,-24607,-3420,-4820,-13714,-23119,-5185,-24799,31457,29908,-8998,22210,11297,10286,1131,-17378,13147,-17828,11335,10271,-17504,-6292,9618,-1739,-2534,420,2290,29266,29130,-4306,28639,22372,1737,-4368,12160,1483,-28205,10928,13411,29409,-1333,32175,-8337,-24710,-7150,20652,-13187,-7709,-30001,31197,-2504,25741,-17384,-4785,24078,4556,-20047,-24141,6891,2831,-36,24083,-20528,4028,-27478,7395,30636,-5382,23893,31914,13262,27321,9167,-18471,15381,28065,-28241,3307,-11761,-17457,-17001,29819,28721,11539,-20964,24512,-22036,1132,4254,-8611,15522,25733,-24330,22741,11767,-17243,18930,12118,-10995,-10420,12869,-19072,22639,-20316,23441,1740,18386,-13025,-18398,-22485,-1360,9944,28931,-14394,29366,7512,31842,17030,22843,-15582,-29722,-21983,2853,-5518,20579,7529,-27054,-13898,18011,17670,-20812,-21780,-4744,18986,-17326,28164,-28771,24183,15908,-4930,13732,-5247,-2063,7334,-11116,21110,1329,-5107,30726,-4545,-28993,724,-9864,-2702,11859,-19609,-8109,-4822,-10234,-12564,-1366,-11150,11448,15075,32225,-5808,11626,-13505,-10129,11303,-23721,-7119,-27275,1598,9075,17224,-21461,14795,16009,-1375,-28341,-6399,-20705,-7260,-1253,32223,20382,31523,32523,6702,7437,6220,-25840,-16555,18254,4,9212,20710,20751,-17338,26376,3289,
-527,12094,2634,30701,19783,5985,2009,-15901,-8663,-4981,27936,-20529,-17290,22273,14838,15726,-14319,29407,-22423,-5843,-1420,-32151,15830,-15401,21828,32615,10072,-8803,-27311,2518,11752,-3706,-30885,26752,24302,-28198,5883,-14524,18727,21809,9529,-32112,-15821,30804,-21736,-32210,-15550,-25508,18905,-11272,-26472,-13865,-2822,-17091,16882,11320,-23139,-7360,22547,2019,-27619,-11257,15850,-6487,-12498,-11704,25318,32410,891,4859,-29835,5697,3782,1796,2332,-18258,-23636,-5431,536,11012,10748,-32733,-9223,-5542,14691,-6108,-18577,-23939,-12312,1779,7961,-13419,-5257,13917,32739,13620,29491,11451,-7418,2553,-29588,28606,-17158,23838,1437,4173,-7734,-538,4830,17475,-28798,-8874,-5586,24927,-18690,24603,-9886,29003,-20865,-22282,32555,460,-260,-11114,-14930,-123,4830,5967,933,-21533,-18765,11323,-2467,-5383,-21744,-14146,18270,-25593,-27630,16149,28529,-32503,15090,-19623,-17279,-9651,13609,-19121,28669,-8728,-12263,15024,1106,7970,20388,28881,-8638,5018,-1361,-32578,32713,-15237,12386,-22508,-12476,29074,9594,24964,-32008,20159,-11025,-15808,-23896,4813,-6686,-22818,-25270,-7238,6142,5996,-29234,5904,-2100,5280,28473,24042,3295,11943,-28438,-28157,3748,30809,12862,5077,23362,29963,15655,-11379,15380,8295,-27851,-27893,-11060,17396,17357,15809,15524,15330,16100,-31998,18159,-6548,-2507,-22600,-6294,-10933,6061,-12433,-2445,17761,-17513,-5010,-5367,-15829,-21448,-2260,14880,10553,18054,-22606,-32309,18165,-5839,-12788,15376,26594,21335,17660,31451,-26156,-11287,4328,-13859,1078,182,25559,-10720,10509,-20550,-9168,-10071,25848,775,-30099,16545,-22492,-8724,15607,-29460,32653,26771,9492,-17065,27839,15710,1301,5107,24018,10883,-10731,12943,-10775,17172,31420,-11051,20553,-31088,-13475,-16325,-4791,24247,-8253,18549,1685,-82,15496,-32525,5184,14933,-29258,-3780,-24729,-17963,3222,-26057,5032,6968,-25971,11352,2252,-8098,-1997,-13104,-29121,-21949,-9520,18635,32504,-21713,-15184,23960,-14017,-10060,-12704,26135,5838,3916,-16597,9729,4596,-16643,-10609,5076,32280,1061,7098,1
1704,-21400,11156,-29748,-26713,18460,6174,-8219,-21827,-18065,-8580,-27375,-5395,-11871,-16989,23027,-17477,-23165,26616,-32549,-5878,10345,-14007,-9199,10544,5745,380,15698,-8255,13060,-17336,17641,-11028,-24915,3003,-17323,-9039,-13657,-25944,3413,31385,5832,-32349,-30374,26466,32030,-16254,22481,-21803,23646,5201,-10181,2234,16297,-21663,10637,-5646,-17441,-14492,-23388,-7261,-16569,-26750,26462,32526,-2704,26909,-9249,-2622,19806,-6269,-12862,20964,6938,27068,27694,7414,27123,-30288,6065,24105,-12227,-1323,17727,30096,16071,-15208,-20816,-19936,27372,2071,10327,-28179,2046,19608,1213,22532,20397,-24610,21260,-22365,-3078,1391,435,-6024,-15754,32516,15750,24947,-30750,-586,-26684,19306,-29497,12578,-22738,-15940,-197,-24449,-29076,-31566,-26987,13344,731,20234,6201,21755,-30753,67,15152,8449,-17360,15051,-7165,-6566,-13166,-11258,-15854,32736,-2498,-6290,-319,-15528,32224,8102,17714,138,12015,-17499,10675,13758,29986,9893,-11418,28631,5050,-5500,12293,9072,-18007,-24615,-716,12356,8734,29633,-27642,-2136,9729,16965,19429,-23079,-15685,-19420,-24567,-4977,-29427,-22650,2558,15239,7587,-18941,-5268,-7236,-20842,-15590,12317,8243,-23170,20814,6725,-7709,-1218,17227,-12570,-32217,-26546,17205,-8955,2539,23161,-9902,7130,-22950,-6228,5911,-4772,9207,19402,-21570,-13064,-19404,-30123,-28230,-6158,31355,22301,4223,10952,-12392,24326,-13814,1553,7358,-25475,12003,-22804,31240,-31251,9108,-24177,-9953,27831,2818,15948,-6727,-1076,12753,29551,-32009,22555,9930,11125,27721,14585,-9143,18228,-20956,19323,22667,9577,-28832,-552,6009,26025,7293,-14246,31103,-23511,-8135,-8643,-12734,-29077,30758,-3426,-16322,-31897,15073,8276,-24512,-8631,-23170,7641,8983,-6383,5587,28392,-5160,17215,-1515,-10222,1937,7092,12047,24266,21381,-614,9587,7449,-28275,11843,17773,-4369,-31898,20168,-2237,25184,-223,-18241,850,-11297,-29280,-2335,-21093,17862,11644,-25632,-11043,1051,26661,27043,-25647,-23,-28048,-237,4461,-20144,18919,-31689,24148,22748,-22669,15735,-16368,-5241,-21757,2823,-407,-1
3130,-23416,8288,-14713,-8106,31864,-5032,24108,-6985,31460,-12865,15361,-10980,16145,-17153,-19402,-22729,-22052,-32377,839,25301,3146,19466,7091,25734,20434,-25882,-14825,6807,-32324,-3944,-20212,32755,-5763,28493,-26307,4030,-5282,30524,3286,14190,17679,-7507,-12067,10786,31525,-3504,14423,27319,13731,-20285,-14154,-4480,-13167,26353,12027,6102,-25398,-31388,20586,-4114,9904,-21349,-4997,-4625,-20215,-12030,-25088,-26742,20772,1893,-2394,27725,29430,-9806,-15363,10086,-25689,-10430,32278,-6637,-14631,-24399,7197,2858,14776,-5127,10848,9222,-2,-19723,28913,8948,-29314,20890,31986,-23295,-3791,-25630,12902,13674,-20316,4760,11110,16603,-26093,-14686,-27800,2982,4443,2951,2227,-17534,18916,1860,-5001,-18817,-8060,305,-28512,-22986,13244,6093,24990,16022,9565,15078,27359,-21188,22185,-4238,23242,7100,5407,-26217,5753,10089,-25060,-31179,11746,27796,-5683,193,-29478,-20264,-24346,-12812,25118,13237,-9469,25179,29887,-20222,14144,13213,31115,18579,24711,-16987,-3610,13135,21937,-6691,-11283,24325,-17038,4745,1620,20701,14812,-15878,13081,-10064,-19566,26004,-27436,796,-25267,29753,-6860,-1287,-27576,-21324,1933,-6370,2295,-18967,-8654,1124,-9578,-30382,7858,-6928,-12787,23706,13869,-1097,15958,18467,14597,-20636,-1,31179,32309,-28370,-11641,-30146,-10799,2848,-15394,-26668,-27849,-23447,8230,12009,-18770,-10108,26084,2842,20004,24872,19676,-28657,-28928,-11955,2092,9220,19588,4590,16718,12668,15052,-22385,-23813,-12048,1443,-18036,-22425,-32073,-86,-326,5609,17637,19872,3999,-10870,-9584,-31303,12019,-23523,25264,-14857,19853,-20824,-18909,-18832,16633,-32603,-18474,18950,-9358,9112,15123,-14616,-5741,15753,-13553,8030,7341,-15739,-13944,-10181,-7142,-32586,-3812,3263,16101,-4306,-7694,-15509,-12918,20577,19577,-10902,-25283,-1497,13651,-3176,4528,6353,9510,-19513,-22212,-32175,31439,15545,27986,-20758,16637,-6521,17100,17660,9327,-422,-11835,311,21204,31660,-10940,-11291,-5446,23027,-6361,-4706,10572,13912,12004,-22210,24822,30369,20462,5920,8012,18053,6107,20134,2095
,-27410,26058,2827,-3549,25128,-12863,-583,27143,28484,-31950,32536,-7519,-21109,26249,17975,12930,-12800,10136,15443,-156,6165,2752,23401,-3038,27904,-21593,30462,-2771,-14846,18494,22828,-10584,243,-6908,-2446,-4572,-16121,-6441,9680,-20146,-12672,-29975,-26805,-16719,19269,11487,-21075,28734,-6632,6801,-19501,-28755,-1481,-5091,627,-23882,-12704,-16417,1834,29704,-13727,-30651,-13121,14363,-7368,-6505,-5197,17469,-1656,-1266,19961,1836,20436,-14135,-28810,-8805,8066,-10303,-28793,29809,22412,-1666,17341,13076,-8688,8678,-25161,4094,8308,8310,-21880,-24664,-1479,8542,-7025,-26431,11784,-10430,9862,-20633,7707,16989,-12230,21217,12128,18586,14813,9559,-26647,11022,28708,-5999,3092,18632,-32638,25262,-30930,-24326,-17160,-14978,19129,-7926,16117,-28665,-25359,-5836,-4273,-21601,-17662,1433,4977,4535,10145,-18970,18791,-1733,-24732,21790,-17604,31678,-27636,-30068,26636,-4480,4464,28963,-22984,-13067,14534,-22063,16782,-8928,32762,16069,5204,-18086,13022,19408,7039,30282,-31958,-20695,31108,-745,-31933,27625,16018,-17092,8391,14540,-19953,-29134,-20626,-12259,-9362,-18180,-9796,-6510,-31705,32490,-9057,19518,-1679,-31018,23898,3770,-28990,20577,1627,-9010,27093,17360,-9802,16505,12447,-2717,3044,-669,-2075,12405,-4107,14454,1077,32550,11784,32515,-14613,3708,8774,2773,25081,10376,5930,717,-14959,25786,22042,-13224,-31572,28,-2530,12735,-22221,-19005,21685,5042,-4934,32486,-11385,-4694,1180,-27422,21695,3398,1823,9172,17263,11010,24647,-30796,16028,3011,18076,5010,-1577,-27765,-14908,-26244,884,-21727,27512,-1949,-10680,-30849,866,-14946,18739,10762,-32511,26543,-14425,23232,-2193,-25046,-24258,-13321,19844,-24383,13028,-7402,16124,-22825,-10288,-18069,-22379,28334,17387,29123,-22152,-28817,18444,26332,17790,-27398,-23128,-5126,-2056,22682,-19235,-3220,-7642,-11747,16469,-21623,4806,3696,-24015,-27024,-30537,933,-30828,2613,29631,1202,-17976,-18568,-6193,20176,29908,15457,26330,-1932,-22877,5205,9876,-18129,1824,18681,-31256,28326,27121,7605,-1527,9393,-21306,28280,52
53,7144,31506,12011,-21902,-20377,-3262,30493,-9769,13675,27034,-30703,30656,17689,31047,-5804,22774,-13390,31395,11463,8358,11356,21650,-12422,11766,-12340,-25615,-27636,-17010,24244,-8600,13820,-24712,769,14985,26701,1188,-11542,7944,5681,3921,-24633,-12004,-23711,-9664,-27352,19727,29036,7375,11428,260,886,-26539,-9492,-24630,-6572,-10199,16913,-118,26572,4118,19367,-1521,30645,2698,-27676,20528,-17262,23874,21252,8858,27923,-29428,4489,30372,24335,-3579,26834,1781,-10604,836,-30169,9407,-9968,2364,-23259,9837,31127,14543,-548,-9026,-7434,27370,2259,19038,23245,-19664,-3726,29963,-17246,11616,-28115,24984,26797,-37,30146,-20074,-20571,11067,-23081,30598,-12032,-16179,-1182,-16672,20687,5964,11417,-5824,14909,-23885,11956,-7028,20072,28653,-10103,-9386,18463,-11244,-15943,3766,21394,-12699,-5561,-21335,19792,20922,-22466,-21960,-2454,16598,-21149,202,-22302,-14705,-21102,-31602,-32438,6939,32708,-29139,-13130,19639,-31733,-10615,-20818,-17301,7704,-32138,32056,-16965,-19945,-18880,-30157,26128,-17397,23019,-10825,-939,29086,-2794,-29982,18268,21661,-29003,-10508,27742,-6063,-12732,30421,9096,-5546,23623,15422,14928,13635,17344,15486,-12932,-13018,2594,10722,-14200,29223,2296,-15658,7870,10205,-12277,-18356,6149,-6717,-1032,-4892,-16979,-20987,8033,-5209,-19407,24013,-28131,32614,387,-19524,2767,13532,2597,-8215,12627,8996,22977,3336,3405,22430,687,-4851,22519,24838,1846,5837,-12152,32365,-1967,-17426,29496,7321,31772,-32065,-3085,24682,2152,-15187,-19138,5735,12145,-5500,-3104,-9387,15445,30119,2251,14307,-1438,3626,-1539,-21705,18073,-30828,5393,1087,-9422,22616,15438,24190,11600,-7256,-32406,18880,21999,13964,8516,-22475,17553,-32699,-29898,5921,-20655,-22165,-21739,-25104,-10994,16507,-32589,23346,9122,-28232,971,25421,17767,23500,-14706,17113,25347,17912,9399,-2075,25996,12779,-6347,4562,27497,11120,-28376,-26259,17123,16875,3367,29508,12865,31663,-15312,24840,31611,-30882,30619,-6576,28169,-15504,30215,-565,-17862,-30030,22645,-226,28701,-27267,15904,-5248,-3
0904,-27504,18098,-23347,26825,-31426,-11932,15950,-22315,32555,30325,-13724,32342,-11697,-10341,-31240,17930,-3215,-8613,30374,25793,29691,-31851,29772,14700,15273,14502,19160,-22903,14283,4417,11574,24440,-20621,2967,17405,382,32006,-1222,-5879,21772,15064,-28501,-20712,-27979,8767,-9567,29787,-11053,222,-12897,-5702,-10448,9730,31795,-7671,18434,-25168,21581,-960,-24825,-24594,17857,-6426,-13785,-9652,22227,15314,2120,23986,25449,-8327,-8382,6193,-9985,-11821,-3346,7858,-20876,23666,6768,15750,7431,-21428,-25263,11861,-22709,-2936,-29688,-7419,19633,31185,11584,-20919,-31502,28723,15247,-7905,-21613,27967,15031,10816,31891,27071,-1497,16703,-31759,16667,-30469,18597,-26450,-7698,-15943,-10605,-27279,-5823,-13695,21816,9358,12686,-3753,-10765,-23915,-5998,15721,6433,20155,-4123,6750,16050,-12821,14628,-21102,13838,-9544,13556,13119,1579,10069,-14992,19804,-24690,-19606,-7330,9847,-6428,31303,27693,-23619,-2267,25597,8083,3159,-112,-23842,-23770,10584,13215,-26114,5936,7731,17781,-12309,12386,-26928,-23447,-14074,-30270,29628,-383,-14802,10644,-11333,-25535,-18677,-17182,30139,-15565,3639,4104,29591,19192,22095,-6560,-26864,-5478,3837,-4674,19244,-14195,16485,26977,-30389,7069,-22412,-1274,24029,1761,-25442,16206,-12417,16495,23462,-27336,-542,-23564,-15793,13208,12106,22997,29714,15695,9741,-15536,30921,26640,-32006,11174,19013,12888,25275,-21584,400,22917,-18085,-12775,-3489,11717,-20472,20845,-3401,9255,5396,11671,30881,32146,-28574,-14928,1120,-11968,-16575,297,-25372,-18768,-13495,9287,-8104,27792,-20332,-4109,-16563,15684,-17678,-27567,639,-32386,-14100,3262,17907,-3999,28119,24461,19045,-19760,-22370,22230,-30680,8435,5129,-3379,20641,7951,-135,-23737,8498,-6522,6488,29235,-27887,8421,-32271,-11849,23183,7492,-12657,-17974,16344,23906,14426,30113,4506,21262,4154,-19781,-23147,-27101,-27535,4889,20459,1739,17102,-14649,-6335,-23769,-13681,20450,15145,18715,-20923,-2051,24458,25701,12279,-18928,15480,663,-15841,-534,-14365,4299,-29711,-19559,7754,-29070,14188,
-26579,22277,-7672,-9221,10527,19480,23664,-21354,-17160,-4661,6555,29405,13219,25460,-24821,-17747,-30305,-17609,15960,29756,7183,14851,1763,-12631,23171,14418,-29903,-21305,30246,23879,-20993,24786,-18677,-5959,10173,11293,-14627,-23913,-8647,7196,21379,30280,17121,18600,11997,-15614,7420,-29737,-32625,-22023,11770,19663,31929,-18434,1353,26855,20407,-29949,24941,16610,-8509,-24235,30026,30035,4569,10345,3572,-5703,-4605,28108,-27469,-28900,-26698,-7857,-11994,25231,-27667,20121,7417,-18464,-29376,18470,-17020,21124,27188,-2181,3359,15767,13332,-3380,13090,-1287,-30869,-13494,1449,7210,-6605,9364,17046,-17029,15047,15710,8230,11544,-3644,-28170,-18358,-13121,-364,3954,16955,-26308,24439,32303,508,-10122,384,16086,4749,-13763,-19221,620,21055,-2068,11427,6062,28035,10551,-20976,-2677,-18298,23828,-25142,-15340,-13548,-12343,-12147,-13127,-22940,-17009,-24509,-3622,2164,30200,20885,7072,2174,20209,23877,-21535,28312,5133,-30924,-9878,13479,25,-4805,-9511,-17799,4234,22779,-17549,-6036,30582,5773,-3023,-23068,18142,3315,-7751,2292,8326,4538,-26276,11968,-12469,23898,22490,32610,-9003,-11455,7801,8182,29178,21835,-32553,30305,-17674,-24560,139,-9796,-21708,30486,-3187,-30970,25476,5761,-18047,-11392,-11480,30373,24124,28664,29577,19413,26134,-12112,-13590,22658,16526,-16198,-12039,32286,-432,30086,-30137,-6078,-27778,-1290,-17397,20001,-14719,25505,-2254,17452,3518,5081,21447,-12703,23750,-27232,-30785,17729,-4182,-8546,14261,10960,2566,10328,-22331,15089,27652,-11833,-28974,-7707,-8667,10059,-32193,12685,4615,-29382,-5352,13513,-24157,27216,22731,-22937,22017,24250,-2989,-17565,998,12230,-12186,21218,-28288,11029,-25211,23163,-16543,25661,-17197,32162,-8230,-1740,28840,30203,18702,-11097,10325,-20165,7421,-8887,27070,-17630,-30347,-1171,-5642,-11959,28129,-7460,-1874,-4100,-32324,-31405,-24138,-14954,19205,22524,6898,-15407,20155,14129,771,-5483,-28639,7204,-31599,12005,28787,-13534,10727,-18138,17476,-11002,-27649,3992,2684,3897,23798,-1471,24008,-18696,3226,-16052,
7647,-8348,-22614,13298,-899,7203,15935,11778,6909,-15693,9452,-32294,1696,-6674,-5781,29838,-31967,-15122,25030,-106,21665,-2399,31320,2372,22988,11091,-29157,-24700,-4280,536,-17478,19641,-19373,-29143,-21526,-9819,26340,-27953,30050,-27613,-700,-18489,-6056,31225,14119,-10882,-9235,-23211,31009,5634,6194,-17689,5472,-27086,23392,21120,29745,19138,3806,545,2876,14692,-2020,31035,-7470,-31077,-19639,-15974,-19873,-15166,2275,-29069,10624,4041,782,3049,-15944,18604,-15235,6176,20551,-5369,27295,4729,11678,3986,13959,-31491,10519,-21544,30234,6453,-22429,19146,-118,-26233,-30567,1808,26820,-7651,19634,-28430,30817,19165,4100,13682,-27365,6677,14072,-29286,-25133,24429,3364,30818,24066,31507,6561,25177,30303,-4445,-20744,-20011,13472,1956,17838,-1316,-10145,-19049,-16145,-1276,6762,-29348,-12806,11536,-15778,30301,-8263,-46,-9131,-23501,-10421,23089,18187,9519,-27703,1120,-11099,-1220,-29217,-22467,-26826,8823,20362,-28847,25274,3927,-17649,25622,-31745,-13952,-26422,29914,-25873,26047,30165,-22066,-21469,-22585,32478,-17912,-31896,-25002,-3254,-1722,-25612,5713,-21435,7321,30911,-20282,-29416,26806,9446,-19762,-3307,-2804,7459,22325,28276,-24265,3269,-12883,-26859,28005,28795,-18465,15486,-13669,-26834,-17472,29025,28959,-11931,29117,-556,27824,12032,12187,7493,-31135,3154,-32509,6344,12171,9411,-1606,-12388,-14932,-9082,8454,20562,15344,29400,-16225,-3630,-23167,-32386,-20863,-20471,31136,-5186,-32112,16431,-24544,18259,-13073,12378,-14075,32232,13450,-5275,15855,-20275,16191,-13846,-29965,-27713,-17653,12848,23659,30592,-18158,21203,10014,23493,22062,19555,18163,21999,-32530,11442,26696,27048,-29825,10799,4893,18655,10963,-14109,-20897,-22542,26622,29756,17951,11238,26802,14901,19996,32159,-12236,-20134,7252,635,-26175,-16521,12821,24148,6338,8351,-28014,-31485,-9117,-6076,-15314,7391,-8746,30666,8759,-4362,-30978,3021,32068,1989,-10618,-29251,9193,26438,11988,-15901,-20434,1221,-20127,-31647,3527,6167,-30290,15988,622,-24419,20251,6254,1877,-24897,1145,-10043,-542
4,-11003,-13568,30651,-10342,21642,-26697,-1346,-1017,17321,28393,-19666,17264,31233,32210,-802,-14658,12727,6525,28051,-27425,16880,-16068,31400,-24176,15958,1594,16611,12557,9056,-17422,-2015,13854,20216,-30979,15953,13568,7718,32220,-15718,29024,-26238,28758,5385,945,7826,30888,5851,12828,4266,6339,-16147,-2629,20884,20905,-22478,10401,-5193,-27050,975,15175,23350,-28475,-18034,-18904,20197,26891,26195,30785,-3148,-13013,-1938,18752,9127,-25554,24457,30573,-31208,10191,-26083,-32379,14513,-1567,-14605,10051,-9319,8542,10851,19950,28419,-4152,9565,11057,-17508,-7980,18844,6515,22992,29867,-28354,-25734,2237,-18337,5842,-24382,-17672,24945,2670,-27175,-8747,28681,22086,-22287,-18511,22264,-14143,677,13354,-17534,-28374,-29484,26091,32200,-25507,-266,26559,-3789,-27422,9424,-6420,11513,-15431,26459,-10361,13718,12137,29592,23973,22997,10144,6919,23912,19020,-20870,-13374,26701,6336,-11935,14173,12598,30828,-20680,25232,21295,17010,-14410,19374,6910,9623,-30454,19214,11728,-4514,440,-6830,20395,-3899,-3321,-7454,12934,-15182,-6373,7437,-14806,1601,-22815,-1630,9405,-1739,8438,-14750,19924,870,-20489,9070,-15796,-14432,29472,-3768,2886,24550,-24655,483,-4419,22137,1041,5378,5524,-29892,17950,-14632,-13397,22437,13403,7999,-16521,-20573,-15054,31181,-7658,5458,22974,20030,13371,-2163,30169,19229,-14685,24265,-4427,112,30991,-30809,-17028,-20179,10426,-620,-7610,-8993,-15864,-28971,-6672,14465,-21848,-6539,-17356,14048,23715,-30864,-21130,30316,-30134,-12541,-10860,-23716,16844,-27836,-19327,-4406,-25284,-5928,28184,-25973,-28045,-30009,9402,7189,-23660,10103,512,-1049,-12592,-23175,-27307,-5730,-280,-11536,10952,23786,25059,18980,-32263,30726,-26347,-26147,-5970,-20681,6204,32583,20776,-7119,26109,8480,5306,-10404,30066,19225,9526,-26840,6529,7478,24919,-14371,3170,11357,-13211,27675,-20598,15417,28504,-7218,25657,20407,-16187,-1835,-25735,15255,-32320,23881,27026,-32504,11693,18263,-21866,-13785,-10559,-23301,-8351,14882,17741,28065,342,-29949,12843,-14582,-25069,1491
,-8648,16066,5933,32573,19147,12987,28628,2525,-8115,10360,30749,23074,-9221,-15240,-3373,7385,23000,218,12288,-24797,19563,14979,-19687,-31817,-10948,-3390,7949,26662,-3632,-10187,30706,-3118,-1467,-6660,-30232,9186,-18052,28928,14635,14269,-5463,14868,8397,19949,12485,-26569,6914,11347,-11951,-8276,13583,-13170,5980,-8910,-9905,5645,-22098,30871,17213,31153,27394,-10167,9499,-578,14021,-19971,31149,-1636,-21173,10975,-12310,-12390,-16381,15343,-13620,14887,-121,-14499,12782,-17957,8694,-21480,-17104,5628,28564,1867,-7472,-27322,5533,24509,11692,-20934,10498,-2108,24139,-7619,11960,-32466,19812,24935,-32428,-20777,8362,22231,10710,-13722,262,13903,-29581,10178,-22040,7397,11669,7097,-22989,22360,12219,19318,5393,2847,22374,7127,26887,14365,2656,19409,-20841,20612,2619,-6943,-21935,-19962,-30445,-9452,-1988,-23454,4733,19427,-31740,2780,24694,-17864,29663,11896,-9586,-20268,-2786,-18397,-22909,1522,-6050,-26,13593,32565,15486,4928,4840,31246,26391,-22968,11737,-10083,-26725,-6567,-20308,13003,15725,15675,364,-15646,-825,10499,-20265,-13508,-31990,9047,13632,-20967,3064,11950,6640,31931,29994,19108,7078,3303,8206,22115,-28603,14469,-7157,-4251,-8262,7770,3698,21252,-13999,30397,-18008,5883,-16704,-14101,-8131,-32008,-11676,-22908,20760,31736,-32114,20133,13181,-27143,27537,-32012,19458,20164,26156,-22881,-27370,14971,-20716,13722,-31159,13678,-4941,-27115,-15892,-24378,-13914,30231,25260,24752,31999,20565,10866,-23694,-5768,-20087,-3589,29431,20180,32104,4451,-31995,31614,-16738,-1710,-22073,17290,-4745,15601,8169,-6955,12688,-11147,30189,-2034,23395,10702,-19461,-20188,-21228,-30906,-27998,18976,21677,-18091,-31590,4201,-31850,14027,21117,4445,30850,-2725,-5726,25671,-29332,-18899,9145,27895,18552,-28829,31241,2567,-8661,15197,-20399,17444,5995,-30864,-18749,-16552,21663,-13980,-22284,20474,18458,-24491,-17884,26428,-4158,-90,9001,-6683,-25904,5812,24647,17434,-18880,25450,17290,-7352,-22877,7702,-16010,9096,-11990,-16285,-11204,-15024,-17867,17544,-26865,4929,3678,
15992,-10235,-14473,-10209,12700,-7771,-15583,12253,-4020,7566,-25847,-16648,30931,13092,-25928,3646,-5667,29582,-22592,11717,-3018,962,-11065,-1631,-1291,-12759,25571,19964,11827,9961,7207,-12787,-28451,17913,-15020,27141,-15710,19140,-13706,6032,13308,875,20706,-379,-22353,19179,-15115,5031,14425,4745,-8200,-17356,-14533,-4007,-14896,-27853,9143,-6873,23736,-18636,-189,1019,11430,20129,14320,9709,-15990,13477,-19414,2677,10022,-18273,24560,-7821,-2718,31308,27984,-8695,-8660,-26657,22284,3189,4577,-12088,19287,5714,19577,-30284,24924,-8017,10044,30975,14637,-26646,-25171,-31909,24966,-30070,15758,-11032,15879,-14201,9898,-29902,-9827,18338,-22047,22335,28741,19854,-11991,22514,19068,4750,-5800,10966,19193,6624,17194,-20157,18612,32167,5060,-1966,5365,-15595,18814,25253,-13767,-20609,8518,-15610,-30654,-18934,32725,23677,16643,-23091,803,29838,25774,-19476,-8169,4885,-14658,-29063,-16712,-1116,27348,-5823,12067,15996,-30858,26519,26448,23035,30640,23885,6972,-14882,-4531,-6592,-12841,26520,17329,15546,5657,-11395,8457,32700,3353,6136,-1488,-9217,-20837,-29591,-25911,-9698,25350,-16629,6377,9424,-25835,-15394,22567,32263,3736,-7097,-27647,-25116,-4862,27856,23911,3779,2955,-14794,24199,-11125,-4995,7027,-11414,-24382,-30940,-20089,8469,-13297,15386,11092,4585,-24969,3394,22267,-16661,17617,-4468,-12294,-57,15090,9389,19338,-10858,-17054,28304,-31730,-13480,-10114,-16278,17933,-13274,1640,1362,15260,-21313,11547,18233,-31358,-9660,-21900,-6549,7505,18381,-15609,13165,1263,9148,-7667,-4580,21366,-21538,22497,14074,11040,18242,-19917,22696,-16850,17113,-6431,9605,20858,-29495,-30176,-14108,14269,8512,-4084,-15581,-27383,24661,20380,-9356,-125,-24133,-11379,-4601,29603,-21254,-24177,31645,-11366,-18931,25925,19443,-9191,-32511,31428,-30205,5402,-15378,13616,21351,15048,24226,11695,4464,-1402,5138,-18279,-1339,24855,25592,319,-23420,-3346,-3400,-9913,-24914,26156,24348,6995,-8152,-24651,-10367,4408,-32674,-30578,18634,28926,-23557,-18611,-18587,7197,-23366,-11962,-25790,
-17191,-7433,-32367,4496,16171,-14372,27197,-8117,-14109,26673,22649,18101,-15299,9960,28283,2970,21894,6392,-29362,3372,796,-31874,10216,25886,6941,27185,-10041,27404,-27505,27076,2261,-31589,30088,5740,-10234,7551,20700,-24057,-28459,25729,20129,-31940,-5642,25750,2017,4534,-11352,12403,27406,-1158,23859,-30423,-21912,-31116,-27459,16619,7889,5302,-13507,19128,12115,-10827,-9303,20309,17625,14766,-2688,-970,-769,-32120,19837,-10065,15809,-18546,18851,24674,29946,-18249,26075,-23097,5979,18556,7076,-9371,2747,30127,8049,-9095,-29637,-14936,-2159,15766,17240,31837,-25063,10776,5004,-12844,-9002,23935,31897,-28755,1139,-6624,15623,-20111,7498,15066,-12185,-5437,30853,26611,22007,7991,19725,22620,-9953,11410,26068,-13440,-12291,7542,22975,16763,30836,-25419,11518,9105,-19842,11888,26547,5592,-26950,23584,-32165,-5842,-28716,8605,-93,-18661,2686,12621,-3317,1332,21633,23177,-6916,-19777,6546,-9334,7223,11680,-21770,2113,-29587,-27245,32100,-30975,10805,12014,-713,-977,-18899,26374,-19701,-17616,25717,-21915,23021,-26698,-27789,22150,13763,-12018,-5672,15886,-11240,-3763,-6465,-5386,-19419,-894,-7763,22659,4417,-25148,3320,12649,-8768,2444,29478,-14369,19200,-7840,13300,29063,24342,2846,27953,4989,5318,-30329,-16902,4801,884,4084,-3913,-31207,-11771,29972,-7700,-19118,20339,3289,-28410,-16146,11365,-25357,-2365,4308,9446,3481,-23420,-29998,-18903,23294,-4697,-13273,-6746,28733,2374,9572,-24022,-8833,-16984,-5624,-21943,-11602,-19537,-26430,-30863,-21438,-30923,29359,-847,-24820,-11921,-21564,-1698,27825,23301,1329,14736,-22846,25122,23634,-5646,29408,-28609,8992,8381,-11458,-21719,-9896,24078,-4577,-29803,-29706,21575,23693,-10760,9916,-13091,-26168,13103,7414,-31739,15788,-28471,12100,-17659,-26256,-4426,-18887,13367,4866,28080,22750,3845,-1470,-26489,24548,-17958,-2410,4977,-28137,-14856,7606,22902,31458,11763,-18084,-2391,-714,-15605,-31918,12067,18978,-26347,28304,-31628,-22612,24071,-22708,-8596,8970,-32272,6321,-27218,30973,-7176,24927,-8993,-31594,9164,-5643,-207
64,6316,6950,19243,-19042,18712,-29450,-25644,-3117,8305,-18662,22701,14187,27254,-3651,-15968,-18870,-18565,-17062,-17517,-28257,-29813,22126,-30349,-355,12785,4461,-10083,-22071,-32225,-16634,-7594,23487,-17130,-13251,-17741,494,25227,19787,30024,813,5090,-8917,-7796,-21038,-12050,11277,-22321,-3065,-11293,-12268,1805,30532,-20643,2865,19736,20239,-6712,29280,24216,-31220,6562,-31378,4549,17875,21909,14923,23692,-1369,-32102,18833,-8506,-28675,13791,-14093,32299,16204,9624,-23190,3900,3330,-13833,17850,13841,27346,9226,-9575,31876,18637,-25038,-10127,-31420,-7944,5156,12823,22350,12385,-10634,11222,20308,-27855,20173,-7261,-28933,-7109,-13871,-18494,-20898,-29203,17705,20655,-9579,129,5293,16111,6592,-27303,-216,5137,-24915,-12498,-21617,-5395,-17206,-9825,-20345,12124,9462,4577,-27166,6838,-25064,3624,-6943,17002,25769,-22342,-22645,-3704,13209,8436,-22882,-7006,-17765,-31519,-25853,26663,23375,-11303,-28781,1570,24741,-8777,-10556,19829,-13403,13332,3422,15122,-24886,-992,-8697,14019,26894,-27090,3352,-16137,-28402,2635,-8290,-10790,-9827,-26003,15601,28440,20672,-3427,27361,13257,-23433,24472,-17973,-14726,5131,-5078,22704,-24635,-26617,18271,30825,31980,20847,15074,12799,-19810,7751,-25164,8565,-15778,26854,7339,19147,-25683,32475,13098,-26997,-32644,-20939,8183,-10166,-14624,-2736,-6090,14637,-2264,17049,2116,28829,20263,-22503,28721,-23838,30677,29109,29542,16480,-11236,-21932,6648,-30077,-14555,-13848,-26720,-19926,-4060,22221,23491,-294,27439,-26710,-28011,16197,-12384,10090,-18120,-16348,4480,12513,-3420,-1827,-25763,29836,-12252,-30159,6630,-25068,32496,-19353,-19944,-31184,-15480,-17873,23622,-21611,25855,-15584,-22992,-31892,-31500,-20485,25702,-4208,-6549,3391,-3205,-18370,-30742,18421,7226,-23732,-29142,-31828,-15849,-29498,6360,25518,27469,-24392,-23635,14867,-28212,1068,17201,-3315,23290,11900,-7283,-28235,10667,25168,7700,13041,-23492,16531,-11672,-30988,-21651,29710,8550,25761,12475,-24684,-20682,29603,-3532,-27248,-6629,17608,-10868,-8279,-13400
,-19113,13537,20277,-14906,27573,23003,21826,-3943,14909,-27262,-31974,-29445,5847,4564,-30024,23345,29272,2241,-27765,-31737,3134,-31302,-27481,26478,26742,-31015,8432,23029,31899,-12791,12879,-9640,9738,-13999,5888,-20627,-12428,-18137,-29080,-21818,-31686,-13807,-18585,3108,-25061,-21971,27941,23539,31499,-21515,-18089,17286,28082,-9682,-2676,23428,24188,-23411,17732,17098,-3697,23039,30569,11695,11514,-23115,20041,-20502,201,-2215,10385,896,-22484,21520,264,-21511,17504,-32667,3076,26092,8073,-21094,10803,-7405,-32728,-8664,25348,9253,-27564,-25796,32680,-26410,-10476,-15153,-8160,-12946,-27669,-22116,-16064,-11185,4261,-16581,-2993,-5745,12702,-121,9649,-4156,-25089,6164,3637,5084,13261,16431,2069,-20222,-27636,28664,-7879,5011,-18347,-17207,-14240,3883,-4619,-22216,-2966,26900,-9106,-15259,5279,29366,-20091,3871,26541,1188,-18718,14252,1500,16980,1644,15974,-500,-5125,-21330,-27698,2922,3696,-26075,-3557,-18733,-32224,-29767,24237,1504,17067,-46,-12740,4260,-10924,834,13040,11316,28498,-8258,-5597,-17418,2297,10747,9863,-15848,28138,-17300,-5029,-28222,-11600,-25154,-13542,25271,-24095,10083,-30517,-27584,-18899,-13902,-29765,28751,-3208,14749,-15020,20167,-11149,14918,-26433,-7941,-16087,-26733,-16134,1701,-15814,25843,-13629,23094,17047,-15614,26191,13331,-31981,-25393,14766,8856,-30871,-6812,13945,13516,-2949,12309,-7047,26348,-7337,205,-593,21522,-20877,-26100,31188,5723,10371,-8911,19281,22738,-2256,3549,5421,17222,-19448,-32027,31027,-3397,15562,12402,-11920,31954,2407,-30302,-2732,-15130,17703,-28106,30372,26284,-15220,-1927,-26943,18378,12104,-25352,15598,-173,12308,-16540,29634,14624,21830,-6413,10470,-25957,-4002,5006,-1995,25752,-8840,-6717,16221,23482,-10075,23569,-28437,21465,22297,-2743,-13149,21476,26881,-1131,29095,-23780,-28451,310,-2745,-7808,24520,-28835,17544,-14317,767,-19906,-20587,25832,-21152,-12588,-15208,22579,-17756,-11647,12604,14273,8676,26219,17943,15796,-26002,13949,-29792,11729,-4558,2190,-19522,15513,-13363,-851,-1907,-12989,26
613,-16154,-6971,-8044,-9934,-10322,19922,-16047,11809,-22227,25907,19526,-9625,-1450,14833,-20898,-17350,13105,-1991,17931,14867,-24383,27009,-26864,6029,-12675,-15549,-25706,10299,9581,27869,-32407,23629,24553,-8991,-20666,-22255,1502,-22570,10202,31386,-26381,-21392,4390,32475,19763,-14351,-24768,10754,-17971,23745,4548,22063,29481,17252,-14595,17326,25800,31589,12164,14296,-20205,-5483,20270,-29366,22577,-31471,2865,18108,29136,-22215,28724,32008,-13824,20650,6798,5600,-18020,-28592,-5352,-763,-2203,28979,-28332,16103,-5453,25277,-16429,7508,31963,32485,-21505,-15193,-2576,-31863,-4934,13994,-31863,-11522,-4995,13632,4621,28525,7228,-3643,-22495,26311,-29033,3487,-30844,17687,29251,20669,-12803,21035,23825,-13355,-23118,-9717,-14230,-16140,5470,18543,-8949,-31100,25542,18166,-32310,-8962,26847,-12602,30378,15338,-2283,6550,-22949,-25422,-28797,-2938,31467,32048,-8771,5941,-12290,-13159,1743,-26731,-19226,12387,12045,-18210,-16119,9697,-32262,-28239,17038,1168,15330,-10041,16396,27754,13226,-19999,-20918,4938,-6481,10464,-3497,11935,-22982,3673,-26179,14546,-7487,10732,-2933,-25482,16515,21785,-4439,28996,-820,-19207,-7639,-10802,-3200,4306,-14846,24779,-10880,19482,25772,23515,8551,-31044,-29295,-8599,-12243,-30155,9652,29502,20372,-27879,31372,-19742,22231,-5617,-25683,27167,-17684,26097,-20918,25187,-7744,-1395,-17269,-29208,31915,-7072,-9351,23766,18087,22457,24688,-23935,-18393,-22950,-9471,-32649,27931,19715,22693,-14382,-9027,21696 diff --git a/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice3_golden_int16.csv b/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice3_golden_int16.csv new file mode 100644 index 0000000..940209f --- /dev/null +++ b/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice3_golden_int16.csv @@ -0,0 +1 @@ 
+-14914,14047,-3632,-18461,21142,11630,12759,-19194,-9596,-24268,-19106,-30603,-24220,-25502,4735,13590,7049,3485,16877,12809,8224,26703,16075,762,-25911,5842,18077,30888,-15802,5635,27115,-15568,-1657,22283,-3160,18440,8964,-21785,-9682,-29444,-27754,26643,17276,25511,11648,-14077,-8411,-7252,5289,32465,-4021,2938,41,12553,30790,-26762,32579,-1556,27270,-17422,-15504,-24302,-24719,-6111,30015,16472,13098,14873,-13030,-3226,-24505,-6848,-30727,-27880,-3977,-2516,-24158,-13397,-19505,-9150,10168,-5912,22600,4214,-19590,-6604,-29409,18319,18972,14826,15637,4346,18285,19556,-21498,-10529,10793,22680,23128,18097,-21920,25290,13528,-10112,16249,-11460,-10159,531,28584,-4349,1174,1729,21836,-14435,-12092,25398,-1946,-12991,-21051,-2826,8868,-22247,16304,-4914,-25971,-28543,17599,7239,-24768,2443,-17042,-18628,10611,10864,-7274,8174,23794,-10447,-6790,32536,4949,-26014,-11948,6960,32148,-22879,8212,-11798,-22692,6343,30397,-17445,7861,-24998,29298,-5789,24293,-18317,-15757,-7555,10201,6229,8965,5346,11821,18425,-24538,-9060,-1821,-7325,29708,-4136,-27300,-2710,22873,-20815,-11104,-9600,-30744,7595,21576,23960,-22636,-162,-604,-4455,15641,83,-32246,21332,16243,-6452,-19112,-22447,4049,-27171,13538,-26228,2924,6038,1240,23398,-32742,26899,-8949,14445,21083,-22056,12385,13360,32511,-10717,-10188,-13475,-18842,-14772,29904,295,-5657,-11505,17384,31257,19026,2315,-25207,10157,-17769,-20484,-1639,-21630,14053,-13344,20836,6102,-26676,19832,-21254,26561,16099,24029,-20135,12865,10897,-2538,-11904,5097,10274,-32385,32230,-13546,-22957,-12422,-22418,-30020,2829,29818,19871,2295,17575,-28422,-23954,-15593,-21894,27108,-19521,8362,-7460,-9717,3002,24414,-10485,5799,-18385,18890,-14652,14231,24101,-12922,-17135,-26414,-7658,-1206,-8272,23193,21083,-2476,-22420,-31792,-1170,-18080,-32566,-17588,28352,-11291,17319,-31491,11449,-18061,25179,19781,5076,-9135,-29214,1234,597,19443,-7828,6300,-7333,11169,-20156,27416,-15883,-25068,19317,-27179,18209,1149,32197,20585,-28196,-22862,-32213,-31
485,12787,-13600,20328,16236,-16157,-20892,8753,-13201,-7310,15499,-11147,25861,7922,-24918,-11454,3984,2380,-15228,-20956,13742,-17065,-29140,3896,15535,-19733,-26547,11496,-9037,2883,-14098,16610,29098,32256,22156,14685,5682,28303,22074,2593,-28852,20167,30085,5692,-24921,-9126,-19247,-3063,-21597,-12526,-4905,12104,30238,23044,29072,-6595,-24403,-4743,-3081,1990,26447,4536,-5835,14401,-6509,11570,-12284,5082,15325,19661,-22793,11375,-16795,14664,5997,-9749,-29756,-14061,-5294,12891,28871,-6426,20438,24873,-16765,-21054,20919,-8212,-4154,-866,23025,5233,-14567,1800,-30997,19155,-10576,-27474,6803,17792,-4115,18075,5156,-592,20111,-20629,28064,-25424,21062,-19436,22829,-16038,11222,-4690,8791,27974,-32297,-19032,-30811,12006,-19374,-20888,18999,-9291,10661,11006,5054,18718,5816,-2671,-7296,-28171,23985,24468,-583,7658,-13858,18373,26077,-30908,-912,29945,1293,4780,-12589,23895,29333,24035,6589,16924,15131,-21438,-11129,28423,26294,1953,12206,23196,-5009,-4031,22620,-3831,19933,23963,-21056,28831,-7418,-23126,6941,-23366,6928,-12607,-29613,-2908,28197,-26805,-20134,29323,13998,248,10891,31031,12156,-266,-6808,-26129,10387,-22525,-28403,1953,18768,-20679,-3527,22757,18686,24879,-14159,13504,-14981,-28741,-31903,-16375,1646,13579,14404,-23629,-24321,1783,-9685,21635,-30999,-29721,30076,-2972,29471,-6177,30244,22476,27576,-1250,-27782,2962,-24300,-3655,16795,29406,16803,24875,29435,-27681,-31525,12821,-24813,-5874,16938,-21955,25739,-18065,2710,-17908,-3624,-25477,22368,-11501,30132,-5919,-10613,19677,-10203,-29248,-22134,2429,-21801,-20829,18171,18459,-14621,22198,19064,766,8455,-25693,-5923,-17204,435,-13648,-1052,-23432,-29209,26989,-21911,-10126,-12595,-21020,-8870,5220,-22409,-29969,18964,-3410,-2117,-19324,-9608,7418,-19215,18767,703,-24580,5630,-19487,-6276,-2368,26216,15459,-13745,-23125,-20753,-13181,-20405,-19737,-31949,-18901,-24290,-463,15832,11107,15030,6823,21349,-14718,-8315,4145,-28738,17689,-868,6678,-1433,-24132,-450,-17162,7906,-4364,26527,-20853,-34
17,-15503,-14845,22042,-32608,-6841,7375,22269,-13226,-22006,-7462,10720,-1927,-10959,-7572,9648,32558,1243,-23539,18173,32505,-709,-20607,13637,-5768,16529,-31981,24581,18102,-8089,14265,-25853,24469,26933,32465,23091,-8693,-20327,-12163,80,4068,-7632,30399,31019,-22117,-3993,-26846,-18202,21764,12736,29569,-29114,-18801,23230,13571,-19807,-17342,-15347,-31202,-1153,-16849,-6146,3328,26597,6759,-7213,-15339,7278,-30266,-9888,14111,31566,9452,7041,21256,14007,1183,12943,9830,-31351,-10209,11693,-7282,-12109,650,16887,-27064,16264,-8860,-29722,-15760,10313,-4596,-32735,9492,29847,-2945,4794,3050,23190,20289,-27383,-20686,11622,17378,8026,31480,9903,-16066,-14197,-15129,10586,-20195,-22276,5883,-13796,-17043,4118,-20573,9073,24361,-25555,27399,-7125,2889,27156,28658,-4384,-7717,25475,15224,-10148,8893,-7138,-4991,-14420,-178,-17520,-26097,-23589,-8543,20572,-23567,16197,26587,-29571,7690,-10338,-4212,-20886,-21401,-29250,-17220,21759,-7247,27794,3131,12058,-21664,18591,-13211,-6380,22251,28197,9338,-9536,-18618,16745,10096,28326,-32675,10608,-9489,13532,-28808,14510,-2111,-11449,14720,-5488,-26468,16420,-6013,-5428,18503,-15350,-29216,-21442,27851,-26177,4975,9687,26728,-29169,11110,18922,-3498,-24086,14973,26474,10921,23854,-16367,26049,-19301,-30882,-8398,-31162,-13459,5357,-4620,29840,-20872,28949,-9625,10699,27988,24824,10001,-8377,21957,-17479,31256,-17126,-27723,-23121,477,26359,-4745,-9999,-15164,17463,17520,-32443,25920,-27521,5805,22284,-22451,22768,14144,13714,25966,28101,-12442,-1986,-21693,-455,-18677,2704,27216,542,-23759,-14628,-23935,-28044,-9958,17664,-13534,-21468,-8570,-6222,32653,-3872,-31427,21783,-29209,-18866,32382,412,-2482,-31792,-5476,10925,12113,29760,-5152,-17047,17442,13906,-12465,15796,11340,-20563,18734,322,14917,-32708,25853,-23142,11769,21337,-24254,-15973,5110,7908,-1142,3515,-31764,24428,-1539,-19154,-2010,20691,8148,-30905,30119,-5840,-6193,-16784,-14124,31138,-2772,30848,9232,-32353,6646,13364,22728,-2442,-13698,-17093,-617,8949,129
37,8804,-25528,-16380,-803,1022,8704,15165,9411,-4190,-20421,-30121,-29890,-16816,-18203,-21517,-15441,-30143,22450,3290,-925,-14238,7329,8180,-1607,-8922,10736,-8638,-17438,-6853,32614,-25266,-30035,991,15252,7714,19516,22031,-13129,12488,-1143,-31320,-22172,549,15691,7646,12685,21049,-24272,2954,21662,-1535,-13765,25885,-22376,24715,25998,-26031,-18061,12919,-22011,8446,8372,25018,28101,2722,9492,-5296,5367,-29040,10819,-355,31178,-10138,5754,-25250,-31034,-26776,23449,-31683,1124,19846,29084,-18933,17053,-21954,-4271,-8983,3452,19151,-11377,-303,18534,673,22919,-12222,-16941,3777,13083,31364,29681,-29025,-24126,-17260,-21146,30617,7245,13653,13434,-19397,12386,-4478,-6649,-5715,-10082,-11241,-17657,4000,-9597,8322,-19570,-2681,21797,-28549,13597,5569,23629,11400,-32704,18938,25150,30245,11227,-24854,14601,919,11272,18366,-705,15148,32512,17138,-13815,9938,12207,25634,-21855,-28580,-6430,-29252,10395,16844,-25636,4256,18522,32433,-3675,3708,8167,-1271,1332,26578,29768,-12738,-16590,11905,-28135,18671,-11099,25393,31740,-27542,15685,29709,-13622,27151,27391,24043,-30356,-9704,14780,1902,15820,-566,-6594,4588,-11059,-2098,6999,-25822,12896,-22849,-18709,-24572,18386,29242,-30573,18279,-22428,11399,-26374,-10867,-8014,-9169,7300,8321,2350,25028,-5290,20832,24912,11560,23062,-29779,-726,-22681,31978,7041,-12066,-10256,-24942,-4884,4805,-20731,32706,-6678,22760,-3790,-1164,-17249,-24271,-17841,-7145,1958,-21647,11285,31749,15494,17531,-25474,12911,16209,17703,4240,-4049,2666,-14754,-31191,14720,-24680,13121,24913,-23439,794,-28966,8827,-9925,17548,13465,-13831,27791,17100,22588,25492,-6616,-1728,-16415,24933,20902,-18476,5684,-9203,-26308,-30721,-4750,-6162,-19344,17488,30951,-25851,-12477,-10272,15080,31347,-17133,-17877,-1191,-5083,-13757,-21195,17311,13958,-30850,-9461,-13027,-1853,-4856,-8499,-15059,-5687,17212,-25019,-27277,1963,7695,15299,-20678,22822,19854,22640,10095,-2171,20607,20724,-10592,1724,17951,-9975,3864,9113,17524,-17741,2368,-11142,-11090,-21556,-246
39,26656,-22909,24828,-9020,-28946,10308,-10605,-27135,3183,1211,-23275,-5898,12154,10488,-1663,1383,-3510,25275,-26477,6204,-4865,-3241,-23625,-30212,29192,31309,31530,-16146,22089,25188,-7082,-24708,8825,22830,-1067,13824,14107,18387,30053,22197,-11623,30677,-26216,28801,20826,18769,-2902,-30415,-13775,-27430,26622,12480,10043,15761,24823,-9960,7524,17584,-13111,-25980,-22672,-5964,-19610,-3425,15444,-3974,-1361,15858,-18287,-30663,74,-18098,27262,5306,6077,-9610,7569,-319,17535,27486,20780,-16180,15611,-30671,26814,-30404,-20453,13965,-1628,-7582,15667,-17706,28829,-23140,-11065,-6975,13779,9484,11950,23570,-8911,1444,-29333,17334,-30185,-32492,13516,-29866,8111,-11876,-20067,15363,14199,-32658,-14247,-30491,-5310,6253,29028,-23861,15080,30134,1055,-11469,-23697,199,23761,23611,16030,4093,3540,11583,-21702,20244,17073,10446,8592,-6293,-7126,-16568,31102,-24890,-28831,-18547,-10702,17020,-27230,-28491,-17248,10922,24742,29544,-10614,-23483,17481,-27726,-8377,18765,8842,21587,-32485,-14784,-30092,29352,-22526,-29520,14449,20994,-1108,-26484,16013,23279,-29878,24859,-16356,-16815,-17399,-14268,-19458,12542,-11162,-16272,27888,-28858,-21614,844,23533,-8605,-7526,21430,31387,31595,3417,-2261,-5120,-1688,-18917,-19158,23666,-14007,631,16281,-1369,-6649,-4741,-25673,11685,26386,-11703,18679,-26846,25919,164,12852,28146,30636,-10158,20740,-27582,-4022,-10691,-24726,1218,-26478,-7149,5116,-1850,-876,28690,13147,-23945,18402,-17782,9403,-13310,17715,13429,-18416,9230,5152,-5464,-15139,5319,-21131,15088,31276,25439,18891,11881,-3299,26905,2588,9255,-29021,-18302,-14119,30244,26631,8923,-4827,-19115,27415,-14815,-22832,20959,3043,-16615,-21653,-20719,26689,7965,30817,24903,15009,14097,-9065,-9446,-26889,7168,-1636,22674,-2794,28002,1447,-3006,-16245,4830,-20478,10616,-25799,18612,12707,12519,16952,25122,-1647,-6325,-3098,16122,9359,17351,22291,10332,6816,-22319,-17177,20091,21940,3331,9083,5411,-3751,20145,-16943,22001,-23167,-21451,32710,-8327,-26601,7743,-20368,4972,28501,
1769,668,30434,28260,11737,26344,-10545,-29941,24816,16164,-10236,10910,29092,8996,18599,18004,-8118,-654,5253,653,-11404,-9938,-31022,32302,24053,25354,26960,6211,-29910,-9521,-14036,-28104,-27606,17054,-24488,-28303,32095,25204,-26754,-21052,-3529,-15910,-17844,-7336,-18572,934,13863,28704,-4966,23967,14336,-12799,29333,31062,-17917,27999,25899,-28538,25832,13099,-22614,-32394,-19891,-16867,880,-23959,24912,22749,-25872,14542,31576,23502,-2362,-24739,-31165,-26266,-9164,-22084,23709,1420,28199,-2072,-18223,18987,-10883,10975,27600,-25811,-4568,-6396,-31116,9233,-863,24846,-28105,26275,16308,21590,12052,9105,23312,-30679,28815,27030,-11053,28162,-15694,-31261,18366,-1770,-3183,31905,-1362,32652,-4616,-28809,23930,-28059,-13453,-16503,14041,-29951,-21707,-28455,23988,24122,-1197,15258,-9771,4727,8465,-5858,-32461,32621,-12159,7263,14958,28219,-9944,458,13176,-7216,23129,-13225,31766,-12793,-1683,-6458,-11410,26351,-4445,-8685,-23700,3247,21680,24690,-2974,-6466,23733,-27418,-21386,22686,-1555,-32524,15363,-31687,-17136,1371,23485,-17030,9720,32493,-18520,-20982,-6038,955,-17149,-16797,27480,11231,6673,-3978,9724,-11626,572,-22623,-15649,20879,-19986,7774,27842,14758,19364,10256,-9159,4199,25375,-23728,1547,-19006,1042,22067,-13627,1553,-29626,2285,11699,2322,-27421,-16891,-7642,-27225,10849,-17461,-27226,-19873,-4332,-20613,10835,31550,-12336,-7398,27630,20614,654,-1514,7052,22484,-559,4861,18778,-20091,6636,8811,-23023,18185,9837,-23633,31576,-28938,-30235,9509,-30847,3991,-23141,-1301,-23701,-21829,-18305,-13480,20258,-14573,-15114,18478,18151,-17382,17390,13350,-14497,19552,-21143,30865,-25553,-14215,19610,-8441,20699,21209,-3953,-31268,-6711,-21923,12596,-26056,-31710,-27560,-15608,28624,-20354,29185,27596,-28744,30381,-17116,-3865,11641,-24030,31532,-21600,-17589,-16111,-23863,12660,8220,17297,-17683,12604,30649,4194,25024,11676,-9826,3031,-9054,-9300,10678,-8357,-17848,6380,21433,3170,-27883,27193,27001,-16191,-10049,12640,-18421,20573,-3648,6495,-25826,-688,2
1957,-16448,-1357,-5662,-32595,-13152,-10703,26959,-9292,3949,23413,-27830,-16210,29305,32490,26157,26492,12462,13633,25354,-4761,22572,15698,-28670,-26048,-19268,21406,-15447,15619,2732,-29260,1351,-19798,-31881,32665,11029,24551,-15211,-26468,18644,7538,22140,20261,27442,-15558,-32105,-1054,20287,-14834,-7417,24536,-1254,-31331,19700,20654,-21706,16005,26507,25622,25643,1167,17036,30822,-22470,24048,-18203,17443,32124,20935,-28809,32629,4779,-20508,-21391,-23270,7002,-15807,-9351,29549,-7692,-21877,15591,28183,7313,-26112,-12611,23117,4038,26245,25570,31682,-4737,-12483,-28811,-19593,6198,13304,-16842,21068,14131,18762,-22202,-8409,23841,4468,481,20839,116,15861,-30802,-22988,-17670,-14018,16186,-20589,26544,28124,-14155,27489,-3143,30052,-17685,8394,-3369,19924,-28600,-1636,-8252,3286,7548,-19912,-24414,32520,-13922,14935,15688,23443,7858,-19401,26971,25363,-5250,30936,25384,13546,24699,23530,26943,-31964,25824,11880,-11314,14383,24110,-1060,241,32262,-27571,13872,-25860,25992,20100,-14462,-10918,-2975,-1339,2137,-663,-32575,9561,-29438,-14577,17648,26262,19330,-14063,-2496,-24647,-12033,-9191,-10553,-11959,-9431,31625,-16470,-10382,-24447,-20303,22921,15747,3606,-14555,-5262,938,31325,-5935,-26600,-9858,-30389,22696,-2552,2271,10254,-7217,-3812,21965,-4632,24352,-32061,-4731,-24500,-19447,23311,-32246,16062,-28367,31054,-7090,20334,-26601,-1515,4143,-29043,22124,-3473,-22745,19232,30161,25318,-27854,-29599,7160,22085,-4737,4850,-15883,-24903,-32291,9395,-25578,-2686,26820,-6116,-27162,-15200,-6739,-2909,-8361,-29705,-19824,-22199,-10368,-27891,-14200,6486,1660,-14662,11201,-20100,-29630,-4871,-193,-7288,-8062,21614,21613,22725,-14633,24875,21126,15820,-7011,30699,-20322,3713,29227,-20816,6119,10550,15196,26431,-12042,4408,-31190,6278,-6180,11280,-15210,5752,-7151,30987,12335,-13185,-9306,-16279,28866,29486,5306,19040,19687,-3225,-11727,-23765,-19415,-20512,1456,-30336,-32740,-19130,-10973,31348,12801,31559,16739,11786,30975,-19356,32625,-3764,-15495,-27159,-5045
,14077,11828,15721,-532,-1467,16570,32277,-28795,16098,-2754,5486,29445,-2414,31629,8533,3312,-21295,-12283,-32414,-18016,-6503,-8067,8256,4690,-3237,-1277,-24069,9301,-11492,29152,8374,19627,20304,32619,-3586,-12467,-31484,19410,-10565,23359,13678,-1278,-23195,-18101,-14651,-1221,24217,17576,-6371,-32506,-20610,20506,28160,10455,-20378,31799,-2851,3781,-18688,-23961,-19353,-14741,31354,2505,-585,-4258,-6838,-23671,-18404,-7098,-6676,-8296,-11235,26632,22033,-20498,14763,23458,-19075,10993,-11152,-5871,4107,-12425,11643,18990,23108,-26838,31383,-15758,-28226,-21287,1044,30497,-26352,25249,9782,7054,-9954,-14372,12783,13515,2966,13814,-28643,265,-2056,-21442,-14664,-13198,-1144,-19310,275,25062,-7076,14855,-969,-31095,22432,-9788,-4809,30965,-19942,23326,6296,-762,-26384,-31673,-1041,-2258,26642,22276,27787,20329,24762,-13271,7193,7094,-17570,4993,-4616,10616,17676,-6681,-13829,-30565,25438,17219,7737,22540,-3142,-7282,19,-3199,14044,15768,-16744,16859,-19452,-12970,-8044,-11274,-20573,16363,5846,18893,-6676,-10228,28616,-12982,19272,8909,-23160,-27399,17662,13134,-10902,-12526,29995,-8023,17138,6032,-30445,27218,-22453,-2121,-28791,-3394,-31055,10992,23,21636,-24795,24987,-21720,-26068,-10416,-30027,7409,25758,17912,-27527,21489,23362,11544,-30257,-17491,-25970,-19385,-32430,31057,-24314,965,-18980,-541,-25864,-8492,-11615,-31705,-1915,6940,-20068,-28263,-6344,-922,28343,18110,6362,28862,5149,-16822,29142,-3480,-534,-31268,1873,4458,-17314,28657,-13101,-6452,-31488,4927,-13737,-30156,22679,-2265,-20018,11561,18987,-32717,-8432,13780,29462,26477,22260,28007,-14031,-29200,-28828,-9434,-4601,27363,-19945,-5275,-18735,20283,-1282,-23821,24504,4372,28312,11993,28812,4500,-9011,-22779,5543,-25924,-20885,25032,-31328,-16208,-13638,15868,-7755,-25037,17435,13311,26260,-6406,-8519,16394,-4917,16244,-1316,-23744,-21024,-15427,-31684,-6649,-12854,-23174,7709,12990,-14371,-27095,32536,-17071,-11650,-636,6238,-5291,-5350,3486,31191,-23961,4090,30840,-31524,9834,21790,14680,-2098
8,4380,-31636,10604,11858,-31060,3260,6652,1940,22168,-29303,28899,131,-8741,11122,-18259,20629,-2127,32057,-19186,14671,-15467,-10251,24154,3029,-13624,8688,-15854,27853,24089,-11735,9173,-16035,-22819 diff --git a/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice3_input0_int16.csv b/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice3_input0_int16.csv new file mode 100644 index 0000000..f57d677 --- /dev/null +++ b/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice3_input0_int16.csv @@ -0,0 +1 @@ +-29023,25339,6378,6576,27797,6059,29721,-20496,-578,-17138,29203,29531,-193,26348,-15116,3886,-14075,10520,9241,-920,8708,-4254,8913,-24902,-29511,-3131,-27111,29432,-4983,30724,-773,11317,27605,-97,5232,-1551,-15750,2256,11259,-14800,-16279,-27262,-3110,-27532,-14572,32459,-7523,3835,8710,-24789,19885,25182,8876,-27899,-12489,16047,-28171,25709,-8649,-21652,20538,16884,-20334,31910,8691,22576,-31283,5775,-31549,-4330,-18755,1430,-6368,17138,-32528,-19475,-25216,-25109,-1628,5127,27454,-2499,-6060,-21640,23091,11924,14613,-14334,-17333,12458,-32039,32294,29011,18843,-14992,-2466,23252,7996,7949,-25348,258,27697,-2232,31848,-23888,-24701,21573,-6614,26314,5873,24926,-14478,-1558,-4211,-12608,15345,-7639,-25828,11343,16917,15510,-28658,-7135,-6158,-27876,28376,15355,1654,5428,12930,27554,-14756,14716,4174,-4706,-16082,12784,-32012,15909,-3568,31151,-29646,-28279,-32150,19430,29329,-19654,-13003,19531,22739,-31045,-14559,26575,-20240,697,-13241,29369,-8634,19592,-7179,19426,-31626,21901,11830,9842,-27747,16602,14241,-10874,20668,8699,-12044,21603,4343,3849,-18390,-3514,17056,-32502,7604,-16012,-19663,28114,-10191,23764,-30210,-18063,19800,-1446,-22699,-5596,5745,-15667,20542,5608,7873,16725,-14068,21978,-21361,-15261,11929,-18189,-2394,21689,-1554,-12883,27857,145,19921,21493,-22454,15200,2087,16890,-16201,-32564,2715,14856,-21397,29366,-32424,18885,20851,-26297,-17550,-25166,11297,-26583,24975,-1195,252
8,18200,19882,22137,-20619,7084,-25459,-11359,29806,-30287,-26840,-19264,-24501,15684,-31808,-32745,-60,-14921,27336,11442,2687,-4770,-19567,29216,-3755,-6278,-29438,9606,-16799,29352,-25108,-13191,-18834,-12112,-10792,-6562,-19733,-20283,1721,5618,-20404,13097,30636,-17481,-25568,19858,-17654,18867,-13582,12825,-8489,-7798,-23543,-14991,-27351,12489,-19932,-8939,-23583,-8936,13101,-12989,19129,-25164,31827,-17507,-2776,-11673,24761,22199,-20084,19884,5441,-14261,-27853,-12873,9035,-4844,9988,-11641,9822,-7037,10262,-1553,1081,-11558,-25414,25643,-22973,-20746,-28301,26810,-24109,24995,-13477,873,-9987,27450,-13680,-22274,-3392,-4897,-2883,15886,-20483,-6198,10067,17060,1073,30621,28215,-18637,16128,-30059,-3145,3191,28177,-21425,-13226,20549,3091,26288,15770,-15588,7521,26947,19678,-5669,3121,25030,-17690,7364,19062,-27253,3595,-17271,-14827,-3062,20646,29187,-31531,14556,-22138,5418,-3215,-11710,29506,-3952,15071,25048,-23150,-14396,6315,17477,3909,16970,11982,-30901,25108,2690,-11701,-23322,-26522,-16053,16647,22822,-16846,-12089,11804,1782,-28586,23409,-18831,2146,13255,-29431,18671,13042,20093,-4431,17556,6337,-26032,18301,-6220,-31321,-16609,-17664,23302,-2342,-27532,-9830,-18356,-10227,19567,-15702,-13931,11080,-17632,23781,-17713,15055,1899,-12780,-23824,27649,21102,4449,-14893,15038,-28543,-29569,-5924,-9070,10246,-15234,5096,-28599,-12578,-10373,-25176,5423,6376,31790,-20361,10480,25576,1227,9751,3845,20108,-23318,-15514,24722,28123,14266,390,29209,-8118,-1473,-14634,-28492,-28822,15633,-10941,14359,1385,27216,31192,28669,17383,26885,-363,-13927,10698,-26297,-28776,24563,32090,-5071,-18087,-19874,21358,7480,-10461,-29979,-3146,7382,-27037,-11008,15451,10358,-25035,-20129,12867,17134,21685,-2880,12239,4291,-22562,10129,-7707,-24944,-31169,19202,-28549,-13641,-2403,16438,6323,31515,23830,-4570,-20326,-16853,4487,12649,-14613,-12648,-8624,-8685,-9275,6325,9655,-13017,-6030,7426,-20905,30148,-18585,11798,20518,870,30203,6509,-5408,-4778,-29661,30807,-1572,-423
6,7313,9578,-9515,-5671,7823,-8286,-32718,6531,-5368,20124,22033,-19461,-28109,23581,-1476,13207,24420,17614,-5762,-14994,-31804,26420,-20064,29414,-15000,-10077,185,-17128,5445,-8392,-29325,2480,21044,17677,26938,-3490,-2590,-23316,8848,29619,-15375,26673,21225,-15939,12182,-29323,-15371,-20678,-19836,-27126,-29639,-30786,-22083,-19104,23108,-5568,31139,-14607,-9537,26743,-11072,15845,-28760,30269,12531,29067,-17990,-30714,-10951,-24405,862,29680,-23820,-32403,8652,21673,-2532,-15631,-9396,156,-29542,10564,-28237,-4774,-27600,-24483,-14807,-29308,-12246,9402,-30849,8972,-2051,30994,-13654,-21060,-26698,-8876,-4481,-25702,-18477,5210,-17780,31372,-21949,-29283,-28470,-10260,-23021,661,-27650,25540,-7489,-25739,11098,-21763,12897,9845,-11531,-2317,-4099,27679,12161,23682,-18838,-18255,13938,10618,-3068,8337,-28273,11602,24067,-12450,16667,22753,-21795,-29127,5396,11839,-22186,-23809,12084,-12299,-21673,24254,17989,8370,4192,-32072,-21168,-11664,-24478,14161,32407,-7018,12258,-23464,6561,19704,-7981,18382,-21115,-22730,-13688,32493,16365,28191,9333,-4697,17901,-12930,-21778,-24428,7167,22889,-7811,9504,-17441,17068,15080,-21468,14132,20144,-29652,31281,-29374,-32241,-1646,-30340,-15477,-28218,21450,21065,2282,-10988,8173,-192,-2874,6790,8992,-24150,-10639,24490,16206,4901,2827,4072,11847,27687,-11235,5636,-18393,28945,-3935,-20038,-12179,-17022,-31984,9403,-7221,-9638,29531,-11595,-25097,31956,5485,-11495,22754,-19362,-31580,2079,28274,11837,4963,-1278,-21989,17667,19113,17763,8185,31134,7495,-9615,15176,31568,30341,32586,23927,-1066,27491,-9309,-7529,4365,14485,-11602,-25259,-29680,-16677,-19428,24982,13574,6944,29996,24825,24943,23619,28178,16040,27759,9773,5617,26421,18961,-25141,15344,5129,-28874,-9405,10031,-32511,-15003,11555,2356,-29887,26574,-21154,-11452,-24487,-771,24343,25616,-10558,26860,-21908,28536,-5898,-14074,29269,-27072,-23092,-1288,-13628,-22683,5766,-20459,20498,-11480,20886,3824,5930,2732,10823,30640,15255,32127,13712,-4419,16496,-28459,25398,1748
7,1030,-24770,-23292,-11537,-24408,-20950,-2056,-12006,9344,-18571,31974,-26712,-25108,-26524,3847,27998,-30054,-28445,22974,-18348,14678,-30297,-14821,-1302,-6912,7622,16541,16481,-26203,497,-32378,8595,28771,-22508,16819,25400,955,20398,1389,11891,23383,-5496,-972,-21975,16417,28726,-7530,18784,-21786,16124,30275,-26517,-16207,-6454,16770,-31537,-5752,31842,23154,-4052,23937,20864,-19489,-9567,-30152,-7013,30177,-5405,-1745,12281,-7610,763,9819,11721,-22758,-21903,-26294,25902,17373,-1188,-27406,20263,-11676,16449,-25003,-19132,-32738,13290,-3101,-10303,21662,29644,15789,-27333,32668,28594,20225,-7673,2084,7604,-7410,-12749,16397,-13510,-12519,-32624,-24770,-18763,84,-6455,17056,27898,8903,-50,-15428,-8761,-2328,23356,-13886,21540,-22256,-30513,24226,9723,18626,-16187,-17176,31166,11929,-6048,-10069,22542,7333,29811,24008,5472,17445,-21828,3253,32482,-12538,29092,12232,9647,-31743,2419,11595,-12348,14957,15618,25056,20518,5275,32383,1107,-1936,-8495,-11911,3467,-32128,26496,4337,25949,27905,8097,21643,-10848,-31420,32106,19776,2978,10982,6525,-9568,-18263,14295,18127,24426,9146,-16160,-30292,15345,-25741,-2973,24879,-4437,-30859,-4886,-26921,-1759,-31564,-24614,-15318,17124,-30726,11568,-265,15023,12376,-26376,-9426,345,-5966,-2616,-14590,-12590,15961,-11001,-32411,-32681,-4677,9411,-12400,-4899,4718,22081,-25406,17863,-23756,-6776,-19524,4719,-13739,-27661,3119,24310,11237,-29897,-2835,11970,3826,-11751,-30967,1440,19397,32510,-28982,4924,3856,2413,2342,-14304,-14845,-24858,-17499,-32644,19414,29379,3124,-16982,-10336,27300,5665,-8745,15425,-12271,-10474,24837,20792,1133,3107,18544,-28513,-14643,30948,-20858,4998,-32058,22073,2674,1898,-6802,-30907,-10494,-2621,29215,-21170,28862,-11631,-19420,27389,-24454,-17438,26743,16020,11017,23703,-14235,-25072,-17129,-20995,31080,2860,-11152,8092,8303,16691,23377,11767,-27519,-30163,31216,-15500,-21958,24018,28323,27825,-12075,4171,-3886,-19747,17985,32756,950,15077,-27939,20095,-10943,7101,5280,22128,30761,-21131,-21937,2
5757,5281,9961,-24280,8434,-28116,-2243,-10623,-24495,22858,13539,5756,11424,-17552,6915,2236,-12901,15680,-5839,13242,-25291,19591,8953,-11748,-26530,29869,-28610,21570,24158,-13130,2258,-12784,18768,-31155,30888,-21678,-6563,-8309,-30683,-32643,-15824,13662,8363,869,8235,7430,-16362,-3836,13657,5055,20172,32576,-13710,25946,-27546,19704,837,5499,2397,-3850,18120,-17432,-16552,24722,1286,19054,1794,17109,18479,-5185,-30433,16156,32140,29179,27740,-27856,-17899,20135,-24123,-9498,-32498,-23517,13592,-11785,28591,-6444,-220,-32297,17623,890,-13577,20008,-27304,-18830,-13589,-27569,26314,1912,15443,22372,-6162,8880,-31999,5298,-6876,-5737,-16950,-315,87,18916,32742,-23155,17883,6979,-9324,29333,-17847,-27281,-11326,16469,15003,10544,-28764,5870,2308,-28433,28759,31309,5428,30271,-29998,26392,26004,-7992,-21730,16389,-9310,22201,17802,-14887,-12263,-29844,14384,-28907,15351,11073,-28542,-26220,23386,-31705,2620,25692,27478,12370,18009,21202,-9977,5884,-24364,16424,-26921,29530,-26087,27510,14019,-31925,31203,-1814,-5774,17422,23577,628,-1611,-14226,-25615,24777,17984,-5868,16248,-12400,15112,10663,-1010,-11268,12993,-7137,3720,-12051,27438,-28257,27240,-1939,-18193,-19129,-10392,-15319,-24080,-3065,2684,7561,-28832,-31871,-24352,-9003,-9186,21180,10133,2666,7217,24024,-22588,-25057,-14673,2077,612,-9015,-15240,2625,-18240,-11359,-22905,-26662,31191,31916,-4294,-17201,-995,14260,30106,-22580,11089,-17043,4896,12536,-14727,-30705,2170,-30400,-31275,7609,-10816,27817,16924,-6599,5199,-15645,5038,31597,-30934,13676,13447,3663,17920,23502,6401,-30173,-16226,22386,16743,26622,13570,-6330,5419,-795,22853,22707,22123,-4507,6673,-12185,-24799,-1456,26890,23237,-3078,-23465,-25780,18880,24577,18225,5065,5622,18869,24023,-7735,309,407,-1292,18505,-14479,-28988,-7560,13621,-11144,30383,-10306,-2216,-31267,-27791,22490,17817,5457,-25745,-7446,-23564,-2291,1944,-23355,8327,-32476,-4229,32179,-12298,-24118,21350,-12495,-27154,-2605,-24333,-30813,-24264,29366,27330,28160,-7477,-1411,-
14342,-32507,12714,2743,-15616,24862,4093,22474,8877,-4678,10247,6036,15247,-21034,-27581,-24137,16129,-7734,15519,-9296,-16748,10525,29173,-8013,-10356,19738,31912,17480,32011,24561,-3246,-4909,-29348,7225,-8686,-24690,13488,-28664,6058,-228,24885,30101,14276,-10046,18438,32152,779,-4123,22049,-25744,17113,-22270,6282,-27550,13893,2894,-8628,27399,27967,-18065,16537,16522,2562,-12541,13914,-7525,26560,20647,15784,-3760,16491,-8990,23567,5851,12384,-16630,14203,-21106,-8421,6915,-24906,-24824,4242,5,25327,-11401,-12320,26525,2562,-6448,7571,-21291,-30312,-1384,-7237,16070,-23204,5316,1868,23481,-8192,5534,29617,-17289,22798,-12013,15315,-25670,14627,8538,-5432,13693,-21816,1206,1172,-26160,-7769,-29639,24235,15444,6140,18406,25281,15602,27334,25029,-14907,28536,15396,-17770,31214,-1159,25247,13999,-9795,-28354,-27763,14933,-26247,-29062,17182,1773,6367,-23597,-25838,-12559,-6183,-24141,13198,-29239,-17946,-7939,-2989,2699,-24917,24450,23566,28969,-30532,14157,14604,27015,11455,24961,-2908,-2179,-10790,-31288,-28179,31559,-5839,-3339,-32148,7262,13919,298,-22906,-26125,4303,3635,17925,15566,11643,21946,-17110,31038,-16732,-7718,32463,10866,-1280,-9829,-7920,-8536,16865,29597,18859,-27882,-14073,-17978,-17010,-8533,27412,-18916,14016,-6855,-433,4385,-4233,16208,-5701,28477,-18614,1469,12205,29742,29392,13992,12716,-133,9354,2949,-8423,-1527,-23039,-15185,20369,-12537,10980,10189,-12617,11514,620,-32267,-10571,7610,9367,-11960,-26772,22547,3457,-1753,-29647,-14711,-3678,-31526,20759,1758,773,3737,22146,31012,55,-13552,14664,1244,2532,13725,-26321,22212,-12819,-6163,-30827,8883,-3514,-27485,8657,11584,30195,-2532,4713,12956,29878,25810,15179,-4836,-4894,-4016,24626,2615,-13404,-8783,25458,13437,-3922,4248,30095,-16103,-12270,-2075,-27811,6644,20714,13135,28907,6417,-18174,28606,-32352,30599,-28859,-25974,29910,-3021,-13982,-1213,-18083,-15956,15999,29438,26916,10340,-12012,25950,2577,8225,4616,23370,-28502,-4873,1200,-13949,28750,1838,-31771,-2179,6163,-29988,-3599,1530
1,-13919,23962,12188,-23183,-28663,18246,-11362,23936,-14720,31888,-31874,3314,15981,3281,-23099,-7969,29702,-22074,2854,17342,-14019,-20884,31579,17946,-2277,23378,-5770,23258,3201,29978,1617,23430,-8545,15996,32380,21527,-403,6033,-26093,20112,19289,-31487,799,-17614,-5359,-3923,-17923,1449,22156,22907,11918,13554,11269,3645,-5354,-11350,18386,-15406,-20272,32254,-27134,12121,-19929,-26354,-29050,32511,-24444,2087,6455,-32212,1954,-5943,-2789,-18810,16072,-17250,21214,16855,-19643,1079,-22215,-15002,-2861,-24068,-701,29942,1282,-1407,-31136,-19245,17379,-25481,-569,32640,-2724,-26687,4050,-27049,-22971,25644,-22971,27845,-20696,6035,-6024,-23078,-12415,9180,-12651,13430,2538,-10761,23219,-20027,22176,26414,29866,6173,12311,4275,926,-16029,24555,-16906,6137,-3873,-27632,-7658,-25385,-22592,21089,27961,4790,12345,-1227,-22728,-11664,-14071,5338,-883,-25583,-11701,-14472,-26711,-21395,26935,31255,-23793,-14193,-7942,-12316,-29997,-364,1152,6641,1661,-27908,-22534,23148,-29003,20281,3189,4086,-15679,31477,-4528,4552,7730,14421,-156,-17178,-19061,21720,7846,-25271,19859,-17428,26846,-19329,1547,-9865,-9728,26483,-24011,-22289,-25997,-25734,6025,-13551,308,5534,28823,-4253,-8285,10908,7621,-10083,22265,15263,-28946,32058,31546,9351,12074,-22557,20785,-25396,-27058,-19483,15611,10796,-16455,3269,-18357,23827,-19576,-12878,-26682,31267,-5909,12814,10829,32129,732,31086,-17970,-17493,-3899,-13014,-24365,13538,31821,16985,13743,19018,12375,-735,24025,21191,-18490,30325,-30519,-8086,-25523,20443,-32652,14398,-15680,1699,-22586,4559,-8096,24313,20795,-6646,13376,-21759,-26852,14553,9462,8894,12295,11379,31517,-31852,6516,-5595,26869,14674,12132,-15327,-13781,11847,17599,2927,-5081,-27741,-4694,9646,20020,7609,21029,11135,-15492,30197,20491,26508,-8970,-28613,17342,13989,-11131,-17602,-20871,12763,262,-29788,-31259,26788,20847,-3514,-8910,1856,-4063,5796,6575,31380,15216,-6135,-10962,-25690,25946,-1075,-4151,28280,-25629,-8433,16395,-4894,-27668,-28131,30295,4479,13992,-18705,
14587,14541,-15413,-1705,-6853,-5864,-6928,-5899,19303,9491,-18939,-30626,31552,12964,-1256,-11213,-2104,-26036,-17209,-31249,15951,-27406,5927,18147,28236,-16458,22527,-19036,-27327,18592,-15092,-11427,-25941,28839,13928,2662,-10718,5381,19846,-8758,10798,26844,-20357,-19667,8014,31420,-32115,-24880,3215,-11368,-28692,-15376,11927,-6973,19835,20536,9469,-22186,28790,-32321,18781,-19280,18018,-1844,26496,-11025,20093,4600,11020,-11751,30966,-18770,-243,3450,-4369,-7122,-10048,16007,15692,16719,-6726,-1509,19442,-5059,-9123,10438,3020,7687,15828,-20954,-8382,26593,-27536,1558,28216,27990,29986,-23172,10212,-4901,100,23691,-31437,-21240,-8981,-31224,-1601,16119,29551,3422,3172,15943,-25426,-14943,26437,29099,25637,20731,652,-31885,-1875,13024,18004,3184,-21820,-31871,22814,5652,11117,-9318,-17376,2122,-30221,-4190,-32473,2824,17172,-30227,-2518,-7999,3321,-28380,10960,-469,-4769,-30185,-17209,-29556,23620,8013,27168,-4840,-17077,-31179,24072,10898,30227,-5052,-24119,-26578,3218,-18686,-1572,-27894,15917,-28079,-5454,384,13797,-9295,-32551,30849,27683,17377,23408,-22229,9208,29010,-3122,16525,-14080,-862,-13753,-5699,-31133,-7474,10984,9484,-23197,-15154,-5456,-7646,-3266,-14022,4006,-28086,-19416,-7290,18392,11457,-28161,-4308,16711,-20896,27805,16924,24034,12381,-13034,-23763,2199,24931,-15750,-10237,-7808,-18883,-9246,-19084,20179,29143,4860,-13857,-26705,-8256,30277,-32445,-32425,-19654,-1924,31968,9504,-11959,-8033,-2773,-15873,-20678,4347,-11157,14366,-22217,24101,23533,17755,-22874,7427,-8064,-22694,24854,30409,25053,-25820,32281,18427,-4717,-31626,-12658,-12826,30391,-27242,-28863,-28980,-16836,16910,-3035,24733,-30154,-27928,-14513,-19206,-17433,-6551,15891,15844,-21134,25153,-26611,24212,17775,15535,13779,1208,8847,-32406,17472,-10575,-12509,32560,16436,-31159,18398,-30787,14006,14889,24280,5407,-2348,10492,24550,-6826,17377,-13626,-9152,-1669,15161,26470,16600,12020,-18662,-18875,-22127,-4993,10122,15586,14613,16328,7338,-1178,8752,25121,-26035,18317,8060,-2
2152,-4468,-27180,27511,-29902,18409,-28272,-12187,23615,-13875,157,31654,-24569,31717,15729,-16959,-20310,16278,-17146,23927,31817,5812,31995,13585,-14639,-13900,-26471,26329,29220,31516,-25909,-25867,-5713,-13434,-3636,10737,10758,-27097,13187,21403,-1093,18112,23120,-8447,-24440,-27805,29922,18230,31618,-12119,-1046,-5695,-11537,2920,-19375,-11467,-20877,-30122,-8100,29729,-8869,-26991,4787,-13555,6635,21138,28753,15997,32611,-4303,-32677,-31317,-16719,-13821,8434,20663,6194,19045,-20467,3591,25428,2166,8662,20698,-30564,-25674,-22254,8946,30251,1156,-23612,-5230,-30726,-32707,-25988,13826,-7919,31632,14809,-27543,-14457,-5453,16923,19661,-13723,15418,-28058,-18962,-15282,-272,-6417,16112,-19228,10664,-187,20973,-21842,16295,19255,12745,-4837,10916,-14914,14047,-3632,-18461,21142,11630,12759,-19194,-9596,-24268,-19106,-30603,-24220,-25502,4735,13590,7049,3485,16877,12809,8224,26703,16075,762,-25911,5842,18077,30888,-15802,5635,27115,-15568,-1657,22283,-3160,18440,8964,-21785,-9682,-29444,-27754,26643,17276,25511,11648,-14077,-8411,-7252,5289,32465,-4021,2938,41,12553,30790,-26762,32579,-1556,27270,-17422,-15504,-24302,-24719,-6111,30015,16472,13098,14873,-13030,-3226,-24505,-6848,-30727,-27880,-3977,-2516,-24158,-13397,-19505,-9150,10168,-5912,22600,4214,-19590,-6604,-29409,18319,18972,14826,15637,4346,18285,19556,-21498,-10529,10793,22680,23128,18097,-21920,25290,13528,-10112,16249,-11460,-10159,531,28584,-4349,1174,1729,21836,-14435,-12092,25398,-1946,-12991,-21051,-2826,8868,-22247,16304,-4914,-25971,-28543,17599,7239,-24768,2443,-17042,-18628,10611,10864,-7274,8174,23794,-10447,-6790,32536,4949,-26014,-11948,6960,32148,-22879,8212,-11798,-22692,6343,30397,-17445,7861,-24998,29298,-5789,24293,-18317,-15757,-7555,10201,6229,8965,5346,11821,18425,-24538,-9060,-1821,-7325,29708,-4136,-27300,-2710,22873,-20815,-11104,-9600,-30744,7595,21576,23960,-22636,-162,-604,-4455,15641,83,-32246,21332,16243,-6452,-19112,-22447,4049,-27171,13538,-26228,2924,6038,1240,23398,-3
2742,26899,-8949,14445,21083,-22056,12385,13360,32511,-10717,-10188,-13475,-18842,-14772,29904,295,-5657,-11505,17384,31257,19026,2315,-25207,10157,-17769,-20484,-1639,-21630,14053,-13344,20836,6102,-26676,19832,-21254,26561,16099,24029,-20135,12865,10897,-2538,-11904,5097,10274,-32385,32230,-13546,-22957,-12422,-22418,-30020,2829,29818,19871,2295,17575,-28422,-23954,-15593,-21894,27108,-19521,8362,-7460,-9717,3002,24414,-10485,5799,-18385,18890,-14652,14231,24101,-12922,-17135,-26414,-7658,-1206,-8272,23193,21083,-2476,-22420,-31792,-1170,-18080,-32566,-17588,28352,-11291,17319,-31491,11449,-18061,25179,19781,5076,-9135,-29214,1234,597,19443,-7828,6300,-7333,11169,-20156,27416,-15883,-25068,19317,-27179,18209,1149,32197,20585,-28196,-22862,-32213,-31485,12787,-13600,20328,16236,-16157,-20892,8753,-13201,-7310,15499,-11147,25861,7922,-24918,-11454,3984,2380,-15228,-20956,13742,-17065,-29140,3896,15535,-19733,-26547,11496,-9037,2883,-14098,16610,29098,32256,22156,14685,5682,28303,22074,2593,-28852,20167,30085,5692,-24921,-9126,-19247,-3063,-21597,-12526,-4905,12104,30238,23044,29072,-6595,-24403,-4743,-3081,1990,26447,4536,-5835,14401,-6509,11570,-12284,5082,15325,19661,-22793,11375,-16795,14664,5997,-9749,-29756,-14061,-5294,12891,28871,-6426,20438,24873,-16765,-21054,20919,-8212,-4154,-866,23025,5233,-14567,1800,-30997,19155,-10576,-27474,6803,17792,-4115,18075,5156,-592,20111,-20629,28064,-25424,21062,-19436,22829,-16038,11222,-4690,8791,27974,-32297,-19032,-30811,12006,-19374,-20888,18999,-9291,10661,11006,5054,18718,5816,-2671,-7296,-28171,23985,24468,-583,7658,-13858,18373,26077,-30908,-912,29945,1293,4780,-12589,23895,29333,24035,6589,16924,15131,-21438,-11129,28423,26294,1953,12206,23196,-5009,-4031,22620,-3831,19933,23963,-21056,28831,-7418,-23126,6941,-23366,6928,-12607,-29613,-2908,28197,-26805,-20134,29323,13998,248,10891,31031,12156,-266,-6808,-26129,10387,-22525,-28403,1953,18768,-20679,-3527,22757,18686,24879,-14159,13504,-14981,-28741,-31903,-16375,16
46,13579,14404,-23629,-24321,1783,-9685,21635,-30999,-29721,30076,-2972,29471,-6177,30244,22476,27576,-1250,-27782,2962,-24300,-3655,16795,29406,16803,24875,29435,-27681,-31525,12821,-24813,-5874,16938,-21955,25739,-18065,2710,-17908,-3624,-25477,22368,-11501,30132,-5919,-10613,19677,-10203,-29248,-22134,2429,-21801,-20829,18171,18459,-14621,22198,19064,766,8455,-25693,-5923,-17204,435,-13648,-1052,-23432,-29209,26989,-21911,-10126,-12595,-21020,-8870,5220,-22409,-29969,18964,-3410,-2117,-19324,-9608,7418,-19215,18767,703,-24580,5630,-19487,-6276,-2368,26216,15459,-13745,-23125,-20753,-13181,-20405,-19737,-31949,-18901,-24290,-463,15832,11107,15030,6823,21349,-14718,-8315,4145,-28738,17689,-868,6678,-1433,-24132,-450,-17162,7906,-4364,26527,-20853,-3417,-15503,-14845,22042,-32608,-6841,7375,22269,-13226,-22006,-7462,10720,-1927,-10959,-7572,9648,32558,1243,-23539,18173,32505,-709,-20607,13637,-5768,16529,-31981,24581,18102,-8089,14265,-25853,24469,26933,32465,23091,-8693,-20327,-12163,80,4068,-7632,30399,31019,-22117,-3993,-26846,-18202,21764,12736,29569,-29114,-18801,23230,13571,-19807,-17342,-15347,-31202,-1153,-16849,-6146,3328,26597,6759,-7213,-15339,7278,-30266,-9888,14111,31566,9452,7041,21256,14007,1183,12943,9830,-31351,-10209,11693,-7282,-12109,650,16887,-27064,16264,-8860,-29722,-15760,10313,-4596,-32735,9492,29847,-2945,4794,3050,23190,20289,-27383,-20686,11622,17378,8026,31480,9903,-16066,-14197,-15129,10586,-20195,-22276,5883,-13796,-17043,4118,-20573,9073,24361,-25555,27399,-7125,2889,27156,28658,-4384,-7717,25475,15224,-10148,8893,-7138,-4991,-14420,-178,-17520,-26097,-23589,-8543,20572,-23567,16197,26587,-29571,7690,-10338,-4212,-20886,-21401,-29250,-17220,21759,-7247,27794,3131,12058,-21664,18591,-13211,-6380,22251,28197,9338,-9536,-18618,16745,10096,28326,-32675,10608,-9489,13532,-28808,14510,-2111,-11449,14720,-5488,-26468,16420,-6013,-5428,18503,-15350,-29216,-21442,27851,-26177,4975,9687,26728,-29169,11110,18922,-3498,-24086,14973,26474,10921,23
854,-16367,26049,-19301,-30882,-8398,-31162,-13459,5357,-4620,29840,-20872,28949,-9625,10699,27988,24824,10001,-8377,21957,-17479,31256,-17126,-27723,-23121,477,26359,-4745,-9999,-15164,17463,17520,-32443,25920,-27521,5805,22284,-22451,22768,14144,13714,25966,28101,-12442,-1986,-21693,-455,-18677,2704,27216,542,-23759,-14628,-23935,-28044,-9958,17664,-13534,-21468,-8570,-6222,32653,-3872,-31427,21783,-29209,-18866,32382,412,-2482,-31792,-5476,10925,12113,29760,-5152,-17047,17442,13906,-12465,15796,11340,-20563,18734,322,14917,-32708,25853,-23142,11769,21337,-24254,-15973,5110,7908,-1142,3515,-31764,24428,-1539,-19154,-2010,20691,8148,-30905,30119,-5840,-6193,-16784,-14124,31138,-2772,30848,9232,-32353,6646,13364,22728,-2442,-13698,-17093,-617,8949,12937,8804,-25528,-16380,-803,1022,8704,15165,9411,-4190,-20421,-30121,-29890,-16816,-18203,-21517,-15441,-30143,22450,3290,-925,-14238,7329,8180,-1607,-8922,10736,-8638,-17438,-6853,32614,-25266,-30035,991,15252,7714,19516,22031,-13129,12488,-1143,-31320,-22172,549,15691,7646,12685,21049,-24272,2954,21662,-1535,-13765,25885,-22376,24715,25998,-26031,-18061,12919,-22011,8446,8372,25018,28101,2722,9492,-5296,5367,-29040,10819,-355,31178,-10138,5754,-25250,-31034,-26776,23449,-31683,1124,19846,29084,-18933,17053,-21954,-4271,-8983,3452,19151,-11377,-303,18534,673,22919,-12222,-16941,3777,13083,31364,29681,-29025,-24126,-17260,-21146,30617,7245,13653,13434,-19397,12386,-4478,-6649,-5715,-10082,-11241,-17657,4000,-9597,8322,-19570,-2681,21797,-28549,13597,5569,23629,11400,-32704,18938,25150,30245,11227,-24854,14601,919,11272,18366,-705,15148,32512,17138,-13815,9938,12207,25634,-21855,-28580,-6430,-29252,10395,16844,-25636,4256,18522,32433,-3675,3708,8167,-1271,1332,26578,29768,-12738,-16590,11905,-28135,18671,-11099,25393,31740,-27542,15685,29709,-13622,27151,27391,24043,-30356,-9704,14780,1902,15820,-566,-6594,4588,-11059,-2098,6999,-25822,12896,-22849,-18709,-24572,18386,29242,-30573,18279,-22428,11399,-26374,-10867,-8014,-9
169,7300,8321,2350,25028,-5290,20832,24912,11560,23062,-29779,-726,-22681,31978,7041,-12066,-10256,-24942,-4884,4805,-20731,32706,-6678,22760,-3790,-1164,-17249,-24271,-17841,-7145,1958,-21647,11285,31749,15494,17531,-25474,12911,16209,17703,4240,-4049,2666,-14754,-31191,14720,-24680,13121,24913,-23439,794,-28966,8827,-9925,17548,13465,-13831,27791,17100,22588,25492,-6616,-1728,-16415,24933,20902,-18476,5684,-9203,-26308,-30721,-4750,-6162,-19344,17488,30951,-25851,-12477,-10272,15080,31347,-17133,-17877,-1191,-5083,-13757,-21195,17311,13958,-30850,-9461,-13027,-1853,-4856,-8499,-15059,-5687,17212,-25019,-27277,1963,7695,15299,-20678,22822,19854,22640,10095,-2171,20607,20724,-10592,1724,17951,-9975,3864,9113,17524,-17741,2368,-11142,-11090,-21556,-24639,26656,-22909,24828,-9020,-28946,10308,-10605,-27135,3183,1211,-23275,-5898,12154,10488,-1663,1383,-3510,25275,-26477,6204,-4865,-3241,-23625,-30212,29192,31309,31530,-16146,22089,25188,-7082,-24708,8825,22830,-1067,13824,14107,18387,30053,22197,-11623,30677,-26216,28801,20826,18769,-2902,-30415,-13775,-27430,26622,12480,10043,15761,24823,-9960,7524,17584,-13111,-25980,-22672,-5964,-19610,-3425,15444,-3974,-1361,15858,-18287,-30663,74,-18098,27262,5306,6077,-9610,7569,-319,17535,27486,20780,-16180,15611,-30671,26814,-30404,-20453,13965,-1628,-7582,15667,-17706,28829,-23140,-11065,-6975,13779,9484,11950,23570,-8911,1444,-29333,17334,-30185,-32492,13516,-29866,8111,-11876,-20067,15363,14199,-32658,-14247,-30491,-5310,6253,29028,-23861,15080,30134,1055,-11469,-23697,199,23761,23611,16030,4093,3540,11583,-21702,20244,17073,10446,8592,-6293,-7126,-16568,31102,-24890,-28831,-18547,-10702,17020,-27230,-28491,-17248,10922,24742,29544,-10614,-23483,17481,-27726,-8377,18765,8842,21587,-32485,-14784,-30092,29352,-22526,-29520,14449,20994,-1108,-26484,16013,23279,-29878,24859,-16356,-16815,-17399,-14268,-19458,12542,-11162,-16272,27888,-28858,-21614,844,23533,-8605,-7526,21430,31387,31595,3417,-2261,-5120,-1688,-18917,-19158,2366
6,-14007,631,16281,-1369,-6649,-4741,-25673,11685,26386,-11703,18679,-26846,25919,164,12852,28146,30636,-10158,20740,-27582,-4022,-10691,-24726,1218,-26478,-7149,5116,-1850,-876,28690,13147,-23945,18402,-17782,9403,-13310,17715,13429,-18416,9230,5152,-5464,-15139,5319,-21131,15088,31276,25439,18891,11881,-3299,26905,2588,9255,-29021,-18302,-14119,30244,26631,8923,-4827,-19115,27415,-14815,-22832,20959,3043,-16615,-21653,-20719,26689,7965,30817,24903,15009,14097,-9065,-9446,-26889,7168,-1636,22674,-2794,28002,1447,-3006,-16245,4830,-20478,10616,-25799,18612,12707,12519,16952,25122,-1647,-6325,-3098,16122,9359,17351,22291,10332,6816,-22319,-17177,20091,21940,3331,9083,5411,-3751,20145,-16943,22001,-23167,-21451,32710,-8327,-26601,7743,-20368,4972,28501,1769,668,30434,28260,11737,26344,-10545,-29941,24816,16164,-10236,10910,29092,8996,18599,18004,-8118,-654,5253,653,-11404,-9938,-31022,32302,24053,25354,26960,6211,-29910,-9521,-14036,-28104,-27606,17054,-24488,-28303,32095,25204,-26754,-21052,-3529,-15910,-17844,-7336,-18572,934,13863,28704,-4966,23967,14336,-12799,29333,31062,-17917,27999,25899,-28538,25832,13099,-22614,-32394,-19891,-16867,880,-23959,24912,22749,-25872,14542,31576,23502,-2362,-24739,-31165,-26266,-9164,-22084,23709,1420,28199,-2072,-18223,18987,-10883,10975,27600,-25811,-4568,-6396,-31116,9233,-863,24846,-28105,26275,16308,21590,12052,9105,23312,-30679,28815,27030,-11053,28162,-15694,-31261,18366,-1770,-3183,31905,-1362,32652,-4616,-28809,23930,-28059,-13453,-16503,14041,-29951,-21707,-28455,23988,24122,-1197,15258,-9771,4727,8465,-5858,-32461,32621,-12159,7263,14958,28219,-9944,458,13176,-7216,23129,-13225,31766,-12793,-1683,-6458,-11410,26351,-4445,-8685,-23700,3247,21680,24690,-2974,-6466,23733,-27418,-21386,22686,-1555,-32524,15363,-31687,-17136,1371,23485,-17030,9720,32493,-18520,-20982,-6038,955,-17149,-16797,27480,11231,6673,-3978,9724,-11626,572,-22623,-15649,20879,-19986,7774,27842,14758,19364,10256,-9159,4199,25375,-23728,1547,-19006,1042,2
2067,-13627,1553,-29626,2285,11699,2322,-27421,-16891,-7642,-27225,10849,-17461,-27226,-19873,-4332,-20613,10835,31550,-12336,-7398,27630,20614,654,-1514,7052,22484,-559,4861,18778,-20091,6636,8811,-23023,18185,9837,-23633,31576,-28938,-30235,9509,-30847,3991,-23141,-1301,-23701,-21829,-18305,-13480,20258,-14573,-15114,18478,18151,-17382,17390,13350,-14497,19552,-21143,30865,-25553,-14215,19610,-8441,20699,21209,-3953,-31268,-6711,-21923,12596,-26056,-31710,-27560,-15608,28624,-20354,29185,27596,-28744,30381,-17116,-3865,11641,-24030,31532,-21600,-17589,-16111,-23863,12660,8220,17297,-17683,12604,30649,4194,25024,11676,-9826,3031,-9054,-9300,10678,-8357,-17848,6380,21433,3170,-27883,27193,27001,-16191,-10049,12640,-18421,20573,-3648,6495,-25826,-688,21957,-16448,-1357,-5662,-32595,-13152,-10703,26959,-9292,3949,23413,-27830,-16210,29305,32490,26157,26492,12462,13633,25354,-4761,22572,15698,-28670,-26048,-19268,21406,-15447,15619,2732,-29260,1351,-19798,-31881,32665,11029,24551,-15211,-26468,18644,7538,22140,20261,27442,-15558,-32105,-1054,20287,-14834,-7417,24536,-1254,-31331,19700,20654,-21706,16005,26507,25622,25643,1167,17036,30822,-22470,24048,-18203,17443,32124,20935,-28809,32629,4779,-20508,-21391,-23270,7002,-15807,-9351,29549,-7692,-21877,15591,28183,7313,-26112,-12611,23117,4038,26245,25570,31682,-4737,-12483,-28811,-19593,6198,13304,-16842,21068,14131,18762,-22202,-8409,23841,4468,481,20839,116,15861,-30802,-22988,-17670,-14018,16186,-20589,26544,28124,-14155,27489,-3143,30052,-17685,8394,-3369,19924,-28600,-1636,-8252,3286,7548,-19912,-24414,32520,-13922,14935,15688,23443,7858,-19401,26971,25363,-5250,30936,25384,13546,24699,23530,26943,-31964,25824,11880,-11314,14383,24110,-1060,241,32262,-27571,13872,-25860,25992,20100,-14462,-10918,-2975,-1339,2137,-663,-32575,9561,-29438,-14577,17648,26262,19330,-14063,-2496,-24647,-12033,-9191,-10553,-11959,-9431,31625,-16470,-10382,-24447,-20303,22921,15747,3606,-14555,-5262,938,31325,-5935,-26600,-9858,-30389,22696
,-2552,2271,10254,-7217,-3812,21965,-4632,24352,-32061,-4731,-24500,-19447,23311,-32246,16062,-28367,31054,-7090,20334,-26601,-1515,4143,-29043,22124,-3473,-22745,19232,30161,25318,-27854,-29599,7160,22085,-4737,4850,-15883,-24903,-32291,9395,-25578,-2686,26820,-6116,-27162,-15200,-6739,-2909,-8361,-29705,-19824,-22199,-10368,-27891,-14200,6486,1660,-14662,11201,-20100,-29630,-4871,-193,-7288,-8062,21614,21613,22725,-14633,24875,21126,15820,-7011,30699,-20322,3713,29227,-20816,6119,10550,15196,26431,-12042,4408,-31190,6278,-6180,11280,-15210,5752,-7151,30987,12335,-13185,-9306,-16279,28866,29486,5306,19040,19687,-3225,-11727,-23765,-19415,-20512,1456,-30336,-32740,-19130,-10973,31348,12801,31559,16739,11786,30975,-19356,32625,-3764,-15495,-27159,-5045,14077,11828,15721,-532,-1467,16570,32277,-28795,16098,-2754,5486,29445,-2414,31629,8533,3312,-21295,-12283,-32414,-18016,-6503,-8067,8256,4690,-3237,-1277,-24069,9301,-11492,29152,8374,19627,20304,32619,-3586,-12467,-31484,19410,-10565,23359,13678,-1278,-23195,-18101,-14651,-1221,24217,17576,-6371,-32506,-20610,20506,28160,10455,-20378,31799,-2851,3781,-18688,-23961,-19353,-14741,31354,2505,-585,-4258,-6838,-23671,-18404,-7098,-6676,-8296,-11235,26632,22033,-20498,14763,23458,-19075,10993,-11152,-5871,4107,-12425,11643,18990,23108,-26838,31383,-15758,-28226,-21287,1044,30497,-26352,25249,9782,7054,-9954,-14372,12783,13515,2966,13814,-28643,265,-2056,-21442,-14664,-13198,-1144,-19310,275,25062,-7076,14855,-969,-31095,22432,-9788,-4809,30965,-19942,23326,6296,-762,-26384,-31673,-1041,-2258,26642,22276,27787,20329,24762,-13271,7193,7094,-17570,4993,-4616,10616,17676,-6681,-13829,-30565,25438,17219,7737,22540,-3142,-7282,19,-3199,14044,15768,-16744,16859,-19452,-12970,-8044,-11274,-20573,16363,5846,18893,-6676,-10228,28616,-12982,19272,8909,-23160,-27399,17662,13134,-10902,-12526,29995,-8023,17138,6032,-30445,27218,-22453,-2121,-28791,-3394,-31055,10992,23,21636,-24795,24987,-21720,-26068,-10416,-30027,7409,25758,17912,-27
527,21489,23362,11544,-30257,-17491,-25970,-19385,-32430,31057,-24314,965,-18980,-541,-25864,-8492,-11615,-31705,-1915,6940,-20068,-28263,-6344,-922,28343,18110,6362,28862,5149,-16822,29142,-3480,-534,-31268,1873,4458,-17314,28657,-13101,-6452,-31488,4927,-13737,-30156,22679,-2265,-20018,11561,18987,-32717,-8432,13780,29462,26477,22260,28007,-14031,-29200,-28828,-9434,-4601,27363,-19945,-5275,-18735,20283,-1282,-23821,24504,4372,28312,11993,28812,4500,-9011,-22779,5543,-25924,-20885,25032,-31328,-16208,-13638,15868,-7755,-25037,17435,13311,26260,-6406,-8519,16394,-4917,16244,-1316,-23744,-21024,-15427,-31684,-6649,-12854,-23174,7709,12990,-14371,-27095,32536,-17071,-11650,-636,6238,-5291,-5350,3486,31191,-23961,4090,30840,-31524,9834,21790,14680,-20988,4380,-31636,10604,11858,-31060,3260,6652,1940,22168,-29303,28899,131,-8741,11122,-18259,20629,-2127,32057,-19186,14671,-15467,-10251,24154,3029,-13624,8688,-15854,27853,24089,-11735,9173,-16035,-22819 diff --git a/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice4.tflite b/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice4.tflite new file mode 100644 index 0000000..77134e7 Binary files /dev/null and b/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice4.tflite differ diff --git a/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice4_golden_int16.csv b/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice4_golden_int16.csv new file mode 100644 index 0000000..cbfad94 --- /dev/null +++ b/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice4_golden_int16.csv @@ -0,0 +1 @@ 
+10263,-22645,-22461,-11989,-29358,32214,29316,-12917,4981,25055,-23584,11176,-31175,18294,5021,-5174,-26565,27757,8318,-18559,-27120,-7778,13373,-32250,-3738,16378,-211,-5168,16205,15842,-16280,18056,24074,22281,26550,8855,-10555,29328,16087,12153,19193,8202,8312,-6979,1695,-14283,-17052,-24269,-25533,29598,4603,18914,10109,-13993,14825,12886,-24414,29803,-16860,-23767,-19842,12692,-10420,15578,17337,28766,-22675,27222,17032,-7926,-3938,-5435,-27705,501,-9060,12927,24270,-12000,-29851,-6652,-7334,30685,25181,-12511,4525,-12983,-15701,18006,11138,10464,18569,-23809,-14490,-6660,-21956,-21404,29879,-5078,14950,-8748,-9466,-31865,-21305,-32378,-19231,-28625,-32485,-26339,-24623,20272,19847,30974,31298,29044,-32594,-14914,19557,11131,11714,13584,-27990,-15388,18056,-21412,14871,20829,1846,-12494,-20993,-2121,-27210,23654,18805,22047,4620,27675,-23501,-27084,-26888,19378,-21158,21061,-6543,-1978,-19426,32292,23729,-21042,-11466,5016,5975,-5452,29662,-12923,1438,-22259,2019,-30367,-23817,16828,-18135,20212,16661,-15737,6636,-3202,17031,-15553,7931,12175,14960,13327,-1886,-30237,-8416,4478,-24356,5895,-23013,2901,30907,13463,-24087,9237,26709,12303,31897,-1488,11067,10355,-13784,-6422,16913,12894,12415,12116,12501,-4604,-22208,24983,-13392,27753,16779,-12443,31423,-16448,6659,12907,28378,4856,-13411,-14381,-3603,19244,-24978,-31872,-613,-2240,28676,-32372,-3338,31516,2561,-2531,4117,-20048,30054,-11263,-1048,26839,5083,-15899,-16546,10064,-17387,23797,-25102,26084,27846,-23556,30594,-2939,-3003,-31766,-17415,-12158,19046,22856,9367,27158,-32333,-22541,-12105,28407,-10360,16295,10010,27632,14229,11373,-21461,30992,32402,-14033,-12685,19328,22950,-31456,-22509,20745,6841,-19963,7769,-26911,27401,-15975,27297,2912,12511,29477,6895,22416,29730,23467,20913,7415,30220,23529,-9753,-17521,4043,28953,-24695,-18850,-10402,23255,-14979,5231,-14057,18299,-4979,-17118,11403,12523,16734,17807,17420,5459,-7697,-26493,-27385,-31804,5133,12449,-10746,19344,-17838,-15605,25992,-17299,-2011
7,18814,13416,-27127,-22354,-7487,-4708,2068,5335,24823,-2298,9530,-13019,-12716,26218,-2415,9337,-15785,11389,-3842,15054,17271,-22259,13113,9378,23419,26917,-1510,2670,-24811,17445,12720,-15782,7422,23014,26811,-15009,17902,9826,19289,16979,-9741,20951,16199,26760,21171,20634,-16905,11773,18612,-23613,-3410,-1797,10835,-24726,-756,-24359,28916,-9039,-21422,-3895,19476,16524,-12422,26952,-21680,30722,2485,-9289,-10914,-22347,30484,-30817,-9759,-3659,31063,6298,-23505,-8033,-31270,-26271,8354,13421,9520,-22852,-22965,16380,22065,-19099,29274,32399,-9908,7371,24657,5961,-32563,-32068,-6853,21963,-2338,857,23499,27246,13814,-31047,-28128,21316,-17133,-16015,-32235,29085,23036,-24284,4467,27452,26681,18865,12522,10686,19028,20242,-14760,-23212,31644,-27682,-7436,-28465,-12130,-3586,-21161,-20653,19954,4144,28306,-23497,15321,-29648,22785,22342,-17419,13884,-8783,15575,8919,20499,17837,16934,-8021,16777,8098,-5054,1069,6279,12727,13037,-26124,-12471,28450,-11166,9890,22990,-9675,28150,-9834,-28283,-1408,-32419,9177,-712,-13266,-3925,-29129,24823,-32035,10966,14550,21048,14926,-13270,-10356,17268,2086,13563,-32011,-30320,8849,-14575,30329,3749,9892,12033,-10983,7986,1063,-14993,-21484,-21124,15678,-4737,20387,15313,5645,10370,6580,-9842,974,-7644,27389,-32682,-27211,13674,7952,5587,2132,-2370,-29254,-15104,-28041,314,10673,20086,11940,-27057,-20077,13990,24913,-31959,29185,28494,-32423,3522,-16522,-10293,14942,-13095,21455,14563,16354,25585,-2611,30831,23144,-25813,-23524,-32644,994,-26674,7946,-18670,25939,-5607,-1498,-1293,22631,-31599,16580,-16060,-8880,11604,-14400,-25261,-25912,-9082,13736,-32663,-22793,-6162,-10389,21407,411,-21587,-8824,-15148,-1543,-9200,-25060,12336,30788,-13449,10317,17470,-23956,-23076,16746,7241,7558,-21570,-13697,-27975,17733,-24678,18972,9109,-24064,-21421,-20656,32257,10774,4165,28138,17096,29094,-12959,24837,19489,8184,24862,-19245,-29064,-15136,23098,29689,-12849,17111,1693,-29048,28616,26009,28180,5370,24481,12129,-8463,19353,-23361,604
9,-24428,-25536,13686,32393,-15099,-8540,-23186,-6836,-2722,32434,30256,773,31262,-7947,-26297,22663,6415,26689,-9091,-20886,-13231,28960,18933,-17027,21127,-17603,-23511,8348,26991,-13232,9516,-6970,-29682,29595,-3151,-30359,-17418,16367,4411,7287,-26733,-8477,-5781,-25214,-10946,8450,14166,11348,-11923,18926,-29561,32728,-26120,13232,29983,-28828,20674,-30096,1666,15777,24900,22985,-16740,-30998,20597,13568,-14841,-18029,-7269,-21069,-7663,-15676,-3087,18197,-31752,14621,-24811,5418,-31530,-20298,30733,-22115,-27800,-2532,-7482,29702,29726,11808,-2543,25764,3539,11862,26971,-21420,6282,-5276,5833,1659,13403,-26204,-19530,18956,18302,10315,-27903,-6120,-11644,-14882,18075,-25024,-13646,-6043,-27393,20103,3215,27812,6069,-18463,18887,-31697,10382,-30711,24222,29133,8825,14077,-9649,-23487,-16139,-22429,-10532,-20585,5586,20687,20651,31194,6783,29687,26890,-931,25702,-29493,-25467,3219,1276,10431,-25376,-11970,-5933,13351,31071,-2045,11634,-28337,-15644,24640,-11416,-16962,-8841,14337,21234,13632,-2503,-29550,25438,-18564,8074,24529,-29524,-14082,16447,21218,-16761,-25753,-7037,24388,-22091,23929,-27158,7514,-26660,-1022,-31080,-8444,-16479,-4476,-21524,14087,28152,-18664,-19538,-22445,24049,9467,-4427,-21309,28799,17434,9833,16323,28825,5849,29512,-14760,6058,15017,-29410,4791,-2123,-11530,32597,-7122,17272,22464,-11810,-22897,11551,22832,3614,24166,17596,-13916,8556,-26746,2012,-13668,-13033,19902,1162,6139,-14835,4840,15229,6194,18034,21421,-32147,-25318,-10840,27061,10409,15268,8335,25976,-10597,27817,14315,26803,-10342,-13302,28753,-26300,-25006,-31576,-27033,12066,-21757,27971,-27471,16379,-23113,24515,-12548,3536,-19763,29091,-11751,-14682,-686,12265,-15611,1397,32218,24173,-7329,-5705,-30829,-6601,-3440,5270,3047,11223,13098,-28536,-12201,11032,-7320,-18693,24570,3020,8162,-21289,23083,-2183,-12041,12483,-22903,14405,-12766,28574,24697,-15562,-1819,-1213,8786,10521,22222,22781,15682,-8401,6727,-24046,7800,29270,1852,-22895,12544,30115,6663,13372,18372,2524,11
252,-3000,23048,-26364,-12456,19851,27540,-23362,-12606,-9588,-9506,20847,-1655,1596,-16201,18185,21926,10999,16299,-30117,14354,12712,-2929,-5725,-24409,-19309,19275,24881,-29656,-32626,-29607,22024,-22671,32480,13394,-21095,-17691,-30986,-9453,-11532,11029,-10271,-15680,-20615,-17893,-24918,-7118,26680,-26107,-27682,18950,5454,-8960,-19261,19603,5982,-18225,19762,10422,3255,-20015,31006,-27695,-4324,-26795,32249,2922,19995,28472,28655,29653,8302,13947,-28952,3702,-8381,-20081,26418,1335,2723,15321,17553,-8170,-17953,7890,-15510,9343,-30941,4205,-11375,8089,-16579,26742,27293,17877,8577,-2,25173,-20258,-15635,-11824,-27799,32229,26054,-27806,26840,24069,-687,-5099,-25682,1336,18879,-21857,15379,26431,-22170,-9562,-29563,-31098,-1198,-23073,-17016,-16977,-23952,25126,-8487,-9254,-23416,-4328,5920,-15867,19165,30975,22669,-29643,19585,-12772,18889,-9434,-985,23682,5114,7898,-11652,13672,-22591,-32493,25274,-30891,-25647,-31892,-3034,3373,-24703,26709,-4373,24215,23723,-26941,-20309,-31333,21695,15030,-21485,3898,-25868,-24154,-1752,-27739,5759,2492,-4311,-7725,28392,-8989,23327,-8839,-18622,-13956,13400,25584,28652,3978,-28738,-20236,-29479,-29870,-9203,12839,-28775,28047,-16219,1413,13081,28687,-26042,-24867,-26294,30290,26455,31300,900,22292,21197,-12187,14093,-19441,7764,-31659,-3483,-31546,23500,1223,-28982,30977,11231,-536,-26860,-16267,-20641,4635,24779,13646,-18964,1960,31286,-20289,19380,-1848,25062,25415,20902,6559,-913,-23598,28890,-15232,-6138,-7854,-6339,-20296,-8371,-19956,-24678,-32101,326,-2445,-13836,24630,-2549,-25281,-23869,-22577,-15743,10061,-2275,29589,-29714,12554,-11683,30697,25806,24476,-16362,-24195,-23632,17316,25797,-27146,31366,-2374,-21293,32223,-24033,10435,27857,20176,1051,10248,13430,20374,20512,20800,-25807,20332,27100,25702,-20915,16742,-20297,-16633,14859,-18644,705,-14454,31035,-1930,5859,7157,-23103,5881,10743,-32055,8494,12912,26412,-27958,4872,10933,9674,13167,-28452,11390,8111,-24456,28998,3524,-29696,-3161,-7970,-7783,9009,-29
734,-22654,-32735,6634,1300,-19926,-22372,14004,18707,-25249,-27608,-28736,31960,9804,32520,18139,-32330,-28893,-8026,3149,-24041,-25251,-561,13258,-23768,6848,21934,-25202,-3616,29928,-14014,-25253,24539,32341,-14388,24112,20788,-23189,-17693,6262,-9436,16790,-30989,27475,26468,26241,-24392,-2809,2420,25298,31283,-27001,-14625,17253,15131,18090,632,15124,-8249,4658,7409,-716,20958,-8157,-28854,-31059,-28957,-21072,-12853,16684,-9272,-15172,-6121,15662,2900,17901,-10149,14344,-10623,3224,14724,-19471,-12328,6327,25169,-24664,27868,31858,2715,-161,21047,18855,-18112,6520,-8822,16452,4061,29883,21734,-18563,-22952,7989,23171,-26530,28427,-23116,-28683,29761,2072,-32503,4697,5160,-23447,-7136,-6070,-7828,-31429,-15577,29637,-22658,1678,26705,-1769,2937,-22806,23546,29033,-825,19457,7860,-6391,-8653,11411,9161,-13413,4311,-27560,18834,-6994,28047,6167,24438,28085,-30515,18536,25844,-29286,18072,-5776,-10364,-8786,-15812,-14819,-25062,-5147,18022,15143,-26169,26301,30056,-18065,30735,-29417,31270,-9390,32423,16658,30829,-10553,-27087,-12230,17743,5450,11529,23862,-32183,-17759,-19166,-26872,22776,-8766,17712,-29271,-21185,-11787,1162,-2776,21748,-30916,7658,-7300,5583,30236,31003,-13445,-2874,-29652,-19586,-12263,-22388,-11839,-12385,2475,1533,-21915,-31342,29365,-20746,21282,14739,-27759,16890,-16018,9893,-7964,20525,-1603,-14292,13567,6325,-9691,17840,27098,12782,-5128,-29433,-31742,15023,21458,-31705,256,5194,16677,-7064,27263,24740,-23095,28514,27672,9619,4975,3932,-10963,32262,23050,21581,-8232,10467,3775,-17416,-32069,7766,-7047,18681,3402,-11913,3342,-7951,-13424,-32538,-8400,-10275,-22782,6487,-24138,-19147,-12053,-7658,3862,-26454,8913,25880,-3132,27585,-311,-27628,-26926,20068,-7011,8305,-1217,18787,9155,-21107,5598,4799,-20964,24187,-17822,-26824,7396,-22104,28545,28941,-23906,-17645,2252,22318,31608,-9516,-32653,23800,7768,31996,-25832,-5961,-25925,21485,-17036,6332,-31101,22802,-28331,15381,-12098,28167,18789,26049,24711,-30379,8419,3556,-7628,9861,2172,-528
3,-1312,-1524,14610,17834,-21937,-7338,-17462,-29358,-2354,2762,23013,1969,28824,-21110,-29902,-25408,30303,-15755,-4904,1616,15256,31577,-2287,-21222,-2581,21441,-18443,-15024,-25917,-978,-22190,14184,-17531,-994,15652,3592,26654,19719,21782,-20820,10042,13284,-3048,25084,-19839,-24160,29211,4134,-6652,11074,-13591,15868,-131,-3861,-31295,-32368,-25955,20847,-25904,5169,6481,25259,19336,17484,-31479,-28107,-22549,-17932,-19405,-20775,10584,-4344,-13511,6130,12705,19257,15133,9915,-5220,11590,-13966,8300,23855,-30515,-30565,-7496,16033,-15047,-13322,29085,-9991,9512,28187,-26535,-5920,-23459,-20266,-8078,12526,25903,12075,-32721,-7313,-23433,17862,9263,5043,-11042,-3648,-11107,-20401,12884,24872,28034,29681,-1135,-12007,26962,10387,25470,28379,-29820,13983,-19255,22034,18922,-18509,-24116,15405,-8215,31516,-26685,477,-9562,27477,20473,-953,-17904,-22158,12467,-17468,-30285,-24658,2870,30651,-796,4291,12185,-1031,1516,-8752,-30667,663,-19514,-10330,-14479,24815,-27936,-2480,-29374,-11434,-26085,-6799,32075,-4818,15237,-30112,6503,-25586,2230,-25187,-26028,-2633,-17766,-28974,-2450,9284,-8857,-7269,15893,-28607,-30173,31771,-1006,-16800,6433,-9461,-17353,4213,-6905,5996,4608,-17571,22186,-25101,9464,-2738,-14560,30641,12246,12446,29454,29625,-30549,-11939,-10926,10696,6331,-28796,-7267,-19461,-16647,-2508,-25406,22953,15145,32167,-22816,-8481,-4874,17058,-25272,-22250,-12219,-26778,-7331,-1058,-7174,-26549,5269,1240,23931,13659,25080,22615,10687,205,9275,-5493,20658,-2909,26673,-10737,-9389,4902,-31111,-14391,3939,-14785,-8500,30971,-31313,26142,-28056,20396,-30868,-22608,18983,-24585,17225,-15348,-30660,2059,-27038,27111,22324,-15882,-22891,3745,-19975,-20498,-26543,-2687,-9089,-30084,-9846,-28108,-3683,13392,-23441,9851,19454,14777,-26808,12147,-20624,-27870,32758,32634,1821,-26008,18993,19912,-2574,-11586,-21706,14594,3378,10322,-29767,427,-11462,-19298,28627,6307,28667,15711,18875,-29497,-10798,-25725,8292,26728,20452,-5725,14769,8387,8515,-4039,1996,-14502,26074,
32098,-32255,385,195,-30947,25468,-19906,15946,30326,-12026,11831,13120,-14880,-15756,7665,-12395,-9555,15684,-13345,17162,23767,-11482,-16607,8597,3007,-24329,31640,29487,20947,-6000,-16377,-32443,18236,389,-19766,-31087,-12862,2232,-3626,-30193,19619,-11525,5226,21177,-14873,-8762,-17232,26520,-29835,-12096,-5869,8581,-25684,28501,-30451,18300,4394,-4408,-23057,-9298,-21031,-3333,14297,2745,-26373,6715,6268,-11693,28031,-21301,-18866,20073,5866,-20598,-9001,1717,2868,14192,-32254,8307,7549,27513,10566,16622,8016,-15308,-17896,14875,31473,16895,-13801,-23908,32082,27256,-8560,-13441,16224,11484,22496,-13557,19384,31984,14448,-26290,13570,-20569,-5558,21114,-13043,25956,-22692,9899,-24643,831,26673,17865,3410,23625,-6596,15793,2016,5504,19274,17240,-1024,16482,-28847,-27248,21838,23276,-18692,20363,-12424,-748,4179,9492,7336,-24993,-18598,-2288,-24272,-22193,9542,832,21660,-22070,-17776,-24077,21893,-10702,11424,8550,-23475,1598,20039,17695,-21346,9640,-890,-12359,-19367,29444,9024,-29676,-2926,23292,31322,32480,-9520,-15417,32004,-5310,2887,-2532,-28349,3131,-2731,-3350,-16954,-23677,8964,-6508,-27671,14899,15560,22901,-20784,-6601,24419,-19803,4286,-15877,293,-13099,3476,4286,-30003,-12409,-32315,-25079,-1382,-25132,-13038,12854,27185,10999,-23961,8180,-25286,-18594,27880,-15565,-3024,12765,-10112,32253,6915,-23827,24963,-11044,28031,-30211,19352,26108,12786,9832,-4309,-17450,-30558,-27424,23864,-21140,25294,11729,-31221,13543,30491,-32437,-2053,-2821,-26015,9360,20308,-8177,25222,18611,1999,-31577,19548,-23864,30198,-15619,228,-25102,-23634,-8128,-15357,30918,-18371,-12552,9249,23068,23027,7497,23182,-11921,-6553,3788,3932,-1202,-32586,-17833,-29491,15237,26039,19987,6743,-16231,27213,-17359,-21492,-15000,-30867,5302,14902,130,23847,13693,-23725,32570,-13148,9308,-25134,-26599,-23771,-442,-28820,-5507,-3081,4306,-22263,-11894,-9076,-21958,-15465,-1777,6729,-11885,1332,10293,-27625,-151,-10928,-18218,-218,-25444,17320,-2342,9980,-12332,-27457,-5781,-14436,-29540,2
1262,27940,20343,12391,-25211,-24246,30806,-31038,-17747,6882,9535,-31839,-28406,23842,-8609,-27155,-16362,31652,31722,2919,-8720,-29582,-23201,-5046,-27596,32523,-23556,-10221,31187,-23797,12478,-4391,-29915,-22643,-30629,17672,-3448,-23670,-27370,26159,-5779,6448,-28224,31052,-11661,-26015,-19064,-26638,14606,13139,29540,-5324,2550,4106,-22310,-20987,-22835,23471,-1744,-11997,19567,6224,15075,-16679,31684,9154,-29610,-20223,-5139,-10606,6585,-19955,-17875,11584,-21132,-12807,-31693,-8047,18342,14758,-11068,7369,12711,25984,22794,-27428,-8883,-5328,-26269,10561,25524,-31483,31775,26170,367,-19860,31013,-23948,868,15708,17476,-23387,-14226,6590,826,-8082,21154,-19070,17131,28007,-1031,21435,19439,-23930,23007,24569,-31864,-797,-4130,27339,12775,8840,9188,16960,-3834,26274,-25982,2127,3622,-27300,-25628,-6166,-25841,3272,-1098,27672,21464,7254,-3048,12574,-30108,-32628,16821,-29075,-31185,12651,-28391,26794,-10935,3916,-1272,-11178,12459,-9517,19535,2655,-28240,-31038,5961,18382,-29939,3539,32600,12670,-31930,-17591,29361,-23398,-7904,-25458,29645,20257,14565,16625,32739,-23144,-21496,-3536,-23173,15966,5814,14933,-29005,-19593,17700,-302,-22333,-30847,28079,5000,-7919,28869,-9645,4423,12141,6289,6543,21000,-16580,32511,12596,7403,15265,25501,-2742,31803,-8636,-27228,28954,15850,16725,4293,-25683,-28208,-14379,32054,11203,27250,-22170,-4214,-29285,15616,29403,31845,22994,27844,30652,2662,-4999,11457,22033,-27266,4136,22801,32652,-1119,-11353,-13064,24315,-32683,14421,794,27990,-4453,-16662,-23140,-10497,1399,-8495,-2372,4406,15350,26750,26535,-1448,-29177,19606,-5153,24169,6752,21050,-14024,5288,-1599,21731,24795,-9634,18929,-10242,-21493,14372,-8423,-4597,27103,23938,-17465,-29899,-1838,24250,32752,-23035,-6729,29544,-32222,11036,12227,-18136,6886,-11809,1156,2675,-1712,2160,-14440,-7241,22989,4252,-15744,-18805,-5733,5557,-22244,-9536,-20847,4231,-31999,-3638,13102,-21850,24195,-12128,-294,27403,17248,19514,-29380,-5903,29914,-20973,3822,10919,30281,15419,-25812,-2
0676,-2245,-4072,-25605,16133,5197,31561,-23610,-24957,18917,-24928,7919,13817,1058,-4551,21173,9729,-7030,-30497,-18079,9556,11591,17711,231,20699,-26682,-17857,-17302,12934,19913,17943,-18901,-2492,11685,-16481,-20384,30198,27093 diff --git a/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice4_input0_int16.csv b/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice4_input0_int16.csv new file mode 100644 index 0000000..6730fb4 --- /dev/null +++ b/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice4_input0_int16.csv @@ -0,0 +1 @@ +-31894,-12501,22867,-28890,324,13438,-7496,14389,23846,28873,-10984,11267,-25536,21299,26090,-355,8872,26764,-26875,24187,414,-26120,-22820,-28457,19302,23826,-29045,2578,-5991,20098,21557,-3453,-452,-18059,13032,-7535,-397,-2001,-17747,19547,25657,-27745,5673,20598,25324,-1539,-21010,-30786,-7439,-13862,-22990,-24349,-3441,-20660,-18472,21261,1414,30319,7770,2963,-28841,13472,-21098,24665,2986,31387,-10588,29260,-3068,20724,16028,-31059,-10656,-4804,1829,30759,9111,-21606,10651,24596,-8501,-27809,28567,-6672,22750,26872,-20492,12631,-9019,-3672,-24958,3068,-20042,-22456,-2669,1140,-11890,-29223,28067,19892,5092,-4682,-24890,11651,11876,15297,-13299,-13969,-28011,32283,-31446,-28088,-9425,-15992,2492,1045,-248,23450,9992,-32298,25478,11022,8484,10537,-13103,12320,-10574,8472,518,-27607,-4900,1786,-19053,-26836,-10740,-14890,25925,-30814,-15808,206,7877,-14058,32130,-3434,18601,-19774,-9093,30284,26015,-28487,-17864,-28959,32562,11942,12062,-9771,-10798,-4271,-778,-27645,-14720,8366,-31996,-21455,11072,13801,25921,-6488,-12972,2389,24663,30288,13133,827,12994,-23375,-13303,29400,17131,24263,7282,-4530,4529,-16877,15679,-27761,-8470,-24964,26263,-31188,-2333,22801,7096,28586,-8337,-16656,26543,10863,-29767,16566,13962,16707,-15870,-7999,6969,-19867,-26710,32598,9083,-18472,31665,-14945,-27756,-4468,-30678,-31100,29622,21693,-5013,-24879,22377,4966,-19603,18615,24961,-15580,4
194,16424,10056,-6620,15769,6582,-16679,17527,16037,-11753,20341,32162,16077,-30713,-26844,-31309,32302,-9734,12589,-8930,-16888,22393,-30146,27927,-27096,-10169,15851,-28636,26343,-4429,-15476,-29659,-12102,25335,28987,-14920,-22313,12235,19243,19213,-19578,5065,-11104,21623,14909,8557,-13710,7083,-30116,22869,-940,32028,27218,26421,27181,-8143,8946,12455,-9659,8412,8203,-817,5973,-21925,13012,21835,-32350,11959,2291,-15637,-12262,-25778,-22283,-23988,22377,-8797,-29271,19797,5900,-4218,30120,4909,-10044,17468,14188,-16660,32214,15820,-9285,6170,3175,-17482,30816,-3719,12703,-29589,25896,25919,-13150,-14656,-20606,-26751,28030,-6022,29337,-3804,24843,1744,-24119,25192,9111,25916,-25204,25622,-17299,26269,2038,8292,-4171,-24108,10270,32562,-17336,-19181,-29259,-10124,-22286,173,-18600,-30463,5661,-23426,-26379,-25276,11134,24069,5671,21699,-655,4312,32472,7349,-3949,-6747,2122,27876,2499,18860,-7174,13891,-560,-6189,26521,12666,-26043,-14106,-20202,31752,-29619,-27493,-12076,-5790,-31701,-18615,-15914,-24902,25350,17536,23555,20863,-8880,4382,21214,-3753,-31124,-26427,11915,20814,28356,8064,28013,30543,-17135,-32398,17248,-11528,-3949,6293,8244,26083,20182,306,-3901,-17626,-9006,-1706,-4136,30405,-17250,-28511,-10047,-20836,-17331,31319,-14303,27166,4627,19048,30053,26278,-21205,14126,-6141,12711,9492,9839,11491,722,31173,-23917,-9838,13336,10631,-18449,3503,24580,-11438,-4350,-27547,-12370,16508,-7016,284,-7336,20554,-1944,-15804,-6820,-17467,12123,26735,11828,9619,17516,-17267,-5937,27577,30806,20228,-12351,-3971,-29800,-32201,1266,27529,17686,-18351,-26262,13477,-16193,25628,-705,11631,7913,-1920,-24498,-25382,-24107,-22438,-1571,2367,-19244,-26283,-20227,8065,-24087,-8180,-4701,-32274,-30419,-28687,-23823,15148,1384,-2845,-25054,18198,32081,16484,-32559,27703,-4466,-27690,30757,9543,-4836,26278,-2685,-25016,-23908,-5066,5037,-485,6829,-10270,30649,13974,29154,7135,14153,16624,-26232,-1846,20732,21951,5085,-26707,-25042,21009,26722,13762,-23999,-19991,-18657,3168
1,-27822,18529,-10851,13161,3304,24704,-27818,28368,10216,-16855,18463,10653,-1465,-32670,10329,-18773,5119,-6466,-24891,-1637,-31135,11954,25666,29875,10910,19195,4114,28094,1912,-15942,-23105,-1681,24910,-18064,-14190,-21757,-25618,1051,18012,18651,-28930,1031,-24766,23825,11186,-29318,14033,-30844,-21109,-19291,-6095,28829,19534,20423,15196,-24940,-24054,-24025,4954,-2968,27203,10407,20414,-25218,17966,16932,9590,-24882,-7640,-4638,14271,27094,-31374,5088,-31856,-29450,30660,28899,16413,-31162,9067,25245,-798,24586,23953,-29292,3537,7923,31771,29769,-25413,6720,-2623,15493,7631,10329,30119,29284,26139,11496,-23983,14001,25291,4633,3559,-12788,30942,-32115,-26155,28862,17493,20262,-20676,32224,-4974,-16543,-18791,-3745,19563,27113,-17608,23102,-25354,13233,2609,-26388,23031,-29445,18307,5606,-17245,21062,-13167,-20031,10511,29281,24176,-107,-2239,4676,10272,8063,-19054,-24533,-11922,-30231,-32219,-30382,14087,16695,-6510,4158,-18867,2304,-19303,-23055,17520,-15376,-15016,-18715,-10330,28429,25869,28983,-8091,-25158,5769,29006,18742,1338,19628,2975,29517,-22915,14280,26429,10893,20894,-18000,21807,2678,-11217,31847,29492,-1344,-21475,-31987,-2157,-14112,22909,-14341,-8707,32220,11992,-7856,-20310,-29471,-1783,-30457,19274,-22292,-31883,-18995,-536,10499,10428,-18902,18291,25737,7580,10659,733,-3120,-13707,-10030,17756,30640,21535,-7657,30666,10770,7057,-11054,31874,-15845,10289,3188,-25994,3872,513,2786,12630,-23521,-3331,8121,9907,-11727,-204,-4192,-3114,-14636,-5755,-1809,-19349,-17324,30801,-30325,-6869,25435,16671,2993,-11057,7580,-8290,-6884,-8120,22185,-15444,-937,-11810,8648,-15519,21049,11201,4235,21599,-17534,-3121,6785,27443,-15455,4726,-14669,13119,22543,14159,-17342,-20118,-6470,28306,17871,21196,8626,-3027,-13221,-26352,21241,-26913,-454,7624,-12945,24006,29141,-4653,-2053,-20967,26331,-28451,28240,9823,27410,26299,26993,9229,-31114,-24910,-2547,16340,30852,27355,8139,-15912,27463,-4663,21311,-16335,6468,27709,-14825,2249,14316,626,-1179,26646,-24327,1
9838,-14329,-30507,-8949,23224,18076,-23328,5420,4009,-15009,-23811,-31020,-24183,17149,-2297,28260,13236,27776,-27056,-24062,12292,-31690,30968,-7251,24922,32348,-20928,-8383,-24111,6773,-9103,-22986,2725,-3421,-28562,-31700,4671,25593,-26136,-13032,26837,-5863,29309,-22012,-20081,5484,-91,-21575,30081,449,32659,-19165,-7781,-22448,31023,-26266,-5727,23673,2485,5370,3101,5511,12815,-24922,17152,9337,26802,-31229,-18501,2951,18456,9459,18584,21033,807,6238,-17607,-6880,-3930,-24502,-24684,-12578,11808,11339,-434,-19689,-26968,-29597,-2988,20177,23404,-32411,17758,-28958,-20212,-27835,3661,26392,31303,24417,26725,-12364,21756,-15279,-28265,10896,15149,23707,14514,11654,-12515,-26504,-305,21317,815,-25430,29490,28158,15016,-7826,-5251,-31156,-26035,10517,22488,-32606,-13985,-10969,-904,-14943,-7795,-24374,-16667,-16693,-4369,-7128,-30020,10509,-10307,10809,19775,-17264,5423,-28643,-26309,1750,9756,-7962,6493,29734,13509,-14755,802,-28123,9178,14306,28430,-22866,2617,-1522,10314,-22205,13608,-14576,-18224,-31600,76,-8851,2510,-10025,29371,3590,-15801,23083,-40,3904,-1144,16135,-10094,20354,15552,19655,-2041,-8631,21452,-19950,-3803,-11556,29962,25284,-16693,-23873,-22453,-22574,14631,25589,-24155,-13790,-17388,6377,-17636,14974,14103,9603,-3438,-18200,22565,-7001,5809,-9630,-29299,6832,21957,-16019,-4594,-28009,17758,-2394,-28180,8292,18,-3696,-3460,24100,5549,-23617,14730,-4658,-31620,-30017,13349,15087,-11741,-22868,-4877,16369,20893,-11789,-31260,27179,15336,16622,19998,4980,-20988,28054,-8921,4530,29823,2165,-23283,-12930,-21264,-5936,26132,-22903,-20853,-29193,-19824,-571,-12561,3902,31214,15101,21995,-15981,4248,-25620,-28776,-24312,-11701,-31234,14685,-2103,-12999,23102,-9623,-18095,19718,7757,22427,23419,-1611,-24173,-9841,19698,-32272,1418,25605,12861,24022,-30605,26247,917,2122,-17545,-7311,-9626,21551,8366,3932,174,-22773,-4537,-17168,-279,-16064,16320,-32663,-5552,2377,-2846,23407,243,-25647,16661,31110,29428,9134,10813,-1709,7898,32756,-22307,-22597,-31852
,-2265,-11193,13681,-24135,-27380,27616,-19466,3573,6717,-13636,25125,9813,-8685,-26155,-4199,15639,-25479,-27034,10255,-27496,24286,-12715,11014,22253,30859,-27089,-6525,-15891,28434,-32241,4487,-11856,9,8455,-5810,17559,-14420,-9195,-28913,343,-25209,4594,-22964,-18865,-20624,14173,-5069,-1535,-7262,14549,-19500,-15895,14959,25983,-30838,-5732,22418,4150,10167,30924,31876,26564,-23029,-1650,-15960,-18723,31403,9431,-11010,13926,-10372,22355,16030,-28058,14117,-32014,-19423,17845,1247,14270,14070,18489,-26689,-32500,24205,21039,20414,1882,23089,-19367,-32050,30591,-16224,3044,-14227,31086,-26543,-30902,-27700,20894,-17571,28751,-15801,13324,-26773,18667,-5176,4706,4109,-23831,-16069,-19086,32152,29621,22601,-15250,-19605,5354,-21723,-18837,18521,-2972,-4227,18724,-26128,13653,-9819,-16954,3091,-6029,-27287,-10081,19780,-16591,18504,32594,-20910,-15137,-21204,-26993,8965,7485,-19580,2037,-19318,13646,-19357,-19803,-11343,-24629,-1847,2501,-18777,-6001,-2022,13657,-11644,-31558,-6078,17325,30617,29581,-27771,12410,11284,-13091,-12276,-12432,23090,-19915,25709,-1105,10310,30801,-17603,16670,23241,-29613,13300,-1204,-30305,-7144,17724,24269,-27675,11368,-9310,27855,28130,-29878,18744,20904,-2552,569,21900,-11042,-4651,-10232,4325,-31756,-11355,-23342,-10203,29112,31019,17946,-25656,18631,-26493,-17967,-20678,-6190,-1974,-24288,-22123,-30703,28930,32652,-10597,10086,-32213,4204,-14552,-22971,-14150,18208,-29134,2015,-227,13804,-6618,7671,-26415,-10829,-22528,-18846,28887,7863,10649,-23061,18869,8389,32220,-31666,-24832,643,-26626,-11696,22437,-1426,29635,32246,9381,5453,308,-2265,-13849,-19280,29422,-22411,25209,20617,389,-9921,-31767,23354,13673,-7190,20849,28474,17492,-2783,9393,-26279,2677,-27990,10225,57,2769,8043,-21411,4038,-1960,-7054,-2178,-21505,16705,22349,26724,-30932,12061,15180,-27818,-4520,-28099,9690,8877,-7009,-12878,-13387,21133,-25336,-16741,2151,30898,29537,2205,-21258,-30315,-30141,-5847,-27329,16413,24075,-1651,-6252,-14566,-7241,-10611,-20592,5148,
5775,-28197,28730,-28649,7309,-27216,-23061,28933,-11813,-19077,25154,17034,15,-14213,11475,-27436,234,-27530,-9752,-24502,-20763,7242,10351,16494,-18880,-12005,-154,-20520,26439,-23204,5701,-29891,7244,26479,-20233,15869,-17961,-31623,24770,19016,5424,-2960,-2393,25054,9680,26634,-32161,16494,1744,7011,9569,29435,-14964,-20248,-21714,-2429,-7969,5138,-17021,-12290,-32424,28602,-14302,-23475,14942,-20357,-12620,-19331,-5388,-29256,17191,-28627,-20333,-25376,26747,-3578,17127,14244,26738,16107,-10988,19105,-30160,-12501,-2173,-662,3793,-9891,16587,-25614,-17599,5407,24390,-23350,1156,22048,11769,-14819,6536,10277,-32654,13744,-25583,-6864,-22630,-19957,-6862,-9332,-24007,16549,13506,22828,2923,-9650,-20727,-13196,22633,-7641,-8734,8118,30395,26122,10348,-1675,-23421,-26398,30619,12563,-9034,-28173,1983,7737,-28895,-9020,630,19341,5121,7100,10170,-3156,17322,-4156,-18838,-19938,-20080,29629,27327,31371,-20944,-6579,-16775,-32636,-7417,-30120,-30172,-17565,590,12633,-4865,9622,31069,-10971,-32764,-15811,-32444,-24917,-16122,-29912,-25623,13826,12963,-18037,-19814,18218,-7362,-21604,-32216,8850,-8980,15612,3244,4694,23476,-580,13547,-29454,-12536,14877,-21275,-30183,22267,25720,-8478,-4609,-9898,-30987,29996,-10745,-3923,30088,-4497,-30127,-30257,-28328,15963,-340,14494,8367,17130,3668,-30268,-14999,29133,-15513,14717,-1050,27408,-6812,29904,-15019,29639,23141,-22280,-12173,13400,-12756,-2503,11664,-13103,20911,27435,-2687,-30139,7794,-19455,12432,19926,-29896,-17376,2268,-20417,14865,21251,19068,-20151,-6469,11959,30775,3913,-1903,-21860,6433,32539,4032,28476,25838,-15983,17169,-18488,18082,-29735,15035,-8646,-5034,-13354,32736,29679,-9786,15710,15722,-24423,-11196,30804,-9438,-15444,-7738,-22569,-8349,26958,-7800,-1885,4073,6104,-19261,21142,-10449,20882,16616,-2438,-28452,-22441,8399,9350,12968,16744,-23192,26251,-17394,29114,29546,-17103,10145,-17221,10545,24400,-21832,-28428,4488,-17337,-7863,-25467,11007,28795,-24868,19420,-31840,16694,-18232,6692,31937,30430,-361
8,1140,1612,17677,-3210,5855,-22836,-24869,-6484,-12352,-10767,-16798,-26540,-19518,11506,-32430,-27503,-30403,17439,17741,-23452,708,-2107,-11245,29181,-32308,-22289,-20496,6255,16183,-12730,29950,-8759,-3079,-12224,9647,28513,4241,-5416,31890,24711,32108,-28012,-25644,-12522,-7704,-1255,28365,-7918,17344,-23717,-1244,23910,6743,18516,-26546,-13942,3919,-20725,25331,-9746,15024,9252,-10469,22896,2763,-27638,-144,17293,-16491,-5727,-31551,-21829,20459,-498,19249,-15280,-30848,-20492,-14913,-30471,-8147,-22125,-20061,-29812,-14999,-12759,14047,-14550,10379,-27559,25794,11801,26474,22708,-31280,1510,11355,19931,17090,-8204,843,-24355,-3252,15051,22133,20005,31735,-12111,13585,-7856,31600,13279,23130,24451,26264,-7307,-6112,18991,-27861,18278,-7761,22487,-14654,4918,27931,-23831,-26085,30807,5054,-2031,22534,11514,-22416,28027,25302,23587,9841,4705,13586,-10692,-4020,19699,5733,19054,25737,-21295,22370,25372,10990,-20744,-12186,-19866,-27209,-30496,24778,29724,15870,-11652,-29034,24040,-32219,-16165,20118,3571,-12543,-25829,28035,20882,18697,31239,-28145,-27265,-16299,-32488,6989,29341,-7695,22746,29510,12106,32575,-16722,-26877,-20801,5437,-30167,11305,-7895,14381,-14931,-19487,-4603,8562,-26431,19632,-12757,-27845,22504,6047,21895,-23578,2530,-6485,8750,27370,-27552,-26935,-9258,-1724,-28705,495,-21664,-32444,-30195,93,-22633,31891,20956,27472,-8764,19029,-1968,-15272,19645,-18084,2302,-30162,-10604,-8784,-13498,-18066,-30326,-10225,22198,17738,10519,-5517,-30991,-24926,-16698,-2894,-29774,5819,8118,-19390,26375,8053,-3922,-2438,24033,-8113,-26746,10603,-25919,1423,-7018,2092,-17968,4374,3320,24049,-12518,4719,16281,11159,-648,26029,-18241,8492,-21693,-20681,3711,-26201,14189,-12646,-18763,5707,-14791,-2733,27702,-30259,-8054,-7080,-10706,-26119,-22377,-24637,11507,4027,7072,1619,-2910,26519,-25286,9555,-9352,2547,26537,-27846,-25055,-13876,24745,-28522,-29942,-12645,16942,-5998,6625,16641,15488,22683,-29279,23348,-1335,-4790,-810,-32228,32463,-3624,28640,-280,-23277
,6008,19670,233,-28908,-8899,19593,-5527,-30444,23806,32032,17443,13916,-3196,-14352,16108,-30937,-21139,-6992,-13569,-14261,21383,-30685,-3675,9739,-11142,-18520,-5817,-5733,-2003,15809,-16685,-28590,24803,10407,-14523,10236,-16613,16725,4743,-4620,31824,2936,-5440,-11566,-5823,29185,4572,-10587,-24168,2045,-6361,13221,6599,-12536,10406,-10165,-23961,-11125,-10647,-18865,21634,-6711,6060,10282,-31693,-15972,4216,-2015,-9931,-713,-16990,-30106,7665,26591,-31182,-29334,-8387,22368,26580,-9527,13523,-21582,31879,-2763,-6923,-27777,-30387,2148,-32514,-26829,-22394,-19732,-10957,31886,15044,31303,-23007,-13321,-10156,30163,-25008,-30909,14041,19437,-18218,-12995,-30114,14797,19168,8191,12578,23298,-22915,-24716,18452,-18680,-6874,-17098,-2495,-21268,11770,-32058,27056,7176,3193,-17726,13513,2245,30344,21417,29548,-20458,16352,-32482,-22139,31351,23610,24770,19656,2568,-10524,-30792,-20896,-19898,-19458,-21897,-27764,3553,-12890,-10599,-8477,3196,13531,21725,18988,-473,-15484,27993,29382,-15296,17406,32378,12820,-21699,-30344,-27646,-15387,-752,17514,-17395,-31564,-14413,29303,-343,11721,31658,10448,-23510,-4886,-21870,16482,14481,-2600,-4639,-30996,28127,-1927,-18221,-709,24802,23626,-29656,-11698,3080,14930,31349,-15750,-14381,9872,-12985,6745,-25792,32466,-28236,14390,1559,-16171,-3420,-2599,2991,15804,-24339,-18250,20753,26987,12339,-31501,23639,133,17769,-21093,-7415,30276,-26140,12301,-8424,23597,2100,8297,11518,-30918,20291,23790,21531,-8067,-30226,-25239,-29040,-13438,-10295,-12650,2555,24542,-12145,-15346,13494,-7428,7784,-3438,28885,14282,13300,14388,-4094,-3911,485,-28741,-12929,-26031,24721,8469,2068,-24489,8965,-10377,4912,16923,5825,32121,-11004,-5201,-12506,-21166,-12950,-23743,30827,29783,-11181,18635,7634,-13975,-3982,3335,5763,-5348,15815,19098,1617,-31749,-28939,32662,1557,-24682,21830,-12753,30586,9656,7399,-2923,22489,-21641,5150,9944,32255,27180,-5729,5751,-13867,4387,11224,9288,153,-22361,-19154,-843,1194,-4420,8371,1555,11997,2888,-16875,24054,410
,-24654,-5105,-5920,22127,9295,-20890,32138,-28751,10851,24274,-9937,-29555,-9320,-24482,-18929,15333,-25690,-18794,-25824,31337,-32618,-10844,9443,12913,6709,-15939,27199,18832,32340,23181,-5881,-32030,-21706,-13842,-16139,10430,4702,-4074,-1799,7094,7715,25634,2097,30436,10604,6835,-6425,-28758,-29948,-16607,-11963,-101,-17473,12949,31034,-20814,-7587,21649,-10186,463,-16415,30995,4454,-20585,-5493,31813,301,-8773,12820,-18383,-5105,11266,-23202,-10712,13644,-14088,-12437,24399,-16222,-32086,29117,19057,9094,-30450,-28403,5388,4763,-21070,-12477,-634,-16289,-9879,-8227,-32586,-3672,26289,9608,-6086,-32083,26535,-399,-5932,-28582,21783,-31781,-9087,5151,-3998,8343,-4778,31330,-26707,1977,-16136,-19528,28578,23930,20479,9396,-9446,28143,-32653,-28735,-29095,8064,-20863,10911,-24988,19444,-18107,19875,-12841,13631,10263,-22645,-22461,-11989,-29358,32214,29316,-12917,4981,25055,-23584,11176,-31175,18294,5021,-5174,-26565,27757,8318,-18559,-27120,-7778,13373,-32250,-3738,16378,-211,-5168,16205,15842,-16280,18056,24074,22281,26550,8855,-10555,29328,16087,12153,19193,8202,8312,-6979,1695,-14283,-17052,-24269,-25533,29598,4603,18914,10109,-13993,14825,12886,-24414,29803,-16860,-23767,-19842,12692,-10420,15578,17337,28766,-22675,27222,17032,-7926,-3938,-5435,-27705,501,-9060,12927,24270,-12000,-29851,-6652,-7334,30685,25181,-12511,4525,-12983,-15701,18006,11138,10464,18569,-23809,-14490,-6660,-21956,-21404,29879,-5078,14950,-8748,-9466,-31865,-21305,-32378,-19231,-28625,-32485,-26339,-24623,20272,19847,30974,31298,29044,-32594,-14914,19557,11131,11714,13584,-27990,-15388,18056,-21412,14871,20829,1846,-12494,-20993,-2121,-27210,23654,18805,22047,4620,27675,-23501,-27084,-26888,19378,-21158,21061,-6543,-1978,-19426,32292,23729,-21042,-11466,5016,5975,-5452,29662,-12923,1438,-22259,2019,-30367,-23817,16828,-18135,20212,16661,-15737,6636,-3202,17031,-15553,7931,12175,14960,13327,-1886,-30237,-8416,4478,-24356,5895,-23013,2901,30907,13463,-24087,9237,26709,12303,31897,-1488,110
67,10355,-13784,-6422,16913,12894,12415,12116,12501,-4604,-22208,24983,-13392,27753,16779,-12443,31423,-16448,6659,12907,28378,4856,-13411,-14381,-3603,19244,-24978,-31872,-613,-2240,28676,-32372,-3338,31516,2561,-2531,4117,-20048,30054,-11263,-1048,26839,5083,-15899,-16546,10064,-17387,23797,-25102,26084,27846,-23556,30594,-2939,-3003,-31766,-17415,-12158,19046,22856,9367,27158,-32333,-22541,-12105,28407,-10360,16295,10010,27632,14229,11373,-21461,30992,32402,-14033,-12685,19328,22950,-31456,-22509,20745,6841,-19963,7769,-26911,27401,-15975,27297,2912,12511,29477,6895,22416,29730,23467,20913,7415,30220,23529,-9753,-17521,4043,28953,-24695,-18850,-10402,23255,-14979,5231,-14057,18299,-4979,-17118,11403,12523,16734,17807,17420,5459,-7697,-26493,-27385,-31804,5133,12449,-10746,19344,-17838,-15605,25992,-17299,-20117,18814,13416,-27127,-22354,-7487,-4708,2068,5335,24823,-2298,9530,-13019,-12716,26218,-2415,9337,-15785,11389,-3842,15054,17271,-22259,13113,9378,23419,26917,-1510,2670,-24811,17445,12720,-15782,7422,23014,26811,-15009,17902,9826,19289,16979,-9741,20951,16199,26760,21171,20634,-16905,11773,18612,-23613,-3410,-1797,10835,-24726,-756,-24359,28916,-9039,-21422,-3895,19476,16524,-12422,26952,-21680,30722,2485,-9289,-10914,-22347,30484,-30817,-9759,-3659,31063,6298,-23505,-8033,-31270,-26271,8354,13421,9520,-22852,-22965,16380,22065,-19099,29274,32399,-9908,7371,24657,5961,-32563,-32068,-6853,21963,-2338,857,23499,27246,13814,-31047,-28128,21316,-17133,-16015,-32235,29085,23036,-24284,4467,27452,26681,18865,12522,10686,19028,20242,-14760,-23212,31644,-27682,-7436,-28465,-12130,-3586,-21161,-20653,19954,4144,28306,-23497,15321,-29648,22785,22342,-17419,13884,-8783,15575,8919,20499,17837,16934,-8021,16777,8098,-5054,1069,6279,12727,13037,-26124,-12471,28450,-11166,9890,22990,-9675,28150,-9834,-28283,-1408,-32419,9177,-712,-13266,-3925,-29129,24823,-32035,10966,14550,21048,14926,-13270,-10356,17268,2086,13563,-32011,-30320,8849,-14575,30329,3749,9892,12033,-10983,7
986,1063,-14993,-21484,-21124,15678,-4737,20387,15313,5645,10370,6580,-9842,974,-7644,27389,-32682,-27211,13674,7952,5587,2132,-2370,-29254,-15104,-28041,314,10673,20086,11940,-27057,-20077,13990,24913,-31959,29185,28494,-32423,3522,-16522,-10293,14942,-13095,21455,14563,16354,25585,-2611,30831,23144,-25813,-23524,-32644,994,-26674,7946,-18670,25939,-5607,-1498,-1293,22631,-31599,16580,-16060,-8880,11604,-14400,-25261,-25912,-9082,13736,-32663,-22793,-6162,-10389,21407,411,-21587,-8824,-15148,-1543,-9200,-25060,12336,30788,-13449,10317,17470,-23956,-23076,16746,7241,7558,-21570,-13697,-27975,17733,-24678,18972,9109,-24064,-21421,-20656,32257,10774,4165,28138,17096,29094,-12959,24837,19489,8184,24862,-19245,-29064,-15136,23098,29689,-12849,17111,1693,-29048,28616,26009,28180,5370,24481,12129,-8463,19353,-23361,6049,-24428,-25536,13686,32393,-15099,-8540,-23186,-6836,-2722,32434,30256,773,31262,-7947,-26297,22663,6415,26689,-9091,-20886,-13231,28960,18933,-17027,21127,-17603,-23511,8348,26991,-13232,9516,-6970,-29682,29595,-3151,-30359,-17418,16367,4411,7287,-26733,-8477,-5781,-25214,-10946,8450,14166,11348,-11923,18926,-29561,32728,-26120,13232,29983,-28828,20674,-30096,1666,15777,24900,22985,-16740,-30998,20597,13568,-14841,-18029,-7269,-21069,-7663,-15676,-3087,18197,-31752,14621,-24811,5418,-31530,-20298,30733,-22115,-27800,-2532,-7482,29702,29726,11808,-2543,25764,3539,11862,26971,-21420,6282,-5276,5833,1659,13403,-26204,-19530,18956,18302,10315,-27903,-6120,-11644,-14882,18075,-25024,-13646,-6043,-27393,20103,3215,27812,6069,-18463,18887,-31697,10382,-30711,24222,29133,8825,14077,-9649,-23487,-16139,-22429,-10532,-20585,5586,20687,20651,31194,6783,29687,26890,-931,25702,-29493,-25467,3219,1276,10431,-25376,-11970,-5933,13351,31071,-2045,11634,-28337,-15644,24640,-11416,-16962,-8841,14337,21234,13632,-2503,-29550,25438,-18564,8074,24529,-29524,-14082,16447,21218,-16761,-25753,-7037,24388,-22091,23929,-27158,7514,-26660,-1022,-31080,-8444,-16479,-4476,-21524,14087
,28152,-18664,-19538,-22445,24049,9467,-4427,-21309,28799,17434,9833,16323,28825,5849,29512,-14760,6058,15017,-29410,4791,-2123,-11530,32597,-7122,17272,22464,-11810,-22897,11551,22832,3614,24166,17596,-13916,8556,-26746,2012,-13668,-13033,19902,1162,6139,-14835,4840,15229,6194,18034,21421,-32147,-25318,-10840,27061,10409,15268,8335,25976,-10597,27817,14315,26803,-10342,-13302,28753,-26300,-25006,-31576,-27033,12066,-21757,27971,-27471,16379,-23113,24515,-12548,3536,-19763,29091,-11751,-14682,-686,12265,-15611,1397,32218,24173,-7329,-5705,-30829,-6601,-3440,5270,3047,11223,13098,-28536,-12201,11032,-7320,-18693,24570,3020,8162,-21289,23083,-2183,-12041,12483,-22903,14405,-12766,28574,24697,-15562,-1819,-1213,8786,10521,22222,22781,15682,-8401,6727,-24046,7800,29270,1852,-22895,12544,30115,6663,13372,18372,2524,11252,-3000,23048,-26364,-12456,19851,27540,-23362,-12606,-9588,-9506,20847,-1655,1596,-16201,18185,21926,10999,16299,-30117,14354,12712,-2929,-5725,-24409,-19309,19275,24881,-29656,-32626,-29607,22024,-22671,32480,13394,-21095,-17691,-30986,-9453,-11532,11029,-10271,-15680,-20615,-17893,-24918,-7118,26680,-26107,-27682,18950,5454,-8960,-19261,19603,5982,-18225,19762,10422,3255,-20015,31006,-27695,-4324,-26795,32249,2922,19995,28472,28655,29653,8302,13947,-28952,3702,-8381,-20081,26418,1335,2723,15321,17553,-8170,-17953,7890,-15510,9343,-30941,4205,-11375,8089,-16579,26742,27293,17877,8577,-2,25173,-20258,-15635,-11824,-27799,32229,26054,-27806,26840,24069,-687,-5099,-25682,1336,18879,-21857,15379,26431,-22170,-9562,-29563,-31098,-1198,-23073,-17016,-16977,-23952,25126,-8487,-9254,-23416,-4328,5920,-15867,19165,30975,22669,-29643,19585,-12772,18889,-9434,-985,23682,5114,7898,-11652,13672,-22591,-32493,25274,-30891,-25647,-31892,-3034,3373,-24703,26709,-4373,24215,23723,-26941,-20309,-31333,21695,15030,-21485,3898,-25868,-24154,-1752,-27739,5759,2492,-4311,-7725,28392,-8989,23327,-8839,-18622,-13956,13400,25584,28652,3978,-28738,-20236,-29479,-29870,-9203,12839
,-28775,28047,-16219,1413,13081,28687,-26042,-24867,-26294,30290,26455,31300,900,22292,21197,-12187,14093,-19441,7764,-31659,-3483,-31546,23500,1223,-28982,30977,11231,-536,-26860,-16267,-20641,4635,24779,13646,-18964,1960,31286,-20289,19380,-1848,25062,25415,20902,6559,-913,-23598,28890,-15232,-6138,-7854,-6339,-20296,-8371,-19956,-24678,-32101,326,-2445,-13836,24630,-2549,-25281,-23869,-22577,-15743,10061,-2275,29589,-29714,12554,-11683,30697,25806,24476,-16362,-24195,-23632,17316,25797,-27146,31366,-2374,-21293,32223,-24033,10435,27857,20176,1051,10248,13430,20374,20512,20800,-25807,20332,27100,25702,-20915,16742,-20297,-16633,14859,-18644,705,-14454,31035,-1930,5859,7157,-23103,5881,10743,-32055,8494,12912,26412,-27958,4872,10933,9674,13167,-28452,11390,8111,-24456,28998,3524,-29696,-3161,-7970,-7783,9009,-29734,-22654,-32735,6634,1300,-19926,-22372,14004,18707,-25249,-27608,-28736,31960,9804,32520,18139,-32330,-28893,-8026,3149,-24041,-25251,-561,13258,-23768,6848,21934,-25202,-3616,29928,-14014,-25253,24539,32341,-14388,24112,20788,-23189,-17693,6262,-9436,16790,-30989,27475,26468,26241,-24392,-2809,2420,25298,31283,-27001,-14625,17253,15131,18090,632,15124,-8249,4658,7409,-716,20958,-8157,-28854,-31059,-28957,-21072,-12853,16684,-9272,-15172,-6121,15662,2900,17901,-10149,14344,-10623,3224,14724,-19471,-12328,6327,25169,-24664,27868,31858,2715,-161,21047,18855,-18112,6520,-8822,16452,4061,29883,21734,-18563,-22952,7989,23171,-26530,28427,-23116,-28683,29761,2072,-32503,4697,5160,-23447,-7136,-6070,-7828,-31429,-15577,29637,-22658,1678,26705,-1769,2937,-22806,23546,29033,-825,19457,7860,-6391,-8653,11411,9161,-13413,4311,-27560,18834,-6994,28047,6167,24438,28085,-30515,18536,25844,-29286,18072,-5776,-10364,-8786,-15812,-14819,-25062,-5147,18022,15143,-26169,26301,30056,-18065,30735,-29417,31270,-9390,32423,16658,30829,-10553,-27087,-12230,17743,5450,11529,23862,-32183,-17759,-19166,-26872,22776,-8766,17712,-29271,-21185,-11787,1162,-2776,21748,-30916,7658,-7300
,5583,30236,31003,-13445,-2874,-29652,-19586,-12263,-22388,-11839,-12385,2475,1533,-21915,-31342,29365,-20746,21282,14739,-27759,16890,-16018,9893,-7964,20525,-1603,-14292,13567,6325,-9691,17840,27098,12782,-5128,-29433,-31742,15023,21458,-31705,256,5194,16677,-7064,27263,24740,-23095,28514,27672,9619,4975,3932,-10963,32262,23050,21581,-8232,10467,3775,-17416,-32069,7766,-7047,18681,3402,-11913,3342,-7951,-13424,-32538,-8400,-10275,-22782,6487,-24138,-19147,-12053,-7658,3862,-26454,8913,25880,-3132,27585,-311,-27628,-26926,20068,-7011,8305,-1217,18787,9155,-21107,5598,4799,-20964,24187,-17822,-26824,7396,-22104,28545,28941,-23906,-17645,2252,22318,31608,-9516,-32653,23800,7768,31996,-25832,-5961,-25925,21485,-17036,6332,-31101,22802,-28331,15381,-12098,28167,18789,26049,24711,-30379,8419,3556,-7628,9861,2172,-5283,-1312,-1524,14610,17834,-21937,-7338,-17462,-29358,-2354,2762,23013,1969,28824,-21110,-29902,-25408,30303,-15755,-4904,1616,15256,31577,-2287,-21222,-2581,21441,-18443,-15024,-25917,-978,-22190,14184,-17531,-994,15652,3592,26654,19719,21782,-20820,10042,13284,-3048,25084,-19839,-24160,29211,4134,-6652,11074,-13591,15868,-131,-3861,-31295,-32368,-25955,20847,-25904,5169,6481,25259,19336,17484,-31479,-28107,-22549,-17932,-19405,-20775,10584,-4344,-13511,6130,12705,19257,15133,9915,-5220,11590,-13966,8300,23855,-30515,-30565,-7496,16033,-15047,-13322,29085,-9991,9512,28187,-26535,-5920,-23459,-20266,-8078,12526,25903,12075,-32721,-7313,-23433,17862,9263,5043,-11042,-3648,-11107,-20401,12884,24872,28034,29681,-1135,-12007,26962,10387,25470,28379,-29820,13983,-19255,22034,18922,-18509,-24116,15405,-8215,31516,-26685,477,-9562,27477,20473,-953,-17904,-22158,12467,-17468,-30285,-24658,2870,30651,-796,4291,12185,-1031,1516,-8752,-30667,663,-19514,-10330,-14479,24815,-27936,-2480,-29374,-11434,-26085,-6799,32075,-4818,15237,-30112,6503,-25586,2230,-25187,-26028,-2633,-17766,-28974,-2450,9284,-8857,-7269,15893,-28607,-30173,31771,-1006,-16800,6433,-9461,-17353,4213,
-6905,5996,4608,-17571,22186,-25101,9464,-2738,-14560,30641,12246,12446,29454,29625,-30549,-11939,-10926,10696,6331,-28796,-7267,-19461,-16647,-2508,-25406,22953,15145,32167,-22816,-8481,-4874,17058,-25272,-22250,-12219,-26778,-7331,-1058,-7174,-26549,5269,1240,23931,13659,25080,22615,10687,205,9275,-5493,20658,-2909,26673,-10737,-9389,4902,-31111,-14391,3939,-14785,-8500,30971,-31313,26142,-28056,20396,-30868,-22608,18983,-24585,17225,-15348,-30660,2059,-27038,27111,22324,-15882,-22891,3745,-19975,-20498,-26543,-2687,-9089,-30084,-9846,-28108,-3683,13392,-23441,9851,19454,14777,-26808,12147,-20624,-27870,32758,32634,1821,-26008,18993,19912,-2574,-11586,-21706,14594,3378,10322,-29767,427,-11462,-19298,28627,6307,28667,15711,18875,-29497,-10798,-25725,8292,26728,20452,-5725,14769,8387,8515,-4039,1996,-14502,26074,32098,-32255,385,195,-30947,25468,-19906,15946,30326,-12026,11831,13120,-14880,-15756,7665,-12395,-9555,15684,-13345,17162,23767,-11482,-16607,8597,3007,-24329,31640,29487,20947,-6000,-16377,-32443,18236,389,-19766,-31087,-12862,2232,-3626,-30193,19619,-11525,5226,21177,-14873,-8762,-17232,26520,-29835,-12096,-5869,8581,-25684,28501,-30451,18300,4394,-4408,-23057,-9298,-21031,-3333,14297,2745,-26373,6715,6268,-11693,28031,-21301,-18866,20073,5866,-20598,-9001,1717,2868,14192,-32254,8307,7549,27513,10566,16622,8016,-15308,-17896,14875,31473,16895,-13801,-23908,32082,27256,-8560,-13441,16224,11484,22496,-13557,19384,31984,14448,-26290,13570,-20569,-5558,21114,-13043,25956,-22692,9899,-24643,831,26673,17865,3410,23625,-6596,15793,2016,5504,19274,17240,-1024,16482,-28847,-27248,21838,23276,-18692,20363,-12424,-748,4179,9492,7336,-24993,-18598,-2288,-24272,-22193,9542,832,21660,-22070,-17776,-24077,21893,-10702,11424,8550,-23475,1598,20039,17695,-21346,9640,-890,-12359,-19367,29444,9024,-29676,-2926,23292,31322,32480,-9520,-15417,32004,-5310,2887,-2532,-28349,3131,-2731,-3350,-16954,-23677,8964,-6508,-27671,14899,15560,22901,-20784,-6601,24419,-19803,4286,-15877,
293,-13099,3476,4286,-30003,-12409,-32315,-25079,-1382,-25132,-13038,12854,27185,10999,-23961,8180,-25286,-18594,27880,-15565,-3024,12765,-10112,32253,6915,-23827,24963,-11044,28031,-30211,19352,26108,12786,9832,-4309,-17450,-30558,-27424,23864,-21140,25294,11729,-31221,13543,30491,-32437,-2053,-2821,-26015,9360,20308,-8177,25222,18611,1999,-31577,19548,-23864,30198,-15619,228,-25102,-23634,-8128,-15357,30918,-18371,-12552,9249,23068,23027,7497,23182,-11921,-6553,3788,3932,-1202,-32586,-17833,-29491,15237,26039,19987,6743,-16231,27213,-17359,-21492,-15000,-30867,5302,14902,130,23847,13693,-23725,32570,-13148,9308,-25134,-26599,-23771,-442,-28820,-5507,-3081,4306,-22263,-11894,-9076,-21958,-15465,-1777,6729,-11885,1332,10293,-27625,-151,-10928,-18218,-218,-25444,17320,-2342,9980,-12332,-27457,-5781,-14436,-29540,21262,27940,20343,12391,-25211,-24246,30806,-31038,-17747,6882,9535,-31839,-28406,23842,-8609,-27155,-16362,31652,31722,2919,-8720,-29582,-23201,-5046,-27596,32523,-23556,-10221,31187,-23797,12478,-4391,-29915,-22643,-30629,17672,-3448,-23670,-27370,26159,-5779,6448,-28224,31052,-11661,-26015,-19064,-26638,14606,13139,29540,-5324,2550,4106,-22310,-20987,-22835,23471,-1744,-11997,19567,6224,15075,-16679,31684,9154,-29610,-20223,-5139,-10606,6585,-19955,-17875,11584,-21132,-12807,-31693,-8047,18342,14758,-11068,7369,12711,25984,22794,-27428,-8883,-5328,-26269,10561,25524,-31483,31775,26170,367,-19860,31013,-23948,868,15708,17476,-23387,-14226,6590,826,-8082,21154,-19070,17131,28007,-1031,21435,19439,-23930,23007,24569,-31864,-797,-4130,27339,12775,8840,9188,16960,-3834,26274,-25982,2127,3622,-27300,-25628,-6166,-25841,3272,-1098,27672,21464,7254,-3048,12574,-30108,-32628,16821,-29075,-31185,12651,-28391,26794,-10935,3916,-1272,-11178,12459,-9517,19535,2655,-28240,-31038,5961,18382,-29939,3539,32600,12670,-31930,-17591,29361,-23398,-7904,-25458,29645,20257,14565,16625,32739,-23144,-21496,-3536,-23173,15966,5814,14933,-29005,-19593,17700,-302,-22333,-30847,28079,
5000,-7919,28869,-9645,4423,12141,6289,6543,21000,-16580,32511,12596,7403,15265,25501,-2742,31803,-8636,-27228,28954,15850,16725,4293,-25683,-28208,-14379,32054,11203,27250,-22170,-4214,-29285,15616,29403,31845,22994,27844,30652,2662,-4999,11457,22033,-27266,4136,22801,32652,-1119,-11353,-13064,24315,-32683,14421,794,27990,-4453,-16662,-23140,-10497,1399,-8495,-2372,4406,15350,26750,26535,-1448,-29177,19606,-5153,24169,6752,21050,-14024,5288,-1599,21731,24795,-9634,18929,-10242,-21493,14372,-8423,-4597,27103,23938,-17465,-29899,-1838,24250,32752,-23035,-6729,29544,-32222,11036,12227,-18136,6886,-11809,1156,2675,-1712,2160,-14440,-7241,22989,4252,-15744,-18805,-5733,5557,-22244,-9536,-20847,4231,-31999,-3638,13102,-21850,24195,-12128,-294,27403,17248,19514,-29380,-5903,29914,-20973,3822,10919,30281,15419,-25812,-20676,-2245,-4072,-25605,16133,5197,31561,-23610,-24957,18917,-24928,7919,13817,1058,-4551,21173,9729,-7030,-30497,-18079,9556,11591,17711,231,20699,-26682,-17857,-17302,12934,19913,17943,-18901,-2492,11685,-16481,-20384,30198,27093 diff --git a/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice5.tflite b/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice5.tflite new file mode 100644 index 0000000..fdef43a Binary files /dev/null and b/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice5.tflite differ diff --git a/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice5_golden_int16.csv b/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice5_golden_int16.csv new file mode 100644 index 0000000..0379c82 --- /dev/null +++ b/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice5_golden_int16.csv @@ -0,0 +1 @@ 
+30327,-32525,15603,13533,-26203,149,-3502,15108,-14364,22614,-18271,-7467,-17595,-26240,-28968,-11951,15806,-9391,-30420,-4776,32706,-31594,20545,-29681,-14582,20216,31762,-11479,-12755,1427,-10266,-2957,-18827,10766,-21304,4083,-15749,13286,7961,-16483,31817,-22357,28539,32708,-2367,-15301,-21378,-8705,-16312,19395,20538,8119,31094,31585,-24363,-9772,6669,22856,12416,-74,6444,-31749,-9141,-16659,-20552,-15100,9931,23196,-24706,25318,6933,-17017,16017,22866,-6041,32057,-4026,-17338,-11308,-13201,-26591,-18561,29878,-10617,22616,18659,-15318,26306,19555,18490,16360,-24897,19683,-10520,20566,-4010,-16981,-26389,-12948,18537,-19080,1374,-11274,27377,-16987,27925,-1088,29100,-18908,-28772,-21030,8623,-21329,7573,-10982,-2736,16675,8005,7429,-18456,6461,25156,-6455,-9665,-4516,-14383,-5221,-30507,-32352,-5339,27034,-12931,-12276,28725,-12227,21883,30195,20583,-21694,-24495,-22252,-18396,19178,-17681,28395,-876,24952,-4732,-4043,-6453,-21930,-2492,-29247,-7877,-7623,24982,20738,-9770,6341,24190,31100,18550,-7213,-1892,26808,13699,-17287,21604,17480,-31924,16741,-24276,-25434,-18647,-26680,-19837,22955,-8644,24453,-30305,-23223,30406,2943,-14600,-1437,-20071,21579,-17687,-23250,-2466,23292,25314,-18153,-21097,-12371,15931,5157,18186,-31170,-13977,7806,1934,20268,-18248,3081,-6376,-29887,15016,-7595,-32255,6362,-25600,-15893,-19533,-5283,2829,-9748,-21197,17745,-10089,6985,-19194,-21880,14790,6746,4179,29465,-30410,8781,-23592,-21590,-23144,7188,9209,27840,6909,5569,14170,30089,3429,-25452,31152,28542,20862,-1715,16468,-17556,11795,26254,-10863,-9431,-8312,1609,16801,-5074,13237,9941,-29320,20416,-24983,8158,-4152,284,-11769,27365,10007,-19902,-20122,-20810,23886,-11500,26390,2975,9893,11947,9112,-7735,7714,19243,-5038,-4285,-17589,16577,20243,-30535,-20160,-11604,-5445,-24762,22722,-609,2451,-20950,-5240,-26090,12321,-3891,-6322,1544,10487,-10032,396,17930,-23688,27383,1400,-337,18247,25061,15006,5682,24364,9737,-12909,13944,3329,2233,8252,-13988,9240,12153,31306,12364,-7
713,26838,1392,-3734,8956,10177,-2298,-27539,-21654,-1776,14645,4339,24169,11705,27677,5820,23666,17940,651,-13729,-1716,-20722,-9982,30406,-12011,-30849,-13605,-26036,32121,-12075,6307,-7251,13695,-4253,4490,3151,15271,-20257,-26738,-24325,2598,-14845,8248,-22260,-6370,-4642,29060,29802,18201,-30940,30887,8542,10015,-13725,-16752,-28895,27109,9052,-528,26102,-8382,16520,-30831,-31871,9775,-27197,-16131,1540,3323,11318,-13727,20990,-29993,-31967,13283,1549,-26278,-23479,-28823,30819,3883,-1235,-14701,26325,-8085,-1037,24919,-17233,11545,-4064,22554,-7386,9327,12903,-2457,19339,-11710,24114,-25271,4458,31849,-32254,28117,-7422,-7332,-21639,-188,-13944,-9732,-17734,4339,20794,-31881,-25049,-7805,5095,29792,-28640,8028,-27562,11463,962,-12481,-19139,-18130,15422,30457,1046,-30162,26306,-19325,-31481,1536,-7823,28958,18846,19346,32287,-31625,4707,22587,-13475,1691,13541,-12388,9421,4295,-26561,-16722,26274,26077,6793,31641,13229,3023,7935,19006,-10598,-12593,16068,-28736,32018,-5982,16322,12841,28440,-24228,22893,16617,10666,-10548,-9151,-21325,27468,10943,21431,4060,-2688,-4191,-3282,9106,-17240,7654,22015,-23175,1520,-9382,26952,25641,3961,1638,-16071,-1603,20041,4341,10196,23412,-29044,18872,25667,28354,22557,7758,-25894,9532,-24367,-6349,-5262,27905,6222,-921,-11969,-25273,-24023,-21030,5077,-9868,7051,-6836,-19242,-23716,9260,10332,9327,1093,10645,27868,-9245,1105,-16844,-19411,-16688,-19837,22309,8234,23487,-13123,17910,25023,10388,25206,12379,-18819,-30939,17874,13636,-10227,6164,-13708,7536,-20332,-30576,13936,24626,-31814,-19195,12942,-24439,5861,15102,-26221,-21247,5906,-26048,-16041,-313,-6890,-12704,-19435,-32224,20631,12600,1452,-13759,-22724,28892,4038,-11845,-29659,6704,26068,-6084,-1066,-26987,13721,8478,-2539,19555,-15225,16198,-6658,-20955,9367,-27958,-27717,-30520,-25932,15377,22588,3743,11995,1376,-24596,-3129,-22546,-15992,25474,18554,22637,12278,15112,4859,31357,-21234,29326,-30836,-6926,-6882,16298,-15462,5324,-21048,12633,-32125,-24560,-11258,314
29,11797,17262,5516,-7109,-83,-26043,-24798,18903,14377,-31237,-15437,20610,-13227,7512,28492,23253,15569,22973,23157,27098,9340,-6342,-28144,-12773,1216,-27945,-5148,-24556,-26643,32681,-12993,12314,32045,-16985,-24085,-920,-13978,13979,24494,-26024,-20381,-15290,-6053,9314,-25442,24135,4817,5757,8510,14399,26332,61,12542,4816,-17562,-30370,19358,-772,-4909,-13237,-30604,14076,115,12826,-4823,-10653,-30051,2013,-3658,23410,-19522,5436,10461,7747,17741,-1489,-20414,16728,-18503,-25127,-16575,-17863,-29196,32408,-31245,9260,2177,-23542,-24392,-13618,-6419,-21660,-21498,-21718,13807,1701,-27152,20442,-10758,29812,6510,4176,25439,16919,-2172,-2196,21185,-23985,10668,-20076,5329,10437,-4300,14430,6182,-26660,-29862,-6709,12496,4526,11647,12001,-30993,-22029,-22237,20178,-22271,3949,-25609,15138,31626,18181,-9847,24310,9925,-1976,-3785,-6009,20791,1594,-2647,10917,-31362,-28315,20943,32274,-30931,4247,25742,2980,30832,-14105,-25088,18561,14499,1259,-7140,-32672,-31886,-19954,29309,25905,9121,7087,-9042,17718,11234,11980,21899,6535,21179,-22968,7632,-22050,22472,12407,-19994,-10585,11713,-25042,17663,22551,16686,8884,31019,-32021,-9848,-23713,351,20140,3187,20034,25403,19733,-15244,-351,20151,11572,-14659,-25994,20351,20451,5598,-2731,9140,-22971,-11103,-21580,15349,14339,15423,-28573,10793,13584,-9534,15832,-22933,-15951,23489,-927,-12701,4742,31249,440,-32119,74,10774,4719,7766,13994,-24294,1347,27480,-10480,-19626,14359,-3331,-3431,-26053,1701,-17067,1591,-10124,14265,16365,-5442,-21180,-29583,3896,-8219,14370,8743,31051,29247,-31955,24078,-9641,2296,27722,6370,-19335,3964,-29036,14692,-11965,2566,-19185,-23725,-3668,-26162,14066,-29571,19765,17720,31355,-28518,8069,-17094,-3865,16538,-23747,2203,311,30143,805,21871,-18338,-32652,-30202,10604,31723,-16357,28615,11641,-6606,23903,-22158,31869,-27944,-27571,31586,16806,-18702,2388,4212,30176,4593,-3305,-6623,5798,-2758,-17323,-3362,-13587,-30359,29094,-14053,-13591,8282,24313,28325,-26256,-598,-23313,-14151,11930,-4806,-
2346,-26983,-2635,30923,-12814,-2831,-3128,19268,28409,3887,17949,-28233,18247,24860,-2852,-6190,-5786,-14192,-8338,18875,-11858,-32480,9184,12046,30946,-9854,8188,-10106,111,30112,-12251,-32325,9510,16184,20537,6540,32051,-7202,21021,661,-7801,-22905,-1133,-15184,2472,17578,-27622,4287,-6069,-2410,-18685,42,-27781,-22528,-30928,28279,-20260,18391,30363,20848,4910,32736,19840,23230,4368,-30129,-16521,19724,17384,-23568,8136,1487,-6655,-22576,-16523,-2791,-20904,24042,20272,8020,-24183,16033,-27017,-705,-21481,-18271,19283,13225,-9811,-1913,-9557,9508,-24219,-3782,14295,22114,-29476,5131,-18458,17291,24003,-24667,19387,-10573,-24673,26783,-15390,15092,17816,8999,-4889,872,-16602,-9449,10538,17195,2361,7827,-19910,18977,22608,-8002,3484,4320,-12867,-691,-25013,5379,8616,-30968,1836,31796,15446,32675,11239,27776,5082,30937,26334,762,-249,-9334,-9686,28512,688,25429,-18593,5822,-15633,23301,19267,27614,6861,-27513,29130,-16946,2183,19336,-11831,-26620,10877,-11342,-26327,-4535,17922,-9918,-27304,1415,11991,-24642,16286,-15663,-9643,26885,-8277,19534,6057,30170,2050,-6180,5264,3,23155,-10330,17945,-28553,-20187,14927,25466,3437,18302,18322,132,-25239,1500,-9037,-30436,-2265,-984,26403,-15731,26550,14003,-6726,18774,-31791,9731,17188,-10150,-3404,-18811,-7212,-31400,7593,2085,12712,28834,8900,-25373,13203,-26117,-30824,-18043,-27710,30737,-32229,-32521,21349,28697,13353,10244,5529,-23763,10900,24016,12643,32071,-16535,5525,-20801,-20043,2475,19451,264,-10507,-15548,-1446,10076,-24424,13096,-19908,24953,12085,-7208,17118,27192,-30913,-30795,5651,11989,-748,-24289,-8869,-15162,23166,-21324,32390,-23628,13680,24445,-10500,-15118,-12947,29811,12015,-23658,-5945,-12095,-21017,-1153,-9103,18183,9520,-26608,-27540,14773,28100,-26924,-3002,-28840,26528,19058,-1343,878,-11883,-20607,-4067,-30515,-12285,18294,-3690,30287,31111,30433,-7983,-3926,-7776,18952,-7917,26780,-8546,2948,27457,3030,25275,9194,26206,-31042,12295,-14445,26395,32157,12376,17553,-14098,16778,23602,30355,5003,23
763,26081,-8230,31,4122,-8049,25112,7020,-28458,32366,-14006,-7050,-31062,6386,-17100,-2374,-10753,-18414,-26087,-12073,-32322,13050,-14329,10875,-4394,-19028,-29676,17330,-13375,-18443,-19376,7356,28672,986,17344,14543,27205,-24194,24118,852,-8516,-23609,-4480,16284,-14666,-25764,16902,18251,28586,21633,-16627,20804,27558,10678,-15538,-17917,-19192,16908,3718,-6310,-20340,9823,18131,-3384,-28729,16902,-5675,-10160,-3036,20725,-29252,-2155,-11201,28810,-14743,-3249,23596,-28063,-23197,21609,-32364,-1745,-8071,-32324,32215,-6424,-13901,17032,-16947,3213,-2024,4013,7530,-25565,28901,-20546,-16926,20811,24313,-9345,29512,15706,17814,11649,24738,14541,-16737,1694,27943,4912,3422,-10857,17656,315,-1229,445,-25747,24395,-7339,-2187,15117,10917,-16892,-32532,-25433,3212,22457,13783,25717,-133,17231,-1889,-1348,-16386,27877,-169,-29099,5123,-4937,3518,6304,11185,-3560,-19126,28719,23395,-16360,4052,-29717,31711,-8326,-5718,5313,11389,-21033,-19260,-8567,2440,-24707,967,21890,24856,799,-819,-5502,31557,-27036,-29003,-21561,-11774,-20641,-4240,-2077,-273,-15103,20357,-19687,-4674,16517,22662,-23908,25511,-26002,16921,8721,7194,10198,-18002,12999,-8185,20331,9302,32392,5535,9849,-13462,-14757,-2718,30381,14391,27984,-18942,-15097,4798,-16438,-31310,-37,10637,-18034,-6117,32552,-31503,-1859,-30830,18098,-16703,-1970,-13205,-22124,16915,20546,4954,16495,-12199,-26703,12508,-9879,-26709,-26399,-32199,31681,5962,-7357,-8205,-18179,-1614,22011,8534,-5742,23409,-26000,-1258,-31431,-16354,31262,12030,6174,2016,-6720,-28513,-14699,17369,27845,7247,14591,-7552,-9775,2442,23256,11455,-31774,-17504,20526,29520,-3601,30562,-22286,-3741,-10812,-14074,29912,-7419,15001,23791,-2446,20815,28281,-15863,-29569,-3733,-25831,-17008,-27571,22497,15711,-8536,10823,-18181,-976,12368,-10983,-22217,-32446,-23237,-4163,-15024,-18238,9498,19384,-7370,4864,13547,31318,16597,-7393,24199,4477,15644,-27626,-9050,-11796,-13001,-11653,13490,14095,-1056,6269,-19531,-26602,32693,-19881,27342,21644,-6673,-16215,
-17796,8170,-11023,2518,1821,-6074,-23283,30021,-28762,24627,-26354,3759,15879,23824,-9868,5817,-6683,26109,17267,26247,19615,16390,7523,19011,16219,27588,26971,22465,-14139,17141,-30933,21427,18270,-5820,10880,-25162,7377,20245,-1913,-31737,-13409,24807,19554,12218,-23185,-23203,-21805,16785,-10875,18159,-2871,1648,11706,-18696,15521,-675,-26781,-19834,3636,1462,-24411,1502,7060,21085,-31982,-4388,28444,423,-30469,31526,2461,28179,-16478,-17553,-15537,1957,-31730,-1488,-292,-22206,26453,22319,10646,29779,-12134,-18519,-17479,11840,-833,-24212,9610,883,-164,-26028,-14149,21932,13306,-19100,26828,31232,-1989,-11548,-29253,26679,-28138,-17557,-25044,-14404,27604,27248,-13359,-21356,6086,-18003,24306,32578,-757,-15341,20260,-18514,-16050,-21786,-2570,-30531,-21195,-23630,13910,-11417,27837,19636,-26121,1265,-13222,7996,32517,26624,-22326,-32576,-3375,8536,26629,-4020,-20159,29243,11708,26705,-9010,3073,20761,-6021,2583,23048,-19746,-200,30982,-6442,15624,7170,17630,1205,9595,6976,-30974,16220,19961,-85,29737,31765,3968,23671,-17398,-15766,-10902,7786,-1780,5556,-22891,3852,30474,-15158,21750,18515,7107,-17636,-28701,-14078,-22988,-6689,-5966,12392,16113,-14265,-30713,-11911,-21070,-15554,7353,6894,234,18426,-8416,-31980,-2007,26908,22203,18288,-13827,-8903,2348,1275,18962,18351,19164,17873,1450,-22567,-18079,-16933,-28834,4870,23137,15445,18718,22807,14181,-4607,-19611,11191,-3276,-17096,2713,-10927,26325,28416,-116,16032,20856,-698,-17947,-28463,-8006,21372,-24749,30015,-391,-25482,3008,-4057,-29484,-8582,-31824,-13205,20234,18786,-31780,27357,-24147,-32488,-11334,-26506,326,13449,-16382,-13255,-25014,4421,-14600,775,10361,-3663,18828,-12528,-26143,16106,-28418,-16836,2913,11583,-8018,19035,32734,-15362,-11268,-15721,19137,-8638,-15421,-23514,11936,-29233,-2618,30563,-29697,21344,19394,2484,-10333,20925,-25760,-27999,5226,-18154,-26211,-8487,-23913,12597,-32562,12122,-22913,-30581,20416,-10894,977,-26781,26291,-7377,29932,-27124,19608,-5551,9976,-9169,16998,-8900,-263
71,28887,29411,-15141,-30686,-11927,-31099,13723,-7142,3721,-1716,-13297,-17857,-6048,8478,-14532,5421,14963,-12892,1560,-12333,15222,-7146,-29827,-237,23158,-28038,30815,12444,-20455,18795,-15336,3590,20960,31362,-27796,-9359,1125,23616,-20644,4603,31061,-25832,-25246,-22495,32719,15216,31332,-9265,17034,-14788,12700,26895,-27928,28024,-10939,26169,12134,17333,30820,-1381,-14512,27684,-13164,32680,-19512,22822,14495,17159,24783,-17479,17327,2947,-6361,-5406,5035,22630,8923,-22292,-32306,24526,29639,24534,19156,20104,-6396,21107,27899,-3286,-21524,-17780,15803,-22595,-23780,24965,25771,19916,19110,4997,26115,-10130,8437,25252,-8112,1025,-26681,-13803,15786,-29049,-23932,2075,-15612,13965,16672,-10985,25197,-24165,-28270,-7761,-30085,19842,3549,-22555,27065,15402,1383,21549,-18631,7813,17327,29352,10337,5658,-11436,-17430,9335,-2446,-21405,-30737,-24217,26938,-474,557,-27027,-6508,4675,21091,-32123,29485,-10082,-5613,19273,-19914,-31179,-20355,13740,20370,3148,6363,26238,-26480,17464,3831,-28777,-3533,16807,28551,-29762,-3533,28837,-8248,11382,27028,23575,736,20995,-25074,-14018,-1905,-32657,3053,-25929,-18665,-29067,-3450,-13615,16852,5541,-25078,-7958,17749,-10947,23119,12022,27411,15002,-23427,-94,13338,13054,-14310,13681,23397,30337,-23221,-17006,7282,7244,20379,4869,25552,9848,-18634,6930,26216,-9603,9555,17968,16362,-17421,-28809,13750,-8102,-18998,2901,18025,13111,30705,-31685,32582,30872,-27495,29618,21567,-20703,27346,-24830,31939,20467,25384,-10496,-29283,-30985,20898,-23130,30834,-16432,-23764,-22377,6008,4130,-4091,-26624,2258,-30398,-794,8453,4964,-1293,-12771,22682,27622,-12044,18600,-29615,-765,-1827,31695,-21036,-6119,-20760,-12642,-21913,-24129,25997,-31454,31478,-6830,10501,-22978,16618,-9614,-17083,-32017,30287,29632,16866,6012,647,-23994,26658,-2288,30473,-7056,32151,20551,12193,-11263,19842,17685,31335,-32191,-24434,3428,28050,9334,19208,-2845,22878,28223,27877,-27588,8428,17095,6896,6461,-10606,-9075,-1933,-12326,15825,-24247,-17500,23906,19579,
2300,13906,1187,-24151,14664,15594,-7590,29415,-24506,-5388,-25655,-7215,9023,-20969,-15040,-19615,2223,-2334,18817,-12555,7582,-235,9960,-1025,-16707,-20747,-26369,1473,9356,30148,-28305,-28036,-22122,-32022,-7093,-27253,27376,26317,27576,-27961,-1922,-8510,-4264,15951,18293,10173,-27712,27401,-3548,-24324,-22055,22618,8783,-23736,18880,-20418,25263,-27003,5713,-29186,-13353,-15974,8557,18978,-6298,11188,-28501,20629,21472,17296,6666,28694,1435,15503,-12669,-19961,-19054,-8583,26327,5860,19202,28256,-22145,30939,-3690,24526,-4274,15015,-8479,12603,8369,-20219,12545,-874,-12092,-21740,24713,-3907,20689,18685,21773,-17002,264,-1371,-30753,20293,3888,-5455,-6465,-4316,-10163,19275,-4212,16819,-3639,4964,-15768,-28608,516,17508,7389,-23626,28997,-3733,-14835,-18285,26874,32114,-19526,31167,-15745,-1723,30260,-8740,-11211,28563,10622,-1613,3801,-19995,-5666,22560,-26019,-11313,-8650,-24043,-31300,-20639,-23979,-9404,-2889,5712,31076,23646,23684,-12618,-3953,-13156,-31317,1184,20511,-7710,4965,11113,-18107,17410,-3752,5435,-13873,-802,-15233,21110,1488,-31842,4598,-16873,-31098,-22878,-28032,-22415,29956,7761,-8444,-26551,-19503,2788,14735,20829,-6063,-31025,6921,25745,2236,25186,28279,3763,23041,-30559,6730,-23450,5372,-2423,12772,6123,-18944,25435,9561,-31913,12639,31623,-32129,12739,-13930,16675,-13731,15458,13132,-22720,27103,-6644,16348,-905,-929,29288,5106,-28866,-3628,4734,14749,26525,29173,29740,9887,-26758,17646,-4811,24237,-1366,-28554,30715,-26010,31416,31330,7482,28634,29756,31589,-15656,-27916,15416,-203,-29475,20302,-5501,29506,-4297,10169,19060,-3429,8871,19051,-22167,20540,-24608,-4669,9617,-3559,-3760,14460,13181,-10776,-21037,-6658,-27221,15085,3908,17959,-16172,17770,610,-15256,16397,-27359,-29821,196,19742,-13627,-31507,-16366,-28432,-2604,-3809,-3670,20676,3431,-8815,-9448,17104,28416,10415,20093,3300,-5372,23156,12720,18222,30891,-27074,10526,11577,-6022,-7449,4811,-5660,25069,-8756,15870,24228,18760,-12171,1018,13893,19379,-17218,9407,7294,9661,504
,25634,-25732,-3465,27899,31774,31525,16287,5283,-22157,-2356,23139,-1846,-22103,-26304,29029,27425,-15869,401,10003,-5836,-30776,12247,-25598,-26758,-18985,14010,17536,-4420,4114,13934,4380,-23854,9505,28303,29954,-17898,25713,-22930,-20317,-8383,17370,-23924,23133,28603,7483,14844,-22680,-6057,25400,9447,-30790,2172,7724,-31403,-11710,24695,-5563,20930,12244,-16467,24016,31184,5738,3827,2950,21125,13176,24427,2960,-26719,-12855,23831,27602,-5376,-4840,-10009,-7986,-15167,-20957,-30843 diff --git a/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice5_input0_int16.csv b/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice5_input0_int16.csv new file mode 100644 index 0000000..e390bdb --- /dev/null +++ b/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice5_input0_int16.csv @@ -0,0 +1 @@ +4933,-12224,32557,18113,9128,-1096,-24029,27567,30078,-29175,6850,-8192,-18397,13324,-32317,-7016,9861,-23709,12712,-31125,10397,11143,19505,-18753,-30927,-4148,-21607,-3766,7981,15084,13934,17672,25776,-26746,9376,-3262,-14492,-17759,32239,-15486,-30705,23508,-2595,-5197,-7342,-12979,29462,-2778,-30653,11308,-18952,29021,-11225,28788,16129,-20017,-25746,-22427,7814,30088,17611,16762,-1616,-20654,-7715,-16244,-1566,-1922,7457,11654,-13882,12253,11266,11649,-17261,-4382,-27552,-10350,-15240,-27422,-13363,-3567,1625,23219,16875,-10525,30283,-14492,-8761,-15782,17893,-23502,20343,17375,15795,17822,-24576,-7694,17985,27573,22253,-1975,-27151,19335,-19883,18382,826,23497,13269,-27800,-20003,-3864,23012,15980,-28829,-27596,-13790,-12208,-32696,-23342,6096,-3410,-31968,21850,-16224,18995,12917,25455,-26289,28155,-27054,-13890,-18371,19426,19173,-5444,-23988,-4149,-15539,28048,-2288,-612,-9551,17250,-29101,23750,-4552,18210,24546,18199,31827,14935,3037,-10262,2719,3697,13092,6741,18064,-28200,-21890,4446,-19398,15736,10947,1397,-1856,27426,-10173,-9198,5824,-13713,31744,27988,-10223,5878,7076,-7753,8608,-17537,28845,-732,-242
87,-1447,-32328,14029,26561,6989,8088,-20202,-31708,-18338,24581,5448,14219,5994,22292,30226,22460,17882,2388,-26432,27315,-5975,15633,18911,1869,31265,-21230,-17529,-4506,-3406,19672,32237,14676,-29361,-7359,-12813,4703,14267,32393,-19884,-25121,-9846,-13240,10323,346,-6444,-24285,-23917,9482,25328,-18944,-7126,18060,17799,-28497,14362,27157,-1413,19847,22944,-10096,-21247,-4067,27154,-31839,-17352,27528,7984,10934,-19003,-17594,-10503,-24411,16652,-8658,4210,15049,-25408,-13671,7620,-27351,-14971,-3269,22254,-6208,23456,-26257,-20804,29993,-7882,17520,-5630,4103,-22501,-5818,-28156,-28532,12857,-30817,-28254,7925,-12640,-19807,3240,-15256,-21496,17954,29075,30962,24651,30492,10754,19773,30534,301,1564,17117,-10443,6091,16538,21617,-27015,11691,-9790,10180,-30855,-31018,31797,-30506,-3648,-8901,-13446,-18522,32756,-32356,-25535,13125,14928,20259,2960,-31499,7840,28740,-19111,-19208,10323,-22524,-3568,20688,-18235,1750,6366,-29075,-23353,-10223,23839,-22356,-10403,21201,24665,14749,20709,8513,-13743,23332,-23240,-26238,284,-3137,3212,-3765,3031,-20566,21364,-9108,29530,19714,-21550,28275,6244,-24401,16579,22066,25500,-12793,-2692,-20105,23847,22717,15622,28411,21529,27925,23890,-11818,-26537,29414,-6515,-28674,-6715,-21294,26103,-23651,-17561,4159,-2610,2965,4410,-1798,-21869,23133,15727,-20500,25996,-25881,5543,-988,10768,-1344,23931,11755,-13463,16114,-22318,30921,-32331,-25857,-29966,-4247,27685,21418,30190,-27532,-14038,-15032,-31247,28304,-31414,-12065,-20674,6037,21955,5746,-15313,-1699,-3500,-3609,-15708,576,-9064,-15586,2376,23310,-5105,23840,12319,14925,12503,-10384,31055,-19791,-26813,-20076,-18353,22573,22050,-28335,22877,9679,27151,1684,-834,27407,20053,14138,3930,376,2840,-19952,-31927,-22010,-8823,-24857,17394,-25032,-6758,14619,-6412,-7222,23603,-6484,3592,28197,29895,-17882,-24182,-8240,25392,-5811,32136,-9479,-18228,-27799,-30667,11173,-29495,-27010,23323,-32021,-4716,-4210,-19252,-23330,9924,-14644,-30309,16296,-7036,19234,7633,6786,-22437,5712,-17
319,-12536,6994,9863,8966,791,-25921,-10905,8469,-9468,-3614,-19256,30208,-31868,-6942,29582,7999,-25756,4072,-14996,5065,5204,4487,7529,-9023,-13787,-17426,-14508,-9289,32666,1268,-1547,17784,-26564,11088,-29900,20047,22971,16402,-27,-7343,15663,-29992,20205,32208,-21133,29214,-18164,24092,-19521,28454,-7307,-4095,-21924,-80,-29607,32643,28314,-16110,8225,24507,-13637,-18067,9485,7109,32133,14630,-2693,12261,5967,-21028,-12289,31197,-20820,-28359,-30833,26524,19743,22064,7033,30741,-6748,13574,-20617,-14020,-12556,-8541,-23564,-24088,16039,-9315,-29606,8981,-31499,4307,-2450,22119,23229,-32059,28251,16729,31510,18539,-29873,-22627,-10886,29643,-15871,12910,20745,2561,4932,28821,16980,-15069,11096,-24434,-20578,-29780,-18790,-15635,16500,12400,12036,-15558,-8564,10942,-1773,-18809,25981,30647,-13185,-7439,5002,15419,4419,-2010,-15624,22089,-4861,-10885,13976,-23257,1391,-25919,-27592,6887,30421,-29721,32450,19491,5662,-6734,-23969,-25438,17425,6744,13754,21317,-7493,-30156,-20232,-20000,-3717,17535,23186,-16047,4830,27773,2789,18245,24380,-6524,-8321,-3318,-29120,19099,17643,943,4015,16471,-17233,28669,-27072,-7813,22048,15898,-16762,-6037,-28197,6618,-17001,-4794,-13907,20461,-21215,-6438,25916,-18227,-23258,-31364,3938,-30799,5283,-15070,32243,-19758,-25219,-24912,-30598,-17042,6919,22515,-3438,-17473,-989,-19172,12847,-11504,26400,-30647,20045,-21710,32609,30181,-4505,-27352,2181,-5448,-15116,26801,16803,1830,32496,29088,20806,-1577,-20750,19737,-32171,-29954,30461,-15589,624,31427,-13612,-6023,27112,6884,-14114,2460,19355,-21842,17907,-6648,24341,-1824,-8670,-13349,27116,28722,-20459,-6412,26685,4197,-28056,23292,28769,5226,22738,-18036,3443,-20534,11204,-31182,10441,-31081,6704,-4824,9891,12763,30840,-2088,15823,-18850,-21099,-15159,-11340,20332,-23707,-15636,-21958,12776,-24814,-23657,-3012,-4106,-12545,28769,-7966,6026,-14538,27850,-10666,-9383,24157,28684,29077,-14657,-15787,15524,-17253,-2442,12342,-20598,23215,29108,25354,-14191,-26723,-14034,1923,-20910,-
22363,15019,-18923,-4807,-4606,-2746,-28750,-20809,20084,2701,-3809,-20536,20072,-24515,10563,-24863,19446,4522,16943,-26254,21570,-3543,9284,17002,10496,-25301,15891,-10632,1578,27052,-19559,9436,3129,-13406,-28659,3903,267,-17915,-30880,-24676,481,28686,5513,-27538,-8778,20285,-10228,-654,29310,26705,15298,-26115,18730,19266,-4490,1686,28447,32361,-29065,-14059,-12199,-5928,-29161,15402,-560,-10578,31932,-13614,-21628,-750,28223,21674,-16683,4632,-7375,-15201,-271,2570,-1103,-7017,-24472,-15833,11719,-21896,27796,-25938,1479,2567,-7644,27561,-7155,19304,20900,6170,19285,14035,-31746,-7263,-32617,-9536,12908,-26096,25261,-32151,19616,-10427,6295,7575,-12571,18262,-20291,-14348,-28239,25303,-28371,-18045,13033,24397,31536,22941,18047,27407,26102,4958,4030,6479,-11419,25787,21777,16878,-22920,751,27014,11845,-6642,-8039,-1554,24758,16842,28681,24932,16444,-15584,10144,27746,-29154,-20909,-10080,-26354,-25503,14512,24183,14919,-1351,-21144,-23930,20111,-10593,-10879,-12830,-28409,31974,-5162,28787,-15980,22962,24441,5717,17138,-4696,-15378,15128,27626,3464,16650,-9805,-24948,19049,16793,-17530,-31996,-25964,22751,32392,25964,29111,19126,8996,-27722,-23042,-13411,-27380,31837,-25784,11570,23685,27619,-19661,-6445,-27075,22289,22633,-28346,-6562,-23021,-13069,18244,30895,-18569,25772,2001,21027,562,-8584,-5402,8207,-18183,-3517,-23326,-25410,6309,-26215,16601,-23536,27065,-16445,-16340,-8383,-28658,-5957,5102,24452,-20368,-10787,26736,-28770,-1261,19468,-32634,3979,-6124,-24975,-11213,-25585,23113,-3380,-5162,14582,387,-12835,2658,-7521,22397,-19633,23372,17722,-9982,19389,28571,-21464,13008,-21330,-18595,7456,29161,26996,19145,28130,-24475,-19825,-9856,26182,14532,27115,-4836,-28894,-27668,4867,10950,-2296,-2607,-27762,7402,-16088,24163,9154,-28853,-32119,-7066,15596,18931,-32291,25288,15415,-9416,-7287,-5019,28045,5050,8491,19162,11682,-31815,-9769,-19712,-19959,-20759,-2902,-9117,-10847,1346,-25483,20812,-27214,-11791,32605,5298,-16197,-17991,25602,23439,-17376,6892,
-32020,31867,-10929,1253,-1012,5872,-480,15836,-19597,26398,-29376,-14196,25652,21629,24348,32519,17854,17938,24854,27642,-17882,11656,-31473,-27194,-25147,8002,-15477,-22093,-8818,2888,-29857,13216,12060,-2264,15856,-7199,-32287,-15355,-9761,13606,-4557,22845,-30787,13793,-7323,-24094,-27137,-31103,-3513,-10347,-240,-16389,25241,24522,10088,12018,-31892,-25581,-26518,-8529,-2638,-21910,31297,16400,-11385,-18864,-27456,30311,-17908,-4086,-16791,-1496,31000,-23978,18243,3898,-18900,26104,-32501,27255,-17709,-14087,-10314,29488,9787,-27146,9689,21025,13518,-22694,-27639,-23626,-1392,15290,113,-7717,23929,32054,13211,26840,21448,14447,-12302,-5842,-10400,20454,-6490,-25293,-5492,947,32562,-8681,9920,26332,-27318,-25614,8153,-28142,15777,30579,30581,-30804,2128,31596,919,26741,-12492,30594,4054,19512,-32675,24607,20440,-23014,-4069,15798,24901,30589,8936,-12833,25891,-26193,1716,715,-27613,-11481,17196,32560,8390,1978,-12570,-25632,6478,-14867,12832,-28050,-9828,-12287,-17441,-30287,5912,1387,4297,21313,-25919,15213,-10275,-23534,17061,-8673,5941,-21229,4585,-18563,-30696,-4802,-18518,9155,1702,-27534,-29756,10932,-32509,-11770,14710,28044,-23414,15466,6555,-15735,14149,-9736,-13658,30327,-32525,15603,13533,-26203,149,-3502,15108,-14364,22614,-18271,-7467,-17595,-26240,-28968,-11951,15806,-9391,-30420,-4776,32706,-31594,20545,-29681,-14582,20216,31762,-11479,-12755,1427,-10266,-2957,-18827,10766,-21304,4083,-15749,13286,7961,-16483,31817,-22357,28539,32708,-2367,-15301,-21378,-8705,-16312,19395,20538,8119,31094,31585,-24363,-9772,6669,22856,12416,-74,6444,-31749,-9141,-16659,-20552,-15100,9931,23196,-24706,25318,6933,-17017,16017,22866,-6041,32057,-4026,-17338,-11308,-13201,-26591,-18561,29878,-10617,22616,18659,-15318,26306,19555,18490,16360,-24897,19683,-10520,20566,-4010,-16981,-26389,-12948,18537,-19080,1374,-11274,27377,-16987,27925,-1088,29100,-18908,-28772,-21030,8623,-21329,7573,-10982,-2736,16675,8005,7429,-18456,6461,25156,-6455,-9665,-4516,-14383,-5221,-30507
,-32352,-5339,27034,-12931,-12276,28725,-12227,21883,30195,20583,-21694,-24495,-22252,-18396,19178,-17681,28395,-876,24952,-4732,-4043,-6453,-21930,-2492,-29247,-7877,-7623,24982,20738,-9770,6341,24190,31100,18550,-7213,-1892,26808,13699,-17287,21604,17480,-31924,16741,-24276,-25434,-18647,-26680,-19837,22955,-8644,24453,-30305,-23223,30406,2943,-14600,-1437,-20071,21579,-17687,-23250,-2466,23292,25314,-18153,-21097,-12371,15931,5157,18186,-31170,-13977,7806,1934,20268,-18248,3081,-6376,-29887,15016,-7595,-32255,6362,-25600,-15893,-19533,-5283,2829,-9748,-21197,17745,-10089,6985,-19194,-21880,14790,6746,4179,29465,-30410,8781,-23592,-21590,-23144,7188,9209,27840,6909,5569,14170,30089,3429,-25452,31152,28542,20862,-1715,16468,-17556,11795,26254,-10863,-9431,-8312,1609,16801,-5074,13237,9941,-29320,20416,-24983,8158,-4152,284,-11769,27365,10007,-19902,-20122,-20810,23886,-11500,26390,2975,9893,11947,9112,-7735,7714,19243,-5038,-4285,-17589,16577,20243,-30535,-20160,-11604,-5445,-24762,22722,-609,2451,-20950,-5240,-26090,12321,-3891,-6322,1544,10487,-10032,396,17930,-23688,27383,1400,-337,18247,25061,15006,5682,24364,9737,-12909,13944,3329,2233,8252,-13988,9240,12153,31306,12364,-7713,26838,1392,-3734,8956,10177,-2298,-27539,-21654,-1776,14645,4339,24169,11705,27677,5820,23666,17940,651,-13729,-1716,-20722,-9982,30406,-12011,-30849,-13605,-26036,32121,-12075,6307,-7251,13695,-4253,4490,3151,15271,-20257,-26738,-24325,2598,-14845,8248,-22260,-6370,-4642,29060,29802,18201,-30940,30887,8542,10015,-13725,-16752,-28895,27109,9052,-528,26102,-8382,16520,-30831,-31871,9775,-27197,-16131,1540,3323,11318,-13727,20990,-29993,-31967,13283,1549,-26278,-23479,-28823,30819,3883,-1235,-14701,26325,-8085,-1037,24919,-17233,11545,-4064,22554,-7386,9327,12903,-2457,19339,-11710,24114,-25271,4458,31849,-32254,28117,-7422,-7332,-21639,-188,-13944,-9732,-17734,4339,20794,-31881,-25049,-7805,5095,29792,-28640,8028,-27562,11463,962,-12481,-19139,-18130,15422,30457,1046,-30162,26306,-19325,-3
1481,1536,-7823,28958,18846,19346,32287,-31625,4707,22587,-13475,1691,13541,-12388,9421,4295,-26561,-16722,26274,26077,6793,31641,13229,3023,7935,19006,-10598,-12593,16068,-28736,32018,-5982,16322,12841,28440,-24228,22893,16617,10666,-10548,-9151,-21325,27468,10943,21431,4060,-2688,-4191,-3282,9106,-17240,7654,22015,-23175,1520,-9382,26952,25641,3961,1638,-16071,-1603,20041,4341,10196,23412,-29044,18872,25667,28354,22557,7758,-25894,9532,-24367,-6349,-5262,27905,6222,-921,-11969,-25273,-24023,-21030,5077,-9868,7051,-6836,-19242,-23716,9260,10332,9327,1093,10645,27868,-9245,1105,-16844,-19411,-16688,-19837,22309,8234,23487,-13123,17910,25023,10388,25206,12379,-18819,-30939,17874,13636,-10227,6164,-13708,7536,-20332,-30576,13936,24626,-31814,-19195,12942,-24439,5861,15102,-26221,-21247,5906,-26048,-16041,-313,-6890,-12704,-19435,-32224,20631,12600,1452,-13759,-22724,28892,4038,-11845,-29659,6704,26068,-6084,-1066,-26987,13721,8478,-2539,19555,-15225,16198,-6658,-20955,9367,-27958,-27717,-30520,-25932,15377,22588,3743,11995,1376,-24596,-3129,-22546,-15992,25474,18554,22637,12278,15112,4859,31357,-21234,29326,-30836,-6926,-6882,16298,-15462,5324,-21048,12633,-32125,-24560,-11258,31429,11797,17262,5516,-7109,-83,-26043,-24798,18903,14377,-31237,-15437,20610,-13227,7512,28492,23253,15569,22973,23157,27098,9340,-6342,-28144,-12773,1216,-27945,-5148,-24556,-26643,32681,-12993,12314,32045,-16985,-24085,-920,-13978,13979,24494,-26024,-20381,-15290,-6053,9314,-25442,24135,4817,5757,8510,14399,26332,61,12542,4816,-17562,-30370,19358,-772,-4909,-13237,-30604,14076,115,12826,-4823,-10653,-30051,2013,-3658,23410,-19522,5436,10461,7747,17741,-1489,-20414,16728,-18503,-25127,-16575,-17863,-29196,32408,-31245,9260,2177,-23542,-24392,-13618,-6419,-21660,-21498,-21718,13807,1701,-27152,20442,-10758,29812,6510,4176,25439,16919,-2172,-2196,21185,-23985,10668,-20076,5329,10437,-4300,14430,6182,-26660,-29862,-6709,12496,4526,11647,12001,-30993,-22029,-22237,20178,-22271,3949,-25609,15138,3
1626,18181,-9847,24310,9925,-1976,-3785,-6009,20791,1594,-2647,10917,-31362,-28315,20943,32274,-30931,4247,25742,2980,30832,-14105,-25088,18561,14499,1259,-7140,-32672,-31886,-19954,29309,25905,9121,7087,-9042,17718,11234,11980,21899,6535,21179,-22968,7632,-22050,22472,12407,-19994,-10585,11713,-25042,17663,22551,16686,8884,31019,-32021,-9848,-23713,351,20140,3187,20034,25403,19733,-15244,-351,20151,11572,-14659,-25994,20351,20451,5598,-2731,9140,-22971,-11103,-21580,15349,14339,15423,-28573,10793,13584,-9534,15832,-22933,-15951,23489,-927,-12701,4742,31249,440,-32119,74,10774,4719,7766,13994,-24294,1347,27480,-10480,-19626,14359,-3331,-3431,-26053,1701,-17067,1591,-10124,14265,16365,-5442,-21180,-29583,3896,-8219,14370,8743,31051,29247,-31955,24078,-9641,2296,27722,6370,-19335,3964,-29036,14692,-11965,2566,-19185,-23725,-3668,-26162,14066,-29571,19765,17720,31355,-28518,8069,-17094,-3865,16538,-23747,2203,311,30143,805,21871,-18338,-32652,-30202,10604,31723,-16357,28615,11641,-6606,23903,-22158,31869,-27944,-27571,31586,16806,-18702,2388,4212,30176,4593,-3305,-6623,5798,-2758,-17323,-3362,-13587,-30359,29094,-14053,-13591,8282,24313,28325,-26256,-598,-23313,-14151,11930,-4806,-2346,-26983,-2635,30923,-12814,-2831,-3128,19268,28409,3887,17949,-28233,18247,24860,-2852,-6190,-5786,-14192,-8338,18875,-11858,-32480,9184,12046,30946,-9854,8188,-10106,111,30112,-12251,-32325,9510,16184,20537,6540,32051,-7202,21021,661,-7801,-22905,-1133,-15184,2472,17578,-27622,4287,-6069,-2410,-18685,42,-27781,-22528,-30928,28279,-20260,18391,30363,20848,4910,32736,19840,23230,4368,-30129,-16521,19724,17384,-23568,8136,1487,-6655,-22576,-16523,-2791,-20904,24042,20272,8020,-24183,16033,-27017,-705,-21481,-18271,19283,13225,-9811,-1913,-9557,9508,-24219,-3782,14295,22114,-29476,5131,-18458,17291,24003,-24667,19387,-10573,-24673,26783,-15390,15092,17816,8999,-4889,872,-16602,-9449,10538,17195,2361,7827,-19910,18977,22608,-8002,3484,4320,-12867,-691,-25013,5379,8616,-30968,1836,31796,15446,
32675,11239,27776,5082,30937,26334,762,-249,-9334,-9686,28512,688,25429,-18593,5822,-15633,23301,19267,27614,6861,-27513,29130,-16946,2183,19336,-11831,-26620,10877,-11342,-26327,-4535,17922,-9918,-27304,1415,11991,-24642,16286,-15663,-9643,26885,-8277,19534,6057,30170,2050,-6180,5264,3,23155,-10330,17945,-28553,-20187,14927,25466,3437,18302,18322,132,-25239,1500,-9037,-30436,-2265,-984,26403,-15731,26550,14003,-6726,18774,-31791,9731,17188,-10150,-3404,-18811,-7212,-31400,7593,2085,12712,28834,8900,-25373,13203,-26117,-30824,-18043,-27710,30737,-32229,-32521,21349,28697,13353,10244,5529,-23763,10900,24016,12643,32071,-16535,5525,-20801,-20043,2475,19451,264,-10507,-15548,-1446,10076,-24424,13096,-19908,24953,12085,-7208,17118,27192,-30913,-30795,5651,11989,-748,-24289,-8869,-15162,23166,-21324,32390,-23628,13680,24445,-10500,-15118,-12947,29811,12015,-23658,-5945,-12095,-21017,-1153,-9103,18183,9520,-26608,-27540,14773,28100,-26924,-3002,-28840,26528,19058,-1343,878,-11883,-20607,-4067,-30515,-12285,18294,-3690,30287,31111,30433,-7983,-3926,-7776,18952,-7917,26780,-8546,2948,27457,3030,25275,9194,26206,-31042,12295,-14445,26395,32157,12376,17553,-14098,16778,23602,30355,5003,23763,26081,-8230,31,4122,-8049,25112,7020,-28458,32366,-14006,-7050,-31062,6386,-17100,-2374,-10753,-18414,-26087,-12073,-32322,13050,-14329,10875,-4394,-19028,-29676,17330,-13375,-18443,-19376,7356,28672,986,17344,14543,27205,-24194,24118,852,-8516,-23609,-4480,16284,-14666,-25764,16902,18251,28586,21633,-16627,20804,27558,10678,-15538,-17917,-19192,16908,3718,-6310,-20340,9823,18131,-3384,-28729,16902,-5675,-10160,-3036,20725,-29252,-2155,-11201,28810,-14743,-3249,23596,-28063,-23197,21609,-32364,-1745,-8071,-32324,32215,-6424,-13901,17032,-16947,3213,-2024,4013,7530,-25565,28901,-20546,-16926,20811,24313,-9345,29512,15706,17814,11649,24738,14541,-16737,1694,27943,4912,3422,-10857,17656,315,-1229,445,-25747,24395,-7339,-2187,15117,10917,-16892,-32532,-25433,3212,22457,13783,25717,-133,17231,
-1889,-1348,-16386,27877,-169,-29099,5123,-4937,3518,6304,11185,-3560,-19126,28719,23395,-16360,4052,-29717,31711,-8326,-5718,5313,11389,-21033,-19260,-8567,2440,-24707,967,21890,24856,799,-819,-5502,31557,-27036,-29003,-21561,-11774,-20641,-4240,-2077,-273,-15103,20357,-19687,-4674,16517,22662,-23908,25511,-26002,16921,8721,7194,10198,-18002,12999,-8185,20331,9302,32392,5535,9849,-13462,-14757,-2718,30381,14391,27984,-18942,-15097,4798,-16438,-31310,-37,10637,-18034,-6117,32552,-31503,-1859,-30830,18098,-16703,-1970,-13205,-22124,16915,20546,4954,16495,-12199,-26703,12508,-9879,-26709,-26399,-32199,31681,5962,-7357,-8205,-18179,-1614,22011,8534,-5742,23409,-26000,-1258,-31431,-16354,31262,12030,6174,2016,-6720,-28513,-14699,17369,27845,7247,14591,-7552,-9775,2442,23256,11455,-31774,-17504,20526,29520,-3601,30562,-22286,-3741,-10812,-14074,29912,-7419,15001,23791,-2446,20815,28281,-15863,-29569,-3733,-25831,-17008,-27571,22497,15711,-8536,10823,-18181,-976,12368,-10983,-22217,-32446,-23237,-4163,-15024,-18238,9498,19384,-7370,4864,13547,31318,16597,-7393,24199,4477,15644,-27626,-9050,-11796,-13001,-11653,13490,14095,-1056,6269,-19531,-26602,32693,-19881,27342,21644,-6673,-16215,-17796,8170,-11023,2518,1821,-6074,-23283,30021,-28762,24627,-26354,3759,15879,23824,-9868,5817,-6683,26109,17267,26247,19615,16390,7523,19011,16219,27588,26971,22465,-14139,17141,-30933,21427,18270,-5820,10880,-25162,7377,20245,-1913,-31737,-13409,24807,19554,12218,-23185,-23203,-21805,16785,-10875,18159,-2871,1648,11706,-18696,15521,-675,-26781,-19834,3636,1462,-24411,1502,7060,21085,-31982,-4388,28444,423,-30469,31526,2461,28179,-16478,-17553,-15537,1957,-31730,-1488,-292,-22206,26453,22319,10646,29779,-12134,-18519,-17479,11840,-833,-24212,9610,883,-164,-26028,-14149,21932,13306,-19100,26828,31232,-1989,-11548,-29253,26679,-28138,-17557,-25044,-14404,27604,27248,-13359,-21356,6086,-18003,24306,32578,-757,-15341,20260,-18514,-16050,-21786,-2570,-30531,-21195,-23630,13910,-11417,27837,19636
,-26121,1265,-13222,7996,32517,26624,-22326,-32576,-3375,8536,26629,-4020,-20159,29243,11708,26705,-9010,3073,20761,-6021,2583,23048,-19746,-200,30982,-6442,15624,7170,17630,1205,9595,6976,-30974,16220,19961,-85,29737,31765,3968,23671,-17398,-15766,-10902,7786,-1780,5556,-22891,3852,30474,-15158,21750,18515,7107,-17636,-28701,-14078,-22988,-6689,-5966,12392,16113,-14265,-30713,-11911,-21070,-15554,7353,6894,234,18426,-8416,-31980,-2007,26908,22203,18288,-13827,-8903,2348,1275,18962,18351,19164,17873,1450,-22567,-18079,-16933,-28834,4870,23137,15445,18718,22807,14181,-4607,-19611,11191,-3276,-17096,2713,-10927,26325,28416,-116,16032,20856,-698,-17947,-28463,-8006,21372,-24749,30015,-391,-25482,3008,-4057,-29484,-8582,-31824,-13205,20234,18786,-31780,27357,-24147,-32488,-11334,-26506,326,13449,-16382,-13255,-25014,4421,-14600,775,10361,-3663,18828,-12528,-26143,16106,-28418,-16836,2913,11583,-8018,19035,32734,-15362,-11268,-15721,19137,-8638,-15421,-23514,11936,-29233,-2618,30563,-29697,21344,19394,2484,-10333,20925,-25760,-27999,5226,-18154,-26211,-8487,-23913,12597,-32562,12122,-22913,-30581,20416,-10894,977,-26781,26291,-7377,29932,-27124,19608,-5551,9976,-9169,16998,-8900,-26371,28887,29411,-15141,-30686,-11927,-31099,13723,-7142,3721,-1716,-13297,-17857,-6048,8478,-14532,5421,14963,-12892,1560,-12333,15222,-7146,-29827,-237,23158,-28038,30815,12444,-20455,18795,-15336,3590,20960,31362,-27796,-9359,1125,23616,-20644,4603,31061,-25832,-25246,-22495,32719,15216,31332,-9265,17034,-14788,12700,26895,-27928,28024,-10939,26169,12134,17333,30820,-1381,-14512,27684,-13164,32680,-19512,22822,14495,17159,24783,-17479,17327,2947,-6361,-5406,5035,22630,8923,-22292,-32306,24526,29639,24534,19156,20104,-6396,21107,27899,-3286,-21524,-17780,15803,-22595,-23780,24965,25771,19916,19110,4997,26115,-10130,8437,25252,-8112,1025,-26681,-13803,15786,-29049,-23932,2075,-15612,13965,16672,-10985,25197,-24165,-28270,-7761,-30085,19842,3549,-22555,27065,15402,1383,21549,-18631,7813,17327,2
9352,10337,5658,-11436,-17430,9335,-2446,-21405,-30737,-24217,26938,-474,557,-27027,-6508,4675,21091,-32123,29485,-10082,-5613,19273,-19914,-31179,-20355,13740,20370,3148,6363,26238,-26480,17464,3831,-28777,-3533,16807,28551,-29762,-3533,28837,-8248,11382,27028,23575,736,20995,-25074,-14018,-1905,-32657,3053,-25929,-18665,-29067,-3450,-13615,16852,5541,-25078,-7958,17749,-10947,23119,12022,27411,15002,-23427,-94,13338,13054,-14310,13681,23397,30337,-23221,-17006,7282,7244,20379,4869,25552,9848,-18634,6930,26216,-9603,9555,17968,16362,-17421,-28809,13750,-8102,-18998,2901,18025,13111,30705,-31685,32582,30872,-27495,29618,21567,-20703,27346,-24830,31939,20467,25384,-10496,-29283,-30985,20898,-23130,30834,-16432,-23764,-22377,6008,4130,-4091,-26624,2258,-30398,-794,8453,4964,-1293,-12771,22682,27622,-12044,18600,-29615,-765,-1827,31695,-21036,-6119,-20760,-12642,-21913,-24129,25997,-31454,31478,-6830,10501,-22978,16618,-9614,-17083,-32017,30287,29632,16866,6012,647,-23994,26658,-2288,30473,-7056,32151,20551,12193,-11263,19842,17685,31335,-32191,-24434,3428,28050,9334,19208,-2845,22878,28223,27877,-27588,8428,17095,6896,6461,-10606,-9075,-1933,-12326,15825,-24247,-17500,23906,19579,2300,13906,1187,-24151,14664,15594,-7590,29415,-24506,-5388,-25655,-7215,9023,-20969,-15040,-19615,2223,-2334,18817,-12555,7582,-235,9960,-1025,-16707,-20747,-26369,1473,9356,30148,-28305,-28036,-22122,-32022,-7093,-27253,27376,26317,27576,-27961,-1922,-8510,-4264,15951,18293,10173,-27712,27401,-3548,-24324,-22055,22618,8783,-23736,18880,-20418,25263,-27003,5713,-29186,-13353,-15974,8557,18978,-6298,11188,-28501,20629,21472,17296,6666,28694,1435,15503,-12669,-19961,-19054,-8583,26327,5860,19202,28256,-22145,30939,-3690,24526,-4274,15015,-8479,12603,8369,-20219,12545,-874,-12092,-21740,24713,-3907,20689,18685,21773,-17002,264,-1371,-30753,20293,3888,-5455,-6465,-4316,-10163,19275,-4212,16819,-3639,4964,-15768,-28608,516,17508,7389,-23626,28997,-3733,-14835,-18285,26874,32114,-19526,31167,-1574
5,-1723,30260,-8740,-11211,28563,10622,-1613,3801,-19995,-5666,22560,-26019,-11313,-8650,-24043,-31300,-20639,-23979,-9404,-2889,5712,31076,23646,23684,-12618,-3953,-13156,-31317,1184,20511,-7710,4965,11113,-18107,17410,-3752,5435,-13873,-802,-15233,21110,1488,-31842,4598,-16873,-31098,-22878,-28032,-22415,29956,7761,-8444,-26551,-19503,2788,14735,20829,-6063,-31025,6921,25745,2236,25186,28279,3763,23041,-30559,6730,-23450,5372,-2423,12772,6123,-18944,25435,9561,-31913,12639,31623,-32129,12739,-13930,16675,-13731,15458,13132,-22720,27103,-6644,16348,-905,-929,29288,5106,-28866,-3628,4734,14749,26525,29173,29740,9887,-26758,17646,-4811,24237,-1366,-28554,30715,-26010,31416,31330,7482,28634,29756,31589,-15656,-27916,15416,-203,-29475,20302,-5501,29506,-4297,10169,19060,-3429,8871,19051,-22167,20540,-24608,-4669,9617,-3559,-3760,14460,13181,-10776,-21037,-6658,-27221,15085,3908,17959,-16172,17770,610,-15256,16397,-27359,-29821,196,19742,-13627,-31507,-16366,-28432,-2604,-3809,-3670,20676,3431,-8815,-9448,17104,28416,10415,20093,3300,-5372,23156,12720,18222,30891,-27074,10526,11577,-6022,-7449,4811,-5660,25069,-8756,15870,24228,18760,-12171,1018,13893,19379,-17218,9407,7294,9661,504,25634,-25732,-3465,27899,31774,31525,16287,5283,-22157,-2356,23139,-1846,-22103,-26304,29029,27425,-15869,401,10003,-5836,-30776,12247,-25598,-26758,-18985,14010,17536,-4420,4114,13934,4380,-23854,9505,28303,29954,-17898,25713,-22930,-20317,-8383,17370,-23924,23133,28603,7483,14844,-22680,-6057,25400,9447,-30790,2172,7724,-31403,-11710,24695,-5563,20930,12244,-16467,24016,31184,5738,3827,2950,21125,13176,24427,2960,-26719,-12855,23831,27602,-5376,-4840,-10009,-7986,-15167,-20957,-30843 diff --git a/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice6.tflite b/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice6.tflite new file mode 100644 index 0000000..e292050 Binary files /dev/null and 
b/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice6.tflite differ diff --git a/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice6_golden_int16.csv b/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice6_golden_int16.csv new file mode 100644 index 0000000..53d4823 --- /dev/null +++ b/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice6_golden_int16.csv @@ -0,0 +1 @@ +-23707,251,3265,-23633,-21642,-17842,-29967,19742,-21826,-65,12359,-7672,19647,3686,-24964,6013,-5427,22152,-9750,-6493,24446,-7494,-18673,-26251,6088,13269,-12631,-30677,12616,-31418,-6476,7855,-30582,32394,-27181,-1973,8786,-27433,20245,21698,-27077,-11840,-22288,-5412,18711,-29367,6059,-28779,-23007,23861,7212,28200,-13678,-23250,-3192,7559,5013,11756,-23412,-7983,24077,32028,21753,21534,-14593,26341,2725,-21445,30815,689,2944,18525,20952,-20731,27947,-18719,-14060,28414,29243,21377,3190,-29929,-25065,17083,10842,9236,28481,-15373,-5647,10434,-3153,3816,4451,-28424,28992,-31504,-28985,19044,523,-2594,-10221,31072,24875,15710,20003,8938,-23726,-9028,30035,24241,21969,-31711,-19013,-22005,27070,-14349,-15284,-8297,-10710,-6598,-26511,28058,-22388,-3534,16879,4957,-17163,-13286,17732,-20616,-3279,-18677,-1042,-15850,2100,-30556,-7211,17346,-30939,-25841,28374,13571,25982,-27139,22182,12620,27694,18356,-19930,-19207,-11446,17935,16104,-28604,-4211,3662,15992,3906,-30211,19148,-21106,7366,-11002,3383,-15176,-15039,29720,15232,-15126,-14241,-8940,-27113,15405,-32082,32172,20541,-14586,-7272,23353,-23192,-16044,31335,-29387,-9149,-20647,-8263,-16066,23983,7561,3837,12667,21226,21459,24690,5284,22753,5941,-24350,21394,-2498,22389,17727,-14694,22184,20884,-8975,21000,-15361,-7455,-413,-19790,-4150,8646,-11681,4851,14446,15099,18816,32374,-5242,966,17400,-26365,7911,-7799,-17894,-28706,4213,21752,24720,14383,6757,29478,20469,-9717,-13194,7164,-22652,21184,24778,20512,27707,-18467,-25593,-9622,23210,-23925,-1138,4933,
10571,19989,-11071,-15695,-24036,-22836,10331,-4003,-23183,-26309,32702,-13726,15973,11419,-17435,-31168,28952,11393,28333,-16274,-3179,21074,-7221,-20247,-25242,-24414,-328,-7424,27910,4553,22901,-7715,3002,-10101,6875,7192,17805,6114,-24511,30922,30016,-1049,4728,12523,-13802,12824,15200,-16206,-13529,11546,25899,26891,31665,-13868,21013,-25492,-9734,16271,23156,-19218,-23010,-5986,-11770,3439,-21737,-27253,-15355,-8820,-18550,2920,11851,-29033,-32028,-3836,-10363,9537,24098,13271,-11974,-31388,-18262,-31752,14812,-20045,-23348,5508,18502,2537,-6395,-13858,14194,-24859,-7479,-32376,1373,-10853,-9912,-1333,-7204,-5618,29540,26123,13013,792,-16344,-2996,22491,7095,395,-19123,13613,27296,-30626,-31829,-19948,14334,-10165,24242,-19053,23117,-3505,14270,17066,24103,-11709,12427,5293,-24415,11678,22453,-19805,-28315,26626,-8483,18422,4984,23054,-8482,32515,25663,-14128,-17203,-25940,-15799,-4565,-20292,28744,-22014,30005,5831,20955,-30201,-20827,-18869,-26054,9868,18452,7149,3761,32654,29009,14381,-7802,-5002,17737,-20719,1204,-17380,10789,-29392,-16080,-24635,6464,-7338,9665,12942,26362,4425,-19179,225,25890,-4293,-25596,8384,11836,1110,-4906,21774,4644,-28859,23984,-13331,-5347,30817,-32722,-1742,-30713,-1693,-29149,-16684,-8701,28095,32224,-18749,-4613,17760,-22336,-5423,-21272,8701,17789,3695,-11334,19398,21555,13747,-19018,-18007,31890,-20106,6700,16254,-18570,8662,-16203,-14401,11986,-18092,6978,-13533,-11441,-10660,27834,-18859,-9338,12508,15422,11514,27984,-9603,16643,-24263,17194,25930,28958,-21400,28158,11939,-1831,-6905,-6140,2029,-15961,-3614,1900,-3030,21449,-7555,10502,-9790,10206,3182,27863,-26546,3481,-10587,-4706,24900,-6090,7491,3539,-7134,21093,-13852,-27933,29261,-17813,17020,3291,30281,2428,15717,23867,-19701,-12255,27660,-24684,17907,-4583,31110,-3573,3435,27687,-30639,8375,19031,-5145,-15079,1507,28582,5390,12572,-4225,23925,3952,25512,23703,15937,-5323,-28683,-18352,-14379,29720,17474,15458,4806,11305,4334,-27800,31210,-20447,-14467,-10885,14768,
12318,-26796,-3792,-15582,-15576,-13303,5571,-32553,3147,14206,-27467,20057,8455,-14449,12462,-21756,-20691,19816,2220,-25881,-8428,-14619,31083,27465,-6835,26450,-30887,-32099,13512,-5042,23533,6165,-30354,-10829,38,17187,12743,-22622,32494,20232,670,-81,14963,13289,-3120,9218,18634,6486,-24024,15555,-10141,17925,-17654,2645,-16020,25296,13459,-13719,-16487,-16847,1681,1092,-19946,-27199,18423,30503,-13000,-15347,-30331,21499,10399,-11398,12540,-4349,10321,14928,27364,-19824,22725,21735,-322,-10976,24318,-16116,-23140,-12280,23349,18901,26701,5555,12169,-13679,-24752,195,-8001,-13452,-14695,14572,12205,20468,-5851,-18186,16609,-24469,-22603,-27272,-32116,-24296,5005,27488,32235,-7142,-12675,-7919,22455,8966,-19838,-11221,-262,-6732,-10417,-10679,-10557,17914,21172,11347,-20188,-648,30238,18292,-14504,8463,7212,-17482,14466,22907,-32163,-21144,1725,32634,-2904,-13436,-16670,24381,23314,-25299,-31176,-8387,-20925,32487,29867,-6677,-29595,-29769,15182,10506,11027,25860,27240,-18875,-22089,1330,-30873,-25172,604,-19700,-19053,8356,-32240,-19357,1708,-5281,-26059,-8236,9453,31465,-31756,4381,-16686,27167,9951,9550,29747,5712,14917,-10755,-17493,-1594,7243,3903,17008,21379,27734,11320,-28108,6408,11868,1663,-26182,21142,31399,-11639,-26476,3294,31649,-13774,-31082,-660,26799,-22614,-21499,1624,-2893,-17603,5543,7941,25108,-7168,10391,-6872,-9537,6305,-8088,14444,-4326,-15949,-29804,-19254,18017,-2313,16512,5986,-31558,-23562,-6052,-28067,-21744,17896,24031,-10062,-24322,-7476,8765,-28964,-25056,17814,-28643,24333,17123,-17584,-12850,13341,-27307,-5335,6286,-18099,28479,15148,-66,28356,-5472,13063,9840,23922,6163,-4149,-24800,6555,-15296,28032,3505,-15603,24537,32693,-20401,19740,7719,18090,22439,24361,-11513,28037,-8118,-699,-6569,-22509,26521,-25685,21957,10565,-2726,32060,4749,-9053,3132,17067,-12212,9218,-32209,-18230,-9019,24372,32514,29975,-11817,9824,-22635,-30479,-2791,5986,7174,29348,1475,-29998,-5983,-14926,-3721,18560,-24067,-6984,22554,-23357,-28944,-29488,183
82,13712,23530,-29187,-17996,-9127,31766,8143,27489,-9303,-11135,-29682,-19832,20134,-28683,-23398,14077,-13413,-29853,-15802,27077,5252,3595,-31523,-24487,-256,-26947,-21151,25572,10227,-2931,-15091,-9182,12327,7630,9108,27701,-27778,21950,-25297,-17067,-9549,8261,-20391,29449,2909,29183,-383,-29987,-25336,-6115,-8439,30389,-29162,25980,-21256,14862,-32519,-7618,-25061,24786,11204,6385,28841,-11465,19929,-26063,10715,26332,-11122,9270,-1682,14797,-27152,-28475,-8692,-3124,29003,-24041,32423,-6301,-20902,-14325,-7557,10961,-32379,-18716,-27143,-28102,-15901,10903,11303,15275,11455,-24260,-25552,18370,-14717,-7323,-30440,21968,34,-21414,-27522,6425,15294,11753,-5657,-14363,-5011,6206,319,-7584,17103,-8474,-7794,-24743,-30849,-29680,-20858,-31641,11320,-9390,28213,-8374,9516,-18466,-8916,23601,32671,14757,-7078,-18806,12670,-13730,-6825,-23756,27882,17346,27042,6365,17554,-26981,1643,-9914,15672,3469,14488,-25416,25368,-25266,23387,26983,2241,14545,-13362,-20529,5582,3981,3881,-15853,14745,1571,16749,-1521,10010,-28899,-10551,11815,11939,14693,-17971,-14297,25465,-22603,-979,-18331,29666,-30485,-11600,-23050,-20207,-16396,28682,-28342,-11631,7507,-28256,-20363,15259,-722,-25315,5450,-25429,-20143,-4590,-25782,-28821,-5517,-32368,-14448,-7952,-18470,4968,-22215,-32556,-26188,11659,-14240,-22220,26225,-13786,-23889,3576,-28142,25567,-13045,-7027,17287,-3192,6868,6034,24795,-25581,-8861,-2764,-21954,-24683,-7908,21573,-31641,31425,4453,-19620,2639,-9036,-23443,22301,-2087,5380,-9300,-26081,-18163,-32650,11906,14861,2426,25172,27132,-20320,-27731,6421,-4314,-5515,-16948,15119,9318,-26539,-9352,29910,17467,-30975,25364,-12028,-3720,8790,24075,28374,25057,-17896,-19039,-18713,-26330,-11327,23894,-16530,6903,25482,800,11827,-26051,-4554,7675,-30125,9707,31313,-30480,5865,-13392,-13849,6099,24241,4978,22666,-5184,10322,-3051,-11808,-17324,29340,29556,23824,-4812,5391,-29673,-10606,4720,-8199,-32274,2813,-10793,-26945,14177,3629,18706,-24315,21879,-18529,19270,-11760,-10061,-2
9208,15597,-10384,-8600,-13318,-10571,-23799,8758,19371,-12422,-2894,-5262,-20588,24051,11737,-26204,2951,-5542,-6082,-7040,9009,29324,-10513,-21048,-4248,7880,-3809,7892,11823,8982,-6533,12631,-31213,27915,-18828,-28188,-31012,24765,16826,-9192,12801,-5766,20370,3464,-8087,14879,-341,29270,960,-14416,-9356,-24228,-13223,618,144,27493,1478,-9988,-21737,-13109,17080,400,7016,-28919,15704,-10587,-29600,9955,21325,-9456,4841,583,20637,11011,1540,22147,-5751,17107,-24850,31843,-30679,-23046,-13281,-18892,-6290,29973,12687,5990,27955,-25607,-30684,5655,13870,29558,6385,-31676,-19843,25948,-32044,16179,-27175,-27607,-7539,29575,-31821,18268,-9720,-23373,-21012,-3297,4408,32358,-31607,-27849,-27865,-18189,10377,-6305,2678,23935,16538,12930,13393,-11405,-3584,258,31724,9909,-14047,-7702,-5445,24552,8970,29104,-7534,-12482,30915,-24655,4534,-19686,-15755,-11526,16136,-2942,28230,-16901,1241,-25277,11705,-27014,-4737,30962,-24622,-20288,-24533,-23177,11266,22836,-32309,-18863,-6748,29989,26212,29672,-23163,23882,3751,22311,21282,-5402,21323,5224,-8645,12780,3100,-10406,21449,-27781,-19946,4160,-17036,-32263,-20677,-20755,11261,27409,-10438,12625,-18556,30082,-2586,19698,27149,-1190,24395,-28865,863,-28366,7634,12059,20603,32592,-32416,-26899,-22235,10019,-15564,-29679,8714,-18100,12315,25402,6270,28495,-7032,-28722,1572,-3648,22836,-13434,-19994,-27944,3190,-12806,-15937,25590,29791,-13436,27829,31649,-30773,12325,31762,-21336,10493,-22289,-25853,31289,32261,10630,29099,7542,26142,-764,-22785,10865,15297,-6928,10698,-17122,-145,-2558,-4046,-22615,-7411,-3326,12488,-22267,8130,-32212,-764,23296,3757,31041,26375,24546,-21631,-11422,-20203,22341,11463,-23573,21887,-3542,-31015,-11711,20248,19587,28886,31833,-5573,8574,11565,15114,25113,-21942,-12579,-5076,-15154,32286,1335,16264,-15307,-14005,-20541,21277,2728,14888,6591,5613,-22448,2848,-20962,5293,-28753,24622,-11174,-24558,18012,12771,6698,13206,22947,8270,-14757,-1507,21566,6318,25295,-14594,-3859,6047,-17475,20755,-807,-285
34,23695,-13984,26945,20171,17088,-2177,-20907,32419,-10450,26758,18617,23980,-31190,-4752,21732,-10836,-9085,28316,-13676,26299,-6853,-32021,22982,7986,30083,-21644,-18552,28229,5210,12746,16253,21858,-16430,-8773,24236,16863,-29752,-2722,-30915,-6457,20203,25521,16403,3420,6636,-28795,-29783,12975,16915,31841,27906,13530,2869,-17472,28355,28548,31485,-29780,26497,25339,-17489,-26226,17448,16022,4924,-25690,-12723,2447,-763,-20593,3729,-11118,9376,24571,-18064,-25321,-12939,-18832,-14199,-20206,28784,-3928,-10374,2413,16194,-29119,1788,502,11052,-8785,-5546,-13589,-30773,14706,6045,27584,969,-10989,23306,-18138,18150,-19658,8867,12201,-81,8672,5144,-16010,-28310,27998,-28519,-12293,-25940,9746,14340,-32687,-5414,10261,14522,-18594,-19240,-18150,-12050,-2832,15968,-8459,-13634,-25496,-20888,-9261,6798,-4380,3166,32630,-26249,6739,-31342,32636,-15069,-14023,-27792,32561,-709,23162,-24281,-30936,6100,24141,27794,-28688,-8566,-8239,17677,246,7317,-14714,13136,-26552,32099,-22332,32443,3309,3983,5932,16911,19558,11158,-11500,2815,666,13410,-18253,20104,-27909,32531,25664,-14077,-7540,21980,30161,-12017,28148,-32731,-27535,-147,-28131,17380,22149,29971,22113,-25547,-3321,28479,-12324,-27921,27591,7502,23090,2453,20153,-14364,-9267,29688,32606,-8901,-32407,14256,-30549,23196,7793,-291,10326,22950,13186,5318,-32457,15449,-30764,-23508,-28250,6236,21751,26961,-31339,22568,690,27333,12761,-2443,-12501,30232,-13605,17661,-4813,-7499,-8604,16512,-23208,-1846,-28603,14308,-28533,23914,-28928,-20350,-19575,-3091,-17331,-9228,30459,-24056,12280,15358,12415,2834,14623,2067,18744,-13965,18676,-29482,-1753,12171,2267,30687,10102,-27181,5397,23013,-2572,27009,-3515,-14966,5518,10674,13441,-28292,13947,30154,2257,-31143,-24520,-28092,15204,10444,-22387,25930,-1946,-23221,1489,-2114,12831,-24983,-20610,-22230,-23440,-5457,-13652,18972,-11739,11026,-21439,24202,-6819,-605,-21830,-1818,18716,2682,20881,5015,-12040,-3225,-657,2276,-7342,27680,2679,7473,29106,-9271,29964,-27959,7081,-5811,
16333,-18680,2010,8329,-29051,13929,8483,25869,-25698,-12597,-15135,-25178,16986,18996,-23845,-31733,25196,22792,-28395,-28102,-31232,-3547,2412,31542,-23371,-32654,-14305,-30063,12843,-21972,12798,-24778,12980,-26145,15265,-2792,11,-32569,10632,25941,11971,-5471,29114,-25044,30386,-11009,20774,-1797,-22967,-26370,-2441,23745,-5238,196,-8797,-20011,-10109,8666,-32600,15083,28315,14639,-22860,-30263,-32535,10584,8155,13375,-28376,107,31492,-19726,-1265,-9137,323,1103,-6948,-31208,10171,4272,15719,12443,30054,21681,13149,23742,-72,10267,-30232,-20113,-31164,5767,-8603,-15328,-13312,15445,26538,-2145,-5358,-16769,11244,-19565,-32069,773,-8807,-12413,1634,20434,7618,-20746,18388,21849,16903,-25209,-20111,13033,-15214,-17021,30013,-13331,-9897,-21637,-5750,-18249,-5873,-16188,23774,-29643,20329,-13152,-20849,25200,17839,-22488,27743,23704,-27463,-32056,915,19894,5590,14134,15115,-3497,32302,32184,-12952,30248,20557,9931,-7593,-19612,-4666,-21001,-26423,1280,29038,3888,16587,26974,8861,-15879,13018,7935,32028,20704,987,14559,8199,-26941,-7393,-5186,-15173,-13886,32591,-8155,19504,31113,-24694,20518,6887,25580,28732,16368,14728,-18316,-2915,32236,-21418,30838,26583,-7652,-3646,-20097,294,24707,8341,16548,-19615,-6367,-11216,22728,-9746,14972,23573,-25023,10368,-20433,22797,14942,21705,-21718,-8677,27242,3880,10123,-10452,30259,-17467,10004,-31176,-15042,-651,-28873,18470,10574,-5632,16009,24458,-9328,-20161,-10758,23494,16870,-16025,-11688,-29025,-7668,31317,-6588,-4182,-15671,-18788,29855,-5788,-28018,-11209,-12965,10441,19750,29206,22971,-20117,20091,-5255,-25310,-13002,31691,-28366,-23697,-8320,-29139,1298,-9043,-29720,28917,32316,-7976,-755,26171,-5050,-16435,-6049,7127,-8054,-3830,-2873,-6067,24201,-18175,-11449,-15947,-12485,27617,-11677,-3339,28322,4039,32455,25436,-26337,14878,-14904,5777,3156,-21652,-18192,-17719,-26412,27408,16897,13623,-14539,-29587,3096,9611,9347,-25398,32260,-1238,-20361,21841,18853,-28411,10348,-11929,23503,5898,-32492,-7155,2695,-22,17125,-2
8975,-14585,25582,-27426,17370,-14313,1025,30345,-18725,30745,-13312,11993,24507,-27054,-20954,24659,29370,23137,-2003,-23470,-29165,18241,-5075,-705,-24912,19571,-10261,-12813,-14202,14165,-11328,5358,677,-4659,20886,-31176,20123,1198,-7398,-3484,-11236,-16021,-20431,9988,23740,-4776,-29826,-21433,-5890,-18460,-22516,-12950,-7403,-8761,20362,29555,27859,-3811,-26152,24282,-6020,5957,6172,7499,1415,-3347,-11336,19720,-14257,14616,-24665,-12630,12549,17072,23499,27010,16038,-21396,-28876,-25069,31682,32137,-10248,3992,-30727,-19479,-9301,-24892,-16244,-9613,-31654,-28420,-2388,406,13481,1289,16191,-81,-24334,-25475,-5924,-22465,-30960,-5776,30931,29911,-28140,32035,30029,-10691,-27207,17839,261,4019,-26178,18269,-8101,883,-8631,-22927,-4064,-3148,-29462,6871,-9458,10343,-20234,23740,13017,-22440,-24647,5765,23584,26500,-18547,16131,2337,10156,-22442,-24437,18829,5501,23602,-22861,406,-28791,31750,-14596,29482,12374,-25231,-4155,-30679,15698,14390,-5667,-18349,5353,-19515,-17571,9976,-31746,3017,-7854,17382,24728,4944,27923,11687,20690,6482,-15964,-30496,22227,-23233,6734,25398,5968,18176,-11996,-2529,-6546,23054,-30559,2174,-12558,-24257,16979,-7038,25374,14009,16140,-23647,-20425,-11889,24671,-25203,31247,-30749,6187,20993,-13283,1841,-22370,18639,-13717,-23843,6557,-30102,2149,32596,-29599,28567,-19745,932,-22593,3550,4279,-3422,-6063,23008,15000,-9818,28824,-4311,15630,-999,1599,-1850,4850,17059,15845,32300,26324,-16376,14076,20493,14502,-508,692,-6438,31076,-9851,13706,13809,-19590,25011,24946,-28738,29984,31493,4688,3286,31650,29192,-4636,567,-8152,13725,-15893,-29996,23913,30295,-11813,-10672,3180,9685,23577,15451,-8090,-8743,-6126,24501,-9594,-10443,-30606,-9578,14973,11138,-18963,-18727,-19758,-32759,-25940,-13315,-31723,12523,16802,8988,17242,9878,-5760,-1674,32194,31813,22991,-29149,28230,11466,32517,9228,-27949,20727,-25661,13932,10836,-549,-9517,14800,-15059,-6316,-29839,-7939,-20767,17656,-950,13242,-24455,-31262,-21705,12508,17702,-16460,-32220,-19603,-
27671,24233,14474,-16664,18820,-18720,4953,-14407,-8012,-5412,-29245,-32192,11587,31472,-5696,3068,6225,-32741,13067,-27825,-13818,18753,-1429,32248,-14668,-8996,-8972,-17443,-11435,7823,19143,-18583,-31288,-28837,-19637,24783,-12584,-8724,21752,1026,-20462,1992,24120,-20512,31153,-3485,-24542,19326,-7452,8275,14251,30073,7687,13435,19969,17224,-8226,15045,16594,-419,28523,-31064,24334,11352,-20761,-5354,28690,16179,32314,-8373,-27313,28497,-7123,-8120,-12312,3480,13183,2684,4158,-7578,2982,29262,-25782,21492,-5838,27743,-26117,15583,-32479,8079,26506,-26153,25980,20069,11129,-11822,-3762,18387,22393,31460,30788,27336,-25972,-4140,16189,588,32233,22277,-12797,9378,-9924,-9250,-1444,-12147,2787,-9332,-19750,10557,-31307,21936,-13539,5683,-7633,-32107,-4540,4858,-20879,3776,-421,10880,-24767,2417,-1803,-30511,29924,8495,5115,-16815,1412,29407,14903,-32450,-11954,26650,-20319,-21766,17225,-6586,9633,-7276,-308,22039,28943,13871,24175,27661,-260,18500,9680,-26767,12631,25744,18670,12749,26623,-23030,12645,18822,14227,-8435,-11460 diff --git a/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice6_input0_int16.csv b/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice6_input0_int16.csv new file mode 100644 index 0000000..4613316 --- /dev/null +++ b/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice6_input0_int16.csv @@ -0,0 +1 @@ 
+23985,11644,-27838,-10700,24183,9398,634,15261,3696,-13956,11368,-22682,-11608,-13580,-27926,15838,3484,-23465,-9290,-11119,-18207,16966,-29443,13377,3941,31261,-6093,26181,30257,10672,-17467,20895,-5911,27190,-21026,-26185,-18519,-5005,-21077,25614,8587,-2210,8327,-24335,-11575,-19649,-26169,-14484,-3398,-20026,-11540,-24283,3981,22071,416,-32075,-9131,30810,-20067,-1027,17843,-12147,-5415,28283,-3738,27119,-18471,24701,14323,25821,-20553,-23416,-24518,29923,-9129,32387,-15280,-6861,13073,30960,1033,-27095,22965,-8995,8504,-14865,5698,16322,17906,12493,8593,21261,9446,24654,28945,-10712,-5973,-10551,15832,-32459,-29151,-22591,-21392,21862,832,-6704,-17160,344,-7382,-27558,8273,-2176,29429,32035,-9990,24636,31799,-919,-27113,-20443,28218,-24157,3145,-20997,-15272,-11799,-3736,-1767,18389,-18374,24339,11919,-17022,-1123,5638,23447,30631,319,-27880,20913,-11839,29905,-9646,-24935,-30412,-22601,29580,1244,-21048,15465,31571,1156,-7232,-30689,-13064,-1609,-19806,-24118,-6884,-2271,31336,-12337,8177,-7959,-16321,767,12608,14113,5917,8786,23174,-11796,-792,-13981,13282,-9060,-23823,21203,31008,-15005,-15710,-9782,-2898,-19963,-2512,-3410,28911,-12280,-27108,18598,-648,-18011,22296,9425,-19688,-29604,24537,-22635,4496,17261,-5909,31572,12001,-19558,-481,24791,-3445,24819,27176,-28615,-8233,5073,-19216,-30572,25886,32447,28124,-28904,20095,-13959,-21843,-22890,10816,9044,-15127,21492,-11677,5543,-22267,-10071,27515,-4713,3484,-1803,6679,28932,4078,15304,-32500,-23015,-22246,2590,-17616,-4153,-27830,29804,-16891,-22180,20928,-12703,18654,14450,22610,-15542,-31346,-15150,-10909,11941,23117,32207,-1567,16999,5114,-27705,-9195,-12467,-26826,-14601,-26999,-17864,-8203,21136,18680,-27135,-13603,7954,15431,-28131,-8969,15843,-6337,-21985,5722,-4442,-18467,18928,4589,-14306,-24548,-17698,25788,11670,4301,1821,-23603,24860,24197,25770,26202,28415,-10238,-7976,-16348,3076,11156,-31078,-491,4096,-15122,-23691,23973,-13031,-4122,9321,-4545,-31718,-32143,13824,9600,16578,-7925,32055,-3
2520,-4269,-2887,26220,28361,-22942,14978,-11585,-16411,23969,8889,-26989,-3066,-9115,8778,6376,23148,-25568,-28038,28348,23817,31808,4386,-10484,-22401,11939,27054,26635,23030,21301,-25891,9632,-28793,4893,2843,-3921,-6793,12763,-239,28026,-24213,-10107,-22561,-15607,-17562,8846,-11515,8111,-96,-29115,19379,31021,16509,-29665,28233,-14909,4483,29661,30436,-18716,-24338,-3694,-9733,11914,11221,25661,14983,-10547,-6794,-6819,-8338,935,3496,11080,10725,-26331,8315,-20783,-28106,7025,32205,31591,-13832,-27840,15534,28843,-3372,30835,-10996,14488,18746,5818,-2733,27930,19359,13381,-13638,-12601,-1372,-2167,15790,-9095,11537,5128,-2340,30698,10943,-20390,8089,-28701,-1201,6531,-21927,2524,-29888,-9382,12642,-28102,-25267,27851,9828,-22952,-13288,31771,-3571,1207,9275,32256,-16982,-1728,-9023,-1958,-16146,32723,-11678,17569,-7272,-11381,-2188,7543,17892,-14460,21204,-10886,-5156,20750,-32411,15544,17507,-21335,20781,-18000,-2601,3294,-15760,-26774,13759,1547,31011,23871,25632,13013,-31784,-30602,-11395,-12369,2521,249,32357,17571,6496,28632,-9209,12359,7268,3792,31797,-10210,28006,30949,-30945,10475,-6402,-9916,26457,-25293,-30472,4655,12821,-1263,-25539,11614,27980,22920,-8503,24734,-20300,23058,13721,28768,18336,22055,-15436,17568,21332,-13518,-29644,22983,11742,22040,-31877,30336,-20767,-8375,-7008,12536,-22451,-5937,16482,-16464,-9334,-29016,28018,-16718,-9580,21026,-25432,-9300,-5292,19164,-8314,15370,18290,15244,16657,-21450,8318,22540,21053,29792,1419,30962,18882,17495,-14800,7807,-26136,-21563,3448,20582,-32263,-19137,23953,18111,31702,-7302,-32618,-14126,-8014,-21014,-26440,-18228,6546,25648,-11028,-19941,10276,23160,-2784,-4156,-22374,26307,28467,24665,1033,13885,21215,-214,-15242,-8334,2694,31209,-28603,-27529,13515,6090,-7124,9557,21035,14435,19038,-4291,-10571,19749,-2724,-26371,14,19133,-4693,-17512,30863,19000,10804,-17330,-1860,-16162,-11090,5819,-23937,26429,13034,2940,-15768,-23666,-29433,-5436,7177,24209,4240,-18975,-22077,-13103,5396,12796,-8090,-20144
,-24367,-11345,28011,-3253,16837,-27286,-27240,17382,23193,-16918,-1821,12469,-22027,25265,-14830,-18716,15359,-25124,15160,-32288,7584,-12074,-27967,21915,11551,-10675,-4999,8939,11218,-15090,-5398,3155,9103,-12686,-25460,5920,-11471,-6466,19523,8662,-12136,9500,2430,20373,10102,-4390,21042,-17831,17545,-19026,21446,29234,5957,-9673,31238,-24543,-23048,-4684,-19694,29743,-14289,-14982,10740,-30570,-3781,13505,29663,-12851,-24435,-3396,6412,15632,8100,6632,-22008,28382,5444,20251,6071,11821,9667,-7109,-13951,437,28414,-23653,13676,19767,-10902,23385,20297,-9496,-14686,4062,-26191,-16726,29653,-10792,30251,333,22071,11252,12017,-9949,-15264,-16672,25452,13601,28717,27538,6589,10035,9383,-22055,-5125,2496,-18037,24975,16533,23429,18898,-27167,-4214,8056,6525,-4031,-4755,-8165,-12735,-23573,-14829,2952,10301,-4412,-27716,-14556,14618,18771,-20311,15210,-30420,18673,-10062,-468,-26945,-11709,28371,-27298,-29409,14048,10401,1649,15264,23397,-32431,-6066,-6747,-6689,16772,27628,-12644,-28151,-16617,23161,-23567,3580,15983,19296,-2701,-6940,1805,-29681,-18180,7564,-26710,-3013,14726,15607,1609,30001,-10514,-29881,2574,-27175,-22218,-2476,462,-27380,29682,5400,897,14484,9640,20604,-31116,11971,31397,17620,-3244,-19013,-7861,11197,-15475,-11280,12478,-17456,-16484,4341,-31550,7970,-1146,-10199,1867,11572,-10074,-4450,9003,-27621,31999,-30889,-25685,3352,14710,-30277,-15277,-15595,17732,28047,26804,-32217,-27016,-19179,-21024,-29526,-300,-20798,6085,-21582,7842,-17372,-15281,-14054,-9178,-14599,16372,25852,4742,-25608,-3176,-22319,30258,18423,-10791,-9937,-23048,-19393,-1649,29676,-1219,-23133,29945,12726,6984,-31368,20783,-24752,-32621,-10283,-29057,23568,13702,15110,4165,20731,-28020,6870,-28422,6659,-2973,-4396,11400,-16318,23476,-8939,2079,-14965,-19595,11050,-28454,-15646,15836,-3696,-17065,15456,18376,901,2644,32377,-20284,4226,21470,-13027,-9784,18339,-7302,18983,-26375,1870,-29358,-3356,-27525,2867,10988,-19458,13854,-10012,5082,30049,-31643,22449,-15193,24697,-12855,
11544,-25363,12530,-20833,24669,-18422,3533,-30590,471,7274,6577,12080,19377,-32320,-24917,-28257,-12261,-15256,29737,19847,-20826,-27995,15422,-3903,20297,-123,21648,-14935,16183,17655,28625,-24020,-3018,25293,-11628,-11958,23241,18615,1887,-29654,28819,-2056,-13839,-5214,-155,25128,5730,-19901,-24717,1509,-19196,3310,-6104,29974,12105,-21627,30765,28791,17753,-5556,-19331,-3276,-25654,8442,-17000,-25054,22275,30361,-13948,-6519,4244,32567,-26435,23722,-30986,4288,11991,790,24308,-9093,29259,11923,402,16443,31322,32742,6354,-27545,-11941,-6204,-13736,19030,23776,12595,-8812,-22389,5791,-28388,15484,-9217,-23056,26945,9954,31851,-20813,20726,26088,-6105,26805,2767,-15761,-31292,-23624,-18598,-18673,27151,-7906,24420,11706,-3396,3283,11760,6734,-8728,-9363,-3241,30147,-12824,25452,-10502,169,3950,-15586,-28981,12663,28167,29955,-11095,15167,9379,17677,-2978,1080,5335,32056,-3050,18193,28277,-27387,-8384,32385,9072,20369,-25068,21013,-82,26611,-11648,19469,-28234,-7793,3141,8681,-32155,28582,-3048,-14080,-5117,-31489,8398,14802,24017,-18712,-14229,-16210,-6672,-20382,2269,9797,-13417,32661,4131,1689,32487,-16622,2565,23203,-19723,23411,10244,20586,-13802,27357,-5484,-26443,28675,-10755,27368,-20571,22935,-15237,13342,-8687,10833,26558,-4014,24087,26073,-23651,17455,-30927,25212,-29651,15421,-20769,11475,4976,-25044,21162,-3610,2202,26897,23253,-2935,-17053,23541,29134,-19213,30277,-29646,19458,-29935,13214,-14473,-15649,-6652,20608,-10185,10280,-18154,14546,8512,7704,11965,-15977,20672,-8640,-8450,26252,9835,-26906,-25923,-31728,-813,-4028,12309,13992,23535,-30785,18780,26322,5659,-7955,2322,21421,1968,-6644,10094,-16217,-20565,17668,26642,-9740,8767,-30308,-14985,-26948,20650,9765,-21607,4175,12657,30767,-17816,-27073,23210,-22298,-6030,-14424,9816,-25948,-3362,15925,1685,-19625,-19296,12377,9077,19702,-25479,-27603,-2356,19021,-7761,5499,32756,-6138,12948,-12171,2481,31635,29203,29913,-9575,3057,-5862,-9636,-8610,-30406,13774,-7485,7969,20013,2505,24429,-4960,16637,
21797,-9230,-24540,10823,-12783,-28336,3793,-9660,21578,19973,16386,-29951,-17476,-19259,-3686,31987,1498,-21538,-13258,-6921,-444,-9824,-18901,7403,-17592,30089,-290,32103,31493,15945,18016,11068,-17004,29984,20674,15293,733,-23842,28999,-6104,31703,22290,29119,-9628,-6857,29423,-23707,251,3265,-23633,-21642,-17842,-29967,19742,-21826,-65,12359,-7672,19647,3686,-24964,6013,-5427,22152,-9750,-6493,24446,-7494,-18673,-26251,6088,13269,-12631,-30677,12616,-31418,-6476,7855,-30582,32394,-27181,-1973,8786,-27433,20245,21698,-27077,-11840,-22288,-5412,18711,-29367,6059,-28779,-23007,23861,7212,28200,-13678,-23250,-3192,7559,5013,11756,-23412,-7983,24077,32028,21753,21534,-14593,26341,2725,-21445,30815,689,2944,18525,20952,-20731,27947,-18719,-14060,28414,29243,21377,3190,-29929,-25065,17083,10842,9236,28481,-15373,-5647,10434,-3153,3816,4451,-28424,28992,-31504,-28985,19044,523,-2594,-10221,31072,24875,15710,20003,8938,-23726,-9028,30035,24241,21969,-31711,-19013,-22005,27070,-14349,-15284,-8297,-10710,-6598,-26511,28058,-22388,-3534,16879,4957,-17163,-13286,17732,-20616,-3279,-18677,-1042,-15850,2100,-30556,-7211,17346,-30939,-25841,28374,13571,25982,-27139,22182,12620,27694,18356,-19930,-19207,-11446,17935,16104,-28604,-4211,3662,15992,3906,-30211,19148,-21106,7366,-11002,3383,-15176,-15039,29720,15232,-15126,-14241,-8940,-27113,15405,-32082,32172,20541,-14586,-7272,23353,-23192,-16044,31335,-29387,-9149,-20647,-8263,-16066,23983,7561,3837,12667,21226,21459,24690,5284,22753,5941,-24350,21394,-2498,22389,17727,-14694,22184,20884,-8975,21000,-15361,-7455,-413,-19790,-4150,8646,-11681,4851,14446,15099,18816,32374,-5242,966,17400,-26365,7911,-7799,-17894,-28706,4213,21752,24720,14383,6757,29478,20469,-9717,-13194,7164,-22652,21184,24778,20512,27707,-18467,-25593,-9622,23210,-23925,-1138,4933,10571,19989,-11071,-15695,-24036,-22836,10331,-4003,-23183,-26309,32702,-13726,15973,11419,-17435,-31168,28952,11393,28333,-16274,-3179,21074,-7221,-20247,-25242,-24414,-328,-7424,2791
0,4553,22901,-7715,3002,-10101,6875,7192,17805,6114,-24511,30922,30016,-1049,4728,12523,-13802,12824,15200,-16206,-13529,11546,25899,26891,31665,-13868,21013,-25492,-9734,16271,23156,-19218,-23010,-5986,-11770,3439,-21737,-27253,-15355,-8820,-18550,2920,11851,-29033,-32028,-3836,-10363,9537,24098,13271,-11974,-31388,-18262,-31752,14812,-20045,-23348,5508,18502,2537,-6395,-13858,14194,-24859,-7479,-32376,1373,-10853,-9912,-1333,-7204,-5618,29540,26123,13013,792,-16344,-2996,22491,7095,395,-19123,13613,27296,-30626,-31829,-19948,14334,-10165,24242,-19053,23117,-3505,14270,17066,24103,-11709,12427,5293,-24415,11678,22453,-19805,-28315,26626,-8483,18422,4984,23054,-8482,32515,25663,-14128,-17203,-25940,-15799,-4565,-20292,28744,-22014,30005,5831,20955,-30201,-20827,-18869,-26054,9868,18452,7149,3761,32654,29009,14381,-7802,-5002,17737,-20719,1204,-17380,10789,-29392,-16080,-24635,6464,-7338,9665,12942,26362,4425,-19179,225,25890,-4293,-25596,8384,11836,1110,-4906,21774,4644,-28859,23984,-13331,-5347,30817,-32722,-1742,-30713,-1693,-29149,-16684,-8701,28095,32224,-18749,-4613,17760,-22336,-5423,-21272,8701,17789,3695,-11334,19398,21555,13747,-19018,-18007,31890,-20106,6700,16254,-18570,8662,-16203,-14401,11986,-18092,6978,-13533,-11441,-10660,27834,-18859,-9338,12508,15422,11514,27984,-9603,16643,-24263,17194,25930,28958,-21400,28158,11939,-1831,-6905,-6140,2029,-15961,-3614,1900,-3030,21449,-7555,10502,-9790,10206,3182,27863,-26546,3481,-10587,-4706,24900,-6090,7491,3539,-7134,21093,-13852,-27933,29261,-17813,17020,3291,30281,2428,15717,23867,-19701,-12255,27660,-24684,17907,-4583,31110,-3573,3435,27687,-30639,8375,19031,-5145,-15079,1507,28582,5390,12572,-4225,23925,3952,25512,23703,15937,-5323,-28683,-18352,-14379,29720,17474,15458,4806,11305,4334,-27800,31210,-20447,-14467,-10885,14768,12318,-26796,-3792,-15582,-15576,-13303,5571,-32553,3147,14206,-27467,20057,8455,-14449,12462,-21756,-20691,19816,2220,-25881,-8428,-14619,31083,27465,-6835,26450,-30887,-32099,13512,-
5042,23533,6165,-30354,-10829,38,17187,12743,-22622,32494,20232,670,-81,14963,13289,-3120,9218,18634,6486,-24024,15555,-10141,17925,-17654,2645,-16020,25296,13459,-13719,-16487,-16847,1681,1092,-19946,-27199,18423,30503,-13000,-15347,-30331,21499,10399,-11398,12540,-4349,10321,14928,27364,-19824,22725,21735,-322,-10976,24318,-16116,-23140,-12280,23349,18901,26701,5555,12169,-13679,-24752,195,-8001,-13452,-14695,14572,12205,20468,-5851,-18186,16609,-24469,-22603,-27272,-32116,-24296,5005,27488,32235,-7142,-12675,-7919,22455,8966,-19838,-11221,-262,-6732,-10417,-10679,-10557,17914,21172,11347,-20188,-648,30238,18292,-14504,8463,7212,-17482,14466,22907,-32163,-21144,1725,32634,-2904,-13436,-16670,24381,23314,-25299,-31176,-8387,-20925,32487,29867,-6677,-29595,-29769,15182,10506,11027,25860,27240,-18875,-22089,1330,-30873,-25172,604,-19700,-19053,8356,-32240,-19357,1708,-5281,-26059,-8236,9453,31465,-31756,4381,-16686,27167,9951,9550,29747,5712,14917,-10755,-17493,-1594,7243,3903,17008,21379,27734,11320,-28108,6408,11868,1663,-26182,21142,31399,-11639,-26476,3294,31649,-13774,-31082,-660,26799,-22614,-21499,1624,-2893,-17603,5543,7941,25108,-7168,10391,-6872,-9537,6305,-8088,14444,-4326,-15949,-29804,-19254,18017,-2313,16512,5986,-31558,-23562,-6052,-28067,-21744,17896,24031,-10062,-24322,-7476,8765,-28964,-25056,17814,-28643,24333,17123,-17584,-12850,13341,-27307,-5335,6286,-18099,28479,15148,-66,28356,-5472,13063,9840,23922,6163,-4149,-24800,6555,-15296,28032,3505,-15603,24537,32693,-20401,19740,7719,18090,22439,24361,-11513,28037,-8118,-699,-6569,-22509,26521,-25685,21957,10565,-2726,32060,4749,-9053,3132,17067,-12212,9218,-32209,-18230,-9019,24372,32514,29975,-11817,9824,-22635,-30479,-2791,5986,7174,29348,1475,-29998,-5983,-14926,-3721,18560,-24067,-6984,22554,-23357,-28944,-29488,18382,13712,23530,-29187,-17996,-9127,31766,8143,27489,-9303,-11135,-29682,-19832,20134,-28683,-23398,14077,-13413,-29853,-15802,27077,5252,3595,-31523,-24487,-256,-26947,-21151,25572,102
27,-2931,-15091,-9182,12327,7630,9108,27701,-27778,21950,-25297,-17067,-9549,8261,-20391,29449,2909,29183,-383,-29987,-25336,-6115,-8439,30389,-29162,25980,-21256,14862,-32519,-7618,-25061,24786,11204,6385,28841,-11465,19929,-26063,10715,26332,-11122,9270,-1682,14797,-27152,-28475,-8692,-3124,29003,-24041,32423,-6301,-20902,-14325,-7557,10961,-32379,-18716,-27143,-28102,-15901,10903,11303,15275,11455,-24260,-25552,18370,-14717,-7323,-30440,21968,34,-21414,-27522,6425,15294,11753,-5657,-14363,-5011,6206,319,-7584,17103,-8474,-7794,-24743,-30849,-29680,-20858,-31641,11320,-9390,28213,-8374,9516,-18466,-8916,23601,32671,14757,-7078,-18806,12670,-13730,-6825,-23756,27882,17346,27042,6365,17554,-26981,1643,-9914,15672,3469,14488,-25416,25368,-25266,23387,26983,2241,14545,-13362,-20529,5582,3981,3881,-15853,14745,1571,16749,-1521,10010,-28899,-10551,11815,11939,14693,-17971,-14297,25465,-22603,-979,-18331,29666,-30485,-11600,-23050,-20207,-16396,28682,-28342,-11631,7507,-28256,-20363,15259,-722,-25315,5450,-25429,-20143,-4590,-25782,-28821,-5517,-32368,-14448,-7952,-18470,4968,-22215,-32556,-26188,11659,-14240,-22220,26225,-13786,-23889,3576,-28142,25567,-13045,-7027,17287,-3192,6868,6034,24795,-25581,-8861,-2764,-21954,-24683,-7908,21573,-31641,31425,4453,-19620,2639,-9036,-23443,22301,-2087,5380,-9300,-26081,-18163,-32650,11906,14861,2426,25172,27132,-20320,-27731,6421,-4314,-5515,-16948,15119,9318,-26539,-9352,29910,17467,-30975,25364,-12028,-3720,8790,24075,28374,25057,-17896,-19039,-18713,-26330,-11327,23894,-16530,6903,25482,800,11827,-26051,-4554,7675,-30125,9707,31313,-30480,5865,-13392,-13849,6099,24241,4978,22666,-5184,10322,-3051,-11808,-17324,29340,29556,23824,-4812,5391,-29673,-10606,4720,-8199,-32274,2813,-10793,-26945,14177,3629,18706,-24315,21879,-18529,19270,-11760,-10061,-29208,15597,-10384,-8600,-13318,-10571,-23799,8758,19371,-12422,-2894,-5262,-20588,24051,11737,-26204,2951,-5542,-6082,-7040,9009,29324,-10513,-21048,-4248,7880,-3809,7892,11823,8982,-6
533,12631,-31213,27915,-18828,-28188,-31012,24765,16826,-9192,12801,-5766,20370,3464,-8087,14879,-341,29270,960,-14416,-9356,-24228,-13223,618,144,27493,1478,-9988,-21737,-13109,17080,400,7016,-28919,15704,-10587,-29600,9955,21325,-9456,4841,583,20637,11011,1540,22147,-5751,17107,-24850,31843,-30679,-23046,-13281,-18892,-6290,29973,12687,5990,27955,-25607,-30684,5655,13870,29558,6385,-31676,-19843,25948,-32044,16179,-27175,-27607,-7539,29575,-31821,18268,-9720,-23373,-21012,-3297,4408,32358,-31607,-27849,-27865,-18189,10377,-6305,2678,23935,16538,12930,13393,-11405,-3584,258,31724,9909,-14047,-7702,-5445,24552,8970,29104,-7534,-12482,30915,-24655,4534,-19686,-15755,-11526,16136,-2942,28230,-16901,1241,-25277,11705,-27014,-4737,30962,-24622,-20288,-24533,-23177,11266,22836,-32309,-18863,-6748,29989,26212,29672,-23163,23882,3751,22311,21282,-5402,21323,5224,-8645,12780,3100,-10406,21449,-27781,-19946,4160,-17036,-32263,-20677,-20755,11261,27409,-10438,12625,-18556,30082,-2586,19698,27149,-1190,24395,-28865,863,-28366,7634,12059,20603,32592,-32416,-26899,-22235,10019,-15564,-29679,8714,-18100,12315,25402,6270,28495,-7032,-28722,1572,-3648,22836,-13434,-19994,-27944,3190,-12806,-15937,25590,29791,-13436,27829,31649,-30773,12325,31762,-21336,10493,-22289,-25853,31289,32261,10630,29099,7542,26142,-764,-22785,10865,15297,-6928,10698,-17122,-145,-2558,-4046,-22615,-7411,-3326,12488,-22267,8130,-32212,-764,23296,3757,31041,26375,24546,-21631,-11422,-20203,22341,11463,-23573,21887,-3542,-31015,-11711,20248,19587,28886,31833,-5573,8574,11565,15114,25113,-21942,-12579,-5076,-15154,32286,1335,16264,-15307,-14005,-20541,21277,2728,14888,6591,5613,-22448,2848,-20962,5293,-28753,24622,-11174,-24558,18012,12771,6698,13206,22947,8270,-14757,-1507,21566,6318,25295,-14594,-3859,6047,-17475,20755,-807,-28534,23695,-13984,26945,20171,17088,-2177,-20907,32419,-10450,26758,18617,23980,-31190,-4752,21732,-10836,-9085,28316,-13676,26299,-6853,-32021,22982,7986,30083,-21644,-18552,28229,5210,
12746,16253,21858,-16430,-8773,24236,16863,-29752,-2722,-30915,-6457,20203,25521,16403,3420,6636,-28795,-29783,12975,16915,31841,27906,13530,2869,-17472,28355,28548,31485,-29780,26497,25339,-17489,-26226,17448,16022,4924,-25690,-12723,2447,-763,-20593,3729,-11118,9376,24571,-18064,-25321,-12939,-18832,-14199,-20206,28784,-3928,-10374,2413,16194,-29119,1788,502,11052,-8785,-5546,-13589,-30773,14706,6045,27584,969,-10989,23306,-18138,18150,-19658,8867,12201,-81,8672,5144,-16010,-28310,27998,-28519,-12293,-25940,9746,14340,-32687,-5414,10261,14522,-18594,-19240,-18150,-12050,-2832,15968,-8459,-13634,-25496,-20888,-9261,6798,-4380,3166,32630,-26249,6739,-31342,32636,-15069,-14023,-27792,32561,-709,23162,-24281,-30936,6100,24141,27794,-28688,-8566,-8239,17677,246,7317,-14714,13136,-26552,32099,-22332,32443,3309,3983,5932,16911,19558,11158,-11500,2815,666,13410,-18253,20104,-27909,32531,25664,-14077,-7540,21980,30161,-12017,28148,-32731,-27535,-147,-28131,17380,22149,29971,22113,-25547,-3321,28479,-12324,-27921,27591,7502,23090,2453,20153,-14364,-9267,29688,32606,-8901,-32407,14256,-30549,23196,7793,-291,10326,22950,13186,5318,-32457,15449,-30764,-23508,-28250,6236,21751,26961,-31339,22568,690,27333,12761,-2443,-12501,30232,-13605,17661,-4813,-7499,-8604,16512,-23208,-1846,-28603,14308,-28533,23914,-28928,-20350,-19575,-3091,-17331,-9228,30459,-24056,12280,15358,12415,2834,14623,2067,18744,-13965,18676,-29482,-1753,12171,2267,30687,10102,-27181,5397,23013,-2572,27009,-3515,-14966,5518,10674,13441,-28292,13947,30154,2257,-31143,-24520,-28092,15204,10444,-22387,25930,-1946,-23221,1489,-2114,12831,-24983,-20610,-22230,-23440,-5457,-13652,18972,-11739,11026,-21439,24202,-6819,-605,-21830,-1818,18716,2682,20881,5015,-12040,-3225,-657,2276,-7342,27680,2679,7473,29106,-9271,29964,-27959,7081,-5811,16333,-18680,2010,8329,-29051,13929,8483,25869,-25698,-12597,-15135,-25178,16986,18996,-23845,-31733,25196,22792,-28395,-28102,-31232,-3547,2412,31542,-23371,-32654,-14305,-30063,12843
,-21972,12798,-24778,12980,-26145,15265,-2792,11,-32569,10632,25941,11971,-5471,29114,-25044,30386,-11009,20774,-1797,-22967,-26370,-2441,23745,-5238,196,-8797,-20011,-10109,8666,-32600,15083,28315,14639,-22860,-30263,-32535,10584,8155,13375,-28376,107,31492,-19726,-1265,-9137,323,1103,-6948,-31208,10171,4272,15719,12443,30054,21681,13149,23742,-72,10267,-30232,-20113,-31164,5767,-8603,-15328,-13312,15445,26538,-2145,-5358,-16769,11244,-19565,-32069,773,-8807,-12413,1634,20434,7618,-20746,18388,21849,16903,-25209,-20111,13033,-15214,-17021,30013,-13331,-9897,-21637,-5750,-18249,-5873,-16188,23774,-29643,20329,-13152,-20849,25200,17839,-22488,27743,23704,-27463,-32056,915,19894,5590,14134,15115,-3497,32302,32184,-12952,30248,20557,9931,-7593,-19612,-4666,-21001,-26423,1280,29038,3888,16587,26974,8861,-15879,13018,7935,32028,20704,987,14559,8199,-26941,-7393,-5186,-15173,-13886,32591,-8155,19504,31113,-24694,20518,6887,25580,28732,16368,14728,-18316,-2915,32236,-21418,30838,26583,-7652,-3646,-20097,294,24707,8341,16548,-19615,-6367,-11216,22728,-9746,14972,23573,-25023,10368,-20433,22797,14942,21705,-21718,-8677,27242,3880,10123,-10452,30259,-17467,10004,-31176,-15042,-651,-28873,18470,10574,-5632,16009,24458,-9328,-20161,-10758,23494,16870,-16025,-11688,-29025,-7668,31317,-6588,-4182,-15671,-18788,29855,-5788,-28018,-11209,-12965,10441,19750,29206,22971,-20117,20091,-5255,-25310,-13002,31691,-28366,-23697,-8320,-29139,1298,-9043,-29720,28917,32316,-7976,-755,26171,-5050,-16435,-6049,7127,-8054,-3830,-2873,-6067,24201,-18175,-11449,-15947,-12485,27617,-11677,-3339,28322,4039,32455,25436,-26337,14878,-14904,5777,3156,-21652,-18192,-17719,-26412,27408,16897,13623,-14539,-29587,3096,9611,9347,-25398,32260,-1238,-20361,21841,18853,-28411,10348,-11929,23503,5898,-32492,-7155,2695,-22,17125,-28975,-14585,25582,-27426,17370,-14313,1025,30345,-18725,30745,-13312,11993,24507,-27054,-20954,24659,29370,23137,-2003,-23470,-29165,18241,-5075,-705,-24912,19571,-10261,-12813,-14202,
14165,-11328,5358,677,-4659,20886,-31176,20123,1198,-7398,-3484,-11236,-16021,-20431,9988,23740,-4776,-29826,-21433,-5890,-18460,-22516,-12950,-7403,-8761,20362,29555,27859,-3811,-26152,24282,-6020,5957,6172,7499,1415,-3347,-11336,19720,-14257,14616,-24665,-12630,12549,17072,23499,27010,16038,-21396,-28876,-25069,31682,32137,-10248,3992,-30727,-19479,-9301,-24892,-16244,-9613,-31654,-28420,-2388,406,13481,1289,16191,-81,-24334,-25475,-5924,-22465,-30960,-5776,30931,29911,-28140,32035,30029,-10691,-27207,17839,261,4019,-26178,18269,-8101,883,-8631,-22927,-4064,-3148,-29462,6871,-9458,10343,-20234,23740,13017,-22440,-24647,5765,23584,26500,-18547,16131,2337,10156,-22442,-24437,18829,5501,23602,-22861,406,-28791,31750,-14596,29482,12374,-25231,-4155,-30679,15698,14390,-5667,-18349,5353,-19515,-17571,9976,-31746,3017,-7854,17382,24728,4944,27923,11687,20690,6482,-15964,-30496,22227,-23233,6734,25398,5968,18176,-11996,-2529,-6546,23054,-30559,2174,-12558,-24257,16979,-7038,25374,14009,16140,-23647,-20425,-11889,24671,-25203,31247,-30749,6187,20993,-13283,1841,-22370,18639,-13717,-23843,6557,-30102,2149,32596,-29599,28567,-19745,932,-22593,3550,4279,-3422,-6063,23008,15000,-9818,28824,-4311,15630,-999,1599,-1850,4850,17059,15845,32300,26324,-16376,14076,20493,14502,-508,692,-6438,31076,-9851,13706,13809,-19590,25011,24946,-28738,29984,31493,4688,3286,31650,29192,-4636,567,-8152,13725,-15893,-29996,23913,30295,-11813,-10672,3180,9685,23577,15451,-8090,-8743,-6126,24501,-9594,-10443,-30606,-9578,14973,11138,-18963,-18727,-19758,-32759,-25940,-13315,-31723,12523,16802,8988,17242,9878,-5760,-1674,32194,31813,22991,-29149,28230,11466,32517,9228,-27949,20727,-25661,13932,10836,-549,-9517,14800,-15059,-6316,-29839,-7939,-20767,17656,-950,13242,-24455,-31262,-21705,12508,17702,-16460,-32220,-19603,-27671,24233,14474,-16664,18820,-18720,4953,-14407,-8012,-5412,-29245,-32192,11587,31472,-5696,3068,6225,-32741,13067,-27825,-13818,18753,-1429,32248,-14668,-8996,-8972,-17443,-11435,78
23,19143,-18583,-31288,-28837,-19637,24783,-12584,-8724,21752,1026,-20462,1992,24120,-20512,31153,-3485,-24542,19326,-7452,8275,14251,30073,7687,13435,19969,17224,-8226,15045,16594,-419,28523,-31064,24334,11352,-20761,-5354,28690,16179,32314,-8373,-27313,28497,-7123,-8120,-12312,3480,13183,2684,4158,-7578,2982,29262,-25782,21492,-5838,27743,-26117,15583,-32479,8079,26506,-26153,25980,20069,11129,-11822,-3762,18387,22393,31460,30788,27336,-25972,-4140,16189,588,32233,22277,-12797,9378,-9924,-9250,-1444,-12147,2787,-9332,-19750,10557,-31307,21936,-13539,5683,-7633,-32107,-4540,4858,-20879,3776,-421,10880,-24767,2417,-1803,-30511,29924,8495,5115,-16815,1412,29407,14903,-32450,-11954,26650,-20319,-21766,17225,-6586,9633,-7276,-308,22039,28943,13871,24175,27661,-260,18500,9680,-26767,12631,25744,18670,12749,26623,-23030,12645,18822,14227,-8435,-11460 diff --git a/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice7.tflite b/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice7.tflite new file mode 100644 index 0000000..259f6f9 Binary files /dev/null and b/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice7.tflite differ diff --git a/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice7_golden_int16.csv b/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice7_golden_int16.csv new file mode 100644 index 0000000..658b675 --- /dev/null +++ b/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice7_golden_int16.csv @@ -0,0 +1 @@ 
+-15433,-23583,-16812,27634,12449,14808,-26704,-19594,-30391,7646,-17151,-31149,7780,-6077,26751,-1226,-12982,22855,22683,-27026,19114,23248,-12844,-26527,-8095,-23638,-21165,-17631,-29918,-27701,11371,14748,28123,5819,5657,24392,9173,18585,31406,1511,10367,13351,-7163,27159,4411,5275,-16301,15990,-19759,-24330,-3052,-12033,848,-17868,-20447,25612,-28912,31388,6996,10431,31121,27429,7652,-11877,-5069,5981,-24997,-7718,21547,5105,12840,-28360,14154,22751,-5423,17076,6885,4991,-24303,-18909,-129,32428,-2749,25558,22175,-18721,-483,26446,-10078,25374,-24909,5738,6462,-7790,-20638,-21889,6722,7108,-7209,-12255,14533,24539,18872,24135,3004,22179,17330,13524,-4888,12186,-11237,-8378,-29793,-1873,2339,16563,10002,6581,9093,9931,24445,-2354,-21488,3503,11657,-19020,32328,22953,-23721,2951,-1183,15469,30303,11059,10862,7467,2889,27229,-188,5592,16755,24690,-9398,-7840,-30008,23702,-27466,-20510,24123,31667,-22141,6029,29448,7491,18885,17610,31045,-3653,4363,25603,23999,26696,-26726,-15499,-24158,6223,-8938,-20736,-23884,25460,-19010,-25207,-11011,-6596,-20388,-17350,-26666,17913,-18281,23427,-8790,23001,-21530,-27402,13893,-19361,-4628,-23802,30777,-21420,18220,28572,25747,-22360,12508,14876,15511,8134,8018,-5083,-27203,-7956,-21506,2760,25146,15771,-28403,-7827,9128,14504,12130,-30760,25118,-28054,-1544,12470,27065,-1000,-6590,-11355,8985,-8372,25353,21153,17600,-3649,-5238,-14097,764,-1784,29833,5635,-22147,23018,31780,710,-15201,-19831,27736,21885,65,-31044,-26375,7900,11973,-30040,-9713,26110,4924,-16007,30298,-3385,8541,5004,-14533,32119,21816,-17325,-7130,-22872,29903,14396,-32678,-13927,15329,6670,-50,32601,10576,-1891,-31987,13862,-7681,-12757,21986,-1983,-29971,-28211,3665,7871,-22137,-28939,21616,22242,31146,-16723,-24952,-8255,4457,-5422,-18592,12744,-27790,-24436,20299,6631,7843,20418,7715,19466,-10899,28282,-21583,-4382,5085,12267,26704,4646,8701,29350,-32124,12829,4820,-2974,-6477,-6690,535,25792,-25486,-9113,-17127,16603,30998,5630,14549,-22540,23299,29291,-23
058,30389,24447,17524,-15319,-24888,20093,-31386,-11081,17694,-27661,-22868,-25112,12296,22947,19947,25201,-10294,-24591,4794,14371,-16480,-4339,-13848,25234,5417,-22987,-32110,-29531,4529,-31897,-10938,-22787,-3866,-25286,2326,1500,-28677,21208,-8539,4212,1957,-9960,611,-22311,-21386,7244,-32106,16594,-20962,25821,5228,-15386,11684,-17037,-31769,1955,-4657,-22061,27993,-6761,-21621,28892,28640,-4379,6345,-16236,18610,24950,-28886,-16930,-26825,17744,29247,-14046,-6894,14725,11211,-29597,-29399,20450,6194,-7641,-4930,-21622,28565,21745,-2705,-10135,-4315,12314,30367,5315,5455,-3350,26621,8677,21485,18843,-25760,-24701,21189,-30909,2810,-5559,29458,-27363,-26890,-20354,29193,11437,22554,-29080,-23493,27780,14461,-1834,-29863,-3495,-11739,-8128,-30954,8256,-25083,-386,20165,-402,30710,-17987,21554,5811,-25960,2741,15261,-22994,24912,-17718,13868,27491,-22946,-22114,-25173,16851,-15633,4197,-29118,5759,28565,24363,-24590,7045,-16334,-11328,16822,-13041,-16147,-29748,-2456,-8698,4253,11488,-7893,25080,27877,-22192,-6290,29877,28934,31394,17476,22156,11212,19674,1776,-11761,7218,-24206,3765,22088,-7726,12646,14935,-16850,486,23855,-375,-23037,-4343,-19429,25141,-3862,-30399,24367,-4512,21985,-16042,-21030,22938,925,-8475,8722,-14593,-10369,30559,9667,32283,16290,24509,-16641,-8283,-28830,31726,-29821,-6990,193,-18384,-30526,-23723,752,-30435,-2440,-22312,3675,-28617,-1052,-13369,15083,-14656,-367,9133,-15710,-15403,-28109,27763,30266,19995,-29753,13958,22529,29194,-4682,-21313,-20829,-32106,20795,-29799,-8178,16079,14358,-13022,-7732,15137,-17216,-21211,-32758,-26107,22959,31256,-15609,20642,24218,-28862,14882,-18951,11948,-761,-23018,11931,31091,3417,12932,-5204,-6193,9963,23886,-20015,28397,20927,-28441,-18773,21368,-20135,-18365,14562,31289,-13910,34,24299,4004,-3370,-6828,-23933,5373,6019,30819,-32121,26892,-2937,-6830,-27627,16130,20660,-29828,-21808,-6907,-22863,2708,13464,-31593,14918,21318,17843,12318,-18559,-15571,25299,-13611,-17439,-3781,-14839,1137,3327,-2644
6,7983,-2994,-14585,587,-12956,28335,-31730,-24318,-13962,-32486,22589,-29895,-10259,-31016,19716,-11263,-24667,-19218,-32077,25940,20727,28314,14178,-7480,-26010,11931,12179,12049,-29161,15750,-17830,13656,-16181,-9806,-24742,-21578,-28832,-27034,18633,25569,18566,21375,-16224,1009,26856,-13681,-21270,1097,-32177,12323,-11580,4175,11665,-14256,21128,1334,-16952,4987,4806,5691,3962,-32174,-15281,-10216,-3722,-5869,-18284,14130,11708,28647,-18296,-12992,14007,-735,-3411,-29991,28443,28249,22079,-17516,-15064,608,3213,20472,-5095,-18520,30202,-5078,-7257,1321,-15255,4530,-9950,-25309,-10730,-3595,29572,18305,-25115,24961,-26195,-5833,-28918,21579,8129,-14566,-16581,-7639,29926,3534,13599,18422,27394,-14930,12733,24068,-24634,-8505,-271,-4715,29537,13807,-23756,15736,-15429,-26838,7308,-24110,-27300,-18548,20980,1437,13668,-31996,9715,-17843,11896,23732,23197,-12406,21477,-3954,7965,-12122,-7648,1995,27534,-7082,-11840,-6445,-9840,-15552,-18235,25487,1283,-22429,-12570,26871,15375,-11354,-15270,-21642,-16303,-20621,1011,32074,-26198,21521,-7628,4648,-23464,19841,26902,22388,8590,-28429,-22670,-4958,29682,-18067,-14373,-1828,-7973,22032,15207,7337,-32559,19467,-22928,14987,25869,-16464,10299,26018,-3274,-26480,-24938,32082,-31629,-30657,23521,2646,-5628,-8959,30954,-31775,-15252,-10560,21987,13270,-7404,25750,-22475,14345,350,14602,-8808,10412,3746,28267,-31397,618,14063,-18400,27677,-436,-5871,-3007,25398,27049,12176,-26436,23171,-23184,-10643,-3818,17681,5067,-24134,-1959,-2594,-23901,2213,-20730,-17679,-16666,13235,6611,-25489,1939,25466,12517,5416,21492,-14121,17266,5076,2256,20607,17037,-24905,-8493,7597,-21279,-25193,16189,-24474,-16560,15665,28728,1603,20759,-28594,-27456,27805,17586,-31760,22633,-7529,-28830,2810,-20732,-15452,-6783,-25865,-7564,11046,-18559,16564,-11501,-21717,19324,-26789,-10874,23110,9543,-9588,-21503,15809,11015,30636,21186,-15632,15013,15636,32143,-22378,652,-21479,-23430,-20090,-17963,-12105,24664,-12635,9148,-16119,26449,4028,14275,25594,
8924,-8468,12281,-17933,-1116,29841,-4080,-25716,-20997,5928,26817,-30602,-28098,28434,16321,5292,-276,23023,-21723,11681,-9980,7608,-6758,11539,-1087,-20584,20938,9567,32318,17034,-12040,50,922,28824,13475,-26272,20670,12368,-24968,1581,-18470,-17905,20154,22261,26425,8210,-12680,-6642,-12797,17388,16197,13362,-19988,14366,-23517,-1486,3381,1173,6720,4095,29853,-12137,-18642,-25330,8282,-32269,26288,-23385,25463,3590,32068,4011,14000,3264,-24826,-19673,14246,21864,9984,-23748,-11373,-27461,-4783,-31954,32621,27326,-23485,18772,-20568,-9111,-12501,23925,6940,-3616,-17867,-22239,-22987,-27931,-26161,-1240,-26440,22129,-23240,18097,-17462,-31300,-31271,32035,-32192,16001,19302,9752,-21278,3684,25658,4941,-21701,-17995,-24322,-4450,-5445,-31037,-16961,-2392,-27102,12118,18088,17820,-17875,13182,26133,32388,32137,-24788,23368,-9630,16228,-23608,19610,21386,-9937,-15051,-29930,22868,616,28946,25144,237,-30405,30203,4786,-18727,29244,19524,17308,-1060,-18841,4841,-25757,-20140,4832,14897,28132,17106,22740,13248,26818,22429,8013,-3143,5569,-28954,15234,29383,-17623,-3501,-2919,-15277,-17444,29598,23559,-6522,1132,23088,-29728,26311,10093,7305,-7148,10723,-21203,-13288,7528,146,-23654,11523,-18970,29825,-18446,-31700,24254,3723,24547,-24418,1932,5286,-26554,-31423,-13928,22297,19828,21432,1250,20119,-12212,-2662,22328,-19197,-3064,-6543,25981,-15750,-27271,11353,-19759,-5670,3756,-10111,-18696,1035,15367,14173,-4065,-663,-30847,12220,19517,10163,17099,16083,8304,25608,-7355,14707,-1465,10880,624,-16148,30226,-9845,14562,30216,7645,4359,11185,-26625,-20423,4467,-29179,-7592,4371,15784,-18616,27567,8706,29069,-12853,-2163,15662,21260,-31224,31459,-18896,-15320,-16021,-9167,-10398,21601,18824,14381,21318,-29825,-9978,19640,23275,-19901,31864,21264,10691,-15834,31245,18421,-14393,12255,-5954,8526,-21121,3455,-19222,14362,-18015,192,8762,-364,10812,-2055,-17616,-2700,22785,-10999,28313,11258,8143,-16897,23066,5933,-6982,-13106,14711,-16458,-5324,30263,-3997,-30126,3914,-30588,-1
9355,-20556,11260,13893,14659,-16296,-1148,20957,-25327,25002,20101,-22531,-18402,2430,-10807,-3937,8952,17446,9370,-23864,-32701,6028,-4791,6474,-8667,-25869,14310,-389,-3984,-501,6356,-26746,17901,-28220,-19337,10864,-19916,12553,3053,-27213,-31084,2376,-13918,-13284,20667,-6626,1327,-18245,-15088,28201,-17634,-7449,-26691,-21550,32732,-8610,31891,1894,29776,17003,31930,-16478,-609,7550,26472,20039,28828,-24390,-18851,-1013,24028,-19306,-28975,-26822,-22970,-16888,23426,18658,10460,-1869,-24138,17989,6659,-28071,-3878,-2588,-17131,17728,21931,27105,32735,12940,26836,19033,-3096,10638,2171,20519,15953,28168,-15999,-22580,18448,-5826,-8715,-14749,-3716,-12969,-28425,30042 diff --git a/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice7_input0_int16.csv b/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice7_input0_int16.csv new file mode 100644 index 0000000..caec12b --- /dev/null +++ b/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice7_input0_int16.csv @@ -0,0 +1 @@ 
+-14120,29733,-11540,-1472,-19263,30245,1925,-24692,-23222,9730,1823,-12703,-21373,-20146,11455,15463,-5506,-13396,-11286,-9056,24219,-22090,-29048,-17649,-24385,-14066,-12604,-32280,31287,31576,-19583,-22755,32451,-9067,6987,-15288,-13316,-8520,27387,30899,25762,16683,8624,-12199,16751,14431,11328,-27589,29356,-1815,29508,-3162,13156,-30871,25464,-31722,25959,16324,-14086,18555,-8086,-22897,-9720,-10445,-13231,13222,-12997,25193,15569,-21565,9480,-1879,-10384,5665,-6548,-3327,-9234,13791,13032,23549,14143,27630,10362,24110,22838,3062,31676,-22702,-20215,-218,-10044,4525,1237,-10642,-14395,-8200,31892,23444,27860,-17459,-23219,-27489,27844,-3986,-32239,-1977,1181,-28015,27120,27449,-754,-19246,121,9541,-2027,22814,-20253,13274,32765,3489,-906,-5157,25977,26050,-23711,1767,11195,-8161,23037,7951,-923,-11116,15553,-26149,-10382,9018,-17949,-23523,11628,-6981,656,1766,959,-8228,21270,23899,6648,10223,-9953,-12909,-7650,-25862,20557,-31208,22713,30065,-23202,6809,-14708,11555,31718,1947,6262,14103,16385,5586,-26153,14395,3176,-5142,899,23654,-351,1254,24345,32304,29183,11341,-17608,1039,-12340,-6744,23648,12219,5412,-5751,-1700,-5605,-6127,-29175,-21948,-30472,-28057,16317,7633,19448,6890,31728,670,-16276,12758,-22426,31416,-18165,-1490,-9068,90,-6342,10023,-20654,-15320,-5115,-21328,30408,19459,-24919,-5164,-10003,6372,-15594,20196,-17203,-511,19990,-17129,9104,-20760,-21485,30496,10650,9280,-31409,-1043,-29059,15717,-27503,-10621,23854,31738,25312,-20748,16294,18506,26546,31322,-9940,12165,21991,-9888,-28201,-4319,-25535,-31339,23152,16420,-1762,-13243,-31114,-5166,-12280,-18057,-11198,32479,26303,20858,11769,-4922,12412,-8382,-14707,-32529,-21734,25629,6247,-6013,28403,-5453,6519,-16519,11735,-28362,7257,-31107,-7859,-24642,-30810,20742,-7192,-22412,5541,-3256,-7101,-31673,-26782,10867,-7880,-16140,18412,14765,17554,23310,-26583,23980,-15195,-3093,31416,31241,-10347,-26721,24042,7261,-18601,1865,17484,-27004,-20752,20554,-6888,-30901,-9546,5426,-22739,18222,21060,131
87,15962,-16362,-19613,-10822,-11259,-22917,-21876,-6993,19207,14668,-2665,-29495,14261,-31945,-12630,-22723,-25210,-19465,-21144,-15987,23756,22327,3774,-28457,-14900,18515,30728,17404,22363,6880,10509,-27904,-14267,23900,-11820,18065,-27881,26100,10192,-12184,15784,6183,30843,3973,-19301,20910,11735,20460,-30461,-32132,-21757,24903,23026,-21508,-10474,-8603,26716,20061,24847,21987,2585,1888,-16590,-10244,23467,-4811,-26178,-420,-19019,-20203,-13282,5223,14849,4913,12544,922,-11737,27727,15298,18909,4619,14472,14351,-13015,12378,13962,8299,-1881,-21334,-24793,19925,12262,-21122,-8680,18271,26248,-23425,-22833,28742,-9760,11349,-15195,14626,-28797,3409,21642,-30525,4238,-20505,-20529,-6497,-7339,-14495,4413,24671,-26516,25205,22807,-19256,-2148,31559,-31903,27932,19297,1809,32040,-14534,-25603,17457,-24117,23163,-6833,-1173,-15999,-10477,9939,-26978,18355,5587,-15720,-751,16783,-588,10835,-12981,25002,30426,15488,17217,-23962,22930,21697,15340,10441,-27980,-24530,-7105,-27724,-6212,30727,28610,4854,-258,-21658,-19747,2307,-19038,27917,-30446,-14306,13077,24401,-2935,-10255,-29882,32632,-7486,21184,13225,-32713,-26748,-21732,-1960,-21651,10464,-31197,9241,-9048,-12397,25477,18902,-94,-10652,-23067,-11154,-29907,-2959,9160,-5157,18100,32750,-10239,-30994,-11668,-23202,-1060,11252,-5964,10397,-19128,17490,-17991,-3839,5589,30446,16392,27665,-19363,-5906,-21913,19357,15140,30716,26417,-1474,9166,-15823,-2075,7881,17165,5440,27720,-21890,-15940,-26909,-8068,-28565,-15761,-16342,22152,31378,17238,-17540,25067,-1780,-2399,25962,-6371,1216,11065,-32367,-22811,16654,29402,-12284,-15158,20304,-22098,10961,25605,-14061,-16469,26241,29970,-7677,32501,7696,2509,-10956,5796,-2897,29328,22095,-8158,-2904,-21574,-27708,26595,25620,-32094,2013,16709,27937,-19979,22946,26719,-13666,-561,3246,-6375,-16973,16917,-29765,18729,-22104,-10983,8045,-2757,14827,-8740,-13855,14557,2408,15114,16409,-5426,-21639,22192,28360,24027,-15057,-14779,15860,-16611,-25545,-13269,3205,-29794,9399,-16477,
-17977,2260,-8350,28274,-30825,-28178,20155,-12159,31914,3180,-20647,-22515,2913,-17629,-25161,23088,20427,-5644,16608,25633,-15343,-854,27646,-8099,-32213,20455,1675,-8933,-10003,-22197,3489,-9690,7885,-576,-16598,-28675,-3799,29319,-15142,-10113,-10086,-12992,15060,23445,-20350,1066,10498,-17613,-10217,-29847,4289,26793,15467,-26623,-13833,12685,24367,-23112,23781,-15433,-23583,-16812,27634,12449,14808,-26704,-19594,-30391,7646,-17151,-31149,7780,-6077,26751,-1226,-12982,22855,22683,-27026,19114,23248,-12844,-26527,-8095,-23638,-21165,-17631,-29918,-27701,11371,14748,28123,5819,5657,24392,9173,18585,31406,1511,10367,13351,-7163,27159,4411,5275,-16301,15990,-19759,-24330,-3052,-12033,848,-17868,-20447,25612,-28912,31388,6996,10431,31121,27429,7652,-11877,-5069,5981,-24997,-7718,21547,5105,12840,-28360,14154,22751,-5423,17076,6885,4991,-24303,-18909,-129,32428,-2749,25558,22175,-18721,-483,26446,-10078,25374,-24909,5738,6462,-7790,-20638,-21889,6722,7108,-7209,-12255,14533,24539,18872,24135,3004,22179,17330,13524,-4888,12186,-11237,-8378,-29793,-1873,2339,16563,10002,6581,9093,9931,24445,-2354,-21488,3503,11657,-19020,32328,22953,-23721,2951,-1183,15469,30303,11059,10862,7467,2889,27229,-188,5592,16755,24690,-9398,-7840,-30008,23702,-27466,-20510,24123,31667,-22141,6029,29448,7491,18885,17610,31045,-3653,4363,25603,23999,26696,-26726,-15499,-24158,6223,-8938,-20736,-23884,25460,-19010,-25207,-11011,-6596,-20388,-17350,-26666,17913,-18281,23427,-8790,23001,-21530,-27402,13893,-19361,-4628,-23802,30777,-21420,18220,28572,25747,-22360,12508,14876,15511,8134,8018,-5083,-27203,-7956,-21506,2760,25146,15771,-28403,-7827,9128,14504,12130,-30760,25118,-28054,-1544,12470,27065,-1000,-6590,-11355,8985,-8372,25353,21153,17600,-3649,-5238,-14097,764,-1784,29833,5635,-22147,23018,31780,710,-15201,-19831,27736,21885,65,-31044,-26375,7900,11973,-30040,-9713,26110,4924,-16007,30298,-3385,8541,5004,-14533,32119,21816,-17325,-7130,-22872,29903,14396,-32678,-13927,15329,6670,-50,32601
,10576,-1891,-31987,13862,-7681,-12757,21986,-1983,-29971,-28211,3665,7871,-22137,-28939,21616,22242,31146,-16723,-24952,-8255,4457,-5422,-18592,12744,-27790,-24436,20299,6631,7843,20418,7715,19466,-10899,28282,-21583,-4382,5085,12267,26704,4646,8701,29350,-32124,12829,4820,-2974,-6477,-6690,535,25792,-25486,-9113,-17127,16603,30998,5630,14549,-22540,23299,29291,-23058,30389,24447,17524,-15319,-24888,20093,-31386,-11081,17694,-27661,-22868,-25112,12296,22947,19947,25201,-10294,-24591,4794,14371,-16480,-4339,-13848,25234,5417,-22987,-32110,-29531,4529,-31897,-10938,-22787,-3866,-25286,2326,1500,-28677,21208,-8539,4212,1957,-9960,611,-22311,-21386,7244,-32106,16594,-20962,25821,5228,-15386,11684,-17037,-31769,1955,-4657,-22061,27993,-6761,-21621,28892,28640,-4379,6345,-16236,18610,24950,-28886,-16930,-26825,17744,29247,-14046,-6894,14725,11211,-29597,-29399,20450,6194,-7641,-4930,-21622,28565,21745,-2705,-10135,-4315,12314,30367,5315,5455,-3350,26621,8677,21485,18843,-25760,-24701,21189,-30909,2810,-5559,29458,-27363,-26890,-20354,29193,11437,22554,-29080,-23493,27780,14461,-1834,-29863,-3495,-11739,-8128,-30954,8256,-25083,-386,20165,-402,30710,-17987,21554,5811,-25960,2741,15261,-22994,24912,-17718,13868,27491,-22946,-22114,-25173,16851,-15633,4197,-29118,5759,28565,24363,-24590,7045,-16334,-11328,16822,-13041,-16147,-29748,-2456,-8698,4253,11488,-7893,25080,27877,-22192,-6290,29877,28934,31394,17476,22156,11212,19674,1776,-11761,7218,-24206,3765,22088,-7726,12646,14935,-16850,486,23855,-375,-23037,-4343,-19429,25141,-3862,-30399,24367,-4512,21985,-16042,-21030,22938,925,-8475,8722,-14593,-10369,30559,9667,32283,16290,24509,-16641,-8283,-28830,31726,-29821,-6990,193,-18384,-30526,-23723,752,-30435,-2440,-22312,3675,-28617,-1052,-13369,15083,-14656,-367,9133,-15710,-15403,-28109,27763,30266,19995,-29753,13958,22529,29194,-4682,-21313,-20829,-32106,20795,-29799,-8178,16079,14358,-13022,-7732,15137,-17216,-21211,-32758,-26107,22959,31256,-15609,20642,24218,-28862,14882
,-18951,11948,-761,-23018,11931,31091,3417,12932,-5204,-6193,9963,23886,-20015,28397,20927,-28441,-18773,21368,-20135,-18365,14562,31289,-13910,34,24299,4004,-3370,-6828,-23933,5373,6019,30819,-32121,26892,-2937,-6830,-27627,16130,20660,-29828,-21808,-6907,-22863,2708,13464,-31593,14918,21318,17843,12318,-18559,-15571,25299,-13611,-17439,-3781,-14839,1137,3327,-26446,7983,-2994,-14585,587,-12956,28335,-31730,-24318,-13962,-32486,22589,-29895,-10259,-31016,19716,-11263,-24667,-19218,-32077,25940,20727,28314,14178,-7480,-26010,11931,12179,12049,-29161,15750,-17830,13656,-16181,-9806,-24742,-21578,-28832,-27034,18633,25569,18566,21375,-16224,1009,26856,-13681,-21270,1097,-32177,12323,-11580,4175,11665,-14256,21128,1334,-16952,4987,4806,5691,3962,-32174,-15281,-10216,-3722,-5869,-18284,14130,11708,28647,-18296,-12992,14007,-735,-3411,-29991,28443,28249,22079,-17516,-15064,608,3213,20472,-5095,-18520,30202,-5078,-7257,1321,-15255,4530,-9950,-25309,-10730,-3595,29572,18305,-25115,24961,-26195,-5833,-28918,21579,8129,-14566,-16581,-7639,29926,3534,13599,18422,27394,-14930,12733,24068,-24634,-8505,-271,-4715,29537,13807,-23756,15736,-15429,-26838,7308,-24110,-27300,-18548,20980,1437,13668,-31996,9715,-17843,11896,23732,23197,-12406,21477,-3954,7965,-12122,-7648,1995,27534,-7082,-11840,-6445,-9840,-15552,-18235,25487,1283,-22429,-12570,26871,15375,-11354,-15270,-21642,-16303,-20621,1011,32074,-26198,21521,-7628,4648,-23464,19841,26902,22388,8590,-28429,-22670,-4958,29682,-18067,-14373,-1828,-7973,22032,15207,7337,-32559,19467,-22928,14987,25869,-16464,10299,26018,-3274,-26480,-24938,32082,-31629,-30657,23521,2646,-5628,-8959,30954,-31775,-15252,-10560,21987,13270,-7404,25750,-22475,14345,350,14602,-8808,10412,3746,28267,-31397,618,14063,-18400,27677,-436,-5871,-3007,25398,27049,12176,-26436,23171,-23184,-10643,-3818,17681,5067,-24134,-1959,-2594,-23901,2213,-20730,-17679,-16666,13235,6611,-25489,1939,25466,12517,5416,21492,-14121,17266,5076,2256,20607,17037,-24905,-8493,7597
,-21279,-25193,16189,-24474,-16560,15665,28728,1603,20759,-28594,-27456,27805,17586,-31760,22633,-7529,-28830,2810,-20732,-15452,-6783,-25865,-7564,11046,-18559,16564,-11501,-21717,19324,-26789,-10874,23110,9543,-9588,-21503,15809,11015,30636,21186,-15632,15013,15636,32143,-22378,652,-21479,-23430,-20090,-17963,-12105,24664,-12635,9148,-16119,26449,4028,14275,25594,8924,-8468,12281,-17933,-1116,29841,-4080,-25716,-20997,5928,26817,-30602,-28098,28434,16321,5292,-276,23023,-21723,11681,-9980,7608,-6758,11539,-1087,-20584,20938,9567,32318,17034,-12040,50,922,28824,13475,-26272,20670,12368,-24968,1581,-18470,-17905,20154,22261,26425,8210,-12680,-6642,-12797,17388,16197,13362,-19988,14366,-23517,-1486,3381,1173,6720,4095,29853,-12137,-18642,-25330,8282,-32269,26288,-23385,25463,3590,32068,4011,14000,3264,-24826,-19673,14246,21864,9984,-23748,-11373,-27461,-4783,-31954,32621,27326,-23485,18772,-20568,-9111,-12501,23925,6940,-3616,-17867,-22239,-22987,-27931,-26161,-1240,-26440,22129,-23240,18097,-17462,-31300,-31271,32035,-32192,16001,19302,9752,-21278,3684,25658,4941,-21701,-17995,-24322,-4450,-5445,-31037,-16961,-2392,-27102,12118,18088,17820,-17875,13182,26133,32388,32137,-24788,23368,-9630,16228,-23608,19610,21386,-9937,-15051,-29930,22868,616,28946,25144,237,-30405,30203,4786,-18727,29244,19524,17308,-1060,-18841,4841,-25757,-20140,4832,14897,28132,17106,22740,13248,26818,22429,8013,-3143,5569,-28954,15234,29383,-17623,-3501,-2919,-15277,-17444,29598,23559,-6522,1132,23088,-29728,26311,10093,7305,-7148,10723,-21203,-13288,7528,146,-23654,11523,-18970,29825,-18446,-31700,24254,3723,24547,-24418,1932,5286,-26554,-31423,-13928,22297,19828,21432,1250,20119,-12212,-2662,22328,-19197,-3064,-6543,25981,-15750,-27271,11353,-19759,-5670,3756,-10111,-18696,1035,15367,14173,-4065,-663,-30847,12220,19517,10163,17099,16083,8304,25608,-7355,14707,-1465,10880,624,-16148,30226,-9845,14562,30216,7645,4359,11185,-26625,-20423,4467,-29179,-7592,4371,15784,-18616,27567,8706,29069,-1285
3,-2163,15662,21260,-31224,31459,-18896,-15320,-16021,-9167,-10398,21601,18824,14381,21318,-29825,-9978,19640,23275,-19901,31864,21264,10691,-15834,31245,18421,-14393,12255,-5954,8526,-21121,3455,-19222,14362,-18015,192,8762,-364,10812,-2055,-17616,-2700,22785,-10999,28313,11258,8143,-16897,23066,5933,-6982,-13106,14711,-16458,-5324,30263,-3997,-30126,3914,-30588,-19355,-20556,11260,13893,14659,-16296,-1148,20957,-25327,25002,20101,-22531,-18402,2430,-10807,-3937,8952,17446,9370,-23864,-32701,6028,-4791,6474,-8667,-25869,14310,-389,-3984,-501,6356,-26746,17901,-28220,-19337,10864,-19916,12553,3053,-27213,-31084,2376,-13918,-13284,20667,-6626,1327,-18245,-15088,28201,-17634,-7449,-26691,-21550,32732,-8610,31891,1894,29776,17003,31930,-16478,-609,7550,26472,20039,28828,-24390,-18851,-1013,24028,-19306,-28975,-26822,-22970,-16888,23426,18658,10460,-1869,-24138,17989,6659,-28071,-3878,-2588,-17131,17728,21931,27105,32735,12940,26836,19033,-3096,10638,2171,20519,15953,28168,-15999,-22580,18448,-5826,-8715,-14749,-3716,-12969,-28425,30042 diff --git a/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice8.tflite b/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice8.tflite new file mode 100644 index 0000000..c18fdac Binary files /dev/null and b/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice8.tflite differ diff --git a/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice8_golden_int16.csv b/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice8_golden_int16.csv new file mode 100644 index 0000000..450d722 --- /dev/null +++ b/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice8_golden_int16.csv @@ -0,0 +1 @@ 
+2945,-5099,1335,1595,285,24331,-29219,30581,-28002,-22277,22015,32133,-19702,31668,20473,-5417,-1199,-27138,-15350,-18042,-27701,21475,-4360,17653,-26625,-9495,20972,2237,9419,29552,9522,-14525,-22450,1033,32649,-26435,31552,-32541,30488,10117,-31918,20501,-22443,-30891,-21293,28066,1490,-21957,27545,-24147,12824,-20133,32143,-24241,-29610,-24301,24554,18501,5872,3615,-22125,2810,23852,11919,28973,16388,-2110,14448,-860,20947,9793,-28594,29317,17643,10107,26252,18699,24867,-4783,-32396,16122,-9993,-5340,16861,31189,-32278,10260,1850,3327,-13519,-17821,-31232,12538,-18963,-2460,-13057,31163,-26547,23524,25261,28530,18118,15779,-6497,1399,-1723,15936,-13635,7959,9090,20061,-28161,26319,-18944,-29357,-9278,2595,-14592,-13890,17694,-30819,6601,-19802,-22810,20014,19930,3105,23057,6648,-10834,-6236,30879,21979,-24964,13505,-183,7217,25600,18571,23086,-20098,14371,-18441,31573,10954,8834,-2251,-1556,7348,-10323,10870,7639,10496,-16110,-14488,-15816,-1077,26795,11333,25642,129,-27378,23562,-7640,3475,-7089,-23010,-8322,-24973,-8141,-5212,-19193,-24580,-31406,-17971,6427,-14110,14868,-29646,-15654,-8486,-21448,-15781,27835,-19330,5598,-25364,-19172,-4845,10138,-26811,-2772,8585,18789,29043,3551,27186,32516,6923,15744,-6488,20862,-20971,-20232,32718,-30662,28486,-9897,-14672,-10126,17418,-31142,-32254,-3572,24167,29392,1487,-7937,13836,-8997,-3800,-29896,7057,-9816,30480,8543,-21983,-4027,4484,10832,-4972,28471,30535,-25573,-24153,14260,4821,5528,-20779,9621,-19379,3082,-23907,-22566,7772,14876,-17914,9540,1034,29789,-16480,31274,-11325,-4634,-14806,-28627,30756,17752,-23581,17361,10402,-31166,-27311,-22648,4968,19054,-26310,-24655,-4407,28430,28632,-20637,-10799,12247,-20297,-15424,8237,1656,-9669,-2539,-18943,-2812,-28829,9278,-5199,-8976,24552,25402,3620,-4366,6321,-20472,-18198,13978,12312,22121,14692,-12638,-1545,-13276,870,18827,14839,-11602,1235,-11838,-30399,-5816,5445,-3969,9817,-3083,4885,-15205,6126,-8860,22861,5274,-20346,26917,-9684,26761,16052,13115,32646,-237
15,-3596,-7676,-4143,-7571,26375,14267,-14088,3601,25442,-31720,-21211,21764,14180,8724,-16667,-3194,21,-26258,17751,-13974,15138,-23841,14433,15657,-23933,13773,-19952,-11867,23146,-2028,14686,10145,-21794,-3607,19802,-7289,-9592,9014,7249,-10360,32431,421,-21277,-464,31345,22073,26579,31032,-22095,-6882,-118,25260,30474,21806,1749,-11708,133,-22531,-2717,26381,5391,-13840,-4590,-25989,-3825,1815,7540,4893,2611,11188,-27067,1992,12371,20430,26950,-11388,-11803,4606,-30280,-18910,-20150,-10218,-16885,28337,7979,24589,-27334,-13673,11665,-9469,-24690,7410,-14807,11073,29052,969,-12724,-14884,3903,-13833,30175,13333,25732,22041,15234,-31468,10049,-23573,12944,13123,13526,2998,-22506,-17521,-25607,8429,12812,-19839,25014,20217,7417,-32197,-848,15460,-15410,1612,23392,-10962,-29896,-16671,-17848,23540,-11451,13976,29084,-21610,-21991,-14961,28554,-31894,-23831,25326,9180,17803,-11927,29754,-6308,-17581,25656,18753,3236,16090,3270,8462,-14977,13767,12432,6005,-25884,-26961,3188,-11693,31676,28924,-30952,14870,16890,18600,16475,31895,-29718,-31619,-13458,-20431,-7413,-19231,10898,30066,27015,24089,4704,-30344,18890,-31949,18893,-10209,-17729,14444,16659,-32559,4639,-5625,-19889,-15072,29698,-17450,-16316,28840,-3849,15376,-14047,20297,1169,-18714,19698,11875,-7548,23748,-28059,19474,-31536,-10512,-13429,11713,29255,5173,7760,6317,-17513,-22489,23240,-5037,-7886,29391,3167,-7097,26151,11682,23751,4104,-24145,-8610,22071,6602,-20770,23650,-31416,31626,20164,11721,27993,22948,-18174,14171,1182,15025,-20211,-9557,-27425,4558,-14610,16297,-9898,25549,-30538,-6743,28106,17302,18917,11289,13962,-2476,-29705,-11761,-5735,16233,28609,-5381,-20052,28874,18307,7150,4444,-7375,5534,24633,17592,17559,21327,12498,-22187,28033,13802,-27268,-12182,-25178,-12190,-17291,-23831,-12349,-5750,28049,-21448,-27512,13891,262,-3223,-16405,-4336,26999,-17365,-28168,-24134,-19715,28950,32214,15286,192,-14455,-31575,-2903,-26458,-28105,-22322,-24240,21619,-24509,4341,-16642,-4938,-6147,19728,29868,2
2660,-3465,-18823,-1916,-14318,8570,9866,-19788,16206,-8589,29983,29117,-20498,1772,-11067,-3334,-28479,19716,-20657,-18404,-10336,-14846,-29633,9564,28605,14620,-211,-7405,-5809,-29319,-15637,26830,-25158,26125,16625,-10925,724,-23562,-26449,26340,2968,-4168,-25376,-30362,-18267,-15528,-9587,-31206,7389,-22463,21444,-24609,8923,-18064,-8594,32447,-1865,3533,4690,-11796,12120,-29590,16393,19436,-8132,-9969,-31965,10885,11777,-30840,31817,-15736,-19494,-26481,-4150,24451,16085,16460,20788,19398,-18165,-7507,7114,-5095,13232,-18236,-25990,28374,32270,-5181,16588,-6466,-1415,-17540,-10102,-25579,-1400,29414,10866,-7661,3748,-31100,-22205,-10160,22503,-30834,27238,21736,10158,-2229,-15721,28866,-6432,25633,3197,-14087,-31911,-20813,28603,-10192,11092,8487,2564,17891,77,2589,-12267,-25649,-19921,-28376,13177,-31010,32130,9203,6494,-8902,26578,3376,-22654,-3401,-3241,9805,-25188,-3313,22348,2447,-7186,-7549,-2944,-30599,28583,8266,25389,-19721,13381,30982,15038,-4662,-18386,-30865,8145,-2137,13417,-593,19517,-314,29421,7038,-17153,-17468,-10394,-18531,-1316,5921,-9775,24302,13987,-30440,4641,-2406,-29074,24661,31730,22311,-19068,7590,-32286,24843,-21068,-14946,-12579,9923,19845,-21249,30695,4266,-2301,-30278,-9586,-27793,3457,17560,-27227,508,-28170,2829,8169,-11078,-16975,-11389,-5233,13570,-9955,-2694,12123,16496,-28665,17826,3753,-32365,-5195,-13462,-1300,-6247,13918,-17954,20245,-4422,27431,16223,27694,263,-27039,-21374,-20517,-11589,-10630,26574,-25349,28430,15250,29664,-18267,18482,17619,13692,30174,9866,6393,23689,12607,29737,-23001,-18214,-18790,-31472,29402,-10624,-23683,16115,-20409,5067,22769,-16597,26673,-9920,-791,-23315,15282,32740,-31904,-20627,7173,-28628,8571,-19850,-4910,22603,16565,-28152,31108,315,4668,-1771,15971,-30592,-24252,3333,-29136,910,21057,10462,16500,6800,-1070,-20641,-23585,5313,6596,-6641,-1442,-12788,3713,-26970,3598,-27527,-1393,3972,-22477,17892,19005,28088,-11623,28085,29104,18809,-12524,-27225,7037,-10525,31575,1481,-25129,-31892,2695
5,-7041,9744,23601,14228,8479,29948,20334,-13564,-12714,29840,13251,1444,2692,21195,-9836,-24991,4196,-18906,-981,30403,31877,-17470,6402,21107,26783,-734,-26344,-15253,-8050,15026,-13971,-16342,25302,-31455,-5080,-15359,5841,20126,13003,28971,8282,9171,-12579,7049,10457,-18205,15440,14501,-6752,23089,-28786,-28389,32592,-22598,17084,-14960,-9932,28272,32057,-8054,-22938,399,6525,15383,10688,11252,-7652,16451,-30308,32622,2249,3832,16095,-5598,-19917,21618,22834,5545,-21755,-10166,20469,30271,20643,20664,7549,25965,-26679,-16496,17788,21941,-476,8560,-9064,-29904,4828,-25667,18801,3476,5290,8430,-17428,5630,17440,-21542,-12764,-264,-28495,5365,31666,-1432,-2125,-19551,-8829,23422,1593,31870,1252,-22350,13273,6275,-6070,9025,27790,20187,18776,7559,-4787,-25805,25774,16069,-19006,3805,-23326,-27608,28180,-2689,-3479,-19725,-26390,-8454,21211,-25523,-27411,-21145,29651,30810,3533,1191,490,-4978,28929,15201,-26536,7721,-27994,-13832,26020,-992,-90,2659,10985,26573,4492,27301,-4437,26108,-30040,-12371,-27806,14115,4456,19610,-12741,30555,-15811,-26573,12009,-15419,21214,-29824,17266,12326,28080,-10103,5906,-15791,10066,24384,27434,-19875,-8929,-2100,8448,15004,3428,-3126,3406,-8299,29625,-23466,1361,-25465,10865,-1423,-3922,-25557,147,745,17051,9491,-21459,-23278,18990,-16596,-25334,-22421,-27984,19687,-6420,-11547,-8356,16321,-29665,9097,-5821,-18593,9624,-11154,18564,32106,15058,14316,-1486,-8152,-4559,-13543,-10126,28834,1205,-7788,-24012,-4624,-24141,-19525,-20522,9835,-31476,28838,17557,-18559,14766,27387,-17333,19879,-13359,-10445,11114,3875,-30361,-8052,7916,12767,18080,3190,10799,14725,-30747,-28169,-3050,20544,-10299,10971,-13849,19648,-7755,15223,8764,-13120,-15429,30090,19207,2685,24153,-14912,-969,1936,-17546,22883,-32333,-8451,-13424,-19616,10173,-9968,1908,28332,-187,-5222,2726,-18840,11913,27780,-9202,32424,19365,24627,13772,-19396,28567,27817,32690,-25159,16885,580,14702,-27419,9819,-21375,-23886,19952,23986,-16256,16457,-95,-18740,20769,6818,-29493,7183,
-26379,4531,2496,-21571,-8561,-6289,17535,-13918,28554,18711,-20153,29525,-11067,31821,18439,4841,16184,9688,25329,-11392,-10405,-18812,-11443,14356,24294,-30221,-16606,-10972,20737,4576,5989,-5531,-19313,16069,-22746,24966,24718,-20465,-9656,12029,13865,2468,1293,6397,-1686,-6073,19768,-32341,-27600,17879,16317,31443,-31453,30538,16031,-29779,22961,-28525,-20618,-2148,-27622,-3627,1375,-18543,15139,24650,-21286,-24747,32737,19480,18940,2444,-10109,-24340,1881,12345,20546,13146,31984,29107,22831,-22084,6406,-4809,29474,26219,10924,17176,-12143,20544,-30236,-30693,1757,22713,16212,18907,23138,-24957,11302,10628,9907,10622,11250,21372,-31290 diff --git a/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice8_input0_int16.csv b/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice8_input0_int16.csv new file mode 100644 index 0000000..ce3760a --- /dev/null +++ b/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice8_input0_int16.csv @@ -0,0 +1 @@ 
+-4098,-24073,16923,15746,-12176,-13981,16584,-23623,-9941,-18479,-3105,15738,-29427,589,-6156,-8072,-27331,1004,16108,-1872,16179,-2962,18879,-26536,23272,2577,18439,6052,-7891,16501,9310,4039,20656,18140,-14563,-4377,-26563,28496,16426,-20182,-20761,22140,-26081,-19587,-23843,-21074,25534,-21108,31458,13398,17719,8996,30578,26766,15351,18185,6021,-6257,-14656,-10767,-4486,23477,26243,-6783,2258,16071,-22552,27455,16967,-18932,522,7087,-9388,-21562,-31047,4062,20312,15285,-22073,5383,-14720,25387,32742,16604,-10818,-29861,1695,-22459,-25761,8466,-13081,3524,13357,-30393,-11244,19450,24475,7755,-25866,-15185,20797,25319,-21687,14284,-11249,19827,-6797,15309,-6015,6869,18796,-785,-9735,5840,-910,9699,3728,936,14248,-29627,-23710,23624,-25607,15701,27036,-20795,-19433,-23193,6279,19898,-13410,29748,-2782,-18616,18363,-10443,-29245,-24279,-6096,8712,-24895,-26047,-21730,22991,-4149,32134,19988,-10219,-11007,28112,-27349,-19687,-12587,-7495,-15585,-10676,32635,15612,-1863,-21740,14299,-22890,-15388,24663,7792,31451,2456,-31501,21523,4761,-11429,2251,21772,7124,4665,-4779,24247,-26733,-31834,-21542,-24238,-2887,-16906,7786,15183,-18409,8014,-8415,-17352,29130,32358,28608,-29992,27062,-29329,26174,27120,29799,-27335,9937,24262,-18076,-28840,3909,18150,-19360,30319,5684,-31579,19484,24682,30480,10352,3933,-24858,-25815,-25650,29011,-22723,27581,25032,-3072,6164,-10688,-10485,-12965,3056,30035,29908,1907,-32497,-24692,5377,-7944,-11634,-30844,23826,-24260,268,-24027,-20898,1545,11756,17602,-10021,15887,-24261,28377,-26210,1551,-23328,15469,-11289,6742,19245,18838,8585,-14204,-23306,-31437,-9057,-28651,-18366,19340,-2359,6825,-25790,2223,-31484,-5553,10581,27455,6227,-9932,8843,-18880,8143,-6656,-6763,-30852,-21967,-8870,-17361,-709,-21566,10028,26402,-714,2856,16609,30609,7729,23951,28321,14457,-27319,-13070,-12853,-27961,-26659,-3840,-23978,-10652,15066,4136,-11334,-23069,23372,8870,-19795,4858,31972,22998,11738,-7841,-9391,28326,4638,-15443,2730,-15942,-259,-2521,-22497,1
5091,-22728,19179,1235,-26463,1946,-3469,15423,3681,9001,-14274,-9542,-19132,-10848,-1019,-27076,19962,16404,-29526,-10821,-3220,7146,-31682,-14936,-12721,-23837,5271,28040,-17932,-25189,-11113,16536,11056,4786,26727,-4755,6249,-28408,21082,-1358,-31189,22336,-23474,10795,-20194,1641,-6613,18629,3375,-8466,13142,10372,-6565,14344,-21234,19912,7930,-28787,-16485,-11452,28669,-25981,-3320,-26581,27457,7805,3538,-30400,11220,1448,3777,-5267,4477,-15491,31327,26667,2435,-18055,31169,24289,8908,3199,23593,18325,-30361,-1712,-14714,-30101,18421,-28461,-22782,2679,-18275,-32474,22252,4786,-8611,20872,-9747,1536,15467,11594,3954,16138,-28948,25250,11809,3914,-31153,-29422,28930,19860,6154,16124,-27328,-28701,18543,-27356,-14187,-32227,12950,-27638,-8014,-11287,-5149,-15922,-29223,15460,7681,-20045,-16383,-22232,-27176,19812,-20531,-27256,-29239,-23496,-10199,-21596,-17525,24226,-31854,-30356,-22268,6184,-10337,-23535,-11425,-6593,2762,29062,-29088,24945,-11536,-22642,-19716,6779,24005,-24948,-19636,-23553,5470,3293,3683,17646,26854,24610,-24008,23651,-29742,-17936,-7573,-7405,-22725,5178,12112,13463,4807,-4368,18385,14921,-9163,-4329,-25801,-30758,-11878,28241,-7675,24390,-19098,-24802,-27988,-2027,-6316,-23591,-11694,-16347,29818,-18894,3011,2644,-21878,9520,17991,14978,-26798,24343,-24081,1329,22724,-1676,-17545,-30417,-21512,-22171,-21146,-11529,-27730,2352,-22092,-14333,9638,25340,9221,-12207,-28268,-15884,-18982,13717,13921,2040,-10657,11821,-13766,-19666,27199,-24025,-6171,13172,31195,-25048,25060,-26872,-8153,-10488,19536,-9207,5225,2459,29861,-27171,7634,-28802,-23851,6277,30167,2741,2856,-19382,24847,1406,11489,-24138,7431,-15052,32281,-25716,-29714,11299,14885,19579,-14547,18210,7884,-1655,-27268,11516,-15114,-4693,-26420,17005,13863,28805,8023,-16139,-29192,-1133,19696,-18658,-6682,23127,11113,-9393,-22191,-31334,-20292,-24371,-846,-27410,23240,2820,-22131,-20748,6977,-26506,29276,7818,19471,30214,27569,-12221,24229,76,9849,6818,4673,-10790,20297,32402,-956,-2666
2,-15444,25259,-17385,-26945,-18023,8453,19320,18379,-21405,-3948,-3951,-680,32361,22051,-19805,31657,-23452,-22988,-10741,28397,-15814,1518,5545,-3038,15192,-12066,22675,9620,-17699,-19345,-854,-29262,-28892,16210,15929,27180,21437,-28917,-1630,-2480,-9578,24634,30603,10527,7080,-12933,26815,29084,-32717,26699,-10777,-11361,-8595,-7693,-28789,20234,-14621,7495,2945,-5099,1335,1595,285,24331,-29219,30581,-28002,-22277,22015,32133,-19702,31668,20473,-5417,-1199,-27138,-15350,-18042,-27701,21475,-4360,17653,-26625,-9495,20972,2237,9419,29552,9522,-14525,-22450,1033,32649,-26435,31552,-32541,30488,10117,-31918,20501,-22443,-30891,-21293,28066,1490,-21957,27545,-24147,12824,-20133,32143,-24241,-29610,-24301,24554,18501,5872,3615,-22125,2810,23852,11919,28973,16388,-2110,14448,-860,20947,9793,-28594,29317,17643,10107,26252,18699,24867,-4783,-32396,16122,-9993,-5340,16861,31189,-32278,10260,1850,3327,-13519,-17821,-31232,12538,-18963,-2460,-13057,31163,-26547,23524,25261,28530,18118,15779,-6497,1399,-1723,15936,-13635,7959,9090,20061,-28161,26319,-18944,-29357,-9278,2595,-14592,-13890,17694,-30819,6601,-19802,-22810,20014,19930,3105,23057,6648,-10834,-6236,30879,21979,-24964,13505,-183,7217,25600,18571,23086,-20098,14371,-18441,31573,10954,8834,-2251,-1556,7348,-10323,10870,7639,10496,-16110,-14488,-15816,-1077,26795,11333,25642,129,-27378,23562,-7640,3475,-7089,-23010,-8322,-24973,-8141,-5212,-19193,-24580,-31406,-17971,6427,-14110,14868,-29646,-15654,-8486,-21448,-15781,27835,-19330,5598,-25364,-19172,-4845,10138,-26811,-2772,8585,18789,29043,3551,27186,32516,6923,15744,-6488,20862,-20971,-20232,32718,-30662,28486,-9897,-14672,-10126,17418,-31142,-32254,-3572,24167,29392,1487,-7937,13836,-8997,-3800,-29896,7057,-9816,30480,8543,-21983,-4027,4484,10832,-4972,28471,30535,-25573,-24153,14260,4821,5528,-20779,9621,-19379,3082,-23907,-22566,7772,14876,-17914,9540,1034,29789,-16480,31274,-11325,-4634,-14806,-28627,30756,17752,-23581,17361,10402,-31166,-27311,-22648,4968,19054
,-26310,-24655,-4407,28430,28632,-20637,-10799,12247,-20297,-15424,8237,1656,-9669,-2539,-18943,-2812,-28829,9278,-5199,-8976,24552,25402,3620,-4366,6321,-20472,-18198,13978,12312,22121,14692,-12638,-1545,-13276,870,18827,14839,-11602,1235,-11838,-30399,-5816,5445,-3969,9817,-3083,4885,-15205,6126,-8860,22861,5274,-20346,26917,-9684,26761,16052,13115,32646,-23715,-3596,-7676,-4143,-7571,26375,14267,-14088,3601,25442,-31720,-21211,21764,14180,8724,-16667,-3194,21,-26258,17751,-13974,15138,-23841,14433,15657,-23933,13773,-19952,-11867,23146,-2028,14686,10145,-21794,-3607,19802,-7289,-9592,9014,7249,-10360,32431,421,-21277,-464,31345,22073,26579,31032,-22095,-6882,-118,25260,30474,21806,1749,-11708,133,-22531,-2717,26381,5391,-13840,-4590,-25989,-3825,1815,7540,4893,2611,11188,-27067,1992,12371,20430,26950,-11388,-11803,4606,-30280,-18910,-20150,-10218,-16885,28337,7979,24589,-27334,-13673,11665,-9469,-24690,7410,-14807,11073,29052,969,-12724,-14884,3903,-13833,30175,13333,25732,22041,15234,-31468,10049,-23573,12944,13123,13526,2998,-22506,-17521,-25607,8429,12812,-19839,25014,20217,7417,-32197,-848,15460,-15410,1612,23392,-10962,-29896,-16671,-17848,23540,-11451,13976,29084,-21610,-21991,-14961,28554,-31894,-23831,25326,9180,17803,-11927,29754,-6308,-17581,25656,18753,3236,16090,3270,8462,-14977,13767,12432,6005,-25884,-26961,3188,-11693,31676,28924,-30952,14870,16890,18600,16475,31895,-29718,-31619,-13458,-20431,-7413,-19231,10898,30066,27015,24089,4704,-30344,18890,-31949,18893,-10209,-17729,14444,16659,-32559,4639,-5625,-19889,-15072,29698,-17450,-16316,28840,-3849,15376,-14047,20297,1169,-18714,19698,11875,-7548,23748,-28059,19474,-31536,-10512,-13429,11713,29255,5173,7760,6317,-17513,-22489,23240,-5037,-7886,29391,3167,-7097,26151,11682,23751,4104,-24145,-8610,22071,6602,-20770,23650,-31416,31626,20164,11721,27993,22948,-18174,14171,1182,15025,-20211,-9557,-27425,4558,-14610,16297,-9898,25549,-30538,-6743,28106,17302,18917,11289,13962,-2476,-29705,-11761,-5735,16
233,28609,-5381,-20052,28874,18307,7150,4444,-7375,5534,24633,17592,17559,21327,12498,-22187,28033,13802,-27268,-12182,-25178,-12190,-17291,-23831,-12349,-5750,28049,-21448,-27512,13891,262,-3223,-16405,-4336,26999,-17365,-28168,-24134,-19715,28950,32214,15286,192,-14455,-31575,-2903,-26458,-28105,-22322,-24240,21619,-24509,4341,-16642,-4938,-6147,19728,29868,22660,-3465,-18823,-1916,-14318,8570,9866,-19788,16206,-8589,29983,29117,-20498,1772,-11067,-3334,-28479,19716,-20657,-18404,-10336,-14846,-29633,9564,28605,14620,-211,-7405,-5809,-29319,-15637,26830,-25158,26125,16625,-10925,724,-23562,-26449,26340,2968,-4168,-25376,-30362,-18267,-15528,-9587,-31206,7389,-22463,21444,-24609,8923,-18064,-8594,32447,-1865,3533,4690,-11796,12120,-29590,16393,19436,-8132,-9969,-31965,10885,11777,-30840,31817,-15736,-19494,-26481,-4150,24451,16085,16460,20788,19398,-18165,-7507,7114,-5095,13232,-18236,-25990,28374,32270,-5181,16588,-6466,-1415,-17540,-10102,-25579,-1400,29414,10866,-7661,3748,-31100,-22205,-10160,22503,-30834,27238,21736,10158,-2229,-15721,28866,-6432,25633,3197,-14087,-31911,-20813,28603,-10192,11092,8487,2564,17891,77,2589,-12267,-25649,-19921,-28376,13177,-31010,32130,9203,6494,-8902,26578,3376,-22654,-3401,-3241,9805,-25188,-3313,22348,2447,-7186,-7549,-2944,-30599,28583,8266,25389,-19721,13381,30982,15038,-4662,-18386,-30865,8145,-2137,13417,-593,19517,-314,29421,7038,-17153,-17468,-10394,-18531,-1316,5921,-9775,24302,13987,-30440,4641,-2406,-29074,24661,31730,22311,-19068,7590,-32286,24843,-21068,-14946,-12579,9923,19845,-21249,30695,4266,-2301,-30278,-9586,-27793,3457,17560,-27227,508,-28170,2829,8169,-11078,-16975,-11389,-5233,13570,-9955,-2694,12123,16496,-28665,17826,3753,-32365,-5195,-13462,-1300,-6247,13918,-17954,20245,-4422,27431,16223,27694,263,-27039,-21374,-20517,-11589,-10630,26574,-25349,28430,15250,29664,-18267,18482,17619,13692,30174,9866,6393,23689,12607,29737,-23001,-18214,-18790,-31472,29402,-10624,-23683,16115,-20409,5067,22769,-16597,26673
,-9920,-791,-23315,15282,32740,-31904,-20627,7173,-28628,8571,-19850,-4910,22603,16565,-28152,31108,315,4668,-1771,15971,-30592,-24252,3333,-29136,910,21057,10462,16500,6800,-1070,-20641,-23585,5313,6596,-6641,-1442,-12788,3713,-26970,3598,-27527,-1393,3972,-22477,17892,19005,28088,-11623,28085,29104,18809,-12524,-27225,7037,-10525,31575,1481,-25129,-31892,26955,-7041,9744,23601,14228,8479,29948,20334,-13564,-12714,29840,13251,1444,2692,21195,-9836,-24991,4196,-18906,-981,30403,31877,-17470,6402,21107,26783,-734,-26344,-15253,-8050,15026,-13971,-16342,25302,-31455,-5080,-15359,5841,20126,13003,28971,8282,9171,-12579,7049,10457,-18205,15440,14501,-6752,23089,-28786,-28389,32592,-22598,17084,-14960,-9932,28272,32057,-8054,-22938,399,6525,15383,10688,11252,-7652,16451,-30308,32622,2249,3832,16095,-5598,-19917,21618,22834,5545,-21755,-10166,20469,30271,20643,20664,7549,25965,-26679,-16496,17788,21941,-476,8560,-9064,-29904,4828,-25667,18801,3476,5290,8430,-17428,5630,17440,-21542,-12764,-264,-28495,5365,31666,-1432,-2125,-19551,-8829,23422,1593,31870,1252,-22350,13273,6275,-6070,9025,27790,20187,18776,7559,-4787,-25805,25774,16069,-19006,3805,-23326,-27608,28180,-2689,-3479,-19725,-26390,-8454,21211,-25523,-27411,-21145,29651,30810,3533,1191,490,-4978,28929,15201,-26536,7721,-27994,-13832,26020,-992,-90,2659,10985,26573,4492,27301,-4437,26108,-30040,-12371,-27806,14115,4456,19610,-12741,30555,-15811,-26573,12009,-15419,21214,-29824,17266,12326,28080,-10103,5906,-15791,10066,24384,27434,-19875,-8929,-2100,8448,15004,3428,-3126,3406,-8299,29625,-23466,1361,-25465,10865,-1423,-3922,-25557,147,745,17051,9491,-21459,-23278,18990,-16596,-25334,-22421,-27984,19687,-6420,-11547,-8356,16321,-29665,9097,-5821,-18593,9624,-11154,18564,32106,15058,14316,-1486,-8152,-4559,-13543,-10126,28834,1205,-7788,-24012,-4624,-24141,-19525,-20522,9835,-31476,28838,17557,-18559,14766,27387,-17333,19879,-13359,-10445,11114,3875,-30361,-8052,7916,12767,18080,3190,10799,14725,-30747,-28169,-3050,2
0544,-10299,10971,-13849,19648,-7755,15223,8764,-13120,-15429,30090,19207,2685,24153,-14912,-969,1936,-17546,22883,-32333,-8451,-13424,-19616,10173,-9968,1908,28332,-187,-5222,2726,-18840,11913,27780,-9202,32424,19365,24627,13772,-19396,28567,27817,32690,-25159,16885,580,14702,-27419,9819,-21375,-23886,19952,23986,-16256,16457,-95,-18740,20769,6818,-29493,7183,-26379,4531,2496,-21571,-8561,-6289,17535,-13918,28554,18711,-20153,29525,-11067,31821,18439,4841,16184,9688,25329,-11392,-10405,-18812,-11443,14356,24294,-30221,-16606,-10972,20737,4576,5989,-5531,-19313,16069,-22746,24966,24718,-20465,-9656,12029,13865,2468,1293,6397,-1686,-6073,19768,-32341,-27600,17879,16317,31443,-31453,30538,16031,-29779,22961,-28525,-20618,-2148,-27622,-3627,1375,-18543,15139,24650,-21286,-24747,32737,19480,18940,2444,-10109,-24340,1881,12345,20546,13146,31984,29107,22831,-22084,6406,-4809,29474,26219,10924,17176,-12143,20544,-30236,-30693,1757,22713,16212,18907,23138,-24957,11302,10628,9907,10622,11250,21372,-31290 diff --git a/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice9.tflite b/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice9.tflite new file mode 100644 index 0000000..d9028d2 Binary files /dev/null and b/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice9.tflite differ diff --git a/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice9_golden_int16.csv b/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice9_golden_int16.csv new file mode 100644 index 0000000..eca840d --- /dev/null +++ b/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice9_golden_int16.csv @@ -0,0 +1 @@ 
+2245,-1242,-12978,-28029,17096,17481,33,28810,27450,30462,6954,-11153,30694,23642,10717,-30259,-889,9864,-11959,-22151,30686,-20073,30435,-26424,24201,16616,709,-29962,-828,26726,31462,-27701,20872,-27654,-9628,-587,9500,5602,-24664,-12806,-12074,-21283,-19782,21438,-27283,-110,19514,-32755,-19264,-5198,29483,-2132,17734,17980,12530,28944,-7650,7,5986,-21053,-11319,-12341,-29100,7680,-2700,30795,16522,1246,15079,-20709,20479,-25387,-26073,9097,16002,7074,24729,-1923,-26744,-21913,9804,16558,-4877,19964,-3873,-22195,-32140,-10857,-15269,-2480,23659,21819,-13271,-23953,10639,2180,-27364,-3988,29347,-14298,21831,-24942,24789,16031,281,843,-13370,-12284,30823,28874,-24439,29607,11028,-22135,10680,-32177,-21989,-16147,8093,-14129,-29423,-25339,26410,31602,11917,12037,32503,-4274,-27389,27140,2296,4091,15040,-26525,27732,-16380,-1449,-14848,-22734,27022,-9926,4176,1790,23016,-22789,-20025,14172,-7300,25320,-17760,-31761,12975,24539,28388,20347,27787,26005,-28134,-18854,27925,-27787,4197,-25572,-1334,-27671,-27143,-31426,-17657,-26997,9480,-31657,14082,9743,-15925,-31093,32194,3432,-16623,-7356,26770,31168,27075,-19121,24580,-15066,-22822,-6972,-12400,16770,20644,-7599,14268,5926,7999,5324,3544,-32352,-16147,17320,11090,21840,10736,-27741,-18909,11616,-18160,18974,27030,-15992,-27039,-16965,-15785,30430,-9374,-9551,-135,-14037,-28914,17061,5873,14406,4756,-15245,12451,-19171,2827,15603,-11758,18017,-4158,21607,32642,12537,-13323,-13152,26358,22602,2248,-29827,11592,-8173,1979,21677,-26834,20621,10015,-31443,-2543,28723,-32632,3565,-5271,-3732,13531,18382,3068,25673,-12530,-21281,-14548,23181,-29415,23623,5425,-493,17267,-20831,13028,-29024,23299,19578,-14390,-12586,-15677,2379,-14349,-10082,-2938,19191,-1552,3062,5534,-25385,19844,30155,-14197,-693,-1668,31695,-18200,-2005,-22804,29649,6571,-28687,-8622,8343,-21455,-25208,23874,-21188,-4887,24563,254,18899,20957,-7397,16985,-8695,29685,7962,28516,-23303,8587,16859,-18060,-17320,27427,-12856,32461,-9412,1407,-3988,-25171,-
4656,15405,10601,32325,10573,22154,-14071,32534,26317,-27564,-13417,26397,13450,19632,-13853,30050,9911,-11687,-23622,26273,24295,12329,-25115,-32607,7956,-18016,25412,20797,32106,32391,2649,11443,-2508,30367,12793,-9495,-30771,-18401,30914,24882,-18905,-20820,4339,-1768,1741,-1243,-9846,16376,25503,28462,21809,1676,8590,17251,-19236,-19356,18916,-9928,-10894,16403,8343,26614,-32358,-22039,-8564,-17773,-22131,-3979,10554,-28166,10854,28719,27961,-23240,-1407,20799,-23559,6814,-19729,918,-18992,-4525,12822,2335,21788,32296,-23589,21415,-6593,-14132,3679,-30970,-31025,2616,-32696,-28522,6817,25236,31563,-14061,2984,-26074,29948,4577,22554,7687,-18871,-8776,27811,-2811,2832,-2233,6638,-3152,-27057,11394,7306,-24090,-6092,5702,14346,-13467,941,7136,-19250,-29423,-32294,345,24597,-12989,-31522,-2923,-29462,16623,4735,24341,-24224,28663,16337,12320,5767,16549,-32678,23894,-4380,-631,30230,-20228,3425,-1656,19159,-16202,16053,-26486,-22980,-24641,17947,-31202,23752,-16558,12780,15026,-7299,-26146,1313,-23100,10075,12208,-13930,11069,10476,-8322,-9306,-12078,6166,1626,-5657,13658,-16015,12316,-1978,24340,7430,-10684,-31668,-5560,771,8876,-17969,29215,22663,26449,15170,16044,4148,-28883,-21058,-10794,29667,-22108,-27127,-16985,-13064,3887,-17089,13579,2370,-28691,5024,-24842,-26274,4999,12671,16314,-30479,5186,-3408,-14929,-30583,-17287,5064,22643,-4293,-14062,-27885,-20940,23056,-2822,7654,-16974,-6150,21780,9369,23523,-5332,26350,10371,-583,3294,24487,8751,1835,23378,-27887,-8650,-30827,-20008,19544,-16482,30155,17120,17600,-25619,-15247,-26231,20327,10521,-16881,-24841,15694,26431,8595,-25340,-1598,10108,20080,-12626,-30548,84,-9994,7172,-20087,-22178,-12390,32738,-29491,27177,-4049,-11971,-2365,6050,16446,31238,-28688,6156,20619,21040,19380,30189,-13202,28713,9851,-12186,30192,16368,-22208,-6363,5004,-4648,-5248,20368,7779,10555,30774,11212,-4076,582,-15393,-15499,13611,-19333,-28069,28905,22585,8435,-28350,-10509,9936,-29494,-9478,-27045,17031,30768,29197,2996,-14526,63
1,-10924,-2145,3310,-16279,-10052,-18157,-6209,-10408,-9924,-7351,-5106,30373,-7686,9296,-1696,18600,-28276,-1598,25701,25101,2611,-177,11037,-4034,15557,-20307,-1605,2434,-12842,-143,1723,-13529,-10186,-12023,-16281,2194,11752,21630,-29697,18370,18660,23726,-5350,31048,-8749,-2620,-4319,-3698,21131,13889,31704,12681,6143,-26126,2175,12598,-30124,-680,-28458,-10159,17359,-17305,-17811,16564,9295,-19005,5181,-3726,16948,25334,26198,18881,1749,23519,-21672,-11251,13115,-24695,19856,26197,18443,-11301,18575,-24941,-4128,17632,4469,-10091,30539,-597,-6140,24752,7282,11142,-17609,15030,-8402,26214,-26574,27709,15519,-20588,6150,12949,3965,1785,12700,28265,942,29557,16385,-1091,27240,6393,-9376,-9173,32565,-26707,30405,9498,-733,-12655,-30388,1024,2662,-8948,9794,-28009,-29748,-11211,21302,-21215,-16946,12395,1737,19713,10941,2059,-2879,30849,-32079,7627,-24869,2044,6967,21435,-23050,2300,10238,21693,-30622,14098,31164,1601,3610,31026,-17134,9903,10897,-16249,-14509,3476,-31098,-15222,3760,-31247,32065,32074,19670,-16872,631,14783,9581,-12208,6437,-4439,-11396,26863,-27408,-8390,17166,10470,-25360,-26423,9277,-18674,-24004,-16820,-22612,-25698,9601,14124,-6646,16857,-10270,22066,27305,6687,16871,-22080,-27694,-27683,13764,-21537,-25819,-27123,-12543,12619,-16312,-22845,26206,22637,4219,28077,28486,12123,-19637,-27683,3599,16949,-2707,18624,-32472,24962,20625,-21337,21920,16987,26231,-11276,-7800,22470,-23724,1808,8625,5001,-7651,-5786,31420,30652,-14142,31517,27779,30395,13034,25672,7952,9724,-21651,1200,-32601,5494,15243,8461,-32723,-4317,-14166,-8936,136,8405,20192,23758,-17736,-24739,-9778,-14988,16240,-13267,9288,7536,28267,26223,-14816,30586,-28675,27672,13578,-14491,7728,30182,-926,-9434,15885,-26563,-20579,5190,23661,29612,-26665,28108,13092,16622,-26114,-4175,11364,-5438,-17117,4948,2022,30362,20537,5922,-17767,14261,30261,15776,-18962,32093,-21093,10474,-30472,10441,18887,-20788,-24476,-12906,-6050,5757,-19558,-2670,8423,-8320,-3895,468,27281,-4251,22128,7053,148
02,29019,-22123,-14030,24740,17453,6783,10517,31246,32389,-29938,-15321,5598,5899,20317,20767,-31366,-5351,-3618,-28691,-28490,-13812,-26055,310,-31940,-29660,20622,-6444,-25880,13713,30101,-5464,6964,22755,5160,19726,-19451,-3070,29365,22209,-32530,-21226,26534,2444,-13098,9583,-8670,5342,-7395,-7100,-5326,-2608,590,1923,24874,25611,-29040,23470,18409,-6786,-7365,-11127,-6204,-7559,26528,20796,-14919,-5157,4855,14024,-6164,-1985,-28490,-24745,-20460,13970,-30464,460,19987,26052,-7988,14149,-27763,9439,17222,13325,-22666,-4924,15630,-20837,-9263,29195,7313,8936,-21379,-17863,6721,-24623,-6888,-6606,7739,-10533,28803,13689,-16314,-1117,10476,-5489,-22646,13055,-22324,6844,9794,-16681,24155,7978,11113,-1097,-10007,-21984,-27683,17613,12382,11592,3209,8087,-10669,20114,-17774,-9498,24052,-16904,-22386,-4750,-21014,17099,6848,15400,-27790,15456,-7061,-20991,27444,-1005,21397,-12724,-32707,-19182,-25071,-14515,-2753,1690,30201,18810,28246,21043,30009,22572,-17441,-6818,12976,19011,-11685,26958,-32262,-11564,22321,-24560,4083,-12457,2942,26074,-614,27329,20442,24620,26008,-19173,27162,30112,9710,-24599,27702,27927,-6620,30231,-9878,2674,-15922,-29122,25696,15894,27941,13289,10062,-5939,2471,-13874,-28945,25083,5588,-25811,12877,-5147,-1433,-24170,27722,-15905,-7280,-7586,-22506,21380,9076,13952,-3385,-23073,6009,28239,3428,8532,-30731,22740,-3557,17887,7704,-15208,-14510,31979,18966,-30286,-30762,-22195,26681,-7250,28831,-19516,-12460,-13320,-29514,31684,-15007,28332,23275,-11056,-32461,-8937,2521,7870,-24363,-9255,26846,20682,16093,-13387,-1505,19461,19948,6451,-3954,-12491,688,13092,-16490,-14719,-12112,1291,-23969,22529,-24087,-7583,-1891,-9588,-19298,27472,21065,-8472,23579,10372,-94,26385,-1349,5606,-21960,-22848,28995,-8226,-12737,27552,-28924,17872,-4513,-11870,-15765,-13073,-20586,22649,5742,-11743,25407,5317,-32070,-7115,-22727,-8667,32311,11517,5787,-10685,-20316,19807,-280,17212,13248,20952,12428,28850,-23476,32315,-8234,8766,-3860,9886,1910,28813,-21066,25224,
-27435,5896,5605,-11208,-7406,32577,-25857,-1787,16941,-3406,-24068,25518,-1596,-6668,28331,-32434,3311,-20753,-11027,27258,-21929,-1226,-12263,-16289,-17957,-5778,26298,21928,-11329,-20601,-6949,-12327,-19358,-22145,-23702,4632,21764,21584,20046,-205,-9074,6984,11026,7936,11716,5658,-31766,-20049,22411,32488,13423,17916,29765,-28258,30348,-18513,3816,22423,32500,21505,30155,9771,16555,21847,-7889,26938,-16173,6870,-15159,30829,10011,-12951,14902,25809,15813,10875,303,23743,-23344,2456,-6597,-19409,24182,-14010,3960,28203,14315,25168,-29661,-7654,-15911,29525,6333,29678,-23815,-14536,26205,-3703,-30576,-28256,16876,8310,14389,12357,-13655,23267,18408,30357,-18592,-19191,-27455,-21961,21302,29926,20788,-17496,12369,6115,16796,18872,22443,25411,13476,-21683,-14001,9841,11812,32747,26122,25422,10647,-17087,-26603,-28068,17256,13734,-10826,22752,30603,-12066,-8000,12205,30622,-16328,24241,-12509,895,6517,-29690,-31583,28042,-9831,-25124,11801,19281,6428,2069,14956,16304,30065,15200,-3588,-16457,-4284,16026,9399,18653,5127,11288,-23037,-7727,-3097,-24493,4535,6254,19695,-797,-27540,-20640,-4102,-29253,-11993,-12990,26657,31478,5368,-24829,-20146,-2418,30133,18227,29778,-22943,-323,-28406,-4427,31789,-6736,-28100,31293,-23632,-11735,8285,-18455,-518,-18373,29521,-22279,-27347,17307,1297,-7122,11958,-26222,-13372,5200,-9936,-6615,17767,-10102,18854,-29026,1187,-14643,252,9488,-24371,19772,13583,407,-6531 diff --git a/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice9_input0_int16.csv b/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice9_input0_int16.csv new file mode 100644 index 0000000..3777be8 --- /dev/null +++ b/tensorflow/lite/micro/integration_tests/seanet/strided_slice/strided_slice9_input0_int16.csv @@ -0,0 +1 @@ 
+12304,14092,-1751,-9340,-7123,-17731,1752,-11838,-3637,-22398,-27321,30616,-16555,-9851,1132,20355,-9902,1942,14438,-20499,9331,8650,7016,-30237,31281,6162,26120,-28044,3914,14298,-29632,3082,6589,-5374,-5224,-29001,-16749,-11511,11820,-18154,21529,-282,3719,14308,15147,-30736,-30934,-17925,-5142,2074,10186,5009,-10660,-27510,-23802,26239,21568,24968,-28245,-22463,-7121,4054,-27325,22874,27878,-24230,3801,-27485,-7579,28593,-29783,-2783,-30785,19381,30983,30488,-2819,13073,-14777,-20023,10544,-9177,26112,-3243,28244,-15349,-32648,31693,-10562,9139,-15811,21370,18982,18851,-30340,22952,-24556,16412,-21329,-22907,-18904,-12790,-22582,-5924,-28099,-19518,-8933,15567,31314,13355,20456,15880,-3819,-17783,-9099,-15270,18969,27756,-21434,-32519,-27521,-31896,30516,-25711,-6290,-19556,-29754,-15848,3897,12832,-24156,-31387,31897,22034,20143,-12356,13766,20423,-18366,20711,4660,-28378,-4613,7277,9645,7031,19734,23274,26472,-19021,-29989,4347,14442,-31424,12232,-5672,-18852,-23139,682,29159,-32136,-30668,6053,-24603,11114,15703,2397,-4266,739,16734,32404,-3351,-10777,-30692,17882,-27162,-32072,27575,-15434,-17879,-18633,3852,14863,-4175,-22923,24969,-664,-14114,-16574,15716,28361,23243,26554,-5209,-12951,26409,-32504,-19192,-16380,22687,-28296,3235,-18331,-1380,-13483,-14144,-17829,-98,2731,-30335,14557,-27688,-2101,-23687,-17329,19715,-25969,-28619,-12809,26818,-22966,28703,-18230,-30243,-1802,16559,19422,12256,13258,-10521,-5462,-21939,-11060,1596,6941,-17027,-17411,20112,6773,-22037,-29545,2522,-18974,-996,21244,-31462,-12603,21847,-3045,-7868,-1996,9290,16597,-21481,5221,-19447,25387,13620,-32551,-10951,10863,-27441,10518,-5718,29418,12591,7122,-21542,-14360,-8737,-29569,-11832,21225,-29780,30505,-4477,-18744,16705,27717,-11902,-13844,-19324,27867,21677,-9490,13679,5919,362,-3664,22371,-30771,21934,27767,21376,-12072,-11606,-1682,27241,-13348,21479,16406,30699,21745,25027,-142,27681,-18045,-27412,-23369,29987,-776,26868,5379,-30177,-3981,-19534,-24698,9372,-21779,-7848,-
18417,-9977,15048,7212,19479,-1505,-8470,-15995,-930,12434,-30686,8574,14921,12655,-32174,6396,-2566,3777,-6584,25689,17037,21984,-32638,-15985,11467,31335,26967,-32202,-27877,-9936,9506,22223,7020,14470,15895,-22877,25023,23508,6755,32379,-19059,1117,-13881,2030,-28698,-12830,9082,18984,-16535,-25331,15783,13453,-26068,8416,13877,21444,-27758,-21494,-31009,-22769,2512,-18001,13151,12631,-2301,27938,31994,21405,32006,16508,9293,-4297,-17036,-28958,14687,12,-13350,-26500,-11734,-16160,7755,-15852,-3525,22070,18274,-17974,11199,-16786,-7964,-1349,11947,4331,-30896,15741,26135,13954,-24804,-12887,26720,31362,6867,-30557,23754,20741,-1139,-5496,6764,-14361,-2257,3291,15333,19870,21168,21799,-17687,-16528,-15759,22039,-12088,16614,1312,-13301,-16501,-1204,11096,5152,4507,4362,-31121,-7873,7602,-7144,-22481,-25669,25825,29234,23112,17169,-20620,-17954,-29615,4545,3491,14008,24891,22244,-25950,-15241,-17037,-32483,13875,24122,-19588,11713,-2527,22694,5971,-29433,14229,5949,1878,15572,22542,29293,-27855,-5698,21421,17241,1647,-22599,-32581,32426,23732,-19771,-15909,8453,-7335,29430,29259,4348,-16141,-32509,18383,-26854,10515,-22660,-29921,20077,-18935,26750,29905,-18289,27830,18074,15490,-8221,26693,-10811,31457,-31227,-8170,13055,-25324,-24646,29243,3539,-3629,6145,-32131,-24875,-3980,-17011,12342,-20229,-901,28240,-19389,26142,-19427,8958,-23174,-12016,-10882,10931,1752,-27014,23621,-10663,-13019,1337,-26636,23386,-28422,-2440,-7736,17316,-1537,9753,-21172,-23304,-31751,-19265,28706,-24925,-15702,14117,-22598,-17894,-13678,-16262,29530,32137,-28395,23298,17235,-683,-9678,15694,28405,17842,4133,-3059,2410,6401,-10262,-24918,-3097,4027,12782,-21963,-448,-25714,11164,-22636,14010,31259,-12918,-7856,10323,-29575,4599,-15306,4138,2755,-26421,-3548,-7278,16204,-7967,8468,5228,5823,6401,26204,17663,-1886,8450,-25167,23098,15721,2240,18024,2436,25438,-16315,-31445,17802,30573,21604,-19640,-22220,-25540,7926,-13084,7940,9889,11952,8009,-22276,16010,-4767,1040,-19673,-32093,12672,-
24212,10744,-10820,20469,-4830,14293,21308,20644,27620,31467,19308,27852,13351,-18615,18660,15108,11595,-30494,8233,-14436,-23042,-30810,2510,-8189,-24480,11758,-5605,9853,32371,4157,20009,12999,-21317,-31315,27042,19005,22853,25163,-17133,-19504,24724,-19327,13618,-16812,-9541,22991,-26244,-7200,24912,26899,-411,-20247,20438,14877,-4046,-19908,-5426,-10778,19060,7465,31790,32163,19477,-32335,19676,24979,-21048,6179,12773,27887,-32753,20690,21008,31894,17349,9950,12325,-24875,-28061,26425,19761,-21008,17210,7347,-20644,2221,19675,-14039,-32256,1580,16109,18817,-19604,25306,-28930,24337,16298,19112,15210,-12174,13768,-17414,-30093,-18686,9224,-957,-31049,-19513,6232,25156,27552,19598,-13756,-22296,5047,6500,5527,-16260,-2424,-10705,8258,1155,30159,2245,-1242,-12978,-28029,17096,17481,33,28810,27450,30462,6954,-11153,30694,23642,10717,-30259,-889,9864,-11959,-22151,30686,-20073,30435,-26424,24201,16616,709,-29962,-828,26726,31462,-27701,20872,-27654,-9628,-587,9500,5602,-24664,-12806,-12074,-21283,-19782,21438,-27283,-110,19514,-32755,-19264,-5198,29483,-2132,17734,17980,12530,28944,-7650,7,5986,-21053,-11319,-12341,-29100,7680,-2700,30795,16522,1246,15079,-20709,20479,-25387,-26073,9097,16002,7074,24729,-1923,-26744,-21913,9804,16558,-4877,19964,-3873,-22195,-32140,-10857,-15269,-2480,23659,21819,-13271,-23953,10639,2180,-27364,-3988,29347,-14298,21831,-24942,24789,16031,281,843,-13370,-12284,30823,28874,-24439,29607,11028,-22135,10680,-32177,-21989,-16147,8093,-14129,-29423,-25339,26410,31602,11917,12037,32503,-4274,-27389,27140,2296,4091,15040,-26525,27732,-16380,-1449,-14848,-22734,27022,-9926,4176,1790,23016,-22789,-20025,14172,-7300,25320,-17760,-31761,12975,24539,28388,20347,27787,26005,-28134,-18854,27925,-27787,4197,-25572,-1334,-27671,-27143,-31426,-17657,-26997,9480,-31657,14082,9743,-15925,-31093,32194,3432,-16623,-7356,26770,31168,27075,-19121,24580,-15066,-22822,-6972,-12400,16770,20644,-7599,14268,5926,7999,5324,3544,-32352,-16147,17320,11090,21840,1073
6,-27741,-18909,11616,-18160,18974,27030,-15992,-27039,-16965,-15785,30430,-9374,-9551,-135,-14037,-28914,17061,5873,14406,4756,-15245,12451,-19171,2827,15603,-11758,18017,-4158,21607,32642,12537,-13323,-13152,26358,22602,2248,-29827,11592,-8173,1979,21677,-26834,20621,10015,-31443,-2543,28723,-32632,3565,-5271,-3732,13531,18382,3068,25673,-12530,-21281,-14548,23181,-29415,23623,5425,-493,17267,-20831,13028,-29024,23299,19578,-14390,-12586,-15677,2379,-14349,-10082,-2938,19191,-1552,3062,5534,-25385,19844,30155,-14197,-693,-1668,31695,-18200,-2005,-22804,29649,6571,-28687,-8622,8343,-21455,-25208,23874,-21188,-4887,24563,254,18899,20957,-7397,16985,-8695,29685,7962,28516,-23303,8587,16859,-18060,-17320,27427,-12856,32461,-9412,1407,-3988,-25171,-4656,15405,10601,32325,10573,22154,-14071,32534,26317,-27564,-13417,26397,13450,19632,-13853,30050,9911,-11687,-23622,26273,24295,12329,-25115,-32607,7956,-18016,25412,20797,32106,32391,2649,11443,-2508,30367,12793,-9495,-30771,-18401,30914,24882,-18905,-20820,4339,-1768,1741,-1243,-9846,16376,25503,28462,21809,1676,8590,17251,-19236,-19356,18916,-9928,-10894,16403,8343,26614,-32358,-22039,-8564,-17773,-22131,-3979,10554,-28166,10854,28719,27961,-23240,-1407,20799,-23559,6814,-19729,918,-18992,-4525,12822,2335,21788,32296,-23589,21415,-6593,-14132,3679,-30970,-31025,2616,-32696,-28522,6817,25236,31563,-14061,2984,-26074,29948,4577,22554,7687,-18871,-8776,27811,-2811,2832,-2233,6638,-3152,-27057,11394,7306,-24090,-6092,5702,14346,-13467,941,7136,-19250,-29423,-32294,345,24597,-12989,-31522,-2923,-29462,16623,4735,24341,-24224,28663,16337,12320,5767,16549,-32678,23894,-4380,-631,30230,-20228,3425,-1656,19159,-16202,16053,-26486,-22980,-24641,17947,-31202,23752,-16558,12780,15026,-7299,-26146,1313,-23100,10075,12208,-13930,11069,10476,-8322,-9306,-12078,6166,1626,-5657,13658,-16015,12316,-1978,24340,7430,-10684,-31668,-5560,771,8876,-17969,29215,22663,26449,15170,16044,4148,-28883,-21058,-10794,29667,-22108,-27127,-16985,-13064
,3887,-17089,13579,2370,-28691,5024,-24842,-26274,4999,12671,16314,-30479,5186,-3408,-14929,-30583,-17287,5064,22643,-4293,-14062,-27885,-20940,23056,-2822,7654,-16974,-6150,21780,9369,23523,-5332,26350,10371,-583,3294,24487,8751,1835,23378,-27887,-8650,-30827,-20008,19544,-16482,30155,17120,17600,-25619,-15247,-26231,20327,10521,-16881,-24841,15694,26431,8595,-25340,-1598,10108,20080,-12626,-30548,84,-9994,7172,-20087,-22178,-12390,32738,-29491,27177,-4049,-11971,-2365,6050,16446,31238,-28688,6156,20619,21040,19380,30189,-13202,28713,9851,-12186,30192,16368,-22208,-6363,5004,-4648,-5248,20368,7779,10555,30774,11212,-4076,582,-15393,-15499,13611,-19333,-28069,28905,22585,8435,-28350,-10509,9936,-29494,-9478,-27045,17031,30768,29197,2996,-14526,631,-10924,-2145,3310,-16279,-10052,-18157,-6209,-10408,-9924,-7351,-5106,30373,-7686,9296,-1696,18600,-28276,-1598,25701,25101,2611,-177,11037,-4034,15557,-20307,-1605,2434,-12842,-143,1723,-13529,-10186,-12023,-16281,2194,11752,21630,-29697,18370,18660,23726,-5350,31048,-8749,-2620,-4319,-3698,21131,13889,31704,12681,6143,-26126,2175,12598,-30124,-680,-28458,-10159,17359,-17305,-17811,16564,9295,-19005,5181,-3726,16948,25334,26198,18881,1749,23519,-21672,-11251,13115,-24695,19856,26197,18443,-11301,18575,-24941,-4128,17632,4469,-10091,30539,-597,-6140,24752,7282,11142,-17609,15030,-8402,26214,-26574,27709,15519,-20588,6150,12949,3965,1785,12700,28265,942,29557,16385,-1091,27240,6393,-9376,-9173,32565,-26707,30405,9498,-733,-12655,-30388,1024,2662,-8948,9794,-28009,-29748,-11211,21302,-21215,-16946,12395,1737,19713,10941,2059,-2879,30849,-32079,7627,-24869,2044,6967,21435,-23050,2300,10238,21693,-30622,14098,31164,1601,3610,31026,-17134,9903,10897,-16249,-14509,3476,-31098,-15222,3760,-31247,32065,32074,19670,-16872,631,14783,9581,-12208,6437,-4439,-11396,26863,-27408,-8390,17166,10470,-25360,-26423,9277,-18674,-24004,-16820,-22612,-25698,9601,14124,-6646,16857,-10270,22066,27305,6687,16871,-22080,-27694,-27683,13764,-21537,-
25819,-27123,-12543,12619,-16312,-22845,26206,22637,4219,28077,28486,12123,-19637,-27683,3599,16949,-2707,18624,-32472,24962,20625,-21337,21920,16987,26231,-11276,-7800,22470,-23724,1808,8625,5001,-7651,-5786,31420,30652,-14142,31517,27779,30395,13034,25672,7952,9724,-21651,1200,-32601,5494,15243,8461,-32723,-4317,-14166,-8936,136,8405,20192,23758,-17736,-24739,-9778,-14988,16240,-13267,9288,7536,28267,26223,-14816,30586,-28675,27672,13578,-14491,7728,30182,-926,-9434,15885,-26563,-20579,5190,23661,29612,-26665,28108,13092,16622,-26114,-4175,11364,-5438,-17117,4948,2022,30362,20537,5922,-17767,14261,30261,15776,-18962,32093,-21093,10474,-30472,10441,18887,-20788,-24476,-12906,-6050,5757,-19558,-2670,8423,-8320,-3895,468,27281,-4251,22128,7053,14802,29019,-22123,-14030,24740,17453,6783,10517,31246,32389,-29938,-15321,5598,5899,20317,20767,-31366,-5351,-3618,-28691,-28490,-13812,-26055,310,-31940,-29660,20622,-6444,-25880,13713,30101,-5464,6964,22755,5160,19726,-19451,-3070,29365,22209,-32530,-21226,26534,2444,-13098,9583,-8670,5342,-7395,-7100,-5326,-2608,590,1923,24874,25611,-29040,23470,18409,-6786,-7365,-11127,-6204,-7559,26528,20796,-14919,-5157,4855,14024,-6164,-1985,-28490,-24745,-20460,13970,-30464,460,19987,26052,-7988,14149,-27763,9439,17222,13325,-22666,-4924,15630,-20837,-9263,29195,7313,8936,-21379,-17863,6721,-24623,-6888,-6606,7739,-10533,28803,13689,-16314,-1117,10476,-5489,-22646,13055,-22324,6844,9794,-16681,24155,7978,11113,-1097,-10007,-21984,-27683,17613,12382,11592,3209,8087,-10669,20114,-17774,-9498,24052,-16904,-22386,-4750,-21014,17099,6848,15400,-27790,15456,-7061,-20991,27444,-1005,21397,-12724,-32707,-19182,-25071,-14515,-2753,1690,30201,18810,28246,21043,30009,22572,-17441,-6818,12976,19011,-11685,26958,-32262,-11564,22321,-24560,4083,-12457,2942,26074,-614,27329,20442,24620,26008,-19173,27162,30112,9710,-24599,27702,27927,-6620,30231,-9878,2674,-15922,-29122,25696,15894,27941,13289,10062,-5939,2471,-13874,-28945,25083,5588,-25811,12877,-5
147,-1433,-24170,27722,-15905,-7280,-7586,-22506,21380,9076,13952,-3385,-23073,6009,28239,3428,8532,-30731,22740,-3557,17887,7704,-15208,-14510,31979,18966,-30286,-30762,-22195,26681,-7250,28831,-19516,-12460,-13320,-29514,31684,-15007,28332,23275,-11056,-32461,-8937,2521,7870,-24363,-9255,26846,20682,16093,-13387,-1505,19461,19948,6451,-3954,-12491,688,13092,-16490,-14719,-12112,1291,-23969,22529,-24087,-7583,-1891,-9588,-19298,27472,21065,-8472,23579,10372,-94,26385,-1349,5606,-21960,-22848,28995,-8226,-12737,27552,-28924,17872,-4513,-11870,-15765,-13073,-20586,22649,5742,-11743,25407,5317,-32070,-7115,-22727,-8667,32311,11517,5787,-10685,-20316,19807,-280,17212,13248,20952,12428,28850,-23476,32315,-8234,8766,-3860,9886,1910,28813,-21066,25224,-27435,5896,5605,-11208,-7406,32577,-25857,-1787,16941,-3406,-24068,25518,-1596,-6668,28331,-32434,3311,-20753,-11027,27258,-21929,-1226,-12263,-16289,-17957,-5778,26298,21928,-11329,-20601,-6949,-12327,-19358,-22145,-23702,4632,21764,21584,20046,-205,-9074,6984,11026,7936,11716,5658,-31766,-20049,22411,32488,13423,17916,29765,-28258,30348,-18513,3816,22423,32500,21505,30155,9771,16555,21847,-7889,26938,-16173,6870,-15159,30829,10011,-12951,14902,25809,15813,10875,303,23743,-23344,2456,-6597,-19409,24182,-14010,3960,28203,14315,25168,-29661,-7654,-15911,29525,6333,29678,-23815,-14536,26205,-3703,-30576,-28256,16876,8310,14389,12357,-13655,23267,18408,30357,-18592,-19191,-27455,-21961,21302,29926,20788,-17496,12369,6115,16796,18872,22443,25411,13476,-21683,-14001,9841,11812,32747,26122,25422,10647,-17087,-26603,-28068,17256,13734,-10826,22752,30603,-12066,-8000,12205,30622,-16328,24241,-12509,895,6517,-29690,-31583,28042,-9831,-25124,11801,19281,6428,2069,14956,16304,30065,15200,-3588,-16457,-4284,16026,9399,18653,5127,11288,-23037,-7727,-3097,-24493,4535,6254,19695,-797,-27540,-20640,-4102,-29253,-11993,-12990,26657,31478,5368,-24829,-20146,-2418,30133,18227,29778,-22943,-323,-28406,-4427,31789,-6736,-28100,31293,-23632,-117
35,8285,-18455,-518,-18373,29521,-22279,-27347,17307,1297,-7122,11958,-26222,-13372,5200,-9936,-6615,17767,-10102,18854,-29026,1187,-14643,252,9488,-24371,19772,13583,407,-6531 diff --git a/tensorflow/lite/micro/integration_tests/seanet/sub/BUILD b/tensorflow/lite/micro/integration_tests/seanet/sub/BUILD new file mode 100644 index 0000000..a7e7f1c --- /dev/null +++ b/tensorflow/lite/micro/integration_tests/seanet/sub/BUILD @@ -0,0 +1,321 @@ +# Description: +# generated integration test for one specific kernel in a model. +load( + "//tensorflow/lite/micro:build_def.bzl", + "generate_cc_arrays", + "micro_copts", +) + +package( + default_visibility = ["//visibility:public"], + # Disabling layering_check because of http://b/177257332 + features = ["-layering_check"], + licenses = ["notice"], +) + +generate_cc_arrays( + name = "generated_sub0_model_data_cc", + src = "sub0.tflite", + out = "sub0_model_data.cc", +) + +generate_cc_arrays( + name = "generated_sub0_model_data_hdr", + src = "sub0.tflite", + out = "sub0_model_data.h", +) + +generate_cc_arrays( + name = "generated_sub1_model_data_cc", + src = "sub1.tflite", + out = "sub1_model_data.cc", +) + +generate_cc_arrays( + name = "generated_sub1_model_data_hdr", + src = "sub1.tflite", + out = "sub1_model_data.h", +) + +generate_cc_arrays( + name = "generated_sub2_model_data_cc", + src = "sub2.tflite", + out = "sub2_model_data.cc", +) + +generate_cc_arrays( + name = "generated_sub2_model_data_hdr", + src = "sub2.tflite", + out = "sub2_model_data.h", +) + +generate_cc_arrays( + name = "generated_sub3_model_data_cc", + src = "sub3.tflite", + out = "sub3_model_data.cc", +) + +generate_cc_arrays( + name = "generated_sub3_model_data_hdr", + src = "sub3.tflite", + out = "sub3_model_data.h", +) + +generate_cc_arrays( + name = "generated_sub4_model_data_cc", + src = "sub4.tflite", + out = "sub4_model_data.cc", +) + +generate_cc_arrays( + name = "generated_sub4_model_data_hdr", + src = "sub4.tflite", + out = "sub4_model_data.h", 
+) + +generate_cc_arrays( + name = "generated_sub0_input0_int16_test_data_cc", + src = "sub0_input0_int16.csv", + out = "sub0_input0_int16_test_data.cc", +) + +generate_cc_arrays( + name = "generated_sub0_input0_int16_test_data_hdr", + src = "sub0_input0_int16.csv", + out = "sub0_input0_int16_test_data.h", +) + +generate_cc_arrays( + name = "generated_sub0_input1_int16_test_data_cc", + src = "sub0_input1_int16.csv", + out = "sub0_input1_int16_test_data.cc", +) + +generate_cc_arrays( + name = "generated_sub0_input1_int16_test_data_hdr", + src = "sub0_input1_int16.csv", + out = "sub0_input1_int16_test_data.h", +) + +generate_cc_arrays( + name = "generated_sub0_golden_int16_test_data_cc", + src = "sub0_golden_int16.csv", + out = "sub0_golden_int16_test_data.cc", +) + +generate_cc_arrays( + name = "generated_sub0_golden_int16_test_data_hdr", + src = "sub0_golden_int16.csv", + out = "sub0_golden_int16_test_data.h", +) + +generate_cc_arrays( + name = "generated_sub1_input0_int16_test_data_cc", + src = "sub1_input0_int16.csv", + out = "sub1_input0_int16_test_data.cc", +) + +generate_cc_arrays( + name = "generated_sub1_input0_int16_test_data_hdr", + src = "sub1_input0_int16.csv", + out = "sub1_input0_int16_test_data.h", +) + +generate_cc_arrays( + name = "generated_sub1_input1_int16_test_data_cc", + src = "sub1_input1_int16.csv", + out = "sub1_input1_int16_test_data.cc", +) + +generate_cc_arrays( + name = "generated_sub1_input1_int16_test_data_hdr", + src = "sub1_input1_int16.csv", + out = "sub1_input1_int16_test_data.h", +) + +generate_cc_arrays( + name = "generated_sub1_golden_int16_test_data_cc", + src = "sub1_golden_int16.csv", + out = "sub1_golden_int16_test_data.cc", +) + +generate_cc_arrays( + name = "generated_sub1_golden_int16_test_data_hdr", + src = "sub1_golden_int16.csv", + out = "sub1_golden_int16_test_data.h", +) + +generate_cc_arrays( + name = "generated_sub2_input0_int16_test_data_cc", + src = "sub2_input0_int16.csv", + out = 
"sub2_input0_int16_test_data.cc", +) + +generate_cc_arrays( + name = "generated_sub2_input0_int16_test_data_hdr", + src = "sub2_input0_int16.csv", + out = "sub2_input0_int16_test_data.h", +) + +generate_cc_arrays( + name = "generated_sub2_input1_int16_test_data_cc", + src = "sub2_input1_int16.csv", + out = "sub2_input1_int16_test_data.cc", +) + +generate_cc_arrays( + name = "generated_sub2_input1_int16_test_data_hdr", + src = "sub2_input1_int16.csv", + out = "sub2_input1_int16_test_data.h", +) + +generate_cc_arrays( + name = "generated_sub2_golden_int16_test_data_cc", + src = "sub2_golden_int16.csv", + out = "sub2_golden_int16_test_data.cc", +) + +generate_cc_arrays( + name = "generated_sub2_golden_int16_test_data_hdr", + src = "sub2_golden_int16.csv", + out = "sub2_golden_int16_test_data.h", +) + +generate_cc_arrays( + name = "generated_sub3_input0_int16_test_data_cc", + src = "sub3_input0_int16.csv", + out = "sub3_input0_int16_test_data.cc", +) + +generate_cc_arrays( + name = "generated_sub3_input0_int16_test_data_hdr", + src = "sub3_input0_int16.csv", + out = "sub3_input0_int16_test_data.h", +) + +generate_cc_arrays( + name = "generated_sub3_input1_int16_test_data_cc", + src = "sub3_input1_int16.csv", + out = "sub3_input1_int16_test_data.cc", +) + +generate_cc_arrays( + name = "generated_sub3_input1_int16_test_data_hdr", + src = "sub3_input1_int16.csv", + out = "sub3_input1_int16_test_data.h", +) + +generate_cc_arrays( + name = "generated_sub3_golden_int16_test_data_cc", + src = "sub3_golden_int16.csv", + out = "sub3_golden_int16_test_data.cc", +) + +generate_cc_arrays( + name = "generated_sub3_golden_int16_test_data_hdr", + src = "sub3_golden_int16.csv", + out = "sub3_golden_int16_test_data.h", +) + +generate_cc_arrays( + name = "generated_sub4_input0_int16_test_data_cc", + src = "sub4_input0_int16.csv", + out = "sub4_input0_int16_test_data.cc", +) + +generate_cc_arrays( + name = "generated_sub4_input0_int16_test_data_hdr", + src = "sub4_input0_int16.csv", + 
out = "sub4_input0_int16_test_data.h", +) + +generate_cc_arrays( + name = "generated_sub4_input1_int16_test_data_cc", + src = "sub4_input1_int16.csv", + out = "sub4_input1_int16_test_data.cc", +) + +generate_cc_arrays( + name = "generated_sub4_input1_int16_test_data_hdr", + src = "sub4_input1_int16.csv", + out = "sub4_input1_int16_test_data.h", +) + +generate_cc_arrays( + name = "generated_sub4_golden_int16_test_data_cc", + src = "sub4_golden_int16.csv", + out = "sub4_golden_int16_test_data.cc", +) + +generate_cc_arrays( + name = "generated_sub4_golden_int16_test_data_hdr", + src = "sub4_golden_int16.csv", + out = "sub4_golden_int16_test_data.h", +) + +cc_library( + name = "models_and_testdata", + srcs = [ + "generated_sub0_golden_int16_test_data_cc", + "generated_sub0_input0_int16_test_data_cc", + "generated_sub0_input1_int16_test_data_cc", + "generated_sub0_model_data_cc", + "generated_sub1_golden_int16_test_data_cc", + "generated_sub1_input0_int16_test_data_cc", + "generated_sub1_input1_int16_test_data_cc", + "generated_sub1_model_data_cc", + "generated_sub2_golden_int16_test_data_cc", + "generated_sub2_input0_int16_test_data_cc", + "generated_sub2_input1_int16_test_data_cc", + "generated_sub2_model_data_cc", + "generated_sub3_golden_int16_test_data_cc", + "generated_sub3_input0_int16_test_data_cc", + "generated_sub3_input1_int16_test_data_cc", + "generated_sub3_model_data_cc", + "generated_sub4_golden_int16_test_data_cc", + "generated_sub4_input0_int16_test_data_cc", + "generated_sub4_input1_int16_test_data_cc", + "generated_sub4_model_data_cc", + ], + hdrs = [ + "generated_sub0_golden_int16_test_data_hdr", + "generated_sub0_input0_int16_test_data_hdr", + "generated_sub0_input1_int16_test_data_hdr", + "generated_sub0_model_data_hdr", + "generated_sub1_golden_int16_test_data_hdr", + "generated_sub1_input0_int16_test_data_hdr", + "generated_sub1_input1_int16_test_data_hdr", + "generated_sub1_model_data_hdr", + "generated_sub2_golden_int16_test_data_hdr", + 
"generated_sub2_input0_int16_test_data_hdr", + "generated_sub2_input1_int16_test_data_hdr", + "generated_sub2_model_data_hdr", + "generated_sub3_golden_int16_test_data_hdr", + "generated_sub3_input0_int16_test_data_hdr", + "generated_sub3_input1_int16_test_data_hdr", + "generated_sub3_model_data_hdr", + "generated_sub4_golden_int16_test_data_hdr", + "generated_sub4_input0_int16_test_data_hdr", + "generated_sub4_input1_int16_test_data_hdr", + "generated_sub4_model_data_hdr", + ], + copts = micro_copts(), +) + +cc_test( + name = "integration_test", + srcs = [ + "integration_tests.cc", + ], + copts = micro_copts(), + deps = [ + ":models_and_testdata", + "//python/tflite_micro:python_ops_resolver", + "//tensorflow/lite/micro:micro_framework", + "//tensorflow/lite/micro:micro_log", + "//tensorflow/lite/micro:micro_resource_variable", + "//tensorflow/lite/micro:op_resolvers", + "//tensorflow/lite/micro:recording_allocators", + "//tensorflow/lite/micro/testing:micro_test", + ], +) diff --git a/tensorflow/lite/micro/integration_tests/seanet/sub/Makefile.inc b/tensorflow/lite/micro/integration_tests/seanet/sub/Makefile.inc new file mode 100644 index 0000000..80cc5b8 --- /dev/null +++ b/tensorflow/lite/micro/integration_tests/seanet/sub/Makefile.inc @@ -0,0 +1,31 @@ +integration_tests_seanet_sub_GENERATOR_INPUTS := \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/sub/sub0.tflite \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/sub/sub1.tflite \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/sub/sub2.tflite \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/sub/sub3.tflite \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/sub/sub4.tflite \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/sub/sub0_input0_int16.csv \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/sub/sub0_input1_int16.csv \ 
+$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/sub/sub0_golden_int16.csv \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/sub/sub1_input0_int16.csv \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/sub/sub1_input1_int16.csv \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/sub/sub1_golden_int16.csv \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/sub/sub2_input0_int16.csv \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/sub/sub2_input1_int16.csv \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/sub/sub2_golden_int16.csv \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/sub/sub3_input0_int16.csv \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/sub/sub3_input1_int16.csv \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/sub/sub3_golden_int16.csv \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/sub/sub4_input0_int16.csv \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/sub/sub4_input1_int16.csv \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/sub/sub4_golden_int16.csv \ + +integration_tests_seanet_sub_SRCS := \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/sub/integration_tests.cc \ +$(TENSORFLOW_ROOT)python/tflite_micro/python_ops_resolver.cc \ + +integration_tests_seanet_sub_HDR := \ +$(TENSORFLOW_ROOT)python/tflite_micro/python_ops_resolver.h + +$(eval $(call microlite_test,integration_tests_seanet_sub_test,\ +$(integration_tests_seanet_sub_SRCS),$(integration_tests_seanet_sub_HDR),$(integration_tests_seanet_sub_GENERATOR_INPUTS))) diff --git a/tensorflow/lite/micro/integration_tests/seanet/sub/integration_tests.cc b/tensorflow/lite/micro/integration_tests/seanet/sub/integration_tests.cc new file mode 100644 index 0000000..d90c654 --- /dev/null +++ 
b/tensorflow/lite/micro/integration_tests/seanet/sub/integration_tests.cc @@ -0,0 +1,134 @@ +/* Copyright 2022 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#include + +#include "python/tflite_micro/python_ops_resolver.h" +#include "tensorflow/lite/c/common.h" +#include "tensorflow/lite/micro/integration_tests/seanet/sub/sub0_golden_int16_test_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/sub/sub0_input0_int16_test_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/sub/sub0_input1_int16_test_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/sub/sub0_model_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/sub/sub1_golden_int16_test_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/sub/sub1_input0_int16_test_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/sub/sub1_input1_int16_test_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/sub/sub1_model_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/sub/sub2_golden_int16_test_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/sub/sub2_input0_int16_test_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/sub/sub2_input1_int16_test_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/sub/sub2_model_data.h" +#include 
"tensorflow/lite/micro/integration_tests/seanet/sub/sub3_golden_int16_test_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/sub/sub3_input0_int16_test_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/sub/sub3_input1_int16_test_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/sub/sub3_model_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/sub/sub4_golden_int16_test_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/sub/sub4_input0_int16_test_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/sub/sub4_input1_int16_test_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/sub/sub4_model_data.h" +#include "tensorflow/lite/micro/micro_log.h" +#include "tensorflow/lite/micro/micro_profiler.h" +#include "tensorflow/lite/micro/recording_micro_allocator.h" +#include "tensorflow/lite/micro/recording_micro_interpreter.h" +#include "tensorflow/lite/micro/system_setup.h" +#include "tensorflow/lite/micro/testing/micro_test.h" + +constexpr size_t kTensorArenaSize = 1024 * 100; +uint8_t tensor_arena[kTensorArenaSize]; + +namespace tflite { +namespace micro { +namespace { + +void RunModel(const uint8_t* model, const int16_t* input0, + const uint32_t input0_size, const int16_t* input1, + const uint32_t input1_size, const int16_t* golden, + const uint32_t golden_size, const char* name) { + InitializeTarget(); + MicroProfiler profiler; + PythonOpsResolver op_resolver; + + MicroInterpreter interpreter(GetModel(model), op_resolver, tensor_arena, + kTensorArenaSize, nullptr, &profiler); + interpreter.AllocateTensors(); + TfLiteTensor* input_tensor0 = interpreter.input(0); + TF_LITE_MICRO_EXPECT_EQ(input_tensor0->bytes, input0_size * sizeof(int16_t)); + memcpy(interpreter.input(0)->data.raw, input0, input_tensor0->bytes); + TfLiteTensor* input_tensor1 = interpreter.input(1); + TF_LITE_MICRO_EXPECT_EQ(input_tensor1->bytes, input1_size * sizeof(int16_t)); + 
memcpy(interpreter.input(1)->data.raw, input1, input_tensor1->bytes); + if (kTfLiteOk != interpreter.Invoke()) { + TF_LITE_MICRO_EXPECT(false); + return; + } + profiler.Log(); + MicroPrintf(""); + + TfLiteTensor* output_tensor = interpreter.output(0); + TF_LITE_MICRO_EXPECT_EQ(output_tensor->bytes, golden_size * sizeof(int16_t)); + int16_t* output = ::tflite::GetTensorData(output_tensor); + for (uint32_t i = 0; i < golden_size; i++) { + // TODO(b/205046520): Better understand why TfLite and TFLM can sometimes be + // off by 1. + TF_LITE_MICRO_EXPECT_NEAR(golden[i], output[i], 1); + } +} + +} // namespace +} // namespace micro +} // namespace tflite + +TF_LITE_MICRO_TESTS_BEGIN + +TF_LITE_MICRO_TEST(sub0_test) { + tflite::micro::RunModel( + g_sub0_model_data, g_sub0_input0_int16_test_data, + g_sub0_input0_int16_test_data_size, g_sub0_input1_int16_test_data, + g_sub0_input1_int16_test_data_size, g_sub0_golden_int16_test_data, + g_sub0_golden_int16_test_data_size, "sub0 test"); +} + +TF_LITE_MICRO_TEST(sub1_test) { + tflite::micro::RunModel( + g_sub1_model_data, g_sub1_input0_int16_test_data, + g_sub1_input0_int16_test_data_size, g_sub1_input1_int16_test_data, + g_sub1_input1_int16_test_data_size, g_sub1_golden_int16_test_data, + g_sub1_golden_int16_test_data_size, "sub1 test"); +} + +TF_LITE_MICRO_TEST(sub2_test) { + tflite::micro::RunModel( + g_sub2_model_data, g_sub2_input0_int16_test_data, + g_sub2_input0_int16_test_data_size, g_sub2_input1_int16_test_data, + g_sub2_input1_int16_test_data_size, g_sub2_golden_int16_test_data, + g_sub2_golden_int16_test_data_size, "sub2 test"); +} + +TF_LITE_MICRO_TEST(sub3_test) { + tflite::micro::RunModel( + g_sub3_model_data, g_sub3_input0_int16_test_data, + g_sub3_input0_int16_test_data_size, g_sub3_input1_int16_test_data, + g_sub3_input1_int16_test_data_size, g_sub3_golden_int16_test_data, + g_sub3_golden_int16_test_data_size, "sub3 test"); +} + +TF_LITE_MICRO_TEST(sub4_test) { + tflite::micro::RunModel( + g_sub4_model_data, 
g_sub4_input0_int16_test_data, + g_sub4_input0_int16_test_data_size, g_sub4_input1_int16_test_data, + g_sub4_input1_int16_test_data_size, g_sub4_golden_int16_test_data, + g_sub4_golden_int16_test_data_size, "sub4 test"); +} + +TF_LITE_MICRO_TESTS_END diff --git a/tensorflow/lite/micro/integration_tests/seanet/sub/sub0.tflite b/tensorflow/lite/micro/integration_tests/seanet/sub/sub0.tflite new file mode 100644 index 0000000..e79d029 Binary files /dev/null and b/tensorflow/lite/micro/integration_tests/seanet/sub/sub0.tflite differ diff --git a/tensorflow/lite/micro/integration_tests/seanet/sub/sub0_golden_int16.csv b/tensorflow/lite/micro/integration_tests/seanet/sub/sub0_golden_int16.csv new file mode 100644 index 0000000..c72a4fd --- /dev/null +++ b/tensorflow/lite/micro/integration_tests/seanet/sub/sub0_golden_int16.csv @@ -0,0 +1 @@ +32767,29089,-5880,-30011,32767,-27013,32767,-32768,32767,-32768,-32768,-32768,32767,-25339,3988,-14791,32767,25654,-15103,-32768,-27911,-11546,-531,31434,-32768,1436,18827,32767,375,-16963,32767,10605,7685,32767,-6380,-22748,-32768,-28296,32767,5543,-16797,21793,32767,-32768,32767,25086,-32768,-32768,5390,12039,-12466,14389,17511,-32768,28117,32767,32767,-8987,-32768,-11396,32767,16507,32767,-10757,-32768,-11469,32767,-32768,-4448,-24538,-32768,-25611,-26952,842,-8242,4057,-11439,-32768,20459,-30724,32767,-32768,31456,-32768,-25900,25566,4401,-13105,-32768,7231,-32768,-32768,-32768,-91,2290,21653,26265,10164,-21713,27434,-32768,2077,-6680,-32768,1961,-32768,-32768,18335,25571,30566,-32768,26687,18166,-23672,31783,13886,-31688,-24448,-3716,11490,31577,-32768,76,32767,20790,-5558,32767,-11344,2262,-20668,32767,32767,-32768,-11041,-17639,2728,14225,32767,-32768,902,-10,32767,-32768,-7638,32767,-14875,-32768,88,32767,32767,21014,23103,-32768,32767,32767,-32768,32767,32767,-5902,32767,29445,-23827,26541,1648,29877,-15862,29536,-26280,-32768,-32768,-32768,32767,-32768,1007,-12956,32767,-9890,32767,-11234,3149,13434,-9874,-32768,-32768,20553
,32767,-20323,-6933,-32768,-24477,32767,-32768,-32147,-25163,-31660,-7693,-32768,-12354,24136,-24459,32767,-26582,32767,17225,-32768,29201,13040,-32768,8273,32767,21033,-32759,-32768,25026,-25413,7272,-2936,-32768,5827,32767,-20336,-32768,18225,32767,-32768,-23145,18887,26880,-8656,18661,-32768,8549,-32768,-4627,-32768,32767,12672,-8282,32767,-6146,24824,-32768,27657,-2656,-3803,32767,32767,18132,11442,-21854,-20063,-22276,-32768,16114,-3168,32306,7638,7106,-6073,-32768,-31378,17023,-25236,12195,-32092,7501,-32768,2184,-12408,17278,2899,-32768,21360,-1700,-32768,-32222,-22870,32767,14191,8011,-6747,13803,-32768,-22754,32767,-28233,32767,-22337,-32768,-30105,-4391,25511,-11893,-32768,-32768,-32768,26510,-20441,-32389,-32768,-32768,32767,-20584,-28664,-13407,-32768,1376,32767,-20628,-3912,32767,-32768,-30186,-11285,32767,-23120,17372,-18765,-32768,-13814,22292,1991,-32768,32767,-32768,24065,4930,-12471,-32768,-32768,-32768,32767,-32768,-29715,26117,-32768,737,2858,17993,6650,2182,-32768,19335,-32768,32767,5706,32767,-10426,32767,5401,9336,32767,-21444,32767,-19754,-14700,32767,32767,-32768,32767,32767,-18791,-19102,-13968,4790,32767,22071,31559,-6105,-32768,6807,-6092,302,32767,29978,22380,17601,9909,5379,-11446,-398,-32768,-32768,1607,32767,1859,-6582,-22861,-32768,32767,-19555,-32768,-32768,-32768,-7960,-32768,32767,15402,32767,-14898,2755,13567,-27496,-18653,32767,32767,-32768,10307,32767,8013,4463,14262,32767,32767,-28574,32767,32767,-32768,32767,32767,-32768,-6841,12112,16773,-32768,-32768,-14711,29749,-18567,-31556,31363,12718,-16303,13271,-1075,-27646,-31220,15691,-32768,20522,29612,-32768,32767,32767,-13116,-31726,-7074,26359,11377,-18083,-2852,32767,2352,-6317,32767,-32768,32767,-24647,-32768,25303,32767,-32768,-3823,3331,-674,-32768,27681,-32768,-32768,19905,-14505,-1628,6919,-18820,-28154,-32768,32767,-32768,32767,32767,30389,32767,-21743,-32768,-32768,31656,32767,7405,32767,9033,-32768,3673,-32768,-5046,32767,-21786,3931,4369,-822,32767,17121,5309,-19000,3
962,-32768,32767,-23372,18435,32767,-15411,26575,26746,18874,25235,29808,-32768,-32768,32767,-23371,32767,32767,-10099,-32768,32767,-206,27212,32767,-3835,32081,28082,12730,14177,32767,32767,-9793,-32768,-23227,-32768,-32768,-21741,32767,7497,-25218,-17325,-5166,18906,14848,-32768,-32768,-30339,-1583,-6289,-22325,-32768,32767,27119,32767,-32768,-32768,-6340,-32768,-17429,32767,-6725,-32768,-31162,-32768,32767,-27256,-22301,32767,-11555,32767,6504,4242,11987,-10427,-32768,13651,25634,29200,32767,-32768,-336,24239,32767,24600,32767,-32768,-15914,-9681,-32768,32767,30159,5929,-32768,21211,32767,-26981,-30902,32767,-32768,-32768,22702,32767,-32768,-32768,-32768,-11743,-32768,19998,32767,23317,25336,-32768,-32768,-996,-25357,32767,-17420,-32768,3417,-13681,7554,32767,32767,-47,-26401,-32768,-32768,-32768,32037,-32768,24523,-1261,25632,-29281,-32768,-32768,32767,-23939,2630,6526,7307,-9022,32767,14719,32767,32767,32767,382,32767,28757,16921,-3768,32767,-32768,28021,-20018,-5374,-29412,-1447,32767,-25616,-32768,13543,-32768,-32768,-30523,2772,32767,-10343,-10973,32767,-7920,-32768,12885,-32768,22650,-16215,32203,23604,21881,-13176,-14683,7814,2152,-32768,10222,-32768,-32768,-6364,-10951,32767,-23097,-5149,-24987,-18564,-23171,22950,-32768,-32768,6350,13801,-9209,-11864,20733,-15993,-23074,25597,32767,-32768,32767,32767,31188,16148,-5238,-19697,-8896,-9343,-6644,32767,32767,-26838,14267,-13892,21673,32767,18199,-2227,3084,32767,-14800,-30168,-14601,-25474,32767,24409,32767,-21636,17400,-4924,-32768,-6827,-18569,32767,1524,-28566,-32768,4554,32767,6232,32767,4901,-32768,32767,-32768,-18682,20762,-2987,10714,15277,1475,-32627,-26731,-32768,29959,2131,-32768,-944,-32768,32767,10894,12621,2995,30060,-11201,-28878,18670,-4929,-32768,27354,-30889,32767,-13999,-32768,-32768,32767,32767,-12139,32767,15893,-32768,-32768,-2938,32767,32767,-32768,-32768,-26823,-32768,-22843,-32768,-32768,-20874,-32768,32134,-5833,32767,-10392,-32768,17043,-32768,-32768,32767,1705,29345,-32768,17840,-1
9361,-7623,22987,-23560,-29834,2627,-32768,-32768,-3293,26197,16642,-6201,2385,-26029,-25350,31200,14478,-3641,6966,20816,-17116,-32768,-13544,-32768,-32768,-32768,-31058,-32768,32767,-17593,19255,-5369,-12836,32767,-10969,32767,-31547,21087,32767,31068,-32768,8679,3012,-9708,22848,-32400,-32768,-20239,25395,32767,10211,4931,32767,-18587,32767,-15496,-32768,32767,-15445,32767,-32768,28144,32767,21382,24596,-3017,8344,-32768,32767,28216,10968,1416,-7132,18310,-12221,-24437,32767,-32768,-2713,-9747,-32768,-25055,-26976,-32768,-32768,32767,9651,-8308,32767,18757,-32768,-23614,11785,-16913,-32768,-32768,-25267,32767,-3239,32767,-32768,22682,-31963,-32768,15771,28671,32767,32767,25261,5659,12207,-2192,27366,30047,-25578,-32768,577,32767,21705,32767,8476,26153,3900,-32768,32767,-32768,32767,15862,-32768,-30769,-27365,11514,-3217,23643,14006,646,-3388,-26531,-32768,22489,27372,32767,15276,-25955,-27754,-3623,21852,-13122,-6002,32767,32767,32767,25980,-14006,-13318,16168,28566,-15896,32767,32767,-20449,8403,-18823,30269,-32768,22570,-32768,14812,-1526,-22155,-32768,12095,32690,32767,-32768,2404,-6090,20917,32767,-8659,-14867,-31944,20687,-32768,13491,-31303,32767,32767,30076,-32768,-24148,12072,-13146,-4797,32767,32642,32673,-19410,-13618,32767,27218,-3342,-32768,25979,28560,-32768,17164,-32768,25880,20824,32767,-29035,32767,1566,-30467,4443,743,-16920,-9056,-27928,16724,32767,-32768,-23050,-28272,-30013,32767,23131,-10870,-29703,14787,23991,-5677,21466,-15455,32767,-32486,-32648,-13646,3939,-32768,21095,14493,-31679,19137,-32768,32429,-14310,-15457,-8545,32767,-32768,-32768,-25303,-21294,21005,-32768,9050,2002,32767,24632,-17893,-20106,-25666,32767,-1217,-14699,26180,-11949,-32681,-30708,-32768,-10857,-32768,32767,-32768,32767,32767,-32768,32767,-32768,6969,3470,18739,32767,9827,32767,32767,-32768,14278,-31157,-32768,-15700,-32768,-28082,-32768,22511,-32768,32767,32767,-32768,30610,14207,18344,-14840,30382,-21850,24833,32767,22784,32767,2340,-32768,-2289,32767,-32768,-2239
1,-28535,-1749,-6333,6137,-15439,32767,-11205,-32768,32767,-32768,9637,25736,27401,-16009,-32768,22279,-7457,20143,29207,32767,5894,-32768,-7586,7362,-2234,-32768,-12176,-32768,12835,7404,32767,32767,-3430,32767,-3980,11945,-32768,-1560,26531,7677,19238,6086,-32768,32767,11334,17094,-20927,18482,-14629,-19117,-8850,24323,-23839,1426,-15189,27097,32767,-32768,32767,-24302,-15382,32767,29220,-2097,580,-32768,-19506,-18819,-4384,-31026,-7939,32767,-7441,-17444,-20817,-32768,32767,-32768,20373,19352,-55,-13487,-32768,-32768,2058,22973,-32768,-621,-32768,-31463,-32768,-6479,-32768,22866,2298,-31843,-32768,-14511,-32768,-32768,-3607,-32768,-22781,-32768,32767,-32768,-32768,-1970,-32768,-32768,32767,-32768,-32768,32767,-2334,24106,-32768,-16877,22772,32767,-24689,32767,-15258,-20977,10923,-32768,2822,-4387,-31390,21762,31191,28831,-3320,-32768,-9771,-7028,-21370,-8894,25611,-32768,-24101,-32768,11805,6203,18336,-32768,32767,-32768,-32768,-14793,-3711,-32768,-7127,31363,32767,-32768,28321,26253,-32768,19716,-18340,-27576,-20144,16123,32767,32767,-32768,-32768,32767,10928,11883,-154,32767,-32768,-32768,32767,-8217,32767,-32768,-32768,32767,12507,28252,24547,-28195,885,-11584,27889,7811,9919,23665,21306,32767,-31402,-19362,10902,32767,-25762,-4914,13642,-32768,32767,7224,-31672,32767,32767,24994,11914,348,32767,-32768,32767,-8064,-27717,-31773,32068,-32768,-30261,32767,32767,3577,-7640,5436,22824,-19583,-19073,32767,-27081,6726,32767,4343,31734,16538,-32768,32767,15241,32767,-21585,32767,27984,-19103,142,32767,6528,3421,27723,10771,32767,-32768,-32768,14296,32767,11986,-32768,-30980,32767,-29749,-30297,3304,2781,17893,32767,29787,-26544,-28496,12366,32767,5452,26899,32767,-32121,-30831,-1863,-9232,-17441,29532,32767,-8861,5706,-14016,-11946,32767,-1644,32767,22032,-27669,32767,-20351,32767,14059,-11711,-32768,-14345,-11879,8087,7763,14407,32767,-28608,-32768,32767,32767,-14486,-32768,7763,-19684,22679,-32768,-25816,-12995,31827,-29680,-32768,32767,15335,-32768,32767,23239,138
42,32767,16959,27206,-12375,-20084,-32768,-32768,11609,-32768,-15742,22667,14726,8625,-32768,22929,-14587,-32768,-24717,-32768,32767,30953,-16859,32767,-24348,-8506,-32768,-32768,-32768,-11917,-32253,-30000,-21511,14404,32767,-32768,-32768,24021,-17217,32767,28900,-16323,-32768,-27941,-4277,32767,26917,3428,-32768,-32768,12550,-5741,-18640,20610,30938,20260,11368,32767,28116,4688,-32768,32767,-21923,32767,-32615,9969,-17555,5006,3875,-32768,32767,-7164,-20085,-13681,-32768,-10262,12223,32767,-21055,32767,32767,4639 diff --git a/tensorflow/lite/micro/integration_tests/seanet/sub/sub0_input0_int16.csv b/tensorflow/lite/micro/integration_tests/seanet/sub/sub0_input0_int16.csv new file mode 100644 index 0000000..a5a38ba --- /dev/null +++ b/tensorflow/lite/micro/integration_tests/seanet/sub/sub0_input0_int16.csv @@ -0,0 +1 @@ +30031,17761,-3886,-18113,31041,-16782,32121,-31681,26418,-23248,-30522,-27208,29490,-15382,2312,-9104,31710,15532,-9146,-28873,-17219,-7308,-36,19491,-21887,899,11261,30886,67,-10157,31343,6393,4513,32635,-4006,-14199,-28722,-17452,30765,3139,-10414,13221,24725,-30254,26494,15335,-21183,-26695,3593,7119,-7906,8614,10459,-21986,17208,28840,22264,-5296,-30856,-6995,29595,10108,22148,-6610,-25539,-7098,31069,-31132,-2784,-15265,-27978,-15879,-16284,538,-5295,2768,-7046,-25278,12407,-18870,26869,-22237,19391,-24445,-15986,15439,2987,-7808,-32767,4451,-22995,-22103,-22315,184,1520,13164,15901,6442,-13404,16559,-32551,1164,-4053,-20515,1083,-26597,-27523,11520,15592,18694,-26961,16443,11424,-14769,19215,8306,-19696,-14839,-2303,7133,19614,-30617,333,29716,12711,-3416,25744,-6970,1429,-12736,28770,22644,-23602,-6992,-10968,1491,8954,20248,-24204,834,-41,22948,-26595,-4720,26767,-9309,-28373,303,23810,26762,13169,14385,-25501,22285,30320,-22526,25972,28139,-3501,31940,17850,-14392,16172,754,18456,-9831,18145,-16366,-29126,-25502,-27015,29701,-30730,577,-8024,24764,-5772,19844,-7151,1725,7960,-5906,-30331,-28303,12857,22965,-12170,-4260,-25393,-15012,30225,
-28121,-19661,-15491,-19687,-4434,-27883,-7797,14637,-15173,25273,-16271,30572,10839,-25440,18047,7860,-32610,5257,30589,13002,-19830,-26664,15108,-15287,4682,-1982,-31464,3293,28176,-12627,-24314,11287,21165,-22418,-13974,11481,16219,-5162,11329,-28363,4981,-31107,-2972,-21593,32556,7686,-5117,20676,-3681,15505,-29586,16686,-1833,-2605,21022,29222,11204,7273,-13182,-12011,-13664,-27222,9867,-1740,19784,4724,4287,-4004,-22595,-19290,10209,-15624,7293,-19435,4619,-23533,1620,-7640,10739,1644,-27832,13278,-1234,-30779,-19501,-14129,30715,8987,5135,-4318,8479,-30589,-14017,21481,-17065,23686,-13798,-29366,-18240,-2787,15380,-7146,-28369,-21860,-24757,16130,-12665,-19734,-30670,-30446,21678,-12699,-17483,-7928,-22426,578,22977,-12917,-2252,22587,-20978,-18242,-6704,25231,-14181,10616,-11511,-24182,-8484,13706,1152,-22501,27072,-30096,14525,2865,-7825,-20450,-28936,-22541,30312,-23585,-18064,15875,-28185,638,1560,11139,4325,1226,-26362,12140,-22621,26397,3516,23703,-6461,31139,3550,5839,28979,-13341,24016,-12203,-9266,29713,24720,-27983,29326,27474,-11654,-11590,-8279,2855,32146,13445,19429,-3452,-20529,3907,-3939,-89,27471,18349,13808,11048,6286,3583,-7026,-276,-27886,-31344,968,24171,1071,-4316,-13731,-25260,26485,-12142,-31618,-32099,-22934,-5122,-32612,20733,9589,31353,-9170,1875,8124,-16742,-11184,25351,20018,-30650,6542,29071,4930,2457,8671,26513,28866,-17397,29729,31175,-22577,26663,23473,-31618,-4302,7465,10022,-27862,-32243,-8899,18516,-11461,-19382,19140,7881,-9703,7874,-924,-17150,-19409,9763,-27488,12669,18410,-29691,30953,32070,-8071,-19455,-4134,16139,7016,-11152,-2030,22784,1384,-4097,22123,-25251,26981,-15085,-27146,15790,26630,-25701,-2476,2003,-227,-32496,17077,-20108,-23247,11969,-8601,-773,4058,-11516,-17535,-27225,28396,-19913,29977,30844,18429,27429,-13422,-25713,-24614,19294,26274,4280,27423,5400,-24536,2533,-28555,-3133,21478,-13267,2699,2418,-769,23339,10220,3400,-11671,2519,-25748,20368,-14039,11289,27180,-9455,16490,16376,11611,15399,17988,-217
94,-31711,25162,-14481,30280,27045,-6168,-20440,32656,-161,16828,29838,-2389,19849,17020,7913,8938,21001,23388,-5713,-27641,-14419,-26699,-31833,-13396,25439,4835,-15340,-10726,-3364,11800,9005,-20803,-20514,-18704,-929,-4113,-13802,-28797,28612,16904,24318,-29427,-28770,-3800,-26596,-10943,31528,-4327,-24777,-18954,-27818,23907,-16446,-13456,23130,-7093,31782,3977,2802,7330,-6348,-23431,8085,15993,17840,20853,-23018,-387,15092,24150,14835,29208,-20550,-9605,-6066,-27343,25234,18293,3745,-30040,12889,31390,-16248,-18716,25214,-26929,-20959,13844,27853,-31699,-25399,-28130,-7395,-30762,12162,19946,14435,15420,-31193,-27976,-729,-15678,29187,-10395,-27832,2054,-8468,4716,31238,20688,-294,-16387,-26379,-24924,-22142,19727,-21595,15243,-487,15700,-17979,-28583,-31690,27648,-14630,1544,3718,4760,-5587,31600,8865,31219,20833,25888,-9,31761,17591,10520,-2442,24631,-32388,16983,-12159,-3045,-18139,-1118,26103,-15476,-23125,8320,-25188,-24987,-18871,1939,23750,-6447,-6923,31163,-4950,-27058,8041,-32755,13924,-10197,19619,14331,13529,-7794,-9080,4749,1236,-20386,6555,-31528,-28837,-4106,-6986,31697,-14182,-3065,-15055,-11166,-13916,14056,-24954,-28186,4094,8442,-5602,-7340,12426,-9521,-14200,15464,23761,-25564,29028,26727,18873,10179,-3245,-11924,-5585,-5765,-3886,22065,28101,-16201,8633,-8746,13573,26887,10972,-1346,1612,20709,-9234,-18251,-8833,-15721,22284,15173,27114,-13517,10808,-3127,-31229,-4443,-11500,24406,1052,-17227,-28989,2751,24553,3906,21080,2744,-23142,29202,-21315,-11305,12700,-1740,6827,9576,1190,-20008,-16416,-29246,18564,1289,-22563,-647,-24483,21054,6620,7511,1679,18243,-6630,-17678,11200,-2740,-25877,16915,-19065,30970,-8394,-20359,-20408,21497,27892,-7671,24833,9966,-24877,-22741,-2079,23863,28001,-30588,-21572,-16548,-22715,-13789,-27390,-23792,-12651,-25444,19737,-3834,30085,-6506,-30455,10728,-24106,-26601,20478,1131,18276,-23076,10669,-12072,-4946,14235,-14466,-18195,1870,-26340,-25093,-2029,16025,10191,-3599,1445,-15911,-15606,18841,9155,-2289,4045,
12602,-10672,-26600,-8280,-31057,-28370,-20368,-18887,-26118,26864,-10597,11610,-3180,-7619,24988,-6954,29853,-19111,12742,25443,18764,-28193,5157,2086,-5834,13897,-20056,-31047,-12500,15309,24095,6150,3064,28465,-11511,28253,-9380,-31663,21105,-9507,20869,-29332,17540,31273,12840,14870,-2123,5260,-27450,31804,17554,6935,1154,-4382,11191,-7500,-14776,25245,-22771,-1731,-6256,-28867,-15414,-16759,-29613,-27905,25404,5937,-5335,28491,11462,-32315,-14606,7185,-10180,-26565,-21152,-15238,31638,-2216,22594,-31813,13720,-19572,-31084,9596,17410,27011,28127,15376,3271,7694,-1439,16517,18560,-15786,-25465,95,27774,13167,20893,5477,15949,2350,-26960,20854,-25081,22385,9457,-23720,-19133,-16627,7032,-1881,14751,8797,682,-2087,-16293,-29206,13986,16760,32608,9295,-16190,-16730,-2278,13169,-8199,-3860,29156,31205,31611,16205,-8619,-8014,9777,17470,-9557,29832,27839,-12285,5039,-11768,18842,-22743,13651,-25695,8800,-1006,-13742,-25704,7530,19929,26793,-20144,1378,-3989,12964,25155,-5266,-9371,-19698,12543,-23649,8551,-19267,31617,24226,18520,-24007,-15061,7134,-8263,-3214,29737,19982,20117,-11637,-8134,24493,16672,-2080,-32388,16125,17488,-24284,10452,-27729,16144,12706,29297,-17953,31307,1195,-18652,2480,737,-10405,-5402,-17250,10212,25353,-27849,-14017,-17080,-18507,23538,14467,-6438,-18388,9082,14426,-3550,12994,-9233,21678,-20019,-20208,-8152,2319,-22294,13073,8774,-19375,11471,-26857,19740,-8653,-9192,-5318,21302,-22278,-23096,-15219,-13312,12609,-28313,5273,1373,27956,15188,-10707,-12111,-15445,22481,-778,-9019,16248,-7341,-19988,-18890,-22170,-6373,-20996,23967,-21211,25553,21206,-20640,20798,-23416,4237,2276,11353,22151,6209,30257,27460,-22560,8640,-19328,-23626,-9398,-24999,-17193,-32344,13727,-25463,29242,26388,-23430,18564,8920,11148,-9352,18765,-13501,15262,30067,13846,28642,1552,-26276,-1484,31487,-28019,-13638,-17200,-1332,-4147,3556,-9737,22919,-6893,-23527,29149,-28922,6193,15764,16763,-9822,-20443,13638,-4528,12278,17620,23365,3555,-30941,-4806,4331,-1134,-30151
,-7706,-22645,7832,4687,29710,25640,-1916,26641,-2329,7570,-26971,-1187,16551,4930,11609,3749,-27677,27692,6784,10717,-12710,11221,-9164,-11505,-5520,14652,-14468,765,-9268,16350,27643,-27798,22637,-14613,-9509,26767,17827,-1199,645,-25783,-12221,-11740,-2961,-18871,-4891,21817,-4301,-10479,-12473,-26284,28088,-20411,12689,11844,9,-8335,-26002,-23668,1204,13856,-31405,-562,-27258,-19263,-30282,-3690,-27772,14164,1276,-19556,-28214,-9086,-23812,-24140,-2322,-31742,-13674,-31335,30973,-31686,-27959,-1278,-29646,-29011,20527,-31754,-27703,28808,-1526,14519,-30893,-10453,13999,29267,-15251,24062,-9234,-12575,6614,-30191,1647,-2603,-18950,13078,18852,17466,-2309,-21035,-6014,-4217,-12838,-5239,15984,-30897,-14804,-32480,7437,3785,11281,-29877,31145,-30087,-27678,-9292,-2431,-28871,-4133,19245,31044,-23827,17324,16240,-26235,12046,-11055,-17094,-12236,10131,31191,20805,-27285,-24895,30333,6717,7005,-165,29669,-20059,-24426,23902,-5234,31353,-24442,-23100,21153,7557,17358,14787,-17400,406,-6982,17376,4707,6039,14422,13145,29364,-19507,-12133,6477,23362,-15644,-3037,8452,-32470,27025,4714,-19423,31310,25199,15521,7285,256,24017,-20440,26649,-5000,-17213,-19631,19474,-25751,-18526,27378,29471,2158,-4534,3199,13951,-11817,-11882,23615,-16350,4011,27802,2951,19675,9954,-30779,30859,9271,32108,-12990,25500,17045,-11906,299,30779,3745,2240,16883,6643,23719,-21643,-23651,8880,20459,7266,-24292,-19071,20317,-17944,-18830,1760,1499,10693,25433,18232,-16179,-17206,7792,30535,3331,16455,27807,-19486,-18914,-1099,-5727,-10972,18382,28340,-5656,3341,-8772,-7087,24381,-1251,32391,13469,-16810,21535,-12512,26244,8425,-7067,-24267,-8904,-7512,5246,4983,8648,21407,-17813,-21745,32349,22561,-8762,-26066,4561,-11853,13805,-31606,-15680,-8074,19549,-18450,-25231,23254,9517,-31684,20137,14203,8401,31534,10684,16415,-7850,-12515,-26923,-20970,7090,-32725,-9389,14106,9312,5276,-20158,14044,-8739,-25336,-15107,-21124,21649,19253,-10391,27090,-15080,-5395,-20994,-30065,-26855,-7023,-19803,-18239,-
13317,8790,32368,-23053,-26921,14972,-10664,27217,18003,-9780,-23112,-17107,-2900,31394,16335,2341,-20603,-26802,7495,-3307,-11520,12376,19106,12309,7009,32704,17114,2737,-27422,31008,-13518,25150,-20073,6196,-10470,2808,2110,-31673,32094,-4245,-12336,-8295,-27794,-6077,7778,30446,-12937,29564,26807,2826 diff --git a/tensorflow/lite/micro/integration_tests/seanet/sub/sub0_input1_int16.csv b/tensorflow/lite/micro/integration_tests/seanet/sub/sub0_input1_int16.csv new file mode 100644 index 0000000..51ef7eb --- /dev/null +++ b/tensorflow/lite/micro/integration_tests/seanet/sub/sub0_input1_int16.csv @@ -0,0 +1 @@ +4750,-7598,-31374,31304,-6396,-25027,-17421,-20184,26177,2412,-27054,31318,-3853,16572,-14755,-4278,20714,-21349,12318,27687,-12407,-25709,32194,25001,-20313,2109,-30997,-7846,-18124,26681,12966,-11944,-21967,23599,-10614,-28498,15962,-12110,4615,-28780,-13217,-15167,13114,31385,-8986,-4506,-9214,9573,32234,-28953,-29535,-22852,-30479,16229,-2815,10094,28924,23660,31851,-1160,-3542,-1059,22451,-1903 diff --git a/tensorflow/lite/micro/integration_tests/seanet/sub/sub1.tflite b/tensorflow/lite/micro/integration_tests/seanet/sub/sub1.tflite new file mode 100644 index 0000000..e2d654a Binary files /dev/null and b/tensorflow/lite/micro/integration_tests/seanet/sub/sub1.tflite differ diff --git a/tensorflow/lite/micro/integration_tests/seanet/sub/sub1_golden_int16.csv b/tensorflow/lite/micro/integration_tests/seanet/sub/sub1_golden_int16.csv new file mode 100644 index 0000000..59755ba --- /dev/null +++ b/tensorflow/lite/micro/integration_tests/seanet/sub/sub1_golden_int16.csv @@ -0,0 +1 @@ 
+4714,22514,1246,-1023,32767,32767,9912,-32768,-25525,24771,-32768,22860,-2742,30434,-1025,-20629,23761,32767,4755,-2956,2277,15788,-20806,1071,25506,25759,26637,7409,-1270,-32768,32767,-32768,32767,16961,32767,-6510,27823,-31856,32767,-32768,32767,-30832,-26718,-15207,4660,-32768,-8445,20872,-3143,-610,-23386,26396,30997,32767,1474,-529,32767,-32609,-10018,32767,-10287,-25229,-19236,-2739,-20717,54,16545,18004,-32768,7716,-6208,32767,-19797,13830,-6353,-14790,7509,6430,-11651,23220,-5096,-15258,-30141,-27620,-27005,32767,30166,5600,-22172,-5958,2383,32767,32767,-32768,16415,-26368,32767,-25390,26172,-23450,18681,-13902,32767,-16556,8572,-32768,32767,31745,-18536,-32768,-26873,-29610,27215,-24801,-32768,-13465,-3525,32767,-22746,-23616,-2100,9076,-19208,9182,-1615,17373,-24757,-32768,-13348,32767,26712,12987,-32768,32767,26289,30971,-18760,-32768,3548,-31331,18440,32767,-27890,26413,-16798,30628,15688,20297,-17631,-11677,-11784,24137,-32768,5957,-30908,19775,23787,-19471,-32768,-16518,22648,32767,32767,25317,-32768,32767,-23424,5190,-3151,11599,-3172,32767,18382,22888,-26122,-16738,-29488,-20477,32767,-7241,-32768,-25172,-8782,11165,-32768,27385,8191,27711,22426,-32768,14262,32767,-20032,19606,-29395,28200,-9203,7484,-3938,-4064,23104,3585,-28435,32767,7706,32767,24628,-31135,23690,-22223,-17974,-16029,30275,32767,-26318,3388,16821,-10239,-23615,-20945,19265,18257,-27625,30884,-26925,-10206,434,-5610,-32768,-32768,16724,14567,-32768,32767,-8126,-32768,14214,7844,10872,-5929,28985,4416,32767,-32768,3668,-18288,-5785,16694,-32768,-15624,-30242,24682,-32768,-22242,-26596,5298,-3735,-32768,27213,9921,-32768,32767,-32768,-24965,23528,8838,-15311,-3147,-9048,14850,-25870,-25233,19080,3625,-23091,-31946,9833,-17981,32767,-7959,25742,-29516,21358,25668,-23698,-25647,31846,12976,15263,-15865,-29380,-32768,20392,32767,30354,-24350,-32768,-3286,32767,-32768,-11607,-32768,-30773,-8857,4913,-9833,10537,32767,19651,-10337,-28994,8966,-32481,15040,30677,-32768,30902,13201,-24602,3
2767,-32768,-32768,-8804,32767,5308,-5303,32767,8032,17795,10875,32767,-21735,-25992,32767,-13353,27279,-30319,-32768,12457,3761,17381,32767,28921,4721,32767,6141,-19432,32767,32767,-5550,32767,-31447,18516,-32768,27584,-6987,-22343,-26163,32767,6971,7624,12923,-4553,26512,-32151,-32768,-32768,-32768,6163,32767,-32768,-16923,-2040,20157,32767,24830,-20826,21343,-23359,9977,21427,28186,11922,-32768,32767,28867,14059,26126,-32768,3012,-8808,-9965,11835,23265,-32768,11492,11149,-20786,-14002,-26362,6292,8631,32767,32767,9970,-12599,-26690,32767,-22095,-21197,4926,16180,-21646,-32768,17386,32767,32767,-28534,32767,14198,-32768,-19724,32767,11124,27097,-11242,-29664,17996,32767,29622,-18862,653,-30756,-32768,888,3992,32767,-11221,23419,-32768,-3801,17594,32767,18661,-17446,32767,-22115,25792,19440,7364,-27297,-15068,25933,32767,-26135,7957,32767,-32768,-28776,22916,10208,-10032,14189,32767,-24434,-20716,10918,19411,-475,-18043,-32768,-32768,-16206,-4772,23770,20311,-32176,183,-1678,-9900,-13645,32767,32241,24731,16247,12549,8806,32767,-23874,2371,-32768,26677,22065,9054,15653,23804,27942,9151,29007,17481,27127,6485,24362,19571,-16170,-32768,20709,-25982,-22820,18144,80,-28472,-3446,-8053,32767,31204,10142,-18578,-31989,-32768,-32768,19101,32767,-10354,-32768,-3619,-13674,-2837,22195,13102,-21464,13376,32767,-3841,-8408,-4492,-32768,15721,-32768,-4065,19591,32041,-32768,26829,32746,23092,19330,32767,30950,-18883,28421,25951,4956,16096,-1491,-32768,32767,-32768,29542,32767,28796,-29679,27026,24262,23861,32767,22386,5135,-4837,13970,-4203,-23982,-11115,-7069,20949,24959,-32768,-32768,32767,14811,-3912,14546,-1965,997,-32768,19154,5289,2350,-31651,-32768,13615,9883,-23203,-15892,-24355,-30595,-3242,-32584,-32768,24360,-32768,-1951,-28348,32767,-23651,15856,28052,26713,-15680,-32768,32767,32703,-13963,-32768,-9592,32767,-32768,22088,32767,-10321,-32768,-32768,-32768,-26232,-4063,-6311,-19322,32767,32767,1927,467,17331,-27130,-18448,-27695,21004,15073,31818,4711,-8289,-25184,-
32768,-27195,32767,-111,-7854,-19425,-5671,-29584,-14235,24714,32767,32310,32767,32767,17413,-32768,-2844,32767,32767,7684,29457,-32768,32767,11157,32767,-18432,-23890,-32768,-20967,-26339,20611,-32768,-13263,-32768,-940,32320,32559,32767,-7818,-24063,28068,9722,-10160,32767,-20155,-7521,-26824,-8964,-20295,26698,22717,16176,13649,32767,7206,-13183,-32768,-12982,32767,18669,-26256,12341,32767,-32768,32767,26013,17851,32767,12098,-12676,21541,10869,-32768,4414,-14262,-4790,-16591,21734,31450,-23027,32767,24823,4516,27076,-6134,28402,-19192,3405,25880,-11229,29656,-16571,-2722,31740,32767,32767,-32768,-3065,-2995,32767,-21175,10520,-32768,2684,-234,32767,-32275,-10045,-27970,17926,32767,13969,-19395,-32768,30226,3133,1575,20865,21526,28171,2678,-11490,-6447,-17489,24469,5759,25211,23763,-21144,-14950,-27220,25139,23866,26918,15580,-26978,-3971,32767,-8649,-25357,-32768,13301,-19066,-32768,6768,24527,-461,30166,2061,-4522,3124,12323,18104,-16081,-32768,21909,-5801,-2815,-17446,2607,5380,-30376,32767,-32768,5200,15217,25141,21023,-11961,-23016,9291,-31463,-6711,32767,-29871,-32768,-32768,6845,15332,-12188,-32768,-651,13462,-22070,-28733,-5062,28361,8239,32767,-29967,13438,31986,-6904,20457,-32768,1486,30074,-23264,27094,-22537,-16208,-20759,24161,-32768,3906,-11039,-32768,32767,-23504,-12707,-28154,-20610,1610,28187,-16893,8884,27244,24290,1599,-17006,-32174,-1363,-1874,1496,14995,16359,4814,11069,-15434,6792,-18272,13518,13662,31153,18940,15027,-2268,-32768,29381,28389,-19561,-6233,2994,9172,15372,28700,-32768,9236,-5129,26218,30388,10455,-32768,32767,24346,12313,825,32386,7182,-30591,-13341,-32768,32767,8966,-22247,1093,32767,14433,31042,13963,32767,-15608,-6379,17581,32767,-228,-32768,27208,-32768,-31893,-32768,-20577,-15604,32767,16734,13404,18416,12799,-30098,8980,-18888,-8352,-5895,-11491,-32768,8680,-32768,22652,21768,18299,-2604,13357,-32768,31576,21603,23695,-22171,32767,32767,32676,12283,24270,17791,-12062,12997,-3006,-21454,-20485,20831,32704,1753,-8520,12238
,-30562,-4355,-4255,11720,-29024,-32768,27830,24008,32767,-18579,-32768,31451,-13007,20820,-32768,32767,18805,-21623,28163,3283,-32768,32767,24211,-8513,-32768,-32484,-24729,-28486,-20993,-24240,4533,30709,26234,-11204,32767,-32768,10282,-31486,25568,30501,-3529,32767,-24407,-32768,-32768,14695,7302,16874,-29034,-16502,-6642,-32768,11595,-32768,7938,-20317,9063,12136,4224,7463,-32768,-10671,-16680,20117,-15824,32767,-31566,-32768,7161,-17260,32767,-20447,-23475,2612,32767,-32768,-32768,-5530,21588,25867,-32768,-32768,-8387,-9905,-22858,-18034,-32768,-12375,-25490,32767,24845,32767,32767,-4484,-2544,3124,-8256,-32768,-8627,16293,251,-32768,-20962,-29817,-2900,-2891,-22346,-23082,727,-16448,18095,32767,-32768,3356,-27803,30136,-32768,-32768,-32768,-26475,32767,-19562,9037,28297,-21938,-27610,8589,32767,21494,3146,3852,-23310,-18998,-2410,26085,7455,24937,-27323,-4308,-2497,-11727,-18794,-15565,32767,-5411,-32768,-23144,-30936,-32768,32767,11793,-20802,32767,273,-32768,32767,5720,25535,7661,-8624,24011,32767,32767,-22859,19087,-32768,-21192,27248,-14525,-32573,-14725,28347,12156,15740,20859,-32768,32767,12689,-32768,17183,-23844,29873,-32768,28221,22225,-32768,-32646,-31368,-28190,26958,-19107,-32353,-32768,-23951,32767,18258,32767,27797,15952,-1664,32767,32767,31844,20431,29665,31261,14172,-9515,32767,-32768,32767,32767,-22358,-29967,16389,15918,15457,30025,28257,15177,32767,16819,27315,18716,18130,-32768,-21323,-10901,-12261,26936,-28656,-16123,32767,-31549,-32768,-20111,-19407,-4839,6480,9310,32767,28809,32767,14679,-21737,-30781,-16011,28847,32767,-14341,32767,-24024,-17316,13508,32767,-32768,-3933,26394,32767,18665,23796,32767,-126,10711,-27549,-32766,-26846,-32768,832,7229,-32768,28735,-4254,-23749,24257,-22743,-18667,32767,-16250,-29599,-32768,32767,-4791,28265,-1179,-32768,-19456,30636,21169,-12650,-32768,21944,32767,32767,-13220,1694,-24496,-27763,-22857,-21473,-20285,14104,-18782,12851,32767,15953,-2630,-20854,7321,6048,-20863,-17174,13647,19627,-16459,-27530
,-14682,-32768,29277,-32768,-32768,29352,-32768,-7969,-2954,-12693,21697,32767,84,-8201,-7548,28816,8180,-32768,-18043,32767,-23312,-24789,-23565,21340,-19700,-32768,-1507,457,-25228,8587,-2385,-9772,-5046,32767,23170,14629,32767,-21536,32767,-32768,32767,31034,-28001,-10315,-2331,-16775,-31036,26601,-2216,32767,20033,28054,16725,-9461,26969,32767,9732,-6581,14352,-16482,42,13273,-3580,-720,-8560,32767,22828,15554,14889,-25172,9123,-24694,32767,21832,9401,-32768,32767,1124,24465,-28432,-32768,30882,29483,-21134,-15051,-14552,-666,32767,9181,32767,-32768,28077,-32768,-519,4493,32767,20827,-32768,-32768,32767,-32768,11469,-32768,32767,24810,4501,-32768,-13992,30346,-29538,-7781,-32768,-16521,29734,32767,-8265,22022,-32768,3402,10197,32767,-4099,-21206,12361,10854,-267,20284,1833 diff --git a/tensorflow/lite/micro/integration_tests/seanet/sub/sub1_input0_int16.csv b/tensorflow/lite/micro/integration_tests/seanet/sub/sub1_input0_int16.csv new file mode 100644 index 0000000..9b5e393 --- /dev/null +++ b/tensorflow/lite/micro/integration_tests/seanet/sub/sub1_input0_int16.csv @@ -0,0 +1 @@ 
+3553,16794,890,-819,25632,32336,7223,-30505,-18802,18573,-31776,16781,-2147,22443,-563,-15474,17805,29932,3394,-2326,1887,11726,-15314,940,18769,19415,19739,5329,-1005,-25487,32046,-31585,30380,12660,27788,-4904,20625,-23612,31648,-30346,31084,-22821,-19732,-11558,3363,-26208,-6087,15422,-2224,-473,-17556,19526,23268,32517,1273,-251,25915,-24038,-7549,31674,-7718,-18970,-14509,-2084,-15380,73,12280,13346,-30577,5848,-4778,27743,-14538,10428,-4571,-11248,5484,4573,-8474,17170,-3678,-11378,-22585,-20687,-19913,28237,22633,4312,-16725,-4197,1683,28164,27676,-28800,12032,-19675,26491,-18869,19447,-17515,13819,-10246,26705,-12373,6582,-31912,27440,23396,-13905,-32494,-19806,-22160,20376,-18482,-30140,-10149,-2433,26148,-16758,-17438,-1782,6995,-14391,6649,-1262,12746,-18619,-25925,-9894,28678,19849,9611,-27534,32523,19415,23009,-13766,-28265,2800,-23562,13622,25918,-20563,19547,-12390,22783,11533,14985,-12934,-8721,-8597,18112,-28450,4673,-23101,14535,17649,-14683,-26967,-12342,16904,28906,24396,18790,-30100,30285,-17595,3816,-2146,8767,-2203,29138,13579,16826,-19247,-12577,-21837,-15263,27380,-5516,-30981,-18767,-6362,8455,-25782,20626,6007,20443,16636,-27014,10429,31324,-14870,14629,-21921,20936,-6940,5675,-3088,-3073,17400,2801,-21010,25029,5631,28708,18535,-23295,17752,-16563,-13527,-12058,22730,32656,-19417,2665,12304,-7384,-17672,-15779,14283,13404,-20754,22947,-20001,-7565,286,-4234,-29793,-28535,12294,10797,-30493,26383,-5891,-26803,10476,5626,8294,-4530,21694,3269,28217,-30836,2922,-13642,-4131,12571,-28974,-11393,-22605,18188,-28608,-16746,-19988,3899,-2737,-26121,20222,7328,-31623,27746,-24805,-18633,17716,6712,-11240,-2580,-6842,10842,-19059,-18901,14320,2680,-17336,-23908,7512,-13414,29390,-5782,18945,-21735,15809,18922,-17702,-19281,23520,9615,11406,-11778,-21910,-31430,15093,27224,22441,-18175,-27189,-2314,26354,-30341,-8747,-25029,-22709,-6710,3773,-7339,7699,32523,14821,-7723,-21409,6818,-24400,11435,22747,-25482,22946,9640,-18504,30630,-28806,-29022,-6
592,25383,3863,-3844,28551,5932,13448,8228,30271,-16418,-19456,31752,-9741,20192,-22456,-26981,9128,2675,13131,30881,21706,3658,32316,4810,-14558,27973,26632,-4319,24327,-23456,13828,-29327,20498,-5259,-16722,-19374,26928,5142,5876,9753,-3231,19500,-24041,-29703,-28186,-25860,4704,30969,-30136,-12724,-1327,14979,29733,18628,-15723,16128,-17481,7241,15892,20796,8687,-31490,30599,21523,10429,19392,-29078,2346,-6714,-7466,9011,17452,-27925,8318,8194,-15688,-10224,-19742,4800,6407,29263,31453,7614,-9407,-19694,24764,-16668,-15542,3576,11859,-16174,-32666,12755,32747,32340,-21210,30330,10512,-26276,-14580,32170,8234,20373,-8237,-21925,13160,27949,21839,-13842,370,-22781,-27376,515,2847,32543,-8381,17610,-32655,-3049,13337,30883,13706,-13047,27800,-16652,19156,14516,5515,-20359,-11275,19218,30886,-19613,5876,30377,-24987,-21264,16823,7494,-7682,10763,24730,-18075,-15441,7982,14326,-162,-13460,-31480,-24371,-12284,-3314,17605,14934,-24013,-51,-1438,-7415,-10115,25240,23965,18354,12007,9446,6399,26078,-17573,1897,-30666,19623,16321,6527,11853,17605,20917,6794,21449,12889,20387,4800,18312,14713,-12257,-25128,15326,-19529,-17048,13320,-129,-21241,-2522,-5962,30404,23173,7462,-13727,-23971,-27497,-28208,14352,26343,-7945,-26745,-2908,-9980,-2228,16639,9735,-16125,9833,25569,-2887,-6084,-3201,-31553,11942,-30603,-3213,14525,23666,-28845,19928,24422,17224,14353,29249,22953,-13954,21002,19272,3890,12115,-951,-24737,25951,-25767,22193,25604,21553,-22114,19974,17937,17955,28206,16841,3966,-3820,10639,-3220,-18040,-8334,-5450,15407,18536,-30957,-25802,25045,10969,-3001,10933,-1619,695,-32377,14392,4096,1512,-23669,-29332,10336,7241,-17158,-11850,-18277,-22902,-2222,-24285,-27215,18278,-30767,-1214,-21195,31634,-17667,11617,20695,19842,-11630,-26538,27594,24289,-10483,-29476,-7297,32527,-24719,16576,25993,-7921,-25372,-30702,-24507,-19645,-2909,-4717,-14530,30480,29201,1407,523,13045,-20416,-13495,-20709,15450,11162,23500,3319,-6216,-18705,-29880,-20283,30452,-171,-5743,-14618,-4269,
-21824,-10465,18557,26100,23948,29251,32255,12847,-26844,-2136,31789,28345,5912,21902,-31137,30332,8087,28146,-13813,-17972,-26213,-15797,-19797,15299,-25863,-9841,-29055,-757,23973,24343,31358,-5868,-17714,21028,7396,-7801,30941,-15218,-5399,-20086,-6558,-15128,19730,16787,12234,10134,26135,5508,-10033,-27946,-9756,31229,13839,-19734,8999,29114,-32037,25028,19328,13232,28150,9110,-9593,15989,8292,-32349,3445,-10855,-3672,-12565,16380,23297,-17027,32620,18334,3237,20349,-4594,21320,-14145,2316,19505,-8451,21891,-12396,-2214,23441,29848,27696,-26040,-2319,-2287,27095,-15660,7675,-24523,2198,-42,31860,-24265,-7584,-21036,13545,31975,10515,-14458,-32271,22377,2524,1145,15709,16168,20753,2232,-8645,-4986,-13079,18029,4099,18724,17734,-15708,-11167,-20322,18627,17871,19883,11551,-19884,-2824,32159,-6676,-18983,-30666,10102,-14310,-31940,5020,18114,-468,22649,1507,-3191,2469,8955,13716,-12063,-25077,16251,-4506,-2284,-13033,1984,4038,-22651,27202,-28290,3975,11172,18669,15851,-8772,-16976,6680,-23529,-5210,24912,-22354,-24466,-24555,4950,11289,-8882,-26704,-309,10165,-16649,-21152,-3860,20927,6074,32192,-22498,9959,23856,-5107,15192,-25171,1018,22493,-17476,20123,-16578,-11934,-15296,17750,-25888,2694,-8018,-27147,29628,-17517,-9606,-21085,-15152,1171,21160,-12433,6395,20521,17992,1004,-12720,-24140,-1203,-1440,1157,11196,12141,3526,8152,-11386,4900,-13650,10264,10303,23351,13863,11081,-1902,-31774,21757,21250,-14581,-4786,2104,7020,11416,21542,-29628,6657,-3580,19427,22436,7724,-26546,29381,18080,9210,647,24073,5289,-22862,-9828,-29159,28223,6875,-16430,972,27231,10639,22896,10595,25649,-11504,-4768,12943,30432,22,-26610,20431,-30857,-23962,-24941,-15410,-11803,29791,12270,9790,13665,9572,-22374,6648,-14119,-6306,-4285,-8711,-29809,6662,-26618,17022,15968,13517,-2152,10144,-32356,23623,16064,17494,-16631,26765,31830,24502,9287,17849,13483,-9071,9489,-2297,-16159,-15439,15463,24390,1338,-6380,9053,-22841,-3138,-3324,8678,-21407,-28750,20877,17636,24432,-14045,-27970,23298
,-9568,15481,-30543,27784,14191,-16125,21142,2587,-28672,27524,17933,-6524,-30393,-24371,-18598,-21252,-15585,-18013,3337,22804,19442,-8237,26170,-30151,7855,-23308,19193,22470,-2733,30191,-17970,-26397,-25873,10921,5290,12437,-21423,-12313,-4769,-28416,8413,-29356,5818,-15312,6688,8847,2956,5511,-26360,-7911,-12455,14919,-11869,29165,-23656,-30437,5531,-12717,30742,-15459,-17582,1731,32318,-28909,-25401,-4136,15926,19132,-32218,-26553,-6068,-7231,-17236,-13187,-26319,-9399,-19036,24314,18308,27823,32246,-3305,-1931,2268,-6235,-27735,-6579,12082,387,-24680,-15447,-22435,-2265,-2366,-16436,-17300,657,-12264,13325,26549,-29916,2471,-20523,22578,-24759,-32072,-29002,-19896,25544,-14751,6539,21021,-16289,-20522,6357,30280,15913,2446,2711,-17401,-13943,-1662,19578,5313,18459,-20555,-3007,-1975,-8615,-14010,-11733,29529,-3837,-31060,-17054,-22888,-24849,29977,8688,-15673,26114,16,-31841,30129,4302,19043,5666,-6478,17787,30919,24865,-17065,14410,-28836,-15618,20048,-10919,-24463,-10762,20987,9165,11699,15383,-24806,31456,9419,-30967,12935,-17970,22478,-25287,20823,16486,-28948,-24492,-23397,-20943,20102,-14262,-24143,-27976,-17727,31275,13545,28399,20826,12034,-1476,28733,31884,23907,15094,22200,23254,10405,-7209,31277,-24819,26235,30602,-16864,-22071,12110,11664,11448,22165,20848,11254,30566,12554,20298,13876,13409,-27352,-16031,-8163,-8928,20185,-21175,-12240,29227,-23701,-25919,-15088,-14332,-3621,4678,6806,24742,21420,26144,11071,-16401,-22677,-12011,21289,26763,-10864,27158,-17930,-12848,10089,25809,-28348,-3016,19753,31319,13848,17915,31181,65,7737,-20615,-24607,-19786,-29198,735,5363,-29235,21267,-2975,-17708,18234,-16788,-14116,25370,-12189,-22222,-24917,27685,-3755,20997,-834,-31808,-14522,22750,15671,-9314,-32561,16289,31213,30692,-9683,1024,-18342,-20882,-16816,-16102,-14986,10481,-14128,9442,25547,11849,-1782,-15382,5231,4741,-15623,-12972,10100,14424,-12442,-20540,-10887,-25953,21758,-24560,-27287,21955,-28425,-5980,-1999,-9317,16311,29567,-43,-6319,-5419,2133
6,6205,-31619,-13578,28850,-17163,-18482,-17368,16030,-14885,-28931,-1213,154,-18841,6205,-1964,-7320,-3713,32758,17212,10833,28356,-15929,31775,-32028,28541,23236,-20687,-7916,-1841,-12702,-22905,19687,-1534,27109,14768,20760,12643,-7071,20253,28104,7026,-4661,10593,-12457,-28,9694,-2854,-581,-6329,32018,16957,11522,10996,-18636,6635,-18431,32151,16385,7157,-30352,25019,623,18413,-21283,-27235,22972,21803,-15859,-11013,-10861,-320,26297,6616,31478,-27433,20716,-28329,-574,3156,27339,15548,-30984,-25461,27613,-27647,8642,-32182,32670,18670,3483,-27798,-10654,22486,-22204,-5593,-28982,-12184,22117,26876,-6278,16586,-28542,2708,7734,31460,-2813,-15878,9016,8021,-386,14912,1320 diff --git a/tensorflow/lite/micro/integration_tests/seanet/sub/sub1_input1_int16.csv b/tensorflow/lite/micro/integration_tests/seanet/sub/sub1_input1_int16.csv new file mode 100644 index 0000000..adb9fbd --- /dev/null +++ b/tensorflow/lite/micro/integration_tests/seanet/sub/sub1_input1_int16.csv @@ -0,0 +1 @@ +5938,4498,-5114,-7875,-12086,14196,-21400,-6493,27371,18085,21698,-32443,-14478,-29225,27362,-15896,15805,-2570,-19942,-17116,26207,-3766,24027,19564,-29948,32634,-12477,-25519,-8130,-25654,-25785,-6149 diff --git a/tensorflow/lite/micro/integration_tests/seanet/sub/sub2.tflite b/tensorflow/lite/micro/integration_tests/seanet/sub/sub2.tflite new file mode 100644 index 0000000..d240d76 Binary files /dev/null and b/tensorflow/lite/micro/integration_tests/seanet/sub/sub2.tflite differ diff --git a/tensorflow/lite/micro/integration_tests/seanet/sub/sub2_golden_int16.csv b/tensorflow/lite/micro/integration_tests/seanet/sub/sub2_golden_int16.csv new file mode 100644 index 0000000..ccd81eb --- /dev/null +++ b/tensorflow/lite/micro/integration_tests/seanet/sub/sub2_golden_int16.csv @@ -0,0 +1 @@ 
+-32768,26616,32767,32767,-32768,3063,-32768,32767,31217,-32768,32767,32767,32767,-32768,-27140,27113,32767,18437,-11205,-7261,-18116,-32768,-30245,-32768,32767,-3790,7110,-21571,32767,-32768,32176,32767,32767,-1747,32767,-20148,30885,-14164,32767,-32768,6125,-26798,2667,-18807,-32768,32767,32767,5972,32767,-21130,32767,32767,-32768,32767,-7797,-32768,32767,24668,26112,3588,32767,-20004,-32768,-32768,-32768,32767,32767,-32768,32767,11234,-32768,-13668,32767,-18441,32767,-32768,32767,-32768,-32768,-32768,-21354,29525,-32768,-32768,9765,32767,-7545,-32768,31880,22837,32767,32767,32767,-32768,-18219,32767,32767,32767,32767,32767,32767,-32768,-10943,5794,-6916,-32768,-32768,32767,-32768,30534,-3224,28898,-32768,21558,-14765,-8014,23445,-32768,4693,32521,7341,32767,32767,32685,-32768,-32768,-32768,-12572,32767,-32768,-32768,-32768,5758,-22003,-32768,32767,-14486,32767,-12332,-32768,-32768,-18472,-32768,-7707,-32768,3557,32767,32767,32767,10880,-32768,9800,32767,-32768,32767,32767,-24522,32767,-21635,32767,32767,32767,16630,32767,-32768,32767,-32768,-32768,-11940,-24571,-31703,-32768,6155,5074,-32768,32767,32767,32767,32767,32767,-32768,-32768,31741,-32768,-32768,-14453,13318,-720,32767,-32768,32767,4719,-32768,-12936,-25128,-32768,-32768,1990,29186,32767,-32768,-24454,-32768,32767,-10954,30451,2685,32767,-32768,32767,-3361,-10470,-27932,32767,-32768,-32768,24472,-32768,-32768,32767,32767,32767,32767,-32768,32767,-11283,-32768,32767,-32768,32767,-14474,-32768,-32768,32767,8476,32767,-5702,32767,-32768,32767,31408,32767,32767,24896,32767,-27559,-32365,32767,-32768,32767,14939,30873,-32768,32767,32767,-31427,32767,6896,32767,-29998,-11961,-29179,-32768,-23542,-32768,15784,2452,-32768,5078,-6338,-32768,-32768,32767,-21442,-16093,32767,-32768,-24052,3437,-32768,-32768,32767,32767,-32768,-32768,-32768,8600,-20067,-32768,32767,31536,-32768,-26771,32767,32767,19138,-32768,-13936,-23718,-32768,2736,-9662,1057,12563,32767,32767,30717,-32768,-32768,32767,32767,25486,-32768,32767,-3
2768,-32768,27634,-32768,-26369,-32768,-32768,-32768,-28161,32767,32767,-32768,-32768,12837,32767,-7212,32767,27931,-32768,-22799,14396,32767,31209,28620,-19637,32767,-32768,19209,32767,-11753,32767,32767,30271,-32768,-32768,-14523,-23336,-32768,21780,32767,8197,-32768,-32768,32767,17579,27592,32767,32767,32767,-569,32767,32767,-32768,32767,-32768,-32768,32767,-22513,-3215,-27258,32767,32767,-32768,16342,-32768,-32768,32767,-32768,-32768,32767,-32768,15421,531,-17766,4598,-32768,-32768,-32768,32767,-32768,-32768,23381,32767,-32768,-32768,-32768,12766,32767,32767,32767,26489,-32768,32767,-20005,32767,-20394,32767,-32768,32767,11006,-32768,-32624,29606,32767,-8329,-32768,-32768,32767,-32768,32767,32767,-32768,-10248,32767,29832,-32768,32767,32767,32767,32767,-32768,32767,32767,-14758,-32768,19070,-32768,-10514,-32768,32767,-32768,-32768,-32768,-32768,-32768,-32768,-5905,-15321,32767,32767,32767,25735,-32768,-7294,-6914,27543,9189,-32768,11128,-32768,-16578,32767,-32768,-32768,32767,32767,-32768,-32447,10353,-26429,-11702,32767,-32768,-32768,-19751,-32768,-8618,32767,32767,32767,-32768,-20774,32767,19136,-22040,32767,25615,-11857,32767,-32768,-32768,-16959,32767,-32768,1964,-10156,32767,-3225,32767,32767,32767,17566,-32622,-20019,-22793,-2017,-19230,-19716,32767,-10900,-32768,-32768,-32768,32767,32767,32767,-32768,-32768,32767,-32768,-30179,16881,32767,-23167,-32768,-32768,-23158,32767,-32768,32767,-24106,-4981,-30010,32767,-32332,-32768,-32768,-32768,-32768,10421,-32768,32767,32767,32767,-32768,-3233,23066,-17750,10462,-32768,-32680,-32768,-32768,23160,-32768,-32768,19876,27670,-32768,-27898,-32768,26763,19133,32767,-32768,-32768,-32768,-32768,32767,10295,7326,-10850,-32768,-32768,-32768,-32768,-32768,-32768,25242,2997,-32768,-30556,-32768,-32768,-32768,32767,-32768,7981,-32768,32767,-13791,-8729,-15753,32767,-32768,32767,32767,32767,32767,-589,-30507,-32768,23006,32767,-32768,-6760,32767,-731,32767,-32768,32767,-32768,-32768,-32768,-6175,32767,-13727,25677,32767,-327
68,-5676,32093,32375,-32768,32767,-24699,-19516,-32768,-32768,31977,32767,32767,32767,-32768,-32768,-7508,-31342,32767,-7180,32767,16040,-23043,-13104,-32768,-32768,31446,-20626,3534,16503,32767,-3437,-32768,32767,-5543,-1487,32767,-32768,11912,-32768,6224,32767,14022,23919,-13524,32767,-32768,-32768,9876,-2921,-32768,32767,32767,32767,32767,32767,32767,-32768,-32768,32767,32767,-32768,28187,-32768,-32768,-32768,29818,-32768,26517,-32768,27758,-32768,-32768,-12645,-32768,-11340,32767,24694,-32768,-11990,-32768,-32768,32767,-32768,-32768,32767,32767,32767,27227,32767,30126,-32768,11620,-32768,-32768,-32768,32767,-10817,-32768,-32768,-32768,32767,27612,32767,-20864,-8611,-32768,32767,5052,-32768,530,32767,-6715,9133,-105,-32768,12935,32767,-14744,-4578,-32768,-20617,-32768,-32768,-32768,32767,32767,32767,32767,32767,-32768,6567,-3216,32767,32767,32767,18775,-32768,-32768,32767,6188,15910,-25446,27575,-26866,32767,-32768,-24585,32767,32767,26351,32767,32767,-32768,-26798,32767,32767,-32768,32767,-32768,-32768,32767,32767,-13837,-14134,-11482,32767,-8854,32767,-31713,-32768,-32768,9407,-32768,13715,-32768,-9309,-15361,-23771,32767,-32768,-25492,-2296,19865,-92,3767,3637,9833,-32768,29654,32767,7073,-32768,32767,-32768,6980,12805,-32768,-32768,-32768,-14047,-32768,32767,32767,-32768,32767,32767,32024,-32768,32767,-16422,32767,-27005,-32768,-32768,32767,32767,32767,-2246,32767,16104,1616,32767,791,-17074,13833,-13334,-32768,32767,32767,-32768,-14277,-25680,-32768,32767,-28399,32767,32767,-32768,32767,32767,32767,-29380,856,-32768,31039,-32768,-32768,32767,-4205,-25215,-32768,-22029,32767,17463,26040,-32768,32767,23791,-32768,-32768,-29486,32767,-4604,32767,32767,-32768,32767,-5584,-808,24954,32767,32767,-32768,-32768,29197,32767,-12533,-10425,-9243,-32768,32767,-32768,24910,-32768,10679,-32768,-32768,-32563,32767,-32768,-32768,-32768,-10718,-11397,32767,32767,32767,-32568,20500,32767,-24293,-32768,20117,-29766,-26869,8828,32767,32216,32767,32767,-10624,7249,-2353,32767,-3
2768,22522,-32768,-32768,-32768,32767,-32768,27244,-22679,4437,32767,-32768,-32768,32767,-25928,-32768,-32768,32767,32767,32767,17283,-32768,32767,21967,32767,-9305,32767,32767,32767,16331,-32768,-32768,32767,-15207,32767,-28291,-32768,32767,32767,32767,32767,-32768,-32768,32767,-24274,-6541,-32768,-32768,17198,32767,-24964,15489,4263,-32768,-32768,-5049,32767,22224,-32768,32767,-32768,32767,32767,-32768,32767,-3201,-32768,-32768,32767,32767,-32768,32767,1980,32767,-32768,32767,-32768,26848,2828,32767,32767,-27699,32767,32767,-31228,12592,-32768,-32768,-32768,-32768,32767,-32768,-17595,31799,32767,32767,32767,5492,32767,-32768,-32768,32767,32767,-30295,32767,-32768,-1796,-32768,-6219,32767,-985,-32768,-32768,32767,-32768,32767,32767,-32768,6166,32767,-7446,-24384,-8528,-72,21272,-8311,8892,633,-17452,12280,-15129,-32768,32767,32767,-32768,-32768,-17147,2960,-9540,32767,-19590,-32768,32767,32767,32767,-978,20694,-32768,-32768,32767,32767,-1694,-32768,32767,-32768,32767,-32768,-32768,-32768,-32768,-32768,-17905,13419,23009,32072,17872,18242,-32768,-32768,32767,-32768,22077,5233,-32768,-32768,7774,8215,32767,-32768,24260,-32768,32767,32767,-4187,-32768,32767,16819,30770,16323,5709,22388,32767,-22329,32767,-32768,-32768,25090,-32768,32767,3507,-32768,-9810,-17921,-31111,-32768,32767,-32768,32767,32767,-32768,32767,-9991,-25448,32767,96,12997,-2289,32767,32767,32767,-32768,-17840,-32768,-32768,12473,-32768,32767,32767,-32768,-32768,-32768,-14084,32767,-32768,32767,-32768,-27102,-32768,-32768,32767,-32768,32767,-32768,-32768,-9204,-11744,32767,20650,32767,-32768,-19542,32767,-32768,16339,-32768,-10410,15120,-32768,-32768,7453,-9219,32767,20086,32767,-27215,-32768,32767,-32768,15959,-32768,-32768,32767,32767,15120,-32768,-32768,-26805,-18290,32767,32767,-32768,-32768,6692,-32768,25306,-11600,-3486,14565,-32768,-20606,-24626,-32768,-14472,32767,31196,-23350,32767,-8139,24890,-26915,32767,-26992,-32768,-32768,-14424,32767,-32768,-32768,-32768,-32768,29252,-32768,-32768,-3276
8,-32768,32767,-32768,32083,-32768,22665,-28973,32767,-27005,-32768,24276,32767,15603,-32768,-32768,32767,-32768,-27211,-32768,-32768,14541,17119,16248,24838,8175,-32768,-32768,32767,-32768,-29792,4432,-32768,-32768,32767,-19921,-12372,32767,16864,32767,32767,32767,32767,-32768,32767,29030,-32768,31760,-32768,-32768,32767,-32768,-6541,32767,-29843,-32768,32767,19672,32767,32767,32767,32767,-16267,32767,21269,-28506,-11625,11709,-9049,32767,32767,-32768,32767,-32768,32767,13113,32767,-32768,-31175,-32768,-32768,723,-1158,22569,-32768,-32768,32767,20154,-10499,32767,-10127,-10143,-7524,32767,-32768,32767,32767,12527,-13009,32767,32767,16673,-20159,-24165,-32768,-16348,-32768,-9387,967,9411,-32768,-6410,32767,-32768,-32768,32767,32767,32767,-27049,32719,32767,32767,-32768,-32768,-32768,32767,-32768,32767,-22545,-32768,32767,-29827,32767,-32768,-21429,-20269,-27841,-14661,27918,10003,-528,-25617,7759,-11615,32767,32767,32767,-32768,32767,6016,-17387,26093,32767,32767,20699,11317,24186,-32768,-32768,-27491,29387,32767,5641,-2325,14420,-8904,32767,32767,32767,-32768,-32768,32767,-17430,-10362,-17703,32767,32767,-21828,-32768,-14697,-20521,32767,32767,14460,-32768,-16114,-22237,30241,32767,-32768,-140,-32768,-32768,-32768,21694,-32768,-32768,-22831,32767,-1688,32767,-32768,-32768,32767,32767,-32768,32767,32767,32767,6708,-32768,22337,-14595,-32768,-32768,32767,-32768,32767,32767,-32768,-32768,-26896,-3495,25293,17658,-11083,-32768,-8860,32767,-32768,-32768,4051,-32768,-32768,-8901,-32768,-10576,32767,32767,32767,-32768,-32768,32767,-32768,-32768,32767,-32768,22677,26400,-25400,-32768,-32768,-11587,-17101,-32768,-26482,-32768,-32768,-32768,12089,32565,-32768,19121,-32768,-32768,32767,-30704,17046,-28941,32767,32767,32767,-27446,32767,32767,29532,-32768,6242,-166,-10832,17531,-8216,32767,-32768,-3125,-32768,-32768,32767,-2285,32767,32767,6552,32767,-32768,32767,32767,32767,-32768,11343,-32768,25109,-25644,32767,32767,3950,-32768,-32768,-31227,21228,-32768,32767,25577,-32768,
-12410,-32768,11000,32767,-32768,-32768,-19782,-31925,-26870,-17658,7127,-31137,13464,32767,-29628,32767,32767,30840,1081,-32768,8291,-28416,23714,-32768,21199,20270,-25035,-27495,32767,32767,-32768,32052,32767,25377,-32768,32767,9295,32767,-32768,-16750,-32768,-32768,32767,25522,-32768,32767,-32768,32767,23334,14212,32767,32767,10917,-15340,-29246,32767,-32768,-32768,32767,-32768,19171,15749,-32768,-19596,-18376,-32768,-27439,-32768,32767,32767,22476,32767,4310,3954,-32281,20785,-32768,-32768,-32768,-32768,26931,32767,31827,-32768,5712,12405,-23317,32767,10382,10763,-32768,-11647,32767,32767,32767,32767,32767,-32768,32767,1145,32767,32767,32767,32767,-32768,-32768,32767,-32768,-32768,-32768,24489,-31718,-32768,-32768,32767,-28204,-32768,32767,31563,-32768,32767,-6959,6502,-20199,-25824,-11169,32767,20155,-2982,32767,3108,2476,32767,1223,10084,-32768,32767,32767,32767,32767,32767,571,32767,32767,32767,17524,-32768,32767,-32768,-12852,5668,-32768,22998,32767,32767,-3521,32767,-10371,32767,32767,-32768,28019,14444,29401,-32768,-32768,-12946,32767,-32768,32767,-27827,-22893,10393,-3452,-13801,-32768,-32768,32767,-6191,32767,28274,12471,-32768,-32278,-32768,-32768,-29649,32767,-5673,32767,32767,-27733,11275,-606,-21106,-32768,21216,-32768,7726,32767,32767,809,32767,6576,-32768,-32408,-32768,32767,-771,-32768,32767,-32768,-9841,32767,-32768,-32768,32767,32767,-32768,-32768,-32768,-32768,32767,-32768,32767,10197,19020,-32768,24380,32767,31954,-32768,32767,32767,-26517,-32768,32767,19022,-23367,32767,14811,-32768,-32768,15046,27643,26002,32767,32767,32767,32767,32767,-5657,32767,29146,30776,5637,32767,32767,20620,-11723,32767,32767,-31945,32767,32767,-32768,-10826,3338,-32768,-32768,32767,-32768,32767,32767,22711,-32768,32767,-20808,-22319,-24860,-32768,17314,32767,32767,30217,32767,32767,13428,32767,-32768,-3249,-32768,-14512,25655,14219,8795,-32768,-32768,32767,-32768,32767,7987,-257,-9531,28488,-32768,-32768,-32768,32767,-32768,32767,-32768,31949,-24592,-32768,-32768,30
092,-9556,-32768,-32768,32767,29251,28616,32767,32767,-32768,32767,6594,32767,-32768,32767,-24122,32767,32767,-510,28043,-728,3105,-32768,31581,28910,-32768,-22695,-32768,-3936,1293,-32768,-25143,32767,8309,30007,-32768,32767,-32768,-32768,32767,-32768,32767,1361,32767,-32768,-32768,-32768,-32768,32767,-32768,32767,-32768,32767,-6589,32767,-32768,-13353,32767,32767,32767,32767,-32306,-3452,-32768,-32768,-32768,-32768,3886,32767,26700,-32768,28622,1131,32767,32767,-32768,32767,32767,-32768,-32768,4230,32767,32767,14824,4197,716,8465,-32768,-32768,-32768,32767,32767,32767,27843,-32768,32767,-32768,32767,32767,-32768,-10912,-32768,-32768,-21001,-32768,-32768,32767,-32768,22258,28503,32767,32767,26452,-32768,-28018,-32768,15205,-32768,-32768,-32768,32767,26412,32767,32767,32767,-32768,17763,-27543,-28248,32767,32767,-32768,32767,32767,-32768,15102,-27404,8056,32767,-32768,-16862,32767,-25935,-19016,32767,32767,-10759,12562,-32768,22684,-29062,-28561,-27378,-27232,5889,-16901,29778,32767,32767,-3223,-32768,-32768,32767,-32768,32767,-12157,32767,20639,31545,32767,-32768,-12857,-30494,-32768,32767,25107,-32768,14158,32767,-32768,32767,32767,-32223,32767,-32768,-26319,32767,32767,1458,-1615,9552,23030,20779,27281,-32768,-32768,26961,-17764,32767,32767,32767,813,-32768,-18330,29363,-3954,-32768,-252,32767,32767,3367,-32768,-32768,32767,-32768,-20605,-3721,26861,-13270,3008,26385,28844,-874,32767,16276,-5469,-32768,32767,32767,-32768,32767,-32768,7832,32767,8221,-32768,32767,-7030,32767,-14897,-32768,32767,32767,-32768,-32768,32767,28908,-32768,7426,-32768,-3238,-32768,-7946,32767,-18553,22082,32767,3197,-32768,32767,-32768,-11106,-30217,-20597,32767,32767,32767,-32768,32767,4305,32767,-32768,4531,32767,32767,-32768,-32768,-32768,-32768,-32768,22803,-32768,32767,-32768,-32768,32767,-32768,-30422,-32768,32767,32767,-81,-32768,28810,32767,-12417,6562,32767,32767,-32095,-32768,-1911,32767,32767,-6432,32767,32767,32767,16312,-32768,-32768,32767,32767,-8608,-32768,-30429,32767,-32
768,32767,-32768,-32768,32767,31274,-6664,32767,-32553,32767,-32768,-28061,32767,32767,-32768,-32768,-32768,32767,28973,19850,-31471,32767,-32768,-16550,22636,-32768,32767,32767,-32768,32767,32767,-32768,-988,32767,9856,-32768,-32768,-15739,32767,18608,-32768,-19589,32767,32767,32767,-1800,-32768,3903,32767,-4046,32767,-32768,8621,-13641,-85,-32768,12650,-32768,-5637,-32768,-32768,32767,32767,32767,-32768,32767,-32768,-32768,32767,-32768,32767,-28487,-5117,-8805,-32768,13622,-31259,16350,-32768,-426,13272,-8455,-12356,-24292,-31894,32767,32767,-32768,-24291,-32768,-4913,32767,28780,18041,-32768,1838,32767,32767,-32768,22354,32767,14186,-32768,-16132,-12365,32767,13729,-32768,10624,15911,15423,32767,32767,32767,-32768,-9774,32767,-32768,32767,32767,-32768,-32768,32767,30868,-32768,-32768,-32768,-28243,18722,32767,10473,32767,32767,-12186,-32768,-1961,-32768,-32768,32767,-18822,32767,32767,-32768,32767,-5952,-5329,-32768,-32768,30408,32767,32767,-27545,32767,-32768,5954,-32768,32767,-32768,32767,32767,4707,-31152,-32768,-8536,32767,32767,-32768,30013,32767,32767,16214,-32768,-32768,-32768,11642,-31393,-32768,32767,-19189,32767,-32768,32767,-32768,32767,-32768,32767,-32768,-16714,32767,29940,-10222,-29374,7855,32767,-32768,-9814,30503,32767,-3862,-22221,25814,32767,26671,-32768,32767,-22685,32767,-32768,-32768,-28745,32767,32767,32767,-32768,-32768,-24985,32295,-1250,-16927,32767,32767,-21801,32767,32767,-20396,32767,-7858,-29861,14671,32767,32198,32767,-1112,-32768,-32768,32767,-32768,32767,-32768,-32768,32767,26344,32767,-8731,-26774,-25759,-23746,-32768,1399,-32768,-32518,32767,-32768,32767,10376,-32768,32767,-15099,32767,-14047,-32768,32767,-18348,-32768,-32768,-32768,-25413,-32768,-32768,-32768,-32768,-32768,-32768,5094,-9722,-13810,-17546,744,-32768,-32768,26204,24462,-32768,32767,-32768,6620,9692,-32768,32767,-28616,20566,-32768,-32768,-32154,-13737,-26220,-10670,32767,20878,-16508,-1646,32767,32767,32767,-29557,-31275,-32768,-2722,32767,32767,-18299,32767,-1704
6,32767,32767,32767,-18826,-26433,-17518,32767,32767,32767,-32768,3927,32767,32767,32767,-32768,-29371,24188,32767,-1636,-32768,-32768,32767,-32768,-32768,8143,-32768,32767,-18081,32767,32767,32767,-32768,32767,-24702,-32768,-17275,32767,32767,-5628,-32768,-32768,-32768,-32768,-22770,14300,32767,-32768,-9262,18746,-2846,32767,-17983,32767,-32768,32767,32767,-32768,-32768,-32768,22673,11688,32767,-32768,-32768,-32768,32767,32767,-31697,22218,-5241,-10005,-27812,32767,32767,-32768,32767,-32768,-32768,32767,-32768,-32768,32767,-23373,-12672,32767,32767,32604,29139,32767,32767,-32768,-32768,-32768,5205,32767,32767,6291,-32768,32767,-32768,-32768,32767,32767,-18698,32767,-25670,5947,249,-2913,32767,-32768,32767,-32768,-31826,-32768,-32768,-23425,-32768,-32768,-727,-32768,-7314,32140,19427,-32768,23693,32767,32767,-3586,-32768,32767,32767,6543,-26837,-14992,-17429,-32232,-17565,-9766,-15299,32096,12526,-19828,-26891,32767,32767,-15911,-32421,12444,2103,-32768,-32768,-32768,5416,32767,-32768,-32768,-32768,32767,-32768,-29618,-31727,32767,32767,3890,2513,-32768,32767,-32768,32767,23809,32767,-32768,-32315,32767,14409,-27657,-7795 diff --git a/tensorflow/lite/micro/integration_tests/seanet/sub/sub2_input0_int16.csv b/tensorflow/lite/micro/integration_tests/seanet/sub/sub2_input0_int16.csv new file mode 100644 index 0000000..8ca4c0c --- /dev/null +++ b/tensorflow/lite/micro/integration_tests/seanet/sub/sub2_input0_int16.csv @@ -0,0 +1 @@ 
+-31478,9956,19905,22025,-24962,1111,-32511,25205,11721,-19700,16794,21406,15807,-27577,-10217,10240,12874,6955,-4248,-2722,-6856,-22939,-11406,-28821,24492,-1417,2717,-8119,20308,-20958,12138,30504,17735,-703,13770,-7557,11617,-5363,24219,-17130,2291,-10062,972,-7087,-26680,23219,19324,2295,29657,-7915,27515,26572,-23441,29282,-2970,-22173,12974,9278,9858,1336,27018,-7524,-30062,-27195,-19683,30294,14672,-22578,12568,4182,-19503,-5160,19015,-6921,25987,-32331,32573,-16900,-14387,-24742,-7979,11122,-14148,-24112,3622,27593,-2875,-12347,11934,8590,22112,25172,28819,-15571,-6801,27329,13069,25545,12396,27445,19189,-26608,-4085,2154,-2610,-20612,-19536,22621,-14151,11499,-1229,10911,-13806,8128,-5586,-3005,8763,-27032,1724,12261,2712,15678,18046,12271,-32677,-26659,-30163,-4682,18576,-29071,-24851,-20263,2174,-8309,-27228,13036,-5455,29256,-4665,-25834,-22554,-6918,-13870,-2846,-30483,1363,24544,29523,14482,4093,-19827,3722,26939,-27287,15498,20420,-9199,26074,-8085,24269,29348,15806,6222,15161,-12889,15935,-13700,-31117,-4498,-9225,-11945,-16246,2335,1931,-22972,32073,17725,12916,15706,17878,-14256,-23754,11889,-18855,-21002,-5424,5050,-283,13035,-19351,29717,1816,-18896,-4908,-9471,-23275,-13300,708,10996,21296,-32516,-9181,-24248,20978,-4095,11468,992,24026,-31421,19663,-1300,-3928,-10545,22097,-13305,-29767,9150,-24439,-29473,30200,18761,32495,28994,-18494,18649,-4287,-25006,24046,-28785,29172,-5412,-16451,-25640,13383,3155,27257,-2121,13784,-17088,19851,11850,30392,25602,9363,29634,-10353,-12203,32249,-17670,25934,5659,11590,-31512,15747,24563,-11768,19217,2545,29623,-11259,-4485,-11006,-30007,-8871,-29957,5941,891,-26147,1930,-2358,-17887,-21048,31717,-8032,-6085,12771,-31185,-9035,1252,-23278,-16867,31607,24882,-13558,-19233,-15143,3278,-7499,-19172,15139,11824,-15114,-10051,13243,31198,7169,-16816,-5228,-8944,-31684,1050,-3607,380,4772,25060,30379,11507,-17464,-17874,16444,25522,9617,-21814,30722,-22509,-22910,10402,-19768,-9864,-22684,-15291,-13202,-10611,2657
3,15449,-12881,-32552,4801,27216,-2701,30809,10478,-30228,-8544,5393,17071,11775,10782,-7417,20218,-15029,7223,23680,-4378,28492,22430,11421,-21030,-29474,-5464,-8724,-13825,8133,18656,3053,-16079,-25188,18707,6634,10346,23950,26581,23601,-233,13490,25093,-12706,12462,-26714,-15046,16458,-8454,-1256,-10240,20196,30132,-13849,6149,-23489,-13763,29620,-20699,-17536,29455,-12983,5749,172,-6662,1738,-22635,-18085,-26031,32700,-23854,-20343,8768,13603,-12580,-20732,-32456,4844,22060,19750,31636,9907,-16018,17610,-7479,13327,-7657,22288,-25468,13578,4130,-29313,-12218,11074,17214,-3158,-32045,-22892,31488,-20638,14967,30514,-19336,-3882,27335,11233,-19283,21226,27400,27206,31907,-25048,23814,20380,-5542,-19968,7206,-32717,-3944,-20496,22785,-25405,-23013,-29834,-23605,-27294,-25051,-2247,-5743,19523,13196,31644,9648,-30497,-2732,-2629,10332,3475,-31017,4165,-26973,-6184,18066,-32606,-32029,31194,19859,-24739,-12155,3844,-9925,-4353,20301,-19906,-19253,-7377,-31922,-3291,26156,18116,27012,-31902,-7847,19444,7168,-8294,18225,9596,-4475,26835,-21981,-16919,-6323,28630,-29200,701,-3810,19368,-1208,25033,25504,22200,6609,-12215,-7536,-8549,-764,-7181,-7367,13966,-4143,-21338,-17876,-27992,22588,28741,16174,-13321,-12684,29845,-31392,-11320,6368,27685,-8656,-30247,-12622,-8740,15521,-23745,15483,-9099,-1833,-11325,23345,-12106,-24021,-26790,-17452,-21064,3959,-29159,24795,30182,17981,-23765,-1255,8696,-6694,3921,-25707,-12312,-24982,-25034,8728,-26512,-26015,7516,10425,-14353,-10478,-19252,10062,7151,23662,-25338,-20268,-16166,-27384,20261,3863,2799,-4035,-32748,-27156,-12811,-18653,-25034,-16089,9514,1103,-12958,-11474,-24557,-28494,-31948,29261,-19468,3050,-15956,22826,-5220,-3274,-5968,26511,-20025,26822,23310,30729,28249,-234,-11448,-15078,8692,20278,-21375,-2587,26199,-260,22954,-25100,15319,-13957,-22357,-21019,-2351,22555,-5137,9674,22093,-30545,-2087,12087,12130,-29383,28149,-9278,-7374,-19662,-31472,12025,18935,25371,12458,-17316,-20325,-2779,-11831,15437,-2726,17334,6
038,-8700,-4897,-18522,-23038,11827,-7782,1309,6224,21537,-1309,-13365,25792,-2057,-596,28855,-29553,4481,-29869,2378,26929,5277,9034,-5095,29314,-17885,-27958,3754,-1150,-23372,25226,30354,17570,25692,21126,16436,-22762,-18777,29163,21147,-27638,10617,-17899,-23595,-16676,11232,-17447,9972,-23992,10436,-21280,-23499,-4799,-21844,-4217,29888,9297,-25833,-4460,-19354,-24825,23664,-18632,-14744,22892,24701,25538,10209,12418,11331,-15315,4348,-16888,-25433,-24201,27031,-4019,-22195,-18368,-14865,30969,10381,19588,-7802,-3283,-28429,25422,1886,-20061,193,20283,-2481,3380,-86,-13404,4876,12459,-5581,-1693,-18989,-7759,-28134,-16377,-24571,26396,28550,27336,31806,21368,-16713,2431,-1202,15101,25545,24419,7095,-12554,-29323,30268,2313,5996,-9569,10409,-10054,17861,-12962,-9267,32258,31927,9863,26353,18819,-23544,-10062,26914,26391,-27530,32389,-23335,-28118,14168,24782,-5237,-5305,-4363,23838,-3367,14177,-11965,-25421,-13883,3523,-28667,5148,-25232,-3456,-5825,-8980,30811,-31054,-9570,-903,7493,-58,1405,1376,3665,-18826,11166,15731,2641,-14329,19354,-32127,2586,4819,-19550,-32159,-21042,-5240,-32402,13856,12474,-28529,23916,22123,12081,-14400,28601,-6218,31170,-10134,-18339,-17184,12856,24749,15244,-835,19059,6033,629,32542,280,-6366,5245,-4985,-17275,22314,12777,-20491,-5405,-9612,-31964,16218,-10628,16704,30416,-24102,21153,23791,30908,-11088,294,-27608,11675,-25358,-29917,32340,-1591,-9467,-16582,-8298,32627,6587,9769,-13767,23638,8967,-14507,-28187,-11129,17704,-1770,15371,32671,-20812,28827,-2111,-287,9372,16264,28166,-16442,-27564,10945,20897,-4700,-3958,-3446,-12460,22680,-14461,9331,-18556,4035,-22963,-22633,-12187,25955,-15417,-14986,-20170,-4076,-4279,16039,17726,15604,-12232,7749,23010,-9113,-30804,7606,-11144,-10150,3271,14847,12122,16018,23890,-3965,2701,-895,21859,-18016,8445,-20507,-12459,-14032,30973,-14278,10265,-8560,1674,22915,-32332,-24200,20247,-9791,-21962,-29904,32436,32380,26804,6541,-29148,18810,8209,21319,-3482,16943,23401,27038,6114,-19097,-17613
,18823,-5734,17989,-10608,-19773,28606,23104,23472,32396,-20435,-18413,32252,-9162,-2419,-19677,-21239,6508,29198,-9365,5815,1648,-13680,-22907,-1944,21656,8367,-28430,31669,-13890,24655,15302,-14026,16923,-1222,-21890,-29458,24074,20795,-17797,17121,707,26992,-29896,31930,-22324,10129,1016,23539,31411,-10422,22430,18045,-11690,4775,-31504,-15198,-26834,-27681,14593,-16091,-6585,11927,28871,19318,15232,2045,25615,-24242,-18541,16648,19475,-11359,25315,-32353,-723,-32562,-2377,30579,-417,-18588,-13737,20594,-17078,20606,17733,-16454,2265,30453,-2826,-9149,-3195,-67,8022,-3147,3331,247,-6589,4596,-5664,-24914,29572,24015,-31151,-17974,-6481,1119,-3633,24675,-7402,-29005,23316,23482,30869,-380,7794,-29382,-30087,14276,15513,-683,-18247,19745,-21571,23040,-31872,-21792,-30907,-18370,-30146,-6748,5065,8671,12036,6767,6902,-32193,-27852,18584,-30707,8301,1927,-14585,-18513,2929,3132,21407,-28637,9111,-20518,17683,28602,-1620,-16891,16471,6331,11524,6162,2122,8403,25885,-8422,24117,-27398,-15174,9412,-13799,24422,1344,-24696,-3680,-6783,-11688,-17119,28978,-29918,29015,30313,-27840,24932,-3761,-9518,18407,-16,4838,-888,21030,17905,21798,-14537,-6728,-21630,-22830,4657,-17811,30729,22110,-24065,-30346,-23212,-5267,22602,-28269,26819,-22630,-10225,-23665,-17883,27040,-26998,32552,-19558,-27086,-3413,-4371,15974,7714,26856,-18135,-7334,19636,-23837,6117,-13138,-3903,5652,-13118,-22689,2825,-3482,12931,7595,17367,-10265,-28008,24623,-25453,5958,-28640,-32343,27482,22371,5670,-24521,-30762,-10028,-6831,16275,21248,-29900,-27659,2525,-23604,9538,-4383,-1321,5483,-12610,-7763,-9233,-29202,-5456,25553,11770,-8749,16349,-3052,9306,-10111,14247,-10105,-20267,-27465,-5376,17234,-30657,-16977,-13025,-30809,10941,-19547,-32672,-25222,-26287,20506,-14911,12034,-31981,8527,-10919,16670,-10127,-13028,9106,25338,5910,-32748,-30770,29811,-29184,-10222,-16281,-25066,5418,6441,6151,9322,3089,-17261,-29254,29560,-24010,-11243,1638,-24030,-15071,20249,-7459,-4673,26063,6347,17179,30512,25602,32
483,-29535,25392,10956,-20353,11899,-30264,-27364,32076,-15834,-2419,31726,-11208,-14328,23053,7410,32500,28714,15188,31240,-6160,29362,8008,-10703,-4409,4428,-3424,26905,19500,-18377,15843,-25226,13365,4911,12457,-21458,-11690,-17179,-21942,224,-431,8442,-22195,-27614,29904,7619,-3958,12412,-3812,-3766,-2785,32387,-20794,20082,23317,4718,-4929,22540,13179,6255,-7567,-9112,-22888,-6122,-14551,-3545,414,3583,-25361,-2446,31726,-15767,-29389,18844,27797,26797,-10158,12341,16534,20563,-25491,-32590,-19653,27844,-20363,24831,-8458,-26723,19662,-11182,13219,-21948,-8044,-7648,-10482,-5488,10516,3742,-148,-9581,2942,-4402,19719,31406,26366,-12733,23832,2214,-6527,9851,21400,27334,7773,4299,9132,-25857,-32337,-10359,11059,12469,2080,-846,5396,-3357,22073,21933,29884,-32017,-25079,27541,-6500,-3848,-6627,18050,16822,-8251,-20916,-5563,-7673,31073,25018,5479,-14642,-6039,-8363,11411,23015,-32214,-99,-14494,-29974,-23187,8113,-24648,-28077,-8591,16452,-665,18821,-24711,-15233,19176,17482,-23397,16272,23560,16514,2473,-22974,8355,-5446,-21368,-14533,30319,-27135,12786,23849,-16866,-29829,-10160,-1360,9478,6651,-4155,-14166,-3302,25278,-29975,-26243,1492,-14605,-29701,-3321,-17526,-3924,22035,22744,26664,-22090,-18809,27945,-13590,-17615,25750,-24354,8567,9909,-9529,-24239,-20237,-4312,-6479,-12894,-9980,-31695,-29320,-19599,4571,12215,-28113,7195,-22421,-27530,31507,-11515,6389,-10826,24750,16837,13221,-10308,17680,15580,11059,-12863,2299,-55,-4026,6576,-3071,19470,-29707,-1132,-29747,-25125,22961,-844,16647,29059,2490,21938,-18910,22640,30586,31198,-21827,4287,-29977,9487,-9591,25208,14234,1491,-14194,-14360,-11775,8017,-14908,25405,9657,-25670,-4647,-17638,4180,14596,-15571,-14554,-7462,-11983,-10088,-6676,2706,-11725,5049,25927,-11165,13256,31299,11614,389,-24906,3162,-10653,8875,-24720,7919,7622,-9448,-10294,29594,14362,-13075,12033,19242,9531,-32637,28968,3441,15429,-30971,-6280,-15616,-18610,15596,9568,-20963,16063,-23689,25690,8791,5365,29907,25440,4149,-5739,-11028,314
55,-25271,-25156,24767,-15092,7158,5926,-18288,-7377,-6889,-12465,-10266,-29150,29511,23708,8419,25227,1630,1446,-12104,7788,-29945,-15744,-20594,-22833,10143,19364,11944,-31695,2193,4688,-8800,27942,3854,4049,-16296,-4338,32522,30057,17042,13922,18185,-31296,17094,473,27311,17421,24530,18894,-15758,-20976,28292,-14585,-15679,-27958,9173,-11939,-26155,-17976,30984,-10549,-32498,27836,11825,-22620,28858,-2611,2404,-7552,-9752,-4190,15283,7562,-1104,27603,1214,973,14429,413,3762,-27650,29886,25060,30414,27519,13465,224,21408,17810,23491,6610,-21736,22939,-20587,-4804,2093,-20718,8595,23840,16576,-1284,16759,-3890,26685,15100,-24229,10524,5474,11092,-25649,-24028,-4893,15664,-15357,30839,-10430,-8627,3895,-1288,-5217,-18573,-25388,15050,-2344,31462,10672,4713,-21273,-12124,-18353,-23606,-11182,28574,-2179,19500,16036,-10435,4254,-234,-7886,-26690,7921,-19051,2876,20355,28922,264,29989,2448,-16494,-12170,-16548,20627,-268,-12491,25424,-23326,-3652,24321,-13789,-31483,26013,31685,-20045,-17384,-14475,-27305,23255,-32048,25134,3826,7194,-24724,9110,13611,11981,-16348,30956,29118,-9938,-26853,26355,7158,-8812,23312,5588,-29458,-23976,5705,10435,9798,28843,24006,22681,21417,29547,-2087,29112,10961,11611,2106,27075,29964,7795,-4363,25602,28693,-12033,29985,27594,-22616,-4041,1231,-27475,-23067,29789,-25725,23220,30976,8518,-28553,29985,-7794,-8425,-9336,-26068,6511,31192,18507,11309,25750,18300,5034,18872,-32096,-1175,-17841,-5506,9595,5316,3320,-22590,-21093,29429,-19423,27166,3011,-127,-3601,10728,-29006,-19770,-20574,25338,-32612,17489,-23844,11959,-9238,-28158,-15475,11262,-3584,-15981,-25235,14871,10987,10800,22125,27253,-31000,27985,2493,26635,-16407,28053,-9089,16610,16465,-222,10520,-252,1191,-32428,11919,10911,-27546,-8566,-24501,-1527,490,-21646,-9410,22345,3130,11322,-24770,26724,-13465,-17513,22808,-18054,26412,484,21272,-14965,-23751,-25716,-15717,18615,-19072,29953,-29935,30558,-2452,21355,-23013,-4972,30239,19254,17636,30770,-12137,-1337,-27783,-25254,-17612,-
29276,1448,18466,10028,-23634,10799,373,18937,21096,-19957,27791,14510,-26391,-15166,1579,32652,20504,5552,1599,293,3164,-30742,-29550,-19520,19346,13200,21651,10468,-18597,24880,-22135,32280,31732,-23985,-4084,-23442,-22790,-7850,-28346,-23427,13930,-18708,8375,10672,13194,21644,9930,-15758,-10560,-18624,5736,-14732,-24041,-20181,16995,9952,21469,30242,15908,-31969,6636,-10312,-10663,23586,19784,-28569,18953,20568,-26793,5718,-10351,2981,19325,-25180,-6327,23889,-9719,-7170,15615,28523,-4074,4702,-14361,8549,-10939,-10683,-10243,-10208,2176,-6345,11143,23707,16551,-1172,-29594,-15314,27965,-17784,29891,-4575,17411,7799,11803,29968,-17425,-4817,-11450,-28842,28675,9412,-29943,5330,13689,-23249,13569,14162,-12127,13598,-22622,-9865,13115,20502,500,-603,3550,8694,7762,10260,-32416,-24623,10149,-6682,32069,13632,22206,259,-29313,-6874,11045,-1526,-32493,-118,20513,15499,1235,-14601,-19629,28771,-15300,-7693,-1352,10121,-5024,1137,9868,10844,-368,13460,6070,-2048,-28424,32352,17081,-16963,23701,-20425,2891,15711,3062,-15246,18442,-2682,25571,-5622,-29800,19325,17442,-28566,-27968,30810,10847,-12644,2837,-31455,-1254,-17299,-3034,23131,-7012,8338,24010,1209,-12292,13437,-17118,-4180,-11310,-7698,14461,23495,19508,-31256,16552,1578,31954,-23435,1692,29282,19192,-28295,-16615,-21188,-29477,-26417,8616,-21305,25727,-23670,-27857,18070,-14088,-11394,-26740,25035,18152,-43,-12813,10821,22020,-4624,2414,15073,32522,-12047,-32283,-758,20064,13867,-2428,13748,22896,17883,6152,-30086,-32275,12777,23758,-3209,-19513,-11429,20109,-29752,31734,-17941,-29898,13275,11798,-2517,25330,-12240,25117,-21167,-10598,27486,12832,-17376,-24703,-27716,14676,10865,7449,-11818,28828,-14768,-6198,8531,-14030,30637,25484,-15908,24339,30549,-28602,-367,16811,3743,-15515,-29501,-5870,29301,7010,-16430,-7316,25356,17792,24782,-704,-18313,1477,27501,-1493,22253,-19573,3249,-5157,-51,-14039,4778,-14252,-2068,-17626,-22553,14203,30846,18430,-23935,23673,-12415,-22988,27998,-32292,31679,-10689,-1929,-3263
,-28765,5067,-11794,6117,-13789,-150,4948,-3150,-4667,-9140,-11977,15522,30384,-28233,-9105,-23009,-1796,23840,10842,6743,-26960,643,16275,13652,-32427,8354,20460,5376,-18608,-6046,-4653,26343,5202,-30657,3946,5952,5811,17425,29657,24326,-25369,-3684,32448,-30976,12550,23276,-16500,-24724,14443,11647,-31388,-13774,-14988,-10662,7040,19265,3975,19286,25771,-4535,-31921,-720,-20393,-21066,26811,-7126,14412,31500,-24068,19185,-2277,-1975,-13398,-13001,11437,15606,30790,-10330,12431,-23626,2288,-18389,25880,-24003,29515,20975,1773,-11747,-20598,-3255,24789,26148,-14638,11296,27976,32017,6136,-23909,-22117,-20800,4390,-11788,-31593,12692,-7235,17112,-13086,28469,-31922,18804,-22529,14683,-18337,-6235,29542,11215,-3835,-11087,2956,14642,-18071,-3735,11471,13463,-1464,-8334,9695,13038,10066,-27006,30769,-8553,31148,-31754,-14595,-10775,15554,22728,22264,-27754,-25391,-9368,12161,-487,-6311,27245,14109,-8230,21726,20765,-7661,31082,-2914,-11269,5521,19439,12088,24585,-424,-24562,-29222,28414,-13308,12382,-22546,-27274,26231,9928,19832,-3292,-10053,-9711,-8943,-18237,550,-24220,-12170,22943,-18277,25129,3906,-31762,22098,-5714,23821,-5326,-14482,18371,-6908,-13938,-27344,-14705,-9508,-27942,-20289,-20079,-12480,-32243,-26129,1942,-3677,-5201,-6585,249,-19554,-31175,9872,9176,-18994,26003,-31521,2451,3649,-25970,31040,-10794,7768,-25209,-31322,-12039,-5175,-9837,-4016,31987,7889,-6256,-665,13534,25595,25027,-11148,-11726,-27851,-1034,12661,29829,-6896,17504,-6382,26609,20275,22927,-7049,-9971,-6577,18694,27490,15549,-19509,1429,32150,18559,15814,-23441,-11044,9136,22493,-667,-27621,-21141,17427,-29808,-20623,3088,-13212,12939,-6786,21241,19754,25771,-20076,31512,-9233,-20532,-6466,18213,26369,-2163,-16359,-29171,-14177,-20458,-8550,5419,25951,-17006,-3487,7091,-1027,20931,-6805,24421,-31509,17397,12924,-27596,-19552,-14119,8530,4362,22359,-16006,-25755,-27722,27056,29351,-11886,8313,-1963,-3808,-10448,27477,16398,-15463,15520,-19705,-21077,23193,-31330,-20966,18105,-8836,-480
9,16121,13121,12263,10911,12405,23588,-15564,-32430,-30424,1937,29351,31010,2347,-14319,25711,-17612,-28381,26555,26320,-7023,20251,-9608,2188,101,-1050,24071,-21268,31475,-30009,-11918,-19193,-13879,-8831,-31533,-15852,-313,-12636,-2772,12068,7310,-31968,8885,26401,29860,-1365,-20080,27845,31284,2422,-10079,-5682,-6546,-12153,-6562,-3717,-5742,12107,4695,-7435,-10112,14054,26035,-6032,-12231,4649,805,-16139,-18629,-15867,2012,27622,-30660,-22756,-13514,26179,-32693,-11148,-11873,14889,26202,1425,951,-31212,16280,-30488,15479,8901,23975,-25515,-12157,26616,5409,-10348,-2887 diff --git a/tensorflow/lite/micro/integration_tests/seanet/sub/sub2_input1_int16.csv b/tensorflow/lite/micro/integration_tests/seanet/sub/sub2_input1_int16.csv new file mode 100644 index 0000000..f54c655 --- /dev/null +++ b/tensorflow/lite/micro/integration_tests/seanet/sub/sub2_input1_int16.csv @@ -0,0 +1 @@ +-32566,-29020,-17250,9250,6288,-24922,17214,-14602,-6775,5753,-18997,-11879,13570,15001,-10735,31526,28829,16289,-23086,4151,-29845,2570,-24706,24366,-29224,4655,27948,-7778,10486,-3815,28603,26556 diff --git a/tensorflow/lite/micro/integration_tests/seanet/sub/sub3.tflite b/tensorflow/lite/micro/integration_tests/seanet/sub/sub3.tflite new file mode 100644 index 0000000..7b33d4e Binary files /dev/null and b/tensorflow/lite/micro/integration_tests/seanet/sub/sub3.tflite differ diff --git a/tensorflow/lite/micro/integration_tests/seanet/sub/sub3_golden_int16.csv b/tensorflow/lite/micro/integration_tests/seanet/sub/sub3_golden_int16.csv new file mode 100644 index 0000000..10270f6 --- /dev/null +++ b/tensorflow/lite/micro/integration_tests/seanet/sub/sub3_golden_int16.csv @@ -0,0 +1 @@ 
+-17286,32767,32767,-20935,27868,-8232,21461,32069,528,-32768,5114,-17058,-32768,32767,-22295,17558,-32768,-32768,-21075,32767,20540,18162,15798,32767,-17797,8780,27829,-32768,32767,30494,26232,23806,23288,9772,4705,-14998,32767,32767,-32768,-32768,32456,-6455,-14297,19732,32767,-20472,-12807,32767,-16157,8103,-17956,-13444,-32768,-2791,-32768,-15873,-10560,9529,-31489,25717,6209,27356,-19755,32767,30540,1821,23659,32767,-2074,-32768,14803,32767,-32768,21077,-330,-26377,-4712,29565,-13566,5852,-6300,-4121,-29669,20489,-32768,3289,32767,13695,28572,32767,-18952,30277,-27124,-22579,11299,15903,3368,29119,-22289,-32768,28281,-19482,-32768,-29494,-29042,-13139,9113,25967,29200,32767,1471,32767,32767,18563,-32768,32767,-10321,-3333,-14500,-32768,16390,-29179,19688,3153,-2705,-5492,-20356,-31739,15866,-32768,-16728,-30704,2686,27595,-32768,-26929,19198,32767,-10808,-32768,32767,-23780,32767,-14929,-3037,29856,7291,30698,11077,-12027,8119,18196,-32309,-32768,-13493,-14419,30658,-28122,-32768,-30573,-23081,7814,-32768,-5302,-7118,-7308,-19343,12685,-32323,-7187,-30930,-7632,-32768,32767,30079,-4529,-920,-6168,-28488,-22647,18278,32767,24966,32767,32767,-23466,-5811,-32314,11570,1297,31096,792,32767,-2157,-32768,-5469,25028,-17067,-21724,32767,19622,3539,17157,9840,9109,-21471,-15439,-11163,-19192,32767,6203,-5452,-4247,22353,30643,-9339,-27550,-29680,-29375,8755,6305,-32768,-29005,19229,32767,-20001,-29846,-11511,32767,26733,32767,-32768,-32768,-261,32767,11580,-12212,-28355,2253,14118,32767,-32768,12552,5499,-9534,22913,21722,32767,-32768,-7725,-16435,10659,99,2694,27316,23565,4625,-32768,201,-29829,26599,10316,13503,-25314,8015,-31715,22022,-12391,-3163,339,-30261,-32768,26549,-4587,-27485,-32768,-899,32767,-32768,-32768,-12809,-26940,-4623,6688,-22358,-32768,-11223,-32768,-20826,-32768,1069,25437,-19671,-21143,7141,-29990,24985,-1719,-32768,32767,-11780,12952,24728,-13200,3251,-32768,32767,-32768,32767,-15723,703,-9791,-21849,-32768,-7572,-29390,31203,-7963,499,-451,-151
33,-31648,18341,-22798,8279,-19815,32767,-12290,-32768,28672,-29720,9431,-17420,32767,12745,6250,-32768,6469,-29241,1161,-14802,32767,-14020,-16952,-32768,-17699,-7704,-31039,-17193,1756,-363,-5908,-5609,-25136,-3272,-3581,-32768,25004,-22410,24140,2990,2165,32767,32767,32767,-17104,-9374,-10464,5518,-10900,8271,28338,-30320,-13799,32357,32767,-10950,-32768,9889,-5724,14600,5125,-16219,-8827,16418,-3453,32767,6546,31778,-9454,-2070,-24468,-694,-32768,25759,-17301,22050,769,20661,25086,15861,-22164,-10965,-32768,32363,32767,3486,-22268,-30574,-14713,-32768,28873,9790,32767,9103,13297,-20583,29453,13412,-20247,-16643,-21696,32767,18617,-13918,25869,-22618,3099,309,-23049,546,3289,19182,-15078,-1197,4328,32767,-32768,-9115,7782,32767,6486,22628,-3889,6321,32767,-28576,-24100,32767,-8385,-19473,10353,24035,-32603,-10053,2243,-2261,13093,19231,-15788,32767,21908,17129,19763,-32768,-11952,32767,-11144,-16642,-18243,-20909,-9981,-32768,20657,13820,-19470,-9959,-32768,32767,23374,-23693,28276,-16374,-134,19819,-19579,29153,-13461,13101,-6978,-24946,32767,27999,4217,-31033,-13968,-20728,-13919,-3307,-24292,-21556,32767,10678,30337,32767,32767,13310,16084,-27017,32767,-32146,-26504,-32768,-23915,-22948,-9449,-24178,-32768,-3329,-16186,9932,-32768,15172,-8180,-23532,-30320,-32768,-29883,-23855,-32768,-7484,32767,32767,-19693,32767,-13990,-8179,11317,10694,-15382,25770,31890,-22067,28640,29395,-1892,9152,27772,6966,-32021,-26193,9078,29851,-531,32767,27528,354,-32768,-1361,27531,32767,14669,32767,-7533,24469,-32768,14193,-4936,-28194,24409,32767,-25378,32767,32767,-14871,-32768,32767,-32768,17128,2464,-5916,-26485,-14343,-27619,-9206,2104,32767,-32768,-4467,6053,2360,-11078,-4576,199,31404,12732,-32768,7626,32767,-3622,-32768,-32768,-32768,-32768,32767,-28247,-32768,-8272,-919,32767,31378,-6060,27718,-1912,15157,17189,-10389,24603,-2999,6498,32767,29504,24796,23163,21294,-32768,4739,-17510,115,5042,-8476,11942,-3423,-2866,-23557,2621,24436,6088,32767,-32768,24879,-32768,-12354,
-1536,30227,1599,32767,7219,13975,-32768,-22903,-18169,-28710,-29774,-1983,-20329,-32768,-16162,-32768,-32768,-10639,-17406,6154,14952,27091,10105,-7791,12458,5400,32459,-27993,32767,-32768,16466,12048,-24182,-536,-30057,5034,31997,-32768,32767,32767,22491,3134,-23386,32767,32426,-10294,-32768,10228,14609,-5172,-22661,27320,-9999,27498,-24755,-5395,-32768,25620,-26779,5889,27360,23111,-10036,-13303,9133,15471,-18510,-32261,24065,-185,-32768,-9408,12846,-2815,-32768,-20000,-10114,-32768,26085,32767,32767,455,32767,-32768,4360,-12032,-7423,29842,-27003,-1155,6534,18704,32767,10852,29151,4442,31514,24869,16424,-18057,-20623,-6977,32767,-14402,-30439,-29941,-12684,-32768,27773,27861,1970,11268,11598,-32768,4684,-31607,-32768,27198,-512,814,-32768,-32768,25729,-32768,-32768,19305,-12416,-2744,14905,15300,9019,32767,4448,32322,32767,-27152,-31636,-16178,-28332,-20798,32767,-28887,30250,-23298,13721,32767,-7761,7179,-29783,32767,-32768,26884,-15427,32767,-32768,-24730,-32768,-22593,28142,-24590,32767,22289,-32768,28151,26053,-10304,-27841,513,-29370,-29167,32767,6969,15574,-32768,-23857,-19037,-4009,32767,31463,3739,23330,-31822,-498,-30254,32767,-32768,32421,-28017,31922,16786,-32768,-19839,12195,-2982,28319,11089,3937,17630,-5275,-27948,-25596,-17003,11792,-29714,-32768,-14811,-21615,-32768,19163,-4950,1725,-16877,-10727,-32768,-10081,-26370,9458,5014,21446,23550,17932,-32768,32301,32692,-20373,-13486,-21577,32767,27551,-32768,4992,-19046,18289,5784,30804,32767,32767,10951,19114,7828,1014,-30020,5158,-9006,24748,-19217,-32768,-24968,-9912,-32768,21820,8732,-6810,-32768,-32768,-32336,-22374,17499,31626,24167,32767,19879,-32768,26903,12825,-27856,26602,-27158,-24308,32767,18289,22326,26306,32767,32767,9224,21699,-10417,2185,32107,6305,-18399,-19401,594,-10989,-4258,-20744,32767,13225,32767,-16427,-26605,-32174,31368,16283,32767,-29498,32767,-27130,29439,32767,-32768,25550,3882,-30926,11435,-32768,32767,-32768,32767,-32768,-29874,-5800,-28104,7740,-22283,-12447,765,-16919,4
694,-19002,-31343,-6664,31951,-30022,32767,-25347,27457,12676,-32768,-14303,-23699,-21652,-8336,32750,32767,-30929,-10766,32767,-32768,-4287,-32768,-32768,23288,22331,-32768,-18852,-26605,7880,14507,32767,4957,25176,9302,6986,13666,-21978,-24370,-24231,-32768,-32768,-1846,25160,-29374,32095,22480,16771,18799,-5941,3570,32767,32767,-23817,13717,-31462,-15281,17346,28100,15630,19870,-30443,-32768,6207,19552,21804,22658,4495,23567,-32768,-11288,32767,-113,-2209,-3309,32767,4717,-30347,-12962,32211,-16079,-13773,-16479,27054,-23172,-28656,-27623,32767,16676,-24356,-31337,1178,11035,3139,563,32767,8914,9783,31919,-16633,11425,-16443,21209,32134,12737,12986,-23868,-7446,7062,22650,-18863,10448,-24899,-13576,21016,-11934,-4705,-6083,-939,14372,8757,32767,-21495,30594,20055,27948,-27759,32767,32767,15343,25197,-32768,-32768,32767,-6452,32767,27786,-20827,4405,27803,21971,-24189,32767,-9958,32767,-25170,14987,12663,32767,-29170,18762,-32768,-19660,-8581,-19090,-1322,15488,-32768,19103,-7425,-22126,-32768,-30024,16213,16861,11317,-32768,-30988,23462,-7663,-32768,-29562,-479,-28805,11940,31331,-32768,-27093,24401,20688,6307,4816,-32768,8318,-32015,10994,27473,-16479,-23321,-30740,32767,29075,-27764,-29648,27912,-32768,16007,-27019,-9808,-17976,-30650,5400,-25971,28537,-21428,32767,-32768,-32768,-18642,32767,-29390,-3091,-14617,1783,2237,32767,17511,5627,14146,5053,-32768,15595,32767,-32768,4112,-1749,23892,-32768,-615,-19945,32767,-17136,-32242,32767,-32768,-32768,18536,27424,22280,-20305,-2723,17686,-32768,28139,19556,32436,-32768,2165,-28867,-1903,3059,-8594,-7086,-27271,-21155,-32768,-32768,-30638,1033,25068,-27297,-32768,3734,-23449,-32768,17460,32767,18292,-12918,-15416,32767,-28459,32767,32767,10248,16245,27311,16362,23842,-32768,24694,-4507,21606,4512,-32768,30131,16004,-3740,29733,-12448,32767,32767,-30912,32767,26041,4071,32767,-31349,261,-6879,23394,-23454,-32768,3406,2816,-20628,-32768,-7556,18430,-1736,6373,32767,-23646,-28163,27508,-368,-16942,17106,4376,32767,255
18,29341,-32768,-2914,-32768,-16376,16781,-32768,27987,3845,7888,-8822,8617,32767,31810,32731,14401,15491,11516,17381,20774,31421,-30802,-20612,-11837,-32768,5077,7418,-1420,-24123,-29894,-10672,-32768,29161,20662,-32768,-32768,-32768,-32768,32767,9957,11369,-32768,17399,25174,-20491,2378,-19288,-30314,28628,32767,-12315,7623,32767,25573,-11867,21348,-6702,32767,-3705,25056,-32768,14808,-4031,-15739,32767,32767,-11813,23500,-32768,14957,32767,21071,-32768,15526,27040,-16384,-16527,32767,-32768,-32768,-8486,-21975,-8379,-32768,15586,-16933,32767,-24769,32767,17052,-16656,11874,-32768,-16712,-12384,-62,-13448,10281,22824,-28479,-6397,-31510,-14462,-19003,32346,3444,24541,20602,32767,-10839,32767,3764,4689,-14261,-20860,-9511,32767,-2952,-30965,10242,13422,-3784,-20837,1962,-5651,-2370,-10489,24449,-32768,31527,-995,30954,-20672,-32768,32767,32767,-10490,13846,-14915,-32768,15191,30866,-4174,-27876,-32768,32767,-12851,-11196,-20239,-7557,14240,-25992,-30783,-24932,105,-32768,24710,31301,32767,15246,21235,-17986,-32768,-19938,-21697,29690,25934,-6882,-32768,-18560,-5922,9309,-10613,32767,-4102,-18729,19202,22127,-32768,-30806,-32768,25052,-12557,-32768,1742,32190,-26223,-12871,-20189,-32768,20421,-32768,-32768,17278,-15884,-12201,-32768,-2262,-13188,32767,29708,2295,27508,-3693,-32768,-28632,11036,9796,32164,23130,8211,9938,-28426,-5470,3355,-32768,28862,-32768,-29255,5898,4555,-4452,-29360,26325,30471,18022,-23050,-10733,-5416,8571,1389,22220,-12051,26107,32767,21172,-1318,-24106,12671,-32768,-32768,18009,30448,6326,32767,-25243,-32768,-989,-30747,18735,-32768,7175,5554,32767,3256,1438,5825,-7887,-32768,-32768,-21684,3312,32767,-23883,32140,25666,-32768,25725,-32768,-27363,2276,-28788,24021,16653,-15542,-32768,-32768,-11434,-5537,-32768,-8564,-13374,32767,-15994,17031,32767,-32565,-2839,-9642,31549,15027,1140,-32768,32767,19058,-32768,-32768,31995,32767,6615,-7063,32162,32767,14343,22673,-32768,-4758,-32768,19434,-32768,32767,8330,5639,22668,-17430,17403,-32768,-25322,
27265,-5348,13555,31313,-2168,24011,-23472,15604,-27369,-2526,30369,22495,32767,-30402,-9962,-32768,23998,32679,21297,-25591,32767,28056,32767,-32768,-16541,16607,31020,32767,-30496,-13875,29551,15497,-7883,-20166,-27455,30064,25946,-16273,-32768,30864,28803,-15808,-5265,29711,4485,32767,32767,27409,-15455,-4150,14968,32767,-32768,-19193,-29987,11169,16657,-29761,1826,18602,4562,-32768,32767,11745,16148,24007,1101,4879,11703,3697,-28417,174,10947,32767,27914,23660,-32768,-31317,32767,-32768,-938,-28416,10839,32767,-31649,-10835,-17043,-27689,-17448,21001,6020,28157,-3150,-25931,-22591,11138,-16968,-6919,32767,25087,-1259,-32768,20208,-529,24885,-26453,3015,-16640,-21461,-26788,-32768,23389,32767,-17966,12468,-32768,32767,-967,32767,-23873,-16209,16354,-24344,-32768,19594,-32768,29289,15467,-18738,-32768,27519,32767,-30283,31602,-32768,32767,18272,-27020,-15932,-29699,-21053,28805,-32768,-19808,23626,-30339,1032,-8479,-32768,-18974,-4437,18326,-13701,28750,16315,7168,-11352,9778,-32768,-4833,-22451,-32768,2244,-16837,26842,-5484,24996,-13029,-27300,6672,30036,6693,-32768,32767,24116,-19549,10972,772,20803,-32768,32767,-24056,31187,-16211,-6101,-32768,-7727,32767,20144,30811,-32768,-192,32767,20422,20372,-32768,12189,-22917,-8249,-12822,19071,-32768,-11939,32767,32767,-3158,-21067,32767,-32768,-13851,-11966,-32768,32767,7899,-32768,-32768,26262,-402,7203,29356,2952,-3403,-20134,-32768,-14859,-13238,-7673,-32768,-32768,2732,32767,26377,31775,32767,32767,-22238,-509,-32768,1786,-14004,-32768,21454,-22539,-3136,7811,32767,-7849,8799,-32768,24058,-29760,32767,5844,-23256,11507,32767,5824,8233,17496,-9002,18514,2996,-30061,21300,-32768,21815,32767,10359,-9153,-28671,9506,2143,-32768,-197,-32768,21955,-32768,8072,12170,-1600,-394,-10694,-28893,-32768,2706,12771,32767,-9309,16512,-882,-32768,-32768,-28085,32767,9246,28155,-12309,-32768,32767,-15222,7309,-26225,-32768,32767,-32096,-27133,-5651,-4015,2611,-19275,-32278,23935,-4755,-32768,-8010,-32768,-8961,4909,-26629,32767,30
551,32767,18932,-11478,-32768,32767,32767,-32768,2494,-7230,-17420,-2026,23222,-7762,-32768,-5287,-23131,-27346,7037,-9346,-32768,-10389,18562,29613,-13739,-5989,-23055,-15990,-18578,4312,20167,32767,-29925,-27804,32767,-32768,-23977,7257,-16272,-18338,4417,32767,29596,10676,31942,21291,2893,432,-8025,-19851,-6066,-1337,-27738,4385,2932,-32768,-29059,-11543,-8947,-10246,-15432,11538,29197,16476,-32768,-15947,-32768,28613,30224,-32768,-32768,-32768,-32294,29538,-25851,31420,19771,32767,-10,21757,-26176,2711,-29567,19101,14519,26942,-5535,-32768,-9555,-27141,32767,11737,7375,189,973,17898,-26291,6340,14735,-32768,13159,16590,12891,-13432,-24033,31349,32767,-20646,26530,-6950,23646,-18196,13721,29025,32767,32767,19673,15759,14269,30596,-4734,-13698,13706,9205,32767,-9590,-31200,-14698,4513,21971,11153,32221,-5430,-32768,-32768,-14404,-32768,30193,30464,32767,-14058,32767,29633,31832,-1856,30958,-30363,22846,-32177,-1391,-32768,-25485,8519,-8425,-27407,-24486,32767,953,-23267,18338,23658,-32768,-24778,-22009,-16052,11660,18244,-31185,-16026,-7945,-18432,-31209,20620,-7248,4751,13201,26870,15389,32767,-11656,-32768,-32768,26806,24836,-26809,13235,32704,-31543,31636,32767,-23603,-24821,27640,24229,-28408,-29989,-9550,-17300,-17329,-19617,-20978,18673,-24011,14573,-8009,19344,1398,-5815,20984,32352,-32768,7082,-15117,-32768,-27145,26677,19275,4993,26977,5860,11827,-6170,32767,-8319,-32768,16328,10022,-17009,26334,-4215,-32768,-1479,9781,-24867,10557,-10585,24794,13421,-17264,-10435,32767,15515,-32768,9860,22380,-27708,30577,21586,-32768,-22864,-12271,-16586,11841,-32768,-21681,-23660,2617,-12067,40,-12871,-24548,-15589,10250,32767,28569,32767,-32768,-28940,32767,32767,-31921,-4554,-19138,23410,32767,13408,-13669,-7687,32767,24044,2598,31609,-17775,-1894,-18957,32767,-2970,-406,32767,2207,32767,-32768,32767,-32768,-24441,-4911,-17831,-32553,23612,4986,-28086,21174,-32768,30643,25681,6386,-27403,21460,32767,24179,-30246,26283,22741,-12417,-27165,-3636,32767,-13802,-27287,-33
59,-32768,-31430,32767,-32220,-32768,30141,20832,-28555,-7337,32767,-563,9824,-14203,32767,7140,32767,-22687,-32768,-20468,27683,-32768,9039,-32768,-21997,32767,32767,-6860,27090,26320,32767,-24730,30059,-32768,-22898,-32129,-31722,23330,5835,32767,-32655,-9123,29649,-31765,-13722,14036,33,16293,-19044,-32768,23311,-25516,-32768,30660,32767,15973,-32768,32767,20677,-31294,-1678,-13158,-9410,-9370,1867,-21622,-14605,4606,24321,-32768,32767,25028,10227,-32768,-19030,4056,23280,5443,-11161,9536,30737,-32768,-10343,10063,-13484,6213,-32768,-11687,1021,32767,25137,-25940,-32768,32767,32767,21138,-839,21147,1849,-23871,-12404,-22675,671,-29642,6509,-25140,28541,22326,-23560,7775,-15485,89,-32768,-32768,32767,-28509,7463,13060,15853,-32768,9930,3201,-18702,-29796,32767,-7115,-13364,-6903,-22252,-2778,9614,-3806,-7166,20049,-1279,-20749,29502,-28326,5917,32767,-7589,15412,3394,-17911,-7241,25590,18949,-11570,32767,11394,23582,-5976,-5030,-28528,19951,-25883,-32174,-32768,-18798,4403,31199,12899,-32768,-16838,-27341,-32768,6145,15376,-30330,-10429,32228,-8616,25613,-14431,-6071,1406,-32768,7785,-32768,-14470,-18863,23465,32767,-20004,-25146,-1061,5410,32767,-32560,-32768,-11958,32445,913,-32768,23799,32767,-19996,-32768,32767,-27662,-24347,23753,-32768,32767,-5365,-24249,-5170,19765,-18505,16188,-27825,18866,27899,-29446,-32768,-32768,-12756,32767,-29686,18372,-32768,-32768,26500,-11499,-4316,21948,-3338,-27771,-29062,9049,-17128,-9928,1505,-26664,21911,31768,-23233,21834,-1327,-11434,-32600,-16847,-553,20611,-28708,5197,32767,-14631,32767,-24408,-30180,-3671,25017,-195,-10589,17757,-32768,27383,15386,32767,-9772,9781,21008,21935,-32644,-28238,32767,-9901,14992,19128,-5045,1365,26336,31374,32767,-23272,14583,-27330,-32768,3404,-28120,21866,25482,-32768,-32768,-31349,-2921,-9345,-21220,-32768,-32600,4287,24674,14817,17048,32767,-13606,-10066,-21478,-32768,-17416,5424,6164,-14666,-29295,-13361,-2422,32767,7098,19884,10940,32767,-6701,83,-6982,32767,-1479,-7239,25613,7829,16633
,-15001,-19296,16991,3008,-32768,-26534,32767,-30633,15310,-26836,-254,6130,-8117,-28775,-21585,-30404,-32768,31627,-10727,-32768,-16222,12935,24918,4832,-25692,-24928,-17851,17982,-17752,-21706,25517,-4671,30799,13694,32767,-32768,16532,21921,-14728,27911,6738,14203,32767,-2025,17516,10114,-32768,-15983,-22566,-22099,-17089,-16006,32767,32234,-17513,32767,6068,28073,15116,-32768,-21027,3537,0 diff --git a/tensorflow/lite/micro/integration_tests/seanet/sub/sub3_input0_int16.csv b/tensorflow/lite/micro/integration_tests/seanet/sub/sub3_input0_int16.csv new file mode 100644 index 0000000..0ee6276 --- /dev/null +++ b/tensorflow/lite/micro/integration_tests/seanet/sub/sub3_input0_int16.csv @@ -0,0 +1 @@ +-13480,29305,27213,-16314,21709,-6436,16716,25016,393,-29083,4006,-13293,-30593,27827,-17395,13690,-26768,-29355,-16432,28239,15996,14141,12301,26287,-13893,6836,21715,-31280,31720,23767,20437,18561,18152,7621,3666,-11686,31711,30713,-30368,-27552,25284,-5041,-11127,15389,27291,-15966,-9998,30944,-12600,6320,-14000,-10474,-27387,-2194,-27636,-12360,-8251,7420,-24530,20055,4858,21321,-15415,27808,23806,1423,18443,31093,-1634,-26699,11526,32678,-27659,16423,-238,-20558,-3656,23043,-10590,4564,-4915,-3210,-23132,15980,-26347,2546,30041,10692,22256,25844,-14756,23610,-21128,-17609,8795,12400,2622,22704,-17378,-26543,22031,-15207,-26289,-22979,-22660,-10252,7124,20250,22782,27206,1133,29334,28351,14475,-27880,32340,-8063,-2617,-11319,-29204,12759,-22757,15368,2464,-2091,-4288,-15883,-24742,12366,-30822,-13043,-23930,2077,21495,-26083,-20979,14948,27268,-8407,-29584,25782,-18545,32181,-11637,-2371,23279,5682,23939,8619,-9395,6315,14201,-25207,-25854,-10500,-11235,23919,-21930,-31722,-23833,-17998,6095,-28811,-4127,-5566,-5716,-15095,9904,-25218,-5612,-24094,-5944,-26790,28795,23436,-3529,-721,-4806,-22211,-17649,14233,31376,19449,28804,30940,-18303,-4511,-25186,9038,1005,24229,619,32334,-1679,-29074,-4257,19495,-13324,-16951,29462,15279,2750,13395,7677,7119,-16745,-12050,-870
1,-14966,27847,4834,-4244,-3328,17408,23875,-7266,-21497,-23147,-22882,6831,4933,-30321,-22626,14993,31714,-15590,-23270,-8967,30739,20823,28530,-31864,-31040,-212,28439,9034,-9503,-22112,1743,11008,30515,-29415,9784,4294,-7450,17845,16920,29859,-26301,-6031,-12794,8316,95,2094,21282,18373,3602,-31697,155,-23248,20720,8024,10512,-19720,6230,-24734,17188,-9654,-2448,258,-23605,-26584,20694,-3573,-21429,-27829,-718,31752,-28891,-25611,-10005,-21011,-3585,5220,-17413,-31747,-8763,-28710,-16240,-27127,832,19838,-15353,-16502,5552,-23365,19460,-1349,-28788,26856,-9166,10091,19264,-10289,2531,-27376,29176,-29772,31370,-12276,533,-7618,-17052,-28992,-5884,-22907,24344,-6214,375,-350,-11801,-24670,14297,-17767,6437,-15466,25687,-9566,-27878,22344,-23151,7358,-13563,31856,9922,4874,-25780,5046,-22798,912,-11557,28650,-10945,-13201,-30557,-13807,-5987,-24192,-13386,1363,-297,-4604,-4376,-19593,-2553,-2785,-28332,19475,-17486,18835,2312,1679,31137,30941,26490,-13341,-7322,-8156,4298,-8495,6446,22099,-23655,-10776,25211,27359,-8555,-31962,7729,-4457,11400,3989,-12658,-6880,12796,-2689,28538,5110,24757,-7389,-1629,-19060,-560,-30410,20101,-13482,17208,593,16094,19559,12362,-17276,-8550,-31854,25213,28025,2703,-17345,-23854,-11479,-30639,22515,7650,29707,7083,10368,-16050,22965,10454,-15778,-12992,-16933,26792,14529,-10869,20159,-17614,2422,259,-17975,412,2566,14951,-11752,-935,3381,27245,-28279,-7121,6082,30914,5048,17660,-3026,4946,28699,-22292,-18787,32452,-6534,-15183,8078,18721,-25436,-7852,1764,-1781,10199,15012,-12303,26865,17073,13340,15409,-31797,-9315,30493,-8681,-12991,-14241,-16316,-7766,-29668,16096,10793,-15173,-7746,-26837,26527,18224,-18475,22047,-12767,-98,15434,-15282,22713,-10479,10195,-5449,-19429,29190,21846,3281,-24207,-10888,-16163,-10848,-2580,-18931,-16822,27439,8310,23666,26002,26064,10396,12545,-21045,29432,-25075,-20661,-26664,-18641,-17892,-7360,-18866,-26898,-2610,-12604,7724,-30773,11847,-6371,-18328,-23644,-29807,-23295,-18601,-30964,-5836,29649,29
154,-15371,29932,-10892,-6395,8814,8356,-11986,20108,24855,-17217,22330,22913,-1472,7133,21658,5414,-24982,-20435,7092,23253,-423,31945,21467,294,-28110,-1075,21465,28566,11439,27507,-5866,19059,-32233,11050,-3833,-21999,19021,30266,-19779,31737,31762,-11607,-29063,30077,-25575,13351,1928,-4629,-20666,-11197,-21517,-7196,1632,25790,-31279,-3465,4713,1826,-8635,-3571,158,24481,9933,-31866,5927,26597,-2809,-31985,-28578,-29198,-26954,29250,-22028,-28341,-6447,-720,25923,24461,-4718,21592,-1509,11802,13416,-8118,19172,-2319,5072,25841,22995,19317,18060,16597,-32017,3693,-13644,73,3912,-6623,9325,-2687,-2243,-18346,2049,19068,4740,30140,-31716,19392,-30230,-9633,-1191,23548,1228,32186,5643,10876,-26047,-17836,-14159,-22365,-23218,-1560,-15847,-25890,-12597,-26249,-32490,-8311,-13588,4783,11672,21102,7869,-6055,9718,4228,25299,-21837,29562,-25704,12840,9391,-18846,-435,-23451,3910,24960,-27046,32657,29388,17540,2461,-18238,28855,25281,-8029,-31609,7972,11396,-4049,-17685,21284,-7780,21419,-19308,-4187,-30376,19991,-20883,4577,21332,18014,-7821,-10373,7127,12044,-14449,-25166,18776,-163,-29658,-7315,10021,-2177,-30869,-15606,-7883,-28254,20339,25987,29718,338,32084,-29471,3414,-9399,-5796,23284,-21046,-883,5088,14568,25950,8457,22729,3461,24575,19371,12786,-14092,-16063,-5458,32241,-11209,-23725,-23324,-9895,-26629,21654,21717,1539,8783,9049,-29383,3633,-24656,-26124,21185,-408,654,-28880,-26543,20052,-29613,-32135,15047,-9677,-2141,11627,11911,7013,29946,3483,25180,29176,-21149,-24658,-12595,-22094,-16228,32457,-22524,23586,-18165,10704,28375,-6069,5582,-23204,29163,-28015,20978,-12021,30387,-32242,-19293,-31374,-17617,21943,-19172,32646,17360,-30564,21932,20326,-8052,-21714,419,-22891,-22721,27623,5419,12143,-29643,-18596,-14843,-3119,30272,24510,2900,18203,-24827,-397,-23567,28995,-30703,25269,-21856,24888,13083,-28992,-15468,9514,-2342,22059,8630,3084,13726,-4121,-21769,-19949,-13238,9187,-23179,-27153,-11550,-16848,-29536,14946,-3876,1326,-13172,-8348,-32597,-7868,-2
0539,7379,3927,16713,18346,13982,-32692,25185,25485,-15876,-10531,-16840,30160,21494,-27023,3883,-14829,14264,4527,24009,29046,28597,8534,14904,6101,797,-23421,4003,-7036,19309,-15000,-26928,-19446,-7722,-31501,17005,6794,-5307,-29352,-27768,-25211,-17436,13625,24637,18826,30851,15479,-28547,20993,10004,-21699,20733,-21186,-18949,27579,14261,17404,20515,30627,26938,7176,16932,-8140,1695,25050,4921,-14326,-15131,449,-8565,-3323,-16169,30467,10317,27654,-12825,-20756,-25068,24436,12686,27096,-22991,28997,-21157,22937,27630,-26664,19922,3025,-24103,8898,-31991,28605,-28131,29887,-30938,-23271,-4516,-21892,6028,-17386,-9702,593,-13187,3658,-14807,-24452,-5214,24894,-23390,32151,-19769,21425,9888,-27239,-11157,-18490,-16878,-6502,25535,27136,-24106,-8410,31267,-28832,-3327,-25657,-30951,18175,17415,-32448,-14703,-20755,6145,11306,26262,3863,19634,7235,5428,10639,-17119,-19018,-18899,-26851,-30310,-1421,19609,-22914,25023,17522,13078,14654,-4625,2766,26226,32427,-18553,10675,-24537,-11894,13529,21925,12179,15477,-23732,-28609,4842,15241,17005,17647,3486,18358,-29116,-8819,28293,-69,-1716,-2562,25944,3664,-23657,-10109,25115,-12537,-10731,-12864,21073,-18080,-22325,-21554,27208,13020,-18982,-24413,912,8589,2449,435,30828,6948,7634,24867,-12986,8892,-12804,16516,25043,9949,10130,-18590,-5811,5492,17660,-14709,8148,-19413,-10577,16367,-9322,-3683,-4727,-751,11196,6846,29095,-16740,23845,15621,21790,-21645,31244,27224,11968,19627,-31237,-28050,32365,-5049,26784,21681,-16231,3452,21669,17115,-18856,30242,-7760,29463,-19616,11667,9854,28659,-22726,14608,-27312,-15308,-6684,-14865,-1037,12061,-31947,14889,-5786,-17251,-26572,-23424,12621,13130,8838,-31689,-24167,18310,-5968,-26099,-23053,-387,-22455,9305,24429,-29130,-21115,19006,16110,4902,3770,-29275,6476,-24940,8577,21436,-12853,-18195,-23963,30922,22670,-21647,-23107,21743,-30797,12464,-21049,-7665,-14023,-23876,4216,-20229,22241,-16719,28420,-31839,-26227,-14535,29657,-22930,-2428,-11410,1405,1725,27765,13671,4393,11046,393
3,-25592,12160,25674,-30761,3204,-1357,18609,-28196,-494,-15534,26144,-13368,-25117,30067,-30573,-31011,14437,21382,17366,-15827,-2125,13795,-29622,21919,15231,25302,-28096,1679,-22486,-1478,2403,-6706,-5538,-21259,-16496,-27837,-32586,-23879,788,19525,-21296,-27326,2892,-18290,-32320,13618,31259,14254,-10085,-12017,30193,-22184,25854,26250,7972,12646,21277,12771,18569,-27800,19271,-3508,16862,3511,-30243,23492,12473,-2913,23178,-9698,27063,29763,-24114,32566,20283,3165,29172,-24434,221,-5369,18224,-18283,-26497,2658,2194,-16075,-30311,-5909,14353,-1338,4950,32759,-18415,-21950,21463,-293,-13222,13338,3408,28522,19892,22881,-27375,-2290,-30196,-12752,13064,-27065,21838,3003,6167,-6884,6704,25748,24796,25520,11225,12084,8961,13532,16181,24511,-24032,-16078,-9209,-28875,3976,5777,-1121,-18805,-23309,-8317,-29751,22741,16091,-28284,-31883,-30632,-25965,28511,7782,8869,-27150,13558,19612,-15973,1850,-15034,-23635,22325,31636,-9619,5928,31065,19918,-9260,16662,-5219,26841,-2895,19520,-29327,11541,-3140,-12272,27542,31631,-9228,18306,-30267,11642,28440,16446,-26625,12122,21074,-12787,-12883,26759,-28395,-28360,-6609,-17149,-6551,-27137,12166,-13220,26438,-19291,27918,13312,-12991,9243,-32683,-13032,-9652,-50,-10477,7998,17775,-22217,-4972,-24584,-11283,-14796,25223,2703,19126,16048,26039,-8454,27164,2933,3662,-11135,-16281,-7430,28361,-2320,-24149,8004,10470,-2932,-16251,1516,-4404,-1851,-8174,19059,-31803,24562,-794,24117,-16101,-26472,28644,26163,-8172,10812,-11634,-31602,11845,24060,-3251,-21734,-30436,29909,-10037,-8743,-15763,-5910,11093,-20244,-23993,-19419,76,-30447,19266,24399,28051,11884,16562,-14039,-32501,-15559,-16900,23128,20210,-5346,-32105,-14452,-4623,7244,-8272,29711,-3195,-14603,14977,17233,-28818,-24031,-32100,19512,-9798,-27884,1364,25113,-20450,-10048,-15738,-27029,15923,-30192,-28725,13453,-12402,-9527,-30682,-1782,-10290,30995,23166,1807,21439,-2893,-32077,-22325,8607,7635,25082,18015,6383,7733,-22146,-4283,2607,-30588,22507,-30198,-22814,4584,3553,
-3474,-22886,20521,23762,14033,-17988,-8382,-4207,6663,1074,17342,-9389,20371,29632,16492,-1026,-18797,9881,-32388,-31403,14023,23719,4917,32270,-19698,-31244,-752,-23965,14624,-27331,5580,4332,31075,2541,1119,4548,-6166,-28260,-28947,-16890,2563,29377,-18600,25062,20027,-32420,20042,-29700,-21336,1777,-22445,18734,12966,-12135,-26829,-26248,-8933,-4325,-26942,-6671,-10409,31283,-12483,13279,31495,-25385,-2215,-7510,24579,11697,874,-32262,28038,14849,-27133,-32143,24961,28075,5143,-5505,25070,27453,11180,17683,-26776,-3728,-26210,15166,-26763,31856,6513,4402,17690,-13595,13554,-29318,-19745,21259,-4171,10574,24395,-1709,18704,-18284,12146,-21346,-1950,23682,17555,27798,-23715,-7765,-25744,18712,25475,16610,-19968,32127,21858,32542,-26070,-12904,12966,24189,30173,-23781,-10831,23040,12078,-6143,-15723,-21397,23421,20209,-12701,-28564,24043,22446,-12305,-4099,23181,3490,28313,29944,21365,-12046,-3237,11676,31272,-31594,-14978,-23363,8689,12977,-23183,1429,14520,3550,-29029,28034,9153,12592,18714,865,3787,9105,2867,-22139,117,8526,27638,21768,18463,-32669,-24429,32279,-29691,-728,-22155,8457,27485,-24692,-8462,-13272,-21605,-13611,16392,4699,21969,-2462,-20230,-17610,8680,-13225,-5396,26696,19541,-1000,-26873,15769,-431,19392,-20604,2356,-12955,-16737,-20898,-30830,18231,32662,-14008,9727,-32568,25612,-769,31531,-18630,-12645,12769,-18973,-32023,15269,-28849,22836,12055,-14605,-28879,21461,29263,-23627,24622,-31927,30406,14236,-21046,-12415,-23136,-16419,22443,-25950,-15446,18422,-23654,811,-6627,-29230,-14807,-3444,14268,-10690,22433,12725,5606,-8856,7609,-26509,-3771,-17500,-26008,1756,-13143,20908,-4290,19502,-10176,-21292,5221,23422,5236,-32516,30747,18803,-15244,8557,600,16225,-28927,26794,-18769,24329,-12657,-4765,-25548,-6018,32466,15698,24007,-31536,-153,27017,15919,15889,-30602,9484,-17881,-6416,-10015,14859,-31612,-9302,25819,31984,-2476,-16422,30458,-26495,-10800,-9322,-28534,28481,6143,-29994,-27124,20465,-294,5621,22904,2295,-2667,-15695,-32524,-11581,-103
22,-5975,-31161,-31909,2115,27390,20545,24763,29285,28157,-17319,-403,-29750,1394,-10921,-31734,16724,-17565,-2462,6071,27826,-6104,6841,-28990,18775,-23195,25734,4550,-18144,8973,27430,4543,6417,13647,-7035,14415,2321,-23421,16587,-30676,17026,31753,8094,-7142,-22366,7413,1667,-25772,-155,-30983,17099,-25819,6278,9503,-1266,-316,-8318,-22519,-26158,2103,9943,26609,-7261,12876,-689,-26759,-31731,-21914,31001,7223,21931,-9605,-26219,28696,-11849,5692,-20459,-28388,31098,-25019,-21155,-4399,-3147,2017,-15042,-25149,18641,-3716,-32390,-6239,-30244,-6992,3813,-20758,26896,23821,31638,14766,-8965,-26073,32729,32132,-28496,1936,-5617,-13575,-1562,18098,-6065,-30982,-4125,-18030,-21321,5493,-7303,-30689,-8114,14486,23068,-10720,-4650,-17968,-12448,-14490,3348,15724,27905,-23327,-21678,27769,-31599,-18711,5643,-12671,-14315,3435,26988,23079,8341,24896,16585,2257,333,-6253,-15478,-4722,-1059,-21643,3404,2301,-32588,-22663,-8980,-6969,-7970,-12037,8981,22764,12841,-26510,-12434,-32370,22290,23544,-31059,-28815,-28168,-25185,23047,-20148,24513,15407,29678,-6,16958,-20404,2112,-23044,14874,11301,20989,-4300,-28018,-7458,-21140,26911,9168,5743,134,760,13950,-20494,4941,11494,-28083,10240,12919,10065,-10490,-18745,24459,31696,-16078,20677,-5432,18436,-14189,10700,22626,29576,27249,15319,12271,11139,23834,-3699,-10660,10691,7194,25845,-7490,-24322,-11462,3521,17127,8702,25103,-4252,-30999,-28577,-11248,-31504,23558,23756,25678,-10966,27316,23104,24813,-1444,24133,-23664,17794,-25104,-1099,-28902,-19887,6633,-6549,-21361,-19072,27244,729,-18137,14293,18447,-31337,-19310,-17175,-12533,9075,14238,-24331,-12503,-6175,-14364,-24313,16069,-5664,3706,10288,20951,11996,26824,-9104,-30254,-26991,20913,19344,-20909,10337,25502,-24573,24657,32630,-18399,-19354,21551,18887,-22140,-23397,-7464,-13502,-13495,-15312,-16363,14577,-18713,11379,-6250,15067,1092,-4537,16362,25220,-27907,5504,-11804,-29320,-21147,20779,15018,3912,21037,4586,9214,-4824,29039,-6489,-28869,12728,7820,-13277,20512,-3301,
-26805,-1172,7617,-19367,8236,-8234,19323,10449,-13457,-8139,31039,12094,-25586,7670,17429,-21616,23853,16810,-26530,-17806,-9561,-12913,9225,-31949,-16901,-18449,2043,-9409,38,-10051,-19156,-12168,8006,27472,22264,30266,-32671,-22544,28113,26564,-24884,-3554,-14917,18249,28793,10436,-10675,-6008,30608,18726,2017,24662,-13852,-1459,-14785,29664,-2314,-320,27571,1719,28934,-31563,28490,-26978,-19039,-3847,-13910,-25359,18414,3905,-21902,16494,-31356,23886,20024,4977,-21357,16713,32348,18835,-23565,20472,17720,-9661,-21172,-2817,30645,-10774,-21271,-2622,-31535,-24505,26172,-25136,-29183,23483,16256,-22280,-5729,28492,-433,7677,-11079,32705,5568,31927,-17684,-27669,-15950,21565,-27927,7032,-29991,-17168,26442,32498,-5342,21137,20513,26263,-19278,23431,-31579,-17853,-25041,-24748,18170,4534,32656,-25477,-7121,23134,-24758,-10680,10936,12,12704,-14850,-31595,18172,-19886,-31323,23884,28831,12468,-28446,30155,16139,-24391,-1290,-10264,-7350,-7303,1452,-16854,-11388,3598,18944,-26068,26985,19527,7954,-32208,-14817,3168,18167,4237,-8715,7436,23959,-27015,-8065,7852,-10529,4825,-27522,-9096,777,28135,19616,-20217,-28718,32749,30486,16481,-658,16489,1440,-18603,-9687,-17696,508,-23094,5056,-19608,22270,17411,-18350,6055,-12086,71,-29368,-25613,26682,-22219,5801,10163,12344,-30778,7723,2487,-14561,-23223,31541,-5553,-10432,-5380,-17351,-2163,7493,-2960,-5604,15612,-1012,-16161,22981,-22092,4632,30282,-5899,12009,2632,-13962,-5649,19953,14771,-9013,27732,8864,18370,-4644,-3940,-22249,15573,-20173,-25065,-32706,-14669,3434,24319,10059,-28031,-13120,-21332,-29427,4776,12002,-23664,-8139,25144,-6711,19986,-11257,-4747,1098,-27092,6072,-27306,-11274,-14723,18275,27641,-15580,-19623,-836,4237,26325,-25366,-27502,-9336,25296,708,-26570,18552,30921,-15606,-27079,28712,-21550,-19000,18509,-27047,26781,-4165,-18911,-4044,15411,-14430,12623,-21694,14715,21733,-22975,-28890,-32533,-9963,31943,-23124,14329,-31550,-26098,20646,-8963,-3368,17114,-2604,-21644,-22674,7036,-13368,-7725,1155,-2
0796,17101,24772,-18095,17016,-1048,-8912,-25419,-13131,-433,16075,-22398,4033,29088,-11391,30466,-19037,-23509,-2856,19521,-158,-8269,13845,-28749,21351,11993,29842,-7635,7607,16363,17116,-25468,-22023,31974,-7713,11706,14906,-3947,1066,20528,24462,27414,-18136,11352,-21325,-32140,2669,-21941,17038,19885,-28248,-27139,-24446,-2291,-7284,-16547,-29013,-25417,3349,19219,11533,13276,28991,-10626,-7856,-16725,-27487,-13560,4222,4792,-11432,-22842,-10413,-1890,29388,5517,15483,8514,29895,-5243,56,-5424,30254,-1135,-5650,19954,6105,12964,-11692,-15045,13253,2328,-32675,-20701,27286,-23900,11927,-20902,-192,4797,-6334,-22447,-16826,-23707,-27531,24655,-8356,-32428,-12665,10069,19441,3748,-20038,-19415,-13911,14037,-13846,-16936,19895,-3645,24014,10674,27258,-26990,12870,17075,-11467,21741,5244,11092,26949,-1561,13649,7871,-28407,-12464,-17590,-17230,-13316,-12495,27636,25115,-13638,29394,4722,21905,11790,-27159,-16399,2744,2 diff --git a/tensorflow/lite/micro/integration_tests/seanet/sub/sub3_input1_int16.csv b/tensorflow/lite/micro/integration_tests/seanet/sub/sub3_input1_int16.csv new file mode 100644 index 0000000..6571dd6 --- /dev/null +++ b/tensorflow/lite/micro/integration_tests/seanet/sub/sub3_input1_int16.csv @@ -0,0 +1 @@ +-5939,4971,-2926,11410,-28551,-30987,-24917,25288,-31322,-14515,32174,9730,29889,-10472,-23038,2924 diff --git a/tensorflow/lite/micro/integration_tests/seanet/sub/sub4.tflite b/tensorflow/lite/micro/integration_tests/seanet/sub/sub4.tflite new file mode 100644 index 0000000..079263a Binary files /dev/null and b/tensorflow/lite/micro/integration_tests/seanet/sub/sub4.tflite differ diff --git a/tensorflow/lite/micro/integration_tests/seanet/sub/sub4_golden_int16.csv b/tensorflow/lite/micro/integration_tests/seanet/sub/sub4_golden_int16.csv new file mode 100644 index 0000000..46eecff --- /dev/null +++ b/tensorflow/lite/micro/integration_tests/seanet/sub/sub4_golden_int16.csv @@ -0,0 +1 @@ 
+-32768,-5391,32767,32767,-32768,32767,32767,-32768,32767,-31816,-32768,-2897,32767,28601,32767,-32768,32767,-32768,32767,-26188,-32768,-32768,-32768,-12867,-32768,-20112,32767,-32768,-32768,-22166,32767,-32768,-32768,-32768,14803,32767,-32768,-32768,-32457,32767,32767,-32768,-32768,-32768,4390,-32768,32767,-32768,32767,32767,5867,-32768,-7423,-2858,32767,-13316,-32768,-32768,-32768,-26800,32767,-32768,32767,-32768,-14108,-12339,32767,-32768,-32768,-11281,-22190,32767,-32768,-32768,-2981,-32768,19258,-8413,-32768,32767,32767,32767,32767,17761,32767,32767,32767,32767,32767,32767,-32768,-23034,32767,-32768,-25552,23734,-32768,-32768,-32768,-32768,-32768,-32768,7051,-16675,-32768,-17448,-9160,32767,-32768,32767,-32768,32767,6712,28914,-32768,32767,-32768,-18274,2174,32767,-32768,32767,32767,32767,32767,-32768,-32768,32767,32767,32767,1001,-32768,-32768,32767,32767,-32768,87,-9094,27245,32767,32767,-32768,-32768,7830,-28166,-26935,-32768,-32768,15239,-32768,-27131,-32768,-32768,9000,-32768,-32768,32767,32767,-14390,32767,-32768,32767,-32768,21126,-32768,-32768,29243,-23321,32767,32767,8157,-32768,-3386,-32768,-14204,10153,32767,-32768,-27432,5388,27916,32767,5707,-32768,32767,-32768,-28330,32767,-27719,-32768,-1565,32767,-29380,-32768,-32768,-22106,-32768,-32768,28143,-32768,32767,32767,32767,-32768,32767,32767,-32768,32767,32767,18864,9675,-25106,31519,-28869,-7310,32767,8996,26067,-32768,32767,-8715,32767,26679,-32768,-32768,-32768,32767,-32768,32767,7545,21087,26151,32767,-32768,32767,-32768,-17317,-17960,32767,-32768,-14096,4710,32767,30056,-22757,32767,-32768,18354,494,-32768,3490,32767,-32768,-26706,-32768,10497,32767,13772,-14638,-32768,32767,-32768,32767,-32768,32767,32767,32767,-32768,-29837,-32768,32767,-13295,-7625,32767,-32768,-4506,-32768,-19033,-32768,32767,-32768,-32768,-32768,-32768,-14762,408,-32768,-32768,32767,32767,-32768,-32768,-32768,32767,32767,-32768,32767,32767,21536,-32768,32767,-32768,32767,32767,-32768,32767,-32768,1222,32767,32767,-32768,291
06,-32768,32767,-32768,-32768,32767,-32768,-32768,-31633,-19712,-2370,-32768,-32768,-32768,32767,14788,-27858,-32768,-32768,32767,32767,27907,-32768,32767,-32768,32767,32767,32767,32767,32767,-8374,-32768,32767,-16064,-30825,3068,-30743,-24972,-32768,-23064,2621,32767,-32768,-32768,32767,32767,32767,32228,32767,32767,-32768,32767,-32506,-32768,32767,-32768,-32768,-32768,17110,-15144,-32768,32767,-32768,32767,32767,-32768,12841,32767,18761,-32768,-5126,-32768,-32768,-13214,32767,-32768,32767,-32768,1746,-32768,-32768,32767,-231,-32768,-22416,32767,-31770,32767,7641,11322,27673,-32768,-32768,32767,32767,-32768,32767,13497,-32768,-32768,-32768,-20784,32767,32767,-32768,-32768,32767,7476,-11763,-32768,-32768,25721,-32768,32767,32767,-32768,-32768,32767,31213,12257,-25436,-32768,32767,-32768,-32768,32767,-32768,32767,32767,32767,32767,32767,23678,32767,-32768,32767,-20576,9092,-32768,-32768,32767,32767,32767,8883,32767,2892,32767,32767,-13824,-32768,32767,-32768,-20239,-32768,10745,32767,-32768,32767,-25302,-32768,32767,-21845,32767,27402,12808,32767,-32768,32767,10569,-32768,-7425,32767,-32768,-24884,32767,32767,-27,23198,32767,-32768,1191,-32768,-32768,22440,-15786,32767,875,-32768,32767,32767,-32768,-32768,-9472,32767,32767,-32596,-32768,9048,28914,32767,-32768,-32768,32767,-28532,-15656,24801,32767,-32768,-32768,-30058,32767,32767,27462,-24713,32767,32767,25379,-18859,22982,32767,32767,1212,-32768,32767,32767,-31311,-32768,-32768,-32768,32767,32767,32767,7869,-26849,32767,-32768,-32768,-32768,32767,-32768,32767,-32768,-32768,-32768,32767,32767,26721,32767,32767,-32768,32767,-32768,32767,32767,32767,32767,-3386,-19700,-14297,-27116,15366,25729,32767,-32768,-14593,-32768,32767,16143,-32768,-32768,32767,-32768,-9893,32767,-32768,27468,1547,-32768,27254,32767,32767,32767,-18171,32767,-31520,32767,32767,32767,4004,32767,32767,17342,-32671,-32768,32767,32767,-18338,-32768,18505,3381,-3198,32767,20159,32767,-32768,-32768,-32768,32767,29420,-32768,-32768,-32768,32767,32767,-
32768,-22757,32767,32767,-3279,-11863,678,-32242,20394,-24887,-19205,-28247,32767,32767,-32768,32767,-9835,10284,-32768,-32768,-21093,-32768,32767,14684,32767,7608,-32768,26844,32767,32767,-32768,-32768,-32768,-32768,-32768,8831,32767,-32768,-32768,-32768,-23389,4505,-32768,-32768,-32768,32767,22812,-1012,245,-32768,-24423,32767,-32768,10118,14916,32767,32767,-32768,-9838,-15726,32767,32767,-14120,-32768,-26824,32767,32767,32767,-32768,-11778,-7480,-32768,-9636,-32768,32767,-32768,-32768,31424,-32768,32767,32767,-32768,32767,3932,-8964,32767,32767,32767,-32768,32767,-32768,-32052,32767,32767,-32768,-32768,-32768,32767,-32768,31516,-32768,-32768,-32768,-17416,-14351,21801,18397,32767,-26631,13149,-32768,32767,-19175,20593,-863,31432,4783,-32768,29941,-32768,-32768,32767,32767,-32768,32767,24799,32767,-32768,32767,32767,-32768,30622,32767,27161,14985,32767,32767,-32768,-32768,32767,32767,-1547,-32768,-20646,-7546,32767,-32768,-32768,-29637,22118,-5591,-26789,-32768,19807,6605,32767,-32768,32767,32767,-32768,-32768,-32768,32767,-10369,-21039,-30074,32767,32767,32767,32767,32767,-32768,32767,26628,32767,32767,-1617,-32768,-32768,-32768,-32768,32767,32767,-22110,-29680,-8163,32767,-32768,-32768,-18479,-32768,32767,-32768,32767,-18810,32767,16502,-32768,-32768,-32768,32767,-32768,28833,-32768,32767,-1267,32767,-32768,32767,13405,-15674,32767,32767,-32768,32767,18459,-32768,-32768,32767,-31714,-32768,-32768,-32768,-27476,32767,1459,26483,-32768,-4765,-32768,32767,-32768,-26598,2119,-32768,-32768,32767,-32768,32767,-32768,-32768,-12209,-13788,32767,32767,32767,32767,32767,22253,11073,32767,-32768,-32768,-32768,-32768,32767,7009,-32768,32767,-32768,32767,32767,28470,27518,32767,32767,32767,-32768,-32768,-32768,-6808,32767,32767,32767,32767,27277,32767,17144,32767,-11439,-963,32767,-32768,32767,-32768,-32768,32767,-16024,-32768,8621,-32768,19583,9516,-32768,-32768,32767,-32768,32767,-17792,-22965,-32768,-7133,32767,-32768,6554,-32768,9377,-32768,-32768,32767,32767,-32768,3276
7,32767,-32768,32767,3207,-6639,32767,-32768,-5780,32767,16273,-32768,869,28318,32767,-32768,32767,32767,-9160,-32768,-29333,32767,19483,-32768,-32768,-32768,32767,-32768,32767,-32768,32767,-32768,7361,-32768,-32768,-8525,32767,-8934,17176,8300,1999,-32768,-22440,27714,-7841,393,-32768,-32768,32767,-24697,-32768,-32768,-32768,32767,-32768,32767,-25474,-32768,32767,32767,31466,-32768,14555,32767,32767,26070,-21374,25593,-32768,8301,-32768,14823,-32768,32767,-32768,32767,22301,-32768,32767,20529,32767,-12261,-32768,-17919,-32768,32767,-18302,32767,-32768,32767,-32768,32767,-32768,32767,-32768,-32768,31692,-27758,6382,22945,16791,32767,-30627,10469,32767,-32768,32767,-32768,32767,32767,-25701,26798,20605,16595,32767,32767,-32768,-32768,-29262,12581,-32768,-32768,32767,-32768,19120,-32768,22062,-32768,32767,-32768,-32768,32767,-32768,32767,-23478,-23315,-20045,32767,-32768,32767,664,32767,-20742,32767,32767,15270,-32768,-18799,25697,498,-32768,-5890,-3572,32767,-32768,-20106,-31996,32767,-32768,-32768,-32768,32767,32767,21445,32767,-32768,32767,10081,-32768,-32768,29918,32767,22759,31430,29654,-32768,32767,32767,-32768,-32768,-24359,-28529,32767,31679,-32768,18963,32767,16242,-32768,-32768,32767,32767,-5166,-32768,-32768,32767,-32768,24341,-32768,1305,32767,24175,31114,32767,21205,-32768,32767,23665,-18277,-32768,-8573,32767,-32768,-32768,20423,-32768,-5310,32767,32767,31272,25019,-921,-32768,3052,25712,32767,32767,-32768,32767,28104,-20540,32767,-3409,-32768,32767,-32768,32767,32767,-9544,32767,3851,-32768,-32768,-32768,8190,-32768,-32768,5302,1905,-32768,29017,32767,32767,-2397,-28750,32767,-32768,-32768,13592,-32768,32767,-32768,-32768,-32768,32767,32767,-32768,-4335,-32768,-32768,32767,-32768,32767,32767,32767,32767,32767,30559,32767,32767,-18057,3535,-32768,-32768,32767,32767,8026,-32664,-32768,-32768,32767,32767,32767,-32768,32767,32767,-2150,-32768,-32768,14483,32767,32767,32767,-19718,32767,21867,32767,29805,-30626,32767,32767,-18106,-24091,32767,-32768,-32768,-
10025,32767,-12542,32767,32767,-30322,13083,-1735,-32768,-32768,-16476,-12400,-15520,-32768,32767,-32768,-32768,32767,-3485,32767,-32768,32767,19976,-20444,-5690,-4099,-32768,-32768,32767,13456,32767,32767,-32768,13875,1603,-21026,32767,29776,32767,-32768,32767,18213,-32768,-32768,32767,32767,32767,-32768,21482,32767,32767,-32768,-28213,-32768,-32768,32767,10317,-21838,9056,32767,-19530,32767,32767,-32768,32767,24591,-32768,32767,32767,-3331,-32768,-30669,-32768,-32768,32767,-32768,32767,32767,9470,-31561,-32768,23136,-19670,-32768,19855,-32768,32767,-32768,32767,32767,-32768,7191,32767,18285,26742,32767,-32768,-32768,32767,-32768,-29424,32767,-22274,30079,31162,-32768,32767,-23360,-32768,24934,-10642,29471,32767,-32768,10382,14621,-32768,-4440,-32768,32767,32767,-32768,32767,3685,-12363,-32768,32767,-32768,32767,-23938,32767,32767,32767,32767,-32768,32767,32767,30957,-32768,32767,-26245,32767,-24037,32767,32767,-19780,-32768,-32768,32767,-27384,32508,-32768,-32768,-32768,-28876,-7333,-32768,-16225,32767,10807,-65,32767,32767,-32768,-32768,116,-3990,32767,19873,32767,-32768,-2525,-31527,-11435,19171,27344,32767,-17927,32767,4610,-32768,-32768,-32768,28323,-32768,-22338,18480,-32768,32767,-32768,-32768,32767,-32768,32767,-32768,-32768,11910,-32768,32767,-32768,32767,5560,-32768,-32768,-32768,-32768,-18424,32767,-32768,-24480,6011,-32768,-32768,-10134,32767,-25059,-3331,-32768,32767,6050,7367,-32768,32767,32767,32767,-32768,-15431,9324,32767,-32768,32767,18068,-25817,25688,32767,19149,32767,32767,24778,2935,-32768,29203,32767,-32768,-18937,32767,-32768,-32768,32767,32767,32767,-32768,-32768,-1038,32767,-30539,-32768,32767,32767,-32768,-32768,32767,32767,-32768,32767,29556,-32768,5387,-32768,-5624,-32768,-32768,6132,-1610,32767,32767,32767,32767,32767,32767,32767,-32768,-32768,32767,-32768,-22898,8000,-32768,-32768,9039,8240,32767,32767,-32768,-32768,32767,32767,32767,1437,32767,-32768,22190,-32768,31411,-32768,-32768,20026,-32031,-631,-32768,9154,-32768,-32768,32767,3
2767,-32768,22452,26694,32767,32767,-18265,-32768,3260,32767,-5319,-28716,-32768,32767,14703,-2011,32767,32767,32767,32767,19195,27326,-14294,-32768,-16166,32767,-32768,-32768,32767,32767,-32768,32767,32767,23499,-32768,32767,32767,32767,-32768,-32768,9526,-32768,4816,-32768,32767,-19419,32767,27203,-32768,-27876,-32768,32767,-32768,-29479,-32768,-32768,-32768,20053,-29746,-1782,32767,32767,-3291,-32768,-1530,464,32767,32767,-32768,32767,32767,-32768,27024,-8210,11208,3544,32767,-32768,1893,32767,-32768,-32768,32767,32767,-32768,-32768,-32768,32767,5188,-32768,32767,11338,-32768,32767,-25375,-16644,3619,32767,7178,-11565,-9037,-29578,-32768,-32768,22617,-32768,-32768,19876,32767,-32768,26103,-30647,2552,32767,28739,-12559,-25103,32767,-32768,-32768,-31627,32767,11266,-32768,32767,22505,-32768,32767,32767,-32768,-32768,-13713,32767,32767,32767,32767,7888,6044,-12489,32767,32767,-30127,8699,13408,-1849,32767,32767,-13571,32767,-32768,-32768,-32768,-32768,3944,14501,-16894,-11432,32767,-32768,-32768,32767,-1419,32767,32767,-29984,-19772,-7039,-32768,32767,32767,32767,32767,-32768,-31012,-32768,-32768,-32768,15537,-32768,32767,32767,32767,-32768,-32768,-7390,-32768,-11257,-7232,6910,28437,25814,-32768,-13352,32767,32767,-32768,32767,32767,-31614,-32768,32767,32767,32767,26332,32767,-9809,-32768,-32768,32767,27437,32767,32767,32767,32767,-32768,-32768,32767,4396,-18093,-32768,-32768,-32768,-6313,32767,-32768,32767,-32768,32767,5105,32767,32767,1722,-32768,32767,32767,-32768,32767,32767,-17171,32767,-32768,32767,791,32767,-32768,-32768,32767,-32768,-32768,16953,-11694,31330,32767,-32768,32767,32767,30793,19571,32767,-22323,32767,-32768,16156,11549,32767,-8878,-32768,-32768,32767,-32040,32767,-390,-32768,-32768,-32768,32767,32767,23283,32767,32767,-19679,-17904,-32768,-32768,-18596,-32768,32767,-32768,-32768,32767,32767,-32768,32767,16649,32767,-32768,32767,-2910,23214,12429,-32768,32767,-31752,30264,-32768,-32768,-32768,-32768,-19301,-3820,32767,32767,-32768,-32768,32767,
32767,32767,32767,-30086,4560,-32768,-32768,-32768,-32768,-32768,32767,-32768,24422,-32768,31332,32767,32767,32767,-32768,-32768,-29731,25220,32767,32767,-11640,32767,-24113,32767,32767,9207,32767,32767,-13502,-32768,-32768,-32249,-3051,32767,582,29938,-32768,-31743,32767,2830,32767,-32768,-32768,14390,-32768,32767,898,-32768,721,32767,7113,32767,-32768,-1038,13103,-32768,-30147,32767,32767,32767,32767,-32768,32767,32767,32475,32767,-11849,-32768,-32768,-32768,32767,-32768,-13474,32767,-32768,32767,269,32767,32767,32767,32767,32767,-32768,-28276,-32768,32767,32767,-32768,-32768,32767,32767,32767,-32768,32767,1950,32767,-3972,32767,-4483,32767,-32768,-2327,32767,-9190,-32768,-32768,16715,32767,-25751,-13857,32767,-32768,-32768,-32768,28564,-2629,32767,32767,10629,25411,8151,-22803,31926,-32768,32767,31936,3456,-32768,32767,11430,32767,-32768,13965,-21642,-32768,32767,-32768,-32768,12620,32767,32767,32767,-32768,32767,32767,-28840,-32768,31686,-32768,6506,-32768,32767,29014,32767,-32768,-32768,3764,32767,32767,32767,32767,-32768,32767,-32768,32767,16026,-837,-32768,32767,-13623,-32768,32767,6027,32767,-32768,-32768,-32768,15474,32767,32767,-11703,-32768,-22562,-32768,2701,-32768,32767,32767,32767,-32768,32767,-32768,-32768,32767,32767,-32768,32767,-32768,32767,-32768,-32768,-32768,29546,-32768,-32768,9805,-32768,-32768,-14508,-32768,-32768,-32768,32767,-32768,-32768,32767,-7630,-32768,-4552,8726,32767,-32768,32767,32767,-32768,18709,-7863,32767,32767,-23901,-32768,32767,-17560,32767,28678,-8648,-32768,-32768,-32768,32767,-2936,32767,-32768,-19160,32767,-16993,16902,-32768,-5958,32767,-32768,-11845,30167,-22856,2450,27498,-32768,32767,32767,32767,32767,-32768,32767,32767,-32768,-32768,-32768,32767,-25775,824,-32768,3246,32767,-32768,10876,-32768,-7509,32767,25033,-14523,32767,-12695,-32768,5671,-32768,-32768,32767,16372,32767,-3359,32767,32767,32767,32767,32767,31752,32767,-32768,32767,4186,32767,32767,32767,32767,29692,32767,32767,16408,7584,-32768,32767,7711,-29412,3
2767,-32768,5210,-32768,32767,32767,32767,-24748,32767,1359,-32768,32767,-23127,32767,32767,18436,28073,-32768,31993,29026,32767,-32768,32767,32767,11991,-32768,32767,32767,32767,32767,32767,32767,3950,-32768,-32768,32767,-32768,32767,-32768,20578,-32768,32767,-15909,32767,-32768,-32768,23076,-32768,-32768,-32768,-6973,-20886,-32768,-32768,32767,-32768,-11342,32767,-28164,-32608,-32768,27190,1107,32767,-32768,32767,26799,-32768,-32768,-25638,32767,-32768,32767,-17681,32767,32767,32767,32767,26703,32767,32767,820,-18036,-32768,-32768,-32768,-31490,18876,32767,32767,32767,-32768,32767,32767,-31213,32767,32767,12123,3794,12537,32767,-32768,32767,-28583,16133,32767,28634,-32768,-32768,-7426,-25709,-32768,32767,17564,-32768,-32768,-32768,32767,-32768,-32768,-4160,32767,13486,8955,-32768,-1057,32767,6548,32767,-32768,24413,32767,29755,-28961,32767,-32768,-32768,32767,2571,-32768,32767,-18166,-32768,-4087,32767,-32768,32767,-32768,-18864,28766,18780,-32768,-32768,-32768,-26146,-32768,32767,-32768,32767,-32768,-32768,-16886,32767,-32768,32767,-21381,-2611,-32768,32767,-21941,32767,32767,32767,-32768,-32768,-32768,32767,30992,32767,32767,32767,32767,6458,-32768,30829,16751,-4696,32767,-32768,32767,32767,-15493,32767,32767,-32768,-22263,-32768,32767,-32768,-20398,277,-32768,32767,-32768,32767,-32768,-32768,19334,-32768,9909,32767,32767,-28237,136,32767,10269,-32768,12925,32767,32767,14535,32767,-32768,-32768,32767,-27666,-32768,8259,32767,-32768,20506,-32768,32767,32767,-32768,-10965,-8359,-32768,32767,32767,-32768,-32768,32767,-6829,32767,-32768,-32768,32767,-28129,27325,-32768,17746,-6849,30474,-32768,32767,-13695,-32768,5713,-32768,30131,-2346,2927,12260,-26126,32767,-3608,15917,32767,-32768,-26016,32767,11087,32767,-32768,32767,-32768,17809,32767,-21130,32767,1895,-32768,32767,-24992,-27237,25965,32767,32767,-6116,32767,-32768,-32768,-32768,-32768,32767,-32768,11695,27501,-27182,28414,15606,32767,31650,-32768,-32768,-4033,-12456,-32768,-31964,15363,32767,32767,-4284,-3276
8,-32768,-20283,31125,-32768,-32768,-32768,32767,32767,32767,-4632,32767,-32768,27049,32767,-11554,32767,-32768,25353,-32768,-32768,-32768,6520,-32768,32767,-32768,-32768,-32768,19873,-32768,32767,-2027,-32768,-25173,-20840,-24773,-32768,-32768,32767,-19805,-32768,8825,-32768,-32768,-32768,3327,-32768,-32768,32767,-32768,-32768,32767,-12281,32767,27895,11240,9467,26407,32767,15841,4728,-32768,-32768,-32768,32767,32767,32767,-19835,13429,-3849,-18992,7330,-17424,17267,32767,6758,-11513,-32768,9325 diff --git a/tensorflow/lite/micro/integration_tests/seanet/sub/sub4_input0_int16.csv b/tensorflow/lite/micro/integration_tests/seanet/sub/sub4_input0_int16.csv new file mode 100644 index 0000000..563e99d --- /dev/null +++ b/tensorflow/lite/micro/integration_tests/seanet/sub/sub4_input0_int16.csv @@ -0,0 +1 @@ +-12072,-1790,11291,23374,-21315,16826,32605,-24704,18694,-10561,-26290,-962,21391,9494,17448,-19624,27411,-22829,22956,-8693,-22847,-12907,-18936,-4271,-31649,-6676,14710,-22691,-23520,-7357,28637,-28571,-14983,-22921,4913,11353,-23148,-13592,-10774,24535,31052,-11080,-18332,-11872,1457,-23048,25993,-11634,25715,31784,1947,-13326,-2464,-948,14306,-4420,-18277,-19426,-12751,-8896,20569,-22710,31663,-27874,-4683,-4096,18791,-31290,-23709,-3744,-7366,29521,-16411,-11256,-990,-21106,6392,-2792,-18287,14119,29672,31658,18799,5895,22617,15883,20399,31988,20934,32260,-15653,-7646,25264,-26221,-8482,7878,-25221,-18271,-14363,-25119,-17726,-15000,2340,-5535,-27889,-5792,-3041,11969,-24980,32344,-11777,17964,2228,9597,-21687,22121,-16611,-6065,721,22217,-21685,28864,31763,18340,14634,-23698,-11940,12387,31589,22746,332,-16744,-20646,14556,27653,-14377,29,-3019,9043,23348,16869,-13348,-19595,2599,-9349,-8941,-14359,-25318,5058,-18965,-9006,-12838,-28666,2987,-22918,-19595,24350,29227,-4777,18207,-29224,22925,-25237,7012,-29225,-19478,9706,-7741,27688,17701,2707,-32053,-1124,-21311,-4715,3370,25440,-12939,-9106,1788,9266,24635,1894,-25860,18483,-12622,-9404,31340,-9201,-30491,-5
20,11776,-9752,-29607,-27904,-7338,-20018,-12822,9341,-24756,13030,16101,12705,-29675,24413,17656,-22982,16272,19959,6261,3211,-8334,10462,-9582,-2427,20014,2986,8652,-30252,20865,-2893,30688,8855,-10911,-26592,-13061,30283,-21000,18254,2505,6999,8680,11720,-29956,26602,-22520,-5748,-5961,13844,-23083,-4679,1563,21584,9976,-7554,25080,-32653,6092,164,-18756,1158,19428,-26402,-8864,-18600,3484,31839,4571,-4859,-12351,16586,-18837,22679,-32224,11577,15548,16907,-29339,-9904,-15930,11867,-4413,-2531,15772,-19627,-1496,-13724,-6317,-21416,20118,-17298,-31497,-26313,-24527,-4900,136,-23559,-30826,27084,11402,-15700,-15631,-31976,21762,24490,-27500,15800,28203,7148,-14643,22184,-31539,16889,11154,-25724,23961,-17444,405,32182,12227,-17387,9661,-16897,14269,-29431,-19577,31611,-16917,-13396,-10500,-6543,-787,-25688,-30783,-16923,21925,4908,-9247,-19067,-21673,21954,18428,9263,-18910,18224,-22219,18775,25838,26440,15634,16345,-2779,-32266,25415,-5332,-10232,1018,-10205,-8289,-19439,-7656,870,24903,-16976,-26458,29975,12406,28532,10697,18735,19342,-29419,17339,-10790,-20804,22901,-28561,-19255,-17665,5679,-5027,-31398,25358,-24255,22791,12083,-20418,4262,22014,6227,-30086,-1701,-16160,-29187,-4386,29557,-25115,18000,-21522,580,-31437,-23386,17313,-77,-31987,-7441,29347,-10545,13883,2536,3758,9185,-28793,-28946,13372,27444,-17617,11420,4480,-14226,-31895,-19099,-6899,18861,24749,-23569,-17805,19348,2481,-3905,-16234,-11235,8537,-18290,30467,30069,-24437,-19533,20798,10361,4068,-8443,-26996,15729,-14257,-32657,17086,-20000,15383,30685,16511,32530,22281,7859,18399,-28399,18178,-6830,3018,-20049,-18546,31290,13558,18667,2948,29254,960,14106,12197,-4589,-27177,30062,-11715,-6718,-17710,3566,11162,-27831,26147,-8398,-11688,21338,-7251,19725,9095,4251,28480,-32180,16043,3508,-31077,-2465,20889,-16479,-8260,29538,14799,-9,7700,26346,-30166,395,-26445,-23870,7448,-5240,13388,290,-13836,18659,17092,-19787,-13491,-3144,28808,20378,-10820,-21682,3003,9598,22706,-21906,-17543,20887,-9471
,-5197,8232,27321,-17073,-25353,-9977,26464,30444,9115,-8203,24017,22836,8424,-6260,7628,22110,23618,402,-18562,14257,23655,-10393,-20667,-26128,-16609,11045,19519,15817,2612,-8912,14717,-31941,-21695,-11831,23887,-21322,29886,-27116,-16543,-18979,30053,23538,8870,31622,12852,-21478,32619,-19684,12265,24554,22060,11810,-1124,-6539,-4746,-9001,5100,8540,15651,-25245,-4844,-25196,17486,5358,-23717,-21173,14588,-29267,-3284,12089,-29487,9117,513,-31877,9047,21478,26289,18756,-6032,20268,-10463,16996,13699,25596,1329,18379,12318,5756,-10845,-28088,16273,13309,-6087,-24057,6142,1122,-1062,17992,6692,18390,-32285,-32441,-19548,23361,9765,-19060,-31476,-31089,17305,19100,-31201,-7554,32466,21822,-1088,-3938,225,-10702,6769,-8261,-6375,-9376,20307,20784,-28804,32281,-3265,3413,-16249,-18081,-7001,-20670,28708,4874,16164,2525,-16151,8910,20865,24771,-32318,-18081,-16389,-17115,-17006,2931,31050,-13227,-14973,-12625,-7764,1495,-30530,-21871,-24011,26369,7572,-336,81,-29279,-8107,14332,-31958,3358,4951,29302,29370,-12297,-3266,-5220,27621,22073,-4687,-22666,-8904,13580,26592,22410,-21883,-3910,-2483,-29877,-3199,-27508,17530,-25930,-25050,10430,-16773,26814,13529,-26878,23066,1305,-2975,21079,30932,22319,-10944,15024,-21109,-10639,11589,12162,-31571,-29354,-15239,16183,-12250,10461,-32034,-31128,-19452,-5781,-4764,7236,6106,24719,-8839,4364,-11744,22318,-6365,6835,-287,10433,1588,-32365,9938,-31946,-23238,27628,17629,-16753,16871,8231,26273,-29565,29024,20073,-19297,10164,19216,9015,4974,27663,24922,-22023,-23316,32391,23008,-514,-21397,-6853,-2505,26303,-19144,-30829,-9837,7341,-1856,-8892,-15389,6574,2192,28351,-23280,12181,32310,-11900,-15441,-27580,32582,-3442,-6983,-9983,11086,23589,19397,26369,32361,-12531,28484,8838,18497,22441,-537,-22450,-27681,-20198,-32329,20265,22160,-7339,-9852,-2710,28289,-24706,-12436,-6134,-20643,14551,-17530,12860,-6244,18559,5478,-23170,-16882,-29609,23911,-32543,9570,-25084,25351,-421,13488,-17815,17317,4449,-5203,30483,14317,-18447,23718,61
27,-30339,-31896,30047,-10527,-11413,-11941,-21578,-9120,27851,484,8790,-28271,-1581,-30549,18891,-23622,-8829,703,-22855,-12982,29034,-26821,26558,-14492,-23203,-4053,-4577,11369,12359,14933,13934,19509,7386,3675,13081,-12789,-19306,-31982,-18836,24774,2326,-17212,30049,-18040,23860,20050,9450,9134,17878,28866,26591,-24779,-27975,-11043,-2260,16089,19744,18898,15050,9054,32582,5690,22635,-3797,-320,29195,-30460,14852,-32182,-32256,22387,-5319,-12726,2861,-15947,6500,3159,-17202,-17247,29511,-17104,31479,-5906,-7623,-11853,-2368,31884,-23011,2175,-12883,3112,-26248,-24289,28598,21002,-25233,27834,18323,-11636,28790,1065,-2204,11713,-26533,-1919,19972,5401,-26820,289,9399,28408,-25194,30128,17466,-3041,-18375,-9736,22701,6467,-11564,-12250,-18111,21788,-19808,30098,-20808,27960,-24416,2443,-30142,-22945,-2830,22270,-2966,5701,2755,663,-17815,-7449,9199,-2602,130,-11391,-20854,14114,-8198,-28861,-18410,-27916,29199,-25405,22153,-8456,-22368,27381,22125,10445,-21571,4831,28272,28252,8653,-7095,8495,-17108,2755,-30875,4920,-30282,25554,-20516,26809,7403,-21193,19649,6814,15735,-4070,-13111,-5948,-13587,23313,-6075,27882,-22936,12000,-25412,25084,-29649,29637,-31040,-20767,10519,-9214,2118,7616,5574,26683,-10166,3475,16778,-13926,21757,-16065,22856,18071,-8531,8895,6839,5508,18496,28847,-27863,-27401,-9713,4176,-31220,-15011,30985,-12977,6347,-12411,7323,-19355,31072,-29341,-22996,15660,-24290,30771,-7793,-7739,-6654,14686,-12031,21120,221,22852,-6885,28983,26408,5068,-19305,-6240,8530,165,-12515,-1955,-1186,15600,-18955,-6674,-10620,17098,-15485,-16535,-17813,28086,21525,7118,28795,-16349,26453,3346,-32467,-12868,9930,21885,7555,10432,9843,-21603,27254,14341,-30982,-22496,-8085,-9470,11199,10515,-23273,6294,20673,5391,-22956,-11143,32420,27533,-1715,-21369,-10917,26432,-31758,8079,-24937,433,18370,8024,10327,15818,7039,-19361,12234,7855,-6067,-14575,-2846,27856,-18401,-18059,6779,-20349,-1763,30275,19413,10380,8305,-306,-21464,1013,8534,31849,12406,-18592,14339,9328,-68
18,15899,-1132,-19441,25735,-20863,30856,25696,-3168,29252,1278,-13183,-19117,-13778,2719,-17610,-12859,1760,632,-13684,9631,26696,26695,-796,-9543,31261,-22790,-23563,4511,-13064,28209,-32194,-25479,-24271,14550,26718,-18303,-1439,-10938,-31196,18970,-14515,14084,23028,32083,15815,32114,10143,19999,18888,-5994,1173,-25037,-19562,16803,30146,2664,-10842,-15778,-14452,11747,17364,24663,-18817,13574,26268,-714,-29361,-12965,4807,22094,22608,14612,-6545,14774,7258,20419,9893,-10165,20962,29162,-6010,-7997,12420,-20768,-22527,-3327,24674,-4163,31905,22727,-10065,4342,-576,-20500,-15802,-5469,-4116,-5152,-28226,28160,-15251,-15267,28538,-1157,16361,-14931,15470,6630,-6786,-1888,-1361,-20888,-17013,21154,4466,17317,21469,-11882,4605,532,-6979,13793,9883,12114,-31991,20241,6045,-12473,-18597,26719,25452,29084,-26074,7131,28780,30438,-16878,-9365,-23477,-24916,27516,3425,-7249,3006,29489,-6483,25666,19739,-25648,22762,8162,-24403,23030,13263,-1106,-30975,-10180,-14983,-23981,26722,-12130,14161,20242,3143,-10476,-27310,7679,-6529,-16870,6590,-22625,28135,-13727,29917,14376,-29875,2387,16517,6069,8876,25992,-24319,-26715,30808,-26879,-9767,26677,-7394,9984,10344,-28851,29353,-7754,-22186,8276,-3533,9782,19055,-21365,3446,4853,-12511,-1474,-14597,28038,18187,-30048,17964,1223,-4104,-23854,13764,-21873,21366,-7946,27860,16253,12144,28217,-25318,20045,22331,10275,-26892,28121,-8712,32604,-7979,30601,16408,-6566,-11142,-14493,17096,-9090,10790,-30793,-13351,-19955,-9585,-2434,-21172,-5386,13724,3587,-21,25480,20118,-17464,-16548,38,-1325,21601,6597,24539,-24292,-838,-10465,-3796,6363,9076,19880,-5951,32646,1530,-10957,-28417,-19461,9401,-24675,-7415,6134,-18287,20551,-29090,-17732,17317,-17612,11915,-31126,-27430,3953,-13385,13636,-11585,13160,1845,-21573,-14373,-14184,-23913,-6116,28087,-26965,-8126,1995,-27403,-17365,-3364,29134,-8318,-1105,-12481,27597,2008,2445,-22720,24483,16436,18312,-32579,-5122,3095,11442,-22762,20932,5997,-8569,8526,14549,6356,21478,15801,8224,974,-17252
,9693,31142,-31866,-6286,20322,-21484,-25854,25249,11926,28263,-29815,-18273,-345,25002,-10137,-12432,25365,22839,-28181,-20912,19179,24637,-27658,12453,9810,-23441,1788,-11832,-1867,-24549,-14758,2036,-535,27184,11070,23762,23541,19190,28443,27741,-14899,-32197,27763,-21652,-7601,2655,-23416,-18285,3000,2735,14709,13691,-29439,-25559,30943,26861,16245,477,19007,-26266,7365,-11268,10426,-25145,-22740,6647,-10632,-210,-30038,3038,-11349,-20004,16499,30042,-30768,7452,8860,24450,23571,-6062,-13584,1082,28094,-1766,-9532,-18415,21738,4881,-668,29460,12769,25086,24965,6371,9070,-4744,-30499,-5366,18046,-16371,-31080,16387,27027,-13986,24918,26879,7800,-18047,17657,11503,12581,-29122,-21442,3162,-13738,1598,-23144,17633,-6446,30404,9029,-17319,-9253,-23922,11256,-13604,-9785,-19177,-13987,-13179,6656,-9874,-592,27955,22305,-1092,-15045,-508,154,31226,18336,-30595,18226,25287,-11814,8970,-2725,3720,1176,20483,-20297,629,26559,-24584,-16939,17767,30281,-17102,-24663,-13769,32494,1722,-11799,29245,3763,-12607,17103,-8422,-5525,1201,16117,2382,-3839,-3000,-9818,-12503,-31722,7507,-21020,-31687,6597,16045,-25988,8665,-10173,847,29838,9539,-4169,-8333,11056,-23560,-30150,-10498,19629,3739,-21441,14746,7470,-14782,30378,21729,-19055,-20533,-4552,13223,32269,13692,20978,2618,2006,-4146,31720,15514,-10000,2888,4450,-614,20968,11128,-4505,26028,-13296,-26393,-16436,-18024,1309,4813,-5608,-3795,20283,-30734,-12090,19209,-471,23832,20714,-9953,-6563,-2336,-32649,19470,20267,31605,27632,-28121,-10294,-19941,-28877,-18479,5157,-31977,23777,19809,29543,-12053,-22303,-2453,-16276,-3737,-2401,2293,9439,8569,-23389,-4432,27495,17604,-24900,26990,21817,-10493,-23487,30967,16740,12979,8740,28472,-3256,-23745,-21629,22979,9107,13902,21941,31714,24623,-16191,-26152,18435,1459,-6006,-12014,-13768,-11649,-2095,20688,-23735,21592,-11731,16700,1694,18328,18366,571,-12391,30649,25266,-29852,31265,28112,-5699,31266,-29996,12297,262,17855,-26796,-12962,24174,-28182,-19849,5627,-3882,10399,11718,-216
56,14517,16197,10221,6496,17392,-7410,23271,-12508,5363,3833,20284,-2947,-22170,-31840,21425,-10635,30057,-130,-29461,-19731,-16511,11005,19158,7728,12031,26590,-6532,-5943,-21335,-22111,-6173,-27303,28087,-32561,-23669,28998,27339,-14953,17371,5526,28095,-23677,18405,-966,7705,4125,-22260,13221,-10539,10045,-13891,-31537,-14905,-21888,-6407,-1268,13581,27001,-14514,-20578,20173,28481,19761,25825,-9986,1513,-27658,-13808,-15480,-23323,-22525,25874,-16597,8106,-23800,10400,30091,22008,15418,-28941,-24376,-9869,8371,11821,21386,-3864,29045,-8004,15093,14378,3056,26679,26696,-4482,-13900,-21234,-10704,-1013,21439,193,9937,-15673,-10537,20738,940,31168,-17314,-11874,4776,-26888,23411,298,-24055,239,20258,2361,20916,-26320,-345,4349,-12178,-10007,24390,22594,28393,27296,-27560,27974,24748,10779,18966,-3933,-16065,-26642,-19148,32419,-27529,-4473,24410,-28380,15140,89,30152,28728,15604,12354,20582,-22169,-9386,-13726,25632,18146,-11905,-14608,21177,10894,31927,-23498,26051,647,19878,-1319,24704,-1488,17654,-29731,-773,12508,-3050,-22363,-30240,5548,13159,-8548,-4600,32021,-20189,-16319,-18888,9481,-873,27788,25409,3528,8435,2705,-7569,10597,-30982,31790,10600,1147,-25245,31708,3794,22428,-29501,4635,-7184,-16056,13480,-19529,-18723,4189,24976,24283,24454,-30445,15237,26976,-9573,-20977,10517,-28699,2159,-18017,21855,9630,27788,-23726,-20087,1249,20223,19467,20351,21991,-25873,15636,-14782,28359,5319,-278,-19661,31767,-4522,-12842,22469,2000,27043,-17512,-27298,-26554,5136,28409,14582,-3885,-11636,-7489,-12690,896,-19458,24033,28875,21229,-28363,24527,-28153,-28102,27782,26710,-20545,18566,-31273,13954,-19340,-25048,-26544,9807,-28262,-11917,3254,-17074,-28618,-4816,-22874,-12311,-29951,23509,-10953,-14591,14771,-2533,-31539,-1511,2896,11333,-23935,17500,26074,-27162,6210,-2610,20589,17468,-7934,-12809,29535,-5829,22102,9519,-2871,-21865,-23629,-27369,17735,-975,17770,-13718,-6360,27420,-5641,5610,-12771,-1978,13811,-18223,-3932,10013,-7587,813,9128,-32340,26010,16918,3144
8,19635,-16854,31091,24291,-24741,-13611,-27869,25949,-8556,273,-17249,1078,15414,-11751,3610,-14091,-2493,24898,8309,-4820,15889,-4214,-18500,1882,-26751,-26068,15772,5435,26767,-1115,11452,17621,19612,18743,15772,10540,15254,-27838,17006,1389,31787,18881,12162,24561,9855,29942,14912,5446,2517,-12146,23242,2560,-9763,21028,-12329,1729,-24447,20955,27986,26717,-8215,22473,451,-24913,25315,-7677,11150,23246,6119,9318,-28838,10619,9634,21237,-26937,14142,22481,3980,-29697,24055,32420,11933,29016,30827,32358,1311,-15618,-17255,26300,-32551,14310,-21113,6830,-24173,29957,-5281,11540,-23360,-18308,7660,-20584,-23355,-19319,-2315,-6933,-22628,-20733,32589,-17436,-3765,11049,-9349,-10824,-27622,9025,368,20376,-32030,17432,8895,-16831,-15160,-8510,23026,-12817,32577,-5869,27212,18083,15735,31340,8864,26898,31317,272,-5987,-20953,-12130,-21792,-10452,6265,18098,14755,26612,-30870,15052,19512,-10360,18121,30006,4024,1259,4161,20437,-11706,16630,-9488,5355,16734,9504,-16911,-17974,-2465,-8533,-30497,30963,5830,-19862,-12142,-23201,32503,-23914,-14216,-1381,30791,4476,2972,-20103,-351,11720,2173,18131,-26966,8103,28761,9876,-9613,20384,-31350,-22134,12311,853,-26200,24418,-6030,-23601,-1357,17606,-19065,15048,-11054,-6262,9548,6234,-24540,-25557,-29762,-8679,-17272,19771,-25578,14008,-29201,-21594,-5605,23246,-19279,28051,-7097,-866,-22329,32038,-7283,21520,14514,32467,-25924,-12605,-25322,27698,10287,24532,20931,31696,17683,2144,-19286,10233,5560,-1559,31950,-23658,19242,31340,-5143,26511,22836,-16701,-7390,-29252,14123,-17255,-6771,92,-26288,25847,-25211,12928,-30824,-31052,6417,-14959,3289,22565,27807,-9373,45,28057,3408,-22484,4290,24287,31522,4824,20720,-23620,-30788,13773,-9183,-16733,2741,25877,-23471,6807,-17591,21617,24718,-11276,-3640,-2775,-30127,24849,10916,-26684,-26316,32607,-2267,28955,-18604,-30397,26902,-9337,9070,-23747,5890,-2274,10115,-18606,13797,-4546,-20778,1896,-25356,10001,-779,972,4069,-8672,14373,-1198,5283,31448,-23268,-8635,14828,3680,21338,-25031,1
9782,-19951,5911,28934,-7014,17674,629,-10966,25591,-8296,-9041,8619,16994,24451,-2030,32707,-28974,-29920,-12896,-11680,15658,-24827,3882,9128,-9023,9431,5180,15037,10505,-14800,-25913,-1339,-4135,-22391,-10610,5100,28361,28855,-1422,-22413,-31529,-6733,10331,-27860,-23015,-15246,26101,28703,21049,-1538,22934,-29489,8978,14912,-3835,20131,-22036,8415,-20657,-29792,-19450,2164,-24869,21530,-23924,-16171,-18398,6597,-15870,24118,-673,-32494,-8356,-6918,-8223,-23734,-30046,23193,-6574,-11826,2929,-17352,-30678,-16758,1104,-19136,-19640,29885,-15642,-27710,13399,-4076,29469,9259,3731,3142,8765,15537,5258,1570,-11148,-30413,-27384,21485,30421,31721,-6584,4458,-1278,-6304,2433,-5784,5731,11697,2243,-3821,-11635,3095 diff --git a/tensorflow/lite/micro/integration_tests/seanet/sub/sub4_input1_int16.csv b/tensorflow/lite/micro/integration_tests/seanet/sub/sub4_input1_int16.csv new file mode 100644 index 0000000..5db9e41 --- /dev/null +++ b/tensorflow/lite/micro/integration_tests/seanet/sub/sub4_input1_int16.csv @@ -0,0 +1 @@ +-1934,-23813,-22295,-26838,-9113,29392,-25661,-6047 diff --git a/tensorflow/lite/micro/integration_tests/seanet/transpose_conv/BUILD b/tensorflow/lite/micro/integration_tests/seanet/transpose_conv/BUILD new file mode 100644 index 0000000..174596d --- /dev/null +++ b/tensorflow/lite/micro/integration_tests/seanet/transpose_conv/BUILD @@ -0,0 +1,321 @@ +# Description: +# generated integration test for one specific kernel in a model. 
+load( + "//tensorflow/lite/micro:build_def.bzl", + "generate_cc_arrays", + "micro_copts", +) + +package( + default_visibility = ["//visibility:public"], + # Disabling layering_check because of http://b/177257332 + features = ["-layering_check"], + licenses = ["notice"], +) + +generate_cc_arrays( + name = "generated_transpose_conv0_model_data_cc", + src = "transpose_conv0.tflite", + out = "transpose_conv0_model_data.cc", +) + +generate_cc_arrays( + name = "generated_transpose_conv0_model_data_hdr", + src = "transpose_conv0.tflite", + out = "transpose_conv0_model_data.h", +) + +generate_cc_arrays( + name = "generated_transpose_conv1_model_data_cc", + src = "transpose_conv1.tflite", + out = "transpose_conv1_model_data.cc", +) + +generate_cc_arrays( + name = "generated_transpose_conv1_model_data_hdr", + src = "transpose_conv1.tflite", + out = "transpose_conv1_model_data.h", +) + +generate_cc_arrays( + name = "generated_transpose_conv2_model_data_cc", + src = "transpose_conv2.tflite", + out = "transpose_conv2_model_data.cc", +) + +generate_cc_arrays( + name = "generated_transpose_conv2_model_data_hdr", + src = "transpose_conv2.tflite", + out = "transpose_conv2_model_data.h", +) + +generate_cc_arrays( + name = "generated_transpose_conv3_model_data_cc", + src = "transpose_conv3.tflite", + out = "transpose_conv3_model_data.cc", +) + +generate_cc_arrays( + name = "generated_transpose_conv3_model_data_hdr", + src = "transpose_conv3.tflite", + out = "transpose_conv3_model_data.h", +) + +generate_cc_arrays( + name = "generated_transpose_conv4_model_data_cc", + src = "transpose_conv4.tflite", + out = "transpose_conv4_model_data.cc", +) + +generate_cc_arrays( + name = "generated_transpose_conv4_model_data_hdr", + src = "transpose_conv4.tflite", + out = "transpose_conv4_model_data.h", +) + +generate_cc_arrays( + name = "generated_transpose_conv0_input0_int32_test_data_cc", + src = "transpose_conv0_input0_int32.csv", + out = "transpose_conv0_input0_int32_test_data.cc", +) + 
+generate_cc_arrays( + name = "generated_transpose_conv0_input0_int32_test_data_hdr", + src = "transpose_conv0_input0_int32.csv", + out = "transpose_conv0_input0_int32_test_data.h", +) + +generate_cc_arrays( + name = "generated_transpose_conv0_input1_int16_test_data_cc", + src = "transpose_conv0_input1_int16.csv", + out = "transpose_conv0_input1_int16_test_data.cc", +) + +generate_cc_arrays( + name = "generated_transpose_conv0_input1_int16_test_data_hdr", + src = "transpose_conv0_input1_int16.csv", + out = "transpose_conv0_input1_int16_test_data.h", +) + +generate_cc_arrays( + name = "generated_transpose_conv0_golden_int16_test_data_cc", + src = "transpose_conv0_golden_int16.csv", + out = "transpose_conv0_golden_int16_test_data.cc", +) + +generate_cc_arrays( + name = "generated_transpose_conv0_golden_int16_test_data_hdr", + src = "transpose_conv0_golden_int16.csv", + out = "transpose_conv0_golden_int16_test_data.h", +) + +generate_cc_arrays( + name = "generated_transpose_conv1_input0_int32_test_data_cc", + src = "transpose_conv1_input0_int32.csv", + out = "transpose_conv1_input0_int32_test_data.cc", +) + +generate_cc_arrays( + name = "generated_transpose_conv1_input0_int32_test_data_hdr", + src = "transpose_conv1_input0_int32.csv", + out = "transpose_conv1_input0_int32_test_data.h", +) + +generate_cc_arrays( + name = "generated_transpose_conv1_input1_int16_test_data_cc", + src = "transpose_conv1_input1_int16.csv", + out = "transpose_conv1_input1_int16_test_data.cc", +) + +generate_cc_arrays( + name = "generated_transpose_conv1_input1_int16_test_data_hdr", + src = "transpose_conv1_input1_int16.csv", + out = "transpose_conv1_input1_int16_test_data.h", +) + +generate_cc_arrays( + name = "generated_transpose_conv1_golden_int16_test_data_cc", + src = "transpose_conv1_golden_int16.csv", + out = "transpose_conv1_golden_int16_test_data.cc", +) + +generate_cc_arrays( + name = "generated_transpose_conv1_golden_int16_test_data_hdr", + src = "transpose_conv1_golden_int16.csv", 
+ out = "transpose_conv1_golden_int16_test_data.h", +) + +generate_cc_arrays( + name = "generated_transpose_conv2_input0_int32_test_data_cc", + src = "transpose_conv2_input0_int32.csv", + out = "transpose_conv2_input0_int32_test_data.cc", +) + +generate_cc_arrays( + name = "generated_transpose_conv2_input0_int32_test_data_hdr", + src = "transpose_conv2_input0_int32.csv", + out = "transpose_conv2_input0_int32_test_data.h", +) + +generate_cc_arrays( + name = "generated_transpose_conv2_input1_int16_test_data_cc", + src = "transpose_conv2_input1_int16.csv", + out = "transpose_conv2_input1_int16_test_data.cc", +) + +generate_cc_arrays( + name = "generated_transpose_conv2_input1_int16_test_data_hdr", + src = "transpose_conv2_input1_int16.csv", + out = "transpose_conv2_input1_int16_test_data.h", +) + +generate_cc_arrays( + name = "generated_transpose_conv2_golden_int16_test_data_cc", + src = "transpose_conv2_golden_int16.csv", + out = "transpose_conv2_golden_int16_test_data.cc", +) + +generate_cc_arrays( + name = "generated_transpose_conv2_golden_int16_test_data_hdr", + src = "transpose_conv2_golden_int16.csv", + out = "transpose_conv2_golden_int16_test_data.h", +) + +generate_cc_arrays( + name = "generated_transpose_conv3_input0_int32_test_data_cc", + src = "transpose_conv3_input0_int32.csv", + out = "transpose_conv3_input0_int32_test_data.cc", +) + +generate_cc_arrays( + name = "generated_transpose_conv3_input0_int32_test_data_hdr", + src = "transpose_conv3_input0_int32.csv", + out = "transpose_conv3_input0_int32_test_data.h", +) + +generate_cc_arrays( + name = "generated_transpose_conv3_input1_int16_test_data_cc", + src = "transpose_conv3_input1_int16.csv", + out = "transpose_conv3_input1_int16_test_data.cc", +) + +generate_cc_arrays( + name = "generated_transpose_conv3_input1_int16_test_data_hdr", + src = "transpose_conv3_input1_int16.csv", + out = "transpose_conv3_input1_int16_test_data.h", +) + +generate_cc_arrays( + name = 
"generated_transpose_conv3_golden_int16_test_data_cc", + src = "transpose_conv3_golden_int16.csv", + out = "transpose_conv3_golden_int16_test_data.cc", +) + +generate_cc_arrays( + name = "generated_transpose_conv3_golden_int16_test_data_hdr", + src = "transpose_conv3_golden_int16.csv", + out = "transpose_conv3_golden_int16_test_data.h", +) + +generate_cc_arrays( + name = "generated_transpose_conv4_input0_int32_test_data_cc", + src = "transpose_conv4_input0_int32.csv", + out = "transpose_conv4_input0_int32_test_data.cc", +) + +generate_cc_arrays( + name = "generated_transpose_conv4_input0_int32_test_data_hdr", + src = "transpose_conv4_input0_int32.csv", + out = "transpose_conv4_input0_int32_test_data.h", +) + +generate_cc_arrays( + name = "generated_transpose_conv4_input1_int16_test_data_cc", + src = "transpose_conv4_input1_int16.csv", + out = "transpose_conv4_input1_int16_test_data.cc", +) + +generate_cc_arrays( + name = "generated_transpose_conv4_input1_int16_test_data_hdr", + src = "transpose_conv4_input1_int16.csv", + out = "transpose_conv4_input1_int16_test_data.h", +) + +generate_cc_arrays( + name = "generated_transpose_conv4_golden_int16_test_data_cc", + src = "transpose_conv4_golden_int16.csv", + out = "transpose_conv4_golden_int16_test_data.cc", +) + +generate_cc_arrays( + name = "generated_transpose_conv4_golden_int16_test_data_hdr", + src = "transpose_conv4_golden_int16.csv", + out = "transpose_conv4_golden_int16_test_data.h", +) + +cc_library( + name = "models_and_testdata", + srcs = [ + "generated_transpose_conv0_golden_int16_test_data_cc", + "generated_transpose_conv0_input0_int32_test_data_cc", + "generated_transpose_conv0_input1_int16_test_data_cc", + "generated_transpose_conv0_model_data_cc", + "generated_transpose_conv1_golden_int16_test_data_cc", + "generated_transpose_conv1_input0_int32_test_data_cc", + "generated_transpose_conv1_input1_int16_test_data_cc", + "generated_transpose_conv1_model_data_cc", + 
"generated_transpose_conv2_golden_int16_test_data_cc", + "generated_transpose_conv2_input0_int32_test_data_cc", + "generated_transpose_conv2_input1_int16_test_data_cc", + "generated_transpose_conv2_model_data_cc", + "generated_transpose_conv3_golden_int16_test_data_cc", + "generated_transpose_conv3_input0_int32_test_data_cc", + "generated_transpose_conv3_input1_int16_test_data_cc", + "generated_transpose_conv3_model_data_cc", + "generated_transpose_conv4_golden_int16_test_data_cc", + "generated_transpose_conv4_input0_int32_test_data_cc", + "generated_transpose_conv4_input1_int16_test_data_cc", + "generated_transpose_conv4_model_data_cc", + ], + hdrs = [ + "generated_transpose_conv0_golden_int16_test_data_hdr", + "generated_transpose_conv0_input0_int32_test_data_hdr", + "generated_transpose_conv0_input1_int16_test_data_hdr", + "generated_transpose_conv0_model_data_hdr", + "generated_transpose_conv1_golden_int16_test_data_hdr", + "generated_transpose_conv1_input0_int32_test_data_hdr", + "generated_transpose_conv1_input1_int16_test_data_hdr", + "generated_transpose_conv1_model_data_hdr", + "generated_transpose_conv2_golden_int16_test_data_hdr", + "generated_transpose_conv2_input0_int32_test_data_hdr", + "generated_transpose_conv2_input1_int16_test_data_hdr", + "generated_transpose_conv2_model_data_hdr", + "generated_transpose_conv3_golden_int16_test_data_hdr", + "generated_transpose_conv3_input0_int32_test_data_hdr", + "generated_transpose_conv3_input1_int16_test_data_hdr", + "generated_transpose_conv3_model_data_hdr", + "generated_transpose_conv4_golden_int16_test_data_hdr", + "generated_transpose_conv4_input0_int32_test_data_hdr", + "generated_transpose_conv4_input1_int16_test_data_hdr", + "generated_transpose_conv4_model_data_hdr", + ], + copts = micro_copts(), +) + +cc_test( + name = "integration_test", + srcs = [ + "integration_tests.cc", + ], + copts = micro_copts(), + deps = [ + ":models_and_testdata", + "//python/tflite_micro:python_ops_resolver", + 
"//tensorflow/lite/micro:micro_framework", + "//tensorflow/lite/micro:micro_log", + "//tensorflow/lite/micro:micro_resource_variable", + "//tensorflow/lite/micro:op_resolvers", + "//tensorflow/lite/micro:recording_allocators", + "//tensorflow/lite/micro/testing:micro_test", + ], +) diff --git a/tensorflow/lite/micro/integration_tests/seanet/transpose_conv/Makefile.inc b/tensorflow/lite/micro/integration_tests/seanet/transpose_conv/Makefile.inc new file mode 100644 index 0000000..c27beda --- /dev/null +++ b/tensorflow/lite/micro/integration_tests/seanet/transpose_conv/Makefile.inc @@ -0,0 +1,31 @@ +integration_tests_seanet_transpose_conv_GENERATOR_INPUTS := \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/transpose_conv/transpose_conv0.tflite \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/transpose_conv/transpose_conv1.tflite \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/transpose_conv/transpose_conv2.tflite \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/transpose_conv/transpose_conv3.tflite \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/transpose_conv/transpose_conv4.tflite \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/transpose_conv/transpose_conv0_input0_int32.csv \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/transpose_conv/transpose_conv0_input1_int16.csv \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/transpose_conv/transpose_conv0_golden_int16.csv \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/transpose_conv/transpose_conv1_input0_int32.csv \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/transpose_conv/transpose_conv1_input1_int16.csv \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/transpose_conv/transpose_conv1_golden_int16.csv \ 
+$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/transpose_conv/transpose_conv2_input0_int32.csv \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/transpose_conv/transpose_conv2_input1_int16.csv \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/transpose_conv/transpose_conv2_golden_int16.csv \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/transpose_conv/transpose_conv3_input0_int32.csv \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/transpose_conv/transpose_conv3_input1_int16.csv \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/transpose_conv/transpose_conv3_golden_int16.csv \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/transpose_conv/transpose_conv4_input0_int32.csv \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/transpose_conv/transpose_conv4_input1_int16.csv \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/transpose_conv/transpose_conv4_golden_int16.csv \ + +integration_tests_seanet_transpose_conv_SRCS := \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests/seanet/transpose_conv/integration_tests.cc \ +$(TENSORFLOW_ROOT)python/tflite_micro/python_ops_resolver.cc \ + +integration_tests_seanet_transpose_conv_HDR := \ +$(TENSORFLOW_ROOT)python/tflite_micro/python_ops_resolver.h \ + +$(eval $(call microlite_test,integration_tests_seanet_transpose_conv_test,\ +$(integration_tests_seanet_transpose_conv_SRCS),$(integration_tests_seanet_transpose_conv_HDR),$(integration_tests_seanet_transpose_conv_GENERATOR_INPUTS))) diff --git a/tensorflow/lite/micro/integration_tests/seanet/transpose_conv/integration_tests.cc b/tensorflow/lite/micro/integration_tests/seanet/transpose_conv/integration_tests.cc new file mode 100644 index 0000000..105459c --- /dev/null +++ b/tensorflow/lite/micro/integration_tests/seanet/transpose_conv/integration_tests.cc @@ -0,0 +1,144 @@ +/* Copyright 2022 The TensorFlow 
Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#include + +#include "python/tflite_micro/python_ops_resolver.h" +#include "tensorflow/lite/c/common.h" +#include "tensorflow/lite/micro/integration_tests/seanet/transpose_conv/transpose_conv0_golden_int16_test_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/transpose_conv/transpose_conv0_input0_int32_test_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/transpose_conv/transpose_conv0_input1_int16_test_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/transpose_conv/transpose_conv0_model_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/transpose_conv/transpose_conv1_golden_int16_test_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/transpose_conv/transpose_conv1_input0_int32_test_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/transpose_conv/transpose_conv1_input1_int16_test_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/transpose_conv/transpose_conv1_model_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/transpose_conv/transpose_conv2_golden_int16_test_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/transpose_conv/transpose_conv2_input0_int32_test_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/transpose_conv/transpose_conv2_input1_int16_test_data.h" 
+#include "tensorflow/lite/micro/integration_tests/seanet/transpose_conv/transpose_conv2_model_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/transpose_conv/transpose_conv3_golden_int16_test_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/transpose_conv/transpose_conv3_input0_int32_test_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/transpose_conv/transpose_conv3_input1_int16_test_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/transpose_conv/transpose_conv3_model_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/transpose_conv/transpose_conv4_golden_int16_test_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/transpose_conv/transpose_conv4_input0_int32_test_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/transpose_conv/transpose_conv4_input1_int16_test_data.h" +#include "tensorflow/lite/micro/integration_tests/seanet/transpose_conv/transpose_conv4_model_data.h" +#include "tensorflow/lite/micro/micro_log.h" +#include "tensorflow/lite/micro/micro_profiler.h" +#include "tensorflow/lite/micro/recording_micro_allocator.h" +#include "tensorflow/lite/micro/recording_micro_interpreter.h" +#include "tensorflow/lite/micro/system_setup.h" +#include "tensorflow/lite/micro/testing/micro_test.h" + +constexpr size_t kTensorArenaSize = 1024 * 100; +uint8_t tensor_arena[kTensorArenaSize]; + +namespace tflite { +namespace micro { +namespace { + +void RunModel(const uint8_t* model, const int32_t* input0, + const uint32_t input0_size, const int16_t* input1, + const uint32_t input1_size, const int16_t* golden, + const uint32_t golden_size, const char* name) { + InitializeTarget(); + MicroProfiler profiler; + PythonOpsResolver op_resolver; + + MicroInterpreter interpreter(GetModel(model), op_resolver, tensor_arena, + kTensorArenaSize, nullptr, &profiler); + interpreter.AllocateTensors(); + TfLiteTensor* input_tensor0 = interpreter.input(0); + 
TF_LITE_MICRO_EXPECT_EQ(input_tensor0->bytes, input0_size * sizeof(int32_t)); + memcpy(interpreter.input(0)->data.raw, input0, input_tensor0->bytes); + TfLiteTensor* input_tensor1 = interpreter.input(1); + TF_LITE_MICRO_EXPECT_EQ(input_tensor1->bytes, input1_size * sizeof(int16_t)); + memcpy(interpreter.input(1)->data.raw, input1, input_tensor1->bytes); + if (kTfLiteOk != interpreter.Invoke()) { + TF_LITE_MICRO_EXPECT(false); + return; + } + profiler.Log(); + MicroPrintf(""); + + TfLiteTensor* output_tensor = interpreter.output(0); + TF_LITE_MICRO_EXPECT_EQ(output_tensor->bytes, golden_size * sizeof(int16_t)); + int16_t* output = ::tflite::GetTensorData(output_tensor); + for (uint32_t i = 0; i < golden_size; i++) { + // TODO(b/205046520): Better understand why TfLite and TFLM can sometimes be + // off by 1. + TF_LITE_MICRO_EXPECT_NEAR(golden[i], output[i], 1); + } +} + +} // namespace +} // namespace micro +} // namespace tflite + +TF_LITE_MICRO_TESTS_BEGIN + +TF_LITE_MICRO_TEST(transpose_conv0_test) { + tflite::micro::RunModel( + g_transpose_conv0_model_data, g_transpose_conv0_input0_int32_test_data, + g_transpose_conv0_input0_int32_test_data_size, + g_transpose_conv0_input1_int16_test_data, + g_transpose_conv0_input1_int16_test_data_size, + g_transpose_conv0_golden_int16_test_data, + g_transpose_conv0_golden_int16_test_data_size, "transpose_conv0 test"); +} + +TF_LITE_MICRO_TEST(transpose_conv1_test) { + tflite::micro::RunModel( + g_transpose_conv1_model_data, g_transpose_conv1_input0_int32_test_data, + g_transpose_conv1_input0_int32_test_data_size, + g_transpose_conv1_input1_int16_test_data, + g_transpose_conv1_input1_int16_test_data_size, + g_transpose_conv1_golden_int16_test_data, + g_transpose_conv1_golden_int16_test_data_size, "transpose_conv1 test"); +} + +TF_LITE_MICRO_TEST(transpose_conv2_test) { + tflite::micro::RunModel( + g_transpose_conv2_model_data, g_transpose_conv2_input0_int32_test_data, + g_transpose_conv2_input0_int32_test_data_size, + 
g_transpose_conv2_input1_int16_test_data, + g_transpose_conv2_input1_int16_test_data_size, + g_transpose_conv2_golden_int16_test_data, + g_transpose_conv2_golden_int16_test_data_size, "transpose_conv2 test"); +} + +TF_LITE_MICRO_TEST(transpose_conv3_test) { + tflite::micro::RunModel( + g_transpose_conv3_model_data, g_transpose_conv3_input0_int32_test_data, + g_transpose_conv3_input0_int32_test_data_size, + g_transpose_conv3_input1_int16_test_data, + g_transpose_conv3_input1_int16_test_data_size, + g_transpose_conv3_golden_int16_test_data, + g_transpose_conv3_golden_int16_test_data_size, "transpose_conv3 test"); +} + +TF_LITE_MICRO_TEST(transpose_conv4_test) { + tflite::micro::RunModel( + g_transpose_conv4_model_data, g_transpose_conv4_input0_int32_test_data, + g_transpose_conv4_input0_int32_test_data_size, + g_transpose_conv4_input1_int16_test_data, + g_transpose_conv4_input1_int16_test_data_size, + g_transpose_conv4_golden_int16_test_data, + g_transpose_conv4_golden_int16_test_data_size, "transpose_conv4 test"); +} + +TF_LITE_MICRO_TESTS_END diff --git a/tensorflow/lite/micro/integration_tests/seanet/transpose_conv/transpose_conv0.tflite b/tensorflow/lite/micro/integration_tests/seanet/transpose_conv/transpose_conv0.tflite new file mode 100644 index 0000000..c6f4c28 Binary files /dev/null and b/tensorflow/lite/micro/integration_tests/seanet/transpose_conv/transpose_conv0.tflite differ diff --git a/tensorflow/lite/micro/integration_tests/seanet/transpose_conv/transpose_conv0_golden_int16.csv b/tensorflow/lite/micro/integration_tests/seanet/transpose_conv/transpose_conv0_golden_int16.csv new file mode 100644 index 0000000..8493a7a --- /dev/null +++ b/tensorflow/lite/micro/integration_tests/seanet/transpose_conv/transpose_conv0_golden_int16.csv @@ -0,0 +1 @@ 
+-5716,-27853,7641,-20887,-32768,12435,-10474,-4898,32767,32767,17230,10763,18016,12586,-32768,-15281,12969,-32768,14931,-2795,25460,-9437,-32768,-1147,145,3256,-13922,20208,-12845,-8593,-21399,16642,9315,-5133,32767,1514,1212,7269,4745,-10295,-9168,-3756,18305,-28011,-13193,-14920,11081,9846,20175,-6120,-16352,-7427,-32768,15012,17433,-28295,10,-32768,-32768,-28152,32767,-19497,9082,-15507,-16195,4330,-24752,9447,-3884,13018,21247,17581,-21603,-25269,-27473,-3858,13197,-11253,4010,-8857,5896,15588,32767,-21264,-9922,-1553,-32768,19289,-12074,25399,16455,24437,10595,566,-32768,8115,18678,-18830,32767,-30173,32767,-3066,-18716,32767,32767,-6138,3355,-16565,6126,-32768,11940,13489,-7512,11651,194,29633,32767,-4816,-21934,-11095,28484,-3469,22775,11010,14027,19684,32767,5597,-7454,9657,-3412,-18939,817,27952,11590,7967,-6206,13708,9154,30497,-19736,31303,-9165,-31936,-32768,32767,-31225,2099,16610,-32768,32767,16549,32767,11498,-19195,-5508,20384,-11630,-5122,19134,32767,-1394,32767,-3680,32767,32767,29513,-20608,32767,-29495,-14270,13104,32767,-16523,-32768,-19860,-10131,32767,32767,5877,-32768,32767,-11697,32767,26323,-2004,-32159,17446,25901,32767,32767,-14451,-21089,-32768,32767,-20794,-7183,32767,31158,-20940,-23498,11763,-14720,5510,-32768,-15246,2485,8346,7713,32767,10292,32767,10110,21264,14399,-1887,31154,20664,32767,-27988,-23025,32767,3490,32767,32767,17236,32767,32767,16756,4991,8006,24332,-28346,32767,18996,20318,32767,-27948,32767,-32768,32767,4466,32767,6247,32767,3204,-32768,-10199,32767,16375,32767,24239,32767,-287,4897,-32768,5805,32767,-11114,32767,-23889,-32768,-32768,-29621,-20154,-3258,-7616,6641,32767,14317,418,32767,27921,-2227,32767,-32768,32767,-22706,13057,32767,28320,-11503,732,8768,32767,-32768,32767,2767,-32768,-32768,32767,-32768,32767,-16166,32767,27977,32767,-5500,-20700,-32768,-30657,25108,32767,-13446,-13367,1440,9620,-32768,32767,-12436,18386,32767,11910,14606,1234,14993,-5560,32767,-32768,32767,32767,19987,-28329,32767,-18651,-32768
,32767,-10683,32767,-12522,32767,-3296,-6012,32767,10852,5577,-29510,-32768,32767,-1288,-16481,-10617,-32768,32767,-6692,21433,-1477,-26740,32767,32767,18518,-32768,-9480,5527,32767,32767,32767,32767,-26303,18228,16617,-22837,2462,15481,-32768,32767,32767,8844,22904,-261,12766,-32768,-32768,28198,5444,4346,-32768,-32768,18794,-32768,-10569,3683,-32768,27853,24394,-32768,-29174,-32768,-32768,-18430,17536,-32768,19178,-32768,-21720,18653,16780,-32768,32767,-32768,-6710,-27419,-22170,-29345,-1316,-25193,-32768,-4874,-30862,-13627,28207,1677,-6553,-32768,-22533,25113,7254,-32768,-32768,22668,32767,-809,-10954,-32768,-32768,18489,-32768,19405,-32768,-32768,-5649,-32768,-32768,32767,32767,-13562,-22291,-107,4066,12382,-24868,4194,-32768,-8148,-32768,-12243,21555,32767,20076,-27244,-12532,6225,-32768,8758,32767,-32768,1355,-32518,-32768,32767,-15879,-28204,-7980,27492,32767,-32768,14428,-32768,-20844,23985,5157,13797,23496,4425,32767,4903,8050,-32768,21596,4667,32342,-32768,-32768,11385,-32768,7093,20570,32767,-15839,32767,-15282,-32768,-6491,-32768,-29373,-27189,32767,-11484,-9012,-14353,-1580,-29223,-32081,-30119,-32768,-15384,-11763,32767,6649,-12745,-11771,-32768,14467,-19779,6638,32767,4427,-21195,-32768,20650,-10842,31197,-32768,-27290,31016,6482,-32768,-2924,-7808,32767,32767,-32768,19102,22704,17928,25539,-24815,-32768,27912,26308,32767,-27447,29222,3937,3596,32767,13060,-8568,1617,-1436,32767,31488,21355,1707,-12778,-368,17,-4725,14403,32767,-28858,-29911,-26514,32767,32767,32767,27532,-32768,-28326,-32768,13586,10766,32767,5267,5771,-32768,32767,27062,32767,-20218,1940,6795,-8883,2501,12258,32767,3931,28860,32767,21460,-32768,3794,23068,-16023,32191,-32768,-23768,-9573,-5014,-38,32767,4592,-27453,24585,3894,-32768,10108,15000,22436,32767,32767,-32768,2819,-4292,13974,32767,32767,-3511,32767,32767,-32768,16913,-15133,14227,-32768,25303,4999,-32768,18056,11643,32767,28659,32767,32767,-12673,32767,32767,31797,32767,-28510,32767,-8965,32767,2361,32767,1269,-25844,327
67,13717,-4461,-32768,31690,-32768,23978,-18516,6480,-16212,13595,29767,24573,18826,-7260,-10304,32767,-32768,-2117,5634,25096,32291,950,28167,10084,32767,32767,15719,26452,2021,24363,858,192,32767,-14406,26218,29602,-32768,32767,-32768,26482,32767,26971,-5450,27208,-2966,-243,-4480,4162,-19295,13303,20189,18802,-32768,300,32767,23508,32767,323,-2470,-5881,-31156,32405,7025,32767,26128,-19813,32767,7422,32767,-2173,32767,-20900,-18895,31594,19886,-29951,39,32767,32767,32767,-23759,-32768,-26224,32767,-3903,17632,-27220,-1173,11666,-32768,5206,-11281,32767,-5051,-8881,9144,19489,23286,-5868,1074,32767,10110,-32768,32767,7162,-4138,32767,17507,15622,-6109,-7069,19744,-7625,-32768,20402,27524,-7938,-19310,32767,12843,-1751,32767,-21143,1846,-6578,19839,-32768,4020,-9962,32767,32767,7554,26150,19776,-7492,24932,19062,2094,22530,12935,32767,32767,24524,-32768,-10567,-1831,26305,19853,-2782,11536,14579,3078,13316,-20350,32767,31923,-12156,-10597,32767,7773,7011,-11161,-9727,11747,-32768,32767,8316,5430,10861,20890,12353,-2210,-23343,-5062,2726,11627,-24455,-32768,-3378,-12722,-29389,17636,9056,11064,4137,17325,-10502,-3408,-29546,-10128,32767,6685,19441,3457,21864,-14086,28090,25140,10110,-16365,-24419,17304,7765,15527,-16140,-6051,29244,-13631,18579,-32768,3740,6038,30709,5493,-7478,17291,26469,-18197,24968,20990,-13238,-8812,12076,-4196,23923,-10136,-315,-322,20146,-32768,-518,-32768,-17791,712,20200,17023,32767,8725,32767,8975,32767,5506,25807,24470,22766,15269,15065,32767,-28727,32767,32767,-32768,-3283,21358,19671,22148,-4857,32767,32767,6256,2652,6433,-19218,-14610,21150,32767,4642,-23780,13047,-32768,-1846,-10359,32767,3713,5446,32767,11794,-32768,-21783,-16723,3464,6271,-30722,9120,32767,32767,-32768,25090,32767,32767,-21668,-10696,-10930,-32768,17573,1568,-11322,-1728,-23760,-15067,32767,7389,32767,12130,2416,32767,10944,10354,32767,-4481,32767,26248,-32768,-15395,-5323,32767,11856,-5146,5906,22587,32767,-32768,233,17864,-32768,32767,-3958,32767,2929,17917,1275,-
3523,8249,-17654,11274,32767,32767,-20340,32767,-4878,32767,32767,32767,-12412,32767,-32768,32767,17402,19795,32767,32492,32767,32767,32767,32767,18586,4346,27827,28755,-9799,27044,13248,21543,28507,-293,-6635,14648,20740,32767,2569,32556,-15220,31436,29637,-32714,12999,-9881,19908,-14534,-21642,-32083,30598,-30159,-32768,-13403,-24102,32767,-9401,32767,-8078,-14842,-14657,10084,-3557,-9387,28212,31008,-32768,12861,-9744,-32768,-6092,23511,-19780,29649,32767,31654,-32768,7093,18528,-32514,-9688,26709,32767,32767,-32768,-32768,-15676,9874,-17230,32767,-9329,5474,-19108,-32768,32767,-15322,6173,-32768,-32768,15209,-10716,17761,-32768,6258,27305,9734,-10286,-12868,32767,27947,-18100,-32768,-32768,-32768,27320,12246,22767,21679,26503,12575,16305,-15782,25239,13745,2331,19751,-32131,32619,-2649,15385,32767,1113,5335,-1317,-1708,-16623,-3456,-11384,32767,18856,592,-7431,20182,-32768,-8832,10389,-1745,32767,26892,-32768,12390,-32768,3676,-21534,-316,8808,-32768,10291,-6628,-9553,-26155,-32768,30760,-12991,4697,27224,-20014,-32768,-12117,-17801,-29245,4571,-24854,32767,-140,16634,-32768,7945,8709,32767,-29147,32767,-32768,-32768,-4047,-4414,-25706,-27088,-4825,-32768,-32768,-32768,6772,9284,-32768,16107,32767,28007,5756,-28498,-17850,-19573,-7171,-11232,-31313,-9095,-28567,-32768,-22701,-21386,-32768,-32768,-26753,-32768,-11348,-11529,-32768,-27155,-32768,-5643,14312,-13680,-7359,12411,26514,-4742,-6120,3840,11239,-4069,12155,-19665,24323,-28375,-13049,-1494,-9299,-6164,27952,15433,32767,291,25090,2207,32767,22229,32767,-23759,20176,-26727,-32768,25930,-32768,32767,-16794,-6220,28921,26156,-32768,22244,-4465,-18780,32767,-7956,-32768,-32768,4117,-32768,-30317,234,-32768,-32768,5452,-19461,-7347,6384,25761,-14079,-32768,32767,16128,15765,25073,1706,-21902,-11509,32767,25455,19898,2950,-26245,10637,-22456,-32768,16168,21635,-9368,26492,23369,20775,-32768,32767,7041,-11095,4360,32767,-3672,32767,7339,32767,16455,2806,-32768,12066,-3727,-3556,32767,-32768,9588,5570,14560,32767,
32767,18145,2638,-2641,-10131,19289,18317,-8017,14312,14649,-10824,32767,32767,32767,25471,-22511,30639,28597,21170,-32768,-7061,26129,5561,-32768,23644,32767,11016,15972,1065,2506,32767,32767,22528,-15736,-14537,-4761,1867,-12699,9822,32767,3681,-15781,22169,22976,32767,-5245,23106,14562,12611,32767,-32768,26311,2514,17774,-19650,32767,-32768,21596,23037,32767,-25327,7039,13329,-9493,1959,25994,6322,26180,32767,9147,23031,-13858,-22357,14679,-32768,-21039,2893,32767,-32768,5909,-28493,28998,-13809,-14822,12816,-27901,25791,32767,-31175,32767,24882,-11183,32767,-11945,-12948,873,-32768,20058,-12324,5997,-4129,15994,32767,32767,29234,32767,-14770,-11161,19612,-19626,-26812,22091,-6200,26608,-8524,31136,-32768,-5912,-27015,-13292,-32768,30088,7600,19241,-9550,-32768,-24405,-19764,3268,4307,901,-17992,584,-13228,712,15509,32767,7082,30046,18685,32767,32767,30606,-8505,-32768,31613,-252,1384,32767,20407,32767,-19327,-9278,-3608,-8893,29689,-797,32767,-5968,-12722,-12772,17795,27834,20293,-11586,20450,7447,12804,32767,-25642,-11938,8681,-25576,32767,18408,32767,12382,-14671,-19379,-5227,9742,32767,19596,8436,-32768,32767,-12496,4868,22209,32767,-10464,1991,9905,15935,29416,-7118,10173,24131,-10266,11805,-17392,18287,-2818,32767,32767,-10710,32767,17667,12925,20763,-8683,32767,5887,23383,-6740,-1484,-9944,-30309,-23970,12287,31893,8098,-28435,4384,22743,9679,9842,-32768,2072,9664,19687,15642,13454,-11068,21343,-18298,12319,15045,-18783,-31562,2516,-17181,-13177,1595,20279,32767,-4802,32500,-1843,7191,-2943,1431,8612,31986,1550,1293,6115,32767,4761,18757,-10423,-12008,-11135,30358,28972,-19382,-17567,10054,-32768,31451,-16625,32767,5264,32767,31492,-21922,-21566,-16233,9071,32767,22657,8298,-19610,30226,1482,-2750,25419,-24021,23372,-16523,9099,11138,-16157,32767,5920,-6079,-26659,24909,12887,-12798,17348,22850,-5231,-32768,10605,-32768,8699,16388,-12106,30484,15954,25285,29860,4068,10088,8724,32767,17615,9820,26184,-4489,29600,-6632,16614,-16609,32005,10070,-19735,-3374,-
258,-31422,-18456,-32768,-20313,-28472,32767,2263,26131,7,-3583,-32768,894,7683,14065,31068,-20266,-14995,5476,7028,1974,8886,-32768,11193,10807,-2625,24514,-15298,-3587,-8273,-32768,-31693,9544,28549,-17121,32767,7152,-2374,5705,4796,2744,11479,32767,32767,32767,30898,32767,19619,23165,32767,32767,32767,32767,11587,-32060,27867,515,10275,32767,32767,32767,17500,-2633,5488,24806,-9525,-32768,32767,32767,-6926,21522,-9858,-18762,-4829,32767,337,9857,8655,-15421,32767,-1350,32767,7566,-22505,32767,4800,13979,-8167,32767,9546,32767,32767,24870,7814,24605,32767,24058,32767,32767,21621,25802,32767,-19714,-20221,32767,32767,32767,-10656,32767,23430,-1441,2870,32767,17792,32767,15474,31224,3129,32767,3299,-1434,32767,25628,32767,32767,-24221,17134,2169,-24065,-29599,19207,32767,32767,-32768,17629,8420,-1410,22448,9884,32767,32767,30299,-13157,32767,-5817,-2741,32767,24239,-10831,27297,-32768,29854,-25186,32767,12086,32767,22255,-17838,-9171,8064,-3910,2419,32767,-32768,32767,-32768,-32768,24867,-1825,-32768,3325,-32768,29324,8453,-32768,32767,32767,1594,9574,32767,18687,32767,32767,5667,32767,32767,-32768,32767,-16834,-18473,4564,7381,-2321,31162,4982,31330,-32034,-32768,-4150,-13102,-3985,-14673,-19132,-18776,-32768,16880,32767,3454,-32768,-32768,-32768,-32768,-15102,-32768,-31151,11550,-8546,-32768,-18229,-14314,-8874,2178,32406,32767,-28406,-27621,26653,-28492,9208,-28893,-3442,-27970,32767,-10204,22423,32767,32767,29443,-32768,11596,30644,-25826,21808,-32768,32767,7898,13361,-1165,32767,32767,16048,-5785,31489,-32768,-20384,-439,9094,3845,-5479,-32768,-32768,-32768,-32768,-14244,32767,32767,-32768,-20557,4464,-32768,17688,-32768,32767,18711,-32768,1721,32427,2155,-18637,-16386,31577,-16124,5873,32767,14701,-22743,32767,24305,-5461,2968,-31982,4391,-32768,-22177,151,32767,-5170,5260,-14412,-17423,7346,2957,7576,-1575,-11072,32767,-32768,-32768,4056,-22076,-13792,7553,-22788,-6972,-29022,2442,992,32767,-3438,-32768,23528,-229,-7145,-32768,-2587,-32768,-32768,-30876,-1387
4,-508,1396,-32768,-32768,7786,-2152,29583,-32768,-6709,159,-32768,-28505,22785,-7861,-4224,15688,2839,28078,-32768,17692,6823,-18525,-27215,-32768,-26844,-15561,32767,-32768,19428,32767,32767,-8365,32767,-3384,32767,-32768,-32768,-31484,-32768,3695,17776,32767,-32768,-32768,10000,-32768,-20884,7106,32767,-9446,-2360,-10290,-1779,9509,15027,4123,-22256,32767,21983,32767,-5132,18586,-26340,-32768,29411,-4036,-5177,13177,-16257,-32768,32767,6905,-24542,15181,-30818,19336,32767,32767,32415,-22119,-32768,-14738,1791,14850,5787,-32768,-19964,10165,-11651,19992,32767,2257,32767,13240,18948,31272,32767,29426,32767,32767,27290,-31760,32767,15383,31379,-10629,-32768,-32768,27753,-14094,32767,32767,27653,6301,-16735,-1934,32767,25598,13337,13342,22141,32767,32767,32767,-32768,32767,6831,-2011,-7270,1008,-3677,28506,32767,32767,30371,32767,-22353,-3484,32767,-32768,8596,21251,7354,-14979,7644,11517,-803,6028,7839,32767,855,-9050,12892,10570,32767,32767,24179,-99,-32768,32767,-20703,-32768,-32768,32767,13352,32767,-6684,-6177,-21333,-21599,5130,-16716,-13851,-3168,-6626,-4394,4744,1164,-32768,-32768,32767,-5223,32767,21711,8118,-6092,32767,-12294,15091,32767,9502,-13119,-12162,21411,-18672,32767,20906,-20608,-32768,32767,-14162,7133,22011,32767,1864,32767,-32768,32767,14452,32767,17495,-11932,32767,-32768,32767,32767,3883,-529,-15319,26269,-26743,32767,-32768,1356,32767,14618,-8261,32767,-4259,-8213,-2931,-30440,7835,12407,20384,-11307,-32768,-3519,-4254,15822,5537,19533,10359,5991,1639,32767,4036,16719,11155,32767,6812,32767,-4896,-7044,15359,32767,-32768,19367,-13697,5029,8965,1253,8339,4656,-32135,7318,32767,3851,31176,32767,-26572,1218,1755,32767,10219,32767,32767,32767,-5961,-1041,32767,20577,-32768,8191,32767,-25365,20164,29436,-1171,-10682,14736,22490,31594,29853,-19404,32767,3913,-30624,748,32767,30521,29308,16284,4325,32325,15506,13795,13925,23459,22242,6227,22523,-1156,32767,-20904,5845,-25384,-7316,10651,14648,14886,10277,-23358,1126,32767,-19141,-17727,-12099,10864,
32767,11129,-24158,25699,-21008,32767,31954,15664,32767,-18169,29048,13758,-32768,14188,22322,-4129 diff --git a/tensorflow/lite/micro/integration_tests/seanet/transpose_conv/transpose_conv0_input0_int32.csv b/tensorflow/lite/micro/integration_tests/seanet/transpose_conv/transpose_conv0_input0_int32.csv new file mode 100644 index 0000000..dea04fa --- /dev/null +++ b/tensorflow/lite/micro/integration_tests/seanet/transpose_conv/transpose_conv0_input0_int32.csv @@ -0,0 +1 @@ +1,3,12,64 diff --git a/tensorflow/lite/micro/integration_tests/seanet/transpose_conv/transpose_conv0_input1_int16.csv b/tensorflow/lite/micro/integration_tests/seanet/transpose_conv/transpose_conv0_input1_int16.csv new file mode 100644 index 0000000..3b534bc --- /dev/null +++ b/tensorflow/lite/micro/integration_tests/seanet/transpose_conv/transpose_conv0_input1_int16.csv @@ -0,0 +1 @@ +12914,17691,24214,-29025,1277,3349,-31462,27742,-19240,27166,-22178,-31456,-18449,-1113,6322,1442,-15663,-6974,18280,4362,3614,-12506,-19582,-30651,-20843,28985,-5807,-19004,8977,-25950,2371,-7566,-25694,-26474,25474,-21258,-14127,26458,6145,-2396,21852,-4185,-32315,-8488,29930,22005,9291,21350,11902,-19978,21823,-1622,-28003,275,-5875,2294,-30093,-12012,7949,6255,21190,6173,1003,1013,11783,-1960,-1107,-12315,2285,20033,23389,-14287,-18443,18036,23059,21574,-22401,-5074,28968,-6150,-25456,29114,-7897,14680,4048,30827,-11491,-9284,-12840,-20578,17647,9707,-30875,-31409,-28965,18200,22412,-5050,-28522,-31422,-18719,-10901,15441,-8620,30889,-1799,2864,10156,-17956,-2365,-23889,12191,18443,30232,-31324,-14007,-27610,-14654,-1764,8161,-31690,-31196,13754,-489,-21151,-24919,16579,-31744,28978,28241,-906,5201,-9723,17831,-5688,32383,-28138,-20418,-14727,743,-5127,-7601,-953,-31168,2649,26706,-21521,-26759,24017,-25643,-8631,26743,3469,19437,-31852,-13415,23460,3847,22454,-15942,-5482,5218,-23681,5946,-1952,26337,1614,-2490,13816,-4280,-11534,8561,-28845,25657,29136,-13034,17229,4943,-121,7856,31588,31716,3927,-25682,2975,
32369,4065,-12539,-27210,-16280,21305,22356,-26140,-2200,-22311,-18145,11655,31976,-28173,6265,-24230,-14080,28183,24957,-5980,402,-30531,-26282,5605,167,12707,-3889,29115,-17350,24895,13017,-30287,-8654,-11496,-14746,-10659,30150,-26807,-29917,18492,22455,-12199,-6724,-20472,26788,-4048,-10353,24435,-1911,21774,-13034,24377,-23761,23469,-32507,-22518,4451,11,-2797,6493,-4913,29806,-3384,20991,-5248,-3622,-19763,31494,30976,5032,-12708,-32593,28579,21334,973,28176,-6790,6070,32393,-5296,25288,-28972,-4212,-4558,-13571,3121,-32096,-18675,-12058,-22253,-8300,-24745,-15237,29636,25512,21525,7658,15527,28671,-27382,31507,-18588,-8025,-25857,-13444,-22668,-15356,-12469,22946,4590,23839,-27189,8345,17987,-25943,-12239,24798,30652,-11605,-29728,6052,25579,-10053,-5466,27709,-5899,-7724,-30135,-5428,-26407,-4918,-3823,12454,-31508,-32203 diff --git a/tensorflow/lite/micro/integration_tests/seanet/transpose_conv/transpose_conv1.tflite b/tensorflow/lite/micro/integration_tests/seanet/transpose_conv/transpose_conv1.tflite new file mode 100644 index 0000000..3ab86bc Binary files /dev/null and b/tensorflow/lite/micro/integration_tests/seanet/transpose_conv/transpose_conv1.tflite differ diff --git a/tensorflow/lite/micro/integration_tests/seanet/transpose_conv/transpose_conv1_golden_int16.csv b/tensorflow/lite/micro/integration_tests/seanet/transpose_conv/transpose_conv1_golden_int16.csv new file mode 100644 index 0000000..eff96c4 --- /dev/null +++ b/tensorflow/lite/micro/integration_tests/seanet/transpose_conv/transpose_conv1_golden_int16.csv @@ -0,0 +1 @@ 
+-32768,-28900,32767,-32768,-13232,-32768,-17290,-32768,-13580,-32768,-32768,-27269,-32768,32767,12773,-32768,-32768,-32768,-30043,-32768,-32768,32767,-20508,6461,32767,32767,-779,-22432,-32768,-32768,18280,-1461,21818,32767,5545,-32768,-8333,-32768,-32768,-7542,12078,-4178,31618,-32768,-24257,-27046,-3202,-32768,-26788,-32102,-28397,32767,-32768,32767,-32019,-32768,-32768,-32768,-15290,-32768,32767,-31796,-16627,-32768,-32768,-6295,32767,-32768,-32768,-32768,-32768,-14580,-25321,610,-15423,5225,-32768,-32768,-23139,-32768,-21277,7605,-26576,-25462,-760,-25166,-32768,-32768,15486,-32768,-32768,-18296,887,8275,22684,-18608,-32768,-10592,-21572,-32768,12582,32767,-32768,19868,-4548,-32768,18376,5476,-32768,-32378,-32768,-32768,5062,26010,-32768,6327,6406,-32768,18848,-32768,-32768,8141,-16838,-21267,12813,852,-32768,32767,32767,-32768,-27105,5951,32767,32767,32767,17055,19579,32767,19754,19963,-32768,-3181,-32768,23990,32680,29898,32767,13846,10502,-24653,32767,26577,-1291,32767,16641,32767,32767,32767,3977,32767,-30847,6620,-1840,32767,32767,32767,32767,32767,-13804,32767,32767,-1024,-32768,26844,30540,32767,20318,-17797,32767,32767,-32768,-12487,5005,-14927,-32768,-22933,10034,-527,-19502,8959,32767,32767,32767,32767,32767,14249,14558,16167,32767,32767,-32768,32767,21843,32767,-18639,9088,32767,32767,32767,-26084,31375,32767,32767,32767,1037,32767,26592,27380,32767,32767,-29708,21831,32767,32767,27236,-14886,32767,32767,32767,32767,-32768,2061,-6084,26075,-11762,-19699,-32768,-9090,14784,32767,32767,-12025,32767,22398,16470,10867,32767,32767,24904,32767,21982,32767,18536,-16325,32767,-14519,30543,-8563,32767,-32768,-32768,5301,32767,32767,7432,-32768,-19819,32767,32767,29140,-32768,-21777,32767,13397,-14272,-3733,32767,17413,10610,1497,-32768,-32768,2459,-10039,-6555,32767,-22045,18766,32767,-32768,15571,32767,-13630,-10842,32767,-32413,-11622,32767,32767,32767,32767,27057,-25447,-32768,-32768,-27345,-28889,32767,-32768,-32768,11313,-28522,32767,-27766,-28520,-30201
,-32768,-32768,-32768,32767,-32768,31249,32767,-32768,-32768,-32768,-32768,-21138,-12910,19277,-10625,-32768,774,-32768,-27807,-32768,-32768,27851,-32768,24703,-17157,-32768,-32768,32767,1850,-32768,-32768,32767,-12362,-28035,20926,5930,-20941,22472,22175,-31166,9175,-22227,-32768,-21438,2384,-25182,-273,-10904,-32768,-28036,32767,-257,3373,27027,-32768,-32768,-32768,32767,2854,-32768,-16919,-32768,12329,32767,-32768,-4140,3477,2011,26602,-23003,-24827,-24127,29451,11967,11548,-5059,32767,5866,32767,6613,32767,28146,18759,32767,18493,2094,-32768,32767,32767,32767,7597,-14767,-30889,-29175,23364,-32768,-13743,5751,-32768,8618,-18017,-32607,-32768,-22504,-32768,16267,11331,-26742,-32768,-3260,18690,340,5805,-32768,-32768,24097,32160,-14627,32767,3835,-32768,-15359,17591,-12016,32767,-14781,-32768,-14823,-19573,-32768,-32768,-5757,-32768,-1314,-32768,-29821,-32768,-32768,-32768,-711,-6547,-21097,-28697,-32768,32767,-14597,-32768,32767,-32768,-32768,4329,-16850,-32768,23708,12291,4818,-32768,-2846,-21103,-24008,31042,-32768,31275,32767,16404,-32768,-32768,11860,-32768,-32768,32767,-32768,552,-32768,-3494,32767,-7209,-25098,-29915,11078,11909,-15493,-31459,-32768,-19298,32767,32767,17579,20230,3972,-15163,-4071,32767,-22080,16618,-12273,274,16928,28382,-16031,-32768,-32768,32767,-2658,-6848,-26729,-32768,-9994,-32768,-32768,-32768,32767,-32768,-7205,-32768,-29424,23699,-32768,26063,27152,-32768,32767,-32768,32309,3792,-1143,-32768,-32768,32767,-32768,-21708,-32768,32767,-32768,32767,24423,-32768,594,32767,-17824,-2685,-32768,7927,-32768,-32768,1916,1531,-32768,32767,-29220,-13193,18734,11672,-32768,22618,-5033,32767,-32768,9846,-26443,-24076,-8354,12394,14138,-2049,-30189,-9826,32767,32767,19378,32767,14798,-3804,-10203,13048,696,-5866,-5901,27203,-1765,-32768,-11985,-16919,8029,-32768,-7839,31255,-14649,-32768,-9306,16845,8564,170,-12300,-32768,-32768,-32768,-14673,-32768,-32768,-32768,-32768,-32768,-12483,32767,-10474,-32768,32767,-32768,-15688,-22328,4115,2594,3930,21
418,29705,-20986,-32768,-6290,-32768,-29149,32767,11622,-9164,-21212,-20092,17524,27276,32767,9860,19873,5276,13108,-20279,32767,6948,679,32767,-14196,32767,21339,13339,32767,29992,23348,13416,31716,4439,-15149,5425,10984,651,21118,32767,-32768,23503,29350,2962,12324,32767,32767,32767,22413,10126,9047,32767,11500,31523,25270,32767,4252,-32768,2586,-11731,-12351,27352,12397,32767,32767,831,14671,-32768,-12553,24385,9180,-12302,-6428,1407,-32768,12501,-32768,-32768,-32768,-32768,-32768,-32768,10546,-32768,-6369,-19619,-32768,-10061,25904,-32564,-32768,-14499,-19128,-32768,-32768,-32768,-30009,16316,-14071,-32768,-32768,-12125,-29727,-13399,-32768,10592,-32768,-32768,-32768,-4893,-32768,-9297,-32768,22054,8937,-32768,-14882,-29987,-32768,-28356,-13873,12373,-5880,-14568,-32768,-32768,-16122,-2971,-8062,-2355,-29915,6039,-32768,-4120,-32768,-10238,-16415,-18472,-32768,26605,12999,-32768,-32768,-9866,-10668,-32768,-26660,-32768,-32768,-29279,-8073,29859,-32768,-14933,-5698,-7173,-32768,-25550,-32768,-32768,-22581,6067,-17157,-32768,-19509,-32768,5355,-20368,-32768,-22958,4984,-32768,-14227,-32768,-32768,32767,27211,-32768,32767,15023,-32768,-22675,-1511,-32768,-21528,11321,-32485,-32768,-26869,-32768,3603,-4927,-32768,17498,24997,-32768,-14817,-14806,1262,978,32767,-28816,9644,32767,32767,-2037,-32768,32767,-1944,32767,-6055,-32768,30123,-6655,20463,-31567,32767,21752,20807,-25602,-32768,22855,27719,18188,32767,-13462,6602,24763,32767,-32768,18960,-1611,32767,10026,-32768,-25482,11270,32767,13802,-27491,32767,32767,-32768,7679,32767,14319,5907,-4177,-6396,-9339,32767,-11872,3814,-3084,-32768,2275,32767,32767,32767,32767,29001,18708,22409,-9743,-32768,3133,-32768,32767,-6572,-32768,32767,32767,32767,31225,32767,14477,-2659,-2093,6900,12710,32767,32767,-11229,32767,32767,18675,15393,-19228,23071,-880,-8165,32767,32767,32767,-9645,25497,32767,31226,32767,21955,32767,32767,-32098,32767,3491,8086,-5791,-32768,15150,-32768,32767,32767,32767,17180,-32102,12996,32767,32767,32767
,-5691,32767,32767,32767,-32768,32767,32767,32767,31166,-2322,-814,-32768,-11217,-32768,-32768,24222,-24928,32767,-32768,-7341,-32768,1112,19108,-8801,4592,-32768,32767,32767,28511,-483,-32768,-24511,-32768,-19124,11859,32767,-9475,-25071,-26938,-26950,-32768,-17441,32767,1183,-32215,-32768,13883,32767,-9690,7832,-32768,20073,-3231,18397,-32768,-32768,26987,4722,32767,-32768,-26078,11378,-12517,-32768,5088,-32768,32767,-15031,-19414,24184,829,28319,28875,-19563,-10517,-3934,-32768,-5591,1542,-32768,505,-25876,-32768,-12451,-32768,-31461,-32768,32767,-32768,-32768,-11384,-8522,-6051,32767,-32768,-28974,-32768,-30639,-5247,-2666,-32768,-31687,-32768,14740,3673,29720,-4178,20416,-7845,-32768,13767,32767,-32768,32767,19726,-5657,22574,32767,-32768,-10947,-23580,-32768,-32768,32767,-32768,-32768,-27247,24872,-16565,-16142,32767,-23438,32767,-26810,32767,-32768,-32768,-32768,-32768,-28788,-32768,-32768,-19842,20859,-13475,12735,26422,-32768,-27825,-1696,1462,-23445,8359,-19488,11464,360,27042,-22514,25729,6940,-32768,4858,8668,2356,-21448,-6820,-12149,4262,-17615,-32768,1665,23497,-32768,28636,-7095,-32768,32767,32767,-32768,-18024,25,-15053,-1555,6148,-13530,-23674,32767,-32768,-26369,-9439,-23590,-16191,32767,9524,-5956,3413,-6306,20964,-25970,-4813,-7895,-32768,9191,30771,-17854,-32768,31592,32767,-11250,-32768,-32768,-32768,18736,13440,-25235,8116,14644,-32768,32767,32767,-5268,-32042,32767,24608,-32768,2412,17283,-20888,-8871,-32768,-2161,4674,-32768,-18604,6776,-6103,-32768,32767,10219,-22447,19161,32767,-14276,21961,-21171,2125,-10953,-23986,-32768,-32768,-4966,-7770,-32768,-29438,-32386,-1534,-32768,15003,5656,-32768,-32768,-15827,-6635,-32768,-16845,-32768,-17031,16749,-32768,-32768,32767,-32768,-32768,32767,-32768,-32768,-32768,-32768,7639,14912,-32768,-15378,32767,-32768,-32565,32767,-32768,-32768,25331,-32768,6415,-7108,-32768,18265,-32768,-9699,-24397,32767,2806,22690,-3446,-32768,26573,32767,26686,-32768,-32768,-533,18630,-4175,-32768,-4503,17432,-32768,3404
,-8788,-20704,-16311,9596,24074,-32768,-32768,-32768,-32768,-32768,-32768,-29485,-9420,-32768,-20191,16171,-5439,32767,-16085,-13192,32767,-12066,7121,27288,16686,25674,21643,-26234,32767,29297,30124,32767,-24827,31325,-29041,32767,-32768,-32768,1131,-14259,27978,-14166,923,12417,1932,6615,32767,5572,24055,32767,-27562,2269,32767,22113,-32768,-32768,-32768,-7062,-24065,4690,-6386,-18264,17710,-32768,-32768,32767,32767,9265,-32768,32767,-32768,32767,4936,-20643,32767,9591,-32768,-32768,1232,-11369,32767,-21694,-31874,-6315,19570,8539,32767,13119,23700,17200,3799,2398,32767,4002,32767,-10775,32767,4039,-7506,-4013,32767,-7823,29313,6830,17888,-5493,20436,16058,6462,1129,991,32767,25101,31829,16525,32767,32767,-9776,-6157,32767,30611,18076,-552,32767,29186,-8411,32767,32767,23693,-5009,-1170,2439,4995,32767,-1012,8816,5247,10553,13146,10019,32767,32767,19073,28896,16791,85,-32768,4268,-32768,-32768,-32768,-21850,-32768,3110,-32768,-32768,23828,-13321,-32768,-32768,-19088,-1595,-2671,-32768,-25133,-65,3737,-20425,-32768,-32768,-15187,16114,-24134,-32768,-32768,-14107,-16717,-6341,5445,-32768,-32768,-32768,-32768,10836,-32768,7385,-17271,-1660,-32768,-32768,-32768,-32768,-31631,-10755,-23495,-32768,-13739,-32768,30096,-12322,-3587,-28352,-29812,-32768,-18196,32767,14413,10764,-13163,-22919,-32768,-32768,20514,-32768,-32768,-32768,-2162,-25058,-30481,-32768,-18410,-32768,-11013,-21323,1550,14749,-32768,-32768,-12820,32767,-17628,-32768,-7918,17643,-23858,-32768,-3400,-32768,-22215,-27600,-5697,-32768,18204,32767,-32274,-32768,-32768,-32768,-6125,16395,-32646,-11031,-17067,-32768,-4796,32767,-32768,-32768,-18048,-23816,-32768,25939,-30864,713,-32768,32767,-13713,-32768,-8945,-32768,-32768,-10416,20541,32767,-8831,32767,-1238,20517,32767,8849,16542,-31965,6201,-3475,-9992,13475,-27606,-5635,32767,-32768,-32768,-30719,32767,19021,16951,32369,-32768,-12813,-19262,-32768,22915,19559,-23474,10184,22852,32767,-32768,-16128,-32768,8333,30973,32767,12557,-14515,-21365,1651,32306,3
2767,11229,-2378,32767,-8461,32767,-32768,-20780,24911,-20543,21406,-3192,32767,-21482,32767,8690,32767,9843,-6635,7738,-21551,32767,32767,-806,32767,32767,24457,32767,28219,32767,-6863,9559,32767,32767,13026,32767,32767,32767,32767,6906,-1361,-5196,25558,32767,32767,22921,32767,14923,32767,-30840,23684,32767,32767,16131,32767,32767,2046,-32611,-14061,-28489,5595,32767,19092,-1638,32767,-16517,8793,32767,14036,24774,32767,32767,-7547,2955,21631,32767,2165,3802,24439,-11358,20582,23746,32767,-32768,-32768,-29026,-32768,32767,-32768,-32768,-32768,12430,-32768,29979,-27375,-31150,-20488,26230,-16773,-32768,24214,22476,9250,32767,-32768,-31349,22416,32767,26101,-6113,-32768,-31211,-6214,-2129,32767,6439,-32768,-32768,30393,20685,-22141,-27318,-32768,-21540,16768,29693,-28211,-27053,-16673,32767,-5789,9886,19575,11613,14004,-20245,-17125,21037,-27393,24523,-14305,9559,32767,-25113,16002,-32768,24719,-32768,-32768,-32768,12848,9116,-31653,-32768,-17305,-31768,-32768,-32768,-4287,-32768,-23738,-32768,-10389,10665,-2344,-29577,-2512,-11665,-32768,4814,16756,7944,-32768,18049,-21463,-156,-32768,-10980,10949,-32768,-32768,-10027,-32768,-5809,-32768,-32768,-1841,-32768,-2000,-26195,-29399,-32768,10188,-31896,-25069,-32768,4504,-24236,-32768,14045,-32768,19621,-32768,4242,20264,-8752,-25394,-26780,32767,-32768,-32768,-27517,-30982,-32768,16790,32767,32767,202,32767,-8427,16106,-32768,-32768,-9507,-29459,-21989,6426,-32768,32767,-2863,-5404,-19494,6945,-32768,-1276,25313,-32768,8785,2882,-32768,30023,16971,-4860,-32768,-2662,-20363,-32580,-32768,32767,-32768,-7195,11244,-26788,5017,-6549,17400,-32768,-14637,-20656,-2427,11288,7266,-2856,25912,-28123,32767,-17491,32767,-13304,161,32767,-7326,-9477,-20659,15649,6389,14409,-32768,-24117,2155,6497,-32768,-16651,-32768,-26887,-32768,32767,27846,-5158,-32768,-20165,32767,29438,32767,-24457,-17691,-13720,-32768,-17585,-11776,-32768,13790,-25427,-3878,-4155,24660,-10895,-32768,-32768,17947,32767,-29405,-32768,32767,32767,16258,-32768,67
33,-24372,-32768,886,-17619,-27457,-32768,-32768,5672,-32768,-21631,-31177,32767,-6696,-32768,-32768,-29146,-32768,-32768,12457,-24836,-32768,-32768,-32768,-11614,3194,-32768,-20436,-32768,-6704,-17180,-32768,-32768,28915,-6075,-32768,-26848,3938,-32768,-32768,-32768,-7565,1513,-6840,-32768,17321,-16211,-32768,15066,-32768,-3494,32767,-32768,2122,-32768,-20667,5579,-32768,-32768,32767,-25788,-32768,-13877,-32768,-32768,-5614,7840,-2048,-27251,-32768,-32768,1127,11725,32767,32767,-32768,1612,15535,-10045,-32768,2654,-30538,-4706,22390,-29785,-32669,32767,32767,16377,23480,-32768,1309,-32768,-32676,-32032,32767,-2134,21848,-17800,25490,-2304,-20885,-32768,32767,32767,32767,1690,-32768,22284,-32768,1204,29584,24638,-3794,-905,16435,9885,5027,32767,13291,-32308,6801,-32768,-32768,32767,-11160,-26715,10776,-25101,-32768,2040,-7014,-8565,-14688,21304,-16737,32767,17974,-15879,24643,17439,32767,-17586,24482,-675,-23499,-32768,2904,-30443,16262,32767,2656,-28800,32767,18529,247,28039,-7318,25267,15025,17125,25218,32767,22502,-15929,25134,23457,9713,32767,9422,32767,2533,32767,7639,19849,24338,-25073,18922,4281,3004,9427,20069,24985,29049,9275,20950,32489,10809,32767,32767,1866,32767,15579,10932,32767,6732,17904,-11307,1660,20518,32767,2405,8472,-5450,-9586,32001,-23089,15087,19454,32767,32767,8846,22247,32767 diff --git a/tensorflow/lite/micro/integration_tests/seanet/transpose_conv/transpose_conv1_input0_int32.csv b/tensorflow/lite/micro/integration_tests/seanet/transpose_conv/transpose_conv1_input0_int32.csv new file mode 100644 index 0000000..beb3029 --- /dev/null +++ b/tensorflow/lite/micro/integration_tests/seanet/transpose_conv/transpose_conv1_input0_int32.csv @@ -0,0 +1 @@ +1,3,22,32 diff --git a/tensorflow/lite/micro/integration_tests/seanet/transpose_conv/transpose_conv1_input1_int16.csv b/tensorflow/lite/micro/integration_tests/seanet/transpose_conv/transpose_conv1_input1_int16.csv new file mode 100644 index 0000000..2b30206 --- /dev/null +++ 
b/tensorflow/lite/micro/integration_tests/seanet/transpose_conv/transpose_conv1_input1_int16.csv @@ -0,0 +1 @@ +26927,6992,-17969,12166,26327,-32281,6996,18737,-10610,1940,-30854,28087,29122,5403,20080,9741,-8831,24437,-27156,-4050,8474,-29740,-22704,29175,16900,-24604,-32123,13864,18107,25433,29166,-26856,14286,30736,-8643,-22158,18825,19414,15408,-32043,-1083,30053,14383,29389,26456,24755,20532,-13585,17758,-26866,5211,14936,-27581,2012,12301,-32602,29610,16051,15555,-13915,26383,24602,-6418,-23220,23934,21312,10630,23663,7521,10536,-28363,17954,1468,-29445,-27266,-29116,-21672,-5486,-16415,28997,32061,-29897,22489,-6702,-11766,9532,-19851,13456,21852,-15858,18110,25332,8221,31093,23725,-11533,-27367,-29499,-6158,18988,-28392,-21079,-25570,-25926,7965,-4455,15705,8393,7755,-25956,2229,13634,20642,-14613,6121,26668,25945,3129,14691,28119,22055,-21170,4760,-12903,-11367,-6399,15529,-19651,30944,-6667,7082,11903,-13822,6138,11535,28203,-19864,-29940,-31255,-3669,15330,-12119,-28655,12460,2697,18900,-1730,19642,-30017,12705,6960,-31738,-26989,23989,19480,-20473,18597,31064,-22910,-5532,-14853,-24275,-29280,4867,-20431,-22850,11228,29782,4518,9510,-13143,25629,-11008,-22800,-19291,-9664,9623,-14522,-3329,-5908,-23350,-13556,-19868,-28978,-26300,31733,28635,17130,16791,-26297,-7730,-1333,29856,-22233,525,31694,-11233,4909,27210,-26669,2355,17132,14318,14806,-6531,-32538,2296,384,-5257,6787,24286,-14077,18643,-25975,-21203,31491,-10695,-10340,-17611,-30417,20074,-2630,19750,-10098,19834,-24511,6455,8093,-16375,-30164,10266,12618,5397,-11865,-31120,-18646,-9219,-10392,-492,-25692,-25557,20395,-1778,16023,30388,-21813,30677,-16916,-3053,-22495,-4860,32310,16419,30115,-10075,-24124,18509,32598,30323,26346,-28491,11145,11568,-32086,29823,27697,-30015,21082,-26429,13526,30968,16344,-10201,14021,-10859,-10967,32110,-13935,-27894,-20852,22987,28136,-17357,-9649,1006,26266,-27008,-11047,-26152,16096,15172,28577,23252,11937,-18627,-5867,-18889,-27765,-4408,-27467,-21947,-26718,-4
470,17884,16202,23897,4325,-12942,32114,22731,-6398,31272,-7250,1300,-3379,-11874,-25039,31484,29523,-12985,-25441,31925,-5897,-16904,3938,-26661,-11895,-8212,1564,-6750,17233,21584,-6820,-11487,-28331,21328,10277,9552,-2290,25642,-21207,-10758,30824,5662,29971,3727,6263,-3318,27891,-507,8222,-18431,-15911,-16427,22699,-22546,-2173,-28042,7254,-19669,26172,14657,-14544,2654,29983,25443,31960,-23803,-9305,15407,-10530,-15990,-11855,247,19242,-29471,24411,-29574,-7911,249,31666,24258,10146,-3996,1639,26170,-23376,-6671,-29697,6966,4069,-23605,30965,31534,-45,6977,-11908,19122,1535,-22146,-3844,-13141,-11537,22265,24307,1746,-15553,-5443,4248,591,-32379,25186,-26050,17877,-21096,8082,-2399,2688,32186,-23866,32652,-29874,27656,15193,-6428,-32462,-29850,-2972,12592,-8621,20080,-26346,230,18305,832,23477,5868,-2860,18435,18743,3574,4315,6275,3847,-3710,-30572,-1858,-31030,20929,-13515,8180,-32641,766,-12191,32649,23979,10494,-6895,-19497,26213,27113,-11388,16797,9180,-17681,-17211,-23380,15804,12330,-15525,14775,-29908,9009,2648,21321,-13819,4678,-1365,-18556,30630,21695,17402,29510,31433,-31871,4393,-8662,-11177,-21998,-13414,-5497,-20855,2696,-26047,30962,25199,-22229,-2959,17330,10067,-29304,28791,31656,-14512,-12564,-8889,-7818,-32626,19220,-6839,-4269,16490,-15907,18405,7837,-13800,16803,18880,-4921,5572,28235,17709,15271,-15366,11215,32296,-29133,4599,-6150,24918,5459,-4210,-19431,29146,5037,-578,-20767,24927,11774,-28821,27818,24274,-15437,28585,10254,4383,32115,1818,31930,24774,25159,13549,5201,-11633,-22559,12278,-10408,-17676,-29912,-18271,-22422,31583,-9188,24759,-10229,-8017,4067,-5285,14378,-14987,20555,-22468,23665,22681,-24757,3977,3979,-21211,7604,10557,21776,-15852,-11070,21603,16969,16549,17268,-7384,10051,-18060,13089,-12004,8591,-11098,18737,-1519,-13454,-13435,6103,5581,-27320,-16415,-21736,7272,32697,11076,-17325,-17186,-1032,25881,-21665,4340,-4904,-8080,-12422,-1608,-10122,-27508,-9876,-22899,2345,20259,-4189,9386,-9412,-9104,-24196,-20257,27139,-1
4965,2751,-12598,-11471,-21005,-2610,-390,-28974,2623,-21541,-31575 diff --git a/tensorflow/lite/micro/integration_tests/seanet/transpose_conv/transpose_conv2.tflite b/tensorflow/lite/micro/integration_tests/seanet/transpose_conv/transpose_conv2.tflite new file mode 100644 index 0000000..84da3bc Binary files /dev/null and b/tensorflow/lite/micro/integration_tests/seanet/transpose_conv/transpose_conv2.tflite differ diff --git a/tensorflow/lite/micro/integration_tests/seanet/transpose_conv/transpose_conv2_golden_int16.csv b/tensorflow/lite/micro/integration_tests/seanet/transpose_conv/transpose_conv2_golden_int16.csv new file mode 100644 index 0000000..77c49b0 --- /dev/null +++ b/tensorflow/lite/micro/integration_tests/seanet/transpose_conv/transpose_conv2_golden_int16.csv @@ -0,0 +1 @@ +-21733,-3869,-5914,9658,-25736,-20286,-20772,-20104,-4153,-5385,-352,28800,32767,15474,-8045,-6946,20447,12071,11657,32767,7344,15846,21236,3885,7073,32767,1894,-19475,-30158,17897,32767,12764,-18651,-14869,12879,29026,15486,2238,-14411,-16499,-21976,-32768,27768,-13856,15118,7663,-30383,-19061,12513,11765,-7175,32767,-27238,13037,29558,8554,-19426,-32768,-10511,21728,15880,13962,15811,-3744,9659,9434,32767,16693,32767,-32768,-32768,-1246,-9731,26971,23212,32767,32754,32767,-18658,-30370,32767,32767,5701,28923,21650,10964,32767,32767,32767,-6540,2924,-4884,1955,16542,32767,11411,-5344,20103,362,32767,7306,24487,32767,1790,16533,22131,32767,32767,32767,-29143,-2489,14305,32767,-13481,-32768,14451,32767,-13450,30477,2342,32767,26487,-32768,-22221,32217,-21214,3820,12454,18313,4910,24714,5981,32767,13572,6456,-7803,-7187,-20211,32767,26629,-32768,32767,32767,28633,-4118,-24219,-32768,770,6940,-21999,-28638,-32768,32285,-32768,-32768,636,-32768,18600,32767,-32768,-32768,25170,17754,10786,32767,-24728,-32768,32767,1947,-23942,32767,10444,-32768,32767,-2035,21584,32767,-32768,32767,19373,32767,32027,32767,-14167,32767,-32768,20391,32767,-32768,32767,32767,-5263,-32768,13348,23601,-7880,-213
31,-31504,28679,-9841,-32768,-29098,-27999,-32768,-18606,-32768,5188,32767,-24791,-6615,-4470,-32768,-32768,-24881,-32768,-31896,-20744,32767,32701,32767,27602,-32768,-32768,30121,19419,20500,16171,32767,9131,11849,32767,-24227,-32768,-11451,-32768,10636,-1360,32767,-32405,-3715,10175,-14288,23201,-32768,-9894,32767,-32768,-24115,-32768,-11552,10225,-27429,-26920,-32768,-1286,-9881,17808,9378,-32768,-27322,-32768,-12452,32767,32767,292,-11088,1531,-32768,32767,25412,-32614,32767,17601,-2744,-1524,-32768,7387,-32768,-8935,17476,-10054,24425,21258,27519,7530,-25398,3975,-27779,-32768,-32768,-5238,-13435,642,32767,-30906,7048,20666,14130,-32768,28118,-21568,-32768,20012,-32768,32767,12912,-14586,32767,-32768,-4464,-29223,-12223,2496,32767,-32768,-19394,32767,26477,-32768,-27057,-14271,-26032,-32768,-10513,-32768,-32768,-29403,-29533,32767,32767,-32768,-6269,-32768,-32768,-11032,17189,-14393,15939,8078,32767,-19514,-17668,-32768,32767,-9388,-32768,8658,32767,-28410,-29050,-9585,25250,30797,-10298,32767,4343,-17691,12258,-32768,-13788,-11897,-32768,-3097,-11751,-32768,-32768,-225,-32768,-32768,17948,18173,6756,6302,-32768,20915,-24891,-32768,-32768,32767,-27903,-32768,17284,-3498,26272,32767,-32768,-19375,18465,32767,-27902,-32768,7602,-32768,-13461,19917,-4961,-32768,23574,10442,-13275,23678,-32117,7876,32767,-6086,32767,-32768,-13825,1934,32767,-8113,32767,-13917,-30185,-18676,9576,20559,-31378,28126,3926,32767,-2202,-20495,-13912,-8689,-16818,-32309,32767,11812,-32768,-22005,11121,-13204,32767,-9918,32767,-31924,-20084,-32768,16525,16943,-31766,14325,1562,11264,9743,-13315,-2402,32767,22325,32767,32767,28987,32767,-8914,32767,3554,-14970,-10651,32767,32767,32767,15585,32767,18125,-20242,32767,15866,-19426,12954,32767,32767,28882,-32768,12167,-32768,-13936,32767,32767,32767,28656,16694,8483,32767,-32768,-5435,32767,32767,2534,24029,11369,32767,32767,18978,-11744,6854,21064,-32768,11857,32767,30153,5211,32767,-20767,23040,32767,-18029,-19431,16427,-32768,32767,-10653,-2
3050,10075,32767,-5658,-32768,-32768,4460,-1173,-27227,-32768,23793,-9586,-21861,32767,25640,-18079,15788,18696,-32768,-32768,25299,-32768,-32768,-32768,-10643,-26283,-1589,11217,1562,-32768,32767,1182,-19716,-6502,-11441,10816,32767,-32333,-32768,1203,2477,-9578,4644,20847,-14642,-3250,-32768,8098,30236,-32768,32767,-32768,-11911,12876,-102,17183,14724,-32768,-29493,-20654,32079,9592,-31604,29172,-32768,16597,-16317,-8698,32767,-32768,-32768,-32768,32767,-32768,-3219,-9783,6219,-32768,-863,-32768,-32768,20914,1564,-32768,-32768,-25434,11012,-22154,30243,-32768,-24174,2408,32767,32767,-32768,-29437,-32768,-9602,32767,-32768,32767,32767,-32768,-1903,3507,3054,12154,-32768,-20843,-32768,-32768,23075,-12909,-31780,-29753,-32768,-16457,26479,-14161,14645,23899,-32768,25375,5088,-32768,-14401,10875,12114,-32768,16417,-32768,-14125,3265,32767,-32768,27451,22499,-125,22531,25847,32767,32767,-29564,-24651,32767,7776,-22494,-32768,32767,-22430,-26156,32767,-32768,3844,32767,32767,-24620,-17917,-32768,32767,17386,-28052,-32768,-6397,14144,10141,-32768,-18895,-8138,-7047,26631,-16287,-31498,-32768,-32768,25295,-570,8288,2040,-32768,-6539,-32768,-15282,19476,7231,28996,-18306,-4306,9362,32767,-32768,-14960,26381,-5973,-24115,-8434,32767,32767,-32768,-16735,-31301,-8247,-16714,-16813,-32768,1989,32767,12189,-28771,32767,-19542,-32768,-19551,32767,-32768,-32768,-469,-32768,-12546,-5940,31533,32064,32767,-13608,-31426,20566,-20002,334,-32371,32235,1394,19018,16231,-27061,6678,32767,32767,12906,-16670,9026,32767,-32768,-32768,2373,-13156,32767,-12746,32767,32767,-32768,-32768,-17137,-7260,-5259,-32768,-32768,-20620,-32768,-32768,-32768,-32768,9755,-32768,-22257,19950,32767,-17052,32767,-32768,-32768,-32768,-32768,31228,-9053,-32768,-32768,10859,11476,-32768,-22646,9390,30822,-4616,-21456,-31427,16255,-32768,21545,-16061,-32768,-32768,-32768,32767,-4704,29091,-7486,-10290,-28191,32767,-32768,-10513,-32768,-32768,-32768,-27817,4985,7490,-32768,22854,-10573,-11307,-25429,4439,-32768,2
0194,-29106,-13268,6466,-32768,-4737,-25526,11078,-9895,-18333,32767,-32768,-32768,3043,27268,9712,-32768,-10396,32767,-32768,32767,16364,-32768,14764,-6082,-32768,-415,-2831,32767,32767,-11143,-32768,6616,27425,32767,32767,-32768,18194,-32768,32767,27549,-29284,-32768,3652,-32768,32767,-21178,-11273,-1912,24269,-32768,-32768,20356,-6133,-32768,9916,12013,-23222,3532,29304,-11150,-27476,-16362,-32768,-32768,-32768,-32768,-26801,-32768,-24999,-16755,1454,-32768,-11311,-32768,9705,11014,16121,-8772,-11646,32767,25970,-32768,-32768,2888,27531,22727,15003,-25732,-15978,32767,-32768,-32768,6582,-32768,-21681,22830,32767,-14261,-26557,-32768,2538,4684,32767,32767,-11144,32767,12679,-32768,32767,10481,25102,-13905,30026,-32768,13583,32767,3951,-32768,24972,-32768,-26664,-2983,-32768,-32768,-10918,-20736,-32768,-12223,32767,22389,3676,16022,-32768,-32768,31629,32767,22963,31855,-32768,9415,-32768,21031,-22795,5358,-32768,32767,32767,3315,-20899,-8954,18783,32767,19410,-21591,32767,32767,-14291,-13212,32767,-32768,-32768,6165,1310,-28438,-32768,-12472,-32768,19,32767,32767,-2631,-32768,-32768,-32768,16083,32767,32767,32767,32767,-1246,8828,-9926,23021,3221,32767,-14372,23979,32767,8075,-18636,6887,32767,-7422,-32768,-10377,32767,32767,32767,6460,32767,-26787,3852,-24982,21386,18683,-32768,-32768,8575,8974,9043,-32768,6858,32767,32767,32767,-10077,-30016,-1461,-26117,-32768,27784,10982,-32768,11840,-21730,16187,-18752,19192,32767,10262,14639,1399,28522,4910,-2024,-17995,-18440,24593,32767,32259,-29462,14475,1211,16023,9718,22325,32767,32767,32753,32767,-32768,-32768,14302,15393,32767,16659,-9265,27007,32767,-4828,30173,32767,-30636,-7738,-5340,-32768,32767,29216,-2677,4461,27589,-32768,-14809,-32768,20042,32767,-10176,-32768,-13276,-15214,32767,5342,-32768,-17464,-13461,32767,26438,-1097,-32428,21185,-7365,13496,31552,23206,-7690,-32768,-32768,-15696,-21139,32767,-18291,9871,-32768,-32768,32767,-32768,-29833,-32768,32767,-3517,6084,11455,-23117,-25027,811,-1875,-16407,-26748,
7035,11547,-32768,6698,-11163,-30898,29606,9684,4507,26065,14686,26077,32767,32767,-32768,30196,32767,1740,32586,-32768,32767,25588,-7221,16106,-27025,-23228,15504,16961,-29355,4925,-26055,27541,-32768,-27733,6542,32767,32767,-32768,-14634,32767,19527,-25062,-23596,-32768,22078,27475,32767,-13280,-32768,20800,-32768,-8532,12011,-4975,19752,32767,-32768,-32768,-29010,29192,-32768,32767,-32768,31838,-32768,-30707,26357,-1473,-27286,-32768,-15164,32767,32767,32767,-16501,4026,27428,-32768,8580,-14803,11058,11166,32767,32767,32767,32767,17793,32767,14138,-24027,-32768,9530,32767,-32768,-15384,17967,-13490,-32768,12806,-32768,18842,-32768,14659,32767,31313,4601,-11524,-32768,32767,3279,32767,32767,-17492,9730,-23089,168,32767,24260,17487,32767,-31170,-22212,32767,-12523,22817,32767,24162,32767,21233,-16714,32767,-3241,-32768,-28730,-32249,7341,3289,32767,-8384,-2112,-32768,883,5584,5884,9753,4742,32767,32767,32767,-32768,-21577,26315,-1791,11161,-25644,-8367,28498,-32768,-18194,13791,32767,6061,-22281,-32768,-720,11742,15758,24307,7661,-32768,26216,25202,-394,-15294,-31967,27692,13360,880,7674,6743,32767,-12286,714,8868,16281,27664,10079,-4745,32767,32767,10374,4676,26047,2796,-8805,-3929,-8243,27081,27928,-18544,-19418,4685,10901,-23557,-32768,32767,17222,28252,-8216,-32768,384,-11146,-15884,28430,-19847,32767,-3501,28763,32767,29607,22460,-8823,-21241,-14192,32767,-8554,6471,-15547,20048,-729,-21715,-13202,6584,-32768,449,12189,32767,19536,-6726,23690,837,-5293,-382,17569,16204,-15509,-10494,19708,-3605,32767,32767,5162,-15296,-3315,-32227,2147,13373,-5589,-9042,5646,2914,32767,-8277,-32768,-2034,32767,32767,5545,21736,32767,-2168,32767,30502,29083,-13856,32767,20327,32767,23112,30304,32767,20111,27226,18332,-1452,32767,-5361,-5004,-32768,21376,21035,-12447,13509,15566,14536,12050,-3618,32767,32767,29375,32767,32767,-8147,30216,32767,32767,32767,-32768,32767,8599,22881,26184,24898,25775,32767,23128,-13351,17497,14375,32767,8743,-32768,-8525,8975,32767,20046,-28012,-172
70,32767,32767,18631,-30136,-17720,29534,-26259,-17103,32767,32767,-21579,1088,32767,32767,3094,22372,18359,24388,12262,24795,-32768,12862,6516,32767,-32768,8242,13383,-6478,17416,-30235,-28642,27498,15999,1708,-27310,-32768,-32768,-1594,-7427,5649,-4704,-13647,-30932,32767,23823,23767,-21423,10558,-2114,-3096,-32768,2392,-32768,32767,32767,860,-32768,32767,32767,-4536,31809,13401,32767,8437,-28638,-32768,10815,-32768,32767,-32768,33,-32296,-846,1281,32767,-32768,32767,-22043,22573,5976,280,979,-32768,-32768,32767,-32768,-7135,-32768,-32768,23792,23018,-9256,-32768,-32768,-20218,6802,4725,32767,-25735,19708,32767,16877,-32768,11266,-32768,-12987,9191,32767,32767,-4174,-9422,-8199,32767,-16872,30285,4152,13855,-32768,-7323,6899,11867,11337,4915,-13858,-20310,-32768,32767,-17244,10559,-24698,-3840,-7385,-20070,-13631,12696,14688,-32555,-1548,-17323,8870,-3259,12702,32767,-19808,-16475,-32768,-509,-5435,-4900,26095,32767,-16556,-22821,-32768,32227,-25946,-15275,32767,32767,-14785,27807,-32768,-7673,-32768,-11839,6221,12093,5927,-3246,10730,-25621,-32768,32767,15546,6486,4563,-12226,-26120,-23988,-15167,-32768,10420,-22766,-16174,5848,-32768,-32768,31612,192,-13324,-32768,-7287,31148,-3419,1942,-32768,-13083,-31199,-16680,-867,32767,-23308,-25309,-32768,21365,-32768,-7931,44,19859,-6976,-3475,8273,-10643,-32768,-32768,-14100,-32768,1677,-1813,-32768,-32768,4282,13937,-2472,-32768,-28039,-21522,5439,-10203,12331,-1502,743,-32768,-32768,-32768,-32768,6672,-13169,-1254,512,-32768,11021,-12846,-26775,-7143,-16300,-32768,2883,-32768,7456,-32768,-21366,2790,-14464,12019,17951,5087,-22353,-16774,32767,12288,-3569,-18996,32767,-24549,-13820,15965,32767,17800,-10438,-5025,-19533,7929,32767,26613,22623,32767,-28277,16909,-23471,9436,5679,30124,-9216,-9707,-32768,-27165,12617,-10939,-32768,10084,-16612,8865,10809,-12211,15900,-3943,-26396,31691,32767,-28404,10857,-14231,16065,2544,-32768,32767,19844,19682,-32768,-32768,-12401,10717,6193,-32768,-17971,-32768,-25986,9141,-32768,-124
83,32767,28027,32767,32767,23336,30334,32767,3858,32767,32767,32767,32767,32767,20403,15588,32767,13381,-32768,11201,32767,32418,3912,26307,14726,-32768,32767,-16012,14153,15670,26284,1099,3219,4338,32767,32767,-10542,1363,308,18736,17782,27564,32767,32520,-32768,-32287,8708,-20268,-283,15638,32767,12820,-32768,9920,-6512,5932,-2234,-24173,32767,19560,32767,-25730,-5771,2411,14157,13923,-4996,-32768,-18310,32767,-32768,-30784,32767,-10508,-32768,-32768,8967,32767,8332,7735,26656,23152,29101,9976,-32768,-7903,-20730,-8448,-32768,17765,23788,3890,-32768,176,-32768,32767,-8078,-20477,-32768,-32768,-10139,7533,-20175,-2694,32767,24395,-32768,3617,32767,21669,-10212,-1931,32767,-10093,-22309,-16623,5756,20255,19972,19286,-5214,2662,-4636,-40,-6151,23035,30592,-20177,-23083,4550,-31318,-32768,-6868,-32768,1089,8998,-4486,-32768,-26132,-23702,-32768,-32768,-25376,-29364,-14258,-32768,-32768,-14391,6213,-32768,-24051,-12488,-32768,-32768,-32768,167,8458,-24013,7220,-32768,-21245,-9697,-2465,-32277,-32768,-18300,-17526,-19648,6331,-32768,-32768,-32768,-32768,3518,-27508,-26263,-32768,22496,-32768,-15278,-32768,-32768,-6833,-32768,-32768,32767,17738,1215,-17543,32767,-22115,-32768,-27019,32767,393,-32768,27782,20503,-3549,-18996,4375,11749,5003,4372,26975,-26402,-72,18911,-6551,2697,23388,13665,-25110,32767,18361,-20617,32767,9937,32767,17862,32767,29496,1,24539,-26396,-32768,-9173,-32768,-22546,31918,32767,4265,32767,31517,-24542,-32768,-32768,-21015,32767,-13346,-12697,32767,-29099,5379,-19399,32767,26434,29279,-20583,32767,-15113,1056,-19362,15970,10902,17628,-58,-32768,-19259,16547,-32768,32767,780,-1625,-32768,-25588,-32768,32767,30764,-4459,-14133,-14780,-31937,-6898,-32768,32767,3412,-9757,-1469,-2254,-29217,14121,-26811,-5547,-3226,-32768,14485,20868,18003,-2301,-21170,-11854,-32768,23166,-4325,2646,-32768,21925,-17441,30617,13056,-4842,-22423,-32768,-32768,17419,-32768,15347,32767,-5826,32767,-32768,-16892,-9343,25618,11478,22160,9421,-32768,-7143,-14914,-23671,-3276
8,-32768,-32768,-32768,-147,-32768,6861,-32768,-32768,-32768,-32768,-32768,-32768,-32768,-21983,3972,-790,-7508,10027,-29605,-32768,-22118,-8708,9675,-18097,28898,11325,-32768,-18493,-32768,-32768,-29863,-32768,-32768,-32768,-32768,21893,-11031,-12688,-32768,-32768,-32768,-32768,-32768,-9919,2734,-32768,17078,-32768,-8210,6831,-26646,1572,-3806,14083,21928,-32768,-15467,-32768,-32768,-7733,-32768,21689,-18656,-24459,-32768,4209,7998,428,-3191,3140,-32768,-32768,26763,-32500,-1012,4321,-32768,17424,6596,-32768,9022,-32768,5075,-21546,27272,-32768,-26200,-10814,-7882,-24384,-32768,7143,7158,-12169,22868,7959,-32768,-32768,-32768,23459,-28512,-17220,32767,-32768,-32768,-32768,-32768,-5814,-7138,-20860,9874,-3803,13447,496,-32768,-29330,-20621,-32768,26736,24455,21965,4508,32767,5717,-63,6766,-32768,-5415,-32768,5360,-32768,-22884,15587,-4288,-23965,25163,-32768,32767,-32768,6044,-32768,-26035,-32768,32767,19678,-7276,28014,32767,-32768,-32768,-18018,32767,15072,32767,23227,-29464,-32768,32758,-32768,3168,-32768,-22105,2944,-29274,17006,-12575,-12057,-1142,-22444,-12825,-27076,18462,-11836,-13401,-14403,-3901,-32768,12633,-27832,-11732,16301,24255,30127,-530,-2501,-28783,-22894,-26824,-614,-1990,32767,32767,-16372,26726,28076,14376,22305,-20322,32767,15644,29842,-28167,32767,-5743,32767,21546,-14627,-24802,-15007,32767,25096,-20419,21775,-10691,-32768,-25185,-5388,-31651,-16633,12708,16649,-17139,-15591,16347,32767,-3824,-24390,11092,-12431,13188,-32768,32767,32767,-13578,30691,-12610,19843,9601,-24434,3635,1938,-29513,32767,32767,28422,-30330,14342,-18853,3805,30418,9909,32767,20772,6444,-32768,4206,32767,7842,32767,9136,12384,8111,32767,32767,32767,22970,5325,9655,30338,-7446,-27165,32767,22731,-9774,-26060,32767,9728,-10821,27338,28111,25558,-22824,11199,373,-10465,22818,32767,-7381,16835,19759,32767,20630,20506,-3110,32767,32767,32767,15398,-2759,30659,14029,2440,-32768,24630,21985,-13710,12433,23852,-2409,32767,-16933,10558,3960,-14760,-10510,8067,5058,8105,16009,2
0847,28514,4229,-32768,-1833,-2118,-18647,27668,11801,32767,-32768,-550,-10592,18655,-4036,21446,-32768,-11452,-10191,21197,-32768,-23470,32767,-9908,-28134,-28702,-29748,-5856,18010,-14505,2585,25960,-6546,32767,27498,-32768,3125,-3068,-21322,-31308,-1466,-20120,-32768,5699,-13132,-9109,13460,5754,-13855,-28486,-7872,-24658,3571,7918,-12876,2762,-11881,-26171,-21781,-18289,24789,-8914,-19819,12727,20262,7494,-5601,-32768,-10784,114,9298,32767,32767,32767,-4567,-1364,616,-22819,16051,-5253,24757,-5874,-30236,31887,-6989,-17144,-32768,-5328,-8906,-13231,32767,-16729,1935,-27778,-29142,32767,12023,-32768,-22161,32767,24526,32767,22395,-3695,-18550,-32768,-32768,26925,-684,11679,-12697,31097,32767,-7132,-10553,-19391,28394,-20086,-12304,-32768,24086,-22648,1856,-31231,-22113,32767,-8017,24687,20699,1754,27923,16715,13109,21000,18131,32767,18721,32767,32767,22355,5419,-32768,12410,-4994,11325,32767,-10475,-2297,28886,-285,-28923,23535,-17687,32767,10723,-14958,-23248,-16553,20666,21069,11376,-32768,-32725,1375,-6634,16827,21865,32767,32767,32767,-32768,26356,-7948,-10449,-8173,-14757,-5988,9165,20861,32767,26058,-2964,32767,494,32767,-29867,12655,21767,1707,30471,4408,-21811,32767,32767,-7512,32767,18785,-1734,9359,12172,28337,-8713,22534,6865,-2195,1452,25043,14547,-9426,-11001,583,-6233,-23083,12037,32767,9676,19341,22291,-24808,32767,11523,22292,10009,1113,-8637,26300,5521,-11089,10265,-32063,31870,-23302,32767,32767,29172,28963,7434,28852,30254,-11337,-16125,-7179,-4622,24671,28445,32767,32767,8890,622,11839,9328,19400,32767,-7353,1772,21518,-28945,-22322,-30822,-16007,8935,19741,-10612,-21754,32767,-6382,32767,14837,-16545,-7105,18995,25513,32767,-3066,18697,32767,-13869,3405,11293,7303,9363,-16876,23160,24680,-25763,-2865,9085,28513,-19852,-19152,32767,-11701,-3940,14980,-28114,-4763,-6751,17085,9436,15057,-32768,22790,-2792,-18107,32767,-21180,19442,16572,-15769,-26034,-32768,-10282,1065,-2042,20182,17692,-7926,-13524,4084,13172,18225,32767,32767,31631,30980,7849
,32767,32767,32767,32767,32767,7957,10363,32767,4890,-4690,32767,-16729,32767,-32768,-32768,32767,24104,4,32767,-17412,32767,32767,-9102,19866,21473,32767,32767,16585,-25733,32767,32767,-20541,32767,-918,32767,-32768,5238,32767,32767,14224,23192,32767,32767,481,14775,32767,11069,32767,32767,10821,28248,11772,3686,12372,7876,7788,-13199,15002,24443,-32768,23715,32767,732,422,-30309,32767,-32768,-23386,32767,28051,-7305,26493,-19830,-2085,32767,-27832,-4271,24018,32767,18996,-21485,13060,4106,-1399,31803,-12428,-9486,24305,3268,-25102,2919,-32768,-31884,1971,21638,-6677,-21428,1141,-22944,7604,32767,4811,12186,1926,14421,-32768,32767,32767,-9295,32250,32767,-8109,32767,32767,-7778,-24197,2163,16490,30203,15232,-3415,11000,-32768,-32768,-10903,-18418,-32768,-27601,-22410,-95,23418,-29983,30006,-24643,11467,28855,-7785,16160,8793,3537,-350,32767,6148,-32768,-32768,28166,26973,-10584,14221,-32768,-16565,-32768,-4259,29278,-32247,-32768,32767,-14232,-31763,-11261,17762,12833,-194,-3950,32767,-13337,-13466,1511,-16815,-3687,2195,-32768,32767,32767,-32768,1650,-9070,16649,-21103,-15031,3955,-32768,-12549,-32768,15819,-22671,32767,32767,23755,32767,32767,-7004,11225,-41,-32768,-641,-13416,-10891,4024,-21062,32767,-32768,15757,-1521,-32768,14453,-10250,32767,-4586,-11130,-18211,-25985,1170,-21613,20198,-6722,12275,32767,8115,1122,-32768,32767,-8793,25244,8091,32767,-14032,-32768,-32768,-460,6394,-12019,-25643,27608,31781,32767,-2087,12284,12571,8518,32767,-19357,14797,9758,-3655,4796,-25296,-4741,22692,-32768,-32768,-32768,-32768,25376,5758,-32768,30989,-32768,10590,-32768,32767,-32768,-493,-27180,32767,-25067,-32768,32767,-7065,106,-32768,-13046,-19509,31224,-10867,-26462,4265,-32768,10309,-6407,32543,-16976,-15831,-32768,-32768,25706,29693,-15833,-11082,-6700,-32768,-32768,17851,-23987,11181,-7938,32767,-8429,-11908,-21393,10193,-10286,-32768,-29291,9071,10723,-32768,17681,-464,2404,32767,32767,32767,-32768,17227,24652,-5349,13318,1363,19747,3039,-22323,32767,-32768,32767,-
5459,-681,-20975,32767,-12415,-27930,32767,23624,32767,32767,32767,10884,32767,-25520,-18184,32767,15579,-23175,-9576,-10202,4098,4915,-20577,32767,32767,-1646,16951,4586,32767,12500,15459,-21603,11091,-11208,21571,9798,-32768,-6631,-13941,-17950,26575,-32768,32767,-32768,28393,-22169,10289,8878,32767,-10700,-7933,-3713,-4857,24776,32767,8107,32767,6401,32767,-6436,32767,-6295,26684,32767,32767,2690,-2166,5750,-23010,11112,-17301,7139,-32768,14175,21904,-32768,-21302,4943,1007,124,32767,11661,32767,-21915,30972,32767,29114,-32768,32767,32767,-32768,32767,16591,32767,7331,-32768,19401,17510,-21438,32767,27980,22581,18254,2459,28565,17393,32767,8972,18990,29165,-417,-9710,-7443,6085,-26222,-32768,-17265,9757,-32768,23013,-23214,-24206,-3032,7841,-15575,-32768,-32768,8417,-1076,-29376,6934,-22954,12769,32767,-32768,13230,535,6060,-22749,-32768,23873,32767,-20867,32767,-7954,-32768,-1037,8513,-20302,23634,-26360,-32768,-32768,-4399,-3916,-15529,13051,-29764,6340,32767,-8267,-9165,-2954,-32768,-260,32767,-6714,4634,18182,5575,5293,-1571,-1176,32292,-2202,30536,-23263,6532,-11784,5813,4739,-32768,-32768,-32768,-32768,-7415,-25284,-32768,-23163,-32768,12510,-32768,2433,-24216,-15858,-18950,-32768,-32115,-6236,-1485,-32768,-21548,11810,-32768,-32768,-32768,-14392,-32768,-32768,-12648,-20945,-32768,-17311,-32768,-20167,-32768,-32768,11112,3977,-428,13121,-32768,7454,-32768,31940,-19489,-32768,-32768,7314,-32768,-32768,-10453,11386,7322,-32768,-32768,11830,-25452,-21760,22109,-17942,-14982,-32768,11383,-14764,-2590,32767,32767,11095,-11878,362,32767,7347,-32768,20883,-32768,11006,-10233,-1477,9899,16803,32767,-32401,-32768,32767,3963,-21489,32767,-2978,32111,32767,-5233,-3950,3092,-12360,5271,-671,-1298,-32768,12670,-8848,-25741,24559,4937,32767,-12552,13584,32767,24402,-1664,4576,-32768,20154,-1031,6842,22452,-32768,10591,-23381,32767,-9540,12081,-16319,32767,-28293,32767,-25997,32767,4033,-9471,32767,13313,-32768,-26267,7801,-32768,5922,-32768,22415,32767,9682,-14188,32767,
-31635,1900,32767,-21105,32767,-11860,22239,-3352,-32768,5135,-1921,-23280,1007,32767,-32768,-9392,-4733,-2318,5406,23583,-6487,14565,-32768,5987,5588,-10302,-20742,6720,-2206,18136,-2553,17681,-32768,32767,-19268,-8947,-17324,32767,32767,-7234,1696,-6587,-8309,-32768,32767,-32768,-32768,-32768,4661,32767,-32768,-32768,-32768,-32768,-7634,-32768,6231,-25574,-1942,-32768,-32768,-10670,-23866,-8758,-13009,-32768,-32768,13447,-6228,-2874,-32768,-32768,-30683,-23962,-10335,-21824,709,-25699,-5072,-32768,-32659,-32768,6030,-32768,-25495,-32768,-4115,-1103,-32768,-32768,-32768,-32768,16482,-163,-16034,20141,-32768,-18293,-32768,-32768,-12446,-32768,-11599,-32768,-16545,-18852,32767,-27631,30366,15831,-8715,-32768,21971,-12589,-12956,32767,-32768,11916,12524,-32768,-19542,6376,-21524,-29496,-32768,-29676,-16624,1725,-18764,-29997,-32768,-11298,32767,-22216,-10138,-29727,-32597,-32768,-28036,-11126,-5562,-32768,-23546,-32768,27883,-10436,-32768,-32768,-32768,4825,8057,-580,15076,-5314,-32768,32767,-32768,-14230,32767,-32768,4663,22211,-32768,-5157,-5264,-25852,-21712,-20224,-32768,-414,-740,-24437,-17244,-32768,7961,-21314,-23959,-29012,14340,32767,-25802,-28718,25114,-10437,-28683,1555,-14876,-32768,-32768,-12169,26035,-5832,-10772,7095,-2037,30990,-19219,32767,32767,20452,-32768,32767,-94,24678,-21028,24894,32767,-22212,686,-25612,-28169,-32768,32767,23821,-32139,11011,-18161,32767,6776,-9484,28533,-32768,17070,9769,-32768,-12315,-18451,8233,-23816,-32768,11616,-23203,32767,-10854,-16610,32767,-32768,1457,987,-19785,-14330,8401,17603,32767,32767,-17588,16388,20993,30728,29026,32767,32767,3627,26962,21199,-8283,-19158,-18667,-32768,8702,12528,-32768,-32768,2729,32767,-12329,12352,-2802,-3796,-32768,-12179,-14266,-2640,32767,-13670,32767,-26665,-32768,24630,-11116,-432,6081,32767,32767,-23746,8946,-13751,-32768,32767,-14298,32767,-16659,-32768,32767,-10314,-6391,-32768,-11214,-27591,32767,24470,32767,-32768,20677,29401,1614,32767,31639,10975,32767,31628,-30154,9912,28319,-2
3531,15799,-27046,11210,8899,-27175,7904,9961,9981,-3909,4746,32767,32767,28427,-31367,-13506,27241,4752,-19309,12108,32767,26960,13312,32767,32767,7251,7525,32767,-2296,8605,32767,20317,32767,-32768,10599,-11166,13198,32767,-16676,11289,32767,21038,32767,-32768,32767,32767,32767,-22244,-8556,-11602,-21420,13535,32767,6549,20900,2567,-9100,-7500,17278,8631,-32768,7651,6572,-24583,-32768,-32768,15382,14462,-5926,-1554,-20421,-507,-32768,-10752,-17366,29401,-29668,-3355,-32768,-5496,-11940,-28297,3946,-14226,-32768,32767,2072,-32768,-99,2513,-32768,-32768,-16907,17473,-32768,-32768,-25158,8959,1668,27900,-13866,-23572,6613,-14681,-5372,-14405,-32768,24405,-2196,-29841,-25077,-23349,-28934,-21727,-1130,32767,3313,-32768,-11444,-23488,-32768,-32768,1879,-32768,-30263,29010,7885,-5494,-12116,-8291,16476,4922,12862,15054,16313,3103,9419,-7621,-4129,-21824,-1193,-13047,-32768,-20570,-11705,-8065,16253,32767,24254,32767,-26193,24987,9329,32767,-32768,22527,2230,-2271,32767,32767,-19965,32767,-25120,32767,2207,-7321,24216,-12689,32767,-32768,17866,-3959,4776,-32768,32767,-12253,-4905,-9639,-19929,20018,10653,32767,-15814,32767,-69,-31303,30700,24129,29916,14542,-6833,32767,23936,32767,5882,-9379,29605,1898,-8747,23817,32767,1005,10700,-6046,32767,-29749,-32768,-1072,32767,-1737,26335,32767,11514,10616,-13471,32662,1826,32767,32767,32767,-8064,-32768,32767,-32768,32767,24700,32767,-67,21579,32767,-5873,155,32767,27334,20718,32767,30749,2928,32767,-3948,20727,-7658,4320,9379,-32768,20719,-30940,10674,32767,-9629,31059,-8096,8740,19362,12472,32767,21305,-25739,22986,-7868,1239,21526,-4525,32767,23536,1845,4859,-10470,-22296,14436,18832,6081,-12429,-522,10832,-13183,-32768,-13818,10437,-3152,25386,4550,-16624,11374,-12963,-5194,2591,-16421,32767,3010,9910,21209,-11684,17507,-8360,18047,21419,1888,-5153,32680,-5718,51,-8928,5249,26236,6845,32767,16099,-9339 diff --git a/tensorflow/lite/micro/integration_tests/seanet/transpose_conv/transpose_conv2_input0_int32.csv 
b/tensorflow/lite/micro/integration_tests/seanet/transpose_conv/transpose_conv2_input0_int32.csv new file mode 100644 index 0000000..fa38108 --- /dev/null +++ b/tensorflow/lite/micro/integration_tests/seanet/transpose_conv/transpose_conv2_input0_int32.csv @@ -0,0 +1 @@ +1,3,42,32 diff --git a/tensorflow/lite/micro/integration_tests/seanet/transpose_conv/transpose_conv2_input1_int16.csv b/tensorflow/lite/micro/integration_tests/seanet/transpose_conv/transpose_conv2_input1_int16.csv new file mode 100644 index 0000000..9c8e5ab --- /dev/null +++ b/tensorflow/lite/micro/integration_tests/seanet/transpose_conv/transpose_conv2_input1_int16.csv @@ -0,0 +1 @@ +-21857,21067,-13211,13556,-25895,12679,23373,-30383,-2637,6266,23780,-27617,-21499,-31635,-7476,-7953,28137,-18999,-3247,20894,30809,16428,27379,20152,2126,1743,436,-14827,-28522,-2632,-24458,-18466,564,4730,4060,-20378,-26500,5513,-11967,-23866,27662,-24024,-24712,29951,24335,7619,-25657,-18443,17521,-12916,-29738,23294,-32127,-8533,29841,-24416,-24104,-6767,-6368,-19295,-9163,4113,-20464,-4589,-27840,12830,28638,-30552,15008,16769,-32358,-3284,29822,22671,26268,-25733,12499,17926,12079,-8316,-28166,24437,-9071,28679,4797,-17828,-21994,-11430,7709,-15489,28118,25078,23134,10908,27315,-21184,-1550,-8652,-11563,-8889,-18589,-13509,1692,29257,29373,3444,-13965,-28374,-22438,9311,7494,12828,19931,21339,-20820,-27148,-22172,-4388,-23150,22093,28278,30007,-17707,-30210,-6377,-20241,6751,8349,-6335,18568,13773,21347,-21012,7184,11687,-6024,6807,-16481,31479,30994,27371,8394,1622,-26057,27127,11109,8275,-17820,-25941,18490,3053,-28119,30740,-31844,-7467,20455,17876,31140,-21417,-7047,31433,25498,6577,-11138,-26602,-10454,-2819,28388,-6762,-9481,27745,-7415,-10459,-16864,-20432,-24836,-18100,-29029,-27676,8067,-21447,21989,-24544,17684,9541,1764,18293,2121,12245,27285,-7393,26794,-23304,-27934,-17930,32247,-30015,19087,29581,2668,-10672,-22386,15157,-11350,-1746,-25778,15122,23769,-29503,-24643,-15288,-26432,2591,-9139,30972,2
3892,11178,22324,12562,12183,-20166,-7781,13002,21584,7262,-15369,-13199,-20131,11162,31821,-10142,-27333,30673,9050,-25431,-12754,27688,21057,-32057,122,-6485,13343,-26101,13641,-7696,13538,-1559,-11827,-31752,-18460,-2901,11091,-28461,29197,-13254,-12676,-21123,9901,10995,-20445,7875,28579,-2330,28149,5904,11417,13893,31925,1017,29769,4827,-21077,31562,-4424,841,-2854,-30644,-16296,10501,15593,11631,3743,22358,-29701,21912,4664,-20573,2680,-4757,14828,31187,-342,18627,9861,26982,32546,-21930,3457,-30424,15014,-9274,25168,-4780,65,-307,7156,-616,32650,17732,-20042,15738,-11391,-18638,18486,-20701,-32598,-17578,-6861,17577,-25224,-26112,-29910,13454,32301,11323,-19521,-8968,-12738,13738,-18144,10541,10992,-29931,14763,12234,-31698,15123,-32128,-27736,-21755,-8304,-3814,5182,-3828,-32532,-8866,28028,-17204,29421,32182,-13777,839,-27475,-4466,9213,-19118,17814,13471,-14438,-26950,19398,459,11494,13148,-3636,10312,-13944,18224,15133,26307,22598,10009,12051,1164,23833,-16182,21582,-28715,-26332,-29862,17995,9920,29257,24769,8766,-31644,19412,10079,21129,-465,3046,13211,26879,-5465,16368,4203,-7013,-27983,23882,-11600,-22725,-21511,16493,-18208,31241,-1712,21325,9169,-25519,19693,-27788,-6100,11618,16285,-21651,8477,18167,31506,27825,-32613,16004,-13268,-14508,15000,7177,-16337,-3076,19728,-26426,3047,-22284,-3300,-29200,19640,-22833,25330,-24692,11675,27747,2529,22053,15711,-25927,4779,-6679,30980,16428,-21058,-8808,25411,26667,-15765,-27057,4677,-4357,15757,-32664,696,-11495,-1511,9218,-23481,32678,26896,32757,2829,21150,-19171,-2686,6831,-24710,-27519,-14523,-13143,5068,16673,7919,-18669,-14847,21673,28041,-1800,184,-4002,-31765,26196,23626,8118,8247,-18817,-20668,-26390,5151,8008,180,-5507,-11422,-9583,-7653,29918,13982,-26493,27982,-8182,23960,-11550,-1461,-12325,-27683,6033,-15793,-27573,17909,-12609,-523,8785,26037,8224,-5028,-14103,-14003,2132,26915,1869,11788,7853,-3574,3064,-4635,19201,6185,-11753,-28683,3184,-32676,6354,-238,-20228,12450,-339,-9313,-24063,-106
26,21373,10517,-6762,-20666,839,19917,8628,-26048,5432,31686,-20157,14196,14206,-21847,-24696,-16202,-7659,-10779,26783,-1031,-7577,-13354,-29404,18153,32335,-4038,-7112,28509,26534,30470,-30843,20235,-16253,28166,5758,-27134,9724,-12854,4762,-15486,-28552,-4632,-11481,-31038,26276,-17212,32216,11363,-18081,-3484,-2602,-28810,13548,5604,-18947,25164,-10625,-19093,-8330,6442,-26264,18929,9916,24406,-3486,-10622,-25371,-18291,-6242,-10854,5208,-12624,-5489,-29189,27258,-25797,-23631,22226,23230,20113,-2478,-13439,23906,15380,26396,-17326,26891,16615,-15381,-14094,-21820,-21182,7413,-14567,4512,54,17086 diff --git a/tensorflow/lite/micro/integration_tests/seanet/transpose_conv/transpose_conv3.tflite b/tensorflow/lite/micro/integration_tests/seanet/transpose_conv/transpose_conv3.tflite new file mode 100644 index 0000000..58e941e Binary files /dev/null and b/tensorflow/lite/micro/integration_tests/seanet/transpose_conv/transpose_conv3.tflite differ diff --git a/tensorflow/lite/micro/integration_tests/seanet/transpose_conv/transpose_conv3_golden_int16.csv b/tensorflow/lite/micro/integration_tests/seanet/transpose_conv/transpose_conv3_golden_int16.csv new file mode 100644 index 0000000..923d7eb --- /dev/null +++ b/tensorflow/lite/micro/integration_tests/seanet/transpose_conv/transpose_conv3_golden_int16.csv @@ -0,0 +1 @@ 
+5212,7515,-22411,-32768,32767,8917,-27313,-32768,-30919,-4232,-32366,7843,-18464,-2989,-32768,-32768,-32768,-32768,-21608,-8167,-19683,11053,-17499,4781,-32768,32767,-32768,17497,-15867,-525,-32768,-10641,32767,17023,14679,-32768,4125,-28479,-32768,-3820,6128,15210,32767,13951,32767,32767,25688,-29991,32767,-18058,1549,15826,-17814,23705,-32224,-1823,32767,25878,5845,-32768,-29661,32767,-30886,-32768,-32768,32767,-26158,32767,-26443,-9155,25485,5543,31322,12469,30439,4626,13541,32188,19243,32122,-18349,32767,32767,32767,-29068,-22316,-15981,-1126,-29349,4355,-32768,13269,-32768,32767,-32768,-15649,15856,-32768,-15207,-32768,-32768,-32768,-32768,32767,-32768,-32768,-32768,4582,31131,-21218,-32768,-32768,32767,-32768,-26647,-32768,32767,2171,-6251,-7502,-32768,-9101,14728,-32768,-32768,-17059,-32088,-15134,-21375,-32768,-16377,-32768,-32768,-32768,13018,32767,16,32767,-27524,-3315,-32768,-5458,28559,18550,24798,8491,-32768,32767,-32768,-13407,18297,13261,-32768,-32768,-27098,7275,20941,9469,-30843,-32768,32767,-32768,-21806,-2967,-29304,-32768,23192,614,-7241,-32768,22989,32767,27560,-18722,13844,-32768,-32768,10163,32767,-32768,-32768,-32768,-32454,32767,-32768,32767,-32768,22390,4723,-338,-32768,32767,-3131,32767,32767,30387,32767,32767,13279,32767,-32768,27617,24492,-32768,32767,-30417,-32768,32767,-32768,17373,32767,-32768,32767,27439,-32768,-20706,17899,32767,10819,-15528,32767,32767,32767,32767,24446,29617,-12738,32767,32767,32767,32767,-7399,32767,-32768,-20094,10186,23954,32767,32767,32767,-32768,32767,32767,-18023,32767,32767,-32768,-32768,32767,32767,-32768,32767,32767,-24010,-2773,20465,32767,32767,30669,-32768,6520,-9476,-32768,13150,15722,9024,32767,-24141,32767,-12767,-32768,-6073,-940,-13519,-32768,10526,32767,32767,20689,-22126,32767,-32768,32767,-29878,2212,32767,-32768,-1332,4757,32767,17370,-32768,-25076,-20035,5339,32767,32767,32767,32767,-14821,-7524,22338,32767,18477,21429,32767,-23037,32767,-3056,-32768,8586,-19439,32767,32767,-32768,27138,-238
74,-28188,32767,32767,-32768,-20238,12290,32767,20290,32767,8837,-15115,32767,32767,-17076,-27013,15595,32767,32767,-32768,32767,-32768,10914,32767,32767,-32768,-32768,32767,-5426,1265,32767,-15875,29171,-22377,-18276,-2085,-32768,-31410,-32768,-32768,-32768,-32768,12318,-32768,30227,-32768,32767,-25697,-32768,-32768,32767,-28755,-8394,-10251,-9760,32767,-17330,32767,32767,-25820,-32768,-32768,-31986,32767,32767,-18565,-31268,-32768,-32768,32767,-26916,32767,-32768,26098,32767,-32768,-8779,-32768,-32768,97,-14081,-32768,-32768,32767,-32768,-32768,32767,-27386,-11769,-32768,-24364,18331,-32768,32767,-32768,-16699,-32768,-32768,-2668,-18865,-32768,32767,32767,32767,-32768,32767,32767,-29995,32767,-32768,-12821,-19857,-32768,-32768,-32768,32767,-32768,-32768,-32768,2656,32767,32767,22192,-31085,30397,29020,28374,-30938,19633,-17215,32767,13481,20623,29773,6516,2685,-3983,-32768,-2101,-31020,-12919,20738,-32768,21915,-32768,-5190,5250,-32768,-15267,-32768,24150,-19706,-32768,-32768,25178,32767,-32768,-32768,-32768,32767,-26207,23002,32767,32767,-1554,-32768,-5538,32767,32767,15836,1356,32767,20457,32767,29576,-29282,-32768,-32768,-19424,-24764,2051,32767,10444,-32768,32767,32767,-30747,11106,3686,-32768,-29691,2510,22173,-11570,32767,11449,32767,-32768,31150,32767,-32768,32767,32767,32767,32767,25520,-28991,-21466,-32768,32767,32767,-32768,32767,32767,11426,1468,30841,32767,-32768,11498,18459,-32768,2471,-22969,-10760,-14702,5560,32767,32767,-12597,8532,15468,-7307,32767,32767,5964,-32768,5980,16273,-11939,-32768,32767,-3138,32767,32767,12707,15464,12186,-32768,1904,-32768,32767,15386,-32768,24576,-23541,-11105,32767,9941,31881,-29134,-885,-3528,-14008,-4960,-32768,32767,-7348,-32768,-32768,-11765,22442,-32768,10120,32767,-31041,-11726,-32768,3906,29628,32767,32767,-32768,32767,-32768,12643,-32768,25358,-21910,-32768,329,6063,32767,13100,-4811,-32768,19061,-19708,6626,-32768,-20912,-26576,30236,16516,-13405,-31264,3237,-32768,-32768,-32768,-32768,-32374,12348,-32768,-32
768,-5190,10209,-20045,-32768,-22612,-23073,-32768,-32768,-30678,-31922,25883,3403,-31384,32767,11190,32767,26906,25587,17070,10554,-32768,32767,32767,-32768,32767,-15148,32767,-32768,32767,-32768,-17428,32767,14015,-32768,30764,32767,17019,-32768,32767,-32768,-24138,14726,32610,-27045,-7621,-2678,8299,32767,5119,32767,32767,-12453,-11983,32767,32767,32767,-17787,4767,32767,32767,-5386,32767,32767,32767,-32768,32767,32767,-1366,-4249,32767,-32768,32767,32767,-32768,-21605,-27215,32767,18387,-2987,32767,-32768,32767,32767,-24389,-32768,-32768,88,32767,-4135,32767,-28792,-5472,7464,32767,32767,23894,32767,-32768,29792,-32768,29377,13920,-32768,-32768,32767,-32768,-32768,9499,32767,32767,-21327,10610,-20087,-32768,32767,-32768,-11030,-16272,-21962,32767,-32768,-24030,-32768,-28826,32767,-7747,-32768,-25648,31418,-29466,-32768,-29016,29225,22789,336,-32768,-32768,-27948,-32662,2060,11338,-32768,10134,28693,-28141,14236,-32768,-23965,22935,-28526,-32768,-32768,-32768,-31291,-32768,32767,-30829,-10841,-32768,-15159,32767,-32768,-32768,-32768,-12030,-29052,5782,-32768,32767,-21041,11285,32767,2014,11362,-32768,32767,-32768,-32768,-32768,32767,6212,32767,-32456,10525,32767,-3332,-32768,-14808,32767,-32768,32767,-8956,12850,-17543,24615,-4165,-24063,3150,24394,32767,14032,10121,-32768,-32768,23558,32731,-23143,32767,32767,-32768,-8512,-31156,16922,5814,32767,32767,6242,32767,-32768,-32768,-32768,32767,32767,-18944,-32768,22434,32767,32767,-12626,2811,32767,32767,-8924,25956,-14544,17619,32767,20429,-32135,32767,-32768,32767,32767,-4377,32767,-32768,1456,32767,-17979,15163,3621,-32768,12309,-10810,32767,-13815,17751,32767,-32768,-6773,-22458,32767,-32768,-7699,4396,27110,32767,32767,28938,27514,32767,20758,-32768,-32768,2950,15229,-23648,-2885,-4646,32767,-32768,-32768,-27509,32767,32767,32767,8315,-22349,-32768,32767,23128,-32768,21864,32767,13722,32767,32767,-32768,-32768,13647,32767,-25446,5724,-5548,9415,-23638,30960,-32768,-4166,-13990,-29039,32767,-32768,-30743,-32768,-
9354,24132,32767,17525,-32768,32767,29648,-32768,-32768,726,15816,-30810,-32768,-32768,26160,-32768,12775,-32768,-32768,-32768,16587,-12331,-32768,-32768,14040,-22682,-25074,-32768,-32768,10915,32767,-32768,-32768,31127,-32768,-28117,6565,-11780,608,-32768,1188,-32768,-32768,-32768,-14643,32767,32767,8404,-17058,19608,-20419,-32768,32767,-32768,20313,32767,32767,32767,-8343,32767,32767,31917,32767,-32768,32767,32767,-32768,32767,-32768,28064,-21312,-29618,-32768,32767,-9230,13823,-32768,-10448,-23118,32767,32767,32767,-4663,32767,14147,16218,-32768,-15466,32767,2853,-3872,-32768,32767,32767,-23415,32767,-15406,32767,32767,-32768,-7779,125,32767,32767,-1243,-26913,5626,24112,32767,-32768,21584,-9102,-32768,-20902,-32768,-32768,32767,-30711,-3385,-32768,-32768,3612,-32768,-32768,-32768,-10668,-7811,-32768,-28963,-32768,-32768,32767,32767,-32768,-16121,32767,-32768,-20385,32767,-10943,-4754,32767,-32768,32767,16329,32767,32767,32767,-15152,-29807,32767,32767,32767,-25279,23150,32767,-8117,32767,-32768,32767,18447,-3764,32767,-29816,11950,-32768,-26763,-3331,32767,14663,32767,22177,32767,10590,32767,32767,24230,32767,-32768,8920,32767,3267,32767,-5082,32767,-32768,-13356,-16001,32767,32767,32767,-32768,-32768,32767,32767,32767,2722,-32768,32767,-9273,27594,15080,32767,-30737,32767,-12909,32767,-1558,5255,-9358,-32768,32767,-11043,-6208,32767,32767,-32768,23787,-32768,13029,32767,-32768,-12673,-28833,-32768,32767,-32768,3193,20660,32767,-32768,32767,-32768,26690,7391,-32768,-32768,32767,-32768,-30510,-8637,32767,17917,32767,-17999,3611,32767,-8644,-29432,14362,32767,-19679,32767,-32768,16129,-32768,-32768,-7537,-32768,-32768,32767,14855,-32768,32767,-32768,11470,-32768,18178,-32768,-32768,-22077,-32768,-32768,-32768,-32768,-18553,11812,-32768,1793,-32768,-32768,32767,-32768,-32768,21115,-18638,-3647,-32768,-14734,2585,6017,-32768,26894,1994,-11979,-32768,-20192,23,32767,10023,-32768,-32768,-32768,-32768,-32768,-29260,32767,18537,32767,-32768,22213,-32768,32767,-11737,327
67,24822,20851,16839,-8687,-32768,-32768,-32768,-32768,7936,-9033,-3875,2076,32767,26962,2954,-32768,-32768,-13088,-32768,-11604,-32768,11803,19587,29437,32767,-32768,24527,15527,11269,-32768,-27328,32767,32767,27639,1927,4243,31011,-19960,-14003,32767,32767,32767,-32768,29118,32767,32767,-6662,26496,32767,1400,18559,-26084,-32768,20587,-5054,15009,9795,9239,13457,24643,19266,-32768,-19579,16112,-32768,-32768,-32768,-27966,-32768,-14083,-27649,6591,-32768,-714,-32768,16269,-32768,-15293,10506,-32768,-32768,24743,-9093,-32768,-32768,32767,-32768,10899,32767,22089,10096,32767,32767,32767,32767,32767,7176,-28066,32767,-6556,-32768,11509,2945,-32768,-6457,32767,24875,-32768,-10108,22556,-14368,2617,28921,32767,-32768,-32768,15147,-6721,22130,32767,-4177,-19427,9959,7540,-32768,32767,872,-32768,-14637,15094,-32768,-24824,32767,-898,-30785,-32768,-32768,-10242,27309,-10955,-23073,22419,14634,32767,-32768,13433,32767,32767,-32768,-32768,6022,-32768,-32768,-32768,-32768,-32768,32767,-32768,-213,-26259,22567,-32768,-32768,-25614,-32768,-32768,9841,-32768,-13092,-398,68,-32768,-32768,-32768,5251,-32768,-21390,-1806,-32768,-32768,32767,32767,15616,-2246,-32768,16743,-32768,-22559,-32768,-9166,-13209,-9578,-3946,32767,5759,-32768,32767,-16063,-32768,-32768,-29337,-2319,-28473,32767,-32768,-8706,6965,22239,-32768,11855,-17338,32767,8753,-8812,-16614,3589,23634,32767,32767,32767,-3167,8610,4969,8251,32767,32767,2378,32767,32767,-11708,32767,32767,32767,10148,-28171,32767,19511,28417,-7654,16423,-32768,32767,32767,32767,32767,32767,32767,32767,-9902,18963,-32768,-26243,32767,-24812,32767,32767,-25428,16089,32767,32767,32767,32767,32767,12644,32767,14732,32767,32767,32767,32767,-8326,32767,29390,32767,16421,32767,16267,32767,32767,32767,-2572,5642,32767,32767,32767,32767,-32768,1969,27571,32767,-18489,26668,32767,32767,-4440,32767,32767,-6438,32767,8950,30551,-32004,32393,1911,12983,-3252,32767,32767,-32768,-31147,32767,-16683,-693,19248,23293,-32768,-29346,27183,32767,32767,30269,
32767,17750,32767,32767,32767,32767,-32768,32767,32767,14586,12890,21006,27004,32767,3612,32767,32767,32767,32767,29523,32767,32767,-10759,-14873,-18412,-32768,17839,-32768,32767,32767,-32768,32767,3460,-7424,32767,32767,32767,21552,32767,32767,-24253,32767,32767,32767,7650,439,-15379,32767,32767,-12316,-10723,32767,32767,-32768,-5248,32767,14723,26184,-32768,32767,-32768,-7172,-7129,1445,32767,22309,-11569,-21960,-32768,31785,22216,-32768,-21888,-19832,32767,32767,-32768,-6912,11705,22640,32767,-3488,-32768,-32768,-11140,-27217,23448,-25338,-32768,-32768,-32768,-32768,-7147,-16934,30746,-32768,-30954,-20078,-27969,-32768,32767,-32768,-32768,-32768,-18176,-32768,-32768,-32768,-1799,25590,14442,-27320,-512,-32768,32767,32767,-25477,-18432,-32768,-22829,-32768,32767,23573,32767,32767,-4773,-32768,32767,-32768,-18877,26402,12031,-4642,32767,-12852,-31120,-32768,-32768,32767,28400,-32768,13214,25622,10726,-32768,-32768,-20412,32767,9947,5272,32767,-5252,21837,-28229,-17860,-15323,-17018,-32768,-8743,-32768,32767,-32768,32767,1865,-32768,15171,-32768,13037,-26576,-32768,32767,-32768,-32768,8639,-22784,1778,5155,-12648,21707,32767,-20759,-32768,-8611,-32768,-1914,-32768,32767,-32768,-31885,-4864,-31079,32767,-32768,32660,32767,32767,-32768,24421,-29888,21324,2790,-32768,189,-32768,32767,825,-13527,-19615,32767,-8607,19473,32767,-2274,32767,27311,18798,-24682,13111,10977,28805,-13542,32767,17012,9521,-9811,32767,32767,-1186,18591,32767,32767,20058,-22894,32767,-3519,28987,32767,-5980,32767,32767,32767,6538,32767,13425,32767,32767,32767,-32768,-32768,-163,32767,32767,32767,-32768,32767,19306,32767,24054,-2514,4415,-12621,32767,32767,32767,-1117,32767,4628,29902,32767,32767,-9615,30355,32767,32767,32767,32767,32767,32767,32767,32767,30003,-5053,32767,32767,12211,-16862,17065,-21185,30043,11525,32767,32767,32767,29760,-32768,23634,32767,25034,32767,-30122,32767,-13797,32767,-23825,32767,-8280,22852,32767,12897,-30661,-12382,-32768,-12477,-32768,-24483,-9049,-32768,6319,32767,
32466,2534,14594,32767,16518,-249,32767,11521,10608,14662,9327,22894,32767,-10259,-4917,8066,-32768,13008,20217,27287,-32768,-30830,-6476,32767,-32245,32767,31184,-21727,-32768,22408,536,-32768,32767,28814,5733,-31129,32767,-13124,-4575,-32768,-32768,-22232,-10456,32767,-16434,-32768,32767,-8102,-32768,-18441,-32030,32767,-32768,-31710,-32768,32767,-32557,-302,-32768,-9854,105,-32768,32767,-32768,14491,29026,-32768,-12066,32767,-9738,-32768,-32768,25261,32767,16969,19276,-32768,-3509,3478,-32768,17749,-28947,-32768,19270,18757,32767,32767,32767,32767,32767,32767,32767,-32768,32767,10677,-27114,30494,-10823,32767,32767,754,-10327,32767,-32768,32767,8930,32767,32767,32767,-32768,15911,23079,32767,27685,7063,32767,8188,6370,5230,32767,-32768,-32768,32767,12522,32767,-30757,-21910,-32768,32767,32767,-23312,-32768,-6417,-32768,32767,32767,-17484,-27536,32767,30125,3379,-32768,16912,-32768,-2922,32767,-2073,-6015,32767,-32768,-28352,32142,-19524,-32768,-32768,-32768,-32768,-5326,-27762,-16460,30870,-32768,32767,-21745,-25292,-14746,-6978,-32768,-32768,-28683,-32768,-1640,-32768,-32768,32767,23916,-19932,32767,-32768,5439,32767,-11528,-32768,3184,19660,-32768,32767,2542,32767,-26571,-32768,-10066,-25855,32767,-32768,-8415,-32768,-16554,32767,-5302,-32768,-17614,-32768,-32768,-963,-32768,-28366,-6603,-32768,3731,976,-9038,-32768,-32768,-32768,32767,32767,32767,-30909,-3083,32767,-9976,-7691,12051,-32768,32767,-13948,26075,-10837,-28834,-32768,32767,3986,-15050,-30145,-6537,13983,32767,-32768,32767,-32768,-32768,-9322,32767,32767,9557,-32768,32767,32767,32767,-8517,-32768,-20324,-32768,8116,3129,32767,-32768,32767,-30691,-32768,-32768,15375,32767,-32768,-11678,17247,32767,32767,19128,-1233,7568,32767,-3000,-21487,17208,-32768,19643,-1307,-18121,-32768,16887,32767,-29358,-5218,32767,-32768,-32768,32767,32767,-9256,-32768,-20715,2555,32767,1477,32767,-14805,-29055,32767,-26794,-32768,-25524,-10264,32767,32767,32767,32767,32767,-17467,-9079,32767,32767,32767,32767,32767,-29966,
10966,-2245,32767,22689,-3336,32767,32767,7072,32767,32767,32767,32767,12309,-6917,32767,13237,23139,-11858,32767,22855,28278,22267,-1679,32767,-2570,32767,7084,-32768,-20630,-32768,-32768,32767,-24326,-12289,27536,16952,32767,32767,5416,20010,32767,-18197,-23959,-32768,-13055,16104,-32768,-32768,-20489,32767,32767,-29635,32767,14360,3511,15314,-28592,-32768,-27673,-6327,-10826,-32768,-32768,29521,-2683,2433,-1695,-32768,32767,-32768,15338,-181,19417,-6946,-32768,32767,1373,-9351,32767,32767,-32768,-11441,-7277,-32768,3274,-32768,13724,-16074,32767,-32768,20899,32767,28385,-21682,22831,32767,32767,-15027,-16857,32767,29641,32767,13383,-32768,-22605,29493,-21580,-16293,-1523,981,13205,19780,23698,-24743,7074,28409,22699,-2419,-17396,12915,-8939,-31639,-1768,-32768,-32768,2098,-17879,-21836,25032,-20401,-13432,22550,-32768,-32768,-32768,6853,13992,-28041,2515,-32768,-32768,19100,-32768,18393,-9539,16746,17748,32767,-32768,-32768,-17062,-27788,-32768,-32768,-2729,18891,-5298,13450,-32768,-32768,-32768,-32768,22896,-32768,-7860,-32768,-21201,-32768,-14688,26750,-32768,29118,4336,-32768,32767,-32768,-32768,-14484,18399,5702,-32768,-17242,17990,28678,17137,23896,-32768,-32768,-10712,23182,-32637,32767,32767,32767,-32768,32767,32767,32767,32767,32767,32767,32767,32767,32767,-32768,32767,16420,32767,-1734,2117,26773,32767,-9143,-4458,32767,17198,13598,32767,30097,299,32767,32767,-9574,-32768,31294,-26050,-24618,32767,32767,21629,32767,28471,-8972,23198,30147,32767,-32768,32767,28413,-32768,19241,24375,25538,32767,-31005,32767,21062,-19371,32767,32767,32767,-32768,-32768,-32768,32767,-32768,-8865,-32638,32767,32767,32767,9035,-27448,-11348,-500,-11279,-32768,-16839,12748,-32768,26393,-32768,31340,32767,-32768,12662,32767,-1182,-32768,32767,32767,-32768,32767,-32768,-5722,-32768,-32768,-32765,32767,1545,1281,4693,-2533,25835,-24684,-25091,32767,-32768,-13070,-32768,-32768,-1114,-32768,-32768,-22144,21975,-3667,-20641,-32768,-32768,-32768,-32768,-30869,-32768,-16775,-32768,-32
768,32767,-14162,-26456,-32768,-32768,22845,-32768,-32768,-32768,-32768,-32768,4733,7156,-32768,-32768,-32768,-32768,-32768,-32768,724,8712,-636,32767,-21466,-32768,-32768,-32768,-32768,-11542,-14049,-11599,-32768,-32768,21439,-30050,-188,32767,-32768,32767,32767,-23606,-32768,-32768,-32768,-27625,32767,-32768,-32768,-32768,-32768,21516,29658,21466,-27053,32767,-32768,-7015,11846,-15438,6083,-15834,15507,20596,24741,12536,32767,26518,-9545,1086,-1535,-2041,-32768,32282,-3300,-28578,32767,-14461,15058,-22974,32767,19267,28134,7427,-7336,3786,-32768,32767,-7345,-11291,-14083,-32768,803,655,32767,-20335,-25446,7405,1129,-32768,-23312,-7432,-32159,-19478,11517,-14023,-22077,-17872,-32768,18705,-32768,-32768,-11689,32767,-32768,-8130,-19513,-18680,32767,-3292,-19307,-27641,11744,-30791,-24833,-29918,-32768,21386,20414,32767,32767,5360,32767,28666,9129,-32768,32767,-1723,-618,32767,16742,-1603,-21800,-5139,-4624,32767,-8196,32767,32767,-13737,-23432,32767,32767,14544,10230,12759,-32768,32767,-6412,609,4028,-32768,6441,-32768,-26395,-8853,-32768,2562,32767,702,-32768,-32768,-17251,-5720,-32768,-32768,-6969,25059,-12345,-1846,32767,12170,-4826,-597,-32768,-26469,-32768,-32768,-32768,-32768,-32768,-32768,-32768,408,-4172,-32768,-32768,10878,32767,-32768,-32768,-6688,26568,-32768,1213,-32768,10397,-32768,19993,-32768,-32768,13512,-32768,21271,12243,18678,19710,-32768,9933,-32768,-32768,32767,-32768,10062,-15991,-32768,-32768,32767,8080,-14512,-32768,27276,21905,-19419,20679,-1938,-12494,-32768,-32768,-32768,-23381,-32768,-32768,14236,-32768,-7173,-32768,-4078,-11234,32767,8788,-32768,32767,16521,32767,-4054,13267,-32768,-5637,31161,10945,14191,32767,-32768,-29718,13657,15579,17656,13429,-20458,-13421,-32768,-8546,-13860,-32768,-32768,-32768,12009,32767,32767,-4574,13014,32767,-32768,32767,32767,29918,32767,-32768,32767,-31609,32767,-23830,27547,32767,-1798,-27034,32767,32767,32767,32767,-12333,32767,32767,-3962,2997,-29711,15546,32767,32767,10910,32767,11600,32767,32767,14661
,32767,32767,32767,32767,32767,-32768,25069,32767,-3124,9265,14902,32767,-32768,32767,32767,32767,32767,32767,32767,32767,32767,32767,32767,32767,-29954,-9879,32767,32767,14522,1721,14805,32767,-25662,-21137,32767,32420,-5972,10884,-32768,12678,30444,9526,-242,18959,-32768,32767,23648,-3058,-32768,-9988,32767,-6306,-27497,-21040,32767,12707,27299,18920,32767,8831,32767,32767,-19779,32767,-8010,-32768,3169,32767,9762,32767,32767,29897,32767,23319,-32768,32767,31319,31122,32767,32767,32767,-19466,22537,-32768,23033,7237,-28485,32767,-2435,32767,-15766,32767,32767,32767,14389,32767,19621,8237,-32768,7995,32767,32767,28344,-2336,32767,21155,-28738,8992,20019,-25407,-18103,-30536,-19488,32767,20121,32767,-13152,32767,32767,32767,-18241,7839,-30111,32767,32767,-14028,-32768,-32768,-30247,-32768,-32768,-27750,-6807,-32768,6864,-32768,-32768,-32768,18818,-21707,-32768,-28315,-25443,32767,-21244,-32768,-32768,-12679,-3801,-32768,-32768,-32768,-13349,-32768,-18623,-6589,-25519,-19520,-7218,-19532,32767,-19957,-31791,-32768,-32768,-31987,-9108,-32768,-32768,32767,12728,-32768,-32768,-1740,-32768,32767,15246,-32768,-32768,11786,32767,-2160,-3955,-32768,-2383,-3036,-29541,-32768,-32768,6530,19093,14402,-32768,-32768,-32768,-23390,19031,3966,18790,32767,-893,-29572,-23230,-30947,-32768,-9963,-32768,-29634,-20871,-32768,32767,-31372,-32768,-5744,-13633,32767,32767,-18840,23318,-18124,15163,-32768,-22615,-17131,-32768,-24504,32767,-32768,19495,-32768,14032,13903,32767,-21063,-10278,-32768,32767,-20549,32767,16297,-11896,-31971,-32768,-32768,-24399,-23928,-32768,9517,-24088,31808,-1532,5464,3027,-22149,32767,14272,32767,32767,-2223,25756,19054,-21064,-32768,32767,27899,3580,25882,32767,-32768,276,21420,32767,17044,-32768,32767,32767,-7620,-32768,32767,-32768,32767,22477,-32768,19117,12216,32767,32767,32767,32767,32767,32767,32767,32767,32767,32767,25311,32767,9392,26757,18296,32767,32767,32767,2042,32767,24231,-26143,32767,32767,29406,32767,32767,19187,32144,32767,32767,32767,-15931
,21441,-29119,-24859,32767,32767,-9081,-30956,-32768,-11640,-30308,30989,32767,-32768,32767,32767,-22095,-21777,-32768,29697,32767,32767,3982,32767,23131,24970,-17106,-32768,19889,-2496,-32768,32767,32767,-32768,-1188,-32768,-32768,32767,4484,10499,-32768,-18824,32767,-32768,-2438,-6290,4189,-32768,7616,32767,32767,-3825,-32768,32767,-8255,32767,32767,-32768,25773,-32768,8840,14504,32767,-23477,-28886,32767,32767,-32768,-32768,-32768,-32768,-32768,-32768,-32768,16493,-32768,3286,-32768,-32768,-32768,26991,-32768,-32768,-32768,-32768,-32768,-32768,-32768,-32768,-32768,-32768,-32768,-6878,16735,-32768,-32768,15846,2580,-32768,-32768,8685,-20118,21585,-32768,-21308,-32768,-32768,32767,18358,-32768,-32768,-9165,32767,-2495,-22824,-32768,21849,13314,-32768,-16404,32767,-32768,-26934,32767,-2738,-31338,32767,-32768,-26561,711,32767,-11190,-28830,32767,32767,26473,32767,32767,32767,20057,19286,32767,1055,-32768,29333,32767,32767,32767,-32768,-22962,-32768,7499,-14344,32767,21852,32767,-24770,22070,26295,-31810,22102,2342,32767,-32768,-7759,-32768,17105,-5951,-32768,3489,32767,-26029,1694,-340,-32768,-5474,28599,32767,15463,2666,-2956,-32768,32767,32767,32767,32767,26822,-25221,-13864,12426,32767,32767,-32768,32399,3989,-32768,32767,-32768,-32768,-32768,-4960,-9796,24090,-15988,-9307,-30312,-8659,-32768,-32768,7190,14240,-6585,-32768,-32768,-27842,-30653,-16919,14243,-32768,19476,-32768,-32768,-32768,-32768,32767,-13813,-32768,6123,-21064,-32768,-24656,-17495,-32768,4530,-32768,-32768,-32768,-3189,-16660,24427,7073,-32768,-32768,17081,-32768,-17758,-32768,-32768,-27366,-32768,-32768,-10722,-11056,14728,20455,-32768,-32768,-2056,-26590,-32768,-32768,-32768,-32768,-10816,-10183,1373,-32768,-26820,19730,32767,-32768,32767,-15844,-32768,-32768,-19349,12138,-17340,-5291,-9752,32767,17634,32767,13025,-808,32767,-32768,-32768,14871,-1980,26534,-32768,-32768,551,-25578,-2794,-30206,-32768,19575,-32768,11990,32767,-32768,-32768,-25074,32767,29725,-32768,-8893,32767,17409,-6285,-1781
5,19972,32767,-1745,18594,12827,-32768,32767,20120,9715,32767,11568,32767,-15594,32767,2296,6442,4766,32767,32767,-1644,-32768,-19268,-29179,32767,-32768,32767,16464,32767,-11723,32767,32767,32767,28233,32767,-32768,-24948,-32768,32767,32767,6960,14015,8525,-215,2470,32767,-32768,16924,-20572,32767,32767,-1009,3227,32767,-4897,32767,-637,31015,14175,-11406,22529,13531,27003,-18177,8879,-15480,32767,850,-3202,32767,-21502,32767,-2685,32767,32767,-27284,25461,-5361,-4706,7885,-32768,-28836,-7570,-19056,-1767,-16801,32767,-32768,824,-32768,-21577,16308,-32768,28002,-32768,8863,32767,31324,-31200,-3342,-1356,1666,-22149,13272,-22110,-32768,-16247,-32768,-32768,-32768,-21979,-32768,28624,24192,-32768,-32768,11482,-23831,941,-6421,-18026,-18469,-32768,15727,-23614,-23881,-32768,8720,-8657,-32768,-16371,-18560,-2733,14191,-22260,-30755,32767,-18855,19147,9219,-4031,32767,-7375,32767,-4161,32767,-32768,20314,7515,12595,-27173,32767,-32768,-2841,21585,32767,-9971,-32768,32767,32767,32767,29362,-32768,24823,-13549,-10453,5289,5403,-26834,-22130,32767,-26572,-6398,-32768,-32768,-6631,-111,18238,13322,-32768,7214,-25475,32767,-14602,-4321,-8089,-21954,-24234,3837,24783,2561,30344,-1321,32767,-30290,32767,27078,-16640,-32768,32767,-32768,-7751,-32768,292,-32768,-32768,24482,-32768,-9054,-6447,-23672,-10098,-17018,-14661,-32768,-32768,-19990,-32768,-32768,-32768,-32768,-23809,-27794,-32768,17167,26892,-32768,-22658,-32768,32767,-28206,-8462,-3238,10332,32767,-12577,-32768,3230,-9642,-7325,10340,-32768,32767,-5117,24174,32767,32767,-32768,32767,32767,32767,21918,4039,32767,32767,30588,-21271,-32768,32767,-32768,27399,-32768,32587,19639,32767,32767,32767,32767,1007,15860,8290,18760,32767,32767,32767,32767,-1426,-7043,-15265,-3279,32767,23277,6003,32767,-23333,32767,32767,-32224,11100,32767,25763,14763,26672,32767,-32768,32767,15315,32767,32767,-2782,32767,32767,32767,32767,-18265,9743,24914,32767,-32768,-32768,29820,32767,-4318,32767,32767,-5157,7472,-24431,-32768,-13788,18972,-324
76,16539,1976,-6859,1883,-32768,32767,-15557,-28383,-3724,-19273,29830,11908,-30995,-28623,32767,3551,-32768,-32768,-32768,-32768,32767,32679,18662,-32768,-32768,32767,-32768,-32768,17901,-32768,-9340,32767,-32768,32767,-32768,-21894,-32768,-25752,-20008,-13757,19082,-9174,-32768,-32768,-32768,-32768,32767,-3184,-32768,-32768,-30762,5500,-355,-25160,24247,-32768,-32768,3208,-32768,-32768,-32768,-25649,-22008,-11329,-32768,-32768,-32768,-32768,-28092,-32768,-32768,24097,-3476,-15049,32767,-32768,20714,12264,-32768,-10074,-32768,-32768,32767,14471,-32768,-17770,-32768,4890,-17571,-27028,32767,26836,32767,32767,30109,-15996,-20605,-12722,-4712,6207,-32768,-32768,4199,14016,-22468,1453,10343,32767,32767,-25916,32767,6634,-22005,32767,-16848,3126,29572,-10619,20311,5648,-28426,32287,32767,4934,32767,-32768,-2922,32767,32767,22608,-26863,32767,-10260,-2543,14665,-9270,-23666,-32768,-32768,-11043,-32768,-28501,-32768,-32768,-32768,-6539,12946,-25097,-1470,32767,-27148,-13672,-32768,-7168,-14521,-32768,-32768,-472,23307,-32768,-32768,-32768,-9732,1045,1052,21881,-22566,-2280,-2812,32767,32767,32767,-4983,-31096,-7748,32767,-29125,-22634,2148,-32768,32767,-32768,32767,32767,-11636,588,-31687,-1755,32767,-25061,-27773,32767,8933,30877,25595,32767,32767,32767,-7415,-7290,-32768,23629,-32768,-32768,9451,32767,-32768,-19430,20515,11613,32767,-32768,32767,32767,-32768,-12443,17087,32767,-10154,32767,-2638,13095,16058,2184,-32768,2472,-32768,-17970,4628,-30377,17009,-32768,-32768,-32768,-32768,-32768,-32768,-28962,-32768,-32768,-32768,-32768,-7638,32767,32767,-32768,-12570,-32768,19900,-32768,-32768,-32768,-32768,-32768,-32768,32767,-32768,-32768,-30112,22526,-32768,-21116,-22361,-32768,-6221,30925,11489,20562,21415,-32768,-32768,32767,-32768,32767,8153,-16866,-32768,188,8730,17843,-32768,-5154,-32768,-25338,32767,-32768,-32768,19967,30690,32767,-31572,32767,-32768,-32768,3221,-32768,-32768,-21830,-13927,32767,-32768,24847,-12546,32767,-24543,-10978,32767,-17378,32767,-32768,-2642
7,-25151,32767,9860,32767,32767,-19510,32767,-13064,-17487,1361,32767,-12584,-8235,32767,32767,32767,-12158,32767,29756,32767,32767,32767,32767,25599,32767,32767,32767,32281,31063,22937,32767,32767,32767,27465,32767,32767,32767,32767,32767,-20359,32767,15877,32767,12472,-12091,32767,32767,32767,32767,15439,-12771,32767,32767,32767,32767,32767,-15093,32767,32767,19894,32767,-4534,32767,29808,32767,30985,32767,16855,-19353,32767,32767,32767,32767,32767,-32768,32767,32767,32767,32767,32767,32767,32767,32767,25863,-4484,-11520,18248,-25079,32767,-16427,26550,3163,19649,29200,194,32767,32767,-1852,-32768,10640,32767,-6272,18631,32767,10039,32767,-32529,32767,7995,32767,10524,-3794,-32768,4968,30354,32767,8160,-12385,19354,-32768,32767,-23212,13039,32767,18070,32767,32767,-32768,32767,12161,32767,32767,32767,3518,-7325,32767,32767,-4982,-13817,14925,10105,26463,32767,13724,2711,32767,22908,32767,28009,-3014,-32768,16470,-10171,29643,-13032,-6496,32767,-8692,32767,25681,32767,13281,-15215,14359,-32768,-32768,-11727,-6179,-1191,32767,32767,32767,32767,32767,-10056,-17177,-20760,-32768,32767,-32768,-32768,-3629,3364,-32768,-32052,-32768,14017,-32768,-32768,-32768,-32768,-32768,-32144,-32768,25584,-32768,32767,-32768,-32768,18705,-32768,-32768,-32768,-32768,-15827,-32768,-32768,-32768,-32768,-32768,-19899,-32768,-32768,-32768,17780,-32768,19718,32767,-11884,6799,-13247,-32768,292,-32768,32767,-32768,-4286,32238,-32768,-32768,22509,-32768,-32768,-32768,-25741,-12077,1393,-4411,-32768,20793,-10106,-32768,-27325,32767,-32768,5839,293,-24113,-32768,-32768,27185,-32768,32767,-32768,-32768,32767,16130,-22306,-14698,-12258,32767,14583,2769,-21412,32767,-32768,-720,22236,-19689,-32768,-16228,-355,-32768,-32768,-23394,1810,32767,-32768,25231,-32768,32767,-32768,29160,30452,-32768,-32768,-32768,-32768,32688,32767,32767,320,-32768,-22585,-30643,-32768,28511,-8419,-9946,-32768,32767,4679,32767,19208,992,9332,32767,32767,32767,-2090,-32768,32767,32767,-3976,25315,27619,32767,17724,32767,-
29350,-16352,-4292,-326,30674,32767,-3018,21996,-11711,32767,32767,32767,-2900,-28271,32767,-32768,21030,30655,32767,-6483,32767,32767,25409,-20494,32767,32767,32767,-19977,-3537,22183,24868,32767,32767,32767,32767,32767,32767,32767,6782,-32768,32767,32767,32767,19047,25061,32767,-745,-6986,4767,369,5535,-25304,32767,17445,-24483,-4453,-17509,32767,31376,32767,32767,32767,32767,32767,20671,-8651,32767,-3509,-25485,32767,32767,32767,-32768,32767,32767,32767,-18278,32767,32767,32767,32767,4664,-7179,-32768,-32768,32767,-32768,-32768,-7478,-32768,-32768,1179,32767,-10253,6650,6099,-12998,-24371,-32162,32767,-32768,-20225,19317,32767,-7745,32767,-23178,32767,-25601,-32768,32767,32767,9685,1633,-32768,-23246,-32768,-7864,-32768,5905,-32768,-32768,-32768,32767,-11304,-32768,11510,-32768,-32768,-23363,-32768,-32768,-32768,32767,-32768,-32768,-32768,-32768,-32768,-32768,-30113,-32768,-32768,-26695,-32768,-32768,-27177,-1231,-6332,-3208,-32768,-32768,-32768,-32768,-32768,-32768,32767,3021,18546,-32768,14925,-32768,30950,-12047,-24765,10593,1498,32767,1330,14090,-32768,25107,28969,-32768,3979,32767,-32768,32767,-32768,32767,32767,32363,32767,32767,29954,6524,32767,32767,11401,32767,31716,-15890,-2221,25219,-32768,8231,26431,10819,32767,32767,32767,32767,22905,1113,-10046,-13021,27999,32767,32767,32767,32767,3193,-2791,32767,-32768,-6338,2582,-32768,-32768,6692,6607,32767,5807,-16657,10776,32767,-10739,32767,32767,-32768,-27297,21063,32767,32767,32767,32767,-31350,3312,-408,32767,-28994,-32768,-32768,-17296,-32768,32767,-10294,-6136,-32768,-28599,-32768,32767,-434,-32768,-32768,-22433,-32768,18342,-32768,-22669,5316,-32768,-32768,-16350,-6343,-32768,-17981,-32768,-32768,-12571,-13110,-32768,-7586,-32768,-32768,-23405,-32768,-815,-32768,-21480,-28547,-32768,-32768,-32768,16574,18632,2812,-32768,-31273,-32768,-32768,32767,-748,-32768,-32768,-32768,-32768,-31647,-28531,32767,-8139,-16869,-32768,-23355,-228,9580,32767,-32768,-3866,-32768,32767,30923,-372,5288,5809,4926,26807,-3276
8,23485,8778,32767,19330,32767,-25905,32767,32767,-1159,-32768,2020,529,-32768,-10477,26066,-32768,-32768,-32768,-10798,28618,-32768,-32768,-2275,32767,15202,344,13872,-32768,32767,-7099,-23720,30427,4357,-27055,32767,-32768,-32768,-17730,-9629,-32768,-1422,3942,-32768,-32768,32767,-26174,-18176,16959,32767,5961,7130,32767,32767,32767,32767,32767,1150,4500,-32768,32767,-13086,-23261,-27847,9240,-32768,-31293,32767,32767,-1056,-32768,9404,-32768,31465,27251,-32768,32767,10736,32767,32767,32767,-30097,23078,1430,9232,-7525,11140,32767,32767,32767,32767,17650,32767,32767,32767,4402,-16241,10742,32767,8368,32767,32767,6824,32767,32767,524,28287,32767,32767,32767,13686,-4462,32767,8422,32767,-836,32767,25400,-9554,-32768,9004,-32768,-32768,7018,-31849,-32768,8443,-6163,4952,32767,32767,-17677,-675,3611,32767,-31182,32767,-4970,-9948,-14145,32767,32767,-32768,-14844,-23886,32767,32767,-11066,-32768,32767,-31794,29604,-32768,-11076,-5264,-32768,5348,-27596,21322,32767,-32768,30511,-26712,-5071,-29439,-18452,-7360,-1521,32767,18725,-32768,-21466,-27456,-32768,16296,-5106,-3537,-32768,32767,-32768,26592,-30546,-3424,32767,32767,32767,-373,-7703,-7811,32767,-20584,-12107,6944,23408,32767,13593,32767,-1389,-4532,17482,-6453,32767,32767,-32768,27943,28952,-20260,-4726,6802,32767,32767,-30095,26637,-28137,-16139,-32768,-32768,32767,32767,13486,-32768,14729,32767,-25280,-32768,18438,-23120,-32768,-17948,-4422,32767,-9171,-32768,-5949,-32768,25797,32767,-5186,615,-32768,-10537,6001,-4473,31989,-20200,-32768,-32768,5652,-8446,-32768,22940,-32768,25290,-32768,-32768,-24451,4692,-32768,-32768,-23056,-32768,-9884,-32768,17713,28830,8593,-14812,-32768,-32768,-32768,-32768,22436,13994,-11819,1715,-32768,9199,32767,32767,-2099,32767,-21646,20965,21261,-30293,29770,4367,-5863,-5854,-32768,3628,32767,32767,16594,32767,15114,19375,-17247,22780,-7877,32767,32767,32767,30801,-11250,-5010,-16150,-15004,32767,32767,15006,32767,32767,32767,32767,228,32767,32767,32767,-32768,32767,-21958,32767,32
767,11710,1252,-29905,32767,32767,32767,32767,29033,32767,-32768,-32768,14169,23071,20607,32767,-18213,32767,-4448,11489,32767,-8948,-32768,-2073,32767,-32768,-32768,32767,-16425,11302,-26149,-24691,-32768,32767,-10085,8664,20693,-32768,32767,14425,32767,32767,1215,-5916,-16869,18117,32767,32767,32767,2853,19267,-31554,-32768,32767,-6446,-32768,3837,-32768,-28693,32767,-2422,-32768,7556,3198,-32768,-20156,-19857,20505,-28814,-3617,18121,-7156,32767,25744,22071,32767,-5759,-11749,7978,-32768,-13054,-968,-32768,-8450,-32678,-32768,-32768,13972,-32768,-32768,-13348,-32768,-32768,-32768,-15397,-32768,-32768,-32768,1161,-32768,32767,-32768,-32768,-32768,16115,-32768,-32768,-32768,-24525,702,-9242,-32768,-32768,23061,1802,-22318,32767,-32768,32767,-18311,20002,-32768,-14054,-32768,28689,-32768,-19824,26224,-28813,-11954,-32768,32767,32767,-32768,-32768,-27047,32767,-32768,-32768,-32768,24838,-32768,13613,-26791,-32768,24530,-11180,18293,22048,32767,32767,15493,17908,4435,32767,-32768,-30439,22307,-19102,-10991,32767,12510,32767,-32768,25307,32767,16599,13388,23854,-10362,32767,4385,22527,20367,-24175,-8933,-7374 diff --git a/tensorflow/lite/micro/integration_tests/seanet/transpose_conv/transpose_conv3_input0_int32.csv b/tensorflow/lite/micro/integration_tests/seanet/transpose_conv/transpose_conv3_input0_int32.csv new file mode 100644 index 0000000..4abcf90 --- /dev/null +++ b/tensorflow/lite/micro/integration_tests/seanet/transpose_conv/transpose_conv3_input0_int32.csv @@ -0,0 +1 @@ +1,4,82,16 diff --git a/tensorflow/lite/micro/integration_tests/seanet/transpose_conv/transpose_conv3_input1_int16.csv b/tensorflow/lite/micro/integration_tests/seanet/transpose_conv/transpose_conv3_input1_int16.csv new file mode 100644 index 0000000..98b001f --- /dev/null +++ b/tensorflow/lite/micro/integration_tests/seanet/transpose_conv/transpose_conv3_input1_int16.csv @@ -0,0 +1 @@ 
+16905,21624,12028,-4919,8135,-6205,14737,-387,28900,-15228,-3732,15350,21773,18261,-28520,10132,24666,-29043,-19136,30109,30844,5419,-25980,11648,1149,26432,5616,-25374,6404,12493,-13845,-440,-12176,-32217,22136,-6944,4219,-21780,-332,-14502,2040,-32639,-31691,46,8927,-14163,-13076,-16305,-9311,27220,2159,-3884,-10760,-19676,-11280,15821,8674,-3890,1910,-11654,-13830,17797,-8552,-25839,20477,-14114,-4766,-26872,-1446,-28001,31456,17069,-19903,6975,28192,31222,10672,27458,18880,-28465,14629,-1807,14690,-28497,9727,13874,26486,20640,29372,-27692,13121,27102,-4341,8734,20532,30959,-32338,1393,-1060,-10763,27716,11756,-13563,-7678,32115,-30454,-14154,-18237,-19117,4261,6457,4323,32574,13306,24514,26861,-17373,-16132,-12297,16324,-7791,19386,-7377,-7207,-2238,17778,16454,-16762,-27907,4150,-6095,19859,-1975,22348,9615,14061,6958,-10027,-2850,-30601,-529,-17653,-26123,29685,-204,12858,16600,-20822,-24273,-8653,6678,11713,19459,-8371,24747,17528,-3588,-4866,-26250,29376,-5535,31331,1989,-13926,-22953,-28930,28830,-10123,5307,18559,-17118,3042,-24477,-14228,19372,-26134,-10568,-10982,15916,31274,10550,-30960,-31626,-24585,31425,-802,1290,3808,22496,-10202,11075,-12782,29754,-28030,-29191,23081,-9664,-8877,-16481,-31718,10451,3636,-29429,-4529,-11379,-31767,29102,-15289,-17527,-28712,3389,-13785,-11492,-5742,4065,26535,26369,-32445,-2208,-2717,27734,-21994,-23980,-13366,-30805,26051,-32721,10502,-22718,-10090,-175,-3988,-19704,246,30616,30130,14618,16026,-1341,9768,28331,-4367,-7677,27074,30487,-13849,4412,-24881,-1772,-16904,9405,-24341,-23746,-31240,-21467,-29512,-32461,-29443,22862,7521,17695,-14683,26218,-17373,17773,-28515,15977,-25184,-25677,8788,32452,-973,5540,-31813,-20139,10136,5421,-4050,21886,18423,-17746,12205,-10106,27040,-16445,-32443,-12231,12568,-19753,24186,10197,-30730,2686,-12652,-19838,-25831,16856,-9129,-19294,-26242,-2748,-23383,5992,-26397,19403,-14341,-10158,-8971,-15272,28726,-8348,-23859,-22589,24534,-20294,5365,16820,19706,-11086,19987,8735,2319,
11333,19688,-6545,-6956,-29593,7212,25405,-8168,-12146,10306,-9323,15320,-31528,9853,-23479,24250,-5762,25727,704,19190,27278,32256,-5769,12834,-11310,-6421,-27236,2982,9007,-29700,12696,4345,21311,-16957,30833,29293,23890,-28940,-23981,21298,-3276,-26410,17271,-25726,21997,19812,-7358,30726,22276,1429,15522,12645,11095,28992,23216,-10914,-15951,25611,-27158,1950,1350,19951,19450,-25555,-28905,8026,-25890,-15254,-29582,-22568,-18202,17811,12276,29482,-29231,-29604,30595,-8570,8480,-18569,-22479,26812,9970,29856,-30825,-6978,-7711,-4334,24249,22726,13621,32129,135,-29987,-27422,-3427,-19942,733,24776,-21510,-25786,12198,14006,-29201,1102,-961,-8542,6675,32019,5221,11483,32146,-27870,-1914,-21009,11380,12583,-14704,29790,20609,19285,18637,16769,14251,-19897,18833,-820,24577,11201,-20908,12670,-3131,20077,-8190,5651,15643,-13560,-17203,-4480,-15382,-12508,-6935,10443,-4188,30222,-6060,-4031,1035,-31101,-27759,5684,-17728,21638,4457,11427,-8280,21819,-19660,32355,-18024,-9492,-8466,22013,-422,-20297,-5604,-26582,3900,-17090,-22698,27743,-24982,16827,465,-10439,-7809,-4663,22028,-19698,-7102,16428,-24818,-15880,-20798,-12526,30898,25576,-9185,-10677,-2053,-28922,10999,18879,-25695,31094,-4528,17484,16602,-4237,-27788,-31048,-18466,-26764,-259,-11789,-10070,227,-4418,-22061,-13683,-420,-11817,19969,-11093,-25179,-23011,-9636,-13551,3319,-3119,18499,-6519,-29624,-30452,-5884,-23543,-19922,-7886,28100,12315,-4361,-32730,17852,-12751,-15539,-5139,-10068,-31713,23017,26582,-26663,28558,-9962,-12012,4795,21931,28190,-19045,31517,17393,28370,-4931,-21800,196,-32622,4585,-2650,25451,8060,29767,-15355,9343,22105,21164,-13807,-8248,4850,26376,3702,13815,-7494,7509,19780,28260,31530,6836,-24672,-28457,28248,-28663,4961,-25933,27804,-31188,6086,23652,-3854,-15735,-21037,5142,-3209,-21295,28408,-16520,24623,-11312,27526,-2681,-14073,-17155,6708,-2736,-2563,-28432,9278,20123,17438,-16231,26172,12786,2197,25739,25328,17402,-4187,27322,8495,31795,-7384,-32315,28451,562,-25692,6984,-1197
,-18680,10099,29583,-3248,11210,-11903,30515,13236,-8306,10154,-30147,-5867,-18949,-12595,-23394,12520,26407,21317,13042,22149,-16748,-13306,-21743,-13897,22064,-2738,-11728,-27072,-26676,-8866,-9150,-19439,-20554,-30285,-10028,-24496,-27527,-25200,-13067,13537,-3043,9687,14474,27678,28894,5192,29856,22655,-16637,10994,-26762,10111,6325,-13345,160,-13188,17317,-12097,32745,28325,-8756,26865,-4617,-24787,-18617,7190,9074,-20778,-20965,17865,4141,4614,-4340,25846,-3716,20724,27849,-1781,16909,18440,-19109,23727,5634,-2880,-31294,-31656,6045,13490,2878,23437,-3420,-26693,-10889,2567,21175,2293,-5103,22363,-85,-22590,-1550,25306,-30514,7889,21771,11953,15233,7383,9096,-32766,15492,-3846,-772,-3088,-24320,11227,22884,1099,26081,15603,17773,3960,390,14224,23504,-27192,-28302,-3290,2236,-12255,30524,11871,1384,1099,420,-11353,-19033,19166,11530,777,11048,11296,6001,-5652,-27179,31516,58,-27679,21648,10786,31706,5573,-27316,-19139,9831,-3294,-10564,-934,5920,8259,7280,-21320,7750,5321,19061,-29295,-3661,-28915,-31952,9659,25405,-13774,-32625,23829,-12076,-21175,-25625,24336,12814,1008,-24656,7269,29749,-16913,29287,-23119,1891,-25719,17191,14143,20342,-23996,22268,6941,9744,-28761,-14885,17108,-9002,31707,31295,-5260,-8550,29165,28696,31552,-3687,-4492,16896,-14599,-12987,20596,-3265,-15034,30131,-28820,-8274,-28045,21787,17028,7991,-11541,19289,-21667,5929,-16098,-5830,-26268,21404,-9754,29282,-30211,45,-23045,-7118,-9833,25713,-32372,23908,-4984,-24241,11371,3827,-29378,23325,-25377,-29908,-2752,-26628,-10759,23211,-12098,-11375,5610,-30370,-2344,14617,20605,3751,-29852,-9682,-23935,-29383,1522,-3329,-335,-25380,-13942,27795,19342,14199,-29623,-27147,6779,-31085,23449,-674,32108,-9753,15498,-7249,-6387,19532,4405,-5763,2100,-20764,16482,-11174,6231,-26875,28590,-51,2142,16414,19806,19373,26348,7118,-10558,28821,5417,-18848,31064,4523,-25809,-29173,-19622,-493,1582,-3619,26928,16601,-31474,11409,21544,-18855,-13021,10803,-3096,-4350,1601,5435,-7833,-17780,-24670,-12594,-80
15,-9981,-9611,-3379,1690,-16382,13673,9906,16765,8599,10320,23617,-12884,-19743,25474,21106,-25690,24414,7310,-26786,25185,-31573,-6632,4580,18634,10666,-28892,-17287,-12196,19120,28723,12808,1248,14065,-5651,25593,-29758,-11160,21197,27625,-23791,-20254,-10122,-19249,6695,-8741,-10337,25965,14172,-27660,16414,13556,-31634,20343,-9175,-1306,-23232,-13949,7165,-16816,29636,20732,5126,-5582,25022,9871,10415,-12304,23460,-25438,5874,3574,31767,-6396,-14801,-21265,14740,1820,-20018,31978,17180,-1361,-941,-6632,-11740,-12429,10076,-10215,11830,27443,-27794,14330,-19995,15003,-18217,-9433,5540,-9034,1954,-384,-4664,32181,31220,18881,-27997,6002,18736,27789,315,1151,18880,-24975,-6296,24117,2716,-1002,-6399,19159,-14576,24996,-25669,-12801,10418,14472,29253,-13454,-13063,-21549,12214,18481,-19783,-18890,-9385,-8656,10623,6714,2440,-32522,14301,10401,-32184,7901,-8466,-31609,-28032,-13487,-30147,-30796,117,24961,30413,12567,-29651,-20212,-27970,-20237,-19356,6592,-18659,-2449,27457,18519,16544,-21791,-23648,3303,-15423,20843,18846,-13579,-12607,9394,11256,-4486,10144,-3814,17299,8721,-603,31729,5143,16869,28706,-24603,30701,-25715,15661,-31303,1265,-15263,13863,4107,-8228,12767,-18142,559,-16687,12119,-23153,-20003,-15168,-15640,-7849,-6648,15132,-16226,-9353,-6672,25481,27819,25640,6332,11637,20692,27960,30255,-25831,-23627,-7090,-30857,24698,20007,16067,4484,-1434,20825,-19009,12701,-21189,-27266,27694,30583,11257,-8753,8971,-28134,-6811,16295,-15965,30724,-3328,26513,30509,2667,14796,-2332,24950,23929,28003,546,-3628,12909,7588,-12257,-12922,-11980,-6770,2355,11206,19370,-24767,29101,14207,5119,32053,-19168,29907,17050,-31487,-11982,-21994,-5618,25665,1123,15435,18341,-9154,6342,1275,-12660,-23479,-6852,-23813,16377,10077,25230,23688,26789,10320,-17644,-31043,-19224,-1348,-2994,15533,-24738,1479,-17258,-31956,16742,-3574,-20025,-22851,-5689,-5689,14753 diff --git a/tensorflow/lite/micro/integration_tests/seanet/transpose_conv/transpose_conv4.tflite 
b/tensorflow/lite/micro/integration_tests/seanet/transpose_conv/transpose_conv4.tflite new file mode 100644 index 0000000..618d91e Binary files /dev/null and b/tensorflow/lite/micro/integration_tests/seanet/transpose_conv/transpose_conv4.tflite differ diff --git a/tensorflow/lite/micro/integration_tests/seanet/transpose_conv/transpose_conv4_golden_int16.csv b/tensorflow/lite/micro/integration_tests/seanet/transpose_conv/transpose_conv4_golden_int16.csv new file mode 100644 index 0000000..39c6f57 --- /dev/null +++ b/tensorflow/lite/micro/integration_tests/seanet/transpose_conv/transpose_conv4_golden_int16.csv @@ -0,0 +1 @@ +2179,32767,156,-23798,-3090,-148,25153,20500,32767,-123,14867,12422,-32404,-9801,18095,-8331,21968,6139,-32768,-32768,-32768,-32768,-5234,-7923,-5516,-32768,-32768,-32768,32767,32767,-32768,8692,-31233,-32768,14964,-12443,-5994,32767,-32768,-15814,-32768,8203,-18739,-32768,-32768,2845,8226,-32768,-32768,-27318,32767,28298,-21730,29041,32767,25727,-12628,-19898,-32768,6095,-32768,-1853,32767,506,-32768,-9563,28450,-3648,-25639,32767,32767,-32768,19126,-32768,-10244,25655,32767,32767,32767,-15438,-32768,-1624,-32768,24029,22352,13582,-11712,10871,-31462,-2591,-12212,27859,10264,-2474,32767,-11398,14465,-3167,32767,-19031,10621,352,-1171,32767,-8326,32767,25601,32767,32767,18003,-32768,32520,-4962,32767,-21803,32767,32576,4513,32767,13552,-9536,13426,11200,32767,-11612,-16873,27374,-12861,3985,32767,-32768,-32768,-32768,-32768,-26813,-977,2714,-32690,-32768,-32768,20796,9011,-32768,14262,32767,-4509,-32768,-32768,-32768,10392,12298,-32768,32767,-29564,21122,-15685,777,13101,21165,5474,-24976,3572,-32768,9427,8725,28762,28186,13442,-7711,14881,-17294,-21166,2352,20705,1470,-19029,-32768,31995,-32768,-32768,-32768,-32768,-7434,-6254,116,-32768,-17567,-32768,-25199,-25853,-24604,-32768,18612,-29191,-8016,-32768,-31671,22056,32767,-32768,-7300,32767,-16477,32767,28594,20391,32767,-13116,-28981,27561,32767,15433,-14978,15787,1443,32767,-32768,32767,-32768
,-32768,-32768,-23868,-32768,-18642,23064,32767,26972,32767,31952,-32768,13629,6779,32767,-12951,32767,32767,21684,32767,-26436,32767,11376,9403,-32768,-32768,-32768,-32768,-21417,-26868,23455,28079,32767,-32768,-11888,12439,-32768,-26545,-32768,-32768,32767,12601,32767,6606,-16036,26243,-12464,21383,9588,-32768,-9347,-32768,-32768,-21621,-32768,32767,-32768,28189,-32768,-23431,32767,-32768,24521,19927,-9971,-32768,18968,32767,32767,-11442,-3997,16345,623,13337,-32768,11031,-32768,16781,-19611,-24842,-32768,-32768,-5092,32767,32767,4649,-32768,-23662,-8255,-6065,17883,21519,-32480,1374,-27165,-11201,-32768,-32768,-30476,-11580,-32768,-32768,12803,23746,-19718,-32768,-18195,-17,32767,-25624,-11139,-13359,32767,-32768,-32768,32767,-32768,-27229,32767,1244,-32768,4804,-22708,26152,32720,1760,-32768,-22684,32767,-22736,-32768,-14715,-3170,28783,-14330,21781,32767,3441,32767,8587,28924,32767,5127,-32768,32767,-15438,-32768,14794,-32768,-2931,-11521,-24511,-26191,-32768,3413,32310,32767,-32768,32767,-32768,32767,25836,15497,-23997,32767,-606,11058,-13920,-32768,32767,7744,2463,-4915,25582,-29439,-8603,-32768,29897,32767,870,12731,17999,18913,696,32767,1880,1180,20823,32767,15507,32767,-32768,32767,1280,-32768,-13164,26338,20144,18319,-23656,-21148,-32768,-16513,32767,25935,-23081,32767,21289,32767,32767,32767,27087,32767,-11482,-32768,-32768,-32768,-20955,3379,-29338,-32768,-6053,-32768,-6296,-32768,-13222,32767,-32768,-32768,-32768,-14174,-1599,-10427,-32768,-32768,-6778,-32768,994,-32768,-32768,-32768,-32768,-32768,-15902,-32768,-10444,-32768,-29132,32767,-32768,19036,16305,-29130,30639,-32768,32767,-32768,-28120,21357,18602,-32768,-8023,17618,-10646,32767,27962,32767,22156,5276,32767,-21260,21581,15901,17752,20025,11944,27328,23829,5268,32767,-26487,-32768,-15531,-2675,-26431,17878,16774,19800,27724,32767,22132,-8429,-32768,7711,32767,-32768,32767,13154,32767,-875,-18173,-11337,1276,20560,23782,32767,22405,-32768,-32768,3009,32767,-23344,-32768,1020,-32768,-32768,2283,
-13724,-9519,-32768,-32768,32767,29904,27113,32767,32767,30609,-15121,-32768,-10504,-32768,22668,-29934,-20811,-17086,-32768,-32768,-32768,-32768,2836,8899,-28519,-10539,10284,-18745,32767,-32768,-11764,-32768,-19072,-32768,-32611,-32768,-31853,10398,-15943,-27562,-7724,-16710,463,-12065,-22421,18171,-16006,-32768,-14771,9006,-4523,-32768,-5190,-32768,7678,4545,-32768,12851,-12971,28489,-32768,-32768,-20361,-32768,-2292,-32768,-32768,-32768,-32768,-32768,-32768,-32768,-18127,-13708,-32768,32767,-26060,32767,32767,32767,25743,-4416,32767,31788,32767,-32040,24111,32767,-20012,-32768,32767,-32768,32767,-10191,-25622,14652,32767,-32768,12180,-32768,32767,32767,-10159,15326,31318,32767,12791,12722,-9697,-32768,-32768,-7270,30007,-32768,-32768,-32768,-32768,-9171,23084,-32768,32767,3339,-31495,-27052,-32768,-32768,-10155,-32768,-32768,28776,-32768,-32768,-16508,-20548,32767,32767,3839,19385,-6105,-5631,29311,-32768,-14131,-32768,4060,-32768,-32768,-32768,-32768,-32768,32767,-32768,-12461,-32768,-32768,32306,32767,32767,9534,32767,16875,-32768,-22584,28563,-32768,-32768,-32768,465,-32768,-32768,32767,32767,-32768,-12251,32767,32767,-32768,15420,15042,32767,-6501,30285,22407,32767,-18128,32767,21566,-32768,32767,-32768,-12567,-9607,2652,18141,6395,-32768,1799,32767,32767,20063,32498,-32768,-4344,-32768,-3332,22559,-16008,-32768,25926,-32768,32767,-32768,-32768,32767,22495,-32768,-32768,-32768,8680,-32768,-32768,-23167,-26161,-10829,8103,-32768,6483,-32768,-32768,-32768,-32768,-32768,-32768,876,16571,16777,-32768,-11571,5788,20052,-32768,31755,32767,32767,-10196,-32768,32767,5808,-30512,-32768,32767,-32768,32767,-10724,-32768,9347,21585,-32768,4641,-32768,-32768,18786,-2022,-32768,-23170,-27306,32767,-32768,-32768,6942,-28602,-32768,-12828,-32768,9845,-32768,-32768,-32768,32767,4642,-21174,-12565,24247,22382,-32768,28866,-10413,-9034,32767,-32768,18266,32767,-23735,-32768,4128,-32768,-32768,32767,21037,-26479,-15696,12197,-32768,17814,32767,-27757,-32768,3928,-32768,-702,118
04,-32768,32767,-11357,6834,-19596,-32768,-4860,32767,32767,-12605,3428,5495,18656,-32768,-32768,32767,-4241,-32768,32767,-2399,6792,17583,-32768,19243,32767,-32768,32767,-32768,17096,32767,-32768,29609,32767,9094,32767,-32768,-32768,-18481,-32768,19939,-20715,-32768,24320,-20822,-16641,1828,-23781,4814,1513,-24096,-502,-23419,-31712,32767,-7371,16670,-6466,11562,-32768,-32768,-32768,-32768,-28910,-32768,-5879,12411,-32768,32767,32767,32767,32767,32767,20048,26150,1311,32767,13043,7270,32767,5624,31850,-19640,32767,-361,32767,-4942,6539,-29104,-21405,29874,32767,-701,-16038,32767,7240,15876,32767,32767,27875,-32768,-32768,32767,-26916,32767,-32768,-16996,-8512,-12051,-32768,32767,24170,-1488,22174,-32768,-32768,-32768,12118,32767,-905,32767,-18819,17965,-15131,32767,-32768,-23852,2319,-31338,-24364,-32768,-32768,-28338,5706,32767,-1165,18767,-23806,-32768,32767,-15609,-32723,-32768,-12690,32767,32767,-32768,-29545,-32768,-18023,32767,-29858,9260,-18033,-32768,6334,-27334,-9256,8453,-25079,-16549,7275,-32768,-22653,-15592,-29823,-17240,-17063,13548,-32768,10226,-32768,-7986,-32768,-28413,13820,11756,9891,14707,32767,-29329,32767,-11615,-18611,17803,32767,-15049,32767,-32768,32767,32767,-10625,-24314,32767,20853,32767,32767,-32768,32767,11938,23351,32767,-32768,32767,-26412,-32768,32767,32767,4964,9038,14847,17861,32767,-32768,-7851,-13030,26445,32767,-32768,3694,31762,25632,32767,3506,-32071,24968,-13791,32767,32767,-32768,5655,-32768,24523,32767,22346,-32768,-32768,32767,-30837,-26927,12618,32767,32767,32767,30904,32767,-8680,-18636,7635,32767,32767,14979,-32768,32767,-20927,-32768,13223,32767,32767,-32131,12055,-32768,-32768,-571,7584,15314,-5746,10855,-22620,9149,-32768,-1729,32767,19857,-32768,-32768,-32768,8608,-29369,-18470,-8984,-32768,-20746,-32768,-32768,-32768,-32768,-14343,-20774,4850,-32009,-495,2830,-31163,13865,-32768,21286,-32768,-29439,5690,26572,-32768,-18054,-32768,32767,32767,-30408,-29148,-14431,-15440,32767,30703,17704,-32768,17011,-29021,25655,-
32768,29788,-26859,26866,-32768,-32768,32767,-32768,15384,32767,-7335,-28989,-32768,4470,-32768,-32768,-32768,-32768,2145,-22932,-32768,32767,12438,2084,12400,32767,32767,10974,-24263,-20497,-32768,32767,6018,32767,-32768,32767,32767,-296,32767,9661,-18499,27722,32767,-32768,-32768,32767,-32768,-32768,-32768,32141,32767,-8906,-32768,32767,-32768,-25684,-32768,157,29852,17498,-19528,-32768,25028,32767,-32768,32767,32767,-26039,32767,-32768,-14914,26179,-32768,-32768,-32768,-22293,2527,37,5535,-19782,-14221,-32768,32767,32767,-22125,6348,-9265,-4736,-3595,-21306,32767,21799,-28685,32767,21315,1820,12171,-32768,24267,-2709,13395,-30382,-32768,22200,-32768,-1855,19416,-8127,12583,25650,4688,32767,-3250,-3337,32767,7241,28737,-15205,-15979,-13714,32767,-32768,31962,-18569,-32768,32767,-19370,-32768,-21502,-32768,-32768,-21860,-32768,-32768,31379,-20200,-29503,15031,-17426,9481,-2884,32767,23895,32767,-32768,-13879,32767,32767,-8659,-32768,-32768,160,-32768,26499,5631,-32768,-28338,-8273,-32768,-32768,-18100,-32768,-32768,32767,-558,-32768,-32768,21889,25228,-32768,-32768,9777,-20852,-32768,16201,-32768,2460,-32768,-32768,8483,-32768,-20963,-32768,32767,32767,32767,-17700,32767,32767,-24018,-23396,32767,-32768,32767,32767,16681,32767,-32768,2291,32767,-32768,13177,-32768,-32768,32767,-2533,9017,27777,32767,-8468,7291,-12984,11957,4472,30201,-32768,32767,16063,-32768,-24342,20962,-17300,-11385,-32768,-32768,-32768,-32768,20609,32767,9861,23152,13901,-32768,32767,32767,25536,17443,4426,-17790,32767,-5552,32767,32767,19866,14109,3386,32767,811,1334,32767,18180,-32768,5563,32767,-20882,-32768,32767,10395,-32768,-32768,-32768,-31442,32767,-32192,2849,-32768,32767,-32768,32767,-20396,-1224,32767,32767,26909,-23056,30857,-2604,13468,-4670,32767,-32768,32767,5087,32767,32767,28001,1935,-32768,32767,6998,-32768,-9041,23250,32767,-28515,32767,-32768,-32768,-1341,338,7846,3623,32767,26253,32767,32767,15050,32767,-32768,-32768,10563,-32768,-32768,22271,-32768,-32768,32767,-10315,1744
3,-32768,32767,-32768,32767,-2076,32767,-23810,-32768,-19997,-32768,-32768,5107,-12290,-31632,32767,-15976,32767,-32768,32767,32767,9204,20660,-25293,-20946,-32768,32767,-7572,-6834,-32768,-32768,15880,-19727,-32768,-32768,-21151,-17686,-32768,-1406,19198,-23806,10107,32767,-32768,31380,32767,32767,-6594,6343,-22443,-13349,32767,32767,-32768,31665,32767,-13290,-15778,-32768,-13324,-32768,32767,32767,-32768,32767,32339,32767,-32768,32767,-20718,32767,-32768,-32768,23577,-32768,5719,-14716,28974,-19175,32767,-32768,-32768,-32768,1549,18749,14708,-19693,32767,3173,32767,32767,-4032,29148,17157,27487,5032,-16984,32767,-32768,32767,32767,-22468,-25209,-24071,32767,32767,29343,-2928,-13884,-23815,16842,32767,29341,32767,32767,32767,-21954,32767,20748,32767,32767,-20073,32767,32767,32767,-12073,-32768,-1568,-23804,-9523,-17419,32767,-32768,6220,32767,28052,-32768,12671,16745,32767,-29910,32767,-5568,-32768,32767,-32768,28823,32767,8033,-19259,32767,32767,-32768,32767,32767,4188,-30916,28602,-32768,32767,7418,32767,-32768,17124,32767,-1558,-32768,-32768,-13629,-32768,32767,-32768,32767,-3323,32767,-4730,-23539,32767,32767,32767,32767,5558,-26059,26342,32767,32767,25425,32767,32767,13412,-22277,27354,31686,32767,32767,27965,21089,30018,1088,32767,32767,11956,26728,19704,-32768,24227,6923,17550,16466,32767,32767,-25079,-32768,-32768,13463,26714,-32768,-14111,32767,32767,-32768,32044,32767,32767,-32768,2502,32767,32767,-25706,-27906,32767,-32768,32767,-31270,-12551,-21487,5066,-3247,-17578,-32768,32767,28863,-4315,-21870,32767,32767,-32768,-24194,-32768,-32768,-32768,-32768,-17027,-32768,-32768,-32768,-32768,32767,-32768,-32768,14052,16162,-32768,-32768,-32768,-32768,-32768,-32768,-32768,-32768,-32768,-32768,8764,-32768,-32768,-32768,-32768,-32768,-32768,-27947,-32768,-32768,-10530,-32768,-32768,-32768,-32768,-32768,-32768,-32691,-32768,-25930,15067,-24159,14739,-17097,-32768,32767,31563,-21952,32767,15460,27577,1996,16210,32767,12543,-5789,32767,32767,4100,-32768,25426,-32768
,10809,-32768,32767,-26182,-4175,16138,524,32767,32767,-28064,32767,-32768,-32768,22330,-9087,-7430,32767,-32768,32767,26986,-32768,30637,32767,-9578,32767,-32768,-32768,-9626,-32768,16636,-2243,-32768,-32768,-32768,-32768,-2729,8544,-32768,13848,26382,-32768,-23134,3073,-32768,-32768,-32768,32767,-13519,-32768,23633,-32768,-32768,-32768,29070,-32768,-32768,-32768,16666,-32768,-1242,-32768,-32768,-32768,-15347,-32768,30378,-12101,-32768,-32768,-32768,-32768,32767,-32768,-32768,-32768,-32768,-32768,1930,32767,-27296,-30584,-32768,-32768,-32768,-32768,-17584,32767,-32768,-32768,-32768,-32768,18568,-32768,345,-32768,-32768,-32768,-32768,-21513,-32768,-32768,-32768,-32768,-17020,-32768,-32768,-32768,-32768,-32768,-32768,-32768,32767,-12267,-15668,-32768,14725,32767,-32768,32767,27687,-29474,-17440,32767,-1008,646,-32768,7742,27018,-32768,-32768,-24886,-32768,-32768,17804,7647,-22650,2465,-32768,32767,15443,-32768,17247,32767,-32768,27328,-32768,14408,-32768,32767,-3236,-32768,32767,32767,-32700,-32768,-32768,-32768,-32768,32767,-6945,32767,-11378,8947,-32566,-32768,1390,-29921,32767,-31815,22090,-32768,32767,11565,23953,-22723,-32768,32767,32767,32767,-14565,-32768,26225,-18596,9619,32452,24021,-32768,-32768,-872,32767,17350,32767,32767,32767,32767,-4469,32767,26831,19356,32767,32767,32767,32767,-2038,27731,24557,-560,32767,32767,32767,32767,-32768,32767,32767,32767,18051,32767,32767,32767,7565,32767,25637,8195,-4820,-23662,-32768,14772,-10556,-7333,-32768,32767,9254,13735,-32768,-32768,-21528,-32768,-32768,3049,31607,-17784,32767,29981,19885,32767,-32768,4081,-17128,-32768,32767,-32768,32767,32767,-8472,-2276,19294,-17513,28317,-8979,20803,3990,-7701,6207,-19388,3378,32767,-32768,29582,32767,-24533,-9632,32767,-32768,28407,-32768,15326,-17984,-32768,32767,-32768,-5063,-5227,-26431,-20001,-28592,32767,-32768,-1179,11006,-9138,9619,-32768,-32768,-32768,6849,-32768,-32768,-32768,-606,-32768,-32768,-32768,-32768,-25435,32767,-32768,-32768,-23616,-32768,-32768,14045,-32768,
-32768,-32768,-27185,32767,-32768,133,-32768,-32768,-32768,-32768,-32768,-16453,-32768,3848,18210,-32768,-32768,-32768,-19576,-32768,-32768,-32768,-10188,-32768,24520,-29981,-32768,-19391,28940,-32768,23444,-32768,-32768,32767,32767,-32768,6814,-12406,-9031,-19089,32767,32767,-32768,32767,7638,-20406,32767,23905,-32768,32767,11350,32767,32767,18076,-10349,-32768,19988,20622,-23489,22278,-29791,32767,-32768,-32768,-17296,22,-6605,-14330,-2532,2827,-32768,32767,32767,-32768,16249,-32768,-32768,32767,-32654,32767,-14304,32767,-32768,-23275,-32768,27808,31582,32767,734,-23933,-32768,-32768,-32768,32767,-32768,32767,-32768,-32768,-1753,-32768,-32768,7388,11703,-32768,32767,32767,21133,-15530,-32768,-32768,32767,-32768,22270,14126,-32768,-27618,28667,13834,32767,-4419,32767,32767,32767,32767,16079,32767,-27098,32767,-32768,32767,32767,-14194,-15056,-32768,12846,32767,-32768,32767,-17368,9967,-32768,10689,1187,32767,-13364,-21728,-32768,32767,32767,32767,-32768,-24988,32767,-32768,-12824,-17342,-32768,-832,-32768,32767,-32768,-32768,-32768,792,-32768,-32768,18034,-29024,32767,-32768,-12336,32767,-28621,-6465,29459,-32768,32767,-14751,1102,32767,-32768,32767,-5285,21285,-32768,32767,-15804,27888,-32768,18429,-32768,32767,-32768,20590,-23229,-32768,32767,32767,-29660,-32768,-32768,-13247,-32768,-25986,-32768,2026,-32768,-32768,-32768,32767,-20919,-8592,27648,10077,32767,-32768,-32768,32767,-32768,-6461,7991,32767,-32768,-32768,-32768,-11978,32767,31823,-18850,-32768,19803,25797,32767,32767,32767,32767,32767,-32768,25705,-32768,17070,32767,-32768,32767,32767,-12392,-32768,32767,-32768,32767,1330,32767,15141,-32768,32767,-32768,167,-132,28296,-24778,32767,-32768,-27174,15169,-32768,8499,-32768,-32768,32767,3132,32767,-32768,32767,32767,32767,-32768,-32768,32767,32767,-32768,32767,32767,-32768,-32768,-10769,13807,29784,3348,32767,32767,32767,4061,-8506,7207,14537,-12420,-32768,-32768,22741,-31090,-32768,-21022,32767,-28734,32767,20901,-32768,5891,-15445,-32768,-26191,-32768,-32
768,16277,-32768,-32768,-5492,-11343,-32768,-32768,-32768,32767,9735,-32768,-32768,-32768,-19531,20634,2717,-32768,-32768,-16968,-32768,-32768,32767,-2245,-32768,7250,-32768,32767,3404,32767,-17884,32767,-32768,15390,-32768,-7069,-32768,-9458,17388,-5515,-32768,-32768,-32768,32767,16618,32767,11043,-18063,22742,-32768,11840,-26162,-30170,-23472,-32768,-32768,-32768,-3723,-32768,11281,-4491,32767,-7671,-30414,32767,-32768,-32768,32767,-19163,-32768,-32768,-32768,-32768,-32768,-32768,32767,32401,32767,22288,20017,-32768,32767,32767,-10927,8864,18874,-32768,-6333,-32768,23439,-15671,-32768,32767,12912,14402,32767,-32768,-10166,32767,32400,1021,-32768,-27131,32767,-32768,8048,-32768,32767,-32768,-32768,20893,11003,-32768,6999,-21563,-32768,32767,-32768,15567,-32768,32767,-950,32767,-32768,-7923,32767,-11350,32767,-32768,-32768,28196,32767,32767,32767,32767,32767,32767,-2146,-8399,-546,32767,-32768,-1476,32767,32767,-8081,32767,-3679,-32768,32767,32767,32210,32767,-32768,-32768,32767,-32768,-32768,-32768,21828,-32768,-18988,-22615,22831,2683,-11216,26838,30966,-32768,-32768,32767,-10192,8085,-32768,32767,16166,-32768,32767,32767,23216,-6652,-32768,22517,13042,32767,-13152,25622,32767,-5956,-12232,26180,-11524,1365,30982,9960,32767,32767,32767,10642,32767,-32768,32767,-32768,-27087,-11863,6870,32767,-4150,32767,14661,-32768,32767,19471,-32768,-32768,32767,25211,-12142,-32768,-9892,-22778,-27674,32685,2383,-30832,32767,32767,-28878,-12024,-32768,32767,-32768,-21582,-32306,32767,32767,-32768,23659,32767,1562,15269,32767,32767,-4217,32767,-25166,32767,32767,-11157,-32768,32767,-32768,-32768,-32768,-32768,-32768,32767,-21872,-29979,-32768,32767,-13318,-32768,-19510,32767,32767,32767,8581,-32768,3228,-32768,32767,15245,-13344,9745,32767,-32768,-32768,32767,27563,32767,17376,-32768,32767,-22244,32767,32767,-32768,32767,20987,6068,-25421,-4684,-6138,-10700,30103,32767,-32768,-7535,-32768,32767,-4673,-32768,32767,32767,-32768,-32768,-27986,-32768,27462,-11427,32767,32767,-32768,1
4211,-6845,-32768,4976,32767,23800,32767,32767,-32768,15871,-16701,25439,12364,32767,32767,-25493,8154,-32768,-12857,32767,24678,32767,-32768,16203,-32768,-9600,-5530,-24466,32767,-32768,23975,14273,15768,-32768,32767,-10316,-24779,7324,-32768,25268,-32768,-32768,32767,32767,-32768,-32768,-10089,-32768,32767,-32768,18161,-31756,-26345,-32768,-32768,18579,7149,-32768,32767,-22586,-23825,70,-32768,11550,-12100,-29698,32767,-28531,32767,5994,32767,176,8965,-32768,26843,32767,-32768,32767,-32768,-26771,-32768,32767,-32768,-32768,32767,-13309,-4651,32767,32767,-10035,32767,-21496,32767,32767,-32768,32767,32767,32767,5976,-10546,26057,32767,-32768,32767,2554,13243,15357,-32768,9191,-32768,32767,-32768,11507,32767,12736,-10872,-8048,-21151,-32768,-32768,-32768,-32768,10588,14357,-26302,12582,-32768,5567,-18053,-32768,-32768,-32768,5089,32547,-19297,32767,-32768,32767,-32768,-12639,32767,-32768,32767,-20363,-15280,16547,-9380,-32768,32767,3382,-32768,-32768,-13273,25569,-28972,12092,32767,-32768,11583,-32768,1446,-26532,-32768,-32768,7328,-32768,29899,13879,-32768,-19648,28777,32767,-29948,-32768,-23244,-32768,4512,-32768,-32768,-32768,32767,32767,32767,32767,-11895,-32768,18186,-32768,-6151,-2625,-20852,-32768,-32768,32767,32767,12481,9803,32767,-26459,30741,32767,19158,-841,-32768,32767,32767,-32768,32767,-32768,32767,32767,-28794,18853,-175,32767,-25820,3227,32767,3189,-3733,32767,32767,32767,29938,17844,32767,-32768,-32768,32767,-7751,32767,28450,32767,32767,32767,-4128,32767,15052,-31399,-14476,32767,-1925,-32768,32767,-5183,32767,32767,32767,32767,32767,30282,-14113,32767,32767,-23071,32767,-5306,-1961,32767,-32768,32767,-32768,32767,730,5888,17797,-3138,15794,-24513,32767,32767,24820,32767,32767,-32768,16311,1881,2664,-32768,15241,4134,27389,-32768,-32768,18961,-32768,-32768,-9940,28070,-11345,-32768,9863,-32768,11555,-32768,-20642,-26871,-32768,-32768,-6017,-32768,-24089,-32768,-22344,-32768,-32768,-4741,-25840,26430,-32768,-32768,-32768,-32768,-27628,-32768,-18646,
-32768,801,-32768,-32768,-32768,-32768,-32768,13870,-32768,-32768,-32768,-22229,-11650,-32768,-32768,-32768,-32768,-32768,-32768,-25849,-32768,-32768,-32768,-14013,13956,-17136,12020,-8205,32767,-21364,32767,-32768,24152,-16938,-15473,32767,14299,32767,-8181,-16376,-32768,2137,32767,-6191,32767,32767,-5426,32767,-32768,31441,32767,32767,32767,32767,-32768,12909,32767,-32768,32767,-6173,-30481,32267,9910,-30669,5516,32767,4114,-1467,-12971,-32768,32767,11179,-31573,4157,-32768,-9130,-8167,-32768,-32768,-32768,-5908,-4359,-12622,-32768,-2070,-2530,-32768,-7214,17952,-32768,-32768,-32768,-32768,-32768,-32768,-13351,-32768,-32768,-32768,-32768,-32768,-32768,-32768,-32768,-405,19301,-32768,-24535,306,-32768,-32768,-32768,32767,32767,-32768,-32768,-32768,11962,-32768,29238,32767,-32768,-32768,-32768,-32768,12256,-30660,-32768,-32768,-32768,-32768,15060,32767,-32768,13028,-6760,-25488,-32768,-32768,-32768,-14071,-32768,-32768,-13154,-32768,-32768,-32768,-32768,-32768,14051,-32768,-471,-16946,12474,14564,32266,23549,-22910,-32210,-32768,-32768,-12452,-32768,-32768,-32768,11113,10228,-19752,-32768,-32768,-24856,-32768,32767,-20699,-32768,22052,-9066,8305,26311,32767,32767,-20579,-32768,-8303,-4046,-32768,-20391,-32768,-32768,-11782,-32768,-32768,28496,32767,32767,-32768,-5273,-32768,-32768,14052,-10057,-12505,32767,-13330,-32768,32767,-32768,-32768,16398,22877,-32768,-32768,10149,-32768,-32768,3704,32767,-32768,-12769,-32768,-3697,29658,-30301,18417,1456,-32768,16299,32767,-3100,32767,-32768,32767,32767,32767,32767,32767,14424,32767,32767,-32768,32767,32767,32767,14871,32767,-15419,32767,-16287,32767,14767,32767,32767,32767,32767,32767,32767,32767,32767,32767,32767,32767,32767,32767,6993,2057,29584,32767,569,32767,20621,-1636,-5131,-19200,22038,-32768,-32768,-18316,-26153,8261,14679,-32768,-15305,-32768,8017,-32768,-32768,-31127,-22727,-32768,-32768,32767,-32768,-32768,-32768,-32768,-19174,-6157,9549,32767,-2869,-32768,-32768,-11817,26330,-20713,-17253,32767,-32768,-25777,-3
2365,-32768,29576,32767,4558,-32768,32767,-32768,-6395,19517,-16816,12841,25052,-767,-32768,13130,-32768,32767,6461,31705,25283,-32768,-1121,-2476,-32768,-32768,2894,-292,-3490,-25621,-32768,-32768,-21366,-1890,-6994,-32768,-32768,-32768,-32768,-32768,-12102,-32768,-32768,-32768,-32768,-32768,-32768,-32768,-32768,-190,-32768,4160,-32768,-32768,-32768,-32768,1001,-16740,-6063,-32768,-32768,20270,-32768,-744,32767,-32768,-32768,-32768,32767,-32768,14640,-32768,-32768,-32768,32767,-32768,18792,-32768,32206,-17533,-15309,23714,-27344,32767,-32768,32767,32767,-32230,-6355,-25929,-21279,-32768,32767,-32768,12127,-32768,-3737,-32768,-32768,32767,-32768,32767,-6823,-32768,19109,-21008,-3379,32767,-32768,2957,2605,-32768,-3939,-9371,32767,18704,-32768,-32768,-32768,-32768,-32768,-32768,-6965,-12329,-32768,-32768,-32768,-22483,-32768,-29427,-15350,28125,-5912,-32768,-5027,-25302,-32768,-32768,-30964,32767,-32768,-32768,-32768,-32768,-11285,-32768,-32768,7208,18843,32767,-29584,31081,32767,-9126,32767,33,32767,32767,-14733,-19881,32767,4106,-26104,-506,14826,32767,32767,32767,32767,-11770,32767,28056,14049,-3491,32767,-8688,30488,28143,32767,32767,-32768,-18484,-32768,-24456,32767,-32768,-32768,32767,-32768,-32768,-26149,-32768,-32768,-32768,31008,20827,-32768,-32768,-32768,32767,-20736,-32768,-30219,-1443,3337,-32768,-32768,-32768,-32768,-32768,-32768,5672,8077,32767,-7992,-32768,32767,24853,30429,32767,-32768,-32484,-32768,-31036,16188,-6207,-32768,-21201,24213,-32768,28699,-1384,6762,-32768,26406,32767,-13470,-32768,-32768,-32768,-9038,-29570,-11610,-4419,-32768,-32768,13968,-32768,-32768,-32768,-29357,-32768,-32768,-32768,-32768,-32768,-32768,18022,-32768,25769,-32193,-32768,32767,-32768,-32768,-13134,32767,-32768,32767,-23320,32767,-22256,32767,32767,9796,-6444,32767,32767,32767,-15371,-32768,32767,12132,-8201,24217,1759,-4367,6609,-3360,32767,21013,-18874,20682,-31296,24787,32767,-32768,32767,25052,-32768,12120,25025,-32768,32767,32767,-299,8174,-5515,15361,-32768,5608,3
2767,-14608,29550,32767,-32768,32767,-32768,8273,27424,-32768,26645,26426,32767,10489,32767,32767,32767,23933,-353,-24015,-24179,32767,32767,-32768,32767,10562,-32768,10152,32767,10247,-32768,5732,-32768,32767,12038,-6285,-3099,-32768,-5495,-32768,-19962,-32768,29954,-11292,-12403,-31411,-32768,32767,-32768,-32768,-32768,12124,-32768,32767,-32768,-29213,-30850,-32768,-32768,-30708,32767,-32768,32767,-32768,-32768,-32768,-32768,-32768,-32768,-32768,-18561,-13335,-32768,-32768,-32768,19796,-19747,-25016,32767,-32768,-18632,32767,-32768,5568,22331,-32768,-31375,-32768,-32768,-32768,-32768,32767,-32768,-32768,7913,-32768,4761,-20663,-4663,32767,-26134,-13325,-32768,32767,-12248,-32768,-7638,29729,-32768,-18849,32767,7170,32767,-32768,-32768,-32768,-32768,-32768,-2778,29872,-10466,32767,-32768,32767,-25071,26920,-32768,7743,-15183,-18564,-32768,-4548,-3069,22231,-32768,32767,-23463,6047,-32768,32767,32753,-25194,-32768,12491,-28784,20550,-230,-32768,-32768,3433,32767,-21920,-6225,-25394,32767,-23147,32767,32767,-32768,32767,27128,-32768,-9803,32767,-22931,-20734,32767,-32768,32767,282,-32768,-32768,32767,-18548,-32768,29651,-32768,-7849,5025,-23406,32767,-6154,32767,16729,22698,-29689,20989,-32768,32767,32767,32767,32767,32767,32767,32767,32767,32767,29239,32767,32767,32767,32767,32767,-22311,-20056,182,29629,-29467,-20626,16784,-20017,-31500,-7092,-9299,-32455,4365,-27915,32767,10901,1516,15590,32767,-32768,-12313,12477,5312,-30989,-19063,13080,-27064,-4620,-32768,-32768,-26317,-32768,14211,15463,-32768,29651,23089,-32768,-28820,-32768,9873,-32768,-32768,30612,-20398,5792,-29209,-32768,1562,32767,18003,-1948,32767,10657,32767,32767,3773,32767,32767,23871,13174,26564,31426,-20827,-4197,18783,1120,4506,24922,32767,10285,-1731,-27422,32767,-513,-9757,-32768,10355,-21370,16043,22786,-13047,32767,-32768,-31723,32767,32767,-637,27319,-32768,-32768,32767,-17727,-9022,7408,25389,2412,12276,599,32767,-10725,-17469,-27623,-25114,-8877,-20159,13325,-13004,-32768,-21181,-32768,3117
6,16707,-32768,32767,-32768,-9552,32767,9381,-32768,-32768,3410,-32768,18989,14558,-32768,-18762,28127,1955,-32768,-10974,-27708,-32768,-32768,-32768,-32768,-4003,-24627,3533,32767,29511,19396,24826,4347,-8449,26282,-3965,32767,9613,-9391,283,-16997,-32768,-24203,32767,32767,-32768,9128,8634,19363,-23,-1465,22825,32767,2115,32767,32767,-32481,10794,6936,32767,-2215,32767,32767,30453,-32768,10917,5769,-32768,-32768,-29309,-32768,21586,32767,9332,-20480,-32768,32767,-1119,32767,-8192,32767,-32044,18711,14524,-17927,32767,-12944,-2042,32767,32767,-13859,-32768,-18214,-8962,-22714,-898,32767,11580,-32768,25564,-32768,18093,17633,-32768,32767,-10724,-951,6401,-32768,32767,32767,12929,-6138,258,7788,3194,28001,6267,20864,-26614,-31562,-1868,-22829,32767,-15061,7136,-32768,-32768,-24177,27894,18596,-9380,-21729,32767,13022,-32768,-6555,-17526,-22764,32767,32767,21818,32767,-3327,-32768,-17131,32767,32767,32767,32767,32767,32767,32767,-5292,32767,32767,32767,-32768,2749,-32768,27411,-5661,-32768,21161,-32768,-26707,-32768,-32768,-32768,22638,-20449,-31871,-32768,-19879,-14806,-2145,-32768,-5179,-19746,-24021,-32768,4851,-32768,-32768,6002,-2751,-26853,3542,16451,-32768,-23047,-22432,32767,-32768,-21831,11648,-32768,-4532,31536,9379,-6539,1785,-26895,-32768,-32768,-20763,6977,11358,4595,3860,-32768,-32768,-32768,-30028,-22588,-32768,7795,-4796,-32768,18081,-20869,32767,-32768,816,32767,7335,32365,32767,32767,-5645,32767,32767,20399,32767,25785,32767,32767,32767,-15347,20334,20506,13142,-4737,24306,-32768,15662,26616,-27869,-32768,-6184,19150,-32768,32767,-32768,19494,3956,-32768,2268,-32768,-27722,13036,25419,-10844,-19770,482,-9925,-10464,-3990,32767,14157,-32768,-18985,32767,-20605,8389,-17054,16998,-32768,-1001,32767,29904,6872,-8340,32767,-32768,32767,6719,8107,15670,26652,29035,32767,-32768,3585,30065,32767,-9677,10455,-16924,-32768,-11904,4734,-27429,119,-20922,2218,-10733,-32768,32767,-32768,12975,28232,-16706,-3365,26003,-25894,-32768,-26465,-32768,-1875,-28230,32767
,25237,-32768,-32768,-21950,-32768,-32768,-32768,-32768,-18191,-2077,-32768,-32768,-32768,-32768,-13496,2806,-20840,-12512,-32768,20581,-32768,-32768,-30051,9767,-32768,-14556,386,-10299,-10541,-32768,-32768,-15870,8505,-32768,-18536,-3300,-32768,537,-16006,-2575,-32768,-1992,27053,19092,32767,15206,-32768,309,5465,-6043,-32768,-1049,5061,4925,-16079,-32768,391,-1840,-32768,-31219,-32768,-32768,-2505,1317,2105,7472,-32768,-4345,-2714,8142,7221,2515,-32768,-32768,89,3584,4013,-18461,32767,-18391,-32768,22833,29200,-19838,-9245,5419,-2459,16882,31536,21551,-606,32767,-32768,-5734,-6327,32767,32767,-9037,32767,14599,7679,24723,14840,-19644,-2463,32767,32767,32767,32767,32767,7632,-11326,16807,14160,-12366,19732,6090,30512,32767,8895,17516,2283,-32768,-20580,12190,-32705,3763,-20973,-32768,-32768,-32768,-30699,-13158,-32768,-32768,-32768,-32768,-32768,-32768,-32768,-32768,-15524,-23028,25318,-21469,-769,-11628,-32768,-32768,28209,-26040,30435,-32768,-16133,27903,-32768,-32768,-16769,32767,18842,32767,-32768,-32768,-11299,-32768,-32004,-32768,8624,-4983,-32768,-32768,-32768,8163,-19953,-31819,9816,-16001,-32768,-32768,9524,-32768,-32768,-27375,-7864,-27896,11551,-32768,3031,32767,-9812,-32768,-8981,29561,20799,-32768,26398,-28126,-32768,-32768,-29554,6152,32394,32767,-32768,-31066,-32768,-13765,-8715,-31555,-10390,-32768,4820,-11592,-17502,-10142,7217,5356,16928,19655,32767,32767,5056,-32768,-16932,-4075,21832,-5527,-25408,-14718,32767,32767,-8847,7251,21410,32767,32767,2235,15363,32718,-29895,9654,2574,32767,1321,-15872,-32768,17122,-31405,-4950,-26765,-32768,-32768,-32768,-10244,32767,5386,-23363,-9656,-32768,-23198,-3871,618,-15071,26963,1538,13436,-9613,32767,32767,32767,-32768,30151,-8439,32767,-15306,15351,4334,32767,32767,26017,32767,-20156,32767,32767,32767,32767,32767,-3459,-10638,32767,32767,4230,32767,32767,32767,32767,32767,32767,32767,32767,32767,32767,32767,13940,32603,-8660,32767,-13270,11666,32767,32767,32767,-21384,-29085,-5892,16823,2771,24015,32767,-10
32,1211,32767,-1924,32767,32767,731,3026,-2303,32767,20873,-27045,-21988,-32768,9626,6541,32035,7223,6628,32767,2548,26261,23595,19210,25103,8803,32767,32767,32664,32767,32767,32767,10410,31143,32767,2829,16948,32767,32767,32767,32767,32767,32767,32767,31852,32767,32767,7998,3661,25893,-2077,-23457,-7781,-19212,2363,32767,32767,32767,32767,-32768,-19080,-21243,-14641,-16818,-3657,32767,-32768,-32768,32767,6927,-23048,2268,-32768,11902,-32768,-32768,-32768,13179,-11496,-32768,2427,6110,11411,24874,-12493,-2305,8829,-7366,14442,-25014,-28203,-2422,-32768,-32768,-27974,21034,-4045,-16775,-32768,-32768,32159,-32768,-32768,2648,-8313,-32768,-235,-32768,-25168,-24788,-32768,-3488,470,10093,-25522,-16556,26326,19074,-29892,-29697,-32768,28518,15866,-32768,-32768,-31692,-3198,-32768,22052,-26833,-21491,26761,-31693,30896,18081,32767,10997,32767,15010,-32768,32767,1597,10193,-23325,-9576,-32768,-32768,-22751,-32768,-26280,-7458,-32768,10286,-14621,-27322,-32768,13136,948,32767,-4718,32767,-18092,17565,-32768,-32768,-32768,-23212,-27004,25706,-32768,-32768,-25290,-32768,-32768,-32008,32767,7516,8379,-28600,32767,-29887,25767,15791,-29453,-4842,5064,-3744,-32768,-12236,-32768,486,16369,32767,-28770,-15644,7629,-10587,-21991,-27767,-32768,-26882,23629,-5006,-32768,-14524,7781,-11113,-32768,-2571,-2474,6843,-32768,-25071,32767,20062,-32768,-32768,-10895,3690,-32768,-9791,-8861,-22412,-32768,-1596,-18491,-32768,-32768,-32768,-32768,-22882,-32768,5594,-28381,-32768,-20059,-32768,-18869,5746,-21802,19800,-31515,-32768,-32768,15545,-32768,-32768,-16214,-15009,-32768,-13240,-32768,-17839,-32768,-32768,32767,-17584,-23243,7209,-17446,9461,28341,-7755,7586,12110,9351,17621,-21570,32767,-32768,18737,-26737,4307,25376,-28734,-16747,3310,28432,-2826,-32768,-21197,-5021,-24125,-32768,-32768,-32768,-32768,-32768,32767,12150,-19317,-32768,-32768,-15671,-14182,3857,-10048,7846,-7683,-32768,13822,-32768,-31652,-32768,24320,32767,-17914,18689,32767,6726,-27491,32767,-32768,-25126,-6450,4182,-58
50,32767,-12308,14288,8975,-356,32767,10575,1871,23908,25647,21838,32767,-29434,-22759,32767,9811,32682,6423,32767,3919,-32768,17504,-32768,-5969,-956,10539,-2263,32767,-9180,-1103,32767,32767,32767,32767,-16273,17210,-11806,8784,-31002,-16143,-31013,26000,-19407,-28038,32767,-12532,18853,27440,-24288,32767,-14851,32767,20833,-32768,-32768,-20664,-32768,12164,27677,915,-32768,-32768,358,-29697,27237,-2956,32767,7011,-32768,-7560,-32768,27006,-14829,-32768,-17654,937,12754,32767,-7091,-16644,-2931,-4622,-20861,32767,11807,-32768,32767,-499,-983,-15082,-32768,-32768,-32768,32767,-11471,-31239,28208,-32768,-32768,-17697,26905,-8685,32767,-11272,4442,32767,-26251,-4025,8550,-32768,5711,-24494,-26580,-427,-32768,-12722,-3708,-32768,-17054,24289,12591,-4108,3162,-19051,14501,-18080,-32768,32767,22306,5440,-32768,13692,32767,7237,32767,-22299,-32768,12515,-29772,-32768,-9327,20301,-32768,-22900,-32768,-32768,-25659,20589,7388,1017,-32768,-5239,-21546,-3487,-32768,-32768,-32768,-19961,-16231,3810,26087,-5304,-32768,13360,-25895,-6530,-32768,10328,-5390,-7156,435,2435,-32768,-3654,32767,32767,32767,23913,30378,-23929,10730,32767,32767,-3287,32767,23056,-23109,10691,24603,32767,32767,-32768,-27673,-5407,7155,32767,-10211,32767,-32768,-20724,-19668,14679,10913,-21772,-25774,2700,32767,13217,32767,20946,1907,-27985,10153,481,23272,-4771,-29920,-32768,10233,32767,13032,32767,32767,6429,29384,-1656,-3476,-12751,19004,15678,32767,32767,32767,10735,-15769,-24081,30995,-2157,32767,-32768,-8738,-26153,4763,-13638 diff --git a/tensorflow/lite/micro/integration_tests/seanet/transpose_conv/transpose_conv4_input0_int32.csv b/tensorflow/lite/micro/integration_tests/seanet/transpose_conv/transpose_conv4_input0_int32.csv new file mode 100644 index 0000000..c546cd8 --- /dev/null +++ b/tensorflow/lite/micro/integration_tests/seanet/transpose_conv/transpose_conv4_input0_int32.csv @@ -0,0 +1 @@ +1,4,162,8 diff --git 
a/tensorflow/lite/micro/integration_tests/seanet/transpose_conv/transpose_conv4_input1_int16.csv b/tensorflow/lite/micro/integration_tests/seanet/transpose_conv/transpose_conv4_input1_int16.csv new file mode 100644 index 0000000..b7bdb97 --- /dev/null +++ b/tensorflow/lite/micro/integration_tests/seanet/transpose_conv/transpose_conv4_input1_int16.csv @@ -0,0 +1 @@ +4994,250,-17232,22470,-7357,-19440,-10394,18088,-22878,-13835,5625,1186,12173,-20700,-25756,-31560,5093,-25790,10475,30538,-6915,-861,-2202,-31629,20656,12989,13004,24762,-18223,-22264,9499,28287,32143,13713,-28758,-31780,25862,8594,-11261,-18718,1023,28513,11255,-17201,3272,24984,18179,11902,11771,14961,-23388,25741,-28519,-27392,-659,-21512,-12274,26233,9549,-28579,-5050,-21428,-32477,2103,7966,-4131,-28762,-73,-18388,68,-10133,9088,28767,23321,-10158,-4280,-16636,-20403,20672,27602,-82,-6445,-16687,-20416,-13517,23251,30244,-17837,210,-4287,-16458,10380,17650,6898,-16378,10147,-31326,-6632,-11395,15792,-6202,-21770,-30613,11719,21896,41,6797,-3457,30838,11071,-22312,5483,-22703,28145,-22991,-18457,3873,5051,8578,13553,11580,23587,6569,4837,15485,-30542,-420,-27119,-23350,-22506,17705,22034,23559,3876,17463,-16065,-22,-24127,12287,-8042,14115,-7001,-24438,-21812,18731,-5673,-15634,6168,-15972,3207,-2202,31599,31624,-13522,11391,-24519,-9728,12234,-24752,-22957,3783,15019,-23995,-9404,27758,-18070,-5040,16828,-8119,12600,14349,14429,16644,-21045,20953,5461,26215,6452,-16268,17600,-8008,-13438,26500,17916,3925,8775,4305,-706,-18169,8182,24408,-31040,-5970,28676,20326,-10656,-17652,29445,14397,18194,-12095,31777,17199,19105,15565,-25344,-19597,24901,-23014,4420,29789,-27993,21052,-26690,-14433,-22333,-17462,17172,-6143,-22627,8529,20234,-30246,-14841,-15636,211,-19202,8802,16315,-4653,20987,-28626,-2146,-21863,-31045,19181,25304,3645,12870,-9265,27448,1710,22549,14660,13121,-10238,2845,32464,9021,12443,21348,23426,-32148,23473,4093,-17368,-26987,25523,-29312,-17694,24105,-26696,13946,-19917,3031,-17326,-25
784,-9412,-12086,-14403,14992,9293,18064,13532,-8266,-15545,4597,7349,25287,-19321,-7684,19892,20311,-6769,-3738,-25822,-7685,9347,19547,-17847,-32498,22451,3613,-8485,-30600,-13354,-8089,-10288,14933,2739,-18183,-21770,-2080,25927,6777,-8073,-4462,1449,29272,-32722,13220,-26099,4461,13056,22340,-12878,21582,11799,26137,2719,15089,-32537,-7160,-27540,5038,-18376,-969,18309,22319,9734,19692,-3855,13914,-7110,25340,-31880,6293,-4844,7369,-17310,-6615,32518,-23922,-27565,-9640,10803,26610,20,3084,14498,-16255,-9065,11325,-29543,22296,21217,15678,-26242,-19289,-13433,3956,-14164,-23774,-16147,-4444,-26874,10751,-25269,16497,-972,5677,22410,-12096,30384,-29843,-25697,16105,-8704,-24517,-31096,10653,-19174,-30018,-7865,-25012,-31105,-24648,-7674,-16295,-4949,12280,4738,-20347,9281,6975,14132,-27722,19298,-2422,16233,27841,-29554,-14930,3698,-25649,-19917,-5578,-17769,6901,-3152,-9401,-26424,9900,11559,785,-31444,15302,27554,6459,31399,-6221,5972,-25875,31383,29931,-32540,7692,11175,-5022,10890,-31674,-7861,-20307,9053,20918,26879,31403,17938,1933,20484,4158,27647,12978,4272,6475,-29877,4539,15124,19041,18656,7511,9375,13809,30782,-24397,-29744,28985,15890,-5688,3254,-20029,3462,28296,4497,-219,2145,9529,-14390,-18300,-7194,8509,-31567,23575,28870,11647,21621,15598,2105,18980,30141,-29688,-11771,-10118,3168,-20180,-3727,-14462,-1569,10284,-26241,-12890,-11898,-19499,-2293,-11512,2818,8691,-14568,17064,30830,-2951,-2367,-21895,11839,3073,-14549,17086,32408,31626,29348,-14532,-23131,-22823,22528,10189,-7808,-10467,17517,9374,10785,26103,-13677,-32003,-17590,-273,31970,-15896,7384,1827,-21026,-9762,25217,-11778,29067,30796,20394,13531,28330,30749,2192,26816,-19181,-26409,9861,17344,14254,-22622,13928,15603,7234,6195,-3427,-24469,10542,9112,-30517,4526,-28732,18791,-13715,-18810,-20529,-30887,16041,31329,32167,-17906,-13540,8483,-15049,8884,-5950,-19236,8721,-19392,-22157,15485,16921,24376,25351,12368,8011,12196,27197,-30628,-5880,16111,28113,-27575,-17632,29494,25878,24553,-1
3608,-4765,5591,-7942,32655,-12398,-22689,27012,17035,20752,12933,3661,132,-22563,-21048,-3980,15724,-18941,-27863,-19148,-3101,-19239,30194,5241,18388,-7732,-28040,31449,-12015,-25988,25472,-23497,25440,-4556,10747,-6079,16500,28448,-19749,13780,21799,-28723,27893,26636,12458,-27002,4520,-27516,-21205,-31458,15628,9405,-7508,28551,-32630,30960,20866,11342,5716,-18471,-12977,10855,-24204,11464,-2962,-9011,31461,7033,22801,28339,-24677,-1605,-2663,-1886,-14322,-17152,-1103,-24071,25972,15790,-32341,-21623,1904,6369,21265,-25455,-11598,24244,-28181,20449,30746,-21036,25404,-29855,-30411,-14211,13661,-26987,-13287,21331,-10752,21143,-19124,4316,16641,-3274,-21087,-27449,-29339,15157,-1296,-3129,15040,22939,-5320,-30650,-3036,-29837,-7825,690,5747,-5938,-17968,26957,-8209,12462,-10959,12672,-1966,-28001,13229,24355,10248,6307,2930,14499,-19664,31521,11172,1814,-12876,-13843,8897,-17927,3289,-31057,-3282,24260,-20918,13457,25788,20547,-9045,30562,20833,9169,7918,389,21063,-721,21242,-3584,13262,29081,-19309,-2972,-22269,-3544,32712,4590,-3234,31763,5118,18739,-26089,19739,27861,14010,-31519,23591,15322,-23579,1729,-7095,-10012,-10289,-188,20309,-32344,6423,1320,-1898,9272,2142,-20886,27035,7330,10280,-9273,27130,29847,31121,9012,-23452,-20942,22308,-2542,23034,-17518,19687,-12087,28052,24850,21424,1693,-10654,15274,-8034,28698,805,-13531,12215,24583,17132,-5596,21656,-26693,4366,14180,-22777,27326,10444,21340,-22981,10796,17101,-8652,2777,-24433,-2049,-30932,-20885,-8261,-10905,-16085,16074,-18461,14014,19261,17331,2776,-5351,-23317,25005,15144,23430,-23837,31895,-26006,-21537,-31315,-2619,-24528,-13868,27725,23503,-12483,8615,6027,8726,20670,29005,-5548,-7468,29594,-17052,23495,25973,30691,-20129,-11572,6891,-6487,-4652,1228,-315,-30602,27359,-4464,3486,-19747,25187,24502,-4721,25164,10824,-14183,282,-16679,10762,6908,-12989,-2797,-17166,23908,-4097,1703,-4789,17179,-8483,-18627,-8373,6484,25715,-24697,-13387,-18298,1716,-2280,-28645,3088,-10538,-28240,14645,-28123,-231
16,9048,-29269,-5572,-23566,16041,-21383,1454,3950,5715,19476,-32383,-9917,30417,-30824,-6599,-12550,19958,-20157,-19699,21878,-17377,27447,-10262,-31924,32624,22000,23244,1327,1358,-23881,23272,-30191,5650,16917,-18028,12969,154,17827,28049,-15611,19885,5210,30138,29446,13654,17301,-30873,-11276,-5145,-1786,-1429,23023,-23840,-20635,886,-29360,22827,27517,-23073,-15748,-17178,23914,-15196,-7878,23088,-31732,-29875,-19113,-20607,-17650,20935,-5024,8229,26621,32336,30527,-8244,22869,28364,12594,13750,22521,24277,29252,-18039,-5691,12413,17999,16703,20458,1049,-13495,-8863,-32557,-23499,27062,23847,-13108,-3239,-31097,13042,-7013,26627,29253,5638,-1841,-3361,15145,11633,-31808,-1631,-13879,11469,-25218,-26728,20646,-31864,20500,-28434,313,-21571,-24315,-28110,-8417,-12204,2640,6514,25478,6501,-10118,-3874,8853,-16760,30250,-31372,29867,3076,-27781,9283,27972,-31920,-3518,31515,24260,20395,10090,21403,9066,-23677,13077,-7244,-24472,-30497,8660,3465,-22999,-26347,-29733,884,-14666,-3416,-7769,32367,-10268,4955,3599,14313,31557,-22333,18680,-27382,-28024,-4181,-17662,-16025,-14775,3135,21955,7030,18145,-4617,-15094,-13290,-21459,-31036,29337,10268,-22806,8337,25758,-9485,27398,16782,24551,9638,15583,22087,-7071,-2564,6632,9061,-15265,16386,-6616,32384,5521,20683,-3420,1541,-7905,13023,4594,-23758,-20755,-30158,32097,22985,26501,-23897,-24270,-22925,23608,23426,1719,-7,-25306,1952,24184,-6728,24969,-16812,-29380,24745,-1276,-23,32750,-19546,12186,11904,-12239,14360,-4207,-6153,-5199,-4932,2695,19165,-7262,29059,-3471,-6172,14209,8479,15434,18579,16295,-29994,12359,-22059,11368,-18336,-25879,29585,30766,30799,16074,-32285,9615,-24921,14020,29589,-20115,1587,-14749,-27268,-13835,22333,1538,-18862,-28954,-6846,-17584,1535,-28408,29321,-25478,25957,17985,-26739,29128,-29103,22075,24113,31215,-15202,22809,27208,-2259,20073,25061,6804,-5269,14941,31502,32429,2017,-30607,-26393,-15208,-15282,-16314,-24819,9723,26497,-31995,20728,-1047,21954,-15943,-26459,14110,26502,19416,4551,5
485,-8747,-23442,18270,7551,-7613,-13606,-20139,-7815,-26957,-21264,13724,24978,6010,-31204,2840,-2135,-22660,-2532,-4227,16574,-2618,-16885,1946,11860,29341,-8323,-18001,26645,4618,31284,7743,32132,-16576,-6259,-8801,17604,7,11330,-29218,20376,29093,-13048,-7312,22392,31604,1613,22457,20521,-27452,1988,-30947,27772,30679,-19870,-30372,22351,-4438,-20937,21286,-27034,3396,6327,-3587,-2418,13924,-12566,-2315,-21188,5510,16553,-8486,14881,15421,-13818,-29337,-31028,-32139,-2530,30963,-17228,9082,-28747,-15853,-27410,-4187,16950,5745,5718,-11048,13382,-10260,11866,23246,-11850,-13560,11681,7348,-27778,-1355,-15871,11173,32007,-22604,20694,-9815,-22613,-6326,4007,-1618,5347,3093,21008,-9949,25829,-30133,-31448,30185,7001,-25669,-17145,4366,21416,-20352,-9423,16651,-23324,21284,-26686,22161,-14210,16794,-27325,-5312,-10183,-26070,-6785,-6310,28272,-18118,10253,3557,-13095,-23267,26399,12244,31001,-8057,20868,-30809,-29780,-10858,-8940,18514,-2704,17740,2489,13617,10737,-500,14817,14103,2946,30644,-24593,9412,-23547,-10281,-32498,-13122,31804,366,15973,30197,17803,-17046,-24971,3480,-180,-26839,-8063,-1345,-9202,-29092,13091,12203,22806,-14927,-27291,-11549,30630,-23432,-17858,23160,-16435,-18043,-3274,537,25697,22722,-27540,2547,-32323,19775,7917,-12802,-12091,-28131,25808,-871,14174,25378,18991,32357,-13888,-4370,-12170,-32260,-7251,15001,16584,-2947,20825,-12024,-10884,-13330,14322,32081,8348,-27894,14856,-7910,9660,-31177,-14070,-5397,9498,17880,-5232,-7461,32069,-2649,-3016,6137,28294,-14708,9868,-26060,26312,-23207,10058,3428,-20863,10083,11147,17926,-27635,3625,-9932,24151,17817,-24870,11490,-22204,23908,-18717,-31030,12174,-14283,-2474,-22990,-14903,-10405,12784,-32166,8948,-19026,5165,21108,4356,-31899,59,12383,1061,11887,27119,15975,10876,-26221,-21544,-12160,-9504,708,-24527,4845,-26511,32326,24273,3723,-9734,31745,-20817,2787,26736,-1790,25812,10794,28153,-12685,-23007,261,18161,-24198,-13043,-26068,-22653,30472,-11866,22000,7267,-12107,24279,-877,27031,7376,2
9002,12597,-6030,24306,-1814,-5440,-27334,24266,9053,6900,-14021,32238,21418,12638,15740,-15914,-13541,-5360,-109,-25573,-32167,30748,9324,-12916,-27300,-28892,4502,3442,7283,-13692,5607,-5742,-3212,14024,-28652,-17722,1693,17571,-11357,-20280,-31302,-21052,19498,-27768,16302,-450,9866,23621,-13161,28856,-17009,32001,15210,-29770,-4663,10966,19863,-25737,30775,17362,23534,3083,-16104,12,-25281,-26776,30838,-17398,-25705,-6849,-9343,13028,-17493,11510,-24445,23410,30191,-2407,-12109,-3079,16251,9799,15229,-1268,7360,30717,14411,-15996,8516,-22155,-15584,13282,82,-9865,23635,7265,14909,26153,-21874,16114,-6833,-27773,-29559,-25761,-19517,24262,9502,6959,-9778,-9849,-23196,16529,3992,20341,-4765,21932,26787,19839,-20612,-19344,-15720,23667,-11132,-7534,24271,29548,-13037,-27367,-11769,654,-2982,-7250,25555,3270,32212,27099,26033,5894,31042,-16044,-9894,-23727,14021,23228,-27944,-4325,-23973,22394,-3136,-18867,31583,14086,-8257,7172,-19924,123,-1143,6204,19304,31371,25302,6886,18069,10811,15411,1180,6018,-30572,7071,-18633,-12296,-174,22770,-12346,-468,-27805,11546,28864,17332,-14186,-17486,15881,13513,-27734,18528,-4950,7577,6810,-11399,-8525,7459,10651,7861,17618,25938,7450,-5361,-31268,17512,28563,2492,-29248,-19749,28693,-25528,-9779,-7475,14422,-27445,-18263,-11090,8159,-315,22264,-7579,-25899,17076,16432,-28298,14545,3798,24157,13718,4995,-7848,-24429,8319,13461,-9209,-28475,-6052,4997,9614,-18069,-23647,-17356,-25772,-5887,-1692,-13052,23630,25789,26687,10886,18472,-14061,-17216,14627,1003,31850,30605,-3023,6273,4879,32650,24881,-14115,18133,-10739,26379,3322,31922,-13507,27632,-11360,-29116,12824,23133,-13480,-31136,-10714,-17587,1689,28451,-14584,-2728,29934,18858,-4873,-6267,-28619,21727,-5040,2182,19834,22334,-26633,-4461,16389,-6827,8013,26711,15728,-32075,13564,11262,16456,4849,-13063,-8132,-10893,-5617,19508,-944,-7190,31687,7933,22113,15114,289,8374,-24560,1399,-31123,26131,-11846,14802,-20457,5663,-25144,-19726,25007,-15149,-405,22156,-8065,23168,3878,28
916,29696,-20447,-21022,-6023,-22364,3143,-19551,-9262,-23008,22378,30697,-8880,-19379,-14667,-18887,575,-15788,-28322,22491,-9849,3294,30414,-7137,3493,13124,3344,-11967,32636,-29358,-20466,-28511,-10596,32616,5990,24671,-2955,15388,30876,-23567,328,-5331,-26021,-27284,-1186,-3404,-2357,26510,-5674,23256,17317,1413,-4080,17510,-19843,26427,-17812,25387,-29774,30688,21920,14641,-3561,-23891,-30726,-26392,-27880,3949,18721,-20886,2719,-13541,-30103,-19435,-3257,-26911,-25546,-26497,-21748,29706,-13064,-15840,-18685,25198,-12976,7170,-10295,-19360,-1689,-12272,29035,5063,-3843,1997,-15146,-17529,-8717,-30435,-13715,25483,-18241,-11752,-14315,4648,31373,31763,18705,1123,4929,-17922,3650,-17384,-23225,-11788,22280,-3385,14251,7876,-10407,-11916,-25304,14493,-9521,3445,-13408,17829,-19129,-7996,-10302,9743,-485,-10117,-13186,-24991,-26317,-27301,-14672,-32694,-16322,-31666,-9143,27059,2067,6542,-13996,-20036,-11065,-25161,26142,-3833,-20388,-23784,-11168,-511,-4234,10262,9418,2862,-5267,10175,-31248,-15259,19173,-6209,-16671,-13943,-13692,-15115,-10791,8360,21496,25035,29159,-25952,16896,-19209,-27107,31273,5570,-9324,-14210,7898,19503,17916,27590,12091,20369,10120,-22614,-25722,-28157,1885,29543,-10425,13547,16266,-819,-10725,-14087,9445,13900,9331,-5235,28280,30835,-16564,17635,16271,2298,-4041,5137,-13290,-20799,21496,-1075,-28218,12369,9574,-3319,5683,-30193,-3829,25244,-21642,-10535,23817,-8839,-29290,26734,29781,27737,25725,-18529,-3463,10785,28162,1226,11573,6218,21445,-8700,29772,4197,-32242,-10687,8879,-29445,20907,-30046,1409,23530,-29499,2600,15827,-28043,-6277,-31746,15678,-7924,19895,-23621,422,-9787,17100,-31586,29815,22279,21852,25551,30290,-2867,32004,-12465,-321,21155,28355,31409,-15762,-10218,-17556,-9026,20985,-18440,22256,617,-23933,-31623,31578,-22971,-4966,19916,31738,-13689,16517,14717,14649,20264,-27185,23725,29603,19043,23890,-25357,2723,7762,-25326,-23653,-20219,-18337,10029,-16822,1871,-22513,-2496,-1423,-4306,5655,18292,-9891,74,30590,8199,210
54,-25778,18205,21335,13778,-4280,8809,14450,-7131,25597,28071,18868,-13574,-12299,-24846,10727,-9445,32091,-24199,-21936,-11796,2469,-12695,16694,20396,30842,31664,-15193,-4028,13663,15855,-13891,9086,13463,3095,28330,-5253,-17564,-1816,-21227,20576,108,-9882,-11561,9361,24786,-17563,-10818,1398,1915,17646,-6346,30919,32014,-20268,15548,-13203,-9235,-7305,9523,-10513,-24785,-18352,15094,27784,-3548,24628,1657,-21494,15101,17731,4901,-22440,22138,-27044,13117,14660,-8755,-11937,16747,27616,11389,15580,-168,-4462,27827,7135,-14145,-14925,26896,-15161,2858,9530,-30826,-20402,332,13757,1649,-31425,32588,29529,14591,26479,27327,-25074,-26634,-31738,10628,24158,1391,2132,-29305,21026,-15793,-22017,-27940,5432,-4570,18249,28978,6026,-9755,-14259,26638,19522,15990,-31444,-31864,-25961,-20950,-1611,6636,3563,-22719,5320,-6695,-5530,13242,3884,-4667,24063,-32595,-6145,-7903,-32713,9708,18636,23743,-9393,-13041,-5438,-30023,-30127,30436,20594,10371,5036,9494,-9412,-10072,25004,20433,-9783,26556,-26276,-4329,30255,18178,-20536,22363,8039,-25944,3015,32398,-21254,-10299,-17065,8873,15940,14751,19471,-3519,-30586,7456,-17658,21066,-20234,9848,-146,-21794,17309,30020,-21782,-31623,14524,-2682,2918,-22353,5996,-6351,18130,-16801,12279,13982,22865,-16751,18068,8298,-5976,-10695,19442,-12664,-10492,14224,24510,-23852,8190,-7174,20757,-4143,17471,-15828,9386,-9328,1125,420,-26982,27611,-31101,8870,9591,9909,28822,7162,-32759,16953,32202,-31599,-2853,1830,28500,22587,-6346,-30469,-8862,-9686,30900,-32075,-11883,-1593,4826,27443,-7369,-5313,25304,-5492,-394,-4797,564,-23827,30796,5813,5691,-6403,-21453,25899,-7149,26908,2448,13069,12122,-15131,-6885,-7323,-5866,-9063,10998,-261,-10025,10694,9501,-16108,-26185,-4281,-19298,9474,-5503,-19191,-3069,7129,27658,-31839,-7781,-20380,-2982,-26759,-6883,31704,-10079,31760,20704,16230,28926,-1684,-7715,-19442,-22149,13153,18381,-22176,-6208,2761,-26502,-1017,28877,-17111,-30680,29262,-7656,26056,-4273,26489,-18657,-5847,-21734,-11766,-23553,7622
,-14522,94,21491,-30828,-20414,-29798,10344,-17801,24246,-17948,14656,-32649,24211,5101 diff --git a/tensorflow/lite/micro/integration_tests/templates/BUILD.mako b/tensorflow/lite/micro/integration_tests/templates/BUILD.mako new file mode 100644 index 0000000..7f50965 --- /dev/null +++ b/tensorflow/lite/micro/integration_tests/templates/BUILD.mako @@ -0,0 +1,88 @@ +# Description: +# generated integration test for one specific kernel in a model. +load( + "//tensorflow/lite/micro:build_def.bzl", + "generate_cc_arrays", + "micro_copts", +) + +package( + default_visibility = ["//visibility:public"], + # Disabling layering_check because of http://b/177257332 + features = ["-layering_check"], + licenses = ["notice"], +) + +% for target in targets: +generate_cc_arrays(name = "generated_${target}_model_data_cc",src = "${target}.tflite",out = "${target}_model_data.cc",) +generate_cc_arrays(name = "generated_${target}_model_data_hdr",src = "${target}.tflite",out = "${target}_model_data.h",) +% endfor + +% for target in targets: +% for input_idx, input in enumerate(inputs): +generate_cc_arrays( + name = "generated_${target}_input${input_idx}_${input_dtypes[input_idx]}_test_data_cc", + src = "${target}_input${input_idx}_${input_dtypes[input_idx]}.csv", + out = "${target}_input${input_idx}_${input_dtypes[input_idx]}_test_data.cc", +) + +generate_cc_arrays( + name = "generated_${target}_input${input_idx}_${input_dtypes[input_idx]}_test_data_hdr", + src = "${target}_input${input_idx}_${input_dtypes[input_idx]}.csv", + out = "${target}_input${input_idx}_${input_dtypes[input_idx]}_test_data.h", +) +% endfor + +generate_cc_arrays( + name = "generated_${target}_golden_${output_dtype}_test_data_cc", + src = "${target}_golden_${output_dtype}.csv", + out = "${target}_golden_${output_dtype}_test_data.cc", +) + +generate_cc_arrays( + name = "generated_${target}_golden_${output_dtype}_test_data_hdr", + src = "${target}_golden_${output_dtype}.csv", + out = 
"${target}_golden_${output_dtype}_test_data.h", +) +% endfor + +cc_library( + name = "models_and_testdata", + srcs = [ +% for target in targets: + "generated_${target}_model_data_cc", +% for input_idx, input in enumerate(inputs): + "generated_${target}_input${input_idx}_${input_dtypes[input_idx]}_test_data_cc", +% endfor + "generated_${target}_golden_${output_dtype}_test_data_cc", +% endfor + ], + hdrs = [ +% for target in targets: + "generated_${target}_model_data_hdr", +% for input_idx, input in enumerate(inputs): + "generated_${target}_input${input_idx}_${input_dtypes[input_idx]}_test_data_hdr", +% endfor + "generated_${target}_golden_${output_dtype}_test_data_hdr", +% endfor + ], + copts = micro_copts(), +) + +cc_test( + name = "integration_test", + srcs = [ + "integration_tests.cc", + ], + copts = micro_copts(), + deps = [ + ":models_and_testdata", + "//tensorflow/lite/micro:micro_framework", + "//tensorflow/lite/micro:micro_log", + "//tensorflow/lite/micro:micro_resource_variable", + "//tensorflow/lite/micro:op_resolvers", + "//tensorflow/lite/micro:recording_allocators", + "//python/tflite_micro:python_ops_resolver", + "//tensorflow/lite/micro/testing:micro_test", + ], +) diff --git a/tensorflow/lite/micro/integration_tests/templates/integration_tests_cc.mako b/tensorflow/lite/micro/integration_tests/templates/integration_tests_cc.mako new file mode 100644 index 0000000..eae1fb1 --- /dev/null +++ b/tensorflow/lite/micro/integration_tests/templates/integration_tests_cc.mako @@ -0,0 +1,107 @@ +/* Copyright 2022 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. 
+You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#include + +#include "tensorflow/lite/c/common.h" + +#include "tensorflow/lite/micro/micro_log.h" +#include "tensorflow/lite/micro/micro_profiler.h" +#include "python/tflite_micro/python_ops_resolver.h" +#include "tensorflow/lite/micro/recording_micro_allocator.h" +#include "tensorflow/lite/micro/recording_micro_interpreter.h" +#include "tensorflow/lite/micro/system_setup.h" +#include "tensorflow/lite/micro/testing/micro_test.h" + +% for target_with_path in targets_with_path: +#include "${target_with_path}_model_data.h" +% for input_idx, input in enumerate(inputs): +#include "${target_with_path}_input${input_idx}_${input_dtypes[input_idx]}_test_data.h" +% endfor +#include "${target_with_path}_golden_${output_dtype}_test_data.h" +% endfor + +constexpr size_t kTensorArenaSize = 1024 * 100; +uint8_t tensor_arena[kTensorArenaSize]; + +namespace tflite { +namespace micro { +namespace { + +void RunModel(const uint8_t* model, +% for input_idx, input in enumerate(inputs): + const ${input_dtypes[input_idx]}_t* input${input_idx}, + const uint32_t input${input_idx}_size, +% endfor + const ${output_dtype}_t* golden, + const uint32_t golden_size, + const char* name) { + InitializeTarget(); + MicroProfiler profiler; + PythonOpsResolver op_resolver; + + MicroInterpreter interpreter(GetModel(model), op_resolver, tensor_arena, + kTensorArenaSize, + nullptr, &profiler); + interpreter.AllocateTensors(); +% for input_idx, input in enumerate(inputs): + TfLiteTensor* input_tensor${input_idx} = 
interpreter.input(${input_idx}); + TF_LITE_MICRO_EXPECT_EQ(input_tensor${input_idx}->bytes, + input${input_idx}_size * sizeof( + ${input_dtypes[input_idx]}_t)); + memcpy(interpreter.input(${input_idx})->data.raw, + input${input_idx}, + input_tensor${input_idx}->bytes); +% endfor + if (kTfLiteOk != interpreter.Invoke()) { + TF_LITE_MICRO_EXPECT(false); + return; + } + profiler.Log(); + MicroPrintf(""); + + TfLiteTensor* output_tensor = interpreter.output(0); + TF_LITE_MICRO_EXPECT_EQ(output_tensor->bytes, + golden_size * sizeof(${output_dtype}_t)); + ${output_dtype}_t* output = ::tflite::GetTensorData<${output_dtype}_t>(output_tensor); + for (uint32_t i = 0; i < golden_size; i++) { + // TODO(b/205046520): Better understand why TfLite and TFLM can sometimes be + // off by 1. + TF_LITE_MICRO_EXPECT_NEAR(golden[i], output[i], 1); + } +} + +} // namespace +} // namespace micro +} // namespace tflite + +TF_LITE_MICRO_TESTS_BEGIN + +% for target in targets: + +TF_LITE_MICRO_TEST(${target}_test) {tflite::micro::RunModel( +g_${target}_model_data, +% for input_idx, input in enumerate(inputs): +g_${target}_input${input_idx}_${input_dtypes[input_idx]}_test_data, +g_${target}_input${input_idx}_${input_dtypes[input_idx]}_test_data_size, +% endfor +g_${target}_golden_${output_dtype}_test_data, +g_${target}_golden_${output_dtype}_test_data_size, +"${target} test"); +} + +% endfor + +TF_LITE_MICRO_TESTS_END diff --git a/tensorflow/lite/micro/kernels/BUILD b/tensorflow/lite/micro/kernels/BUILD new file mode 100644 index 0000000..31767b8 --- /dev/null +++ b/tensorflow/lite/micro/kernels/BUILD @@ -0,0 +1,1453 @@ +load("//tensorflow/lite/micro:build_def.bzl", "micro_copts", "tflm_kernel_cc_library") +load( + "//tensorflow:extra_rules.bzl", + "tflm_kernel_friends", + "xtensa_fusion_f1_config", + "xtensa_hifi_3z_config", + "xtensa_hifi_5_config", + "xtensa_vision_p6_config", +) + +package( + features = [ + "-layering_check", # buildozer: disable=no-layering-check, TODO(b/177257333): 
consider enabling layering check + "-parse_headers", # buildozer: disable=no-parse-headers, paser_headers is unavailable with bazel (http://b/175817117#comment4) + ], + licenses = ["notice"], +) + +package_group( + name = "tflite_micro", + packages = ["//..."], +) + +package_group( + name = "micro_top_level", + packages = ["//tensorflow/lite/micro"], +) + +package_group( + name = "kernel_friends", + packages = tflm_kernel_friends(), +) + +#################################### +# C++ libraries +#################################### + +cc_library( + name = "activation_utils", + hdrs = ["activation_utils.h"], + deps = [ + "//tensorflow/lite/c:common", + "//tensorflow/lite/kernels/internal:cppmath", + ], +) + +cc_library( + name = "circular_buffer_flexbuffers_generated_data", + srcs = [ + "circular_buffer_flexbuffers_generated_data.cc", + ], + hdrs = [ + "circular_buffer_flexbuffers_generated_data.h", + ], +) + +cc_library( + name = "conv_test_common", + srcs = [ + "conv_test_common.cc", + ], + hdrs = [ + "conv_test.h", + ], + deps = [ + ":kernel_runner", + ":micro_ops", + "//tensorflow/lite/c:common", + "//tensorflow/lite/micro:test_helpers", + "//tensorflow/lite/micro/testing:micro_test", + ], +) + +cc_library( + name = "detection_postprocess_flexbuffers_generated_data", + srcs = [ + "detection_postprocess_flexbuffers_generated_data.cc", + ], + hdrs = [ + "detection_postprocess_flexbuffers_generated_data.h", + ], +) + +cc_library( + name = "kernel_runner", + srcs = [ + "kernel_runner.cc", + ], + hdrs = ["kernel_runner.h"], + visibility = [ + "//visibility:public", + ], + deps = [ + "//tensorflow/lite/c:common", + "//tensorflow/lite/kernels/internal:compatibility", + "//tensorflow/lite/micro:fake_micro_context", + "//tensorflow/lite/micro:micro_allocator", + "//tensorflow/lite/micro:micro_arena_constants", + "//tensorflow/lite/micro:mock_micro_graph", + "//tensorflow/lite/micro:test_helpers", + ], +) + +cc_library( + name = "kernel_util", + srcs = [ + "kernel_util.cc", 
+ ], + hdrs = ["kernel_util.h"], + visibility = [ + ":kernel_friends", + ":tflite_micro", + ], + deps = [ + "//tensorflow/lite/c:common", + "//tensorflow/lite/kernels/internal:compatibility", + "//tensorflow/lite/kernels/internal:tensor_utils_no_eigen", + "//tensorflow/lite/kernels/internal:types", + "//tensorflow/lite/micro:debug_log", + "//tensorflow/lite/micro:micro_context", + ], +) + +cc_library( + name = "lstm_shared", + hdrs = [ + "lstm_shared.h", + ], + visibility = ["//tensorflow/lite/micro/kernels/testdata:__pkg__"], +) + +cc_library( + name = "lstm_eval_test_lib", + hdrs = [ + "lstm_eval_test.h", + ], + deps = [ + ":kernel_util", + ":micro_ops", + "//tensorflow/lite/c:common", + "//tensorflow/lite/micro:test_helpers", + "//tensorflow/lite/micro/kernels/testdata:lstm_test_data", + ], +) + +cc_library( + name = "micro_tensor_utils", + srcs = [ + "micro_tensor_utils.cc", + ], + hdrs = ["micro_tensor_utils.h"], + deps = [ + "//tensorflow/lite/c:common", + "//tensorflow/lite/core:macros", + "//tensorflow/lite/kernels:op_macros", + "//tensorflow/lite/kernels/internal:common", + "//tensorflow/lite/kernels/internal:compatibility", + "//tensorflow/lite/kernels/internal:cppmath", + "//tensorflow/lite/kernels/internal:tensor_utils_no_eigen", + ], +) + +HIFI4_COPTS = [ + "-DXTENSA=1", + "-DHIFI4=1", +] + +HIFI5_COPTS = [ + "-DXTENSA=1", + "-DHIFI5=1", +] + +VP6_COPTS = [ + "-DXTENSA=1", + "-DVISION_P6=1", +] + +tflm_kernel_cc_library( + name = "micro_ops", + srcs = [ + "activations.cc", + "activations_common.cc", + "add.cc", + "add_common.cc", + "add_n.cc", + "arg_min_max.cc", + "assign_variable.cc", + "batch_to_space_nd.cc", + "broadcast_args.cc", + "broadcast_to.cc", + "call_once.cc", + "cast.cc", + "ceil.cc", + "circular_buffer.cc", + "circular_buffer_common.cc", + "comparisons.cc", + "concatenation.cc", + "conv.cc", + "conv_common.cc", + "cumsum.cc", + "depth_to_space.cc", + "depthwise_conv.cc", + "depthwise_conv_common.cc", + "dequantize.cc", + 
"dequantize_common.cc", + "detection_postprocess.cc", + "div.cc", + "elementwise.cc", + "elu.cc", + "embedding_lookup.cc", + "ethosu.cc", + "exp.cc", + "expand_dims.cc", + "fill.cc", + "floor.cc", + "floor_div.cc", + "floor_mod.cc", + "fully_connected.cc", + "fully_connected_common.cc", + "gather.cc", + "gather_nd.cc", + "hard_swish.cc", + "hard_swish_common.cc", + "if.cc", + "l2_pool_2d.cc", + "l2norm.cc", + "leaky_relu.cc", + "leaky_relu_common.cc", + "log_softmax.cc", + "logical.cc", + "logical_common.cc", + "logistic.cc", + "logistic_common.cc", + "lstm_eval.cc", + "lstm_eval_common.cc", + "maximum_minimum.cc", + "mirror_pad.cc", + "mul.cc", + "mul_common.cc", + "neg.cc", + "pack.cc", + "pad.cc", + "pooling.cc", + "pooling_common.cc", + "prelu.cc", + "prelu_common.cc", + "quantize.cc", + "quantize_common.cc", + "read_variable.cc", + "reduce.cc", + "reduce_common.cc", + "reshape.cc", + "resize_bilinear.cc", + "resize_nearest_neighbor.cc", + "round.cc", + "select.cc", + "shape.cc", + "slice.cc", + "softmax.cc", + "softmax_common.cc", + "space_to_batch_nd.cc", + "space_to_depth.cc", + "split.cc", + "split_v.cc", + "squared_difference.cc", + "squeeze.cc", + "strided_slice.cc", + "sub.cc", + "sub_common.cc", + "svdf.cc", + "svdf_common.cc", + "tanh.cc", + "transpose.cc", + "transpose_conv.cc", + "unidirectional_sequence_lstm.cc", + "unpack.cc", + "var_handle.cc", + "while.cc", + "zeros_like.cc", + ], + hdrs = [ + "activations.h", + "add.h", + "circular_buffer.h", + "conv.h", + "depthwise_conv.h", + "dequantize.h", + "ethosu.h", + "fully_connected.h", + "hard_swish.h", + "leaky_relu.h", + "logical.h", + "logistic.h", + "lstm_eval.h", + "lstm_shared.h", + "micro_ops.h", + "mul.h", + "pad.h", + "pooling.h", + "prelu.h", + "quantize.h", + "reduce.h", + "softmax.h", + "sub.h", + "svdf.h", + ] + select({ + xtensa_fusion_f1_config(): glob(["xtensa/**/*.h"]), + xtensa_hifi_3z_config(): glob(["xtensa/**/*.h"]), + xtensa_hifi_5_config(): glob(["xtensa/**/*.h"]), + 
xtensa_vision_p6_config(): glob(["xtensa/**/*.h"]), + "//conditions:default": [], + }), + accelerated_srcs = { + xtensa_fusion_f1_config(): glob(["xtensa/**/*.cc"]), + xtensa_hifi_3z_config(): glob(["xtensa/**/*.cc"]), + xtensa_hifi_5_config(): glob(["xtensa/**/*.cc"]), + xtensa_vision_p6_config(): glob(["xtensa/**/*.cc"]), + }, + copts = micro_copts() + select({ + xtensa_fusion_f1_config(): HIFI4_COPTS, + xtensa_hifi_3z_config(): HIFI4_COPTS, + xtensa_hifi_5_config(): HIFI5_COPTS, + xtensa_vision_p6_config(): VP6_COPTS, + "//conditions:default": [], + }), + visibility = [ + # Public visibility to allow application code to select kernel variants. + "//visibility:public", + ], + deps = [ + ":activation_utils", + ":kernel_util", + ":micro_tensor_utils", + "//tensorflow/lite/c:common", + "//tensorflow/lite/kernels:kernel_util", + "//tensorflow/lite/kernels:op_macros", + "//tensorflow/lite/kernels:padding", + "//tensorflow/lite/kernels/internal:common", + "//tensorflow/lite/kernels/internal:compatibility", + "//tensorflow/lite/kernels/internal:cppmath", + "//tensorflow/lite/kernels/internal:quantization_util", + "//tensorflow/lite/kernels/internal:reference_base", + "//tensorflow/lite/kernels/internal:tensor", + "//tensorflow/lite/kernels/internal:tensor_utils_no_eigen", + "//tensorflow/lite/kernels/internal:types", + "//tensorflow/lite/micro:flatbuffer_utils", + "//tensorflow/lite/micro:memory_helpers", + "//tensorflow/lite/micro:micro_graph", + "//tensorflow/lite/micro:micro_log", + "//tensorflow/lite/micro:micro_utils", + "//tensorflow/lite/schema:schema_fbs", + "//signal/micro/kernels:register_signal_ops", + "@flatbuffers//:runtime_cc", + ] + select({ + xtensa_fusion_f1_config(): ["//third_party/xtensa/nnlib_hifi4:nnlib_hifi4_lib"], + xtensa_hifi_3z_config(): ["//third_party/xtensa/nnlib_hifi4:nnlib_hifi4_lib"], + xtensa_hifi_5_config(): ["//third_party/xtensa/nnlib_hifi5:nnlib_hifi5_lib"], + xtensa_vision_p6_config(): 
["//third_party/xtensa/xi_tflmlib_vision_p6:xi_tflmlib_vision_p6_lib"], + "//conditions:default": [], + }), +) + +#################################### +# C++ tests +#################################### + +cc_test( + name = "activations_test", + srcs = [ + "activations_test.cc", + ], + deps = [ + ":kernel_runner", + "//tensorflow/lite/c:common", + "//tensorflow/lite/micro:op_resolvers", + "//tensorflow/lite/micro:test_helpers", + "//tensorflow/lite/micro/testing:micro_test", + ], +) + +cc_test( + name = "add_n_test", + srcs = [ + "add_n_test.cc", + ], + deps = [ + ":kernel_runner", + "//tensorflow/lite/c:common", + "//tensorflow/lite/micro:debug_log", + "//tensorflow/lite/micro:op_resolvers", + "//tensorflow/lite/micro:test_helpers", + "//tensorflow/lite/micro/testing:micro_test", + ], +) + +cc_test( + name = "add_test", + srcs = [ + "add_test.cc", + ], + deps = [ + ":kernel_runner", + "//tensorflow/lite/c:common", + "//tensorflow/lite/micro:op_resolvers", + "//tensorflow/lite/micro:test_helpers", + "//tensorflow/lite/micro/testing:micro_test", + ], +) + +cc_test( + name = "arg_min_max_test", + srcs = [ + "arg_min_max_test.cc", + ], + deps = [ + ":kernel_runner", + "//tensorflow/lite/c:common", + "//tensorflow/lite/micro:op_resolvers", + "//tensorflow/lite/micro:test_helpers", + "//tensorflow/lite/micro/testing:micro_test", + ], +) + +cc_test( + name = "batch_to_space_nd_test", + srcs = [ + "batch_to_space_nd_test.cc", + ], + deps = [ + ":kernel_runner", + "//tensorflow/lite/c:common", + "//tensorflow/lite/micro:op_resolvers", + "//tensorflow/lite/micro:test_helpers", + "//tensorflow/lite/micro/testing:micro_test", + ], +) + +cc_test( + name = "broadcast_args_test", + srcs = [ + "broadcast_args_test.cc", + ], + deps = [ + ":kernel_runner", + "//tensorflow/lite/c:common", + "//tensorflow/lite/micro:op_resolvers", + "//tensorflow/lite/micro:test_helpers", + "//tensorflow/lite/micro/testing:micro_test", + ], +) + +cc_test( + name = "broadcast_to_test", + srcs = [ + 
"broadcast_to_test.cc", + ], + deps = [ + ":kernel_runner", + "//tensorflow/lite/c:common", + "//tensorflow/lite/micro:op_resolvers", + "//tensorflow/lite/micro:test_helpers", + "//tensorflow/lite/micro/testing:micro_test", + ], +) + +cc_test( + name = "call_once_test", + srcs = ["call_once_test.cc"], + deps = [ + ":kernel_runner", + "//tensorflow/lite/c:common", + "//tensorflow/lite/micro:op_resolvers", + "//tensorflow/lite/micro:test_helpers", + "//tensorflow/lite/micro/testing:micro_test", + ], +) + +cc_test( + name = "cast_test", + srcs = ["cast_test.cc"], + deps = [ + ":kernel_runner", + "//tensorflow/lite/c:common", + "//tensorflow/lite/micro:debug_log", + "//tensorflow/lite/micro:op_resolvers", + "//tensorflow/lite/micro:test_helpers", + "//tensorflow/lite/micro/testing:micro_test", + ], +) + +cc_test( + name = "ceil_test", + srcs = [ + "ceil_test.cc", + ], + deps = [ + ":kernel_runner", + "//tensorflow/lite/c:common", + "//tensorflow/lite/micro:op_resolvers", + "//tensorflow/lite/micro:test_helpers", + "//tensorflow/lite/micro/testing:micro_test", + ], +) + +cc_test( + name = "circular_buffer_test", + srcs = [ + "circular_buffer_test.cc", + ], + deps = [ + "circular_buffer_flexbuffers_generated_data", + ":kernel_runner", + ":micro_ops", + "//tensorflow/lite/c:common", + "//tensorflow/lite/micro:op_resolvers", + "//tensorflow/lite/micro:test_helpers", + "//tensorflow/lite/micro/testing:micro_test", + ], +) + +cc_test( + name = "comparisons_test", + srcs = [ + "comparisons_test.cc", + ], + deps = [ + ":kernel_runner", + "//tensorflow/lite/c:common", + "//tensorflow/lite/micro:test_helpers", + "//tensorflow/lite/micro/testing:micro_test", + ], +) + +cc_test( + name = "concatenation_test", + srcs = [ + "concatenation_test.cc", + ], + deps = [ + ":kernel_runner", + "//tensorflow/lite/c:common", + "//tensorflow/lite/micro:test_helpers", + "//tensorflow/lite/micro/testing:micro_test", + ], +) + +cc_test( + name = "conv_test", + srcs = [ + "conv_test.cc", + ], + 
deps = [ + ":conv_test_common", + ":kernel_runner", + "//tensorflow/lite/c:common", + "//tensorflow/lite/micro:micro_utils", + "//tensorflow/lite/micro:test_helpers", + "//tensorflow/lite/micro/kernels/testdata:conv_test_data", + "//tensorflow/lite/micro/testing:micro_test", + ], +) + +cc_test( + name = "cumsum_test", + srcs = [ + "cumsum_test.cc", + ], + deps = [ + ":kernel_runner", + "//tensorflow/lite/c:common", + "//tensorflow/lite/micro:debug_log", + "//tensorflow/lite/micro:op_resolvers", + "//tensorflow/lite/micro:test_helpers", + "//tensorflow/lite/micro/testing:micro_test", + ], +) + +cc_test( + name = "depth_to_space_test", + srcs = [ + "depth_to_space_test.cc", + ], + deps = [ + ":kernel_runner", + "//tensorflow/lite/c:common", + "//tensorflow/lite/micro:debug_log", + "//tensorflow/lite/micro:op_resolvers", + "//tensorflow/lite/micro:test_helpers", + "//tensorflow/lite/micro/testing:micro_test", + ], +) + +cc_test( + name = "depthwise_conv_test", + srcs = [ + "depthwise_conv_test.cc", + ], + deps = [ + ":kernel_runner", + "//tensorflow/lite/c:common", + "//tensorflow/lite/kernels/internal:tensor", + "//tensorflow/lite/micro:test_helpers", + "//tensorflow/lite/micro/testing:micro_test", + ], +) + +cc_test( + name = "dequantize_test", + srcs = [ + "dequantize_test.cc", + ], + deps = [ + ":kernel_runner", + "//tensorflow/lite/c:common", + "//tensorflow/lite/micro:test_helpers", + "//tensorflow/lite/micro/testing:micro_test", + ], +) + +cc_test( + name = "detection_postprocess_test", + srcs = [ + "detection_postprocess_test.cc", + ], + deps = [ + ":detection_postprocess_flexbuffers_generated_data", + ":kernel_runner", + "//tensorflow/lite/c:common", + "//tensorflow/lite/kernels/internal:tensor", + "//tensorflow/lite/micro:test_helpers", + "//tensorflow/lite/micro/testing:micro_test", + "@flatbuffers//:runtime_cc", + ], +) + +cc_test( + name = "div_test", + srcs = [ + "div_test.cc", + ], + deps = [ + ":kernel_runner", + "//tensorflow/lite/c:common", + 
"//tensorflow/lite/micro:test_helpers", + "//tensorflow/lite/micro/testing:micro_test", + ], +) + +cc_test( + name = "elementwise_test", + srcs = ["elementwise_test.cc"], + deps = [ + ":kernel_runner", + "//tensorflow/lite/c:common", + "//tensorflow/lite/micro:debug_log", + "//tensorflow/lite/micro:op_resolvers", + "//tensorflow/lite/micro:test_helpers", + "//tensorflow/lite/micro/testing:micro_test", + ], +) + +cc_test( + name = "elu_test", + srcs = [ + "elu_test.cc", + ], + deps = [ + ":kernel_runner", + "//tensorflow/lite/c:common", + "//tensorflow/lite/micro:debug_log", + "//tensorflow/lite/micro:op_resolvers", + "//tensorflow/lite/micro:test_helpers", + "//tensorflow/lite/micro/testing:micro_test", + ], +) + +cc_test( + name = "embedding_lookup_test", + srcs = [ + "embedding_lookup_test.cc", + ], + deps = [ + ":kernel_runner", + "//tensorflow/lite/c:common", + "//tensorflow/lite/micro:debug_log", + "//tensorflow/lite/micro:op_resolvers", + "//tensorflow/lite/micro:test_helpers", + "//tensorflow/lite/micro/testing:micro_test", + ], +) + +cc_test( + name = "exp_test", + srcs = ["exp_test.cc"], + deps = [ + ":kernel_runner", + "//tensorflow/lite/c:common", + "//tensorflow/lite/micro:debug_log", + "//tensorflow/lite/micro:op_resolvers", + "//tensorflow/lite/micro:test_helpers", + "//tensorflow/lite/micro/testing:micro_test", + ], +) + +cc_test( + name = "expand_dims_test", + srcs = ["expand_dims_test.cc"], + deps = [ + ":kernel_runner", + "//tensorflow/lite/c:common", + "//tensorflow/lite/micro:debug_log", + "//tensorflow/lite/micro:op_resolvers", + "//tensorflow/lite/micro:test_helpers", + "//tensorflow/lite/micro/testing:micro_test", + ], +) + +cc_test( + name = "fill_test", + srcs = [ + "fill_test.cc", + ], + deps = [ + ":kernel_runner", + "//tensorflow/lite/c:common", + "//tensorflow/lite/micro:op_resolvers", + "//tensorflow/lite/micro:test_helpers", + "//tensorflow/lite/micro/testing:micro_test", + ], +) + +cc_test( + name = "floor_div_test", + srcs = 
["floor_div_test.cc"], + deps = [ + ":kernel_runner", + "//tensorflow/lite/c:common", + "//tensorflow/lite/micro:debug_log", + "//tensorflow/lite/micro:op_resolvers", + "//tensorflow/lite/micro:test_helpers", + "//tensorflow/lite/micro/testing:micro_test", + ], +) + +cc_test( + name = "floor_mod_test", + srcs = ["floor_mod_test.cc"], + deps = [ + ":kernel_runner", + "//tensorflow/lite/c:common", + "//tensorflow/lite/micro:debug_log", + "//tensorflow/lite/micro:op_resolvers", + "//tensorflow/lite/micro:test_helpers", + "//tensorflow/lite/micro/testing:micro_test", + ], +) + +cc_test( + name = "floor_test", + srcs = [ + "floor_test.cc", + ], + deps = [ + ":kernel_runner", + "//tensorflow/lite/c:common", + "//tensorflow/lite/micro:op_resolvers", + "//tensorflow/lite/micro:test_helpers", + "//tensorflow/lite/micro/testing:micro_test", + ], +) + +cc_test( + name = "fully_connected_test", + srcs = [ + "fully_connected_test.cc", + ], + deps = [ + ":kernel_runner", + "//tensorflow/lite/c:common", + "//tensorflow/lite/micro:micro_utils", + "//tensorflow/lite/micro:op_resolvers", + "//tensorflow/lite/micro:test_helpers", + "//tensorflow/lite/micro/testing:micro_test", + ], +) + +cc_test( + name = "gather_test", + srcs = [ + "gather_test.cc", + ], + deps = [ + ":kernel_runner", + "//tensorflow/lite/c:common", + "//tensorflow/lite/micro:micro_utils", + "//tensorflow/lite/micro:op_resolvers", + "//tensorflow/lite/micro:test_helpers", + "//tensorflow/lite/micro/testing:micro_test", + ], +) + +cc_test( + name = "gather_nd_test", + srcs = [ + "gather_nd_test.cc", + ], + deps = [ + ":kernel_runner", + "//tensorflow/lite/c:common", + "//tensorflow/lite/micro:micro_utils", + "//tensorflow/lite/micro:op_resolvers", + "//tensorflow/lite/micro:test_helpers", + "//tensorflow/lite/micro/testing:micro_test", + ], +) + +cc_test( + name = "hard_swish_test", + srcs = ["hard_swish_test.cc"], + deps = [ + ":kernel_runner", + "//tensorflow/lite/c:common", + 
"//tensorflow/lite/micro:op_resolvers", + "//tensorflow/lite/micro:test_helpers", + "//tensorflow/lite/micro/testing:micro_test", + ], +) + +cc_test( + name = "if_test", + srcs = ["if_test.cc"], + deps = [ + ":kernel_runner", + "//tensorflow/lite/c:common", + "//tensorflow/lite/micro:micro_framework", + "//tensorflow/lite/micro:mock_micro_graph", + "//tensorflow/lite/micro:op_resolvers", + "//tensorflow/lite/micro:test_helpers", + "//tensorflow/lite/micro/testing:micro_test", + ], +) + +cc_test( + name = "l2norm_test", + srcs = [ + "l2norm_test.cc", + ], + deps = [ + ":kernel_runner", + "//tensorflow/lite/c:common", + "//tensorflow/lite/micro:op_resolvers", + "//tensorflow/lite/micro:test_helpers", + "//tensorflow/lite/micro/testing:micro_test", + ], +) + +cc_test( + name = "l2_pool_2d_test", + srcs = [ + "l2_pool_2d_test.cc", + ], + deps = [ + ":kernel_runner", + "//tensorflow/lite/c:common", + "//tensorflow/lite/micro:debug_log", + "//tensorflow/lite/micro:op_resolvers", + "//tensorflow/lite/micro:test_helpers", + "//tensorflow/lite/micro/testing:micro_test", + ], +) + +cc_test( + name = "leaky_relu_test", + srcs = [ + "leaky_relu_test.cc", + ], + deps = [ + ":kernel_runner", + "//tensorflow/lite/c:common", + "//tensorflow/lite/micro:debug_log", + "//tensorflow/lite/micro:op_resolvers", + "//tensorflow/lite/micro:test_helpers", + "//tensorflow/lite/micro/testing:micro_test", + ], +) + +cc_test( + name = "logical_test", + srcs = [ + "logical_test.cc", + ], + deps = [ + ":kernel_runner", + "//tensorflow/lite/c:common", + "//tensorflow/lite/micro:op_resolvers", + "//tensorflow/lite/micro:test_helpers", + "//tensorflow/lite/micro/testing:micro_test", + ], +) + +cc_test( + name = "logistic_test", + srcs = [ + "logistic_test.cc", + ], + deps = [ + ":kernel_runner", + "//tensorflow/lite/c:common", + "//tensorflow/lite/micro:op_resolvers", + "//tensorflow/lite/micro:test_helpers", + "//tensorflow/lite/micro/testing:micro_test", + ], +) + +cc_test( + name = 
"log_softmax_test", + srcs = [ + "log_softmax_test.cc", + ], + deps = [ + ":kernel_runner", + "//tensorflow/lite/c:common", + "//tensorflow/lite/micro:debug_log", + "//tensorflow/lite/micro:op_resolvers", + "//tensorflow/lite/micro:test_helpers", + "//tensorflow/lite/micro/testing:micro_test", + ], +) + +cc_test( + name = "lstm_eval_test", + srcs = [ + "lstm_eval_test.cc", + ], + deps = [ + ":lstm_eval_test_lib", + "//tensorflow/lite/micro:debug_log", + "//tensorflow/lite/micro/testing:micro_test", + ], +) + +cc_test( + name = "maximum_minimum_test", + srcs = [ + "maximum_minimum_test.cc", + ], + deps = [ + ":kernel_runner", + "//tensorflow/lite/c:common", + "//tensorflow/lite/micro:op_resolvers", + "//tensorflow/lite/micro:test_helpers", + "//tensorflow/lite/micro/testing:micro_test", + ], +) + +cc_test( + name = "mirror_pad_test", + srcs = [ + "mirror_pad_test.cc", + ], + deps = [ + ":kernel_runner", + "//tensorflow/lite/c:common", + "//tensorflow/lite/micro:op_resolvers", + "//tensorflow/lite/micro:test_helpers", + "//tensorflow/lite/micro/testing:micro_test", + ], +) + +cc_test( + name = "mul_test", + srcs = [ + "mul_test.cc", + ], + deps = [ + ":kernel_runner", + "//tensorflow/lite/c:common", + "//tensorflow/lite/micro:test_helpers", + "//tensorflow/lite/micro/testing:micro_test", + ], +) + +cc_test( + name = "neg_test", + srcs = [ + "neg_test.cc", + ], + deps = [ + ":kernel_runner", + "//tensorflow/lite/c:common", + "//tensorflow/lite/micro:op_resolvers", + "//tensorflow/lite/micro:test_helpers", + "//tensorflow/lite/micro/testing:micro_test", + ], +) + +cc_test( + name = "pack_test", + srcs = [ + "pack_test.cc", + ], + deps = [ + ":kernel_runner", + "//tensorflow/lite/c:common", + "//tensorflow/lite/micro:debug_log", + "//tensorflow/lite/micro:test_helpers", + "//tensorflow/lite/micro/testing:micro_test", + ], +) + +cc_test( + name = "pad_test", + srcs = [ + "pad_test.cc", + ], + tags = [ + "noasan", + "nomsan", # TODO(b/175133159): currently failing with 
asan and msan + ], + deps = [ + ":kernel_runner", + "//tensorflow/lite/c:common", + "//tensorflow/lite/micro:op_resolvers", + "//tensorflow/lite/micro:test_helpers", + "//tensorflow/lite/micro/testing:micro_test", + ], +) + +cc_test( + name = "pooling_test", + srcs = [ + "pooling_test.cc", + ], + deps = [ + ":kernel_runner", + "//tensorflow/lite/c:common", + "//tensorflow/lite/micro:test_helpers", + "//tensorflow/lite/micro/testing:micro_test", + ], +) + +cc_test( + name = "prelu_test", + srcs = [ + "prelu_test.cc", + ], + deps = [ + ":kernel_runner", + "//tensorflow/lite/c:common", + "//tensorflow/lite/micro:test_helpers", + "//tensorflow/lite/micro/testing:micro_test", + ], +) + +cc_test( + name = "quantization_util_test", + srcs = [ + "quantization_util_test.cc", + ], + deps = [ + "//tensorflow/lite/c:common", + "//tensorflow/lite/kernels/internal:quantization_util", + "//tensorflow/lite/micro/testing:micro_test", + ], +) + +cc_test( + name = "quantize_test", + srcs = [ + "quantize_test.cc", + ], + deps = [ + ":kernel_runner", + "//tensorflow/lite/c:common", + "//tensorflow/lite/micro:test_helpers", + "//tensorflow/lite/micro/testing:micro_test", + ], +) + +cc_test( + name = "reduce_test", + srcs = [ + "reduce_test.cc", + ], + deps = [ + ":kernel_runner", + "//tensorflow/lite/c:common", + "//tensorflow/lite/micro:op_resolvers", + "//tensorflow/lite/micro:test_helpers", + "//tensorflow/lite/micro/testing:micro_test", + ], +) + +cc_test( + name = "reshape_test", + srcs = [ + "reshape_test.cc", + ], + deps = [ + ":kernel_runner", + "//tensorflow/lite/c:common", + "//tensorflow/lite/kernels/internal:tensor", + "//tensorflow/lite/micro:micro_utils", + "//tensorflow/lite/micro:test_helpers", + "//tensorflow/lite/micro/testing:micro_test", + ], +) + +cc_test( + name = "resize_bilinear_test", + srcs = [ + "resize_bilinear_test.cc", + ], + deps = [ + ":kernel_runner", + "//tensorflow/lite/c:common", + "//tensorflow/lite/micro:op_resolvers", + 
"//tensorflow/lite/micro:test_helpers", + "//tensorflow/lite/micro/testing:micro_test", + ], +) + +cc_test( + name = "resize_nearest_neighbor_test", + srcs = [ + "resize_nearest_neighbor_test.cc", + ], + deps = [ + ":kernel_runner", + "//tensorflow/lite/c:common", + "//tensorflow/lite/micro:op_resolvers", + "//tensorflow/lite/micro:test_helpers", + "//tensorflow/lite/micro/testing:micro_test", + ], +) + +cc_test( + name = "round_test", + srcs = [ + "round_test.cc", + ], + deps = [ + ":kernel_runner", + "//tensorflow/lite/c:common", + "//tensorflow/lite/micro:op_resolvers", + "//tensorflow/lite/micro:test_helpers", + "//tensorflow/lite/micro/testing:micro_test", + ], +) + +cc_test( + name = "select_test", + srcs = [ + "select_test.cc", + ], + deps = [ + ":kernel_runner", + "//tensorflow/lite/c:common", + "//tensorflow/lite/micro:op_resolvers", + "//tensorflow/lite/micro:test_helpers", + "//tensorflow/lite/micro/testing:micro_test", + ], +) + +cc_test( + name = "shape_test", + srcs = ["shape_test.cc"], + deps = [ + ":kernel_runner", + "//tensorflow/lite/c:common", + "//tensorflow/lite/micro:op_resolvers", + "//tensorflow/lite/micro:test_helpers", + "//tensorflow/lite/micro/testing:micro_test", + ], +) + +cc_test( + name = "slice_test", + srcs = ["slice_test.cc"], + deps = [ + ":kernel_runner", + "//tensorflow/lite/c:common", + "//tensorflow/lite/micro:op_resolvers", + "//tensorflow/lite/micro:test_helpers", + "//tensorflow/lite/micro/testing:micro_test", + ], +) + +cc_test( + name = "softmax_test", + srcs = [ + "softmax_test.cc", + ], + deps = [ + ":kernel_runner", + "//tensorflow/lite/c:common", + "//tensorflow/lite/micro:op_resolvers", + "//tensorflow/lite/micro:test_helpers", + "//tensorflow/lite/micro/testing:micro_test", + ], +) + +cc_test( + name = "space_to_batch_nd_test", + srcs = [ + "space_to_batch_nd_test.cc", + ], + deps = [ + ":kernel_runner", + "//tensorflow/lite/c:common", + "//tensorflow/lite/micro:micro_utils", + 
"//tensorflow/lite/micro:test_helpers", + "//tensorflow/lite/micro/testing:micro_test", + ], +) + +cc_test( + name = "space_to_depth_test", + srcs = [ + "space_to_depth_test.cc", + ], + deps = [ + ":kernel_runner", + "//tensorflow/lite/c:common", + "//tensorflow/lite/micro:debug_log", + "//tensorflow/lite/micro:op_resolvers", + "//tensorflow/lite/micro:test_helpers", + "//tensorflow/lite/micro/testing:micro_test", + ], +) + +cc_test( + name = "split_test", + srcs = [ + "split_test.cc", + ], + deps = [ + ":kernel_runner", + "//tensorflow/lite/c:common", + "//tensorflow/lite/micro:debug_log", + "//tensorflow/lite/micro:op_resolvers", + "//tensorflow/lite/micro:test_helpers", + "//tensorflow/lite/micro/testing:micro_test", + ], +) + +cc_test( + name = "split_v_test", + srcs = [ + "split_v_test.cc", + ], + deps = [ + ":kernel_runner", + "//tensorflow/lite/c:common", + "//tensorflow/lite/micro:debug_log", + "//tensorflow/lite/micro:op_resolvers", + "//tensorflow/lite/micro:test_helpers", + "//tensorflow/lite/micro/testing:micro_test", + ], +) + +cc_test( + name = "squared_difference_test", + srcs = [ + "squared_difference_test.cc", + ], + deps = [ + ":kernel_runner", + "//tensorflow/lite/c:common", + "//tensorflow/lite/micro:test_helpers", + "//tensorflow/lite/micro/testing:micro_test", + ], +) + +cc_test( + name = "squeeze_test", + srcs = ["squeeze_test.cc"], + deps = [ + ":kernel_runner", + "//tensorflow/lite/c:common", + "//tensorflow/lite/micro:op_resolvers", + "//tensorflow/lite/micro:test_helpers", + "//tensorflow/lite/micro/testing:micro_test", + ], +) + +cc_test( + name = "strided_slice_test", + srcs = [ + "strided_slice_test.cc", + ], + deps = [ + ":kernel_runner", + "//tensorflow/lite/c:common", + "//tensorflow/lite/micro:op_resolvers", + "//tensorflow/lite/micro:test_helpers", + "//tensorflow/lite/micro/testing:micro_test", + ], +) + +cc_test( + name = "sub_test", + srcs = [ + "sub_test.cc", + ], + deps = [ + ":kernel_runner", + "//tensorflow/lite/c:common", 
+ "//tensorflow/lite/micro:test_helpers", + "//tensorflow/lite/micro/testing:micro_test", + ], +) + +cc_test( + name = "svdf_test", + srcs = [ + "svdf_test.cc", + ], + deps = [ + ":kernel_runner", + "//tensorflow/lite/c:common", + "//tensorflow/lite/micro:test_helpers", + "//tensorflow/lite/micro/testing:micro_test", + ], +) + +cc_test( + name = "tanh_test", + srcs = ["tanh_test.cc"], + deps = [ + ":kernel_runner", + "//tensorflow/lite/c:common", + "//tensorflow/lite/micro:test_helpers", + "//tensorflow/lite/micro/testing:micro_test", + ], +) + +cc_test( + name = "transpose_test", + srcs = ["transpose_test.cc"], + deps = [ + ":kernel_runner", + "//tensorflow/lite/c:common", + "//tensorflow/lite/micro:test_helpers", + "//tensorflow/lite/micro/testing:micro_test", + ], +) + +cc_test( + name = "transpose_conv_test", + srcs = [ + "transpose_conv_test.cc", + ], + deps = [ + ":conv_test_common", + ":kernel_runner", + "//tensorflow/lite/c:common", + "//tensorflow/lite/micro:micro_utils", + "//tensorflow/lite/micro:op_resolvers", + "//tensorflow/lite/micro:test_helpers", + "//tensorflow/lite/micro/testing:micro_test", + ], +) + +cc_test( + name = "unidirectional_sequence_lstm_test", + srcs = [ + "unidirectional_sequence_lstm_test.cc", + ], + deps = [ + ":kernel_runner", + ":lstm_eval_test_lib", + "//tensorflow/lite/c:common", + "//tensorflow/lite/micro:debug_log", + "//tensorflow/lite/micro:micro_utils", + "//tensorflow/lite/micro:op_resolvers", + "//tensorflow/lite/micro:test_helpers", + "//tensorflow/lite/micro/testing:micro_test", + ], +) + +cc_test( + name = "unpack_test", + srcs = [ + "unpack_test.cc", + ], + deps = [ + ":kernel_runner", + "//tensorflow/lite/c:common", + "//tensorflow/lite/micro:debug_log", + "//tensorflow/lite/micro:test_helpers", + "//tensorflow/lite/micro/testing:micro_test", + ], +) + +cc_test( + name = "while_test", + srcs = [ + "while_test.cc", + ], + deps = [ + ":kernel_runner", + "//tensorflow/lite/c:common", + 
"//tensorflow/lite/micro:debug_log", + "//tensorflow/lite/micro:test_helpers", + "//tensorflow/lite/micro/testing:micro_test", + ], +) + +cc_test( + name = "zeros_like_test", + srcs = ["zeros_like_test.cc"], + deps = [ + ":kernel_runner", + "//tensorflow/lite/c:common", + "//tensorflow/lite/micro:debug_log", + "//tensorflow/lite/micro:op_resolvers", + "//tensorflow/lite/micro:test_helpers", + "//tensorflow/lite/micro/testing:micro_test", + ], +) + +#################################### +# Bazel config settings. +#################################### + +config_setting( + name = "xtensa_fusion_f1_default", + values = { + "cpu": "F1_190305_swupgrade", + }, +) + +config_setting( + name = "xtensa_hifi_3z_default", + values = { + "cpu": "HIFI_190304_swupgrade", + }, +) + +config_setting( + name = "xtensa_hifi_5_default", + values = { + "cpu": "AE_HiFi5_LE5_AO_FP_XC", + }, +) + +config_setting( + name = "xtensa_vision_p6_default", + values = { + "cpu": "P6_200528", + }, +) diff --git a/tensorflow/lite/micro/kernels/Makefile.inc b/tensorflow/lite/micro/kernels/Makefile.inc new file mode 100644 index 0000000..3689f2e --- /dev/null +++ b/tensorflow/lite/micro/kernels/Makefile.inc @@ -0,0 +1,130 @@ +# Copyright 2021 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================== + +# This file includes kernel test targets only. 
+ +# These tests need additional dependencies beyond libtensorflow-microlite.a. +$(eval $(call microlite_test,kernel_detection_postprocess_test,\ + $(TENSORFLOW_ROOT)tensorflow/lite/micro/kernels/detection_postprocess_test.cc \ + $(TENSORFLOW_ROOT)tensorflow/lite/micro/kernels/detection_postprocess_flexbuffers_generated_data.cc,\ + $(TENSORFLOW_ROOT)tensorflow/lite/micro/kernels/detection_postprocess_flexbuffers_generated_data.h)) + +$(eval $(call microlite_test,kernel_circular_buffer_test,\ + $(TENSORFLOW_ROOT)tensorflow/lite/micro/kernels/circular_buffer_test.cc \ + $(TENSORFLOW_ROOT)tensorflow/lite/micro/kernels/circular_buffer_flexbuffers_generated_data.cc,\ + $(TENSORFLOW_ROOT)tensorflow/lite/micro/kernels/circular_buffer_flexbuffers_generated_data.h)) + +$(eval $(call microlite_test,kernel_conv_test,\ + $(TENSORFLOW_ROOT)tensorflow/lite/micro/kernels/conv_test.cc \ + $(TENSORFLOW_ROOT)tensorflow/lite/micro/kernels/conv_test_common.cc \ + $(TENSORFLOW_ROOT)tensorflow/lite/micro/kernels/testdata/conv_test_data.cc,\ + $(TENSORFLOW_ROOT)tensorflow/lite/micro/kernels/conv_test.h \ + $(TENSORFLOW_ROOT)tensorflow/lite/micro/kernels/testdata/conv_test_data.h)) + +$(eval $(call microlite_test,kernel_transpose_conv_test,\ + $(TENSORFLOW_ROOT)tensorflow/lite/micro/kernels/transpose_conv_test.cc \ + $(TENSORFLOW_ROOT)tensorflow/lite/micro/kernels/conv_test_common.cc,\ + $(TENSORFLOW_ROOT)tensorflow/lite/micro/kernels/conv_test.h)) + +$(eval $(call microlite_test,kernel_lstm_eval_test,\ + $(TENSORFLOW_ROOT)tensorflow/lite/micro/kernels/lstm_eval_test.cc \ + $(TENSORFLOW_ROOT)tensorflow/lite/micro/kernels/testdata/lstm_test_data.cc,\ + $(TENSORFLOW_ROOT)tensorflow/lite/micro/kernels/testdata/lstm_test_data.h)) + +$(eval $(call microlite_test,unidirectional_sequence_lstm_test,\ + $(TENSORFLOW_ROOT)tensorflow/lite/micro/kernels/unidirectional_sequence_lstm_test.cc \ + $(TENSORFLOW_ROOT)tensorflow/lite/micro/kernels/testdata/lstm_test_data.cc,\ + 
$(TENSORFLOW_ROOT)tensorflow/lite/micro/kernels/testdata/lstm_test_data.h)) + +$(eval $(call microlite_test,kernel_window_test,\ + $(TENSORFLOW_ROOT)signal/micro/kernels/window_test.cc \ + $(TENSORFLOW_ROOT)signal/micro/kernels/window_flexbuffers_generated_data.cc, \ + $(TENSORFLOW_ROOT)signal/micro/kernels/window_flexbuffers_generated_data.h)) + +# For kernel tests without extra dependencies (beyond libtensorflow-microlite.a), +# use simple for loop to generate their make targets in a common way. +MICROLITE_KERNEL_SIMPLE_TEST_SRCS := \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/kernels/activations_test.cc \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/kernels/add_test.cc \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/kernels/add_n_test.cc \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/kernels/arg_min_max_test.cc \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/kernels/batch_to_space_nd_test.cc \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/kernels/broadcast_args_test.cc \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/kernels/broadcast_to_test.cc \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/kernels/cast_test.cc \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/kernels/ceil_test.cc \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/kernels/comparisons_test.cc \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/kernels/concatenation_test.cc \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/kernels/cumsum_test.cc \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/kernels/depth_to_space_test.cc \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/kernels/depthwise_conv_test.cc \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/kernels/dequantize_test.cc \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/kernels/div_test.cc \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/kernels/elementwise_test.cc \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/kernels/elu_test.cc \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/kernels/embedding_lookup_test.cc \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/kernels/exp_test.cc \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/kernels/expand_dims_test.cc \ 
+$(TENSORFLOW_ROOT)tensorflow/lite/micro/kernels/fill_test.cc \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/kernels/floor_test.cc \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/kernels/floor_div_test.cc \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/kernels/floor_mod_test.cc \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/kernels/fully_connected_test.cc \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/kernels/gather_test.cc \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/kernels/gather_nd_test.cc \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/kernels/hard_swish_test.cc \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/kernels/l2norm_test.cc \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/kernels/l2_pool_2d_test.cc \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/kernels/leaky_relu_test.cc \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/kernels/logical_test.cc \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/kernels/logistic_test.cc \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/kernels/log_softmax_test.cc \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/kernels/maximum_minimum_test.cc \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/kernels/mirror_pad_test.cc \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/kernels/mul_test.cc \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/kernels/neg_test.cc \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/kernels/pack_test.cc \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/kernels/pad_test.cc \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/kernels/pooling_test.cc \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/kernels/prelu_test.cc \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/kernels/quantization_util_test.cc \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/kernels/quantize_test.cc \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/kernels/reduce_test.cc \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/kernels/reshape_test.cc \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/kernels/resize_bilinear_test.cc \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/kernels/resize_nearest_neighbor_test.cc \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/kernels/round_test.cc \ 
+$(TENSORFLOW_ROOT)tensorflow/lite/micro/kernels/select_test.cc \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/kernels/shape_test.cc \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/kernels/slice_test.cc \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/kernels/softmax_test.cc \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/kernels/space_to_batch_nd_test.cc \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/kernels/space_to_depth_test.cc \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/kernels/split_test.cc \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/kernels/split_v_test.cc \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/kernels/squared_difference_test.cc \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/kernels/squeeze_test.cc \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/kernels/strided_slice_test.cc \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/kernels/sub_test.cc \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/kernels/svdf_test.cc \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/kernels/tanh_test.cc \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/kernels/transpose_test.cc \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/kernels/unpack_test.cc \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/kernels/while_test.cc \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/kernels/zeros_like_test.cc + +# Generate simple kernel test targets in a common way +$(foreach TEST_TARGET,$(MICROLITE_KERNEL_SIMPLE_TEST_SRCS),\ +$(eval $(call microlite_test,kernel_$(notdir $(basename $(TEST_TARGET))),$(TEST_TARGET)))) diff --git a/tensorflow/lite/micro/kernels/README.md b/tensorflow/lite/micro/kernels/README.md new file mode 100644 index 0000000..fa18bcf --- /dev/null +++ b/tensorflow/lite/micro/kernels/README.md @@ -0,0 +1,5 @@ +# Info + +* [Porting Ops from Lite to Micro](../docs/porting_reference_ops.md) explains, + step-by-step, the code changes necessary to port an op from lite to micro, + and the process of submitting them for review and acceptance by the project. 
diff --git a/tensorflow/lite/micro/kernels/activation_utils.h b/tensorflow/lite/micro/kernels/activation_utils.h new file mode 100644 index 0000000..95ecc26 --- /dev/null +++ b/tensorflow/lite/micro/kernels/activation_utils.h @@ -0,0 +1,57 @@ +/* Copyright 2019 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#ifndef TENSORFLOW_LITE_MICRO_KERNELS_ACTIVATION_UTILS_H_ +#define TENSORFLOW_LITE_MICRO_KERNELS_ACTIVATION_UTILS_H_ + +#include +#include + +#include "tensorflow/lite/c/builtin_op_data.h" +#include "tensorflow/lite/kernels/internal/cppmath.h" +#include "tensorflow/lite/kernels/internal/max.h" +#include "tensorflow/lite/kernels/internal/min.h" + +namespace tflite { +namespace ops { +namespace micro { + +// Returns the floating point value for a fused activation: +inline float ActivationValFloat(TfLiteFusedActivation act, float a) { + switch (act) { + case kTfLiteActNone: + return a; + case kTfLiteActRelu: + return TfLiteMax(0.0f, a); + case kTfLiteActReluN1To1: + return TfLiteMax(-1.0f, TfLiteMin(a, 1.0f)); + case kTfLiteActRelu6: + return TfLiteMax(0.0f, TfLiteMin(a, 6.0f)); + case kTfLiteActTanh: + return std::tanh(a); + case kTfLiteActSignBit: + return std::signbit(a); + case kTfLiteActSigmoid: + return 1.0f / (1.0f + std::exp(-a)); + } + return 0.0f; // To indicate an unsupported activation (i.e. 
when a new fused + // activation is added to the enum and not handled here). +} + +} // namespace micro +} // namespace ops +} // namespace tflite + +#endif // TENSORFLOW_LITE_MICRO_KERNELS_ACTIVATION_UTILS_H_ diff --git a/tensorflow/lite/micro/kernels/activations.cc b/tensorflow/lite/micro/kernels/activations.cc new file mode 100644 index 0000000..1086325 --- /dev/null +++ b/tensorflow/lite/micro/kernels/activations.cc @@ -0,0 +1,120 @@ +/* Copyright 2021 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/ + +#include "tensorflow/lite/micro/kernels/activations.h" + +#include "tensorflow/lite/c/builtin_op_data.h" +#include "tensorflow/lite/c/common.h" +#include "tensorflow/lite/kernels/internal/common.h" +#include "tensorflow/lite/kernels/internal/quantization_util.h" +#include "tensorflow/lite/kernels/internal/tensor_ctypes.h" +#include "tensorflow/lite/kernels/internal/types.h" +#include "tensorflow/lite/kernels/kernel_util.h" +#include "tensorflow/lite/kernels/op_macros.h" +#include "tensorflow/lite/micro/kernels/kernel_util.h" +#include "tensorflow/lite/micro/micro_log.h" +#include "tensorflow/lite/micro/micro_utils.h" + +namespace tflite { +namespace { + +void* ReluInit(TfLiteContext* context, const char* buffer, size_t length) { + TFLITE_DCHECK(context->AllocatePersistentBuffer != nullptr); + return context->AllocatePersistentBuffer(context, sizeof(ReluOpData)); +} + +TfLiteStatus ReluEval(TfLiteContext* context, TfLiteNode* node) { + TFLITE_DCHECK(node->user_data != nullptr); + const ReluOpData& data = *(static_cast(node->user_data)); + + const TfLiteEvalTensor* input = + tflite::micro::GetEvalInput(context, node, kActivationsInputTensor); + TfLiteEvalTensor* output = + tflite::micro::GetEvalOutput(context, node, kActivationsOutputTensor); + + switch (input->type) { + case kTfLiteFloat32: { + ReluFloat(tflite::micro::GetTensorShape(input), + tflite::micro::GetTensorData(input), + tflite::micro::GetTensorShape(output), + tflite::micro::GetTensorData(output)); + + return kTfLiteOk; + } + case kTfLiteInt8: { + tflite::ReluQuantized(data, tflite::micro::GetTensorShape(input), + tflite::micro::GetTensorShape(output), + tflite::micro::GetTensorData(input), + tflite::micro::GetTensorData(output)); + return kTfLiteOk; + } + default: { + MicroPrintf("Only float32 is supported currently, got %s", + TfLiteTypeGetName(input->type)); + return kTfLiteError; + } + } +} + +void* 
Relu6Init(TfLiteContext* context, const char* buffer, size_t length) { + TFLITE_DCHECK(context->AllocatePersistentBuffer != nullptr); + return context->AllocatePersistentBuffer(context, sizeof(Relu6OpData)); +} + +TfLiteStatus Relu6Eval(TfLiteContext* context, TfLiteNode* node) { + TFLITE_DCHECK(node->user_data != nullptr); + const Relu6OpData& data = *(static_cast(node->user_data)); + + const TfLiteEvalTensor* input = + tflite::micro::GetEvalInput(context, node, kActivationsInputTensor); + TfLiteEvalTensor* output = + tflite::micro::GetEvalOutput(context, node, kActivationsOutputTensor); + + switch (input->type) { + case kTfLiteFloat32: { + Relu6Float(tflite::micro::GetTensorShape(input), + tflite::micro::GetTensorData(input), + tflite::micro::GetTensorShape(output), + tflite::micro::GetTensorData(output)); + + return kTfLiteOk; + } + case kTfLiteInt8: { + Relu6Quantized(data.zero_int8, data.six_int8, + tflite::micro::GetTensorShape(input), + tflite::micro::GetTensorData(input), + tflite::micro::GetTensorShape(output), + tflite::micro::GetTensorData(output)); + return kTfLiteOk; + } + default: { + MicroPrintf("Only float32 is supported currently, got %s", + TfLiteTypeGetName(input->type)); + return kTfLiteError; + } + } +} + +} // namespace + +TFLMRegistration Register_RELU() { + return tflite::micro::RegisterOp(ReluInit, ReluPrepare, ReluEval); +} + +TFLMRegistration Register_RELU6() { + return tflite::micro::RegisterOp(Relu6Init, Relu6Prepare, Relu6Eval); +} + +} // namespace tflite diff --git a/tensorflow/lite/micro/kernels/activations.h b/tensorflow/lite/micro/kernels/activations.h new file mode 100644 index 0000000..e953f0e --- /dev/null +++ b/tensorflow/lite/micro/kernels/activations.h @@ -0,0 +1,63 @@ +/* Copyright 2021 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. 
+You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#ifndef TENSORFLOW_LITE_MICRO_KERNELS_ACTIVATIONS_H_ +#define TENSORFLOW_LITE_MICRO_KERNELS_ACTIVATIONS_H_ + +#include + +#include "tensorflow/lite/c/builtin_op_data.h" +#include "tensorflow/lite/c/common.h" +#include "tensorflow/lite/kernels/internal/types.h" + +namespace tflite { + +extern const int kActivationsInputTensor; +extern const int kActivationsOutputTensor; + +struct ReluOpData { + ReluParams params; +}; + +struct Relu6OpData { + int8_t six_int8; + int8_t zero_int8; +}; + +void ReluQuantized(const ReluOpData& data, const RuntimeShape& input_shape, + const RuntimeShape& output_shape, const int8_t* input_data, + int8_t* output_data); + +template +void CalculateReluOpData(const TfLiteTensor* input, TfLiteTensor* output, + ReluOpData* data); + +void ReluFloat(const RuntimeShape& input_shape, const float* input_data, + const RuntimeShape& output_shape, float* output_data); + +void Relu6Float(const RuntimeShape& input_shape, const float* input_data, + const RuntimeShape& output_shape, float* output_data); + +void Relu6Quantized(int8_t lower, int8_t upper, const RuntimeShape& input_shape, + const int8_t* input_data, const RuntimeShape& output_shape, + int8_t* output_data); + +TfLiteStatus ReluPrepare(TfLiteContext* context, TfLiteNode* node); + +TfLiteStatus Relu6Prepare(TfLiteContext* context, TfLiteNode* node); + +} // namespace tflite + +#endif // TENSORFLOW_LITE_MICRO_KERNELS_ACTIVATIONS_H_ diff --git a/tensorflow/lite/micro/kernels/activations_common.cc 
b/tensorflow/lite/micro/kernels/activations_common.cc new file mode 100644 index 0000000..2ec3a1b --- /dev/null +++ b/tensorflow/lite/micro/kernels/activations_common.cc @@ -0,0 +1,158 @@ +/* Copyright 2021 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#include +#include + +#include "tensorflow/lite/c/builtin_op_data.h" +#include "tensorflow/lite/c/common.h" +#include "tensorflow/lite/kernels/internal/common.h" +#include "tensorflow/lite/kernels/internal/quantization_util.h" +#include "tensorflow/lite/kernels/internal/tensor_ctypes.h" +#include "tensorflow/lite/kernels/internal/types.h" +#include "tensorflow/lite/kernels/kernel_util.h" +#include "tensorflow/lite/kernels/op_macros.h" +#include "tensorflow/lite/micro/kernels/activations.h" +#include "tensorflow/lite/micro/kernels/kernel_util.h" +#include "tensorflow/lite/micro/micro_utils.h" + +namespace tflite { + +const int kActivationsInputTensor = 0; +const int kActivationsOutputTensor = 0; + +void ReluQuantized(const ReluOpData& data, const RuntimeShape& input_shape, + const RuntimeShape& output_shape, const int8_t* input_data, + int8_t* output_data) { + const int flat_size = MatchingFlatSize(input_shape, output_shape); + for (int i = 0; i < flat_size; ++i) { + const int32_t val = static_cast(input_data[i]); + int32_t clamped = + data.params.output_offset + + MultiplyByQuantizedMultiplier(val - 
data.params.input_offset, + data.params.output_multiplier, + data.params.output_shift); + clamped = std::max(data.params.quantized_activation_min, clamped); + clamped = std::min(data.params.quantized_activation_max, clamped); + output_data[i] = static_cast(clamped); + } +} + +template +void CalculateReluOpData(const TfLiteTensor* input, TfLiteTensor* output, + ReluOpData* data) { + float act_min = 0.0; + float act_max = std::numeric_limits::infinity(); + double real_multiplier = + static_cast(input->params.scale / output->params.scale); + + const RuntimeShape input_shape = GetTensorShape(input); + const RuntimeShape output_shape = GetTensorShape(output); + + QuantizeMultiplier(real_multiplier, &data->params.output_multiplier, + &data->params.output_shift); + + data->params.quantized_activation_min = std::max( + static_cast(std::numeric_limits::min()), + output->params.zero_point + + static_cast(roundf(act_min / output->params.scale))); + data->params.quantized_activation_max = + act_max == std::numeric_limits::infinity() + ? static_cast(std::numeric_limits::max()) + : std::min(static_cast(std::numeric_limits::max()), + output->params.zero_point + + static_cast( + roundf(act_max / output->params.scale))); + data->params.input_offset = input->params.zero_point; + data->params.output_offset = output->params.zero_point; +} + +void ReluFloat(const RuntimeShape& input_shape, const float* input_data, + const RuntimeShape& output_shape, float* output_data) { + const int flat_size = MatchingFlatSize(input_shape, output_shape); + for (int i = 0; i < flat_size; ++i) { + const float val = input_data[i]; + const float lower = 0.0f; + const float clamped = val < lower ? 
lower : val; + output_data[i] = clamped; + } +} + +void Relu6Float(const RuntimeShape& input_shape, const float* input_data, + const RuntimeShape& output_shape, float* output_data) { + const int flat_size = MatchingFlatSize(input_shape, output_shape); + for (int i = 0; i < flat_size; ++i) { + const float val = input_data[i]; + const float upper = 6.0f; + const float lower = 0.0f; + const float clamped = val > upper ? upper : val < lower ? lower : val; + output_data[i] = clamped; + } +} + +void Relu6Quantized(int8_t lower, int8_t upper, const RuntimeShape& input_shape, + const int8_t* input_data, const RuntimeShape& output_shape, + int8_t* output_data) { + const int flat_size = MatchingFlatSize(input_shape, output_shape); + for (int i = 0; i < flat_size; ++i) { + const int8_t val = input_data[i]; + const int8_t clamped = val > upper ? upper : val < lower ? lower : val; + output_data[i] = clamped; + } +} + +TfLiteStatus ReluPrepare(TfLiteContext* context, TfLiteNode* node) { + TFLITE_DCHECK(node->user_data != nullptr); + ReluOpData* data = static_cast(node->user_data); + + MicroContext* micro_context = GetMicroContext(context); + TfLiteTensor* input = + micro_context->AllocateTempInputTensor(node, kActivationsInputTensor); + TF_LITE_ENSURE(context, input != nullptr); + TfLiteTensor* output = + micro_context->AllocateTempOutputTensor(node, kActivationsOutputTensor); + TF_LITE_ENSURE(context, output != nullptr); + + if (input->type == kTfLiteInt8) { + CalculateReluOpData(input, output, data); + } + + micro_context->DeallocateTempTfLiteTensor(input); + micro_context->DeallocateTempTfLiteTensor(output); + + return kTfLiteOk; +} + +TfLiteStatus Relu6Prepare(TfLiteContext* context, TfLiteNode* node) { + TFLITE_DCHECK(node->user_data != nullptr); + Relu6OpData* data = static_cast(node->user_data); + + MicroContext* micro_context = GetMicroContext(context); + TfLiteTensor* input = + micro_context->AllocateTempInputTensor(node, kActivationsInputTensor); + 
TF_LITE_ENSURE(context, input != nullptr); + + if (input->type == kTfLiteInt8) { + data->six_int8 = FloatToQuantizedType(6.0f, input->params.scale, + input->params.zero_point); + data->zero_int8 = input->params.zero_point; + } + + micro_context->DeallocateTempTfLiteTensor(input); + + return kTfLiteOk; +} + +} // namespace tflite diff --git a/tensorflow/lite/micro/kernels/activations_test.cc b/tensorflow/lite/micro/kernels/activations_test.cc new file mode 100644 index 0000000..25402a8 --- /dev/null +++ b/tensorflow/lite/micro/kernels/activations_test.cc @@ -0,0 +1,250 @@ +/* Copyright 2021 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/ + +#include "tensorflow/lite/c/builtin_op_data.h" +#include "tensorflow/lite/c/common.h" +#include "tensorflow/lite/micro/kernels/kernel_runner.h" +#include "tensorflow/lite/micro/test_helpers.h" +#include "tensorflow/lite/micro/testing/micro_test.h" + +namespace tflite { +namespace testing { +namespace { + +void TestReluFloat(int* input_dims_data, const float* input_data, + int* output_dims_data, const float* golden, + float* output_data) { + TfLiteIntArray* input_dims = IntArrayFromInts(input_dims_data); + TfLiteIntArray* output_dims = IntArrayFromInts(output_dims_data); + const int output_elements_count = ElementCount(*output_dims); + + constexpr int inputs_size = 1; + constexpr int outputs_size = 1; + constexpr int tensors_size = inputs_size + outputs_size; + TfLiteTensor tensors[tensors_size] = { + CreateTensor(input_data, input_dims), + CreateTensor(output_data, output_dims), + }; + + int inputs_array_data[] = {1, 0}; + TfLiteIntArray* inputs_array = IntArrayFromInts(inputs_array_data); + int outputs_array_data[] = {1, 1}; + TfLiteIntArray* outputs_array = IntArrayFromInts(outputs_array_data); + + const TFLMRegistration registration = Register_RELU(); + micro::KernelRunner runner(registration, tensors, tensors_size, inputs_array, + outputs_array, + /*builtin_data=*/nullptr); + + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, runner.InitAndPrepare()); + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, runner.Invoke()); + + for (int i = 0; i < output_elements_count; ++i) { + TF_LITE_MICRO_EXPECT_NEAR(golden[i], output_data[i], 1e-5f); + } +} + +void TestRelu6Float(int* input_dims_data, const float* input_data, + int* output_dims_data, const float* golden, + float* output_data) { + TfLiteIntArray* input_dims = IntArrayFromInts(input_dims_data); + TfLiteIntArray* output_dims = IntArrayFromInts(output_dims_data); + const int output_elements_count = ElementCount(*output_dims); + + constexpr int inputs_size = 
1; + constexpr int outputs_size = 1; + constexpr int tensors_size = inputs_size + outputs_size; + TfLiteTensor tensors[tensors_size] = { + CreateTensor(input_data, input_dims), + CreateTensor(output_data, output_dims), + }; + + int inputs_array_data[] = {1, 0}; + TfLiteIntArray* inputs_array = IntArrayFromInts(inputs_array_data); + int outputs_array_data[] = {1, 1}; + TfLiteIntArray* outputs_array = IntArrayFromInts(outputs_array_data); + + const TFLMRegistration registration = Register_RELU6(); + micro::KernelRunner runner(registration, tensors, tensors_size, inputs_array, + outputs_array, + /*builtin_data=*/nullptr); + + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, runner.InitAndPrepare()); + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, runner.Invoke()); + + for (int i = 0; i < output_elements_count; ++i) { + TF_LITE_MICRO_EXPECT_NEAR(golden[i], output_data[i], 1e-5f); + } +} + +void TestReluInt8(int* input_dims_data, const float* input_data, + int8_t* input_data_quantized, const float input_scale, + const int input_zero_point, const float* golden, + int8_t* golden_quantized, int* output_dims_data, + const float output_scale, const int output_zero_point, + int8_t* output_data) { + TfLiteIntArray* input_dims = IntArrayFromInts(input_dims_data); + TfLiteIntArray* output_dims = IntArrayFromInts(output_dims_data); + const int output_elements_count = ElementCount(*output_dims); + constexpr int inputs_size = 1; + constexpr int outputs_size = 1; + constexpr int tensors_size = inputs_size + outputs_size; + TfLiteTensor tensors[tensors_size] = { + CreateQuantizedTensor(input_data, input_data_quantized, input_dims, + input_scale, input_zero_point), + CreateQuantizedTensor(output_data, output_dims, output_scale, + output_zero_point), + }; + + int inputs_array_data[] = {1, 0}; + TfLiteIntArray* inputs_array = IntArrayFromInts(inputs_array_data); + int outputs_array_data[] = {1, 1}; + TfLiteIntArray* outputs_array = IntArrayFromInts(outputs_array_data); + + const TFLMRegistration registration = 
Register_RELU(); + micro::KernelRunner runner(registration, tensors, tensors_size, inputs_array, + outputs_array, + /*builtin_data=*/nullptr); + + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, runner.InitAndPrepare()); + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, runner.Invoke()); + + Quantize(golden, golden_quantized, output_elements_count, output_scale, + output_zero_point); + + for (int i = 0; i < output_elements_count; ++i) { + TF_LITE_MICRO_EXPECT_EQ(golden_quantized[i], output_data[i]); + } +} + +void TestRelu6Int8(int* input_dims_data, const float* input_data, + int8_t* input_data_quantized, const float input_scale, + const int input_zero_point, const float* golden, + int8_t* golden_quantized, int* output_dims_data, + const float output_scale, const int output_zero_point, + int8_t* output_data) { + TfLiteIntArray* input_dims = IntArrayFromInts(input_dims_data); + TfLiteIntArray* output_dims = IntArrayFromInts(output_dims_data); + const int output_elements_count = ElementCount(*output_dims); + constexpr int inputs_size = 1; + constexpr int outputs_size = 1; + constexpr int tensors_size = inputs_size + outputs_size; + TfLiteTensor tensors[tensors_size] = { + CreateQuantizedTensor(input_data, input_data_quantized, input_dims, + input_scale, input_zero_point), + CreateQuantizedTensor(output_data, output_dims, output_scale, + output_zero_point), + }; + + int inputs_array_data[] = {1, 0}; + TfLiteIntArray* inputs_array = IntArrayFromInts(inputs_array_data); + int outputs_array_data[] = {1, 1}; + TfLiteIntArray* outputs_array = IntArrayFromInts(outputs_array_data); + + const TFLMRegistration registration = Register_RELU6(); + micro::KernelRunner runner(registration, tensors, tensors_size, inputs_array, + outputs_array, + /*builtin_data=*/nullptr); + + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, runner.InitAndPrepare()); + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, runner.Invoke()); + + Quantize(golden, golden_quantized, output_elements_count, output_scale, + output_zero_point); + + for (int i = 0; i 
< output_elements_count; ++i) { + TF_LITE_MICRO_EXPECT_EQ(golden_quantized[i], output_data[i]); + } +} + +} // namespace +} // namespace testing +} // namespace tflite + +TF_LITE_MICRO_TESTS_BEGIN + +TF_LITE_MICRO_TEST(SimpleReluTestFloat) { + const int output_elements_count = 10; + int input_shape[] = {2, 1, 5}; + const float input_data[] = { + 1.0, 2.0, 3.0, 4.0, 5.0, -1.0, -2.0, -3.0, -4.0, -5.0, + }; + const float golden[] = {1.0, 2.0, 3.0, 4.0, 5.0, 0, 0, 0, 0, 0}; + int output_shape[] = {2, 1, 5}; + float output_data[output_elements_count]; + tflite::testing::TestReluFloat(input_shape, input_data, output_shape, golden, + output_data); +} + +TF_LITE_MICRO_TEST(SimpleRelu6TestFloat) { + const int output_elements_count = 10; + float output_data[output_elements_count]; + int input_shape[] = {2, 1, 5}; + const float input_data[] = {4.0, 5.0, 6.0, 7.0, 8.0, + -4.0, -5.0, -6.0, -7.0, -8.0}; + int output_shape[] = {2, 1, 5}; + const float golden[] = { + 4.0, 5.0, 6.0, 6.0, 6.0, 0.0, 0.0, 0.0, 0.0, 0.0, + }; + + tflite::testing::TestRelu6Float(input_shape, input_data, output_shape, golden, + output_data); +} + +TF_LITE_MICRO_TEST(SimpleReluTestInt8) { + const int elements_count = 10; + + int input_shape[] = {2, 1, 5}; + const float input_data[] = {1, 2, 3, 4, 5, -1, -2, -3, -4, -5}; + int8_t input_quantized[elements_count]; + int output_shape[] = {2, 1, 5}; + const float golden[] = {1, 2, 3, 4, 5, 0, 0, 0, 0, 0}; + int8_t golden_quantized[elements_count]; + int8_t output_data[elements_count]; + + const float input_scale = 0.5f; + const int input_zero_point = 0; + const float output_scale = 0.5f; + const int output_zero_point = 0; + + tflite::testing::TestReluInt8(input_shape, input_data, input_quantized, + input_scale, input_zero_point, golden, + golden_quantized, output_shape, output_scale, + output_zero_point, output_data); +} + +TF_LITE_MICRO_TEST(SimpleRelu6TestInt8) { + const int elements_count = 10; + + int input_shape[] = {2, 1, 5}; + const float input_data[] = 
{4, 5, 6, 7, 8, -1, -2, -3, -4, -5}; + int8_t input_quantized[elements_count]; + int output_shape[] = {2, 1, 5}; + const float golden[] = {4, 5, 6, 6, 6, 0, 0, 0, 0, 0}; + int8_t golden_quantized[elements_count]; + int8_t output_data[elements_count]; + + const float input_scale = 0.5f; + const int input_zero_point = 127; + const float output_scale = 0.5f; + const int output_zero_point = 127; + + tflite::testing::TestRelu6Int8(input_shape, input_data, input_quantized, + input_scale, input_zero_point, golden, + golden_quantized, output_shape, output_scale, + output_zero_point, output_data); +} + +TF_LITE_MICRO_TESTS_END diff --git a/tensorflow/lite/micro/kernels/add.cc b/tensorflow/lite/micro/kernels/add.cc new file mode 100644 index 0000000..b27206c --- /dev/null +++ b/tensorflow/lite/micro/kernels/add.cc @@ -0,0 +1,200 @@ +/* Copyright 2021 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/ + +#include "tensorflow/lite/kernels/internal/reference/add.h" + +#include + +#include "tensorflow/lite/c/builtin_op_data.h" +#include "tensorflow/lite/c/common.h" +#include "tensorflow/lite/kernels/internal/quantization_util.h" +#include "tensorflow/lite/kernels/internal/reference/integer_ops/add.h" +#include "tensorflow/lite/kernels/internal/reference/process_broadcast_shapes.h" +#include "tensorflow/lite/kernels/internal/tensor_ctypes.h" +#include "tensorflow/lite/kernels/kernel_util.h" +#include "tensorflow/lite/kernels/op_macros.h" +#include "tensorflow/lite/micro/kernels/add.h" +#include "tensorflow/lite/micro/kernels/kernel_util.h" +#include "tensorflow/lite/micro/memory_helpers.h" +#include "tensorflow/lite/micro/micro_log.h" + +namespace tflite { + +TfLiteStatus EvalAdd(TfLiteContext* context, TfLiteNode* node, + TfLiteAddParams* params, const OpDataAdd* data, + const TfLiteEvalTensor* input1, + const TfLiteEvalTensor* input2, TfLiteEvalTensor* output) { + switch (output->type) { + case kTfLiteFloat32: { + tflite::ArithmeticParams op_params; + SetActivationParams(data->output_activation_min_f32, + data->output_activation_max_f32, &op_params); + if (data->requires_broadcast) { + reference_ops::BroadcastAdd4DSlow( + op_params, tflite::micro::GetTensorShape(input1), + tflite::micro::GetTensorData(input1), + tflite::micro::GetTensorShape(input2), + tflite::micro::GetTensorData(input2), + tflite::micro::GetTensorShape(output), + tflite::micro::GetTensorData(output)); + } else { + reference_ops::Add(op_params, tflite::micro::GetTensorShape(input1), + tflite::micro::GetTensorData(input1), + tflite::micro::GetTensorShape(input2), + tflite::micro::GetTensorData(input2), + tflite::micro::GetTensorShape(output), + tflite::micro::GetTensorData(output)); + } + } break; + case kTfLiteInt32: { + tflite::ArithmeticParams op_params; + SetActivationParams(std::numeric_limits::lowest(), + 
std::numeric_limits::max(), &op_params); + if (data->requires_broadcast) { + reference_ops::BroadcastAdd4DSlow( + op_params, tflite::micro::GetTensorShape(input1), + tflite::micro::GetTensorData(input1), + tflite::micro::GetTensorShape(input2), + tflite::micro::GetTensorData(input2), + tflite::micro::GetTensorShape(output), + tflite::micro::GetTensorData(output)); + } else { + reference_ops::Add(op_params, tflite::micro::GetTensorShape(input1), + tflite::micro::GetTensorData(input1), + tflite::micro::GetTensorShape(input2), + tflite::micro::GetTensorData(input2), + tflite::micro::GetTensorShape(output), + tflite::micro::GetTensorData(output)); + } + } break; + default: + MicroPrintf("Type %s (%d) not supported.", + TfLiteTypeGetName(output->type), output->type); + return kTfLiteError; + } + + return kTfLiteOk; +} + +TfLiteStatus EvalAddQuantized(TfLiteContext* context, TfLiteNode* node, + TfLiteAddParams* params, const OpDataAdd* data, + const TfLiteEvalTensor* input1, + const TfLiteEvalTensor* input2, + TfLiteEvalTensor* output) { + tflite::ArithmeticParams op_params; + op_params.left_shift = data->left_shift; + op_params.input1_offset = data->input1_offset; + op_params.input1_multiplier = data->input1_multiplier; + op_params.input1_shift = data->input1_shift; + op_params.input2_offset = data->input2_offset; + op_params.input2_multiplier = data->input2_multiplier; + op_params.input2_shift = data->input2_shift; + op_params.output_offset = data->output_offset; + op_params.output_multiplier = data->output_multiplier; + op_params.output_shift = data->output_shift; + SetActivationParams(data->output_activation_min, data->output_activation_max, + &op_params); + bool need_broadcast = reference_ops::ProcessBroadcastShapes( + tflite::micro::GetTensorShape(input1), + tflite::micro::GetTensorShape(input2), &op_params); + + switch (output->type) { + case kTfLiteInt8: { + if (need_broadcast) { + reference_integer_ops::BroadcastAdd4DSlow( + op_params, 
tflite::micro::GetTensorShape(input1), + tflite::micro::GetTensorData(input1), + tflite::micro::GetTensorShape(input2), + tflite::micro::GetTensorData(input2), + tflite::micro::GetTensorShape(output), + tflite::micro::GetTensorData(output)); + } else { + reference_integer_ops::Add( + op_params, tflite::micro::GetTensorShape(input1), + tflite::micro::GetTensorData(input1), + tflite::micro::GetTensorShape(input2), + tflite::micro::GetTensorData(input2), + tflite::micro::GetTensorShape(output), + tflite::micro::GetTensorData(output)); + } + break; + } + case kTfLiteInt16: { + if (need_broadcast) { + reference_ops::BroadcastAdd4DSlow( + op_params, tflite::micro::GetTensorShape(input1), + tflite::micro::GetTensorData(input1), + tflite::micro::GetTensorShape(input2), + tflite::micro::GetTensorData(input2), + tflite::micro::GetTensorShape(output), + tflite::micro::GetTensorData(output)); + } else { + reference_ops::Add(op_params, tflite::micro::GetTensorShape(input1), + tflite::micro::GetTensorData(input1), + tflite::micro::GetTensorShape(input2), + tflite::micro::GetTensorData(input2), + tflite::micro::GetTensorShape(output), + tflite::micro::GetTensorData(output), + false); + } + break; + } + default: + MicroPrintf("Type %s (%d) not supported.", + TfLiteTypeGetName(output->type), output->type); + return kTfLiteError; + } + + return kTfLiteOk; +} + +void* AddInit(TfLiteContext* context, const char* buffer, size_t length) { + TFLITE_DCHECK(context->AllocatePersistentBuffer != nullptr); + return context->AllocatePersistentBuffer(context, sizeof(OpDataAdd)); +} + +TfLiteStatus AddEval(TfLiteContext* context, TfLiteNode* node) { + auto* params = reinterpret_cast(node->builtin_data); + + TFLITE_DCHECK(node->user_data != nullptr); + const OpDataAdd* data = static_cast(node->user_data); + + const TfLiteEvalTensor* input1 = + tflite::micro::GetEvalInput(context, node, kAddInputTensor1); + const TfLiteEvalTensor* input2 = + tflite::micro::GetEvalInput(context, node, 
kAddInputTensor2); + TfLiteEvalTensor* output = + tflite::micro::GetEvalOutput(context, node, kAddOutputTensor); + + if (output->type == kTfLiteFloat32 || output->type == kTfLiteInt32) { + TF_LITE_ENSURE_OK( + context, EvalAdd(context, node, params, data, input1, input2, output)); + } else if (output->type == kTfLiteInt8 || output->type == kTfLiteInt16) { + TF_LITE_ENSURE_OK(context, EvalAddQuantized(context, node, params, data, + input1, input2, output)); + } else { + MicroPrintf("Type %s (%d) not supported.", TfLiteTypeGetName(output->type), + output->type); + return kTfLiteError; + } + + return kTfLiteOk; +} + +TFLMRegistration Register_ADD() { + return tflite::micro::RegisterOp(AddInit, AddPrepare, AddEval); +} + +} // namespace tflite diff --git a/tensorflow/lite/micro/kernels/add.h b/tensorflow/lite/micro/kernels/add.h new file mode 100644 index 0000000..7ee0fc3 --- /dev/null +++ b/tensorflow/lite/micro/kernels/add.h @@ -0,0 +1,78 @@ +/* Copyright 2022 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/ + +#ifndef TENSORFLOW_LITE_MICRO_KERNELS_ADD_H_ +#define TENSORFLOW_LITE_MICRO_KERNELS_ADD_H_ + +#include + +#include "tensorflow/lite/c/builtin_op_data.h" +#include "tensorflow/lite/c/common.h" +#include "tensorflow/lite/micro/micro_common.h" + +namespace tflite { + +extern const int kAddInputTensor1; +extern const int kAddInputTensor2; +extern const int kAddOutputTensor; + +struct OpDataAdd { + bool requires_broadcast; + + // These fields are used in both the general 8-bit -> 8bit quantized path, + // and the special 16-bit -> 16bit quantized path + int input1_shift; + int input2_shift; + int32_t output_activation_min; + int32_t output_activation_max; + + // These fields are used only in the general 8-bit -> 8bit quantized path + int32_t input1_multiplier; + int32_t input2_multiplier; + int32_t output_multiplier; + int output_shift; + int left_shift; + int32_t input1_offset; + int32_t input2_offset; + int32_t output_offset; + + // Used only for float evals: + float output_activation_min_f32; + float output_activation_max_f32; +}; + +TfLiteStatus CalculateOpDataAdd(TfLiteContext* context, TfLiteAddParams* params, + const TfLiteTensor* input1, + const TfLiteTensor* input2, + TfLiteTensor* output, OpDataAdd* data); + +TfLiteStatus AddPrepare(TfLiteContext* context, TfLiteNode* node); + +// Generic must define registration function. 
+TFLMRegistration Register_ADD(); + +#if defined(CMSIS_NN) +TFLMRegistration Register_ADD_INT8(); + +TFLMRegistration Register_ADD_INT16(); +#else +// Fallback registration +inline TFLMRegistration Register_ADD_INT8() { return Register_ADD(); } + +inline TFLMRegistration Register_ADD_INT16() { return Register_ADD(); } +#endif +} // namespace tflite + +#endif // TENSORFLOW_LITE_MICRO_KERNELS_ADD_H_ diff --git a/tensorflow/lite/micro/kernels/add_common.cc b/tensorflow/lite/micro/kernels/add_common.cc new file mode 100644 index 0000000..cc94509 --- /dev/null +++ b/tensorflow/lite/micro/kernels/add_common.cc @@ -0,0 +1,116 @@ +/* Copyright 2021 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/ + +#include "tensorflow/lite/c/builtin_op_data.h" +#include "tensorflow/lite/c/common.h" +#include "tensorflow/lite/kernels/internal/quantization_util.h" +#include "tensorflow/lite/kernels/internal/reference/add.h" +#include "tensorflow/lite/kernels/internal/reference/integer_ops/add.h" +#include "tensorflow/lite/kernels/internal/reference/process_broadcast_shapes.h" +#include "tensorflow/lite/kernels/internal/tensor_ctypes.h" +#include "tensorflow/lite/kernels/kernel_util.h" +#include "tensorflow/lite/kernels/op_macros.h" +#include "tensorflow/lite/micro/kernels/add.h" +#include "tensorflow/lite/micro/kernels/kernel_util.h" +#include "tensorflow/lite/micro/memory_helpers.h" + +namespace tflite { + +const int kAddInputTensor1 = 0; +const int kAddInputTensor2 = 1; +const int kAddOutputTensor = 0; + +TfLiteStatus CalculateOpDataAdd(TfLiteContext* context, TfLiteAddParams* params, + const TfLiteTensor* input1, + const TfLiteTensor* input2, + TfLiteTensor* output, OpDataAdd* data) { + data->requires_broadcast = !HaveSameShapes(input1, input2); + + if (output->type == kTfLiteInt8 || output->type == kTfLiteInt16) { + TFLITE_CHECK_NE(output->quantization.type, kTfLiteNoQuantization); + + // 8bit -> 8bit general quantized path, with general rescalings + data->input1_offset = -input1->params.zero_point; + data->input2_offset = -input2->params.zero_point; + data->output_offset = output->params.zero_point; + data->left_shift = (output->type == kTfLiteInt16) ? 
15 : 20; + const double twice_max_input_scale = + 2 * static_cast( + std::max(input1->params.scale, input2->params.scale)); + const double real_input1_multiplier = + static_cast(input1->params.scale) / twice_max_input_scale; + const double real_input2_multiplier = + static_cast(input2->params.scale) / twice_max_input_scale; + const double real_output_multiplier = + twice_max_input_scale / + ((1 << data->left_shift) * static_cast(output->params.scale)); + + QuantizeMultiplierSmallerThanOneExp( + real_input1_multiplier, &data->input1_multiplier, &data->input1_shift); + + QuantizeMultiplierSmallerThanOneExp( + real_input2_multiplier, &data->input2_multiplier, &data->input2_shift); + + QuantizeMultiplierSmallerThanOneExp( + real_output_multiplier, &data->output_multiplier, &data->output_shift); + + TF_LITE_ENSURE_STATUS(CalculateActivationRangeQuantized( + context, params->activation, output, &data->output_activation_min, + &data->output_activation_max)); + } else if (output->type == kTfLiteFloat32) { + CalculateActivationRange(params->activation, + &data->output_activation_min_f32, + &data->output_activation_max_f32); + } + + return kTfLiteOk; +} + +TfLiteStatus AddPrepare(TfLiteContext* context, TfLiteNode* node) { + TFLITE_DCHECK(node->user_data != nullptr); + TFLITE_DCHECK(node->builtin_data != nullptr); + + MicroContext* micro_context = GetMicroContext(context); + TfLiteTensor* input1 = + micro_context->AllocateTempInputTensor(node, kAddInputTensor1); + TF_LITE_ENSURE(context, input1 != nullptr); + TfLiteTensor* input2 = + micro_context->AllocateTempInputTensor(node, kAddInputTensor2); + TF_LITE_ENSURE(context, input2 != nullptr); + TfLiteTensor* output = + micro_context->AllocateTempOutputTensor(node, kAddOutputTensor); + TF_LITE_ENSURE(context, output != nullptr); + + OpDataAdd* data = static_cast(node->user_data); + auto* params = reinterpret_cast(node->builtin_data); + + TF_LITE_ENSURE_STATUS( + CalculateOpDataAdd(context, params, input1, input2, output, 
data)); + + if (output->type == kTfLiteInt32) { + // Only support int32 unquantized add for now. + TF_LITE_ENSURE_EQ(context, input1->quantization.type, + kTfLiteNoQuantization); + TF_LITE_ENSURE_EQ(context, input2->quantization.type, + kTfLiteNoQuantization); + } + + micro_context->DeallocateTempTfLiteTensor(input1); + micro_context->DeallocateTempTfLiteTensor(input2); + micro_context->DeallocateTempTfLiteTensor(output); + return kTfLiteOk; +} + +} // namespace tflite diff --git a/tensorflow/lite/micro/kernels/add_n.cc b/tensorflow/lite/micro/kernels/add_n.cc new file mode 100644 index 0000000..765d5d6 --- /dev/null +++ b/tensorflow/lite/micro/kernels/add_n.cc @@ -0,0 +1,215 @@ +/* Copyright 2020 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/ + +#include "tensorflow/lite/kernels/internal/reference/add_n.h" + +#include + +#include "tensorflow/lite/c/common.h" +#include "tensorflow/lite/kernels/internal/quantization_util.h" +#include "tensorflow/lite/kernels/internal/tensor_ctypes.h" +#include "tensorflow/lite/kernels/kernel_util.h" +#include "tensorflow/lite/micro/kernels/kernel_util.h" +#include "tensorflow/lite/micro/micro_log.h" + +namespace tflite { +namespace { + +constexpr int kInputTensor0 = 0; +constexpr int kOutputTensor = 0; + +constexpr int kAddNIntegerShift = 20; + +// only used with INT8 tensors +struct OpData { + int32_t output_activation_min; + int32_t output_activation_max; + int32_t input_offset; + int32_t output_offset; + int32_t input_multiplier; + int32_t output_multiplier; + int input_shift; + int output_shift; + int left_shift; + int scratch_index; +}; + +TfLiteStatus CalculateOpData(TfLiteContext* context, TfLiteNode* node) { + int num_inputs = NumInputs(node); + TF_LITE_ENSURE(context, num_inputs >= 2); + TF_LITE_ENSURE_EQ(context, NumOutputs(node), 1); + + MicroContext* micro_context = GetMicroContext(context); + TfLiteTensor* input_tensor_first = + micro_context->AllocateTempInputTensor(node, kInputTensor0); + TF_LITE_ENSURE(context, input_tensor_first != nullptr); + TfLiteTensor* output = + micro_context->AllocateTempOutputTensor(node, kOutputTensor); + TF_LITE_ENSURE(context, output != nullptr); + + // Check that all tensors have the same shape and type. 
+ TF_LITE_ENSURE_TYPES_EQ(context, output->type, input_tensor_first->type); + for (int i = kInputTensor0 + 1; i < num_inputs; ++i) { + TfLiteTensor* input = micro_context->AllocateTempInputTensor(node, i); + TF_LITE_ENSURE(context, input != nullptr); + TF_LITE_ENSURE(context, HaveSameShapes(input_tensor_first, input)); + TF_LITE_ENSURE_TYPES_EQ(context, input_tensor_first->type, input->type); + + // Check that all INT8 input tensors have the same zero-point and scale. + if (input_tensor_first->type == kTfLiteInt8) { + TF_LITE_ENSURE(context, input_tensor_first->params.zero_point == + input->params.zero_point); + TF_LITE_ENSURE(context, + input_tensor_first->params.scale == input->params.scale); + } + + micro_context->DeallocateTempTfLiteTensor(input); + } + + if (output->type == kTfLiteFloat32) { + // Allocate scratch buffer space for pointer to each tensor's data + // and store the scratch buffer index in the node's user_data + int scratch_index; + size_t scratch_size = sizeof(float*) * num_inputs; + TF_LITE_ENSURE_OK(context, context->RequestScratchBufferInArena( + context, scratch_size, &scratch_index)); + node->user_data = + reinterpret_castuser_data)>(scratch_index); + } else if (output->type == kTfLiteInt8) { + node->user_data = + context->AllocatePersistentBuffer(context, sizeof(OpData)); + OpData* data = static_cast(node->user_data); + + // Allocate scratch buffer space for pointer to each tensor's data + // and store the scratch buffer index in OpData + size_t scratch_size = sizeof(int8_t*) * num_inputs; + TF_LITE_ENSURE_OK( + context, context->RequestScratchBufferInArena(context, scratch_size, + &data->scratch_index)); + + // 8bit -> 8bit general quantized path, with general rescalings + data->input_offset = -input_tensor_first->params.zero_point; + data->output_offset = output->params.zero_point; + data->left_shift = kAddNIntegerShift; + const double twice_max_input_scale = + 2 * static_cast(input_tensor_first->params.scale); + const double 
real_input_multiplier = + static_cast(input_tensor_first->params.scale) / + twice_max_input_scale; + const double real_output_multiplier = + twice_max_input_scale / + ((1 << data->left_shift) * static_cast(output->params.scale)); + + QuantizeMultiplierSmallerThanOneExp( + real_input_multiplier, &data->input_multiplier, &data->input_shift); + + QuantizeMultiplierSmallerThanOneExp( + real_output_multiplier, &data->output_multiplier, &data->output_shift); + + TF_LITE_ENSURE_STATUS(CalculateActivationRangeQuantized( + context, kTfLiteActNone, output, &data->output_activation_min, + &data->output_activation_max)); + } else { + MicroPrintf("ADD_N only supports FLOAT32 and INT8, got %s.", + TfLiteTypeGetName(output->type)); + return kTfLiteError; + } + + micro_context->DeallocateTempTfLiteTensor(input_tensor_first); + micro_context->DeallocateTempTfLiteTensor(output); + + return kTfLiteOk; +} + +TfLiteStatus Prepare(TfLiteContext* context, TfLiteNode* node) { + return CalculateOpData(context, node); +} + +template +inline const T** CopyInputsToScratchBuffer(TfLiteContext* context, + TfLiteNode* node, + const int scratch_index) { + int num_inputs = NumInputs(node); + void* scratch_buffer = context->GetScratchBuffer(context, scratch_index); + const T** all_inputs = static_cast(scratch_buffer); + for (int i = 0; i < num_inputs; i++) { + const TfLiteEvalTensor* next_input = + tflite::micro::GetEvalInput(context, node, kInputTensor0 + i); + all_inputs[i] = tflite::micro::GetTensorData(next_input); + } + + return all_inputs; +} + +template +void EvalAddN(TfLiteContext* context, TfLiteNode* node, + TfLiteEvalTensor* output) { + int num_inputs = NumInputs(node); + + int scratch_index = + static_cast(reinterpret_cast(node->user_data)); + const T** all_inputs = + CopyInputsToScratchBuffer(context, node, scratch_index); + + reference_ops::AddN(tflite::micro::GetTensorShape(output), num_inputs, + all_inputs, tflite::micro::GetTensorData(output)); +} + +template +void 
EvalAddNQuantized(TfLiteContext* context, TfLiteNode* node, + TfLiteEvalTensor* output) { + int num_inputs = NumInputs(node); + + OpData* data = static_cast(node->user_data); + const T** all_inputs = + CopyInputsToScratchBuffer(context, node, data->scratch_index); + + ArithmeticParams params; + params.left_shift = data->left_shift; + params.input1_offset = data->input_offset; + params.input1_multiplier = data->input_multiplier; + params.input1_shift = data->input_shift; + params.output_offset = data->output_offset; + params.output_multiplier = data->output_multiplier; + params.output_shift = data->output_shift; + SetActivationParams(data->output_activation_min, data->output_activation_max, + ¶ms); + + reference_ops::AddN(params, tflite::micro::GetTensorShape(output), num_inputs, + all_inputs, tflite::micro::GetTensorData(output)); +} + +TfLiteStatus Eval(TfLiteContext* context, TfLiteNode* node) { + TfLiteEvalTensor* output = + tflite::micro::GetEvalOutput(context, node, kOutputTensor); + if (output->type == kTfLiteFloat32) { + EvalAddN(context, node, output); + } else if (output->type == kTfLiteInt8) { + EvalAddNQuantized(context, node, output); + } else { + MicroPrintf("ADD_N only supports FLOAT32 and INT8, got %s.", + TfLiteTypeGetName(output->type)); + return kTfLiteError; + } + return kTfLiteOk; +} + +} // namespace + +TFLMRegistration Register_ADD_N() { + return tflite::micro::RegisterOp(nullptr, Prepare, Eval); +} + +} // namespace tflite diff --git a/tensorflow/lite/micro/kernels/add_n_test.cc b/tensorflow/lite/micro/kernels/add_n_test.cc new file mode 100644 index 0000000..8cc5cdc --- /dev/null +++ b/tensorflow/lite/micro/kernels/add_n_test.cc @@ -0,0 +1,170 @@ +/* Copyright 2020 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. 
+You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#include + +#include "tensorflow/lite/c/builtin_op_data.h" +#include "tensorflow/lite/c/common.h" +#include "tensorflow/lite/micro/kernels/kernel_runner.h" +#include "tensorflow/lite/micro/test_helpers.h" +#include "tensorflow/lite/micro/testing/micro_test.h" + +namespace tflite { +namespace testing { +namespace { + +constexpr int kMaxInputTensors = 3; +constexpr int kMaxOutputTensors = 1; + +void ExecuteAddN(TfLiteTensor* tensors, int tensors_count) { + int input_array_data[kMaxInputTensors + kMaxOutputTensors] = {tensors_count - + 1}; + for (int i = 1; i < tensors_count; i++) { + input_array_data[i] = i - 1; + } + TfLiteIntArray* inputs_array = IntArrayFromInts(input_array_data); + int kOutputArrayData[] = {1, tensors_count - 1}; + TfLiteIntArray* outputs_array = IntArrayFromInts(kOutputArrayData); + + const TFLMRegistration registration = tflite::Register_ADD_N(); + micro::KernelRunner runner(registration, tensors, tensors_count, inputs_array, + outputs_array, nullptr); + + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, runner.InitAndPrepare()); + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, runner.Invoke()); +} + +template +void TestAddN(int* input_dims_data, const T* const* input_data, + int input_data_count, int* expected_dims, const T* expected_data, + T* output_data) { + TF_LITE_MICRO_EXPECT_LE(input_data_count, kMaxInputTensors); + + TfLiteIntArray* input_dims = IntArrayFromInts(input_dims_data); + TfLiteIntArray* output_dims = IntArrayFromInts(expected_dims); + const int output_count = 
ElementCount(*output_dims); + + TfLiteTensor tensors[kMaxInputTensors + kMaxOutputTensors] = {}; + for (int i = 0; i < input_data_count; i++) { + tensors[i] = CreateTensor(input_data[i], input_dims); + } + tensors[input_data_count] = CreateTensor(output_data, output_dims); + + ExecuteAddN(tensors, input_data_count + 1); + + for (int i = 0; i < output_count; i++) { + TF_LITE_MICRO_EXPECT_EQ(expected_data[i], output_data[i]); + } +} + +// min/max are used to compute scale, zero-point, compare tolerance +template +struct TestQuantParams { + float data_min; // input and output data minimum value + float data_max; // input and output data maximum value + T input_data[kNumInputs][kOutputSize]; // quantized input storage + T output_data[kOutputSize]; // quantized output storage +}; + +// for quantized Add, the error shouldn't exceed step +template +float GetTolerance(float min, float max) { + float kQuantizedStep = + 2.0f * (max - min) / + (std::numeric_limits::max() - std::numeric_limits::min()); + return kQuantizedStep; +} + +template +void TestAddNQuantized(TestQuantParams* params, + int* input_dims_data, const float* const* input_data, + int* expected_dims, const float* expected_data, + float* output_data) { + TF_LITE_MICRO_EXPECT_LE(kNumInputs, kMaxInputTensors); + + TfLiteIntArray* input_dims = IntArrayFromInts(input_dims_data); + TfLiteIntArray* output_dims = IntArrayFromInts(expected_dims); + + const float scale = ScaleFromMinMax(params->data_min, params->data_max); + const int zero_point = + ZeroPointFromMinMax(params->data_min, params->data_max); + + TfLiteTensor tensors[kMaxInputTensors + kMaxOutputTensors] = {}; + for (int i = 0; i < kNumInputs; i++) { + tensors[i] = CreateQuantizedTensor(input_data[i], params->input_data[i], + input_dims, scale, zero_point); + } + tensors[kNumInputs] = CreateQuantizedTensor(params->output_data, output_dims, + scale, zero_point); + + ExecuteAddN(tensors, kNumInputs + 1); + + Dequantize(params->output_data, kOutputSize, scale, 
zero_point, output_data); + const float kTolerance = GetTolerance(params->data_min, params->data_max); + for (int i = 0; i < kOutputSize; i++) { + TF_LITE_MICRO_EXPECT_NEAR(expected_data[i], output_data[i], kTolerance); + } +} + +} // namespace +} // namespace testing +} // namespace tflite + +TF_LITE_MICRO_TESTS_BEGIN + +TF_LITE_MICRO_TEST(FloatAddNOpAddMultipleTensors) { + int kDims[] = {4, 1, 2, 2, 1}; + constexpr float kInput1[] = {-2.0, 0.2, 0.7, 0.8}; + constexpr float kInput2[] = {0.1, 0.2, 0.3, 0.5}; + constexpr float kInput3[] = {0.5, 0.1, 0.1, 0.2}; + constexpr float kExpect[] = {-1.4, 0.5, 1.1, 1.5}; + const float* kInputs[tflite::testing::kMaxInputTensors] = { + kInput1, + kInput2, + kInput3, + }; + constexpr int kInputCount = std::extent::value; + constexpr int kOutputCount = std::extent::value; + float output_data[kOutputCount]; + + tflite::testing::TestAddN(kDims, kInputs, kInputCount, kDims, kExpect, + output_data); +} + +TF_LITE_MICRO_TEST(Int8AddNOpAddMultipleTensors) { + int kDims[] = {4, 1, 2, 2, 1}; + constexpr float kInput1[] = {-2.0, 0.2, 0.7, 0.8}; + constexpr float kInput2[] = {0.1, 0.2, 0.3, 0.5}; + constexpr float kInput3[] = {0.5, 0.1, 0.1, 0.2}; + constexpr float kExpect[] = {-1.4, 0.5, 1.1, 1.5}; + const float* kInputs[tflite::testing::kMaxInputTensors] = { + kInput1, + kInput2, + kInput3, + }; + constexpr int kInputCount = std::extent::value; + constexpr int kOutputCount = std::extent::value; + float output_data[kOutputCount]; + + tflite::testing::TestQuantParams params = + {}; + params.data_min = -3.0; + params.data_max = 3.0; + + tflite::testing::TestAddNQuantized( + ¶ms, kDims, kInputs, kDims, kExpect, output_data); +} + +TF_LITE_MICRO_TESTS_END diff --git a/tensorflow/lite/micro/kernels/add_test.cc b/tensorflow/lite/micro/kernels/add_test.cc new file mode 100644 index 0000000..6e8b40c --- /dev/null +++ b/tensorflow/lite/micro/kernels/add_test.cc @@ -0,0 +1,524 @@ +/* Copyright 2019 The TensorFlow Authors. All Rights Reserved. 
+ +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#include + +#include "tensorflow/lite/c/builtin_op_data.h" +#include "tensorflow/lite/c/common.h" +#include "tensorflow/lite/micro/kernels/kernel_runner.h" +#include "tensorflow/lite/micro/test_helpers.h" +#include "tensorflow/lite/micro/testing/micro_test.h" + +namespace tflite { +namespace testing { +namespace { + +// Shapes and values for mixed broadcast tests. +constexpr int kBroadcastOutputDimsCount = 36; +constexpr int kBroadcastNumShapes = 4; + +int broadcast_input1_shape[] = {4, 2, 3, 1, 2}; +const float broadcast_input1_values[] = {-0.3, 2.3, 0.9, 0.5, 0.8, -1.1, + 1.2, 2.8, -1.6, 0.0, 0.7, -2.2}; +const float broadcast_input2_values[] = {0.2, 0.3, -0.4, 0.5, 1.0, 0.9}; +// clang-format off +const float + broadcast_goldens[kBroadcastNumShapes][kBroadcastOutputDimsCount] = { + {-0.1, 2.6, -0.7, 2.8, 0.7, 3.2, 1.1, 0.8, 0.5, 1.0, 1.9, 1.4, + 1.0, -0.8, 0.4, -0.6, 1.8, -0.2, 1.4, 3.1, 0.8, 3.3, 2.2, 3.7, + -1.4, 0.3, -2.0, 0.5, -0.6, 0.9, 0.9, -1.9, 0.3, -1.7, 1.7, -1.3}, + {-0.1, 2.6, 0.5, 1.0, 1.8, -0.2, 1.4, 3.1, -2.0, 0.5, 1.7, -1.3}, + {-0.1, 2.5, 0.0, 2.6, -0.7, 1.9, 1.1, 0.7, 1.2, 0.8, 0.5, 0.1, + 1.0, -0.9, 1.1, -0.8, 0.4, -1.5, 1.7, 3.3, 2.2, 3.8, 2.1, 3.7, + -1.1, 0.5, -0.6, 1.0, -0.7, 0.9, 1.2, -1.7, 1.7, -1.2, 1.6, -1.3}, + {-0.1, 2.5, 1.2, 0.8, 0.4, -1.5, 1.7, 3.3, -0.6, 1.0, 1.6, -1.3}, +}; +// clang-format on + +constexpr int 
kBroadcastMaxShapeSize = 5; +int broadcast_input2_shapes[kBroadcastNumShapes][kBroadcastMaxShapeSize] = { + {4, 1, 1, 3, 2}, + {4, 1, 3, 1, 2}, + {4, 2, 1, 3, 1}, + {4, 2, 3, 1, 1}, +}; +int broadcast_output_shapes[kBroadcastNumShapes][kBroadcastMaxShapeSize] = { + {4, 2, 3, 3, 2}, + {4, 2, 3, 1, 2}, + {4, 2, 3, 3, 2}, + {4, 2, 3, 1, 2}, +}; + +template +void ValidateAddGoldens(TfLiteTensor* tensors, int tensors_size, + const T* golden, T* output, int output_size, + TfLiteFusedActivation activation, + float tolerance = 1e-5) { + TfLiteAddParams builtin_data; + builtin_data.activation = activation; + + int inputs_array_data[] = {2, 0, 1}; + TfLiteIntArray* inputs_array = IntArrayFromInts(inputs_array_data); + int outputs_array_data[] = {1, 2}; + TfLiteIntArray* outputs_array = IntArrayFromInts(outputs_array_data); + + const TFLMRegistration registration = Register_ADD(); + micro::KernelRunner runner(registration, tensors, tensors_size, inputs_array, + outputs_array, &builtin_data); + + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, runner.InitAndPrepare()); + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, runner.Invoke()); + + for (int i = 0; i < output_size; ++i) { + TF_LITE_MICRO_EXPECT_NEAR(golden[i], output[i], tolerance); + } +} + +void TestAddFloat(int* input1_dims_data, const float* input1_data, + int* input2_dims_data, const float* input2_data, + int* output_dims_data, const float* expected_output, + TfLiteFusedActivation activation, float* output_data) { + TfLiteIntArray* input1_dims = IntArrayFromInts(input1_dims_data); + TfLiteIntArray* input2_dims = IntArrayFromInts(input2_dims_data); + TfLiteIntArray* output_dims = IntArrayFromInts(output_dims_data); + + constexpr int inputs_size = 2; + constexpr int outputs_size = 1; + constexpr int tensors_size = inputs_size + outputs_size; + TfLiteTensor tensors[tensors_size] = { + CreateTensor(input1_data, input1_dims), + CreateTensor(input2_data, input2_dims), + CreateTensor(output_data, output_dims), + }; + + ValidateAddGoldens(tensors, 
tensors_size, expected_output, output_data, + ElementCount(*output_dims), activation); +} + +void TestAddInt32(int* input1_dims_data, const int32_t* input1_data, + int* input2_dims_data, const int32_t* input2_data, + int* output_dims_data, const int32_t* expected_output, + TfLiteFusedActivation activation, int32_t* output_data) { + TfLiteIntArray* input1_dims = IntArrayFromInts(input1_dims_data); + TfLiteIntArray* input2_dims = IntArrayFromInts(input2_dims_data); + TfLiteIntArray* output_dims = IntArrayFromInts(output_dims_data); + + constexpr int inputs_size = 2; + constexpr int outputs_size = 1; + constexpr int tensors_size = inputs_size + outputs_size; + TfLiteTensor tensors[tensors_size] = { + CreateTensor(input1_data, input1_dims), + CreateTensor(input2_data, input2_dims), + CreateTensor(output_data, output_dims), + }; + ValidateAddGoldens(tensors, tensors_size, expected_output, output_data, + ElementCount(*output_dims), activation); +} + +template +void TestAddQuantized(int* input1_dims_data, const float* input1_data, + T* input1_quantized, float input1_scale, + int input1_zero_point, int* input2_dims_data, + const float* input2_data, T* input2_quantized, + float input2_scale, int input2_zero_point, + int* output_dims_data, const float* golden, + T* golden_quantized, float output_scale, + int output_zero_point, TfLiteFusedActivation activation, + T* output_data) { + TfLiteIntArray* input1_dims = IntArrayFromInts(input1_dims_data); + TfLiteIntArray* input2_dims = IntArrayFromInts(input2_dims_data); + TfLiteIntArray* output_dims = IntArrayFromInts(output_dims_data); + + constexpr int inputs_size = 2; + constexpr int outputs_size = 1; + constexpr int tensors_size = inputs_size + outputs_size; + TfLiteTensor tensors[tensors_size] = { + tflite::testing::CreateQuantizedTensor(input1_data, input1_quantized, + input1_dims, input1_scale, + input1_zero_point), + tflite::testing::CreateQuantizedTensor(input2_data, input2_quantized, + input2_dims, input2_scale, + 
input2_zero_point), + tflite::testing::CreateQuantizedTensor(output_data, output_dims, + output_scale, output_zero_point), + }; + tflite::Quantize(golden, golden_quantized, ElementCount(*output_dims), + output_scale, output_zero_point); + + ValidateAddGoldens(tensors, tensors_size, golden_quantized, output_data, + ElementCount(*output_dims), activation); +} + +} // namespace +} // namespace testing +} // namespace tflite + +TF_LITE_MICRO_TESTS_BEGIN + +TF_LITE_MICRO_TEST(FloatAddNoActivation) { + int inout_shape[] = {4, 1, 2, 2, 1}; + const float input1_values[] = {-2.0, 0.2, 0.7, 0.8}; + const float input2_values[] = {0.1, 0.2, 0.3, 0.5}; + const float golden_values[] = {-1.9, 0.4, 1.0, 1.3}; + constexpr int kOutputDimsCount = 4; + float output_data[kOutputDimsCount]; + tflite::testing::TestAddFloat(inout_shape, input1_values, inout_shape, + input2_values, inout_shape, golden_values, + kTfLiteActNone, output_data); +} + +TF_LITE_MICRO_TEST(FloatAddActivationRelu1) { + int inout_shape[] = {4, 1, 2, 2, 1}; + const float input1_values[] = {-2.0, 0.2, 0.7, 0.8}; + const float input2_values[] = {0.1, 0.2, 0.3, 0.5}; + const float golden_values[] = {-1.0, 0.4, 1.0, 1.0}; + + constexpr int kOutputDimsCount = 4; + float output_data[kOutputDimsCount]; + tflite::testing::TestAddFloat(inout_shape, input1_values, inout_shape, + input2_values, inout_shape, golden_values, + kTfLiteActReluN1To1, output_data); +} + +TF_LITE_MICRO_TEST(FloatAddVariousInputShapes) { + constexpr int kOutputDimsCount = 6; + float output_data[kOutputDimsCount]; + + const float input1_values[] = {-2.0, 0.2, 0.7, 0.8, 1.1, 2.0}; + const float input2_values[] = {0.1, 0.2, 0.3, 0.5, 1.1, 0.1}; + const float expected_output[] = {-1.9, 0.4, 1.0, 1.3, 2.2, 2.1}; + + constexpr int num_shapes = 4; + constexpr int max_shape_size = 5; + int test_shapes[num_shapes][max_shape_size] = { + {1, 6}, + {2, 2, 3}, + {3, 2, 1, 3}, + {4, 1, 3, 1, 2}, + }; + + for (int i = 0; i < num_shapes; ++i) { + 
tflite::testing::TestAddFloat(test_shapes[i], input1_values, test_shapes[i], + input2_values, test_shapes[i], + expected_output, kTfLiteActNone, output_data); + } +} + +TF_LITE_MICRO_TEST(FloatAddWithScalarBroadcast) { + constexpr int kOutputDimsCount = 6; + float output_data[kOutputDimsCount]; + + const float input1_values[] = {-2.0, 0.2, 0.7, 0.8, 1.1, 2.0}; + int input2_shape[] = {0}; + const float input2_values[] = {0.1}; + const float expected_output[] = {-1.9, 0.3, 0.8, 0.9, 1.2, 2.1}; + + constexpr int num_shapes = 4; + constexpr int max_shape_size = 5; + int test_shapes[num_shapes][max_shape_size] = { + {1, 6}, + {2, 2, 3}, + {3, 2, 1, 3}, + {4, 1, 3, 1, 2}, + }; + + for (int i = 0; i < num_shapes; ++i) { + tflite::testing::TestAddFloat(test_shapes[i], input1_values, input2_shape, + input2_values, test_shapes[i], + expected_output, kTfLiteActNone, output_data); + } +} + +TF_LITE_MICRO_TEST(Int32AddNoActivation) { + int inout_shape[] = {4, 1, 2, 2, 1}; + const int32_t input1_values[] = {-2, 2147483646, -1, 1146622854}; + const int32_t input2_values[] = {3, 1, -2147483647, -726978367}; + const int32_t golden_values[] = {1, 2147483647, -2147483648, 419644487}; + constexpr int kOutputDimsCount = 4; + int32_t output_data[kOutputDimsCount]; + tflite::testing::TestAddInt32(inout_shape, input1_values, inout_shape, + input2_values, inout_shape, golden_values, + kTfLiteActNone, output_data); +} + +TF_LITE_MICRO_TEST(QuantizedAddNoActivationInt8) { + const float scales[] = {0.25, 0.5, 1.0}; + const int zero_points[] = {-10, 4, 13}; + int inout_shape[] = {4, 1, 2, 2, 1}; + const float input1_values[] = {-2.01, -1.01, -0.01, 0.98}; + const float input2_values[] = {1.01, 1.99, 2.99, 4.02}; + const float golden_values[] = {-1, 1, 3, 5}; + + constexpr int kOutputDimsCount = 4; + int8_t input1_quantized[kOutputDimsCount]; + int8_t input2_quantized[kOutputDimsCount]; + int8_t golden_quantized[kOutputDimsCount]; + int8_t output[kOutputDimsCount]; + + 
tflite::testing::TestAddQuantized( + inout_shape, input1_values, input1_quantized, scales[0], zero_points[0], + inout_shape, input2_values, input2_quantized, scales[1], zero_points[1], + inout_shape, golden_values, golden_quantized, scales[2], zero_points[2], + kTfLiteActNone, output); +} + +TF_LITE_MICRO_TEST(QuantizedAddActivationRelu1Int8) { + const float scales[] = {0.25, 0.5, 1.0}; + const int zero_points[] = {-10, 4, 13}; + int inout_shape[] = {4, 1, 2, 2, 1}; + const float input1_values[] = {-2.01, -1.01, -0.01, 0.98}; + const float input2_values[] = {1.01, 1.99, 2.99, 4.02}; + const float golden_values[] = {-1, 1, 1, 1}; + + constexpr int kOutputDimsCount = 4; + int8_t input1_quantized[kOutputDimsCount]; + int8_t input2_quantized[kOutputDimsCount]; + int8_t golden_quantized[kOutputDimsCount]; + int8_t output[kOutputDimsCount]; + + tflite::testing::TestAddQuantized( + inout_shape, input1_values, input1_quantized, scales[0], zero_points[0], + inout_shape, input2_values, input2_quantized, scales[1], zero_points[1], + inout_shape, golden_values, golden_quantized, scales[2], zero_points[2], + kTfLiteActReluN1To1, output); +} + +TF_LITE_MICRO_TEST(QuantizedAddVariousInputShapesInt8) { + const float scales[] = {0.1, 0.05, 0.1}; + const int zero_points[] = {-9, 5, 14}; + + constexpr int num_shapes = 4; + constexpr int max_shape_size = 5; + int test_shapes[num_shapes][max_shape_size] = { + {1, 6}, + {2, 2, 3}, + {3, 2, 1, 3}, + {4, 1, 3, 1, 2}, + }; + + const float input1_values[] = {-2.0, 0.2, 0.7, 0.8, 1.1, 2.0}; + const float input2_values[] = {0.1, 0.2, 0.3, 0.5, 1.1, 0.1}; + const float golden_values[] = {-1.9, 0.4, 1.0, 1.3, 2.2, 2.1}; + + constexpr int kOutputDimsCount = 6; + int8_t input1_quantized[kOutputDimsCount]; + int8_t input2_quantized[kOutputDimsCount]; + int8_t golden_quantized[kOutputDimsCount]; + int8_t output[kOutputDimsCount]; + + for (int i = 0; i < num_shapes; i++) { + tflite::testing::TestAddQuantized( + test_shapes[i], input1_values, 
input1_quantized, scales[0], + zero_points[0], test_shapes[i], input2_values, input2_quantized, + scales[1], zero_points[1], test_shapes[i], golden_values, + golden_quantized, scales[2], zero_points[2], kTfLiteActNone, output); + } +} + +TF_LITE_MICRO_TEST(QuantizedAddWithScalarBroadcastFloat) { + float output_float[tflite::testing::kBroadcastOutputDimsCount]; + + for (int i = 0; i < tflite::testing::kBroadcastNumShapes; ++i) { + tflite::testing::TestAddFloat(tflite::testing::broadcast_input1_shape, + tflite::testing::broadcast_input1_values, + tflite::testing::broadcast_input2_shapes[i], + tflite::testing::broadcast_input2_values, + tflite::testing::broadcast_output_shapes[i], + tflite::testing::broadcast_goldens[i], + kTfLiteActNone, output_float); + } +} + +TF_LITE_MICRO_TEST(QuantizedAddWithScalarBroadcastInt8) { + const float input1_values[] = {-2.0, 0.2, 0.7, 0.8, 1.1, 2.0}; + int input2_shape[] = {0}; + const float input2_values[] = {0.1}; + const float golden[] = {-1.9, 0.3, 0.8, 0.9, 1.2, 2.1}; + + constexpr int num_shapes = 4; + constexpr int max_shape_size = 5; + int test_shapes[num_shapes][max_shape_size] = { + {1, 6}, + {2, 2, 3}, + {3, 2, 1, 3}, + {4, 1, 3, 1, 2}, + }; + + const float scales[] = {0.1, 0.05, 0.05}; + const int zero_points[] = {-8, 4, 12}; + + constexpr int kOutputDimsCount = 6; + int8_t input1_quantized[kOutputDimsCount]; + int8_t input2_quantized[kOutputDimsCount]; + int8_t golden_quantized[kOutputDimsCount]; + int8_t output[kOutputDimsCount]; + + for (int i = 0; i < num_shapes; ++i) { + tflite::testing::TestAddQuantized( + test_shapes[i], input1_values, input1_quantized, scales[0], + zero_points[0], input2_shape, input2_values, input2_quantized, + scales[1], zero_points[1], test_shapes[i], golden, golden_quantized, + scales[2], zero_points[2], kTfLiteActNone, output); + } +} + +TF_LITE_MICRO_TEST(QuantizedAddWithMixedBroadcastInt8) { + const float scales[] = {0.1, 0.05, 0.1}; + const int zero_points[] = {-10, -5, 7}; + int8_t 
input1_quantized[tflite::testing::kBroadcastOutputDimsCount]; + int8_t input2_quantized[tflite::testing::kBroadcastOutputDimsCount]; + int8_t golden_quantized[tflite::testing::kBroadcastOutputDimsCount]; + int8_t output[tflite::testing::kBroadcastOutputDimsCount]; + + for (int i = 0; i < tflite::testing::kBroadcastNumShapes; ++i) { + tflite::testing::TestAddQuantized( + tflite::testing::broadcast_input1_shape, + tflite::testing::broadcast_input1_values, input1_quantized, scales[0], + zero_points[0], tflite::testing::broadcast_input2_shapes[i], + tflite::testing::broadcast_input2_values, input2_quantized, scales[1], + zero_points[1], tflite::testing::broadcast_output_shapes[i], + tflite::testing::broadcast_goldens[i], golden_quantized, scales[2], + zero_points[2], kTfLiteActNone, output); + } +} + +TF_LITE_MICRO_TEST(QuantizedAddNoActivationInt16) { + const float scales[] = {0.25, 0.5, 1.0}; + const int zero_points[] = {0, 0, 0}; + int inout_shape[] = {4, 1, 2, 2, 1}; + const float input1_values[] = {-2.01, -1.01, -0.01, 0.98}; + const float input2_values[] = {1.01, 2.01, 3.01, 4.02}; + const float golden_values[] = {-1, 1, 3, 5}; + + constexpr int kOutputDimsCount = 4; + int16_t input1_quantized[kOutputDimsCount]; + int16_t input2_quantized[kOutputDimsCount]; + int16_t golden_quantized[kOutputDimsCount]; + int16_t output[kOutputDimsCount]; + + tflite::testing::TestAddQuantized( + inout_shape, input1_values, input1_quantized, scales[0], zero_points[0], + inout_shape, input2_values, input2_quantized, scales[1], zero_points[1], + inout_shape, golden_values, golden_quantized, scales[2], zero_points[2], + kTfLiteActNone, output); +} + +TF_LITE_MICRO_TEST(QuantizedAddActivationRelu1Int16) { + const float scales[] = {0.25, 0.5, 1.0}; + const int zero_points[] = {0, 0, 0}; + int inout_shape[] = {4, 1, 2, 2, 1}; + const float input1_values[] = {-2.01, -1.01, -0.01, 0.98}; + const float input2_values[] = {1.01, 1.99, 2.99, 4.02}; + const float golden_values[] = {-1, 1, 1, 
1}; + + constexpr int kOutputDimsCount = 4; + int16_t input1_quantized[kOutputDimsCount]; + int16_t input2_quantized[kOutputDimsCount]; + int16_t golden_quantized[kOutputDimsCount]; + int16_t output[kOutputDimsCount]; + + tflite::testing::TestAddQuantized( + inout_shape, input1_values, input1_quantized, scales[0], zero_points[0], + inout_shape, input2_values, input2_quantized, scales[1], zero_points[1], + inout_shape, golden_values, golden_quantized, scales[2], zero_points[2], + kTfLiteActReluN1To1, output); +} + +TF_LITE_MICRO_TEST(QuantizedAddVariousInputShapesInt16) { + const float scales[] = {0.1, 0.05, 0.1}; + const int zero_points[] = {0, 0, 0}; + + constexpr int num_shapes = 4; + constexpr int max_shape_size = 5; + int test_shapes[num_shapes][max_shape_size] = { + {1, 6}, + {2, 2, 3}, + {3, 2, 1, 3}, + {4, 1, 3, 1, 2}, + }; + + const float input1_values[] = {-2.0, 0.2, 0.7, 0.8, 1.1, 2.0}; + const float input2_values[] = {0.1, 0.2, 0.3, 0.5, 1.1, 0.1}; + const float golden_values[] = {-1.9, 0.4, 1.0, 1.3, 2.2, 2.1}; + + constexpr int kOutputDimsCount = 6; + int16_t input1_quantized[kOutputDimsCount]; + int16_t input2_quantized[kOutputDimsCount]; + int16_t golden_quantized[kOutputDimsCount]; + int16_t output[kOutputDimsCount]; + + for (int i = 0; i < num_shapes; i++) { + tflite::testing::TestAddQuantized( + test_shapes[i], input1_values, input1_quantized, scales[0], + zero_points[0], test_shapes[i], input2_values, input2_quantized, + scales[1], zero_points[1], test_shapes[i], golden_values, + golden_quantized, scales[2], zero_points[2], kTfLiteActNone, output); + } +} + +TF_LITE_MICRO_TEST(QuantizedAddWithScalarBroadcastInt16) { + const float input1_values[] = {-2.0, 0.2, 0.7, 0.8, 1.1, 2.0}; + int input2_shape[] = {0}; + const float input2_values[] = {0.1}; + const float golden[] = {-1.9, 0.3, 0.8, 0.9, 1.2, 2.1}; + + constexpr int num_shapes = 4; + constexpr int max_shape_size = 5; + int test_shapes[num_shapes][max_shape_size] = { + {1, 6}, + {2, 2, 3}, + 
{3, 2, 1, 3}, + {4, 1, 3, 1, 2}, + }; + + const float scales[] = {0.1, 0.05, 0.05}; + const int zero_points[] = {0, 0, 0}; + + constexpr int kOutputDimsCount = 6; + int16_t input1_quantized[kOutputDimsCount]; + int16_t input2_quantized[kOutputDimsCount]; + int16_t golden_quantized[kOutputDimsCount]; + int16_t output[kOutputDimsCount]; + + for (int i = 0; i < num_shapes; ++i) { + tflite::testing::TestAddQuantized( + test_shapes[i], input1_values, input1_quantized, scales[0], + zero_points[0], input2_shape, input2_values, input2_quantized, + scales[1], zero_points[1], test_shapes[i], golden, golden_quantized, + scales[2], zero_points[2], kTfLiteActNone, output); + } +} + +TF_LITE_MICRO_TEST(QuantizedAddWithMixedBroadcastInt16) { + const float scales[] = {0.1, 0.05, 0.1}; + const int zero_points[] = {0, 0, 0}; + int16_t input1_quantized[tflite::testing::kBroadcastOutputDimsCount]; + int16_t input2_quantized[tflite::testing::kBroadcastOutputDimsCount]; + int16_t golden_quantized[tflite::testing::kBroadcastOutputDimsCount]; + int16_t output[tflite::testing::kBroadcastOutputDimsCount]; + + for (int i = 0; i < tflite::testing::kBroadcastNumShapes; ++i) { + tflite::testing::TestAddQuantized( + tflite::testing::broadcast_input1_shape, + tflite::testing::broadcast_input1_values, input1_quantized, scales[0], + zero_points[0], tflite::testing::broadcast_input2_shapes[i], + tflite::testing::broadcast_input2_values, input2_quantized, scales[1], + zero_points[1], tflite::testing::broadcast_output_shapes[i], + tflite::testing::broadcast_goldens[i], golden_quantized, scales[2], + zero_points[2], kTfLiteActNone, output); + } +} + +TF_LITE_MICRO_TESTS_END diff --git a/tensorflow/lite/micro/kernels/arc_mli/README.md b/tensorflow/lite/micro/kernels/arc_mli/README.md new file mode 100644 index 0000000..fc77f26 --- /dev/null +++ b/tensorflow/lite/micro/kernels/arc_mli/README.md @@ -0,0 +1,106 @@ +# EmbARC MLI Library Based Optimizations of TensorFlow Lite Micro Kernels for ARC Platforms. 
+ +## Maintainers + +* [dzakhar](https://github.com/dzakhar) +* [JaccovG](https://github.com/JaccovG) +* [gerbauz](https://github.com/gerbauz) + +## Introduction + +This folder contains kernel implementations which use optimized +[embARC MLI Library](https://github.com/foss-for-synopsys-dwc-arc-processors/embarc_mli). +It allows acceleration of inference operations which use int8 (asymmetric +quantization). + +## Usage + +embARC MLI Library is used to speed up execution of some kernels for +asymmetrically quantized layers and can be applied with the option `OPTIMIZED_KERNEL_DIR=arc_mli`. +This means that usual library generation for +ARC specific target implies usage of embARC MLI. + +For example: + +``` +make -f tensorflow/lite/micro/tools/make/Makefile clean +make -f tensorflow/lite/micro/tools/make/Makefile TARGET=arc_emsdp \ +OPTIMIZED_KERNEL_DIR=arc_mli TARGET_ARCH=arc \ +microlite +``` + +In case MLI implementation can’t be used, kernels in this folder fall back to +TFLM reference implementations. For applications which may not benefit from MLI +library, TF Lite Micro library can be generated without these implementations **removing** `OPTIMIZED_KERNEL_DIR=arc_mli` in the command line, which can reduce overall code size: + +``` +make -f tensorflow/lite/micro/tools/make/Makefile clean +make -f tensorflow/lite/micro/tools/make/Makefile TARGET=arc_emsdp \ +TARGET_ARCH=arc \ +microlite +``` +--- +### Optional (experimental features): + +TFLM can be built using [embARC MLI Library 2.0](https://github.com/foss-for-synopsys-dwc-arc-processors/embarc_mli/tree/Release_2.0_EA) as an experimental feature. +To build TFLM using the embARC MLI Library 2.0, add the following tag to the command: +``` +ARC_TAGS=mli20_experimental +``` +In this case, generated projects will be in _mli20_arc_default folder. + +Some configurations may require a custom run-time library specified using the BUILD_LIB_DIR option. 
Please, check MLI Library 2.0 [documentation](https://github.com/foss-for-synopsys-dwc-arc-processors/embarc_mli/tree/Release_2.0_EA#build-configuration-options) for more details. The following option can be added: +``` +BUILD_LIB_DIR= +``` +## Limitations + +Currently, the MLI Library provides optimized implementation only for int8 +(asymmetric) versions of the following kernels: +1. Convolution 2D – Per axis +quantization only, `dilation_ratio==1` +2. Depthwise Convolution 2D – Per axis +quantization only, `dilation_ratio==1` +3. Average Pooling +4. Max Pooling +5. Fully Connected + +## Scratch Buffers and Slicing + +The following information applies only for ARC EM SDP, VPX and other targets with XY or VCCM +memory. embARC MLI uses specific optimizations which assumes node operands are +in XY, VCCM memory and/or DCCM (Data Closely Coupled Memory). As operands might be +quite big and may not fit in available XY or VCCM memory, special slicing logic is +applied which allows kernel calculations to be split into multiple parts. For +this reason, internal static buffers are allocated in these X, Y, VCCM and DCCM memory +banks and used to execute sub-calculations. + +All this is performed automatically and invisible to the user. Half of the DCCM +memory bank and the full XY banks or 3/4 of VCCM bank are occupied for MLI specific needs. +If the user needs space in XY or VCCM memory for other tasks, these arrays can be reduced by +setting specific sizes. 
For this, add the following option to the build command +replacing **** with required values: + +**For EM:** +``` +EXT_CFLAGS="-DSCRATCH_MEM_Z_SIZE= -DSCRATCH_MEM_X_SIZE= -DSCRATCH_MEM_Y_SIZE=" +``` +**For VPX:** +``` +EXT_CFLAGS="-DSCRATCH_MEM_VEC_SIZE=" +``` + +For example, to reduce sizes of arrays placed in DCCM and XCCM to 32k and 8k +respectively, use the following command: + +``` +make -f tensorflow/lite/micro/tools/make/Makefile <...> \ +EXT_CFLAGS="-DSCRATCH_MEM_Z_SIZE=32*1024 -DSCRATCH_MEM_X_SIZE=8*1024" \ +microlite +``` + +## License + +TensorFlow's code is covered by the Apache2 License included in the repository, +and third party dependencies are covered by their respective licenses, in the +third_party folder of this package. diff --git a/tensorflow/lite/micro/kernels/arc_mli/add.cc b/tensorflow/lite/micro/kernels/arc_mli/add.cc new file mode 100644 index 0000000..d6cbddd --- /dev/null +++ b/tensorflow/lite/micro/kernels/arc_mli/add.cc @@ -0,0 +1,424 @@ +/* Copyright 2021 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/ + +#include "tensorflow/lite/kernels/internal/reference/add.h" + +#include +#include + +#include "mli_api.h" // NOLINT +#include "tensorflow/lite/c/builtin_op_data.h" +#include "tensorflow/lite/c/common.h" +#include "tensorflow/lite/kernels/internal/quantization_util.h" +#include "tensorflow/lite/kernels/internal/reference/integer_ops/add.h" +#include "tensorflow/lite/kernels/internal/reference/process_broadcast_shapes.h" +#include "tensorflow/lite/kernels/internal/tensor_ctypes.h" +#include "tensorflow/lite/kernels/kernel_util.h" +#include "tensorflow/lite/kernels/op_macros.h" +#include "tensorflow/lite/micro/kernels/arc_mli/mli_slicers.h" +#include "tensorflow/lite/micro/kernels/arc_mli/mli_tf_utils.h" +#include "tensorflow/lite/micro/kernels/arc_mli/scratch_buf_mgr.h" +#include "tensorflow/lite/micro/kernels/arc_mli/scratch_buffers.h" +#include "tensorflow/lite/micro/kernels/kernel_util.h" +#include "tensorflow/lite/micro/memory_helpers.h" +#include "tensorflow/lite/micro/micro_log.h" + +namespace tflite { + +constexpr int kInputTensor1 = 0; +constexpr int kInputTensor2 = 1; +constexpr int kOutputTensor = 0; + +struct OpData { + bool requires_broadcast; + + // These fields are used in both the general 8-bit -> 8bit quantized path, + // and the special 16-bit -> 16bit quantized path + int input1_shift; + int input2_shift; + int32_t output_activation_min; + int32_t output_activation_max; + + // These fields are used only in the general 8-bit -> 8bit quantized path + int32_t input1_multiplier; + int32_t input2_multiplier; + int32_t output_multiplier; + int output_shift; + int left_shift; + int32_t input1_offset; + int32_t input2_offset; + int32_t output_offset; + + // Used only for float evals: + float output_activation_min_f32; + float output_activation_max_f32; + + // The result of checking if MLI optimized version of tensors can be used. 
+ bool is_mli_applicable; + + // Tensors in MLI format. + mutable ops::micro::MliTensorInterface mli_input1; + mutable ops::micro::MliTensorInterface mli_input2; + mutable ops::micro::MliTensorInterface mli_out; +}; + +TfLiteStatus CalculateOpData(TfLiteContext* context, TfLiteAddParams* params, + const TfLiteTensor* input1, + const TfLiteTensor* input2, TfLiteTensor* output, + OpData* data) { + data->requires_broadcast = !HaveSameShapes(input1, input2); + + if (output->type == kTfLiteUInt8 || output->type == kTfLiteInt8) { + TF_LITE_ENSURE_STATUS(CalculateActivationRangeQuantized( + context, params->activation, output, &data->output_activation_min, + &data->output_activation_max)); + + // MLI 2.0 optimized version only supports int8_t datatype and min/max + // within container range. Broadcasting isn't supported on the primitive + // level (but might be implemented as part of slicing in future) +#ifdef MLI_2_0 // + data->is_mli_applicable = + (input1->type == kTfLiteInt8) && (input2->type == kTfLiteInt8) && + (output->type == kTfLiteInt8) && !data->requires_broadcast && + data->output_activation_min == std::numeric_limits::min() && + data->output_activation_max == std::numeric_limits::max(); +#else + data->is_mli_applicable = false; +#endif + + if (data->is_mli_applicable) { + data->mli_input1 = + ops::micro::MliTensorInterface(static_cast( + context->AllocatePersistentBuffer(context, sizeof(mli_tensor)))); + data->mli_input2 = + ops::micro::MliTensorInterface(static_cast( + context->AllocatePersistentBuffer(context, sizeof(mli_tensor)))); + data->mli_out = ops::micro::MliTensorInterface(static_cast( + context->AllocatePersistentBuffer(context, sizeof(mli_tensor)))); + + ops::micro::ConvertToMliTensor(input1, &data->mli_input1); + ops::micro::ConvertToMliTensor(input2, &data->mli_input2); + ops::micro::ConvertToMliTensor(output, &data->mli_out); + /* Flatten tensors to simplify the process (as we don't support + * broadcasting). 
*/ + data->mli_input1.Shape()[0] = + mli_hlp_count_elem_num(data->mli_input1.MliTensor(), 0); + data->mli_input2.Shape()[0] = + mli_hlp_count_elem_num(data->mli_input2.MliTensor(), 0); + data->mli_out.Shape()[0] = + mli_hlp_count_elem_num(data->mli_out.MliTensor(), 0); + data->mli_input1.MemStride()[0] = data->mli_input2.MemStride()[0] = 1; + data->mli_out.MemStride()[0] = 1; + *data->mli_input1.Rank() = *data->mli_input2.Rank() = 1; + *data->mli_out.Rank() = 1; + } + } else { + data->is_mli_applicable = false; + } + +#if !defined(TF_LITE_STRIP_REFERENCE_IMPL) + if (output->type == kTfLiteInt8 || output->type == kTfLiteInt16) { + // 8bit -> 8bit general quantized path, with general rescalings + data->input1_offset = -input1->params.zero_point; + data->input2_offset = -input2->params.zero_point; + data->output_offset = output->params.zero_point; + data->left_shift = (output->type == kTfLiteInt16) ? 15 : 20; + const double twice_max_input_scale = + 2 * static_cast( + std::max(input1->params.scale, input2->params.scale)); + const double real_input1_multiplier = + static_cast(input1->params.scale) / twice_max_input_scale; + const double real_input2_multiplier = + static_cast(input2->params.scale) / twice_max_input_scale; + const double real_output_multiplier = + twice_max_input_scale / + ((1 << data->left_shift) * static_cast(output->params.scale)); + + QuantizeMultiplierSmallerThanOneExp( + real_input1_multiplier, &data->input1_multiplier, &data->input1_shift); + + QuantizeMultiplierSmallerThanOneExp( + real_input2_multiplier, &data->input2_multiplier, &data->input2_shift); + + QuantizeMultiplierSmallerThanOneExp( + real_output_multiplier, &data->output_multiplier, &data->output_shift); + + TF_LITE_ENSURE_STATUS(CalculateActivationRangeQuantized( + context, params->activation, output, &data->output_activation_min, + &data->output_activation_max)); + } else if (output->type == kTfLiteFloat32) { + CalculateActivationRange(params->activation, + 
&data->output_activation_min_f32, + &data->output_activation_max_f32); +#endif // !defined(TF_LITE_STRIP_REFERENCE_IMPL) + } + + return kTfLiteOk; +} + +TfLiteStatus EvalAdd(TfLiteContext* context, TfLiteNode* node, + TfLiteAddParams* params, const OpData* data, + const TfLiteEvalTensor* input1, + const TfLiteEvalTensor* input2, TfLiteEvalTensor* output) { +#if !defined(TF_LITE_STRIP_REFERENCE_IMPL) + tflite::ArithmeticParams op_params; + SetActivationParams(data->output_activation_min_f32, + data->output_activation_max_f32, &op_params); + if (data->requires_broadcast) { + reference_ops::BroadcastAdd4DSlow( + op_params, tflite::micro::GetTensorShape(input1), + tflite::micro::GetTensorData(input1), + tflite::micro::GetTensorShape(input2), + tflite::micro::GetTensorData(input2), + tflite::micro::GetTensorShape(output), + tflite::micro::GetTensorData(output)); + } else { + reference_ops::Add(op_params, tflite::micro::GetTensorShape(input1), + tflite::micro::GetTensorData(input1), + tflite::micro::GetTensorShape(input2), + tflite::micro::GetTensorData(input2), + tflite::micro::GetTensorShape(output), + tflite::micro::GetTensorData(output)); + } + return kTfLiteOk; +#else + MicroPrintf("Node configuration is not supported by ARC MLI Library."); + return kTfLiteError; +#endif +} + +TfLiteStatus EvalAddQuantized(TfLiteContext* context, TfLiteNode* node, + TfLiteAddParams* params, const OpData* data, + const TfLiteEvalTensor* input1, + const TfLiteEvalTensor* input2, + TfLiteEvalTensor* output) { +#if !defined(TF_LITE_STRIP_REFERENCE_IMPL) + tflite::ArithmeticParams op_params; + op_params.left_shift = data->left_shift; + op_params.input1_offset = data->input1_offset; + op_params.input1_multiplier = data->input1_multiplier; + op_params.input1_shift = data->input1_shift; + op_params.input2_offset = data->input2_offset; + op_params.input2_multiplier = data->input2_multiplier; + op_params.input2_shift = data->input2_shift; + op_params.output_offset = data->output_offset; + 
op_params.output_multiplier = data->output_multiplier; + op_params.output_shift = data->output_shift; + SetActivationParams(data->output_activation_min, data->output_activation_max, + &op_params); + bool need_broadcast = reference_ops::ProcessBroadcastShapes( + tflite::micro::GetTensorShape(input1), + tflite::micro::GetTensorShape(input2), &op_params); + + switch (output->type) { + case kTfLiteInt8: { + if (need_broadcast) { + reference_integer_ops::BroadcastAdd4DSlow( + op_params, tflite::micro::GetTensorShape(input1), + tflite::micro::GetTensorData<int8_t>(input1), + tflite::micro::GetTensorShape(input2), + tflite::micro::GetTensorData<int8_t>(input2), + tflite::micro::GetTensorShape(output), + tflite::micro::GetTensorData<int8_t>(output)); + } else { + reference_integer_ops::Add( + op_params, tflite::micro::GetTensorShape(input1), + tflite::micro::GetTensorData<int8_t>(input1), + tflite::micro::GetTensorShape(input2), + tflite::micro::GetTensorData<int8_t>(input2), + tflite::micro::GetTensorShape(output), + tflite::micro::GetTensorData<int8_t>(output)); + } + break; + } + case kTfLiteInt16: { + if (need_broadcast) { + reference_ops::BroadcastAdd4DSlow( + op_params, tflite::micro::GetTensorShape(input1), + tflite::micro::GetTensorData<int16_t>(input1), + tflite::micro::GetTensorShape(input2), + tflite::micro::GetTensorData<int16_t>(input2), + tflite::micro::GetTensorShape(output), + tflite::micro::GetTensorData<int16_t>(output)); + } else { + reference_ops::Add(op_params, tflite::micro::GetTensorShape(input1), + tflite::micro::GetTensorData<int16_t>(input1), + tflite::micro::GetTensorShape(input2), + tflite::micro::GetTensorData<int16_t>(input2), + tflite::micro::GetTensorShape(output), + tflite::micro::GetTensorData<int16_t>(output), + false); + } + break; + } + default: + MicroPrintf("Type %s (%d) not supported.", + TfLiteTypeGetName(output->type), output->type); + return kTfLiteError; + } + + return kTfLiteOk; +#else + MicroPrintf("Node configuration is not supported by ARC MLI Library."); + return kTfLiteError; +#endif +} + +TfLiteStatus 
EvalMLIAddInt8(TfLiteContext* context, TfLiteNode* node, + TfLiteAddParams* params, const OpData* data, + const TfLiteEvalTensor* input1, + const TfLiteEvalTensor* input2, + TfLiteEvalTensor* output) { +#ifdef MLI_2_0 + TF_LITE_ENSURE(context, data->is_mli_applicable == true); + TF_LITE_ENSURE(context, input1->type == kTfLiteInt8); + TF_LITE_ENSURE(context, input2->type == kTfLiteInt8); + TF_LITE_ENSURE(context, output->type == kTfLiteInt8); + + ops::micro::MliTensorAttachBuffer<int8_t>(input1, &data->mli_input1); + ops::micro::MliTensorAttachBuffer<int8_t>(input2, &data->mli_input2); + ops::micro::MliTensorAttachBuffer<int8_t>(output, &data->mli_out); + + // mli_mov config and tensors for data in fast (local) memory with interface + mli_mov_cfg_t copy_config; + mli_mov_cfg_for_copy(&copy_config); + mli_tensor input1_local_tsr = *data->mli_input1.MliTensor(); + mli_tensor input2_local_tsr = *data->mli_input2.MliTensor(); + mli_tensor out_local_tsr = *data->mli_out.MliTensor(); + ops::micro::MliTensorInterface input1_local(&input1_local_tsr); + ops::micro::MliTensorInterface input2_local(&input2_local_tsr); + ops::micro::MliTensorInterface out_local(&out_local_tsr); + + /* allocate the local buffers, and compute the slice size */ + TF_LITE_ENSURE_STATUS(ops::micro::get_arc_scratch_buffer_for_eltwise_tensors( + context, &input1_local, &input2_local, &out_local)); + TF_LITE_ENSURE(context, *input1_local.Rank() == 1 && + *input2_local.Rank() == 1 && + *out_local.Rank() == 1); + uint32_t min_capacity = *input1_local.DataCapacity(); + min_capacity = std::min(min_capacity, *input2_local.DataCapacity()); + min_capacity = std::min(min_capacity, *out_local.DataCapacity()); + const int slice_dim = 0; + const int slice_size = + min_capacity / mli_hlp_tensor_element_size(out_local.MliTensor()); + + /* is_local indicates that the tensor is already in local memory, + so in that case the original tensor can be used, + and there is no need to copy it to the local tensor*/ + const bool input1_is_local = + 
input1_local.Data<int8_t>() == data->mli_input1.Data<int8_t>(); + const bool input2_is_local = + input2_local.Data<int8_t>() == data->mli_input2.Data<int8_t>(); + const bool out_is_local = + out_local.Data<int8_t>() == data->mli_out.Data<int8_t>(); + + ops::micro::TensorSlicer input1_slice(data->mli_input1.MliTensor(), slice_dim, + slice_size); + ops::micro::TensorSlicer input2_slice(data->mli_input2.MliTensor(), slice_dim, + slice_size); + ops::micro::TensorSlicer out_slice(data->mli_out.MliTensor(), slice_dim, + slice_size); + + mli_tensor* input1_tsr = + input1_is_local ? input1_slice.Sub() : input1_local.MliTensor(); + mli_tensor* input2_tsr = + input2_is_local ? input2_slice.Sub() : input2_local.MliTensor(); + mli_tensor* out_tsr = out_is_local ? out_slice.Sub() : out_local.MliTensor(); + + while (!out_slice.Done()) { + mli_mov_tensor_sync(input1_slice.Sub(), &copy_config, input1_tsr); + mli_mov_tensor_sync(input2_slice.Sub(), &copy_config, input2_tsr); + + mli_krn_eltwise_add_sa8(input1_tsr, input2_tsr, out_tsr); + + mli_mov_tensor_sync(out_tsr, &copy_config, out_slice.Sub()); + input1_slice.Next(); + input2_slice.Next(); + out_slice.Next(); + } + return kTfLiteOk; +#else + return kTfLiteError; +#endif +} + +void* AddInit(TfLiteContext* context, const char* buffer, size_t length) { + TFLITE_DCHECK(context->AllocatePersistentBuffer != nullptr); + return context->AllocatePersistentBuffer(context, sizeof(OpData)); +} + +TfLiteStatus AddPrepare(TfLiteContext* context, TfLiteNode* node) { + TFLITE_DCHECK(node->user_data != nullptr); + TFLITE_DCHECK(node->builtin_data != nullptr); + + MicroContext* micro_context = GetMicroContext(context); + + TfLiteTensor* input1 = + micro_context->AllocateTempInputTensor(node, kInputTensor1); + TF_LITE_ENSURE(context, input1 != nullptr); + TfLiteTensor* input2 = + micro_context->AllocateTempInputTensor(node, kInputTensor2); + TF_LITE_ENSURE(context, input2 != nullptr); + TfLiteTensor* output = + micro_context->AllocateTempOutputTensor(node, kOutputTensor); + TF_LITE_ENSURE(context, output != nullptr); + + 
OpData* data = static_cast<OpData*>(node->user_data); + auto* params = reinterpret_cast<TfLiteAddParams*>(node->builtin_data); + + TF_LITE_ENSURE_STATUS( + CalculateOpData(context, params, input1, input2, output, data)); + + micro_context->DeallocateTempTfLiteTensor(input1); + micro_context->DeallocateTempTfLiteTensor(input2); + micro_context->DeallocateTempTfLiteTensor(output); + + return kTfLiteOk; +} + +TfLiteStatus AddEval(TfLiteContext* context, TfLiteNode* node) { + TfLiteStatus ret_val = kTfLiteOk; + auto* params = reinterpret_cast<TfLiteAddParams*>(node->builtin_data); + + TFLITE_DCHECK(node->user_data != nullptr); + const OpData* data = static_cast<const OpData*>(node->user_data); + + const TfLiteEvalTensor* input1 = + tflite::micro::GetEvalInput(context, node, kInputTensor1); + const TfLiteEvalTensor* input2 = + tflite::micro::GetEvalInput(context, node, kInputTensor2); + TfLiteEvalTensor* output = + tflite::micro::GetEvalOutput(context, node, kOutputTensor); + if (data->is_mli_applicable) { + ret_val = + EvalMLIAddInt8(context, node, params, data, input1, input2, output); + } else if (output->type == kTfLiteFloat32) { + ret_val = EvalAdd(context, node, params, data, input1, input2, output); + } else if (output->type == kTfLiteInt8 || output->type == kTfLiteInt16) { + ret_val = + EvalAddQuantized(context, node, params, data, input1, input2, output); + } else { + MicroPrintf("Type %s (%d) not supported.", TfLiteTypeGetName(output->type), + output->type); + ret_val = kTfLiteError; + } + + return ret_val; +} + +TFLMRegistration Register_ADD() { + return tflite::micro::RegisterOp(AddInit, AddPrepare, AddEval); +} + +} // namespace tflite diff --git a/tensorflow/lite/micro/kernels/arc_mli/conv.cc b/tensorflow/lite/micro/kernels/arc_mli/conv.cc new file mode 100644 index 0000000..41d2c53 --- /dev/null +++ b/tensorflow/lite/micro/kernels/arc_mli/conv.cc @@ -0,0 +1,711 @@ +/* Copyright 2021 The TensorFlow Authors. All Rights Reserved. 
+ +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#include "tensorflow/lite/kernels/internal/reference/conv.h" + +#include "mli_api.h" // NOLINT +#include "tensorflow/lite/c/builtin_op_data.h" +#include "tensorflow/lite/c/common.h" +#include "tensorflow/lite/kernels/internal/common.h" +#include "tensorflow/lite/kernels/internal/quantization_util.h" +#include "tensorflow/lite/kernels/internal/reference/integer_ops/conv.h" +#include "tensorflow/lite/kernels/internal/tensor_ctypes.h" +#include "tensorflow/lite/kernels/kernel_util.h" +#include "tensorflow/lite/kernels/padding.h" +#include "tensorflow/lite/micro/kernels/arc_mli/mli_function_specializations.h" +#include "tensorflow/lite/micro/kernels/arc_mli/mli_slicers.h" +#include "tensorflow/lite/micro/kernels/arc_mli/mli_tf_utils.h" +#include "tensorflow/lite/micro/kernels/arc_mli/scratch_buf_mgr.h" +#include "tensorflow/lite/micro/kernels/arc_mli/scratch_buffers.h" +#include "tensorflow/lite/micro/kernels/kernel_util.h" +#include "tensorflow/lite/micro/micro_log.h" + +namespace tflite { +namespace { + +constexpr int kInputTensor = 0; +constexpr int kFilterTensor = 1; +constexpr int kBiasTensor = 2; +constexpr int kOutputTensor = 0; + +// Conv is quantized along dimension 0: +// https://www.tensorflow.org/lite/performance/quantization_spec +#if defined(MLI_2_0) && !defined(MLI_2_0_KRNL_TEST) +constexpr int kConvQuantizedDimension = 3; +#else +constexpr int 
kConvQuantizedDimension = 0; +#endif + +// This file has 2 implementation of Conv. + +struct OpData { + TfLitePaddingValues padding; + + // Cached tensor zero point values for quantized operations. + int32_t input_zero_point; + int32_t filter_zero_point; + int32_t output_zero_point; + + // The scaling factor from input to output (aka the 'real multiplier') can + // be represented as a fixed point multiplier plus a left shift. + int32_t output_multiplier; + int output_shift; + + // Per channel output multiplier and shift. + int32_t* per_channel_output_multiplier; + int32_t* per_channel_output_shift; +#ifdef MLI_2_0 + int8_t* per_channel_scale_frac_bits; +#endif + + // The range of the fused activation layer. For example for kNone and + // uint8_t these would be 0 and 255. + int32_t output_activation_min; + int32_t output_activation_max; + + // The result of checking if MLI optimized version of tensors can be used. + bool is_mli_applicable; + + // Tensors in MLI format. + mutable ops::micro::MliTensorInterface mli_in; + mutable ops::micro::MliTensorInterface mli_weights; + mutable ops::micro::MliTensorInterface mli_bias; + mutable ops::micro::MliTensorInterface mli_out; + mli_conv2d_cfg* cfg; + + // Pointer to the mli convolution function. 
+ conv_func_ptr p_mli_krn_conv2d_sa8_sa8_sa32; +}; + +#if !defined(TF_LITE_STRIP_REFERENCE_IMPL) +inline PaddingType RuntimePaddingType(TfLitePadding padding) { + switch (padding) { + case TfLitePadding::kTfLitePaddingSame: + return PaddingType::kSame; + case TfLitePadding::kTfLitePaddingValid: + return PaddingType::kValid; + case TfLitePadding::kTfLitePaddingUnknown: + default: + return PaddingType::kNone; + } +} +#endif + +bool IsMliApplicable(TfLiteContext* context, const TfLiteTensor* input, + const TfLiteTensor* filter, const TfLiteTensor* bias, + const TfLiteConvParams* params) { + const auto* affine_quantization = + reinterpret_cast<TfLiteAffineQuantization*>(filter->quantization.params); + // MLI optimized version only supports int8_t datatype, dilation factor of 1 + // and per-axis quantization of weights (no broadcasting/per-tensor) + bool ret_val = (filter->type == kTfLiteInt8) && + (input->type == kTfLiteInt8) && (bias->type == kTfLiteInt32) && + (params->dilation_width_factor == 1) && + (params->dilation_height_factor == 1) && + (affine_quantization->scale->size == + filter->dims->data[kConvQuantizedDimension]); + return ret_val; +} + +TfLiteStatus CalculateOpData(TfLiteContext* context, TfLiteNode* node, + const TfLiteConvParams* params, int width, + int height, int filter_width, int filter_height, + int out_width, int out_height, + const TfLiteType data_type, OpData* data) { + bool has_bias = node->inputs->size == 3; + // Check number of inputs/outputs + TF_LITE_ENSURE(context, has_bias || node->inputs->size == 2); + TF_LITE_ENSURE_EQ(context, node->outputs->size, 1); + + // Matching GetWindowedOutputSize in TensorFlow. 
+ auto padding = params->padding; + data->padding = ComputePaddingHeightWidth( + params->stride_height, params->stride_width, + params->dilation_height_factor, params->dilation_width_factor, height, + width, filter_height, filter_width, padding, &out_height, &out_width); + // Note that quantized inference requires that all tensors have their + // parameters set. This is usually done during quantized training. +#if !defined(TF_LITE_STRIP_REFERENCE_IMPL) + MicroContext* micro_context = GetMicroContext(context); + TfLiteTensor* input = + micro_context->AllocateTempInputTensor(node, kInputTensor); + TfLiteTensor* filter = + micro_context->AllocateTempInputTensor(node, kFilterTensor); + TfLiteTensor* bias = + micro_context->AllocateTempInputTensor(node, kBiasTensor); + TfLiteTensor* output = + micro_context->AllocateTempOutputTensor(node, kOutputTensor); + + if (data_type != kTfLiteFloat32 && !data->is_mli_applicable) { + int output_channels = filter->dims->data[kConvQuantizedDimension]; + + TF_LITE_ENSURE_STATUS(tflite::PopulateConvolutionQuantizationParams( + context, input, filter, bias, output, params->activation, + &data->output_multiplier, &data->output_shift, + &data->output_activation_min, &data->output_activation_max, + data->per_channel_output_multiplier, + reinterpret_cast<int*>(data->per_channel_output_shift), + output_channels)); + } + + micro_context->DeallocateTempTfLiteTensor(input); + micro_context->DeallocateTempTfLiteTensor(filter); + micro_context->DeallocateTempTfLiteTensor(bias); + micro_context->DeallocateTempTfLiteTensor(output); +#endif + return kTfLiteOk; +} +void* Init(TfLiteContext* context, const char* buffer, size_t length) { + TFLITE_DCHECK(context->AllocatePersistentBuffer != nullptr); + return context->AllocatePersistentBuffer(context, sizeof(OpData)); +} + +TfLiteStatus Prepare(TfLiteContext* context, TfLiteNode* node) { + TFLITE_DCHECK(node->user_data != nullptr); + TFLITE_DCHECK(node->builtin_data != nullptr); + + OpData* data = 
static_cast<OpData*>(node->user_data); + const auto params = static_cast<const TfLiteConvParams*>(node->builtin_data); + + MicroContext* micro_context = GetMicroContext(context); + + TfLiteTensor* output = + micro_context->AllocateTempOutputTensor(node, kOutputTensor); + TfLiteTensor* input = + micro_context->AllocateTempInputTensor(node, kInputTensor); + TfLiteTensor* filter = + micro_context->AllocateTempInputTensor(node, kFilterTensor); + TfLiteTensor* bias = + micro_context->AllocateTempInputTensor(node, kBiasTensor); + + int input_width = input->dims->data[2]; + int input_height = input->dims->data[1]; +#if defined(MLI_2_0) && !defined(MLI_2_0_KRNL_TEST) + int filter_width = filter->dims->data[1]; + int filter_height = filter->dims->data[0]; +#else + int filter_width = filter->dims->data[2]; + int filter_height = filter->dims->data[1]; +#endif + int output_width = output->dims->data[2]; + int output_height = output->dims->data[1]; + + // Dynamically allocate per-channel quantization parameters. + const int num_channels = filter->dims->data[kConvQuantizedDimension]; + data->per_channel_output_multiplier = + reinterpret_cast<int32_t*>(context->AllocatePersistentBuffer( + context, num_channels * sizeof(int32_t))); + data->per_channel_output_shift = + reinterpret_cast<int32_t*>(context->AllocatePersistentBuffer( + context, num_channels * sizeof(int32_t))); + + data->is_mli_applicable = + IsMliApplicable(context, input, filter, bias, params); + + // All per-channel quantized tensors need valid zero point and scale arrays. 
+ if (input->type == kTfLiteInt8) { + TF_LITE_ENSURE_EQ(context, filter->quantization.type, + kTfLiteAffineQuantization); + + const auto* affine_quantization = + static_cast(filter->quantization.params); + TF_LITE_ENSURE(context, affine_quantization); + TF_LITE_ENSURE(context, affine_quantization->scale); + TF_LITE_ENSURE(context, affine_quantization->zero_point); + + TF_LITE_ENSURE(context, + affine_quantization->scale->size == 1 || + affine_quantization->scale->size == + filter->dims->data[kConvQuantizedDimension]); + TF_LITE_ENSURE_EQ(context, affine_quantization->scale->size, + affine_quantization->zero_point->size); + } + + TF_LITE_ENSURE_STATUS(CalculateOpData( + context, node, params, input_width, input_height, filter_width, + filter_height, output_width, output_height, input->type, data)); + + data->input_zero_point = input->params.zero_point; + data->filter_zero_point = filter->params.zero_point; + data->output_zero_point = output->params.zero_point; + + if (data->is_mli_applicable) { + data->mli_in = ops::micro::MliTensorInterface(static_cast( + context->AllocatePersistentBuffer(context, sizeof(mli_tensor)))); + data->mli_weights = ops::micro::MliTensorInterface(static_cast( + context->AllocatePersistentBuffer(context, sizeof(mli_tensor)))); + data->mli_bias = ops::micro::MliTensorInterface(static_cast( + context->AllocatePersistentBuffer(context, sizeof(mli_tensor)))); + data->mli_out = ops::micro::MliTensorInterface(static_cast( + context->AllocatePersistentBuffer(context, sizeof(mli_tensor)))); + data->cfg = static_cast( + context->AllocatePersistentBuffer(context, sizeof(mli_conv2d_cfg))); + +#ifdef MLI_2_0 + data->per_channel_scale_frac_bits = + static_cast(context->AllocatePersistentBuffer( + context, 2 * num_channels * sizeof(int16_t))); +#endif + + // Reuse space allocated for OpData parameters. 
+#ifdef MLI_2_0 + *data->mli_weights.Scale() = + reinterpret_cast(data->per_channel_output_multiplier); + *data->mli_bias.Scale() = + reinterpret_cast(data->per_channel_output_multiplier) + + num_channels; +#else + *data->mli_weights.Scale() = + static_cast(data->per_channel_output_multiplier); + *data->mli_bias.Scale() = + static_cast(data->per_channel_output_shift); +#endif + +#ifdef MLI_2_0 + *data->mli_weights.ZeroPoint() = + reinterpret_cast(data->per_channel_output_shift); + *data->mli_bias.ZeroPoint() = + reinterpret_cast(data->per_channel_output_shift) + + num_channels; +#else + *data->mli_weights.ZeroPoint() = + reinterpret_cast(&data->filter_zero_point); + *data->mli_bias.ZeroPoint() = + reinterpret_cast(&data->filter_zero_point) + sizeof(int16_t); +#endif + +#ifdef MLI_2_0 + *data->mli_weights.ScaleFracBits() = + reinterpret_cast(data->per_channel_scale_frac_bits); + *data->mli_bias.ScaleFracBits() = + reinterpret_cast(data->per_channel_scale_frac_bits) + + num_channels; +#endif + + ops::micro::ConvertToMliTensor(input, &data->mli_in); + ops::micro::ConvertToMliTensorPerChannel(filter, &data->mli_weights, + /* is_bias_tensor = */ false); + ops::micro::ConvertToMliTensorPerChannel(bias, &data->mli_bias, + /* is_bias_tensor = */ true); +#ifdef MLI_2_0 + ops::micro::AdjustBiasTensor(&data->mli_bias, &data->mli_in, + &data->mli_weights); +#endif + ops::micro::ConvertToMliTensor(output, &data->mli_out); + +#ifdef MLI_2_0 + // Choose convolution mli specialized function. 
+ data->p_mli_krn_conv2d_sa8_sa8_sa32 = + mli_krn_conv2d_hwcn(data->mli_weights.MliTensor()); +#else + data->p_mli_krn_conv2d_sa8_sa8_sa32 = + mli_krn_conv2d_hwcn(data->mli_weights.MliTensor(), data->cfg); +#endif + +#ifdef MLI_2_0 + data->cfg->dilation_width = 1; + data->cfg->dilation_height = 1; +#endif + + if (data->output_activation_min == -128 && + data->output_activation_max == 127) { + data->cfg->relu.type = MLI_RELU_NONE; + } else if (params->activation == kTfLiteActRelu) { + data->cfg->relu.type = MLI_RELU_GEN; + } else if (params->activation == kTfLiteActRelu6) { + data->cfg->relu.type = MLI_RELU_6; + } else if (params->activation == kTfLiteActReluN1To1) { + data->cfg->relu.type = MLI_RELU_1; + } else { + data->cfg->relu.type = MLI_RELU_NONE; + } + data->cfg->stride_width = params->stride_width; + data->cfg->stride_height = params->stride_height; + if (params->padding == kTfLitePaddingValid) { + data->cfg->padding_left = 0; + data->cfg->padding_right = 0; + data->cfg->padding_top = 0; + data->cfg->padding_bottom = 0; + } else { + data->cfg->padding_left = data->padding.width; + data->cfg->padding_right = + data->padding.width + data->padding.width_offset; + data->cfg->padding_top = data->padding.height; + data->cfg->padding_bottom = + data->padding.height + data->padding.height_offset; + } + } + + micro_context->DeallocateTempTfLiteTensor(output); + micro_context->DeallocateTempTfLiteTensor(input); + micro_context->DeallocateTempTfLiteTensor(filter); + micro_context->DeallocateTempTfLiteTensor(bias); + return kTfLiteOk; +} + +TfLiteStatus EvalMliQuantizedPerChannel( + TfLiteContext* context, TfLiteNode* node, TfLiteConvParams* params, + const OpData& data, const TfLiteEvalTensor* input, + const TfLiteEvalTensor* filter, const TfLiteEvalTensor* bias, + TfLiteEvalTensor* output) { + // Run Conv MLI kernel + // MLI optimized version only supports int8_t dataype and dilation factor of 1 + if (data.is_mli_applicable) { + // Copy configuration data from 
external to local memory + mli_conv2d_cfg cfg_local = *data.cfg; + + ops::micro::MliTensorAttachBuffer(input, &data.mli_in); + ops::micro::MliTensorAttachBuffer(filter, &data.mli_weights); + ops::micro::MliTensorAttachBuffer(bias, &data.mli_bias); + ops::micro::MliTensorAttachBuffer(output, &data.mli_out); + + // for height slicing + const int height_dimension = 1; + int in_slice_height = 0; + int out_slice_height = 0; + const int kernel_height = + static_cast(data.mli_weights.Shape()[KRNL_H_DIM_HWC]); + const int overlap = kernel_height - cfg_local.stride_height; + +// for weight slicing (on output channels) +#if defined(MLI_2_0) && !defined(MLI_2_0_KRNL_TEST) + // HWCN layout for weights, output channel dimension is the first dimension. + const int weight_out_ch_dimension = 3; +#else + // NHWC layout for weights, output channel dimension is the first dimension. + const int weight_out_ch_dimension = 0; +#endif + // bias has only 1 dimension + const int bias_out_ch_dimension = 0; + int slice_channels = + static_cast(data.mli_weights.Shape()[weight_out_ch_dimension]); + // Batch-Height-Width-Channel layout means last dimension is output + // channels. 
+ const int out_tensor_ch_dimension = 3; + + // Tensors for data in fast (local) memory and config to copy data from + // external to local memory + mli_tensor weights_local = *data.mli_weights.MliTensor(); + mli_tensor bias_local = *data.mli_bias.MliTensor(); + mli_tensor in_local = *data.mli_in.MliTensor(); + mli_tensor out_local = *data.mli_out.MliTensor(); + + ops::micro::MliTensorInterface weights_local_interface(&weights_local); + ops::micro::MliTensorInterface bias_local_interface(&bias_local); + ops::micro::MliTensorInterface in_local_interface(&in_local); + ops::micro::MliTensorInterface out_local_interface(&out_local); + + mli_mov_cfg_t copy_config; + mli_mov_cfg_for_copy(©_config); + + TF_LITE_ENSURE_STATUS(ops::micro::get_arc_scratch_buffer_for_conv_tensors( + context, &in_local_interface, &weights_local_interface, + &bias_local_interface, &out_local_interface)); + TF_LITE_ENSURE_STATUS(ops::micro::arc_scratch_buffer_calc_slice_size_io( + &in_local_interface, &out_local_interface, kernel_height, + cfg_local.stride_height, cfg_local.padding_top, + cfg_local.padding_bottom, &in_slice_height, &out_slice_height)); + TF_LITE_ENSURE_STATUS( + ops::micro::arc_scratch_buffer_calc_slice_size_weights( + &weights_local_interface, &bias_local_interface, + weight_out_ch_dimension, &slice_channels)); + + /* is_local indicates that the tensor is already in local memory, + so in that case the original tensor can be used, + and there is no need to copy it to the local tensor*/ + const bool in_is_local = + in_local_interface.Data() == data.mli_in.Data(); + const bool out_is_local = + out_local_interface.Data() == data.mli_out.Data(); + const bool b_is_local = + bias_local_interface.Data() == data.mli_bias.Data(); +#ifndef MLI_2_0_KRNL_TEST + const bool w_is_local = weights_local_interface.Data() == + data.mli_weights.Data(); +#endif + +#if defined(MLI_2_0) && !defined(MLI_2_0_KRNL_TEST) + ops::micro::TensorSlicer w_slice(data.mli_weights.MliTensor(), + 
weight_out_ch_dimension, slice_channels, 0, + 0, 0, true); +#else + ops::micro::TensorSlicer w_slice(data.mli_weights.MliTensor(), + weight_out_ch_dimension, slice_channels); +#endif + ops::micro::TensorSlicer b_slice(data.mli_bias.MliTensor(), + bias_out_ch_dimension, slice_channels); + ops::micro::TensorSlicer out_ch_slice(data.mli_out.MliTensor(), + out_tensor_ch_dimension, + slice_channels, 0, 0, 0, true); + +#ifdef MLI_2_0_KRNL_TEST + mli_tensor* w_ptr = &weights_local; +#else + mli_tensor* w_ptr = w_is_local ? w_slice.Sub() : &weights_local; +#endif + mli_tensor* b_ptr = b_is_local ? b_slice.Sub() : &bias_local; + + void* input_buffer_ptr = NULL; + uint32_t input_buffer_size = 0; + + while (!w_slice.Done()) { +#ifndef MLI_2_0_KRNL_TEST + mli_mov_tensor_sync(w_slice.Sub(), ©_config, w_ptr); +#endif + mli_mov_tensor_sync(b_slice.Sub(), ©_config, b_ptr); + + /* mli_in tensor contains batches of HWC tensors. so it is a 4 dimensional + tensor. because the mli kernel will process one HWC tensor at a time, the + 4 dimensional tensor needs to be sliced into nBatch 3 dimensional tensors. + on top of that there could be a need to also slice in the Height + dimension. for that the sliceHeight has been calculated. The tensor slicer + is configured that it will completely slice the nBatch dimension (0) and + slice the height dimension (1) in chunks of 'sliceHeight' */ + ops::micro::TensorSlicer in_slice( + data.mli_in.MliTensor(), height_dimension, in_slice_height, + cfg_local.padding_top, cfg_local.padding_bottom, overlap); + + /* output tensor is already sliced in the output channel dimension. + out_ch_slice.Sub() is the tensor for the amount of output channels of this + iteration of the weight slice loop. This tensor needs to be further + sliced over the batch and height dimension. 
*/ + ops::micro::TensorSlicer out_slice(out_ch_slice.Sub(), height_dimension, + out_slice_height); + + /* setup the pointers to the local or remote tensor to make the code + * inside the loop easier. */ + mli_tensor* in_ptr = in_is_local ? in_slice.Sub() : &in_local; + mli_tensor* out_ptr = out_is_local ? out_slice.Sub() : &out_local; + +#ifdef MLI_2_0_KRNL_TEST + /* Permute weights tensor to the HWCN layout */ + // Checking conditions here to prevent usage non-contiguous buffer memory. + if (data.mli_out.Shape()[out_tensor_ch_dimension] != + out_slice.Sub()->shape[FMAP_C_DIM_HWC] || + data.mli_out.Shape()[height_dimension] != + out_slice.Sub()->shape[FMAP_H_DIM_HWC]) { + MicroPrintf("Slicing is not supported with real-time permutation."); + return kTfLiteError; + } + mli_permute_cfg permute_cfg = {{1, 2, 3, 0}}; + ops::micro::permute_weights(data.mli_weights.MliTensor(), &permute_cfg, + w_ptr, &out_ptr->data); +#endif + + while (!out_slice.Done()) { + if (!out_is_local) { + ops::micro::PrepareLocalTensor(out_slice.Sub(), &out_local); + ops::micro::PrepareLocalTensor(in_slice.Sub(), &in_local); + } + + TF_LITE_ENSURE(context, !in_slice.Done()); + cfg_local.padding_top = in_slice.GetPaddingPre(); + cfg_local.padding_bottom = in_slice.GetPaddingPost(); + + // if same input copy as previous iteration, skip the copy of input +#ifdef MLI_2_0 + if ((in_slice.Sub()->data.mem.pi8 != input_buffer_ptr) || + (mli_hlp_count_elem_num(in_slice.Sub(), 0) != input_buffer_size)) { + mli_mov_tensor_sync(in_slice.Sub(), ©_config, in_ptr); + input_buffer_ptr = in_slice.Sub()->data.mem.pi8; + input_buffer_size = mli_hlp_count_elem_num(in_slice.Sub(), 0); + } + + data.p_mli_krn_conv2d_sa8_sa8_sa32(in_ptr, w_ptr, b_ptr, &cfg_local, + out_ptr); +#else + if ((in_slice.Sub()->data != input_buffer_ptr) || + (mli_hlp_count_elem_num(in_slice.Sub(), 0) != input_buffer_size)) { + mli_mov_tensor_sync(in_slice.Sub(), ©_config, in_ptr); + input_buffer_ptr = in_slice.Sub()->data; + input_buffer_size 
= mli_hlp_count_elem_num(in_slice.Sub(), 0); + } + data.p_mli_krn_conv2d_sa8_sa8_sa32(in_ptr, w_ptr, b_ptr, &cfg_local, + out_ptr); +#endif + mli_mov_tensor_sync(out_ptr, ©_config, out_slice.Sub()); + + in_slice.Next(); + out_slice.Next(); + } + w_slice.Next(); + b_slice.Next(); + out_ch_slice.Next(); + TF_LITE_ENSURE(context, in_slice.Done()); + } + } + return kTfLiteOk; +} + +void EvalQuantizedPerChannel(TfLiteContext* context, TfLiteNode* node, + TfLiteConvParams* params, const OpData& data, + const TfLiteEvalTensor* input, + const TfLiteEvalTensor* filter, + const TfLiteEvalTensor* bias, + TfLiteEvalTensor* output, + TfLiteEvalTensor* im2col) { +#if !defined(TF_LITE_STRIP_REFERENCE_IMPL) + ConvParams op_params; + op_params.input_offset = -data.input_zero_point; + op_params.output_offset = data.output_zero_point; + op_params.stride_height = params->stride_height; + op_params.stride_width = params->stride_width; + op_params.dilation_height_factor = params->dilation_height_factor; + op_params.dilation_width_factor = params->dilation_width_factor; + op_params.padding_values.height = data.padding.height; + op_params.padding_values.width = data.padding.width; + op_params.quantized_activation_min = data.output_activation_min; + op_params.quantized_activation_max = data.output_activation_max; + + reference_integer_ops::ConvPerChannel( + op_params, data.per_channel_output_multiplier, + data.per_channel_output_shift, tflite::micro::GetTensorShape(input), + tflite::micro::GetTensorData(input), + tflite::micro::GetTensorShape(filter), + tflite::micro::GetTensorData(filter), + tflite::micro::GetTensorShape(bias), + tflite::micro::GetTensorData(bias), + tflite::micro::GetTensorShape(output), + tflite::micro::GetTensorData(output)); +#else + MicroPrintf("Node configuration is not supported by ARC MLI Library."); +#endif +} + +void EvalQuantizedPerChannelInt16(TfLiteContext* context, TfLiteNode* node, + TfLiteConvParams* params, const OpData& data, + const TfLiteEvalTensor* 
input, + const TfLiteEvalTensor* filter, + const TfLiteEvalTensor* bias, + TfLiteEvalTensor* output) { +#if !defined(TF_LITE_STRIP_REFERENCE_IMPL) + ConvParams op_params; + op_params.input_offset = -data.input_zero_point; + op_params.output_offset = data.output_zero_point; + op_params.stride_height = params->stride_height; + op_params.stride_width = params->stride_width; + op_params.dilation_height_factor = params->dilation_height_factor; + op_params.dilation_width_factor = params->dilation_width_factor; + op_params.padding_values.height = data.padding.height; + op_params.padding_values.width = data.padding.width; + op_params.quantized_activation_min = data.output_activation_min; + op_params.quantized_activation_max = data.output_activation_max; + + reference_integer_ops::ConvPerChannel( + op_params, data.per_channel_output_multiplier, + data.per_channel_output_shift, tflite::micro::GetTensorShape(input), + tflite::micro::GetTensorData(input), + tflite::micro::GetTensorShape(filter), + tflite::micro::GetTensorData(filter), + tflite::micro::GetTensorShape(bias), + tflite::micro::GetTensorData(bias), + tflite::micro::GetTensorShape(output), + tflite::micro::GetTensorData(output)); +#else + MicroPrintf("Node configuration is not supported by ARC MLI Library."); +#endif +} + +void EvalFloat(TfLiteContext* context, TfLiteNode* node, + TfLiteConvParams* params, const OpData& data, + const TfLiteEvalTensor* input, const TfLiteEvalTensor* filter, + const TfLiteEvalTensor* bias, TfLiteEvalTensor* im2col, + TfLiteEvalTensor* hwcn_weights, TfLiteEvalTensor* output) { +#if !defined(TF_LITE_STRIP_REFERENCE_IMPL) + float output_activation_min, output_activation_max; + CalculateActivationRange(params->activation, &output_activation_min, + &output_activation_max); + ConvParams op_params; + op_params.padding_type = RuntimePaddingType(params->padding); + op_params.padding_values.width = data.padding.width; + op_params.padding_values.height = data.padding.height; + 
op_params.stride_width = params->stride_width; + op_params.stride_height = params->stride_height; + op_params.dilation_width_factor = params->dilation_width_factor; + op_params.dilation_height_factor = params->dilation_height_factor; + op_params.float_activation_min = output_activation_min; + op_params.float_activation_max = output_activation_max; + + reference_ops::Conv(op_params, tflite::micro::GetTensorShape(input), + tflite::micro::GetTensorData(input), + tflite::micro::GetTensorShape(filter), + tflite::micro::GetTensorData(filter), + tflite::micro::GetTensorShape(bias), + tflite::micro::GetTensorData(bias), + tflite::micro::GetTensorShape(output), + tflite::micro::GetTensorData(output), + tflite::micro::GetTensorShape(im2col), + tflite::micro::GetTensorData(im2col)); +#else + MicroPrintf("Type %s (%d) is not supported by ARC MLI Library.", + TfLiteTypeGetName(input->type), input->type); +#endif +} + +TfLiteStatus Eval(TfLiteContext* context, TfLiteNode* node) { + auto* params = reinterpret_cast(node->builtin_data); + + TfLiteEvalTensor* output = + tflite::micro::GetEvalOutput(context, node, kOutputTensor); + const TfLiteEvalTensor* input = + tflite::micro::GetEvalInput(context, node, kInputTensor); + const TfLiteEvalTensor* filter = + tflite::micro::GetEvalInput(context, node, kFilterTensor); + const TfLiteEvalTensor* bias = + tflite::micro::GetEvalInput(context, node, kBiasTensor); + + TFLITE_DCHECK(node->user_data != nullptr); + const OpData& data = *(static_cast(node->user_data)); + + TF_LITE_ENSURE_EQ(context, input->type, output->type); + TF_LITE_ENSURE_MSG( + context, + input->type == filter->type || + (input->type == kTfLiteInt16 && filter->type == kTfLiteInt8), + "Hybrid models are not supported on TFLite Micro."); + + switch (input->type) { // Already know in/out types are same. 
+ case kTfLiteFloat32: + EvalFloat(context, node, params, data, input, filter, bias, nullptr, + nullptr, output); + break; + case kTfLiteInt8: + if (data.is_mli_applicable) { + EvalMliQuantizedPerChannel(context, node, params, data, input, filter, + bias, output); + } else { + EvalQuantizedPerChannel(context, node, params, data, input, filter, + bias, output, nullptr); + } + break; + case kTfLiteInt16: + EvalQuantizedPerChannelInt16(context, node, params, data, input, filter, + bias, output); + break; + default: + MicroPrintf("Type %s (%d) not supported.", TfLiteTypeGetName(input->type), + input->type); + return kTfLiteError; + } + return kTfLiteOk; +} + +} // namespace + +TFLMRegistration Register_CONV_2D() { + return tflite::micro::RegisterOp(Init, Prepare, Eval); +} + +} // namespace tflite diff --git a/tensorflow/lite/micro/kernels/arc_mli/depthwise_conv.cc b/tensorflow/lite/micro/kernels/arc_mli/depthwise_conv.cc new file mode 100644 index 0000000..1fa1d19 --- /dev/null +++ b/tensorflow/lite/micro/kernels/arc_mli/depthwise_conv.cc @@ -0,0 +1,677 @@ +/* Copyright 2021 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/ + +#include "tensorflow/lite/kernels/internal/reference/integer_ops/depthwise_conv.h" + +#include "mli_api.h" // NOLINT +#include "tensorflow/lite/c/builtin_op_data.h" +#include "tensorflow/lite/c/common.h" +#include "tensorflow/lite/kernels/internal/common.h" +#include "tensorflow/lite/kernels/internal/quantization_util.h" +#include "tensorflow/lite/kernels/internal/reference/depthwiseconv_float.h" +#include "tensorflow/lite/kernels/internal/reference/depthwiseconv_uint8.h" +#include "tensorflow/lite/kernels/internal/tensor_ctypes.h" +#include "tensorflow/lite/kernels/kernel_util.h" +#include "tensorflow/lite/kernels/padding.h" +#include "tensorflow/lite/micro/kernels/arc_mli/mli_function_specializations.h" +#include "tensorflow/lite/micro/kernels/arc_mli/mli_slicers.h" +#include "tensorflow/lite/micro/kernels/arc_mli/mli_tf_utils.h" +#include "tensorflow/lite/micro/kernels/arc_mli/scratch_buf_mgr.h" +#include "tensorflow/lite/micro/kernels/arc_mli/scratch_buffers.h" +#include "tensorflow/lite/micro/kernels/kernel_util.h" +#include "tensorflow/lite/micro/micro_log.h" + +namespace tflite { +namespace { + +constexpr int kInputTensor = 0; +constexpr int kFilterTensor = 1; +constexpr int kBiasTensor = 2; +constexpr int kOutputTensor = 0; + +// Depthwise conv is quantized along dimension 3: +// https://www.tensorflow.org/lite/performance/quantization_spec +constexpr int kDepthwiseConvQuantizedDimension = 3; + +struct OpData { + TfLitePaddingValues padding; + + // Cached tensor zero point values for quantized operations. + int32_t input_zero_point; + int32_t filter_zero_point; + int32_t output_zero_point; + + // The scaling factor from input to output (aka the 'real multiplier') can + // be represented as a fixed point multiplier plus a left shift. + int32_t output_multiplier; + int output_shift; + + // Per channel output multiplier and shift. 
+ int32_t* per_channel_output_multiplier; + int32_t* per_channel_output_shift; +#ifdef MLI_2_0 + int8_t* per_channel_scale_frac_bits; +#endif + + // The range of the fused activation layer. For example for kNone and + // uint8_t these would be 0 and 255. + int32_t output_activation_min; + int32_t output_activation_max; + + // The result of checking if MLI optimized version of tensors can be used. + bool is_mli_applicable; + + // Tensors in MLI format. + mutable ops::micro::MliTensorInterface mli_in; + mutable ops::micro::MliTensorInterface mli_weights; + mutable ops::micro::MliTensorInterface mli_bias; + mutable ops::micro::MliTensorInterface mli_out; + mli_conv2d_cfg* cfg; + + // Pointer to the required depthwise function. For “channel multiplier” + // functionality group convolution is used. + depthwise_func_ptr p_mli_krn_depthwise_conv2d_sa8_sa8_sa32; +}; + +bool IsMliApplicable(TfLiteContext* context, const TfLiteTensor* input, + const TfLiteTensor* filter, const TfLiteTensor* bias, + const TfLiteDepthwiseConvParams* params) { + const auto* affine_quantization = + reinterpret_cast(filter->quantization.params); + +#ifndef MLI_2_0 + const int in_ch = SizeOfDimension(input, 3); + const int filters_num = SizeOfDimension(filter, 3); +#endif + + // MLI optimized version only supports int8_t datatype, dilation factor of 1 + // and per-axis quantization of weights (no broadcasting/per-tensor). For + // MLI 1.1 (in_ch == filters_num) || (in_ch == 1)) is used to prevent usage of + // channel multiplier logic for multichannel input. 
+ + bool ret_val = (filter->type == kTfLiteInt8) && + (input->type == kTfLiteInt8) && (bias->type == kTfLiteInt32) && + (params->dilation_width_factor == 1) && + (params->dilation_height_factor == 1) && + (affine_quantization->scale->size == +#ifdef MLI_2_0 + filter->dims->data[kDepthwiseConvQuantizedDimension]); +#else + filter->dims->data[kDepthwiseConvQuantizedDimension]) && + ((in_ch == filters_num) || (in_ch == 1)); +#endif + return ret_val; +} + +TfLiteStatus CalculateOpData(TfLiteContext* context, TfLiteNode* node, + TfLiteDepthwiseConvParams* params, int width, + int height, int filter_width, int filter_height, + const TfLiteType data_type, OpData* data) { + bool has_bias = node->inputs->size == 3; + // Check number of inputs/outputs + TF_LITE_ENSURE(context, has_bias || node->inputs->size == 2); + TF_LITE_ENSURE_EQ(context, node->outputs->size, 1); + + int unused_output_height, unused_output_width; + data->padding = ComputePaddingHeightWidth( + params->stride_height, params->stride_width, 1, 1, height, width, + filter_height, filter_width, params->padding, &unused_output_height, + &unused_output_width); + + // Note that quantized inference requires that all tensors have their + // parameters set. This is usually done during quantized training. 
+#if !defined(TF_LITE_STRIP_REFERENCE_IMPL) + MicroContext* micro_context = GetMicroContext(context); + + TfLiteTensor* input = + micro_context->AllocateTempInputTensor(node, kInputTensor); + TfLiteTensor* filter = + micro_context->AllocateTempInputTensor(node, kFilterTensor); + TfLiteTensor* bias = + micro_context->AllocateTempInputTensor(context, node, kBiasTensor); + TfLiteTensor* output = + micro_context->AllocateTempOutputTensor(node, kOutputTensor); + + if (data_type != kTfLiteFloat32 && !data->is_mli_applicable) { + int num_channels = filter->dims->data[kDepthwiseConvQuantizedDimension]; + + return tflite::PopulateConvolutionQuantizationParams( + context, input, filter, bias, output, params->activation, + &data->output_multiplier, &data->output_shift, + &data->output_activation_min, &data->output_activation_max, + data->per_channel_output_multiplier, + reinterpret_cast(data->per_channel_output_shift), num_channels); + } + micro_context->DeallocateTempTfLiteTensor(input); + micro_context->DeallocateTempTfLiteTensor(filter); + micro_context->DeallocateTempTfLiteTensor(bias); + micro_context->DeallocateTempTfLiteTensor(output); + +#endif + return kTfLiteOk; +} + +void* Init(TfLiteContext* context, const char* buffer, size_t length) { + TFLITE_DCHECK(context->AllocatePersistentBuffer != nullptr); + return context->AllocatePersistentBuffer(context, sizeof(OpData)); +} + +TfLiteStatus Prepare(TfLiteContext* context, TfLiteNode* node) { + TFLITE_DCHECK(node->user_data != nullptr); + TFLITE_DCHECK(node->builtin_data != nullptr); + + auto* params = + reinterpret_cast(node->builtin_data); + OpData* data = static_cast(node->user_data); + + TfLiteTensor* output = AllocateTempOutputTensor(node, kOutputTensor); + const TfLiteTensor* input = AllocateTempInputTensor(node, kInputTensor); + const TfLiteTensor* filter = AllocateTempInputTensor(node, kFilterTensor); + const TfLiteTensor* bias = + AllocateTempInputTensor(context, node, kBiasTensor); + + const TfLiteType 
data_type = input->type; + int width = SizeOfDimension(input, 2); + int height = SizeOfDimension(input, 1); + +#if defined(MLI_2_0) && !defined(MLI_2_0_KRNL_TEST) + int filter_width = SizeOfDimension(filter, 1); + int filter_height = SizeOfDimension(filter, 0); +#else + int filter_width = SizeOfDimension(filter, 2); + int filter_height = SizeOfDimension(filter, 1); +#endif + + // Per channel quantization is only needed for int8 inference. For other + // quantized types, only a single scale and zero point is needed. + const int num_channels = filter->dims->data[kDepthwiseConvQuantizedDimension]; + // Dynamically allocate per-channel quantization parameters. + data->per_channel_output_multiplier = + reinterpret_cast(context->AllocatePersistentBuffer( + context, num_channels * sizeof(int32_t))); + data->per_channel_output_shift = + reinterpret_cast(context->AllocatePersistentBuffer( + context, num_channels * sizeof(int32_t))); + + data->is_mli_applicable = + IsMliApplicable(context, input, filter, bias, params); + + // All per-channel quantized tensors need valid zero point and scale arrays. 
+ if (input->type == kTfLiteInt8) { + TF_LITE_ENSURE_EQ(context, filter->quantization.type, + kTfLiteAffineQuantization); + + const auto* affine_quantization = + reinterpret_cast( + filter->quantization.params); + TF_LITE_ENSURE(context, affine_quantization); + TF_LITE_ENSURE(context, affine_quantization->scale); + TF_LITE_ENSURE(context, affine_quantization->zero_point); + TF_LITE_ENSURE( + context, affine_quantization->scale->size == 1 || + affine_quantization->scale->size == + filter->dims->data[kDepthwiseConvQuantizedDimension]); + TF_LITE_ENSURE_EQ(context, affine_quantization->scale->size, + affine_quantization->zero_point->size); + } + + TF_LITE_ENSURE_STATUS(CalculateOpData(context, node, params, width, height, + filter_width, filter_height, data_type, + data)); + + data->input_zero_point = input->params.zero_point; + data->filter_zero_point = filter->params.zero_point; + data->output_zero_point = output->params.zero_point; + + if (data->is_mli_applicable) { + data->mli_in = ops::micro::MliTensorInterface(static_cast( + context->AllocatePersistentBuffer(context, sizeof(mli_tensor)))); + data->mli_weights = ops::micro::MliTensorInterface(static_cast( + context->AllocatePersistentBuffer(context, sizeof(mli_tensor)))); + data->mli_bias = ops::micro::MliTensorInterface(static_cast( + context->AllocatePersistentBuffer(context, sizeof(mli_tensor)))); + data->mli_out = ops::micro::MliTensorInterface(static_cast( + context->AllocatePersistentBuffer(context, sizeof(mli_tensor)))); + data->cfg = static_cast( + context->AllocatePersistentBuffer(context, sizeof(mli_conv2d_cfg))); + +#ifdef MLI_2_0 + const int num_buffers = 2; + data->per_channel_scale_frac_bits = + static_cast(context->AllocatePersistentBuffer( + context, num_buffers * num_channels * sizeof(int16_t))); +#endif + + // Reuse space allocated for OpData parameters. 
+#ifdef MLI_2_0 + *data->mli_weights.Scale() = + reinterpret_cast(data->per_channel_output_multiplier); + *data->mli_bias.Scale() = + reinterpret_cast(data->per_channel_output_multiplier) + + num_channels; +#else + *data->mli_weights.Scale() = + static_cast(data->per_channel_output_multiplier); + *data->mli_bias.Scale() = + static_cast(data->per_channel_output_shift); +#endif + +#ifdef MLI_2_0 + *data->mli_weights.ZeroPoint() = + reinterpret_cast(data->per_channel_output_shift); + *data->mli_bias.ZeroPoint() = + reinterpret_cast(data->per_channel_output_shift) + + num_channels; +#else + *data->mli_weights.ZeroPoint() = + reinterpret_cast(&data->filter_zero_point); + *data->mli_bias.ZeroPoint() = + reinterpret_cast(&data->filter_zero_point) + sizeof(int16_t); +#endif + +#ifdef MLI_2_0 + *data->mli_weights.ScaleFracBits() = + reinterpret_cast(data->per_channel_scale_frac_bits); + *data->mli_bias.ScaleFracBits() = + reinterpret_cast(data->per_channel_scale_frac_bits) + + num_channels; +#endif + + ops::micro::ConvertToMliTensor(input, &data->mli_in); + ops::micro::ConvertToMliTensorPerChannel(filter, &data->mli_weights, + /* is_bias_tensor = */ false); + ops::micro::ConvertToMliTensorPerChannel(bias, &data->mli_bias, + /* is_bias_tensor = */ true); +#ifdef MLI_2_0 + ops::micro::AdjustBiasTensor(&data->mli_bias, &data->mli_in, + &data->mli_weights); +#endif + ops::micro::ConvertToMliTensor(output, &data->mli_out); + +#ifdef MLI_2_0 + // Choose group convolution function for "channel multiplier" functionality. 
+ const int in_ch = SizeOfDimension(input, 3); + const int filters_num = SizeOfDimension(filter, 3); + const int channels_num = SizeOfDimension(filter, 2); + if (in_ch == filters_num && channels_num == 1) { + data->p_mli_krn_depthwise_conv2d_sa8_sa8_sa32 = + mli_krn_depthwise_conv2d(data->mli_weights.MliTensor()); + } else { + data->p_mli_krn_depthwise_conv2d_sa8_sa8_sa32 = + mli_krn_group_conv2d(data->mli_weights.MliTensor()); + } +#else + data->p_mli_krn_depthwise_conv2d_sa8_sa8_sa32 = + mli_krn_depthwise_conv2d(data->mli_weights.MliTensor(), data->cfg); +#endif + +#ifdef MLI_2_0 + data->cfg->dilation_width = 1; + data->cfg->dilation_height = 1; +#endif + + if (data->output_activation_min == -128 && + data->output_activation_max == 127) { + data->cfg->relu.type = MLI_RELU_NONE; + } else if (params->activation == kTfLiteActRelu) { + data->cfg->relu.type = MLI_RELU_GEN; + } else if (params->activation == kTfLiteActRelu6) { + data->cfg->relu.type = MLI_RELU_6; + } else if (params->activation == kTfLiteActReluN1To1) { + data->cfg->relu.type = MLI_RELU_1; + } else { + data->cfg->relu.type = MLI_RELU_NONE; + } + + data->cfg->stride_width = params->stride_width; + data->cfg->stride_height = params->stride_height; + if (params->padding == kTfLitePaddingValid) { + data->cfg->padding_left = 0; + data->cfg->padding_right = 0; + data->cfg->padding_top = 0; + data->cfg->padding_bottom = 0; + } else { + data->cfg->padding_left = data->padding.width; + data->cfg->padding_right = + data->padding.width + data->padding.width_offset; + data->cfg->padding_top = data->padding.height; + data->cfg->padding_bottom = + data->padding.height + data->padding.height_offset; + } + } + return kTfLiteOk; +} + +void EvalFloat(TfLiteContext* context, TfLiteNode* node, + TfLiteDepthwiseConvParams* params, const OpData& data, + const TfLiteEvalTensor* input, const TfLiteEvalTensor* filter, + const TfLiteEvalTensor* bias, TfLiteEvalTensor* output) { +#if !defined(TF_LITE_STRIP_REFERENCE_IMPL) + 
float output_activation_min, output_activation_max; + CalculateActivationRange(params->activation, &output_activation_min, + &output_activation_max); + + tflite::DepthwiseParams op_params; + // Padding type is ignored, but still set. + op_params.padding_type = PaddingType::kSame; + op_params.padding_values.width = data.padding.width; + op_params.padding_values.height = data.padding.height; + op_params.stride_width = params->stride_width; + op_params.stride_height = params->stride_height; + op_params.dilation_width_factor = params->dilation_width_factor; + op_params.dilation_height_factor = params->dilation_height_factor; + op_params.depth_multiplier = params->depth_multiplier; + op_params.float_activation_min = output_activation_min; + op_params.float_activation_max = output_activation_max; + + tflite::reference_ops::DepthwiseConv( + op_params, tflite::micro::GetTensorShape(input), + tflite::micro::GetTensorData(input), + tflite::micro::GetTensorShape(filter), + tflite::micro::GetTensorData(filter), + tflite::micro::GetTensorShape(bias), + tflite::micro::GetTensorData(bias), + tflite::micro::GetTensorShape(output), + tflite::micro::GetTensorData(output)); +#else + MicroPrintf("Type %s (%d) is not supported by ARC MLI Library.", + TfLiteTypeGetName(input->type), input->type); +#endif +} +TfLiteStatus EvalMliQuantizedPerChannel( + TfLiteContext* context, TfLiteNode* node, TfLiteDepthwiseConvParams* params, + const OpData& data, const TfLiteEvalTensor* input, + const TfLiteEvalTensor* filter, const TfLiteEvalTensor* bias, + TfLiteEvalTensor* output) { + // Run Depthwise Conv MLI kernel + // MLI optimized version only supports int8_t dataype and dilation factor of 1 + if (data.is_mli_applicable) { + // Copy configuration data from external to local memory + mli_conv2d_cfg cfg_local = *data.cfg; + + ops::micro::MliTensorAttachBuffer(input, &data.mli_in); + ops::micro::MliTensorAttachBuffer(filter, &data.mli_weights); + ops::micro::MliTensorAttachBuffer(bias, 
&data.mli_bias); + ops::micro::MliTensorAttachBuffer(output, &data.mli_out); + + // for height slicing + const int height_dimension = 1; + int in_slice_height = 0; + int out_slice_height = 0; + uint32_t* mli_weights_shape = data.mli_weights.Shape(); +#ifdef MLI_2_0 + const int kernel_height = + static_cast(mli_weights_shape[KRNL_DW_H_DIM_HW1N]); +#else + const int kernel_height = + static_cast(mli_weights_shape[KRNL_DW_H_DIM_HWC]); +#endif + const int overlap = kernel_height - cfg_local.stride_height; + + // for weight slicing (on output channels) + // HWCN layout for weights, output channel dimension is the first dimension. + const int weight_out_ch_dimension = 3; + // bias has only 1 dimension + const int bias_out_ch_dimension = 0; + // Batch-Height-Width-Channel layout means last dimension is output + // channels. + const int out_tensor_ch_dimension = 3; + const int32_t in_channels = data.mli_in.Shape()[out_tensor_ch_dimension]; + const int32_t out_channels = data.mli_out.Shape()[out_tensor_ch_dimension]; + int slice_channels = + static_cast(mli_weights_shape[weight_out_ch_dimension]); + + // Tensors for data in fast (local) memory + // and config to copy data from external to local memory + mli_tensor weights_local = *data.mli_weights.MliTensor(); + mli_tensor bias_local = *data.mli_bias.MliTensor(); + mli_tensor in_local = *data.mli_in.MliTensor(); + mli_tensor out_local = + *data.mli_out.MliTensor(); // this assumes that output shape + // is already filled in the tensor struct. 
+ + ops::micro::MliTensorInterface weights_local_interface(&weights_local); + ops::micro::MliTensorInterface bias_local_interface(&bias_local); + ops::micro::MliTensorInterface in_local_interface(&in_local); + ops::micro::MliTensorInterface out_local_interface(&out_local); + + mli_mov_cfg_t copy_config; + mli_mov_cfg_for_copy(©_config); + + TF_LITE_ENSURE_STATUS(ops::micro::get_arc_scratch_buffer_for_conv_tensors( + context, &in_local_interface, &weights_local_interface, + &bias_local_interface, &out_local_interface)); + + /* is_local indicates that the tensor is already in local memory, + so in that case the original tensor can be used, + and there is no need to copy it to the local tensor*/ + const bool in_is_local = + in_local_interface.Data() == data.mli_in.Data(); + const bool out_is_local = + out_local_interface.Data() == data.mli_out.Data(); + const bool w_is_local = weights_local_interface.Data() == + data.mli_weights.Data(); + const bool b_is_local = + bias_local_interface.Data() == data.mli_bias.Data(); + + TF_LITE_ENSURE_STATUS(ops::micro::arc_scratch_buffer_calc_slice_size_io( + &in_local_interface, &out_local_interface, kernel_height, + cfg_local.stride_height, cfg_local.padding_top, + cfg_local.padding_bottom, &in_slice_height, &out_slice_height)); + TF_LITE_ENSURE_STATUS( + ops::micro::arc_scratch_buffer_calc_slice_size_weights( + &weights_local_interface, &bias_local_interface, + weight_out_ch_dimension, &slice_channels)); + + /* if input channels is not equal to output channels, a channel multiplier + is used. 
in this case the slice channels needs to be rounded down to a + multiple of the input channels */ + if (in_channels != out_channels) { + slice_channels = (slice_channels / in_channels) * in_channels; + } + + ops::micro::TensorSlicer b_slice(data.mli_bias.MliTensor(), + bias_out_ch_dimension, slice_channels); + ops::micro::TensorSlicer w_slice(data.mli_weights.MliTensor(), + weight_out_ch_dimension, slice_channels, 0, + 0, 0, true); + ops::micro::TensorSlicer out_ch_slice(data.mli_out.MliTensor(), + out_tensor_ch_dimension, + slice_channels, 0, 0, 0, true); + ops::micro::TensorSlicer in_ch_slice(data.mli_in.MliTensor(), + out_tensor_ch_dimension, + slice_channels, 0, 0, 0, true); + + mli_tensor* w_ptr = w_is_local ? w_slice.Sub() : &weights_local; + mli_tensor* b_ptr = b_is_local ? b_slice.Sub() : &bias_local; + + void* input_buffer_ptr = NULL; + uint32_t input_buffer_size = 0; + int padding_top = cfg_local.padding_top; + int padding_bottom = cfg_local.padding_bottom; + + while (!w_slice.Done()) { + mli_mov_tensor_sync(w_slice.Sub(), ©_config, w_ptr); + mli_mov_tensor_sync(b_slice.Sub(), ©_config, b_ptr); + + /* input tensor is already sliced in the channel dimension. + out_ch_slice.Sub() is the tensor for the amount of channels of this + iteration of the weight slice loop. This tensor needs to be further + sliced over the batch and height dimension. in_ch_slice.Sub() tensor + contains batches of HWC tensors. so it is a 4 dimensional tensor. because + the mli kernel will process one HWC tensor at a time, the 4 dimensional + tensor needs to be sliced into nBatch 3 dimensional tensors. on top of + that there could be a need to also slice in the Height dimension. for that + the sliceHeight has been calculated. 
The tensor slicer is configured that + it will completely slice the nBatch dimension (0) and slice the height + dimension (1) in chunks of 'sliceHeight' */ + ops::micro::TensorSlicer in_slice(in_ch_slice.Sub(), height_dimension, + in_slice_height, padding_top, + padding_bottom, overlap); + + /* output tensor is already sliced in the output channel dimension. + out_ch_slice.Sub() is the tensor for the amount of output channels of this + iteration of the weight slice loop. This tensor needs to be further + sliced over the batch and height dimension. */ + ops::micro::TensorSlicer out_slice(out_ch_slice.Sub(), height_dimension, + out_slice_height); + + /* setup the pointers to the local or remote tensor to make the code + * inside the loop easier. */ + mli_tensor* in_ptr = in_is_local ? in_slice.Sub() : &in_local; + mli_tensor* out_ptr = out_is_local ? out_slice.Sub() : &out_local; + + while (!out_slice.Done()) { + if (!out_is_local) { + ops::micro::PrepareLocalTensor(out_slice.Sub(), &out_local); + ops::micro::PrepareLocalTensor(in_slice.Sub(), &in_local); + } + TF_LITE_ENSURE(context, !in_slice.Done()); + cfg_local.padding_top = in_slice.GetPaddingPre(); + cfg_local.padding_bottom = in_slice.GetPaddingPost(); + + // if same input copy as previous iteration, skip the copy of input +#ifdef MLI_2_0 + if ((in_slice.Sub()->data.mem.pi8 != input_buffer_ptr) || + (mli_hlp_count_elem_num(in_slice.Sub(), 0) != input_buffer_size)) { + mli_mov_tensor_sync(in_slice.Sub(), ©_config, in_ptr); + input_buffer_ptr = in_slice.Sub()->data.mem.pi8; + input_buffer_size = mli_hlp_count_elem_num(in_slice.Sub(), 0); + } + +#ifdef MLI_2_0_KRNL_TEST + // Checking conditions here to prevent usage non-contiguous buffer + // memory. 
+ if (mli_weights_shape[weight_out_ch_dimension] != + w_slice.Sub()->shape[3]) { + MicroPrintf("Slicing is not supported with real-time permutation."); + return kTfLiteError; + } + uint8_t dim_order[] = {1, 2, 0, 3}; + ops::micro::change_shape(w_ptr, dim_order); +#endif + + data.p_mli_krn_depthwise_conv2d_sa8_sa8_sa32(in_ptr, w_ptr, b_ptr, + &cfg_local, out_ptr); +#else + if ((in_slice.Sub()->data != input_buffer_ptr) || + (mli_hlp_count_elem_num(in_slice.Sub(), 0) != input_buffer_size)) { + mli_mov_tensor_sync(in_slice.Sub(), ©_config, in_ptr); + input_buffer_ptr = in_slice.Sub()->data; + input_buffer_size = mli_hlp_count_elem_num(in_slice.Sub(), 0); + } + data.p_mli_krn_depthwise_conv2d_sa8_sa8_sa32(in_ptr, w_ptr, b_ptr, + &cfg_local, out_ptr); +#endif + + mli_mov_tensor_sync(out_ptr, ©_config, out_slice.Sub()); + + in_slice.Next(); + out_slice.Next(); + } + w_slice.Next(); + b_slice.Next(); + out_ch_slice.Next(); + in_ch_slice.Next(); + TF_LITE_ENSURE(context, in_slice.Done()); + } + } + return kTfLiteOk; +} + +void EvalQuantizedPerChannel(TfLiteContext* context, TfLiteNode* node, + TfLiteDepthwiseConvParams* params, + const OpData& data, const TfLiteEvalTensor* input, + const TfLiteEvalTensor* filter, + const TfLiteEvalTensor* bias, + TfLiteEvalTensor* output) { +#if !defined(TF_LITE_STRIP_REFERENCE_IMPL) + DepthwiseParams op_params; + op_params.padding_type = PaddingType::kSame; + op_params.padding_values.width = data.padding.width; + op_params.padding_values.height = data.padding.height; + op_params.stride_width = params->stride_width; + op_params.stride_height = params->stride_height; + op_params.dilation_width_factor = params->dilation_width_factor; + op_params.dilation_height_factor = params->dilation_height_factor; + op_params.depth_multiplier = params->depth_multiplier; + op_params.input_offset = -data.input_zero_point; + op_params.weights_offset = 0; + op_params.output_offset = data.output_zero_point; + op_params.quantized_activation_min = 
std::numeric_limits::min(); + op_params.quantized_activation_max = std::numeric_limits::max(); + + reference_integer_ops::DepthwiseConvPerChannel( + op_params, data.per_channel_output_multiplier, + data.per_channel_output_shift, tflite::micro::GetTensorShape(input), + tflite::micro::GetTensorData(input), + tflite::micro::GetTensorShape(filter), + tflite::micro::GetTensorData(filter), + tflite::micro::GetTensorShape(bias), + tflite::micro::GetTensorData(bias), + tflite::micro::GetTensorShape(output), + tflite::micro::GetTensorData(output)); +#else + MicroPrintf("Node configuration is not supported by ARC MLI Library."); +#endif +} + +TfLiteStatus Eval(TfLiteContext* context, TfLiteNode* node) { + TFLITE_DCHECK(node->user_data != nullptr); + TFLITE_DCHECK(node->builtin_data != nullptr); + + auto* params = + reinterpret_cast(node->builtin_data); + const OpData& data = *(static_cast(node->user_data)); + + TfLiteEvalTensor* output = + tflite::micro::GetEvalOutput(context, node, kOutputTensor); + const TfLiteEvalTensor* input = + tflite::micro::GetEvalInput(context, node, kInputTensor); + const TfLiteEvalTensor* filter = + tflite::micro::GetEvalInput(context, node, kFilterTensor); + const TfLiteEvalTensor* bias = + (NumInputs(node) == 3) + ? tflite::micro::GetEvalInput(context, node, kBiasTensor) + : nullptr; + + switch (input->type) { // Already know in/out types are same. 
+ case kTfLiteFloat32: + EvalFloat(context, node, params, data, input, filter, bias, output); + break; + case kTfLiteInt8: + if (data.is_mli_applicable) { + EvalMliQuantizedPerChannel(context, node, params, data, input, filter, + bias, output); + } else { + EvalQuantizedPerChannel(context, node, params, data, input, filter, + bias, output); + } + break; + default: + MicroPrintf("Type %s (%d) not supported.", TfLiteTypeGetName(input->type), + input->type); + return kTfLiteError; + } + return kTfLiteOk; +} + +} // namespace + +TFLMRegistration Register_DEPTHWISE_CONV_2D() { + return tflite::micro::RegisterOp(Init, Prepare, Eval); +} + +} // namespace tflite diff --git a/tensorflow/lite/micro/kernels/arc_mli/fully_connected.cc b/tensorflow/lite/micro/kernels/arc_mli/fully_connected.cc new file mode 100644 index 0000000..4af0660 --- /dev/null +++ b/tensorflow/lite/micro/kernels/arc_mli/fully_connected.cc @@ -0,0 +1,476 @@ +/* Copyright 2021 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/ + +#include "tensorflow/lite/kernels/internal/reference/fully_connected.h" + +#include "mli_api.h" // NOLINT +#include "tensorflow/lite/c/builtin_op_data.h" +#include "tensorflow/lite/c/common.h" +#include "tensorflow/lite/kernels/internal/common.h" +#include "tensorflow/lite/kernels/internal/quantization_util.h" +#include "tensorflow/lite/kernels/internal/reference/integer_ops/fully_connected.h" +#include "tensorflow/lite/kernels/internal/tensor_ctypes.h" +#include "tensorflow/lite/kernels/kernel_util.h" +#include "tensorflow/lite/micro/kernels/arc_mli/mli_slicers.h" +#include "tensorflow/lite/micro/kernels/arc_mli/mli_tf_utils.h" +#include "tensorflow/lite/micro/kernels/arc_mli/scratch_buf_mgr.h" +#include "tensorflow/lite/micro/kernels/arc_mli/scratch_buffers.h" +#include "tensorflow/lite/micro/kernels/kernel_util.h" +#include "tensorflow/lite/micro/micro_log.h" + +namespace tflite { +namespace { + +struct OpData { + // The scaling factor from input to output (aka the 'real multiplier') can + // be represented as a fixed point multiplier plus a left shift. + int32_t output_multiplier; + int output_shift; + // The range of the fused activation layer. For example for kNone and + // uint8_t these would be 0 and 255. + int32_t output_activation_min; + int32_t output_activation_max; + // The index of the temporary tensor where the quantized inputs are cached. + int input_quantized_index; + // Cached tensor zero point values for quantized operations. + int32_t input_zero_point; + int32_t filter_zero_point; + int32_t output_zero_point; + + // The result of checking if MLI optimized version of tensors can be used. + bool is_mli_applicable; + + // Tensors in MLI format. 
+ mutable ops::micro::MliTensorInterface mli_in; + mutable ops::micro::MliTensorInterface mli_weights; + mutable ops::micro::MliTensorInterface mli_bias; + mutable ops::micro::MliTensorInterface mli_out; + +#ifdef MLI_2_0 + mli_fully_connected_cfg* cfg; +#endif +}; + +constexpr int kInputTensor = 0; +constexpr int kWeightsTensor = 1; +constexpr int kBiasTensor = 2; +constexpr int kOutputTensor = 0; + +bool IsMliApplicable(TfLiteContext* context, const TfLiteTensor* input, + const TfLiteTensor* filter, const TfLiteTensor* bias, + const TfLiteFullyConnectedParams* params, + int32_t output_activation_min, + int32_t output_activation_max) { + // MLI optimized version only supports int8_t datatype and no fused Relu and + // symmetric per-tensor quantization of weights (not per-axis) + bool ret_val = + (filter->type == kTfLiteInt8) && (input->type == kTfLiteInt8) && + (bias->type == kTfLiteInt32) && +#ifndef MLI_2_0 + (params->activation == kTfLiteActNone || + (output_activation_min == -128 && output_activation_max == 127)) && +#endif + (filter->params.zero_point == 0); + return ret_val; +} + +TfLiteStatus CalculateOpData(TfLiteContext* context, + const TfLiteFullyConnectedParams* params, + TfLiteType data_type, const TfLiteTensor* input, + const TfLiteTensor* filter, + const TfLiteTensor* bias, TfLiteTensor* output, + OpData* data) { + TfLiteStatus status = kTfLiteOk; +#if !defined(TF_LITE_STRIP_REFERENCE_IMPL) + if (data_type != kTfLiteFloat32 && !data->is_mli_applicable) { + double real_multiplier = 0.0; + TF_LITE_ENSURE_STATUS(GetQuantizedConvolutionMultipler( + context, input, filter, bias, output, &real_multiplier)); + int exponent; + QuantizeMultiplier(real_multiplier, &data->output_multiplier, &exponent); + data->output_shift = -exponent; + TF_LITE_ENSURE_STATUS(CalculateActivationRangeQuantized( + context, params->activation, output, &data->output_activation_min, + &data->output_activation_max)); + } +#endif + return status; +} + +} // namespace + +void* 
Init(TfLiteContext* context, const char* buffer, size_t length) { + TFLITE_DCHECK(context->AllocatePersistentBuffer != nullptr); + return context->AllocatePersistentBuffer(context, sizeof(OpData)); +} + +TfLiteStatus Prepare(TfLiteContext* context, TfLiteNode* node) { + TFLITE_DCHECK(node->user_data != nullptr); + TFLITE_DCHECK(node->builtin_data != nullptr); + + OpData* data = static_cast(node->user_data); + const auto params = + static_cast(node->builtin_data); + + MicroContext* micro_context = GetMicroContext(context); + + TfLiteTensor* input = + micro_context->AllocateTempInputTensor(node, kInputTensor); + TfLiteTensor* filter = + micro_context->AllocateTempInputTensor(node, kWeightsTensor); + TfLiteTensor* bias = + micro_context->AllocateTempInputTensor(context, node, kBiasTensor); + TfLiteTensor* output = AllocateTempOutputTensor(node, kOutputTensor); + + TF_LITE_ENSURE_TYPES_EQ(context, input->type, output->type); + TF_LITE_ENSURE_MSG(context, input->type == filter->type, + "Hybrid models are not supported on TFLite Micro."); + + data->input_zero_point = input->params.zero_point; + data->filter_zero_point = filter->params.zero_point; + data->output_zero_point = output->params.zero_point; + + TfLiteStatus status = CalculateOpData(context, params, input->type, input, + filter, bias, output, data); + + data->is_mli_applicable = + IsMliApplicable(context, input, filter, bias, params, + data->output_activation_min, data->output_activation_max); + + if (input->type == kTfLiteInt8 && data->is_mli_applicable) { + data->mli_in = ops::micro::MliTensorInterface(static_cast( + context->AllocatePersistentBuffer(context, sizeof(mli_tensor)))); + data->mli_weights = ops::micro::MliTensorInterface(static_cast( + context->AllocatePersistentBuffer(context, sizeof(mli_tensor)))); + data->mli_bias = ops::micro::MliTensorInterface(static_cast( + context->AllocatePersistentBuffer(context, sizeof(mli_tensor)))); + data->mli_out = ops::micro::MliTensorInterface(static_cast( + 
context->AllocatePersistentBuffer(context, sizeof(mli_tensor)))); + + ops::micro::ConvertToMliTensor(input, &data->mli_in); + ops::micro::ConvertToMliTensor(filter, &data->mli_weights); + ops::micro::ConvertToMliTensor(bias, &data->mli_bias); +#ifdef MLI_2_0 + ops::micro::AdjustBiasTensor(&data->mli_bias, &data->mli_in, + &data->mli_weights); +#endif + ops::micro::ConvertToMliTensor(output, &data->mli_out); + +#ifdef MLI_2_0 + if (data->output_activation_min == -128 && + data->output_activation_max == 127) { + data->cfg->relu.type = MLI_RELU_NONE; + } else if (params->activation == kTfLiteActRelu) { + data->cfg->relu.type = MLI_RELU_GEN; + } else if (params->activation == kTfLiteActRelu6) { + data->cfg->relu.type = MLI_RELU_6; + } else if (params->activation == kTfLiteActReluN1To1) { + data->cfg->relu.type = MLI_RELU_1; + } else { + data->cfg->relu.type = MLI_RELU_NONE; + } +#endif + + /* The input tensor can have more than 2 dimensions. for the compute this + doesn't make any difference because all the inputs or a batch entry will + be used anyway. because the MLI kernel doesn't recognize the multiple + dimensions, the tensor shape is casted to a {batchnum, inputsize} shape. 
*/ + data->mli_in.Shape()[0] = data->mli_out.Shape()[0]; +#if defined(MLI_2_0) && !defined(MLI_2_0_KRNL_TEST) + data->mli_in.Shape()[1] = data->mli_weights.Shape()[0]; +#else + data->mli_in.Shape()[1] = data->mli_weights.Shape()[1]; +#endif + data->mli_in.Shape()[2] = 0; + data->mli_in.Shape()[3] = 0; + data->mli_in.MemStride()[0] = data->mli_in.Shape()[1]; + data->mli_in.MemStride()[1] = 0; + *data->mli_in.Rank() = 2; + } + + micro_context->DeallocateTempTfLiteTensor(input); + micro_context->DeallocateTempTfLiteTensor(filter); + micro_context->DeallocateTempTfLiteTensor(bias); + micro_context->DeallocateTempTfLiteTensor(output); + return status; +} + +TfLiteStatus EvalMliQuantizedInt8(TfLiteContext* context, TfLiteNode* node, + const TfLiteFullyConnectedParams* params, + const OpData& data, + const TfLiteEvalTensor* input, + const TfLiteEvalTensor* filter, + const TfLiteEvalTensor* bias, + TfLiteEvalTensor* output) { + ops::micro::MliTensorAttachBuffer(input, &data.mli_in); + ops::micro::MliTensorAttachBuffer(filter, &data.mli_weights); + ops::micro::MliTensorAttachBuffer(bias, &data.mli_bias); + ops::micro::MliTensorAttachBuffer(output, &data.mli_out); + + // Tensors for data in fast (local) memory and config to copy data from + // external to local memory + mli_tensor weights_local = *data.mli_weights.MliTensor(); + mli_tensor bias_local = *data.mli_bias.MliTensor(); + mli_tensor in_local = *data.mli_in.MliTensor(); + mli_tensor out_local = *data.mli_out.MliTensor(); + + ops::micro::MliTensorInterface weights_local_interface(&weights_local); + ops::micro::MliTensorInterface bias_local_interface(&bias_local); + ops::micro::MliTensorInterface in_local_interface(&in_local); + ops::micro::MliTensorInterface out_local_interface(&out_local); + + mli_mov_cfg_t copy_config; + mli_mov_cfg_for_copy(©_config); +#if defined(MLI_2_0) && !defined(MLI_2_0_KRNL_TEST) + const int weight_out_dimension = 1; +#else + const int weight_out_dimension = 0; +#endif + // bias has only 1 
dimension + const int bias_out_ch_dimension = 0; + const int out_tensor_dimension = 1; + const int input_size_dimension = 1; + int slice_size = data.mli_weights.Shape()[weight_out_dimension]; + + /* allocate the local buffers, and compute the slice size */ + TF_LITE_ENSURE_STATUS( + ops::micro::get_arc_scratch_buffer_for_fully_connect_tensors( + context, &in_local_interface, &weights_local_interface, + &bias_local_interface, &out_local_interface)); + TF_LITE_ENSURE_STATUS(ops::micro::arc_scratch_buffer_calc_slice_size_weights( + &weights_local_interface, &bias_local_interface, weight_out_dimension, + &slice_size)); + + int max_out_slice_size = *out_local_interface.DataCapacity() / + mli_hlp_tensor_element_size(&out_local); + + if (slice_size > max_out_slice_size) slice_size = max_out_slice_size; + + /* is_local indicates that the tensor is already in local memory, + so in that case the original tensor can be used, + and there is no need to copy it to the local tensor*/ + const bool in_is_local = + in_local_interface.Data() == data.mli_in.Data(); + const bool out_is_local = + out_local_interface.Data() == data.mli_out.Data(); + const bool b_is_local = + bias_local_interface.Data() == data.mli_bias.Data(); +#ifndef MLI_2_0_KRNL_TEST + const bool w_is_local = + weights_local_interface.Data() == data.mli_weights.Data(); +#endif + +#if defined(MLI_2_0) && !defined(MLI_2_0_KRNL_TEST) + ops::micro::TensorSlicer w_slice(data.mli_weights.MliTensor(), + weight_out_dimension, slice_size, 0, 0, 0, + true); +#else + ops::micro::TensorSlicer w_slice(data.mli_weights.MliTensor(), + weight_out_dimension, slice_size); +#endif + ops::micro::TensorSlicer b_slice(data.mli_bias.MliTensor(), + bias_out_ch_dimension, slice_size); + ops::micro::TensorSlicer out_ch_slice(data.mli_out.MliTensor(), + out_tensor_dimension, slice_size, 0, 0, + 0, true); + +#ifdef MLI_2_0_KRNL_TEST + mli_tensor* w_ptr = &weights_local; +#else + mli_tensor* w_ptr = w_is_local ? 
w_slice.Sub() : &weights_local; +#endif + mli_tensor* b_ptr = b_is_local ? b_slice.Sub() : &bias_local; + + void* input_buffer_ptr = NULL; + + while (!w_slice.Done()) { +#if defined(MLI_2_0) && !defined(MLI_2_0_KRNL_TEST) + w_ptr->el_params.sa.scale.mem.pi16 = NULL; + b_ptr->el_params.sa.scale.mem.pi16 = NULL; +#endif + +#ifndef MLI_2_0_KRNL_TEST + mli_mov_tensor_sync(w_slice.Sub(), ©_config, w_ptr); +#endif + mli_mov_tensor_sync(b_slice.Sub(), ©_config, b_ptr); + + // Slice the input over the batches (one at a time with the size of a + // complete input) + ops::micro::TensorSlicer in_slice( + data.mli_in.MliTensor(), input_size_dimension, + data.mli_in.Shape()[input_size_dimension]); + + /* output tensor is already sliced in the output size dimension. + out_ch_slice.Sub() is the tensor for the amount of output size of this + iteration of the weight slice loop. This tensor needs to be further + sliced over the batch */ + ops::micro::TensorSlicer out_slice(out_ch_slice.Sub(), out_tensor_dimension, + slice_size); + + /* setup the pointers to the local or remote tensor to make the code + * inside the loop easier. */ + mli_tensor* in_ptr = in_is_local ? in_slice.Sub() : &in_local; + mli_tensor* out_ptr = out_is_local ? out_slice.Sub() : &out_local; + +#ifdef MLI_2_0_KRNL_TEST + /* Permute weights tensor to the HWCN layout */ + // Assertion here to prevent usage non-contiguous buffer memory. 
+ if (data.mli_out.Shape()[out_tensor_dimension] != + out_slice.Sub()->shape[0]) { + MicroPrintf("Slicing is not supported with real-time permutation."); + return kTfLiteError; + } + mli_permute_cfg permute_cfg = {{1, 0, 2, 3}}; + ops::micro::permute_weights(data.mli_weights.MliTensor(), &permute_cfg, + w_ptr, &out_ptr->data); +#endif + + while (!out_slice.Done()) { + if (!out_is_local) { + ops::micro::PrepareLocalTensor(out_slice.Sub(), &out_local); + ops::micro::PrepareLocalTensor(in_slice.Sub(), &in_local); + } + // if same input copy as previous iteration, skip the copy of input +#ifdef MLI_2_0 + if (in_slice.Sub()->data.mem.pi8 != input_buffer_ptr) { + mli_mov_tensor_sync(in_slice.Sub(), ©_config, in_ptr); + input_buffer_ptr = in_slice.Sub()->data.mem.pi8; + } + mli_fully_connected_cfg cfg; + cfg.relu.type = MLI_RELU_NONE; + mli_krn_fully_connected_sa8_sa8_sa32(in_ptr, w_ptr, b_ptr, &cfg, out_ptr); +#else + if (in_slice.Sub()->data != input_buffer_ptr) { + mli_mov_tensor_sync(in_slice.Sub(), ©_config, in_ptr); + input_buffer_ptr = in_slice.Sub()->data; + } + mli_krn_fully_connected_sa8_sa8_sa32(in_ptr, w_ptr, b_ptr, out_ptr); +#endif + + mli_mov_tensor_sync(out_ptr, ©_config, out_slice.Sub()); + + in_slice.Next(); + out_slice.Next(); + } + w_slice.Next(); + b_slice.Next(); + out_ch_slice.Next(); + } + return kTfLiteOk; +} + +TfLiteStatus EvalQuantized(TfLiteContext* context, TfLiteNode* node, + const OpData& data, const TfLiteEvalTensor* input, + const TfLiteEvalTensor* filter, + const TfLiteEvalTensor* bias, + TfLiteEvalTensor* output) { +#if !defined(TF_LITE_STRIP_REFERENCE_IMPL) + tflite::FullyConnectedParams op_params; + op_params.input_offset = -data.input_zero_point; + op_params.weights_offset = -data.filter_zero_point; + op_params.output_offset = data.output_zero_point; + op_params.output_multiplier = data.output_multiplier; + op_params.output_shift = -data.output_shift; + op_params.quantized_activation_min = data.output_activation_min; + 
op_params.quantized_activation_max = data.output_activation_max; + + reference_integer_ops::FullyConnected( + op_params, tflite::micro::GetTensorShape(input), + tflite::micro::GetTensorData(input), + tflite::micro::GetTensorShape(filter), + tflite::micro::GetTensorData(filter), + tflite::micro::GetTensorShape(bias), + tflite::micro::GetTensorData(bias), + tflite::micro::GetTensorShape(output), + tflite::micro::GetTensorData(output)); + return kTfLiteOk; +#else + MicroPrintf("Node configuration is not supported by ARC MLI Library."); + return kTfLiteError; +#endif +} + +TfLiteStatus EvalFloat(TfLiteContext* context, TfLiteNode* node, + TfLiteFusedActivation activation, + const TfLiteEvalTensor* input, + const TfLiteEvalTensor* filter, + const TfLiteEvalTensor* bias, TfLiteEvalTensor* output) { +#if !defined(TF_LITE_STRIP_REFERENCE_IMPL) + float output_activation_min, output_activation_max; + CalculateActivationRange(activation, &output_activation_min, + &output_activation_max); + tflite::FullyConnectedParams op_params; + op_params.float_activation_min = output_activation_min; + op_params.float_activation_max = output_activation_max; + tflite::reference_ops::FullyConnected( + op_params, tflite::micro::GetTensorShape(input), + tflite::micro::GetTensorData(input), + tflite::micro::GetTensorShape(filter), + tflite::micro::GetTensorData(filter), + tflite::micro::GetTensorShape(bias), + tflite::micro::GetTensorData(bias), + tflite::micro::GetTensorShape(output), + tflite::micro::GetTensorData(output)); + return kTfLiteOk; +#else + MicroPrintf("Type %s (%d) is not supported by ARC MLI Library.", + TfLiteTypeGetName(input->type), input->type); + return kTfLiteError; +#endif +} + +TfLiteStatus Eval(TfLiteContext* context, TfLiteNode* node) { + TFLITE_DCHECK(node->builtin_data != nullptr); + const auto* params = + static_cast(node->builtin_data); + + TfLiteEvalTensor* output = + tflite::micro::GetEvalOutput(context, node, kOutputTensor); + const TfLiteEvalTensor* input = + 
tflite::micro::GetEvalInput(context, node, kInputTensor); + const TfLiteEvalTensor* filter = + tflite::micro::GetEvalInput(context, node, kWeightsTensor); + const TfLiteEvalTensor* bias = + tflite::micro::GetEvalInput(context, node, kBiasTensor); + + TFLITE_DCHECK(node->user_data != nullptr); + const OpData& data = *(static_cast(node->user_data)); + + // Checks in Prepare ensure input, output and filter types are all the same. + switch (input->type) { + case kTfLiteFloat32: + return EvalFloat(context, node, params->activation, input, filter, bias, + output); + case kTfLiteInt8: + if (data.is_mli_applicable) { + return EvalMliQuantizedInt8(context, node, params, data, input, filter, + bias, output); + } else { + return EvalQuantized(context, node, data, input, filter, bias, output); + } + + default: + MicroPrintf("Type %s (%d) not supported.", TfLiteTypeGetName(input->type), + input->type); + return kTfLiteError; + } + return kTfLiteOk; +} + +TFLMRegistration Register_FULLY_CONNECTED() { + return tflite::micro::RegisterOp(Init, Prepare, Eval); +} + +} // namespace tflite diff --git a/tensorflow/lite/micro/kernels/arc_mli/mli_function_specializations.h b/tensorflow/lite/micro/kernels/arc_mli/mli_function_specializations.h new file mode 100644 index 0000000..6276fe7 --- /dev/null +++ b/tensorflow/lite/micro/kernels/arc_mli/mli_function_specializations.h @@ -0,0 +1,141 @@ +/* Copyright 2021 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/ + +#include "mli_api.h" // NOLINT + +namespace tflite { + +// Convolution specialized function. +typedef mli_status (*conv_func_ptr)(const mli_tensor* /*in*/, + const mli_tensor* /*weights*/, + const mli_tensor* /*bias*/, + const mli_conv2d_cfg* /*cfg*/, + mli_tensor* /*out*/); + +#ifdef MLI_2_0 +conv_func_ptr __attribute__((weak)) +mli_krn_conv2d_hwcn(const mli_tensor* weights) { + int filter_w = weights->shape[KRNL_W_DIM_HWCN]; + int filter_h = weights->shape[KRNL_H_DIM_HWCN]; + + if (filter_w == 1 && filter_h == 1) { + return mli_krn_conv2d_hwcn_sa8_sa8_sa32_k1x1; + } else if (filter_w == 3 && filter_h == 3) { + return mli_krn_conv2d_hwcn_sa8_sa8_sa32_k3x3; + } else if (filter_w == 5 && filter_h == 5) { + return mli_krn_conv2d_hwcn_sa8_sa8_sa32_k5x5; + } else { + return mli_krn_conv2d_hwcn_sa8_sa8_sa32; + } +} +#else +conv_func_ptr __attribute__((weak)) +mli_krn_conv2d_hwcn(const mli_tensor* weights, const mli_conv2d_cfg* cfg) { + return mli_krn_conv2d_nhwc_sa8_sa8_sa32; +} +#endif + +// Depthwise convolution specialized function. 
+typedef mli_status (*depthwise_func_ptr)(const mli_tensor* /*in*/, + const mli_tensor* /*weights*/, + const mli_tensor* /*bias*/, + const mli_conv2d_cfg* /*cfg*/, + mli_tensor* /*out*/); + +#ifdef MLI_2_0 +depthwise_func_ptr __attribute__((weak)) +mli_krn_depthwise_conv2d(const mli_tensor* weights) { + int filter_w = weights->shape[KRNL_DW_W_DIM_HW1N]; + int filter_h = weights->shape[KRNL_DW_H_DIM_HW1N]; + + if (filter_w == 3 && filter_h == 3) { + return mli_krn_depthwise_conv2d_hwcn_sa8_sa8_sa32_k3x3; + } else if (filter_w == 5 && filter_h == 5) { + return mli_krn_depthwise_conv2d_hwcn_sa8_sa8_sa32_k5x5; + } else { + return mli_krn_depthwise_conv2d_hwcn_sa8_sa8_sa32; + } +} +#else +depthwise_func_ptr __attribute__((weak)) +mli_krn_depthwise_conv2d(const mli_tensor* weights, const mli_conv2d_cfg* cfg) { + return mli_krn_depthwise_conv2d_hwcn_sa8_sa8_sa32; +} +#endif + +#ifdef MLI_2_0 +depthwise_func_ptr __attribute__((weak)) +mli_krn_group_conv2d(const mli_tensor* weights) { + int filter_w = weights->shape[KRNL_DW_W_DIM_HW1N]; + int filter_h = weights->shape[KRNL_DW_H_DIM_HW1N]; + + if (filter_w == 3 && filter_h == 3) { + return mli_krn_group_conv2d_hwcn_sa8_sa8_sa32_k3x3; + } else if (filter_w == 5 && filter_h == 5) { + return mli_krn_group_conv2d_hwcn_sa8_sa8_sa32_k5x5; + } else { + return mli_krn_group_conv2d_hwcn_sa8_sa8_sa32; + } +} +#endif + +// Pooling specialized functions. 
+typedef mli_status (*pooling_func_ptr)(const mli_tensor* /*in*/, + const mli_pool_cfg* /*cfg*/, + mli_tensor* /*out*/); + +#ifdef MLI_2_0 +pooling_func_ptr __attribute__((weak)) +mli_krn_avepool(const mli_pool_cfg* cfg) { + int filter_w = cfg->kernel_width; + int filter_h = cfg->kernel_height; + + if (filter_w == 2 && filter_h == 2) { + return mli_krn_avepool_hwc_sa8_k2x2; + } else if (filter_w == 3 && filter_h == 3) { + return mli_krn_avepool_hwc_sa8_k3x3; + } else { + return mli_krn_avepool_hwc_sa8; + } +} +#else +pooling_func_ptr __attribute__((weak)) +mli_krn_avepool(const mli_pool_cfg* cfg) { + return mli_krn_avepool_hwc_sa8; +} +#endif + +#ifdef MLI_2_0 +pooling_func_ptr __attribute__((weak)) +mli_krn_maxpool(const mli_pool_cfg* cfg) { + int filter_w = cfg->kernel_width; + int filter_h = cfg->kernel_height; + + if (filter_w == 2 && filter_h == 2) { + return mli_krn_maxpool_hwc_sa8_k2x2; + } else if (filter_w == 3 && filter_h == 3) { + return mli_krn_maxpool_hwc_sa8_k3x3; + } else { + return mli_krn_maxpool_hwc_sa8; + } +} +#else +pooling_func_ptr __attribute__((weak)) +mli_krn_maxpool(const mli_pool_cfg* cfg) { + return mli_krn_maxpool_hwc_sa8; +} +#endif + +} // namespace tflite \ No newline at end of file diff --git a/tensorflow/lite/micro/kernels/arc_mli/mli_interface.cc b/tensorflow/lite/micro/kernels/arc_mli/mli_interface.cc new file mode 100644 index 0000000..3a9890b --- /dev/null +++ b/tensorflow/lite/micro/kernels/arc_mli/mli_interface.cc @@ -0,0 +1,155 @@ +/* Copyright 2021 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#include "mli_interface.h" // NOLINT + +#include + +#include "tensorflow/lite/micro/micro_log.h" + +namespace tflite { +namespace ops { +namespace micro { + +#ifndef MLI_2_0 +template <> +int8_t* MliTensorInterface::Data(void) { + TFLITE_DCHECK(tensor_->el_type == MLI_EL_ASYM_I8); + return static_cast(tensor_->data); +} + +template <> +int32_t* MliTensorInterface::Data(void) { + TFLITE_DCHECK(tensor_->el_type == MLI_EL_ASYM_I32); + return static_cast(tensor_->data); +} + +template <> +int32_t* MliTensorInterface::Scale(void) { + return &tensor_->el_params.asym.scale.i32; +} + +template <> +int32_t** MliTensorInterface::Scale(void) { + return &tensor_->el_params.asym.scale.pi32; +} + +template <> +void MliTensorInterface::SetData(int8_t* data, uint32_t capacity) const { + TFLITE_DCHECK(tensor_->el_type == MLI_EL_ASYM_I8); + tensor_->data = data; + tensor_->capacity = capacity; +} + +template <> +void MliTensorInterface::SetData(int32_t* data, uint32_t capacity) const { + TFLITE_DCHECK(tensor_->el_type == MLI_EL_ASYM_I32); + tensor_->data = data; + tensor_->capacity = capacity; +} + +mli_tensor* MliTensorInterface::MliTensor(void) { return tensor_; } + +const mli_tensor* MliTensorInterface::MliTensor(void) const { + return static_cast( + const_cast(this)->MliTensor()); +} + +uint32_t* MliTensorInterface::Rank(void) { return &tensor_->rank; } + +const uint32_t* MliTensorInterface::DataCapacity(void) const { + return &tensor_->capacity; +} + +mli_element_type* MliTensorInterface::ElType(void) { return &tensor_->el_type; } + +template <> +int16_t* MliTensorInterface::ZeroPoint(void) { + return &tensor_->el_params.asym.zero_point.i16; +} + +template <> +int16_t** MliTensorInterface::ZeroPoint(void) { + return &tensor_->el_params.asym.zero_point.pi16; +} + +uint32_t* 
MliTensorInterface::ZeroPointCapacity(void) { return nullptr; } + +int32_t* MliTensorInterface::Dim(void) { return &tensor_->el_params.asym.dim; } + +uint32_t* MliTensorInterface::ScaleCapacity(void) { return nullptr; } + +template <> +int8_t* MliTensorInterface::ScaleFracBits(void) { + return &tensor_->el_params.asym.scale_frac_bits; +} + +uint32_t* MliTensorInterface::ScaleFracBitsCapacity(void) { return nullptr; } + +int32_t* MliTensorInterface::MemStride(void) { return tensor_->mem_stride; } + +uint32_t* MliTensorInterface::Shape(void) { return tensor_->shape; } + +const uint32_t* MliTensorInterface::Shape(void) const { + return static_cast( + const_cast(this)->Shape()); +} + +void MliTensorInterface::SetScale(float fscale) { + int exp; + frexpf(fscale, &exp); + int frac_bits = 31 - exp; + int32_t iscale = (int32_t)((1ll << frac_bits) * fscale + 0.5f); + *(this->ScaleFracBits()) = frac_bits; + *(this->Scale()) = (int32_t)iscale; +} + +void MliTensorInterface::SetScalePerChannel(float* fscale, + const int num_channels) { + int min_frac_bits; + for (int i = 0; i < num_channels; i++) { + int exp; + frexpf(fscale[i], &exp); + int cur_frac_bits = 31 - exp; + if (i == 0) { + min_frac_bits = cur_frac_bits; + } else { + min_frac_bits = + min_frac_bits < cur_frac_bits ? min_frac_bits : cur_frac_bits; + } + } + *this->ScaleFracBits() = min_frac_bits; + + for (int i = 0; i < num_channels; i++) { + int32_t iscale = (int32_t)((1ll << min_frac_bits) * fscale[i] + 0.5f); + (*this->Scale())[i] = iscale; + } +} + +void MliTensorInterface::SetElType(TfLiteType type) { + if (type == kTfLiteInt8) { + *this->ElType() = MLI_EL_ASYM_I8; + } else if (type == kTfLiteInt32) { + *this->ElType() = MLI_EL_ASYM_I32; + } else { + MicroPrintf("Wrong data type. 
Expected int8_t or int32_t."); + TFLITE_ABORT; + } +} +#endif + +} // namespace micro +} // namespace ops +} // namespace tflite diff --git a/tensorflow/lite/micro/kernels/arc_mli/mli_interface.h b/tensorflow/lite/micro/kernels/arc_mli/mli_interface.h new file mode 100644 index 0000000..b4087f3 --- /dev/null +++ b/tensorflow/lite/micro/kernels/arc_mli/mli_interface.h @@ -0,0 +1,75 @@ +/* Copyright 2021 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#ifndef TENSORFLOW_LITE_MICRO_KERNELS_ARC_MLI_INTERFACE_H_ +#define TENSORFLOW_LITE_MICRO_KERNELS_ARC_MLI_INTERFACE_H_ + +#include "mli_api.h" // NOLINT +#include "tensorflow/lite/kernels/internal/tensor_ctypes.h" +namespace tflite { +namespace ops { +namespace micro { + +// Abstracts access to mli_tensor fields to use different versions of MLI +// Library (1.x and 2.x) +// Example: +// ops::micro::MliTensorInterface mli_in = +// ops::micro::MliTensorInterface(static_cast( +// context->AllocatePersistentBuffer(context, sizeof(mli_tensor)))); + +class MliTensorInterface { + public: + // Make sure that lifetime of MliTensorInterface instance isn't bigger than + // related mli_tensor. 
+ MliTensorInterface(mli_tensor* tensor) : tensor_(tensor){}; + MliTensorInterface() = default; + ~MliTensorInterface() = default; + + template + T* Data(); + template + T Scale(); + template + T ZeroPoint(); + template + T ScaleFracBits(); + mli_tensor* MliTensor(); + const mli_tensor* MliTensor() const; + int32_t* Dim(); + uint32_t* Rank(); + uint32_t* Shape(); + const uint32_t* Shape() const; + const uint32_t* DataCapacity() const; + uint32_t* ScaleCapacity(); + mli_element_type* ElType(); + uint32_t* ScaleFracBitsCapacity(); + int32_t* MemStride(); + uint32_t* ZeroPointCapacity(); + + template + void SetData(T* data, uint32_t capacity) const; + void SetScale(float fscale); + void SetScalePerChannel(float* fscale, const int num_channels); + void SetElType(TfLiteType type); + + private: + mli_tensor* tensor_; +}; + +} // namespace micro +} // namespace ops +} // namespace tflite + +#endif // TENSORFLOW_LITE_MICRO_KERNELS_ARC_MLI_SLICERS_H_ diff --git a/tensorflow/lite/micro/kernels/arc_mli/mli_interface_mli_20.cc b/tensorflow/lite/micro/kernels/arc_mli/mli_interface_mli_20.cc new file mode 100644 index 0000000..cef2a6e --- /dev/null +++ b/tensorflow/lite/micro/kernels/arc_mli/mli_interface_mli_20.cc @@ -0,0 +1,164 @@ +/* Copyright 2021 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/ + +#include + +#include "mli_interface.h" // NOLINT +#include "tensorflow/lite/micro/micro_log.h" + +namespace tflite { +namespace ops { +namespace micro { + +#ifdef MLI_2_0 +template <> +int8_t* MliTensorInterface::Data(void) { + TFLITE_DCHECK(tensor_->el_type == MLI_EL_SA_8); + return tensor_->data.mem.pi8; +} + +template <> +int32_t* MliTensorInterface::Data(void) { + TFLITE_DCHECK(tensor_->el_type == MLI_EL_SA_32); + return tensor_->data.mem.pi32; +} + +template <> +int16_t** MliTensorInterface::Scale(void) { + return &tensor_->el_params.sa.scale.mem.pi16; +} + +template <> +int16_t* MliTensorInterface::Scale(void) { + return &tensor_->el_params.sa.scale.mem.i16; +} + +template <> +void MliTensorInterface::SetData(int8_t* data, uint32_t capacity) const { + TFLITE_DCHECK(tensor_->el_type == MLI_EL_SA_8); + tensor_->data.mem.pi8 = data; + tensor_->data.capacity = capacity; +} + +template <> +void MliTensorInterface::SetData(int32_t* data, uint32_t capacity) const { + TFLITE_DCHECK(tensor_->el_type == MLI_EL_SA_32); + tensor_->data.mem.pi32 = data; + tensor_->data.capacity = capacity; +} + +mli_tensor* MliTensorInterface::MliTensor(void) { return tensor_; } + +const mli_tensor* MliTensorInterface::MliTensor(void) const { + return static_cast( + const_cast(this)->MliTensor()); +} + +uint32_t* MliTensorInterface::Rank(void) { return &tensor_->rank; } + +const uint32_t* MliTensorInterface::DataCapacity(void) const { + return &tensor_->data.capacity; +} + +mli_element_type* MliTensorInterface::ElType(void) { return &tensor_->el_type; } + +template <> +int16_t* MliTensorInterface::ZeroPoint(void) { + return &tensor_->el_params.sa.zero_point.mem.i16; +} + +template <> +int16_t** MliTensorInterface::ZeroPoint(void) { + return &tensor_->el_params.sa.zero_point.mem.pi16; +} + +uint32_t* MliTensorInterface::ZeroPointCapacity(void) { + return &tensor_->el_params.sa.zero_point.capacity; +} + 
+int32_t* MliTensorInterface::Dim(void) { return &tensor_->el_params.sa.dim; } + +uint32_t* MliTensorInterface::ScaleCapacity(void) { + return &tensor_->el_params.sa.scale.capacity; +} + +template <> +int8_t** MliTensorInterface::ScaleFracBits(void) { + return &tensor_->el_params.sa.scale_frac_bits.mem.pi8; +} + +template <> +int8_t* MliTensorInterface::ScaleFracBits(void) { + return &tensor_->el_params.sa.scale_frac_bits.mem.i8; +} + +uint32_t* MliTensorInterface::ScaleFracBitsCapacity(void) { + return &tensor_->el_params.sa.scale_frac_bits.capacity; +} + +int32_t* MliTensorInterface::MemStride(void) { return tensor_->mem_stride; } + +uint32_t* MliTensorInterface::Shape(void) { return tensor_->shape; } + +const uint32_t* MliTensorInterface::Shape(void) const { + return static_cast( + const_cast(this)->Shape()); +} + +void MliTensorInterface::SetScale(float fscale) { + int exp; + frexpf(fscale, &exp); + int frac_bits = 15 - exp; + int16_t iscale = (int16_t)((1ll << frac_bits) * fscale + 0.5f); + *(this->Scale()) = (int16_t)iscale; + *(this->ScaleFracBits()) = frac_bits; + *this->ScaleCapacity() = 0; + *this->ScaleFracBitsCapacity() = 0; +} + +void MliTensorInterface::SetScalePerChannel(float* fscale, + const int num_channels) { + for (int i = 0; i < num_channels; i++) { + int exp; + frexpf(fscale[i], &exp); + int cur_frac_bits = 15 - exp; + (*this->ScaleFracBits())[i] = cur_frac_bits; + } + + for (int i = 0; i < num_channels; i++) { + int16_t iscale = + (int16_t)((1ll << (*this->ScaleFracBits())[i]) * fscale[i] + + 0.5f); + (*this->Scale())[i] = iscale; + } + *this->ScaleCapacity() = num_channels * sizeof(int16_t); + *this->ScaleFracBitsCapacity() = num_channels * sizeof(int8_t); +} + +void MliTensorInterface::SetElType(TfLiteType type) { + if (type == kTfLiteInt8) { + *this->ElType() = MLI_EL_SA_8; + } else if (type == kTfLiteInt32) { + *this->ElType() = MLI_EL_SA_32; + } else { + MicroPrintf("Wrong data type. 
Expected int8_t or int32_t."); + TFLITE_ABORT; + } +} +#endif + +} // namespace micro +} // namespace ops +} // namespace tflite diff --git a/tensorflow/lite/micro/kernels/arc_mli/mli_slicers.cc b/tensorflow/lite/micro/kernels/arc_mli/mli_slicers.cc new file mode 100644 index 0000000..905c6fe --- /dev/null +++ b/tensorflow/lite/micro/kernels/arc_mli/mli_slicers.cc @@ -0,0 +1,126 @@ +/* Copyright 2020 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#include "mli_slicers.h" // NOLINT + +#include + +namespace tflite { +namespace ops { +namespace micro { + +TensorSlicer::TensorSlicer(const mli_tensor* full_tensor, int slice_dim, + int slice_size, int padding_pre, int padding_post, + int overlap, bool interleave_mode) + : full_tensor_(full_tensor), + sub_tensor_{}, + sub_cfg_{}, + done_(false), + sliceDim_(slice_dim), + pad_pre_(padding_pre), + pad_post_(padding_post), + overlap_(overlap) { + /* In the interleave mode, the slicing happens from the deepest dimension up + to the slice_dim for example in an HWC layout this can mode can be used to + slice in the C dimenstion. 
in this mode the data is not contiguous in memory + anymore */ + if (interleave_mode) { + for (int i = 0; i < static_cast(full_tensor->rank); i++) { + if (i > slice_dim) { + sub_cfg_.size[i] = 1; + } else if (i == slice_dim) { + sub_cfg_.size[i] = slice_size; + } else { + sub_cfg_.size[i] = full_tensor->shape[i]; + } + } + sub_cfg_.sub_tensor_rank = full_tensor->rank; + + } else { + /* In the not interleaved mode, the slicing happens from the outer most + dimension up to the slice_dim for example in an HWC layout this mode can be + used to slice in the H dimension. in this mode the data of the slice is + still contiguous in memory (if that was the case in the input tensor */ + for (int i = 0; i < static_cast(full_tensor->rank); i++) { + if (i < slice_dim) { + sub_cfg_.size[i] = 1; + } else if (i == slice_dim) { + sub_cfg_.size[i] = slice_size; + } else { + sub_cfg_.size[i] = full_tensor->shape[i]; + } + } + sub_cfg_.sub_tensor_rank = full_tensor->rank - slice_dim; + } + + ComputeSubTensor(); +} + +void TensorSlicer::ComputeSubTensor(void) { + // subtsr_cfg_ is used to keep track of the iteration. + // A copy is created to update it with the correct clipping and padding for + // the current slice + mli_sub_tensor_cfg cfg_new = sub_cfg_; + + // begin and end spans the complete input region including padding areas. + const int begin = (int)sub_cfg_.offset[sliceDim_] - pad_pre_; + // end is clipped to the end of the full input region. this is needed for + // cases where the last slice is smaller than the rest. 
+ const int end = std::min(begin + sub_cfg_.size[sliceDim_] + overlap_, + full_tensor_->shape[sliceDim_] + pad_post_); + // The start coordinate of the subtensor is clipped to zero + cfg_new.offset[sliceDim_] = std::max(begin, 0); + // and the stop coordinate is clipped to the size of the full tensor + const int stop_coord = + std::min(end, static_cast(full_tensor_->shape[sliceDim_])); + // compute the size of the subtensor + cfg_new.size[sliceDim_] = stop_coord - cfg_new.offset[sliceDim_]; + + // compute the padding configuration for the current slice. + actual_padding_pre = cfg_new.offset[sliceDim_] - begin; + actual_padding_post = end - stop_coord; + + mli_hlp_create_subtensor(full_tensor_, &cfg_new, &sub_tensor_); +} + +void TensorSlicer::Next(void) { + for (int i = full_tensor_->rank - 1; i >= 0; i--) { + sub_cfg_.offset[i] += sub_cfg_.size[i]; + if (sub_cfg_.offset[i] >= full_tensor_->shape[i]) { + // wrap + sub_cfg_.offset[i] = 0; + // and continue to the next dimension, if no next dimension we are done. + if (i == 0) done_ = true; + continue; + } else { + // carry is false, so break from the loop + break; + } + } + + if (!done_) ComputeSubTensor(); +} + +bool TensorSlicer::Done(void) { return done_; } + +int TensorSlicer::GetPaddingPre(void) { return actual_padding_pre; } + +int TensorSlicer::GetPaddingPost(void) { return actual_padding_post; } + +mli_tensor* TensorSlicer::Sub(void) { return &sub_tensor_; } + +} // namespace micro +} // namespace ops +} // namespace tflite diff --git a/tensorflow/lite/micro/kernels/arc_mli/mli_slicers.h b/tensorflow/lite/micro/kernels/arc_mli/mli_slicers.h new file mode 100644 index 0000000..b21a5b6 --- /dev/null +++ b/tensorflow/lite/micro/kernels/arc_mli/mli_slicers.h @@ -0,0 +1,56 @@ +/* Copyright 2020 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. 
+You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#ifndef TENSORFLOW_LITE_MICRO_KERNELS_ARC_MLI_SLICERS_H_ +#define TENSORFLOW_LITE_MICRO_KERNELS_ARC_MLI_SLICERS_H_ + +#include "mli_api.h" // NOLINT +namespace tflite { +namespace ops { +namespace micro { + +class TensorSlicer { + public: + TensorSlicer(const mli_tensor* full_tensor, int slice_dim, int slice_size, + int padding_pre = 0, int padding_post = 0, int overlap = 0, + bool interleave_mode = false); + ~TensorSlicer() = default; + + void Next(); + bool Done(); + int GetPaddingPre(); + int GetPaddingPost(); + + mli_tensor* Sub(); + + // Default constructor is deleted + TensorSlicer() = delete; + + private: + const mli_tensor* full_tensor_; + mli_tensor sub_tensor_; + mli_sub_tensor_cfg sub_cfg_; + bool done_; + int sliceDim_; + int pad_pre_, pad_post_, overlap_; + int actual_padding_pre, actual_padding_post; + + void ComputeSubTensor(); +}; + +} // namespace micro +} // namespace ops +} // namespace tflite +#endif // TENSORFLOW_LITE_MICRO_KERNELS_ARC_MLI_SLICERS_H_ diff --git a/tensorflow/lite/micro/kernels/arc_mli/mli_tf_utils.h b/tensorflow/lite/micro/kernels/arc_mli/mli_tf_utils.h new file mode 100644 index 0000000..6e4e16e --- /dev/null +++ b/tensorflow/lite/micro/kernels/arc_mli/mli_tf_utils.h @@ -0,0 +1,310 @@ +/* Copyright 2021 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. 
+You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#ifndef TENSORFLOW_LITE_MICRO_KERNELS_ARC_MLI_TF_UTILS_H_ +#define TENSORFLOW_LITE_MICRO_KERNELS_ARC_MLI_TF_UTILS_H_ + +#include "mli_api.h" // NOLINT +#include "mli_interface.h" +#include "tensorflow/lite/kernels/internal/common.h" +#include "tensorflow/lite/kernels/internal/tensor_ctypes.h" +#include "tensorflow/lite/micro/kernels/kernel_util.h" +#include "tensorflow/lite/micro/micro_log.h" + +#define KRNL_C_DIM_NHWC 0 // output channels + +namespace tflite { +namespace ops { +namespace micro { + +inline void ConvertToMliTensorData(const TfLiteTensor* tfT, + MliTensorInterface* mliT, + bool is_bias_tensor) { + // Data is NULL until MliTensorAttachBuffer is called. + mliT->SetElType(tfT->type); + if (tfT->type == kTfLiteInt8) { + mliT->SetData(nullptr, tfT->bytes); + } else if (tfT->type == kTfLiteInt32) { + mliT->SetData(nullptr, tfT->bytes); + } else { + MicroPrintf("Wrong data type. Expected int8_t or int32_t."); + TFLITE_ABORT; + } + const int32_t dims_count = GetTensorShape(tfT).DimensionsCount(); + *mliT->Rank() = is_bias_tensor ? 
1 : dims_count; + + int mli_tensor_memstride = 1; + if (is_bias_tensor) { + mliT->Shape()[0] = GetTensorShape(tfT).Dims(dims_count - 1); + mliT->MemStride()[0] = mli_tensor_memstride; + } else { + for (int i = dims_count - 1; i >= 0; --i) { + mliT->Shape()[i] = GetTensorShape(tfT).Dims(i); + mliT->MemStride()[i] = mli_tensor_memstride; + mli_tensor_memstride *= GetTensorShape(tfT).Dims(i); + } + } +} + +inline void ConvertToMliQuantParams(const TfLiteTensor* tfT, + MliTensorInterface* mliT) { + *mliT->Dim() = -1; +#ifdef MLI_2_0 + *mliT->ZeroPointCapacity() = 0; +#endif + *mliT->ZeroPoint() = tfT->params.zero_point; + float fscale = tfT->params.scale; + mliT->SetScale(fscale); +} + +inline void ConvertToMliQuantParamsPerChannel(const TfLiteTensor* tfT, + MliTensorInterface* mliT, + bool is_bias_tensor) { + // mli tensor scale and zero_point arrays should be allocated at this point +#ifdef MLI_2_0 + TFLITE_DCHECK_NE(*mliT->Scale(), 0); + TFLITE_DCHECK_NE(*mliT->ZeroPoint(), 0); +#else + TFLITE_DCHECK_NE(*mliT->Scale(), 0); + TFLITE_DCHECK_NE(*mliT->ZeroPoint(), 0); +#endif + + // get per channel quantization parameters + const auto* affine_quantization = + reinterpret_cast(tfT->quantization.params); + int32_t quantized_dimension = + is_bias_tensor ? 
0 : affine_quantization->quantized_dimension; + const int num_channels = mliT->Shape()[quantized_dimension]; + + *mliT->Dim() = quantized_dimension; + + // set capacities +#ifdef MLI_2_0 + *mliT->ScaleFracBitsCapacity() = num_channels * sizeof(int8_t); + *mliT->ScaleCapacity() = num_channels * sizeof(int16_t); + *mliT->ZeroPointCapacity() = num_channels * sizeof(int16_t); +#endif + float* fscale = affine_quantization->scale->data; + mliT->SetScalePerChannel(fscale, num_channels); + +#ifdef MLI_2_0 + int16_t* zero_point = *mliT->ZeroPoint(); + for (int i = 0; i < num_channels; i++) { + zero_point[i] = tfT->params.zero_point; + } +#endif +} + +template +inline void MliTensorAttachBuffer(const TfLiteEvalTensor*, + const MliTensorInterface*); + +template <> +inline void MliTensorAttachBuffer(const TfLiteEvalTensor* tfT, + const MliTensorInterface* mliT) { + // "const_cast" here used to attach const data buffer to the initially + // non-const mli_tensor. This is required by current implementation of MLI + // backend and planned for redesign due to this and some other aspects. + mliT->SetData( + const_cast(tflite::micro::GetTensorData(tfT)), + *mliT->DataCapacity()); +} + +template <> +inline void MliTensorAttachBuffer(const TfLiteEvalTensor* tfT, + const MliTensorInterface* mliT) { + // "const_cast" here used to attach const data buffer to the initially + // non-const mli_tensor. This is required by current implementation of MLI + // backend and planned for redesign due to this and some other aspects. 
+ mliT->SetData( + const_cast(tflite::micro::GetTensorData(tfT)), + *mliT->DataCapacity()); +} + +inline void ConvertToMliTensor(const TfLiteTensor* tfT, + MliTensorInterface* mliT) { + ConvertToMliTensorData(tfT, mliT, false); + ConvertToMliQuantParams(tfT, mliT); +} + +inline void ConvertToMliTensorPerChannel(const TfLiteTensor* tfT, + MliTensorInterface* mliT, + bool is_bias_tensor) { + ConvertToMliTensorData(tfT, mliT, is_bias_tensor); + ConvertToMliQuantParamsPerChannel(tfT, mliT, is_bias_tensor); +} + +inline void PrepareLocalTensor(mli_tensor* tensor, mli_tensor* tensor_local) { +#ifdef MLI_2_0 + int8_t* local_data = tensor_local->data.mem.pi8; + *tensor_local = *tensor; + tensor_local->data.mem.pi8 = local_data; +#else + int8_t* local_data = static_cast(tensor_local->data); + *tensor_local = *tensor; + tensor_local->data = local_data; +#endif +} + +inline void AdjustBiasTensor(MliTensorInterface* bias, MliTensorInterface* in, + MliTensorInterface* weights) { + int32_t quantized_dimension = *bias->Dim(); + const int num_channels = + quantized_dimension < 0 ? 1 : bias->Shape()[quantized_dimension]; + for (int i = 0; i < num_channels; i++) { + int32_t adjusted_bias_scale = + (*in->Scale()) * (*weights->Scale())[i]; + int in_shift = *in->ScaleFracBits(); + int w_shift = (*weights->ScaleFracBits())[i]; + int b_shift = (*bias->ScaleFracBits())[i]; + int bias_shift = in_shift + w_shift - b_shift; + (*bias->Scale())[i] = + (int16_t)(adjusted_bias_scale >> bias_shift); + } +} + +#ifdef MLI_2_0_KRNL_TEST +// Reorder an array according to given indexes. If backward is true, order of +// index array must be reversed. 
+inline static void reorder(uint32_t* arr, const uint8_t index[], + bool backward) { + uint32_t temp[MLI_MAX_RANK]; + for (int8_t i = 0; i < MLI_MAX_RANK; i++) { + if (backward) + temp[index[i]] = arr[i]; + else + temp[i] = arr[index[i]]; + } + for (int8_t i = 0; i < MLI_MAX_RANK; i++) { + arr[i] = temp[i]; + } +} + +// Change shape of mli tensor and recalculate mem strides. +inline void change_shape(mli_tensor* mliT, const uint8_t dim_order[]) { + reorder(mliT->shape, dim_order, false); + + // Calculate strides for new layout + int mli_tensor_memstride = 1; + for (int shape_idx = mliT->rank - 1; shape_idx >= 0; --shape_idx) { + mliT->mem_stride[shape_idx] = mli_tensor_memstride; + mli_tensor_memstride *= mliT->shape[shape_idx]; + } +} + +inline void permute_weights(const mli_tensor* weights_src, + const mli_permute_cfg* permute_cfg, + mli_tensor* weights_dst, + mli_data_container* buffer_data) { + mli_tensor buffer = {}; + buffer.el_params = weights_dst->el_params; + buffer.data = *buffer_data; + // Compare weights tensor size and avaliable buffer capacity. + int buffer_size = buffer_data->capacity; + int weights_size = mli_hlp_count_elem_num(weights_src, 0) * + mli_hlp_tensor_element_size(weights_src); + + // Need to change shape of distanation weights buffer according to permute + // dimensions order to calculate slice sizes + change_shape(weights_dst, permute_cfg->perm_dim); + + if (buffer_size >= weights_size) { + mli_mov_cfg_t copy_config; + mli_mov_cfg_for_copy(©_config); + mli_mov_tensor_sync(weights_src, ©_config, &buffer); + mli_krn_permute_sa8(&buffer, permute_cfg, weights_dst); + } else { + // Weights shape is NHWC and output (buffer) shape is HWC where N_w = C_o. + // Buffer size (H_o * W_o) must be more or equal then the weights size (H_w + // * W_w * C_w). So, this is the reason, why buffer size (output tensor) is + // divided by channel shape. 
+ uint32_t slice_size = buffer_size / weights_src->shape[KRNL_C_DIM_NHWC]; + + mli_mov_cfg_t copy_config = {}; + uint32_t src_offsets[] = {0, 0, 0, 0}; + uint32_t src_sizes[] = {0, 0, 0, 0}; + int dst_mem_stride[] = {0, 0, 0, 0}; + + mli_tensor weights_dst_sub_tensor; + mli_sub_tensor_cfg sub_tensor_cfg = {}; + sub_tensor_cfg.sub_tensor_rank = weights_src->rank; + + // Calculate dimensions for slice accroding to buffer capacity. + // Now, after calling change_shape() function, dst weights buffer has the + // MLI layout (HWCN). This means, the innermost dimension (N) of dst weights + // tensor is equal to the innermost dimension of output tensor (N). + sub_tensor_cfg.size[weights_dst->rank - 1] = + src_sizes[weights_dst->rank - 1] = weights_src->shape[KRNL_C_DIM_NHWC]; + // Now need to calculate other shapes for weights slice. Total slice size is + // H*W*C*N, so to calculate sizes for each axis, avaliable slice size is + // divided by shape for each axis. + uint32_t slice_size_left = slice_size; + for (uint32_t i = 0; i < weights_dst->rank - 1; i++) { + sub_tensor_cfg.size[i] = src_sizes[i] = + slice_size_left / weights_dst->shape[i] > 0 ? weights_dst->shape[i] + : slice_size_left; + slice_size_left /= weights_dst->shape[i]; + slice_size_left = slice_size_left > 0 ? slice_size_left : 1; + } + // Need to reorder src tensor sizes because it is still in TFLM format + // (NHWC) and src_sizes array calculated as (HWCN). 
+ reorder(src_sizes, permute_cfg->perm_dim, true); + + sub_tensor_cfg.offset[KRNL_C_DIM_HWCN] = src_offsets[KRNL_H_DIM_HWCN] = 0; + sub_tensor_cfg.offset[KRNL_H_DIM_HWCN] = src_offsets[KRNL_W_DIM_HWCN] = 0; + sub_tensor_cfg.offset[KRNL_W_DIM_HWCN] = src_offsets[KRNL_D_DIM_HWCN] = 0; + sub_tensor_cfg.offset[KRNL_D_DIM_HWCN] = src_offsets[KRNL_C_DIM_HWCN] = 0; + do { + do { + do { + do { + mli_mov_cfg_for_slice(©_config, (int*)src_offsets, + (int*)src_sizes, dst_mem_stride); + mli_mov_tensor_sync(weights_src, ©_config, &buffer); + + mli_hlp_create_subtensor(weights_dst, &sub_tensor_cfg, + &weights_dst_sub_tensor); + mli_krn_permute_sa8(&buffer, permute_cfg, &weights_dst_sub_tensor); + + // For each axis, it is necessary to recalculate the offsets and + // slice sizes. + sub_tensor_cfg.offset[2] = src_offsets[3] += src_sizes[3]; + src_sizes[3] = + std::min(src_sizes[3], weights_src->shape[3] - src_offsets[3]); + } while (src_offsets[3] < weights_src->shape[3]); + + sub_tensor_cfg.offset[1] = src_offsets[2] += src_sizes[2]; + src_sizes[2] = + std::min(src_sizes[2], weights_src->shape[2] - src_offsets[2]); + } while (src_offsets[2] < weights_src->shape[2]); + + sub_tensor_cfg.offset[0] = src_offsets[1] += src_sizes[1]; + src_sizes[1] = + std::min(src_sizes[1], weights_src->shape[1] - src_offsets[1]); + } while (src_offsets[1] < weights_src->shape[1]); + + sub_tensor_cfg.offset[3] = src_offsets[0] += src_sizes[0]; + src_sizes[0] = + std::min(src_sizes[0], weights_src->shape[0] - src_offsets[0]); + } while (src_offsets[0] < weights_src->shape[0]); + } +} +#endif + +} // namespace micro +} // namespace ops +} // namespace tflite + +#endif // TENSORFLOW_LITE_MICRO_KERNELS_ARC_MLI_TF_UTILS_H_ diff --git a/tensorflow/lite/micro/kernels/arc_mli/pooling.cc b/tensorflow/lite/micro/kernels/arc_mli/pooling.cc new file mode 100644 index 0000000..104ec31 --- /dev/null +++ b/tensorflow/lite/micro/kernels/arc_mli/pooling.cc @@ -0,0 +1,419 @@ +/* Copyright 2021 The TensorFlow Authors. 
All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ +#include "tensorflow/lite/kernels/internal/reference/pooling.h" + +#include "mli_api.h" // NOLINT +#include "tensorflow/lite/c/builtin_op_data.h" +#include "tensorflow/lite/kernels/internal/reference/integer_ops/pooling.h" +#include "tensorflow/lite/kernels/internal/tensor_ctypes.h" +#include "tensorflow/lite/kernels/kernel_util.h" +#include "tensorflow/lite/kernels/padding.h" +#include "tensorflow/lite/micro/kernels/arc_mli/mli_function_specializations.h" +#include "tensorflow/lite/micro/kernels/arc_mli/mli_slicers.h" +#include "tensorflow/lite/micro/kernels/arc_mli/mli_tf_utils.h" +#include "tensorflow/lite/micro/kernels/arc_mli/scratch_buf_mgr.h" +#include "tensorflow/lite/micro/kernels/arc_mli/scratch_buffers.h" +#include "tensorflow/lite/micro/kernels/kernel_util.h" +#include "tensorflow/lite/micro/micro_log.h" + +namespace tflite { + +namespace { + +constexpr int kInputTensor = 0; +constexpr int kOutputTensor = 0; + +struct OpData { + TfLitePaddingValues padding; + int32_t activation_min; + int32_t activation_max; + float activation_min_f32; + float activation_max_f32; + + // The result of checking if MLI optimized version of tensors can be used. + bool is_mli_applicable; + + // Tensors in MLI format. 
+ mutable ops::micro::MliTensorInterface mli_in; + mutable ops::micro::MliTensorInterface mli_out; + mli_pool_cfg* cfg; + + // Pointer to the mli convolution function. + pooling_func_ptr p_mli_krn_avepool_hwc_sa8; + pooling_func_ptr p_mli_krn_maxpool_hwc_sa8; +}; + +enum MliPoolingType { AveragePooling = 0, MaxPooling = 1 }; + +bool IsMliApplicable(TfLiteContext* context, const TfLiteTensor* input, + const TfLitePoolParams* params) { + // MLI optimized version only supports int8_t datatype and no fused Relu + return (input->type == kTfLiteInt8 && params->activation == kTfLiteActNone); +} + +TfLiteStatus CalculateOpData(TfLiteContext* context, + const TfLitePoolParams* params, + const TfLiteTensor* input, + const TfLiteTensor* output, OpData* data) { + // input: batch, height, width, channel + int height = SizeOfDimension(input, 1); + int width = SizeOfDimension(input, 2); + + int out_height, out_width; + + data->padding = ComputePaddingHeightWidth( + params->stride_height, params->stride_width, + /*dilation_rate_height=*/1, + /*dilation_rate_width=*/1, height, width, params->filter_height, + params->filter_width, params->padding, &out_height, &out_width); + return kTfLiteOk; +} + +void* Init(TfLiteContext* context, const char* buffer, size_t length) { + TFLITE_DCHECK(context->AllocatePersistentBuffer != nullptr); + return context->AllocatePersistentBuffer(context, sizeof(OpData)); +} + +TfLiteStatus Prepare(TfLiteContext* context, TfLiteNode* node) { + TFLITE_DCHECK(node->builtin_data != nullptr); + auto* params = reinterpret_cast(node->builtin_data); + + TFLITE_DCHECK(node->user_data != nullptr); + OpData* data = static_cast(node->user_data); + + MicroContext* micro_context = GetMicroContext(context); + + TfLiteTensor* input = + micro_context->AllocateTempInputTensor(node, kInputTensor); + TF_LITE_ENSURE(context, input != nullptr); + TfLiteTensor* output = + micro_context->AllocateTempOutputTensor(node, kOutputTensor); + TF_LITE_ENSURE(context, output != nullptr); 
+ + data->is_mli_applicable = IsMliApplicable(context, input, params); + + TF_LITE_ENSURE_STATUS(CalculateOpData(context, params, input, output, data)); + + if (input->type == kTfLiteFloat32) { + CalculateActivationRange(params->activation, &data->activation_min_f32, + &data->activation_max_f32); + } else if (input->type == kTfLiteInt8) { + CalculateActivationRangeQuantized(context, params->activation, output, + &data->activation_min, + &data->activation_max); + } + + if (data->is_mli_applicable) { + data->mli_in = ops::micro::MliTensorInterface(static_cast( + context->AllocatePersistentBuffer(context, sizeof(mli_tensor)))); + data->mli_out = ops::micro::MliTensorInterface(static_cast( + context->AllocatePersistentBuffer(context, sizeof(mli_tensor)))); + data->cfg = static_cast( + context->AllocatePersistentBuffer(context, sizeof(mli_pool_cfg))); + + ops::micro::ConvertToMliTensor(input, &data->mli_in); + ops::micro::ConvertToMliTensor(output, &data->mli_out); + + data->cfg->kernel_width = params->filter_width; + data->cfg->kernel_height = params->filter_height; + data->cfg->stride_width = params->stride_width; + data->cfg->stride_height = params->stride_height; + + if (params->padding == kTfLitePaddingValid) { + data->cfg->padding_left = 0; + data->cfg->padding_right = 0; + data->cfg->padding_top = 0; + data->cfg->padding_bottom = 0; + } else { + data->cfg->padding_left = data->padding.width; + data->cfg->padding_right = + data->padding.width + data->padding.width_offset; + data->cfg->padding_top = data->padding.height; + data->cfg->padding_bottom = + data->padding.height + data->padding.height_offset; + } + + // Choose pooling mli specialized functions. 
+ data->p_mli_krn_avepool_hwc_sa8 = mli_krn_avepool(data->cfg); + data->p_mli_krn_maxpool_hwc_sa8 = mli_krn_maxpool(data->cfg); + } + + micro_context->DeallocateTempTfLiteTensor(input); + micro_context->DeallocateTempTfLiteTensor(output); + return kTfLiteOk; +} + +void AverageEvalFloat(TfLiteContext* context, const TfLiteNode* node, + const TfLitePoolParams* params, const OpData& data, + const TfLiteEvalTensor* input, TfLiteEvalTensor* output) { +#if !defined(TF_LITE_STRIP_REFERENCE_IMPL) + float activation_min, activation_max; + CalculateActivationRange(params->activation, &activation_min, + &activation_max); + + PoolParams op_params; + op_params.stride_height = params->stride_height; + op_params.stride_width = params->stride_width; + op_params.filter_height = params->filter_height; + op_params.filter_width = params->filter_width; + op_params.padding_values.height = data.padding.height; + op_params.padding_values.width = data.padding.width; + op_params.float_activation_min = activation_min; + op_params.float_activation_max = activation_max; + reference_ops::AveragePool(op_params, tflite::micro::GetTensorShape(input), + tflite::micro::GetTensorData(input), + tflite::micro::GetTensorShape(output), + tflite::micro::GetTensorData(output)); +#else + MicroPrintf("Type %s (%d) is not supported by ARC MLI Library.", + TfLiteTypeGetName(input->type), input->type); +#endif +} + +// Prepare MLI tensors and run Average or Max Pooling +TfLiteStatus EvalMli(TfLiteContext* context, const TfLitePoolParams* params, + const OpData& data, const TfLiteEvalTensor* input, + TfLiteEvalTensor* output, + const MliPoolingType pooling_type) { + mli_pool_cfg cfg_local = *data.cfg; + + ops::micro::MliTensorAttachBuffer(input, &data.mli_in); + ops::micro::MliTensorAttachBuffer(output, &data.mli_out); + + const int height_dimension = 1; + int in_slice_height = 0; + int out_slice_height = 0; + const int overlap = cfg_local.kernel_height - cfg_local.stride_height; + + // Tensors for data in fast 
(local) memory and config to copy data from + // external to local memory + mli_tensor in_local = *data.mli_in.MliTensor(); + mli_tensor out_local = *data.mli_out.MliTensor(); + + ops::micro::MliTensorInterface in_local_interface(&in_local); + ops::micro::MliTensorInterface out_local_interface(&out_local); + + mli_mov_cfg_t copy_config; + mli_mov_cfg_for_copy(©_config); + TF_LITE_ENSURE_STATUS(get_arc_scratch_buffer_for_pooling_tensors( + context, &in_local_interface, &out_local_interface)); + + bool in_is_local = + in_local_interface.Data() == data.mli_in.Data(); + bool out_is_local = + out_local_interface.Data() == data.mli_out.Data(); + + TF_LITE_ENSURE_STATUS(arc_scratch_buffer_calc_slice_size_io( + &in_local_interface, &out_local_interface, cfg_local.kernel_height, + cfg_local.stride_height, cfg_local.padding_top, cfg_local.padding_bottom, + &in_slice_height, &out_slice_height)); + + /* mli_in tensor contains batches of HWC tensors. so it is a 4 dimensional + tensor. because the mli kernel will process one HWC tensor at a time, the 4 + dimensional tensor needs to be sliced into nBatch 3 dimensional tensors. on + top of that there could be a need to also slice in the Height dimension. + for that the sliceHeight has been calculated. The tensor slicer is + configured that it will completely slice the nBatch dimension (0) and slice + the height dimension (1) in chunks of 'sliceHeight' */ + ops::micro::TensorSlicer in_slice(data.mli_in.MliTensor(), height_dimension, + in_slice_height, cfg_local.padding_top, + cfg_local.padding_bottom, overlap); + ops::micro::TensorSlicer out_slice(data.mli_out.MliTensor(), height_dimension, + out_slice_height); + + /* is_local indicates that the tensor is already in local memory, + so in that case the original tensor can be used, + and there is no need to copy it to the local tensor*/ + mli_tensor* in_ptr = in_is_local ? in_slice.Sub() : &in_local; + mli_tensor* out_ptr = out_is_local ? 
out_slice.Sub() : &out_local; + + while (!out_slice.Done()) { + if (!out_is_local) { + ops::micro::PrepareLocalTensor(out_slice.Sub(), &out_local); + ops::micro::PrepareLocalTensor(in_slice.Sub(), &in_local); + } + cfg_local.padding_top = in_slice.GetPaddingPre(); + cfg_local.padding_bottom = in_slice.GetPaddingPost(); + + mli_mov_tensor_sync(in_slice.Sub(), ©_config, in_ptr); + if (pooling_type == AveragePooling) { + TFLITE_DCHECK(data.p_mli_krn_avepool_hwc_sa8 != nullptr); + data.p_mli_krn_avepool_hwc_sa8(in_ptr, &cfg_local, out_ptr); + } else if (pooling_type == MaxPooling) { + TFLITE_DCHECK(data.p_mli_krn_maxpool_hwc_sa8 != nullptr); + data.p_mli_krn_maxpool_hwc_sa8(in_ptr, &cfg_local, out_ptr); + } + mli_mov_tensor_sync(out_ptr, ©_config, out_slice.Sub()); + + in_slice.Next(); + out_slice.Next(); + } + return kTfLiteOk; +} + +void AverageEvalQuantized(TfLiteContext* context, const TfLiteNode* node, + const TfLitePoolParams* params, const OpData& data, + const TfLiteEvalTensor* input, + TfLiteEvalTensor* output) { +#if !defined(TF_LITE_STRIP_REFERENCE_IMPL) + TFLITE_DCHECK(input->type == kTfLiteInt8); + + PoolParams op_params; + op_params.stride_height = params->stride_height; + op_params.stride_width = params->stride_width; + op_params.filter_height = params->filter_height; + op_params.filter_width = params->filter_width; + op_params.padding_values.height = data.padding.height; + op_params.padding_values.width = data.padding.width; + op_params.quantized_activation_min = data.activation_min; + op_params.quantized_activation_max = data.activation_max; + + reference_integer_ops::AveragePool( + op_params, tflite::micro::GetTensorShape(input), + tflite::micro::GetTensorData(input), + tflite::micro::GetTensorShape(output), + tflite::micro::GetTensorData(output)); +#else + MicroPrintf("Type %s (%d) is not supported by ARC MLI Library.", + TfLiteTypeGetName(input->type), input->type); +#endif +} + +void MaxEvalFloat(TfLiteContext* context, TfLiteNode* node, + 
TfLitePoolParams* params, const OpData& data, + const TfLiteEvalTensor* input, TfLiteEvalTensor* output) { +#if !defined(TF_LITE_STRIP_REFERENCE_IMPL) + tflite::PoolParams op_params; + op_params.stride_height = params->stride_height; + op_params.stride_width = params->stride_width; + op_params.filter_height = params->filter_height; + op_params.filter_width = params->filter_width; + op_params.padding_values.height = data.padding.height; + op_params.padding_values.width = data.padding.width; + op_params.float_activation_min = data.activation_min_f32; + op_params.float_activation_max = data.activation_max_f32; + reference_ops::MaxPool(op_params, tflite::micro::GetTensorShape(input), + tflite::micro::GetTensorData(input), + tflite::micro::GetTensorShape(output), + tflite::micro::GetTensorData(output)); +#else + MicroPrintf( + + "Node configuration or type %s (%d) is not supported by ARC MLI Library.", + TfLiteTypeGetName(input->type), input->type); +#endif +} + +void MaxEvalQuantized(TfLiteContext* context, TfLiteNode* node, + TfLitePoolParams* params, const OpData& data, + const TfLiteEvalTensor* input, TfLiteEvalTensor* output) { +#if !defined(TF_LITE_STRIP_REFERENCE_IMPL) + TFLITE_DCHECK(input->type == kTfLiteInt8); + tflite::PoolParams op_params; + op_params.stride_height = params->stride_height; + op_params.stride_width = params->stride_width; + op_params.filter_height = params->filter_height; + op_params.filter_width = params->filter_width; + op_params.padding_values.height = data.padding.height; + op_params.padding_values.width = data.padding.width; + op_params.quantized_activation_min = data.activation_min; + op_params.quantized_activation_max = data.activation_max; + + reference_integer_ops::MaxPool(op_params, + tflite::micro::GetTensorShape(input), + tflite::micro::GetTensorData(input), + tflite::micro::GetTensorShape(output), + tflite::micro::GetTensorData(output)); +#else + MicroPrintf( + + "Node configuration or type %s (%d) is not supported by ARC MLI 
Library.", + TfLiteTypeGetName(input->type), input->type); +#endif +} + +TfLiteStatus AverageEval(TfLiteContext* context, TfLiteNode* node) { + TFLITE_DCHECK(node->builtin_data != nullptr); + auto* params = reinterpret_cast(node->builtin_data); + + const TfLiteEvalTensor* input = + tflite::micro::GetEvalInput(context, node, kInputTensor); + TfLiteEvalTensor* output = + tflite::micro::GetEvalOutput(context, node, kOutputTensor); + + TFLITE_DCHECK(node->user_data != nullptr); + const OpData& data = *(static_cast(node->user_data)); + + // Inputs and outputs share the same type, guaranteed by the converter. + switch (input->type) { + case kTfLiteFloat32: + AverageEvalFloat(context, node, params, data, input, output); + break; + case kTfLiteInt8: + if (data.is_mli_applicable) { + EvalMli(context, params, data, input, output, AveragePooling); + } else { + AverageEvalQuantized(context, node, params, data, input, output); + } + break; + default: + MicroPrintf("Input type %s is not currently supported", + TfLiteTypeGetName(input->type)); + return kTfLiteError; + } + return kTfLiteOk; +} + +TfLiteStatus MaxEval(TfLiteContext* context, TfLiteNode* node) { + auto* params = reinterpret_cast(node->builtin_data); + + const TfLiteEvalTensor* input = + tflite::micro::GetEvalInput(context, node, kInputTensor); + TfLiteEvalTensor* output = + tflite::micro::GetEvalOutput(context, node, kOutputTensor); + + TFLITE_DCHECK(node->user_data != nullptr); + const OpData& data = *(static_cast(node->user_data)); + + switch (input->type) { + case kTfLiteFloat32: + MaxEvalFloat(context, node, params, data, input, output); + break; + case kTfLiteInt8: + if (data.is_mli_applicable) { + EvalMli(context, params, data, input, output, MaxPooling); + } else { + MaxEvalQuantized(context, node, params, data, input, output); + } + break; + default: + MicroPrintf("Type %s not currently supported.", + TfLiteTypeGetName(input->type)); + return kTfLiteError; + } + return kTfLiteOk; +} + +} // namespace + 
+TFLMRegistration Register_AVERAGE_POOL_2D() { + return tflite::micro::RegisterOp(Init, Prepare, AverageEval); +} + +TFLMRegistration Register_MAX_POOL_2D() { + return tflite::micro::RegisterOp(Init, Prepare, MaxEval); +} + +} // namespace tflite diff --git a/tensorflow/lite/micro/kernels/arc_mli/scratch_buf_mgr.cc b/tensorflow/lite/micro/kernels/arc_mli/scratch_buf_mgr.cc new file mode 100644 index 0000000..ef489fa --- /dev/null +++ b/tensorflow/lite/micro/kernels/arc_mli/scratch_buf_mgr.cc @@ -0,0 +1,392 @@ +/* Copyright 2021 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#include "tensorflow/lite/micro/kernels/arc_mli/scratch_buf_mgr.h" + +#include + +#include + +#include "tensorflow/lite/micro/kernels/arc_mli/scratch_buffers.h" + +namespace tflite { +namespace ops { +namespace micro { + +#if (defined(__Xxy)) || (defined(__Xvdsp)) +static void get_arc_two_buffer_sizes(int request_size_1, int request_size_2, + int* grant_size_1, int* grant_size_2) { + int maxrequest = 0; + int secondrequest = 0; + int maxavailable = 0; + int secondavail = 0; + + // determine the largest requested buffer. + if (request_size_1 > request_size_2) { + maxrequest = request_size_1; + secondrequest = request_size_2; + } else { + maxrequest = request_size_2; + secondrequest = request_size_1; + } + + // find the two largest available buffers. 
+ get_arc_scratch_buffer_two_max_sizes(&maxavailable, &secondavail); + + // in case two buffers are available, the largest buffer can go to the largest + // request. + if (secondavail > 0) { // this condition can be enhanced to prevent cases + // where the second buffer is so small that it is + // better to use one buffer and split it. + if (request_size_1 > request_size_2) { + *grant_size_1 = maxavailable; + *grant_size_2 = secondavail; + } else { + *grant_size_1 = secondavail; + *grant_size_2 = maxavailable; + } + } else { + // In case only one buffer is available, + // use only the max buffer, and split it. + *grant_size_1 = maxavailable / 2; + *grant_size_2 = maxavailable / 2; + } +} + +static TfLiteStatus get_arc_scratch_buffer_for_io_tensors( + TfLiteContext* context, MliTensorInterface* in, MliTensorInterface* out) { + int request_size_in = 0; + int request_size_out = 0; + int grant_size_in = 0; + int grant_size_out = 0; + if (!inside_arc_ccm(in->Data())) { + // In case the input tensor contains multiple batches, it has rank 4 + // because the mli kernel cannot operate on batches, we need to have the + // size of a single HWC tensor. that is why the start_rank is 1 in case of + // input rank 4 + int start_rank = *in->Rank() - 3; + request_size_in = mli_hlp_count_elem_num(in->MliTensor(), start_rank) * + mli_hlp_tensor_element_size(in->MliTensor()); + } + if (!inside_arc_ccm(out->Data())) { + // In case the input tensor contains multiple batches, it has rank 4 + // because the mli kernel cannot operate on batches, we need to have the + // size of a single batch. 
that is why the start_rank is 1 in case of input + // rank 4 + int start_rank = *out->Rank() - 3; + request_size_out = mli_hlp_count_elem_num(out->MliTensor(), start_rank) * + mli_hlp_tensor_element_size(out->MliTensor()); + } + + get_arc_two_buffer_sizes(request_size_in, request_size_out, &grant_size_in, + &grant_size_out); + if (!inside_arc_ccm(in->Data())) { + in->SetData( + static_cast(get_arc_scratch_buffer(grant_size_in)), + grant_size_in); + if (in->Data() == NULL) return kTfLiteError; + } + + if (!inside_arc_ccm(out->Data())) { + out->SetData( + static_cast(get_arc_scratch_buffer(grant_size_out)), + grant_size_out); + if (out->Data() == NULL) return kTfLiteError; + } + + return kTfLiteOk; +} +#endif + +TfLiteStatus get_arc_scratch_buffer_for_conv_tensors( + TfLiteContext* context, MliTensorInterface* in, MliTensorInterface* weights, + MliTensorInterface* bias, MliTensorInterface* out) { + TfLiteStatus ret_val = kTfLiteOk; +#if (defined(__Xxy)) || (defined(__Xvdsp)) + init_arc_scratch_buffers(); + + if (!inside_arc_ccm(bias->Data())) { + uint32_t bias_mem_requirements = + mli_hlp_count_elem_num(bias->MliTensor(), 0) * + mli_hlp_tensor_element_size(bias->MliTensor()); + bias->SetData( + static_cast(get_arc_scratch_buffer(bias_mem_requirements)), + bias_mem_requirements); + } + + if (bias->Data() == NULL) { + int max_bias_size = 0; + get_arc_scratch_buffer_max_size(&max_bias_size); + bias->SetData( + static_cast(get_arc_scratch_buffer(max_bias_size)), + max_bias_size); + if (max_bias_size == 0) ret_val = kTfLiteError; + } + if (bias->Data() == NULL) ret_val = kTfLiteError; + + if (!inside_arc_ccm(weights->Data())) { + int weights_size = mli_hlp_count_elem_num(weights->MliTensor(), 0) * + mli_hlp_tensor_element_size(weights->MliTensor()); + int max_weights_size = 0; + weights->SetData( + static_cast(get_arc_scratch_buffer(weights_size)), + weights_size); + if (weights->Data() == NULL) { + get_arc_scratch_buffer_max_size(&max_weights_size); + weights->SetData( + 
static_cast(get_arc_scratch_buffer(max_weights_size)), + max_weights_size); + if (max_weights_size == 0) ret_val = kTfLiteError; + } + if (weights->Data() == NULL) ret_val = kTfLiteError; + } + + if (ret_val == kTfLiteOk) { + ret_val = get_arc_scratch_buffer_for_io_tensors(context, in, out); + } +#endif + return ret_val; +} + +TfLiteStatus get_arc_scratch_buffer_for_fully_connect_tensors( + TfLiteContext* context, MliTensorInterface* in, MliTensorInterface* weights, + MliTensorInterface* bias, MliTensorInterface* out) { + TfLiteStatus ret_val = kTfLiteOk; + +#if (defined(__Xxy)) || (defined(__Xvdsp)) + init_arc_scratch_buffers(); + + if (!inside_arc_ccm(bias->Data())) { + int bias_mem_requirements = mli_hlp_count_elem_num(bias->MliTensor(), 0) * + mli_hlp_tensor_element_size(bias->MliTensor()); + bias->SetData( + static_cast(get_arc_scratch_buffer(bias_mem_requirements)), + bias_mem_requirements); + } + + if (bias->Data() == NULL) { + int max_bias_size = 0; + get_arc_scratch_buffer_max_size(&max_bias_size); + bias->SetData( + static_cast(get_arc_scratch_buffer(max_bias_size)), + max_bias_size); + if (max_bias_size == 0) ret_val = kTfLiteError; + } + if (bias->Data() == NULL) ret_val = kTfLiteError; + + if (!inside_arc_ccm(weights->Data())) { + int weights_size = mli_hlp_count_elem_num(weights->MliTensor(), 0) * + mli_hlp_tensor_element_size(weights->MliTensor()); + int max_weights_size = 0; + weights->SetData( + static_cast(get_arc_scratch_buffer(weights_size)), + weights_size); + if (weights->Data() == NULL) { + get_arc_scratch_buffer_max_size(&max_weights_size); + weights->SetData( + static_cast(get_arc_scratch_buffer(max_weights_size)), + max_weights_size); + if (max_weights_size == 0) ret_val = kTfLiteError; + } + if (weights->Data() == NULL) ret_val = kTfLiteError; + } + + /* strategy for FC kernels: + first allocate input, because this cannot be sliced. 
(in case of batch + processing, only a single input needs to be allocated) then weights & + bias because if fully loaded, they can be reused over batches. then + output. The number of output channels (for weights slicing) depends on + size of output and size of weights&bias */ + + if (!inside_arc_ccm(in->Data())) { + /* In case the input tensor contains multiple batches, + only count the size if the inner most dimension */ + int size_in = mli_hlp_count_elem_num(in->MliTensor(), *in->Rank() - 1) * + mli_hlp_tensor_element_size(in->MliTensor()); + in->SetData(static_cast(get_arc_scratch_buffer(size_in)), + size_in); + if (in->Data() == NULL) { + in->SetData(nullptr, 0); + ret_val = kTfLiteError; + } + } + if (!inside_arc_ccm(out->Data())) { + /* In case the input tensor contains multiple batches, + only count the size if the inner most dimension */ + int out_size = mli_hlp_count_elem_num(out->MliTensor(), *out->Rank() - 1) * + mli_hlp_tensor_element_size(out->MliTensor()); + int max_out_size = 0; + out->SetData(static_cast(get_arc_scratch_buffer(out_size)), + out_size); + if (out->Data() == NULL) { + get_arc_scratch_buffer_max_size(&max_out_size); + out->SetData( + static_cast(get_arc_scratch_buffer(max_out_size)), + max_out_size); + if (max_out_size == 0) ret_val = kTfLiteError; + } + if (out->Data() == NULL) ret_val = kTfLiteError; + } +#endif + return ret_val; +} + +TfLiteStatus get_arc_scratch_buffer_for_eltwise_tensors( + TfLiteContext* context, MliTensorInterface* in1, MliTensorInterface* in2, + MliTensorInterface* out) { + TfLiteStatus ret_val = kTfLiteOk; +#if (defined(__Xxy)) || (defined(__Xvdsp)) + init_arc_scratch_buffers(); + constexpr int tsr_num = 3; + int in1_size = mli_hlp_count_elem_num(in1->MliTensor(), 0) * + mli_hlp_tensor_element_size(in1->MliTensor()); + int in2_size = mli_hlp_count_elem_num(in2->MliTensor(), 0) * + mli_hlp_tensor_element_size(in2->MliTensor()); + int out_size = mli_hlp_count_elem_num(out->MliTensor(), 0) * + 
mli_hlp_tensor_element_size(out->MliTensor()); + int sizes[tsr_num] = {in1_size, in2_size, out_size}; + MliTensorInterface* in_tensors[tsr_num] = {in1, in2, out}; + for (int i = 0; i < tsr_num; ++i) { + if (!inside_arc_ccm(in_tensors[i]->Data())) { + auto* data_ptr = get_arc_scratch_buffer(sizes[i]); + if (data_ptr == nullptr) { + get_arc_scratch_buffer_max_size(&sizes[i]); + data_ptr = get_arc_scratch_buffer(sizes[i]); + } + if (data_ptr == nullptr || sizes[i] == 0) { + in_tensors[i]->SetData(nullptr, 0); + ret_val = kTfLiteError; + } else { + in_tensors[i]->SetData(static_cast(data_ptr), + sizes[i]); + } + } + } +#endif + return ret_val; +} + +TfLiteStatus arc_scratch_buffer_calc_slice_size_io( + const MliTensorInterface* in, const MliTensorInterface* out, + const int kernel_height, const int stride_height, const int padding_top, + const int padding_bot, int* in_slice_height, int* out_slice_height) { + const int height_dimension = 1; + const int in_height = in->Shape()[height_dimension]; + const int out_height = out->Shape()[height_dimension]; + const int line_size_in = + mli_hlp_count_elem_num(in->MliTensor(), height_dimension + 1) * + mli_hlp_tensor_element_size(in->MliTensor()); + const int line_size_out = + mli_hlp_count_elem_num(out->MliTensor(), height_dimension + 1) * + mli_hlp_tensor_element_size(out->MliTensor()); + int max_lines_in = 0; + int max_lines_out = 0; + int max_out_lines_for_input = 0; + bool fit = + (static_cast(*in->DataCapacity()) >= in_height * line_size_in) && + (static_cast(*out->DataCapacity()) >= out_height * line_size_out); + if (fit) { + // in case both tensors completely fit in the capacity, there is no need + // for slicing. As padding can affect effective input region, we also + // derive it from output height, and rely on a clipping logic which intend + // to reduce last smaller slice. 
I.e the only slice is a kind of "smaller + // last slice that need to be corrected" + *in_slice_height = std::max(in_height, out_height * stride_height); + *out_slice_height = out_height; + } else { + // First compute how many lines fit into the input tensor, and compute how + // many output lines can be computed with that. + max_lines_in = std::min( + in_height, static_cast(*in->DataCapacity()) / line_size_in); + if (max_lines_in >= in_height) { + max_out_lines_for_input = out_height; + } else if (2 * max_lines_in >= in_height) { + // in this case only two slices are needed, so both could benefit from + // padding. take the MIN to get the worst case. + max_out_lines_for_input = + (max_lines_in + std::min(padding_top, padding_bot) - kernel_height + + 1) / + stride_height; + } else { + max_out_lines_for_input = + (max_lines_in - kernel_height + 1) / stride_height; + } + // Then compute how many output lines fit into the output tensor. + max_lines_out = std::min( + out_height, static_cast(*out->DataCapacity()) / line_size_out); + // the smallest of the two determines the slice height for the output, and + // the derived sliceheight for the input. 
+ *out_slice_height = std::min(max_out_lines_for_input, max_lines_out); + *in_slice_height = *out_slice_height * stride_height; + } + + if ((*in_slice_height > 0) && (*out_slice_height > 0)) { + return kTfLiteOk; + } else { + return kTfLiteError; + } +} + +TfLiteStatus arc_scratch_buffer_calc_slice_size_weights( + const MliTensorInterface* weights, const MliTensorInterface* bias, + const int weight_out_ch_dimension, int* slice_channels) { + const int channels = weights->Shape()[weight_out_ch_dimension]; + const int ch_size_w = + (mli_hlp_count_elem_num(weights->MliTensor(), 0) / channels) * + mli_hlp_tensor_element_size(weights->MliTensor()); + const int ch_size_b = + (mli_hlp_count_elem_num(bias->MliTensor(), 0) / channels) * + mli_hlp_tensor_element_size(bias->MliTensor()); + int max_ch_weigths = 0; + int max_ch_bias = 0; + + bool fit = + (static_cast(*weights->DataCapacity()) >= channels * ch_size_w) && + (static_cast(*bias->DataCapacity()) >= channels * ch_size_b); + if (fit) { + // in case both tensors completely fit in the capacity, there is no need + // for slicing + *slice_channels = channels; + } else { + // First compute how many channels fit into the weights tensor + max_ch_weigths = std::min( + channels, static_cast(*weights->DataCapacity()) / ch_size_w); + // Then compute how many channels fit into the bias tensor.
+ max_ch_bias = + std::min(channels, static_cast(*bias->DataCapacity()) / ch_size_b); + // the smallest of the two determines the slice size + *slice_channels = std::min(max_ch_weigths, max_ch_bias); + } + + if (*slice_channels > 0) { + return kTfLiteOk; + } else { + return kTfLiteError; + } +} + +TfLiteStatus get_arc_scratch_buffer_for_pooling_tensors( + TfLiteContext* context, MliTensorInterface* in, MliTensorInterface* out) { +#if (defined(__Xxy)) || (defined(__Xvdsp)) + init_arc_scratch_buffers(); + return get_arc_scratch_buffer_for_io_tensors(context, in, out); +#else + return kTfLiteOk; +#endif +} + +} // namespace micro +} // namespace ops +} // namespace tflite diff --git a/tensorflow/lite/micro/kernels/arc_mli/scratch_buf_mgr.h b/tensorflow/lite/micro/kernels/arc_mli/scratch_buf_mgr.h new file mode 100644 index 0000000..be6dd8f --- /dev/null +++ b/tensorflow/lite/micro/kernels/arc_mli/scratch_buf_mgr.h @@ -0,0 +1,145 @@ +/* Copyright 2021 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/ + +#ifndef TENSORFLOW_LITE_MICRO_ARC_SCRATCH_BUF_MGR_H_ +#define TENSORFLOW_LITE_MICRO_ARC_SCRATCH_BUF_MGR_H_ + +#include "mli_api.h" // NOLINT +#include "mli_interface.h" +#include "tensorflow/lite/c/common.h" + +namespace tflite { +namespace ops { +namespace micro { + +/** + * @brief Function to allocate scratch buffers for the convolution tensors + * + * @detail This function will update the data pointers in the 4 tensors with + * pointers to scratch buffers in fast local memory. + * + * @param context [I] pointer to TfLite context (needed for error handling) + * @param in [IO] pointer to the input tensor + * @param weights [IO] pointer to the weights tensor + * @param bias [IO] pointer to the bias tensor + * @param output [IO] pointer to the output tensor + * + * @return Tf Lite status code + */ +TfLiteStatus get_arc_scratch_buffer_for_conv_tensors( + TfLiteContext* context, MliTensorInterface* in, MliTensorInterface* weights, + MliTensorInterface* bias, MliTensorInterface* out); + +/** + * @brief Function to allocate scratch buffers for pooling kernels with only + * input and output buffers + * + * @detail This function will update the data pointers in the 2 tensors with + * pointers to scratch buffers in fast local memory. + * + * @param context [I] pointer to TfLite context (needed for error handling) + * @param in [IO] pointer to the input tensor + * @param output [IO] pointer to the output tensor + * + * @return Tf Lite status code + */ +TfLiteStatus get_arc_scratch_buffer_for_pooling_tensors( + TfLiteContext* context, MliTensorInterface* in, MliTensorInterface* out); + +/** + * @brief Function to allocate scratch buffers for the fully connect tensors + * + * @detail This function will update the data pointers in the 4 tensors with + * pointers to scratch buffers in fast local memory. 
+ * + * @param context [I] pointer to TfLite context (needed for error handling) + * @param in [IO] pointer to the input tensor + * @param weights [IO] pointer to the weights tensor + * @param bias [IO] pointer to the bias tensor + * @param output [IO] pointer to the output tensor + * + * @return Tf Lite status code + */ +TfLiteStatus get_arc_scratch_buffer_for_fully_connect_tensors( + TfLiteContext* context, MliTensorInterface* in, MliTensorInterface* weights, + MliTensorInterface* bias, MliTensorInterface* out); + +/** + * @brief Function to allocate scratch buffers for the eltwise function tensors + * + * @detail This function will update the data pointers in the 3 tensors with + * pointers to scratch buffers in fast local memory. + * + * @param context [I] pointer to TfLite context (needed for error handling) + * @param in1 [IO] pointer to the first input tensor + * @param in2 [IO] pointer to the second input tensor + * @param output [IO] pointer to the output tensor + * + * @return Tf Lite status code + */ +TfLiteStatus get_arc_scratch_buffer_for_eltwise_tensors( + TfLiteContext* context, MliTensorInterface* in1, MliTensorInterface* in2, + MliTensorInterface* out); + +/** + * @brief Function to calculate slice size for io tensors + * + * @detail This function will calculate the slice size in the height dimension + * for input and output tensors. it takes into account the kernel size and the + * padding. the function will look at the capacity field in the in and out + * tensor to determine the available buffersize.
+ * + * @param in [I] pointer to the input tensor + * @param out [I] pointer to the output tensor + * @param kernelHeight [I] size of the kernel in height dimension + * @param strideHeight [I] input stride in height dimension + * @param padding_top [I] number of lines with zeros at the top + * @param padding_bot [I] number of lines with zeros at the bottom + * @param inSliceHeight [O] slice size in height dimension for the input + * tensor + * @param outSliceHeight [O] slice size in height dimension for the output + * tensor + * + * @return Tf Lite status code + */ +TfLiteStatus arc_scratch_buffer_calc_slice_size_io( + const MliTensorInterface* in, const MliTensorInterface* out, + const int kernelHeight, const int strideHeight, const int padding_top, + const int padding_bot, int* in_slice_height, int* out_slice_height); + +/** + * @brief Function to calculate slice size for weight slicing + * + * @detail This function will calculate the slice size in the output channel + * dimension for weight and bias tensors. the function will look at the capacity + * field in the weights and bias tensor to determine the available buffersize.
+ * + * @param weights [I] pointer to the input tensor + * @param bias [I] pointer to the output tensor + * @param weightOutChDimension [I] dimension of the output channels in the + * weights tensor + * @param sliceChannels [O] slice size in output channel dimension + * + * @return Tf Lite status code + */ +TfLiteStatus arc_scratch_buffer_calc_slice_size_weights( + const MliTensorInterface* weights, const MliTensorInterface* bias, + const int weight_out_ch_dimension, int* slice_channels); + +} // namespace micro +} // namespace ops +} // namespace tflite + +#endif // TENSORFLOW_LITE_MICRO_ARC_SCRATCH_BUF_MGR_H_ diff --git a/tensorflow/lite/micro/kernels/arc_mli/scratch_buffers.cc b/tensorflow/lite/micro/kernels/arc_mli/scratch_buffers.cc new file mode 100644 index 0000000..bf87122 --- /dev/null +++ b/tensorflow/lite/micro/kernels/arc_mli/scratch_buffers.cc @@ -0,0 +1,192 @@ +/* Copyright 2021 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#include "tensorflow/lite/micro/kernels/arc_mli/scratch_buffers.h" + +#include + +namespace tflite { +namespace ops { +namespace micro { + +/* by default use all the XY memory, and half of the DCCM because DCCM is also + * used for the data section and the stack. 
the values can be overruled by + * adding a -D option to the makefile of the application + */ + +#ifdef __Xxy + +#ifndef SCRATCH_MEM_X_SIZE +#ifdef core_config_xy_size +#define SCRATCH_MEM_X_SIZE (core_config_xy_size) +#endif +#endif + +#ifndef SCRATCH_MEM_Y_SIZE +#ifdef core_config_xy_size +#define SCRATCH_MEM_Y_SIZE (core_config_xy_size) +#endif +#endif + +#ifndef SCRATCH_MEM_Z_SIZE +#ifdef core_config_dccm_size +#define SCRATCH_MEM_Z_SIZE ((core_config_dccm_size) / 2) +#endif +#endif + +#elif defined(__Xvdsp) + +#ifndef SCRATCH_MEM_VEC_SIZE +#ifdef core_config_vec_mem_size +#define SCRATCH_MEM_VEC_SIZE ((core_config_vec_mem_size * 3) / 4) +#endif +#endif + +#else + +#define SCRATCH_MEM_SIZE (65536) + +#endif + +namespace { + +#ifdef __Xxy + +#pragma Bss(".Xdata") +static int8_t scratch_mem_x[SCRATCH_MEM_X_SIZE]; +#pragma Bss() + +#pragma Bss(".Ydata") +static int8_t scratch_mem_y[SCRATCH_MEM_Y_SIZE]; +#pragma Bss() + +#pragma Bss(".Zdata") +static int8_t scratch_mem_z[SCRATCH_MEM_Z_SIZE]; +#pragma Bss() + +#elif defined(__Xvdsp) + +#pragma Bss(".vecmem_data") +static int8_t scratch_mem_vec_1[SCRATCH_MEM_VEC_SIZE / 4]; +static int8_t scratch_mem_vec_2[SCRATCH_MEM_VEC_SIZE / 4]; +static int8_t scratch_mem_vec_3[SCRATCH_MEM_VEC_SIZE / 2]; +#pragma Bss() + +#else + +static int8_t scratch_mem_stack[SCRATCH_MEM_SIZE]; + +#endif +} // namespace + +#ifdef __Xxy + +static int8_t* scratch_mem[] = {scratch_mem_x, scratch_mem_y, scratch_mem_z}; +static uint32_t scratch_sizes[] = {SCRATCH_MEM_X_SIZE, SCRATCH_MEM_Y_SIZE, + SCRATCH_MEM_Z_SIZE}; + +#elif defined(__Xvdsp) + +static int8_t* scratch_mem[] = {scratch_mem_vec_1, scratch_mem_vec_2, + scratch_mem_vec_3}; +static uint32_t scratch_sizes[] = {SCRATCH_MEM_VEC_SIZE / 4, + SCRATCH_MEM_VEC_SIZE / 4, + SCRATCH_MEM_VEC_SIZE / 2}; + +#else + +static int8_t* scratch_mem[] = {scratch_mem_stack}; +static uint32_t scratch_sizes[] = {SCRATCH_MEM_SIZE}; + +#endif + +void* get_arc_scratch_buffer(int size) { + // Function to asign fast 
memory from one of 3 scratch buffers. + // Best Fit strategy - memory is allocated from that memory bank that leaves + // the least unused memory. + void* buf = NULL; + int best_mem_idx = -1; + int best_mem_delta = INT_MAX; + const int num_mem = sizeof(scratch_mem) / sizeof(scratch_mem[0]); + // find a local memory that fits the data size. + for (int mem_idx = 0; mem_idx < num_mem; ++mem_idx) { + // Best Fit + if ((size <= static_cast(scratch_sizes[mem_idx])) && + (static_cast(scratch_sizes[mem_idx]) - size < best_mem_delta)) { + best_mem_idx = mem_idx; + best_mem_delta = scratch_sizes[mem_idx] - size; + } + } + if (best_mem_idx >= 0) { + buf = scratch_mem[best_mem_idx]; + scratch_mem[best_mem_idx] += size; + scratch_sizes[best_mem_idx] -= size; + } + return buf; +} + +void get_arc_scratch_buffer_max_size(int* size) { + int maxavailable = 0; + const int num_mem = sizeof(scratch_mem) / sizeof(scratch_mem[0]); + // find the largest available buffer. + for (int i = 0; i < num_mem; i++) { + if (static_cast(scratch_sizes[i]) > maxavailable) { + maxavailable = scratch_sizes[i]; + } + } + *size = maxavailable; +} + +void get_arc_scratch_buffer_two_max_sizes(int* size1, int* size2) { + int maxavailable = 0; + int secondavail = 0; + const int num_mem = sizeof(scratch_mem) / sizeof(scratch_mem[0]); + // find the two largest available buffers. 
+ for (int i = 0; i < num_mem; i++) { + if (static_cast(scratch_sizes[i]) > maxavailable) { + secondavail = maxavailable; + maxavailable = scratch_sizes[i]; + } else if (static_cast(scratch_sizes[i]) > secondavail) { + secondavail = scratch_sizes[i]; + } + } + *size1 = maxavailable; + *size2 = secondavail; +} + +void init_arc_scratch_buffers(void) { +#ifdef __Xxy + scratch_mem[0] = scratch_mem_x; + scratch_mem[1] = scratch_mem_y; + scratch_mem[2] = scratch_mem_z; + scratch_sizes[0] = SCRATCH_MEM_X_SIZE; + scratch_sizes[1] = SCRATCH_MEM_Y_SIZE; + scratch_sizes[2] = SCRATCH_MEM_Z_SIZE; +#elif defined(__Xvdsp) + scratch_mem[0] = scratch_mem_vec_1; + scratch_mem[1] = scratch_mem_vec_2; + scratch_mem[2] = scratch_mem_vec_3; + scratch_sizes[0] = SCRATCH_MEM_VEC_SIZE / 4; + scratch_sizes[1] = SCRATCH_MEM_VEC_SIZE / 4; + scratch_sizes[2] = SCRATCH_MEM_VEC_SIZE / 2; +#else + scratch_mem[0] = scratch_mem_stack; + scratch_sizes[0] = SCRATCH_MEM_SIZE; +#endif +} + +} // namespace micro +} // namespace ops +} // namespace tflite diff --git a/tensorflow/lite/micro/kernels/arc_mli/scratch_buffers.h b/tensorflow/lite/micro/kernels/arc_mli/scratch_buffers.h new file mode 100644 index 0000000..645781b --- /dev/null +++ b/tensorflow/lite/micro/kernels/arc_mli/scratch_buffers.h @@ -0,0 +1,78 @@ +/* Copyright 2021 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/ + +#ifndef TENSORFLOW_LITE_MICRO_ARC_SCRATCH_BUFFERS_H_ +#define TENSORFLOW_LITE_MICRO_ARC_SCRATCH_BUFFERS_H_ + +#include "mli_api.h" // NOLINT +#include "tensorflow/lite/c/common.h" + +namespace tflite { +namespace ops { +namespace micro { + +void init_arc_scratch_buffers(void); +void* get_arc_scratch_buffer(int size); // Function to assign fast memory + // from one of 3 scratch buffers. + +void get_arc_scratch_buffer_max_size(int* size); +void get_arc_scratch_buffer_two_max_sizes(int* size1, int* size2); + +static inline bool inside_arc_dccm(void* p) { +#if core_config_dccm_present + return ((unsigned)p >= core_config_dccm_base) && + ((unsigned)p < core_config_dccm_base + core_config_dccm_size); +#else + return false; +#endif +} + +static inline bool inside_arc_xccm(void* p) { +#if core_config_xy + return ((unsigned)p >= core_config_xy_x_base) && + ((unsigned)p < core_config_xy_x_base + core_config_xy_size); +#else + return false; +#endif +} + +static inline bool inside_arc_yccm(void* p) { +#if core_config_xy_size + return ((unsigned)p >= core_config_xy_y_base) && + ((unsigned)p < core_config_xy_y_base + core_config_xy_size); +#else + return false; +#endif +} + +static inline bool inside_arc_vccm(void* p) { +#if core_config_vec_mem_size + return ((unsigned)p >= core_config_vec_mem_base) && + ((unsigned)p < core_config_vec_mem_base + core_config_vec_mem_size); +#else + return false; +#endif +} + +static inline bool inside_arc_ccm(void* p) { + return inside_arc_dccm(p) || inside_arc_xccm(p) || inside_arc_yccm(p) || + inside_arc_vccm(p); +} + +} // namespace micro +} // namespace ops +} // namespace tflite + +#endif // TENSORFLOW_LITE_MICRO_ARC_SCRATCH_BUFFERS_H_ diff --git a/tensorflow/lite/micro/kernels/arg_min_max.cc b/tensorflow/lite/micro/kernels/arg_min_max.cc new file mode 100644 index 0000000..2ba058c --- /dev/null +++ b/tensorflow/lite/micro/kernels/arg_min_max.cc @@ -0,0 
+1,118 @@ +/* Copyright 2022 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#include "tensorflow/lite/kernels/internal/reference/arg_min_max.h" + +#include "tensorflow/lite/c/builtin_op_data.h" +#include "tensorflow/lite/c/common.h" +#include "tensorflow/lite/kernels/internal/reference/comparisons.h" +#include "tensorflow/lite/kernels/internal/tensor_ctypes.h" +#include "tensorflow/lite/kernels/kernel_util.h" +#include "tensorflow/lite/micro/kernels/kernel_util.h" +#include "tensorflow/lite/micro/micro_log.h" + +namespace tflite { + +namespace { + +constexpr int kInputTensor = 0; +constexpr int kAxis = 1; +constexpr int kOutputTensor = 0; + +template +inline void ArgMinMaxHelper(const RuntimeShape& input1_shape, + const T1* input1_data, const T3* input2_data, + const RuntimeShape& output_shape, T2* output_data, + bool is_arg_max) { + // Use Greater/Less from comparisons.h (formerly from kernels/micro_utils.h + // which was deprecated). Same as gtl::Greater but used here to reduce + // dependencies and binary size for micro environment. 
+ if (is_arg_max) { + reference_ops::ArgMinMax(input1_shape, input1_data, input2_data, + output_shape, output_data, + reference_ops::GreaterFn); + } else { + reference_ops::ArgMinMax(input1_shape, input1_data, input2_data, + output_shape, output_data, + reference_ops::LessFn); + } +} + +TfLiteStatus Eval(TfLiteContext* context, TfLiteNode* node, bool is_arg_max) { + const TfLiteEvalTensor* input = + tflite::micro::GetEvalInput(context, node, kInputTensor); + const TfLiteEvalTensor* axis = + tflite::micro::GetEvalInput(context, node, kAxis); + TfLiteEvalTensor* output = + tflite::micro::GetEvalOutput(context, node, kOutputTensor); + +#define TF_LITE_ARG_MIN_MAX(data_type, axis_type, output_type) \ + ArgMinMaxHelper(tflite::micro::GetTensorShape(input), \ + tflite::micro::GetTensorData(input), \ + tflite::micro::GetTensorData(axis), \ + tflite::micro::GetTensorShape(output), \ + tflite::micro::GetTensorData(output), \ + is_arg_max) + if (axis->type == kTfLiteInt32) { + if (output->type == kTfLiteInt32) { + switch (input->type) { + case kTfLiteFloat32: + TF_LITE_ARG_MIN_MAX(float, int32_t, int32_t); + break; + case kTfLiteInt8: + TF_LITE_ARG_MIN_MAX(int8_t, int32_t, int32_t); + break; + default: + MicroPrintf( + "Only float32, uint8_t and int8_t are " + "supported currently, got %s.", + TfLiteTypeGetName(input->type)); + return kTfLiteError; + } + } else { + MicroPrintf("Only int32_t are supported currently, got %s.", + TfLiteTypeGetName(output->type)); + return kTfLiteError; + } + } else { + MicroPrintf("Only int32_t are supported currently, got %s.", + TfLiteTypeGetName(axis->type)); + return kTfLiteError; + } + +#undef TF_LITE_ARG_MIN_MAX + + return kTfLiteOk; +} + +TfLiteStatus ArgMinEval(TfLiteContext* context, TfLiteNode* node) { + return Eval(context, node, false); +} + +TfLiteStatus ArgMaxEval(TfLiteContext* context, TfLiteNode* node) { + return Eval(context, node, true); +} + +} // namespace + +TFLMRegistration Register_ARG_MAX() { + return 
tflite::micro::RegisterOp(nullptr, nullptr, ArgMaxEval); +} + +TFLMRegistration Register_ARG_MIN() { + return tflite::micro::RegisterOp(nullptr, nullptr, ArgMinEval); +} + +} // namespace tflite diff --git a/tensorflow/lite/micro/kernels/arg_min_max_test.cc b/tensorflow/lite/micro/kernels/arg_min_max_test.cc new file mode 100644 index 0000000..ab06fec --- /dev/null +++ b/tensorflow/lite/micro/kernels/arg_min_max_test.cc @@ -0,0 +1,226 @@ +/* Copyright 2018 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#include "tensorflow/lite/c/builtin_op_data.h" +#include "tensorflow/lite/c/common.h" +#include "tensorflow/lite/micro/kernels/kernel_runner.h" +#include "tensorflow/lite/micro/test_helpers.h" +#include "tensorflow/lite/micro/testing/micro_test.h" + +namespace tflite { +namespace testing { +namespace { + +void ValidateArgMinMaxGoldens(TfLiteTensor* tensors, int tensors_size, + const int32_t* golden, int32_t* output, + int output_size, bool using_min) { + int inputs_array_data[] = {2, 0, 1}; + TfLiteIntArray* inputs_array = IntArrayFromInts(inputs_array_data); + int outputs_array_data[] = {1, 2}; + TfLiteIntArray* outputs_array = IntArrayFromInts(outputs_array_data); + + const TFLMRegistration registration = + using_min ? 
Register_ARG_MIN() : Register_ARG_MAX(); + micro::KernelRunner runner(registration, tensors, tensors_size, inputs_array, + outputs_array, + /*builtin_data=*/nullptr); + + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, runner.InitAndPrepare()); + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, runner.Invoke()); + + for (int i = 0; i < output_size; ++i) { + TF_LITE_MICRO_EXPECT_EQ(golden[i], output[i]); + } +} + +void TestArgMinMaxFloat(int* input_dims_data, const float* input_values, + int* axis_dims_data, const int32_t* axis_values, + int* output_dims_data, int32_t* output, + const int32_t* goldens, bool using_min) { + TfLiteIntArray* input_dims = IntArrayFromInts(input_dims_data); + TfLiteIntArray* axis_dims = IntArrayFromInts(axis_dims_data); + TfLiteIntArray* output_dims = IntArrayFromInts(output_dims_data); + const int output_dims_count = ElementCount(*output_dims); + + constexpr int inputs_size = 2; + constexpr int outputs_size = 1; + constexpr int tensors_size = inputs_size + outputs_size; + TfLiteTensor tensors[tensors_size] = { + CreateTensor(input_values, input_dims), + CreateTensor(axis_values, axis_dims), + CreateTensor(output, output_dims), + }; + + ValidateArgMinMaxGoldens(tensors, tensors_size, goldens, output, + output_dims_count, using_min); +} + +template +void TestArgMinMaxQuantized(int* input_dims_data, const float* input_values, + T* input_quantized, float input_scale, + int input_zero_point, int* axis_dims_data, + const int32_t* axis_values, int* output_dims_data, + int32_t* output, const int32_t* goldens, + bool using_min) { + TfLiteIntArray* input_dims = IntArrayFromInts(input_dims_data); + TfLiteIntArray* axis_dims = IntArrayFromInts(axis_dims_data); + TfLiteIntArray* output_dims = IntArrayFromInts(output_dims_data); + const int output_dims_count = ElementCount(*output_dims); + + constexpr int inputs_size = 2; + constexpr int outputs_size = 1; + constexpr int tensors_size = inputs_size + outputs_size; + TfLiteTensor tensors[tensors_size] = { + 
CreateQuantizedTensor(input_values, input_quantized, input_dims, + input_scale, input_zero_point), + CreateTensor(axis_values, axis_dims), + CreateTensor(output, output_dims), + }; + + ValidateArgMinMaxGoldens(tensors, tensors_size, goldens, output, + output_dims_count, using_min); +} + +} // namespace +} // namespace testing +} // namespace tflite + +TF_LITE_MICRO_TESTS_BEGIN + +TF_LITE_MICRO_TEST(GetMaxArgFloat) { + int32_t output_data[1]; + int input_dims[] = {4, 1, 1, 1, 4}; + const float input_values[] = {0.1, 0.9, 0.7, 0.3}; + int axis_dims[] = {3, 1, 1, 1}; + const int32_t axis_values[] = {3}; + int output_dims[] = {3, 1, 1, 1}; + const int32_t goldens[] = {1}; + + tflite::testing::TestArgMinMaxFloat(input_dims, input_values, axis_dims, + axis_values, output_dims, output_data, + goldens, false); +} + +TF_LITE_MICRO_TEST(GetMinArgFloat) { + int32_t output_data[1]; + int input_dims[] = {4, 1, 1, 1, 4}; + const float input_values[] = {0.1, 0.9, 0.7, 0.3}; + int axis_dims[] = {3, 1, 1, 1}; + const int32_t axis_values[] = {3}; + int output_dims[] = {3, 1, 1, 1}; + const int32_t goldens[] = {0}; + + tflite::testing::TestArgMinMaxFloat(input_dims, input_values, axis_dims, + axis_values, output_dims, output_data, + goldens, true); +} + +TF_LITE_MICRO_TEST(GetMaxArgInt8) { + int32_t output_data[1]; + const int input_size = 4; + int input_dims[] = {4, 1, 1, 1, input_size}; + const float input_values[] = {1, 9, 7, 3}; + int axis_dims[] = {3, 1, 1, 1}; + const int32_t axis_values[] = {3}; + int output_dims[] = {3, 1, 1, 1}; + const int32_t goldens[] = {1}; + + float input_scale = 0.5; + int input_zero_point = -9; + int8_t input_quantized[input_size]; + + tflite::testing::TestArgMinMaxQuantized( + input_dims, input_values, input_quantized, input_scale, input_zero_point, + axis_dims, axis_values, output_dims, output_data, goldens, false); +} + +TF_LITE_MICRO_TEST(GetMinArgInt8) { + int32_t output_data[1]; + const int input_size = 4; + int input_dims[] = {4, 1, 1, 1, 
input_size}; + const float input_values[] = {1, 9, 7, 3}; + int axis_dims[] = {3, 1, 1, 1}; + const int32_t axis_values[] = {3}; + int output_dims[] = {3, 1, 1, 1}; + const int32_t goldens[] = {0}; + + float input_scale = 0.5; + int input_zero_point = -9; + int8_t input_quantized[input_size]; + + tflite::testing::TestArgMinMaxQuantized( + input_dims, input_values, input_quantized, input_scale, input_zero_point, + axis_dims, axis_values, output_dims, output_data, goldens, true); +} + +TF_LITE_MICRO_TEST(GetMaxArgMulDimensions) { + int32_t output_data[2]; + const int input_size = 8; + int input_dims[] = {4, 1, 1, 2, 4}; + const float input_values[] = {1, 2, 7, 8, 1, 9, 7, 3}; + int axis_dims[] = {3, 1, 1, 1}; + const int32_t axis_values[] = {3}; + int output_dims[] = {3, 1, 1, 2}; + const int32_t goldens[] = {3, 1}; + + float input_scale = 0.5; + int input_zero_point = -9; + int8_t input_quantized[input_size]; + + tflite::testing::TestArgMinMaxQuantized( + input_dims, input_values, input_quantized, input_scale, input_zero_point, + axis_dims, axis_values, output_dims, output_data, goldens, false); +} + +TF_LITE_MICRO_TEST(GetMinArgMulDimensions) { + int32_t output_data[2]; + const int input_size = 8; + int input_dims[] = {4, 1, 1, 2, 4}; + const float input_values[] = {1, 2, 7, 8, 1, 9, 7, 3}; + int axis_dims[] = {3, 1, 1, 1}; + const int32_t axis_values[] = {3}; + int output_dims[] = {3, 1, 1, 2}; + const int32_t goldens[] = {0, 0}; + + float input_scale = 0.5; + int input_zero_point = -9; + int8_t input_quantized[input_size]; + + tflite::testing::TestArgMinMaxQuantized( + input_dims, input_values, input_quantized, input_scale, input_zero_point, + axis_dims, axis_values, output_dims, output_data, goldens, true); +} + +TF_LITE_MICRO_TEST(GetMaxArgNegativeAxis) { + const int input_size = 8; + const int output_size = 4; + int input_dims[] = {4, 1, 1, 2, 4}; + const float input_values[] = {1, 2, 7, 8, 1, 9, 7, 3}; + int axis_dims[] = {3, 1, 1, 1}; + const int32_t 
axis_values[] = {-2}; + int output_dims[] = {3, 1, 1, 4}; + const int32_t goldens[] = {0, 1, 0, 0}; + + float input_scale = 0.5; + int input_zero_point = -9; + int32_t output_data[output_size]; + int8_t input_quantized[input_size]; + + tflite::testing::TestArgMinMaxQuantized( + input_dims, input_values, input_quantized, input_scale, input_zero_point, + axis_dims, axis_values, output_dims, output_data, goldens, false); +} + +TF_LITE_MICRO_TESTS_END diff --git a/tensorflow/lite/micro/kernels/assign_variable.cc b/tensorflow/lite/micro/kernels/assign_variable.cc new file mode 100644 index 0000000..bd99bd1 --- /dev/null +++ b/tensorflow/lite/micro/kernels/assign_variable.cc @@ -0,0 +1,107 @@ +/* Copyright 2021 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/ + +#include + +#include + +#include "tensorflow/lite/c/builtin_op_data.h" +#include "tensorflow/lite/c/common.h" +#include "tensorflow/lite/kernels/internal/compatibility.h" +#include "tensorflow/lite/kernels/kernel_util.h" +#include "tensorflow/lite/micro/kernels/kernel_util.h" +#include "tensorflow/lite/micro/memory_helpers.h" +#include "tensorflow/lite/micro/micro_graph.h" +#include "tensorflow/lite/micro/micro_log.h" +#include "tensorflow/lite/micro/micro_resource_variable.h" +#include "tensorflow/lite/schema/schema_generated.h" + +namespace tflite { + +namespace { + +constexpr int kInputVariableId = 0; +constexpr int kInputValue = 1; + +TfLiteStatus Prepare(TfLiteContext* context, TfLiteNode* node) { + TF_LITE_ENSURE_EQ(context, NumInputs(node), 2); + TF_LITE_ENSURE_EQ(context, NumOutputs(node), 0); + + // This must be a TfLiteEvalTensor despite this being in Prepare, because + // CreateTensor allocates a temp tensor from the flatbuffer, which does not + // contain the correct ID generated within the VAR_HANDLE op. EvalTensors are + // all allocated during StartModelAllocation which happens before + // init/prepare, and VAR_HANDLE Prepare() references its own op_data in the + // TfLiteEvalTensor, so reading the ID here is valid. 
+ const TfLiteEvalTensor* input_resource_id_tensor = + tflite::micro::GetEvalInput(context, node, kInputVariableId); + TFLITE_DCHECK(input_resource_id_tensor != nullptr); + TF_LITE_ENSURE(context, (input_resource_id_tensor->type == kTfLiteResource || + input_resource_id_tensor->type == kTfLiteInt32)); + TF_LITE_ENSURE_EQ(context, NumElements(input_resource_id_tensor->dims), 1); + + tflite::MicroContext* micro_context = tflite::GetMicroContext(context); + TfLiteTensor* input_value = + micro_context->AllocateTempInputTensor(node, kInputValue); + TFLITE_DCHECK(input_value != nullptr); + + MicroGraph& graph_info = micro_context->graph(); + + MicroResourceVariables* resources = graph_info.GetResourceVariables(); + // If the data field of this tensor is nullptr, we assume that this is a case + // of using resource variables in another subgraph, and the resource_id + // will be valid during Eval time. In case it wasn't valid, this will + // still be caught during Invoke. More info in b/277231654. + if (input_resource_id_tensor->data.i32 != nullptr) { + TF_LITE_ENSURE_OK(context, + resources->Allocate(input_resource_id_tensor->data.i32[0], + context, input_value)); + } + + micro_context->DeallocateTempTfLiteTensor(input_value); + return kTfLiteOk; +} + +TfLiteStatus Eval(TfLiteContext* context, TfLiteNode* node) { + const TfLiteEvalTensor* input_id = + tflite::micro::GetEvalInput(context, node, kInputVariableId); + TFLITE_DCHECK(input_id != nullptr); + + const TfLiteEvalTensor* input_value = + tflite::micro::GetEvalInput(context, node, kInputValue); + TFLITE_DCHECK(input_value != nullptr); + + tflite::MicroContext* micro_context = tflite::GetMicroContext(context); + MicroGraph& graph_info = micro_context->graph(); + + MicroResourceVariables* resources = graph_info.GetResourceVariables(); + if (resources == nullptr) { + MicroPrintf( + "ASSIGN_VARIABLE requires resource variables. 
Please create " + "ResourceVariables and pass it to the interpreter."); + return kTfLiteError; + } + TF_LITE_ENSURE_OK(context, + resources->Assign(input_id->data.i32[0], input_value)); + return kTfLiteOk; +} + +} // namespace. + +TFLMRegistration Register_ASSIGN_VARIABLE() { + return tflite::micro::RegisterOp(nullptr, Prepare, Eval); +} + +} // namespace tflite diff --git a/tensorflow/lite/micro/kernels/batch_to_space_nd.cc b/tensorflow/lite/micro/kernels/batch_to_space_nd.cc new file mode 100644 index 0000000..090a040 --- /dev/null +++ b/tensorflow/lite/micro/kernels/batch_to_space_nd.cc @@ -0,0 +1,112 @@ +/* Copyright 2021 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#include "tensorflow/lite/kernels/internal/reference/batch_to_space_nd.h" + +#include "tensorflow/lite/c/common.h" +#include "tensorflow/lite/kernels/internal/tensor_ctypes.h" +#include "tensorflow/lite/kernels/kernel_util.h" +#include "tensorflow/lite/micro/kernels/kernel_util.h" +#include "tensorflow/lite/micro/micro_log.h" +#include "tensorflow/lite/micro/micro_utils.h" + +namespace tflite { + +namespace { + +constexpr int kInputTensor = 0; +constexpr int kBlockShapeTensor = 1; +constexpr int kCropsTensor = 2; +constexpr int kOutputTensor = 0; + +// Currently, only 3D NHC and 4D NHWC input/output op_context are supported. 
+// In case of 3D input, it will be extended to 3D NHWC by adding W=1. +// The 4D array need to have exactly 2 spatial dimensions. +// TODO(b/149952582): Support arbitrary dimension in SpaceToBatchND. +const int kInputOutputMinDimensionNum = 3; +const int kInputOutputMaxDimensionNum = 4; + +TfLiteStatus Prepare(TfLiteContext* context, TfLiteNode* node) { + TF_LITE_ENSURE_EQ(context, NumInputs(node), 3); + TF_LITE_ENSURE_EQ(context, NumOutputs(node), 1); + + MicroContext* micro_context = GetMicroContext(context); + + TfLiteTensor* input = + micro_context->AllocateTempInputTensor(node, kInputTensor); + TfLiteTensor* output = + micro_context->AllocateTempOutputTensor(node, kOutputTensor); + TF_LITE_ENSURE(context, input != nullptr && output != nullptr); + + TF_LITE_ENSURE(context, NumDimensions(input) >= kInputOutputMinDimensionNum); + TF_LITE_ENSURE(context, NumDimensions(output) >= kInputOutputMinDimensionNum); + TF_LITE_ENSURE(context, NumDimensions(input) <= kInputOutputMaxDimensionNum); + TF_LITE_ENSURE(context, NumDimensions(output) <= kInputOutputMaxDimensionNum); + TF_LITE_ENSURE_TYPES_EQ(context, input->type, output->type); + + micro_context->DeallocateTempTfLiteTensor(input); + micro_context->DeallocateTempTfLiteTensor(output); + + return kTfLiteOk; +} + +TfLiteStatus Eval(TfLiteContext* context, TfLiteNode* node) { + const TfLiteEvalTensor* input = + tflite::micro::GetEvalInput(context, node, kInputTensor); + const TfLiteEvalTensor* block_shape = + tflite::micro::GetEvalInput(context, node, kBlockShapeTensor); + const TfLiteEvalTensor* crops = + tflite::micro::GetEvalInput(context, node, kCropsTensor); + TfLiteEvalTensor* output = + tflite::micro::GetEvalOutput(context, node, kOutputTensor); + + switch (input->type) { // Already know in/out types are same. 
+ case kTfLiteFloat32: + reference_ops::BatchToSpaceND( + tflite::micro::GetTensorShape(input), + tflite::micro::GetTensorData(input), + tflite::micro::GetTensorShape(block_shape), + tflite::micro::GetTensorData(block_shape), + tflite::micro::GetTensorShape(crops), + tflite::micro::GetTensorData(crops), + tflite::micro::GetTensorShape(output), + tflite::micro::GetTensorData(output)); + break; + case kTfLiteInt8: + reference_ops::BatchToSpaceND( + tflite::micro::GetTensorShape(input), + tflite::micro::GetTensorData(input), + tflite::micro::GetTensorShape(block_shape), + tflite::micro::GetTensorData(block_shape), + tflite::micro::GetTensorShape(crops), + tflite::micro::GetTensorData(crops), + tflite::micro::GetTensorShape(output), + tflite::micro::GetTensorData(output)); + break; + default: + MicroPrintf("Type %s (%d) not supported.", TfLiteTypeGetName(input->type), + input->type); + return kTfLiteError; + } + return kTfLiteOk; +} + +} // namespace. + +TFLMRegistration Register_BATCH_TO_SPACE_ND() { + return tflite::micro::RegisterOp(nullptr, Prepare, Eval); +} + +} // namespace tflite diff --git a/tensorflow/lite/micro/kernels/batch_to_space_nd_test.cc b/tensorflow/lite/micro/kernels/batch_to_space_nd_test.cc new file mode 100644 index 0000000..455c325 --- /dev/null +++ b/tensorflow/lite/micro/kernels/batch_to_space_nd_test.cc @@ -0,0 +1,154 @@ +/* Copyright 2021 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/ + +#include + +#include "tensorflow/lite/c/builtin_op_data.h" +#include "tensorflow/lite/c/common.h" +#include "tensorflow/lite/micro/kernels/kernel_runner.h" +#include "tensorflow/lite/micro/test_helpers.h" +#include "tensorflow/lite/micro/testing/micro_test.h" + +namespace tflite { +namespace testing { +namespace { + +constexpr int kBasicInputOutputSize = 16; +int basic_input_dims[] = {4, 4, 2, 2, 1}; +const float basic_input[kBasicInputOutputSize] = { + 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16}; +int basic_block_shape_dims[] = {1, 2}; +const int32_t basic_block_shape[] = {2, 2}; +int basic_crops_dims[] = {1, 4}; +const int32_t basic_crops[] = {0, 0, 0, 0}; +int basic_output_dims[] = {4, 1, 4, 4, 1}; +const float basic_golden[kBasicInputOutputSize] = {1, 5, 2, 6, 9, 13, 10, 14, + 3, 7, 4, 8, 11, 15, 12, 16}; + +template +TfLiteStatus ValidateBatchToSpaceNdGoldens(TfLiteTensor* tensors, + int tensors_size, const T* golden, + T* output, int output_size) { + int inputs_array_data[] = {3, 0, 1, 2}; + TfLiteIntArray* inputs_array = IntArrayFromInts(inputs_array_data); + int outputs_array_data[] = {1, 3}; + TfLiteIntArray* outputs_array = IntArrayFromInts(outputs_array_data); + + const TFLMRegistration registration = Register_BATCH_TO_SPACE_ND(); + micro::KernelRunner runner(registration, tensors, tensors_size, inputs_array, + outputs_array, nullptr); + + TF_LITE_ENSURE_STATUS(runner.InitAndPrepare()); + TF_LITE_ENSURE_STATUS(runner.Invoke()); + + for (int i = 0; i < output_size; ++i) { + // TODO(b/158102673): workaround for not having fatal test assertions. 
+ TF_LITE_MICRO_EXPECT_EQ(golden[i], output[i]); + if (golden[i] != output[i]) { + return kTfLiteError; + } + } + return kTfLiteOk; +} + +TfLiteStatus TestBatchToSpaceNdFloat( + int* input_dims_data, const float* input_data, int* block_shape_dims_data, + const int32_t* block_shape_data, int* crops_dims_data, + const int32_t* crops_data, int* output_dims_data, const float* golden, + float* output_data) { + TfLiteIntArray* input_dims = IntArrayFromInts(input_dims_data); + TfLiteIntArray* block_shape_dims = IntArrayFromInts(block_shape_dims_data); + TfLiteIntArray* crops_dims = IntArrayFromInts(crops_dims_data); + TfLiteIntArray* output_dims = IntArrayFromInts(output_dims_data); + + constexpr int inputs_size = 3; + constexpr int outputs_size = 1; + constexpr int tensors_size = inputs_size + outputs_size; + TfLiteTensor tensors[tensors_size] = { + CreateTensor(input_data, input_dims), + CreateTensor(block_shape_data, block_shape_dims), + CreateTensor(crops_data, crops_dims), + CreateTensor(output_data, output_dims), + }; + + return ValidateBatchToSpaceNdGoldens(tensors, tensors_size, golden, + output_data, ElementCount(*output_dims)); +} + +template +TfLiteStatus TestBatchToSpaceNdQuantized( + int* input_dims_data, const float* input_data, T* input_quantized, + float input_scale, int input_zero_point, int* block_shape_dims_data, + const int32_t* block_shape_data, int* crops_dims_data, + const int32_t* crops_data, int* output_dims_data, const float* golden, + T* golden_quantized, float output_scale, int output_zero_point, + T* output_data) { + TfLiteIntArray* input_dims = IntArrayFromInts(input_dims_data); + TfLiteIntArray* block_shape_dims = IntArrayFromInts(block_shape_dims_data); + TfLiteIntArray* crops_dims = IntArrayFromInts(crops_dims_data); + TfLiteIntArray* output_dims = IntArrayFromInts(output_dims_data); + + constexpr int inputs_size = 3; + constexpr int outputs_size = 1; + constexpr int tensors_size = inputs_size + outputs_size; + TfLiteTensor 
tensors[tensors_size] = { + tflite::testing::CreateQuantizedTensor(input_data, input_quantized, + input_dims, input_scale, + input_zero_point), + tflite::testing::CreateTensor(block_shape_data, block_shape_dims), + tflite::testing::CreateTensor(crops_data, crops_dims), + tflite::testing::CreateQuantizedTensor(output_data, output_dims, + output_scale, output_zero_point), + }; + tflite::Quantize(golden, golden_quantized, ElementCount(*output_dims), + output_scale, output_zero_point); + + return ValidateBatchToSpaceNdGoldens(tensors, tensors_size, golden_quantized, + output_data, ElementCount(*output_dims)); +} + +} // namespace +} // namespace testing +} // namespace tflite + +TF_LITE_MICRO_TESTS_BEGIN + +TF_LITE_MICRO_TEST(BatchToSpaceBasicFloat) { + float output[tflite::testing::kBasicInputOutputSize]; + TF_LITE_MICRO_EXPECT_EQ( + kTfLiteOk, + tflite::testing::TestBatchToSpaceNdFloat( + tflite::testing::basic_input_dims, tflite::testing::basic_input, + tflite::testing::basic_block_shape_dims, + tflite::testing::basic_block_shape, tflite::testing::basic_crops_dims, + tflite::testing::basic_crops, tflite::testing::basic_output_dims, + tflite::testing::basic_golden, output)); +} + +TF_LITE_MICRO_TEST(BatchToSpaceBasicInt8) { + int8_t output[tflite::testing::kBasicInputOutputSize]; + int8_t input_quantized[tflite::testing::kBasicInputOutputSize]; + int8_t golden_quantized[tflite::testing::kBasicInputOutputSize]; + TF_LITE_MICRO_EXPECT_EQ( + kTfLiteOk, + tflite::testing::TestBatchToSpaceNdQuantized( + tflite::testing::basic_input_dims, tflite::testing::basic_input, + input_quantized, 1.0f, 0, tflite::testing::basic_block_shape_dims, + tflite::testing::basic_block_shape, tflite::testing::basic_crops_dims, + tflite::testing::basic_crops, tflite::testing::basic_output_dims, + tflite::testing::basic_golden, golden_quantized, 1.0f, 0, output)); +} + +TF_LITE_MICRO_TESTS_END diff --git a/tensorflow/lite/micro/kernels/broadcast_args.cc 
b/tensorflow/lite/micro/kernels/broadcast_args.cc new file mode 100644 index 0000000..283410a --- /dev/null +++ b/tensorflow/lite/micro/kernels/broadcast_args.cc @@ -0,0 +1,91 @@ +/* Copyright 2022 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ +#include "tensorflow/lite/kernels/internal/reference/broadcast_args.h" + +#include + +#include "tensorflow/lite/c/common.h" +#include "tensorflow/lite/kernels/internal/tensor_ctypes.h" +#include "tensorflow/lite/kernels/kernel_util.h" +#include "tensorflow/lite/micro/kernels/kernel_util.h" +#include "tensorflow/lite/micro/micro_context.h" + +namespace tflite { +namespace { +constexpr int kShape1Tensor = 0; +constexpr int kShape2Tensor = 1; +constexpr int kOutputTensor = 0; + +TfLiteStatus BroadcastArgsPrepare(TfLiteContext* context, TfLiteNode* node) { + TF_LITE_ENSURE(context, NumInputs(node) == 2); + TF_LITE_ENSURE_EQ(context, NumOutputs(node), 1); + + MicroContext* micro_context = GetMicroContext(context); + TfLiteTensor* shape1 = + micro_context->AllocateTempInputTensor(node, kShape1Tensor); + TfLiteTensor* shape2 = + micro_context->AllocateTempInputTensor(node, kShape2Tensor); + TfLiteTensor* output = + micro_context->AllocateTempOutputTensor(node, kOutputTensor); + + TF_LITE_ENSURE(context, + shape1->type == kTfLiteInt32 || shape1->type == kTfLiteInt64); + TF_LITE_ENSURE_EQ(context, shape1->type, shape2->type); + 
TF_LITE_ENSURE_EQ(context, shape1->type, output->type); + + // Ensures the shapes are 1D tensor. + TF_LITE_ENSURE_EQ(context, NumDimensions(shape1), 1); + TF_LITE_ENSURE_EQ(context, NumDimensions(shape2), 1); + + // Ensure the shape of the output tensor is compatible + TF_LITE_ENSURE_EQ(context, NumDimensions(output), 1); + + micro_context->DeallocateTempTfLiteTensor(shape1); + micro_context->DeallocateTempTfLiteTensor(shape2); + micro_context->DeallocateTempTfLiteTensor(output); + + return kTfLiteOk; +} + +TfLiteStatus BroadcastArgsEval(TfLiteContext* context, TfLiteNode* node) { + const TfLiteEvalTensor* shape1 = + micro::GetEvalInput(context, node, kShape1Tensor); + const TfLiteEvalTensor* shape2 = + micro::GetEvalInput(context, node, kShape2Tensor); + TfLiteEvalTensor* output = micro::GetEvalOutput(context, node, kOutputTensor); + + if (output->type == kTfLiteInt32) { + reference_ops::BroadcastArgs( + micro::GetTensorShape(shape1), micro::GetTensorData(shape1), + micro::GetTensorShape(shape2), micro::GetTensorData(shape2), + micro::GetTensorShape(output), micro::GetTensorData(output)); + } else { + reference_ops::BroadcastArgs( + micro::GetTensorShape(shape1), micro::GetTensorData(shape1), + micro::GetTensorShape(shape2), micro::GetTensorData(shape2), + micro::GetTensorShape(output), micro::GetTensorData(output)); + } + + return kTfLiteOk; +} + +} // namespace + +TFLMRegistration Register_BROADCAST_ARGS() { + return tflite::micro::RegisterOp(nullptr, BroadcastArgsPrepare, + BroadcastArgsEval); +} + +} // namespace tflite diff --git a/tensorflow/lite/micro/kernels/broadcast_args_test.cc b/tensorflow/lite/micro/kernels/broadcast_args_test.cc new file mode 100644 index 0000000..ff5f0bb --- /dev/null +++ b/tensorflow/lite/micro/kernels/broadcast_args_test.cc @@ -0,0 +1,140 @@ +/* Copyright 2022 The TensorFlow Authors. All Rights Reserved. 
+ +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ +#include "tensorflow/lite/c/builtin_op_data.h" +#include "tensorflow/lite/c/common.h" +#include "tensorflow/lite/micro/kernels/kernel_runner.h" +#include "tensorflow/lite/micro/micro_utils.h" +#include "tensorflow/lite/micro/test_helpers.h" +#include "tensorflow/lite/micro/testing/micro_test.h" + +namespace { +using ::tflite::testing::CreateTensor; +using ::tflite::testing::IntArrayFromInts; + +// The layout of tensors is fixed. +constexpr int kShape1Index = 0; +constexpr int kShape2Index = 1; +constexpr int kOutputIndex = 2; +constexpr int kInputsTensor[] = {2, kShape1Index, kShape2Index}; +constexpr int kOutputsTensor[] = {1, kOutputIndex}; + +// This function is NOT thread safe. +template +tflite::micro::KernelRunner CreateBroadcastArgsTestRunner( + int* input1_shape, DimsType* input1_data, int* input2_shape, + DimsType* input2_data, int* output_shape, DimsType* output_data) { + // Some targets do not support dynamic memory (i.e., no malloc or new), thus, + // the test need to place non-transient memories in static variables. This is + // safe because tests are guaranteed to run serially. + // Both below structures are trivially destructible. 
+ static TFLMRegistration registration; + static TfLiteTensor tensors[3]; + + tensors[0] = CreateTensor(input1_data, IntArrayFromInts(input1_shape)); + tensors[1] = CreateTensor(input2_data, IntArrayFromInts(input2_shape)); + tensors[2] = CreateTensor(output_data, IntArrayFromInts(output_shape)); + + registration = tflite::Register_BROADCAST_ARGS(); + tflite::micro::KernelRunner runner = tflite::micro::KernelRunner( + registration, tensors, sizeof(tensors) / sizeof(TfLiteTensor), + IntArrayFromInts(const_cast(kInputsTensor)), + IntArrayFromInts(const_cast(kOutputsTensor)), + /*builtin_data=*/nullptr); + return runner; +} + +template +void TestBroadcastArgs(int* input1_shape, DimsType* input1_data, + int* input2_shape, DimsType* input2_data, + int* output_shape, DimsType* output_data, + DimsType* expected_output_data) { + tflite::micro::KernelRunner runner = + CreateBroadcastArgsTestRunner(input1_shape, input1_data, input2_shape, + input2_data, output_shape, output_data); + + TF_LITE_MICRO_EXPECT_EQ(runner.InitAndPrepare(), kTfLiteOk); + TF_LITE_MICRO_EXPECT_EQ(runner.Invoke(), kTfLiteOk); + + // The output elements contain the fill value. 
+ const auto elements = tflite::ElementCount(*IntArrayFromInts(output_shape)); + for (int i = 0; i < elements; ++i) { + TF_LITE_MICRO_EXPECT_EQ(output_data[i], expected_output_data[i]); + } +} +} // namespace + +TF_LITE_MICRO_TESTS_BEGIN + +TF_LITE_MICRO_TEST(BroadcastArgsWithScalar) { + int input1_shape[] = {1, 0}; + int32_t input1_data[] = {}; + + int input2_shape[] = {1, 2}; + int32_t input2_data[2] = {2, 4}; + + int output_shape[] = {1, 2}; + int32_t output_data[2]; + int32_t expected_output_data[2] = {2, 4}; + + TestBroadcastArgs(input1_shape, input1_data, input2_shape, input2_data, + output_shape, output_data, expected_output_data); +} + +TF_LITE_MICRO_TEST(BroadcastArgsDifferentDims) { + int input1_shape[] = {1, 1}; + int32_t input1_data[] = {1}; + + int input2_shape[] = {1, 2}; + int32_t input2_data[2] = {2, 4}; + + int output_shape[] = {1, 2}; + int32_t output_data[2]; + int32_t expected_output_data[2] = {2, 4}; + + TestBroadcastArgs(input1_shape, input1_data, input2_shape, input2_data, + output_shape, output_data, expected_output_data); +} + +TF_LITE_MICRO_TEST(BroadcastArgsSameDims) { + int input1_shape[] = {1, 6}; + int32_t input1_data[] = {1, 4, 6, 3, 1, 5}; + + int input2_shape[] = {1, 6}; + int32_t input2_data[6] = {4, 4, 1, 3, 4, 1}; + + int output_shape[] = {1, 6}; + int32_t output_data[6]; + int32_t expected_output_data[6] = {4, 4, 6, 3, 4, 5}; + + TestBroadcastArgs(input1_shape, input1_data, input2_shape, input2_data, + output_shape, output_data, expected_output_data); +} + +TF_LITE_MICRO_TEST(BroadcastArgsComplex) { + int input1_shape[] = {1, 4}; + int32_t input1_data[] = {6, 3, 1, 5}; + + int input2_shape[] = {1, 6}; + int32_t input2_data[6] = {4, 4, 1, 3, 4, 1}; + + int output_shape[] = {1, 6}; + int32_t output_data[6]; + int32_t expected_output_data[6] = {4, 4, 6, 3, 4, 5}; + + TestBroadcastArgs(input1_shape, input1_data, input2_shape, input2_data, + output_shape, output_data, expected_output_data); +} + +TF_LITE_MICRO_TESTS_END diff --git 
a/tensorflow/lite/micro/kernels/broadcast_to.cc b/tensorflow/lite/micro/kernels/broadcast_to.cc new file mode 100644 index 0000000..61deaa3 --- /dev/null +++ b/tensorflow/lite/micro/kernels/broadcast_to.cc @@ -0,0 +1,123 @@ +/* Copyright 2022 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ +#include "tensorflow/lite/kernels/internal/reference/broadcast_to.h" + +#include + +#include "tensorflow/lite/c/common.h" +#include "tensorflow/lite/kernels/internal/tensor_ctypes.h" +#include "tensorflow/lite/kernels/kernel_util.h" +#include "tensorflow/lite/micro/kernels/kernel_util.h" +#include "tensorflow/lite/micro/micro_context.h" + +namespace tflite { + +namespace { +constexpr int kInputTensor = 0; +constexpr int kShapeTensor = 1; +constexpr int kOutputTensor = 0; +// Support a maximum of 5 dimensions in TFLM. +constexpr int kMaxDims = 5; + +TfLiteStatus ValidateOutputTensor(TfLiteContext* context, TfLiteTensor* input, + TfLiteTensor* shape, TfLiteTensor* output) { + // Ensures the shape is 1D tensor. + TF_LITE_ENSURE_EQ(context, NumDimensions(shape), 1); + + // Ensure output dims is not less than input dims. 
+ int input_num_dims = NumDimensions(input); + int output_num_dims = NumDimensions(output); + int shape_num_dims = SizeOfDimension(shape, 0); + TF_LITE_ENSURE_MSG(context, output_num_dims == shape_num_dims, + "Output must match with the expected shape dimension."); + TF_LITE_ENSURE_MSG(context, input_num_dims <= output_num_dims, + "Output shape must be broadcastable from input shape."); + TF_LITE_ENSURE_MSG(context, output_num_dims <= kMaxDims, + "BroadcastTo only supports 1-5D tensor."); + + // Check if output shape is broadcastable from input shape. + auto get_shape_data = [shape](int i) -> int32_t { + if (shape->type == kTfLiteInt32) { + return GetTensorData(shape)[i]; + } else { + return GetTensorData(shape)[i]; + } + }; + + int extending_dims = output_num_dims - input_num_dims; + for (int idx = 0; idx < input_num_dims; ++idx) { + TF_LITE_ENSURE_MSG( + context, + (SizeOfDimension(input, idx) == 1 || + SizeOfDimension(input, idx) == get_shape_data(extending_dims + idx)), + "Output shape must be broadcastable from input shape."); + } + + // Validating the shape of the output tensor. 
+ tflite::RuntimeShape output_shape = tflite::GetTensorShape(output); + for (int idx = 0; idx < output_num_dims; ++idx) { + TF_LITE_ENSURE(context, output_shape.Dims(idx) == get_shape_data(idx)); + } + return kTfLiteOk; +} + +TfLiteStatus BroadcastToPrepare(TfLiteContext* context, TfLiteNode* node) { + TF_LITE_ENSURE(context, NumInputs(node) == 2); + TF_LITE_ENSURE_EQ(context, NumOutputs(node), 1); + MicroContext* micro_context = GetMicroContext(context); + TfLiteTensor* input = + micro_context->AllocateTempInputTensor(node, kInputTensor); + TfLiteTensor* shape = + micro_context->AllocateTempInputTensor(node, kShapeTensor); + TfLiteTensor* output = + micro_context->AllocateTempOutputTensor(node, kOutputTensor); + + TF_LITE_ENSURE_MSG(context, (NumDimensions(input) <= kMaxDims), + "BroadcastTo only supports 1-5D tensor."); + + TF_LITE_ENSURE(context, + shape->type == kTfLiteInt32 || shape->type == kTfLiteInt64); + TF_LITE_ENSURE_EQ(context, input->type, output->type); + + // Does not support String type due to its variable size. This limitation is + // the same as TFLite. + TF_LITE_ENSURE(context, input->type != kTfLiteString); + + TF_LITE_ENSURE_STATUS(ValidateOutputTensor(context, input, shape, output)); + micro_context->DeallocateTempTfLiteTensor(input); + micro_context->DeallocateTempTfLiteTensor(shape); + micro_context->DeallocateTempTfLiteTensor(output); + return kTfLiteOk; +} + +TfLiteStatus BroadcastToEval(TfLiteContext* context, TfLiteNode* node) { + const TfLiteEvalTensor* input = + micro::GetEvalInput(context, node, kInputTensor); + TfLiteEvalTensor* output = micro::GetEvalOutput(context, node, kOutputTensor); + + // BroadcastTo op support up to 5 dims, different from 8 dims in TFLite. 
+ reference_ops::BroadcastTo( + micro::GetTensorShape(input), input->data.raw, + micro::GetTensorShape(output), output->data.raw, input->type); + return kTfLiteOk; +} +} // namespace + +TFLMRegistration Register_BROADCAST_TO() { + return tflite::micro::RegisterOp(nullptr, BroadcastToPrepare, + BroadcastToEval); +} + +} // namespace tflite diff --git a/tensorflow/lite/micro/kernels/broadcast_to_test.cc b/tensorflow/lite/micro/kernels/broadcast_to_test.cc new file mode 100644 index 0000000..09a9756 --- /dev/null +++ b/tensorflow/lite/micro/kernels/broadcast_to_test.cc @@ -0,0 +1,214 @@ +/* Copyright 2022 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ +#include "tensorflow/lite/c/builtin_op_data.h" +#include "tensorflow/lite/c/common.h" +#include "tensorflow/lite/micro/kernels/kernel_runner.h" +#include "tensorflow/lite/micro/micro_utils.h" +#include "tensorflow/lite/micro/test_helpers.h" +#include "tensorflow/lite/micro/testing/micro_test.h" + +namespace { +using ::tflite::testing::CreateTensor; +using ::tflite::testing::IntArrayFromInts; + +// The layout of tensors is fixed. +constexpr int kInputIndex = 0; +constexpr int kShapeIndex = 1; +constexpr int kOutputIndex = 2; +constexpr int kInputsTensor[] = {2, kInputIndex, kShapeIndex}; +constexpr int kOutputsTensor[] = {1, kOutputIndex}; + +// This function is NOT thread safe. 
+template +tflite::micro::KernelRunner CreateBroadcastToTestRunner( + int* dims_shape, DimsType* dims_data, int* input_shape, + ValueType* input_data, int* output_shape, ValueType* output_data) { + // Some targets do not support dynamic memory (i.e., no malloc or new), thus, + // the test need to place non-transient memories in static variables. This is + // safe because tests are guaranteed to run serially. + // Both below structures are trivially destructible. + static TFLMRegistration registration; + static TfLiteTensor tensors[3]; + + tensors[0] = CreateTensor(input_data, IntArrayFromInts(input_shape)); + tensors[1] = CreateTensor(dims_data, IntArrayFromInts(dims_shape)); + tensors[2] = CreateTensor(output_data, IntArrayFromInts(output_shape)); + + // The output type matches the value type. + TF_LITE_MICRO_EXPECT_EQ(tensors[kOutputIndex].type, + tensors[kInputIndex].type); + + registration = tflite::Register_BROADCAST_TO(); + tflite::micro::KernelRunner runner = tflite::micro::KernelRunner( + registration, tensors, sizeof(tensors) / sizeof(TfLiteTensor), + IntArrayFromInts(const_cast(kInputsTensor)), + IntArrayFromInts(const_cast(kOutputsTensor)), + /*builtin_data=*/nullptr); + return runner; +} + +template +void TestBroadcastTo(int* dims_shape, DimsType* dims_data, int* input_shape, + ValueType* input_data, int* output_shape, + ValueType* output_data, ValueType* expected_output_data) { + tflite::micro::KernelRunner runner = + CreateBroadcastToTestRunner(dims_shape, dims_data, input_shape, + input_data, output_shape, output_data); + + TF_LITE_MICRO_EXPECT_EQ(runner.InitAndPrepare(), kTfLiteOk); + TF_LITE_MICRO_EXPECT_EQ(runner.Invoke(), kTfLiteOk); + + // The output elements contain the fill value. 
+ const auto elements = tflite::ElementCount(*IntArrayFromInts(output_shape)); + for (int i = 0; i < elements; ++i) { + TF_LITE_MICRO_EXPECT_EQ(output_data[i], expected_output_data[i]); + } +} +} // namespace + +TF_LITE_MICRO_TESTS_BEGIN + +TF_LITE_MICRO_TEST(ShapeMustBe1D) { + int dims_shape[] = {2, 2, 2}; + int32_t dims_data[] = {2, 3, 4, 4}; + + int input_shape[] = {2, 2, 2}; + int input_data[] = {2, 3, 4, 4}; + + int output_shape[] = {2, 2, 2}; + int output_data[] = {2, 3, 4, 4}; + + tflite::micro::KernelRunner runner = + CreateBroadcastToTestRunner(dims_shape, dims_data, input_shape, + input_data, output_shape, output_data); + + TF_LITE_MICRO_EXPECT_EQ(runner.InitAndPrepare(), kTfLiteError); +} + +TF_LITE_MICRO_TEST(TooManyDimensionShouldFail) { + int dims_shape[] = {1, 6}; + int32_t dims_data[] = {2, 2, 2, 2, 2, 2}; + + int input_shape[] = {2, 2, 2}; + int input_data[] = {2, 3, 4, 4}; + + int output_shape[] = {6, 2, 2, 2, 2, 2, 2}; + int output_data[12]; + + tflite::micro::KernelRunner runner = + CreateBroadcastToTestRunner(dims_shape, dims_data, input_shape, + input_data, output_shape, output_data); + + TF_LITE_MICRO_EXPECT_EQ(runner.InitAndPrepare(), kTfLiteError); +} + +TF_LITE_MICRO_TEST(MismatchDimensionShouldFail) { + int dims_shape[] = {1, 4}; + int32_t dims_data[] = {2, 4, 1, 3}; + + int input_shape[] = {2, 4, 1, 3}; + int input_data[24] = {2, 3, 4, 4}; + + int output_shape[] = {4, 2, 4, 1, 2}; + int output_data[24]; + + tflite::micro::KernelRunner runner = + CreateBroadcastToTestRunner(dims_shape, dims_data, input_shape, + input_data, output_shape, output_data); + + TF_LITE_MICRO_EXPECT_EQ(runner.InitAndPrepare(), kTfLiteError); +} + +TF_LITE_MICRO_TEST(Broadcast1DConstTest) { + constexpr int kDimension = 4; + constexpr int kSize = 4; + int dims_shape[] = {1, 1}; + int32_t dims_data[] = {kDimension}; + + int input_shape[] = {1, 1}; + int32_t input_data[] = {3}; + + int output_shape[] = {1, kDimension}; + int32_t output_data[kSize]; + int32_t 
expected_output_data[kSize] = {3, 3, 3, 3}; + + TestBroadcastTo(dims_shape, dims_data, input_shape, input_data, output_shape, + output_data, expected_output_data); +} + +TF_LITE_MICRO_TEST(Broadcast4DConstTest) { + int dims_shape[] = {1, 4}; + int32_t dims_data[] = {2, 2, 2, 2}; + + int input_shape[] = {2, 2, 2}; + int32_t input_data[4] = {2, 3, 4, 5}; + + int output_shape[] = {4, 2, 2, 2, 2}; + int32_t output_data[16]; + int32_t expected_output_data[16] = {2, 3, 4, 5, 2, 3, 4, 5, + 2, 3, 4, 5, 2, 3, 4, 5}; + + TestBroadcastTo(dims_shape, dims_data, input_shape, input_data, output_shape, + output_data, expected_output_data); +} + +TF_LITE_MICRO_TEST(ComplexBroadcast4DConstTest) { + int dims_shape[] = {1, 4}; + int32_t dims_data[] = {3, 3, 2, 2}; + + int input_shape[] = {4, 1, 3, 1, 2}; + int32_t input_data[6] = {1, 2, 3, 4, 5, 6}; + + int output_shape[] = {4, 3, 3, 2, 2}; + int32_t output_data[36]; + int32_t expected_output_data[36] = {1, 2, 1, 2, 3, 4, 3, 4, 5, 6, 5, 6, + 1, 2, 1, 2, 3, 4, 3, 4, 5, 6, 5, 6, + 1, 2, 1, 2, 3, 4, 3, 4, 5, 6, 5, 6}; + + TestBroadcastTo(dims_shape, dims_data, input_shape, input_data, output_shape, + output_data, expected_output_data); +} + +TF_LITE_MICRO_TEST(NoBroadcastingConstTest) { + int dims_shape[] = {1, 3}; + int32_t dims_data[] = {3, 1, 2}; + + int input_shape[] = {3, 3, 1, 2}; + int32_t input_data[6] = {1, 2, 3, 4, 5, 6}; + + int output_shape[] = {3, 3, 1, 2}; + int32_t output_data[6]; + int32_t expected_output_data[6] = {1, 2, 3, 4, 5, 6}; + + TestBroadcastTo(dims_shape, dims_data, input_shape, input_data, output_shape, + output_data, expected_output_data); +} + +TF_LITE_MICRO_TEST(BroadcastInt64ShapeTest) { + int dims_shape[] = {1, 4}; + int64_t dims_data[] = {1, 1, 2, 2}; + + int input_shape[] = {4, 1, 1, 1, 2}; + int32_t input_data[2] = {3, 4}; + + int output_shape[] = {4, 1, 1, 2, 2}; + int32_t output_data[4]; + int32_t expected_output_data[4] = {3, 4, 3, 4}; + + TestBroadcastTo(dims_shape, dims_data, input_shape, 
input_data, output_shape, + output_data, expected_output_data); +} + +TF_LITE_MICRO_TESTS_END diff --git a/tensorflow/lite/micro/kernels/call_once.cc b/tensorflow/lite/micro/kernels/call_once.cc new file mode 100644 index 0000000..8ad1c20 --- /dev/null +++ b/tensorflow/lite/micro/kernels/call_once.cc @@ -0,0 +1,88 @@ +/* Copyright 2021 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#include + +#include + +#include "tensorflow/lite/c/builtin_op_data.h" +#include "tensorflow/lite/c/common.h" +#include "tensorflow/lite/kernels/internal/compatibility.h" +#include "tensorflow/lite/kernels/kernel_util.h" +#include "tensorflow/lite/micro/kernels/kernel_util.h" +#include "tensorflow/lite/micro/memory_helpers.h" +#include "tensorflow/lite/micro/micro_context.h" +#include "tensorflow/lite/micro/micro_graph.h" +#include "tensorflow/lite/schema/schema_generated.h" + +namespace tflite { + +namespace { + +struct OpData { + int init_subgraph_index; + bool has_run; +}; + +void* Init(TfLiteContext* context, const char* buffer, size_t length) { + TFLITE_DCHECK(context->AllocatePersistentBuffer != nullptr); + return context->AllocatePersistentBuffer(context, sizeof(OpData)); +} + +TfLiteStatus Prepare(TfLiteContext* context, TfLiteNode* node) { + OpData* op_data = reinterpret_cast(node->user_data); + const auto* params = + reinterpret_cast(node->builtin_data); + 
op_data->init_subgraph_index = params->init_subgraph_index; + op_data->has_run = false; + + TF_LITE_ENSURE(context, NumInputs(node) == 0); + TF_LITE_ENSURE(context, NumOutputs(node) == 0); + + tflite::MicroContext* micro_context = tflite::GetMicroContext(context); + MicroGraph& graph_info = micro_context->graph(); + + TF_LITE_ENSURE(context, + op_data->init_subgraph_index < graph_info.NumSubgraphs()); + + return kTfLiteOk; +} + +TfLiteStatus Eval(TfLiteContext* context, TfLiteNode* node) { + OpData* op_data = reinterpret_cast(node->user_data); + + // Call once only runs one time then is a no-op for every subsequent call. + if (op_data->has_run) { + return kTfLiteOk; + } + + tflite::MicroContext* micro_context = tflite::GetMicroContext(context); + MicroGraph& graph_info = micro_context->graph(); + + TF_LITE_ENSURE_OK(context, + graph_info.InvokeSubgraph(op_data->init_subgraph_index)); + + op_data->has_run = true; + + return kTfLiteOk; +} + +} // namespace. + +TFLMRegistration Register_CALL_ONCE() { + return tflite::micro::RegisterOp(Init, Prepare, Eval); +} + +} // namespace tflite diff --git a/tensorflow/lite/micro/kernels/call_once_test.cc b/tensorflow/lite/micro/kernels/call_once_test.cc new file mode 100644 index 0000000..aec5261 --- /dev/null +++ b/tensorflow/lite/micro/kernels/call_once_test.cc @@ -0,0 +1,62 @@ +/* Copyright 2021 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/ + +#include "tensorflow/lite/c/builtin_op_data.h" +#include "tensorflow/lite/c/common.h" +#include "tensorflow/lite/micro/kernels/kernel_runner.h" +#include "tensorflow/lite/micro/micro_interpreter.h" +#include "tensorflow/lite/micro/mock_micro_graph.h" +#include "tensorflow/lite/micro/test_helpers.h" +#include "tensorflow/lite/micro/testing/micro_test.h" + +namespace tflite { +namespace testing { +namespace { + +void TestCallOnce(const int subgraph0_invoke_count_golden, + const int subgraph1_invoke_count_golden) { + int inputs_array_data[] = {0}; + TfLiteIntArray* inputs_array = IntArrayFromInts(inputs_array_data); + int outputs_array_data[] = {0}; + TfLiteIntArray* outputs_array = IntArrayFromInts(outputs_array_data); + + TfLiteCallOnceParams params; + params.init_subgraph_index = 1; + + const TFLMRegistration registration = tflite::Register_CALL_ONCE(); + micro::KernelRunner runner(registration, nullptr, 0, inputs_array, + outputs_array, ¶ms); + + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, runner.InitAndPrepare()); + for (int i = 0; i < subgraph0_invoke_count_golden; i++) { + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, runner.Invoke()); + } + + TF_LITE_MICRO_EXPECT_EQ(subgraph1_invoke_count_golden, + runner.GetMockGraph()->get_invoke_count(1)); +} + +} // namespace +} // namespace testing +} // namespace tflite + +TF_LITE_MICRO_TESTS_BEGIN + +TF_LITE_MICRO_TEST(CallOnceShouldOnlyInvokeSubgraphOnce) { + tflite::testing::TestCallOnce(1, 1); + tflite::testing::TestCallOnce(10, 1); +} + +TF_LITE_MICRO_TESTS_END diff --git a/tensorflow/lite/micro/kernels/cast.cc b/tensorflow/lite/micro/kernels/cast.cc new file mode 100644 index 0000000..a493618 --- /dev/null +++ b/tensorflow/lite/micro/kernels/cast.cc @@ -0,0 +1,114 @@ +/* Copyright 2021 The TensorFlow Authors. All Rights Reserved. 
+ +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#include "tensorflow/lite/c/common.h" +#include "tensorflow/lite/kernels/internal/tensor_ctypes.h" +#include "tensorflow/lite/kernels/kernel_util.h" +#include "tensorflow/lite/micro/kernels/kernel_util.h" +#include "tensorflow/lite/micro/micro_log.h" + +namespace tflite { +namespace { + +constexpr int kInputTensor = 0; +constexpr int kOutputTensor = 0; + +TfLiteStatus Prepare(TfLiteContext* context, TfLiteNode* node) { + TF_LITE_ENSURE_EQ(context, NumInputs(node), 1); + TF_LITE_ENSURE_EQ(context, NumOutputs(node), 1); + + MicroContext* micro_context = GetMicroContext(context); + + TfLiteTensor* input = + micro_context->AllocateTempInputTensor(node, kInputTensor); + TF_LITE_ENSURE(context, input != nullptr); + TfLiteTensor* output = + micro_context->AllocateTempOutputTensor(node, kOutputTensor); + TF_LITE_ENSURE(context, output != nullptr); + + micro_context->DeallocateTempTfLiteTensor(input); + micro_context->DeallocateTempTfLiteTensor(output); + + return kTfLiteOk; +} + +template +void copyCast(const FromT* in, ToT* out, int num_elements) { + std::transform(in, in + num_elements, out, + [](FromT a) { return static_cast(a); }); +} + +template +TfLiteStatus copyToTensor(TfLiteContext* context, const FromT* in, + TfLiteEvalTensor* out, int num_elements) { + switch (out->type) { + case kTfLiteInt8: + copyCast(in, out->data.int8, num_elements); + break; + case 
kTfLiteInt16: + copyCast(in, out->data.i16, num_elements); + break; + case kTfLiteInt32: + copyCast(in, out->data.i32, num_elements); + break; + case kTfLiteFloat32: + copyCast(in, tflite::micro::GetTensorData(out), num_elements); + break; + default: + // Unsupported type. + MicroPrintf("Output type %s (%d) not supported.", + TfLiteTypeGetName(out->type), out->type); + } + return kTfLiteOk; +} + +TfLiteStatus Eval(TfLiteContext* context, TfLiteNode* node) { + const TfLiteEvalTensor* input = + tflite::micro::GetEvalInput(context, node, kInputTensor); + TfLiteEvalTensor* output = + tflite::micro::GetEvalOutput(context, node, kOutputTensor); + int num_elements = MatchingFlatSize(tflite::micro::GetTensorShape(input), + tflite::micro::GetTensorShape(output)); + + switch (input->type) { + case kTfLiteInt8: + return copyToTensor(context, input->data.int8, output, num_elements); + case kTfLiteInt16: + return copyToTensor(context, tflite::micro::GetTensorData(input), + output, num_elements); + case kTfLiteInt32: + return copyToTensor(context, tflite::micro::GetTensorData(input), + output, num_elements); + case kTfLiteUInt32: + return copyToTensor(context, + tflite::micro::GetTensorData(input), output, + num_elements); + case kTfLiteFloat32: + return copyToTensor(context, tflite::micro::GetTensorData(input), + output, num_elements); + default: + // Unsupported type. + MicroPrintf("Input type %s (%d) not supported.", + TfLiteTypeGetName(input->type), input->type); + } + return kTfLiteOk; +} +} // namespace + +TFLMRegistration Register_CAST() { + return tflite::micro::RegisterOp(nullptr, Prepare, Eval); +} + +} // namespace tflite diff --git a/tensorflow/lite/micro/kernels/cast_test.cc b/tensorflow/lite/micro/kernels/cast_test.cc new file mode 100644 index 0000000..f5ab660 --- /dev/null +++ b/tensorflow/lite/micro/kernels/cast_test.cc @@ -0,0 +1,116 @@ +/* Copyright 2021 The TensorFlow Authors. All Rights Reserved. 
+ +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#include "tensorflow/lite/c/builtin_op_data.h" +#include "tensorflow/lite/c/common.h" +#include "tensorflow/lite/micro/kernels/kernel_runner.h" +#include "tensorflow/lite/micro/test_helpers.h" +#include "tensorflow/lite/micro/testing/micro_test.h" + +namespace tflite { +namespace testing { +namespace { + +template +void TestCast(int* input_dims_data, const inputT* input_data, + const outputT* expected_output_data, outputT* output_data) { + TfLiteIntArray* input_dims = IntArrayFromInts(input_dims_data); + TfLiteIntArray* output_dims = IntArrayFromInts(input_dims_data); + const int output_dims_count = ElementCount(*output_dims); + constexpr int inputs_size = 1; + constexpr int outputs_size = 1; + constexpr int tensors_size = inputs_size + outputs_size; + TfLiteTensor tensors[tensors_size] = { + CreateTensor(input_data, input_dims), + CreateTensor(output_data, output_dims), + }; + + int inputs_array_data[] = {1, 0}; + TfLiteIntArray* inputs_array = IntArrayFromInts(inputs_array_data); + int outputs_array_data[] = {1, 1}; + TfLiteIntArray* outputs_array = IntArrayFromInts(outputs_array_data); + + const TFLMRegistration registration = Register_CAST(); + micro::KernelRunner runner(registration, tensors, tensors_size, inputs_array, + outputs_array, + /*builtin_data=*/nullptr); + + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, runner.InitAndPrepare()); + 
TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, runner.Invoke()); + + for (int i = 0; i < output_dims_count; ++i) { + TF_LITE_MICRO_EXPECT_EQ(expected_output_data[i], output_data[i]); + } +} + +} // namespace +} // namespace testing +} // namespace tflite + +TF_LITE_MICRO_TESTS_BEGIN + +TF_LITE_MICRO_TEST(CastFloatToInt8) { + int8_t output_data[6]; + int input_dims[] = {2, 3, 2}; + + // TODO(b/178391195): Test negative and out-of-range numbers. + const float input_values[] = {100.f, 1.0f, 0.f, 0.4f, 1.999f, 1.1f}; + const int8_t golden[] = {100, 1, 0, 0, 1, 1}; + tflite::testing::TestCast(input_dims, input_values, golden, output_data); +} + +TF_LITE_MICRO_TEST(CastFloatToInt16) { + int16_t output_data[6]; + int input_dims[] = {2, 3, 2}; + + // TODO(b/178391195): Test negative and out-of-range numbers. + const float input_values[] = {100.f, 1.0f, 0.f, 0.4f, 1.999f, 1.1f}; + const int16_t golden[] = {100, 1, 0, 0, 1, 1}; + tflite::testing::TestCast(input_dims, input_values, golden, output_data); +} + +TF_LITE_MICRO_TEST(CastInt8ToFloat) { + float output_data[6]; + int input_dims[] = {2, 3, 2}; + const int8_t input_values[] = {123, 0, 1, 2, 3, 4}; + const float golden[] = {123.f, 0.f, 1.f, 2.f, 3.f, 4.f}; + tflite::testing::TestCast(input_dims, input_values, golden, output_data); +} + +TF_LITE_MICRO_TEST(CastInt16ToFloat) { + float output_data[6]; + int input_dims[] = {2, 3, 2}; + const int16_t input_values[] = {123, 0, 1, 2, 3, 4}; + const float golden[] = {123.f, 0.f, 1.f, 2.f, 3.f, 4.f}; + tflite::testing::TestCast(input_dims, input_values, golden, output_data); +} + +TF_LITE_MICRO_TEST(CastInt16ToInt32) { + int32_t output_data[6]; + int input_dims[] = {2, 3, 2}; + const int16_t input_values[] = {123, 0, 1, 2, 3, 4}; + const int32_t golden[] = {123, 0, 1, 2, 3, 4}; + tflite::testing::TestCast(input_dims, input_values, golden, output_data); +} + +TF_LITE_MICRO_TEST(CastInt32ToInt16) { + int16_t output_data[6]; + int input_dims[] = {2, 3, 2}; + const int32_t input_values[] = 
{123, 0, 1, 2, 3, 4}; + const int16_t golden[] = {123, 0, 1, 2, 3, 4}; + tflite::testing::TestCast(input_dims, input_values, golden, output_data); +} + +TF_LITE_MICRO_TESTS_END diff --git a/tensorflow/lite/micro/kernels/ceil.cc b/tensorflow/lite/micro/kernels/ceil.cc new file mode 100644 index 0000000..46b55e7 --- /dev/null +++ b/tensorflow/lite/micro/kernels/ceil.cc @@ -0,0 +1,73 @@ +/* Copyright 2022 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#include "tensorflow/lite/kernels/internal/reference/ceil.h" + +#include "tensorflow/lite/c/common.h" +#include "tensorflow/lite/kernels/internal/tensor_ctypes.h" +#include "tensorflow/lite/kernels/kernel_util.h" +#include "tensorflow/lite/micro/kernels/kernel_util.h" + +namespace tflite { + +namespace { + +constexpr int kInputTensor = 0; +constexpr int kOutputTensor = 0; + +TfLiteStatus Prepare(TfLiteContext* context, TfLiteNode* node) { + MicroContext* micro_context = GetMicroContext(context); + + TfLiteTensor* input = + micro_context->AllocateTempInputTensor(node, kInputTensor); + TF_LITE_ENSURE(context, input != nullptr); + TfLiteTensor* output = + micro_context->AllocateTempOutputTensor(node, kOutputTensor); + TF_LITE_ENSURE(context, output != nullptr); + TF_LITE_ENSURE_EQ(context, NumInputs(node), 1); + TF_LITE_ENSURE_EQ(context, NumOutputs(node), 1); + TF_LITE_ENSURE_TYPES_EQ(context, input->type, 
kTfLiteFloat32); + TF_LITE_ENSURE_TYPES_EQ(context, output->type, input->type); + TF_LITE_ENSURE_EQ(context, output->bytes, input->bytes); + TF_LITE_ENSURE_EQ(context, output->dims->size, input->dims->size); + for (int i = 0; i < output->dims->size; ++i) { + TF_LITE_ENSURE_EQ(context, output->dims->data[i], input->dims->data[i]); + } + micro_context->DeallocateTempTfLiteTensor(input); + micro_context->DeallocateTempTfLiteTensor(output); + return kTfLiteOk; +} + +TfLiteStatus Eval(TfLiteContext* context, TfLiteNode* node) { + const TfLiteEvalTensor* input = + tflite::micro::GetEvalInput(context, node, kInputTensor); + TfLiteEvalTensor* output = + tflite::micro::GetEvalOutput(context, node, kOutputTensor); + + reference_ops::Ceil(tflite::micro::GetTensorShape(input), + tflite::micro::GetTensorData(input), + tflite::micro::GetTensorShape(output), + tflite::micro::GetTensorData(output)); + + return kTfLiteOk; +} + +} // namespace + +TFLMRegistration Register_CEIL() { + return tflite::micro::RegisterOp(nullptr, Prepare, Eval); +} + +} // namespace tflite diff --git a/tensorflow/lite/micro/kernels/ceil_test.cc b/tensorflow/lite/micro/kernels/ceil_test.cc new file mode 100644 index 0000000..caba85e --- /dev/null +++ b/tensorflow/lite/micro/kernels/ceil_test.cc @@ -0,0 +1,82 @@ +/* Copyright 2022 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/ + +#include "tensorflow/lite/c/builtin_op_data.h" +#include "tensorflow/lite/c/common.h" +#include "tensorflow/lite/micro/kernels/kernel_runner.h" +#include "tensorflow/lite/micro/test_helpers.h" +#include "tensorflow/lite/micro/testing/micro_test.h" + +namespace tflite { +namespace testing { +namespace { + +void TestCeil(int* input_dims_data, const float* input_data, + const float* expected_output_data, float* output_data) { + TfLiteIntArray* input_dims = IntArrayFromInts(input_dims_data); + TfLiteIntArray* output_dims = IntArrayFromInts(input_dims_data); + const int output_dims_count = ElementCount(*output_dims); + constexpr int inputs_size = 1; + constexpr int outputs_size = 1; + constexpr int tensors_size = inputs_size + outputs_size; + TfLiteTensor tensors[tensors_size] = { + CreateTensor(input_data, input_dims), + CreateTensor(output_data, output_dims), + }; + + int inputs_array_data[] = {1, 0}; + TfLiteIntArray* inputs_array = IntArrayFromInts(inputs_array_data); + int outputs_array_data[] = {1, 1}; + TfLiteIntArray* outputs_array = IntArrayFromInts(outputs_array_data); + + const TFLMRegistration registration = Register_CEIL(); + micro::KernelRunner runner(registration, tensors, tensors_size, inputs_array, + outputs_array, + /*builtin_data=*/nullptr); + + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, runner.InitAndPrepare()); + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, runner.Invoke()); + + for (int i = 0; i < output_dims_count; ++i) { + TF_LITE_MICRO_EXPECT_NEAR(expected_output_data[i], output_data[i], 1e-5f); + } +} + +} // namespace +} // namespace testing +} // namespace tflite + +TF_LITE_MICRO_TESTS_BEGIN + +TF_LITE_MICRO_TEST(SingleDim) { + float output_data[2]; + int input_dims[] = {1, 2}; + const float input_values[] = {8.5, 0.0}; + const float golden[] = {9, 0}; + tflite::testing::TestCeil(input_dims, input_values, golden, output_data); +} + +TF_LITE_MICRO_TEST(MultiDims) { + float 
output_data[10]; + int input_dims[] = {4, 2, 1, 1, 5}; + const float input_values[] = { + 0.0001, 8.0001, 0.9999, 9.9999, 0.5, + -0.0001, -8.0001, -0.9999, -9.9999, -0.5, + }; + const float golden[] = {1, 9, 1, 10, 1, 0, -8, 0, -9, 0}; + tflite::testing::TestCeil(input_dims, input_values, golden, output_data); +} + +TF_LITE_MICRO_TESTS_END diff --git a/tensorflow/lite/micro/kernels/ceva/ceva_common.cc b/tensorflow/lite/micro/kernels/ceva/ceva_common.cc new file mode 100644 index 0000000..c776290 --- /dev/null +++ b/tensorflow/lite/micro/kernels/ceva/ceva_common.cc @@ -0,0 +1,23 @@ +/* Copyright 2021 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#include "tensorflow/lite/micro/kernels/ceva/ceva_tflm_lib.h" +#define CEVA_TFLM_KERNELS_SCRATCH_SIZE_VAL_DEF 32768 +int32_t CEVA_TFLM_KERNELS_SCRATCH_SIZE_VAL = + CEVA_TFLM_KERNELS_SCRATCH_SIZE_VAL_DEF; +#ifndef WIN32 +__attribute__((section(".MODEL_DATA"))) +#endif +int32_t CEVA_TFLM_KERNELS_SCRATCH[CEVA_TFLM_KERNELS_SCRATCH_SIZE_VAL_DEF]; diff --git a/tensorflow/lite/micro/kernels/ceva/ceva_common.h b/tensorflow/lite/micro/kernels/ceva/ceva_common.h new file mode 100755 index 0000000..15b05ca --- /dev/null +++ b/tensorflow/lite/micro/kernels/ceva/ceva_common.h @@ -0,0 +1,24 @@ +/* Copyright 2021 The TensorFlow Authors. All Rights Reserved. 
#ifndef TENSORFLOW_LITE_MICRO_KERNELS_CEVA_CEVA_COMMON_H_
#define TENSORFLOW_LITE_MICRO_KERNELS_CEVA_CEVA_COMMON_H_

#include <cstdint>  // int32_t: the header must be self-contained.

// Scratch workspace shared by the CEVA-optimized TFLM kernels.
// Defined (and sized) in ceva_common.cc; exposed only when building for a
// CEVA target.
#if defined(CEVA_BX1) || defined(CEVA_SP500)
extern int32_t CEVA_TFLM_KERNELS_SCRATCH[];
extern int32_t CEVA_TFLM_KERNELS_SCRATCH_SIZE_VAL;
#endif  // defined(CEVA_BX1) || defined(CEVA_SP500)

#endif  // TENSORFLOW_LITE_MICRO_KERNELS_CEVA_CEVA_COMMON_H_
+==============================================================================*/ + +// API header for CEVA TFLM optimized kernel library + +#ifndef TENSORFLOW_LITE_MICRO_KERNELS_CEVA_CEVA_TFLM_LIB_H_ +#define TENSORFLOW_LITE_MICRO_KERNELS_CEVA_CEVA_TFLM_LIB_H_ + +#include "tensorflow/lite/micro/kernels/ceva/types.h" + +#if defined(__cplusplus) +extern "C" { +#endif /* __cplusplus */ + +void CEVA_TFLM_ResizeNearestNeighbor_float32( + const bool align_corners, int32_t output_height, int32_t output_width, + int32_t row_offset, int32_t input_height, int32_t input_width, + int32_t col_offset, int32_t depth, const int32_t* input_ptr, + int32_t* output_ptr, const bool half_pixel_centers, int32_t* scratch); +void CEVA_TFLM_ResizeNearestNeighbor_int8( + const bool align_corners, int32_t output_height, int32_t output_width, + int32_t row_offset, int32_t input_height, int32_t input_width, + int32_t col_offset, int32_t depth, const int8_t* input_ptr, + int8_t* output_ptr, const bool half_pixel_centers, int32_t* scratch); + +void CEVA_TFLM_Abs_Float32(const float* input_data, float* output_data, + int flat_size); +void CEVA_TFLM_Sqrt_Float32(const float* input_data, float* output_data, + int flat_size); +void CEVA_TFLM_Rsqrt_Float32(const float* input_data, float* output_data, + int flat_size); +void CEVA_TFLM_Square_Float32(const float* input_data, float* output_data, + int flat_size); + +void CEVA_TFLM_Cos_Float32(const float* input_data, float* output_data, + int flat_size); +void CEVA_TFLM_Sin_Float32(const float* input_data, float* output_data, + int flat_size); +void CEVA_TFLM_Tanh_Float32(const float* input_data, float* output_data, + int flat_size); + +void CEVA_TFLM_Sigmoid_Float32(const float* input_data, float* output_data, + int flat_size); +void CEVA_TFLM_Log_Float32(const float* input_data, float* output_data, + int flat_size); + +void CEVA_TFLM_LogicalNot(const bool* input_data, bool* output_data, + int flat_size); + +void CEVA_TFLM_AffineQuantize_Int8(const 
float_32* input_data, + int8_t* output_data, int flat_size, + float_32 scale, int zero_point); + +void CEVA_TFLM_Softmax_Float32(const float* input_data, float* output_data, + const float beta, const int depth); + +void CEVA_TFLM_Neg_Float32(const float_32* input_data, float_32* output_data, + const int flat_size); + +void CEVA_TFLM_RoundToNearest_asm(const float* input_arr, float* output_arr, + const int size); +float RoundToNearest(float value); + +void CEVA_TFLM_Round_float32(const float* input_data, float* output_data, + const int flat_size); + +void CEVA_TFLM_Softmax_Int8(const int8_t* input_data, int8_t* output_data, + const int32_t input_beta_multiplier, + const int32_t input_beta_left_shift, + const int32_t depth, void* scratch); + +void CEVA_TFLM_Min_Max_Float32(const float* input_data, + const float float_activation_min, + const float float_activation_max, + const int flat_size, float* output_data); + +void CEVA_TFLM_Add_Float32(const void* params_inp, const float* input1_data, + const float* input2_data, float* output_data, + const int flat_size); + +void CEVA_TFLM_BroadcastAdd4DSlow_Float32(const void* params_inp, + const float* input1_data, + const float* input2_data, + float* output_data, const int* Dims, + const int* desc1, const int* desc2); + +void CEVA_TFLM_BroadcastSubSlow_Float32( + const void* params_inp, const float* input1_data, const float* input2_data, + float* output_data, const int* strides1, const int* strides2, + const int* output_strides, const int* output_extents); + +void CEVA_TFLM_BroadcastSubSlow_Float32_loop( + const void* params_inp, const float* input1_data, const float* input2_data, + float* output_data, const int* output_extents, const int* strides1, + const int* strides2, const int* output_strides); + +void CEVA_TFLM_SubWithActivation_Float32(const void* params_inp, + const float* input1_data, + const float* input2_data, + float* output_data, + const int flat_size); + +void CEVA_TFLM_MaximumBroadcastSlow_Float32( + const 
float* input1_data, const float* input2_data, float* output_data, + const int* strides1, const int* strides2, const int* output_strides, + const int* output_extents); +void CEVA_TFLM_MinimumBroadcastSlow_Float32( + const float* input1_data, const float* input2_data, float* output_data, + const int* strides1, const int* strides2, const int* output_strides, + const int* output_extents); + +void CEVA_TFLM_Maximum_Float32(const float* input1_data, + const float* input2_data, float* output_data, + const int flat_size); +void CEVA_TFLM_Minimum_Float32(const float* input1_data, + const float* input2_data, float* output_data, + const int flat_size); +void CEVA_TFLM_Maximum_Float32_asm(const float* input1_data, + const float* input2_data, float* output_data, + const int flat_size); +void CEVA_TFLM_Minimum_Float32_asm(const float* input1_data, + const float* input2_data, float* output_data, + const int flat_size); +void CEVA_TFLM_DepthwiseConv_Float32( + // const DepthwiseParams& params, + // const int batches, // always 1 + const int stride_width, const int stride_height, const int pad_width, + const int pad_height, const int depth_multiplier, const int input_height, + const int input_width, const int input_depth, const float* input_data, + const int filter_height, const int filter_width, const int filter_depth, + const float* filter_data, const float* bias_data, const int output_height, + const int output_width, const int output_depth, float* output_data, + const int dilation_width_factor, const int dilation_height_factor, + const float output_activation_min, const float output_activation_max + +); +void CEVA_TFLM_DepthwiseConvPerChannel_int8( + const int stride_width, const int stride_height, const int pad_width, + const int pad_height, const int depth_multiplier_, + const int32_t input_offset_, const int32_t output_offset, + const int32_t* output_multiplier, const int32_t* output_shift, + const int input_height, const int input_width_, const int input_depth_, + const 
int8_t* input_data, const int filter_height, const int filter_width, + const int filter_depth_, const int8_t* filter_data, + const int32_t* bias_data, const int output_height, const int output_width, + const int output_depth, + + int8_t* output_data, int32_t* scratch_ + + , + const int dilation_width_factor_, const int dilation_height_factor, + const int32_t output_activation_min, const int32_t output_activation_max); + +void CEVA_TFLM_ConvPerChannel_Int8( + const int stride_width, const int stride_height, const int pad_width, + const int pad_height, // const int depth_multiplier, + const int32_t input_offset, const int32_t output_offset, + const int32_t* output_multiplier, const int32_t* output_shift, + const int input_height, const int input_width, const int input_depth_Dims3, + const int input_depth, const int8_t* input_data, const int filter_height, + const int filter_width, const int filter_depth, const int8_t* filter_data, + const int32_t* bias_data, const int output_height, const int output_width, + const int output_depth_Dims3, const int output_depth, int8_t* output_data, + int32_t* scratch, const int dilation_width_factor, + const int dilation_height_factor, const int32_t output_activation_min, + const int32_t output_activation_max); + +void CEVA_TFLM_Conv_Float32( + // const int batches, + const int stride_width, const int stride_height, const int pad_width, + const int pad_height, // const int depth_multiplier, + const int input_height, const int input_width, const int input_depth_Dims3, + const int input_depth, const float* input_data, const int filter_height, + const int filter_width, const int filter_depth, const float* filter_data, + const float* bias_data, const int output_height, const int output_width, + const int output_depth_Dims3, const int output_depth, float* output_data, + const int dilation_width_factor, const int dilation_height_factor, + const float output_activation_min, const float output_activation_max + +); + +/////////////////// 
+void CEVA_TFLM_MaximumBroadcastSlow_Int8( + const int8_t* input1_data, const int8_t* input2_data, int8_t* output_data, + const int* strides1, const int* strides2, const int* output_strides, + const int* output_extents); +void CEVA_TFLM_MinimumBroadcastSlow_Int8( + const int8_t* input1_data, const int8_t* input2_data, int8_t* output_data, + const int* strides1, const int* strides2, const int* output_strides, + const int* output_extents); + +void CEVA_TFLM_Maximum_Int8(const int8_t* input1_data, + const int8_t* input2_data, int8_t* output_data, + const int flat_size); +void CEVA_TFLM_Minimum_Int8(const int8_t* input1_data, + const int8_t* input2_data, int8_t* output_data, + const int flat_size); + +void CEVA_TFLM_BroadcastSubSlow_Int8( + const void* params_inp, const int8_t* input1_data, + const int8_t* input2_data, int8_t* output_data, const int* strides1, + const int* strides2, const int* output_strides, const int* output_extents); + +void CEVA_TFLM_BroadcastSubSlow_Int8_loop( + const void* params_inp, const int8_t* input1_data, + const int8_t* input2_data, int8_t* output_data, const int* output_extents, + const int* strides1, const int* strides2, const int* output_strides); + +void CEVA_TFLM_BroadcastAddSlow_Int8(const void* params_inp, + const int8_t* input1_data, + const int8_t* input2_data, + int8_t* output_data, const int* strides1, + const int* strides2, + const int* output_extents); + +void CEVA_TFLM_BroadcastAddSlow_Int8_loop( + const void* params_inp, const int8_t* input1_data, + const int8_t* input2_data, int8_t* output_data, const int* output_extents, + const int* strides1, const int* strides2); + +void CEVA_TFLM_Sub_Int8(const void* params_inp, const int8_t* input1_data, + const int8_t* input2_data, int8_t* output_data, + const int flat_size); + +void CEVA_TFLM_Sub_Uint8(const void* params_inp, const uint8_t* input1_data, + const uint8_t* input2_data, uint8_t* output_data, + const int flat_size); + +void CEVA_TFLM_Add_Uint8(const void* params, const 
uint8_t* input1_data, + const uint8_t* input2_data, uint8_t* output_data, + const int flat_size); + +void CEVA_TFLM_Add_Int8(const void* params_inp, const int8_t* input1_data, + const int8_t* input2_data, int8_t* output_data, + const int flat_size); + +void CEVA_TFLM_BroadcastAdd4DSlow_Uint8(const void* params, + const uint8_t* input1_data, + const uint8_t* input2_data, + uint8_t* output_data, const int* Dims, + const int* desc1, const int* desc2, + const int* dims_data); +void CEVA_TFLM_svdf_Float32(float_32* vector1_ptr, float_32* vector2_ptr, + int32_t num_units, int32_t memory_size_rank, + float_32* output_ptr_batch); +void CEVA_TFLM_svdf_Int8(int n_memory, const int8_t* matrix_ptr, + const int8_t* vector_in_batch_t, + int16_t* result_in_batch, int input_zp, int n_input, + int effective_scale_1_a, int effective_scale_1_b, + int n_filter, int* scratch); +void CEVA_TFLM_AffineQuantize_Int8(const float_32* input_data, + int8_t* output_data, int flat_size, + float_32 scale, int zero_point); + +// int32_t MultiplyByQuantizedMultiplier_t(int32_t x, int32_t +// quantized_multiplier, int shift); int32_t +// MultiplyByQuantizedMultiplier_t1(int32_t x, int32_t quantized_multiplier, int +// shift); + +void CEVA_TFLM_L2Normalization_Float32(const float* input_data, + float* output_data, float epsilon, + const int outer_size, const int depth); +void CEVA_TFLM_L2Normalization_Int8(int32_t input_zero_point, + int32_t outer_size, int32_t depth, + const int8_t* input_data, + int8_t* output_data); + +void CEVA_TFLM_prelu_Float32(const float* in1_data, const int32_t* in1_strides, + const float* in2_data, const int32_t* in2_strides, + float* out_data, const int32_t* out_strides, + const int32_t* dims); + +void CEVA_TFLM_prelu_Int8(const int8_t* in1_data, const int32_t* in1_strides, + const int8_t* alpha_data, + const int32_t* alpha_strides, int8_t* out_data, + const int32_t* out_strides, const int32_t* dims, + const int32_t* params); +void CEVA_TFLM_FullyConnected_Float32( + const 
void* params_inp, const int input_shape, const float* input_data, + const int weights_shape_DimensionsCount, const int* weights_shape_DimsData, + const float* weights_data, const int bias_shape, const float* bias_data, + const int output_shape_DimensionsCount, const int* output_shape_DimsData, + float* output_data); +void CEVA_TFLM_FullyConnected_int8( + const void* params_inp, const int input_shape, const int8_t* input_data, + const int filter_shape_DimensionsCount, const int* filter_shape_DimsData, + const int8_t* filter_data, const int bias_shape, const int32_t* bias_data, + const int output_shape_DimensionsCount, const int* output_shape_DimsData, + int8_t* output_data, int* scratch); + +void CEVA_TFLM_tanh_Int8(int32_t input_zero_point, int32_t input_range_radius, + int32_t input_multiplier, int32_t input_shift, + int32_t input_size, const int8_t* input_data, + int8_t* output_data); + +void CEVA_TFLM_Logistic_Int8(int32_t input_zero_point, + int32_t input_range_radius, + int32_t input_multiplier, int32_t input_left_shift, + int32_t input_size, const int8_t* input_data, + int8_t* output_data); + +void CEVA_TFLM_Tanh_float32(const float_32* input_data, float_32* output_data, + const int flat_size); +void CEVA_TFLM_Logistic_float32(const float_32* input_data, + float_32* output_data, const int flat_size); + +void CEVA_TFLM_PackImplLoop_float(const float* input_ptr, float* output_ptr, + int outer_size, int copy_size, + int step_vcount_copy_size); +void CEVA_TFLM_PackUnpackImplLoopInitSizes(int* const copy_size, + int* const outer_size, + const int* const outputDimsData, + const int dimensions, int axis); +void CEVA_TFLM_PackImplLoop_Int8(const int8_t* input_ptr, int8_t* output_ptr, + int outer_size, int copy_size, + int step_vcount_copy_size); +void CEVA_TFLM_UnpackImplLoop_float(const float* input_ptr, float* output_ptr, + int outer_size, int copy_size, + int step_vcount_copy_size); +void CEVA_TFLM_UnpackImplLoop_Int8(const int8_t* input_ptr, int8_t* output_ptr, + 
int outer_size, int copy_size, + int step_vcount_copy_size); + +void CEVA_TFLM_ComparisonEqual_Float32(const float* input1, const float* input2, + bool* output, const int32_t size); +void CEVA_TFLM_ComparisonNotEqual_Float32(const float* input1, + const float* input2, bool* output, + const int32_t size); +void CEVA_TFLM_ComparisonGreater_Float32(const float* input1, + const float* input2, bool* output, + const int32_t size); +void CEVA_TFLM_ComparisonGreaterEqual_Float32(const float* input1, + const float* input2, bool* output, + const int32_t size); +void CEVA_TFLM_ComparisonLess_Float32(const float* input1, const float* input2, + bool* output, const int32_t size); +void CEVA_TFLM_ComparisonLessEqual_Float32(const float* input1, + const float* input2, bool* output, + const int32_t size); + +void CEVA_TFLM_ComparisonEqual_Float32_Broadcast(const float* input1, + const float* input2, + bool* output, + const int32_t* dims, + const int32_t** op_param); + +void CEVA_TFLM_ComparisonNotEqual_Float32_Broadcast(const float* input1, + const float* input2, + bool* output, + const int32_t* dims, + const int32_t** op_param); + +void CEVA_TFLM_ComparisonGreater_Float32_Broadcast(const float* input1, + const float* input2, + bool* output, + const int32_t* dims, + const int32_t** op_param); +void CEVA_TFLM_ComparisonGreaterEqual_Float32_Broadcast( + const float* input1, const float* input2, bool* output, const int32_t* dims, + const int32_t** op_param); + +void CEVA_TFLM_ComparisonLess_Float32_Broadcast(const float* input1, + const float* input2, + bool* output, + const int32_t* dims, + const int32_t** op_param); + +void CEVA_TFLM_ComparisonLessEqual_Float32_Broadcast(const float* input1, + const float* input2, + bool* output, + const int32_t* dims, + const int32_t** op_param); + +void CEVA_TFLM_ComparisonEqual_Int8(const int8_t* input1, const int8_t* input2, + bool* output, const int32_t flatsize, + void* op_params); +void CEVA_TFLM_ComparisonNotEqual_Int8(const int8_t* input1, 
+ const int8_t* input2, bool* output, + const int32_t flatsize, void* op_params); +void CEVA_TFLM_ComparisonGreater_Int8(const int8_t* input1, + const int8_t* input2, bool* output, + const int32_t flatsize, void* op_params); +void CEVA_TFLM_ComparisonGreaterEqual_Int8(const int8_t* input1, + const int8_t* input2, bool* output, + const int32_t flatsize, + void* op_params); +void CEVA_TFLM_ComparisonLess_Int8(const int8_t* input1, const int8_t* input2, + bool* output, const int32_t flatsize, + void* op_params); +void CEVA_TFLM_ComparisonLessEqual_Int8(const int8_t* input1, + const int8_t* input2, bool* output, + const int32_t flatsize, + void* op_params); + +void CEVA_TFLM_ComparisonEqual_Int8_Broadcast(const int8_t* input1, + const int8_t* input2, + bool* output, const int32_t* dims, + void* op_params); +void CEVA_TFLM_ComparisonNotEqual_Int8_Broadcast(const int8_t* input1, + const int8_t* input2, + bool* output, + const int32_t* dims, + void* op_params); +void CEVA_TFLM_ComparisonGreater_Int8_Broadcast(const int8_t* input1, + const int8_t* input2, + bool* output, + const int32_t* dims, + void* op_params); +void CEVA_TFLM_ComparisonGreaterEqual_Int8_Broadcast(const int8_t* input1, + const int8_t* input2, + bool* output, + const int32_t* dims, + void* op_params); +void CEVA_TFLM_ComparisonLess_Int8_Broadcast(const int8_t* input1, + const int8_t* input2, bool* output, + const int32_t* dims, + void* op_params); +void CEVA_TFLM_ComparisonLessEqual_Int8_Broadcast(const int8_t* input1, + const int8_t* input2, + bool* output, + const int32_t* dims, + void* op_params); + +void CEVA_TFLM_Mul_Float32(const void* params_inp, const float* input1_data, + const float* input2_data, float* output_data, + const int flat_size); + +void CEVA_TFLM_BroadcastMul4DSlow_Float32(const void* params_inp, + const float* input1_data, + const float* input2_data, + float* output_data, const int* Dims, + const int* desc1, const int* desc2); + +void CEVA_TFLM_AveragePool_Float32(const void* params, 
const int* input_shape, + const float* input_data, + const int* output_shape, float* output_data); + +void CEVA_TFLM_AveragePool_Int8(const void* params_inp, const int* input_shape, + const int8_t* input_data, + const int* output_shape, int8_t* output_data); + +void CEVA_TFLM_AveragePool_Int8_Loop( + const int* input_shape, const int8_t* input_data, int8_t* output_data, + const int depth, int batch, int in_y, const int filter_y_start, + const int filter_y_end, const int in_x_origin, const int filter_x_start, + const int filter_x_end, int filter_count, int32_t quantized_activation_min, + int32_t quantized_activation_max, int indx_out); + +void CEVA_TFLM_MaxPool_Float32(const void* params_inp, const int* input_shape, + const float* input_data, const int* output_shape, + float* output_data); + +void CEVA_TFLM_MaxPool_Int8(const void* params_inp, const int* input_shape, + const int8_t* input_data, const int* output_shape, + int8_t* output_data); + +void CEVA_TFLM_MaxPool_Int8_Loop( + const int* input_shape, const int8_t* input_data, int8_t* output_data, + const int depth, int batch, int in_y, const int filter_y_start, + const int filter_y_end, const int in_x_origin, const int filter_x_start, + const int filter_x_end, int32_t quantized_activation_min, + int32_t quantized_activation_max, int indx_out); + +void CEVA_TFLM_Mul_Int8(const void* params_inp, const int8_t* input1_data, + const int8_t* input2_data, int8_t* output_data, + const int flat_size); + +void CEVA_TFLM_BroadcastMul4DSlow_Int8(const void* params_inp, + const int8_t* input1_data, + const int8_t* input2_data, + int8_t* output_data, const int* Dims, + const int* desc1, const int* desc2); + +void CEVA_TFLM_Dequantize_Float32(const int8_t* input_data, + float_32* output_data, int flat_size, + float_32 scale, int zero_point); + +void CEVA_TFLM_Ceil_Float32(const float* input_data, float* output_data, + const int flat_size); + +void CEVA_TFLM_Logical_And_Int8(const int8_t* input1_data, + const int8_t* 
input2_data, int8_t* output_data, + const int flat_size); + +void CEVA_TFLM_BroadcastLogicalAnd4DSlow_Int8(const int8_t* input1_data, + const int8_t* input2_data, + int8_t* output_data, + const int* Dims, const int* desc1, + const int* desc2); + +void CEVA_TFLM_Logical_Or_Int8(const int8_t* input1_data, + const int8_t* input2_data, int8_t* output_data, + const int flat_size); + +void CEVA_TFLM_BroadcastLogicalOr4DSlow_Int8(const int8_t* input1_data, + const int8_t* input2_data, + int8_t* output_data, + const int* Dims, const int* desc1, + const int* desc2); + +void CEVA_TFLM_SplitLoops_Float32(float** out_ptrs, const int* dataIndex, + const float* input_ptr, int outer_size, + int output_count, int copy_size); +void CEVA_TFLM_SplitLoops_int8(int8_t** out_ptrs, const int* dataIndex, + const int8_t* input_ptr, int outer_size, + int output_count, int copy_size); + +void CEVA_TFLM_Relu_Float32(const float* input_data, float* output_data, + const int flat_size); +void CEVA_TFLM_Relu6_Float32(const float* input_data, float* output_data, + const int flat_size); +void CEVA_TFLM_Relu_int8(const void* params, const int8_t* input_data, + int8_t* output_data, const int flat_size); +void CEVA_TFLM_Relu6_int8(const int8_t lower, const int8_t upper, + const int8_t* input_data, int8_t* output_data, + const int flat_size); +void CEVA_TFLM_Floor_float32(const float* input_data, float* output_data, + const int flat_size); + +void CEVA_TFLM_Concatenation_Float32(const void* params_inp, + const int** input_shape, + const float** input_data, + const int output_shape_DimensionsCount, + const int* output_shape_DimsData, + float* output_data); + +void CEVA_TFLM_Concatenation_int8(const void* params_inp, + const int** input_shape, + const int8_t** input_data, + const int output_shape_DimensionsCount, + const int* output_shape_DimsData, + int8_t* output_data); + +void CEVA_TFLM_Mean4D_Float32(const float* input_data, float* output_data, + const int* Dims, const int* Dims_inp, + const int* 
// Mean (reduce) and strided-slice entry points of the CEVA TFLM library.
// NOTE(review): the original header declared CEVA_TFLM_StridedSlice_Float32
// twice with identical signatures; the redundant declaration is removed.
bool CEVA_TFLM_Mean_Float32(const float* input_data, const int* input_dims,
                            const int input_num_dims, float* output_data,
                            const int* output_dims, const int output_num_dims,
                            const int* axis, const int num_axis_dimensions,
                            bool keep_dims, int* temp_index, int* resolved_axis,
                            float* temp_sum);
void CEVA_TFLM_Mean_Float32_loop(float* temp_sum, float* output_data,
                                 int num_elements_in_axis, size_t num_outputs);
void CEVA_TFLM_Mean4D_Int8(int32_t multiplier, int32_t shift,
                           const int8_t* input_data, int32_t input_zero_point,
                           int8_t* output_data, int32_t output_zero_point,
                           int* input_shape, int* output_shape);
bool CEVA_TFLM_Mean_Int8(const int8_t* input_data, const int* input_dims,
                         const int input_num_dims, int8_t* output_data,
                         const int* output_dims, const int output_num_dims,
                         const int* axis, const int num_axis_dimensions,
                         bool keep_dims, int* temp_index, int* resolved_axis,
                         int32_t* temp_sum);
void CEVA_TFLM_Mean_Int8_loop(int32_t* temp_sum, int8_t* output_data,
                              int num_elements_in_axis, size_t num_outputs);

void CEVA_TFLM_StridedSlice_Float32(void* op_params,
                                    int unextended_input_shape_DimensionsCount,
                                    int* unextended_input_shape_DimsData,
                                    float* input_data, float* output_data);

void CEVA_TFLM_StridedSlice_loop_Float32(float* input_data, float* output_data,
                                         void* params);

void CEVA_TFLM_StridedSlice_int8(void* op_params,
                                 int unextended_input_shape_DimensionsCount,
                                 int* unextended_input_shape_DimsData,
                                 int8_t* input_data, int8_t* output_data);

void CEVA_TFLM_StridedSlice_loop_int8(int8_t* input_data, int8_t* output_data,
                                      void* params);
output_data); + +void CEVA_TFLM_Pad_Int8(void* op_params, int input_shape, int* output_shape, + const int8_t* input_data, const int8_t* pad_value_ptr, + int8_t* output_data); + +int CEVA_TFLM_ReshapeOutput(int input_type, const int input_size, + const int* input_data, int output_type, + int* output_size, int* output_data, + int node_in_size); + +int CEVA_TFLM_EvalRashape(const int8_t* input, int8_t* output, + unsigned int N_cnt); + +#if defined(__cplusplus) +} +#endif /* __cplusplus */ + +#endif // TENSORFLOW_LITE_MICRO_KERNELS_CEVA_CEVA_TFLM_LIB_H_ diff --git a/tensorflow/lite/micro/kernels/ceva/conv.cc b/tensorflow/lite/micro/kernels/ceva/conv.cc new file mode 100644 index 0000000..c68913d --- /dev/null +++ b/tensorflow/lite/micro/kernels/ceva/conv.cc @@ -0,0 +1,258 @@ +/* Copyright 2021 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/ + +#include "tensorflow/lite/kernels/internal/reference/conv.h" + +#include "tensorflow/lite/c/builtin_op_data.h" +#include "tensorflow/lite/c/common.h" +#include "tensorflow/lite/kernels/internal/common.h" +#include "tensorflow/lite/kernels/internal/quantization_util.h" +#include "tensorflow/lite/kernels/internal/reference/integer_ops/conv.h" +#include "tensorflow/lite/kernels/internal/tensor_ctypes.h" +#include "tensorflow/lite/kernels/kernel_util.h" +#include "tensorflow/lite/kernels/padding.h" +#include "tensorflow/lite/micro/kernels/ceva/ceva_common.h" +#include "tensorflow/lite/micro/kernels/ceva/ceva_tflm_lib.h" +#include "tensorflow/lite/micro/kernels/conv.h" +#include "tensorflow/lite/micro/kernels/kernel_util.h" +#include "tensorflow/lite/micro/micro_log.h" + +#ifdef MCPS_MEASUREMENT +#include "tensorflow/lite/micro/kernels/ceva/mcps_macros.h" +#endif + +namespace tflite { +namespace { + +void* Init(TfLiteContext* context, const char* buffer, size_t length) { + TFLITE_DCHECK(context->AllocatePersistentBuffer != nullptr); + return context->AllocatePersistentBuffer(context, sizeof(OpDataConv)); +} + +void EvalQuantizedPerChannel(TfLiteContext* context, TfLiteNode* node, + TfLiteConvParams* params, const OpDataConv& data, + const TfLiteEvalTensor* input, + const TfLiteEvalTensor* filter, + const TfLiteEvalTensor* bias, + TfLiteEvalTensor* output, + TfLiteEvalTensor* im2col) { + // TODO(b/154032858): Investigate removing extra copies. 
+ + ConvParams op_params = ConvParamsQuantized(*params, data); + const int32_t input_offset = op_params.input_offset; // r = s(q - Z) + const int32_t output_offset = op_params.output_offset; + + const int8_t *input_data, *filter_data; + const int32_t* bias_data; + int8_t* output_data; + + const RuntimeShape& input_shape = tflite::micro::GetTensorShape(input); + const RuntimeShape& filter_shape = tflite::micro::GetTensorShape(filter); + const RuntimeShape& bias_shape = tflite::micro::GetTensorShape(bias); + const RuntimeShape& output_shape = tflite::micro::GetTensorShape(output); + const RuntimeShape& im2col_shape = tflite::micro::GetTensorShape(im2col); + + const int stride_width = op_params.stride_width; + const int stride_height = op_params.stride_height; + const int dilation_width_factor = op_params.dilation_width_factor; + const int dilation_height_factor = op_params.dilation_height_factor; + const int pad_width = op_params.padding_values.width; + const int pad_height = op_params.padding_values.height; + TFLITE_DCHECK_EQ(input_shape.DimensionsCount(), 4); + TFLITE_DCHECK_EQ(filter_shape.DimensionsCount(), 4); + TFLITE_DCHECK_EQ(output_shape.DimensionsCount(), 4); + + const int batches = MatchingDim(input_shape, 0, output_shape, 0); + const int input_depth = MatchingDim(input_shape, 3, filter_shape, 3); + const int output_depth = MatchingDim(filter_shape, 0, output_shape, 3); + const int input_height = input_shape.Dims(1); + const int input_width = input_shape.Dims(2); + const int input_depth_Dims3 = input_shape.Dims(3); + const int filter_height = filter_shape.Dims(1); + const int filter_width = filter_shape.Dims(2); + const int filter_depth = filter_shape.Dims(3); + const int output_height = output_shape.Dims(1); + const int output_width = output_shape.Dims(2); + const int output_depth_Dims3 = output_shape.Dims(3); + + input_data = tflite::micro::GetTensorData(input); + filter_data = tflite::micro::GetTensorData(filter); + bias_data = 
tflite::micro::GetTensorData(bias); + output_data = tflite::micro::GetTensorData(output); + + int sizeof_scratch = filter_depth; + if (sizeof_scratch < output_depth_Dims3) sizeof_scratch = output_depth_Dims3; + + if (sizeof_scratch > CEVA_TFLM_KERNELS_SCRATCH_SIZE_VAL) { + MicroPrintf("Scratch size (%d) less that required (%d)", + CEVA_TFLM_KERNELS_SCRATCH_SIZE_VAL, sizeof_scratch); + } + +#ifdef MCPS_MEASUREMENT + MCPS_START_ONE; +#endif + for (int k = 0; k < batches; k++) { + CEVA_TFLM_ConvPerChannel_Int8( + + stride_width, stride_height, pad_width, + pad_height, // const int depth_multiplier, + input_offset, output_offset, data.per_channel_output_multiplier, + data.per_channel_output_shift, input_height, input_width, + input_depth_Dims3, input_depth, + + &input_data[k * input_height * input_width * input_depth_Dims3], + filter_height, filter_width, filter_depth, filter_data, bias_data, + output_height, output_width, output_depth_Dims3, output_depth, + + &output_data[k * output_height * output_width * output_depth_Dims3], + CEVA_TFLM_KERNELS_SCRATCH, dilation_width_factor, + dilation_height_factor, data.output_activation_min, + data.output_activation_max); + } +#ifdef MCPS_MEASUREMENT + MCPS_STOP_ONE( + "Test params:Call CEVA_TFLM_ConvPerChannel_Int8 %d times, inetrnal loop " + "= %dx%dx%dx%dx%dx%d", + batches, output_height, output_width, filter_height, filter_width, + output_depth, input_depth); +#endif +} + +void EvalFloat(TfLiteContext* context, TfLiteNode* node, + TfLiteConvParams* params, const OpDataConv& data, + const TfLiteEvalTensor* input, const TfLiteEvalTensor* filter, + const TfLiteEvalTensor* bias, TfLiteEvalTensor* im2col, + TfLiteEvalTensor* hwcn_weights, TfLiteEvalTensor* output) { + float output_activation_min, output_activation_max; + CalculateActivationRange(params->activation, &output_activation_min, + &output_activation_max); + // TODO(b/154032858): Investigate removing extra copies. 
+ ConvParams op_params = ConvParamsFloat(*params, data); + + const float *input_data, *filter_data, *bias_data, *im2col_data; + float* output_data; + + const RuntimeShape& input_shape = tflite::micro::GetTensorShape(input); + const RuntimeShape& filter_shape = tflite::micro::GetTensorShape(filter); + const RuntimeShape& bias_shape = tflite::micro::GetTensorShape(bias); + const RuntimeShape& output_shape = tflite::micro::GetTensorShape(output); + const RuntimeShape& im2col_shape = tflite::micro::GetTensorShape(im2col); + + const int stride_width = op_params.stride_width; + const int stride_height = op_params.stride_height; + const int dilation_width_factor = op_params.dilation_width_factor; + const int dilation_height_factor = op_params.dilation_height_factor; + const int pad_width = op_params.padding_values.width; + const int pad_height = op_params.padding_values.height; + + TFLITE_DCHECK_EQ(input_shape.DimensionsCount(), 4); + TFLITE_DCHECK_EQ(filter_shape.DimensionsCount(), 4); + TFLITE_DCHECK_EQ(output_shape.DimensionsCount(), 4); + + const int batches = MatchingDim(input_shape, 0, output_shape, 0); + const int input_depth = MatchingDim(input_shape, 3, filter_shape, 3); + const int output_depth = MatchingDim(filter_shape, 0, output_shape, 3); + const int input_height = input_shape.Dims(1); + const int input_width = input_shape.Dims(2); + const int input_depth_Dims3 = input_shape.Dims(3); + const int filter_height = filter_shape.Dims(1); + const int filter_width = filter_shape.Dims(2); + const int filter_depth = filter_shape.Dims(3); + const int output_height = output_shape.Dims(1); + const int output_width = output_shape.Dims(2); + const int output_depth_Dims3 = output_shape.Dims(3); + + input_data = tflite::micro::GetTensorData(input); + filter_data = tflite::micro::GetTensorData(filter); + bias_data = tflite::micro::GetTensorData(bias); + output_data = tflite::micro::GetTensorData(output); + im2col_data = tflite::micro::GetTensorData(im2col); + +#ifdef 
MCPS_MEASUREMENT + MCPS_START_ONE; +#endif + for (int k = 0; k < batches; k++) { + CEVA_TFLM_Conv_Float32( + stride_width, stride_height, pad_width, pad_height, input_height, + input_width, input_depth_Dims3, input_depth, + &input_data[k * input_height * input_width * input_depth_Dims3], + filter_height, filter_width, filter_depth, filter_data, bias_data, + output_height, output_width, output_depth_Dims3, output_depth, + &output_data[k * output_height * output_width * output_depth_Dims3], + dilation_width_factor, dilation_height_factor, output_activation_min, + output_activation_max + + ); + } +#ifdef MCPS_MEASUREMENT + MCPS_STOP_ONE( + "Test params:Call CEVA_TFLM_Conv_Float32 %d times, inetrnal loop = " + "%dx%dx%dx%dx%dx%d", + batches, output_height, output_width, filter_height, filter_width, + output_depth, input_depth); +#endif +} + +TfLiteStatus EvalCEVA(TfLiteContext* context, TfLiteNode* node) { + auto* params = reinterpret_cast(node->builtin_data); + + const TfLiteEvalTensor* input = + tflite::micro::GetEvalInput(context, node, kConvInputTensor); + const TfLiteEvalTensor* filter = + tflite::micro::GetEvalInput(context, node, kConvWeightsTensor); + const TfLiteEvalTensor* bias = + (NumInputs(node) == 3) + ? tflite::micro::GetEvalInput(context, node, kConvBiasTensor) + : nullptr; + TfLiteEvalTensor* output = + tflite::micro::GetEvalOutput(context, node, kConvOutputTensor); + + TFLITE_DCHECK(node->user_data != nullptr); + const OpDataConv& data = *(static_cast(node->user_data)); + + TF_LITE_ENSURE_EQ(context, input->type, output->type); + TF_LITE_ENSURE_MSG(context, input->type == filter->type, + "Hybrid models are not supported on TFLite Micro."); + + switch (input->type) { // Already know in/out types are same. 
+ case kTfLiteFloat32: + EvalFloat(context, node, params, data, input, filter, bias, nullptr, + nullptr, output); + break; + case kTfLiteInt8: + EvalQuantizedPerChannel(context, node, params, data, input, filter, bias, + output, nullptr); + break; + default: + MicroPrintf("Type %s (%d) not supported.", TfLiteTypeGetName(input->type), + input->type); + return kTfLiteError; + } + return kTfLiteOk; +} +TfLiteStatus ConvEval(TfLiteContext* context, TfLiteNode* node) { +#if defined(CEVA_BX1) || defined(CEVA_SP500) + return EvalCEVA(context, node); +#else + return Eval(context, node); // reference fallback +#endif +} +} // namespace + +TFLMRegistration Register_CONV_2D() { + return tflite::micro::RegisterOp(Init, ConvPrepare, ConvEval); +} + +} // namespace tflite diff --git a/tensorflow/lite/micro/kernels/ceva/depthwise_conv.cc b/tensorflow/lite/micro/kernels/ceva/depthwise_conv.cc new file mode 100644 index 0000000..533014f --- /dev/null +++ b/tensorflow/lite/micro/kernels/ceva/depthwise_conv.cc @@ -0,0 +1,250 @@ +/* Copyright 2021 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/ + +#include "tensorflow/lite/kernels/internal/reference/integer_ops/depthwise_conv.h" + +#include "tensorflow/lite/c/builtin_op_data.h" +#include "tensorflow/lite/c/common.h" +#include "tensorflow/lite/kernels/internal/common.h" +#include "tensorflow/lite/kernels/internal/quantization_util.h" +#include "tensorflow/lite/kernels/internal/reference/depthwiseconv_float.h" +#include "tensorflow/lite/kernels/internal/reference/depthwiseconv_uint8.h" +#include "tensorflow/lite/kernels/internal/tensor_ctypes.h" +#include "tensorflow/lite/kernels/kernel_util.h" +#include "tensorflow/lite/kernels/padding.h" +#include "tensorflow/lite/micro/kernels/ceva/ceva_common.h" +#include "tensorflow/lite/micro/kernels/ceva/ceva_tflm_lib.h" +#include "tensorflow/lite/micro/kernels/depthwise_conv.h" +#include "tensorflow/lite/micro/kernels/kernel_util.h" +#include "tensorflow/lite/micro/micro_log.h" + +#ifdef MCPS_MEASUREMENT +#include "tensorflow/lite/micro/kernels/ceva/mcps_macros.h" +#endif + +namespace tflite { +namespace { + +void* Init(TfLiteContext* context, const char* buffer, size_t length) { + TFLITE_DCHECK(context->AllocatePersistentBuffer != nullptr); + return context->AllocatePersistentBuffer(context, sizeof(OpDataConv)); +} + +void EvalFloat(TfLiteContext* context, TfLiteNode* node, + TfLiteDepthwiseConvParams* params, const OpDataConv& data, + const TfLiteEvalTensor* input, const TfLiteEvalTensor* filter, + const TfLiteEvalTensor* bias, TfLiteEvalTensor* output) { + float output_activation_min, output_activation_max; + CalculateActivationRange(params->activation, &output_activation_min, + &output_activation_max); + + tflite::DepthwiseParams op_params = DepthwiseConvParamsFloat(*params, data); + + const float *input_data, *filter_data, *bias_data; + float* output_data; + input_data = tflite::micro::GetTensorData(input); + filter_data = tflite::micro::GetTensorData(filter); + bias_data = 
tflite::micro::GetTensorData(bias); + output_data = tflite::micro::GetTensorData(output); + + const RuntimeShape& input_shape = tflite::micro::GetTensorShape(input); + const RuntimeShape& filter_shape = tflite::micro::GetTensorShape(filter); + const RuntimeShape& bias_shape = tflite::micro::GetTensorShape(bias); + const RuntimeShape& output_shape = tflite::micro::GetTensorShape(output); + + const int batches = MatchingDim(input_shape, 0, output_shape, 0); + const int input_height = input_shape.Dims(1); + const int input_width = input_shape.Dims(2); + const int input_depth = input_shape.Dims(3); + + const int filter_height = filter_shape.Dims(1); + const int filter_width = filter_shape.Dims(2); + const int filter_depth = filter_shape.Dims(3); + + const int output_height = output_shape.Dims(1); + const int output_width = output_shape.Dims(2); + const int output_depth = output_shape.Dims(3); + + const int stride_width = params->stride_width; + const int stride_height = params->stride_height; + const int pad_width = data.padding.width; + const int pad_height = data.padding.height; + const int depth_multiplier = params->depth_multiplier; + + const int dilation_width_factor = params->dilation_width_factor; + const int dilation_height_factor = params->dilation_height_factor; + +#ifdef MCPS_MEASUREMENT + MCPS_START_ONE; +#endif + for (int k = 0; k < batches; k++) { + CEVA_TFLM_DepthwiseConv_Float32( + // 1, + stride_width, stride_height, pad_width, pad_height, depth_multiplier, + input_height, input_width, input_depth, + &input_data[k * input_height * input_width * input_depth], + filter_height, filter_width, filter_depth, filter_data, bias_data, + output_height, output_width, output_depth, + &output_data[k * output_height * output_width * output_depth], + dilation_width_factor, dilation_height_factor, output_activation_min, + output_activation_max + + ); + } +#ifdef MCPS_MEASUREMENT + MCPS_STOP_ONE( + "Test params:Call CEVA_TFLM_DepthwiseConv_Float32 %d times, inetrnal " 
+ "loop = %dx%dx%dx%dx%dx%d", + batches, output_height, output_width, filter_height, filter_width, + output_depth, input_depth); +#endif +} + +void EvalQuantizedPerChannel(TfLiteContext* context, TfLiteNode* node, + TfLiteDepthwiseConvParams* params, + const OpDataConv& data, + const TfLiteEvalTensor* input, + const TfLiteEvalTensor* filter, + const TfLiteEvalTensor* bias, + TfLiteEvalTensor* output) { + DepthwiseParams op_params = DepthwiseConvParamsQuantized(*params, data); + + op_params.quantized_activation_min = std::numeric_limits::min(); + op_params.quantized_activation_max = std::numeric_limits::max(); + const int8_t* input_data; + const int8_t* filter_data; + const int32_t* bias_data; + int8_t* output_data; + const int32_t input_offset = op_params.input_offset; + const int32_t output_offset = op_params.output_offset; + + input_data = tflite::micro::GetTensorData(input); + filter_data = tflite::micro::GetTensorData(filter); + bias_data = tflite::micro::GetTensorData(bias); + output_data = tflite::micro::GetTensorData(output); + + const RuntimeShape& input_shape = tflite::micro::GetTensorShape(input); + const RuntimeShape& filter_shape = tflite::micro::GetTensorShape(filter); + const RuntimeShape& bias_shape = tflite::micro::GetTensorShape(bias); + const RuntimeShape& output_shape = tflite::micro::GetTensorShape(output); + + const int batches = MatchingDim(input_shape, 0, output_shape, 0); + const int input_height = input_shape.Dims(1); + const int input_width = input_shape.Dims(2); + const int input_depth = input_shape.Dims(3); + + const int filter_height = filter_shape.Dims(1); + const int filter_width = filter_shape.Dims(2); + const int filter_depth = filter_shape.Dims(3); + + const int output_height = output_shape.Dims(1); + const int output_width = output_shape.Dims(2); + const int output_depth = output_shape.Dims(3); + + const int stride_width = params->stride_width; + const int stride_height = params->stride_height; + const int pad_width = 
data.padding.width; + const int pad_height = data.padding.height; + const int depth_multiplier = params->depth_multiplier; + + const int dilation_width_factor = params->dilation_width_factor; + const int dilation_height_factor = params->dilation_height_factor; + + if ((input_depth * 4) > CEVA_TFLM_KERNELS_SCRATCH_SIZE_VAL) { + MicroPrintf("Scratch size (%d) less that required (%d)", + CEVA_TFLM_KERNELS_SCRATCH_SIZE_VAL, (input_depth * 4)); + } + +#ifdef MCPS_MEASUREMENT + MCPS_START_ONE; +#endif + for (int k = 0; k < batches; k++) { + CEVA_TFLM_DepthwiseConvPerChannel_int8( + // 1, + stride_width, stride_height, pad_width, pad_height, depth_multiplier, + input_offset, output_offset, data.per_channel_output_multiplier, + data.per_channel_output_shift, input_height, input_width, input_depth, + &input_data[k * input_height * input_width * input_depth], + filter_height, filter_width, filter_depth, filter_data, bias_data, + output_height, output_width, output_depth, + &output_data[k * output_height * output_width * output_depth], + CEVA_TFLM_KERNELS_SCRATCH, dilation_width_factor, + dilation_height_factor, op_params.quantized_activation_min, + op_params.quantized_activation_max + + ); + } +#ifdef MCPS_MEASUREMENT + MCPS_STOP_ONE( + "Test params:Call CEVA_TFLM_DepthwiseConvPerChannel_int8 %d times, " + "inetrnal loop = %dx%dx%dx%dx%dx%d", + batches, output_height, output_width, filter_height, filter_width, + output_depth, input_depth); +#endif +} + +TfLiteStatus EvalCEVA(TfLiteContext* context, TfLiteNode* node) { + TFLITE_DCHECK(node->user_data != nullptr); + TFLITE_DCHECK(node->builtin_data != nullptr); + + auto* params = + reinterpret_cast(node->builtin_data); + const OpDataConv& data = *(static_cast(node->user_data)); + + TfLiteEvalTensor* output = + tflite::micro::GetEvalOutput(context, node, kDepthwiseConvOutputTensor); + const TfLiteEvalTensor* input = + tflite::micro::GetEvalInput(context, node, kDepthwiseConvInputTensor); + const TfLiteEvalTensor* filter = + 
tflite::micro::GetEvalInput(context, node, kDepthwiseConvWeightsTensor); + const TfLiteEvalTensor* bias = + (NumInputs(node) == 3) + ? tflite::micro::GetEvalInput(context, node, kDepthwiseConvBiasTensor) + : nullptr; + + // TODO(aselle): Consider whether float conv and quantized conv should be + // separate ops to avoid dispatch overhead here. + switch (input->type) { // Already know in/out types are same. + case kTfLiteFloat32: + EvalFloat(context, node, params, data, input, filter, bias, output); + break; + case kTfLiteInt8: + EvalQuantizedPerChannel(context, node, params, data, input, filter, bias, + output); + break; + default: + MicroPrintf("Type %s (%d) not supported.", TfLiteTypeGetName(input->type), + input->type); + return kTfLiteError; + } + return kTfLiteOk; +} + +TfLiteStatus DepthWiseConvEval(TfLiteContext* context, TfLiteNode* node) { +#if defined(CEVA_BX1) || defined(CEVA_SP500) + return EvalCEVA(context, node); +#else + return Eval(context, node); // reference fallback +#endif +} + +} // namespace + +TFLMRegistration Register_DEPTHWISE_CONV_2D() { + return tflite::micro::RegisterOp(Init, DepthwiseConvPrepare, + DepthWiseConvEval); +} + +} // namespace tflite diff --git a/tensorflow/lite/micro/kernels/ceva/fully_connected.cc b/tensorflow/lite/micro/kernels/ceva/fully_connected.cc new file mode 100644 index 0000000..0e0b14d --- /dev/null +++ b/tensorflow/lite/micro/kernels/ceva/fully_connected.cc @@ -0,0 +1,250 @@ +/* Copyright 2021 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#include "tensorflow/lite/micro/kernels/fully_connected.h" + +#include "tensorflow/lite/c/builtin_op_data.h" +#include "tensorflow/lite/c/common.h" +#include "tensorflow/lite/kernels/internal/common.h" +#include "tensorflow/lite/kernels/internal/quantization_util.h" +#include "tensorflow/lite/kernels/internal/reference/fully_connected.h" +#include "tensorflow/lite/kernels/internal/reference/integer_ops/fully_connected.h" +#include "tensorflow/lite/kernels/internal/tensor_ctypes.h" +#include "tensorflow/lite/kernels/kernel_util.h" +#include "tensorflow/lite/micro/kernels/ceva/ceva_tflm_lib.h" +#include "tensorflow/lite/micro/kernels/kernel_util.h" +#include "tensorflow/lite/micro/micro_log.h" + +// #define MCPS_MEASUREMENT +#ifdef MCPS_MEASUREMENT +#include "tensorflow/lite/micro/kernels/ceva/mcps_macros.h" +#endif + +#if defined(CEVA_BX1) || defined(CEVA_SP500) +extern int32_t* CEVA_TFLM_KERNELS_SCRATCH; +extern int32_t CEVA_TFLM_KERNELS_SCRATCH_SIZE_VAL; +#endif // CEVA platform + +namespace tflite { +namespace { + +void* Init(TfLiteContext* context, const char* buffer, size_t length) { + TFLITE_DCHECK(context->AllocatePersistentBuffer != nullptr); + + return context->AllocatePersistentBuffer(context, + sizeof(OpDataFullyConnected)); +} + +TfLiteStatus Prepare(TfLiteContext* context, TfLiteNode* node) { + TFLITE_DCHECK(node->user_data != nullptr); + TFLITE_DCHECK(node->builtin_data != nullptr); + + auto* data = static_cast(node->user_data); + + const auto params = + static_cast(node->builtin_data); + + const TfLiteTensor* input = + AllocateTempInputTensor(node, kFullyConnectedInputTensor); + const TfLiteTensor* filter = + AllocateTempInputTensor(node, kFullyConnectedWeightsTensor); + const TfLiteTensor* bias = + AllocateTempInputTensor(context, node, kFullyConnectedBiasTensor); + 
TfLiteTensor* output = + AllocateTempOutputTensor(node, kFullyConnectedOutputTensor); + + TF_LITE_ENSURE_TYPES_EQ(context, input->type, output->type); + TF_LITE_ENSURE_MSG(context, input->type == filter->type, + "Hybrid models are not supported on TFLite Micro."); + + return CalculateOpDataFullyConnected(context, params->activation, input->type, + input, filter, bias, output, data); +} + +__attribute__((optnone)) TfLiteStatus EvalQuantizedInt8CEVA( + TfLiteContext* context, TfLiteNode* node, const OpDataFullyConnected& data, + const TfLiteEvalTensor* input, const TfLiteEvalTensor* filter, + const TfLiteEvalTensor* bias, TfLiteEvalTensor* output) { + tflite::FullyConnectedParams op_params = FullyConnectedParamsQuantized(data); + + int input_shape_dimensions_count = + tflite::micro::GetTensorShape(input).DimensionsCount(); + int weights_shape_dimensions_count = + tflite::micro::GetTensorShape(filter).DimensionsCount(); + int* weights_shape_dims_data = + const_cast(tflite::micro::GetTensorShape(filter).DimsData()); + int bias_shape_dimensions_count = + tflite::micro::GetTensorShape(bias).DimensionsCount(); + int output_shape_dimensions_count = + tflite::micro::GetTensorShape(output).DimensionsCount(); + int* output_shape_dims_data = + const_cast(tflite::micro::GetTensorShape(output).DimsData()); + + void* params = (void*)&op_params; + int8_t* inputp = + const_cast(tflite::micro::GetTensorData(input)); + int8_t* filterp = + const_cast(tflite::micro::GetTensorData(filter)); + int32_t* biasp = + const_cast(tflite::micro::GetTensorData(bias)); + int8_t* outputp = + const_cast(tflite::micro::GetTensorData(output)); + +#ifdef MCPS_MEASUREMENT + int batches = output_shape_dims_data[0]; + int output_depth = + weights_shape_dims_data[weights_shape_dimensions_count - 2]; + int accum_depth = weights_shape_dims_data[weights_shape_dimensions_count - 1]; + MCPS_START_ONE; +#endif + + int sizeof_scratch_required = output_shape_dims_data[1]; + + if (sizeof_scratch_required > 
CEVA_TFLM_KERNELS_SCRATCH_SIZE_VAL) { + MicroPrintf("Scratch size (%d) less that required (%d)", + CEVA_TFLM_KERNELS_SCRATCH_SIZE_VAL, sizeof_scratch_required); + return kTfLiteError; + } + + CEVA_TFLM_FullyConnected_int8( + params, input_shape_dimensions_count, inputp, + weights_shape_dimensions_count, weights_shape_dims_data, filterp, + bias_shape_dimensions_count, biasp, output_shape_dimensions_count, + output_shape_dims_data, outputp, CEVA_TFLM_KERNELS_SCRATCH); +#ifdef MCPS_MEASUREMENT + MCPS_STOP_ONE( + "Test params:Call CEVA_TFLM_FullyConnected_int8 inetrnal loop = %dx%dx%d", + batches, output_depth, accum_depth); +#endif + + return kTfLiteOk; +} + +TfLiteStatus EvalFloatCEVA(TfLiteContext* context, TfLiteNode* node, + TfLiteFusedActivation activation, + const TfLiteEvalTensor* input, + const TfLiteEvalTensor* filter, + const TfLiteEvalTensor* bias, + TfLiteEvalTensor* output) { + // float output_activation_min, output_activation_max; + tflite::FullyConnectedParams op_params; + CalculateActivationRange(activation, &op_params.float_activation_min, + &op_params.float_activation_max); + + // op_params.float_activation_min = output_activation_min; + // op_params.float_activation_max = output_activation_max; + + int input_shape_dimensions_count = + tflite::micro::GetTensorShape(input).DimensionsCount(); + int weights_shape_dimensions_count = + tflite::micro::GetTensorShape(filter).DimensionsCount(); + int* weights_shape_dims_data = + const_cast(tflite::micro::GetTensorShape(filter).DimsData()); + int bias_shape_dimensions_count = + tflite::micro::GetTensorShape(bias).DimensionsCount(); + int output_shape_dimensions_count = + tflite::micro::GetTensorShape(output).DimensionsCount(); + int* output_shape_dims_data = + const_cast(tflite::micro::GetTensorShape(output).DimsData()); + + void* params = (void*)&op_params; + float* inputp = + const_cast(tflite::micro::GetTensorData(input)); + float* filterp = + const_cast(tflite::micro::GetTensorData(filter)); + float* 
biasp = const_cast(tflite::micro::GetTensorData(bias)); + float* outputp = + const_cast(tflite::micro::GetTensorData(output)); + +#ifdef MCPS_MEASUREMENT + int batches = 1; + int i; + for (i = 0; i < (output_shape_dimensions_count - 1); i++) + batches *= output_shape_dims_data[i]; + + int output_depth = + weights_shape_dims_data[weights_shape_dimensions_count - 2]; + int accum_depth = weights_shape_dims_data[weights_shape_dimensions_count - 1]; + MCPS_START_ONE; +#endif + CEVA_TFLM_FullyConnected_Float32( + params, + input_shape_dimensions_count, // GetTensorShape(input), + inputp, + weights_shape_dimensions_count, // GetTensorShape(filter), + weights_shape_dims_data, filterp, + bias_shape_dimensions_count, // GetTensorShape(bias), + biasp, + output_shape_dimensions_count, // GetTensorShape(output), + output_shape_dims_data, outputp); +#ifdef MCPS_MEASUREMENT + MCPS_STOP_ONE( + "Test params:Call CEVA_TFLM_FullyConnected_Float32 inetrnal loop = " + "%dx%dx%d", + batches, output_depth, accum_depth); +#endif + + return kTfLiteOk; +} + +TfLiteStatus EvalCEVA(TfLiteContext* context, TfLiteNode* node) { + TFLITE_DCHECK(node->builtin_data != nullptr); + const auto* params = + static_cast(node->builtin_data); + + const TfLiteEvalTensor* input = + tflite::micro::GetEvalInput(context, node, kFullyConnectedInputTensor); + const TfLiteEvalTensor* filter = + tflite::micro::GetEvalInput(context, node, kFullyConnectedWeightsTensor); + const TfLiteEvalTensor* bias = + tflite::micro::GetEvalInput(context, node, kFullyConnectedBiasTensor); + TfLiteEvalTensor* output = + tflite::micro::GetEvalOutput(context, node, kFullyConnectedOutputTensor); + + TFLITE_DCHECK(node->user_data != nullptr); + const OpDataFullyConnected& data = + *(static_cast(node->user_data)); + + // Checks in Prepare ensure input, output and filter types are all the same. 
+ switch (input->type) { + case kTfLiteFloat32: + return EvalFloatCEVA(context, node, params->activation, input, filter, + bias, output); + case kTfLiteInt8: + return EvalQuantizedInt8CEVA(context, node, data, input, filter, bias, + output); + + default: + MicroPrintf("Type %s (%d) not supported.", TfLiteTypeGetName(input->type), + input->type); + return kTfLiteError; + } + return kTfLiteOk; +} +TfLiteStatus Eval(TfLiteContext* context, TfLiteNode* node) { +#if defined(CEVA_BX1) || defined(CEVA_SP500) + return EvalCEVA(context, node); +#else + return EvalQuantizeReference(context, node); +#endif +} + +} // namespace + +TFLMRegistration Register_FULLY_CONNECTED() { + return tflite::micro::RegisterOp(Init, Prepare, Eval); +} + +} // namespace tflite diff --git a/tensorflow/lite/micro/kernels/ceva/logistic.cc b/tensorflow/lite/micro/kernels/ceva/logistic.cc new file mode 100644 index 0000000..00e438f --- /dev/null +++ b/tensorflow/lite/micro/kernels/ceva/logistic.cc @@ -0,0 +1,138 @@ +/* Copyright 2021 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/ + +#include "tensorflow/lite/kernels/internal/reference/integer_ops/logistic.h" + +#include "tensorflow/lite/c/builtin_op_data.h" +#include "tensorflow/lite/c/common.h" +#include "tensorflow/lite/kernels/internal/common.h" +#include "tensorflow/lite/kernels/internal/quantization_util.h" +#include "tensorflow/lite/kernels/internal/reference/logistic.h" +#include "tensorflow/lite/kernels/internal/tensor_ctypes.h" +#include "tensorflow/lite/kernels/kernel_util.h" +#include "tensorflow/lite/kernels/op_macros.h" +#include "tensorflow/lite/micro/kernels/ceva/ceva_common.h" +#include "tensorflow/lite/micro/kernels/ceva/ceva_tflm_lib.h" +#include "tensorflow/lite/micro/kernels/kernel_util.h" +#include "tensorflow/lite/micro/kernels/logistic.h" +#include "tensorflow/lite/micro/micro_log.h" + +#ifdef MCPS_MEASUREMENT +#include "tensorflow/lite/micro/kernels/ceva/mcps_macros.h" +#endif + +namespace tflite { +namespace { + +void* LogisticInit(TfLiteContext* context, const char* buffer, size_t length) { + TFLITE_DCHECK(context->AllocatePersistentBuffer != nullptr); + return context->AllocatePersistentBuffer(context, sizeof(OpDataLogistic)); +} + +TfLiteStatus LogisticEval(TfLiteContext* context, TfLiteNode* node) { + const TfLiteEvalTensor* input = + tflite::micro::GetEvalInput(context, node, kLogisticInputTensor); + TfLiteEvalTensor* output = + tflite::micro::GetEvalOutput(context, node, kLogisticOutputTensor); + + TFLITE_DCHECK(node->user_data != nullptr); + OpDataLogistic* data = static_cast(node->user_data); + + if (input->type == kTfLiteFloat32) { + switch (output->type) { + case kTfLiteFloat32: { +#if defined(CEVA_BX1) || defined(CEVA_SP500) + + const float* input_data = tflite::micro::GetTensorData(input); + float* output_data = tflite::micro::GetTensorData(output); + const int flat_size = + MatchingFlatSize(tflite::micro::GetTensorShape(input), + tflite::micro::GetTensorShape(output)); 
+#ifdef MCPS_MEASUREMENT + MCPS_START_ONE; +#endif + CEVA_TFLM_Logistic_float32(input_data, output_data, flat_size); +#ifdef MCPS_MEASUREMENT + MCPS_STOP_ONE("Test params:CEVA_TFLM_Logistic_float32 loop = %d", + flat_size); +#endif + +#else + reference_ops::Logistic(tflite::micro::GetTensorShape(input), + tflite::micro::GetTensorData(input), + tflite::micro::GetTensorShape(output), + tflite::micro::GetTensorData(output)); +#endif // ceva platform + return kTfLiteOk; + } + default: + MicroPrintf("Input %s, output %s not supported.", + TfLiteTypeGetName(input->type), + TfLiteTypeGetName(output->type)); + return kTfLiteError; + } + } else if (input->type == kTfLiteInt8) { + switch (output->type) { + case kTfLiteInt8: { +#if defined(CEVA_BX1) || defined(CEVA_SP500) + int32_t input_zero_point = data->input_zero_point; + int32_t input_range_radius = data->input_range_radius; + int32_t input_multiplier = data->input_multiplier; + int32_t input_left_shift = data->input_left_shift; + int32_t input_size = NumElements(input->dims); + const int8_t* input_data = tflite::micro::GetTensorData(input); + int8_t* output_data = tflite::micro::GetTensorData(output); + +#ifdef MCPS_MEASUREMENT + MCPS_START_ONE; +#endif + CEVA_TFLM_Logistic_Int8(input_zero_point, input_range_radius, + input_multiplier, input_left_shift, input_size, + input_data, output_data); +#ifdef MCPS_MEASUREMENT + MCPS_STOP_ONE("Test params:CEVA_TFLM_Logistic_Int8 loop = %d", + input_size); +#endif +#else + reference_integer_ops::Logistic( + data->input_zero_point, data->input_range_radius, + data->input_multiplier, data->input_left_shift, + NumElements(input->dims), + tflite::micro::GetTensorData(input), + tflite::micro::GetTensorData(output)); +#endif // ceva platform + return kTfLiteOk; + } + default: + MicroPrintf("Input %s, output %s not supported.", + TfLiteTypeGetName(input->type), + TfLiteTypeGetName(output->type)); + return kTfLiteError; + } + } else { + MicroPrintf("Input %s, output %s not supported.", + 
TfLiteTypeGetName(input->type), + TfLiteTypeGetName(output->type)); + return kTfLiteError; + } + return kTfLiteOk; +} + +} // namespace + +TFLMRegistration Register_LOGISTIC() { + return tflite::micro::RegisterOp(LogisticInit, LogisticPrepare, LogisticEval); +} +} // namespace tflite diff --git a/tensorflow/lite/micro/kernels/ceva/mcps_macros.h b/tensorflow/lite/micro/kernels/ceva/mcps_macros.h new file mode 100644 index 0000000..0d51e5a --- /dev/null +++ b/tensorflow/lite/micro/kernels/ceva/mcps_macros.h @@ -0,0 +1,115 @@ +/* Copyright 2021 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
/* Copyright 2021 The TensorFlow Authors. All Rights Reserved.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==============================================================================*/

// MCPS (cycle-count) measurement macros for CEVA optimized kernels.
// With MCPS_MEASUREMENT undefined every macro expands to nothing, so the
// kernels carry zero overhead in production builds.

#ifndef MCPS_MACROS_
#define MCPS_MACROS_

#ifndef WIN32
#include <time.h>  // clock() used by the measurement macros
#endif
#include <stdio.h>  // FILE / fopen / fprintf used by the logging macros

#ifdef MCPS_MEASUREMENT

#ifdef STACK_MEASUREMENT
#if defined(__cplusplus)
extern "C" {
#endif /* __cplusplus */
// CEVA toolchain helpers: paint the stack, then measure high-water mark.
void CEVA_BX_Stack_Marking(const int32_t _count);
int32_t CEVA_BX_Stack_Measurement(const int32_t count);
#if defined(__cplusplus)
}
#endif /* __cplusplus */
#endif

// Fixed overhead (cycles) of the call/return around the measured region.
#define MCPS_CALL_RET_VALUE 4

// NOTE(review): "VARIBLES" spelling kept — it is part of this header's
// public macro interface.
#ifdef STACK_MEASUREMENT
#define MCPS_VARIBLES                  \
  clock_t c1, c2;                      \
  int ClockCEVA, Constant_cycles;      \
  int StackSize;                       \
  FILE* f_mcps_report;
#else
#define MCPS_VARIBLES                  \
  clock_t c1, c2;                      \
  int ClockCEVA, Constant_cycles;      \
  FILE* f_mcps_report;
#endif

#define MCPS_OPEN_FILE f_mcps_report = fopen("mcps_report.txt", "at");

#define MCPS_CLOSE_FILE fclose(f_mcps_report);

#ifdef STACK_MEASUREMENT
// Measures clock() back-to-back first to calibrate out its own cost.
#define MCPS_START_CLOCK               \
  CEVA_BX_Stack_Marking(0x800);        \
  reset_clock();                       \
  start_clock();                       \
  c1 = clock();                        \
  c2 = clock();                        \
  Constant_cycles = c2 - c1;           \
  c1 = clock();

#define MCPS_STOP_AND_LOG(...)                                            \
  c2 = clock();                                                           \
  ClockCEVA = c2 - c1 - Constant_cycles - MCPS_CALL_RET_VALUE;            \
  StackSize = CEVA_BX_Stack_Measurement(0x800) * 4;                       \
  fprintf(f_mcps_report, __VA_ARGS__);                                    \
  fprintf(f_mcps_report, ":cycles:%d:Stack:%d\r\n", ClockCEVA, StackSize);

#else  // STACK_MEASUREMENT
#define MCPS_START_CLOCK               \
  reset_clock();                       \
  start_clock();                       \
  c1 = clock();                        \
  c2 = clock();                        \
  Constant_cycles = c2 - c1;           \
  c1 = clock();

#define MCPS_STOP_AND_LOG(...)                                 \
  c2 = clock();                                                \
  ClockCEVA = c2 - c1 - Constant_cycles - MCPS_CALL_RET_VALUE; \
  fprintf(f_mcps_report, __VA_ARGS__);                         \
  fprintf(f_mcps_report, ":cycles:%d\r\n", ClockCEVA);
#endif  // STACK_MEASUREMENT

#define MCPS_STOP_AND_PRINT(...)                               \
  c2 = clock();                                                \
  ClockCEVA = c2 - c1 - Constant_cycles - MCPS_CALL_RET_VALUE; \
  fprintf(stdout, __VA_ARGS__);                                \
  fprintf(stdout, ":cycles=%d\n", ClockCEVA);

// Convenience pair used by the kernels: declare + open + start, stop + log +
// close.
#define MCPS_START_ONE \
  MCPS_VARIBLES;       \
  MCPS_OPEN_FILE;      \
  MCPS_START_CLOCK;
#define MCPS_STOP_ONE(...)      \
  MCPS_STOP_AND_LOG(__VA_ARGS__); \
  MCPS_CLOSE_FILE;

#else  // MCPS_MEASUREMENT disabled: all macros are no-ops.
#define MCPS_VARIBLES
#define MCPS_OPEN_FILE
#define MCPS_START_CLOCK
#define MCPS_STOP_AND_LOG(...)
#define MCPS_STOP_AND_PRINT(...)
#define MCPS_CLOSE_FILE

#define MCPS_START_ONE
#define MCPS_STOP_ONE(...)
#endif

#endif  // MCPS_MACROS_

// ===== next file in patch: tensorflow/lite/micro/kernels/ceva/quantize.cc
// (Apache-2.0 header reproduced with that file's content) =====
+==============================================================================*/ +#include "tensorflow/lite/kernels/internal/reference/quantize.h" + +#include "tensorflow/lite/c/common.h" +#include "tensorflow/lite/kernels/internal/quantization_util.h" +#include "tensorflow/lite/kernels/internal/reference/requantize.h" +#include "tensorflow/lite/kernels/internal/tensor_ctypes.h" +#include "tensorflow/lite/kernels/kernel_util.h" +#include "tensorflow/lite/micro/kernels/ceva/ceva_tflm_lib.h" +#include "tensorflow/lite/micro/kernels/kernel_util.h" +#include "tensorflow/lite/micro/kernels/quantize.h" +#include "tensorflow/lite/micro/micro_utils.h" +#ifdef MCPS_MEASUREMENT +#include "tensorflow/lite/micro/kernels/ceva/mcps_macros.h" +#endif + +namespace tflite { +namespace { + +void* Init(TfLiteContext* context, const char* buffer, size_t length) { + TFLITE_DCHECK(context->AllocatePersistentBuffer != nullptr); + return context->AllocatePersistentBuffer(context, + sizeof(OpDataQuantizeReference)); +} + +TfLiteStatus EvalCEVA(TfLiteContext* context, TfLiteNode* node) { + TFLITE_DCHECK(node->user_data != nullptr); + + auto* data = static_cast<OpDataQuantizeReference*>(node->user_data); + + const TfLiteEvalTensor* input = tflite::micro::GetEvalInput(context, node, 0); + TfLiteEvalTensor* output = tflite::micro::GetEvalOutput(context, node, 0); + + if (input->type == kTfLiteFloat32 && output->type == kTfLiteInt8) { + const float* input_data = tflite::micro::GetTensorData<float>(input); + int8_t* output_data = tflite::micro::GetTensorData<int8_t>(output); + const int flat_size = + MatchingFlatSize(tflite::micro::GetTensorShape(input), + tflite::micro::GetTensorShape(output)); + +#ifdef MCPS_MEASUREMENT + MCPS_START_ONE; +#endif + CEVA_TFLM_AffineQuantize_Int8(input_data, output_data, flat_size, + data->quantization_params.scale, + data->quantization_params.zero_point); +#ifdef MCPS_MEASUREMENT + MCPS_STOP_ONE("Test params:CEVA_TFLM_AffineQuantize_Int8 loop = %d", + flat_size); +#endif + } else + return
EvalQuantizeReference(context, node); + return kTfLiteOk; +} + +TfLiteStatus Eval(TfLiteContext* context, TfLiteNode* node) { +#if defined(CEVA_BX1) || defined(CEVA_SP500) + return EvalCEVA(context, node); +#else + return EvalQuantizeReference(context, node); +#endif +} + +} // namespace + +// This Op (QUANTIZE) quantizes the input and produces quantized output. +// AffineQuantize takes scale and zero point and quantizes the float value to +// quantized output, in int8_t or uint8_t format. +TFLMRegistration Register_QUANTIZE() { + return tflite::micro::RegisterOp(Init, PrepareQuantizeReference, Eval); +} + +} // namespace tflite diff --git a/tensorflow/lite/micro/kernels/ceva/softmax.cc b/tensorflow/lite/micro/kernels/ceva/softmax.cc new file mode 100644 index 0000000..beb43cd --- /dev/null +++ b/tensorflow/lite/micro/kernels/ceva/softmax.cc @@ -0,0 +1,167 @@ +/* Copyright 2018 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/ + +#include "tensorflow/lite/kernels/internal/reference/softmax.h" + +#include "tensorflow/lite/c/builtin_op_data.h" +#include "tensorflow/lite/c/common.h" +#include "tensorflow/lite/kernels/internal/common.h" +#include "tensorflow/lite/kernels/internal/quantization_util.h" +#include "tensorflow/lite/kernels/internal/tensor_ctypes.h" +#include "tensorflow/lite/kernels/kernel_util.h" +#include "tensorflow/lite/kernels/op_macros.h" +#include "tensorflow/lite/micro/kernels/ceva/ceva_common.h" +#include "tensorflow/lite/micro/kernels/ceva/ceva_tflm_lib.h" +#include "tensorflow/lite/micro/kernels/kernel_util.h" +#include "tensorflow/lite/micro/kernels/softmax.h" +#include "tensorflow/lite/micro/micro_log.h" + +#ifdef MCPS_MEASUREMENT +#include "tensorflow/lite/micro/kernels/ceva/mcps_macros.h" +#endif + +namespace tflite { +namespace { + +// Takes a tensor and performs softmax along the last dimension. +void SoftmaxFloatCEVA(const TfLiteEvalTensor* input, TfLiteEvalTensor* output, + const SoftmaxParams& op_data) { + const RuntimeShape& input_shape = tflite::micro::GetTensorShape(input); + const float* input_data = tflite::micro::GetTensorData(input); + const RuntimeShape& output_shape = tflite::micro::GetTensorShape(output); + float* output_data = tflite::micro::GetTensorData(output); + + const float beta = static_cast(op_data.beta); + const int trailing_dim = input_shape.DimensionsCount() - 1; + const int outer_size = + MatchingFlatSizeSkipDim(input_shape, trailing_dim, output_shape); + const int depth = + MatchingDim(input_shape, trailing_dim, output_shape, trailing_dim); + int outer_size_mcps = outer_size; + int depth_mcps = depth; + +#ifdef MCPS_MEASUREMENT + MCPS_START_ONE; +#endif + for (int i = 0; i < outer_size; ++i) { + CEVA_TFLM_Softmax_Float32(&input_data[i * depth], &output_data[i * depth], + beta, depth); + } +#ifdef MCPS_MEASUREMENT + MCPS_STOP_ONE( + "Test params:Call 
CEVA_TFLM_Softmax_Float32 %d times, internal loop = %d", + outer_size_mcps, depth_mcps); +#endif +} + +TfLiteStatus SoftmaxQuantizedCEVA(TfLiteContext* context, + const TfLiteEvalTensor* input, + TfLiteEvalTensor* output, + const SoftmaxParams& op_data) { + if (input->type == kTfLiteInt8) { + if (output->type == kTfLiteInt16) { + tflite::reference_ops::Softmax( + op_data, tflite::micro::GetTensorShape(input), + tflite::micro::GetTensorData<int8_t>(input), + tflite::micro::GetTensorShape(output), + tflite::micro::GetTensorData<int16_t>(output)); + } else { + const RuntimeShape& input_shape = tflite::micro::GetTensorShape(input); + const int8_t* input_data = tflite::micro::GetTensorData<int8_t>(input); + + const RuntimeShape& output_shape = tflite::micro::GetTensorShape(output); + int8_t* output_data = tflite::micro::GetTensorData<int8_t>(output); + + const int32_t input_beta_multiplier = + static_cast<int32_t>(op_data.input_multiplier); + const int32_t input_beta_left_shift = + static_cast<int32_t>(op_data.input_left_shift); + const int trailing_dim = input_shape.DimensionsCount() - 1; + const int outer_size = + MatchingFlatSizeSkipDim(input_shape, trailing_dim, output_shape); + const int depth = + MatchingDim(input_shape, trailing_dim, output_shape, trailing_dim); + int outer_size_mcps = outer_size; + int depth_mcps = depth; + + if (depth > CEVA_TFLM_KERNELS_SCRATCH_SIZE_VAL) { + MicroPrintf("Scratch size (%d) less than required (%d)", + CEVA_TFLM_KERNELS_SCRATCH_SIZE_VAL, depth); + return kTfLiteError; + } + +#ifdef MCPS_MEASUREMENT + MCPS_START_ONE; +#endif + for (int i = 0; i < outer_size; ++i) { + CEVA_TFLM_Softmax_Int8(&input_data[i * depth], &output_data[i * depth], + input_beta_multiplier, input_beta_left_shift, + depth, CEVA_TFLM_KERNELS_SCRATCH); + } +#ifdef MCPS_MEASUREMENT + MCPS_STOP_ONE( + "Test params:Call CEVA_TFLM_Softmax_Int8 %d times, internal loop = " + "%d", + outer_size_mcps, depth_mcps); +#endif + } + } else { + tflite::reference_ops::SoftmaxInt16( + op_data,
tflite::micro::GetTensorShape(input), + tflite::micro::GetTensorData(input), + tflite::micro::GetTensorShape(output), + tflite::micro::GetTensorData(output)); + } + + return kTfLiteOk; +} + +TfLiteStatus SoftmaxEvalCEVA(TfLiteContext* context, TfLiteNode* node) { + const TfLiteEvalTensor* input = tflite::micro::GetEvalInput(context, node, 0); + TfLiteEvalTensor* output = tflite::micro::GetEvalOutput(context, node, 0); + + TFLITE_DCHECK(node->user_data != nullptr); + SoftmaxParams op_data = *static_cast(node->user_data); + + switch (input->type) { + case kTfLiteFloat32: { + SoftmaxFloatCEVA(input, output, op_data); + return kTfLiteOk; + } + case kTfLiteInt8: + case kTfLiteInt16: { + return SoftmaxQuantizedCEVA(context, input, output, op_data); + } + default: + MicroPrintf("Type %s (%d) not supported.", TfLiteTypeGetName(input->type), + input->type); + return kTfLiteError; + } +} + +TfLiteStatus Eval(TfLiteContext* context, TfLiteNode* node) { +#if defined(CEVA_BX1) || defined(CEVA_SP500) + return SoftmaxEvalCEVA(context, node); +#else + return SoftmaxEval(context, node); // reference fallback +#endif +} +} // namespace + +TFLMRegistration Register_SOFTMAX() { + return tflite::micro::RegisterOp(SoftmaxInit, SoftmaxPrepare, Eval); +} + +} // namespace tflite diff --git a/tensorflow/lite/micro/kernels/ceva/types.h b/tensorflow/lite/micro/kernels/ceva/types.h new file mode 100644 index 0000000..048a116 --- /dev/null +++ b/tensorflow/lite/micro/kernels/ceva/types.h @@ -0,0 +1,1286 @@ +/* Copyright 2021 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ +#ifndef CEVA_TYPES_H_ +#define CEVA_TYPES_H_ + +#include +#include +#include + +// typedef int8_t int8_t; +// typedef int16_t int16; +// typedef int32_t int32_t; +// typedef uint8_t uint8; +// typedef uint16_t uint16; +// typedef uint32_t uint32; + +typedef float float_32; +typedef unsigned long long uint64; +typedef long long int64; + +#if 1 +enum BroadcastableOpCategory { + kNone, + kNonBroadcast, // Matching input shapes. + kFirstInputBroadcastsFast, // Fivefold nested loops. + kSecondInputBroadcastsFast, // Fivefold nested loops. + kGenericBroadcast, // Fall-back. +}; + +#else +enum class BroadcastableOpCategory : uint8_t { + kNone, + kNonBroadcast, // Matching input shapes. + kFirstInputBroadcastsFast, // Fivefold nested loops. + kSecondInputBroadcastsFast, // Fivefold nested loops. + kGenericBroadcast, // Fall-back. +}; +#endif + +struct ArithmeticParams_ceva { + // Shape dependent / common to data / op types. + uint8_t broadcast_category; // BroadcastableOpCategory broadcast_category; + // uint8_t inference params. + int32_t input1_offset; + int32_t input2_offset; + int32_t output_offset; + int32_t output_multiplier; + int output_shift; + // Add / Sub, not Mul, uint8_t inference params. + int left_shift; + int32_t input1_multiplier; + int input1_shift; + int32_t input2_multiplier; + int input2_shift; + // uint8_t, etc, activation params. + int32_t quantized_activation_min; + int32_t quantized_activation_max; + // float activation params. + float float_activation_min; + float float_activation_max; + + // Processed output dimensions. + // Let input "a" be the one that broadcasts in the faster-changing dimension. + // Then, after coalescing, for shapes {a0, a1, a2, a3, a4} and + // {b0, b1, b2, b3, b4}, + // broadcast_shape[4] = b0 = a0. + // broadcast_shape[3] = b1; a1 = 1. 
+ // broadcast_shape[2] = b2 = a2. + // broadcast_shape[1] = a3; b3 = 1. + // broadcast_shape[0] = b4 = a4. + int broadcast_shape[5]; +}; + +struct SoftmaxParams_ceva { + // beta is not really used (not a Tensorflow parameter) and not implemented + // for LogSoftmax. + double beta; + // uint8_t inference params. Used even when beta defaults to 1.0. + int32_t input_multiplier; + int32_t input_left_shift; + // Reverse scaling is only used by LogSoftmax. + int32_t reverse_scaling_divisor; + int32_t reverse_scaling_right_shift; + int diff_min; + int32_t zero_point; + float scale; + float* table; + int16_t* exp_lut; + int16_t* one_over_one_plus_x_lut; + uint8_t* uint8_table1; + uint8_t* uint8_table2; +}; + +enum class FusedActivationFunctionType_ceva : uint8_t { + kNone, + kRelu6, + kRelu1, + kRelu +}; +enum class PaddingType_ceva : uint8_t { kNone, kSame, kValid }; + +struct PaddingValues_ceva { + int16_t width; + int16_t height; + // offset is used for calculating "remaining" padding, for example, `width` + // is 1 and `width_offset` is 1, so padding_left is 1 while padding_right is + // 1 + 1 = 2. + int16_t width_offset; + // Same as width_offset except it's over the height dimension. + int16_t height_offset; +}; + +struct StridedSliceParams_ceva { + int8_t start_indices_count; + int32_t start_indices[5]; + int8_t stop_indices_count; + int32_t stop_indices[5]; + int8_t strides_count; + int32_t strides[5]; + + int16_t begin_mask; + int16_t ellipsis_mask; + int16_t end_mask; + int16_t new_axis_mask; + int16_t shrink_axis_mask; +}; + +struct PoolParams_ceva { + FusedActivationFunctionType_ceva activation; + PaddingType_ceva padding_type; + PaddingValues_ceva padding_values; + int stride_height; + int stride_width; + int filter_height; + int filter_width; + // uint8, etc, activation params. + int32_t quantized_activation_min; + int32_t quantized_activation_max; + // float activation params. 
+ float float_activation_min; + float float_activation_max; +}; + +inline size_t ReducedOutputOffset(const int num_dims, const int* dims, + const int* index, const int num_axis, + const int* axis) { + if (num_dims == 0) { + return 0; + } + // TFLITE_DCHECK(dims != nullptr); + // TFLITE_DCHECK(index != nullptr); + size_t offset = 0; + for (int idx = 0; idx < num_dims; ++idx) { + // if we need to skip this axis + bool is_axis = false; + if (axis != nullptr) { + for (int axis_idx = 0; axis_idx < num_axis; ++axis_idx) { + if (idx == axis[axis_idx]) { + is_axis = true; + break; + } + } + } + if (!is_axis) { + offset = offset * static_cast(dims[idx]) + + static_cast(index[idx]); + } + } + return offset; +} +inline bool NextIndex(const int num_dims, const int* dims, int* current) { + if (num_dims == 0) { + return false; + } + // TFLITE_DCHECK(dims != nullptr); + // TFLITE_DCHECK(current != nullptr); + int carry = 1; + for (int idx = num_dims - 1; idx >= 0; --idx) { + int current_val = current[idx] + carry; + // TFLITE_DCHECK_GE(dims[idx], current_val); + if (dims[idx] == current_val) { + current[idx] = 0; + } else { + current[idx] = current_val; + carry = 0; + break; + } + } + return (carry == 0); +} + +#if 0 +#include +#include +#include +#include + +#include "tensorflow/lite/kernels/internal/compatibility.h" + +namespace tflite { + +enum class FusedActivationFunctionType : uint8_t { kNone, kRelu6, kRelu1, kRelu }; +enum class PaddingType : uint8_t { kNone, kSame, kValid }; + +struct PaddingValues { + int16 width; + int16 height; + // offset is used for calculating "remaining" padding, for example, `width` + // is 1 and `width_offset` is 1, so padding_left is 1 while padding_right is + // 1 + 1 = 2. + int16 width_offset; + // Same as width_offset except it's over the height dimension. 
+ int16 height_offset; +}; + +// This enumeration allows for non-default formats for the weights array +// of a fully-connected operator, allowing the use of special optimized +// runtime paths. +enum class FullyConnectedWeightsFormat : uint8_t { + // Default format (flat 2D layout, the inner contiguous dimension + // is input_depth, the outer non-contiguous dimension is output_depth) + kDefault, + // Summary: optimized layout for fast CPU runtime implementation, + // aimed specifically at ARM CPUs at the moment, and specialized for + // 8-bit quantized layers. + // + // The use case we're concerned with here is: 8-bit quantization, + // large weights matrix that doesn't fit in cache (e.g. 4096x2048 in + // a key application that drove this), very small batch size (e.g. 1 -- 4). + // + // Even with 8-bit quantization of weights, the performance of memory + // accesses to the weights can become the dominant issue when + // the batch size is small, so each weight value is used in only a few + // arithmetic ops, i.e. the fully-connected node has a low arithmetic + // intensity. The specific issues that arise are of three kinds: + // (1) One may, ideally, max out DRAM bandwidth, i.e. be truly memory + // bound. That's the "good" issue to run into. + // (2) One may run into sub-optimal pre-fetching: the data hasn't been + // prefetched into the cache by the time we need it. + // (3) One may run into cache aliasing: multiple values that are + // pre-fetched, alias each other in the L1 cache (which typically + // has only 4-way set associativity in ARM CPUs) and thus evict + // each other before we get to using them. + // + // The point of this shuffling is to avoid issues (2) and (3) so that + // we get as fast as possible given only the hard constraint (1). 
+ // This is achieved by turning the difficulty into a solution: the + // difficulty, that each value loaded from memory is used only in + // one kernel iteration, making this operation memory-intensive, hints at + // the solution, of shuffling the weights so that they are stored in the + // exact order as the kernel needs to load them, so that the memory + // accesses made by the kernel are trivial. This solves (2) because the + // trivial memory access pattern allows the CPU's automatic prefetching + // to perform very well (no need even for preload instructions), and this + // solves (3) because the values being loaded concurrently are now + // contiguous in the address space, thus don't alias each other in the cache. + // + // On ARM, we typically want our kernel to process a 4x16 block of weights + // at a time, because: + // - 16 is the number of bytes in a NEON register. + // - 4 is how many rows we need to handle concurrently in the kernel in + // order to have sufficient mutual independence of instructions to + // maximize arithmetic throughput. + // + // Finally, the 'int8_t' part in the name refers to the fact that this + // weights format has each weights value encoded as a signed int8_t value, + // even if the data type of the weights buffer is uint8_t. This is intended + // to save runtime kernels the effort to have to XOR the top bit of these + // bytes before using them in signed arithmetic, see this file for more + // explanations on the 'signed int8_t trick' in matrix multiplication kernels: + // + // tensorflow/lite/toco/graph_transformations/ensure_uint8_weights_safe_for_fast_int8_kernels.cc + // + kShuffled4x16Int8, +}; + +// Quantization parameters, determining the mapping of quantized values +// to real values (i.e. determining how quantized values are mathematically +// interpreted). 
+// +// The correspondence is as follows: +// +// real_value = scale * (quantized_value - zero_point); +// +// In other words, zero_point designates which quantized value corresponds to +// the real 0 value, and scale designates the difference between the real values +// corresponding to consecutive quantized values differing by 1. +struct QuantizationParams { + int32_t zero_point = 0; + double scale = 0.0; +}; + +inline bool operator==(const QuantizationParams& qp1, + const QuantizationParams& qp2) { + return qp1.zero_point == qp2.zero_point && qp1.scale == qp2.scale; +} + +template +struct Dims { + int sizes[N]; + int strides[N]; +}; + +class RuntimeShape { + public: + // Shapes with dimensions up to 5 are stored directly in the structure, while + // larger shapes are separately allocated. + static constexpr int kMaxSmallSize = 5; + + RuntimeShape& operator=(RuntimeShape const&) = delete; + + RuntimeShape() : size_(0) {} + + explicit RuntimeShape(int dimensions_count) : size_(dimensions_count) { + if (dimensions_count > kMaxSmallSize) { +#ifdef TF_LITE_STATIC_MEMORY + TFLITE_CHECK(false && "No shape resizing supported on this platform"); +#else // TF_LITE_STATIC_MEMORY + dims_pointer_ = new int32_t[dimensions_count]; +#endif // TF_LITE_STATIC_MEMORY + } + } + + RuntimeShape(int shape_size, int32_t value) : size_(0) { + Resize(shape_size); + for (int i = 0; i < shape_size; ++i) { + SetDim(i, value); + } + } + + RuntimeShape(int dimensions_count, const int32_t* dims_data) : size_(0) { + ReplaceWith(dimensions_count, dims_data); + } + + RuntimeShape(const std::initializer_list init_list) : size_(0) { + BuildFrom(init_list); + } + + // Avoid using this constructor. We should be able to delete it when C++17 + // rolls out. 
+ RuntimeShape(RuntimeShape const& other) : size_(other.DimensionsCount()) { + if (size_ > kMaxSmallSize) { + dims_pointer_ = new int32_t[size_]; + } + std::memcpy(DimsData(), other.DimsData(), sizeof(int32_t) * size_); + } + + bool operator==(const RuntimeShape& comp) const { + return this->size_ == comp.size_ && + std::memcmp(DimsData(), comp.DimsData(), size_ * sizeof(int32_t)) == 0; + } + + ~RuntimeShape() { + if (size_ > kMaxSmallSize) { +#ifdef TF_LITE_STATIC_MEMORY + TFLITE_CHECK(false && "No shape resizing supported on this platform"); +#else // TF_LITE_STATIC_MEMORY + delete[] dims_pointer_; +#endif // TF_LITE_STATIC_MEMORY + } + } + + inline int32_t DimensionsCount() const { return size_; } + inline int32_t Dims(int i) const { + TFLITE_DCHECK_GE(i, 0); + TFLITE_DCHECK_LT(i, size_); + return size_ > kMaxSmallSize ? dims_pointer_[i] : dims_[i]; + } + inline void SetDim(int i, int32_t val) { + TFLITE_DCHECK_GE(i, 0); + TFLITE_DCHECK_LT(i, size_); + if (size_ > kMaxSmallSize) { + dims_pointer_[i] = val; + } else { + dims_[i] = val; + } + } + + inline int32_t* DimsData() { + return size_ > kMaxSmallSize ? dims_pointer_ : dims_; + } + inline const int32_t* DimsData() const { + return size_ > kMaxSmallSize ? dims_pointer_ : dims_; + } + // The caller must ensure that the shape is no bigger than 5-D. 
+ inline const int32_t* DimsDataUpTo5D() const { return dims_; } + + inline void Resize(int dimensions_count) { + if (size_ > kMaxSmallSize) { +#ifdef TF_LITE_STATIC_MEMORY + TFLITE_CHECK(false && "No shape resizing supported on this platform"); +#else // TF_LITE_STATIC_MEMORY + delete[] dims_pointer_; +#endif // TF_LITE_STATIC_MEMORY + } + size_ = dimensions_count; + if (dimensions_count > kMaxSmallSize) { +#ifdef TF_LITE_STATIC_MEMORY + TFLITE_CHECK(false && "No shape resizing supported on this platform"); +#else // TF_LITE_STATIC_MEMORY + dims_pointer_ = new int32_t[dimensions_count]; +#endif // TF_LITE_STATIC_MEMORY + } + } + + inline void ReplaceWith(int dimensions_count, const int32_t* dims_data) { + Resize(dimensions_count); + int32_t* dst_dims = DimsData(); + std::memcpy(dst_dims, dims_data, dimensions_count * sizeof(int32_t)); + } + + template + inline void BuildFrom(const T& src_iterable) { + const int dimensions_count = + std::distance(src_iterable.begin(), src_iterable.end()); + Resize(dimensions_count); + int32_t* data = DimsData(); + for (auto it : src_iterable) { + *data = it; + ++data; + } + } + + // This will probably be factored out. Old code made substantial use of 4-D + // shapes, and so this function is used to extend smaller shapes. Note that + // (a) as Dims<4>-dependent code is eliminated, the reliance on this should be + // reduced, and (b) some kernels are stricly 4-D, but then the shapes of their + // inputs should already be 4-D, so this function should not be needed. + inline static RuntimeShape ExtendedShape(int new_shape_size, + const RuntimeShape& shape) { + return RuntimeShape(new_shape_size, shape, 1); + } + + inline void BuildFrom(const std::initializer_list init_list) { + BuildFrom>(init_list); + } + + // Returns the total count of elements, that is the size when flattened into a + // vector. 
+ inline int FlatSize() const { + int buffer_size = 1; + const int* dims_data = reinterpret_cast(DimsData()); + for (int i = 0; i < size_; i++) { + buffer_size *= dims_data[i]; + } + return buffer_size; + } + + bool operator!=(const RuntimeShape& comp) const { return !((*this) == comp); } + + private: + // For use only by ExtendedShape(), written to guarantee (return-value) copy + // elision in C++17. + // This creates a shape padded to the desired size with the specified value. + RuntimeShape(int new_shape_size, const RuntimeShape& shape, int pad_value) + : size_(0) { + // If the following check fails, it is likely because a 4D-only kernel is + // being used with an array of larger dimension count. + TFLITE_CHECK_GE(new_shape_size, shape.DimensionsCount()); + Resize(new_shape_size); + const int size_increase = new_shape_size - shape.DimensionsCount(); + for (int i = 0; i < size_increase; ++i) { + SetDim(i, pad_value); + } + std::memcpy(DimsData() + size_increase, shape.DimsData(), + sizeof(int32_t) * shape.DimensionsCount()); + } + + int32_t size_; + union { + int32_t dims_[kMaxSmallSize]; + int32_t* dims_pointer_; + }; +}; + +// Converts inference-style shape to legacy tflite::Dims<4>. +inline tflite::Dims<4> ToRuntimeDims(const tflite::RuntimeShape& array_shape) { + tflite::Dims<4> result; + const int dimensions_count = array_shape.DimensionsCount(); + TFLITE_CHECK_LE(dimensions_count, 4); + int cum_prod = 1; + for (int i = 0; i < 4; i++) { + const int new_dim = + (i < dimensions_count) ? array_shape.Dims(dimensions_count - 1 - i) : 1; + result.sizes[i] = new_dim; + result.strides[i] = cum_prod; + cum_prod *= new_dim; + } + return result; +} + +// TODO(b/80418076): Move to legacy ops file, update invocations. +inline RuntimeShape DimsToShape(const tflite::Dims<4>& dims) { + return RuntimeShape( + {dims.sizes[3], dims.sizes[2], dims.sizes[1], dims.sizes[0]}); +} + +// Gets next index to iterate through a multidimensional array. 
+inline bool NextIndex(const int num_dims, const int* dims, int* current) { + if (num_dims == 0) { + return false; + } + TFLITE_DCHECK(dims != nullptr); + TFLITE_DCHECK(current != nullptr); + int carry = 1; + for (int idx = num_dims - 1; idx >= 0; --idx) { + int current_val = current[idx] + carry; + TFLITE_DCHECK_GE(dims[idx], current_val); + if (dims[idx] == current_val) { + current[idx] = 0; + } else { + current[idx] = current_val; + carry = 0; + break; + } + } + return (carry == 0); +} + +// Gets offset of index if reducing on axis. When reducing, the flattened offset +// will not change, if the input index changes on the given axis. For example, +// if you have a 3D tensor and you are reducing to 2D by eliminating axis 0, +// then index (0, 1, 2) and index (1, 1, 2) will map to the same flattened +// offset. +// TODO(kanlig): uses Dims to represent dimensions. +inline size_t ReducedOutputOffset(const int num_dims, const int* dims, + const int* index, const int num_axis, + const int* axis) { + if (num_dims == 0) { + return 0; + } + TFLITE_DCHECK(dims != nullptr); + TFLITE_DCHECK(index != nullptr); + size_t offset = 0; + for (int idx = 0; idx < num_dims; ++idx) { + // if we need to skip this axis + bool is_axis = false; + if (axis != nullptr) { + for (int axis_idx = 0; axis_idx < num_axis; ++axis_idx) { + if (idx == axis[axis_idx]) { + is_axis = true; + break; + } + } + } + if (!is_axis) { + offset = offset * static_cast(dims[idx]) + + static_cast(index[idx]); + } + } + return offset; +} + +inline int Offset(const RuntimeShape& shape, int i0, int i1, int i2, int i3) { + TFLITE_DCHECK_EQ(shape.DimensionsCount(), 4); + const int* dims_data = reinterpret_cast(shape.DimsDataUpTo5D()); + TFLITE_DCHECK(i0 >= 0 && i0 < dims_data[0]); + TFLITE_DCHECK(i1 >= 0 && i1 < dims_data[1]); + TFLITE_DCHECK(i2 >= 0 && i2 < dims_data[2]); + TFLITE_DCHECK(i3 >= 0 && i3 < dims_data[3]); + return ((i0 * dims_data[1] + i1) * dims_data[2] + i2) * dims_data[3] + i3; +} + +inline int 
Offset(const Dims<4>& dims, int i0, int i1, int i2, int i3) { + TFLITE_DCHECK(i0 >= 0 && i0 < dims.sizes[0]); + TFLITE_DCHECK(i1 >= 0 && i1 < dims.sizes[1]); + TFLITE_DCHECK(i2 >= 0 && i2 < dims.sizes[2]); + TFLITE_DCHECK(i3 >= 0 && i3 < dims.sizes[3]); + return i0 * dims.strides[0] + i1 * dims.strides[1] + i2 * dims.strides[2] + + i3 * dims.strides[3]; +} + +inline int Offset(const Dims<4>& dims, int* index) { + return Offset(dims, index[0], index[1], index[2], index[3]); +} + +inline int Offset(const RuntimeShape& shape, int* index) { + return Offset(shape, index[0], index[1], index[2], index[3]); +} + +// Get array size, DCHECKing that the dim index is in range. +// +// Note that this will be phased out with Dims<4>, since RuntimeShape::Dims() +// already performs this check. +template +int ArraySize(const Dims& array, int index) { + TFLITE_DCHECK(index >= 0 && index < N); + return array.sizes[index]; +} + +// Get common array size, DCHECKing that they all agree. +template +int MatchingArraySize(const ArrayType1& array1, int index1, + const ArrayType2& array2, int index2) { + TFLITE_DCHECK_EQ(ArraySize(array1, index1), ArraySize(array2, index2)); + return ArraySize(array1, index1); +} + +template +int MatchingArraySize(const ArrayType1& array1, int index1, + const ArrayType2& array2, int index2, Args... args) { + TFLITE_DCHECK_EQ(ArraySize(array1, index1), ArraySize(array2, index2)); + return MatchingArraySize(array1, index1, args...); +} + +// Get common shape dim, DCHECKing that they all agree. +inline int MatchingDim(const RuntimeShape& shape1, int index1, + const RuntimeShape& shape2, int index2) { + TFLITE_DCHECK_EQ(shape1.Dims(index1), shape2.Dims(index2)); + return shape1.Dims(index1); +} + +template +int MatchingDim(const RuntimeShape& shape1, int index1, + const RuntimeShape& shape2, int index2, Args... 
args) { + TFLITE_DCHECK_EQ(shape1.Dims(index1), shape2.Dims(index2)); + return MatchingDim(shape1, index1, args...); +} + +// Will be phased out with Dims<4>, replaced by RuntimeShape::FlatSize(). +template +inline int FlatSize(const Dims& dims) { + int flat_size = 1; + for (int i = 0; i < N; ++i) { + flat_size *= dims.sizes[i]; + } + return flat_size; +} + +TFLITE_DEPRECATED("Prefer FlatSize.") +inline int RequiredBufferSizeForDims(const Dims<4>& dims) { + return FlatSize(dims); +} + +inline int MatchingElementsSize(const RuntimeShape& shape, + const RuntimeShape& check_shape_0) { + const int size_1 = shape.FlatSize(); + const int size_2 = check_shape_0.FlatSize(); + TFLITE_CHECK_EQ(size_1, size_2); + return size_1; +} + +inline int MatchingElementsSize(const RuntimeShape& shape, + const RuntimeShape& check_shape_0, + const RuntimeShape& check_shape_1) { + const int size_1 = shape.FlatSize(); + const int size_2 = check_shape_0.FlatSize(); + const int size_3 = check_shape_1.FlatSize(); + TFLITE_CHECK_EQ(size_1, size_2); + TFLITE_CHECK_EQ(size_2, size_3); + return size_1; +} + +// Flat size calculation, checking that dimensions match with one or more other +// arrays. 
+inline int MatchingFlatSize(const RuntimeShape& shape, + const RuntimeShape& check_shape_0) { + TFLITE_DCHECK_EQ(shape.DimensionsCount(), check_shape_0.DimensionsCount()); + const int dims_count = shape.DimensionsCount(); + for (int i = 0; i < dims_count; ++i) { + TFLITE_DCHECK_EQ(shape.Dims(i), check_shape_0.Dims(i)); + } + return shape.FlatSize(); +} + +inline int MatchingFlatSize(const RuntimeShape& shape, + const RuntimeShape& check_shape_0, + const RuntimeShape& check_shape_1) { + TFLITE_DCHECK_EQ(shape.DimensionsCount(), check_shape_0.DimensionsCount()); + const int dims_count = shape.DimensionsCount(); + for (int i = 0; i < dims_count; ++i) { + TFLITE_DCHECK_EQ(shape.Dims(i), check_shape_0.Dims(i)); + } + return MatchingFlatSize(shape, check_shape_1); +} + +inline int MatchingFlatSize(const RuntimeShape& shape, + const RuntimeShape& check_shape_0, + const RuntimeShape& check_shape_1, + const RuntimeShape& check_shape_2) { + TFLITE_DCHECK_EQ(shape.DimensionsCount(), check_shape_0.DimensionsCount()); + const int dims_count = shape.DimensionsCount(); + for (int i = 0; i < dims_count; ++i) { + TFLITE_DCHECK_EQ(shape.Dims(i), check_shape_0.Dims(i)); + } + return MatchingFlatSize(shape, check_shape_1, check_shape_2); +} + +inline int MatchingFlatSize(const RuntimeShape& shape, + const RuntimeShape& check_shape_0, + const RuntimeShape& check_shape_1, + const RuntimeShape& check_shape_2, + const RuntimeShape& check_shape_3) { + TFLITE_DCHECK_EQ(shape.DimensionsCount(), check_shape_0.DimensionsCount()); + const int dims_count = shape.DimensionsCount(); + for (int i = 0; i < dims_count; ++i) { + TFLITE_DCHECK_EQ(shape.Dims(i), check_shape_0.Dims(i)); + } + return MatchingFlatSize(shape, check_shape_1, check_shape_2, check_shape_3); +} + +// Flat size calculation, checking that dimensions match with one or more other +// arrays. 
+template +inline int MatchingFlatSize(const Dims& dims, const Dims& check_dims_0) { + for (int i = 0; i < N; ++i) { + TFLITE_DCHECK_EQ(ArraySize(dims, i), ArraySize(check_dims_0, i)); + } + return FlatSize(dims); +} + +template +inline int MatchingFlatSize(const Dims& dims, const Dims& check_dims_0, + const Dims& check_dims_1) { + for (int i = 0; i < N; ++i) { + TFLITE_DCHECK_EQ(ArraySize(dims, i), ArraySize(check_dims_0, i)); + } + return MatchingFlatSize(dims, check_dims_1); +} + +template +inline int MatchingFlatSize(const Dims& dims, const Dims& check_dims_0, + const Dims& check_dims_1, + const Dims& check_dims_2) { + for (int i = 0; i < N; ++i) { + TFLITE_DCHECK_EQ(ArraySize(dims, i), ArraySize(check_dims_0, i)); + } + return MatchingFlatSize(dims, check_dims_1, check_dims_2); +} + +template +inline int MatchingFlatSize(const Dims& dims, const Dims& check_dims_0, + const Dims& check_dims_1, + const Dims& check_dims_2, + const Dims& check_dims_3) { + for (int i = 0; i < N; ++i) { + TFLITE_DCHECK_EQ(ArraySize(dims, i), ArraySize(check_dims_0, i)); + } + return MatchingFlatSize(dims, check_dims_1, check_dims_2, check_dims_3); +} + +// Data is required to be contiguous, and so many operators can use either the +// full array flat size or the flat size with one dimension skipped (commonly +// the depth). +template +inline int FlatSizeSkipDim(const Dims& dims, int skip_dim) { + TFLITE_DCHECK(skip_dim >= 0 && skip_dim < N); + int flat_size = 1; + for (int i = 0; i < N; ++i) { + flat_size *= (i == skip_dim) ? 1 : dims.sizes[i]; + } + return flat_size; +} + +// A combination of MatchingFlatSize() and FlatSizeSkipDim(). 
+template +inline int MatchingFlatSizeSkipDim(const Dims& dims, int skip_dim, + const Dims& check_dims_0) { + for (int i = 0; i < N; ++i) { + if (i != skip_dim) { + TFLITE_DCHECK_EQ(ArraySize(dims, i), ArraySize(check_dims_0, i)); + } + } + return FlatSizeSkipDim(dims, skip_dim); +} + +template +inline int MatchingFlatSizeSkipDim(const Dims& dims, int skip_dim, + const Dims& check_dims_0, + const Dims& check_dims_1) { + for (int i = 0; i < N; ++i) { + if (i != skip_dim) { + TFLITE_DCHECK_EQ(ArraySize(dims, i), ArraySize(check_dims_0, i)); + } + } + return MatchingFlatSizeSkipDim(dims, skip_dim, check_dims_1); +} + +template +inline int MatchingFlatSizeSkipDim(const Dims& dims, int skip_dim, + const Dims& check_dims_0, + const Dims& check_dims_1, + const Dims& check_dims_2) { + for (int i = 0; i < N; ++i) { + if (i != skip_dim) { + TFLITE_DCHECK_EQ(ArraySize(dims, i), ArraySize(check_dims_0, i)); + } + } + return MatchingFlatSizeSkipDim(dims, skip_dim, check_dims_1, check_dims_2); +} + +template +inline int MatchingFlatSizeSkipDim(const Dims& dims, int skip_dim, + const Dims& check_dims_0, + const Dims& check_dims_1, + const Dims& check_dims_2, + const Dims& check_dims_3) { + for (int i = 0; i < N; ++i) { + if (i != skip_dim) { + TFLITE_DCHECK_EQ(ArraySize(dims, i), ArraySize(check_dims_0, i)); + } + } + return MatchingFlatSizeSkipDim(dims, skip_dim, check_dims_1, check_dims_2, + check_dims_3); +} + +// Data is required to be contiguous, and so many operators can use either the +// full array flat size or the flat size with one dimension skipped (commonly +// the depth). +inline int FlatSizeSkipDim(const RuntimeShape& shape, int skip_dim) { + const int dims_count = shape.DimensionsCount(); + TFLITE_DCHECK(skip_dim >= 0 && skip_dim < dims_count); + const auto* dims_data = shape.DimsData(); + int flat_size = 1; + for (int i = 0; i < dims_count; ++i) { + flat_size *= (i == skip_dim) ? 
1 : dims_data[i]; + } + return flat_size; +} + +// A combination of MatchingFlatSize() and FlatSizeSkipDim(). +inline int MatchingFlatSizeSkipDim(const RuntimeShape& shape, int skip_dim, + const RuntimeShape& check_shape_0) { + const int dims_count = shape.DimensionsCount(); + for (int i = 0; i < dims_count; ++i) { + if (i != skip_dim) { + TFLITE_DCHECK_EQ(shape.Dims(i), check_shape_0.Dims(i)); + } + } + return FlatSizeSkipDim(shape, skip_dim); +} + +inline int MatchingFlatSizeSkipDim(const RuntimeShape& shape, int skip_dim, + const RuntimeShape& check_shape_0, + const RuntimeShape& check_shape_1) { + const int dims_count = shape.DimensionsCount(); + for (int i = 0; i < dims_count; ++i) { + if (i != skip_dim) { + TFLITE_DCHECK_EQ(shape.Dims(i), check_shape_0.Dims(i)); + } + } + return MatchingFlatSizeSkipDim(shape, skip_dim, check_shape_1); +} + +inline int MatchingFlatSizeSkipDim(const RuntimeShape& shape, int skip_dim, + const RuntimeShape& check_shape_0, + const RuntimeShape& check_shape_1, + const RuntimeShape& check_shape_2) { + const int dims_count = shape.DimensionsCount(); + for (int i = 0; i < dims_count; ++i) { + if (i != skip_dim) { + TFLITE_DCHECK_EQ(shape.Dims(i), check_shape_0.Dims(i)); + } + } + return MatchingFlatSizeSkipDim(shape, skip_dim, check_shape_1, check_shape_2); +} + +inline int MatchingFlatSizeSkipDim(const RuntimeShape& shape, int skip_dim, + const RuntimeShape& check_shape_0, + const RuntimeShape& check_shape_1, + const RuntimeShape& check_shape_2, + const RuntimeShape& check_shape_3) { + const int dims_count = shape.DimensionsCount(); + for (int i = 0; i < dims_count; ++i) { + if (i != skip_dim) { + TFLITE_DCHECK_EQ(shape.Dims(i), check_shape_0.Dims(i)); + } + } + return MatchingFlatSizeSkipDim(shape, skip_dim, check_shape_1, check_shape_2, + check_shape_3); +} + +template +bool IsPackedWithoutStrides(const Dims& dims) { + int expected_stride = 1; + for (int d = 0; d < N; d++) { + if (dims.strides[d] != expected_stride) return false; + 
expected_stride *= dims.sizes[d]; + } + return true; +} + +template +void ComputeStrides(Dims* dims) { + dims->strides[0] = 1; + for (int d = 1; d < N; d++) { + dims->strides[d] = dims->strides[d - 1] * dims->sizes[d - 1]; + } +} + +enum class BroadcastableOpCategory : uint8_t { + kNone, + kNonBroadcast, // Matching input shapes. + kFirstInputBroadcastsFast, // Fivefold nested loops. + kSecondInputBroadcastsFast, // Fivefold nested loops. + kGenericBroadcast, // Fall-back. +}; + +struct MinMax { + float min; + float max; +}; +static_assert(sizeof(MinMax) == 8, ""); + +struct ActivationParams { + FusedActivationFunctionType activation_type; + // uint8_t, etc, activation params. + int32_t quantized_activation_min; + int32_t quantized_activation_max; +}; + +struct ReluParams : public ActivationParams { + int32_t input_offset; + int32_t output_offset; + int32_t output_multiplier; + int32_t output_shift; +}; + +// Styles of resizing op usages. For example, kImageStyle can be used with a Pad +// op for pattern-specific optimization. +enum class ResizingCategory : uint8_t { + kNone, + kImageStyle, // 4D, operating on inner dimensions, say {0, a, b, 0}. + kGenericResize, +}; + +// For Add, Sub, Mul ops. + + +struct ConcatenationParams { + int8_t axis; + const int32_t* input_zeropoint; + const float* input_scale; + uint16 inputs_count; + int32_t output_zeropoint; + float output_scale; +}; + +struct ComparisonParams { + // uint8_t inference params. + int left_shift; + int32_t input1_offset; + int32_t input1_multiplier; + int input1_shift; + int32_t input2_offset; + int32_t input2_multiplier; + int input2_shift; + // Shape dependent / common to inference types. + bool is_broadcast; +}; + +struct ConvParams { + PaddingType padding_type; + PaddingValues padding_values; + // TODO(starka): This was just "stride", so check that width+height is OK. 
+ int16 stride_width; + int16 stride_height; + int16 dilation_width_factor; + int16 dilation_height_factor; + // uint8_t inference params. + // TODO(b/65838351): Use smaller types if appropriate. + int32_t input_offset; + int32_t weights_offset; + int32_t output_offset; + int32_t output_multiplier; + int output_shift; + // uint8_t, etc, activation params. + int32_t quantized_activation_min; + int32_t quantized_activation_max; + // float activation params. + float float_activation_min; + float float_activation_max; +}; + +struct DepthToSpaceParams { + int32_t block_size; +}; + +struct DepthwiseParams { + PaddingType padding_type; + PaddingValues padding_values; + int16 stride_width; + int16 stride_height; + int16 dilation_width_factor; + int16 dilation_height_factor; + int16 depth_multiplier; + // uint8_t inference params. + // TODO(b/65838351): Use smaller types if appropriate. + int32_t input_offset; + int32_t weights_offset; + int32_t output_offset; + int32_t output_multiplier; + int output_shift; + // uint8_t, etc, activation params. + int32_t quantized_activation_min; + int32_t quantized_activation_max; + // float activation params. + float float_activation_min; + float float_activation_max; + const int32_t* output_multiplier_per_channel; + const int32_t* output_shift_per_channel; +}; + +struct DequantizationParams { + double scale; + int32_t zero_point; +}; + +struct PerChannelDequantizationParams { + const float* scale; + const int32_t* zero_point; + int32_t quantized_dimension; +}; + +struct FakeQuantParams { + MinMax minmax; + int32_t num_bits; +}; + +struct FullyConnectedParams { + // uint8_t inference params. + // TODO(b/65838351): Use smaller types if appropriate. + int32_t input_offset; + int32_t weights_offset; + int32_t output_offset; + int32_t output_multiplier; + int output_shift; + // uint8_t, etc, activation params. + int32_t quantized_activation_min; + int32_t quantized_activation_max; + // float activation params. 
+ float float_activation_min; + float float_activation_max; + // Mark the operands as cacheable if they are unchanging, e.g. weights. + bool lhs_cacheable; + bool rhs_cacheable; + FullyConnectedWeightsFormat weights_format; +}; + +struct GatherParams { + int16 axis; +}; + +struct L2NormalizationParams { + // uint8_t inference params. + int32_t input_zero_point; +}; + +struct LocalResponseNormalizationParams { + int32_t range; + double bias; + double alpha; + double beta; +}; + +struct HardSwishParams { + // zero_point of the input activations. + int16_t input_zero_point; + // zero_point of the output activations. + int16_t output_zero_point; + // 16bit fixed-point component of the multiplier to apply to go from the + // "high-res input scale", which is the input scale multiplied by 2^7, to the + // "relu-ish scale", which 3.0/32768. + // See the implementation of HardSwishPrepare. + int16_t reluish_multiplier_fixedpoint_int16; + // exponent/bit-shift component of the aforementioned multiplier. + int reluish_multiplier_exponent; + // 16bit fixed-point component of the multiplier to apply to go from the + // "high-res input scale", which is the input scale multiplied by 2^7, to the + // output scale. + // See the implementation of HardSwishPrepare. + int16_t output_multiplier_fixedpoint_int16; + // exponent/bit-shift component of the aforementioned multiplier. + int output_multiplier_exponent; +}; + +struct LogisticParams { + // uint8_t inference params. 
+ int32_t input_zero_point; + int32_t input_range_radius; + int32_t input_multiplier; + int input_left_shift; +}; + +struct LstmCellParams { + int32_t weights_zero_point; + int32_t accum_multiplier; + int accum_shift; + int state_integer_bits; +}; + +struct MeanParams { + int8_t axis_count; + int16 axis[4]; +}; + +struct PackParams { + int8_t axis; + const int32_t* input_zeropoint; + const float* input_scale; + uint16 inputs_count; + int32_t output_zeropoint; + float output_scale; +}; + +struct PadParams { + int8_t left_padding_count; + int32_t left_padding[4]; + int8_t right_padding_count; + int32_t right_padding[4]; + ResizingCategory resizing_category; +}; + +struct PreluParams { + int32_t input_offset; + int32_t alpha_offset; + int32_t output_offset; + int32_t output_multiplier_1; + int32_t output_shift_1; + int32_t output_multiplier_2; + int32_t output_shift_2; +}; + +struct PoolParams { + FusedActivationFunctionType activation; + PaddingType padding_type; + PaddingValues padding_values; + int stride_height; + int stride_width; + int filter_height; + int filter_width; + // uint8_t, etc, activation params. + int32_t quantized_activation_min; + int32_t quantized_activation_max; + // float activation params. + float float_activation_min; + float float_activation_max; +}; + +struct ReshapeParams { + int8_t shape_count; + int32_t shape[4]; +}; + +struct ResizeBilinearParams { + bool align_corners; + // half_pixel_centers assumes pixels are of half the actual dimensions, and + // yields more accurate resizes. Corresponds to the same argument for the + // original TensorFlow op in TF2.0. + bool half_pixel_centers; +}; + +struct ResizeNearestNeighborParams { + bool align_corners; + bool half_pixel_centers; +}; + +struct SliceParams { + int8_t begin_count; + int32_t begin[4]; + int8_t size_count; + int32_t size[4]; +}; + +struct SoftmaxParams { + // beta is not really used (not a Tensorflow parameter) and not implemented + // for LogSoftmax. 
+ double beta; + // uint8_t inference params. Used even when beta defaults to 1.0. + int32_t input_multiplier; + int32_t input_left_shift; + // Reverse scaling is only used by LogSoftmax. + int32_t reverse_scaling_divisor; + int32_t reverse_scaling_right_shift; + int diff_min; + int32_t zero_point; + float scale; + float* table; + int16_t* exp_lut; + int16_t* one_over_one_plus_x_lut; + uint8_t* uint8_table1; + uint8_t* uint8_table2; +}; + +struct SpaceToBatchParams { + // "Zero" padding for uint8_t means padding with the output offset. + int32_t output_offset; +}; + +struct SpaceToDepthParams { + int32_t block_size; +}; + +struct SplitParams { + // Graphs that split into, say, 2000 nodes are encountered. The indices in + // OperatorEdges are of type uint16. + uint16 num_split; + int16 axis; +}; + +struct SqueezeParams { + int8_t squeeze_dims_count; + int32_t squeeze_dims[4]; +}; + +struct StridedSliceParams { + int8_t start_indices_count; + int32_t start_indices[5]; + int8_t stop_indices_count; + int32_t stop_indices[5]; + int8_t strides_count; + int32_t strides[5]; + + int16 begin_mask; + int16 ellipsis_mask; + int16 end_mask; + int16 new_axis_mask; + int16 shrink_axis_mask; +}; + +struct TanhParams { + int32_t input_zero_point; + int32_t input_range_radius; + int32_t input_multiplier; + int input_left_shift; +}; + +struct TransposeParams { + int8_t perm_count; + int32_t perm[5]; +}; + +struct UnpackParams { + uint16 num_split; + int16 axis; +}; + +struct LeakyReluParams { + float alpha; + int32_t input_offset; + int32_t output_offset; + int32_t output_multiplier_alpha; + int32_t output_shift_alpha; + int32_t output_multiplier_identity; + int32_t output_shift_identity; +}; + +template +inline void SetActivationParams(float min, float max, P* params) { + params->float_activation_min = min; + params->float_activation_max = max; +} + +template +inline void SetActivationParams(int32_t min, int32_t max, P* params) { + params->quantized_activation_min = min; + 
params->quantized_activation_max = max; +} + +template +inline void GetActivationParams(const P& params, int32_t* min, int32_t* max) { + *min = params.quantized_activation_min; + *max = params.quantized_activation_max; +} + +template +inline void GetActivationParams(const P& params, float* min, float* max) { + *min = params.float_activation_min; + *max = params.float_activation_max; +} + +} // namespace tflite +#endif +#endif // CEVA_TYPES_H_ diff --git a/tensorflow/lite/micro/kernels/circular_buffer.cc b/tensorflow/lite/micro/kernels/circular_buffer.cc new file mode 100644 index 0000000..0bed5cb --- /dev/null +++ b/tensorflow/lite/micro/kernels/circular_buffer.cc @@ -0,0 +1,117 @@ +/* Copyright 2020 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/ + +#include "tensorflow/lite/micro/kernels/circular_buffer.h" + +#include "tensorflow/lite/c/builtin_op_data.h" +#include "tensorflow/lite/c/common.h" +#include "tensorflow/lite/kernels/internal/compatibility.h" +#include "tensorflow/lite/kernels/internal/quantization_util.h" +#include "tensorflow/lite/kernels/internal/tensor_ctypes.h" +#include "tensorflow/lite/kernels/kernel_util.h" +#include "tensorflow/lite/kernels/op_macros.h" +#include "tensorflow/lite/micro/flatbuffer_utils.h" +#include "tensorflow/lite/micro/kernels/kernel_util.h" +#include "tensorflow/lite/micro/micro_log.h" + +/* + * The circular buffer custom operator is used to implement strided streaming + * convolutions on TFLite Micro. Each time this operator is invoked, it checks + * whether or not to run, based on a predetermined stride in time. If the op + * runs, it inserts the input into the end of the output buffer and shifts the + * output values towards the start of the buffer. It discards the oldest value + * in the output buffer. + * + * Input: [, , , ] + * + * After shifting: + * Output: [, , , ] + * + * We make some assumptions in this custom operator: + * - Input shape must be [1, 1, 1, depth] + * - Output shape must be [1, num_slots, 1, depth] + * - Input and output types must match. + * - Input and output quantization params must be identical. 
+ */ +namespace tflite { + +void* CircularBufferInit(TfLiteContext* context, const char* buffer, + size_t length) { + TFLITE_DCHECK(context->AllocatePersistentBuffer != nullptr); + OpDataCircularBuffer* op_data = static_cast( + context->AllocatePersistentBuffer(context, sizeof(OpDataCircularBuffer))); + + if (buffer != nullptr && length > 0) { + const uint8_t* buffer_t = reinterpret_cast(buffer); + tflite::FlexbufferWrapper wrapper(buffer_t, length); + op_data->cycles_max = wrapper.ElementAsInt32(kCircularBufferCyclesMaxIndex); + } else { + op_data->cycles_max = 0; + } + + return op_data; +} + +// Shifts buffer over by the output depth, and write new input to end of buffer. +// num_slots is the number of samples stored in the output buffer. +// depth is the size of each sample. +void EvalInt8(const int8_t* input, int num_slots, int depth, int8_t* output) { + memmove(output, &output[depth], (num_slots - 1) * depth); + memcpy(&output[(num_slots - 1) * depth], input, depth); +} + +TfLiteStatus CircularBufferEval(TfLiteContext* context, TfLiteNode* node) { + const TfLiteEvalTensor* input = + tflite::micro::GetEvalInput(context, node, kCircularBufferInputTensor); + TfLiteEvalTensor* output = + tflite::micro::GetEvalOutput(context, node, kCircularBufferOutputTensor); + + TFLITE_DCHECK(node->user_data != nullptr); + OpDataCircularBuffer* data = + reinterpret_cast(node->user_data); + + int num_slots = output->dims->data[1]; + int depth = output->dims->data[2] * output->dims->data[3]; + + if (input->type == kTfLiteInt8) { + EvalInt8(tflite::micro::GetTensorData(input), num_slots, depth, + tflite::micro::GetTensorData(output)); + } else { + MicroPrintf("Type %s (%d) not supported.", + TfLiteTypeGetName(input->type), input->type); + return kTfLiteError; + } + + if (--data->cycles_until_run != 0) { + // Signal the interpreter to end current run if the delay before op invoke + // has not been reached. + // TODO(b/149795762): Add kTfLiteAbort to TfLiteStatus enum. 
+ return static_cast(kTfLiteAbort); + } + + data->cycles_until_run = data->cycles_max; + + return kTfLiteOk; +} + +TFLMRegistration* Register_CIRCULAR_BUFFER() { + static TFLMRegistration r = tflite::micro::RegisterOp( + CircularBufferInit, CircularBufferPrepare, CircularBufferEval); + return &r; +} + +} // namespace tflite diff --git a/tensorflow/lite/micro/kernels/circular_buffer.h b/tensorflow/lite/micro/kernels/circular_buffer.h new file mode 100644 index 0000000..51adf74 --- /dev/null +++ b/tensorflow/lite/micro/kernels/circular_buffer.h @@ -0,0 +1,48 @@ +/* Copyright 2020 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#ifndef TENSORFLOW_LITE_MICRO_KERNELS_CIRCULAR_BUFFER_H_ +#define TENSORFLOW_LITE_MICRO_KERNELS_CIRCULAR_BUFFER_H_ + +#include "tensorflow/lite/c/builtin_op_data.h" +#include "tensorflow/lite/c/common.h" + +namespace tflite { + +// The CircularBuffer op has one input and one output tensor. +extern const int kCircularBufferInputTensor; +extern const int kCircularBufferOutputTensor; + +// Indices into the init flexbuffer's vector. +// The parameter's name is in the comment that follows. +// Elements in the vectors are ordered alphabetically by parameter name. +extern const int kCircularBufferCyclesMaxIndex; // 'cycles_max' + +// TODO(b/149795762): Add this to TfLiteStatus enum. 
+extern const TfLiteStatus kTfLiteAbort; + +// These fields control the stride period of a strided streaming model. This op +// returns kTfLiteAbort until cycles_until_run-- is zero. At this time, +// cycles_until_run is reset to cycles_max. +struct OpDataCircularBuffer { + int cycles_until_run; + int cycles_max; +}; + +TfLiteStatus CircularBufferPrepare(TfLiteContext* context, TfLiteNode* node); + +} // namespace tflite + +#endif // TENSORFLOW_LITE_MICRO_KERNELS_CIRCULAR_BUFFER_H_ diff --git a/tensorflow/lite/micro/kernels/circular_buffer_common.cc b/tensorflow/lite/micro/kernels/circular_buffer_common.cc new file mode 100644 index 0000000..81db6e6 --- /dev/null +++ b/tensorflow/lite/micro/kernels/circular_buffer_common.cc @@ -0,0 +1,97 @@ +/* Copyright 2020 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/ + +#include "tensorflow/lite/c/builtin_op_data.h" +#include "tensorflow/lite/c/common.h" +#include "tensorflow/lite/kernels/internal/compatibility.h" +#include "tensorflow/lite/kernels/internal/quantization_util.h" +#include "tensorflow/lite/kernels/internal/tensor_ctypes.h" +#include "tensorflow/lite/kernels/kernel_util.h" +#include "tensorflow/lite/kernels/op_macros.h" +#include "tensorflow/lite/micro/flatbuffer_utils.h" +#include "tensorflow/lite/micro/kernels/circular_buffer.h" +#include "tensorflow/lite/micro/kernels/kernel_util.h" + +namespace tflite { + +// The CircularBuffer op has one input and one output tensor. +const int kCircularBufferInputTensor = 0; +const int kCircularBufferOutputTensor = 0; + +// Indices into the init flexbuffer's vector. +// The parameter's name is in the comment that follows. +// Elements in the vectors are ordered alphabetically by parameter name. +const int kCircularBufferCyclesMaxIndex = 0; // 'cycles_max' + +// TODO(b/149795762): Add this to TfLiteStatus enum. 
+const TfLiteStatus kTfLiteAbort = static_cast(-9); + +TfLiteStatus CircularBufferPrepare(TfLiteContext* context, TfLiteNode* node) { + MicroContext* micro_context = GetMicroContext(context); + + TfLiteTensor* input = + micro_context->AllocateTempInputTensor(node, kCircularBufferInputTensor); + TfLiteTensor* output = micro_context->AllocateTempOutputTensor( + node, kCircularBufferOutputTensor); + + TFLITE_DCHECK(node->user_data != nullptr); + OpDataCircularBuffer* op_data = + static_cast(node->user_data); + + TF_LITE_ENSURE(context, input != nullptr); + TF_LITE_ENSURE(context, output != nullptr); + TF_LITE_ENSURE_EQ(context, input->dims->data[0], output->dims->data[0]); + TF_LITE_ENSURE_EQ(context, 1, input->dims->data[1]); + TF_LITE_ENSURE_EQ(context, input->dims->data[2], output->dims->data[2]); + TF_LITE_ENSURE_EQ(context, output->dims->data[3], input->dims->data[3]); + + TF_LITE_ENSURE_TYPES_EQ(context, input->type, output->type); + + // The circular buffer custom operator currently only supports int8. + TF_LITE_ENSURE_TYPES_EQ(context, input->type, kTfLiteInt8); + + if (op_data->cycles_max <= 0) { + // The last circular buffer layer simply accumulates outputs, and does not + // run periodically. + // TODO(b/150001379): Move this special case logic to the tflite flatbuffer. + static int cb_prepare_count = 0; + cb_prepare_count++; + // These checks specifically work for the only two streaming models + // supported on TFLM. They use the shape of the output tensor along with the + // layer number to determine if the circular buffer period should be 1 or 2. 
+ + // These models are outlined int the following documents: + // https://docs.google.com/document/d/1lc_G2ZFhjiKFo02UHjBaljye1xsL0EkfybkaVELEE3Q/edit?usp=sharing + // https://docs.google.com/document/d/1pGc42PuWyrk-Jy1-9qeqtggvsmHr1ifz8Lmqfpr2rKA/edit?usp=sharing + if (output->dims->data[1] == 5 || output->dims->data[1] == 13 || + output->dims->data[1] == 25 || + (cb_prepare_count == 5 && output->dims->data[2] == 2 && + output->dims->data[3] == 96)) { + op_data->cycles_max = 1; + cb_prepare_count = 0; + } else { + op_data->cycles_max = 2; + } + } + op_data->cycles_until_run = op_data->cycles_max; + node->user_data = op_data; + + micro_context->DeallocateTempTfLiteTensor(input); + micro_context->DeallocateTempTfLiteTensor(output); + + return kTfLiteOk; +} + +} // namespace tflite diff --git a/tensorflow/lite/micro/kernels/circular_buffer_flexbuffers_generated_data.cc b/tensorflow/lite/micro/kernels/circular_buffer_flexbuffers_generated_data.cc new file mode 100644 index 0000000..e292198 --- /dev/null +++ b/tensorflow/lite/micro/kernels/circular_buffer_flexbuffers_generated_data.cc @@ -0,0 +1,25 @@ +/* Copyright 2021 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +// This file is generated. 
See: +// third_party/tensorflow/lite/micro/kernels/test_data_generation/README.md + +#include "tensorflow/lite/micro/kernels/circular_buffer_flexbuffers_generated_data.h" + +const int g_gen_data_size_circular_buffer_config = 21; +const unsigned char g_gen_data_circular_buffer_config[] = { + 0x63, 0x79, 0x63, 0x6c, 0x65, 0x73, 0x5f, 0x6d, 0x61, 0x78, 0x00, + 0x01, 0x0c, 0x01, 0x01, 0x01, 0x01, 0x04, 0x02, 0x24, 0x01, +}; diff --git a/tensorflow/lite/micro/kernels/circular_buffer_flexbuffers_generated_data.h b/tensorflow/lite/micro/kernels/circular_buffer_flexbuffers_generated_data.h new file mode 100644 index 0000000..2fbf4fe --- /dev/null +++ b/tensorflow/lite/micro/kernels/circular_buffer_flexbuffers_generated_data.h @@ -0,0 +1,22 @@ +/* Copyright 2020 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#ifndef TENSORFLOW_LITE_MICRO_KERNELS_FLEXBUFFERS_GENERATED_DATA_H +#define TENSORFLOW_LITE_MICRO_KERNELS_FLEXBUFFERS_GENERATED_DATA_H + +extern const int g_gen_data_size_circular_buffer_config; +extern const unsigned char g_gen_data_circular_buffer_config[]; + +#endif diff --git a/tensorflow/lite/micro/kernels/circular_buffer_test.cc b/tensorflow/lite/micro/kernels/circular_buffer_test.cc new file mode 100644 index 0000000..faf2794 --- /dev/null +++ b/tensorflow/lite/micro/kernels/circular_buffer_test.cc @@ -0,0 +1,242 @@ +/* Copyright 2021 The TensorFlow Authors. 
All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#include "tensorflow/lite/c/builtin_op_data.h" +#include "tensorflow/lite/c/common.h" +#include "tensorflow/lite/micro/kernels/circular_buffer_flexbuffers_generated_data.h" +#include "tensorflow/lite/micro/kernels/kernel_runner.h" +#include "tensorflow/lite/micro/kernels/micro_ops.h" +#include "tensorflow/lite/micro/test_helpers.h" +#include "tensorflow/lite/micro/testing/micro_test.h" + +namespace tflite { +namespace testing { +namespace { + +constexpr int kRunPeriod = 2; + +// TODO(b/149795762): Add this to TfLiteStatus enum. +const TfLiteStatus kTfLiteAbort = static_cast(-9); + +} // namespace +} // namespace testing +} // namespace tflite + +TF_LITE_MICRO_TESTS_BEGIN + +TF_LITE_MICRO_TEST(OutputTensorLength4) { + constexpr int depth = 3; + constexpr int num_slots = 4; + int8_t input_data[depth]; + int8_t output_data[depth * num_slots]; + + memset(output_data, 0, sizeof(output_data)); + + // There are four input dimensions - [1, 1, 1, depth]. + int input_dims[] = {4, 1, 1, 1, depth}; + // There are four output dimensions - [1, num_slots, 1, depth]. 
+ int output_dims[] = {4, 1, num_slots, 1, depth}; + + TfLiteIntArray* input_tensor_dims = + tflite::testing::IntArrayFromInts(input_dims); + TfLiteIntArray* output_tensor_dims = + tflite::testing::IntArrayFromInts(output_dims); + + const int output_dims_count = tflite::ElementCount(*output_tensor_dims); + + constexpr int inputs_size = 2; + constexpr int outputs_size = 1; + constexpr int tensors_size = inputs_size + outputs_size; + TfLiteTensor tensors[tensors_size] = { + tflite::testing::CreateQuantizedTensor(input_data, input_tensor_dims, 1, + 0), + tflite::testing::CreateQuantizedTensor(output_data, output_tensor_dims, 1, + 0), + }; + + // There is one input - tensor 0. + int inputs_array_data[] = {1, 0}; + TfLiteIntArray* inputs_array = + tflite::testing::IntArrayFromInts(inputs_array_data); + // There is one output - tensor 1. + int outputs_array_data[] = {1, 1}; + TfLiteIntArray* outputs_array = + tflite::testing::IntArrayFromInts(outputs_array_data); + + const TFLMRegistration* registration = tflite::Register_CIRCULAR_BUFFER(); + tflite::micro::KernelRunner runner = tflite::micro::KernelRunner( + *registration, tensors, tensors_size, inputs_array, outputs_array, + /*builtin_data=*/nullptr); + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, runner.InitAndPrepare()); + + const int8_t goldens[5][16] = {{0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 2, 3}, + {0, 0, 0, 0, 0, 0, 1, 2, 3, 4, 5, 6}, + {0, 0, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9}, + {1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12}, + {4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15}}; + + // Expect the circular buffer to run every other invoke for 4xN output. + for (int i = 0; i < 5; i++) { + for (int j = 0; j < depth; j++) { + input_data[j] = i * depth + j + 1; + } + TfLiteStatus status = runner.Invoke(); + + for (int j = 0; j < output_dims_count; ++j) { + TF_LITE_MICRO_EXPECT_EQ(goldens[i][j], output_data[j]); + } + + // Every kRunPeriod iterations, the circular buffer should return kTfLiteOk. 
+ if (i % tflite::testing::kRunPeriod == tflite::testing::kRunPeriod - 1) { + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, status); + } else { + TF_LITE_MICRO_EXPECT_EQ(tflite::testing::kTfLiteAbort, status); + } + } +} + +TF_LITE_MICRO_TEST(OutputTensorOnEveryIterationLength4) { + constexpr int depth = 3; + constexpr int num_slots = 4; + int8_t input_data[depth]; + int8_t output_data[depth * num_slots]; + + memset(output_data, 0, sizeof(output_data)); + + // There are four input dimensions - [1, 1, 1, depth]. + int input_dims[] = {4, 1, 1, 1, depth}; + // There are four output dimensions - [1, num_slots, 1, depth]. + int output_dims[] = {4, 1, num_slots, 1, depth}; + + TfLiteIntArray* input_tensor_dims = + tflite::testing::IntArrayFromInts(input_dims); + TfLiteIntArray* output_tensor_dims = + tflite::testing::IntArrayFromInts(output_dims); + + const int output_dims_count = tflite::ElementCount(*output_tensor_dims); + + constexpr int inputs_size = 2; + constexpr int outputs_size = 1; + constexpr int tensors_size = inputs_size + outputs_size; + TfLiteTensor tensors[tensors_size] = { + tflite::testing::CreateQuantizedTensor(input_data, input_tensor_dims, 1, + 0), + tflite::testing::CreateQuantizedTensor(output_data, output_tensor_dims, 1, + 0), + }; + + // There is one input - tensor 0. + int inputs_array_data[] = {1, 0}; + TfLiteIntArray* inputs_array = + tflite::testing::IntArrayFromInts(inputs_array_data); + // There is one output - tensor 1. 
+ int outputs_array_data[] = {1, 1}; + TfLiteIntArray* outputs_array = + tflite::testing::IntArrayFromInts(outputs_array_data); + + const TFLMRegistration* registration = tflite::Register_CIRCULAR_BUFFER(); + tflite::micro::KernelRunner runner = tflite::micro::KernelRunner( + *registration, tensors, tensors_size, inputs_array, outputs_array, + /*builtin_data=*/nullptr); + + TF_LITE_MICRO_EXPECT_EQ( + kTfLiteOk, runner.InitAndPrepare(reinterpret_cast( + g_gen_data_circular_buffer_config), + g_gen_data_size_circular_buffer_config)); + + const int8_t goldens[5][16] = {{0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 2, 3}, + {0, 0, 0, 0, 0, 0, 1, 2, 3, 4, 5, 6}, + {0, 0, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9}, + {1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12}, + {4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15}}; + + // Expect the circular buffer to run every other invoke for 4xN output. + for (int i = 0; i < 5; i++) { + for (int j = 0; j < depth; j++) { + input_data[j] = i * depth + j + 1; + } + TfLiteStatus status = runner.Invoke(); + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, status); + + for (int j = 0; j < output_dims_count; ++j) { + TF_LITE_MICRO_EXPECT_EQ(goldens[i][j], output_data[j]); + } + } +} + +TF_LITE_MICRO_TEST(OutputTensorLength5) { + constexpr int depth = 4; + constexpr int num_slots = 5; + int8_t input_data[depth]; + int8_t output_data[depth * num_slots]; + + memset(output_data, 0, sizeof(output_data)); + int input_dims[] = {4, 1, 1, 1, depth}; + int output_dims[] = {4, 1, num_slots, 1, depth}; + TfLiteIntArray* input_tensor_dims = + tflite::testing::IntArrayFromInts(input_dims); + TfLiteIntArray* output_tensor_dims = + tflite::testing::IntArrayFromInts(output_dims); + + const int output_dims_count = tflite::ElementCount(*output_tensor_dims); + + constexpr int inputs_size = 2; + constexpr int outputs_size = 1; + constexpr int tensors_size = inputs_size + outputs_size; + TfLiteTensor tensors[tensors_size] = { + tflite::testing::CreateQuantizedTensor(input_data, input_tensor_dims, 1, + 0), + 
tflite::testing::CreateQuantizedTensor(output_data, output_tensor_dims, 1, + 0), + }; + + // There is one input - tensor 0. + int inputs_array_data[] = {1, 0}; + TfLiteIntArray* inputs_array = + tflite::testing::IntArrayFromInts(inputs_array_data); + // There is one output - tensor 1. + int outputs_array_data[] = {1, 1}; + TfLiteIntArray* outputs_array = + tflite::testing::IntArrayFromInts(outputs_array_data); + + const TFLMRegistration* registration = tflite::Register_CIRCULAR_BUFFER(); + tflite::micro::KernelRunner runner = tflite::micro::KernelRunner( + *registration, tensors, tensors_size, inputs_array, outputs_array, + /*builtin_data=*/nullptr); + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, runner.InitAndPrepare()); + + const int8_t goldens[6][20] = { + {0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 2, 3, 4}, + {0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 2, 3, 4, 5, 6, 7, 8}, + {0, 0, 0, 0, 0, 0, 0, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12}, + {0, 0, 0, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16}, + {1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20}, + {5, 6, 7, 8, 9, 10, 11, 12, 13, 14, + 15, 16, 17, 18, 19, 20, 21, 22, 23, 24}}; + + // Expect circular buffer to run every cycle for 5xN output. + for (int i = 0; i < 6; i++) { + for (int j = 0; j < depth; j++) { + input_data[j] = i * depth + j + 1; + } + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, runner.Invoke()); + + for (int j = 0; j < output_dims_count; ++j) { + TF_LITE_MICRO_EXPECT_EQ(goldens[i][j], output_data[j]); + } + } +} + +TF_LITE_MICRO_TESTS_END diff --git a/tensorflow/lite/micro/kernels/cmsis_nn/README.md b/tensorflow/lite/micro/kernels/cmsis_nn/README.md new file mode 100644 index 0000000..e4a4de3 --- /dev/null +++ b/tensorflow/lite/micro/kernels/cmsis_nn/README.md @@ -0,0 +1,41 @@ + + +# Info +CMSIS-NN is a library containing kernel optimizations for Arm(R) Cortex(R)-M +processors. 
To use CMSIS-NN optimized kernels instead of reference kernels, add +`OPTIMIZED_KERNEL_DIR=cmsis_nn` to the make command line. See examples below. + +For more information about the optimizations, check out +[CMSIS-NN documentation](https://github.com/ARM-software/CMSIS_5/blob/develop/CMSIS/NN/README.md). + +By default CMSIS-NN is built by code that is downloaded to the TFLM tree. +It is also possible to build CMSIS-NN code from an external path by specifying +CMSIS_PATH=<../path> and CMSIS_NN_PATH=<../path>. Note that both CMSIS_PATH and CMSIS_NN_PATH are needed +since CMSIS-NN has a dependency on CMSIS-Core. As a third option, CMSIS-NN can be provided manually as an external library. +The examples below will illustrate this. + +# Example - FVP based on Arm Corstone-300 software. +In this example, the kernel conv unit test is built. For more information about +this specific target, check out the [Corstone-300 readme](https://github.com/tensorflow/tflite-micro/tree/main/tensorflow/lite/micro/cortex_m_corstone_300/README.md). + +Downloaded CMSIS-NN code is built: +``` +make -f tensorflow/lite/micro/tools/make/Makefile OPTIMIZED_KERNEL_DIR=cmsis_nn TARGET=cortex_m_corstone_300 TARGET_ARCH=cortex-m55 kernel_conv_test +``` + +External CMSIS-NN code is built: +``` +make -f tensorflow/lite/micro/tools/make/Makefile OPTIMIZED_KERNEL_DIR=cmsis_nn CMSIS_PATH= CMSIS_NN_PATH= TARGET=cortex_m_corstone_300 TARGET_ARCH=cortex-m55 kernel_conv_test +``` + +External CMSIS-NN library is linked in: +``` +make -f tensorflow/lite/micro/tools/make/Makefile OPTIMIZED_KERNEL_DIR=cmsis_nn CMSIS_NN_LIBS= CMSIS_PATH= TARGET=cortex_m_corstone_300 TARGET_ARCH=cortex-m55 kernel_conv_test +``` + +Please note that performance and/or size might be affected when using an +external CMSIS-NN library, as different compiler options may have been used. 
+ +Also note that if specifying CMSIS_NN_LIBS but not CMSIS_PATH and/or CMSIS_NN_PATH, headers and +system/startup code from the default downloaded path of CMSIS would be used. +So CMSIS_NN_LIBS, CMSIS_NN_PATH and CMSIS_PATH should have the same base path; if not, there will be a build error. diff --git a/tensorflow/lite/micro/kernels/cmsis_nn/add.cc b/tensorflow/lite/micro/kernels/cmsis_nn/add.cc new file mode 100644 index 0000000..898410a --- /dev/null +++ b/tensorflow/lite/micro/kernels/cmsis_nn/add.cc @@ -0,0 +1,411 @@ +/* Copyright 2022 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/ + +#include "tensorflow/lite/kernels/internal/reference/add.h" + +#include "Include/arm_nnfunctions.h" +#include "tensorflow/lite/c/builtin_op_data.h" +#include "tensorflow/lite/kernels/internal/quantization_util.h" +#include "tensorflow/lite/kernels/internal/reference/integer_ops/add.h" +#include "tensorflow/lite/kernels/internal/reference/process_broadcast_shapes.h" +#include "tensorflow/lite/kernels/internal/tensor_ctypes.h" +#include "tensorflow/lite/kernels/kernel_util.h" +#include "tensorflow/lite/kernels/op_macros.h" +#include "tensorflow/lite/micro/kernels/kernel_util.h" +#include "tensorflow/lite/micro/memory_helpers.h" +#include "tensorflow/lite/micro/micro_log.h" + +namespace tflite { + +namespace { +constexpr int kInputTensor1 = 0; +constexpr int kInputTensor2 = 1; +constexpr int kOutputTensor = 0; + +struct OpData { + bool requires_broadcast; + + // These fields are used in both the general 8-bit -> 8bit quantized path, + // and the special 16-bit -> 16bit quantized path + int input1_shift; + int input2_shift; + int32_t output_activation_min; + int32_t output_activation_max; + + // These fields are used only in the general 8-bit -> 8bit quantized path + int32_t input1_multiplier; + int32_t input2_multiplier; + int32_t output_multiplier; + + int output_shift; + int left_shift; + + int32_t input1_offset; + int32_t input2_offset; + int32_t output_offset; + + // Used only for float evals: + float output_activation_min_f32; + float output_activation_max_f32; +}; + +TfLiteStatus CalculateOpData(TfLiteContext* context, TfLiteAddParams* params, + const TfLiteTensor* input1, + const TfLiteTensor* input2, TfLiteTensor* output, + OpData* data) { + data->requires_broadcast = !HaveSameShapes(input1, input2); + + if (output->type == kTfLiteInt8 || output->type == kTfLiteInt16) { + // 8bit -> 8bit general quantized path, with general rescalings + data->input1_offset = 
-input1->params.zero_point; + data->input2_offset = -input2->params.zero_point; + data->output_offset = output->params.zero_point; + data->left_shift = (output->type == kTfLiteInt16) ? 15 : 20; + const double twice_max_input_scale = + 2 * static_cast( + std::max(input1->params.scale, input2->params.scale)); + const double real_input1_multiplier = + static_cast(input1->params.scale) / twice_max_input_scale; + const double real_input2_multiplier = + static_cast(input2->params.scale) / twice_max_input_scale; + const double real_output_multiplier = + twice_max_input_scale / + ((1 << data->left_shift) * static_cast(output->params.scale)); + + QuantizeMultiplierSmallerThanOneExp( + real_input1_multiplier, &data->input1_multiplier, &data->input1_shift); + + QuantizeMultiplierSmallerThanOneExp( + real_input2_multiplier, &data->input2_multiplier, &data->input2_shift); + + QuantizeMultiplierSmallerThanOneExp( + real_output_multiplier, &data->output_multiplier, &data->output_shift); + + TF_LITE_ENSURE_STATUS(CalculateActivationRangeQuantized( + context, params->activation, output, &data->output_activation_min, + &data->output_activation_max)); + } else if (output->type == kTfLiteFloat32) { + CalculateActivationRange(params->activation, + &data->output_activation_min_f32, + &data->output_activation_max_f32); + } + + return kTfLiteOk; +} + +void UpdateOpParams(tflite::ArithmeticParams* const op_params, + const OpData* data) { + op_params->left_shift = data->left_shift; + op_params->input1_offset = data->input1_offset; + op_params->input1_multiplier = data->input1_multiplier; + op_params->input1_shift = data->input1_shift; + op_params->input2_offset = data->input2_offset; + op_params->input2_multiplier = data->input2_multiplier; + op_params->input2_shift = data->input2_shift; + op_params->output_offset = data->output_offset; + op_params->output_multiplier = data->output_multiplier; + op_params->output_shift = data->output_shift; + SetActivationParams(data->output_activation_min, 
data->output_activation_max, + op_params); +} + +TfLiteStatus EvalAddQuantizedInt8(TfLiteContext* context, TfLiteNode* node, + TfLiteAddParams* params, const OpData* data, + const TfLiteEvalTensor* input1, + const TfLiteEvalTensor* input2, + TfLiteEvalTensor* output) { + tflite::ArithmeticParams op_params; + UpdateOpParams(&op_params, data); + + bool need_broadcast = reference_ops::ProcessBroadcastShapes( + tflite::micro::GetTensorShape(input1), + tflite::micro::GetTensorShape(input2), &op_params); + + if (need_broadcast) { + reference_integer_ops::BroadcastAdd4DSlow( + op_params, tflite::micro::GetTensorShape(input1), + tflite::micro::GetTensorData(input1), + tflite::micro::GetTensorShape(input2), + tflite::micro::GetTensorData(input2), + tflite::micro::GetTensorShape(output), + tflite::micro::GetTensorData(output)); + } else { + arm_elementwise_add_s8( + tflite::micro::GetTensorData(input1), + + tflite::micro::GetTensorData(input2), op_params.input1_offset, + op_params.input1_multiplier, op_params.input1_shift, + op_params.input2_offset, op_params.input2_multiplier, + op_params.input2_shift, op_params.left_shift, + tflite::micro::GetTensorData(output), op_params.output_offset, + op_params.output_multiplier, op_params.output_shift, + op_params.quantized_activation_min, op_params.quantized_activation_max, + MatchingElementsSize(tflite::micro::GetTensorShape(input1), + tflite::micro::GetTensorShape(input2), + tflite::micro::GetTensorShape(output))); + } + + return kTfLiteOk; +} + +TfLiteStatus EvalAddQuantizedInt16(TfLiteContext* context, TfLiteNode* node, + TfLiteAddParams* params, const OpData* data, + const TfLiteEvalTensor* input1, + const TfLiteEvalTensor* input2, + TfLiteEvalTensor* output) { + tflite::ArithmeticParams op_params; + UpdateOpParams(&op_params, data); + + bool need_broadcast = reference_ops::ProcessBroadcastShapes( + tflite::micro::GetTensorShape(input1), + tflite::micro::GetTensorShape(input2), &op_params); + + if (need_broadcast) { + 
reference_ops::BroadcastAdd4DSlow( + op_params, tflite::micro::GetTensorShape(input1), + tflite::micro::GetTensorData(input1), + tflite::micro::GetTensorShape(input2), + tflite::micro::GetTensorData(input2), + tflite::micro::GetTensorShape(output), + tflite::micro::GetTensorData(output)); + } else { + arm_elementwise_add_s16( + tflite::micro::GetTensorData(input1), + tflite::micro::GetTensorData(input2), op_params.input1_offset, + op_params.input1_multiplier, op_params.input1_shift, + op_params.input2_offset, op_params.input2_multiplier, + op_params.input2_shift, op_params.left_shift, + tflite::micro::GetTensorData(output), op_params.output_offset, + op_params.output_multiplier, op_params.output_shift, + op_params.quantized_activation_min, op_params.quantized_activation_max, + MatchingElementsSize(tflite::micro::GetTensorShape(input1), + tflite::micro::GetTensorShape(input2), + tflite::micro::GetTensorShape(output))); + } + + return kTfLiteOk; +} + +TfLiteStatus EvalAdd(TfLiteContext* context, TfLiteNode* node, + TfLiteAddParams* params, const OpData* data, + const TfLiteEvalTensor* input1, + const TfLiteEvalTensor* input2, TfLiteEvalTensor* output) { + switch (output->type) { + case kTfLiteFloat32: { + tflite::ArithmeticParams op_params; + SetActivationParams(data->output_activation_min_f32, + data->output_activation_max_f32, &op_params); + if (data->requires_broadcast) { + reference_ops::BroadcastAdd4DSlow( + op_params, tflite::micro::GetTensorShape(input1), + tflite::micro::GetTensorData(input1), + tflite::micro::GetTensorShape(input2), + tflite::micro::GetTensorData(input2), + tflite::micro::GetTensorShape(output), + tflite::micro::GetTensorData(output)); + } else { + reference_ops::Add(op_params, tflite::micro::GetTensorShape(input1), + tflite::micro::GetTensorData(input1), + tflite::micro::GetTensorShape(input2), + tflite::micro::GetTensorData(input2), + tflite::micro::GetTensorShape(output), + tflite::micro::GetTensorData(output)); + } + } break; + case 
kTfLiteInt32: { + tflite::ArithmeticParams op_params; + SetActivationParams(std::numeric_limits::lowest(), + std::numeric_limits::max(), &op_params); + if (data->requires_broadcast) { + reference_ops::BroadcastAdd4DSlow( + op_params, tflite::micro::GetTensorShape(input1), + tflite::micro::GetTensorData(input1), + tflite::micro::GetTensorShape(input2), + tflite::micro::GetTensorData(input2), + tflite::micro::GetTensorShape(output), + tflite::micro::GetTensorData(output)); + } else { + reference_ops::Add(op_params, tflite::micro::GetTensorShape(input1), + tflite::micro::GetTensorData(input1), + tflite::micro::GetTensorShape(input2), + tflite::micro::GetTensorData(input2), + tflite::micro::GetTensorShape(output), + tflite::micro::GetTensorData(output)); + } + } break; + default: + MicroPrintf("Type %s (%d) not supported.", + TfLiteTypeGetName(output->type), output->type); + return kTfLiteError; + } + + return kTfLiteOk; +} + +TfLiteStatus EvalAddQuantized(TfLiteContext* context, TfLiteNode* node, + TfLiteAddParams* params, const OpData* data, + const TfLiteEvalTensor* input1, + const TfLiteEvalTensor* input2, + TfLiteEvalTensor* output) { + switch (output->type) { + case kTfLiteInt8: { + EvalAddQuantizedInt8(context, node, params, data, input1, input2, output); + break; + } + case kTfLiteInt16: { + EvalAddQuantizedInt16(context, node, params, data, input1, input2, + output); + break; + } + default: + MicroPrintf("Type %s (%d) not supported.", + TfLiteTypeGetName(output->type), output->type); + return kTfLiteError; + } + + return kTfLiteOk; +} + +} // namespace + +void* InitAdd(TfLiteContext* context, const char* buffer, size_t length) { + TFLITE_DCHECK(context->AllocatePersistentBuffer != nullptr); + return context->AllocatePersistentBuffer(context, sizeof(OpData)); +} + +TfLiteStatus PrepareAdd(TfLiteContext* context, TfLiteNode* node) { + TFLITE_DCHECK(node->user_data != nullptr); + TFLITE_DCHECK(node->builtin_data != nullptr); + + MicroContext* micro_context = 
GetMicroContext(context); + + TfLiteTensor* input1 = + micro_context->AllocateTempInputTensor(node, kInputTensor1); + TF_LITE_ENSURE(context, input1 != nullptr); + TfLiteTensor* input2 = + micro_context->AllocateTempInputTensor(node, kInputTensor2); + TF_LITE_ENSURE(context, input2 != nullptr); + TfLiteTensor* output = + micro_context->AllocateTempOutputTensor(node, kOutputTensor); + TF_LITE_ENSURE(context, output != nullptr); + + if (input1->type == kTfLiteInt16) { + TF_LITE_ENSURE_EQ(context, input1->params.zero_point, 0); + TF_LITE_ENSURE_EQ(context, input2->params.zero_point, 0); + TF_LITE_ENSURE_EQ(context, output->params.zero_point, 0); + } + + OpData* data = static_cast(node->user_data); + auto* params = reinterpret_cast(node->builtin_data); + + TF_LITE_ENSURE_STATUS( + CalculateOpData(context, params, input1, input2, output, data)); + + if (output->type == kTfLiteInt32) { + // Only support int32 unquantized add for now. + TF_LITE_ENSURE_EQ(context, input1->quantization.type, + kTfLiteNoQuantization); + TF_LITE_ENSURE_EQ(context, input2->quantization.type, + kTfLiteNoQuantization); + } + + micro_context->DeallocateTempTfLiteTensor(input1); + micro_context->DeallocateTempTfLiteTensor(input2); + micro_context->DeallocateTempTfLiteTensor(output); + + return kTfLiteOk; +} + +TfLiteStatus EvalAdd(TfLiteContext* context, TfLiteNode* node) { + auto* params = reinterpret_cast(node->builtin_data); + + const TfLiteEvalTensor* input1 = + tflite::micro::GetEvalInput(context, node, kInputTensor1); + const TfLiteEvalTensor* input2 = + tflite::micro::GetEvalInput(context, node, kInputTensor2); + TfLiteEvalTensor* output = + tflite::micro::GetEvalOutput(context, node, kOutputTensor); + + TFLITE_DCHECK(node->user_data != nullptr); + const OpData* data = static_cast(node->user_data); + + if (output->type == kTfLiteFloat32 || output->type == kTfLiteInt32) { + TF_LITE_ENSURE_OK( + context, EvalAdd(context, node, params, data, input1, input2, output)); + } else if (output->type 
== kTfLiteInt8 || output->type == kTfLiteInt16) { + TF_LITE_ENSURE_OK(context, EvalAddQuantized(context, node, params, data, + input1, input2, output)); + } else { + MicroPrintf("Type %s (%d) not supported.", TfLiteTypeGetName(output->type), + output->type); + return kTfLiteError; + } + + return kTfLiteOk; +} + +TfLiteStatus EvalAddInt8(TfLiteContext* context, TfLiteNode* node) { + auto* params = reinterpret_cast(node->builtin_data); + + const TfLiteEvalTensor* input1 = + tflite::micro::GetEvalInput(context, node, kInputTensor1); + const TfLiteEvalTensor* input2 = + tflite::micro::GetEvalInput(context, node, kInputTensor2); + TfLiteEvalTensor* output = + tflite::micro::GetEvalOutput(context, node, kOutputTensor); + + TFLITE_DCHECK(node->user_data != nullptr); + TFLITE_DCHECK(output->type == kTfLiteInt8); + const OpData* data = static_cast(node->user_data); + + TF_LITE_ENSURE_OK(context, EvalAddQuantizedInt8(context, node, params, data, + input1, input2, output)); + + return kTfLiteOk; +} + +TfLiteStatus EvalAddInt16(TfLiteContext* context, TfLiteNode* node) { + auto* params = reinterpret_cast(node->builtin_data); + + const TfLiteEvalTensor* input1 = + tflite::micro::GetEvalInput(context, node, kInputTensor1); + const TfLiteEvalTensor* input2 = + tflite::micro::GetEvalInput(context, node, kInputTensor2); + TfLiteEvalTensor* output = + tflite::micro::GetEvalOutput(context, node, kOutputTensor); + + TFLITE_DCHECK(node->user_data != nullptr); + TFLITE_DCHECK(output->type == kTfLiteInt16); + const OpData* data = static_cast(node->user_data); + + TF_LITE_ENSURE_OK(context, EvalAddQuantizedInt16(context, node, params, data, + input1, input2, output)); + + return kTfLiteOk; +} + +TFLMRegistration Register_ADD() { + return tflite::micro::RegisterOp(InitAdd, PrepareAdd, EvalAdd); +} + +TFLMRegistration Register_ADD_INT8() { + return tflite::micro::RegisterOp(InitAdd, PrepareAdd, EvalAddInt8); +} + +TFLMRegistration Register_ADD_INT16() { + return 
tflite::micro::RegisterOp(InitAdd, PrepareAdd, EvalAddInt16); +} + +} // namespace tflite diff --git a/tensorflow/lite/micro/kernels/cmsis_nn/conv.cc b/tensorflow/lite/micro/kernels/cmsis_nn/conv.cc new file mode 100644 index 0000000..8b6928b --- /dev/null +++ b/tensorflow/lite/micro/kernels/cmsis_nn/conv.cc @@ -0,0 +1,481 @@ +/* Copyright 2023 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#include "tensorflow/lite/micro/kernels/conv.h" + +#include "Include/arm_nnfunctions.h" +#include "tensorflow/lite/c/builtin_op_data.h" +#include "tensorflow/lite/c/common.h" +#include "tensorflow/lite/kernels/internal/common.h" +#include "tensorflow/lite/kernels/internal/quantization_util.h" +#include "tensorflow/lite/kernels/internal/reference/conv.h" +#include "tensorflow/lite/kernels/internal/reference/integer_ops/conv.h" +#include "tensorflow/lite/kernels/internal/tensor_ctypes.h" +#include "tensorflow/lite/kernels/kernel_util.h" +#include "tensorflow/lite/kernels/padding.h" +#include "tensorflow/lite/micro/kernels/kernel_util.h" +#include "tensorflow/lite/micro/micro_log.h" + +namespace tflite { +namespace { + +struct OpData { + OpDataConv reference_op_data; + + // Index to buffer for optimizations if applicable. 
+ int buffer_idx; +}; + +void* Init(TfLiteContext* context, const char* buffer, size_t length) { + TFLITE_DCHECK(context->AllocatePersistentBuffer != nullptr); + return context->AllocatePersistentBuffer(context, sizeof(OpData)); +} + +TfLiteStatus Prepare(TfLiteContext* context, TfLiteNode* node) { + TFLITE_DCHECK(node->user_data != nullptr); + TFLITE_DCHECK(node->builtin_data != nullptr); + + int32_t buf_size = 0; + const auto& params = + *(static_cast(node->builtin_data)); + OpData* data = static_cast(node->user_data); + + MicroContext* micro_context = GetMicroContext(context); + + TfLiteTensor* input = + micro_context->AllocateTempInputTensor(node, kConvInputTensor); + TF_LITE_ENSURE(context, input != nullptr); + TfLiteTensor* filter = + micro_context->AllocateTempInputTensor(node, kConvWeightsTensor); + TF_LITE_ENSURE(context, filter != nullptr); + TfLiteTensor* output = + micro_context->AllocateTempOutputTensor(node, kConvOutputTensor); + TF_LITE_ENSURE(context, output != nullptr); + + RuntimeShape input_shape = GetTensorShape(input); + RuntimeShape output_shape = GetTensorShape(output); + + // Initialize cmsis_nn input dimensions + cmsis_nn_dims input_dims; + input_dims.n = MatchingDim(input_shape, 0, output_shape, 0); + input_dims.h = input->dims->data[1]; + input_dims.w = input->dims->data[2]; + input_dims.c = input_shape.Dims(3); + + // Initialize cmsis_nn filter dimensions + cmsis_nn_dims filter_dims; + filter_dims.n = output_shape.Dims(3); + filter_dims.h = filter->dims->data[1]; + filter_dims.w = filter->dims->data[2]; + filter_dims.c = input_dims.c; + + // Initialize cmsis_nn output dimensions + cmsis_nn_dims output_dims; + output_dims.n = input_dims.n; + output_dims.h = output->dims->data[1]; + output_dims.w = output->dims->data[2]; + output_dims.c = output_shape.Dims(3); + + if (filter->type == kTfLiteInt4) { + int filter_size = + RuntimeShape(filter->dims->size, + reinterpret_cast(filter->dims->data)) + .FlatSize(); + 
context->RequestScratchBufferInArena( + context, filter_size, &data->reference_op_data.filter_buffer_index); + } + + if (input->type == kTfLiteInt8 || input->type == kTfLiteInt16) { + const int num_channels = filter->dims->data[kConvQuantizedDimension]; + data->reference_op_data.per_channel_output_multiplier = + static_cast(context->AllocatePersistentBuffer( + context, num_channels * sizeof(int32_t))); + data->reference_op_data.per_channel_output_shift = + static_cast(context->AllocatePersistentBuffer( + context, num_channels * sizeof(int32_t))); + } + + TF_LITE_ENSURE_STATUS(CalculateOpDataConv( + context, node, params, input_dims.w, input_dims.h, filter_dims.w, + filter_dims.h, output_dims.w, output_dims.h, input->type, + &data->reference_op_data)); + + if (input->type == kTfLiteInt8 || input->type == kTfLiteInt16) { + // Initialize cmsis_nn convolution parameters + cmsis_nn_conv_params conv_params; + conv_params.input_offset = -input->params.zero_point; + conv_params.output_offset = output->params.zero_point; + conv_params.stride.h = params.stride_height; + conv_params.stride.w = params.stride_width; + conv_params.dilation.h = params.dilation_height_factor; + conv_params.dilation.w = params.dilation_width_factor; + conv_params.padding.h = data->reference_op_data.padding.height; + conv_params.padding.w = data->reference_op_data.padding.width; + conv_params.activation.min = data->reference_op_data.output_activation_min; + conv_params.activation.max = data->reference_op_data.output_activation_max; + + if (input->type == kTfLiteInt8) { + buf_size = arm_convolve_wrapper_s8_get_buffer_size( + &conv_params, &input_dims, &filter_dims, &output_dims); + } else if (input->type == kTfLiteInt16) { + TF_LITE_ENSURE_EQ(context, input->params.zero_point, 0); + TF_LITE_ENSURE_EQ(context, output->params.zero_point, 0); + buf_size = arm_convolve_wrapper_s16_get_buffer_size( + &conv_params, &input_dims, &filter_dims, &output_dims); + } + + if (buf_size > 0) { + 
TF_LITE_ENSURE_STATUS(context->RequestScratchBufferInArena( + context, buf_size, &data->buffer_idx)); + } else { + data->buffer_idx = -1; + } + } + + micro_context->DeallocateTempTfLiteTensor(output); + micro_context->DeallocateTempTfLiteTensor(input); + micro_context->DeallocateTempTfLiteTensor(filter); + + return kTfLiteOk; +} + +TfLiteStatus EvalQuantizedPerChannel(TfLiteContext* context, TfLiteNode* node, + const TfLiteConvParams& params, + const OpData& data, + const TfLiteEvalTensor* input, + const TfLiteEvalTensor* filter, + const TfLiteEvalTensor* bias, + TfLiteEvalTensor* output) { + cmsis_nn_conv_params conv_params; + conv_params.dilation.h = params.dilation_height_factor; + conv_params.dilation.w = params.dilation_width_factor; + + // Initialize cmsis_nn convolution parameters + conv_params.input_offset = -data.reference_op_data.input_zero_point; + conv_params.output_offset = data.reference_op_data.output_zero_point; + conv_params.stride.h = params.stride_height; + conv_params.stride.w = params.stride_width; + conv_params.padding.h = data.reference_op_data.padding.height; + conv_params.padding.w = data.reference_op_data.padding.width; + conv_params.activation.min = data.reference_op_data.output_activation_min; + conv_params.activation.max = data.reference_op_data.output_activation_max; + + // Initialize cmsis_nn per channel quantization parameters + cmsis_nn_per_channel_quant_params quant_params; + quant_params.multiplier = const_cast( + data.reference_op_data.per_channel_output_multiplier); + quant_params.shift = + const_cast(data.reference_op_data.per_channel_output_shift); + + RuntimeShape filter_shape = tflite::micro::GetTensorShape(filter); + RuntimeShape input_shape = tflite::micro::GetTensorShape(input); + RuntimeShape output_shape = tflite::micro::GetTensorShape(output); + RuntimeShape bias_shape = tflite::micro::GetTensorShape(bias); + + // Consistency check. 
+ TFLITE_DCHECK_LE(conv_params.activation.min, conv_params.activation.max); + TFLITE_DCHECK_EQ(input_shape.DimensionsCount(), 4); + TFLITE_DCHECK_EQ(filter_shape.DimensionsCount(), 4); + TFLITE_DCHECK_EQ(output_shape.DimensionsCount(), 4); + const int batch_size = MatchingDim(input_shape, 0, output_shape, 0); + const int input_depth = MatchingDim(input_shape, 3, filter_shape, 3); + const int output_depth = MatchingDim(filter_shape, 0, output_shape, 3); + if (tflite::micro::GetOptionalTensorData(bias)) { + TFLITE_DCHECK_EQ(bias_shape.FlatSize(), output_depth); + } + + // Initialize cmsis_nn dimensions + // Input + cmsis_nn_dims input_dims; + input_dims.n = batch_size; + input_dims.h = input_shape.Dims(1); + input_dims.w = input_shape.Dims(2); + input_dims.c = input_depth; + + // Filter + cmsis_nn_dims filter_dims; + filter_dims.n = output_depth; + filter_dims.h = filter_shape.Dims(1); + filter_dims.w = filter_shape.Dims(2); + filter_dims.c = input_depth; + + // Bias + cmsis_nn_dims bias_dims; + bias_dims.n = 1; + bias_dims.h = 1; + bias_dims.w = 1; + bias_dims.c = output_depth; + + // Output + cmsis_nn_dims output_dims; + output_dims.n = batch_size; + output_dims.h = output_shape.Dims(1); + output_dims.w = output_shape.Dims(2); + output_dims.c = output_depth; + + // Initialize cmsis_nn context + cmsis_nn_context ctx; + ctx.buf = nullptr; + ctx.size = 0; + + if (data.buffer_idx > -1) { + ctx.buf = context->GetScratchBuffer(context, data.buffer_idx); + // Note: ctx.size is currently not used in cmsis_nn. 
+ // The buffer should be allocated in the Prepare function through + // arm_convolve_wrapper_s8_get_buffer_size + } + + // arm_convolve_wrapper_s8 dispatches the optimized kernel accordingly with + // the parameters passed + TFLITE_DCHECK_EQ( + arm_convolve_wrapper_s8( + &ctx, &conv_params, &quant_params, &input_dims, + tflite::micro::GetTensorData(input), &filter_dims, + tflite::micro::GetTensorData(filter), &bias_dims, + tflite::micro::GetOptionalTensorData(bias), &output_dims, + tflite::micro::GetTensorData(output)), + ARM_CMSIS_NN_SUCCESS); + + return kTfLiteOk; +} + +TfLiteStatus EvalQuantizedPerChannel16x8( + TfLiteContext* context, TfLiteNode* node, const TfLiteConvParams& params, + const OpData& data, const TfLiteEvalTensor* input, + const TfLiteEvalTensor* filter, const TfLiteEvalTensor* bias, + TfLiteEvalTensor* output) { + cmsis_nn_conv_params conv_params; + conv_params.dilation.h = params.dilation_height_factor; + conv_params.dilation.w = params.dilation_width_factor; + + // Initialize cmsis_nn convolution parameters + conv_params.input_offset = -data.reference_op_data.input_zero_point; + conv_params.output_offset = data.reference_op_data.output_zero_point; + conv_params.stride.h = params.stride_height; + conv_params.stride.w = params.stride_width; + conv_params.padding.h = data.reference_op_data.padding.height; + conv_params.padding.w = data.reference_op_data.padding.width; + conv_params.activation.min = data.reference_op_data.output_activation_min; + conv_params.activation.max = data.reference_op_data.output_activation_max; + + // Initialize cmsis_nn per channel quantization parameters + cmsis_nn_per_channel_quant_params quant_params; + quant_params.multiplier = const_cast( + data.reference_op_data.per_channel_output_multiplier); + quant_params.shift = + const_cast(data.reference_op_data.per_channel_output_shift); + + RuntimeShape filter_shape = tflite::micro::GetTensorShape(filter); + RuntimeShape input_shape = tflite::micro::GetTensorShape(input); 
+ RuntimeShape output_shape = tflite::micro::GetTensorShape(output); + RuntimeShape bias_shape = tflite::micro::GetTensorShape(bias); + + // Consistency check. + TFLITE_DCHECK_LE(conv_params.activation.min, conv_params.activation.max); + TFLITE_DCHECK_EQ(input_shape.DimensionsCount(), 4); + TFLITE_DCHECK_EQ(filter_shape.DimensionsCount(), 4); + TFLITE_DCHECK_EQ(output_shape.DimensionsCount(), 4); + const int batch_size = MatchingDim(input_shape, 0, output_shape, 0); + const int input_depth = MatchingDim(input_shape, 3, filter_shape, 3); + const int output_depth = MatchingDim(filter_shape, 0, output_shape, 3); + if (tflite::micro::GetOptionalTensorData(bias)) { + TFLITE_DCHECK_EQ(bias_shape.FlatSize(), output_depth); + } + + // Initialize cmsis_nn dimensions + // Input + cmsis_nn_dims input_dims; + input_dims.n = batch_size; + input_dims.h = input_shape.Dims(1); + input_dims.w = input_shape.Dims(2); + input_dims.c = input_depth; + + // Filter + cmsis_nn_dims filter_dims; + filter_dims.n = output_depth; + filter_dims.h = filter_shape.Dims(1); + filter_dims.w = filter_shape.Dims(2); + filter_dims.c = input_depth; + + // Bias + cmsis_nn_dims bias_dims; + bias_dims.n = 1; + bias_dims.h = 1; + bias_dims.w = 1; + bias_dims.c = output_depth; + + // Output + cmsis_nn_dims output_dims; + output_dims.n = batch_size; + output_dims.h = output_shape.Dims(1); + output_dims.w = output_shape.Dims(2); + output_dims.c = output_depth; + + // Initialize cmsis_nn context + cmsis_nn_context ctx; + ctx.buf = nullptr; + ctx.size = 0; + + if (data.buffer_idx > -1) { + ctx.buf = context->GetScratchBuffer(context, data.buffer_idx); + // Note: ctx.size is currently not used in cmsis_nn. 
+ // The buffer should be allocated in the Prepare function through + // arm_convolve_wrapper_s8_get_buffer_size + } + + TFLITE_DCHECK_EQ( + arm_convolve_wrapper_s16( + &ctx, &conv_params, &quant_params, &input_dims, + tflite::micro::GetTensorData(input), &filter_dims, + tflite::micro::GetTensorData(filter), &bias_dims, + tflite::micro::GetOptionalTensorData(bias), &output_dims, + tflite::micro::GetTensorData(output)), + ARM_CMSIS_NN_SUCCESS); + + return kTfLiteOk; +} + +TfLiteStatus EvalInt8(TfLiteContext* context, TfLiteNode* node) { + const TfLiteEvalTensor* input = + tflite::micro::GetEvalInput(context, node, kConvInputTensor); + const TfLiteEvalTensor* filter = + tflite::micro::GetEvalInput(context, node, kConvWeightsTensor); + const TfLiteEvalTensor* bias = + (NumInputs(node) == 3) + ? tflite::micro::GetEvalInput(context, node, kConvBiasTensor) + : nullptr; + TfLiteEvalTensor* output = + tflite::micro::GetEvalOutput(context, node, kConvOutputTensor); + + TFLITE_DCHECK(node->builtin_data != nullptr); + const auto& params = + *(reinterpret_cast(node->builtin_data)); + TFLITE_DCHECK(node->user_data != nullptr); + const OpData& data = *(static_cast(node->user_data)); + TfLiteEvalTensor filter_int8 = tflite::micro::MakeUnpackedInt4Tensor( + context, data.reference_op_data.filter_buffer_index, filter); + + return EvalQuantizedPerChannel(context, node, params, data, input, + &filter_int8, bias, output); +} + +TfLiteStatus EvalInt16x8(TfLiteContext* context, TfLiteNode* node) { + const TfLiteEvalTensor* input = + tflite::micro::GetEvalInput(context, node, kConvInputTensor); + const TfLiteEvalTensor* filter = + tflite::micro::GetEvalInput(context, node, kConvWeightsTensor); + const TfLiteEvalTensor* bias = + (NumInputs(node) == 3) + ? 
tflite::micro::GetEvalInput(context, node, kConvBiasTensor) + : nullptr; + TfLiteEvalTensor* output = + tflite::micro::GetEvalOutput(context, node, kConvOutputTensor); + + TFLITE_DCHECK(node->builtin_data != nullptr); + const auto& params = + *(reinterpret_cast(node->builtin_data)); + TFLITE_DCHECK(node->user_data != nullptr); + const OpData& data = *(static_cast(node->user_data)); + + return EvalQuantizedPerChannel16x8(context, node, params, data, input, filter, + bias, output); +} + +TfLiteStatus Eval(TfLiteContext* context, TfLiteNode* node) { + const TfLiteEvalTensor* input = + tflite::micro::GetEvalInput(context, node, kConvInputTensor); + const TfLiteEvalTensor* filter = + tflite::micro::GetEvalInput(context, node, kConvWeightsTensor); + const TfLiteEvalTensor* bias = + (NumInputs(node) == 3) + ? tflite::micro::GetEvalInput(context, node, kConvBiasTensor) + : nullptr; + TfLiteEvalTensor* output = + tflite::micro::GetEvalOutput(context, node, kConvOutputTensor); + + TFLITE_DCHECK(node->builtin_data != nullptr); + const auto& params = + *(reinterpret_cast(node->builtin_data)); + TFLITE_DCHECK(node->user_data != nullptr); + const OpData& data = *(static_cast(node->user_data)); + + TF_LITE_ENSURE_EQ(context, input->type, output->type); + TF_LITE_ENSURE_MSG( + context, + input->type == filter->type || + (input->type == kTfLiteInt16 && filter->type == kTfLiteInt8) || + (input->type == kTfLiteInt8 && filter->type == kTfLiteInt4), + "Hybrid models are not supported on TFLite Micro."); + + TfLiteEvalTensor filter_int8 = tflite::micro::MakeUnpackedInt4Tensor( + context, data.reference_op_data.filter_buffer_index, filter); + + switch (input->type) { // Already know in/out types are same. 
+ case kTfLiteFloat32: { + tflite::reference_ops::Conv( + ConvParamsFloat(params, data.reference_op_data), + tflite::micro::GetTensorShape(input), + tflite::micro::GetTensorData(input), + tflite::micro::GetTensorShape(filter), + tflite::micro::GetTensorData(filter), + tflite::micro::GetTensorShape(bias), + tflite::micro::GetOptionalTensorData(bias), + tflite::micro::GetTensorShape(output), + tflite::micro::GetTensorData(output), + tflite::micro::GetTensorShape(nullptr), nullptr); + break; + } + case kTfLiteInt8: + switch (filter_int8.type) { + case kTfLiteInt8: { + return EvalQuantizedPerChannel(context, node, params, data, input, + &filter_int8, bias, output); + } + + default: { + MicroPrintf("Filter type %s (%d) not supported.", + TfLiteTypeGetName(filter->type), filter->type); + return kTfLiteError; + } + } + + break; + case kTfLiteInt16: + return EvalQuantizedPerChannel16x8(context, node, params, data, input, + filter, bias, output); + break; + default: + MicroPrintf("Type %s (%d) not supported.", TfLiteTypeGetName(input->type), + input->type); + return kTfLiteError; + } + return kTfLiteOk; +} + +} // namespace + +TFLMRegistration Register_CONV_2D() { + return tflite::micro::RegisterOp(Init, Prepare, Eval); +} + +TFLMRegistration Register_CONV_2D_INT8() { + return tflite::micro::RegisterOp(Init, Prepare, EvalInt8); +} + +TFLMRegistration Register_CONV_2D_INT16() { + return tflite::micro::RegisterOp(Init, Prepare, EvalInt16x8); +} + +} // namespace tflite diff --git a/tensorflow/lite/micro/kernels/cmsis_nn/depthwise_conv.cc b/tensorflow/lite/micro/kernels/cmsis_nn/depthwise_conv.cc new file mode 100644 index 0000000..7b733b7 --- /dev/null +++ b/tensorflow/lite/micro/kernels/cmsis_nn/depthwise_conv.cc @@ -0,0 +1,448 @@ +/* Copyright 2022 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. 
+You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#include "tensorflow/lite/micro/kernels/depthwise_conv.h" + +#include "Include/arm_nnfunctions.h" +#include "tensorflow/lite/c/builtin_op_data.h" +#include "tensorflow/lite/c/common.h" +#include "tensorflow/lite/kernels/internal/common.h" +#include "tensorflow/lite/kernels/internal/quantization_util.h" +#include "tensorflow/lite/kernels/internal/reference/depthwiseconv_float.h" +#include "tensorflow/lite/kernels/internal/reference/integer_ops/depthwise_conv.h" +#include "tensorflow/lite/kernels/internal/tensor_ctypes.h" +#include "tensorflow/lite/kernels/kernel_util.h" +#include "tensorflow/lite/kernels/padding.h" +#include "tensorflow/lite/micro/kernels/conv.h" +#include "tensorflow/lite/micro/kernels/kernel_util.h" +#include "tensorflow/lite/micro/micro_log.h" + +namespace tflite { +namespace { + +struct OpData { + OpDataConv reference_op_data; + + // Index to buffer for optimizations if applicable. + int buffer_idx; +}; + +// Always inline for optimal code size. 
+void PopulateDwConvParams( + cmsis_nn_dw_conv_params* const dw_conv_params, + cmsis_nn_per_channel_quant_params* const quant_params, + cmsis_nn_dims* const input_dims, cmsis_nn_dims* const filter_dims, + cmsis_nn_dims* const bias_dims, cmsis_nn_dims* const output_dims, + const TfLiteDepthwiseConvParams& params, const OpData& data, + const TfLiteEvalTensor* input, const TfLiteEvalTensor* filter, + const TfLiteEvalTensor* bias, TfLiteEvalTensor* output) + __attribute__((always_inline)); + +void* Init(TfLiteContext* context, const char* buffer, size_t length) { + TFLITE_DCHECK(context->AllocatePersistentBuffer != nullptr); + return context->AllocatePersistentBuffer(context, sizeof(OpData)); +} + +TfLiteStatus Prepare(TfLiteContext* context, TfLiteNode* node) { + TFLITE_DCHECK(node->user_data != nullptr); + TFLITE_DCHECK(node->builtin_data != nullptr); + + OpData* data = static_cast(node->user_data); + const auto& params = + *(reinterpret_cast(node->builtin_data)); + + MicroContext* micro_context = GetMicroContext(context); + + TfLiteTensor* input = + micro_context->AllocateTempInputTensor(node, kDepthwiseConvInputTensor); + TF_LITE_ENSURE(context, input != nullptr); + TfLiteTensor* filter = + micro_context->AllocateTempInputTensor(node, kDepthwiseConvWeightsTensor); + TF_LITE_ENSURE(context, filter != nullptr); + TfLiteTensor* output = + micro_context->AllocateTempOutputTensor(node, kDepthwiseConvOutputTensor); + TF_LITE_ENSURE(context, output != nullptr); + + const TfLiteType data_type = input->type; + int input_width = SizeOfDimension(input, 2); + int input_height = SizeOfDimension(input, 1); + int filter_width = SizeOfDimension(filter, 2); + int filter_height = SizeOfDimension(filter, 1); + int output_width = SizeOfDimension(output, 2); + int output_height = SizeOfDimension(output, 1); + + if (input->type == kTfLiteInt8 || input->type == kTfLiteInt16) { + TF_LITE_ENSURE_EQ(context, filter->quantization.type, + kTfLiteAffineQuantization); + + if (input->type == 
kTfLiteInt16) { + TF_LITE_ENSURE_EQ(context, input->params.zero_point, 0); + TF_LITE_ENSURE_EQ(context, output->params.zero_point, 0); + } + + // All per-channel quantized tensors need valid zero point and scale arrays. + const auto* affine_quantization = + reinterpret_cast( + filter->quantization.params); + TF_LITE_ENSURE(context, affine_quantization); + TF_LITE_ENSURE(context, affine_quantization->scale); + TF_LITE_ENSURE(context, affine_quantization->zero_point); + TF_LITE_ENSURE( + context, affine_quantization->scale->size == 1 || + affine_quantization->scale->size == + filter->dims->data[kDepthwiseConvQuantizedDimension]); + TF_LITE_ENSURE_EQ(context, affine_quantization->scale->size, + affine_quantization->zero_point->size); + + // Allocate memory for per-channel quantization parameters + const int num_channels = + filter->dims->data[kDepthwiseConvQuantizedDimension]; + + data->reference_op_data.per_channel_output_multiplier = + reinterpret_cast(context->AllocatePersistentBuffer( + context, num_channels * sizeof(int32_t))); + data->reference_op_data.per_channel_output_shift = + reinterpret_cast(context->AllocatePersistentBuffer( + context, num_channels * sizeof(int32_t))); + } + + if (filter->type == kTfLiteInt4) { + int filter_size = + RuntimeShape(filter->dims->size, + reinterpret_cast(filter->dims->data)) + .FlatSize(); + context->RequestScratchBufferInArena( + context, filter_size, &data->reference_op_data.filter_buffer_index); + } + + TF_LITE_ENSURE_STATUS(CalculateOpDataDepthwiseConv( + context, node, params, input_width, input_height, filter_width, + filter_height, output_width, output_height, data_type, + &data->reference_op_data)); + + if (input->type == kTfLiteInt8) { + RuntimeShape input_shape = GetTensorShape(input); + RuntimeShape output_shape = GetTensorShape(output); + RuntimeShape filter_shape = GetTensorShape(filter); + TFLITE_DCHECK_EQ(input_shape.DimensionsCount(), 4); + TFLITE_DCHECK_EQ(filter_shape.DimensionsCount(), 4); + 
TFLITE_DCHECK_EQ(output_shape.DimensionsCount(), 4); + + const int batch_size = MatchingDim(input_shape, 0, output_shape, 0); + const int output_depth = MatchingDim(output_shape, 3, filter_shape, 3); + TFLITE_DCHECK_EQ(batch_size, 1); /* Only batch = 1 is supported */ + + cmsis_nn_dims input_dims; + input_dims.n = batch_size; + input_dims.h = input_height; + input_dims.w = input_width; + input_dims.c = input_shape.Dims(3); + + cmsis_nn_dims filter_dims; + filter_dims.n = 1; + filter_dims.h = filter_height; + filter_dims.w = filter_width; + filter_dims.c = output_depth; + + cmsis_nn_dims output_dims; + output_dims.n = batch_size; + output_dims.h = output_height; + output_dims.w = output_width; + output_dims.c = output_depth; + + cmsis_nn_dw_conv_params dw_conv_params; + dw_conv_params.padding.h = data->reference_op_data.padding.height; + dw_conv_params.padding.w = data->reference_op_data.padding.width; + dw_conv_params.dilation.h = params.dilation_height_factor; + dw_conv_params.dilation.w = params.dilation_width_factor; + + const int32_t buf_size = arm_depthwise_conv_wrapper_s8_get_buffer_size( + &dw_conv_params, &input_dims, &filter_dims, &output_dims); + + if (buf_size > 0) { + TF_LITE_ENSURE_STATUS(context->RequestScratchBufferInArena( + context, buf_size, &data->buffer_idx)); + } else { + data->buffer_idx = -1; + } + } + + micro_context->DeallocateTempTfLiteTensor(output); + micro_context->DeallocateTempTfLiteTensor(input); + micro_context->DeallocateTempTfLiteTensor(filter); + + return kTfLiteOk; +} + +inline void PopulateDwConvParams( + cmsis_nn_dw_conv_params* const dw_conv_params, + cmsis_nn_per_channel_quant_params* const quant_params, + cmsis_nn_dims* const input_dims, cmsis_nn_dims* const filter_dims, + cmsis_nn_dims* const bias_dims, cmsis_nn_dims* const output_dims, + const TfLiteDepthwiseConvParams& params, const OpData& data, + const TfLiteEvalTensor* input, const TfLiteEvalTensor* filter, + const TfLiteEvalTensor* bias, TfLiteEvalTensor* output) { + 
dw_conv_params->dilation.h = params.dilation_height_factor; + dw_conv_params->dilation.w = params.dilation_width_factor; + + dw_conv_params->input_offset = -data.reference_op_data.input_zero_point; + dw_conv_params->output_offset = data.reference_op_data.output_zero_point; + dw_conv_params->stride.h = params.stride_height; + dw_conv_params->stride.w = params.stride_width; + dw_conv_params->padding.h = data.reference_op_data.padding.height; + dw_conv_params->padding.w = data.reference_op_data.padding.width; + + dw_conv_params->activation.min = data.reference_op_data.output_activation_min; + dw_conv_params->activation.max = data.reference_op_data.output_activation_max; + + dw_conv_params->ch_mult = params.depth_multiplier; + + quant_params->multiplier = + data.reference_op_data.per_channel_output_multiplier; + quant_params->shift = data.reference_op_data.per_channel_output_shift; + + RuntimeShape filter_shape = tflite::micro::GetTensorShape(filter); + RuntimeShape input_shape = tflite::micro::GetTensorShape(input); + RuntimeShape output_shape = tflite::micro::GetTensorShape(output); + RuntimeShape bias_shape = tflite::micro::GetTensorShape(bias); + + TFLITE_DCHECK_LE(dw_conv_params->activation.min, + dw_conv_params->activation.max); + + const int batch_size = MatchingDim(input_shape, 0, output_shape, 0); + const int output_depth = MatchingDim(filter_shape, 3, output_shape, 3); + + if (tflite::micro::GetOptionalTensorData(bias)) { + TFLITE_DCHECK_EQ(bias_shape.FlatSize(), output_depth); + } + + input_dims->n = batch_size; + input_dims->h = input_shape.Dims(1); + input_dims->w = input_shape.Dims(2); + input_dims->c = input_shape.Dims(3); + + filter_dims->n = filter_shape.Dims(0); + filter_dims->h = filter_shape.Dims(1); + filter_dims->w = filter_shape.Dims(2); + filter_dims->c = output_depth; + + bias_dims->n = 1; + bias_dims->h = 1; + bias_dims->w = 1; + bias_dims->c = output_depth; + + output_dims->n = batch_size; + output_dims->h = output_shape.Dims(1); + 
output_dims->w = output_shape.Dims(2); + output_dims->c = output_depth; +} + +void EvalQuantizedPerChannel(TfLiteContext* context, TfLiteNode* node, + const TfLiteDepthwiseConvParams& params, + const OpData& data, const TfLiteEvalTensor* input, + const TfLiteEvalTensor* filter, + const TfLiteEvalTensor* bias, + TfLiteEvalTensor* output) { + cmsis_nn_dw_conv_params dw_conv_params; + cmsis_nn_per_channel_quant_params quant_params; + cmsis_nn_dims input_dims; + cmsis_nn_dims filter_dims; + cmsis_nn_dims bias_dims; + cmsis_nn_dims output_dims; + + PopulateDwConvParams(&dw_conv_params, &quant_params, &input_dims, + &filter_dims, &bias_dims, &output_dims, params, data, + input, filter, bias, output); + + cmsis_nn_context ctx; + ctx.buf = nullptr; + /* 'size' is unused */ + ctx.size = 0; + + if (data.buffer_idx > -1) { + ctx.buf = context->GetScratchBuffer(context, data.buffer_idx); + } + + TFLITE_DCHECK_EQ( + arm_depthwise_conv_wrapper_s8( + &ctx, &dw_conv_params, &quant_params, &input_dims, + tflite::micro::GetTensorData(input), &filter_dims, + tflite::micro::GetTensorData(filter), &bias_dims, + tflite::micro::GetOptionalTensorData(bias), &output_dims, + tflite::micro::GetTensorData(output)), + ARM_CMSIS_NN_SUCCESS); +} + +void EvalQuantizedPerChannel16x8(TfLiteContext* context, TfLiteNode* node, + const TfLiteDepthwiseConvParams& params, + const OpData& data, + const TfLiteEvalTensor* input, + const TfLiteEvalTensor* filter, + const TfLiteEvalTensor* bias, + TfLiteEvalTensor* output) { + cmsis_nn_dw_conv_params dw_conv_params; + cmsis_nn_per_channel_quant_params quant_params; + cmsis_nn_dims input_dims; + cmsis_nn_dims filter_dims; + cmsis_nn_dims bias_dims; + cmsis_nn_dims output_dims; + + PopulateDwConvParams(&dw_conv_params, &quant_params, &input_dims, + &filter_dims, &bias_dims, &output_dims, params, data, + input, filter, bias, output); + + cmsis_nn_context ctx; + ctx.buf = nullptr; + /* 'size' is unused */ + ctx.size = 0; + + TFLITE_DCHECK_EQ( + 
arm_depthwise_conv_s16( + &ctx, &dw_conv_params, &quant_params, &input_dims, + tflite::micro::GetTensorData(input), &filter_dims, + tflite::micro::GetTensorData(filter), &bias_dims, + tflite::micro::GetOptionalTensorData(bias), &output_dims, + tflite::micro::GetTensorData(output)), + ARM_CMSIS_NN_SUCCESS); +} + +TfLiteStatus Eval(TfLiteContext* context, TfLiteNode* node) { + TFLITE_DCHECK(node->user_data != nullptr); + TFLITE_DCHECK(node->builtin_data != nullptr); + + const auto& params = + *(reinterpret_cast(node->builtin_data)); + const OpData& data = *(static_cast(node->user_data)); + + TfLiteEvalTensor* output = + tflite::micro::GetEvalOutput(context, node, kDepthwiseConvOutputTensor); + const TfLiteEvalTensor* input = + tflite::micro::GetEvalInput(context, node, kDepthwiseConvInputTensor); + const TfLiteEvalTensor* filter = + tflite::micro::GetEvalInput(context, node, kDepthwiseConvWeightsTensor); + const TfLiteEvalTensor* bias = + (NumInputs(node) == 3) + ? tflite::micro::GetEvalInput(context, node, kDepthwiseConvBiasTensor) + : nullptr; + + TfLiteEvalTensor filter_int8 = tflite::micro::MakeUnpackedInt4Tensor( + context, data.reference_op_data.filter_buffer_index, filter); + + switch (input->type) { // Already know in/out types are same. 
+ case kTfLiteFloat32: { + tflite::reference_ops::DepthwiseConv( + DepthwiseConvParamsFloat(params, data.reference_op_data), + tflite::micro::GetTensorShape(input), + tflite::micro::GetTensorData(input), + tflite::micro::GetTensorShape(filter), + tflite::micro::GetTensorData(filter), + tflite::micro::GetTensorShape(bias), + tflite::micro::GetOptionalTensorData(bias), + tflite::micro::GetTensorShape(output), + tflite::micro::GetTensorData(output)); + break; + } + case kTfLiteInt8: + switch (filter_int8.type) { + case kTfLiteInt8: { + EvalQuantizedPerChannel(context, node, params, data, input, + &filter_int8, bias, output); + break; + } + default: { + MicroPrintf("Filter type %s (%d) not supported.", + TfLiteTypeGetName(filter->type), filter->type); + return kTfLiteError; + } + } + break; + case kTfLiteInt16: + EvalQuantizedPerChannel16x8(context, node, params, data, input, filter, + bias, output); + break; + default: + MicroPrintf("Type %s (%d) not supported.", TfLiteTypeGetName(input->type), + input->type); + return kTfLiteError; + } + return kTfLiteOk; +} + +TfLiteStatus EvalInt8(TfLiteContext* context, TfLiteNode* node) { + TFLITE_DCHECK(node->user_data != nullptr); + TFLITE_DCHECK(node->builtin_data != nullptr); + + const auto& params = + *(reinterpret_cast(node->builtin_data)); + const OpData& data = *(static_cast(node->user_data)); + + TfLiteEvalTensor* output = + tflite::micro::GetEvalOutput(context, node, kDepthwiseConvOutputTensor); + const TfLiteEvalTensor* input = + tflite::micro::GetEvalInput(context, node, kDepthwiseConvInputTensor); + const TfLiteEvalTensor* filter = + tflite::micro::GetEvalInput(context, node, kDepthwiseConvWeightsTensor); + const TfLiteEvalTensor* bias = + (NumInputs(node) == 3) + ? 
tflite::micro::GetEvalInput(context, node, kDepthwiseConvBiasTensor) + : nullptr; + + TfLiteEvalTensor filter_int8 = tflite::micro::MakeUnpackedInt4Tensor( + context, data.reference_op_data.filter_buffer_index, filter); + + EvalQuantizedPerChannel(context, node, params, data, input, &filter_int8, + bias, output); + return kTfLiteOk; +} + +TfLiteStatus EvalInt16x8(TfLiteContext* context, TfLiteNode* node) { + TFLITE_DCHECK(node->user_data != nullptr); + TFLITE_DCHECK(node->builtin_data != nullptr); + + const auto& params = + *(reinterpret_cast(node->builtin_data)); + const OpData& data = *(static_cast(node->user_data)); + + TfLiteEvalTensor* output = + tflite::micro::GetEvalOutput(context, node, kDepthwiseConvOutputTensor); + const TfLiteEvalTensor* input = + tflite::micro::GetEvalInput(context, node, kDepthwiseConvInputTensor); + const TfLiteEvalTensor* filter = + tflite::micro::GetEvalInput(context, node, kDepthwiseConvWeightsTensor); + const TfLiteEvalTensor* bias = + (NumInputs(node) == 3) + ? tflite::micro::GetEvalInput(context, node, kDepthwiseConvBiasTensor) + : nullptr; + + EvalQuantizedPerChannel16x8(context, node, params, data, input, filter, bias, + output); + return kTfLiteOk; +} + +} // namespace + +TFLMRegistration Register_DEPTHWISE_CONV_2D() { + return tflite::micro::RegisterOp(Init, Prepare, Eval); +} + +TFLMRegistration Register_DEPTHWISE_CONV_2D_INT8() { + return tflite::micro::RegisterOp(Init, Prepare, EvalInt8); +} + +TFLMRegistration Register_DEPTHWISE_CONV_2D_INT16() { + return tflite::micro::RegisterOp(Init, Prepare, EvalInt16x8); +} + +} // namespace tflite diff --git a/tensorflow/lite/micro/kernels/cmsis_nn/fully_connected.cc b/tensorflow/lite/micro/kernels/cmsis_nn/fully_connected.cc new file mode 100644 index 0000000..a7ab8f1 --- /dev/null +++ b/tensorflow/lite/micro/kernels/cmsis_nn/fully_connected.cc @@ -0,0 +1,436 @@ +/* Copyright 2022 The TensorFlow Authors. All Rights Reserved. 
+ +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#include "tensorflow/lite/micro/kernels/fully_connected.h" + +#include "Include/arm_nnfunctions.h" +#include "tensorflow/lite/c/builtin_op_data.h" +#include "tensorflow/lite/c/common.h" +#include "tensorflow/lite/kernels/internal/common.h" +#include "tensorflow/lite/kernels/internal/portable_tensor_utils.h" +#include "tensorflow/lite/kernels/internal/quantization_util.h" +#include "tensorflow/lite/kernels/internal/reference/fully_connected.h" +#include "tensorflow/lite/kernels/internal/reference/integer_ops/fully_connected.h" +#include "tensorflow/lite/kernels/internal/tensor_ctypes.h" +#include "tensorflow/lite/kernels/kernel_util.h" +#include "tensorflow/lite/micro/kernels/kernel_util.h" +#include "tensorflow/lite/micro/micro_log.h" + +namespace tflite { +namespace { + +struct OpData { + OpDataFullyConnected reference_op_data; + + // Conv 1x1 that may be invoked in some cases currently need per channel + // quantization. + int32_t* per_channel_output_multiplier; + int32_t* per_channel_output_shift; + + // Index to buffer for optimizations if applicable. 
+ int buffer_idx; + + int32_t batches; + int32_t accum_depth; + int32_t output_depth; +}; + +void* Init(TfLiteContext* context, const char* buffer, size_t length) { + TFLITE_DCHECK(context->AllocatePersistentBuffer != nullptr); + return context->AllocatePersistentBuffer(context, sizeof(OpData)); +} + +TfLiteStatus Prepare(TfLiteContext* context, TfLiteNode* node) { + TFLITE_DCHECK(node->user_data != nullptr); + TFLITE_DCHECK(node->builtin_data != nullptr); + + OpData* data = static_cast(node->user_data); + const auto params = + static_cast(node->builtin_data); + + MicroContext* micro_context = GetMicroContext(context); + TfLiteTensor* input = + micro_context->AllocateTempInputTensor(node, kFullyConnectedInputTensor); + TF_LITE_ENSURE(context, input != nullptr); + TfLiteTensor* filter = micro_context->AllocateTempInputTensor( + node, kFullyConnectedWeightsTensor); + TF_LITE_ENSURE(context, filter != nullptr); + TfLiteTensor* bias = + micro_context->AllocateTempInputTensor(node, kFullyConnectedBiasTensor); + TfLiteTensor* output = micro_context->AllocateTempOutputTensor( + node, kFullyConnectedOutputTensor); + TF_LITE_ENSURE(context, output != nullptr); + + TF_LITE_ENSURE_TYPES_EQ(context, input->type, output->type); + + const RuntimeShape filter_shape = GetTensorShape(filter); + const RuntimeShape output_shape = GetTensorShape(output); + const int filter_dim_count = filter_shape.DimensionsCount(); + const int output_dim_count = output_shape.DimensionsCount(); + cmsis_nn_dims filter_dims; + filter_dims.n = filter_shape.Dims(filter_dim_count - 1); + filter_dims.h = 1; + filter_dims.w = 1; + filter_dims.c = output_shape.Dims(output_dim_count - 1); + + data->accum_depth = filter_shape.Dims(filter_dim_count - 1); + data->batches = FlatSizeSkipDim(output_shape, output_dim_count - 1); + data->output_depth = output_shape.Dims(output_dim_count - 1); + + // Set buffer index to a reset value + data->buffer_idx = -1; + TF_LITE_ENSURE_STATUS(CalculateOpDataFullyConnected( + 
context, params->activation, input->type, input, filter, bias, output, + &(data->reference_op_data))); + + int32_t buf_size = 0; + + if (input->type == kTfLiteInt16) { + TF_LITE_ENSURE_EQ(context, input->params.zero_point, 0); + TF_LITE_ENSURE_EQ(context, output->params.zero_point, 0); + buf_size = arm_fully_connected_s16_get_buffer_size(&filter_dims); + } else if (input->type == kTfLiteInt8) { + const RuntimeShape input_shape = GetTensorShape(input); + + TFLITE_DCHECK_GE(output_dim_count, 2); + TFLITE_DCHECK_LE(output_dim_count, 4); + + if (output_dim_count > 2 && data->accum_depth % 4 == 0) { + data->per_channel_output_multiplier = + static_cast(context->AllocatePersistentBuffer( + context, data->output_depth * sizeof(int32_t))); + data->per_channel_output_shift = + static_cast(context->AllocatePersistentBuffer( + context, data->output_depth * sizeof(int32_t))); + + cmsis_nn_dims input_dims; + input_dims.n = data->batches; + input_dims.h = 1; + input_dims.w = 1; + input_dims.c = data->accum_depth; + + buf_size = arm_convolve_1x1_s8_fast_get_buffer_size(&input_dims); + } else { + buf_size = arm_fully_connected_s8_get_buffer_size(&filter_dims); + } + } + + if (filter->type == kTfLiteInt4) { + int filter_size = + RuntimeShape(filter->dims->size, + reinterpret_cast(filter->dims->data)) + .FlatSize(); + context->RequestScratchBufferInArena( + context, filter_size, &data->reference_op_data.filter_buffer_index); + } + + if (buf_size > 0) { + TF_LITE_ENSURE_STATUS(context->RequestScratchBufferInArena( + context, buf_size, &data->buffer_idx)); + } + + micro_context->DeallocateTempTfLiteTensor(output); + micro_context->DeallocateTempTfLiteTensor(input); + micro_context->DeallocateTempTfLiteTensor(filter); + if (bias != nullptr) { + micro_context->DeallocateTempTfLiteTensor(bias); + } + + return kTfLiteOk; +} + +void PopulateCommonParams(TfLiteContext* context, + cmsis_nn_per_tensor_quant_params* const quant_params, + cmsis_nn_dims* const input_dims, + cmsis_nn_dims* const 
filter_dims, + cmsis_nn_dims* const bias_dims, + cmsis_nn_dims* const output_dims, + cmsis_nn_context* const ctx, const OpData& data) { + quant_params->multiplier = data.reference_op_data.output_multiplier; + quant_params->shift = data.reference_op_data.output_shift; + + input_dims->n = data.batches; + input_dims->h = 1; + input_dims->w = 1; + input_dims->c = data.accum_depth; + + filter_dims->n = data.accum_depth; + filter_dims->h = 1; + filter_dims->w = 1; + filter_dims->c = data.output_depth; + + bias_dims->n = 1; + bias_dims->h = 1; + bias_dims->w = 1; + bias_dims->c = data.output_depth; + + output_dims->n = data.batches; + output_dims->h = 1; + output_dims->w = 1; + output_dims->c = data.output_depth; + + ctx->buf = nullptr; + ctx->size = 0; + if (data.buffer_idx > -1) { + ctx->buf = context->GetScratchBuffer(context, data.buffer_idx); + } +} + +TfLiteStatus EvalQuantizedInt8(TfLiteContext* context, TfLiteNode* node, + const OpData& data, + const TfLiteEvalTensor* input, + const TfLiteEvalTensor* filter, + const TfLiteEvalTensor* bias, + TfLiteEvalTensor* output) { + const RuntimeShape output_shape = tflite::micro::GetTensorShape(output); + const int output_dim_count = output_shape.DimensionsCount(); + TFLITE_DCHECK_GE(output_dim_count, 2); + TFLITE_DCHECK_LE(output_dim_count, 4); + + cmsis_nn_per_tensor_quant_params quant_params; + cmsis_nn_dims input_dims; + cmsis_nn_dims filter_dims; + cmsis_nn_dims bias_dims; + cmsis_nn_dims output_dims; + cmsis_nn_context ctx; + + PopulateCommonParams(context, &quant_params, &input_dims, &filter_dims, + &bias_dims, &output_dims, &ctx, data); + + const int32_t* bias_data = + tflite::micro::GetOptionalTensorData(bias); + + if (output_dim_count > 2 && data.accum_depth % 4 == 0) { + cmsis_nn_conv_params conv_params; + conv_params.dilation.h = 1; + conv_params.dilation.w = 1; + conv_params.input_offset = -data.reference_op_data.input_zero_point; + conv_params.output_offset = data.reference_op_data.output_zero_point; + 
conv_params.stride.h = 1; + conv_params.stride.w = 1; + conv_params.padding.h = 0; + conv_params.padding.w = 0; + conv_params.activation.min = data.reference_op_data.output_activation_min; + conv_params.activation.max = data.reference_op_data.output_activation_max; + + cmsis_nn_per_channel_quant_params per_channel_quant_params; + per_channel_quant_params.multiplier = + const_cast(data.per_channel_output_multiplier); + per_channel_quant_params.shift = + const_cast(data.per_channel_output_shift); + + for (int i = 0; i < data.output_depth; i++) { + per_channel_quant_params.multiplier[i] = quant_params.multiplier; + per_channel_quant_params.shift[i] = quant_params.shift; + } + + TF_LITE_ENSURE_EQ( + context, + arm_convolve_1x1_s8_fast( + &ctx, &conv_params, &per_channel_quant_params, &input_dims, + tflite::micro::GetTensorData(input), &filter_dims, + tflite::micro::GetTensorData(filter), &bias_dims, bias_data, + &output_dims, tflite::micro::GetTensorData(output)), + ARM_CMSIS_NN_SUCCESS); + } else { + cmsis_nn_fc_params fc_params; + fc_params.input_offset = -data.reference_op_data.input_zero_point; + fc_params.output_offset = data.reference_op_data.output_zero_point; + fc_params.filter_offset = 0; + fc_params.activation.min = data.reference_op_data.output_activation_min; + fc_params.activation.max = data.reference_op_data.output_activation_max; + + TF_LITE_ENSURE_EQ( + context, + arm_fully_connected_s8( + &ctx, &fc_params, &quant_params, &input_dims, + tflite::micro::GetTensorData(input), &filter_dims, + tflite::micro::GetTensorData(filter), &bias_dims, bias_data, + &output_dims, tflite::micro::GetTensorData(output)), + ARM_CMSIS_NN_SUCCESS); + } + return kTfLiteOk; +} + +TfLiteStatus EvalQuantizedInt16(TfLiteContext* context, TfLiteNode* node, + const OpData& data, + const TfLiteEvalTensor* input, + const TfLiteEvalTensor* filter, + const TfLiteEvalTensor* bias, + TfLiteEvalTensor* output) { + cmsis_nn_per_tensor_quant_params quant_params; + cmsis_nn_dims input_dims; 
+ cmsis_nn_dims filter_dims; + cmsis_nn_dims bias_dims; + cmsis_nn_dims output_dims; + cmsis_nn_context ctx; + + PopulateCommonParams(context, &quant_params, &input_dims, &filter_dims, + &bias_dims, &output_dims, &ctx, data); + + const int64_t* bias_data = + tflite::micro::GetOptionalTensorData(bias); + + cmsis_nn_fc_params fc_params; + fc_params.input_offset = -data.reference_op_data.input_zero_point; + fc_params.output_offset = data.reference_op_data.output_zero_point; + fc_params.filter_offset = 0; + fc_params.activation.min = data.reference_op_data.output_activation_min; + fc_params.activation.max = data.reference_op_data.output_activation_max; + + TF_LITE_ENSURE_EQ( + context, + arm_fully_connected_s16( + &ctx, &fc_params, &quant_params, &input_dims, + tflite::micro::GetTensorData(input), &filter_dims, + tflite::micro::GetTensorData(filter), &bias_dims, bias_data, + &output_dims, tflite::micro::GetTensorData(output)), + ARM_CMSIS_NN_SUCCESS); + + return kTfLiteOk; +} + +TfLiteStatus Eval(TfLiteContext* context, TfLiteNode* node) { + TFLITE_DCHECK(node->builtin_data != nullptr); + const auto* params = + static_cast(node->builtin_data); + + const TfLiteEvalTensor* input = + tflite::micro::GetEvalInput(context, node, kFullyConnectedInputTensor); + const TfLiteEvalTensor* filter = + tflite::micro::GetEvalInput(context, node, kFullyConnectedWeightsTensor); + const TfLiteEvalTensor* bias = + tflite::micro::GetEvalInput(context, node, kFullyConnectedBiasTensor); + TfLiteEvalTensor* output = + tflite::micro::GetEvalOutput(context, node, kFullyConnectedOutputTensor); + + TFLITE_DCHECK(node->user_data != nullptr); + const OpData& data = *(static_cast(node->user_data)); + + TfLiteEvalTensor filter_int8 = tflite::micro::MakeUnpackedInt4Tensor( + context, data.reference_op_data.filter_buffer_index, filter); + + // Checks in Prepare ensure input, output and filter types are all the same. 
+ switch (input->type) { + case kTfLiteFloat32: { + const float* bias_data = + tflite::micro::GetOptionalTensorData(bias); + tflite::reference_ops::FullyConnected( + FullyConnectedParamsFloat(params->activation), + tflite::micro::GetTensorShape(input), + tflite::micro::GetTensorData(input), + tflite::micro::GetTensorShape(filter), + tflite::micro::GetTensorData(filter), + tflite::micro::GetTensorShape(bias), bias_data, + tflite::micro::GetTensorShape(output), + tflite::micro::GetTensorData(output)); + break; + } + case kTfLiteInt8: { + switch (filter_int8.type) { + case kTfLiteInt8: + return EvalQuantizedInt8(context, node, data, input, &filter_int8, + bias, output); + default: + MicroPrintf("Filter Type %s (%d) not supported.", + TfLiteTypeGetName(filter->type), filter->type); + return kTfLiteError; + } + break; + } + case kTfLiteInt16: { + return EvalQuantizedInt16(context, node, data, input, filter, bias, + output); + } + default: { + MicroPrintf("Type %s (%d) not supported.", TfLiteTypeGetName(input->type), + input->type); + return kTfLiteError; + } + } + return kTfLiteOk; +} + +// Note that the current function names are not ideal at all (this EvalInt8 +// function internally calls EvalQuantizedInt8, and there is similar name +// aliasing in the Eval function too). We will be attempting to have a more +// descriptive naming convention but holding off on that for now, since the +// renaming might be coupled with reducing code duplication and some additional +// refactoring. 
+TfLiteStatus EvalInt8(TfLiteContext* context, TfLiteNode* node) { + const TfLiteEvalTensor* input = + tflite::micro::GetEvalInput(context, node, kFullyConnectedInputTensor); + const TfLiteEvalTensor* filter = + tflite::micro::GetEvalInput(context, node, kFullyConnectedWeightsTensor); + const TfLiteEvalTensor* bias = + tflite::micro::GetEvalInput(context, node, kFullyConnectedBiasTensor); + TfLiteEvalTensor* output = + tflite::micro::GetEvalOutput(context, node, kFullyConnectedOutputTensor); + + TFLITE_DCHECK(node->user_data != nullptr); + const OpData& data = *(static_cast(node->user_data)); + + // Checks in Prepare ensure input, output and filter types are all the same. + if (input->type != kTfLiteInt8) { + MicroPrintf("Type %s (%d) not supported.", TfLiteTypeGetName(input->type), + input->type); + return kTfLiteError; + } + + TfLiteEvalTensor filter_int8 = tflite::micro::MakeUnpackedInt4Tensor( + context, data.reference_op_data.filter_buffer_index, filter); + + return EvalQuantizedInt8(context, node, data, input, &filter_int8, bias, + output); +} + +TfLiteStatus EvalInt16(TfLiteContext* context, TfLiteNode* node) { + const TfLiteEvalTensor* input = + tflite::micro::GetEvalInput(context, node, kFullyConnectedInputTensor); + const TfLiteEvalTensor* filter = + tflite::micro::GetEvalInput(context, node, kFullyConnectedWeightsTensor); + const TfLiteEvalTensor* bias = + tflite::micro::GetEvalInput(context, node, kFullyConnectedBiasTensor); + TfLiteEvalTensor* output = + tflite::micro::GetEvalOutput(context, node, kFullyConnectedOutputTensor); + + TFLITE_DCHECK(node->user_data != nullptr); + const OpData& data = *(static_cast(node->user_data)); + + // Checks in Prepare ensure input, output and filter types are all the same. 
+ if (input->type != kTfLiteInt16) { + MicroPrintf("Type %s (%d) not supported.", TfLiteTypeGetName(input->type), + input->type); + return kTfLiteError; + } + + return EvalQuantizedInt16(context, node, data, input, filter, bias, output); +} + +} // namespace + +TFLMRegistration Register_FULLY_CONNECTED() { + return tflite::micro::RegisterOp(Init, Prepare, Eval); +} + +TFLMRegistration Register_FULLY_CONNECTED_INT8() { + return tflite::micro::RegisterOp(Init, Prepare, EvalInt8); +} + +TFLMRegistration Register_FULLY_CONNECTED_INT16() { + return tflite::micro::RegisterOp(Init, Prepare, EvalInt16); +} + +} // namespace tflite diff --git a/tensorflow/lite/micro/kernels/cmsis_nn/mul.cc b/tensorflow/lite/micro/kernels/cmsis_nn/mul.cc new file mode 100644 index 0000000..571d88a --- /dev/null +++ b/tensorflow/lite/micro/kernels/cmsis_nn/mul.cc @@ -0,0 +1,184 @@ +/* Copyright 2022 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/ + +#include "tensorflow/lite/kernels/internal/reference/mul.h" + +#include "Include/arm_nnfunctions.h" +#include "tensorflow/lite/kernels/internal/quantization_util.h" +#include "tensorflow/lite/kernels/internal/reference/integer_ops/mul.h" +#include "tensorflow/lite/kernels/internal/reference/process_broadcast_shapes.h" +#include "tensorflow/lite/kernels/internal/tensor_ctypes.h" +#include "tensorflow/lite/kernels/kernel_util.h" +#include "tensorflow/lite/micro/kernels/kernel_util.h" +#include "tensorflow/lite/micro/kernels/mul.h" +#include "tensorflow/lite/micro/memory_helpers.h" +#include "tensorflow/lite/micro/micro_log.h" + +namespace tflite { +namespace { + +void EvalQuantized(TfLiteContext* context, TfLiteNode* node, + const OpDataMul* data, const TfLiteEvalTensor* input1, + const TfLiteEvalTensor* input2, TfLiteEvalTensor* output) { + tflite::ArithmeticParams op_params = {}; + + op_params.quantized_activation_min = data->output_activation_min; + op_params.quantized_activation_max = data->output_activation_max; + op_params.float_activation_max = data->output_activation_max_f32; + op_params.input1_offset = -data->input1_zero_point; + op_params.input2_offset = -data->input2_zero_point; + op_params.output_offset = data->output_zero_point; + op_params.output_multiplier = data->output_multiplier; + op_params.output_shift = data->output_shift; + + bool need_broadcast = reference_ops::ProcessBroadcastShapes( + tflite::micro::GetTensorShape(input1), + tflite::micro::GetTensorShape(input2), &op_params); + + if (need_broadcast) { + if (input1->type == kTfLiteInt8) { + reference_integer_ops::BroadcastMul4DSlow( + op_params, tflite::micro::GetTensorShape(input1), + tflite::micro::GetTensorData(input1), + tflite::micro::GetTensorShape(input2), + tflite::micro::GetTensorData(input2), + tflite::micro::GetTensorShape(output), + tflite::micro::GetTensorData(output)); + } else if (input1->type 
== kTfLiteInt16) { + reference_integer_ops::BroadcastMul4DSlow( + op_params, tflite::micro::GetTensorShape(input1), + tflite::micro::GetTensorData(input1), + tflite::micro::GetTensorShape(input2), + tflite::micro::GetTensorData(input2), + tflite::micro::GetTensorShape(output), + tflite::micro::GetTensorData(output)); + } + + } else { + if (input1->type == kTfLiteInt8) { + arm_elementwise_mul_s8( + tflite::micro::GetTensorData(input1), + tflite::micro::GetTensorData(input2), op_params.input1_offset, + op_params.input2_offset, tflite::micro::GetTensorData(output), + op_params.output_offset, op_params.output_multiplier, + op_params.output_shift, op_params.quantized_activation_min, + op_params.quantized_activation_max, + MatchingElementsSize(tflite::micro::GetTensorShape(input1), + tflite::micro::GetTensorShape(input2), + tflite::micro::GetTensorShape(output))); + } else if (input1->type == kTfLiteInt16) { + arm_elementwise_mul_s16( + tflite::micro::GetTensorData(input1), + tflite::micro::GetTensorData(input2), + op_params.input1_offset, op_params.input2_offset, + tflite::micro::GetTensorData(output), + op_params.output_offset, op_params.output_multiplier, + op_params.output_shift, op_params.quantized_activation_min, + op_params.quantized_activation_max, + MatchingElementsSize(tflite::micro::GetTensorShape(input1), + tflite::micro::GetTensorShape(input2), + tflite::micro::GetTensorShape(output))); + } + } +} + +} // namespace + +TfLiteStatus Eval(TfLiteContext* context, TfLiteNode* node) { + TFLITE_DCHECK(node->builtin_data != nullptr); + auto* params = reinterpret_cast(node->builtin_data); + + TFLITE_DCHECK(node->user_data != nullptr); + const OpDataMul* data = static_cast(node->user_data); + + const TfLiteEvalTensor* input1 = + tflite::micro::GetEvalInput(context, node, kMulInput1Tensor); + const TfLiteEvalTensor* input2 = + tflite::micro::GetEvalInput(context, node, kMulInput2Tensor); + TfLiteEvalTensor* output = + tflite::micro::GetEvalOutput(context, node, 
kMulOutputTensor); + + switch (input1->type) { + case kTfLiteInt8: + EvalQuantized(context, node, data, input1, input2, output); + break; + case kTfLiteInt16: + EvalQuantized(context, node, data, input1, input2, output); + break; + case kTfLiteInt32: + EvalMulQuantizedReference(context, node, data, input1, input2, output); + break; + case kTfLiteFloat32: + EvalMulFloatReference(context, node, params, data, input1, input2, + output); + break; + default: + MicroPrintf("Type %s (%d) not supported.", + TfLiteTypeGetName(input1->type), input1->type); + return kTfLiteError; + } + + return kTfLiteOk; +} + +TfLiteStatus EvalInt8(TfLiteContext* context, TfLiteNode* node) { + TFLITE_DCHECK(node->builtin_data != nullptr); + TFLITE_DCHECK(node->user_data != nullptr); + + const OpDataMul* data = static_cast(node->user_data); + const TfLiteEvalTensor* input1 = + tflite::micro::GetEvalInput(context, node, kMulInput1Tensor); + const TfLiteEvalTensor* input2 = + tflite::micro::GetEvalInput(context, node, kMulInput2Tensor); + TfLiteEvalTensor* output = + tflite::micro::GetEvalOutput(context, node, kMulOutputTensor); + TFLITE_DCHECK(input1->type == kTfLiteInt8); + + EvalQuantized(context, node, data, input1, input2, output); + + return kTfLiteOk; +} + +TfLiteStatus EvalInt16(TfLiteContext* context, TfLiteNode* node) { + TFLITE_DCHECK(node->builtin_data != nullptr); + TFLITE_DCHECK(node->user_data != nullptr); + + const OpDataMul* data = static_cast(node->user_data); + const TfLiteEvalTensor* input1 = + tflite::micro::GetEvalInput(context, node, kMulInput1Tensor); + const TfLiteEvalTensor* input2 = + tflite::micro::GetEvalInput(context, node, kMulInput2Tensor); + TfLiteEvalTensor* output = + tflite::micro::GetEvalOutput(context, node, kMulOutputTensor); + TFLITE_DCHECK(input1->type == kTfLiteInt16); + + EvalQuantized(context, node, data, input1, input2, output); + + return kTfLiteOk; +} + +TFLMRegistration Register_MUL() { + return tflite::micro::RegisterOp(MulInit, MulPrepare, Eval); 
+} + +TFLMRegistration Register_MUL_INT8() { + return tflite::micro::RegisterOp(MulInit, MulPrepare, EvalInt8); +} + +TFLMRegistration Register_MUL_INT16() { + return tflite::micro::RegisterOp(MulInit, MulPrepare, EvalInt16); +} + +} // namespace tflite diff --git a/tensorflow/lite/micro/kernels/cmsis_nn/pooling.cc b/tensorflow/lite/micro/kernels/cmsis_nn/pooling.cc new file mode 100644 index 0000000..d8311db --- /dev/null +++ b/tensorflow/lite/micro/kernels/cmsis_nn/pooling.cc @@ -0,0 +1,346 @@ +/* Copyright 2022 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ +#include "tensorflow/lite/kernels/internal/reference/pooling.h" + +#include "Include/arm_nnfunctions.h" +#include "tensorflow/lite/c/builtin_op_data.h" +#include "tensorflow/lite/c/common.h" +#include "tensorflow/lite/kernels/internal/tensor_ctypes.h" +#include "tensorflow/lite/kernels/kernel_util.h" +#include "tensorflow/lite/micro/kernels/kernel_util.h" +#include "tensorflow/lite/micro/kernels/pooling.h" +#include "tensorflow/lite/micro/micro_log.h" + +namespace tflite { + +namespace { + +struct OpData { + OpDataPooling reference_op_data; + + // Index to buffer for optimizations if applicable. 
+ int buffer_idx; +}; + +void PopulateCommonParams( + TfLiteContext* const context, cmsis_nn_dims* const input_dims, + cmsis_nn_dims* const output_dims, cmsis_nn_pool_params* const pool_params, + cmsis_nn_context* const ctx, cmsis_nn_dims* const filter_dims, + const OpData& data, const RuntimeShape& input_shape, + const RuntimeShape& output_shape, const TfLitePoolParams* params) { + const int depth = MatchingDim(input_shape, 3, output_shape, 3); + + input_dims->n = 1; + input_dims->h = input_shape.Dims(1); + input_dims->w = input_shape.Dims(2); + input_dims->c = depth; + + output_dims->n = 1; + output_dims->h = output_shape.Dims(1); + output_dims->w = output_shape.Dims(2); + output_dims->c = depth; + + pool_params->stride.h = params->stride_height; + pool_params->stride.w = params->stride_width; + pool_params->padding.h = data.reference_op_data.padding.height; + pool_params->padding.w = data.reference_op_data.padding.width; + pool_params->activation.min = data.reference_op_data.activation_min; + pool_params->activation.max = data.reference_op_data.activation_max; + + filter_dims->n = 1; + filter_dims->h = params->filter_height; + filter_dims->w = params->filter_width; + filter_dims->c = 1; + ctx->buf = nullptr; + ctx->size = 0; + if (data.buffer_idx > -1) { + ctx->buf = context->GetScratchBuffer(context, data.buffer_idx); + } +} + +void AverageEvalQuantized(TfLiteContext* context, const TfLiteNode* node, + const TfLitePoolParams* params, const OpData& data, + const TfLiteEvalTensor* input, + TfLiteEvalTensor* output) { + TFLITE_DCHECK((input->type == kTfLiteInt8) || (input->type == kTfLiteInt16)); + + RuntimeShape input_shape = micro::GetTensorShape(input); + TFLITE_DCHECK_EQ(input_shape.DimensionsCount(), 4); + + RuntimeShape output_shape = micro::GetTensorShape(output); + TFLITE_DCHECK_EQ(output_shape.DimensionsCount(), 4); + + cmsis_nn_dims input_dims; + cmsis_nn_dims output_dims; + cmsis_nn_pool_params pool_params; + cmsis_nn_dims filter_dims; + 
cmsis_nn_context ctx; + + PopulateCommonParams(context, &input_dims, &output_dims, &pool_params, &ctx, + &filter_dims, data, input_shape, output_shape, params); + + if (input->type == kTfLiteInt8) { + TFLITE_DCHECK_EQ( + arm_avgpool_s8(&ctx, &pool_params, &input_dims, + micro::GetTensorData(input), &filter_dims, + &output_dims, micro::GetTensorData(output)), + ARM_CMSIS_NN_SUCCESS); + } else { + TFLITE_DCHECK_EQ( + arm_avgpool_s16(&ctx, &pool_params, &input_dims, + micro::GetTensorData(input), &filter_dims, + &output_dims, micro::GetTensorData(output)), + ARM_CMSIS_NN_SUCCESS); + } +} + +TfLiteStatus MaxEvalQuantized(TfLiteContext* context, const TfLiteNode* node, + const TfLitePoolParams* params, + const OpData& data, const TfLiteEvalTensor* input, + TfLiteEvalTensor* output) { + TFLITE_DCHECK((input->type == kTfLiteInt8) || (input->type == kTfLiteInt16)); + + RuntimeShape input_shape = micro::GetTensorShape(input); + TFLITE_DCHECK_EQ(input_shape.DimensionsCount(), 4); + + RuntimeShape output_shape = micro::GetTensorShape(output); + TFLITE_DCHECK_EQ(output_shape.DimensionsCount(), 4); + + cmsis_nn_dims input_dims; + cmsis_nn_dims output_dims; + cmsis_nn_pool_params pool_params; + cmsis_nn_dims filter_dims; + cmsis_nn_context ctx; + + PopulateCommonParams(context, &input_dims, &output_dims, &pool_params, &ctx, + &filter_dims, data, input_shape, output_shape, params); + + if (input->type == kTfLiteInt8) { + TFLITE_DCHECK_EQ( + arm_max_pool_s8(&ctx, &pool_params, &input_dims, + micro::GetTensorData(input), &filter_dims, + &output_dims, micro::GetTensorData(output)), + ARM_CMSIS_NN_SUCCESS); + } else { + TFLITE_DCHECK_EQ( + arm_max_pool_s16(&ctx, &pool_params, &input_dims, + micro::GetTensorData(input), &filter_dims, + &output_dims, micro::GetTensorData(output)), + ARM_CMSIS_NN_SUCCESS); + } + + return kTfLiteOk; +} + +void* Init(TfLiteContext* context, const char* buffer, size_t length) { + TFLITE_DCHECK(context->AllocatePersistentBuffer != nullptr); + return 
context->AllocatePersistentBuffer(context, sizeof(OpData)); +} + +TfLiteStatus MaxPrepare(TfLiteContext* context, TfLiteNode* node) { + TF_LITE_ENSURE_STATUS(PoolingPrepare(context, node)); + // Set buffer index to a reset value + static_cast(node->user_data)->buffer_idx = -1; + return kTfLiteOk; +} + +TfLiteStatus AveragePrepare(TfLiteContext* context, TfLiteNode* node) { + TF_LITE_ENSURE_STATUS(PoolingPrepare(context, node)); + + MicroContext* micro_context = GetMicroContext(context); + + TfLiteTensor* input = + micro_context->AllocateTempInputTensor(node, kPoolingInputTensor); + TfLiteTensor* output = + micro_context->AllocateTempOutputTensor(node, kPoolingOutputTensor); + + if (input->type == kTfLiteInt8 || input->type == kTfLiteInt16) { + RuntimeShape input_shape = GetTensorShape(input); + TFLITE_DCHECK_EQ(input_shape.DimensionsCount(), 4); + + RuntimeShape output_shape = GetTensorShape(output); + TFLITE_DCHECK_EQ(output_shape.DimensionsCount(), 4); + + const int depth = MatchingDim(input_shape, 3, output_shape, 3); + const int output_width = output_shape.Dims(2); + + const int32_t buffer_size = + input->type == kTfLiteInt16 + ? 
arm_avgpool_s16_get_buffer_size(output_width, depth) + : arm_avgpool_s8_get_buffer_size(output_width, depth); + + auto* data = static_cast(node->user_data); + if (buffer_size > 0) { + TF_LITE_ENSURE_STATUS(context->RequestScratchBufferInArena( + context, buffer_size, &data->buffer_idx)); + } else { + data->buffer_idx = -1; + } + } + + micro_context->DeallocateTempTfLiteTensor(output); + micro_context->DeallocateTempTfLiteTensor(input); + return kTfLiteOk; +} + +TfLiteStatus AverageEval(TfLiteContext* context, TfLiteNode* node) { + TFLITE_DCHECK(node->builtin_data != nullptr); + auto* params = reinterpret_cast(node->builtin_data); + + TFLITE_DCHECK(node->user_data != nullptr); + const OpData& data = *(static_cast(node->user_data)); + + const TfLiteEvalTensor* input = + micro::GetEvalInput(context, node, kPoolingInputTensor); + TfLiteEvalTensor* output = + micro::GetEvalOutput(context, node, kPoolingOutputTensor); + + // Inputs and outputs share the same type, guaranteed by the converter. 
+ if (input->type == kTfLiteFloat32) { + AveragePoolingEvalFloat(context, node, params, &data.reference_op_data, + input, output); + } else if (input->type == kTfLiteInt8 || input->type == kTfLiteInt16) { + AverageEvalQuantized(context, node, params, data, input, output); + } else { + MicroPrintf("Input type %s is not currently supported", + TfLiteTypeGetName(input->type)); + return kTfLiteError; + } + + return kTfLiteOk; +} + +TfLiteStatus AverageEvalInt8(TfLiteContext* context, TfLiteNode* node) { + TFLITE_DCHECK(node->builtin_data != nullptr); + auto* params = reinterpret_cast(node->builtin_data); + + TFLITE_DCHECK(node->user_data != nullptr); + const OpData& data = *(static_cast(node->user_data)); + + const TfLiteEvalTensor* input = + micro::GetEvalInput(context, node, kPoolingInputTensor); + TFLITE_DCHECK(input->type == kTfLiteInt8); + TfLiteEvalTensor* output = + micro::GetEvalOutput(context, node, kPoolingOutputTensor); + + AverageEvalQuantized(context, node, params, data, input, output); + + return kTfLiteOk; +} + +TfLiteStatus AverageEvalInt16(TfLiteContext* context, TfLiteNode* node) { + TFLITE_DCHECK(node->builtin_data != nullptr); + auto* params = reinterpret_cast(node->builtin_data); + + TFLITE_DCHECK(node->user_data != nullptr); + const OpData& data = *(static_cast(node->user_data)); + + const TfLiteEvalTensor* input = + micro::GetEvalInput(context, node, kPoolingInputTensor); + TFLITE_DCHECK(input->type == kTfLiteInt16); + TfLiteEvalTensor* output = + micro::GetEvalOutput(context, node, kPoolingOutputTensor); + + AverageEvalQuantized(context, node, params, data, input, output); + + return kTfLiteOk; +} +TfLiteStatus MaxEval(TfLiteContext* context, TfLiteNode* node) { + TFLITE_DCHECK(node->builtin_data != nullptr); + auto* params = reinterpret_cast(node->builtin_data); + + TFLITE_DCHECK(node->user_data != nullptr); + const OpData& data = *(static_cast(node->user_data)); + + const TfLiteEvalTensor* input = + micro::GetEvalInput(context, node, 
kPoolingInputTensor); + TfLiteEvalTensor* output = + micro::GetEvalOutput(context, node, kPoolingOutputTensor); + + if (input->type == kTfLiteFloat32) { + MaxPoolingEvalFloat(context, node, params, &data.reference_op_data, input, + output); + } else if (input->type == kTfLiteInt8 || input->type == kTfLiteInt16) { + MaxEvalQuantized(context, node, params, data, input, output); + } else { + MicroPrintf("Input type %s is not currently supported", + TfLiteTypeGetName(input->type)); + return kTfLiteError; + } + + return kTfLiteOk; +} + +TfLiteStatus MaxEvalInt8(TfLiteContext* context, TfLiteNode* node) { + TFLITE_DCHECK(node->builtin_data != nullptr); + auto* params = reinterpret_cast(node->builtin_data); + + TFLITE_DCHECK(node->user_data != nullptr); + const OpData& data = *(static_cast(node->user_data)); + + const TfLiteEvalTensor* input = + micro::GetEvalInput(context, node, kPoolingInputTensor); + TFLITE_DCHECK(input->type == kTfLiteInt8); + TfLiteEvalTensor* output = + micro::GetEvalOutput(context, node, kPoolingOutputTensor); + + MaxEvalQuantized(context, node, params, data, input, output); + return kTfLiteOk; +} + +TfLiteStatus MaxEvalInt16(TfLiteContext* context, TfLiteNode* node) { + TFLITE_DCHECK(node->builtin_data != nullptr); + auto* params = reinterpret_cast(node->builtin_data); + + TFLITE_DCHECK(node->user_data != nullptr); + const OpData& data = *(static_cast(node->user_data)); + + const TfLiteEvalTensor* input = + micro::GetEvalInput(context, node, kPoolingInputTensor); + TFLITE_DCHECK(input->type == kTfLiteInt16); + TfLiteEvalTensor* output = + micro::GetEvalOutput(context, node, kPoolingOutputTensor); + + MaxEvalQuantized(context, node, params, data, input, output); + return kTfLiteOk; +} + +} // namespace + +TFLMRegistration Register_AVERAGE_POOL_2D_INT8() { + return tflite::micro::RegisterOp(Init, AveragePrepare, AverageEvalInt8); +} + +TFLMRegistration Register_AVERAGE_POOL_2D_INT16() { + return tflite::micro::RegisterOp(Init, AveragePrepare, 
AverageEvalInt16); +} + +TFLMRegistration Register_AVERAGE_POOL_2D() { + return tflite::micro::RegisterOp(Init, AveragePrepare, AverageEval); +} + +TFLMRegistration Register_MAX_POOL_2D_INT8() { + return tflite::micro::RegisterOp(Init, MaxPrepare, MaxEvalInt8); +} + +TFLMRegistration Register_MAX_POOL_2D_INT16() { + return tflite::micro::RegisterOp(Init, MaxPrepare, MaxEvalInt16); +} + +TFLMRegistration Register_MAX_POOL_2D() { + return tflite::micro::RegisterOp(Init, MaxPrepare, MaxEval); +} + +} // namespace tflite diff --git a/tensorflow/lite/micro/kernels/cmsis_nn/softmax.cc b/tensorflow/lite/micro/kernels/cmsis_nn/softmax.cc new file mode 100644 index 0000000..f83a090 --- /dev/null +++ b/tensorflow/lite/micro/kernels/cmsis_nn/softmax.cc @@ -0,0 +1,209 @@ +/* Copyright 2022 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/ + +#include "tensorflow/lite/micro/kernels/softmax.h" + +#include "Include/arm_nnfunctions.h" +#include "tensorflow/lite/c/common.h" +#include "tensorflow/lite/kernels/internal/common.h" +#include "tensorflow/lite/kernels/internal/quantization_util.h" +#include "tensorflow/lite/kernels/internal/reference/softmax.h" +#include "tensorflow/lite/kernels/internal/tensor_ctypes.h" +#include "tensorflow/lite/kernels/kernel_util.h" +#include "tensorflow/lite/kernels/op_macros.h" +#include "tensorflow/lite/micro/kernels/kernel_util.h" +#include "tensorflow/lite/micro/micro_log.h" + +namespace tflite { +namespace { + +struct CMSISNNSoftmaxParams { + SoftmaxParams softmax_params; + int32_t num_rows; + int32_t row_size; +}; + +void* Init(TfLiteContext* context, const char* buffer, size_t length) { + TFLITE_DCHECK(context->AllocatePersistentBuffer != nullptr); + return context->AllocatePersistentBuffer(context, + sizeof(CMSISNNSoftmaxParams)); +} + +TfLiteStatus Prepare(TfLiteContext* context, TfLiteNode* node) { + MicroContext* micro_context = GetMicroContext(context); + + TF_LITE_ENSURE_EQ(context, NumInputs(node), 1); + TF_LITE_ENSURE_EQ(context, NumOutputs(node), 1); + TfLiteTensor* input = micro_context->AllocateTempInputTensor(node, 0); + TF_LITE_ENSURE(context, input != nullptr); + TF_LITE_ENSURE(context, NumDimensions(input) >= 1); + TfLiteTensor* output = micro_context->AllocateTempOutputTensor(node, 0); + TF_LITE_ENSURE(context, output != nullptr); + + TF_LITE_ENSURE(context, node->user_data != nullptr); + CMSISNNSoftmaxParams* op_data = + static_cast(node->user_data); + + auto* params = static_cast(node->builtin_data); + auto ret_val = CalculateSoftmaxParams(context, input, output, params, + &op_data->softmax_params); + + const auto input_shape = GetTensorShape(input); + const auto output_shape = GetTensorShape(output); + const int trailing_dim = input_shape.DimensionsCount() - 1; + 
const int outer_size = + MatchingFlatSizeSkipDim(input_shape, trailing_dim, output_shape); + const int depth = + MatchingDim(input_shape, trailing_dim, output_shape, trailing_dim); + op_data->num_rows = outer_size; + op_data->row_size = depth; + + micro_context->DeallocateTempTfLiteTensor(input); + micro_context->DeallocateTempTfLiteTensor(output); + return ret_val; +} + +TfLiteStatus SoftmaxEval(TfLiteContext* context, TfLiteNode* node) { + const TfLiteEvalTensor* input = tflite::micro::GetEvalInput(context, node, 0); + TfLiteEvalTensor* output = tflite::micro::GetEvalOutput(context, node, 0); + + TFLITE_DCHECK(node->user_data != nullptr); + const CMSISNNSoftmaxParams op_data = + *static_cast(node->user_data); + + switch (input->type) { + case kTfLiteFloat32: { + tflite::reference_ops::Softmax( + op_data.softmax_params, tflite::micro::GetTensorShape(input), + tflite::micro::GetTensorData(input), + tflite::micro::GetTensorShape(output), + tflite::micro::GetTensorData(output)); + return kTfLiteOk; + } + case kTfLiteInt8: { + if (output->type == kTfLiteInt8) { + arm_softmax_s8(tflite::micro::GetTensorData(input), + op_data.num_rows, op_data.row_size, + op_data.softmax_params.input_multiplier, + op_data.softmax_params.input_left_shift, + op_data.softmax_params.diff_min, + tflite::micro::GetTensorData(output)); + } else { + arm_softmax_s8_s16(tflite::micro::GetTensorData(input), + op_data.num_rows, op_data.row_size, + op_data.softmax_params.input_multiplier, + op_data.softmax_params.input_left_shift, + op_data.softmax_params.diff_min, + tflite::micro::GetTensorData(output)); + } + return kTfLiteOk; + } + case kTfLiteInt16: { + const cmsis_nn_softmax_lut_s16 softmax_params = { + .exp_lut = op_data.softmax_params.exp_lut, + .one_by_one_lut = op_data.softmax_params.one_over_one_plus_x_lut}; + + TFLITE_DCHECK_EQ( + arm_softmax_s16( + tflite::micro::GetTensorData(input), op_data.num_rows, + op_data.row_size, op_data.softmax_params.input_multiplier, + 
op_data.softmax_params.input_left_shift, &softmax_params, + tflite::micro::GetTensorData(output)), + ARM_CMSIS_NN_SUCCESS); + return kTfLiteOk; + } + default: + MicroPrintf("Type %s (%d) not supported.", TfLiteTypeGetName(input->type), + input->type); + return kTfLiteError; + } +} + +TfLiteStatus SoftmaxEvalInt8(TfLiteContext* context, TfLiteNode* node) { + const TfLiteEvalTensor* input = tflite::micro::GetEvalInput(context, node, 0); + TfLiteEvalTensor* output = tflite::micro::GetEvalOutput(context, node, 0); + + TFLITE_DCHECK(node->user_data != nullptr); + const CMSISNNSoftmaxParams op_data = + *static_cast(node->user_data); + + arm_softmax_s8(tflite::micro::GetTensorData(input), op_data.num_rows, + op_data.row_size, op_data.softmax_params.input_multiplier, + op_data.softmax_params.input_left_shift, + op_data.softmax_params.diff_min, + tflite::micro::GetTensorData(output)); + + return kTfLiteOk; +} + +TfLiteStatus SoftmaxEvalInt8_Int16(TfLiteContext* context, TfLiteNode* node) { + const TfLiteEvalTensor* input = tflite::micro::GetEvalInput(context, node, 0); + TfLiteEvalTensor* output = tflite::micro::GetEvalOutput(context, node, 0); + + TFLITE_DCHECK(node->user_data != nullptr); + const CMSISNNSoftmaxParams op_data = + *static_cast(node->user_data); + + arm_softmax_s8_s16( + tflite::micro::GetTensorData(input), op_data.num_rows, + op_data.row_size, op_data.softmax_params.input_multiplier, + op_data.softmax_params.input_left_shift, op_data.softmax_params.diff_min, + tflite::micro::GetTensorData(output)); + + return kTfLiteOk; +} + +TfLiteStatus SoftmaxEvalInt16(TfLiteContext* context, TfLiteNode* node) { + const TfLiteEvalTensor* input = tflite::micro::GetEvalInput(context, node, 0); + TfLiteEvalTensor* output = tflite::micro::GetEvalOutput(context, node, 0); + + TFLITE_DCHECK(node->user_data != nullptr); + const CMSISNNSoftmaxParams op_data = + *static_cast(node->user_data); + + const cmsis_nn_softmax_lut_s16 softmax_params = { + .exp_lut = 
op_data.softmax_params.exp_lut, + .one_by_one_lut = op_data.softmax_params.one_over_one_plus_x_lut}; + + TFLITE_DCHECK_EQ( + arm_softmax_s16(tflite::micro::GetTensorData(input), + op_data.num_rows, op_data.row_size, + op_data.softmax_params.input_multiplier, + op_data.softmax_params.input_left_shift, &softmax_params, + tflite::micro::GetTensorData(output)), + ARM_CMSIS_NN_SUCCESS); + + return kTfLiteOk; +} + +} // namespace + +TFLMRegistration Register_SOFTMAX() { + return tflite::micro::RegisterOp(Init, Prepare, SoftmaxEval); +} + +TFLMRegistration Register_SOFTMAX_INT8() { + return tflite::micro::RegisterOp(Init, Prepare, SoftmaxEvalInt8); +} + +TFLMRegistration Register_SOFTMAX_INT8_INT16() { + return tflite::micro::RegisterOp(Init, Prepare, SoftmaxEvalInt8_Int16); +} + +TFLMRegistration Register_SOFTMAX_INT16() { + return tflite::micro::RegisterOp(Init, Prepare, SoftmaxEvalInt16); +} + +} // namespace tflite diff --git a/tensorflow/lite/micro/kernels/cmsis_nn/svdf.cc b/tensorflow/lite/micro/kernels/cmsis_nn/svdf.cc new file mode 100644 index 0000000..03dbaee --- /dev/null +++ b/tensorflow/lite/micro/kernels/cmsis_nn/svdf.cc @@ -0,0 +1,223 @@ +/* Copyright 2023 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/ + +#include "tensorflow/lite/micro/kernels/svdf.h" + +#include "Include/arm_nnfunctions.h" +#include "tensorflow/lite/c/builtin_op_data.h" +#include "tensorflow/lite/c/common.h" +#include "tensorflow/lite/kernels/internal/common.h" +#include "tensorflow/lite/kernels/internal/quantization_util.h" +#include "tensorflow/lite/kernels/internal/tensor_ctypes.h" +#include "tensorflow/lite/kernels/kernel_util.h" +#include "tensorflow/lite/kernels/op_macros.h" +#include "tensorflow/lite/micro/kernels/activation_utils.h" +#include "tensorflow/lite/micro/kernels/kernel_util.h" +#include "tensorflow/lite/micro/micro_log.h" +#include "tensorflow/lite/micro/micro_utils.h" + +namespace tflite { +namespace { + +void* Init(TfLiteContext* context, const char* buffer, size_t length) { + TFLITE_DCHECK(context->AllocatePersistentBuffer != nullptr); + return context->AllocatePersistentBuffer(context, sizeof(OpDataSvdf)); +} + +TfLiteStatus EvalIntegerSVDF(TfLiteContext* context, TfLiteNode* node, + const TfLiteEvalTensor* input_tensor, + const TfLiteEvalTensor* weights_feature_tensor, + const TfLiteEvalTensor* weights_time_tensor, + const TfLiteEvalTensor* bias_tensor, + const TfLiteSVDFParams* params, + TfLiteEvalTensor* activation_state_tensor, + TfLiteEvalTensor* output_tensor, + const OpDataSvdf& data) { + cmsis_nn_dims input_dims; + input_dims.n = input_tensor->dims->data[0]; + input_dims.h = input_tensor->dims->data[1]; + + cmsis_nn_dims weights_feature_dims; + weights_feature_dims.n = weights_feature_tensor->dims->data[0]; + weights_feature_dims.h = weights_feature_tensor->dims->data[1]; + + cmsis_nn_dims weights_time_dims; + weights_time_dims.n = weights_time_tensor->dims->data[0]; + weights_time_dims.h = weights_time_tensor->dims->data[1]; + + cmsis_nn_dims bias_dims; + bias_dims.n = bias_tensor->dims->data[0]; + + cmsis_nn_dims state_dims; + state_dims.n = bias_tensor->dims->data[0]; + 
state_dims.h = bias_tensor->dims->data[1]; + + cmsis_nn_dims output_dims; + output_dims.n = output_tensor->dims->data[0]; + output_dims.h = output_tensor->dims->data[1]; + + cmsis_nn_svdf_params svdf_params; + svdf_params.rank = params->rank; + svdf_params.input_offset = data.input_zero_point; + svdf_params.output_offset = data.output_zero_point; + + svdf_params.input_activation.min = INT16_MIN; + svdf_params.input_activation.max = INT16_MAX; + + svdf_params.output_activation.min = INT8_MIN; + svdf_params.output_activation.max = INT8_MAX; + + cmsis_nn_per_tensor_quant_params in_quant_params; + in_quant_params.multiplier = data.effective_scale_1_a; + in_quant_params.shift = data.effective_scale_1_b; + + cmsis_nn_per_tensor_quant_params out_quant_params; + out_quant_params.multiplier = data.effective_scale_2_a; + out_quant_params.shift = data.effective_scale_2_b; + + TFLITE_DCHECK(context != nullptr); + TFLITE_DCHECK(context->GetScratchBuffer != nullptr); + + cmsis_nn_context scratch_ctx; + scratch_ctx.buf = static_cast( + context->GetScratchBuffer(context, data.scratch_tensor_index)); + + cmsis_nn_context scratch_output_ctx; + scratch_output_ctx.buf = static_cast( + context->GetScratchBuffer(context, data.scratch_output_tensor_index)); + + int8_t* output_data = tflite::micro::GetTensorData(output_tensor); + + switch (weights_time_tensor->type) { + case kTfLiteInt8: { + arm_svdf_s8( + &scratch_ctx, &scratch_output_ctx, &svdf_params, &in_quant_params, + &out_quant_params, &input_dims, + tflite::micro::GetTensorData(input_tensor), &state_dims, + tflite::micro::GetTensorData(activation_state_tensor), + &weights_feature_dims, + tflite::micro::GetTensorData(weights_feature_tensor), + &weights_time_dims, + tflite::micro::GetTensorData(weights_time_tensor), &bias_dims, + tflite::micro::GetTensorData(bias_tensor), &output_dims, + output_data); + return kTfLiteOk; + } + + case kTfLiteInt16: { + arm_svdf_state_s16_s8( + &scratch_ctx, &scratch_output_ctx, &svdf_params, 
&in_quant_params, + &out_quant_params, &input_dims, + tflite::micro::GetTensorData(input_tensor), &state_dims, + tflite::micro::GetTensorData(activation_state_tensor), + &weights_feature_dims, + tflite::micro::GetTensorData(weights_feature_tensor), + &weights_time_dims, + tflite::micro::GetTensorData(weights_time_tensor), + &bias_dims, tflite::micro::GetTensorData(bias_tensor), + &output_dims, output_data); + return kTfLiteOk; + } + + default: + MicroPrintf("Could not find matching function for type %s.", + TfLiteTypeGetName(weights_time_tensor->type)); + return kTfLiteError; + } +} + +TfLiteStatus EvalSvdf(TfLiteContext* context, TfLiteNode* node) { + auto* params = reinterpret_cast(node->builtin_data); + TFLITE_DCHECK(node->user_data != nullptr); + const OpDataSvdf& data = *(static_cast(node->user_data)); + + const TfLiteEvalTensor* input = + tflite::micro::GetEvalInput(context, node, kSvdfInputTensor); + const TfLiteEvalTensor* weights_feature = + tflite::micro::GetEvalInput(context, node, kSvdfWeightsFeatureTensor); + const TfLiteEvalTensor* weights_time = + tflite::micro::GetEvalInput(context, node, kSvdfWeightsTimeTensor); + const TfLiteEvalTensor* bias = + (NumInputs(node) == 5) + ? 
tflite::micro::GetEvalInput(context, node, kSvdfBiasTensor) + : nullptr; + TfLiteEvalTensor* activation_state = tflite::micro::GetMutableEvalInput( + context, node, kSvdfInputActivationStateTensor); + TfLiteEvalTensor* output = + tflite::micro::GetEvalOutput(context, node, kSvdfOutputTensor); + + switch (weights_time->type) { + case kTfLiteFloat32: { + EvalFloatSvdfReference( + context, node, input, weights_feature, weights_time, bias, params, + data.scratch_tensor_index, activation_state, output); + return kTfLiteOk; + } + + case kTfLiteInt8: + case kTfLiteInt16: { + return EvalIntegerSVDF(context, node, input, weights_feature, + weights_time, bias, params, activation_state, + output, data); + } + + default: + MicroPrintf("Type %s not currently supported.", + TfLiteTypeGetName(weights_feature->type)); + return kTfLiteError; + } + return kTfLiteOk; +} + +TfLiteStatus EvalSvdfInt8(TfLiteContext* context, TfLiteNode* node) { + auto* params = reinterpret_cast(node->builtin_data); + TFLITE_DCHECK(node->user_data != nullptr); + const OpDataSvdf& data = *(static_cast(node->user_data)); + + const TfLiteEvalTensor* input = + tflite::micro::GetEvalInput(context, node, kSvdfInputTensor); + const TfLiteEvalTensor* weights_feature = + tflite::micro::GetEvalInput(context, node, kSvdfWeightsFeatureTensor); + const TfLiteEvalTensor* weights_time = + tflite::micro::GetEvalInput(context, node, kSvdfWeightsTimeTensor); + const TfLiteEvalTensor* bias = + (NumInputs(node) == 5) + ? tflite::micro::GetEvalInput(context, node, kSvdfBiasTensor) + : nullptr; + TfLiteEvalTensor* activation_state = tflite::micro::GetMutableEvalInput( + context, node, kSvdfInputActivationStateTensor); + TfLiteEvalTensor* output = + tflite::micro::GetEvalOutput(context, node, kSvdfOutputTensor); + + TFLITE_DCHECK((weights_time->type == kTfLiteInt8) || + (weights_time->type == kTfLiteInt16)); + // Because of the TODO mentioned below, the int16 weight data type is not + // split into a separate registration. 
+ // TODO(#523): remove 16-bit code when no longer needed. + return EvalIntegerSVDF(context, node, input, weights_feature, weights_time, + bias, params, activation_state, output, data); +} + +} // namespace + +TFLMRegistration Register_SVDF() { + return tflite::micro::RegisterOp(Init, PrepareSvdf, EvalSvdf); +} + +TFLMRegistration Register_SVDF_INT8() { + return tflite::micro::RegisterOp(Init, PrepareSvdf, EvalSvdfInt8); +} + +} // namespace tflite diff --git a/tensorflow/lite/micro/kernels/cmsis_nn/unidirectional_sequence_lstm.cc b/tensorflow/lite/micro/kernels/cmsis_nn/unidirectional_sequence_lstm.cc new file mode 100644 index 0000000..f66ce80 --- /dev/null +++ b/tensorflow/lite/micro/kernels/cmsis_nn/unidirectional_sequence_lstm.cc @@ -0,0 +1,683 @@ +/* Copyright 2023 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +// Integer version of unidirectional sequence LSTM. Only the standard LSTM +// (defined in the keras LSTM layer, e.g., no peephole etc.) is supported here. +// Currently used by the 8 bits activation case only, except for fallbacks. 
+ +#include +#include + +#include "Include/arm_nnfunctions.h" +#include "tensorflow/lite/kernels/internal/quantization_util.h" +#include "tensorflow/lite/kernels/kernel_util.h" +#include "tensorflow/lite/micro/kernels/fully_connected.h" +#include "tensorflow/lite/micro/kernels/kernel_util.h" +#include "tensorflow/lite/micro/kernels/lstm_eval.h" +#include "tensorflow/lite/micro/kernels/lstm_shared.h" +#include "tensorflow/lite/micro/kernels/micro_tensor_utils.h" + +namespace tflite { + +namespace { + +struct OpData { + OpDataLSTM params_ref; + cmsis_nn_lstm_params params_cmsis_nn; +}; + +/*Helper Functions*/ +TfLiteStatus PrecomputeZeroPointTimesWeightWithBias( + TfLiteContext* context, int32_t zero_point, + const TfLiteTensor* weight_tensor, const TfLiteTensor* bias_tensor, + int32_t** output) { + if (weight_tensor == nullptr) { + return kTfLiteOk; + } + + const RuntimeShape& weight_shape = GetTensorShape(weight_tensor); + TF_LITE_ENSURE_EQ(context, weight_shape.DimensionsCount(), 2); + const int row = weight_shape.Dims(0); + const int col = weight_shape.Dims(1); + TFLITE_DCHECK(context->AllocatePersistentBuffer != nullptr); + *output = static_cast( + context->AllocatePersistentBuffer(context, row * sizeof(int32_t))); + + if (bias_tensor == nullptr) { + memset(*output, 0, row * sizeof(int32_t)); + } else { + const int32_t* bias = GetTensorData(bias_tensor); + memcpy(*output, bias, row * sizeof(int32_t)); + } + + if (zero_point != 0) { + const int8_t* weight = GetTensorData(weight_tensor); + tflite::tensor_utils::MatrixScalarMultiplyAccumulate(weight, zero_point, + row, col, *output); + } + return kTfLiteOk; +} + +TfLiteStatus CalculateOpData(TfLiteContext* context, TfLiteNode* node, + const LstmTensors& lstm_tensors, OpData* op_data) { + const TfLiteTensor* input = lstm_tensors.GetInternalTensor(kLstmInputTensor); + const TfLiteTensor* output_state = + lstm_tensors.GetInternalTensor(tflite::kLstmOutputStateTensor); + + TF_LITE_ENSURE(context, input->type == 
kTfLiteInt8); + + op_data->params_cmsis_nn.output_state_offset = + output_state->params.zero_point; + + const TfLiteTensor* input_to_forget_weights = + lstm_tensors.GetInternalTensor(kLstmInputToForgetWeightsTensor); + const TfLiteTensor* input_to_input_weights = + lstm_tensors.GetInternalTensor(kLstmInputToInputWeightsTensor); + const TfLiteTensor* input_to_output_weights = + lstm_tensors.GetInternalTensor(kLstmInputToOutputWeightsTensor); + const TfLiteTensor* input_to_cell_weights = + lstm_tensors.GetInternalTensor(kLstmInputToCellWeightsTensor); + const TfLiteTensor* forget_gate_bias = + lstm_tensors.GetInternalTensor(kLstmForgetGateBiasTensor); + const TfLiteTensor* cell_state = + lstm_tensors.GetInternalTensor(kLstmCellStateTensor); + + const TfLiteTensor* cell_gate_bias = + lstm_tensors.GetInternalTensor(kLstmCellGateBiasTensor); + const TfLiteTensor* output_gate_bias = + lstm_tensors.GetInternalTensor(kLstmOutputGateBiasTensor); + const TfLiteTensor* input_gate_bias = + lstm_tensors.GetInternalTensor(kLstmInputGateBiasTensor); + const TfLiteTensor* recurrent_to_forget_weights = + lstm_tensors.GetInternalTensor(kLstmRecurrentToForgetWeightsTensor); + const TfLiteTensor* recurrent_to_cell_weights = + lstm_tensors.GetInternalTensor(kLstmRecurrentToCellWeightsTensor); + const TfLiteTensor* recurrent_to_output_weights = + lstm_tensors.GetInternalTensor(kLstmRecurrentToOutputWeightsTensor); + const TfLiteTensor* recurrent_to_input_weights = + lstm_tensors.GetInternalTensor(kLstmRecurrentToInputWeightsTensor); + const TfLiteTensor* cell_to_output_weights = + lstm_tensors.GetInternalTensor(kLstmCellToOutputWeightsTensor); + const TfLiteTensor* forget_layer_norm_coefficients = + lstm_tensors.GetInternalTensor(kLstmForgetLayerNormCoefficientsTensor); + const TfLiteTensor* projection_weights = + lstm_tensors.GetInternalTensor(kLstmProjectionWeightsTensor); + + const bool use_layer_norm = (forget_layer_norm_coefficients != nullptr); + const bool use_peephole = 
(cell_to_output_weights != nullptr); + const bool use_projection = (projection_weights != nullptr); + const bool use_cifg = (input_to_input_weights == nullptr); + const bool lstm_unsupported_config = + use_layer_norm || use_peephole || use_projection || use_cifg; + TFLITE_DCHECK(!lstm_unsupported_config); + + // Pre-calculate bias + zero_point * weight. + int32_t* input_to_forget_effective_bias = nullptr; + int32_t* recurrent_to_forget_effective_bias = nullptr; + int32_t* input_to_cell_effective_bias = nullptr; + int32_t* recurrent_to_cell_effective_bias = nullptr; + int32_t* input_to_output_effective_bias = nullptr; + int32_t* recurrent_to_output_effective_bias = nullptr; + int32_t* input_to_input_effective_bias = nullptr; + int32_t* recurrent_to_input_effective_bias = nullptr; + + const int32_t output_state_zero_point = + -op_data->params_cmsis_nn.output_state_offset; + const int32_t input_zero_point = -input->params.zero_point; + + TF_LITE_ENSURE_OK(context, + PrecomputeZeroPointTimesWeightWithBias( + context, input_zero_point, input_to_forget_weights, + forget_gate_bias, &input_to_forget_effective_bias)); + + TF_LITE_ENSURE_OK(context, PrecomputeZeroPointTimesWeightWithBias( + context, output_state_zero_point, + recurrent_to_forget_weights, nullptr, + &recurrent_to_forget_effective_bias)); + + // Modulation gate. + TF_LITE_ENSURE_OK(context, + PrecomputeZeroPointTimesWeightWithBias( + context, input_zero_point, input_to_cell_weights, + cell_gate_bias, &input_to_cell_effective_bias)); + TF_LITE_ENSURE_OK( + context, PrecomputeZeroPointTimesWeightWithBias( + context, output_state_zero_point, recurrent_to_cell_weights, + nullptr, &recurrent_to_cell_effective_bias)); + + // Output gate. 
+ TF_LITE_ENSURE_OK(context, + PrecomputeZeroPointTimesWeightWithBias( + context, input_zero_point, input_to_output_weights, + output_gate_bias, &input_to_output_effective_bias)); + + TF_LITE_ENSURE_OK(context, PrecomputeZeroPointTimesWeightWithBias( + context, output_state_zero_point, + recurrent_to_output_weights, nullptr, + &recurrent_to_output_effective_bias)); + + // Input gate. The calculation is only meaningful for non-cifg case. + TF_LITE_ENSURE_OK(context, + PrecomputeZeroPointTimesWeightWithBias( + context, input_zero_point, input_to_input_weights, + input_gate_bias, &input_to_input_effective_bias)); + TF_LITE_ENSURE_OK( + context, PrecomputeZeroPointTimesWeightWithBias( + context, output_state_zero_point, recurrent_to_input_weights, + nullptr, &recurrent_to_input_effective_bias)); + + op_data->params_cmsis_nn.i2f_effective_bias = input_to_forget_effective_bias; + op_data->params_cmsis_nn.r2f_effective_bias = + recurrent_to_forget_effective_bias; + op_data->params_cmsis_nn.i2c_effective_bias = input_to_cell_effective_bias; + op_data->params_cmsis_nn.r2c_effective_bias = + recurrent_to_cell_effective_bias; + op_data->params_cmsis_nn.i2o_effective_bias = input_to_output_effective_bias; + op_data->params_cmsis_nn.r2o_effective_bias = + recurrent_to_output_effective_bias; + op_data->params_cmsis_nn.i2i_effective_bias = input_to_input_effective_bias; + op_data->params_cmsis_nn.r2i_effective_bias = + recurrent_to_input_effective_bias; + + // Get intermediate scales and zero points. + float intermediate_scale[5]; + int32_t intermediate_zp[5]; + for (int i = 0; i < 4; ++i) { + // Q3.12 for activation functions. + intermediate_scale[i] = std::pow(2.0f, -12.0f); + intermediate_zp[i] = 0; + } + + MicroContext* micro_context = GetMicroContext(context); + // In the absence of projection, hidden becomes otuput and this intermediate + // is ignored. 
+ TfLiteTensor* hidden = micro_context->AllocateTempIntermediateTensor(node, 4); + TF_LITE_ENSURE(context, hidden->quantization.type != kTfLiteNoQuantization); + auto* hidden_params = + static_cast(hidden->quantization.params); + intermediate_scale[4] = hidden_params->scale->data[0]; + intermediate_zp[4] = hidden_params->zero_point->data[0]; + if (hidden != nullptr) { + micro_context->DeallocateTempTfLiteTensor(hidden); + } + + // Scales. + const float default_scale = 1.0; + float input_scale = default_scale; + float input_to_input_weight_scale = default_scale; + float recurrent_to_input_weight_scale = default_scale; + float input_to_forget_weight_scale = default_scale; + float recurrent_to_forget_weight_scale = default_scale; + float input_to_cell_weight_scale = default_scale; + float recurrent_to_cell_weight_scale = default_scale; + float input_to_output_weight_scale = default_scale; + float recurrent_to_output_weight_scale = default_scale; + float output_state_scale = default_scale; + int cell_scale = 1; + + // Effective scales. + float effective_input_to_input_scale = default_scale; + float effective_recurrent_to_input_scale = default_scale; + float effective_cell_to_input_scale = default_scale; + float effective_input_to_forget_scale = default_scale; + float effective_recurrent_to_forget_scale = default_scale; + float effective_cell_to_forget_scale = default_scale; + float effective_input_to_cell_scale = default_scale; + float effective_recurrent_to_cell_scale = default_scale; + float effective_input_to_output_scale = default_scale; + float effective_recurrent_to_output_scale = default_scale; + float effective_cell_to_output_scale = default_scale; + float effective_hidden_scale = default_scale; + + // Populate scales. 
+ input_to_input_weight_scale = input_to_input_weights->params.scale; + recurrent_to_input_weight_scale = recurrent_to_input_weights->params.scale; + + output_state_scale = output_state->params.scale; + + input_to_forget_weight_scale = input_to_forget_weights->params.scale; + input_to_cell_weight_scale = input_to_cell_weights->params.scale; + input_to_output_weight_scale = input_to_output_weights->params.scale; + recurrent_to_forget_weight_scale = recurrent_to_forget_weights->params.scale; + recurrent_to_cell_weight_scale = recurrent_to_cell_weights->params.scale; + recurrent_to_output_weight_scale = recurrent_to_output_weights->params.scale; + + // Check cell state (already used above) + TF_LITE_ENSURE(context, CheckedLog2(cell_state->params.scale, &cell_scale)); + TF_LITE_ENSURE(context, cell_scale <= -9); + + op_data->params_cmsis_nn.cell_state_shift = cell_scale; + input_scale = input->params.scale; + + // Calculate effective scales. + effective_input_to_input_scale = + input_to_input_weight_scale * input_scale / intermediate_scale[0]; + effective_recurrent_to_input_scale = recurrent_to_input_weight_scale * + output_state_scale / + intermediate_scale[0]; + + effective_input_to_forget_scale = + input_to_forget_weight_scale * input_scale / intermediate_scale[1]; + effective_recurrent_to_forget_scale = recurrent_to_forget_weight_scale * + output_state_scale / + intermediate_scale[1]; + + effective_input_to_cell_scale = + input_to_cell_weight_scale * input_scale / intermediate_scale[2]; + effective_recurrent_to_cell_scale = recurrent_to_cell_weight_scale * + output_state_scale / + intermediate_scale[2]; + + effective_input_to_output_scale = + input_to_output_weight_scale * input_scale / intermediate_scale[3]; + effective_recurrent_to_output_scale = recurrent_to_output_weight_scale * + output_state_scale / + intermediate_scale[3]; + + effective_hidden_scale = + std::pow(2.0f, -15.0f) / intermediate_scale[4] * std::pow(2.0f, -15.0f); + + // Decompose scales. 
+ int shift_output; + QuantizeMultiplier( + static_cast(effective_input_to_input_scale), + &op_data->params_cmsis_nn.input_to_input_scaling.multiplier, + &shift_output); + op_data->params_cmsis_nn.input_to_input_scaling.shift = + static_cast(shift_output); + + QuantizeMultiplier( + static_cast(effective_recurrent_to_input_scale), + &op_data->params_cmsis_nn.recurrent_to_input_scaling.multiplier, + &shift_output); + op_data->params_cmsis_nn.recurrent_to_input_scaling.shift = + static_cast(shift_output); + QuantizeMultiplier(static_cast(effective_cell_to_input_scale), + &op_data->params_cmsis_nn.cell_to_input_scaling.multiplier, + &shift_output); + op_data->params_cmsis_nn.cell_to_input_scaling.shift = + static_cast(shift_output); + QuantizeMultiplier( + static_cast(effective_input_to_forget_scale), + &op_data->params_cmsis_nn.input_to_forget_scaling.multiplier, + &shift_output); + op_data->params_cmsis_nn.input_to_forget_scaling.shift = + static_cast(shift_output); + QuantizeMultiplier( + static_cast(effective_recurrent_to_forget_scale), + &op_data->params_cmsis_nn.recurrent_to_forget_scaling.multiplier, + &shift_output); + op_data->params_cmsis_nn.recurrent_to_forget_scaling.shift = + static_cast(shift_output); + QuantizeMultiplier( + static_cast(effective_cell_to_forget_scale), + &op_data->params_cmsis_nn.cell_to_forget_scaling.multiplier, + &shift_output); + // ok + op_data->params_cmsis_nn.cell_to_forget_scaling.shift = + static_cast(shift_output); + QuantizeMultiplier(static_cast(effective_input_to_cell_scale), + &op_data->params_cmsis_nn.input_to_cell_scaling.multiplier, + &shift_output); + op_data->params_cmsis_nn.input_to_cell_scaling.shift = + static_cast(shift_output); + QuantizeMultiplier( + static_cast(effective_recurrent_to_cell_scale), + &op_data->params_cmsis_nn.recurrent_to_cell_scaling.multiplier, + &shift_output); + op_data->params_cmsis_nn.recurrent_to_cell_scaling.shift = + static_cast(shift_output); + QuantizeMultiplier( + 
static_cast(effective_input_to_output_scale), + &op_data->params_cmsis_nn.input_to_output_scaling.multiplier, + &shift_output); + op_data->params_cmsis_nn.input_to_output_scaling.shift = + static_cast(shift_output); + QuantizeMultiplier( + static_cast(effective_recurrent_to_output_scale), + &op_data->params_cmsis_nn.recurrent_to_output_scaling.multiplier, + &shift_output); + op_data->params_cmsis_nn.recurrent_to_output_scaling.shift = + static_cast(shift_output); + QuantizeMultiplier( + static_cast(effective_cell_to_output_scale), + &op_data->params_cmsis_nn.cell_to_output_scaling.multiplier, + &shift_output); + op_data->params_cmsis_nn.cell_to_output_scaling.shift = + static_cast(shift_output); + + op_data->params_cmsis_nn.projection_scaling.shift = + static_cast(shift_output); + + QuantizeMultiplier(static_cast(effective_hidden_scale), + &op_data->params_cmsis_nn.hidden_scaling.multiplier, + &shift_output); + op_data->params_cmsis_nn.hidden_scaling.shift = + static_cast(shift_output); + + op_data->params_cmsis_nn.hidden_offset = intermediate_zp[4]; + + op_data->params_cmsis_nn.activation.min = std::numeric_limits::min(); + op_data->params_cmsis_nn.activation.max = std::numeric_limits::max(); + + return kTfLiteOk; +} + +template +TfLiteStatus CMSIS_NN_EvalInteger8x8_16Lstm( + const OpData& op_data, const LSTMKernelContents& kernel_content, + const LSTMBuffers& buffers) { + const OpDataLSTM& op_data_lstm = op_data.params_ref; + const TfLiteEvalTensor* input = + kernel_content.GetInternalTensor(tflite::kLstmInputTensor); + const TfLiteEvalTensor* input_gate_bias = + kernel_content.GetInternalTensor(tflite::kLstmInputGateBiasTensor); + const TfLiteEvalTensor* forget_gate_bias = + kernel_content.GetInternalTensor(tflite::kLstmForgetGateBiasTensor); + const TfLiteEvalTensor* cell_gate_bias = + kernel_content.GetInternalTensor(tflite::kLstmCellGateBiasTensor); + const TfLiteEvalTensor* output_gate_bias = + 
kernel_content.GetInternalTensor(tflite::kLstmOutputGateBiasTensor); + const TfLiteEvalTensor* input_to_output_weights = + kernel_content.GetInternalTensor(tflite::kLstmInputToOutputWeightsTensor); + const TfLiteEvalTensor* recurrent_to_output_weights = + kernel_content.GetInternalTensor( + tflite::kLstmRecurrentToOutputWeightsTensor); + const TfLiteEvalTensor* input_to_input_weights = + kernel_content.GetInternalTensor(tflite::kLstmInputToInputWeightsTensor); + const TfLiteEvalTensor* input_to_forget_weights = + kernel_content.GetInternalTensor(tflite::kLstmInputToForgetWeightsTensor); + const TfLiteEvalTensor* input_to_cell_weights = + kernel_content.GetInternalTensor(tflite::kLstmInputToCellWeightsTensor); + const TfLiteEvalTensor* recurrent_to_input_weights = + kernel_content.GetInternalTensor( + tflite::kLstmRecurrentToInputWeightsTensor); + const TfLiteEvalTensor* recurrent_to_forget_weights = + kernel_content.GetInternalTensor( + tflite::kLstmRecurrentToForgetWeightsTensor); + const TfLiteEvalTensor* recurrent_to_cell_weights = + kernel_content.GetInternalTensor( + tflite::kLstmRecurrentToCellWeightsTensor); + const TfLiteEvalTensor* cell_to_input_weights = + kernel_content.GetInternalTensor(tflite::kLstmCellToInputWeightsTensor); + const TfLiteEvalTensor* cell_to_forget_weights = + kernel_content.GetInternalTensor(tflite::kLstmCellToForgetWeightsTensor); + const TfLiteEvalTensor* cell_to_output_weights = + kernel_content.GetInternalTensor(tflite::kLstmCellToOutputWeightsTensor); + const TfLiteEvalTensor* cell_state = + kernel_content.GetInternalTensor(tflite::kLstmCellStateTensor); + const TfLiteEvalTensor* output_state = + kernel_content.GetInternalTensor(tflite::kLstmOutputStateTensor); + const TfLiteEvalTensor* output = kernel_content.output_tensor; + + TFLITE_DCHECK(input->dims->size >= 2 && input->dims->size <= 3); + + cmsis_nn_lstm_context scratch_buffers; + scratch_buffers.input_gate = reinterpret_cast(buffers.buffer0); + scratch_buffers.forget_gate 
= reinterpret_cast(buffers.buffer1); + scratch_buffers.cell_gate = reinterpret_cast(buffers.buffer2); + scratch_buffers.output_gate = reinterpret_cast(buffers.buffer3); + + cmsis_nn_lstm_params cmsis_lstm_params = op_data.params_cmsis_nn; + cmsis_lstm_params.time_major = op_data_lstm.size_info.time_major; + cmsis_lstm_params.clip.cell = + op_data_lstm.cell_state_info.quantized_cell_clip; + + cmsis_lstm_params.input_gate_bias = const_cast( + tflite::micro::GetOptionalTensorData(input_gate_bias)); + cmsis_lstm_params.forget_gate_bias = const_cast( + tflite::micro::GetOptionalTensorData(forget_gate_bias)); + cmsis_lstm_params.cell_gate_bias = const_cast( + tflite::micro::GetOptionalTensorData(cell_gate_bias)); + cmsis_lstm_params.output_gate_bias = const_cast( + tflite::micro::GetOptionalTensorData(output_gate_bias)); + + const bool time_major = op_data_lstm.size_info.time_major; + const int n_input = input->dims->data[input->dims->size - 1]; + const int n_output = recurrent_to_output_weights->dims->data[1]; + + int max_time, n_batch; + if (input->dims->size == 2) { + max_time = 1; + n_batch = input->dims->data[0]; + } else { + max_time = (time_major) ? input->dims->data[0] : input->dims->data[1]; + n_batch = (time_major) ? 
input->dims->data[1] : input->dims->data[0]; + } + + cmsis_nn_lstm_dims lstm_dims; + lstm_dims.num_inputs = n_input; + lstm_dims.num_outputs = n_output; + lstm_dims.num_batches = n_batch; + lstm_dims.max_time = max_time; + + arm_lstm_unidirectional_s16_s8( + &scratch_buffers, + const_cast(tflite::micro::GetTensorData(input)), + &lstm_dims, + const_cast( + tflite::micro::GetOptionalTensorData(input_to_input_weights)), + const_cast(tflite::micro::GetOptionalTensorData( + input_to_forget_weights)), + const_cast( + tflite::micro::GetOptionalTensorData(input_to_cell_weights)), + const_cast(tflite::micro::GetOptionalTensorData( + input_to_output_weights)), + const_cast(tflite::micro::GetOptionalTensorData( + recurrent_to_input_weights)), + const_cast(tflite::micro::GetOptionalTensorData( + recurrent_to_forget_weights)), + const_cast(tflite::micro::GetOptionalTensorData( + recurrent_to_cell_weights)), + const_cast(tflite::micro::GetOptionalTensorData( + recurrent_to_output_weights)), + const_cast( + tflite::micro::GetOptionalTensorData(cell_to_input_weights)), + const_cast(tflite::micro::GetOptionalTensorData( + cell_to_forget_weights)), + const_cast(tflite::micro::GetOptionalTensorData( + cell_to_output_weights)), + nullptr, &cmsis_lstm_params, + const_cast(tflite::micro::GetTensorData(output_state)), + const_cast(tflite::micro::GetTensorData(cell_state)), + const_cast(tflite::micro::GetTensorData(output))); + + return kTfLiteOk; +} + +/*Kernel functions*/ + +void* UnidirectionalSequenceLstmInit(TfLiteContext* context, const char* buffer, + size_t length) { + TFLITE_DCHECK(context->AllocatePersistentBuffer != nullptr); + return context->AllocatePersistentBuffer(context, sizeof(OpData)); +} + +TfLiteStatus UnidirectionalSequenceLstmPrepare(TfLiteContext* context, + TfLiteNode* node) { + TF_LITE_ENSURE_EQ(context, node->outputs->size, 1); + TF_LITE_ENSURE_EQ(context, node->inputs->size, 24); + + TFLITE_DCHECK(node->builtin_data != nullptr); + TFLITE_DCHECK(node->user_data 
!= nullptr); + + OpData* op_data = reinterpret_cast(node->user_data); + OpDataLSTM* op_data_lstm = &op_data->params_ref; + + const auto* builtin_data = + static_cast(node->builtin_data); + // All TempTfLiteTensors will be deallocated through the destructor. + LstmTensors lstm_tensors(context, node); + TF_LITE_ENSURE_OK(context, lstm_tensors.ValidateTensorStatus(context)); + + op_data_lstm->cell_gate_nonlinear_type = builtin_data->activation; + op_data_lstm->size_info = + CreateLstmSizeInfo(builtin_data->time_major, + lstm_tensors.GetInternalTensor(kLstmInputTensor)->dims, + lstm_tensors.HiddenStateTensor()->dims); + + const TfLiteTensor* input = lstm_tensors.GetInternalTensor(kLstmInputTensor); + const auto activation_type = input->type; + + if (kTfLiteInt8 == activation_type) { + TF_LITE_ENSURE_STATUS( + CalculateOpData(context, node, lstm_tensors, op_data)); + } + + TF_LITE_ENSURE_OK(context, ValidateTensorSize(context, lstm_tensors, + op_data_lstm->size_info)); + + // Create cell state information and gate parameters (Fully Connected and Mul) + auto cell_state_type = + lstm_tensors.GetInternalTensor(kLstmCellStateTensor)->type; + if (cell_state_type == kTfLiteFloat32) { + op_data_lstm->cell_state_info = + CreateLstmCellStateInfoFloat(builtin_data->cell_clip); + TF_LITE_ENSURE_OK(context, PrepareGateParametersFloat(context, lstm_tensors, + op_data_lstm)); + } else if (cell_state_type == kTfLiteInt16) { + op_data_lstm->cell_state_info = CreateLstmCellStateInfo( + lstm_tensors.CellStateTensor()->params.scale, builtin_data->cell_clip); + TF_LITE_ENSURE_OK(context, PrepareGateParametersInteger( + context, lstm_tensors, op_data_lstm)); + } else { + MicroPrintf( + "Cell state type %s (%d) not supported. 
The quantized Unidirectional " + "Sequence LSTM Op only support int16 cell state", + TfLiteTypeGetName(cell_state_type), cell_state_type); + return kTfLiteError; + } + // request buffers (four buffers) + for (size_t i = 0; i < 4; i++) { + TF_LITE_ENSURE_OK(context, context->RequestScratchBufferInArena( + context, + op_data_lstm->size_info.batch_size * + op_data_lstm->size_info.state_dimension * + TfLiteTypeGetSize(cell_state_type), + &(op_data_lstm->buffer_indices[i]))); + } + + return kTfLiteOk; +} + +TfLiteStatus UnidirectionalSequenceLstmEval(TfLiteContext* context, + TfLiteNode* node) { + TFLITE_DCHECK(node->user_data != nullptr); + const OpData& op_data = *reinterpret_cast(node->user_data); + const OpDataLSTM& op_data_lstm = op_data.params_ref; + + auto kernel_content = CreateLSTMKernelContent(context, node); + + const auto activation_type = + kernel_content.internal_tensors[kLstmInputTensor]->type; + const auto weight_type = + kernel_content.internal_tensors[kLstmInputToInputWeightsTensor]->type; + + switch (activation_type) { + case kTfLiteFloat32: { + LSTMBuffers buffers = + CreateLSTMBuffers(context, op_data_lstm.buffer_indices); + EvalLstm(op_data_lstm, kernel_content, + buffers); + break; + } + case kTfLiteInt8: { + switch (weight_type) { + case kTfLiteInt8: { + // 8(activation)x8(weight)->16(cell) LSTM with 32 bits bias + LSTMBuffers buffers = + CreateLSTMBuffers(context, op_data_lstm.buffer_indices); + return CMSIS_NN_EvalInteger8x8_16Lstm( + op_data, kernel_content, buffers); + break; + } + default: { + MicroPrintf("Filter type %s (%d) not supported.", + TfLiteTypeGetName(weight_type), activation_type); + return kTfLiteError; + } + } + break; + } + case kTfLiteInt16: { + switch (weight_type) { + case kTfLiteInt8: { + // 16(activation)x8(weight)->16(cell) LSTM with 64 bits bias + LSTMBuffers buffers = + CreateLSTMBuffers(context, op_data_lstm.buffer_indices); + EvalLstm(op_data_lstm, + kernel_content, buffers); + break; + } + default: { + 
MicroPrintf("Filter type %s (%d) not supported.", + TfLiteTypeGetName(weight_type), weight_type); + return kTfLiteError; + } + } + break; + } + default: { + MicroPrintf("Input type %s (%d) not supported.", + TfLiteTypeGetName(activation_type), activation_type); + return kTfLiteError; + } + } + return kTfLiteOk; +} + +TfLiteStatus UnidirectionalSequenceLstmEvalInt8(TfLiteContext* context, + TfLiteNode* node) { + TFLITE_DCHECK(node->user_data != nullptr); + const OpData& op_data = *reinterpret_cast(node->user_data); + const OpDataLSTM& op_data_lstm = op_data.params_ref; + auto kernel_content = CreateLSTMKernelContent(context, node); + const auto activation_type = + kernel_content.internal_tensors[kLstmInputTensor]->type; + const auto weight_type = + kernel_content.internal_tensors[kLstmInputToInputWeightsTensor]->type; + + TFLITE_DCHECK(weight_type == kTfLiteInt16 && + "Only int16 filter type supported."); + + if (activation_type == kTfLiteInt8) { + LSTMBuffers buffers = + CreateLSTMBuffers(context, op_data_lstm.buffer_indices); + + return CMSIS_NN_EvalInteger8x8_16Lstm(op_data, kernel_content, + buffers); + } else { + MicroPrintf("Input type %s (%d) not supported.", + TfLiteTypeGetName(activation_type), activation_type); + return kTfLiteError; + } + return kTfLiteOk; +} + +} // namespace + +TFLMRegistration Register_UNIDIRECTIONAL_SEQUENCE_LSTM() { + return tflite::micro::RegisterOp(UnidirectionalSequenceLstmInit, + UnidirectionalSequenceLstmPrepare, + UnidirectionalSequenceLstmEval); +} + +TFLMRegistration Register_UNIDIRECTIONAL_SEQUENCE_LSTM_INT8() { + return tflite::micro::RegisterOp(UnidirectionalSequenceLstmInit, + UnidirectionalSequenceLstmPrepare, + UnidirectionalSequenceLstmEvalInt8); +} + +} // namespace tflite diff --git a/tensorflow/lite/micro/kernels/comparisons.cc b/tensorflow/lite/micro/kernels/comparisons.cc new file mode 100644 index 0000000..4056316 --- /dev/null +++ b/tensorflow/lite/micro/kernels/comparisons.cc @@ -0,0 +1,606 @@ +/* Copyright 
2023 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ +#include "tensorflow/lite/kernels/internal/reference/comparisons.h" + +#include "tensorflow/lite/c/common.h" +#include "tensorflow/lite/kernels/internal/quantization_util.h" +#include "tensorflow/lite/kernels/internal/tensor_ctypes.h" +#include "tensorflow/lite/kernels/kernel_util.h" +#include "tensorflow/lite/micro/kernels/kernel_util.h" +#include "tensorflow/lite/micro/micro_log.h" + +namespace tflite { + +namespace { + +struct OpData { + ComparisonParams params; +}; + +constexpr int kInputTensor1 = 0; +constexpr int kInputTensor2 = 1; +constexpr int kOutputTensor = 0; + +TfLiteStatus EqualEval(TfLiteContext* context, TfLiteNode* node) { + TFLITE_DCHECK(node->user_data != nullptr); + const OpData* data = static_cast(node->user_data); + + const TfLiteEvalTensor* input1 = + tflite::micro::GetEvalInput(context, node, kInputTensor1); + const TfLiteEvalTensor* input2 = + tflite::micro::GetEvalInput(context, node, kInputTensor2); + TfLiteEvalTensor* output = + tflite::micro::GetEvalOutput(context, node, kOutputTensor); + + RuntimeShape input1_shape = tflite::micro::GetTensorShape(input1); + RuntimeShape input2_shape = tflite::micro::GetTensorShape(input2); + RuntimeShape output_shape = tflite::micro::GetTensorShape(output); + bool* output_data = tflite::micro::GetTensorData(output); + + bool requires_broadcast = 
!tflite::micro::HaveSameShapes(input1, input2); + switch (input1->type) { + case kTfLiteBool: + requires_broadcast + ? reference_ops::Broadcast4DSlowEqualNoScaling( + data->params, input1_shape, + tflite::micro::GetTensorData(input1), input2_shape, + tflite::micro::GetTensorData(input2), output_shape, + output_data) + : reference_ops::EqualNoScaling( + data->params, input1_shape, + tflite::micro::GetTensorData(input1), input2_shape, + tflite::micro::GetTensorData(input2), output_shape, + output_data); + break; + case kTfLiteFloat32: + requires_broadcast + ? reference_ops::Broadcast4DSlowEqualNoScaling( + data->params, input1_shape, + tflite::micro::GetTensorData(input1), input2_shape, + tflite::micro::GetTensorData(input2), output_shape, + output_data) + : reference_ops::EqualNoScaling( + data->params, input1_shape, + tflite::micro::GetTensorData(input1), input2_shape, + tflite::micro::GetTensorData(input2), output_shape, + output_data); + break; + case kTfLiteInt32: + requires_broadcast + ? reference_ops::Broadcast4DSlowEqualNoScaling( + data->params, input1_shape, + tflite::micro::GetTensorData(input1), input2_shape, + tflite::micro::GetTensorData(input2), output_shape, + output_data) + : reference_ops::EqualNoScaling( + data->params, input1_shape, + tflite::micro::GetTensorData(input1), input2_shape, + tflite::micro::GetTensorData(input2), output_shape, + output_data); + break; + case kTfLiteInt64: + requires_broadcast + ? reference_ops::Broadcast4DSlowEqualNoScaling( + data->params, input1_shape, + tflite::micro::GetTensorData(input1), input2_shape, + tflite::micro::GetTensorData(input2), output_shape, + output_data) + : reference_ops::EqualNoScaling( + data->params, input1_shape, + tflite::micro::GetTensorData(input1), input2_shape, + tflite::micro::GetTensorData(input2), output_shape, + output_data); + break; + case kTfLiteInt8: + requires_broadcast + ? 
reference_ops::Broadcast4DSlowEqualWithScaling( + data->params, input1_shape, + tflite::micro::GetTensorData(input1), input2_shape, + tflite::micro::GetTensorData(input2), output_shape, + output_data) + : reference_ops::EqualWithScaling( + data->params, input1_shape, + tflite::micro::GetTensorData(input1), input2_shape, + tflite::micro::GetTensorData(input2), output_shape, + output_data); + break; + default: + MicroPrintf("Type %s (%d) not supported.", + TfLiteTypeGetName(input1->type), input1->type); + return kTfLiteError; + } + return kTfLiteOk; +} + +// TODO(renjieliu): Refactor the logic to avoid duplications. +TfLiteStatus NotEqualEval(TfLiteContext* context, TfLiteNode* node) { + TFLITE_DCHECK(node->user_data != nullptr); + const OpData* data = static_cast(node->user_data); + + const TfLiteEvalTensor* input1 = + tflite::micro::GetEvalInput(context, node, kInputTensor1); + const TfLiteEvalTensor* input2 = + tflite::micro::GetEvalInput(context, node, kInputTensor2); + TfLiteEvalTensor* output = + tflite::micro::GetEvalOutput(context, node, kOutputTensor); + + RuntimeShape input1_shape = tflite::micro::GetTensorShape(input1); + RuntimeShape input2_shape = tflite::micro::GetTensorShape(input2); + RuntimeShape output_shape = tflite::micro::GetTensorShape(output); + bool* output_data = tflite::micro::GetTensorData(output); + + bool requires_broadcast = !tflite::micro::HaveSameShapes(input1, input2); + switch (input1->type) { + case kTfLiteBool: + requires_broadcast + ? reference_ops::Broadcast4DSlowNotEqualNoScaling( + data->params, input1_shape, + tflite::micro::GetTensorData(input1), input2_shape, + tflite::micro::GetTensorData(input2), output_shape, + output_data) + : reference_ops::NotEqualNoScaling( + data->params, input1_shape, + tflite::micro::GetTensorData(input1), input2_shape, + tflite::micro::GetTensorData(input2), output_shape, + output_data); + break; + case kTfLiteFloat32: + requires_broadcast + ? 
reference_ops::Broadcast4DSlowNotEqualNoScaling( + data->params, input1_shape, + tflite::micro::GetTensorData(input1), input2_shape, + tflite::micro::GetTensorData(input2), output_shape, + output_data) + : reference_ops::NotEqualNoScaling( + data->params, input1_shape, + tflite::micro::GetTensorData(input1), input2_shape, + tflite::micro::GetTensorData(input2), output_shape, + output_data); + break; + case kTfLiteInt32: + requires_broadcast + ? reference_ops::Broadcast4DSlowNotEqualNoScaling( + data->params, input1_shape, + tflite::micro::GetTensorData(input1), input2_shape, + tflite::micro::GetTensorData(input2), output_shape, + output_data) + : reference_ops::NotEqualNoScaling( + data->params, input1_shape, + tflite::micro::GetTensorData(input1), input2_shape, + tflite::micro::GetTensorData(input2), output_shape, + output_data); + break; + case kTfLiteInt64: + requires_broadcast + ? reference_ops::Broadcast4DSlowNotEqualNoScaling( + data->params, input1_shape, + tflite::micro::GetTensorData(input1), input2_shape, + tflite::micro::GetTensorData(input2), output_shape, + output_data) + : reference_ops::NotEqualNoScaling( + data->params, input1_shape, + tflite::micro::GetTensorData(input1), input2_shape, + tflite::micro::GetTensorData(input2), output_shape, + output_data); + break; + case kTfLiteInt8: + requires_broadcast + ? 
reference_ops::Broadcast4DSlowNotEqualWithScaling( + data->params, input1_shape, + tflite::micro::GetTensorData(input1), input2_shape, + tflite::micro::GetTensorData(input2), output_shape, + output_data) + : reference_ops::NotEqualWithScaling( + data->params, input1_shape, + tflite::micro::GetTensorData(input1), input2_shape, + tflite::micro::GetTensorData(input2), output_shape, + output_data); + break; + default: + MicroPrintf("Type %s (%d) not supported.", + TfLiteTypeGetName(input1->type), input1->type); + return kTfLiteError; + } + return kTfLiteOk; +} + +TfLiteStatus GreaterEval(TfLiteContext* context, TfLiteNode* node) { + TFLITE_DCHECK(node->user_data != nullptr); + const OpData* data = static_cast(node->user_data); + + const TfLiteEvalTensor* input1 = + tflite::micro::GetEvalInput(context, node, kInputTensor1); + const TfLiteEvalTensor* input2 = + tflite::micro::GetEvalInput(context, node, kInputTensor2); + TfLiteEvalTensor* output = + tflite::micro::GetEvalOutput(context, node, kOutputTensor); + + RuntimeShape input1_shape = tflite::micro::GetTensorShape(input1); + RuntimeShape input2_shape = tflite::micro::GetTensorShape(input2); + RuntimeShape output_shape = tflite::micro::GetTensorShape(output); + bool* output_data = tflite::micro::GetTensorData(output); + + bool requires_broadcast = !tflite::micro::HaveSameShapes(input1, input2); + switch (input1->type) { + case kTfLiteFloat32: + requires_broadcast + ? reference_ops::Broadcast4DSlowGreaterNoScaling( + data->params, input1_shape, + tflite::micro::GetTensorData(input1), input2_shape, + tflite::micro::GetTensorData(input2), output_shape, + output_data) + : reference_ops::GreaterNoScaling( + data->params, input1_shape, + tflite::micro::GetTensorData(input1), input2_shape, + tflite::micro::GetTensorData(input2), output_shape, + output_data); + break; + case kTfLiteInt32: + requires_broadcast + ? 
reference_ops::Broadcast4DSlowGreaterNoScaling( + data->params, input1_shape, + tflite::micro::GetTensorData(input1), input2_shape, + tflite::micro::GetTensorData(input2), output_shape, + output_data) + : reference_ops::GreaterNoScaling( + data->params, input1_shape, + tflite::micro::GetTensorData(input1), input2_shape, + tflite::micro::GetTensorData(input2), output_shape, + output_data); + break; + case kTfLiteInt64: + requires_broadcast + ? reference_ops::Broadcast4DSlowGreaterNoScaling( + data->params, input1_shape, + tflite::micro::GetTensorData(input1), input2_shape, + tflite::micro::GetTensorData(input2), output_shape, + output_data) + : reference_ops::GreaterNoScaling( + data->params, input1_shape, + tflite::micro::GetTensorData(input1), input2_shape, + tflite::micro::GetTensorData(input2), output_shape, + output_data); + break; + case kTfLiteInt8: + requires_broadcast + ? reference_ops::Broadcast4DSlowGreaterWithScaling( + data->params, input1_shape, + tflite::micro::GetTensorData(input1), input2_shape, + tflite::micro::GetTensorData(input2), output_shape, + output_data) + : reference_ops::GreaterWithScaling( + data->params, input1_shape, + tflite::micro::GetTensorData(input1), input2_shape, + tflite::micro::GetTensorData(input2), output_shape, + output_data); + break; + default: + MicroPrintf("Type %s (%d) not supported.", + TfLiteTypeGetName(input1->type), input1->type); + return kTfLiteError; + } + return kTfLiteOk; +} + +TfLiteStatus GreaterEqualEval(TfLiteContext* context, TfLiteNode* node) { + TFLITE_DCHECK(node->user_data != nullptr); + const OpData* data = static_cast(node->user_data); + + const TfLiteEvalTensor* input1 = + tflite::micro::GetEvalInput(context, node, kInputTensor1); + const TfLiteEvalTensor* input2 = + tflite::micro::GetEvalInput(context, node, kInputTensor2); + TfLiteEvalTensor* output = + tflite::micro::GetEvalOutput(context, node, kOutputTensor); + + RuntimeShape input1_shape = tflite::micro::GetTensorShape(input1); + RuntimeShape 
input2_shape = tflite::micro::GetTensorShape(input2); + RuntimeShape output_shape = tflite::micro::GetTensorShape(output); + bool* output_data = tflite::micro::GetTensorData(output); + + bool requires_broadcast = !tflite::micro::HaveSameShapes(input1, input2); + switch (input1->type) { + case kTfLiteFloat32: + requires_broadcast + ? reference_ops::Broadcast4DSlowGreaterEqualNoScaling( + data->params, input1_shape, + tflite::micro::GetTensorData(input1), input2_shape, + tflite::micro::GetTensorData(input2), output_shape, + output_data) + : reference_ops::GreaterEqualNoScaling( + data->params, input1_shape, + tflite::micro::GetTensorData(input1), input2_shape, + tflite::micro::GetTensorData(input2), output_shape, + output_data); + break; + case kTfLiteInt32: + requires_broadcast + ? reference_ops::Broadcast4DSlowGreaterEqualNoScaling( + data->params, input1_shape, + tflite::micro::GetTensorData(input1), input2_shape, + tflite::micro::GetTensorData(input2), output_shape, + output_data) + : reference_ops::GreaterEqualNoScaling( + data->params, input1_shape, + tflite::micro::GetTensorData(input1), input2_shape, + tflite::micro::GetTensorData(input2), output_shape, + output_data); + break; + case kTfLiteInt64: + requires_broadcast + ? reference_ops::Broadcast4DSlowGreaterEqualNoScaling( + data->params, input1_shape, + tflite::micro::GetTensorData(input1), input2_shape, + tflite::micro::GetTensorData(input2), output_shape, + output_data) + : reference_ops::GreaterEqualNoScaling( + data->params, input1_shape, + tflite::micro::GetTensorData(input1), input2_shape, + tflite::micro::GetTensorData(input2), output_shape, + output_data); + break; + case kTfLiteInt8: + requires_broadcast + ? 
reference_ops::Broadcast4DSlowGreaterEqualWithScaling( + data->params, input1_shape, + tflite::micro::GetTensorData(input1), input2_shape, + tflite::micro::GetTensorData(input2), output_shape, + output_data) + : reference_ops::GreaterEqualWithScaling( + data->params, input1_shape, + tflite::micro::GetTensorData(input1), input2_shape, + tflite::micro::GetTensorData(input2), output_shape, + output_data); + break; + default: + MicroPrintf("Type %s (%d) not supported.", + TfLiteTypeGetName(input1->type), input1->type); + return kTfLiteError; + } + return kTfLiteOk; +} + +TfLiteStatus LessEval(TfLiteContext* context, TfLiteNode* node) { + TFLITE_DCHECK(node->user_data != nullptr); + const OpData* data = static_cast(node->user_data); + + const TfLiteEvalTensor* input1 = + tflite::micro::GetEvalInput(context, node, kInputTensor1); + const TfLiteEvalTensor* input2 = + tflite::micro::GetEvalInput(context, node, kInputTensor2); + TfLiteEvalTensor* output = + tflite::micro::GetEvalOutput(context, node, kOutputTensor); + + RuntimeShape input1_shape = tflite::micro::GetTensorShape(input1); + RuntimeShape input2_shape = tflite::micro::GetTensorShape(input2); + RuntimeShape output_shape = tflite::micro::GetTensorShape(output); + bool* output_data = tflite::micro::GetTensorData(output); + + bool requires_broadcast = !tflite::micro::HaveSameShapes(input1, input2); + switch (input1->type) { + case kTfLiteFloat32: + requires_broadcast + ? reference_ops::Broadcast4DSlowLessNoScaling( + data->params, input1_shape, + tflite::micro::GetTensorData(input1), input2_shape, + tflite::micro::GetTensorData(input2), output_shape, + output_data) + : reference_ops::LessNoScaling( + data->params, input1_shape, + tflite::micro::GetTensorData(input1), input2_shape, + tflite::micro::GetTensorData(input2), output_shape, + output_data); + break; + case kTfLiteInt32: + requires_broadcast + ? 
reference_ops::Broadcast4DSlowLessNoScaling( + data->params, input1_shape, + tflite::micro::GetTensorData(input1), input2_shape, + tflite::micro::GetTensorData(input2), output_shape, + output_data) + : reference_ops::LessNoScaling( + data->params, input1_shape, + tflite::micro::GetTensorData(input1), input2_shape, + tflite::micro::GetTensorData(input2), output_shape, + output_data); + break; + case kTfLiteInt64: + requires_broadcast + ? reference_ops::Broadcast4DSlowLessNoScaling( + data->params, input1_shape, + tflite::micro::GetTensorData(input1), input2_shape, + tflite::micro::GetTensorData(input2), output_shape, + output_data) + : reference_ops::LessNoScaling( + data->params, input1_shape, + tflite::micro::GetTensorData(input1), input2_shape, + tflite::micro::GetTensorData(input2), output_shape, + output_data); + break; + case kTfLiteInt8: + requires_broadcast + ? reference_ops::Broadcast4DSlowLessWithScaling( + data->params, input1_shape, + tflite::micro::GetTensorData(input1), input2_shape, + tflite::micro::GetTensorData(input2), output_shape, + output_data) + : reference_ops::LessWithScaling( + data->params, input1_shape, + tflite::micro::GetTensorData(input1), input2_shape, + tflite::micro::GetTensorData(input2), output_shape, + output_data); + break; + default: + MicroPrintf("Type %s (%d) not supported.", + TfLiteTypeGetName(input1->type), input1->type); + return kTfLiteError; + } + return kTfLiteOk; +} + +TfLiteStatus LessEqualEval(TfLiteContext* context, TfLiteNode* node) { + TFLITE_DCHECK(node->user_data != nullptr); + const OpData* data = static_cast(node->user_data); + + const TfLiteEvalTensor* input1 = + tflite::micro::GetEvalInput(context, node, kInputTensor1); + const TfLiteEvalTensor* input2 = + tflite::micro::GetEvalInput(context, node, kInputTensor2); + TfLiteEvalTensor* output = + tflite::micro::GetEvalOutput(context, node, kOutputTensor); + + RuntimeShape input1_shape = tflite::micro::GetTensorShape(input1); + RuntimeShape input2_shape = 
tflite::micro::GetTensorShape(input2); + RuntimeShape output_shape = tflite::micro::GetTensorShape(output); + bool* output_data = tflite::micro::GetTensorData(output); + + bool requires_broadcast = !tflite::micro::HaveSameShapes(input1, input2); + switch (input1->type) { + case kTfLiteFloat32: + requires_broadcast + ? reference_ops::Broadcast4DSlowLessEqualNoScaling( + data->params, input1_shape, + tflite::micro::GetTensorData(input1), input2_shape, + tflite::micro::GetTensorData(input2), output_shape, + output_data) + : reference_ops::LessEqualNoScaling( + data->params, input1_shape, + tflite::micro::GetTensorData(input1), input2_shape, + tflite::micro::GetTensorData(input2), output_shape, + output_data); + break; + case kTfLiteInt32: + requires_broadcast + ? reference_ops::Broadcast4DSlowLessEqualNoScaling( + data->params, input1_shape, + tflite::micro::GetTensorData(input1), input2_shape, + tflite::micro::GetTensorData(input2), output_shape, + output_data) + : reference_ops::LessEqualNoScaling( + data->params, input1_shape, + tflite::micro::GetTensorData(input1), input2_shape, + tflite::micro::GetTensorData(input2), output_shape, + output_data); + break; + case kTfLiteInt64: + requires_broadcast + ? reference_ops::Broadcast4DSlowLessEqualNoScaling( + data->params, input1_shape, + tflite::micro::GetTensorData(input1), input2_shape, + tflite::micro::GetTensorData(input2), output_shape, + output_data) + : reference_ops::LessEqualNoScaling( + data->params, input1_shape, + tflite::micro::GetTensorData(input1), input2_shape, + tflite::micro::GetTensorData(input2), output_shape, + output_data); + break; + case kTfLiteInt8: + requires_broadcast + ? 
reference_ops::Broadcast4DSlowLessEqualWithScaling( + data->params, input1_shape, + tflite::micro::GetTensorData(input1), input2_shape, + tflite::micro::GetTensorData(input2), output_shape, + output_data) + : reference_ops::LessEqualWithScaling( + data->params, input1_shape, + tflite::micro::GetTensorData(input1), input2_shape, + tflite::micro::GetTensorData(input2), output_shape, + output_data); + break; + default: + MicroPrintf("Type %s (%d) not supported.", + TfLiteTypeGetName(input1->type), input1->type); + return kTfLiteError; + } + return kTfLiteOk; +} + +void* Init(TfLiteContext* context, const char* buffer, size_t length) { + TFLITE_DCHECK(context->AllocatePersistentBuffer != nullptr); + return context->AllocatePersistentBuffer(context, sizeof(OpData)); +} + +TfLiteStatus Prepare(TfLiteContext* context, TfLiteNode* node) { + TFLITE_DCHECK(node->user_data != nullptr); + OpData* data = static_cast(node->user_data); + + MicroContext* micro_context = GetMicroContext(context); + + TfLiteTensor* input1 = + micro_context->AllocateTempInputTensor(node, kInputTensor1); + TF_LITE_ENSURE(context, input1 != nullptr); + TfLiteTensor* input2 = + micro_context->AllocateTempInputTensor(node, kInputTensor2); + TF_LITE_ENSURE(context, input2 != nullptr); + + if (input1->type == kTfLiteInt8) { + auto input1_offset = -input1->params.zero_point; + auto input2_offset = -input2->params.zero_point; + const int kLeftShift = 8; + + int32_t input1_multiplier; + int input1_shift; + QuantizeMultiplierSmallerThanOneExp( + static_cast(input1->params.scale), &input1_multiplier, + &input1_shift); + int32_t input2_multiplier; + int input2_shift; + QuantizeMultiplierSmallerThanOneExp( + static_cast(input2->params.scale), &input2_multiplier, + &input2_shift); + + data->params.left_shift = kLeftShift; + data->params.input1_offset = input1_offset; + data->params.input1_multiplier = input1_multiplier; + data->params.input1_shift = input1_shift; + data->params.input2_offset = input2_offset; + 
data->params.input2_multiplier = input2_multiplier; + data->params.input2_shift = input2_shift; + } + + micro_context->DeallocateTempTfLiteTensor(input1); + micro_context->DeallocateTempTfLiteTensor(input2); + + return kTfLiteOk; +} + +} // namespace + +TFLMRegistration Register_EQUAL() { + return tflite::micro::RegisterOp(Init, Prepare, EqualEval); +} + +TFLMRegistration Register_NOT_EQUAL() { + return tflite::micro::RegisterOp(Init, Prepare, NotEqualEval); +} + +TFLMRegistration Register_GREATER() { + return tflite::micro::RegisterOp(Init, Prepare, GreaterEval); +} + +TFLMRegistration Register_GREATER_EQUAL() { + return tflite::micro::RegisterOp(Init, Prepare, GreaterEqualEval); +} + +TFLMRegistration Register_LESS() { + return tflite::micro::RegisterOp(Init, Prepare, LessEval); +} + +TFLMRegistration Register_LESS_EQUAL() { + return tflite::micro::RegisterOp(Init, Prepare, LessEqualEval); +} + +} // namespace tflite diff --git a/tensorflow/lite/micro/kernels/comparisons_test.cc b/tensorflow/lite/micro/kernels/comparisons_test.cc new file mode 100644 index 0000000..eec57d6 --- /dev/null +++ b/tensorflow/lite/micro/kernels/comparisons_test.cc @@ -0,0 +1,746 @@ +/* Copyright 2023 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/ +#include +#include + +#include "tensorflow/lite/c/builtin_op_data.h" +#include "tensorflow/lite/c/common.h" +#include "tensorflow/lite/micro/kernels/kernel_runner.h" +#include "tensorflow/lite/micro/test_helpers.h" +#include "tensorflow/lite/micro/testing/micro_test.h" + +namespace tflite { +namespace testing { +namespace { + +constexpr int inputs_size = 2; +constexpr int outputs_size = 1; +constexpr int tensors_size = inputs_size + outputs_size; + +void TestComparison(const TFLMRegistration& registration, TfLiteTensor* tensors, + bool* expected_output_data, bool* output_data) { + const int output_dims_count = ElementCount(*tensors[inputs_size].dims); + + int inputs_array_data[] = {2, 0, 1}; + TfLiteIntArray* inputs_array = IntArrayFromInts(inputs_array_data); + int outputs_array_data[] = {1, 2}; + TfLiteIntArray* outputs_array = IntArrayFromInts(outputs_array_data); + + micro::KernelRunner runner(registration, tensors, tensors_size, inputs_array, + outputs_array, /*builtin_data=*/nullptr); + + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, runner.InitAndPrepare()); + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, runner.Invoke()); + + for (int i = 0; i < output_dims_count; ++i) { + TF_LITE_MICRO_EXPECT_EQ(expected_output_data[i], output_data[i]); + } +} + +void TestComparisonFloat(const TFLMRegistration& registration, + int* input1_dims_data, float* input1_data, + int* input2_dims_data, float* input2_data, + bool* expected_output_data, int* output_dims_data, + bool* output_data) { + TfLiteIntArray* input1_dims = IntArrayFromInts(input1_dims_data); + TfLiteIntArray* input2_dims = IntArrayFromInts(input2_dims_data); + TfLiteIntArray* output_dims = IntArrayFromInts(output_dims_data); + + TfLiteTensor tensors[tensors_size] = { + CreateTensor(input1_data, input1_dims), + CreateTensor(input2_data, input2_dims), + CreateTensor(output_data, output_dims), + }; + + TestComparison(registration, tensors, 
expected_output_data, output_data); +} + +void TestComparisonBool(const TFLMRegistration& registration, + int* input1_dims_data, bool* input1_data, + int* input2_dims_data, bool* input2_data, + bool* expected_output_data, int* output_dims_data, + bool* output_data) { + TfLiteIntArray* input1_dims = IntArrayFromInts(input1_dims_data); + TfLiteIntArray* input2_dims = IntArrayFromInts(input2_dims_data); + TfLiteIntArray* output_dims = IntArrayFromInts(output_dims_data); + + TfLiteTensor tensors[tensors_size] = { + CreateTensor(input1_data, input1_dims), + CreateTensor(input2_data, input2_dims), + CreateTensor(output_data, output_dims), + }; + + TestComparison(registration, tensors, expected_output_data, output_data); +} + +void TestComparisonInt(const TFLMRegistration& registration, + int* input1_dims_data, int32_t* input1_data, + int* input2_dims_data, int32_t* input2_data, + bool* expected_output_data, int* output_dims_data, + bool* output_data) { + TfLiteIntArray* input1_dims = IntArrayFromInts(input1_dims_data); + TfLiteIntArray* input2_dims = IntArrayFromInts(input2_dims_data); + TfLiteIntArray* output_dims = IntArrayFromInts(output_dims_data); + + TfLiteTensor tensors[tensors_size] = { + CreateTensor(input1_data, input1_dims), + CreateTensor(input2_data, input2_dims), + CreateTensor(output_data, output_dims), + }; + + TestComparison(registration, tensors, expected_output_data, output_data); +} + +void TestComparisonQuantizedInt8(const TFLMRegistration& registration, + int* input1_dims_data, float* input1_data, + int8_t* input1_quantized, float input1_scale, + int input1_zero_point, int* input2_dims_data, + float* input2_data, int8_t* input2_quantized, + float input2_scale, int input2_zero_point, + bool* expected_output_data, + int* output_dims_data, bool* output_data) { + TfLiteIntArray* input1_dims = IntArrayFromInts(input1_dims_data); + TfLiteIntArray* input2_dims = IntArrayFromInts(input2_dims_data); + TfLiteIntArray* output_dims = 
IntArrayFromInts(output_dims_data); + + TfLiteTensor tensors[tensors_size] = { + CreateQuantizedTensor(input1_data, input1_quantized, input1_dims, + input1_scale, input1_zero_point), + CreateQuantizedTensor(input2_data, input2_quantized, input2_dims, + input2_scale, input2_zero_point), + CreateTensor(output_data, output_dims), + }; + + TestComparison(registration, tensors, expected_output_data, output_data); +} + +} // namespace +} // namespace testing +} // namespace tflite + +TF_LITE_MICRO_TESTS_BEGIN + +TF_LITE_MICRO_TEST(EqualBool) { + int input1_dim[] = {4, 1, 1, 1, 4}; + int input2_dim[] = {4, 1, 1, 1, 4}; + + bool input1_data[] = {true, false, true, false}; + bool input2_data[] = {true, true, false, false}; + + bool expected_data[] = {true, false, false, true}; + int expected_dim[] = {4, 1, 1, 1, 4}; + + bool output_data[4]; + tflite::testing::TestComparisonBool(tflite::Register_EQUAL(), input1_dim, + input1_data, input2_dim, input2_data, + expected_data, expected_dim, output_data); +} + +TF_LITE_MICRO_TEST(EqualFloat) { + int input1_dim[] = {4, 1, 1, 1, 4}; + int input2_dim[] = {4, 1, 1, 1, 4}; + + float input1_data[] = {0.1, 0.9, 0.7, 0.3}; + float input2_data[] = {0.1, 0.2, 0.6, 0.5}; + + bool expected_data[] = {true, false, false, false}; + int expected_dim[] = {4, 1, 1, 1, 4}; + + bool output_data[4]; + tflite::testing::TestComparisonFloat( + tflite::Register_EQUAL(), input1_dim, input1_data, input2_dim, + input2_data, expected_data, expected_dim, output_data); +} + +TF_LITE_MICRO_TEST(EqualInt) { + int input1_dim[] = {4, 1, 1, 1, 4}; + int input2_dim[] = {4, 1, 1, 1, 4}; + + int32_t input1_data[] = {-1, 9, 7, 3}; + int32_t input2_data[] = {1, 2, 7, 5}; + + bool expected_data[] = {false, false, true, false}; + int expected_dim[] = {4, 1, 1, 1, 4}; + bool output_data[4]; + tflite::testing::TestComparisonInt(tflite::Register_EQUAL(), input1_dim, + input1_data, input2_dim, input2_data, + expected_data, expected_dim, output_data); +} + 
+TF_LITE_MICRO_TEST(EqualBroadcast) { + int input1_dim[] = {4, 1, 1, 1, 4}; + int input2_dim[] = {4, 1, 1, 1, 1}; + + int32_t input1_data[] = {-1, 9, 7, 3}; + int32_t input2_data[] = {7}; + + bool expected_data[] = {false, false, true, false}; + int expected_dim[] = {4, 1, 1, 1, 4}; + + bool output_data[4]; + tflite::testing::TestComparisonInt(tflite::Register_EQUAL(), input1_dim, + input1_data, input2_dim, input2_data, + expected_data, expected_dim, output_data); +} + +TF_LITE_MICRO_TEST(EqualBroadcastTwoD) { + int input1_dim[] = {4, 1, 1, 2, 4}; + int input2_dim[] = {4, 1, 1, 1, 4}; + + int32_t input1_data[] = {-1, 9, 7, 3, 2, 4, 2, 8}; + int32_t input2_data[] = {7, 1, 2, 4}; + + bool expected_data[] = {false, false, false, false, + false, false, true, false}; + int expected_dim[] = {4, 1, 1, 2, 4}; + + bool output_data[8]; + tflite::testing::TestComparisonInt(tflite::Register_EQUAL(), input1_dim, + input1_data, input2_dim, input2_data, + expected_data, expected_dim, output_data); +} + +TF_LITE_MICRO_TEST(NotEqualBool) { + int input1_dim[] = {4, 1, 1, 1, 4}; + int input2_dim[] = {4, 1, 1, 1, 4}; + + bool input1_data[] = {true, false, true, false}; + bool input2_data[] = {true, true, false, false}; + + bool expected_data[] = {false, true, true, false}; + int expected_dim[] = {4, 1, 1, 1, 4}; + + bool output_data[4]; + tflite::testing::TestComparisonBool(tflite::Register_NOT_EQUAL(), input1_dim, + input1_data, input2_dim, input2_data, + expected_data, expected_dim, output_data); +} + +TF_LITE_MICRO_TEST(NotEqualFloat) { + int input1_dim[] = {4, 1, 1, 1, 4}; + int input2_dim[] = {4, 1, 1, 1, 4}; + + float input1_data[] = {0.1, 0.9, 0.7, 0.3}; + float input2_data[] = {0.1, 0.2, 0.6, 0.5}; + + bool expected_data[] = {false, true, true, true}; + int expected_dim[] = {4, 1, 1, 1, 4}; + + bool output_data[4]; + tflite::testing::TestComparisonFloat( + tflite::Register_NOT_EQUAL(), input1_dim, input1_data, input2_dim, + input2_data, expected_data, expected_dim, 
output_data); +} + +TF_LITE_MICRO_TEST(NotEqualInt) { + int input1_dim[] = {4, 1, 1, 1, 4}; + int input2_dim[] = {4, 1, 1, 1, 4}; + + int32_t input1_data[] = {-1, 9, 7, 3}; + int32_t input2_data[] = {1, 2, 7, 5}; + + bool expected_data[] = {true, true, false, true}; + int expected_dim[] = {4, 1, 1, 1, 4}; + + bool output_data[4]; + tflite::testing::TestComparisonInt(tflite::Register_NOT_EQUAL(), input1_dim, + input1_data, input2_dim, input2_data, + expected_data, expected_dim, output_data); +} + +TF_LITE_MICRO_TEST(NotEqualBroadcast) { + int input1_dim[] = {4, 1, 1, 1, 4}; + int input2_dim[] = {4, 1, 1, 1, 1}; + + int32_t input1_data[] = {-1, 9, 7, 3}; + int32_t input2_data[] = {7}; + + bool expected_data[] = {true, true, false, true}; + int expected_dim[] = {4, 1, 1, 1, 4}; + + bool output_data[4]; + tflite::testing::TestComparisonInt(tflite::Register_NOT_EQUAL(), input1_dim, + input1_data, input2_dim, input2_data, + expected_data, expected_dim, output_data); +} + +TF_LITE_MICRO_TEST(NotEqualBroadcastTwoD) { + int input1_dim[] = {4, 1, 1, 2, 4}; + int input2_dim[] = {4, 1, 1, 1, 4}; + + int32_t input1_data[] = {-1, 9, 7, 3, 2, 4, 2, 8}; + int32_t input2_data[] = {7, 1, 2, 4}; + + bool expected_data[] = {true, true, true, true, true, true, false, true}; + int expected_dim[] = {4, 1, 1, 2, 4}; + + bool output_data[8]; + tflite::testing::TestComparisonInt(tflite::Register_NOT_EQUAL(), input1_dim, + input1_data, input2_dim, input2_data, + expected_data, expected_dim, output_data); +} + +TF_LITE_MICRO_TEST(GreaterFloat) { + int input1_dim[] = {4, 1, 1, 1, 4}; + int input2_dim[] = {4, 1, 1, 1, 4}; + + float input1_data[] = {0.1, 0.9, 0.7, 0.3}; + float input2_data[] = {0.1, 0.2, 0.6, 0.5}; + + bool expected_data[] = {false, true, true, false}; + int expected_dim[] = {4, 1, 1, 1, 4}; + + bool output_data[4]; + tflite::testing::TestComparisonFloat( + tflite::Register_GREATER(), input1_dim, input1_data, input2_dim, + input2_data, expected_data, expected_dim, output_data); 
+} + +TF_LITE_MICRO_TEST(GreaterInt) { + int input1_dim[] = {4, 1, 1, 1, 4}; + int input2_dim[] = {4, 1, 1, 1, 4}; + + int32_t input1_data[] = {-1, 9, 7, 3}; + int32_t input2_data[] = {1, 2, 7, 5}; + + bool expected_data[] = {false, true, false, false}; + int expected_dim[] = {4, 1, 1, 1, 4}; + + bool output_data[4]; + tflite::testing::TestComparisonInt(tflite::Register_GREATER(), input1_dim, + input1_data, input2_dim, input2_data, + expected_data, expected_dim, output_data); +} + +TF_LITE_MICRO_TEST(GreaterBroadcast) { + int input1_dim[] = {4, 1, 1, 1, 4}; + int input2_dim[] = {4, 1, 1, 1, 1}; + + int32_t input1_data[] = {-1, 9, 7, 3}; + int32_t input2_data[] = {7}; + + bool expected_data[] = {false, true, false, false}; + int expected_dim[] = {4, 1, 1, 1, 4}; + + bool output_data[4]; + tflite::testing::TestComparisonInt(tflite::Register_GREATER(), input1_dim, + input1_data, input2_dim, input2_data, + expected_data, expected_dim, output_data); +} + +TF_LITE_MICRO_TEST(GreaterBroadcastTwoD) { + int input1_dim[] = {4, 1, 1, 2, 4}; + int input2_dim[] = {4, 1, 1, 1, 4}; + + int32_t input1_data[] = {-1, 9, 7, 3, 2, 4, 2, 8}; + int32_t input2_data[] = {7, 1, 2, 4}; + + bool expected_data[] = {false, true, true, false, false, true, false, true}; + int expected_dim[] = {4, 1, 1, 2, 4}; + + bool output_data[8]; + tflite::testing::TestComparisonInt(tflite::Register_GREATER(), input1_dim, + input1_data, input2_dim, input2_data, + expected_data, expected_dim, output_data); +} + +TF_LITE_MICRO_TEST(GreaterEqualFloat) { + int input1_dim[] = {4, 1, 1, 1, 4}; + int input2_dim[] = {4, 1, 1, 1, 4}; + + float input1_data[] = {0.1, 0.9, 0.7, 0.3}; + float input2_data[] = {0.1, 0.2, 0.6, 0.5}; + + bool expected_data[] = {true, true, true, false}; + int expected_dim[] = {4, 1, 1, 1, 4}; + + bool output_data[4]; + tflite::testing::TestComparisonFloat( + tflite::Register_GREATER_EQUAL(), input1_dim, input1_data, input2_dim, + input2_data, expected_data, expected_dim, output_data); +} + 
+TF_LITE_MICRO_TEST(GreaterEqualInt) { + int input1_dim[] = {4, 1, 1, 1, 4}; + int input2_dim[] = {4, 1, 1, 1, 4}; + + int32_t input1_data[] = {-1, 9, 7, 3}; + int32_t input2_data[] = {1, 2, 7, 5}; + + bool expected_data[] = {false, true, true, false}; + int expected_dim[] = {4, 1, 1, 1, 4}; + + bool output_data[4]; + tflite::testing::TestComparisonInt( + tflite::Register_GREATER_EQUAL(), input1_dim, input1_data, input2_dim, + input2_data, expected_data, expected_dim, output_data); +} + +TF_LITE_MICRO_TEST(GreaterEqualBroadcast) { + int input1_dim[] = {4, 1, 1, 1, 4}; + int input2_dim[] = {4, 1, 1, 1, 1}; + + int32_t input1_data[] = {-1, 9, 7, 3}; + int32_t input2_data[] = {7}; + + bool expected_data[] = {false, true, true, false}; + int expected_dim[] = {4, 1, 1, 1, 4}; + + bool output_data[4]; + tflite::testing::TestComparisonInt( + tflite::Register_GREATER_EQUAL(), input1_dim, input1_data, input2_dim, + input2_data, expected_data, expected_dim, output_data); +} + +TF_LITE_MICRO_TEST(GreaterEqualBroadcastTwoD) { + int input1_dim[] = {4, 1, 1, 2, 4}; + int input2_dim[] = {4, 1, 1, 1, 4}; + + int32_t input1_data[] = {-1, 9, 7, 3, 2, 4, 2, 8}; + int32_t input2_data[] = {7, 1, 2, 4}; + + bool expected_data[] = {false, true, true, false, false, true, true, true}; + int expected_dim[] = {4, 1, 1, 2, 4}; + + bool output_data[8]; + tflite::testing::TestComparisonInt( + tflite::Register_GREATER_EQUAL(), input1_dim, input1_data, input2_dim, + input2_data, expected_data, expected_dim, output_data); +} + +TF_LITE_MICRO_TEST(LessFloat) { + int input1_dim[] = {4, 1, 1, 1, 4}; + int input2_dim[] = {4, 1, 1, 1, 4}; + + float input1_data[] = {0.1, 0.9, 0.7, 0.3}; + float input2_data[] = {0.1, 0.2, 0.6, 0.5}; + + bool expected_data[] = {false, false, false, true}; + int expected_dim[] = {4, 1, 1, 1, 4}; + + bool output_data[4]; + tflite::testing::TestComparisonFloat( + tflite::Register_LESS(), input1_dim, input1_data, input2_dim, input2_data, + expected_data, expected_dim, 
output_data); +} + +TF_LITE_MICRO_TEST(LessInt) { + int input1_dim[] = {4, 1, 1, 1, 4}; + int input2_dim[] = {4, 1, 1, 1, 4}; + + int32_t input1_data[] = {-1, 9, 7, 3}; + int32_t input2_data[] = {1, 2, 6, 5}; + + bool expected_data[] = {true, false, false, true}; + int expected_dim[] = {4, 1, 1, 1, 4}; + + bool output_data[4]; + tflite::testing::TestComparisonInt(tflite::Register_LESS(), input1_dim, + input1_data, input2_dim, input2_data, + expected_data, expected_dim, output_data); +} + +TF_LITE_MICRO_TEST(LessBroadcast) { + int input1_dim[] = {4, 1, 1, 1, 4}; + int input2_dim[] = {4, 1, 1, 1, 1}; + + int32_t input1_data[] = {-1, 9, 7, 3}; + int32_t input2_data[] = {7}; + + bool expected_data[] = {true, false, false, true}; + int expected_dim[] = {4, 1, 1, 1, 4}; + + bool output_data[4]; + tflite::testing::TestComparisonInt(tflite::Register_LESS(), input1_dim, + input1_data, input2_dim, input2_data, + expected_data, expected_dim, output_data); +} + +TF_LITE_MICRO_TEST(LessBroadcastTwoD) { + int input1_dim[] = {4, 1, 1, 2, 4}; + int input2_dim[] = {4, 1, 1, 1, 4}; + + int32_t input1_data[] = {-1, 9, 7, 3, 2, 4, 6, 8}; + int32_t input2_data[] = {7, 1, 2, 4}; + + bool expected_data[] = {true, false, false, true, true, false, false, false}; + int expected_dim[] = {4, 1, 1, 2, 4}; + + bool output_data[8]; + tflite::testing::TestComparisonInt(tflite::Register_LESS(), input1_dim, + input1_data, input2_dim, input2_data, + expected_data, expected_dim, output_data); +} + +TF_LITE_MICRO_TEST(LessEqualFloat) { + int input1_dim[] = {4, 1, 1, 1, 4}; + int input2_dim[] = {4, 1, 1, 1, 4}; + + float input1_data[] = {0.1, 0.9, 0.7, 0.3}; + float input2_data[] = {0.1, 0.2, 0.6, 0.5}; + + bool expected_data[] = {true, false, false, true}; + int expected_dim[] = {4, 1, 1, 1, 4}; + + bool output_data[4]; + tflite::testing::TestComparisonFloat( + tflite::Register_LESS_EQUAL(), input1_dim, input1_data, input2_dim, + input2_data, expected_data, expected_dim, output_data); +} + 
+TF_LITE_MICRO_TEST(LessEqualInt) { + int input1_dim[] = {4, 1, 1, 1, 4}; + int input2_dim[] = {4, 1, 1, 1, 4}; + + int32_t input1_data[] = {-1, 9, 7, 3}; + int32_t input2_data[] = {1, 2, 7, 5}; + + bool expected_data[] = {true, false, true, true}; + int expected_dim[] = {4, 1, 1, 1, 4}; + + bool output_data[4]; + tflite::testing::TestComparisonInt(tflite::Register_LESS_EQUAL(), input1_dim, + input1_data, input2_dim, input2_data, + expected_data, expected_dim, output_data); +} + +TF_LITE_MICRO_TEST(LessEqualBroadcast) { + int input1_dim[] = {4, 1, 1, 1, 4}; + int input2_dim[] = {4, 1, 1, 1, 1}; + + int32_t input1_data[] = {-1, 9, 7, 3}; + int32_t input2_data[] = {7}; + + bool expected_data[] = {true, false, true, true}; + int expected_dim[] = {4, 1, 1, 1, 4}; + + bool output_data[4]; + tflite::testing::TestComparisonInt(tflite::Register_LESS_EQUAL(), input1_dim, + input1_data, input2_dim, input2_data, + expected_data, expected_dim, output_data); +} + +TF_LITE_MICRO_TEST(LessEqualBroadcastTwoD) { + int input1_dim[] = {4, 1, 1, 2, 4}; + int input2_dim[] = {4, 1, 1, 1, 4}; + + int32_t input1_data[] = {-1, 9, 7, 3, 2, 4, 2, 8}; + int32_t input2_data[] = {7, 1, 2, 4}; + + bool expected_data[] = {true, false, false, true, true, false, true, false}; + int expected_dim[] = {4, 1, 1, 2, 4}; + + bool output_data[8]; + tflite::testing::TestComparisonInt(tflite::Register_LESS_EQUAL(), input1_dim, + input1_data, input2_dim, input2_data, + expected_data, expected_dim, output_data); +} + +TF_LITE_MICRO_TEST(EqualQuantizedInt8) { + int input1_dim[] = {4, 1, 2, 2, 1}; + int input2_dim[] = {4, 1, 2, 2, 1}; + + float input1_data[] = {1, -9, 7, 3}; + float input2_data[] = {-1, 2, 7, 5}; + + bool expected_data[] = {false, false, true, false}; + int expected_dim[] = {4, 1, 2, 2, 1}; + + const float input1_scale = 0.5; + const int input1_zero_point = -5; + const float input2_scale = 0.25; + const int input2_zero_point = 5; + int8_t input1_quantized[4]; + int8_t input2_quantized[4]; + + 
bool output_data[4]; + tflite::testing::TestComparisonQuantizedInt8( + tflite::Register_EQUAL(), input1_dim, input1_data, input1_quantized, + input1_scale, input1_zero_point, input2_dim, input2_data, + input2_quantized, input2_scale, input2_zero_point, expected_data, + expected_dim, output_data); +} + +TF_LITE_MICRO_TEST(NotEqualQuantizedInt8) { + int input1_dim[] = {4, 1, 2, 2, 1}; + int input2_dim[] = {4, 1, 2, 2, 1}; + + float input1_data[] = {1, -9, 7, 3}; + float input2_data[] = {1, 2, 7, 5}; + + bool expected_data[] = {false, true, false, true}; + int expected_dim[] = {4, 1, 2, 2, 1}; + + const float input1_scale = 0.5; + const int input1_zero_point = -5; + const float input2_scale = 0.25; + const int input2_zero_point = 5; + int8_t input1_quantized[4]; + int8_t input2_quantized[4]; + + bool output_data[4]; + tflite::testing::TestComparisonQuantizedInt8( + tflite::Register_NOT_EQUAL(), input1_dim, input1_data, input1_quantized, + input1_scale, input1_zero_point, input2_dim, input2_data, + input2_quantized, input2_scale, input2_zero_point, expected_data, + expected_dim, output_data); +} + +TF_LITE_MICRO_TEST(NotEqualQuantizedInt8WithBroadcast) { + const int num_shapes = 4; + const int max_shape_size = 5; + int test_shapes[num_shapes][max_shape_size] = { + {1, 6}, {2, 2, 3}, {3, 2, 1, 3}, {4, 1, 3, 1, 2}}; + + for (int i = 0; i < num_shapes; ++i) { + int* input1_dim = test_shapes[i]; + int input2_dim[] = {1, 1}; + float input1_data[] = {20, -2, -71, 8, 11, 20}; + float input2_data[] = {8}; + + bool expected_data[] = {true, true, true, false, true, true}; + int* expected_dim = input1_dim; + + const float input1_scale = 0.5; + const int input1_zero_point = -9; + int8_t input1_quantized[6]; + int8_t input2_quantized[6]; + + bool output_data[6]; + tflite::testing::TestComparisonQuantizedInt8( + tflite::Register_NOT_EQUAL(), input1_dim, input1_data, input1_quantized, + input1_scale, input1_zero_point, input2_dim, input2_data, + input2_quantized, input1_scale, 
input1_zero_point, expected_data, + expected_dim, output_data); + } +} + +TF_LITE_MICRO_TEST(GreaterQuantizedInt8WithBroadcast) { + const int num_shapes = 4; + const int max_shape_size = 5; + int test_shapes[num_shapes][max_shape_size] = { + {1, 6}, {2, 2, 3}, {3, 2, 1, 3}, {4, 1, 3, 1, 2}}; + + for (int i = 0; i < num_shapes; ++i) { + int* input1_dim = test_shapes[i]; + int input2_dim[] = {1, 1}; + float input1_data[] = {20, -2, -71, 8, 11, 20}; + float input2_data[] = {8}; + + bool expected_data[] = {true, false, false, false, true, true}; + int* expected_dim = input1_dim; + + const float input1_scale = 0.5; + const int input1_zero_point = -9; + int8_t input1_quantized[6]; + int8_t input2_quantized[6]; + + bool output_data[6]; + tflite::testing::TestComparisonQuantizedInt8( + tflite::Register_GREATER(), input1_dim, input1_data, input1_quantized, + input1_scale, input1_zero_point, input2_dim, input2_data, + input2_quantized, input1_scale, input1_zero_point, expected_data, + expected_dim, output_data); + } +} + +TF_LITE_MICRO_TEST(GreaterEqualQuantizedInt8WithBroadcast) { + const int num_shapes = 4; + const int max_shape_size = 5; + int test_shapes[num_shapes][max_shape_size] = { + {1, 6}, {2, 2, 3}, {3, 2, 1, 3}, {4, 1, 3, 1, 2}}; + + for (int i = 0; i < num_shapes; ++i) { + int* input1_dim = test_shapes[i]; + int input2_dim[] = {1, 1}; + float input1_data[] = {20, -2, -71, 8, 11, 20}; + float input2_data[] = {8}; + + bool expected_data[] = {true, false, false, true, true, true}; + int* expected_dim = input1_dim; + + const float input1_scale = 0.5; + const int input1_zero_point = -9; + int8_t input1_quantized[6]; + int8_t input2_quantized[6]; + + bool output_data[6]; + tflite::testing::TestComparisonQuantizedInt8( + tflite::Register_GREATER_EQUAL(), input1_dim, input1_data, + input1_quantized, input1_scale, input1_zero_point, input2_dim, + input2_data, input2_quantized, input1_scale, input1_zero_point, + expected_data, expected_dim, output_data); + } +} + 
+TF_LITE_MICRO_TEST(LessQuantizedInt8WithBroadcast) { + const int num_shapes = 4; + const int max_shape_size = 5; + int test_shapes[num_shapes][max_shape_size] = { + {1, 6}, {2, 2, 3}, {3, 2, 1, 3}, {4, 1, 3, 1, 2}}; + + for (int i = 0; i < num_shapes; ++i) { + int* input1_dim = test_shapes[i]; + int input2_dim[] = {1, 1}; + float input1_data[] = {20, -2, -71, 8, 11, 20}; + float input2_data[] = {8}; + + bool expected_data[] = {false, true, true, false, false, false}; + int* expected_dim = input1_dim; + + const float input1_scale = 0.5; + const int input1_zero_point = -9; + int8_t input1_quantized[6]; + int8_t input2_quantized[6]; + + bool output_data[6]; + tflite::testing::TestComparisonQuantizedInt8( + tflite::Register_LESS(), input1_dim, input1_data, input1_quantized, + input1_scale, input1_zero_point, input2_dim, input2_data, + input2_quantized, input1_scale, input1_zero_point, expected_data, + expected_dim, output_data); + } +} + +TF_LITE_MICRO_TEST(LessEqualQuantizedInt8WithBroadcast) { + const int num_shapes = 4; + const int max_shape_size = 5; + int test_shapes[num_shapes][max_shape_size] = { + {1, 6}, {2, 2, 3}, {3, 2, 1, 3}, {4, 1, 3, 1, 2}}; + + for (int i = 0; i < num_shapes; ++i) { + int* input1_dim = test_shapes[i]; + int input2_dim[] = {1, 1}; + float input1_data[] = {20, -2, -71, 8, 11, 20}; + float input2_data[] = {8}; + + bool expected_data[] = {false, true, true, true, false, false}; + int* expected_dim = input1_dim; + + const float input1_scale = 0.5; + const int input1_zero_point = -9; + int8_t input1_quantized[6]; + int8_t input2_quantized[6]; + + bool output_data[6]; + tflite::testing::TestComparisonQuantizedInt8( + tflite::Register_LESS_EQUAL(), input1_dim, input1_data, + input1_quantized, input1_scale, input1_zero_point, input2_dim, + input2_data, input2_quantized, input1_scale, input1_zero_point, + expected_data, expected_dim, output_data); + } +} + +TF_LITE_MICRO_TESTS_END diff --git a/tensorflow/lite/micro/kernels/concatenation.cc 
b/tensorflow/lite/micro/kernels/concatenation.cc new file mode 100644 index 0000000..b4a838f --- /dev/null +++ b/tensorflow/lite/micro/kernels/concatenation.cc @@ -0,0 +1,258 @@ +/* Copyright 2022 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ +#include "tensorflow/lite/kernels/internal/reference/concatenation.h" + +#include <cstdint> + +#include "tensorflow/lite/c/builtin_op_data.h" +#include "tensorflow/lite/c/common.h" +#include "tensorflow/lite/kernels/internal/tensor_ctypes.h" +#include "tensorflow/lite/kernels/internal/types.h" +#include "tensorflow/lite/kernels/kernel_util.h" +#include "tensorflow/lite/micro/kernels/kernel_util.h" +#include "tensorflow/lite/micro/micro_log.h" + +namespace tflite { + +namespace { + +constexpr int kMaxInputNum = 10;  // Maximum number of input tensors +constexpr int kOutputTensor = 0; + +struct OpData { + ConcatenationParams params; +}; + +// Handles negative axis index, coerces to positive index value. +inline int CalculatePositiveAxis(int axis, const TfLiteTensor* output_tensor) { + if (axis >= 0) { + return axis; + } else { + return NumDimensions(output_tensor) + axis; + } +} + +// The following functions are helpers to get tensor data in the format that the +// reference op implementation expects. They provide the same functionality as +// class VectorOfTensors and class VectorOfQuantizedTensors in TFLite. 
+ +// Gets shapes from a list of tensors. +inline void GetAllInputTensorShapes(const TfLiteContext* context, + const TfLiteNode* node, + RuntimeShape all_shapes[kMaxInputNum]) { + TFLITE_DCHECK(context != nullptr); + TFLITE_DCHECK(node != nullptr); + for (int i = 0; i < node->inputs->size; ++i) { + const TfLiteEvalTensor* t = tflite::micro::GetEvalInput(context, node, i); + RuntimeShape shape = tflite::micro::GetTensorShape(t); + all_shapes[i].ReplaceWith(shape.DimensionsCount(), shape.DimsData()); + } +} + +// Get shape pointers from a list of shapes. +inline void GetShapesPointers(const RuntimeShape* shapes, size_t num, + const RuntimeShape* pointers[]) { + for (size_t i = 0; i < num; ++i) { + pointers[i] = &shapes[i]; + } +} + +// Gets data pointers from a list of tensors. +template <typename T> +inline void GetAllInputTensorData(const TfLiteContext* context, + const TfLiteNode* node, + T* all_data[kMaxInputNum]) { + TFLITE_DCHECK(context != nullptr); + TFLITE_DCHECK(node != nullptr); + for (int i = 0; i < node->inputs->size; ++i) { + const TfLiteEvalTensor* t = tflite::micro::GetEvalInput(context, node, i); + all_data[i] = tflite::micro::GetTensorData<T>(t); + } +} + +template <typename data_type> +void EvalUnquantized(TfLiteContext* context, TfLiteNode* node) { + // Collect the shapes and data pointer of input tensors + RuntimeShape inputs_shape[kMaxInputNum]; + const RuntimeShape* inputs_shape_ptr[kMaxInputNum]; + const data_type* inputs_data[kMaxInputNum]; + GetAllInputTensorShapes(context, node, inputs_shape); + GetShapesPointers(inputs_shape, node->inputs->size, inputs_shape_ptr); + GetAllInputTensorData(context, node, inputs_data); + + TfLiteEvalTensor* output = + tflite::micro::GetEvalOutput(context, node, kOutputTensor); + + TFLITE_DCHECK(node->user_data != nullptr); + const OpData* data = static_cast<const OpData*>(node->user_data); + + reference_ops::Concatenation(data->params, inputs_shape_ptr, inputs_data, + tflite::micro::GetTensorShape(output), + tflite::micro::GetTensorData<data_type>(output)); +} + +void* 
Init(TfLiteContext* context, const char* buffer, size_t length) { + TFLITE_DCHECK(context->AllocatePersistentBuffer != nullptr); + return context->AllocatePersistentBuffer(context, sizeof(OpData)); +} + +TfLiteStatus Prepare(TfLiteContext* context, TfLiteNode* node) { + // This function only checks the types. Additional shape validations are + // performed in the reference implementation called during Eval(). + const TfLiteConcatenationParams* params = + reinterpret_cast<TfLiteConcatenationParams*>(node->builtin_data); + + MicroContext* micro_context = GetMicroContext(context); + + TfLiteTensor* input_tensor = micro_context->AllocateTempInputTensor(node, 0); + TF_LITE_ENSURE(context, input_tensor != nullptr); + TfLiteType input_type = input_tensor->type; + TfLiteTensor* output_tensor = + micro_context->AllocateTempOutputTensor(node, kOutputTensor); + TF_LITE_ENSURE(context, output_tensor != nullptr); + TfLiteType output_type = output_tensor->type; + + micro_context->DeallocateTempTfLiteTensor(input_tensor); + micro_context->DeallocateTempTfLiteTensor(output_tensor); + + // Check activation and input type + TF_LITE_ENSURE_EQ(context, params->activation, kTfLiteActNone); + TF_LITE_ENSURE(context, + input_type == kTfLiteFloat32 || input_type == kTfLiteInt8 || + input_type == kTfLiteInt16 || input_type == kTfLiteInt32 || + input_type == kTfLiteInt64 || input_type == kTfLiteBool); + + // Output type must match input type + TF_LITE_ENSURE_EQ(context, output_type, input_type); + + // This implementation does not support large number of input tensors + const int num_inputs = NumInputs(node); + TF_LITE_ENSURE(context, num_inputs <= kMaxInputNum); + + // Shapes with dimensions >4 are not yet supported with static allocation. 
+ for (int i = 0; i < num_inputs; ++i) { + TfLiteTensor* input = micro_context->AllocateTempInputTensor(node, i); + TF_LITE_ENSURE(context, input != nullptr); + int num_dimensions = NumDimensions(input); + + if (num_dimensions > RuntimeShape::kMaxSmallSize) { + MicroPrintf( + "Op Concatenation does not currently support num dimensions > %d " + "Tensor has %d dimensions.", + RuntimeShape::kMaxSmallSize, num_dimensions); + return kTfLiteError; + } + micro_context->DeallocateTempTfLiteTensor(input); + } + + // Calculate OpData. + TFLITE_DCHECK(node->user_data != nullptr); + OpData* data = static_cast<OpData*>(node->user_data); + + TfLiteTensor* output = + micro_context->AllocateTempOutputTensor(node, kOutputTensor); + TF_LITE_ENSURE(context, output != nullptr); + + switch (output_type) {  // Already know in/outtypes are same. + case kTfLiteBool: + case kTfLiteFloat32: + case kTfLiteInt16: + case kTfLiteInt32: + case kTfLiteInt64: { + data->params.axis = CalculatePositiveAxis(params->axis, output); + data->params.inputs_count = node->inputs->size; + break; + } + case kTfLiteInt8: { + data->params.axis = CalculatePositiveAxis(params->axis, output); + data->params.inputs_count = node->inputs->size; + + float* input_scales = + reinterpret_cast<float*>(context->AllocatePersistentBuffer( + context, node->inputs->size * sizeof(float))); + + int32_t* input_zero_points = + reinterpret_cast<int32_t*>(context->AllocatePersistentBuffer( + context, node->inputs->size * sizeof(int32_t))); + + // Allocate persistent scale and zeropoint buffers. 
+ // Store input scale and zero point values in OpParams: + for (int i = 0; i < node->inputs->size; ++i) { + TfLiteTensor* t = micro_context->AllocateTempInputTensor(node, i); + TF_LITE_ENSURE(context, t != nullptr); + input_scales[i] = t->params.scale; + input_zero_points[i] = t->params.zero_point; + micro_context->DeallocateTempTfLiteTensor(t); + } + + data->params.input_scale = input_scales; + data->params.input_zeropoint = input_zero_points; + data->params.output_zeropoint = output->params.zero_point; + data->params.output_scale = output->params.scale; + break; + } + default: + MicroPrintf("Op Concatenation does not currently support Type '%s'.", + TfLiteTypeGetName(output_type)); + return kTfLiteError; + } + + micro_context->DeallocateTempTfLiteTensor(output); + + return kTfLiteOk; +} + +TfLiteStatus Eval(TfLiteContext* context, TfLiteNode* node) { + const TfLiteEvalTensor* output_tensor = + tflite::micro::GetEvalOutput(context, node, kOutputTensor); + TF_LITE_ENSURE(context, output_tensor != nullptr); + TfLiteType output_type = output_tensor->type; + + switch (output_type) { // Already know in/outtypes are same. 
+ case kTfLiteFloat32: + EvalUnquantized<float>(context, node); + break; + case kTfLiteInt32: + EvalUnquantized<int32_t>(context, node); + break; + case kTfLiteInt8: + EvalUnquantized<int8_t>(context, node); + break; + case kTfLiteInt64: + EvalUnquantized<int64_t>(context, node); + break; + case kTfLiteInt16: + EvalUnquantized<int16_t>(context, node); + break; + case kTfLiteBool: + EvalUnquantized<bool>(context, node); + break; + + default: + MicroPrintf("Op Concatenation does not currently support Type '%s'.", + TfLiteTypeGetName(output_type)); + return kTfLiteError; + } + + return kTfLiteOk; +} + +} // namespace + +TFLMRegistration Register_CONCATENATION() { + return tflite::micro::RegisterOp(Init, Prepare, Eval); +} + +} // namespace tflite diff --git a/tensorflow/lite/micro/kernels/concatenation_test.cc b/tensorflow/lite/micro/kernels/concatenation_test.cc new file mode 100644 index 0000000..ddbc74d --- /dev/null +++ b/tensorflow/lite/micro/kernels/concatenation_test.cc @@ -0,0 +1,372 @@ +/* Copyright 2022 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/ +#include <stdint.h> + +#include "tensorflow/lite/c/builtin_op_data.h" +#include "tensorflow/lite/c/common.h" +#include "tensorflow/lite/micro/kernels/kernel_runner.h" +#include "tensorflow/lite/micro/test_helpers.h" +#include "tensorflow/lite/micro/testing/micro_test.h" + +namespace tflite { +namespace testing { +namespace { + +template <typename T> +void TestConcatenateOneInput(int* input1_dims_data, const T* input1_data, + int axis, int* output_dims_data, T* output_data) { + TfLiteIntArray* input1_dims = IntArrayFromInts(input1_dims_data); + TfLiteIntArray* output_dims = IntArrayFromInts(output_dims_data); + + constexpr int input_size = 1; + constexpr int output_size = 1; + constexpr int tensors_size = input_size + output_size; + TfLiteTensor tensors[tensors_size] = {CreateTensor(input1_data, input1_dims), + CreateTensor(output_data, output_dims)}; + + int inputs_array_data[] = {1, 0}; + TfLiteIntArray* inputs_array = IntArrayFromInts(inputs_array_data); + int outputs_array_data[] = {1, 1}; + TfLiteIntArray* outputs_array = IntArrayFromInts(outputs_array_data); + + TfLiteConcatenationParams builtin_data = { + .axis = axis, + .activation = kTfLiteActNone // Only activation supported in this impl + }; + + const TFLMRegistration registration = Register_CONCATENATION(); + micro::KernelRunner runner(registration, tensors, tensors_size, inputs_array, + outputs_array, + reinterpret_cast<void*>(&builtin_data)); + + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, runner.InitAndPrepare()); + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, runner.Invoke()); +} + +template <typename T> +void TestConcatenateTwoInputs(int* input1_dims_data, const T* input1_data, + int* input2_dims_data, const T* input2_data, + int axis, int* output_dims_data, T* output_data) { + TfLiteIntArray* input1_dims = IntArrayFromInts(input1_dims_data); + TfLiteIntArray* input2_dims = IntArrayFromInts(input2_dims_data); + TfLiteIntArray* output_dims = IntArrayFromInts(output_dims_data); + + 
constexpr int input_size = 2; + constexpr int output_size = 1; + constexpr int tensors_size = input_size + output_size; + TfLiteTensor tensors[tensors_size] = {CreateTensor(input1_data, input1_dims), + CreateTensor(input2_data, input2_dims), + CreateTensor(output_data, output_dims)}; + + int inputs_array_data[] = {2, 0, 1}; + TfLiteIntArray* inputs_array = IntArrayFromInts(inputs_array_data); + int outputs_array_data[] = {1, 2}; + TfLiteIntArray* outputs_array = IntArrayFromInts(outputs_array_data); + + TfLiteConcatenationParams builtin_data = { + .axis = axis, + .activation = kTfLiteActNone // Only activation supported in this impl + }; + + const TFLMRegistration registration = Register_CONCATENATION(); + micro::KernelRunner runner(registration, tensors, tensors_size, inputs_array, + outputs_array, + reinterpret_cast<void*>(&builtin_data)); + + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, runner.InitAndPrepare()); + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, runner.Invoke()); +} + +void TestConcatenateTwoFloatInputs( + int* input1_dims_data, const float* input1_data, int* input2_dims_data, + const float* input2_data, int axis, int* output_dims_data, + const float* expected_output_data, float* output_data) { + TestConcatenateTwoInputs(input1_dims_data, input1_data, input2_dims_data, + input2_data, axis, output_dims_data, output_data); + + TfLiteIntArray* dims = tflite::testing::IntArrayFromInts(output_dims_data); + const int output_dims_count = ElementCount(*dims); + for (int i = 0; i < output_dims_count; ++i) { + TF_LITE_MICRO_EXPECT_NEAR(expected_output_data[i], output_data[i], 1e-5f); + } +} + +template <typename T> +void TestConcatenateQuantizedTwoInputs( + int* input1_dims_data, const T* input1_data, int* input2_dims_data, + const T* input2_data, const float input_scale, const int input_zero_point, + int axis, int* output_dims_data, const T* expected_output_data, + const float output_scale, const int output_zero_point, T* output_data) { + TfLiteIntArray* input1_dims = 
IntArrayFromInts(input1_dims_data); + TfLiteIntArray* input2_dims = IntArrayFromInts(input2_dims_data); + TfLiteIntArray* output_dims = IntArrayFromInts(output_dims_data); + + constexpr int input_size = 2; + constexpr int output_size = 1; + constexpr int tensors_size = input_size + output_size; + TfLiteTensor tensors[tensors_size] = { + CreateQuantizedTensor(input1_data, input1_dims, input_scale, + input_zero_point), + CreateQuantizedTensor(input2_data, input2_dims, input_scale, + input_zero_point), + CreateQuantizedTensor(output_data, output_dims, output_scale, + output_zero_point)}; + + int inputs_array_data[] = {2, 0, 1}; + TfLiteIntArray* inputs_array = IntArrayFromInts(inputs_array_data); + int outputs_array_data[] = {1, 2}; + TfLiteIntArray* outputs_array = IntArrayFromInts(outputs_array_data); + + TfLiteConcatenationParams builtin_data = { + .axis = axis, + .activation = kTfLiteActNone // Only activation supported in this impl + }; + + const TFLMRegistration registration = Register_CONCATENATION(); + micro::KernelRunner runner(registration, tensors, tensors_size, inputs_array, + outputs_array, + reinterpret_cast(&builtin_data)); + + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, runner.InitAndPrepare()); + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, runner.Invoke()); + + const int output_dims_count = ElementCount(*output_dims); + for (int i = 0; i < output_dims_count; ++i) { + TF_LITE_MICRO_EXPECT_EQ(expected_output_data[i], output_data[i]); + } +} + +} // namespace +} // namespace testing +} // namespace tflite + +TF_LITE_MICRO_TESTS_BEGIN + +TF_LITE_MICRO_TEST(BoolTypeOneInput) { + int input_shape[] = {3, 2, 1, 2}; + int output_shape[] = {3, 2, 1, 2}; + const bool input_value[] = {true, false, false, true}; + int axis = 1; + + bool output_data[4]; + tflite::testing::TestConcatenateOneInput(input_shape, input_value, axis, + output_shape, output_data); + + TfLiteIntArray* dims = tflite::testing::IntArrayFromInts(output_shape); + const int output_dims_count = 
tflite::ElementCount(*dims); + for (int i = 0; i < output_dims_count; ++i) { + TF_LITE_MICRO_EXPECT_EQ(input_value[i], output_data[i]); + } +} + +TF_LITE_MICRO_TEST(BoolTypeTwoInputs) { + int input1_shape[] = {3, 2, 1, 2}; + const bool input1_value[] = {false, false, false, false}; + int input2_shape[] = {3, 2, 3, 2}; + const bool input2_value[] = {true, true, true, true, true, true, + true, true, true, true, true, true}; + + const bool expected_output[] = {false, false, true, true, true, true, + true, true, false, false, true, true, + true, true, true, true}; + + const int axis = 1; + int output_shape[] = {3, 2, 4, 2}; + bool output_data[16]; + + tflite::testing::TestConcatenateTwoInputs(input1_shape, input1_value, + input2_shape, input2_value, axis, + output_shape, output_data); + + TfLiteIntArray* dims = tflite::testing::IntArrayFromInts(output_shape); + const int output_dims_count = tflite::ElementCount(*dims); + for (int i = 0; i < output_dims_count; ++i) { + TF_LITE_MICRO_EXPECT_EQ(expected_output[i], output_data[i]); + } +} + +TF_LITE_MICRO_TEST(TwoInputsAllAxesCombinations) { + // Concatenate the same two input tensors along all possible axes. 
+ + int input_shape[] = {2, 2, 3}; + const float input1_value[] = {1.0f, 2.0f, 3.0f, 4.0f, 5.0f, 6.0f}; + const float input2_value[] = {7.0f, 8.0f, 9.0f, 10.0f, 11.0f, 12.0f}; + + // expected output when concatenating on axis 0 + int output_shape_axis0[] = {2, 4, 3}; + const float output_value_axis0[] = {1.0f, 2.0f, 3.0f, 4.0f, 5.0f, 6.0f, + 7.0f, 8.0f, 9.0f, 10.0f, 11.0f, 12.0f}; + + // expected output when concatenating on axis 1 + int output_shape_axis1[] = {2, 2, 6}; + const float output_value_axis1[] = {1.0f, 2.0f, 3.0f, 7.0f, 8.0f, 9.0f, + 4.0f, 5.0f, 6.0f, 10.0f, 11.0f, 12.0f}; + + float output_data[12]; + + // Axis = 0 + tflite::testing::TestConcatenateTwoFloatInputs( + input_shape, input1_value, input_shape, input2_value, /* axis */ 0, + output_shape_axis0, output_value_axis0, output_data); + + // Axis = -2 (equivalent to axis = 0) + tflite::testing::TestConcatenateTwoFloatInputs( + input_shape, input1_value, input_shape, input2_value, /* axis */ -2, + output_shape_axis0, output_value_axis0, output_data); + + // Axis = 1 + tflite::testing::TestConcatenateTwoFloatInputs( + input_shape, input1_value, input_shape, input2_value, /* axis */ 1, + output_shape_axis1, output_value_axis1, output_data); + + // Axis = -1 (equivalent to axis = 1) + tflite::testing::TestConcatenateTwoFloatInputs( + input_shape, input1_value, input_shape, input2_value, /* axis */ -1, + output_shape_axis1, output_value_axis1, output_data); +} + +TF_LITE_MICRO_TEST(TwoInputsQuantizedInt8) { + const int axis = 2; + int input_shape[] = {3, 2, 1, 2}; + int output_shape[] = {3, 2, 1, 4}; + + const float input_scale = 0.1f; + const int input_zero_point = 0; + const float output_scale = 0.1f; + const int output_zero_point = 0; + + const int8_t input1_values[] = {1, 2, 3, 4}; + + const int8_t input2_values[] = {5, 6, 7, 8}; + + const int8_t output_value[] = {1, 2, 5, 6, 3, 4, 7, 8}; + + int8_t output_data[8]; + tflite::testing::TestConcatenateQuantizedTwoInputs( + input_shape, input1_values, 
input_shape, input2_values, input_scale, + input_zero_point, axis, output_shape, output_value, output_scale, + output_zero_point, output_data); +} + +TF_LITE_MICRO_TEST(TwoInputsQuantizedInt16) { + const int axis = 2; + int input_shape[] = {3, 2, 1, 2}; + int output_shape[] = {3, 2, 1, 4}; + + const float input_scale = 0.1f; + const int input_zero_point = 0; + const float output_scale = 0.1f; + const int output_zero_point = 0; + + const int16_t input1_values[] = {1, 2, 3, 4}; + + const int16_t input2_values[] = {5, 6, 7, 8}; + + const int16_t output_value[] = {1, 2, 5, 6, 3, 4, 7, 8}; + + int16_t output_data[8]; + tflite::testing::TestConcatenateQuantizedTwoInputs( + input_shape, input1_values, input_shape, input2_values, input_scale, + input_zero_point, axis, output_shape, output_value, output_scale, + output_zero_point, output_data); +} + +TF_LITE_MICRO_TEST(ThreeDimensionalTwoInputsDifferentShapes) { + const int axis = 1; + + int input1_shape[] = {3, 2, 1, 2}; + int input2_shape[] = {3, 2, 3, 2}; + int output_shape[] = {3, 2, 4, 2}; + + const float input1_values[] = {1.0f, 3.0f, 4.0f, 7.0f}; + const float input2_values[] = {1.0f, 2.0f, 3.0f, 4.0f, 5.0f, 6.0f, + 7.0f, 8.0f, 9.0f, 10.0f, 11.0f, 12.0f}; + const float output_values[] = {1.0f, 3.0f, 1.0f, 2.0f, 3.0f, 4.0f, + 5.0f, 6.0f, 4.0f, 7.0f, 7.0f, 8.0f, + 9.0f, 10.0f, 11.0f, 12.0f}; + + float output_data[16]; + tflite::testing::TestConcatenateTwoFloatInputs( + input1_shape, input1_values, input2_shape, input2_values, axis, + output_shape, output_values, output_data); +} + +TF_LITE_MICRO_TEST(TwoInputsFiveDimensionsAllAxesCombinations) { + // Concatenate the same two input tensors along all possible axes. 
+ int input_shape[] = {5, 2, 1, 2, 1, 3}; + const int kInputSize = 12; + const float input1_value[] = {1.0f, 2.0f, 3.0f, 4.0f, 5.0f, 6.0f, + 7.0f, 8.0f, 9.0f, 10.0f, 11.0f, 12.0f}; + const float input2_value[] = {13.0f, 14.0f, 15.0f, 16.0f, 17.0f, 18.0f, + 19.0f, 20.0f, 21.0f, 22.0f, 23.0f, 24.0f}; + + float output_data[2 * kInputSize]; + + // Axis = 0 + int output_shape_axis0[] = {5, 4, 1, 2, 1, 3}; + const float output_value_axis0[2 * kInputSize] = { + 1.0f, 2.0f, 3.0f, 4.0f, 5.0f, 6.0f, 7.0f, 8.0f, + 9.0f, 10.0f, 11.0f, 12.0f, 13.0f, 14.0f, 15.0f, 16.0f, + 17.0f, 18.0f, 19.0f, 20.0f, 21.0f, 22.0f, 23.0f, 24.0f}; + tflite::testing::TestConcatenateTwoFloatInputs( + input_shape, input1_value, input_shape, input2_value, /* axis */ 0, + output_shape_axis0, output_value_axis0, output_data); + + // Axis = 4 + int output_shape_axis4[] = {5, 2, 1, 2, 1, 6}; + const float output_value_axis4[2 * kInputSize] = { + 1.0f, 2.0f, 3.0f, 13.0f, 14.0f, 15.0f, 4.0f, 5.0f, + 6.0f, 16.0f, 17.0f, 18.0f, 7.0f, 8.0f, 9.0f, 19.0f, + 20.0f, 21.0f, 10.0f, 11.0f, 12.0f, 22.0f, 23.0f, 24.0f}; + tflite::testing::TestConcatenateTwoFloatInputs( + input_shape, input1_value, input_shape, input2_value, /* axis */ 4, + output_shape_axis4, output_value_axis4, output_data); + + // Axis = -2 + int output_shape_axis_minus2[] = {5, 2, 1, 2, 2, 3}; + const float output_value_axis_minus2[2 * kInputSize] = { + 1.0f, 2.0f, 3.0f, 13.0f, 14.0f, 15.0f, 4.0f, 5.0f, + 6.0f, 16.0f, 17.0f, 18.0f, 7.0f, 8.0f, 9.0f, 19.0f, + 20.0f, 21.0f, 10.0f, 11.0f, 12.0f, 22.0f, 23.0f, 24.0f}; + tflite::testing::TestConcatenateTwoFloatInputs( + input_shape, input1_value, input_shape, input2_value, /* axis */ -2, + output_shape_axis_minus2, output_value_axis_minus2, output_data); +} + +TF_LITE_MICRO_TEST(TwoInputsQuantizedInt8FiveDimensions) { + const int axis = 2; + int input_shape[] = {5, 2, 1, 2, 1, 3}; + const int kInputSize = 12; + const int8_t input1_values[] = {1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12}; + const int8_t 
input2_values[] = {13, 14, 15, 16, 17, 18, + 19, 20, 21, 22, 23, 24}; + + const float input_scale = 0.1f; + const int input_zero_point = 0; + const float output_scale = 0.1f; + const int output_zero_point = 0; + + const int8_t output_value[] = {1, 2, 3, 4, 5, 6, 13, 14, 15, 16, 17, 18, + 7, 8, 9, 10, 11, 12, 19, 20, 21, 22, 23, 24}; + int output_shape[] = {5, 2, 1, 4, 1, 3}; + int8_t output_data[2 * kInputSize]; + + tflite::testing::TestConcatenateQuantizedTwoInputs( + input_shape, input1_values, input_shape, input2_values, input_scale, + input_zero_point, axis, output_shape, output_value, output_scale, + output_zero_point, output_data); +} + +TF_LITE_MICRO_TESTS_END diff --git a/tensorflow/lite/micro/kernels/conv.cc b/tensorflow/lite/micro/kernels/conv.cc new file mode 100644 index 0000000..550f5b0 --- /dev/null +++ b/tensorflow/lite/micro/kernels/conv.cc @@ -0,0 +1,168 @@ +/* Copyright 2019 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/ + +#include "tensorflow/lite/micro/kernels/conv.h" + +#include "tensorflow/lite/c/builtin_op_data.h" +#include "tensorflow/lite/c/common.h" +#include "tensorflow/lite/kernels/internal/portable_tensor_utils.h" +#include "tensorflow/lite/kernels/internal/reference/conv.h" +#include "tensorflow/lite/kernels/internal/reference/integer_ops/conv.h" +#include "tensorflow/lite/kernels/kernel_util.h" +#include "tensorflow/lite/micro/kernels/kernel_util.h" +#include "tensorflow/lite/micro/micro_log.h" + +namespace tflite { +namespace { + +void* Init(TfLiteContext* context, const char* buffer, size_t length) { + TFLITE_DCHECK(context->AllocatePersistentBuffer != nullptr); + return context->AllocatePersistentBuffer(context, sizeof(OpDataConv)); +} + +TfLiteStatus Eval(TfLiteContext* context, TfLiteNode* node) { + const TfLiteEvalTensor* input = + tflite::micro::GetEvalInput(context, node, kConvInputTensor); + const TfLiteEvalTensor* filter = + tflite::micro::GetEvalInput(context, node, kConvWeightsTensor); + const TfLiteEvalTensor* bias = + (NumInputs(node) == 3) + ? tflite::micro::GetEvalInput(context, node, kConvBiasTensor) + : nullptr; + TfLiteEvalTensor* output = + tflite::micro::GetEvalOutput(context, node, kConvOutputTensor); + + TFLITE_DCHECK(node->builtin_data != nullptr); + const auto& params = + *(reinterpret_cast(node->builtin_data)); + TFLITE_DCHECK(node->user_data != nullptr); + const auto& data = *(static_cast(node->user_data)); + + TF_LITE_ENSURE_EQ(context, input->type, output->type); + TF_LITE_ENSURE_MSG( + context, + input->type == filter->type || + (input->type == kTfLiteInt16 && filter->type == kTfLiteInt8) || + (input->type == kTfLiteInt8 && filter->type == kTfLiteInt4), + "Hybrid models are not supported on TFLite Micro."); + + switch (input->type) { // Already know in/out types are same. 
+ case kTfLiteFloat32: { + tflite::reference_ops::Conv( + ConvParamsFloat(params, data), tflite::micro::GetTensorShape(input), + tflite::micro::GetTensorData(input), + tflite::micro::GetTensorShape(filter), + tflite::micro::GetTensorData(filter), + tflite::micro::GetTensorShape(bias), + tflite::micro::GetOptionalTensorData(bias), + tflite::micro::GetTensorShape(output), + tflite::micro::GetTensorData(output), + tflite::micro::GetTensorShape(nullptr), nullptr); + break; + } + case kTfLiteInt16: { + switch (bias->type) { + case kTfLiteInt32: { + reference_integer_ops::ConvPerChannel( + ConvParamsQuantized(params, data), + data.per_channel_output_multiplier, data.per_channel_output_shift, + tflite::micro::GetTensorShape(input), + tflite::micro::GetTensorData(input), + tflite::micro::GetTensorShape(filter), + tflite::micro::GetTensorData(filter), + tflite::micro::GetTensorShape(bias), + tflite::micro::GetOptionalTensorData(bias), + tflite::micro::GetTensorShape(output), + tflite::micro::GetTensorData(output)); + break; + } + case kTfLiteInt64: { + reference_integer_ops::ConvPerChannel( + ConvParamsQuantized(params, data), + data.per_channel_output_multiplier, data.per_channel_output_shift, + tflite::micro::GetTensorShape(input), + tflite::micro::GetTensorData(input), + tflite::micro::GetTensorShape(filter), + tflite::micro::GetTensorData(filter), + tflite::micro::GetTensorShape(bias), + tflite::micro::GetOptionalTensorData(bias), + tflite::micro::GetTensorShape(output), + tflite::micro::GetTensorData(output)); + break; + } + default: + MicroPrintf("Bias type %s (%d) not supported.", + TfLiteTypeGetName(bias->type), bias->type); + return kTfLiteError; + } + break; + } + case kTfLiteInt8: { + switch (filter->type) { + case kTfLiteInt4: { + int8_t* unpacked_filter_data = static_cast( + context->GetScratchBuffer(context, data.filter_buffer_index)); + tflite::tensor_utils::UnpackDenseInt4IntoInt8( + tflite::micro::GetTensorData(filter), + 
tflite::micro::GetTensorShape(filter).FlatSize(), + unpacked_filter_data); + reference_integer_ops::ConvPerChannel( + ConvParamsQuantized(params, data), + data.per_channel_output_multiplier, data.per_channel_output_shift, + tflite::micro::GetTensorShape(input), + tflite::micro::GetTensorData(input), + tflite::micro::GetTensorShape(filter), unpacked_filter_data, + tflite::micro::GetTensorShape(bias), + tflite::micro::GetOptionalTensorData(bias), + tflite::micro::GetTensorShape(output), + tflite::micro::GetTensorData(output)); + break; + } + case kTfLiteInt8: { + reference_integer_ops::ConvPerChannel( + ConvParamsQuantized(params, data), + data.per_channel_output_multiplier, data.per_channel_output_shift, + tflite::micro::GetTensorShape(input), + tflite::micro::GetTensorData(input), + tflite::micro::GetTensorShape(filter), + tflite::micro::GetTensorData(filter), + tflite::micro::GetTensorShape(bias), + tflite::micro::GetOptionalTensorData(bias), + tflite::micro::GetTensorShape(output), + tflite::micro::GetTensorData(output)); + break; + } + default: + MicroPrintf("Weight type %s (%d) not supported.", + TfLiteTypeGetName(filter->type), filter->type); + return kTfLiteError; + } + break; + } + default: + MicroPrintf("Type %s (%d) not supported.", TfLiteTypeGetName(input->type), + input->type); + return kTfLiteError; + } + return kTfLiteOk; +} + +} // namespace + +TFLMRegistration Register_CONV_2D() { + return tflite::micro::RegisterOp(Init, ConvPrepare, Eval); +} + +} // namespace tflite diff --git a/tensorflow/lite/micro/kernels/conv.h b/tensorflow/lite/micro/kernels/conv.h new file mode 100644 index 0000000..3b122ad --- /dev/null +++ b/tensorflow/lite/micro/kernels/conv.h @@ -0,0 +1,114 @@ +/* Copyright 2022 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. 
+You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#ifndef TENSORFLOW_LITE_MICRO_KERNELS_CONV_H_ +#define TENSORFLOW_LITE_MICRO_KERNELS_CONV_H_ + +#include + +#include "tensorflow/lite/c/builtin_op_data.h" +#include "tensorflow/lite/kernels/internal/types.h" +#include "tensorflow/lite/micro/micro_common.h" + +namespace tflite { + +struct OpDataConv { + TfLitePaddingValues padding; + + // Cached tensor zero point values for quantized operations. + int32_t input_zero_point; + int32_t filter_zero_point; + int32_t output_zero_point; + + // The scaling factor from input to output (aka the 'real multiplier') can + // be represented as a fixed point multiplier plus a left shift. + int32_t output_multiplier; + int output_shift; + + // Per channel output multiplier and shift. + int32_t* per_channel_output_multiplier; + int32_t* per_channel_output_shift; + + // The range of the fused activation layer. For example for kNone and + // uint8_t these would be 0 and 255. + int32_t output_activation_min; + int32_t output_activation_max; + + // A buffer used to store unpacked filter values. This is used if the source + // tensor is of n-bit precision that cannot be easily processed by kernels. + int filter_buffer_index; +}; + +extern const int kConvInputTensor; +extern const int kConvWeightsTensor; +extern const int kConvBiasTensor; +extern const int kConvOutputTensor; +extern const int kConvQuantizedDimension; + +// Returns a ConvParams struct with all the parameters needed for a +// float computation. 
+ConvParams ConvParamsFloat(const TfLiteConvParams& params, + const OpDataConv& data); + +// Returns a ConvParams struct with all the parameters needed for a +// quantized computation. +ConvParams ConvParamsQuantized(const TfLiteConvParams& params, + const OpDataConv& data); + +TfLiteStatus CalculateOpDataConv(TfLiteContext* context, TfLiteNode* node, + const TfLiteConvParams& params, int width, + int height, int filter_width, + int filter_height, int out_width, + int out_height, const TfLiteType data_type, + OpDataConv* data); + +TfLiteStatus ConvPrepare(TfLiteContext* context, TfLiteNode* node); + +// This is the most generic TFLMRegistration. The actual supported types +// may still be target dependent. The only requirement is that every +// implementation (reference or optimized) must define this function. +TFLMRegistration Register_CONV_2D(); + +#if defined(XTENSA) +// Returns a TFLMRegistration struct for kernel variant that only supports +// int8 activations and int8 weights and always calls the reference +// implementation. +TFLMRegistration Register_CONV_2D_INT8REF(); +#else +inline TFLMRegistration Register_CONV_2D_INT8REF() { + return Register_CONV_2D(); +} +#endif + +#if defined(CMSIS_NN) +// Returns a TFLMRegistration struct for kernel variant that only supports +// int8 activations and int8 weights and uses the latency optimized +// implementations. +TFLMRegistration Register_CONV_2D_INT8(); + +// Returns a TFLMRegistration struct for kernel variant that only supports +// int16 activations and int8 weights and uses the latency optimized +// implementations. 
+TFLMRegistration Register_CONV_2D_INT16(); + +#else +inline TFLMRegistration Register_CONV_2D_INT8() { return Register_CONV_2D(); } + +inline TFLMRegistration Register_CONV_2D_INT16() { return Register_CONV_2D(); } +#endif + +} // namespace tflite + +#endif // TENSORFLOW_LITE_MICRO_KERNELS_CONV_H_ diff --git a/tensorflow/lite/micro/kernels/conv_common.cc b/tensorflow/lite/micro/kernels/conv_common.cc new file mode 100644 index 0000000..c548c93 --- /dev/null +++ b/tensorflow/lite/micro/kernels/conv_common.cc @@ -0,0 +1,202 @@ +/* Copyright 2021 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#include "tensorflow/lite/c/builtin_op_data.h" +#include "tensorflow/lite/c/c_api_types.h" +#include "tensorflow/lite/c/common.h" +#include "tensorflow/lite/kernels/kernel_util.h" +#include "tensorflow/lite/kernels/padding.h" +#include "tensorflow/lite/micro/kernels/conv.h" +#include "tensorflow/lite/micro/kernels/kernel_util.h" + +namespace tflite { + +const int kConvInputTensor = 0; +const int kConvWeightsTensor = 1; +const int kConvBiasTensor = 2; +const int kConvOutputTensor = 0; + +// Conv is quantized along dimension 0: +// https://www.tensorflow.org/lite/performance/quantization_spec +const int kConvQuantizedDimension = 0; + +// Returns a ConvParams struct with all the parameters needed for a +// float computation. 
+ConvParams ConvParamsFloat(const TfLiteConvParams& params, + const OpDataConv& data) { + ConvParams op_params; + CalculateActivationRange(params.activation, &op_params.float_activation_min, + &op_params.float_activation_max); + op_params.padding_type = tflite::micro::RuntimePaddingType(params.padding); + op_params.padding_values.width = data.padding.width; + op_params.padding_values.height = data.padding.height; + op_params.stride_width = params.stride_width; + op_params.stride_height = params.stride_height; + op_params.dilation_width_factor = params.dilation_width_factor; + op_params.dilation_height_factor = params.dilation_height_factor; + return op_params; +} + +// Returns a ConvParams struct with all the parameters needed for a +// quantized computation. +ConvParams ConvParamsQuantized(const TfLiteConvParams& params, + const OpDataConv& data) { + ConvParams op_params; + op_params.input_offset = -data.input_zero_point; + op_params.weights_offset = -data.filter_zero_point; + op_params.output_offset = data.output_zero_point; + op_params.output_multiplier = data.output_multiplier; + op_params.output_shift = -data.output_shift; + op_params.padding_type = tflite::micro::RuntimePaddingType(params.padding); + op_params.padding_values.height = data.padding.height; + op_params.padding_values.width = data.padding.width; + op_params.stride_height = params.stride_height; + op_params.stride_width = params.stride_width; + op_params.dilation_height_factor = params.dilation_height_factor; + op_params.dilation_width_factor = params.dilation_width_factor; + op_params.quantized_activation_min = data.output_activation_min; + op_params.quantized_activation_max = data.output_activation_max; + return op_params; +} + +TfLiteStatus CalculateOpDataConv(TfLiteContext* context, TfLiteNode* node, + const TfLiteConvParams& params, int width, + int height, int filter_width, + int filter_height, int out_width, + int out_height, const TfLiteType data_type, + OpDataConv* data) { + bool has_bias 
= node->inputs->size == 3; + // Check number of inputs/outputs + TF_LITE_ENSURE(context, has_bias || node->inputs->size == 2); + TF_LITE_ENSURE_EQ(context, node->outputs->size, 1); + + // Matching GetWindowedOutputSize in TensorFlow. + auto padding = params.padding; + data->padding = ComputePaddingHeightWidth( + params.stride_height, params.stride_width, params.dilation_height_factor, + params.dilation_width_factor, height, width, filter_height, filter_width, + padding, &out_height, &out_width); + + MicroContext* micro_context = GetMicroContext(context); + + TfLiteTensor* input = + micro_context->AllocateTempInputTensor(node, kConvInputTensor); + TF_LITE_ENSURE(context, input != nullptr); + TfLiteTensor* filter = + micro_context->AllocateTempInputTensor(node, kConvWeightsTensor); + TF_LITE_ENSURE(context, filter != nullptr); + TfLiteTensor* bias = + micro_context->AllocateTempInputTensor(node, kConvBiasTensor); + TfLiteTensor* output = + micro_context->AllocateTempOutputTensor(node, kConvOutputTensor); + TF_LITE_ENSURE(context, output != nullptr); + + // Note that quantized inference requires that all tensors have their + // parameters set. This is usually done during quantized training. 
+ if (data_type != kTfLiteFloat32) { + int output_channels = filter->dims->data[kConvQuantizedDimension]; + + TF_LITE_ENSURE_STATUS(tflite::PopulateConvolutionQuantizationParams( + context, input, filter, bias, output, params.activation, + &data->output_multiplier, &data->output_shift, + &data->output_activation_min, &data->output_activation_max, + data->per_channel_output_multiplier, data->per_channel_output_shift, + output_channels)); + } + + data->input_zero_point = input->params.zero_point; + data->filter_zero_point = filter->params.zero_point; + data->output_zero_point = output->params.zero_point; + + micro_context->DeallocateTempTfLiteTensor(input); + micro_context->DeallocateTempTfLiteTensor(filter); + micro_context->DeallocateTempTfLiteTensor(output); + micro_context->DeallocateTempTfLiteTensor(bias); + + return kTfLiteOk; +} + +TfLiteStatus ConvPrepare(TfLiteContext* context, TfLiteNode* node) { + TFLITE_DCHECK(node->user_data != nullptr); + TFLITE_DCHECK(node->builtin_data != nullptr); + + OpDataConv* data = static_cast(node->user_data); + const auto& params = + *(static_cast(node->builtin_data)); + MicroContext* micro_context = GetMicroContext(context); + + TfLiteTensor* output = + micro_context->AllocateTempOutputTensor(node, kConvOutputTensor); + TF_LITE_ENSURE(context, output != nullptr); + TfLiteTensor* input = + micro_context->AllocateTempInputTensor(node, kConvInputTensor); + TF_LITE_ENSURE(context, input != nullptr); + TfLiteTensor* filter = + micro_context->AllocateTempInputTensor(node, kConvWeightsTensor); + TF_LITE_ENSURE(context, filter != nullptr); + + const int input_width = input->dims->data[2]; + const int input_height = input->dims->data[1]; + const int filter_width = filter->dims->data[2]; + const int filter_height = filter->dims->data[1]; + const int output_width = output->dims->data[2]; + const int output_height = output->dims->data[1]; + + // Dynamically allocate per-channel quantization parameters. 
+ const int num_channels = filter->dims->data[kConvQuantizedDimension]; + data->per_channel_output_multiplier = + static_cast(context->AllocatePersistentBuffer( + context, num_channels * sizeof(int32_t))); + data->per_channel_output_shift = + static_cast(context->AllocatePersistentBuffer( + context, num_channels * sizeof(int32_t))); + + // All per-channel quantized tensors need valid zero point and scale arrays. + if (input->type == kTfLiteInt8 || input->type == kTfLiteInt16) { + TF_LITE_ENSURE_EQ(context, filter->quantization.type, + kTfLiteAffineQuantization); + + const auto* affine_quantization = + static_cast(filter->quantization.params); + TFLITE_DCHECK(affine_quantization != nullptr); + TFLITE_DCHECK(affine_quantization->scale != nullptr); + TFLITE_DCHECK(affine_quantization->zero_point != nullptr); + + TF_LITE_ENSURE(context, + affine_quantization->scale->size == 1 || + affine_quantization->scale->size == + filter->dims->data[kConvQuantizedDimension]); + } + + TF_LITE_ENSURE_STATUS(CalculateOpDataConv( + context, node, params, input_width, input_height, filter_width, + filter_height, output_width, output_height, input->type, data)); + + if (filter->type == kTfLiteInt4) { + int filter_size = + RuntimeShape(filter->dims->size, + reinterpret_cast(filter->dims->data)) + .FlatSize(); + context->RequestScratchBufferInArena(context, filter_size, + &data->filter_buffer_index); + } + + micro_context->DeallocateTempTfLiteTensor(filter); + micro_context->DeallocateTempTfLiteTensor(input); + micro_context->DeallocateTempTfLiteTensor(output); + + return kTfLiteOk; +} +} // namespace tflite diff --git a/tensorflow/lite/micro/kernels/conv_test.cc b/tensorflow/lite/micro/kernels/conv_test.cc new file mode 100644 index 0000000..98c2615 --- /dev/null +++ b/tensorflow/lite/micro/kernels/conv_test.cc @@ -0,0 +1,1207 @@ +/* Copyright 2020 The TensorFlow Authors. All Rights Reserved. 
+ +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#include "tensorflow/lite/micro/kernels/conv_test.h" + +#include "tensorflow/lite/c/builtin_op_data.h" +#include "tensorflow/lite/c/common.h" +#include "tensorflow/lite/micro/kernels/kernel_runner.h" +#include "tensorflow/lite/micro/kernels/testdata/conv_test_data.h" +#include "tensorflow/lite/micro/micro_utils.h" +#include "tensorflow/lite/micro/test_helpers.h" +#include "tensorflow/lite/micro/testing/micro_test.h" + +namespace tflite { +namespace testing { +namespace { +// Common inputs and outputs. 
+constexpr int kInputElements = 16; +static int kInputShape[] = {4, 2, 2, 4, 1}; +static const float kInputData[kInputElements] = {1, 1, 1, 1, 2, 2, 2, 2, + 1, 2, 3, 4, 1, 2, 3, 4}; + +constexpr int kFilterElements = 12; +static int kFilterShape[] = {4, 3, 2, 2, 1}; +static const float kFilterData[kFilterElements] = {1, 2, 3, 4, -1, 1, + -1, 1, -1, -1, 1, 1}; + +constexpr int kBiasElements = 3; +static int kBiasShape[] = {1, 3}; +static const float kBiasData[kBiasElements] = {1, 2, 3}; + +constexpr int kOutputElements = 12; +static int kOutputShape[] = {4, 2, 1, 2, 3}; +static const float kGoldenData[kOutputElements] = {18, 2, 5, 18, 2, 5, + 17, 4, 3, 37, 4, 3}; + +static TfLiteConvParams common_conv_params = { + kTfLitePaddingValid, // padding + 2, // stride_width + 2, // stride_height + kTfLiteActNone, // activation + 1, // dilation_width_factor + 1, // dilation_height_factor +}; + +} // namespace +} // namespace testing +} // namespace tflite + +TF_LITE_MICRO_TESTS_BEGIN + +#if !defined(VISION_P6) // TODO(b/270720625): disabled int8 and int4 test for +// conv for fully connected vision p6 kernels, because vision p6 conv doesn't +// work with per channel quantization + +TF_LITE_MICRO_TEST(SimpleTestQuantized4bitPerChannel) { + const int output_dims_count = 12; + int8_t output_data[output_dims_count]; + + const float input_scale = 0.5f; + const float output_scale = 1.0f; + const int input_zero_point = 0; + const int output_zero_point = 0; + + int8_t input_quantized[tflite::testing::kInputElements]; + int8_t filter_quantized[tflite::testing::kFilterElements]; + int32_t bias_quantized[tflite::testing::kBiasElements]; + int8_t golden_quantized[tflite::testing::kOutputElements]; + int zero_points[tflite::testing::kBiasElements + 1]; + float scales[tflite::testing::kBiasElements + 1]; + + TF_LITE_MICRO_EXPECT_EQ( + kTfLiteOk, + tflite::testing::TestConvQuantizedPerChannel( + tflite::testing::kInputShape, tflite::testing::kInputData, + input_quantized, input_scale, 
input_zero_point, + tflite::testing::kFilterShape, tflite::testing::kFilterData, + filter_quantized, tflite::testing::kBiasShape, + tflite::testing::kBiasData, bias_quantized, scales, zero_points, + tflite::testing::kOutputShape, tflite::testing::kGoldenData, + golden_quantized, output_scale, output_zero_point, + &tflite::testing::common_conv_params, tflite::Register_CONV_2D(), + output_data, kTfLiteInt4)); +} + +TF_LITE_MICRO_TEST(SimpleTestQuantizedPerChannel) { + const int output_dims_count = 12; + int8_t output_data[output_dims_count]; + + const float input_scale = 0.5f; + const float output_scale = 1.0f; + const int input_zero_point = 0; + const int output_zero_point = 0; + + int8_t input_quantized[tflite::testing::kInputElements]; + int8_t filter_quantized[tflite::testing::kFilterElements]; + int32_t bias_quantized[tflite::testing::kBiasElements]; + int8_t golden_quantized[tflite::testing::kOutputElements]; + int zero_points[tflite::testing::kBiasElements + 1]; + float scales[tflite::testing::kBiasElements + 1]; + + TF_LITE_MICRO_EXPECT_EQ( + kTfLiteOk, + tflite::testing::TestConvQuantizedPerChannel( + tflite::testing::kInputShape, tflite::testing::kInputData, + input_quantized, input_scale, input_zero_point, + tflite::testing::kFilterShape, tflite::testing::kFilterData, + filter_quantized, tflite::testing::kBiasShape, + tflite::testing::kBiasData, bias_quantized, scales, zero_points, + tflite::testing::kOutputShape, tflite::testing::kGoldenData, + golden_quantized, output_scale, output_zero_point, + &tflite::testing::common_conv_params, tflite::Register_CONV_2D(), + output_data)); +} + +#endif // !defined(VISION_P6) + +#if !defined(XTENSA) // TODO(b/170321206): xtensa kernels are less general than + // reference kernels and we ifdef out test cases that are + // currently known to fail. 
+ +TF_LITE_MICRO_TEST(SimpleTestFloat) { + float output_data[tflite::testing::kOutputElements]; + + TF_LITE_MICRO_EXPECT_EQ( + kTfLiteOk, + tflite::testing::TestConvFloat( + tflite::testing::kInputShape, tflite::testing::kInputData, + tflite::testing::kFilterShape, tflite::testing::kFilterData, + tflite::testing::kBiasShape, tflite::testing::kBiasData, + tflite::testing::kOutputShape, tflite::testing::kGoldenData, + &tflite::testing::common_conv_params, tflite::Register_CONV_2D(), + output_data)); +} + +TF_LITE_MICRO_TEST(InputAndFilterSameWidthHeight) { + const int output_dims_count = 2; + float output_data[output_dims_count]; + + int kFilterShape[] = {4, 1, 2, 4, 1}; + const float filter_values[] = {1, 2, 3, 4, -1, -1, 1, 1}; + int kBiasShape[] = {1, 1}; + const float bias_values[] = {0}; + int kOutputShape[] = {4, 2, 1, 1, 1}; + const float expected_output[] = {10, 34}; + + TF_LITE_MICRO_EXPECT_EQ( + kTfLiteOk, + tflite::testing::TestConvFloat( + tflite::testing::kInputShape, tflite::testing::kInputData, + kFilterShape, filter_values, kBiasShape, bias_values, kOutputShape, + expected_output, &tflite::testing::common_conv_params, + tflite::Register_CONV_2D(), output_data)); +} + +TF_LITE_MICRO_TEST(InputOutputDifferentTypeIsError) { + using tflite::testing::CreateQuantizedTensor; + using tflite::testing::CreateTensor; + using tflite::testing::IntArrayFromInts; + + TfLiteIntArray* input_dims = IntArrayFromInts(tflite::testing::kInputShape); + TfLiteIntArray* filter_dims = IntArrayFromInts(tflite::testing::kFilterShape); + TfLiteIntArray* bias_dims = IntArrayFromInts(tflite::testing::kBiasShape); + TfLiteIntArray* output_dims = IntArrayFromInts(tflite::testing::kOutputShape); + const int output_dims_count = tflite::ElementCount(*output_dims); + constexpr int inputs_size = 3; + constexpr int outputs_size = 1; + constexpr int tensors_size = inputs_size + outputs_size; + + int8_t output_data[tflite::testing::kOutputElements]; + TfLiteTensor tensors[tensors_size] = { + 
CreateTensor(tflite::testing::kInputData, input_dims), + CreateTensor(tflite::testing::kFilterData, filter_dims), + CreateTensor(tflite::testing::kBiasData, bias_dims), + CreateQuantizedTensor(output_data, output_dims, /*scale=*/0.0f, + /*zero_point=*/0), + }; + TF_LITE_MICRO_EXPECT_EQ( + kTfLiteError, + tflite::testing::InvokeConv(tensors, tensors_size, output_dims_count, + &tflite::testing::common_conv_params, + tflite::Register_CONV_2D(), output_data)); +} + +TF_LITE_MICRO_TEST(HybridModeIsError) { + using tflite::testing::CreateQuantizedTensor; + using tflite::testing::CreateTensor; + using tflite::testing::IntArrayFromInts; + + TfLiteIntArray* input_dims = IntArrayFromInts(tflite::testing::kInputShape); + TfLiteIntArray* filter_dims = IntArrayFromInts(tflite::testing::kFilterShape); + TfLiteIntArray* bias_dims = IntArrayFromInts(tflite::testing::kBiasShape); + TfLiteIntArray* output_dims = IntArrayFromInts(tflite::testing::kOutputShape); + const int output_dims_count = tflite::ElementCount(*output_dims); + constexpr int inputs_size = 3; + constexpr int outputs_size = 1; + constexpr int tensors_size = inputs_size + outputs_size; + + int8_t filter_data[tflite::testing::kFilterElements] = {}; + float output_data[tflite::testing::kOutputElements]; + TfLiteTensor tensors[tensors_size] = { + CreateTensor(tflite::testing::kInputData, input_dims), + CreateQuantizedTensor(filter_data, filter_dims, + /*scale=*/0.0f, + /*zero_point=*/0), + CreateTensor(tflite::testing::kBiasData, bias_dims), + CreateTensor(output_data, output_dims), + }; + TF_LITE_MICRO_EXPECT_EQ( + kTfLiteError, + tflite::testing::InvokeConv(tensors, tensors_size, output_dims_count, + &tflite::testing::common_conv_params, + tflite::Register_CONV_2D(), output_data)); +} + +TF_LITE_MICRO_TEST(SimpleTestQuantized16x8PerChannel64bBias) { + const int output_dims_count = 12; + int16_t output_data[output_dims_count]; + + const float input_scale = 0.5f; + const float output_scale = 1.0f; + const int 
input_zero_point = 0; + const int output_zero_point = 0; + + int16_t input_quantized[tflite::testing::kInputElements]; + int8_t filter_quantized[tflite::testing::kFilterElements]; + std::int64_t bias_quantized[tflite::testing::kBiasElements]; + int16_t golden_quantized[tflite::testing::kOutputElements]; + int zero_points[tflite::testing::kBiasElements + 1]; + float scales[tflite::testing::kBiasElements + 1]; + + TF_LITE_MICRO_EXPECT_EQ( + kTfLiteOk, + tflite::testing::TestConvQuantizedPerChannel( + tflite::testing::kInputShape, tflite::testing::kInputData, + input_quantized, input_scale, input_zero_point, + tflite::testing::kFilterShape, tflite::testing::kFilterData, + filter_quantized, tflite::testing::kBiasShape, + tflite::testing::kBiasData, bias_quantized, scales, zero_points, + tflite::testing::kOutputShape, tflite::testing::kGoldenData, + golden_quantized, output_scale, output_zero_point, + &tflite::testing::common_conv_params, tflite::Register_CONV_2D(), + output_data)); +} + +#if !defined(CMSIS_NN) +TF_LITE_MICRO_TEST(SimpleTestQuantized16x8PerChannel32bBias) { + const int output_dims_count = 12; + int16_t output_data[output_dims_count]; + + const float input_scale = 0.5f; + const float output_scale = 1.0f; + const int input_zero_point = 0; + const int output_zero_point = 0; + + int16_t input_quantized[tflite::testing::kInputElements]; + int8_t filter_quantized[tflite::testing::kFilterElements]; + int32_t bias_quantized[tflite::testing::kBiasElements]; + int16_t golden_quantized[tflite::testing::kOutputElements]; + int zero_points[tflite::testing::kBiasElements + 1]; + float scales[tflite::testing::kBiasElements + 1]; + + TF_LITE_MICRO_EXPECT_EQ( + kTfLiteOk, + tflite::testing::TestConvQuantizedPerChannel( + tflite::testing::kInputShape, tflite::testing::kInputData, + input_quantized, input_scale, input_zero_point, + tflite::testing::kFilterShape, tflite::testing::kFilterData, + filter_quantized, tflite::testing::kBiasShape, + tflite::testing::kBiasData, 
bias_quantized, scales, zero_points, + tflite::testing::kOutputShape, tflite::testing::kGoldenData, + golden_quantized, output_scale, output_zero_point, + &tflite::testing::common_conv_params, tflite::Register_CONV_2D(), + output_data)); +} +#endif + +TF_LITE_MICRO_TEST(SimpleTestDilatedQuantizedPerChannel) { + const int output_dims_count = 24; + int8_t output_data[output_dims_count]; + + const float input_scale = 0.5f; + const float output_scale = 1.0f; + const int input_zero_point = 0; + const int output_zero_point = 0; + + const int input_elements = 48; + int input_shape[] = {4, 2, 4, 6, 1}; + const float input_data[] = { + // b = 0 + 1, 1, 1, 1, 1, 1, 2, 2, 2, 2, 2, 2, 3, 3, 3, 3, 3, 3, 4, 4, 4, 4, 4, 4, + // b = 1 + 1, 2, 3, 4, 5, 6, 2, 6, 2, 4, 4, 2, 3, 2, 6, 5, 1, 4, 1, 2, 1, 4, 6, 3}; + const int output_elements = 24; + int output_shape[] = {4, 2, 2, 2, 3}; + const float golden_data[] = {25, 2, 7, 25, 2, 7, 10, 2, -3, 10, 2, -3, + 39, 7, 6, 50, 3, 4, 14, 4, -5, 15, 0, -7}; + + int8_t input_quantized[input_elements]; + int8_t filter_quantized[tflite::testing::kFilterElements]; + int32_t bias_quantized[tflite::testing::kBiasElements]; + int8_t golden_quantized[output_elements]; + int zero_points[tflite::testing::kBiasElements + 1]; + float scales[tflite::testing::kBiasElements + 1]; + + TfLiteConvParams conv_params{tflite::testing::common_conv_params}; + conv_params.dilation_width_factor = 3; + conv_params.dilation_height_factor = 2; + + TF_LITE_MICRO_EXPECT_EQ( + kTfLiteOk, + tflite::testing::TestConvQuantizedPerChannel( + input_shape, input_data, input_quantized, input_scale, + input_zero_point, tflite::testing::kFilterShape, + tflite::testing::kFilterData, filter_quantized, + tflite::testing::kBiasShape, tflite::testing::kBiasData, + bias_quantized, scales, zero_points, output_shape, golden_data, + golden_quantized, output_scale, output_zero_point, &conv_params, + tflite::Register_CONV_2D(), output_data)); +} + 
+TF_LITE_MICRO_TEST(SimpleTestQuantizedPerChannelRelu6) { + const int output_dims_count = 12; + int8_t output_data[output_dims_count]; + + const float bias_values[] = {1, 2, -3}; + const float golden_data[] = {6, 2, 0, 6, 2, 0, 6, 4, 0, 6, 4, 0}; + + const float input_scale = 0.023529f; + const float output_scale = 0.023529f; + const int input_zero_point = -128; + const int output_zero_point = -128; + + int8_t input_quantized[tflite::testing::kInputElements]; + int8_t filter_quantized[tflite::testing::kFilterElements]; + int32_t bias_quantized[tflite::testing::kBiasElements]; + int8_t golden_quantized[tflite::testing::kOutputElements]; + int zero_points[tflite::testing::kBiasElements + 1]; + float scales[tflite::testing::kBiasElements + 1]; + + TF_LITE_MICRO_EXPECT_EQ( + kTfLiteOk, + tflite::testing::TestConvQuantizedPerChannel( + tflite::testing::kInputShape, tflite::testing::kInputData, + input_quantized, input_scale, input_zero_point, + tflite::testing::kFilterShape, tflite::testing::kFilterData, + filter_quantized, tflite::testing::kBiasShape, bias_values, + bias_quantized, scales, zero_points, tflite::testing::kOutputShape, + golden_data, golden_quantized, output_scale, output_zero_point, + &tflite::testing::common_conv_params, tflite::Register_CONV_2D(), + output_data)); +} + +TF_LITE_MICRO_TEST(SimpleTestQuantized16x8PerChannelRelu664bBias) { + const int output_dims_count = 12; + int16_t output_data[output_dims_count]; + + const float bias_values[] = {1, 2, -3}; + const float golden_data[] = {6, 2, 0, 6, 2, 0, 6, 4, 0, 6, 4, 0}; + + const float input_scale = 0.023529f; + const float output_scale = 0.023529f; + const int input_zero_point = 0; + const int output_zero_point = 0; + + int16_t input_quantized[tflite::testing::kInputElements]; + int8_t filter_quantized[tflite::testing::kFilterElements]; + std::int64_t bias_quantized[tflite::testing::kBiasElements]; + int16_t golden_quantized[tflite::testing::kOutputElements]; + int 
zero_points[tflite::testing::kBiasElements + 1]; + float scales[tflite::testing::kBiasElements + 1]; + + TfLiteConvParams conv_params{tflite::testing::common_conv_params}; + conv_params.activation = kTfLiteActRelu6; + TF_LITE_MICRO_EXPECT_EQ( + kTfLiteOk, + tflite::testing::TestConvQuantizedPerChannel( + tflite::testing::kInputShape, tflite::testing::kInputData, + input_quantized, input_scale, input_zero_point, + tflite::testing::kFilterShape, tflite::testing::kFilterData, + filter_quantized, tflite::testing::kBiasShape, bias_values, + bias_quantized, scales, zero_points, tflite::testing::kOutputShape, + golden_data, golden_quantized, output_scale, output_zero_point, + &conv_params, tflite::Register_CONV_2D(), output_data)); +} + +#if !defined(CMSIS_NN) +TF_LITE_MICRO_TEST(SimpleTestQuantized16x8PerChannelRelu632bBias) { + const int output_dims_count = 12; + int16_t output_data[output_dims_count]; + + const float bias_values[] = {1, 2, -3}; + const float golden_data[] = {6, 2, 0, 6, 2, 0, 6, 4, 0, 6, 4, 0}; + + const float input_scale = 0.023529f; + const float output_scale = 0.023529f; + const int input_zero_point = 0; + const int output_zero_point = 0; + + int16_t input_quantized[tflite::testing::kInputElements]; + int8_t filter_quantized[tflite::testing::kFilterElements]; + int32_t bias_quantized[tflite::testing::kBiasElements]; + int16_t golden_quantized[tflite::testing::kOutputElements]; + int zero_points[tflite::testing::kBiasElements + 1]; + float scales[tflite::testing::kBiasElements + 1]; + + TfLiteConvParams conv_params{tflite::testing::common_conv_params}; + conv_params.activation = kTfLiteActRelu6; + TF_LITE_MICRO_EXPECT_EQ( + kTfLiteOk, + tflite::testing::TestConvQuantizedPerChannel( + tflite::testing::kInputShape, tflite::testing::kInputData, + input_quantized, input_scale, input_zero_point, + tflite::testing::kFilterShape, tflite::testing::kFilterData, + filter_quantized, tflite::testing::kBiasShape, bias_values, + bias_quantized, scales, 
zero_points, tflite::testing::kOutputShape, + golden_data, golden_quantized, output_scale, output_zero_point, + &conv_params, tflite::Register_CONV_2D(), output_data)); +} +#endif + +TF_LITE_MICRO_TEST(Kernel1x1QuantizedPerChannel) { + // conv params: + // padding, stride_, activation, dilation_ + TfLiteConvParams conv_params = {kTfLitePaddingValid, 1, 1, + kTfLiteActNone, 1, 1}; + + int input_shape[] = {4, 1, 2, 2, 4}; // [len,N,H,W,C] + constexpr int input_elements = + 1 * 2 * 2 * + 4; // input_shape[1] * input_shape[2] * input_shape[3] * input_shape[4]; + constexpr float input_data[input_elements] = {1, 1, 1, 1, 2, 2, 2, 2, + 1, 2, 3, 4, 1, 2, 3, 4}; + + int filter_shape[] = {4, 3, 1, 1, 4}; + constexpr int filter_elements = + 3 * 1 * 1 * 4; // filter_shape[1] * filter_shape[2] * + // filter_shape[3] * filter_shape[4]; + const float filter_data[filter_elements] = {1, 2, 3, 4, -1, 1, + -1, 1, -1, -1, 1, 1}; + + constexpr int bias_elements = 3; // filter_shape[1]; + int bias_shape[] = {1, bias_elements}; + constexpr float bias_data[bias_elements] = {1, 2, 3}; + + int output_shape[] = {4, 1, 2, 2, bias_elements}; + constexpr int output_elements = 4 * 3; + int8_t output_data[output_elements]; + + const float golden_data[output_elements] = {11, 2, 3, 21, 2, 3, + 31, 4, 7, 31, 4, 7}; + + const float input_scale = 0.5f; + const float output_scale = 1.0f; + const int input_zero_point = 0; + const int output_zero_point = 0; + + int8_t input_quantized[input_elements]; + int8_t filter_quantized[filter_elements]; + int32_t bias_quantized[bias_elements]; + int8_t golden_quantized[output_elements]; + int zero_points[bias_elements + 1]; + float scales[bias_elements + 1]; + + TF_LITE_MICRO_EXPECT_EQ( + kTfLiteOk, tflite::testing::TestConvQuantizedPerChannel( + input_shape, input_data, input_quantized, input_scale, + input_zero_point, filter_shape, filter_data, + filter_quantized, bias_shape, bias_data, bias_quantized, + scales, zero_points, output_shape, golden_data, + 
golden_quantized, output_scale, output_zero_point, + &conv_params, tflite::Register_CONV_2D(), output_data)); +} + +TF_LITE_MICRO_TEST(Kernel1x1QuantizedPerChannelRelu6) { + // conv params: + // padding, stride_, activation, dilation_ + TfLiteConvParams conv_params = {kTfLitePaddingValid, 1, 1, + kTfLiteActRelu6, 1, 1}; + + int input_shape[] = {4, 1, 2, 2, 4}; // [len,N,H,W,C] + constexpr int input_elements = + 1 * 2 * 2 * + 4; // input_shape[1] * input_shape[2] * input_shape[3] * input_shape[4]; + constexpr float input_data[input_elements] = {1, 1, 1, 1, 2, 2, 2, 2, + 1, 2, 3, 4, 1, 2, 3, 4}; + + int filter_shape[] = {4, 3, 1, 1, 4}; + constexpr int filter_elements = + 3 * 1 * 1 * 4; // filter_shape[1] * filter_shape[2] * + // filter_shape[3] * filter_shape[4]; + const float filter_data[filter_elements] = {1, 2, 3, 4, -1, 1, + -1, 1, -1, -1, 1, 1}; + + constexpr int bias_elements = 3; // filter_shape[1]; + int bias_shape[] = {1, bias_elements}; + constexpr float bias_data[bias_elements] = {1, 2, -3}; + + int output_shape[] = {4, 1, 2, 2, bias_elements}; + constexpr int output_elements = 4 * 3; + int8_t output_data[output_elements]; + + const float golden_data[output_elements] = {6, 2, 0, 6, 2, 0, + 6, 4, 1, 6, 4, 1}; + + const float input_scale = 0.023529f; + const float output_scale = 0.023529f; + const int input_zero_point = -128; + const int output_zero_point = -128; + + int8_t input_quantized[input_elements]; + int8_t filter_quantized[filter_elements]; + int32_t bias_quantized[bias_elements]; + int8_t golden_quantized[output_elements]; + int zero_points[bias_elements + 1]; + float scales[bias_elements + 1]; + + TF_LITE_MICRO_EXPECT_EQ( + kTfLiteOk, tflite::testing::TestConvQuantizedPerChannel( + input_shape, input_data, input_quantized, input_scale, + input_zero_point, filter_shape, filter_data, + filter_quantized, bias_shape, bias_data, bias_quantized, + scales, zero_points, output_shape, golden_data, + golden_quantized, output_scale, output_zero_point, + 
&conv_params, tflite::Register_CONV_2D(), output_data)); +} + +TF_LITE_MICRO_TEST(Kernel1x1Quantized16x8PerChannelRelu6) { + // conv params: + // padding, stride_, activation, dilation_ + TfLiteConvParams conv_params = {kTfLitePaddingValid, 1, 1, + kTfLiteActRelu6, 1, 1}; + + int input_shape[] = {4, 1, 2, 2, 4}; // [len,N,H,W,C] + const int input_elements = 1 * 2 * 2 * 4; + const float input_data[input_elements] = {1, 1, 1, 1, 2, 2, 2, 2, + 1, 2, 3, 4, 1, 2, 3, 4}; + + int filter_shape[] = {4, 3, 1, 1, 4}; + const int filter_elements = 3 * 1 * 1 * 4; + const float filter_data[filter_elements] = {1, 2, 3, 4, -1, 1, + -1, 1, -1, -1, 1, 1}; + + const int bias_elements = 3; + int bias_shape[] = {1, bias_elements}; + const float bias_data[bias_elements] = {1, 2, -3}; + + int output_shape[] = {4, 1, 2, 2, bias_elements}; + const int output_elements = 4 * 3; + int16_t output_data[output_elements]; + + const float golden_data[output_elements] = {6, 2, 0, 6, 2, 0, + 6, 4, 1, 6, 4, 1}; + + const float input_scale = 0.023529f; + const float output_scale = 0.023529f; + const int input_zero_point = 0; + const int output_zero_point = 0; + + int16_t input_quantized[input_elements]; + int8_t filter_quantized[filter_elements]; + std::int64_t bias_quantized[bias_elements]; + int16_t golden_quantized[output_elements]; + int zero_points[bias_elements + 1]; + float scales[bias_elements + 1]; + + TF_LITE_MICRO_EXPECT_EQ( + kTfLiteOk, tflite::testing::TestConvQuantizedPerChannel( + input_shape, input_data, input_quantized, input_scale, + input_zero_point, filter_shape, filter_data, + filter_quantized, bias_shape, bias_data, bias_quantized, + scales, zero_points, output_shape, golden_data, + golden_quantized, output_scale, output_zero_point, + &conv_params, tflite::Register_CONV_2D(), output_data)); +} + +TF_LITE_MICRO_TEST(BroadcastPerLayerQuantizationToPerChannelShouldMatchGolden) { + const int output_dims_count = 12; + int8_t output_data[output_dims_count]; + + const float input_scale 
= 1.0f; + const float filter_scale = 1.0f; + const float output_scale = 1.0f; + + int8_t input_quantized[tflite::testing::kInputElements]; + int8_t filter_quantized[tflite::testing::kFilterElements]; + int32_t bias_quantized[tflite::testing::kBiasElements]; + int8_t golden_quantized[tflite::testing::kOutputElements]; + + TfLiteIntArray* input_dims = + tflite::testing::IntArrayFromInts(tflite::testing::kInputShape); + TfLiteIntArray* filter_dims = + tflite::testing::IntArrayFromInts(tflite::testing::kFilterShape); + TfLiteIntArray* bias_dims = + tflite::testing::IntArrayFromInts(tflite::testing::kBiasShape); + TfLiteIntArray* output_dims = + tflite::testing::IntArrayFromInts(tflite::testing::kOutputShape); + + // Create per-layer quantized int8_t input tensor. + TfLiteTensor input_tensor = tflite::testing::CreateQuantizedTensor( + tflite::testing::kInputData, input_quantized, input_dims, input_scale, 0); + int input_zero_points[2] = {1, 0}; + float input_scales[2] = {1, input_scale}; + TfLiteAffineQuantization input_quant = { + tflite::testing::FloatArrayFromFloats(input_scales), + tflite::testing::IntArrayFromInts(input_zero_points), 0}; + input_tensor.quantization = {kTfLiteAffineQuantization, &input_quant}; + + // Create per-layer quantized int8_t filter tensor. + TfLiteTensor filter_tensor = tflite::testing::CreateQuantizedTensor( + tflite::testing::kFilterData, filter_quantized, filter_dims, filter_scale, + 0); + int filter_zero_points[2] = {1, 0}; + float filter_scales[2] = {1, filter_scale}; + TfLiteAffineQuantization filter_quant = { + tflite::testing::FloatArrayFromFloats(filter_scales), + tflite::testing::IntArrayFromInts(filter_zero_points), 0}; + filter_tensor.quantization = {kTfLiteAffineQuantization, &filter_quant}; + + // Create per-layer quantized int32_t bias tensor. 
+ tflite::SymmetricQuantize(tflite::testing::kBiasData, bias_quantized, + tflite::testing::kBiasElements, + input_scale * output_scale); + TfLiteTensor bias_tensor = + tflite::testing::CreateTensor(bias_quantized, bias_dims); + + int bias_zero_points[2] = {1, 0}; + float bias_scales[2] = {1, input_scale * filter_scale}; + TfLiteAffineQuantization bias_quant = { + tflite::testing::FloatArrayFromFloats(bias_scales), + tflite::testing::IntArrayFromInts(bias_zero_points), 0}; + bias_tensor.quantization = {kTfLiteAffineQuantization, &bias_quant}; + + // Create per-layer quantized int8_t output tensor. + TfLiteTensor output_tensor = tflite::testing::CreateQuantizedTensor( + output_data, output_dims, output_scale, 0 /* quantized dimension */); + int output_zero_points[2] = {1, 0}; + float output_scales[2] = {1, output_scale}; + TfLiteAffineQuantization output_quant = { + tflite::testing::FloatArrayFromFloats(output_scales), + tflite::testing::IntArrayFromInts(output_zero_points), 0}; + output_tensor.quantization = {kTfLiteAffineQuantization, &output_quant}; + + constexpr int inputs_size = 3; + constexpr int outputs_size = 1; + constexpr int tensors_size = inputs_size + outputs_size; + TfLiteTensor tensors[tensors_size] = { + input_tensor, + filter_tensor, + bias_tensor, + output_tensor, + }; + + tflite::Quantize(tflite::testing::kGoldenData, golden_quantized, + output_dims_count, output_scale, 0); + + TF_LITE_MICRO_EXPECT_EQ( + kTfLiteOk, tflite::testing::ValidateConvGoldens( + tensors, tensors_size, golden_quantized, output_dims_count, + &tflite::testing::common_conv_params, + tflite::Register_CONV_2D(), output_data)); +} + +#endif // !defined(XTENSA) + +TF_LITE_MICRO_TEST(Int8Filter1x3x3x1ShouldMatchGoldenEvenInputPaddingSame) { + using tflite::ElementCount; + using tflite::kConvFilter1x3x3x1; + using tflite::kConvGoldenOutput4x4InputPaddingSame2x2; + using tflite::kConvInput1x4x4x1; + using tflite::kConvZeroBias; + using tflite::testing::CreateTensor; + using 
tflite::testing::FloatArrayFromFloats; + using tflite::testing::IntArrayFromInts; + using tflite::testing::ValidateConvGoldens; + + constexpr int kInDepth = 1; + constexpr int kOutDepth = 1; + + // Input quantization parameters: same scale and zero point for all input + // elements. + constexpr float kInputScale = 0.00392120517f; + constexpr int kInputZeroPoint = -128; + float input_scales[] = {1, kInputScale}; + int input_zero_points[] = {1, kInputZeroPoint}; + TfLiteAffineQuantization input_quant = {FloatArrayFromFloats(input_scales), + IntArrayFromInts(input_zero_points), + 0}; + // Create input tensor of size 1x4x4x1. + int input_shape[] = {4, 1, 4, 4, kInDepth}; + TfLiteIntArray* input_dims = IntArrayFromInts(input_shape); + TfLiteTensor input_tensor = CreateTensor(kConvInput1x4x4x1, input_dims); + input_tensor.params = {kInputScale, kInputZeroPoint}; + input_tensor.quantization = {kTfLiteAffineQuantization, &input_quant}; + + // Filter quantization parameters. + int filter_zero_points[kOutDepth + 1] = {kOutDepth, 0}; + float filter_scales[kOutDepth + 1] = {kOutDepth, 0.00448552053f}; + TfLiteAffineQuantization filter_quant; + filter_quant.scale = FloatArrayFromFloats(filter_scales); + filter_quant.zero_point = IntArrayFromInts(filter_zero_points); + filter_quant.quantized_dimension = 0; + + // Create filter tensor of size 1x3x3x1. + int filter_shape[] = {4, kOutDepth, 3, 3, kInDepth}; + TfLiteIntArray* filter_dims = IntArrayFromInts(filter_shape); + TfLiteTensor filter_tensor = CreateTensor(kConvFilter1x3x3x1, filter_dims); + filter_tensor.quantization = {kTfLiteAffineQuantization, &filter_quant}; + + // Bias quantization parameters: same zero point, but different scale per + // output channel. 
+ int bias_zero_points[kOutDepth + 1] = {kOutDepth, 0}; + float bias_scales[kOutDepth + 1] = {kOutDepth, 0.00001758864f}; + TfLiteAffineQuantization bias_quant; + bias_quant.scale = FloatArrayFromFloats(bias_scales); + bias_quant.zero_point = IntArrayFromInts(bias_zero_points); + bias_quant.quantized_dimension = 0; + + // Create size 1 zero bias tensor. + int bias_shape[] = {1, kOutDepth}; + TfLiteIntArray* bias_dims = IntArrayFromInts(bias_shape); + TfLiteTensor bias_tensor = CreateTensor(kConvZeroBias, bias_dims); + bias_tensor.quantization = {kTfLiteAffineQuantization, &bias_quant}; + + // Output quantization parameters: same zero point and scale for all elements. + const float output_scale = 0.00627814838f; + const int output_zero_point = -7; + float output_scales[] = {1, output_scale}; + int output_zero_points[] = {1, output_zero_point}; + TfLiteAffineQuantization output_quant = {FloatArrayFromFloats(output_scales), + IntArrayFromInts(output_zero_points), + 0}; + + // Create output tensor of 1x2x2x1. + int8_t output_data[4 * 2 * 2 * kOutDepth]; + int output_shape[] = {4, 1, 2, 2, kOutDepth}; + TfLiteIntArray* output_dims = IntArrayFromInts(output_shape); + const int output_dims_count = ElementCount(*output_dims); + TfLiteTensor output_tensor = CreateTensor(output_data, output_dims); + output_tensor.params = {output_scale, output_zero_point}; + output_tensor.quantization = {kTfLiteAffineQuantization, &output_quant}; + + // The 3 inputs include the input, filter and bias tensors. 
+ constexpr int inputs_size = 3; + constexpr int outputs_size = 1; + constexpr int tensors_size = inputs_size + outputs_size; + TfLiteTensor tensors[tensors_size] = { + input_tensor, + filter_tensor, + bias_tensor, + output_tensor, + }; + + TfLiteConvParams conv_params{tflite::testing::common_conv_params}; + conv_params.padding = kTfLitePaddingSame; + + TF_LITE_MICRO_EXPECT_EQ( + kTfLiteOk, ValidateConvGoldens(tensors, tensors_size, + kConvGoldenOutput4x4InputPaddingSame2x2, + output_dims_count, &conv_params, + tflite::Register_CONV_2D(), output_data, + 1.0 /* tolerance */)); +} + +TF_LITE_MICRO_TEST(Int8Filter1x3x3x1ShouldMatchGoldenOddInputPaddingSame) { + using tflite::ElementCount; + using tflite::kConvFilter1x3x3x1; + using tflite::kConvGoldenOutput5x5InputPaddingSame3x3; + using tflite::kConvInput1x5x5x1; + using tflite::kConvZeroBias; + using tflite::testing::CreateTensor; + using tflite::testing::FloatArrayFromFloats; + using tflite::testing::IntArrayFromInts; + using tflite::testing::ValidateConvGoldens; + + constexpr int kInDepth = 1; + constexpr int kOutDepth = 1; + + // Input quantization parameters: same scale and zero point for all input + // elements. + constexpr float kInputScale = 0.00392120517f; + constexpr int kInputZeroPoint = -128; + float input_scales[] = {1, kInputScale}; + int input_zero_points[] = {1, kInputZeroPoint}; + TfLiteAffineQuantization input_quant = {FloatArrayFromFloats(input_scales), + IntArrayFromInts(input_zero_points), + 0}; + // Create input tensor of size 1x5x5x1. + int input_shape[] = {4, 1, 5, 5, kInDepth}; + TfLiteIntArray* input_dims = IntArrayFromInts(input_shape); + TfLiteTensor input_tensor = CreateTensor(kConvInput1x5x5x1, input_dims); + input_tensor.params = {kInputScale, kInputZeroPoint}; + input_tensor.quantization = {kTfLiteAffineQuantization, &input_quant}; + + // Filter quantization parameters. 
+ int filter_zero_points[kOutDepth + 1] = {kOutDepth, 0}; + float filter_scales[kOutDepth + 1] = {kOutDepth, 0.00448552053f}; + TfLiteAffineQuantization filter_quant; + filter_quant.scale = FloatArrayFromFloats(filter_scales); + filter_quant.zero_point = IntArrayFromInts(filter_zero_points); + filter_quant.quantized_dimension = 0; + + // Create filter tensor of size 1x3x3x1. + int filter_shape[] = {4, kOutDepth, 3, 3, kInDepth}; + TfLiteIntArray* filter_dims = IntArrayFromInts(filter_shape); + TfLiteTensor filter_tensor = CreateTensor(kConvFilter1x3x3x1, filter_dims); + filter_tensor.quantization = {kTfLiteAffineQuantization, &filter_quant}; + + // Bias quantization parameters: same zero point, but different scale per + // output channel. + int bias_zero_points[kOutDepth + 1] = {kOutDepth, 0}; + float bias_scales[kOutDepth + 1] = {kOutDepth, 0.00001758864f}; + TfLiteAffineQuantization bias_quant; + bias_quant.scale = FloatArrayFromFloats(bias_scales); + bias_quant.zero_point = IntArrayFromInts(bias_zero_points); + bias_quant.quantized_dimension = 0; + + // Create size 1 zero bias tensor. + int bias_shape[] = {1, kOutDepth}; + TfLiteIntArray* bias_dims = IntArrayFromInts(bias_shape); + TfLiteTensor bias_tensor = CreateTensor(kConvZeroBias, bias_dims); + bias_tensor.quantization = {kTfLiteAffineQuantization, &bias_quant}; + + // Output quantization parameters: same zero point and scale for all elements. + const float output_scale = 0.00627814838f; + const int output_zero_point = -7; + float output_scales[] = {1, output_scale}; + int output_zero_points[] = {1, output_zero_point}; + TfLiteAffineQuantization output_quant = {FloatArrayFromFloats(output_scales), + IntArrayFromInts(output_zero_points), + 0}; + + // Create output tensor. 
+ int8_t output_data[4 * 3 * 3 * kOutDepth]; + int output_shape[] = {4, 1, 3, 3, kOutDepth}; + TfLiteIntArray* output_dims = IntArrayFromInts(output_shape); + const int output_dims_count = ElementCount(*output_dims); + TfLiteTensor output_tensor = CreateTensor(output_data, output_dims); + output_tensor.params = {output_scale, output_zero_point}; + output_tensor.quantization = {kTfLiteAffineQuantization, &output_quant}; + + // The 3 inputs include the input, filter and bias tensors. + constexpr int inputs_size = 3; + constexpr int outputs_size = 1; + constexpr int tensors_size = inputs_size + outputs_size; + TfLiteTensor tensors[tensors_size] = { + input_tensor, + filter_tensor, + bias_tensor, + output_tensor, + }; + + TfLiteConvParams conv_params{tflite::testing::common_conv_params}; + conv_params.padding = kTfLitePaddingSame; + + TF_LITE_MICRO_EXPECT_EQ( + kTfLiteOk, ValidateConvGoldens(tensors, tensors_size, + kConvGoldenOutput5x5InputPaddingSame3x3, + output_dims_count, &conv_params, + tflite::Register_CONV_2D(), output_data, + 1.0 /* tolerance */)); +} + +TF_LITE_MICRO_TEST(FilterDimsNotMatchingAffineQuantization) { + const int output_dims_count = 12; + int8_t output_data[output_dims_count]; + + const float input_scale = 0.5f; + const float output_scale = 1.0f; + + int8_t input_quantized[tflite::testing::kInputElements]; + int8_t filter_quantized[tflite::testing::kFilterElements]; + int32_t bias_quantized[tflite::testing::kBiasElements]; + int8_t golden_quantized[tflite::testing::kOutputElements]; + int zero_points[tflite::testing::kBiasElements + 1]; + float scales[tflite::testing::kBiasElements + 1]; + + TfLiteIntArray* input_dims = + tflite::testing::IntArrayFromInts(tflite::testing::kInputShape); + TfLiteIntArray* filter_dims = + tflite::testing::IntArrayFromInts(tflite::testing::kFilterShape); + TfLiteIntArray* bias_dims = + tflite::testing::IntArrayFromInts(tflite::testing::kBiasShape); + TfLiteIntArray* output_dims = + 
tflite::testing::IntArrayFromInts(tflite::testing::kOutputShape); + + int filter_zero_points[5]; + float filter_scales[5]; + TfLiteAffineQuantization filter_quant; + TfLiteAffineQuantization bias_quant; + TfLiteTensor input_tensor = tflite::testing::CreateQuantizedTensor( + tflite::testing::kInputData, input_quantized, input_dims, input_scale, 0); + TfLiteTensor filter_tensor = + tflite::testing::CreateSymmetricPerChannelQuantizedTensor( + tflite::testing::kFilterData, filter_quantized, filter_dims, + filter_scales, filter_zero_points, &filter_quant, + 0 /* quantized dimension */); + TfLiteTensor bias_tensor = + tflite::testing::CreatePerChannelQuantizedBiasTensor( + tflite::testing::kBiasData, bias_quantized, bias_dims, input_scale, + &filter_scales[1], scales, zero_points, &bias_quant, 0); + TfLiteTensor output_tensor = tflite::testing::CreateQuantizedTensor( + output_data, output_dims, output_scale, 0 /* quantized dimension */); + + float input_scales[] = {1, input_scale}; + int input_zero_points[] = {1, 128}; + TfLiteAffineQuantization input_quant = { + tflite::testing::FloatArrayFromFloats(input_scales), + tflite::testing::IntArrayFromInts(input_zero_points), 0}; + input_tensor.quantization = {kTfLiteAffineQuantization, &input_quant}; + + constexpr int inputs_size = 3; + constexpr int outputs_size = 1; + constexpr int tensors_size = inputs_size + outputs_size; + TfLiteTensor tensors[tensors_size] = { + input_tensor, + filter_tensor, + bias_tensor, + output_tensor, + }; + + tflite::Quantize(tflite::testing::kGoldenData, golden_quantized, + output_dims_count, output_scale, 0); + + // Set filter quant to mismatched dimension. + TfLiteAffineQuantization* quant = reinterpret_cast( + filter_tensor.quantization.params); + + // Choose arbitrary incorrect scale and zero point sizes which are neither 1 + // (for broadcast case) nor the quantized dimension size. 
+ quant->scale->size = 2; + TF_LITE_MICRO_EXPECT_EQ( + kTfLiteError, tflite::testing::ValidateConvGoldens( + tensors, tensors_size, golden_quantized, + output_dims_count, &tflite::testing::common_conv_params, + tflite::Register_CONV_2D(), output_data)); + + // Set scale back to correct dimension, and make zero point array too short. + quant->scale->size = tflite::testing::kFilterShape[0]; + quant->zero_point->size = 2; + TF_LITE_MICRO_EXPECT_EQ( + kTfLiteError, tflite::testing::ValidateConvGoldens( + tensors, tensors_size, golden_quantized, + output_dims_count, &tflite::testing::common_conv_params, + tflite::Register_CONV_2D(), output_data)); +} + +TF_LITE_MICRO_TEST(Int8Input32x1Filter32x32ShouldMatchGolden) { + constexpr int kSampleSize = 32; + constexpr int kNumFilters = 32; + int input_shape[] = {4, 1, 1, 1, kSampleSize}; + int filter_shape[] = {4, kNumFilters, 1, 1, kSampleSize}; + int bias_shape[] = {1, kSampleSize}; + int output_shape[] = {4, 1, 1, 1, kSampleSize}; + float filter_values[kNumFilters * kSampleSize]; + float input_values[kSampleSize]; + float bias_values[kSampleSize]; + + // Generated these outputs using the floating point reference conv kernel. + // TODO(b/149942509): Do this comparison automatically on random inputs. + float expected_output[kSampleSize] = { + 5168.000000, 3377.000000, 306.000000, -4045.000000, -4556.000000, + -1227.000000, 822.000000, 1591.000000, 5176.000000, 3385.000000, + 314.000000, -4037.000000, -4548.000000, -1219.000000, 830.000000, + 1599.000000, 5184.000000, 3393.000000, 322.000000, -4029.000000, + -4540.000000, -1211.000000, 838.000000, 1607.000000, 5192.000000, + 3401.000000, 330.000000, -4021.000000, -4532.000000, -1203.000000, + 846.000000, 1615.000000}; + + for (int i = 0; i < kSampleSize; i++) { + bias_values[i] = i; + // Generate inputs from -16 to 15. + input_values[i] = i - 16; + } + + // Generate samples of varying values between -128 and 127. 
+ for (int i = 0; i < kNumFilters * kSampleSize; i++) { + filter_values[i] = (i * 25) % 256 - 128; + } + + TfLiteConvParams conv_params; + conv_params.activation = kTfLiteActNone; + conv_params.dilation_height_factor = 1; + conv_params.dilation_width_factor = 1; + conv_params.stride_height = 1; + conv_params.stride_width = 1; + conv_params.padding = kTfLitePaddingValid; + + TfLiteIntArray* input_dims = tflite::testing::IntArrayFromInts(input_shape); + TfLiteIntArray* filter_dims = tflite::testing::IntArrayFromInts(filter_shape); + TfLiteIntArray* bias_dims = tflite::testing::IntArrayFromInts(bias_shape); + TfLiteIntArray* output_dims = tflite::testing::IntArrayFromInts(output_shape); + const int output_dims_count = tflite::ElementCount(*output_dims); + + // Quantization Parameters. All scales except output are 1.0, and all zero + // points are 0. This direct-maps the values to floating point and makes it + // easy to reson about them. + int input_zero_point = 0; + float input_scale = 1.0f; + int filter_zero_point = 0; + float filter_scale = 1.0f; + int output_zero_point = 0; + // Output scale of 50 is needed to accomodate a float range of [-6400, 6350] + float output_scale = 50.0f; + + // Create per-tensor quantized int8_t input tensor. + int8_t input_quantized[kSampleSize]; + TfLiteTensor input_tensor = tflite::testing::CreateQuantizedTensor( + input_values, input_quantized, input_dims, input_scale, input_zero_point); + // Set zero point and scale arrays with a single element for each. + int input_zero_points[] = {1, input_zero_point}; + float input_scales[] = {1, input_scale}; + TfLiteAffineQuantization input_quant = { + tflite::testing::FloatArrayFromFloats(input_scales), + tflite::testing::IntArrayFromInts(input_zero_points), 0}; + input_tensor.quantization = {kTfLiteAffineQuantization, &input_quant}; + + // Create per-tensor quantized int8_t filter tensor. 
+ int8_t filter_quantized[kNumFilters * kSampleSize]; + TfLiteTensor filter_tensor = tflite::testing::CreateQuantizedTensor( + filter_values, filter_quantized, filter_dims, filter_scale, + filter_zero_point); + // Set zero point and scale arrays with a single element for each. + int filter_zero_points[] = {1, filter_zero_point}; + float filter_scales[] = {1, filter_scale}; + TfLiteAffineQuantization filter_quant = { + tflite::testing::FloatArrayFromFloats(filter_scales), + tflite::testing::IntArrayFromInts(filter_zero_points), 0}; + filter_tensor.quantization = {kTfLiteAffineQuantization, &filter_quant}; + + // Create per-tensor quantized int32_t bias tensor. + int32_t bias_quantized[kSampleSize]; + tflite::SymmetricQuantize(bias_values, bias_quantized, kSampleSize, + input_scale * output_scale); + TfLiteTensor bias_tensor = + tflite::testing::CreateTensor(bias_quantized, bias_dims); + + // There is a single zero point of 0, and a single scale of + // input_scale * filter_scale. + int bias_zero_points[] = {1, 0}; + float bias_scales[] = {1, input_scale * filter_scale}; + TfLiteAffineQuantization bias_quant = { + tflite::testing::FloatArrayFromFloats(bias_scales), + tflite::testing::IntArrayFromInts(bias_zero_points), 0}; + bias_tensor.quantization = {kTfLiteAffineQuantization, &bias_quant}; + + // Create per-tensor quantized int8_t output tensor. + int8_t output_quantized[kSampleSize]; + TfLiteTensor output_tensor = tflite::testing::CreateQuantizedTensor( + output_quantized, output_dims, output_scale, output_zero_point); + // Set zero point and scale arrays with a single element for each. 
+ int output_zero_points[] = {1, output_zero_point}; + float output_scales[] = {1, output_scale}; + TfLiteAffineQuantization output_quant = { + tflite::testing::FloatArrayFromFloats(output_scales), + tflite::testing::IntArrayFromInts(output_zero_points), 0}; + output_tensor.quantization = {kTfLiteAffineQuantization, &output_quant}; + + // The 3 inputs include the input, filter and bias tensors. + constexpr int kInputsSize = 3; + constexpr int kOutputsSize = 1; + constexpr int kTensorsSize = kInputsSize + kOutputsSize; + TfLiteTensor tensors[kTensorsSize] = { + input_tensor, + filter_tensor, + bias_tensor, + output_tensor, + }; + + int8_t golden_quantized[kSampleSize]; + tflite::Quantize(expected_output, golden_quantized, output_dims_count, + output_scale, output_zero_point); + + // Rounding errors due to quantization should not exceed 1. + constexpr int kQuantizationTolerance = 1; + + TF_LITE_MICRO_EXPECT_EQ( + kTfLiteOk, tflite::testing::ValidateConvGoldens( + tensors, kTensorsSize, golden_quantized, output_dims_count, + &conv_params, tflite::Register_CONV_2D(), output_quantized, + kQuantizationTolerance)); +} + +// This test is created based on +// https://github.com/tensorflow/tflite-micro/issues/329 +// Input, output and filter are all 8 bits. +// Filter tensor is of dimension 8x3x3x3 with different scales per output +// channel. Some arbitrary parameters come from the above issue. 
+TF_LITE_MICRO_TEST(Int8Filter8x3x3x3PerChannelScaleRelu6ShouldMatchGolden) { + using tflite::ElementCount; + using tflite::kConvBiasQuantized8; + using tflite::kConvFilter8x3x3x3; + using tflite::kConvGoldenOutput1x16x16x8; + using tflite::kConvInput1x32x32x3; + using tflite::testing::CreateTensor; + using tflite::testing::FloatArrayFromFloats; + using tflite::testing::IntArrayFromInts; + using tflite::testing::ValidateConvGoldens; + + constexpr int kInDepth = 3; + constexpr int kOutDepth = 8; + + // Input quantization parameters: same scale and zero point for all input + // elements. + constexpr float kInputScale = 0.00784313772f; + constexpr int kInputZeroPoint = -1; + float input_scales[] = {1, kInputScale}; + int input_zero_points[] = {1, kInputZeroPoint}; + TfLiteAffineQuantization input_quant = {FloatArrayFromFloats(input_scales), + IntArrayFromInts(input_zero_points), + 0}; + // Create input tensor of size 1x32x32x3. + int input_shape[] = {4, 1, 32, 32, kInDepth}; + TfLiteIntArray* input_dims = IntArrayFromInts(input_shape); + TfLiteTensor input_tensor = CreateTensor(kConvInput1x32x32x3, input_dims); + input_tensor.params = {kInputScale, kInputZeroPoint}; + input_tensor.quantization = {kTfLiteAffineQuantization, &input_quant}; + + // Filter quantization parameters: same zero point, but different scale per + // output channel. + int filter_zero_points[kOutDepth + 1] = {kOutDepth, 0, 0, 0, 0, 0, 0, 0, 0}; + float filter_scales[kOutDepth + 1] = { + kOutDepth, 2.18926089e-05, 0.00453596329, + 0.000504297379, 0.00184638216, 0.00596635276, + 0.000199135626, 0.0047677448, 0.00193942268}; + TfLiteAffineQuantization filter_quant; + filter_quant.scale = FloatArrayFromFloats(filter_scales); + filter_quant.zero_point = IntArrayFromInts(filter_zero_points); + filter_quant.quantized_dimension = 0; + + // Create filter tensor of size 8x3x3x3. 
+ int filter_shape[] = {4, kOutDepth, 3, 3, kInDepth}; + TfLiteIntArray* filter_dims = IntArrayFromInts(filter_shape); + TfLiteTensor filter_tensor = CreateTensor(kConvFilter8x3x3x3, filter_dims); + filter_tensor.quantization = {kTfLiteAffineQuantization, &filter_quant}; + + // Bias quantization parameters: same zero point, but different scale per + // output channel. + int bias_zero_points[kOutDepth + 1] = {kOutDepth, 0, 0, 0, 0, 0, 0, 0, 0}; + float bias_scales[kOutDepth + 1] = { + kOutDepth, 1.71706745e-07, 3.5576184e-05, + 3.95527377e-06, 1.44814294e-05, 4.67949249e-05, + 1.56184819e-06, 3.73940784e-05, 1.52111588e-05}; + TfLiteAffineQuantization bias_quant; + bias_quant.scale = FloatArrayFromFloats(bias_scales); + bias_quant.zero_point = IntArrayFromInts(bias_zero_points); + bias_quant.quantized_dimension = 0; + + // Create per output channel bias of size 8 + int bias_shape[] = {1, kOutDepth}; + TfLiteIntArray* bias_dims = IntArrayFromInts(bias_shape); + TfLiteTensor bias_tensor = CreateTensor(kConvBiasQuantized8, bias_dims); + bias_tensor.quantization = {kTfLiteAffineQuantization, &bias_quant}; + + // Output quantization parameters: same zero point and scale for all elements. 
+ const float output_scale = 0.0235294122f; + const int output_zero_point = -128; + float output_scales[] = {1, output_scale}; + int output_zero_points[] = {1, output_zero_point}; + TfLiteAffineQuantization output_quant = {FloatArrayFromFloats(output_scales), + IntArrayFromInts(output_zero_points), + 0}; + + // Create output tensor of 16x16x8 + int8_t output_data[1 * 16 * 16 * kOutDepth]; + int output_shape[] = {4, 1, 16, 16, kOutDepth}; + TfLiteIntArray* output_dims = IntArrayFromInts(output_shape); + const int output_dims_count = ElementCount(*output_dims); + TfLiteTensor output_tensor = CreateTensor(output_data, output_dims); + output_tensor.params = {output_scale, output_zero_point}; + output_tensor.quantization = {kTfLiteAffineQuantization, &output_quant}; + + // The 3 inputs include the input, filter and bias tensors. + constexpr int inputs_size = 3; + constexpr int outputs_size = 1; + constexpr int tensors_size = inputs_size + outputs_size; + TfLiteTensor tensors[tensors_size] = { + input_tensor, + filter_tensor, + bias_tensor, + output_tensor, + }; + + TfLiteConvParams conv_params{tflite::testing::common_conv_params}; + conv_params.activation = kTfLiteActRelu6; + + TF_LITE_MICRO_EXPECT_EQ( + kTfLiteOk, + ValidateConvGoldens(tensors, tensors_size, kConvGoldenOutput1x16x16x8, + output_dims_count, &conv_params, + tflite::Register_CONV_2D(), output_data, + 1.0 /* tolerance */)); +} + +TF_LITE_MICRO_TESTS_END diff --git a/tensorflow/lite/micro/kernels/conv_test.h b/tensorflow/lite/micro/kernels/conv_test.h new file mode 100644 index 0000000..39d3fa7 --- /dev/null +++ b/tensorflow/lite/micro/kernels/conv_test.h @@ -0,0 +1,114 @@ +/* Copyright 2020 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. 
+You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#ifndef TENSORFLOW_LITE_MICRO_KERNELS_CONV_TEST_H_ +#define TENSORFLOW_LITE_MICRO_KERNELS_CONV_TEST_H_ + +#include "tensorflow/lite/c/builtin_op_data.h" +#include "tensorflow/lite/c/common.h" +#include "tensorflow/lite/micro/kernels/kernel_runner.h" +#include "tensorflow/lite/micro/kernels/micro_ops.h" +#include "tensorflow/lite/micro/test_helpers.h" +#include "tensorflow/lite/micro/testing/micro_test.h" + +namespace tflite { +namespace testing { + +TfLiteStatus InvokeConv(TfLiteTensor* tensors, int tensors_size, + int output_length, TfLiteConvParams* conv_params, + TFLMRegistration registration, float* output_data); + +TfLiteStatus InvokeConv(TfLiteTensor* tensors, int tensors_size, + int output_length, TfLiteConvParams* conv_params, + TFLMRegistration registration, int8_t* output_data); + +TfLiteStatus InvokeConv(TfLiteTensor* tensors, int tensors_size, + int output_length, TfLiteConvParams* conv_params, + TFLMRegistration registration, uint8_t* output_data); + +TfLiteStatus ValidateConvGoldens(TfLiteTensor* tensors, int tensors_size, + const float* expected_output_data, + int output_length, + TfLiteConvParams* conv_params, + TFLMRegistration registration, + float* output_data, float tolerance = 1e-5); + +TfLiteStatus ValidateConvGoldens(TfLiteTensor* tensors, int tensors_size, + const int8_t* expected_output_data, + int output_length, + TfLiteConvParams* conv_params, + TFLMRegistration registration, + int8_t* output_data, float tolerance = 1e-5); + +TfLiteStatus 
ValidateConvGoldens(TfLiteTensor* tensors, int tensors_size, + const uint8_t* expected_output_data, + int output_length, + TfLiteConvParams* conv_params, + TFLMRegistration registration, + uint8_t* output_data, float tolerance = 1e-5); + +TfLiteStatus TestConvFloat(int* input_dims_data, const float* input_data, + int* filter_dims_data, const float* filter_data, + int* bias_dims_data, const float* bias_data, + int* output_dims_data, + const float* expected_output_data, + TfLiteConvParams* conv_params, + TFLMRegistration registration, float* output_data); + +TfLiteStatus TestConvQuantizedPerLayer( + int* input_dims_data, const float* input_data, uint8_t* input_quantized, + float input_scale, int* filter_dims_data, const float* filter_data, + uint8_t* filter_quantized, float filter_scale, int* bias_dims_data, + const float* bias_data, int32_t* bias_quantized, int* output_dims_data, + const float* expected_output_data, uint8_t* expected_output_quantized, + float output_scale, TfLiteConvParams* conv_params, + TFLMRegistration registration, uint8_t* output_data); + +TfLiteStatus TestConvQuantizedPerChannel( + int* input_dims_data, const float* input_data, int8_t* input_quantized, + float input_scale, int input_zero_point, int* filter_dims_data, + const float* filter_data, int8_t* filter_data_quantized, + int* bias_dims_data, const float* bias_data, int32_t* bias_data_quantized, + float* bias_scales, int* bias_zero_points, int* output_dims_data, + const float* expected_output_data, int8_t* expected_output_data_quantized, + float output_scale, int output_zero_point, TfLiteConvParams* conv_params, + TFLMRegistration registration, int8_t* output_data, + TfLiteType tensor_weight_type = kTfLiteNoType); + +TfLiteStatus TestConvQuantizedPerChannel( + int* input_dims_data, const float* input_data, int16_t* input_quantized, + float input_scale, int input_zero_point, int* filter_dims_data, + const float* filter_data, int8_t* filter_data_quantized, + int* bias_dims_data, const 
float* bias_data, + std::int64_t* bias_data_quantized, float* bias_scales, + int* bias_zero_points, int* output_dims_data, + const float* expected_output_data, int16_t* expected_output_data_quantized, + float output_scale, int output_zero_point, TfLiteConvParams* conv_params, + TFLMRegistration registration, int16_t* output_data); + +TfLiteStatus TestConvQuantizedPerChannel( + int* input_dims_data, const float* input_data, int16_t* input_quantized, + float input_scale, int input_zero_point, int* filter_dims_data, + const float* filter_data, int8_t* filter_data_quantized, + int* bias_dims_data, const float* bias_data, int32_t* bias_data_quantized, + float* bias_scales, int* bias_zero_points, int* output_dims_data, + const float* expected_output_data, int16_t* expected_output_data_quantized, + float output_scale, int output_zero_point, TfLiteConvParams* conv_params, + TFLMRegistration registration, int16_t* output_data); + +} // namespace testing +} // namespace tflite + +#endif // TENSORFLOW_LITE_MICRO_KERNELS_CONV_TEST_H_ diff --git a/tensorflow/lite/micro/kernels/conv_test_common.cc b/tensorflow/lite/micro/kernels/conv_test_common.cc new file mode 100644 index 0000000..bdc9466 --- /dev/null +++ b/tensorflow/lite/micro/kernels/conv_test_common.cc @@ -0,0 +1,247 @@ +/* Copyright 2020 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/ + +#include "tensorflow/lite/micro/kernels/conv_test.h" + +namespace tflite { +namespace testing { + +template +TfLiteStatus InvokeConv(TfLiteTensor* tensors, int tensors_size, + int output_length, TfLiteConvParams* conv_params, + TFLMRegistration registration, T* output_data) { + int inputs_array_data[] = {3, 0, 1, 2}; + TfLiteIntArray* inputs_array = IntArrayFromInts(inputs_array_data); + int outputs_array_data[] = {1, 3}; + TfLiteIntArray* outputs_array = IntArrayFromInts(outputs_array_data); + + micro::KernelRunner runner(registration, tensors, tensors_size, inputs_array, + outputs_array, conv_params); + + const char* init_data = reinterpret_cast(conv_params); + TfLiteStatus status = runner.InitAndPrepare(init_data); + if (status != kTfLiteOk) { + return status; + } + return runner.Invoke(); +} + +template +TfLiteStatus ValidateConvGoldens(TfLiteTensor* tensors, int tensors_size, + const T* expected_output_data, + int output_length, + TfLiteConvParams* conv_params, + TFLMRegistration registration, T* output_data, + float tolerance) { + TfLiteStatus status = InvokeConv(tensors, tensors_size, output_length, + conv_params, registration, output_data); + if (status != kTfLiteOk) { + return status; + } + for (int i = 0; i < output_length; ++i) { + TF_LITE_MICRO_EXPECT_NEAR(expected_output_data[i], output_data[i], + tolerance); + } + return kTfLiteOk; +} + +TfLiteStatus InvokeConv(TfLiteTensor* tensors, int tensors_size, + int output_length, TfLiteConvParams* conv_params, + TFLMRegistration registration, float* output_data) { + return InvokeConv(tensors, tensors_size, output_length, conv_params, + registration, output_data); +} + +TfLiteStatus InvokeConv(TfLiteTensor* tensors, int tensors_size, + int output_length, TfLiteConvParams* conv_params, + TFLMRegistration registration, int8_t* output_data) { + return InvokeConv(tensors, tensors_size, output_length, conv_params, + registration, 
output_data); +} + +TfLiteStatus ValidateConvGoldens(TfLiteTensor* tensors, int tensors_size, + const float* expected_output_data, + int output_length, + TfLiteConvParams* conv_params, + TFLMRegistration registration, + float* output_data, float tolerance) { + return ValidateConvGoldens(tensors, tensors_size, expected_output_data, + output_length, conv_params, registration, + output_data, tolerance); +} + +TfLiteStatus ValidateConvGoldens(TfLiteTensor* tensors, int tensors_size, + const int8_t* expected_output_data, + int output_length, + TfLiteConvParams* conv_params, + TFLMRegistration registration, + int8_t* output_data, float tolerance) { + return ValidateConvGoldens( + tensors, tensors_size, expected_output_data, output_length, conv_params, + registration, output_data, tolerance); +} + +TfLiteStatus TestConvFloat(int* input_dims_data, const float* input_data, + int* filter_dims_data, const float* filter_data, + int* bias_dims_data, const float* bias_data, + int* output_dims_data, + const float* expected_output_data, + TfLiteConvParams* conv_params, + TFLMRegistration registration, float* output_data) { + TfLiteIntArray* input_dims = IntArrayFromInts(input_dims_data); + TfLiteIntArray* filter_dims = IntArrayFromInts(filter_dims_data); + TfLiteIntArray* bias_dims = IntArrayFromInts(bias_dims_data); + TfLiteIntArray* output_dims = IntArrayFromInts(output_dims_data); + const int output_dims_count = ElementCount(*output_dims); + constexpr int inputs_size = 3; + constexpr int outputs_size = 1; + constexpr int tensors_size = inputs_size + outputs_size; + TfLiteTensor tensors[tensors_size] = { + CreateTensor(input_data, input_dims), + CreateTensor(filter_data, filter_dims), + CreateTensor(bias_data, bias_dims), + CreateTensor(output_data, output_dims), + }; + + return ValidateConvGoldens(tensors, tensors_size, expected_output_data, + output_dims_count, conv_params, registration, + output_data); +} + +template +TfLiteStatus TestConvQuantizedPerChannel( + int* 
input_dims_data, const float* input_data, T* input_quantized, + float input_scale, int input_zero_point, int* filter_dims_data, + const float* filter_data, int8_t* filter_data_quantized, + int* bias_dims_data, const float* bias_data, BiasT* bias_data_quantized, + float* bias_scales, int* bias_zero_points, int* output_dims_data, + const float* expected_output_data, T* expected_output_data_quantized, + float output_scale, int output_zero_point, TfLiteConvParams* conv_params, + TFLMRegistration registration, T* output_data, + TfLiteType tensor_weight_type = kTfLiteNoType) { + TfLiteIntArray* input_dims = IntArrayFromInts(input_dims_data); + TfLiteIntArray* filter_dims = IntArrayFromInts(filter_dims_data); + TfLiteIntArray* bias_dims = IntArrayFromInts(bias_dims_data); + TfLiteIntArray* output_dims = IntArrayFromInts(output_dims_data); + const int output_dims_count = ElementCount(*output_dims); + + int filter_zero_points[5]; + float filter_scales[5]; + TfLiteAffineQuantization filter_quant; + TfLiteAffineQuantization bias_quant; + TfLiteTensor input_tensor = CreateQuantizedTensor( + input_data, input_quantized, input_dims, input_scale, input_zero_point); + TfLiteTensor filter_tensor = CreateSymmetricPerChannelQuantizedTensor( + filter_data, filter_data_quantized, filter_dims, filter_scales, + filter_zero_points, &filter_quant, 0, false, + tensor_weight_type /* quantized dimension */); + TfLiteTensor bias_tensor = CreatePerChannelQuantizedBiasTensor( + bias_data, bias_data_quantized, bias_dims, input_scale, &filter_scales[1], + bias_scales, bias_zero_points, &bias_quant, 0 /* quantized dimension */); + TfLiteTensor output_tensor = CreateQuantizedTensor( + output_data, output_dims, output_scale, output_zero_point); + + float input_scales[] = {1, input_scale}; + int input_zero_points[] = {1, input_zero_point}; + TfLiteAffineQuantization input_quant = {FloatArrayFromFloats(input_scales), + IntArrayFromInts(input_zero_points), + 0}; + input_tensor.quantization = 
{kTfLiteAffineQuantization, &input_quant}; + + float output_scales[] = {1, output_scale}; + int output_zero_points[] = {1, output_zero_point}; + TfLiteAffineQuantization output_quant = {FloatArrayFromFloats(output_scales), + IntArrayFromInts(output_zero_points), + 0}; + output_tensor.quantization = {kTfLiteAffineQuantization, &output_quant}; + + constexpr int inputs_size = 3; + constexpr int outputs_size = 1; + constexpr int tensors_size = inputs_size + outputs_size; + TfLiteTensor tensors[tensors_size] = { + input_tensor, + filter_tensor, + bias_tensor, + output_tensor, + }; + + tflite::Quantize(expected_output_data, expected_output_data_quantized, + output_dims_count, output_scale, output_zero_point); + return ValidateConvGoldens( + tensors, tensors_size, expected_output_data_quantized, output_dims_count, + conv_params, registration, output_data, 1.0 /* tolerance */); +} + +// Test conv with int8 input, int8 weight, int32 bias, int32 accumulator +TfLiteStatus TestConvQuantizedPerChannel( + int* input_dims_data, const float* input_data, int8_t* input_quantized, + float input_scale, int input_zero_point, int* filter_dims_data, + const float* filter_data, int8_t* filter_data_quantized, + int* bias_dims_data, const float* bias_data, int32_t* bias_data_quantized, + float* bias_scales, int* bias_zero_points, int* output_dims_data, + const float* expected_output_data, int8_t* expected_output_data_quantized, + float output_scale, int output_zero_point, TfLiteConvParams* conv_params, + TFLMRegistration registration, int8_t* output_data, + TfLiteType tensor_weight_type) { + return TestConvQuantizedPerChannel( + input_dims_data, input_data, input_quantized, input_scale, + input_zero_point, filter_dims_data, filter_data, filter_data_quantized, + bias_dims_data, bias_data, bias_data_quantized, bias_scales, + bias_zero_points, output_dims_data, expected_output_data, + expected_output_data_quantized, output_scale, output_zero_point, + conv_params, registration, output_data, 
tensor_weight_type); +} + +// Test conv with int16 input, int8 weight, int64 bias, int64 accumulator +TfLiteStatus TestConvQuantizedPerChannel( + int* input_dims_data, const float* input_data, int16_t* input_quantized, + float input_scale, int input_zero_point, int* filter_dims_data, + const float* filter_data, int8_t* filter_data_quantized, + int* bias_dims_data, const float* bias_data, + std::int64_t* bias_data_quantized, float* bias_scales, + int* bias_zero_points, int* output_dims_data, + const float* expected_output_data, int16_t* expected_output_data_quantized, + float output_scale, int output_zero_point, TfLiteConvParams* conv_params, + TFLMRegistration registration, int16_t* output_data) { + return TestConvQuantizedPerChannel( + input_dims_data, input_data, input_quantized, input_scale, + input_zero_point, filter_dims_data, filter_data, filter_data_quantized, + bias_dims_data, bias_data, bias_data_quantized, bias_scales, + bias_zero_points, output_dims_data, expected_output_data, + expected_output_data_quantized, output_scale, output_zero_point, + conv_params, registration, output_data); +} + +// Test conv with int16 input, int8 weight, int32 bias, int32 accumulator +TfLiteStatus TestConvQuantizedPerChannel( + int* input_dims_data, const float* input_data, int16_t* input_quantized, + float input_scale, int input_zero_point, int* filter_dims_data, + const float* filter_data, int8_t* filter_data_quantized, + int* bias_dims_data, const float* bias_data, int32_t* bias_data_quantized, + float* bias_scales, int* bias_zero_points, int* output_dims_data, + const float* expected_output_data, int16_t* expected_output_data_quantized, + float output_scale, int output_zero_point, TfLiteConvParams* conv_params, + TFLMRegistration registration, int16_t* output_data) { + return TestConvQuantizedPerChannel( + input_dims_data, input_data, input_quantized, input_scale, + input_zero_point, filter_dims_data, filter_data, filter_data_quantized, + bias_dims_data, bias_data, 
bias_data_quantized, bias_scales, + bias_zero_points, output_dims_data, expected_output_data, + expected_output_data_quantized, output_scale, output_zero_point, + conv_params, registration, output_data); +} + +} // namespace testing +} // namespace tflite diff --git a/tensorflow/lite/micro/kernels/cumsum.cc b/tensorflow/lite/micro/kernels/cumsum.cc new file mode 100644 index 0000000..f62f2a5 --- /dev/null +++ b/tensorflow/lite/micro/kernels/cumsum.cc @@ -0,0 +1,175 @@ +/* Copyright 2021 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/ + +#include "tensorflow/lite/kernels/internal/reference/cumsum.h" + +#include "tensorflow/lite/c/common.h" +#include "tensorflow/lite/kernels/internal/quantization_util.h" +#include "tensorflow/lite/kernels/internal/types.h" +#include "tensorflow/lite/kernels/kernel_util.h" +#include "tensorflow/lite/micro/kernels/kernel_util.h" +#include "tensorflow/lite/micro/micro_log.h" + +namespace tflite { +namespace { + +constexpr int kInputTensor = 0; +constexpr int kAxisTensor = 1; +constexpr int kOutputTensor = 0; + +constexpr int kCumSumIntegerShift = 20; + +// only used with INT8 tensors +struct OpData { + int32_t output_activation_min; + int32_t output_activation_max; + int32_t input_offset; + int32_t output_offset; + int32_t input_multiplier; + int32_t output_multiplier; + int input_shift; + int output_shift; + int left_shift; +}; + +TfLiteStatus CalculateOpData(TfLiteContext* context, TfLiteNode* node) { + TF_LITE_ENSURE_EQ(context, NumInputs(node), 2); + TF_LITE_ENSURE_EQ(context, NumOutputs(node), 1); + + MicroContext* micro_context = GetMicroContext(context); + + TfLiteTensor* input = + micro_context->AllocateTempInputTensor(node, kInputTensor); + TfLiteTensor* axis = + micro_context->AllocateTempInputTensor(node, kAxisTensor); + + TF_LITE_ENSURE(context, + input->type == kTfLiteFloat32 || input->type == kTfLiteInt8); + TF_LITE_ENSURE_EQ(context, axis->type, kTfLiteInt32); + + TF_LITE_ENSURE_EQ(context, NumElements(axis), 1); + + TF_LITE_ENSURE(context, NumDimensions(input) >= 1); + + TfLiteTensor* output = + micro_context->AllocateTempOutputTensor(node, kOutputTensor); + + TF_LITE_ENSURE_EQ(context, input->type, output->type); + TF_LITE_ENSURE(context, HaveSameShapes(input, output)); + + if (output->type == kTfLiteInt8) { + node->user_data = + context->AllocatePersistentBuffer(context, sizeof(OpData)); + OpData* data = static_cast(node->user_data); + + // 8bit -> 8bit general 
quantized path, with general rescalings + data->input_offset = -input->params.zero_point; + data->output_offset = output->params.zero_point; + data->left_shift = kCumSumIntegerShift; + const double twice_max_input_scale = + 2 * static_cast(input->params.scale); + const double real_input_multiplier = + static_cast(input->params.scale) / twice_max_input_scale; + const double real_output_multiplier = + twice_max_input_scale / + ((1 << data->left_shift) * static_cast(output->params.scale)); + + QuantizeMultiplierSmallerThanOneExp( + real_input_multiplier, &data->input_multiplier, &data->input_shift); + + QuantizeMultiplierSmallerThanOneExp( + real_output_multiplier, &data->output_multiplier, &data->output_shift); + + TF_LITE_ENSURE_STATUS(CalculateActivationRangeQuantized( + context, kTfLiteActNone, output, &data->output_activation_min, + &data->output_activation_max)); + } + + micro_context->DeallocateTempTfLiteTensor(input); + micro_context->DeallocateTempTfLiteTensor(axis); + micro_context->DeallocateTempTfLiteTensor(output); + + return kTfLiteOk; +} + +TfLiteStatus Prepare(TfLiteContext* context, TfLiteNode* node) { + return CalculateOpData(context, node); +} + +TfLiteStatus Eval(TfLiteContext* context, TfLiteNode* node) { + const TfLiteEvalTensor* input = + tflite::micro::GetEvalInput(context, node, kInputTensor); + const TfLiteEvalTensor* axis_tensor = + tflite::micro::GetEvalInput(context, node, kAxisTensor); + + TfLiteEvalTensor* output = + tflite::micro::GetEvalOutput(context, node, kOutputTensor); + + auto* cs_params = static_cast(node->builtin_data); + auto input_shape = tflite::micro::GetTensorShape(input); + + int32_t axis = *tflite::micro::GetTensorData(axis_tensor); + if (axis < 0) axis += input_shape.DimensionsCount(); + + if (axis < 0 || axis >= input_shape.DimensionsCount()) { + MicroPrintf("CUMSUM Invalid axis: %d", axis); + return kTfLiteError; + } + + switch (input->type) { + case kTfLiteFloat32: { + 
reference_ops::CumSum(tflite::micro::GetTensorData(input), + input_shape, axis, cs_params->exclusive, + cs_params->reverse, + tflite::micro::GetTensorData(output)); + return kTfLiteOk; + } break; + + case kTfLiteInt8: { + auto* data = static_cast(node->user_data); + ArithmeticParams params; + params.left_shift = data->left_shift; + params.input1_offset = data->input_offset; + params.input1_multiplier = data->input_multiplier; + params.input1_shift = data->input_shift; + params.output_offset = data->output_offset; + params.output_multiplier = data->output_multiplier; + params.output_shift = data->output_shift; + SetActivationParams(data->output_activation_min, + data->output_activation_max, ¶ms); + reference_ops::CumSum(params, tflite::micro::GetTensorData(input), + input_shape, axis, cs_params->exclusive, + cs_params->reverse, + tflite::micro::GetTensorData(output)); + return kTfLiteOk; + } break; + + default: { + MicroPrintf("CUMSUM only supports FLOAT32 and INT8, got %s.", + TfLiteTypeGetName(output->type)); + return kTfLiteError; + } + } + + return kTfLiteError; +} + +} // namespace + +TFLMRegistration Register_CUMSUM() { + return tflite::micro::RegisterOp(nullptr, Prepare, Eval); +} + +} // namespace tflite diff --git a/tensorflow/lite/micro/kernels/cumsum_test.cc b/tensorflow/lite/micro/kernels/cumsum_test.cc new file mode 100644 index 0000000..1d85f69 --- /dev/null +++ b/tensorflow/lite/micro/kernels/cumsum_test.cc @@ -0,0 +1,350 @@ +/* Copyright 2021 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#include +#include + +#include "tensorflow/lite/c/builtin_op_data.h" +#include "tensorflow/lite/c/common.h" +#include "tensorflow/lite/micro/kernels/kernel_runner.h" +#include "tensorflow/lite/micro/test_helpers.h" +#include "tensorflow/lite/micro/testing/micro_test.h" + +namespace tflite { +namespace testing { +namespace { + +struct CumSumTestParams { + bool exclusive = false; + bool reverse = false; + int32_t axis = std::numeric_limits::max(); +}; + +void ExecuteCumSumTest(CumSumTestParams& test_params, TfLiteTensor* tensors, + int tensors_count) { + int kInputArrayData[] = {2, 0, 1}; + TfLiteIntArray* inputs_array = IntArrayFromInts(kInputArrayData); + int kOutputArrayData[] = {1, 2}; + TfLiteIntArray* outputs_array = IntArrayFromInts(kOutputArrayData); + + TfLiteCumsumParams params; + params.exclusive = test_params.exclusive; + params.reverse = test_params.reverse; + + const TFLMRegistration registration = tflite::Register_CUMSUM(); + micro::KernelRunner runner(registration, tensors, tensors_count, inputs_array, + outputs_array, static_cast(¶ms)); + + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, runner.InitAndPrepare()); + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, runner.Invoke()); +} + +template +void TestCumSum(CumSumTestParams& test_params, int* input_dims_data, + const T* input_data, int* expected_dims, const T* expected_data, + T* output_data) { + TfLiteIntArray* input_dims = IntArrayFromInts(input_dims_data); + TfLiteIntArray* output_dims = IntArrayFromInts(expected_dims); + const int output_count = ElementCount(*output_dims); + + int axis_dims_data[] = {1, 1}; + TfLiteIntArray* axis_dims = IntArrayFromInts(axis_dims_data); + const int32_t axis_data[] = {test_params.axis}; + + TfLiteTensor tensors[] = { + CreateTensor(input_data, input_dims), + CreateTensor(axis_data, axis_dims), + 
CreateTensor(output_data, output_dims), + }; + constexpr int tensors_count = std::extent::value; + ExecuteCumSumTest(test_params, tensors, tensors_count); + + constexpr float kTolerance = 1e-5; + for (int i = 0; i < output_count; i++) { + TF_LITE_MICRO_EXPECT_NEAR(expected_data[i], output_data[i], kTolerance); + } +} + +// min/max are used to compute scale, zero-point, compare tolerance +template +struct TestQuantParams { + float data_min; // input and output data minimum value + float data_max; // input and output data maximum value + T input_data[kOutputSize]; // quantized input storage + T output_data[kOutputSize]; // quantized output storage +}; + +// for quantized int, the error shouldn't exceed step +template +float GetTolerance(float min, float max) { + float kQuantizedStep = + 2.0f * (max - min) / + (std::numeric_limits::max() - std::numeric_limits::min()); + return kQuantizedStep; +} + +template +void TestCumSumQuantized(CumSumTestParams& test_params, + TestQuantParams* params, + int* input_dims_data, const float* input_data, + int* expected_dims, const float* expected_data, + float* output_data) { + TfLiteIntArray* input_dims = IntArrayFromInts(input_dims_data); + TfLiteIntArray* output_dims = IntArrayFromInts(expected_dims); + + int axis_dims_data[] = {1, 1}; + TfLiteIntArray* axis_dims = IntArrayFromInts(axis_dims_data); + const int32_t axis_data[] = {test_params.axis}; + + const float scale = ScaleFromMinMax(params->data_min, params->data_max); + const int zero_point = + ZeroPointFromMinMax(params->data_min, params->data_max); + + TfLiteTensor tensors[] = { + CreateQuantizedTensor(input_data, params->input_data, input_dims, scale, + zero_point), + CreateTensor(axis_data, axis_dims), + CreateQuantizedTensor(params->output_data, output_dims, scale, + zero_point), + }; + + constexpr int tensors_count = std::extent::value; + ExecuteCumSumTest(test_params, tensors, tensors_count); + + Dequantize(params->output_data, kOutputSize, scale, zero_point, 
output_data); + const float kTolerance = GetTolerance(params->data_min, params->data_max); + for (int i = 0; i < kOutputSize; i++) { + TF_LITE_MICRO_EXPECT_NEAR(expected_data[i], output_data[i], kTolerance); + } +} + +} // namespace +} // namespace testing +} // namespace tflite + +TF_LITE_MICRO_TESTS_BEGIN + +TF_LITE_MICRO_TEST(CumSumOpTestSimpleTest) { + int kDims[] = {2, 2, 4}; + constexpr float kInput[] = {1, 2, 3, 4, 5, 6, 7, 8}; + constexpr float kExpect[] = {1, 3, 6, 10, 5, 11, 18, 26}; + + constexpr int kOutputCount = std::extent::value; + float output_data[kOutputCount]; + + tflite::testing::CumSumTestParams test_params; + test_params.axis = 1; + + tflite::testing::TestCumSum(test_params, kDims, kInput, kDims, kExpect, + output_data); +} + +TF_LITE_MICRO_TEST(CumSumOpTestSimpleAxis0Test) { + int kDims[] = {2, 2, 4}; + constexpr float kInput[] = {1, 2, 3, 4, 5, 6, 7, 8}; + constexpr float kExpect[] = {1, 2, 3, 4, 6, 8, 10, 12}; + + constexpr int kOutputCount = std::extent::value; + float output_data[kOutputCount]; + + tflite::testing::CumSumTestParams test_params; + test_params.axis = 0; + + tflite::testing::TestCumSum(test_params, kDims, kInput, kDims, kExpect, + output_data); +} + +TF_LITE_MICRO_TEST(CumSumOpTestSimple1DTest) { + int kDims[] = {1, 8}; + constexpr float kInput[] = {1, 2, 3, 4, 5, 6, 7, 8}; + constexpr float kExpect[] = {1, 3, 6, 10, 15, 21, 28, 36}; + + constexpr int kOutputCount = std::extent::value; + float output_data[kOutputCount]; + + tflite::testing::CumSumTestParams test_params; + test_params.axis = 0; + + tflite::testing::TestCumSum(test_params, kDims, kInput, kDims, kExpect, + output_data); +} + +TF_LITE_MICRO_TEST(CumSumOpTestSimpleReverseTest) { + int kDims[] = {2, 2, 4}; + constexpr float kInput[] = {1, 2, 3, 4, 5, 6, 7, 8}; + constexpr float kExpect[] = {10, 9, 7, 4, 26, 21, 15, 8}; + + constexpr int kOutputCount = std::extent::value; + float output_data[kOutputCount]; + + tflite::testing::CumSumTestParams test_params; + 
test_params.axis = 1; + test_params.reverse = true; + + tflite::testing::TestCumSum(test_params, kDims, kInput, kDims, kExpect, + output_data); +} + +TF_LITE_MICRO_TEST(CumSumOpTestSimpleExclusiveTest) { + int kDims[] = {2, 2, 4}; + constexpr float kInput[] = {1, 2, 3, 4, 5, 6, 7, 8}; + constexpr float kExpect[] = {0, 1, 3, 6, 0, 5, 11, 18}; + + constexpr int kOutputCount = std::extent::value; + float output_data[kOutputCount]; + + tflite::testing::CumSumTestParams test_params; + test_params.axis = 1; + test_params.exclusive = true; + + tflite::testing::TestCumSum(test_params, kDims, kInput, kDims, kExpect, + output_data); +} + +TF_LITE_MICRO_TEST(CumSumOpTestSimpleReverseExclusiveTest) { + int kDims[] = {2, 2, 4}; + constexpr float kInput[] = {1, 2, 3, 4, 5, 6, 7, 8}; + constexpr float kExpect[] = {9, 7, 4, 0, 21, 15, 8, 0}; + + constexpr int kOutputCount = std::extent::value; + float output_data[kOutputCount]; + + tflite::testing::CumSumTestParams test_params; + test_params.axis = -1; + test_params.exclusive = true; + test_params.reverse = true; + + tflite::testing::TestCumSum(test_params, kDims, kInput, kDims, kExpect, + output_data); +} + +TF_LITE_MICRO_TEST(CumSumOpTestSimpleTestInt8) { + int kDims[] = {2, 2, 4}; + constexpr float kInput[] = {1, 2, 3, 4, 5, 6, 7, 8}; + constexpr float kExpect[] = {1, 3, 6, 10, 5, 11, 18, 26}; + + constexpr int kOutputCount = std::extent::value; + float output_data[kOutputCount]; + + tflite::testing::CumSumTestParams test_params; + test_params.axis = 1; + + tflite::testing::TestQuantParams params = {}; + params.data_min = -26.0f; + params.data_max = 26.0f; + + tflite::testing::TestCumSumQuantized( + test_params, ¶ms, kDims, kInput, kDims, kExpect, output_data); +} + +TF_LITE_MICRO_TEST(CumSumOpTestSimpleAxis0TestInt8) { + int kDims[] = {2, 2, 4}; + constexpr float kInput[] = {1, 2, 3, 4, 5, 6, 7, 8}; + constexpr float kExpect[] = {1, 2, 3, 4, 6, 8, 10, 12}; + + constexpr int kOutputCount = std::extent::value; + float 
output_data[kOutputCount]; + + tflite::testing::CumSumTestParams test_params; + test_params.axis = 0; + + tflite::testing::TestQuantParams params = {}; + params.data_min = -12.0f; + params.data_max = 12.0f; + + tflite::testing::TestCumSumQuantized( + test_params, ¶ms, kDims, kInput, kDims, kExpect, output_data); +} + +TF_LITE_MICRO_TEST(CumSumOpTestSimple1DTestInt8) { + int kDims[] = {1, 8}; + constexpr float kInput[] = {1, 2, 3, 4, 5, 6, 7, 8}; + constexpr float kExpect[] = {1, 3, 6, 10, 15, 21, 28, 36}; + + constexpr int kOutputCount = std::extent::value; + float output_data[kOutputCount]; + + tflite::testing::CumSumTestParams test_params; + test_params.axis = 0; + + tflite::testing::TestQuantParams params = {}; + params.data_min = -36.0f; + params.data_max = 36.0f; + + tflite::testing::TestCumSumQuantized( + test_params, ¶ms, kDims, kInput, kDims, kExpect, output_data); +} + +TF_LITE_MICRO_TEST(CumSumOpTestSimpleReverseTestInt8) { + int kDims[] = {2, 2, 4}; + constexpr float kInput[] = {1, 2, 3, 4, 5, 6, 7, 8}; + constexpr float kExpect[] = {10, 9, 7, 4, 26, 21, 15, 8}; + + constexpr int kOutputCount = std::extent::value; + float output_data[kOutputCount]; + + tflite::testing::CumSumTestParams test_params; + test_params.axis = 1; + test_params.reverse = true; + + tflite::testing::TestQuantParams params = {}; + params.data_min = -26.0f; + params.data_max = 26.0f; + + tflite::testing::TestCumSumQuantized( + test_params, ¶ms, kDims, kInput, kDims, kExpect, output_data); +} + +TF_LITE_MICRO_TEST(CumSumOpTestSimpleExclusiveTestInt8) { + int kDims[] = {2, 2, 4}; + constexpr float kInput[] = {1, 2, 3, 4, 5, 6, 7, 8}; + constexpr float kExpect[] = {0, 1, 3, 6, 0, 5, 11, 18}; + + constexpr int kOutputCount = std::extent::value; + float output_data[kOutputCount]; + + tflite::testing::CumSumTestParams test_params; + test_params.axis = 1; + test_params.exclusive = true; + + tflite::testing::TestQuantParams params = {}; + params.data_min = -18.0f; + params.data_max = 18.0f; 
+ + tflite::testing::TestCumSumQuantized( + test_params, ¶ms, kDims, kInput, kDims, kExpect, output_data); +} + +TF_LITE_MICRO_TEST(CumSumOpTestSimpleReverseExclusiveTestInt8) { + int kDims[] = {2, 2, 4}; + constexpr float kInput[] = {1, 2, 3, 4, 5, 6, 7, 8}; + constexpr float kExpect[] = {9, 7, 4, 0, 21, 15, 8, 0}; + + constexpr int kOutputCount = std::extent::value; + float output_data[kOutputCount]; + + tflite::testing::CumSumTestParams test_params; + test_params.axis = -1; + test_params.exclusive = true; + test_params.reverse = true; + + tflite::testing::TestQuantParams params = {}; + params.data_min = -21.0f; + params.data_max = 21.0f; + + tflite::testing::TestCumSumQuantized( + test_params, ¶ms, kDims, kInput, kDims, kExpect, output_data); +} +TF_LITE_MICRO_TESTS_END diff --git a/tensorflow/lite/micro/kernels/depth_to_space.cc b/tensorflow/lite/micro/kernels/depth_to_space.cc new file mode 100644 index 0000000..7e0a8fa --- /dev/null +++ b/tensorflow/lite/micro/kernels/depth_to_space.cc @@ -0,0 +1,142 @@ +/* Copyright 2021 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/ +#include "tensorflow/lite/kernels/internal/reference/depth_to_space.h" + +#include + +#include "tensorflow/lite/c/common.h" +#include "tensorflow/lite/kernels/internal/types.h" +#include "tensorflow/lite/kernels/kernel_util.h" +#include "tensorflow/lite/micro/kernels/kernel_util.h" +#include "tensorflow/lite/micro/micro_log.h" + +namespace tflite { +namespace { + +constexpr int kInputTensor = 0; +constexpr int kOutputTensor = 0; + +// input/output tensor shape rank associations +constexpr int kBatchRank = 0; +constexpr int kHeightRank = 1; +constexpr int kWidthRank = 2; +constexpr int kDepthRank = 3; + +TfLiteStatus CalculateOpData(TfLiteContext* context, TfLiteNode* node) { + auto* params = + reinterpret_cast(node->builtin_data); + + TF_LITE_ENSURE_EQ(context, NumInputs(node), 1); + TF_LITE_ENSURE_EQ(context, NumOutputs(node), 1); + + MicroContext* micro_context = GetMicroContext(context); + + TfLiteTensor* input = + micro_context->AllocateTempInputTensor(node, kInputTensor); + TF_LITE_ENSURE(context, input != nullptr); + TfLiteTensor* output = + micro_context->AllocateTempOutputTensor(node, kOutputTensor); + TF_LITE_ENSURE(context, output != nullptr); + + TF_LITE_ENSURE_EQ(context, NumDimensions(input), 4); + + auto data_type = output->type; + TF_LITE_ENSURE(context, + data_type == kTfLiteFloat32 || data_type == kTfLiteInt8); + TF_LITE_ENSURE_TYPES_EQ(context, input->type, output->type); + + const int block_size = params->block_size; + TF_LITE_ENSURE(context, block_size > 0); + const int input_height = input->dims->data[kHeightRank]; + const int input_width = input->dims->data[kWidthRank]; + const int input_channels = input->dims->data[kDepthRank]; + int output_height = input_height * block_size; + int output_width = input_width * block_size; + int output_channels = input_channels / block_size / block_size; + + TF_LITE_ENSURE_EQ(context, input_height, output_height / block_size); + 
TF_LITE_ENSURE_EQ(context, input_width, output_width / block_size); + TF_LITE_ENSURE_EQ(context, input_channels, + output_channels * block_size * block_size); + + // We must update the output tensor dimensions. + // The dims storage is expected to be the same area in memory + // for both TfLiteTensor and TfLiteEvalTensor. This is important + // because TfLiteTensor in the MicroInterpreter is a temporary + // allocation. For the KernelRunner interpreter, TfLiteEvalTensor + // is a temporary allocation. We must therefore relocate the dims + // from the FlatBuffer to the persistent storage arena. + TfLiteEvalTensor* output_eval = + tflite::micro::GetEvalOutput(context, node, kOutputTensor); + TF_LITE_ENSURE_OK(context, tflite::micro::CreateWritableTensorDimsWithCopy( + context, output, output_eval)); + output->dims->data[kBatchRank] = input->dims->data[kBatchRank]; + output->dims->data[kHeightRank] = output_height; + output->dims->data[kWidthRank] = output_width; + output->dims->data[kDepthRank] = output_channels; + + micro_context->DeallocateTempTfLiteTensor(input); + micro_context->DeallocateTempTfLiteTensor(output); + + return kTfLiteOk; +} + +TfLiteStatus Prepare(TfLiteContext* context, TfLiteNode* node) { + return CalculateOpData(context, node); +} + +TfLiteStatus Eval(TfLiteContext* context, TfLiteNode* node) { + auto* params = + reinterpret_cast(node->builtin_data); + + const TfLiteEvalTensor* input = + tflite::micro::GetEvalInput(context, node, kInputTensor); + TfLiteEvalTensor* output = + tflite::micro::GetEvalOutput(context, node, kOutputTensor); + + tflite::DepthToSpaceParams op_params; + op_params.block_size = static_cast(params->block_size); + + switch (input->type) { // Already know in/out types are same. 
+ case kTfLiteFloat32: + reference_ops::DepthToSpace(op_params, + tflite::micro::GetTensorShape(input), + tflite::micro::GetTensorData(input), + tflite::micro::GetTensorShape(output), + tflite::micro::GetTensorData(output)); + break; + case kTfLiteInt8: + reference_ops::DepthToSpace(op_params, + tflite::micro::GetTensorShape(input), + tflite::micro::GetTensorData(input), + tflite::micro::GetTensorShape(output), + tflite::micro::GetTensorData(output)); + break; + default: + MicroPrintf("DEPTH_TO_SPACE only supports FLOAT32 and INT8, got %s.", + TfLiteTypeGetName(output->type)); + return kTfLiteError; + } + + return kTfLiteOk; +} + +} // namespace + +TFLMRegistration Register_DEPTH_TO_SPACE() { + return tflite::micro::RegisterOp(nullptr, Prepare, Eval); +} + +} // namespace tflite diff --git a/tensorflow/lite/micro/kernels/depth_to_space_test.cc b/tensorflow/lite/micro/kernels/depth_to_space_test.cc new file mode 100644 index 0000000..4053436 --- /dev/null +++ b/tensorflow/lite/micro/kernels/depth_to_space_test.cc @@ -0,0 +1,307 @@ +/* Copyright 2021 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/ +#include + +#include "tensorflow/lite/c/builtin_op_data.h" +#include "tensorflow/lite/c/common.h" +#include "tensorflow/lite/micro/kernels/kernel_runner.h" +#include "tensorflow/lite/micro/test_helpers.h" +#include "tensorflow/lite/micro/testing/micro_test.h" + +namespace tflite { +namespace testing { +namespace { + +constexpr int kOutputDimsCount = 4; + +struct DepthToSpaceTestParams { + int block_size; + // output_dims_data is a TfLiteIntArray + int output_dims_data[kOutputDimsCount + 1] = {kOutputDimsCount, 0, 0, 0, 0}; +}; + +void ExecuteDepthToSpaceTest(const DepthToSpaceTestParams& params, + TfLiteTensor* tensors, int tensors_count) { + int kInputArrayData[] = {1, 0}; + TfLiteIntArray* inputs_array = IntArrayFromInts(kInputArrayData); + int kOutputArrayData[] = {1, 1}; + TfLiteIntArray* outputs_array = IntArrayFromInts(kOutputArrayData); + + TfLiteDepthToSpaceParams op_params = {}; + op_params.block_size = params.block_size; + + const TFLMRegistration registration = tflite::Register_DEPTH_TO_SPACE(); + micro::KernelRunner runner(registration, tensors, tensors_count, inputs_array, + outputs_array, static_cast(&op_params)); + + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, runner.InitAndPrepare()); + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, runner.Invoke()); +} + +template +void TestDepthToSpace(DepthToSpaceTestParams& params, int* input_dims_data, + const T* input_data, int* expected_dims_data, + const T* expected_data, T* output_data) { + TfLiteIntArray* input_dims = IntArrayFromInts(input_dims_data); + TfLiteIntArray* expected_dims = IntArrayFromInts(expected_dims_data); + TfLiteIntArray* output_dims = IntArrayFromInts(params.output_dims_data); + const int expected_count = ElementCount(*expected_dims); + + TfLiteTensor tensors[] = { + CreateTensor(input_data, input_dims), + CreateTensor(output_data, output_dims), + }; + constexpr int tensors_count = std::extent::value; + 
ExecuteDepthToSpaceTest(params, tensors, tensors_count); + + constexpr float kTolerance = 1e-5; + for (int i = 0; i < expected_count; i++) { + TF_LITE_MICRO_EXPECT_NEAR(expected_data[i], output_data[i], kTolerance); + } + for (int i = 0; i < expected_dims->size; i++) { + // output dims will have been relocated during prepare phase, + // so use the tensor dims pointer. + TF_LITE_MICRO_EXPECT_EQ(expected_dims->data[i], tensors[1].dims->data[i]); + } +} + +// min/max are used to compute scale, zero-point, compare tolerance +template +struct TestQuantParams { + float data_min; // input and output data minimum value + float data_max; // input and output data maximum value + T input_data[kOutputSize]; // quantized input storage + T output_data[kOutputSize]; // quantized output storage +}; + +// for quantized, the error shouldn't exceed step +template +float GetTolerance(float min, float max) { + float kQuantizedStep = + 2.0f * (max - min) / + (std::numeric_limits::max() - std::numeric_limits::min()); + return kQuantizedStep; +} + +template +void TestDepthToSpaceQuantized(DepthToSpaceTestParams& params, + TestQuantParams* quant_params, + int* input_dims_data, const float* input_data, + int* expected_dims_data, + const float* expected_data, float* output_data) { + TfLiteIntArray* input_dims = IntArrayFromInts(input_dims_data); + TfLiteIntArray* expected_dims = IntArrayFromInts(expected_dims_data); + TfLiteIntArray* output_dims = IntArrayFromInts(params.output_dims_data); + + const float scale = + ScaleFromMinMax(quant_params->data_min, quant_params->data_max); + const int zero_point = + ZeroPointFromMinMax(quant_params->data_min, quant_params->data_max); + + TfLiteTensor tensors[] = { + CreateQuantizedTensor(input_data, quant_params->input_data, input_dims, + scale, zero_point), + CreateQuantizedTensor(quant_params->output_data, output_dims, scale, + zero_point), + }; + constexpr int kTensorsCount = std::extent::value; + + ExecuteDepthToSpaceTest(params, tensors, 
kTensorsCount); + + Dequantize(quant_params->output_data, kOutputSize, scale, zero_point, + output_data); + const float kTolerance = + GetTolerance(quant_params->data_min, quant_params->data_max); + for (int i = 0; i < kOutputSize; i++) { + TF_LITE_MICRO_EXPECT_NEAR(expected_data[i], output_data[i], kTolerance); + } + for (int i = 0; i < expected_dims->size; i++) { + // output dims will have been relocated during prepare phase, + // so use the tensor dims pointer. + TF_LITE_MICRO_EXPECT_EQ(expected_dims->data[i], tensors[1].dims->data[i]); + } +} + +} // namespace +} // namespace testing +} // namespace tflite + +TF_LITE_MICRO_TESTS_BEGIN + +TF_LITE_MICRO_TEST(DepthToSpaceOpModelFloat32_1114_2) { + int kInputDims[] = {4, 1, 1, 1, 4}; + constexpr float kInput[] = {1.4, 2.3, 3.2, 4.1}; + int kExpectDims[] = {4, 1, 2, 2, 1}; + constexpr float kExpect[] = {1.4, 2.3, 3.2, 4.1}; + constexpr int kOutputCount = std::extent::value; + float output_data[kOutputCount]; + tflite::testing::DepthToSpaceTestParams params; + params.block_size = 2; + + tflite::testing::TestDepthToSpace(params, kInputDims, kInput, kExpectDims, + kExpect, output_data); +} + +TF_LITE_MICRO_TEST(DepthToSpaceOpModelFloat32_1124_2) { + int kInputDims[] = {4, 1, 1, 2, 4}; + constexpr float kInput[] = {1, 2, 3, 4, 5, 6, 7, 8}; + int kExpectDims[] = {4, 1, 2, 4, 1}; + constexpr float kExpect[] = {1, 2, 5, 6, 3, 4, 7, 8}; + constexpr int kOutputCount = std::extent::value; + float output_data[kOutputCount]; + tflite::testing::DepthToSpaceTestParams params; + params.block_size = 2; + + tflite::testing::TestDepthToSpace(params, kInputDims, kInput, kExpectDims, + kExpect, output_data); +} + +TF_LITE_MICRO_TEST(DepthToSpaceOpModelFloat32_1214_2) { + int kInputDims[] = {4, 1, 2, 1, 4}; + constexpr float kInput[] = {1, 2, 3, 4, 5, 6, 7, 8}; + int kExpectDims[] = {4, 1, 4, 2, 1}; + constexpr float kExpect[] = {1, 2, 3, 4, 5, 6, 7, 8}; + constexpr int kOutputCount = std::extent::value; + float 
output_data[kOutputCount]; + tflite::testing::DepthToSpaceTestParams params; + params.block_size = 2; + + tflite::testing::TestDepthToSpace(params, kInputDims, kInput, kExpectDims, + kExpect, output_data); +} + +TF_LITE_MICRO_TEST(DepthToSpaceOpModelFloat32_1224_2) { + int kInputDims[] = {4, 1, 2, 2, 4}; + constexpr float kInput[] = {1, 2, 3, 4, 5, 6, 7, 8, + 9, 10, 11, 12, 13, 14, 15, 16}; + int kExpectDims[] = {4, 1, 4, 4, 1}; + constexpr float kExpect[] = {1, 2, 5, 6, 3, 4, 7, 8, + 9, 10, 13, 14, 11, 12, 15, 16}; + constexpr int kOutputCount = std::extent::value; + float output_data[kOutputCount]; + tflite::testing::DepthToSpaceTestParams params; + params.block_size = 2; + + tflite::testing::TestDepthToSpace(params, kInputDims, kInput, kExpectDims, + kExpect, output_data); +} + +TF_LITE_MICRO_TEST(DepthToSpaceOpModelFloat32_1111_1) { + int kInputDims[] = {4, 1, 1, 1, 1}; + constexpr float kInput[] = {4}; + int kExpectDims[] = {4, 1, 1, 1, 1}; + constexpr float kExpect[] = {4}; + constexpr int kOutputCount = std::extent::value; + float output_data[kOutputCount]; + tflite::testing::DepthToSpaceTestParams params; + params.block_size = 1; + + tflite::testing::TestDepthToSpace(params, kInputDims, kInput, kExpectDims, + kExpect, output_data); +} + +TF_LITE_MICRO_TEST(DepthToSpaceOpModelInt8_1114_2) { + int kInputDims[] = {4, 1, 1, 1, 4}; + constexpr float kInput[] = {1.4, 2.3, 3.2, 4.1}; + int kExpectDims[] = {4, 1, 2, 2, 1}; + constexpr float kExpect[] = {1.4, 2.3, 3.2, 4.1}; + constexpr int kOutputCount = std::extent::value; + float output_data[kOutputCount]; + tflite::testing::DepthToSpaceTestParams params; + params.block_size = 2; + tflite::testing::TestQuantParams quant_params = {}; + quant_params.data_min = 0.0; + quant_params.data_max = 5.0; + + tflite::testing::TestDepthToSpaceQuantized( + params, &quant_params, kInputDims, kInput, kExpectDims, kExpect, + output_data); +} + +TF_LITE_MICRO_TEST(DepthToSpaceOpModelInt8_1124_2) { + int kInputDims[] = {4, 1, 1, 2, 
4}; + constexpr float kInput[] = {1, 2, 3, 4, 5, 6, 7, 8}; + int kExpectDims[] = {4, 1, 2, 4, 1}; + constexpr float kExpect[] = {1, 2, 5, 6, 3, 4, 7, 8}; + constexpr int kOutputCount = std::extent::value; + float output_data[kOutputCount]; + tflite::testing::DepthToSpaceTestParams params; + params.block_size = 2; + tflite::testing::TestQuantParams quant_params = {}; + quant_params.data_min = 0.0; + quant_params.data_max = 9.0; + + tflite::testing::TestDepthToSpaceQuantized( + params, &quant_params, kInputDims, kInput, kExpectDims, kExpect, + output_data); +} + +TF_LITE_MICRO_TEST(DepthToSpaceOpModelInt8_1214_2) { + int kInputDims[] = {4, 1, 2, 1, 4}; + constexpr float kInput[] = {1, 2, 3, 4, 5, 6, 7, 8}; + int kExpectDims[] = {4, 1, 4, 2, 1}; + constexpr float kExpect[] = {1, 2, 3, 4, 5, 6, 7, 8}; + constexpr int kOutputCount = std::extent::value; + float output_data[kOutputCount]; + tflite::testing::DepthToSpaceTestParams params; + params.block_size = 2; + tflite::testing::TestQuantParams quant_params = {}; + quant_params.data_min = 0.0; + quant_params.data_max = 9.0; + + tflite::testing::TestDepthToSpaceQuantized( + params, &quant_params, kInputDims, kInput, kExpectDims, kExpect, + output_data); +} + +TF_LITE_MICRO_TEST(DepthToSpaceOpModelInt8_1224_2) { + int kInputDims[] = {4, 1, 2, 2, 4}; + constexpr float kInput[] = {1, 2, 3, 4, 5, 6, 7, 8, + 9, 10, 11, 12, 13, 14, 15, 16}; + int kExpectDims[] = {4, 1, 4, 4, 1}; + constexpr float kExpect[] = {1, 2, 5, 6, 3, 4, 7, 8, + 9, 10, 13, 14, 11, 12, 15, 16}; + constexpr int kOutputCount = std::extent::value; + float output_data[kOutputCount]; + tflite::testing::DepthToSpaceTestParams params; + params.block_size = 2; + tflite::testing::TestQuantParams quant_params = {}; + quant_params.data_min = 0.0; + quant_params.data_max = 17.0; + + tflite::testing::TestDepthToSpaceQuantized( + params, &quant_params, kInputDims, kInput, kExpectDims, kExpect, + output_data); +} + +TF_LITE_MICRO_TEST(DepthToSpaceOpModelInt8_1111_1) { + 
int kInputDims[] = {4, 1, 1, 1, 1}; + constexpr float kInput[] = {4}; + int kExpectDims[] = {4, 1, 1, 1, 1}; + constexpr float kExpect[] = {4}; + constexpr int kOutputCount = std::extent::value; + float output_data[kOutputCount]; + tflite::testing::DepthToSpaceTestParams params; + params.block_size = 1; + tflite::testing::TestQuantParams quant_params = {}; + quant_params.data_min = 3.0; + quant_params.data_max = 5.0; + + tflite::testing::TestDepthToSpaceQuantized( + params, &quant_params, kInputDims, kInput, kExpectDims, kExpect, + output_data); +} + +TF_LITE_MICRO_TESTS_END diff --git a/tensorflow/lite/micro/kernels/depthwise_conv.cc b/tensorflow/lite/micro/kernels/depthwise_conv.cc new file mode 100644 index 0000000..9290c2d --- /dev/null +++ b/tensorflow/lite/micro/kernels/depthwise_conv.cc @@ -0,0 +1,124 @@ +/* Copyright 2017 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/ + +#include "tensorflow/lite/micro/kernels/depthwise_conv.h" + +#include "tensorflow/lite/c/builtin_op_data.h" +#include "tensorflow/lite/c/common.h" +#include "tensorflow/lite/kernels/internal/portable_tensor_utils.h" +#include "tensorflow/lite/kernels/internal/reference/depthwiseconv_float.h" +#include "tensorflow/lite/kernels/internal/reference/integer_ops/depthwise_conv.h" +#include "tensorflow/lite/kernels/kernel_util.h" +#include "tensorflow/lite/micro/kernels/kernel_util.h" +#include "tensorflow/lite/micro/micro_log.h" + +namespace tflite { +namespace { + +void* Init(TfLiteContext* context, const char* buffer, size_t length) { + TFLITE_DCHECK(context->AllocatePersistentBuffer != nullptr); + return context->AllocatePersistentBuffer(context, sizeof(OpDataConv)); +} + +TfLiteStatus Eval(TfLiteContext* context, TfLiteNode* node) { + TFLITE_DCHECK(node->user_data != nullptr); + TFLITE_DCHECK(node->builtin_data != nullptr); + + auto& params = + *(reinterpret_cast(node->builtin_data)); + const OpDataConv& data = *(static_cast(node->user_data)); + + TfLiteEvalTensor* output = + tflite::micro::GetEvalOutput(context, node, kDepthwiseConvOutputTensor); + const TfLiteEvalTensor* input = + tflite::micro::GetEvalInput(context, node, kDepthwiseConvInputTensor); + const TfLiteEvalTensor* filter = + tflite::micro::GetEvalInput(context, node, kDepthwiseConvWeightsTensor); + const TfLiteEvalTensor* bias = + (NumInputs(node) == 3) + ? tflite::micro::GetEvalInput(context, node, kDepthwiseConvBiasTensor) + : nullptr; + + switch (input->type) { // Already know in/out types are same. 
+ case kTfLiteFloat32: { + tflite::reference_ops::DepthwiseConv( + DepthwiseConvParamsFloat(params, data), + tflite::micro::GetTensorShape(input), + tflite::micro::GetTensorData(input), + tflite::micro::GetTensorShape(filter), + tflite::micro::GetTensorData(filter), + tflite::micro::GetTensorShape(bias), + tflite::micro::GetOptionalTensorData(bias), + tflite::micro::GetTensorShape(output), + tflite::micro::GetTensorData(output)); + break; + } + case kTfLiteInt8: { + switch (filter->type) { + case kTfLiteInt4: { + int8_t* unpacked_filter_data = static_cast( + context->GetScratchBuffer(context, data.filter_buffer_index)); + tflite::tensor_utils::UnpackDenseInt4IntoInt8( + tflite::micro::GetTensorData(filter), + tflite::micro::GetTensorShape(filter).FlatSize(), + unpacked_filter_data); + reference_integer_ops::DepthwiseConvPerChannel( + DepthwiseConvParamsQuantized(params, data), + data.per_channel_output_multiplier, data.per_channel_output_shift, + tflite::micro::GetTensorShape(input), + tflite::micro::GetTensorData(input), + tflite::micro::GetTensorShape(filter), unpacked_filter_data, + tflite::micro::GetTensorShape(bias), + tflite::micro::GetOptionalTensorData(bias), + tflite::micro::GetTensorShape(output), + tflite::micro::GetTensorData(output)); + break; + } + case kTfLiteInt8: { + reference_integer_ops::DepthwiseConvPerChannel( + DepthwiseConvParamsQuantized(params, data), + data.per_channel_output_multiplier, data.per_channel_output_shift, + tflite::micro::GetTensorShape(input), + tflite::micro::GetTensorData(input), + tflite::micro::GetTensorShape(filter), + tflite::micro::GetTensorData(filter), + tflite::micro::GetTensorShape(bias), + tflite::micro::GetOptionalTensorData(bias), + tflite::micro::GetTensorShape(output), + tflite::micro::GetTensorData(output)); + break; + } + default: + MicroPrintf("Filter type %s (%d) not supported.", + TfLiteTypeGetName(filter->type), filter->type); + return kTfLiteError; + } + break; + } + default: + MicroPrintf("Input type 
%s (%d) not supported.", + TfLiteTypeGetName(input->type), input->type); + return kTfLiteError; + } + return kTfLiteOk; +} + +} // namespace + +TFLMRegistration Register_DEPTHWISE_CONV_2D() { + return tflite::micro::RegisterOp(Init, DepthwiseConvPrepare, Eval); +} + +} // namespace tflite diff --git a/tensorflow/lite/micro/kernels/depthwise_conv.h b/tensorflow/lite/micro/kernels/depthwise_conv.h new file mode 100644 index 0000000..d8cc78d --- /dev/null +++ b/tensorflow/lite/micro/kernels/depthwise_conv.h @@ -0,0 +1,80 @@ +/* Copyright 2022 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#ifndef TENSORFLOW_LITE_MICRO_KERNELS_DEPTHWISE_CONV_H_ +#define TENSORFLOW_LITE_MICRO_KERNELS_DEPTHWISE_CONV_H_ + +#include + +#include "tensorflow/lite/c/builtin_op_data.h" +#include "tensorflow/lite/c/common.h" +#include "tensorflow/lite/kernels/internal/types.h" +#include "tensorflow/lite/micro/kernels/conv.h" + +namespace tflite { + +extern const int kDepthwiseConvInputTensor; +extern const int kDepthwiseConvWeightsTensor; +extern const int kDepthwiseConvBiasTensor; +extern const int kDepthwiseConvOutputTensor; +extern const int kDepthwiseConvQuantizedDimension; + +// Returns a DepthwiseParams struct with all the parameters needed for a +// float computation. 
+DepthwiseParams DepthwiseConvParamsFloat( + const TfLiteDepthwiseConvParams& params, const OpDataConv& data); + +// Returns a DepthwiseParams struct with all the parameters needed for a +// quantized computation. +DepthwiseParams DepthwiseConvParamsQuantized( + const TfLiteDepthwiseConvParams& params, const OpDataConv& data); + +TfLiteStatus CalculateOpDataDepthwiseConv( + TfLiteContext* context, TfLiteNode* node, + const TfLiteDepthwiseConvParams& params, int width, int height, + int filter_width, int filter_height, int out_width, int out_height, + const TfLiteType data_type, OpDataConv* data); + +TfLiteStatus DepthwiseConvPrepare(TfLiteContext* context, TfLiteNode* node); + +// This is the most generic TFLMRegistration. The actual supported types +// may still be target dependent. The only requirement is that every +// implementation (reference or optimized) must define this function. +TFLMRegistration Register_DEPTHWISE_CONV_2D(); + +#if defined(CMSIS_NN) +// Returns a TFLMRegistration struct for kernel variant that only supports +// int8 activations and int8 weights and uses the latency optimized +// implementations. +TFLMRegistration Register_DEPTHWISE_CONV_2D_INT8(); + +// Returns a TFLMRegistration struct for kernel variant that only supports +// int16 activations and int8 weights and uses the latency optimized +// implementations. 
+TFLMRegistration Register_DEPTHWISE_CONV_2D_INT16(); + +#else +inline TFLMRegistration Register_DEPTHWISE_CONV_2D_INT8() { + return Register_DEPTHWISE_CONV_2D(); +} + +inline TFLMRegistration Register_DEPTHWISE_CONV_2D_INT16() { + return Register_DEPTHWISE_CONV_2D(); +} +#endif + +} // namespace tflite + +#endif // TENSORFLOW_LITE_MICRO_KERNELS_DEPTHWISE_CONV_H_ diff --git a/tensorflow/lite/micro/kernels/depthwise_conv_common.cc b/tensorflow/lite/micro/kernels/depthwise_conv_common.cc new file mode 100644 index 0000000..6d5f6c2 --- /dev/null +++ b/tensorflow/lite/micro/kernels/depthwise_conv_common.cc @@ -0,0 +1,218 @@ +/* Copyright 2021 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/ + +#include "tensorflow/lite/c/builtin_op_data.h" +#include "tensorflow/lite/c/common.h" +#include "tensorflow/lite/kernels/internal/common.h" +#include "tensorflow/lite/kernels/internal/quantization_util.h" +#include "tensorflow/lite/kernels/internal/reference/depthwiseconv_float.h" +#include "tensorflow/lite/kernels/internal/reference/integer_ops/depthwise_conv.h" +#include "tensorflow/lite/kernels/internal/tensor_ctypes.h" +#include "tensorflow/lite/kernels/kernel_util.h" +#include "tensorflow/lite/kernels/padding.h" +#include "tensorflow/lite/micro/kernels/depthwise_conv.h" +#include "tensorflow/lite/micro/kernels/kernel_util.h" + +namespace tflite { + +const int kDepthwiseConvInputTensor = 0; +const int kDepthwiseConvWeightsTensor = 1; +const int kDepthwiseConvBiasTensor = 2; +const int kDepthwiseConvOutputTensor = 0; + +// DepthwiseConv is quantized along dimension 3: +// https://www.tensorflow.org/lite/performance/quantization_spec +const int kDepthwiseConvQuantizedDimension = 3; + +// Returns a DepthwiseParams struct with all the parameters needed for a +// float computation. 
+DepthwiseParams DepthwiseConvParamsFloat( + const TfLiteDepthwiseConvParams& params, const OpDataConv& data) { + DepthwiseParams op_params; + CalculateActivationRange(params.activation, &op_params.float_activation_min, + &op_params.float_activation_max); + op_params.padding_type = tflite::micro::RuntimePaddingType(params.padding); + op_params.padding_values.width = data.padding.width; + op_params.padding_values.height = data.padding.height; + op_params.stride_width = params.stride_width; + op_params.stride_height = params.stride_height; + op_params.dilation_width_factor = params.dilation_width_factor; + op_params.dilation_height_factor = params.dilation_height_factor; + op_params.depth_multiplier = params.depth_multiplier; + return op_params; +} + +// Returns a DepthwiseParams struct with all the parameters needed for a +// quantized computation. +DepthwiseParams DepthwiseConvParamsQuantized( + const TfLiteDepthwiseConvParams& params, const OpDataConv& data) { + DepthwiseParams op_params; + op_params.input_offset = -data.input_zero_point; + op_params.weights_offset = -data.filter_zero_point; + op_params.output_offset = data.output_zero_point; + op_params.output_multiplier = data.output_multiplier; + op_params.output_shift = -data.output_shift; + op_params.padding_type = tflite::micro::RuntimePaddingType(params.padding); + op_params.padding_values.height = data.padding.height; + op_params.padding_values.width = data.padding.width; + op_params.stride_height = params.stride_height; + op_params.stride_width = params.stride_width; + op_params.dilation_height_factor = params.dilation_height_factor; + op_params.dilation_width_factor = params.dilation_width_factor; + op_params.depth_multiplier = params.depth_multiplier; + op_params.quantized_activation_min = data.output_activation_min; + op_params.quantized_activation_max = data.output_activation_max; + return op_params; +} + +TfLiteStatus CalculateOpDataDepthwiseConv( + TfLiteContext* context, TfLiteNode* node, + const 
TfLiteDepthwiseConvParams& params, int width, int height, + int filter_width, int filter_height, int out_width, int out_height, + const TfLiteType data_type, OpDataConv* data) { + bool has_bias = node->inputs->size == 3; + // Check number of inputs/outputs + TF_LITE_ENSURE(context, has_bias || node->inputs->size == 2); + TF_LITE_ENSURE_EQ(context, node->outputs->size, 1); + + // Matching GetWindowedOutputSize in TensorFlow. + auto padding = params.padding; + data->padding = ComputePaddingHeightWidth( + params.stride_height, params.stride_width, params.dilation_height_factor, + params.dilation_width_factor, height, width, filter_height, filter_width, + padding, &out_height, &out_width); + + MicroContext* micro_context = GetMicroContext(context); + + TfLiteTensor* input = + micro_context->AllocateTempInputTensor(node, kConvInputTensor); + TF_LITE_ENSURE(context, input != nullptr); + TfLiteTensor* filter = + micro_context->AllocateTempInputTensor(node, kConvWeightsTensor); + TF_LITE_ENSURE(context, filter != nullptr); + TfLiteTensor* bias = + micro_context->AllocateTempInputTensor(node, kConvBiasTensor); + TfLiteTensor* output = + micro_context->AllocateTempOutputTensor(node, kConvOutputTensor); + TF_LITE_ENSURE(context, output != nullptr); + + // Note that quantized inference requires that all tensors have their + // parameters set. This is usually done during quantized training. 
+ if (data_type != kTfLiteFloat32) { + int output_channels = filter->dims->data[kDepthwiseConvQuantizedDimension]; + + TF_LITE_ENSURE_STATUS(tflite::PopulateConvolutionQuantizationParams( + context, input, filter, bias, output, params.activation, + &data->output_multiplier, &data->output_shift, + &data->output_activation_min, &data->output_activation_max, + data->per_channel_output_multiplier, data->per_channel_output_shift, + output_channels)); + } + + data->input_zero_point = input->params.zero_point; + data->filter_zero_point = filter->params.zero_point; + data->output_zero_point = output->params.zero_point; + + micro_context->DeallocateTempTfLiteTensor(input); + micro_context->DeallocateTempTfLiteTensor(filter); + micro_context->DeallocateTempTfLiteTensor(bias); + micro_context->DeallocateTempTfLiteTensor(output); + + return kTfLiteOk; +} + +TfLiteStatus DepthwiseConvPrepare(TfLiteContext* context, TfLiteNode* node) { + TFLITE_DCHECK(node->user_data != nullptr); + TFLITE_DCHECK(node->builtin_data != nullptr); + + OpDataConv* data = static_cast(node->user_data); + const auto& params = + *(static_cast(node->builtin_data)); + MicroContext* micro_context = GetMicroContext(context); + + TfLiteTensor* output = + micro_context->AllocateTempOutputTensor(node, kDepthwiseConvOutputTensor); + TF_LITE_ENSURE(context, output != nullptr); + TfLiteTensor* input = + micro_context->AllocateTempInputTensor(node, kDepthwiseConvInputTensor); + TF_LITE_ENSURE(context, input != nullptr); + TfLiteTensor* filter = + micro_context->AllocateTempInputTensor(node, kDepthwiseConvWeightsTensor); + TF_LITE_ENSURE(context, filter != nullptr); + + const int input_width = input->dims->data[2]; + const int input_height = input->dims->data[1]; + const int filter_width = filter->dims->data[2]; + const int filter_height = filter->dims->data[1]; + const int output_width = output->dims->data[2]; + const int output_height = output->dims->data[1]; + + // Dynamically allocate per-channel quantization 
parameters. + const int num_channels = filter->dims->data[kDepthwiseConvQuantizedDimension]; + data->per_channel_output_multiplier = + static_cast(context->AllocatePersistentBuffer( + context, num_channels * sizeof(int32_t))); + data->per_channel_output_shift = + static_cast(context->AllocatePersistentBuffer( + context, num_channels * sizeof(int32_t))); + + // All per-channel quantized tensors need valid zero point and scale arrays. + if (input->type == kTfLiteInt8) { + TF_LITE_ENSURE_EQ(context, filter->quantization.type, + kTfLiteAffineQuantization); + + const auto* affine_quantization = + static_cast(filter->quantization.params); + TFLITE_DCHECK(affine_quantization != nullptr); + TFLITE_DCHECK(affine_quantization->scale != nullptr); + TFLITE_DCHECK(affine_quantization->zero_point != nullptr); + + TF_LITE_ENSURE( + context, affine_quantization->scale->size == 1 || + affine_quantization->scale->size == + filter->dims->data[kDepthwiseConvQuantizedDimension]); + + TF_LITE_ENSURE_EQ(context, affine_quantization->scale->size, + affine_quantization->zero_point->size); + } + + TF_LITE_ENSURE_MSG( + context, + input->type == filter->type || + (input->type == kTfLiteInt8 && + (filter->type == kTfLiteInt4 || filter->type == kTfLiteInt8)), + "Hybrid models are not supported on TFLite Micro."); + + if (filter->type == kTfLiteInt4) { + int filter_size = + RuntimeShape(filter->dims->size, + reinterpret_cast(filter->dims->data)) + .FlatSize(); + context->RequestScratchBufferInArena(context, filter_size, + &data->filter_buffer_index); + } + + TF_LITE_ENSURE_STATUS(CalculateOpDataDepthwiseConv( + context, node, params, input_width, input_height, filter_width, + filter_height, output_width, output_height, input->type, data)); + + micro_context->DeallocateTempTfLiteTensor(output); + micro_context->DeallocateTempTfLiteTensor(input); + micro_context->DeallocateTempTfLiteTensor(filter); + + return kTfLiteOk; +} + +} // namespace tflite diff --git 
a/tensorflow/lite/micro/kernels/depthwise_conv_test.cc b/tensorflow/lite/micro/kernels/depthwise_conv_test.cc new file mode 100644 index 0000000..c3b916c --- /dev/null +++ b/tensorflow/lite/micro/kernels/depthwise_conv_test.cc @@ -0,0 +1,992 @@ + +/* Copyright 2017 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#include "tensorflow/lite/c/builtin_op_data.h" +#include "tensorflow/lite/c/common.h" +#include "tensorflow/lite/kernels/internal/tensor_ctypes.h" +#include "tensorflow/lite/micro/kernels/kernel_runner.h" +#include "tensorflow/lite/micro/test_helpers.h" +#include "tensorflow/lite/micro/testing/micro_test.h" + +namespace tflite { +namespace testing { +namespace { + +// Index of the output tensor in context->tensors, specific to +// DepthwiseConv. +constexpr int kOutputTensorIndex = 3; + +constexpr int kMaxFilterChannels = 64; +constexpr int kMaxBiasChannels = 64; + +// Creates a DepthwiseConv opeerator, calls it with the provided input tensors +// and some defaults parameters, and compares the output with +// expected_output_data. +// +// The tensors parameter contains both the input tensors as well as a +// preallocated output tensor into which the output is stored. 
+template +TfLiteStatus ValidateDepthwiseConvGoldens( + const T* expected_output_data, int output_length, + TfLiteDepthwiseConvParams* conv_params, float tolerance, int tensors_size, + TfLiteTensor* tensors) { + int inputs_array_data[] = {3, 0, 1, 2}; + TfLiteIntArray* inputs_array = IntArrayFromInts(inputs_array_data); + int outputs_array_data[] = {1, 3}; + TfLiteIntArray* outputs_array = IntArrayFromInts(outputs_array_data); + + const TFLMRegistration registration = Register_DEPTHWISE_CONV_2D(); + micro::KernelRunner runner(registration, tensors, tensors_size, inputs_array, + outputs_array, + reinterpret_cast(conv_params)); + + int input_depth = tensors[0].dims->data[3]; + int output_depth = tensors[1].dims->data[3]; + int depth_mul = output_depth / input_depth; + + conv_params->padding = kTfLitePaddingValid; + conv_params->depth_multiplier = depth_mul; + + const char* init_data = reinterpret_cast(conv_params); + + // TODO(b/154240825): Use a test macro here which fails and returns. + TfLiteStatus status = runner.InitAndPrepare(init_data); + if (status != kTfLiteOk) { + return status; + } + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, runner.Invoke()); + + const T* output_data = tflite::GetTensorData(&tensors[kOutputTensorIndex]); + + for (int i = 0; i < output_length; ++i) { + TF_LITE_MICRO_EXPECT_NEAR(expected_output_data[i], output_data[i], + tolerance); + } + return kTfLiteOk; +} + +void TestDepthwiseConvQuantizedPerChannel( + int* input_dims_data, const float* input_data, int8_t* input_quantized, + float input_scale, int input_zero_point, int* filter_dims_data, + const float* filter_data, int8_t* filter_data_quantized, + int* bias_dims_data, const float* bias_data, int32_t* bias_data_quantized, + int* output_dims_data, const float* expected_output_data, + int8_t* expected_output_data_quantized, int8_t* output_data, + float output_scale, int output_zero_point, + TfLiteDepthwiseConvParams* conv_params, + TfLiteType filter_packed_type = kTfLiteNoType) { + TfLiteIntArray* 
input_dims = IntArrayFromInts(input_dims_data); + TfLiteIntArray* filter_dims = IntArrayFromInts(filter_dims_data); + TfLiteIntArray* bias_dims = IntArrayFromInts(bias_dims_data); + TfLiteIntArray* output_dims = IntArrayFromInts(output_dims_data); + const int output_dims_count = ElementCount(*output_dims); + + int filter_zero_points[kMaxFilterChannels]; + float filter_scales[kMaxFilterChannels]; + int bias_zero_points[kMaxBiasChannels]; + float bias_scales[kMaxBiasChannels]; + TfLiteAffineQuantization filter_quant; + TfLiteAffineQuantization bias_quant; + TfLiteTensor input_tensor = CreateQuantizedTensor( + input_data, input_quantized, input_dims, input_scale, input_zero_point); + TfLiteTensor filter_tensor = CreateSymmetricPerChannelQuantizedTensor( + filter_data, filter_data_quantized, filter_dims, filter_scales, + filter_zero_points, &filter_quant, 3 /* quantized dimension */, false, + filter_packed_type); + TfLiteTensor bias_tensor = CreatePerChannelQuantizedBiasTensor( + bias_data, bias_data_quantized, bias_dims, input_scale, &filter_scales[1], + bias_scales, bias_zero_points, &bias_quant, 3 /* quantized dimension */ + ); + TfLiteTensor output_tensor = CreateQuantizedTensor( + output_data, output_dims, output_scale, input_zero_point); + + // TODO(njeff): Affine Quantization Params should be set on tensor creation. 
+ float input_scales[] = {1, input_scale}; + int input_zero_points[] = {1, input_zero_point}; + TfLiteAffineQuantization input_quant = {FloatArrayFromFloats(input_scales), + IntArrayFromInts(input_zero_points), + 0}; + input_tensor.quantization = {kTfLiteAffineQuantization, &input_quant}; + + float output_scales[] = {1, output_scale}; + int output_zero_points[] = {1, output_zero_point}; + TfLiteAffineQuantization output_quant = {FloatArrayFromFloats(output_scales), + IntArrayFromInts(output_zero_points), + 0}; + output_tensor.quantization = {kTfLiteAffineQuantization, &output_quant}; + + constexpr int inputs_size = 3; + constexpr int outputs_size = 1; + constexpr int tensors_size = inputs_size + outputs_size; + TfLiteTensor tensors[tensors_size] = { + input_tensor, + filter_tensor, + bias_tensor, + output_tensor, + }; + + Quantize(expected_output_data, expected_output_data_quantized, + output_dims_count, output_scale, output_zero_point); + + TF_LITE_MICRO_EXPECT_EQ( + kTfLiteOk, ValidateDepthwiseConvGoldens(expected_output_data_quantized, + output_dims_count, conv_params, + 1.0, tensors_size, tensors)); +} + +// Xtensa kernels do not support float activations., and the corresponding tests +// are disabled. As a result, helper functions that are only needed for float +// kernel tests also need to be ifdef'd out to avoid build errors due to unused +// functions. 
+#if !defined(XTENSA) +void TestDepthwiseConvFloat(int* input_dims_data, const float* input_data, + int* filter_dims_data, const float* filter_data, + int* bias_dims_data, const float* bias_data, + const float* expected_output_data, + int* output_dims_data, + TfLiteDepthwiseConvParams* conv_params, + float* output_data) { + TfLiteIntArray* input_dims = IntArrayFromInts(input_dims_data); + TfLiteIntArray* filter_dims = IntArrayFromInts(filter_dims_data); + TfLiteIntArray* bias_dims = IntArrayFromInts(bias_dims_data); + TfLiteIntArray* output_dims = IntArrayFromInts(output_dims_data); + const int output_dims_count = ElementCount(*output_dims); + + constexpr int inputs_size = 3; + constexpr int outputs_size = 1; + constexpr int tensors_size = inputs_size + outputs_size; + TfLiteTensor tensors[tensors_size] = { + CreateTensor(input_data, input_dims), + CreateTensor(filter_data, filter_dims), + CreateTensor(bias_data, bias_dims), + CreateTensor(output_data, output_dims), + }; + + ValidateDepthwiseConvGoldens(expected_output_data, output_dims_count, + conv_params, 1e-5, tensors_size, tensors); +} + +#endif // !defined(XTENSA) + +} // namespace +} // namespace testing +} // namespace tflite + +TF_LITE_MICRO_TESTS_BEGIN + +#if !defined(XTENSA) // TODO(b/170322965): xtensa kernels are less general than + // reference kernels and we ifdef out test cases that are + // currently known to fail. 
+TF_LITE_MICRO_TEST(SimpleTest) { + int input_shape[] = {4, 1, 3, 2, 2}; + const float input_values[] = {1, 2, 7, 8, 3, 4, 9, 10, 5, 6, 11, 12}; + int filter_shape[] = {4, 1, 2, 2, 4}; + const float filter_values[] = {1, 2, 3, 4, -9, 10, -11, 12, + 5, 6, 7, 8, 13, -14, 15, -16}; + int bias_shape[] = {4, 1, 1, 1, 4}; + const float bias_values[] = {1, 2, 3, 4}; + const float golden[] = { + 71, -34, 99, -20, 91, -26, 127, -4, + }; + int output_shape[] = {4, 1, 2, 1, 4}; + const int output_dims_count = 8; + float output_data[output_dims_count]; + + TfLiteDepthwiseConvParams conv_params; + conv_params.activation = kTfLiteActNone; + conv_params.dilation_width_factor = 1; + conv_params.dilation_height_factor = 1; + conv_params.stride_height = 1; + conv_params.stride_width = 1; + + tflite::testing::TestDepthwiseConvFloat( + input_shape, input_values, filter_shape, filter_values, bias_shape, + bias_values, golden, output_shape, &conv_params, output_data); +} + +TF_LITE_MICRO_TEST(SimpleTestRelu) { + int input_shape[] = {4, 1, 3, 2, 2}; + const float input_values[] = {1, 2, 7, 8, 3, 4, 9, 10, 5, 6, 11, 12}; + int filter_shape[] = {4, 1, 2, 2, 4}; + const float filter_values[] = {1, 2, 3, 4, -9, 10, -11, 12, + 5, 6, 7, 8, 13, -14, 15, -16}; + int bias_shape[] = {4, 1, 1, 1, 4}; + const float bias_values[] = {1, 2, 3, 4}; + int output_shape[] = {4, 1, 2, 1, 4}; + const int output_dims_count = 8; + const float golden_relu[] = {71, 0, 99, 0, 91, 0, 127, 0}; + float output_data[output_dims_count]; + + TfLiteDepthwiseConvParams conv_params; + conv_params.activation = kTfLiteActRelu; + conv_params.dilation_width_factor = 1; + conv_params.dilation_height_factor = 1; + conv_params.stride_height = 1; + conv_params.stride_width = 1; + + tflite::testing::TestDepthwiseConvFloat( + input_shape, input_values, filter_shape, filter_values, bias_shape, + bias_values, golden_relu, output_shape, &conv_params, output_data); +} + +TF_LITE_MICRO_TEST(SimpleTestQuantizedPerChannelDepthMultiplier1) 
{ + const int input_elements = 12; + int input_shape[] = {4, 1, 3, 2, 2}; + const float input_values[] = {1, 2, 7, 8, 3, 4, 9, 10, 5, 6, 11, 12}; + const int filter_elements = 8; + int filter_shape[] = {4, 1, 2, 2, 2}; + const float filter_values[] = {1, 2, 3, 4, -9, 10, -11, 12}; + const int bias_elements = 2; + int bias_shape[] = {4, 1, 1, 1, 2}; + const int output_elements = 4; + const float bias_values[] = {1, 2}; + const float golden[] = { + -103, + 127, + -128, + 127, + }; + int output_shape[] = {4, 1, 2, 1, 2}; + const int output_dims_count = 4; + int8_t output_data[output_dims_count]; + + const float input_scale = 1.0f; + const float output_scale = 1.0f; + const int input_zero_point = 0; + const int output_zero_point = 0; + + int8_t input_quantized[input_elements]; + int8_t filter_quantized[filter_elements]; + int32_t bias_quantized[bias_elements]; + int8_t golden_quantized[output_elements]; + + TfLiteDepthwiseConvParams conv_params; + conv_params.activation = kTfLiteActNone; + conv_params.dilation_width_factor = 1; + conv_params.dilation_height_factor = 1; + conv_params.stride_height = 1; + conv_params.stride_width = 1; + + tflite::testing::TestDepthwiseConvQuantizedPerChannel( + input_shape, input_values, input_quantized, input_scale, input_zero_point, + filter_shape, filter_values, filter_quantized, bias_shape, bias_values, + bias_quantized, output_shape, golden, golden_quantized, output_data, + output_scale, output_zero_point, &conv_params); +} + +TF_LITE_MICRO_TEST(TestQuantizedPerChannelDepthMultiplier1Relu6) { + const int input_elements = 24; + int input_shape[] = {4, 1, 3, 2, 4}; + const float input_values[] = {1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, + 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1}; + const int filter_elements = 16; + int filter_shape[] = {4, 1, 2, 2, 4}; + const float filter_values[] = {0, 1, 8, -2, -1, 2, -10, 0, + -1, 3, -18, 0, 0, 4, 20, -3}; + const int bias_elements = 4; + int bias_shape[] = {4, 1, 1, 1, 4}; + const int output_elements = 8; 
+ const float bias_values[] = {1, 2, 3, 4}; + const float golden[] = { + 0, 6, 3, 0, 0, 6, 3, 0, + }; + int output_shape[] = {4, 1, 2, 1, 4}; + int8_t output_data[output_elements]; + + const float input_scale = 0.023529f; + const float output_scale = 0.023529f; + const int input_zero_point = -128; + const int output_zero_point = -128; + + int8_t input_quantized[input_elements]; + int8_t filter_quantized[filter_elements]; + int32_t bias_quantized[bias_elements]; + int8_t golden_quantized[output_elements]; + + TfLiteDepthwiseConvParams conv_params; + conv_params.activation = kTfLiteActRelu6; + conv_params.dilation_width_factor = 1; + conv_params.dilation_height_factor = 1; + conv_params.stride_height = 1; + conv_params.stride_width = 1; + + tflite::testing::TestDepthwiseConvQuantizedPerChannel( + input_shape, input_values, input_quantized, input_scale, input_zero_point, + filter_shape, filter_values, filter_quantized, bias_shape, bias_values, + bias_quantized, output_shape, golden, golden_quantized, output_data, + output_scale, output_zero_point, &conv_params); +} + +TF_LITE_MICRO_TEST(SimpleTestDilatedQuantizedPerChannel) { + const int input_elements = 48; + int input_shape[] = {4, 1, 4, 6, 2}; + const float input_values[] = {1, 1, 1, 1, 1, 1, 2, 2, 2, 2, 2, 2, // h = 0 + 3, 3, 3, 3, 3, 3, 4, 4, 4, 4, 4, 4, // h = 1 + 1, 2, 3, 4, 5, 6, 2, 6, 2, 4, 4, 2, // h = 2 + 3, 2, 6, 5, 1, 4, 1, 2, 1, 4, 6, 3}; // h = 3 + const int filter_elements = 16; + int filter_shape[] = {4, 1, 2, 2, 4}; + const float filter_values[] = {1, 2, 3, 4, -9, 10, -11, 12, + 5, 6, 7, 8, 13, -14, 15, -16}; + const int bias_elements = 4; + int bias_shape[] = {4, 1, 1, 1, 4}; + const int output_elements = 24; + const float bias_values[] = {1, 2, 3, 4}; + const float golden[] = { + 15, 2, 88, -48, 25, 14, 72, 0, 61, -2, 56, 48, // h = 0 + -4, 52, 12, 48, 11, 70, 63, 40, 51, -30, 41, 48 // h = 1 + }; + int output_shape[] = {4, 1, 2, 3, 4}; + int8_t output_data[output_elements]; + + const float 
input_scale = 0.5; + const float output_scale = 1.0f; + const int input_zero_point = 0; + const int output_zero_point = 0; + + int8_t input_quantized[input_elements]; + int8_t filter_quantized[filter_elements]; + int32_t bias_quantized[bias_elements]; + int8_t golden_quantized[output_elements]; + + TfLiteDepthwiseConvParams conv_params; + conv_params.activation = kTfLiteActNone; + conv_params.dilation_width_factor = 3; + conv_params.dilation_height_factor = 2; + conv_params.stride_height = 1; + conv_params.stride_width = 1; + + tflite::testing::TestDepthwiseConvQuantizedPerChannel( + input_shape, input_values, input_quantized, input_scale, input_zero_point, + filter_shape, filter_values, filter_quantized, bias_shape, bias_values, + bias_quantized, output_shape, golden, golden_quantized, output_data, + output_scale, output_zero_point, &conv_params); +} + +TF_LITE_MICRO_TEST(TestQuantizedPerChannelCompareWithFloat) { + int input_dims[] = {4, 1, 2, 3, 2}; + const float input_data[] = {3, 2, 1, -1, -2, -3, 4, 3, 2, -2, -3, -4}; + int filter_dims[] = {4, 1, 2, 2, 4}; + const float filter_data[] = {1, 2, 3, 4, 3, 4, 5, 6, 7, 8, 5, 6, 3, 4, 1, 2}; + int bias_dims[] = {4, 1, 1, 1, 4}; + const float bias_data[] = {3, -2, 4, 6}; + int output_dims[] = {4, 1, 1, 2, 4}; + const float golden[] = {43, 48, 18, 22, 3, -4, -28, -36}; + + const int input_size = 12; + const int filter_size = 16; + const int output_size = 8; + const int bias_size = 4; + int8_t input_quantized[input_size]; + int8_t filter_quantized[filter_size]; + int32_t bias_quantized[bias_size]; + int8_t golden_quantized[output_size]; + int8_t output_data[output_size]; + float output_float[output_size]; + + const float input_scale = 0.5; + const float output_scale = 1.0; + const int input_zero_point = 0; + const int output_zero_point = 0; + + TfLiteDepthwiseConvParams conv_params; + conv_params.activation = kTfLiteActNone; + conv_params.dilation_width_factor = 1; + conv_params.dilation_height_factor = 1; + 
conv_params.stride_height = 1; + conv_params.stride_width = 1; + + tflite::testing::TestDepthwiseConvQuantizedPerChannel( + input_dims, input_data, input_quantized, input_scale, input_zero_point, + filter_dims, filter_data, filter_quantized, bias_dims, bias_data, + bias_quantized, output_dims, golden, golden_quantized, output_data, + output_scale, output_zero_point, &conv_params); + + tflite::testing::TestDepthwiseConvFloat( + input_dims, input_data, filter_dims, filter_data, bias_dims, bias_data, + golden, output_dims, &conv_params, output_float); +} + +TF_LITE_MICRO_TEST(PerChannelBroadcastQuantizationParams) { + const float input_scale = 1.0f; + const float filter_scale = 1.0f; + const float output_scale = 1.0f; + + const int input_elements = 12; + int input_shape[] = {4, 1, 3, 2, 2}; + const float input_values[] = {1, 2, 7, 8, 3, 4, 9, 10, 5, 6, 11, 12}; + const int filter_elements = 16; + int filter_shape[] = {4, 1, 2, 2, 4}; + const float filter_values[] = {1, 2, 3, 4, -9, 10, -11, 12, + 5, 6, 7, 8, 13, -14, 15, -16}; + const int bias_elements = 4; + int bias_shape[] = {4, 1, 1, 1, 4}; + const int output_elements = 8; + const float bias_values[] = {1, 2, 3, 4}; + const float golden[] = { + 71, -34, 99, -20, 91, -26, 127, -4, + }; + int output_shape[] = {4, 1, 2, 1, 4}; + const int output_dims_count = 8; + int8_t output_data[output_dims_count]; + + int8_t input_quantized[input_elements]; + int8_t filter_quantized[filter_elements]; + int32_t bias_quantized[bias_elements]; + int8_t golden_quantized[output_elements]; + + TfLiteIntArray* input_dims = tflite::testing::IntArrayFromInts(input_shape); + TfLiteIntArray* filter_dims = tflite::testing::IntArrayFromInts(filter_shape); + TfLiteIntArray* bias_dims = tflite::testing::IntArrayFromInts(bias_shape); + TfLiteIntArray* output_dims = tflite::testing::IntArrayFromInts(output_shape); + + // Create per-layer quantized int8_t input tensor. 
+ TfLiteTensor input_tensor = tflite::testing::CreateQuantizedTensor( + input_values, input_quantized, input_dims, input_scale, 0); + int input_zero_points[2] = {1, 0}; + float input_scales[2] = {1, input_scale}; + TfLiteAffineQuantization input_quant = { + tflite::testing::FloatArrayFromFloats(input_scales), + tflite::testing::IntArrayFromInts(input_zero_points), 0}; + input_tensor.quantization = {kTfLiteAffineQuantization, &input_quant}; + + // Create per-layer quantized int8_t filter tensor. + TfLiteTensor filter_tensor = tflite::testing::CreateQuantizedTensor( + filter_values, filter_quantized, filter_dims, filter_scale, 0); + int filter_zero_points[2] = {1, 0}; + float filter_scales[2] = {1, filter_scale}; + TfLiteAffineQuantization filter_quant = { + tflite::testing::FloatArrayFromFloats(filter_scales), + tflite::testing::IntArrayFromInts(filter_zero_points), 0}; + filter_tensor.quantization = {kTfLiteAffineQuantization, &filter_quant}; + + // Create per-layer quantized int32_t bias tensor. + tflite::SymmetricQuantize(bias_values, bias_quantized, bias_elements, + input_scale * output_scale); + TfLiteTensor bias_tensor = + tflite::testing::CreateTensor(bias_quantized, bias_dims); + + int bias_zero_points[2] = {1, 0}; + float bias_scales[2] = {1, input_scale * filter_scale}; + TfLiteAffineQuantization bias_quant = { + tflite::testing::FloatArrayFromFloats(bias_scales), + tflite::testing::IntArrayFromInts(bias_zero_points), 0}; + bias_tensor.quantization = {kTfLiteAffineQuantization, &bias_quant}; + + // Create per-layer quantized int8_t output tensor. 
+ TfLiteTensor output_tensor = tflite::testing::CreateQuantizedTensor( + output_data, output_dims, output_scale, 0); + int output_zero_points[2] = {1, 0}; + float output_scales[2] = {1, output_scale}; + TfLiteAffineQuantization output_quant = { + tflite::testing::FloatArrayFromFloats(output_scales), + tflite::testing::IntArrayFromInts(output_zero_points), 0}; + output_tensor.quantization = {kTfLiteAffineQuantization, &output_quant}; + + constexpr int inputs_size = 3; + constexpr int outputs_size = 1; + constexpr int tensors_size = inputs_size + outputs_size; + TfLiteTensor tensors[tensors_size] = { + input_tensor, + filter_tensor, + bias_tensor, + output_tensor, + }; + + tflite::Quantize(golden, golden_quantized, output_dims_count, output_scale, + 0); + + TfLiteDepthwiseConvParams conv_params; + conv_params.activation = kTfLiteActNone; + conv_params.dilation_width_factor = 1; + conv_params.dilation_height_factor = 1; + conv_params.stride_height = 1; + conv_params.stride_width = 1; + + TF_LITE_MICRO_EXPECT_EQ( + kTfLiteOk, tflite::testing::ValidateDepthwiseConvGoldens( + golden_quantized, output_dims_count, &conv_params, 1e-5, + tensors_size, tensors)); +} + +#endif // !defined(XTENSA) + +TF_LITE_MICRO_TEST(FilterDimsNotMatchingAffineQuantization) { + int input_shape[] = {4, 1, 2, 3, 2}; + const float input_data[] = {3, 2, 1, -1, -2, -3, 4, 3, 2, -2, -3, -4}; + int filter_shape[] = {4, 1, 2, 2, 4}; + const float filter_data[] = {1, 2, 3, 4, 3, 4, 5, 6, 7, 8, 5, 6, 3, 4, 1, 2}; + int bias_shape[] = {4, 1, 1, 1, 4}; + const float bias_data[] = {3, -2, 4, 6}; + int output_shape[] = {4, 1, 1, 2, 4}; + + const int input_size = 12; + const int filter_size = 16; + const int output_size = 8; + const int bias_size = 4; + int8_t input_quantized[input_size]; + int8_t filter_quantized[filter_size]; + int32_t bias_quantized[bias_size]; + int8_t golden_quantized[output_size] = {}; + int zero_points[bias_size + 1]; + float scales[bias_size + 1]; + int8_t output_data[output_size]; 
+ + const float input_scale = 0.5; + const float output_scale = 1.0; + const int input_zero_point = 0; + const int output_zero_point = 0; + + TfLiteIntArray* input_dims = tflite::testing::IntArrayFromInts(input_shape); + TfLiteIntArray* filter_dims = tflite::testing::IntArrayFromInts(filter_shape); + TfLiteIntArray* bias_dims = tflite::testing::IntArrayFromInts(bias_shape); + TfLiteIntArray* output_dims = tflite::testing::IntArrayFromInts(output_shape); + + int filter_zero_points[5]; + float filter_scales[5]; + TfLiteAffineQuantization filter_quant; + TfLiteAffineQuantization bias_quant; + TfLiteTensor input_tensor = tflite::testing::CreateQuantizedTensor( + input_data, input_quantized, input_dims, input_scale, input_zero_point); + TfLiteTensor filter_tensor = + tflite::testing::CreateSymmetricPerChannelQuantizedTensor( + filter_data, filter_quantized, filter_dims, filter_scales, + filter_zero_points, &filter_quant, 0 /* quantized dimension */); + TfLiteTensor bias_tensor = + tflite::testing::CreatePerChannelQuantizedBiasTensor( + bias_data, bias_quantized, bias_dims, input_scale, &filter_scales[1], + scales, zero_points, &bias_quant, 0); + TfLiteTensor output_tensor = tflite::testing::CreateQuantizedTensor( + output_data, output_dims, output_scale, output_zero_point); + + float input_scales[] = {1, input_scale}; + int input_zero_points[] = {1, input_zero_point}; + TfLiteAffineQuantization input_quant = { + tflite::testing::FloatArrayFromFloats(input_scales), + tflite::testing::IntArrayFromInts(input_zero_points), 0}; + input_tensor.quantization = {kTfLiteAffineQuantization, &input_quant}; + + constexpr int inputs_size = 3; + constexpr int outputs_size = 1; + constexpr int tensors_size = inputs_size + outputs_size; + TfLiteTensor tensors[tensors_size] = { + input_tensor, + filter_tensor, + bias_tensor, + output_tensor, + }; + + TfLiteDepthwiseConvParams conv_params; + conv_params.activation = kTfLiteActNone; + conv_params.dilation_width_factor = 1; + 
conv_params.dilation_height_factor = 1; + conv_params.stride_height = 1; + conv_params.stride_width = 1; + + // Set filter quant to mismatched dimension. + TfLiteAffineQuantization* quant = reinterpret_cast( + filter_tensor.quantization.params); + quant->scale->size = 2; + TF_LITE_MICRO_EXPECT_EQ(kTfLiteError, + tflite::testing::ValidateDepthwiseConvGoldens( + golden_quantized, output_size, &conv_params, 1e-5, + tensors_size, tensors)); + + // Set scale back to correct dimension, and make zero point array too short. + quant->scale->size = filter_shape[0]; + quant->zero_point->size = 2; + TF_LITE_MICRO_EXPECT_EQ(kTfLiteError, + tflite::testing::ValidateDepthwiseConvGoldens( + golden_quantized, output_size, &conv_params, 1e-5, + tensors_size, tensors)); +} + +TF_LITE_MICRO_TEST(Int8Input32x4Filter32x4ShouldMatchGolden) { + const int input_elements = 32 * 4; + const int filter_elements = 32 * 4; + const int bias_elements = 32; + const int output_elements = 32; + int input_shape[] = {4, 1, 4, 1, 32}; + int filter_shape[] = {4, 1, 4, 1, 32}; + int bias_shape[] = {1, 32}; + int output_shape[] = {4, 1, 1, 1, 32}; + const float input_values[] = { + 11.0589, 10.8824, 11.1766, 11.5295, 10.8236, 9.5295, 9.5295, 10.0001, + 11.2354, 10.8824, 9.1765, 9.0589, 9.6471, 8.9412, 7.9412, 9.0001, + 9.3530, 7.5295, 9.2354, 9.5883, 7.5883, 8.1765, 7.5883, 9.2942, + 9.1177, 8.5883, 8.2354, 8.6471, 8.0589, 8.0001, 7.4118, 7.3530, + 11.0001, 11.1177, 11.0589, 11.2354, 10.5883, 9.2942, 9.2942, 10.1177, + 11.2354, 10.8824, 8.9412, 8.8236, 9.2354, 8.8824, 7.0001, 9.1177, + 9.5883, 8.2354, 9.1765, 9.5295, 7.4118, 8.5883, 8.1177, 9.1765, + 9.0001, 9.0589, 8.9412, 8.2942, 7.8824, 8.4118, 7.2942, 7.2354, + 10.4118, 10.8824, 11.1177, 11.0001, 10.0001, 9.7060, 9.7648, 10.1766, + 11.1766, 10.6471, 8.6471, 8.5295, 9.5295, 9.0001, 7.0001, 9.4118, + 9.8236, 8.0001, 9.2354, 9.5883, 7.5295, 9.0001, 8.5295, 9.0589, + 8.9412, 9.1177, 8.9412, 8.0001, 8.0589, 8.8824, 7.0589, 7.3530, + 11.3530, 11.0589, 
10.7060, 10.7648, 9.9413, 9.1177, 9.1177, 9.7648, + 10.7060, 10.2354, 8.5883, 8.8236, 9.7648, 9.2942, 7.5295, 9.2354, + 9.7060, 8.1177, 9.2942, 9.5883, 7.7648, 9.6471, 9.1177, 9.4707, + 9.3530, 8.8236, 8.5295, 8.0589, 8.6471, 9.5883, 7.4118, 7.5883}; + const float filter_values[] = { + -0.1617, -0.1948, 0.1419, -0.2311, -0.0891, 0.1551, 0.0033, 0.3037, + -0.1683, 0.1353, 0.1518, -0.1683, -0.1386, 0.1452, 0.1816, 0.1716, + -0.1948, 0.2080, 0.2245, -0.1981, -0.2410, 0.1849, 0.1981, 0.1584, + 0.2509, 0.1783, -0.2146, -0.1518, 0.2080, -0.2872, 0.2014, 0.2476, + -0.4126, -0.0561, -0.3235, -0.0594, -0.0957, 0.2014, -0.1056, 0.1386, + -0.2542, -0.1617, 0.1287, -0.1816, -0.0363, 0.1419, -0.0594, 0.2344, + -0.0099, 0.4192, 0.1287, -0.2311, -0.2212, -0.0528, -0.2080, 0.1816, + -0.1452, 0.1221, 0.1254, -0.1056, -0.0759, 0.1221, 0.1023, 0.1485, + 0.2707, 0.1716, -0.1882, -0.1783, 0.1650, -0.2740, 0.1915, 0.2080, + -0.2971, -0.2575, -0.3169, 0.0198, -0.0231, 0.2410, -0.0429, 0.0660, + -0.1816, 0.1981, 0.2014, -0.1386, -0.1915, 0.1716, 0.1320, 0.1419, + 0.1320, 0.1353, -0.1386, -0.1716, 0.1320, -0.1650, 0.1386, 0.0825, + -0.1419, -0.1023, 0.1783, 0.0462, 0.2047, -0.2179, -0.1518, -0.1551, + 0.1518, 0.3334, 0.3103, -0.2047, -0.2047, -0.0957, -0.1650, 0.1221, + 0.0990, 0.1353, -0.1617, -0.1485, 0.1650, -0.1816, 0.1518, 0.1254, + -0.0363, -0.1254, 0.1386, 0.0429, 0.2113, -0.2839, -0.1056, -0.2278}; + const float bias_values[] = { + 0.0000, 0.0000, 0.0000, 0.0000, 0.0000, 0.0000, 0.0000, 0.0000, + 0.0000, 0.0000, 0.0000, 0.0000, 0.0000, 0.0000, 0.0000, 0.0000, + 0.0000, 0.0000, 0.0000, 0.0000, 0.0000, 0.0000, 0.0000, 0.0000, + 0.0000, 0.0000, 0.0000, 0.0000, 0.0000, 0.0000, 0.0000, 0.0000}; + const float golden[] = { + -5.1194, -2.0075, -2.1751, -4.7958, 1.7073, -1.2963, -0.4641, 5.0416, + -6.4424, 0.3836, 2.4684, -4.7643, -3.8913, 3.8382, -0.5164, 5.4304, + -2.7400, 7.7016, 3.6115, -6.8545, -3.6290, 0.8509, 2.3247, 5.6117, + 1.8215, 2.7645, -0.7032, -3.2156, 3.9689, -5.4583, 
2.4346, 1.7731}; + + // Quantization Parameters. All scales except output are 1.0, and all zero + // points are 0. This direct-maps the values to floating point and makes it + // easy to reson about them. + const float input_scale = 0.058824; + const float filter_scale = 0.003301; + const float output_scale = 0.092596; + const int input_zero_point = -128; + const int output_zero_point = 0; + + TfLiteIntArray* input_dims = tflite::testing::IntArrayFromInts(input_shape); + TfLiteIntArray* filter_dims = tflite::testing::IntArrayFromInts(filter_shape); + TfLiteIntArray* bias_dims = tflite::testing::IntArrayFromInts(bias_shape); + TfLiteIntArray* output_dims = tflite::testing::IntArrayFromInts(output_shape); + + // Create per-tensor quantized int8_t input tensor. + int8_t input_quantized[input_elements]; + TfLiteTensor input_tensor = tflite::testing::CreateQuantizedTensor( + input_values, input_quantized, input_dims, input_scale, input_zero_point); + + // Set zero point and scale arrays with a single element for each. + int input_zero_points[] = {1, input_zero_point}; + float input_scales[] = {1, input_scale}; + TfLiteAffineQuantization input_quant = { + tflite::testing::FloatArrayFromFloats(input_scales), + tflite::testing::IntArrayFromInts(input_zero_points), 0}; + input_tensor.quantization = {kTfLiteAffineQuantization, &input_quant}; + + // Create per-tensor quantized int8_t filter tensor. + int8_t filter_quantized[filter_elements]; + TfLiteTensor filter_tensor = tflite::testing::CreateQuantizedTensor( + filter_values, filter_quantized, filter_dims, filter_scale, 0); + + // Set zero point and scale arrays with a single element for each. 
+ int filter_zero_points[] = {1, 0}; + float filter_scales[] = {1, filter_scale}; + TfLiteAffineQuantization filter_quant = { + tflite::testing::FloatArrayFromFloats(filter_scales), + tflite::testing::IntArrayFromInts(filter_zero_points), 0}; + filter_tensor.quantization = {kTfLiteAffineQuantization, &filter_quant}; + + // Create per-tensor quantized int32_t bias tensor. + int32_t bias_quantized[bias_elements]; + // See https://www.tensorflow.org/lite/performance/quantization_spec for a + // detailed explanation of why bias scale is input_scale * filter_scale. + tflite::SymmetricQuantize(bias_values, bias_quantized, bias_elements, + input_scale * output_scale); + TfLiteTensor bias_tensor = + tflite::testing::CreateTensor(bias_quantized, bias_dims); + + // Set zero point and scale arrays with a single element for each. + int bias_zero_points[] = {1, 0}; + float bias_scales[] = {1, input_scale * filter_scale}; + TfLiteAffineQuantization bias_quant = { + tflite::testing::FloatArrayFromFloats(bias_scales), + tflite::testing::IntArrayFromInts(bias_zero_points), 0}; + bias_tensor.quantization = {kTfLiteAffineQuantization, &bias_quant}; + + // Create per-tensor quantized int8_t output tensor. + int8_t output_quantized[output_elements]; + TfLiteTensor output_tensor = tflite::testing::CreateQuantizedTensor( + output_quantized, output_dims, output_scale, output_zero_point); + + // Set zero point and scale arrays with a single element for each. + int output_zero_points[] = {1, output_zero_point}; + float output_scales[] = {1, output_scale}; + TfLiteAffineQuantization output_quant = { + tflite::testing::FloatArrayFromFloats(output_scales), + tflite::testing::IntArrayFromInts(output_zero_points), 0}; + output_tensor.quantization = {kTfLiteAffineQuantization, &output_quant}; + + // The 3 inputs include the input, filter and bias tensors. 
+ constexpr int kInputsSize = 3; + constexpr int kOutputsSize = 1; + constexpr int kTensorsSize = kInputsSize + kOutputsSize; + TfLiteTensor tensors[kTensorsSize] = { + input_tensor, + filter_tensor, + bias_tensor, + output_tensor, + }; + + int8_t golden_quantized[output_elements]; + tflite::Quantize(golden, golden_quantized, output_elements, output_scale, 0); + + // Errors due to quantization should not exceed 1. + constexpr int kQuantizationTolerance = 1; + + TfLiteDepthwiseConvParams conv_params; + conv_params.activation = kTfLiteActNone; + conv_params.dilation_width_factor = 1; + conv_params.dilation_height_factor = 1; + conv_params.stride_height = 1; + conv_params.stride_width = 1; + tflite::testing::ValidateDepthwiseConvGoldens( + golden_quantized, output_elements, &conv_params, kQuantizationTolerance, + kTensorsSize, tensors); +} + +TF_LITE_MICRO_TEST(Int8Input32x1Filter32x1ShouldMatchGolden) { + const int input_elements = 32 * 1; + const int filter_elements = 32 * 1; + const int bias_elements = 32; + const int output_elements = 32; + int input_shape[] = {4, 1, 1, 1, 32}; + int filter_shape[] = {4, 1, 1, 1, 32}; + int bias_shape[] = {1, 32}; + int output_shape[] = {4, 1, 1, 1, 32}; + const float input_values[] = { + 11.0589, 10.8824, 11.1766, 11.5295, 10.8236, 9.5295, 9.5295, 10.0001, + 11.2354, 10.8824, 9.1765, 9.0589, 9.6471, 8.9412, 7.9412, 9.0001, + 9.3530, 7.5295, 9.2354, 9.5883, 7.5883, 8.1765, 7.5883, 9.2942, + 9.3530, 8.8236, 8.5295, 8.0589, 8.6471, 9.5883, 7.4118, 7.5883}; + const float filter_values[] = { + -0.1419, -0.1023, 0.1783, 0.0462, 0.2047, -0.2179, -0.1518, -0.1551, + 0.1518, 0.3334, 0.3103, -0.2047, -0.2047, -0.0957, -0.1650, 0.1221, + 0.0990, 0.1353, -0.1617, -0.1485, 0.1650, -0.1816, 0.1518, 0.1254, + -0.0363, -0.1254, 0.1386, 0.0429, 0.2113, -0.2839, -0.1056, -0.2278}; + const float bias_values[] = { + 0.0000, 0.0000, 0.0000, 0.0000, 0.0000, 0.0000, 0.0000, 0.0000, + 0.0000, 0.0000, 0.0000, 0.0000, 0.0000, 0.0000, 0.0000, 0.0000, + 
0.0000, 0.0000, 0.0000, 0.0000, 0.0000, 0.0000, 0.0000, 0.0000, + 0.0000, 0.0000, 0.0000, 0.0000, 0.0000, 0.0000, 0.0000, 0.0000}; + const float golden[] = { + -1.5741, -1.1112, 2.0371, 0.5556, 2.2223, -2.0371, -1.4815, -1.5741, + 1.6667, 3.6112, 2.8705, -1.8519, -1.9445, -0.8334, -1.2963, 1.1112, + 0.9260, 1.0186, -1.4815, -1.3889, 1.2963, -1.4815, 1.1112, 1.2037, + -0.3704, -1.1112, 1.2037, 0.3704, 1.8519, -2.6853, -0.7408, -1.7593}; + + // Quantization Parameters. All scales except output are 1.0, and all zero + // points are 0. This direct-maps the values to floating point and makes it + // easy to reson about them. + const float input_scale = 0.058824; + const float filter_scale = 0.003301; + const float output_scale = 0.092596; + const int input_zero_point = -128; + const int output_zero_point = 0; + + TfLiteIntArray* input_dims = tflite::testing::IntArrayFromInts(input_shape); + TfLiteIntArray* filter_dims = tflite::testing::IntArrayFromInts(filter_shape); + TfLiteIntArray* bias_dims = tflite::testing::IntArrayFromInts(bias_shape); + TfLiteIntArray* output_dims = tflite::testing::IntArrayFromInts(output_shape); + + // Create per-tensor quantized int8_t input tensor. + int8_t input_quantized[input_elements]; + TfLiteTensor input_tensor = tflite::testing::CreateQuantizedTensor( + input_values, input_quantized, input_dims, input_scale, input_zero_point); + + // Set zero point and scale arrays with a single element for each. + int input_zero_points[] = {1, input_zero_point}; + float input_scales[] = {1, input_scale}; + TfLiteAffineQuantization input_quant = { + tflite::testing::FloatArrayFromFloats(input_scales), + tflite::testing::IntArrayFromInts(input_zero_points), 0}; + input_tensor.quantization = {kTfLiteAffineQuantization, &input_quant}; + + // Create per-tensor quantized int8_t filter tensor. 
+ int8_t filter_quantized[filter_elements]; + TfLiteTensor filter_tensor = tflite::testing::CreateQuantizedTensor( + filter_values, filter_quantized, filter_dims, filter_scale, 0); + + // Set zero point and scale arrays with a single element for each. + int filter_zero_points[] = {1, 0}; + float filter_scales[] = {1, filter_scale}; + TfLiteAffineQuantization filter_quant = { + tflite::testing::FloatArrayFromFloats(filter_scales), + tflite::testing::IntArrayFromInts(filter_zero_points), 0}; + filter_tensor.quantization = {kTfLiteAffineQuantization, &filter_quant}; + + // Create per-tensor quantized int32_t bias tensor. + int32_t bias_quantized[bias_elements]; + // See https://www.tensorflow.org/lite/performance/quantization_spec for a + // detailed explanation of why bias scale is input_scale * filter_scale. + tflite::SymmetricQuantize(bias_values, bias_quantized, bias_elements, + input_scale * output_scale); + TfLiteTensor bias_tensor = + tflite::testing::CreateTensor(bias_quantized, bias_dims); + + // Set zero point and scale arrays with a single element for each. + int bias_zero_points[] = {1, 0}; + float bias_scales[] = {1, input_scale * filter_scale}; + TfLiteAffineQuantization bias_quant = { + tflite::testing::FloatArrayFromFloats(bias_scales), + tflite::testing::IntArrayFromInts(bias_zero_points), 0}; + bias_tensor.quantization = {kTfLiteAffineQuantization, &bias_quant}; + + // Create per-tensor quantized int8_t output tensor. + int8_t output_quantized[output_elements]; + TfLiteTensor output_tensor = tflite::testing::CreateQuantizedTensor( + output_quantized, output_dims, output_scale, output_zero_point); + + // Set zero point and scale arrays with a single element for each. 
+ int output_zero_points[] = {1, output_zero_point}; + float output_scales[] = {1, output_scale}; + TfLiteAffineQuantization output_quant = { + tflite::testing::FloatArrayFromFloats(output_scales), + tflite::testing::IntArrayFromInts(output_zero_points), 0}; + output_tensor.quantization = {kTfLiteAffineQuantization, &output_quant}; + + // The 3 inputs include the input, filter and bias tensors. + constexpr int kInputsSize = 3; + constexpr int kOutputsSize = 1; + constexpr int kTensorsSize = kInputsSize + kOutputsSize; + TfLiteTensor tensors[kTensorsSize] = { + input_tensor, + filter_tensor, + bias_tensor, + output_tensor, + }; + + int8_t golden_quantized[output_elements]; + tflite::Quantize(golden, golden_quantized, output_elements, output_scale, 0); + + // Errors due to quantization should not exceed 1. + constexpr int kQuantizationTolerance = 1; + + TfLiteDepthwiseConvParams conv_params; + conv_params.activation = kTfLiteActNone; + conv_params.dilation_width_factor = 1; + conv_params.dilation_height_factor = 1; + conv_params.stride_height = 2; + conv_params.stride_width = 2; + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, + tflite::testing::ValidateDepthwiseConvGoldens( + golden_quantized, output_elements, &conv_params, + kQuantizationTolerance, kTensorsSize, tensors)); +} + +// TODO(b/268384678): xtensa vision p6 kernels break +// this test, will if def till properly investigated. + +// Quantizing int8-ranged filter values down to int4 doesn't always yield the +// accuracy sufficient to meet the golden values. So this test was created by +// handcrafting filter values within the int4 range, and the golden data was +// obtained by running TestDepthwiseConvQuantizedPerChannel() with int8 +// quantization, and ensuring that int4 quantization yields the same outputs. 
+TF_LITE_MICRO_TEST(SimpleTestQuantizedPerChannelInt4Filter) { + const int input_elements = 12; + int input_shape[] = {4, 1, 3, 2, 2}; + const float input_values[] = {1, 2, 7, 8, 3, 4, 9, 10, 5, 6, 11, 12}; + const int filter_elements = 16; + int filter_shape[] = {4, 1, 2, 2, 4}; + const float filter_values[] = {1, 2, 3, 4, -5, 7, -6, 7, + 5, 6, 7, 4, 2, -5, 4, 0}; + const int bias_elements = 4; + int bias_shape[] = {4, 1, 1, 1, 4}; + const int output_elements = 8; + const float bias_values[] = {1, 2, 3, 4}; + const float golden[] = { + 0, 26, 29, 84, 6, 46, 45, 114, + }; + int output_shape[] = {4, 1, 2, 1, 4}; + const int output_dims_count = 8; + int8_t output_data[output_dims_count]; + + const float input_scale = 0.5; + const float output_scale = 1.0f; + const int input_zero_point = 0; + const int output_zero_point = 0; + + int8_t input_quantized[input_elements]; + int8_t filter_quantized[filter_elements]; + int32_t bias_quantized[bias_elements]; + int8_t golden_quantized[output_elements]; + + TfLiteDepthwiseConvParams conv_params; + conv_params.activation = kTfLiteActNone; + conv_params.dilation_width_factor = 1; + conv_params.dilation_height_factor = 1; + conv_params.stride_height = 1; + conv_params.stride_width = 1; + + tflite::testing::TestDepthwiseConvQuantizedPerChannel( + input_shape, input_values, input_quantized, input_scale, input_zero_point, + filter_shape, filter_values, filter_quantized, bias_shape, bias_values, + bias_quantized, output_shape, golden, golden_quantized, output_data, + output_scale, output_zero_point, &conv_params, kTfLiteInt4); +} + +TF_LITE_MICRO_TEST(SimpleTestQuantizedPerChannel) { + const int input_elements = 12; + int input_shape[] = {4, 1, 3, 2, 2}; + const float input_values[] = {1, 2, 7, 8, 3, 4, 9, 10, 5, 6, 11, 12}; + const int filter_elements = 16; + int filter_shape[] = {4, 1, 2, 2, 4}; + const float filter_values[] = {1, 2, 3, 4, -9, 10, -11, 12, + 5, 6, 7, 8, 13, -14, 15, -16}; + const int bias_elements = 4; + int 
bias_shape[] = {4, 1, 1, 1, 4}; + const int output_elements = 8; + const float bias_values[] = {1, 2, 3, 4}; + const float golden[] = { + 71, -34, 99, -20, 91, -26, 127, -4, + }; + int output_shape[] = {4, 1, 2, 1, 4}; + const int output_dims_count = 8; + int8_t output_data[output_dims_count]; + + const float input_scale = 0.5; + const float output_scale = 1.0f; + const int input_zero_point = 0; + const int output_zero_point = 0; + + int8_t input_quantized[input_elements]; + int8_t filter_quantized[filter_elements]; + int32_t bias_quantized[bias_elements]; + int8_t golden_quantized[output_elements]; + + TfLiteDepthwiseConvParams conv_params; + conv_params.activation = kTfLiteActNone; + conv_params.dilation_width_factor = 1; + conv_params.dilation_height_factor = 1; + conv_params.stride_height = 1; + conv_params.stride_width = 1; + + tflite::testing::TestDepthwiseConvQuantizedPerChannel( + input_shape, input_values, input_quantized, input_scale, input_zero_point, + filter_shape, filter_values, filter_quantized, bias_shape, bias_values, + bias_quantized, output_shape, golden, golden_quantized, output_data, + output_scale, output_zero_point, &conv_params); +} + +TF_LITE_MICRO_TESTS_END diff --git a/tensorflow/lite/micro/kernels/dequantize.cc b/tensorflow/lite/micro/kernels/dequantize.cc new file mode 100644 index 0000000..428c866 --- /dev/null +++ b/tensorflow/lite/micro/kernels/dequantize.cc @@ -0,0 +1,84 @@ +/* Copyright 2021 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#include "tensorflow/lite/kernels/internal/reference/dequantize.h" + +#include "tensorflow/lite/c/builtin_op_data.h" +#include "tensorflow/lite/c/common.h" +#include "tensorflow/lite/kernels/internal/quantization_util.h" +#include "tensorflow/lite/kernels/internal/reference/quantize.h" +#include "tensorflow/lite/kernels/internal/reference/requantize.h" +#include "tensorflow/lite/kernels/internal/tensor_ctypes.h" +#include "tensorflow/lite/kernels/kernel_util.h" +#include "tensorflow/lite/micro/kernels/dequantize.h" +#include "tensorflow/lite/micro/kernels/kernel_util.h" +#include "tensorflow/lite/micro/micro_log.h" + +namespace tflite { + +void* DequantizeInit(TfLiteContext* context, const char* buffer, + size_t length) { + TFLITE_DCHECK(context->AllocatePersistentBuffer != nullptr); + return context->AllocatePersistentBuffer(context, sizeof(DequantizeOpData)); +} + +TfLiteStatus DequantizeEval(TfLiteContext* context, TfLiteNode* node) { + TFLITE_DCHECK(node->user_data != nullptr); + DequantizeOpData* data = static_cast(node->user_data); + + const TfLiteEvalTensor* input = tflite::micro::GetEvalInput(context, node, 0); + TfLiteEvalTensor* output = tflite::micro::GetEvalOutput(context, node, 0); + + // Output type ensured to be kTfLiteFloat32 at the Prepare stage + TFLITE_DCHECK(output->type == kTfLiteFloat32); + + switch (input->type) { + case kTfLiteInt8: + reference_ops::Dequantize(data->quantization_params, + tflite::micro::GetTensorShape(input), + tflite::micro::GetTensorData(input), + tflite::micro::GetTensorShape(output), + tflite::micro::GetTensorData(output)); + break; + case kTfLiteInt16: + reference_ops::Dequantize(data->quantization_params, + tflite::micro::GetTensorShape(input), + tflite::micro::GetTensorData(input), + tflite::micro::GetTensorShape(output), + 
tflite::micro::GetTensorData(output)); + break; + case kTfLiteUInt8: + reference_ops::Dequantize(data->quantization_params, + tflite::micro::GetTensorShape(input), + tflite::micro::GetTensorData(input), + tflite::micro::GetTensorShape(output), + tflite::micro::GetTensorData(output)); + break; + default: + MicroPrintf("Input %s, output %s not supported.", + TfLiteTypeGetName(input->type), + TfLiteTypeGetName(output->type)); + return kTfLiteError; + } + + return kTfLiteOk; +} + +TFLMRegistration Register_DEQUANTIZE() { + return tflite::micro::RegisterOp(DequantizeInit, DequantizePrepare, + DequantizeEval); +} + +} // namespace tflite diff --git a/tensorflow/lite/micro/kernels/dequantize.h b/tensorflow/lite/micro/kernels/dequantize.h new file mode 100644 index 0000000..fe6ec16 --- /dev/null +++ b/tensorflow/lite/micro/kernels/dequantize.h @@ -0,0 +1,38 @@ +/* Copyright 2021 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/ + +#ifndef TENSORFLOW_LITE_MICRO_KERNELS_DEQUANTIZE_H_ +#define TENSORFLOW_LITE_MICRO_KERNELS_DEQUANTIZE_H_ + +#include "tensorflow/lite/c/builtin_op_data.h" +#include "tensorflow/lite/c/common.h" +#include "tensorflow/lite/kernels/internal/types.h" + +namespace tflite { + +struct DequantizeOpData { + tflite::DequantizationParams quantization_params; + // The scaling factor from input to output (aka the 'real multiplier') can + // be represented as a fixed point multiplier plus a left shift. + int32_t output_multiplier; + int output_shift; + int32_t output_zero_point; +}; + +TfLiteStatus DequantizePrepare(TfLiteContext* context, TfLiteNode* node); + +} // namespace tflite + +#endif // TENSORFLOW_LITE_MICRO_KERNELS_DEQUANTIZE_H_ diff --git a/tensorflow/lite/micro/kernels/dequantize_common.cc b/tensorflow/lite/micro/kernels/dequantize_common.cc new file mode 100644 index 0000000..08e448e --- /dev/null +++ b/tensorflow/lite/micro/kernels/dequantize_common.cc @@ -0,0 +1,58 @@ +/* Copyright 2021 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/ + +#include "tensorflow/lite/c/builtin_op_data.h" +#include "tensorflow/lite/c/common.h" +#include "tensorflow/lite/kernels/internal/reference/dequantize.h" +#include "tensorflow/lite/kernels/internal/reference/quantize.h" +#include "tensorflow/lite/kernels/internal/reference/requantize.h" +#include "tensorflow/lite/kernels/internal/tensor_ctypes.h" +#include "tensorflow/lite/kernels/kernel_util.h" +#include "tensorflow/lite/micro/kernels/dequantize.h" +#include "tensorflow/lite/micro/kernels/kernel_util.h" + +namespace tflite { + +TfLiteStatus DequantizePrepare(TfLiteContext* context, TfLiteNode* node) { + TFLITE_DCHECK(node->user_data != nullptr); + DequantizeOpData* data = static_cast(node->user_data); + + TF_LITE_ENSURE_EQ(context, NumInputs(node), 1); + TF_LITE_ENSURE_EQ(context, NumOutputs(node), 1); + + MicroContext* micro_context = GetMicroContext(context); + + // TODO(b/140515557): Add cached dequant to improve hybrid model performance. 
+ TfLiteTensor* input = micro_context->AllocateTempInputTensor(node, 0); + TF_LITE_ENSURE(context, input != nullptr); + TfLiteTensor* output = micro_context->AllocateTempOutputTensor(node, 0); + TF_LITE_ENSURE(context, output != nullptr); + + TF_LITE_ENSURE(context, input->type == kTfLiteInt8 || + input->type == kTfLiteInt16 || + input->type == kTfLiteUInt8); + TF_LITE_ENSURE(context, output->type == kTfLiteFloat32); + + data->quantization_params.zero_point = input->params.zero_point; + data->quantization_params.scale = static_cast(input->params.scale); + data->output_zero_point = output->params.zero_point; + + micro_context->DeallocateTempTfLiteTensor(input); + micro_context->DeallocateTempTfLiteTensor(output); + + return kTfLiteOk; +} + +} // namespace tflite diff --git a/tensorflow/lite/micro/kernels/dequantize_test.cc b/tensorflow/lite/micro/kernels/dequantize_test.cc new file mode 100644 index 0000000..3a34b8d --- /dev/null +++ b/tensorflow/lite/micro/kernels/dequantize_test.cc @@ -0,0 +1,115 @@ +/* Copyright 2021 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/ + +#include "tensorflow/lite/c/builtin_op_data.h" +#include "tensorflow/lite/c/common.h" +#include "tensorflow/lite/micro/kernels/kernel_runner.h" +#include "tensorflow/lite/micro/test_helpers.h" +#include "tensorflow/lite/micro/testing/micro_test.h" + +namespace tflite { +namespace testing { +namespace { + +template +void ValidateDequantizeGoldens(TfLiteTensor* tensors, int tensors_size, + const T* expected_output_data, T* output_data, + int output_length, float tolerance = 1e-5) { + int inputs_array_data[] = {1, 0}; + TfLiteIntArray* inputs_array = IntArrayFromInts(inputs_array_data); + int outputs_array_data[] = {1, 1}; + TfLiteIntArray* outputs_array = IntArrayFromInts(outputs_array_data); + + const TFLMRegistration registration = tflite::Register_DEQUANTIZE(); + micro::KernelRunner runner(registration, tensors, tensors_size, inputs_array, + outputs_array, + /*builtin_data=*/nullptr); + + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, runner.InitAndPrepare()); + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, runner.Invoke()); + + for (int i = 0; i < output_length; ++i) { + TF_LITE_MICRO_EXPECT_NEAR(expected_output_data[i], output_data[i], 0.001f); + } +} + +template +void TestDequantizeToFloat(int* input_dims_data, const float* input_data, + T* input_data_quantized, float scale, int zero_point, + int* output_dims_data, + const float* expected_output_data, + float* output_data) { + TfLiteIntArray* input_dims = IntArrayFromInts(input_dims_data); + TfLiteIntArray* output_dims = IntArrayFromInts(output_dims_data); + const int output_length = ElementCount(*output_dims); + + // 1 input, 1 output. 
+ const int tensors_size = 2; + TfLiteTensor tensors[tensors_size] = { + CreateQuantizedTensor(input_data, input_data_quantized, input_dims, scale, + zero_point), + CreateTensor(output_data, output_dims), + }; + + ValidateDequantizeGoldens(tensors, tensors_size, expected_output_data, + output_data, output_length); +} + +} // namespace +} // namespace testing +} // namespace tflite + +TF_LITE_MICRO_TESTS_BEGIN + +TF_LITE_MICRO_TEST(DequantizeOpTestInt8) { + const int length = 10; + int dims[] = {2, 5, 2}; + const float values[] = {-63.5, -63, -62.5, -62, -61.5, + 62, 62.5, 63, 63.5, 64}; + const float scale = 0.5; + const int zero_point = -1; + int8_t input_quantized[length]; + float output[length]; + tflite::testing::TestDequantizeToFloat(dims, values, input_quantized, scale, + zero_point, dims, values, output); +} + +TF_LITE_MICRO_TEST(DequantizeOpTestInt16) { + const int length = 10; + int dims[] = {2, 5, 2}; + const float values[] = {-63.5, -63, -62.5, -62, -61.5, + 62, 62.5, 63, 63.5, 64}; + const float scale = 0.5; + const int zero_point = -1; + int16_t input_quantized[length]; + float output[length]; + tflite::testing::TestDequantizeToFloat(dims, values, input_quantized, scale, + zero_point, dims, values, output); +} + +TF_LITE_MICRO_TEST(DequantizeOpTestUint8) { + const int length = 10; + int dims[] = {2, 5, 2}; + const float values[] = {-63.5, -63, -62.5, -62, -61.5, + 62, 62.5, 63, 63.5, 64}; + const float scale = 0.5; + const int zero_point = 127; + uint8_t input_quantized[length]; + float output[length]; + tflite::testing::TestDequantizeToFloat(dims, values, input_quantized, scale, + zero_point, dims, values, output); +} + +TF_LITE_MICRO_TESTS_END diff --git a/tensorflow/lite/micro/kernels/detection_postprocess.cc b/tensorflow/lite/micro/kernels/detection_postprocess.cc new file mode 100644 index 0000000..e807f35 --- /dev/null +++ b/tensorflow/lite/micro/kernels/detection_postprocess.cc @@ -0,0 +1,807 @@ +/* Copyright 2021 The TensorFlow Authors. 
All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#include +#include +#include + +#include "flatbuffers/flexbuffers.h" +#include "tensorflow/lite/c/builtin_op_data.h" +#include "tensorflow/lite/c/common.h" +#include "tensorflow/lite/kernels/internal/common.h" +#include "tensorflow/lite/kernels/internal/quantization_util.h" +#include "tensorflow/lite/kernels/internal/tensor_ctypes.h" +#include "tensorflow/lite/kernels/kernel_util.h" +#include "tensorflow/lite/kernels/op_macros.h" +#include "tensorflow/lite/micro/kernels/kernel_util.h" +#include "tensorflow/lite/micro/micro_utils.h" + +namespace tflite { +namespace { + +/** + * This version of detection_postprocess is specific to TFLite Micro. It + * contains the following differences between the TFLite version: + * + * 1.) Temporaries (temporary tensors) - Micro use instead scratch buffer API. + * 2.) Output dimensions - the TFLite version does not support undefined out + * dimensions. So model must have static out dimensions. 
+ */ + +// Input tensors +constexpr int kInputTensorBoxEncodings = 0; +constexpr int kInputTensorClassPredictions = 1; +constexpr int kInputTensorAnchors = 2; + +// Output tensors +constexpr int kOutputTensorDetectionBoxes = 0; +constexpr int kOutputTensorDetectionClasses = 1; +constexpr int kOutputTensorDetectionScores = 2; +constexpr int kOutputTensorNumDetections = 3; + +constexpr int kNumCoordBox = 4; +constexpr int kBatchSize = 1; + +constexpr int kNumDetectionsPerClass = 100; + +// Object Detection model produces axis-aligned boxes in two formats: +// BoxCorner represents the lower left corner (xmin, ymin) and +// the upper right corner (xmax, ymax). +// CenterSize represents the center (xcenter, ycenter), height and width. +// BoxCornerEncoding and CenterSizeEncoding are related as follows: +// ycenter = y / y_scale * anchor.h + anchor.y; +// xcenter = x / x_scale * anchor.w + anchor.x; +// half_h = 0.5*exp(h/ h_scale)) * anchor.h; +// half_w = 0.5*exp(w / w_scale)) * anchor.w; +// ymin = ycenter - half_h +// ymax = ycenter + half_h +// xmin = xcenter - half_w +// xmax = xcenter + half_w +struct BoxCornerEncoding { + float ymin; + float xmin; + float ymax; + float xmax; +}; + +struct CenterSizeEncoding { + float y; + float x; + float h; + float w; +}; +// We make sure that the memory allocations are contiguous with static_assert. 
+static_assert(sizeof(BoxCornerEncoding) == sizeof(float) * kNumCoordBox, + "Size of BoxCornerEncoding is 4 float values"); +static_assert(sizeof(CenterSizeEncoding) == sizeof(float) * kNumCoordBox, + "Size of CenterSizeEncoding is 4 float values"); + +struct OpData { + int max_detections; + int max_classes_per_detection; // Fast Non-Max-Suppression + int detections_per_class; // Regular Non-Max-Suppression + float non_max_suppression_score_threshold; + float intersection_over_union_threshold; + int num_classes; + bool use_regular_non_max_suppression; + CenterSizeEncoding scale_values; + + // Scratch buffers indexes + int active_candidate_idx; + int decoded_boxes_idx; + int scores_idx; + int score_buffer_idx; + int keep_scores_idx; + int scores_after_regular_non_max_suppression_idx; + int sorted_values_idx; + int keep_indices_idx; + int sorted_indices_idx; + int buffer_idx; + int selected_idx; + + // Cached tensor scale and zero point values for quantized operations + TfLiteQuantizationParams input_box_encodings; + TfLiteQuantizationParams input_class_predictions; + TfLiteQuantizationParams input_anchors; +}; + +void* Init(TfLiteContext* context, const char* buffer, size_t length) { + TFLITE_DCHECK(context->AllocatePersistentBuffer != nullptr); + OpData* op_data = nullptr; + + const uint8_t* buffer_t = reinterpret_cast(buffer); + const flexbuffers::Map& m = flexbuffers::GetRoot(buffer_t, length).AsMap(); + op_data = reinterpret_cast( + context->AllocatePersistentBuffer(context, sizeof(OpData))); + + op_data->max_detections = m["max_detections"].AsInt32(); + op_data->max_classes_per_detection = m["max_classes_per_detection"].AsInt32(); + if (m["detections_per_class"].IsNull()) + op_data->detections_per_class = kNumDetectionsPerClass; + else + op_data->detections_per_class = m["detections_per_class"].AsInt32(); + if (m["use_regular_nms"].IsNull()) + op_data->use_regular_non_max_suppression = false; + else + op_data->use_regular_non_max_suppression = 
m["use_regular_nms"].AsBool(); + + op_data->non_max_suppression_score_threshold = + m["nms_score_threshold"].AsFloat(); + op_data->intersection_over_union_threshold = m["nms_iou_threshold"].AsFloat(); + op_data->num_classes = m["num_classes"].AsInt32(); + op_data->scale_values.y = m["y_scale"].AsFloat(); + op_data->scale_values.x = m["x_scale"].AsFloat(); + op_data->scale_values.h = m["h_scale"].AsFloat(); + op_data->scale_values.w = m["w_scale"].AsFloat(); + + return op_data; +} + +TfLiteStatus Prepare(TfLiteContext* context, TfLiteNode* node) { + auto* op_data = static_cast(node->user_data); + + MicroContext* micro_context = GetMicroContext(context); + + // Inputs: box_encodings, scores, anchors + TF_LITE_ENSURE_EQ(context, NumInputs(node), 3); + TfLiteTensor* input_box_encodings = + micro_context->AllocateTempInputTensor(node, kInputTensorBoxEncodings); + TfLiteTensor* input_class_predictions = + micro_context->AllocateTempInputTensor(node, + kInputTensorClassPredictions); + TfLiteTensor* input_anchors = + micro_context->AllocateTempInputTensor(node, kInputTensorAnchors); + TF_LITE_ENSURE_EQ(context, NumDimensions(input_box_encodings), 3); + TF_LITE_ENSURE_EQ(context, NumDimensions(input_class_predictions), 3); + TF_LITE_ENSURE_EQ(context, NumDimensions(input_anchors), 2); + + TF_LITE_ENSURE_EQ(context, NumOutputs(node), 4); + const int num_boxes = input_box_encodings->dims->data[1]; + const int num_classes = op_data->num_classes; + + op_data->input_box_encodings.scale = input_box_encodings->params.scale; + op_data->input_box_encodings.zero_point = + input_box_encodings->params.zero_point; + op_data->input_class_predictions.scale = + input_class_predictions->params.scale; + op_data->input_class_predictions.zero_point = + input_class_predictions->params.zero_point; + op_data->input_anchors.scale = input_anchors->params.scale; + op_data->input_anchors.zero_point = input_anchors->params.zero_point; + + // Scratch tensors + 
context->RequestScratchBufferInArena(context, num_boxes, + &op_data->active_candidate_idx); + context->RequestScratchBufferInArena(context, + num_boxes * kNumCoordBox * sizeof(float), + &op_data->decoded_boxes_idx); + context->RequestScratchBufferInArena( + context, + input_class_predictions->dims->data[1] * + input_class_predictions->dims->data[2] * sizeof(float), + &op_data->scores_idx); + + // Additional buffers + context->RequestScratchBufferInArena(context, num_boxes * sizeof(float), + &op_data->score_buffer_idx); + context->RequestScratchBufferInArena(context, num_boxes * sizeof(float), + &op_data->keep_scores_idx); + context->RequestScratchBufferInArena( + context, op_data->max_detections * num_boxes * sizeof(float), + &op_data->scores_after_regular_non_max_suppression_idx); + context->RequestScratchBufferInArena( + context, op_data->max_detections * num_boxes * sizeof(float), + &op_data->sorted_values_idx); + context->RequestScratchBufferInArena(context, num_boxes * sizeof(int), + &op_data->keep_indices_idx); + context->RequestScratchBufferInArena( + context, op_data->max_detections * num_boxes * sizeof(int), + &op_data->sorted_indices_idx); + int buffer_size = std::max(num_classes, op_data->max_detections); + context->RequestScratchBufferInArena( + context, buffer_size * num_boxes * sizeof(int), &op_data->buffer_idx); + buffer_size = std::min(num_boxes, op_data->max_detections); + context->RequestScratchBufferInArena( + context, buffer_size * num_boxes * sizeof(int), &op_data->selected_idx); + + // Outputs: detection_boxes, detection_scores, detection_classes, + // num_detections + TF_LITE_ENSURE_EQ(context, NumOutputs(node), 4); + + micro_context->DeallocateTempTfLiteTensor(input_box_encodings); + micro_context->DeallocateTempTfLiteTensor(input_class_predictions); + micro_context->DeallocateTempTfLiteTensor(input_anchors); + + return kTfLiteOk; +} + +class Dequantizer { + public: + Dequantizer(int zero_point, float scale) + : zero_point_(zero_point), 
scale_(scale) {} + float operator()(uint8_t x) { + return (static_cast(x) - zero_point_) * scale_; + } + + private: + int zero_point_; + float scale_; +}; + +template +T ReInterpretTensor(const TfLiteEvalTensor* tensor) { + const float* tensor_base = tflite::micro::GetTensorData(tensor); + return reinterpret_cast(tensor_base); +} + +template +T ReInterpretTensor(TfLiteEvalTensor* tensor) { + float* tensor_base = tflite::micro::GetTensorData(tensor); + return reinterpret_cast(tensor_base); +} + +TfLiteStatus DecodeCenterSizeBoxes(TfLiteContext* context, TfLiteNode* node, + OpData* op_data) { + // Parse input tensor boxencodings + const TfLiteEvalTensor* input_box_encodings = + tflite::micro::GetEvalInput(context, node, kInputTensorBoxEncodings); + TF_LITE_ENSURE_EQ(context, input_box_encodings->dims->data[0], kBatchSize); + const int num_boxes = input_box_encodings->dims->data[1]; + TF_LITE_ENSURE(context, input_box_encodings->dims->data[2] >= kNumCoordBox); + const TfLiteEvalTensor* input_anchors = + tflite::micro::GetEvalInput(context, node, kInputTensorAnchors); + + // Decode the boxes to get (ymin, xmin, ymax, xmax) based on the anchors + CenterSizeEncoding box_centersize; + CenterSizeEncoding scale_values = op_data->scale_values; + CenterSizeEncoding anchor; + for (int idx = 0; idx < num_boxes; ++idx) { + switch (input_box_encodings->type) { + // Float + case kTfLiteFloat32: { + // Please see DequantizeBoxEncodings function for the support detail. + const int box_encoding_idx = idx * input_box_encodings->dims->data[2]; + const float* boxes = &(tflite::micro::GetTensorData( + input_box_encodings)[box_encoding_idx]); + box_centersize = *reinterpret_cast(boxes); + anchor = + ReInterpretTensor(input_anchors)[idx]; + break; + } + default: + // Unsupported type. 
+ return kTfLiteError; + } + + float ycenter = static_cast(static_cast(box_centersize.y) / + static_cast(scale_values.y) * + static_cast(anchor.h) + + static_cast(anchor.y)); + + float xcenter = static_cast(static_cast(box_centersize.x) / + static_cast(scale_values.x) * + static_cast(anchor.w) + + static_cast(anchor.x)); + + float half_h = + static_cast(0.5 * + (std::exp(static_cast(box_centersize.h) / + static_cast(scale_values.h))) * + static_cast(anchor.h)); + float half_w = + static_cast(0.5 * + (std::exp(static_cast(box_centersize.w) / + static_cast(scale_values.w))) * + static_cast(anchor.w)); + + float* decoded_boxes = reinterpret_cast( + context->GetScratchBuffer(context, op_data->decoded_boxes_idx)); + auto& box = reinterpret_cast(decoded_boxes)[idx]; + box.ymin = ycenter - half_h; + box.xmin = xcenter - half_w; + box.ymax = ycenter + half_h; + box.xmax = xcenter + half_w; + } + return kTfLiteOk; +} + +void DecreasingPartialArgSort(const float* values, int num_values, + int num_to_sort, int* indices) { + std::iota(indices, indices + num_values, 0); + std::partial_sort(indices, indices + num_to_sort, indices + num_values, + [&values](const int i, const int j) { + return std::tie(values[i], j) > std::tie(values[j], i); + }); +} + +template +void InsertionSort(int* start, int* end, Compare compare) { + for (int* i = start; i != end; ++i) { + std::rotate(std::upper_bound(start, i, *i, compare), i, i + 1); + } +} + +template +void TopDownMerge(int* values, int* scratch, const int half_num_values, + int num_values, Compare compare) { + int left = 0; + int right = half_num_values; + + for (int i = 0; i < num_values; i++) { + if (left >= half_num_values || + (right < num_values && compare(values[right], values[left]))) { + scratch[i] = values[right++]; + } else { + scratch[i] = values[left++]; + } + } + memcpy(values, scratch, num_values * sizeof(int)); +} + +template +void MergeSort(int* values, int* scratch, const int num_values, + Compare compare) { + constexpr 
int threshold = 20; + + if (num_values < threshold) { + InsertionSort(values, values + num_values, compare); + return; + } + + const int half_num_values = num_values / 2; + + MergeSort(values, scratch, half_num_values, compare); + MergeSort(values + half_num_values, scratch, num_values - half_num_values, + compare); + TopDownMerge(values, scratch, half_num_values, num_values, compare); +} + +void DecreasingArgSort(const float* values, int num_values, int* indices, + int* scratch) { + std::iota(indices, indices + num_values, 0); + + MergeSort(indices, scratch, num_values, [&values](const int i, const int j) { + return values[i] > values[j]; + }); +} + +int SelectDetectionsAboveScoreThreshold(const float* values, int size, + const float threshold, + float* keep_values, int* keep_indices) { + int counter = 0; + for (int i = 0; i < size; i++) { + if (values[i] >= threshold) { + keep_values[counter] = values[i]; + keep_indices[counter] = i; + counter++; + } + } + return counter; +} + +bool ValidateBoxes(const float* decoded_boxes, const int num_boxes) { + for (int i = 0; i < num_boxes; ++i) { + // ymax>=ymin, xmax>=xmin + auto& box = reinterpret_cast(decoded_boxes)[i]; + if (box.ymin >= box.ymax || box.xmin >= box.xmax) { + return false; + } + } + return true; +} + +float ComputeIntersectionOverUnion(const float* decoded_boxes, const int i, + const int j) { + auto& box_i = reinterpret_cast(decoded_boxes)[i]; + auto& box_j = reinterpret_cast(decoded_boxes)[j]; + const float area_i = (box_i.ymax - box_i.ymin) * (box_i.xmax - box_i.xmin); + const float area_j = (box_j.ymax - box_j.ymin) * (box_j.xmax - box_j.xmin); + if (area_i <= 0 || area_j <= 0) return 0.0; + const float intersection_ymin = std::max(box_i.ymin, box_j.ymin); + const float intersection_xmin = std::max(box_i.xmin, box_j.xmin); + const float intersection_ymax = std::min(box_i.ymax, box_j.ymax); + const float intersection_xmax = std::min(box_i.xmax, box_j.xmax); + const float intersection_area = + 
std::max(intersection_ymax - intersection_ymin, 0.0) * + std::max(intersection_xmax - intersection_xmin, 0.0); + return intersection_area / (area_i + area_j - intersection_area); +} + +// NonMaxSuppressionSingleClass() prunes out the box locations with high overlap +// before selecting the highest scoring boxes (max_detections in number) +// It assumes all boxes are good in beginning and sorts based on the scores. +// If lower-scoring box has too much overlap with a higher-scoring box, +// we get rid of the lower-scoring box. +// Complexity is O(N^2) pairwise comparison between boxes +TfLiteStatus NonMaxSuppressionSingleClassHelper( + TfLiteContext* context, TfLiteNode* node, OpData* op_data, + const float* scores, int* selected, int* selected_size, + int max_detections) { + const TfLiteEvalTensor* input_box_encodings = + tflite::micro::GetEvalInput(context, node, kInputTensorBoxEncodings); + const int num_boxes = input_box_encodings->dims->data[1]; + const float non_max_suppression_score_threshold = + op_data->non_max_suppression_score_threshold; + const float intersection_over_union_threshold = + op_data->intersection_over_union_threshold; + // Maximum detections should be positive. + TF_LITE_ENSURE(context, (max_detections >= 0)); + // intersection_over_union_threshold should be positive + // and should be less than 1. 
+ TF_LITE_ENSURE(context, (intersection_over_union_threshold > 0.0f) && + (intersection_over_union_threshold <= 1.0f)); + // Validate boxes + float* decoded_boxes = reinterpret_cast( + context->GetScratchBuffer(context, op_data->decoded_boxes_idx)); + + TF_LITE_ENSURE(context, ValidateBoxes(decoded_boxes, num_boxes)); + + // threshold scores + int* keep_indices = reinterpret_cast( + context->GetScratchBuffer(context, op_data->keep_indices_idx)); + float* keep_scores = reinterpret_cast( + context->GetScratchBuffer(context, op_data->keep_scores_idx)); + int num_scores_kept = SelectDetectionsAboveScoreThreshold( + scores, num_boxes, non_max_suppression_score_threshold, keep_scores, + keep_indices); + int* sorted_indices = reinterpret_cast( + context->GetScratchBuffer(context, op_data->sorted_indices_idx)); + + // Reusing keep_indices for scratch buffer and write back its values + // after the sorting is done. + DecreasingArgSort(keep_scores, num_scores_kept, sorted_indices, keep_indices); + int counter = 0; + for (int i = 0; i < num_boxes; i++) { + if (scores[i] >= non_max_suppression_score_threshold) { + keep_indices[counter] = i; + counter++; + } + } + + const int num_boxes_kept = num_scores_kept; + const int output_size = std::min(num_boxes_kept, max_detections); + *selected_size = 0; + + int num_active_candidate = num_boxes_kept; + uint8_t* active_box_candidate = reinterpret_cast( + context->GetScratchBuffer(context, op_data->active_candidate_idx)); + + for (int row = 0; row < num_boxes_kept; row++) { + active_box_candidate[row] = 1; + } + for (int i = 0; i < num_boxes_kept; ++i) { + if (num_active_candidate == 0 || *selected_size >= output_size) break; + if (active_box_candidate[i] == 1) { + selected[(*selected_size)++] = keep_indices[sorted_indices[i]]; + active_box_candidate[i] = 0; + num_active_candidate--; + } else { + continue; + } + for (int j = i + 1; j < num_boxes_kept; ++j) { + if (active_box_candidate[j] == 1) { + float intersection_over_union = 
ComputeIntersectionOverUnion( + decoded_boxes, keep_indices[sorted_indices[i]], + keep_indices[sorted_indices[j]]); + + if (intersection_over_union > intersection_over_union_threshold) { + active_box_candidate[j] = 0; + num_active_candidate--; + } + } + } + } + + return kTfLiteOk; +} + +// This function implements a regular version of Non Maximal Suppression (NMS) +// for multiple classes where +// 1) we do NMS separately for each class across all anchors and +// 2) keep only the highest anchor scores across all classes +// 3) The worst runtime of the regular NMS is O(K*N^2) +// where N is the number of anchors and K the number of +// classes. +TfLiteStatus NonMaxSuppressionMultiClassRegularHelper(TfLiteContext* context, + TfLiteNode* node, + OpData* op_data, + const float* scores) { + const TfLiteEvalTensor* input_box_encodings = + tflite::micro::GetEvalInput(context, node, kInputTensorBoxEncodings); + const TfLiteEvalTensor* input_class_predictions = + tflite::micro::GetEvalInput(context, node, kInputTensorClassPredictions); + TfLiteEvalTensor* detection_boxes = + tflite::micro::GetEvalOutput(context, node, kOutputTensorDetectionBoxes); + TfLiteEvalTensor* detection_classes = tflite::micro::GetEvalOutput( + context, node, kOutputTensorDetectionClasses); + TfLiteEvalTensor* detection_scores = + tflite::micro::GetEvalOutput(context, node, kOutputTensorDetectionScores); + TfLiteEvalTensor* num_detections = + tflite::micro::GetEvalOutput(context, node, kOutputTensorNumDetections); + + const int num_boxes = input_box_encodings->dims->data[1]; + const int num_classes = op_data->num_classes; + const int num_detections_per_class = op_data->detections_per_class; + const int max_detections = op_data->max_detections; + const int num_classes_with_background = + input_class_predictions->dims->data[2]; + // The row index offset is 1 if background class is included and 0 otherwise. 
+ int label_offset = num_classes_with_background - num_classes; + TF_LITE_ENSURE(context, num_detections_per_class > 0); + + // For each class, perform non-max suppression. + float* class_scores = reinterpret_cast( + context->GetScratchBuffer(context, op_data->score_buffer_idx)); + int* box_indices_after_regular_non_max_suppression = reinterpret_cast( + context->GetScratchBuffer(context, op_data->buffer_idx)); + float* scores_after_regular_non_max_suppression = + reinterpret_cast(context->GetScratchBuffer( + context, op_data->scores_after_regular_non_max_suppression_idx)); + + int size_of_sorted_indices = 0; + int* sorted_indices = reinterpret_cast( + context->GetScratchBuffer(context, op_data->sorted_indices_idx)); + float* sorted_values = reinterpret_cast( + context->GetScratchBuffer(context, op_data->sorted_values_idx)); + + for (int col = 0; col < num_classes; col++) { + for (int row = 0; row < num_boxes; row++) { + // Get scores of boxes corresponding to all anchors for single class + class_scores[row] = + *(scores + row * num_classes_with_background + col + label_offset); + } + // Perform non-maximal suppression on single class + int selected_size = 0; + int* selected = reinterpret_cast( + context->GetScratchBuffer(context, op_data->selected_idx)); + TF_LITE_ENSURE_STATUS(NonMaxSuppressionSingleClassHelper( + context, node, op_data, class_scores, selected, &selected_size, + num_detections_per_class)); + // Add selected indices from non-max suppression of boxes in this class + int output_index = size_of_sorted_indices; + for (int i = 0; i < selected_size; i++) { + int selected_index = selected[i]; + + box_indices_after_regular_non_max_suppression[output_index] = + (selected_index * num_classes_with_background + col + label_offset); + scores_after_regular_non_max_suppression[output_index] = + class_scores[selected_index]; + output_index++; + } + // Sort the max scores among the selected indices + // Get the indices for top scores + int num_indices_to_sort = 
std::min(output_index, max_detections); + DecreasingPartialArgSort(scores_after_regular_non_max_suppression, + output_index, num_indices_to_sort, sorted_indices); + + // Copy values to temporary vectors + for (int row = 0; row < num_indices_to_sort; row++) { + int temp = sorted_indices[row]; + sorted_indices[row] = box_indices_after_regular_non_max_suppression[temp]; + sorted_values[row] = scores_after_regular_non_max_suppression[temp]; + } + // Copy scores and indices from temporary vectors + for (int row = 0; row < num_indices_to_sort; row++) { + box_indices_after_regular_non_max_suppression[row] = sorted_indices[row]; + scores_after_regular_non_max_suppression[row] = sorted_values[row]; + } + size_of_sorted_indices = num_indices_to_sort; + } + + // Allocate output tensors + for (int output_box_index = 0; output_box_index < max_detections; + output_box_index++) { + if (output_box_index < size_of_sorted_indices) { + const int anchor_index = floor( + box_indices_after_regular_non_max_suppression[output_box_index] / + num_classes_with_background); + const int class_index = + box_indices_after_regular_non_max_suppression[output_box_index] - + anchor_index * num_classes_with_background - label_offset; + const float selected_score = + scores_after_regular_non_max_suppression[output_box_index]; + // detection_boxes + float* decoded_boxes = reinterpret_cast( + context->GetScratchBuffer(context, op_data->decoded_boxes_idx)); + ReInterpretTensor(detection_boxes)[output_box_index] = + reinterpret_cast(decoded_boxes)[anchor_index]; + // detection_classes + tflite::micro::GetTensorData(detection_classes)[output_box_index] = + class_index; + // detection_scores + tflite::micro::GetTensorData(detection_scores)[output_box_index] = + selected_score; + } else { + ReInterpretTensor( + detection_boxes)[output_box_index] = {0.0f, 0.0f, 0.0f, 0.0f}; + // detection_classes + tflite::micro::GetTensorData(detection_classes)[output_box_index] = + 0.0f; + // detection_scores + 
tflite::micro::GetTensorData(detection_scores)[output_box_index] = + 0.0f; + } + } + tflite::micro::GetTensorData(num_detections)[0] = + size_of_sorted_indices; + + return kTfLiteOk; +} + +// This function implements a fast version of Non Maximal Suppression for +// multiple classes where +// 1) we keep the top-k scores for each anchor and +// 2) during NMS, each anchor only uses the highest class score for sorting. +// 3) Compared to standard NMS, the worst runtime of this version is O(N^2) +// instead of O(KN^2) where N is the number of anchors and K the number of +// classes. +TfLiteStatus NonMaxSuppressionMultiClassFastHelper(TfLiteContext* context, + TfLiteNode* node, + OpData* op_data, + const float* scores) { + const TfLiteEvalTensor* input_box_encodings = + tflite::micro::GetEvalInput(context, node, kInputTensorBoxEncodings); + const TfLiteEvalTensor* input_class_predictions = + tflite::micro::GetEvalInput(context, node, kInputTensorClassPredictions); + TfLiteEvalTensor* detection_boxes = + tflite::micro::GetEvalOutput(context, node, kOutputTensorDetectionBoxes); + + TfLiteEvalTensor* detection_classes = tflite::micro::GetEvalOutput( + context, node, kOutputTensorDetectionClasses); + TfLiteEvalTensor* detection_scores = + tflite::micro::GetEvalOutput(context, node, kOutputTensorDetectionScores); + TfLiteEvalTensor* num_detections = + tflite::micro::GetEvalOutput(context, node, kOutputTensorNumDetections); + + const int num_boxes = input_box_encodings->dims->data[1]; + const int num_classes = op_data->num_classes; + const int max_categories_per_anchor = op_data->max_classes_per_detection; + const int num_classes_with_background = + input_class_predictions->dims->data[2]; + + // The row index offset is 1 if background class is included and 0 otherwise. 
+ int label_offset = num_classes_with_background - num_classes; + TF_LITE_ENSURE(context, (max_categories_per_anchor > 0)); + const int num_categories_per_anchor = + std::min(max_categories_per_anchor, num_classes); + float* max_scores = reinterpret_cast( + context->GetScratchBuffer(context, op_data->score_buffer_idx)); + int* sorted_class_indices = reinterpret_cast( + context->GetScratchBuffer(context, op_data->buffer_idx)); + + for (int row = 0; row < num_boxes; row++) { + const float* box_scores = + scores + row * num_classes_with_background + label_offset; + int* class_indices = sorted_class_indices + row * num_classes; + DecreasingPartialArgSort(box_scores, num_classes, num_categories_per_anchor, + class_indices); + max_scores[row] = box_scores[class_indices[0]]; + } + + // Perform non-maximal suppression on max scores + int selected_size = 0; + int* selected = reinterpret_cast( + context->GetScratchBuffer(context, op_data->selected_idx)); + TF_LITE_ENSURE_STATUS(NonMaxSuppressionSingleClassHelper( + context, node, op_data, max_scores, selected, &selected_size, + op_data->max_detections)); + + // Allocate output tensors + int output_box_index = 0; + + for (int i = 0; i < selected_size; i++) { + int selected_index = selected[i]; + + const float* box_scores = + scores + selected_index * num_classes_with_background + label_offset; + const int* class_indices = + sorted_class_indices + selected_index * num_classes; + + for (int col = 0; col < num_categories_per_anchor; ++col) { + int box_offset = num_categories_per_anchor * output_box_index + col; + + // detection_boxes + float* decoded_boxes = reinterpret_cast( + context->GetScratchBuffer(context, op_data->decoded_boxes_idx)); + ReInterpretTensor(detection_boxes)[box_offset] = + reinterpret_cast(decoded_boxes)[selected_index]; + + // detection_classes + tflite::micro::GetTensorData(detection_classes)[box_offset] = + class_indices[col]; + + // detection_scores + 
tflite::micro::GetTensorData(detection_scores)[box_offset] = + box_scores[class_indices[col]]; + + output_box_index++; + } + } + + tflite::micro::GetTensorData(num_detections)[0] = output_box_index; + return kTfLiteOk; +} + +TfLiteStatus NonMaxSuppressionMultiClass(TfLiteContext* context, + TfLiteNode* node, OpData* op_data) { + // Get the input tensors + const TfLiteEvalTensor* input_box_encodings = + tflite::micro::GetEvalInput(context, node, kInputTensorBoxEncodings); + const TfLiteEvalTensor* input_class_predictions = + tflite::micro::GetEvalInput(context, node, kInputTensorClassPredictions); + const int num_boxes = input_box_encodings->dims->data[1]; + const int num_classes = op_data->num_classes; + + TF_LITE_ENSURE_EQ(context, input_class_predictions->dims->data[0], + kBatchSize); + TF_LITE_ENSURE_EQ(context, input_class_predictions->dims->data[1], num_boxes); + const int num_classes_with_background = + input_class_predictions->dims->data[2]; + + TF_LITE_ENSURE(context, (num_classes_with_background - num_classes <= 1)); + TF_LITE_ENSURE(context, (num_classes_with_background >= num_classes)); + + const float* scores; + switch (input_class_predictions->type) { + case kTfLiteFloat32: + scores = tflite::micro::GetTensorData(input_class_predictions); + break; + default: + // Unsupported type. + return kTfLiteError; + } + + if (op_data->use_regular_non_max_suppression) { + TF_LITE_ENSURE_STATUS(NonMaxSuppressionMultiClassRegularHelper( + context, node, op_data, scores)); + } else { + TF_LITE_ENSURE_STATUS( + NonMaxSuppressionMultiClassFastHelper(context, node, op_data, scores)); + } + + return kTfLiteOk; +} + +TfLiteStatus Eval(TfLiteContext* context, TfLiteNode* node) { + TF_LITE_ENSURE(context, (kBatchSize == 1)); + auto* op_data = static_cast(node->user_data); + + // These two functions correspond to two blocks in the Object Detection model. 
+ // In future, we would like to break the custom op in two blocks, which is + // currently not feasible because we would like to input quantized inputs + // and do all calculations in float. Mixed quantized/float calculations are + // currently not supported in TFLite. + + // This fills in temporary decoded_boxes + // by transforming input_box_encodings and input_anchors from + // CenterSizeEncodings to BoxCornerEncoding + TF_LITE_ENSURE_STATUS(DecodeCenterSizeBoxes(context, node, op_data)); + + // This fills in the output tensors + // by choosing effective set of decoded boxes + // based on Non Maximal Suppression, i.e. selecting + // highest scoring non-overlapping boxes. + TF_LITE_ENSURE_STATUS(NonMaxSuppressionMultiClass(context, node, op_data)); + + return kTfLiteOk; +} +} // namespace + +TFLMRegistration* Register_DETECTION_POSTPROCESS() { + static TFLMRegistration r = tflite::micro::RegisterOp(Init, Prepare, Eval); + return &r; +} + +} // namespace tflite diff --git a/tensorflow/lite/micro/kernels/detection_postprocess_flexbuffers_generated_data.cc b/tensorflow/lite/micro/kernels/detection_postprocess_flexbuffers_generated_data.cc new file mode 100644 index 0000000..bd1f05f --- /dev/null +++ b/tensorflow/lite/micro/kernels/detection_postprocess_flexbuffers_generated_data.cc @@ -0,0 +1,68 @@ +/* Copyright 2022 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/ + +// This file is generated. See: +// tflite-micro/tensorflow/lite/micro/kernels/test_data_generation/README.md + +#include "tensorflow/lite/micro/kernels/detection_postprocess_flexbuffers_generated_data.h" + +const int g_gen_data_size_none_regular_nms = 242; +alignas(4) const unsigned char g_gen_data_none_regular_nms[] = { + 0x6d, 0x61, 0x78, 0x5f, 0x64, 0x65, 0x74, 0x65, 0x63, 0x74, 0x69, 0x6f, + 0x6e, 0x73, 0x00, 0x6d, 0x61, 0x78, 0x5f, 0x63, 0x6c, 0x61, 0x73, 0x73, + 0x65, 0x73, 0x5f, 0x70, 0x65, 0x72, 0x5f, 0x64, 0x65, 0x74, 0x65, 0x63, + 0x74, 0x69, 0x6f, 0x6e, 0x00, 0x64, 0x65, 0x74, 0x65, 0x63, 0x74, 0x69, + 0x6f, 0x6e, 0x73, 0x5f, 0x70, 0x65, 0x72, 0x5f, 0x63, 0x6c, 0x61, 0x73, + 0x73, 0x00, 0x75, 0x73, 0x65, 0x5f, 0x72, 0x65, 0x67, 0x75, 0x6c, 0x61, + 0x72, 0x5f, 0x6e, 0x6d, 0x73, 0x00, 0x6e, 0x6d, 0x73, 0x5f, 0x73, 0x63, + 0x6f, 0x72, 0x65, 0x5f, 0x74, 0x68, 0x72, 0x65, 0x73, 0x68, 0x6f, 0x6c, + 0x64, 0x00, 0x6e, 0x6d, 0x73, 0x5f, 0x69, 0x6f, 0x75, 0x5f, 0x74, 0x68, + 0x72, 0x65, 0x73, 0x68, 0x6f, 0x6c, 0x64, 0x00, 0x6e, 0x75, 0x6d, 0x5f, + 0x63, 0x6c, 0x61, 0x73, 0x73, 0x65, 0x73, 0x00, 0x79, 0x5f, 0x73, 0x63, + 0x61, 0x6c, 0x65, 0x00, 0x78, 0x5f, 0x73, 0x63, 0x61, 0x6c, 0x65, 0x00, + 0x68, 0x5f, 0x73, 0x63, 0x61, 0x6c, 0x65, 0x00, 0x77, 0x5f, 0x73, 0x63, + 0x61, 0x6c, 0x65, 0x00, 0x0b, 0x78, 0x12, 0x94, 0xa4, 0x43, 0x58, 0x33, + 0x6a, 0x11, 0x22, 0x2b, 0x0b, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, + 0x0b, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0xa0, 0x40, + 0x01, 0x00, 0x00, 0x00, 0x03, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x3f, + 0x00, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0xa0, 0x40, 0x00, 0x00, 0x20, 0x41, 0x00, 0x00, 0x20, 0x41, + 0x06, 0x0e, 0x06, 0x06, 0x0e, 0x0e, 0x06, 0x6a, 0x0e, 0x0e, 0x0e, 0x37, + 0x26, 0x01, +}; +const int g_gen_data_size_regular_nms = 242; +alignas(4) const unsigned char 
g_gen_data_regular_nms[] = { + 0x6d, 0x61, 0x78, 0x5f, 0x64, 0x65, 0x74, 0x65, 0x63, 0x74, 0x69, 0x6f, + 0x6e, 0x73, 0x00, 0x6d, 0x61, 0x78, 0x5f, 0x63, 0x6c, 0x61, 0x73, 0x73, + 0x65, 0x73, 0x5f, 0x70, 0x65, 0x72, 0x5f, 0x64, 0x65, 0x74, 0x65, 0x63, + 0x74, 0x69, 0x6f, 0x6e, 0x00, 0x64, 0x65, 0x74, 0x65, 0x63, 0x74, 0x69, + 0x6f, 0x6e, 0x73, 0x5f, 0x70, 0x65, 0x72, 0x5f, 0x63, 0x6c, 0x61, 0x73, + 0x73, 0x00, 0x75, 0x73, 0x65, 0x5f, 0x72, 0x65, 0x67, 0x75, 0x6c, 0x61, + 0x72, 0x5f, 0x6e, 0x6d, 0x73, 0x00, 0x6e, 0x6d, 0x73, 0x5f, 0x73, 0x63, + 0x6f, 0x72, 0x65, 0x5f, 0x74, 0x68, 0x72, 0x65, 0x73, 0x68, 0x6f, 0x6c, + 0x64, 0x00, 0x6e, 0x6d, 0x73, 0x5f, 0x69, 0x6f, 0x75, 0x5f, 0x74, 0x68, + 0x72, 0x65, 0x73, 0x68, 0x6f, 0x6c, 0x64, 0x00, 0x6e, 0x75, 0x6d, 0x5f, + 0x63, 0x6c, 0x61, 0x73, 0x73, 0x65, 0x73, 0x00, 0x79, 0x5f, 0x73, 0x63, + 0x61, 0x6c, 0x65, 0x00, 0x78, 0x5f, 0x73, 0x63, 0x61, 0x6c, 0x65, 0x00, + 0x68, 0x5f, 0x73, 0x63, 0x61, 0x6c, 0x65, 0x00, 0x77, 0x5f, 0x73, 0x63, + 0x61, 0x6c, 0x65, 0x00, 0x0b, 0x78, 0x12, 0x94, 0xa4, 0x43, 0x58, 0x33, + 0x6a, 0x11, 0x22, 0x2b, 0x0b, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, + 0x0b, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0xa0, 0x40, + 0x01, 0x00, 0x00, 0x00, 0x03, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x3f, + 0x00, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, + 0x00, 0x00, 0xa0, 0x40, 0x00, 0x00, 0x20, 0x41, 0x00, 0x00, 0x20, 0x41, + 0x06, 0x0e, 0x06, 0x06, 0x0e, 0x0e, 0x06, 0x6a, 0x0e, 0x0e, 0x0e, 0x37, + 0x26, 0x01, +}; diff --git a/tensorflow/lite/micro/kernels/detection_postprocess_flexbuffers_generated_data.h b/tensorflow/lite/micro/kernels/detection_postprocess_flexbuffers_generated_data.h new file mode 100644 index 0000000..f5b9eae --- /dev/null +++ b/tensorflow/lite/micro/kernels/detection_postprocess_flexbuffers_generated_data.h @@ -0,0 +1,25 @@ +/* Copyright 2020 The TensorFlow Authors. All Rights Reserved. 
+ +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#ifndef TENSORFLOW_LITE_MICRO_KERNELS_FLEXBUFFERS_GENERATED_DATA_H +#define TENSORFLOW_LITE_MICRO_KERNELS_FLEXBUFFERS_GENERATED_DATA_H + +extern const int g_gen_data_size_none_regular_nms; +extern const unsigned char g_gen_data_none_regular_nms[]; + +extern const int g_gen_data_size_regular_nms; +extern const unsigned char g_gen_data_regular_nms[]; + +#endif diff --git a/tensorflow/lite/micro/kernels/detection_postprocess_test.cc b/tensorflow/lite/micro/kernels/detection_postprocess_test.cc new file mode 100644 index 0000000..728e2e7 --- /dev/null +++ b/tensorflow/lite/micro/kernels/detection_postprocess_test.cc @@ -0,0 +1,343 @@ +/* Copyright 2021 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/ + +#include "flatbuffers/flexbuffers.h" +#include "tensorflow/lite/c/builtin_op_data.h" +#include "tensorflow/lite/c/common.h" +#include "tensorflow/lite/micro/kernels/kernel_runner.h" +#include "tensorflow/lite/micro/micro_mutable_op_resolver.h" +#include "tensorflow/lite/micro/test_helpers.h" +#include "tensorflow/lite/micro/testing/micro_test.h" + +// See: tensorflow/lite/micro/kernels/detection_postprocess_test/README.md +#include "tensorflow/lite/micro/kernels/detection_postprocess_flexbuffers_generated_data.h" + +namespace tflite { +namespace testing { +namespace { + +// Common inputs and outputs. + +int kInputShape1[] = {3, 1, 6, 4}; +int kInputShape2[] = {3, 1, 6, 3}; +int kInputShape3[] = {2, 6, 4}; +int kOutputShape1[] = {3, 1, 3, 4}; +int kOutputShape2[] = {2, 1, 3}; +int kOutputShape3[] = {2, 1, 3}; +int kOutputShape4[] = {1, 1}; + +// six boxes in center-size encoding +constexpr float kInputData1[] = { + 0.0, 0.0, 0.0, 0.0, // box #1 + 0.0, 1.0, 0.0, 0.0, // box #2 + 0.0, -1.0, 0.0, 0.0, // box #3 + 0.0, 0.0, 0.0, 0.0, // box #4 + 0.0, 1.0, 0.0, 0.0, // box #5 + 0.0, 0.0, 0.0, 0.0 // box #6 +}; + +// class scores - two classes with background +constexpr float kInputData2[] = {0., .9, .8, 0., .75, .72, 0., .6, .5, + 0., .93, .95, 0., .5, .4, 0., .3, .2}; + +// six anchors in center-size encoding +constexpr float kInputData3[] = { + 0.5, 0.5, 1.0, 1.0, // anchor #1 + 0.5, 0.5, 1.0, 1.0, // anchor #2 + 0.5, 0.5, 1.0, 1.0, // anchor #3 + 0.5, 10.5, 1.0, 1.0, // anchor #4 + 0.5, 10.5, 1.0, 1.0, // anchor #5 + 0.5, 100.5, 1.0, 1.0 // anchor #6 +}; +// Same boxes in box-corner encoding: +// { 0.0, 0.0, 1.0, 1.0, +// 0.0, 0.1, 1.0, 1.1, +// 0.0, -0.1, 1.0, 0.9, +// 0.0, 10.0, 1.0, 11.0, +// 0.0, 10.1, 1.0, 11.1, +// 0.0, 100.0, 1.0, 101.0} + +constexpr float kGolden1[] = {0.0, 10.0, 1.0, 11.0, 0.0, 0.0, + 1.0, 1.0, 0.0, 100.0, 1.0, 101.0}; +constexpr float kGolden2[] = {1, 0, 0}; 
+constexpr float kGolden3[] = {0.95, 0.9, 0.3}; +constexpr float kGolden4[] = {3.0}; + +void TestDetectionPostprocess(int* input_dims_data1, const float* input_data1, + int* input_dims_data2, const float* input_data2, + int* input_dims_data3, const float* input_data3, + int* output_dims_data1, float* output_data1, + int* output_dims_data2, float* output_data2, + int* output_dims_data3, float* output_data3, + int* output_dims_data4, float* output_data4, + const float* golden1, const float* golden2, + const float* golden3, const float* golden4, + const float tolerance, bool use_regular_nms) { + TfLiteIntArray* input_dims1 = IntArrayFromInts(input_dims_data1); + TfLiteIntArray* input_dims2 = IntArrayFromInts(input_dims_data2); + TfLiteIntArray* input_dims3 = IntArrayFromInts(input_dims_data3); + TfLiteIntArray* output_dims1 = IntArrayFromInts(output_dims_data1); + TfLiteIntArray* output_dims2 = IntArrayFromInts(output_dims_data2); + TfLiteIntArray* output_dims3 = IntArrayFromInts(output_dims_data3); + TfLiteIntArray* output_dims4 = IntArrayFromInts(output_dims_data4); + + constexpr int inputs_size = 3; + constexpr int outputs_size = 4; + constexpr int tensors_size = inputs_size + outputs_size; + + TfLiteTensor tensors[tensors_size]; + tensors[0] = CreateTensor(input_data1, input_dims1); + tensors[1] = CreateTensor(input_data2, input_dims2); + tensors[2] = CreateTensor(input_data3, input_dims3); + tensors[3] = CreateTensor(output_data1, output_dims1); + tensors[4] = CreateTensor(output_data2, output_dims2); + tensors[5] = CreateTensor(output_data3, output_dims3); + tensors[6] = CreateTensor(output_data4, output_dims4); + + MicroMutableOpResolver<1> resolver; + TF_LITE_MICRO_EXPECT_EQ(resolver.AddDetectionPostprocess(), kTfLiteOk); + const TFLMRegistration* registration = + resolver.FindOp("TFLite_Detection_PostProcess"); + TF_LITE_MICRO_EXPECT(registration != nullptr); + + int inputs_array_data[] = {3, 0, 1, 2}; + TfLiteIntArray* inputs_array = 
IntArrayFromInts(inputs_array_data); + int outputs_array_data[] = {4, 3, 4, 5, 6}; + TfLiteIntArray* outputs_array = IntArrayFromInts(outputs_array_data); + + micro::KernelRunner runner(*registration, tensors, tensors_size, inputs_array, + outputs_array, nullptr); + + // Using generated data as input to operator. + int data_size = 0; + const unsigned char* init_data = nullptr; + if (use_regular_nms) { + init_data = g_gen_data_regular_nms; + data_size = g_gen_data_size_regular_nms; + } else { + init_data = g_gen_data_none_regular_nms; + data_size = g_gen_data_size_none_regular_nms; + } + + // TfLite uses a char* for the raw bytes whereas flexbuffers use an unsigned + // char*. This small discrepancy results in compiler warnings unless we + // reinterpret_cast right before passing in the flexbuffer bytes to the + // KernelRunner. + TF_LITE_MICRO_EXPECT_EQ( + kTfLiteOk, runner.InitAndPrepare(reinterpret_cast(init_data), + data_size)); + + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, runner.Invoke()); + + int output_elements_count1 = ElementCount(*tensors[3].dims); + int output_elements_count2 = ElementCount(*tensors[4].dims); + int output_elements_count3 = ElementCount(*tensors[5].dims); + int output_elements_count4 = ElementCount(*tensors[6].dims); + + for (int i = 0; i < output_elements_count1; ++i) { + TF_LITE_MICRO_EXPECT_NEAR(golden1[i], output_data1[i], tolerance); + } + for (int i = 0; i < output_elements_count2; ++i) { + TF_LITE_MICRO_EXPECT_NEAR(golden2[i], output_data2[i], tolerance); + } + for (int i = 0; i < output_elements_count3; ++i) { + TF_LITE_MICRO_EXPECT_NEAR(golden3[i], output_data3[i], tolerance); + } + for (int i = 0; i < output_elements_count4; ++i) { + TF_LITE_MICRO_EXPECT_NEAR(golden4[i], output_data4[i], tolerance); + } +} +} // namespace +} // namespace testing +} // namespace tflite + +TF_LITE_MICRO_TESTS_BEGIN + +TF_LITE_MICRO_TEST(DetectionPostprocessFloatFastNMS) { + float output_data1[12]; + float output_data2[3]; + float output_data3[3]; + float 
output_data4[1]; + + tflite::testing::TestDetectionPostprocess( + tflite::testing::kInputShape1, tflite::testing::kInputData1, + tflite::testing::kInputShape2, tflite::testing::kInputData2, + tflite::testing::kInputShape3, tflite::testing::kInputData3, + tflite::testing::kOutputShape1, output_data1, + tflite::testing::kOutputShape2, output_data2, + tflite::testing::kOutputShape3, output_data3, + tflite::testing::kOutputShape4, output_data4, tflite::testing::kGolden1, + tflite::testing::kGolden2, tflite::testing::kGolden3, + tflite::testing::kGolden4, + /* tolerance */ 0, /* Use regular NMS: */ false); +} + +TF_LITE_MICRO_TEST(DetectionPostprocessFloatRegularNMS) { + float output_data1[12]; + float output_data2[3]; + float output_data3[3]; + float output_data4[1]; + const float kGolden1[] = {0.0, 10.0, 1.0, 11.0, 0.0, 10.0, + 1.0, 11.0, 0.0, 0.0, 0.0, 0.0}; + const float kGolden3[] = {0.95, 0.9, 0.0}; + const float kGolden4[] = {2.0}; + + tflite::testing::TestDetectionPostprocess( + tflite::testing::kInputShape1, tflite::testing::kInputData1, + tflite::testing::kInputShape2, tflite::testing::kInputData2, + tflite::testing::kInputShape3, tflite::testing::kInputData3, + tflite::testing::kOutputShape1, output_data1, + tflite::testing::kOutputShape2, output_data2, + tflite::testing::kOutputShape3, output_data3, + tflite::testing::kOutputShape4, output_data4, kGolden1, + tflite::testing::kGolden2, kGolden3, kGolden4, + /* tolerance */ 1e-1, /* Use regular NMS: */ true); +} + +TF_LITE_MICRO_TEST( + DetectionPostprocessFloatFastNMSwithNoBackgroundClassAndKeypoints) { + int kInputShape1[] = {3, 1, 6, 5}; + int kInputShape2[] = {3, 1, 6, 2}; + + // six boxes in center-size encoding + const float kInputData1[] = { + 0.0, 0.0, 0.0, 0.0, 1.0, // box #1 + 0.0, 1.0, 0.0, 0.0, 1.0, // box #2 + 0.0, -1.0, 0.0, 0.0, 1.0, // box #3 + 0.0, 0.0, 0.0, 0.0, 1.0, // box #4 + 0.0, 1.0, 0.0, 0.0, 1.0, // box #5 + 0.0, 0.0, 0.0, 0.0, 1.0, // box #6 + }; + + // class scores - two classes 
without background + const float kInputData2[] = {.9, .8, .75, .72, .6, .5, + .93, .95, .5, .4, .3, .2}; + + float output_data1[12]; + float output_data2[3]; + float output_data3[3]; + float output_data4[1]; + + tflite::testing::TestDetectionPostprocess( + kInputShape1, kInputData1, kInputShape2, kInputData2, + tflite::testing::kInputShape3, tflite::testing::kInputData3, + tflite::testing::kOutputShape1, output_data1, + tflite::testing::kOutputShape2, output_data2, + tflite::testing::kOutputShape3, output_data3, + tflite::testing::kOutputShape4, output_data4, tflite::testing::kGolden1, + tflite::testing::kGolden2, tflite::testing::kGolden3, + tflite::testing::kGolden4, + /* tolerance */ 0, /* Use regular NMS: */ false); +} + +TF_LITE_MICRO_TEST( + DetectionPostprocessFloatRegularNMSwithNoBackgroundClassAndKeypoints) { + int kInputShape2[] = {3, 1, 6, 2}; + + // class scores - two classes without background + const float kInputData2[] = {.9, .8, .75, .72, .6, .5, + .93, .95, .5, .4, .3, .2}; + + const float kGolden1[] = {0.0, 10.0, 1.0, 11.0, 0.0, 10.0, + 1.0, 11.0, 0.0, 0.0, 0.0, 0.0}; + const float kGolden3[] = {0.95, 0.9, 0.0}; + const float kGolden4[] = {2.0}; + + float output_data1[12]; + float output_data2[3]; + float output_data3[3]; + float output_data4[1]; + + tflite::testing::TestDetectionPostprocess( + tflite::testing::kInputShape1, tflite::testing::kInputData1, kInputShape2, + kInputData2, tflite::testing::kInputShape3, tflite::testing::kInputData3, + tflite::testing::kOutputShape1, output_data1, + tflite::testing::kOutputShape2, output_data2, + tflite::testing::kOutputShape3, output_data3, + tflite::testing::kOutputShape4, output_data4, kGolden1, + tflite::testing::kGolden2, kGolden3, kGolden4, + /* tolerance */ 1e-1, /* Use regular NMS: */ true); +} + +TF_LITE_MICRO_TEST( + DetectionPostprocessFloatFastNMSWithBackgroundClassAndKeypoints) { + int kInputShape1[] = {3, 1, 6, 5}; + + // six boxes in center-size encoding + const float kInputData1[] = { + 
0.0, 0.0, 0.0, 0.0, 1.0, // box #1 + 0.0, 1.0, 0.0, 0.0, 1.0, // box #2 + 0.0, -1.0, 0.0, 0.0, 1.0, // box #3 + 0.0, 0.0, 0.0, 0.0, 1.0, // box #4 + 0.0, 1.0, 0.0, 0.0, 1.0, // box #5 + 0.0, 0.0, 0.0, 0.0, 1.0, // box #6 + }; + + float output_data1[12]; + float output_data2[3]; + float output_data3[3]; + float output_data4[1]; + + tflite::testing::TestDetectionPostprocess( + kInputShape1, kInputData1, tflite::testing::kInputShape2, + tflite::testing::kInputData2, tflite::testing::kInputShape3, + tflite::testing::kInputData3, tflite::testing::kOutputShape1, + output_data1, tflite::testing::kOutputShape2, output_data2, + tflite::testing::kOutputShape3, output_data3, + tflite::testing::kOutputShape4, output_data4, tflite::testing::kGolden1, + tflite::testing::kGolden2, tflite::testing::kGolden3, + tflite::testing::kGolden4, + /* tolerance */ 0, /* Use regular NMS: */ false); +} + +TF_LITE_MICRO_TEST( + DetectionPostprocessFloatFNMSSwithNoBackgroundClassAndKeypointsStableSort) { + int kInputShape1[] = {3, 1, 6, 5}; + int kInputShape2[] = {3, 1, 6, 2}; + + // six boxes in center-size encoding + const float kInputData1[] = { + 0.0, 0.0, 0.0, 0.0, 1.0, // box #1 + 0.0, 1.0, 0.0, 0.0, 1.0, // box #2 + 0.0, -1.0, 0.0, 0.0, 1.0, // box #3 + 0.0, 0.0, 0.0, 0.0, 1.0, // box #4 + 0.0, 1.0, 0.0, 0.0, 1.0, // box #5 + 0.0, 0.0, 0.0, 0.0, 1.0, // box #6 + }; + + // class scores - two classes without background + const float kInputData2[] = {0.015625, 0.007812, 0.003906, 0.015625, + 0.015625, 0.007812, 0.019531, 0.019531, + 0.007812, 0.003906, 0.003906, 0.003906}; + float output_data1[12]; + float output_data2[3]; + float output_data3[3]; + float output_data4[1]; + + const float kGolden2[] = {0, 0, 0}; + const float kGolden3[] = {0.0196078, 0.0156863, 0.00392157}; + + tflite::testing::TestDetectionPostprocess( + kInputShape1, kInputData1, kInputShape2, kInputData2, + tflite::testing::kInputShape3, tflite::testing::kInputData3, + tflite::testing::kOutputShape1, output_data1, + 
tflite::testing::kOutputShape2, output_data2, + tflite::testing::kOutputShape3, output_data3, + tflite::testing::kOutputShape4, output_data4, tflite::testing::kGolden1, + kGolden2, kGolden3, tflite::testing::kGolden4, + /* tolerance */ 3e-1, /* Use regular NMS: */ false); +} + +TF_LITE_MICRO_TESTS_END diff --git a/tensorflow/lite/micro/kernels/div.cc b/tensorflow/lite/micro/kernels/div.cc new file mode 100644 index 0000000..cc90e22 --- /dev/null +++ b/tensorflow/lite/micro/kernels/div.cc @@ -0,0 +1,208 @@ +/* Copyright 2022 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/ + +#include "tensorflow/lite/kernels/internal/reference/div.h" + +#include "tensorflow/lite/c/common.h" +#include "tensorflow/lite/kernels/internal/quantization_util.h" +#include "tensorflow/lite/kernels/internal/reference/process_broadcast_shapes.h" +#include "tensorflow/lite/kernels/internal/types.h" +#include "tensorflow/lite/kernels/kernel_util.h" +#include "tensorflow/lite/micro/kernels/kernel_util.h" +#include "tensorflow/lite/micro/micro_log.h" + +namespace tflite { +namespace { + +constexpr int kInputTensor1 = 0; +constexpr int kInputTensor2 = 1; +constexpr int kOutputTensor = 0; + +struct OpDataDiv { + // Parameters used in the quantized paths where the output is 8bit + int32_t input1_zero_point; + int32_t input2_zero_point; + int32_t output_zero_point; + int32_t output_activation_min; + int32_t output_activation_max; + + // Parameters used in all quantized paths + int32_t output_multiplier; + int output_shift; +}; + +TfLiteStatus CalculateOpDataDiv(TfLiteContext* context, TfLiteTensor* input1, + TfLiteTensor* input2, TfLiteTensor* output, + TfLiteDivParams* params, OpDataDiv* data) { + TF_LITE_ENSURE_TYPES_EQ(context, input1->type, input2->type); + TF_LITE_ENSURE_TYPES_EQ(context, input1->type, output->type); + + if (output->type == kTfLiteInt8) { + TF_LITE_ENSURE_STATUS(CalculateActivationRangeQuantized( + context, params->activation, output, &data->output_activation_min, + &data->output_activation_max)); + const double real_multiplier = static_cast( + input1->params.scale / (input2->params.scale * output->params.scale)); + QuantizeMultiplier(real_multiplier, &data->output_multiplier, + &data->output_shift); + data->input1_zero_point = input1->params.zero_point; + data->input2_zero_point = input2->params.zero_point; + data->output_zero_point = output->params.zero_point; + } + + return kTfLiteOk; +} + +void* Init(TfLiteContext* context, const char* buffer, size_t length) { + 
TFLITE_DCHECK(context->AllocatePersistentBuffer != nullptr); + return context->AllocatePersistentBuffer(context, sizeof(OpDataDiv)); +} + +TfLiteStatus Prepare(TfLiteContext* context, TfLiteNode* node) { + TFLITE_DCHECK(node->user_data != nullptr); + TFLITE_DCHECK(node->builtin_data != nullptr); + + MicroContext* micro_context = GetMicroContext(context); + TfLiteTensor* input1 = + micro_context->AllocateTempInputTensor(node, kInputTensor1); + TF_LITE_ENSURE(context, input1 != nullptr); + TfLiteTensor* input2 = + micro_context->AllocateTempInputTensor(node, kInputTensor2); + TF_LITE_ENSURE(context, input2 != nullptr); + TfLiteTensor* output = + micro_context->AllocateTempOutputTensor(node, kOutputTensor); + TF_LITE_ENSURE(context, output != nullptr); + + OpDataDiv* data = static_cast(node->user_data); + auto* params = reinterpret_cast(node->builtin_data); + + TF_LITE_ENSURE_STATUS( + CalculateOpDataDiv(context, input1, input2, output, params, data)); + + micro_context->DeallocateTempTfLiteTensor(input1); + micro_context->DeallocateTempTfLiteTensor(input2); + micro_context->DeallocateTempTfLiteTensor(output); + return kTfLiteOk; +} + +void EvalDiv(TfLiteContext* context, TfLiteNode* node, TfLiteDivParams* params, + const OpDataDiv* data, const TfLiteEvalTensor* input1, + const TfLiteEvalTensor* input2, TfLiteEvalTensor* output) { + tflite::ArithmeticParams op_params = {}; + +#define TF_LITE_DIV(type, opname, data_type) \ + data_type output_activation_min, output_activation_max; \ + CalculateActivationRange(params->activation, &output_activation_min, \ + &output_activation_max); \ + SetActivationParams(output_activation_min, output_activation_max, \ + &op_params); \ + type::opname(op_params, tflite::micro::GetTensorShape(input1), \ + tflite::micro::GetTensorData(input1), \ + tflite::micro::GetTensorShape(input2), \ + tflite::micro::GetTensorData(input2), \ + tflite::micro::GetTensorShape(output), \ + tflite::micro::GetTensorData(output)) + + bool requires_broadcast = 
reference_ops::ProcessBroadcastShapes( + tflite::micro::GetTensorShape(input1), + tflite::micro::GetTensorShape(input2), &op_params); + + if (requires_broadcast) { + TF_LITE_DIV(reference_ops, BroadcastDivSlow, float); + } else { + TF_LITE_DIV(reference_ops, Div, float); + } +#undef TF_LITE_DIV +} + +TfLiteStatus EvalQuantized(TfLiteContext* context, TfLiteNode* node, + TfLiteDivParams* params, const OpDataDiv* data, + const TfLiteEvalTensor* input1, + const TfLiteEvalTensor* input2, + TfLiteEvalTensor* output) { + tflite::ArithmeticParams op_params = {}; + +#define TF_LITE_DIV(type, opname, dtype) \ + type::opname(op_params, tflite::micro::GetTensorShape(input1), \ + tflite::micro::GetTensorData(input1), \ + tflite::micro::GetTensorShape(input2), \ + tflite::micro::GetTensorData(input2), \ + tflite::micro::GetTensorShape(output), \ + tflite::micro::GetTensorData(output)) + + if (input1->type == kTfLiteInt8 && input2->type == kTfLiteInt8 && + output->type == kTfLiteInt8) { + SetActivationParams(data->output_activation_min, + data->output_activation_max, &op_params); + op_params.input1_offset = -data->input1_zero_point; + op_params.input2_offset = -data->input2_zero_point; + op_params.output_offset = data->output_zero_point; + op_params.output_multiplier = data->output_multiplier; + op_params.output_shift = data->output_shift; + + bool requires_broadcast = reference_ops::ProcessBroadcastShapes( + tflite::micro::GetTensorShape(input1), + tflite::micro::GetTensorShape(input2), &op_params); + + if (requires_broadcast) { + TF_LITE_DIV(reference_ops, BroadcastDivSlow, int8_t); + } else { + TF_LITE_DIV(reference_ops, Div, int8_t); + } +#undef TF_LITE_DIV + } else { + MicroPrintf("Unsupported combination of input and output types in DIV."); + return kTfLiteError; + } + + return kTfLiteOk; +} + +TfLiteStatus Eval(TfLiteContext* context, TfLiteNode* node) { + TFLITE_DCHECK(node->builtin_data != nullptr); + auto* params = static_cast(node->builtin_data); + 
TFLITE_DCHECK(node->user_data != nullptr); + auto* data = static_cast(node->user_data); + + const TfLiteEvalTensor* input1 = + tflite::micro::GetEvalInput(context, node, kInputTensor1); + const TfLiteEvalTensor* input2 = + tflite::micro::GetEvalInput(context, node, kInputTensor2); + TfLiteEvalTensor* output = + tflite::micro::GetEvalOutput(context, node, kOutputTensor); + + if (output->type == kTfLiteFloat32) { + EvalDiv(context, node, params, data, input1, input2, output); + } else if (output->type == kTfLiteInt8) { + TF_LITE_ENSURE_OK(context, EvalQuantized(context, node, params, data, + input1, input2, output)); + } else { + MicroPrintf( + "DIV only supports FLOAT32, quantized INT8 " + "now, got type %s (%d).", + TfLiteTypeGetName(output->type), output->type); + return kTfLiteError; + } + + return kTfLiteOk; +} + +} // namespace + +TFLMRegistration Register_DIV() { + return tflite::micro::RegisterOp(Init, Prepare, Eval); +} + +} // namespace tflite diff --git a/tensorflow/lite/micro/kernels/div_test.cc b/tensorflow/lite/micro/kernels/div_test.cc new file mode 100644 index 0000000..e020255 --- /dev/null +++ b/tensorflow/lite/micro/kernels/div_test.cc @@ -0,0 +1,377 @@ +/* Copyright 2020 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/ + +#include + +#include "tensorflow/lite/c/builtin_op_data.h" +#include "tensorflow/lite/c/common.h" +#include "tensorflow/lite/micro/kernels/kernel_runner.h" +#include "tensorflow/lite/micro/test_helpers.h" +#include "tensorflow/lite/micro/testing/micro_test.h" + +namespace tflite { +namespace testing { +namespace { + +void ExecuteDivTest(TfLiteTensor* tensors, int tensors_count, + TfLiteFusedActivation activation) { + TfLiteDivParams builtin_data = {}; + builtin_data.activation = activation; + + int kInputArrayData[] = {2, 0, 1}; + TfLiteIntArray* inputs_array = IntArrayFromInts(kInputArrayData); + int kOutputArrayData[] = {1, 2}; + TfLiteIntArray* outputs_array = IntArrayFromInts(kOutputArrayData); + + const TFLMRegistration registration = tflite::Register_DIV(); + micro::KernelRunner runner(registration, tensors, tensors_count, inputs_array, + outputs_array, static_cast(&builtin_data)); + + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, runner.InitAndPrepare()); + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, runner.Invoke()); +} + +template +void TestDiv(int* input1_dims_data, const T* input1_data, int* input2_dims_data, + const T* input2_data, int* expected_dims, const T* expected_data, + T* output_data, TfLiteFusedActivation activation) { + TfLiteIntArray* input1_dims = IntArrayFromInts(input1_dims_data); + TfLiteIntArray* input2_dims = IntArrayFromInts(input2_dims_data); + TfLiteIntArray* output_dims = IntArrayFromInts(expected_dims); + const int output_count = ElementCount(*output_dims); + + constexpr int inputs_size = 2; + constexpr int outputs_size = 1; + constexpr int tensors_size = inputs_size + outputs_size; + TfLiteTensor tensors[tensors_size] = { + CreateTensor(input1_data, input1_dims), + CreateTensor(input2_data, input2_dims), + CreateTensor(output_data, output_dims), + }; + constexpr int tensors_count = std::extent::value; + + ExecuteDivTest(tensors, tensors_count, activation); + + 
constexpr float kTolerance = 1e-5; + for (int i = 0; i < output_count; i++) { + TF_LITE_MICRO_EXPECT_NEAR(expected_data[i], output_data[i], kTolerance); + } +} + +// For quantized Div, the error shouldn't exceed (2*step + step^2). +inline float GetTolerance(int min, int max) { + const float kQuantizedStep = (max - min) / 255.0f; + const float kQuantizedTolerance = + 2.0f * kQuantizedStep + kQuantizedStep * kQuantizedStep; + return kQuantizedTolerance; +} + +// min/max are used to compute scale, zero-point, compare tolerance +template +struct TestQuantParams { + float data_min; // input and output data minimum value + float data_max; // input and output data maximum value + T* input1_data; // quantized input1 storage + T* input2_data; // quantized input2 storage + T* output_data; // quantized output storage +}; + +template +void TestDivQuantized(int* input1_dims_data, const float* input1_data, + int* input2_dims_data, const float* input2_data, + int* expected_dims, const float* expected_data, + float* output_data, TfLiteFusedActivation activation, + const TestQuantParams* params) { + TfLiteIntArray* input1_dims = IntArrayFromInts(input1_dims_data); + TfLiteIntArray* input2_dims = IntArrayFromInts(input2_dims_data); + TfLiteIntArray* output_dims = IntArrayFromInts(expected_dims); + const int output_count = ElementCount(*output_dims); + + const float scale = ScaleFromMinMax(params->data_min, params->data_max); + const int zero_point = + ZeroPointFromMinMax(params->data_min, params->data_max); + + TfLiteTensor tensors[] = { + CreateQuantizedTensor(input1_data, params->input1_data, input1_dims, + scale, zero_point), + CreateQuantizedTensor(input2_data, params->input2_data, input2_dims, + scale, zero_point), + CreateQuantizedTensor(params->output_data, output_dims, scale, + zero_point), + }; + constexpr int kTensorsCount = std::extent::value; + + ExecuteDivTest(tensors, kTensorsCount, activation); + + Dequantize(params->output_data, output_count, scale, zero_point, 
output_data); + const float kTolerance = GetTolerance(params->data_min, params->data_max); + for (int i = 0; i < output_count; i++) { + TF_LITE_MICRO_EXPECT_NEAR(expected_data[i], output_data[i], kTolerance); + } +} + +template +void TestDivMultiShape(int** shapes, const int shapes_count, + const T* input1_data, const T* input2_data, + const T* expected_data, T* output_data, + TfLiteFusedActivation activation) { + for (int i = 0; i < shapes_count; i++) { + TestDiv(shapes[i], input1_data, shapes[i], input2_data, shapes[i], + expected_data, output_data, activation); + } +} + +template +void TestDivMultiShapeQuant(int** shapes, const int shapes_count, + const float* input1_data, const float* input2_data, + const float* expected_data, float* output_data, + TfLiteFusedActivation activation, + const TestQuantParams* params) { + for (int i = 0; i < shapes_count; i++) { + TestDivQuantized(shapes[i], input1_data, shapes[i], input2_data, shapes[i], + expected_data, output_data, activation, params); + } +} + +// when broadcasting input2 is a scaler +template +void TestDivMultiBroadcast(int** shapes, const int shapes_count, + const T* input1_data, const T* input2_data, + const T* expected_data, T* output_data, + TfLiteFusedActivation activation) { + int kDimScaler[] = {1, 1}; + for (int i = 0; i < shapes_count; i++) { + TestDiv(shapes[i], input1_data, kDimScaler, input2_data, shapes[i], + expected_data, output_data, activation); + } +} + +// when broadcasting input2 is a scaler +template +void TestDivMultiBroadcastQuant(int** shapes, const int shapes_count, + const float* input1_data, + const float* input2_data, + const float* expected_data, float* output_data, + TfLiteFusedActivation activation, + const TestQuantParams* params) { + int kDimScaler[] = {1, 1}; + for (int i = 0; i < shapes_count; i++) { + TestDivQuantized(shapes[i], input1_data, kDimScaler, input2_data, shapes[i], + expected_data, output_data, activation, params); + } +} + +} // namespace +} // namespace testing 
+} // namespace tflite + +TF_LITE_MICRO_TESTS_BEGIN + +TF_LITE_MICRO_TEST(FloatDivOpTestActNone) { + int kDims[] = {4, 1, 2, 2, 1}; + constexpr float kInput1[] = {-0.2, 0.2, -1.2, 0.8}; + constexpr float kInput2[] = {0.5, 0.2, -1.5, 0.5}; + constexpr float kExpect[] = {-0.4, 1.0, 0.8, 1.6}; + constexpr int kOutputCount = std::extent::value; + float output_data[kOutputCount]; + + tflite::testing::TestDiv(kDims, kInput1, kDims, kInput2, kDims, kExpect, + output_data, kTfLiteActNone); +} + +TF_LITE_MICRO_TEST(FloatDivOpTestActReluN1To1) { + int kDims[] = {4, 1, 2, 2, 1}; + constexpr float kInput1[] = {-0.2, 0.2, -1.2, 0.8}; + constexpr float kInput2[] = {0.1, 0.2, -1.5, 0.5}; + constexpr float kExpect[] = {-1.0, 1.0, 0.8, 1.0}; + constexpr int kOutputCount = std::extent::value; + float output_data[kOutputCount]; + + tflite::testing::TestDiv(kDims, kInput1, kDims, kInput2, kDims, kExpect, + output_data, kTfLiteActReluN1To1); +} + +TF_LITE_MICRO_TEST(FloatDivOpTestMultiShape) { + int kShape1[] = {1, 6}; + int kShape2[] = {2, 2, 3}; + int kShape3[] = {3, 2, 1, 3}; + int kShape4[] = {4, 1, 3, 1, 2}; + int* kDims[] = {kShape1, kShape2, kShape3, kShape4}; + constexpr int kDimsCount = std::extent::value; + + constexpr float kInput1[] = {-2.0, 0.2, 0.3, 0.8, 1.1, -2.0}; + constexpr float kInput2[] = {0.1, 0.2, 0.6, 0.5, -1.1, -0.1}; + constexpr float kExpect[] = {-20.0, 1.0, 0.5, 1.6, -1.0, 20.0}; + constexpr int kOutputCount = std::extent::value; + float output_data[kOutputCount]; + + tflite::testing::TestDivMultiShape(kDims, kDimsCount, kInput1, kInput2, + kExpect, output_data, kTfLiteActNone); +} + +TF_LITE_MICRO_TEST(FloatDivOpTestBroadcast) { + int kShape1[] = {1, 8}; + int kShape2[] = {2, 2, 4}; + int kShape3[] = {3, 2, 1, 4}; + int kShape4[] = {4, 1, 2, 2, 2}; + int* kDims[] = {kShape1, kShape2, kShape3, kShape4}; + constexpr int kDimsCount = std::extent::value; + + constexpr float kInput1[] = {-0.2, 0.2, 0.07, 0.08, + 0.11, -0.123, -0.32, 0.54}; + constexpr float 
kInput2[] = {0.1}; + constexpr float kExpect[] = {-2.0, 2.0, 0.7, 0.8, 1.1, -1.23, -3.2, 5.4}; + constexpr int kOutputCount = std::extent::value; + float output_data[kOutputCount]; + + tflite::testing::TestDivMultiBroadcast(kDims, kDimsCount, kInput1, kInput2, + kExpect, output_data, kTfLiteActNone); +} + +TF_LITE_MICRO_TEST(FloatDivOpTestBroadcast5D) { + int kShape1[] = {5, 1, 2, 1, 2, 2}; + int* kDims[] = {kShape1}; + constexpr int kDimsCount = std::extent::value; + + constexpr float kInput1[] = {-0.2, 0.2, 0.07, 0.08, + 0.11, -0.123, -0.32, 0.54}; + constexpr float kInput2[] = {0.1}; + constexpr float kExpect[] = {-2.0, 2.0, 0.7, 0.8, 1.1, -1.23, -3.2, 5.4}; + constexpr int kOutputCount = std::extent::value; + float output_data[kOutputCount]; + + tflite::testing::TestDivMultiBroadcast(kDims, kDimsCount, kInput1, kInput2, + kExpect, output_data, kTfLiteActNone); +} + +TF_LITE_MICRO_TEST(QuantizedDivOpTestActNone) { + int kDims[] = {4, 1, 2, 2, 1}; + constexpr float kInput1[] = {-0.8, -0.2, 0.3, 0.7}; + constexpr float kInput2[] = {-0.8, 0.4, 0.8, 1.0}; + constexpr float kExpect[] = {1.0, -0.5, 0.375, 0.7}; + constexpr int kOutputCount = std::extent::value; + float output_data[kOutputCount]; + + // setup quantization storage and parameters + int8_t q_output_data[kOutputCount]; + int8_t q_input1_data[kOutputCount]; + int8_t q_input2_data[kOutputCount]; + tflite::testing::TestQuantParams params = {}; + params.data_min = -1.0; + params.data_max = 1.0; + params.input1_data = q_input1_data; + params.input2_data = q_input2_data; + params.output_data = q_output_data; + + tflite::testing::TestDivQuantized(kDims, kInput1, kDims, kInput2, kDims, + kExpect, output_data, kTfLiteActNone, + ¶ms); +} + +TF_LITE_MICRO_TEST(QuantizedDivOpTestActReluN1To1) { + int kDims[] = {4, 1, 2, 2, 1}; + constexpr float kInput1[] = {-0.8, 0.2, 0.9, 0.7}; + constexpr float kInput2[] = {0.6, 0.4, 0.9, -0.8}; + constexpr float kExpect1[] = {-1.0, 0.5, 1.0, -0.875}; + constexpr int kOutputCount = 
std::extent::value; + float output_data[kOutputCount]; + + // setup quantization storage and parameters + int8_t q_output_data[kOutputCount]; + int8_t q_input1_data[kOutputCount]; + int8_t q_input2_data[kOutputCount]; + tflite::testing::TestQuantParams params = {}; + params.data_min = -1.0; + params.data_max = 1.0; + params.input1_data = q_input1_data; + params.input2_data = q_input2_data; + params.output_data = q_output_data; + + tflite::testing::TestDivQuantized(kDims, kInput1, kDims, kInput2, kDims, + kExpect1, output_data, kTfLiteActReluN1To1, + ¶ms); + + constexpr float kInput3[] = {-0.5, 0.2, 0.6, 0.3}; + constexpr float kInput4[] = {0.6, 0.5, -0.8, 0.5}; + constexpr float kExpect2[] = {-0.833, 0.4, -0.75, 0.6}; + + tflite::testing::TestDivQuantized(kDims, kInput3, kDims, kInput4, kDims, + kExpect2, output_data, kTfLiteActReluN1To1, + ¶ms); +} + +TF_LITE_MICRO_TEST(QuantizedDivOpTestMultiShape) { + int kShape1[] = {1, 6}; + int kShape2[] = {2, 2, 3}; + int kShape3[] = {3, 2, 1, 3}; + int kShape4[] = {4, 1, 3, 1, 2}; + int* kDims[] = {kShape1, kShape2, kShape3, kShape4}; + constexpr int kDimsCount = std::extent::value; + + constexpr float kInput1[] = {-2.0, 0.2, 1.7, 0.9, 0.4, 2.0}; + constexpr float kInput2[] = {1.3, 0.3, 1.1, 0.4, -1.1, 1.9}; + constexpr float kExpect[] = {-1.538, 0.667, 1.545, 2.25, -0.364, 1.053}; + constexpr int kOutputCount = std::extent::value; + float output_data[kOutputCount]; + + // setup quantization storage and parameters + int8_t q_output_data[kOutputCount]; + int8_t q_input1_data[kOutputCount]; + int8_t q_input2_data[kOutputCount]; + tflite::testing::TestQuantParams params = {}; + params.data_min = -3.0; + params.data_max = 3.0; + params.input1_data = q_input1_data; + params.input2_data = q_input2_data; + params.output_data = q_output_data; + + tflite::testing::TestDivMultiShapeQuant(kDims, kDimsCount, kInput1, kInput2, + kExpect, output_data, kTfLiteActNone, + ¶ms); +} + +TF_LITE_MICRO_TEST(QuantizedDivOpTestBroadcast) { + int 
kShape1[] = {1, 8}; + int kShape2[] = {2, 2, 4}; + int kShape3[] = {3, 2, 1, 4}; + int kShape4[] = {4, 1, 4, 1, 2}; + int kShape5[] = {5, 1, 2, 1, 2, 2}; + int* kDims[] = {kShape1, kShape2, kShape3, kShape4, kShape5}; + constexpr int kDimsCount = std::extent::value; + + constexpr float kInput1[] = {-2.0, 0.2, 0.7, 0.8, -0.5, 1.1, -1.3, 1.2}; + constexpr float kInput2[] = {0.7}; + constexpr float kExpect[] = {-2.857, 0.286, 1.0, 1.143, + -0.714, 1.571, -1.857, 1.714}; + constexpr int kOutputCount = std::extent::value; + float output_data[kOutputCount]; + + // setup quantization storage and parameters + int8_t q_output_data[kOutputCount]; + int8_t q_input1_data[kOutputCount]; + int8_t q_input2_data[kOutputCount]; + tflite::testing::TestQuantParams params = {}; + params.data_min = -3.0; + params.data_max = 3.0; + params.input1_data = q_input1_data; + params.input2_data = q_input2_data; + params.output_data = q_output_data; + + tflite::testing::TestDivMultiBroadcastQuant(kDims, kDimsCount, kInput1, + kInput2, kExpect, output_data, + kTfLiteActNone, ¶ms); +} + +TF_LITE_MICRO_TESTS_END diff --git a/tensorflow/lite/micro/kernels/elementwise.cc b/tensorflow/lite/micro/kernels/elementwise.cc new file mode 100644 index 0000000..a33c340 --- /dev/null +++ b/tensorflow/lite/micro/kernels/elementwise.cc @@ -0,0 +1,416 @@ +/* Copyright 2023 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/ + +#include + +#include "tensorflow/lite/c/common.h" +#include "tensorflow/lite/kernels/internal/common.h" +#include "tensorflow/lite/kernels/internal/quantization_util.h" +#include "tensorflow/lite/kernels/internal/tensor_ctypes.h" +#include "tensorflow/lite/kernels/kernel_util.h" +#include "tensorflow/lite/micro/kernels/kernel_util.h" +#include "tensorflow/lite/micro/micro_log.h" +#include "tensorflow/lite/micro/micro_utils.h" + +namespace tflite { +namespace { + +constexpr int kAbsNameId = 0; +constexpr int kRsrqtNameId = 1; + +const int kElementwiseInputTensor = 0; +const int kElementwiseOutputTensor = 0; + +struct OpDataAbsRsqrt { + int32_t multiplier; + int shift; + int input_offset; + int output_offset; + bool needs_rescale; + TfLiteQuantizationType input_quantization_type; + TfLiteType input_type; +}; + +bool IsNumericSupportedType(const TfLiteType type) { + return type == kTfLiteFloat32; +} + +bool IsLogicalSupportedType(const TfLiteType type) { + return type == kTfLiteBool; +} + +bool IsAbsSupportedType(const TfLiteType type) { + return type == kTfLiteFloat32 || type == kTfLiteInt8 || type == kTfLiteInt16; +} + +bool IsRsqrtSupportedType(const TfLiteType type) { + return type == kTfLiteFloat32 || type == kTfLiteInt8; +} + +inline void SetAbsOutputMultiplier(const float input_scale, + const float output_scale, + int32_t* multiplier, int* shift) { + QuantizeMultiplier(static_cast(input_scale / output_scale), + multiplier, shift); +} + +inline void SetRsqrtOutputMultiplier(const float input_scale, + const float output_scale, + int32_t* multiplier, int* shift) { + const double scale = + 1. 
/ static_cast((std::sqrt(input_scale) * output_scale)); + QuantizeMultiplier(scale, multiplier, shift); +} + +typedef bool (*IsSupportedType)(TfLiteType); +template +TfLiteStatus GenericPrepare(TfLiteContext* context, TfLiteNode* node) { + MicroContext* micro_context = GetMicroContext(context); + TF_LITE_ENSURE_EQ(context, NumInputs(node), 1); + TF_LITE_ENSURE_EQ(context, NumOutputs(node), 1); + TfLiteTensor* input = + micro_context->AllocateTempInputTensor(node, kElementwiseInputTensor); + TF_LITE_ENSURE(context, input != nullptr); + TfLiteTensor* output = + micro_context->AllocateTempOutputTensor(node, kElementwiseOutputTensor); + TF_LITE_ENSURE(context, output != nullptr); + TF_LITE_ENSURE_TYPES_EQ(context, input->type, output->type); + if (!IsSupportedType(input->type)) { + MicroPrintf("Input data type %s (%d) is not supported.", + TfLiteTypeGetName(input->type), input->type); + return kTfLiteError; + } + + micro_context->DeallocateTempTfLiteTensor(input); + micro_context->DeallocateTempTfLiteTensor(output); + return kTfLiteOk; +} + +typedef bool (*IsSupportedType)(TfLiteType); +template +TfLiteStatus PrepareAbsRsqrt(TfLiteContext* context, TfLiteNode* node) { + MicroContext* micro_context = GetMicroContext(context); + TF_LITE_ENSURE_EQ(context, NumInputs(node), 1); + TF_LITE_ENSURE_EQ(context, NumOutputs(node), 1); + TfLiteTensor* input = micro_context->AllocateTempInputTensor(node, 0); + TF_LITE_ENSURE(context, input != nullptr); + TfLiteTensor* output = micro_context->AllocateTempOutputTensor(node, 0); + TF_LITE_ENSURE(context, output != nullptr); + TF_LITE_ENSURE_TYPES_EQ(context, input->type, output->type); + if (!IsSupportedType(input->type)) { + MicroPrintf("Input data type %s (%d) is not supported.", + TfLiteTypeGetName(input->type), input->type); + return kTfLiteError; + } + + auto* op_data = static_cast(node->user_data); + op_data->input_type = input->type; + + // For int16 type input, we support both quantized and non-quantized + // evaluation. 
+ if (op_nameid == kAbsNameId) { + op_data->input_quantization_type = input->quantization.type; + } + + if (input->type == kTfLiteInt8 || + (input->type == kTfLiteInt16 && + input->quantization.type != kTfLiteNoQuantization)) { + TF_LITE_ENSURE_EQ(context, input->quantization.type, + kTfLiteAffineQuantization); + TF_LITE_ENSURE_EQ(context, output->quantization.type, + kTfLiteAffineQuantization); + const auto* input_params = + reinterpret_cast(input->quantization.params); + const auto* output_params = reinterpret_cast( + output->quantization.params); + TF_LITE_ENSURE(context, input_params != nullptr); + TF_LITE_ENSURE(context, input_params->scale != nullptr); + TF_LITE_ENSURE(context, input_params->scale->size > 0); + TF_LITE_ENSURE(context, input_params->zero_point->size > 0); + TF_LITE_ENSURE(context, output_params != nullptr); + TF_LITE_ENSURE(context, output_params->scale != nullptr); + TF_LITE_ENSURE(context, output_params->scale->size > 0); + TF_LITE_ENSURE(context, output_params->zero_point->size > 0); + op_data->input_offset = input_params->zero_point->data[0]; + op_data->output_offset = output_params->zero_point->data[0]; + if (input->type == kTfLiteInt16) { + TF_LITE_ENSURE_EQ(context, op_data->input_offset, 0); + TF_LITE_ENSURE_EQ(context, op_data->output_offset, 0); + } + const float input_scale = input_params->scale->data[0]; + const float output_scale = output_params->scale->data[0]; + op_data->needs_rescale = input_scale != output_scale; + if (op_nameid == kAbsNameId && op_data->needs_rescale) { + SetAbsOutputMultiplier(input_scale, output_scale, &op_data->multiplier, + &op_data->shift); + } else if (op_nameid == kRsrqtNameId) { + SetRsqrtOutputMultiplier(input_scale, output_scale, &op_data->multiplier, + &op_data->shift); + } + } + micro_context->DeallocateTempTfLiteTensor(input); + micro_context->DeallocateTempTfLiteTensor(output); + return kTfLiteOk; +} + +template +inline TfLiteStatus EvalImplQuantized( + TfLiteContext* context, TfLiteNode* node, 
+ T func(TfLiteContext*, TfLiteNode*, T), + TfLiteStatus validate_input_func(TfLiteContext*, TfLiteNode*, T), + TfLiteType expected_type) { + const TfLiteEvalTensor* input = tflite::micro::GetEvalInput(context, node, 0); + TfLiteEvalTensor* output = tflite::micro::GetEvalOutput(context, node, 0); + TF_LITE_ENSURE_TYPES_EQ(context, input->type, expected_type); + const size_t num_elements = ElementCount(*input->dims); + const T* in_data = tflite::micro::GetTensorData(input); + T* out_data = tflite::micro::GetTensorData(output); + for (size_t i = 0; i < num_elements; ++i) { + if (validate_input_func) { + TF_LITE_ENSURE_OK(context, + validate_input_func(context, node, in_data[i])); + } + out_data[i] = func(context, node, in_data[i]); + } + return kTfLiteOk; +} + +template +inline T AbsHelper(T i) { + return std::abs(i); +} + +template +inline TfLiteStatus EvalImpl(TfLiteContext* context, TfLiteNode* node, + T func(T), TfLiteStatus validate_input_func(T), + TfLiteType expected_type) { + const TfLiteEvalTensor* input = tflite::micro::GetEvalInput(context, node, 0); + TfLiteEvalTensor* output = tflite::micro::GetEvalOutput(context, node, 0); + TF_LITE_ENSURE_TYPES_EQ(context, input->type, expected_type); + const size_t num_elements = ElementCount(*input->dims); + const T* in_data = tflite::micro::GetTensorData(input); + T* out_data = tflite::micro::GetTensorData(output); + for (size_t i = 0; i < num_elements; ++i) { + if (validate_input_func) { + TF_LITE_ENSURE_OK(context, validate_input_func(in_data[i])); + } + out_data[i] = func(in_data[i]); + } + return kTfLiteOk; +} + +inline TfLiteStatus EvalNumeric(TfLiteContext* context, TfLiteNode* node, + float float_func(float)) { + return EvalImpl(context, node, float_func, + /*validate_input_func=*/nullptr, kTfLiteFloat32); +} + +inline TfLiteStatus EvalLogical(TfLiteContext* context, TfLiteNode* node, + + bool bool_func(bool)) { + return EvalImpl(context, node, bool_func, + /*validate_input_func=*/nullptr, kTfLiteBool); +} + 
+void* ElementWiseAbsRsqrtInit(TfLiteContext* context, const char* buffer, + size_t length) { + TFLITE_DCHECK(context->AllocatePersistentBuffer != nullptr); + return context->AllocatePersistentBuffer(context, sizeof(OpDataAbsRsqrt)); +} + +template +inline T AbsEvalQuantized(TfLiteContext* context, TfLiteNode* node, T i) { + const auto* op_data = static_cast(node->user_data); + const int kMin = std::numeric_limits::min(); + const int kMax = std::numeric_limits::max(); + + const int32_t value = std::abs(i - op_data->input_offset); + if (!op_data->needs_rescale) { + return static_cast( + std::min(std::max(static_cast(value + op_data->output_offset), + static_cast(kMin)), + static_cast(kMax))); + } + + const int32_t output = tflite::MultiplyByQuantizedMultiplier( + value, op_data->multiplier, op_data->shift) + + op_data->output_offset; + return static_cast(std::min( + std::max(static_cast(output), static_cast(kMin)), + static_cast(kMax))); +} + +template +inline T RsqrtEvalQuantized(TfLiteContext* context, TfLiteNode* node, T i) { + const auto* op_data = static_cast(node->user_data); + const int kMin = std::numeric_limits::min(); + const int kMax = std::numeric_limits::max(); + + const int32_t value = (i - op_data->input_offset); + const int32_t kShift = 20; // Shift to keep value integer. + if (value == 0) { + // Assume that any value close to 0 represents the max output value. 
+ return static_cast(kMax); + } + int32_t inv_sqrt_multiplier; + int inv_sqrt_shift; + GetInvSqrtQuantizedMultiplierExp(value, kReverseShift, &inv_sqrt_multiplier, + &inv_sqrt_shift); + const int32_t data = tflite::MultiplyByQuantizedMultiplier( + static_cast(1), inv_sqrt_multiplier, inv_sqrt_shift + kShift); + const int32_t output = + tflite::MultiplyByQuantizedMultiplier(data, op_data->multiplier, + op_data->shift - kShift) + + op_data->output_offset; + return static_cast(std::min( + std::max(static_cast(output), static_cast(kMin)), + static_cast(kMax))); +} + +template +TfLiteStatus validate_input_func(TfLiteContext* context, TfLiteNode* node, + T i) { + const auto* op_data = static_cast(node->user_data); + + TF_LITE_ENSURE_MSG(context, i >= op_data->input_offset, + "Rsqrt is only defined for positive values"); + return static_cast(kTfLiteOk); +} + +TfLiteStatus AbsEval(TfLiteContext* context, TfLiteNode* node) { + OpDataAbsRsqrt* op_data = reinterpret_cast(node->user_data); + TfLiteType type = op_data->input_type; + TfLiteQuantizationType input_quantization_type = + op_data->input_quantization_type; + TfLiteStatus eval_result; + + switch (type) { + case kTfLiteFloat32: + eval_result = EvalNumeric(context, node, std::abs); + break; + case kTfLiteInt8: + eval_result = + EvalImplQuantized(context, node, AbsEvalQuantized, + /*validate_input_func=*/nullptr, type); + break; + case kTfLiteInt16: + eval_result = + input_quantization_type == kTfLiteNoQuantization + ? 
EvalImpl(context, node, AbsHelper, + /*validate_input_func=*/nullptr, type) + : EvalImplQuantized(context, node, AbsEvalQuantized, + /*validate_input_func=*/nullptr, + type); + break; + default: + MicroPrintf("Current data type %s is not supported.", + TfLiteTypeGetName(type)); + return kTfLiteError; + break; + } + return eval_result; +} + +TfLiteStatus SinEval(TfLiteContext* context, TfLiteNode* node) { + return EvalNumeric(context, node, std::sin); +} + +TfLiteStatus CosEval(TfLiteContext* context, TfLiteNode* node) { + return EvalNumeric(context, node, std::cos); +} + +TfLiteStatus LogEval(TfLiteContext* context, TfLiteNode* node) { + return EvalNumeric(context, node, std::log); +} + +TfLiteStatus SqrtEval(TfLiteContext* context, TfLiteNode* node) { + return EvalNumeric(context, node, std::sqrt); +} + +TfLiteStatus RsqrtEval(TfLiteContext* context, TfLiteNode* node) { + const auto* op_data = static_cast(node->user_data); + TfLiteType type = op_data->input_type; + switch (type) { + case kTfLiteFloat32: + return EvalImpl( + context, node, [](float f) { return 1.f / std::sqrt(f); }, + /*validate_input_func=*/nullptr, type); + case kTfLiteInt8: + return EvalImplQuantized(context, node, RsqrtEvalQuantized, + validate_input_func, type); + case kTfLiteInt16: + return EvalImplQuantized(context, node, RsqrtEvalQuantized, + validate_input_func, type); + + default: + MicroPrintf("Current data type %s is not supported.", + TfLiteTypeGetName(type)); + return kTfLiteError; + } +} + +TfLiteStatus SquareEval(TfLiteContext* context, TfLiteNode* node) { + return EvalNumeric(context, node, [](float f) { return f * f; }); +} + +TfLiteStatus LogicalNotEval(TfLiteContext* context, TfLiteNode* node) { + return EvalLogical(context, node, [](bool v) { return !v; }); +} + +} // namespace + +TFLMRegistration Register_ABS() { + return tflite::micro::RegisterOp( + ElementWiseAbsRsqrtInit, PrepareAbsRsqrt, + AbsEval); +} + +TFLMRegistration Register_SIN() { + return 
tflite::micro::RegisterOp( + nullptr, GenericPrepare, SinEval); +} + +TFLMRegistration Register_COS() { + return tflite::micro::RegisterOp( + nullptr, GenericPrepare, CosEval); +} + +TFLMRegistration Register_LOG() { + return tflite::micro::RegisterOp( + nullptr, GenericPrepare, LogEval); +} + +TFLMRegistration Register_SQRT() { + return tflite::micro::RegisterOp( + nullptr, GenericPrepare, SqrtEval); +} + +TFLMRegistration Register_RSQRT() { + return tflite::micro::RegisterOp( + ElementWiseAbsRsqrtInit, + PrepareAbsRsqrt, RsqrtEval); +} + +TFLMRegistration Register_SQUARE() { + return tflite::micro::RegisterOp( + nullptr, GenericPrepare, SquareEval); +} + +TFLMRegistration Register_LOGICAL_NOT() { + return tflite::micro::RegisterOp( + nullptr, GenericPrepare, LogicalNotEval); +} + +} // namespace tflite diff --git a/tensorflow/lite/micro/kernels/elementwise_test.cc b/tensorflow/lite/micro/kernels/elementwise_test.cc new file mode 100644 index 0000000..10e96c3 --- /dev/null +++ b/tensorflow/lite/micro/kernels/elementwise_test.cc @@ -0,0 +1,382 @@ +/* Copyright 2022 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/ + +#include "tensorflow/lite/c/common.h" +#include "tensorflow/lite/micro/debug_log.h" +#include "tensorflow/lite/micro/kernels/kernel_runner.h" +#include "tensorflow/lite/micro/test_helpers.h" +#include "tensorflow/lite/micro/testing/micro_test.h" + +namespace tflite { +namespace testing { + +void TestElementwiseFloat(const TFLMRegistration& registration, + int* input_dims_data, const float* input_data, + int* output_dims_data, + const float* expected_output_data, + float* output_data) { + TfLiteIntArray* input_dims = IntArrayFromInts(input_dims_data); + TfLiteIntArray* output_dims = IntArrayFromInts(output_dims_data); + const int output_dims_count = ElementCount(*output_dims); + + constexpr int input_size = 1; + constexpr int output_size = 1; + constexpr int tensors_size = input_size + output_size; + TfLiteTensor tensors[tensors_size] = {CreateTensor(input_data, input_dims), + CreateTensor(output_data, output_dims)}; + + // Place a unique value in the uninitialized output buffer. 
+ for (int i = 0; i < output_dims_count; ++i) { + output_data[i] = 23; + } + + static int inputs_array_data[] = {1, 0}; + TfLiteIntArray* inputs_array = IntArrayFromInts(inputs_array_data); + static int outputs_array_data[] = {1, 1}; + TfLiteIntArray* outputs_array = IntArrayFromInts(outputs_array_data); + + micro::KernelRunner runner(registration, tensors, tensors_size, inputs_array, + outputs_array, + /*builtin_data=*/nullptr); + + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, runner.InitAndPrepare()); + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, runner.Invoke()); + + for (int i = 0; i < output_dims_count; ++i) { + TF_LITE_MICRO_EXPECT_NEAR(expected_output_data[i], output_data[i], 1e-5f); + } +} + +template +void TestElementwiseQuantized(const TFLMRegistration& registration, + int* input_dims_data, const float* input_data, + T* input_quantized, float input_scale, + int32_t input_zero_point, int* output_dims_data, + const float* expected_output_data, T* output_data, + const float output_scale, + const int output_zero_point, + TfLiteStatus expected_invoke_status = kTfLiteOk) { + TfLiteIntArray* input_dims = IntArrayFromInts(input_dims_data); + TfLiteIntArray* output_dims = IntArrayFromInts(output_dims_data); + const int output_dims_count = ElementCount(*output_dims); + + constexpr int input_size = 1; + constexpr int output_size = 1; + constexpr int tensors_size = input_size + output_size; + + TfLiteTensor tensors[tensors_size] = { + CreateQuantizedTensor(input_data, input_quantized, input_dims, + input_scale, input_zero_point), + CreateQuantizedTensor(output_data, output_dims, output_scale, + output_zero_point)}; + + int input_zero_points[2] = {1, input_zero_point}; + float input_scales[2] = {1, input_scale}; + TfLiteAffineQuantization input_quant = { + tflite::testing::FloatArrayFromFloats(input_scales), + tflite::testing::IntArrayFromInts(input_zero_points), 0}; + tensors[0].quantization = {kTfLiteAffineQuantization, &input_quant}; + + int output_zero_points[2] = {1, 
output_zero_point}; + float output_scales[2] = {1, output_scale}; + TfLiteAffineQuantization output_quant = { + tflite::testing::FloatArrayFromFloats(output_scales), + tflite::testing::IntArrayFromInts(output_zero_points), 0}; + tensors[1].quantization = {kTfLiteAffineQuantization, &output_quant}; + + static int inputs_array_data[] = {1, 0}; + TfLiteIntArray* inputs_array = IntArrayFromInts(inputs_array_data); + static int outputs_array_data[] = {1, 1}; + TfLiteIntArray* outputs_array = IntArrayFromInts(outputs_array_data); + + micro::KernelRunner runner(registration, tensors, tensors_size, inputs_array, + outputs_array, + /*builtin_data=*/nullptr); + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, runner.InitAndPrepare()); + TF_LITE_MICRO_EXPECT_EQ(expected_invoke_status, runner.Invoke()); + + if (expected_invoke_status == kTfLiteOk) { + for (int i = 0; i < output_dims_count; ++i) { + float f = (output_data[i] - output_zero_point) * output_scale; + TF_LITE_MICRO_EXPECT_NEAR(expected_output_data[i], f, input_scale); + } + } +} + +void TestElementwiseBool(const TFLMRegistration& registration, + int* input_dims_data, const bool* input_data, + int* output_dims_data, + const bool* expected_output_data, bool* output_data) { + TfLiteIntArray* input_dims = IntArrayFromInts(input_dims_data); + TfLiteIntArray* output_dims = IntArrayFromInts(output_dims_data); + const int output_dims_count = ElementCount(*output_dims); + + constexpr int input_size = 1; + constexpr int output_size = 1; + constexpr int tensors_size = input_size + output_size; + TfLiteTensor tensors[tensors_size] = {CreateTensor(input_data, input_dims), + CreateTensor(output_data, output_dims)}; + + // Place false in the uninitialized output buffer. 
+ for (int i = 0; i < output_dims_count; ++i) { + output_data[i] = false; + } + + int inputs_array_data[] = {1, 0}; + TfLiteIntArray* inputs_array = IntArrayFromInts(inputs_array_data); + int outputs_array_data[] = {1, 1}; + TfLiteIntArray* outputs_array = IntArrayFromInts(outputs_array_data); + + micro::KernelRunner runner(registration, tensors, tensors_size, inputs_array, + outputs_array, + /*builtin_data=*/nullptr); + + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, runner.InitAndPrepare()); + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, runner.Invoke()); + + for (int i = 0; i < output_dims_count; ++i) { + TF_LITE_MICRO_EXPECT_EQ(expected_output_data[i], output_data[i]); + } +} + +} // namespace testing +} // namespace tflite + +TF_LITE_MICRO_TESTS_BEGIN + +TF_LITE_MICRO_TEST(Abs) { + constexpr int output_dims_count = 4; + int shape[] = {2, 2, 2}; + const float input[] = {0.01, -0.01, 10, -10}; + const float golden[] = {0.01, 0.01, 10, 10}; + float output_data[output_dims_count]; + tflite::testing::TestElementwiseFloat(tflite::Register_ABS(), shape, input, + shape, golden, output_data); +} + +TF_LITE_MICRO_TEST(AbsInt8) { + int shape[] = {2, 1, 8}; + + const float input_data[] = {15., 46., 78., -142., -1., -17., -49., 113.}; + int8_t input_quantized[8]; + + const float golden[] = {15., 46., 78., 142., 1., 17., 49., 113.}; + int8_t output_quantized[8]; + + const float abs_max = 142; + const float data_min = -142; + const float data_max = 113; + const float input_scale = (data_max - data_min) / 255.0f; + const float output_scale = abs_max / 255.0f; + const int input_zero_point = 127 - data_max; + const int output_zero_point = -128; + tflite::testing::TestElementwiseQuantized( + tflite::Register_ABS(), shape, input_data, input_quantized, input_scale, + input_zero_point, shape, golden, output_quantized, output_scale, + output_zero_point); +} + +TF_LITE_MICRO_TEST(AbsInt8SameScale) { + int shape[] = {2, 1, 8}; + + const float input_data[] = {15., 46., 78., -142., -1., -17., -49., 113.}; + 
int8_t input_quantized[8]; + + const float golden[] = {15., 46., 78., 142., 1., 17., 49., 113.}; + int8_t output_quantized[8]; + + const float data_min = -142; + const float data_max = 113; + const float scale = (data_max - data_min) / 255.0f; + const int zero_point = 127 - data_max; + tflite::testing::TestElementwiseQuantized( + tflite::Register_ABS(), shape, input_data, input_quantized, scale, + zero_point, shape, golden, output_quantized, scale, -128); +} + +TF_LITE_MICRO_TEST(AbsInt16) { + int shape[] = {2, 1, 8}; + + const float input_data[] = {15., 46., 78., -142., -1., -17., -49., 113.}; + int16_t input_quantized[8]; + + const float golden[] = {15., 46., 78., 142., 1., 17., 49., 113.}; + int16_t output_quantized[8]; + + const float input_max = 142; + const float output_max = 150; + const float input_scale = input_max / std::numeric_limits::max(); + const float output_scale = output_max / std::numeric_limits::max(); + tflite::testing::TestElementwiseQuantized( + tflite::Register_ABS(), shape, input_data, input_quantized, input_scale, + /*input_zero_point*/ 0, shape, golden, output_quantized, output_scale, + /*output_zero_point*/ 0); +} + +TF_LITE_MICRO_TEST(Sin) { + constexpr int output_dims_count = 4; + int shape[] = {2, 2, 2}; + const float input[] = {0, 3.1415926, -3.1415926, 1}; + const float golden[] = {0, 0, 0, 0.84147}; + float output_data[output_dims_count]; + tflite::testing::TestElementwiseFloat(tflite::Register_SIN(), shape, input, + shape, golden, output_data); +} + +TF_LITE_MICRO_TEST(Cos) { + constexpr int output_dims_count = 4; + int shape[] = {2, 2, 2}; + const float input[] = {0, 3.1415926, -3.1415926, 1}; + const float golden[] = {1, -1, -1, 0.54030}; + float output_data[output_dims_count]; + tflite::testing::TestElementwiseFloat(tflite::Register_COS(), shape, input, + shape, golden, output_data); +} + +TF_LITE_MICRO_TEST(Log) { + constexpr int output_dims_count = 4; + int shape[] = {2, 2, 2}; + const float input[] = {1, 2.7182818, 0.5, 2}; 
+ const float golden[] = {0, 1, -0.6931472, 0.6931472}; + float output_data[output_dims_count]; + tflite::testing::TestElementwiseFloat(tflite::Register_LOG(), shape, input, + shape, golden, output_data); +} + +TF_LITE_MICRO_TEST(Sqrt) { + constexpr int output_dims_count = 4; + int shape[] = {2, 2, 2}; + const float input[] = {0, 1, 2, 4}; + const float golden[] = {0, 1, 1.41421, 2}; + float output_data[output_dims_count]; + tflite::testing::TestElementwiseFloat(tflite::Register_SQRT(), shape, input, + shape, golden, output_data); +} + +TF_LITE_MICRO_TEST(Rsqrt) { + constexpr int output_dims_count = 4; + int shape[] = {2, 2, 2}; + const float input[] = {1, 2, 4, 9}; + const float golden[] = {1, 0.7071, 0.5, 0.33333}; + float output_data[output_dims_count]; + tflite::testing::TestElementwiseFloat(tflite::Register_RSQRT(), shape, input, + shape, golden, output_data); +} + +TF_LITE_MICRO_TEST(RsqrtInt8) { + int shape[] = {2, 1, 8}; + + const float input_data[] = {15., 46., 78., 142., 1., 17., 49., 113.}; + int8_t input_quantized[8]; + + const float golden[] = {0.2582, 0.14744, 0.11323, 0.08392, + 1., 0.24254, 0.142857, 0.09407}; + int8_t output_quantized[8]; + + const float data_max = 142; + const float input_scale = 142.0 / 255.0; + const float output_scale = 1.0 / 255.0; + const int input_zero_point = 127 - data_max; + const int output_zero_point = -128; + tflite::testing::TestElementwiseQuantized( + tflite::Register_RSQRT(), shape, input_data, input_quantized, input_scale, + input_zero_point, shape, golden, output_quantized, output_scale, + output_zero_point); +} + +TF_LITE_MICRO_TEST(RsqrtInt16) { + int shape[] = {2, 1, 8}; + + const float input_data[] = {15., 46., 78., 142., 1., 17., 49., 113.}; + int16_t input_quantized[8]; + + const float golden[] = {0.2582, 0.14744, 0.11323, 0.08392, + 1., 0.24254, 0.142857, 0.09407}; + int16_t output_quantized[8]; + + const float input_scale = 142.0 / 32768.0; + const float output_scale = 1.0 / 32768.0; + const int 
input_zero_point = 0; + const int output_zero_point = 0; + tflite::testing::TestElementwiseQuantized( + tflite::Register_RSQRT(), shape, input_data, input_quantized, input_scale, + input_zero_point, shape, golden, output_quantized, output_scale, + output_zero_point); +} + +TF_LITE_MICRO_TEST(RsqrtCloseTo0Int8) { + int shape[] = {2, 1, 8}; + + const float input_data[] = {15., 46., 78., 142., 1., 0.1, 49., 113.}; + int8_t input_quantized[8]; + + const float golden[] = {0.2582, 0.14744, 0.11323, 0.08392, + 1., 3.16228, 0.142857, 0.09407}; + int8_t output_quantized[8]; + + const float data_max = 142; + const float input_scale = 142.0 / 255.0; + const float output_scale = 3.16 / 255.0; + const int input_zero_point = 127 - data_max; + const int output_zero_point = -128; + tflite::testing::TestElementwiseQuantized( + tflite::Register_RSQRT(), shape, input_data, input_quantized, input_scale, + input_zero_point, shape, golden, output_quantized, output_scale, + output_zero_point); +} + +TF_LITE_MICRO_TEST(RsqrtNanInt8) { + int shape[] = {2, 1, 8}; + + const float input_data[] = {15., 46., 78., 142., 1., 17., -49., 113.}; + int8_t input_quantized[8]; + + const float golden[] = {0.2582, 0.14744, 0.11323, 0.08392, + 1., 0.24254, 0.142857, 0.09407}; + int8_t output_quantized[8]; + + const float data_max = 142; + const float input_scale = 142.0 / 255.0; + const float output_scale = 1.0 / 255.0; + const int input_zero_point = 127 - data_max; + const int output_zero_point = -128; + + tflite::testing::TestElementwiseQuantized( + tflite::Register_RSQRT(), shape, input_data, input_quantized, input_scale, + input_zero_point, shape, golden, output_quantized, output_scale, + output_zero_point, kTfLiteError); +} + +TF_LITE_MICRO_TEST(Square) { + constexpr int output_dims_count = 4; + int shape[] = {2, 2, 2}; + const float input[] = {1, 2, 0.5, -3.0}; + const float golden[] = {1, 4.0, 0.25, 9.0}; + float output_data[output_dims_count]; + 
tflite::testing::TestElementwiseFloat(tflite::Register_SQUARE(), shape, input, + shape, golden, output_data); +} + +TF_LITE_MICRO_TEST(LogicalNot) { + constexpr int output_dims_count = 4; + int shape[] = {2, 2, 2}; + const bool input[] = {true, false, false, true}; + const bool golden[] = {false, true, true, false}; + bool output_data[output_dims_count]; + tflite::testing::TestElementwiseBool(tflite::Register_LOGICAL_NOT(), shape, + input, shape, golden, output_data); +} + +TF_LITE_MICRO_TESTS_END diff --git a/tensorflow/lite/micro/kernels/elu.cc b/tensorflow/lite/micro/kernels/elu.cc new file mode 100644 index 0000000..aacd21e --- /dev/null +++ b/tensorflow/lite/micro/kernels/elu.cc @@ -0,0 +1,151 @@ +/* Copyright 2021 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#include "tensorflow/lite/kernels/internal/reference/elu.h" + +#include +#include + +#include "tensorflow/lite/c/common.h" +#include "tensorflow/lite/kernels/internal/cppmath.h" +#include "tensorflow/lite/kernels/internal/quantization_util.h" +#include "tensorflow/lite/kernels/internal/reference/process_broadcast_shapes.h" +#include "tensorflow/lite/kernels/internal/types.h" +#include "tensorflow/lite/kernels/kernel_util.h" +#include "tensorflow/lite/micro/kernels/kernel_util.h" +#include "tensorflow/lite/micro/micro_log.h" + +namespace tflite { +namespace { + +// Input/output tensor index. 
+constexpr int kInputTensor = 0; +constexpr int kOutputTensor = 0; + +// OLD-TODO(b/142762739): We should figure out a multi-threading plan for most +// of the activation ops below. + +struct OpData { + int8_t table[256]; +}; + +using TransformFunc = float (*)(float); + +template +void PopulateLookupTable(const TfLiteTensor* input, const TfLiteTensor* output, + const TransformFunc transform, OpData* data) { + if (sizeof(T) != 1) { + MicroPrintf("Lookup table valid only for 8bit"); + TFLITE_ABORT; + } + + const float inverse_scale = 1 / output->params.scale; + int32_t maxval = std::numeric_limits::max(); + int32_t minval = std::numeric_limits::min(); + for (int32_t val = minval; val <= maxval; ++val) { + const float dequantized = + input->params.scale * (val - input->params.zero_point); + const float transformed = transform(dequantized); + const float rescaled = TfLiteRound(transformed * inverse_scale); + const int32_t quantized = + static_cast(rescaled + output->params.zero_point); + data->table[static_cast(static_cast(val))] = + static_cast(std::max(std::min(maxval, quantized), minval)); + } +} + +// OLD-TODO(b/143696793): move this to optimized_ops. 
+void EvalUsingLookupTable(const OpData* data, const TfLiteEvalTensor* input, + TfLiteEvalTensor* output) { + const int size = MatchingFlatSize(tflite::micro::GetTensorShape(input), + tflite::micro::GetTensorShape(output)); + int8_t* output_data = tflite::micro::GetTensorData(output); + const int8_t* input_data = tflite::micro::GetTensorData(input); + + for (int i = 0; i < size; ++i) { + output_data[i] = data->table[static_cast(input_data[i])]; + } +} + +TfLiteStatus CalculateOpData(TfLiteContext* context, TfLiteNode* node) { + MicroContext* micro_context = GetMicroContext(context); + + TF_LITE_ENSURE_EQ(context, NumInputs(node), 1); + TF_LITE_ENSURE_EQ(context, NumOutputs(node), 1); + TfLiteTensor* input = + micro_context->AllocateTempInputTensor(node, kInputTensor); + TF_LITE_ENSURE(context, input != nullptr); + TfLiteTensor* output = + micro_context->AllocateTempOutputTensor(node, kOutputTensor); + TF_LITE_ENSURE(context, output != nullptr); + TF_LITE_ENSURE_TYPES_EQ(context, input->type, output->type); + + // Use LUT to handle quantized elu path. + if (input->type == kTfLiteInt8) { + OpData* data = static_cast(node->user_data); + TransformFunc transform = [](float value) { + return value < 0.0f ? std::exp(value) - 1.0f : value; + }; + PopulateLookupTable(input, output, transform, data); + } + micro_context->DeallocateTempTfLiteTensor(input); + micro_context->DeallocateTempTfLiteTensor(output); + return kTfLiteOk; +} + +void* EluInit(TfLiteContext* context, const char* buffer, size_t length) { + // This is a builtin op, so we don't use the contents in 'buffer', if any. + // Instead, we allocate a new object to carry information from Prepare() to + // Eval(). 
+ TFLITE_DCHECK(context->AllocatePersistentBuffer != nullptr); + return context->AllocatePersistentBuffer(context, sizeof(OpData)); +} + +TfLiteStatus EluPrepare(TfLiteContext* context, TfLiteNode* node) { + return CalculateOpData(context, node); +} + +TfLiteStatus EluEval(TfLiteContext* context, TfLiteNode* node) { + const TfLiteEvalTensor* input = + tflite::micro::GetEvalInput(context, node, kInputTensor); + TfLiteEvalTensor* output = + tflite::micro::GetEvalOutput(context, node, kOutputTensor); + switch (input->type) { + case kTfLiteFloat32: { + reference_ops::Elu(tflite::micro::GetTensorShape(input), + tflite::micro::GetTensorData(input), + tflite::micro::GetTensorShape(output), + tflite::micro::GetTensorData(output)); + return kTfLiteOk; + } + case kTfLiteInt8: { + const OpData* data = static_cast(node->user_data); + EvalUsingLookupTable(data, input, output); + return kTfLiteOk; + } + default: + MicroPrintf("ELU only supports float32 and int8 currently, got %s.", + TfLiteTypeGetName(input->type)); + return kTfLiteError; + } +} + +} // namespace + +TFLMRegistration Register_ELU() { + return tflite::micro::RegisterOp(EluInit, EluPrepare, EluEval); +} + +} // namespace tflite diff --git a/tensorflow/lite/micro/kernels/elu_test.cc b/tensorflow/lite/micro/kernels/elu_test.cc new file mode 100644 index 0000000..e8fa378 --- /dev/null +++ b/tensorflow/lite/micro/kernels/elu_test.cc @@ -0,0 +1,169 @@ +/* Copyright 2021 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ +#include + +#include "tensorflow/lite/c/builtin_op_data.h" +#include "tensorflow/lite/c/common.h" +#include "tensorflow/lite/micro/kernels/kernel_runner.h" +#include "tensorflow/lite/micro/test_helpers.h" +#include "tensorflow/lite/micro/testing/micro_test.h" + +namespace tflite { +namespace testing { +namespace { + +// min/max are used to compute scale, zero-point +template +struct TestEluParams { + // quantization parameters + float data_min; // input and output data minimum value + float data_max; // input and output data maximum value + T* input_data; // quantized input storage + T* output_data; // quantized output storage + float tolerance; // output vs expected value tolerance +}; + +// Our fixed-point math function implementations have roughly 12 bits of +// accuracy, when specialized to 16-bit fixed-point arithmetic. +// That is purely an implementation compromise, it would have been possible +// to get closer to 16 bits of accuracy but that would be more expensive, +// and not needed for our purposes as ultimately the output is either +// immediately down-quantized to 8 bits, or will typically be at the output +// of the surrounding LSTM cell. +// So we can require roughly 2^-12 accuracy when the output is 16-bit, and +// we can more or less expect the full 2^-8 accuracy when the output is 8-bit. +// +// However, the representable output interval is often [-1, 1] (it has to be +// for tanh, and even for logistic, when we implement it in fixed-point, we +// typically have to do so on such a symmetric interval, e.g. ARM NEON only +// has signed fixed-point arithmetic (SQRDMULH)). As the width of [-1, 1] +// is 2, our representable values are often diluted by a factor of 2, whence +// the factor of 2 below. +constexpr float kQuantizedTolerance = 2 * (1. 
/ 256); + +void ExecuteEluTest(TfLiteTensor* tensors, int tensors_count) { + int kInputArrayData[] = {1, 0}; + TfLiteIntArray* inputs_array = IntArrayFromInts(kInputArrayData); + int kOutputArrayData[] = {1, 1}; + TfLiteIntArray* outputs_array = IntArrayFromInts(kOutputArrayData); + + const TFLMRegistration registration = tflite::Register_ELU(); + micro::KernelRunner runner(registration, tensors, tensors_count, inputs_array, + outputs_array, nullptr); + + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, runner.InitAndPrepare()); + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, runner.Invoke()); +} + +template +void TestElu(int* input_dims_data, const T* input_data, int* expected_dims, + const T* expected_data, T* output_data) { + TfLiteIntArray* input_dims = IntArrayFromInts(input_dims_data); + TfLiteIntArray* output_dims = IntArrayFromInts(expected_dims); + const int output_count = ElementCount(*output_dims); + + TfLiteTensor tensors[] = { + CreateTensor(input_data, input_dims), + CreateTensor(output_data, output_dims), + }; + constexpr int tensors_count = std::extent::value; + ExecuteEluTest(tensors, tensors_count); + + constexpr float kTolerance = 1e-5; + for (int i = 0; i < output_count; i++) { + TF_LITE_MICRO_EXPECT_NEAR(expected_data[i], output_data[i], kTolerance); + } +} + +template +void TestEluQuantized(const TestEluParams& params, int* input_dims_data, + const float* input_data, int* expected_dims, + const float* expected_data, float* output_data) { + TfLiteIntArray* input_dims = IntArrayFromInts(input_dims_data); + TfLiteIntArray* output_dims = IntArrayFromInts(expected_dims); + const int output_count = ElementCount(*output_dims); + + const float scale = ScaleFromMinMax(params.data_min, params.data_max); + const int zero_point = + ZeroPointFromMinMax(params.data_min, params.data_max); + + TfLiteTensor tensors[] = { + CreateQuantizedTensor(input_data, params.input_data, input_dims, scale, + zero_point), + CreateQuantizedTensor(params.output_data, output_dims, scale, zero_point), 
+ }; + constexpr int kTensorsCount = std::extent::value; + + ExecuteEluTest(tensors, kTensorsCount); + + Dequantize(params.output_data, output_count, scale, zero_point, output_data); + const float kTolerance = params.tolerance; + for (int i = 0; i < output_count; i++) { + TF_LITE_MICRO_EXPECT_NEAR(expected_data[i], output_data[i], kTolerance); + } +} + +} // namespace +} // namespace testing +} // namespace tflite + +TF_LITE_MICRO_TESTS_BEGIN + +TF_LITE_MICRO_TEST(FloatActivationsOpTestElu) { + int kDims[] = {4, 1, 2, 4, 1}; + constexpr float kInput[] = { + 0, -6, 2, -4, // + 3, -2, 10, -0.1, // + }; + constexpr float kExpect[] = { + 0.0, -0.997521, 2.0, -0.981684, // + 3.0, -0.864665, 10.0, -0.0951626, // + }; + constexpr int kOutputCount = std::extent::value; + float output_data[kOutputCount]; + + tflite::testing::TestElu(kDims, kInput, kDims, kExpect, output_data); +} + +TF_LITE_MICRO_TEST(QuantizedActivationsOpTestEluInt8) { + int kDims[] = {4, 1, 2, 4, 1}; + constexpr float kInput[] = { + 0, -6, 2, -4, // + 3, -2, 6, -0.1, // + }; + constexpr float kExpect[] = { + 0, -1.0, 2.0, -1, // + 3.0, -0.875, 6.0, -0.125, // + }; + constexpr int kOutputCount = std::extent::value; + float output_data[kOutputCount]; + + // setup quantization storage and parameters + int8_t q_output_data[kOutputCount]; + int8_t q_input_data[kOutputCount]; + constexpr float kMin = -1; + constexpr float kMax = 127.f / 128.f; + tflite::testing::TestEluParams params = {}; + params.data_min = 8 * kMin; + params.data_max = 8 * kMax; + params.input_data = q_input_data; + params.output_data = q_output_data; + params.tolerance = tflite::testing::kQuantizedTolerance; + + tflite::testing::TestEluQuantized(params, kDims, kInput, kDims, kExpect, + output_data); +} + +TF_LITE_MICRO_TESTS_END diff --git a/tensorflow/lite/micro/kernels/embedding_lookup.cc b/tensorflow/lite/micro/kernels/embedding_lookup.cc new file mode 100644 index 0000000..77ac0e0 --- /dev/null +++ 
b/tensorflow/lite/micro/kernels/embedding_lookup.cc @@ -0,0 +1,213 @@ +/* Copyright 2023 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +// Ops that looks up items from matrix. +// +// Input: +// Tensor[0]: Row numbers to lookup, dim.size == 1, int32 +// Tensor[1]: 2-dimensional matrix of multi-dimensional items +// dim.size >= 2, all items are INT8 or FLOAT32. +// first dimension is row, second dimension is column. +// +// Output: +// Output.dim[0] == Tensor[0].dim[0], num of lookups +// Output.dim[1] == Tensor[1].dim[1], num of items per row +// Each item in output is a raw bytes copy of the corresponding item in input, +// or a dequantized value in the case of a INT8 input. +// When indices are out of bound, the ops will not succeed. 
+// + +#include "tensorflow/lite/c/common.h" +#include "tensorflow/lite/kernels/internal/types.h" +#include "tensorflow/lite/kernels/kernel_util.h" +#include "tensorflow/lite/micro/kernels/kernel_util.h" +#include "tensorflow/lite/micro/micro_log.h" +#include "tensorflow/lite/micro/micro_utils.h" + +namespace tflite { +namespace { + +constexpr int kInputTensor_0 = 0; +constexpr int kInputTensor_1 = 1; +constexpr int kOutputTensor = 0; + +struct OpData { + float scale; // quantization scale for tensor 1 + size_t num_columns; // number of columns after flattening tensor 1 into 2D +}; + +TfLiteStatus CalculateOpData(TfLiteContext* context, TfLiteNode* node, + const TfLiteTensor* tensor_1, + const TfLiteTensor* output) { + node->user_data = context->AllocatePersistentBuffer(context, sizeof(OpData)); + OpData* op_data = static_cast(node->user_data); + TF_LITE_ENSURE(context, op_data != nullptr); + + if (tensor_1->type == kTfLiteInt8 && output->type == kTfLiteFloat32) { + TF_LITE_ENSURE_EQ(context, tensor_1->params.zero_point, 0); + op_data->scale = tensor_1->params.scale; + } + + op_data->num_columns = NumElements(tensor_1) / tensor_1->dims->data[0]; + + return kTfLiteOk; +} + +TfLiteStatus Prepare(TfLiteContext* context, TfLiteNode* node) { + TF_LITE_ENSURE_EQ(context, NumInputs(node), 2); + TF_LITE_ENSURE_EQ(context, NumOutputs(node), 1); + + MicroContext* micro_context = GetMicroContext(context); + + TfLiteTensor* lookup = + micro_context->AllocateTempInputTensor(node, kInputTensor_0); + TF_LITE_ENSURE(context, lookup != nullptr); + TF_LITE_ENSURE_EQ(context, NumDimensions(lookup), 1); + TF_LITE_ENSURE_EQ(context, lookup->type, kTfLiteInt32); + + TfLiteTensor* value = + micro_context->AllocateTempInputTensor(node, kInputTensor_1); + TF_LITE_ENSURE(context, value != nullptr); + TF_LITE_ENSURE(context, NumDimensions(value) >= 2); + TF_LITE_ENSURE(context, + value->type == kTfLiteFloat32 || value->type == kTfLiteInt8); + + TfLiteTensor* output = + 
micro_context->AllocateTempOutputTensor(node, kOutputTensor); + TF_LITE_ENSURE(context, output != nullptr); + if (value->type == kTfLiteFloat32) { + TF_LITE_ENSURE(context, output->type == kTfLiteFloat32); + } else { + TF_LITE_ENSURE( + context, output->type == kTfLiteFloat32 || output->type == kTfLiteInt8); + } + + // make sure output dimensions size can hold the new dimension data + TF_LITE_ENSURE(context, output->dims->size >= NumDimensions(value)); + // make the output tensor dimensions mutable + TfLiteEvalTensor* output_eval = + tflite::micro::GetEvalOutput(context, node, kOutputTensor); + TF_LITE_ENSURE_OK(context, tflite::micro::CreateWritableTensorDimsWithCopy( + context, output, output_eval)); + // set the new output dimensions + output->dims->data[0] = SizeOfDimension(lookup, 0); + output->dims->data[1] = SizeOfDimension(value, 1); + for (int i = 2; i < NumDimensions(value); i++) { + output->dims->data[i] = SizeOfDimension(value, i); + } + // check the new output dimensions do not exceed the output data buffer size + size_t new_dims_size = NumElements(output) * TfLiteTypeGetSize(output->type); + TF_LITE_ENSURE(context, new_dims_size <= output->bytes); + + TF_LITE_ENSURE_OK(context, CalculateOpData(context, node, value, output)); + + micro_context->DeallocateTempTfLiteTensor(lookup); + micro_context->DeallocateTempTfLiteTensor(value); + micro_context->DeallocateTempTfLiteTensor(output); + + return kTfLiteOk; +} + +TfLiteStatus EvalSimple(const OpData& op_data, const TfLiteEvalTensor* lookup, + const TfLiteEvalTensor* value, + TfLiteEvalTensor* output) { + const int num_rows = value->dims->data[0]; + if (num_rows == 0) { + // Propagate empty tensor if input is empty + return kTfLiteOk; + } + const size_t row_bytes = op_data.num_columns * TfLiteTypeGetSize(value->type); + + int8_t* output_raw = tflite::micro::GetTensorData(output); + const int8_t* value_raw = tflite::micro::GetTensorData(value); + const int32_t* lookup_data = 
tflite::micro::GetTensorData(lookup); + for (int i = 0; i < lookup->dims->data[0]; i++) { + int32_t idx = lookup_data[i]; + if (idx >= num_rows || idx < 0) { + MicroPrintf( + "EMBEDDING_LOOKUP: index out of bounds. " + "Got %d, and bounds are [0, %d]", + idx, num_rows - 1); + return kTfLiteError; + } else { + std::memcpy(output_raw + i * row_bytes, value_raw + idx * row_bytes, + row_bytes); + } + } + + return kTfLiteOk; +} + +TfLiteStatus EvalHybrid(const OpData& op_data, const TfLiteEvalTensor* lookup, + const TfLiteEvalTensor* value, + TfLiteEvalTensor* output) { + const int num_rows = value->dims->data[0]; + const size_t num_colums = op_data.num_columns; + + float* output_ptr = tflite::micro::GetTensorData(output); + const int8_t* value_ptr = tflite::micro::GetTensorData(value); + const int32_t* lookup_data = tflite::micro::GetTensorData(lookup); + + for (int i = 0; i < lookup->dims->data[0]; i++) { + int32_t idx = lookup_data[i]; + if (idx >= num_rows || idx < 0) { + MicroPrintf( + "EMBEDDING_LOOKUP: index out of bounds. " + "Got %d, and bounds are [0, %d]", + idx, num_rows - 1); + return kTfLiteError; + } else { + // Dequantize embedding values. 
+ Dequantize(&value_ptr[idx * num_colums], num_colums, op_data.scale, 0, + &output_ptr[i * num_colums]); + } + } + + return kTfLiteOk; +} + +TfLiteStatus Eval(TfLiteContext* context, TfLiteNode* node) { + const TfLiteEvalTensor* lookup = + tflite::micro::GetEvalInput(context, node, kInputTensor_0); + const TfLiteEvalTensor* value = + tflite::micro::GetEvalInput(context, node, kInputTensor_1); + TfLiteEvalTensor* output = + tflite::micro::GetEvalOutput(context, node, kOutputTensor); + + OpData& op_data = *static_cast(node->user_data); + + switch (value->type) { + case kTfLiteFloat32: + return EvalSimple(op_data, lookup, value, output); + case kTfLiteInt8: + if (output->type == kTfLiteFloat32) { + return EvalHybrid(op_data, lookup, value, output); + } else { + return EvalSimple(op_data, lookup, value, output); + } + default: + MicroPrintf("EMBEDDING_LOOKUP only supports FLOAT32 and INT8, got %s.", + TfLiteTypeGetName(output->type)); + return kTfLiteError; + } +} + +} // namespace + +TFLMRegistration Register_EMBEDDING_LOOKUP() { + return tflite::micro::RegisterOp(nullptr, Prepare, Eval); +} + +} // namespace tflite diff --git a/tensorflow/lite/micro/kernels/embedding_lookup_test.cc b/tensorflow/lite/micro/kernels/embedding_lookup_test.cc new file mode 100644 index 0000000..c94cebb --- /dev/null +++ b/tensorflow/lite/micro/kernels/embedding_lookup_test.cc @@ -0,0 +1,278 @@ +/* Copyright 2023 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/ + +#include +#include +#include + +#include "tensorflow/lite/c/builtin_op_data.h" +#include "tensorflow/lite/c/common.h" +#include "tensorflow/lite/micro/kernels/kernel_runner.h" +#include "tensorflow/lite/micro/test_helpers.h" +#include "tensorflow/lite/micro/testing/micro_test.h" + +namespace tflite { +namespace testing { +namespace { + +constexpr float kTestTolerance = 7.41e-03; +constexpr int kNumInputs = 2; +constexpr int kNumOutputs = 1; +constexpr int kInputTensorIndex_0 = 0; +constexpr int kInputTensorIndex_1 = 1; +constexpr int kOutputTensorIndex = 2; + +// min/max are used to compute scale, zero-point is 0 +template +struct TestEmbeddingLookupParams { + // quantization parameters + float data_min; // input data minimum value + float data_max; // input data maximum value + int8_t input_data[kInputSize]; // quantized input storage +}; + +void ExecuteEmbeddingLookupTest(TfLiteTensor* tensors, int tensors_count) { + int kInputArrayData[] = {kNumInputs, kInputTensorIndex_0, + kInputTensorIndex_1}; + TfLiteIntArray* inputs_array = IntArrayFromInts(kInputArrayData); + int kOutputArrayData[] = {kNumOutputs, kOutputTensorIndex}; + TfLiteIntArray* outputs_array = IntArrayFromInts(kOutputArrayData); + + const TFLMRegistration registration = tflite::Register_EMBEDDING_LOOKUP(); + micro::KernelRunner runner(registration, tensors, tensors_count, inputs_array, + outputs_array, nullptr); + + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, runner.InitAndPrepare()); + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, runner.Invoke()); +} + +template +void TestEmbeddingLookupQuantized(TestEmbeddingLookupParams& params, + int* input_dims_data[kNumInputs], + const int32_t* input_data_0, + const float* input_data_1, int* expected_dims, + const float* expected_data, + float* output_data) { + TfLiteIntArray* input_dims_0 = IntArrayFromInts(input_dims_data[0]); + TfLiteIntArray* input_dims_1 = 
IntArrayFromInts(input_dims_data[1]); + TfLiteIntArray* output_dims = IntArrayFromInts(expected_dims); + const int output_count = ElementCount(*output_dims); + + const float scale = + SymmetricScaleFromMinMax(params.data_min, params.data_max); + + TfLiteTensor tensors[] = { + CreateTensor(input_data_0, input_dims_0), + CreateQuantizedTensor(input_data_1, params.input_data, input_dims_1, + scale, 0), + CreateTensor(output_data, output_dims), + }; + constexpr int tensors_count = std::extent::value; + ExecuteEmbeddingLookupTest(tensors, tensors_count); + + // check output data against expected + for (int i = 0; i < output_count; i++) { + TF_LITE_MICRO_EXPECT_NEAR(expected_data[i], output_data[i], kTestTolerance); + } + + // check output dimensions (relocated) against original dimensions + TF_LITE_MICRO_EXPECT_EQ(output_dims->size, + tensors[kOutputTensorIndex].dims->size); + for (int i = 0; i < output_dims->size; i++) { + TF_LITE_MICRO_EXPECT_EQ(output_dims->data[i], + tensors[kOutputTensorIndex].dims->data[i]); + } +} // namespace + +template +void TestEmbeddingLookup(int* input_dims_data[kNumInputs], + const int32_t* input_data_0, const T* input_data_1, + int* expected_dims, const T* expected_data, + T* output_data) { + TfLiteIntArray* input_dims_0 = IntArrayFromInts(input_dims_data[0]); + TfLiteIntArray* input_dims_1 = IntArrayFromInts(input_dims_data[1]); + TfLiteIntArray* output_dims = IntArrayFromInts(expected_dims); + const int output_count = ElementCount(*output_dims); + + TfLiteTensor tensors[] = { + CreateTensor(input_data_0, input_dims_0), + CreateTensor(input_data_1, input_dims_1), + CreateTensor(output_data, output_dims), + }; + constexpr int tensors_count = std::extent::value; + ExecuteEmbeddingLookupTest(tensors, tensors_count); + + // check output data against expected + for (int i = 0; i < output_count; i++) { + TF_LITE_MICRO_EXPECT_NEAR(expected_data[i], output_data[i], kTestTolerance); + } + + // check output dimensions (relocated) against original 
dimensions + TF_LITE_MICRO_EXPECT_EQ(output_dims->size, + tensors[kOutputTensorIndex].dims->size); + for (int i = 0; i < output_dims->size; i++) { + TF_LITE_MICRO_EXPECT_EQ(output_dims->data[i], + tensors[kOutputTensorIndex].dims->data[i]); + } +} + +} // namespace +} // namespace testing +} // namespace tflite + +TF_LITE_MICRO_TESTS_BEGIN + +TF_LITE_MICRO_TEST(EmbeddingLookupOpTestSimpleFloat) { + int kInputDims_0[] = {1, 3}; + int kInputDims_1[] = {3, 3, 2, 4}; + int* kInputDims[tflite::testing::kNumInputs] = {kInputDims_0, kInputDims_1}; + int kOutputDims[] = {3, 3, 2, 4}; + + constexpr int32_t kInput_0[] = {1, 0, 2}; + constexpr float kInput_1[] = { + 0.00, 0.01, 0.02, 0.03, 0.10, 0.11, 0.12, 0.13, // Row 0 + 1.00, 1.01, 1.02, 1.03, 1.10, 1.11, 1.12, 1.13, // Row 1 + 2.00, 2.01, 2.02, 2.03, 2.10, 2.11, 2.12, 2.13, // Row 2 + }; + constexpr float kExpect[] = { + 1.00, 1.01, 1.02, 1.03, 1.10, 1.11, 1.12, 1.13, // Row 1 + 0.00, 0.01, 0.02, 0.03, 0.10, 0.11, 0.12, 0.13, // Row 0 + 2.00, 2.01, 2.02, 2.03, 2.10, 2.11, 2.12, 2.13, // Row 2 + }; + constexpr int kOutputCount = std::extent::value; + float output_data[kOutputCount]; + + tflite::testing::TestEmbeddingLookup(kInputDims, kInput_0, kInput_1, + kOutputDims, kExpect, output_data); +} + +TF_LITE_MICRO_TEST(HybridEmbeddingLookupHybridOpTestSimple2DTestInt8) { + int kInputDims_0[] = {1, 3}; + int kInputDims_1[] = {2, 3, 8}; + int* kInputDims[tflite::testing::kNumInputs] = {kInputDims_0, kInputDims_1}; + int kOutputDims[] = {2, 3, 8}; + + constexpr int32_t kInput_0[] = {1, 0, 2}; + constexpr float kInput_1[] = { + 0.00, 0.01, 0.02, 0.03, 0.10, 0.11, 0.12, 0.13, // Row 0 + 1.00, -1.01, 1.02, 1.03, 1.10, 1.11, 1.12, 1.13, // Row 1 + 2.00, 2.01, 2.02, 2.03, 2.10, 2.11, 2.12, 2.13, // Row 2 + }; + constexpr int kInputCount_1 = std::extent::value; + constexpr float kExpect[] = { + 1.00, -1.01, 1.02, 1.03, 1.10, 1.11, 1.12, 1.13, // Row 1 + 0.00, 0.01, 0.02, 0.03, 0.10, 0.11, 0.12, 0.13, // Row 0 + 2.00, 2.01, 2.02, 
2.03, 2.10, 2.11, 2.12, 2.13, // Row 2 + }; + constexpr int kOutputCount = std::extent::value; + float output_data[kOutputCount]; + + tflite::testing::TestEmbeddingLookupParams params = {}; + auto minmax = std::minmax_element(std::begin(kInput_1), std::end(kInput_1)); + params.data_max = *minmax.second; + params.data_min = *minmax.first; + + tflite::testing::TestEmbeddingLookupQuantized(params, kInputDims, kInput_0, + kInput_1, kOutputDims, kExpect, + output_data); +} + +TF_LITE_MICRO_TEST(HybridEmbeddingLookupHybridOpTestSimple3DTestInt8) { + int kInputDims_0[] = {1, 3}; + int kInputDims_1[] = {3, 3, 2, 4}; + int* kInputDims[tflite::testing::kNumInputs] = {kInputDims_0, kInputDims_1}; + int kOutputDims[] = {3, 3, 2, 4}; + + constexpr int32_t kInput_0[] = {1, 0, 2}; + constexpr float kInput_1[] = { + 0.00, 0.01, 0.02, 0.03, 0.10, 0.11, 0.12, 0.13, // Row 0 + 1.00, -1.01, 1.02, 1.03, 1.10, 1.11, 1.12, 1.13, // Row 1 + 2.00, 2.01, 2.02, 2.03, 2.10, 2.11, 2.12, 2.13, // Row 2 + }; + constexpr int kInputCount_1 = std::extent::value; + constexpr float kExpect[] = { + 1.00, -1.01, 1.02, 1.03, 1.10, 1.11, 1.12, 1.13, // Row 1 + 0.00, 0.01, 0.02, 0.03, 0.10, 0.11, 0.12, 0.13, // Row 0 + 2.00, 2.01, 2.02, 2.03, 2.10, 2.11, 2.12, 2.13, // Row 2 + }; + constexpr int kOutputCount = std::extent::value; + float output_data[kOutputCount]; + + tflite::testing::TestEmbeddingLookupParams params = {}; + auto minmax = std::minmax_element(std::begin(kInput_1), std::end(kInput_1)); + params.data_max = *minmax.second; + params.data_min = *minmax.first; + + tflite::testing::TestEmbeddingLookupQuantized(params, kInputDims, kInput_0, + kInput_1, kOutputDims, kExpect, + output_data); +} + +TF_LITE_MICRO_TEST(HybridEmbeddingLookupHybridOpTestSimple4DTestInt8) { + int kInputDims_0[] = {1, 3}; + int kInputDims_1[] = {4, 3, 2, 2, 2}; + int* kInputDims[tflite::testing::kNumInputs] = {kInputDims_0, kInputDims_1}; + int kOutputDims[] = {4, 3, 2, 2, 2}; + + constexpr int32_t kInput_0[] = {1, 0, 2}; 
+ constexpr float kInput_1[] = { + 0.00, 0.01, 0.02, 0.03, 0.10, 0.11, 0.12, 0.13, // Row 0 + 1.00, -1.01, 1.02, 1.03, 1.10, 1.11, 1.12, 1.13, // Row 1 + 2.00, 2.01, 2.02, 2.03, 2.10, 2.11, 2.12, 2.13, // Row 2 + }; + constexpr int kInputCount_1 = std::extent::value; + constexpr float kExpect[] = { + 1.00, -1.01, 1.02, 1.03, 1.10, 1.11, 1.12, 1.13, // Row 1 + 0.00, 0.01, 0.02, 0.03, 0.10, 0.11, 0.12, 0.13, // Row 0 + 2.00, 2.01, 2.02, 2.03, 2.10, 2.11, 2.12, 2.13, // Row 2 + }; + constexpr int kOutputCount = std::extent::value; + float output_data[kOutputCount]; + + tflite::testing::TestEmbeddingLookupParams params = {}; + auto minmax = std::minmax_element(std::begin(kInput_1), std::end(kInput_1)); + params.data_max = *minmax.second; + params.data_min = *minmax.first; + + tflite::testing::TestEmbeddingLookupQuantized(params, kInputDims, kInput_0, + kInput_1, kOutputDims, kExpect, + output_data); +} + +TF_LITE_MICRO_TEST(EmbeddingLookupOpTestSimpleInt8) { + int kInputDims_0[] = {1, 3}; + int kInputDims_1[] = {3, 3, 2, 4}; + int* kInputDims[tflite::testing::kNumInputs] = {kInputDims_0, kInputDims_1}; + int kOutputDims[] = {3, 3, 2, 4}; + + constexpr int32_t kInput_0[] = {1, 0, 2}; + constexpr int8_t kInput_1[] = { + 0, 1, 2, 3, 10, 11, 12, 13, // Row 0 + 100, 101, 102, 103, 110, 111, 112, 113, // Row 1 + -56, -55, -54, -53, -46, -45, -44, -43, // Row 2 + }; + constexpr int8_t kExpect[] = { + 100, 101, 102, 103, 110, 111, 112, 113, // Row 1 + 0, 1, 2, 3, 10, 11, 12, 13, // Row 0 + -56, -55, -54, -53, -46, -45, -44, -43, // Row 2 + }; + constexpr int kOutputCount = std::extent::value; + int8_t output_data[kOutputCount]; + + tflite::testing::TestEmbeddingLookup(kInputDims, kInput_0, kInput_1, + kOutputDims, kExpect, output_data); +} + +TF_LITE_MICRO_TESTS_END diff --git a/tensorflow/lite/micro/kernels/ethos_u/README.md b/tensorflow/lite/micro/kernels/ethos_u/README.md new file mode 100644 index 0000000..30978c4 --- /dev/null +++ 
b/tensorflow/lite/micro/kernels/ethos_u/README.md
@@ -0,0 +1,78 @@

# Info
Arm(R) Ethos(TM)-U is a new class of machine learning processors, called a
microNPU, specifically designed to accelerate ML inference in area-constrained
embedded and IoT devices. This readme briefly describes how to integrate
Ethos-U-related hardware and software into TFLM. See also
[Ethos-U ML Evaluation kit examples](https://review.mlplatform.org/plugins/gitiles/ml/ethos-u/ml-embedded-evaluation-kit).

To enable the Ethos-U software stack, add `CO_PROCESSOR=ethos_u` to the make
command. Use ETHOSU_ARCH to specify the architecture. See examples below.

## Requirements:
- Armclang 6.14 or later
- GCC 10.2.1 or later

## Ethos-U custom operator
The TFLM runtime will dispatch workloads to the Ethos-U when it encounters an
Ethos-U custom op in the tflite file. See the ASCII art example below.
The Ethos-U custom op is added by a tool called Ethos-U Vela and contains the
information the Ethos-U hardware needs to execute the workload. More info in the
[Vela repository](https://review.mlplatform.org/plugins/gitiles/ml/ethos-u/ethos-u-vela).

```
      | tensor0
      |
      v
+------------+
|  ethos-u   |
| custom op  |
+------------+
      |
      | tensor1
      |
      v
+-----------+
| transpose |
|           |
+----|------+
     |
     | tensor2
     |
     v
```

Note that the `ethosu_init()` API of the Ethos-U driver needs to be called at
startup, before calling the TFLM API. More info in the
[Ethos-U driver repo](https://review.mlplatform.org/plugins/gitiles/ml/ethos-u/ethos-u-core-driver).

For even more info regarding Vela and Ethos-U, check out the
[Ethos-U landing page](https://review.mlplatform.org/plugins/gitiles/ml/ethos-u/ethos-u/+/refs/heads/master).

# Some examples of compiling a binary and running a network with Ethos-U support.
In order to run a test with Ethos-U55 enabled, a platform with corresponding
hardware support is required.
One such platform is the fixed virtual platform
(FVP) based on Arm Corstone-300 software. See the
[Corstone-300 readme](https://github.com/tensorflow/tflite-micro/tree/main/tensorflow/lite/micro/cortex_m_corstone_300/README.md)
for more info.

On top of that, the .tflite model needs to be modified according to the
subchapter "Ethos-U custom operator" above.

The log level of the Ethos-U driver can be set in the build command. For
example: ETHOSU_LOG_SEVERITY=ETHOSU_LOG_INFO.

## Example using network tester
See tensorflow/lite/micro/examples/network_tester/README.md for more info.

```
make -f tensorflow/lite/micro/tools/make/Makefile network_tester_test CO_PROCESSOR=ethos_u ETHOSU_ARCH=u55 TARGET=cortex_m_generic TARGET_ARCH=cortex-m55 microlite
```

For the Arm Corstone-300 target, ETHOSU_ARCH is defined in
cortex_m_corstone_300_makefile.inc so it doesn't need to be defined on the
command line.

```
make -f tensorflow/lite/micro/tools/make/Makefile network_tester_test CO_PROCESSOR=ethos_u TARGET=cortex_m_corstone_300 TARGET_ARCH=cortex-m55 test_network_tester_test NETWORK_MODEL=path/to/network_model.h INPUT_DATA=path/to/input_data.h OUTPUT_DATA=path/to/expected_output_data.h

make -f tensorflow/lite/micro/tools/make/Makefile network_tester_test CO_PROCESSOR=ethos_u TARGET=cortex_m_corstone_300 TARGET_ARCH=cortex-m55 test_network_tester_test
```
diff --git a/tensorflow/lite/micro/kernels/ethos_u/ethosu.cc b/tensorflow/lite/micro/kernels/ethos_u/ethosu.cc
new file mode 100644
index 0000000..b167b6b
--- /dev/null
+++ b/tensorflow/lite/micro/kernels/ethos_u/ethosu.cc
@@ -0,0 +1,173 @@
/* Copyright 2022 The TensorFlow Authors. All Rights Reserved.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
+You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#include + +#include "flatbuffers/flexbuffers.h" +#include "tensorflow/lite/c/common.h" +#include "tensorflow/lite/micro/kernels/kernel_util.h" +#include "tensorflow/lite/micro/micro_context.h" +#include "tensorflow/lite/micro/micro_log.h" + +namespace tflite { +namespace { + +constexpr uint8_t CO_TYPE_ETHOSU = 1; + +struct OpData { + int cms_data_size; + int base_addr_idx; + int base_addr_size_idx; +}; + +void* Init(TfLiteContext* context, const char* buffer, size_t length) { + TFLITE_DCHECK(context->AllocatePersistentBuffer != nullptr); + return context->AllocatePersistentBuffer(context, sizeof(OpData)); +} + +TfLiteStatus Prepare(TfLiteContext* context, TfLiteNode* node) { + TFLITE_DCHECK(context != nullptr); + TF_LITE_ENSURE(context, node->inputs->size > 0); + TFLITE_DCHECK(node->user_data != nullptr); + TF_LITE_ENSURE(context, node->custom_initial_data_size > 0); + + OpData* data = static_cast(node->user_data); + int num_base_addr = node->inputs->size + node->outputs->size; + + // Request arrays for the base address pointers and sizes. + TF_LITE_ENSURE_STATUS(context->RequestScratchBufferInArena( + context, num_base_addr * sizeof(uint64_t), &data->base_addr_idx)); + TF_LITE_ENSURE_STATUS(context->RequestScratchBufferInArena( + context, num_base_addr * sizeof(size_t), &data->base_addr_size_idx)); + + // Get command stream data size. 
+ MicroContext* micro_context = GetMicroContext(context); + TfLiteTensor* tensor = micro_context->AllocateTempInputTensor(node, 0); + data->cms_data_size = tensor->bytes; + micro_context->DeallocateTempTfLiteTensor(tensor); + return kTfLiteOk; +} + +TfLiteStatus Eval(TfLiteContext* context, TfLiteNode* node) { + TFLITE_DCHECK(node->user_data != nullptr); + TFLITE_DCHECK(context != nullptr); + TFLITE_DCHECK(context->GetScratchBuffer != nullptr); + + // Get base addresses. + TfLiteEvalTensor* tensor; + int i = 0; + int num_tensors = 0; + void* cms_data; + uint8_t co_type; + int result; + const OpData* data = static_cast(node->user_data); + uint64_t* base_addrs = static_cast( + context->GetScratchBuffer(context, data->base_addr_idx)); + size_t* base_addrs_size = static_cast( + context->GetScratchBuffer(context, data->base_addr_size_idx)); + + const uint8_t* custom_data = + static_cast(node->custom_initial_data); + auto root = flexbuffers::GetRoot(custom_data, node->custom_initial_data_size); + co_type = root.AsInt8(); + if (co_type != CO_TYPE_ETHOSU) { + MicroPrintf("CO_TYPE != ETHOSU"); + return kTfLiteError; + } + + // Get command stream data address. + tensor = context->GetEvalTensor(context, node->inputs->data[0]); + cms_data = reinterpret_cast(tensor->data.uint8); + + // Get addresses to weights/scratch/input data. + for (i = 1; i < node->inputs->size; ++i) { + tensor = context->GetEvalTensor(context, node->inputs->data[i]); + base_addrs[num_tensors] = + static_cast(reinterpret_cast(tensor->data.uint8)); + size_t byte_size = 1; + for (int k = 0; k < tensor->dims->size; k++) { + byte_size = byte_size * tensor->dims->data[k]; + } + base_addrs_size[num_tensors] = byte_size; + num_tensors++; + } + + // Get addresses to output data. 
+ for (i = 0; i < node->outputs->size; ++i) { + tensor = context->GetEvalTensor(context, node->outputs->data[i]); + base_addrs[num_tensors] = + static_cast(reinterpret_cast(tensor->data.uint8)); + size_t byte_size = 1; + for (int k = 0; k < tensor->dims->size; k++) { + byte_size = byte_size * tensor->dims->data[k]; + } + base_addrs_size[num_tensors] = byte_size; + num_tensors++; + } + + // When Vela optimizes a tflite file it will assign the tensors like this: + // + // +-------+------------------------+ +--------+-------------+ + // | INPUT | Description | | OUTPUT | Description | + // +-------+------------------------+ +--------+-------------+ + // | 0 | Ethos-U command stream | | 0..m | Outputs | + // | 1 | TFLM model | +--------+-------------+ + // | 2 | TFLM arena | + // | 3 | Ethos-U fast scratch | + // | 4..n | Inputs | + // +-------+------------------------+ + // + // This code will assign the NPU base addresses like this: + // + // +--------------+----------------------+ + // | Base address | Description | + // +--------------+----------------------+ + // | 0 | TFLM model | + // | 1 | TFLM arena | + // | 2 | Ethos-U fast scratch | + // | 3..n | Input tensors | + // | n..m | Output tensors | + // +--------------+----------------------+ + // + // The number of base address will be limited to 8. + // + // NOTE! The command stream produced by Vela will access the IFM and OFM + // buffers using base address 1. This means that it is not possible to point + // the input and output tensors outside of the TFLM arena. 
+ num_tensors = std::min(num_tensors, 8); + + struct ethosu_driver* drv = ethosu_reserve_driver(); + result = ethosu_invoke_v3(drv, cms_data, data->cms_data_size, base_addrs, + base_addrs_size, num_tensors, + GetMicroContext(context)->external_context()); + ethosu_release_driver(drv); + + if (-1 == result) { + return kTfLiteError; + } else { + return kTfLiteOk; + } +} + +} // namespace + +TFLMRegistration* Register_ETHOSU() { + static TFLMRegistration r = tflite::micro::RegisterOp(Init, Prepare, Eval); + return &r; +} + +const char* GetString_ETHOSU() { return "ethos-u"; } + +} // namespace tflite diff --git a/tensorflow/lite/micro/kernels/ethosu.cc b/tensorflow/lite/micro/kernels/ethosu.cc new file mode 100644 index 0000000..5620359 --- /dev/null +++ b/tensorflow/lite/micro/kernels/ethosu.cc @@ -0,0 +1,27 @@ +/* Copyright 2020 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/ + +// +// This is a stub file for non-Ethos platforms +// +#include "tensorflow/lite/micro/micro_common.h" + +namespace tflite { + +TFLMRegistration* Register_ETHOSU() { return nullptr; } + +const char* GetString_ETHOSU() { return ""; } + +} // namespace tflite diff --git a/tensorflow/lite/micro/kernels/ethosu.h b/tensorflow/lite/micro/kernels/ethosu.h new file mode 100644 index 0000000..5b86303 --- /dev/null +++ b/tensorflow/lite/micro/kernels/ethosu.h @@ -0,0 +1,28 @@ +/* Copyright 2020 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ +#ifndef TENSORFLOW_LITE_MICRO_KERNELS_ETHOSU_H_ +#define TENSORFLOW_LITE_MICRO_KERNELS_ETHOSU_H_ + +#include "tensorflow/lite/c/common.h" + +namespace tflite { + +TFLMRegistration* Register_ETHOSU(); + +const char* GetString_ETHOSU(); + +} // namespace tflite + +#endif // TENSORFLOW_LITE_MICRO_KERNELS_ETHOSU_H_ diff --git a/tensorflow/lite/micro/kernels/exp.cc b/tensorflow/lite/micro/kernels/exp.cc new file mode 100644 index 0000000..1a2e00c --- /dev/null +++ b/tensorflow/lite/micro/kernels/exp.cc @@ -0,0 +1,79 @@ +/* Copyright 2021 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. 
+You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#include "tensorflow/lite/kernels/internal/reference/exp.h" + +#include "tensorflow/lite/c/common.h" +#include "tensorflow/lite/kernels/internal/tensor_ctypes.h" +#include "tensorflow/lite/kernels/kernel_util.h" +#include "tensorflow/lite/micro/kernels/kernel_util.h" +#include "tensorflow/lite/micro/micro_log.h" + +namespace tflite { +namespace { + +constexpr int kInputTensor = 0; +constexpr int kOutputTensor = 0; + +TfLiteStatus Prepare(TfLiteContext* context, TfLiteNode* node) { + MicroContext* micro_context = GetMicroContext(context); + + TF_LITE_ENSURE_EQ(context, NumInputs(node), 1); + TF_LITE_ENSURE_EQ(context, NumOutputs(node), 1); + TfLiteTensor* input = + micro_context->AllocateTempInputTensor(node, kInputTensor); + TF_LITE_ENSURE(context, input != nullptr); + TfLiteTensor* output = + micro_context->AllocateTempOutputTensor(node, kOutputTensor); + TF_LITE_ENSURE(context, output != nullptr); + TF_LITE_ENSURE_TYPES_EQ(context, input->type, kTfLiteFloat32); + TF_LITE_ENSURE_TYPES_EQ(context, output->type, input->type); + TF_LITE_ENSURE_EQ(context, output->bytes, input->bytes); + TF_LITE_ENSURE_EQ(context, output->dims->size, input->dims->size); + for (int i = 0; i < output->dims->size; ++i) { + TF_LITE_ENSURE_EQ(context, output->dims->data[i], input->dims->data[i]); + } + micro_context->DeallocateTempTfLiteTensor(input); + micro_context->DeallocateTempTfLiteTensor(output); + + return kTfLiteOk; +} + +TfLiteStatus Eval(TfLiteContext* context, TfLiteNode* node) { + const 
TfLiteEvalTensor* input = + tflite::micro::GetEvalInput(context, node, kInputTensor); + TfLiteEvalTensor* output = + tflite::micro::GetEvalOutput(context, node, kOutputTensor); + int flat_size = MatchingFlatSize(tflite::micro::GetTensorShape(input), + tflite::micro::GetTensorShape(output)); + + if (input->type == kTfLiteFloat32) { + reference_ops::Exp(tflite::micro::GetTensorData(input), + static_cast(flat_size), + tflite::micro::GetTensorData(output)); + } else { + MicroPrintf("Type %s (%d) currently not supported by Exp.", + TfLiteTypeGetName(input->type), input->type); + return kTfLiteError; + } + return kTfLiteOk; +} +} // namespace + +TFLMRegistration Register_EXP() { + return tflite::micro::RegisterOp(nullptr, Prepare, Eval); +} + +} // namespace tflite diff --git a/tensorflow/lite/micro/kernels/exp_test.cc b/tensorflow/lite/micro/kernels/exp_test.cc new file mode 100644 index 0000000..ff18106 --- /dev/null +++ b/tensorflow/lite/micro/kernels/exp_test.cc @@ -0,0 +1,76 @@ +/* Copyright 2021 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/ +#include + +#include "tensorflow/lite/c/builtin_op_data.h" +#include "tensorflow/lite/c/common.h" +#include "tensorflow/lite/micro/kernels/kernel_runner.h" +#include "tensorflow/lite/micro/test_helpers.h" +#include "tensorflow/lite/micro/testing/micro_test.h" + +namespace tflite { +namespace testing { +namespace { + +void TestExp(int* input_dims_data, const float* input_data, + const float* expected_output_data, float* output_data) { + TfLiteIntArray* input_dims = IntArrayFromInts(input_dims_data); + TfLiteIntArray* output_dims = IntArrayFromInts(input_dims_data); + const int output_dims_count = ElementCount(*output_dims); + constexpr int inputs_size = 1; + constexpr int outputs_size = 1; + constexpr int tensors_size = inputs_size + outputs_size; + TfLiteTensor tensors[tensors_size] = { + CreateTensor(input_data, input_dims), + CreateTensor(output_data, output_dims), + }; + + int inputs_array_data[] = {1, 0}; + TfLiteIntArray* inputs_array = IntArrayFromInts(inputs_array_data); + int outputs_array_data[] = {1, 1}; + TfLiteIntArray* outputs_array = IntArrayFromInts(outputs_array_data); + + const TFLMRegistration registration = Register_EXP(); + micro::KernelRunner runner(registration, tensors, tensors_size, inputs_array, + outputs_array, + /*builtin_data=*/nullptr); + + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, runner.InitAndPrepare()); + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, runner.Invoke()); + for (int i = 0; i < output_dims_count; ++i) { + TF_LITE_MICRO_EXPECT_NEAR(expected_output_data[i], output_data[i], 1e-5f); + } +} +} // namespace +} // namespace testing +} // namespace tflite + +TF_LITE_MICRO_TESTS_BEGIN + +TF_LITE_MICRO_TEST(SingleDim) { + constexpr int kInputSize = 7; + float output_data[kInputSize]; + int input_dims[] = {2, 1, kInputSize}; + const float input_values[kInputSize] = {0.0f, 1.0f, -1.0f, 100.0f, + -100.0f, 0.01f, -0.01f}; + float golden[kInputSize]; + for (int i = 0; i < 
kInputSize; ++i) { + golden[i] = std::exp(input_values[i]); + } + + tflite::testing::TestExp(input_dims, input_values, golden, output_data); +} + +TF_LITE_MICRO_TESTS_END diff --git a/tensorflow/lite/micro/kernels/expand_dims.cc b/tensorflow/lite/micro/kernels/expand_dims.cc new file mode 100644 index 0000000..0c4c6ff --- /dev/null +++ b/tensorflow/lite/micro/kernels/expand_dims.cc @@ -0,0 +1,149 @@ +/* Copyright 2021 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#include "tensorflow/lite/c/common.h" +#include "tensorflow/lite/kernels/internal/tensor_ctypes.h" +#include "tensorflow/lite/kernels/kernel_util.h" +#include "tensorflow/lite/micro/kernels/kernel_util.h" +#include "tensorflow/lite/micro/micro_log.h" +#include "tensorflow/lite/micro/micro_utils.h" + +namespace tflite { +namespace { + +constexpr int kInputTensor = 0; +constexpr int kAxisTensor = 1; +constexpr int kOutputTensor = 0; + +TfLiteStatus GetAxisValueFromTensor(TfLiteContext* context, + const TfLiteTensor* axis, + int32_t* axis_value) { + const int axis_dims = (tflite::GetTensorShape(axis)).DimensionsCount(); + if (axis_dims > 1) { + MicroPrintf("Axis has only one element for Expand_Dims.", axis_dims); + return kTfLiteError; + } + + if (kTfLiteInt32 == (axis->type)) { + const int32_t* axis_ptr = tflite::GetTensorData(axis); + *axis_value = axis_ptr[0]; + return kTfLiteOk; + } else { + 
MicroPrintf("Axis type %s (%d) not supported by Expand_Dims.", + TfLiteTypeGetName(axis->type), axis->type); + return kTfLiteError; + } +} + +// Verifies that the output tensor's dimension shape is equivalent to inserting +// a dimension of length 1 at the dimension index axis of input's shape as +// defined in https://www.tensorflow.org/api_docs/python/tf/expand_dims. +TfLiteStatus VerifyTensorDim(TfLiteContext* context, const TfLiteTensor* input, + const TfLiteTensor* axis_tensor, + const TfLiteTensor* output) { + int32_t axis_value = 0; + TF_LITE_ENSURE_OK(context, + GetAxisValueFromTensor(context, axis_tensor, &axis_value)); + + tflite::RuntimeShape input_shape = tflite::GetTensorShape(input); + if (axis_value < 0) { + axis_value = input_shape.DimensionsCount() + 1 + axis_value; + } + TF_LITE_ENSURE(context, axis_value <= input_shape.DimensionsCount()); + + // TFLM only supports fixed dimension tensor and assumes that the output shape + // is fully specified in the model. As such, TFLM directly use the pointer to + // the dimension array in the model buffer. 
+ tflite::RuntimeShape output_shape = tflite::GetTensorShape(output); + + TF_LITE_ENSURE(context, output_shape.DimensionsCount() == + input_shape.DimensionsCount() + 1); + for (int i = 0; i < output_shape.DimensionsCount(); ++i) { + if (i < axis_value) { + TF_LITE_ENSURE(context, output_shape.Dims(i) == input_shape.Dims(i)); + } else if (i == axis_value) { + TF_LITE_ENSURE(context, output_shape.Dims(i) == 1); + } else { + TF_LITE_ENSURE(context, output_shape.Dims(i) == input_shape.Dims(i - 1)); + } + } + return kTfLiteOk; +} + +TfLiteStatus Prepare(TfLiteContext* context, TfLiteNode* node) { + MicroContext* micro_context = GetMicroContext(context); + + TF_LITE_ENSURE_EQ(context, NumInputs(node), 2); + TF_LITE_ENSURE_EQ(context, NumOutputs(node), 1); + TfLiteTensor* input = + micro_context->AllocateTempInputTensor(node, kInputTensor); + TF_LITE_ENSURE(context, input != nullptr); + TfLiteTensor* axis = + micro_context->AllocateTempInputTensor(node, kAxisTensor); + TF_LITE_ENSURE(context, axis != nullptr); + TfLiteTensor* output = + micro_context->AllocateTempOutputTensor(node, kOutputTensor); + TF_LITE_ENSURE(context, output != nullptr); + output->type = input->type; + if (IsDynamicTensor(axis)) { + MicroPrintf("DynamicTensor is not yet supported by Expand_Dims."); + return kTfLiteError; + } + TF_LITE_ENSURE_OK(context, VerifyTensorDim(context, input, axis, output)); + + micro_context->DeallocateTempTfLiteTensor(input); + micro_context->DeallocateTempTfLiteTensor(axis); + micro_context->DeallocateTempTfLiteTensor(output); + return kTfLiteOk; +} + +template +void memCopyN(T* out, const T* in, const int num_elements) { + for (int i = 0; i < num_elements; ++i) { + out[i] = in[i]; + } +} + +TfLiteStatus Eval(TfLiteContext* context, TfLiteNode* node) { + const TfLiteEvalTensor* input = + tflite::micro::GetEvalInput(context, node, kInputTensor); + TfLiteEvalTensor* output = + tflite::micro::GetEvalOutput(context, node, kOutputTensor); + const int flat_size = 
ElementCount(*input->dims); + + switch (input->type) { + case kTfLiteFloat32: { + memCopyN(tflite::micro::GetTensorData(output), + tflite::micro::GetTensorData(input), flat_size); + } break; + case kTfLiteInt8: { + memCopyN(tflite::micro::GetTensorData(output), + tflite::micro::GetTensorData(input), flat_size); + } break; + default: + MicroPrintf( + "Expand_Dims only currently supports int8 and float32, got %d.", + input->type); + return kTfLiteError; + } + return kTfLiteOk; +} +} // namespace + +TFLMRegistration Register_EXPAND_DIMS() { + return tflite::micro::RegisterOp(nullptr, Prepare, Eval); +} + +} // namespace tflite diff --git a/tensorflow/lite/micro/kernels/expand_dims_test.cc b/tensorflow/lite/micro/kernels/expand_dims_test.cc new file mode 100644 index 0000000..d8e217e --- /dev/null +++ b/tensorflow/lite/micro/kernels/expand_dims_test.cc @@ -0,0 +1,235 @@ +/* Copyright 2021 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#include "tensorflow/lite/c/builtin_op_data.h" +#include "tensorflow/lite/c/common.h" +#include "tensorflow/lite/micro/kernels/kernel_runner.h" +#include "tensorflow/lite/micro/test_helpers.h" +#include "tensorflow/lite/micro/testing/micro_test.h" + +namespace tflite { +namespace testing { +namespace { + +// The tensor layout is fixed. 
+constexpr int kInputsTensorSize = 2; +constexpr int kOutputsTensorSize = 1; +constexpr int kTensorsSize = kInputsTensorSize + kOutputsTensorSize; + +constexpr int kDimsTensorIndex = 0; +constexpr int kAxisTensorIndex = 1; +constexpr int kOutputTensorIndex = 2; +constexpr int kInputTensors[] = {2, kDimsTensorIndex, kAxisTensorIndex}; +constexpr int kOutputTensors[] = {1, kOutputTensorIndex}; + +template +micro::KernelRunner CreateExpandDimsKernelRunner( + int* input_dims, const T* input_data, int* axis_dims, + const int32_t* axis_data, int* output_dims, T* output_data) { + // Some targets do not support dynamic memory (i.e., no malloc or new), thus, + // the test need to place non-transitent memories in static variables. This is + // safe because tests are guaranteed to run serially. + // Both below structures are trivially destructible. + static TFLMRegistration registration; + static TfLiteTensor tensors[kTensorsSize]; + + TfLiteIntArray* in_dims = IntArrayFromInts(input_dims); + TfLiteIntArray* ax_dims = IntArrayFromInts(axis_dims); + TfLiteIntArray* out_dims = IntArrayFromInts(output_dims); + + const int out_dims_size = out_dims->size; + const int in_dims_size = in_dims->size; + TF_LITE_MICRO_EXPECT_EQ(out_dims_size, (in_dims_size + 1)); + + tensors[kDimsTensorIndex] = CreateTensor(input_data, in_dims); + tensors[kAxisTensorIndex] = CreateTensor(axis_data, ax_dims); + tensors[kOutputTensorIndex] = CreateTensor(output_data, out_dims, true); + + TfLiteIntArray* inputs_array = + IntArrayFromInts(const_cast(kInputTensors)); + + TfLiteIntArray* outputs_array = + IntArrayFromInts(const_cast(kOutputTensors)); + + registration = Register_EXPAND_DIMS(); + micro::KernelRunner runner(registration, tensors, kTensorsSize, inputs_array, + outputs_array, + /*builtin_data=*/nullptr); + return runner; +} + +template +void TestExpandDims(int* input_dims, const T* input_data, int* axis_dims, + const int32_t* axis_data, int* expected_output_dims, + int* output_dims, const T* 
expected_output_data, + T* output_data) { + micro::KernelRunner runner = CreateExpandDimsKernelRunner( + input_dims, input_data, axis_dims, axis_data, output_dims, output_data); + + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, runner.InitAndPrepare()); + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, runner.Invoke()); + + // The output tensor's data have been updated by the kernel. + TfLiteIntArray* actual_out_dims = IntArrayFromInts(output_dims); + const int output_size = ElementCount(*actual_out_dims); + + for (int i = 0; i < output_size; ++i) { + TF_LITE_MICRO_EXPECT_EQ(expected_output_data[i], output_data[i]); + } +} + +} // namespace +} // namespace testing +} // namespace tflite + +TF_LITE_MICRO_TESTS_BEGIN + +TF_LITE_MICRO_TEST(ExpandDimsPositiveAxisTest0) { + int8_t output_data[4]; + int input_dims[] = {2, 2, 2}; + const int8_t input_data[] = {-1, 1, -2, 2}; + const int8_t golden_data[] = {-1, 1, -2, 2}; + int axis_dims[] = {1, 1}; + const int32_t axis_data[] = {0}; + int golden_dims[] = {1, 2, 2}; + int output_dims[] = {3, 1, 2, 2}; + tflite::testing::TestExpandDims(input_dims, input_data, axis_dims, + axis_data, golden_dims, output_dims, + golden_data, output_data); +} + +TF_LITE_MICRO_TEST(ExpandDimsPositiveAxisTest1) { + float output_data[4]; + int input_dims[] = {2, 2, 2}; + const float input_data[] = {-1.1, 1.2, -2.1, 2.2}; + const float golden_data[] = {-1.1, 1.2, -2.1, 2.2}; + int axis_dims[] = {1, 1}; + const int32_t axis_data[] = {1}; + int golden_dims[] = {2, 1, 2}; + int output_dims[] = {3, 2, 1, 2}; + tflite::testing::TestExpandDims(input_dims, input_data, axis_dims, + axis_data, golden_dims, output_dims, + golden_data, output_data); +} + +TF_LITE_MICRO_TEST(ExpandDimsPositiveAxisTest2) { + int8_t output_data[4]; + int input_dims[] = {2, 2, 2}; + const int8_t input_data[] = {-1, 1, -2, 2}; + const int8_t golden_data[] = {-1, 1, -2, 2}; + int axis_dims[] = {1, 1}; + const int32_t axis_data[] = {2}; + int golden_dims[] = {2, 2, 1}; + int output_dims[] = {3, 2, 2, 1}; 
+ tflite::testing::TestExpandDims(input_dims, input_data, axis_dims, + axis_data, golden_dims, output_dims, + golden_data, output_data); +} + +TF_LITE_MICRO_TEST(ExpandDimsNegativeAxisTest4) { + int8_t output_data[6]; + int input_dims[] = {3, 3, 1, 2}; + const int8_t input_data[] = {-1, 1, 2, -2, 0, 3}; + const int8_t golden_data[] = {-1, 1, 2, -2, 0, 3}; + int axis_dims[] = {1, 1}; + const int32_t axis_data[] = {-4}; + int golden_dims[] = {1, 3, 1, 2}; + int output_dims[] = {4, 1, 3, 1, 2}; + tflite::testing::TestExpandDims(input_dims, input_data, axis_dims, + axis_data, golden_dims, output_dims, + golden_data, output_data); +} + +TF_LITE_MICRO_TEST(ExpandDimsNegativeAxisTest3) { + float output_data[6]; + int input_dims[] = {3, 3, 1, 2}; + const float input_data[] = {0.1, -0.8, -1.2, -0.5, 0.9, 1.3}; + const float golden_data[] = {0.1, -0.8, -1.2, -0.5, 0.9, 1.3}; + int axis_dims[] = {1, 1}; + const int32_t axis_data[] = {-3}; + int golden_dims[] = {3, 1, 1, 2}; + int output_dims[] = {4, 3, 1, 1, 2}; + tflite::testing::TestExpandDims(input_dims, input_data, axis_dims, + axis_data, golden_dims, output_dims, + golden_data, output_data); +} + +TF_LITE_MICRO_TEST(ExpandDimsNegativeAxisTest2) { + int8_t output_data[6]; + int input_dims[] = {3, 1, 2, 3}; + const int8_t input_data[] = {-1, 1, 2, -2, 0, 3}; + const int8_t golden_data[] = {-1, 1, 2, -2, 0, 3}; + int axis_dims[] = {1, 1}; + const int32_t axis_data[] = {-2}; + int golden_dims[] = {1, 2, 1, 3}; + int output_dims[] = {4, 1, 2, 1, 3}; + tflite::testing::TestExpandDims(input_dims, input_data, axis_dims, + axis_data, golden_dims, output_dims, + golden_data, output_data); +} + +TF_LITE_MICRO_TEST(ExpandDimsNegativeAxisTest1) { + float output_data[6]; + int input_dims[] = {3, 1, 3, 2}; + const float input_data[] = {0.1, -0.8, -1.2, -0.5, 0.9, 1.3}; + const float golden_data[] = {0.1, -0.8, -1.2, -0.5, 0.9, 1.3}; + int axis_dims[] = {1, 1}; + const int32_t axis_data[] = {-1}; + int golden_dims[] = {1, 3, 2, 1}; + 
int output_dims[] = {4, 1, 3, 2, 1}; + tflite::testing::TestExpandDims(input_dims, input_data, axis_dims, + axis_data, golden_dims, output_dims, + golden_data, output_data); +} + +TF_LITE_MICRO_TEST(ExpandDimsInputOutputDimsMismatchShallFail) { + float output_data[6]; + int input_dims[] = {3, 1, 3, 2}; + const float input_data[] = {0.1, -0.8, -1.2, -0.5, 0.9, 1.3}; + int axis_dims[] = {1, 1}; + const int32_t axis_data[] = {-1}; + // When input dimension is [1, 3, 2] and the axis is -1, the output dimension + // should be [1, 3, 2, 1] as in the test case ExpandDimsNegativeAxisTest1. + // Shuffle the output dimension to make it incorrect so that the EXPAND_DIMS + // op would fail at prepare. + int output_dims[] = {4, 1, 3, 1, 2}; + + tflite::micro::KernelRunner runner = + tflite::testing::CreateExpandDimsKernelRunner(input_dims, input_data, + axis_dims, axis_data, + output_dims, output_data); + + TF_LITE_MICRO_EXPECT_EQ(kTfLiteError, runner.InitAndPrepare()); +} + +TF_LITE_MICRO_TEST(ExpandDimsAxisOutOfRangeShallFail) { + int8_t output_data[6]; + int input_dims[] = {3, 1, 3, 2}; + const int8_t input_data[] = {1, 8, 2, 5, 9, 3}; + int axis_dims[] = {1, 1}; + // The input dimension is 3-D, so that axis value should not exceed 3. + // The below axis value 4 shall lead to failure at prepare. + const int32_t axis_data[] = {4}; + int output_dims[] = {4, 1, 3, 2, 1}; + + tflite::micro::KernelRunner runner = + tflite::testing::CreateExpandDimsKernelRunner(input_dims, input_data, + axis_dims, axis_data, + output_dims, output_data); + + TF_LITE_MICRO_EXPECT_EQ(kTfLiteError, runner.InitAndPrepare()); +} + +TF_LITE_MICRO_TESTS_END diff --git a/tensorflow/lite/micro/kernels/fill.cc b/tensorflow/lite/micro/kernels/fill.cc new file mode 100644 index 0000000..b1b366e --- /dev/null +++ b/tensorflow/lite/micro/kernels/fill.cc @@ -0,0 +1,140 @@ +/* Copyright 2020 The TensorFlow Authors. All Rights Reserved. 
+ +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#include "tensorflow/lite/kernels/internal/reference/fill.h" + +#include + +#include "tensorflow/lite/c/common.h" +#include "tensorflow/lite/kernels/internal/tensor_ctypes.h" +#include "tensorflow/lite/kernels/kernel_util.h" +#include "tensorflow/lite/micro/kernels/kernel_util.h" +#include "tensorflow/lite/micro/micro_log.h" + +namespace tflite { + +namespace { + +template +TfLiteStatus EnsureEqImpl(TfLiteContext* context, const TfLiteIntArray* array, + const TfLiteTensor* tensor) { + for (int i = 0; i < array->size; ++i) { + TF_LITE_ENSURE_EQ(context, array->data[i], GetTensorData(tensor)[i]); + } + return kTfLiteOk; +} + +// Ensure the equality of an int array and a tensor, which must be +// one-dimensional and of an integer type. 
+TfLiteStatus EnsureEq(TfLiteContext* context, const TfLiteIntArray* array, + const TfLiteTensor* tensor) { + TF_LITE_ENSURE_EQ(context, NumDimensions(tensor), 1); + const auto tensor_len = tensor->dims->data[0]; + TF_LITE_ENSURE_EQ(context, array->size, tensor_len); + + switch (tensor->type) { + case kTfLiteInt8: + return EnsureEqImpl(context, array, tensor); + case kTfLiteInt16: + return EnsureEqImpl(context, array, tensor); + case kTfLiteInt32: + return EnsureEqImpl(context, array, tensor); + case kTfLiteInt64: + return EnsureEqImpl(context, array, tensor); + default: + MicroPrintf("cannot compare int array to tensor of type %d.", + tensor->type); + return kTfLiteError; + } +} + +constexpr int kDimsTensor = 0; +constexpr int kValueTensor = 1; +constexpr int kOutputTensor = 0; + +TfLiteStatus Prepare(TfLiteContext* context, TfLiteNode* node) { + MicroContext* micro_context = GetMicroContext(context); + + // Ensure inputs and outputs exist. + TfLiteTensor* dims = + micro_context->AllocateTempInputTensor(node, kDimsTensor); + TF_LITE_ENSURE(context, dims != nullptr); + TfLiteTensor* value = + micro_context->AllocateTempInputTensor(node, kValueTensor); + TF_LITE_ENSURE(context, value != nullptr); + TfLiteTensor* output = + micro_context->AllocateTempOutputTensor(node, kOutputTensor); + TF_LITE_ENSURE(context, output != nullptr); + + // The value tensor must be a scalar. + TF_LITE_ENSURE_EQ(context, NumDimensions(value), 0); + + // The value type and output type must match. + TF_LITE_ENSURE_EQ(context, value->type, output->type); + + // The dimension of the output tensor is known in model already. + TFLITE_DCHECK(output->dims != nullptr); + + if (dims->data.data != nullptr) { + // When the dims tensor is specified in model already (i.e. is not an + // activation tensor), the dims tensor must match the output tensor shape. + // As a byproduct, ensures the dims tensor is of an integer type. 
+ TF_LITE_ENSURE_OK(context, EnsureEq(context, output->dims, dims)); + } + + micro_context->DeallocateTempTfLiteTensor(dims); + micro_context->DeallocateTempTfLiteTensor(value); + micro_context->DeallocateTempTfLiteTensor(output); + return kTfLiteOk; +} + +template +void FillImpl(const TfLiteEvalTensor* value, TfLiteEvalTensor* output) { + reference_ops::Fill( + micro::GetTensorShape(value), micro::GetTensorData(value), + micro::GetTensorShape(output), micro::GetTensorData(output)); +} + +TfLiteStatus Eval(TfLiteContext* context, TfLiteNode* node) { + const TfLiteEvalTensor* value = + micro::GetEvalInput(context, node, kValueTensor); + TfLiteEvalTensor* output = micro::GetEvalOutput(context, node, kOutputTensor); + + switch (value->type) { + case kTfLiteFloat32: + FillImpl(value, output); + break; + case kTfLiteInt32: + FillImpl(value, output); + break; + case kTfLiteInt8: + FillImpl(value, output); + break; + default: + MicroPrintf("Fill only currently supports float32 for input 1, got %d.", + TfLiteTypeGetName(value->type)); + return kTfLiteError; + } + + return kTfLiteOk; +} + +} // namespace + +TFLMRegistration Register_FILL() { + return tflite::micro::RegisterOp(nullptr, Prepare, Eval); +} + +} // namespace tflite diff --git a/tensorflow/lite/micro/kernels/fill_test.cc b/tensorflow/lite/micro/kernels/fill_test.cc new file mode 100644 index 0000000..7035540 --- /dev/null +++ b/tensorflow/lite/micro/kernels/fill_test.cc @@ -0,0 +1,236 @@ +/* Copyright 2020 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#include "tensorflow/lite/c/builtin_op_data.h" +#include "tensorflow/lite/c/common.h" +#include "tensorflow/lite/micro/kernels/kernel_runner.h" +#include "tensorflow/lite/micro/micro_utils.h" +#include "tensorflow/lite/micro/test_helpers.h" +#include "tensorflow/lite/micro/testing/micro_test.h" + +namespace { +using ::tflite::testing::CreateTensor; +using ::tflite::testing::IntArrayFromInts; + +// The layout of tensors is fixed. +constexpr int kDimsIndex = 0; +constexpr int kValueIndex = 1; +constexpr int kOutputIndex = 2; +constexpr int kInputsTensor[] = {2, kDimsIndex, kValueIndex}; +constexpr int kOutputsTensor[] = {1, kOutputIndex}; + +// This function is NOT thread safe. +template +tflite::micro::KernelRunner CreateFillTestRunner( + int* dims_shape, DimsType* dims_data, int* value_shape, + ValueType* value_data, int* output_shape, OutputType* output_data) { + // Some targets do not support dynamic memory (i.e., no malloc or new), thus, + // the test need to place non-transitent memories in static variables. This is + // safe because tests are guaranteed to run serially. + // Both below structures are trivially destructible. + static TFLMRegistration registration; + static TfLiteTensor tensors[3]; + + tensors[0] = CreateTensor(dims_data, IntArrayFromInts(dims_shape)); + tensors[1] = CreateTensor(value_data, IntArrayFromInts(value_shape)); + tensors[2] = CreateTensor(output_data, IntArrayFromInts(output_shape)); + + // The output type matches the value type. 
+ TF_LITE_MICRO_EXPECT_EQ(tensors[kOutputIndex].type, + tensors[kValueIndex].type); + + registration = tflite::Register_FILL(); + tflite::micro::KernelRunner runner = tflite::micro::KernelRunner( + registration, tensors, sizeof(tensors) / sizeof(TfLiteTensor), + IntArrayFromInts(const_cast(kInputsTensor)), + IntArrayFromInts(const_cast(kOutputsTensor)), + /*builtin_data=*/nullptr); + return runner; +} + +template +void TestFill(int* dims_shape, DimsType* dims_data, int* value_shape, + ValueType* value_data, int* output_shape, + OutputType* output_data) { + tflite::micro::KernelRunner runner = + CreateFillTestRunner(dims_shape, dims_data, value_shape, value_data, + output_shape, output_data); + + TF_LITE_MICRO_EXPECT_EQ(runner.InitAndPrepare(), kTfLiteOk); + TF_LITE_MICRO_EXPECT_EQ(runner.Invoke(), kTfLiteOk); + + // The output shape must match the shape requested via dims. + const auto output_rank = output_shape[0]; + if (dims_data != nullptr) { + const auto requested_rank = dims_shape[1]; // yes, 1 + if (output_rank == requested_rank) { + for (int i = 0; i < requested_rank; ++i) { + TF_LITE_MICRO_EXPECT_EQ(output_shape[i + 1], dims_data[i]); + } + } else { + TF_LITE_MICRO_FAIL( + "output shape does not match shape requested via dims"); + } + } + + // The output elements contain the fill value. 
+ const auto elements = tflite::ElementCount(*IntArrayFromInts(output_shape)); + for (int i = 0; i < elements; ++i) { + TF_LITE_MICRO_EXPECT_EQ(output_data[i], value_data[0]); + } +} + +} // namespace + +TF_LITE_MICRO_TESTS_BEGIN + +TF_LITE_MICRO_TEST(FillFloatInt64Dims) { + constexpr int kDim1 = 2; + constexpr int kDim2 = 2; + constexpr int kDim3 = 2; + + int dims_shape[] = {1, 3}; + int64_t dims_data[] = {kDim1, kDim2, kDim3}; + + int value_shape[] = {0}; + float value_data[] = {4.0}; + + int output_shape[] = {3, kDim1, kDim2, kDim3}; + float output_data[kDim1 * kDim2 * kDim3]; + + TestFill(dims_shape, dims_data, value_shape, value_data, output_shape, + output_data); +} + +// Fill a 2x2x2 tensor with a int32 scalar value. The dimension of the tensor is +// of int64 type. +TF_LITE_MICRO_TEST(FillInt32Int64Dims) { + constexpr int kDim1 = 2; + constexpr int kDim2 = 2; + constexpr int kDim3 = 2; + + int dims_shape[] = {1, 3}; + int64_t dims_data[] = {kDim1, kDim2, kDim3}; + + int value_shape[] = {0}; + int32_t value_data[] = {4}; + + int output_shape[] = {3, kDim1, kDim2, kDim3}; + int32_t output_data[kDim1 * kDim2 * kDim3]; + + TestFill(dims_shape, dims_data, value_shape, value_data, output_shape, + output_data); +} + +// Fill a 2x2x2 tensor with a int8 scalar value. The dimension of the tensor is +// of int32 type. +TF_LITE_MICRO_TEST(FillInt8Int32Dims) { + constexpr int kDim1 = 2; + constexpr int kDim2 = 2; + constexpr int kDim3 = 2; + + int dims_shape[] = {1, 3}; + int32_t dims_data[] = {kDim1, kDim2, kDim3}; + + int value_shape[] = {0}; + int8_t value_data[] = {4}; + + int output_shape[] = {3, kDim1, kDim2, kDim3}; + int8_t output_data[kDim1 * kDim2 * kDim3]; + + TestFill(dims_shape, dims_data, value_shape, value_data, output_shape, + output_data); +} + +// Verify the FILL still works when the input dims tensor is an activation +// tensor (i.e. has not prepopulated value). Fill a 2x2x2 tensor with a int8 +// scalar value. 
+TF_LITE_MICRO_TEST(FillInt8NoInputDimsData) { + constexpr int kDim1 = 2; + constexpr int kDim2 = 2; + constexpr int kDim3 = 2; + + // The dims tensor with unknown data. Note that shape is always known. + int dims_shape[] = {1, 3}; + int32_t* dims_data = nullptr; + + int value_shape[] = {0}; + int8_t value_data[] = {4}; + + int output_shape[] = {3, kDim1, kDim2, kDim3}; + int8_t output_data[kDim1 * kDim2 * kDim3]; + + TestFill(dims_shape, dims_data, value_shape, value_data, output_shape, + output_data); +} + +TF_LITE_MICRO_TEST(FillFloatInt32Dims) { + constexpr int kDim1 = 2; + constexpr int kDim2 = 2; + constexpr int kDim3 = 2; + + int dims_shape[] = {1, 3}; + int32_t dims_data[] = {kDim1, kDim2, kDim3}; + + int value_shape[] = {0}; + float value_data[] = {4.0}; + + int output_shape[] = {3, kDim1, kDim2, kDim3}; + float output_data[kDim1 * kDim2 * kDim3]; + + TestFill(dims_shape, dims_data, value_shape, value_data, output_shape, + output_data); +} + +TF_LITE_MICRO_TEST(FillScalar) { + int dims_shape[] = {1, 0}; + int64_t dims_data[] = {0}; + + int value_shape[] = {0}; + float value_data[] = {4.0}; + + int output_shape[] = {0}; + float output_data[] = {0}; + + TestFill(dims_shape, dims_data, value_shape, value_data, output_shape, + output_data); +} + +// When input dimension tensor mismatch with the output tensor's dimension, +// the FILL op shall return error at init/prepare stage. +TF_LITE_MICRO_TEST(FillInputDimsMismatchWithOutputShallFail) { + constexpr int kDim1 = 2; + constexpr int kDim2 = 2; + constexpr int kDim3 = 2; + + int dims_shape[] = {1, 3}; + int64_t dims_data[] = {kDim1, kDim2, kDim3}; + + int value_shape[] = {0}; + int8_t value_data[] = {4}; + + // Output shape is supposed to be the same as dims_data. + // Intentionally +1 to kDim1 to verify the code catches this error. 
+ int output_shape[] = {3, kDim1 + 1, kDim2, kDim3}; + int8_t output_data[(kDim1 + 1) * kDim2 * kDim3]; + + tflite::micro::KernelRunner runner = + CreateFillTestRunner(dims_shape, dims_data, value_shape, value_data, + output_shape, output_data); + + TF_LITE_MICRO_EXPECT_EQ(runner.InitAndPrepare(), kTfLiteError); +} + +TF_LITE_MICRO_TESTS_END diff --git a/tensorflow/lite/micro/kernels/floor.cc b/tensorflow/lite/micro/kernels/floor.cc new file mode 100644 index 0000000..094c8b5 --- /dev/null +++ b/tensorflow/lite/micro/kernels/floor.cc @@ -0,0 +1,48 @@ +/* Copyright 2022 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/ + +#include "tensorflow/lite/kernels/internal/reference/floor.h" + +#include "tensorflow/lite/c/common.h" +#include "tensorflow/lite/kernels/internal/tensor_ctypes.h" +#include "tensorflow/lite/micro/kernels/kernel_util.h" + +namespace tflite { + +namespace { + +constexpr int kInputTensor = 0; +constexpr int kOutputTensor = 0; + +TfLiteStatus Eval(TfLiteContext* context, TfLiteNode* node) { + const TfLiteEvalTensor* input = + tflite::micro::GetEvalInput(context, node, kInputTensor); + TF_LITE_ENSURE_TYPES_EQ(context, input->type, kTfLiteFloat32); + TfLiteEvalTensor* output = + tflite::micro::GetEvalOutput(context, node, kOutputTensor); + reference_ops::Floor(tflite::micro::GetTensorShape(input), + tflite::micro::GetTensorData(input), + tflite::micro::GetTensorShape(output), + tflite::micro::GetTensorData(output)); + return kTfLiteOk; +} + +} // namespace + +TFLMRegistration Register_FLOOR() { + return tflite::micro::RegisterOp(nullptr, nullptr, Eval); +} + +} // namespace tflite diff --git a/tensorflow/lite/micro/kernels/floor_div.cc b/tensorflow/lite/micro/kernels/floor_div.cc new file mode 100644 index 0000000..5c00808 --- /dev/null +++ b/tensorflow/lite/micro/kernels/floor_div.cc @@ -0,0 +1,130 @@ +/* Copyright 2020 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/ + +#include "tensorflow/lite/kernels/internal/reference/floor_div.h" + +#include "tensorflow/lite/c/common.h" +#include "tensorflow/lite/kernels/internal/reference/binary_function.h" +#include "tensorflow/lite/kernels/internal/types.h" +#include "tensorflow/lite/kernels/kernel_util.h" +#include "tensorflow/lite/micro/kernels/kernel_util.h" +#include "tensorflow/lite/micro/micro_log.h" +#include "tensorflow/lite/micro/micro_utils.h" + +namespace tflite { +namespace { + +// Input/output tensor index. +constexpr int kInputTensor1 = 0; +constexpr int kInputTensor2 = 1; +constexpr int kOutputTensor = 0; + +TfLiteStatus CalculateOpData(TfLiteContext* context, TfLiteNode* node) { + MicroContext* micro_context = GetMicroContext(context); + + TF_LITE_ENSURE_EQ(context, NumInputs(node), 2); + TF_LITE_ENSURE_EQ(context, NumOutputs(node), 1); + + TfLiteTensor* input1 = + micro_context->AllocateTempInputTensor(node, kInputTensor1); + TF_LITE_ENSURE(context, input1 != nullptr); + TfLiteTensor* input2 = + micro_context->AllocateTempInputTensor(node, kInputTensor2); + TF_LITE_ENSURE(context, input2 != nullptr); + TfLiteTensor* output = + micro_context->AllocateTempOutputTensor(node, kOutputTensor); + TF_LITE_ENSURE(context, output != nullptr); + + TF_LITE_ENSURE_TYPES_EQ(context, input1->type, input2->type); + TF_LITE_ENSURE_TYPES_EQ(context, input1->type, output->type); + + micro_context->DeallocateTempTfLiteTensor(input1); + micro_context->DeallocateTempTfLiteTensor(input2); + micro_context->DeallocateTempTfLiteTensor(output); + + return kTfLiteOk; +} + +void* Init(TfLiteContext* context, const char* buffer, size_t length) { + return nullptr; +} + +TfLiteStatus Prepare(TfLiteContext* context, TfLiteNode* node) { + return CalculateOpData(context, node); +} + +template +TfLiteStatus EvalFloorDiv(TfLiteContext* context, + const TfLiteEvalTensor* input1, + const TfLiteEvalTensor* input2, + 
TfLiteEvalTensor* output) { + const T* denominator_data = tflite::micro::GetTensorData(input2); + + // Validate the denominator. + for (int i = 0; i < tflite::ElementCount(*input2->dims); ++i) { + if (std::equal_to()(denominator_data[i], 0)) { + MicroPrintf("Division by 0"); + return kTfLiteError; + } + } + + bool requires_broadcast = !tflite::micro::HaveSameShapes(input1, input2); + + if (requires_broadcast) { + reference_ops::BroadcastBinaryFunction4DSlow( + tflite::micro::GetTensorShape(input1), + tflite::micro::GetTensorData(input1), + tflite::micro::GetTensorShape(input2), denominator_data, + tflite::micro::GetTensorShape(output), + tflite::micro::GetTensorData(output), reference_ops::FloorDiv); + } else { + reference_ops::BinaryFunction( + tflite::micro::GetTensorShape(input1), + tflite::micro::GetTensorData(input1), + tflite::micro::GetTensorShape(input2), denominator_data, + tflite::micro::GetTensorShape(output), + tflite::micro::GetTensorData(output), reference_ops::FloorDiv); + } + + return kTfLiteOk; +} + +TfLiteStatus Eval(TfLiteContext* context, TfLiteNode* node) { + const TfLiteEvalTensor* input1 = + tflite::micro::GetEvalInput(context, node, kInputTensor1); + const TfLiteEvalTensor* input2 = + tflite::micro::GetEvalInput(context, node, kInputTensor2); + TfLiteEvalTensor* output = + tflite::micro::GetEvalOutput(context, node, kOutputTensor); + + switch (input1->type) { + case kTfLiteFloat32: { + return EvalFloorDiv(context, input1, input2, output); + } + default: { + MicroPrintf("Type '%s' is not supported by FLOOR_DIV.", + TfLiteTypeGetName(input1->type)); + return kTfLiteError; + } + } +} + +} // namespace + +TFLMRegistration Register_FLOOR_DIV() { + return tflite::micro::RegisterOp(Init, Prepare, Eval); +} + +} // namespace tflite diff --git a/tensorflow/lite/micro/kernels/floor_div_test.cc b/tensorflow/lite/micro/kernels/floor_div_test.cc new file mode 100644 index 0000000..531ee63 --- /dev/null +++ 
b/tensorflow/lite/micro/kernels/floor_div_test.cc @@ -0,0 +1,108 @@ +/* Copyright 2020 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#include + +#include "tensorflow/lite/c/builtin_op_data.h" +#include "tensorflow/lite/c/common.h" +#include "tensorflow/lite/micro/kernels/kernel_runner.h" +#include "tensorflow/lite/micro/test_helpers.h" +#include "tensorflow/lite/micro/testing/micro_test.h" + +namespace tflite { +namespace testing { +namespace { + +void ExecuteFloorDivTest(TfLiteTensor* tensors, int tensors_count) { + int kInputArrayData[] = {2, 0, 1}; + TfLiteIntArray* inputs_array = IntArrayFromInts(kInputArrayData); + int kOutputArrayData[] = {1, 2}; + TfLiteIntArray* outputs_array = IntArrayFromInts(kOutputArrayData); + + const TFLMRegistration registration = tflite::Register_FLOOR_DIV(); + micro::KernelRunner runner(registration, tensors, tensors_count, inputs_array, + outputs_array, nullptr); + + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, runner.InitAndPrepare()); + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, runner.Invoke()); +} + +template +void TestFloorDiv(int* input1_dims_data, const T* input1_data, + int* input2_dims_data, const T* input2_data, + int* expected_dims, const T* expected_data, T* output_data) { + TfLiteIntArray* input1_dims = IntArrayFromInts(input1_dims_data); + TfLiteIntArray* input2_dims = IntArrayFromInts(input2_dims_data); + TfLiteIntArray* 
output_dims = IntArrayFromInts(expected_dims); + const int output_count = ElementCount(*output_dims); + + TfLiteTensor tensors[] = { + CreateTensor(input1_data, input1_dims), + CreateTensor(input2_data, input2_dims), + CreateTensor(output_data, output_dims), + }; + constexpr int tensors_count = std::extent::value; + + ExecuteFloorDivTest(tensors, tensors_count); + + for (int i = 0; i < output_count; i++) { + TF_LITE_MICRO_EXPECT_EQ(expected_data[i], output_data[i]); + } +} + +} // namespace +} // namespace testing +} // namespace tflite + +TF_LITE_MICRO_TESTS_BEGIN + +TF_LITE_MICRO_TEST(FloorDivTestSimpleFloat) { + int kDims[] = {4, 1, 2, 2, 1}; + constexpr float kInput1[] = {10.05, 9.09, 11.9, 3.01}; + constexpr float kInput2[] = {2.05, 2.03, 3.03, 4.03}; + constexpr float kExpect[] = {4.0, 4.0, 3.0, 0.0}; + constexpr int kOutputCount = std::extent::value; + float output_data[kOutputCount]; + + tflite::testing::TestFloorDiv(kDims, kInput1, kDims, kInput2, kDims, kExpect, + output_data); +} + +TF_LITE_MICRO_TEST(FloorDivTestNegativeValueFloat) { + int kDims[] = {4, 1, 2, 2, 1}; + constexpr float kInput1[] = {10.03, -9.9, -11.0, 7.0}; + constexpr float kInput2[] = {2.0, 2.3, -3.0, -4.1}; + constexpr float kExpect[] = {5.0, -5.0, 3.0, -2.0}; + constexpr int kOutputCount = std::extent::value; + float output_data[kOutputCount]; + + tflite::testing::TestFloorDiv(kDims, kInput1, kDims, kInput2, kDims, kExpect, + output_data); +} + +TF_LITE_MICRO_TEST(FloorDivTestBroadcastFloat) { + int kDims1[] = {4, 1, 2, 2, 1}; + int kDims2[] = {1, 1}; + constexpr float kInput1[] = {10.03, -9.9, -11.0, 7.0}; + constexpr float kInput2[] = {-3.3}; + constexpr float kExpect[] = {-4.0, 2.0, 3.0, -3.0}; + constexpr int kOutputCount = std::extent::value; + float output_data[kOutputCount]; + + tflite::testing::TestFloorDiv(kDims1, kInput1, kDims2, kInput2, kDims1, + kExpect, output_data); +} + +TF_LITE_MICRO_TESTS_END diff --git a/tensorflow/lite/micro/kernels/floor_mod.cc 
b/tensorflow/lite/micro/kernels/floor_mod.cc new file mode 100644 index 0000000..f459892 --- /dev/null +++ b/tensorflow/lite/micro/kernels/floor_mod.cc @@ -0,0 +1,128 @@ +/* Copyright 2020 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#include "tensorflow/lite/kernels/internal/reference/floor_mod.h" + +#include "tensorflow/lite/c/common.h" +#include "tensorflow/lite/kernels/internal/reference/binary_function.h" +#include "tensorflow/lite/kernels/internal/reference/process_broadcast_shapes.h" +#include "tensorflow/lite/kernels/internal/types.h" +#include "tensorflow/lite/kernels/kernel_util.h" +#include "tensorflow/lite/micro/kernels/kernel_util.h" +#include "tensorflow/lite/micro/micro_log.h" +#include "tensorflow/lite/micro/micro_utils.h" + +// OLD-TODO(b/117523611): We should factor out a binary_op and put binary ops +// there. +namespace tflite { +namespace { + +// Input/output tensor index. +constexpr int kInputTensor1 = 0; +constexpr int kInputTensor2 = 1; +constexpr int kOutputTensor = 0; + +// OLD-TODO(b/117912880): Support quantization. 
+ +TfLiteStatus CalculateOpData(TfLiteContext* context, TfLiteNode* node) { + MicroContext* micro_context = GetMicroContext(context); + + TF_LITE_ENSURE_EQ(context, NumInputs(node), 2); + TF_LITE_ENSURE_EQ(context, NumOutputs(node), 1); + + TfLiteTensor* input1 = + micro_context->AllocateTempInputTensor(node, kInputTensor1); + TF_LITE_ENSURE(context, input1 != nullptr); + TfLiteTensor* input2 = + micro_context->AllocateTempInputTensor(node, kInputTensor2); + TF_LITE_ENSURE(context, input2 != nullptr); + TfLiteTensor* output = + micro_context->AllocateTempOutputTensor(node, kOutputTensor); + TF_LITE_ENSURE(context, output != nullptr); + + TF_LITE_ENSURE_TYPES_EQ(context, input1->type, input2->type); + TF_LITE_ENSURE_TYPES_EQ(context, input1->type, output->type); + + micro_context->DeallocateTempTfLiteTensor(input1); + micro_context->DeallocateTempTfLiteTensor(input2); + micro_context->DeallocateTempTfLiteTensor(output); + + return kTfLiteOk; +} + +void* Init(TfLiteContext* context, const char* buffer, size_t length) { + return nullptr; +} + +TfLiteStatus Prepare(TfLiteContext* context, TfLiteNode* node) { + return CalculateOpData(context, node); +} + +template +TfLiteStatus EvalFloorMod(TfLiteContext* context, bool requires_broadcast, + const TfLiteEvalTensor* input1, + const TfLiteEvalTensor* input2, + TfLiteEvalTensor* output) { + const T* denominator_data = tflite::micro::GetTensorData(input2); + + if (requires_broadcast) { + reference_ops::BroadcastBinaryFunction4DSlow( + tflite::micro::GetTensorShape(input1), + tflite::micro::GetTensorData(input1), + tflite::micro::GetTensorShape(input2), denominator_data, + tflite::micro::GetTensorShape(output), + tflite::micro::GetTensorData(output), reference_ops::FloorMod); + } else { + reference_ops::BinaryFunction( + tflite::micro::GetTensorShape(input1), + tflite::micro::GetTensorData(input1), + tflite::micro::GetTensorShape(input2), denominator_data, + tflite::micro::GetTensorShape(output), + 
tflite::micro::GetTensorData(output), reference_ops::FloorMod); + } + + return kTfLiteOk; +} + +TfLiteStatus Eval(TfLiteContext* context, TfLiteNode* node) { + const TfLiteEvalTensor* input1 = + tflite::micro::GetEvalInput(context, node, kInputTensor1); + const TfLiteEvalTensor* input2 = + tflite::micro::GetEvalInput(context, node, kInputTensor2); + TfLiteEvalTensor* output = + tflite::micro::GetEvalOutput(context, node, kOutputTensor); + + bool requires_broadcast = !tflite::micro::HaveSameShapes(input1, input2); + + switch (input1->type) { + case kTfLiteFloat32: { + return EvalFloorMod(context, requires_broadcast, input1, input2, + output); + } + default: { + MicroPrintf("Type '%s' is not supported by FLOOR_MOD.", + TfLiteTypeGetName(input1->type)); + return kTfLiteError; + } + } +} + +} // namespace + +TFLMRegistration Register_FLOOR_MOD() { + return tflite::micro::RegisterOp(Init, Prepare, Eval); +} + +} // namespace tflite diff --git a/tensorflow/lite/micro/kernels/floor_mod_test.cc b/tensorflow/lite/micro/kernels/floor_mod_test.cc new file mode 100644 index 0000000..dd0d179 --- /dev/null +++ b/tensorflow/lite/micro/kernels/floor_mod_test.cc @@ -0,0 +1,108 @@ +/* Copyright 2018 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/ + +#include + +#include "tensorflow/lite/c/builtin_op_data.h" +#include "tensorflow/lite/c/common.h" +#include "tensorflow/lite/micro/kernels/kernel_runner.h" +#include "tensorflow/lite/micro/test_helpers.h" +#include "tensorflow/lite/micro/testing/micro_test.h" + +namespace tflite { +namespace testing { +namespace { + +void ExecuteFloorModTest(TfLiteTensor* tensors, int tensors_count) { + int kInputArrayData[] = {2, 0, 1}; + TfLiteIntArray* inputs_array = IntArrayFromInts(kInputArrayData); + int kOutputArrayData[] = {1, 2}; + TfLiteIntArray* outputs_array = IntArrayFromInts(kOutputArrayData); + + const TFLMRegistration registration = tflite::Register_FLOOR_MOD(); + micro::KernelRunner runner(registration, tensors, tensors_count, inputs_array, + outputs_array, nullptr); + + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, runner.InitAndPrepare()); + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, runner.Invoke()); +} + +template +void TestFloorMod(int* input1_dims_data, const T* input1_data, + int* input2_dims_data, const T* input2_data, + int* expected_dims, const T* expected_data, T* output_data) { + TfLiteIntArray* input1_dims = IntArrayFromInts(input1_dims_data); + TfLiteIntArray* input2_dims = IntArrayFromInts(input2_dims_data); + TfLiteIntArray* output_dims = IntArrayFromInts(expected_dims); + const int output_count = ElementCount(*output_dims); + + TfLiteTensor tensors[] = { + CreateTensor(input1_data, input1_dims), + CreateTensor(input2_data, input2_dims), + CreateTensor(output_data, output_dims), + }; + constexpr int tensors_count = std::extent::value; + + ExecuteFloorModTest(tensors, tensors_count); + + for (int i = 0; i < output_count; i++) { + TF_LITE_MICRO_EXPECT_EQ(expected_data[i], output_data[i]); + } +} + +} // namespace +} // namespace testing +} // namespace tflite + +TF_LITE_MICRO_TESTS_BEGIN + +TF_LITE_MICRO_TEST(FloorModFloatSimple) { + int kDims[] = {4, 1, 2, 2, 1}; + constexpr float 
kInput1[] = {10, 9, 11, 3}; + constexpr float kInput2[] = {2, 2, 3, 4}; + constexpr float kExpect[] = {0, 1, 2, 3}; + constexpr int kOutputCount = std::extent::value; + float output_data[kOutputCount]; + + tflite::testing::TestFloorMod(kDims, kInput1, kDims, kInput2, kDims, kExpect, + output_data); +} + +TF_LITE_MICRO_TEST(FloorModFloatNegativeValue) { + int kDims[] = {4, 1, 2, 2, 1}; + constexpr float kInput1[] = {10, -9, -11, 7}; + constexpr float kInput2[] = {2, 2, -3, -4}; + constexpr float kExpect[] = {0, 1, -2, -1}; + constexpr int kOutputCount = std::extent::value; + float output_data[kOutputCount]; + + tflite::testing::TestFloorMod(kDims, kInput1, kDims, kInput2, kDims, kExpect, + output_data); +} + +TF_LITE_MICRO_TEST(FloorModFloatBroadcast) { + int kDims1[] = {4, 1, 2, 2, 1}; + int kDims2[] = {1, 1}; + constexpr float kInput1[] = {10, -9, -11, 7}; + constexpr float kInput2[] = {-3}; + constexpr float kExpect[] = {-2, 0, -2, -2}; + constexpr int kOutputCount = std::extent::value; + float output_data[kOutputCount]; + + tflite::testing::TestFloorMod(kDims1, kInput1, kDims2, kInput2, kDims1, + kExpect, output_data); +} + +TF_LITE_MICRO_TESTS_END diff --git a/tensorflow/lite/micro/kernels/floor_test.cc b/tensorflow/lite/micro/kernels/floor_test.cc new file mode 100644 index 0000000..466b335 --- /dev/null +++ b/tensorflow/lite/micro/kernels/floor_test.cc @@ -0,0 +1,81 @@ +/* Copyright 2022 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/ + +#include "tensorflow/lite/c/builtin_op_data.h" +#include "tensorflow/lite/c/common.h" +#include "tensorflow/lite/micro/kernels/kernel_runner.h" +#include "tensorflow/lite/micro/test_helpers.h" +#include "tensorflow/lite/micro/testing/micro_test.h" + +namespace tflite { +namespace testing { +namespace { + +void TestFloor(int* input_dims_data, const float* input_data, + const float* expected_output_data, int* output_dims_data, + float* output_data) { + TfLiteIntArray* input_dims = IntArrayFromInts(input_dims_data); + TfLiteIntArray* output_dims = IntArrayFromInts(output_dims_data); + const int output_dims_count = ElementCount(*output_dims); + constexpr int inputs_size = 1; + constexpr int outputs_size = 1; + constexpr int tensors_size = inputs_size + outputs_size; + TfLiteTensor tensors[tensors_size] = { + CreateTensor(input_data, input_dims), + CreateTensor(output_data, output_dims), + }; + + int inputs_array_data[] = {1, 0}; + TfLiteIntArray* inputs_array = IntArrayFromInts(inputs_array_data); + int outputs_array_data[] = {1, 1}; + TfLiteIntArray* outputs_array = IntArrayFromInts(outputs_array_data); + + const TFLMRegistration registration = Register_FLOOR(); + micro::KernelRunner runner(registration, tensors, tensors_size, inputs_array, + outputs_array, /*builtin_data=*/nullptr); + + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, runner.InitAndPrepare()); + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, runner.Invoke()); + + for (int i = 0; i < output_dims_count; ++i) { + TF_LITE_MICRO_EXPECT_NEAR(expected_output_data[i], output_data[i], 1e-5f); + } +} + +} // namespace +} // namespace testing +} // namespace tflite + +TF_LITE_MICRO_TESTS_BEGIN + +TF_LITE_MICRO_TEST(FloorOpSingleDimFloat32) { + int dims[] = {1, 2}; + const float input[] = {8.5f, 0.0f}; + const float golden[] = {8, 0}; + float output_data[2]; + tflite::testing::TestFloor(dims, input, golden, dims, output_data); +} + 
+TF_LITE_MICRO_TEST(FloorOpMultiDimFloat32) { + int dims[] = {4, 2, 1, 1, 5}; + const float input[] = {0.0001f, 8.0001f, 0.9999f, 9.9999f, 0.5f, + -0.0001f, -8.0001f, -0.9999f, -9.9999f, -0.5f}; + const float golden[] = {0.0f, 8.0f, 0.0f, 9.0f, 0.0f, + -1.0f, -9.0f, -1.0f, -10.0f, -1.0f}; + float output_data[10]; + tflite::testing::TestFloor(dims, input, golden, dims, output_data); +} + +TF_LITE_MICRO_TESTS_END diff --git a/tensorflow/lite/micro/kernels/fully_connected.cc b/tensorflow/lite/micro/kernels/fully_connected.cc new file mode 100644 index 0000000..f732b29 --- /dev/null +++ b/tensorflow/lite/micro/kernels/fully_connected.cc @@ -0,0 +1,206 @@ +/* Copyright 2022 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/ + +#include "tensorflow/lite/micro/kernels/fully_connected.h" + +#include "tensorflow/lite/c/builtin_op_data.h" +#include "tensorflow/lite/c/common.h" +#include "tensorflow/lite/kernels/internal/portable_tensor_utils.h" +#include "tensorflow/lite/kernels/internal/reference/fully_connected.h" +#include "tensorflow/lite/kernels/internal/reference/integer_ops/fully_connected.h" +#include "tensorflow/lite/micro/kernels/kernel_util.h" +#include "tensorflow/lite/micro/micro_log.h" + +namespace tflite { +namespace { + +void* Init(TfLiteContext* context, const char* buffer, size_t length) { + TFLITE_DCHECK(context->AllocatePersistentBuffer != nullptr); + return context->AllocatePersistentBuffer(context, + sizeof(OpDataFullyConnected)); +} + +TfLiteStatus Prepare(TfLiteContext* context, TfLiteNode* node) { + MicroContext* micro_context = GetMicroContext(context); + + TFLITE_DCHECK(node->user_data != nullptr); + TFLITE_DCHECK(node->builtin_data != nullptr); + + auto* data = static_cast(node->user_data); + const auto params = + static_cast(node->builtin_data); + + TfLiteTensor* input = + micro_context->AllocateTempInputTensor(node, kFullyConnectedInputTensor); + TF_LITE_ENSURE(context, input != nullptr); + TfLiteTensor* filter = micro_context->AllocateTempInputTensor( + node, kFullyConnectedWeightsTensor); + TF_LITE_ENSURE(context, filter != nullptr); + TfLiteTensor* bias = + micro_context->AllocateTempInputTensor(node, kFullyConnectedBiasTensor); + TfLiteTensor* output = micro_context->AllocateTempOutputTensor( + node, kFullyConnectedOutputTensor); + TF_LITE_ENSURE(context, output != nullptr); + TF_LITE_ENSURE_TYPES_EQ(context, input->type, output->type); + + if ((input->type == kTfLiteFloat32 && filter->type != kTfLiteFloat32) || + (input->type == kTfLiteInt8 && + (filter->type != kTfLiteInt8 && filter->type != kTfLiteInt4)) || + (input->type == kTfLiteInt16 && filter->type != kTfLiteInt8)) { 
+ MicroPrintf("Input type: %s with filter type : %s not supported.", + TfLiteTypeGetName(input->type), + TfLiteTypeGetName(filter->type)); + return kTfLiteError; + } + + if (filter->type == kTfLiteInt4) { + int filter_size = + RuntimeShape(filter->dims->size, + reinterpret_cast(filter->dims->data)) + .FlatSize(); + context->RequestScratchBufferInArena(context, filter_size, + &data->filter_buffer_index); + } + + TF_LITE_ENSURE_OK(context, CalculateOpDataFullyConnected( + context, params->activation, input->type, + input, filter, bias, output, data)); + + micro_context->DeallocateTempTfLiteTensor(input); + micro_context->DeallocateTempTfLiteTensor(filter); + if (bias != nullptr) { + micro_context->DeallocateTempTfLiteTensor(bias); + } + micro_context->DeallocateTempTfLiteTensor(output); + return kTfLiteOk; +} + +TfLiteStatus Eval(TfLiteContext* context, TfLiteNode* node) { + TFLITE_DCHECK(node->builtin_data != nullptr); + const auto* params = + static_cast(node->builtin_data); + + const TfLiteEvalTensor* input = + tflite::micro::GetEvalInput(context, node, kFullyConnectedInputTensor); + const TfLiteEvalTensor* filter = + tflite::micro::GetEvalInput(context, node, kFullyConnectedWeightsTensor); + const TfLiteEvalTensor* bias = + tflite::micro::GetEvalInput(context, node, kFullyConnectedBiasTensor); + TfLiteEvalTensor* output = + tflite::micro::GetEvalOutput(context, node, kFullyConnectedOutputTensor); + + TFLITE_DCHECK(node->user_data != nullptr); + + const auto& data = + *(static_cast(node->user_data)); + + // Checks in Prepare ensure input, output and filter types are all the same. 
+ switch (input->type) { + case kTfLiteFloat32: { + tflite::reference_ops::FullyConnected( + FullyConnectedParamsFloat(params->activation), + tflite::micro::GetTensorShape(input), + tflite::micro::GetTensorData(input), + tflite::micro::GetTensorShape(filter), + tflite::micro::GetTensorData(filter), + tflite::micro::GetTensorShape(bias), + tflite::micro::GetOptionalTensorData(bias), + tflite::micro::GetTensorShape(output), + tflite::micro::GetTensorData(output)); + break; + } + + case kTfLiteInt8: { + switch (filter->type) { + case kTfLiteInt4: { + int8_t* unpacked_filter_data = static_cast( + context->GetScratchBuffer(context, data.filter_buffer_index)); + tflite::tensor_utils::UnpackDenseInt4IntoInt8( + tflite::micro::GetTensorData(filter), + tflite::micro::GetTensorShape(filter).FlatSize(), + unpacked_filter_data); + tflite::reference_integer_ops::FullyConnected( + FullyConnectedParamsQuantized(data), + tflite::micro::GetTensorShape(input), + tflite::micro::GetTensorData(input), + tflite::micro::GetTensorShape(filter), unpacked_filter_data, + tflite::micro::GetTensorShape(bias), + tflite::micro::GetOptionalTensorData(bias), + tflite::micro::GetTensorShape(output), + tflite::micro::GetTensorData(output)); + break; + } + case kTfLiteInt8: { + tflite::reference_integer_ops::FullyConnected( + FullyConnectedParamsQuantized(data), + tflite::micro::GetTensorShape(input), + tflite::micro::GetTensorData(input), + tflite::micro::GetTensorShape(filter), + tflite::micro::GetTensorData(filter), + tflite::micro::GetTensorShape(bias), + tflite::micro::GetOptionalTensorData(bias), + tflite::micro::GetTensorShape(output), + tflite::micro::GetTensorData(output)); + break; + } + default: { + MicroPrintf("Filter type %s (%d) not supported.", + TfLiteTypeGetName(filter->type), input->type); + return kTfLiteError; + } + } + break; + } + + case kTfLiteInt16: { + switch (filter->type) { + case kTfLiteInt8: { + tflite::reference_integer_ops::FullyConnected( + 
FullyConnectedParamsQuantized(data), + tflite::micro::GetTensorShape(input), + tflite::micro::GetTensorData(input), + tflite::micro::GetTensorShape(filter), + tflite::micro::GetTensorData(filter), + tflite::micro::GetTensorShape(bias), + tflite::micro::GetOptionalTensorData(bias), + tflite::micro::GetTensorShape(output), + tflite::micro::GetTensorData(output)); + break; + } + default: { + MicroPrintf("Filter type %s (%d) not supported.", + TfLiteTypeGetName(filter->type), input->type); + return kTfLiteError; + } + } + break; + } + + default: { + MicroPrintf("Input type %s (%d) not supported.", + TfLiteTypeGetName(input->type), input->type); + return kTfLiteError; + } + } + return kTfLiteOk; +} + +} // namespace + +TFLMRegistration Register_FULLY_CONNECTED() { + return tflite::micro::RegisterOp(Init, Prepare, Eval); +} + +} // namespace tflite diff --git a/tensorflow/lite/micro/kernels/fully_connected.h b/tensorflow/lite/micro/kernels/fully_connected.h new file mode 100644 index 0000000..3fa6060 --- /dev/null +++ b/tensorflow/lite/micro/kernels/fully_connected.h @@ -0,0 +1,112 @@ +/* Copyright 2022 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/ +#ifndef TENSORFLOW_LITE_MICRO_KERNELS_FULLY_CONNECTED_H_ +#define TENSORFLOW_LITE_MICRO_KERNELS_FULLY_CONNECTED_H_ + +#include + +#include "tensorflow/lite/c/builtin_op_data.h" +#include "tensorflow/lite/kernels/internal/types.h" +#include "tensorflow/lite/micro/micro_common.h" + +namespace tflite { + +struct OpDataFullyConnected { + // The scaling factor from input to output (aka the 'real multiplier') can + // be represented as a fixed point multiplier plus a left shift. + int32_t output_multiplier; + int output_shift; + // The range of the fused activation layer. For example for kNone and + // uint8_t these would be 0 and 255. + int32_t output_activation_min; + int32_t output_activation_max; + // The index of the temporary tensor where the quantized inputs are cached. + int input_quantized_index; + // Cached zero point values of tensors. + int32_t input_zero_point; + int32_t filter_zero_point; + int32_t output_zero_point; + +// TODO(b/258710417): enable by default once optimized fully-connected works for +// all targets. +#if !defined(HEXAGON) + // A buffer used to store unpacked filter values. This is used if the source + // tensor is of n-bit precision that cannot be easily processed by kernels. + int filter_buffer_index; +#endif +}; + +extern const int kFullyConnectedInputTensor; +extern const int kFullyConnectedWeightsTensor; +extern const int kFullyConnectedBiasTensor; +extern const int kFullyConnectedOutputTensor; + +// Returns a FullyConnectedParams struct with all the parameters needed for a +// float computation. +FullyConnectedParams FullyConnectedParamsFloat( + TfLiteFusedActivation activation); + +// Returns a FullyConnectedParams struct with all the parameters needed for a +// quantized computation. 
+FullyConnectedParams FullyConnectedParamsQuantized( + const OpDataFullyConnected& op_data); + +TfLiteStatus CalculateOpDataFullyConnected( + TfLiteContext* context, TfLiteFusedActivation activation, + TfLiteType data_type, const TfLiteTensor* input, const TfLiteTensor* filter, + const TfLiteTensor* bias, TfLiteTensor* output, OpDataFullyConnected* data); + +// This is the most generic TFLMRegistration. The actual supported types +// may still be target dependent. The only requirement is that every +// implementation (reference or optimized) must define this function. +TFLMRegistration Register_FULLY_CONNECTED(); + +#if defined(CMSIS_NN) || defined(HEXAGON) || defined(XTENSA) +// Returns a TFLMRegistration struct for kernel variant that only supports +// int8. +TFLMRegistration Register_FULLY_CONNECTED_INT8(); + +#else +// Note that while this block gets used for both reference and optimized kernels +// that do not have any specialized implementations, the only goal here is to +// define fallback implementation that allow reference kernels to still be used +// from applications that call a more specific kernel variant. + +inline TFLMRegistration Register_FULLY_CONNECTED_INT8() { + return Register_FULLY_CONNECTED(); +} + +#endif + +#if defined(CMSIS_NN) +// Returns a TFLMRegistration struct for kernel variant that only supports +// int16. +TFLMRegistration Register_FULLY_CONNECTED_INT16(); + +#else +// Note that while this block gets used for both reference and optimized kernels +// that do not have any specialized implementations, the only goal here is to +// define fallback implementation that allow reference kernels to still be used +// from applications that call a more specific kernel variant. 
+ +inline TFLMRegistration Register_FULLY_CONNECTED_INT16() { + return Register_FULLY_CONNECTED(); +} + +#endif + +} // namespace tflite + +#endif // TENSORFLOW_LITE_MICRO_KERNELS_FULLY_CONNECTED_H_ diff --git a/tensorflow/lite/micro/kernels/fully_connected_common.cc b/tensorflow/lite/micro/kernels/fully_connected_common.cc new file mode 100644 index 0000000..5a8d312 --- /dev/null +++ b/tensorflow/lite/micro/kernels/fully_connected_common.cc @@ -0,0 +1,84 @@ +/* Copyright 2020 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/ + +#include "tensorflow/lite/c/builtin_op_data.h" +#include "tensorflow/lite/c/common.h" +#include "tensorflow/lite/kernels/internal/common.h" +#include "tensorflow/lite/kernels/internal/quantization_util.h" +#include "tensorflow/lite/kernels/internal/reference/fully_connected.h" +#include "tensorflow/lite/kernels/internal/reference/integer_ops/fully_connected.h" +#include "tensorflow/lite/kernels/internal/tensor_ctypes.h" +#include "tensorflow/lite/kernels/kernel_util.h" +#include "tensorflow/lite/micro/kernels/fully_connected.h" +#include "tensorflow/lite/micro/kernels/kernel_util.h" + +namespace tflite { + +const int kFullyConnectedInputTensor = 0; +const int kFullyConnectedWeightsTensor = 1; +const int kFullyConnectedBiasTensor = 2; +const int kFullyConnectedOutputTensor = 0; + +FullyConnectedParams FullyConnectedParamsQuantized( + const OpDataFullyConnected& op_data) { + FullyConnectedParams op_params; + op_params.input_offset = -op_data.input_zero_point; + op_params.weights_offset = -op_data.filter_zero_point; + op_params.output_offset = op_data.output_zero_point; + op_params.output_multiplier = op_data.output_multiplier; + op_params.output_shift = op_data.output_shift; + op_params.quantized_activation_min = op_data.output_activation_min; + op_params.quantized_activation_max = op_data.output_activation_max; + return op_params; +} + +FullyConnectedParams FullyConnectedParamsFloat( + TfLiteFusedActivation activation) { + FullyConnectedParams op_params; + CalculateActivationRange(activation, &op_params.float_activation_min, + &op_params.float_activation_max); + return op_params; +} + +TfLiteStatus CalculateOpDataFullyConnected( + TfLiteContext* context, TfLiteFusedActivation activation, + TfLiteType data_type, const TfLiteTensor* input, const TfLiteTensor* filter, + const TfLiteTensor* bias, TfLiteTensor* output, + OpDataFullyConnected* data) { + if (data_type != kTfLiteFloat32) { 
+ double real_multiplier = 0.0; + TF_LITE_ENSURE_STATUS(GetQuantizedConvolutionMultipler( + context, input, filter, bias, output, &real_multiplier)); + QuantizeMultiplier(real_multiplier, &data->output_multiplier, + &data->output_shift); + + // Filter weights will always be symmetric quantized since we only support + // int8 quantization. See + // https://github.com/tensorflow/tensorflow/issues/44912 for additional + // context. + TFLITE_DCHECK(filter->params.zero_point == 0); + + data->input_zero_point = input->params.zero_point; + data->filter_zero_point = filter->params.zero_point; + data->output_zero_point = output->params.zero_point; + + return CalculateActivationRangeQuantized(context, activation, output, + &data->output_activation_min, + &data->output_activation_max); + } + return kTfLiteOk; +} + +} // namespace tflite diff --git a/tensorflow/lite/micro/kernels/fully_connected_test.cc b/tensorflow/lite/micro/kernels/fully_connected_test.cc new file mode 100644 index 0000000..2e9206a --- /dev/null +++ b/tensorflow/lite/micro/kernels/fully_connected_test.cc @@ -0,0 +1,650 @@ +/* Copyright 2022 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/ + +#include +#include + +#include "tensorflow/lite/c/builtin_op_data.h" +#include "tensorflow/lite/c/common.h" +#include "tensorflow/lite/micro/kernels/kernel_runner.h" +#include "tensorflow/lite/micro/micro_utils.h" +#include "tensorflow/lite/micro/test_helpers.h" +#include "tensorflow/lite/micro/testing/micro_test.h" + +namespace tflite { +namespace testing { +namespace { + +// Simple test data for 2x2x10 input 2x3x10 weights. +const int simple_input_size = 20; +int simple_input_dims[] = {2, 2, 10}; +const float simple_input_data[] = { + 1, 2, 3, 4, 5, 6, 7, 8, -9, -10, // b = 0 + 1, 2, 3, 4, 5, 6, 7, -8, 9, -10, // b = 1 +}; +const int simple_weights_size = 30; +int simple_weights_dims[] = {2, 3, 10}; +const float simple_weights_data[] = { + 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, // u = 0 + 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, // u = 1 + 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, // u = 2 +}; + +// TODO(b/258710417): INT4 isn't currently supported on Hexagon. +#if !defined(HEXAGON) +const float simple_int4_weights_data[] = { + -2, -1, 0, 1, 2, 3, 4, 5, 6, 7, // u = 0 + -2, -1, 0, 1, 2, 3, 4, 5, 6, 7, // u = 1 + -2, -1, 0, 1, 2, 3, 4, 5, 6, 7, // u = 2 +}; +const float simple_golden_null_bias_int4_weights[] = { + -28, -28, -28, 0, 0, 0, +}; +#endif +int simple_bias_dims[] = {1, 3}; +const float simple_bias_data[] = {1, 2, 3}; +const float simple_golden[] = { + 24, 25, 26, 58, 59, 60, +}; +const float simple_golden_null_bias[] = { + 23, 23, 23, 57, 57, 57, +}; + +const int simple_output_size = 6; +int simple_output_dims[] = {2, 2, 3}; + +// Test data for 2x2x10 input 2x3x10 weights with negative outputs to test relu. 
+const int relu_input_size = 20; +int relu_input_dims[] = {2, 2, 10}; +const float relu_input_data[] = { + 1, 2, 3, 4, 5, 6, 7, 8, -9, -10, // b = 0 + 1, 2, 3, 4, 5, 6, 7, -8, 9, -10, // b = 1 +}; +const int relu_weights_size = 30; +int relu_weights_dims[] = {2, 3, 10}; +const float relu_weights_data[] = { + 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, // u = 0 + -1, -2, -3, -4, -5, -6, -7, -8, -9, -10, // u = 1 + 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, // u = 2 +}; +int relu_bias_dims[] = {1, 3}; +const float relu_bias_data[] = {1, -2, 3}; +const float relu_golden[] = { + 24, 0, 26, 58, 0, 60, +}; +const int relu_output_size = 6; +int relu_output_dims[] = {2, 2, 3}; + +// Input and filter similar to real model. Input shape is 1x64 and output is +// 1x16. +const int representative_64x16_input_size = 64; +int representative_64x16_input_dims[] = {2, 1, 64}; +const float representative_64x16_input_data[] = { + 0.0000, 0.1543, 0.0000, 0.0000, 1.8520, 0.0000, 4.7844, 1.1832, + 0.0000, 0.0000, 0.0000, 0.0000, 0.0000, 0.0000, 1.5948, 0.0000, + 1.5948, 1.9549, 0.0000, 1.2347, 0.0000, 1.5948, 1.5948, 0.5145, + 0.0000, 0.0000, 0.0000, 0.0000, 2.6237, 0.0000, 0.0000, 0.0000, + 1.3890, 5.3503, 2.3665, 2.9838, 0.0000, 1.2861, 0.0000, 3.0867, + 0.9775, 0.0000, 5.9676, 0.0000, 0.0000, 1.4405, 0.5145, 2.5723, + 3.1896, 4.4757, 0.0000, 0.0000, 0.0000, 0.0000, 4.1671, 0.0000, + 2.8295, 3.0353, 0.0000, 2.7780, 0.0000, 0.0000, 0.0000, 0.0000}; +const int representative_64x16_weights_size = 64 * 16; +int representative_64x16_weights_dims[] = {2, 16, 64}; +const float representative_64x16_weights_data[] = { + -0.1075, 0.1245, 0.1811, -0.1302, -0.1868, 0.0679, 0.1245, 0.2321, + -0.1981, -0.2094, 0.1358, -0.1698, 0.0113, 0.0566, 0.1358, -0.2490, + 0.0000, -0.1189, -0.0170, -0.0396, -0.3113, 0.1641, -0.4188, 0.0566, + -0.4471, 0.4754, -0.0396, 0.0113, -0.0340, 0.0170, 0.0170, 0.1811, + -0.0792, 0.4981, 0.2490, -0.1924, 0.0792, 0.1868, -0.1075, -0.3962, + 0.1358, 0.2547, -0.1245, -0.0962, -0.0283, 0.4132, 
-0.0057, -0.5150, + 0.1019, 0.1585, -0.0962, -0.2207, -0.2377, 0.2830, 0.4471, 0.0170, + 0.0566, 0.2038, 0.1019, -0.0226, 0.2830, 0.1415, 0.0283, -0.0792, + 0.4301, 0.3226, -0.1132, 0.4981, -0.3849, -0.2943, -0.2547, -0.2264, + 0.0453, -0.0170, 0.0396, 0.1415, 0.3000, 0.2547, 0.0962, 0.2151, + -0.1585, -0.1302, -0.0057, -0.2773, 0.0283, -0.0906, 0.1302, -0.1075, + -0.0566, 0.1755, 0.2773, 0.0283, 0.0566, 0.1528, -0.0736, -0.2830, + 0.0792, 0.0962, -0.2321, -0.0113, 0.2660, -0.2887, -0.0566, 0.0057, + -0.2547, -0.0679, -0.2321, 0.0340, 0.1868, 0.2490, 0.2264, -0.3509, + 0.1585, -0.0849, -0.0623, 0.1132, 0.3396, -0.2490, 0.1528, 0.0679, + 0.1755, 0.4754, -0.0057, -0.2151, -0.1415, -0.1302, -0.2717, 0.1641, + 0.5037, -0.2321, 0.0170, -0.1755, -0.1075, -0.0226, 0.2038, -0.0340, + -0.5150, -0.3113, 0.1472, -0.0226, 0.1528, 0.1189, -0.1472, 0.0396, + -0.3000, -0.1924, -0.0283, 0.0283, 0.1641, 0.0736, 0.1472, -0.1755, + -0.1132, 0.0113, -0.1868, -0.2604, -0.3283, -0.0509, 0.0283, -0.0679, + 0.0623, 0.0792, -0.0283, -0.0962, 0.0396, 0.1641, 0.4584, 0.3226, + 0.0226, -0.1811, 0.2377, -0.1019, 0.2321, 0.1811, -0.1924, -0.0057, + 0.0736, 0.0113, 0.2547, -0.2264, -0.0170, -0.0396, 0.1245, -0.1415, + 0.1755, 0.3679, -0.2377, -0.0396, -0.1585, -0.3000, -0.1641, -0.1302, + -0.0396, -0.1698, 0.1189, 0.2434, 0.1132, -0.1245, -0.1415, 0.0453, + 0.1868, -0.0906, -0.1189, -0.0509, 0.0057, -0.1189, -0.0057, 0.0170, + -0.1924, 0.2207, 0.0792, -0.4641, -0.2660, 0.2943, 0.1358, -0.0340, + -0.3339, -0.1189, 0.0906, -0.4358, 0.0453, -0.1755, 0.1415, 0.0340, + 0.1924, -0.0057, 0.2321, -0.2094, -0.1132, 0.0000, 0.1924, -0.3000, + 0.0340, -0.3396, -0.0906, -0.0340, 0.1641, -0.0226, -0.1472, -0.1019, + 0.2377, -0.0962, -0.3396, -0.5433, 0.0906, 0.2151, -0.0679, 0.1755, + 0.1528, 0.0283, -0.4188, -0.0340, -0.0057, -0.0679, 0.0509, 0.1472, + -0.3849, -0.0113, 0.3962, 0.0849, 0.1472, 0.0340, -0.1358, 0.1641, + -0.2038, 0.2151, -0.1189, -0.3679, 0.0906, -0.0679, 0.5716, -0.0057, + -0.0736, 0.0113, 
0.2830, -0.2887, 0.0396, 0.0849, -0.0736, -0.0736, + -0.3679, 0.2264, 0.0113, -0.1641, 0.0396, -0.1132, -0.0623, 0.3113, + 0.5999, -0.1415, 0.1472, -0.2038, -0.1132, -0.2377, 0.0566, 0.1755, + -0.0057, -0.0453, 0.0226, 0.1132, 0.1698, 0.0340, -0.0226, 0.0226, + 0.4415, -0.3792, 0.0792, 0.3736, -0.5999, -0.3056, -0.1924, -0.1132, + -0.0962, 0.0283, 0.0000, -0.3339, -0.3226, 0.3679, -0.0453, -0.1641, + 0.0170, 0.1302, -0.0170, -0.0509, 0.1755, -0.0283, -0.1302, -0.2887, + -0.0679, 0.0340, 0.4641, 0.2321, 0.7188, 0.3339, -0.1075, 0.4754, + -0.0226, 0.3226, -0.1528, -0.0849, 0.0509, -0.1981, 0.0113, 0.2321, + 0.2773, -0.1019, 0.4075, 0.0396, 0.0792, 0.1132, -0.0906, -0.4188, + 0.1924, -0.3679, -0.6396, 0.1358, 0.4981, 0.4132, -0.0283, 0.3849, + -0.3509, -0.0566, -0.0962, 0.3113, -0.1811, 0.4019, 0.0453, -0.0057, + -0.1868, -0.2490, -0.0792, -0.3622, 0.1924, -0.0453, -0.1528, -0.1811, + 0.5943, -0.1302, 0.3170, -0.0170, 0.0509, -0.1528, -0.1755, 0.5547, + 0.2490, -0.0906, 0.0000, 0.1698, 0.0000, 0.0340, -0.1132, -0.0509, + -0.1755, -0.2943, 0.1472, 0.0849, 0.0000, 0.1528, -0.0566, 0.1528, + -0.5264, -0.5320, -0.0736, 0.0566, 0.2604, -0.4075, 0.0962, -0.3453, + -0.1415, 0.0057, 0.3905, 0.2830, 0.3679, 0.5320, -0.2660, 0.0340, + 0.0736, 0.0057, 0.2207, 0.4471, 0.0849, 0.3000, -0.0057, -0.0623, + 0.1415, -0.0566, 0.5264, -0.0340, 0.0226, -0.0623, -0.0113, -0.5037, + -0.4471, 0.0170, -0.0396, -0.1358, -0.1698, 0.1924, 0.0057, -0.1585, + 0.0849, -0.1698, 0.0057, -0.1245, -0.0170, -0.1755, -0.0792, 0.5264, + 0.1358, 0.2434, 0.1585, -0.4188, -0.1472, -0.1358, -0.0849, -0.1189, + 0.5037, 0.0736, -0.0453, -0.2434, 0.1868, -0.0679, 0.1415, -0.2717, + 0.2604, 0.0057, -0.1528, -0.1811, 0.0226, -0.1641, 0.3170, -0.1981, + 0.1245, 0.0226, 0.0566, 0.2830, -0.1755, 0.0396, -0.2094, 0.1924, + 0.1698, 0.0283, 0.1641, 0.0849, 0.0000, -0.1698, -0.1415, -0.3000, + 0.4471, 0.3056, -0.0283, -0.4245, -0.0453, 0.0226, 0.0000, -0.1075, + -0.1528, -0.3226, 0.2773, -0.2264, -0.1811, 0.1755, 
-0.3566, -0.4188, + 0.1755, -0.0057, 0.2038, 0.1075, 0.3679, -0.0792, 0.2207, -0.0453, + 0.3736, 0.2943, -0.0113, -0.0623, 0.2264, 0.0113, -0.0396, -0.2207, + 0.0453, -0.2830, -0.1302, 0.0623, -0.1924, -0.1811, -0.2717, 0.2830, + 0.2094, 0.0170, -0.3170, -0.0283, -0.1189, -0.0509, -0.0566, -0.3622, + 0.1132, -0.0906, 0.1132, 0.4019, -0.4698, -0.1019, -0.1075, -0.2094, + -0.2207, -0.0509, 0.0057, 0.1019, -0.0509, 0.2264, -0.5716, 0.0226, + -0.4019, 0.1641, -0.3000, 0.3849, 0.1245, 0.0679, 0.3056, 0.2377, + 0.0679, -0.0170, -0.5377, -0.0170, 0.0057, 0.1358, -0.1132, -0.2038, + 0.0679, 0.1075, -0.2773, 0.5943, 0.0623, -0.1472, 0.3566, 0.0396, + -0.2377, 0.2604, 0.0849, 0.1358, -0.3792, -0.0340, -0.1415, 0.3566, + -0.3736, 0.1245, 0.0566, 0.3396, 0.0736, 0.4019, -0.1528, 0.1075, + 0.0792, -0.2547, 0.0453, -0.1755, 0.1868, -0.2547, 0.1075, 0.0623, + 0.1698, -0.0170, 0.1585, -0.0736, -0.4358, -0.0113, -0.6792, -0.0849, + -0.0396, -0.6056, 0.1358, 0.1189, 0.2547, 0.1528, 0.2887, 0.0453, + -0.1075, -0.3283, -0.0453, -0.0509, 0.2038, 0.2547, 0.0849, -0.0566, + -0.1698, 0.0509, -0.0113, -0.1585, 0.1924, -0.0792, -0.1868, 0.0509, + -0.1698, -0.0849, -0.0170, 0.0453, 0.3170, 0.0906, -0.5943, -0.1245, + 0.1585, -0.1755, -0.2151, 0.0906, 0.1924, 0.3170, -0.2490, -0.5660, + -0.0283, 0.0962, -0.1358, 0.1585, 0.0057, -0.2604, 0.1189, -0.0170, + 0.3509, 0.0623, 0.0679, -0.1302, -0.0792, 0.0906, -0.0792, 0.0849, + -0.1924, 0.2604, -0.1245, -0.3679, 0.0340, 0.0113, -0.1698, 0.2490, + 0.0283, 0.1019, -0.3736, 0.1019, -0.2207, -0.0340, 0.3170, 0.1755, + 0.0962, 0.3226, -0.0113, -0.1189, -0.2321, -0.0226, -0.2434, -0.0170, + -0.1585, -0.0283, -0.1132, 0.0679, -0.4188, -0.0453, 0.1528, -0.1302, + -0.3792, 0.1415, -0.1358, -0.1811, 0.1302, 0.1415, 0.5207, 0.0509, + -0.1358, -0.0396, -0.2434, 0.0396, 0.0792, -0.2264, -0.1415, 0.0906, + 0.1245, 0.0170, 0.0623, -0.1415, 0.2773, -0.3566, -0.0396, 0.2887, + 0.4188, 0.1698, -0.2547, 0.1132, -0.0453, -0.0113, -0.1358, 0.1075, + 0.0566, 0.1075, 
0.2604, -0.0849, -0.2490, 0.1415, 0.0509, -0.2151, + 0.0340, 0.1698, 0.0509, -0.0906, 0.0566, -0.1075, -0.2151, 0.2038, + -0.1924, -0.0113, 0.2830, 0.1358, -0.1189, 0.0113, -0.5603, -0.2830, + -0.2943, 0.0453, -0.0396, 0.1358, 0.0566, 0.2038, -0.3283, -0.0509, + 0.0509, 0.1641, 0.2094, -0.2038, -0.1868, -0.1585, -0.2207, -0.1302, + 0.0396, -0.1019, -0.0679, 0.1075, -0.4584, -0.2207, 0.2434, -0.0113, + 0.0849, 0.1755, -0.3056, 0.1585, -0.2547, 0.0453, 0.0906, -0.1358, + -0.0679, -0.0509, 0.0679, -0.3509, 0.0057, 0.0453, 0.4132, -0.1981, + 0.2264, -0.0736, 0.1075, 0.0679, -0.0906, -0.3113, 0.0509, 0.0849, + 0.2604, 0.0623, -0.3113, 0.3849, 0.0000, 0.6396, -0.2038, -0.1019, + 0.1245, -0.0453, 0.1641, 0.1075, -0.1075, -0.2660, -0.4528, -0.0566, + -0.0170, 0.0453, 0.0340, 0.1189, -0.2434, -0.0283, -0.1811, 0.2547, + 0.0000, -0.0226, 0.4471, 0.1019, -0.1472, 0.0849, 0.1075, 0.1075, + 0.0283, -0.2773, 0.4415, -0.1811, 0.2717, 0.3170, 0.0509, 0.0623, + -0.0962, 0.1585, -0.0792, -0.1811, -0.0792, -0.3283, 0.0962, -0.1698, + -0.0736, 0.0453, 0.0962, -0.3566, -0.4584, 0.3396, -0.4811, 0.3056, + -0.1755, 0.2490, -0.1698, -0.2377, -0.3339, -0.0453, 0.1811, 0.0736, + 0.0340, -0.0962, -0.0113, -0.3056, -0.3339, 0.2038, 0.2038, -0.1924, + 0.2547, -0.4471, -0.0849, -0.2038, 0.3566, -0.4811, 0.3453, 0.0849, + 0.1189, 0.3170, -0.1358, 0.2717, 0.0113, -0.4754, -0.1924, 0.4245, + -0.2773, 0.3453, 0.2264, 0.2943, 0.5320, 0.2773, -0.2264, -0.1019, + -0.1132, -0.3962, 0.3679, 0.0509, -0.0623, -0.0906, -0.5603, -0.1641, + -0.3170, -0.2377, 0.1415, -0.0509, 0.0792, 0.0170, -0.0226, -0.0057, + -0.1358, -0.4245, 0.3905, 0.3113, 0.0340, -0.1189, 0.2887, -0.2943, + -0.3056, 0.2434, 0.1019, -0.0170, 0.3849, 0.1528, -0.0736, -0.0170, + 0.0792, 0.1755, 0.0509, 0.3509, 0.1472, 0.1528, 0.1472, 0.0057, + 0.0113, -0.0113, -0.3283, -0.3962, -0.0792, -0.1245, -0.0283, -0.1868, + 0.4019, 0.2943, -0.0906, -0.2321, 0.6056, 0.1189, 0.0340, -0.2207, + -0.0453, 0.3339, 0.2377, -0.1641, 0.3736, 0.2151, 
-0.2547, 0.0453, + 0.1924, -0.1019, -0.0340, -0.2207, 0.3962, -0.4471, -0.2547, -0.2151, + -0.3736, 0.0283, 0.1189, 0.0283, 0.0736, 0.0396, 0.1019, 0.0283, + 0.0170, 0.2321, 0.3509, -0.0226, -0.0226, 0.0736, 0.0283, 0.1641, + -0.0906, 0.1811, 0.0226, 0.5716, -0.0396, -0.0509, -0.1641, -0.0509, + 0.4132, -0.2604, 0.1019, -0.0283, -0.0340, 0.0453, 0.1472, -0.0057, + 0.2717, -0.2094, 0.3396, 0.0340, 0.1245, 0.2547, -0.5886, 0.2717, + -0.0906, 0.1641, 0.0962, -0.0792, -0.0113, 0.2264, -0.0736, 0.3170, + 0.0623, 0.0679, 0.0623, -0.0792, -0.2207, 0.1924, 0.1245, -0.2773}; +int representative_64x16_bias_dims[] = {1, 16}; +const float representative_64x16_bias_data[] = { + -0.0084, 0.0006, 0.0000, 0.0000, -0.0087, -0.0006, -0.0003, -0.0003, + 0.0006, -0.0003, -0.0003, -0.0003, -0.0253, 0.0012, 0.0000, 0.0000}; +const float representative_64x16_golden[] = { + 3.8624, -2.9580, 4.3043, -1.2844, -1.5769, -2.7998, -0.1011, -3.4029, + -1.0557, -7.1931, -1.4852, -0.4163, 1.7186, -0.6965, 0.3580, 2.7378}; +const int representative_64x16_output_size = 16; +int representative_64x16_output_dims[] = {2, 1, 16}; + +template +TfLiteStatus ValidateFullyConnectedGoldens( + TfLiteTensor* tensors, const int tensors_size, bool null_bias, + const TfLiteFusedActivation activation, const float tolerance, + const int output_len, const T* golden, T* output_data) { + TfLiteFullyConnectedParams builtin_data = { + activation, kTfLiteFullyConnectedWeightsFormatDefault, false, false}; + + // Avoid variable length array warning. 
+ constexpr int inputs_array_len = 4; + constexpr int outputs_array_len = 2; + int inputs_array_data[inputs_array_len]; + int outputs_array_data[outputs_array_len]; + + outputs_array_data[0] = 1; + inputs_array_data[1] = 0; + inputs_array_data[2] = 1; + + if (null_bias) { + inputs_array_data[0] = 2; + outputs_array_data[1] = 2; + } else { + inputs_array_data[0] = 3; + inputs_array_data[3] = 2; + outputs_array_data[1] = 3; + } + + TfLiteIntArray* inputs_array = IntArrayFromInts(inputs_array_data); + TfLiteIntArray* outputs_array = IntArrayFromInts(outputs_array_data); + + const TFLMRegistration registration = Register_FULLY_CONNECTED(); + micro::KernelRunner runner(registration, tensors, tensors_size, inputs_array, + outputs_array, + reinterpret_cast(&builtin_data)); + + TfLiteStatus status = runner.InitAndPrepare(); + if (status != kTfLiteOk) { + return status; + } + + status = runner.Invoke(); + if (status != kTfLiteOk) { + return status; + } + + for (int i = 0; i < output_len; ++i) { + TF_LITE_MICRO_EXPECT_NEAR(golden[i], output_data[i], tolerance); + } + return kTfLiteOk; +} + +TfLiteStatus TestFullyConnectedFloat( + int* input_dims_data, const float* input_data, int* weights_dims_data, + const float* weights_data, int* bias_dims_data, const float* bias_data, + const float* golden, int* output_dims_data, + TfLiteFusedActivation activation, float* output_data) { + TfLiteIntArray* input_dims = IntArrayFromInts(input_dims_data); + TfLiteIntArray* weights_dims = IntArrayFromInts(weights_dims_data); + TfLiteIntArray* bias_dims = IntArrayFromInts(bias_dims_data); + TfLiteIntArray* output_dims = IntArrayFromInts(output_dims_data); + const int output_dims_count = ElementCount(*output_dims); + bool null_bias = bias_data == nullptr ? true : false; + + constexpr int array_size = 4; // Avoid variable length array warning. + const int inputs_size = bias_data == nullptr ? 
2 : 3; + constexpr int outputs_size = 1; + const int tensors_size = inputs_size + outputs_size; + TfLiteTensor tensors[array_size]; + + tensors[0] = CreateTensor(input_data, input_dims); + tensors[1] = CreateTensor(weights_data, weights_dims); + + if (bias_data == nullptr) { + tensors[2] = CreateTensor(output_data, output_dims); + } else { + tensors[2] = CreateTensor(bias_data, bias_dims); + tensors[3] = CreateTensor(output_data, output_dims); + } + + return ValidateFullyConnectedGoldens(tensors, tensors_size, null_bias, + activation, 1e-4f, output_dims_count, + golden, output_data); +} + +template +TfLiteStatus TestFullyConnectedQuantized( + int* input_dims_data, const float* input_data, dataT* input_quantized, + const float input_scale, const int input_zero_point, int* weights_dims_data, + const float* weights_data, weightT* weights_quantized, + const float weights_scale, const int weights_zero_point, + int* bias_dims_data, const float* bias_data, biasT* bias_quantized, + const float* golden, dataT* golden_quantized, int* output_dims_data, + const float output_scale, const int output_zero_point, + TfLiteFusedActivation activation, dataT* output_data, + TfLiteType weights_packed_type = kTfLiteNoType) { + TfLiteIntArray* input_dims = IntArrayFromInts(input_dims_data); + TfLiteIntArray* weights_dims = IntArrayFromInts(weights_dims_data); + TfLiteIntArray* bias_dims = IntArrayFromInts(bias_dims_data); + TfLiteIntArray* output_dims = IntArrayFromInts(output_dims_data); + const int output_dims_count = ElementCount(*output_dims); + bool null_bias = bias_data == nullptr ? true : false; + + constexpr int array_size = 4; // Avoid variable length array warning. + const int inputs_size = bias_data == nullptr ? 
2 : 3; + constexpr int outputs_size = 1; + const int tensors_size = inputs_size + outputs_size; + TfLiteTensor tensors[array_size]; + + tensors[0] = CreateQuantizedTensor(input_data, input_quantized, input_dims, + input_scale, input_zero_point); + tensors[1] = CreateQuantizedTensor( + weights_data, weights_quantized, weights_dims, weights_scale, + weights_zero_point, false, weights_packed_type); + if (bias_data == nullptr) { + tensors[2] = CreateQuantizedTensor(output_data, output_dims, output_scale, + output_zero_point); + } else { + tensors[2] = CreateQuantizedBiasTensor(bias_data, bias_quantized, bias_dims, + input_scale, weights_scale), + tensors[3] = CreateQuantizedTensor(output_data, output_dims, output_scale, + output_zero_point); + } + + Quantize(golden, golden_quantized, output_dims_count, output_scale, + output_zero_point); + + return ValidateFullyConnectedGoldens(tensors, tensors_size, null_bias, + activation, 0.0f, output_dims_count, + golden_quantized, output_data); +} + +} // namespace +} // namespace testing +} // namespace tflite + +TF_LITE_MICRO_TESTS_BEGIN + +TF_LITE_MICRO_TEST(SimpleTest) { + float output_data[tflite::testing::simple_output_size]; + TF_LITE_MICRO_EXPECT_EQ( + tflite::testing::TestFullyConnectedFloat( + tflite::testing::simple_input_dims, + tflite::testing::simple_input_data, + tflite::testing::simple_weights_dims, + tflite::testing::simple_weights_data, + tflite::testing::simple_bias_dims, tflite::testing::simple_bias_data, + tflite::testing::simple_golden, tflite::testing::simple_output_dims, + kTfLiteActNone, output_data), + kTfLiteOk); +} + +TF_LITE_MICRO_TEST(SimpleTestNullBias) { + float output_data[tflite::testing::simple_output_size]; + TF_LITE_MICRO_EXPECT_EQ( + tflite::testing::TestFullyConnectedFloat( + tflite::testing::simple_input_dims, + tflite::testing::simple_input_data, + tflite::testing::simple_weights_dims, + tflite::testing::simple_weights_data, nullptr, nullptr, + tflite::testing::simple_golden_null_bias, + 
tflite::testing::simple_output_dims, kTfLiteActNone, output_data), + kTfLiteOk); +} + +TF_LITE_MICRO_TEST(SimpleTestQuantizedInt8) { + const float input_scale = 1.0f; + const int input_zero_point = -1; + const float weights_scale = 1.0f; + const int weights_zero_point = 0; + const float output_scale = 0.5f; + const int output_zero_point = -1; + + int8_t input_quantized[tflite::testing::simple_input_size]; + int8_t weights_quantized[tflite::testing::simple_weights_size]; + int32_t bias_quantized[tflite::testing::simple_output_size]; + int8_t golden_quantized[tflite::testing::simple_output_size]; + int8_t output_data[tflite::testing::simple_output_size]; + + TF_LITE_MICRO_EXPECT_EQ( + tflite::testing::TestFullyConnectedQuantized( + tflite::testing::simple_input_dims, + tflite::testing::simple_input_data, input_quantized, input_scale, + input_zero_point, tflite::testing::simple_weights_dims, + tflite::testing::simple_weights_data, weights_quantized, + weights_scale, weights_zero_point, tflite::testing::simple_bias_dims, + tflite::testing::simple_bias_data, bias_quantized, + tflite::testing::simple_golden, golden_quantized, + tflite::testing::simple_output_dims, output_scale, output_zero_point, + kTfLiteActNone, output_data), + kTfLiteOk); +} + +#if !defined(HEXAGON) +TF_LITE_MICRO_TEST(SimpleTestQuantizedInt16) { + const float input_scale = 128.0 / 65536; + const int input_zero_point = 0; + const float weights_scale = 1.0f; + const int weights_zero_point = 0; + const float output_scale = 128.0 / 65536; + const int output_zero_point = 0; + + const float simple_golden[] = {24, 25, 26, 58, 59, 60}; + int16_t input_quantized[tflite::testing::simple_input_size]; + int8_t weights_quantized[tflite::testing::simple_weights_size]; + int64_t bias_quantized[tflite::testing::simple_output_size]; + int16_t golden_quantized[tflite::testing::simple_output_size]; + int16_t output_data[tflite::testing::simple_output_size]; + + TF_LITE_MICRO_EXPECT_EQ( + 
tflite::testing::TestFullyConnectedQuantized( + tflite::testing::simple_input_dims, + tflite::testing::simple_input_data, input_quantized, input_scale, + input_zero_point, tflite::testing::simple_weights_dims, + tflite::testing::simple_weights_data, weights_quantized, + weights_scale, weights_zero_point, tflite::testing::simple_bias_dims, + tflite::testing::simple_bias_data, bias_quantized, simple_golden, + golden_quantized, tflite::testing::simple_output_dims, output_scale, + output_zero_point, kTfLiteActNone, output_data), + kTfLiteOk); +} +#endif + +TF_LITE_MICRO_TEST(SimpleTest4DInputQuantizedInt8) { + const float input_scale = 1.0f; + const int input_zero_point = -1; + const float weights_scale = 1.0f; + const int weights_zero_point = 0; + + const float output_scale = 0.5f; + const int output_zero_point = -1; + + int input_dims_4d[] = {4, 1, 1, 2, 10}; + + int8_t input_quantized[tflite::testing::simple_input_size]; + int8_t weights_quantized[tflite::testing::simple_weights_size]; + int32_t bias_quantized[tflite::testing::simple_output_size]; + int8_t golden_quantized[tflite::testing::simple_output_size]; + int8_t output_data[tflite::testing::simple_output_size]; + + TF_LITE_MICRO_EXPECT_EQ( + tflite::testing::TestFullyConnectedQuantized( + input_dims_4d, tflite::testing::simple_input_data, input_quantized, + input_scale, input_zero_point, tflite::testing::simple_weights_dims, + tflite::testing::simple_weights_data, weights_quantized, + weights_scale, weights_zero_point, tflite::testing::simple_bias_dims, + tflite::testing::simple_bias_data, bias_quantized, + tflite::testing::simple_golden, golden_quantized, + tflite::testing::simple_output_dims, output_scale, output_zero_point, + kTfLiteActNone, output_data), + kTfLiteOk); +} + +TF_LITE_MICRO_TEST(SimpleTestQuantizedInt8Relu) { + const float input_scale = 1.0f; + const int input_zero_point = -1; + const float weights_scale = 1.0f; + const int weights_zero_point = 0; + + const float output_scale = 0.5f; + const 
int output_zero_point = -128; + + int8_t input_quantized[tflite::testing::relu_input_size]; + int8_t weights_quantized[tflite::testing::relu_weights_size]; + int32_t bias_quantized[tflite::testing::relu_output_size]; + int8_t golden_quantized[tflite::testing::relu_output_size]; + int8_t output_data[tflite::testing::relu_output_size]; + + TF_LITE_MICRO_EXPECT_EQ( + tflite::testing::TestFullyConnectedQuantized( + tflite::testing::relu_input_dims, tflite::testing::relu_input_data, + input_quantized, input_scale, input_zero_point, + tflite::testing::relu_weights_dims, + tflite::testing::relu_weights_data, weights_quantized, weights_scale, + weights_zero_point, tflite::testing::relu_bias_dims, + tflite::testing::relu_bias_data, bias_quantized, + tflite::testing::relu_golden, golden_quantized, + tflite::testing::relu_output_dims, output_scale, output_zero_point, + kTfLiteActRelu, output_data), + kTfLiteOk); +} + +TF_LITE_MICRO_TEST(SimpleTest4DInput) { + int input_dims_4d[] = {4, 1, 1, 2, 10}; + + float output_data[tflite::testing::simple_output_size]; + + TF_LITE_MICRO_EXPECT_EQ( + tflite::testing::TestFullyConnectedFloat( + input_dims_4d, tflite::testing::simple_input_data, + tflite::testing::simple_weights_dims, + tflite::testing::simple_weights_data, + tflite::testing::simple_bias_dims, tflite::testing::simple_bias_data, + tflite::testing::simple_golden, tflite::testing::simple_output_dims, + kTfLiteActNone, output_data), + kTfLiteOk); +} + +TF_LITE_MICRO_TEST(Representative1x64Input1x16Output) { + float output_data[tflite::testing::representative_64x16_output_size]; + + TF_LITE_MICRO_EXPECT_EQ( + tflite::testing::TestFullyConnectedFloat( + tflite::testing::representative_64x16_input_dims, + tflite::testing::representative_64x16_input_data, + tflite::testing::representative_64x16_weights_dims, + tflite::testing::representative_64x16_weights_data, + tflite::testing::representative_64x16_bias_dims, + tflite::testing::representative_64x16_bias_data, + 
tflite::testing::representative_64x16_golden, + tflite::testing::representative_64x16_output_dims, kTfLiteActNone, + output_data), + kTfLiteOk); +} + +TF_LITE_MICRO_TEST(Representative1x64Input1x16OutputQuantizedInt8) { + const float input_scale = 0.051445; + const int input_zero_point = -128; + const float weights_scale = 0.005660; + const int weights_zero_point = 0; + + const float output_scale = 0.069785; + const int output_zero_point = -9; + + int8_t input_quantized[tflite::testing::representative_64x16_input_size]; + int8_t weights_quantized[tflite::testing::representative_64x16_weights_size]; + int32_t bias_quantized[tflite::testing::representative_64x16_output_size]; + int8_t golden_quantized[tflite::testing::representative_64x16_output_size]; + int8_t output_data[tflite::testing::representative_64x16_output_size]; + + TF_LITE_MICRO_EXPECT_EQ( + tflite::testing::TestFullyConnectedQuantized( + tflite::testing::representative_64x16_input_dims, + tflite::testing::representative_64x16_input_data, input_quantized, + input_scale, input_zero_point, + tflite::testing::representative_64x16_weights_dims, + tflite::testing::representative_64x16_weights_data, weights_quantized, + weights_scale, weights_zero_point, + tflite::testing::representative_64x16_bias_dims, + tflite::testing::representative_64x16_bias_data, bias_quantized, + tflite::testing::representative_64x16_golden, golden_quantized, + tflite::testing::representative_64x16_output_dims, output_scale, + output_zero_point, kTfLiteActNone, output_data), + kTfLiteOk); +} + +TF_LITE_MICRO_TEST(SimpleTestQuantizedInt8NullBias) { + const float input_scale = 1.0f; + const int input_zero_point = -1; + const float weights_scale = 1.0f; + const int weights_zero_point = 0; + const float output_scale = 0.5f; + const int output_zero_point = -1; + + int8_t input_quantized[tflite::testing::simple_input_size]; + int8_t weights_quantized[tflite::testing::simple_weights_size]; + int8_t 
golden_quantized[tflite::testing::simple_output_size]; + int8_t output_data[tflite::testing::simple_output_size]; + + TF_LITE_MICRO_EXPECT_EQ( + tflite::testing::TestFullyConnectedQuantized( + tflite::testing::simple_input_dims, + tflite::testing::simple_input_data, input_quantized, input_scale, + input_zero_point, tflite::testing::simple_weights_dims, + tflite::testing::simple_weights_data, weights_quantized, + weights_scale, weights_zero_point, nullptr, nullptr, + static_cast(nullptr), + tflite::testing::simple_golden_null_bias, golden_quantized, + tflite::testing::simple_output_dims, output_scale, output_zero_point, + kTfLiteActNone, output_data), + kTfLiteOk); +} + +// TODO(b/258710417): INT4 isn't currently supported on Hexagon. +#if !defined(HEXAGON) +// This test was created by handcrafting simple_int4_weights_data, and +// simple_golden_null_bias_int4_weights was obtained by running +// TestFullyConnectedQuantized() with int8 quantization, and ensuring that int4 +// quantization yields the same outputs. 
+TF_LITE_MICRO_TEST(SimpleTestQuantizedInt4Weights) { + const float input_scale = 1.0f; + const int input_zero_point = -1; + const float weights_scale = 1.0f; + const int weights_zero_point = 0; + const float output_scale = 0.5f; + const int output_zero_point = -1; + + int8_t input_quantized[tflite::testing::simple_input_size]; + int8_t weights_quantized[tflite::testing::simple_weights_size]; + int8_t golden_quantized[tflite::testing::simple_output_size]; + int8_t output_data[tflite::testing::simple_output_size]; + + TF_LITE_MICRO_EXPECT_EQ( + tflite::testing::TestFullyConnectedQuantized( + tflite::testing::simple_input_dims, + tflite::testing::simple_input_data, input_quantized, input_scale, + input_zero_point, tflite::testing::simple_weights_dims, + tflite::testing::simple_int4_weights_data, weights_quantized, + weights_scale, weights_zero_point, nullptr, nullptr, + static_cast(nullptr), + tflite::testing::simple_golden_null_bias_int4_weights, + golden_quantized, tflite::testing::simple_output_dims, output_scale, + output_zero_point, kTfLiteActNone, output_data, kTfLiteInt4), + kTfLiteOk); +} +#endif + +TF_LITE_MICRO_TESTS_END diff --git a/tensorflow/lite/micro/kernels/gather.cc b/tensorflow/lite/micro/kernels/gather.cc new file mode 100644 index 0000000..9955601 --- /dev/null +++ b/tensorflow/lite/micro/kernels/gather.cc @@ -0,0 +1,224 @@ +/* Copyright 2021 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/ + +#include "tensorflow/lite/c/builtin_op_data.h" +#include "tensorflow/lite/c/common.h" +#include "tensorflow/lite/kernels/internal/tensor_ctypes.h" +#include "tensorflow/lite/kernels/kernel_util.h" +#include "tensorflow/lite/micro/kernels/kernel_util.h" +#include "tensorflow/lite/micro/micro_log.h" +#include "tensorflow/lite/micro/micro_utils.h" + +namespace tflite { +namespace { + +constexpr int kInputTensor = 0; +constexpr int kInputPositions = 1; +constexpr int kOutputTensor = 0; + +template +TfLiteStatus Gather(const TfLiteGatherParams* params, + const TfLiteEvalTensor* input, + const TfLiteEvalTensor* coords, TfLiteEvalTensor* output) { + const InputT* input_data = tflite::micro::GetTensorData(input); + const CoordsT* coords_data = tflite::micro::GetTensorData(coords); + InputT* output_data = tflite::micro::GetTensorData(output); + const TfLiteIntArray* input_dims = input->dims; + const int input_dims_size = input_dims->size; + int axis = params->axis; + if (axis < 0) { + axis += input_dims_size; + } + TFLITE_DCHECK_GE(axis, 0); + TFLITE_DCHECK_LT(axis, input_dims_size); + + int batch_dims = params->batch_dims; + // batch_dims should be in range: [-rank(coords), rank(coords)]. + // Negative batch_dims is added with rank of coords. 
+ const TfLiteIntArray* coords_dims = coords->dims; + const int coords_dims_size = coords_dims->size; + if (batch_dims < 0) { + batch_dims += coords_dims_size; + } + TFLITE_DCHECK_GE(batch_dims, 0); + TFLITE_DCHECK_LT(batch_dims, input_dims_size); + TFLITE_DCHECK_LE(batch_dims, coords_dims_size); + TFLITE_DCHECK_GE(axis, batch_dims); + for (int i = 0; i < batch_dims; ++i) { + TFLITE_DCHECK_EQ(input_dims->data[i], coords_dims->data[i]); + } + + const int axis_size = input_dims->data[axis]; + + int batch_size = 1; + for (int i = 0; i < batch_dims; ++i) { + batch_size *= input_dims->data[i]; + } + int outer_size = 1; + for (int i = batch_dims; i < axis; ++i) { + outer_size *= input_dims->data[i]; + } + int inner_size = 1; + for (int i = axis + 1; i < input_dims_size; ++i) { + inner_size *= input_dims->data[i]; + } + int coord_size = 1; + for (int i = batch_dims; i < coords_dims_size; ++i) { + coord_size *= coords_dims->data[i]; + } + + for (int batch = 0; batch < batch_size; ++batch) { + for (int outer = 0; outer < outer_size; ++outer) { + for (int coord = 0; coord < coord_size; ++coord) { + TFLITE_DCHECK_GE(coords_data[coord], 0); + TFLITE_DCHECK_LT(coords_data[coord], axis_size); + std::memcpy(output_data + + (((batch * outer_size) + outer) * coord_size + coord) * + inner_size, + input_data + (((batch * outer_size) + outer) * axis_size + + coords_data[batch * coord_size + coord]) * + inner_size, + sizeof(InputT) * inner_size); + } + } + } + return kTfLiteOk; +} + +TfLiteStatus Prepare(TfLiteContext* context, TfLiteNode* node) { + MicroContext* micro_context = GetMicroContext(context); + + TF_LITE_ENSURE_EQ(context, NumInputs(node), 2); + TF_LITE_ENSURE_EQ(context, NumOutputs(node), 1); + + const auto* params = + reinterpret_cast(node->builtin_data); + TfLiteTensor* input = + micro_context->AllocateTempInputTensor(node, kInputTensor); + TF_LITE_ENSURE(context, input != nullptr); + TfLiteTensor* coords = + micro_context->AllocateTempInputTensor(node, kInputPositions); 
+ TF_LITE_ENSURE(context, coords != nullptr); + TfLiteTensor* output = + micro_context->AllocateTempOutputTensor(node, kOutputTensor); + TF_LITE_ENSURE(context, output != nullptr); + + switch (coords->type) { + case kTfLiteInt32: + break; + default: + MicroPrintf("Positions of type '%s' are not supported by gather.", + TfLiteTypeGetName(coords->type)); + return kTfLiteError; + break; + } + + // Assign to output the input type. + output->type = input->type; + + // Check conditions for different types. + switch (input->type) { + case kTfLiteFloat32: + case kTfLiteInt8: + break; + default: + MicroPrintf("Type '%s' is not supported by gather.", + TfLiteTypeGetName(input->type)); + return kTfLiteError; + break; + } + + int axis = params->axis; + if (axis < 0) { + axis += NumDimensions(input); + } + TF_LITE_ENSURE(context, 0 <= axis && axis < NumDimensions(input)); + + int batch_dims = params->batch_dims; + // batch_dims should be in range: [-rank(coords), rank(coords)]. + // Negative batch_dims is added with rank of coords. + if (batch_dims < 0) { + batch_dims += NumDimensions(coords); + } + TF_LITE_ENSURE(context, batch_dims <= axis); + TF_LITE_ENSURE(context, 0 <= batch_dims && batch_dims < NumDimensions(input)); + TF_LITE_ENSURE(context, batch_dims <= NumDimensions(coords)); + for (int i = 0; i < batch_dims; ++i) { + TF_LITE_ENSURE_EQ(context, input->dims->data[i], coords->dims->data[i]); + } + + // GATHER updates the output tensor dimensions, but TfLiteTensor in the + // MicroInterpreter is a temporary allocation. We must therefore relocate the + // dims from the FlatBuffer to the persistent storage arena. 
+ TfLiteEvalTensor* output_eval = + tflite::micro::GetEvalOutput(context, node, kOutputTensor); + TF_LITE_ENSURE_OK(context, tflite::micro::CreateWritableTensorDimsWithCopy( + context, output, output_eval)); + + TfLiteIntArray* output_shape = output->dims; + output_shape->size = + NumDimensions(input) + NumDimensions(coords) - 1 - batch_dims; + int output_index = 0; + for (int i = 0; i < axis; ++i) { + output_shape->data[output_index++] = input->dims->data[i]; + } + for (int i = batch_dims; i < coords->dims->size; ++i) { + output_shape->data[output_index++] = coords->dims->data[i]; + } + for (int i = axis + 1; i < input->dims->size; ++i) { + output_shape->data[output_index++] = input->dims->data[i]; + } + + micro_context->DeallocateTempTfLiteTensor(input); + micro_context->DeallocateTempTfLiteTensor(coords); + micro_context->DeallocateTempTfLiteTensor(output); + + return kTfLiteOk; +} + +TfLiteStatus Eval(TfLiteContext* context, TfLiteNode* node) { + const auto* params = + reinterpret_cast(node->builtin_data); + const TfLiteEvalTensor* input = + tflite::micro::GetEvalInput(context, node, kInputTensor); + const TfLiteEvalTensor* coords = + tflite::micro::GetEvalInput(context, node, kInputPositions); + TfLiteEvalTensor* output = + tflite::micro::GetEvalOutput(context, node, kOutputTensor); + + if (coords->type == kTfLiteInt32) { + switch (input->type) { + case kTfLiteFloat32: + return Gather(params, input, coords, output); + break; + case kTfLiteInt8: + return Gather(params, input, coords, output); + break; + default: + MicroPrintf("Type '%s' is not supported by gather.", + TfLiteTypeGetName(input->type)); + return kTfLiteError; + break; + } + } + return kTfLiteOk; +} +} // namespace + +TFLMRegistration Register_GATHER() { + return tflite::micro::RegisterOp(nullptr, Prepare, Eval); +} + +} // namespace tflite diff --git a/tensorflow/lite/micro/kernels/gather_nd.cc b/tensorflow/lite/micro/kernels/gather_nd.cc new file mode 100644 index 0000000..3774ddd --- /dev/null 
+++ b/tensorflow/lite/micro/kernels/gather_nd.cc @@ -0,0 +1,212 @@ +/* Copyright 2021 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#include "tensorflow/lite/c/common.h" +#include "tensorflow/lite/kernels/internal/tensor_ctypes.h" +#include "tensorflow/lite/kernels/kernel_util.h" +#include "tensorflow/lite/micro/kernels/kernel_util.h" +#include "tensorflow/lite/micro/micro_log.h" +#include "tensorflow/lite/micro/micro_utils.h" + +namespace tflite { +namespace { + +constexpr int kParams = 0; +constexpr int kIndices = 1; +constexpr int kOutputTensor = 0; +constexpr int MAX_INDICES_ND = 5; + +TfLiteStatus Prepare(TfLiteContext* context, TfLiteNode* node) { + MicroContext* micro_context = GetMicroContext(context); + + TF_LITE_ENSURE_EQ(context, NumInputs(node), 2); + TF_LITE_ENSURE_EQ(context, NumOutputs(node), 1); + + TfLiteTensor* params = micro_context->AllocateTempInputTensor(node, kParams); + TF_LITE_ENSURE(context, params != nullptr); + TfLiteTensor* indices = + micro_context->AllocateTempInputTensor(node, kIndices); + TF_LITE_ENSURE(context, indices != nullptr); + TfLiteTensor* output = + micro_context->AllocateTempOutputTensor(node, kOutputTensor); + TF_LITE_ENSURE(context, output != nullptr); + + switch (params->type) { + case kTfLiteFloat32: + case kTfLiteInt8: + break; + default: + MicroPrintf("Params of type '%s' are not supported by gather_nd.", + 
TfLiteTypeGetName(params->type)); + return kTfLiteError; + break; + } + switch (indices->type) { + case kTfLiteInt32: + break; + default: + MicroPrintf("Indices of type '%s' are not supported by gather_nd.", + TfLiteTypeGetName(indices->type)); + return kTfLiteError; + } + + const int params_rank = NumDimensions(params); + const int indices_rank = NumDimensions(indices); + const int indices_nd = SizeOfDimension(indices, indices_rank - 1); + if (params_rank < 1) { + MicroPrintf("Params must be at least a vector."); + return kTfLiteError; + } + if (indices_rank < 1) { + MicroPrintf("Indices must be at least a vector."); + return kTfLiteError; + } + if (indices_nd > params_rank) { + MicroPrintf("Index innermost dimension length must be <= params rank."); + return kTfLiteError; + } + if (indices_nd > MAX_INDICES_ND) { + MicroPrintf("Index innermost dimension length must not exceed %d.", + MAX_INDICES_ND); + return kTfLiteError; + } + + // Assign to output the input type. + output->type = params->type; + + // The tensor output dims must be relocated + // from the FlatBuffer to the persistent storage arena. + TfLiteEvalTensor* output_eval = + tflite::micro::GetEvalOutput(context, node, kOutputTensor); + TF_LITE_ENSURE_OK(context, tflite::micro::CreateWritableTensorDimsWithCopy( + context, output, output_eval)); + + // TFLM gather_nd does not create the output tensor, but it needs to ensure + // that the output shape is correct. 
The result shape is + // indices.shape[:-1] + params.shape[indices.shape[-1]:] + TfLiteIntArray* output_shape = output->dims; + int output_index = 0; + for (int i = 0; i < indices_rank - 1; ++i) { + output_shape->data[output_index++] = indices->dims->data[i]; + } + for (int i = indices_nd; i < params_rank; ++i) { + output_shape->data[output_index++] = params->dims->data[i]; + } + output_shape->size = output_index; + + micro_context->DeallocateTempTfLiteTensor(params); + micro_context->DeallocateTempTfLiteTensor(indices); + micro_context->DeallocateTempTfLiteTensor(output); + return kTfLiteOk; +} + +template +TfLiteStatus GatherNd(const TfLiteEvalTensor* params, + const TfLiteEvalTensor* indices, + TfLiteEvalTensor* output) { + const int indices_dims = indices->dims->size; + const int indices_nd = indices->dims->data[indices_dims - 1]; + const int params_dims = params->dims->size; + const IndicesT* index_data = tflite::micro::GetTensorData(indices); + const ParamsT* param_data = tflite::micro::GetTensorData(params); + ParamsT* output_data = tflite::micro::GetTensorData(output); + + int n_slices = 1; + for (int i = 0; i < indices_dims - 1; ++i) { + n_slices *= indices->dims->data[i]; + } + + // If indices[-1] == params.rank, fetch single elements. + // If indices[-1] < params.rank, fetch slices. 
+ int slice_size = 1; + for (int i = indices_nd; i < params_dims; ++i) { + slice_size *= params->dims->data[i]; + } + + int params_flat_size = ElementCount(*params->dims); + int remain_flat_size = params_flat_size; + + // Number of elements per dimension + int dims_to_count[MAX_INDICES_ND]; + for (int i = 0; i < indices_nd; ++i) { + dims_to_count[i] = remain_flat_size / params->dims->data[i]; + remain_flat_size = dims_to_count[i]; + } + + for (int i = 0; i < n_slices; ++i) { + int from_pos = 0; + for (int j = 0; j < indices_nd; ++j) { + int offset = i * indices_nd + j; + IndicesT index = index_data[offset]; + from_pos += index * dims_to_count[j]; + } + if (from_pos < 0 || from_pos + slice_size > params_flat_size) { + return kTfLiteError; + } + std::memcpy(output_data + i * slice_size, param_data + from_pos, + sizeof(ParamsT) * slice_size); + } + return kTfLiteOk; +} + +template +TfLiteStatus EvalGatherNd(TfLiteContext* context, + const TfLiteEvalTensor* params, + const TfLiteEvalTensor* indices, + TfLiteEvalTensor* output) { + TfLiteStatus status = kTfLiteError; + switch (params->type) { + case kTfLiteFloat32: + status = GatherNd(params, indices, output); + break; + case kTfLiteInt8: + status = GatherNd(params, indices, output); + break; + default: + MicroPrintf("Params type '%s' are not supported by gather_nd.", + TfLiteTypeGetName(params->type)); + return kTfLiteError; + } + if (status != kTfLiteOk) { + MicroPrintf("gather_nd index out of bounds"); + } + return status; +} + +TfLiteStatus Eval(TfLiteContext* context, TfLiteNode* node) { + const TfLiteEvalTensor* params = + tflite::micro::GetEvalInput(context, node, kParams); + const TfLiteEvalTensor* indices = + tflite::micro::GetEvalInput(context, node, kIndices); + TfLiteEvalTensor* output = + tflite::micro::GetEvalOutput(context, node, kOutputTensor); + + switch (indices->type) { + case kTfLiteInt32: + return EvalGatherNd(context, params, indices, output); + break; + default: + MicroPrintf("Indices of type '%s' 
are not supported by gather_nd.", + TfLiteTypeGetName(indices->type)); + return kTfLiteError; + } +} +} // namespace + +TFLMRegistration Register_GATHER_ND() { + return tflite::micro::RegisterOp(nullptr, Prepare, Eval); +} + +} // namespace tflite diff --git a/tensorflow/lite/micro/kernels/gather_nd_test.cc b/tensorflow/lite/micro/kernels/gather_nd_test.cc new file mode 100644 index 0000000..39dd337 --- /dev/null +++ b/tensorflow/lite/micro/kernels/gather_nd_test.cc @@ -0,0 +1,330 @@ +/* Copyright 2021 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/ + +#include "tensorflow/lite/c/common.h" +#include "tensorflow/lite/micro/kernels/kernel_runner.h" +#include "tensorflow/lite/micro/test_helpers.h" +#include "tensorflow/lite/micro/testing/micro_test.h" + +namespace tflite { +namespace testing { +namespace { + +template +void TestGatherNd(int* param_dims, const ParamType* param_data, int* index_dims, + const IndexType* index_data, int* output_dims, + ParamType* output_data, const ParamType* expected_output_data, + const TfLiteStatus expected_status = kTfLiteOk) { + TfLiteIntArray* pdims = IntArrayFromInts(param_dims); + TfLiteIntArray* idims = IntArrayFromInts(index_dims); + TfLiteIntArray* odims = IntArrayFromInts(output_dims); + + constexpr int inputs_size = 2; + constexpr int outputs_size = 1; + constexpr int tensors_size = inputs_size + outputs_size; + TfLiteTensor tensors[tensors_size] = { + CreateTensor(param_data, pdims), + CreateTensor(index_data, idims), + CreateTensor(output_data, odims), + }; + int inputs_array_data[] = {2, 0, 1}; + TfLiteIntArray* inputs_array = IntArrayFromInts(inputs_array_data); + int outputs_array_data[] = {1, 2}; + TfLiteIntArray* outputs_array = IntArrayFromInts(outputs_array_data); + + const TFLMRegistration registration = Register_GATHER_ND(); + micro::KernelRunner runner(registration, tensors, tensors_size, inputs_array, + outputs_array, /*builtin_data=*/nullptr); + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, runner.InitAndPrepare()); + TF_LITE_MICRO_EXPECT_EQ(expected_status, runner.Invoke()); + + if (expected_status == kTfLiteOk) { + // The output tensor's data and shape have been updated by the kernel. 
+ TfLiteTensor* actual_output_tensor = &tensors[2]; + TfLiteIntArray* actual_output_dims = actual_output_tensor->dims; + const int output_size = ElementCount(*actual_output_dims); + for (int i = 0; i < output_size; ++i) { + TF_LITE_MICRO_EXPECT_EQ(expected_output_data[i], output_data[i]); + } + } +} + +} // namespace +} // namespace testing +} // namespace tflite + +TF_LITE_MICRO_TESTS_BEGIN + +TF_LITE_MICRO_TEST(GatherNd_ElementIndexingIntoMatrix) { + // For input_dims[], index_dims[], or output_dims[], element 0 is the + // number of dimensions in that array, not the actual dimension data. + int input_dims[] = {2, 2, 2}; + int index_dims[] = {2, 2, 2}; + const int32_t index_data[] = {0, 0, 1, 1}; + const float input_data[] = {1.1, 1.2, 2.1, 2.2}; + const float golden_data[] = {1.1, 2.2}; + float output_data[2]; + int output_dims[] = {1, 0}; + tflite::testing::TestGatherNd( + input_dims, input_data, index_dims, index_data, output_dims, output_data, + golden_data); +} + +TF_LITE_MICRO_TEST(GatherNd_SliceIndexingIntoMatrix) { + // For input_dims[], index_dims[], or output_dims[], element 0 is the + // number of dimensions in that array, not the actual dimension data. + int input_dims[] = {2, 2, 2}; + int index_dims[] = {2, 2, 1}; + const int32_t index_data[] = {1, 0}; + const float input_data[] = {1.1, 1.2, 2.1, 2.2}; + const float golden_data[] = {2.1, 2.2, 1.1, 1.2}; + float output_data[4]; + int output_dims[] = {2, 0, 0}; + tflite::testing::TestGatherNd( + input_dims, input_data, index_dims, index_data, output_dims, output_data, + golden_data); +} + +TF_LITE_MICRO_TEST(GatherNd_BatchedIndexingIntoMatrix1) { + // For input_dims[], index_dims[], or output_dims[], element 0 is the + // number of dimensions in that array, not the actual dimension data. 
+ int input_dims[] = {2, 2, 2}; + int index_dims[] = {3, 2, 1, 1}; + const int32_t index_data[] = {1, 0}; + const float input_data[] = {1.1, 1.2, 2.1, 2.2}; + const float golden_data[] = {2.1, 2.2, 1.1, 1.2}; + float output_data[4]; + int output_dims[] = {3, 0, 0, 0}; + tflite::testing::TestGatherNd( + input_dims, input_data, index_dims, index_data, output_dims, output_data, + golden_data); +} + +TF_LITE_MICRO_TEST(GatherNd_BatchedIndexingIntoMatrix2) { + // For input_dims[], index_dims[], or output_dims[], element 0 is the + // number of dimensions in that array, not the actual dimension data. + int input_dims[] = {2, 2, 2}; + int index_dims[] = {3, 2, 1, 2}; + const int32_t index_data[] = {0, 0, 1, 1}; + const float input_data[] = {1.1, 1.2, 2.1, 2.2}; + const float golden_data[] = {1.1, 2.2}; + float output_data[2]; + int output_dims[] = {3, 0, 0, 0}; + tflite::testing::TestGatherNd( + input_dims, input_data, index_dims, index_data, output_dims, output_data, + golden_data); +} + +TF_LITE_MICRO_TEST(GatherNd_DuplicateIndexingIntoMatrix) { + // For input_dims[], index_dims[], or output_dims[], element 0 is the + // number of dimensions in that array, not the actual dimension data. + int input_dims[] = {2, 2, 2}; + int index_dims[] = {2, 2, 2}; + const int32_t index_data[] = {0, 0, 0, 0}; + const float input_data[] = {1.1, 1.2, 2.1, 2.2}; + const float golden_data[] = {1.1, 1.1}; + float output_data[2]; + int output_dims[] = {1, 0}; + tflite::testing::TestGatherNd( + input_dims, input_data, index_dims, index_data, output_dims, output_data, + golden_data); +} + +TF_LITE_MICRO_TEST(GatherNd_ElementIndexingIntoRank3Tensor) { + // For input_dims[], index_dims[], or output_dims[], element 0 is the + // number of dimensions in that array, not the actual dimension data. 
+ int input_dims[] = {3, 3, 2, 3}; + int index_dims[] = {3, 1, 2, 3}; + const int32_t index_data[] = {0, 0, 1, 1, 1, 0}; + const float input_data[] = {1.1, -1.2, 1.3, -2.1, 2.2, 2.3, // + 3.1, 3.2, -3.3, -4.1, -4.2, 4.3, // + 5.1, -5.2, 5.3, 6.1, -6.2, 6.3}; + const float golden_data[] = {-1.2, -4.1}; + float output_data[2]; + int output_dims[] = {2, 0, 0}; + tflite::testing::TestGatherNd( + input_dims, input_data, index_dims, index_data, output_dims, output_data, + golden_data); +} + +TF_LITE_MICRO_TEST(GatherNd_SliceIndexingIntoRank3Tensor) { + // For input_dims[], index_dims[], or output_dims[], element 0 is the + // number of dimensions in that array, not the actual dimension data. + int input_dims[] = {3, 3, 2, 3}; + int index_dims[] = {2, 2, 1}; + const int32_t index_data[] = {0, 2}; + const float input_data[] = {1.1, -1.2, 1.3, -2.1, 2.2, 2.3, // + 3.1, 3.2, -3.3, -4.1, -4.2, 4.3, // + 5.1, -5.2, 5.3, 6.1, -6.2, 6.3}; + const float golden_data[] = {1.1, -1.2, 1.3, -2.1, 2.2, 2.3, + 5.1, -5.2, 5.3, 6.1, -6.2, 6.3}; + float output_data[12]; + int output_dims[] = {3, 0, 0, 0}; + tflite::testing::TestGatherNd( + input_dims, input_data, index_dims, index_data, output_dims, output_data, + golden_data); +} + +TF_LITE_MICRO_TEST(GatherNd_BatchedIndexingIntoRank3Tensor1) { + // For input_dims[], index_dims[], or output_dims[], element 0 is the + // number of dimensions in that array, not the actual dimension data. 
+ int input_dims[] = {3, 3, 2, 3}; + int index_dims[] = {3, 2, 1, 3}; + const int32_t index_data[] = {0, 0, 1, 1, 1, 0}; + const float input_data[] = {1.1, -1.2, 1.3, -2.1, 2.2, 2.3, // + 3.1, 3.2, -3.3, -4.1, -4.2, 4.3, // + 5.1, -5.2, 5.3, 6.1, -6.2, 6.3}; + const float golden_data[] = {-1.2, -4.1}; + float output_data[2]; + int output_dims[] = {2, 0, 0}; + tflite::testing::TestGatherNd( + input_dims, input_data, index_dims, index_data, output_dims, output_data, + golden_data); +} + +TF_LITE_MICRO_TEST(GatherNd_BatchedIndexingIntoRank3Tensor2) { + // For input_dims[], index_dims[], or output_dims[], element 0 is the + // number of dimensions in that array, not the actual dimension data. + int input_dims[] = {3, 3, 2, 3}; + int index_dims[] = {3, 3, 1, 1}; + const int32_t index_data[] = {1, 2, 0}; + const float input_data[] = {1.1, -1.2, 1.3, -2.1, 2.2, 2.3, // + 3.1, 3.2, -3.3, -4.1, -4.2, 4.3, // + 5.1, -5.2, 5.3, 6.1, -6.2, 6.3}; + const float golden_data[] = {3.1, 3.2, -3.3, -4.1, -4.2, 4.3, + 5.1, -5.2, 5.3, 6.1, -6.2, 6.3, + 1.1, -1.2, 1.3, -2.1, 2.2, 2.3}; + float output_data[18]; + int output_dims[] = {4, 0, 0, 0, 0}; + tflite::testing::TestGatherNd( + input_dims, input_data, index_dims, index_data, output_dims, output_data, + golden_data); +} + +TF_LITE_MICRO_TEST(GatherNd_BatchedIndexingIntoRank3Tensor3) { + // For input_dims[], index_dims[], or output_dims[], element 0 is the + // number of dimensions in that array, not the actual dimension data. 
+ int input_dims[] = {3, 3, 2, 3}; + int index_dims[] = {3, 2, 2, 2}; + const int32_t index_data[] = {0, 1, 1, 0, 0, 0, 2, 1}; + const float input_data[] = {1.1, -1.2, 1.3, -2.1, 2.2, 2.3, // + 3.1, 3.2, -3.3, -4.1, -4.2, 4.3, // + 5.1, -5.2, 5.3, 6.1, -6.2, 6.3}; + const float golden_data[] = {-2.1, 2.2, 2.3, 3.1, 3.2, -3.3, + 1.1, -1.2, 1.3, 6.1, -6.2, 6.3}; + float output_data[12]; + int output_dims[] = {3, 0, 0, 0}; + tflite::testing::TestGatherNd( + input_dims, input_data, index_dims, index_data, output_dims, output_data, + golden_data); +} + +TF_LITE_MICRO_TEST(GatherNd_BatchedIndexingIntoRank3Tensor4) { + // For input_dims[], index_dims[], or output_dims[], element 0 is the + // number of dimensions in that array, not the actual dimension data. + int input_dims[] = {3, 3, 2, 3}; + int index_dims[] = {3, 2, 2, 3}; + const int32_t index_data[] = {0, 0, 1, 1, 0, 1, 1, 1, 2, 2, 1, 2}; + const float input_data[] = {1.1, -1.2, 1.3, -2.1, 2.2, 2.3, // + 3.1, 3.2, -3.3, -4.1, -4.2, 4.3, // + 5.1, -5.2, 5.3, 6.1, -6.2, 6.3}; + const float golden_data[] = {-1.2, 3.2, 4.3, 6.3}; + float output_data[4]; + int output_dims[] = {2, 0, 0}; + tflite::testing::TestGatherNd( + input_dims, input_data, index_dims, index_data, output_dims, output_data, + golden_data); +} + +TF_LITE_MICRO_TEST(GatherNd_DuplicateIndexingIntoRank3Tensor) { + // For input_dims[], index_dims[], or output_dims[], element 0 is the + // number of dimensions in that array, not the actual dimension data. 
+ int input_dims[] = {3, 3, 2, 3}; + int index_dims[] = {2, 2, 2}; + const int32_t index_data[] = {0, 1, 0, 1}; + const float input_data[] = {1.1, -1.2, 1.3, -2.1, 2.2, 2.3, // + 3.1, 3.2, -3.3, -4.1, -4.2, 4.3, // + 5.1, -5.2, 5.3, 6.1, -6.2, 6.3}; + const float golden_data[] = {-2.1, 2.2, 2.3, -2.1, 2.2, 2.3}; + float output_data[6]; + int output_dims[] = {2, 0, 0}; + tflite::testing::TestGatherNd( + input_dims, input_data, index_dims, index_data, output_dims, output_data, + golden_data); +} + +TF_LITE_MICRO_TEST(GatherNd_Float32Int32) { + // For input_dims[], index_dims[], or output_dims[], element 0 is the + // number of dimensions in that array, not the actual dimension data. + int input_dims[] = {3, 3, 2, 3}; + int index_dims[] = {2, 2, 2}; + const int32_t index_data[] = {0, 1, 1, 0}; + const float input_data[] = {1.1, -1.2, 1.3, -2.1, 2.2, 2.3, // + 3.1, 3.2, -3.3, -4.1, -4.2, 4.3, // + 5.1, -5.2, 5.3, 6.1, -6.2, 6.3}; + const float golden_data[] = {-2.1, 2.2, 2.3, 3.1, 3.2, -3.3}; + float output_data[6]; + int output_dims[] = {2, 0, 0}; + tflite::testing::TestGatherNd( + input_dims, input_data, index_dims, index_data, output_dims, output_data, + golden_data); +} + +TF_LITE_MICRO_TEST(GatherNd_Int8Int32) { + // For input_dims[], index_dims[], or output_dims[], element 0 is the + // number of dimensions in that array, not the actual dimension data. 
+ int input_dims[] = {3, 3, 2, 3}; + int index_dims[] = {2, 2, 2}; + const int32_t index_data[] = {0, 1, 1, 0}; + const int8_t input_data[] = {1, -1, 1, -2, 2, 2, // + 3, 3, -3, -4, -4, 4, // + 5, -5, 5, 6, -6, 6}; + const int8_t golden_data[] = {-2, 2, 2, 3, 3, -3}; + int8_t output_data[6]; + int output_dims[] = {2, 0, 0}; + tflite::testing::TestGatherNd( + input_dims, input_data, index_dims, index_data, output_dims, output_data, + golden_data); +} + +TF_LITE_MICRO_TEST(GatherNd_ReadOOB) { + // For input_dims[], index_dims[], or output_dims[], element 0 is the + // number of dimensions in that array, not the actual dimension data. + int input_dims[] = {2, 2, 2}; + int index_dims[] = {2, 2, 2}; + const int32_t index_data[] = {0, 1, 2, 0}; + const int8_t input_data[] = {1, -1, 1, -2}; + int8_t output_data; + int output_dims[] = {1, 0, 0}; + tflite::testing::TestGatherNd( + input_dims, input_data, index_dims, index_data, output_dims, &output_data, + nullptr, kTfLiteError); +} + +TF_LITE_MICRO_TEST(GatherNd_ReadOOBNegative) { + // For input_dims[], index_dims[], or output_dims[], element 0 is the + // number of dimensions in that array, not the actual dimension data. + int input_dims[] = {2, 2, 2}; + int index_dims[] = {2, 2, 2}; + const int32_t index_data[] = {0, -1, 1, 0}; + const int8_t input_data[] = {1, -1, 1, -2}; + int8_t output_data; + int output_dims[] = {1, 0, 0}; + tflite::testing::TestGatherNd( + input_dims, input_data, index_dims, index_data, output_dims, &output_data, + nullptr, kTfLiteError); +} + +TF_LITE_MICRO_TESTS_END diff --git a/tensorflow/lite/micro/kernels/gather_test.cc b/tensorflow/lite/micro/kernels/gather_test.cc new file mode 100644 index 0000000..91c010b --- /dev/null +++ b/tensorflow/lite/micro/kernels/gather_test.cc @@ -0,0 +1,464 @@ +/* Copyright 2021 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. 
+You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#include "tensorflow/lite/c/builtin_op_data.h" +#include "tensorflow/lite/c/common.h" +#include "tensorflow/lite/micro/kernels/kernel_runner.h" +#include "tensorflow/lite/micro/test_helpers.h" +#include "tensorflow/lite/micro/testing/micro_test.h" + +namespace tflite { +namespace testing { +namespace { + +template +void TestGather(int* input_dims, const InType* input_data, int* positions_dims, + const PosType* positions_data, int* output_dims, + InType* output_data, const int* expected_output_dims, + const InType* expected_output_data, const int axis = 0, + const int batch_dims = 0) { + TfLiteIntArray* in_dims = IntArrayFromInts(input_dims); + TfLiteIntArray* pos_dims = IntArrayFromInts(positions_dims); + TfLiteIntArray* out_dims = IntArrayFromInts(output_dims); + TfLiteGatherParams params = {axis, batch_dims}; + + constexpr int inputs_size = 2; + constexpr int outputs_size = 1; + constexpr int tensors_size = inputs_size + outputs_size; + TfLiteTensor tensors[tensors_size] = { + CreateTensor(input_data, in_dims), + CreateTensor(positions_data, pos_dims), + CreateTensor(output_data, out_dims, true), + }; + int inputs_array_data[] = {2, 0, 1}; + TfLiteIntArray* inputs_array = IntArrayFromInts(inputs_array_data); + int outputs_array_data[] = {1, 2}; + TfLiteIntArray* outputs_array = IntArrayFromInts(outputs_array_data); + + const TFLMRegistration registration = Register_GATHER(); + micro::KernelRunner runner(registration, tensors, tensors_size, inputs_array, + outputs_array, ¶ms); + 
TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, runner.InitAndPrepare()); + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, runner.Invoke()); + + // The output tensor's data and shape have been updated by the kernel. + TfLiteTensor* actual_output_tensor = &tensors[2]; + TfLiteIntArray* actual_output_dims = actual_output_tensor->dims; + const int actual_output_dims_size = actual_output_dims->size; + const int output_size = ElementCount(*actual_output_dims); + for (int i = 0; i < output_size; ++i) { + TF_LITE_MICRO_EXPECT_EQ(expected_output_data[i], output_data[i]); + } + + // Compare output tensor's shape if expected_output_dims[] is provided. + for (int i = 0; i < actual_output_dims_size; ++i) { + TF_LITE_MICRO_EXPECT_EQ(expected_output_dims[i], + actual_output_dims->data[i]); + } +} + +} // namespace +} // namespace testing +} // namespace tflite + +TF_LITE_MICRO_TESTS_BEGIN + +// For all test functions below, dims[0] is the dimension count. +TF_LITE_MICRO_TEST(GatherOp_Shuffle) { + // For input_dims[], positions_dims[], or output_dims[], element 0 is the + // number of dimensions in that array, not the actual dimension data. + int input_dims[] = {2, 2, 2}; + int positions_dims[] = {1, 2}; + const int32_t positions_data[] = {1, 0}; + const float input_data[] = {-2.0, 0.2, 0.7, 0.8}; + const float golden_data[] = {0.7, 0.8, -2, 0.2}; + float output_data[4]; + + // The kernel under test will fill output_dims[1] onward, to be compared + // against golden_dims[0] onward. + const int golden_dims[] = {2, 2}; + int output_dims[] = {2, 0, 0}; + tflite::testing::TestGather( + input_dims, input_data, positions_dims, positions_data, output_dims, + output_data, golden_dims, golden_data); +} + +TF_LITE_MICRO_TEST(GatherOp_Test0DIndex) { + // For input_dims[], positions_dims[], or output_dims[], element 0 is the + // number of dimensions in that array, not the actual dimension data. 
+ int input_dims[] = {2, 2, 2}; + int positions_dims[] = {0}; + const int32_t positions_data[] = {1}; + const float input_data[] = {-2.0, 0.2, 0.7, 0.8}; + const float golden_data[] = {0.7, 0.8}; + float output_data[2]; + + // The kernel under test will fill output_dims[1] onward, to be compared + // against golden_dims[0] onward. + const int golden_dims[] = {2}; + int output_dims[] = {1, 0}; + tflite::testing::TestGather( + input_dims, input_data, positions_dims, positions_data, output_dims, + output_data, golden_dims, golden_data); +} + +TF_LITE_MICRO_TEST(GatherOp_Test0DIndexWith0DResult) { + // 0D tensor is special case in current TFLite. Test it once to make sure + // existing workarounds are fine with it. + // For input_dims[], positions_dims[], or output_dims[], element 0 is the + // number of dimensions in that array, not the actual dimension data. + int input_dims[] = {1, 3}; + int positions_dims[] = {0}; + const int32_t positions_data[] = {1}; + const float input_data[] = {1.0, 2.0, 3.0}; + const float golden_data[] = {2.0}; + float output_data[1]; + + // The kernel under test will fill output_dims[1] onward, to be compared + // against golden_dims[0] onward. + const int golden_dims[] = {0}; + int output_dims[] = {1, 0}; + tflite::testing::TestGather( + input_dims, input_data, positions_dims, positions_data, output_dims, + output_data, golden_dims, golden_data); +} + +TF_LITE_MICRO_TEST(GatherOp_Test1DInput1DIndex) { + // For input_dims[], positions_dims[], or output_dims[], element 0 is the + // number of dimensions in that array, not the actual dimension data. + int input_dims[] = {1, 3}; + int positions_dims[] = {1, 1}; + const int32_t positions_data[] = {1}; + const float input_data[] = {1.0, 3.0, 5.0}; + const float golden_data[] = {3.0}; + float output_data[1]; + + // The kernel under test will fill output_dims[1] onward, to be compared + // against golden_dims[0] onward. 
+ const int golden_dims[] = {1}; + int output_dims[] = {1, 0}; + tflite::testing::TestGather( + input_dims, input_data, positions_dims, positions_data, output_dims, + output_data, golden_dims, golden_data); +} + +TF_LITE_MICRO_TEST(GatherOp_Test2DIndexWith2DResult) { + // For input_dims[], positions_dims[], or output_dims[], element 0 is the + // number of dimensions in that array, not the actual dimension data. + int input_dims[] = {1, 3}; + int positions_dims[] = {2, 1, 2}; + const int32_t positions_data[] = {1, 0}; + const float input_data[] = {1.0, 2.0, 3.0}; + const float golden_data[] = {2.0, 1.0}; + float output_data[2]; + + // The kernel under test will fill output_dims[1] onward, to be compared + // against golden_dims[0] onward. + const int golden_dims[] = {1, 2}; + int output_dims[] = {2, 0, 0}; + tflite::testing::TestGather( + input_dims, input_data, positions_dims, positions_data, output_dims, + output_data, golden_dims, golden_data); +} + +TF_LITE_MICRO_TEST(GatherOp_Duplicate) { + // For input_dims[], positions_dims[], or output_dims[], element 0 is the + // number of dimensions in that array, not the actual dimension data. + int input_dims[] = {3, 1, 2, 2}; + int positions_dims[] = {1, 2}; + const int32_t positions_data[] = {0, 0}; + const float input_data[] = {-2.0, 0.2, 0.7, 0.8}; + const float golden_data[] = {-2, 0.2, 0.7, 0.8, -2, 0.2, 0.7, 0.8}; + float output_data[8]; + + // The kernel under test will fill output_dims[1] onward, to be compared + // against golden_dims[0] onward. + const int golden_dims[] = {2, 2, 2}; + int output_dims[] = {3, 0, 0, 0}; + tflite::testing::TestGather( + input_dims, input_data, positions_dims, positions_data, output_dims, + output_data, golden_dims, golden_data); +} + +TF_LITE_MICRO_TEST(GatherOp_Slice) { + // For input_dims[], positions_dims[], or output_dims[], element 0 is the + // number of dimensions in that array, not the actual dimension data. 
+ int input_dims[] = {2, 4, 1}; + int positions_dims[] = {1, 2}; + const int32_t positions_data[] = {1, 3}; + const float input_data[] = {-2.0, 0.2, 0.7, 0.8}; + const float golden_data[] = {0.2, 0.8}; + float output_data[2]; + + // The kernel under test will fill output_dims[1] onward, to be compared + // against golden_dims[0] onward. + const int golden_dims[] = {2, 1}; + int output_dims[] = {2, 0, 0}; + tflite::testing::TestGather( + input_dims, input_data, positions_dims, positions_data, output_dims, + output_data, golden_dims, golden_data); +} + +TF_LITE_MICRO_TEST(GatherOp_Axis1) { + // For input_dims[], positions_dims[], or output_dims[], element 0 is the + // number of dimensions in that array, not the actual dimension data. + const int axis = 1; + int input_dims[] = {3, 1, 2, 3}; + int positions_dims[] = {1, 2}; + const int32_t positions_data[] = {1, 0}; + const float input_data[] = {1, 2, 3, 4, 5, 6}; + const float golden_data[] = {4, 5, 6, 1, 2, 3}; + float output_data[6]; + + // The kernel under test will fill output_dims[1] onward, to be compared + // against golden_dims[0] onward. + const int golden_dims[] = {1, 2, 3}; + int output_dims[] = {3, 0, 0, 0}; + tflite::testing::TestGather( + input_dims, input_data, positions_dims, positions_data, output_dims, + output_data, golden_dims, golden_data, axis); +} + +TF_LITE_MICRO_TEST(GatherOp_Axis1_0DIndex) { + // For input_dims[], positions_dims[], or output_dims[], element 0 is the + // number of dimensions in that array, not the actual dimension data. + const int axis = 1; + int input_dims[] = {3, 1, 3, 2}; + int positions_dims[] = {0}; + const int32_t positions_data[] = {1}; + const float input_data[] = {1, 2, 3, 4, 5, 6}; + const float golden_data[] = {3, 4}; + float output_data[2]; + + // The kernel under test will fill output_dims[1] onward, to be compared + // against golden_dims[0] onward. 
+ const int golden_dims[] = {1, 2}; + int output_dims[] = {2, 0, 0}; + tflite::testing::TestGather( + input_dims, input_data, positions_dims, positions_data, output_dims, + output_data, golden_dims, golden_data, axis); +} + +TF_LITE_MICRO_TEST(GatherOp_Axis1Slice) { + // For input_dims[], positions_dims[], or output_dims[], element 0 is the + // number of dimensions in that array, not the actual dimension data. + const int axis = 1; + int input_dims[] = {3, 1, 4, 2}; + int positions_dims[] = {1, 2}; + const int32_t positions_data[] = {3, 1}; + const float input_data[] = {1, 2, 3, 4, 5, 6, 7, 8}; + const float golden_data[] = {7, 8, 3, 4}; + float output_data[4]; + + // The kernel under test will fill output_dims[1] onward, to be compared + // against golden_dims[0] onward. + const int golden_dims[] = {1, 2, 2}; + int output_dims[] = {3, 0, 0, 0}; + tflite::testing::TestGather( + input_dims, input_data, positions_dims, positions_data, output_dims, + output_data, golden_dims, golden_data, axis); +} + +TF_LITE_MICRO_TEST(GatherOp_LastAxis) { + // For input_dims[], positions_dims[], or output_dims[], element 0 is the + // number of dimensions in that array, not the actual dimension data. + const int axis = -1; + int input_dims[] = {3, 1, 2, 3}; + int positions_dims[] = {1, 2}; + const int32_t positions_data[] = {2, 0}; + const float input_data[] = {1, 2, 3, 4, 5, 6}; + const float golden_data[] = {3, 1, 6, 4}; + float output_data[4]; + + // The kernel under test will fill output_dims[1] onward, to be compared + // against golden_dims[0] onward. + const int golden_dims[] = {1, 2, 2}; + int output_dims[] = {3, 0, 0, 0}; + tflite::testing::TestGather( + input_dims, input_data, positions_dims, positions_data, output_dims, + output_data, golden_dims, golden_data, axis); +} + +TF_LITE_MICRO_TEST(GatherOp_LastAxis0DIndex) { + // For input_dims[], positions_dims[], or output_dims[], element 0 is the + // number of dimensions in that array, not the actual dimension data. 
+ const int axis = -1; + int input_dims[] = {3, 1, 2, 3}; + int positions_dims[] = {0}; + const int32_t positions_data[] = {2}; + const float input_data[] = {1, 2, 3, 4, 5, 6}; + const float golden_data[] = {3, 6}; + float output_data[2]; + + // The kernel under test will fill output_dims[1] onward, to be compared + // against golden_dims[0] onward. + const int golden_dims[] = {1, 2}; + int output_dims[] = {2, 0, 0}; + tflite::testing::TestGather( + input_dims, input_data, positions_dims, positions_data, output_dims, + output_data, golden_dims, golden_data, axis); +} + +TF_LITE_MICRO_TEST(GatherOp_Float32Int32) { + // For input_dims[], positions_dims[], or output_dims[], element 0 is the + // number of dimensions in that array, not the actual dimension data. + int input_dims[] = {2, 2, 2}; + int positions_dims[] = {1, 2}; + const int32_t positions_data[] = {1, 0}; + const float input_data[] = {13.3, -13.4, -1.4, 1.5}; + const float golden_data[] = {-1.4, 1.5, 13.3, -13.4}; + float output_data[4]; + + // The kernel under test will fill output_dims[1] onward, to be compared + // against golden_dims[0] onward. + const int golden_dims[] = {2, 2}; + int output_dims[] = {2, 0, 0}; + tflite::testing::TestGather( + input_dims, input_data, positions_dims, positions_data, output_dims, + output_data, golden_dims, golden_data); +} + +TF_LITE_MICRO_TEST(GatherOp_Int8Int32) { + // For input_dims[], positions_dims[], or output_dims[], element 0 is the + // number of dimensions in that array, not the actual dimension data. + int input_dims[] = {2, 2, 2}; + int positions_dims[] = {1, 2}; + const int32_t positions_data[] = {1, 0}; + const int8_t input_data[] = {-13, -120, 14, 15}; + const int8_t golden_data[] = {14, 15, -13, -120}; + int8_t output_data[4]; + + // The kernel under test will fill output_dims[1] onward, to be compared + // against golden_dims[0] onward. 
+ const int golden_dims[] = {2, 2}; + int output_dims[] = {2, 0, 0}; + tflite::testing::TestGather( + input_dims, input_data, positions_dims, positions_data, output_dims, + output_data, golden_dims, golden_data); +} + +TF_LITE_MICRO_TEST(GatherOp_BatchDims2) { + // For input_dims[], positions_dims[], or output_dims[], element 0 is the + // number of dimensions in that array, not the actual dimension data. + const int axis = 2; + const int batch_dims = 2; + int input_dims[] = {4, 2, 2, 3, 5}; + int positions_dims[] = {3, 2, 2, 2}; + const int32_t positions_data[] = {1, 0, 0, 1, 1, 0, 0, 1}; + const float input_data[] = {0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, + 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, + 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, + 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, + 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59}; + const float golden_data[] = {5, 6, 7, 8, 9, 0, 1, 2, 3, 4, + 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, + 35, 36, 37, 38, 39, 30, 31, 32, 33, 34, + 45, 46, 47, 48, 49, 50, 51, 52, 53, 54}; + float output_data[40]; + + // The kernel under test will fill output_dims[1] onward, to be compared + // against golden_dims[0] onward. + const int golden_dims[] = {2, 2, 2, 5}; + int output_dims[] = {4, 0, 0, 0, 0}; + tflite::testing::TestGather( + input_dims, input_data, positions_dims, positions_data, output_dims, + output_data, golden_dims, golden_data, axis, batch_dims); +} + +TF_LITE_MICRO_TEST(GatherOp_BatchDims1) { + // For input_dims[], positions_dims[], or output_dims[], element 0 is the + // number of dimensions in that array, not the actual dimension data. 
+  const int axis = 2;
+  const int batch_dims = 1;
+  int input_dims[] = {4, 2, 2, 3, 5};
+  int positions_dims[] = {3, 2, 2, 2};
+  const int32_t positions_data[] = {1, 0, 0, 1, 1, 0, 0, 1};
+  const int8_t input_data[] = {0,  1,  2,  3,  4,  5,  6,  7,  8,  9,  10, 11,
+                               12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23,
+                               24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35,
+                               36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47,
+                               48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59};
+  const int8_t golden_data[] = {
+      5,  6,  7,  8,  9,  0,  1,  2,  3,  4,  0,  1,  2,  3,  4,  5,
+      6,  7,  8,  9,  20, 21, 22, 23, 24, 15, 16, 17, 18, 19, 15, 16,
+      17, 18, 19, 20, 21, 22, 23, 24, 35, 36, 37, 38, 39, 30, 31, 32,
+      33, 34, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 50, 51, 52, 53,
+      54, 45, 46, 47, 48, 49, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54};
+  int8_t output_data[80];
+
+  // The kernel under test will fill output_dims[1] onward, to be compared
+  // against golden_dims[0] onward.
+  const int golden_dims[] = {2, 2, 2, 2, 5};
+  int output_dims[] = {5, 0, 0, 0, 0, 0};
+  tflite::testing::TestGather<int8_t, int32_t>(
+      input_dims, input_data, positions_dims, positions_data, output_dims,
+      output_data, golden_dims, golden_data, axis, batch_dims);
+}
+
+TF_LITE_MICRO_TEST(GatherOp_NegativeBatchDims) {
+  // For input_dims[], positions_dims[], or output_dims[], element 0 is the
+  // number of dimensions in that array, not the actual dimension data.
+  const int axis = 2;
+  const int batch_dims = -2;
+  int input_dims[] = {4, 2, 2, 3, 5};
+  int positions_dims[] = {3, 2, 2, 2};
+  const int32_t positions_data[] = {1, 0, 0, 1, 1, 0, 0, 1};
+  const int8_t input_data[] = {0,  1,  2,  3,  4,  5,  6,  7,  8,  9,  10, 11,
+                               12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23,
+                               24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35,
+                               36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47,
+                               48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59};
+  const int8_t golden_data[] = {
+      5,  6,  7,  8,  9,  0,  1,  2,  3,  4,  0,  1,  2,  3,  4,  5,
+      6,  7,  8,  9,  20, 21, 22, 23, 24, 15, 16, 17, 18, 19, 15, 16,
+      17, 18, 19, 20, 21, 22, 23, 24, 35, 36, 37, 38, 39, 30, 31, 32,
+      33, 34, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 50, 51, 52, 53,
+      54, 45, 46, 47, 48, 49, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54};
+  int8_t output_data[80];
+
+  // The kernel under test will fill output_dims[1] onward, to be compared
+  // against golden_dims[0] onward.
+  const int golden_dims[] = {2, 2, 2, 2, 5};
+  int output_dims[] = {5, 0, 0, 0, 0, 0};
+  tflite::testing::TestGather<int8_t, int32_t>(
+      input_dims, input_data, positions_dims, positions_data, output_dims,
+      output_data, golden_dims, golden_data, axis, batch_dims);
+}
+
+TF_LITE_MICRO_TEST(GatherOp_BatchDimsEqualIndexDims) {
+  // For input_dims[], positions_dims[], or output_dims[], element 0 is the
+  // number of dimensions in that array, not the actual dimension data.
+  const int axis = 3;
+  const int batch_dims = 3;
+  int input_dims[] = {4, 2, 2, 2, 5};
+  int positions_dims[] = {3, 2, 2, 2};
+  const int32_t positions_data[] = {1, 0, 0, 1, 1, 0, 0, 1};
+  const int8_t input_data[] = {0,  1,  2,  3,  4,  5,  6,  7,  8,  9,
+                               10, 11, 12, 13, 14, 15, 16, 17, 18, 19,
+                               20, 21, 22, 23, 24, 25, 26, 27, 28, 29,
+                               30, 31, 32, 33, 34, 35, 36, 37, 38, 39};
+  const int8_t golden_data[] = {1, 5, 10, 16, 21, 25, 30, 36};
+  int8_t output_data[8];
+
+  // The kernel under test will fill output_dims[1] onward, to be compared
+  // against golden_dims[0] onward.
+  const int golden_dims[] = {2, 2, 2};
+  int output_dims[] = {3, 0, 0, 0};
+  tflite::testing::TestGather<int8_t, int32_t>(
+      input_dims, input_data, positions_dims, positions_data, output_dims,
+      output_data, golden_dims, golden_data, axis, batch_dims);
+}
+
+TF_LITE_MICRO_TESTS_END
diff --git a/tensorflow/lite/micro/kernels/hard_swish.cc b/tensorflow/lite/micro/kernels/hard_swish.cc
new file mode 100644
index 0000000..f7f49ec
--- /dev/null
+++ b/tensorflow/lite/micro/kernels/hard_swish.cc
@@ -0,0 +1,75 @@
+/* Copyright 2021 The TensorFlow Authors. All Rights Reserved.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+==============================================================================*/
+
+#include "tensorflow/lite/kernels/internal/reference/hard_swish.h"
+
+#include "tensorflow/lite/c/builtin_op_data.h"
+#include "tensorflow/lite/c/common.h"
+#include "tensorflow/lite/kernels/internal/common.h"
+#include "tensorflow/lite/kernels/internal/quantization_util.h"
+#include "tensorflow/lite/kernels/internal/tensor_ctypes.h"
+#include "tensorflow/lite/kernels/internal/types.h"
+#include "tensorflow/lite/kernels/kernel_util.h"
+#include "tensorflow/lite/kernels/op_macros.h"
+#include "tensorflow/lite/micro/kernels/hard_swish.h"
+#include "tensorflow/lite/micro/kernels/kernel_util.h"
+#include "tensorflow/lite/micro/micro_log.h"
+#include "tensorflow/lite/micro/micro_utils.h"
+
+namespace tflite {
+namespace {
+void* HardSwishInit(TfLiteContext* context, const char* buffer, size_t length) {
+  TFLITE_DCHECK(context->AllocatePersistentBuffer != nullptr);
+  return context->AllocatePersistentBuffer(context, sizeof(HardSwishParams));
+}
+
+TfLiteStatus HardSwishEval(TfLiteContext* context, TfLiteNode* node) {
+  const TfLiteEvalTensor* input =
+      tflite::micro::GetEvalInput(context, node, kHardSwishInputTensor);
+  TfLiteEvalTensor* output =
+      tflite::micro::GetEvalOutput(context, node, kHardSwishOutputTensor);
+  HardSwishParams* params = static_cast<HardSwishParams*>(node->user_data);
+
+  switch (input->type) {
+    case kTfLiteFloat32: {
+      tflite::reference_ops::HardSwish<float>(
+          tflite::micro::GetTensorShape(input),
+          tflite::micro::GetTensorData<float>(input),
+          tflite::micro::GetTensorShape(output),
+          tflite::micro::GetTensorData<float>(output));
+    } break;
+    case kTfLiteInt8: {
+      tflite::reference_ops::HardSwish<int8_t>(
+          *params, tflite::micro::GetTensorShape(input),
+          tflite::micro::GetTensorData<int8_t>(input),
+          tflite::micro::GetTensorShape(output),
+          tflite::micro::GetTensorData<int8_t>(output));
+    } break;
+    default: {
+      MicroPrintf("Unsupported type %s", TfLiteTypeGetName(input->type));
+      return kTfLiteError;
+    }
+  }
+  return
kTfLiteOk; +} + +} // namespace + +TFLMRegistration Register_HARD_SWISH() { + return tflite::micro::RegisterOp(HardSwishInit, tflite::HardSwishPrepare, + HardSwishEval); +} + +} // namespace tflite diff --git a/tensorflow/lite/micro/kernels/hard_swish.h b/tensorflow/lite/micro/kernels/hard_swish.h new file mode 100644 index 0000000..3ffe60d --- /dev/null +++ b/tensorflow/lite/micro/kernels/hard_swish.h @@ -0,0 +1,30 @@ +/* Copyright 2021 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#ifndef TENSORFLOW_LITE_MICRO_KERNELS_HARD_SWISH_H_ +#define TENSORFLOW_LITE_MICRO_KERNELS_HARD_SWISH_H_ + +#include "tensorflow/lite/c/builtin_op_data.h" +#include "tensorflow/lite/c/common.h" + +namespace tflite { + +extern const int kHardSwishInputTensor; +extern const int kHardSwishOutputTensor; + +TfLiteStatus HardSwishPrepare(TfLiteContext* context, TfLiteNode* node); +} // namespace tflite + +#endif // TENSORFLOW_LITE_MICRO_KERNELS_HARD_SWISH_H_ diff --git a/tensorflow/lite/micro/kernels/hard_swish_common.cc b/tensorflow/lite/micro/kernels/hard_swish_common.cc new file mode 100644 index 0000000..8f84652 --- /dev/null +++ b/tensorflow/lite/micro/kernels/hard_swish_common.cc @@ -0,0 +1,86 @@ +/* Copyright 2021 The TensorFlow Authors. All Rights Reserved. 
+ +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#include "tensorflow/lite/c/builtin_op_data.h" +#include "tensorflow/lite/c/common.h" +#include "tensorflow/lite/kernels/internal/common.h" +#include "tensorflow/lite/kernels/internal/quantization_util.h" +#include "tensorflow/lite/kernels/internal/reference/hard_swish.h" +#include "tensorflow/lite/kernels/internal/tensor_ctypes.h" +#include "tensorflow/lite/kernels/internal/types.h" +#include "tensorflow/lite/kernels/kernel_util.h" +#include "tensorflow/lite/kernels/op_macros.h" +#include "tensorflow/lite/micro/kernels/hard_swish.h" +#include "tensorflow/lite/micro/kernels/kernel_util.h" +#include "tensorflow/lite/micro/micro_utils.h" + +namespace tflite { + +const int kHardSwishInputTensor = 0; +const int kHardSwishOutputTensor = 0; + +TfLiteStatus HardSwishPrepare(TfLiteContext* context, TfLiteNode* node) { + MicroContext* micro_context = GetMicroContext(context); + + TFLITE_DCHECK(node->user_data != nullptr); + TF_LITE_ENSURE_EQ(context, NumInputs(node), 1); + TF_LITE_ENSURE_EQ(context, NumOutputs(node), 1); + + TfLiteTensor* input = + micro_context->AllocateTempInputTensor(node, kHardSwishInputTensor); + TF_LITE_ENSURE(context, input != nullptr); + TfLiteTensor* output = + micro_context->AllocateTempOutputTensor(node, kHardSwishOutputTensor); + TF_LITE_ENSURE(context, output != nullptr); + + if (input->type == kTfLiteInt8) { + HardSwishParams* params = 
static_cast<HardSwishParams*>(node->user_data);
+
+    params->input_zero_point = input->params.zero_point;
+    params->output_zero_point = output->params.zero_point;
+
+    const float input_scale = input->params.scale;
+    const float hires_input_scale = (1.0f / 128.0f) * input_scale;
+    const float reluish_scale = 3.0f / 32768.0f;
+    const float output_scale = output->params.scale;
+
+    const double output_multiplier =
+        static_cast<double>(hires_input_scale / output_scale);
+    int32_t output_multiplier_fixedpoint_int32;
+    QuantizeMultiplier(output_multiplier, &output_multiplier_fixedpoint_int32,
+                       &params->output_multiplier_exponent);
+    DownScaleInt32ToInt16Multiplier(
+        output_multiplier_fixedpoint_int32,
+        &params->output_multiplier_fixedpoint_int16);
+
+    TF_LITE_ENSURE(context, params->output_multiplier_exponent <= 0);
+
+    const double reluish_multiplier =
+        static_cast<double>(hires_input_scale / reluish_scale);
+    int32_t reluish_multiplier_fixedpoint_int32;
+    QuantizeMultiplier(reluish_multiplier, &reluish_multiplier_fixedpoint_int32,
+                       &params->reluish_multiplier_exponent);
+    DownScaleInt32ToInt16Multiplier(
+        reluish_multiplier_fixedpoint_int32,
+        &params->reluish_multiplier_fixedpoint_int16);
+  }
+
+  micro_context->DeallocateTempTfLiteTensor(input);
+  micro_context->DeallocateTempTfLiteTensor(output);
+
+  return kTfLiteOk;
+}
+
+}  // namespace tflite
diff --git a/tensorflow/lite/micro/kernels/hard_swish_test.cc b/tensorflow/lite/micro/kernels/hard_swish_test.cc
new file mode 100644
index 0000000..2a33deb
--- /dev/null
+++ b/tensorflow/lite/micro/kernels/hard_swish_test.cc
@@ -0,0 +1,293 @@
+/* Copyright 2019 The TensorFlow Authors. All Rights Reserved.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+==============================================================================*/
+
+#include <random>
+
+#include "tensorflow/lite/c/builtin_op_data.h"
+#include "tensorflow/lite/c/common.h"
+#include "tensorflow/lite/micro/kernels/kernel_runner.h"
+#include "tensorflow/lite/micro/test_helpers.h"
+#include "tensorflow/lite/micro/testing/micro_test.h"
+
+namespace tflite {
+namespace testing {
+namespace {
+
+void GenerateUniformRandomVector(int size, float min, float max,
+                                 std::minstd_rand* random_engine,
+                                 float* result) {
+  // Never use std::uniform_*_distribution in tests, it's
+  // implementation-defined. Likewise, don't use std::default_random_engine,
+  // implementation-defined. Implementation-defined is bad because it means that
+  // any toolchain update or new platform may run into test failures.
+  // std::minstd_rand is a standard instantiation of
+  // std::linear_congruential_engine, the cheapest generator in c++11 stdlib,
+  // it's good enough here.
+  for (int i = 0; i < size; i++) {
+    // We don't care whether the `max` value may ever be produced exactly.
+    // It may actually be thanks to rounding, as std::minstd_rand::modulus
+    // is 2^31 - 1 is greater than the inverse float epsilon.
float random_value_scaled_0_1 =
+        (*random_engine)() *
+        (1.0f / static_cast<float>(std::minstd_rand::modulus));
+    result[i] = min + (max - min) * random_value_scaled_0_1;
+  }
+}
+
+void EvalTestReferenceHardSwish(int size, float* input, float* result) {
+  for (int i = 0; i < size; i++) {
+    const float in = input[i];
+    result[i] = in * std::min(6.0f, std::max(0.0f, in + 3)) * (1.0f / 6.0f);
+  }
+}
+
+template <typename T>
+void TestHardSwishQuantized(int size, const T* output_data,
+                            T* input_data_quantized, float* dequantized_output,
+                            float input_min, float input_max, float output_min,
+                            float output_max, std::minstd_rand* random_engine,
+                            float* float_input_values,
+                            float* float_ref_output_values) {
+  int input_dims_data[] = {2, 1, size};
+  int output_dims_data[] = {2, 1, size};
+  const float input_scale = ScaleFromMinMax<T>(input_min, input_max);
+  const int input_zero_point = ZeroPointFromMinMax<T>(input_min, input_max);
+  const float output_scale = ScaleFromMinMax<T>(output_min, output_max);
+  const int output_zero_point = ZeroPointFromMinMax<T>(output_min, output_max);
+
+  // The numerical error for any 8bit quantized function is at least one half
+  // times the quantization step: 0.5 * (kOutMax - kOutMin) / 256.
+  // To that we add again the quantization step (kOutMax - kOutMin) / 256
+  // to allow for an off-by-one rounding error.
+  const float kTolerance =
+      std::max(input_max - input_min, output_max - output_min) * (1.5f / 256.f);
+
+  TfLiteIntArray* input_dims = IntArrayFromInts(input_dims_data);
+  TfLiteIntArray* output_dims = IntArrayFromInts(output_dims_data);
+  const int output_elements_count = ElementCount(*output_dims);
+
+  TF_LITE_MICRO_EXPECT_EQ(output_elements_count, size);
+
+  GenerateUniformRandomVector(size, input_min, input_max, random_engine,
+                              float_input_values);
+  EvalTestReferenceHardSwish(size, float_input_values, float_ref_output_values);
+  for (int i = 0; i < size; i++) {
+    float val = float_ref_output_values[i];
+    float_ref_output_values[i] =
+        std::min(output_max, std::max(output_min, val));
+  }
+
+  constexpr int inputs_size = 1;
+  constexpr int outputs_size = 1;
+  constexpr int tensors_size = inputs_size + outputs_size;
+  TfLiteTensor tensors[tensors_size] = {
+      CreateQuantizedTensor(float_input_values, input_data_quantized,
+                            input_dims, input_scale, input_zero_point),
+      CreateQuantizedTensor(output_data, output_dims, output_scale,
+                            output_zero_point),
+  };
+
+  int inputs_array_data[] = {1, 0};
+  TfLiteIntArray* inputs_array = IntArrayFromInts(inputs_array_data);
+  int outputs_array_data[] = {1, 1};
+  TfLiteIntArray* outputs_array = IntArrayFromInts(outputs_array_data);
+
+  const TFLMRegistration registration = tflite::Register_HARD_SWISH();
+  micro::KernelRunner runner(registration, tensors, tensors_size, inputs_array,
+                             outputs_array, /*builtin_data=*/nullptr);
+
+  TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, runner.InitAndPrepare());
+  TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, runner.Invoke());
+
+  Dequantize(output_data, output_elements_count, output_scale,
+             output_zero_point, dequantized_output);
+
+  for (int i = 0; i < output_elements_count; ++i) {
+    TF_LITE_MICRO_EXPECT_NEAR(float_ref_output_values[i], dequantized_output[i],
+                              kTolerance);
+  }
+}
+
+template <typename T>
+void TestHardSwishQuantizedBias(const int size, const T* output_data,
+                                T* input_data_quantized,
+                                float*
dequantized_output, float input_min,
+                                float input_max, float output_min,
+                                float output_max, float tolerated_bias,
+                                float* float_input_values,
+                                float* float_ref_output_values) {
+  const float input_scale = ScaleFromMinMax<T>(input_min, input_max);
+  const float output_scale = ScaleFromMinMax<T>(output_min, output_max);
+
+  const int input_zero_point = ZeroPointFromMinMax<T>(input_min, input_max);
+  const int output_zero_point = ZeroPointFromMinMax<T>(output_min, output_max);
+
+  const float max_scale = std::max(output_scale, input_scale);
+
+  // In this bias-focused test case, no need for randomly generated input
+  // values.
+  TF_LITE_MICRO_EXPECT_LE(input_min, -3.0f);
+  TF_LITE_MICRO_EXPECT_GE(input_max, 3.0f);
+  const int quantized_input_negative_three = TfLiteRound(
+      std::numeric_limits<T>::min() + (-3.0f - input_min) / input_scale);
+  const int quantized_input_positive_three = TfLiteRound(
+      std::numeric_limits<T>::min() + (3.0f - input_min) / input_scale);
+
+  for (int i = quantized_input_negative_three;
+       i < size && i <= quantized_input_positive_three; i++) {
+    float_input_values[i] =
+        input_min + (i - std::numeric_limits<T>::min()) * input_scale;
+  }
+
+  EvalTestReferenceHardSwish(size, float_input_values, float_ref_output_values);
+  for (int i = 0; i < size; i++) {
+    float val = float_ref_output_values[i];
+    float_ref_output_values[i] =
+        std::min(output_max, std::max(output_min, val));
+  }
+
+  int input_dims_data[] = {2, 1, size};
+  int output_dims_data[] = {2, 1, size};
+
+  TfLiteIntArray* input_dims = IntArrayFromInts(input_dims_data);
+  TfLiteIntArray* output_dims = IntArrayFromInts(output_dims_data);
+  const int output_elements_count = ElementCount(*output_dims);
+
+  TF_LITE_MICRO_EXPECT_EQ(output_elements_count, size);
+
+  constexpr int inputs_size = 1;
+  constexpr int outputs_size = 1;
+  constexpr int tensors_size = inputs_size + outputs_size;
+  TfLiteTensor tensors[tensors_size] = {
+      CreateQuantizedTensor(float_input_values, input_data_quantized,
+                            input_dims,
input_scale, input_zero_point), + CreateQuantizedTensor(output_data, output_dims, output_scale, + output_zero_point), + }; + + int inputs_array_data[] = {1, 0}; + TfLiteIntArray* inputs_array = IntArrayFromInts(inputs_array_data); + int outputs_array_data[] = {1, 1}; + TfLiteIntArray* outputs_array = IntArrayFromInts(outputs_array_data); + + const TFLMRegistration registration = tflite::Register_HARD_SWISH(); + micro::KernelRunner runner(registration, tensors, tensors_size, inputs_array, + outputs_array, /*builtin_data=*/nullptr); + + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, runner.InitAndPrepare()); + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, runner.Invoke()); + + Dequantize(output_data, output_elements_count, output_scale, + output_zero_point, dequantized_output); + + float sum_diff = 0; + for (int i = 0; i < size; i++) { + sum_diff += dequantized_output[i] - float_ref_output_values[i]; + } + const float bias = sum_diff / (size * max_scale); + TF_LITE_MICRO_EXPECT_LE(std::abs(bias), tolerated_bias); +} + +void TestHardSwishFloat(const int size, float* output_data, + std::minstd_rand* random_engine, + float* float_input_values, + float* float_ref_output_values) { + const float kMin = -10.0f; + const float kMax = 10.0f; + GenerateUniformRandomVector(size, kMin, kMax, random_engine, + float_input_values); + + EvalTestReferenceHardSwish(size, float_input_values, float_ref_output_values); + + int input_dims_data[] = {1, size}; + int output_dims_data[] = {1, size}; + + TfLiteIntArray* input_dims = IntArrayFromInts(input_dims_data); + TfLiteIntArray* output_dims = IntArrayFromInts(output_dims_data); + const int output_elements_count = ElementCount(*output_dims); + + TF_LITE_MICRO_EXPECT_EQ(output_elements_count, size); + + constexpr int inputs_size = 1; + constexpr int outputs_size = 1; + constexpr int tensors_size = inputs_size + outputs_size; + TfLiteTensor tensors[tensors_size] = { + CreateTensor(float_input_values, input_dims), + CreateTensor(output_data, output_dims), + }; + + 
int inputs_array_data[] = {1, 0}; + TfLiteIntArray* inputs_array = IntArrayFromInts(inputs_array_data); + int outputs_array_data[] = {1, 1}; + TfLiteIntArray* outputs_array = IntArrayFromInts(outputs_array_data); + + const TFLMRegistration registration = tflite::Register_HARD_SWISH(); + micro::KernelRunner runner(registration, tensors, tensors_size, inputs_array, + outputs_array, /*builtin_data=*/nullptr); + + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, runner.InitAndPrepare()); + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, runner.Invoke()); + + for (int i = 0; i < output_elements_count; ++i) { + TF_LITE_MICRO_EXPECT_NEAR(float_ref_output_values[i], output_data[i], + 1e-5f); + } +} + +} // namespace +} // namespace testing +} // namespace tflite + +TF_LITE_MICRO_TESTS_BEGIN + +TF_LITE_MICRO_TEST(SimpleHardSwishTestFloat) { + std::minstd_rand random_engine; + constexpr int size = 100; + float output_data[size] = {0.f}; + float input_values[size] = {0.f}; + float output_values[size] = {0.f}; + + tflite::testing::TestHardSwishFloat(size, output_data, &random_engine, + input_values, output_values); +} + +TF_LITE_MICRO_TEST(SimpleHardSwishTestInt8) { + std::minstd_rand random_engine; + constexpr int pairs = 4, one_pair = 2; + constexpr int size = 101; + constexpr float minmax_pairs[pairs][one_pair] = { + {0.f, 1.f}, {-2.f, 1.f}, {-5.f, 10.f}, {-40.f, 60.f}}; + int8_t output_data[size] = {0}; + int8_t input_data_quantized[size] = {0}; + float dequantized_output[size] = {0.f}; + float input_values[size] = {0.f}; + float output_values[size] = {0.f}; + + for (int x = 0; x < pairs; x++) { + for (int y = 0; y < pairs; y++) { + float input_min = minmax_pairs[x][0]; + float input_max = minmax_pairs[x][1]; + float output_min = minmax_pairs[y][0]; + float output_max = minmax_pairs[y][1]; + + tflite::testing::TestHardSwishQuantized( + size, output_data, input_data_quantized, dequantized_output, + input_min, input_max, output_min, output_max, &random_engine, + input_values, output_values); + } + } 
+}
+
+TF_LITE_MICRO_TESTS_END
diff --git a/tensorflow/lite/micro/kernels/if.cc b/tensorflow/lite/micro/kernels/if.cc
new file mode 100644
index 0000000..92f58be
--- /dev/null
+++ b/tensorflow/lite/micro/kernels/if.cc
@@ -0,0 +1,121 @@
+/* Copyright 2021 The TensorFlow Authors. All Rights Reserved.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+==============================================================================*/
+
+#include <stddef.h>
+
+#include <cstring>
+
+#include "tensorflow/lite/c/builtin_op_data.h"
+#include "tensorflow/lite/c/common.h"
+#include "tensorflow/lite/kernels/internal/compatibility.h"
+#include "tensorflow/lite/kernels/kernel_util.h"
+#include "tensorflow/lite/micro/kernels/kernel_util.h"
+#include "tensorflow/lite/micro/memory_helpers.h"
+#include "tensorflow/lite/micro/micro_context.h"
+#include "tensorflow/lite/micro/micro_graph.h"
+#include "tensorflow/lite/schema/schema_generated.h"
+
+namespace tflite {
+
+namespace {
+
+struct OpData {
+  int then_subgraph_index;
+  int else_subgraph_index;
+};
+
+void* Init(TfLiteContext* context, const char* buffer, size_t length) {
+  TFLITE_DCHECK(context->AllocatePersistentBuffer != nullptr);
+  return context->AllocatePersistentBuffer(context, sizeof(OpData));
+}
+
+TfLiteStatus Prepare(TfLiteContext* context, TfLiteNode* node) {
+  OpData* op_data = reinterpret_cast<OpData*>(node->user_data);
+  const auto* params =
+      reinterpret_cast<const TfLiteIfParams*>(node->builtin_data);
+  op_data->then_subgraph_index = params->then_subgraph_index;
+ 
op_data->else_subgraph_index = params->else_subgraph_index;
+
+  TF_LITE_ENSURE(context, node->inputs->size > 0);
+
+  // The first input is the condition.
+  tflite::MicroContext* micro_context = tflite::GetMicroContext(context);
+  TfLiteTensor* cond = micro_context->AllocateTempInputTensor(node, 0);
+
+  TF_LITE_ENSURE(context, cond != nullptr);
+  TF_LITE_ENSURE_EQ(context, cond->type, kTfLiteBool);
+  TF_LITE_ENSURE_EQ(context, NumElements(cond), 1);
+
+  micro_context->DeallocateTempTfLiteTensor(cond);
+
+  // The first input of the node is the condition. The rest of inputs are
+  // passed to the branch subgraphs. Therefore, the number of subgraph inputs
+  // will be the number of node inputs - 1.
+  size_t num_inputs = node->inputs->size - 1;
+  size_t num_outputs = node->outputs->size;
+
+  MicroGraph& graph_info = micro_context->graph();
+
+  TF_LITE_ENSURE(context,
+                 op_data->then_subgraph_index < graph_info.NumSubgraphs());
+  TF_LITE_ENSURE(context,
+                 op_data->else_subgraph_index < graph_info.NumSubgraphs());
+
+  TF_LITE_ENSURE_EQ(context, num_inputs,
+                    graph_info.NumSubgraphInputs(op_data->then_subgraph_index));
+  TF_LITE_ENSURE_EQ(
+      context, num_outputs,
+      graph_info.NumSubgraphOutputs(op_data->then_subgraph_index));
+
+  return kTfLiteOk;
+}
+
+TfLiteStatus Eval(TfLiteContext* context, TfLiteNode* node) {
+  const OpData* op_data = reinterpret_cast<OpData*>(node->user_data);
+
+  tflite::MicroContext* micro_context = tflite::GetMicroContext(context);
+  TfLiteTensor* cond = micro_context->AllocateTempInputTensor(node, 0);
+
+  TF_LITE_ENSURE(context, cond != nullptr);
+  bool cond_value = cond->data.b[0];
+  micro_context->DeallocateTempTfLiteTensor(cond);
+
+  MicroGraph* graph_info = &micro_context->graph();
+  // Currently we copy the input / output between the subgraphs.
+  int active_branch_subgraph_index =
+      cond_value ?
op_data->then_subgraph_index : op_data->else_subgraph_index; + + TF_LITE_ENSURE_OK(context, + tflite::micro::CopyOpInputsToSubgraphInputs( + context, node, graph_info, active_branch_subgraph_index, + /*first_tensor_idx=*/1)); + + TF_LITE_ENSURE_OK(context, + graph_info->InvokeSubgraph(active_branch_subgraph_index)); + + TF_LITE_ENSURE_OK( + context, tflite::micro::CopySubgraphOutputsToOpOutputs( + context, node, graph_info, active_branch_subgraph_index)); + + return kTfLiteOk; +} + +} // namespace. + +TFLMRegistration Register_IF() { + return tflite::micro::RegisterOp(Init, Prepare, Eval); +} + +} // namespace tflite diff --git a/tensorflow/lite/micro/kernels/if_test.cc b/tensorflow/lite/micro/kernels/if_test.cc new file mode 100644 index 0000000..6e33941 --- /dev/null +++ b/tensorflow/lite/micro/kernels/if_test.cc @@ -0,0 +1,203 @@ +/* Copyright 2021 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/ + +#include "tensorflow/lite/c/builtin_op_data.h" +#include "tensorflow/lite/c/common.h" +#include "tensorflow/lite/micro/kernels/kernel_runner.h" +#include "tensorflow/lite/micro/micro_arena_constants.h" +#include "tensorflow/lite/micro/micro_interpreter.h" +#include "tensorflow/lite/micro/mock_micro_graph.h" +#include "tensorflow/lite/micro/test_helper_custom_ops.h" +#include "tensorflow/lite/micro/test_helpers.h" +#include "tensorflow/lite/micro/testing/micro_test.h" + +namespace tflite { +namespace testing { +namespace { + +void TestIf(int* input1_dims_data, const bool* input1_data, + int* input2_dims_data, float* input2_data, int* output_dims_data, + const float* expected_output_data, + const int subgraph1_invoke_count_golden, + const int subgraph2_invoke_count_golden, float* output_data) { + TfLiteIntArray* input1_dims = IntArrayFromInts(input1_dims_data); + TfLiteIntArray* input2_dims = IntArrayFromInts(input2_dims_data); + TfLiteIntArray* output_dims = IntArrayFromInts(output_dims_data); + const int output_dims_count = ElementCount(*output_dims); + + constexpr int inputs_size = 2; + constexpr int outputs_size = 1; + constexpr int tensors_size = inputs_size + outputs_size; + TfLiteTensor tensors[tensors_size] = { + CreateTensor(input1_data, input1_dims), + CreateTensor(input2_data, input2_dims), + CreateTensor(output_data, output_dims), + }; + + int inputs_array_data[] = {2, 0, 1}; + TfLiteIntArray* inputs_array = IntArrayFromInts(inputs_array_data); + int outputs_array_data[] = {1, 2}; + TfLiteIntArray* outputs_array = IntArrayFromInts(outputs_array_data); + + TfLiteIfParams params; + params.then_subgraph_index = 1; + params.else_subgraph_index = 2; + + const TFLMRegistration registration = tflite::Register_IF(); + micro::KernelRunner runner(registration, tensors, tensors_size, inputs_array, + outputs_array, ¶ms); + + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, 
runner.InitAndPrepare()); + + TF_LITE_MICRO_EXPECT_TRUE(runner.ValidateTempBufferDeallocated()); + + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, runner.Invoke()); + + TF_LITE_MICRO_EXPECT_EQ(output_dims_count, 2); + for (int i = 0; i < output_dims_count; ++i) { + TF_LITE_MICRO_EXPECT_EQ(expected_output_data[i], output_data[i]); + } + + TF_LITE_MICRO_EXPECT_EQ(subgraph1_invoke_count_golden, + runner.GetMockGraph()->get_invoke_count(1)); + TF_LITE_MICRO_EXPECT_EQ(subgraph2_invoke_count_golden, + runner.GetMockGraph()->get_invoke_count(2)); + + TF_LITE_MICRO_EXPECT_TRUE(runner.ValidateTempBufferDeallocated()); +} + +} // namespace +} // namespace testing +} // namespace tflite + +TF_LITE_MICRO_TESTS_BEGIN + +TF_LITE_MICRO_TEST(IfShouldInvokeSubgraphWithMockModelConditionTrue) { + int shape[] = {2, 1, 2}; + int condition_shape[] = {1, 1}; + const bool condition[] = {true}; + float input[] = {5.0, 2.0}; + const float golden[] = {5.0, 2.0}; + float output_data[2] = {0}; + tflite::testing::TestIf(condition_shape, condition, shape, input, shape, + golden, 1, 0, output_data); +} + +TF_LITE_MICRO_TEST(IfShouldInvokeSubgraphWithMockModelConditionFalse) { + int shape[] = {2, 1, 2}; + int condition_shape[] = {1, 1}; + const bool condition[] = {false}; + float input[] = {5.0, 2.0}; + const float golden[] = {5.0, 2.0}; + float output_data[2] = {0}; + tflite::testing::TestIf(condition_shape, condition, shape, input, shape, + golden, 0, 1, output_data); +} + +TF_LITE_MICRO_TEST(IfShouldInvokeSubgraphConditionTrue) { + constexpr int kArenaSize = 5000; + uint8_t arena[kArenaSize]; + + const tflite::Model* model = + tflite::testing::GetSimpleModelWithSubgraphsAndIf(); + tflite::MicroMutableOpResolver<3> resolver; + resolver.AddIf(); + resolver.AddAdd(); + resolver.AddMul(); + tflite::MicroInterpreter interpreter(model, resolver, arena, kArenaSize); + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, interpreter.AllocateTensors()); + TfLiteTensor* condition = interpreter.input(0); + TfLiteTensor* input1 = 
interpreter.input(1); + TfLiteTensor* input2 = interpreter.input(2); + TfLiteTensor* output = interpreter.output(0); + float input1_data[] = {2.0, 5.0}; + float input2_data[] = {3.0, 7.0}; + memcpy(input1->data.f, input1_data, 2 * sizeof(float)); + memcpy(input2->data.f, input2_data, 2 * sizeof(float)); + condition->data.b[0] = true; + + interpreter.Invoke(); + + TF_LITE_MICRO_EXPECT_EQ(output->data.f[0], 5.0f); + TF_LITE_MICRO_EXPECT_EQ(output->data.f[1], 12.0f); +} + +TF_LITE_MICRO_TEST(IfShouldInvokeSubgraphConditionFalse) { + constexpr int kArenaSize = 5000; + uint8_t arena[kArenaSize]; + + const tflite::Model* model = + tflite::testing::GetSimpleModelWithSubgraphsAndIf(); + tflite::MicroMutableOpResolver<3> resolver; + resolver.AddIf(); + resolver.AddAdd(); + resolver.AddMul(); + tflite::MicroInterpreter interpreter(model, resolver, arena, kArenaSize); + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, interpreter.AllocateTensors()); + TfLiteTensor* condition = interpreter.input(0); + TfLiteTensor* input1 = interpreter.input(1); + TfLiteTensor* input2 = interpreter.input(2); + TfLiteTensor* output = interpreter.output(0); + float input1_data[] = {2.0, 5.0}; + float input2_data[] = {3.0, 7.0}; + memcpy(input1->data.f, input1_data, 2 * sizeof(float)); + memcpy(input2->data.f, input2_data, 2 * sizeof(float)); + condition->data.b[0] = false; + + interpreter.Invoke(); + + TF_LITE_MICRO_EXPECT_EQ(output->data.f[0], 6.0f); + TF_LITE_MICRO_EXPECT_EQ(output->data.f[1], 35.0f); +} + +TF_LITE_MICRO_TEST(IfShouldNotOverwriteTensorAcrossSubgraphs) { + constexpr int kArenaSize = 5000; + uint8_t arena[kArenaSize]; + + const tflite::Model* model = + tflite::testing::GetModelWithIfAndSubgraphInputTensorOverlap(); + + tflite::testing::TestingOpResolver op_resolver; + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, + tflite::testing::GetTestingOpResolver(op_resolver)); + + tflite::MicroInterpreter interpreter(model, op_resolver, arena, kArenaSize); + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, 
interpreter.AllocateTensors()); + + TfLiteTensor* condition = interpreter.input(0); + TfLiteTensor* input1 = interpreter.input(1); + TfLiteTensor* input2 = interpreter.input(2); + TfLiteTensor* output = interpreter.output(0); + constexpr int32_t block_size = + tflite::MicroArenaBufferAlignment() / sizeof(int32_t); + int32_t input1_data[2 * block_size] = {1, 1, 1, 1, 2, 2, 2, 2}; + int32_t input2_data[4 * block_size] = {3, 3, 3, 3, 4, 4, 4, 4, + 5, 5, 5, 5, 6, 6, 6, 6}; + memcpy(input1->data.i32, input1_data, 2 * block_size * sizeof(int32_t)); + memcpy(input2->data.i32, input2_data, 4 * block_size * sizeof(int32_t)); + condition->data.b[0] = true; + + interpreter.Invoke(); + // Input1 and input2 are first concatenated, then cut to 3 blocks; + // the new tensor of size 3 is then concatenated with input2. + int32_t expect_output_data[8 * block_size] = {1, 1, 1, 1, 2, 2, 2, 2, 3, 3, 3, + 3, 3, 3, 3, 3, 4, 4, 4, 4, 5, 5, + 5, 5, 6, 6, 6, 6, 0, 0, 0, 0}; + for (int i = 0; i < 8 * block_size; i++) { + TF_LITE_MICRO_EXPECT_EQ(output->data.i32[i], expect_output_data[i]); + } +} + +TF_LITE_MICRO_TESTS_END diff --git a/tensorflow/lite/micro/kernels/kernel_runner.cc b/tensorflow/lite/micro/kernels/kernel_runner.cc new file mode 100644 index 0000000..d5112a1 --- /dev/null +++ b/tensorflow/lite/micro/kernels/kernel_runner.cc @@ -0,0 +1,134 @@ +/* Copyright 2020 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/ + +#include "tensorflow/lite/micro/kernels/kernel_runner.h" + +#include "tensorflow/lite/micro/arena_allocator/single_arena_buffer_allocator.h" +#include "tensorflow/lite/micro/micro_arena_constants.h" +#include "tensorflow/lite/micro/micro_log.h" +#include "tensorflow/lite/micro/test_helpers.h" + +namespace tflite { +namespace micro { + +// TODO(b/161841696): Consider moving away from global arena buffers: +constexpr int KernelRunner::kKernelRunnerBufferSize_; +uint8_t KernelRunner::kKernelRunnerBuffer_[]; + +void ClearBufferApi(TfLiteContext* context_) { + context_->GetScratchBuffer = nullptr; + context_->GetExternalContext = nullptr; + context_->AllocatePersistentBuffer = nullptr; + context_->RequestScratchBufferInArena = nullptr; +} + +KernelRunner::KernelRunner(const TFLMRegistration& registration, + TfLiteTensor* tensors, int tensors_size, + TfLiteIntArray* inputs, TfLiteIntArray* outputs, + void* builtin_data, TfLiteIntArray* intermediates) + : registration_(registration), + allocator_(SingleArenaBufferAllocator::Create(kKernelRunnerBuffer_, + kKernelRunnerBufferSize_)), + mock_micro_graph_(allocator_), + fake_micro_context_(tensors, allocator_, &mock_micro_graph_) { + // Prepare TfLiteContext: + context_.impl_ = static_cast(&fake_micro_context_); + context_.ReportError = MicroContextReportOpError; + context_.recommended_num_threads = 1; + context_.GetTensor = MicroContextGetTensor; + context_.GetEvalTensor = MicroContextGetEvalTensor; + tflite::micro::ClearBufferApi(&context_); + context_.AllocatePersistentBuffer = MicroContextAllocatePersistentBuffer; + + context_.recommended_num_threads = 0; + + // Prepare TfLiteNode: + node_.inputs = inputs; + node_.outputs = outputs; + node_.builtin_data = builtin_data; + node_.intermediates = intermediates; +} + +bool KernelRunner::ValidateTempBufferDeallocated() { + return fake_micro_context_.IsAllTempTfLiteTensorDeallocated(); +} + 
+TfLiteStatus KernelRunner::InitAndPrepare(const char* init_data, + size_t length) { + if (registration_.init) { + tflite::micro::ClearBufferApi(&context_); + context_.AllocatePersistentBuffer = MicroContextAllocatePersistentBuffer; + node_.user_data = registration_.init(&context_, init_data, length); + } + + TF_LITE_ENSURE(&context_, ValidateTempBufferDeallocated()); + + if (registration_.prepare) { + tflite ::micro::ClearBufferApi(&context_); + context_.AllocatePersistentBuffer = MicroContextAllocatePersistentBuffer; + context_.RequestScratchBufferInArena = + MicroContextRequestScratchBufferInArena; + context_.GetExternalContext = MicroContextGetExternalContext; + TF_LITE_ENSURE_STATUS(registration_.prepare(&context_, &node_)); + } + + TF_LITE_ENSURE(&context_, ValidateTempBufferDeallocated()); + + return kTfLiteOk; +} + +TfLiteStatus KernelRunner::Invoke() { + tflite::micro::ClearBufferApi(&context_); + context_.GetScratchBuffer = MicroContextGetScratchBuffer; + + if (registration_.invoke == nullptr) { + MicroPrintf("TFLMRegistration missing invoke function pointer!"); + return kTfLiteError; + } + + TF_LITE_ENSURE_STATUS(registration_.invoke(&context_, &node_)); + + TF_LITE_ENSURE(&context_, ValidateTempBufferDeallocated()); + + return kTfLiteOk; +} + +TfLiteStatus KernelRunner::Reset() { + tflite::micro::ClearBufferApi(&context_); + context_.GetScratchBuffer = MicroContextGetScratchBuffer; + + if (registration_.reset == nullptr) { + MicroPrintf("TFLMRegistration missing reset function pointer!"); + return kTfLiteError; + } + + registration_.reset(&context_, node_.user_data); + return kTfLiteOk; +} + +TfLiteStatus KernelRunner::Free() { + tflite::micro::ClearBufferApi(&context_); + context_.GetScratchBuffer = MicroContextGetScratchBuffer; + + if (registration_.free == nullptr) { + MicroPrintf("TFLMRegistration missing free function pointer!"); + return kTfLiteError; + } + + registration_.free(&context_, node_.user_data); + return kTfLiteOk; +} +} // namespace 
micro +} // namespace tflite diff --git a/tensorflow/lite/micro/kernels/kernel_runner.h b/tensorflow/lite/micro/kernels/kernel_runner.h new file mode 100644 index 0000000..d617c44 --- /dev/null +++ b/tensorflow/lite/micro/kernels/kernel_runner.h @@ -0,0 +1,86 @@ +/* Copyright 2020 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#ifndef TENSORFLOW_LITE_MICRO_KERNELS_KERNEL_RUNNER_H_ +#define TENSORFLOW_LITE_MICRO_KERNELS_KERNEL_RUNNER_H_ + +#include "tensorflow/lite/c/common.h" +#include "tensorflow/lite/kernels/internal/compatibility.h" +#include "tensorflow/lite/micro/arena_allocator/single_arena_buffer_allocator.h" +#include "tensorflow/lite/micro/fake_micro_context.h" +#include "tensorflow/lite/micro/mock_micro_graph.h" + +namespace tflite { +namespace micro { + +// Helper class to perform a simulated kernel (i.e. TFLMRegistration) +// lifecycle (init, prepare, invoke). All internal allocations are handled by +// this class. Simply pass in the registration, list of required tensors, inputs +// array, outputs array, and any pre-builtin data. Calling Invoke() will +// automatically walk the kernel and outputs will be ready on the TfLiteTensor +// output provided during construction. 
+class KernelRunner { + public: + KernelRunner(const TFLMRegistration& registration, TfLiteTensor* tensors, + int tensors_size, TfLiteIntArray* inputs, + TfLiteIntArray* outputs, void* builtin_data, + TfLiteIntArray* intermediates = nullptr); + + // Calls init and prepare on the kernel (i.e. TFLMRegistration) struct. + // Any exceptions will be DebugLog'd and returned as a status code. + TfLiteStatus InitAndPrepare(const char* init_data = nullptr, + size_t length = 0); + + // Calls invoke on a given TFLMRegistration pointer. After successful + // invoke, results will be available in the output tensor as passed into the + // constructor of this class. + TfLiteStatus Invoke(); + + // Calls Free on a given TFLMRegistration pointer(if it's implemented). + // After successful Free, kTfLiteOk status will be returned. If Free is not + // implemented for a given kernel kTfLiteError will be returned. + TfLiteStatus Free(); + + // Calls Reset on a given TFLMRegistration pointer(if it's implemented). + // After successful Reset, kTfLiteOk status will be returned. If Free is not + // implemented for a given kernel kTfLiteError will be returned. + TfLiteStatus Reset(); + + // Returns a pointer to the internal MockMicroGraph which KernelRunner uses + // to stub out MicroGraph methods and track invocations on each subgraph. + MockMicroGraph* GetMockGraph() { return &mock_micro_graph_; } + + // Returns true if all temp buffer in tests are deallocated. + // TODO(b/209453859): move this function to private after deallocation checks + // are enabled for all kernel tests. 
+ bool ValidateTempBufferDeallocated(); + + private: + static constexpr int kKernelRunnerBufferSize_ = 10000; + static uint8_t kKernelRunnerBuffer_[kKernelRunnerBufferSize_]; + + TfLiteContext context_ = {}; + TfLiteNode node_ = {}; + const TFLMRegistration& registration_; + + SingleArenaBufferAllocator* allocator_; + MockMicroGraph mock_micro_graph_; + FakeMicroContext fake_micro_context_; +}; + +} // namespace micro +} // namespace tflite + +#endif // TENSORFLOW_LITE_MICRO_KERNELS_KERNEL_RUNNER_H_ diff --git a/tensorflow/lite/micro/kernels/kernel_util.cc b/tensorflow/lite/micro/kernels/kernel_util.cc new file mode 100644 index 0000000..ffffa08 --- /dev/null +++ b/tensorflow/lite/micro/kernels/kernel_util.cc @@ -0,0 +1,279 @@ +/* Copyright 2020 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/ + +#include "tensorflow/lite/micro/kernels/kernel_util.h" + +#include "tensorflow/lite/c/common.h" +#include "tensorflow/lite/kernels/internal/portable_tensor_utils.h" +#include "tensorflow/lite/micro/memory_helpers.h" +#include "tensorflow/lite/micro/micro_log.h" + +namespace tflite { +namespace micro { + +namespace { + +int ValidateTensorIndexing(const TfLiteContext* context, int index, + int max_size, const int* tensor_indices) { + if (index >= 0 && index < max_size) { + const int tensor_index = tensor_indices[index]; + if (tensor_index != kTfLiteOptionalTensor) { + return tensor_index; + } + } + return -1; +} + +} // namespace + +TFLMRegistration RegisterOp( + void* (*init)(TfLiteContext* context, const char* buffer, size_t length), + TfLiteStatus (*prepare)(TfLiteContext* context, TfLiteNode* node), + TfLiteStatus (*invoke)(TfLiteContext* context, TfLiteNode* node), + void (*free)(TfLiteContext* context, void* buffer), + void (*reset)(TfLiteContext* context, void* buffer)) { + return {/*init=*/init, + /*free=*/free, + /*prepare=*/prepare, + /*invoke=*/invoke, + /*reset*/ reset, + /*builtin_code=*/0, + /*custom_name=*/nullptr}; +} + +// Returns a mutable tensor for a given input index. is_variable must be checked +// during prepare when the full TfLiteTensor is available. +TfLiteEvalTensor* GetMutableEvalInput(const TfLiteContext* context, + const TfLiteNode* node, int index) { + TFLITE_DCHECK(context != nullptr); + TFLITE_DCHECK(node != nullptr); + const int tensor_index = ValidateTensorIndexing( + context, index, node->inputs->size, node->inputs->data); + + if (tensor_index < 0) { + return nullptr; + } + + return context->GetEvalTensor(context, node->inputs->data[index]); +} + +// Returns the TfLiteEvalTensor struct for a given input index in a node. 
+const TfLiteEvalTensor* GetEvalInput(const TfLiteContext* context, + const TfLiteNode* node, int index) { + return GetMutableEvalInput(context, node, index); +} + +// Returns the TfLiteEvalTensor struct for a given output index in a node. +TfLiteEvalTensor* GetEvalOutput(const TfLiteContext* context, + const TfLiteNode* node, int index) { + TFLITE_DCHECK(context != nullptr); + TFLITE_DCHECK(node != nullptr); + return context->GetEvalTensor(context, node->outputs->data[index]); +} + +bool HaveSameShapes(const TfLiteEvalTensor* input1, + const TfLiteEvalTensor* input2) { + TFLITE_DCHECK(input1 != nullptr); + TFLITE_DCHECK(input2 != nullptr); + return TfLiteIntArrayEqual(input1->dims, input2->dims); +} + +const RuntimeShape GetTensorShape(const TfLiteEvalTensor* tensor) { + if (tensor == nullptr || tensor->dims == nullptr) { + return RuntimeShape(); + } + TfLiteIntArray* dims = tensor->dims; + const int dims_size = dims->size; + const int32_t* dims_data = reinterpret_cast(dims->data); + return RuntimeShape(dims_size, dims_data); +} + +PaddingType RuntimePaddingType(TfLitePadding padding) { + switch (padding) { + case TfLitePadding::kTfLitePaddingSame: + return PaddingType::kSame; + case TfLitePadding::kTfLitePaddingValid: + return PaddingType::kValid; + case TfLitePadding::kTfLitePaddingUnknown: + default: + return PaddingType::kNone; + } +} + +// Relocate tensor dims from FlatBuffer to the persistent storage arena. +// The old dims data is copied to the new storage area. +// The tensor and eval_tensor must be the same tensor. +// Only use during Prepare phase. 
+TfLiteStatus CreateWritableTensorDimsWithCopy(TfLiteContext* context, + TfLiteTensor* tensor, + TfLiteEvalTensor* eval_tensor) { + TF_LITE_ENSURE(context, tensor != nullptr); + TF_LITE_ENSURE(context, eval_tensor != nullptr); + TF_LITE_ENSURE(context, context->AllocatePersistentBuffer != nullptr); + int ranks = tensor->dims->size; + size_t alloc_size = TfLiteIntArrayGetSizeInBytes(ranks); + TfLiteIntArray* new_dims = static_cast( + context->AllocatePersistentBuffer(context, alloc_size)); + TfLiteIntArray* old_dims = tensor->dims; + new_dims->size = ranks; + tensor->dims = new_dims; + eval_tensor->dims = new_dims; + for (int i = 0; i < ranks; i++) { + new_dims->data[i] = old_dims->data[i]; + } + + return kTfLiteOk; +} + +// Verify that both tensors have the same type and size, then return the size +// of both tensors in bytes if they are the same, or -1 if they are different. +size_t ValidateAndGetTensorSizes(const TfLiteEvalTensor* tensor1, + const TfLiteEvalTensor* tensor2) { + TFLITE_DCHECK(tensor1->type == tensor2->type); + size_t tensor1_size = 0; + size_t tensor2_size = 0; + TfLiteEvalTensorByteLength(tensor1, &tensor1_size); + TfLiteEvalTensorByteLength(tensor2, &tensor2_size); + return (tensor1_size == tensor2_size) ? tensor1_size : -1; +} + +TfLiteStatus CopyOpInputsToOpOutputs(TfLiteContext* context, TfLiteNode* node) { + TF_LITE_ENSURE(context, node->inputs->size == node->outputs->size); + for (int i = 0; i < node->inputs->size; i++) { + const TfLiteEvalTensor* input = + tflite::micro::GetEvalInput(context, node, i); + TfLiteEvalTensor* output = tflite::micro::GetEvalOutput(context, node, i); + int bytes = ValidateAndGetTensorSizes(input, output); + TF_LITE_ENSURE(context, bytes >= 0); + memcpy(output->data.raw, input->data.raw, bytes); + } + return kTfLiteOk; +} + +// Args: +// 1. int8_t tensor_data - int8_t buffer of unknown size who's data you'd +// like +// to print +// 2. 
int n_btyes - a small int representing number of bytes you want to +// print +// to debug output. It should always be <= tensor_data's size. +// 3. prefix - optional message you'd like to print before printing bytes +// +// Purpose: +// Function takes in parameters above and prints n_bytes bytes from the +// tensor_data buffer. This can be use to debug the output of a model and it's +// op. + +void PrintNBytes(const int8_t* tensor_data, int n_bytes, const char* prefix) { + if (prefix != nullptr) { + MicroPrintf("%s", prefix); + } + + for (int i = 0; i < n_bytes; ++i) { + MicroPrintf(" %x", tensor_data[i]); + } + MicroPrintf("\n"); +} + +// same as the PrintNBytes above but the buffer needs to be extracted out of the +// TfLiteEvalTensor* +void PrintNBytes(const TfLiteEvalTensor* tensor, int n_bytes, + const char* prefix) { + const int8_t* tensor_data = tflite::micro::GetTensorData(tensor); + PrintNBytes(tensor_data, n_bytes, prefix); +} + +// same as the PrintNBytes above but the buffer needs to be extracted out of the +// TfLiteEvalTensor* +void PrintNBytes(const TfLiteTensor* tensor, int n_bytes, const char* prefix) { + const int8_t* tensor_data = tflite::GetTensorData(tensor); + PrintNBytes(tensor_data, n_bytes, prefix); +} + +TfLiteStatus CopyOpInputsToSubgraphInputs(TfLiteContext* context, + TfLiteNode* node, + MicroGraph* graph_info, + int subgraph_idx, + int first_tensor_idx) { + TF_LITE_ENSURE(context, + static_cast(node->inputs->size - first_tensor_idx) == + graph_info->NumSubgraphInputs(subgraph_idx)); + for (int i = 0; i < node->inputs->size - first_tensor_idx; i++) { + const TfLiteEvalTensor* input = + tflite::micro::GetEvalInput(context, node, i + first_tensor_idx); + TfLiteEvalTensor* subgraph_input = + graph_info->GetSubgraphInput(subgraph_idx, i); + int bytes = ValidateAndGetTensorSizes(input, subgraph_input); + TF_LITE_ENSURE(context, bytes >= 0); + memcpy(subgraph_input->data.raw, input->data.raw, bytes); + } + return kTfLiteOk; +} + +TfLiteStatus 
CopyOpOutputsToSubgraphInputs(TfLiteContext* context, + TfLiteNode* node, + MicroGraph* graph_info, + int subgraph_idx) { + TF_LITE_ENSURE(context, static_cast(node->outputs->size) == + graph_info->NumSubgraphInputs(subgraph_idx)); + for (int i = 0; i < node->outputs->size; i++) { + TfLiteEvalTensor* output = tflite::micro::GetEvalOutput(context, node, i); + TfLiteEvalTensor* subgraph_input = + graph_info->GetSubgraphInput(subgraph_idx, i); + int bytes = ValidateAndGetTensorSizes(output, subgraph_input); + TF_LITE_ENSURE(context, bytes >= 0); + memcpy(subgraph_input->data.raw, output->data.raw, bytes); + } + return kTfLiteOk; +} + +TfLiteStatus CopySubgraphOutputsToOpOutputs(TfLiteContext* context, + TfLiteNode* node, + MicroGraph* graph_info, + int subgraph_idx) { + TF_LITE_ENSURE(context, static_cast(node->outputs->size) == + graph_info->NumSubgraphOutputs(subgraph_idx)); + for (int i = 0; i < node->outputs->size; i++) { + TfLiteEvalTensor* output = tflite::micro::GetEvalOutput(context, node, i); + TfLiteEvalTensor* subgraph_output = + graph_info->GetSubgraphOutput(subgraph_idx, i); + int bytes = ValidateAndGetTensorSizes(output, subgraph_output); + TF_LITE_ENSURE(context, bytes >= 0); + memcpy(output->data.raw, subgraph_output->data.raw, bytes); + } + return kTfLiteOk; +} + +TfLiteEvalTensor MakeUnpackedInt4Tensor(TfLiteContext* context, + int scratch_buffer_index, + const TfLiteEvalTensor* tensor) { + if (tensor->type != kTfLiteInt4) { + return *tensor; + } + + TfLiteEvalTensor new_tensor; + new_tensor.data.data = static_cast( + context->GetScratchBuffer(context, scratch_buffer_index)); + new_tensor.dims = tensor->dims; + new_tensor.type = kTfLiteInt8; + tflite::tensor_utils::UnpackDenseInt4IntoInt8( + tflite::micro::GetTensorData(tensor), + tflite::micro::GetTensorShape(tensor).FlatSize(), + tflite::micro::GetTensorData(&new_tensor)); + return new_tensor; +} + +} // namespace micro +} // namespace tflite diff --git a/tensorflow/lite/micro/kernels/kernel_util.h 
b/tensorflow/lite/micro/kernels/kernel_util.h new file mode 100644 index 0000000..080a0b3 --- /dev/null +++ b/tensorflow/lite/micro/kernels/kernel_util.h @@ -0,0 +1,146 @@ +/* Copyright 2021 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#ifndef TENSORFLOW_LITE_MICRO_KERNELS_KERNEL_UTIL_H_ +#define TENSORFLOW_LITE_MICRO_KERNELS_KERNEL_UTIL_H_ + +#include + +#include "tensorflow/lite/c/builtin_op_data.h" +#include "tensorflow/lite/c/common.h" +#include "tensorflow/lite/kernels/internal/compatibility.h" +#include "tensorflow/lite/kernels/internal/tensor_ctypes.h" +#include "tensorflow/lite/kernels/internal/types.h" +#include "tensorflow/lite/micro/micro_context.h" + +namespace tflite { +namespace micro { + +TFLMRegistration RegisterOp( + void* (*init)(TfLiteContext* context, const char* buffer, size_t length), + TfLiteStatus (*prepare)(TfLiteContext* context, TfLiteNode* node), + TfLiteStatus (*invoke)(TfLiteContext* context, TfLiteNode* node), + void (*free)(TfLiteContext* context, void* buffer) = nullptr, + void (*reset)(TfLiteContext* context, void* buffer) = nullptr); + +// Prints out n bytes in a int8_t buffer as hex +void PrintNBytes(const int8_t* tensor_data, int n_bytes, + const char* prefix = nullptr); + +// Prints out the n bytes in a TfLiteEvalTensor as hex +void PrintNBytes(const TfLiteEvalTensor* tensor, int n_bytes, + const char* prefix = nullptr); 
+ +// Prints out n bytes in a TfLiteTensor as hex +void PrintNBytes(const TfLiteTensor* tensor, int n_bytes, + const char* prefix = nullptr); + +// Returns a mutable tensor for a given input index. is_variable must be checked +// during prepare when the full TfLiteTensor is available. +TfLiteEvalTensor* GetMutableEvalInput(const TfLiteContext* context, + const TfLiteNode* node, int index); + +// Returns the TfLiteEvalTensor struct for a given input index in a node. +const TfLiteEvalTensor* GetEvalInput(const TfLiteContext* context, + const TfLiteNode* node, int index); + +// Returns the TfLiteEvalTensor struct for a given output index in a node. +TfLiteEvalTensor* GetEvalOutput(const TfLiteContext* context, + const TfLiteNode* node, int index); + +// Returns data for a TfLiteEvalTensor struct that are expected to exist. +template +T* GetTensorData(TfLiteEvalTensor* tensor) { + TFLITE_DCHECK(tensor != nullptr); + return reinterpret_cast(tensor->data.raw); +} + +// Returns const data for a TfLiteEvalTensor struct that are expected to exist. +template +const T* GetTensorData(const TfLiteEvalTensor* tensor) { + TFLITE_DCHECK(tensor != nullptr); + return reinterpret_cast(tensor->data.raw); +} + +// Returns data for a TfLiteEvalTensor struct that could be null. +template +T* GetOptionalTensorData(TfLiteEvalTensor* tensor) { + return tensor == nullptr ? nullptr : reinterpret_cast(tensor->data.raw); +} + +// Returns const data for a TfLiteEvalTensor struct that could be null. +template +const T* GetOptionalTensorData(const TfLiteEvalTensor* tensor) { + return tensor == nullptr ? nullptr + : reinterpret_cast(tensor->data.raw); +} + +// Returns the shape of a TfLiteEvalTensor struct. +const RuntimeShape GetTensorShape(const TfLiteEvalTensor* tensor); + +// Return true if the given tensors have the same shape. 
+bool HaveSameShapes(const TfLiteEvalTensor* input1, + const TfLiteEvalTensor* input2); + +PaddingType RuntimePaddingType(TfLitePadding padding); + +// Relocate tensor dims from FlatBuffer to the persistent storage arena. +// The old dims data is copied to the new storage area. +// The tensor and eval_tensor must be the same tensor. +// Only use during Prepare phase. +TfLiteStatus CreateWritableTensorDimsWithCopy(TfLiteContext* context, + TfLiteTensor* tensor, + TfLiteEvalTensor* eval_tensor); + +// Copy all op input tensors to op output tensors. Requires all op input tensor +// shapes and types to be identical to op output tensor shapes and types. +TfLiteStatus CopyOpInputsToOpOutputs(TfLiteContext* context, TfLiteNode* node); + +// Copy all op input tensors to subgraph input tensors. Requires all op input +// tensor shapes and types to be identical to subgraph input tensor shapes and +// types. +TfLiteStatus CopyOpInputsToSubgraphInputs(TfLiteContext* context, + TfLiteNode* node, + MicroGraph* graph_info, + int subgraph_idx, + int first_tensor_idx); + +// Copy all op output tensors to subgraph input tensors. Requires all op output +// tensor shapes and types to be identical to subgraph input tensor shapes and +// types. +TfLiteStatus CopyOpOutputsToSubgraphInputs(TfLiteContext* context, + TfLiteNode* node, + MicroGraph* graph_info, + int subgraph_idx); + +// Copy all subgraph output tensors to op outputs. Requires all subgraph output +// tensor shapes and types to be identical to op output tensor shapes and types. +TfLiteStatus CopySubgraphOutputsToOpOutputs(TfLiteContext* context, + TfLiteNode* node, + MicroGraph* graph_info, + int subgraph_idx); + +// If tensor is INT4, make a new TfLiteEvalTensor with data unpacked into +// a scratch buffer. The returned tensor will have the kTfLiteInt8 type. +// Assume scratch buffer is previously requested in Prepare, and +// scratch_buffer_index can be used to retrieve that buffer. 
+// If the tensor is not INT4, a shallow copy is returned. +TfLiteEvalTensor MakeUnpackedInt4Tensor(TfLiteContext* context, + int scratch_buffer_index, + const TfLiteEvalTensor* tensor); +} // namespace micro +} // namespace tflite + +#endif // TENSORFLOW_LITE_MICRO_KERNELS_KERNEL_UTIL_H_ diff --git a/tensorflow/lite/micro/kernels/l2_pool_2d.cc b/tensorflow/lite/micro/kernels/l2_pool_2d.cc new file mode 100644 index 0000000..c5eb70f --- /dev/null +++ b/tensorflow/lite/micro/kernels/l2_pool_2d.cc @@ -0,0 +1,142 @@ +/* Copyright 2021 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ +#include +#include + +#include "tensorflow/lite/c/common.h" +#include "tensorflow/lite/kernels/internal/reference/pooling.h" +#include "tensorflow/lite/kernels/internal/types.h" +#include "tensorflow/lite/kernels/kernel_util.h" +#include "tensorflow/lite/kernels/padding.h" +#include "tensorflow/lite/micro/kernels/kernel_util.h" +#include "tensorflow/lite/micro/micro_log.h" + +namespace tflite { +namespace { + +// Input/output tensor index. 
+constexpr int kInputTensor = 0; +constexpr int kOutputTensor = 0; + +// required rank for input/output tensor shape +constexpr int kTensorShapeRank = 4; + +// input/output tensor shape rank associations +enum { kBatchRank = 0, kHeightRank, kWidthRank, kChannelRank }; + +TfLiteStatus L2Prepare(TfLiteContext* context, TfLiteNode* node) { + MicroContext* micro_context = GetMicroContext(context); + + auto* params = static_cast(node->builtin_data); + + TF_LITE_ENSURE_EQ(context, NumInputs(node), 1); + TF_LITE_ENSURE_EQ(context, NumOutputs(node), 1); + TfLiteTensor* output = + micro_context->AllocateTempOutputTensor(node, kOutputTensor); + TF_LITE_ENSURE(context, output != nullptr); + TfLiteTensor* input = + micro_context->AllocateTempInputTensor(node, kInputTensor); + TF_LITE_ENSURE(context, input != nullptr); + TF_LITE_ENSURE_EQ(context, NumDimensions(input), kTensorShapeRank); + TF_LITE_ENSURE_EQ(context, NumDimensions(output), kTensorShapeRank); + TF_LITE_ENSURE_TYPES_EQ(context, input->type, output->type); + + int batches = SizeOfDimension(input, kBatchRank); + int height = SizeOfDimension(input, kHeightRank); + int width = SizeOfDimension(input, kWidthRank); + int channels_out = SizeOfDimension(input, kChannelRank); + + // Matching GetWindowedOutputSize in TensorFlow. + auto padding = params->padding; + int out_width, out_height; + + params->computed.padding = ComputePaddingHeightWidth( + params->stride_height, params->stride_width, 1, 1, height, width, + params->filter_height, params->filter_width, padding, &out_height, + &out_width); + + // We currently don't have a quantized implementation of L2Pool + TF_LITE_ENSURE_TYPES_EQ(context, input->type, kTfLiteFloat32); + + // We must update the output tensor dimensions. + // The dims storage is expected to be the same area in memory + // for both TfLiteTensor and TfLiteEvalTensor. This is important + // because TfLiteTensor in the MicroInterpreter is a temporary + // allocation. 
For the KernelRunner interpreter, TfLiteEvalTensor + // is a temporary allocation. We must therefore relocate the dims + // from the FlatBuffer to the persistent storage arena. + TfLiteEvalTensor* output_eval = + tflite::micro::GetEvalOutput(context, node, kOutputTensor); + TF_LITE_ENSURE_OK(context, tflite::micro::CreateWritableTensorDimsWithCopy( + context, output, output_eval)); + output->dims->data[kBatchRank] = batches; + output->dims->data[kHeightRank] = out_height; + output->dims->data[kWidthRank] = out_width; + output->dims->data[kChannelRank] = channels_out; + + micro_context->DeallocateTempTfLiteTensor(output); + micro_context->DeallocateTempTfLiteTensor(input); + + return kTfLiteOk; +} + +void L2EvalFloat(const TfLitePoolParams& params, const TfLiteEvalTensor& input, + tflite::PoolParams* op_params, TfLiteEvalTensor* output) { + float activation_min, activation_max; + CalculateActivationRange(params.activation, &activation_min, &activation_max); + + op_params->float_activation_min = activation_min; + op_params->float_activation_max = activation_max; + reference_ops::L2Pool(*op_params, tflite::micro::GetTensorShape(&input), + tflite::micro::GetTensorData(&input), + tflite::micro::GetTensorShape(output), + tflite::micro::GetTensorData(output)); +} + +TfLiteStatus L2Eval(TfLiteContext* context, TfLiteNode* node) { + auto* params = static_cast(node->builtin_data); + + TfLiteEvalTensor* output = + tflite::micro::GetEvalOutput(context, node, kOutputTensor); + const TfLiteEvalTensor* input = + tflite::micro::GetEvalInput(context, node, kInputTensor); + + tflite::PoolParams op_params; + op_params.stride_height = params->stride_height; + op_params.stride_width = params->stride_width; + op_params.filter_height = params->filter_height; + op_params.filter_width = params->filter_width; + op_params.padding_values.height = params->computed.padding.height; + op_params.padding_values.width = params->computed.padding.width; + + switch (input->type) { // Already know 
in/out types are same. + case kTfLiteFloat32: + L2EvalFloat(*params, *input, &op_params, output); + break; + default: + MicroPrintf("L2_POOL_2D only supports float32 currently, got %s.", + TfLiteTypeGetName(input->type)); + return kTfLiteError; + } + return kTfLiteOk; +} + +} // namespace + +TFLMRegistration Register_L2_POOL_2D() { + return tflite::micro::RegisterOp(nullptr, L2Prepare, L2Eval); +} + +} // namespace tflite diff --git a/tensorflow/lite/micro/kernels/l2_pool_2d_test.cc b/tensorflow/lite/micro/kernels/l2_pool_2d_test.cc new file mode 100644 index 0000000..a8c20b2 --- /dev/null +++ b/tensorflow/lite/micro/kernels/l2_pool_2d_test.cc @@ -0,0 +1,222 @@ +/* Copyright 2021 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/ +#include + +#include "tensorflow/lite/c/builtin_op_data.h" +#include "tensorflow/lite/c/common.h" +#include "tensorflow/lite/micro/kernels/kernel_runner.h" +#include "tensorflow/lite/micro/test_helpers.h" +#include "tensorflow/lite/micro/testing/micro_test.h" + +namespace tflite { +namespace testing { +namespace { + +constexpr float kTolerance = 1e-5; + +constexpr int kOutputDimsCount = 4; + +struct L2Pool2DTestParams { + TfLitePadding padding = kTfLitePaddingValid; + int stride_width = 2; + int stride_height = 2; + int filter_width = 2; + int filter_height = 2; + TfLiteFusedActivation activation = kTfLiteActNone; + float compare_tolerance = kTolerance; + // output_dims_data is a TfLiteIntArray + int output_dims_data[kOutputDimsCount + 1] = {kOutputDimsCount, 0, 0, 0, 0}; +}; + +void ExecuteL2Pool2DTest(const L2Pool2DTestParams& params, + TfLiteTensor* tensors, int tensors_count) { + int kInputArrayData[] = {1, 0}; + TfLiteIntArray* inputs_array = IntArrayFromInts(kInputArrayData); + int kOutputArrayData[] = {1, 1}; + TfLiteIntArray* outputs_array = IntArrayFromInts(kOutputArrayData); + + TfLitePoolParams op_params = {}; + op_params.activation = params.activation; + op_params.filter_height = params.filter_height; + op_params.filter_width = params.filter_width; + op_params.padding = params.padding; + op_params.stride_height = params.stride_height; + op_params.stride_width = params.stride_width; + + const TFLMRegistration registration = tflite::Register_L2_POOL_2D(); + micro::KernelRunner runner(registration, tensors, tensors_count, inputs_array, + outputs_array, static_cast(&op_params)); + + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, runner.InitAndPrepare()); + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, runner.Invoke()); +} + +template +void TestL2Pool2D(L2Pool2DTestParams& params, int* input_dims_data, + const T* input_data, int* expected_dims_data, + const T* expected_data, T* output_data) { + 
TfLiteIntArray* input_dims = IntArrayFromInts(input_dims_data); + TfLiteIntArray* expected_dims = IntArrayFromInts(expected_dims_data); + TfLiteIntArray* output_dims = IntArrayFromInts(params.output_dims_data); + const int expected_count = ElementCount(*expected_dims); + + TfLiteTensor tensors[] = { + CreateTensor(input_data, input_dims), + CreateTensor(output_data, output_dims), + }; + constexpr int tensors_count = std::extent::value; + ExecuteL2Pool2DTest(params, tensors, tensors_count); + + for (int i = 0; i < expected_count; i++) { + TF_LITE_MICRO_EXPECT_NEAR(expected_data[i], output_data[i], + params.compare_tolerance); + } + for (int i = 0; i < expected_dims->size; i++) { + // output dims will have been relocated during prepare phase, + // so use the tensor dims pointer. + TF_LITE_MICRO_EXPECT_EQ(expected_dims->data[i], tensors[1].dims->data[i]); + } +} + +} // namespace +} // namespace testing +} // namespace tflite + +TF_LITE_MICRO_TESTS_BEGIN + +TF_LITE_MICRO_TEST(FloatPoolingOpTestL2Pool) { + int kInputDims[] = {4, 1, 2, 4, 1}; + constexpr float kInput[] = { + 0, 6, 2, 4, // + 3, 2, 10, 7, // + }; + int kExpectDims[] = {4, 1, 1, 2, 1}; + constexpr float kExpect[] = {3.5, 6.5}; + constexpr int kOutputCount = std::extent::value; + float output_data[kOutputCount]; + tflite::testing::L2Pool2DTestParams params; + + tflite::testing::TestL2Pool2D(params, kInputDims, kInput, kExpectDims, + kExpect, output_data); +} + +TF_LITE_MICRO_TEST(FloatPoolingOpTestL2PoolActivationRelu) { + int kInputDims[] = {4, 1, 2, 4, 1}; + constexpr float kInput[] = { + -1, -6, 2, 4, // + -3, -2, 10, 7, // + }; + int kExpectDims[] = {4, 1, 1, 2, 1}; + constexpr float kExpect[] = {3.53553, 6.5}; + constexpr int kOutputCount = std::extent::value; + float output_data[kOutputCount]; + tflite::testing::L2Pool2DTestParams params; + params.activation = kTfLiteActRelu; + + tflite::testing::TestL2Pool2D(params, kInputDims, kInput, kExpectDims, + kExpect, output_data); +} + 
+TF_LITE_MICRO_TEST(FloatPoolingOpTestL2PoolActivationRelu1) { + int kInputDims[] = {4, 1, 2, 4, 1}; + constexpr float kInput[] = { + -0.1, -0.6, 2, 4, // + -0.3, -0.2, 10, 7, // + }; + int kExpectDims[] = {4, 1, 1, 2, 1}; + constexpr float kExpect[] = {0.353553, 1.0}; + constexpr int kOutputCount = std::extent::value; + float output_data[kOutputCount]; + tflite::testing::L2Pool2DTestParams params; + params.activation = kTfLiteActReluN1To1; + + tflite::testing::TestL2Pool2D(params, kInputDims, kInput, kExpectDims, + kExpect, output_data); +} + +TF_LITE_MICRO_TEST(FloatPoolingOpTestL2PoolActivationRelu6) { + int kInputDims[] = {4, 1, 2, 4, 1}; + constexpr float kInput[] = { + -0.1, -0.6, 2, 4, // + -0.3, -0.2, 10, 7, // + }; + int kExpectDims[] = {4, 1, 1, 2, 1}; + constexpr float kExpect[] = {0.353553, 6.0}; + constexpr int kOutputCount = std::extent::value; + float output_data[kOutputCount]; + tflite::testing::L2Pool2DTestParams params; + params.activation = kTfLiteActRelu6; + + tflite::testing::TestL2Pool2D(params, kInputDims, kInput, kExpectDims, + kExpect, output_data); +} + +TF_LITE_MICRO_TEST(FloatPoolingOpTestL2PoolPaddingSame) { + int kInputDims[] = {4, 1, 2, 4, 1}; + constexpr float kInput[] = { + 0, 6, 2, 4, // + 3, 2, 10, 7, // + }; + int kExpectDims[] = {4, 1, 1, 2, 1}; + constexpr float kExpect[] = {3.5, 6.5}; + constexpr int kOutputCount = std::extent::value; + float output_data[kOutputCount]; + tflite::testing::L2Pool2DTestParams params; + params.padding = kTfLitePaddingSame; + + tflite::testing::TestL2Pool2D(params, kInputDims, kInput, kExpectDims, + kExpect, output_data); +} + +TF_LITE_MICRO_TEST(FloatPoolingOpTestL2PoolPaddingSameStride1) { + int kInputDims[] = {4, 1, 2, 4, 1}; + constexpr float kInput[] = { + 0, 6, 2, 4, // + 3, 2, 10, 7, // + }; + int kExpectDims[] = {4, 1, 2, 4, 1}; + constexpr float kExpect[] = {3.5, 6.0, 6.5, 5.70088, + 2.54951, 7.2111, 8.63134, 7.0}; + constexpr int kOutputCount = std::extent::value; + float 
output_data[kOutputCount]; + tflite::testing::L2Pool2DTestParams params; + params.padding = kTfLitePaddingSame; + params.compare_tolerance = 1e-4; + params.stride_width = 1; + params.stride_height = 1; + + tflite::testing::TestL2Pool2D(params, kInputDims, kInput, kExpectDims, + kExpect, output_data); +} + +TF_LITE_MICRO_TEST(FloatPoolingOpTestL2PoolPaddingValidStride1) { + int kInputDims[] = {4, 1, 2, 4, 1}; + constexpr float kInput[] = { + 0, 6, 2, 4, // + 3, 2, 10, 7, // + }; + int kExpectDims[] = {4, 1, 1, 3, 1}; + constexpr float kExpect[] = {3.5, 6.0, 6.5}; + constexpr int kOutputCount = std::extent::value; + float output_data[kOutputCount]; + tflite::testing::L2Pool2DTestParams params; + params.stride_width = 1; + params.stride_height = 1; + + tflite::testing::TestL2Pool2D(params, kInputDims, kInput, kExpectDims, + kExpect, output_data); +} + +TF_LITE_MICRO_TESTS_END diff --git a/tensorflow/lite/micro/kernels/l2norm.cc b/tensorflow/lite/micro/kernels/l2norm.cc new file mode 100644 index 0000000..fa3601b --- /dev/null +++ b/tensorflow/lite/micro/kernels/l2norm.cc @@ -0,0 +1,140 @@ +/* Copyright 2023 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/ + +#include "tensorflow/lite/c/common.h" +#include "tensorflow/lite/kernels/internal/reference/integer_ops/l2normalization.h" +#include "tensorflow/lite/kernels/internal/reference/l2normalization.h" +#include "tensorflow/lite/kernels/kernel_util.h" +#include "tensorflow/lite/micro/kernels/kernel_util.h" +#include "tensorflow/lite/micro/micro_log.h" + +namespace tflite { + +namespace { + +// This file has two implementation of L2Norm. +enum KernelType { + kReference, + kGenericOptimized, +}; + +constexpr int kInputTensor = 0; +constexpr int kOutputTensor = 0; + +TfLiteStatus Prepare(TfLiteContext* context, TfLiteNode* node) { + TFLITE_DCHECK(node->user_data != nullptr); + TFLITE_DCHECK(node->builtin_data != nullptr); + + auto* params = reinterpret_cast(node->builtin_data); + L2NormalizationParams* data = + static_cast(node->user_data); + + TF_LITE_ENSURE_EQ(context, NumInputs(node), 1); + TF_LITE_ENSURE_EQ(context, NumOutputs(node), 1); + + MicroContext* micro_context = GetMicroContext(context); + + TfLiteTensor* input = + micro_context->AllocateTempInputTensor(node, kInputTensor); + TF_LITE_ENSURE(context, input != nullptr); + TfLiteTensor* output = + micro_context->AllocateTempOutputTensor(node, kOutputTensor); + TF_LITE_ENSURE(context, output != nullptr); + TF_LITE_ENSURE(context, NumDimensions(input) <= 4); + + TF_LITE_ENSURE(context, + output->type == kTfLiteFloat32 || output->type == kTfLiteInt8); + TF_LITE_ENSURE_TYPES_EQ(context, input->type, output->type); + + if (output->type == kTfLiteInt8) { + data->input_zero_point = input->params.zero_point; + } else if (output->type == kTfLiteFloat32) { + data->input_zero_point = 0; + } + + // Our implementations don't currently support activations. 
+ TF_LITE_ENSURE_EQ(context, params->activation, kTfLiteActNone); + + micro_context->DeallocateTempTfLiteTensor(input); + micro_context->DeallocateTempTfLiteTensor(output); + return kTfLiteOk; +} + +void* Init(TfLiteContext* context, const char* buffer, size_t length) { + TFLITE_DCHECK(context->AllocatePersistentBuffer != nullptr); + return context->AllocatePersistentBuffer(context, + sizeof(L2NormalizationParams)); +} + +TfLiteStatus Eval(TfLiteContext* context, TfLiteNode* node) { + TFLITE_DCHECK(node->user_data != nullptr); + const L2NormalizationParams& data = + *(static_cast(node->user_data)); + + const TfLiteEvalTensor* input = + tflite::micro::GetEvalInput(context, node, kInputTensor); + TfLiteEvalTensor* output = + tflite::micro::GetEvalOutput(context, node, kOutputTensor); + + // TODO(b/143912164): instead of hardcode the epsilon here, we should read it + // from tensorflow, i.e., adding a params. + // We don't compute epsilon for quantized kernel: + // + // epsilon_float = (epsilon_quant - zp) * scale + // so + // espsilon_quant = epsilon_float / scale + zp + // We know epsilon_float is just a very small number to avoid division by + // zero error, and scale is > 1, so the integer value of epsilon for quant + // is just dominated by the zero point. + // Also, GetInvSqrtQuantizedMultiplierExp handles the scenario where the sum + // of input value squared is zero case well. + // So we don't even need to do handle the epsilon for quantized kernel case. 
+ const float epsilon = 1e-6f; + if (output->type == kTfLiteFloat32) { + reference_ops::L2Normalization(data, tflite::micro::GetTensorShape(input), + tflite::micro::GetTensorData(input), + tflite::micro::GetTensorShape(output), + tflite::micro::GetTensorData(output), + epsilon); + } else if (output->type == kTfLiteInt8) { + const auto input_shape = tflite::micro::GetTensorShape(input); + const auto output_shape = tflite::micro::GetTensorShape(output); + const int trailing_dim = input_shape.DimensionsCount() - 1; + const int depth = + MatchingDim(input_shape, trailing_dim, output_shape, trailing_dim); + const int outer_size = + MatchingFlatSizeSkipDim(input_shape, trailing_dim, output_shape); + reference_integer_ops::L2Normalization( + data.input_zero_point, outer_size, depth, + tflite::micro::GetTensorData(input), + tflite::micro::GetTensorData(output)); + } else { + MicroPrintf("Output type is %s, requires float.", + TfLiteTypeGetName(output->type)); + return kTfLiteError; + } + + return kTfLiteOk; +} + +} // namespace + +TFLMRegistration Register_L2NORM_REF() { + return tflite::micro::RegisterOp(Init, Prepare, Eval); +} + +TFLMRegistration Register_L2_NORMALIZATION() { return Register_L2NORM_REF(); } + +} // namespace tflite diff --git a/tensorflow/lite/micro/kernels/l2norm_test.cc b/tensorflow/lite/micro/kernels/l2norm_test.cc new file mode 100644 index 0000000..435f2f0 --- /dev/null +++ b/tensorflow/lite/micro/kernels/l2norm_test.cc @@ -0,0 +1,191 @@ +/* Copyright 2023 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#include "tensorflow/lite/c/builtin_op_data.h" +#include "tensorflow/lite/c/common.h" +#include "tensorflow/lite/micro/kernels/kernel_runner.h" +#include "tensorflow/lite/micro/test_helpers.h" +#include "tensorflow/lite/micro/testing/micro_test.h" + +namespace tflite { +namespace testing { +namespace { + +// used to set the quantization parameters for the int8_t and tests +constexpr float kInputMin = -2.0; +constexpr float kInputMax = 2.0; +constexpr float kOutputMin = -1.0; +constexpr float kOutputMax = 127.0 / 128.0; + +TfLiteTensor CreateL2NormTensor(const float* data, TfLiteIntArray* dims, + bool is_input) { + return CreateTensor(data, dims); +} + +template +TfLiteTensor CreateL2NormTensor(const T* data, TfLiteIntArray* dims, + bool is_input) { + float kInputScale = ScaleFromMinMax(kInputMin, kInputMax); + int kInputZeroPoint = ZeroPointFromMinMax(kInputMin, kInputMax); + float kOutputScale = ScaleFromMinMax(kOutputMin, kOutputMax); + int kOutputZeroPoint = ZeroPointFromMinMax(kOutputMin, kOutputMax); + TfLiteTensor tensor; + if (is_input) { + tensor = CreateQuantizedTensor(data, dims, kInputScale, kInputZeroPoint); + } else { + tensor = CreateQuantizedTensor(data, dims, kOutputScale, kOutputZeroPoint); + } + + tensor.quantization.type = kTfLiteAffineQuantization; + return tensor; +} + +template +void TestL2Normalization(int* input_dims_data, const T* input_data, + const T* expected_output_data, T* output_data) { + TfLiteIntArray* dims = IntArrayFromInts(input_dims_data); + + const int output_dims_count = ElementCount(*dims); + + constexpr int tensors_size = 2; + TfLiteTensor tensors[tensors_size] = { + CreateL2NormTensor(input_data, dims, true), + CreateL2NormTensor(output_data, dims, false), + }; + + int inputs_array_data[] = {1, 0}; + TfLiteIntArray* inputs_array = 
IntArrayFromInts(inputs_array_data); + int outputs_array_data[] = {1, 1}; + TfLiteIntArray* outputs_array = IntArrayFromInts(outputs_array_data); + + TfLiteL2NormParams builtin_data = { + .activation = kTfLiteActNone, + }; + + const TFLMRegistration registration = tflite::Register_L2_NORMALIZATION(); + micro::KernelRunner runner(registration, tensors, tensors_size, inputs_array, + outputs_array, + reinterpret_cast(&builtin_data)); + + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, runner.InitAndPrepare()); + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, runner.Invoke()); + + for (int i = 0; i < output_dims_count; ++i) { + TF_LITE_MICRO_EXPECT_EQ(expected_output_data[i], output_data[i]); + } +} + +} // namespace +} // namespace testing +} // namespace tflite + +TF_LITE_MICRO_TESTS_BEGIN + +TF_LITE_MICRO_TEST(SimpleFloatTest) { + int input_dims[] = {4, 1, 1, 1, 6}; + constexpr int data_length = 6; + const float input_data[data_length] = {-1.1, 0.6, 0.7, 1.2, -0.7, 0.1}; + const float expected_output_data[data_length] = {-0.55, 0.3, 0.35, + 0.6, -0.35, 0.05}; + float output_data[data_length]; + + tflite::testing::TestL2Normalization( + input_dims, input_data, expected_output_data, output_data); +} + +TF_LITE_MICRO_TEST(ZerosVectorFloatTest) { + int input_dims[] = {4, 1, 1, 1, 6}; + constexpr int data_length = 6; + const float input_data[data_length] = {0, 0, 0, 0, 0, 0}; + const float expected_output_data[data_length] = {0, 0, 0, 0, 0, 0}; + float output_data[data_length]; + + tflite::testing::TestL2Normalization( + input_dims, input_data, expected_output_data, output_data); +} + +TF_LITE_MICRO_TEST(SimpleFloatWithRankLessThanFourTest) { + int input_dims[] = {4, 1, 1, 1, 6}; + constexpr int data_length = 6; + const float input_data[data_length] = {-1.1, 0.6, 0.7, 1.2, -0.7, 0.1}; + const float expected_output_data[data_length] = {-0.55, 0.3, 0.35, + 0.6, -0.35, 0.05}; + float output_data[data_length]; + + tflite::testing::TestL2Normalization( + input_dims, input_data, expected_output_data, 
output_data); +} + +TF_LITE_MICRO_TEST(MultipleBatchFloatTest) { + int input_dims[] = {4, 3, 1, 1, 6}; + constexpr int data_length = 18; + const float input_data[data_length] = { + -1.1, 0.6, 0.7, 1.2, -0.7, 0.1, // batch 1 + -1.1, 0.6, 0.7, 1.2, -0.7, 0.1, // batch 2 + -1.1, 0.6, 0.7, 1.2, -0.7, 0.1, // batch 3 + }; + const float expected_output_data[data_length] = { + -0.55, 0.3, 0.35, 0.6, -0.35, 0.05, // batch 1 + -0.55, 0.3, 0.35, 0.6, -0.35, 0.05, // batch 2 + -0.55, 0.3, 0.35, 0.6, -0.35, 0.05, // batch 3 + }; + float output_data[data_length]; + + tflite::testing::TestL2Normalization( + input_dims, input_data, expected_output_data, output_data); +} + +TF_LITE_MICRO_TEST(SimpleInt8Test) { + int input_dims[] = {4, 1, 1, 1, 6}; + constexpr int data_length = 6; + const int8_t input_data[data_length] = {-71, 37, 44, 76, -46, 5}; + const int8_t expected_output[data_length] = {-70, 38, 45, 77, -45, 6}; + int8_t output_data[data_length]; + + tflite::testing::TestL2Normalization(input_dims, input_data, + expected_output, output_data); +} + +TF_LITE_MICRO_TEST(ZerosVectorInt8Test) { + int input_dims[] = {4, 1, 1, 1, 6}; + constexpr int data_length = 6; + const int8_t input_data[data_length] = {-1, -1, -1, -1, -1, -1}; + const int8_t expected_output[data_length] = {0, 0, 0, 0, 0, 0}; + int8_t output_data[data_length]; + + tflite::testing::TestL2Normalization(input_dims, input_data, + expected_output, output_data); +} + +TF_LITE_MICRO_TEST(MultipleBatchInt8Test) { + int input_dims[] = {2, 3, 6}; + constexpr int data_length = 18; + const int8_t input_data[data_length] = { + -71, 37, 44, 76, -46, 5, // batch 1 + -71, 37, 44, 76, -46, 5, // batch 2 + -71, 37, 44, 76, -46, 5, // batch 3 + }; + const int8_t expected_output[data_length] = { + -70, 38, 45, 77, -45, 6, // batch 1 + -70, 38, 45, 77, -45, 6, // batch 2 + -70, 38, 45, 77, -45, 6, // batch 3 + }; + int8_t output_data[data_length]; + + tflite::testing::TestL2Normalization(input_dims, input_data, + expected_output, 
output_data); +} + +TF_LITE_MICRO_TESTS_END diff --git a/tensorflow/lite/micro/kernels/leaky_relu.cc b/tensorflow/lite/micro/kernels/leaky_relu.cc new file mode 100644 index 0000000..ee86f19 --- /dev/null +++ b/tensorflow/lite/micro/kernels/leaky_relu.cc @@ -0,0 +1,95 @@ +/* Copyright 2021 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#include "tensorflow/lite/kernels/internal/reference/leaky_relu.h" + +#include "tensorflow/lite/c/common.h" +#include "tensorflow/lite/kernels/internal/quantization_util.h" +#include "tensorflow/lite/kernels/internal/reference/process_broadcast_shapes.h" +#include "tensorflow/lite/kernels/internal/types.h" +#include "tensorflow/lite/kernels/kernel_util.h" +#include "tensorflow/lite/micro/kernels/kernel_util.h" +#include "tensorflow/lite/micro/kernels/leaky_relu.h" +#include "tensorflow/lite/micro/micro_log.h" + +namespace tflite { + +template +void QuantizeLeakyRelu(const LeakyReluOpData& data, + const TfLiteEvalTensor* input, + TfLiteEvalTensor* output) { + LeakyReluParams op_params = {}; + + op_params.input_offset = data.input_zero_point; + op_params.output_offset = data.output_zero_point; + op_params.output_multiplier_alpha = data.output_multiplier_alpha; + op_params.output_shift_alpha = data.output_shift_alpha; + op_params.output_multiplier_identity = data.output_multiplier_identity; + op_params.output_shift_identity = 
data.output_shift_identity; + reference_ops::QuantizeLeakyRelu(op_params, + tflite::micro::GetTensorShape(input), + tflite::micro::GetTensorData(input), + tflite::micro::GetTensorShape(output), + tflite::micro::GetTensorData(output)); +} + +void* LeakyReluInit(TfLiteContext* context, const char* buffer, size_t length) { + TFLITE_DCHECK(context->AllocatePersistentBuffer != nullptr); + return context->AllocatePersistentBuffer(context, sizeof(LeakyReluOpData)); +} + +TfLiteStatus LeakyReluEval(TfLiteContext* context, TfLiteNode* node) { + const TfLiteEvalTensor* input = + tflite::micro::GetEvalInput(context, node, kInputTensor); + TfLiteEvalTensor* output = + tflite::micro::GetEvalOutput(context, node, kOutputTensor); + const LeakyReluOpData& data = *static_cast(node->user_data); + + switch (input->type) { + case kTfLiteFloat32: { + LeakyReluParams op_params = {}; + const auto* params = + static_cast(node->builtin_data); + + op_params.alpha = params->alpha; + reference_ops::LeakyRelu(op_params, tflite::micro::GetTensorShape(input), + tflite::micro::GetTensorData(input), + tflite::micro::GetTensorShape(output), + tflite::micro::GetTensorData(output)); + return kTfLiteOk; + } break; + case kTfLiteInt8: { + QuantizeLeakyRelu(data, input, output); + return kTfLiteOk; + } break; + case kTfLiteInt16: { + QuantizeLeakyRelu(data, input, output); + return kTfLiteOk; + } break; + default: + MicroPrintf("Only float32, int8 are supported by LEAKY_RELU, got %s.", + TfLiteTypeGetName(input->type)); + return kTfLiteError; + } + + return kTfLiteError; +} + +TFLMRegistration Register_LEAKY_RELU() { + return tflite::micro::RegisterOp(LeakyReluInit, LeakyReluPrepare, + LeakyReluEval); +} + +} // namespace tflite diff --git a/tensorflow/lite/micro/kernels/leaky_relu.h b/tensorflow/lite/micro/kernels/leaky_relu.h new file mode 100644 index 0000000..dfcd6e9 --- /dev/null +++ b/tensorflow/lite/micro/kernels/leaky_relu.h @@ -0,0 +1,43 @@ +/* Copyright 2021 The TensorFlow Authors. 
All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#ifndef TENSORFLOW_LITE_MICRO_KERNELS_LEAKY_RELU_H_ +#define TENSORFLOW_LITE_MICRO_KERNELS_LEAKY_RELU_H_ + +#include "tensorflow/lite/c/common.h" + +namespace tflite { + +// Input/output tensor index. +extern const int kInputTensor; +extern const int kOutputTensor; + +struct LeakyReluOpData { + // quantization parameters + int32_t output_multiplier_alpha; + int32_t output_shift_alpha; + int32_t output_multiplier_identity; + int32_t output_shift_identity; + int32_t input_zero_point; + int32_t output_zero_point; +}; + +TfLiteStatus CalculateOpDataLeakyRelu(TfLiteContext* context, TfLiteNode* node); + +TfLiteStatus LeakyReluPrepare(TfLiteContext* context, TfLiteNode* node); + +} // namespace tflite + +#endif // TENSORFLOW_LITE_MICRO_KERNELS_LEAKY_RELU_H_ diff --git a/tensorflow/lite/micro/kernels/leaky_relu_common.cc b/tensorflow/lite/micro/kernels/leaky_relu_common.cc new file mode 100644 index 0000000..3d1ffeb --- /dev/null +++ b/tensorflow/lite/micro/kernels/leaky_relu_common.cc @@ -0,0 +1,78 @@ +/* Copyright 2021 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. 
+You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#include "tensorflow/lite/c/common.h" +#include "tensorflow/lite/kernels/internal/quantization_util.h" +#include "tensorflow/lite/kernels/internal/reference/leaky_relu.h" +#include "tensorflow/lite/kernels/internal/reference/process_broadcast_shapes.h" +#include "tensorflow/lite/kernels/internal/types.h" +#include "tensorflow/lite/kernels/kernel_util.h" +#include "tensorflow/lite/micro/kernels/kernel_util.h" +#include "tensorflow/lite/micro/kernels/leaky_relu.h" + +namespace tflite { + +// Input/output tensor index. 
+const int kInputTensor = 0; +const int kOutputTensor = 0; + +TfLiteStatus CalculateOpDataLeakyRelu(TfLiteContext* context, + TfLiteNode* node) { + MicroContext* micro_context = GetMicroContext(context); + + TF_LITE_ENSURE_EQ(context, NumInputs(node), 1); + TF_LITE_ENSURE_EQ(context, NumOutputs(node), 1); + TfLiteTensor* input = + micro_context->AllocateTempInputTensor(node, kInputTensor); + TF_LITE_ENSURE(context, input != nullptr); + TfLiteTensor* output = + micro_context->AllocateTempOutputTensor(node, kOutputTensor); + TF_LITE_ENSURE(context, output != nullptr); + TF_LITE_ENSURE_TYPES_EQ(context, input->type, output->type); + + if (output->type == kTfLiteInt8 || output->type == kTfLiteInt16) { + LeakyReluOpData* data = static_cast(node->user_data); + const auto* params = + static_cast(node->builtin_data); + + data->input_zero_point = input->params.zero_point; + data->output_zero_point = output->params.zero_point; + + int output_shift_alpha; + double alpha_multiplier = static_cast( + input->params.scale * params->alpha / output->params.scale); + QuantizeMultiplier(alpha_multiplier, &data->output_multiplier_alpha, + &output_shift_alpha); + data->output_shift_alpha = static_cast(output_shift_alpha); + + int output_shift_identity; + double identity_multiplier = + static_cast(input->params.scale / output->params.scale); + QuantizeMultiplier(identity_multiplier, &data->output_multiplier_identity, + &output_shift_identity); + data->output_shift_identity = static_cast(output_shift_identity); + } + + micro_context->DeallocateTempTfLiteTensor(input); + micro_context->DeallocateTempTfLiteTensor(output); + + return kTfLiteOk; +} + +TfLiteStatus LeakyReluPrepare(TfLiteContext* context, TfLiteNode* node) { + return CalculateOpDataLeakyRelu(context, node); +} + +} // namespace tflite diff --git a/tensorflow/lite/micro/kernels/leaky_relu_test.cc b/tensorflow/lite/micro/kernels/leaky_relu_test.cc new file mode 100644 index 0000000..3c5df3f --- /dev/null +++ 
b/tensorflow/lite/micro/kernels/leaky_relu_test.cc @@ -0,0 +1,241 @@ +/* Copyright 2020 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#include +#include + +#include "tensorflow/lite/c/builtin_op_data.h" +#include "tensorflow/lite/c/common.h" +#include "tensorflow/lite/micro/kernels/kernel_runner.h" +#include "tensorflow/lite/micro/test_helpers.h" +#include "tensorflow/lite/micro/testing/micro_test.h" + +namespace tflite { +namespace testing { +namespace { + +// min/max are used to compute scale, zero-point, compare tolerance +template +struct TestLeakyReluParams { + // general parameters + float alpha; // alpha multiplier + + // quantization parameters + float scale; // quantization scale of input and output + int zero_point; // quantization zero_point of input and output + T* input_data; // quantized input storage + T* output_data; // quantized output storage + float tolerance; // output vs expected value tolerance +}; + +void ExecuteLeakyReluTest(const float alpha, const int tensors_count, + TfLiteTensor* tensors) { + TfLiteLeakyReluParams builtin_data = {}; + builtin_data.alpha = alpha; + + int kInputArrayData[] = {1, 0}; + TfLiteIntArray* inputs_array = IntArrayFromInts(kInputArrayData); + int kOutputArrayData[] = {1, 1}; + TfLiteIntArray* outputs_array = IntArrayFromInts(kOutputArrayData); + + const TFLMRegistration registration = 
tflite::Register_LEAKY_RELU(); + micro::KernelRunner runner(registration, tensors, tensors_count, inputs_array, + outputs_array, static_cast(&builtin_data)); + + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, runner.InitAndPrepare()); + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, runner.Invoke()); +} + +template +void TestLeakyRelu(const TestLeakyReluParams& params, int* input_dims_data, + const T* input_data, int* expected_dims, + const T* expected_data, T* output_data) { + TfLiteIntArray* input_dims = IntArrayFromInts(input_dims_data); + TfLiteIntArray* output_dims = IntArrayFromInts(expected_dims); + const int output_count = ElementCount(*output_dims); + + TfLiteTensor tensors[] = { + CreateTensor(input_data, input_dims), + CreateTensor(output_data, output_dims), + }; + constexpr int tensors_count = std::extent::value; + ExecuteLeakyReluTest(params.alpha, tensors_count, tensors); + + for (int i = 0; i < output_count; i++) { + TF_LITE_MICRO_EXPECT_EQ(expected_data[i], output_data[i]); + } +} + +template +void TestLeakyReluQuantized(const TestLeakyReluParams& params, + int* input_dims_data, const float* input_data, + int* expected_dims, const float* expected_data, + float* output_data) { + TfLiteIntArray* input_dims = IntArrayFromInts(input_dims_data); + TfLiteIntArray* output_dims = IntArrayFromInts(expected_dims); + const int output_count = ElementCount(*output_dims); + + TfLiteTensor tensors[] = { + CreateQuantizedTensor(input_data, params.input_data, input_dims, + params.scale, params.zero_point), + CreateQuantizedTensor(params.output_data, output_dims, params.scale, + params.zero_point), + }; + constexpr int kTensorsCount = std::extent::value; + ExecuteLeakyReluTest(params.alpha, kTensorsCount, tensors); + + Dequantize(params.output_data, output_count, params.scale, params.zero_point, + output_data); + const float kTolerance = params.tolerance; + for (int i = 0; i < output_count; i++) { + TF_LITE_MICRO_EXPECT_NEAR(expected_data[i], output_data[i], kTolerance); + } +} + +// Our 
fixed-point math function implementations have roughly 12 bits of +// accuracy, when specialized to 16-bit fixed-point arithmetic. +// That is purely an implementation compromise, it would have been possible +// to get closer to 16 bits of accuracy but that would be more expensive, +// and not needed for our purposes as ultimately the output is either +// immediately down-quantized to 8 bits, or will typically be at the output +// of the surrounding LSTM cell. +// So we can require roughly 2^-12 accuracy when the output is 16-bit, and +// we can more or less expect the full 2^-8 accuracy when the output is 8-bit. +// +// However, the representable output interval is often [-1, 1] (it has to be +// for tanh, and even for logistic, when we implement it in fixed-point, we +// typically have to do so on such a symmetric interval, e.g. ARM NEON only +// has signed fixed-point arithmetic (SQRDMULH)). As the width of [-1, 1] +// is 2, our representable values are often diluted by a factor of 2, whence +// the factor of 2 below. +const float kQuantizedTolerance = 2 * (1. 
/ 256); + +template +void QuantizedActivationsOpTestLeakyRelu() { + int kDims[] = {2, 5, 5}; + constexpr float kInput[] = { + -5.0f, -4.6f, -4.2f, -3.8f, -3.4f, // Row 1 + -3.0f, -2.6f, -2.2f, -1.8f, -1.4f, // Row 2 + -1.0f, -0.6f, -0.2f, 0.2f, 0.6f, // Row 3 + 1.0f, 1.4f, 1.8f, 2.2f, 2.6f, // Row 4 + 3.0f, 3.4f, 3.8f, 4.2f, 4.6f, // Row 5 + }; + constexpr float kExpect[] = { + -0.50f, -0.46f, -0.42f, -0.38f, -0.34f, // Row 1 + -0.30f, -0.26f, -0.22f, -0.18f, -0.14f, // Row 2 + -0.10f, -0.06f, -0.02f, 0.20f, 0.60f, // Row 3 + 1.00f, 1.40f, 1.80f, 2.20f, 2.60f, // Row 4 + 3.00f, 3.40f, 3.80f, 4.20f, 4.60f, // Row 5 + }; + constexpr int kOutputCount = std::extent::value; + float output_data[kOutputCount]; + + // setup quantization storage and parameters + integer_dtype q_output_data[kOutputCount]; + integer_dtype q_input_data[kOutputCount]; + + constexpr float kMin = -1; + constexpr float kMax = + std::numeric_limits::max() / + static_cast(std::numeric_limits::max() + 1); + // Quantize with a symmetric input / output range of {-5, 5}. 
+ constexpr float kDataMin = 5 * kMin; + constexpr float kDataMax = 5 * kMax; + + TestLeakyReluParams params = {}; + params.alpha = 0.1f; + params.scale = ScaleFromMinMax(kDataMin, kDataMax); + params.zero_point = ZeroPointFromMinMax(kDataMin, kDataMax); + params.input_data = q_input_data; + params.output_data = q_output_data; + params.tolerance = kQuantizedTolerance * 5; + + TestLeakyReluQuantized(params, kDims, kInput, kDims, kExpect, output_data); +} + +} // namespace +} // namespace testing +} // namespace tflite + +TF_LITE_MICRO_TESTS_BEGIN + +TF_LITE_MICRO_TEST(QuantizedActivationsOpTestLeakyReluInt8_1) { + int kDims[] = {2, 2, 3}; + constexpr float kInput[] = {0.0f, 1.0f, 3.0f, 1.0f, -1.0f, -2.0f}; + constexpr float kExpect[] = {0.0f, 1.0f, 3.0f, 1.0f, -0.5f, -1.0f}; + constexpr int kOutputCount = std::extent::value; + float output_data[kOutputCount]; + + // setup quantization storage and parameters + int8_t q_output_data[kOutputCount]; + int8_t q_input_data[kOutputCount]; + + tflite::testing::TestLeakyReluParams params = {}; + params.alpha = 0.5f; + params.scale = 0.1f; + params.zero_point = 0; + params.input_data = q_input_data; + params.output_data = q_output_data; + params.tolerance = tflite::testing::kQuantizedTolerance; + + tflite::testing::TestLeakyReluQuantized(params, kDims, kInput, kDims, kExpect, + output_data); +} + +TF_LITE_MICRO_TEST(QuantizedActivationsOpTestLeakyReluInt8_2) { + tflite::testing::QuantizedActivationsOpTestLeakyRelu(); +} + +TF_LITE_MICRO_TEST(QuantizedActivationsOpTestLeakyReluInt16_1) { + int kDims[] = {2, 2, 3}; + constexpr float kInput[] = {0.0f, 1.0f, 3.0f, 1.0f, -1.0f, -2.0f}; + constexpr float kExpect[] = {0.0f, 1.0f, 3.0f, 1.0f, -0.5f, -1.0f}; + constexpr int kOutputCount = std::extent::value; + float output_data[kOutputCount]; + + // setup quantization storage and parameters + int16_t q_output_data[kOutputCount]; + int16_t q_input_data[kOutputCount]; + + tflite::testing::TestLeakyReluParams params = {}; + params.alpha = 
0.5f; + params.scale = 0.01f; + params.zero_point = 0; + params.input_data = q_input_data; + params.output_data = q_output_data; + params.tolerance = tflite::testing::kQuantizedTolerance; + + tflite::testing::TestLeakyReluQuantized(params, kDims, kInput, kDims, kExpect, + output_data); +} + +TF_LITE_MICRO_TEST(QuantizedActivationsOpTestLeakyReluInt16_2) { + tflite::testing::QuantizedActivationsOpTestLeakyRelu(); +} + +TF_LITE_MICRO_TEST(FloatActivationsOpTestLeakyRelu) { + int kDims[] = {2, 2, 3}; + constexpr float kInput[] = {0.0f, 1.0f, 3.0f, 1.0f, -1.0f, -2.0f}; + constexpr float kExpect[] = {0.0f, 1.0f, 3.0f, 1.0f, -0.5f, -1.0f}; + constexpr int kOutputCount = std::extent::value; + float output_data[kOutputCount]; + tflite::testing::TestLeakyReluParams params = {}; + params.alpha = 0.5f; + + tflite::testing::TestLeakyRelu(params, kDims, kInput, kDims, kExpect, + output_data); +} + +TF_LITE_MICRO_TESTS_END diff --git a/tensorflow/lite/micro/kernels/log_softmax.cc b/tensorflow/lite/micro/kernels/log_softmax.cc new file mode 100644 index 0000000..47f5937 --- /dev/null +++ b/tensorflow/lite/micro/kernels/log_softmax.cc @@ -0,0 +1,148 @@ +/* Copyright 2021 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/ +#include "tensorflow/lite/kernels/internal/reference/log_softmax.h" + +#include +#include + +#include "tensorflow/lite/c/common.h" +#include "tensorflow/lite/kernels/internal/quantization_util.h" +#include "tensorflow/lite/kernels/internal/tensor_ctypes.h" +#include "tensorflow/lite/kernels/internal/types.h" +#include "tensorflow/lite/kernels/kernel_util.h" +#include "tensorflow/lite/micro/kernels/kernel_util.h" +#include "tensorflow/lite/micro/micro_log.h" + +namespace tflite { +namespace { + +// used only with quantized data +struct LogSoftmaxOpData { + int32_t input_multiplier; + int32_t input_left_shift; + int32_t reverse_scaling_divisor; + int32_t reverse_scaling_right_shift; + int diff_min; + size_t outer_size; // number of tensor elements skipping computation axis + size_t depth; // number of tensor elements on computation axis +}; + +// input/output tensor index +constexpr int kInputTensor = 0; +constexpr int kOutputTensor = 0; + +TfLiteStatus CalculateOpData(TfLiteContext* context, TfLiteNode* node) { + MicroContext* micro_context = GetMicroContext(context); + + TF_LITE_ENSURE_EQ(context, NumInputs(node), 1); + TF_LITE_ENSURE_EQ(context, NumOutputs(node), 1); + TfLiteTensor* input = + micro_context->AllocateTempInputTensor(node, kInputTensor); + TF_LITE_ENSURE(context, input != nullptr); + TfLiteTensor* output = + micro_context->AllocateTempOutputTensor(node, kOutputTensor); + TF_LITE_ENSURE(context, output != nullptr); + TF_LITE_ENSURE_TYPES_EQ(context, input->type, output->type); + + TF_LITE_ENSURE(context, HaveSameShapes(input, output)); + + if (input->type == kTfLiteInt8) { + node->user_data = + context->AllocatePersistentBuffer(context, sizeof(LogSoftmaxOpData)); + auto data = static_cast(node->user_data); + + // quantization datum + constexpr int32_t kOutputZeroPoint = 127; + constexpr float kOutputScale = 16.0 / 256; + constexpr double kBeta = 1.0; + constexpr int 
kScaledDiffIntegerBits = 5; + + TF_LITE_ENSURE(context, output->params.scale == kOutputScale); + TF_LITE_ENSURE(context, output->params.zero_point == kOutputZeroPoint); + + int input_left_shift; + int reverse_scaling_right_shift; + tflite::PreprocessLogSoftmaxScalingExp( + kBeta, static_cast(input->params.scale), kScaledDiffIntegerBits, + &data->input_multiplier, &input_left_shift, + &data->reverse_scaling_divisor, &reverse_scaling_right_shift); + data->input_left_shift = static_cast(input_left_shift); + data->reverse_scaling_right_shift = + static_cast(-reverse_scaling_right_shift); + // diff_min has a negative value, and is used to limit the maximum magnitude + // of the diffs, which are <= 0. + data->diff_min = + -tflite::CalculateInputRadius(kScaledDiffIntegerBits, input_left_shift); + + RuntimeShape input_shape = GetTensorShape(input); + const int trailing_dim = input_shape.DimensionsCount() - 1; + data->outer_size = + static_cast(FlatSizeSkipDim(input_shape, trailing_dim)); + data->depth = static_cast(input_shape.Dims(trailing_dim)); + } + + micro_context->DeallocateTempTfLiteTensor(input); + micro_context->DeallocateTempTfLiteTensor(output); + return kTfLiteOk; +} + +TfLiteStatus LogSoftmaxPrepare(TfLiteContext* context, TfLiteNode* node) { + return CalculateOpData(context, node); +} + +TfLiteStatus LogSoftmaxEval(TfLiteContext* context, TfLiteNode* node) { + const LogSoftmaxOpData* data = + static_cast(node->user_data); + const TfLiteEvalTensor* input = + tflite::micro::GetEvalInput(context, node, kInputTensor); + TfLiteEvalTensor* output = + tflite::micro::GetEvalOutput(context, node, kOutputTensor); + switch (input->type) { + case kTfLiteFloat32: { + SoftmaxParams op_params = {}; + reference_ops::LogSoftmax(op_params, tflite::micro::GetTensorShape(input), + tflite::micro::GetTensorData(input), + tflite::micro::GetTensorShape(output), + tflite::micro::GetTensorData(output)); + return kTfLiteOk; + } + case kTfLiteInt8: { + SoftmaxParams op_params = {}; + 
op_params.input_multiplier = data->input_multiplier; + op_params.input_left_shift = data->input_left_shift; + op_params.reverse_scaling_divisor = data->reverse_scaling_divisor; + op_params.reverse_scaling_right_shift = data->reverse_scaling_right_shift; + op_params.diff_min = data->diff_min; + reference_ops::LogSoftmax(op_params, data->outer_size, data->depth, + tflite::micro::GetTensorShape(input), + tflite::micro::GetTensorData(input), + tflite::micro::GetTensorShape(output), + tflite::micro::GetTensorData(output)); + return kTfLiteOk; + } + default: + MicroPrintf("LOG_SOFTMAX only supports float32, int8, got %s.", + TfLiteTypeGetName(input->type)); + return kTfLiteError; + } +} + +} // namespace + +TFLMRegistration Register_LOG_SOFTMAX() { + return tflite::micro::RegisterOp(nullptr, LogSoftmaxPrepare, LogSoftmaxEval); +} + +} // namespace tflite diff --git a/tensorflow/lite/micro/kernels/log_softmax_test.cc b/tensorflow/lite/micro/kernels/log_softmax_test.cc new file mode 100644 index 0000000..a34c597 --- /dev/null +++ b/tensorflow/lite/micro/kernels/log_softmax_test.cc @@ -0,0 +1,230 @@ +/* Copyright 2021 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/ + +#include +#include + +#include "tensorflow/lite/c/builtin_op_data.h" +#include "tensorflow/lite/c/common.h" +#include "tensorflow/lite/micro/kernels/kernel_runner.h" +#include "tensorflow/lite/micro/test_helpers.h" +#include "tensorflow/lite/micro/testing/micro_test.h" + +namespace tflite { +namespace testing { +namespace { + +void ExecuteLogSoftmaxTest(int tensors_count, TfLiteTensor* tensors) { + int kInputArrayData[] = {1, 0}; + TfLiteIntArray* inputs_array = IntArrayFromInts(kInputArrayData); + int kOutputArrayData[] = {1, 1}; + TfLiteIntArray* outputs_array = IntArrayFromInts(kOutputArrayData); + + const TFLMRegistration registration = tflite::Register_LOG_SOFTMAX(); + micro::KernelRunner runner(registration, tensors, tensors_count, inputs_array, + outputs_array, nullptr); + + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, runner.InitAndPrepare()); + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, runner.Invoke()); +} + +template +void TestLogSoftmax(const float tolerance, int* input_dims_data, + const T* input_data, int* expected_dims, + const T* expected_data, T* output_data) { + TfLiteIntArray* input_dims = IntArrayFromInts(input_dims_data); + TfLiteIntArray* output_dims = IntArrayFromInts(expected_dims); + const int output_count = ElementCount(*output_dims); + + TfLiteTensor tensors[] = { + CreateTensor(input_data, input_dims), + CreateTensor(output_data, output_dims), + }; + constexpr int kTensorsCount = std::extent::value; + ExecuteLogSoftmaxTest(kTensorsCount, tensors); + + for (int i = 0; i < output_count; i++) { + TF_LITE_MICRO_EXPECT_NEAR(expected_data[i], output_data[i], tolerance); + } +} + +// min/max are used to compute scale, zero-point +template +struct TestLogSoftmaxParams { + // quantization parameters + float data_min; // input and output data minimum value + float data_max; // input and output data maximum value + T* input_data; // quantized input storage + T* output_data; // 
quantized output storage + float tolerance; // maximum compare difference +}; + +template +void TestLogSoftmaxQuantized(const TestLogSoftmaxParams& params, + int* input_dims_data, const float* input_data, + int* expected_dims, const float* expected_data, + const T* expected_data_quantized, + float* output_data) { + TfLiteIntArray* input_dims = IntArrayFromInts(input_dims_data); + TfLiteIntArray* output_dims = IntArrayFromInts(expected_dims); + const int output_count = ElementCount(*output_dims); + + constexpr float kOutputScale = 16.0 / 256; + constexpr int kOutputZeroPoint = 127; + const float scale = ScaleFromMinMax(params.data_min, params.data_max); + const int zero_point = + ZeroPointFromMinMax(params.data_min, params.data_max); + + TfLiteTensor tensors[] = { + CreateQuantizedTensor(input_data, params.input_data, input_dims, scale, + zero_point), + CreateQuantizedTensor(params.output_data, output_dims, kOutputScale, + kOutputZeroPoint), + }; + constexpr int kTensorsCount = std::extent::value; + + ExecuteLogSoftmaxTest(kTensorsCount, tensors); + + for (int i = 0; i < output_count; i++) { + TF_LITE_MICRO_EXPECT_EQ(expected_data_quantized[i], params.output_data[i]); + } + Dequantize(params.output_data, output_count, kOutputScale, kOutputZeroPoint, + output_data); + for (int i = 0; i < output_count; i++) { + TF_LITE_MICRO_EXPECT_NEAR(expected_data[i], output_data[i], + params.tolerance); + } +} + +} // namespace +} // namespace testing +} // namespace tflite + +TF_LITE_MICRO_TESTS_BEGIN + +// This contains the same test values as the Softmax test, but reference answer +// generated via the following snippet of python: +// logits1 = tf.constant([[0, -6, 2, 4],[3, -2, 10, 1]], dtype=tf.float32) +// logits2 = tf.constant([[0,-6],[2,4],[3,-2],[10,1]], dtype=tf.float32) +// lsm1 = tf.nn.log_softmax(logits1) +// lsm2 = tf.nn.log_softmax(logits2) +// with tf.Session() as sess: +// print('lsm1', sess.run(lsm1)) +// print('lsm2', sess.run(lsm2)) 
+TF_LITE_MICRO_TEST(FloatActivationsOpTestLogSoftmax) { + int kDims1[] = {2, 2, 4}; + constexpr float kInput[] = { + 0, -6, 2, 4, 3, -2, 10, 1, + }; + constexpr float kExpect1[] = { + -4.14297, -10.14297, -2.14297, -.142971, // + -7.00104, -12.00104, -.00104087, -9.00104, // + }; + constexpr int kOutputCount = std::extent::value; + float output_data[kOutputCount]; + + constexpr float kTolerance = 1e-5; + + tflite::testing::TestLogSoftmax(kTolerance, kDims1, kInput, kDims1, kExpect1, + output_data); + + // Same input, but a different shape. + int kDims2[] = {2, 4, 2}; + constexpr float kExpect2[] = { + -.00247565, -6.00247, -2.12692, -.126928, + -.00671534, -5.00671, -.000123374, -9.00012, + }; + + tflite::testing::TestLogSoftmax(kTolerance, kDims2, kInput, kDims2, kExpect2, + output_data); +} + +TF_LITE_MICRO_TEST(LogSoftmaxOpTestSimpleTest) { + int kDims[] = {2, 2, 5}; + constexpr float kInput[] = { + 1.0, 2.0, 3.0, 4.0, 5.0, // + -1.0, -2.0, -3.0, -4.0, -5.0, // + }; + constexpr float kExpect[] = { + -4.45191431, -3.45191431, -2.45191431, -1.45191443, -0.4519144, // + -0.4519144, -1.45191443, -2.45191431, -3.45191431, -4.45191431 // + }; + constexpr int kOutputCount = std::extent::value; + float output_data[kOutputCount]; + + constexpr float kTolerance = 1e-6; + + tflite::testing::TestLogSoftmax(kTolerance, kDims, kInput, kDims, kExpect, + output_data); +} + +TF_LITE_MICRO_TEST(QuantizedActivationsOpTestLogSoftmaxInt8) { + int kDims[] = {2, 2, 4}; + constexpr float kInput[] = { + 0, -6, 2, 4, 3, -2, 10, 1, + }; + constexpr float kExpect[] = { + -4.14297, -10.14297, -2.14297, -.142971, + -7.00104, -12.00104, -.00104087, -9.00104, + }; + constexpr int8_t kExpectQuantized[] = { + 61, -36, 93, 125, 15, -65, 127, -16, + }; + constexpr int kOutputCount = std::extent::value; + float output_data[kOutputCount]; + + // setup quantization storage and parameters + int8_t q_output_data[kOutputCount]; + int8_t q_input_data[kOutputCount]; + constexpr float kMin = -10; + 
constexpr float kMax = 10; + constexpr float kLogSoftmaxQuantizedTolerance = 0.06355; + tflite::testing::TestLogSoftmaxParams params = {}; + params.data_min = kMin; + params.data_max = kMax; + params.input_data = q_input_data; + params.output_data = q_output_data; + params.tolerance = kLogSoftmaxQuantizedTolerance; + + tflite::testing::TestLogSoftmaxQuantized( + params, kDims, kInput, kDims, kExpect, kExpectQuantized, output_data); +} + +TF_LITE_MICRO_TEST(ExtraTestLogSoftmaxInt8) { + int kDims[] = {2, 3, 1}; + constexpr float kInput[] = {0, -1, 1}; + constexpr float kExpect[] = {0, 0, 0}; + constexpr int8_t kExpectQuantized[] = {127, 127, 127}; + constexpr int kOutputCount = std::extent::value; + float output_data[kOutputCount]; + + // setup quantization storage and parameters + int8_t q_output_data[kOutputCount]; + int8_t q_input_data[kOutputCount]; + constexpr float kMin = -1; + constexpr float kMax = 1; + constexpr float kLogSoftmaxQuantizedTolerance = 0.06355; + tflite::testing::TestLogSoftmaxParams params = {}; + params.data_min = kMin; + params.data_max = kMax; + params.input_data = q_input_data; + params.output_data = q_output_data; + params.tolerance = kLogSoftmaxQuantizedTolerance; + + tflite::testing::TestLogSoftmaxQuantized( + params, kDims, kInput, kDims, kExpect, kExpectQuantized, output_data); +} + +TF_LITE_MICRO_TESTS_END diff --git a/tensorflow/lite/micro/kernels/logical.cc b/tensorflow/lite/micro/kernels/logical.cc new file mode 100644 index 0000000..53e282d --- /dev/null +++ b/tensorflow/lite/micro/kernels/logical.cc @@ -0,0 +1,44 @@ +/* Copyright 2021 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. 
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+==============================================================================*/
+#include "tensorflow/lite/micro/kernels/logical.h"
+
+#include "tensorflow/lite/c/common.h"
+#include "tensorflow/lite/kernels/internal/reference/binary_function.h"
+#include "tensorflow/lite/kernels/internal/tensor_ctypes.h"
+#include "tensorflow/lite/kernels/op_macros.h"
+#include "tensorflow/lite/micro/kernels/kernel_util.h"
+
+namespace tflite {
+namespace {
+
+// Eval entry point for LOGICAL_OR: element-wise x || y via the shared
+// LogicalImpl helper (declared in logical.h).
+TfLiteStatus LogicalOrEval(TfLiteContext* context, TfLiteNode* node) {
+  return LogicalImpl(context, node, LogicalOr);
+}
+
+// Eval entry point for LOGICAL_AND: element-wise x && y via the shared
+// LogicalImpl helper.
+TfLiteStatus LogicalAndEval(TfLiteContext* context, TfLiteNode* node) {
+  return LogicalImpl(context, node, LogicalAnd);
+}
+
+}  // namespace
+
+TFLMRegistration Register_LOGICAL_OR() {
+  return tflite::micro::RegisterOp(nullptr, nullptr, LogicalOrEval);
+}
+
+TFLMRegistration Register_LOGICAL_AND() {
+  return tflite::micro::RegisterOp(nullptr, nullptr, LogicalAndEval);
+}
+
+}  // namespace tflite
diff --git a/tensorflow/lite/micro/kernels/logical.h b/tensorflow/lite/micro/kernels/logical.h
new file mode 100644
index 0000000..e70e457
--- /dev/null
+++ b/tensorflow/lite/micro/kernels/logical.h
@@ -0,0 +1,35 @@
+/* Copyright 2021 The TensorFlow Authors. All Rights Reserved.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+==============================================================================*/
+#ifndef TENSORFLOW_LITE_MICRO_KERNELS_LOGICAL_H_
+#define TENSORFLOW_LITE_MICRO_KERNELS_LOGICAL_H_
+
+#include "tensorflow/lite/c/builtin_op_data.h"
+#include "tensorflow/lite/c/common.h"
+
+namespace tflite {
+// Input/output tensor index.
+extern const int kLogicalInputTensor1;
+extern const int kLogicalInputTensor2;
+extern const int kLogicalOutputTensor;
+
+// Shared evaluation routine used by both LOGICAL_AND and LOGICAL_OR;
+// `func` supplies the boolean operation applied to each element pair
+// (defined in logical_common.cc).
+TfLiteStatus LogicalImpl(TfLiteContext* context, TfLiteNode* node,
+                         bool (*func)(bool, bool));
+
+bool LogicalOr(bool x, bool y);
+bool LogicalAnd(bool x, bool y);
+
+}  // namespace tflite
+
+#endif  // TENSORFLOW_LITE_MICRO_KERNELS_LOGICAL_H_
diff --git a/tensorflow/lite/micro/kernels/logical_common.cc b/tensorflow/lite/micro/kernels/logical_common.cc
new file mode 100644
index 0000000..2612d3a
--- /dev/null
+++ b/tensorflow/lite/micro/kernels/logical_common.cc
@@ -0,0 +1,63 @@
+/* Copyright 2021 The TensorFlow Authors. All Rights Reserved.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+==============================================================================*/ +#include "tensorflow/lite/c/common.h" +#include "tensorflow/lite/kernels/internal/reference/binary_function.h" +#include "tensorflow/lite/kernels/internal/tensor_ctypes.h" +#include "tensorflow/lite/kernels/op_macros.h" +#include "tensorflow/lite/micro/kernels/kernel_util.h" +#include "tensorflow/lite/micro/kernels/logical.h" + +namespace tflite { + +// Input/output tensor index. +const int kLogicalInputTensor1 = 0; +const int kLogicalInputTensor2 = 1; +const int kLogicalOutputTensor = 0; + +TfLiteStatus LogicalImpl(TfLiteContext* context, TfLiteNode* node, + bool (*func)(bool, bool)) { + const TfLiteEvalTensor* input1 = + tflite::micro::GetEvalInput(context, node, kLogicalInputTensor1); + const TfLiteEvalTensor* input2 = + tflite::micro::GetEvalInput(context, node, kLogicalInputTensor2); + TfLiteEvalTensor* output = + tflite::micro::GetEvalOutput(context, node, kLogicalOutputTensor); + + if (tflite::micro::HaveSameShapes(input1, input2)) { + reference_ops::BinaryFunction( + tflite::micro::GetTensorShape(input1), + tflite::micro::GetTensorData(input1), + tflite::micro::GetTensorShape(input2), + tflite::micro::GetTensorData(input2), + tflite::micro::GetTensorShape(output), + tflite::micro::GetTensorData(output), func); + } else { + reference_ops::BroadcastBinaryFunction4DSlow( + tflite::micro::GetTensorShape(input1), + tflite::micro::GetTensorData(input1), + tflite::micro::GetTensorShape(input2), + tflite::micro::GetTensorData(input2), + tflite::micro::GetTensorShape(output), + tflite::micro::GetTensorData(output), func); + } + + return kTfLiteOk; +} + +bool LogicalOr(bool x, bool y) { return x || y; } + +bool LogicalAnd(bool x, bool y) { return x && y; } + +} // namespace tflite diff --git a/tensorflow/lite/micro/kernels/logical_test.cc b/tensorflow/lite/micro/kernels/logical_test.cc new file mode 100644 index 0000000..eeab32b --- /dev/null +++ 
b/tensorflow/lite/micro/kernels/logical_test.cc
@@ -0,0 +1,112 @@
+/* Copyright 2018 The TensorFlow Authors. All Rights Reserved.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+==============================================================================*/
+
+#include "tensorflow/lite/c/builtin_op_data.h"
+#include "tensorflow/lite/c/common.h"
+#include "tensorflow/lite/micro/kernels/kernel_runner.h"
+#include "tensorflow/lite/micro/test_helpers.h"
+#include "tensorflow/lite/micro/testing/micro_test.h"
+
+namespace tflite {
+namespace testing {
+namespace {
+
+// Builds a two-input/one-output node for the given logical op registration,
+// runs prepare + invoke, and compares the bool output element-wise against
+// the expected values.
+void TestLogicalOp(const TFLMRegistration& registration, int* input1_dims_data,
+                   const bool* input1_data, int* input2_dims_data,
+                   const bool* input2_data, int* output_dims_data,
+                   const bool* expected_output_data, bool* output_data) {
+  TfLiteIntArray* input1_dims = IntArrayFromInts(input1_dims_data);
+  TfLiteIntArray* input2_dims = IntArrayFromInts(input2_dims_data);
+  TfLiteIntArray* output_dims = IntArrayFromInts(output_dims_data);
+  const int output_dims_count = ElementCount(*output_dims);
+
+  constexpr int inputs_size = 2;
+  constexpr int outputs_size = 1;
+  constexpr int tensors_size = inputs_size + outputs_size;
+  TfLiteTensor tensors[tensors_size] = {
+      CreateTensor(input1_data, input1_dims),
+      CreateTensor(input2_data, input2_dims),
+      CreateTensor(output_data, output_dims),
+  };
+
+  int inputs_array_data[] = {2, 0, 1};
+  TfLiteIntArray* inputs_array = IntArrayFromInts(inputs_array_data);
+  int outputs_array_data[] = {1, 2};
+  TfLiteIntArray* outputs_array = IntArrayFromInts(outputs_array_data);
+
+  micro::KernelRunner runner(registration, tensors, tensors_size, inputs_array,
+                             outputs_array,
+                             /*builtin_data=*/nullptr);
+
+  TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, runner.InitAndPrepare());
+  TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, runner.Invoke());
+
+  // All test cases in this file use 4-element outputs.
+  TF_LITE_MICRO_EXPECT_EQ(output_dims_count, 4);
+  for (int i = 0; i < output_dims_count; ++i) {
+    TF_LITE_MICRO_EXPECT_EQ(expected_output_data[i], output_data[i]);
+  }
+}
+
+}  // namespace
+}  // namespace testing
+}  // namespace tflite
+
+TF_LITE_MICRO_TESTS_BEGIN
+
+TF_LITE_MICRO_TEST(LogicalOr) {
+  int shape[] = {4, 1, 1, 1, 4};
+  const bool input1[] = {true, false, false, true};
+  const bool input2[] = {true, false, true, false};
+  const bool golden[] = {true, false, true, true};
+  bool output_data[4];
+  tflite::testing::TestLogicalOp(tflite::Register_LOGICAL_OR(), shape, input1,
+                                 shape, input2, shape, golden, output_data);
+}
+
+TF_LITE_MICRO_TEST(BroadcastLogicalOr) {
+  int input1_shape[] = {4, 1, 1, 1, 4};
+  const bool input1[] = {true, false, false, true};
+  int input2_shape[] = {4, 1, 1, 1, 1};
+  const bool input2[] = {false};
+  const bool golden[] = {true, false, false, true};
+  bool output_data[4];
+  tflite::testing::TestLogicalOp(tflite::Register_LOGICAL_OR(), input1_shape,
+                                 input1, input2_shape, input2, input1_shape,
+                                 golden, output_data);
+}
+
+TF_LITE_MICRO_TEST(LogicalAnd) {
+  int shape[] = {4, 1, 1, 1, 4};
+  const bool input1[] = {true, false, false, true};
+  const bool input2[] = {true, false, true, false};
+  const bool golden[] = {true, false, false, false};
+  bool output_data[4];
+  tflite::testing::TestLogicalOp(tflite::Register_LOGICAL_AND(), shape, input1,
+                                 shape, input2, shape, golden, output_data);
+}
+
+TF_LITE_MICRO_TEST(BroadcastLogicalAnd) {
+  int input1_shape[] = {4, 1, 1, 1, 4};
+  const bool input1[] = {true, false, false, true};
+  int input2_shape[] = {4, 1, 1, 1, 1};
+  const bool input2[] = {true};
+  const bool golden[] = {true, false, false, true};
+  bool output_data[4];
+  tflite::testing::TestLogicalOp(tflite::Register_LOGICAL_AND(), input1_shape,
+                                 input1, input2_shape, input2, input1_shape,
+                                 golden, output_data);
+}
+
+TF_LITE_MICRO_TESTS_END
diff --git a/tensorflow/lite/micro/kernels/logistic.cc b/tensorflow/lite/micro/kernels/logistic.cc
new file mode 100644
index 0000000..da2b34f
--- /dev/null
+++ b/tensorflow/lite/micro/kernels/logistic.cc
@@ -0,0 +1,111 @@
+/* Copyright 2021 The TensorFlow Authors. All Rights Reserved.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+==============================================================================*/ + +#include "tensorflow/lite/kernels/internal/reference/integer_ops/logistic.h" + +#include "tensorflow/lite/c/builtin_op_data.h" +#include "tensorflow/lite/c/common.h" +#include "tensorflow/lite/kernels/internal/common.h" +#include "tensorflow/lite/kernels/internal/quantization_util.h" +#include "tensorflow/lite/kernels/internal/reference/logistic.h" +#include "tensorflow/lite/kernels/internal/tensor_ctypes.h" +#include "tensorflow/lite/kernels/kernel_util.h" +#include "tensorflow/lite/kernels/op_macros.h" +#include "tensorflow/lite/micro/kernels/kernel_util.h" +#include "tensorflow/lite/micro/kernels/logistic.h" +#include "tensorflow/lite/micro/micro_log.h" + +namespace tflite { +namespace { + +void* LogisticInit(TfLiteContext* context, const char* buffer, size_t length) { + TFLITE_DCHECK(context->AllocatePersistentBuffer != nullptr); + return context->AllocatePersistentBuffer(context, sizeof(OpDataLogistic)); +} + +TfLiteStatus LogisticEval(TfLiteContext* context, TfLiteNode* node) { + const TfLiteEvalTensor* input = + tflite::micro::GetEvalInput(context, node, kLogisticInputTensor); + TfLiteEvalTensor* output = + tflite::micro::GetEvalOutput(context, node, kLogisticOutputTensor); + + TFLITE_DCHECK(node->user_data != nullptr); + OpDataLogistic* data = static_cast<OpDataLogistic*>(node->user_data); + + if (input->type == kTfLiteFloat32) { + switch (output->type) { + case kTfLiteFloat32: { + reference_ops::Logistic(tflite::micro::GetTensorShape(input), + tflite::micro::GetTensorData<float>(input), + tflite::micro::GetTensorShape(output), + tflite::micro::GetTensorData<float>(output)); + return kTfLiteOk; + } + default: + MicroPrintf("Input %s, output %s not supported.", + TfLiteTypeGetName(input->type), + TfLiteTypeGetName(output->type)); + return kTfLiteError; + } + } else if (input->type == kTfLiteInt16) { + switch (output->type) { + case kTfLiteInt16: { + reference_integer_ops::Logistic( + 
data->input_multiplier, data->input_left_shift, + NumElements(input->dims), + tflite::micro::GetTensorData<int16_t>(input), + tflite::micro::GetTensorData<int16_t>(output)); + return kTfLiteOk; + } + default: + MicroPrintf("Input %s, output %s not supported.", + TfLiteTypeGetName(input->type), + TfLiteTypeGetName(output->type)); + return kTfLiteError; + } + } else if (input->type == kTfLiteInt8) { + switch (output->type) { + case kTfLiteInt8: { + reference_integer_ops::Logistic( + data->input_zero_point, data->input_range_radius, + data->input_multiplier, data->input_left_shift, + NumElements(input->dims), + tflite::micro::GetTensorData<int8_t>(input), + tflite::micro::GetTensorData<int8_t>(output)); + return kTfLiteOk; + } + default: + MicroPrintf("Input %s, output %s not supported.", + TfLiteTypeGetName(input->type), + TfLiteTypeGetName(output->type)); + return kTfLiteError; + } + } else { + // TODO(b/141211002): Also support other data types once we have supported + // temporary tensors in TFLM. + MicroPrintf("Input %s, output %s not supported.", + TfLiteTypeGetName(input->type), + TfLiteTypeGetName(output->type)); + return kTfLiteError; + } + return kTfLiteOk; +} + +} // namespace + +TFLMRegistration Register_LOGISTIC() { + return tflite::micro::RegisterOp(LogisticInit, LogisticPrepare, LogisticEval); +} +} // namespace tflite diff --git a/tensorflow/lite/micro/kernels/logistic.h b/tensorflow/lite/micro/kernels/logistic.h new file mode 100644 index 0000000..1de0cda --- /dev/null +++ b/tensorflow/lite/micro/kernels/logistic.h @@ -0,0 +1,42 @@ +/* Copyright 2021 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. 
+You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#ifndef TENSORFLOW_LITE_MICRO_KERNELS_LOGISTIC_H_ +#define TENSORFLOW_LITE_MICRO_KERNELS_LOGISTIC_H_ + +#include + +#include "tensorflow/lite/c/builtin_op_data.h" +#include "tensorflow/lite/c/common.h" + +namespace tflite { +extern const int kLogisticInputTensor; +extern const int kLogisticOutputTensor; + +struct OpDataLogistic { + int32_t input_zero_point; + int32_t input_range_radius; + int32_t input_multiplier; + int input_left_shift; +}; + +TfLiteStatus CalculateArithmeticOpDataLogistic(TfLiteContext* context, + TfLiteNode* node, + OpDataLogistic* data); + +TfLiteStatus LogisticPrepare(TfLiteContext* context, TfLiteNode* node); + +} // namespace tflite +#endif // TENSORFLOW_LITE_MICRO_KERNELS_LOGISTIC_H_ diff --git a/tensorflow/lite/micro/kernels/logistic_common.cc b/tensorflow/lite/micro/kernels/logistic_common.cc new file mode 100644 index 0000000..a79fd6b --- /dev/null +++ b/tensorflow/lite/micro/kernels/logistic_common.cc @@ -0,0 +1,119 @@ +/* Copyright 2021 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#include "tensorflow/lite/c/builtin_op_data.h" +#include "tensorflow/lite/c/common.h" +#include "tensorflow/lite/kernels/internal/common.h" +#include "tensorflow/lite/kernels/internal/quantization_util.h" +#include "tensorflow/lite/kernels/internal/reference/integer_ops/logistic.h" +#include "tensorflow/lite/kernels/internal/reference/logistic.h" +#include "tensorflow/lite/kernels/internal/tensor_ctypes.h" +#include "tensorflow/lite/kernels/kernel_util.h" +#include "tensorflow/lite/kernels/op_macros.h" +#include "tensorflow/lite/micro/kernels/kernel_util.h" +#include "tensorflow/lite/micro/kernels/logistic.h" + +namespace tflite { +const int kLogisticInputTensor = 0; +const int kLogisticOutputTensor = 0; + +TfLiteStatus CalculateArithmeticOpDataLogistic(TfLiteContext* context, + TfLiteNode* node, + OpDataLogistic* data) { + MicroContext* micro_context = GetMicroContext(context); + + TfLiteTensor* input = + micro_context->AllocateTempInputTensor(node, kLogisticInputTensor); + TF_LITE_ENSURE(context, input != nullptr); + TfLiteTensor* output = + micro_context->AllocateTempOutputTensor(node, kLogisticOutputTensor); + TF_LITE_ENSURE(context, output != nullptr); + + TF_LITE_ENSURE_TYPES_EQ(context, input->type, output->type); + if (input->type == kTfLiteInt8) { + TF_LITE_ENSURE_EQ(context, output->params.zero_point, + std::numeric_limits<int8_t>::min()); + + static constexpr int kInputIntegerBits = 4; + const double input_real_multiplier = + static_cast<double>(input->params.scale) * + static_cast<double>(1 << (31 - kInputIntegerBits)); + + data->input_zero_point = input->params.zero_point; + + const double q = std::frexp(input_real_multiplier, &data->input_left_shift); + data->input_multiplier = static_cast<int32_t>(TfLiteRound(q * (1ll << 31))); + + data->input_range_radius = + CalculateInputRadius(kInputIntegerBits, 
data->input_left_shift, 31); + } + + if (input->type == kTfLiteInt16) { + static constexpr int kInputIntegerBits = 3; + static constexpr int kOutputFractionalBits = 15; + + // See comments in TanhPrepare about requiring zero_point==0 + // and a power-of-two ("POT") scale. + + TF_LITE_ENSURE_EQ(context, input->params.zero_point, 0); + TF_LITE_ENSURE_EQ(context, output->params.zero_point, 0); + + int input_scale_log2_rounded; + bool param_scale_pot = + CheckedLog2(input->params.scale, &input_scale_log2_rounded); + + data->input_left_shift = + (15 - kInputIntegerBits) + input_scale_log2_rounded; + param_scale_pot &= (data->input_left_shift == 0); + + if (param_scale_pot) { + data->input_multiplier = 0; + } else { + // Calculate multiplier to change input scale to 1/(3*4096) + // as required by the table lookup. + // In this scaling +/-2^17 represents +/-10.7 + double multiplier = + static_cast<double>(input->params.scale) * 4096.0 * 3.0; + + data->input_left_shift = 0; + + while (multiplier <= 32767.0 / 2.0 && data->input_left_shift <= 30) { + data->input_left_shift++; + multiplier = multiplier * 2.0; + } + + data->input_multiplier = static_cast<int32_t>(multiplier); + } + + int output_scale_log2_rounded; + TF_LITE_ENSURE( + context, CheckedLog2(output->params.scale, &output_scale_log2_rounded)); + TF_LITE_ENSURE_EQ(context, output_scale_log2_rounded, + -kOutputFractionalBits); + } + + micro_context->DeallocateTempTfLiteTensor(input); + micro_context->DeallocateTempTfLiteTensor(output); + return kTfLiteOk; +} + +TfLiteStatus LogisticPrepare(TfLiteContext* context, TfLiteNode* node) { + TFLITE_DCHECK(node->user_data != nullptr); + OpDataLogistic* data = static_cast<OpDataLogistic*>(node->user_data); + + return CalculateArithmeticOpDataLogistic(context, node, data); +} + +} // namespace tflite diff --git a/tensorflow/lite/micro/kernels/logistic_test.cc b/tensorflow/lite/micro/kernels/logistic_test.cc new file mode 100644 index 0000000..224e4e4 --- /dev/null +++ 
b/tensorflow/lite/micro/kernels/logistic_test.cc @@ -0,0 +1,277 @@ +/* Copyright 2019 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#include "tensorflow/lite/c/builtin_op_data.h" +#include "tensorflow/lite/c/common.h" +#include "tensorflow/lite/micro/kernels/kernel_runner.h" +#include "tensorflow/lite/micro/test_helpers.h" +#include "tensorflow/lite/micro/testing/micro_test.h" + +namespace tflite { +namespace testing { +namespace { + +// The Logistic kernel assumes an output in the range [0, 1.0], leading to these +// quantization parameters. 
+const float quantized_output_scale_int8 = 1.0 / 255.0; +const int quantized_output_zero_point_int8 = -128; + +const int flat_size_basic = 10; +int shape_basic[] = {2, 2, 5}; +const float input_data_basic[] = {1, 2, 3, 4, 5, -1, -2, -3, -4, -5}; +const float golden_basic[] = {0.73105858, 0.88079708, 0.95257413, 0.98201379, + 0.99330715, 0.26894142, 0.11920292, 0.04742587, + 0.01798621, 0.00669285}; + +const int flat_size_wide_range = 10; +int shape_wide_range[] = {2, 1, 5}; +const float input_data_wide_range[]{ + 1.0, 2.0, 3.0, 4.0, 93.0, -1.0, -2.0, -3.0, -4.0, -93.0, +}; +const float golden_wide_range[] = { + 0.73105858, 0.88079708, 0.95257413, 0.98201379, 1.0, + 0.26894142, 0.11920292, 0.04742587, 0.01798621, 0.0, +}; + +// Test vector and expected results are directly ported from TensorFlow Lite's +// int16 logistic test. +constexpr int int16_vec_size = 177; + +int shape_int16_vec[] = {2, 1, int16_vec_size}; + +const float int16_input_vec_fp[int16_vec_size] = { + -20.0000000000, -19.7727272727, -19.5454545455, -19.3181818182, + -19.0909090909, -18.8636363636, -18.6363636364, -18.4090909091, + -18.1818181818, -17.9545454545, -17.7272727273, -17.5000000000, + -17.2727272727, -17.0454545455, -16.8181818182, -16.5909090909, + -16.3636363636, -16.1363636364, -15.9090909091, -15.6818181818, + -15.4545454545, -15.2272727273, -15.0000000000, -14.7727272727, + -14.5454545455, -14.3181818182, -14.0909090909, -13.8636363636, + -13.6363636364, -13.4090909091, -13.1818181818, -12.9545454545, + -12.7272727273, -12.5000000000, -12.2727272727, -12.0454545455, + -11.8181818182, -11.5909090909, -11.3636363636, -11.1363636364, + -10.9090909091, -10.6818181818, -10.4545454545, -10.2272727273, + -10.0000000000, -9.7727272727, -9.5454545455, -9.3181818182, + -9.0909090909, -8.8636363636, -8.6363636364, -8.4090909091, + -8.1818181818, -7.9545454545, -7.7272727273, -7.5000000000, + -7.2727272727, -7.0454545455, -6.8181818182, -6.5909090909, + -6.3636363636, -6.1363636364, 
-5.9090909091, -5.6818181818, + -5.4545454545, -5.2272727273, -5.0000000000, -4.7727272727, + -4.5454545455, -4.3181818182, -4.0909090909, -3.8636363636, + -3.6363636364, -3.4090909091, -3.1818181818, -2.9545454545, + -2.7272727273, -2.5000000000, -2.2727272727, -2.0454545455, + -1.8181818182, -1.5909090909, -1.3636363636, -1.1363636364, + -0.9090909091, -0.6818181818, -0.4545454545, -0.2272727273, + 0.0000000000, 0.2272727273, 0.4545454545, 0.6818181818, + 0.9090909091, 1.1363636364, 1.3636363636, 1.5909090909, + 1.8181818182, 2.0454545455, 2.2727272727, 2.5000000000, + 2.7272727273, 2.9545454545, 3.1818181818, 3.4090909091, + 3.6363636364, 3.8636363636, 4.0909090909, 4.3181818182, + 4.5454545455, 4.7727272727, 5.0000000000, 5.2272727273, + 5.4545454545, 5.6818181818, 5.9090909091, 6.1363636364, + 6.3636363636, 6.5909090909, 6.8181818182, 7.0454545455, + 7.2727272727, 7.5000000000, 7.7272727273, 7.9545454545, + 8.1818181818, 8.4090909091, 8.6363636364, 8.8636363636, + 9.0909090909, 9.3181818182, 9.5454545455, 9.7727272727, + 10.0000000000, 10.2272727273, 10.4545454545, 10.6818181818, + 10.9090909091, 11.1363636364, 11.3636363636, 11.5909090909, + 11.8181818182, 12.0454545455, 12.2727272727, 12.5000000000, + 12.7272727273, 12.9545454545, 13.1818181818, 13.4090909091, + 13.6363636364, 13.8636363636, 14.0909090909, 14.3181818182, + 14.5454545455, 14.7727272727, 15.0000000000, 15.2272727273, + 15.4545454545, 15.6818181818, 15.9090909091, 16.1363636364, + 16.3636363636, 16.5909090909, 16.8181818182, 17.0454545455, + 17.2727272727, 17.5000000000, 17.7272727273, 17.9545454545, + 18.1818181818, 18.4090909091, 18.6363636364, 18.8636363636, + 19.0909090909, 19.3181818182, 19.5454545455, 19.7727272727, + 20.0000000000}; + +const float int16_golden_vec_fp[int16_vec_size] = { + 0.0000000021, 0.0000000026, 0.0000000032, 0.0000000041, 0.0000000051, + 0.0000000064, 0.0000000081, 0.0000000101, 0.0000000127, 0.0000000159, + 0.0000000200, 0.0000000251, 0.0000000315, 0.0000000396, 
0.0000000497, + 0.0000000623, 0.0000000782, 0.0000000982, 0.0000001232, 0.0000001547, + 0.0000001942, 0.0000002437, 0.0000003059, 0.0000003840, 0.0000004819, + 0.0000006049, 0.0000007593, 0.0000009530, 0.0000011962, 0.0000015014, + 0.0000018846, 0.0000023654, 0.0000029690, 0.0000037266, 0.0000046776, + 0.0000058711, 0.0000073693, 0.0000092497, 0.0000116100, 0.0000145724, + 0.0000182909, 0.0000229581, 0.0000288162, 0.0000361690, 0.0000453979, + 0.0000569815, 0.0000715205, 0.0000897689, 0.0001126729, 0.0001414198, + 0.0001774998, 0.0002227827, 0.0002796147, 0.0003509396, 0.0004404502, + 0.0005527786, 0.0006937345, 0.0008706021, 0.0010925128, 0.0013709094, + 0.0017201256, 0.0021581065, 0.0027073042, 0.0033957870, 0.0042586071, + 0.0053394826, 0.0066928509, 0.0083863576, 0.0105038445, 0.0131488902, + 0.0164489307, 0.0205599431, 0.0256715863, 0.0320125562, 0.0398556989, + 0.0495221198, 0.0613831074, 0.0758581800, 0.0934070047, 0.1145124805, + 0.1396521834, 0.1692560327, 0.2036499335, 0.2429886272, 0.2871859014, + 0.3358556241, 0.3882805886, 0.4434251301, 0.5000000000, 0.5565748699, + 0.6117194114, 0.6641443759, 0.7128140986, 0.7570113728, 0.7963500665, + 0.8307439673, 0.8603478166, 0.8854875195, 0.9065929953, 0.9241418200, + 0.9386168926, 0.9504778802, 0.9601443011, 0.9679874438, 0.9743284137, + 0.9794400569, 0.9835510693, 0.9868511098, 0.9894961555, 0.9916136424, + 0.9933071491, 0.9946605174, 0.9957413929, 0.9966042130, 0.9972926958, + 0.9978418935, 0.9982798744, 0.9986290906, 0.9989074872, 0.9991293979, + 0.9993062655, 0.9994472214, 0.9995595498, 0.9996490604, 0.9997203853, + 0.9997772173, 0.9998225002, 0.9998585802, 0.9998873271, 0.9999102311, + 0.9999284795, 0.9999430185, 0.9999546021, 0.9999638310, 0.9999711838, + 0.9999770419, 0.9999817091, 0.9999854276, 0.9999883900, 0.9999907503, + 0.9999926307, 0.9999941289, 0.9999953224, 0.9999962734, 0.9999970310, + 0.9999976346, 0.9999981154, 0.9999984986, 0.9999988038, 0.9999990470, + 0.9999992407, 0.9999993951, 
0.9999995181, 0.9999996160, 0.9999996941, + 0.9999997563, 0.9999998058, 0.9999998453, 0.9999998768, 0.9999999018, + 0.9999999218, 0.9999999377, 0.9999999503, 0.9999999604, 0.9999999685, + 0.9999999749, 0.9999999800, 0.9999999841, 0.9999999873, 0.9999999899, + 0.9999999919, 0.9999999936, 0.9999999949, 0.9999999959, 0.9999999968, + 0.9999999974, 0.9999999979}; + +template <typename T> +void ValidateLogisticGoldens(TfLiteTensor* tensors, const int tensor_count, + T* output_data, const T* golden, + int output_dims_count, float tolerance) { + int inputs_array_data[] = {1, 0}; + TfLiteIntArray* inputs_array = IntArrayFromInts(inputs_array_data); + int outputs_array_data[] = {1, 1}; + TfLiteIntArray* outputs_array = IntArrayFromInts(outputs_array_data); + + const TFLMRegistration registration = tflite::Register_LOGISTIC(); + micro::KernelRunner runner(registration, tensors, tensor_count, inputs_array, + outputs_array, nullptr); + + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, runner.InitAndPrepare()); + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, runner.Invoke()); + + for (int i = 0; i < output_dims_count; ++i) { + TF_LITE_MICRO_EXPECT_NEAR(golden[i], output_data[i], tolerance); + } +} + +void TestLogisticFloat(int* input_dims_data, const float* input_data, + const float* golden, int* output_dims_data, + float* output_data) { + TfLiteIntArray* input_dims = IntArrayFromInts(input_dims_data); + TfLiteIntArray* output_dims = IntArrayFromInts(output_dims_data); + const int output_elements_count = ElementCount(*output_dims); + + constexpr int inputs_size = 1; + constexpr int outputs_size = 1; + constexpr int tensors_size = inputs_size + outputs_size; + TfLiteTensor tensors[tensors_size] = { + CreateTensor(input_data, input_dims), + CreateTensor(output_data, output_dims), + }; + + ValidateLogisticGoldens(tensors, tensors_size, output_data, golden, + output_elements_count, 1e-5); +} + +template <typename T> +void TestLogisticQuantized(int* input_dims_data, const float* input_data, + T* input_quantized, const float 
input_scale, + const int input_zero_point, const float* golden, + T* golden_quantized, int* output_dims_data, + const float output_scale, + const int output_zero_point, T* output_data, + float tolerance) { + TfLiteIntArray* input_dims = IntArrayFromInts(input_dims_data); + TfLiteIntArray* output_dims = IntArrayFromInts(output_dims_data); + const int output_elements_count = ElementCount(*output_dims); + + constexpr int inputs_size = 1; + constexpr int outputs_size = 1; + constexpr int tensors_size = inputs_size + outputs_size; + TfLiteTensor tensors[tensors_size] = { + CreateQuantizedTensor(input_data, input_quantized, input_dims, + input_scale, input_zero_point), + CreateQuantizedTensor(output_data, output_dims, output_scale, + output_zero_point), + }; + + tflite::Quantize(golden, golden_quantized, output_elements_count, + output_scale, output_zero_point); + ValidateLogisticGoldens(tensors, tensors_size, output_data, golden_quantized, + output_elements_count, tolerance); +} + +} // namespace +} // namespace testing +} // namespace tflite + +TF_LITE_MICRO_TESTS_BEGIN + +TF_LITE_MICRO_TEST(LogisticFloatBasicShouldMatchGolden) { + float output_data[tflite::testing::flat_size_basic]; + tflite::testing::TestLogisticFloat( + tflite::testing::shape_basic, tflite::testing::input_data_basic, + tflite::testing::golden_basic, tflite::testing::shape_basic, output_data); +} + +TF_LITE_MICRO_TEST(LogisticQuantizedInt8BasicShouldMatchGolden) { + const float input_scale = 0.1; + const int input_zero_point = 0; + int8_t input_quantized[tflite::testing::flat_size_basic]; + int8_t golden_quantized[tflite::testing::flat_size_basic]; + int8_t output_data[tflite::testing::flat_size_basic]; + + tflite::testing::TestLogisticQuantized( + tflite::testing::shape_basic, tflite::testing::input_data_basic, + input_quantized, input_scale, input_zero_point, + tflite::testing::golden_basic, golden_quantized, + tflite::testing::shape_basic, + tflite::testing::quantized_output_scale_int8, + 
tflite::testing::quantized_output_zero_point_int8, output_data, 1.0f); +} + +TF_LITE_MICRO_TEST(LogisticFloatWideRangeShouldMatchGolden) { + float output_data[tflite::testing::flat_size_wide_range]; + tflite::testing::TestLogisticFloat( + tflite::testing::shape_wide_range, tflite::testing::input_data_wide_range, + tflite::testing::golden_wide_range, tflite::testing::shape_wide_range, + output_data); +} + +TF_LITE_MICRO_TEST(LogisticQuantizedInt8WideRangeShouldMatchGolden) { + const float input_scale = 1.0; + const int input_zero_point = 0; + int8_t input_quantized[tflite::testing::flat_size_wide_range]; + int8_t golden_quantized[tflite::testing::flat_size_wide_range]; + int8_t output_data[tflite::testing::flat_size_wide_range]; + + tflite::testing::TestLogisticQuantized( + tflite::testing::shape_wide_range, tflite::testing::input_data_wide_range, + input_quantized, input_scale, input_zero_point, + tflite::testing::golden_wide_range, golden_quantized, + tflite::testing::shape_wide_range, + tflite::testing::quantized_output_scale_int8, + tflite::testing::quantized_output_zero_point_int8, output_data, 1.0f); +} + +TF_LITE_MICRO_TEST(LogisticQuantizedInt16ShouldMatchGolden) { + const float input_scale = 32.f / 65536.f; + const int input_zero_point = 0; + const float output_scale = 2.f / 65536.f; + const int output_zero_point = 0; + int16_t input_quantized[tflite::testing::int16_vec_size]; + int16_t golden_quantized[tflite::testing::int16_vec_size]; + int16_t output_data[tflite::testing::int16_vec_size]; + + tflite::testing::TestLogisticQuantized( + tflite::testing::shape_int16_vec, tflite::testing::int16_input_vec_fp, + input_quantized, input_scale, input_zero_point, + tflite::testing::int16_golden_vec_fp, golden_quantized, + tflite::testing::shape_int16_vec, output_scale, output_zero_point, + output_data, 16.0f); +} + +TF_LITE_MICRO_TESTS_END diff --git a/tensorflow/lite/micro/kernels/lstm_eval.cc b/tensorflow/lite/micro/kernels/lstm_eval.cc new file mode 100644 index 
0000000..93d6bc7 --- /dev/null +++ b/tensorflow/lite/micro/kernels/lstm_eval.cc @@ -0,0 +1,295 @@ +/* Copyright 2023 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ +#include "tensorflow/lite/micro/kernels/lstm_eval.h" + +#include + +#include "tensorflow/lite/kernels/internal/reference/fully_connected.h" +#include "tensorflow/lite/kernels/internal/reference/integer_ops/fully_connected.h" +#include "tensorflow/lite/kernels/internal/reference/integer_ops/logistic.h" +#include "tensorflow/lite/kernels/internal/reference/integer_ops/mul.h" +#include "tensorflow/lite/kernels/internal/reference/integer_ops/tanh.h" +#include "tensorflow/lite/kernels/internal/reference/logistic.h" +#include "tensorflow/lite/kernels/internal/reference/mul.h" +#include "tensorflow/lite/kernels/internal/reference/tanh.h" +#include "tensorflow/lite/kernels/internal/types.h" + +namespace tflite { + +LstmTensors::LstmTensors(TfLiteContext* context, TfLiteNode* node) { + micro_context_ = GetMicroContext(context); + // 24 internal tensors. 
see lstm_shared.h for tensor names + for (size_t i = 0; i < 24; i++) { + internal_tensors_[i] = micro_context_->AllocateTempInputTensor(node, i); + } + output_tensor_ = + micro_context_->AllocateTempOutputTensor(node, kLstmOutputTensor); +} + +LstmTensors::~LstmTensors() { + for (size_t i = 0; i < 24; i++) { + if (internal_tensors_[i] != nullptr) { + micro_context_->DeallocateTempTfLiteTensor(internal_tensors_[i]); + } + } + micro_context_->DeallocateTempTfLiteTensor(output_tensor_); +} + +// Verify the LSTM internal tensor properties (e.g., type checks) +// Input/output/states/fc weights tensors are required for kernel evaulation. +// The state tensors should be variables. Variants of the standard LSTM +// are not supported here, therefore their corresponding tensors should be +// invalid +TfLiteStatus LstmTensors::ValidateTensorStatus(TfLiteContext* context) const { + // Verify certain tensor properties + // input tensor + TF_LITE_ENSURE(context, internal_tensors_[kLstmInputTensor] != nullptr); + // hidden state + TF_LITE_ENSURE(context, internal_tensors_[kLstmOutputStateTensor] != nullptr); + TF_LITE_ENSURE(context, + internal_tensors_[kLstmOutputStateTensor]->is_variable); + // hidden state becomes input so they must have the same type + TF_LITE_ENSURE_EQ(context, internal_tensors_[kLstmOutputStateTensor]->type, + internal_tensors_[kLstmInputTensor]->type); + // cell state + TF_LITE_ENSURE(context, internal_tensors_[kLstmCellStateTensor] != nullptr); + TF_LITE_ENSURE(context, internal_tensors_[kLstmCellStateTensor]->is_variable); + // output + TF_LITE_ENSURE(context, output_tensor_ != nullptr); + // output type is the same as the input type (activations) + TF_LITE_ENSURE_EQ(context, output_tensor_->type, + internal_tensors_[kLstmInputTensor]->type); + + // weight tensors (1-9, see lstm_shared for index definition) + const auto weight_type = + internal_tensors_[kLstmInputToForgetWeightsTensor]->type; + for (size_t i = 1; i < 9; i++) { + TF_LITE_ENSURE(context, 
internal_tensors_[i] != nullptr); + TF_LITE_ENSURE_EQ(context, internal_tensors_[i]->type, weight_type); + } + + // bias tensors (12-15, see lstm_shared for index definition) + const auto bias_type = internal_tensors_[kLstmForgetGateBiasTensor]->type; + for (size_t i = 12; i < 16; i++) { + TF_LITE_ENSURE(context, internal_tensors_[i] != nullptr); + TF_LITE_ENSURE_EQ(context, internal_tensors_[i]->type, bias_type); + } + // Tensors from LSTM variants are invalid + // No peephole + for (size_t i = 9; i < 12; i++) { + TF_LITE_ENSURE(context, internal_tensors_[i] == nullptr); + } + // No projection + for (size_t i = 16; i < 18; i++) { + TF_LITE_ENSURE(context, internal_tensors_[i] == nullptr); + } + // No internal layer norm + for (size_t i = 20; i < 24; i++) { + TF_LITE_ENSURE(context, internal_tensors_[i] == nullptr); + } + return kTfLiteOk; +} + +namespace lstm_internal { + +const int32_t kInt16Max = std::numeric_limits<int16_t>::max(); +const int32_t kInt16Min = std::numeric_limits<int16_t>::min(); + +void AddElementWise(const int16_t* input_1, const int16_t* input_2, int n_batch, + int n_input, int16_t* output) { + for (int batch = 0; batch < n_batch; ++batch) { + for (int i = 0; i < n_input; ++i) { + const int index = batch * n_input + i; + int32_t sum = input_1[index] + input_2[index]; + const int32_t sum_clamped = std::min(kInt16Max, std::max(kInt16Min, sum)); + output[index] = static_cast<int16_t>(sum_clamped); + } + } +} + +void AddElementWise(const float* input_1, const float* input_2, int n_batch, + int n_input, float* output) { + for (int batch = 0; batch < n_batch; ++batch) { + for (int i = 0; i < n_input; ++i) { + const int index = batch * n_input + i; + output[index] = input_1[index] + input_2[index]; + } + } +} + +void Sigmoid(const RuntimeShape& data_shape, int16_t* data) { + reference_integer_ops::Logistic( + 0 /*data->input_multiplier*/, 0 /*data->input_left_shift */, + data_shape.FlatSize() /*NumElements(input->dims)*/, + data /* tflite::micro::GetTensorData<int16_t>(input) */, + 
data /*tflite::micro::GetTensorData(output) */); +} + +void Sigmoid(const RuntimeShape& data_shape, float* data) { + reference_ops::Logistic(data_shape, data, data_shape, data); +} + +void Tanh(int32_t cell_state_scale_power, const RuntimeShape& input_data_shape, + int16_t* input_data, const RuntimeShape& output_data_shape, + int16_t* output_data) { + int32_t tanh_input_left_shift = (15 + cell_state_scale_power) - 3; + int32_t input_multiplier = 0; + if (tanh_input_left_shift < 0) /* handling negative shift value */ + { + tanh_input_left_shift = -tanh_input_left_shift; + input_multiplier = 3; + } + reference_integer_ops::Tanh(input_multiplier, tanh_input_left_shift, + input_data_shape, input_data, output_data_shape, + output_data); +} + +void Tanh(int32_t cell_state_scale_power, const RuntimeShape& input_data_shape, + float* input_data, const RuntimeShape& output_data_shape, + float* output_data) { + reference_ops::Tanh(input_data_shape, input_data, output_data_shape, + output_data); +} + +// Input and output have the same shape in LSTM +void Mul(const RuntimeShape& shape, const ArithmeticParams& params, + const int16_t* input1_data, const int16_t* input2_data, + int8_t* output_data) { + return reference_integer_ops::MulElementwise( + shape.FlatSize(), params, input1_data, input2_data, output_data); +} + +// Input and output have the same shape in LSTM +void Mul(const RuntimeShape& shape, const ArithmeticParams& params, + const int16_t* input1_data, const int16_t* input2_data, + int16_t* output_data) { + return reference_integer_ops::MulElementwise( + shape.FlatSize(), params, input1_data, input2_data, output_data); +} + +// Input and output have the same shape in LSTM +void Mul(const RuntimeShape& shape, const ArithmeticParams& params, + const float* input1_data, const float* input2_data, + float* output_data) { + return reference_ops::Mul(params, shape, input1_data, shape, input2_data, + shape, output_data); +} + +void FullyConnected(const FullyConnectedParams& 
params, + const RuntimeShape& input_shape, const int8_t* input_data, + const RuntimeShape& filter_shape, const int8_t* filter_data, + const RuntimeShape& bias_shape, const int32_t* bias_data, + const RuntimeShape& output_shape, int16_t* output_data) { + return tflite::reference_integer_ops::FullyConnected( + params, input_shape, input_data, filter_shape, filter_data, bias_shape, + bias_data, output_shape, output_data); +} + +void FullyConnected(const FullyConnectedParams& params, + const RuntimeShape& input_shape, const int16_t* input_data, + const RuntimeShape& filter_shape, const int8_t* filter_data, + const RuntimeShape& bias_shape, const int64_t* bias_data, + const RuntimeShape& output_shape, int16_t* output_data) { + return tflite::reference_integer_ops::FullyConnected( + params, input_shape, input_data, filter_shape, filter_data, bias_shape, + bias_data, output_shape, output_data); +} + +void FullyConnected(const FullyConnectedParams& params, + const RuntimeShape& input_shape, const float* input_data, + const RuntimeShape& filter_shape, const float* filter_data, + const RuntimeShape& bias_shape, const float* bias_data, + const RuntimeShape& output_shape, float* output_data) { + return tflite::reference_ops::FullyConnected( + params, input_shape, input_data, filter_shape, filter_data, bias_shape, + bias_data, output_shape, output_data); +} + +void Clipping(const int v_size, const CellStateInfo& cell_state_info, + int16_t* vector) { + for (int i = 0; i < v_size; i++) { + vector[i] = + std::max(std::min(cell_state_info.quantized_cell_clip, vector[i]), + static_cast<int16_t>(-cell_state_info.quantized_cell_clip)); + } +} + +void Clipping(const int v_size, const CellStateInfo& cell_state_info, + float* vector) { + for (int i = 0; i < v_size; i++) { + vector[i] = std::max(std::min(cell_state_info.cell_clip, vector[i]), + -cell_state_info.cell_clip); + } +} + +// Increment the data offset so the single time step invocation call can access +// the corresponding input/output 
tensor data at the time step +void LstmStepManager::UpdateTime() { + current_time_ += 1; + TFLITE_DCHECK_LE(current_time_, size_info_.time_steps); + // default as one batch per inference + int input_step = size_info_.input_dimension; + int output_step = size_info_.state_dimension; + // time major: batch inference + if (size_info_.time_major) { + input_step = input_step * size_info_.batch_size; + output_step = output_step * size_info_.batch_size; + } + + input_offset_ += input_step; + output_offset_ += output_step; +} + +// Increment the data offset so the single time step invocation call can access +// the corresponding hidden/cell state tensor data at the time step (for single +// batch inference only) +void LstmStepManager::UpdateBatch() { + current_batch_ += 1; + TFLITE_DCHECK_LE(current_batch_, size_info_.batch_size); + // batch inference for time major: no action needed + if (size_info_.time_major) { + return; + } + // otherwise: single batch inference, go to the next batch + hidden_state_offset_ += size_info_.state_dimension; + cell_state_offset_ += size_info_.state_dimension; +} + +// Input shape for each single time LSTM invocation. +// Multi-batch for time_major input +RuntimeShape LstmStepManager::InputShape() const { + int batch_size = 1; + if (size_info_.time_major) { + batch_size = size_info_.batch_size; + } + const int dims[2] = {batch_size, size_info_.input_dimension}; + const int32_t* dims_data = reinterpret_cast<const int32_t*>(dims); + return RuntimeShape(2, dims_data); +} + +// State shape (both hidden and cell) for each single time LSTM invocation. 
+// Multi-batch for time_major input +RuntimeShape LstmStepManager::StateShape() const { + int batch_size = 1; + if (size_info_.time_major) { + batch_size = size_info_.batch_size; + } + const int dims[2] = {batch_size, size_info_.state_dimension}; + const int32_t* dims_data = reinterpret_cast<const int32_t*>(dims); + return RuntimeShape(2, dims_data); +} + +} // namespace lstm_internal +} // namespace tflite diff --git a/tensorflow/lite/micro/kernels/lstm_eval.h b/tensorflow/lite/micro/kernels/lstm_eval.h new file mode 100644 index 0000000..62bc635 --- /dev/null +++ b/tensorflow/lite/micro/kernels/lstm_eval.h @@ -0,0 +1,541 @@ +/* Copyright 2023 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +// Functions to perform integer evaluation for standard LSTM (e.g., defined in +// the keras lstm layer, no peephole etc.). Currently used by the 16 bits +// activation case only + +#ifndef TENSORFLOW_LITE_MICRO_KERNELS_LSTM_EVAL_GENERAL_H_ +#define TENSORFLOW_LITE_MICRO_KERNELS_LSTM_EVAL_GENERAL_H_ +#include <algorithm> +#include <cstdint> + +#include "tensorflow/lite/c/builtin_op_data.h" +#include "tensorflow/lite/c/common.h" +#include "tensorflow/lite/micro/kernels/kernel_util.h" +#include "tensorflow/lite/micro/kernels/lstm_shared.h" +#include "tensorflow/lite/micro/micro_log.h" + +namespace tflite { + +// Interface to access all the TempTfLiteTensors of the LSTM kernel during the +// preparation phase. 
Can only be constructed through the constructor to avoid +// memory leakage. All TempTfLiteTensors will be deallocated through the +// destructor. +class LstmTensors { + public: + LstmTensors(const LstmTensors& other) = delete; + LstmTensors& operator=(const LstmTensors& other) = delete; + + LstmTensors(TfLiteContext* context, TfLiteNode* node); + ~LstmTensors(); + + // Verify the LSTM internal tensor properties (e.g., type checks) + // Input/output/states/fc weights tensors are required for kernel evaulation. + // The state tensors should be variables. Variants of the standard LSTM + // are not supported here, therefore their corresponding tensors should be + // invalid + TfLiteStatus ValidateTensorStatus(TfLiteContext* context) const; + + // Internal tensors. see lstm_shared.h for tensor names + const TfLiteTensor* GetInternalTensor(const int tensor_index) const { + return internal_tensors_[tensor_index]; + } + + const TfLiteTensor* HiddenStateTensor() const { + return internal_tensors_[kLstmOutputStateTensor]; + } + const TfLiteTensor* CellStateTensor() const { + return internal_tensors_[kLstmCellStateTensor]; + } + const TfLiteTensor* OutputTensor() const { return output_tensor_; } + + private: + // see lstm_shared.h for tensor names + MicroContext* micro_context_; + TfLiteTensor* internal_tensors_[24]; + TfLiteTensor* output_tensor_; +}; + +// Deduce the size information (Batch (B), Time Steps (T), Input dimension (I), +// State dimension (S)) that defines the LSTM using the input and hidden state +// tensor +LstmSizeInfo CreateLstmSizeInfo( + const bool time_major, const TfLiteIntArray* input_tensor_shape, + const TfLiteIntArray* hidden_state_tensor_shape); + +TfLiteStatus ValidateWeightTensorSize(TfLiteContext* context, + const TfLiteTensor* tensor, int dim1_size, + int dim2_size); + +TfLiteStatus ValidateBiasTensorSize(TfLiteContext* context, + const TfLiteTensor* tensor, int size); + +// Go through every tensors and make sure their shape match the kernel 
+// configuration +TfLiteStatus ValidateTensorSize(TfLiteContext* context, + const LstmTensors& tensors, + const LstmSizeInfo& size_info); + +// Wrapper function to create gate parameters for the four internal LSTM gates +TfLiteStatus CreateGateParams( + TfLiteContext* context, + /*Input tensors*/ + const TfLiteTensor* input, const TfLiteTensor* input_weight, + const TfLiteTensor* input_bias, + /*Hidden state tensors*/ + const TfLiteTensor* hidden_state, const TfLiteTensor* hidden_state_weight, + const TfLiteTensor* hidden_state_bias, + /*Scale of the fc output (input to non-linear activation)*/ + const float nonlinear_activation_input_scale, const TfLiteType cell_type, + const tflite::GateParameters& gate_params); + +// Create parameters for element wise multiplication that happens in a) cell +// state update ; b) hidden state update +// Note that all the output of gates are symmetrically quantized so only scales +// are required for input. However, during the hidden state update phase, the +// output is the updated hidden state, which is asymmetrically quantized. 
Thus +// output may require zero point +tflite::ArithmeticParams CreateInterGateMulParams(const float input1_scale, + const float input2_scale, + const float output_scale, + const TfLiteType output_type, + const int output_zp = 0); + +// Create the additional information about the cell state, which include: +// cell_state_scale_power: used in integer nonlinear function (e.g., tanh) +// quantized_cell_clip: quantized cell clip range +CellStateInfo CreateLstmCellStateInfo(const float cell_state_scale, + const float cell_clip); + +CellStateInfo CreateLstmCellStateInfoFloat(const float cell_clip); +tflite::FullyConnectedParams CreateFCParamsFloat(); + +tflite::GateParameters CreateGateParamsFloat(); + +tflite::ArithmeticParams CreateInterGateMulParamsFloat(); + +TfLiteStatus PrepareGateParametersFloat(TfLiteContext* context, + const LstmTensors& lstm_tensors, + OpDataLSTM* op_data_lstm); + +TfLiteStatus PrepareGateParametersInteger(TfLiteContext* context, + const LstmTensors& lstm_tensors, + OpDataLSTM* op_data_lstm); + +LSTMKernelContents CreateLSTMKernelContent(TfLiteContext* context, + TfLiteNode* node); + +template <typename CellType> +LSTMBuffers<CellType> CreateLSTMBuffers(TfLiteContext* context, + const int* buffer_indices) { + LSTMBuffers<CellType> buffers; + buffers.buffer0 = reinterpret_cast<CellType*>( + context->GetScratchBuffer(context, buffer_indices[0])); + buffers.buffer1 = reinterpret_cast<CellType*>( + context->GetScratchBuffer(context, buffer_indices[1])); + buffers.buffer2 = reinterpret_cast<CellType*>( + context->GetScratchBuffer(context, buffer_indices[2])); + buffers.buffer3 = reinterpret_cast<CellType*>( + context->GetScratchBuffer(context, buffer_indices[3])); + return buffers; +} + +// Since LSTM includes multiple intermediate stages, introducing the internal +// namespace to expose them for testing +namespace lstm_internal { + +void Sigmoid(const RuntimeShape& data_shape, int16_t* data); + +void Sigmoid(const RuntimeShape& data_shape, float* data); + +void Tanh(int32_t cell_state_scale_power, const RuntimeShape& 
input_data_shape, + int16_t* input_data, const RuntimeShape& output_data_shape, + int16_t* output_data); + +void Tanh(int32_t cell_state_scale_power, const RuntimeShape& input_data_shape, + float* input_data, const RuntimeShape& output_data_shape, + float* output_data); + +void Mul(const RuntimeShape& shape, const ArithmeticParams& params, + const int16_t* input1_data, const int16_t* input2_data, + int8_t* output_data); + +void Mul(const RuntimeShape& shape, const ArithmeticParams& params, + const int16_t* input1_data, const int16_t* input2_data, + int16_t* output_data); + +void Mul(const RuntimeShape& shape, const ArithmeticParams& params, + const float* input1_data, const float* input2_data, + float* output_data); + +void FullyConnected(const FullyConnectedParams& params, + const RuntimeShape& input_shape, const int8_t* input_data, + const RuntimeShape& filter_shape, const int8_t* filter_data, + const RuntimeShape& bias_shape, const int32_t* bias_data, + const RuntimeShape& output_shape, int16_t* output_data); + +void FullyConnected(const FullyConnectedParams& params, + const RuntimeShape& input_shape, const int16_t* input_data, + const RuntimeShape& filter_shape, const int8_t* filter_data, + const RuntimeShape& bias_shape, const int64_t* bias_data, + const RuntimeShape& output_shape, int16_t* output_data); + +void FullyConnected(const FullyConnectedParams& params, + const RuntimeShape& input_shape, const float* input_data, + const RuntimeShape& filter_shape, const float* filter_data, + const RuntimeShape& bias_shape, const float* bias_data, + const RuntimeShape& output_shape, float* output_data); + +void AddElementWise(const int16_t* input_1, const int16_t* input_2, int n_batch, + int n_input, int16_t* output); + +void AddElementWise(const float* input_1, const float* input_2, int n_batch, + int n_input, float* output); + +void Clipping(const int v_size, const CellStateInfo& cell_state_info, + int16_t* vector); + +void Clipping(const int v_size, const 
CellStateInfo& cell_state_info, + float* vector); + +// Manages the slice position (offset), slice length (sliced tensor shape), +// and update rules for input/output/hidden state/cell state tensors at each +// time step. +class LstmStepManager { + public: + LstmStepManager() = delete; + // Does not take any ownership, and all pointers must refer to valid objects + // that outlive the one constructed. + explicit LstmStepManager(const LstmSizeInfo* size_info) + : size_info_(*size_info) {} + + void UpdateTime(); + void UpdateBatch(); + + void ResetTime() { current_time_ = 0; } + RuntimeShape InputShape() const; + RuntimeShape StateShape() const; + + int InputOffset() const { return input_offset_; } + int OutputOffset() const { return output_offset_; } + int HiddenStateOffset() const { return hidden_state_offset_; } + int CellStateOffset() const { return cell_state_offset_; } + + private: + int current_time_ = 0; + int current_batch_ = 0; + int input_offset_ = 0; + int output_offset_ = 0; + int hidden_state_offset_ = 0; + int cell_state_offset_ = 0; + // Sizeinfo is from LstmOpData, which reside in the memory arena + // (guarante to outlast LSTMStepManager, which reside in stack) + const LstmSizeInfo& size_info_; +}; + +// Calculates a single LSTM gate. 
+// Implements the following formula: +// gate = activate(FC(input) + FC(recurrent)) +// Activation is sigmoid except for the "cell" gate (configurable, usually tanh) +template +void CalculateLstmGate( + const LstmStepManager& step_info, const GateParameters& gate_params, + // Input FC + const TfLiteEvalTensor* input, const TfLiteEvalTensor* input_weight, + const TfLiteEvalTensor* input_bias, + // Recurrent FC + const TfLiteEvalTensor* recurrent, const TfLiteEvalTensor* recurrent_weight, + const TfLiteEvalTensor* recurrent_bias, + // Output + CellType* gate_output, + // Scratch arrays + CellType* fc_output_buffer, const TfLiteFusedActivation activation) { + const auto gate_output_shape = step_info.StateShape(); + // Check offset validity to avoid memory overflow + TFLITE_DCHECK_LE(step_info.InputOffset() + step_info.InputShape().FlatSize(), + tflite::micro::GetTensorShape(input).FlatSize()); + TFLITE_DCHECK_LE( + step_info.HiddenStateOffset() + step_info.StateShape().FlatSize(), + tflite::micro::GetTensorShape(recurrent).FlatSize()); + + // Input FC + FullyConnected(gate_params.input_fc_params, step_info.InputShape(), + tflite::micro::GetTensorData(input) + + step_info.InputOffset(), + micro::GetTensorShape(input_weight), + tflite::micro::GetTensorData(input_weight), + tflite::micro::GetTensorShape(input_bias), + tflite::micro::GetOptionalTensorData(input_bias), + gate_output_shape, gate_output); + + // Recurrent FC + FullyConnected(gate_params.recurrent_fc_params, step_info.StateShape(), + tflite::micro::GetTensorData(recurrent) + + step_info.HiddenStateOffset(), + tflite::micro::GetTensorShape(recurrent_weight), + tflite::micro::GetTensorData(recurrent_weight), + tflite::micro::GetTensorShape(recurrent_bias), + tflite::micro::GetOptionalTensorData(recurrent_bias), + gate_output_shape, fc_output_buffer); + + AddElementWise(gate_output, fc_output_buffer, + /*n_batch=*/gate_output_shape.DimsData()[0], + /*n_state=*/gate_output_shape.DimsData()[1], gate_output); + // 
Apply activation + switch (activation) { + case kTfLiteActSigmoid: + Sigmoid(gate_output_shape, gate_output); + break; + case kTfLiteActTanh: { + // Set the scale power to -12 to avoid shift + Tanh(/*cell_state_scale_power=*/-12, gate_output_shape, gate_output, + gate_output_shape, gate_output); + } break; + default: + // Only Sigmoid or Tanh is used. + TFLITE_ASSERT_FALSE; + } +} + +// Update the cell state using the output from the forget gate, input gate, and +// cell gate Formula: updated_cell_state = forget_gate_output*cell_state + +// input_gate_output * cell_gate_output, where * denotes element wise +// multiplication +template +void UpdateLstmCell(const LstmStepManager& step_info, + TfLiteEvalTensor* cell_state, + // Gate outputs + CellType* forget_gate_output, + const CellType* input_gate_output, + const CellType* cell_gate_output, + // Mul parameters + const ArithmeticParams& forget_cell_mul_params, + const ArithmeticParams& input_mul_params, + const CellStateInfo& cell_state_info, CellType* buffer) { + // Check offset validity to avoid memory overflow + TFLITE_DCHECK_LE( + step_info.CellStateOffset() + step_info.StateShape().FlatSize(), + tflite::micro::GetTensorShape(cell_state).FlatSize()); + + auto cell_state_shape = step_info.StateShape(); + // Forget Gate x Cell State + Mul(cell_state_shape, forget_cell_mul_params, forget_gate_output, + tflite::micro::GetTensorData(cell_state) + + step_info.CellStateOffset(), + tflite::micro::GetTensorData(cell_state) + + step_info.CellStateOffset()); + // Input Gate x Cell Gate + Mul(cell_state_shape, input_mul_params, input_gate_output, cell_gate_output, + buffer); + + // Update the cell state + AddElementWise(tflite::micro::GetTensorData(cell_state) + + step_info.CellStateOffset(), + buffer, + /*n_batch=*/cell_state_shape.DimsData()[0], + /*n_state=*/cell_state_shape.DimsData()[1], + tflite::micro::GetTensorData(cell_state) + + step_info.CellStateOffset()); + + if (cell_state_info.cell_clip > 0) { + 
Clipping(cell_state_shape.FlatSize(), cell_state_info, + tflite::micro::GetTensorData(cell_state) + + step_info.CellStateOffset()); + } +} + +// Update the hidden state of the LSTM kernel using the following formula: +// updated_hidden_state = Tanh(updated_cell_state) * output_gate_output, * means +// element wise multiplication +template +void UpdateLstmHidden(const LstmStepManager& step_info, + TfLiteEvalTensor* cell_state, + TfLiteEvalTensor* hidden_state, + const CellType* output_gate_output, + const ArithmeticParams& mul_params, + int32_t cell_state_scale_power, CellType* buffer) { + // Check offset validity to avoid memory overflow + TFLITE_DCHECK_LE( + step_info.CellStateOffset() + step_info.StateShape().FlatSize(), + tflite::micro::GetTensorShape(cell_state).FlatSize()); + TFLITE_DCHECK_LE( + step_info.HiddenStateOffset() + step_info.StateShape().FlatSize(), + tflite::micro::GetTensorShape(hidden_state).FlatSize()); + + auto cell_state_shape = step_info.StateShape(); + CellType* cell_state_data = + tflite::micro::GetTensorData(cell_state) + + step_info.CellStateOffset(); + // Tanh(cell_state) + Tanh(cell_state_scale_power, cell_state_shape, cell_state_data, + cell_state_shape, buffer); + // Update the hidden state + Mul(cell_state_shape, mul_params, buffer, output_gate_output, + tflite::micro::GetTensorData(hidden_state) + + step_info.HiddenStateOffset()); +} + +template +void LstmStep(const LstmStepManager& step_info, const OpDataLSTM& op_data, + LSTMKernelContents& kernel_content, + const LSTMBuffers& buffers) { + /*Step1: Calculate gate outputs to prepare cell state update*/ + CellType* gate_internal_buffer = buffers.buffer3; + CellType* forget_gate_output = buffers.buffer0; + CalculateLstmGate( + step_info, op_data.forget_gate_parameters, + // Input FC + kernel_content.GetInternalTensor(tflite::kLstmInputTensor), + kernel_content.GetInternalTensor(tflite::kLstmInputToForgetWeightsTensor), + 
kernel_content.GetInternalTensor(tflite::kLstmForgetGateBiasTensor), + // Recurrent FC + kernel_content.HiddenStateTensor(), + kernel_content.GetInternalTensor( + tflite::kLstmRecurrentToForgetWeightsTensor), + /*recurrent_bias*/ nullptr, + // Output + forget_gate_output, + // Scratch arrays + gate_internal_buffer, kTfLiteActSigmoid); + + // Input Gate calculation; + CellType* input_gate_output = buffers.buffer1; + CalculateLstmGate( + step_info, op_data.input_gate_parameters, + // Input FC + kernel_content.GetInternalTensor(tflite::kLstmInputTensor), + kernel_content.GetInternalTensor(tflite::kLstmInputToInputWeightsTensor), + kernel_content.GetInternalTensor(tflite::kLstmInputGateBiasTensor), + // Recurrent FC + kernel_content.HiddenStateTensor(), + kernel_content.GetInternalTensor( + tflite::kLstmRecurrentToInputWeightsTensor), + /*recurrent_bias*/ nullptr, + // Output + input_gate_output, + // Scratch arrays + gate_internal_buffer, kTfLiteActSigmoid); + + // Cell Gate calculation + CellType* cell_gate_output = buffers.buffer2; + CalculateLstmGate( + step_info, op_data.cell_gate_parameters, + // Input FC + kernel_content.GetInternalTensor(tflite::kLstmInputTensor), + kernel_content.GetInternalTensor(tflite::kLstmInputToCellWeightsTensor), + kernel_content.GetInternalTensor(tflite::kLstmCellGateBiasTensor), + // Recurrent FC + kernel_content.HiddenStateTensor(), + kernel_content.GetInternalTensor( + tflite::kLstmRecurrentToCellWeightsTensor), + /*recurrent_bias*/ nullptr, + // Output + cell_gate_output, + // Scratch arrays + gate_internal_buffer, op_data.cell_gate_nonlinear_type); + + /*Step2: update the cell state */ + const InterGateParameters& inter_gate_params = op_data.inter_gate_parameters; + CellType* updated_input_buffer = buffers.buffer1; // reuse buffer + + UpdateLstmCell(step_info, kernel_content.CellStateTensor(), + forget_gate_output, input_gate_output, + cell_gate_output, + inter_gate_params.forget_cell_mul_params, + 
inter_gate_params.input_mul_params, + op_data.cell_state_info, updated_input_buffer); + + /*Step3: update the hidden state */ + CellType* output_gate_output = buffers.buffer1; // reuse buffer + CalculateLstmGate( + step_info, op_data.output_gate_parameters, + // Input FC + kernel_content.GetInternalTensor(tflite::kLstmInputTensor), + kernel_content.GetInternalTensor(tflite::kLstmInputToOutputWeightsTensor), + kernel_content.GetInternalTensor(tflite::kLstmOutputGateBiasTensor), + // Recurrent FC + kernel_content.HiddenStateTensor(), + kernel_content.GetInternalTensor( + tflite::kLstmRecurrentToOutputWeightsTensor), + /*recurrent_bias*/ nullptr, + // Output + output_gate_output, + // Scratch arrays + gate_internal_buffer, kTfLiteActSigmoid); + + CellType* tanh_activated_cell_buffer = buffers.buffer0; // reuse buffer + tflite::lstm_internal::UpdateLstmHidden( + step_info, kernel_content.CellStateTensor(), + kernel_content.HiddenStateTensor(), output_gate_output, + inter_gate_params.output_mul_params, + op_data.cell_state_info.cell_state_scale_power, + tanh_activated_cell_buffer); + + /*Step4: copy the update the hidden state to output*/ + // Check offset validity to avoid memory overflow + TFLITE_DCHECK_LE( + step_info.OutputOffset() + step_info.StateShape().FlatSize(), + tflite::micro::GetTensorShape(kernel_content.output_tensor).FlatSize()); + // record the output (from the updated hidden state) + ActivationType* output_ptr = tflite::micro::GetTensorData( + kernel_content.output_tensor); + const auto* hidden_state = kernel_content.HiddenStateTensor(); + std::memcpy(output_ptr + step_info.OutputOffset(), + tflite::micro::GetTensorData(hidden_state) + + step_info.HiddenStateOffset(), + step_info.StateShape().FlatSize() * sizeof(ActivationType)); +} + +} // namespace lstm_internal + +// Evaulate the LSTM kernel with (potential) multi-steps and multi-batch input +// Since +template +TfLiteStatus EvalLstm(const OpDataLSTM& op_data, + LSTMKernelContents& kernel_content, + 
const LSTMBuffers& buffers) { + lstm_internal::LstmStepManager step_info(&op_data.size_info); + const auto& size_info = op_data.size_info; + // time is the first dimention, enable batch computation + if (size_info.time_major) { + for (int t = 0; t < size_info.time_steps; t++) { + lstm_internal::LstmStep( + step_info, op_data, kernel_content, buffers); + // prepare for the next time step + step_info.UpdateTime(); + } + } else { + // batch first, unable to size the input data. single batch inference + for (int b = 0; b < size_info.batch_size; b++) { + for (int t = 0; t < size_info.time_steps; t++) { + lstm_internal::LstmStep( + step_info, op_data, kernel_content, buffers); + // prepare for the next time step + step_info.UpdateTime(); + } + // prepare for the next batch + step_info.UpdateBatch(); + step_info.ResetTime(); + } + } + return kTfLiteOk; +} +} // namespace tflite + +#endif // TENSORFLOW_LITE_MICRO_KERNELS_LSTM_EVAL_16ACT_H_ diff --git a/tensorflow/lite/micro/kernels/lstm_eval_common.cc b/tensorflow/lite/micro/kernels/lstm_eval_common.cc new file mode 100644 index 0000000..9631b4c --- /dev/null +++ b/tensorflow/lite/micro/kernels/lstm_eval_common.cc @@ -0,0 +1,326 @@ +/* Copyright 2023 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/ + +#include "tensorflow/lite/kernels/internal/quantization_util.h" +#include "tensorflow/lite/kernels/kernel_util.h" +#include "tensorflow/lite/micro/kernels/fully_connected.h" +#include "tensorflow/lite/micro/kernels/lstm_eval.h" + +namespace tflite { + +// Deduce the size information (Batch (B), Time Steps (T), Input dimension (I), +// State dimension (S)) that defines the LSTM using the input and hidden state +// tensor +LstmSizeInfo CreateLstmSizeInfo( + const bool time_major, const TfLiteIntArray* input_tensor_shape, + const TfLiteIntArray* hidden_state_tensor_shape) { + LstmSizeInfo size_info; + size_info.time_major = time_major; + size_info.batch_size = + time_major ? input_tensor_shape->data[1] : input_tensor_shape->data[0]; + size_info.time_steps = + time_major ? input_tensor_shape->data[0] : input_tensor_shape->data[1]; + size_info.input_dimension = input_tensor_shape->data[2]; + size_info.state_dimension = hidden_state_tensor_shape->data[1]; + return size_info; +} + +TfLiteStatus ValidateWeightTensorSize(TfLiteContext* context, + const TfLiteTensor* tensor, int dim1_size, + int dim2_size) { + TF_LITE_ENSURE_EQ(context, tensor->dims->size, 2); + TF_LITE_ENSURE_EQ(context, tensor->dims->data[0], dim1_size); + TF_LITE_ENSURE_EQ(context, tensor->dims->data[1], dim2_size); + return kTfLiteOk; +} + +TfLiteStatus ValidateBiasTensorSize(TfLiteContext* context, + const TfLiteTensor* tensor, int size) { + TF_LITE_ENSURE_EQ(context, tensor->dims->size, 1); + TF_LITE_ENSURE_EQ(context, tensor->dims->data[0], size); + return kTfLiteOk; +} + +// Go through every tensors and make sure their shape match the kernel +// configuration +TfLiteStatus ValidateTensorSize(TfLiteContext* context, + const LstmTensors& tensors, + const LstmSizeInfo& size_info) { + // Input FC weights + for (size_t i = 1; i < 5; i++) { + TF_LITE_ENSURE_OK( + context, ValidateWeightTensorSize(context, 
tensors.GetInternalTensor(i), + size_info.state_dimension, + size_info.input_dimension)); + } + // Recurrent FC weights + for (size_t i = 5; i < 9; i++) { + TF_LITE_ENSURE_OK( + context, ValidateWeightTensorSize(context, tensors.GetInternalTensor(i), + size_info.state_dimension, + size_info.state_dimension)); + } + // Biases + for (size_t i = 12; i < 16; i++) { + TF_LITE_ENSURE_OK( + context, ValidateBiasTensorSize(context, tensors.GetInternalTensor(i), + size_info.state_dimension)); + } + + // Check the shape of input state tensors. + // These tensor may be 1D or 2D. It's fine as long as the total size is + // correct. + TF_LITE_ENSURE_EQ(context, NumElements(tensors.HiddenStateTensor()), + size_info.batch_size * size_info.state_dimension); + TF_LITE_ENSURE_EQ(context, NumElements(tensors.CellStateTensor()), + size_info.batch_size * size_info.state_dimension); + + // Check the shape of output tensor against that of input tensor + TF_LITE_ENSURE_EQ(context, tensors.OutputTensor()->dims->size, 3); + TF_LITE_ENSURE_EQ(context, + tensors.GetInternalTensor(kLstmInputTensor)->dims->data[0], + tensors.OutputTensor()->dims->data[0]); + TF_LITE_ENSURE_EQ(context, + tensors.GetInternalTensor(kLstmInputTensor)->dims->data[1], + tensors.OutputTensor()->dims->data[1]); + TF_LITE_ENSURE_EQ(context, tensors.OutputTensor()->dims->data[2], + size_info.state_dimension); + return kTfLiteOk; +} + +// Wrapper function to create gate parameters for the four internal LSTM gates +TfLiteStatus CreateGateParams( + TfLiteContext* context, + /*Input tensors*/ + const TfLiteTensor* input, const TfLiteTensor* input_weight, + const TfLiteTensor* input_bias, + /*Hidden state tensors*/ + const TfLiteTensor* hidden_state, const TfLiteTensor* hidden_state_weight, + const TfLiteTensor* hidden_state_bias, + /*Scale of the fc output (input to non-linear activation)*/ + const float nonlinear_activation_input_scale, const TfLiteType cell_type, + tflite::GateParameters& gate_params) { + // A temp tflite 
tensor to represent the output of fc operation. Only the data + // type and quantization parameters are set since it is only used for + // parameter calculations + TfLiteTensor fc_output_temp; + fc_output_temp.type = cell_type; + fc_output_temp.params.scale = nonlinear_activation_input_scale; + fc_output_temp.params.zero_point = 0; // symmetrical quantized + + // A temp fc opdata to reuse the helper function on creating fc parameters + tflite::OpDataFullyConnected fc_data_temp; + // TODO(b/265853320): due to the lack of precision for the float scale, + // scale_diff / output_scale <= 0.02 (potentially requires 1e-8 precision) can + // not be satisified for the bias. Here we rely on the correctiveness of the + // conversion process (set input_bias=nullptr to avoid checking) for + // tensor scales + TF_LITE_ENSURE_STATUS(CalculateOpDataFullyConnected( + context, kTfLiteActNone, input->type, input, input_weight, + /*input_bias=*/nullptr, &fc_output_temp, &fc_data_temp)); + gate_params.input_fc_params = FullyConnectedParamsQuantized(fc_data_temp); + double real_multiplier = 0.0; + GetQuantizedConvolutionMultipler(context, input, input_weight, nullptr, + &fc_output_temp, &real_multiplier); + + TF_LITE_ENSURE_STATUS(CalculateOpDataFullyConnected( + context, kTfLiteActNone, hidden_state->type, hidden_state, + hidden_state_weight, hidden_state_bias, &fc_output_temp, &fc_data_temp)); + gate_params.recurrent_fc_params = FullyConnectedParamsQuantized(fc_data_temp); + return kTfLiteOk; +} + +// Create parameters for element wise multiplication that happens in a) cell +// state update ; b) hidden state update +// Note that all the output of gates are symmetrically quantized so only scales +// are required for input. However, during the hidden state update phase, the +// output is the updated hidden state, which is asymmetrically quantized. 
Thus +// output may require zero point +tflite::ArithmeticParams CreateInterGateMulParams(const float input1_scale, + const float input2_scale, + const float output_scale, + const TfLiteType output_type, + const int output_zp) { + tflite::ArithmeticParams op_params = {}; + if (output_type == kTfLiteInt16) { + op_params.quantized_activation_min = std::numeric_limits<int16_t>::min(); + op_params.quantized_activation_max = std::numeric_limits<int16_t>::max(); + } else if (output_type == kTfLiteInt8) { + op_params.quantized_activation_min = std::numeric_limits<int8_t>::min(); + op_params.quantized_activation_max = std::numeric_limits<int8_t>::max(); + } + + op_params.input1_offset = 0; // symmetric + op_params.input2_offset = 0; // symmetric + op_params.output_offset = output_zp; + + const double input_product_scale = + static_cast<double>(input1_scale) * static_cast<double>(input2_scale); + double effective_scale = + input_product_scale / static_cast<double>(output_scale); + + QuantizeMultiplier(effective_scale, &op_params.output_multiplier, + &op_params.output_shift); + return op_params; +} + +// Create the additional information about the cell state, which include: +// cell_state_scale_power: used in integer nonlinear function (e.g., tanh) +// quantized_cell_clip: quantized cell clip range +CellStateInfo CreateLstmCellStateInfo(const float cell_state_scale, + const float cell_clip) { + CellStateInfo cell_state_info; + // cell_state_scale_power: 2^-cell_state_scale_power = cell state scale + int buffer; + tflite::CheckedLog2(cell_state_scale, &buffer); + cell_state_info.cell_state_scale_power = buffer; + // Cell state specifics + cell_state_info.cell_clip = cell_clip; + cell_state_info.quantized_cell_clip = static_cast<int16_t>( + std::min(std::max(static_cast<double>(cell_clip) / + static_cast<double>(cell_state_scale), + -32768.0), + 32767.0)); + + return cell_state_info; +} + +CellStateInfo CreateLstmCellStateInfoFloat(const float cell_clip) { + CellStateInfo cell_state_info; + cell_state_info.cell_clip = cell_clip; + 
cell_state_info.cell_state_scale_power = 0; // no quantization + cell_state_info.quantized_cell_clip = 0; // no quantization + return cell_state_info; +} + +tflite::FullyConnectedParams CreateFCParamsFloat() { + FullyConnectedParams op_params; + CalculateActivationRange(kTfLiteActNone, &op_params.float_activation_min, + &op_params.float_activation_max); + return op_params; +} + +tflite::GateParameters CreateGateParamsFloat() { + tflite::GateParameters gate_params = {}; + gate_params.input_fc_params = CreateFCParamsFloat(); + gate_params.recurrent_fc_params = CreateFCParamsFloat(); + return gate_params; +} + +tflite::ArithmeticParams CreateInterGateMulParamsFloat() { + tflite::ArithmeticParams op_params = {}; + CalculateActivationRange(kTfLiteActNone, &op_params.float_activation_min, + &op_params.float_activation_max); + return op_params; +} + +TfLiteStatus PrepareGateParametersFloat(TfLiteContext* context, + const LstmTensors& lstm_tensors, + OpDataLSTM* op_data_lstm) { + // Gate Parameters + op_data_lstm->forget_gate_parameters = CreateGateParamsFloat(); + op_data_lstm->input_gate_parameters = CreateGateParamsFloat(); + op_data_lstm->cell_gate_parameters = CreateGateParamsFloat(); + op_data_lstm->output_gate_parameters = CreateGateParamsFloat(); + // Inter gate multiplication parameters + op_data_lstm->inter_gate_parameters.forget_cell_mul_params = + CreateInterGateMulParamsFloat(); + op_data_lstm->inter_gate_parameters.input_mul_params = + CreateInterGateMulParamsFloat(); + op_data_lstm->inter_gate_parameters.output_mul_params = + CreateInterGateMulParamsFloat(); + return kTfLiteOk; +} + +TfLiteStatus PrepareGateParametersInteger(TfLiteContext* context, + const LstmTensors& lstm_tensors, + OpDataLSTM* op_data_lstm) { + float nonlinear_input_scale = 0.000244140625; // 2^-12 Q3.12 -> Q0.15 + TF_LITE_ENSURE_OK( + context, + CreateGateParams( + context, lstm_tensors.GetInternalTensor(kLstmInputTensor), + 
lstm_tensors.GetInternalTensor(kLstmInputToForgetWeightsTensor), + lstm_tensors.GetInternalTensor(kLstmForgetGateBiasTensor), + lstm_tensors.GetInternalTensor(kLstmOutputStateTensor), + lstm_tensors.GetInternalTensor(kLstmRecurrentToForgetWeightsTensor), + /*hidden_state_bias=*/nullptr, nonlinear_input_scale, kTfLiteInt16, + op_data_lstm->forget_gate_parameters)); + TF_LITE_ENSURE_OK( + context, + CreateGateParams( + context, lstm_tensors.GetInternalTensor(kLstmInputTensor), + lstm_tensors.GetInternalTensor(kLstmInputToInputWeightsTensor), + lstm_tensors.GetInternalTensor(kLstmInputGateBiasTensor), + lstm_tensors.GetInternalTensor(kLstmOutputStateTensor), + lstm_tensors.GetInternalTensor(kLstmRecurrentToInputWeightsTensor), + /*hidden_state_bias=*/nullptr, nonlinear_input_scale, kTfLiteInt16, + op_data_lstm->input_gate_parameters)); + TF_LITE_ENSURE_OK( + context, + CreateGateParams( + context, lstm_tensors.GetInternalTensor(kLstmInputTensor), + lstm_tensors.GetInternalTensor(kLstmInputToCellWeightsTensor), + lstm_tensors.GetInternalTensor(kLstmCellGateBiasTensor), + lstm_tensors.GetInternalTensor(kLstmOutputStateTensor), + lstm_tensors.GetInternalTensor(kLstmRecurrentToCellWeightsTensor), + /*hidden_state_bias=*/nullptr, nonlinear_input_scale, kTfLiteInt16, + op_data_lstm->cell_gate_parameters)); + TF_LITE_ENSURE_OK( + context, + CreateGateParams( + context, lstm_tensors.GetInternalTensor(kLstmInputTensor), + lstm_tensors.GetInternalTensor(kLstmInputToOutputWeightsTensor), + lstm_tensors.GetInternalTensor(kLstmOutputGateBiasTensor), + lstm_tensors.GetInternalTensor(kLstmOutputStateTensor), + lstm_tensors.GetInternalTensor(kLstmRecurrentToOutputWeightsTensor), + /*hidden_state_bias=*/nullptr, nonlinear_input_scale, kTfLiteInt16, + op_data_lstm->output_gate_parameters)); + + // Inter gate multiplication parameters + float nonlinear_output_scale = 0.000030517578125; // 2^-15 Q3.12 -> Q0.15 + float cell_state_scale = lstm_tensors.CellStateTensor()->params.scale; + // 
forget gate output (nonlinear output) x cell state -> cell state + op_data_lstm->inter_gate_parameters.forget_cell_mul_params = + CreateInterGateMulParams(nonlinear_output_scale, cell_state_scale, + cell_state_scale, kTfLiteInt16); + // input gate output x cell gate output -> cell state + op_data_lstm->inter_gate_parameters.input_mul_params = + CreateInterGateMulParams(nonlinear_output_scale, nonlinear_output_scale, + cell_state_scale, kTfLiteInt16); + // tanh output x output gate output -> hidden state (potentially asymmetric) + op_data_lstm->inter_gate_parameters.output_mul_params = + CreateInterGateMulParams( + nonlinear_output_scale, nonlinear_output_scale, + lstm_tensors.HiddenStateTensor()->params.scale, + lstm_tensors.HiddenStateTensor()->type, + lstm_tensors.HiddenStateTensor()->params.zero_point); + return kTfLiteOk; +} + +LSTMKernelContents CreateLSTMKernelContent(TfLiteContext* context, + TfLiteNode* node) { + LSTMKernelContents kernel_content; + // Point to correct tensors + for (size_t i = 0; i < 24; i++) { + kernel_content.internal_tensors[i] = + tflite::micro::GetMutableEvalInput(context, node, i); + } + // Output tensor + kernel_content.output_tensor = tflite::micro::GetEvalOutput(context, node, 0); + return kernel_content; +} + +} // namespace tflite diff --git a/tensorflow/lite/micro/kernels/lstm_eval_test.cc b/tensorflow/lite/micro/kernels/lstm_eval_test.cc new file mode 100644 index 0000000..53c0d7c --- /dev/null +++ b/tensorflow/lite/micro/kernels/lstm_eval_test.cc @@ -0,0 +1,459 @@ +/* Copyright 2022 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. 
+You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ +#include "tensorflow/lite/micro/kernels/lstm_eval_test.h" + +#include +#include +#include +#include + +#include "tensorflow/lite/c/builtin_op_data.h" +#include "tensorflow/lite/c/common.h" +#include "tensorflow/lite/micro/kernels/lstm_eval.h" +#include "tensorflow/lite/micro/kernels/lstm_shared.h" +#include "tensorflow/lite/micro/kernels/testdata/lstm_test_data.h" +#include "tensorflow/lite/micro/test_helpers.h" +#include "tensorflow/lite/micro/testing/micro_test.h" + +// TODO(b/230666079) enable below tests for xtensa when the xtensa +// kernel is reconciled with reference kernel +#if !defined(XTENSA) +namespace { +// Test Settings +constexpr float kTestFloatTolerance = 1e-6f; +} // namespace +#endif // !defined(XTENSA) + +TF_LITE_MICRO_TESTS_BEGIN +// TODO(b/230666079) enable below tests for xtensa when the xtensa +// kernel is reconciled with reference kernel +#if !defined(XTENSA) +TF_LITE_MICRO_TEST(CheckGateOutputFloat) { + const tflite::testing::GateOutputCheckData<4, 4> gate_output_data = + tflite::testing::Get2X2GateOutputCheckData(); + tflite::testing::LstmNodeContent + float_node_contents = tflite::testing::Create2x3x2X2FloatNodeContents( + gate_output_data.input_data, gate_output_data.hidden_state, + gate_output_data.cell_state); + + // Forget gate + tflite::testing::TestCalculateLstmGateFloat<2, 2>( + float_node_contents.GetEvalTensor(tflite::kLstmInputTensor), + float_node_contents.GetEvalTensor( + tflite::kLstmInputToForgetWeightsTensor), + 
float_node_contents.GetEvalTensor(tflite::kLstmForgetGateBiasTensor), + // Recurrent FC + float_node_contents.HiddenStateEvalTensor(), + float_node_contents.GetEvalTensor( + tflite::kLstmRecurrentToForgetWeightsTensor), + nullptr, // bias fused to activation FC, + // Result comparison + kTfLiteActSigmoid, gate_output_data.expected_forget_gate_output, + kTestFloatTolerance); + + // Input gate + tflite::testing::TestCalculateLstmGateFloat<2, 2>( + float_node_contents.GetEvalTensor(tflite::kLstmInputTensor), + float_node_contents.GetEvalTensor(tflite::kLstmInputToInputWeightsTensor), + float_node_contents.GetEvalTensor(tflite::kLstmInputGateBiasTensor), + // Recurrent FC + float_node_contents.HiddenStateEvalTensor(), + float_node_contents.GetEvalTensor( + tflite::kLstmRecurrentToInputWeightsTensor), + nullptr, // bias fused to activation FC, + // Result comparison + kTfLiteActSigmoid, gate_output_data.expected_input_gate_output, + kTestFloatTolerance); + + // Output gate + tflite::testing::TestCalculateLstmGateFloat<2, 2>( + float_node_contents.GetEvalTensor(tflite::kLstmInputTensor), + float_node_contents.GetEvalTensor( + tflite::kLstmInputToOutputWeightsTensor), + float_node_contents.GetEvalTensor(tflite::kLstmOutputGateBiasTensor), + // Recurrent FC + float_node_contents.HiddenStateEvalTensor(), + float_node_contents.GetEvalTensor( + tflite::kLstmRecurrentToOutputWeightsTensor), + nullptr, // bias fused to activation FC, + // Result comparison + kTfLiteActSigmoid, gate_output_data.expected_output_gate_output, + kTestFloatTolerance); + + // Cell gate + tflite::testing::TestCalculateLstmGateFloat<2, 2>( + float_node_contents.GetEvalTensor(tflite::kLstmInputTensor), + float_node_contents.GetEvalTensor(tflite::kLstmInputToCellWeightsTensor), + float_node_contents.GetEvalTensor(tflite::kLstmCellGateBiasTensor), + // Recurrent FC + float_node_contents.HiddenStateEvalTensor(), + float_node_contents.GetEvalTensor( + tflite::kLstmRecurrentToCellWeightsTensor), + nullptr, // 
bias fused to activation FC, + // Result comparison + float_node_contents.BuiltinData().activation, + gate_output_data.expected_cell_gate_output, kTestFloatTolerance); +} + +TF_LITE_MICRO_TEST(CheckGateOutputInt8) { + const tflite::testing::GateOutputCheckData<4, 4> gate_output_data = + tflite::testing::Get2X2GateOutputCheckData(); + tflite::testing::LstmNodeContent + int8_node_contents = tflite::testing::Create2x3x2X2Int8NodeContents( + gate_output_data.input_data, gate_output_data.hidden_state, + gate_output_data.cell_state); + + // Forget gate + // Quantization performs badly here due to integer overflow!!! + float tolerance = 1e-1f; + tflite::testing::TestCalculateLstmGateInteger( + int8_node_contents.GetEvalTensor(tflite::kLstmInputTensor), + int8_node_contents.GetEvalTensor(tflite::kLstmInputToForgetWeightsTensor), + int8_node_contents.GetEvalTensor(tflite::kLstmForgetGateBiasTensor), + // Recurrent FC + int8_node_contents.HiddenStateEvalTensor(), + int8_node_contents.GetEvalTensor( + tflite::kLstmRecurrentToForgetWeightsTensor), + nullptr, // bias fused to activation FC, + // Quantization settings + int8_node_contents.QuantizationSettings(), + int8_node_contents.QuantizationSettings().forget_gate, + // Result comparison + kTfLiteActSigmoid, gate_output_data.expected_forget_gate_output, + tolerance); + + // Input gate + // Quantization performs badly here due to integer overflow!!! 
+ tolerance = 1e-1f; + tflite::testing::TestCalculateLstmGateInteger( + int8_node_contents.GetEvalTensor(tflite::kLstmInputTensor), + int8_node_contents.GetEvalTensor(tflite::kLstmInputToInputWeightsTensor), + int8_node_contents.GetEvalTensor(tflite::kLstmInputGateBiasTensor), + // Recurrent FC + int8_node_contents.HiddenStateEvalTensor(), + int8_node_contents.GetEvalTensor( + tflite::kLstmRecurrentToInputWeightsTensor), + nullptr, // bias fused to activation FC, + // Quantization settings + int8_node_contents.QuantizationSettings(), + int8_node_contents.QuantizationSettings().input_gate, + // Result comparison + kTfLiteActSigmoid, gate_output_data.expected_input_gate_output, + tolerance); + + // Output gate + tolerance = 1e-2f; + tflite::testing::TestCalculateLstmGateInteger( + int8_node_contents.GetEvalTensor(tflite::kLstmInputTensor), + int8_node_contents.GetEvalTensor(tflite::kLstmInputToOutputWeightsTensor), + int8_node_contents.GetEvalTensor(tflite::kLstmOutputGateBiasTensor), + // Recurrent FC + int8_node_contents.HiddenStateEvalTensor(), + int8_node_contents.GetEvalTensor( + tflite::kLstmRecurrentToOutputWeightsTensor), + nullptr, // bias fused to activation FC, + // Quantization settings + int8_node_contents.QuantizationSettings(), + int8_node_contents.QuantizationSettings().output_gate, + // Result comparison + kTfLiteActSigmoid, gate_output_data.expected_output_gate_output, + tolerance); + + // Cell gate + tolerance = 1e-2f; + tflite::testing::TestCalculateLstmGateInteger( + int8_node_contents.GetEvalTensor(tflite::kLstmInputTensor), + int8_node_contents.GetEvalTensor(tflite::kLstmInputToCellWeightsTensor), + int8_node_contents.GetEvalTensor(tflite::kLstmCellGateBiasTensor), + // Recurrent FC + int8_node_contents.HiddenStateEvalTensor(), + int8_node_contents.GetEvalTensor( + tflite::kLstmRecurrentToCellWeightsTensor), + nullptr, // bias fused to activation FC, + // Quantization settings + int8_node_contents.QuantizationSettings(), + 
int8_node_contents.QuantizationSettings().cell_gate, + // Result comparison + int8_node_contents.BuiltinData().activation, + gate_output_data.expected_cell_gate_output, tolerance); +} + +TF_LITE_MICRO_TEST(CheckGateOutputInt16) { + const tflite::testing::GateOutputCheckData<4, 4> gate_output_data = + tflite::testing::Get2X2GateOutputCheckData(); + tflite::testing::LstmNodeContent + int16_node_contents = tflite::testing::Create2x3x2X2Int16NodeContents( + gate_output_data.input_data, gate_output_data.hidden_state, + gate_output_data.cell_state); + + // Forget gate + // Quantization performs badly here due to integer overflow (from batch2)!!! + float tolerance = 1e-1f; + tflite::testing::TestCalculateLstmGateInteger( + int16_node_contents.GetEvalTensor(tflite::kLstmInputTensor), + int16_node_contents.GetEvalTensor( + tflite::kLstmInputToForgetWeightsTensor), + int16_node_contents.GetEvalTensor(tflite::kLstmForgetGateBiasTensor), + // Recurrent FC + int16_node_contents.HiddenStateEvalTensor(), + int16_node_contents.GetEvalTensor( + tflite::kLstmRecurrentToForgetWeightsTensor), + nullptr, // bias fused to activation FC, + // Quantization settings + int16_node_contents.QuantizationSettings(), + int16_node_contents.QuantizationSettings().forget_gate, + // Result comparison + kTfLiteActSigmoid, gate_output_data.expected_forget_gate_output, + tolerance); + + // Input gate + // Quantization performs badly here due to integer overflow (from batch2)!!! 
+ tolerance = 1e-1f; + tflite::testing::TestCalculateLstmGateInteger( + int16_node_contents.GetEvalTensor(tflite::kLstmInputTensor), + int16_node_contents.GetEvalTensor(tflite::kLstmInputToInputWeightsTensor), + int16_node_contents.GetEvalTensor(tflite::kLstmInputGateBiasTensor), + // Recurrent FC + int16_node_contents.HiddenStateEvalTensor(), + int16_node_contents.GetEvalTensor( + tflite::kLstmRecurrentToInputWeightsTensor), + nullptr, // bias fused to activation FC, + // Quantization settings + int16_node_contents.QuantizationSettings(), + int16_node_contents.QuantizationSettings().input_gate, + // Result comparison + kTfLiteActSigmoid, gate_output_data.expected_input_gate_output, + tolerance); + + // Output gate + // Quantization scale (theoritical lowest range) is at range 1e-5 + tolerance = 1e-4f; + tflite::testing::TestCalculateLstmGateInteger( + int16_node_contents.GetEvalTensor(tflite::kLstmInputTensor), + int16_node_contents.GetEvalTensor( + tflite::kLstmInputToOutputWeightsTensor), + int16_node_contents.GetEvalTensor(tflite::kLstmOutputGateBiasTensor), + // Recurrent FC + int16_node_contents.HiddenStateEvalTensor(), + int16_node_contents.GetEvalTensor( + tflite::kLstmRecurrentToOutputWeightsTensor), + nullptr, // bias fused to activation FC, + // Quantization settings + int16_node_contents.QuantizationSettings(), + int16_node_contents.QuantizationSettings().output_gate, + // Result comparison + kTfLiteActSigmoid, gate_output_data.expected_output_gate_output, + tolerance); + + // Cell gate + tolerance = 1e-4f; + tflite::testing::TestCalculateLstmGateInteger( + int16_node_contents.GetEvalTensor(tflite::kLstmInputTensor), + int16_node_contents.GetEvalTensor(tflite::kLstmInputToCellWeightsTensor), + int16_node_contents.GetEvalTensor(tflite::kLstmCellGateBiasTensor), + // Recurrent FC + int16_node_contents.HiddenStateEvalTensor(), + int16_node_contents.GetEvalTensor( + tflite::kLstmRecurrentToCellWeightsTensor), + nullptr, // bias fused to activation FC, + // 
Quantization settings + int16_node_contents.QuantizationSettings(), + int16_node_contents.QuantizationSettings().cell_gate, + // Result comparison + int16_node_contents.BuiltinData().activation, + gate_output_data.expected_cell_gate_output, tolerance); +} + +TF_LITE_MICRO_TEST(CheckCellStateUpdateFloat) { + const tflite::testing::GateOutputCheckData<4, 4> gate_output_data = + tflite::testing::Get2X2GateOutputCheckData(); + tflite::testing::LstmNodeContent + float_node_contents = tflite::testing::Create2x3x2X2FloatNodeContents( + gate_output_data.input_data, gate_output_data.hidden_state, + gate_output_data.cell_state); + + tflite::testing::TestUpdateLstmCellFloat( + gate_output_data, float_node_contents, kTestFloatTolerance); +} + +TF_LITE_MICRO_TEST(CheckCellStateUpdateInt8) { + const tflite::testing::GateOutputCheckData<4, 4> gate_output_data = + tflite::testing::Get2X2GateOutputCheckData(); + tflite::testing::LstmNodeContent + int8_node_contents = tflite::testing::Create2x3x2X2Int8NodeContents( + gate_output_data.input_data, gate_output_data.hidden_state, + gate_output_data.cell_state); + + // Very high precision. The error is introduced by the + // quantization error of the clip value (~1e-5), but cannot actually reach + // the precision due to integer overflow of the elements + const float tolerance = 1e-3f; + tflite::testing::TestUpdateLstmCellInteger(gate_output_data, + int8_node_contents, tolerance); +} + +TF_LITE_MICRO_TEST(CheckCellStateUpdateInt16) { + const tflite::testing::GateOutputCheckData<4, 4> gate_output_data = + tflite::testing::Get2X2GateOutputCheckData(); + tflite::testing::LstmNodeContent + int16_node_contents = tflite::testing::Create2x3x2X2Int16NodeContents( + gate_output_data.input_data, gate_output_data.hidden_state, + gate_output_data.cell_state); + // Very high precision. 
The error is introduced by the + // quantization error of the clip value (~1e-5), but cannot actually reach + // the precision due to integer overflow of the elements + const float tolerance = 1e-3f; + tflite::testing::TestUpdateLstmCellInteger(gate_output_data, + int16_node_contents, tolerance); +} + +TF_LITE_MICRO_TEST(CheckHiddenStateUpdateFloat) { + const tflite::testing::GateOutputCheckData<4, 4> gate_output_data = + tflite::testing::Get2X2GateOutputCheckData(); + tflite::testing::LstmNodeContent + float_node_contents = tflite::testing::Create2x3x2X2FloatNodeContents( + gate_output_data.input_data, gate_output_data.hidden_state, + gate_output_data.expected_updated_cell); + + tflite::testing::TestUpdateLstmHiddenFloat( + gate_output_data, float_node_contents, kTestFloatTolerance); +} + +TF_LITE_MICRO_TEST(CheckHiddenStateUpdateInt8) { + const tflite::testing::GateOutputCheckData<4, 4> gate_output_data = + tflite::testing::Get2X2GateOutputCheckData(); + tflite::testing::LstmNodeContent + int8_node_contents = tflite::testing::Create2x3x2X2Int8NodeContents( + gate_output_data.input_data, gate_output_data.hidden_state, + gate_output_data.expected_updated_cell); + + // Theoritical error floor = quantization scale = 0.004705882165580988 + const float tolerance = 1e-2; + tflite::testing::TestUpdateLstmHiddenInteger(gate_output_data, + int8_node_contents, tolerance); +} + +TF_LITE_MICRO_TEST(CheckHiddenStateUpdateInt16) { + const tflite::testing::GateOutputCheckData<4, 4> gate_output_data = + tflite::testing::Get2X2GateOutputCheckData(); + tflite::testing::LstmNodeContent + int16_node_contents = tflite::testing::Create2x3x2X2Int16NodeContents( + gate_output_data.input_data, gate_output_data.hidden_state, + gate_output_data.expected_updated_cell); + + const float tolerance = 1e-4; + tflite::testing::TestUpdateLstmHiddenInteger(gate_output_data, + int16_node_contents, tolerance); +} + +TF_LITE_MICRO_TEST(CheckOneStepLSTMFloat) { + const 
tflite::testing::GateOutputCheckData<4, 4> gate_output_data = + tflite::testing::Get2X2GateOutputCheckData(); + tflite::testing::LstmNodeContent + float_node_contents = tflite::testing::Create2x3x2X2FloatNodeContents( + gate_output_data.input_data, gate_output_data.hidden_state, + gate_output_data.cell_state); + tflite::testing::TestLstmStepFloat(gate_output_data, kTestFloatTolerance, + kTestFloatTolerance, float_node_contents); +} + +TF_LITE_MICRO_TEST(CheckOneStepLSTMInt8) { + const tflite::testing::GateOutputCheckData<4, 4> gate_output_data = + tflite::testing::Get2X2GateOutputCheckData(); + tflite::testing::LstmNodeContent + int8_node_contents = tflite::testing::Create2x3x2X2Int8NodeContents( + gate_output_data.input_data, gate_output_data.hidden_state, + gate_output_data.cell_state); + + const float hidden_state_tolerance = 1e-2; + // cell state degrade due to integer overflow + const float cell_state_tolerance = 1e-1; + tflite::testing::TestLstmStepInteger(gate_output_data, hidden_state_tolerance, + cell_state_tolerance, + int8_node_contents); +} + +TF_LITE_MICRO_TEST(CheckOneStepLSTMInt16) { + const tflite::testing::GateOutputCheckData<4, 4> gate_output_data = + tflite::testing::Get2X2GateOutputCheckData(); + tflite::testing::LstmNodeContent + int16_node_contents = tflite::testing::Create2x3x2X2Int16NodeContents( + gate_output_data.input_data, gate_output_data.hidden_state, + gate_output_data.cell_state); + const float hidden_state_tolerance = 1e-3; // actually very close to 1e-4 + // cell state degrade due to integer overflow + const float cell_state_tolerance = 1e-1; + tflite::testing::TestLstmStepInteger( + gate_output_data, hidden_state_tolerance, cell_state_tolerance, + int16_node_contents); +} + +TF_LITE_MICRO_TEST(TestLSTMEvalFloat) { + const tflite::testing::LstmEvalCheckData<12, 4, 12> kernel_eval_data = + tflite::testing::Get2X2LstmEvalCheckData(); + tflite::testing::LstmNodeContent + float_node_contents = 
tflite::testing::Create2x3x2X2FloatNodeContents( + kernel_eval_data.input_data, kernel_eval_data.hidden_state); + + tflite::testing::TestEvalLstmFloat(kernel_eval_data, kTestFloatTolerance, + kTestFloatTolerance, float_node_contents); +} + +TF_LITE_MICRO_TEST(TestLSTMEvalInt8) { + const tflite::testing::LstmEvalCheckData<12, 4, 12> kernel_eval_data = + tflite::testing::Get2X2LstmEvalCheckData(); + tflite::testing::LstmNodeContent + int8_node_contents = tflite::testing::Create2x3x2X2Int8NodeContents( + kernel_eval_data.input_data, kernel_eval_data.hidden_state); + + const float hidden_state_tolerance = 1e-2; + // cell state degrade due to integer overflow + const float cell_state_tolerance = 1e-2; + tflite::testing::TestEvalLstmInteger(kernel_eval_data, hidden_state_tolerance, + cell_state_tolerance, + int8_node_contents); +} + +TF_LITE_MICRO_TEST(TestLSTMEvalInt16) { + const tflite::testing::LstmEvalCheckData<12, 4, 12> kernel_eval_data = + tflite::testing::Get2X2LstmEvalCheckData(); + tflite::testing::LstmNodeContent + int16_node_contents = tflite::testing::Create2x3x2X2Int16NodeContents( + kernel_eval_data.input_data, kernel_eval_data.hidden_state); + + const float hidden_state_tolerance = 1e-3; // actually very close to 1e-4 + // cell state degrade due to integer overflow + const float cell_state_tolerance = 1e-2; + tflite::testing::TestEvalLstmInteger(kernel_eval_data, hidden_state_tolerance, + cell_state_tolerance, + int16_node_contents); +} + +#endif // !defined(XTENSA) +TF_LITE_MICRO_TESTS_END diff --git a/tensorflow/lite/micro/kernels/lstm_eval_test.h b/tensorflow/lite/micro/kernels/lstm_eval_test.h new file mode 100644 index 0000000..aee12cf --- /dev/null +++ b/tensorflow/lite/micro/kernels/lstm_eval_test.h @@ -0,0 +1,817 @@ +/* Copyright 2023 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. 
+You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#ifndef TENSORFLOW_LITE_MICRO_KERNELS_LSTM_EVAL_TEST_H_ +#define TENSORFLOW_LITE_MICRO_KERNELS_LSTM_EVAL_TEST_H_ + +#include +#include + +#include "tensorflow/lite/micro/kernels/lstm_eval.h" +#include "tensorflow/lite/micro/kernels/testdata/lstm_test_data.h" +#include "tensorflow/lite/micro/test_helpers.h" +#include "tensorflow/lite/micro/testing/micro_test.h" + +namespace tflite { +namespace testing { + +/*Helper Functions (mainly about mimicking the kernel preparation)*/ + +// Create fully connected parameters using quantization settings of input and +// weight tensors. 
+// Since TfLiteContext is not available during the kernel test, here we mimic +// (put into stack memory) CalculateOpDataFullyConnected in +// tensorflow/lite/micro/kernels/fully_connected_common.cc +template +tflite::FullyConnectedParams CreateFCParams( + const TensorQuantizationParameters& input_quant_params, + const TensorQuantizationParameters& weight_quant_params, + const float nonlinear_activation_input_scale) { + OpDataFullyConnected data; + const double input_product_scale = + input_quant_params.scale * weight_quant_params.scale; + double effective_scale = + input_product_scale / + static_cast(nonlinear_activation_input_scale); + + QuantizeMultiplier(effective_scale, &data.output_multiplier, + &data.output_shift); + + data.input_zero_point = input_quant_params.zero_point; + + data.filter_zero_point = 0; // symmetrically quantized + data.output_zero_point = 0; // symmetrically quantized + + data.output_activation_min = std::numeric_limits::min(); + data.output_activation_max = std::numeric_limits::max(); + + return tflite::FullyConnectedParamsQuantized(data); +} + +inline tflite::FullyConnectedParams CreateFCParamsFloat() { + FullyConnectedParams op_params; + CalculateActivationRange(kTfLiteActNone, &op_params.float_activation_min, + &op_params.float_activation_max); + return op_params; +} + +// Wrapper function to create gate parameters for the four internal LSTM gates +template +tflite::GateParameters CreateGateParams( + const TensorQuantizationParameters& input_quant_params, + const TensorQuantizationParameters& hidden_state_quant_params, + const GateQuantizationParameters& gate_quantization_settings, + const float nonlinear_activation_input_scale) { + tflite::GateParameters gate_params = {}; + gate_params.input_fc_params = CreateFCParams( + input_quant_params, gate_quantization_settings.activation_weight, + nonlinear_activation_input_scale); + gate_params.recurrent_fc_params = CreateFCParams( + hidden_state_quant_params, 
gate_quantization_settings.recurrent_weight, + nonlinear_activation_input_scale); + return gate_params; +} + +inline tflite::GateParameters CreateGateParamsFloat() { + tflite::GateParameters gate_params = {}; + gate_params.input_fc_params = CreateFCParamsFloat(); + gate_params.recurrent_fc_params = CreateFCParamsFloat(); + return gate_params; +} +// Create parameters for element wise multiplication that happens in a) cell +// state update ; b) hidden state update +// Note that all the output of gates are symmetrically quantized so only scales +// are required for input. However, during the hidden state update phase, the +// output is the updated hidden state, which is asymmetrically quantized. Thus +// output may require zero point +template +tflite::ArithmeticParams CreateInterGateMulParams(const float input1_scale, + const float input2_scale, + const float output_scale, + const int output_zp = 0) { + tflite::ArithmeticParams op_params = {}; + op_params.quantized_activation_min = std::numeric_limits::min(); + op_params.quantized_activation_max = std::numeric_limits::max(); + op_params.input1_offset = 0; + op_params.input2_offset = 0; + op_params.output_offset = output_zp; + + const double input_product_scale = + static_cast(input1_scale) * static_cast(input2_scale); + double effective_scale = + input_product_scale / static_cast(output_scale); + + QuantizeMultiplier(effective_scale, &op_params.output_multiplier, + &op_params.output_shift); + return op_params; +} + +inline tflite::ArithmeticParams CreateInterGateMulParamsFloat() { + tflite::ArithmeticParams op_params = {}; + CalculateActivationRange(kTfLiteActNone, &op_params.float_activation_min, + &op_params.float_activation_max); + return op_params; +} + +// Create the additional information about the cell state, which include: +// cell_state_scale_power: used in integer nonlinear function (e.g., tanh) +// quantized_cell_clip: quantized cell clip range +CellStateInfo CreateLstmCellStateInfo(const float 
cell_state_scale, + const float cell_clip) { + CellStateInfo cell_state_info; + // cell_state_scale_power: 2^-cell_state_scale_power = cell state scale + int buffer; + tflite::CheckedLog2(cell_state_scale, &buffer); + cell_state_info.cell_state_scale_power = buffer; + // Cell state specifics + cell_state_info.cell_clip = cell_clip; + cell_state_info.quantized_cell_clip = static_cast( + std::min(std::max(static_cast(cell_clip) / + static_cast(cell_state_scale), + -32768.0), + 32767.0)); + return cell_state_info; +} + +// Create LSTMKernelContents from LstmNodeContent by copying TfLiteEvalTensor +// pointers +template +LSTMKernelContents CreateLSTMKernelContent( + LstmNodeContent& + node_contents) { + LSTMKernelContents kernel_content; + // Point to correct tensors + kernel_content.internal_tensors[kLstmInputTensor] = + node_contents.GetEvalTensor(kLstmInputTensor); + kernel_content.internal_tensors[kLstmInputToInputWeightsTensor] = + node_contents.GetEvalTensor(kLstmInputToInputWeightsTensor); + kernel_content.internal_tensors[kLstmInputToForgetWeightsTensor] = + node_contents.GetEvalTensor(kLstmInputToForgetWeightsTensor); + kernel_content.internal_tensors[kLstmInputToCellWeightsTensor] = + node_contents.GetEvalTensor(kLstmInputToCellWeightsTensor); + kernel_content.internal_tensors[kLstmInputToOutputWeightsTensor] = + node_contents.GetEvalTensor(kLstmInputToOutputWeightsTensor); + kernel_content.internal_tensors[kLstmRecurrentToInputWeightsTensor] = + node_contents.GetEvalTensor(kLstmRecurrentToInputWeightsTensor); + kernel_content.internal_tensors[kLstmRecurrentToForgetWeightsTensor] = + node_contents.GetEvalTensor(kLstmRecurrentToForgetWeightsTensor); + kernel_content.internal_tensors[kLstmRecurrentToCellWeightsTensor] = + node_contents.GetEvalTensor(kLstmRecurrentToCellWeightsTensor); + kernel_content.internal_tensors[kLstmRecurrentToOutputWeightsTensor] = + node_contents.GetEvalTensor(kLstmRecurrentToOutputWeightsTensor); + 
kernel_content.internal_tensors[kLstmInputGateBiasTensor] = + node_contents.GetEvalTensor(kLstmInputGateBiasTensor); + kernel_content.internal_tensors[kLstmForgetGateBiasTensor] = + node_contents.GetEvalTensor(kLstmForgetGateBiasTensor); + kernel_content.internal_tensors[kLstmCellGateBiasTensor] = + node_contents.GetEvalTensor(kLstmCellGateBiasTensor); + kernel_content.internal_tensors[kLstmOutputGateBiasTensor] = + node_contents.GetEvalTensor(kLstmOutputGateBiasTensor); + kernel_content.internal_tensors[kLstmOutputStateTensor] = + node_contents.GetEvalTensor(kLstmOutputStateTensor); + kernel_content.internal_tensors[kLstmOutputGateBiasTensor] = + node_contents.GetEvalTensor(kLstmOutputGateBiasTensor); + kernel_content.internal_tensors[kLstmCellStateTensor] = + node_contents.GetEvalTensor(kLstmCellStateTensor); + // Not used internal tensors + kernel_content.internal_tensors[kLstmCellToInputWeightsTensor] = nullptr; + kernel_content.internal_tensors[kLstmCellToForgetWeightsTensor] = nullptr; + kernel_content.internal_tensors[kLstmCellToOutputWeightsTensor] = nullptr; + kernel_content.internal_tensors[kLstmProjectionWeightsTensor] = nullptr; + kernel_content.internal_tensors[kLstmProjectionBiasTensor] = nullptr; + kernel_content.internal_tensors[kLstmInputLayerNormCoefficientsTensor] = + nullptr; + kernel_content.internal_tensors[kLstmForgetLayerNormCoefficientsTensor] = + nullptr; + kernel_content.internal_tensors[kLstmInputLayerNormCoefficientsTensor] = + nullptr; + kernel_content.internal_tensors[kLstmCellLayerNormCoefficientsTensor] = + nullptr; + kernel_content.internal_tensors[kLstmOutputLayerNormCoefficientsTensor] = + nullptr; + // Output tensor + kernel_content.output_tensor = node_contents.OutputEvalTensor(); + return kernel_content; +} + +// Deduce the size information (Batch (B), Time Steps (T), Input dimension (I), +// State dimension (S)) that defines the LSTM using the input and hidden state +// tensor +LstmSizeInfo CreateLstmSizeInfo( + const bool 
time_major, const TfLiteIntArray* input_tensor_shape, + const TfLiteIntArray* hidden_state_tensor_shape) { + LstmSizeInfo size_info; + size_info.time_major = time_major; + size_info.batch_size = + time_major ? input_tensor_shape->data[1] : input_tensor_shape->data[0]; + size_info.time_steps = + time_major ? input_tensor_shape->data[0] : input_tensor_shape->data[1]; + size_info.input_dimension = input_tensor_shape->data[2]; + size_info.state_dimension = hidden_state_tensor_shape->data[1]; + return size_info; +} + +// Create the LstmOpData using the LstmNodeContent and +// NodeQuantizationParameters (defined in test_data/lstm_test_data) During the +// actual inference phase, OpDataLSTM is created using information from the +// flatbuffer file. The test divide the complete LSTM node information into +// LstmNodeContent and NodeQuantizationParameters for easy construction +// purposes +template +OpDataLSTM CreateLstmOpData( + LstmNodeContent& + node_contents) { + const auto& builtin_data = node_contents.BuiltinData(); + const auto& quantization_settings = node_contents.QuantizationSettings(); + OpDataLSTM op_data; + + op_data.cell_gate_nonlinear_type = builtin_data.activation; + op_data.size_info = + CreateLstmSizeInfo(builtin_data.time_major, + node_contents.GetEvalTensor(kLstmInputTensor)->dims, + node_contents.HiddenStateEvalTensor()->dims); + + op_data.cell_state_info = CreateLstmCellStateInfo( + quantization_settings.cell_state.scale, builtin_data.cell_clip); + + // Gate Parameters + op_data.forget_gate_parameters = CreateGateParams( + quantization_settings.input, quantization_settings.hidden_state, + quantization_settings.forget_gate, + quantization_settings.nonlinear_activation_input_scale); + op_data.input_gate_parameters = CreateGateParams( + quantization_settings.input, quantization_settings.hidden_state, + quantization_settings.input_gate, + quantization_settings.nonlinear_activation_input_scale); + op_data.cell_gate_parameters = CreateGateParams( + 
quantization_settings.input, quantization_settings.hidden_state, + quantization_settings.cell_gate, + quantization_settings.nonlinear_activation_input_scale); + op_data.output_gate_parameters = CreateGateParams( + quantization_settings.input, quantization_settings.hidden_state, + quantization_settings.output_gate, + quantization_settings.nonlinear_activation_input_scale); + // Inter gate multiplication parameters + op_data.inter_gate_parameters.forget_cell_mul_params = + CreateInterGateMulParams( + quantization_settings.nonlinear_activation_output_scale, + quantization_settings.cell_state.scale, + quantization_settings.cell_state.scale); + op_data.inter_gate_parameters.input_mul_params = + CreateInterGateMulParams( + quantization_settings.nonlinear_activation_output_scale, + quantization_settings.nonlinear_activation_output_scale, + quantization_settings.cell_state.scale); + op_data.inter_gate_parameters.output_mul_params = + CreateInterGateMulParams( + quantization_settings.nonlinear_activation_output_scale, + quantization_settings.nonlinear_activation_output_scale, + quantization_settings.hidden_state.scale, + quantization_settings.hidden_state.zero_point); + return op_data; +} + +template +OpDataLSTM CreateLstmOpDataFloat( + LstmNodeContent& node_contents) { + const auto& builtin_data = node_contents.BuiltinData(); + OpDataLSTM op_data; + + op_data.cell_gate_nonlinear_type = builtin_data.activation; + op_data.size_info = + CreateLstmSizeInfo(builtin_data.time_major, + node_contents.GetEvalTensor(kLstmInputTensor)->dims, + node_contents.HiddenStateEvalTensor()->dims); + op_data.cell_state_info.cell_clip = builtin_data.cell_clip; + op_data.cell_state_info.quantized_cell_clip = 0; // No quantization + op_data.cell_state_info.cell_state_scale_power = 0; // No quantization + + // Gate Parameters + op_data.forget_gate_parameters = CreateGateParamsFloat(); + op_data.input_gate_parameters = CreateGateParamsFloat(); + op_data.cell_gate_parameters = 
CreateGateParamsFloat(); + op_data.output_gate_parameters = CreateGateParamsFloat(); + // Inter gate multiplication parameters + op_data.inter_gate_parameters.forget_cell_mul_params = + CreateInterGateMulParamsFloat(); + op_data.inter_gate_parameters.input_mul_params = + CreateInterGateMulParamsFloat(); + op_data.inter_gate_parameters.output_mul_params = + CreateInterGateMulParamsFloat(); + return op_data; +} + +/*Test Functions Below Here*/ +template +void ValidateResultGoldens(const T* golden, const T* output_data, + const int output_len, const float tolerance) { + for (int i = 0; i < output_len; ++i) { + TF_LITE_MICRO_EXPECT_NEAR(golden[i], output_data[i], tolerance); + } +} + +template +void TestCalculateLstmGateFloat(const TfLiteEvalTensor* input, + const TfLiteEvalTensor* input_weight, + const TfLiteEvalTensor* input_bias, + // Recurrent FC + const TfLiteEvalTensor* recurrent, + const TfLiteEvalTensor* recurrent_weight, + const TfLiteEvalTensor* recurrent_bias, + // Result comparison + TfLiteFusedActivation nonlinear_type, + const float* expected_vals, float tolerance) { + float gate_output[batch_size * state_dimension] = {}; + float fc_output_buffer[batch_size * state_dimension] = {}; + + tflite::GateParameters gate_params = CreateGateParamsFloat(); + + // Create step information: only one time step, no need to update + auto size_info = tflite::testing::CreateLstmSizeInfo( + /*time_major*/ false, input->dims, recurrent->dims); + // revise time_major = true to enable batch inference + size_info.time_major = true; + tflite::lstm_internal::LstmStepManager step_info(&size_info); + + tflite::lstm_internal::CalculateLstmGate( + step_info, gate_params, + // Input FC + input, input_weight, input_bias, + // Recurrent FC + recurrent, recurrent_weight, recurrent_bias, + // Output + gate_output, + // Scratch arrays + fc_output_buffer, nonlinear_type); + + ValidateResultGoldens(expected_vals, gate_output, + batch_size * state_dimension, tolerance); +} + +template +void 
TestCalculateLstmGateInteger( + const TfLiteEvalTensor* input, const TfLiteEvalTensor* input_weight, + const TfLiteEvalTensor* input_bias, + // Recurrent FC + const TfLiteEvalTensor* recurrent, const TfLiteEvalTensor* recurrent_weight, + const TfLiteEvalTensor* recurrent_bias, + // Quantization settings + const NodeQuantizationParameters& node_quantization_settings, + const GateQuantizationParameters& gate_quantization_settings, + // Result comparison + TfLiteFusedActivation nonlinear_type, const float* expected_vals, + float tolerance) { + CellType gate_output[batch_size * state_dimension] = {}; + CellType fc_output_buffer[batch_size * state_dimension] = {}; + + tflite::GateParameters gate_params = CreateGateParams( + node_quantization_settings.input, node_quantization_settings.hidden_state, + gate_quantization_settings, + node_quantization_settings.nonlinear_activation_input_scale); + + // Create step information: only one time step, no need to update + auto size_info = tflite::testing::CreateLstmSizeInfo( + /*time_major*/ false, input->dims, recurrent->dims); + // revise time_major = true to enable batch inference + size_info.time_major = true; + tflite::lstm_internal::LstmStepManager step_info(&size_info); + + // only int8 weight is supported now + tflite::lstm_internal::CalculateLstmGate( + step_info, gate_params, + // Input FC + input, input_weight, input_bias, + // Recurrent FC + recurrent, recurrent_weight, recurrent_bias, + // Output + gate_output, + // Scratch arrays + fc_output_buffer, nonlinear_type); + + float gate_output_float[batch_size * state_dimension] = {}; + Dequantize(gate_output, batch_size * state_dimension, + node_quantization_settings.nonlinear_activation_output_scale, 0, + gate_output_float); + + ValidateResultGoldens(expected_vals, gate_output_float, + batch_size * state_dimension, tolerance); +} + +template +void TestUpdateLstmCellFloat( + const GateOutputCheckData& gate_output_data, + LstmNodeContent& node_content, + const float 
tolerance) { + float buffer[batch_size * state_dimension] = {}; + + auto forget_cell_mul_params = CreateInterGateMulParamsFloat(); + auto input_mul_params = CreateInterGateMulParamsFloat(); + + auto cell_state = node_content.CellStateEvalTensor(); + // Create step information: only one time step, no need to update + auto size_info = tflite::testing::CreateLstmSizeInfo( + /*time_major*/ false, + node_content.GetEvalTensor(tflite::kLstmInputTensor)->dims, + node_content.HiddenStateEvalTensor()->dims); + // revise time_major = true to enable batch inference + size_info.time_major = true; + tflite::lstm_internal::LstmStepManager step_info(&size_info); + + // copy the data since it will be updated + float forget_gate[batch_size * state_dimension] = {}; + std::memcpy(forget_gate, gate_output_data.expected_forget_gate_output, + batch_size * state_dimension * sizeof(float)); + + CellStateInfo cell_state_info; + cell_state_info.cell_clip = node_content.BuiltinData().cell_clip; + // Call the function to be tested + tflite::lstm_internal::UpdateLstmCell( + step_info, cell_state, forget_gate, + gate_output_data.expected_input_gate_output, + gate_output_data.expected_cell_gate_output, forget_cell_mul_params, + input_mul_params, cell_state_info, buffer); + + ValidateResultGoldens(gate_output_data.expected_updated_cell, + tflite::micro::GetTensorData(cell_state), + batch_size * state_dimension, tolerance); +} + +template +void TestUpdateLstmCellInteger( + const GateOutputCheckData& gate_output_data, + LstmNodeContent& node_content, + const float tolerance) { + const auto& quantization_settings = node_content.QuantizationSettings(); + CellType quantized_forget_gate[batch_size * state_dimension] = {}; + tflite::Quantize(gate_output_data.expected_forget_gate_output, + quantized_forget_gate, batch_size * state_dimension, + quantization_settings.nonlinear_activation_output_scale, 0); + + CellType quantized_input_gate[batch_size * state_dimension] = {}; + 
tflite::Quantize(gate_output_data.expected_input_gate_output, + quantized_input_gate, batch_size * state_dimension, + quantization_settings.nonlinear_activation_output_scale, 0); + + CellType quantized_cell_gate[batch_size * state_dimension] = {}; + tflite::Quantize(gate_output_data.expected_cell_gate_output, + quantized_cell_gate, batch_size * state_dimension, + quantization_settings.nonlinear_activation_output_scale, 0); + + CellType buffer[batch_size * state_dimension] = {}; + + auto forget_cell_mul_params = CreateInterGateMulParams( + quantization_settings.nonlinear_activation_output_scale, + quantization_settings.cell_state.scale, + quantization_settings.cell_state.scale); + auto input_mul_params = CreateInterGateMulParams( + quantization_settings.nonlinear_activation_output_scale, + quantization_settings.nonlinear_activation_output_scale, + quantization_settings.cell_state.scale); + + auto cell_state_info = + CreateLstmCellStateInfo(quantization_settings.cell_state.scale, + node_content.BuiltinData().cell_clip); + + auto cell_state = node_content.CellStateEvalTensor(); + // Create step information: only one time step, no need to update + auto size_info = tflite::testing::CreateLstmSizeInfo( + /*time_major*/ false, + node_content.GetEvalTensor(tflite::kLstmInputTensor)->dims, + node_content.HiddenStateEvalTensor()->dims); + // revise time_major = true to enable batch inference + size_info.time_major = true; + tflite::lstm_internal::LstmStepManager step_info(&size_info); + + // Call the function to be tested + tflite::lstm_internal::UpdateLstmCell( + step_info, cell_state, quantized_forget_gate, quantized_input_gate, + quantized_cell_gate, forget_cell_mul_params, input_mul_params, + cell_state_info, buffer); + + float cell_state_float[batch_size * state_dimension] = {}; + Dequantize(tflite::micro::GetTensorData(cell_state), + batch_size * state_dimension, + quantization_settings.cell_state.scale, + quantization_settings.cell_state.zero_point, cell_state_float); 
+ + ValidateResultGoldens(gate_output_data.expected_updated_cell, + cell_state_float, batch_size * state_dimension, + tolerance); +} + +template +void TestUpdateLstmHiddenFloat( + const GateOutputCheckData& gate_output_data, + LstmNodeContent& node_content, + const float tolerance) { + float buffer[batch_size * state_dimension] = {}; + + auto mul_params = CreateInterGateMulParamsFloat(); + + int32_t cell_state_scale_power = 0; + + // Create step information: only one time step, no need to update + auto size_info = tflite::testing::CreateLstmSizeInfo( + /*time_major*/ false, + node_content.GetEvalTensor(tflite::kLstmInputTensor)->dims, + node_content.HiddenStateEvalTensor()->dims); + // revise time_major = true to enable batch inference + size_info.time_major = true; + tflite::lstm_internal::LstmStepManager step_info(&size_info); + + auto cell_state = node_content.CellStateEvalTensor(); + auto hidden_state = node_content.HiddenStateEvalTensor(); + + tflite::lstm_internal::UpdateLstmHidden( + step_info, cell_state, hidden_state, + gate_output_data.expected_output_gate_output, mul_params, + cell_state_scale_power, buffer); + + ValidateResultGoldens(gate_output_data.expected_updated_hidden, + tflite::micro::GetTensorData(hidden_state), + batch_size * state_dimension, tolerance); +} + +template +void TestUpdateLstmHiddenInteger( + const GateOutputCheckData& gate_output_data, + LstmNodeContent& node_content, + const float tolerance) { + const auto& quantization_settings = node_content.QuantizationSettings(); + CellType quantized_output_gate[batch_size * state_dimension] = {}; + tflite::Quantize(gate_output_data.expected_output_gate_output, + quantized_output_gate, batch_size * state_dimension, + quantization_settings.nonlinear_activation_output_scale, 0); + + CellType buffer[batch_size * state_dimension] = {}; + + auto mul_params = CreateInterGateMulParams( + quantization_settings.nonlinear_activation_output_scale, + 
quantization_settings.nonlinear_activation_output_scale, + quantization_settings.hidden_state.scale, + quantization_settings.hidden_state.zero_point); + + int cell_state_scale_power_buffer; + tflite::CheckedLog2(quantization_settings.cell_state.scale, + &cell_state_scale_power_buffer); + int32_t cell_state_scale_power = cell_state_scale_power_buffer; + + // Create step information: only one time step, no need to update + auto size_info = tflite::testing::CreateLstmSizeInfo( + /*time_major*/ false, + node_content.GetEvalTensor(tflite::kLstmInputTensor)->dims, + node_content.HiddenStateEvalTensor()->dims); + // revise time_major = true to enable batch inference + size_info.time_major = true; + tflite::lstm_internal::LstmStepManager step_info(&size_info); + + auto cell_state = node_content.CellStateEvalTensor(); + auto hidden_state = node_content.HiddenStateEvalTensor(); + + tflite::lstm_internal::UpdateLstmHidden( + step_info, cell_state, hidden_state, quantized_output_gate, mul_params, + cell_state_scale_power, buffer); + + float hidden_state_float[batch_size * state_dimension] = {}; + Dequantize(tflite::micro::GetTensorData(hidden_state), + batch_size * state_dimension, + quantization_settings.hidden_state.scale, + quantization_settings.hidden_state.zero_point, hidden_state_float); + + ValidateResultGoldens(gate_output_data.expected_updated_hidden, + hidden_state_float, batch_size * state_dimension, + tolerance); +} + +template +void TestLstmStepFloat( + const GateOutputCheckData& gate_output_data, + const float hidden_state_tolerance, const float cell_state_tolerance, + /*can not be const, state will be updated*/ + LstmNodeContent& node_contents) { + // Mimicking the kernel preparation phase, node_contents approximate the + LSTMKernelContents kernel_content = CreateLSTMKernelContent(node_contents); + LSTMBuffers buffers; + // Scratch buffers on the stack + float buffer0[batch_size * state_dimension] = {}; + buffers.buffer0 = buffer0; + float buffer1[batch_size * 
state_dimension] = {}; + buffers.buffer1 = buffer1; + float buffer2[batch_size * state_dimension] = {}; + buffers.buffer2 = buffer2; + float buffer3[batch_size * state_dimension] = {}; + buffers.buffer3 = buffer3; + + OpDataLSTM op_data = CreateLstmOpDataFloat(node_contents); + // set time_major to true to test batch inference + op_data.size_info.time_major = true; + tflite::lstm_internal::LstmStepManager step_info(&op_data.size_info); + tflite::lstm_internal::LstmStep( + step_info, op_data, kernel_content, buffers); + + ValidateResultGoldens( + gate_output_data.expected_updated_hidden, + tflite::micro::GetTensorData(kernel_content.HiddenStateTensor()), + batch_size * state_dimension, hidden_state_tolerance); + ValidateResultGoldens( + gate_output_data.expected_updated_cell, + tflite::micro::GetTensorData(kernel_content.CellStateTensor()), + batch_size * state_dimension, cell_state_tolerance); +} + +template +void TestLstmStepInteger( + const GateOutputCheckData& gate_output_data, + const float hidden_state_tolerance, const float cell_state_tolerance, + /*can not be const, state will be updated*/ + LstmNodeContent& + node_contents) { + // Mimicking the kernel preparation phase, node_contents approximate the + LSTMKernelContents kernel_content = CreateLSTMKernelContent(node_contents); + LSTMBuffers buffers; + + // Scratch buffers on the stack + CellType buffer0[batch_size * state_dimension] = {}; + buffers.buffer0 = buffer0; + CellType buffer1[batch_size * state_dimension] = {}; + buffers.buffer1 = buffer1; + CellType buffer2[batch_size * state_dimension] = {}; + buffers.buffer2 = buffer2; + CellType buffer3[batch_size * state_dimension] = {}; + buffers.buffer3 = buffer3; + + OpDataLSTM op_data = CreateLstmOpData(node_contents); + // set time_major to true to test batch inference + op_data.size_info.time_major = true; + tflite::lstm_internal::LstmStepManager step_info(&op_data.size_info); + tflite::lstm_internal::LstmStep(step_info, op_data, kernel_content, + 
buffers); + + const auto& quantization_settings = node_contents.QuantizationSettings(); + float dequantized_hidden_state[batch_size * state_dimension] = {}; + Dequantize( + tflite::micro::GetTensorData( + kernel_content.HiddenStateTensor()), + batch_size * state_dimension, quantization_settings.hidden_state.scale, + quantization_settings.hidden_state.zero_point, dequantized_hidden_state); + + float dequantized_cell_state[batch_size * state_dimension] = {}; + Dequantize( + tflite::micro::GetTensorData(kernel_content.CellStateTensor()), + batch_size * state_dimension, quantization_settings.cell_state.scale, + quantization_settings.cell_state.zero_point, dequantized_cell_state); + + ValidateResultGoldens(gate_output_data.expected_updated_hidden, + dequantized_hidden_state, batch_size * state_dimension, + hidden_state_tolerance); + ValidateResultGoldens(gate_output_data.expected_updated_cell, + dequantized_cell_state, batch_size * state_dimension, + cell_state_tolerance); +} + +template +void TestEvalLstmFloat( + const LstmEvalCheckData< + batch_size * time_steps * input_dimension, batch_size * state_dimension, + batch_size * state_dimension * time_steps>& eval_check_data, + const float hidden_state_tolerance, const float cell_state_tolerance, + LstmNodeContent& node_contents) { + // Mimicking the kernel preparation phase, node_contents approximate the node + LSTMKernelContents kernel_content = CreateLSTMKernelContent(node_contents); + // Scratch buffers on the stack + LSTMBuffers buffers; + float buffer0[batch_size * state_dimension] = {}; + buffers.buffer0 = buffer0; + float buffer1[batch_size * state_dimension] = {}; + buffers.buffer1 = buffer1; + float buffer2[batch_size * state_dimension] = {}; + buffers.buffer2 = buffer2; + float buffer3[batch_size * state_dimension] = {}; + buffers.buffer3 = buffer3; + + OpDataLSTM op_data = CreateLstmOpDataFloat(node_contents); + + tflite::EvalLstm(op_data, kernel_content, + buffers); + + 
ValidateResultGoldens(eval_check_data.expected_hidden_state, + node_contents.GetHiddenStateData(), + batch_size * state_dimension, hidden_state_tolerance); + + ValidateResultGoldens(eval_check_data.expected_cell_state, + node_contents.GetCellStateData(), + batch_size * state_dimension, cell_state_tolerance); + + ValidateResultGoldens(eval_check_data.expected_output, + node_contents.GetOutputData(), + batch_size * state_dimension, hidden_state_tolerance); +} + +template +void TestEvalLstmInteger( + const LstmEvalCheckData< + batch_size * time_steps * input_dimension, batch_size * state_dimension, + batch_size * state_dimension * time_steps>& eval_check_data, + const float hidden_state_tolerance, const float cell_state_tolerance, + LstmNodeContent& + node_contents) { + // Mimicking the kernel preparation phase, node_contents approximate the node + LSTMKernelContents kernel_content = CreateLSTMKernelContent(node_contents); + // Scratch buffers on the stack + LSTMBuffers buffers; + CellType buffer0[batch_size * state_dimension] = {}; + buffers.buffer0 = buffer0; + CellType buffer1[batch_size * state_dimension] = {}; + buffers.buffer1 = buffer1; + CellType buffer2[batch_size * state_dimension] = {}; + buffers.buffer2 = buffer2; + CellType buffer3[batch_size * state_dimension] = {}; + buffers.buffer3 = buffer3; + + OpDataLSTM op_data = CreateLstmOpData(node_contents); + + tflite::EvalLstm( + op_data, kernel_content, buffers); + + const auto& quantization_settings = node_contents.QuantizationSettings(); + float dequantized_hidden_state[batch_size * state_dimension] = {}; + Dequantize(node_contents.GetHiddenStateData(), batch_size * state_dimension, + quantization_settings.hidden_state.scale, + quantization_settings.hidden_state.zero_point, + dequantized_hidden_state); + + ValidateResultGoldens(eval_check_data.expected_hidden_state, + dequantized_hidden_state, batch_size * state_dimension, + hidden_state_tolerance); + + float dequantized_cell_state[batch_size * 
state_dimension] = {}; + Dequantize(node_contents.GetCellStateData(), batch_size * state_dimension, + quantization_settings.cell_state.scale, + quantization_settings.cell_state.zero_point, + dequantized_cell_state); + ValidateResultGoldens(eval_check_data.expected_cell_state, + dequantized_cell_state, batch_size * state_dimension, + cell_state_tolerance); + + float dequantized_output[batch_size * state_dimension * time_steps] = {}; + Dequantize(node_contents.GetOutputData(), + batch_size * state_dimension * time_steps, + quantization_settings.output.scale, + quantization_settings.output.zero_point, dequantized_output); + ValidateResultGoldens(eval_check_data.expected_output, dequantized_output, + batch_size * state_dimension, hidden_state_tolerance); +} + +} // namespace testing +} // namespace tflite + +#endif // TENSORFLOW_LITE_MICRO_KERNELS_LSTM_EVAL_TEST_H_ diff --git a/tensorflow/lite/micro/kernels/lstm_shared.h b/tensorflow/lite/micro/kernels/lstm_shared.h new file mode 100644 index 0000000..dbdc3c5 --- /dev/null +++ b/tensorflow/lite/micro/kernels/lstm_shared.h @@ -0,0 +1,150 @@ +/* Copyright 2019 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/ +#ifndef TENSORFLOW_LITE_MICRO_KERNELS_LSTM_SHARED_H_ +#define TENSORFLOW_LITE_MICRO_KERNELS_LSTM_SHARED_H_ + +#include "tensorflow/lite/c/builtin_op_data.h" +#include "tensorflow/lite/kernels/internal/types.h" + +namespace tflite { + +// Input Tensors of size {n_batch, n_input} +constexpr int kLstmInputTensor = 0; + +// Input weight tensors of size: {n_cell, n_input} +constexpr int kLstmInputToInputWeightsTensor = 1; // Optional +constexpr int kLstmInputToForgetWeightsTensor = 2; +constexpr int kLstmInputToCellWeightsTensor = 3; +constexpr int kLstmInputToOutputWeightsTensor = 4; + +// Recurrent weight tensors of size {n_cell, n_output} +constexpr int kLstmRecurrentToInputWeightsTensor = 5; // Optional +constexpr int kLstmRecurrentToForgetWeightsTensor = 6; +constexpr int kLstmRecurrentToCellWeightsTensor = 7; +constexpr int kLstmRecurrentToOutputWeightsTensor = 8; + +// Peephole weights tensors of size {n_cell}, representing a diagonal matrix. +constexpr int kLstmCellToInputWeightsTensor = 9; // Optional +constexpr int kLstmCellToForgetWeightsTensor = 10; // Optional +constexpr int kLstmCellToOutputWeightsTensor = 11; // Optional + +// Gates bias tensors of size {n_cell} +constexpr int kLstmInputGateBiasTensor = 12; // Optional +constexpr int kLstmForgetGateBiasTensor = 13; +constexpr int kLstmCellGateBiasTensor = 14; +constexpr int kLstmOutputGateBiasTensor = 15; + +// Projection weight tensor of size {n_output, n_cell} +constexpr int kLstmProjectionWeightsTensor = 16; // Optional +// Projection bias tensor of size {n_output} +constexpr int kLstmProjectionBiasTensor = 17; // Optional + +// These state tensors are defined as variable tensors, and will be modified by +// this op. +constexpr int kLstmOutputStateTensor = 18; +constexpr int kLstmCellStateTensor = 19; + +// Layer norm coefficient tensors of size {n_cell}, representing a diagonal +// matrix. 
+constexpr int kLstmInputLayerNormCoefficientsTensor = 20; // Optional +constexpr int kLstmForgetLayerNormCoefficientsTensor = 21; // Optional +constexpr int kLstmCellLayerNormCoefficientsTensor = 22; // Optional +constexpr int kLstmOutputLayerNormCoefficientsTensor = 23; // Optional + +// Output tensors. +constexpr int kLstmOutputTensor = 0; + +// Parameters for the two fully connected computations inside each gate +struct GateParameters { + FullyConnectedParams input_fc_params; + FullyConnectedParams recurrent_fc_params; +}; + +// Parameters for the element-wise multiplications between gate outputs +struct InterGateParameters { + ArithmeticParams forget_cell_mul_params; + ArithmeticParams input_mul_params; + ArithmeticParams output_mul_params; +}; + +// Size information about the LSTM kernel, which is deduced from tensors stored +// in the flat buffer file. +struct LstmSizeInfo { + bool time_major; + int batch_size; + int time_steps; + int input_dimension; + int state_dimension; +}; + +// Contains information about the cell state tensor +struct CellStateInfo { + float cell_clip; + // clipping range for cell state only 16 bits cell is supported (could be + // generalized through templatization) + int16_t quantized_cell_clip; + // 2^-cell_state_scale_power = cell state scale, required by integer tanh + // computation + int32_t cell_state_scale_power; +}; + +// Contains required computation information for LSTM kernel evaluation. +// Specifically, it includes shape and quantization settings for the LSTM +// internal operations.
Formatted to support operations defined in the +// tensorflow/lite/kernels/internal/reference/integer_ops +// Should be constructed during the preparation phase +struct OpDataLSTM { + LstmSizeInfo size_info; + CellStateInfo cell_state_info; + TfLiteFusedActivation cell_gate_nonlinear_type; + GateParameters forget_gate_parameters; + GateParameters input_gate_parameters; + GateParameters cell_gate_parameters; + GateParameters output_gate_parameters; + InterGateParameters inter_gate_parameters; + int buffer_indices[4]; // TFLM only +}; + +// Provide an interface to access the internal tensors and buffers used for LSTM +// invocation. Constructed during the invocation phase +struct LSTMKernelContents { + public: + // Internal tensors, fixed (const). see lstm_shared.h for tensor names + const TfLiteEvalTensor* GetInternalTensor(const int tensor_index) const { + return internal_tensors[tensor_index]; + } + // Variable tensors (will be changed, can not be const) + TfLiteEvalTensor* HiddenStateTensor() { + return internal_tensors[kLstmOutputStateTensor]; + } + TfLiteEvalTensor* CellStateTensor() { + return internal_tensors[kLstmCellStateTensor]; + } + // Node internal tensors with indexes defined at the beginning of the file + TfLiteEvalTensor* internal_tensors[24]; + TfLiteEvalTensor* output_tensor; +}; + +template +struct LSTMBuffers { + // TFLM buffers requires buffer index from LstmOpData. + CellType* buffer0; + CellType* buffer1; + CellType* buffer2; + CellType* buffer3; +}; + +} // namespace tflite +#endif // TENSORFLOW_LITE_MICRO_KERNELS_LSTM_SHARED_H_ diff --git a/tensorflow/lite/micro/kernels/maximum_minimum.cc b/tensorflow/lite/micro/kernels/maximum_minimum.cc new file mode 100644 index 0000000..4871707 --- /dev/null +++ b/tensorflow/lite/micro/kernels/maximum_minimum.cc @@ -0,0 +1,122 @@ +/* Copyright 2022 The TensorFlow Authors. All Rights Reserved. 
+ +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#include "tensorflow/lite/kernels/internal/reference/maximum_minimum.h" + +#include "tensorflow/lite/c/builtin_op_data.h" +#include "tensorflow/lite/c/common.h" +#include "tensorflow/lite/kernels/internal/common.h" +#include "tensorflow/lite/kernels/internal/quantization_util.h" +#include "tensorflow/lite/kernels/internal/tensor_ctypes.h" +#include "tensorflow/lite/kernels/kernel_util.h" +#include "tensorflow/lite/kernels/op_macros.h" +#include "tensorflow/lite/micro/kernels/kernel_util.h" +#include "tensorflow/lite/micro/micro_log.h" + +namespace tflite { + +namespace { + +// This file has a reference implementation of TFMaximum/TFMinimum. +enum KernelType { + kReference, +}; + +constexpr int kInputTensor1 = 0; +constexpr int kInputTensor2 = 1; +constexpr int kOutputTensor = 0; + +struct OpContext { + OpContext(TfLiteContext* context, TfLiteNode* node) { + input1 = tflite::micro::GetEvalInput(context, node, kInputTensor1); + input2 = tflite::micro::GetEvalInput(context, node, kInputTensor2); + output = tflite::micro::GetEvalOutput(context, node, kOutputTensor); + } + const TfLiteEvalTensor* input1; + const TfLiteEvalTensor* input2; + TfLiteEvalTensor* output; +}; + +struct MaximumOp { + template + static data_type op(data_type el1, data_type el2) { + return el1 > el2 ? 
el1 : el2; + } +}; + +struct MinimumOp { + template + static data_type op(data_type el1, data_type el2) { + return el1 < el2 ? el1 : el2; + } +}; + +template +void TFLiteOperation(TfLiteContext* context, TfLiteNode* node, + const OpContext& op_context) { + reference_ops::MaximumMinimumBroadcastSlow( + tflite::micro::GetTensorShape(op_context.input1), + tflite::micro::GetTensorData(op_context.input1), + tflite::micro::GetTensorShape(op_context.input2), + tflite::micro::GetTensorData(op_context.input2), + tflite::micro::GetTensorShape(op_context.output), + tflite::micro::GetTensorData(op_context.output), + op_type::template op); +} + +template +TfLiteStatus Eval(TfLiteContext* context, TfLiteNode* node) { + OpContext op_context(context, node); + + if (kernel_type == kReference) { + switch (op_context.output->type) { + case kTfLiteFloat32: + TFLiteOperation(context, node, op_context); + break; + case kTfLiteInt8: + TFLiteOperation(context, node, op_context); + break; + case kTfLiteInt32: + TFLiteOperation(context, node, op_context); + break; + case kTfLiteInt64: + TFLiteOperation(context, node, op_context); + break; + default: + MicroPrintf("Type %s (%d) is not supported by Maximum/Minimum.", + TfLiteTypeGetName(op_context.output->type), + op_context.output->type); + return kTfLiteError; + } + } else { + MicroPrintf("Kernel type not supported by Maximum/Minimum."); + return kTfLiteError; + } + return kTfLiteOk; +} + +} // namespace + +TFLMRegistration Register_MAXIMUM() { + return tflite::micro::RegisterOp(nullptr, nullptr, + Eval); +} + +TFLMRegistration Register_MINIMUM() { + return tflite::micro::RegisterOp(nullptr, nullptr, + Eval); +} + +} // namespace tflite diff --git a/tensorflow/lite/micro/kernels/maximum_minimum_test.cc b/tensorflow/lite/micro/kernels/maximum_minimum_test.cc new file mode 100644 index 0000000..f8165b6 --- /dev/null +++ b/tensorflow/lite/micro/kernels/maximum_minimum_test.cc @@ -0,0 +1,222 @@ +/* Copyright 2022 The TensorFlow Authors. 
All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#include "tensorflow/lite/c/builtin_op_data.h" +#include "tensorflow/lite/c/common.h" +#include "tensorflow/lite/micro/kernels/kernel_runner.h" +#include "tensorflow/lite/micro/test_helpers.h" +#include "tensorflow/lite/micro/testing/micro_test.h" + +namespace tflite { +namespace testing { +namespace { + +void TestMaxMinFloat(const TFLMRegistration& registration, + int* input1_dims_data, const float* input1_data, + int* input2_dims_data, const float* input2_data, + const float* expected_output_data, int* output_dims_data, + float* output_data) { + TfLiteIntArray* input1_dims = IntArrayFromInts(input1_dims_data); + TfLiteIntArray* input2_dims = IntArrayFromInts(input2_dims_data); + TfLiteIntArray* output_dims = IntArrayFromInts(output_dims_data); + const int output_dims_count = ElementCount(*output_dims); + + constexpr int inputs_size = 2; + constexpr int outputs_size = 1; + constexpr int tensors_size = inputs_size + outputs_size; + TfLiteTensor tensors[tensors_size] = { + CreateTensor(input1_data, input1_dims), + CreateTensor(input2_data, input2_dims), + CreateTensor(output_data, output_dims), + }; + + int inputs_array_data[] = {2, 0, 1}; + TfLiteIntArray* inputs_array = IntArrayFromInts(inputs_array_data); + int outputs_array_data[] = {1, 2}; + TfLiteIntArray* outputs_array = IntArrayFromInts(outputs_array_data); + + micro::KernelRunner 
runner(registration, tensors, tensors_size, inputs_array, + outputs_array, + /*builtin_data=*/nullptr); + + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, runner.InitAndPrepare()); + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, runner.Invoke()); + + for (int i = 0; i < output_dims_count; ++i) { + TF_LITE_MICRO_EXPECT_NEAR(expected_output_data[i], output_data[i], 1e-5f); + } +} + +void TestMaxMinQuantized(const TFLMRegistration& registration, + int* input1_dims_data, const int8_t* input1_data, + float const input1_scale, const int input1_zero_point, + int* input2_dims_data, const int8_t* input2_data, + const float input2_scale, const int input2_zero_point, + const int8_t* expected_output_data, + const float output_scale, const int output_zero_point, + int* output_dims_data, int8_t* output_data) { + TfLiteIntArray* input1_dims = IntArrayFromInts(input1_dims_data); + TfLiteIntArray* input2_dims = IntArrayFromInts(input2_dims_data); + TfLiteIntArray* output_dims = IntArrayFromInts(output_dims_data); + const int output_dims_count = ElementCount(*output_dims); + + constexpr int inputs_size = 2; + constexpr int outputs_size = 1; + constexpr int tensors_size = inputs_size + outputs_size; + TfLiteTensor tensors[tensors_size] = { + CreateQuantizedTensor(input1_data, input1_dims, input1_scale, + input1_zero_point), + CreateQuantizedTensor(input2_data, input2_dims, input2_scale, + input2_zero_point), + CreateQuantizedTensor(output_data, output_dims, output_scale, + output_zero_point), + }; + + int inputs_array_data[] = {2, 0, 1}; + TfLiteIntArray* inputs_array = IntArrayFromInts(inputs_array_data); + int outputs_array_data[] = {1, 2}; + TfLiteIntArray* outputs_array = IntArrayFromInts(outputs_array_data); + + micro::KernelRunner runner(registration, tensors, tensors_size, inputs_array, + outputs_array, + /*builtin_data=*/nullptr); + + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, runner.InitAndPrepare()); + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, runner.Invoke()); + + for (int i = 0; i < output_dims_count; ++i) { 
+ TF_LITE_MICRO_EXPECT_EQ(expected_output_data[i], output_data[i]); + } +} + +void TestMaxMinQuantizedInt32(const TFLMRegistration& registration, + int* input1_dims_data, const int32_t* input1_data, + int* input2_dims_data, const int32_t* input2_data, + const int32_t* expected_output_data, + int* output_dims_data, int32_t* output_data) { + TfLiteIntArray* input1_dims = IntArrayFromInts(input1_dims_data); + TfLiteIntArray* input2_dims = IntArrayFromInts(input2_dims_data); + TfLiteIntArray* output_dims = IntArrayFromInts(output_dims_data); + const int output_dims_count = ElementCount(*output_dims); + + constexpr int inputs_size = 2; + constexpr int outputs_size = 1; + constexpr int tensors_size = inputs_size + outputs_size; + TfLiteTensor tensors[tensors_size] = { + CreateTensor(input1_data, input1_dims), + CreateTensor(input2_data, input2_dims), + CreateTensor(output_data, output_dims), + }; + + int inputs_array_data[] = {2, 0, 1}; + TfLiteIntArray* inputs_array = IntArrayFromInts(inputs_array_data); + int outputs_array_data[] = {1, 2}; + TfLiteIntArray* outputs_array = IntArrayFromInts(outputs_array_data); + + micro::KernelRunner runner(registration, tensors, tensors_size, inputs_array, + outputs_array, + /*builtin_data=*/nullptr); + + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, runner.InitAndPrepare()); + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, runner.Invoke()); + + for (int i = 0; i < output_dims_count; ++i) { + TF_LITE_MICRO_EXPECT_EQ(expected_output_data[i], output_data[i]); + } +} + +} // namespace +} // namespace testing +} // namespace tflite + +TF_LITE_MICRO_TESTS_BEGIN + +TF_LITE_MICRO_TEST(FloatTest) { + int dims[] = {3, 3, 1, 2}; + const float data1[] = {1.0, 0.0, -1.0, 11.0, -2.0, -1.44}; + const float data2[] = {-1.0, 0.0, 1.0, 12.0, -3.0, -1.43}; + const float golden_max[] = {1.0, 0.0, 1.0, 12.0, -2.0, -1.43}; + const float golden_min[] = {-1.0, 0.0, -1.0, 11.0, -3.0, -1.44}; + float output_data[6]; + + tflite::testing::TestMaxMinFloat(tflite::Register_MAXIMUM(), 
dims, data1, + dims, data2, golden_max, dims, output_data); + + tflite::testing::TestMaxMinFloat(tflite::Register_MINIMUM(), dims, data1, + dims, data2, golden_min, dims, output_data); +} + +TF_LITE_MICRO_TEST(Int8Test) { + int dims[] = {3, 3, 1, 2}; + const int8_t data1[] = {1, 0, 2, 11, 2, 23}; + const int8_t data2[] = {0, 0, 1, 12, 127, 1}; + const int8_t golden_max[] = {1, 0, 2, 12, 127, 23}; + const int8_t golden_min[] = {0, 0, 1, 11, 2, 1}; + + const float input_scale = 1.0; + const int input_zero_point = 0; + const float output_scale = 1.0; + const int output_zero_point = 0; + + int8_t output_data[6]; + + tflite::testing::TestMaxMinQuantized( + tflite::Register_MAXIMUM(), dims, data1, input_scale, input_zero_point, + dims, data2, input_scale, input_zero_point, golden_max, output_scale, + output_zero_point, dims, output_data); + + tflite::testing::TestMaxMinQuantized( + tflite::Register_MINIMUM(), dims, data1, input_scale, input_zero_point, + dims, data2, input_scale, input_zero_point, golden_min, output_scale, + output_zero_point, dims, output_data); +} + +TF_LITE_MICRO_TEST(FloatWithBroadcastTest) { + int dims[] = {3, 3, 1, 2}; + int dims_scalar[] = {1, 2}; + const float data1[] = {1.0, 0.0, -1.0, -2.0, -1.44, 11.0}; + const float data2[] = {0.5, 2.0}; + const float golden_max[] = {1.0, 2.0, 0.5, 2.0, 0.5, 11.0}; + const float golden_min[] = {0.5, 0.0, -1.0, -2.0, -1.44, 2.0}; + float output_data[6]; + + tflite::testing::TestMaxMinFloat(tflite::Register_MAXIMUM(), dims, data1, + dims_scalar, data2, golden_max, dims, + output_data); + + tflite::testing::TestMaxMinFloat(tflite::Register_MINIMUM(), dims, data1, + dims_scalar, data2, golden_min, dims, + output_data); +} + +TF_LITE_MICRO_TEST(Int32WithBroadcastTest) { + int dims[] = {3, 3, 1, 2}; + int dims_scalar[] = {1, 1}; + const int32_t data1[] = {1, 0, -1, -2, 3, 11}; + const int32_t data2[] = {2}; + const int32_t golden_max[] = {2, 2, 2, 2, 3, 11}; + const int32_t golden_min[] = {1, 0, -1, -2, 2, 2}; + 
int32_t output_data[6]; + + tflite::testing::TestMaxMinQuantizedInt32(tflite::Register_MAXIMUM(), dims, + data1, dims_scalar, data2, + golden_max, dims, output_data); + + tflite::testing::TestMaxMinQuantizedInt32(tflite::Register_MINIMUM(), dims, + data1, dims_scalar, data2, + golden_min, dims, output_data); +} + +TF_LITE_MICRO_TESTS_END diff --git a/tensorflow/lite/micro/kernels/micro_ops.h b/tensorflow/lite/micro/kernels/micro_ops.h new file mode 100644 index 0000000..8ad80b8 --- /dev/null +++ b/tensorflow/lite/micro/kernels/micro_ops.h @@ -0,0 +1,144 @@ +/* Copyright 2023 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ +#ifndef TENSORFLOW_LITE_MICRO_KERNELS_MICRO_OPS_H_ +#define TENSORFLOW_LITE_MICRO_KERNELS_MICRO_OPS_H_ + +#include "tensorflow/lite/c/common.h" + +// Forward declaration of all micro op kernel registration methods. These +// registrations are included with the standard `BuiltinOpResolver`. +// +// This header is particularly useful in cases where only a subset of ops are +// needed. In such cases, the client can selectively add only the registrations +// their model requires, using a custom `(Micro)MutableOpResolver`. Selective +// registration in turn allows the linker to strip unused kernels. + +namespace tflite { + +// TFLM is incrementally moving towards a flat tflite namespace +// (https://abseil.io/tips/130). 
Any new ops (or cleanup of existing ops should +// have their Register function declarations in the tflite namespace. + +TFLMRegistration Register_ABS(); +TFLMRegistration Register_ADD(); +TFLMRegistration Register_ADD_N(); +TFLMRegistration Register_ARG_MAX(); +TFLMRegistration Register_ARG_MIN(); +TFLMRegistration Register_ASSIGN_VARIABLE(); +TFLMRegistration Register_AVERAGE_POOL_2D(); +TFLMRegistration Register_BATCH_TO_SPACE_ND(); +TFLMRegistration Register_BROADCAST_ARGS(); +TFLMRegistration Register_BROADCAST_TO(); +TFLMRegistration Register_CALL_ONCE(); +TFLMRegistration Register_CAST(); +TFLMRegistration Register_CEIL(); +// TODO(b/160234179): Change custom OPs to also return by value. +TFLMRegistration* Register_CIRCULAR_BUFFER(); +TFLMRegistration Register_CONCATENATION(); +TFLMRegistration Register_CONV_2D(); +TFLMRegistration Register_COS(); +TFLMRegistration Register_CUMSUM(); +TFLMRegistration Register_DEPTH_TO_SPACE(); +TFLMRegistration Register_DEPTHWISE_CONV_2D(); +TFLMRegistration Register_DEQUANTIZE(); +TFLMRegistration Register_DIV(); +TFLMRegistration Register_ELU(); +TFLMRegistration Register_EMBEDDING_LOOKUP(); +TFLMRegistration Register_EQUAL(); +TFLMRegistration* Register_ETHOSU(); +TFLMRegistration Register_EXP(); +TFLMRegistration Register_EXPAND_DIMS(); +TFLMRegistration Register_FILL(); +TFLMRegistration Register_FLOOR(); +TFLMRegistration Register_FLOOR_DIV(); +TFLMRegistration Register_FLOOR_MOD(); +TFLMRegistration Register_FULLY_CONNECTED(); +TFLMRegistration Register_GATHER(); +TFLMRegistration Register_GATHER_ND(); +TFLMRegistration Register_GREATER(); +TFLMRegistration Register_GREATER_EQUAL(); +TFLMRegistration Register_HARD_SWISH(); +TFLMRegistration Register_IF(); +TFLMRegistration Register_L2_NORMALIZATION(); +TFLMRegistration Register_L2_POOL_2D(); +TFLMRegistration Register_LEAKY_RELU(); +TFLMRegistration Register_LESS(); +TFLMRegistration Register_LESS_EQUAL(); +TFLMRegistration Register_LOG(); +TFLMRegistration 
Register_LOG_SOFTMAX(); +TFLMRegistration Register_LOGICAL_AND(); +TFLMRegistration Register_LOGICAL_NOT(); +TFLMRegistration Register_LOGICAL_OR(); +TFLMRegistration Register_LOGISTIC(); +TFLMRegistration Register_MAX_POOL_2D(); +TFLMRegistration Register_MAXIMUM(); +TFLMRegistration Register_MEAN(); +TFLMRegistration Register_MINIMUM(); +TFLMRegistration Register_MIRROR_PAD(); +TFLMRegistration Register_MUL(); +TFLMRegistration Register_NEG(); +TFLMRegistration Register_NOT_EQUAL(); +TFLMRegistration Register_PACK(); +TFLMRegistration Register_PAD(); +TFLMRegistration Register_PADV2(); +TFLMRegistration Register_PRELU(); +TFLMRegistration Register_QUANTIZE(); +TFLMRegistration Register_READ_VARIABLE(); +TFLMRegistration Register_REDUCE_MAX(); +TFLMRegistration Register_RELU(); +TFLMRegistration Register_RELU6(); +TFLMRegistration Register_RESIZE_BILINEAR(); +TFLMRegistration Register_RESIZE_NEAREST_NEIGHBOR(); +TFLMRegistration Register_RSQRT(); +TFLMRegistration Register_SELECT_V2(); +TFLMRegistration Register_SHAPE(); +TFLMRegistration Register_SIN(); +TFLMRegistration Register_SLICE(); +TFLMRegistration Register_SOFTMAX(); +TFLMRegistration Register_SPACE_TO_BATCH_ND(); +TFLMRegistration Register_SPACE_TO_DEPTH(); +TFLMRegistration Register_SPLIT(); +TFLMRegistration Register_SPLIT_V(); +TFLMRegistration Register_SQRT(); +TFLMRegistration Register_SQUARE(); +TFLMRegistration Register_SQUARED_DIFFERENCE(); +TFLMRegistration Register_SQUEEZE(); +TFLMRegistration Register_STRIDED_SLICE(); +TFLMRegistration Register_SUB(); +TFLMRegistration Register_SUM(); +TFLMRegistration Register_SVDF(); +TFLMRegistration Register_TANH(); +TFLMRegistration Register_TRANSPOSE(); +TFLMRegistration Register_TRANSPOSE_CONV(); +// TODO(b/230666079): resolve conflict with xtensa implementation +TFLMRegistration Register_UNIDIRECTIONAL_SEQUENCE_LSTM(); +TFLMRegistration Register_UNPACK(); +TFLMRegistration Register_VAR_HANDLE(); +TFLMRegistration Register_WHILE(); +// 
TODO(b/160234179): Change custom OPs to also return by value. +namespace tflm_signal { +TFLMRegistration* Register_WINDOW(); +} +TFLMRegistration Register_ZEROS_LIKE(); + +namespace ops { +namespace micro { +TFLMRegistration Register_RESHAPE(); +TFLMRegistration Register_ROUND(); +} // namespace micro +} // namespace ops +} // namespace tflite + +#endif // TENSORFLOW_LITE_MICRO_KERNELS_MICRO_OPS_H_ diff --git a/tensorflow/lite/micro/kernels/micro_tensor_utils.cc b/tensorflow/lite/micro/kernels/micro_tensor_utils.cc new file mode 100644 index 0000000..87cfe0c --- /dev/null +++ b/tensorflow/lite/micro/kernels/micro_tensor_utils.cc @@ -0,0 +1,67 @@ +/* Copyright 2019 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ +#include "tensorflow/lite/micro/kernels/micro_tensor_utils.h" + +#include +#include +#include +#include +#include +#include + +#include "fixedpoint/fixedpoint.h" // from @gemmlowp +#include "tensorflow/lite/kernels/internal/common.h" +#include "tensorflow/lite/kernels/internal/compatibility.h" +#include "tensorflow/lite/kernels/internal/cppmath.h" +#include "tensorflow/lite/kernels/op_macros.h" + +namespace tflite { + +// Apply sigmoid to elements of a vector. 
+void PortableApplySigmoidToVector(const float* vector, int v_size, + float* result) { + for (int v = 0; v < v_size; v++) { + result[v] = 1.0f / (1.0f + std::exp(-vector[v])); + } +} + +void PortableApplyTanhToVector(const float* vector, int v_size, float* result) { + for (int v = 0; v < v_size; v++) { + result[v] = std::tanh(vector[v]); + } +} + +void PortableApplyActivationToVector(const float* vector, int v_size, + TfLiteFusedActivation activation, + float* result) { + switch (activation) { + case kTfLiteActNone: + return; + case kTfLiteActRelu: + return tflite::tensor_utils::ApplyReluToVector(vector, v_size, result); + case kTfLiteActReluN1To1: + return tflite::tensor_utils::ApplyRelu1ToVector(vector, v_size, result); + case kTfLiteActRelu6: + return tflite::tensor_utils::ApplyRelu6ToVector(vector, v_size, result); + case kTfLiteActTanh: + return PortableApplyTanhToVector(vector, v_size, result); + case kTfLiteActSignBit: + return tflite::tensor_utils::ApplySignbitToVector(vector, v_size, result); + case kTfLiteActSigmoid: + return PortableApplySigmoidToVector(vector, v_size, result); + } +} + +} // namespace tflite \ No newline at end of file diff --git a/tensorflow/lite/micro/kernels/micro_tensor_utils.h b/tensorflow/lite/micro/kernels/micro_tensor_utils.h new file mode 100644 index 0000000..0b87f0a --- /dev/null +++ b/tensorflow/lite/micro/kernels/micro_tensor_utils.h @@ -0,0 +1,56 @@ +/* Copyright 2021 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +// This file and the associated .cc file is branched from +// tensorflow/lite/kernels/internal/reference/portable_tensor_utils* +// TFLM needs to create its own because the original files are coupled with +// the tensor_utils module, which we cannot reuse due to its use of the +// Eigen library. + +#ifndef TENSORFLOW_LITE_MICRO_KERNELS_MICRO_TENSOR_UTILS_H_ +#define TENSORFLOW_LITE_MICRO_KERNELS_MICRO_TENSOR_UTILS_H_ + +#include +#include +#include + +#include "tensorflow/lite/c/builtin_op_data.h" +#include "tensorflow/lite/c/common.h" +#include "tensorflow/lite/kernels/internal/portable_tensor_utils.h" + +#if defined(_MSC_VER) +#define __restrict__ __restrict +#endif + +namespace tflite { + +// Not all backends support CpuBackendContext usage, so forward declare to avoid +// pulling in its implementation. +// TODO(b/230666277): consider removing this since micro does not utilize it +class CpuBackendContext; + +// Apply sigmoid to elements of a vector. +void PortableApplySigmoidToVector(const float* vector, int v_size, + float* result); +// Apply tanh to elements of a vector +void PortableApplyTanhToVector(const float* vector, int v_size, float* result); +// Apply appropriate activation function to elements of a vector. +void PortableApplyActivationToVector(const float* vector, int v_size, + TfLiteFusedActivation activation, + float* result); + +} // namespace tflite + +#endif // TENSORFLOW_LITE_MICRO_KERNELS_MICRO_TENSOR_UTILS_H_ \ No newline at end of file diff --git a/tensorflow/lite/micro/kernels/mirror_pad.cc b/tensorflow/lite/micro/kernels/mirror_pad.cc new file mode 100644 index 0000000..4cbaf52 --- /dev/null +++ b/tensorflow/lite/micro/kernels/mirror_pad.cc @@ -0,0 +1,215 @@ +/* Copyright 2021 The TensorFlow Authors. All Rights Reserved. 
+ +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#include "tensorflow/lite/c/builtin_op_data.h" +#include "tensorflow/lite/c/common.h" +#include "tensorflow/lite/kernels/kernel_util.h" +#include "tensorflow/lite/micro/kernels/kernel_util.h" + +namespace tflite { +namespace { + +struct OpDataMirrorPad { + int input_dims; + int output_size; + int offset; + int output_dims_num_elements_buffer_index; + int input_dims_num_elements_buffer_index; +}; + +// Helper method that fills the left and right pads. +template +inline void GetPadding(const T* data, int offset, int64_t* left_pad, + int64_t* right_pad) { + *left_pad = static_cast(*(data + offset * 2)); + *right_pad = static_cast(*(data + offset * 2 + 1)); +} + +// Given dimension index and the left/right padding. +// Returns the corresponding dimension in the input array. 
+inline int GetInputDimension(int padded_dimension, int left_pad, int right_pad, + int input_dim_size, int offset) { + if (padded_dimension < left_pad) { + const int original_ind = left_pad + offset - 1; + return original_ind - (std::min(padded_dimension, original_ind - offset)); + } + padded_dimension -= left_pad; + if (padded_dimension >= input_dim_size) { + padded_dimension -= input_dim_size; + const int original_ind = input_dim_size - (1 + offset); + return original_ind - std::min(padded_dimension, original_ind); + } + return padded_dimension; +} + +// Given and index in output array, returns the index of the value +// in input array. +int GetFlatIndex(int index, int num_dims, + const TfLiteEvalTensor* padding_matrix, + const TfLiteIntArray* input_dims, + int* output_dims_num_elements, int* input_dims_num_elements, + const int offset) { + int flat_index = 0; + int64_t left_pad = 0, right_pad = 0, dimension_index, index_in_input; + + for (int i = 0; i < num_dims; ++i) { + switch (padding_matrix->type) { + case kTfLiteInt32: + GetPadding(padding_matrix->data.i32, i, &left_pad, &right_pad); + break; + case kTfLiteInt64: + GetPadding(padding_matrix->data.i64, i, &left_pad, &right_pad); + break; + default: + break; + } + dimension_index = index / output_dims_num_elements[i]; + + index_in_input = GetInputDimension(dimension_index, left_pad, right_pad, + input_dims->data[i], offset); + + flat_index += index_in_input * (input_dims_num_elements)[i]; + index %= output_dims_num_elements[i]; + } + + return flat_index; +} + +template +void MirrorPad(const TfLiteEvalTensor* padding_matrix, + const TfLiteIntArray* input_dims, int* output_dims_num_elements, + int* input_dims_num_elements, const T* input_data, + T* output_data, const int offset, const int num_dims, + const int output_size) { + for (int i = 0; i < output_size; ++i) { + output_data[i] = input_data[GetFlatIndex( + i, num_dims, padding_matrix, input_dims, output_dims_num_elements, + input_dims_num_elements, 
offset)]; + } +} + +TfLiteStatus Eval(TfLiteContext* context, TfLiteNode* node) { + TFLITE_DCHECK(node->user_data != nullptr); + TfLiteStatus status = kTfLiteOk; + const OpDataMirrorPad* data = + static_cast(node->user_data); + + const TfLiteEvalTensor* input_tensor = + tflite::micro::GetEvalInput(context, node, 0); + const TfLiteEvalTensor* padding_matrix = + tflite::micro::GetEvalInput(context, node, 1); + + TfLiteEvalTensor* output_tensor = + tflite::micro::GetEvalOutput(context, node, 0); + const int input_dims = data->input_dims; + const int output_size = data->output_size; + + int* input_dims_num_elements = (int*)context->GetScratchBuffer( + context, data->input_dims_num_elements_buffer_index); + int* output_dims_num_elements = (int*)context->GetScratchBuffer( + context, data->output_dims_num_elements_buffer_index); + + for (int i = 0; i < input_dims; i++) { + output_dims_num_elements[i] = 1; + input_dims_num_elements[i] = 1; + } + + for (int i = input_dims - 2; i >= 0; i--) { + output_dims_num_elements[i] = + output_dims_num_elements[i + 1] * output_tensor->dims->data[i + 1]; + + input_dims_num_elements[i] = + input_dims_num_elements[i + 1] * input_tensor->dims->data[i + 1]; + } + + switch (output_tensor->type) { + case kTfLiteFloat32: { + MirrorPad(padding_matrix, input_tensor->dims, output_dims_num_elements, + input_dims_num_elements, + tflite::micro::GetTensorData(input_tensor), + tflite::micro::GetTensorData(output_tensor), + data->offset, input_dims, output_size); + break; + } + case kTfLiteInt8: { + MirrorPad(padding_matrix, input_tensor->dims, output_dims_num_elements, + input_dims_num_elements, + tflite::micro::GetTensorData(input_tensor), + tflite::micro::GetTensorData(output_tensor), + data->offset, input_dims, output_size); + break; + } + default: + status = kTfLiteError; + break; + } + +#undef TF_LITE_MIRROR_PAD + + return status; +} + +void* Init(TfLiteContext* context, const char* buffer, size_t length) { + 
TFLITE_DCHECK(context->AllocatePersistentBuffer != nullptr); + return context->AllocatePersistentBuffer(context, sizeof(OpDataMirrorPad)); +} + +TfLiteStatus Prepare(TfLiteContext* context, TfLiteNode* node) { + MicroContext* micro_context = GetMicroContext(context); + + TFLITE_DCHECK(node->user_data != nullptr); + OpDataMirrorPad* data = static_cast(node->user_data); + + TfLiteTensor* input_tensor = micro_context->AllocateTempInputTensor(node, 0); + TfLiteTensor* padding_matrix = + micro_context->AllocateTempInputTensor(node, 1); + TfLiteTensor* output_tensor = + micro_context->AllocateTempOutputTensor(node, 0); + + TF_LITE_ENSURE_EQ(context, NumDimensions(padding_matrix), 2); + TF_LITE_ENSURE_EQ(context, SizeOfDimension(padding_matrix, 0), + NumDimensions(input_tensor)); + auto* params = + reinterpret_cast(node->builtin_data); + if (params == nullptr) { + return kTfLiteError; + } + + data->offset = + params->mode != TfLiteMirrorPaddingMode::kTfLiteMirrorPaddingReflect ? 0 + : 1; + data->input_dims = NumDimensions(input_tensor); + data->output_size = NumElements(output_tensor); + + TF_LITE_ENSURE_STATUS(context->RequestScratchBufferInArena( + context, data->input_dims * sizeof(int), + &data->output_dims_num_elements_buffer_index)); + TF_LITE_ENSURE_STATUS(context->RequestScratchBufferInArena( + context, data->input_dims * sizeof(int), + &data->input_dims_num_elements_buffer_index)); + + micro_context->DeallocateTempTfLiteTensor(input_tensor); + micro_context->DeallocateTempTfLiteTensor(padding_matrix); + micro_context->DeallocateTempTfLiteTensor(output_tensor); + return kTfLiteOk; +} + +} // namespace + +TFLMRegistration Register_MIRROR_PAD() { + return tflite::micro::RegisterOp(Init, Prepare, Eval); +} + +} // namespace tflite diff --git a/tensorflow/lite/micro/kernels/mirror_pad_test.cc b/tensorflow/lite/micro/kernels/mirror_pad_test.cc new file mode 100644 index 0000000..0670226 --- /dev/null +++ b/tensorflow/lite/micro/kernels/mirror_pad_test.cc @@ -0,0 +1,264 
@@ +/* Copyright 2021 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#include "tensorflow/lite/c/builtin_op_data.h" +#include "tensorflow/lite/c/common.h" +#include "tensorflow/lite/micro/kernels/kernel_runner.h" +#include "tensorflow/lite/micro/test_helpers.h" +#include "tensorflow/lite/micro/testing/micro_test.h" + +namespace tflite { +namespace testing { +namespace { + +template +void ValidateMirrorPadGoldens(TfLiteTensor* tensors, int tensors_size, + const T* golden, T* output, int output_size, + TfLiteMirrorPaddingMode mode) { + TfLiteMirrorPaddingParams builtin_data; + builtin_data.mode = mode; + + int inputs_array_data[] = {2, 0, 1}; + TfLiteIntArray* inputs_array = IntArrayFromInts(inputs_array_data); + int outputs_array_data[] = {1, 2}; + TfLiteIntArray* outputs_array = IntArrayFromInts(outputs_array_data); + + const TFLMRegistration registration = Register_MIRROR_PAD(); + micro::KernelRunner runner(registration, tensors, tensors_size, inputs_array, + outputs_array, &builtin_data); + + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, runner.InitAndPrepare()); + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, runner.Invoke()); + + for (int i = 0; i < output_size; ++i) { + TF_LITE_MICRO_EXPECT_EQ(golden[i], output[i]); + } +} + +template +void TestMirrorPad(int* input_shape, const T* input_data, int* pad_shape, + const int32_t* pad_data, int* output_shape, + const T* golden_data, 
TfLiteMirrorPaddingMode mode, + T* output_data) { + TfLiteIntArray* input_dims = tflite::testing::IntArrayFromInts(input_shape); + TfLiteIntArray* pad_dims = tflite::testing::IntArrayFromInts(pad_shape); + TfLiteIntArray* output_dims = tflite::testing::IntArrayFromInts(output_shape); + + constexpr int inputs_size = 2; + constexpr int outputs_size = 1; + constexpr int tensors_size = inputs_size + outputs_size; + TfLiteTensor tensors[tensors_size] = { + tflite::testing::CreateTensor(input_data, input_dims), + tflite::testing::CreateTensor(pad_data, pad_dims), + tflite::testing::CreateTensor(output_data, output_dims), + }; + + ValidateMirrorPadGoldens(tensors, tensors_size, golden_data, output_data, + tflite::ElementCount(*output_dims), mode); +} + +} // namespace +} // namespace testing +} // namespace tflite + +TF_LITE_MICRO_TESTS_BEGIN + +TF_LITE_MICRO_TEST(EmptyPad) { + int input_shape[] = {2, 2, 3}; + int pad_shape[] = {2, 2, 2}; + int output_shape[] = {2, 2, 3}; + + const int8_t input_data[] = {1, 2, 3, 4, 5, 6}; + const int32_t pad_data[] = {0, 0, 0, 0}; + int8_t output_data[6]; + const int8_t golden_data[] = {1, 2, 3, 4, 5, 6}; + + tflite::testing::TestMirrorPad(input_shape, input_data, pad_shape, pad_data, + output_shape, golden_data, + kTfLiteMirrorPaddingReflect, output_data); +} + +TF_LITE_MICRO_TEST(PadOneSide_right_Reflect) { + int input_shape[] = {2, 2, 3}; + int pad_shape[] = {2, 2, 2}; + int output_shape[] = {2, 3, 4}; + + const int8_t input_data[] = {1, 2, 3, 4, 5, 6}; + const int32_t pad_data[] = {0, 1, 0, 1}; + int8_t output_data[12]; + const int8_t golden_data[] = {1, 2, 3, 2, 4, 5, 6, 5, 1, 2, 3, 2}; + + tflite::testing::TestMirrorPad(input_shape, input_data, pad_shape, pad_data, + output_shape, golden_data, + kTfLiteMirrorPaddingReflect, output_data); +} + +TF_LITE_MICRO_TEST(PadOneSide_left_Reflect) { + int input_shape[] = {2, 2, 3}; + int pad_shape[] = {2, 2, 2}; + int output_shape[] = {2, 3, 4}; + + const int8_t input_data[] = {1, 2, 3, 4, 5, 
6}; + const int32_t pad_data[] = {1, 0, 1, 0}; + int8_t output_data[12]; + const int8_t golden_data[] = {5, 4, 5, 6, 2, 1, 2, 3, 5, 4, 5, 6}; + + tflite::testing::TestMirrorPad(input_shape, input_data, pad_shape, pad_data, + output_shape, golden_data, + kTfLiteMirrorPaddingReflect, output_data); +} + +TF_LITE_MICRO_TEST(PadOneSide_right_Symmetric) { + int input_shape[] = {2, 2, 3}; + int pad_shape[] = {2, 2, 2}; + int output_shape[] = {2, 3, 4}; + + const int8_t input_data[] = {1, 2, 3, 4, 5, 6}; + const int32_t pad_data[] = {0, 1, 0, 1}; + int8_t output_data[12]; + const int8_t golden_data[] = {1, 2, 3, 3, 4, 5, 6, 6, 4, 5, 6, 6}; + + tflite::testing::TestMirrorPad(input_shape, input_data, pad_shape, pad_data, + output_shape, golden_data, + kTfLiteMirrorPaddingSymmetric, output_data); +} + +TF_LITE_MICRO_TEST(PadOneSide_left_Symmetric) { + int input_shape[] = {2, 2, 3}; + int pad_shape[] = {2, 2, 2}; + int output_shape[] = {2, 3, 4}; + + const int8_t input_data[] = {1, 2, 3, 4, 5, 6}; + const int32_t pad_data[] = {1, 0, 1, 0}; + int8_t output_data[12]; + const int8_t golden_data[] = {1, 1, 2, 3, 1, 1, 2, 3, 4, 4, 5, 6}; + + tflite::testing::TestMirrorPad(input_shape, input_data, pad_shape, pad_data, + output_shape, golden_data, + kTfLiteMirrorPaddingSymmetric, output_data); +} +TF_LITE_MICRO_TEST(PadBothSides_Symmetric) { + int input_shape[] = {2, 2, 3}; + int pad_shape[] = {2, 2, 2}; + int output_shape[] = {2, 4, 5}; + + const int8_t input_data[] = {1, 2, 3, 4, 5, 6}; + const int32_t pad_data[] = {1, 1, 1, 1}; + int8_t output_data[20]; + const int8_t golden_data[] = {1, 1, 2, 3, 3, 1, 1, 2, 3, 3, + 4, 4, 5, 6, 6, 4, 4, 5, 6, 6}; + + tflite::testing::TestMirrorPad(input_shape, input_data, pad_shape, pad_data, + output_shape, golden_data, + kTfLiteMirrorPaddingSymmetric, output_data); +} + +TF_LITE_MICRO_TEST(PadBothSides_Reflect) { + int input_shape[] = {2, 2, 3}; + int pad_shape[] = {2, 2, 2}; + int output_shape[] = {2, 4, 5}; + + const int8_t input_data[] = {1, 
2, 3, 4, 5, 6}; + const int32_t pad_data[] = {1, 1, 1, 1}; + int8_t output_data[20]; + const int8_t golden_data[] = {5, 4, 5, 6, 5, 2, 1, 2, 3, 2, + 5, 4, 5, 6, 5, 2, 1, 2, 3, 2}; + + tflite::testing::TestMirrorPad(input_shape, input_data, pad_shape, pad_data, + output_shape, golden_data, + kTfLiteMirrorPaddingReflect, output_data); +} + +TF_LITE_MICRO_TEST(PadBothSides_Symmetric_Whole) { + int input_shape[] = {2, 2, 3}; + int pad_shape[] = {2, 2, 2}; + int output_shape[] = {2, 6, 9}; + + const int8_t input_data[] = {1, 2, 3, 4, 5, 6}; + const int32_t pad_data[] = {2, 2, 3, 3}; + int8_t output_data[54]; + const int8_t golden_data[] = {6, 5, 4, 4, 5, 6, 6, 5, 4, 3, 2, 1, 1, 2, + 3, 3, 2, 1, 3, 2, 1, 1, 2, 3, 3, 2, 1, 6, + 5, 4, 4, 5, 6, 6, 5, 4, 6, 5, 4, 4, 5, 6, + 6, 5, 4, 3, 2, 1, 1, 2, 3, 3, 2, 1}; + + tflite::testing::TestMirrorPad(input_shape, input_data, pad_shape, pad_data, + output_shape, golden_data, + kTfLiteMirrorPaddingSymmetric, output_data); +} + +TF_LITE_MICRO_TEST(PadBothSides_Reflect_Whole) { + int input_shape[] = {2, 2, 3}; + int pad_shape[] = {2, 2, 2}; + int output_shape[] = {2, 4, 7}; + + const int8_t input_data[] = {1, 2, 3, 4, 5, 6}; + const int32_t pad_data[] = {1, 1, 2, 2}; + int8_t output_data[28]; + const int8_t golden_data[] = {6, 5, 4, 5, 6, 5, 4, 3, 2, 1, 2, 3, 2, 1, + 6, 5, 4, 5, 6, 5, 4, 3, 2, 1, 2, 3, 2, 1}; + + tflite::testing::TestMirrorPad(input_shape, input_data, pad_shape, pad_data, + output_shape, golden_data, + kTfLiteMirrorPaddingReflect, output_data); +} + +TF_LITE_MICRO_TEST(Pad_Symmetric) { + int input_shape[] = {2, 2, 3}; + int pad_shape[] = {2, 2, 2}; + int output_shape[] = {2, 4, 7}; + + const int8_t input_data[] = {1, 2, 3, 4, 5, 6}; + const int32_t pad_data[] = {1, 1, 2, 2}; + int8_t output_data[28]; + const int8_t golden_data[] = {2, 1, 1, 2, 3, 3, 2, 2, 1, 1, 2, 3, 3, 2, + 5, 4, 4, 5, 6, 6, 5, 5, 4, 4, 5, 6, 6, 5}; + + tflite::testing::TestMirrorPad(input_shape, input_data, pad_shape, pad_data, + output_shape, 
golden_data, + kTfLiteMirrorPaddingSymmetric, output_data); +} + +TF_LITE_MICRO_TEST(Pad_1D_Reflect) { + int input_shape[] = {1, 3}; + int pad_shape[] = {2, 1, 2}; + int output_shape[] = {1, 5}; + + const int8_t input_data[] = {1, 2, 3, 4, 5, 6}; + const int32_t pad_data[] = {0, 2}; + int8_t output_data[5]; + const int8_t golden_data[] = {1, 2, 3, 2, 1}; + + tflite::testing::TestMirrorPad(input_shape, input_data, pad_shape, pad_data, + output_shape, golden_data, + kTfLiteMirrorPaddingReflect, output_data); +} + +TF_LITE_MICRO_TEST(Pad_1D_Symmetric) { + int input_shape[] = {1, 3}; + int pad_shape[] = {2, 1, 2}; + int output_shape[] = {1, 5}; + + const int8_t input_data[] = {1, 2, 3, 4, 5, 6}; + const int32_t pad_data[] = {0, 2}; + int8_t output_data[5]; + const int8_t golden_data[] = {1, 2, 3, 3, 2}; + + tflite::testing::TestMirrorPad(input_shape, input_data, pad_shape, pad_data, + output_shape, golden_data, + kTfLiteMirrorPaddingSymmetric, output_data); +} + +TF_LITE_MICRO_TESTS_END diff --git a/tensorflow/lite/micro/kernels/mul.cc b/tensorflow/lite/micro/kernels/mul.cc new file mode 100644 index 0000000..d9473d4 --- /dev/null +++ b/tensorflow/lite/micro/kernels/mul.cc @@ -0,0 +1,68 @@ +/* Copyright 2022 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/ + +#include "tensorflow/lite/micro/kernels/mul.h" + +#include "tensorflow/lite/c/common.h" +#include "tensorflow/lite/kernels/internal/quantization_util.h" +#include "tensorflow/lite/kernels/internal/reference/integer_ops/mul.h" +#include "tensorflow/lite/kernels/internal/reference/mul.h" +#include "tensorflow/lite/kernels/internal/reference/process_broadcast_shapes.h" +#include "tensorflow/lite/kernels/internal/tensor_ctypes.h" +#include "tensorflow/lite/kernels/kernel_util.h" +#include "tensorflow/lite/micro/kernels/kernel_util.h" +#include "tensorflow/lite/micro/memory_helpers.h" +#include "tensorflow/lite/micro/micro_log.h" + +namespace tflite { + +TfLiteStatus MulEval(TfLiteContext* context, TfLiteNode* node) { + TFLITE_DCHECK(node->builtin_data != nullptr); + auto* params = reinterpret_cast(node->builtin_data); + + TFLITE_DCHECK(node->user_data != nullptr); + const OpDataMul* data = static_cast(node->user_data); + + const TfLiteEvalTensor* input1 = + tflite::micro::GetEvalInput(context, node, kMulInput1Tensor); + const TfLiteEvalTensor* input2 = + tflite::micro::GetEvalInput(context, node, kMulInput2Tensor); + TfLiteEvalTensor* output = + tflite::micro::GetEvalOutput(context, node, kMulOutputTensor); + + switch (input1->type) { + case kTfLiteInt8: + case kTfLiteInt16: + case kTfLiteInt32: + EvalMulQuantizedReference(context, node, data, input1, input2, output); + break; + case kTfLiteFloat32: + EvalMulFloatReference(context, node, params, data, input1, input2, + output); + break; + default: + MicroPrintf("Type %s (%d) not supported.", + TfLiteTypeGetName(input1->type), input1->type); + return kTfLiteError; + } + + return kTfLiteOk; +} + +TFLMRegistration Register_MUL() { + return tflite::micro::RegisterOp(MulInit, MulPrepare, MulEval); +} + +} // namespace tflite diff --git a/tensorflow/lite/micro/kernels/mul.h b/tensorflow/lite/micro/kernels/mul.h new file mode 100644 index 
0000000..32407ed --- /dev/null +++ b/tensorflow/lite/micro/kernels/mul.h @@ -0,0 +1,74 @@ +/* Copyright 2022 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#ifndef TENSORFLOW_LITE_MICRO_KERNELS_MUL_H_ +#define TENSORFLOW_LITE_MICRO_KERNELS_MUL_H_ + +#include + +#include "tensorflow/lite/c/builtin_op_data.h" +#include "tensorflow/lite/micro/micro_common.h" + +namespace tflite { + +extern const int kMulInput1Tensor; +extern const int kMulInput2Tensor; +extern const int kMulOutputTensor; + +struct OpDataMul { + int32_t input1_zero_point; + int32_t input2_zero_point; + + int32_t output_activation_min; + int32_t output_activation_max; + int32_t output_zero_point; + int32_t output_multiplier; + int output_shift; + + float output_activation_min_f32; + float output_activation_max_f32; +}; + +void* MulInit(TfLiteContext* context, const char* buffer, size_t length); + +TfLiteStatus CalculateOpDataMul(TfLiteContext* context, TfLiteNode* node, + TfLiteMulParams* params, OpDataMul* data); + +TfLiteStatus MulPrepare(TfLiteContext* context, TfLiteNode* node); + +TfLiteStatus EvalMulQuantizedReference(TfLiteContext* context, TfLiteNode* node, + const OpDataMul* data, + const TfLiteEvalTensor* input1, + const TfLiteEvalTensor* input2, + TfLiteEvalTensor* output); + +void EvalMulFloatReference(TfLiteContext* context, TfLiteNode* node, + TfLiteMulParams* params, const OpDataMul* 
data, + const TfLiteEvalTensor* input1, + const TfLiteEvalTensor* input2, + TfLiteEvalTensor* output); + +// Generic must define registration function. +TFLMRegistration Register_MUL(); + +#if defined(CMSIS_NN) +TFLMRegistration Register_MUL_INT8(); +#else +// Fallback registration +inline TFLMRegistration Register_MUL_INT8() { return Register_MUL(); } +#endif +} // namespace tflite + +#endif // TENSORFLOW_LITE_MICRO_KERNELS_MUL_H_ diff --git a/tensorflow/lite/micro/kernels/mul_common.cc b/tensorflow/lite/micro/kernels/mul_common.cc new file mode 100644 index 0000000..45e7c1e --- /dev/null +++ b/tensorflow/lite/micro/kernels/mul_common.cc @@ -0,0 +1,213 @@ +/* Copyright 2023 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/ + +#include "tensorflow/lite/c/common.h" +#include "tensorflow/lite/kernels/internal/quantization_util.h" +#include "tensorflow/lite/kernels/internal/reference/integer_ops/mul.h" +#include "tensorflow/lite/kernels/internal/reference/mul.h" +#include "tensorflow/lite/kernels/internal/reference/process_broadcast_shapes.h" +#include "tensorflow/lite/kernels/internal/tensor_ctypes.h" +#include "tensorflow/lite/kernels/kernel_util.h" +#include "tensorflow/lite/micro/kernels/kernel_util.h" +#include "tensorflow/lite/micro/kernels/mul.h" +#include "tensorflow/lite/micro/memory_helpers.h" + +namespace tflite { + +const int kMulInput1Tensor = 0; +const int kMulInput2Tensor = 1; +const int kMulOutputTensor = 0; + +void* MulInit(TfLiteContext* context, const char* buffer, size_t length) { + TFLITE_DCHECK(context->AllocatePersistentBuffer != nullptr); + return context->AllocatePersistentBuffer(context, sizeof(OpDataMul)); +} + +TfLiteStatus CalculateOpDataMul(TfLiteContext* context, TfLiteNode* node, + TfLiteMulParams* params, OpDataMul* data) { + MicroContext* micro_context = GetMicroContext(context); + + TfLiteTensor* input1 = + micro_context->AllocateTempInputTensor(node, kMulInput1Tensor); + TF_LITE_ENSURE(context, input1 != nullptr); + TfLiteTensor* input2 = + micro_context->AllocateTempInputTensor(node, kMulInput2Tensor); + TF_LITE_ENSURE(context, input2 != nullptr); + TfLiteTensor* output = + micro_context->AllocateTempOutputTensor(node, kMulOutputTensor); + TF_LITE_ENSURE(context, output != nullptr); + + TF_LITE_ENSURE_EQ(context, NumInputs(node), 2); + TF_LITE_ENSURE_EQ(context, NumOutputs(node), 1); + + TF_LITE_ENSURE_TYPES_EQ(context, input1->type, input2->type); + + if (output->type == kTfLiteInt8 || output->type == kTfLiteInt16) { + TF_LITE_ENSURE_STATUS(CalculateActivationRangeQuantized( + context, params->activation, output, &data->output_activation_min, + 
&data->output_activation_max)); + + double real_multiplier = static_cast(input1->params.scale) * + static_cast(input2->params.scale) / + static_cast(output->params.scale); + QuantizeMultiplier(real_multiplier, &data->output_multiplier, + &data->output_shift); + + data->input1_zero_point = input1->params.zero_point; + data->input2_zero_point = input2->params.zero_point; + data->output_zero_point = output->params.zero_point; + + if (input1->type == kTfLiteInt16) { + TF_LITE_ENSURE_EQ(context, data->input1_zero_point, 0); + TF_LITE_ENSURE_EQ(context, data->input2_zero_point, 0); + TF_LITE_ENSURE_EQ(context, data->output_zero_point, 0); + } + } else if (output->type == kTfLiteInt32) { + CalculateActivationRange(params->activation, &data->output_activation_min, + &data->output_activation_max); + } else { + CalculateActivationRange(params->activation, + &data->output_activation_min_f32, + &data->output_activation_max_f32); + } + + micro_context->DeallocateTempTfLiteTensor(input1); + micro_context->DeallocateTempTfLiteTensor(input2); + micro_context->DeallocateTempTfLiteTensor(output); + return kTfLiteOk; +} + +TfLiteStatus MulPrepare(TfLiteContext* context, TfLiteNode* node) { + TFLITE_DCHECK(node->builtin_data != nullptr); + auto* params = reinterpret_cast(node->builtin_data); + + TFLITE_DCHECK(node->user_data != nullptr); + OpDataMul* data = static_cast(node->user_data); + + return CalculateOpDataMul(context, node, params, data); +} + +TfLiteStatus EvalMulQuantizedReference(TfLiteContext* context, TfLiteNode* node, + const OpDataMul* data, + const TfLiteEvalTensor* input1, + const TfLiteEvalTensor* input2, + TfLiteEvalTensor* output) { + tflite::ArithmeticParams op_params = {}; + op_params.quantized_activation_min = data->output_activation_min; + op_params.quantized_activation_max = data->output_activation_max; + op_params.float_activation_max = data->output_activation_max_f32; + op_params.input1_offset = -data->input1_zero_point; + op_params.input2_offset = 
-data->input2_zero_point; + op_params.output_offset = data->output_zero_point; + op_params.output_multiplier = data->output_multiplier; + op_params.output_shift = data->output_shift; + + bool need_broadcast = reference_ops::ProcessBroadcastShapes( + tflite::micro::GetTensorShape(input1), + tflite::micro::GetTensorShape(input2), &op_params); + + if (input1->type == kTfLiteInt8) { + if (need_broadcast) { + reference_integer_ops::BroadcastMul4DSlow( + op_params, tflite::micro::GetTensorShape(input1), + tflite::micro::GetTensorData(input1), + tflite::micro::GetTensorShape(input2), + tflite::micro::GetTensorData(input2), + tflite::micro::GetTensorShape(output), + tflite::micro::GetTensorData(output)); + } else { + reference_integer_ops::Mul(op_params, + tflite::micro::GetTensorShape(input1), + tflite::micro::GetTensorData(input1), + tflite::micro::GetTensorShape(input2), + tflite::micro::GetTensorData(input2), + tflite::micro::GetTensorShape(output), + tflite::micro::GetTensorData(output)); + } + } else if (input1->type == kTfLiteInt32) { + if (need_broadcast) { + reference_ops::BroadcastMul4DSlow( + op_params, tflite::micro::GetTensorShape(input1), + tflite::micro::GetTensorData(input1), + tflite::micro::GetTensorShape(input2), + tflite::micro::GetTensorData(input2), + tflite::micro::GetTensorShape(output), + tflite::micro::GetTensorData(output)); + } else { + reference_ops::Mul(op_params, tflite::micro::GetTensorShape(input1), + tflite::micro::GetTensorData(input1), + tflite::micro::GetTensorShape(input2), + tflite::micro::GetTensorData(input2), + tflite::micro::GetTensorShape(output), + tflite::micro::GetTensorData(output)); + } + } else if (input1->type == kTfLiteInt16) { + TF_LITE_ENSURE_EQ(context, op_params.input1_offset, 0); + TF_LITE_ENSURE_EQ(context, op_params.input2_offset, 0); + TF_LITE_ENSURE_EQ(context, op_params.output_offset, 0); + + if (need_broadcast) { + reference_integer_ops::BroadcastMul4DSlow( + op_params, tflite::micro::GetTensorShape(input1), + 
tflite::micro::GetTensorData(input1), + tflite::micro::GetTensorShape(input2), + tflite::micro::GetTensorData(input2), + tflite::micro::GetTensorShape(output), + tflite::micro::GetTensorData(output)); + } else { + reference_integer_ops::Mul(op_params, + tflite::micro::GetTensorShape(input1), + tflite::micro::GetTensorData(input1), + tflite::micro::GetTensorShape(input2), + tflite::micro::GetTensorData(input2), + tflite::micro::GetTensorShape(output), + tflite::micro::GetTensorData(output)); + } + } + return kTfLiteOk; +} + +void EvalMulFloatReference(TfLiteContext* context, TfLiteNode* node, + TfLiteMulParams* params, const OpDataMul* data, + const TfLiteEvalTensor* input1, + const TfLiteEvalTensor* input2, + TfLiteEvalTensor* output) { + tflite::ArithmeticParams op_params = {}; + op_params.float_activation_min = data->output_activation_min_f32; + op_params.float_activation_max = data->output_activation_max_f32; + + bool need_broadcast = reference_ops::ProcessBroadcastShapes( + tflite::micro::GetTensorShape(input1), + tflite::micro::GetTensorShape(input2), &op_params); + + if (need_broadcast) { + reference_ops::BroadcastMul4DSlow( + op_params, tflite::micro::GetTensorShape(input1), + tflite::micro::GetTensorData(input1), + tflite::micro::GetTensorShape(input2), + tflite::micro::GetTensorData(input2), + tflite::micro::GetTensorShape(output), + tflite::micro::GetTensorData(output)); + } else { + reference_ops::Mul(op_params, tflite::micro::GetTensorShape(input1), + tflite::micro::GetTensorData(input1), + tflite::micro::GetTensorShape(input2), + tflite::micro::GetTensorData(input2), + tflite::micro::GetTensorShape(output), + tflite::micro::GetTensorData(output)); + } +} + +} // namespace tflite diff --git a/tensorflow/lite/micro/kernels/mul_test.cc b/tensorflow/lite/micro/kernels/mul_test.cc new file mode 100644 index 0000000..2234a1f --- /dev/null +++ b/tensorflow/lite/micro/kernels/mul_test.cc @@ -0,0 +1,252 @@ +/* Copyright 2022 The TensorFlow Authors. 
All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#include "tensorflow/lite/c/builtin_op_data.h" +#include "tensorflow/lite/c/common.h" +#include "tensorflow/lite/micro/kernels/kernel_runner.h" +#include "tensorflow/lite/micro/test_helpers.h" +#include "tensorflow/lite/micro/testing/micro_test.h" + +namespace tflite { +namespace testing { +namespace { + +const int flat_size_simple = 4; +const float scale_simple = 0.01; +int dims_simple[] = {4, 1, 2, 2, 1}; +const float input1_simple[] = {-0.8, 0.2, 0.9, 0.7}; +const float input2_simple[] = {0.6, 0.4, 0.9, 0.8}; +const float golden_simple[] = {-0.48, 0.08, 0.81, 0.56}; +const float golden_simple_relu[] = {0.0, 0.08, 0.81, 0.56}; + +const int flat_size_broadcast = 6; +const float input_scale_broadcast = 0.05f; +const float output_scale_broadcast = 0.01f; +int dims_broadcast[] = {4, 1, 3, 1, 2}; +int dims_scalar_broadcast[] = {1, 1}; +const float input1_broadcast[] = {-2.0, 0.2, 0.7, 0.8, 1.1, 2.0}; +const float input2_broadcast[] = {0.1}; +const float golden_broadcast[] = {-0.2, 0.02, 0.07, 0.08, 0.11, 0.2}; +const float golden_broadcast_relu[] = {0, 0.02, 0.07, 0.08, 0.11, 0.2}; + +template +void ValidateMulGoldens(TfLiteTensor* tensors, int tensors_size, + TfLiteFusedActivation activation, const T* golden, + int output_len, float tolerance, T* output) { + TfLiteMulParams builtin_data = { + .activation = activation, + }; + + int 
inputs_array_data[] = {2, 0, 1}; + TfLiteIntArray* inputs_array = IntArrayFromInts(inputs_array_data); + int outputs_array_data[] = {1, 2}; + TfLiteIntArray* outputs_array = IntArrayFromInts(outputs_array_data); + + const TFLMRegistration registration = tflite::Register_MUL(); + micro::KernelRunner runner(registration, tensors, tensors_size, inputs_array, + outputs_array, + reinterpret_cast(&builtin_data)); + + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, runner.InitAndPrepare()); + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, runner.Invoke()); + + for (int i = 0; i < output_len; i++) { + TF_LITE_MICRO_EXPECT_NEAR(golden[i], output[i], tolerance); + } +} + +void TestMulFloat(int* input1_dims_data, const float* input1_data, + int* input2_dims_data, const float* input2_data, + int* output_dims_data, const float* golden, + float* output_data, TfLiteFusedActivation activation) { + TfLiteIntArray* input1_dims = IntArrayFromInts(input1_dims_data); + TfLiteIntArray* input2_dims = IntArrayFromInts(input2_dims_data); + TfLiteIntArray* output_dims = IntArrayFromInts(output_dims_data); + const int output_dims_count = ElementCount(*output_dims); + + constexpr int inputs_size = 2; + constexpr int outputs_size = 1; + constexpr int tensors_size = inputs_size + outputs_size; + TfLiteTensor tensors[tensors_size] = { + CreateTensor(input1_data, input1_dims), + CreateTensor(input2_data, input2_dims), + CreateTensor(output_data, output_dims), + }; + + ValidateMulGoldens(tensors, tensors_size, activation, golden, + output_dims_count, 1e-5, output_data); +} + +template +void TestMulQuantized(int* input1_dims_data, const float* input1_data, + T* input1_quantized, int* input2_dims_data, + const float* input2_data, T* input2_quantized, + const float input_scale, const int input_zero_point, + int* output_dims_data, const float* golden, + T* golden_quantized, const float output_scale, + const int output_zero_point, T* output_data, + TfLiteFusedActivation activation) { + TfLiteIntArray* input1_dims = 
IntArrayFromInts(input1_dims_data); + TfLiteIntArray* input2_dims = IntArrayFromInts(input2_dims_data); + TfLiteIntArray* output_dims = IntArrayFromInts(output_dims_data); + const int output_dims_count = ElementCount(*output_dims); + + constexpr int inputs_size = 2; + constexpr int outputs_size = 1; + constexpr int tensors_size = inputs_size + outputs_size; + TfLiteTensor tensors[tensors_size] = { + CreateQuantizedTensor(input1_data, input1_quantized, input1_dims, + input_scale, input_zero_point), + CreateQuantizedTensor(input2_data, input2_quantized, input2_dims, + input_scale, input_zero_point), + CreateQuantizedTensor(output_data, output_dims, output_scale, + output_zero_point)}; + + Quantize(golden, golden_quantized, output_dims_count, output_scale, + output_zero_point); + + ValidateMulGoldens(tensors, tensors_size, activation, golden_quantized, + output_dims_count, 1.0f, output_data); +} + +} // namespace + +} // namespace testing +} // namespace tflite + +TF_LITE_MICRO_TESTS_BEGIN + +TF_LITE_MICRO_TEST(SimpleFloatNoActivationShouldMatchGolden) { + float output_data[tflite::testing::flat_size_simple]; + + tflite::testing::TestMulFloat( + tflite::testing::dims_simple, tflite::testing::input1_simple, + tflite::testing::dims_simple, tflite::testing::input2_simple, + tflite::testing::dims_simple, tflite::testing::golden_simple, output_data, + kTfLiteActNone); +} + +TF_LITE_MICRO_TEST(SimpleFloatReluShouldMatchGolden) { + float output_data[tflite::testing::flat_size_simple]; + + tflite::testing::TestMulFloat( + tflite::testing::dims_simple, tflite::testing::input1_simple, + tflite::testing::dims_simple, tflite::testing::input2_simple, + tflite::testing::dims_simple, tflite::testing::golden_simple_relu, + output_data, kTfLiteActRelu); +} + +TF_LITE_MICRO_TEST(SimpleInt8NoActivationShouldMatchGolden) { + int8_t input1_quantized[tflite::testing::flat_size_simple]; + int8_t input2_quantized[tflite::testing::flat_size_simple]; + int8_t 
golden_quantized[tflite::testing::flat_size_simple]; + int8_t output_data[tflite::testing::flat_size_simple]; + + tflite::testing::TestMulQuantized( + tflite::testing::dims_simple, tflite::testing::input1_simple, + input1_quantized, tflite::testing::dims_simple, + tflite::testing::input2_simple, input2_quantized, + tflite::testing::scale_simple, 0, tflite::testing::dims_simple, + tflite::testing::golden_simple, golden_quantized, + tflite::testing::scale_simple, 0, output_data, kTfLiteActNone); +} + +TF_LITE_MICRO_TEST(SimpleInt16NoActivationShouldMatchGolden) { + int16_t input1_quantized[tflite::testing::flat_size_simple]; + int16_t input2_quantized[tflite::testing::flat_size_simple]; + int16_t golden_quantized[tflite::testing::flat_size_simple]; + int16_t output_data[tflite::testing::flat_size_simple]; + + tflite::testing::TestMulQuantized( + tflite::testing::dims_simple, tflite::testing::input1_simple, + input1_quantized, tflite::testing::dims_simple, + tflite::testing::input2_simple, input2_quantized, + tflite::testing::scale_simple, 0, tflite::testing::dims_simple, + tflite::testing::golden_simple, golden_quantized, + tflite::testing::scale_simple, 0, output_data, kTfLiteActNone); +} + +TF_LITE_MICRO_TEST(BroadcastFloatNoActivationShouldMatchGolden) { + float output_data[tflite::testing::flat_size_broadcast]; + + tflite::testing::TestMulFloat( + tflite::testing::dims_broadcast, tflite::testing::input1_broadcast, + tflite::testing::dims_scalar_broadcast, tflite::testing::input2_broadcast, + tflite::testing::dims_broadcast, tflite::testing::golden_broadcast, + output_data, kTfLiteActNone); +} + +TF_LITE_MICRO_TEST(BroadcastFloatReluShouldMatchGolden) { + float output_data[tflite::testing::flat_size_broadcast]; + + tflite::testing::TestMulFloat( + tflite::testing::dims_broadcast, tflite::testing::input1_broadcast, + tflite::testing::dims_scalar_broadcast, tflite::testing::input2_broadcast, + tflite::testing::dims_broadcast, tflite::testing::golden_broadcast_relu, 
+ output_data, kTfLiteActRelu); +} + +TF_LITE_MICRO_TEST(BroadcastInt8NoActivationShouldMatchGolden) { + int8_t input1_quantized[tflite::testing::flat_size_broadcast]; + int8_t input2_quantized[tflite::testing::flat_size_broadcast]; + int8_t golden_quantized[tflite::testing::flat_size_broadcast]; + int8_t output_data[tflite::testing::flat_size_broadcast]; + + tflite::testing::TestMulQuantized( + tflite::testing::dims_broadcast, tflite::testing::input1_broadcast, + input1_quantized, tflite::testing::dims_scalar_broadcast, + tflite::testing::input2_broadcast, input2_quantized, + tflite::testing::input_scale_broadcast, 0, + tflite::testing::dims_broadcast, tflite::testing::golden_broadcast, + golden_quantized, tflite::testing::output_scale_broadcast, 0, output_data, + kTfLiteActNone); +} + +TF_LITE_MICRO_TEST(BroadcastInt16NoActivationShouldMatchGolden) { + int16_t input1_quantized[tflite::testing::flat_size_broadcast]; + int16_t input2_quantized[tflite::testing::flat_size_broadcast]; + int16_t golden_quantized[tflite::testing::flat_size_broadcast]; + int16_t output_data[tflite::testing::flat_size_broadcast]; + + tflite::testing::TestMulQuantized( + tflite::testing::dims_broadcast, tflite::testing::input1_broadcast, + input1_quantized, tflite::testing::dims_scalar_broadcast, + tflite::testing::input2_broadcast, input2_quantized, + tflite::testing::input_scale_broadcast, 0, + tflite::testing::dims_broadcast, tflite::testing::golden_broadcast, + golden_quantized, tflite::testing::output_scale_broadcast, 0, output_data, + kTfLiteActNone); +} + +TF_LITE_MICRO_TEST(SimpleInt32NoActivationShouldMatchGolden) { + int32_t input1_quantized[tflite::testing::flat_size_simple]; + int32_t input2_quantized[tflite::testing::flat_size_simple]; + int32_t golden_quantized[tflite::testing::flat_size_simple]; + int32_t output_data[tflite::testing::flat_size_simple]; + + // Int32 mul ignores quantization parameters with TFLite and TFLM. 
Use + // TestMulQuantized method to convert float arrays to int32 arrays, but use + // quantization parameters of 0.01 for both inputs and 0.0001 for output, + // since input scales are multiplied together to get output scale when there + // is no rescaling inside the op. + tflite::testing::TestMulQuantized( + tflite::testing::dims_simple, tflite::testing::input1_simple, + input1_quantized, tflite::testing::dims_simple, + tflite::testing::input2_simple, input2_quantized, 0.01, 0, + tflite::testing::dims_simple, tflite::testing::golden_simple, + golden_quantized, 0.0001, 0, output_data, kTfLiteActNone); +} + +TF_LITE_MICRO_TESTS_END diff --git a/tensorflow/lite/micro/kernels/neg.cc b/tensorflow/lite/micro/kernels/neg.cc new file mode 100644 index 0000000..c80a809 --- /dev/null +++ b/tensorflow/lite/micro/kernels/neg.cc @@ -0,0 +1,57 @@ +/* Copyright 2022 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/ + +#include "tensorflow/lite/kernels/internal/reference/neg.h" + +#include "tensorflow/lite/c/common.h" +#include "tensorflow/lite/kernels/internal/tensor_ctypes.h" +#include "tensorflow/lite/micro/kernels/kernel_util.h" +#include "tensorflow/lite/micro/micro_log.h" + +namespace tflite { + +namespace { + +constexpr int kInputTensor = 0; +constexpr int kOutputTensor = 0; + +TfLiteStatus Eval(TfLiteContext* context, TfLiteNode* node) { + const TfLiteEvalTensor* input = + tflite::micro::GetEvalInput(context, node, kInputTensor); + TfLiteEvalTensor* output = + tflite::micro::GetEvalOutput(context, node, kOutputTensor); + switch (input->type) { + // TODO(wangtz): handle for kTfLiteInt8 + case kTfLiteFloat32: + reference_ops::Negate(tflite::micro::GetTensorShape(input), + tflite::micro::GetTensorData(input), + tflite::micro::GetTensorShape(output), + tflite::micro::GetTensorData(output)); + break; + default: + MicroPrintf("Type %s (%d) not supported.", TfLiteTypeGetName(input->type), + input->type); + return kTfLiteError; + } + return kTfLiteOk; +} + +} // namespace + +TFLMRegistration Register_NEG() { + return tflite::micro::RegisterOp(nullptr, nullptr, Eval); +} + +} // namespace tflite diff --git a/tensorflow/lite/micro/kernels/neg_test.cc b/tensorflow/lite/micro/kernels/neg_test.cc new file mode 100644 index 0000000..a1d1685 --- /dev/null +++ b/tensorflow/lite/micro/kernels/neg_test.cc @@ -0,0 +1,83 @@ +/* Copyright 2022 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. 
+You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#include "tensorflow/lite/c/builtin_op_data.h" +#include "tensorflow/lite/c/common.h" +#include "tensorflow/lite/micro/kernels/kernel_runner.h" +#include "tensorflow/lite/micro/test_helpers.h" +#include "tensorflow/lite/micro/testing/micro_test.h" + +namespace tflite { +namespace testing { +namespace { + +void TestNegFloat(int* input_dims_data, const float* input_data, + const float* expected_output_data, int* output_dims_data, + float* output_data) { + TfLiteIntArray* input_dims = IntArrayFromInts(input_dims_data); + TfLiteIntArray* output_dims = IntArrayFromInts(output_dims_data); + const int output_dims_count = ElementCount(*output_dims); + constexpr int inputs_size = 1; + constexpr int outputs_size = 1; + constexpr int tensors_size = inputs_size + outputs_size; + TfLiteTensor tensors[tensors_size] = { + CreateTensor(input_data, input_dims), + CreateTensor(output_data, output_dims), + }; + + int inputs_array_data[] = {1, 0}; + TfLiteIntArray* inputs_array = IntArrayFromInts(inputs_array_data); + int outputs_array_data[] = {1, 1}; + TfLiteIntArray* outputs_array = IntArrayFromInts(outputs_array_data); + + const TFLMRegistration registration = Register_NEG(); + micro::KernelRunner runner(registration, tensors, tensors_size, inputs_array, + outputs_array, + /*builtin_data=*/nullptr); + + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, runner.InitAndPrepare()); + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, runner.Invoke()); + + TF_LITE_MICRO_EXPECT_EQ(expected_output_data[0], output_data[0]); + for (int i = 0; 
i < output_dims_count; ++i) { + TF_LITE_MICRO_EXPECT_EQ(expected_output_data[i], output_data[i]); + } +} + +} // namespace +} // namespace testing +} // namespace tflite + +TF_LITE_MICRO_TESTS_BEGIN + +TF_LITE_MICRO_TEST(NegOpSingleFloat) { + int dims[] = {1, 2}; + const float input_data[] = {8.5, 0.0}; + const float golden[] = {-8.5, 0.0}; + float output_data[2]; + + tflite::testing::TestNegFloat(dims, input_data, golden, dims, output_data); +} + +TF_LITE_MICRO_TEST(NegOpFloat) { + int dims[] = {2, 2, 3}; + const float input_data[] = {-2.0f, -1.0f, 0.f, 1.0f, 2.0f, 3.0f}; + const float golden[] = {2.0f, 1.0f, -0.f, -1.0f, -2.0f, -3.0f}; + float output_data[6]; + + tflite::testing::TestNegFloat(dims, input_data, golden, dims, output_data); +} + +TF_LITE_MICRO_TESTS_END diff --git a/tensorflow/lite/micro/kernels/pack.cc b/tensorflow/lite/micro/kernels/pack.cc new file mode 100644 index 0000000..7b4aeef --- /dev/null +++ b/tensorflow/lite/micro/kernels/pack.cc @@ -0,0 +1,112 @@ +/* Copyright 2022 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/ + +#include "tensorflow/lite/c/builtin_op_data.h" +#include "tensorflow/lite/c/common.h" +#include "tensorflow/lite/kernels/internal/tensor_ctypes.h" +#include "tensorflow/lite/micro/kernels/kernel_util.h" +#include "tensorflow/lite/micro/micro_log.h" + +namespace tflite { + +namespace { + +constexpr int kOutputTensor = 0; + +template +TfLiteStatus PackImpl(TfLiteContext* context, TfLiteNode* node, + TfLiteEvalTensor* output, int values_count, int axis) { + const TfLiteEvalTensor* input0 = + tflite::micro::GetEvalInput(context, node, 0); + + const int dimensions = output->dims->size; + const TfLiteIntArray* input_dims = input0->dims; + const TfLiteIntArray* output_dims = output->dims; + + if (axis < 0) { + axis += dimensions; + } + + int outer_size = 1; + for (int i = 0; i < axis; ++i) { + outer_size *= output_dims->data[i]; + } + int copy_size = 1; + for (int i = axis + 1; i < dimensions; ++i) { + copy_size *= output_dims->data[i]; + } + int input_size = 1; + for (int i = 0; i < input_dims->size; ++i) { + input_size *= input_dims->data[i]; + } + TFLITE_DCHECK_EQ(input_size, copy_size * outer_size); + + T* output_data = tflite::micro::GetTensorData(output); + + for (int i = 0; i < values_count; ++i) { + const TfLiteEvalTensor* t = tflite::micro::GetEvalInput(context, node, i); + const T* input_data = tflite::micro::GetTensorData(t); + for (int k = 0; k < outer_size; ++k) { + const T* input_ptr = input_data + copy_size * k; + int loc = k * values_count * copy_size + i * copy_size; + T* output_ptr = output_data + loc; + for (int j = 0; j < copy_size; ++j) output_ptr[j] = input_ptr[j]; + } + } + + return kTfLiteOk; +} + +TfLiteStatus Eval(TfLiteContext* context, TfLiteNode* node) { + const TfLitePackParams* data = + reinterpret_cast(node->builtin_data); + + TfLiteEvalTensor* output = + tflite::micro::GetEvalOutput(context, node, kOutputTensor); + + switch (output->type) { + case 
kTfLiteFloat32: { + return PackImpl(context, node, output, data->values_count, + data->axis); + } + case kTfLiteInt8: { + return PackImpl(context, node, output, data->values_count, + data->axis); + } + case kTfLiteInt32: { + return PackImpl(context, node, output, data->values_count, + data->axis); + } + case kTfLiteInt64: { + return PackImpl(context, node, output, data->values_count, + data->axis); + } + default: { + MicroPrintf("Type '%s' is not supported by pack.", + TfLiteTypeGetName(output->type)); + return kTfLiteError; + } + } + + return kTfLiteOk; +} + +} // namespace + +TFLMRegistration Register_PACK() { + return tflite::micro::RegisterOp(nullptr, nullptr, Eval); +} + +} // namespace tflite diff --git a/tensorflow/lite/micro/kernels/pack_test.cc b/tensorflow/lite/micro/kernels/pack_test.cc new file mode 100644 index 0000000..0942217 --- /dev/null +++ b/tensorflow/lite/micro/kernels/pack_test.cc @@ -0,0 +1,275 @@ +/* Copyright 2019 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/ + +#include "tensorflow/lite/c/builtin_op_data.h" +#include "tensorflow/lite/c/common.h" +#include "tensorflow/lite/micro/debug_log.h" +#include "tensorflow/lite/micro/kernels/kernel_runner.h" +#include "tensorflow/lite/micro/test_helpers.h" +#include "tensorflow/lite/micro/testing/micro_test.h" + +namespace tflite { +namespace testing { + +template +void ValidatePackGoldens(TfLiteTensor* tensors, int tensors_size, + TfLitePackParams params, TfLiteIntArray* inputs_array, + TfLiteIntArray* outputs_array, const T* golden, + int output_len, float tolerance, T* output) { + // Place a unique value in the uninitialized output buffer. + for (int i = 0; i < output_len; ++i) { + output[i] = 23; + } + + const TFLMRegistration registration = Register_PACK(); + micro::KernelRunner runner(registration, tensors, tensors_size, inputs_array, + outputs_array, reinterpret_cast(¶ms)); + + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, runner.InitAndPrepare()); + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, runner.Invoke()); + + for (int i = 0; i < output_len; ++i) { + TF_LITE_MICRO_EXPECT_NEAR(golden[i], output[i], tolerance); + } +} + +void TestPackTwoInputsFloat(int* input1_dims_data, const float* input1_data, + int* input2_dims_data, const float* input2_data, + int axis, int* output_dims_data, + const float* expected_output_data, + float* output_data) { + TfLiteIntArray* input1_dims = IntArrayFromInts(input1_dims_data); + TfLiteIntArray* input2_dims = IntArrayFromInts(input2_dims_data); + TfLiteIntArray* output_dims = IntArrayFromInts(output_dims_data); + const int output_dims_count = ElementCount(*output_dims); + + constexpr int input_size = 2; + constexpr int output_size = 1; + constexpr int tensors_size = input_size + output_size; + TfLiteTensor tensors[tensors_size] = {CreateTensor(input1_data, input1_dims), + CreateTensor(input2_data, input2_dims), + CreateTensor(output_data, output_dims)}; + + TfLitePackParams 
builtin_data = { + .values_count = 2, + .axis = axis, + }; + int inputs_array_data[] = {2, 0, 1}; + TfLiteIntArray* inputs_array = IntArrayFromInts(inputs_array_data); + int outputs_array_data[] = {1, 2}; + TfLiteIntArray* outputs_array = IntArrayFromInts(outputs_array_data); + + ValidatePackGoldens(tensors, tensors_size, builtin_data, inputs_array, + outputs_array, expected_output_data, output_dims_count, + 1e-5f, output_data); +} + +void TestPackThreeInputsFloat(int* input1_dims_data, const float* input1_data, + int* input2_dims_data, const float* input2_data, + int* input3_dims_data, const float* input3_data, + int axis, int* output_dims_data, + const float* expected_output_data, + float* output_data) { + TfLiteIntArray* input1_dims = IntArrayFromInts(input1_dims_data); + TfLiteIntArray* input2_dims = IntArrayFromInts(input2_dims_data); + TfLiteIntArray* input3_dims = IntArrayFromInts(input3_dims_data); + TfLiteIntArray* output_dims = IntArrayFromInts(output_dims_data); + const int output_dims_count = ElementCount(*output_dims); + + constexpr int input_size = 3; + constexpr int output_size = 1; + constexpr int tensors_size = input_size + output_size; + TfLiteTensor tensors[tensors_size] = {CreateTensor(input1_data, input1_dims), + CreateTensor(input2_data, input2_dims), + CreateTensor(input3_data, input3_dims), + CreateTensor(output_data, output_dims)}; + + TfLitePackParams builtin_data = { + .values_count = 3, + .axis = axis, + }; + int inputs_array_data[] = {3, 0, 1, 2}; + TfLiteIntArray* inputs_array = IntArrayFromInts(inputs_array_data); + int outputs_array_data[] = {1, 3}; + TfLiteIntArray* outputs_array = IntArrayFromInts(outputs_array_data); + + ValidatePackGoldens(tensors, tensors_size, builtin_data, inputs_array, + outputs_array, expected_output_data, output_dims_count, + 1e-5f, output_data); +} + +void TestPackTwoInputsQuantized( + int* input1_dims_data, const int8_t* input1_data, int* input2_dims_data, + const int8_t* input2_data, int axis, int* 
output_dims_data, + const int8_t* expected_output_data, int8_t* output_data) { + TfLiteIntArray* input1_dims = IntArrayFromInts(input1_dims_data); + TfLiteIntArray* input2_dims = IntArrayFromInts(input2_dims_data); + TfLiteIntArray* output_dims = IntArrayFromInts(output_dims_data); + const int output_dims_count = ElementCount(*output_dims); + + constexpr int input_size = 2; + constexpr int output_size = 1; + constexpr int tensors_size = input_size + output_size; + TfLiteTensor tensors[tensors_size] = { + // CreateQuantizedTensor needs scale/zero_point values as input, but these + // values don't matter as to the functionality of PACK, so just set as 1.0 + // and 128. + CreateQuantizedTensor(input1_data, input1_dims, 1.0, 128), + CreateQuantizedTensor(input2_data, input2_dims, 1.0, 128), + CreateQuantizedTensor(output_data, output_dims, 1.0, 128)}; + + TfLitePackParams builtin_data = { + .values_count = 2, + .axis = axis, + }; + int inputs_array_data[] = {2, 0, 1}; + TfLiteIntArray* inputs_array = IntArrayFromInts(inputs_array_data); + int outputs_array_data[] = {1, 2}; + TfLiteIntArray* outputs_array = IntArrayFromInts(outputs_array_data); + + ValidatePackGoldens(tensors, tensors_size, builtin_data, inputs_array, + outputs_array, expected_output_data, output_dims_count, + 1e-5f, output_data); +} + +void TestPackTwoInputsQuantized32( + int* input1_dims_data, const int32_t* input1_data, int* input2_dims_data, + const int32_t* input2_data, int axis, int* output_dims_data, + const int32_t* expected_output_data, int32_t* output_data) { + TfLiteIntArray* input1_dims = IntArrayFromInts(input1_dims_data); + TfLiteIntArray* input2_dims = IntArrayFromInts(input2_dims_data); + TfLiteIntArray* output_dims = IntArrayFromInts(output_dims_data); + const int output_dims_count = ElementCount(*output_dims); + + constexpr int input_size = 2; + constexpr int output_size = 1; + constexpr int tensors_size = input_size + output_size; + TfLiteTensor tensors[tensors_size] = 
{CreateTensor(input1_data, input1_dims), + CreateTensor(input2_data, input2_dims), + CreateTensor(output_data, output_dims)}; + + TfLitePackParams builtin_data = { + .values_count = 2, + .axis = axis, + }; + int inputs_array_data[] = {2, 0, 1}; + TfLiteIntArray* inputs_array = IntArrayFromInts(inputs_array_data); + int outputs_array_data[] = {1, 2}; + TfLiteIntArray* outputs_array = IntArrayFromInts(outputs_array_data); + + ValidatePackGoldens(tensors, tensors_size, builtin_data, inputs_array, + outputs_array, expected_output_data, output_dims_count, + 1e-5f, output_data); +} + +} // namespace testing +} // namespace tflite + +TF_LITE_MICRO_TESTS_BEGIN + +TF_LITE_MICRO_TEST(PackFloatThreeInputs) { + int input_shape[] = {1, 2}; + int output_shape[] = {2, 3, 2}; + const float input1_values[] = {1, 4}; + const float input2_values[] = {2, 5}; + const float input3_values[] = {3, 6}; + const float golden[] = {1, 4, 2, 5, 3, 6}; + const int axis = 0; + constexpr int output_dims_count = 6; + float output_data[output_dims_count]; + + tflite::testing::TestPackThreeInputsFloat( + input_shape, input1_values, input_shape, input2_values, input_shape, + input3_values, axis, output_shape, golden, output_data); +} + +TF_LITE_MICRO_TEST(PackFloatThreeInputsDifferentAxis) { + int input_shape[] = {1, 2}; + int output_shape[] = {2, 2, 3}; + const float input1_values[] = {1, 4}; + const float input2_values[] = {2, 5}; + const float input3_values[] = {3, 6}; + const float golden[] = {1, 2, 3, 4, 5, 6}; + const int axis = 1; + constexpr int output_dims_count = 6; + float output_data[output_dims_count]; + + tflite::testing::TestPackThreeInputsFloat( + input_shape, input1_values, input_shape, input2_values, input_shape, + input3_values, axis, output_shape, golden, output_data); +} + +TF_LITE_MICRO_TEST(PackFloatThreeInputsNegativeAxis) { + int input_shape[] = {1, 2}; + int output_shape[] = {2, 2, 3}; + const float input1_values[] = {1, 4}; + const float input2_values[] = {2, 5}; + const 
float input3_values[] = {3, 6}; + const float golden[] = {1, 2, 3, 4, 5, 6}; + const int axis = -1; + constexpr int output_dims_count = 6; + float output_data[output_dims_count]; + + tflite::testing::TestPackThreeInputsFloat( + input_shape, input1_values, input_shape, input2_values, input_shape, + input3_values, axis, output_shape, golden, output_data); +} + +TF_LITE_MICRO_TEST(PackFloatMultilDimensions) { + int input_shape[] = {2, 2, 3}; + int output_shape[] = {3, 2, 2, 3}; + const float input1_values[] = {1, 2, 3, 4, 5, 6}; + const float input2_values[] = {7, 8, 9, 10, 11, 12}; + const float golden[] = {1, 2, 3, 7, 8, 9, 4, 5, 6, 10, 11, 12}; + const int axis = 1; + constexpr int output_dims_count = 12; + float output_data[output_dims_count]; + + tflite::testing::TestPackTwoInputsFloat(input_shape, input1_values, + input_shape, input2_values, axis, + output_shape, golden, output_data); +} + +TF_LITE_MICRO_TEST(PackQuantizedMultilDimensions) { + int input_shape[] = {2, 2, 3}; + int output_shape[] = {3, 2, 2, 3}; + const int8_t input1_values[] = {1, 2, 3, 4, 5, 6}; + const int8_t input2_values[] = {7, 8, 9, 10, 11, 12}; + const int8_t golden[] = {1, 2, 3, 7, 8, 9, 4, 5, 6, 10, 11, 12}; + const int axis = 1; + constexpr int output_dims_count = 12; + int8_t output_data[output_dims_count]; + + tflite::testing::TestPackTwoInputsQuantized( + input_shape, input1_values, input_shape, input2_values, axis, + output_shape, golden, output_data); +} + +TF_LITE_MICRO_TEST(PackQuantized32MultilDimensions) { + int input_shape[] = {2, 2, 3}; + int output_shape[] = {3, 2, 2, 3}; + const int32_t input1_values[] = {1, 2, 3, 4, 5, 6}; + const int32_t input2_values[] = {7, 8, 9, 10, 11, 12}; + const int32_t golden[] = {1, 2, 3, 7, 8, 9, 4, 5, 6, 10, 11, 12}; + const int axis = 1; + constexpr int output_dims_count = 12; + int32_t output_data[output_dims_count]; + + tflite::testing::TestPackTwoInputsQuantized32( + input_shape, input1_values, input_shape, input2_values, axis, + 
output_shape, golden, output_data); +} + +TF_LITE_MICRO_TESTS_END diff --git a/tensorflow/lite/micro/kernels/pad.cc b/tensorflow/lite/micro/kernels/pad.cc new file mode 100644 index 0000000..f8d40ad --- /dev/null +++ b/tensorflow/lite/micro/kernels/pad.cc @@ -0,0 +1,229 @@ +/* Copyright 2019 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ +#include "tensorflow/lite/kernels/internal/reference/pad.h" + +#include + +#include "tensorflow/lite/c/builtin_op_data.h" +#include "tensorflow/lite/c/common.h" +#include "tensorflow/lite/kernels/internal/types.h" +#include "tensorflow/lite/kernels/kernel_util.h" +#include "tensorflow/lite/kernels/op_macros.h" +#include "tensorflow/lite/micro/kernels/kernel_util.h" +#include "tensorflow/lite/micro/micro_log.h" + +namespace tflite { +namespace { + +struct OpData { + PadParams params; + int32_t output_zero_point; +}; + +void* Init(TfLiteContext* context, const char* buffer, size_t length) { + TFLITE_DCHECK(context->AllocatePersistentBuffer != nullptr); + return context->AllocatePersistentBuffer(context, sizeof(OpData)); +} + +TfLiteStatus Eval(TfLiteContext* context, TfLiteNode* node) { + TFLITE_DCHECK(node->user_data != nullptr); + const OpData* data = static_cast(node->user_data); + + const TfLiteEvalTensor* input = + tflite::micro::GetEvalInput(context, node, /*index=*/0); + const TfLiteEvalTensor* constant_values = + 
NumInputs(node) == 3 + ? tflite::micro::GetEvalInput(context, node, /*index=*/2) + : nullptr; + TfLiteEvalTensor* output = + tflite::micro::GetEvalOutput(context, node, /*index=*/0); + + switch (input->type) { + case kTfLiteFloat32: { + float pad_value = + constant_values == nullptr + ? 0.f + : *tflite::micro::GetTensorData(constant_values); + if (data->params.resizing_category == ResizingCategory::kImageStyle) { + reference_ops::PadImageStyle( + data->params, tflite::micro::GetTensorShape(input), + tflite::micro::GetTensorData(input), &pad_value, + tflite::micro::GetTensorShape(output), + tflite::micro::GetTensorData(output)); + } else { + reference_ops::Pad(data->params, tflite::micro::GetTensorShape(input), + tflite::micro::GetTensorData(input), + &pad_value, tflite::micro::GetTensorShape(output), + tflite::micro::GetTensorData(output)); + } + } break; + case kTfLiteInt8: { + int8_t pad_value; + if (constant_values == nullptr) { + pad_value = static_cast(data->output_zero_point); + } else { + pad_value = *tflite::micro::GetTensorData(constant_values); + } + if (data->params.resizing_category == ResizingCategory::kImageStyle) { + reference_ops::PadImageStyle( + data->params, tflite::micro::GetTensorShape(input), + tflite::micro::GetTensorData(input), &pad_value, + tflite::micro::GetTensorShape(output), + tflite::micro::GetTensorData(output)); + } else { + reference_ops::Pad(data->params, tflite::micro::GetTensorShape(input), + tflite::micro::GetTensorData(input), + &pad_value, tflite::micro::GetTensorShape(output), + tflite::micro::GetTensorData(output)); + } + } break; + case kTfLiteInt16: { + int16_t pad_value = + constant_values == nullptr + ? 
0 + : *tflite::micro::GetTensorData(constant_values); + reference_ops::Pad(data->params, tflite::micro::GetTensorShape(input), + tflite::micro::GetTensorData(input), + &pad_value, tflite::micro::GetTensorShape(output), + tflite::micro::GetTensorData(output)); + } break; + case kTfLiteInt32: { + int32_t pad_value = + constant_values == nullptr + ? 0 + : *tflite::micro::GetTensorData(constant_values); + reference_ops::Pad(data->params, tflite::micro::GetTensorShape(input), + tflite::micro::GetTensorData(input), + &pad_value, tflite::micro::GetTensorShape(output), + tflite::micro::GetTensorData(output)); + } break; + default: + + MicroPrintf("Type %s not currently supported by Pad.", + TfLiteTypeGetName(input->type)); + return kTfLiteError; + } + return kTfLiteOk; +} + +} // namespace + +TfLiteStatus PadPrepare(TfLiteContext* context, TfLiteNode* node) { + MicroContext* micro_context = GetMicroContext(context); + + TFLITE_DCHECK(node->user_data != nullptr); + OpData* data = static_cast(node->user_data); + + TF_LITE_ENSURE(context, NumInputs(node) == 2 || NumInputs(node) == 3); + TF_LITE_ENSURE_EQ(context, NumOutputs(node), 1); + + TfLiteTensor* input = + micro_context->AllocateTempInputTensor(node, /*index=*/0); + TF_LITE_ENSURE(context, input != nullptr); + TfLiteTensor* paddings = + micro_context->AllocateTempInputTensor(node, /*index=*/1); + TF_LITE_ENSURE(context, paddings != nullptr); + TfLiteTensor* constant_values = + NumInputs(node) == 3 + ? micro_context->AllocateTempInputTensor(node, /*index=*/2) + : nullptr; + TfLiteTensor* output = + micro_context->AllocateTempOutputTensor(node, /*index=*/0); + TF_LITE_ENSURE(context, output != nullptr); + + TF_LITE_ENSURE_EQ(context, input->type, output->type); + + // Current implementations rely on the inputs being <= 4D. 
+ TF_LITE_ENSURE(context, NumDimensions(input) <= + reference_ops::PadKernelMaxDimensionCount()); + + if (constant_values != nullptr) { + TF_LITE_ENSURE_EQ(context, input->type, constant_values->type); + // Ensure that constant_values is a scalar. + TF_LITE_ENSURE_EQ(context, NumElements(constant_values), 1); + } + + // There must be a pair of paddings for each output dimension. + TF_LITE_ENSURE_EQ(context, GetTensorShape(paddings).FlatSize(), + output->dims->size * 2); + + // On Micro, outputs must be properly sized by the converter. + // NOTE: This data is only available because the paddings buffer is stored in + // the flatbuffer: + TF_LITE_ENSURE(context, IsConstantTensor(paddings)); + const int32_t* paddings_data = GetTensorData(paddings); + for (int i = 0; i < output->dims->size; i++) { + int output_dim = output->dims->data[i]; + int expected_dim = + input->dims->data[i] + paddings_data[i * 2] + paddings_data[i * 2 + 1]; + TF_LITE_ENSURE_EQ(context, output_dim, expected_dim); + } + + // Calculate OpData: + data->params.resizing_category = ResizingCategory::kGenericResize; + const int paddings_total = GetTensorShape(paddings).FlatSize(); + if (paddings_total == 8 && (paddings_data[0] == 0 && paddings_data[1] == 0) && + (paddings_data[6] == 0 && paddings_data[7] == 0)) { + data->params.resizing_category = ResizingCategory::kImageStyle; + } + + const int num_input_dimensions = NumDimensions(input); + data->params.left_padding_count = num_input_dimensions; + data->params.right_padding_count = num_input_dimensions; + + for (int idx = num_input_dimensions - 1; idx >= 0; --idx) { + data->params.left_padding[idx] = paddings_data[idx * 2]; + data->params.right_padding[idx] = paddings_data[idx * 2 + 1]; + } + + if (input->type == kTfLiteInt8) { + if (constant_values == nullptr) { + // Quantized Pad requires that 0 is represented in the quantized + // range. 
+ TF_LITE_ENSURE(context, output->params.zero_point >= + std::numeric_limits::min()); + TF_LITE_ENSURE(context, output->params.zero_point <= + std::numeric_limits::max()); + } else { + // Quantized Pad requires that 'constant_values' is represented in the + // same quantized range as the input and output tensors. + TF_LITE_ENSURE_EQ(context, output->params.zero_point, + constant_values->params.zero_point); + TF_LITE_ENSURE_EQ(context, static_cast(output->params.scale), + static_cast(constant_values->params.scale)); + } + data->output_zero_point = output->params.zero_point; + } + + micro_context->DeallocateTempTfLiteTensor(input); + micro_context->DeallocateTempTfLiteTensor(paddings); + if (constant_values != nullptr) { + micro_context->DeallocateTempTfLiteTensor(constant_values); + } + micro_context->DeallocateTempTfLiteTensor(output); + + return kTfLiteOk; +} + +TFLMRegistration Register_PAD() { + return tflite::micro::RegisterOp(Init, PadPrepare, Eval); +} + +// Also register Pad as PadV2. +TFLMRegistration Register_PADV2() { + return tflite::micro::RegisterOp(Init, PadPrepare, Eval); +} + +} // namespace tflite diff --git a/tensorflow/lite/micro/kernels/pad.h b/tensorflow/lite/micro/kernels/pad.h new file mode 100644 index 0000000..ad90890 --- /dev/null +++ b/tensorflow/lite/micro/kernels/pad.h @@ -0,0 +1,27 @@ +/* Copyright 2022 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/ + +#ifndef TENSORFLOW_LITE_MICRO_KERNELS_PAD_H_ +#define TENSORFLOW_LITE_MICRO_KERNELS_PAD_H_ + +#include "tensorflow/lite/c/common.h" + +namespace tflite { + +TfLiteStatus PadPrepare(TfLiteContext* context, TfLiteNode* node); + +} // namespace tflite + +#endif // TENSORFLOW_LITE_MICRO_KERNELS_PAD_H_ diff --git a/tensorflow/lite/micro/kernels/pad_test.cc b/tensorflow/lite/micro/kernels/pad_test.cc new file mode 100644 index 0000000..21939c8 --- /dev/null +++ b/tensorflow/lite/micro/kernels/pad_test.cc @@ -0,0 +1,494 @@ +/* Copyright 2018 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/ + +#include "tensorflow/lite/c/builtin_op_data.h" +#include "tensorflow/lite/c/common.h" +#include "tensorflow/lite/micro/kernels/kernel_runner.h" +#include "tensorflow/lite/micro/test_helpers.h" +#include "tensorflow/lite/micro/testing/micro_test.h" + +namespace tflite { +namespace testing { +namespace { + +template +TfLiteStatus ValidatePadGoldens(TfLiteTensor* tensors, int tensors_size, + const T* golden, T* output_data, + int output_length) { + int inputs_array_data[] = {2, 0, 1}; + TfLiteIntArray* inputs_array = IntArrayFromInts(inputs_array_data); + int outputs_array_data[] = {1, 2}; + TfLiteIntArray* outputs_array = IntArrayFromInts(outputs_array_data); + + const TFLMRegistration registration = Register_PAD(); + micro::KernelRunner runner(registration, tensors, tensors_size, inputs_array, + outputs_array, + /*builtin_data=*/nullptr); + + // Prepare should catch dimension mismatches. + TfLiteStatus prepare_status = runner.InitAndPrepare(); + if (prepare_status != kTfLiteOk) { + return prepare_status; + } + + // Eval should catch quantization mismatches. + TfLiteStatus invoke_status = runner.Invoke(); + if (invoke_status != kTfLiteOk) { + return invoke_status; + } + + for (int i = 0; i < output_length; ++i) { + TF_LITE_MICRO_EXPECT_EQ(golden[i], output_data[i]); + } + return kTfLiteOk; +} + +template +TfLiteStatus ValidatePadV2Goldens(TfLiteTensor* tensors, int tensors_size, + const T* golden, T* output_data, + int output_length) { + int inputs_array_data[] = {3, 0, 1, 2}; + TfLiteIntArray* inputs_array = IntArrayFromInts(inputs_array_data); + int outputs_array_data[] = {1, 3}; + TfLiteIntArray* outputs_array = IntArrayFromInts(outputs_array_data); + + const TFLMRegistration registration = Register_PADV2(); + micro::KernelRunner runner(registration, tensors, tensors_size, inputs_array, + outputs_array, + /*builtin_data=*/nullptr); + + // Prepare should catch dimension mismatches. 
+ TfLiteStatus prepare_status = runner.InitAndPrepare(); + if (prepare_status != kTfLiteOk) { + return prepare_status; + } + + // Eval should catch quantization mismatches. + TfLiteStatus invoke_status = runner.Invoke(); + if (invoke_status != kTfLiteOk) { + return invoke_status; + } + + for (int i = 0; i < output_length; ++i) { + TF_LITE_MICRO_EXPECT_EQ(golden[i], output_data[i]); + } + return kTfLiteOk; +} + +// output data and golden must be shaped correctly +void TestPadFloat(int* input_dims_data, const float* input_data, + int* pad_dims_data, const int32_t* pad_data, + int* output_dims_data, const float* golden, + float* output_data, + TfLiteStatus expected_status = kTfLiteOk) { + TfLiteIntArray* input_dims = IntArrayFromInts(input_dims_data); + TfLiteIntArray* pad_dims = IntArrayFromInts(pad_dims_data); + TfLiteIntArray* output_dims = IntArrayFromInts(output_dims_data); + const int output_dims_count = ElementCount(*output_dims); + constexpr int inputs_size = 2; + constexpr int outputs_size = 1; + constexpr int tensors_size = inputs_size + outputs_size; + TfLiteTensor tensors[tensors_size] = {CreateTensor(input_data, input_dims), + CreateTensor(pad_data, pad_dims), + CreateTensor(output_data, output_dims)}; + + // Pad tensor must be constant. + tensors[1].allocation_type = kTfLiteMmapRo; + + TF_LITE_MICRO_EXPECT_EQ(expected_status, + ValidatePadGoldens(tensors, tensors_size, golden, + output_data, output_dims_count)); +} + +// output data and golden must be shaped correctly +void TestPadV2Float(int* input_dims_data, const float* input_data, + int* pad_dims_data, const int32_t* pad_data, + const float pad_value, int* output_dims_data, + const float* golden, float* output_data, + TfLiteStatus expected_status = kTfLiteOk) { + TfLiteIntArray* input_dims = IntArrayFromInts(input_dims_data); + TfLiteIntArray* pad_dims = IntArrayFromInts(pad_dims_data); + int pad_value_dims_data[] = {1, 1}; // Only one padding value allowed. 
+ TfLiteIntArray* pad_value_dims = IntArrayFromInts(pad_value_dims_data); + TfLiteIntArray* output_dims = IntArrayFromInts(output_dims_data); + const int output_dims_count = ElementCount(*output_dims); + constexpr int inputs_size = 3; + constexpr int outputs_size = 1; + constexpr int tensors_size = inputs_size + outputs_size; + TfLiteTensor tensors[tensors_size] = { + CreateTensor(input_data, input_dims), CreateTensor(pad_data, pad_dims), + CreateTensor(&pad_value, pad_value_dims), + CreateTensor(output_data, output_dims)}; + + // Pad tensor must be constant. + tensors[1].allocation_type = kTfLiteMmapRo; + + TF_LITE_MICRO_EXPECT_EQ(expected_status, + ValidatePadV2Goldens(tensors, tensors_size, golden, + output_data, output_dims_count)); +} + +template +void TestPadQuantized(int* input_dims_data, const float* input_data, + T* input_quantized, float input_scale, + int input_zero_point, int* pad_dims_data, + const int32_t* pad_data, int* output_dims_data, + const float* golden, T* golden_quantized, + float output_scale, int output_zero_point, T* output_data, + TfLiteStatus expected_status = kTfLiteOk) { + TfLiteIntArray* input_dims = IntArrayFromInts(input_dims_data); + TfLiteIntArray* pad_dims = IntArrayFromInts(pad_dims_data); + TfLiteIntArray* output_dims = IntArrayFromInts(output_dims_data); + const int output_dims_count = ElementCount(*output_dims); + constexpr int inputs_size = 2; + constexpr int outputs_size = 1; + constexpr int tensors_size = inputs_size + outputs_size; + TfLiteTensor tensors[tensors_size] = { + CreateQuantizedTensor(input_data, input_quantized, input_dims, + input_scale, input_zero_point), + CreateTensor(pad_data, pad_dims), + CreateQuantizedTensor(output_data, output_dims, output_scale, + output_zero_point)}; + + // Pad tensor must be constant. 
+ tensors[1].allocation_type = kTfLiteMmapRo; + + tflite::Quantize(golden, golden_quantized, output_dims_count, output_scale, + output_zero_point); + TF_LITE_MICRO_EXPECT_EQ( + expected_status, + ValidatePadGoldens(tensors, tensors_size, golden_quantized, output_data, + output_dims_count)); +} + +template +void TestPadV2Quantized( + int* input_dims_data, const float* input_data, T* input_quantized, + float input_scale, int input_zero_point, int* pad_dims_data, + const int32_t* pad_data, const float pad_value, const float pad_value_scale, + const int pad_value_zero_point, int* output_dims_data, const float* golden, + T* golden_quantized, float output_scale, int output_zero_point, + T* output_data, TfLiteStatus expected_status = kTfLiteOk) { + TfLiteIntArray* input_dims = IntArrayFromInts(input_dims_data); + TfLiteIntArray* pad_dims = IntArrayFromInts(pad_dims_data); + int pad_value_dims_data[] = {1, 1}; // Only one padding value allowed. + TfLiteIntArray* pad_value_dims = IntArrayFromInts(pad_value_dims_data); + TfLiteIntArray* output_dims = IntArrayFromInts(output_dims_data); + T pad_value_quantized; + const int output_dims_count = ElementCount(*output_dims); + constexpr int inputs_size = 3; + constexpr int outputs_size = 1; + constexpr int tensors_size = inputs_size + outputs_size; + TfLiteTensor tensors[tensors_size] = { + CreateQuantizedTensor(input_data, input_quantized, input_dims, + input_scale, input_zero_point), + CreateTensor(pad_data, pad_dims), + CreateQuantizedTensor(&pad_value, &pad_value_quantized, pad_value_dims, + pad_value_scale, pad_value_zero_point), + CreateQuantizedTensor(output_data, output_dims, output_scale, + output_zero_point)}; + + // Pad tensor must be constant. 
+ tensors[1].allocation_type = kTfLiteMmapRo; + tensors[2].params.scale = pad_value_scale; + tensors[3].params.scale = output_scale; + + tflite::Quantize(golden, golden_quantized, output_dims_count, output_scale, + output_zero_point); + TF_LITE_MICRO_EXPECT_EQ( + expected_status, + ValidatePadV2Goldens(tensors, tensors_size, golden_quantized, output_data, + output_dims_count)); +} + +} // namespace +} // namespace testing +} // namespace tflite + +TF_LITE_MICRO_TESTS_BEGIN + +TF_LITE_MICRO_TEST(Test2DFloat) { + int input_dims[] = {4, 1, 2, 2, 1}; + const float input_values[] = {1, 2, 3, 4}; + int pad_dims[] = {2, 4, 2}; + const int32_t pad_values[] = {1, 1, 0, 0, 1, 1, 0, 0}; + int output_dims[] = {4, 3, 2, 4, 1}; + const float golden[] = {0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 2, 0, + 0, 3, 4, 0, 0, 0, 0, 0, 0, 0, 0, 0}; + float output_data[24]; + + tflite::testing::TestPadFloat(input_dims, input_values, pad_dims, pad_values, + output_dims, golden, output_data); +} + +TF_LITE_MICRO_TEST(Test4DFloat) { + int input_dims[] = {4, 1, 1, 1, 1}; + const float input_values[] = {42}; + int pad_dims[] = {2, 4, 2}; + const int32_t pad_values[] = {1, 1, 1, 1, 1, 1, 1, 1}; + int output_dims[] = {4, 3, 3, 3, 3}; + const int kOutputLen = 81; // 3 * 3 * 3 * 3 + float golden[kOutputLen]; + for (int i = 0; i < kOutputLen; i++) { + golden[i] = 0; + } + golden[40] = 42; + float output_data[kOutputLen]; + + tflite::testing::TestPadFloat(input_dims, input_values, pad_dims, pad_values, + output_dims, const_cast(golden), + output_data); +} + +TF_LITE_MICRO_TEST(Test2DFloatV2) { + int input_dims[] = {4, 1, 2, 2, 1}; + const float input_values[] = {1, 2, 3, 4}; + int pad_dims[] = {2, 4, 2}; + const int32_t pad_values[] = {1, 1, 0, 0, 1, 1, 0, 0}; + const float pad_value = 42; + int output_dims[] = {4, 3, 2, 4, 1}; + const float golden[] = {42, 42, 42, 42, 42, 42, 42, 42, 42, 1, 2, 42, + 42, 3, 4, 42, 42, 42, 42, 42, 42, 42, 42, 42}; + float output_data[24]; + + 
tflite::testing::TestPadV2Float(input_dims, input_values, pad_dims, + pad_values, pad_value, output_dims, golden, + output_data); +} + +TF_LITE_MICRO_TEST(Test2DInt8) { + int input_dims[] = {4, 1, 2, 2, 1}; + const float input_values[] = {1, 2, 3, 4}; + const float input_scale = 1.0f; + const int input_zero_point = 0; + int pad_dims[] = {2, 4, 2}; + const int32_t pad_values[] = {1, 1, 0, 0, 1, 1, 0, 0}; + int output_dims[] = {4, 3, 2, 4, 1}; + const float golden[] = {0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 2, 0, + 0, 3, 4, 0, 0, 0, 0, 0, 0, 0, 0, 0}; + const float output_scale = 1.0f; + const int output_zero_point = 0; + int8_t output_data[24]; + int8_t input_quantized[4]; + int8_t golden_quantized[24]; + + tflite::testing::TestPadQuantized( + input_dims, input_values, input_quantized, input_scale, input_zero_point, + pad_dims, pad_values, output_dims, golden, golden_quantized, output_scale, + output_zero_point, output_data); +} + +TF_LITE_MICRO_TEST(Test2DInt16) { + int input_dims[] = {4, 1, 2, 2, 1}; + const float input_values[] = {1, 2, 3, 4}; + const float input_scale = 1.0f; + const int input_zero_point = 0; + int pad_dims[] = {2, 4, 2}; + const int32_t pad_values[] = {1, 1, 0, 0, 1, 1, 0, 0}; + int output_dims[] = {4, 3, 2, 4, 1}; + const float golden[] = {0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 2, 0, + 0, 3, 4, 0, 0, 0, 0, 0, 0, 0, 0, 0}; + const float output_scale = 1.0f; + const int output_zero_point = 0; + int16_t output_data[24]; + int16_t input_quantized[4]; + int16_t golden_quantized[24]; + + tflite::testing::TestPadQuantized( + input_dims, input_values, input_quantized, input_scale, input_zero_point, + pad_dims, pad_values, output_dims, golden, golden_quantized, output_scale, + output_zero_point, output_data); +} + +TF_LITE_MICRO_TEST(Test2DInt32) { + int input_dims[] = {4, 1, 2, 2, 1}; + const float input_values[] = {1, 2, 3, 4}; + const float input_scale = 1.0f; + const int input_zero_point = 0; + int pad_dims[] = {2, 4, 2}; + const int32_t pad_values[] = {1, 1, 0, 0, 
1, 1, 0, 0}; + int output_dims[] = {4, 3, 2, 4, 1}; + const float golden[] = {0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 2, 0, + 0, 3, 4, 0, 0, 0, 0, 0, 0, 0, 0, 0}; + const float output_scale = 1.0f; + const int output_zero_point = 0; + int32_t output_data[24]; + int32_t input_quantized[4]; + int32_t golden_quantized[24]; + + tflite::testing::TestPadQuantized( + input_dims, input_values, input_quantized, input_scale, input_zero_point, + pad_dims, pad_values, output_dims, golden, golden_quantized, output_scale, + output_zero_point, output_data); +} + +TF_LITE_MICRO_TEST(Test2DInt8V2) { + int input_dims[] = {4, 1, 2, 2, 1}; + const float input_values[] = {1, 2, 3, 4}; + const float input_scale = 1.0f; + const int input_zero_point = 0; + int pad_dims[] = {2, 4, 2}; + const int32_t pad_values[] = {1, 1, 0, 0, 1, 1, 0, 0}; + const float pad_value = 42; + const float pad_value_scale = 1.0; + const float pad_value_zero_point = 0; + int output_dims[] = {4, 3, 2, 4, 1}; + const float golden[] = {42, 42, 42, 42, 42, 42, 42, 42, 42, 1, 2, 42, + 42, 3, 4, 42, 42, 42, 42, 42, 42, 42, 42, 42}; + const float output_scale = 1.0f; + const int output_zero_point = 0; + int8_t output_data[24]; + int8_t input_quantized[4]; + int8_t golden_quantized[24]; + + tflite::testing::TestPadV2Quantized( + input_dims, input_values, input_quantized, input_scale, input_zero_point, + pad_dims, pad_values, pad_value, pad_value_scale, pad_value_zero_point, + output_dims, golden, golden_quantized, output_scale, output_zero_point, + output_data); +} + +TF_LITE_MICRO_TEST(Test2DInt16V2) { + int input_dims[] = {4, 1, 2, 2, 1}; + const float input_values[] = {1, 2, 3, 4}; + const float input_scale = 1.0f; + const int input_zero_point = 0; + int pad_dims[] = {2, 4, 2}; + const int32_t pad_values[] = {1, 1, 0, 0, 1, 1, 0, 0}; + const float pad_value = 42; + const float pad_value_scale = 1.0; + const float pad_value_zero_point = 0; + int output_dims[] = {4, 3, 2, 4, 1}; + const float golden[] = {42, 42, 42, 42, 42, 42, 
42, 42, 42, 1, 2, 42, + 42, 3, 4, 42, 42, 42, 42, 42, 42, 42, 42, 42}; + const float output_scale = 1.0f; + const int output_zero_point = 0; + int16_t output_data[24]; + int16_t input_quantized[4]; + int16_t golden_quantized[24]; + + tflite::testing::TestPadV2Quantized( + input_dims, input_values, input_quantized, input_scale, input_zero_point, + pad_dims, pad_values, pad_value, pad_value_scale, pad_value_zero_point, + output_dims, golden, golden_quantized, output_scale, output_zero_point, + output_data); +} + +TF_LITE_MICRO_TEST(Test2DInt32V2) { + int input_dims[] = {4, 1, 2, 2, 1}; + const float input_values[] = {1, 2, 3, 4}; + const float input_scale = 1.0f; + const int input_zero_point = 0; + int pad_dims[] = {2, 4, 2}; + const int32_t pad_values[] = {1, 1, 0, 0, 1, 1, 0, 0}; + const float pad_value = 42; + const float pad_value_scale = 1.0; + const float pad_value_zero_point = 0; + int output_dims[] = {4, 3, 2, 4, 1}; + const float golden[] = {42, 42, 42, 42, 42, 42, 42, 42, 42, 1, 2, 42, + 42, 3, 4, 42, 42, 42, 42, 42, 42, 42, 42, 42}; + const float output_scale = 1.0f; + const int output_zero_point = 0; + int32_t output_data[24]; + int32_t input_quantized[4]; + int32_t golden_quantized[24]; + + tflite::testing::TestPadV2Quantized( + input_dims, input_values, input_quantized, input_scale, input_zero_point, + pad_dims, pad_values, pad_value, pad_value_scale, pad_value_zero_point, + output_dims, golden, golden_quantized, output_scale, output_zero_point, + output_data); +} + +TF_LITE_MICRO_TEST(Test2DInt8V2ExpectFailurePadValueQuantizationMismatch) { + int input_dims[] = {4, 1, 2, 2, 1}; + const float input_values[] = {1, 2, 3, 4}; + const float input_scale = 1.0f; + const int input_zero_point = 0; + int pad_dims[] = {2, 4, 2}; + const int32_t pad_values[] = {1, 1, 0, 0, 1, 1, 0, 0}; + const float pad_value = 42; + // Causes failure since this is in a different quantization space than input. 
+ const float pad_value_scale = .5; + const float pad_value_zero_point = 0; + int output_dims[] = {4, 3, 2, 4, 1}; + const float golden[] = {0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0}; + const float output_scale = 1.0f; + const int output_zero_point = 0; + int8_t output_data[24] = {0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0}; + int8_t input_quantized[4]; + int8_t golden_quantized[24]; + + tflite::testing::TestPadV2Quantized( + input_dims, input_values, input_quantized, input_scale, input_zero_point, + pad_dims, pad_values, pad_value, pad_value_scale, pad_value_zero_point, + output_dims, golden, golden_quantized, output_scale, output_zero_point, + output_data, kTfLiteError); +} + +// NOTE(review): this test previously duplicated the name +// Test2DInt8V2ExpectFailurePadValueQuantizationMismatch, which redefines the +// same test function and fails to build; following the Int8/Int16/Int32 +// pattern used by the other PadV2 tests, it is now the int16_t variant. +TF_LITE_MICRO_TEST(Test2DInt16V2ExpectFailurePadValueQuantizationMismatch) { + int input_dims[] = {4, 1, 2, 2, 1}; + const float input_values[] = {1, 2, 3, 4}; + const float input_scale = 1.0f; + const int input_zero_point = 0; + int pad_dims[] = {2, 4, 2}; + const int32_t pad_values[] = {1, 1, 0, 0, 1, 1, 0, 0}; + const float pad_value = 42; + // Causes failure since this is in a different quantization space than input.
+ const float pad_value_scale = .5; + const float pad_value_zero_point = 0; + int output_dims[] = {4, 3, 2, 4, 1}; + const float golden[] = {0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0}; + const float output_scale = 1.0f; + const int output_zero_point = 0; + int16_t output_data[24] = {0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0}; + int16_t input_quantized[4]; + int16_t golden_quantized[24]; + + tflite::testing::TestPadV2Quantized( + input_dims, input_values, input_quantized, input_scale, input_zero_point, + pad_dims, pad_values, pad_value, pad_value_scale, pad_value_zero_point, + output_dims, golden, golden_quantized, output_scale, output_zero_point, + output_data, kTfLiteError); +} + +TF_LITE_MICRO_TEST(Test2DInt8ExpectFailureQuantizationRangeExcludesZero) { + int input_dims[] = {4, 1, 2, 2, 1}; + const float input_values[] = {1, 2, 3, 4}; + const float input_scale = 1.0f; + const int input_zero_point = 0; + int pad_dims[] = {2, 4, 2}; + const int32_t pad_values[] = {1, 1, 0, 0, 1, 1, 0, 0}; + int output_dims[] = {4, 3, 2, 4, 1}; + const float golden[] = {42, 42, 42, 42, 42, 42, 42, 42, 42, 1, 2, 42, + 42, 3, 4, 42, 42, 42, 42, 42, 42, 42, 42, 42}; + // Causes failure since this quantization zero point excludes zero. + const float output_scale = 1.0f; + const int output_zero_point = 129; + int8_t output_data[24]; + int8_t input_quantized[4]; + int8_t golden_quantized[24]; + + tflite::testing::TestPadQuantized( + input_dims, input_values, input_quantized, input_scale, input_zero_point, + pad_dims, pad_values, output_dims, golden, golden_quantized, output_scale, + output_zero_point, output_data, kTfLiteError); +} + +TF_LITE_MICRO_TESTS_END diff --git a/tensorflow/lite/micro/kernels/pooling.cc b/tensorflow/lite/micro/kernels/pooling.cc new file mode 100644 index 0000000..e03f72e --- /dev/null +++ b/tensorflow/lite/micro/kernels/pooling.cc @@ -0,0 +1,109 @@ +/* Copyright 2022 The TensorFlow Authors.
All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ +#include "tensorflow/lite/kernels/internal/reference/pooling.h" + +#include "tensorflow/lite/c/builtin_op_data.h" +#include "tensorflow/lite/kernels/kernel_util.h" +#include "tensorflow/lite/micro/kernels/kernel_util.h" +#include "tensorflow/lite/micro/kernels/pooling.h" +#include "tensorflow/lite/micro/micro_log.h" + +namespace tflite { + +namespace { + +TfLiteStatus AverageEval(TfLiteContext* context, TfLiteNode* node) { + TFLITE_DCHECK(node->builtin_data != nullptr); + auto* params = reinterpret_cast(node->builtin_data); + + TFLITE_DCHECK(node->user_data != nullptr); + const OpDataPooling* data = + static_cast(node->user_data); + + const TfLiteEvalTensor* input = + micro::GetEvalInput(context, node, kPoolingInputTensor); + TfLiteEvalTensor* output = + micro::GetEvalOutput(context, node, kPoolingOutputTensor); + + // Inputs and outputs share the same type, guaranteed by the converter. 
+ switch (input->type) { + case kTfLiteFloat32: + AveragePoolingEvalFloat(context, node, params, data, input, output); + break; + case kTfLiteInt8: + AveragePoolingEvalQuantized(context, node, params, data, input, + output); + break; + case kTfLiteInt16: + AveragePoolingEvalQuantized(context, node, params, data, input, + output); + break; + default: + MicroPrintf("Input type %s is not currently supported", + TfLiteTypeGetName(input->type)); + return kTfLiteError; + } + return kTfLiteOk; +} + +TfLiteStatus MaxEval(TfLiteContext* context, TfLiteNode* node) { + TFLITE_DCHECK(node->builtin_data != nullptr); + auto* params = reinterpret_cast(node->builtin_data); + + TFLITE_DCHECK(node->user_data != nullptr); + const OpDataPooling* data = + static_cast(node->user_data); + + const TfLiteEvalTensor* input = + micro::GetEvalInput(context, node, kPoolingInputTensor); + TfLiteEvalTensor* output = + micro::GetEvalOutput(context, node, kPoolingOutputTensor); + + switch (input->type) { + case kTfLiteFloat32: + MaxPoolingEvalFloat(context, node, params, data, input, output); + break; + case kTfLiteInt8: + MaxPoolingEvalQuantized(context, node, params, data, input, + output); + break; + case kTfLiteInt16: + MaxPoolingEvalQuantized(context, node, params, data, input, + output); + break; + default: + MicroPrintf("Type %s not currently supported.", + TfLiteTypeGetName(input->type)); + return kTfLiteError; + } + return kTfLiteOk; +} + +void* Init(TfLiteContext* context, const char* buffer, size_t length) { + TFLITE_DCHECK(context->AllocatePersistentBuffer != nullptr); + return context->AllocatePersistentBuffer(context, sizeof(OpDataPooling)); +} + +} // namespace + +TFLMRegistration Register_AVERAGE_POOL_2D() { + return tflite::micro::RegisterOp(Init, PoolingPrepare, AverageEval); +} + +TFLMRegistration Register_MAX_POOL_2D() { + return tflite::micro::RegisterOp(Init, PoolingPrepare, MaxEval); +} + +} // namespace tflite diff --git a/tensorflow/lite/micro/kernels/pooling.h 
b/tensorflow/lite/micro/kernels/pooling.h new file mode 100644 index 0000000..a87e22c --- /dev/null +++ b/tensorflow/lite/micro/kernels/pooling.h @@ -0,0 +1,142 @@ +/* Copyright 2023 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#ifndef TENSORFLOW_LITE_MICRO_KERNELS_POOLING_H_ +#define TENSORFLOW_LITE_MICRO_KERNELS_POOLING_H_ + +#include + +#include "tensorflow/lite/c/builtin_op_data.h" +#include "tensorflow/lite/c/common.h" +#include "tensorflow/lite/kernels/internal/reference/integer_ops/pooling.h" +#include "tensorflow/lite/kernels/internal/reference/pooling.h" +#include "tensorflow/lite/kernels/internal/tensor_ctypes.h" +#include "tensorflow/lite/kernels/kernel_util.h" +#include "tensorflow/lite/kernels/padding.h" +#include "tensorflow/lite/micro/kernels/kernel_util.h" +#include "tensorflow/lite/micro/kernels/micro_ops.h" +#include "tensorflow/lite/micro/micro_log.h" + +namespace tflite { + +extern const int kPoolingInputTensor; +extern const int kPoolingOutputTensor; + +struct OpDataPooling { + TfLitePaddingValues padding; + int32_t activation_min; + int32_t activation_max; + float activation_min_f32; + float activation_max_f32; +}; + +TfLiteStatus CalculateOpDataPooling(const TfLiteContext* context, + const TfLitePoolParams* params, + const TfLiteTensor* input, + const TfLiteTensor* output, + OpDataPooling* data); + +TfLiteStatus 
PoolingPrepare(TfLiteContext* context, TfLiteNode* node); + +void AveragePoolingEvalFloat(const TfLiteContext* context, + const TfLiteNode* node, + const TfLitePoolParams* params, + const OpDataPooling* data, + const TfLiteEvalTensor* input, + TfLiteEvalTensor* output); + +template +void AveragePoolingEvalQuantized(TfLiteContext* context, const TfLiteNode* node, + const TfLitePoolParams* params, + const OpDataPooling* data, + const TfLiteEvalTensor* input, + TfLiteEvalTensor* output) { + TFLITE_DCHECK(input->type == kTfLiteInt8 || input->type == kTfLiteInt16); + + PoolParams op_params; + op_params.stride_height = params->stride_height; + op_params.stride_width = params->stride_width; + op_params.filter_height = params->filter_height; + op_params.filter_width = params->filter_width; + op_params.padding_values.height = data->padding.height; + op_params.padding_values.width = data->padding.width; + op_params.quantized_activation_min = data->activation_min; + op_params.quantized_activation_max = data->activation_max; + + reference_integer_ops::AveragePool(op_params, + tflite::micro::GetTensorShape(input), + tflite::micro::GetTensorData(input), + tflite::micro::GetTensorShape(output), + tflite::micro::GetTensorData(output)); +} + +void MaxPoolingEvalFloat(TfLiteContext* context, TfLiteNode* node, + TfLitePoolParams* params, const OpDataPooling* data, + const TfLiteEvalTensor* input, + TfLiteEvalTensor* output); + +template +void MaxPoolingEvalQuantized(TfLiteContext* context, TfLiteNode* node, + TfLitePoolParams* params, + const OpDataPooling* data, + const TfLiteEvalTensor* input, + TfLiteEvalTensor* output) { + TFLITE_DCHECK(input->type == kTfLiteInt8 || input->type == kTfLiteInt16); + + tflite::PoolParams op_params; + op_params.stride_height = params->stride_height; + op_params.stride_width = params->stride_width; + op_params.filter_height = params->filter_height; + op_params.filter_width = params->filter_width; + op_params.padding_values.height = 
data->padding.height; + op_params.padding_values.width = data->padding.width; + op_params.quantized_activation_min = data->activation_min; + op_params.quantized_activation_max = data->activation_max; + + reference_integer_ops::MaxPool(op_params, + tflite::micro::GetTensorShape(input), + tflite::micro::GetTensorData(input), + tflite::micro::GetTensorShape(output), + tflite::micro::GetTensorData(output)); +} + +#if defined(CMSIS_NN) || defined(XTENSA) +TFLMRegistration Register_AVERAGE_POOL_2D_INT8(); + +TFLMRegistration Register_MAX_POOL_2D_INT8(); + +TFLMRegistration Register_AVERAGE_POOL_2D_INT16(); + +TFLMRegistration Register_MAX_POOL_2D_INT16(); +#else +inline TFLMRegistration Register_AVERAGE_POOL_2D_INT8() { + return tflite::Register_AVERAGE_POOL_2D(); +} + +inline TFLMRegistration Register_MAX_POOL_2D_INT8() { + return tflite::Register_MAX_POOL_2D(); +} + +inline TFLMRegistration Register_AVERAGE_POOL_2D_INT16() { + return tflite::Register_AVERAGE_POOL_2D(); +} + +inline TFLMRegistration Register_MAX_POOL_2D_INT16() { + return tflite::Register_MAX_POOL_2D(); +} +#endif +} // namespace tflite + +#endif // TENSORFLOW_LITE_MICRO_KERNELS_POOLING_H_ diff --git a/tensorflow/lite/micro/kernels/pooling_common.cc b/tensorflow/lite/micro/kernels/pooling_common.cc new file mode 100644 index 0000000..b39e9d8 --- /dev/null +++ b/tensorflow/lite/micro/kernels/pooling_common.cc @@ -0,0 +1,128 @@ +/* Copyright 2022 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#include "tensorflow/lite/c/builtin_op_data.h" +#include "tensorflow/lite/kernels/internal/reference/integer_ops/pooling.h" +#include "tensorflow/lite/kernels/internal/reference/pooling.h" +#include "tensorflow/lite/kernels/internal/tensor_ctypes.h" +#include "tensorflow/lite/kernels/kernel_util.h" +#include "tensorflow/lite/kernels/padding.h" +#include "tensorflow/lite/micro/kernels/kernel_util.h" +#include "tensorflow/lite/micro/kernels/pooling.h" + +namespace tflite { + +const int kPoolingInputTensor = 0; +const int kPoolingOutputTensor = 0; + +TfLiteStatus CalculateOpDataPooling(const TfLiteContext* context, + const TfLitePoolParams* params, + const TfLiteTensor* input, + const TfLiteTensor* output, + OpDataPooling* data) { + // input: batch, height, width, channel + int height = SizeOfDimension(input, 1); + int width = SizeOfDimension(input, 2); + + int out_height, out_width; + + data->padding = ComputePaddingHeightWidth( + params->stride_height, params->stride_width, + /*dilation_rate_height=*/1, + /*dilation_rate_width=*/1, height, width, params->filter_height, + params->filter_width, params->padding, &out_height, &out_width); + + return kTfLiteOk; +} + +TfLiteStatus PoolingPrepare(TfLiteContext* context, TfLiteNode* node) { + TFLITE_DCHECK(node->builtin_data != nullptr); + auto* params = reinterpret_cast(node->builtin_data); + + TFLITE_DCHECK(node->user_data != nullptr); + OpDataPooling* data = static_cast(node->user_data); + + MicroContext* micro_context = GetMicroContext(context); + + TfLiteTensor* input = + micro_context->AllocateTempInputTensor(node, kPoolingInputTensor); + TF_LITE_ENSURE(context, input != nullptr); + TfLiteTensor* output = + micro_context->AllocateTempOutputTensor(node, kPoolingOutputTensor); + TF_LITE_ENSURE(context, output != nullptr); + + 
TF_LITE_ENSURE_STATUS( + CalculateOpDataPooling(context, params, input, output, data)); + + if (input->type == kTfLiteFloat32) { + CalculateActivationRange(params->activation, &data->activation_min_f32, + &data->activation_max_f32); + } else if (input->type == kTfLiteInt8 || input->type == kTfLiteInt16) { + CalculateActivationRangeQuantized(context, params->activation, output, + &data->activation_min, + &data->activation_max); + } else { + MicroPrintf("Type %s (%d) not supported.", TfLiteTypeGetName(input->type), + input->type); + return kTfLiteError; + } + + micro_context->DeallocateTempTfLiteTensor(input); + micro_context->DeallocateTempTfLiteTensor(output); + + return kTfLiteOk; +} + +void AveragePoolingEvalFloat(const TfLiteContext* context, + const TfLiteNode* node, + const TfLitePoolParams* params, + const OpDataPooling* data, + const TfLiteEvalTensor* input, + TfLiteEvalTensor* output) { + PoolParams op_params; + op_params.stride_height = params->stride_height; + op_params.stride_width = params->stride_width; + op_params.filter_height = params->filter_height; + op_params.filter_width = params->filter_width; + op_params.padding_values.height = data->padding.height; + op_params.padding_values.width = data->padding.width; + op_params.float_activation_min = data->activation_min_f32; + op_params.float_activation_max = data->activation_max_f32; + reference_ops::AveragePool(op_params, tflite::micro::GetTensorShape(input), + tflite::micro::GetTensorData(input), + tflite::micro::GetTensorShape(output), + tflite::micro::GetTensorData(output)); +} + +void MaxPoolingEvalFloat(TfLiteContext* context, TfLiteNode* node, + TfLitePoolParams* params, const OpDataPooling* data, + const TfLiteEvalTensor* input, + TfLiteEvalTensor* output) { + tflite::PoolParams op_params; + op_params.stride_height = params->stride_height; + op_params.stride_width = params->stride_width; + op_params.filter_height = params->filter_height; + op_params.filter_width = params->filter_width; + 
op_params.padding_values.height = data->padding.height; + op_params.padding_values.width = data->padding.width; + op_params.float_activation_min = data->activation_min_f32; + op_params.float_activation_max = data->activation_max_f32; + reference_ops::MaxPool(op_params, tflite::micro::GetTensorShape(input), + tflite::micro::GetTensorData(input), + tflite::micro::GetTensorShape(output), + tflite::micro::GetTensorData(output)); +} + +} // namespace tflite diff --git a/tensorflow/lite/micro/kernels/pooling_test.cc b/tensorflow/lite/micro/kernels/pooling_test.cc new file mode 100644 index 0000000..b0ee3a3 --- /dev/null +++ b/tensorflow/lite/micro/kernels/pooling_test.cc @@ -0,0 +1,705 @@ +/* Copyright 2023 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/ + +#include + +#include "tensorflow/lite/c/builtin_op_data.h" +#include "tensorflow/lite/c/common.h" +#include "tensorflow/lite/micro/kernels/kernel_runner.h" +#include "tensorflow/lite/micro/test_helpers.h" +#include "tensorflow/lite/micro/testing/micro_test.h" + +namespace tflite { +namespace testing { +namespace { + +template +void ValidatePoolingGoldens(TfLiteTensor* tensors, int tensors_size, + const TFLMRegistration registration, + const int filter_height, const int filter_width, + const int stride_height, const int stride_width, + const T* golden, const int output_length, + TfLitePadding padding, + TfLiteFusedActivation activation, T* output_data) { + int inputs_array_data[] = {1, 0}; + TfLiteIntArray* inputs_array = IntArrayFromInts(inputs_array_data); + int outputs_array_data[] = {1, 1}; + TfLiteIntArray* outputs_array = IntArrayFromInts(outputs_array_data); + + TfLitePoolParams builtin_data = {padding, + stride_width, + stride_height, + filter_width, + filter_height, + activation, + {}}; + + micro::KernelRunner runner(registration, tensors, tensors_size, inputs_array, + outputs_array, + reinterpret_cast(&builtin_data)); + + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, runner.InitAndPrepare()); + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, runner.Invoke()); + + for (int i = 0; i < output_length; ++i) { + TF_LITE_MICRO_EXPECT_NEAR(golden[i], output_data[i], 1e-5f); + } +} + +void TestAveragePoolFloat(int* input_dims_data, const float* input_data, + const int filter_height, const int filter_width, + const int stride_height, const int stride_width, + const float* expected_output_data, + int* output_dims_data, TfLitePadding padding, + TfLiteFusedActivation activation, + float* output_data) { + TfLiteIntArray* input_dims = IntArrayFromInts(input_dims_data); + TfLiteIntArray* output_dims = IntArrayFromInts(output_dims_data); + const int output_dims_count = ElementCount(*output_dims); + + constexpr 
int inputs_size = 1; + constexpr int outputs_size = 1; + constexpr int tensors_size = inputs_size + outputs_size; + TfLiteTensor tensors[tensors_size] = { + CreateTensor(input_data, input_dims), + CreateTensor(output_data, output_dims), + }; + + const TFLMRegistration registration = Register_AVERAGE_POOL_2D(); + + ValidatePoolingGoldens(tensors, tensors_size, registration, filter_height, + filter_width, stride_height, stride_width, + expected_output_data, output_dims_count, padding, + activation, output_data); +} + +template +void TestAveragePoolQuantized( + int* input_dims_data, const T* input_data, const float input_scale, + const int input_zero_point, const int filter_height, const int filter_width, + const int stride_height, const int stride_width, + const T* expected_output_data, int* output_dims_data, + const float output_scale, const int output_zero_point, + TfLitePadding padding, TfLiteFusedActivation activation, T* output_data) { + static_assert(sizeof(T) == 1 || sizeof(T) == 2, + "Only int8_t/int16_t data types allowed."); + + TfLiteIntArray* input_dims = IntArrayFromInts(input_dims_data); + TfLiteIntArray* output_dims = IntArrayFromInts(output_dims_data); + const int output_dims_count = ElementCount(*output_dims); + + constexpr int inputs_size = 1; + constexpr int outputs_size = 1; + constexpr int tensors_size = inputs_size + outputs_size; + TfLiteTensor tensors[tensors_size] = { + CreateQuantizedTensor(input_data, input_dims, input_scale, + input_zero_point), + CreateQuantizedTensor(output_data, output_dims, output_scale, + output_zero_point), + }; + + const TFLMRegistration registration = Register_AVERAGE_POOL_2D(); + ValidatePoolingGoldens(tensors, tensors_size, registration, filter_height, + filter_width, stride_height, stride_width, + expected_output_data, output_dims_count, padding, + activation, output_data); +} + +void TestMaxPoolFloat(int* input_dims_data, const float* input_data, + int filter_width, int filter_height, int stride_width, + int 
stride_height, const float* expected_output_data, + int* output_dims_data, TfLitePadding padding, + TfLiteFusedActivation activation, float* output_data) { + TfLiteIntArray* input_dims = IntArrayFromInts(input_dims_data); + TfLiteIntArray* output_dims = IntArrayFromInts(output_dims_data); + const int output_dims_count = ElementCount(*output_dims); + + constexpr int inputs_size = 1; + constexpr int outputs_size = 1; + constexpr int tensors_size = inputs_size + outputs_size; + TfLiteTensor tensors[tensors_size] = { + CreateTensor(input_data, input_dims), + CreateTensor(output_data, output_dims), + }; + + const TFLMRegistration registration = Register_MAX_POOL_2D(); + ValidatePoolingGoldens(tensors, tensors_size, registration, filter_height, + filter_width, stride_height, stride_width, + expected_output_data, output_dims_count, padding, + activation, output_data); +} + +template +void TestMaxPoolQuantized(int* input_dims_data, const T* input_data, + const float input_scale, const int input_zero_point, + const int filter_height, const int filter_width, + const int stride_height, const int stride_width, + const T* expected_output_data, int* output_dims_data, + const float output_scale, const int output_zero_point, + TfLitePadding padding, + TfLiteFusedActivation activation, T* output_data) { + TfLiteIntArray* input_dims = IntArrayFromInts(input_dims_data); + TfLiteIntArray* output_dims = IntArrayFromInts(output_dims_data); + const int output_dims_count = ElementCount(*output_dims); + + constexpr int inputs_size = 1; + constexpr int outputs_size = 1; + constexpr int tensors_size = inputs_size + outputs_size; + TfLiteTensor tensors[tensors_size] = { + CreateQuantizedTensor(input_data, input_dims, input_scale, + input_zero_point), + CreateQuantizedTensor(output_data, output_dims, output_scale, + output_zero_point), + }; + + const TFLMRegistration registration = Register_MAX_POOL_2D(); + ValidatePoolingGoldens(tensors, tensors_size, registration, filter_height, + 
filter_width, stride_height, stride_width, + expected_output_data, output_dims_count, padding, + activation, output_data); +} + +} // namespace + +} // namespace testing +} // namespace tflite + +TF_LITE_MICRO_TESTS_BEGIN + +TF_LITE_MICRO_TEST(SimpleAveragePoolTestFloat) { + int input_shape[] = {4, 1, 2, 4, 1}; + const float input_values[] = {0, 6, 2, 4, 3, 2, 10, 7}; + const int filter_width = 2; + const int filter_height = 2; + const int stride_width = 2; + const int stride_height = 2; + const float golden[] = {2.75, 5.75}; + int output_shape[] = {4, 1, 1, 2, 1}; + float output_data[2]; + tflite::testing::TestAveragePoolFloat( + input_shape, input_values, filter_height, filter_width, stride_height, + stride_width, golden, output_shape, kTfLitePaddingValid, kTfLiteActNone, + output_data); +} + +TF_LITE_MICRO_TEST(SimpleAveragePoolTestInt8PaddingValidStride2ActNone) { + int input_shape[] = {4, 1, 2, 4, 1}; + const int8_t input_values[] = {0, -24, 8, 16, 12, 8, -40, 28}; + const int filter_width = 2; + const int filter_height = 2; + const int stride_width = 2; + const int stride_height = 2; + const int8_t golden[] = {-1, 3}; + int output_shape[] = {4, 1, 1, 2, 1}; + int8_t output_data[2]; + + const float input_scale = .25; + const int input_zero_point = 0; + const float output_scale = .25; + const int output_zero_point = 0; + tflite::testing::TestAveragePoolQuantized( + input_shape, input_values, input_scale, input_zero_point, filter_height, + filter_width, stride_height, stride_width, golden, output_shape, + output_scale, output_zero_point, kTfLitePaddingValid, kTfLiteActNone, + output_data); +} + +TF_LITE_MICRO_TEST(SimpleAveragePoolTestInt8PaddingValidStride1Stride2Relu) { + int input_shape[] = {4, 1, 2, 4, 1}; + const int8_t input_values[] = {0, -24, 8, 16, 12, 8, -40, 28}; + const int filter_width = 2; + const int filter_height = 2; + const int stride_width = 1; + const int stride_height = 2; + const int8_t golden[] = {0, 0, 3}; + int output_shape[] = {4, 1, 1, 
3, 1}; + int8_t output_data[3]; + + const float input_scale = .25; + const int input_zero_point = 0; + const float output_scale = .25; + const int output_zero_point = 0; + tflite::testing::TestAveragePoolQuantized( + input_shape, input_values, input_scale, input_zero_point, filter_height, + filter_width, stride_height, stride_width, golden, output_shape, + output_scale, output_zero_point, kTfLitePaddingValid, kTfLiteActRelu, + output_data); +} + +TF_LITE_MICRO_TEST( + SimpleAveragePoolTestInt8PaddingValidStride2Stride1ReluN1To1) { + int input_shape[] = {4, 1, 2, 4, 1}; + const int8_t input_values[] = {0, -24, 8, 16, 12, 8, -40, 28}; + const int filter_width = 2; + const int filter_height = 2; + const int stride_width = 2; + const int stride_height = 1; + const int8_t golden[] = {-1, 3}; + int output_shape[] = {4, 1, 1, 2, 1}; + int8_t output_data[2]; + + const float input_scale = .25; + const int input_zero_point = 0; + const float output_scale = .25; + const int output_zero_point = 0; + tflite::testing::TestAveragePoolQuantized( + input_shape, input_values, input_scale, input_zero_point, filter_height, + filter_width, stride_height, stride_width, golden, output_shape, + output_scale, output_zero_point, kTfLitePaddingValid, kTfLiteActReluN1To1, + output_data); +} + +TF_LITE_MICRO_TEST(SimpleAveragePoolTestInt8PaddingValidStride2Relu6) { + int input_shape[] = {4, 1, 2, 4, 1}; + const int8_t input_values[] = {12, -24, 32, 16, 12, 8, 40, 28}; + const int filter_width = 2; + const int filter_height = 2; + const int stride_width = 2; + const int stride_height = 2; + const int8_t golden[] = {2, 24}; + int output_shape[] = {4, 1, 1, 2, 1}; + int8_t output_data[2]; + + const float input_scale = .25; + const int input_zero_point = 0; + const float output_scale = .25; + const int output_zero_point = 0; + tflite::testing::TestAveragePoolQuantized( + input_shape, input_values, input_scale, input_zero_point, filter_height, + filter_width, stride_height, stride_width, golden, 
output_shape, + output_scale, output_zero_point, kTfLitePaddingValid, kTfLiteActRelu6, + output_data); +} + +TF_LITE_MICRO_TEST(SimpleAveragePoolTestInt8PaddingSameStride1ActNone) { + int input_shape[] = {4, 1, 2, 4, 1}; + const int8_t input_values[] = {12, -24, 32, 16, 12, 8, 40, 28}; + const int filter_width = 2; + const int filter_height = 2; + const int stride_width = 1; + const int stride_height = 1; + const int8_t golden[] = {2, 14, 29, 22, 10, 24, 34, 28}; + int output_shape[] = {4, 1, 2, 4, 1}; + int8_t output_data[8]; + + const float input_scale = .25; + const int input_zero_point = 0; + const float output_scale = .25; + const int output_zero_point = 0; + // Output has the same footprint as the input (2x4 with a 2x2 filter and + // stride 1), so SAME padding is required to match the golden values. + tflite::testing::TestAveragePoolQuantized( + input_shape, input_values, input_scale, input_zero_point, filter_height, + filter_width, stride_height, stride_width, golden, output_shape, + output_scale, output_zero_point, kTfLitePaddingSame, kTfLiteActNone, + output_data); +} + +TF_LITE_MICRO_TEST(SimpleAveragePoolTestInt16PaddingValidStride2ActNone) { + int input_shape[] = {4, 1, 2, 4, 1}; + const int16_t input_values[] = {0, -24, 8, 16, 12, 8, -40, 28}; + const int filter_width = 2; + const int filter_height = 2; + const int stride_width = 2; + const int stride_height = 2; + const int16_t golden[] = {-1, 3}; + int output_shape[] = {4, 1, 1, 2, 1}; + int16_t output_data[2]; + + const float input_scale = .25; + const int input_zero_point = 0; + const float output_scale = .25; + const int output_zero_point = 0; + tflite::testing::TestAveragePoolQuantized( + input_shape, input_values, input_scale, input_zero_point, filter_height, + filter_width, stride_height, stride_width, golden, output_shape, + output_scale, output_zero_point, kTfLitePaddingValid, kTfLiteActNone, + output_data); +} + +TF_LITE_MICRO_TEST(SimpleAveragePoolTestInt16PaddingValidStride1Stride2Relu) { + int input_shape[] = {4, 1, 2, 4, 1}; + const int16_t input_values[] = {0, -24, 8, 16, 12, 8, -40, 28}; + const int filter_width = 2; + const
int filter_height = 2; + const int stride_width = 1; + const int stride_height = 2; + const int16_t golden[] = {0, 0, 3}; + int output_shape[] = {4, 1, 1, 3, 1}; + int16_t output_data[3]; + + const float input_scale = .25; + const int input_zero_point = 0; + const float output_scale = .25; + const int output_zero_point = 0; + tflite::testing::TestAveragePoolQuantized( + input_shape, input_values, input_scale, input_zero_point, filter_height, + filter_width, stride_height, stride_width, golden, output_shape, + output_scale, output_zero_point, kTfLitePaddingValid, kTfLiteActRelu, + output_data); +} + +TF_LITE_MICRO_TEST( + SimpleAveragePoolTestInt16PaddingValidStride2Stride1ReluN1To1) { + int input_shape[] = {4, 1, 2, 4, 1}; + const int16_t input_values[] = {0, -24, 8, 16, 12, 8, -40, 28}; + const int filter_width = 2; + const int filter_height = 2; + const int stride_width = 2; + const int stride_height = 1; + const int16_t golden[] = {-1, 3}; + int output_shape[] = {4, 1, 1, 2, 1}; + int16_t output_data[2]; + + const float input_scale = .25; + const int input_zero_point = 0; + const float output_scale = .25; + const int output_zero_point = 0; + tflite::testing::TestAveragePoolQuantized( + input_shape, input_values, input_scale, input_zero_point, filter_height, + filter_width, stride_height, stride_width, golden, output_shape, + output_scale, output_zero_point, kTfLitePaddingValid, kTfLiteActReluN1To1, + output_data); +} + +TF_LITE_MICRO_TEST(SimpleAveragePoolTestInt16PaddingValidStride2Relu6) { + int input_shape[] = {4, 1, 2, 4, 1}; + const int16_t input_values[] = {12, -24, 32, 16, 12, 8, 40, 28}; + const int filter_width = 2; + const int filter_height = 2; + const int stride_width = 2; + const int stride_height = 2; + const int16_t golden[] = {2, 24}; + int output_shape[] = {4, 1, 1, 2, 1}; + int16_t output_data[2]; + + const float input_scale = .25; + const int input_zero_point = 0; + const float output_scale = .25; + const int output_zero_point = 0; + 
tflite::testing::TestAveragePoolQuantized( + input_shape, input_values, input_scale, input_zero_point, filter_height, + filter_width, stride_height, stride_width, golden, output_shape, + output_scale, output_zero_point, kTfLitePaddingValid, kTfLiteActRelu6, + output_data); +} + +TF_LITE_MICRO_TEST(SimpleAveragePoolTestInt16PaddingSameStride1ActNone) { + int input_shape[] = {4, 1, 2, 4, 1}; + const int16_t input_values[] = {12, -24, 32, 16, 12, 8, 40, 28}; + const int filter_width = 2; + const int filter_height = 2; + const int stride_width = 1; + const int stride_height = 1; + const int16_t golden[] = {2, 14, 29, 22, 10, 24, 34, 28}; + int output_shape[] = {4, 1, 2, 4, 1}; + int16_t output_data[8]; + + const float input_scale = .25; + const int input_zero_point = 0; + const float output_scale = .25; + const int output_zero_point = 0; + tflite::testing::TestAveragePoolQuantized( + input_shape, input_values, input_scale, input_zero_point, filter_height, + filter_width, stride_height, stride_width, golden, output_shape, + output_scale, output_zero_point, kTfLitePaddingSame, kTfLiteActNone, + output_data); +} + +TF_LITE_MICRO_TEST(SimpleMaxPoolTestFloat) { + int input_shape[] = {4, 1, 2, 4, 1}; + const float input_values[] = {0, 6, 2, 4, 3, 2, 10, 7}; + const int filter_width = 2; + const int filter_height = 2; + const int stride_width = 2; + const int stride_height = 2; + const float golden[] = {6, 10}; + int output_shape[] = {4, 1, 1, 2, 1}; + float output_data[2]; + tflite::testing::TestMaxPoolFloat(input_shape, input_values, filter_height, + filter_width, stride_height, stride_width, + golden, output_shape, kTfLitePaddingValid, + kTfLiteActNone, output_data); +} + +TF_LITE_MICRO_TEST(SimpleMaxPoolTestFloatRelu) { + int input_shape[] = {4, 1, 2, 4, 1}; + const float input_values[] = {-1, -6, 2, 4, -3, -2, 10.5, 7}; + const int filter_width = 2; + const int filter_height = 2; + const int stride_width = 2; + const int stride_height = 2; + const float golden[] = {0,
10.5}; + int output_shape[] = {4, 1, 1, 2, 1}; + float output_data[2]; + tflite::testing::TestMaxPoolFloat(input_shape, input_values, filter_height, + filter_width, stride_height, stride_width, + golden, output_shape, kTfLitePaddingValid, + kTfLiteActRelu, output_data); +} + +TF_LITE_MICRO_TEST(SimpleMaxPoolTestFloatReluN1To1) { + int input_shape[] = {4, 1, 2, 4, 1}; + const float input_values1[] = {-2.75, -6, 0.2, 0.4, -3, -2, -0.3, 0.7}; + const int filter_width = 2; + const int filter_height = 2; + const int stride_width = 2; + const int stride_height = 2; + const float golden1[] = {-1.0, 0.7}; + int output_shape[] = {4, 1, 1, 2, 1}; + float output_data[2]; + tflite::testing::TestMaxPoolFloat(input_shape, input_values1, filter_height, + filter_width, stride_height, stride_width, + golden1, output_shape, kTfLitePaddingValid, + kTfLiteActReluN1To1, output_data); + + const float input_values2[] = {-2.75, -6, -2, -4, -3, -2, 10, -7}; + const float golden2[] = {-1.0, 1.0}; + tflite::testing::TestMaxPoolFloat(input_shape, input_values2, filter_height, + filter_width, stride_height, stride_width, + golden2, output_shape, kTfLitePaddingValid, + kTfLiteActReluN1To1, output_data); +} + +TF_LITE_MICRO_TEST(SimpleMaxPoolTestFloatRelu6) { + int input_shape[] = {4, 1, 2, 4, 1}; + const float input_values1[] = {-1.5, -6, 12, 4, -3, -2, 10, 7}; + const int filter_width = 2; + const int filter_height = 2; + const int stride_width = 2; + const int stride_height = 2; + const float golden1[] = {0, 6}; + int output_shape[] = {4, 1, 1, 2, 1}; + float output_data[2]; + tflite::testing::TestMaxPoolFloat(input_shape, input_values1, filter_height, + filter_width, stride_height, stride_width, + golden1, output_shape, kTfLitePaddingValid, + kTfLiteActRelu6, output_data); + + const float input_values2[] = {0, 4.5, 12, 4, 3, 2, 10, 7}; + const float golden2[] = {4.5, 6}; + tflite::testing::TestMaxPoolFloat(input_shape, input_values2, filter_height, + filter_width, stride_height, 
stride_width, + golden2, output_shape, kTfLitePaddingValid, + kTfLiteActRelu6, output_data); +} + +TF_LITE_MICRO_TEST(SimpleMaxPoolTestPaddingSameStride1) { + int input_shape[] = {4, 1, 2, 4, 1}; + const float input_values[] = {0, 6, 2, 4, 3, 2, 10, 7}; + const int filter_width = 2; + const int filter_height = 2; + const int stride_width = 1; + const int stride_height = 1; + const float golden[] = {6, 10, 10, 7, 3, 10, 10, 7}; + int output_shape[] = {4, 1, 2, 4, 1}; + float output_data[8]; + tflite::testing::TestMaxPoolFloat(input_shape, input_values, filter_height, + filter_width, stride_height, stride_width, + golden, output_shape, kTfLitePaddingSame, + kTfLiteActNone, output_data); +} + +TF_LITE_MICRO_TEST(SimpleMaxPoolTestPaddingValidStride1) { + int input_shape[] = {4, 1, 2, 4, 1}; + const float input_values[] = {0, 6, 2, 4, 3, 2, 10, 7}; + const int filter_width = 2; + const int filter_height = 2; + const int stride_width = 1; + const int stride_height = 1; + const float golden[] = {6, 10, 10}; + int output_shape[] = {4, 1, 1, 3, 1}; + float output_data[8]; + tflite::testing::TestMaxPoolFloat(input_shape, input_values, filter_height, + filter_width, stride_height, stride_width, + golden, output_shape, kTfLitePaddingValid, + kTfLiteActNone, output_data); +} + +TF_LITE_MICRO_TEST(SimpleMaxPoolTestInt8ActNone) { + int input_shape[] = {4, 1, 2, 4, 1}; + const int8_t input_values1[] = {0, 6, 2, 4, 3, 2, 10, 7}; + const int filter_width = 2; + const int filter_height = 2; + const int stride_width = 2; + const int stride_height = 2; + const int8_t golden1[] = {6, 10}; + int output_shape[] = {4, 1, 1, 2, 1}; + int8_t output_data[2]; + + const float input_scale = 1.0; + const int input_zero_point = 0; + const float output_scale = 1.0; + const int output_zero_point = 0; + tflite::testing::TestMaxPoolQuantized( + input_shape, input_values1, input_scale, input_zero_point, filter_height, + filter_width, stride_height, stride_width, golden1, output_shape, + output_scale, 
output_zero_point, kTfLitePaddingValid, kTfLiteActNone, + output_data); +} + +TF_LITE_MICRO_TEST(MaxPoolTestInt8ActRelu) { + int input_shape[] = {4, 1, 2, 4, 1}; + const int8_t input_values1[] = {-3, -12, 4, 8, -6, -4, 20, 14}; + const int filter_width = 2; + const int filter_height = 2; + const int stride_width = 2; + const int stride_height = 2; + const int8_t golden1[] = {0, 20}; + int output_shape[] = {4, 1, 1, 2, 1}; + int8_t output_data[2]; + + const float input_scale = 0.5; + const int input_zero_point = 0; + const float output_scale = 0.5; + const int output_zero_point = 0; + tflite::testing::TestMaxPoolQuantized( + input_shape, input_values1, input_scale, input_zero_point, filter_height, + filter_width, stride_height, stride_width, golden1, output_shape, + output_scale, output_zero_point, kTfLitePaddingValid, kTfLiteActRelu, + output_data); +} + +TF_LITE_MICRO_TEST(MaxPoolTestInt8ActReluN1To1) { + int input_shape[] = {4, 1, 2, 4, 1}; + const int8_t input_values1[] = {-2, -6, -2, -4, -3, -2, 10, 7}; + const int filter_width = 2; + const int filter_height = 2; + const int stride_width = 2; + const int stride_height = 2; + const int8_t golden1[] = {-1, 1}; + int output_shape[] = {4, 1, 1, 2, 1}; + int8_t output_data[2]; + + const float input_scale = 1.0; + const int input_zero_point = 0; + const float output_scale = 1.0; + const int output_zero_point = 0; + tflite::testing::TestMaxPoolQuantized( + input_shape, input_values1, input_scale, input_zero_point, filter_height, + filter_width, stride_height, stride_width, golden1, output_shape, + output_scale, output_zero_point, kTfLitePaddingValid, kTfLiteActReluN1To1, + output_data); +} + +TF_LITE_MICRO_TEST(MaxPoolTestInt8ActRelu6) { + int input_shape[] = {4, 1, 2, 4, 1}; + const int8_t input_values1[] = {0, -6, 12, 4, -3, -2, 10, 7}; + const int filter_width = 2; + const int filter_height = 2; + const int stride_width = 2; + const int stride_height = 2; + const int8_t golden1[] = {0, 6}; + int output_shape[] = 
{4, 1, 1, 2, 1}; + int8_t output_data[2]; + + const float input_scale = 1.0; + const int input_zero_point = 0; + const float output_scale = 1.0; + const int output_zero_point = 0; + tflite::testing::TestMaxPoolQuantized( + input_shape, input_values1, input_scale, input_zero_point, filter_height, + filter_width, stride_height, stride_width, golden1, output_shape, + output_scale, output_zero_point, kTfLitePaddingValid, kTfLiteActRelu6, + output_data); +} + +TF_LITE_MICRO_TEST(SimpleMaxPoolTestInt16ActNone) { + int input_shape[] = {4, 1, 2, 4, 1}; + const int16_t input_values1[] = {0, 6, 2, 4, 3, 2, 10, 7}; + const int filter_width = 2; + const int filter_height = 2; + const int stride_width = 2; + const int stride_height = 2; + const int16_t golden1[] = {6, 10}; + int output_shape[] = {4, 1, 1, 2, 1}; + int16_t output_data[2]; + + const float input_scale = 1.0; + const int input_zero_point = 0; + const float output_scale = 1.0; + const int output_zero_point = 0; + tflite::testing::TestMaxPoolQuantized( + input_shape, input_values1, input_scale, input_zero_point, filter_height, + filter_width, stride_height, stride_width, golden1, output_shape, + output_scale, output_zero_point, kTfLitePaddingValid, kTfLiteActNone, + output_data); +} + +TF_LITE_MICRO_TEST(MaxPoolTestInt16ActRelu) { + int input_shape[] = {4, 1, 2, 4, 1}; + const int16_t input_values1[] = {-3, -12, 4, 8, -6, -4, 20, 14}; + const int filter_width = 2; + const int filter_height = 2; + const int stride_width = 2; + const int stride_height = 2; + const int16_t golden1[] = {0, 20}; + int output_shape[] = {4, 1, 1, 2, 1}; + int16_t output_data[2]; + + const float input_scale = 0.5; + const int input_zero_point = 0; + const float output_scale = 0.5; + const int output_zero_point = 0; + tflite::testing::TestMaxPoolQuantized( + input_shape, input_values1, input_scale, input_zero_point, filter_height, + filter_width, stride_height, stride_width, golden1, output_shape, + output_scale, output_zero_point, 
kTfLitePaddingValid, kTfLiteActRelu, + output_data); +} + +TF_LITE_MICRO_TEST(MaxPoolTestInt16ActReluN1To1) { + int input_shape[] = {4, 1, 2, 4, 1}; + const int16_t input_values1[] = {-2, -6, -2, -4, -3, -2, 10, 7}; + const int filter_width = 2; + const int filter_height = 2; + const int stride_width = 2; + const int stride_height = 2; + const int16_t golden1[] = {-1, 1}; + int output_shape[] = {4, 1, 1, 2, 1}; + int16_t output_data[2]; + + const float input_scale = 1.0; + const int input_zero_point = 0; + const float output_scale = 1.0; + const int output_zero_point = 0; + tflite::testing::TestMaxPoolQuantized( + input_shape, input_values1, input_scale, input_zero_point, filter_height, + filter_width, stride_height, stride_width, golden1, output_shape, + output_scale, output_zero_point, kTfLitePaddingValid, kTfLiteActReluN1To1, + output_data); +} + +TF_LITE_MICRO_TEST(MaxPoolTestInt16ActRelu6) { + int input_shape[] = {4, 1, 2, 4, 1}; + const int16_t input_values1[] = {0, -6, 12, 4, -3, -2, 10, 7}; + const int filter_width = 2; + const int filter_height = 2; + const int stride_width = 2; + const int stride_height = 2; + const int16_t golden1[] = {0, 6}; + int output_shape[] = {4, 1, 1, 2, 1}; + int16_t output_data[2]; + + const float input_scale = 1.0; + const int input_zero_point = 0; + const float output_scale = 1.0; + const int output_zero_point = 0; + tflite::testing::TestMaxPoolQuantized( + input_shape, input_values1, input_scale, input_zero_point, filter_height, + filter_width, stride_height, stride_width, golden1, output_shape, + output_scale, output_zero_point, kTfLitePaddingValid, kTfLiteActRelu6, + output_data); +} + +TF_LITE_MICRO_TESTS_END diff --git a/tensorflow/lite/micro/kernels/prelu.cc b/tensorflow/lite/micro/kernels/prelu.cc new file mode 100644 index 0000000..66a017b --- /dev/null +++ b/tensorflow/lite/micro/kernels/prelu.cc @@ -0,0 +1,75 @@ +/* Copyright 2021 The TensorFlow Authors. All Rights Reserved. 
+ +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#include "tensorflow/lite/kernels/internal/reference/prelu.h" + +#include <cstdint> + +#include "tensorflow/lite/c/common.h" +#include "tensorflow/lite/kernels/internal/quantization_util.h" +#include "tensorflow/lite/kernels/internal/tensor_ctypes.h" +#include "tensorflow/lite/kernels/kernel_util.h" +#include "tensorflow/lite/micro/kernels/kernel_util.h" +#include "tensorflow/lite/micro/kernels/prelu.h" +#include "tensorflow/lite/micro/micro_log.h" + +namespace tflite { + +void* PreluInit(TfLiteContext* context, const char* buffer, size_t length) { + TFLITE_DCHECK(context->AllocatePersistentBuffer != nullptr); + return context->AllocatePersistentBuffer(context, sizeof(PreluParams)); +} + +TfLiteStatus PreluEval(TfLiteContext* context, TfLiteNode* node) { + TFLITE_DCHECK(node->user_data != nullptr); + const PreluParams& params = + *(static_cast<PreluParams*>(node->user_data)); + + const TfLiteEvalTensor* input = tflite::micro::GetEvalInput(context, node, 0); + const TfLiteEvalTensor* alpha = tflite::micro::GetEvalInput(context, node, 1); + TfLiteEvalTensor* output = tflite::micro::GetEvalOutput(context, node, 0); + + switch (input->type) { + case kTfLiteFloat32: { + BroadcastPrelu4DSlowFloat(tflite::micro::GetTensorShape(input), + tflite::micro::GetTensorData<float>(input), + tflite::micro::GetTensorShape(alpha), + tflite::micro::GetTensorData<float>(alpha), + tflite::micro::GetTensorShape(output), + tflite::micro::GetTensorData<float>(output)); + return kTfLiteOk; + } break; + case kTfLiteInt8: { + reference_ops::BroadcastPrelu4DSlow( + params, tflite::micro::GetTensorShape(input), + tflite::micro::GetTensorData<int8_t>(input), + tflite::micro::GetTensorShape(alpha), + tflite::micro::GetTensorData<int8_t>(alpha), + tflite::micro::GetTensorShape(output), + tflite::micro::GetTensorData<int8_t>(output)); + return kTfLiteOk; + } break; + default: + MicroPrintf("Only float32 and uint8_t are supported currently, got %s.", + TfLiteTypeGetName(input->type)); + return kTfLiteError; + } +} + +TFLMRegistration Register_PRELU() { + return tflite::micro::RegisterOp(PreluInit, PreluPrepare, PreluEval); +} + +} // namespace tflite diff --git a/tensorflow/lite/micro/kernels/prelu.h b/tensorflow/lite/micro/kernels/prelu.h new file mode 100644 index 0000000..571d1e8 --- /dev/null +++ b/tensorflow/lite/micro/kernels/prelu.h @@ -0,0 +1,39 @@ +/* Copyright 2021 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License.
+==============================================================================*/ + +#ifndef TENSORFLOW_LITE_MICRO_KERNELS_PRELU_H_ +#define TENSORFLOW_LITE_MICRO_KERNELS_PRELU_H_ + +#include "tensorflow/lite/c/common.h" +#include "tensorflow/lite/kernels/internal/types.h" + +namespace tflite { + +TfLiteStatus CalculatePreluParams(const TfLiteTensor* input, + const TfLiteTensor* alpha, + TfLiteTensor* output, PreluParams* params); + +void BroadcastPrelu4DSlowFloat(const RuntimeShape& unextended_input1_shape, + const float* input1_data, + const RuntimeShape& unextended_input2_shape, + const float* input2_data, + const RuntimeShape& unextended_output_shape, + float* output_data); + +TfLiteStatus PreluPrepare(TfLiteContext* context, TfLiteNode* node); + +} // namespace tflite + +#endif // TENSORFLOW_LITE_MICRO_KERNELS_PRELU_H_ diff --git a/tensorflow/lite/micro/kernels/prelu_common.cc b/tensorflow/lite/micro/kernels/prelu_common.cc new file mode 100644 index 0000000..1a89cad --- /dev/null +++ b/tensorflow/lite/micro/kernels/prelu_common.cc @@ -0,0 +1,105 @@ +/* Copyright 2021 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/ + +#include <cstdint> + +#include "tensorflow/lite/c/common.h" +#include "tensorflow/lite/kernels/internal/quantization_util.h" +#include "tensorflow/lite/kernels/internal/reference/prelu.h" +#include "tensorflow/lite/kernels/internal/tensor_ctypes.h" +#include "tensorflow/lite/kernels/kernel_util.h" +#include "tensorflow/lite/micro/kernels/kernel_util.h" +#include "tensorflow/lite/micro/kernels/prelu.h" + +namespace tflite { + +TfLiteStatus CalculatePreluParams(const TfLiteTensor* input, + const TfLiteTensor* alpha, + TfLiteTensor* output, PreluParams* params) { + if (output->type == kTfLiteInt8 || output->type == kTfLiteInt16) { + double real_multiplier_1 = static_cast<double>(input->params.scale) / + static_cast<double>(output->params.scale); + double real_multiplier_2 = static_cast<double>(input->params.scale) * + static_cast<double>(alpha->params.scale) / + static_cast<double>(output->params.scale); + QuantizeMultiplier(real_multiplier_1, &params->output_multiplier_1, + &params->output_shift_1); + QuantizeMultiplier(real_multiplier_2, &params->output_multiplier_2, + &params->output_shift_2); + + params->input_offset = -input->params.zero_point; + params->alpha_offset = -alpha->params.zero_point; + params->output_offset = output->params.zero_point; + } + + return kTfLiteOk; +} + +void BroadcastPrelu4DSlowFloat(const RuntimeShape& unextended_input1_shape, + const float* input1_data, + const RuntimeShape& unextended_input2_shape, + const float* input2_data, + const RuntimeShape& unextended_output_shape, + float* output_data) { + TFLITE_DCHECK_LE(unextended_input1_shape.DimensionsCount(), 4); + TFLITE_DCHECK_LE(unextended_input2_shape.DimensionsCount(), 4); + TFLITE_DCHECK_LE(unextended_output_shape.DimensionsCount(), 4); + const RuntimeShape output_shape = + RuntimeShape::ExtendedShape(4, unextended_output_shape); + + NdArrayDesc<4> desc1; + NdArrayDesc<4> desc2; + NdArrayDescsForElementwiseBroadcast(unextended_input1_shape, + unextended_input2_shape, &desc1, &desc2); + + for (int b = 0; b < output_shape.Dims(0); ++b) { + for (int y = 0; y < output_shape.Dims(1); ++y) { + for (int x = 0; x < output_shape.Dims(2); ++x) { + for (int c = 0; c < output_shape.Dims(3); ++c) { + auto out_idx = Offset(output_shape, b, y, x, c); + auto in1_idx = SubscriptToIndex(desc1, b, y, x, c); + auto in2_idx = SubscriptToIndex(desc2, b, y, x, c); + auto in1_val = input1_data[in1_idx]; + auto in2_val = input2_data[in2_idx]; + output_data[out_idx] = in1_val >= 0.0f ? in1_val : in1_val * in2_val; + } + } + } + } +} + +TfLiteStatus PreluPrepare(TfLiteContext* context, TfLiteNode* node) { + TFLITE_DCHECK(node->user_data != nullptr); + PreluParams* params = static_cast<PreluParams*>(node->user_data); + + MicroContext* micro_context = GetMicroContext(context); + + TfLiteTensor* input = micro_context->AllocateTempInputTensor(node, 0); + TF_LITE_ENSURE(context, input != nullptr); + TfLiteTensor* alpha = micro_context->AllocateTempInputTensor(node, 1); + TF_LITE_ENSURE(context, alpha != nullptr); + TfLiteTensor* output = micro_context->AllocateTempOutputTensor(node, 0); + TF_LITE_ENSURE(context, output != nullptr); + + TF_LITE_ENSURE_OK(context, + CalculatePreluParams(input, alpha, output, params)); + + micro_context->DeallocateTempTfLiteTensor(input); + micro_context->DeallocateTempTfLiteTensor(alpha); + micro_context->DeallocateTempTfLiteTensor(output); + return kTfLiteOk; +} + +} // namespace tflite diff --git a/tensorflow/lite/micro/kernels/prelu_test.cc b/tensorflow/lite/micro/kernels/prelu_test.cc new file mode 100644 index 0000000..e406034 --- /dev/null +++ b/tensorflow/lite/micro/kernels/prelu_test.cc @@ -0,0 +1,159 @@ +/* Copyright 2021 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License.
+You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#include "tensorflow/lite/c/builtin_op_data.h" +#include "tensorflow/lite/c/common.h" +#include "tensorflow/lite/micro/kernels/kernel_runner.h" +#include "tensorflow/lite/micro/test_helpers.h" +#include "tensorflow/lite/micro/testing/micro_test.h" + +namespace tflite { +namespace testing { +namespace { + +template +void ValidatePreluGoldens(TfLiteTensor* tensors, int tensors_size, + const T* golden, const int output_length, + T* output_data) { + int inputs_array_data[] = {2, 0, 1}; + TfLiteIntArray* inputs_array = IntArrayFromInts(inputs_array_data); + int outputs_array_data[] = {1, 2}; + TfLiteIntArray* outputs_array = IntArrayFromInts(outputs_array_data); + + const TFLMRegistration registration = tflite::Register_PRELU(); + micro::KernelRunner runner(registration, tensors, tensors_size, inputs_array, + outputs_array, + /*builtin_data=*/nullptr); + + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, runner.InitAndPrepare()); + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, runner.Invoke()); + + for (int i = 0; i < output_length; ++i) { + TF_LITE_MICRO_EXPECT_NEAR(golden[i], output_data[i], 1e-5f); + } +} + +void TestPreluFloat(int* input_dims_data, const float* input_data, + int* alpha_dims_data, const float* alpha_data, + const float* expected_output_data, int* output_dims_data, + float* output_data) { + TfLiteIntArray* input_dims = IntArrayFromInts(input_dims_data); + TfLiteIntArray* alpha_dims = IntArrayFromInts(alpha_dims_data); + TfLiteIntArray* output_dims = IntArrayFromInts(output_dims_data); + const int 
output_dims_count = ElementCount(*output_dims); + constexpr int inputs_size = 2; + constexpr int outputs_size = 1; + constexpr int tensors_size = inputs_size + outputs_size; + TfLiteTensor tensors[tensors_size] = { + CreateTensor(input_data, input_dims), + CreateTensor(alpha_data, alpha_dims), + CreateTensor(output_data, output_dims), + }; + + ValidatePreluGoldens(tensors, tensors_size, expected_output_data, + output_dims_count, output_data); +} + +template +void TestPreluQuantized(int* input_dims_data, const float* input_data, + T* input_quantized, const float input_scale, + const int input_zero_point, int* alpha_dims_data, + const float* alpha_data, T* alpha_quantized, + const float alpha_scale, const int alpha_zero_point, + const float* golden, T* golden_quantized, + const float output_scale, const int output_zero_point, + int* output_dims_data, T* output_data) { + TfLiteIntArray* input_dims = IntArrayFromInts(input_dims_data); + TfLiteIntArray* alpha_dims = IntArrayFromInts(alpha_dims_data); + TfLiteIntArray* output_dims = IntArrayFromInts(output_dims_data); + const int output_dims_count = ElementCount(*output_dims); + constexpr int inputs_size = 2; + constexpr int outputs_size = 1; + constexpr int tensors_size = inputs_size + outputs_size; + TfLiteTensor tensors[tensors_size] = { + CreateQuantizedTensor(input_data, input_quantized, input_dims, + input_scale, input_zero_point), + CreateQuantizedTensor(alpha_data, alpha_quantized, alpha_dims, + alpha_scale, alpha_zero_point), + CreateQuantizedTensor(output_data, output_dims, output_scale, + output_zero_point), + }; + + Quantize(golden, golden_quantized, output_dims_count, output_scale, + output_zero_point); + + ValidatePreluGoldens(tensors, tensors_size, golden_quantized, + output_dims_count, output_data); +} +} // namespace +} // namespace testing +} // namespace tflite + +TF_LITE_MICRO_TESTS_BEGIN + +TF_LITE_MICRO_TEST(FloatPreluActivationsOpTest) { + int input_shape[] = {3, 2, 2, 3}; + const float 
input_values[] = { + 0.0f, 0.0f, 0.0f, // Row 1, Column 1 + 1.0f, 1.0f, 1.0f, // Row 1, Column 2 + -1.0f, -1.0f, -1.0f, // Row 2, Column 1 + -2.0f, -2.0f, -2.0f, // Row 1, Column 2 + }; + int alpha_shape[] = {3, 1, 1, 3}; + const float alpha_values[] = {0.0f, 1.0f, 2.0f}; + int output_shape[] = {3, 2, 2, 3}; + const float golden[] = { + 0.0f, 0.0f, 0.0f, // Row 1, Column 1 + 1.0f, 1.0f, 1.0f, // Row 1, Column 2 + 0.0f, -1.0f, -2.0f, // Row 2, Column 1 + 0.0f, -2.0f, -4.0f, // Row 1, Column 2 + }; + const int output_dims_count = 12; + float output_data[output_dims_count]; + tflite::testing::TestPreluFloat(input_shape, input_values, alpha_shape, + alpha_values, golden, output_shape, + output_data); +} + +TF_LITE_MICRO_TEST(QuantizedInt8PreluActivationsOpTest) { + int input_shape[] = {3, 2, 2, 3}; + const float input_values[] = { + 0.0f, 0.0f, 0.0f, // Row 1, Column 1 + 0.5f, 0.5f, 0.5f, // Row 1, Column 2 + -1.0f, -1.0f, -1.0f, // Row 2, Column 1 + -0.25f, -0.25f, -0.25f, // Row 1, Column 2 + }; + int alpha_shape[] = {3, 1, 1, 3}; + const float alpha_values[] = {0.0f, 0.5f, -0.5f}; + int output_shape[] = {3, 2, 2, 3}; + const float golden[] = { + 0.0f, 0.0f, 0.0f, // Row 1, Column 1 + 0.5f, 0.5f, 0.5f, // Row 1, Column 2 + 0.0f, -0.5f, 0.5f, // Row 2, Column 1 + 0.0f, -0.125f, 0.125f, // Row 1, Column 2 + }; + const int dims_count = 12; + int8_t input_quantized[dims_count]; + int8_t alpha_quantized[3]; + int8_t golden_quantized[dims_count]; + float scale = 2.0 / 255.0; + int zero_point = 0; + int8_t output_data[dims_count]; + tflite::testing::TestPreluQuantized( + input_shape, input_values, input_quantized, scale, zero_point, + alpha_shape, alpha_values, alpha_quantized, scale, zero_point, golden, + golden_quantized, scale, zero_point, output_shape, output_data); +} +TF_LITE_MICRO_TESTS_END diff --git a/tensorflow/lite/micro/kernels/quantization_util_test.cc b/tensorflow/lite/micro/kernels/quantization_util_test.cc new file mode 100644 index 0000000..76ee9ee --- 
/dev/null +++ b/tensorflow/lite/micro/kernels/quantization_util_test.cc @@ -0,0 +1,465 @@ +/* Copyright 2017 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#include "tensorflow/lite/kernels/internal/quantization_util.h" + +#include "tensorflow/lite/micro/testing/micro_test.h" + +namespace tflite { +namespace { + +template +void RunSafeCastTests() { + const IntOut imax = std::numeric_limits::max(); + TF_LITE_MICRO_EXPECT_GT(imax, 0); + const IntOut imin = std::numeric_limits::min(); + const bool s = std::numeric_limits::is_signed; + if (s) { + TF_LITE_MICRO_EXPECT_LT(static_cast(imin), 0); + } else { + TF_LITE_MICRO_EXPECT_EQ(static_cast(0), imin); + } + + // Some basic tests. 
+ TF_LITE_MICRO_EXPECT_EQ(SafeCast(static_cast(0.0)), + static_cast(0)); + TF_LITE_MICRO_EXPECT_EQ(SafeCast(static_cast(-0.0)), + static_cast(0)); + TF_LITE_MICRO_EXPECT_EQ(SafeCast(static_cast(0.99)), + static_cast(0)); + TF_LITE_MICRO_EXPECT_EQ(SafeCast(static_cast(1.0)), + static_cast(1)); + TF_LITE_MICRO_EXPECT_EQ(SafeCast(static_cast(1.01)), + static_cast(1)); + TF_LITE_MICRO_EXPECT_EQ(SafeCast(static_cast(1.99)), + static_cast(1)); + TF_LITE_MICRO_EXPECT_EQ(SafeCast(static_cast(2.0)), + static_cast(2)); + TF_LITE_MICRO_EXPECT_EQ(SafeCast(static_cast(2.01)), + static_cast(2)); + TF_LITE_MICRO_EXPECT_EQ(SafeCast(static_cast(-0.99)), + static_cast(0)); + TF_LITE_MICRO_EXPECT_EQ(SafeCast(static_cast(-1.0)), + s ? static_cast(-1) : static_cast(0)); + TF_LITE_MICRO_EXPECT_EQ(SafeCast(static_cast(-1.01)), + s ? static_cast(-1) : static_cast(0)); + TF_LITE_MICRO_EXPECT_EQ(SafeCast(static_cast(-1.99)), + s ? static_cast(-1) : static_cast(0)); + TF_LITE_MICRO_EXPECT_EQ(SafeCast(static_cast(-2.0)), + s ? static_cast(-2) : static_cast(0)); + TF_LITE_MICRO_EXPECT_EQ(SafeCast(static_cast(-2.01)), + s ? static_cast(-2) : static_cast(0)); + TF_LITE_MICRO_EXPECT_EQ(SafeCast(static_cast(117.9)), + static_cast(117)); + TF_LITE_MICRO_EXPECT_EQ(SafeCast(static_cast(118.0)), + static_cast(118)); + TF_LITE_MICRO_EXPECT_EQ(SafeCast(static_cast(118.1)), + static_cast(118)); + TF_LITE_MICRO_EXPECT_EQ( + SafeCast(static_cast(-117.9)), + s ? static_cast(-117) : static_cast(0)); + TF_LITE_MICRO_EXPECT_EQ( + SafeCast(static_cast(-118.0)), + s ? static_cast(-118) : static_cast(0)); + TF_LITE_MICRO_EXPECT_EQ( + SafeCast(static_cast(-118.1)), + s ? static_cast(-118) : static_cast(0)); + + // Some edge cases. 
+ TF_LITE_MICRO_EXPECT_EQ(SafeCast(std::numeric_limits::max()), + imax); + TF_LITE_MICRO_EXPECT_EQ( + SafeCast(std::numeric_limits::lowest()), imin); + TF_LITE_MICRO_EXPECT_EQ( + SafeCast(std::numeric_limits::infinity()), imax); + TF_LITE_MICRO_EXPECT_EQ( + SafeCast(-std::numeric_limits::infinity()), imin); + TF_LITE_MICRO_EXPECT_EQ( + SafeCast(std::numeric_limits::quiet_NaN()), + static_cast(0)); + + // Some larger numbers. + if (sizeof(IntOut) >= static_cast(4) && + sizeof(FloatIn) > static_cast(4)) { + TF_LITE_MICRO_EXPECT_EQ(SafeCast(static_cast(0x76543210)), + static_cast(0x76543210)); + } + + if (sizeof(FloatIn) > sizeof(IntOut)) { + // Check values near imax. + TF_LITE_MICRO_EXPECT_EQ( + SafeCast(static_cast(static_cast(imax) + + static_cast(0.1))), + imax); + TF_LITE_MICRO_EXPECT_EQ( + SafeCast(static_cast(static_cast(imax) + + static_cast(0.99))), + imax); + TF_LITE_MICRO_EXPECT_EQ( + SafeCast(static_cast(static_cast(imax) + + static_cast(1.0))), + imax); + TF_LITE_MICRO_EXPECT_EQ( + SafeCast(static_cast(static_cast(imax) + + static_cast(1.99))), + imax); + TF_LITE_MICRO_EXPECT_EQ( + SafeCast(static_cast(static_cast(imax) + + static_cast(2.0))), + imax); + TF_LITE_MICRO_EXPECT_EQ( + SafeCast(static_cast(static_cast(imax) - + static_cast(0.1))), + imax - 1); + TF_LITE_MICRO_EXPECT_EQ( + SafeCast(static_cast(static_cast(imax) - + static_cast(0.99))), + imax - 1); + TF_LITE_MICRO_EXPECT_EQ( + SafeCast(static_cast(static_cast(imax) - + static_cast(1.0))), + imax - 1); + TF_LITE_MICRO_EXPECT_EQ( + SafeCast(static_cast(static_cast(imax) - + static_cast(1.01))), + imax - 2); + TF_LITE_MICRO_EXPECT_EQ( + SafeCast(static_cast(static_cast(imax) - + static_cast(1.99))), + imax - 2); + TF_LITE_MICRO_EXPECT_EQ( + SafeCast(static_cast(static_cast(imax) - + static_cast(2.0))), + imax - 2); + TF_LITE_MICRO_EXPECT_EQ( + SafeCast(static_cast(static_cast(imax) - + static_cast(2.01))), + imax - 3); + } + + // Check values considerably larger in magnitude than imin and imax + 
TF_LITE_MICRO_EXPECT_EQ( + SafeCast(static_cast(static_cast(imax) * 2)), + imax); + TF_LITE_MICRO_EXPECT_EQ( + SafeCast(static_cast(static_cast(imax) * 20)), + imax); + TF_LITE_MICRO_EXPECT_EQ( + SafeCast(static_cast(static_cast(imax) * 100)), + imax); + TF_LITE_MICRO_EXPECT_EQ( + SafeCast(static_cast(static_cast(imin) * 2)), + imin); + TF_LITE_MICRO_EXPECT_EQ( + SafeCast(static_cast(static_cast(imin) * 20)), + imin); + TF_LITE_MICRO_EXPECT_EQ( + SafeCast(static_cast(static_cast(imin) * 100)), + imin); +} + +} // namespace +} // namespace tflite + +TF_LITE_MICRO_TESTS_BEGIN + +TF_LITE_MICRO_TEST(QuantizationUtilTest_SafeCast) { + tflite::RunSafeCastTests(); + tflite::RunSafeCastTests(); + tflite::RunSafeCastTests(); + tflite::RunSafeCastTests(); + tflite::RunSafeCastTests(); + tflite::RunSafeCastTests(); + tflite::RunSafeCastTests(); + tflite::RunSafeCastTests(); + tflite::RunSafeCastTests(); + tflite::RunSafeCastTests(); + tflite::RunSafeCastTests(); + tflite::RunSafeCastTests(); + tflite::RunSafeCastTests(); + tflite::RunSafeCastTests(); + tflite::RunSafeCastTests(); + tflite::RunSafeCastTests(); +} + +// Example taken from http://www.tensorflow.org/performance/quantization +// +// Quantized | Float +// --------- | ----- +// 0 | -10.0 +// 255 | 30.0 +// 128 | 10.0 +TF_LITE_MICRO_TEST(QuantizationUtilTest_ChooseQuantizationParams) { + tflite::QuantizationParams qp = + tflite::ChooseQuantizationParams(-10.0, 30.0); + TF_LITE_MICRO_EXPECT_NEAR(qp.scale, 0.156863, 1e-5); + TF_LITE_MICRO_EXPECT_EQ(qp.zero_point, 64); +} + +TF_LITE_MICRO_TEST( + QuantizationUtilTest_ChooseQuantizationParamsZeroPointOnMinBoundary) { + tflite::QuantizationParams qp = + tflite::ChooseQuantizationParams(0.0, 30.0); + TF_LITE_MICRO_EXPECT_NEAR(qp.scale, 0.117647, 1e-5); + TF_LITE_MICRO_EXPECT_EQ(qp.zero_point, 0); +} + +TF_LITE_MICRO_TEST( + QuantizationUtilTest_ChooseQuantizationParamsEmptyRangeZero) { + tflite::QuantizationParams qp = + tflite::ChooseQuantizationParams(0.0, 0.0); + 
TF_LITE_MICRO_EXPECT_NEAR(qp.scale, 0.0, 1e-5); + TF_LITE_MICRO_EXPECT_EQ(qp.zero_point, 0); +} + +TF_LITE_MICRO_TEST( + QuantizationUtilTest_ChooseQuantizationParamsZeroPointOnMaxBoundary) { + tflite::QuantizationParams qp = + tflite::ChooseQuantizationParams(-10.0, 0.0); + TF_LITE_MICRO_EXPECT_NEAR(qp.scale, 0.039216, 1e-5); + TF_LITE_MICRO_EXPECT_EQ(qp.zero_point, 255); +} + +TF_LITE_MICRO_TEST(QuantizationUtilTest_IntegerFrExp) { + int shift; + int64_t result = tflite::IntegerFrExp(0.0, &shift); + TF_LITE_MICRO_EXPECT_EQ(0, result); + TF_LITE_MICRO_EXPECT_EQ(0, shift); + + result = tflite::IntegerFrExp(1.0, &shift); + TF_LITE_MICRO_EXPECT_NEAR(0x40000000, result, 1ll); + TF_LITE_MICRO_EXPECT_EQ(1, shift); + + result = tflite::IntegerFrExp(0.25, &shift); + TF_LITE_MICRO_EXPECT_NEAR(0x40000000, result, 1ll); + TF_LITE_MICRO_EXPECT_EQ(-1, shift); + + result = tflite::IntegerFrExp(-1.0, &shift); + TF_LITE_MICRO_EXPECT_NEAR(-(1 << 30), result, 1ll); + TF_LITE_MICRO_EXPECT_EQ(1, shift); + + result = tflite::IntegerFrExp(123.45, &shift); + TF_LITE_MICRO_EXPECT_NEAR(2071147315, result, 1ll); + TF_LITE_MICRO_EXPECT_EQ(7, shift); + + result = tflite::IntegerFrExp(static_cast(NAN), &shift); + TF_LITE_MICRO_EXPECT_NEAR(0, result, 1); + TF_LITE_MICRO_EXPECT_EQ(0x7fffffff, shift); + + result = tflite::IntegerFrExp(static_cast(INFINITY), &shift); + TF_LITE_MICRO_EXPECT_NEAR(std::numeric_limits::max(), result, 1); + TF_LITE_MICRO_EXPECT_EQ(0x7fffffff, shift); + + result = tflite::IntegerFrExp(-static_cast(INFINITY), &shift); + TF_LITE_MICRO_EXPECT_NEAR(std::numeric_limits::min(), result, 1); + TF_LITE_MICRO_EXPECT_EQ(0x7fffffff, shift); +} + +TF_LITE_MICRO_TEST(QuantizationUtilTest_IntegerFrExpVersusDouble) { + int shift; + int32_t result = tflite::IntegerFrExp(0.0, &shift); + TF_LITE_MICRO_EXPECT_EQ(result, 0); + TF_LITE_MICRO_EXPECT_EQ(shift, 0); + + int double_shift; + double double_result = std::frexp(0.0, &double_shift); + TF_LITE_MICRO_EXPECT_EQ(double_result, 0); + 
TF_LITE_MICRO_EXPECT_EQ(double_shift, 0); + + result = tflite::IntegerFrExp(1.0, &shift); + TF_LITE_MICRO_EXPECT_NEAR(result, 0x40000000, 1); + TF_LITE_MICRO_EXPECT_EQ(shift, 1); + double_result = std::frexp(1.0, &double_shift); + TF_LITE_MICRO_EXPECT_NEAR(double_result, 0.5, 1e-5); + TF_LITE_MICRO_EXPECT_EQ(double_shift, 1); + + result = tflite::IntegerFrExp(0.25, &shift); + TF_LITE_MICRO_EXPECT_NEAR(result, 0x40000000, 1); + TF_LITE_MICRO_EXPECT_EQ(shift, -1); + double_result = std::frexp(0.25, &double_shift); + TF_LITE_MICRO_EXPECT_NEAR(double_result, 0.5, 1e-5); + TF_LITE_MICRO_EXPECT_EQ(double_shift, -1); + + result = tflite::IntegerFrExp(-1.0, &shift); + TF_LITE_MICRO_EXPECT_NEAR(result, -(1 << 30), 1); + TF_LITE_MICRO_EXPECT_EQ(shift, 1); + double_result = std::frexp(-1.0, &double_shift); + TF_LITE_MICRO_EXPECT_NEAR(double_result, -0.5, 1e-5); + TF_LITE_MICRO_EXPECT_EQ(double_shift, 1); + + result = tflite::IntegerFrExp(123.45, &shift); + TF_LITE_MICRO_EXPECT_NEAR(result, (0.964453 * (1LL << 31)), 1000); + TF_LITE_MICRO_EXPECT_EQ(shift, 7); + double_result = std::frexp(123.45, &double_shift); + TF_LITE_MICRO_EXPECT_NEAR(double_result, 0.964453, 1e-5); + TF_LITE_MICRO_EXPECT_EQ(double_shift, 7); +} + +TF_LITE_MICRO_TEST(QuantizationUtilTest_DoubleFromFractionAndShift) { + double result = tflite::DoubleFromFractionAndShift(0, 0); + TF_LITE_MICRO_EXPECT_EQ(0, result); + + result = tflite::DoubleFromFractionAndShift(0x40000000, 1); + TF_LITE_MICRO_EXPECT_NEAR(1.0, result, 1e-5); + + result = tflite::DoubleFromFractionAndShift(0x40000000, 2); + TF_LITE_MICRO_EXPECT_NEAR(2.0, result, 1e-5); + + int shift; + int64_t fraction = tflite::IntegerFrExp(3.0, &shift); + result = tflite::DoubleFromFractionAndShift(fraction, shift); + TF_LITE_MICRO_EXPECT_NEAR(3.0, result, 1e-5); + + fraction = tflite::IntegerFrExp(123.45, &shift); + result = tflite::DoubleFromFractionAndShift(fraction, shift); + TF_LITE_MICRO_EXPECT_NEAR(123.45, result, 1e-5); + + fraction = 
tflite::IntegerFrExp(-23.232323, &shift); + result = tflite::DoubleFromFractionAndShift(fraction, shift); + TF_LITE_MICRO_EXPECT_NEAR(-23.232323, result, 1e-5); + + fraction = tflite::IntegerFrExp(static_cast(NAN), &shift); + result = tflite::DoubleFromFractionAndShift(fraction, shift); + TF_LITE_MICRO_EXPECT_TRUE(std::isnan(result)); + + fraction = tflite::IntegerFrExp(static_cast(INFINITY), &shift); + result = tflite::DoubleFromFractionAndShift(fraction, shift); + TF_LITE_MICRO_EXPECT_FALSE(std::isfinite(result)); +} + +TF_LITE_MICRO_TEST(QuantizationUtilTest_IntegerDoubleMultiply) { + TF_LITE_MICRO_EXPECT_NEAR(1.0, tflite::IntegerDoubleMultiply(1.0, 1.0), 1e-5); + TF_LITE_MICRO_EXPECT_NEAR(2.0, tflite::IntegerDoubleMultiply(1.0, 2.0), 1e-5); + TF_LITE_MICRO_EXPECT_NEAR(2.0, tflite::IntegerDoubleMultiply(2.0, 1.0), 1e-5); + TF_LITE_MICRO_EXPECT_NEAR(4.0, tflite::IntegerDoubleMultiply(2.0, 2.0), 1e-5); + TF_LITE_MICRO_EXPECT_NEAR(0.5, tflite::IntegerDoubleMultiply(1.0, 0.5), 1e-5); + TF_LITE_MICRO_EXPECT_NEAR(0.25, tflite::IntegerDoubleMultiply(0.5, 0.5), + 1e-5); + TF_LITE_MICRO_EXPECT_NEAR(-1.0, tflite::IntegerDoubleMultiply(1.0, -1.0), + 1e-5); + TF_LITE_MICRO_EXPECT_NEAR(-1.0, tflite::IntegerDoubleMultiply(-1.0, 1.0), + 1e-5); + TF_LITE_MICRO_EXPECT_NEAR(1.0, tflite::IntegerDoubleMultiply(-1.0, -1.0), + 1e-5); + TF_LITE_MICRO_EXPECT_NEAR( + 15000000.0, tflite::IntegerDoubleMultiply(3000.0, 5000.0), 1e-5); + TF_LITE_MICRO_EXPECT_TRUE(std::isnan( + tflite::IntegerDoubleMultiply(static_cast(NAN), 5000.0))); + TF_LITE_MICRO_EXPECT_TRUE(std::isnan( + tflite::IntegerDoubleMultiply(3000.0, static_cast(NAN)))); +} + +TF_LITE_MICRO_TEST(QuantizationUtilTest_IntegerDoubleCompare) { + TF_LITE_MICRO_EXPECT_EQ(-1, tflite::IntegerDoubleCompare(0.0, 1.0)); + TF_LITE_MICRO_EXPECT_EQ(1, tflite::IntegerDoubleCompare(1.0, 0.0)); + TF_LITE_MICRO_EXPECT_EQ(0, tflite::IntegerDoubleCompare(1.0, 1.0)); + TF_LITE_MICRO_EXPECT_EQ(0, tflite::IntegerDoubleCompare(0.0, 0.0)); + 
TF_LITE_MICRO_EXPECT_EQ(-1, tflite::IntegerDoubleCompare(-10.0, 10.0)); + TF_LITE_MICRO_EXPECT_EQ(1, tflite::IntegerDoubleCompare(123.45, 10.0)); + TF_LITE_MICRO_EXPECT_EQ( + 1, tflite::IntegerDoubleCompare(static_cast(NAN), + static_cast(INFINITY))); + TF_LITE_MICRO_EXPECT_EQ( + 1, tflite::IntegerDoubleCompare(static_cast(INFINITY), + static_cast(NAN))); +} + +TF_LITE_MICRO_TEST(QuantizationUtilTest_PreprocessSoftmaxScaling) { + auto quantize = [](double beta, double scale, int integer_bits) { + int32_t q; + int s; + tflite::PreprocessSoftmaxScaling(beta, scale, integer_bits, &q, &s); + return std::pair{q, s}; + }; + + // If beta * scale is greater than fits in the number of integer bits, the + // result is move near the maximum. Otherwise they quantize as expected. + // With 4 integer bits we can represent up to 16.0. + + auto r = quantize(1.0, 16.0, 4); + TF_LITE_MICRO_EXPECT_EQ(r.first, 2147483647); + TF_LITE_MICRO_EXPECT_EQ(r.second, 31); + + r = quantize(1.0, 8.0, 4); + TF_LITE_MICRO_EXPECT_EQ(r.first, 1073741824); + TF_LITE_MICRO_EXPECT_EQ(r.second, 31); + + // But with 5 bits we can go further. 
+ r = quantize(2.0, 16.0, 5); + TF_LITE_MICRO_EXPECT_EQ(r.first, 2147483647); + TF_LITE_MICRO_EXPECT_EQ(r.second, 31); + + r = quantize(2.0, 8.0, 5); + TF_LITE_MICRO_EXPECT_EQ(r.first, 1073741824); + TF_LITE_MICRO_EXPECT_EQ(r.second, 31); +} + +TF_LITE_MICRO_TEST(QuantizationUtilTest_CalculateInputRadius) { + TF_LITE_MICRO_EXPECT_EQ(tflite::CalculateInputRadius(4, 27), 15); + TF_LITE_MICRO_EXPECT_EQ(tflite::CalculateInputRadius(3, 27), 14); + TF_LITE_MICRO_EXPECT_EQ(tflite::CalculateInputRadius(3, 28), 7); + TF_LITE_MICRO_EXPECT_EQ(tflite::CalculateInputRadius(4, 2), 503316480); +} + +TF_LITE_MICRO_TEST(QuantizationUtilTest_QuantizeMultiplierArray) { + const double weights[] = {-4, -2, -1, -0.5, -0.25, -0.125, 0, + 0.125, 0.25, 0.5, 1, 2, 4}; + + const int size = 13; + int32_t effective_scale_significand[size]; + int effective_scale_shift[size]; + tflite::QuantizeMultiplierArray(weights, size, effective_scale_significand, + effective_scale_shift); + const int32_t expected_effective_scale_significand[] = { + -1073741824, // float scale = -4 + -1073741824, // float scale = -2 + -1073741824, // float scale = -1 + -1073741824, // float scale = -0.5 + -1073741824, // float scale = -0.25 + -1073741824, // float scale = -0.125 + 0, // float scale = 0 + 1073741824, // float scale = 0.125 + 1073741824, // float scale = 0.25 + 1073741824, // float scale = 0.5 + 1073741824, // float scale = 1 + 1073741824, // float scale = 2 + 1073741824, // float scale = 4 + }; + + const int expected_effective_scale_shift[] = { + 3, // float scale = -4 + 2, // float scale = -2 + 1, // float scale = -1 + 0, // float scale = -0.5 + -1, // float scale = -0.25 + -2, // float scale = -0.125 + 0, // float scale = 0 + -2, // float scale = 0.125 + -1, // float scale = 0.25 + 0, // float scale = 0.5 + 1, // float scale = 1 + 2, // float scale = 2 + 3, // float scale = 4 + }; + + for (int i = 0; i < size; i++) { + TF_LITE_MICRO_EXPECT_EQ(effective_scale_significand[i], + 
expected_effective_scale_significand[i]); + TF_LITE_MICRO_EXPECT_EQ(effective_scale_shift[i], + expected_effective_scale_shift[i]); + } +} + +TF_LITE_MICRO_TESTS_END diff --git a/tensorflow/lite/micro/kernels/quantize.cc b/tensorflow/lite/micro/kernels/quantize.cc new file mode 100644 index 0000000..1ac6942 --- /dev/null +++ b/tensorflow/lite/micro/kernels/quantize.cc @@ -0,0 +1,41 @@ +/* Copyright 2019 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#include "tensorflow/lite/micro/kernels/quantize.h" + +#include "tensorflow/lite/c/common.h" +#include "tensorflow/lite/kernels/internal/quantization_util.h" +#include "tensorflow/lite/kernels/internal/tensor_ctypes.h" +#include "tensorflow/lite/kernels/kernel_util.h" +#include "tensorflow/lite/micro/kernels/kernel_util.h" +#include "tensorflow/lite/micro/micro_utils.h" + +namespace tflite { +namespace { + +void* Init(TfLiteContext* context, const char* buffer, size_t length) { + TFLITE_DCHECK(context->AllocatePersistentBuffer != nullptr); + return context->AllocatePersistentBuffer(context, + sizeof(OpDataQuantizeReference)); +} + +} // namespace + +TFLMRegistration Register_QUANTIZE() { + return tflite::micro::RegisterOp(Init, PrepareQuantizeReference, + EvalQuantizeReference); +} + +} // namespace tflite diff --git a/tensorflow/lite/micro/kernels/quantize.h b/tensorflow/lite/micro/kernels/quantize.h new file mode 
100644 index 0000000..ba93809 --- /dev/null +++ b/tensorflow/lite/micro/kernels/quantize.h @@ -0,0 +1,37 @@ +/* Copyright 2020 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ +#ifndef TENSORFLOW_LITE_MICRO_KERNELS_QUANTIZE_H_ +#define TENSORFLOW_LITE_MICRO_KERNELS_QUANTIZE_H_ + +#include "tensorflow/lite/c/common.h" +#include "tensorflow/lite/kernels/internal/types.h" + +namespace tflite { + +struct OpDataQuantizeReference { + tflite::QuantizationParams quantization_params; + // The scaling factor from input to output (aka the 'real multiplier') can + // be represented as a fixed point multiplier plus a left shift. + int32_t requantize_output_multiplier; + int requantize_output_shift; + + int32_t input_zero_point; +}; + +TfLiteStatus EvalQuantizeReference(TfLiteContext* context, TfLiteNode* node); +TfLiteStatus PrepareQuantizeReference(TfLiteContext* context, TfLiteNode* node); +} // namespace tflite + +#endif // TENSORFLOW_LITE_MICRO_KERNELS_QUANTIZE_H_ diff --git a/tensorflow/lite/micro/kernels/quantize_common.cc b/tensorflow/lite/micro/kernels/quantize_common.cc new file mode 100644 index 0000000..cb04eaf --- /dev/null +++ b/tensorflow/lite/micro/kernels/quantize_common.cc @@ -0,0 +1,239 @@ +/* Copyright 2020 The TensorFlow Authors. All Rights Reserved. 
+ +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#include + +#include "tensorflow/lite/c/common.h" +#include "tensorflow/lite/kernels/internal/quantization_util.h" +#include "tensorflow/lite/kernels/internal/reference/quantize.h" +#include "tensorflow/lite/kernels/internal/reference/requantize.h" +#include "tensorflow/lite/kernels/internal/tensor_ctypes.h" +#include "tensorflow/lite/kernels/kernel_util.h" +#include "tensorflow/lite/micro/kernels/kernel_util.h" +#include "tensorflow/lite/micro/kernels/quantize.h" +#include "tensorflow/lite/micro/micro_log.h" +#include "tensorflow/lite/micro/micro_utils.h" + +namespace tflite { + +TfLiteStatus PrepareQuantizeReference(TfLiteContext* context, + TfLiteNode* node) { + TFLITE_DCHECK(node->user_data != nullptr); + auto* data = static_cast(node->user_data); + + TF_LITE_ENSURE_EQ(context, NumInputs(node), 1); + TF_LITE_ENSURE_EQ(context, NumOutputs(node), 1); + + MicroContext* micro_context = GetMicroContext(context); + + TfLiteTensor* input = micro_context->AllocateTempInputTensor(node, 0); + TF_LITE_ENSURE(context, input != nullptr); + TfLiteTensor* output = micro_context->AllocateTempOutputTensor(node, 0); + TF_LITE_ENSURE(context, output != nullptr); + + // TODO(b/128934713): Add support for fixed-point per-channel quantization. + // Currently this only support affine per-layer quantization. 
+ TF_LITE_ENSURE_EQ(context, output->quantization.type, + kTfLiteAffineQuantization); + const auto* affine_quantization = + reinterpret_cast(output->quantization.params); + TF_LITE_ENSURE(context, affine_quantization); + TF_LITE_ENSURE(context, affine_quantization->scale); + TF_LITE_ENSURE(context, affine_quantization->scale->size == 1); + + TF_LITE_ENSURE( + context, input->type == kTfLiteFloat32 || input->type == kTfLiteInt32 || + input->type == kTfLiteInt16 || input->type == kTfLiteInt8 || + input->type == kTfLiteUInt8); + TF_LITE_ENSURE(context, output->type == kTfLiteInt8 || + output->type == kTfLiteInt16 || + output->type == kTfLiteInt32 || + output->type == kTfLiteUInt8); + + if ((input->type == kTfLiteInt16 && output->type == kTfLiteInt8) || + (input->type == kTfLiteInt8 && output->type == kTfLiteInt8) || + (input->type == kTfLiteInt8 && output->type == kTfLiteUInt8) || + (input->type == kTfLiteUInt8 && output->type == kTfLiteInt8) || + (input->type == kTfLiteInt8 && output->type == kTfLiteInt16) || + (input->type == kTfLiteInt8 && output->type == kTfLiteInt32) || + (input->type == kTfLiteInt16 && output->type == kTfLiteInt16) || + (input->type == kTfLiteInt16 && output->type == kTfLiteInt32) || + (input->type == kTfLiteInt32 && output->type == kTfLiteInt8) || + (input->type == kTfLiteInt32 && output->type == kTfLiteInt16)) { + double effective_scale = static_cast(input->params.scale) / + static_cast(output->params.scale); + + QuantizeMultiplier(effective_scale, &data->requantize_output_multiplier, + &data->requantize_output_shift); + } + + data->quantization_params.zero_point = output->params.zero_point; + data->quantization_params.scale = static_cast(output->params.scale); + + data->input_zero_point = input->params.zero_point; + + micro_context->DeallocateTempTfLiteTensor(input); + micro_context->DeallocateTempTfLiteTensor(output); + return kTfLiteOk; +} + +TfLiteStatus EvalQuantizeReference(TfLiteContext* context, TfLiteNode* node) { + 
TFLITE_DCHECK(node->user_data != nullptr); + auto* data = static_cast(node->user_data); + + const TfLiteEvalTensor* input = tflite::micro::GetEvalInput(context, node, 0); + TfLiteEvalTensor* output = tflite::micro::GetEvalOutput(context, node, 0); + + if (input->type == kTfLiteFloat32) { + switch (output->type) { + case kTfLiteInt8: + reference_ops::AffineQuantize( + data->quantization_params, tflite::micro::GetTensorShape(input), + tflite::micro::GetTensorData(input), + tflite::micro::GetTensorShape(output), + tflite::micro::GetTensorData(output)); + break; + case kTfLiteInt16: + reference_ops::AffineQuantize( + data->quantization_params, tflite::micro::GetTensorShape(input), + tflite::micro::GetTensorData(input), + tflite::micro::GetTensorShape(output), + tflite::micro::GetTensorData(output)); + return kTfLiteOk; + default: + MicroPrintf("Input %s, output %s not supported.", + TfLiteTypeGetName(input->type), + TfLiteTypeGetName(output->type)); + return kTfLiteError; + } + } else if (input->type == kTfLiteInt32) { + size_t size = ElementCount(*input->dims); + switch (output->type) { + case kTfLiteInt8: + reference_ops::Requantize( + tflite::micro::GetTensorData(input), size, + data->requantize_output_multiplier, data->requantize_output_shift, + data->input_zero_point, data->quantization_params.zero_point, + tflite::micro::GetTensorData(output)); + break; + case kTfLiteInt16: + reference_ops::Requantize( + tflite::micro::GetTensorData(input), size, + data->requantize_output_multiplier, data->requantize_output_shift, + data->input_zero_point, data->quantization_params.zero_point, + tflite::micro::GetTensorData(output)); + break; + default: + MicroPrintf("Input %s, output %s not supported.", + TfLiteTypeGetName(input->type), + TfLiteTypeGetName(output->type)); + return kTfLiteError; + } + } else if (input->type == kTfLiteInt16) { + size_t size = ElementCount(*input->dims); + switch (output->type) { + case kTfLiteInt8: + reference_ops::Requantize( + 
tflite::micro::GetTensorData(input), size, + data->requantize_output_multiplier, data->requantize_output_shift, + data->input_zero_point, data->quantization_params.zero_point, + tflite::micro::GetTensorData(output)); + break; + case kTfLiteInt16: + reference_ops::Requantize( + tflite::micro::GetTensorData(input), size, + data->requantize_output_multiplier, data->requantize_output_shift, + data->input_zero_point, data->quantization_params.zero_point, + tflite::micro::GetTensorData(output)); + return kTfLiteOk; + case kTfLiteInt32: + reference_ops::Requantize( + tflite::micro::GetTensorData(input), size, + data->requantize_output_multiplier, data->requantize_output_shift, + data->input_zero_point, data->quantization_params.zero_point, + tflite::micro::GetTensorData(output)); + return kTfLiteOk; + default: + MicroPrintf("Input %s, output %s not supported.", + TfLiteTypeGetName(input->type), + TfLiteTypeGetName(output->type)); + return kTfLiteError; + } + } else if (input->type == kTfLiteInt8) { + // Int8 to Int8 requantization, required if the input and output tensors + // have different scales and/or zero points. 
+ size_t size = ElementCount(*input->dims); + switch (output->type) { + case kTfLiteInt8: + reference_ops::Requantize( + tflite::micro::GetTensorData(input), size, + data->requantize_output_multiplier, data->requantize_output_shift, + data->input_zero_point, data->quantization_params.zero_point, + tflite::micro::GetTensorData(output)); + break; + case kTfLiteUInt8: + reference_ops::Requantize( + tflite::micro::GetTensorData(input), size, + data->requantize_output_multiplier, data->requantize_output_shift, + data->input_zero_point, data->quantization_params.zero_point, + tflite::micro::GetTensorData(output)); + break; + case kTfLiteInt16: + reference_ops::Requantize( + tflite::micro::GetTensorData(input), size, + data->requantize_output_multiplier, data->requantize_output_shift, + data->input_zero_point, data->quantization_params.zero_point, + tflite::micro::GetTensorData(output)); + break; + case kTfLiteInt32: + reference_ops::Requantize( + tflite::micro::GetTensorData(input), size, + data->requantize_output_multiplier, data->requantize_output_shift, + data->input_zero_point, data->quantization_params.zero_point, + tflite::micro::GetTensorData(output)); + break; + default: + MicroPrintf("Input %s, output %s not supported.", + TfLiteTypeGetName(input->type), + TfLiteTypeGetName(output->type)); + return kTfLiteError; + } + } else if (input->type == kTfLiteUInt8) { + size_t size = ElementCount(*input->dims); + switch (output->type) { + case kTfLiteInt8: + reference_ops::Requantize( + tflite::micro::GetTensorData(input), size, + data->requantize_output_multiplier, data->requantize_output_shift, + data->input_zero_point, data->quantization_params.zero_point, + tflite::micro::GetTensorData(output)); + break; + default: + MicroPrintf("Input %s, output %s not supported.", + TfLiteTypeGetName(input->type), + TfLiteTypeGetName(output->type)); + return kTfLiteError; + } + } else { + MicroPrintf("Input %s, output %s not supported.", + TfLiteTypeGetName(input->type), + 
TfLiteTypeGetName(output->type)); + return kTfLiteError; + } + + return kTfLiteOk; +} + +} // namespace tflite diff --git a/tensorflow/lite/micro/kernels/quantize_test.cc b/tensorflow/lite/micro/kernels/quantize_test.cc new file mode 100644 index 0000000..9867108 --- /dev/null +++ b/tensorflow/lite/micro/kernels/quantize_test.cc @@ -0,0 +1,439 @@ +/* Copyright 2019 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#include "tensorflow/lite/c/builtin_op_data.h" +#include "tensorflow/lite/c/common.h" +#include "tensorflow/lite/micro/kernels/kernel_runner.h" +#include "tensorflow/lite/micro/test_helpers.h" +#include "tensorflow/lite/micro/testing/micro_test.h" + +namespace tflite { +namespace testing { +namespace { + +template +void ValidateQuantizeGoldens(TfLiteTensor* tensors, int tensors_size, + const float* golden, T* golden_quantized, + float scale, int zero_point, int output_len, + T* output_data) { + int inputs_array_data[] = {1, 0}; + TfLiteIntArray* inputs_array = IntArrayFromInts(inputs_array_data); + int outputs_array_data[] = {1, 1}; + TfLiteIntArray* outputs_array = IntArrayFromInts(outputs_array_data); + + // Version 1 of quantize supports int8_t and uint8_t quantization. 
+ const TFLMRegistration registration = Register_QUANTIZE(); + micro::KernelRunner runner(registration, tensors, tensors_size, inputs_array, + outputs_array, + /*builtin_data=*/nullptr); + + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, runner.InitAndPrepare()); + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, runner.Invoke()); + + // Use reference quantization from test utils to compare against op output. + Quantize(golden, golden_quantized, output_len, scale, zero_point); + for (int i = 0; i < output_len; ++i) { + TF_LITE_MICRO_EXPECT_EQ(golden_quantized[i], output_data[i]); + } +} + +template +void TestQuantizeFloat(int* input_dims_data, const float* input_data, + int* output_dims_data, const float* golden, + T* golden_quantized, const float scale, + const int zero_point, T* output_data) { + TfLiteIntArray* input_dims = IntArrayFromInts(input_dims_data); + TfLiteIntArray* output_dims = IntArrayFromInts(output_dims_data); + const int output_dims_count = ElementCount(*output_dims); + + TfLiteTensor output_tensor = + CreateQuantizedTensor(output_data, output_dims, scale, zero_point); + + TfLiteAffineQuantization quant; + float scales[] = {1, scale}; + int zero_points[] = {1, zero_point}; + quant.scale = FloatArrayFromFloats(scales); + quant.zero_point = IntArrayFromInts(zero_points); + output_tensor.quantization = {kTfLiteAffineQuantization, &quant}; + + // 1 input, 1 output. 
+ constexpr int tensors_size = 2; + TfLiteTensor tensors[tensors_size] = { + CreateTensor(input_data, input_dims), + output_tensor, + }; + + ValidateQuantizeGoldens(tensors, tensors_size, golden, golden_quantized, + scale, zero_point, output_dims_count, output_data); +} + +template +void TestRequantize(int* input_dims_data, const float* input_data, + InputType* input_quantized, const float input_scale, + const int input_zero_point, int* output_dims_data, + const float* golden, OutputType* golden_quantized, + const float output_scale, const int output_zero_point, + OutputType* output_data) { + TfLiteIntArray* input_dims = IntArrayFromInts(input_dims_data); + TfLiteIntArray* output_dims = IntArrayFromInts(output_dims_data); + const int output_dims_count = ElementCount(*output_dims); + + TfLiteTensor output_tensor = CreateQuantizedTensor( + output_data, output_dims, output_scale, output_zero_point); + + TfLiteAffineQuantization quant; + float scales[] = {1, output_scale}; + int zero_points[] = {1, output_zero_point}; + quant.scale = FloatArrayFromFloats(scales); + quant.zero_point = IntArrayFromInts(zero_points); + output_tensor.quantization = {kTfLiteAffineQuantization, &quant}; + + // 1 input, 1 output. 
+ constexpr int tensors_size = 2; + TfLiteTensor tensors[tensors_size] = { + CreateQuantizedTensor(input_data, input_quantized, input_dims, + input_scale, input_zero_point), + output_tensor, + }; + + ValidateQuantizeGoldens(tensors, tensors_size, golden, golden_quantized, + output_scale, output_zero_point, output_dims_count, + output_data); +} + +} // namespace +} // namespace testing +} // namespace tflite + +TF_LITE_MICRO_TESTS_BEGIN +TF_LITE_MICRO_TEST(QuantizeOpTestInt16) { + const int kLength = 10; + int dims[] = {2, 2, 5}; + const float values[] = {-63.5, -63, -62.5, -62, -61.5, + 62, 62.5, 63, 63.5, 64}; + const float scale = 0.5; + const int zero_point = -1; + int16_t output[kLength]; + int16_t values_quantized[kLength]; + tflite::testing::TestQuantizeFloat( + dims, values, dims, values, values_quantized, scale, zero_point, output); +} + +TF_LITE_MICRO_TEST(QuantizeOpTestInt16NoScale) { + const int kLength = 10; + int dims[] = {2, 2, 5}; + const float values[] = {-128, -127, -126, -125, -124, + 123, 124, 125, 126, 127}; + const float scale = 1.0; + const int zero_point = 0; + int16_t output[kLength]; + int16_t values_quantized[kLength]; + tflite::testing::TestQuantizeFloat( + dims, values, dims, values, values_quantized, scale, zero_point, output); +} + +TF_LITE_MICRO_TEST(QuantizeOpTestInt16toInt16) { + const int kLength = 10; + int dims[] = {2, 2, 5}; + const float values[] = {-64, -62, -60, -58, -56, 54, 56, 58, 60, 62}; + const float input_scale = 2.f; + const int input_zero_point = 0; + const float output_scale = 0.5; + const int output_zero_point = 32; + int16_t output_quantized[kLength]; + int16_t values_quantized[kLength]; + int16_t input_quantized[kLength]; + tflite::testing::TestRequantize(dims, values, input_quantized, input_scale, + input_zero_point, dims, values, + values_quantized, output_scale, + output_zero_point, output_quantized); +} + +TF_LITE_MICRO_TEST(QuantizeOpTestInt16toInt16NoZeroPoint) { + const int kLength = 10; + int dims[] = {2, 
2, 5}; + const float values[] = {-32, -31, -30, -29, -28, 27, 28, 29, 30, 31}; + const float input_scale = 1.f; + const int input_zero_point = 0; + const float output_scale = 0.5; + const int output_zero_point = 0; + int16_t output_quantized[kLength]; + int16_t values_quantized[kLength]; + int16_t input_quantized[kLength]; + tflite::testing::TestRequantize(dims, values, input_quantized, input_scale, + input_zero_point, dims, values, + values_quantized, output_scale, + output_zero_point, output_quantized); +} + +TF_LITE_MICRO_TEST(QuantizeOpTestInt8toInt8) { + const int kLength = 10; + int dims[] = {2, 2, 5}; + const float values[] = {-64, -62, -60, -58, -56, 54, 56, 58, 60, 62}; + const float input_scale = 2.f; + const int input_zero_point = 0; + const float output_scale = 0.5; + const int output_zero_point = 32; + int8_t output_quantized[kLength]; + int8_t values_quantized[kLength]; + int8_t input_quantized[kLength]; + tflite::testing::TestRequantize(dims, values, input_quantized, input_scale, + input_zero_point, dims, values, + values_quantized, output_scale, + output_zero_point, output_quantized); +} + +TF_LITE_MICRO_TEST(QuantizeOpTestInt8toInt8NoZeroPoint) { + const int kLength = 10; + int dims[] = {2, 2, 5}; + const float values[] = {-32, -31, -30, -29, -28, 27, 28, 29, 30, 31}; + const float input_scale = 1.f; + const int input_zero_point = 0; + const float output_scale = 0.5; + const int output_zero_point = 0; + int8_t output_quantized[kLength]; + int8_t values_quantized[kLength]; + int8_t input_quantized[kLength]; + tflite::testing::TestRequantize(dims, values, input_quantized, input_scale, + input_zero_point, dims, values, + values_quantized, output_scale, + output_zero_point, output_quantized); +} + +TF_LITE_MICRO_TEST(QuantizeOpTestInt8toInt16) { + const int kLength = 10; + int dims[] = {2, 2, 5}; + const float values[] = {-64, -62, -60, -58, -56, 54, 56, 58, 60, 62}; + const float input_scale = 2.f; + const int input_zero_point = 0; + const float 
output_scale = 0.5; + const int output_zero_point = 32; + int16_t output_quantized[kLength]; + int16_t values_quantized[kLength]; + int8_t input_quantized[kLength]; + tflite::testing::TestRequantize(dims, values, input_quantized, input_scale, + input_zero_point, dims, values, + values_quantized, output_scale, + output_zero_point, output_quantized); +} + +TF_LITE_MICRO_TEST(QuantizeOpTestInt32toInt16) { + constexpr int kLength = 10; + int dims[] = {2, 2, 5}; + const float values[] = {-32, -31, -30, -29, -28, 27, 28, 29, 30, 31}; + // TODO(b/155682734): Input scale must be smaller than output scale for + // xtensa. + const float input_scale = 0.4f; + const int input_zero_point = 0; + const float output_scale = 1.0f; + const int output_zero_point = 0; + int16_t output_quantized[kLength]; + int16_t values_quantized[kLength]; + int32_t input_quantized[kLength]; + tflite::testing::TestRequantize(dims, values, input_quantized, input_scale, + input_zero_point, dims, values, + values_quantized, output_scale, + output_zero_point, output_quantized); +} + +TF_LITE_MICRO_TEST(QuantizeOpTestInt32toInt8) { + constexpr int kLength = 10; + int dims[] = {2, 2, 5}; + const float values[] = {-32, -31, -30, -29, -28, 27, 28, 29, 30, 31}; + // TODO(b/155682734): Input scale must be smaller than output scale for + // xtensa. + const float input_scale = 0.4f; + const int input_zero_point = 0; + const float output_scale = 1.0f; + const int output_zero_point = 0; + int8_t output_quantized[kLength]; + int8_t values_quantized[kLength]; + int32_t input_quantized[kLength]; + tflite::testing::TestRequantize(dims, values, input_quantized, input_scale, + input_zero_point, dims, values, + values_quantized, output_scale, + output_zero_point, output_quantized); +} + +// TODO(b/155682734): Hifimini optimized quantize requires input scale to be +// smaller than output scale. 
+TF_LITE_MICRO_TEST(QuantizeOpTestInt16toInt8) { + const int kLength = 10; + int dims[] = {2, 2, 5}; + const float values[] = {-64, -62, -60, -58, -56, 54, 56, 58, 60, 62}; + const float input_scale = 2.f; + const int input_zero_point = 0; + const float output_scale = 0.5; + const int output_zero_point = 0; + int8_t output_quantized[kLength]; + int8_t values_quantized[kLength]; + int16_t input_quantized[kLength]; + tflite::testing::TestRequantize(dims, values, input_quantized, input_scale, + input_zero_point, dims, values, + values_quantized, output_scale, + output_zero_point, output_quantized); +} + +// Test the fast algorithm from int8 to uint8 when zero point diff = -128 +TF_LITE_MICRO_TEST(QuantizeOpTestInt8toUInt8Fast) { + constexpr int kLength = 10; + int dims[] = {2, 2, 5}; + const float values[] = {-32, -31, -30, -29, -28, 27, 28, 29, 30, 31}; + const float input_scale = 1.0f; + const int input_zero_point = 0; + const float output_scale = 0.5f; + const int output_zero_point = 128; + uint8_t output_quantized[kLength]; + uint8_t values_quantized[kLength]; + int8_t input_quantized[kLength]; + tflite::testing::TestRequantize(dims, values, input_quantized, input_scale, + input_zero_point, dims, values, + values_quantized, output_scale, + output_zero_point, output_quantized); +} + +// Test the normal requant algorithm from int8 to uint8 +TF_LITE_MICRO_TEST(QuantizeOpTestInt8toUInt8Normal) { + constexpr int kLength = 10; + int dims[] = {2, 2, 5}; + const float values[] = {-32, -31, -30, -29, -28, 27, 28, 29, 30, 31}; + const float input_scale = 1.0f; + const int input_zero_point = 50; + const float output_scale = 1.0f; + const int output_zero_point = 110; + uint8_t output_quantized[kLength]; + uint8_t values_quantized[kLength]; + int8_t input_quantized[kLength]; + tflite::testing::TestRequantize(dims, values, input_quantized, input_scale, + input_zero_point, dims, values, + values_quantized, output_scale, + output_zero_point, output_quantized); +} + +// Test the 
fast algorithm from uint8 to int8 when zero point diff = 128 +TF_LITE_MICRO_TEST(QuantizeOpTestUInt8toInt8Fast) { + constexpr int kLength = 10; + int dims[] = {2, 2, 5}; + const float values[] = {-32, -31, -30, -29, -28, 27, 28, 29, 30, 31}; + const float input_scale = 0.4f; + const int input_zero_point = 0; + const float output_scale = 1.0f; + const int output_zero_point = -128; + int8_t output_quantized[kLength]; + int8_t values_quantized[kLength]; + uint8_t input_quantized[kLength]; + tflite::testing::TestRequantize(dims, values, input_quantized, input_scale, + input_zero_point, dims, values, + values_quantized, output_scale, + output_zero_point, output_quantized); +} + +// Test the normal requant algorithm from uint8 to int8 +TF_LITE_MICRO_TEST(QuantizeOpTestUInt8toInt8Normal) { + constexpr int kLength = 10; + int dims[] = {2, 2, 5}; + const float values[] = {-32, -31, -30, -29, -28, 27, 28, 29, 30, 31}; + const float input_scale = 1.0f; + const int input_zero_point = 50; + const float output_scale = 0.3f; + const int output_zero_point = 0; + int8_t output_quantized[kLength]; + int8_t values_quantized[kLength]; + uint8_t input_quantized[kLength]; + tflite::testing::TestRequantize(dims, values, input_quantized, input_scale, + input_zero_point, dims, values, + values_quantized, output_scale, + output_zero_point, output_quantized); +} + +TF_LITE_MICRO_TEST(QuantizeOpTestInt8toInt32) { + const int kLength = 10; + int dims[] = {2, 2, 5}; + const float values[] = {-32, -31, -30, -29, -28, 27, 28, 29, 30, 31}; + const float input_scale = 1.f; + const int input_zero_point = 0; + const float output_scale = 0.5; + const int output_zero_point = 0; + int32_t output_quantized[kLength]; + int32_t values_quantized[kLength]; + int8_t input_quantized[kLength]; + tflite::testing::TestRequantize(dims, values, input_quantized, input_scale, + input_zero_point, dims, values, + values_quantized, output_scale, + output_zero_point, output_quantized); +} + 
+TF_LITE_MICRO_TEST(QuantizeOpTestInt16toInt32) { + const int kLength = 10; + int dims[] = {2, 2, 5}; + const float values[] = {-32, -31, -30, -29, -28, 27, 28, 29, 30, 31}; + const float input_scale = 1.f; + const int input_zero_point = 0; + const float output_scale = 0.5; + const int output_zero_point = 0; + int32_t output_quantized[kLength]; + int32_t values_quantized[kLength]; + int16_t input_quantized[kLength]; + tflite::testing::TestRequantize(dims, values, input_quantized, input_scale, + input_zero_point, dims, values, + values_quantized, output_scale, + output_zero_point, output_quantized); +} + +TF_LITE_MICRO_TEST(QuantizeOpTestInt16toInt8) { + constexpr int kLength = 10; + int dims[] = {2, 2, 5}; + const float values[] = {-32, -31, -30, -29, -28, 27, 28, 29, 30, 31}; + // TODO(b/155682734): Input scale must be smaller than output scale for + // xtensa. + const float input_scale = 0.4f; + const int input_zero_point = 0; + const float output_scale = 1.0f; + const int output_zero_point = 0; + int8_t output_quantized[kLength]; + int8_t values_quantized[kLength]; + int16_t input_quantized[kLength]; + tflite::testing::TestRequantize(dims, values, input_quantized, input_scale, + input_zero_point, dims, values, + values_quantized, output_scale, + output_zero_point, output_quantized); +} + +TF_LITE_MICRO_TEST(QuantizeOpTestInt8) { + const int kLength = 10; + int dims[] = {2, 2, 5}; + const float values[] = {-63.5, -63, -62.5, -62, -61.5, + 62, 62.5, 63, 63.5, 64}; + const float scale = 0.5; + const int zero_point = -1; + int16_t output[kLength]; + int16_t values_quantized[kLength]; + tflite::testing::TestQuantizeFloat( + dims, values, dims, values, values_quantized, scale, zero_point, output); +} + +TF_LITE_MICRO_TEST(QuantizeOpTestInt8NoZeroPoint) { + const int kLength = 10; + int dims[] = {2, 2, 5}; + const float values[] = {-32, -31, -30, -29, -28, 27, 28, 29, 30, 31}; + const float scale = 0.5; + const int zero_point = 0; + int8_t output[kLength]; + int8_t 
values_quantized[kLength]; + tflite::testing::TestQuantizeFloat( + dims, values, dims, values, values_quantized, scale, zero_point, output); +} + +TF_LITE_MICRO_TESTS_END diff --git a/tensorflow/lite/micro/kernels/read_variable.cc b/tensorflow/lite/micro/kernels/read_variable.cc new file mode 100644 index 0000000..87e720d --- /dev/null +++ b/tensorflow/lite/micro/kernels/read_variable.cc @@ -0,0 +1,87 @@ +/* Copyright 2021 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/ + +#include + +#include + +#include "tensorflow/lite/c/builtin_op_data.h" +#include "tensorflow/lite/c/common.h" +#include "tensorflow/lite/kernels/internal/compatibility.h" +#include "tensorflow/lite/kernels/kernel_util.h" +#include "tensorflow/lite/micro/kernels/kernel_util.h" +#include "tensorflow/lite/micro/memory_helpers.h" +#include "tensorflow/lite/micro/micro_graph.h" +#include "tensorflow/lite/micro/micro_log.h" +#include "tensorflow/lite/micro/micro_resource_variable.h" +#include "tensorflow/lite/schema/schema_generated.h" + +namespace tflite { + +namespace { + +constexpr int kInputVariableId = 0; +constexpr int kOutputValue = 0; + +TfLiteStatus Prepare(TfLiteContext* context, TfLiteNode* node) { + TFLITE_DCHECK(NumInputs(node) == 1); + TFLITE_DCHECK(NumOutputs(node) == 1); + + MicroContext* micro_context = GetMicroContext(context); + + TfLiteTensor* input_resource_id_tensor = + micro_context->AllocateTempInputTensor(node, kInputVariableId); + + TFLITE_DCHECK(input_resource_id_tensor != nullptr); + TFLITE_DCHECK(input_resource_id_tensor->type == kTfLiteResource); + TFLITE_DCHECK(NumElements(input_resource_id_tensor) == 1); + + micro_context->DeallocateTempTfLiteTensor(input_resource_id_tensor); + + return kTfLiteOk; +} + +TfLiteStatus Eval(TfLiteContext* context, TfLiteNode* node) { + const TfLiteEvalTensor* input_resource_id_tensor = + tflite::micro::GetEvalInput(context, node, kInputVariableId); + TFLITE_DCHECK(input_resource_id_tensor != nullptr); + + TfLiteEvalTensor* output_value = + tflite::micro::GetEvalOutput(context, node, kOutputValue); + TFLITE_DCHECK(output_value != nullptr); + + tflite::MicroContext* micro_context = tflite::GetMicroContext(context); + MicroGraph& graph_info = micro_context->graph(); + + MicroResourceVariables* resources = graph_info.GetResourceVariables(); + if (resources == nullptr) { + MicroPrintf( + "READ_VARIABLE requires resource variables. 
Please create " + "ResourceVariables and pass it to the interpreter."); + return kTfLiteError; + } + TF_LITE_ENSURE_OK( + context, + resources->Read(input_resource_id_tensor->data.i32[0], output_value)); + return kTfLiteOk; +} + +} // namespace. + +TFLMRegistration Register_READ_VARIABLE() { + return tflite::micro::RegisterOp(nullptr, Prepare, Eval); +} + +} // namespace tflite diff --git a/tensorflow/lite/micro/kernels/reduce.cc b/tensorflow/lite/micro/kernels/reduce.cc new file mode 100644 index 0000000..ab24a82 --- /dev/null +++ b/tensorflow/lite/micro/kernels/reduce.cc @@ -0,0 +1,72 @@ +/* Copyright 2022 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/ + +#include "tensorflow/lite/kernels/internal/reference/reduce.h" + +#include "tensorflow/lite/c/builtin_op_data.h" +#include "tensorflow/lite/c/common.h" +#include "tensorflow/lite/kernels/internal/quantization_util.h" +#include "tensorflow/lite/kernels/internal/reference/integer_ops/mean.h" +#include "tensorflow/lite/kernels/internal/tensor_ctypes.h" +#include "tensorflow/lite/kernels/internal/types.h" +#include "tensorflow/lite/kernels/kernel_util.h" +#include "tensorflow/lite/micro/kernels/kernel_util.h" +#include "tensorflow/lite/micro/kernels/reduce.h" +#include "tensorflow/lite/micro/micro_utils.h" + +namespace tflite { + +void* InitReduce(TfLiteContext* context, const char* buffer, size_t length) { + return context->AllocatePersistentBuffer(context, sizeof(OpDataReduce)); +} + +TfLiteStatus PrepareMax(TfLiteContext* context, TfLiteNode* node) { + return PrepareMaxHelper(context, node, + static_cast(node->user_data)); +} + +TfLiteStatus PrepareMeanOrSum(TfLiteContext* context, TfLiteNode* node) { + return PrepareMeanOrSumHelper(context, node, + static_cast(node->user_data)); +} + +TfLiteStatus EvalMean(TfLiteContext* context, TfLiteNode* node) { + return EvalMeanHelper(context, node, + static_cast(node->user_data)); +} + +TfLiteStatus EvalMax(TfLiteContext* context, TfLiteNode* node) { + OpDataReduce* op_data = static_cast(node->user_data); + return EvalMaxHelper(context, node, op_data); +} + +TfLiteStatus EvalSum(TfLiteContext* context, TfLiteNode* node) { + return EvalSumHelper(context, node, + static_cast(node->user_data)); +} + +TFLMRegistration Register_MEAN() { + return tflite::micro::RegisterOp(InitReduce, PrepareMeanOrSum, EvalMean); +} + +TFLMRegistration Register_REDUCE_MAX() { + return tflite::micro::RegisterOp(InitReduce, PrepareMax, EvalMax); +} + +TFLMRegistration Register_SUM() { + return tflite::micro::RegisterOp(InitReduce, PrepareMeanOrSum, EvalSum); +} + +} 
// namespace tflite diff --git a/tensorflow/lite/micro/kernels/reduce.h b/tensorflow/lite/micro/kernels/reduce.h new file mode 100644 index 0000000..2daeef5 --- /dev/null +++ b/tensorflow/lite/micro/kernels/reduce.h @@ -0,0 +1,65 @@ +/* Copyright 2022 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#ifndef TENSORFLOW_LITE_MICRO_KERNELS_REDUCE_H_ +#define TENSORFLOW_LITE_MICRO_KERNELS_REDUCE_H_ + +#include + +#include "tensorflow/lite/c/builtin_op_data.h" +#include "tensorflow/lite/kernels/internal/types.h" +#include "tensorflow/lite/micro/micro_common.h" + +namespace tflite { + +extern const int kMaxNumberOfAxis; +extern const int kMaxNumberOfReducedAxis; + +struct OpDataReduce { + int32_t multiplier; + int shift; + int temp_buffer_idx; + int resolved_axis_idx; + int input_zp; + float input_scale; + int output_zp; + float output_scale; + int num_output_elements; + int num_axis; +}; + +TfLiteStatus PrepareMaxHelper(TfLiteContext* context, TfLiteNode* node, + OpDataReduce* op_data); + +TfLiteStatus PrepareMeanOrSumHelper(TfLiteContext* context, TfLiteNode* node, + OpDataReduce* op_data); + +TfLiteStatus EvalMaxHelper(TfLiteContext* context, TfLiteNode* node, + OpDataReduce* op_data); +TfLiteStatus EvalMeanHelper(TfLiteContext* context, TfLiteNode* node, + OpDataReduce* op_data); +TfLiteStatus EvalSumHelper(TfLiteContext* context, TfLiteNode* node, + OpDataReduce* 
op_data); + +void ReduceResolveAxis(const int* axis_data, int axis_count, + MeanParams* op_params); + +TFLMRegistration Register_MEAN(); +TFLMRegistration Register_REDUCE_MAX(); +TFLMRegistration Register_SUM(); + +} // namespace tflite + +#endif // TENSORFLOW_LITE_MICRO_KERNELS_REDUCE_H_ diff --git a/tensorflow/lite/micro/kernels/reduce_common.cc b/tensorflow/lite/micro/kernels/reduce_common.cc new file mode 100644 index 0000000..0dab49c --- /dev/null +++ b/tensorflow/lite/micro/kernels/reduce_common.cc @@ -0,0 +1,338 @@ +/* Copyright 2023 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/ + +#include "tensorflow/lite/c/builtin_op_data.h" +#include "tensorflow/lite/c/common.h" +#include "tensorflow/lite/kernels/internal/quantization_util.h" +#include "tensorflow/lite/kernels/internal/reference/integer_ops/mean.h" +#include "tensorflow/lite/kernels/internal/reference/reduce.h" +#include "tensorflow/lite/kernels/internal/tensor_ctypes.h" +#include "tensorflow/lite/kernels/internal/types.h" +#include "tensorflow/lite/kernels/kernel_util.h" +#include "tensorflow/lite/micro/kernels/kernel_util.h" +#include "tensorflow/lite/micro/kernels/reduce.h" +#include "tensorflow/lite/micro/micro_log.h" +#include "tensorflow/lite/micro/micro_utils.h" + +namespace tflite { + +const int kMaxNumberOfAxis = 5; +const int kMaxNumberOfReducedAxis = 2; + +TfLiteStatus PrepareSimple(TfLiteContext* context, TfLiteNode* node, + int32_t* multiplier, int* shift) { + MicroContext* micro_context = GetMicroContext(context); + + // Inputs Tensor (dtype depends on quantization): + // [0] = Input + // [1] = Axis + TfLiteTensor* input = micro_context->AllocateTempInputTensor(node, 0); + + // Outputs Tensor (dtype depends on quantization): + // [0] = Output + + // Validate number of inputs and outputs + TF_LITE_ENSURE_EQ(context, node->inputs->size, 2); + TF_LITE_ENSURE_EQ(context, node->outputs->size, 1); + + // Validate axis type + TfLiteTensor* axis = micro_context->AllocateTempInputTensor(node, 1); + TF_LITE_ENSURE(context, axis != nullptr); + TF_LITE_ENSURE_TYPES_EQ(context, axis->type, kTfLiteInt32); + + if (input->type == kTfLiteInt8) { + TfLiteTensor* output = micro_context->AllocateTempOutputTensor(node, 0); + const double real_multiplier = static_cast(input->params.scale) / + static_cast(output->params.scale); + QuantizeMultiplier(real_multiplier, multiplier, shift); + micro_context->DeallocateTempTfLiteTensor(output); + } + micro_context->DeallocateTempTfLiteTensor(axis); + 
micro_context->DeallocateTempTfLiteTensor(input); + return kTfLiteOk; +} + +TfLiteStatus PrepareMaxHelper(TfLiteContext* context, TfLiteNode* node, + OpDataReduce* op_data) { + TF_LITE_ENSURE_OK(context, PrepareSimple(context, node, &op_data->multiplier, + &op_data->shift)); + + MicroContext* micro_context = GetMicroContext(context); + TfLiteTensor* input = micro_context->AllocateTempInputTensor(node, 0); + TfLiteTensor* output = micro_context->AllocateTempOutputTensor(node, 0); + TfLiteTensor* axis = micro_context->AllocateTempInputTensor(node, 1); + + op_data->input_scale = input->params.scale; + op_data->output_scale = output->params.scale; + op_data->num_output_elements = NumElements(output); + + context->RequestScratchBufferInArena(context, sizeof(int) * input->dims->size, + &op_data->temp_buffer_idx); + context->RequestScratchBufferInArena( + context, sizeof(int) * static_cast(ElementCount(*axis->dims)), + &op_data->resolved_axis_idx); + + micro_context->DeallocateTempTfLiteTensor(input); + micro_context->DeallocateTempTfLiteTensor(output); + micro_context->DeallocateTempTfLiteTensor(axis); + return kTfLiteOk; +} + +TfLiteStatus PrepareMeanOrSumHelper(TfLiteContext* context, TfLiteNode* node, + OpDataReduce* op_data) { + MicroContext* micro_context = GetMicroContext(context); + TfLiteTensor* input = micro_context->AllocateTempInputTensor(node, 0); + TfLiteTensor* output = micro_context->AllocateTempOutputTensor(node, 0); + TfLiteTensor* axis = micro_context->AllocateTempInputTensor(node, 1); + if (input->type == kTfLiteInt8 || input->type == kTfLiteInt16) { + const double real_multiplier = static_cast(input->params.scale) / + static_cast(output->params.scale); + QuantizeMultiplier(real_multiplier, &op_data->multiplier, &op_data->shift); + } + + int output_size = NumElements(output); + op_data->num_axis = NumElements(axis); + + if (input->type == kTfLiteInt8 || input->type == kTfLiteInt16) { + context->RequestScratchBufferInArena(context, output_size * 
sizeof(int32_t), + &op_data->temp_buffer_idx); + op_data->input_zp = input->params.zero_point; + op_data->input_scale = input->params.scale; + op_data->output_zp = output->params.zero_point; + op_data->output_scale = output->params.scale; + } + + TF_LITE_ENSURE_OK( + context, + PrepareSimple(context, node, &(op_data->multiplier), &(op_data->shift))); + // TODO(b/144955155): Support uint8_t(b/144955155) and int8_t(b/144955018) + micro_context->DeallocateTempTfLiteTensor(input); + micro_context->DeallocateTempTfLiteTensor(output); + micro_context->DeallocateTempTfLiteTensor(axis); + return kTfLiteOk; +} + +void ResolveAxis(const int* axis_data, int axis_count, + tflite::MeanParams* op_params) { + int i = 0; + for (; i < axis_count; ++i) { + op_params->axis[i] = static_cast(axis_data[i]); + } + for (; i < 4; ++i) { + op_params->axis[i] = 1; + } + op_params->axis_count = axis_count; +} + +template +TfLiteStatus QuantizedMeanOrSum(TfLiteContext* context, TfLiteNode* node, + int* temp_index, int* resolved_axis, + int32_t* temp_sum, OpDataReduce* op_data, + bool compute_sum) { + const TfLiteEvalTensor* input = tflite::micro::GetEvalInput(context, node, 0); + const TfLiteEvalTensor* axis = tflite::micro::GetEvalInput(context, node, 1); + TfLiteEvalTensor* output = tflite::micro::GetEvalOutput(context, node, 0); + TfLiteReducerParams* params = + static_cast(node->builtin_data); + + bool result = reference_ops::QuantizedMeanOrSumExtraArgs( + tflite::micro::GetTensorData(input), op_data->input_zp, + op_data->input_scale, &input->dims->data[0], input->dims->size, + tflite::micro::GetTensorData(output), op_data->output_scale, + op_data->multiplier, op_data->shift, op_data->output_zp, + &output->dims->data[0], output->dims->size, + tflite::micro::GetTensorData(axis), op_data->num_axis, + params->keep_dims, temp_index, resolved_axis, temp_sum, compute_sum); + TF_LITE_ENSURE(context, result); + + return kTfLiteOk; +} + +template +TfLiteStatus EvalIntegerMean(TfLiteContext* 
context, TfLiteNode* node, + int num_axis, OpDataReduce* op_data, + int* temp_index, int* resolved_axis) { + int32_t* temp_sum = static_cast( + context->GetScratchBuffer(context, op_data->temp_buffer_idx)); + + QuantizedMeanOrSum(context, node, temp_index, resolved_axis, + temp_sum, op_data, /*compute_sum=*/false); + + return kTfLiteOk; +} + +TfLiteStatus EvalMeanHelper(TfLiteContext* context, TfLiteNode* node, + OpDataReduce* op_data) { + const TfLiteEvalTensor* input = tflite::micro::GetEvalInput(context, node, 0); + const TfLiteEvalTensor* axis = tflite::micro::GetEvalInput(context, node, 1); + TfLiteEvalTensor* output = tflite::micro::GetEvalOutput(context, node, 0); + TfLiteReducerParams* params = + reinterpret_cast(node->builtin_data); + + int num_axis = static_cast(ElementCount(*axis->dims)); + int temp_index[kMaxNumberOfAxis]; + int resolved_axis[kMaxNumberOfReducedAxis]; + + switch (input->type) { + case kTfLiteFloat32: { + tflite::MeanParams op_params; + ResolveAxis(tflite::micro::GetTensorData(axis), num_axis, + &op_params); + + // Special case mean implementation exists for 4D mean across axes 1 + // and 2. + bool special_case_4d_axes_1_and_2 = + input->dims->size == 4 && op_params.axis_count == 2 && + ((op_params.axis[0] == 1 && op_params.axis[1] == 2) || + (op_params.axis[0] == 2 && op_params.axis[1] == 1)); + + // Defer to specialized implementation for 4D Mean across axes 1 & 2. 
+ if (params->keep_dims && special_case_4d_axes_1_and_2) { + reference_ops::Mean(op_params, tflite::micro::GetTensorShape(input), + tflite::micro::GetTensorData(input), + tflite::micro::GetTensorShape(output), + tflite::micro::GetTensorData(output)); + } else { + TF_LITE_ENSURE( + context, + reference_ops::Mean( + tflite::micro::GetTensorData(input), input->dims->data, + input->dims->size, tflite::micro::GetTensorData(output), + output->dims->data, output->dims->size, + tflite::micro::GetTensorData(axis), num_axis, + params->keep_dims, temp_index, resolved_axis, + tflite::micro::GetTensorData(output))); + } + } break; + case kTfLiteInt8: { + TF_LITE_ENSURE_OK( + context, EvalIntegerMean(context, node, num_axis, op_data, + temp_index, resolved_axis)); + } break; + case kTfLiteInt16: { + TF_LITE_ENSURE_OK( + context, EvalIntegerMean(context, node, num_axis, op_data, + temp_index, resolved_axis)); + } break; + default: + TF_LITE_ENSURE_MSG(context, false, + "Currently, only float32, int8 or int16 input type " + "is supported."); + } + return kTfLiteOk; +} + +TfLiteStatus EvalMaxHelper(TfLiteContext* context, TfLiteNode* node, + OpDataReduce* op_data) { + const TfLiteEvalTensor* input = tflite::micro::GetEvalInput(context, node, 0); + const TfLiteEvalTensor* axis = tflite::micro::GetEvalInput(context, node, 1); + TfLiteEvalTensor* output = tflite::micro::GetEvalOutput(context, node, 0); + TF_LITE_ENSURE_TYPES_EQ(context, input->type, output->type); + TfLiteReducerParams* params = + static_cast(node->builtin_data); + + // Interpret an axis tensor with null dimensions as a scalar + int num_axis = static_cast(ElementCount(*axis->dims)); + int* temp_buffer = static_cast( + context->GetScratchBuffer(context, op_data->temp_buffer_idx)); + int* resolved_axis = static_cast( + context->GetScratchBuffer(context, op_data->resolved_axis_idx)); + switch (input->type) { + case kTfLiteFloat32: + TF_LITE_ENSURE( + context, + reference_ops::ReduceGeneric( + 
tflite::micro::GetTensorData(input), input->dims->data, + input->dims->size, tflite::micro::GetTensorData(output), + output->dims->data, output->dims->size, + tflite::micro::GetTensorData(axis), num_axis, + params->keep_dims, temp_buffer, resolved_axis, + std::numeric_limits::lowest(), + [](const float current, const float in) -> float { + return (in > current) ? in : current; + })); + break; + case kTfLiteInt8: + TF_LITE_ENSURE_EQ(context, static_cast(op_data->input_scale), + static_cast(op_data->output_scale)); + TF_LITE_ENSURE_EQ(context, op_data->input_zp, op_data->output_zp); + TF_LITE_ENSURE( + context, + reference_ops::ReduceGeneric( + tflite::micro::GetTensorData(input), input->dims->data, + input->dims->size, tflite::micro::GetTensorData(output), + output->dims->data, output->dims->size, + tflite::micro::GetTensorData(axis), num_axis, + params->keep_dims, temp_buffer, resolved_axis, + std::numeric_limits::lowest(), + [](const int8_t current, const int8_t in) -> int8_t { + return (in > current) ? in : current; + })); + break; + default: + MicroPrintf("Only float32 and int8 types are supported."); + return kTfLiteError; + } + return kTfLiteOk; +} + +TfLiteStatus EvalSumHelper(TfLiteContext* context, TfLiteNode* node, + OpDataReduce* op_data) { + const TfLiteEvalTensor* input = tflite::micro::GetEvalInput(context, node, 0); + const TfLiteEvalTensor* axis = tflite::micro::GetEvalInput(context, node, 1); + TfLiteEvalTensor* output = tflite::micro::GetEvalOutput(context, node, 0); + TF_LITE_ENSURE_TYPES_EQ(context, input->type, output->type); + TfLiteReducerParams* params = + static_cast(node->builtin_data); + + // Interpret an axis tensor with null dimensions as a scalar. 
+ int num_axis = static_cast(ElementCount(*axis->dims)); + int temp_index[kMaxNumberOfAxis]; + int resolved_axis[kMaxNumberOfReducedAxis]; + + switch (input->type) { + case kTfLiteFloat32: { + TF_LITE_ENSURE( + context, + reference_ops::ReduceGeneric( + tflite::micro::GetTensorData(input), input->dims->data, + input->dims->size, tflite::micro::GetTensorData(output), + output->dims->data, output->dims->size, + tflite::micro::GetTensorData(axis), num_axis, + params->keep_dims, temp_index, resolved_axis, /*init_value=*/0.f, + [](const float current, const float in) -> float { + return in + current; + })); + } break; + case kTfLiteInt8: { + int32_t* temp_sum = static_cast( + context->GetScratchBuffer(context, op_data->temp_buffer_idx)); + QuantizedMeanOrSum(context, node, temp_index, resolved_axis, + temp_sum, op_data, /*compute_sum=*/true); + } break; + case kTfLiteInt16: { + int32_t* temp_sum = static_cast( + context->GetScratchBuffer(context, op_data->temp_buffer_idx)); + QuantizedMeanOrSum(context, node, temp_index, resolved_axis, + temp_sum, op_data, /*compute_sum=*/true); + } break; + default: + MicroPrintf("Only float32, int8, and int16 types are supported."); + return kTfLiteError; + } + return kTfLiteOk; +} + +} // namespace tflite diff --git a/tensorflow/lite/micro/kernels/reduce_test.cc b/tensorflow/lite/micro/kernels/reduce_test.cc new file mode 100644 index 0000000..8e53237 --- /dev/null +++ b/tensorflow/lite/micro/kernels/reduce_test.cc @@ -0,0 +1,921 @@ +/* Copyright 2019 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#include "tensorflow/lite/micro/kernels/reduce.h" + +#include "tensorflow/lite/c/builtin_op_data.h" +#include "tensorflow/lite/c/common.h" +#include "tensorflow/lite/micro/kernels/kernel_runner.h" +#include "tensorflow/lite/micro/test_helpers.h" +#include "tensorflow/lite/micro/testing/micro_test.h" + +namespace tflite { +namespace testing { +namespace { + +// Common 2D inputs, outputs and axis. +static const int kInputElements2D = 8; +static int kInputShape2D[] = {2, 2, 4}; +static const float kInputData2D[] = {1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0}; + +static int kAxisShape2D[] = {1, 1}; +static const int32_t kAxisData2D[] = {1}; + +static const int kOutputElements2D = 2; +static int kOutputShape2D[] = {2, 1, 2}; +static const float kGoldenData2D[] = {2.5, 6.5}; + +static const float kGoldenDataSum2D[] = {10.0, 26.0}; + +// Common 3D inputs, outputs and axis. +static const int kInputElements3D = 8; +static int kInputShape3D[] = {3, 2, 2, 2}; +static const float kInputData3D[] = {1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0}; + +static int kAxisShape3D[] = {1, 2}; +static const int32_t kAxisData3D[] = {1, 2}; + +static const int kOutputElements3D = 2; +static int kOutputShape3D[] = {2, 1, 2}; +static const float kGoldenData3D[] = {2.5, 6.5}; + +static const float kGoldenDataSum3D[] = {10.0, 26.0}; + +// Common 4D inputs, outputs and axis. 
+static const int kInputElements4D = 24; +static int kInputShape4D[] = {4, 2, 2, 3, 2}; +static const float kInputData4D[] = { + 1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0, 9.0, 10.0, 11.0, 12.0, + 13.0, 14.0, 15.0, 16.0, 17.0, 18.0, 19.0, 20.0, 21.0, 22.0, 23.0, 24.0}; + +static int kAxisShape4D[] = {1, 2}; +static const int32_t kAxisData4D[] = {1, 2}; + +static const int kOutputElements4D = 4; +static int kOutputShape4D[] = {4, 2, 1, 1, 2}; +static const float kGoldenData4D[] = {6, 7, 18, 19}; + +static const float kGoldenDataSum4D[] = {36, 42, 108, 114}; + +template +TfLiteStatus ValidateReduceGoldens(TfLiteTensor* tensors, int tensors_size, + const T* expected_output_data, + T* output_data, int output_length, + const TFLMRegistration& registration, + TfLiteReducerParams* params, + float tolerance = 1e-5) { + int inputs_array_data[] = {2, 0, 1}; + TfLiteIntArray* inputs_array = IntArrayFromInts(inputs_array_data); + int outputs_array_data[] = {1, 2}; + TfLiteIntArray* outputs_array = IntArrayFromInts(outputs_array_data); + + micro::KernelRunner runner(registration, tensors, tensors_size, inputs_array, + outputs_array, params); + + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, runner.InitAndPrepare()); + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, runner.Invoke()); + + for (int i = 0; i < output_length; ++i) { + TF_LITE_MICRO_EXPECT_NEAR(expected_output_data[i], output_data[i], + tolerance); + } + return kTfLiteOk; +} + +void TestReduceOpFloat(int* input_dims_data, const float* input_data, + int* axis_dims_data, const int32_t* axis_data, + int* output_dims_data, float* output_data, + const float* expected_output_data, + const TFLMRegistration& registration, + TfLiteReducerParams* params, float tolerance = 1e-5) { + TfLiteIntArray* input_dims = IntArrayFromInts(input_dims_data); + TfLiteIntArray* axis_dims = IntArrayFromInts(axis_dims_data); + TfLiteIntArray* output_dims = IntArrayFromInts(output_dims_data); + const int output_dims_count = ElementCount(*output_dims); + + constexpr int 
num_of_inputs = 2; // input and axis + constexpr int num_of_outputs = 1; // output + + constexpr int tensors_size = num_of_inputs + num_of_outputs; + TfLiteTensor tensors[tensors_size] = { + CreateTensor(input_data, input_dims), + CreateTensor(axis_data, axis_dims), + CreateTensor(output_data, output_dims), + }; + + TF_LITE_MICRO_EXPECT_EQ( + kTfLiteOk, ValidateReduceGoldens( + tensors, tensors_size, expected_output_data, output_data, + output_dims_count, registration, params, tolerance)); +} + +template +void TestReduceOpQuantized(int* input_dims_data, const float* input_data, + T* input_data_quant, float input_scale, + int input_zero_point, int* axis_dims_data, + const int32_t* axis_data, int* output_dims_data, + const float* expected_output_data, + T* output_data_quant, T* expected_output_data_quant, + float output_scale, int output_zero_point, + const TFLMRegistration& registration, + TfLiteReducerParams* params, + float tolerance = 0.01) { + // Convert dimesion arguments to TfLiteArrays + TfLiteIntArray* input_dims = IntArrayFromInts(input_dims_data); + TfLiteIntArray* axis_dims = IntArrayFromInts(axis_dims_data); + TfLiteIntArray* output_dims = IntArrayFromInts(output_dims_data); + + // Get number of elements in input and output tensors + const int output_dims_count = ElementCount(*output_dims); + + // Initialize tensors + constexpr int tensors_size = 3; + TfLiteTensor tensors[] = { + CreateQuantizedTensor(input_data, input_data_quant, input_dims, + input_scale, input_zero_point), + CreateTensor(axis_data, axis_dims), + CreateQuantizedTensor(output_data_quant, output_dims, output_scale, + output_zero_point), + }; + + // Quantize expected output + tflite::Quantize(expected_output_data, expected_output_data_quant, + output_dims_count, output_scale, output_zero_point); + + TF_LITE_MICRO_EXPECT_EQ( + kTfLiteOk, + ValidateReduceGoldens(tensors, tensors_size, expected_output_data_quant, + output_data_quant, output_dims_count, registration, + params, tolerance)); +} 
+ +} // namespace +} // namespace testing +} // namespace tflite + +TF_LITE_MICRO_TESTS_BEGIN + +TF_LITE_MICRO_TEST(MeanFloat2DKeepDims) { + float output_data[tflite::testing::kOutputElements2D]; + + TfLiteReducerParams params = {true}; + + tflite::testing::TestReduceOpFloat( + tflite::testing::kInputShape2D, tflite::testing::kInputData2D, + tflite::testing::kAxisShape2D, tflite::testing::kAxisData2D, + tflite::testing::kOutputShape2D, output_data, + tflite::testing::kGoldenData2D, tflite::Register_MEAN(), ¶ms); +} + +TF_LITE_MICRO_TEST(MeanInt82DKeepDims) { + int8_t expected_output_data_quant[tflite::testing::kOutputElements2D]; + int8_t output_data_quant[tflite::testing::kOutputElements2D]; + int8_t input_data_quant[tflite::testing::kInputElements2D]; + + float input_scale = 0.5f; + int input_zero_point = 0; + float output_scale = 0.5f; + int output_zero_point = 0; + + TfLiteReducerParams params = { + true // keep_dims + }; + + tflite::testing::TestReduceOpQuantized( + tflite::testing::kInputShape2D, tflite::testing::kInputData2D, + input_data_quant, input_scale, input_zero_point, + tflite::testing::kAxisShape2D, tflite::testing::kAxisData2D, + tflite::testing::kOutputShape2D, tflite::testing::kGoldenData2D, + output_data_quant, expected_output_data_quant, output_scale, + output_zero_point, tflite::Register_MEAN(), ¶ms, 1.0); +} + +TF_LITE_MICRO_TEST(MeanInt162DKeepDims) { + int16_t expected_output_data_quant[tflite::testing::kOutputElements2D]; + int16_t output_data_quant[tflite::testing::kOutputElements2D]; + int16_t input_data_quant[tflite::testing::kInputElements2D]; + + float input_scale = 0.5f; + int input_zero_point = 0; + float output_scale = 0.5f; + int output_zero_point = 0; + + TfLiteReducerParams params = { + true // keep_dims + }; + + tflite::testing::TestReduceOpQuantized( + tflite::testing::kInputShape2D, tflite::testing::kInputData2D, + input_data_quant, input_scale, input_zero_point, + tflite::testing::kAxisShape2D, tflite::testing::kAxisData2D, 
+ tflite::testing::kOutputShape2D, tflite::testing::kGoldenData2D, + output_data_quant, expected_output_data_quant, output_scale, + output_zero_point, tflite::Register_MEAN(), ¶ms, 1.0); +} + +TF_LITE_MICRO_TEST(MeanFloat3DKeepDims) { + float output_data[tflite::testing::kOutputElements3D]; + + TfLiteReducerParams params = {true}; + + tflite::testing::TestReduceOpFloat( + tflite::testing::kInputShape3D, tflite::testing::kInputData3D, + tflite::testing::kAxisShape3D, tflite::testing::kAxisData3D, + tflite::testing::kOutputShape3D, output_data, + tflite::testing::kGoldenData3D, tflite::Register_MEAN(), ¶ms); +} + +TF_LITE_MICRO_TEST(MeanInt83DKeepDims) { + int8_t expected_output_data_quant[tflite::testing::kOutputElements3D]; + int8_t output_data_quant[tflite::testing::kOutputElements3D]; + int8_t input_data_quant[tflite::testing::kInputElements3D]; + + float input_scale = 0.5f; + int input_zero_point = 0; + float output_scale = 0.5f; + int output_zero_point = 0; + + TfLiteReducerParams params = { + true // keep_dims + }; + + tflite::testing::TestReduceOpQuantized( + tflite::testing::kInputShape3D, tflite::testing::kInputData3D, + input_data_quant, input_scale, input_zero_point, + tflite::testing::kAxisShape3D, tflite::testing::kAxisData3D, + tflite::testing::kOutputShape3D, tflite::testing::kGoldenData3D, + output_data_quant, expected_output_data_quant, output_scale, + output_zero_point, tflite::Register_MEAN(), ¶ms, 1.0); +} + +TF_LITE_MICRO_TEST(MeanInt163DKeepDims) { + int16_t expected_output_data_quant[tflite::testing::kOutputElements3D]; + int16_t output_data_quant[tflite::testing::kOutputElements3D]; + int16_t input_data_quant[tflite::testing::kInputElements3D]; + + float input_scale = 0.5f; + int input_zero_point = 0; + float output_scale = 0.5f; + int output_zero_point = 0; + + TfLiteReducerParams params = { + true // keep_dims + }; + + tflite::testing::TestReduceOpQuantized( + tflite::testing::kInputShape3D, tflite::testing::kInputData3D, + 
input_data_quant, input_scale, input_zero_point, + tflite::testing::kAxisShape3D, tflite::testing::kAxisData3D, + tflite::testing::kOutputShape3D, tflite::testing::kGoldenData3D, + output_data_quant, expected_output_data_quant, output_scale, + output_zero_point, tflite::Register_MEAN(), ¶ms, 1.0); +} + +TF_LITE_MICRO_TEST(MeanFloat4DKeepDims) { + float output_data[tflite::testing::kOutputElements4D]; + + TfLiteReducerParams params = { + true // keep_dims + }; + + tflite::testing::TestReduceOpFloat( + tflite::testing::kInputShape4D, tflite::testing::kInputData4D, + tflite::testing::kAxisShape4D, tflite::testing::kAxisData4D, + tflite::testing::kOutputShape4D, output_data, + tflite::testing::kGoldenData4D, tflite::Register_MEAN(), ¶ms); +} + +TF_LITE_MICRO_TEST(MeanInt84DKeepDims) { + int8_t expected_output_data_quant[tflite::testing::kOutputElements4D]; + int8_t output_data_quant[tflite::testing::kOutputElements4D]; + int8_t input_data_quant[tflite::testing::kInputElements4D]; + + float input_scale = 0.5f; + int input_zero_point = 0; + float output_scale = 0.5f; + int output_zero_point = 0; + + TfLiteReducerParams params = { + true // keep_dims + }; + + tflite::testing::TestReduceOpQuantized( + tflite::testing::kInputShape4D, tflite::testing::kInputData4D, + input_data_quant, input_scale, input_zero_point, + tflite::testing::kAxisShape4D, tflite::testing::kAxisData4D, + tflite::testing::kOutputShape4D, tflite::testing::kGoldenData4D, + output_data_quant, expected_output_data_quant, output_scale, + output_zero_point, tflite::Register_MEAN(), ¶ms, 1.0); +} + +TF_LITE_MICRO_TEST(MeanInt164DKeepDims) { + int16_t expected_output_data_quant[tflite::testing::kOutputElements4D]; + int16_t output_data_quant[tflite::testing::kOutputElements4D]; + int16_t input_data_quant[tflite::testing::kInputElements4D]; + + float input_scale = 0.5f; + int input_zero_point = 0; + float output_scale = 0.5f; + int output_zero_point = 0; + + TfLiteReducerParams params = { + true // keep_dims + 
}; + + tflite::testing::TestReduceOpQuantized( + tflite::testing::kInputShape4D, tflite::testing::kInputData4D, + input_data_quant, input_scale, input_zero_point, + tflite::testing::kAxisShape4D, tflite::testing::kAxisData4D, + tflite::testing::kOutputShape4D, tflite::testing::kGoldenData4D, + output_data_quant, expected_output_data_quant, output_scale, + output_zero_point, tflite::Register_MEAN(), ¶ms, 1.0); +} + +TF_LITE_MICRO_TEST(MeanFloat4DWithoutKeepDims) { + int kOutputShape4D[] = {2, 2, 2}; + float output_data[tflite::testing::kOutputElements4D]; + TfLiteReducerParams params = { + false // keep_dims + }; + + tflite::testing::TestReduceOpFloat( + tflite::testing::kInputShape4D, tflite::testing::kInputData4D, + tflite::testing::kAxisShape4D, tflite::testing::kAxisData4D, + kOutputShape4D, output_data, tflite::testing::kGoldenData4D, + tflite::Register_MEAN(), ¶ms); +} + +TF_LITE_MICRO_TEST(MeanInt84DWithoutKeepDims) { + int8_t expected_output_data_quant[tflite::testing::kOutputElements4D]; + int8_t output_data_quant[tflite::testing::kOutputElements4D]; + int8_t input_data_quant[tflite::testing::kInputElements4D]; + + int kOutputShape4D[] = {2, 2, 2}; + TfLiteReducerParams params = { + false // keep_dims + }; + float input_scale = 0.5f; + int input_zero_point = 0; + float output_scale = 0.5f; + int output_zero_point = 0; + + tflite::testing::TestReduceOpQuantized( + tflite::testing::kInputShape4D, tflite::testing::kInputData4D, + input_data_quant, input_scale, input_zero_point, + tflite::testing::kAxisShape4D, tflite::testing::kAxisData4D, + kOutputShape4D, tflite::testing::kGoldenData4D, output_data_quant, + expected_output_data_quant, output_scale, output_zero_point, + tflite::Register_MEAN(), ¶ms, 1.0); +} + +TF_LITE_MICRO_TEST(MeanInt164DWithoutKeepDims) { + int16_t expected_output_data_quant[tflite::testing::kOutputElements4D]; + int16_t output_data_quant[tflite::testing::kOutputElements4D]; + int16_t input_data_quant[tflite::testing::kInputElements4D]; + 
+ int kOutputShape4D[] = {2, 2, 2}; + TfLiteReducerParams params = { + false // keep_dims + }; + float input_scale = 0.5f; + int input_zero_point = 0; + float output_scale = 0.5f; + int output_zero_point = 0; + + tflite::testing::TestReduceOpQuantized( + tflite::testing::kInputShape4D, tflite::testing::kInputData4D, + input_data_quant, input_scale, input_zero_point, + tflite::testing::kAxisShape4D, tflite::testing::kAxisData4D, + kOutputShape4D, tflite::testing::kGoldenData4D, output_data_quant, + expected_output_data_quant, output_scale, output_zero_point, + tflite::Register_MEAN(), ¶ms, 1.0); +} + +TF_LITE_MICRO_TEST(MeanInt164DWithoutKeepDimsDifferentScaleAndZeroPoint) { + int16_t expected_output_data_quant[tflite::testing::kOutputElements4D]; + int16_t output_data_quant[tflite::testing::kOutputElements4D]; + int16_t input_data_quant[tflite::testing::kInputElements4D]; + + int kOutputShape4D[] = {2, 2, 2}; + TfLiteReducerParams params = { + false // keep_dims + }; + float input_scale = 0.5f; + int input_zero_point = 0; + float output_scale = 0.7f; + int output_zero_point = 0; + + tflite::testing::TestReduceOpQuantized( + tflite::testing::kInputShape4D, tflite::testing::kInputData4D, + input_data_quant, input_scale, input_zero_point, + tflite::testing::kAxisShape4D, tflite::testing::kAxisData4D, + kOutputShape4D, tflite::testing::kGoldenData4D, output_data_quant, + expected_output_data_quant, output_scale, output_zero_point, + tflite::Register_MEAN(), ¶ms, 1.0); +} + +TF_LITE_MICRO_TEST(MeanFloat4DWithoutKeepDimsWithPrecision) { + int kInputShape4D[] = {4, 2, 2, 3, 1}; + const float kInputData4D[] = {1.0, 24.0, 13.0, 3.0, 9.0, 17.0, + 11.0, 36.0, 14.0, 19.0, 17.0, 22.0}; + const int kOutputElements4D = 2; + int kOutputShape4D[] = {2, 2, 1}; + const float kGoldenData4D[] = {11.166667, 19.833334}; + float output_data[kOutputElements4D]; + TfLiteReducerParams params = { + false // keep_dims + }; + + tflite::testing::TestReduceOpFloat( + kInputShape4D, kInputData4D, 
tflite::testing::kAxisShape4D, + tflite::testing::kAxisData4D, kOutputShape4D, output_data, kGoldenData4D, + tflite::Register_MEAN(), ¶ms); +} + +TF_LITE_MICRO_TEST(FloatMaxOpTestNotKeepDims) { + int input_shape[] = {3, 4, 3, 2}; + const float input_data[] = {1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0, + 9.0, 10.0, 11.0, 12.0, 13.0, 14.0, 15.0, 16.0, + 17.0, 18.0, 19.0, 20.0, 21.0, 22.0, 23.0, 24.0}; + int axis_shape[] = {1, 4}; + const int32_t axis_data[] = {1, 0, -3, -3}; + int output_shape[] = {1, 2}; + const float expected_output_data[] = {23, 24}; + float output_data[2]; + + TfLiteReducerParams params = {false}; + + tflite::testing::TestReduceOpFloat( + input_shape, input_data, axis_shape, axis_data, output_shape, output_data, + expected_output_data, tflite::Register_REDUCE_MAX(), ¶ms); +} + +TF_LITE_MICRO_TEST(FloatMaxOpTestKeepDims) { + int input_shape[] = {3, 4, 3, 2}; + const float input_data[] = {1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0, + 9.0, 10.0, 11.0, 12.0, 13.0, 14.0, 15.0, 16.0, + 17.0, 18.0, 19.0, 20.0, 21.0, 22.0, 23.0, 24.0}; + int axis_shape[] = {1, 2}; + const int32_t axis_data[] = {0, 2}; + int output_shape[] = {1, 3}; + const float expected_output_data[] = {20, 22, 24}; + float output_data[3]; + + TfLiteReducerParams params = {true}; + + tflite::testing::TestReduceOpFloat( + input_shape, input_data, axis_shape, axis_data, output_shape, output_data, + expected_output_data, tflite::Register_REDUCE_MAX(), ¶ms); +} + +TF_LITE_MICRO_TEST(Int8MaxOpTestKeepDims) { + int input_shape[] = {3, 1, 3, 2}; + const float input_data[] = {0.4, 0.2, 0.3, 0.4, 0.5, 0.6}; + int axis_shape[] = {1, 1}; + const int32_t axis_data[] = {1, 1}; + int output_shape[] = {1, 2}; + const float expected_output_data[] = {0.5, 0.6}; + + float input_scale = 2 / 255.0; + int input_zp = 0; + + TfLiteReducerParams params = {true}; + + int8_t input_data_quant[6]; + int8_t output_data_quant[2]; + int8_t expected_output_data_quant[2]; + + tflite::testing::TestReduceOpQuantized( + 
input_shape, input_data, input_data_quant, input_scale, input_zp, + axis_shape, axis_data, output_shape, expected_output_data, + output_data_quant, expected_output_data_quant, input_scale, input_zp, + tflite::Register_REDUCE_MAX(), ¶ms); +} + +TF_LITE_MICRO_TEST(Int8MaxOpTestWithoutKeepDims) { + int input_shape[] = {3, 1, 3, 2}; + const float input_data[] = {0.4, 0.2, 0.3, 0.4, 0.5, 0.6}; + int axis_shape[] = {1, 1}; + const int32_t axis_data[] = {1, 1}; + int output_shape[] = {1, 2}; + const float expected_output_data[] = {0.5, 0.6}; + + float input_scale = 2 / 255.0; + int input_zp = 0; + float output_scale = 2 / 255.0; + int output_zp = 0; + + TfLiteReducerParams params = {false}; + + int8_t input_data_quant[6]; + int8_t output_data_quant[2]; + int8_t expected_output_data_quant[2]; + + tflite::testing::TestReduceOpQuantized( + input_shape, input_data, input_data_quant, input_scale, input_zp, + axis_shape, axis_data, output_shape, expected_output_data, + output_data_quant, expected_output_data_quant, output_scale, output_zp, + tflite::Register_REDUCE_MAX(), ¶ms); +} + +TF_LITE_MICRO_TEST(MeanInt84DWithoutKeepDimsWithPrecision) { + int kInputShape4D[] = {4, 2, 2, 3, 1}; + const float kInputData4D[] = {1.0, 24.0, 13.0, 3.0, 9.0, 17.0, + 11.0, 36.0, 14.0, 19.0, 17.0, 22.0}; + int kOutputShape4D[] = {2, 2, 1}; + const float kGoldenData4D[] = {11.166667, 19.833334}; + TfLiteReducerParams params = { + false // keep_dims + }; + float input_scale = 0.5f; + int input_zero_point = 0; + float output_scale = 0.5f; + int output_zero_point = 0; + + int8_t output_data_quant[2]; + int8_t expected_output_data_quant[2]; + int8_t input_data_quant[12]; + + tflite::testing::TestReduceOpQuantized( + kInputShape4D, kInputData4D, input_data_quant, input_scale, + input_zero_point, tflite::testing::kAxisShape4D, + tflite::testing::kAxisData4D, kOutputShape4D, kGoldenData4D, + output_data_quant, expected_output_data_quant, output_scale, + output_zero_point, tflite::Register_MEAN(), ¶ms, 
1.0); +} + +TF_LITE_MICRO_TEST(SumFloatFlatten2ReduceDims) { + int input_shape[] = {3, 4, 3, 2}; + int output_shape[] = {1, 4}; + int axis_shape[] = {1, 2}; + int32_t axis_data[] = {2, 1}; + float input_data[] = {1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0, + 9.0, 10.0, 11.0, 12.0, 13.0, 14.0, 15.0, 16.0, + 17.0, 18.0, 19.0, 20.0, 21.0, 22.0, 23.0, 24.0}; + float expected_output[] = {21.0, 57.0, 93.0, 129.0}; + float actual_output_data[4]; + + TfLiteReducerParams params = {false}; + + tflite::testing::TestReduceOpFloat( + input_shape, input_data, axis_shape, axis_data, output_shape, + actual_output_data, expected_output, tflite::Register_SUM(), ¶ms); +} + +TF_LITE_MICRO_TEST(SumFloatFlatten2NonReduceDims) { + int input_shape[] = {3, 4, 3, 2}; + int output_shape[] = {1, 12}; + int axis_shape[] = {1, 1}; + int32_t axis_data[] = {2}; + float input_data[] = {1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0, + 9.0, 10.0, 11.0, 12.0, 13.0, 14.0, 15.0, 16.0, + 17.0, 18.0, 19.0, 20.0, 21.0, 22.0, 23.0, 24.0}; + float expected_output[] = {3.0, 7.0, 11.0, 15.0, 19.0, 23.0, + 27.0, 31.0, 35.0, 39.0, 43.0, 47.0}; + float actual_output_data[12]; + + TfLiteReducerParams params = {false}; + + tflite::testing::TestReduceOpFloat( + input_shape, input_data, axis_shape, axis_data, output_shape, + actual_output_data, expected_output, tflite::Register_SUM(), ¶ms); +} + +TF_LITE_MICRO_TEST(SumFloatFlatten2MiddleDims) { + int input_shape[] = {4, 2, 2, 3, 2}; + int output_shape[] = {2, 2, 2}; + int axis_shape[] = {1, 2}; + int32_t axis_data[] = {1, 2}; + float input_data[] = {1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0, + 9.0, 10.0, 11.0, 12.0, 13.0, 14.0, 15.0, 16.0, + 17.0, 18.0, 19.0, 20.0, 21.0, 22.0, 23.0, 24.0}; + float expected_output[] = {36.0, 42.0, 108.0, 114.0}; + float actual_output_data[4]; + + TfLiteReducerParams params = {false}; + + tflite::testing::TestReduceOpFloat( + input_shape, input_data, axis_shape, axis_data, output_shape, + actual_output_data, expected_output, tflite::Register_SUM(), 
¶ms); +} + +TF_LITE_MICRO_TEST(SumFloat2DKeepDims) { + float output_data[tflite::testing::kOutputElements2D]; + + TfLiteReducerParams params = {true}; + + tflite::testing::TestReduceOpFloat( + tflite::testing::kInputShape2D, tflite::testing::kInputData2D, + tflite::testing::kAxisShape2D, tflite::testing::kAxisData2D, + tflite::testing::kOutputShape2D, output_data, + tflite::testing::kGoldenDataSum2D, tflite::Register_SUM(), ¶ms); +} + +TF_LITE_MICRO_TEST(SumInt82DKeepDims) { + int8_t expected_output_data_quant[tflite::testing::kOutputElements2D]; + int8_t output_data_quant[tflite::testing::kOutputElements2D]; + int8_t input_data_quant[tflite::testing::kInputElements2D]; + + float input_scale = 0.5f; + int input_zero_point = 0; + float output_scale = 0.5f; + int output_zero_point = 0; + + TfLiteReducerParams params = { + true // keep_dims + }; + + tflite::testing::TestReduceOpQuantized( + tflite::testing::kInputShape2D, tflite::testing::kInputData2D, + input_data_quant, input_scale, input_zero_point, + tflite::testing::kAxisShape2D, tflite::testing::kAxisData2D, + tflite::testing::kOutputShape2D, tflite::testing::kGoldenDataSum2D, + output_data_quant, expected_output_data_quant, output_scale, + output_zero_point, tflite::Register_SUM(), ¶ms, 1.0); +} + +TF_LITE_MICRO_TEST(SumInt162DKeepDims) { + int16_t expected_output_data_quant[tflite::testing::kOutputElements2D]; + int16_t output_data_quant[tflite::testing::kOutputElements2D]; + int16_t input_data_quant[tflite::testing::kInputElements2D]; + + float input_scale = 0.5f; + int input_zero_point = 0; + float output_scale = 0.5f; + int output_zero_point = 0; + + TfLiteReducerParams params = { + true // keep_dims + }; + + tflite::testing::TestReduceOpQuantized( + tflite::testing::kInputShape2D, tflite::testing::kInputData2D, + input_data_quant, input_scale, input_zero_point, + tflite::testing::kAxisShape2D, tflite::testing::kAxisData2D, + tflite::testing::kOutputShape2D, tflite::testing::kGoldenDataSum2D, + 
output_data_quant, expected_output_data_quant, output_scale, + output_zero_point, tflite::Register_SUM(), ¶ms, 1.0); +} + +TF_LITE_MICRO_TEST(SumFloat3DKeepDims) { + float output_data[tflite::testing::kOutputElements3D]; + + TfLiteReducerParams params = {true}; + + tflite::testing::TestReduceOpFloat( + tflite::testing::kInputShape3D, tflite::testing::kInputData3D, + tflite::testing::kAxisShape3D, tflite::testing::kAxisData3D, + tflite::testing::kOutputShape3D, output_data, + tflite::testing::kGoldenDataSum3D, tflite::Register_SUM(), ¶ms); +} + +TF_LITE_MICRO_TEST(SumInt83DKeepDims) { + int8_t expected_output_data_quant[tflite::testing::kOutputElements3D]; + int8_t output_data_quant[tflite::testing::kOutputElements3D]; + int8_t input_data_quant[tflite::testing::kInputElements3D]; + + float input_scale = 0.5f; + int input_zero_point = 0; + float output_scale = 0.5f; + int output_zero_point = 0; + + TfLiteReducerParams params = { + true // keep_dims + }; + + tflite::testing::TestReduceOpQuantized( + tflite::testing::kInputShape3D, tflite::testing::kInputData3D, + input_data_quant, input_scale, input_zero_point, + tflite::testing::kAxisShape3D, tflite::testing::kAxisData3D, + tflite::testing::kOutputShape3D, tflite::testing::kGoldenDataSum3D, + output_data_quant, expected_output_data_quant, output_scale, + output_zero_point, tflite::Register_SUM(), ¶ms, 1.0); +} + +TF_LITE_MICRO_TEST(SumInt163DKeepDims) { + int16_t expected_output_data_quant[tflite::testing::kOutputElements3D]; + int16_t output_data_quant[tflite::testing::kOutputElements3D]; + int16_t input_data_quant[tflite::testing::kInputElements3D]; + + float input_scale = 0.5f; + int input_zero_point = 0; + float output_scale = 0.5f; + int output_zero_point = 0; + + TfLiteReducerParams params = { + true // keep_dims + }; + + tflite::testing::TestReduceOpQuantized( + tflite::testing::kInputShape3D, tflite::testing::kInputData3D, + input_data_quant, input_scale, input_zero_point, + tflite::testing::kAxisShape3D, 
tflite::testing::kAxisData3D, + tflite::testing::kOutputShape3D, tflite::testing::kGoldenDataSum3D, + output_data_quant, expected_output_data_quant, output_scale, + output_zero_point, tflite::Register_SUM(), ¶ms, 1.0); +} + +TF_LITE_MICRO_TEST(SumFloat4DKeepDims) { + float output_data[tflite::testing::kOutputElements4D]; + + TfLiteReducerParams params = { + true // keep_dims + }; + + tflite::testing::TestReduceOpFloat( + tflite::testing::kInputShape4D, tflite::testing::kInputData4D, + tflite::testing::kAxisShape4D, tflite::testing::kAxisData4D, + tflite::testing::kOutputShape4D, output_data, + tflite::testing::kGoldenDataSum4D, tflite::Register_SUM(), ¶ms); +} + +TF_LITE_MICRO_TEST(SumInt84DKeepDims) { + int8_t expected_output_data_quant[tflite::testing::kOutputElements4D]; + int8_t output_data_quant[tflite::testing::kOutputElements4D]; + int8_t input_data_quant[tflite::testing::kInputElements4D]; + + float input_scale = 1.f; + int input_zero_point = 0; + float output_scale = 1.f; + int output_zero_point = 0; + + TfLiteReducerParams params = { + true // keep_dims + }; + + tflite::testing::TestReduceOpQuantized( + tflite::testing::kInputShape4D, tflite::testing::kInputData4D, + input_data_quant, input_scale, input_zero_point, + tflite::testing::kAxisShape4D, tflite::testing::kAxisData4D, + tflite::testing::kOutputShape4D, tflite::testing::kGoldenDataSum4D, + output_data_quant, expected_output_data_quant, output_scale, + output_zero_point, tflite::Register_SUM(), ¶ms, 1.0); +} + +TF_LITE_MICRO_TEST(SumInt164DKeepDims) { + int16_t expected_output_data_quant[tflite::testing::kOutputElements4D]; + int16_t output_data_quant[tflite::testing::kOutputElements4D]; + int16_t input_data_quant[tflite::testing::kInputElements4D]; + + float input_scale = 0.5f; + int input_zero_point = 0; + float output_scale = 0.5f; + int output_zero_point = 0; + + TfLiteReducerParams params = { + true // keep_dims + }; + + tflite::testing::TestReduceOpQuantized( + tflite::testing::kInputShape4D, 
tflite::testing::kInputData4D, + input_data_quant, input_scale, input_zero_point, + tflite::testing::kAxisShape4D, tflite::testing::kAxisData4D, + tflite::testing::kOutputShape4D, tflite::testing::kGoldenDataSum4D, + output_data_quant, expected_output_data_quant, output_scale, + output_zero_point, tflite::Register_SUM(), ¶ms, 1.0); +} + +TF_LITE_MICRO_TEST(SumFloat4DWithoutKeepDims) { + int kOutputShape4D[] = {2, 2, 2}; + float output_data[tflite::testing::kOutputElements4D]; + TfLiteReducerParams params = { + false // keep_dims + }; + + tflite::testing::TestReduceOpFloat( + tflite::testing::kInputShape4D, tflite::testing::kInputData4D, + tflite::testing::kAxisShape4D, tflite::testing::kAxisData4D, + kOutputShape4D, output_data, tflite::testing::kGoldenDataSum4D, + tflite::Register_SUM(), ¶ms); +} + +TF_LITE_MICRO_TEST(SumInt84DWithoutKeepDims) { + int8_t expected_output_data_quant[tflite::testing::kOutputElements4D]; + int8_t output_data_quant[tflite::testing::kOutputElements4D]; + int8_t input_data_quant[tflite::testing::kInputElements4D]; + + int kOutputShape4D[] = {2, 2, 2}; + TfLiteReducerParams params = { + false // keep_dims + }; + float input_scale = 1.f; + int input_zero_point = 0; + float output_scale = 1.f; + int output_zero_point = 0; + + tflite::testing::TestReduceOpQuantized( + tflite::testing::kInputShape4D, tflite::testing::kInputData4D, + input_data_quant, input_scale, input_zero_point, + tflite::testing::kAxisShape4D, tflite::testing::kAxisData4D, + kOutputShape4D, tflite::testing::kGoldenDataSum4D, output_data_quant, + expected_output_data_quant, output_scale, output_zero_point, + tflite::Register_SUM(), ¶ms, 1.0); +} + +TF_LITE_MICRO_TEST(SumInt164DWithoutKeepDims) { + int16_t expected_output_data_quant[tflite::testing::kOutputElements4D]; + int16_t output_data_quant[tflite::testing::kOutputElements4D]; + int16_t input_data_quant[tflite::testing::kInputElements4D]; + + int kOutputShape4D[] = {2, 2, 2}; + TfLiteReducerParams params = { + false // 
keep_dims + }; + float input_scale = 0.5f; + int input_zero_point = 0; + float output_scale = 0.5f; + int output_zero_point = 0; + + tflite::testing::TestReduceOpQuantized( + tflite::testing::kInputShape4D, tflite::testing::kInputData4D, + input_data_quant, input_scale, input_zero_point, + tflite::testing::kAxisShape4D, tflite::testing::kAxisData4D, + kOutputShape4D, tflite::testing::kGoldenDataSum4D, output_data_quant, + expected_output_data_quant, output_scale, output_zero_point, + tflite::Register_SUM(), ¶ms, 1.0); +} + +TF_LITE_MICRO_TEST(SumInt164DWithoutKeepDimsDifferentScaleAndZeroPoint) { + int16_t expected_output_data_quant[tflite::testing::kOutputElements4D]; + int16_t output_data_quant[tflite::testing::kOutputElements4D]; + int16_t input_data_quant[tflite::testing::kInputElements4D]; + + int kOutputShape4D[] = {2, 2, 2}; + TfLiteReducerParams params = { + false // keep_dims + }; + float input_scale = 0.5f; + int input_zero_point = 0; + float output_scale = 0.7f; + int output_zero_point = 0; + + tflite::testing::TestReduceOpQuantized( + tflite::testing::kInputShape4D, tflite::testing::kInputData4D, + input_data_quant, input_scale, input_zero_point, + tflite::testing::kAxisShape4D, tflite::testing::kAxisData4D, + kOutputShape4D, tflite::testing::kGoldenDataSum4D, output_data_quant, + expected_output_data_quant, output_scale, output_zero_point, + tflite::Register_SUM(), ¶ms, 1.0); +} + +TF_LITE_MICRO_TEST(SumFloatSize1) { + int input_shape[] = {1, 1}; + int output_shape[] = {1, 1}; + int axis_shape[] = {1, 1}; + int32_t axis_data[] = {0}; + float input_data[] = {1.0}; + float expected_output[] = {1.0}; + float actual_output_data[1]; + + TfLiteReducerParams params = {false}; + + tflite::testing::TestReduceOpFloat( + input_shape, input_data, axis_shape, axis_data, output_shape, + actual_output_data, expected_output, tflite::Register_SUM(), ¶ms); +} + +TF_LITE_MICRO_TEST(SumFloat2DRedundantDims) { + int input_shape[] = {3, 1, 2, 4}; + int output_shape[] = {2, 
1, 4}; + int axis_shape[] = {1, 1}; + int32_t axis_data[] = {1}; + float input_data[] = {1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0}; + float expected_output[] = {6.0, 8.0, 10.0, 12.0}; + float actual_output_data[4]; + + TfLiteReducerParams params = {false}; + + tflite::testing::TestReduceOpFloat( + input_shape, input_data, axis_shape, axis_data, output_shape, + actual_output_data, expected_output, tflite::Register_SUM(), ¶ms); +} + +TF_LITE_MICRO_TEST(SumFloatScalar) { + int input_shape[] = {1, 1}; + int output_shape[] = {1, 1}; + int axis_shape[] = {1, 0}; + int32_t axis_data[] = {}; + float input_data[] = {1.0}; + float expected_output[] = {1.0}; + float actual_output_data[1]; + + TfLiteReducerParams params = {false}; + + tflite::testing::TestReduceOpFloat( + input_shape, input_data, axis_shape, axis_data, output_shape, + actual_output_data, expected_output, tflite::Register_SUM(), ¶ms); +} + +TF_LITE_MICRO_TESTS_END diff --git a/tensorflow/lite/micro/kernels/reshape.cc b/tensorflow/lite/micro/kernels/reshape.cc new file mode 100644 index 0000000..c734b96 --- /dev/null +++ b/tensorflow/lite/micro/kernels/reshape.cc @@ -0,0 +1,123 @@ +/* Copyright 2023 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/ + +#include + +#include "tensorflow/lite/c/builtin_op_data.h" +#include "tensorflow/lite/c/common.h" +#include "tensorflow/lite/kernels/internal/tensor_ctypes.h" +#include "tensorflow/lite/kernels/kernel_util.h" +#include "tensorflow/lite/kernels/op_macros.h" +#include "tensorflow/lite/micro/kernels/kernel_util.h" +#include "tensorflow/lite/micro/memory_helpers.h" +#include "tensorflow/lite/micro/micro_utils.h" + +namespace tflite { +namespace ops { +namespace micro { +namespace reshape { + +constexpr int kInputTensor = 0; +constexpr int kOutputTensor = 0; + +TfLiteStatus ReshapeOutput(TfLiteContext* context, TfLiteNode* node) { + MicroContext* micro_context = GetMicroContext(context); + + TfLiteTensor* input = + micro_context->AllocateTempInputTensor(node, kInputTensor); + TF_LITE_ENSURE(context, input != nullptr); + TfLiteTensor* output = + micro_context->AllocateTempOutputTensor(node, kOutputTensor); + TF_LITE_ENSURE(context, output != nullptr); + // Tensorflow's Reshape allows one of the shape components to have the + // special -1 value, meaning it will be calculated automatically based on the + // input. Here we calculate what that dimension should be so that the number + // of output elements in the same as the number of input elements. + int num_input_elements = NumElements(input); + TfLiteIntArray* output_shape = output->dims; + + if (NumInputs(node) == 1 && // Legacy scalar supported with params. + output_shape->size == 1 && output_shape->data[0] == 0) { + // Legacy tflite models use a shape parameter of [0] to indicate scalars, + // so adjust accordingly. TODO(b/111614235): Allow zero-sized buffers during + // toco conversion. 
+ output_shape->size = 0; + } + + int num_output_elements = 1; + int stretch_dim = -1; + for (int i = 0; i < output_shape->size; ++i) { + int value = output_shape->data[i]; + if (value == -1) { + TF_LITE_ENSURE_EQ(context, stretch_dim, -1); + stretch_dim = i; + } else { + num_output_elements *= value; + } + } + if (stretch_dim != -1) { + TfLiteEvalTensor* output_eval = + tflite::micro::GetEvalOutput(context, node, kOutputTensor); + TF_LITE_ENSURE_STATUS(tflite::micro::CreateWritableTensorDimsWithCopy( + context, output, output_eval)); + output_shape = output->dims; // output tensor dims were moved + output_shape->data[stretch_dim] = num_input_elements / num_output_elements; + num_output_elements *= output_shape->data[stretch_dim]; + } + + TF_LITE_ENSURE_TYPES_EQ(context, input->type, output->type); + TF_LITE_ENSURE_EQ(context, num_input_elements, num_output_elements); + + micro_context->DeallocateTempTfLiteTensor(input); + micro_context->DeallocateTempTfLiteTensor(output); + return kTfLiteOk; +} + +TfLiteStatus Prepare(TfLiteContext* context, TfLiteNode* node) { + TF_LITE_ENSURE(context, NumInputs(node) == 1 || NumInputs(node) == 2); + TF_LITE_ENSURE_EQ(context, NumOutputs(node), 1); + TF_LITE_ENSURE_EQ(context, ReshapeOutput(context, node), kTfLiteOk); + return kTfLiteOk; +} + +TfLiteStatus Eval(TfLiteContext* context, TfLiteNode* node) { + const TfLiteEvalTensor* input = + tflite::micro::GetEvalInput(context, node, kInputTensor); + TfLiteEvalTensor* output = + tflite::micro::GetEvalOutput(context, node, kOutputTensor); + + // TODO(b/162522304): storing input bytes in OpData increases some models + // significantly, possibly due to alignment issues. + size_t input_bytes; + TF_LITE_ENSURE_STATUS(TfLiteTypeSizeOf(input->type, &input_bytes)); + input_bytes *= ElementCount(*input->dims); + + // Do nothing for in-place reshape. + if (input->data.raw != output->data.raw) { + // Otherwise perform reshape with copy. 
+ memcpy(output->data.raw, input->data.raw, input_bytes); + } + return kTfLiteOk; +} + +} // namespace reshape + +TFLMRegistration Register_RESHAPE() { + return tflite::micro::RegisterOp(nullptr, reshape::Prepare, reshape::Eval); +} + +} // namespace micro +} // namespace ops +} // namespace tflite diff --git a/tensorflow/lite/micro/kernels/reshape_test.cc b/tensorflow/lite/micro/kernels/reshape_test.cc new file mode 100644 index 0000000..63074e2 --- /dev/null +++ b/tensorflow/lite/micro/kernels/reshape_test.cc @@ -0,0 +1,377 @@ +/* Copyright 2017 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#include + +#include + +#include "tensorflow/lite/c/common.h" +#include "tensorflow/lite/kernels/internal/tensor_ctypes.h" +#include "tensorflow/lite/micro/kernels/kernel_runner.h" +#include "tensorflow/lite/micro/micro_utils.h" +#include "tensorflow/lite/micro/test_helpers.h" +#include "tensorflow/lite/micro/testing/micro_test.h" + +namespace tflite { +namespace testing { +namespace { + +// TODO(b/162356196): Cleanup this unit test more. 
+ +template +void ValidateReshapeGoldens(TfLiteTensor* tensors, int tensors_size, + TfLiteIntArray* inputs_array, + TfLiteIntArray* outputs_array, + const T* expected_output, + const size_t expected_output_len, + int* expected_dims, const size_t expected_dims_len, + bool expect_failure) { + const TFLMRegistration registration = tflite::ops::micro::Register_RESHAPE(); + micro::KernelRunner runner(registration, tensors, tensors_size, inputs_array, + outputs_array, + /*builtin_data=*/nullptr); + + if (expect_failure) { + TF_LITE_MICRO_EXPECT_NE(kTfLiteOk, runner.InitAndPrepare()); + return; + } + + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, runner.InitAndPrepare()); + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, runner.Invoke()); + + TfLiteTensor* output_tensor = &tensors[outputs_array->data[0]]; + const T* output_data = GetTensorData(output_tensor); + for (size_t i = 0; i < expected_output_len; ++i) { + TF_LITE_MICRO_EXPECT_NEAR(expected_output[i], output_data[i], 1e-5f); + } + TF_LITE_MICRO_EXPECT_EQ(expected_dims_len, + static_cast(output_tensor->dims->size)); + for (size_t i = 0; i < expected_dims_len; ++i) { + TF_LITE_MICRO_EXPECT_EQ(expected_dims[i], output_tensor->dims->data[i]); + } +} +template +void TestReshapeWithShape(TfLiteTensor* input_tensor, + TfLiteTensor* shape_tensor, + TfLiteTensor* output_tensor, const T* expected_output, + const size_t expected_output_len, int* expected_dims, + const size_t expected_dims_len, bool expect_failure) { + constexpr int inputs_size = 2; + constexpr int outputs_size = 1; + constexpr int tensors_size = inputs_size + outputs_size; + TfLiteTensor tensors[tensors_size]; + tensors[0] = *input_tensor; + tensors[1] = *shape_tensor; + tensors[2] = *output_tensor; + + int inputs_data[] = {2, 0, 1}; + TfLiteIntArray* inputs_array = IntArrayFromInts(inputs_data); + int outputs_data[] = {1, 2}; + TfLiteIntArray* outputs_array = IntArrayFromInts(outputs_data); + + ValidateReshapeGoldens(tensors, tensors_size, inputs_array, outputs_array, + 
expected_output, expected_output_len, expected_dims, + expected_dims_len, expect_failure); +} + +// If expected output is empty, the test is expected to fail. +template +void TestReshapeWithoutShape(TfLiteTensor* input_tensor, + TfLiteTensor* output_tensor, + const T* expected_output, + const size_t expected_output_len, + int* expected_dims, const size_t expected_dims_len, + bool expect_failure) { + constexpr int inputs_size = 1; + constexpr int outputs_size = 1; + constexpr int tensors_size = inputs_size + outputs_size; + TfLiteTensor tensors[tensors_size]; + tensors[0] = *input_tensor; + tensors[1] = *output_tensor; + + int inputs_data[] = {1, 0}; + TfLiteIntArray* inputs_array = IntArrayFromInts(inputs_data); + int outputs_data[] = {1, 1}; + TfLiteIntArray* outputs_array = IntArrayFromInts(outputs_data); + + ValidateReshapeGoldens(tensors, tensors_size, inputs_array, outputs_array, + expected_output, expected_output_len, expected_dims, + expected_dims_len, expect_failure); +} + +void TestReshape(int* input_dims_data, const float* input_data, + int* shape_dims_data, const int32_t* shape_data, + int* output_dims_data, float* output_data, + const float* expected_output, const size_t expected_output_len, + int* expected_dims, const size_t expected_dims_len, + bool expect_failure = false) { + TfLiteIntArray* input_dims = IntArrayFromInts(input_dims_data); + TfLiteIntArray* shape_dims = IntArrayFromInts(shape_dims_data); + TfLiteIntArray* output_dims = IntArrayFromInts(output_dims_data); + TfLiteTensor input_tensor = CreateTensor(input_data, input_dims); + TfLiteTensor shape_tensor = CreateTensor(shape_data, shape_dims); + TfLiteTensor output_tensor = CreateTensor(output_data, output_dims); + + TestReshapeWithShape(&input_tensor, &shape_tensor, &output_tensor, + expected_output, expected_output_len, expected_dims, + expected_dims_len, expect_failure); +} + +template +void TestReshapeQuantized(int* input_dims_data, const T* input_data, + int* shape_dims_data, const 
int32_t* shape_data, + int* output_dims_data, T* output_data, + const T* expected_output, + const size_t expected_output_len, int* expected_dims, + const size_t expected_dims_len, + bool expect_failure = false) { + TfLiteIntArray* input_dims = IntArrayFromInts(input_dims_data); + TfLiteIntArray* shape_dims = IntArrayFromInts(shape_dims_data); + TfLiteIntArray* output_dims = IntArrayFromInts(output_dims_data); + TfLiteTensor input_tensor = CreateQuantizedTensor( + input_data, input_dims, /*scale=*/1.f, /*zero_point=*/0); + TfLiteTensor shape_tensor = CreateTensor(shape_data, shape_dims); + TfLiteTensor output_tensor = CreateQuantizedTensor( + output_data, output_dims, /*scale=*/1.f, /*zero_point=*/0); + + TestReshapeWithShape(&input_tensor, &shape_tensor, &output_tensor, + expected_output, expected_output_len, expected_dims, + expected_dims_len, expect_failure); +} +} // namespace +} // namespace testing +} // namespace tflite + +TF_LITE_MICRO_TESTS_BEGIN + +TF_LITE_MICRO_TEST(ReshapeWithMismatchedDimensionsShouldFail) { + float output_data[32]; + int input_dims[] = {4, 1, 2, 4, 1}; + const float input_data[] = {3}; + int shape_dims[] = {1, 2}; + const int32_t shape_int32[] = {2, 1}; + int output_dims[] = {2, 2, 1}; + const int golden_output_len = 0; + const float golden_output[] = {}; + const int golden_dims_len = 0; + int golden_dims[] = {}; + tflite::testing::TestReshape( + input_dims, input_data, shape_dims, shape_int32, output_dims, output_data, + golden_output, golden_output_len, golden_dims, golden_dims_len, true); +} + +// TODO(b/237407410): Re-enable for Vision P6 when the issue is resolved +#if !defined(VISION_P6) +TF_LITE_MICRO_TEST(ReshapeWithTooManyDimensionsShouldFail) { + float output_data[32]; + int input_dims[] = {9, 1, 1, 2, 1, 1, 1, 1, 1, 1}; + const float input[] = {3, 2}; + int shape_dims[] = {1, 9}; + const int32_t shape_int32[] = {1, 1, 1, 1, 1, 1, 1, 1, 2}; + int output_dims[] = {9, 1, 1, 1, 1, 1, 1, 1, 1, 2}; + const int golden_output_len = 
2; + const float golden_output[] = {3, 2}; + const int golden_dims_len = 9; + int golden_dims[] = {1, 1, 1, 1, 1, 1, 1, 1, 2}; + tflite::testing::TestReshape( + input_dims, input, shape_dims, shape_int32, output_dims, output_data, + golden_output, golden_output_len, golden_dims, golden_dims_len, false); +} +#endif + +TF_LITE_MICRO_TEST(ReshapeWithTooManySpecialDimensionsShouldFail) { + float output_data[32]; + int input_dims[] = {4, 1, 2, 4, 11}; + const float input[] = {3}; + int shape_dims[] = {1, 4}; + const int32_t shape_int32[] = {-1, -1, 2, 4}; + int output_dims[] = {4, -1, -1, 2, 4}; + const int golden_output_len = 2; + const float golden_output[] = {}; + const int golden_dims_len = 9; + int golden_dims[] = {}; + tflite::testing::TestReshape( + input_dims, input, shape_dims, shape_int32, output_dims, output_data, + golden_output, golden_output_len, golden_dims, golden_dims_len, true); +} + +// Create the model with a 2x2 shape. Processing still works because the new +// shape ends up being hardcoded as a flat vector. 
+TF_LITE_MICRO_TEST(ReshapeWithInvalidShapeShouldFail) { + int input_dims_data[] = {3, 1, 2, 2}; + TfLiteIntArray* input_dims = + tflite::testing::IntArrayFromInts(input_dims_data); + const float input_data[] = {3.0f}; + auto input_tensor = tflite::testing::CreateTensor(input_data, input_dims); + float output_data[4]; + int output_dims_data[6] = {2, 2, 1, 2, 2, 1}; + TfLiteIntArray* output_dims = + tflite::testing::IntArrayFromInts(output_dims_data); + auto output_tensor = tflite::testing::CreateTensor(output_data, output_dims); + const int expected_output[] = {}; + const int expected_output_len = 0; + int expected_dims[] = {}; + const int expected_dims_len = 0; + tflite::testing::TestReshapeWithoutShape( + &input_tensor, &output_tensor, expected_output, expected_output_len, + expected_dims, expected_dims_len, true); +} + +TF_LITE_MICRO_TEST(ReshapeWithRegularShapesShouldSucceed) { + float output_data_float[32]; + int8_t output_data_int8[32]; + uint8_t output_data_uint8[32]; + int16_t output_data_int16[32]; + int input_dims[] = {4, 1, 2, 4, 1}; + const float input_float[] = {1, 2, 3, 4, 5, 6, 7, 8}; + const int8_t input_int8[] = {1, 2, 3, 4, 5, 6, 7, 8}; + const uint8_t input_uint8[] = {1, 2, 3, 4, 5, 6, 7, 8}; + const int16_t input_int16[] = {1, 2, 3, 4, 5, 6, 7, 8}; + int shape_dims[] = {1, 3}; + const int32_t shape_int32[] = {2, 2, 2}; + int output_dims[] = {3, 2, 2, 2}; + const int golden_output_len = 8; + const float golden_output_float[] = {1, 2, 3, 4, 5, 6, 7, 8}; + const int8_t golden_output_int8[] = {1, 2, 3, 4, 5, 6, 7, 8}; + const uint8_t golden_output_uint8[] = {1, 2, 3, 4, 5, 6, 7, 8}; + const int16_t golden_output_int16[] = {1, 2, 3, 4, 5, 6, 7, 8}; + const int golden_dims_len = 3; + int golden_dims[] = {2, 2, 2}; + tflite::testing::TestReshape(input_dims, input_float, shape_dims, shape_int32, + output_dims, output_data_float, + golden_output_float, golden_output_len, + golden_dims, golden_dims_len, false); + tflite::testing::TestReshapeQuantized( + 
input_dims, input_int8, shape_dims, shape_int32, output_dims, + output_data_int8, golden_output_int8, golden_output_len, golden_dims, + golden_dims_len, false); + tflite::testing::TestReshapeQuantized( + input_dims, input_uint8, shape_dims, shape_int32, output_dims, + output_data_uint8, golden_output_uint8, golden_output_len, golden_dims, + golden_dims_len, false); + tflite::testing::TestReshapeQuantized( + input_dims, input_int16, shape_dims, shape_int32, output_dims, + output_data_int16, golden_output_int16, golden_output_len, golden_dims, + golden_dims_len, false); +} + +// Stretch is not supported with TF Micro +TF_LITE_MICRO_TEST(ReshapeWithStretchDimensionShouldSucceed) { + float output_data_float[32]; + int8_t output_data_int8[32]; + uint8_t output_data_uint8[32]; + int16_t output_data_int16[32]; + int input_dims[] = {4, 1, 2, 4, 1}; + const float input_float[] = {1, 2, 3, 4, 5, 6, 7, 8}; + const int8_t input_int8[] = {1, 2, 3, 4, 5, 6, 7, 8}; + const uint8_t input_uint8[] = {1, 2, 3, 4, 5, 6, 7, 8}; + const int16_t input_int16[] = {1, 2, 3, 4, 5, 6, 7, 8}; + int shape_dims[] = {1, 3}; + const int32_t shape_int32[] = {2, 1, -1}; + int output_dims[] = {3, 2, 1, -1}; + const int golden_output_len = 8; + const float golden_output_float[] = {1, 2, 3, 4, 5, 6, 7, 8}; + const int8_t golden_output_int8[] = {1, 2, 3, 4, 5, 6, 7, 8}; + const uint8_t golden_output_uint8[] = {1, 2, 3, 4, 5, 6, 7, 8}; + const int16_t golden_output_int16[] = {1, 2, 3, 4, 5, 6, 7, 8}; + const int golden_dims_len = 3; + int golden_dims[] = {2, 1, 4}; + tflite::testing::TestReshape(input_dims, input_float, shape_dims, shape_int32, + output_dims, output_data_float, + golden_output_float, golden_output_len, + golden_dims, golden_dims_len, false); + tflite::testing::TestReshapeQuantized( + input_dims, input_int8, shape_dims, shape_int32, output_dims, + output_data_int8, golden_output_int8, golden_output_len, golden_dims, + golden_dims_len, false); + tflite::testing::TestReshapeQuantized( + 
input_dims, input_uint8, shape_dims, shape_int32, output_dims, + output_data_uint8, golden_output_uint8, golden_output_len, golden_dims, + golden_dims_len, false); + tflite::testing::TestReshapeQuantized( + input_dims, input_int16, shape_dims, shape_int32, output_dims, + output_data_int16, golden_output_int16, golden_output_len, golden_dims, + golden_dims_len, false); +} + +// Empty shape indicates scalar output. +TF_LITE_MICRO_TEST(ReshapeWithScalarOutputShouldSucceed) { + float output_data_float[4]; + int8_t output_data_int8[4]; + uint8_t output_data_uint8[4]; + int input_dims[] = {1, 1}; + const float input_float[] = {3}; + const int8_t input_int8[] = {3}; + const uint8_t input_uint8[] = {3}; + int shape_dims[] = {0}; + const int32_t shape_int32[] = {}; + int output_dims[] = {0}; + const int golden_output_len = 1; + const float golden_output_float[] = {3}; + const int8_t golden_output_int8[] = {3}; + const uint8_t golden_output_uint8[] = {3}; + const int golden_dims_len = 0; + int golden_dims[] = {}; + tflite::testing::TestReshape(input_dims, input_float, shape_dims, shape_int32, + output_dims, output_data_float, + golden_output_float, golden_output_len, + golden_dims, golden_dims_len, false); + tflite::testing::TestReshapeQuantized( + input_dims, input_int8, shape_dims, shape_int32, output_dims, + output_data_int8, golden_output_int8, golden_output_len, golden_dims, + golden_dims_len, false); + tflite::testing::TestReshapeQuantized( + input_dims, input_uint8, shape_dims, shape_int32, output_dims, + output_data_uint8, golden_output_uint8, golden_output_len, golden_dims, + golden_dims_len, false); +} + +// Some old models specify '[0]' as the new shape, indicating that both input +// and output are scalars. 
+TF_LITE_MICRO_TEST(ReshapeWithLegacyScalarOutputShouldSucceed) { + using tflite::testing::CreateTensor; + using tflite::testing::IntArrayFromInts; + + int input_dims_data[] = {1, 1}; + TfLiteIntArray* input_dims = IntArrayFromInts(input_dims_data); + const float input_data[] = {3.0f}; + auto input_tensor = CreateTensor(input_data, input_dims); + + float output_data[1]; + int output_dims_data[2] = {1, 0}; + TfLiteIntArray* output_dims = IntArrayFromInts(output_dims_data); + auto output_tensor = CreateTensor(output_data, output_dims); + + int shape_dims_data[] = {1, 0}; + TfLiteIntArray* shape_dims = IntArrayFromInts(shape_dims_data); + + const int32_t shape_data[] = {0}; + auto shape_tensor = tflite::testing::CreateTensor(shape_data, shape_dims); + const float expected_output_with_shape[] = {}; + const int expected_output_with_shape_len = 0; + const float expected_output_no_shape[] = {3}; + const int expected_output_no_shape_len = 1; + int expected_dims[] = {}; + const int expected_dims_len = 0; + tflite::testing::TestReshapeWithShape( + &input_tensor, &shape_tensor, &output_tensor, expected_output_with_shape, + expected_output_with_shape_len, expected_dims, expected_dims_len, true); + + tflite::testing::TestReshapeWithoutShape( + &input_tensor, &output_tensor, expected_output_no_shape, + expected_output_no_shape_len, expected_dims, expected_dims_len, false); +} + +TF_LITE_MICRO_TESTS_END diff --git a/tensorflow/lite/micro/kernels/resize_bilinear.cc b/tensorflow/lite/micro/kernels/resize_bilinear.cc new file mode 100644 index 0000000..e701e03 --- /dev/null +++ b/tensorflow/lite/micro/kernels/resize_bilinear.cc @@ -0,0 +1,116 @@ +/* Copyright 2021 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. 
+You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ +#include "tensorflow/lite/kernels/internal/reference/resize_bilinear.h" + +#include "tensorflow/lite/c/builtin_op_data.h" +#include "tensorflow/lite/c/common.h" +#include "tensorflow/lite/kernels/internal/tensor_ctypes.h" +#include "tensorflow/lite/kernels/kernel_util.h" +#include "tensorflow/lite/kernels/op_macros.h" +#include "tensorflow/lite/micro/kernels/kernel_util.h" +#include "tensorflow/lite/micro/micro_log.h" +#include "tensorflow/lite/micro/micro_utils.h" + +namespace tflite { +namespace { + +constexpr int kInputTensor = 0; +constexpr int kSizeTensor = 1; +constexpr int kOutputTensor = 0; + +TfLiteStatus Prepare(TfLiteContext* context, TfLiteNode* node) { + MicroContext* micro_context = GetMicroContext(context); + + TF_LITE_ENSURE_EQ(context, NumInputs(node), 2); + TF_LITE_ENSURE_EQ(context, NumOutputs(node), 1); + + TfLiteTensor* input = + micro_context->AllocateTempInputTensor(node, kInputTensor); + TfLiteTensor* size = + micro_context->AllocateTempInputTensor(node, kSizeTensor); + TfLiteTensor* output = + micro_context->AllocateTempOutputTensor(node, kOutputTensor); + + TF_LITE_ENSURE_EQ(context, NumDimensions(input), 4); + TF_LITE_ENSURE_EQ(context, NumDimensions(size), 1); + + TF_LITE_ENSURE_EQ(context, size->type, kTfLiteInt32); + output->type = input->type; + + TF_LITE_ENSURE_MSG(context, IsConstantTensor(size), + "Non constant size tensor not supported"); + + // Ensure params are valid. 
+ auto* params = + reinterpret_cast(node->builtin_data); + if (params->half_pixel_centers && params->align_corners) { + MicroPrintf("If half_pixel_centers is True, align_corners must be False."); + return kTfLiteError; + } + + micro_context->DeallocateTempTfLiteTensor(input); + micro_context->DeallocateTempTfLiteTensor(size); + micro_context->DeallocateTempTfLiteTensor(output); + return kTfLiteOk; +} + +TfLiteStatus Eval(TfLiteContext* context, TfLiteNode* node) { + auto* params = + reinterpret_cast(node->builtin_data); + + const TfLiteEvalTensor* input = + tflite::micro::GetEvalInput(context, node, kInputTensor); + const TfLiteEvalTensor* size = + tflite::micro::GetEvalInput(context, node, kSizeTensor); + TfLiteEvalTensor* output = + tflite::micro::GetEvalOutput(context, node, kOutputTensor); + + if (output->type == kTfLiteFloat32) { + tflite::ResizeBilinearParams op_params; + op_params.align_corners = params->align_corners; + op_params.half_pixel_centers = params->half_pixel_centers; + reference_ops::ResizeBilinear(op_params, + tflite::micro::GetTensorShape(input), + tflite::micro::GetTensorData(input), + tflite::micro::GetTensorShape(size), + tflite::micro::GetTensorData(size), + tflite::micro::GetTensorShape(output), + tflite::micro::GetTensorData(output)); + } else if (output->type == kTfLiteInt8) { + tflite::ResizeBilinearParams op_params; + op_params.align_corners = params->align_corners; + op_params.half_pixel_centers = params->half_pixel_centers; + reference_ops::ResizeBilinearInteger( + op_params, tflite::micro::GetTensorShape(input), + tflite::micro::GetTensorData(input), + tflite::micro::GetTensorShape(size), + tflite::micro::GetTensorData(size), + tflite::micro::GetTensorShape(output), + tflite::micro::GetTensorData(output)); + } else { + MicroPrintf("Output type is %d, requires float or int8.", output->type); + return kTfLiteError; + } + + return kTfLiteOk; +} + +} // namespace + +TFLMRegistration Register_RESIZE_BILINEAR() { + return 
tflite::micro::RegisterOp(nullptr, Prepare, Eval); +} + +} // namespace tflite diff --git a/tensorflow/lite/micro/kernels/resize_bilinear_test.cc b/tensorflow/lite/micro/kernels/resize_bilinear_test.cc new file mode 100644 index 0000000..b52cebe --- /dev/null +++ b/tensorflow/lite/micro/kernels/resize_bilinear_test.cc @@ -0,0 +1,329 @@ +/* Copyright 2021 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#include "tensorflow/lite/c/builtin_op_data.h" +#include "tensorflow/lite/micro/kernels/kernel_runner.h" +#include "tensorflow/lite/micro/test_helpers.h" +#include "tensorflow/lite/micro/testing/micro_test.h" + +namespace tflite { +namespace testing { +namespace { + +TfLiteTensor TestCreateTensor(const float* data, TfLiteIntArray* dims) { + return CreateTensor(data, dims); +} + +TfLiteTensor TestCreateTensor(const int8_t* data, TfLiteIntArray* dims) { + return CreateQuantizedTensor(data, dims, -128, 127); +} + +template +TfLiteStatus ValidateGoldens(TfLiteTensor* tensors, int tensors_size, + const T* expected_output_data, T* output_data, + int output_length, + TfLiteResizeBilinearParams* params, + float tolerance = 1e-5) { + int inputs_array_data[] = {2, 0, 1}; + TfLiteIntArray* inputs_array = IntArrayFromInts(inputs_array_data); + int outputs_array_data[] = {1, 2}; + TfLiteIntArray* outputs_array = IntArrayFromInts(outputs_array_data); + + const TFLMRegistration 
registration = Register_RESIZE_BILINEAR(); + micro::KernelRunner runner(registration, tensors, tensors_size, inputs_array, + outputs_array, params); + + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, runner.InitAndPrepare()); + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, runner.Invoke()); + + for (int i = 0; i < output_length; ++i) { + TF_LITE_MICRO_EXPECT_NEAR(expected_output_data[i], output_data[i], + tolerance); + } + + return kTfLiteOk; +} + +template +void TestResizeBilinear(int* input_dims_data, const T* input_data, + const int32_t* expected_size_data, + const T* expected_output_data, int* output_dims_data, + T* output_data, TfLiteResizeBilinearParams* params, + float tolerance = 1e-5) { + int expected_size_dims_data[] = {1, 2}; + TfLiteIntArray* input_dims = IntArrayFromInts(input_dims_data); + TfLiteIntArray* expected_size_dims = + IntArrayFromInts(expected_size_dims_data); + TfLiteIntArray* output_dims = IntArrayFromInts(output_dims_data); + + const int output_dims_count = ElementCount(*output_dims); + + // Hack to pass ConstantTensor check in prepare + TfLiteTensor t = CreateTensor(expected_size_data, expected_size_dims); + t.allocation_type = kTfLiteMmapRo; + + constexpr int tensors_size = 3; + TfLiteTensor tensors[tensors_size]{ + TestCreateTensor(input_data, input_dims), + t, + TestCreateTensor(output_data, output_dims), + }; + + TF_LITE_MICRO_EXPECT_EQ( + kTfLiteOk, + ValidateGoldens(tensors, tensors_size, expected_output_data, output_data, + output_dims_count, params, tolerance)); +} + +} // namespace +} // namespace testing +} // namespace tflite + +TF_LITE_MICRO_TESTS_BEGIN + +TF_LITE_MICRO_TEST(HorizontalResize) { + int input_dims[] = {4, 1, 1, 2, 1}; + const float input_data[] = {3, 6}; + const int32_t expected_size_data[] = {1, 3}; + const float expected_output_data[] = {3, 5, 6}; + int output_dims[] = {4, 1, 1, 3, 1}; + float output_data[3]; + + TfLiteResizeBilinearParams params = { + false, /*align_corners*/ + false /*half pixel centers*/ + }; + + 
tflite::testing::TestResizeBilinear(input_dims, input_data, + expected_size_data, expected_output_data, + output_dims, output_data, ¶ms); +} + +TF_LITE_MICRO_TEST(HorizontalResizeInt8) { + int input_dims[] = {4, 1, 1, 2, 1}; + const int8_t input_data[] = {3, 6}; + const int32_t expected_size_data[] = {1, 3}; + const int8_t expected_output_data[] = {3, 5, 6}; + int output_dims[] = {4, 1, 1, 3, 1}; + int8_t output_data[3]; + + TfLiteResizeBilinearParams params = { + false, /*align_corners*/ + false /*half pixel centers*/ + }; + + tflite::testing::TestResizeBilinear( + input_dims, input_data, expected_size_data, expected_output_data, + output_dims, output_data, ¶ms); +} + +TF_LITE_MICRO_TEST(VerticalResize) { + int input_dims[] = {4, 1, 2, 1, 1}; + const float input_data[] = {3, 9}; + const int32_t expected_size_data[] = {3, 1}; + const float expected_output_data[] = {3, 7, 9}; + int output_dims[] = {4, 1, 3, 1, 1}; + float output_data[3]; + + TfLiteResizeBilinearParams params = { + false, /*align_corners*/ + false /*half pixel centers*/ + }; + + tflite::testing::TestResizeBilinear(input_dims, input_data, + expected_size_data, expected_output_data, + output_dims, output_data, ¶ms); +} + +TF_LITE_MICRO_TEST(VerticalResizeInt8) { + int input_dims[] = {4, 1, 2, 1, 1}; + const int8_t input_data[] = {3, 9}; + const int32_t expected_size_data[] = {3, 1}; + const int8_t expected_output_data[] = {3, 7, 9}; + int output_dims[] = {4, 1, 3, 1, 1}; + int8_t output_data[3]; + + TfLiteResizeBilinearParams params = { + false, /*align_corners*/ + false /*half pixel centers*/ + }; + + tflite::testing::TestResizeBilinear( + input_dims, input_data, expected_size_data, expected_output_data, + output_dims, output_data, ¶ms); +} + +TF_LITE_MICRO_TEST(TwoDimensionalResize) { + int input_dims[] = {4, 1, 2, 2, 1}; + const float input_data[] = { + 3, 6, // + 9, 12, // + }; + const int32_t expected_size_data[] = {3, 3}; + const float expected_output_data[] = { + 3, 5, 6, // + 7, 9, 10, // + 9, 
11, 12 // + }; + + int output_dims[] = {4, 1, 3, 3, 1}; + float output_data[9]; + + TfLiteResizeBilinearParams params = { + false, /*align_corners*/ + false /*half pixel centers*/ + }; + + tflite::testing::TestResizeBilinear(input_dims, input_data, + expected_size_data, expected_output_data, + output_dims, output_data, ¶ms); +} + +TF_LITE_MICRO_TEST(TwoDimensionalResizeInt8) { + int input_dims[] = {4, 1, 2, 2, 1}; + const int8_t input_data[] = { + 3, 6, // + 9, 12, // + }; + const int32_t expected_size_data[] = {3, 3}; + const int8_t expected_output_data[] = { + 3, 5, 6, // + 7, 9, 10, // + 9, 11, 12, // + }; + int output_dims[] = {4, 1, 3, 3, 1}; + int8_t output_data[9]; + + TfLiteResizeBilinearParams params = { + false, /*align_corners*/ + false /*half pixel centers*/ + }; + + tflite::testing::TestResizeBilinear( + input_dims, input_data, expected_size_data, expected_output_data, + output_dims, output_data, ¶ms); +} + +TF_LITE_MICRO_TEST(TwoDimensionalResizeWithTwoBatches) { + int input_dims[] = {4, 2, 2, 2, 1}; + const float input_data[] = { + 3, 6, // + 9, 12, // + 4, 10, // + 10, 16 // + }; + const int32_t expected_size_data[] = {3, 3}; + const float expected_output_data[] = { + 3, 5, 6, // + 7, 9, 10, // + 9, 11, 12, // + 4, 8, 10, // + 8, 12, 14, // + 10, 14, 16, // + }; + int output_dims[] = {4, 2, 3, 3, 1}; + float output_data[18]; + + TfLiteResizeBilinearParams params = { + false, /*align_corners*/ + false /*half pixel centers*/ + }; + + tflite::testing::TestResizeBilinear(input_dims, input_data, + expected_size_data, expected_output_data, + output_dims, output_data, ¶ms); +} + +TF_LITE_MICRO_TEST(TwoDimensionalResizeWithTwoBatchesInt8) { + int input_dims[] = {4, 2, 2, 2, 1}; + const int8_t input_data[] = { + 3, 6, // + 9, 12, // + 4, 10, // + 12, 16 // + }; + const int32_t expected_size_data[] = {3, 3}; + const int8_t expected_output_data[] = { + 3, 5, 6, // + 7, 9, 10, // + 9, 11, 12, // + 4, 8, 10, // + 9, 12, 13, // + 12, 14, 16, // + }; + int 
output_dims[] = {4, 2, 3, 3, 1}; + int8_t output_data[18]; + + TfLiteResizeBilinearParams params = { + false, /*align_corners*/ + false /*half pixel centers*/ + }; + + tflite::testing::TestResizeBilinear( + input_dims, input_data, expected_size_data, expected_output_data, + output_dims, output_data, ¶ms, /*tolerance=*/1); +} + +TF_LITE_MICRO_TEST(ThreeDimensionalResize) { + int input_dims[] = {4, 1, 2, 2, 2}; + const float input_data[] = { + 3, 4, 6, 10, // + 9, 10, 12, 16, // + }; + const int32_t expected_size_data[] = {3, 3}; + const float expected_output_data[] = { + 3, 4, 5, 8, 6, 10, // + 7, 8, 9, 12, 10, 14, // + 9, 10, 11, 14, 12, 16, // + }; + int output_dims[] = {4, 1, 3, 3, 2}; + float output_data[18]; + + TfLiteResizeBilinearParams params = { + false, /*align_corners*/ + false /*half pixel centers*/ + }; + + tflite::testing::TestResizeBilinear(input_dims, input_data, + expected_size_data, expected_output_data, + output_dims, output_data, ¶ms); +} + +TF_LITE_MICRO_TEST(ThreeDimensionalResizeInt8) { + int input_dims[] = {4, 1, 2, 2, 2}; + const int8_t input_data[] = { + 3, 4, 6, 10, // + 10, 12, 14, 16, // + }; + const int32_t expected_size_data[] = {3, 3}; + const int8_t expected_output_data[] = { + 3, 4, 5, 8, 6, 10, // + 7, 9, 10, 12, 11, 13, // + 10, 12, 12, 14, 14, 16, // + }; + int output_dims[] = {4, 1, 3, 3, 2}; + int8_t output_data[18]; + + TfLiteResizeBilinearParams params = { + false, /*align_corners*/ + false /*half pixel centers*/ + }; + + tflite::testing::TestResizeBilinear( + input_dims, input_data, expected_size_data, expected_output_data, + output_dims, output_data, ¶ms, /*tolerance=*/1); +} + +TF_LITE_MICRO_TESTS_END diff --git a/tensorflow/lite/micro/kernels/resize_nearest_neighbor.cc b/tensorflow/lite/micro/kernels/resize_nearest_neighbor.cc new file mode 100644 index 0000000..46b6ea1 --- /dev/null +++ b/tensorflow/lite/micro/kernels/resize_nearest_neighbor.cc @@ -0,0 +1,123 @@ +/* Copyright 2023 The TensorFlow Authors. 
All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#include "tensorflow/lite/kernels/internal/reference/resize_nearest_neighbor.h" + +#include "tensorflow/lite/c/builtin_op_data.h" +#include "tensorflow/lite/c/common.h" +#include "tensorflow/lite/kernels/internal/tensor_ctypes.h" +#include "tensorflow/lite/kernels/kernel_util.h" +#include "tensorflow/lite/kernels/op_macros.h" +#include "tensorflow/lite/micro/kernels/kernel_util.h" +#include "tensorflow/lite/micro/micro_log.h" + +namespace tflite { + +namespace { + +constexpr int kInputTensor = 0; +constexpr int kSizeTensor = 1; +constexpr int kOutputTensor = 0; + +TfLiteStatus Prepare(TfLiteContext* context, TfLiteNode* node) { + MicroContext* micro_context = GetMicroContext(context); + + TF_LITE_ENSURE_EQ(context, NumInputs(node), 2); + TF_LITE_ENSURE_EQ(context, NumOutputs(node), 1); + + TfLiteTensor* input = + micro_context->AllocateTempInputTensor(node, kInputTensor); + TfLiteTensor* size = + micro_context->AllocateTempInputTensor(node, kSizeTensor); + TfLiteTensor* output = + micro_context->AllocateTempOutputTensor(node, kOutputTensor); + + // Our current implementations rely on the input being 4D, + // and the size being 1D tensor with exactly 2 elements. 
+ TF_LITE_ENSURE_EQ(context, NumDimensions(input), 4); + TF_LITE_ENSURE_EQ(context, NumDimensions(size), 1); + TF_LITE_ENSURE_EQ(context, size->type, kTfLiteInt32); + TF_LITE_ENSURE_EQ(context, size->dims->data[0], 2); + + output->type = input->type; + + if (!IsConstantTensor(size)) { + MicroPrintf("Dynamic tensors are unsupported in tfmicro."); + return kTfLiteError; + } + + micro_context->DeallocateTempTfLiteTensor(input); + micro_context->DeallocateTempTfLiteTensor(size); + micro_context->DeallocateTempTfLiteTensor(output); + + return kTfLiteOk; +} + +TfLiteStatus Eval(TfLiteContext* context, TfLiteNode* node) { + auto* params = + reinterpret_cast(node->builtin_data); + + const TfLiteEvalTensor* input = + tflite::micro::GetEvalInput(context, node, kInputTensor); + const TfLiteEvalTensor* size = + tflite::micro::GetEvalInput(context, node, kSizeTensor); + TfLiteEvalTensor* output = + tflite::micro::GetEvalOutput(context, node, kOutputTensor); + + tflite::ResizeNearestNeighborParams op_params; + op_params.align_corners = params->align_corners; + op_params.half_pixel_centers = false; + + if (output->type == kTfLiteFloat32) { + reference_ops::ResizeNearestNeighbor( + op_params, tflite::micro::GetTensorShape(input), + tflite::micro::GetTensorData(input), + tflite::micro::GetTensorShape(size), + tflite::micro::GetTensorData(size), + tflite::micro::GetTensorShape(output), + tflite::micro::GetTensorData(output)); + } else if (output->type == kTfLiteInt8) { + reference_ops::ResizeNearestNeighbor( + op_params, tflite::micro::GetTensorShape(input), + tflite::micro::GetTensorData(input), + tflite::micro::GetTensorShape(size), + tflite::micro::GetTensorData(size), + tflite::micro::GetTensorShape(output), + tflite::micro::GetTensorData(output)); + } else if (output->type == kTfLiteInt16) { + reference_ops::ResizeNearestNeighbor( + op_params, tflite::micro::GetTensorShape(input), + tflite::micro::GetTensorData(input), + tflite::micro::GetTensorShape(size), + 
tflite::micro::GetTensorData(size), + tflite::micro::GetTensorShape(output), + tflite::micro::GetTensorData(output)); + } else { + MicroPrintf("Output tensor type %s (%d) not supported.", + TfLiteTypeGetName(output->type), output->type); + + return kTfLiteError; + } + + return kTfLiteOk; +} + +} // namespace + +TFLMRegistration Register_RESIZE_NEAREST_NEIGHBOR() { + return tflite::micro::RegisterOp(nullptr, Prepare, Eval); +} + +} // namespace tflite diff --git a/tensorflow/lite/micro/kernels/resize_nearest_neighbor_test.cc b/tensorflow/lite/micro/kernels/resize_nearest_neighbor_test.cc new file mode 100644 index 0000000..3e06da8 --- /dev/null +++ b/tensorflow/lite/micro/kernels/resize_nearest_neighbor_test.cc @@ -0,0 +1,352 @@ +/* Copyright 2023 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/ + +#include "tensorflow/lite/c/builtin_op_data.h" +#include "tensorflow/lite/micro/kernels/kernel_runner.h" +#include "tensorflow/lite/micro/test_helpers.h" +#include "tensorflow/lite/micro/testing/micro_test.h" + +namespace tflite { +namespace testing { +namespace { + +// Input data expects a 4-D tensor of [batch, height, width, channels] +// Output data should match input datas batch and channels +// Expected sizes should be a 1-D tensor with 2 elements: new_height & new_width +template +void TestResizeNearestNeighbor(int* input_dims_data, const T* input_data, + const int32_t* expected_size_data, + const T* expected_output_data, + int* output_dims_data, T* output_data) { + TfLiteIntArray* input_dims = IntArrayFromInts(input_dims_data); + + int expected_size_dims_data[] = {1, 2}; + TfLiteIntArray* expected_size_dims = + IntArrayFromInts(expected_size_dims_data); + + TfLiteIntArray* output_dims = IntArrayFromInts(output_dims_data); + + int output_dims_count = ElementCount(*output_dims); + + constexpr int tensors_size = 3; + TfLiteTensor tensors[tensors_size] = { + CreateTensor(input_data, input_dims), + CreateTensor(expected_size_data, expected_size_dims), + CreateTensor(output_data, output_dims), + }; + + tensors[1].allocation_type = kTfLiteMmapRo; + + TfLiteResizeNearestNeighborParams builtin_data = {false, false}; + + int inputs_array_data[] = {2, 0, 1}; + TfLiteIntArray* inputs_array = IntArrayFromInts(inputs_array_data); + int outputs_array_data[] = {1, 2}; + TfLiteIntArray* outputs_array = IntArrayFromInts(outputs_array_data); + + const TFLMRegistration registration = Register_RESIZE_NEAREST_NEIGHBOR(); + micro::KernelRunner runner(registration, tensors, tensors_size, inputs_array, + outputs_array, &builtin_data); + + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, runner.InitAndPrepare()); + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, runner.Invoke()); + + // compare results + for (int i = 0; i < 
output_dims_count; ++i) { + TF_LITE_MICRO_EXPECT_EQ(expected_output_data[i], output_data[i]); + } +} + +} // namespace +} // namespace testing +} // namespace tflite + +TF_LITE_MICRO_TESTS_BEGIN + +TF_LITE_MICRO_TEST(HorizontalResize) { + int input_dims[] = {4, 1, 1, 2, 1}; + const float input_data[] = {3, 6}; + const int32_t expected_size_data[] = {1, 3}; + const float expected_output_data[] = {3, 3, 6}; + int output_dims[] = {4, 1, 1, 3, 1}; + float output_data[3]; + + tflite::testing::TestResizeNearestNeighbor( + input_dims, input_data, expected_size_data, expected_output_data, + output_dims, output_data); +} + +TF_LITE_MICRO_TEST(HorizontalResizeInt8) { + int input_dims[] = {4, 1, 1, 2, 1}; + const int8_t input_data[] = {-3, 6}; + const int32_t expected_size_data[] = {1, 3}; + const int8_t expected_output_data[] = {-3, -3, 6}; + int output_dims[] = {4, 1, 1, 3, 1}; + int8_t output_data[3]; + + tflite::testing::TestResizeNearestNeighbor( + input_dims, input_data, expected_size_data, expected_output_data, + output_dims, output_data); +} + +TF_LITE_MICRO_TEST(HorizontalResizeInt16) { + int input_dims[] = {4, 1, 1, 2, 1}; + const int16_t input_data[] = {-3, 6}; + const int32_t expected_size_data[] = {1, 3}; + const int16_t expected_output_data[] = {-3, -3, 6}; + int output_dims[] = {4, 1, 1, 3, 1}; + int16_t output_data[3]; + + tflite::testing::TestResizeNearestNeighbor( + input_dims, input_data, expected_size_data, expected_output_data, + output_dims, output_data); +} + +TF_LITE_MICRO_TEST(VerticalResize) { + int input_dims[] = {4, 1, 2, 1, 1}; + const float input_data[] = {3, 9}; + const int32_t expected_size_data[] = {3, 1}; + const float expected_output_data[] = {3, 3, 9}; + int output_dims[] = {4, 1, 3, 1, 1}; + float output_data[3]; + + tflite::testing::TestResizeNearestNeighbor( + input_dims, input_data, expected_size_data, expected_output_data, + output_dims, output_data); +} + +TF_LITE_MICRO_TEST(VerticalResizeInt8) { + int input_dims[] = {4, 1, 2, 1, 1}; 
+ const int8_t input_data[] = {3, -9}; + const int32_t expected_size_data[] = {3, 1}; + const int8_t expected_output_data[] = {3, 3, -9}; + int output_dims[] = {4, 1, 3, 1, 1}; + int8_t output_data[3]; + + tflite::testing::TestResizeNearestNeighbor( + input_dims, input_data, expected_size_data, expected_output_data, + output_dims, output_data); +} + +TF_LITE_MICRO_TEST(VerticalResizeInt16) { + int input_dims[] = {4, 1, 2, 1, 1}; + const int16_t input_data[] = {3, -9}; + const int32_t expected_size_data[] = {3, 1}; + const int16_t expected_output_data[] = {3, 3, -9}; + int output_dims[] = {4, 1, 3, 1, 1}; + int16_t output_data[3]; + + tflite::testing::TestResizeNearestNeighbor( + input_dims, input_data, expected_size_data, expected_output_data, + output_dims, output_data); +} + +TF_LITE_MICRO_TEST(TwoDimensionalResize) { + int input_dims[] = {4, 1, 2, 2, 1}; + const float input_data[] = { + 3, 6, // + 9, 12, // + }; + const int32_t expected_size_data[] = {3, 3}; + const float expected_output_data[] = { + 3, 3, 6, // + 3, 3, 6, // + 9, 9, 12 // + }; + + int output_dims[] = {4, 1, 3, 3, 1}; + float output_data[9]; + + tflite::testing::TestResizeNearestNeighbor( + input_dims, input_data, expected_size_data, expected_output_data, + output_dims, output_data); +} + +TF_LITE_MICRO_TEST(TwoDimensionalResizeInt8) { + int input_dims[] = {4, 1, 2, 2, 1}; + const int8_t input_data[] = { + 3, -6, // + 9, 12, // + }; + const int32_t expected_size_data[] = {3, 3}; + const int8_t expected_output_data[] = { + 3, 3, -6, // + 3, 3, -6, // + 9, 9, 12, // + }; + int output_dims[] = {4, 1, 3, 3, 1}; + int8_t output_data[9]; + + tflite::testing::TestResizeNearestNeighbor( + input_dims, input_data, expected_size_data, expected_output_data, + output_dims, output_data); +} + +TF_LITE_MICRO_TEST(TwoDimensionalResizeInt16) { + int input_dims[] = {4, 1, 2, 2, 1}; + const int16_t input_data[] = { + 3, -6, // + 9, 12, // + }; + const int32_t expected_size_data[] = {3, 3}; + const int16_t 
expected_output_data[] = { + 3, 3, -6, // + 3, 3, -6, // + 9, 9, 12, // + }; + int output_dims[] = {4, 1, 3, 3, 1}; + int16_t output_data[9]; + + tflite::testing::TestResizeNearestNeighbor( + input_dims, input_data, expected_size_data, expected_output_data, + output_dims, output_data); +} + +TF_LITE_MICRO_TEST(TwoDimensionalResizeWithTwoBatches) { + int input_dims[] = {4, 2, 2, 2, 1}; + const float input_data[] = { + 3, 6, // + 9, 12, // + 4, 10, // + 10, 16 // + }; + const int32_t expected_size_data[] = {3, 3}; + const float expected_output_data[] = { + 3, 3, 6, // + 3, 3, 6, // + 9, 9, 12, // + 4, 4, 10, // + 4, 4, 10, // + 10, 10, 16, // + }; + int output_dims[] = {4, 2, 3, 3, 1}; + float output_data[18]; + + tflite::testing::TestResizeNearestNeighbor( + input_dims, input_data, expected_size_data, expected_output_data, + output_dims, output_data); +} + +TF_LITE_MICRO_TEST(TwoDimensionalResizeWithTwoBatchesInt8) { + int input_dims[] = {4, 2, 2, 2, 1}; + const int8_t input_data[] = { + 3, 6, // + 9, -12, // + -4, 10, // + 10, 16 // + }; + const int32_t expected_size_data[] = {3, 3}; + const int8_t expected_output_data[] = { + 3, 3, 6, // + 3, 3, 6, // + 9, 9, -12, // + -4, -4, 10, // + -4, -4, 10, // + 10, 10, 16, // + }; + int output_dims[] = {4, 2, 3, 3, 1}; + int8_t output_data[18]; + + tflite::testing::TestResizeNearestNeighbor( + input_dims, input_data, expected_size_data, expected_output_data, + output_dims, output_data); +} + +TF_LITE_MICRO_TEST(TwoDimensionalResizeWithTwoBatchesInt16) { + int input_dims[] = {4, 2, 2, 2, 1}; + const int16_t input_data[] = { + 3, 6, // + 9, -12, // + -4, 10, // + 10, 16 // + }; + const int32_t expected_size_data[] = {3, 3}; + const int16_t expected_output_data[] = { + 3, 3, 6, // + 3, 3, 6, // + 9, 9, -12, // + -4, -4, 10, // + -4, -4, 10, // + 10, 10, 16, // + }; + int output_dims[] = {4, 2, 3, 3, 1}; + int16_t output_data[18]; + + tflite::testing::TestResizeNearestNeighbor( + input_dims, input_data, expected_size_data, 
expected_output_data, + output_dims, output_data); +} + +TF_LITE_MICRO_TEST(ThreeDimensionalResize) { + int input_dims[] = {4, 1, 2, 2, 2}; + const float input_data[] = { + 3, 4, 6, 10, // + 9, 10, 12, 16, // + }; + const int32_t expected_size_data[] = {3, 3}; + const float expected_output_data[] = { + 3, 4, 3, 4, 6, 10, // + 3, 4, 3, 4, 6, 10, // + 9, 10, 9, 10, 12, 16, // + }; + int output_dims[] = {4, 1, 3, 3, 2}; + float output_data[18]; + + tflite::testing::TestResizeNearestNeighbor( + input_dims, input_data, expected_size_data, expected_output_data, + output_dims, output_data); +} + +TF_LITE_MICRO_TEST(ThreeDimensionalResizeInt8) { + int input_dims[] = {4, 1, 2, 2, 2}; + const int8_t input_data[] = { + 3, 4, -6, 10, // + 10, 12, -14, 16, // + }; + const int32_t expected_size_data[] = {3, 3}; + const int8_t expected_output_data[] = { + 3, 4, 3, 4, -6, 10, // + 3, 4, 3, 4, -6, 10, // + 10, 12, 10, 12, -14, 16, // + }; + int output_dims[] = {4, 1, 3, 3, 2}; + int8_t output_data[18]; + + tflite::testing::TestResizeNearestNeighbor( + input_dims, input_data, expected_size_data, expected_output_data, + output_dims, output_data); +} + +TF_LITE_MICRO_TEST(ThreeDimensionalResizeInt16) { + int input_dims[] = {4, 1, 2, 2, 2}; + const int16_t input_data[] = { + 3, 4, -6, 10, // + 10, 12, -14, 16, // + }; + const int32_t expected_size_data[] = {3, 3}; + const int16_t expected_output_data[] = { + 3, 4, 3, 4, -6, 10, // + 3, 4, 3, 4, -6, 10, // + 10, 12, 10, 12, -14, 16, // + }; + int output_dims[] = {4, 1, 3, 3, 2}; + int16_t output_data[18]; + + tflite::testing::TestResizeNearestNeighbor( + input_dims, input_data, expected_size_data, expected_output_data, + output_dims, output_data); +} + +TF_LITE_MICRO_TESTS_END diff --git a/tensorflow/lite/micro/kernels/round.cc b/tensorflow/lite/micro/kernels/round.cc new file mode 100644 index 0000000..7a9458b --- /dev/null +++ b/tensorflow/lite/micro/kernels/round.cc @@ -0,0 +1,76 @@ +/* Copyright 2018 The TensorFlow Authors. 
All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#include "tensorflow/lite/kernels/internal/reference/round.h" + +#include "tensorflow/lite/c/common.h" +#include "tensorflow/lite/kernels/internal/tensor_ctypes.h" +#include "tensorflow/lite/kernels/kernel_util.h" +#include "tensorflow/lite/micro/kernels/kernel_util.h" + +namespace tflite { +namespace ops { +namespace micro { +namespace round { + +constexpr int kInputTensor = 0; +constexpr int kOutputTensor = 0; + +TfLiteStatus Prepare(TfLiteContext* context, TfLiteNode* node) { + MicroContext* micro_context = GetMicroContext(context); + + TfLiteTensor* input = + micro_context->AllocateTempInputTensor(node, kInputTensor); + TF_LITE_ENSURE(context, input != nullptr); + TfLiteTensor* output = + micro_context->AllocateTempOutputTensor(node, kOutputTensor); + TF_LITE_ENSURE(context, output != nullptr); + TF_LITE_ENSURE_EQ(context, NumInputs(node), 1); + TF_LITE_ENSURE_EQ(context, NumOutputs(node), 1); + TF_LITE_ENSURE_TYPES_EQ(context, input->type, kTfLiteFloat32); + TF_LITE_ENSURE_TYPES_EQ(context, output->type, input->type); + TF_LITE_ENSURE_EQ(context, output->bytes, input->bytes); + TF_LITE_ENSURE_EQ(context, output->dims->size, input->dims->size); + for (int i = 0; i < output->dims->size; ++i) { + TF_LITE_ENSURE_EQ(context, output->dims->data[i], input->dims->data[i]); + } + + micro_context->DeallocateTempTfLiteTensor(input); + 
micro_context->DeallocateTempTfLiteTensor(output); + return kTfLiteOk; +} + +TfLiteStatus Eval(TfLiteContext* context, TfLiteNode* node) { + const TfLiteEvalTensor* input = + tflite::micro::GetEvalInput(context, node, kInputTensor); + TfLiteEvalTensor* output = + tflite::micro::GetEvalOutput(context, node, kOutputTensor); + + reference_ops::Round(tflite::micro::GetTensorShape(input), + tflite::micro::GetTensorData(input), + tflite::micro::GetTensorShape(output), + tflite::micro::GetTensorData(output)); + + return kTfLiteOk; +} +} // namespace round + +TFLMRegistration Register_ROUND() { + return tflite::micro::RegisterOp(nullptr, round::Prepare, round::Eval); +} + +} // namespace micro +} // namespace ops +} // namespace tflite diff --git a/tensorflow/lite/micro/kernels/round_test.cc b/tensorflow/lite/micro/kernels/round_test.cc new file mode 100644 index 0000000..1edb609 --- /dev/null +++ b/tensorflow/lite/micro/kernels/round_test.cc @@ -0,0 +1,79 @@ +/* Copyright 2018 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/ + +#include "tensorflow/lite/c/builtin_op_data.h" +#include "tensorflow/lite/c/common.h" +#include "tensorflow/lite/micro/kernels/kernel_runner.h" +#include "tensorflow/lite/micro/test_helpers.h" +#include "tensorflow/lite/micro/testing/micro_test.h" + +namespace tflite { +namespace testing { +namespace { + +void TestRound(int* input_dims_data, const float* input_data, + const float* expected_output_data, float* output_data) { + TfLiteIntArray* input_dims = IntArrayFromInts(input_dims_data); + TfLiteIntArray* output_dims = IntArrayFromInts(input_dims_data); + const int output_dims_count = ElementCount(*output_dims); + constexpr int inputs_size = 1; + constexpr int outputs_size = 1; + constexpr int tensors_size = inputs_size + outputs_size; + TfLiteTensor tensors[tensors_size] = { + CreateTensor(input_data, input_dims), + CreateTensor(output_data, output_dims), + }; + + int inputs_array_data[] = {1, 0}; + TfLiteIntArray* inputs_array = IntArrayFromInts(inputs_array_data); + int outputs_array_data[] = {1, 1}; + TfLiteIntArray* outputs_array = IntArrayFromInts(outputs_array_data); + + const TFLMRegistration registration = tflite::ops::micro::Register_ROUND(); + micro::KernelRunner runner(registration, tensors, tensors_size, inputs_array, + outputs_array, nullptr); + + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, runner.InitAndPrepare()); + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, runner.Invoke()); + + for (int i = 0; i < output_dims_count; ++i) { + TF_LITE_MICRO_EXPECT_NEAR(expected_output_data[i], output_data[i], 1e-5f); + } +} + +} // namespace +} // namespace testing +} // namespace tflite + +TF_LITE_MICRO_TESTS_BEGIN + +TF_LITE_MICRO_TEST(SingleDim) { + int input_dims[] = {1, 6}; + const float input_data[] = {8.5, 0.0, 3.5, 4.2, -3.5, -4.5}; + const float golden[] = {8, 0, 4, 4, -4, -4}; + float output_data[6]; + tflite::testing::TestRound(input_dims, input_data, golden, output_data); +} + 
+TF_LITE_MICRO_TEST(MultiDims) { + int input_dims[] = {4, 2, 1, 1, 6}; + const float input_data[] = {0.0001, 8.0001, 0.9999, 9.9999, 0.5, -0.0001, + -8.0001, -0.9999, -9.9999, -0.5, -2.5, 1.5}; + const float golden[] = {0, 8, 1, 10, 0, 0, -8, -1, -10, -0, -2, 2}; + float output_data[12]; + tflite::testing::TestRound(input_dims, input_data, golden, output_data); +} + +TF_LITE_MICRO_TESTS_END diff --git a/tensorflow/lite/micro/kernels/select.cc b/tensorflow/lite/micro/kernels/select.cc new file mode 100644 index 0000000..90e6413 --- /dev/null +++ b/tensorflow/lite/micro/kernels/select.cc @@ -0,0 +1,196 @@ +/* Copyright 2023 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/ +#include "tensorflow/lite/kernels/internal/reference/select.h" + +#include +#include + +#include "tensorflow/lite/c/common.h" +#include "tensorflow/lite/kernels/internal/tensor_ctypes.h" +#include "tensorflow/lite/kernels/kernel_util.h" +#include "tensorflow/lite/micro/kernels/kernel_util.h" +#include "tensorflow/lite/micro/micro_log.h" + +namespace tflite { +namespace { + +constexpr int kInputTensorCondition = 0; +constexpr int kInputTensorX = 1; +constexpr int kInputTensorY = 2; +constexpr int kOutputTensor = 0; + +struct OpData { + bool requires_broadcast; +}; + +void* SelectInit(TfLiteContext* context, const char* buffer, size_t length) { + TFLITE_DCHECK(context->AllocatePersistentBuffer != nullptr); + auto* data = static_cast( + context->AllocatePersistentBuffer(context, sizeof(OpData))); + data->requires_broadcast = false; + return data; +} + +TfLiteStatus CheckBroadcastShape(TfLiteContext* context, + const TfLiteTensor* input1, + const TfLiteTensor* input2, + const TfLiteTensor* input3, + const TfLiteIntArray* output_shape) { + const int dims1 = NumDimensions(input1); + const int dims2 = NumDimensions(input2); + const int dims3 = NumDimensions(input3); + const int out_dims = std::max(std::max(dims1, dims2), dims3); + TF_LITE_ENSURE_EQ(context, out_dims, output_shape->size); + + for (int i = 0; i < out_dims; ++i) { + const int d1 = i >= dims1 ? 1 : SizeOfDimension(input1, dims1 - i - 1); + const int d2 = i >= dims2 ? 1 : SizeOfDimension(input2, dims2 - i - 1); + const int d3 = i >= dims3 ? 1 : SizeOfDimension(input3, dims3 - i - 1); + const int min_value = std::min(std::min(d1, d2), d3); + int max_value = std::max(std::max(d1, d2), d3); + // If one dimension is 0, others must be 0 or 1. 
+ if (min_value == 0) max_value = 0; + if (!(d1 == 1 || d1 == max_value) || !(d2 == 1 || d2 == max_value) || + !(d3 == 1 || d3 == max_value)) { + MicroPrintf("Given shapes are not broadcastable."); + return kTfLiteError; + } + TF_LITE_ENSURE_EQ(context, output_shape->data[out_dims - i - 1], max_value); + } + return kTfLiteOk; +} + +TfLiteStatus SelectPrepare(TfLiteContext* context, TfLiteNode* node) { + OpData* data = reinterpret_cast(node->user_data); + + TF_LITE_ENSURE_EQ(context, NumInputs(node), 3); + TF_LITE_ENSURE_EQ(context, NumOutputs(node), 1); + + MicroContext* micro_context = GetMicroContext(context); + TfLiteTensor* input_condition = + micro_context->AllocateTempInputTensor(node, kInputTensorCondition); + + TfLiteTensor* input_x = + micro_context->AllocateTempInputTensor(node, kInputTensorX); + + TfLiteTensor* input_y = + micro_context->AllocateTempInputTensor(node, kInputTensorY); + + TfLiteTensor* output = + micro_context->AllocateTempOutputTensor(node, kOutputTensor); + + // Input must be bool. + TF_LITE_ENSURE_TYPES_EQ(context, input_condition->type, kTfLiteBool); + TF_LITE_ENSURE_TYPES_EQ(context, input_x->type, input_y->type); + output->type = input_x->type; + + // Respect the original output shape when there are mixed shapes to represent + // a scalar data. 
+ bool possible_mixed_scaler = + GetTensorShape(input_condition).FlatSize() == 1 && + GetTensorShape(input_x).FlatSize() == 1 && + GetTensorShape(input_y).FlatSize() == 1 && + GetTensorShape(output).FlatSize() == 1; + + bool same_shape = HaveSameShapes(input_condition, input_x) && + HaveSameShapes(input_x, input_y); + if (!same_shape && !possible_mixed_scaler) { + TF_LITE_ENSURE_OK( + context, CheckBroadcastShape(context, input_condition, input_x, input_y, + output->dims)); + data->requires_broadcast = true; + } + + micro_context->DeallocateTempTfLiteTensor(input_condition); + micro_context->DeallocateTempTfLiteTensor(input_x); + micro_context->DeallocateTempTfLiteTensor(input_y); + micro_context->DeallocateTempTfLiteTensor(output); + + return kTfLiteOk; +} + +template +void CallSelect(const TfLiteEvalTensor* input_condition, + const TfLiteEvalTensor* input_x, + const TfLiteEvalTensor* input_y, TfLiteEvalTensor* output, + bool need_broadcast) { + using Func = decltype(reference_ops::Select)*; + Func select_func; + if (need_broadcast) { + select_func = reference_ops::BroadcastSelect5DSlow; + } else { + select_func = reference_ops::Select; + } + + select_func(tflite::micro::GetTensorShape(input_condition), + tflite::micro::GetTensorData(input_condition), + tflite::micro::GetTensorShape(input_x), + tflite::micro::GetTensorData(input_x), + tflite::micro::GetTensorShape(input_y), + tflite::micro::GetTensorData(input_y), + tflite::micro::GetTensorShape(output), + tflite::micro::GetTensorData(output)); +} + +TfLiteStatus SelectEval(TfLiteContext* context, TfLiteNode* node) { + OpData* data = static_cast(node->user_data); + + const TfLiteEvalTensor* input_condition = + tflite::micro::GetEvalInput(context, node, kInputTensorCondition); + + const TfLiteEvalTensor* input_x = + tflite::micro::GetEvalInput(context, node, kInputTensorX); + + const TfLiteEvalTensor* input_y = + tflite::micro::GetEvalInput(context, node, kInputTensorY); + + TfLiteEvalTensor* output = + 
tflite::micro::GetEvalOutput(context, node, kOutputTensor); + + switch (input_x->type) { + case kTfLiteFloat32: + CallSelect(input_condition, input_x, input_y, output, + data->requires_broadcast); + break; + case kTfLiteInt8: + CallSelect(input_condition, input_x, input_y, output, + data->requires_broadcast); + break; + case kTfLiteInt16: + CallSelect(input_condition, input_x, input_y, output, + data->requires_broadcast); + break; + default: + MicroPrintf("Does not support type other than %s, but got %s", + "int8|int16|float32", TfLiteTypeGetName(input_x->type)); + return kTfLiteError; + } + + return kTfLiteOk; +} + +} // namespace + +// SelectV2 op selects values of 'x' if the corresponding value of 'condition' +// is true or the value of 'y' if false. There are valid condition input sizes: +// +// 1. Either the same shape (in which case the select is elementwise), or +// 2. Broadcastable shapes between 'condition', 'x' and 'y'. +TFLMRegistration Register_SELECT_V2() { + return tflite::micro::RegisterOp(tflite::SelectInit, tflite::SelectPrepare, + tflite::SelectEval); +} + +} // namespace tflite diff --git a/tensorflow/lite/micro/kernels/select_test.cc b/tensorflow/lite/micro/kernels/select_test.cc new file mode 100644 index 0000000..1f92660 --- /dev/null +++ b/tensorflow/lite/micro/kernels/select_test.cc @@ -0,0 +1,263 @@ +/* Copyright 2023 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/ + +#include + +#include "tensorflow/lite/micro/kernels/kernel_runner.h" +#include "tensorflow/lite/micro/test_helpers.h" +#include "tensorflow/lite/micro/testing/micro_test.h" + +namespace tflite { +namespace testing { + +typedef struct { + EmptyStructPlaceholder placeholder; +} TfLiteSelectParams; + +template +void TestSelect(int* input1_dims_data, const bool* input1_data, + int* input2_dims_data, const T* input2_data, + int* input3_dims_data, const T* input3_data, + int* output_dims_data, T* output_data) { + TfLiteIntArray* input1_dims = IntArrayFromInts(input1_dims_data); + TfLiteIntArray* input2_dims = IntArrayFromInts(input2_dims_data); + TfLiteIntArray* input3_dims = IntArrayFromInts(input3_dims_data); + TfLiteIntArray* output_dims = IntArrayFromInts(output_dims_data); + + constexpr int input_size = 3; + constexpr int output_size = 1; + constexpr int tensors_size = input_size + output_size; + TfLiteTensor tensors[tensors_size] = {CreateTensor(input1_data, input1_dims), + CreateTensor(input2_data, input2_dims), + CreateTensor(input3_data, input3_dims), + CreateTensor(output_data, output_dims)}; + + int inputs_array_data[] = {3, 0, 1, 2}; + TfLiteIntArray* inputs_array = IntArrayFromInts(inputs_array_data); + int outputs_array_data[] = {1, 3}; + TfLiteIntArray* outputs_array = IntArrayFromInts(outputs_array_data); + + TfLiteSelectParams builtin_data; + const TFLMRegistration registration = tflite::Register_SELECT_V2(); + micro::KernelRunner runner(registration, tensors, tensors_size, inputs_array, + outputs_array, + reinterpret_cast(&builtin_data)); + + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, runner.InitAndPrepare()); + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, runner.Invoke()); +} + +template +void ExpectEqual(int* dims, const T* expected_data, const T* output_data) { + TfLiteIntArray* dims_array = IntArrayFromInts(dims); + const int element_count = ElementCount(*dims_array); + for (int i = 
0; i < element_count; ++i) { + TF_LITE_MICRO_EXPECT_EQ(expected_data[i], output_data[i]); + } +} + +template +void ExpectNear(int* dims, const T* expected_data, const T* output_data) { + TfLiteIntArray* dims_array = IntArrayFromInts(dims); + const int element_count = ElementCount(*dims_array); + for (int i = 0; i < element_count; ++i) { + TF_LITE_MICRO_EXPECT_NEAR(expected_data[i], output_data[i], 1e-5f); + } +} + +} // namespace testing +} // namespace tflite + +TF_LITE_MICRO_TESTS_BEGIN + +TF_LITE_MICRO_TEST(SelectFloat) { + int inout_shape[] = {4, 1, 1, 1, 4}; + + const bool input1_data[] = {true, false, true, false}; + const float input2_data[] = {0.1f, 0.2f, 0.3f, 0.4f}; + const float input3_data[] = {0.5f, 0.6f, 0.7f, 0.8f}; + const float expected_output[] = {0.1f, 0.6f, 0.3, 0.8f}; + + float output_data[4]; + tflite::testing::TestSelect(inout_shape, input1_data, inout_shape, + input2_data, inout_shape, input3_data, + inout_shape, output_data); + tflite::testing::ExpectNear(inout_shape, expected_output, output_data); +} + +TF_LITE_MICRO_TEST(SelectInt8) { + int inout_shape[] = {4, 1, 1, 1, 4}; + + const bool input1_data[] = {false, true, false, false}; + const int8_t input2_data[] = {1, -2, 3, 4}; + const int8_t input3_data[] = {5, 6, 7, -8}; + const int8_t expected_output[] = {5, -2, 7, -8}; + + int8_t output_data[4]; + tflite::testing::TestSelect(inout_shape, input1_data, inout_shape, + input2_data, inout_shape, input3_data, + inout_shape, output_data); + tflite::testing::ExpectEqual(inout_shape, expected_output, output_data); +} + +TF_LITE_MICRO_TEST(SelectInt16) { + int inout_shape[] = {4, 1, 1, 1, 4}; + + const bool input1_data[] = {false, true, false, false}; + const int16_t input2_data[] = {1, 2, 3, 4}; + const int16_t input3_data[] = {5, 6, 7, 8}; + const int16_t expected_output[] = {5, 2, 7, 8}; + + int16_t output_data[4]; + tflite::testing::TestSelect(inout_shape, input1_data, inout_shape, + input2_data, inout_shape, input3_data, + inout_shape, 
output_data); + tflite::testing::ExpectEqual(inout_shape, expected_output, output_data); +} + +TF_LITE_MICRO_TEST(BroadcastSelectInt16OneDimensionConditionWithSingleValue) { + int input1_shape[] = {1, 1}; + int input2_shape[] = {5, 1, 2, 2, 2, 1}; + int input3_shape[] = {4, 1, 2, 2, 1}; + + const bool input1_data[] = {false}; + const int16_t input2_data[] = {1, 2, 3, 4, 5, 6, 7, 8}; + const int16_t input3_data[] = {9, 10, 11, 12}; + const int16_t expected_output[] = {9, 10, 11, 12, 9, 10, 11, 12}; + + int16_t output_data[8]; + tflite::testing::TestSelect(input1_shape, input1_data, input2_shape, + input2_data, input3_shape, input3_data, + input2_shape, output_data); + tflite::testing::ExpectEqual(input2_shape, expected_output, output_data); +} + +TF_LITE_MICRO_TEST(BroadcastSelectInt16LesserThan4D) { + int input1_shape[] = {2, 1, 2}; + int inout_shape[] = {3, 1, 2, 2}; + + const bool input1_data[] = {false, true}; + const int16_t input2_data[] = {1, 2, 3, 4}; + const int16_t input3_data[] = {5, 6, 7, 8}; + const int16_t expected_output[] = {5, 2, 7, 4}; + + int16_t output_data[4]; + tflite::testing::TestSelect(input1_shape, input1_data, inout_shape, + input2_data, inout_shape, input3_data, + inout_shape, output_data); + tflite::testing::ExpectEqual(inout_shape, expected_output, output_data); +} + +TF_LITE_MICRO_TEST(BroadcastSelectInt16OnFalseValue) { + int input1_shape[] = {1, 1}; + int inout_shape[] = {3, 1, 2, 2}; + + const bool input1_data[] = {false}; + const int16_t input2_data[] = {1, 2, 3, 4}; + const int16_t input3_data[] = {5, 6, 7, 8}; + const int16_t expected_output[] = {5, 6, 7, 8}; + + int16_t output_data[4]; + tflite::testing::TestSelect(input1_shape, input1_data, inout_shape, + input2_data, inout_shape, input3_data, + inout_shape, output_data); + tflite::testing::ExpectEqual(inout_shape, expected_output, output_data); +} + +TF_LITE_MICRO_TEST(BroadcastSelectInt16) { + int input1_shape[] = {2, 1, 2}; + int inout_shape[] = {3, 1, 2, 2}; + + const bool 
input1_data[] = {false, true}; + const int16_t input2_data[] = {1, 2, 3, 4}; + const int16_t input3_data[] = {5, 6, 7, 7}; + const int16_t expected_output[] = {5, 2, 7, 4}; + + int16_t output_data[4]; + tflite::testing::TestSelect(input1_shape, input1_data, inout_shape, + input2_data, inout_shape, input3_data, + inout_shape, output_data); + tflite::testing::ExpectEqual(inout_shape, expected_output, output_data); +} + +TF_LITE_MICRO_TEST(BroadcastSelectInt16OneDimensionConditionWithTwoValues) { + int input1_shape[] = {1, 2}; + int input_shape[] = {4, 2, 1, 2, 1}; + int output_shape[] = {4, 2, 1, 2, 2}; + + const bool input1_data[] = {false, true}; + const int16_t input2_data[] = {1, 2, 3, 4}; + const int16_t input3_data[] = {5, 6, 7, 8}; + const int16_t expected_output[] = {5, 1, 6, 2, 7, 3, 8, 4}; + + int16_t output_data[8]; + tflite::testing::TestSelect(input1_shape, input1_data, input_shape, + input2_data, input_shape, input3_data, + output_shape, output_data); + tflite::testing::ExpectEqual(output_shape, expected_output, output_data); +} + +TF_LITE_MICRO_TEST(MixedFlatSizeOneInputsWithScalarInputConditionTensor) { + int input1_shape[] = {0}; // conditional data is a scalar + int input_shape[] = {1, 1}; + int output_shape[] = {0}; // output data is a scalar + + const bool input1_data[] = {false}; + const int16_t input2_data[] = {1}; + const int16_t input3_data[] = {5}; + const int16_t expected_output[] = {5}; + + int16_t output_data[std::extent::value]; + tflite::testing::TestSelect(input1_shape, input1_data, input_shape, + input2_data, input_shape, input3_data, + output_shape, output_data); + tflite::testing::ExpectEqual(output_shape, expected_output, output_data); +} + +TF_LITE_MICRO_TEST(MixedFlatSizeOneInputsWithScalarInputXTensor) { + int input2_shape[] = {0}; // x data is a scalar + int input_shape[] = {1, 1}; + int output_shape[] = {0}; // output data is a scalar + + const bool input1_data[] = {true}; + const int16_t input2_data[] = {1}; + const int16_t 
input3_data[] = {5}; + const int16_t expected_output[] = {1}; + + int16_t output_data[std::extent::value]; + tflite::testing::TestSelect(input_shape, input1_data, input2_shape, + input2_data, input_shape, input3_data, + output_shape, output_data); + tflite::testing::ExpectEqual(output_shape, expected_output, output_data); +} + +TF_LITE_MICRO_TEST(MixedFlatSizeOneInputsWithScalarInputYTensor) { + int input3_shape[] = {0}; // y data is a scalar + int input_shape[] = {1, 1}; + int output_shape[] = {0}; // output data is a scalar + + const bool input1_data[] = {false}; + const int16_t input2_data[] = {1}; + const int16_t input3_data[] = {5}; + const int16_t expected_output[] = {5}; + + int16_t output_data[std::extent::value]; + tflite::testing::TestSelect(input_shape, input1_data, input_shape, + input2_data, input3_shape, input3_data, + output_shape, output_data); + tflite::testing::ExpectEqual(output_shape, expected_output, output_data); +} + +TF_LITE_MICRO_TESTS_END diff --git a/tensorflow/lite/micro/kernels/shape.cc b/tensorflow/lite/micro/kernels/shape.cc new file mode 100644 index 0000000..a39bfc0 --- /dev/null +++ b/tensorflow/lite/micro/kernels/shape.cc @@ -0,0 +1,67 @@ +/* Copyright 2017 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/ + +#include "tensorflow/lite/c/builtin_op_data.h" +#include "tensorflow/lite/c/common.h" +#include "tensorflow/lite/kernels/internal/tensor_ctypes.h" +#include "tensorflow/lite/kernels/kernel_util.h" +#include "tensorflow/lite/kernels/op_macros.h" +#include "tensorflow/lite/micro/kernels/kernel_util.h" +#include "tensorflow/lite/micro/memory_helpers.h" +#include "tensorflow/lite/micro/micro_log.h" +#include "tensorflow/lite/micro/micro_utils.h" + +namespace tflite { + +namespace { +constexpr int kInputTensor = 0; +constexpr int kOutputTensor = 0; + +void ExtractShape(const TfLiteEvalTensor* input, int32_t* output_data) { + for (int i = 0; i < input->dims->size; ++i) { + output_data[i] = input->dims->data[i]; + } +} + +TfLiteStatus Prepare(TfLiteContext* context, TfLiteNode* node) { + TF_LITE_ENSURE_EQ(context, NumInputs(node), 1); + TF_LITE_ENSURE_EQ(context, NumOutputs(node), 1); + + return kTfLiteOk; +} + +TfLiteStatus Eval(TfLiteContext* context, TfLiteNode* node) { + const TfLiteEvalTensor* input = + tflite::micro::GetEvalInput(context, node, kInputTensor); + TfLiteEvalTensor* output = + tflite::micro::GetEvalOutput(context, node, kOutputTensor); + if (output->type != kTfLiteInt32) { + MicroPrintf("Output type %s (%d) not supported.", + TfLiteTypeGetName(output->type), output->type); + return kTfLiteError; + } else { + ExtractShape(input, tflite::micro::GetTensorData(output)); + } + + return kTfLiteOk; +} + +} // namespace + +TFLMRegistration Register_SHAPE() { + return tflite::micro::RegisterOp(nullptr, Prepare, Eval); +} + +} // namespace tflite diff --git a/tensorflow/lite/micro/kernels/shape_test.cc b/tensorflow/lite/micro/kernels/shape_test.cc new file mode 100644 index 0000000..413b726 --- /dev/null +++ b/tensorflow/lite/micro/kernels/shape_test.cc @@ -0,0 +1,137 @@ +/* Copyright 2017 The TensorFlow Authors. All Rights Reserved. 
+ +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#include "tensorflow/lite/c/builtin_op_data.h" +#include "tensorflow/lite/c/common.h" +#include "tensorflow/lite/micro/kernels/kernel_runner.h" +#include "tensorflow/lite/micro/test_helpers.h" +#include "tensorflow/lite/micro/testing/micro_test.h" + +namespace tflite { +namespace testing { +namespace { + +void ValidateShape(TfLiteTensor* tensors, const int tensor_count, + int32_t* output_data, const int32_t* expected_output, + int output_dims_count) { + int inputs_array_data[] = {1, 0}; + TfLiteIntArray* inputs_array = IntArrayFromInts(inputs_array_data); + int outputs_array_data[] = {1, 1}; + TfLiteIntArray* outputs_array = IntArrayFromInts(outputs_array_data); + + const TFLMRegistration registration = tflite::Register_SHAPE(); + micro::KernelRunner runner(registration, tensors, tensor_count, inputs_array, + outputs_array, nullptr); + + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, runner.InitAndPrepare()); + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, runner.Invoke()); + + for (int i = 0; i < output_dims_count; ++i) { + TF_LITE_MICRO_EXPECT_EQ(expected_output[i], output_data[i]); + } +} + +void TestShape(int* input_dims_data, const float* input_data, + int* output_dims_data, const int32_t* expected_output_data, + int32_t* output_data) { + TfLiteIntArray* input_dims = IntArrayFromInts(input_dims_data); + TfLiteIntArray* output_dims = IntArrayFromInts(output_dims_data); + const 
int output_dims_count = ElementCount(*output_dims); + + constexpr int inputs_size = 1; + constexpr int outputs_size = 1; + constexpr int tensors_size = inputs_size + outputs_size; + TfLiteTensor tensors[tensors_size] = { + CreateTensor(input_data, input_dims), + CreateTensor(output_data, output_dims, true), + }; + + ValidateShape(tensors, tensors_size, output_data, expected_output_data, + output_dims_count); +} + +} // namespace +} // namespace testing +} // namespace tflite + +TF_LITE_MICRO_TESTS_BEGIN + +TF_LITE_MICRO_TEST(TestShape0) { + int input_shape[] = {1, 5}; + float input_values[] = {1, 3, 1, 3, 5}; + int output_dims[] = {1, 1}; // this is actually input_shapes shape + int32_t expected_output_data[] = {5}; + int32_t output_data[1]; + + tflite::testing::TestShape(input_shape, input_values, output_dims, + expected_output_data, output_data); +} + +TF_LITE_MICRO_TEST(TestShape1) { + int input_shape[] = {2, 4, 3}; + float input_values[] = {1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12}; + int output_dims[] = {2, 1, 1}; + int32_t expected_output_data[] = {4, 3}; + int32_t output_data[2]; + + tflite::testing::TestShape(input_shape, input_values, output_dims, + expected_output_data, output_data); +} + +TF_LITE_MICRO_TEST(TestShape2) { + int input_shape[] = {2, 12, 1}; + float input_values[] = {1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12}; + int output_dims[] = {2, 1, 1}; + int32_t expected_output_data[] = {12, 1}; + int32_t output_data[2]; + + tflite::testing::TestShape(input_shape, input_values, output_dims, + expected_output_data, output_data); +} + +TF_LITE_MICRO_TEST(TestShape3) { + int input_shape[] = {2, 2, 6}; + float input_values[] = {1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12}; + int output_dims[] = {2, 1, 1}; + int32_t expected_output_data[] = {2, 6}; + int32_t output_data[2]; + + tflite::testing::TestShape(input_shape, input_values, output_dims, + expected_output_data, output_data); +} + +TF_LITE_MICRO_TEST(TestShape4) { + int input_shape[] = {2, 2, 2, 3}; + float 
input_values[] = {1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12}; + int output_dims[] = {3, 1, 1, 1}; + int32_t expected_output_data[] = {2, 2, 3}; + int32_t output_data[3]; + + tflite::testing::TestShape(input_shape, input_values, output_dims, + expected_output_data, output_data); +} + +TF_LITE_MICRO_TEST(TestShape5) { + int input_shape[] = {1, 1}; + float input_values[] = {1}; + int output_dims[] = {1, 1}; + int32_t expected_output_data[] = {1}; + int32_t output_data[1]; + + tflite::testing::TestShape(input_shape, input_values, output_dims, + expected_output_data, output_data); +} + +TF_LITE_MICRO_TESTS_END diff --git a/tensorflow/lite/micro/kernels/slice.cc b/tensorflow/lite/micro/kernels/slice.cc new file mode 100644 index 0000000..973da18 --- /dev/null +++ b/tensorflow/lite/micro/kernels/slice.cc @@ -0,0 +1,164 @@ +/* Copyright 2021 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/ + +#include "tensorflow/lite/kernels/internal/reference/slice.h" + +#include "tensorflow/lite/c/builtin_op_data.h" +#include "tensorflow/lite/c/common.h" +#include "tensorflow/lite/kernels/internal/tensor_ctypes.h" +#include "tensorflow/lite/kernels/kernel_util.h" +#include "tensorflow/lite/micro/kernels/kernel_util.h" +#include "tensorflow/lite/micro/micro_log.h" + +namespace tflite { + +namespace { + +constexpr int kInputTensor = 0; +constexpr int kBeginTensor = 1; +constexpr int kSizeTensor = 2; +constexpr int kOutputTensor = 0; + +const int kMaxDim = 5; + +template +void GetBeginAndSizeVectors(int dimensions, const TfLiteEvalTensor* begin, + const TfLiteEvalTensor* size, int32_t* begins, + int32_t* sizes) { + int offset = kMaxDim - dimensions; + for (int idx = 0; idx < dimensions; ++idx) { + begins[offset + idx] = tflite::micro::GetTensorData(begin)[idx]; + sizes[offset + idx] = tflite::micro::GetTensorData(size)[idx]; + } +} + +TfLiteStatus Prepare(TfLiteContext* context, TfLiteNode* node) { + MicroContext* micro_context = GetMicroContext(context); + + TF_LITE_ENSURE_EQ(context, NumInputs(node), 3); + TF_LITE_ENSURE_EQ(context, NumOutputs(node), 1); + + TfLiteTensor* input = + micro_context->AllocateTempInputTensor(node, kInputTensor); + TFLITE_DCHECK(input != nullptr); + TfLiteTensor* begin = + micro_context->AllocateTempInputTensor(node, kBeginTensor); + TFLITE_DCHECK(begin != nullptr); + TfLiteTensor* size = + micro_context->AllocateTempInputTensor(node, kSizeTensor); + TFLITE_DCHECK(size != nullptr); + TfLiteTensor* output = + micro_context->AllocateTempOutputTensor(node, kOutputTensor); + TFLITE_DCHECK(output != nullptr); + + // Ensure validity of input tensor and its dimension. 
+ TFLITE_DCHECK(input->type == output->type); + TFLITE_DCHECK(begin->type == size->type); + TFLITE_DCHECK(begin->type == kTfLiteInt32 || begin->type == kTfLiteInt64); + TFLITE_DCHECK(size->type == kTfLiteInt32 || size->type == kTfLiteInt64); + TFLITE_DCHECK(NumDimensions(begin) == 1); + TFLITE_DCHECK(NumDimensions(size) == 1); + TFLITE_DCHECK(NumElements(begin) == NumElements(size)); + TFLITE_DCHECK(NumDimensions(input) <= kMaxDim); + + micro_context->DeallocateTempTfLiteTensor(input); + micro_context->DeallocateTempTfLiteTensor(begin); + micro_context->DeallocateTempTfLiteTensor(size); + micro_context->DeallocateTempTfLiteTensor(output); + + return kTfLiteOk; +} + +TfLiteStatus Eval(TfLiteContext* context, TfLiteNode* node) { + const TfLiteEvalTensor* input = + tflite::micro::GetEvalInput(context, node, kInputTensor); + const TfLiteEvalTensor* begin = + tflite::micro::GetEvalInput(context, node, kBeginTensor); + const TfLiteEvalTensor* size = + tflite::micro::GetEvalInput(context, node, kSizeTensor); + TfLiteEvalTensor* output = + tflite::micro::GetEvalOutput(context, node, kOutputTensor); + + tflite::SliceParams op_params; + op_params.begin_count = kMaxDim; + op_params.size_count = kMaxDim; + for (int i = 0; i < kMaxDim; ++i) { + op_params.begin[i] = 0; + op_params.size[i] = 1; + } + + if (begin->type == kTfLiteInt32) { + GetBeginAndSizeVectors(input->dims->size, begin, size, + op_params.begin, op_params.size); + } else if (begin->type == kTfLiteInt64) { + GetBeginAndSizeVectors(input->dims->size, begin, size, + op_params.begin, op_params.size); + } else { + MicroPrintf("Begin tensor type %s (%d) not supported.", + TfLiteTypeGetName(input->type), input->type); + return kTfLiteError; + } + + switch (input->type) { + case kTfLiteFloat32: + reference_ops::Slice(op_params, + tflite::micro::GetTensorShape(input), + tflite::micro::GetTensorData(input), + tflite::micro::GetTensorShape(output), + tflite::micro::GetTensorData(output)); + break; + case kTfLiteInt32: + 
reference_ops::Slice( + op_params, tflite::micro::GetTensorShape(input), + tflite::micro::GetTensorData(input), + tflite::micro::GetTensorShape(output), + tflite::micro::GetTensorData(output)); + break; + case kTfLiteInt8: + reference_ops::Slice( + op_params, tflite::micro::GetTensorShape(input), + tflite::micro::GetTensorData(input), + tflite::micro::GetTensorShape(output), + tflite::micro::GetTensorData(output)); + break; + case kTfLiteInt16: + reference_ops::Slice( + op_params, tflite::micro::GetTensorShape(input), + tflite::micro::GetTensorData(input), + tflite::micro::GetTensorShape(output), + tflite::micro::GetTensorData(output)); + break; + case kTfLiteBool: + reference_ops::Slice(op_params, + tflite::micro::GetTensorShape(input), + tflite::micro::GetTensorData(input), + tflite::micro::GetTensorShape(output), + tflite::micro::GetTensorData(output)); + break; + default: + MicroPrintf("Input tensor type %s (%d) not supported.", + TfLiteTypeGetName(input->type), input->type); + return kTfLiteError; + } + return kTfLiteOk; +} + +} // namespace + +TFLMRegistration Register_SLICE() { + return tflite::micro::RegisterOp(nullptr, Prepare, Eval); +} + +} // namespace tflite diff --git a/tensorflow/lite/micro/kernels/slice_test.cc b/tensorflow/lite/micro/kernels/slice_test.cc new file mode 100644 index 0000000..e787c0c --- /dev/null +++ b/tensorflow/lite/micro/kernels/slice_test.cc @@ -0,0 +1,340 @@ +/* Copyright 2017 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#include "tensorflow/lite/c/builtin_op_data.h" +#include "tensorflow/lite/c/common.h" +#include "tensorflow/lite/micro/kernels/kernel_runner.h" +#include "tensorflow/lite/micro/test_helpers.h" +#include "tensorflow/lite/micro/testing/micro_test.h" + +namespace tflite { +namespace testing { +namespace { + +template +void TestSlice(int* input_dims_data, const dataT* input_data, + int* begin_dims_data, const shapeT* begin_data, + int* size_dims_data, const shapeT* size_data, + int* output_dims_data, const dataT* expected_output_data, + dataT* output_data) { + TfLiteIntArray* input_dims = IntArrayFromInts(input_dims_data); + TfLiteIntArray* begin_dims = IntArrayFromInts(begin_dims_data); + TfLiteIntArray* size_dims = IntArrayFromInts(size_dims_data); + TfLiteIntArray* output_dims = IntArrayFromInts(output_dims_data); + const int output_dims_count = ElementCount(*output_dims); + + constexpr int inputs_size = 3; + constexpr int outputs_size = 1; + constexpr int tensors_size = inputs_size + outputs_size; + TfLiteTensor tensors[tensors_size] = { + CreateTensor(input_data, input_dims), + CreateTensor(begin_data, begin_dims), + CreateTensor(size_data, size_dims), + CreateTensor(output_data, output_dims), + }; + + int inputs_array_data[] = {3, 0, 1, 2}; + TfLiteIntArray* inputs_array = IntArrayFromInts(inputs_array_data); + int outputs_array_data[] = {1, 3}; + TfLiteIntArray* outputs_array = IntArrayFromInts(outputs_array_data); + + const TFLMRegistration registration = tflite::Register_SLICE(); + micro::KernelRunner runner(registration, tensors, tensors_size, inputs_array, + outputs_array, nullptr); + + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, runner.InitAndPrepare()); + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, runner.Invoke()); + + for (int i = 0; i < output_dims_count; ++i) { + 
TF_LITE_MICRO_EXPECT_EQ(expected_output_data[i], output_data[i]); + } +} + +} // namespace +} // namespace testing +} // namespace tflite + +TF_LITE_MICRO_TESTS_BEGIN + +TF_LITE_MICRO_TEST(In1D) { + int input_shape[] = {1, 4}; + float input_values[] = {1, 2, 3, 4}; + int begin_shape[] = {1, 1}; + int32_t begin_values[] = {1}; + int size_shape[] = {1, 1}; + int32_t size_values[] = {2}; + int output_shape[] = {1, 2}; + float expected_output_data[] = {2, 3}; + float output_data[2]; + + tflite::testing::TestSlice(input_shape, input_values, begin_shape, + begin_values, size_shape, size_values, + output_shape, expected_output_data, output_data); +} + +TF_LITE_MICRO_TEST(In2D) { + int input_shape[] = {2, 2, 3}; + float input_values[] = {1, 2, 3, 4, 5, 6}; + int begin_shape[] = {1, 2}; + int32_t begin_values[] = {1, 0}; + int size_shape[] = {1, 2}; + int32_t size_values[] = {1, 2}; + int output_shape[] = {1, 2}; + float expected_output_data[] = {4, 5}; + float output_data[2]; + + tflite::testing::TestSlice(input_shape, input_values, begin_shape, + begin_values, size_shape, size_values, + output_shape, expected_output_data, output_data); +} + +TF_LITE_MICRO_TEST(In3D) { + int input_shape[] = {3, 2, 3, 2}; + float input_values[] = {1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12}; + int begin_shape[] = {1, 3}; + int32_t begin_values[] = {0, 0, 0}; + int size_shape[] = {1, 3}; + int32_t size_values[] = {2, 3, 2}; + int output_shape[] = {3, 2, 3, 2}; + float expected_output_data[] = {1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12}; + float output_data[12]; + + tflite::testing::TestSlice(input_shape, input_values, begin_shape, + begin_values, size_shape, size_values, + output_shape, expected_output_data, output_data); +} + +TF_LITE_MICRO_TEST(In5D) { + int input_shape[] = {5, 5, 1, 1, 1, 1}; + float input_values[] = {1, 2, 3, 4, 5}; + int begin_shape[] = {1, 5}; + int32_t begin_values[] = {1, 0, 0, 0, 0}; + int size_shape[] = {1, 5}; + int32_t size_values[] = {3, 1, 1, 1, 1}; + int output_shape[] 
= {5, 3, 1, 1, 1, 1}; + float expected_output_data[] = {2, 3, 4}; + float output_data[3]; + + tflite::testing::TestSlice(input_shape, input_values, begin_shape, + begin_values, size_shape, size_values, + output_shape, expected_output_data, output_data); +} + +TF_LITE_MICRO_TEST(InputFloat) { + int input_shape[] = {4, 4, 1, 1, 1}; + float input_values[] = {1, 2, 3, 4}; + int begin_shape[] = {1, 4}; + int32_t begin_values[] = {1, 0, 0, 0}; + int size_shape[] = {1, 4}; + int32_t size_values[] = {3, 1, 1, 1}; + int output_shape[] = {4, 3, 1, 1, 1}; + float expected_output_data[] = {2, 3, 4}; + float output_data[3]; + + tflite::testing::TestSlice(input_shape, input_values, begin_shape, + begin_values, size_shape, size_values, + output_shape, expected_output_data, output_data); +} + +TF_LITE_MICRO_TEST(IndexInt64) { + int input_shape[] = {4, 4, 1, 1, 1}; + float input_values[] = {1, 2, 3, 4}; + int begin_shape[] = {1, 4}; + int64_t begin_values[] = {1, 0, 0, 0}; + int size_shape[] = {1, 4}; + int64_t size_values[] = {3, 1, 1, 1}; + int output_shape[] = {4, 3, 1, 1, 1}; + float expected_output_data[] = {2, 3, 4}; + float output_data[3]; + + tflite::testing::TestSlice(input_shape, input_values, begin_shape, + begin_values, size_shape, size_values, + output_shape, expected_output_data, output_data); +} + +// See these test cases under: +// https://www.tensorflow.org/versions/master/api_docs/python/tf/slice +TF_LITE_MICRO_TEST(InputInteger1) { + int input_shape[] = {4, 3, 2, 3, 1}; + int32_t input_values[] = {1, 1, 1, 2, 2, 2, 3, 3, 3, + 4, 4, 4, 5, 5, 5, 6, 6, 6}; + int begin_shape[] = {1, 4}; + int32_t begin_values[] = {1, 0, 0, 0}; + int size_shape[] = {1, 4}; + int32_t size_values[] = {1, 1, 3, 1}; + int output_shape[] = {4, 1, 1, 3, 1}; + int32_t expected_output_data[] = {3, 3, 3}; + int32_t output_data[3]; + + tflite::testing::TestSlice(input_shape, input_values, begin_shape, + begin_values, size_shape, size_values, + output_shape, expected_output_data, output_data); 
+} + +TF_LITE_MICRO_TEST(InputInteger2) { + int input_shape[] = {4, 3, 2, 3, 1}; + int32_t input_values[] = {1, 1, 1, 2, 2, 2, 3, 3, 3, + 4, 4, 4, 5, 5, 5, 6, 6, 6}; + int begin_shape[] = {1, 4}; + int32_t begin_values[] = {1, 0, 0, 0}; + int size_shape[] = {1, 4}; + int32_t size_values[] = {1, 2, 3, 1}; + int output_shape[] = {4, 1, 2, 3, 1}; + int32_t expected_output_data[] = {3, 3, 3, 4, 4, 4}; + int32_t output_data[6]; + + tflite::testing::TestSlice(input_shape, input_values, begin_shape, + begin_values, size_shape, size_values, + output_shape, expected_output_data, output_data); +} + +TF_LITE_MICRO_TEST(InputInteger3) { + int input_shape[] = {4, 3, 2, 3, 1}; + int32_t input_values[] = {1, 1, 1, 2, 2, 2, 3, 3, 3, + 4, 4, 4, 5, 5, 5, 6, 6, 6}; + int begin_shape[] = {1, 4}; + int32_t begin_values[] = {1, 0, 0, 0}; + int size_shape[] = {1, 4}; + int32_t size_values[] = {2, 1, 3, 1}; + int output_shape[] = {4, 2, 1, 3, 1}; + int32_t expected_output_data[] = {3, 3, 3, 5, 5, 5}; + int32_t output_data[6]; + + tflite::testing::TestSlice(input_shape, input_values, begin_shape, + begin_values, size_shape, size_values, + output_shape, expected_output_data, output_data); +} + +TF_LITE_MICRO_TEST(SizeMinus1) { + int input_shape[] = {4, 3, 2, 3, 1}; + int32_t input_values[] = {1, 1, 1, 2, 2, 2, 3, 3, 3, + 4, 4, 4, 5, 5, 5, 6, 6, 6}; + int begin_shape[] = {1, 4}; + int32_t begin_values[] = {1, 0, 0, 0}; + int size_shape[] = {1, 4}; + int32_t size_values[] = {2, 1, -1, 1}; + int output_shape[] = {4, 2, 1, 3, 1}; + int32_t expected_output_data[] = {3, 3, 3, 5, 5, 5}; + int32_t output_data[6]; + + tflite::testing::TestSlice(input_shape, input_values, begin_shape, + begin_values, size_shape, size_values, + output_shape, expected_output_data, output_data); +} + +TF_LITE_MICRO_TEST(BeginNonZeroSizeMinus1Axis1) { + int input_shape[] = {4, 3, 3, 2, 1}; + int32_t input_values[] = {1, 1, 2, 2, 3, 3, 4, 4, 5, + 5, 6, 6, 7, 7, 8, 8, 9, 9}; + int begin_shape[] = {1, 4}; + int32_t 
begin_values[] = {1, 1, 0, 0}; + int size_shape[] = {1, 4}; + int32_t size_values[] = {2, -1, 1, 1}; + int output_shape[] = {4, 2, 2, 1, 1}; + int32_t expected_output_data[] = {5, 6, 8, 9}; + int32_t output_data[4]; + + tflite::testing::TestSlice(input_shape, input_values, begin_shape, + begin_values, size_shape, size_values, + output_shape, expected_output_data, output_data); +} + +TF_LITE_MICRO_TEST(BeginNonZeroSizeMinus1Axis2) { + int input_shape[] = {4, 3, 2, 3, 1}; + int32_t input_values[] = {1, 1, 1, 2, 2, 2, 3, 3, 3, + 4, 4, 4, 5, 5, 5, 6, 6, 6}; + int begin_shape[] = {1, 4}; + int32_t begin_values[] = {1, 0, 1, 0}; + int size_shape[] = {1, 4}; + int32_t size_values[] = {2, 1, -1, 1}; + int output_shape[] = {4, 2, 1, 2, 1}; + int32_t expected_output_data[] = {3, 3, 5, 5}; + int32_t output_data[4]; + + tflite::testing::TestSlice(input_shape, input_values, begin_shape, + begin_values, size_shape, size_values, + output_shape, expected_output_data, output_data); +} + +TF_LITE_MICRO_TEST(BeginNonZeroSizeMinus1Axis3) { + int input_shape[] = {4, 3, 1, 2, 3}; + int32_t input_values[] = {1, 1, 1, 2, 2, 2, 3, 3, 3, + 4, 4, 4, 5, 5, 5, 6, 6, 6}; + int begin_shape[] = {1, 4}; + int32_t begin_values[] = {1, 0, 0, 1}; + int size_shape[] = {1, 4}; + int32_t size_values[] = {2, 1, 1, -1}; + int output_shape[] = {4, 2, 1, 1, 2}; + int32_t expected_output_data[] = {3, 3, 5, 5}; + int32_t output_data[4]; + + tflite::testing::TestSlice(input_shape, input_values, begin_shape, + begin_values, size_shape, size_values, + output_shape, expected_output_data, output_data); +} + +TF_LITE_MICRO_TEST(SliceInt8) { + int input_shape[] = {4, 3, 2, 3, 1}; + int8_t input_values[] = {1, 1, 1, 2, 2, 2, 3, 3, 3, + 4, 4, 4, 5, 5, 5, 6, 6, 6}; + int begin_shape[] = {1, 4}; + int32_t begin_values[] = {1, 0, 0, 0}; + int size_shape[] = {1, 4}; + int32_t size_values[] = {2, 1, -1, 1}; + int output_shape[] = {4, 2, 1, 3, 1}; + int8_t expected_output_data[] = {3, 3, 3, 5, 5, 5}; + int8_t 
output_data[6]; + + tflite::testing::TestSlice(input_shape, input_values, begin_shape, + begin_values, size_shape, size_values, + output_shape, expected_output_data, output_data); +} + +TF_LITE_MICRO_TEST(SliceInt16) { + int input_shape[] = {4, 3, 2, 3, 1}; + int16_t input_values[] = {1, 1, 1, 2, 2, 2, 3, 3, 3, + 4, 4, 4, 5, 5, 5, 6, 6, 6}; + int begin_shape[] = {1, 4}; + int32_t begin_values[] = {1, 0, 0, 0}; + int size_shape[] = {1, 4}; + int32_t size_values[] = {2, 1, -1, 1}; + int output_shape[] = {4, 2, 1, 3, 1}; + int16_t expected_output_data[] = {3, 3, 3, 5, 5, 5}; + int16_t output_data[6]; + + tflite::testing::TestSlice(input_shape, input_values, begin_shape, + begin_values, size_shape, size_values, + output_shape, expected_output_data, output_data); +} + +TF_LITE_MICRO_TEST(SliceBool) { + int input_shape[] = {4, 3, 2, 3, 1}; + bool input_values[] = {false, false, false, false, false, false, + true, false, true, false, false, false, + false, false, true, false, false, false}; + int begin_shape[] = {1, 4}; + int32_t begin_values[] = {1, 0, 0, 0}; + int size_shape[] = {1, 4}; + int32_t size_values[] = {2, 1, -1, 1}; + int output_shape[] = {4, 2, 1, 3, 1}; + bool expected_output_data[] = {true, false, true, false, false, true}; + bool output_data[6]; + + tflite::testing::TestSlice(input_shape, input_values, begin_shape, + begin_values, size_shape, size_values, + output_shape, expected_output_data, output_data); +} + +TF_LITE_MICRO_TESTS_END diff --git a/tensorflow/lite/micro/kernels/softmax.cc b/tensorflow/lite/micro/kernels/softmax.cc new file mode 100644 index 0000000..b154ecb --- /dev/null +++ b/tensorflow/lite/micro/kernels/softmax.cc @@ -0,0 +1,90 @@ +/* Copyright 2021 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. 
+You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#include "tensorflow/lite/micro/kernels/softmax.h" + +#include "tensorflow/lite/c/builtin_op_data.h" +#include "tensorflow/lite/c/common.h" +#include "tensorflow/lite/kernels/internal/common.h" +#include "tensorflow/lite/kernels/internal/quantization_util.h" +#include "tensorflow/lite/kernels/internal/reference/softmax.h" +#include "tensorflow/lite/kernels/internal/tensor_ctypes.h" +#include "tensorflow/lite/kernels/kernel_util.h" +#include "tensorflow/lite/kernels/op_macros.h" +#include "tensorflow/lite/micro/kernels/kernel_util.h" +#include "tensorflow/lite/micro/micro_log.h" + +namespace tflite { +namespace { + +void SoftmaxQuantized(const TfLiteEvalTensor* input, TfLiteEvalTensor* output, + const SoftmaxParams& op_data) { + if (input->type == kTfLiteInt8) { + if (output->type == kTfLiteInt16) { + tflite::reference_ops::Softmax( + op_data, tflite::micro::GetTensorShape(input), + tflite::micro::GetTensorData(input), + tflite::micro::GetTensorShape(output), + tflite::micro::GetTensorData(output)); + } else { + tflite::reference_ops::Softmax( + op_data, tflite::micro::GetTensorShape(input), + tflite::micro::GetTensorData(input), + tflite::micro::GetTensorShape(output), + tflite::micro::GetTensorData(output)); + } + } else { + tflite::reference_ops::SoftmaxInt16( + op_data, tflite::micro::GetTensorShape(input), + tflite::micro::GetTensorData(input), + tflite::micro::GetTensorShape(output), + tflite::micro::GetTensorData(output)); + } +} + +TfLiteStatus SoftmaxEval(TfLiteContext* context, 
TfLiteNode* node) { + const TfLiteEvalTensor* input = tflite::micro::GetEvalInput(context, node, 0); + TfLiteEvalTensor* output = tflite::micro::GetEvalOutput(context, node, 0); + + TFLITE_DCHECK(node->user_data != nullptr); + SoftmaxParams op_data = *static_cast(node->user_data); + + switch (input->type) { + case kTfLiteFloat32: { + tflite::reference_ops::Softmax( + op_data, tflite::micro::GetTensorShape(input), + tflite::micro::GetTensorData(input), + tflite::micro::GetTensorShape(output), + tflite::micro::GetTensorData(output)); + return kTfLiteOk; + } + case kTfLiteInt8: + case kTfLiteInt16: { + SoftmaxQuantized(input, output, op_data); + return kTfLiteOk; + } + default: + MicroPrintf("Type %s (%d) not supported.", TfLiteTypeGetName(input->type), + input->type); + return kTfLiteError; + } +} +} // namespace + +TFLMRegistration Register_SOFTMAX() { + return tflite::micro::RegisterOp(SoftmaxInit, SoftmaxPrepare, SoftmaxEval); +} + +} // namespace tflite diff --git a/tensorflow/lite/micro/kernels/softmax.h b/tensorflow/lite/micro/kernels/softmax.h new file mode 100644 index 0000000..fd97201 --- /dev/null +++ b/tensorflow/lite/micro/kernels/softmax.h @@ -0,0 +1,67 @@ +/* Copyright 2022 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/ +#ifndef TENSORFLOW_LITE_MICRO_KERNELS_SOFTMAX_H_ +#define TENSORFLOW_LITE_MICRO_KERNELS_SOFTMAX_H_ + +#include "tensorflow/lite/c/builtin_op_data.h" +#include "tensorflow/lite/kernels/internal/types.h" +#include "tensorflow/lite/micro/micro_common.h" + +namespace tflite { + +void* SoftmaxInit(TfLiteContext* context, const char* buffer, size_t length); + +// Common helper function to SoftmaxPrepare. +TfLiteStatus CalculateSoftmaxParams(TfLiteContext* context, + const TfLiteTensor* input, + TfLiteTensor* output, + const TfLiteSoftmaxParams* params, + SoftmaxParams* op_data); + +TfLiteStatus SoftmaxPrepare(TfLiteContext* context, TfLiteNode* node); + +// This is the most generic TFLMRegistration. The actual supported types +// may still be target dependent. The only requirement is that every +// implementation (reference or optimized) must define this function. +TFLMRegistration Register_SOFTMAX(); + +#if defined(XTENSA) || defined(CMSIS_NN) +// Returns a TFLMRegistration struct for kernel variant that only supports +// int8 input and int16 output. +TFLMRegistration Register_SOFTMAX_INT8_INT16(); +#else +inline TFLMRegistration Register_SOFTMAX_INT8_INT16() { + return Register_SOFTMAX(); +} +#endif + +#if defined(CMSIS_NN) +// Returns a TFLMRegistration struct for kernel variant that only supports +// int8 input/output and uses the latency optimized implementations. +TFLMRegistration Register_SOFTMAX_INT8(); + +// Returns a TFLMRegistration struct for kernel variant that only supports +// int16 input/output and uses the latency optimized implementations. 
+TFLMRegistration Register_SOFTMAX_INT16(); + +#else +inline TFLMRegistration Register_SOFTMAX_INT8() { return Register_SOFTMAX(); } + +inline TFLMRegistration Register_SOFTMAX_INT16() { return Register_SOFTMAX(); } +#endif + +} // namespace tflite + +#endif // TENSORFLOW_LITE_MICRO_KERNELS_SOFTMAX_H_ diff --git a/tensorflow/lite/micro/kernels/softmax_common.cc b/tensorflow/lite/micro/kernels/softmax_common.cc new file mode 100644 index 0000000..62b8b29 --- /dev/null +++ b/tensorflow/lite/micro/kernels/softmax_common.cc @@ -0,0 +1,168 @@ +/* Copyright 2022 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/ + +#include "tensorflow/lite/c/builtin_op_data.h" +#include "tensorflow/lite/c/common.h" +#include "tensorflow/lite/kernels/internal/common.h" +#include "tensorflow/lite/kernels/internal/quantization_util.h" +#include "tensorflow/lite/kernels/kernel_util.h" +#include "tensorflow/lite/kernels/op_macros.h" +#include "tensorflow/lite/micro/kernels/softmax.h" +#include "tensorflow/lite/micro/micro_context.h" + +namespace tflite { + +namespace { +// Softmax parameter data that persists in user_data +const int kInt16LUTArraySize = LUTSize(); + +TfLiteStatus InitializeLutForInt16(TfLiteContext* context, + const TfLiteTensor* input, + TfLiteTensor* output, + SoftmaxParams* op_data) { + // Only allocate LUTs for KTfLiteInt16 data type + if (input->type == kTfLiteInt16) { + void* raw_exp_lut = context->AllocatePersistentBuffer( + context, sizeof(int16_t) * kInt16LUTArraySize); + TF_LITE_ENSURE(context, raw_exp_lut != nullptr); + op_data->exp_lut = reinterpret_cast(raw_exp_lut); + void* one_over_one_plus_x_lut = context->AllocatePersistentBuffer( + context, sizeof(int16_t) * kInt16LUTArraySize); + TF_LITE_ENSURE(context, one_over_one_plus_x_lut != nullptr); + op_data->one_over_one_plus_x_lut = + reinterpret_cast(one_over_one_plus_x_lut); + } + + if (output->type == kTfLiteInt16) { + TF_LITE_ENSURE(context, + input->type == kTfLiteInt8 || input->type == kTfLiteInt16); + } else { + TF_LITE_ENSURE_EQ(context, input->type, output->type); + } + + // Populate LUT if required + if (input->type == kTfLiteInt16) { + TF_LITE_ENSURE_EQ(context, output->params.zero_point, 0); + // exp LUT only used on negative values + // we consider exp(-10.0) is insignificant to accumulation + const int32_t range = std::numeric_limits::max() - + std::numeric_limits::min(); + LUTPopulate( + 10.0f / range, std::numeric_limits::max(), 2.0f / range, 0, + [](float value) { return std::exp(value); }, op_data->exp_lut); + + 
LUTPopulate( + 1.0f / range, std::numeric_limits::min(), 2.0f / range, 0, + [](float value) { return 1.0f / (1.0f + value); }, + op_data->one_over_one_plus_x_lut); + + op_data->zero_point = output->params.zero_point; + op_data->scale = output->params.scale; + } + + return kTfLiteOk; +} + +} // namespace + +TfLiteStatus CalculateSoftmaxParams(TfLiteContext* context, + const TfLiteTensor* input, + TfLiteTensor* output, + const TfLiteSoftmaxParams* params, + SoftmaxParams* op_data) { + if (InitializeLutForInt16(context, input, output, op_data) != kTfLiteOk) { + return kTfLiteError; + } + + if (input->type == kTfLiteInt8 || input->type == kTfLiteInt16) { + if (input->type == kTfLiteInt16) { + TF_LITE_ENSURE_EQ(context, input->params.zero_point, 0); + TF_LITE_ENSURE_EQ(context, output->params.zero_point, 0); + TF_LITE_ENSURE_NEAR(context, output->params.scale, 1.f / 32768, + (0.001f * 1.f / 32768)); + } else { // input->type == kTfLiteInt8 + TF_LITE_ENSURE_TYPES_EQ(context, input->type, kTfLiteInt8); + if (output->type == kTfLiteInt16) { + TF_LITE_ENSURE_EQ(context, output->params.zero_point, -32768); + TF_LITE_ENSURE_NEAR(context, output->params.scale, 1.f / 65536, + (0.001f * 1.f / 65536)); + } else { // output->type == kTfLiteint8 + TF_LITE_ENSURE_TYPES_EQ(context, output->type, kTfLiteInt8); + TF_LITE_ENSURE_EQ(context, output->params.zero_point, -128); + TF_LITE_ENSURE(context, output->params.scale == 1.f / 256); + } + } + + static const int kScaledDiffIntegerBits = 5; + + // Calculate input_multiplier and input_left_shift + if (input->type == kTfLiteInt16) { + int input_left_shift; + double input_scale_beta_rescale = + static_cast(input->params.scale) * + static_cast(params->beta) / + (10.0 / 65535.0); // scale the input_diff such that [-65535, 0] + // correspond to [-10.0, 0.0] + QuantizeMultiplier(input_scale_beta_rescale, &op_data->input_multiplier, + &input_left_shift); + op_data->input_left_shift = input_left_shift; + } else { + int input_left_shift; + 
tflite::PreprocessSoftmaxScaling( + static_cast(params->beta), + static_cast(input->params.scale), kScaledDiffIntegerBits, + &op_data->input_multiplier, &input_left_shift); + op_data->input_left_shift = input_left_shift; + op_data->diff_min = + -1.0 * tflite::CalculateInputRadius(kScaledDiffIntegerBits, + op_data->input_left_shift); + } + } else { + TF_LITE_ENSURE_TYPES_EQ(context, input->type, kTfLiteFloat32); + TF_LITE_ENSURE_TYPES_EQ(context, output->type, kTfLiteFloat32); + op_data->beta = static_cast(params->beta); + } + return kTfLiteOk; +} + +void* SoftmaxInit(TfLiteContext* context, const char* buffer, size_t length) { + TFLITE_DCHECK(context->AllocatePersistentBuffer != nullptr); + return context->AllocatePersistentBuffer(context, sizeof(SoftmaxParams)); +} + +TfLiteStatus SoftmaxPrepare(TfLiteContext* context, TfLiteNode* node) { + MicroContext* micro_context = GetMicroContext(context); + + TF_LITE_ENSURE_EQ(context, NumInputs(node), 1); + TF_LITE_ENSURE_EQ(context, NumOutputs(node), 1); + TfLiteTensor* input = micro_context->AllocateTempInputTensor(node, 0); + TF_LITE_ENSURE(context, input != nullptr); + TF_LITE_ENSURE(context, NumDimensions(input) >= 1); + TfLiteTensor* output = micro_context->AllocateTempOutputTensor(node, 0); + TF_LITE_ENSURE(context, output != nullptr); + + TF_LITE_ENSURE(context, node->user_data != nullptr); + SoftmaxParams* op_data = static_cast(node->user_data); + + auto* params = static_cast(node->builtin_data); + auto ret_val = + CalculateSoftmaxParams(context, input, output, params, op_data); + + micro_context->DeallocateTempTfLiteTensor(input); + micro_context->DeallocateTempTfLiteTensor(output); + return ret_val; +} + +} // namespace tflite diff --git a/tensorflow/lite/micro/kernels/softmax_test.cc b/tensorflow/lite/micro/kernels/softmax_test.cc new file mode 100644 index 0000000..057892b --- /dev/null +++ b/tensorflow/lite/micro/kernels/softmax_test.cc @@ -0,0 +1,488 @@ +/* Copyright 2017 The TensorFlow Authors. 
All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#include "tensorflow/lite/c/builtin_op_data.h" +#include "tensorflow/lite/c/common.h" +#include "tensorflow/lite/micro/kernels/kernel_runner.h" +#include "tensorflow/lite/micro/test_helpers.h" +#include "tensorflow/lite/micro/testing/micro_test.h" + +namespace tflite { +namespace testing { +namespace { + +// The Softmax kernel assumes an output in the range [0, 1.0], leading to these +// quantization parameters. +const float output_scale_int8 = 1.0f / 256.0f; +const float output_scale_int16 = 1.0f / 32768.0f; +const int output_zero_point_int8 = -128; +const int output_zero_point_int16 = 0; + +// Empirical tolerance in quantization space +const float tolerance_int16 = 7.0; + +// 1-dimensional test data. +const int flat_size_1d = 5; +int shape_1d[] = {1, 5}; +const float input_data_1d[] = {1.0, 2.0, 3.0, 4.0, 5.0}; +const float golden_1d[] = {0.011656231, 0.031684921, 0.086128544, 0.234121657, + 0.636408647}; + +// 2-dimensional test data. +const int flat_size_2d = 10; +int shape_2d[] = {2, 2, 5}; +const float input_data_2d[] = {1.0, 2.0, 3.0, 4.0, 5.0, + -1.0, -2.0, -3.0, -4.0, -5.0}; +const float golden_2d[] = {0.011656231, 0.031684921, 0.086128544, 0.234121657, + 0.636408647, 0.636408647, 0.234121657, 0.086128544, + 0.031684921, 0.011656231}; + +// 3-dimensional test data. 
+const int flat_size_3d = 60; +int shape_3d[] = {3, 3, 4, 5}; +const float input_data_3d[] = { + // c = 0 + // h = 0 + 3.00, 6.00, -5.00, 4.00, -9.00, + // h = 1 + -10.00, -10.00, -8.00, 2.00, 2.00, + // h = 2 + 8.00, -5.00, -8.00, 5.00, -6.00, + // h = 3 + -8.00, 6.00, 1.00, -10.00, -8.00, + + // c = 1 + // h = 0 + 7.00, 6.00, -10.00, -4.00, -5.00, + // h = 1 + 2.00, 7.00, 9.00, -9.00, 7.00, + // h = 2 + -4.00, -2.00, 8.00, 2.00, 2.00, + // h = 3 + 3.00, 6.00, 6.00, 2.00, 4.00, + + // c = 2 + // h = 0 + 9.00, 7.00, -7.00, 0.00, 4.00, + // h = 1 + -3.00, 8.00, 8.00, -3.00, -4.00, + // h = 2 + -9.00, -9.00, 4.00, -8.00, -1.00, + // h = 3 + -10.00, -2.00, 6.00, -7.00, 0.00}; + +float golden_3d[] = { + // c = 0 + // h = 0 + 0.042009463, 0.843782625, 0.000014093, 0.114193561, 0.000000258, + // h = 1 + 0.000003072, 0.000003072, 0.000022699, 0.499985578, 0.499985578, + // h = 2 + 0.952571219, 0.000002153, 0.000000107, 0.047425728, 0.000000792, + // h = 3 + 0.000000826, 0.993305397, 0.006692839, 0.000000112, 0.000000826, + + // c = 1 + // h = 0 + 0.731046347, 0.268936922, 0.000000030, 0.000012210, 0.000004492, + // h = 1 + 0.000717124, 0.106430599, 0.786421666, 0.000000012, 0.106430599, + // h = 2 + 0.000006114, 0.000045174, 0.995015917, 0.002466398, 0.002466398, + // h = 3 + 0.022595176, 0.453836234, 0.453836234, 0.008312301, 0.061420055, + + // c = 2 + // h = 0 + 0.875505904, 0.118486839, 0.000000099, 0.000108046, 0.005899112, + // h = 1 + 0.000008351, 0.499990113, 0.499990113, 0.000008351, 0.000003072, + // h = 2 + 0.000002245, 0.000002245, 0.993296627, 0.000006103, 0.006692780, + // h = 3 + 0.000000112, 0.000334520, 0.997191323, 0.000002254, 0.002471790}; + +// 4-dimensional test data. 
+const int flat_size_4d = 120; +int shape_4d[] = {4, 2, 3, 4, 5}; +const float input_data_4d[] = { + // n = 0 + // c = 0 + // h = 0 + 3.00, 6.00, -5.00, 4.00, -9.00, + // h = 1 + -10.00, -10.00, -8.00, 2.00, 2.00, + // h = 2 + 8.00, -5.00, -8.00, 5.00, -6.00, + // h = 3 + -8.00, 6.00, 1.00, -10.00, -8.00, + + // c = 1 + // h = 0 + 7.00, 6.00, -10.00, -4.00, -5.00, + // h = 1 + 2.00, 7.00, 9.00, -9.00, 7.00, + // h = 2 + -4.00, -2.00, 8.00, 2.00, 2.00, + // h = 3 + 3.00, 6.00, 6.00, 2.00, 4.00, + + // c = 2 + // h = 0 + 9.00, 7.00, -7.00, 0.00, 4.00, + // h = 1 + -3.00, 8.00, 8.00, -3.00, -4.00, + // h = 2 + -9.00, -9.00, 4.00, -8.00, -1.00, + // h = 3 + -10.00, -2.00, 6.00, -7.00, 0.00, + + // n = 1 + // c = 0 + // h = 0 + -9.00, -8.00, 6.00, -1.00, -5.00, + // h = 1 + -10.00, -5.00, -10.00, 7.00, -2.00, + // h = 2 + -5.00, -4.00, 1.00, 2.00, 2.00, + // h = 3 + -2.00, -2.00, 1.00, 1.00, -4.00, + + // c = 1 + // h = 0 + -8.00, -3.00, 1.00, 1.00, -1.00, + // h = 1 + -2.00, 6.00, -1.00, -5.00, 6.00, + // h = 2 + -7.00, 8.00, 9.00, 0.00, 9.00, + // h = 3 + -9.00, -5.00, -2.00, 0.00, 8.00, + + // c = 2 + // h = 0 + 4.00, 2.00, -3.00, 5.00, 8.00, + // h = 1 + -1.00, 1.00, -4.00, -9.00, 7.00, + // h = 2 + 3.00, -8.00, 0.00, 9.00, -4.00, + // h = 3 + 8.00, -1.00, 9.00, -9.00, 1.00}; + +const float golden_4d[] = { + // n = 0 + // c = 0 + // h = 0 + 0.042009463, 0.843782625, 0.000014093, 0.114193561, 0.000000258, + // h = 1 + 0.000003072, 0.000003072, 0.000022699, 0.499985578, 0.499985578, + // h = 2 + 0.952571219, 0.000002153, 0.000000107, 0.047425728, 0.000000792, + // h = 3 + 0.000000826, 0.993305397, 0.006692839, 0.000000112, 0.000000826, + + // c = 1 + // h = 0 + 0.731046347, 0.268936922, 0.000000030, 0.000012210, 0.000004492, + // h = 1 + 0.000717124, 0.106430599, 0.786421666, 0.000000012, 0.106430599, + // h = 2 + 0.000006114, 0.000045174, 0.995015917, 0.002466398, 0.002466398, + // h = 3 + 0.022595176, 0.453836234, 0.453836234, 0.008312301, 0.061420055, + + // c = 2 
+ // h = 0 + 0.875505904, 0.118486839, 0.000000099, 0.000108046, 0.005899112, + // h = 1 + 0.000008351, 0.499990113, 0.499990113, 0.000008351, 0.000003072, + // h = 2 + 0.000002245, 0.000002245, 0.993296627, 0.000006103, 0.006692780, + // h = 3 + 0.000000112, 0.000334520, 0.997191323, 0.000002254, 0.002471790, + + // n = 1 + // c = 0 + // h = 0 + 0.000000306, 0.000000831, 0.999071142, 0.000911035, 0.000016686, + // h = 1 + 0.000000041, 0.000006143, 0.000000041, 0.999870380, 0.000123394, + // h = 2 + 0.000384554, 0.001045327, 0.155140254, 0.421714933, 0.421714933, + // h = 3 + 0.023637081, 0.023637081, 0.474763454, 0.474763454, 0.003198931, + + // c = 1 + // h = 0 + 0.000057299, 0.008503973, 0.464301197, 0.464301197, 0.062836334, + // h = 1 + 0.000167625, 0.499684188, 0.000455653, 0.000008346, 0.499684188, + // h = 2 + 0.000000048, 0.155354299, 0.422296769, 0.000052116, 0.422296769, + // h = 3 + 0.000000041, 0.000002259, 0.000045383, 0.000335334, 0.999616982, + + // c = 2 + // h = 0 + 0.017107856, 0.002315297, 0.000015600, 0.046503973, 0.934057274, + // h = 1 + 0.000334516, 0.002471755, 0.000016655, 0.000000112, 0.997176963, + // h = 2 + 0.002472313, 0.000000041, 0.000123089, 0.997402302, 0.000002254, + // h = 3 + 0.268866557, 0.000033181, 0.730855076, 0.000000011, 0.000245175}; + +template +void ValidateSoftmaxGoldens(TfLiteTensor* tensors, const int tensor_count, + T* output_data, const T* expected_output, + int output_dims_count, float tolerance) { + TfLiteSoftmaxParams builtin_data = {1.0f}; + + int inputs_array_data[] = {1, 0}; + TfLiteIntArray* inputs_array = IntArrayFromInts(inputs_array_data); + int outputs_array_data[] = {1, 1}; + TfLiteIntArray* outputs_array = IntArrayFromInts(outputs_array_data); + + const TFLMRegistration registration = Register_SOFTMAX(); + micro::KernelRunner runner(registration, tensors, tensor_count, inputs_array, + outputs_array, &builtin_data); + + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, runner.InitAndPrepare()); + 
TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, runner.Invoke()); + + for (int i = 0; i < output_dims_count; ++i) { + TF_LITE_MICRO_EXPECT_NEAR(expected_output[i], output_data[i], tolerance); + } +} + +void TestSoftmaxFloat(int* input_dims_data, const float* input_data, + int* output_dims_data, const float* expected_output_data, + float* output_data) { + TfLiteIntArray* input_dims = IntArrayFromInts(input_dims_data); + TfLiteIntArray* output_dims = IntArrayFromInts(output_dims_data); + const int output_dims_count = ElementCount(*output_dims); + + constexpr int inputs_size = 1; + constexpr int outputs_size = 1; + constexpr int tensors_size = inputs_size + outputs_size; + TfLiteTensor tensors[tensors_size] = { + CreateTensor(input_data, input_dims), + CreateTensor(output_data, output_dims), + }; + + ValidateSoftmaxGoldens(tensors, tensors_size, output_data, + expected_output_data, output_dims_count, 1e-5); +} + +template +void TestSoftmaxQuantized(int* input_dims_data, const float* input_data, + inputT* input_quantized, float input_scale, + int input_zero_point, int* output_dims_data, + const float* golden, outputT* golden_quantized, + float output_scale, int output_zero_point, + outputT* output_data, float tolerance = 1.0) { + TfLiteIntArray* input_dims = IntArrayFromInts(input_dims_data); + TfLiteIntArray* output_dims = IntArrayFromInts(output_dims_data); + const int output_dims_count = ElementCount(*output_dims); + + constexpr int inputs_size = 1; + constexpr int outputs_size = 1; + constexpr int tensors_size = inputs_size + outputs_size; + TfLiteTensor tensors[tensors_size] = { + CreateQuantizedTensor(input_data, input_quantized, input_dims, + input_scale, input_zero_point), + CreateQuantizedTensor(output_data, output_dims, output_scale, + output_zero_point), + }; + + Quantize(golden, golden_quantized, output_dims_count, output_scale, + output_zero_point); + + ValidateSoftmaxGoldens(tensors, tensors_size, output_data, golden_quantized, + output_dims_count, tolerance); +} + +} 
// namespace +} // namespace testing +} // namespace tflite + +TF_LITE_MICRO_TESTS_BEGIN + +TF_LITE_MICRO_TEST(Softmax1DFloatShouldMatchGolden) { + float output_data[tflite::testing::flat_size_1d]; + tflite::testing::TestSoftmaxFloat( + tflite::testing ::shape_1d, tflite::testing::input_data_1d, + tflite::testing::shape_1d, tflite::testing::golden_1d, output_data); +} + +TF_LITE_MICRO_TEST(Softmax1DQuantizedInt8ShouldMatchGolden) { + const float input_scale = 0.1f; + const int input_zero_point = 0; + + int8_t input_quantized[tflite::testing::flat_size_1d]; + int8_t golden_quantized[tflite::testing::flat_size_1d]; + int8_t output_data[tflite::testing::flat_size_1d]; + tflite::testing::TestSoftmaxQuantized( + tflite::testing::shape_1d, tflite::testing::input_data_1d, + input_quantized, input_scale, input_zero_point, tflite::testing::shape_1d, + tflite::testing::golden_1d, golden_quantized, + tflite::testing::output_scale_int8, + tflite::testing::output_zero_point_int8, output_data); +} + +TF_LITE_MICRO_TEST(Softmax1DQuantizedInt16ShouldMatchGolden) { + const float input_scale = 0.1f; + const int input_zero_point = 0; + + int16_t input_quantized[tflite::testing::flat_size_1d]; + int16_t golden_quantized[tflite::testing::flat_size_1d]; + int16_t output_data[tflite::testing::flat_size_1d]; + tflite::testing::TestSoftmaxQuantized( + tflite::testing::shape_1d, tflite::testing::input_data_1d, + input_quantized, input_scale, input_zero_point, tflite::testing::shape_1d, + tflite::testing::golden_1d, golden_quantized, + tflite::testing::output_scale_int16, + tflite::testing::output_zero_point_int16, output_data); +} + +TF_LITE_MICRO_TEST(Softmax2DFloatShouldMatchGolden) { + float output_data[tflite::testing::flat_size_2d]; + tflite::testing::TestSoftmaxFloat( + tflite::testing ::shape_2d, tflite::testing::input_data_2d, + tflite::testing::shape_2d, tflite::testing::golden_2d, output_data); +} + +TF_LITE_MICRO_TEST(Softmax2DQuantizedInt8ShouldMatchGolden) { + const float 
input_scale = 0.1f; + const int input_zero_point = 0; + + int8_t input_quantized[tflite::testing::flat_size_2d]; + int8_t golden_quantized[tflite::testing::flat_size_2d]; + int8_t output_data[tflite::testing::flat_size_2d]; + tflite::testing::TestSoftmaxQuantized( + tflite::testing::shape_2d, tflite::testing::input_data_2d, + input_quantized, input_scale, input_zero_point, tflite::testing::shape_2d, + tflite::testing::golden_2d, golden_quantized, + tflite::testing::output_scale_int8, + tflite::testing::output_zero_point_int8, output_data); +} + +TF_LITE_MICRO_TEST(Softmax2DQuantizedInt16ShouldMatchGolden) { + const float input_scale = 0.1f; + const int input_zero_point = 0; + + int16_t input_quantized[tflite::testing::flat_size_2d]; + int16_t golden_quantized[tflite::testing::flat_size_2d]; + int16_t output_data[tflite::testing::flat_size_2d]; + tflite::testing::TestSoftmaxQuantized( + tflite::testing::shape_2d, tflite::testing::input_data_2d, + input_quantized, input_scale, input_zero_point, tflite::testing::shape_2d, + tflite::testing::golden_2d, golden_quantized, + tflite::testing::output_scale_int16, + tflite::testing::output_zero_point_int16, output_data); +} + +TF_LITE_MICRO_TEST(Softmax3DFloatShouldMatchGolden) { + float output_data[tflite::testing::flat_size_3d]; + tflite::testing::TestSoftmaxFloat( + tflite::testing ::shape_3d, tflite::testing::input_data_3d, + tflite::testing::shape_3d, tflite::testing::golden_3d, output_data); +} + +TF_LITE_MICRO_TEST(Softmax3DQuantizedInt8ShouldMatchGolden) { + const float input_scale = 0.1f; + const int input_zero_point = 0; + + int8_t input_quantized[tflite::testing::flat_size_3d]; + int8_t golden_quantized[tflite::testing::flat_size_3d]; + int8_t output_data[tflite::testing::flat_size_3d]; + tflite::testing::TestSoftmaxQuantized( + tflite::testing::shape_3d, tflite::testing::input_data_3d, + input_quantized, input_scale, input_zero_point, tflite::testing::shape_3d, + tflite::testing::golden_3d, golden_quantized, + 
tflite::testing::output_scale_int8, + tflite::testing::output_zero_point_int8, output_data); +} + +TF_LITE_MICRO_TEST(Softmax3DQuantizedInt16ShouldMatchGolden) { + const float input_scale = 0.1f; + const int input_zero_point = 0; + + int16_t input_quantized[tflite::testing::flat_size_3d]; + int16_t golden_quantized[tflite::testing::flat_size_3d]; + int16_t output_data[tflite::testing::flat_size_3d]; + tflite::testing::TestSoftmaxQuantized( + tflite::testing::shape_3d, tflite::testing::input_data_3d, + input_quantized, input_scale, input_zero_point, tflite::testing::shape_3d, + tflite::testing::golden_3d, golden_quantized, + tflite::testing::output_scale_int16, + tflite::testing::output_zero_point_int16, output_data, + tflite::testing::tolerance_int16); +} + +TF_LITE_MICRO_TEST(Softmax4DFloatShouldMatchGolden) { + float output_data[tflite::testing::flat_size_4d]; + tflite::testing::TestSoftmaxFloat( + tflite::testing ::shape_4d, tflite::testing::input_data_4d, + tflite::testing::shape_4d, tflite::testing::golden_4d, output_data); +} + +TF_LITE_MICRO_TEST(Softmax4DQuantizedInt8ShouldMatchGolden) { + const float input_scale = 0.1f; + const int input_zero_point = 0; + + int8_t input_quantized[tflite::testing::flat_size_4d]; + int8_t golden_quantized[tflite::testing::flat_size_4d]; + int8_t output_data[tflite::testing::flat_size_4d]; + tflite::testing::TestSoftmaxQuantized( + tflite::testing::shape_4d, tflite::testing::input_data_4d, + input_quantized, input_scale, input_zero_point, tflite::testing::shape_4d, + tflite::testing::golden_4d, golden_quantized, + tflite::testing::output_scale_int8, + tflite::testing::output_zero_point_int8, output_data); +} + +TF_LITE_MICRO_TEST(Softmax4DQuantizedInt16ShouldMatchGolden) { + const float input_scale = 0.1f; + const int input_zero_point = 0; + + int16_t input_quantized[tflite::testing::flat_size_4d]; + int16_t golden_quantized[tflite::testing::flat_size_4d]; + int16_t output_data[tflite::testing::flat_size_4d]; + 
tflite::testing::TestSoftmaxQuantized( + tflite::testing::shape_4d, tflite::testing::input_data_4d, + input_quantized, input_scale, input_zero_point, tflite::testing::shape_4d, + tflite::testing::golden_4d, golden_quantized, + tflite::testing::output_scale_int16, + tflite::testing::output_zero_point_int16, output_data, + tflite::testing::tolerance_int16); +} + +TF_LITE_MICRO_TEST(Softmax2DQuantizedInt8InputInt16OutputShouldMatchGolden) { + const float input_scale = 0.1f; + const int input_zero_point = 0; + const float output_scale = 1.0f / 65536.0f; + const int output_zero_point = -32768; + + int8_t input_quantized[tflite::testing::flat_size_2d]; + int16_t golden_quantized[tflite::testing::flat_size_2d]; + int16_t output_data[tflite::testing::flat_size_2d]; + tflite::testing::TestSoftmaxQuantized( + tflite::testing::shape_2d, tflite::testing::input_data_2d, + input_quantized, input_scale, input_zero_point, tflite::testing::shape_2d, + tflite::testing::golden_2d, golden_quantized, output_scale, + output_zero_point, output_data); +} + +TF_LITE_MICRO_TESTS_END diff --git a/tensorflow/lite/micro/kernels/space_to_batch_nd.cc b/tensorflow/lite/micro/kernels/space_to_batch_nd.cc new file mode 100644 index 0000000..6b536ee --- /dev/null +++ b/tensorflow/lite/micro/kernels/space_to_batch_nd.cc @@ -0,0 +1,121 @@ +/* Copyright 2021 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/ + +#include "tensorflow/lite/kernels/internal/reference/space_to_batch_nd.h" + +#include "tensorflow/lite/c/common.h" +#include "tensorflow/lite/kernels/internal/tensor_ctypes.h" +#include "tensorflow/lite/kernels/internal/types.h" +#include "tensorflow/lite/kernels/kernel_util.h" +#include "tensorflow/lite/micro/kernels/kernel_util.h" +#include "tensorflow/lite/micro/micro_log.h" +#include "tensorflow/lite/micro/micro_utils.h" + +namespace tflite { + +namespace { + +constexpr int kInputTensor = 0; +constexpr int kBlockShapeTensor = 1; +constexpr int kCropsTensor = 2; +constexpr int kOutputTensor = 0; + +// Currently, only 3D NHC and 4D NHWC input/output op_context are supported. +// In case of 3D input, it will be extended to 3D NHWC by adding W=1. +// The 4D array need to have exactly 2 spatial dimensions. +// TODO(b/149952582): Support arbitrary dimension in SpaceToBatchND. +const int kInputOutputMinDimensionNum = 3; +const int kInputOutputMaxDimensionNum = 4; + +void* Init(TfLiteContext* context, const char* buffer, size_t length) { + TFLITE_DCHECK(context->AllocatePersistentBuffer != nullptr); + return context->AllocatePersistentBuffer(context, sizeof(SpaceToBatchParams)); +} + +TfLiteStatus Prepare(TfLiteContext* context, TfLiteNode* node) { + MicroContext* micro_context = GetMicroContext(context); + + TF_LITE_ENSURE_EQ(context, NumInputs(node), 3); + TF_LITE_ENSURE_EQ(context, NumOutputs(node), 1); + + TfLiteTensor* input = + micro_context->AllocateTempInputTensor(node, kInputTensor); + TfLiteTensor* output = + micro_context->AllocateTempOutputTensor(node, kOutputTensor); + TF_LITE_ENSURE(context, input != nullptr && output != nullptr); + + TF_LITE_ENSURE(context, NumDimensions(input) >= kInputOutputMinDimensionNum); + TF_LITE_ENSURE(context, NumDimensions(output) >= kInputOutputMinDimensionNum); + TF_LITE_ENSURE(context, NumDimensions(input) <= kInputOutputMaxDimensionNum); + 
TF_LITE_ENSURE(context, NumDimensions(output) <= kInputOutputMaxDimensionNum); + TF_LITE_ENSURE_TYPES_EQ(context, input->type, output->type); + + micro_context->DeallocateTempTfLiteTensor(input); + micro_context->DeallocateTempTfLiteTensor(output); + return kTfLiteOk; +} + +TfLiteStatus Eval(TfLiteContext* context, TfLiteNode* node) { + TFLITE_DCHECK(node->user_data != nullptr); + const SpaceToBatchParams& params = + *(static_cast(node->user_data)); + + const TfLiteEvalTensor* input = + tflite::micro::GetEvalInput(context, node, kInputTensor); + const TfLiteEvalTensor* block_shape = + tflite::micro::GetEvalInput(context, node, kBlockShapeTensor); + const TfLiteEvalTensor* crops = + tflite::micro::GetEvalInput(context, node, kCropsTensor); + TfLiteEvalTensor* output = + tflite::micro::GetEvalOutput(context, node, kOutputTensor); + + switch (input->type) { // Already know in/out types are same. + case kTfLiteFloat32: + reference_ops::SpaceToBatchND( + params, tflite::micro::GetTensorShape(input), + tflite::micro::GetTensorData(input), + tflite::micro::GetTensorShape(block_shape), + tflite::micro::GetTensorData(block_shape), + tflite::micro::GetTensorShape(crops), + tflite::micro::GetTensorData(crops), + tflite::micro::GetTensorShape(output), + tflite::micro::GetTensorData(output)); + break; + case kTfLiteInt8: + reference_ops::SpaceToBatchND( + params, tflite::micro::GetTensorShape(input), + tflite::micro::GetTensorData(input), + tflite::micro::GetTensorShape(block_shape), + tflite::micro::GetTensorData(block_shape), + tflite::micro::GetTensorShape(crops), + tflite::micro::GetTensorData(crops), + tflite::micro::GetTensorShape(output), + tflite::micro::GetTensorData(output)); + break; + default: + MicroPrintf("Type %s (%d) not supported.", TfLiteTypeGetName(input->type), + input->type); + return kTfLiteError; + } + return kTfLiteOk; +} + +} // namespace. 
+ +TFLMRegistration Register_SPACE_TO_BATCH_ND() { + return tflite::micro::RegisterOp(Init, Prepare, Eval); +} + +} // namespace tflite diff --git a/tensorflow/lite/micro/kernels/space_to_batch_nd_test.cc b/tensorflow/lite/micro/kernels/space_to_batch_nd_test.cc new file mode 100644 index 0000000..eae185b --- /dev/null +++ b/tensorflow/lite/micro/kernels/space_to_batch_nd_test.cc @@ -0,0 +1,154 @@ +/* Copyright 2021 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#include + +#include "tensorflow/lite/c/builtin_op_data.h" +#include "tensorflow/lite/c/common.h" +#include "tensorflow/lite/micro/kernels/kernel_runner.h" +#include "tensorflow/lite/micro/test_helpers.h" +#include "tensorflow/lite/micro/testing/micro_test.h" + +namespace tflite { +namespace testing { +namespace { + +constexpr int kBasicInputOutputSize = 16; +int basic_input_dims[] = {4, 1, 4, 4, 1}; +const float basic_input[kBasicInputOutputSize] = { + 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16}; +int basic_block_shape_dims[] = {1, 2}; +const int32_t basic_block_shape[] = {2, 2}; +int basic_crops_dims[] = {1, 4}; +const int32_t basic_crops[] = {0, 0, 0, 0}; +int basic_output_dims[] = {4, 4, 2, 2, 1}; +const float basic_golden[kBasicInputOutputSize] = {1, 3, 9, 11, 2, 4, 10, 12, + 5, 7, 13, 15, 6, 8, 14, 16}; + +template +TfLiteStatus ValidateSpaceToBatchNdGoldens(TfLiteTensor* tensors, + 
int tensors_size, const T* golden, + T* output, int output_size) { + int inputs_array_data[] = {3, 0, 1, 2}; + TfLiteIntArray* inputs_array = IntArrayFromInts(inputs_array_data); + int outputs_array_data[] = {1, 3}; + TfLiteIntArray* outputs_array = IntArrayFromInts(outputs_array_data); + + const TFLMRegistration registration = Register_SPACE_TO_BATCH_ND(); + micro::KernelRunner runner(registration, tensors, tensors_size, inputs_array, + outputs_array, nullptr); + + TF_LITE_ENSURE_STATUS(runner.InitAndPrepare()); + TF_LITE_ENSURE_STATUS(runner.Invoke()); + + for (int i = 0; i < output_size; ++i) { + // TODO(b/158102673): workaround for not having fatal test assertions. + TF_LITE_MICRO_EXPECT_EQ(golden[i], output[i]); + if (golden[i] != output[i]) { + return kTfLiteError; + } + } + return kTfLiteOk; +} + +TfLiteStatus TestSpaceToBatchNdFloat( + int* input_dims_data, const float* input_data, int* block_shape_dims_data, + const int32_t* block_shape_data, int* crops_dims_data, + const int32_t* crops_data, int* output_dims_data, const float* golden, + float* output_data) { + TfLiteIntArray* input_dims = IntArrayFromInts(input_dims_data); + TfLiteIntArray* block_shape_dims = IntArrayFromInts(block_shape_dims_data); + TfLiteIntArray* crops_dims = IntArrayFromInts(crops_dims_data); + TfLiteIntArray* output_dims = IntArrayFromInts(output_dims_data); + + constexpr int inputs_size = 3; + constexpr int outputs_size = 1; + constexpr int tensors_size = inputs_size + outputs_size; + TfLiteTensor tensors[tensors_size] = { + CreateTensor(input_data, input_dims), + CreateTensor(block_shape_data, block_shape_dims), + CreateTensor(crops_data, crops_dims), + CreateTensor(output_data, output_dims), + }; + + return ValidateSpaceToBatchNdGoldens(tensors, tensors_size, golden, + output_data, ElementCount(*output_dims)); +} + +template +TfLiteStatus TestSpaceToBatchNdQuantized( + int* input_dims_data, const float* input_data, T* input_quantized, + float input_scale, int input_zero_point, 
int* block_shape_dims_data, + const int32_t* block_shape_data, int* crops_dims_data, + const int32_t* crops_data, int* output_dims_data, const float* golden, + T* golden_quantized, float output_scale, int output_zero_point, + T* output_data) { + TfLiteIntArray* input_dims = IntArrayFromInts(input_dims_data); + TfLiteIntArray* block_shape_dims = IntArrayFromInts(block_shape_dims_data); + TfLiteIntArray* crops_dims = IntArrayFromInts(crops_dims_data); + TfLiteIntArray* output_dims = IntArrayFromInts(output_dims_data); + + constexpr int inputs_size = 3; + constexpr int outputs_size = 1; + constexpr int tensors_size = inputs_size + outputs_size; + TfLiteTensor tensors[tensors_size] = { + tflite::testing::CreateQuantizedTensor(input_data, input_quantized, + input_dims, input_scale, + input_zero_point), + tflite::testing::CreateTensor(block_shape_data, block_shape_dims), + tflite::testing::CreateTensor(crops_data, crops_dims), + tflite::testing::CreateQuantizedTensor(output_data, output_dims, + output_scale, output_zero_point), + }; + tflite::Quantize(golden, golden_quantized, ElementCount(*output_dims), + output_scale, output_zero_point); + + return ValidateSpaceToBatchNdGoldens(tensors, tensors_size, golden_quantized, + output_data, ElementCount(*output_dims)); +} + +} // namespace +} // namespace testing +} // namespace tflite + +TF_LITE_MICRO_TESTS_BEGIN + +TF_LITE_MICRO_TEST(SpaceToBatchBasicFloat) { + float output[tflite::testing::kBasicInputOutputSize]; + TF_LITE_MICRO_EXPECT_EQ( + kTfLiteOk, + tflite::testing::TestSpaceToBatchNdFloat( + tflite::testing::basic_input_dims, tflite::testing::basic_input, + tflite::testing::basic_block_shape_dims, + tflite::testing::basic_block_shape, tflite::testing::basic_crops_dims, + tflite::testing::basic_crops, tflite::testing::basic_output_dims, + tflite::testing::basic_golden, output)); +} + +TF_LITE_MICRO_TEST(SpaceToBatchBasicInt8) { + int8_t output[tflite::testing::kBasicInputOutputSize]; + int8_t 
input_quantized[tflite::testing::kBasicInputOutputSize]; + int8_t golden_quantized[tflite::testing::kBasicInputOutputSize]; + TF_LITE_MICRO_EXPECT_EQ( + kTfLiteOk, + tflite::testing::TestSpaceToBatchNdQuantized( + tflite::testing::basic_input_dims, tflite::testing::basic_input, + input_quantized, 1.0f, 0, tflite::testing::basic_block_shape_dims, + tflite::testing::basic_block_shape, tflite::testing::basic_crops_dims, + tflite::testing::basic_crops, tflite::testing::basic_output_dims, + tflite::testing::basic_golden, golden_quantized, 1.0f, 0, output)); +} + +TF_LITE_MICRO_TESTS_END diff --git a/tensorflow/lite/micro/kernels/space_to_depth.cc b/tensorflow/lite/micro/kernels/space_to_depth.cc new file mode 100644 index 0000000..eac6613 --- /dev/null +++ b/tensorflow/lite/micro/kernels/space_to_depth.cc @@ -0,0 +1,127 @@ +/* Copyright 2021 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/ +#include "tensorflow/lite/kernels/internal/reference/space_to_depth.h" + +#include + +#include "tensorflow/lite/c/common.h" +#include "tensorflow/lite/kernels/internal/types.h" +#include "tensorflow/lite/kernels/kernel_util.h" +#include "tensorflow/lite/micro/kernels/kernel_util.h" +#include "tensorflow/lite/micro/micro_log.h" + +namespace tflite { + +namespace { + +constexpr int kInputTensor = 0; +constexpr int kOutputTensor = 0; +constexpr int kBatchRank = 0; +constexpr int kHeightRank = 1; +constexpr int kWidthRank = 2; +constexpr int kDepthRank = 3; + +TfLiteStatus Prepare(TfLiteContext* context, TfLiteNode* node) { + auto* params = + reinterpret_cast(node->builtin_data); + + TF_LITE_ENSURE_EQ(context, NumInputs(node), 1); + TF_LITE_ENSURE_EQ(context, NumOutputs(node), 1); + + MicroContext* micro_context = GetMicroContext(context); + + TfLiteTensor* input = + micro_context->AllocateTempInputTensor(node, kInputTensor); + TF_LITE_ENSURE(context, input != nullptr); + TfLiteTensor* output = + micro_context->AllocateTempOutputTensor(node, kOutputTensor); + TF_LITE_ENSURE(context, output != nullptr); + + TF_LITE_ENSURE_EQ(context, NumDimensions(input), 4); + + auto data_type = output->type; + TF_LITE_ENSURE(context, + data_type == kTfLiteFloat32 || data_type == kTfLiteInt8); + TF_LITE_ENSURE_TYPES_EQ(context, input->type, output->type); + + const int block_size = params->block_size; + const int input_height = input->dims->data[kHeightRank]; + const int input_width = input->dims->data[kWidthRank]; + int output_height = input_height / block_size; + int output_width = input_width / block_size; + + TF_LITE_ENSURE_EQ(context, input_height, output_height * block_size); + TF_LITE_ENSURE_EQ(context, input_width, output_width * block_size); + + // Relocate dims to the persistent storage arena before changing them, + // otherwise we'd be modifying temporary copies made by the interpreters each + 
// time they process the layer. + TfLiteEvalTensor* output_eval = + micro::GetEvalOutput(context, node, kOutputTensor); + TF_LITE_ENSURE_OK(context, micro::CreateWritableTensorDimsWithCopy( + context, output, output_eval)); + + output->dims->data[kBatchRank] = input->dims->data[kBatchRank]; + output->dims->data[kHeightRank] = output_height; + output->dims->data[kWidthRank] = output_width; + output->dims->data[kDepthRank] = + input->dims->data[kDepthRank] * block_size * block_size; + + micro_context->DeallocateTempTfLiteTensor(input); + micro_context->DeallocateTempTfLiteTensor(output); + + return kTfLiteOk; +} + +TfLiteStatus Eval(TfLiteContext* context, TfLiteNode* node) { + auto* params = + reinterpret_cast(node->builtin_data); + + const TfLiteEvalTensor* input = + micro::GetEvalInput(context, node, kInputTensor); + TfLiteEvalTensor* output = micro::GetEvalOutput(context, node, kOutputTensor); + + SpaceToDepthParams op_params; + op_params.block_size = params->block_size; + + switch (input->type) { // Already know in/out types are same. 
+ case kTfLiteFloat32: + reference_ops::SpaceToDepth(op_params, micro::GetTensorShape(input), + micro::GetTensorData(input), + micro::GetTensorShape(output), + micro::GetTensorData(output)); + break; + case kTfLiteInt8: + reference_ops::SpaceToDepth(op_params, micro::GetTensorShape(input), + micro::GetTensorData(input), + micro::GetTensorShape(output), + micro::GetTensorData(output)); + break; + default: + MicroPrintf("SPACE_TO_DEPTH only supports FLOAT32 and INT8, got %s.", + TfLiteTypeGetName(input->type)); + return kTfLiteError; + } + + return kTfLiteOk; +} + +} // namespace + +TFLMRegistration Register_SPACE_TO_DEPTH() { + return tflite::micro::RegisterOp(nullptr, Prepare, Eval); +} + +} // namespace tflite diff --git a/tensorflow/lite/micro/kernels/space_to_depth_test.cc b/tensorflow/lite/micro/kernels/space_to_depth_test.cc new file mode 100644 index 0000000..1fd7d10 --- /dev/null +++ b/tensorflow/lite/micro/kernels/space_to_depth_test.cc @@ -0,0 +1,184 @@ +/* Copyright 2021 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/ + +#include "tensorflow/lite/c/builtin_op_data.h" +#include "tensorflow/lite/c/common.h" +#include "tensorflow/lite/micro/kernels/kernel_runner.h" +#include "tensorflow/lite/micro/micro_utils.h" +#include "tensorflow/lite/micro/test_helpers.h" +#include "tensorflow/lite/micro/testing/micro_test.h" + +using tflite::ElementCount; +using tflite::testing::CreateTensor; +using tflite::testing::IntArrayFromInts; + +namespace { + +void ExpectEq(TfLiteIntArray* a, TfLiteIntArray* b) { + for (int i = 0; i < a->size; ++i) { + TF_LITE_MICRO_EXPECT_EQ(a->data[i], b->data[i]); + } +} + +template +void ExpectNear(const T a[], const T b[], int size, float tolerance = 1e-5) { + for (int i = 0; i < size; ++i) { + TF_LITE_MICRO_EXPECT_NEAR(a[i], b[i], tolerance); + } +} + +template +constexpr int ArrayLength(const T&) { + return std::extent::value; +} + +template +struct SpaceToDepthTest { + int* input_dims; + const T* input_data; + int block_size; + int* expect_dims; + const T* expect_data; + T* output_data; +}; + +template +void TestSpaceToDepth(const SpaceToDepthTest& args) { + TfLiteIntArray* input_dims = IntArrayFromInts(args.input_dims); + constexpr int kOutputDims = 4; + int output_dims_data[] = {kOutputDims, 0, 0, 0, 0}; + TfLiteIntArray* output_dims = IntArrayFromInts(output_dims_data); + TfLiteTensor tensors[] = {CreateTensor(args.input_data, input_dims), + CreateTensor(args.output_data, output_dims)}; + + const TFLMRegistration registration = tflite::Register_SPACE_TO_DEPTH(); + constexpr int tensor_count = ArrayLength(tensors); + constexpr int kInputIndex = 0; + int input_indexes_data[] = {1, kInputIndex}; + TfLiteIntArray* input_indexes = IntArrayFromInts(input_indexes_data); + constexpr int kOutputIndex = 1; + int output_indexes_data[] = {1, kOutputIndex}; + TfLiteIntArray* output_indexes = IntArrayFromInts(output_indexes_data); + TfLiteSpaceToDepthParams op_params = {}; + 
op_params.block_size = args.block_size; + + tflite::micro::KernelRunner runner(registration, tensors, tensor_count, + input_indexes, output_indexes, + static_cast(&op_params)); + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, runner.InitAndPrepare()); + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, runner.Invoke()); + + const TfLiteTensor* output_tensor = &tensors[kOutputIndex]; + TfLiteIntArray* expect_dims = IntArrayFromInts(args.expect_dims); + ExpectEq(output_tensor->dims, expect_dims); + ExpectNear(args.output_data, args.expect_data, ElementCount(*expect_dims)); +} + +} // namespace + +TF_LITE_MICRO_TESTS_BEGIN + +TF_LITE_MICRO_TEST(SpaceToDepth_Float32_1222) { + using value_type = float; + SpaceToDepthTest test; + + int input_dims[] = {4, 1, 2, 2, 2}; + test.input_dims = input_dims; + constexpr value_type kInputData[] = {1.4, 2.3, 3.2, 4.1, 5.4, 6.3, 7.2, 8.1}; + test.input_data = kInputData; + + test.block_size = 2; + + int expect_dims[] = {4, 1, 1, 1, 8}; + test.expect_dims = expect_dims; + test.expect_data = kInputData; + + constexpr int kExpectElements = ArrayLength(kInputData); + value_type output_data[kExpectElements]; + test.output_data = output_data; + + TestSpaceToDepth(test); +} + +TF_LITE_MICRO_TEST(SpaceToDepth_Int8_1221) { + using value_type = int8_t; + SpaceToDepthTest test; + + int input_dims[] = {4, 1, 2, 2, 1}; + test.input_dims = input_dims; + constexpr value_type kInputData[] = {1, 2, 3, 4}; + test.input_data = kInputData; + + test.block_size = 2; + + int expect_dims[] = {4, 1, 1, 1, 4}; + test.expect_dims = expect_dims; + test.expect_data = kInputData; + + constexpr int kExpectElements = ArrayLength(kInputData); + value_type output_data[kExpectElements]; + test.output_data = output_data; + + TestSpaceToDepth(test); +} + +TF_LITE_MICRO_TEST(SpaceToDepth_Int8_1223) { + using value_type = int8_t; + SpaceToDepthTest test; + + int input_dims[] = {4, 1, 2, 2, 3}; + test.input_dims = input_dims; + constexpr value_type kInputData[] = {1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 
12}; + test.input_data = kInputData; + + test.block_size = 2; + + int expect_dims[] = {4, 1, 1, 1, 12}; + test.expect_dims = expect_dims; + test.expect_data = kInputData; + + constexpr int kExpectElements = ArrayLength(kInputData); + value_type output_data[kExpectElements]; + test.output_data = output_data; + + TestSpaceToDepth(test); +} + +TF_LITE_MICRO_TEST(SpaceToDepth_Int8_1441) { + using value_type = int8_t; + SpaceToDepthTest test; + + int input_dims[] = {4, 1, 4, 4, 1}; + test.input_dims = input_dims; + constexpr value_type kInputData[] = {1, 2, 5, 6, 3, 4, 7, 8, + 9, 10, 13, 14, 11, 12, 15, 16}; + test.input_data = kInputData; + + test.block_size = 2; + + int expect_dims[] = {4, 1, 2, 2, 4}; + test.expect_dims = expect_dims; + constexpr value_type kExpectData[] = {1, 2, 3, 4, 5, 6, 7, 8, + 9, 10, 11, 12, 13, 14, 15, 16}; + test.expect_data = kExpectData; + + constexpr int kExpectElements = ArrayLength(kInputData); + value_type output_data[kExpectElements]; + test.output_data = output_data; + + TestSpaceToDepth(test); +} + +TF_LITE_MICRO_TESTS_END diff --git a/tensorflow/lite/micro/kernels/split.cc b/tensorflow/lite/micro/kernels/split.cc new file mode 100644 index 0000000..aa87720 --- /dev/null +++ b/tensorflow/lite/micro/kernels/split.cc @@ -0,0 +1,125 @@ +/* Copyright 2023 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/ + +#include "tensorflow/lite/c/builtin_op_data.h" +#include "tensorflow/lite/c/common.h" +#include "tensorflow/lite/kernels/internal/tensor_ctypes.h" +#include "tensorflow/lite/kernels/kernel_util.h" +#include "tensorflow/lite/micro/kernels/kernel_util.h" +#include "tensorflow/lite/micro/micro_log.h" + +namespace tflite { + +namespace { + +template +TfLiteStatus SplitImpl(TfLiteContext* context, TfLiteNode* node, + const TfLiteEvalTensor* input, int axis_value) { + const int output_count = NumOutputs(node); + const TfLiteIntArray* input_dims = input->dims; + const TfLiteEvalTensor* output0 = + tflite::micro::GetEvalOutput(context, node, 0); + const TfLiteIntArray* output_dims = output0->dims; + + const int split_dimensions = input_dims->size; + int axis = axis_value < 0 ? axis_value + split_dimensions : axis_value; + + TFLITE_DCHECK_LT(axis, split_dimensions); + TFLITE_DCHECK_EQ(output_dims->size, split_dimensions); + + int64_t split_size = output_dims->data[axis] * output_count; + + TFLITE_DCHECK_EQ(split_size, input_dims->data[axis]); + int64_t outer_size = 1; + for (int i = 0; i < axis; ++i) { + outer_size *= input_dims->data[i]; + } + + int64_t base_inner_size = 1; + for (int i = axis + 1; i < split_dimensions; ++i) { + base_inner_size *= input_dims->data[i]; + } + + const T* input_ptr = tflite::micro::GetTensorData(input); + for (int k = 0; k < outer_size; ++k) { + for (int i = 0; i < output_count; ++i) { + TfLiteEvalTensor* t = tflite::micro::GetEvalOutput(context, node, i); + T* output_data = tflite::micro::GetTensorData(t); + const int copy_size = output_dims->data[axis] * base_inner_size; + T* output_ptr = output_data + k * copy_size; + for (int j = 0; j < copy_size; ++j) output_ptr[j] = input_ptr[j]; + input_ptr += copy_size; + } + } + + return kTfLiteOk; +} + +TfLiteStatus Prepare(TfLiteContext* context, TfLiteNode* node) { + MicroContext* micro_context = 
GetMicroContext(context); + TfLiteTensor* axis = micro_context->AllocateTempInputTensor(node, 0); + TF_LITE_ENSURE(context, axis != nullptr); + + // Dynamic output tensors are needed if axis tensor is not constant. + // But Micro doesn't support dynamic memory allocation, so we only support + // constant axis tensor for now. + TF_LITE_ENSURE_MSG(context, IsConstantTensor(axis), + "Non constant axis tensor not supported"); + + micro_context->DeallocateTempTfLiteTensor(axis); + return kTfLiteOk; +} + +TfLiteStatus Eval(TfLiteContext* context, TfLiteNode* node) { + const TfLiteEvalTensor* axis = tflite::micro::GetEvalInput(context, node, 0); + const TfLiteEvalTensor* input = tflite::micro::GetEvalInput(context, node, 1); + + int axis_value = tflite::micro::GetTensorData(axis)[0]; + if (axis_value < 0) { + axis_value += input->dims->size; + } + + TF_LITE_ENSURE(context, axis_value >= 0); + TF_LITE_ENSURE(context, axis_value < input->dims->size); + + switch (input->type) { + case kTfLiteFloat32: { + return SplitImpl(context, node, input, axis_value); + } + case kTfLiteInt8: { + return SplitImpl(context, node, input, axis_value); + } + case kTfLiteInt16: { + return SplitImpl(context, node, input, axis_value); + } + case kTfLiteInt32: { + return SplitImpl(context, node, input, axis_value); + } + default: + MicroPrintf("Type %s currently not supported.", + TfLiteTypeGetName(input->type)); + return kTfLiteError; + } + + return kTfLiteOk; +} + +} // namespace + +TFLMRegistration Register_SPLIT() { + return tflite::micro::RegisterOp(nullptr, Prepare, Eval); +} + +} // namespace tflite diff --git a/tensorflow/lite/micro/kernels/split_test.cc b/tensorflow/lite/micro/kernels/split_test.cc new file mode 100644 index 0000000..e3bf37d --- /dev/null +++ b/tensorflow/lite/micro/kernels/split_test.cc @@ -0,0 +1,459 @@ +/* Copyright 2023 The TensorFlow Authors. All Rights Reserved. 
+ +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#include "tensorflow/lite/c/builtin_op_data.h" +#include "tensorflow/lite/c/common.h" +#include "tensorflow/lite/micro/debug_log.h" +#include "tensorflow/lite/micro/kernels/kernel_runner.h" +#include "tensorflow/lite/micro/test_helpers.h" +#include "tensorflow/lite/micro/testing/micro_test.h" + +namespace tflite { +namespace testing { + +void TestSplitTwoOutputsFloat(int* input_dims_data, const float* input_data, + int* axis_dims_data, const int32_t* axis_data, + int* output1_dims_data, + const float* expected_output1_data, + int* output2_dims_data, + const float* expected_output2_data, + float* output1_data, float* output2_data) { + TfLiteIntArray* input_dims = IntArrayFromInts(input_dims_data); + TfLiteIntArray* axis_dims = IntArrayFromInts(axis_dims_data); + TfLiteIntArray* output1_dims = IntArrayFromInts(output1_dims_data); + TfLiteIntArray* output2_dims = IntArrayFromInts(output2_dims_data); + const int output1_dims_count = ElementCount(*output1_dims); + const int output2_dims_count = ElementCount(*output2_dims); + + constexpr int input_size = 1; + constexpr int output_size = 2; + constexpr int axis_size = 1; + constexpr int tensors_size = input_size + output_size + axis_size; + TfLiteTensor tensors[tensors_size] = { + CreateTensor(axis_data, axis_dims), CreateTensor(input_data, input_dims), + CreateTensor(output1_data, output1_dims), + 
CreateTensor(output2_data, output2_dims)}; + + // Currently only support constant axis tensor. + tensors[0].allocation_type = kTfLiteMmapRo; + // Place a unique value in the uninitialized output buffer. + for (int i = 0; i < output1_dims_count; ++i) { + output1_data[i] = 23; + } + + for (int i = 0; i < output2_dims_count; ++i) { + output2_data[i] = 23; + } + + int inputs_array_data[] = {2, 0, 1}; + TfLiteIntArray* inputs_array = IntArrayFromInts(inputs_array_data); + int outputs_array_data[] = {2, 2, 3}; + TfLiteIntArray* outputs_array = IntArrayFromInts(outputs_array_data); + + const TFLMRegistration registration = Register_SPLIT(); + micro::KernelRunner runner(registration, tensors, tensors_size, inputs_array, + outputs_array, nullptr); + + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, runner.InitAndPrepare()); + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, runner.Invoke()); + + for (int i = 0; i < output1_dims_count; ++i) { + TF_LITE_MICRO_EXPECT_NEAR(expected_output1_data[i], output1_data[i], 1e-5f); + } + + for (int i = 0; i < output2_dims_count; ++i) { + TF_LITE_MICRO_EXPECT_NEAR(expected_output2_data[i], output2_data[i], 1e-5f); + } +} + +void TestSplitFourOutputsFloat( + int* input_dims_data, const float* input_data, int* axis_dims_data, + const int32_t* axis_data, int* output1_dims_data, + const float* expected_output1_data, int* output2_dims_data, + const float* expected_output2_data, int* output3_dims_data, + const float* expected_output3_data, int* output4_dims_data, + const float* expected_output4_data, float* output1_data, + float* output2_data, float* output3_data, float* output4_data) { + TfLiteIntArray* input_dims = IntArrayFromInts(input_dims_data); + TfLiteIntArray* axis_dims = IntArrayFromInts(axis_dims_data); + TfLiteIntArray* output1_dims = IntArrayFromInts(output1_dims_data); + TfLiteIntArray* output2_dims = IntArrayFromInts(output2_dims_data); + TfLiteIntArray* output3_dims = IntArrayFromInts(output3_dims_data); + TfLiteIntArray* output4_dims = 
IntArrayFromInts(output4_dims_data); + const int output1_dims_count = ElementCount(*output1_dims); + const int output2_dims_count = ElementCount(*output2_dims); + const int output3_dims_count = ElementCount(*output3_dims); + const int output4_dims_count = ElementCount(*output4_dims); + + constexpr int input_size = 1; + constexpr int output_size = 4; + constexpr int axis_size = 1; + constexpr int tensors_size = input_size + output_size + axis_size; + TfLiteTensor tensors[tensors_size] = { + CreateTensor(axis_data, axis_dims), + CreateTensor(input_data, input_dims), + CreateTensor(output1_data, output1_dims), + CreateTensor(output2_data, output2_dims), + CreateTensor(output3_data, output1_dims), + CreateTensor(output4_data, output1_dims)}; + + // Currently only support constant axis tensor. + tensors[0].allocation_type = kTfLiteMmapRo; + // Place a unique value in the uninitialized output buffer. + for (int i = 0; i < output1_dims_count; ++i) { + output1_data[i] = 23; + } + for (int i = 0; i < output2_dims_count; ++i) { + output2_data[i] = 23; + } + for (int i = 0; i < output3_dims_count; ++i) { + output3_data[i] = 23; + } + for (int i = 0; i < output4_dims_count; ++i) { + output4_data[i] = 23; + } + + int inputs_array_data[] = {2, 0, 1}; + TfLiteIntArray* inputs_array = IntArrayFromInts(inputs_array_data); + int outputs_array_data[] = {4, 2, 3, 4, 5}; + TfLiteIntArray* outputs_array = IntArrayFromInts(outputs_array_data); + + const TFLMRegistration registration = tflite::Register_SPLIT(); + micro::KernelRunner runner(registration, tensors, tensors_size, inputs_array, + outputs_array, nullptr); + + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, runner.InitAndPrepare()); + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, runner.Invoke()); + + for (int i = 0; i < output1_dims_count; ++i) { + TF_LITE_MICRO_EXPECT_NEAR(expected_output1_data[i], output1_data[i], 1e-5f); + } + for (int i = 0; i < output2_dims_count; ++i) { + TF_LITE_MICRO_EXPECT_NEAR(expected_output2_data[i], output2_data[i], 
1e-5f); + } + for (int i = 0; i < output3_dims_count; ++i) { + TF_LITE_MICRO_EXPECT_NEAR(expected_output3_data[i], output3_data[i], 1e-5f); + } + for (int i = 0; i < output4_dims_count; ++i) { + TF_LITE_MICRO_EXPECT_NEAR(expected_output4_data[i], output4_data[i], 1e-5f); + } +} + +void TestSplitTwoOutputsQuantized(int* input_dims_data, + const int8_t* input_data, int* axis_dims_data, + const int32_t* axis_data, + int* output1_dims_data, + const int8_t* expected_output1_data, + int* output2_dims_data, + const int8_t* expected_output2_data, + int8_t* output1_data, int8_t* output2_data) { + TfLiteIntArray* input_dims = IntArrayFromInts(input_dims_data); + TfLiteIntArray* axis_dims = IntArrayFromInts(axis_dims_data); + TfLiteIntArray* output1_dims = IntArrayFromInts(output1_dims_data); + TfLiteIntArray* output2_dims = IntArrayFromInts(output2_dims_data); + const int output1_dims_count = ElementCount(*output1_dims); + const int output2_dims_count = ElementCount(*output2_dims); + + constexpr int input_size = 1; + constexpr int output_size = 2; + constexpr int axis_size = 1; + constexpr int tensors_size = input_size + output_size + axis_size; + TfLiteTensor tensors[tensors_size] = { + CreateTensor(axis_data, axis_dims), + CreateQuantizedTensor(input_data, input_dims, 0, 10), + CreateQuantizedTensor(output1_data, output1_dims, 0, 10), + CreateQuantizedTensor(output2_data, output2_dims, 0, 10)}; + + // Currently only support constant axis tensor. + tensors[0].allocation_type = kTfLiteMmapRo; + + // Place a unique value in the uninitialized output buffer. 
+ for (int i = 0; i < output1_dims_count; ++i) { + output1_data[i] = 23; + } + + for (int i = 0; i < output2_dims_count; ++i) { + output2_data[i] = 23; + } + + int inputs_array_data[] = {2, 0, 1}; + TfLiteIntArray* inputs_array = IntArrayFromInts(inputs_array_data); + int outputs_array_data[] = {2, 2, 3}; + TfLiteIntArray* outputs_array = IntArrayFromInts(outputs_array_data); + + const TFLMRegistration registration = tflite::Register_SPLIT(); + micro::KernelRunner runner(registration, tensors, tensors_size, inputs_array, + outputs_array, nullptr); + + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, runner.InitAndPrepare()); + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, runner.Invoke()); + + for (int i = 0; i < output1_dims_count; ++i) { + TF_LITE_MICRO_EXPECT_EQ(expected_output1_data[i], output1_data[i]); + } + + for (int i = 0; i < output2_dims_count; ++i) { + TF_LITE_MICRO_EXPECT_EQ(expected_output2_data[i], output2_data[i]); + } +} + +void TestSplitTwoOutputsQuantized32( + int* input_dims_data, const int32_t* input_data, int* axis_dims_data, + const int32_t* axis_data, int* output1_dims_data, + const int32_t* expected_output1_data, int* output2_dims_data, + const int32_t* expected_output2_data, int32_t* output1_data, + int32_t* output2_data) { + TfLiteIntArray* input_dims = IntArrayFromInts(input_dims_data); + TfLiteIntArray* axis_dims = IntArrayFromInts(axis_dims_data); + TfLiteIntArray* output1_dims = IntArrayFromInts(output1_dims_data); + TfLiteIntArray* output2_dims = IntArrayFromInts(output2_dims_data); + const int output1_dims_count = ElementCount(*output1_dims); + const int output2_dims_count = ElementCount(*output2_dims); + + constexpr int input_size = 1; + constexpr int output_size = 2; + constexpr int axis_size = 1; + constexpr int tensors_size = input_size + output_size + axis_size; + TfLiteTensor tensors[tensors_size] = { + CreateTensor(axis_data, axis_dims), CreateTensor(input_data, input_dims), + CreateTensor(output1_data, output1_dims), + CreateTensor(output2_data, 
output2_dims)}; + + // Currently only support constant axis tensor. + tensors[0].allocation_type = kTfLiteMmapRo; + + // Place a unique value in the uninitialized output buffer. + for (int i = 0; i < output1_dims_count; ++i) { + output1_data[i] = 23; + } + + for (int i = 0; i < output2_dims_count; ++i) { + output2_data[i] = 23; + } + + int inputs_array_data[] = {2, 0, 1}; + TfLiteIntArray* inputs_array = IntArrayFromInts(inputs_array_data); + int outputs_array_data[] = {2, 2, 3}; + TfLiteIntArray* outputs_array = IntArrayFromInts(outputs_array_data); + + const TFLMRegistration registration = tflite::Register_SPLIT(); + micro::KernelRunner runner(registration, tensors, tensors_size, inputs_array, + outputs_array, nullptr); + + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, runner.InitAndPrepare()); + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, runner.Invoke()); + + for (int i = 0; i < output1_dims_count; ++i) { + TF_LITE_MICRO_EXPECT_EQ(expected_output1_data[i], output1_data[i]); + } + + for (int i = 0; i < output2_dims_count; ++i) { + TF_LITE_MICRO_EXPECT_EQ(expected_output2_data[i], output2_data[i]); + } +} + +} // namespace testing +} // namespace tflite + +TF_LITE_MICRO_TESTS_BEGIN + +TF_LITE_MICRO_TEST(TwoSplitFourDimensionalAxisZero) { + int input_shape[] = {4, 2, 2, 2, 2}; + const float input_data[] = {1, 2, 3, 4, 5, 6, 7, 8, + 9, 10, 11, 12, 13, 14, 15, 16}; + int axis_shape[] = {1, 1}; + const int32_t axis_data[] = {0}; + int output1_shape[] = {4, 1, 2, 2, 2}; + const float golden1[] = {1, 2, 3, 4, 5, 6, 7, 8}; + int output2_shape[] = {4, 1, 2, 2, 2}; + const float golden2[] = {9, 10, 11, 12, 13, 14, 15, 16}; + + constexpr int output1_dims_count = 8; + constexpr int output2_dims_count = 8; + float output1_data[output1_dims_count]; + float output2_data[output2_dims_count]; + tflite::testing::TestSplitTwoOutputsFloat( + input_shape, input_data, axis_shape, axis_data, output1_shape, golden1, + output2_shape, golden2, output1_data, output2_data); +} + 
+TF_LITE_MICRO_TEST(TwoSplitFourDimensionalAxisOne) { + int input_shape[] = {4, 2, 2, 2, 2}; + const float input_data[] = {1, 2, 3, 4, 5, 6, 7, 8, + 9, 10, 11, 12, 13, 14, 15, 16}; + int axis_shape[] = {1, 1}; + const int32_t axis_data[] = {1}; + int output1_shape[] = {4, 2, 1, 2, 2}; + const float golden1[] = {1, 2, 3, 4, 9, 10, 11, 12}; + int output2_shape[] = {4, 2, 1, 2, 2}; + const float golden2[] = {5, 6, 7, 8, 13, 14, 15, 16}; + + constexpr int output1_dims_count = 8; + constexpr int output2_dims_count = 8; + float output1_data[output1_dims_count]; + float output2_data[output2_dims_count]; + tflite::testing::TestSplitTwoOutputsFloat( + input_shape, input_data, axis_shape, axis_data, output1_shape, golden1, + output2_shape, golden2, output1_data, output2_data); +} + +TF_LITE_MICRO_TEST(TwoSplitFourDimensionalAxisTwo) { + int input_shape[] = {4, 2, 2, 2, 2}; + const float input_data[] = {1, 2, 3, 4, 5, 6, 7, 8, + 9, 10, 11, 12, 13, 14, 15, 16}; + int axis_shape[] = {1, 1}; + const int32_t axis_data[] = {2}; + int output1_shape[] = {4, 2, 2, 1, 2}; + const float golden1[] = {1, 2, 5, 6, 9, 10, 13, 14}; + int output2_shape[] = {4, 2, 2, 1, 2}; + const float golden2[] = {3, 4, 7, 8, 11, 12, 15, 16}; + + constexpr int output1_dims_count = 8; + constexpr int output2_dims_count = 8; + float output1_data[output1_dims_count]; + float output2_data[output2_dims_count]; + tflite::testing::TestSplitTwoOutputsFloat( + input_shape, input_data, axis_shape, axis_data, output1_shape, golden1, + output2_shape, golden2, output1_data, output2_data); +} + +TF_LITE_MICRO_TEST(TwoSplitFourDimensionalAxisThree) { + int input_shape[] = {4, 2, 2, 2, 2}; + const float input_data[] = {1, 2, 3, 4, 5, 6, 7, 8, + 9, 10, 11, 12, 13, 14, 15, 16}; + int axis_shape[] = {1, 1}; + const int32_t axis_data[] = {3}; + int output1_shape[] = {4, 2, 2, 2, 1}; + const float golden1[] = {1, 3, 5, 7, 9, 11, 13, 15}; + int output2_shape[] = {4, 2, 2, 2, 1}; + const float golden2[] = {2, 4, 6, 8, 10, 12, 
14, 16}; + + constexpr int output1_dims_count = 8; + constexpr int output2_dims_count = 8; + float output1_data[output1_dims_count]; + float output2_data[output2_dims_count]; + tflite::testing::TestSplitTwoOutputsFloat( + input_shape, input_data, axis_shape, axis_data, output1_shape, golden1, + output2_shape, golden2, output1_data, output2_data); +} + +TF_LITE_MICRO_TEST(TwoSplitFourDimensionalNegativeAxis) { + int input_shape[] = {4, 2, 2, 2, 2}; + const float input_data[] = {1, 2, 3, 4, 5, 6, 7, 8, + 9, 10, 11, 12, 13, 14, 15, 16}; + int axis_shape[] = {1, 1}; + const int32_t axis_data[] = {-4}; + int output1_shape[] = {4, 1, 2, 2, 2}; + const float golden1[] = {1, 2, 3, 4, 5, 6, 7, 8}; + int output2_shape[] = {4, 1, 2, 2, 2}; + const float golden2[] = {9, 10, 11, 12, 13, 14, 15, 16}; + + constexpr int output1_dims_count = 8; + constexpr int output2_dims_count = 8; + float output1_data[output1_dims_count]; + float output2_data[output2_dims_count]; + tflite::testing::TestSplitTwoOutputsFloat( + input_shape, input_data, axis_shape, axis_data, output1_shape, golden1, + output2_shape, golden2, output1_data, output2_data); +} + +TF_LITE_MICRO_TEST(FourSplit) { + int input_shape[] = {1, 4}; + const float input_data[] = {1, 2, 3, 4}; + int axis_shape[] = {1, 1}; + const int32_t axis_data[] = {0}; + int output1_shape[] = {1, 1}; + const float golden1[] = {1}; + int output2_shape[] = {1, 1}; + const float golden2[] = {2}; + int output3_shape[] = {1, 1}; + const float golden3[] = {3}; + int output4_shape[] = {1, 1}; + const float golden4[] = {4}; + + constexpr int output1_dims_count = 1; + constexpr int output2_dims_count = 1; + constexpr int output3_dims_count = 1; + constexpr int output4_dims_count = 1; + float output1_data[output1_dims_count]; + float output2_data[output2_dims_count]; + float output3_data[output3_dims_count]; + float output4_data[output4_dims_count]; + tflite::testing::TestSplitFourOutputsFloat( + input_shape, input_data, axis_shape, axis_data, 
output1_shape, golden1, + output2_shape, golden2, output3_shape, golden3, output4_shape, golden4, + output1_data, output2_data, output3_data, output4_data); +} + +TF_LITE_MICRO_TEST(TwoSplitOneDimensional) { + int input_shape[] = {1, 2}; + const float input_data[] = {1, 2}; + int axis_shape[] = {1, 1}; + const int32_t axis_data[] = {0}; + int output1_shape[] = {1, 1}; + const float golden1[] = {1}; + int output2_shape[] = {1, 1}; + const float golden2[] = {2}; + + constexpr int output1_dims_count = 8; + constexpr int output2_dims_count = 8; + float output1_data[output1_dims_count]; + float output2_data[output2_dims_count]; + tflite::testing::TestSplitTwoOutputsFloat( + input_shape, input_data, axis_shape, axis_data, output1_shape, golden1, + output2_shape, golden2, output1_data, output2_data); +} + +TF_LITE_MICRO_TEST(TwoSplitFourDimensionalQuantized) { + int input_shape[] = {4, 2, 2, 2, 2}; + const int8_t input_data[] = {1, 2, 3, 4, 5, 6, 7, 8, + 9, 10, 11, 12, 13, 14, 15, 16}; + int axis_shape[] = {1, 1}; + const int32_t axis_data[] = {1}; + int output1_shape[] = {4, 2, 1, 2, 2}; + const int8_t golden1[] = {1, 2, 3, 4, 9, 10, 11, 12}; + int output2_shape[] = {4, 2, 1, 2, 2}; + const int8_t golden2[] = {5, 6, 7, 8, 13, 14, 15, 16}; + + constexpr int output1_dims_count = 8; + constexpr int output2_dims_count = 8; + int8_t output1_data[output1_dims_count]; + int8_t output2_data[output2_dims_count]; + tflite::testing::TestSplitTwoOutputsQuantized( + input_shape, input_data, axis_shape, axis_data, output1_shape, golden1, + output2_shape, golden2, output1_data, output2_data); +} + +TF_LITE_MICRO_TEST(TwoSplitFourDimensionalQuantized32) { + int input_shape[] = {4, 2, 2, 2, 2}; + const int32_t input_data[] = {1, 2, 3, 4, 5, 6, 7, 8, + 9, 10, 11, 12, 13, 14, 15, 16}; + int axis_shape[] = {1, 1}; + const int32_t axis_data[] = {1}; + int output1_shape[] = {4, 2, 1, 2, 2}; + const int32_t golden1[] = {1, 2, 3, 4, 9, 10, 11, 12}; + int output2_shape[] = {4, 2, 1, 2, 2}; + 
const int32_t golden2[] = {5, 6, 7, 8, 13, 14, 15, 16}; + + constexpr int output1_dims_count = 8; + constexpr int output2_dims_count = 8; + int32_t output1_data[output1_dims_count]; + int32_t output2_data[output2_dims_count]; + tflite::testing::TestSplitTwoOutputsQuantized32( + input_shape, input_data, axis_shape, axis_data, output1_shape, golden1, + output2_shape, golden2, output1_data, output2_data); +} + +TF_LITE_MICRO_TESTS_END diff --git a/tensorflow/lite/micro/kernels/split_v.cc b/tensorflow/lite/micro/kernels/split_v.cc new file mode 100644 index 0000000..6aed6f7 --- /dev/null +++ b/tensorflow/lite/micro/kernels/split_v.cc @@ -0,0 +1,127 @@ +/* Copyright 2023 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/
+
+#include "tensorflow/lite/c/builtin_op_data.h"
+#include "tensorflow/lite/c/common.h"
+#include "tensorflow/lite/kernels/internal/tensor_ctypes.h"
+#include "tensorflow/lite/kernels/kernel_util.h"
+#include "tensorflow/lite/kernels/op_macros.h"
+#include "tensorflow/lite/micro/kernels/kernel_util.h"
+#include "tensorflow/lite/micro/micro_log.h"
+
+namespace tflite {
+
+namespace {
+
+template <typename T>
+TfLiteStatus SplitImpl(TfLiteContext* context, TfLiteNode* node,
+                       const TfLiteEvalTensor* input, int axis_value) {
+  const TfLiteIntArray* input_dims = input->dims;
+  const TfLiteEvalTensor* output0 =
+      tflite::micro::GetEvalOutput(context, node, 0);
+
+  const int split_dimensions = input_dims->size;
+
+  TFLITE_DCHECK_LT(axis_value, split_dimensions);
+  TFLITE_DCHECK_EQ(output0->dims->size, split_dimensions);
+
+  int64_t split_size = 0;
+  const int output_count = NumOutputs(node);
+  for (int i = 0; i < output_count; i++) {
+    split_size +=
+        tflite::micro::GetEvalOutput(context, node, i)->dims->data[axis_value];
+  }
+  TFLITE_DCHECK_EQ(split_size, input_dims->data[axis_value]);
+  int64_t outer_size = 1;
+  for (int i = 0; i < axis_value; ++i) {
+    outer_size *= input_dims->data[i];
+  }
+
+  int64_t base_inner_size = 1;
+  for (int i = axis_value + 1; i < split_dimensions; ++i) {
+    base_inner_size *= input_dims->data[i];
+  }
+
+  const T* input_ptr = tflite::micro::GetTensorData<T>(input);
+  for (int k = 0; k < outer_size; ++k) {
+    for (int i = 0; i < output_count; ++i) {
+      TfLiteEvalTensor* output_tensor =
+          tflite::micro::GetEvalOutput(context, node, i);
+      T* output_data = tflite::micro::GetTensorData<T>(output_tensor);
+      const int copy_size =
+          output_tensor->dims->data[axis_value] * base_inner_size;
+      T* output_ptr = output_data + k * copy_size;
+      for (int j = 0; j < copy_size; ++j) output_ptr[j] = input_ptr[j];
+      input_ptr += copy_size;
+    }
+  }
+
+  return kTfLiteOk;
+}
+
+TfLiteStatus 
Prepare(TfLiteContext* context, TfLiteNode* node) {
+  TF_LITE_ENSURE_EQ(context, NumInputs(node), 3);
+
+  MicroContext* micro_context = GetMicroContext(context);
+  // Dynamic output tensors are needed if axis tensor is not constant.
+  // But Micro doesn't support dynamic memory allocation, so we only support
+  // constant axis tensor for now.
+  TfLiteTensor* axis = micro_context->AllocateTempInputTensor(node, 2);
+  TF_LITE_ENSURE_MSG(context, IsConstantTensor(axis),
+                     "Non constant axis tensor not supported");
+  micro_context->DeallocateTempTfLiteTensor(axis);
+  return kTfLiteOk;
+}
+
+TfLiteStatus Eval(TfLiteContext* context, TfLiteNode* node) {
+  const TfLiteEvalTensor* input = tflite::micro::GetEvalInput(context, node, 0);
+  const TfLiteEvalTensor* axis = tflite::micro::GetEvalInput(context, node, 2);
+
+  int axis_value = tflite::micro::GetTensorData<int32_t>(axis)[0];
+  if (axis_value < 0) {
+    axis_value += input->dims->size;
+  }
+
+  TF_LITE_ENSURE(context, axis_value >= 0);
+  TF_LITE_ENSURE(context, axis_value < input->dims->size);
+
+  switch (input->type) {
+    case kTfLiteFloat32: {
+      return SplitImpl<float>(context, node, input, axis_value);
+    }
+    case kTfLiteInt8: {
+      return SplitImpl<int8_t>(context, node, input, axis_value);
+    }
+    case kTfLiteInt16: {
+      return SplitImpl<int16_t>(context, node, input, axis_value);
+    }
+    case kTfLiteInt32: {
+      return SplitImpl<int32_t>(context, node, input, axis_value);
+    }
+    default:
+      MicroPrintf("Type %s currently not supported.",
+                  TfLiteTypeGetName(input->type));
+      return kTfLiteError;
+  }
+  return kTfLiteOk;
+}
+
+}  // namespace
+
+TFLMRegistration Register_SPLIT_V() {
+  return tflite::micro::RegisterOp(nullptr, Prepare, Eval);
+}
+
+}  // namespace tflite
diff --git a/tensorflow/lite/micro/kernels/split_v_test.cc b/tensorflow/lite/micro/kernels/split_v_test.cc
new file mode 100644
index 0000000..24efee0
--- /dev/null
+++ b/tensorflow/lite/micro/kernels/split_v_test.cc
@@ -0,0 +1,466 @@
+/* Copyright 2023 The TensorFlow Authors. All Rights Reserved. 
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+==============================================================================*/
+
+#include "tensorflow/lite/c/builtin_op_data.h"
+#include "tensorflow/lite/c/common.h"
+#include "tensorflow/lite/micro/debug_log.h"
+#include "tensorflow/lite/micro/kernels/kernel_runner.h"
+#include "tensorflow/lite/micro/test_helpers.h"
+#include "tensorflow/lite/micro/testing/micro_test.h"
+
+namespace tflite {
+namespace testing {
+
+template <int N>
+struct OutputTensors {
+  float* data[N];
+  int* dims[N];
+  float* expected_output_data[N];
+};
+template <int N>
+void TestSplitVFloat(int* input_dims_data, const float* input_data,
+                     int* axis_dims_data, const int32_t* axis_data,
+                     int* split_dims_data, const int32_t* split_data,
+                     const OutputTensors<N>& output_tensors) {
+  TfLiteIntArray* input_dims = IntArrayFromInts(input_dims_data);
+  TfLiteIntArray* axis_dims = IntArrayFromInts(axis_dims_data);
+  TfLiteIntArray* split_dims = IntArrayFromInts(split_dims_data);
+  TfLiteIntArray* output_dims[N];
+  for (int i = 0; i < N; i++)
+    output_dims[i] = IntArrayFromInts(output_tensors.dims[i]);
+
+  // Place a unique value in the uninitialized output buffer.
+ for (int i = 0; i < N; i++) { + int dim_count = ElementCount(*output_dims[i]); + for (int j = 0; j < dim_count; j++) { + (output_tensors.data[i])[j] = 23; + } + } + constexpr int input_size = 1; + constexpr int axis_size = 1; + constexpr int split_size = 1; + constexpr int output_size = N; + + constexpr int tensors_size = + input_size + output_size + axis_size + split_size; + + // first input tensor is data + // second is size_splits + // third is axis + // then come outputs + + TfLiteTensor tensors[tensors_size]; + tensors[0] = CreateTensor(input_data, input_dims); + tensors[1] = CreateTensor(split_data, split_dims); + tensors[2] = CreateTensor(axis_data, axis_dims); + + // add output tensors + for (int i = 0; i < N; i++) + tensors[3 + i] = CreateTensor(output_tensors.data[i], output_dims[i]); + + tensors[2].allocation_type = kTfLiteMmapRo; + tensors[1].allocation_type = kTfLiteMmapRo; + + int inputs_array_data[] = {3, 0, 1, 2}; + TfLiteIntArray* inputs_array = IntArrayFromInts(inputs_array_data); + int outputs_array_data[N + 1]; + outputs_array_data[0] = N; + for (int i = 0; i < N; i++) outputs_array_data[i + 1] = i + 3; + TfLiteIntArray* outputs_array = IntArrayFromInts(outputs_array_data); + + const TFLMRegistration registration = Register_SPLIT_V(); + micro::KernelRunner runner(registration, tensors, tensors_size, inputs_array, + outputs_array, nullptr); + + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, runner.InitAndPrepare()); + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, runner.Invoke()); + + for (int i = 0; i < N; i++) { + int dim_count = ElementCount(*output_dims[i]); + for (int j = 0; j < dim_count; j++) { + TF_LITE_MICRO_EXPECT_NEAR((output_tensors.expected_output_data[i])[j], + (output_tensors.data[i])[j], 1e-5f); + } + } +} + +} // namespace testing +} // namespace tflite + +TF_LITE_MICRO_TESTS_BEGIN + +TF_LITE_MICRO_TEST(SPLIT_V_ThreeOutputs) { + constexpr int output1_dims_count = 3; + constexpr int output2_dims_count = 3; + constexpr int output3_dims_count = 6; + 
float output1_data[output1_dims_count]; + float output2_data[output2_dims_count]; + float output3_data[output3_dims_count]; + int input_shape[] = {2, 4, 3}; + float input_values[] = {1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12}; + int axis_shape[] = {1, 1}; + int32_t axis_values[] = {0}; + int split_shape[] = {1, 3}; + int32_t split_values[] = {1, 1, 2}; + int output1_shape[] = {2, 1, 3}; + float output1_values[] = {1, 2, 3}; + int output2_shape[] = {2, 1, 3}; + float output2_values[] = {4, 5, 6}; + int output3_shape[] = {2, 2, 3}; + float output3_values[] = {7, 8, 9, 10, 11, 12}; + + tflite::testing::OutputTensors<3> output_tensors; + output_tensors.data[0] = output1_data; + output_tensors.data[1] = output2_data; + output_tensors.data[2] = output3_data; + + output_tensors.dims[0] = output1_shape; + output_tensors.dims[1] = output2_shape; + output_tensors.dims[2] = output3_shape; + + output_tensors.expected_output_data[0] = output1_values; + output_tensors.expected_output_data[1] = output2_values; + output_tensors.expected_output_data[2] = output3_values; + + tflite::testing::TestSplitVFloat(input_shape, input_values, axis_shape, + axis_values, split_shape, split_values, + output_tensors); +} + +TF_LITE_MICRO_TEST(SPLIT_V_FourDimensionalFloatAxis0) { + constexpr int output1_dims_count = 8; + constexpr int output2_dims_count = 8; + float output1_data[output1_dims_count]; + float output2_data[output2_dims_count]; + + int input_shape[] = {4, 2, 2, 2, 2}; + float input_values[] = {1, 2, 3, 4, 5, 6, 7, 8, + 9, 10, 11, 12, 13, 14, 15, 16}; + int axis_shape[] = {1, 1}; + int32_t axis_values[] = {0}; + int split_shape[] = {1, 2}; + int32_t split_values[] = {1, 1}; + int output1_shape[] = {4, 1, 2, 2, 2}; + float output1_values[] = {1, 2, 3, 4, 5, 6, 7, 8}; + int output2_shape[] = {4, 1, 2, 2, 2}; + float output2_values[] = {9, 10, 11, 12, 13, 14, 15, 16}; + + tflite::testing::OutputTensors<2> output_tensors; + + output_tensors.data[0] = output1_data; + output_tensors.data[1] = 
output2_data; + + output_tensors.dims[0] = output1_shape; + output_tensors.dims[1] = output2_shape; + + output_tensors.expected_output_data[0] = output1_values; + output_tensors.expected_output_data[1] = output2_values; + + tflite::testing::TestSplitVFloat(input_shape, input_values, axis_shape, + axis_values, split_shape, split_values, + output_tensors); +} + +TF_LITE_MICRO_TEST(SPLIT_V_FourDimensionalFloatAxis1) { + constexpr int output1_dims_count = 8; + constexpr int output2_dims_count = 8; + float output1_data[output1_dims_count]; + float output2_data[output2_dims_count]; + + int input_shape[] = {4, 2, 2, 2, 2}; + float input_values[] = {1, 2, 3, 4, 5, 6, 7, 8, + 9, 10, 11, 12, 13, 14, 15, 16}; + int axis_shape[] = {1, 1}; + int32_t axis_values[] = {1}; + int split_shape[] = {1, 2}; + int32_t split_values[] = {1, 1}; + int output1_shape[] = {4, 2, 1, 2, 2}; + float output1_values[] = {1, 2, 3, 4, 9, 10, 11, 12}; + int output2_shape[] = {4, 2, 1, 2, 2}; + float output2_values[] = {5, 6, 7, 8, 13, 14, 15, 16}; + + tflite::testing::OutputTensors<2> output_tensors; + + output_tensors.data[0] = output1_data; + output_tensors.data[1] = output2_data; + + output_tensors.dims[0] = output1_shape; + output_tensors.dims[1] = output2_shape; + + output_tensors.expected_output_data[0] = output1_values; + output_tensors.expected_output_data[1] = output2_values; + + tflite::testing::TestSplitVFloat(input_shape, input_values, axis_shape, + axis_values, split_shape, split_values, + output_tensors); +} + +TF_LITE_MICRO_TEST(SPLIT_VFourDimensionalFloatAxis2) { + constexpr int output1_dims_count = 8; + constexpr int output2_dims_count = 8; + float output1_data[output1_dims_count]; + float output2_data[output2_dims_count]; + + int input_shape[] = {4, 2, 2, 2, 2}; + float input_values[] = {1, 2, 3, 4, 5, 6, 7, 8, + 9, 10, 11, 12, 13, 14, 15, 16}; + int axis_shape[] = {1, 1}; + int32_t axis_values[] = {2}; + int split_shape[] = {1, 2}; + int32_t split_values[] = {1, 1}; + int 
output1_shape[] = {4, 2, 2, 1, 2}; + float output1_values[] = {1, 2, 5, 6, 9, 10, 13, 14}; + int output2_shape[] = {4, 2, 2, 1, 2}; + float output2_values[] = {3, 4, 7, 8, 11, 12, 15, 16}; + + tflite::testing::OutputTensors<2> output_tensors; + + output_tensors.data[0] = output1_data; + output_tensors.data[1] = output2_data; + + output_tensors.dims[0] = output1_shape; + output_tensors.dims[1] = output2_shape; + + output_tensors.expected_output_data[0] = output1_values; + output_tensors.expected_output_data[1] = output2_values; + + tflite::testing::TestSplitVFloat(input_shape, input_values, axis_shape, + axis_values, split_shape, split_values, + output_tensors); +} + +TF_LITE_MICRO_TEST(SPLIT_V_FourDimensionalFloatAxis3) { + constexpr int output1_dims_count = 8; + constexpr int output2_dims_count = 8; + float output1_data[output1_dims_count]; + float output2_data[output2_dims_count]; + int input_shape[] = {4, 2, 2, 2, 2}; + float input_values[] = {1, 2, 3, 4, 5, 6, 7, 8, + 9, 10, 11, 12, 13, 14, 15, 16}; + int axis_shape[] = {1, 1}; + int32_t axis_values[] = {3}; + int split_shape[] = {1, 2}; + int32_t split_values[] = {1, 1}; + int output1_shape[] = {4, 2, 2, 2, 1}; + float output1_values[] = {1, 3, 5, 7, 9, 11, 13, 15}; + int output2_shape[] = {4, 2, 2, 2, 1}; + float output2_values[] = {2, 4, 6, 8, 10, 12, 14, 16}; + + tflite::testing::OutputTensors<2> output_tensors; + + output_tensors.data[0] = output1_data; + output_tensors.data[1] = output2_data; + + output_tensors.dims[0] = output1_shape; + output_tensors.dims[1] = output2_shape; + + output_tensors.expected_output_data[0] = output1_values; + output_tensors.expected_output_data[1] = output2_values; + + tflite::testing::TestSplitVFloat(input_shape, input_values, axis_shape, + axis_values, split_shape, split_values, + output_tensors); +} + +TF_LITE_MICRO_TEST(SPLIT_V_FourDimensionalFloatNegativeAxis) { + constexpr int output1_dims_count = 8; + constexpr int output2_dims_count = 8; + float 
output1_data[output1_dims_count]; + float output2_data[output2_dims_count]; + + int input_shape[] = {4, 2, 2, 2, 2}; + float input_values[] = {1, 2, 3, 4, 5, 6, 7, 8, + 9, 10, 11, 12, 13, 14, 15, 16}; + int axis_shape[] = {1, 1}; + int32_t axis_values[] = {-4}; + int split_shape[] = {1, 2}; + int32_t split_values[] = {1, 1}; + int output1_shape[] = {4, 1, 2, 2, 2}; + float output1_values[] = {1, 2, 3, 4, 5, 6, 7, 8}; + int output2_shape[] = {4, 1, 2, 2, 2}; + float output2_values[] = {9, 10, 11, 12, 13, 14, 15, 16}; + + tflite::testing::OutputTensors<2> output_tensors; + + output_tensors.data[0] = output1_data; + output_tensors.data[1] = output2_data; + + output_tensors.dims[0] = output1_shape; + output_tensors.dims[1] = output2_shape; + + output_tensors.expected_output_data[0] = output1_values; + output_tensors.expected_output_data[1] = output2_values; + + tflite::testing::TestSplitVFloat(input_shape, input_values, axis_shape, + axis_values, split_shape, split_values, + output_tensors); +} + +TF_LITE_MICRO_TEST(SPLIT_V_OneDimensionalFloatAxis0) { + constexpr int output1_dims_count = 1; + constexpr int output2_dims_count = 1; + constexpr int output3_dims_count = 1; + constexpr int output4_dims_count = 1; + constexpr int output5_dims_count = 1; + constexpr int output6_dims_count = 1; + constexpr int output7_dims_count = 1; + constexpr int output8_dims_count = 1; + + float output1_data[output1_dims_count]; + float output2_data[output2_dims_count]; + float output3_data[output3_dims_count]; + float output4_data[output4_dims_count]; + float output5_data[output5_dims_count]; + float output6_data[output6_dims_count]; + float output7_data[output7_dims_count]; + float output8_data[output8_dims_count]; + int input_shape[] = {1, 8}; + float input_values[] = {1, 2, 3, 4, 5, 6, 7, 8}; + int axis_shape[] = {1, 1}; + int32_t axis_value[] = {0}; + int split_size_shape[] = {1, 8}; + int32_t split[] = {1, 1, 1, 1, 1, 1, 1, 1}; + int output1_shape[] = {1, 1}; + float output1_values[] 
= {1}; + int output2_shape[] = {1, 1}; + float output2_values[] = {2}; + + int output3_shape[] = {1, 1}; + float output3_values[] = {3}; + int output4_shape[] = {1, 1}; + float output4_values[] = {4}; + + int output5_shape[] = {1, 1}; + float output5_values[] = {5}; + int output6_shape[] = {1, 1}; + float output6_values[] = {6}; + + int output7_shape[] = {1, 1}; + float output7_values[] = {7}; + int output8_shape[] = {1, 1}; + float output8_values[] = {8}; + + tflite::testing::OutputTensors<8> output_tensors; + + output_tensors.data[0] = output1_data; + output_tensors.data[1] = output2_data; + output_tensors.data[2] = output3_data; + output_tensors.data[3] = output4_data; + output_tensors.data[4] = output5_data; + output_tensors.data[5] = output6_data; + output_tensors.data[6] = output7_data; + output_tensors.data[7] = output8_data; + + output_tensors.dims[0] = output1_shape; + output_tensors.dims[1] = output2_shape; + output_tensors.dims[2] = output3_shape; + output_tensors.dims[3] = output4_shape; + output_tensors.dims[4] = output5_shape; + output_tensors.dims[5] = output6_shape; + output_tensors.dims[6] = output7_shape; + output_tensors.dims[7] = output8_shape; + + output_tensors.expected_output_data[0] = output1_values; + output_tensors.expected_output_data[1] = output2_values; + output_tensors.expected_output_data[2] = output3_values; + output_tensors.expected_output_data[3] = output4_values; + output_tensors.expected_output_data[4] = output5_values; + output_tensors.expected_output_data[5] = output6_values; + output_tensors.expected_output_data[6] = output7_values; + output_tensors.expected_output_data[7] = output8_values; + + tflite::testing::TestSplitVFloat(input_shape, input_values, axis_shape, + axis_value, split_size_shape, split, + output_tensors); +} + +TF_LITE_MICRO_TEST(SPLIT_V_OneDimensionalFloatTest2) { + constexpr int output1_dims_count = 1; + constexpr int output2_dims_count = 1; + constexpr int output3_dims_count = 1; + constexpr int 
output4_dims_count = 1; + constexpr int output5_dims_count = 1; + constexpr int output6_dims_count = 1; + constexpr int output7_dims_count = 2; + + float output1_data[output1_dims_count]; + float output2_data[output2_dims_count]; + float output3_data[output3_dims_count]; + float output4_data[output4_dims_count]; + float output5_data[output5_dims_count]; + float output6_data[output6_dims_count]; + float output7_data[output7_dims_count]; + + int input_shape[] = {1, 8}; + float input_values[] = {1, 2, 3, 4, 5, 6, 7, 8}; + int axis_shape[] = {1, 1}; + int32_t axis_value[] = {0}; + int split_size_shape[] = {1, 8}; + int32_t split[] = {1, 1, 1, 1, 1, 1, 2, -1}; + int output1_shape[] = {1, 1}; + float output1_values[] = {1}; + int output2_shape[] = {1, 1}; + float output2_values[] = {2}; + + int output3_shape[] = {1, 1}; + float output3_values[] = {3}; + int output4_shape[] = {1, 1}; + float output4_values[] = {4}; + + int output5_shape[] = {1, 1}; + float output5_values[] = {5}; + int output6_shape[] = {1, 1}; + float output6_values[] = {6}; + + int output7_shape[] = {1, 2}; + float output7_values[] = {7, 8}; + int output8_shape[] = {1, 0}; + float output8_values[1] = {}; + + tflite::testing::OutputTensors<8> output_tensors; + + output_tensors.data[0] = output1_data; + output_tensors.data[1] = output2_data; + output_tensors.data[2] = output3_data; + output_tensors.data[3] = output4_data; + output_tensors.data[4] = output5_data; + output_tensors.data[5] = output6_data; + output_tensors.data[6] = output7_data; + output_tensors.data[7] = NULL; + + output_tensors.dims[0] = output1_shape; + output_tensors.dims[1] = output2_shape; + output_tensors.dims[2] = output3_shape; + output_tensors.dims[3] = output4_shape; + output_tensors.dims[4] = output5_shape; + output_tensors.dims[5] = output6_shape; + output_tensors.dims[6] = output7_shape; + output_tensors.dims[7] = output8_shape; + + output_tensors.expected_output_data[0] = output1_values; + 
output_tensors.expected_output_data[1] = output2_values; + output_tensors.expected_output_data[2] = output3_values; + output_tensors.expected_output_data[3] = output4_values; + output_tensors.expected_output_data[4] = output5_values; + output_tensors.expected_output_data[5] = output6_values; + output_tensors.expected_output_data[6] = output7_values; + output_tensors.expected_output_data[7] = output8_values; + + tflite::testing::TestSplitVFloat(input_shape, input_values, axis_shape, + axis_value, split_size_shape, split, + output_tensors); +} + +TF_LITE_MICRO_TESTS_END diff --git a/tensorflow/lite/micro/kernels/squared_difference.cc b/tensorflow/lite/micro/kernels/squared_difference.cc new file mode 100644 index 0000000..0194d0c --- /dev/null +++ b/tensorflow/lite/micro/kernels/squared_difference.cc @@ -0,0 +1,270 @@ +/* Copyright 2022 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/
+#include "tensorflow/lite/c/common.h"
+#include "tensorflow/lite/kernels/internal/quantization_util.h"
+#include "tensorflow/lite/kernels/internal/reference/binary_function.h"
+#include "tensorflow/lite/kernels/internal/reference/integer_ops/add.h"
+#include "tensorflow/lite/kernels/kernel_util.h"
+#include "tensorflow/lite/micro/kernels/kernel_util.h"
+#include "tensorflow/lite/micro/micro_context.h"
+#include "tensorflow/lite/micro/micro_log.h"
+
+namespace tflite {
+namespace {
+constexpr int kInputTensor1 = 0;
+constexpr int kInputTensor2 = 1;
+constexpr int kOutputTensor = 0;
+
+struct OpData {
+  bool requires_broadcast;
+  ArithmeticParams arithmetic_params;
+};
+
+template <typename T>
+T SquaredDifference(T input1, T input2) {
+  const T difference = input1 - input2;
+  return difference * difference;
+}
+
+void* SquaredDifferenceInit(TfLiteContext* context, const char* buffer,
+                            size_t length) {
+  TFLITE_DCHECK(context->AllocatePersistentBuffer != nullptr);
+  return context->AllocatePersistentBuffer(context, sizeof(OpData));
+}
+
+void PrepareQuantized(
+    const TfLiteQuantizationParams& input1_quantization_params,
+    const TfLiteQuantizationParams& input2_quantization_params,
+    const TfLiteQuantizationParams& output_quantization_params,
+    const int left_shift, const int32_t quantized_activation_min,
+    const int32_t quantized_activation_max, OpData* data) {
+  data->arithmetic_params.input1_offset =
+      -input1_quantization_params.zero_point;
+  data->arithmetic_params.input2_offset =
+      -input2_quantization_params.zero_point;
+  data->arithmetic_params.output_offset = output_quantization_params.zero_point;
+  data->arithmetic_params.left_shift = left_shift;
+  const double twice_max_input_scale =
+      2.0 * static_cast<double>(std::max(input1_quantization_params.scale,
+                                         input2_quantization_params.scale));
+  const double real_input1_multiplier =
+      static_cast<double>(input1_quantization_params.scale) / 
twice_max_input_scale;
+  double real_input2_multiplier =
+      static_cast<double>(input2_quantization_params.scale) /
+      twice_max_input_scale;
+  const double real_output_multiplier =
+      (twice_max_input_scale * twice_max_input_scale) /
+      static_cast<double>((1 << data->arithmetic_params.left_shift * 2) *
+                          output_quantization_params.scale);
+  QuantizeMultiplierSmallerThanOneExp(
+      real_input1_multiplier, &data->arithmetic_params.input1_multiplier,
+      &data->arithmetic_params.input1_shift);
+  QuantizeMultiplierSmallerThanOneExp(
+      real_input2_multiplier, &data->arithmetic_params.input2_multiplier,
+      &data->arithmetic_params.input2_shift);
+  QuantizeMultiplier(real_output_multiplier,
+                     &data->arithmetic_params.output_multiplier,
+                     &data->arithmetic_params.output_shift);
+  data->arithmetic_params.quantized_activation_min = quantized_activation_min;
+  data->arithmetic_params.quantized_activation_max = quantized_activation_max;
+}
+
+TfLiteStatus SquaredDifferencePrepare(TfLiteContext* context,
+                                      TfLiteNode* node) {
+  TFLITE_DCHECK(node->user_data != nullptr);
+  OpData* data = reinterpret_cast<OpData*>(node->user_data);
+  data->requires_broadcast = false;
+
+  TF_LITE_ENSURE_EQ(context, NumInputs(node), 2);
+  TF_LITE_ENSURE_EQ(context, NumOutputs(node), 1);
+
+  MicroContext* micro_context = GetMicroContext(context);
+
+  TfLiteTensor* input1 =
+      micro_context->AllocateTempInputTensor(node, kInputTensor1);
+  TF_LITE_ENSURE(context, input1 != nullptr);
+  TfLiteTensor* input2 =
+      micro_context->AllocateTempInputTensor(node, kInputTensor2);
+  TF_LITE_ENSURE(context, input2 != nullptr);
+  TfLiteTensor* output =
+      micro_context->AllocateTempOutputTensor(node, kOutputTensor);
+  TF_LITE_ENSURE(context, output != nullptr);
+
+  TF_LITE_ENSURE_TYPES_EQ(context, input1->type, input2->type);
+  output->type = input2->type;
+
+  const TfLiteQuantizationParams& input1_quantization_params = input1->params;
+  const TfLiteQuantizationParams& input2_quantization_params = input2->params;
+  const TfLiteQuantizationParams& 
output_quantization_params = output->params;
+  if (input1->type == kTfLiteInt8) {
+    const int32_t integer_type_min = std::numeric_limits<int8_t>::min();
+    const int32_t integer_type_max = std::numeric_limits<int8_t>::max();
+    TF_LITE_ENSURE(context,
+                   input1_quantization_params.zero_point >= integer_type_min);
+    TF_LITE_ENSURE(context,
+                   input1_quantization_params.zero_point <= integer_type_max);
+    TF_LITE_ENSURE(context,
+                   input2_quantization_params.zero_point >= integer_type_min);
+    TF_LITE_ENSURE(context,
+                   input2_quantization_params.zero_point <= integer_type_max);
+    TF_LITE_ENSURE(context,
+                   output_quantization_params.zero_point >= integer_type_min);
+    TF_LITE_ENSURE(context,
+                   output_quantization_params.zero_point <= integer_type_max);
+    // leftshift = 7 is selected so that maximum shifted result 255^2 * (1 << (7
+    // * 2 )) does not overflow signed 32-bit integer
+    PrepareQuantized(input1_quantization_params, input2_quantization_params,
+                     output_quantization_params, /*left_shift=*/7,
+                     /*quantized_activation_min*/ integer_type_min,
+                     /*quantized_activation_max*/ integer_type_max, data);
+  } else if (input1->type == kTfLiteInt16) {
+    const int32_t integer_type_min = std::numeric_limits<int16_t>::min();
+    const int32_t integer_type_max = std::numeric_limits<int16_t>::max();
+    TF_LITE_ENSURE(context, input1_quantization_params.zero_point == 0);
+    TF_LITE_ENSURE(context, input2_quantization_params.zero_point == 0);
+    TF_LITE_ENSURE(context, output_quantization_params.zero_point == 0);
+
+    // leftshift = 0 as number is already 16-bit.
so that maximum shifted result
+    // 32767^2 * (1 << (0 * 2 ))
+    PrepareQuantized(input1_quantization_params, input2_quantization_params,
+                     output_quantization_params, /*left_shift=*/0,
+                     /*quantized_activation_min*/ integer_type_min,
+                     /*quantized_activation_max*/ integer_type_max, data);
+  }
+
+  data->requires_broadcast = !HaveSameShapes(input1, input2);
+
+  micro_context->DeallocateTempTfLiteTensor(input1);
+  micro_context->DeallocateTempTfLiteTensor(input2);
+  micro_context->DeallocateTempTfLiteTensor(output);
+  return kTfLiteOk;
+}
+
+template <typename T>
+T SquaredDifference(T x, T y, const ArithmeticParams& params) {
+  const int32_t input1_val = params.input1_offset + x;
+  const int32_t input2_val = params.input2_offset + y;
+  const int32_t shifted_input1_val = input1_val * (1 << params.left_shift);
+  const int32_t shifted_input2_val = input2_val * (1 << params.left_shift);
+  const int32_t scaled_input1_val =
+      MultiplyByQuantizedMultiplierSmallerThanOneExp(
+          shifted_input1_val, params.input1_multiplier, params.input1_shift);
+  const int32_t scaled_input2_val =
+      MultiplyByQuantizedMultiplierSmallerThanOneExp(
+          shifted_input2_val, params.input2_multiplier, params.input2_shift);
+  const int32_t raw_diff = scaled_input1_val - scaled_input2_val;
+
+  // Max of this is 32767^2 * (1 << 0), so won't overflow 32 bits.
+ const int32_t squared_raw_diff = raw_diff * raw_diff; + const int32_t raw_output = + MultiplyByQuantizedMultiplier(squared_raw_diff, params.output_multiplier, + params.output_shift) + + params.output_offset; + const int32_t clamped_output = + std::min(params.quantized_activation_max, + std::max(params.quantized_activation_min, raw_output)); + return static_cast(clamped_output); +} + +template +void EvalQuantizedSquaredDifference(TfLiteContext* context, TfLiteNode* node, + const OpData* data, + const TfLiteEvalTensor* input1, + const TfLiteEvalTensor* input2, + TfLiteEvalTensor* output) { + const auto* op_data = static_cast(node->user_data); + if (data->requires_broadcast) { + reference_integer_ops::BroadcastBinaryFunction4DSlow( + op_data->arithmetic_params, tflite::micro::GetTensorShape(input1), + tflite::micro::GetTensorData(input1), + tflite::micro::GetTensorShape(input2), + tflite::micro::GetTensorData(input2), + tflite::micro::GetTensorShape(output), + tflite::micro::GetTensorData(output), + reference_integer_ops::CheckArithmeticParams, SquaredDifference); + } else { + const int flat_size = tflite::micro::GetTensorShape(input1).FlatSize(); + reference_integer_ops::ElementWise( + flat_size, op_data->arithmetic_params, + tflite::micro::GetTensorData(input1), + tflite::micro::GetTensorData(input2), + tflite::micro::GetTensorData(output), + reference_integer_ops::CheckArithmeticParams, SquaredDifference); + } +} + +template +void EvalSquaredDifference(TfLiteContext* context, TfLiteNode* node, + const OpData* data, const TfLiteEvalTensor* input1, + const TfLiteEvalTensor* input2, + TfLiteEvalTensor* output) { + if (data->requires_broadcast) { + reference_ops::BroadcastBinaryFunction4DSlow( + tflite::micro::GetTensorShape(input1), + tflite::micro::GetTensorData(input1), + tflite::micro::GetTensorShape(input2), + tflite::micro::GetTensorData(input2), + tflite::micro::GetTensorShape(output), + tflite::micro::GetTensorData(output), SquaredDifference); + } else { + 
reference_ops::BinaryFunction( + tflite::micro::GetTensorShape(input1), + tflite::micro::GetTensorData(input1), + tflite::micro::GetTensorShape(input2), + tflite::micro::GetTensorData(input2), + tflite::micro::GetTensorShape(output), + tflite::micro::GetTensorData(output), SquaredDifference); + } +} + +TfLiteStatus SquaredDifferenceEval(TfLiteContext* context, TfLiteNode* node) { + OpData* data = reinterpret_cast(node->user_data); + + const TfLiteEvalTensor* input1 = + tflite::micro::GetEvalInput(context, node, kInputTensor1); + const TfLiteEvalTensor* input2 = + tflite::micro::GetEvalInput(context, node, kInputTensor2); + TfLiteEvalTensor* output = + tflite::micro::GetEvalOutput(context, node, kOutputTensor); + + if (output->type == kTfLiteFloat32) { + EvalSquaredDifference(context, node, data, input1, input2, output); + } else if (output->type == kTfLiteInt32) { + EvalSquaredDifference(context, node, data, input1, input2, output); + } else if (output->type == kTfLiteInt8) { + EvalQuantizedSquaredDifference(context, node, data, input1, input2, + output); + } else if (output->type == kTfLiteInt16) { + EvalQuantizedSquaredDifference(context, node, data, input1, input2, + output); + } else { + MicroPrintf( + "SquaredDifference only supports FLOAT32, INT32 , INT16 and INT8 now, " + "got %d.", + output->type); + return kTfLiteError; + } + + return kTfLiteOk; +} +} // namespace + +TFLMRegistration Register_SQUARED_DIFFERENCE() { + return tflite::micro::RegisterOp( + SquaredDifferenceInit, SquaredDifferencePrepare, SquaredDifferenceEval); +} + +} // namespace tflite diff --git a/tensorflow/lite/micro/kernels/squared_difference_test.cc b/tensorflow/lite/micro/kernels/squared_difference_test.cc new file mode 100644 index 0000000..c7d7e42 --- /dev/null +++ b/tensorflow/lite/micro/kernels/squared_difference_test.cc @@ -0,0 +1,335 @@ +/* Copyright 2022 The TensorFlow Authors. All Rights Reserved. 
+ +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ +#include "tensorflow/lite/c/common.h" +#include "tensorflow/lite/kernels/internal/quantization_util.h" +#include "tensorflow/lite/micro/kernels/kernel_runner.h" +#include "tensorflow/lite/micro/micro_utils.h" +#include "tensorflow/lite/micro/test_helpers.h" +#include "tensorflow/lite/micro/testing/micro_test.h" + +namespace tflite { +namespace testing { +namespace { + +constexpr int kNumTestShapes = 4; +constexpr int kMaxTestShapeSize = 5; + +int test_shape[kNumTestShapes][kMaxTestShapeSize] = { + {1, 6}, + {2, 2, 3}, + {3, 2, 1, 3}, + {4, 1, 3, 1, 2}, +}; + +template +void ValidateSquaredDifferenceGoldens(TfLiteTensor* tensors, int tensors_size, + const T* golden, T* output, + int output_size, float tolerance = 1e-5) { + int inputs_array_data[] = {2, 0, 1}; + TfLiteIntArray* inputs_array = IntArrayFromInts(inputs_array_data); + int outputs_array_data[] = {1, 2}; + TfLiteIntArray* outputs_array = IntArrayFromInts(outputs_array_data); + + const TFLMRegistration registration = tflite::Register_SQUARED_DIFFERENCE(); + micro::KernelRunner runner(registration, tensors, tensors_size, inputs_array, + outputs_array, /*builtin_data=*/nullptr); + + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, runner.InitAndPrepare()); + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, runner.Invoke()); + + for (int i = 0; i < output_size; ++i) { + TF_LITE_MICRO_EXPECT_NEAR(golden[i], output[i], tolerance); + } 
+} + +template +void TestSquaredDifference(int* input1_dims_data, const T* input1_data, + int* input2_dims_data, const T* input2_data, + int* output_dims_data, const T* expected_output, + T* output_data) { + TfLiteIntArray* input1_dims = IntArrayFromInts(input1_dims_data); + TfLiteIntArray* input2_dims = IntArrayFromInts(input2_dims_data); + TfLiteIntArray* output_dims = IntArrayFromInts(output_dims_data); + + constexpr int inputs_size = 2; + constexpr int outputs_size = 1; + constexpr int tensors_size = inputs_size + outputs_size; + TfLiteTensor tensors[tensors_size] = { + CreateTensor(input1_data, input1_dims), + CreateTensor(input2_data, input2_dims), + CreateTensor(output_data, output_dims), + }; + + ValidateSquaredDifferenceGoldens(tensors, tensors_size, expected_output, + output_data, ElementCount(*output_dims)); +} + +template +void TestSquaredDifferenceQuantized( + int* input1_dims_data, const float* input1_data, T* input1_quantized, + float input1_min, float input1_max, + + int* input2_dims_data, const float* input2_data, T* input2_quantized, + float input2_min, float input2_max, + + int* output_dims_data, T* output_data, float output_min, float output_max, + float* dequantized_output, const float* golden, + + float tolerance, bool narrow_range = false) { + QuantizationParams input1_qparams; + QuantizationParams input2_qparams; + QuantizationParams output_qparams; + + input1_qparams = ChooseQuantizationParams(static_cast(input1_min), + static_cast(input1_max), + narrow_range); + input2_qparams = ChooseQuantizationParams(static_cast(input2_min), + static_cast(input2_max), + narrow_range); + output_qparams = ChooseQuantizationParams(static_cast(output_min), + static_cast(output_max), + narrow_range); + + TfLiteIntArray* input1_dims = IntArrayFromInts(input1_dims_data); + TfLiteIntArray* input2_dims = IntArrayFromInts(input2_dims_data); + TfLiteIntArray* output_dims = IntArrayFromInts(output_dims_data); + int output_size = ElementCount(*output_dims); + + 
constexpr int inputs_size = 2; + constexpr int outputs_size = 1; + constexpr int tensors_size = inputs_size + outputs_size; + TfLiteTensor tensors[tensors_size] = { + CreateQuantizedTensor(input1_data, input1_quantized, input1_dims, + input1_qparams.scale, input1_qparams.zero_point), + CreateQuantizedTensor(input2_data, input2_quantized, input2_dims, + input2_qparams.scale, input2_qparams.zero_point), + CreateQuantizedTensor(output_data, output_dims, output_qparams.scale, + output_qparams.zero_point), + }; + + int inputs_array_data[] = {2, 0, 1}; + TfLiteIntArray* inputs_array = IntArrayFromInts(inputs_array_data); + int outputs_array_data[] = {1, 2}; + TfLiteIntArray* outputs_array = IntArrayFromInts(outputs_array_data); + + const TFLMRegistration registration = tflite::Register_SQUARED_DIFFERENCE(); + micro::KernelRunner runner(registration, tensors, tensors_size, inputs_array, + outputs_array, /*builtin_data=*/nullptr); + + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, runner.InitAndPrepare()); + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, runner.Invoke()); + + Dequantize(output_data, output_size, output_qparams.scale, + output_qparams.zero_point, dequantized_output); + + for (int i = 0; i < output_size; ++i) { + TF_LITE_MICRO_EXPECT_NEAR(golden[i], dequantized_output[i], tolerance); + } +} + +} // namespace +} // namespace testing +} // namespace tflite + +TF_LITE_MICRO_TESTS_BEGIN + +TF_LITE_MICRO_TEST(FloatSquaredDifferenceSameShape) { + constexpr int data_size = 4; + int inout_shape[] = {4, 1, 2, 2, 1}; + const float input1_values[] = {-0.2, 0.2, -1.2, 0.8}; + const float input2_values[] = {0.5, 0.2, -1.5, 0.5}; + const float golden_values[] = {0.49, 0.0, 0.09, 0.09}; + float output_data[data_size]; + tflite::testing::TestSquaredDifference( + inout_shape, input1_values, inout_shape, input2_values, inout_shape, + golden_values, output_data); +} + +TF_LITE_MICRO_TEST(FloatSquaredDifferenceVariousShapes) { + constexpr int data_size = 6; + const float input1_values[] = {-2.0, 0.2, 
0.3, 0.8, 1.1, -2.0}; + const float input2_values[] = {1.0, 0.2, 0.6, 0.4, -1.0, -0.0}; + const float golden_values[] = {9.0, 0.0, 0.09, 0.16, 4.41, 4.0}; + float output_data[data_size]; + for (int i = 0; i < tflite::testing::kNumTestShapes; ++i) { + tflite::testing::TestSquaredDifference( + tflite::testing::test_shape[i], input1_values, + tflite::testing::test_shape[i], input2_values, + tflite::testing::test_shape[i], golden_values, output_data); + } +} + +TF_LITE_MICRO_TEST(FloatSquaredDifferenceWithBroadcast) { + constexpr int data_size = 6; + + // input 2 is scalar + int input2_shape[] = {1, 1}; + const float input1_values[] = {-0.2, 0.2, 0.5, 0.8, 0.11, 1.1}; + const float input2_values[] = {0.1}; + const float golden_values[] = {0.09, 0.01, 0.16, 0.49, 0.0001, 1.0}; + float output_data[data_size]; + for (int i = 0; i < tflite::testing::kNumTestShapes; ++i) { + tflite::testing::TestSquaredDifference( + tflite::testing::test_shape[i], input1_values, input2_shape, + input2_values, tflite::testing::test_shape[i], golden_values, + output_data); + } +} + +TF_LITE_MICRO_TEST(IntegerSquaredDifferenceSameShape) { + constexpr int data_size = 4; + int inout_shape[] = {4, 1, 2, 2, 1}; + const int32_t input1_values[] = {-2, 2, -15, 8}; + const int32_t input2_values[] = {5, -2, -3, 5}; + const int32_t golden_values[] = {49, 16, 144, 9}; + int32_t output_data[data_size]; + tflite::testing::TestSquaredDifference( + inout_shape, input1_values, inout_shape, input2_values, inout_shape, + golden_values, output_data); +} + +TF_LITE_MICRO_TEST(IntegerSquaredDifferenceVariousShapes) { + constexpr int data_size = 6; + const int32_t input1_values[] = {-20, 2, 3, 8, 11, -20}; + const int32_t input2_values[] = {1, 2, 6, 5, -5, -20}; + const int32_t golden_values[] = {441, 0, 9, 9, 256, 0}; + int32_t output_data[data_size]; + for (int i = 0; i < tflite::testing::kNumTestShapes; ++i) { + tflite::testing::TestSquaredDifference( + tflite::testing::test_shape[i], input1_values, + 
tflite::testing::test_shape[i], input2_values, + tflite::testing::test_shape[i], golden_values, output_data); + } +} + +TF_LITE_MICRO_TEST(IntegerSquaredDifferenceWithBroadcast) { + constexpr int data_size = 6; + + // input 2 is a scalar + int input2_shape[] = {1, 1}; + const int32_t input1_values[] = {-20, 10, 7, 3, 1, 13}; + const int32_t input2_values[] = {3}; + const int32_t golden_values[] = {529, 49, 16, 0, 4, 100}; + int32_t output_data[data_size]; + for (int i = 0; i < tflite::testing::kNumTestShapes; ++i) { + tflite::testing::TestSquaredDifference( + tflite::testing::test_shape[i], input1_values, input2_shape, + input2_values, tflite::testing::test_shape[i], golden_values, + output_data); + } +} + +TF_LITE_MICRO_TEST(QuantizedSquaredDifferenceSameShape) { + constexpr int data_size = 4; + int inout_shape[] = {4, 1, 2, 2, 1}; + const float input1_values[] = {-0.2, 0.2, -1.2, 0.8}; + const float input2_values[] = {0.5, 0.2, -1.5, 0.5}; + const float golden_values[] = {0.49, 0.0, 0.09, 0.09}; + float output_dequantized[data_size]; + // Int8 case + int8_t input1_int8[data_size]; + int8_t input2_int8[data_size]; + int8_t output_int8[data_size]; + tflite::testing::TestSquaredDifferenceQuantized( + inout_shape, input1_values, input1_int8, -1.2f, 0.8f, inout_shape, + input2_values, input2_int8, -1.5f, 0.5f, inout_shape, output_int8, 0.0f, + 0.5f, output_dequantized, golden_values, 2.0f / 255.0f); + + // Int16 case + int16_t input1_int16[data_size]; + int16_t input2_int16[data_size]; + int16_t output_int16[data_size]; + // Symmetrical quantization: (rmin == -rmax), requires narrow range (qmin = + // -qmax). 
+ // TODO(b/269352046): understand the tolerance level + // http://b/269352046#comment7 + tflite::testing::TestSquaredDifferenceQuantized( + inout_shape, input1_values, input1_int16, -1.2f, 1.2f, inout_shape, + input2_values, input2_int16, -1.5f, 1.5f, inout_shape, output_int16, + -0.5f, 0.5f, output_dequantized, golden_values, 6.0f / 32768.0f, + /*narrow_range=*/true); +} + +TF_LITE_MICRO_TEST(QuantizedSquaredDifferenceVariousShapes) { + constexpr int data_size = 6; + const float input1_values[] = {-2.0, 0.2, 0.3, 0.8, 1.1, -2.0}; + const float input2_values[] = {1.0, 0.2, 0.6, 0.4, -1.0, -0.0}; + const float golden_values[] = {9.0, 0.0, 0.09, 0.16, 4.41, 4.0}; + // Int8 case + int8_t input1_int8[data_size]; + int8_t input2_int8[data_size]; + int8_t output_int8[data_size]; + float output_dequantized[data_size]; + for (int i = 0; i < tflite::testing::kNumTestShapes; ++i) { + tflite::testing::TestSquaredDifferenceQuantized( + tflite::testing::test_shape[i], input1_values, input1_int8, -2.0f, 1.7f, + tflite::testing::test_shape[i], input2_values, input2_int8, -1.0f, 1.0f, + tflite::testing::test_shape[i], output_int8, 0.0f, 9.0f, + output_dequantized, golden_values, 18.0f / 255.0f); + } + + // Int16 case + int16_t input1_int16[data_size]; + int16_t input2_int16[data_size]; + int16_t output_int16[data_size]; + // Symmetrical quantization: (rmin == -rmax), requires narrow range (qmin = + // -qmax). 
+ for (int i = 0; i < tflite::testing::kNumTestShapes; ++i) { + tflite::testing::TestSquaredDifferenceQuantized( + tflite::testing::test_shape[i], input1_values, input1_int16, -2.0f, + 2.0f, tflite::testing::test_shape[i], input2_values, input2_int16, + -1.0f, 1.0f, tflite::testing::test_shape[i], output_int16, -9.0f, 9.0f, + output_dequantized, golden_values, 18.0f / 32768.0f, + /*narrow_range=*/true); + } +} + +TF_LITE_MICRO_TEST(FloatSquaredDifferenceWithBroadcast) { + constexpr int data_size = 6; + + // input 2 is a scalar + int input2_shape[] = {1, 1}; + const float input1_values[] = {-0.2, 0.2, 0.5, 0.8, 0.11, 1.1}; + const float input2_values[] = {0.1}; + const float golden_values[] = {0.09, 0.01, 0.16, 0.49, 0.0001, 1.0}; + + // Int8 case + int8_t input1_int8[data_size]; + int8_t input2_int8[data_size]; + int8_t output_int8[data_size]; + float output_dequantized[data_size]; + for (int i = 0; i < tflite::testing::kNumTestShapes; ++i) { + tflite::testing::TestSquaredDifferenceQuantized( + tflite::testing::test_shape[i], input1_values, input1_int8, -0.2f, 1.1f, + input2_shape, input2_values, input2_int8, 0.0f, 1.0f, + tflite::testing::test_shape[i], output_int8, 0.0f, 1.0f, + output_dequantized, golden_values, 2.0f / 255.0f); + } + + // Int16 case + int16_t input1_int16[data_size]; + int16_t input2_int16[data_size]; + int16_t output_int16[data_size]; + for (int i = 0; i < tflite::testing::kNumTestShapes; ++i) { + tflite::testing::TestSquaredDifferenceQuantized( + tflite::testing::test_shape[i], input1_values, input1_int16, -1.1f, + 1.1f, input2_shape, input2_values, input2_int16, -1.0f, 1.0f, + tflite::testing::test_shape[i], output_int16, -1.0f, 1.0f, + output_dequantized, golden_values, 2.0f / 32768.0f, + /*narrow_range=*/true); + } +} + +TF_LITE_MICRO_TESTS_END diff --git a/tensorflow/lite/micro/kernels/squeeze.cc b/tensorflow/lite/micro/kernels/squeeze.cc new file mode 100644 index 0000000..e52ccab --- /dev/null +++ 
b/tensorflow/lite/micro/kernels/squeeze.cc @@ -0,0 +1,118 @@ +/* Copyright 2020 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#include "tensorflow/lite/c/builtin_op_data.h" +#include "tensorflow/lite/c/common.h" +#include "tensorflow/lite/kernels/internal/quantization_util.h" +#include "tensorflow/lite/kernels/internal/reference/process_broadcast_shapes.h" +#include "tensorflow/lite/kernels/internal/tensor_ctypes.h" +#include "tensorflow/lite/kernels/kernel_util.h" +#include "tensorflow/lite/kernels/op_macros.h" +#include "tensorflow/lite/micro/kernels/kernel_util.h" +#include "tensorflow/lite/micro/memory_helpers.h" +#include "tensorflow/lite/micro/micro_log.h" + +namespace tflite { +namespace { + +struct SqueezeContext { + SqueezeContext(TfLiteContext* context, TfLiteNode* node) { + params = reinterpret_cast(node->builtin_data); + micro_context = GetMicroContext(context); + input = micro_context->AllocateTempInputTensor(node, 0); + output = micro_context->AllocateTempOutputTensor(node, 0); + } + ~SqueezeContext() { + micro_context->DeallocateTempTfLiteTensor(input); + micro_context->DeallocateTempTfLiteTensor(output); + } + MicroContext* micro_context; + TfLiteSqueezeParams* params; + TfLiteTensor* input; + TfLiteTensor* output; +}; + +TfLiteStatus Prepare(TfLiteContext* context, TfLiteNode* node) { + TF_LITE_ENSURE_EQ(context, NumInputs(node), 1); + 
TF_LITE_ENSURE_EQ(context, NumOutputs(node), 1); + + SqueezeContext op_context(context, node); + const int input_num_dims = NumDimensions(op_context.input); + const int num_squeeze_dims = op_context.params->num_squeeze_dims; + + // Determines number of dimensions of output tensor after squeeze. + const TfLiteIntArray* input_dims = op_context.input->dims; + const TfLiteIntArray* output_dims = op_context.output->dims; + const int* squeeze_dims = op_context.params->squeeze_dims; + + constexpr int max_squeeze_dims = 8; + TF_LITE_ENSURE(context, input_num_dims <= max_squeeze_dims); + bool should_squeeze[max_squeeze_dims] = {}; + + if (num_squeeze_dims == 0) { + for (int idx = 0; idx < input_num_dims; ++idx) { + if (input_dims->data[idx] == 1) { + should_squeeze[idx] = true; + } + } + } else { + for (int idx = 0; idx < num_squeeze_dims; ++idx) { + int current = squeeze_dims[idx] < 0 ? squeeze_dims[idx] + input_num_dims + : squeeze_dims[idx]; + TF_LITE_ENSURE(context, current >= 0 && current < input_num_dims && + input_dims->data[current] == 1); + should_squeeze[current] = true; + } + } + + // Ensure output dimensions are big enough. 
+ for (int in_idx = 0, out_idx = 0; in_idx < input_num_dims; ++in_idx) { + if (!should_squeeze[in_idx]) { + TFLITE_CHECK_GE(output_dims->data[out_idx++], input_dims->data[in_idx]); + } + } + + return kTfLiteOk; +} + +TfLiteStatus Eval(TfLiteContext* context, TfLiteNode* node) { + const TfLiteEvalTensor* input = tflite::micro::GetEvalInput(context, node, 0); + + if (input->type == kTfLiteString) { + MicroPrintf("Type %s (%d) not supported.", TfLiteTypeGetName(input->type), + input->type); + return kTfLiteError; + } + + TfLiteEvalTensor* output = tflite::micro::GetEvalOutput(context, node, 0); + size_t input_byte_size; + size_t output_byte_size; + TF_LITE_ENSURE_OK(context, + TfLiteEvalTensorByteLength(input, &input_byte_size)); + TF_LITE_ENSURE_OK(context, + TfLiteEvalTensorByteLength(output, &output_byte_size)); + + TF_LITE_ENSURE_EQ(context, input_byte_size, output_byte_size); + memcpy(output->data.raw, input->data.raw, input_byte_size); + return kTfLiteOk; +} + +} // namespace + +TFLMRegistration Register_SQUEEZE() { + return tflite::micro::RegisterOp(nullptr, Prepare, Eval); +} + +} // namespace tflite diff --git a/tensorflow/lite/micro/kernels/squeeze_test.cc b/tensorflow/lite/micro/kernels/squeeze_test.cc new file mode 100644 index 0000000..149413b --- /dev/null +++ b/tensorflow/lite/micro/kernels/squeeze_test.cc @@ -0,0 +1,129 @@ +/* Copyright 2020 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/ + +#include "tensorflow/lite/c/builtin_op_data.h" +#include "tensorflow/lite/c/common.h" +#include "tensorflow/lite/micro/kernels/kernel_runner.h" +#include "tensorflow/lite/micro/test_helpers.h" +#include "tensorflow/lite/micro/testing/micro_test.h" + +namespace tflite { +namespace testing { +namespace { + +int input_dims_data_common[] = {3, 1, 24, 1}; +int output_dims_data_common[] = {1, 24}; +const int32_t input_data_common[] = {1, 2, 3, 4, 5, 6, 7, 8, + 9, 10, 11, 12, 13, 14, 15, 16, + 17, 18, 19, 20, 21, 22, 23, 24}; +const int32_t golden_common[] = {1, 2, 3, 4, 5, 6, 7, 8, + 9, 10, 11, 12, 13, 14, 15, 16, + 17, 18, 19, 20, 21, 22, 23, 24}; +const int expected_output_size_common = 24; + +void TestSqueezeOp(int* input_dims_data, const int32_t* input_data, + int* output_dims_data, int32_t* output_data, + const int32_t* golden, int expected_output_size, + TfLiteSqueezeParams* squeeze_params) { + TfLiteIntArray* input_dims1 = IntArrayFromInts(input_dims_data); + TfLiteIntArray* output_dims1 = IntArrayFromInts(output_dims_data); + + constexpr int inputs_size = 1; + constexpr int outputs_size = 1; + constexpr int tensors_size = inputs_size + outputs_size; + + TfLiteTensor tensors[tensors_size]; + tensors[0] = CreateTensor(input_data, input_dims1); + tensors[1] = CreateTensor(output_data, output_dims1); + + int inputs_array_data[] = {1, 0}; + TfLiteIntArray* inputs_array = IntArrayFromInts(inputs_array_data); + int outputs_array_data[] = {1, 1}; + TfLiteIntArray* outputs_array = IntArrayFromInts(outputs_array_data); + + const TFLMRegistration registration = Register_SQUEEZE(); + micro::KernelRunner runner(registration, tensors, tensors_size, inputs_array, + outputs_array, + reinterpret_cast(squeeze_params)); + + const char* init_data = reinterpret_cast(squeeze_params); + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, runner.InitAndPrepare(init_data)); + + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, 
runner.Invoke()); + + for (int i = 0; i < expected_output_size; ++i) { + TF_LITE_MICRO_EXPECT_EQ(golden[i], output_data[i]); + } + + TF_LITE_MICRO_EXPECT(runner.ValidateTempBufferDeallocated()); +} +} // namespace +} // namespace testing +} // namespace tflite + +TF_LITE_MICRO_TESTS_BEGIN + +TF_LITE_MICRO_TEST(SqueezeAll) { + int32_t output_data[24]; + TfLiteSqueezeParams squeeze_params = {{}, 0}; + + tflite::testing::TestSqueezeOp(tflite::testing::input_dims_data_common, + tflite::testing::input_data_common, + tflite::testing::output_dims_data_common, + output_data, tflite::testing::golden_common, + tflite::testing::expected_output_size_common, + &squeeze_params); +} + +TF_LITE_MICRO_TEST(SqueezeSelectedAxis) { + int32_t output_data[24]; + TfLiteSqueezeParams squeeze_params = {{2}, 1}; + int output_dims_data_common[] = {2, 1, 24}; + + tflite::testing::TestSqueezeOp( + tflite::testing::input_dims_data_common, + tflite::testing::input_data_common, output_dims_data_common, output_data, + tflite::testing::golden_common, + tflite::testing::expected_output_size_common, &squeeze_params); +} + +TF_LITE_MICRO_TEST(SqueezeNegativeAxis) { + int32_t output_data[24]; + TfLiteSqueezeParams squeeze_params = {{-1, 0}, 2}; + + tflite::testing::TestSqueezeOp(tflite::testing::input_dims_data_common, + tflite::testing::input_data_common, + tflite::testing::output_dims_data_common, + output_data, tflite::testing::golden_common, + tflite::testing::expected_output_size_common, + &squeeze_params); +} + +TF_LITE_MICRO_TEST(SqueezeAllDims) { + int input_dims_data[] = {7, 1, 1, 1, 1, 1, 1, 1}; + int output_dims_data[] = {1, 1}; + const int32_t input_data[] = {3}; + const int32_t golden[] = {3}; + const int expected_output_size = 1; + + int32_t output_data[24]; + TfLiteSqueezeParams squeeze_params = {{}, 0}; + + tflite::testing::TestSqueezeOp(input_dims_data, input_data, output_dims_data, + output_data, golden, expected_output_size, + &squeeze_params); +} + +TF_LITE_MICRO_TESTS_END diff 
--git a/tensorflow/lite/micro/kernels/strided_slice.cc b/tensorflow/lite/micro/kernels/strided_slice.cc new file mode 100644 index 0000000..4e60e6b --- /dev/null +++ b/tensorflow/lite/micro/kernels/strided_slice.cc @@ -0,0 +1,207 @@ +/* Copyright 2023 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ +#include "tensorflow/lite/kernels/internal/reference/strided_slice.h" + +#include +#include + +#include "tensorflow/lite/c/builtin_op_data.h" +#include "tensorflow/lite/c/common.h" +#include "tensorflow/lite/kernels/internal/tensor_ctypes.h" +#include "tensorflow/lite/kernels/kernel_util.h" +#include "tensorflow/lite/kernels/op_macros.h" +#include "tensorflow/lite/micro/kernels/kernel_util.h" +#include "tensorflow/lite/micro/micro_log.h" + +namespace tflite { + +namespace { + +constexpr int kInputTensor = 0; +constexpr int kBeginTensor = 1; +constexpr int kEndTensor = 2; +constexpr int kStridesTensor = 3; +constexpr int kOutputTensor = 0; + +struct StridedSliceContext { + StridedSliceContext(TfLiteContext* context, TfLiteNode* node) { + params = reinterpret_cast(node->builtin_data); + micro_context = GetMicroContext(context); + input = micro_context->AllocateTempInputTensor(node, kInputTensor); + begin = micro_context->AllocateTempInputTensor(node, kBeginTensor); + end = micro_context->AllocateTempInputTensor(node, kEndTensor); + strides = 
micro_context->AllocateTempInputTensor(node, kStridesTensor); + output = micro_context->AllocateTempOutputTensor(node, kOutputTensor); + dims = NumDimensions(input); + } + ~StridedSliceContext() { + micro_context->DeallocateTempTfLiteTensor(input); + micro_context->DeallocateTempTfLiteTensor(begin); + micro_context->DeallocateTempTfLiteTensor(end); + micro_context->DeallocateTempTfLiteTensor(strides); + micro_context->DeallocateTempTfLiteTensor(output); + } + const TfLiteStridedSliceParams* params; + MicroContext* micro_context; + TfLiteTensor* input; + TfLiteTensor* begin; + TfLiteTensor* end; + TfLiteTensor* strides; + TfLiteTensor* output; + int dims; +}; + +// This Op only supports 1-4D cases and since we use the reference 4D +// implementation, the 1-3D tensors are mapped to 4D. +const int kMaxDim = 4; + +tflite::StridedSliceParams BuildStridedSliceParams( + StridedSliceContext* op_context) { + tflite::StridedSliceParams op_params{}; + op_params.start_indices_count = op_context->dims; + op_params.stop_indices_count = op_context->dims; + op_params.strides_count = op_context->dims; + + for (int i = 0; i < op_context->dims; ++i) { + op_params.start_indices[i] = GetTensorData(op_context->begin)[i]; + op_params.stop_indices[i] = GetTensorData(op_context->end)[i]; + op_params.strides[i] = GetTensorData(op_context->strides)[i]; + } + + op_params.begin_mask = op_context->params->begin_mask; + op_params.ellipsis_mask = 0; + op_params.end_mask = op_context->params->end_mask; + op_params.new_axis_mask = 0; + op_params.shrink_axis_mask = op_context->params->shrink_axis_mask; + return op_params; +} + +// Processes the indexing tensors (begin, end and strides) to resize the +// output tensor. This function is callable from both Prepare() and Eval() as +// long as the caller ensures the indexing tensors are present. 
+TfLiteStatus CheckOutputSize(TfLiteContext* context, + StridedSliceContext* op_context) { + using ::tflite::strided_slice::StartForAxis; + using ::tflite::strided_slice::StopForAxis; + TfLiteIntArray* output_shape = op_context->output->dims; + int shape_size = 0; + auto op_params = BuildStridedSliceParams(op_context); + auto input_shape = GetTensorShape(op_context->input); + for (int idx = 0; idx < op_context->dims; ++idx) { + int32_t stride = GetTensorData(op_context->strides)[idx]; + TF_LITE_ENSURE_MSG(context, stride != 0, "stride value has to be non-zero"); + int32_t begin = StartForAxis(op_params, input_shape, idx); + int32_t end = StopForAxis(op_params, input_shape, idx, begin); + + // When shrinking an axis, the end position does not matter (and can be + // incorrect when negative indexing is used, see Issue #19260). Always use + // begin + 1 to generate a length 1 slice, since begin has + // already been adjusted for negative indices by StartForAxis. + const bool shrink_axis = op_context->params->shrink_axis_mask & (1 << idx); + if (shrink_axis) { + end = begin + 1; + } + + // This is valid for both positive and negative strides + int32_t dim_shape = std::ceil((end - begin) / static_cast(stride)); + dim_shape = dim_shape < 0 ? 
0 : dim_shape; + if (!shrink_axis) { + TF_LITE_ENSURE_EQ(context, output_shape->data[shape_size], dim_shape); + shape_size++; + } + } + TF_LITE_ENSURE_EQ(context, output_shape->size, shape_size); + return kTfLiteOk; +} + +void* Init(TfLiteContext* context, const char* buffer, size_t length) { + TFLITE_DCHECK(context->AllocatePersistentBuffer != nullptr); + return context->AllocatePersistentBuffer(context, sizeof(StridedSliceParams)); +} + +TfLiteStatus Prepare(TfLiteContext* context, TfLiteNode* node) { + TFLITE_DCHECK(node->user_data != nullptr); + StridedSliceParams* op_params = + static_cast(node->user_data); + TF_LITE_ENSURE_EQ(context, NumInputs(node), 4); + TF_LITE_ENSURE_EQ(context, NumOutputs(node), 1); + StridedSliceContext op_context(context, node); + TF_LITE_ENSURE_MSG(context, op_context.dims <= kMaxDim, + "input dim should not exceed 4"); + auto params = BuildStridedSliceParams(&op_context); + memcpy(op_params, ¶ms, sizeof(StridedSliceParams)); + return CheckOutputSize(context, &op_context); +} + +TfLiteStatus Eval(TfLiteContext* context, TfLiteNode* node) { + TFLITE_DCHECK(node->user_data != nullptr); + const StridedSliceParams& op_params = + *(static_cast(node->user_data)); + + const TfLiteEvalTensor* input = + tflite::micro::GetEvalInput(context, node, kInputTensor); + TfLiteEvalTensor* output = + tflite::micro::GetEvalOutput(context, node, kOutputTensor); + switch (output->type) { + case kTfLiteFloat32: + reference_ops::StridedSlice(op_params, + tflite::micro::GetTensorShape(input), + tflite::micro::GetTensorData(input), + tflite::micro::GetTensorShape(output), + tflite::micro::GetTensorData(output)); + break; + case kTfLiteInt8: + reference_ops::StridedSlice(op_params, + tflite::micro::GetTensorShape(input), + tflite::micro::GetTensorData(input), + tflite::micro::GetTensorShape(output), + tflite::micro::GetTensorData(output)); + break; + case kTfLiteInt16: + reference_ops::StridedSlice( + op_params, tflite::micro::GetTensorShape(input), + 
tflite::micro::GetTensorData(input), + tflite::micro::GetTensorShape(output), + tflite::micro::GetTensorData(output)); + break; + case kTfLiteInt32: + reference_ops::StridedSlice( + op_params, tflite::micro::GetTensorShape(input), + tflite::micro::GetTensorData(input), + tflite::micro::GetTensorShape(output), + tflite::micro::GetTensorData(output)); + break; + case kTfLiteBool: + reference_ops::StridedSlice(op_params, + tflite::micro::GetTensorShape(input), + tflite::micro::GetTensorData(input), + tflite::micro::GetTensorShape(output), + tflite::micro::GetTensorData(output)); + break; + default: + MicroPrintf("Type %s (%d) not supported.", TfLiteTypeGetName(input->type), + input->type); + return kTfLiteError; + } + return kTfLiteOk; +} + +} // namespace + +TFLMRegistration Register_STRIDED_SLICE() { + return tflite::micro::RegisterOp(Init, Prepare, Eval); +} + +} // namespace tflite diff --git a/tensorflow/lite/micro/kernels/strided_slice_test.cc b/tensorflow/lite/micro/kernels/strided_slice_test.cc new file mode 100644 index 0000000..16c3d9c --- /dev/null +++ b/tensorflow/lite/micro/kernels/strided_slice_test.cc @@ -0,0 +1,1268 @@ +/* Copyright 2023 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/ +#include + +#include "tensorflow/lite/c/builtin_op_data.h" +#include "tensorflow/lite/c/common.h" +#include "tensorflow/lite/micro/kernels/kernel_runner.h" +#include "tensorflow/lite/micro/test_helpers.h" +#include "tensorflow/lite/micro/testing/micro_test.h" + +namespace tflite { +namespace testing { +namespace { + +template +void ValidateStridedSliceGoldens(TfLiteTensor* tensors, int tensors_size, + const T* golden, T* output, int output_len, + TfLiteStridedSliceParams* params, + const bool expect_prepare_err, int num_invoke, + float tolerance = 1e-5, + bool no_golden_data = false) { + int inputs_array_data[] = {4, 0, 1, 2, 3}; + TfLiteIntArray* inputs_array = IntArrayFromInts(inputs_array_data); + int outputs_array_data[] = {1, 4}; + TfLiteIntArray* outputs_array = IntArrayFromInts(outputs_array_data); + + const TFLMRegistration registration = Register_STRIDED_SLICE(); + micro::KernelRunner runner(registration, tensors, tensors_size, inputs_array, + outputs_array, reinterpret_cast(params)); + if (expect_prepare_err) { + TF_LITE_MICRO_EXPECT_EQ(kTfLiteError, runner.InitAndPrepare()); + return; + } else { + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, runner.InitAndPrepare()); + } + + for (int i = 0; i < num_invoke; i++) { + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, runner.Invoke()); + } + + if (no_golden_data == false) { + for (int i = 0; i < output_len; ++i) { + TF_LITE_MICRO_EXPECT_NEAR(golden[i], output[i], 1e-5f); + } + } + TF_LITE_MICRO_EXPECT(runner.ValidateTempBufferDeallocated()); +} + +void TestStridedSliceFloat(int* input_shape, int* begin_shape, int* end_shape, + int* strides_shape, + TfLiteStridedSliceParams* builtin_data, + float* input_data, const int32_t* begin_data, + const int32_t* end_data, const int32_t* strides_data, + int* output_shape, float* output_data, + const float* expected_output, + bool expect_prepare_err, int num_invoke = 1, + bool no_golden_data = false) { + 
TfLiteIntArray* input_dims = IntArrayFromInts(input_shape); + TfLiteIntArray* begin_dims = IntArrayFromInts(begin_shape); + TfLiteIntArray* end_dims = IntArrayFromInts(end_shape); + TfLiteIntArray* strides_dims = IntArrayFromInts(strides_shape); + TfLiteIntArray* output_dims = IntArrayFromInts(output_shape); + constexpr int inputs_size = 4; + constexpr int outputs_size = 1; + constexpr int tensors_size = inputs_size + outputs_size; + TfLiteTensor tensors[tensors_size] = { + CreateTensor(input_data, input_dims), + CreateTensor(begin_data, begin_dims), + CreateTensor(end_data, end_dims), + CreateTensor(strides_data, strides_dims), + CreateTensor(output_data, output_dims), + }; + + ValidateStridedSliceGoldens(tensors, tensors_size, expected_output, + output_data, ElementCount(*output_dims), + builtin_data, expect_prepare_err, num_invoke, 1.0, + no_golden_data); +} + +template +void TestStridedSliceQuantized(int* input_shape, int* begin_shape, + int* end_shape, int* strides_shape, + TfLiteStridedSliceParams* builtin_data, + const T* input_data, const int32_t* begin_data, + const int32_t* end_data, + const int32_t* strides_data, int* output_shape, + T* output_data, const T* expected_output, + bool expect_prepare_err, int num_invoke = 1, + bool no_golden_data = false) { + TfLiteIntArray* input_dims = IntArrayFromInts(input_shape); + TfLiteIntArray* begin_dims = IntArrayFromInts(begin_shape); + TfLiteIntArray* end_dims = IntArrayFromInts(end_shape); + TfLiteIntArray* strides_dims = IntArrayFromInts(strides_shape); + TfLiteIntArray* output_dims = IntArrayFromInts(output_shape); + constexpr int inputs_size = 4; + constexpr int outputs_size = 1; + constexpr int tensors_size = inputs_size + outputs_size; + int zero_point = + std::numeric_limits::max() + std::numeric_limits::min() / 2; + TfLiteTensor tensors[tensors_size] = { + CreateQuantizedTensor(input_data, input_dims, 1.0, zero_point), + CreateTensor(begin_data, begin_dims), + CreateTensor(end_data, end_dims), + 
CreateTensor(strides_data, strides_dims), + CreateQuantizedTensor(output_data, output_dims, 1.0, zero_point), + }; + + ValidateStridedSliceGoldens(tensors, tensors_size, expected_output, + output_data, ElementCount(*output_dims), + builtin_data, expect_prepare_err, num_invoke, 1.0, + no_golden_data); +} + +} // namespace +} // namespace testing +} // namespace tflite + +TF_LITE_MICRO_TESTS_BEGIN + +TF_LITE_MICRO_TEST(UnsupportedInputSize) { + int input_shape[] = {5, 2, 2, 2, 2, 2}; + int begin_shape[] = {1, 5}; + int end_shape[] = {1, 5}; + int strides_shape[] = {1, 5}; + int output_shape[] = {0}; + float input_data[] = {}; + int32_t begin_data[] = {}; + int32_t end_data[] = {}; + int32_t strides_data[] = {}; + float golden[] = {}; + float output_data[4]; + + TfLiteStridedSliceParams builtin_data = {}; + + tflite::testing::TestStridedSliceFloat( + input_shape, begin_shape, end_shape, strides_shape, &builtin_data, + input_data, begin_data, end_data, strides_data, output_shape, output_data, + golden, true); +} + +TF_LITE_MICRO_TEST(In1D) { + int input_shape[] = {1, 4}; + int begin_shape[] = {1, 1}; + int end_shape[] = {1, 1}; + int strides_shape[] = {1, 1}; + int output_shape[] = {1, 2}; + float input_data[] = {1, 2, 3, 4}; + int32_t begin_data[] = {1}; + int32_t end_data[] = {3}; + int32_t strides_data[] = {1}; + float golden[] = {2, 3}; + float output_data[4]; + + TfLiteStridedSliceParams builtin_data = {}; + + tflite::testing::TestStridedSliceFloat( + input_shape, begin_shape, end_shape, strides_shape, &builtin_data, + input_data, begin_data, end_data, strides_data, output_shape, output_data, + golden, false); +} + +TF_LITE_MICRO_TEST(In1D_EmptyOutput) { + int input_shape[] = {1, 4}; + int begin_shape[] = {1, 1}; + int end_shape[] = {1, 1}; + int strides_shape[] = {1, 1}; + int output_shape[] = {1, 0}; + float input_data[] = {1, 2, 3, 4}; + int32_t begin_data[] = {10}; + int32_t end_data[] = {3}; + int32_t strides_data[] = {1}; + float golden[] = {}; + float 
output_data[4]; + + TfLiteStridedSliceParams builtin_data = {}; + + tflite::testing::TestStridedSliceFloat( + input_shape, begin_shape, end_shape, strides_shape, &builtin_data, + input_data, begin_data, end_data, strides_data, output_shape, output_data, + golden, false); +} + +TF_LITE_MICRO_TEST(In1D_NegativeBegin) { + int input_shape[] = {1, 4}; + int begin_shape[] = {1, 1}; + int end_shape[] = {1, 1}; + int strides_shape[] = {1, 1}; + int output_shape[] = {1, 2}; + float input_data[] = {1, 2, 3, 4}; + int32_t begin_data[] = {-3}; + int32_t end_data[] = {3}; + int32_t strides_data[] = {1}; + float golden[] = {2, 3}; + float output_data[4]; + + TfLiteStridedSliceParams builtin_data = {}; + + tflite::testing::TestStridedSliceFloat( + input_shape, begin_shape, end_shape, strides_shape, &builtin_data, + input_data, begin_data, end_data, strides_data, output_shape, output_data, + golden, false); +} + +TF_LITE_MICRO_TEST(In1D_OutOfRangeBegin) { + int input_shape[] = {1, 4}; + int begin_shape[] = {1, 1}; + int end_shape[] = {1, 1}; + int strides_shape[] = {1, 1}; + int output_shape[] = {1, 3}; + float input_data[] = {1, 2, 3, 4}; + int32_t begin_data[] = {-5}; + int32_t end_data[] = {3}; + int32_t strides_data[] = {1}; + float golden[] = {1, 2, 3}; + float output_data[4]; + + TfLiteStridedSliceParams builtin_data = {}; + + tflite::testing::TestStridedSliceFloat( + input_shape, begin_shape, end_shape, strides_shape, &builtin_data, + input_data, begin_data, end_data, strides_data, output_shape, output_data, + golden, false); +} + +TF_LITE_MICRO_TEST(In1D_NegativeEnd) { + int input_shape[] = {1, 4}; + int begin_shape[] = {1, 1}; + int end_shape[] = {1, 1}; + int strides_shape[] = {1, 1}; + int output_shape[] = {1, 1}; + float input_data[] = {1, 2, 3, 4}; + int32_t begin_data[] = {1}; + int32_t end_data[] = {-2}; + int32_t strides_data[] = {1}; + float golden[] = {2}; + float output_data[4]; + + TfLiteStridedSliceParams builtin_data = {}; + + 
tflite::testing::TestStridedSliceFloat( + input_shape, begin_shape, end_shape, strides_shape, &builtin_data, + input_data, begin_data, end_data, strides_data, output_shape, output_data, + golden, false); +} + +TF_LITE_MICRO_TEST(In1D_OutOfRangeEnd) { + int input_shape[] = {1, 4}; + int begin_shape[] = {1, 1}; + int end_shape[] = {1, 1}; + int strides_shape[] = {1, 1}; + int output_shape[] = {1, 3}; + float input_data[] = {1, 2, 3, 4}; + int32_t begin_data[] = {-3}; + int32_t end_data[] = {5}; + int32_t strides_data[] = {1}; + float golden[] = {2, 3, 4}; + float output_data[4]; + + TfLiteStridedSliceParams builtin_data = {}; + + tflite::testing::TestStridedSliceFloat( + input_shape, begin_shape, end_shape, strides_shape, &builtin_data, + input_data, begin_data, end_data, strides_data, output_shape, output_data, + golden, false); +} + +TF_LITE_MICRO_TEST(In1D_BeginMask) { + int input_shape[] = {1, 4}; + int begin_shape[] = {1, 1}; + int end_shape[] = {1, 1}; + int strides_shape[] = {1, 1}; + int output_shape[] = {1, 3}; + float input_data[] = {1, 2, 3, 4}; + int32_t begin_data[] = {1}; + int32_t end_data[] = {3}; + int32_t strides_data[] = {1}; + float golden[] = {1, 2, 3}; + float output_data[4]; + + TfLiteStridedSliceParams builtin_data = {1, 0, 0, 0, 0, false}; + + tflite::testing::TestStridedSliceFloat( + input_shape, begin_shape, end_shape, strides_shape, &builtin_data, + input_data, begin_data, end_data, strides_data, output_shape, output_data, + golden, false); +} + +TF_LITE_MICRO_TEST(In1D_NegativeBeginNegativeStride) { + int input_shape[] = {1, 4}; + int begin_shape[] = {1, 1}; + int end_shape[] = {1, 1}; + int strides_shape[] = {1, 1}; + int output_shape[] = {1, 1}; + float input_data[] = {1, 2, 3, 4}; + int32_t begin_data[] = {-2}; + int32_t end_data[] = {-3}; + int32_t strides_data[] = {-1}; + float golden[] = {3}; + float output_data[4]; + + TfLiteStridedSliceParams builtin_data = {}; + + tflite::testing::TestStridedSliceFloat( + input_shape, 
begin_shape, end_shape, strides_shape, &builtin_data, + input_data, begin_data, end_data, strides_data, output_shape, output_data, + golden, false); +} + +TF_LITE_MICRO_TEST(In1D_OutOfRangeBeginNegativeStride) { + int input_shape[] = {1, 4}; + int begin_shape[] = {1, 1}; + int end_shape[] = {1, 1}; + int strides_shape[] = {1, 1}; + int output_shape[] = {1, 1}; + float input_data[] = {1, 2, 3, 4}; + int32_t begin_data[] = {5}; + int32_t end_data[] = {2}; + int32_t strides_data[] = {-1}; + float golden[] = {4}; + float output_data[4]; + + TfLiteStridedSliceParams builtin_data = {}; + + tflite::testing::TestStridedSliceFloat( + input_shape, begin_shape, end_shape, strides_shape, &builtin_data, + input_data, begin_data, end_data, strides_data, output_shape, output_data, + golden, false); +} + +TF_LITE_MICRO_TEST(In1D_NegativeEndNegativeStride) { + int input_shape[] = {1, 4}; + int begin_shape[] = {1, 1}; + int end_shape[] = {1, 1}; + int strides_shape[] = {1, 1}; + int output_shape[] = {1, 2}; + float input_data[] = {1, 2, 3, 4}; + int32_t begin_data[] = {2}; + int32_t end_data[] = {-4}; + int32_t strides_data[] = {-1}; + float golden[] = {3, 2}; + float output_data[4]; + + TfLiteStridedSliceParams builtin_data = {}; + + tflite::testing::TestStridedSliceFloat( + input_shape, begin_shape, end_shape, strides_shape, &builtin_data, + input_data, begin_data, end_data, strides_data, output_shape, output_data, + golden, false); +} + +TF_LITE_MICRO_TEST(In1D_OutOfRangeEndNegativeStride) { + int input_shape[] = {1, 4}; + int begin_shape[] = {1, 1}; + int end_shape[] = {1, 1}; + int strides_shape[] = {1, 1}; + int output_shape[] = {1, 2}; + float input_data[] = {1, 2, 3, 4}; + int32_t begin_data[] = {-3}; + int32_t end_data[] = {-5}; + int32_t strides_data[] = {-1}; + float golden[] = {2, 1}; + float output_data[4]; + + TfLiteStridedSliceParams builtin_data = {}; + + tflite::testing::TestStridedSliceFloat( + input_shape, begin_shape, end_shape, strides_shape, &builtin_data, + 
input_data, begin_data, end_data, strides_data, output_shape, output_data, + golden, false); +} + +TF_LITE_MICRO_TEST(In1D_EndMask) { + int input_shape[] = {1, 4}; + int begin_shape[] = {1, 1}; + int end_shape[] = {1, 1}; + int strides_shape[] = {1, 1}; + int output_shape[] = {1, 3}; + float input_data[] = {1, 2, 3, 4}; + int32_t begin_data[] = {1}; + int32_t end_data[] = {3}; + int32_t strides_data[] = {1}; + float golden[] = {2, 3, 4}; + float output_data[4]; + + TfLiteStridedSliceParams builtin_data = {0, 1, 0, 0, 0, false}; + + tflite::testing::TestStridedSliceFloat( + input_shape, begin_shape, end_shape, strides_shape, &builtin_data, + input_data, begin_data, end_data, strides_data, output_shape, output_data, + golden, false); +} + +TF_LITE_MICRO_TEST(In1D_NegStride) { + int input_shape[] = {1, 3}; + int begin_shape[] = {1, 1}; + int end_shape[] = {1, 1}; + int strides_shape[] = {1, 1}; + int output_shape[] = {1, 3}; + float input_data[] = {1, 2, 3}; + int32_t begin_data[] = {-1}; + int32_t end_data[] = {-4}; + int32_t strides_data[] = {-1}; + float golden[] = {3, 2, 1}; + float output_data[4]; + + TfLiteStridedSliceParams builtin_data = {}; + + tflite::testing::TestStridedSliceFloat( + input_shape, begin_shape, end_shape, strides_shape, &builtin_data, + input_data, begin_data, end_data, strides_data, output_shape, output_data, + golden, false); +} + +TF_LITE_MICRO_TEST(In1D_EvenLenStride2) { + int input_shape[] = {1, 2}; + int begin_shape[] = {1, 1}; + int end_shape[] = {1, 1}; + int strides_shape[] = {1, 1}; + int output_shape[] = {1, 1}; + float input_data[] = {1, 2, 3, 4}; + int32_t begin_data[] = {0}; + int32_t end_data[] = {4}; + int32_t strides_data[] = {2}; + float golden[] = {1}; + float output_data[4]; + + TfLiteStridedSliceParams builtin_data = {}; + + tflite::testing::TestStridedSliceFloat( + input_shape, begin_shape, end_shape, strides_shape, &builtin_data, + input_data, begin_data, end_data, strides_data, output_shape, output_data, + golden, 
false); +} + +TF_LITE_MICRO_TEST(In1D_OddLenStride2) { + int input_shape[] = {1, 3}; + int begin_shape[] = {1, 1}; + int end_shape[] = {1, 1}; + int strides_shape[] = {1, 1}; + int output_shape[] = {1, 2}; + float input_data[] = {1, 2, 3, 4}; + int32_t begin_data[] = {0}; + int32_t end_data[] = {3}; + int32_t strides_data[] = {2}; + float golden[] = {1, 3}; + float output_data[4]; + + TfLiteStridedSliceParams builtin_data = {}; + + tflite::testing::TestStridedSliceFloat( + input_shape, begin_shape, end_shape, strides_shape, &builtin_data, + input_data, begin_data, end_data, strides_data, output_shape, output_data, + golden, false); +} + +TF_LITE_MICRO_TEST(In2D_Identity) { + int input_shape[] = {2, 2, 3}; + int begin_shape[] = {1, 2}; + int end_shape[] = {1, 2}; + int strides_shape[] = {1, 2}; + int output_shape[] = {2, 2, 3}; + float input_data[] = {1, 2, 3, 4, 5, 6}; + int32_t begin_data[] = {0, 0}; + int32_t end_data[] = {2, 3}; + int32_t strides_data[] = {1, 1}; + float golden[] = {1, 2, 3, 4, 5, 6}; + float output_data[8]; + + TfLiteStridedSliceParams builtin_data = {}; + + tflite::testing::TestStridedSliceFloat( + input_shape, begin_shape, end_shape, strides_shape, &builtin_data, + input_data, begin_data, end_data, strides_data, output_shape, output_data, + golden, false); +} + +TF_LITE_MICRO_TEST(In2D) { + int input_shape[] = {2, 2, 3}; + int begin_shape[] = {1, 2}; + int end_shape[] = {1, 2}; + int strides_shape[] = {1, 2}; + int output_shape[] = {2, 1, 2}; + float input_data[] = {1, 2, 3, 4, 5, 6}; + int32_t begin_data[] = {1, 0}; + int32_t end_data[] = {2, 2}; + int32_t strides_data[] = {1, 1}; + float golden[] = {4, 5}; + float output_data[8]; + + TfLiteStridedSliceParams builtin_data = {}; + + tflite::testing::TestStridedSliceFloat( + input_shape, begin_shape, end_shape, strides_shape, &builtin_data, + input_data, begin_data, end_data, strides_data, output_shape, output_data, + golden, false); +} + +TF_LITE_MICRO_TEST(In2D_Stride2) { + int input_shape[] 
= {2, 2, 3}; + int begin_shape[] = {1, 2}; + int end_shape[] = {1, 2}; + int strides_shape[] = {1, 2}; + int output_shape[] = {2, 1, 2}; + float input_data[] = {1, 2, 3, 4, 5, 6}; + int32_t begin_data[] = {0, 0}; + int32_t end_data[] = {2, 3}; + int32_t strides_data[] = {2, 2}; + float golden[] = {1, 3}; + float output_data[8]; + + TfLiteStridedSliceParams builtin_data = {}; + + tflite::testing::TestStridedSliceFloat( + input_shape, begin_shape, end_shape, strides_shape, &builtin_data, + input_data, begin_data, end_data, strides_data, output_shape, output_data, + golden, false); +} + +TF_LITE_MICRO_TEST(In2D_NegStride) { + int input_shape[] = {2, 2, 3}; + int begin_shape[] = {1, 2}; + int end_shape[] = {1, 2}; + int strides_shape[] = {1, 2}; + int output_shape[] = {2, 1, 3}; + float input_data[] = {1, 2, 3, 4, 5, 6}; + int32_t begin_data[] = {1, -1}; + int32_t end_data[] = {2, -4}; + int32_t strides_data[] = {2, -1}; + float golden[] = {6, 5, 4}; + float output_data[8]; + + TfLiteStridedSliceParams builtin_data = {}; + + tflite::testing::TestStridedSliceFloat( + input_shape, begin_shape, end_shape, strides_shape, &builtin_data, + input_data, begin_data, end_data, strides_data, output_shape, output_data, + golden, false); +} + +TF_LITE_MICRO_TEST(In2D_BeginMask) { + int input_shape[] = {2, 2, 3}; + int begin_shape[] = {1, 2}; + int end_shape[] = {1, 2}; + int strides_shape[] = {1, 2}; + int output_shape[] = {2, 2, 2}; + float input_data[] = {1, 2, 3, 4, 5, 6}; + int32_t begin_data[] = {1, 0}; + int32_t end_data[] = {2, 2}; + int32_t strides_data[] = {1, 1}; + float golden[] = {1, 2, 4, 5}; + float output_data[8]; + + TfLiteStridedSliceParams builtin_data = {1, 0, 0, 0, 0, false}; + + tflite::testing::TestStridedSliceFloat( + input_shape, begin_shape, end_shape, strides_shape, &builtin_data, + input_data, begin_data, end_data, strides_data, output_shape, output_data, + golden, false); +} + +TF_LITE_MICRO_TEST(In2D_EndMask) { + int input_shape[] = {2, 2, 3}; + int 
begin_shape[] = {1, 2}; + int end_shape[] = {1, 2}; + int strides_shape[] = {1, 2}; + int output_shape[] = {2, 1, 3}; + float input_data[] = {1, 2, 3, 4, 5, 6}; + int32_t begin_data[] = {1, 0}; + int32_t end_data[] = {2, 2}; + int32_t strides_data[] = {1, 1}; + float golden[] = {4, 5, 6}; + float output_data[8]; + + TfLiteStridedSliceParams builtin_data = {0, 2, 0, 0, 0, false}; + + tflite::testing::TestStridedSliceFloat( + input_shape, begin_shape, end_shape, strides_shape, &builtin_data, + input_data, begin_data, end_data, strides_data, output_shape, output_data, + golden, false); +} + +TF_LITE_MICRO_TEST(In2D_NegStrideBeginMask) { + int input_shape[] = {2, 2, 3}; + int begin_shape[] = {1, 2}; + int end_shape[] = {1, 2}; + int strides_shape[] = {1, 2}; + int output_shape[] = {2, 1, 3}; + float input_data[] = {1, 2, 3, 4, 5, 6}; + int32_t begin_data[] = {1, -2}; + int32_t end_data[] = {2, -4}; + int32_t strides_data[] = {1, -1}; + float golden[] = {6, 5, 4}; + float output_data[8]; + + TfLiteStridedSliceParams builtin_data = {2, 0, 0, 0, 0, false}; + + tflite::testing::TestStridedSliceFloat( + input_shape, begin_shape, end_shape, strides_shape, &builtin_data, + input_data, begin_data, end_data, strides_data, output_shape, output_data, + golden, false); +} + +TF_LITE_MICRO_TEST(In2D_NegStrideEndMask) { + int input_shape[] = {2, 2, 3}; + int begin_shape[] = {1, 2}; + int end_shape[] = {1, 2}; + int strides_shape[] = {1, 2}; + int output_shape[] = {2, 1, 2}; + float input_data[] = {1, 2, 3, 4, 5, 6}; + int32_t begin_data[] = {1, -2}; + int32_t end_data[] = {2, -3}; + int32_t strides_data[] = {1, -1}; + float golden[] = {5, 4}; + float output_data[8]; + + TfLiteStridedSliceParams builtin_data = {0, 2, 0, 0, 0, false}; + + tflite::testing::TestStridedSliceFloat( + input_shape, begin_shape, end_shape, strides_shape, &builtin_data, + input_data, begin_data, end_data, strides_data, output_shape, output_data, + golden, false); +} + +TF_LITE_MICRO_TEST(In3D_Identity) { + 
int input_shape[] = {3, 2, 3, 2}; + int begin_shape[] = {1, 3}; + int end_shape[] = {1, 3}; + int strides_shape[] = {1, 3}; + int output_shape[] = {3, 2, 3, 2}; + float input_data[] = {1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12}; + int32_t begin_data[] = {0, 0, 0}; + int32_t end_data[] = {2, 3, 2}; + int32_t strides_data[] = {1, 1, 1}; + float golden[] = {1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12}; + float output_data[16]; + + TfLiteStridedSliceParams builtin_data = {}; + + tflite::testing::TestStridedSliceFloat( + input_shape, begin_shape, end_shape, strides_shape, &builtin_data, + input_data, begin_data, end_data, strides_data, output_shape, output_data, + golden, false); +} + +TF_LITE_MICRO_TEST(In3D_NegStride) { + int input_shape[] = {3, 2, 3, 2}; + int begin_shape[] = {1, 3}; + int end_shape[] = {1, 3}; + int strides_shape[] = {1, 3}; + int output_shape[] = {3, 2, 3, 2}; + float input_data[] = {1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12}; + int32_t begin_data[] = {0, 0, 0}; + int32_t end_data[] = {2, 3, 2}; + int32_t strides_data[] = {1, 1, 1}; + float golden[] = {1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12}; + float output_data[16]; + + TfLiteStridedSliceParams builtin_data = {}; + + tflite::testing::TestStridedSliceFloat( + input_shape, begin_shape, end_shape, strides_shape, &builtin_data, + input_data, begin_data, end_data, strides_data, output_shape, output_data, + golden, false); +} + +TF_LITE_MICRO_TEST(In3D_Strided2) { + int input_shape[] = {3, 2, 3, 2}; + int begin_shape[] = {1, 3}; + int end_shape[] = {1, 3}; + int strides_shape[] = {1, 3}; + int output_shape[] = {3, 1, 2, 1}; + float input_data[] = {1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12}; + int32_t begin_data[] = {0, 0, 0}; + int32_t end_data[] = {2, 3, 2}; + int32_t strides_data[] = {2, 2, 2}; + float golden[] = {1, 5}; + float output_data[16]; + + TfLiteStridedSliceParams builtin_data = {}; + + tflite::testing::TestStridedSliceFloat( + input_shape, begin_shape, end_shape, strides_shape, &builtin_data, + input_data, 
begin_data, end_data, strides_data, output_shape, output_data, + golden, false); +} + +TF_LITE_MICRO_TEST(In1D_ShrinkAxisMask1) { + int input_shape[] = {3, 2, 3, 2}; + int begin_shape[] = {1, 3}; + int end_shape[] = {1, 3}; + int strides_shape[] = {1, 3}; + int output_shape[] = {3, 2, 3, 2}; + float input_data[] = {1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12}; + int32_t begin_data[] = {0, 0, 0}; + int32_t end_data[] = {2, 3, 2}; + int32_t strides_data[] = {1, 1, 1}; + float golden[] = {1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12}; + float output_data[16]; + + TfLiteStridedSliceParams builtin_data = {}; + + tflite::testing::TestStridedSliceFloat( + input_shape, begin_shape, end_shape, strides_shape, &builtin_data, + input_data, begin_data, end_data, strides_data, output_shape, output_data, + golden, false); +} + +TF_LITE_MICRO_TEST(In1D_ShrinkAxisMask1_NegativeSlice) { + int input_shape[] = {1, 4}; + int begin_shape[] = {1, 1}; + int end_shape[] = {1, 1}; + int strides_shape[] = {1, 1}; + int output_shape[] = {0}; + float input_data[] = {0, 1, 2, 3}; + int32_t begin_data[] = {-1}; + int32_t end_data[] = {0}; + int32_t strides_data[] = {1}; + float golden[] = {3}; + float output_data[4]; + + TfLiteStridedSliceParams builtin_data = {0, 0, 0, 0, 1, false}; + + tflite::testing::TestStridedSliceFloat( + input_shape, begin_shape, end_shape, strides_shape, &builtin_data, + input_data, begin_data, end_data, strides_data, output_shape, output_data, + golden, false); +} + +TF_LITE_MICRO_TEST(In2D_ShrinkAxis3_NegativeSlice) { + int input_shape[] = {2, 4, 1}; + int begin_shape[] = {1, 2}; + int end_shape[] = {1, 2}; + int strides_shape[] = {1, 2}; + int output_shape[] = {0}; + float input_data[] = {0, 1, 2, 3}; + int32_t begin_data[] = {-2, -1}; + int32_t end_data[] = {-1, 0}; + int32_t strides_data[] = {1, 1}; + float golden[] = {2}; + float output_data[4]; + + TfLiteStridedSliceParams builtin_data = {0, 0, 0, 0, 3, false}; + + tflite::testing::TestStridedSliceFloat( + input_shape, 
begin_shape, end_shape, strides_shape, &builtin_data, + input_data, begin_data, end_data, strides_data, output_shape, output_data, + golden, false); +} + +TF_LITE_MICRO_TEST(In2D_ShrinkAxis2_BeginEndAxis1_NegativeSlice) { + int input_shape[] = {2, 4, 1}; + int begin_shape[] = {1, 2}; + int end_shape[] = {1, 2}; + int strides_shape[] = {1, 2}; + int output_shape[] = {1, 4}; + float input_data[] = {0, 1, 2, 3}; + int32_t begin_data[] = {0, -1}; + int32_t end_data[] = {0, 0}; + int32_t strides_data[] = {1, 1}; + float golden[] = {0, 1, 2, 3}; + float output_data[4]; + + TfLiteStridedSliceParams builtin_data = {1, 1, 0, 0, 2, false}; + + tflite::testing::TestStridedSliceFloat( + input_shape, begin_shape, end_shape, strides_shape, &builtin_data, + input_data, begin_data, end_data, strides_data, output_shape, output_data, + golden, false); +} + +TF_LITE_MICRO_TEST(In1D_BeginMaskShrinkAxisMask1) { + int input_shape[] = {1, 4}; + int begin_shape[] = {1, 1}; + int end_shape[] = {1, 1}; + int strides_shape[] = {1, 1}; + int output_shape[] = {0}; + float input_data[] = {1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12}; + int32_t begin_data[] = {1}; + int32_t end_data[] = {1}; + int32_t strides_data[] = {1}; + float golden[] = {1}; + float output_data[4]; + + TfLiteStridedSliceParams builtin_data = {1, 0, 0, 0, 1, false}; + + tflite::testing::TestStridedSliceFloat( + input_shape, begin_shape, end_shape, strides_shape, &builtin_data, + input_data, begin_data, end_data, strides_data, output_shape, output_data, + golden, false); +} + +TF_LITE_MICRO_TEST(In2D_ShrinkAxisMask1) { + int input_shape[] = {2, 2, 3}; + int begin_shape[] = {1, 2}; + int end_shape[] = {1, 2}; + int strides_shape[] = {1, 2}; + int output_shape[] = {1, 3}; + float input_data[] = {1, 2, 3, 4, 5, 6}; + int32_t begin_data[] = {0, 0}; + int32_t end_data[] = {1, 3}; + int32_t strides_data[] = {1, 1}; + float golden[] = {1, 2, 3}; + float output_data[6]; + + TfLiteStridedSliceParams builtin_data = {0, 0, 0, 0, 1, false}; + 
+ tflite::testing::TestStridedSliceFloat( + input_shape, begin_shape, end_shape, strides_shape, &builtin_data, + input_data, begin_data, end_data, strides_data, output_shape, output_data, + golden, false); +} + +TF_LITE_MICRO_TEST(In2D_ShrinkAxisMask2) { + int input_shape[] = {2, 2, 3}; + int begin_shape[] = {1, 2}; + int end_shape[] = {1, 2}; + int strides_shape[] = {1, 2}; + int output_shape[] = {1, 2}; + float input_data[] = {1, 2, 3, 4, 5, 6}; + int32_t begin_data[] = {0, 0}; + int32_t end_data[] = {2, 1}; + int32_t strides_data[] = {1, 1}; + float golden[] = {1, 4}; + float output_data[6]; + + TfLiteStridedSliceParams builtin_data = {0, 0, 0, 0, 2, false}; + + tflite::testing::TestStridedSliceFloat( + input_shape, begin_shape, end_shape, strides_shape, &builtin_data, + input_data, begin_data, end_data, strides_data, output_shape, output_data, + golden, false); +} + +TF_LITE_MICRO_TEST(In2D_ShrinkAxisMask3) { + int input_shape[] = {2, 2, 3}; + int begin_shape[] = {1, 2}; + int end_shape[] = {1, 2}; + int strides_shape[] = {1, 2}; + int output_shape[] = {0}; + float input_data[] = {1, 2, 3, 4, 5, 6}; + int32_t begin_data[] = {0, 0}; + int32_t end_data[] = {1, 1}; + int32_t strides_data[] = {1, 1}; + float golden[] = {1}; + float output_data[6]; + + TfLiteStridedSliceParams builtin_data = {0, 0, 0, 0, 3, false}; + + tflite::testing::TestStridedSliceFloat( + input_shape, begin_shape, end_shape, strides_shape, &builtin_data, + input_data, begin_data, end_data, strides_data, output_shape, output_data, + golden, false); +} + +TF_LITE_MICRO_TEST(In3D_IdentityShrinkAxis1) { + int input_shape[] = {3, 2, 3, 2}; + int begin_shape[] = {1, 3}; + int end_shape[] = {1, 3}; + int strides_shape[] = {1, 3}; + int output_shape[] = {2, 3, 2}; + float input_data[] = {1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12}; + int32_t begin_data[] = {0, 0, 0}; + int32_t end_data[] = {1, 3, 2}; + int32_t strides_data[] = {1, 1, 1}; + float golden[] = {1, 2, 3, 4, 5, 6}; + float output_data[16]; + + 
TfLiteStridedSliceParams builtin_data = {0, 0, 0, 0, 1, false}; + + tflite::testing::TestStridedSliceFloat( + input_shape, begin_shape, end_shape, strides_shape, &builtin_data, + input_data, begin_data, end_data, strides_data, output_shape, output_data, + golden, false); +} + +TF_LITE_MICRO_TEST(In3D_IdentityShrinkAxis2) { + int input_shape[] = {3, 2, 3, 2}; + int begin_shape[] = {1, 3}; + int end_shape[] = {1, 3}; + int strides_shape[] = {1, 3}; + int output_shape[] = {2, 2, 2}; + float input_data[] = {1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12}; + int32_t begin_data[] = {0, 0, 0}; + int32_t end_data[] = {2, 1, 2}; + int32_t strides_data[] = {1, 1, 1}; + float golden[] = {1, 2, 7, 8}; + float output_data[16]; + + TfLiteStridedSliceParams builtin_data = {0, 0, 0, 0, 2, false}; + + tflite::testing::TestStridedSliceFloat( + input_shape, begin_shape, end_shape, strides_shape, &builtin_data, + input_data, begin_data, end_data, strides_data, output_shape, output_data, + golden, false); +} + +TF_LITE_MICRO_TEST(In3D_IdentityShrinkAxis3) { + int input_shape[] = {3, 2, 3, 2}; + int begin_shape[] = {1, 3}; + int end_shape[] = {1, 3}; + int strides_shape[] = {1, 3}; + int output_shape[] = {1, 2}; + float input_data[] = {1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12}; + int32_t begin_data[] = {0, 0, 0}; + int32_t end_data[] = {1, 1, 2}; + int32_t strides_data[] = {1, 1, 1}; + float golden[] = {1, 2}; + float output_data[16]; + + TfLiteStridedSliceParams builtin_data = {0, 0, 0, 0, 3, false}; + + tflite::testing::TestStridedSliceFloat( + input_shape, begin_shape, end_shape, strides_shape, &builtin_data, + input_data, begin_data, end_data, strides_data, output_shape, output_data, + golden, false); +} + +TF_LITE_MICRO_TEST(In3D_IdentityShrinkAxis4) { + int input_shape[] = {3, 2, 3, 2}; + int begin_shape[] = {1, 3}; + int end_shape[] = {1, 3}; + int strides_shape[] = {1, 3}; + int output_shape[] = {2, 2, 3}; + float input_data[] = {1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12}; + int32_t 
begin_data[] = {0, 0, 0}; + int32_t end_data[] = {2, 3, 2}; + int32_t strides_data[] = {1, 1, 1}; + float golden[] = {1, 3, 5, 7, 9, 11}; + float output_data[16]; + + TfLiteStridedSliceParams builtin_data = {0, 0, 0, 0, 4, false}; + + tflite::testing::TestStridedSliceFloat( + input_shape, begin_shape, end_shape, strides_shape, &builtin_data, + input_data, begin_data, end_data, strides_data, output_shape, output_data, + golden, false); +} + +TF_LITE_MICRO_TEST(In3D_IdentityShrinkAxis5) { + int input_shape[] = {3, 2, 3, 2}; + int begin_shape[] = {1, 3}; + int end_shape[] = {1, 3}; + int strides_shape[] = {1, 3}; + int output_shape[] = {1, 3}; + float input_data[] = {1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12}; + int32_t begin_data[] = {0, 0, 0}; + int32_t end_data[] = {1, 3, 1}; + int32_t strides_data[] = {1, 1, 1}; + float golden[] = {1, 3, 5}; + float output_data[16]; + + TfLiteStridedSliceParams builtin_data = {0, 0, 0, 0, 5, false}; + + tflite::testing::TestStridedSliceFloat( + input_shape, begin_shape, end_shape, strides_shape, &builtin_data, + input_data, begin_data, end_data, strides_data, output_shape, output_data, + golden, false); +} + +TF_LITE_MICRO_TEST(In3D_IdentityShrinkAxis6) { + int input_shape[] = {3, 2, 3, 2}; + int begin_shape[] = {1, 3}; + int end_shape[] = {1, 3}; + int strides_shape[] = {1, 3}; + int output_shape[] = {1, 2}; + float input_data[] = {1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12}; + int32_t begin_data[] = {0, 0, 0}; + int32_t end_data[] = {2, 1, 1}; + int32_t strides_data[] = {1, 1, 1}; + float golden[] = {1, 7}; + float output_data[16]; + + TfLiteStridedSliceParams builtin_data = {0, 0, 0, 0, 6, false}; + + tflite::testing::TestStridedSliceFloat( + input_shape, begin_shape, end_shape, strides_shape, &builtin_data, + input_data, begin_data, end_data, strides_data, output_shape, output_data, + golden, false); +} + +TF_LITE_MICRO_TEST(In3D_IdentityShrinkAxis7) { + int input_shape[] = {3, 2, 3, 2}; + int begin_shape[] = {1, 3}; + int end_shape[] 
= {1, 3}; + int strides_shape[] = {1, 3}; + int output_shape[] = {0}; + float input_data[] = {1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12}; + int32_t begin_data[] = {0, 0, 0}; + int32_t end_data[] = {1, 1, 1}; + int32_t strides_data[] = {1, 1, 1}; + float golden[] = {1}; + float output_data[16]; + + TfLiteStridedSliceParams builtin_data = {0, 0, 0, 0, 7, false}; + + tflite::testing::TestStridedSliceFloat( + input_shape, begin_shape, end_shape, strides_shape, &builtin_data, + input_data, begin_data, end_data, strides_data, output_shape, output_data, + golden, false); +} + +// This tests catches a very subtle bug that was fixed by cl/188403234. +TF_LITE_MICRO_TEST(RunTwice) { + int input_shape[] = {2, 2, 3}; + int begin_shape[] = {1, 2}; + int end_shape[] = {1, 2}; + int strides_shape[] = {1, 2}; + int output_shape[] = {2, 2, 2}; + float input_data[] = {1, 2, 3, 4, 5, 6}; + int32_t begin_data[] = {1, 0}; + int32_t end_data[] = {2, 2}; + int32_t strides_data[] = {1, 1}; + float golden[] = {1, 2, 4, 5}; + float output_data[16]; + + TfLiteStridedSliceParams builtin_data = {1, 0, 0, 0, 0, false}; + + tflite::testing::TestStridedSliceFloat( + input_shape, begin_shape, end_shape, strides_shape, &builtin_data, + input_data, begin_data, end_data, strides_data, output_shape, output_data, + golden, false, 2); +} + +TF_LITE_MICRO_TEST(In3D_IdentityShrinkAxis1int8) { + int input_shape[] = {3, 2, 3, 2}; + int begin_shape[] = {1, 3}; + int end_shape[] = {1, 3}; + int strides_shape[] = {1, 3}; + int output_shape[] = {2, 3, 2}; + int8_t input_data[] = {1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12}; + int32_t begin_data[] = {0, 0, 0}; + int32_t end_data[] = {1, 3, 2}; + int32_t strides_data[] = {1, 1, 1}; + int8_t golden[] = {1, 2, 3, 4, 5, 6}; + int8_t output_data[12]; + + TfLiteStridedSliceParams builtin_data = {0, 0, 0, 0, 1, false}; + + tflite::testing::TestStridedSliceQuantized( + input_shape, begin_shape, end_shape, strides_shape, &builtin_data, + input_data, begin_data, end_data, 
strides_data, output_shape, output_data, + golden, false); +} + +TF_LITE_MICRO_TEST(In3D_IdentityShrinkAxis1int16) { + int input_shape[] = {3, 2, 3, 2}; + int begin_shape[] = {1, 3}; + int end_shape[] = {1, 3}; + int strides_shape[] = {1, 3}; + int output_shape[] = {2, 3, 2}; + int16_t input_data[] = {1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12}; + int32_t begin_data[] = {0, 0, 0}; + int32_t end_data[] = {1, 3, 2}; + int32_t strides_data[] = {1, 1, 1}; + int16_t golden[] = {1, 2, 3, 4, 5, 6}; + int16_t output_data[12]; + + TfLiteStridedSliceParams builtin_data = {0, 0, 0, 0, 1, false}; + + tflite::testing::TestStridedSliceQuantized( + input_shape, begin_shape, end_shape, strides_shape, &builtin_data, + input_data, begin_data, end_data, strides_data, output_shape, output_data, + golden, false); +} + +TF_LITE_MICRO_TEST(In3D_IdentityShrinkAxis1int32) { + int input_shape[] = {3, 2, 3, 2}; + int begin_shape[] = {1, 3}; + int end_shape[] = {1, 3}; + int strides_shape[] = {1, 3}; + int output_shape[] = {2, 3, 2}; + int32_t input_data[] = {1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12}; + int32_t begin_data[] = {0, 0, 0}; + int32_t end_data[] = {1, 3, 2}; + int32_t strides_data[] = {1, 1, 1}; + int32_t golden[] = {1, 2, 3, 4, 5, 6}; + int32_t output_data[12]; + + TfLiteStridedSliceParams builtin_data = {0, 0, 0, 0, 1, false}; + + tflite::testing::TestStridedSliceQuantized( + input_shape, begin_shape, end_shape, strides_shape, &builtin_data, + input_data, begin_data, end_data, strides_data, output_shape, output_data, + golden, false); +} + +TF_LITE_MICRO_TEST(In3D_Strided2int32) { + int input_shape[] = {3, 2, 3, 2}; + int begin_shape[] = {1, 3}; + int end_shape[] = {1, 3}; + int strides_shape[] = {1, 3}; + int output_shape[] = {3, 1, 2, 1}; + int32_t input_data[] = {1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12}; + int32_t begin_data[] = {0, 0, 0}; + int32_t end_data[] = {2, 3, 2}; + int32_t strides_data[] = {2, 2, 2}; + int32_t golden[] = {1, 5}; + int32_t output_data[16]; + + 
TfLiteStridedSliceParams builtin_data = {}; + + tflite::testing::TestStridedSliceQuantized( + input_shape, begin_shape, end_shape, strides_shape, &builtin_data, + input_data, begin_data, end_data, strides_data, output_shape, output_data, + golden, false); +} + +TF_LITE_MICRO_TEST(In3D_Strided2bool) { + int input_shape[] = {3, 2, 3, 2}; + int begin_shape[] = {1, 3}; + int end_shape[] = {1, 3}; + int strides_shape[] = {1, 3}; + int output_shape[] = {3, 1, 2, 1}; + bool input_data[] = {true, false, false, false, true, false, + false, false, false, false, false, false}; + int32_t begin_data[] = {0, 0, 0}; + int32_t end_data[] = {2, 3, 2}; + int32_t strides_data[] = {2, 2, 2}; + bool golden[] = {true, true}; + bool output_data[16]; + + TfLiteStridedSliceParams builtin_data = {}; + + tflite::testing::TestStridedSliceQuantized( + input_shape, begin_shape, end_shape, strides_shape, &builtin_data, + input_data, begin_data, end_data, strides_data, output_shape, output_data, + golden, false); +} + +TF_LITE_MICRO_TEST(MinusThreeMinusFourMinusOne) { + int input_shape[] = {1, 4}; + int begin_shape[] = {1, 1}; + int end_shape[] = {1, 1}; + int strides_shape[] = {1, 1}; + int output_shape[] = {1, 1}; + float input_data[] = {1, 2, 3, 4}; + int32_t begin_data[] = {-3}; + int32_t end_data[] = {-4}; + int32_t strides_data[] = {-1}; + float golden[] = {2}; + float output_data[1]; + + TfLiteStridedSliceParams builtin_data = {0, 0, 0, 0, 0, false}; + + tflite::testing::TestStridedSliceFloat( + input_shape, begin_shape, end_shape, strides_shape, &builtin_data, + input_data, begin_data, end_data, strides_data, output_shape, output_data, + golden, false); +} + +TF_LITE_MICRO_TEST(MinusFourMinusThreeOne) { + int input_shape[] = {1, 4}; + int begin_shape[] = {1, 1}; + int end_shape[] = {1, 1}; + int strides_shape[] = {1, 1}; + int output_shape[] = {1, 1}; + float input_data[] = {1, 2, 3, 4}; + int32_t begin_data[] = {-4}; + int32_t end_data[] = {-3}; + int32_t strides_data[] = {1}; + float 
golden[] = {1}; + float output_data[1]; + + TfLiteStridedSliceParams builtin_data = {0, 0, 0, 0, 0, false}; + + tflite::testing::TestStridedSliceFloat( + input_shape, begin_shape, end_shape, strides_shape, &builtin_data, + input_data, begin_data, end_data, strides_data, output_shape, output_data, + golden, false); +} + +TF_LITE_MICRO_TEST(In3D_BackwardSmallBeginEndMask) { + int input_shape[] = {1, 1, 1, 2}; + int begin_shape[] = {1, 1}; + int end_shape[] = {1, 1}; + int strides_shape[] = {1, 1}; + int output_shape[] = {1, 0}; + float input_data[] = {1, 2}; + int32_t begin_data[] = {1}; + int32_t end_data[] = {0}; + int32_t strides_data[] = {1}; + float* golden = nullptr; + float* output_data = nullptr; + + TfLiteStridedSliceParams builtin_data = {0, 1, 0, 0, 0, false}; + + tflite::testing::TestStridedSliceFloat( + input_shape, begin_shape, end_shape, strides_shape, &builtin_data, + input_data, begin_data, end_data, strides_data, output_shape, output_data, + golden, false); +} + +TF_LITE_MICRO_TEST(OneOneOne) { + int input_shape[] = {1, 1}; + int begin_shape[] = {1, 1}; + int end_shape[] = {1, 1}; + int strides_shape[] = {1, 1}; + int output_shape[] = {1, 0}; + float input_data[] = {1, 2}; + int32_t begin_data[] = {1}; + int32_t end_data[] = {1}; + int32_t strides_data[] = {1}; + float* golden = nullptr; + float* output_data = nullptr; + + TfLiteStridedSliceParams builtin_data = {0, 0, 0, 0, 0, false}; + + tflite::testing::TestStridedSliceFloat( + input_shape, begin_shape, end_shape, strides_shape, &builtin_data, + input_data, begin_data, end_data, strides_data, output_shape, output_data, + golden, false); +} + +TF_LITE_MICRO_TEST(StrideOutOfBounds) { + int input_shape[] = {1, 1}; + int begin_shape[] = {1, 1}; + int end_shape[] = {1, 1}; + int strides_shape[] = {1, 1}; + int output_shape[] = {0}; + float input_data[] = {}; + int32_t begin_data[] = {1}; + int32_t end_data[] = {4}; + int32_t strides_data[] = {7}; + float golden[] = {1}; + float output_data[16]; + + 
TfLiteStridedSliceParams builtin_data = {0, 0, 0, 0, 1, false}; + + tflite::testing::TestStridedSliceFloat( + input_shape, begin_shape, end_shape, strides_shape, &builtin_data, + input_data, begin_data, end_data, strides_data, output_shape, output_data, + golden, false, 1, true); +} + +TF_LITE_MICRO_TEST(OutOfBounds) { + int input_shape[] = {1, 1}; + int begin_shape[] = {1, 1}; + int end_shape[] = {1, 1}; + int strides_shape[] = {1, 1}; + int output_shape[] = {0}; + float input_data[] = {}; + int32_t begin_data[] = {1}; + int32_t end_data[] = {2}; + int32_t strides_data[] = {1}; + float golden[0]; + float output_data[16]; + + TfLiteStridedSliceParams builtin_data = {0, 0, 0, 0, 1, false}; + + tflite::testing::TestStridedSliceFloat( + input_shape, begin_shape, end_shape, strides_shape, &builtin_data, + input_data, begin_data, end_data, strides_data, output_shape, output_data, + golden, false, 1, true); +} + +TF_LITE_MICRO_TESTS_END diff --git a/tensorflow/lite/micro/kernels/sub.cc b/tensorflow/lite/micro/kernels/sub.cc new file mode 100644 index 0000000..930bc0b --- /dev/null +++ b/tensorflow/lite/micro/kernels/sub.cc @@ -0,0 +1,168 @@ +/* Copyright 2021 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/ + +#include "tensorflow/lite/micro/kernels/sub.h" + +#include "tensorflow/lite/c/builtin_op_data.h" +#include "tensorflow/lite/c/common.h" +#include "tensorflow/lite/kernels/internal/common.h" +#include "tensorflow/lite/kernels/internal/quantization_util.h" +#include "tensorflow/lite/kernels/internal/reference/add.h" +#include "tensorflow/lite/kernels/internal/reference/process_broadcast_shapes.h" +#include "tensorflow/lite/kernels/internal/reference/sub.h" +#include "tensorflow/lite/kernels/internal/tensor_ctypes.h" +#include "tensorflow/lite/kernels/internal/types.h" +#include "tensorflow/lite/kernels/kernel_util.h" +#include "tensorflow/lite/kernels/op_macros.h" +#include "tensorflow/lite/micro/kernels/kernel_util.h" +#include "tensorflow/lite/micro/micro_log.h" + +namespace tflite { + +void* SubInit(TfLiteContext* context, const char* buffer, size_t length) { + TFLITE_DCHECK(context->AllocatePersistentBuffer != nullptr); + return context->AllocatePersistentBuffer(context, sizeof(OpDataSub)); +} + +void EvalSub(TfLiteContext* context, TfLiteNode* node, TfLiteSubParams* params, + const OpDataSub* data, const TfLiteEvalTensor* input1, + const TfLiteEvalTensor* input2, TfLiteEvalTensor* output) { + float output_activation_min, output_activation_max; + CalculateActivationRange(params->activation, &output_activation_min, + &output_activation_max); + tflite::ArithmeticParams op_params; + SetActivationParams(output_activation_min, output_activation_max, &op_params); + if (data->requires_broadcast) { + tflite::reference_ops::BroadcastSubSlow( + op_params, tflite::micro::GetTensorShape(input1), + tflite::micro::GetTensorData(input1), + tflite::micro::GetTensorShape(input2), + tflite::micro::GetTensorData(input2), + tflite::micro::GetTensorShape(output), + tflite::micro::GetTensorData(output)); + } else { + tflite::reference_ops::SubWithActivation( + op_params, 
tflite::micro::GetTensorShape(input1), + tflite::micro::GetTensorData(input1), + tflite::micro::GetTensorShape(input2), + tflite::micro::GetTensorData(input2), + tflite::micro::GetTensorShape(output), + tflite::micro::GetTensorData(output)); + } +} + +TfLiteStatus EvalSubQuantized(TfLiteContext* context, TfLiteNode* node, + TfLiteSubParams* params, const OpDataSub* data, + const TfLiteEvalTensor* input1, + const TfLiteEvalTensor* input2, + TfLiteEvalTensor* output) { + tflite::ArithmeticParams op_params; + op_params.left_shift = data->left_shift; + op_params.input1_offset = data->input1_offset; + op_params.input1_multiplier = data->input1_multiplier; + op_params.input1_shift = data->input1_shift; + op_params.input2_offset = data->input2_offset; + op_params.input2_multiplier = data->input2_multiplier; + op_params.input2_shift = data->input2_shift; + op_params.output_offset = data->output_offset; + op_params.output_multiplier = data->output_multiplier; + op_params.output_shift = data->output_shift; + SetActivationParams(data->output_activation_min, data->output_activation_max, + &op_params); + bool need_broadcast = reference_ops::ProcessBroadcastShapes( + tflite::micro::GetTensorShape(input1), + tflite::micro::GetTensorShape(input2), &op_params); + + switch (output->type) { + case kTfLiteInt8: { + if (need_broadcast) { + tflite::reference_ops::BroadcastQuantSubSlow( + op_params, tflite::micro::GetTensorShape(input1), + tflite::micro::GetTensorData(input1), + tflite::micro::GetTensorShape(input2), + tflite::micro::GetTensorData(input2), + tflite::micro::GetTensorShape(output), + tflite::micro::GetTensorData(output)); + } else { + tflite::reference_ops::Sub( + op_params, tflite::micro::GetTensorShape(input1), + tflite::micro::GetTensorData(input1), + tflite::micro::GetTensorShape(input2), + tflite::micro::GetTensorData(input2), + tflite::micro::GetTensorShape(output), + tflite::micro::GetTensorData(output)); + } + break; + } + case kTfLiteInt16: { + if (need_broadcast) 
{ + tflite::reference_ops::BroadcastQuantSubSlow( + op_params, tflite::micro::GetTensorShape(input1), + tflite::micro::GetTensorData(input1), + tflite::micro::GetTensorShape(input2), + tflite::micro::GetTensorData(input2), + tflite::micro::GetTensorShape(output), + tflite::micro::GetTensorData(output)); + } else { + tflite::reference_ops::Sub( + op_params, tflite::micro::GetTensorShape(input1), + tflite::micro::GetTensorData(input1), + tflite::micro::GetTensorShape(input2), + tflite::micro::GetTensorData(input2), + tflite::micro::GetTensorShape(output), + tflite::micro::GetTensorData(output)); + } + break; + } + default: + MicroPrintf("Quantized type %s not currently supported.", + TfLiteTypeGetName(output->type)); + return kTfLiteError; + } + return kTfLiteOk; +} + +TfLiteStatus SubEval(TfLiteContext* context, TfLiteNode* node) { + auto* params = reinterpret_cast(node->builtin_data); + + const TfLiteEvalTensor* input1 = + tflite::micro::GetEvalInput(context, node, kSubInputTensor1); + const TfLiteEvalTensor* input2 = + tflite::micro::GetEvalInput(context, node, kSubInputTensor2); + TfLiteEvalTensor* output = + tflite::micro::GetEvalOutput(context, node, kSubOutputTensor); + + TFLITE_DCHECK(node->user_data != nullptr); + const OpDataSub& data = *(static_cast(node->user_data)); + + if (output->type == kTfLiteFloat32) { + EvalSub(context, node, params, &data, input1, input2, output); + } else if (output->type == kTfLiteInt8 || output->type == kTfLiteInt16) { + TF_LITE_ENSURE_OK(context, EvalSubQuantized(context, node, params, &data, + input1, input2, output)); + } else { + MicroPrintf("Type %s (%d) not supported.", TfLiteTypeGetName(output->type), + output->type); + return kTfLiteError; + } + + return kTfLiteOk; +} + +TFLMRegistration Register_SUB() { + return tflite::micro::RegisterOp(SubInit, SubPrepare, SubEval); +} + +} // namespace tflite diff --git a/tensorflow/lite/micro/kernels/sub.h b/tensorflow/lite/micro/kernels/sub.h new file mode 100644 index 
0000000..2990022 --- /dev/null +++ b/tensorflow/lite/micro/kernels/sub.h @@ -0,0 +1,60 @@ +/* Copyright 2021 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#ifndef TENSORFLOW_LITE_MICRO_KERNELS_SUB_H_ +#define TENSORFLOW_LITE_MICRO_KERNELS_SUB_H_ + +#include + +#include "tensorflow/lite/c/builtin_op_data.h" +#include "tensorflow/lite/c/common.h" + +namespace tflite { + +extern const int kSubInputTensor1; +extern const int kSubInputTensor2; +extern const int kSubOutputTensor; + +struct OpDataSub { + bool requires_broadcast; + + // These fields are used in both the general 8-bit -> 8bit quantized path, + // and the special 16-bit -> 16bit quantized path + int input1_shift; + int input2_shift; + int32_t output_activation_min; + int32_t output_activation_max; + + // These fields are used only in the general 8-bit -> 8bit quantized path + int32_t input1_multiplier; + int32_t input2_multiplier; + int32_t output_multiplier; + int output_shift; + int left_shift; + int32_t input1_offset; + int32_t input2_offset; + int32_t output_offset; +}; + +TfLiteStatus CalculateOpDataSub(TfLiteContext* context, TfLiteSubParams* params, + const TfLiteTensor* input1, + const TfLiteTensor* input2, + TfLiteTensor* output, OpDataSub* data); + +TfLiteStatus SubPrepare(TfLiteContext* context, TfLiteNode* node); + +} // namespace tflite + +#endif // TENSORFLOW_LITE_MICRO_KERNELS_SUB_H_ 
diff --git a/tensorflow/lite/micro/kernels/sub_common.cc b/tensorflow/lite/micro/kernels/sub_common.cc new file mode 100644 index 0000000..d664746 --- /dev/null +++ b/tensorflow/lite/micro/kernels/sub_common.cc @@ -0,0 +1,107 @@ +/* Copyright 2021 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#include "tensorflow/lite/c/builtin_op_data.h" +#include "tensorflow/lite/c/common.h" +#include "tensorflow/lite/kernels/internal/common.h" +#include "tensorflow/lite/kernels/internal/quantization_util.h" +#include "tensorflow/lite/kernels/internal/reference/add.h" +#include "tensorflow/lite/kernels/internal/reference/process_broadcast_shapes.h" +#include "tensorflow/lite/kernels/internal/reference/sub.h" +#include "tensorflow/lite/kernels/internal/tensor_ctypes.h" +#include "tensorflow/lite/kernels/internal/types.h" +#include "tensorflow/lite/kernels/kernel_util.h" +#include "tensorflow/lite/kernels/op_macros.h" +#include "tensorflow/lite/micro/kernels/kernel_util.h" +#include "tensorflow/lite/micro/kernels/sub.h" + +namespace tflite { + +const int kSubInputTensor1 = 0; +const int kSubInputTensor2 = 1; +const int kSubOutputTensor = 0; + +TfLiteStatus CalculateOpDataSub(TfLiteContext* context, TfLiteSubParams* params, + const TfLiteTensor* input1, + const TfLiteTensor* input2, + TfLiteTensor* output, OpDataSub* data) { + data->requires_broadcast = !HaveSameShapes(input1, 
input2); + + if (output->type == kTfLiteInt8 || output->type == kTfLiteInt16) { + // 8bit -> 8bit general quantized path, with general rescalings + data->input1_offset = -input1->params.zero_point; + data->input2_offset = -input2->params.zero_point; + data->output_offset = output->params.zero_point; + + // The shift is set to 15 in case of 16-bit and 20 in case of 8-bit, + // accordingly. In case of 16-bit we have 65535 << 15 which is less than 1 + // << 31, therefore the addition will still fit in a 32 bit accumulator. + data->left_shift = output->type == kTfLiteInt16 ? 15 : 20; + const float twice_max_input_scale = + 2 * std::max(input1->params.scale, input2->params.scale); + const double real_input1_multiplier = + static_cast(input1->params.scale / twice_max_input_scale); + const double real_input2_multiplier = + static_cast(input2->params.scale / twice_max_input_scale); + const double real_output_multiplier = + static_cast(twice_max_input_scale / + ((1 << data->left_shift) * output->params.scale)); + + QuantizeMultiplierSmallerThanOneExp( + real_input1_multiplier, &data->input1_multiplier, &data->input1_shift); + + QuantizeMultiplierSmallerThanOneExp( + real_input2_multiplier, &data->input2_multiplier, &data->input2_shift); + + QuantizeMultiplierSmallerThanOneExp( + real_output_multiplier, &data->output_multiplier, &data->output_shift); + + TF_LITE_ENSURE_STATUS(CalculateActivationRangeQuantized( + context, params->activation, output, &data->output_activation_min, + &data->output_activation_max)); + } + + return kTfLiteOk; +} + +TfLiteStatus SubPrepare(TfLiteContext* context, TfLiteNode* node) { + TFLITE_DCHECK(node->user_data != nullptr); + TFLITE_DCHECK(node->builtin_data != nullptr); + + OpDataSub* data = static_cast(node->user_data); + auto* params = reinterpret_cast(node->builtin_data); + + MicroContext* micro_context = GetMicroContext(context); + + TfLiteTensor* input1 = + micro_context->AllocateTempInputTensor(node, kSubInputTensor1); + 
TF_LITE_ENSURE(context, input1 != nullptr); + TfLiteTensor* input2 = + micro_context->AllocateTempInputTensor(node, kSubInputTensor2); + TF_LITE_ENSURE(context, input2 != nullptr); + TfLiteTensor* output = + micro_context->AllocateTempOutputTensor(node, kSubOutputTensor); + TF_LITE_ENSURE(context, output != nullptr); + + TF_LITE_ENSURE_STATUS( + CalculateOpDataSub(context, params, input1, input2, output, data)); + + micro_context->DeallocateTempTfLiteTensor(input1); + micro_context->DeallocateTempTfLiteTensor(input2); + micro_context->DeallocateTempTfLiteTensor(output); + return kTfLiteOk; +} + +} // namespace tflite diff --git a/tensorflow/lite/micro/kernels/sub_test.cc b/tensorflow/lite/micro/kernels/sub_test.cc new file mode 100644 index 0000000..d5226eb --- /dev/null +++ b/tensorflow/lite/micro/kernels/sub_test.cc @@ -0,0 +1,471 @@ +/* Copyright 2019 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#include + +#include "tensorflow/lite/c/builtin_op_data.h" +#include "tensorflow/lite/c/common.h" +#include "tensorflow/lite/micro/kernels/kernel_runner.h" +#include "tensorflow/lite/micro/test_helpers.h" +#include "tensorflow/lite/micro/testing/micro_test.h" + +namespace tflite { +namespace testing { +namespace { + +// Shapes and values for mixed broadcast tests. 
+const int broadcast_output_dims_count = 36; +const int broadcast_num_shapes = 4; + +int broadcast_input1_shape[] = {4, 2, 3, 1, 2}; +const float broadcast_input1_values[] = {-0.3, 2.3, 0.9, 0.5, 0.8, -1.1, + 1.2, 2.8, -1.6, 0.0, 0.7, -2.2}; +const float broadcast_input2_values[] = {-0.2, -0.3, 0.4, -0.5, -1.0, -0.9}; +const float + broadcast_goldens[broadcast_num_shapes][broadcast_output_dims_count] = { + {-0.1, 2.6, -0.7, 2.8, 0.7, 3.2, 1.1, 0.8, 0.5, 1.0, 1.9, 1.4, + 1.0, -0.8, 0.4, -0.6, 1.8, -0.2, 1.4, 3.1, 0.8, 3.3, 2.2, 3.7, + -1.4, 0.3, -2.0, 0.5, -0.6, 0.9, 0.9, -1.9, 0.3, -1.7, 1.7, -1.3}, + {-0.1, 2.6, 0.5, 1.0, 1.8, -0.2, 1.4, 3.1, -2.0, 0.5, 1.7, -1.3}, + {-0.1, 2.5, 0.0, 2.6, -0.7, 1.9, 1.1, 0.7, 1.2, 0.8, 0.5, 0.1, + 1.0, -0.9, 1.1, -0.8, 0.4, -1.5, 1.7, 3.3, 2.2, 3.8, 2.1, 3.7, + -1.1, 0.5, -0.6, 1.0, -0.7, 0.9, 1.2, -1.7, 1.7, -1.2, 1.6, -1.3}, + {-0.1, 2.5, 1.2, 0.8, 0.4, -1.5, 1.7, 3.3, -0.6, 1.0, 1.6, -1.3}, +}; + +const int broadcast_max_shape_size = 5; +int broadcast_input2_shapes[broadcast_num_shapes][broadcast_max_shape_size] = { + {4, 1, 1, 3, 2}, + {4, 1, 3, 1, 2}, + {4, 2, 1, 3, 1}, + {4, 2, 3, 1, 1}, +}; +int broadcast_output_shapes[broadcast_num_shapes][broadcast_max_shape_size] = { + {4, 2, 3, 3, 2}, + {4, 2, 3, 1, 2}, + {4, 2, 3, 3, 2}, + {4, 2, 3, 1, 2}, +}; + +template +void ValidateSubGoldens(TfLiteTensor* tensors, int tensors_size, + const T* golden, T* output, int output_size, + TfLiteFusedActivation activation, + float tolerance = 1e-5) { + TfLiteSubParams builtin_data; + builtin_data.activation = activation; + + int inputs_array_data[] = {2, 0, 1}; + TfLiteIntArray* inputs_array = IntArrayFromInts(inputs_array_data); + int outputs_array_data[] = {1, 2}; + TfLiteIntArray* outputs_array = IntArrayFromInts(outputs_array_data); + + const TFLMRegistration registration = tflite::Register_SUB(); + micro::KernelRunner runner(registration, tensors, tensors_size, inputs_array, + outputs_array, &builtin_data); + + 
TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, runner.InitAndPrepare()); + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, runner.Invoke()); + + for (int i = 0; i < output_size; ++i) { + TF_LITE_MICRO_EXPECT_NEAR(golden[i], output[i], tolerance); + } +} + +void TestSubFloat(int* input1_dims_data, const float* input1_data, + int* input2_dims_data, const float* input2_data, + int* output_dims_data, const float* expected_output, + TfLiteFusedActivation activation, float* output_data) { + TfLiteIntArray* input1_dims = IntArrayFromInts(input1_dims_data); + TfLiteIntArray* input2_dims = IntArrayFromInts(input2_dims_data); + TfLiteIntArray* output_dims = IntArrayFromInts(output_dims_data); + + constexpr int inputs_size = 2; + constexpr int outputs_size = 1; + constexpr int tensors_size = inputs_size + outputs_size; + TfLiteTensor tensors[tensors_size] = { + CreateTensor(input1_data, input1_dims), + CreateTensor(input2_data, input2_dims), + CreateTensor(output_data, output_dims), + }; + + ValidateSubGoldens(tensors, tensors_size, expected_output, output_data, + ElementCount(*output_dims), activation); +} + +template +void TestSubQuantized(int* input1_dims_data, const float* input1_data, + T* input1_quantized, float input1_scale, + int input1_zero_point, int* input2_dims_data, + const float* input2_data, T* input2_quantized, + float input2_scale, int input2_zero_point, + int* output_dims_data, const float* golden, + T* golden_quantized, float output_scale, + int output_zero_point, TfLiteFusedActivation activation, + T* output_data) { + TfLiteIntArray* input1_dims = IntArrayFromInts(input1_dims_data); + TfLiteIntArray* input2_dims = IntArrayFromInts(input2_dims_data); + TfLiteIntArray* output_dims = IntArrayFromInts(output_dims_data); + + constexpr int inputs_size = 2; + constexpr int outputs_size = 1; + constexpr int tensors_size = inputs_size + outputs_size; + TfLiteTensor tensors[tensors_size] = { + tflite::testing::CreateQuantizedTensor(input1_data, input1_quantized, + input1_dims, input1_scale, 
+ input1_zero_point), + tflite::testing::CreateQuantizedTensor(input2_data, input2_quantized, + input2_dims, input2_scale, + input2_zero_point), + tflite::testing::CreateQuantizedTensor(output_data, output_dims, + output_scale, output_zero_point), + }; + tflite::Quantize(golden, golden_quantized, ElementCount(*output_dims), + output_scale, output_zero_point); + + ValidateSubGoldens(tensors, tensors_size, golden_quantized, output_data, + ElementCount(*output_dims), activation); +} + +} // namespace +} // namespace testing +} // namespace tflite + +TF_LITE_MICRO_TESTS_BEGIN + +TF_LITE_MICRO_TEST(FloatSubNoActivation) { + const int output_dims_count = 4; + int inout_shape[] = {4, 1, 2, 2, 1}; + const float input1_values[] = {-2.0, 0.2, 0.7, 0.8}; + const float input2_values[] = {0.1, 0.2, 0.3, 0.5}; + const float golden_values[] = {-2.1, 0.0, 0.4, 0.3}; + float output_data[output_dims_count]; + tflite::testing::TestSubFloat(inout_shape, input1_values, inout_shape, + input2_values, inout_shape, golden_values, + kTfLiteActNone, output_data); +} + +TF_LITE_MICRO_TEST(FloatSubActivationRelu1) { + const int output_dims_count = 4; + int inout_shape[] = {4, 1, 2, 2, 1}; + const float input1_values[] = {-2.0, 0.2, 2.0, 0.8}; + const float input2_values[] = {2.0, 0.2, 0.3, 0.5}; + const float golden_values[] = {-1.0, 0.0, 1.0, 0.3}; + + float output_data[output_dims_count]; + tflite::testing::TestSubFloat(inout_shape, input1_values, inout_shape, + input2_values, inout_shape, golden_values, + kTfLiteActReluN1To1, output_data); +} + +TF_LITE_MICRO_TEST(FloatSubVariousInputShapes) { + const int output_dims_count = 6; + float output_data[output_dims_count]; + + const float input1_values[] = {-2.0, 0.2, 0.7, 0.8, 1.1, 2.0}; + const float input2_values[] = {0.1, 0.2, 0.3, 0.5, 1.1, 0.1}; + const float expected_output[] = {-2.1, 0.0, 0.4, 0.3, 0.0, 1.9}; + + constexpr int num_shapes = 4; + constexpr int max_shape_size = 5; + int test_shapes[num_shapes][max_shape_size] = { + {1, 6}, + 
{2, 2, 3}, + {3, 2, 1, 3}, + {4, 1, 3, 1, 2}, + }; + + for (int i = 0; i < num_shapes; ++i) { + tflite::testing::TestSubFloat(test_shapes[i], input1_values, test_shapes[i], + input2_values, test_shapes[i], + expected_output, kTfLiteActNone, output_data); + } +} + +TF_LITE_MICRO_TEST(FloatSubWithScalarBroadcast) { + const int output_dims_count = 6; + float output_data[output_dims_count]; + + const float input1_values[] = {-2.0, 0.2, 0.7, 0.8, 1.1, 2.0}; + int input2_shape[] = {0}; + const float input2_values[] = {0.1}; + const float expected_output[] = {-2.1, 0.1, 0.6, 0.7, 1.0, 1.9}; + + constexpr int num_shapes = 4; + constexpr int max_shape_size = 5; + int test_shapes[num_shapes][max_shape_size] = { + {1, 6}, + {2, 2, 3}, + {3, 2, 1, 3}, + {4, 1, 3, 1, 2}, + }; + + for (int i = 0; i < num_shapes; ++i) { + tflite::testing::TestSubFloat(test_shapes[i], input1_values, input2_shape, + input2_values, test_shapes[i], + expected_output, kTfLiteActNone, output_data); + } +} + +TF_LITE_MICRO_TEST(QuantizedSubNoActivationInt8) { + const float scales[] = {0.25, 0.5, 1.0}; + const int zero_points[] = {-10, 4, 13}; + const int output_dims_count = 4; + int inout_shape[] = {4, 1, 2, 2, 1}; + const float input1_values[] = {-2.01, -1.01, -0.01, 0.98}; + const float input2_values[] = {-1.01, -1.99, -2.99, -4.02}; + const float golden_values[] = {-1, 1, 3, 5}; + + int8_t input1_quantized[output_dims_count]; + int8_t input2_quantized[output_dims_count]; + int8_t golden_quantized[output_dims_count]; + int8_t output[output_dims_count]; + + tflite::testing::TestSubQuantized( + inout_shape, input1_values, input1_quantized, scales[0], zero_points[0], + inout_shape, input2_values, input2_quantized, scales[1], zero_points[1], + inout_shape, golden_values, golden_quantized, scales[2], zero_points[2], + kTfLiteActNone, output); +} + +TF_LITE_MICRO_TEST(QuantizedSubActivationRelu1Int8) { + const float scales[] = {0.25, 0.5, 1.0}; + const int zero_points[] = {-10, 4, 13}; + const int 
output_dims_count = 4; + int inout_shape[] = {4, 1, 2, 2, 1}; + const float input1_values[] = {-2.01, -1.01, -0.01, 0.98}; + const float input2_values[] = {-1.01, -1.99, -2.99, -4.02}; + const float golden_values[] = {-1, 1, 1, 1}; + + int8_t input1_quantized[output_dims_count]; + int8_t input2_quantized[output_dims_count]; + int8_t golden_quantized[output_dims_count]; + int8_t output[output_dims_count]; + + tflite::testing::TestSubQuantized( + inout_shape, input1_values, input1_quantized, scales[0], zero_points[0], + inout_shape, input2_values, input2_quantized, scales[1], zero_points[1], + inout_shape, golden_values, golden_quantized, scales[2], zero_points[2], + kTfLiteActReluN1To1, output); +} + +TF_LITE_MICRO_TEST(QuantizedSubActivationRelu1Int16) { + const float scales[] = {0.25, 0.5, 1.0}; + const int zero_points[] = {0, 0, 0}; + const int output_dims_count = 4; + int inout_shape[] = {4, 1, 2, 2, 1}; + const float input1_values[] = {-2.01, -1.01, -0.01, 0.98}; + const float input2_values[] = {-1.01, -1.99, -2.99, -4.02}; + const float golden_values[] = {-1, 1, 1, 1}; + + int16_t input1_quantized[output_dims_count]; + int16_t input2_quantized[output_dims_count]; + int16_t golden_quantized[output_dims_count]; + int16_t output[output_dims_count]; + + tflite::testing::TestSubQuantized( + inout_shape, input1_values, input1_quantized, scales[0], zero_points[0], + inout_shape, input2_values, input2_quantized, scales[1], zero_points[1], + inout_shape, golden_values, golden_quantized, scales[2], zero_points[2], + kTfLiteActReluN1To1, output); +} + +TF_LITE_MICRO_TEST(QuantizedSubVariousInputShapesInt8) { + const float scales[] = {0.1, 0.05, 0.1}; + const int zero_points[] = {-9, 5, 14}; + const int output_dims_count = 6; + + constexpr int num_shapes = 4; + constexpr int max_shape_size = 5; + int test_shapes[num_shapes][max_shape_size] = { + {1, 6}, + {2, 2, 3}, + {3, 2, 1, 3}, + {4, 1, 3, 1, 2}, + }; + + const float input1_values[] = {-2.0, 0.2, 0.7, 0.8, 1.1, 2.0}; 
+ const float input2_values[] = {-0.1, -0.2, -0.3, -0.5, -1.1, -0.1}; + const float golden_values[] = {-1.9, 0.4, 1.0, 1.3, 2.2, 2.1}; + + int8_t input1_quantized[output_dims_count]; + int8_t input2_quantized[output_dims_count]; + int8_t golden_quantized[output_dims_count]; + int8_t output[output_dims_count]; + + for (int i = 0; i < num_shapes; i++) { + tflite::testing::TestSubQuantized( + test_shapes[i], input1_values, input1_quantized, scales[0], + zero_points[0], test_shapes[i], input2_values, input2_quantized, + scales[1], zero_points[1], test_shapes[i], golden_values, + golden_quantized, scales[2], zero_points[2], kTfLiteActNone, output); + } +} + +TF_LITE_MICRO_TEST(QuantizedSubVariousInputShapesInt16) { + const float scales[] = {0.1, 0.05, 0.1}; + const int zero_points[] = {0, 0, 0}; + const int output_dims_count = 6; + + constexpr int num_shapes = 4; + constexpr int max_shape_size = 5; + int test_shapes[num_shapes][max_shape_size] = { + {1, 6}, + {2, 2, 3}, + {3, 2, 1, 3}, + {4, 1, 3, 1, 2}, + }; + + const float input1_values[] = {-2.0, 0.2, 0.7, 0.8, 1.1, 2.0}; + const float input2_values[] = {-0.1, -0.2, -0.3, -0.5, -1.1, -0.1}; + const float golden_values[] = {-1.9, 0.4, 1.0, 1.3, 2.2, 2.1}; + + int16_t input1_quantized[output_dims_count]; + int16_t input2_quantized[output_dims_count]; + int16_t golden_quantized[output_dims_count]; + int16_t output[output_dims_count]; + + for (int i = 0; i < num_shapes; i++) { + tflite::testing::TestSubQuantized( + test_shapes[i], input1_values, input1_quantized, scales[0], + zero_points[0], test_shapes[i], input2_values, input2_quantized, + scales[1], zero_points[1], test_shapes[i], golden_values, + golden_quantized, scales[2], zero_points[2], kTfLiteActNone, output); + } +} + +TF_LITE_MICRO_TEST(QuantizedSubWithScalarBroadcastFloat) { + float output_float[tflite::testing::broadcast_output_dims_count]; + + for (int i = 0; i < tflite::testing::broadcast_num_shapes; ++i) { + 
tflite::testing::TestSubFloat(tflite::testing::broadcast_input1_shape, + tflite::testing::broadcast_input1_values, + tflite::testing::broadcast_input2_shapes[i], + tflite::testing::broadcast_input2_values, + tflite::testing::broadcast_output_shapes[i], + tflite::testing::broadcast_goldens[i], + kTfLiteActNone, output_float); + } +} + +TF_LITE_MICRO_TEST(QuantizedSubWithScalarBroadcastInt8) { + const int output_dims_count = 6; + + const float input1_values[] = {-2.0, 0.2, 0.7, 0.8, 1.1, 2.0}; + int input2_shape[] = {0}; + const float input2_values[] = {-0.1}; + const float golden[] = {-1.9, 0.3, 0.8, 0.9, 1.2, 2.1}; + + constexpr int num_shapes = 4; + constexpr int max_shape_size = 5; + int test_shapes[num_shapes][max_shape_size] = { + {1, 6}, + {2, 2, 3}, + {3, 2, 1, 3}, + {4, 1, 3, 1, 2}, + }; + + const float scales[] = {0.1, 0.05, 0.05}; + const int zero_points[] = {-8, 4, 12}; + + int8_t input1_quantized[output_dims_count]; + int8_t input2_quantized[output_dims_count]; + int8_t golden_quantized[output_dims_count]; + int8_t output[output_dims_count]; + + for (int i = 0; i < num_shapes; ++i) { + tflite::testing::TestSubQuantized( + test_shapes[i], input1_values, input1_quantized, scales[0], + zero_points[0], input2_shape, input2_values, input2_quantized, + scales[1], zero_points[1], test_shapes[i], golden, golden_quantized, + scales[2], zero_points[2], kTfLiteActNone, output); + } +} + +TF_LITE_MICRO_TEST(QuantizedSubWithScalarBroadcastInt16) { + const int output_dims_count = 6; + + const float input1_values[] = {-2.0, 0.2, 0.7, 0.8, 1.1, 2.0}; + int input2_shape[] = {0}; + const float input2_values[] = {-0.1}; + const float golden[] = {-1.9, 0.3, 0.8, 0.9, 1.2, 2.1}; + + constexpr int num_shapes = 4; + constexpr int max_shape_size = 5; + int test_shapes[num_shapes][max_shape_size] = { + {1, 6}, + {2, 2, 3}, + {3, 2, 1, 3}, + {4, 1, 3, 1, 2}, + }; + + const float scales[] = {0.1, 0.05, 0.05}; + const int zero_points[] = {0, 0, 0}; + + int16_t 
input1_quantized[output_dims_count]; + int16_t input2_quantized[output_dims_count]; + int16_t golden_quantized[output_dims_count]; + int16_t output[output_dims_count]; + + for (int i = 0; i < num_shapes; ++i) { + tflite::testing::TestSubQuantized( + test_shapes[i], input1_values, input1_quantized, scales[0], + zero_points[0], input2_shape, input2_values, input2_quantized, + scales[1], zero_points[1], test_shapes[i], golden, golden_quantized, + scales[2], zero_points[2], kTfLiteActNone, output); + } +} + +TF_LITE_MICRO_TEST(QuantizedSubWithMixedBroadcastInt8) { + const float scales[] = {0.1, 0.05, 0.1}; + const int zero_points[] = {-10, -5, 7}; + int8_t input1_quantized[tflite::testing::broadcast_output_dims_count]; + int8_t input2_quantized[tflite::testing::broadcast_output_dims_count]; + int8_t golden_quantized[tflite::testing::broadcast_output_dims_count]; + int8_t output[tflite::testing::broadcast_output_dims_count]; + + for (int i = 0; i < tflite::testing::broadcast_num_shapes; ++i) { + tflite::testing::TestSubQuantized( + tflite::testing::broadcast_input1_shape, + tflite::testing::broadcast_input1_values, input1_quantized, scales[0], + zero_points[0], tflite::testing::broadcast_input2_shapes[i], + tflite::testing::broadcast_input2_values, input2_quantized, scales[1], + zero_points[1], tflite::testing::broadcast_output_shapes[i], + tflite::testing::broadcast_goldens[i], golden_quantized, scales[2], + zero_points[2], kTfLiteActNone, output); + } +} + +TF_LITE_MICRO_TEST(QuantizedSubWithMixedBroadcastInt16) { + const float scales[] = {0.1, 0.05, 0.1}; + const int zero_points[] = {0, 0, 0}; + int16_t input1_quantized[tflite::testing::broadcast_output_dims_count]; + int16_t input2_quantized[tflite::testing::broadcast_output_dims_count]; + int16_t golden_quantized[tflite::testing::broadcast_output_dims_count]; + int16_t output[tflite::testing::broadcast_output_dims_count]; + + for (int i = 0; i < tflite::testing::broadcast_num_shapes; ++i) { + 
tflite::testing::TestSubQuantized( + tflite::testing::broadcast_input1_shape, + tflite::testing::broadcast_input1_values, input1_quantized, scales[0], + zero_points[0], tflite::testing::broadcast_input2_shapes[i], + tflite::testing::broadcast_input2_values, input2_quantized, scales[1], + zero_points[1], tflite::testing::broadcast_output_shapes[i], + tflite::testing::broadcast_goldens[i], golden_quantized, scales[2], + zero_points[2], kTfLiteActNone, output); + } +} + +TF_LITE_MICRO_TESTS_END diff --git a/tensorflow/lite/micro/kernels/svdf.cc b/tensorflow/lite/micro/kernels/svdf.cc new file mode 100644 index 0000000..0ffb4b0 --- /dev/null +++ b/tensorflow/lite/micro/kernels/svdf.cc @@ -0,0 +1,105 @@ +/* Copyright 2023 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/ + +#include "tensorflow/lite/micro/kernels/svdf.h" + +#include + +#include "tensorflow/lite/c/builtin_op_data.h" +#include "tensorflow/lite/c/common.h" +#include "tensorflow/lite/kernels/internal/common.h" +#include "tensorflow/lite/kernels/internal/quantization_util.h" +#include "tensorflow/lite/kernels/internal/tensor_ctypes.h" +#include "tensorflow/lite/kernels/kernel_util.h" +#include "tensorflow/lite/kernels/op_macros.h" +#include "tensorflow/lite/micro/kernels/activation_utils.h" +#include "tensorflow/lite/micro/kernels/kernel_util.h" +#include "tensorflow/lite/micro/micro_log.h" +#include "tensorflow/lite/micro/micro_utils.h" + +namespace tflite { +namespace { + +void* Init(TfLiteContext* context, const char* buffer, size_t length) { + TFLITE_DCHECK(context->AllocatePersistentBuffer != nullptr); + return context->AllocatePersistentBuffer(context, sizeof(OpDataSvdf)); +} + +TfLiteStatus Eval(TfLiteContext* context, TfLiteNode* node) { + auto* params = reinterpret_cast(node->builtin_data); + TFLITE_DCHECK(node->user_data != nullptr); + const OpDataSvdf& data = *(static_cast(node->user_data)); + + const TfLiteEvalTensor* input = + tflite::micro::GetEvalInput(context, node, kSvdfInputTensor); + const TfLiteEvalTensor* weights_feature = + tflite::micro::GetEvalInput(context, node, kSvdfWeightsFeatureTensor); + const TfLiteEvalTensor* weights_time = + tflite::micro::GetEvalInput(context, node, kSvdfWeightsTimeTensor); + // TODO(#1751): account for optional bias tensor + const TfLiteEvalTensor* bias = + (NumInputs(node) == 5) + ? 
tflite::micro::GetEvalInput(context, node, kSvdfBiasTensor) + : nullptr; + TfLiteEvalTensor* activation_state = tflite::micro::GetMutableEvalInput( + context, node, kSvdfInputActivationStateTensor); + TfLiteEvalTensor* output = + tflite::micro::GetEvalOutput(context, node, kSvdfOutputTensor); + + switch (weights_feature->type) { + case kTfLiteFloat32: { + EvalFloatSvdfReference( + context, node, input, weights_feature, weights_time, bias, params, + data.scratch_tensor_index, activation_state, output); + break; + } + + case kTfLiteInt8: { + switch (weights_time->type) { + case kTfLiteInt16: { + EvalInt16SvdfReference(context, node, input, weights_feature, + weights_time, bias, params, activation_state, + output, data); + break; + } + case kTfLiteInt8: { + EvalInt8SvdfReference(context, node, input, weights_feature, + weights_time, bias, params, activation_state, + output, data); + break; + } + default: + MicroPrintf("Type %s not currently supported.", + TfLiteTypeGetName(weights_time->type)); + return kTfLiteError; + } + break; + } + + default: + MicroPrintf("Type %s not currently supported.", + TfLiteTypeGetName(weights_feature->type)); + return kTfLiteError; + } + return kTfLiteOk; +} + +} // namespace + +TFLMRegistration Register_SVDF() { + return tflite::micro::RegisterOp(Init, PrepareSvdf, Eval); +} + +} // namespace tflite diff --git a/tensorflow/lite/micro/kernels/svdf.h b/tensorflow/lite/micro/kernels/svdf.h new file mode 100644 index 0000000..a05a9b4 --- /dev/null +++ b/tensorflow/lite/micro/kernels/svdf.h @@ -0,0 +1,100 @@ +/* Copyright 2023 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. 
+You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ +#ifndef TENSORFLOW_LITE_MICRO_KERNELS_SVDF_H_ +#define TENSORFLOW_LITE_MICRO_KERNELS_SVDF_H_ + +#include "tensorflow/lite/c/builtin_op_data.h" +#include "tensorflow/lite/micro/micro_common.h" + +namespace tflite { + +struct OpDataSvdf { + int32_t effective_scale_1_a; + int32_t effective_scale_2_a; + // b versions of each scale are kept at int since the numbers are just the + // shift value - typically between [-32, 32]. + int effective_scale_1_b; + int effective_scale_2_b; + int scratch_tensor_index; + int scratch_output_tensor_index; + + // Cached tensor zero point values for quantized operations. + int input_zero_point; + int output_zero_point; + int activation_state_zero_point; +}; + +// Input tensors. +extern const int kSvdfInputTensor; +extern const int kSvdfWeightsFeatureTensor; +extern const int kSvdfWeightsTimeTensor; +extern const int kSvdfBiasTensor; +// This is a variable tensor, and will be modified by this op. +extern const int kSvdfInputActivationStateTensor; + +// Output tensor. +extern const int kSvdfOutputTensor; + +void EvalInt8SvdfReference(TfLiteContext* context, TfLiteNode* node, + const TfLiteEvalTensor* input_tensor, + const TfLiteEvalTensor* weights_feature_tensor, + const TfLiteEvalTensor* weights_time_tensor, + const TfLiteEvalTensor* bias_tensor, + const TfLiteSVDFParams* params, + TfLiteEvalTensor* activation_state_tensor, + TfLiteEvalTensor* output_tensor, + const OpDataSvdf& data); + +// TODO(#523): remove 16-bit code when no longer needed. 
+void EvalInt16SvdfReference(TfLiteContext* context, TfLiteNode* node, + const TfLiteEvalTensor* input_tensor, + const TfLiteEvalTensor* weights_feature_tensor, + const TfLiteEvalTensor* weights_time_tensor, + const TfLiteEvalTensor* bias_tensor, + const TfLiteSVDFParams* params, + TfLiteEvalTensor* activation_state_tensor, + TfLiteEvalTensor* output_tensor, + const OpDataSvdf& data); + +void EvalFloatSvdfReference( + TfLiteContext* context, TfLiteNode* node, const TfLiteEvalTensor* input, + const TfLiteEvalTensor* weights_feature, + const TfLiteEvalTensor* weights_time, const TfLiteEvalTensor* bias, + const TfLiteSVDFParams* params, int scratch_tensor_index, + TfLiteEvalTensor* activation_state, TfLiteEvalTensor* output); + +TfLiteStatus PrepareSvdf(TfLiteContext* context, TfLiteNode* node); + +// This is the most generic TFLMRegistration. The actual supported types +// may still be target dependent. The only requirement is that every +// implementation (reference or optimized) must define this function. +TFLMRegistration Register_SVDF(); + +#if defined(HEXAGON) || defined(CMSIS_NN) || defined(XTENSA) + +TFLMRegistration Register_SVDF_INT8(); + +#else +// Note that while this block gets used for both reference and optimized kernels +// that do not have any specialized implementations, the only goal here is to +// define fallback implementation that allow reference kernels to still be used +// from applications that call a more specific kernel variant. + +inline TFLMRegistration Register_SVDF_INT8() { return Register_SVDF(); } + +#endif +} // namespace tflite + +#endif // TENSORFLOW_LITE_MICRO_KERNELS_SVDF_H_ diff --git a/tensorflow/lite/micro/kernels/svdf_common.cc b/tensorflow/lite/micro/kernels/svdf_common.cc new file mode 100644 index 0000000..d7dd963 --- /dev/null +++ b/tensorflow/lite/micro/kernels/svdf_common.cc @@ -0,0 +1,517 @@ +/* Copyright 2023 The TensorFlow Authors. All Rights Reserved. 
+ +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#include + +#include "tensorflow/lite/c/builtin_op_data.h" +#include "tensorflow/lite/c/common.h" +#include "tensorflow/lite/kernels/internal/common.h" +#include "tensorflow/lite/kernels/internal/quantization_util.h" +#include "tensorflow/lite/kernels/internal/tensor_ctypes.h" +#include "tensorflow/lite/kernels/kernel_util.h" +#include "tensorflow/lite/kernels/op_macros.h" +#include "tensorflow/lite/micro/kernels/activation_utils.h" +#include "tensorflow/lite/micro/kernels/kernel_util.h" +#include "tensorflow/lite/micro/kernels/svdf.h" +#include "tensorflow/lite/micro/micro_utils.h" + +namespace tflite { + +/** + * This version of SVDF is specific to TFLite Micro. It contains the following + * differences between the TFLite version: + * + * 1.) Scratch tensor allocation - scratch tensors must be known ahead of time + * for the Micro interpreter. + * 2.) Output dimensions - the TFLite version determines output size and runtime + * and resizes the output tensor. Micro runtime does not support tensor + * resizing. + */ + +const int kSvdfInputTensor = 0; +const int kSvdfWeightsFeatureTensor = 1; +const int kSvdfWeightsTimeTensor = 2; +const int kSvdfBiasTensor = 3; +const int kSvdfInputActivationStateTensor = + 4; // This is a variable tensor, and will be modified by this op. 
+const int kSvdfOutputTensor = 0; + +template +void EvalIntegerSvdfReference(TfLiteContext* context, TfLiteNode* node, + const TfLiteEvalTensor* input_tensor, + const TfLiteEvalTensor* weights_feature_tensor, + const TfLiteEvalTensor* weights_time_tensor, + const TfLiteEvalTensor* bias_tensor, + const TfLiteSVDFParams* params, + TfLiteEvalTensor* activation_state_tensor, + TfLiteEvalTensor* output_tensor, + const OpDataSvdf& data) { + const int n_rank = params->rank; + const int n_batch = input_tensor->dims->data[0]; + const int n_input = input_tensor->dims->data[1]; + const int n_filter = weights_feature_tensor->dims->data[0]; + const int n_unit = n_filter / n_rank; + const int n_memory = weights_time_tensor->dims->data[1]; + + TFLITE_DCHECK(context != nullptr); + TFLITE_DCHECK(context->GetScratchBuffer != nullptr); + + int32_t* scratch_tensor = static_cast( + context->GetScratchBuffer(context, data.scratch_tensor_index)); + int32_t* scratch_output_tensor = static_cast( + context->GetScratchBuffer(context, data.scratch_output_tensor_index)); + + // Shift states. + T* const state_ptr = tflite::micro::GetTensorData(activation_state_tensor); + + // Left shift the activation_state. + { + T* new_state_start = state_ptr; + const T* old_state_start = state_ptr + 1; + const T* old_state_end = state_ptr + n_batch * n_filter * n_memory; + while (old_state_start != old_state_end) { + *new_state_start++ = *old_state_start++; + } + } + + // Note: no need to clear the latest activation, matmul is not accumulative. + + // Feature matmul. 
+ { + T* state = tflite::micro::GetTensorData(activation_state_tensor); + const int8_t* input = tflite::micro::GetTensorData(input_tensor); + const int8_t* weight_feature = + tflite::micro::GetTensorData(weights_feature_tensor); + const int32_t output_max = std::numeric_limits::max(); + const int32_t output_min = std::numeric_limits::min(); + T* result_in_batch = state + (n_memory - 1); + for (int b = 0; b < n_batch; b++) { + const int8_t* matrix_ptr = weight_feature; + for (int r = 0; r < n_filter; r++) { + int32_t dot_prod = 0; + const int8_t* vector_in_batch = input + b * n_input; + for (int c = 0; c < n_input; c++) { + dot_prod += + *matrix_ptr++ * (*vector_in_batch++ - data.input_zero_point); + } + dot_prod = MultiplyByQuantizedMultiplier( + dot_prod, data.effective_scale_1_a, data.effective_scale_1_b); + dot_prod = std::min(std::max(output_min, dot_prod), output_max); + // The int16 version of the op assumes a zero_point of 0. This + // code accounts for the potentially non-zero zero_point for the int8 + // version of the op. + *result_in_batch = data.activation_state_zero_point + dot_prod; + result_in_batch += n_memory; + } + } + } + + // Time. + { + for (int b = 0; b < n_batch; ++b) { + int32_t* scratch_ptr_batch = scratch_tensor + b * n_filter; + + // Perform batched vector dot product: + const T* vector1_ptr = + tflite::micro::GetTensorData(weights_time_tensor); + const T* vector2_ptr = + tflite::micro::GetTensorData(activation_state_tensor) + + b * n_memory * n_filter; + + for (int i = 0; i < n_filter; i++) { + *scratch_ptr_batch = 0; + for (int j = 0; j < n_memory; j++) { + *scratch_ptr_batch += + *vector1_ptr++ * + (*vector2_ptr++ - data.activation_state_zero_point); + } + scratch_ptr_batch++; + } + } + } + + // Reduce, add bias, rescale, activation. + { + // Add bias. 
+ if (bias_tensor) { + // Vector batch assign: + const int32_t* bias_data = + tflite::micro::GetTensorData(bias_tensor); + for (int i = 0; i < n_batch; ++i) { + int32_t* output_ptr = scratch_output_tensor + i * n_unit; + const int32_t* bias_ptr = bias_data; + for (int j = 0; j < n_unit; ++j) { + *output_ptr++ = *bias_ptr++; + } + } + } else { + int32_t* output_ptr = scratch_output_tensor; + for (int i = 0; i < n_batch * n_unit; ++i) { + *output_ptr++ = 0; + } + } + + // Reduce. + for (int b = 0; b < n_batch; ++b) { + int32_t* output_temp_ptr = scratch_output_tensor + b * n_unit; + int32_t* scratch_ptr_batch = scratch_tensor + b * n_filter; + + // Reduction sum vector + for (int i = 0; i < n_unit; ++i) { + for (int j = 0; j < n_rank; ++j) { + output_temp_ptr[i] += *scratch_ptr_batch++; + } + } + } + + // Rescale. + const int32_t output_max = std::numeric_limits::max(); + const int32_t output_min = std::numeric_limits::min(); + for (int i = 0; i < n_batch * n_unit; ++i) { + int32_t x1 = scratch_output_tensor[i]; + int32_t x2 = MultiplyByQuantizedMultiplier(x1, data.effective_scale_2_a, + data.effective_scale_2_b); + int32_t x3 = x2 + data.output_zero_point; + int32_t x4 = std::min(std::max(output_min, x3), output_max); + tflite::micro::GetTensorData(output_tensor)[i] = + static_cast(x4); + } + } +} + +/** + * Generate two versions of the integer code. One with int16_t type for the + * time weights and the activation state, and another one with int8_t for the + * same. 
+ */ + +void EvalInt16SvdfReference(TfLiteContext* context, TfLiteNode* node, + const TfLiteEvalTensor* input_tensor, + const TfLiteEvalTensor* weights_feature_tensor, + const TfLiteEvalTensor* weights_time_tensor, + const TfLiteEvalTensor* bias_tensor, + const TfLiteSVDFParams* params, + TfLiteEvalTensor* activation_state_tensor, + TfLiteEvalTensor* output_tensor, + const OpDataSvdf& data) { + EvalIntegerSvdfReference( + context, node, input_tensor, weights_feature_tensor, weights_time_tensor, + bias_tensor, params, activation_state_tensor, output_tensor, data); +} + +void EvalInt8SvdfReference(TfLiteContext* context, TfLiteNode* node, + const TfLiteEvalTensor* input_tensor, + const TfLiteEvalTensor* weights_feature_tensor, + const TfLiteEvalTensor* weights_time_tensor, + const TfLiteEvalTensor* bias_tensor, + const TfLiteSVDFParams* params, + TfLiteEvalTensor* activation_state_tensor, + TfLiteEvalTensor* output_tensor, + const OpDataSvdf& data) { + EvalIntegerSvdfReference( + context, node, input_tensor, weights_feature_tensor, weights_time_tensor, + bias_tensor, params, activation_state_tensor, output_tensor, data); +} + +static inline void ApplyTimeWeightsBiasAndActivation( + int batch_size, int memory_size, int num_filters, int num_units, int rank, + const float* const weights_time_ptr, const float* const bias_ptr, + TfLiteFusedActivation activation, float* const state_ptr, + float* const scratch_ptr, float* const output_ptr) { + // Compute matmul(activation_state, weights_time). 
+ for (int b = 0; b < batch_size; ++b) { + // Perform batched vector dot product: + float* scratch_ptr_batch = scratch_ptr + b * num_filters; + const float* vector1_ptr = weights_time_ptr; + const float* vector2_ptr = state_ptr + b * memory_size * num_filters; + for (int i = 0; i < num_filters; ++i) { + *scratch_ptr_batch = 0.f; + for (int j = 0; j < memory_size; ++j) { + *scratch_ptr_batch += *vector1_ptr++ * *vector2_ptr++; + } + scratch_ptr_batch++; + } + } + + // Initialize output with bias if provided. + if (bias_ptr) { + // VectorBatchVectorAssign + for (int i = 0; i < batch_size; ++i) { + float* output_data = output_ptr + i * num_units; + const float* bias_data = bias_ptr; + for (int j = 0; j < num_units; ++j) { + *output_data++ = *bias_data++; + } + } + } else { + float* output_data = output_ptr; + for (int i = 0; i < batch_size * num_units; ++i) { + *output_data++ = 0.0f; + } + } + + // Reduction sum. + for (int b = 0; b < batch_size; ++b) { + float* output_ptr_batch = output_ptr + b * num_units; + float* scratch_ptr_batch = scratch_ptr + b * num_filters; + + // Reduction sum vector + for (int i = 0; i < num_units; ++i) { + for (int j = 0; j < rank; j++) { + output_ptr_batch[i] += *scratch_ptr_batch++; + } + } + } + + // Apply activation. 
+ for (int b = 0; b < batch_size; ++b) { + float* output_ptr_batch = output_ptr + b * num_units; + for (int i = 0; i < num_units; ++i) { + *output_ptr_batch = + tflite::ops::micro::ActivationValFloat(activation, *output_ptr_batch); + ++output_ptr_batch; + } + } +} + +void EvalFloatSvdfReference( + TfLiteContext* context, TfLiteNode* node, const TfLiteEvalTensor* input, + const TfLiteEvalTensor* weights_feature, + const TfLiteEvalTensor* weights_time, const TfLiteEvalTensor* bias, + const TfLiteSVDFParams* params, int scratch_tensor_index, + TfLiteEvalTensor* activation_state, TfLiteEvalTensor* output) { + const int rank = params->rank; + const int batch_size = input->dims->data[0]; + const int input_size = input->dims->data[1]; + const int num_filters = weights_feature->dims->data[0]; + const int num_units = num_filters / rank; + const int memory_size = weights_time->dims->data[1]; + + const float* weights_feature_ptr = + tflite::micro::GetTensorData(weights_feature); + const float* weights_time_ptr = + tflite::micro::GetTensorData(weights_time); + // TODO(#1751): account for optional bias tensor + const float* bias_ptr = tflite::micro::GetTensorData(bias); + const float* input_ptr = tflite::micro::GetTensorData(input); + + float* state_ptr = tflite::micro::GetTensorData(activation_state); + + TFLITE_DCHECK(context != nullptr); + TFLITE_DCHECK(context->GetScratchBuffer != nullptr); + + float* scratch_ptr = static_cast( + context->GetScratchBuffer(context, scratch_tensor_index)); + + float* output_ptr = tflite::micro::GetTensorData(output); + + // Left shift the activation_state. + { + float* new_state_start = state_ptr; + const float* old_state_start = state_ptr + 1; + const float* old_state_end = + state_ptr + batch_size * num_filters * memory_size; + while (old_state_start != old_state_end) { + *new_state_start++ = *old_state_start++; + } + } + + // Note: no need to clear the latest activation, matmul is not accumulative. 
+ + // Compute conv1d(inputs, weights_feature). + // The activation_state's rightmost column is used to save current cycle + // activation. This is achieved by starting at state_ptr[memory_size - 1] and + // having the stride equal to memory_size. + + // Perform batched matrix vector multiply operation: + { + const float* matrix = weights_feature_ptr; + const float* vector = input_ptr; + float* result = &state_ptr[memory_size - 1]; + float* result_in_batch = result; + for (int i = 0; i < batch_size; ++i) { + const float* matrix_ptr = matrix; + for (int j = 0; j < num_filters; ++j) { + float dot_prod = 0.0f; + const float* vector_in_batch = vector + i * input_size; + for (int k = 0; k < input_size; ++k) { + dot_prod += *matrix_ptr++ * *vector_in_batch++; + } + *result_in_batch = dot_prod; + result_in_batch += memory_size; + } + } + } + + ApplyTimeWeightsBiasAndActivation( + batch_size, memory_size, num_filters, num_units, rank, weights_time_ptr, + bias_ptr, params->activation, state_ptr, scratch_ptr, output_ptr); +} + +TfLiteStatus PrepareSvdf(TfLiteContext* context, TfLiteNode* node) { + TFLITE_DCHECK(node->builtin_data != nullptr); + + const auto* params = static_cast(node->builtin_data); + + MicroContext* micro_context = GetMicroContext(context); + + // Validate Tensor Inputs (dtype depends on quantization): + // [0] = Input, {2, batch_size, input_size} + // [1] = Weights Feature, {2, num_filters, input_size} + // [2] = Weights Time, {2, num_filters, memory_size} + // [3] = Bias (optional), {1, num_units} + // [4] = Activation State (variable), + // {2, batch_size, memory_size * num_filters} + TfLiteTensor* input = + micro_context->AllocateTempInputTensor(node, kSvdfInputTensor); + TF_LITE_ENSURE(context, input != nullptr); + TfLiteTensor* weights_feature = + micro_context->AllocateTempInputTensor(node, kSvdfWeightsFeatureTensor); + TF_LITE_ENSURE(context, weights_feature != nullptr); + TfLiteTensor* weights_time = + micro_context->AllocateTempInputTensor(node, 
kSvdfWeightsTimeTensor); + TF_LITE_ENSURE(context, weights_time != nullptr); + TfLiteTensor* bias = + micro_context->AllocateTempInputTensor(node, kSvdfBiasTensor); + TfLiteTensor* activation_state = micro_context->AllocateTempInputTensor( + node, kSvdfInputActivationStateTensor); + TF_LITE_ENSURE(context, activation_state != nullptr); + + // Define input constants based on input tensor definition above: + const int rank = params->rank; + const int input_size = input->dims->data[1]; + const int batch_size = input->dims->data[0]; + const int num_filters = weights_feature->dims->data[0]; + TF_LITE_ENSURE_EQ(context, num_filters % rank, 0); + const int num_units = num_filters / rank; + const int memory_size = weights_time->dims->data[1]; + + // Validate Input Tensor: + TF_LITE_ENSURE(context, + input->type == kTfLiteFloat32 || input->type == kTfLiteInt8); + TF_LITE_ENSURE_EQ(context, NumDimensions(input), 2); + + // Validate Tensor Output: + // [0] = float/int8_t, {2, batch_size, num_units} + TF_LITE_ENSURE_EQ(context, node->outputs->size, 1); + TfLiteTensor* output = + micro_context->AllocateTempOutputTensor(node, kSvdfOutputTensor); + TF_LITE_ENSURE(context, output != nullptr); + TF_LITE_ENSURE_EQ(context, NumDimensions(output), 2); + TF_LITE_ENSURE_EQ(context, output->dims->data[0], batch_size); + TF_LITE_ENSURE_EQ(context, output->dims->data[1], num_units); + + // Validate Weights Feature Input Tensor: + TF_LITE_ENSURE_EQ(context, NumDimensions(weights_feature), 2); + TF_LITE_ENSURE_EQ(context, weights_feature->dims->data[1], input_size); + + // Validate Weights Time Input Tensor: + TF_LITE_ENSURE_EQ(context, NumDimensions(weights_time), 2); + TF_LITE_ENSURE_EQ(context, weights_time->dims->data[0], num_filters); + TF_LITE_ENSURE_EQ(context, weights_time->dims->data[1], memory_size); + + // Validate Optional Bias Input Tensor: + if (bias != nullptr) { + TF_LITE_ENSURE_EQ(context, bias->dims->data[0], num_units); + } + + // Validate Activation State Input Tensor: + 
TF_LITE_ENSURE_EQ(context, NumDimensions(activation_state), 2); + TF_LITE_ENSURE_EQ(context, activation_state->dims->data[0], batch_size); + TF_LITE_ENSURE_EQ(context, activation_state->dims->data[1], + memory_size * num_filters); + // Since is_variable is not part of TFLiteEvalTensor, check is_variable here. + TF_LITE_ENSURE_EQ(context, activation_state->is_variable, true); + + TF_LITE_ENSURE_EQ(context, node->inputs->size, 5); + + TFLITE_DCHECK(node->user_data != nullptr); + OpDataSvdf* data = static_cast(node->user_data); + + if (input->type == kTfLiteInt8) { + TF_LITE_ENSURE_EQ(context, weights_feature->type, kTfLiteInt8); + TF_LITE_ENSURE(context, (weights_time->type == kTfLiteInt16) || + (weights_time->type == kTfLiteInt8)); + TF_LITE_ENSURE(context, (activation_state->type == kTfLiteInt16) || + (activation_state->type == kTfLiteInt8)); + if (bias != nullptr) { + TF_LITE_ENSURE_EQ(context, bias->type, kTfLiteInt32); + } + + TF_LITE_ENSURE_TYPES_EQ(context, output->type, kTfLiteInt8); + + const double effective_scale_1 = static_cast( + input->params.scale * weights_feature->params.scale / + activation_state->params.scale); + const double effective_scale_2 = + static_cast(activation_state->params.scale * + weights_time->params.scale / output->params.scale); + + // TODO(b/162018098): Use TF_LITE_ENSURE_NEAR when it is ready. 
+ // TODO(#1751): account for optional bias tensor + TF_LITE_ENSURE( + context, + std::abs(static_cast(bias->params.scale) - + static_cast(activation_state->params.scale * + weights_time->params.scale)) < 1e-5); + + QuantizeMultiplier(effective_scale_1, &(data->effective_scale_1_a), + &(data->effective_scale_1_b)); + QuantizeMultiplier(effective_scale_2, &(data->effective_scale_2_a), + &(data->effective_scale_2_b)); + + data->input_zero_point = input->params.zero_point; + data->output_zero_point = output->params.zero_point; + data->activation_state_zero_point = activation_state->params.zero_point; + + TFLITE_DCHECK(context->RequestScratchBufferInArena != nullptr); + + const TfLiteStatus scratch_status = context->RequestScratchBufferInArena( + context, batch_size * num_filters * sizeof(int32_t), + &(data->scratch_tensor_index)); + TF_LITE_ENSURE_OK(context, scratch_status); + + const TfLiteStatus scratch_output_status = + context->RequestScratchBufferInArena( + context, batch_size * num_units * sizeof(int32_t), + &(data->scratch_output_tensor_index)); + TF_LITE_ENSURE_OK(context, scratch_output_status); + } else { + TF_LITE_ENSURE_EQ(context, weights_feature->type, kTfLiteFloat32); + TF_LITE_ENSURE_EQ(context, weights_time->type, kTfLiteFloat32); + TF_LITE_ENSURE_EQ(context, activation_state->type, kTfLiteFloat32); + if (bias != nullptr) { + TF_LITE_ENSURE_EQ(context, bias->type, kTfLiteFloat32); + } + TF_LITE_ENSURE_TYPES_EQ(context, output->type, kTfLiteFloat32); + + TFLITE_DCHECK(context->RequestScratchBufferInArena != nullptr); + const TfLiteStatus scratch_status = context->RequestScratchBufferInArena( + context, batch_size * num_filters * sizeof(float), + &(data->scratch_tensor_index)); + TF_LITE_ENSURE_OK(context, scratch_status); + } + + micro_context->DeallocateTempTfLiteTensor(input); + micro_context->DeallocateTempTfLiteTensor(weights_feature); + micro_context->DeallocateTempTfLiteTensor(weights_time); + 
micro_context->DeallocateTempTfLiteTensor(activation_state); + micro_context->DeallocateTempTfLiteTensor(output); + // TODO(#1751): account for optional bias tensor + micro_context->DeallocateTempTfLiteTensor(bias); + return kTfLiteOk; +} + +} // namespace tflite diff --git a/tensorflow/lite/micro/kernels/svdf_test.cc b/tensorflow/lite/micro/kernels/svdf_test.cc new file mode 100644 index 0000000..ae8fadc --- /dev/null +++ b/tensorflow/lite/micro/kernels/svdf_test.cc @@ -0,0 +1,965 @@ +/* Copyright 2023 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#include "tensorflow/lite/c/builtin_op_data.h" +#include "tensorflow/lite/c/common.h" +#include "tensorflow/lite/micro/kernels/kernel_runner.h" +#include "tensorflow/lite/micro/test_helpers.h" +#include "tensorflow/lite/micro/testing/micro_test.h" + +namespace tflite { +namespace testing { +namespace { + +// naming as follows: _xx + +// 10 inputs each with shape {2, 2}. 
+const float input_data_2x2x10[] = { + 0.12609188, -0.46347019, 0.35867718, 0.36897406, + + 0.14278367, -1.64410412, -0.57290924, 0.12729003, + + 0.49837467, 0.19278903, 0.17660543, 0.52949083, + + -0.11186574, 0.13164264, -0.72674477, -0.5683046, + + -0.68892461, 0.37783599, -0.63690937, 0.44483393, + + -0.81299269, -0.86831826, -0.95760226, 1.82078898, + + -1.45006323, -0.82251364, -1.65087092, -1.89238167, + + 0.03966608, -0.24936394, 2.06740379, -1.51439476, + + 0.11771342, -0.23761693, 0.31088525, -1.55601168, + + -0.89477462, 1.67204106, -0.6230064, 0.29819036, +}; + +// Feature filter of shape {8, 2}. +const float feature_weights_data_2x2x10[] = { + -0.31930989, 0.0079667, 0.39296314, 0.37613347, 0.12416199, 0.15785322, + 0.27901134, 0.3905206, 0.21931258, -0.36137494, -0.10640851, 0.31053296, + -0.36118156, -0.0976817, -0.36916667, 0.22197971}; + +// Time filter of shape {8, 10}. +const float time_weights_data_2x2x10[] = { + -0.31930989, 0.37613347, 0.27901134, -0.36137494, -0.36118156, + 0.22197971, 0.27557442, -0.06634006, 0.0079667, 0.12416199, + + 0.3905206, -0.10640851, -0.0976817, 0.15294972, 0.39635518, + -0.02702999, 0.39296314, 0.15785322, 0.21931258, 0.31053296, + + -0.36916667, 0.38031587, -0.21580373, 0.27072677, 0.23622236, + 0.34936687, 0.18174365, 0.35907319, -0.17493086, 0.324846, + + -0.10781813, 0.27201805, 0.14324132, -0.23681851, -0.27115166, + -0.01580888, -0.14943552, 0.15465137, 0.09784451, -0.0337657, + + -0.14884081, 0.19931212, -0.36002168, 0.34663299, -0.11405486, + 0.12672701, 0.39463779, -0.07886535, -0.06384811, 0.08249187, + + -0.26816407, -0.19905911, 0.29211238, 0.31264046, -0.28664589, + 0.05698794, 0.11613581, 0.14078894, 0.02187902, -0.21781836, + + -0.15567942, 0.08693647, -0.38256618, 0.36580828, -0.22922277, + -0.0226903, 0.12878349, -0.28122205, -0.10850525, -0.11955214, + + 0.27179423, -0.04710215, 0.31069002, 0.22672787, 0.09580326, + 0.08682203, 0.1258215, 0.1851041, 0.29228821, 0.12366763}; + +// Activation state 
with shape {2, 80}. These initial values must be copied into +// a mutable activation state tensor. + +const float initial_activation_state_data_2x2x10[] = { + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0}; + +// Bias with shape {8} +const float bias_data_2x2x10[] = {0, 0, 0, 0, 0, 0, 0, 0}; + +// 10 outputs each of shape {2, 4} +const float golden_output_2x2x10[] = { + -0.044205, -0.013757, 0.050369, -0.018447, + 0.073010, 0.025142, -0.021154, 0.013551, + + -0.209613, -0.062421, 0.150209, -0.108334, + 0.028256, -0.006950, -0.030885, 0.009603, + + -0.076800, -0.037075, -0.087198, -0.155183, + 0.091069, 0.098446, -0.016083, 0.106475, + + -0.082123, -0.162238, -0.084434, -0.141074, + -0.029340, -0.090685, 0.053302, -0.030604, + + -0.201440, 0.088424, 0.139877, 0.012416, + -0.113212, 0.103893, -0.100842, 0.122780, + + -0.166632, -0.116705, 0.175298, -0.047163, + 0.313077, -0.166485, -0.285860, 0.129069, + + -0.625911, 0.046134, 0.138081, -0.129581, + -0.521455, -0.061579, 0.230289, 0.114963, + + -0.216693, -0.161643, -0.179177, -0.052599, + -0.213239, 0.029502, 0.260858, 0.275045, + + -0.213689, -0.323608, -0.285635, -0.317687, + -0.324092, -0.317972, -0.208450, -0.462504, + + -0.255126, -0.218576, -0.041528, 0.179421, + -0.440583, 0.072127, -0.284136, 0.241570}; + +// Simulated real-world inputs, weights and expected outputs. 
+ +// Input of shape {1x16} +const float input_data_16x1x1[] = { + -0.488494, 2.023762, -2.233117, -0.488494, 3.559030, 9.490748, + -3.210106, -1.953977, -0.279140, 0.907204, 1.674838, 0.000000, + -0.279140, -0.628064, -0.069785, -0.628064, +}; + +// Feature filter of shape {64, 16}. +const float feature_weights_data_16x1x1[] = { + 0.173588, 0.173588, -0.024798, 0.193426, -0.099193, 0.044637, 0.183507, + 0.183507, 0.044637, 0.198386, -0.069435, 0.084314, 0.312458, 0.024798, + 0.173588, -0.049596, -0.352135, -0.550521, -0.009919, -0.099193, -0.074395, + -0.128951, 0.193426, 0.357095, -0.317418, -0.119032, -0.218225, -0.004960, + -0.386853, -0.133911, 0.252942, -0.019839, -0.024798, -0.054556, -0.069435, + -0.128951, 0.029758, -0.099193, -0.312458, -0.029758, 0.064475, 0.183507, + 0.114072, -0.178547, -0.247982, -0.119032, 0.243023, -0.119032, -0.034718, + -0.178547, 0.019839, 0.128951, -0.223184, -0.009919, -0.213265, 0.168628, + -0.143830, -0.322377, -0.218225, -0.193426, -0.252942, -0.049596, 0.064475, + -0.267821, -0.580279, -0.099193, 0.213265, 0.119032, -0.119032, -0.178547, + 0.610037, 0.109112, 0.049596, -0.014879, -0.049596, -0.193426, 0.039677, + -0.148789, -0.114072, -0.158709, -0.158709, 0.094233, 0.099193, -0.114072, + 0.104153, -0.123991, 0.198386, -0.173588, 0.089274, -0.247982, -0.054556, + 0.123991, 0.183507, 0.114072, 0.188467, 0.302539, 0.044637, 0.039677, + -0.099193, 0.168628, -0.024798, -0.054556, -0.109112, 0.014879, -0.009919, + 0.069435, -0.396772, -0.287660, -0.079354, -0.104153, 0.054556, 0.089274, + -0.099193, 0.114072, 0.034718, 0.119032, 0.282700, -0.119032, -0.505884, + -0.233104, -0.114072, -0.257902, -0.233104, -0.178547, 0.153749, 0.128951, + 0.143830, -0.188467, -0.183507, 0.104153, -0.024798, 0.193426, -0.287660, + 0.168628, -0.009919, 0.119032, -0.024798, -0.099193, -0.203346, 0.099193, + 0.084314, -0.168628, 0.123991, -0.148789, 0.114072, -0.029758, 0.228144, + -0.238063, 0.089274, -0.064475, 0.307498, -0.188467, -0.004960, 
-0.252942, + -0.173588, -0.158709, -0.044637, -0.009919, 0.312458, -0.262861, 0.059516, + 0.158709, 0.069435, -0.282700, 0.074395, -0.322377, -0.183507, -0.123991, + -0.233104, 0.009919, 0.252942, -0.243023, 0.555481, -0.099193, -0.119032, + -0.441409, 0.148789, 0.084314, -0.168628, -0.183507, 0.188467, 0.024798, + -0.302539, 0.223184, 0.143830, -0.193426, -0.054556, -0.218225, -0.297579, + 0.104153, 0.272781, -0.034718, 0.114072, -0.059516, 0.044637, 0.342216, + 0.421570, 0.138870, -0.024798, -0.039677, -0.163668, -0.034718, 0.396772, + -0.128951, -0.044637, -0.173588, 0.302539, 0.079354, 0.049596, 0.133911, + -0.029758, -0.312458, -0.029758, 0.079354, 0.128951, 0.252942, 0.213265, + 0.014879, 0.287660, 0.178547, 0.297579, 0.352135, 0.401732, 0.024798, + -0.277740, -0.411651, -0.069435, 0.342216, -0.158709, -0.104153, -0.009919, + 0.223184, 0.228144, -0.019839, 0.059516, -0.104153, -0.510844, 0.029758, + -0.406691, 0.089274, 0.421570, 0.163668, -0.143830, -0.019839, -0.039677, + 0.104153, -0.044637, -0.128951, 0.203346, 0.079354, -0.069435, 0.094233, + -0.138870, 0.466207, -0.163668, 0.049596, 0.029758, 0.267821, 0.029758, + -0.049596, 0.009919, 0.004960, -0.099193, 0.094233, -0.262861, 0.089274, + -0.302539, 0.332297, -0.307498, -0.014879, 0.168628, -0.094233, -0.272781, + 0.034718, -0.133911, -0.228144, 0.094233, 0.257902, -0.228144, 0.153749, + -0.054556, -0.252942, 0.054556, 0.218225, -0.054556, 0.302539, 0.282700, + 0.054556, -0.044637, -0.133911, 0.233104, -0.049596, 0.411651, 0.044637, + -0.297579, -0.029758, -0.114072, 0.114072, -0.580279, 0.079354, -0.024798, + -0.347175, -0.128951, -0.099193, 0.238063, -0.104153, -0.009919, 0.158709, + -0.034718, 0.123991, -0.163668, 0.059516, 0.342216, 0.009919, 0.064475, + -0.307498, -0.520763, -0.238063, 0.163668, 0.362054, 0.034718, -0.178547, + -0.104153, -0.257902, 0.322377, 0.054556, 0.148789, -0.178547, 0.084314, + 0.004960, 0.257902, 0.029758, 0.079354, -0.223184, -0.193426, 0.282700, + 0.000000, -0.019839, 
-0.114072, 0.491005, -0.193426, -0.029758, -0.243023, + 0.009919, 0.089274, -0.277740, -0.089274, 0.104153, 0.337256, 0.138870, + -0.307498, -0.054556, 0.352135, 0.133911, -0.044637, 0.133911, -0.089274, + -0.357095, -0.272781, 0.069435, 0.059516, -0.109112, 0.148789, -0.044637, + -0.019839, -0.153749, 0.123991, -0.223184, 0.322377, 0.074395, -0.312458, + 0.024798, -0.223184, 0.109112, -0.138870, 0.218225, -0.074395, -0.406691, + 0.009919, -0.198386, -0.009919, 0.416611, 0.178547, 0.148789, 0.133911, + -0.004960, 0.069435, -0.054556, -0.044637, 0.297579, 0.059516, -0.456288, + -0.148789, -0.004960, 0.054556, 0.094233, -0.104153, 0.198386, -0.302539, + 0.133911, 0.411651, 0.054556, 0.525723, -0.089274, 0.079354, 0.238063, + 0.079354, -0.039677, 0.039677, 0.029758, 0.332297, -0.014879, -0.367014, + -0.143830, -0.123991, -0.064475, 0.014879, 0.173588, -0.168628, 0.386853, + 0.009919, 0.173588, 0.163668, 0.123991, 0.163668, 0.198386, 0.203346, + -0.401732, -0.009919, 0.272781, -0.173588, 0.044637, 0.238063, 0.133911, + 0.049596, 0.208305, -0.024798, 0.049596, -0.049596, 0.034718, -0.446368, + 0.466207, -0.089274, -0.099193, -0.128951, -0.228144, 0.014879, -0.252942, + 0.074395, -0.223184, -0.168628, -0.292619, 0.178547, 0.153749, -0.014879, + 0.054556, 0.000000, 0.193426, 0.158709, 0.178547, -0.327337, -0.138870, + -0.114072, 0.168628, 0.297579, -0.109112, -0.029758, -0.029758, -0.416611, + 0.059516, 0.000000, -0.168628, -0.322377, 0.238063, -0.128951, -0.029758, + 0.500925, 0.292619, 0.123991, -0.099193, 0.074395, 0.317418, -0.148789, + 0.064475, -0.104153, -0.044637, -0.094233, 0.188467, -0.044637, 0.213265, + -0.233104, -0.049596, 0.004960, -0.198386, 0.287660, -0.148789, -0.257902, + 0.004960, -0.218225, -0.044637, -0.386853, -0.243023, -0.163668, 0.094233, + 0.029758, -0.019839, -0.009919, -0.143830, -0.158709, 0.158709, -0.243023, + -0.039677, -0.297579, 0.069435, 0.049596, 0.302539, 0.059516, 0.074395, + -0.019839, 0.352135, -0.019839, -0.138870, -0.178547, 
-0.243023, 0.233104, + 0.252942, -0.228144, -0.049596, 0.173588, 0.173588, -0.074395, -0.034718, + -0.292619, 0.362054, 0.183507, 0.243023, -0.203346, -0.044637, 0.054556, + 0.059516, -0.158709, -0.158709, 0.000000, 0.327337, 0.119032, 0.034718, + -0.044637, -0.089274, 0.089274, -0.233104, 0.000000, -0.317418, 0.371974, + 0.213265, 0.307498, -0.178547, -0.367014, 0.039677, -0.059516, 0.168628, + -0.014879, 0.143830, 0.123991, -0.084314, -0.332297, -0.416611, 0.183507, + 0.109112, -0.039677, 0.014879, 0.292619, -0.213265, -0.054556, 0.004960, + 0.123991, 0.119032, 0.000000, -0.332297, -0.312458, -0.198386, -0.213265, + 0.119032, 0.322377, 0.168628, 0.104153, -0.262861, 0.327337, -0.049596, + -0.228144, -0.074395, 0.168628, 0.123991, 0.396772, 0.044637, 0.322377, + 0.193426, 0.267821, -0.178547, 0.297579, 0.148789, -0.218225, -0.138870, + 0.044637, 0.049596, 0.133911, 0.064475, 0.069435, 0.064475, -0.158709, + -0.044637, -0.173588, 0.267821, 0.327337, 0.079354, -0.228144, 0.029758, + 0.014879, 0.198386, -0.109112, -0.133911, 0.431490, 0.099193, 0.421570, + 0.233104, -0.054556, 0.054556, -0.317418, -0.133911, -0.123991, -0.287660, + 0.342216, -0.049596, -0.153749, 0.228144, -0.213265, 0.262861, 0.406691, + -0.084314, -0.004960, 0.193426, 0.188467, -0.099193, -0.223184, 0.163668, + -0.257902, -0.153749, 0.441409, 0.099193, 0.128951, -0.089274, -0.208305, + -0.009919, -0.004960, -0.109112, 0.024798, -0.119032, 0.019839, 0.391812, + -0.024798, 0.198386, 0.327337, -0.505884, -0.099193, 0.510844, -0.148789, + 0.094233, -0.153749, -0.039677, 0.352135, 0.272781, -0.228144, -0.287660, + -0.272781, 0.148789, 0.277740, 0.074395, 0.109112, -0.064475, 0.044637, + 0.074395, -0.292619, 0.153749, -0.064475, -0.114072, 0.198386, -0.039677, + -0.128951, -0.004960, 0.257902, -0.228144, -0.094233, 0.064475, 0.014879, + 0.188467, -0.416611, 0.099193, 0.362054, -0.208305, 0.198386, -0.079354, + 0.009919, 0.119032, 0.332297, 0.243023, -0.168628, 0.158709, 0.039677, + 0.143830, 0.277740, 
-0.168628, 0.009919, 0.099193, -0.004960, -0.257902, + -0.297579, 0.208305, -0.104153, 0.119032, 0.247982, 0.381893, -0.223184, + -0.367014, -0.327337, -0.168628, -0.094233, 0.208305, -0.019839, 0.183507, + 0.084314, 0.133911, 0.109112, -0.148789, -0.183507, -0.411651, -0.024798, + -0.114072, -0.029758, -0.009919, 0.173588, -0.059516, -0.049596, 0.039677, + 0.317418, 0.138870, -0.247982, -0.084314, 0.158709, 0.054556, -0.084314, + -0.049596, 0.074395, 0.019839, -0.282700, -0.119032, -0.262861, 0.163668, + -0.069435, -0.064475, -0.059516, 0.094233, 0.123991, -0.079354, -0.272781, + -0.267821, 0.233104, 0.114072, -0.218225, 0.540602, 0.089274, 0.262861, + 0.079354, 0.267821, -0.119032, -0.109112, -0.128951, 0.128951, -0.044637, + -0.272781, 0.277740, 0.297579, -0.054556, -0.084314, -0.049596, 0.123991, + 0.059516, 0.238063, -0.168628, -0.009919, 0.163668, -0.307498, 0.109112, + -0.064475, 0.218225, -0.168628, -0.004960, -0.168628, 0.119032, 0.094233, + -0.183507, -0.089274, -0.292619, -0.094233, 0.064475, -0.183507, -0.168628, + 0.089274, 0.074395, -0.367014, -0.024798, -0.069435, 0.119032, -0.302539, + -0.376933, -0.123991, -0.009919, -0.069435, -0.208305, -0.119032, 0.014879, + -0.183507, -0.238063, 0.163668, -0.332297, -0.148789, -0.391812, -0.024798, + -0.133911, -0.059516, -0.123991, 0.123991, -0.292619, -0.044637, 0.059516, + -0.069435, 0.049596, -0.069435, 0.034718, 0.158709, -0.347175, -0.044637, + 0.352135, -0.347175, -0.282700, -0.054556, 0.307498, 0.029758, 0.357095, + -0.148789, 0.208305, -0.317418, 0.009919, 0.004960, -0.243023, 0.049596, + -0.099193, 0.213265, -0.342216, 0.158709, 0.123991, -0.332297, 0.386853, + -0.262861, -0.208305, 0.123991, -0.044637, 0.148789, 0.084314, -0.297579, + -0.307498, -0.163668, 0.337256, -0.014879, 0.074395, 0.178547, -0.004960, + -0.257902, -0.019839, -0.228144, -0.034718, -0.277740, -0.158709, -0.119032, + -0.153749, 0.629876, 0.277740, 0.178547, -0.267821, -0.004960, 0.247982, + 0.084314, -0.094233, 0.000000, 
-0.039677, 0.332297, 0.178547, 0.009919, + -0.213265, -0.208305, -0.044637, 0.019839, 0.218225, -0.297579, 0.014879, + -0.247982, -0.004960, -0.128951, 0.421570, -0.059516, 0.362054, -0.203346, + -0.143830, -0.099193, -0.024798, 0.094233, -0.123991, 0.163668, 0.109112, + -0.104153, -0.233104, 0.009919, -0.218225, 0.376933, 0.104153, -0.059516, + 0.049596, -0.054556, 0.019839, -0.044637, -0.019839, 0.371974, -0.019839, + 0.104153, 0.168628, -0.024798, -0.272781, -0.158709, 0.223184, 0.044637, + 0.039677, -0.168628, -0.287660, -0.109112, 0.094233, -0.089274, -0.148789, + 0.178547, -0.039677, -0.089274, -0.049596, -0.024798, 0.064475, -0.158709, + 0.089274, 0.029758, -0.247982, 0.362054, 0.024798, -0.004960, -0.099193, + 0.173588, -0.059516, 0.188467, -0.629876, 0.094233, 0.371974, 0.069435, + 0.252942, -0.357095, -0.272781, -0.367014, 0.014879, -0.049596, -0.262861, + 0.009919, -0.094233, -0.094233, 0.059516, 0.223184, 0.133911, 0.411651, + -0.044637, -0.044637, 0.109112, 0.228144, 0.386853, -0.233104, 0.069435, + 0.228144, -0.302539, 0.029758, 0.089274, 0.044637, -0.238063, -0.138870, + -0.158709, -0.019839, 0.049596, 0.039677, 0.000000, -0.069435, 0.109112, + -0.213265, -0.188467, -0.262861, -0.267821, -0.094233, 0.133911, 0.391812, + 0.123991, -0.317418, 0.233104, -0.029758, -0.099193, -0.193426, 0.074395, + -0.009919, 0.252942, 0.322377, -0.530683, 0.208305, 0.252942, 0.203346, + -0.069435, -0.262861}; + +// Time filter of shape {64, 8}. 
+const float time_weights_data_16x1x1[] = { + -0.052026, 0.043107, 0.053512, 0.013378, 0.011892, -0.182834, -0.108511, + 0.153105, 0.050539, -0.173915, 0.145672, 0.208103, -0.221481, 0.108511, + -0.496475, 0.181347, -0.016351, -0.132294, -0.234859, -0.243778, 0.028243, + -0.228914, -0.130808, -0.167969, -0.041621, -0.306209, -0.193239, -0.028243, + -0.057972, -0.057972, -0.497962, 0.054999, 0.181347, 0.047566, -0.099592, + -0.111484, -0.130808, -0.071350, 0.380532, 0.010405, 0.041621, 0.052026, + 0.022297, 0.081755, 0.098106, 0.099592, -0.584176, -0.023783, 0.062431, + -0.090674, -0.279453, -0.486070, -0.273507, 0.004459, -0.062431, 0.095133, + 0.056485, 0.022297, -0.105538, -0.184320, 0.358235, 0.254183, 0.049053, + 0.084728, 0.218508, 0.078782, -0.136754, -0.017837, -0.124862, -0.118916, + -0.001486, 0.043107, 0.254183, 0.087701, 0.261616, 0.309182, -0.404315, + -0.040134, -0.046080, -0.052026, -0.034188, -0.475665, -0.025270, -0.049053, + -0.046080, -0.062431, 0.020810, 0.040134, -0.135267, -0.169456, -0.050539, + -0.576743, 0.034188, 0.075809, 0.101079, 0.136754, 0.083241, 0.077296, + -0.050539, 0.761064, -0.335938, -0.080268, 0.025270, 0.257156, 0.227427, + 0.252697, 0.065404, 0.115943, 0.222968, -0.026756, -0.054999, 0.107025, + -0.093646, 0.041621, -0.092160, -0.474178, -0.016351, 0.004459, 0.049053, + 0.019324, 0.019324, 0.074323, 0.038648, -0.613905, 0.182834, 0.075809, + 0.028243, 0.019324, 0.010405, -0.011892, 0.001486, -0.492016, -0.224454, + -0.474178, -0.147159, 0.002973, 0.102565, 0.136754, -0.267561, -0.001486, + -0.095133, -0.040134, 0.066890, 0.074323, 0.104052, 0.532150, 0.090674, + 0.072836, -0.053512, -0.004459, 0.020810, 0.046080, 0.062431, 0.477151, + 0.133781, -0.029729, -0.026756, 0.031215, 0.156077, 0.096619, 0.251210, + 0.352289, 0.657012, 0.047566, -0.014865, -0.072836, -0.016351, 0.008919, + -0.053512, 0.016351, 0.300263, 0.047566, 0.020810, 0.169456, 0.001486, + 0.007432, 0.111484, 0.044594, -0.188779, -0.096619, 0.074323, -0.040134, + 
0.160537, 0.138240, 0.184320, 0.377559, -0.092160, -0.049053, 0.056485, + -0.032702, 0.001486, -0.083241, -0.472692, -0.114457, -0.117430, -0.075809, + 0.026756, 0.163510, 0.172428, 0.127835, -0.199185, -0.218508, -0.057972, + -0.132294, -0.162023, -0.019324, -0.245265, -0.395396, -0.254183, 0.084728, + 0.248238, 0.191752, 0.221481, 0.173915, 0.173915, -0.208103, -0.077296, + 0.384991, -0.313641, -0.313641, -0.147159, -0.090674, 0.035675, 0.059458, + -0.010405, 0.019324, 0.087701, 0.016351, 0.037161, 0.469719, -0.074323, + 0.092160, 0.026756, 0.090674, 0.098106, 0.004459, -0.034188, 0.492016, + -0.367154, -0.093646, -0.063917, 0.041621, 0.017837, 0.026756, -0.062431, + -0.350803, 0.425125, 0.002973, 0.083241, 0.075809, 0.016351, 0.047566, + -0.185807, -0.107025, -0.098106, -0.144186, 0.255670, 0.020810, 0.105538, + 0.029729, 0.129321, 0.156077, 0.141213, 0.334452, 0.147159, -0.066890, + 0.035675, 0.115943, 0.240805, 0.328506, 0.162023, -0.237832, 0.218508, + 0.233373, 0.214049, 0.099592, 0.026756, -0.322560, -0.236346, -0.166483, + 0.225941, 0.109997, -0.147159, 0.147159, -0.266075, 0.111484, 0.078782, + -0.120403, 0.022297, -0.075809, -0.148645, -0.251210, -0.176888, -0.044594, + -0.023783, 0.016351, 0.026756, -0.013378, -0.069863, -0.112970, 0.013378, + 0.086214, 0.014865, 0.352289, -0.240805, -0.135267, -0.114457, -0.472692, + 0.334452, 0.095133, 0.047566, 0.130808, -0.068377, -0.007432, -0.130808, + -0.121889, -0.053512, -0.245265, -0.371613, -0.083241, 0.000000, -0.028243, + 0.029729, -0.093646, -0.004459, -0.038648, -0.108511, -0.475665, -0.169456, + -0.047566, -0.010405, -0.114457, -0.353776, -0.034188, -0.044594, 0.041621, + -0.047566, -0.107025, 0.004459, 0.053512, 0.047566, -0.358235, -0.193239, + 0.040134, -0.096619, -0.054999, 0.099592, 0.032702, 0.205130, -0.170942, + -0.237832, -0.405801, -0.126348, -0.072836, -0.203644, -0.169456, -0.093646, + -0.074323, 0.078782, 0.607959, -0.437017, -0.164996, -0.166483, 0.043107, + -0.016351, 0.258643, 0.065404, 
-0.057972, 0.017837, 0.080268, 0.050539, + -0.013378, -0.215536, -0.524718, 0.260129, 0.040134, -0.002973, -0.046080, + 0.020810, 0.025270, 0.145672, 0.515799, 0.233373, 0.011892, 0.139727, + 0.126348, 0.065404, -0.007432, -0.008919, 0.035675, 0.083241, 0.040134, + -0.005946, 0.503907, -0.490529, -0.181347, -0.092160, -0.038648, 0.019324, + 0.133781, -0.011892, 0.041621, 0.062431, -0.062431, -0.040134, -0.092160, + -0.111484, -0.133781, -0.130808, -0.484583, -0.248238, 0.037161, -0.092160, + -0.056485, -0.041621, 0.112970, 0.248238, 0.438503, 0.258643, -0.013378, + 0.004459, 0.043107, 0.040134, 0.017837, 0.101079, 0.264589, 0.212563, + 0.014865, 0.285399, 0.153105, 0.170942, 0.358235, 0.334452, 0.086214, + 0.132294, 0.098106, -0.001486, 0.107025, 0.200671, -0.026756, 0.344857, + 0.227427, -0.041621, 0.098106, 0.063917, -0.093646, 0.130808, 0.285399, + -0.319587, 0.035675, -0.017837, -0.319587, 0.016351, -0.098106, -0.017837, + 0.083241, 0.074323, -0.054999, 0.276480, 0.316614, -0.099592, -0.059458, + 0.156077, -0.043107, 0.035675, 0.056485, -0.022297, 0.017837, -0.001486, + 0.340398, 0.492016, 0.004459, 0.057972, -0.150132, -0.206617, -0.257156, + -0.248238, -0.080268, -0.164996, 0.352289, -0.054999, -0.056485, 0.010405, + -0.049053, -0.041621, -0.099592, 0.013378, -0.089187, 0.057972, -0.413234, + 0.217022, 0.013378, -0.080268, -0.035675, 0.035675, 0.007432, 0.002973, + -0.469719, 0.141213, 0.136754, 0.153105, 0.130808, -0.104052, -0.508367, + -0.291345, -0.072836, -0.019324, -0.252697, -0.214049, -0.214049, 0.130808, + 0.484583}; + +// Bias of shape {64} +const float bias_data_16x1x1[] = { + -0.245395, -0.083545, -0.262522, -0.407912, -0.560898, -0.364789, -0.037964, + -0.378594, 0.178152, 0.400380, -0.301349, -0.240913, -0.159454, -0.158757, + -0.073665, 0.455906, -0.061232, 0.318907, -0.226993, -0.344644, 0.140316, + 0.559608, 0.109774, 0.437391, 0.113849, -0.162068, 0.039572, 0.569472, + 0.460205, 0.113459, 0.370469, 0.176811, 0.203063, -0.296975, -0.271655, + 
0.059862, -0.159912, -0.077310, -0.338314, -0.195477, -0.256762, 0.233834, + 0.083172, 0.029040, -0.236288, -0.267054, -0.166627, 0.188319, -0.271391, + -0.222920, 0.106463, 0.263614, 0.384986, -0.125957, -0.095890, 0.363686, + -0.036990, -0.358884, -0.178254, 0.305596, 0.390088, -0.189437, 0.613409, + 0.399639}; + +// Activation state with shape {64, 8}. These initial values must be copied into +// a mutable activation state tensor. +const float initial_activation_state_data_16x1x1[] = { + -0.582275, -0.586623, -1.262373, -1.277279, -1.542175, -1.271999, -1.429757, + -1.184425, -0.462094, -1.443421, 0.230736, -0.494701, -0.354955, -2.534061, + -4.277471, -4.218467, 0.403711, -0.248748, -0.330111, -0.467683, 0.549047, + 0.733511, -0.230115, 0.793136, -1.126353, -0.984123, -0.081984, -0.222351, + 0.692830, 0.517060, 1.367958, 2.118860, -0.116766, -0.826365, -2.402700, + -2.313884, -2.898954, -2.076005, -2.405185, -2.755481, 0.329490, 0.085400, + -1.485966, -2.034702, -2.161405, -1.269515, -1.151818, -1.823841, 0.561469, + 1.109273, 1.693411, -0.082605, -0.069252, -1.225107, -1.330693, -1.411435, + 0.253406, -0.357439, -1.593415, -0.879779, -1.111136, 1.821357, 2.471952, + 1.236908, -4.014127, -2.810448, -2.944604, -1.930980, -1.566398, -0.838166, + -0.319242, 0.749349, 1.156476, 0.658670, 1.997437, 2.080663, 2.912618, + 2.677224, 2.642442, 2.796163, -0.272349, -0.473273, 3.120063, 2.747097, + 3.595510, 1.874150, 2.049919, 2.093396, -1.049959, 0.277939, -1.255541, + -1.052443, -1.810177, -0.883505, -0.538178, 0.524203, -1.017662, -0.269244, + 0.039129, -0.227941, -0.114592, -2.018243, -2.548968, -0.706804, 0.890959, + 0.102480, 0.349986, 0.405885, 1.287216, 0.756181, 0.319242, -0.641590, + -3.841774, -2.716042, -4.342065, -3.826557, -2.924729, -1.643724, -1.237839, + -0.597492, -1.954892, -1.215169, -1.528201, -1.018904, -0.863941, -0.293467, + 0.039439, 0.672023, 1.408019, 1.362679, 1.467644, 1.006171, 0.310236, + -0.249990, -1.048406, -0.752144, -1.831605, 
-1.058033, -1.096541, -0.293467, + 0.051551, 0.232600, 0.088816, 2.570395, 0.704009, 2.465120, 3.010751, + 2.139357, 0.630410, 1.006171, 1.545281, 1.486898, -1.162998, -2.344317, + -4.593918, -3.522842, -2.872247, -1.416714, -0.642521, -0.230115, 0.315205, + -0.368930, -0.162726, 0.396879, 0.505570, 0.534451, 0.554947, 1.270447, + 0.388805, 0.531967, -1.243119, -0.671713, -1.214859, -0.238189, 0.016459, + -1.164550, 0.609603, 3.293348, 2.600208, 1.454290, -1.034121, -1.760179, + -1.192500, -0.613951, 3.449553, 2.912618, 1.917937, 1.435968, 0.879158, + 1.118279, 0.102791, -0.502465, -0.239121, -0.092853, 1.786265, 1.943091, + 2.547104, 2.630641, 2.585302, 2.965411, -0.945615, -2.538720, -2.474126, + -1.088156, 0.056209, 0.864873, 0.170490, 0.457435, 0.545941, 0.752765, + 1.569503, 1.129459, 0.662086, -0.527929, -0.810838, -1.662978, 1.285042, + 1.653040, 4.130893, 2.961995, 4.147041, 3.256393, 3.881524, 2.522571, + -0.875431, -1.112378, 2.105817, 2.180970, 3.121926, 1.577577, 1.639376, + 2.906407, -0.142230, 0.421101, 2.212335, 2.311399, 3.993321, 3.651719, + 4.206666, 4.678387, -1.304917, -1.130701, -2.543067, -2.500212, -2.197118, + -1.197158, -0.949652, -0.282908, 0.320795, -1.543728, 1.290322, 1.788128, + 3.957297, 3.205774, 2.892432, 2.297114, 0.138814, -0.139435, 0.936920, + 0.344707, 0.723263, -1.772290, -3.138385, -2.287177, -2.405806, -1.859864, + -4.572801, -3.410424, -3.855748, -2.239663, -2.269786, -1.582857, 4.238342, + 3.858543, 2.499901, 1.087535, 0.290051, -0.026086, -0.880400, -2.602692, + -1.404292, 0.253096, -0.665502, -1.443421, -0.925119, -0.096580, 1.115484, + 1.846200, -1.604284, -1.244671, -0.464888, 0.326385, 0.168006, -0.262723, + -0.744691, 0.953379, -0.407127, -0.349986, -1.154302, 0.831023, 1.590931, + 2.538720, 2.063583, 3.697680, -0.752455, -1.293117, -1.330693, -1.869802, + -0.592523, 0.631652, 1.198089, -0.481347, 3.738983, 4.153252, 2.782499, + 2.244321, 0.709289, 1.650245, 1.700865, 0.385078, 2.192460, 2.610456, + 4.009780, 
3.492719, 2.574743, 2.116687, 1.856138, 1.205853, 2.722563, + 4.075305, 5.415935, 3.009198, 2.715421, 1.571056, 0.897170, -2.430339, + 0.749970, 0.425760, -0.302783, 0.817359, 1.031636, 1.913589, 2.686229, + 1.631923, -1.459259, -1.793097, -1.187531, -1.553355, -0.844998, -1.296843, + -1.805519, -0.486627, 0.909591, 2.082837, -1.473855, -2.456735, -3.851401, + -2.760139, -3.060438, -2.605487, -2.138735, -2.441519, -1.333177, -1.353984, + -0.245642, -0.588486, 0.033850, 2.084700, 0.076084, 0.690035, 0.747797, + 0.594697, -1.016109, -1.348083, -1.201195, -1.088466, 2.045571, 2.460772, + 0.717984, 0.041613, -0.721711, 1.134738, 2.322269, 1.112378, -0.307441, + -0.581033, -0.868599, -0.018633, 0.856488, 0.919839, 0.303094, -0.433213, + 0.811148, -0.508986, -1.060828, -1.227591, -1.566087, -1.117968, -1.385038, + -2.011101, -0.490353, -1.849616, -0.594697, -1.055859, 1.110205, 0.622646, + 0.145957, 0.359303, 1.012072, 0.774814, -0.400295, -1.484103, -2.007374, + -1.441247, -0.997787, -0.581033, -0.545941, -0.306510, 0.693451, 0.087264, + -0.227320, -1.211753, -1.532859, -1.688753, 0.065215, 0.134777, 0.608051, + -0.393152, -0.214588, -0.635689, -1.499320, 0.069562, -1.555839, -2.633126, + -2.966032, -1.550870, -0.101549, 0.874189, 0.436318, 0.299367, 2.289972, + 2.339659, 2.602071, 1.564535, 0.019254, -0.583207, -1.295912, -2.424749, + -1.221070, -1.175109, -0.577306, -0.102791, 1.877876, 2.568222, 2.173827, + 3.131243, 2.637784, 2.088737, 3.679047, 3.218506, 2.483442, 1.650556, + 1.363611, -0.027328, 1.486898, -0.721711, -3.684327, -3.006093, -3.777491, + -2.327548, -2.737470, -4.549510, -0.060867, 0.127635, 0.680408, 0.581344, + 0.320174, -0.403090, -0.838166, 0.293777, -0.995613, -0.165521, -0.419859, + 1.110515, 1.203679, 1.749931, 2.467294, 4.276539, 0.031055, -0.967664, + 1.167035, 1.865144, 3.221923, 3.248630, 4.121266, 4.187723, 0.749039, + -1.571056, 0.785994, 1.568572, 3.759479, 3.588678, 4.116608, 3.864444, + -0.290051, -0.271107, 0.375140, 0.537556, 
0.536314, 0.095959, 0.054656, 0.088816,
+    0.088816};
+
+// One output with shape {1, 64}
+const float golden_output_16x1x1[] = {
+    -0.087914, 1.145864,  -0.418088, -1.556392, -0.925298, 0.205252,  0.289119,
+    1.331180,  -0.218010, 0.963057,  -2.225886, 1.248478,  1.448983,  0.355467,
+    1.682174,  0.803739,  0.449738,  0.543566,  1.916269,  -2.975136, 0.222774,
+    0.241589,  -0.104216, 1.561748,  0.936818,  -0.089907, -0.520117, -0.870353,
+    1.606074,  0.895770,  0.521297,  -0.369994, -0.889351, -2.809309, 2.404628,
+    1.069754,  -0.195456, -1.105652, 1.272715,  -1.233177, 1.271416,  -1.691805,
+    -1.058125, -0.716227, 0.052540,  1.262483,  0.540555,  1.735760,  -0.539197,
+    -0.014367, -0.243002, 1.072254,  0.528985,  -0.731151, -1.262649, 2.338702,
+    -0.603093, 0.970736,  -3.567897, 0.035085,  -0.201711, -0.550400, 1.545573,
+    -1.805005};
+
+// One output with shape {1, 64}
+const float golden_output_relu_16x1x1[] = {
+    0.000000, 1.145864, 0.000000, 0.000000, 0.000000, 0.205252, 0.289119,
+    1.331180, 0.000000, 0.963057, 0.000000, 1.248478, 1.448983, 0.355467,
+    1.682174, 0.803739, 0.449738, 0.543566, 1.916269, 0.000000, 0.222774,
+    0.241589, 0.000000, 1.561748, 0.936818, 0.000000, 0.000000, 0.000000,
+    1.606074, 0.895770, 0.521297, 0.000000, 0.000000, 0.000000, 2.404628,
+    1.069754, 0.000000, 0.000000, 1.272715, 0.000000, 1.271416, 0.000000,
+    0.000000, 0.000000, 0.052540, 1.262483, 0.540555, 1.735760, 0.000000,
+    0.000000, 0.000000, 1.072254, 0.528985, 0.000000, 0.000000, 2.338702,
+    0.000000, 0.970736, 0.000000, 0.035085, 0.000000, 0.000000, 1.545573,
+    0.000000};
+
+// Runs the SVDF kernel once per input batch in `input_sequences_data` and
+// checks every produced output against `expected_output` within `tolerance`.
+// `tensors` must hold the 5 input tensors followed by the output tensor.
+template <typename T>
+void ValidateSVDFGoldens(const int batch_size, const int num_units,
+                         const int input_size, const int rank,
+                         TfLiteTensor* tensors, const int tensor_count,
+                         TfLiteFusedActivation activation,
+                         const T* input_sequences_data,
+                         const int input_sequences_len, T* output_data,
+                         const T* expected_output, float tolerance = 1e-5f) {
+  TfLiteSVDFParams params;
+  params.rank = rank;
+  params.activation = activation;
+
+  // Tensor indices: {input, feature weights, time weights, bias, state}.
+  int inputs_array_data[] = {5, 0, 1, 2, 3, 4};
+  TfLiteIntArray* inputs_array = IntArrayFromInts(inputs_array_data);
+
+  int outputs_array_data[] = {1, 5};
+  TfLiteIntArray* outputs_array = IntArrayFromInts(outputs_array_data);
+
+  const TFLMRegistration registration = Register_SVDF();
+  micro::KernelRunner runner(registration, tensors, tensor_count, inputs_array,
+                             outputs_array, &params);
+
+  TfLiteStatus init_and_prepare_status = runner.InitAndPrepare();
+  TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, init_and_prepare_status);
+  TF_LITE_MICRO_EXPECT(runner.ValidateTempBufferDeallocated());
+
+  // Abort early to make it clear init and prepare failed.
+  if (init_and_prepare_status != kTfLiteOk) {
+    return;
+  }
+
+  int num_inputs = input_sequences_len / (input_size * batch_size);
+
+  for (int i = 0; i < num_inputs; ++i) {
+    const T* input_batch_start =
+        input_sequences_data + i * input_size * batch_size;
+
+    memcpy(tensors[0].data.raw, input_batch_start, tensors[0].bytes);
+    TfLiteStatus status = runner.Invoke();
+    TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, status);
+
+    // Only validate outputs when invoke has succeeded.
+    if (status == kTfLiteOk) {
+      int output_idx = 0;
+      int golden_idx = i * batch_size * num_units;
+      for (int j = golden_idx; j < golden_idx + batch_size * num_units; ++j) {
+        TF_LITE_MICRO_EXPECT_NEAR(expected_output[j], output_data[output_idx],
+                                  tolerance);
+        output_idx++;
+      }
+    }
+  }
+  TF_LITE_MICRO_EXPECT(runner.ValidateTempBufferDeallocated());
+}
+
+// Builds the float tensors for one SVDF test case and delegates the
+// golden-value check to ValidateSVDFGoldens.
+void TestSVDF(const int batch_size, const int num_units, const int input_size,
+              const int memory_size, const int rank,
+              TfLiteFusedActivation activation, float* input_data,
+              const float* feature_weights_data, const float* time_weights_data,
+              float* activation_state_data, const float* bias_data,
+              float* scratch_data, float* output_data,
+              const float* input_sequences_data, int input_sequences_len,
+              const float* expected_output, float tolerance = 1e-5f) {
+  const int num_filters = num_units * rank;
+
+  int input_dims_arg[] = {2, batch_size, input_size};
+  TfLiteIntArray* input_dims = IntArrayFromInts(input_dims_arg);
+
+  int feature_weights_dims_args[] = {2, num_filters, input_size};
+  TfLiteIntArray* feature_weights_dims =
+      IntArrayFromInts(feature_weights_dims_args);
+
+  int time_weights_dims_args[] = {2, num_filters, memory_size};
+  TfLiteIntArray* time_weights_dims = IntArrayFromInts(time_weights_dims_args);
+
+  int activation_state_dims_args[] = {2, batch_size,
+                                      memory_size * num_filters};
+  TfLiteIntArray* activation_state_dims =
+      IntArrayFromInts(activation_state_dims_args);
+
+  int bias_dims_args[] = {1, num_units};
+  TfLiteIntArray* bias_dims = IntArrayFromInts(bias_dims_args);
+
+  int output_dims_args[] = {2, batch_size, num_units};
+  TfLiteIntArray* output_dims = IntArrayFromInts(output_dims_args);
+
+  const int tensor_count = 6;  // 5 inputs, 1 output
+  TfLiteTensor tensors[] = {
+      CreateTensor(input_data, input_dims),
+      CreateTensor(feature_weights_data, feature_weights_dims),
+      CreateTensor(time_weights_data, time_weights_dims),
+      CreateTensor(bias_data, bias_dims),
+      CreateTensor(activation_state_data, activation_state_dims,
+                   /*is_variable=*/true),
+      CreateTensor(output_data, output_dims),
+  };
+
+  ValidateSVDFGoldens(batch_size, num_units, input_size, rank, tensors,
+                      tensor_count, activation, input_sequences_data,
+                      input_sequences_len, output_data, expected_output,
+                      tolerance);
+}
+
+// The pattern to this method's arguments is:
+// <kernel parameters>
+// for each tensor in
+//     {input, feature weights, time weights, bias, activation state, output}:
+//   <float data> <quantized buffer> <quantization parameters>
+//
+// Template parameter sets type of both time_weights and activation_state.
+template <typename T>
+inline void TestIntegerSVDF(
+    const int batch_size, const int num_units, const int input_size,
+    const int memory_size, const int rank, TfLiteFusedActivation activation,
+    int8_t* input_quantized, float input_scale, int input_zero_point,
+    const float* feature_weights_data, int8_t* feature_weights_quantized,
+    const float feature_weights_scale, const float* time_weights_data,
+    T* time_weights_quantized, float time_weights_scale, const float* bias_data,
+    int32_t* bias_quantized, const float* initial_activation_state_data,
+    T* activation_state_quantized, float activation_state_scale,
+    int activation_state_zero_point, int8_t* output_data, float output_scale,
+    int output_zero_point, const float* input_sequences_data,
+    int8_t* input_sequences_quantized, const int input_sequences_len,
+    const float* golden_output, int8_t* golden_output_quantized,
+    int golden_output_len) {
+  const int num_filters = num_units * rank;
+
+  int input_dims_arg[] = {2, batch_size, input_size};
+  TfLiteIntArray* input_dims = IntArrayFromInts(input_dims_arg);
+
+  int feature_weights_dims_args[] = {2, num_filters, input_size};
+  TfLiteIntArray* feature_weights_dims =
+      IntArrayFromInts(feature_weights_dims_args);
+
+  int time_weights_dims_args[] = {2, num_filters, memory_size};
+  TfLiteIntArray* time_weights_dims = IntArrayFromInts(time_weights_dims_args);
+
+  int bias_dims_data[] = {1, num_units};
+  TfLiteIntArray* bias_dims =
IntArrayFromInts(bias_dims_data); + + int activation_state_dims_args[] = {2, batch_size, memory_size * num_filters}; + TfLiteIntArray* activation_state_dims = + IntArrayFromInts(activation_state_dims_args); + + int output_dims_args[] = {2, batch_size, num_units}; + TfLiteIntArray* output_dims = IntArrayFromInts(output_dims_args); + + const int tensor_count = 6; // 5 inputs, 1 output + + TfLiteTensor tensors[] = { + CreateQuantizedTensor(input_quantized, input_dims, input_scale, + input_zero_point), + CreateQuantizedTensor(feature_weights_data, feature_weights_quantized, + feature_weights_dims, feature_weights_scale, 0), + CreateQuantizedTensor(time_weights_data, time_weights_quantized, + time_weights_dims, time_weights_scale, 0), + CreateQuantizedBiasTensor(bias_data, bias_quantized, bias_dims, + time_weights_scale, activation_state_scale), + CreateQuantizedTensor(initial_activation_state_data, + activation_state_quantized, activation_state_dims, + activation_state_scale, 0, + /*is_variable=*/true), + CreateQuantizedTensor(output_data, output_dims, output_scale, + output_zero_point)}; + + tflite::Quantize(golden_output, golden_output_quantized, golden_output_len, + output_scale, output_zero_point); + tflite::Quantize(input_sequences_data, input_sequences_quantized, + input_sequences_len, input_scale, input_zero_point); + + ValidateSVDFGoldens(batch_size, num_units, input_size, rank, tensors, + tensor_count, activation, input_sequences_quantized, + input_sequences_len, output_data, golden_output_quantized, + /*tolerance*/ 1); +} + +// Template parameter sets type of both time_weights and activation_state. 
+template +void SvdfQuantized2x2Input2x4OutputShouldMatchGolden() { + constexpr int batch_size = 2; + constexpr int num_units = 4; + constexpr int input_size = 2; + constexpr int memory_size = 10; + constexpr int rank = 2; + constexpr int num_filters = num_units * rank; + + const int input_size_dims_count = batch_size * input_size; + + const int activation_state_dims_count = + batch_size * memory_size * num_filters; + + const int output_dims_count = batch_size * num_units; + int8_t output_data[output_dims_count]; + + float input_scale = 2.5 / std::numeric_limits::max(); + float feature_weights_scale = 1.0 / std::numeric_limits::max(); + float time_weights_scale = 1.0 / std::numeric_limits::max(); + float activation_state_scale = 1.49 / std::numeric_limits::max(); + float output_scale = 1.0 / std::numeric_limits::max(); + + int input_zero_point = 0; + int output_zero_point = 0; + + int8_t input_quantized[input_size_dims_count]; + int8_t input_sequences_quantized[sizeof(tflite::testing::input_data_2x2x10) / + sizeof(float)]; + int8_t feature_weights_quantized + [sizeof(tflite::testing::feature_weights_data_2x2x10) / sizeof(float)]; + T time_weights_quantized[sizeof(tflite::testing::time_weights_data_2x2x10) / + sizeof(float)]; + T activation_state_quantized[activation_state_dims_count]; + int32_t + bias_quantized[sizeof(tflite::testing::bias_data_2x2x10) / sizeof(float)]; + int8_t golden_quantized[sizeof(tflite::testing::golden_output_2x2x10) / + sizeof(float)]; + + tflite::testing::TestIntegerSVDF( + batch_size, num_units, input_size, memory_size, rank, kTfLiteActRelu, + input_quantized, input_scale, input_zero_point, + tflite::testing::feature_weights_data_2x2x10, feature_weights_quantized, + feature_weights_scale, tflite::testing::time_weights_data_2x2x10, + time_weights_quantized, time_weights_scale, + tflite::testing::bias_data_2x2x10, bias_quantized, + tflite::testing::initial_activation_state_data_2x2x10, + activation_state_quantized, activation_state_scale, 
0, output_data, + output_scale, output_zero_point, tflite::testing::input_data_2x2x10, + input_sequences_quantized, + sizeof(tflite::testing::input_data_2x2x10) / sizeof(float), + tflite::testing::golden_output_2x2x10, golden_quantized, + sizeof(tflite::testing::golden_output_2x2x10) / sizeof(float)); +} + +// Template parameter sets type of both time_weights and activation_state. +template +void SvdfQuantized1x16Input64x1OutputShouldMatchGolden() { + constexpr int batch_size = 1; + constexpr int num_units = 64; + constexpr int input_size = 16; + constexpr int memory_size = 8; + constexpr int rank = 1; + constexpr int num_filters = num_units * rank; + constexpr int activation_state_dims_count = + batch_size * memory_size * num_filters; + constexpr int output_dims_count = batch_size * num_units; + constexpr int input_dims_count = batch_size * input_size; + + int8_t output_data[output_dims_count]; + + float input_scale = 0.10075444; + float feature_weights_scale = 0.00649388; + float time_weights_scale = tflite::testing::ScaleFromMinMax(-.81, .81); + float activation_state_scale = + tflite::testing::ScaleFromMinMax(-17.73, 17.73); + int activation_state_zero_point = + tflite::testing::ZeroPointFromMinMax(-17.73, 17.73); + float output_scale = 0.051445257; + + int input_zero_point = 2; + int output_zero_point = 0; + + int8_t input_quantized[input_dims_count]; + int8_t input_sequences_quantized[sizeof(tflite::testing::input_data_16x1x1) / + sizeof(float)]; + int8_t feature_weights_quantized + [sizeof(tflite::testing::feature_weights_data_16x1x1) / sizeof(float)]; + T time_weights_quantized[sizeof(tflite::testing::time_weights_data_16x1x1) / + sizeof(float)]; + T activation_state_quantized[activation_state_dims_count]; + int32_t + bias_quantized[sizeof(tflite::testing::bias_data_16x1x1) / sizeof(float)]; + int8_t golden_quantized[sizeof(tflite::testing::golden_output_16x1x1) / + sizeof(float)]; + + tflite::testing::TestIntegerSVDF( + batch_size, num_units, input_size, 
memory_size, rank, kTfLiteActNone, + input_quantized, input_scale, input_zero_point, + tflite::testing::feature_weights_data_16x1x1, feature_weights_quantized, + feature_weights_scale, tflite::testing::time_weights_data_16x1x1, + time_weights_quantized, time_weights_scale, + tflite::testing::bias_data_16x1x1, bias_quantized, + tflite::testing::initial_activation_state_data_16x1x1, + activation_state_quantized, activation_state_scale, + activation_state_zero_point, output_data, output_scale, output_zero_point, + tflite::testing::input_data_16x1x1, input_sequences_quantized, + sizeof(tflite::testing::input_data_16x1x1) / sizeof(float), + tflite::testing::golden_output_16x1x1, golden_quantized, + sizeof(tflite::testing::golden_output_16x1x1) / sizeof(float)); +} + +template +void SvdfQuantized1x16Input64x1OutputReluShouldMatchGolden() { + constexpr int batch_size = 1; + constexpr int num_units = 64; + constexpr int input_size = 16; + constexpr int memory_size = 8; + constexpr int rank = 1; + constexpr int num_filters = num_units * rank; + constexpr int activation_state_dims_count = + batch_size * memory_size * num_filters; + constexpr int output_dims_count = batch_size * num_units; + constexpr int input_dims_count = batch_size * input_size; + + int8_t output_data[output_dims_count]; + + float input_scale = 0.10075444; + float feature_weights_scale = 0.00649388; + float time_weights_scale = tflite::testing::ScaleFromMinMax(-.81, .81); + float activation_state_scale = + tflite::testing::ScaleFromMinMax(-17.73, 17.73); + int activation_state_zero_point = + tflite::testing::ZeroPointFromMinMax(-17.73, 17.73); + float output_scale = 0.051445257; + + int input_zero_point = 2; + int output_zero_point = -128; + + int8_t input_quantized[input_dims_count]; + int8_t input_sequences_quantized[sizeof(tflite::testing::input_data_16x1x1) / + sizeof(float)]; + int8_t feature_weights_quantized + [sizeof(tflite::testing::feature_weights_data_16x1x1) / sizeof(float)]; + T 
time_weights_quantized[sizeof(tflite::testing::time_weights_data_16x1x1) / + sizeof(float)]; + T activation_state_quantized[activation_state_dims_count]; + int32_t + bias_quantized[sizeof(tflite::testing::bias_data_16x1x1) / sizeof(float)]; + int8_t golden_quantized[sizeof(tflite::testing::golden_output_relu_16x1x1) / + sizeof(float)]; + + tflite::testing::TestIntegerSVDF( + batch_size, num_units, input_size, memory_size, rank, kTfLiteActRelu, + input_quantized, input_scale, input_zero_point, + tflite::testing::feature_weights_data_16x1x1, feature_weights_quantized, + feature_weights_scale, tflite::testing::time_weights_data_16x1x1, + time_weights_quantized, time_weights_scale, + tflite::testing::bias_data_16x1x1, bias_quantized, + tflite::testing::initial_activation_state_data_16x1x1, + activation_state_quantized, activation_state_scale, + activation_state_zero_point, output_data, output_scale, output_zero_point, + tflite::testing::input_data_16x1x1, input_sequences_quantized, + sizeof(tflite::testing::input_data_16x1x1) / sizeof(float), + tflite::testing::golden_output_relu_16x1x1, golden_quantized, + sizeof(tflite::testing::golden_output_relu_16x1x1) / sizeof(float)); +} + +} // namespace +} // namespace testing +} // namespace tflite + +TF_LITE_MICRO_TESTS_BEGIN + +TF_LITE_MICRO_TEST(SvdfFloat2x2Input2x4OutputShouldMatchGolden) { + constexpr int batch_size = 2; + constexpr int num_units = 4; + constexpr int input_size = 2; + constexpr int memory_size = 10; + constexpr int rank = 2; + constexpr int num_filters = num_units * rank; + + const int input_size_dims_count = batch_size * input_size; + float input_data[input_size_dims_count]; + + const int activation_state_dims_count = + batch_size * memory_size * num_filters; + float activation_state_data[activation_state_dims_count]; + + memcpy(activation_state_data, + tflite::testing::initial_activation_state_data_2x2x10, + sizeof(tflite::testing::initial_activation_state_data_2x2x10)); + + const int 
scratch_dims_count = batch_size * num_filters; + float scratch_data[scratch_dims_count]; + + const int output_dims_count = batch_size * num_units; + float output_data[output_dims_count]; + + tflite::testing::TestSVDF( + batch_size, num_units, input_size, memory_size, rank, kTfLiteActNone, + input_data, tflite::testing::feature_weights_data_2x2x10, + tflite::testing::time_weights_data_2x2x10, activation_state_data, + tflite::testing::bias_data_2x2x10, scratch_data, output_data, + tflite::testing::input_data_2x2x10, + sizeof(tflite::testing::input_data_2x2x10) / sizeof(float), + tflite::testing::golden_output_2x2x10); +} + +// Only reference kernels support full int8 svdf currently. +#if !defined(HEXAGON) +TF_LITE_MICRO_TEST(SvdfQuantized2x2Input2x4OutputShouldMatchGoldenInt8) { + tflite::testing::SvdfQuantized2x2Input2x4OutputShouldMatchGolden(); +} +#endif + +TF_LITE_MICRO_TEST(SvdfQuantized2x2Input2x4OutputShouldMatchGoldenInt16) { + tflite::testing::SvdfQuantized2x2Input2x4OutputShouldMatchGolden(); +} + +TF_LITE_MICRO_TEST(SvdfFloat1x16Input64x1OutputShouldMatchGolden) { + constexpr int batch_size = 1; + constexpr int num_units = 64; + constexpr int input_size = 16; + constexpr int memory_size = 8; + constexpr int rank = 1; + constexpr int num_filters = num_units * rank; + constexpr int activation_state_dims_count = + batch_size * memory_size * num_filters; + constexpr int output_dims_count = batch_size * num_units; + constexpr int input_dims_count = batch_size * input_size; + + float input_data[input_dims_count]; + float output_data[output_dims_count]; + float scratch_buffer[batch_size * num_filters]; + float activation_state_data_mutable[activation_state_dims_count]; + + // Initialize activation state to starting values. 
+ memcpy(activation_state_data_mutable, + tflite::testing::initial_activation_state_data_16x1x1, + sizeof(tflite::testing::initial_activation_state_data_16x1x1)); + + tflite::testing::TestSVDF( + batch_size, num_units, input_size, memory_size, rank, kTfLiteActNone, + input_data, tflite::testing::feature_weights_data_16x1x1, + tflite::testing::time_weights_data_16x1x1, activation_state_data_mutable, + tflite::testing::bias_data_16x1x1, scratch_buffer, output_data, + tflite::testing::input_data_16x1x1, input_size, + tflite::testing::golden_output_16x1x1); +} + +TF_LITE_MICRO_TEST(SvdfFloat1x16Input64x1OutputReluShouldMatchGolden) { + constexpr int batch_size = 1; + constexpr int num_units = 64; + constexpr int input_size = 16; + constexpr int memory_size = 8; + constexpr int rank = 1; + constexpr int num_filters = num_units * rank; + constexpr int activation_state_dims_count = + batch_size * memory_size * num_filters; + constexpr int output_dims_count = batch_size * num_units; + constexpr int input_dims_count = batch_size * input_size; + + float input_data[input_dims_count]; + float output_data[output_dims_count]; + float scratch_buffer[batch_size * num_filters]; + float activation_state_data_mutable[activation_state_dims_count]; + + // Initialize activation state to starting values. + memcpy(activation_state_data_mutable, + tflite::testing::initial_activation_state_data_16x1x1, + sizeof(tflite::testing::initial_activation_state_data_16x1x1)); + + tflite::testing::TestSVDF( + batch_size, num_units, input_size, memory_size, rank, kTfLiteActRelu, + input_data, tflite::testing::feature_weights_data_16x1x1, + tflite::testing::time_weights_data_16x1x1, activation_state_data_mutable, + tflite::testing::bias_data_16x1x1, scratch_buffer, output_data, + tflite::testing::input_data_16x1x1, input_size, + tflite::testing::golden_output_relu_16x1x1); +} + +// Only reference kernels support full int8 svdf currently. 
+#if !defined(HEXAGON) +TF_LITE_MICRO_TEST(SvdfQuantized1x16Input64x1OutputShouldMatchGoldenInt8) { + tflite::testing::SvdfQuantized1x16Input64x1OutputShouldMatchGolden(); +} +#endif + +TF_LITE_MICRO_TEST(SvdfQuantized1x16Input64x1OutputShouldMatchGoldenInt16) { + tflite::testing::SvdfQuantized1x16Input64x1OutputShouldMatchGolden(); +} + +// Only reference kernels support full int8 svdf currently. +#if !defined(HEXAGON) +TF_LITE_MICRO_TEST(SvdfQuantized1x16Input64x1OutputReluShouldMatchGoldenInt8) { + tflite::testing::SvdfQuantized1x16Input64x1OutputReluShouldMatchGolden< + int8_t>(); +} +#endif + +TF_LITE_MICRO_TEST(SvdfQuantized1x16Input64x1OutputReluShouldMatchGoldenInt16) { + tflite::testing::SvdfQuantized1x16Input64x1OutputReluShouldMatchGolden< + int16_t>(); +} + +TF_LITE_MICRO_TESTS_END diff --git a/tensorflow/lite/micro/kernels/tanh.cc b/tensorflow/lite/micro/kernels/tanh.cc new file mode 100644 index 0000000..d20863b --- /dev/null +++ b/tensorflow/lite/micro/kernels/tanh.cc @@ -0,0 +1,199 @@ +/* Copyright 2023 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/ + +#include "tensorflow/lite/kernels/internal/reference/integer_ops/tanh.h" + +#include "tensorflow/lite/c/builtin_op_data.h" +#include "tensorflow/lite/c/common.h" +#include "tensorflow/lite/kernels/internal/common.h" +#include "tensorflow/lite/kernels/internal/quantization_util.h" +#include "tensorflow/lite/kernels/internal/reference/tanh.h" +#include "tensorflow/lite/kernels/internal/tensor_ctypes.h" +#include "tensorflow/lite/kernels/kernel_util.h" +#include "tensorflow/lite/kernels/op_macros.h" +#include "tensorflow/lite/micro/kernels/kernel_util.h" +#include "tensorflow/lite/micro/micro_log.h" +#include "tensorflow/lite/micro/micro_utils.h" + +namespace tflite { + +namespace { + +constexpr int kInputTensor = 0; +constexpr int kOutputTensor = 0; + +struct OpData { + int32_t input_zero_point; + int32_t input_range_radius; + int32_t input_multiplier; + int input_left_shift; +}; + +void* TanhInit(TfLiteContext* context, const char* buffer, size_t length) { + TFLITE_DCHECK(context->AllocatePersistentBuffer != nullptr); + return context->AllocatePersistentBuffer(context, sizeof(OpData)); +} + +TfLiteStatus CalculateArithmeticOpData(TfLiteContext* context, TfLiteNode* node, + OpData* data) { + MicroContext* micro_context = GetMicroContext(context); + TF_LITE_ENSURE_EQ(context, NumInputs(node), 1); + TF_LITE_ENSURE_EQ(context, NumOutputs(node), 1); + TfLiteTensor* input = + micro_context->AllocateTempInputTensor(node, kInputTensor); + TF_LITE_ENSURE(context, input != nullptr); + TfLiteTensor* output = + micro_context->AllocateTempOutputTensor(node, kOutputTensor); + TF_LITE_ENSURE(context, output != nullptr); + + TF_LITE_ENSURE_TYPES_EQ(context, input->type, output->type); + + if (input->type == kTfLiteInt8) { + static constexpr int kInputIntegerBits = 4; + const double input_real_multiplier = + static_cast(input->params.scale) * + static_cast(1 << (31 - kInputIntegerBits)); + + const 
double q = std::frexp(input_real_multiplier, &data->input_left_shift); + data->input_multiplier = static_cast(TfLiteRound(q * (1ll << 31))); + + data->input_range_radius = + CalculateInputRadius(kInputIntegerBits, data->input_left_shift, 31); + } + + if (input->type == kTfLiteInt16) { + static constexpr int kInputIntegerBits = 3; + static constexpr int kOutputFractionalBits = 15; + + // These operators are implemented in fixed-point arithmetic, + // which intrinsically wants symmetric ranges (zero_point==0) + // and power-of-two scales (power-of-two is abbreviated below as POT). + // While more general support would be possible by means of rescaling, + // that would add some overhead and some loss of accuracy and wouldn't + // be used at the moment as current quantized LSTM applications are + // happy with symmetric, power-of-two-scales quantization. So we just + // implement that narrow case only for now. + + TF_LITE_ENSURE_EQ(context, input->params.zero_point, 0); + TF_LITE_ENSURE_EQ(context, output->params.zero_point, 0); + + int input_scale_log2_rounded; + bool param_scale_pot = + CheckedLog2(input->params.scale, &input_scale_log2_rounded); + + data->input_left_shift = + (15 - kInputIntegerBits) + input_scale_log2_rounded; + param_scale_pot &= + (data->input_left_shift == 0 || data->input_left_shift == 1); + + if (param_scale_pot) { + data->input_multiplier = 0; + } else { + // Calculate multiplier to change input scale to 1/(3*4096) + // as required by the table lookup. + // The number 3.0 in the multiplier comes from here, + // because the interval is [-10.7, 10.7] instead of [-8, 8]. + // So, in this scaling +/-2^17 represents +/-10.7. 
+ + double multiplier = + static_cast(input->params.scale) * 4096.0 * 3.0; + data->input_left_shift = 0; + + while (multiplier <= 32767.0 / 2.0 && data->input_left_shift <= 30) { + data->input_left_shift++; + multiplier = multiplier * 2.0; + } + + data->input_multiplier = static_cast(multiplier); + } + + int output_scale_log2_rounded; + TF_LITE_ENSURE( + context, CheckedLog2(output->params.scale, &output_scale_log2_rounded)); + TF_LITE_ENSURE_EQ(context, output_scale_log2_rounded, + -kOutputFractionalBits); + } + + micro_context->DeallocateTempTfLiteTensor(input); + micro_context->DeallocateTempTfLiteTensor(output); + return kTfLiteOk; +} + +TfLiteStatus TanhPrepare(TfLiteContext* context, TfLiteNode* node) { + TFLITE_DCHECK(node->user_data != nullptr); + + OpData* data = static_cast(node->user_data); + + MicroContext* micro_context = GetMicroContext(context); + TfLiteTensor* input = + micro_context->AllocateTempInputTensor(node, kInputTensor); + TF_LITE_ENSURE(context, input != nullptr); + data->input_zero_point = input->params.zero_point; + TF_LITE_ENSURE_OK(context, CalculateArithmeticOpData(context, node, data)); + + micro_context->DeallocateTempTfLiteTensor(input); + return kTfLiteOk; +} + +TfLiteStatus TanhEval(TfLiteContext* context, TfLiteNode* node) { + const TfLiteEvalTensor* input = + tflite::micro::GetEvalInput(context, node, kInputTensor); + TfLiteEvalTensor* output = + tflite::micro::GetEvalOutput(context, node, kOutputTensor); + + TFLITE_DCHECK(node->user_data != nullptr); + const OpData& data = *(static_cast(node->user_data)); + + switch (input->type) { + case kTfLiteFloat32: { + reference_ops::Tanh(tflite::micro::GetTensorShape(input), + tflite::micro::GetTensorData(input), + tflite::micro::GetTensorShape(output), + tflite::micro::GetTensorData(output)); + return kTfLiteOk; + } break; + case kTfLiteInt16: { + reference_integer_ops::Tanh( + data.input_multiplier, data.input_left_shift, + tflite::micro::GetTensorShape(input), + 
tflite::micro::GetTensorData(input), + tflite::micro::GetTensorShape(output), + tflite::micro::GetTensorData(output)); + return kTfLiteOk; + } break; + case kTfLiteInt8: { + reference_integer_ops::Tanh( + data.input_zero_point, data.input_range_radius, data.input_multiplier, + data.input_left_shift, tflite::micro::GetTensorShape(input), + tflite::micro::GetTensorData(input), + tflite::micro::GetTensorShape(output), + tflite::micro::GetTensorData(output)); + return kTfLiteOk; + } break; + default: + MicroPrintf("Input %s, output %s not supported.", + TfLiteTypeGetName(input->type), + TfLiteTypeGetName(output->type), context); + return kTfLiteError; + } +} + +} // namespace + +TFLMRegistration Register_TANH() { + return tflite::micro::RegisterOp(TanhInit, TanhPrepare, TanhEval); +} + +} // namespace tflite diff --git a/tensorflow/lite/micro/kernels/tanh_test.cc b/tensorflow/lite/micro/kernels/tanh_test.cc new file mode 100644 index 0000000..00eb655 --- /dev/null +++ b/tensorflow/lite/micro/kernels/tanh_test.cc @@ -0,0 +1,312 @@ +/* Copyright 2023 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/ + +#include "tensorflow/lite/c/builtin_op_data.h" +#include "tensorflow/lite/c/common.h" +#include "tensorflow/lite/micro/kernels/kernel_runner.h" +#include "tensorflow/lite/micro/test_helpers.h" +#include "tensorflow/lite/micro/testing/micro_test.h" + +namespace tflite { +namespace testing { +namespace { + +constexpr int tanh_vec_size = 90; + +const float tanh_input_vec_fp[tanh_vec_size] = { + -8.0000000000, -7.8181818182, -7.6363636364, -7.4545454545, -7.2727272727, + -7.0909090909, -6.9090909091, -6.7272727273, -6.5454545455, -6.3636363636, + -6.1818181818, -6.0000000000, -5.8181818182, -5.6363636364, -5.4545454545, + -5.2727272727, -5.0909090909, -4.9090909091, -4.7272727273, -4.5454545455, + -4.3636363636, -4.1818181818, -4.0000000000, -3.8181818182, -3.6363636364, + -3.4545454545, -3.2727272727, -3.0909090909, -2.9090909091, -2.7272727273, + -2.5454545455, -2.3636363636, -2.1818181818, -2.0000000000, -1.8181818182, + -1.6363636364, -1.4545454545, -1.2727272727, -1.0909090909, -0.9090909091, + -0.7272727273, -0.5454545455, -0.3636363636, -0.1818181818, 0.0000000000, + 0.1818181818, 0.3636363636, 0.5454545455, 0.7272727273, 0.9090909091, + 1.0909090909, 1.2727272727, 1.4545454545, 1.6363636364, 1.8181818182, + 2.0000000000, 2.1818181818, 2.3636363636, 2.5454545455, 2.7272727273, + 2.9090909091, 3.0909090909, 3.2727272727, 3.4545454545, 3.6363636364, + 3.8181818182, 4.0000000000, 4.1818181818, 4.3636363636, 4.5454545455, + 4.7272727273, 4.9090909091, 5.0909090909, 5.2727272727, 5.4545454545, + 5.6363636364, 5.8181818182, 6.0000000000, 6.1818181818, 6.3636363636, + 6.5454545455, 6.7272727273, 6.9090909091, 7.0909090909, 7.2727272727, + 7.4545454545, 7.6363636364, 7.8181818182, 8.0000000000}; + +const float tanh_output_vec_fp[tanh_vec_size] = { + -0.9999997749, -0.9999996762, -0.9999995342, -0.9999993300, -0.9999990361, + -0.9999986134, -0.9999980053, -0.9999971306, -0.9999958722, 
-0.9999940619, + -0.9999914578, -0.9999877117, -0.9999823226, -0.9999745703, -0.9999634183, + -0.9999473758, -0.9999242982, -0.9998911009, -0.9998433469, -0.9997746542, + -0.9996758446, -0.9995337191, -0.9993292997, -0.9990353053, -0.9986125310, + -0.9980046622, -0.9971308601, -0.9958751909, -0.9940716137, -0.9914827859, + -0.9877703933, -0.9824541388, -0.9748561217, -0.9640275801, -0.9486568273, + -0.9269625051, -0.8965880154, -0.8545351057, -0.7972097087, -0.7206956332, + -0.6213939966, -0.4971057414, -0.3484130125, -0.1798408185, 0.0000000000, + 0.1798408185, 0.3484130125, 0.4971057414, 0.6213939966, 0.7206956332, + 0.7972097087, 0.8545351057, 0.8965880154, 0.9269625051, 0.9486568273, + 0.9640275801, 0.9748561217, 0.9824541388, 0.9877703933, 0.9914827859, + 0.9940716137, 0.9958751909, 0.9971308601, 0.9980046622, 0.9986125310, + 0.9990353053, 0.9993292997, 0.9995337191, 0.9996758446, 0.9997746542, + 0.9998433469, 0.9998911009, 0.9999242982, 0.9999473758, 0.9999634183, + 0.9999745703, 0.9999823226, 0.9999877117, 0.9999914578, 0.9999940619, + 0.9999958722, 0.9999971306, 0.9999980053, 0.9999986134, 0.9999990361, + 0.9999993300, 0.9999995342, 0.9999996762, 0.9999997749}; + +// Test vector and expected results are directly ported from TensorFlow Lite's +// int16 tanh test. 
+constexpr int tanh_int16_vec_size = 177; + +const float tanh_int16_input_vec_fp[tanh_int16_vec_size] = { + -20.0000000000, -19.7727272727, -19.5454545455, -19.3181818182, + -19.0909090909, -18.8636363636, -18.6363636364, -18.4090909091, + -18.1818181818, -17.9545454545, -17.7272727273, -17.5000000000, + -17.2727272727, -17.0454545455, -16.8181818182, -16.5909090909, + -16.3636363636, -16.1363636364, -15.9090909091, -15.6818181818, + -15.4545454545, -15.2272727273, -15.0000000000, -14.7727272727, + -14.5454545455, -14.3181818182, -14.0909090909, -13.8636363636, + -13.6363636364, -13.4090909091, -13.1818181818, -12.9545454545, + -12.7272727273, -12.5000000000, -12.2727272727, -12.0454545455, + -11.8181818182, -11.5909090909, -11.3636363636, -11.1363636364, + -10.9090909091, -10.6818181818, -10.4545454545, -10.2272727273, + -10.0000000000, -9.7727272727, -9.5454545455, -9.3181818182, + -9.0909090909, -8.8636363636, -8.6363636364, -8.4090909091, + -8.1818181818, -7.9545454545, -7.7272727273, -7.5000000000, + -7.2727272727, -7.0454545455, -6.8181818182, -6.5909090909, + -6.3636363636, -6.1363636364, -5.9090909091, -5.6818181818, + -5.4545454545, -5.2272727273, -5.0000000000, -4.7727272727, + -4.5454545455, -4.3181818182, -4.0909090909, -3.8636363636, + -3.6363636364, -3.4090909091, -3.1818181818, -2.9545454545, + -2.7272727273, -2.5000000000, -2.2727272727, -2.0454545455, + -1.8181818182, -1.5909090909, -1.3636363636, -1.1363636364, + -0.9090909091, -0.6818181818, -0.4545454545, -0.2272727273, + 0.0000000000, 0.2272727273, 0.4545454545, 0.6818181818, + 0.9090909091, 1.1363636364, 1.3636363636, 1.5909090909, + 1.8181818182, 2.0454545455, 2.2727272727, 2.5000000000, + 2.7272727273, 2.9545454545, 3.1818181818, 3.4090909091, + 3.6363636364, 3.8636363636, 4.0909090909, 4.3181818182, + 4.5454545455, 4.7727272727, 5.0000000000, 5.2272727273, + 5.4545454545, 5.6818181818, 5.9090909091, 6.1363636364, + 6.3636363636, 6.5909090909, 6.8181818182, 7.0454545455, + 7.2727272727, 
7.5000000000, 7.7272727273, 7.9545454545, + 8.1818181818, 8.4090909091, 8.6363636364, 8.8636363636, + 9.0909090909, 9.3181818182, 9.5454545455, 9.7727272727, + 10.0000000000, 10.2272727273, 10.4545454545, 10.6818181818, + 10.9090909091, 11.1363636364, 11.3636363636, 11.5909090909, + 11.8181818182, 12.0454545455, 12.2727272727, 12.5000000000, + 12.7272727273, 12.9545454545, 13.1818181818, 13.4090909091, + 13.6363636364, 13.8636363636, 14.0909090909, 14.3181818182, + 14.5454545455, 14.7727272727, 15.0000000000, 15.2272727273, + 15.4545454545, 15.6818181818, 15.9090909091, 16.1363636364, + 16.3636363636, 16.5909090909, 16.8181818182, 17.0454545455, + 17.2727272727, 17.5000000000, 17.7272727273, 17.9545454545, + 18.1818181818, 18.4090909091, 18.6363636364, 18.8636363636, + 19.0909090909, 19.3181818182, 19.5454545455, 19.7727272727, + 20.0000000000}; + +const float tanh_int16_output_vec_fp[tanh_int16_vec_size] = { + -1.0000000000, -1.0000000000, -1.0000000000, -1.0000000000, -1.0000000000, + -1.0000000000, -1.0000000000, -1.0000000000, -1.0000000000, -1.0000000000, + -1.0000000000, -1.0000000000, -1.0000000000, -1.0000000000, -1.0000000000, + -1.0000000000, -1.0000000000, -1.0000000000, -1.0000000000, -1.0000000000, + -1.0000000000, -1.0000000000, -1.0000000000, -1.0000000000, -1.0000000000, + -1.0000000000, -1.0000000000, -1.0000000000, -1.0000000000, -1.0000000000, + -1.0000000000, -1.0000000000, -1.0000000000, -1.0000000000, -1.0000000000, + -0.9999999999, -0.9999999999, -0.9999999998, -0.9999999997, -0.9999999996, + -0.9999999993, -0.9999999989, -0.9999999983, -0.9999999974, -0.9999999959, + -0.9999999935, -0.9999999898, -0.9999999839, -0.9999999746, -0.9999999600, + -0.9999999370, -0.9999999007, -0.9999998435, -0.9999997535, -0.9999996117, + -0.9999993882, -0.9999990361, -0.9999984815, -0.9999976076, -0.9999962309, + -0.9999940619, -0.9999906449, -0.9999852614, -0.9999767801, -0.9999634183, + -0.9999423677, -0.9999092043, -0.9998569589, -0.9997746542, 
-0.9996450004, + -0.9994407705, -0.9991190997, -0.9986125310, -0.9978149744, -0.9965597488, + -0.9945853915, -0.9914827859, -0.9866142982, -0.9789923110, -0.9671021386, + -0.9486568273, -0.9202886021, -0.8772337852, -0.8131859906, -0.7206956332, + -0.5927001330, -0.4256281972, -0.2234388228, 0.0000000000, 0.2234388228, + 0.4256281972, 0.5927001330, 0.7206956332, 0.8131859906, 0.8772337852, + 0.9202886021, 0.9486568273, 0.9671021386, 0.9789923110, 0.9866142982, + 0.9914827859, 0.9945853915, 0.9965597488, 0.9978149744, 0.9986125310, + 0.9991190997, 0.9994407705, 0.9996450004, 0.9997746542, 0.9998569589, + 0.9999092043, 0.9999423677, 0.9999634183, 0.9999767801, 0.9999852614, + 0.9999906449, 0.9999940619, 0.9999962309, 0.9999976076, 0.9999984815, + 0.9999990361, 0.9999993882, 0.9999996117, 0.9999997535, 0.9999998435, + 0.9999999007, 0.9999999370, 0.9999999600, 0.9999999746, 0.9999999839, + 0.9999999898, 0.9999999935, 0.9999999959, 0.9999999974, 0.9999999983, + 0.9999999989, 0.9999999993, 0.9999999996, 0.9999999997, 0.9999999998, + 0.9999999999, 0.9999999999, 1.0000000000, 1.0000000000, 1.0000000000, + 1.0000000000, 1.0000000000, 1.0000000000, 1.0000000000, 1.0000000000, + 1.0000000000, 1.0000000000, 1.0000000000, 1.0000000000, 1.0000000000, + 1.0000000000, 1.0000000000, 1.0000000000, 1.0000000000, 1.0000000000, + 1.0000000000, 1.0000000000, 1.0000000000, 1.0000000000, 1.0000000000, + 1.0000000000, 1.0000000000, 1.0000000000, 1.0000000000, 1.0000000000, + 1.0000000000, 1.0000000000, 1.0000000000, 1.0000000000, 1.0000000000, + 1.0000000000, 1.0000000000}; + +void TestTanhFloat(int input_dims_data[], const float* input_data, + const float* expected_output_data, int output_dims_data[], + float* output_data, const float tolerance) { + TfLiteIntArray* input_dims = IntArrayFromInts(input_dims_data); + TfLiteIntArray* output_dims = IntArrayFromInts(output_dims_data); + const int output_elements_count = ElementCount(*output_dims); + + constexpr int inputs_size = 1; + constexpr 
int outputs_size = 1; + constexpr int tensors_size = inputs_size + outputs_size; + TfLiteTensor tensors[tensors_size] = { + CreateTensor(input_data, input_dims), + CreateTensor(output_data, output_dims), + }; + + int inputs_array_data[] = {1, 0}; + TfLiteIntArray* inputs_array = IntArrayFromInts(inputs_array_data); + int outputs_array_data[] = {1, 1}; + TfLiteIntArray* outputs_array = IntArrayFromInts(outputs_array_data); + + const TFLMRegistration registration = tflite::Register_TANH(); + micro::KernelRunner runner(registration, tensors, tensors_size, inputs_array, + outputs_array, /*builtin_data=*/nullptr); + + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, runner.InitAndPrepare()); + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, runner.Invoke()); + + for (int i = 0; i < output_elements_count; ++i) { + TF_LITE_MICRO_EXPECT_NEAR(expected_output_data[i], output_data[i], + tolerance); + } +} + +template +void TestTanhQuantized(int input_dims_data[], const float* input_data, + T* input_quantized, float input_scale, + int input_zero_point, const float* expected_output_data, + T* expected_output_quantized, int output_dims_data[], + float output_scale, int output_zero_point, + T* output_quantized, const int tolerance) { + TfLiteIntArray* input_dims = IntArrayFromInts(input_dims_data); + TfLiteIntArray* output_dims = IntArrayFromInts(output_dims_data); + const int output_elements_count = ElementCount(*output_dims); + + tflite::Quantize(expected_output_data, expected_output_quantized, + output_elements_count, output_scale, output_zero_point); + + constexpr int inputs_size = 1; + constexpr int outputs_size = 1; + constexpr int tensors_size = inputs_size + outputs_size; + TfLiteTensor tensors[tensors_size] = { + CreateQuantizedTensor(input_data, input_quantized, input_dims, + input_scale, input_zero_point), + CreateQuantizedTensor(output_quantized, output_dims, output_scale, + output_zero_point)}; + + int inputs_array_data[] = {1, 0}; + TfLiteIntArray* inputs_array = 
IntArrayFromInts(inputs_array_data); + int outputs_array_data[] = {1, 1}; + TfLiteIntArray* outputs_array = IntArrayFromInts(outputs_array_data); + + const TFLMRegistration registration = tflite::Register_TANH(); + micro::KernelRunner runner(registration, tensors, tensors_size, inputs_array, + outputs_array, /*builtin_data=*/nullptr); + + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, runner.InitAndPrepare()); + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, runner.Invoke()); + + for (int i = 0; i < output_elements_count; ++i) { + TF_LITE_MICRO_EXPECT_NEAR(expected_output_quantized[i], output_quantized[i], + tolerance); + } +} + +} // namespace +} // namespace testing +} // namespace tflite + +TF_LITE_MICRO_TESTS_BEGIN + +TF_LITE_MICRO_TEST(SimpleTestTanhFloat) { + using tflite::testing::tanh_input_vec_fp; + using tflite::testing::tanh_output_vec_fp; + using tflite::testing::tanh_vec_size; + + int input_shape[] = {2, 1, tanh_vec_size}; + int output_shape[] = {2, 1, tanh_vec_size}; + + float output_data[tanh_vec_size]; + tflite::testing::TestTanhFloat( // + input_shape, // Input shape. + tanh_input_vec_fp, // Input data + tanh_output_vec_fp, // Expected results. + output_shape, // Output shape. + output_data, 1e-7 /* tolerance */); +} + +TF_LITE_MICRO_TEST(SimpleTestTanhInt8) { + using tflite::testing::tanh_input_vec_fp; + using tflite::testing::tanh_output_vec_fp; + using tflite::testing::tanh_vec_size; + + const float input_scale = 16 / 256.f; + const int input_zero_point = 0; + const float output_scale = 1.99999955f / 256.f; + const int output_zero_point = 0; + + int input_shape[] = {2, 1, tanh_vec_size}; + int output_shape[] = {2, 1, tanh_vec_size}; + + int8_t input_quantized[tanh_vec_size]; + int8_t expected_output_quantized[tanh_vec_size]; + int8_t output_quantized[tanh_vec_size]; + tflite::testing::TestTanhQuantized( // + input_shape, // Input shape. + tanh_input_vec_fp, input_quantized, // Input data. + input_scale, input_zero_point, // Input quantized info. 
+ tanh_output_vec_fp, expected_output_quantized, // Expected results. + output_shape, // Output shape. + output_scale, output_zero_point, // Output quantized info. + output_quantized, // Operation results + 2 // Tolerance. + ); +} + +TF_LITE_MICRO_TEST(TestTanhInt16WideRange) { + using tflite::testing::tanh_int16_input_vec_fp; + using tflite::testing::tanh_int16_output_vec_fp; + using tflite::testing::tanh_int16_vec_size; + + const float input_scale = 32.f / 65536.f; + const int input_zero_point = 0; + const float output_scale = 2.f / 65536.f; + const int output_zero_point = 0; + + int input_shape[] = {2, 1, tanh_int16_vec_size}; + int output_shape[] = {2, 1, tanh_int16_vec_size}; + + int16_t input_quantized[tanh_int16_vec_size]; + int16_t expected_output_quantized[tanh_int16_vec_size]; + int16_t output_quantized[tanh_int16_vec_size]; + tflite::testing::TestTanhQuantized( // + input_shape, // Input shape. + tanh_int16_input_vec_fp, // Input data. + input_quantized, // Quantized input data. + input_scale, input_zero_point, // Input quantized info. + tanh_int16_output_vec_fp, // Expected results. + expected_output_quantized, // Expected quantized results. + output_shape, // Output shape. + output_scale, output_zero_point, // Output quantized info. + output_quantized, // Operation results + 16 // Tolerance. 
+ ); +} + +TF_LITE_MICRO_TESTS_END diff --git a/tensorflow/lite/micro/kernels/test_data_generation/BUILD b/tensorflow/lite/micro/kernels/test_data_generation/BUILD new file mode 100644 index 0000000..3f473f1 --- /dev/null +++ b/tensorflow/lite/micro/kernels/test_data_generation/BUILD @@ -0,0 +1,25 @@ +package( + # Disabling layering_check because of http://b/177257332 + features = ["-layering_check"], + licenses = ["notice"], +) + +cc_binary( + name = "generate_detection_postprocess_flexbuffers_data", + srcs = [ + "generate_detection_postprocess_flexbuffers_data.cc", + ], + deps = [ + "@flatbuffers", + ], +) + +cc_binary( + name = "generate_circular_buffer_flexbuffers_data", + srcs = [ + "generate_circular_buffer_flexbuffers_data.cc", + ], + deps = [ + "@flatbuffers", + ], +) diff --git a/tensorflow/lite/micro/kernels/test_data_generation/README.md b/tensorflow/lite/micro/kernels/test_data_generation/README.md new file mode 100644 index 0000000..7f3bbb2 --- /dev/null +++ b/tensorflow/lite/micro/kernels/test_data_generation/README.md @@ -0,0 +1,13 @@ +# Background + +As a Custom operator, detection_postprocess is using Flexbuffers library. In the +unit test there is a need to use flexbuffers::Builder since the operator itself +use flexbuffers::Map. However flexbuffers::Builder can not be used for most +targets (basically only on X86), since it is using std::vector and std::map. +Therefore the flexbuffers::Builder data is pregenerated on X86. 
+ +# How to generate new data: + +~~~ + g++ -I ../../../micro/tools/make/downloads/flatbuffers/include generate_detection_postprocess_flexbuffers_data.cc && ./a.out > ../detection_postprocess_flexbuffers_generated_data.cc +~~~ diff --git a/tensorflow/lite/micro/kernels/test_data_generation/generate_circular_buffer_flexbuffers_data.cc b/tensorflow/lite/micro/kernels/test_data_generation/generate_circular_buffer_flexbuffers_data.cc new file mode 100644 index 0000000..38abb63 --- /dev/null +++ b/tensorflow/lite/micro/kernels/test_data_generation/generate_circular_buffer_flexbuffers_data.cc @@ -0,0 +1,61 @@ +/* Copyright 2021 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ +#include "flatbuffers/flexbuffers.h" + +const char* license = + "/* Copyright 2021 The TensorFlow Authors. 
All Rights Reserved.\n" + "Licensed under the Apache License, Version 2.0 (the \"License\");\n" + "you may not use this file except in compliance with the License.\n" + "You may obtain a copy of the License at\n\n" + " http://www.apache.org/licenses/LICENSE-2.0\n\n" + "Unless required by applicable law or agreed to in writing, software\n" + "distributed under the License is distributed on an \"AS IS\" BASIS,\n" + "WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n" + "See the License for the specific language governing permissions and\n" + "limitations under the License.\n" + "=======================================================================" + "=======*/\n"; + +void generate(const char* name) { + flexbuffers::Builder fbb; + fbb.Map([&]() { fbb.Int("cycles_max", 1); }); + fbb.Finish(); + + // fbb.GetBuffer returns std::Vector but TfLite passes char arrays + // for the raw data, and so we reinterpret_cast. + const uint8_t* init_data = + reinterpret_cast(fbb.GetBuffer().data()); + int fbb_size = fbb.GetBuffer().size(); + + printf("const int g_gen_data_size_%s = %d;\n", name, fbb_size); + printf("const unsigned char g_gen_data_%s[] = { ", name); + for (size_t i = 0; i < fbb_size; i++) { + printf("0x%02x, ", init_data[i]); + } + printf("};\n"); +} + +int main() { + printf("%s\n", license); + printf("// This file is generated. 
See:\n"); + printf("// third_party/tensorflow/lite/micro/kernels/test_data_generation/"); + printf("README.md\n"); + printf("\n"); + printf( + "#include \"third_party/tensorflow/lite/micro/kernels/" + "circular_buffer_flexbuffers_generated_data.h\""); + printf("\n\n"); + generate("circular_buffer_config"); +} diff --git a/tensorflow/lite/micro/kernels/test_data_generation/generate_detection_postprocess_flexbuffers_data.cc b/tensorflow/lite/micro/kernels/test_data_generation/generate_detection_postprocess_flexbuffers_data.cc new file mode 100644 index 0000000..9aa73fe --- /dev/null +++ b/tensorflow/lite/micro/kernels/test_data_generation/generate_detection_postprocess_flexbuffers_data.cc @@ -0,0 +1,75 @@ +/* Copyright 2022 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ +#include "flatbuffers/flexbuffers.h" + +const char* license = + "/* Copyright 2022 The TensorFlow Authors. 
All Rights Reserved.\n\n" + "Licensed under the Apache License, Version 2.0 (the \"License\");\n" + "you may not use this file except in compliance with the License.\n" + "You may obtain a copy of the License at\n\n" + " http://www.apache.org/licenses/LICENSE-2.0\n\n" + "Unless required by applicable law or agreed to in writing, software\n" + "distributed under the License is distributed on an \"AS IS\" BASIS,\n" + "WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n" + "See the License for the specific language governing permissions and\n" + "limitations under the License.\n" + "=======================================================================" + "=======*/\n"; + +void generate(const char* name, bool use_regular_nms) { + flexbuffers::Builder fbb; + fbb.Map([&]() { + fbb.Int("max_detections", 3); + fbb.Int("max_classes_per_detection", 1); + fbb.Int("detections_per_class", 1); + fbb.Bool("use_regular_nms", use_regular_nms); + fbb.Float("nms_score_threshold", 0.0); + fbb.Float("nms_iou_threshold", 0.5); + fbb.Int("num_classes", 2); + fbb.Float("y_scale", 10.0); + fbb.Float("x_scale", 10.0); + fbb.Float("h_scale", 5.0); + fbb.Float("w_scale", 5.0); + }); + fbb.Finish(); + + // fbb.GetBuffer returns std::Vector but TfLite passes char arrays + // for the raw data, and so we reinterpret_cast. + const uint8_t* init_data = + reinterpret_cast(fbb.GetBuffer().data()); + int fbb_size = fbb.GetBuffer().size(); + + printf("const int g_gen_data_size_%s = %d;\n", name, fbb_size); + printf("alignas(4) const unsigned char g_gen_data_%s[] = { ", name); + for (size_t i = 0; i < fbb_size; i++) { + printf("0x%02x, ", init_data[i]); + } + printf("};\n"); +} + +int main() { + printf("%s\n", license); + printf("// This file is generated. 
See:\n"); + printf("// tflite-micro/tensorflow/lite/micro/kernels/test_data_generation/"); + printf("README.md\n"); + printf("\n"); + printf( + "#include " + "\"tensorflow/lite/micro/kernels/" + "detection_postprocess_flexbuffers_generated_data.h\""); + printf("\n\n"); + generate("none_regular_nms", false); + generate("regular_nms", true); +} diff --git a/tensorflow/lite/micro/kernels/testdata/BUILD b/tensorflow/lite/micro/kernels/testdata/BUILD new file mode 100644 index 0000000..a20337a --- /dev/null +++ b/tensorflow/lite/micro/kernels/testdata/BUILD @@ -0,0 +1,64 @@ +load("@tflm_pip_deps//:requirements.bzl", "requirement") + +package( + default_visibility = ["//tensorflow/lite/micro/kernels:__pkg__"], + # Disabling layering_check because of http://b/177257332 + features = ["-layering_check"], + licenses = ["notice"], +) + +#################################### +# C++ libraries +#################################### + +cc_library( + name = "conv_test_data", + srcs = ["conv_test_data.cc"], + hdrs = ["conv_test_data.h"], + deps = ["//tensorflow/lite/c:common"], +) + +cc_library( + name = "lstm_test_data", + srcs = ["lstm_test_data.cc"], + hdrs = [ + "lstm_test_data.h", + ], + deps = [ + "//tensorflow/lite/c:common", + "//tensorflow/lite/micro:test_helpers", + "//tensorflow/lite/micro/kernels:lstm_shared", + ], +) + +#################################### +# Python +#################################### +py_binary( + name = "lstm_test_data_generator", + srcs = [ + "lstm_test_data_generator.py", + "lstm_test_data_utils.py", + ], + srcs_version = "PY3", + deps = [ + "@absl_py//absl:app", + requirement("numpy"), + requirement("tensorflow-cpu"), + ], +) + +py_test( + name = "lstm_test_data_generator_test", + srcs = ["lstm_test_data_generator_test.py"], + main = "lstm_test_data_generator_test.py", + python_version = "PY3", + tags = [ + "noasan", + "nomsan", # Python doesn't like these symbols + "noubsan", + ], + deps = [ + ":lstm_test_data_generator", + ], +) diff --git 
a/tensorflow/lite/micro/kernels/testdata/conv_test_data.cc b/tensorflow/lite/micro/kernels/testdata/conv_test_data.cc new file mode 100644 index 0000000..094aab6 --- /dev/null +++ b/tensorflow/lite/micro/kernels/testdata/conv_test_data.cc @@ -0,0 +1,504 @@ +/* Copyright 2021 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#include "tensorflow/lite/micro/kernels/testdata/conv_test_data.h" + +#include "tensorflow/lite/c/common.h" + +namespace tflite { + +// Kernel Conv Test Case: Int8Filter8x3x3x3PerChannelScaleRelu6ShouldMatchGolden +const int8_t kConvInput1x32x32x3[1 * 32 * 32 * 3] = { + 27, 32, 33, 31, 31, 35, 32, 33, 35, 30, 37, 36, + 31, 37, 37, 31, 37, 37, 31, 36, 42, 32, 38, 39, + 34, 39, 40, 36, 38, 40, 32, 42, 41, 36, 41, 42, + 35, 41, 42, 38, 44, 45, 40, 46, 43, 40, 44, 48, + 34, 39, 40, 39, 43, 44, 49, 53, 60, 42, 48, 49, + 36, 44, 43, 35, 45, 44, 37, 46, 45, 38, 46, 45, + 39, 45, 45, 37, 45, 44, 41, 46, 46, 41, 46, 46, + 38, 46, 46, 40, 47, 47, 38, 48, 47, 38, 46, 46, + 29, 32, 33, 30, 32, 34, 31, 32, 34, 29, 35, 34, + 30, 36, 34, 30, 37, 36, 32, 37, 37, 31, 37, 36, + 31, 37, 37, 33, 38, 39, 34, 40, 39, 34, 41, 41, + 42, 48, 47, 26, 27, 27, -20, -17, -19, -4, -3, 0, + 1, 1, 6, -4, -5, 0, -31, -33, -31, -8, -7, -7, + 53, 59, 62, 37, 44, 42, 37, 46, 44, 37, 45, 43, + 38, 46, 43, 39, 45, 44, 39, 46, 46, 38, 47, 46, + 42, 47, 47, 40, 48, 47, 42, 47, 48, 
41, 49, 47, + 31, 33, 35, 33, 33, 36, 31, 37, 35, 30, 37, 36, + 31, 37, 36, 33, 36, 38, 32, 38, 37, 33, 39, 37, + 34, 40, 40, 33, 40, 40, 37, 38, 41, 40, 47, 47, + 4, 7, 2, -34, -34, -34, 34, 40, 38, -48, -42, -47, + -65, -61, -62, -55, -47, -49, 7, 14, 14, 21, 26, 26, + -24, -24, -31, 49, 56, 58, 39, 47, 46, 38, 47, 45, + 39, 47, 44, 39, 47, 45, 39, 48, 45, 40, 48, 47, + 40, 47, 47, 39, 46, 46, 40, 47, 47, 41, 47, 48, + 30, 31, 32, 29, 34, 34, 31, 34, 36, 30, 36, 36, + 31, 36, 34, 32, 36, 34, 30, 37, 37, 34, 37, 35, + 34, 37, 38, 34, 37, 38, 36, 39, 39, 43, 45, 44, + -68, -69, -78, 50, 57, 56, -70, -69, -71, -116, -117, -114, + -105, -105, -102, -99, -99, -97, -106, -105, -97, 62, 72, 70, + -30, -28, -27, 10, 14, 10, 42, 53, 51, 40, 48, 47, + 42, 48, 48, 42, 48, 47, 41, 49, 47, 42, 48, 47, + 42, 49, 49, 43, 49, 49, 43, 49, 49, 41, 49, 47, + 33, 34, 37, 31, 36, 36, 31, 38, 37, 32, 37, 38, + 34, 36, 36, 35, 37, 36, 33, 39, 39, 33, 38, 38, + 35, 38, 37, 37, 39, 37, 38, 41, 42, 22, 25, 25, + -46, -49, -50, 61, 64, 64, -66, -63, -63, -113, -113, -113, + -100, -100, -97, -97, -96, -95, -112, -110, -103, 51, 65, 62, + 10, 17, 17, -40, -38, -43, 46, 58, 55, 40, 47, 46, + 42, 46, 47, 39, 48, 47, 39, 49, 48, 38, 50, 49, + 42, 48, 48, 42, 48, 48, 42, 49, 49, 44, 48, 48, + 31, 34, 35, 31, 36, 35, 31, 38, 36, 32, 38, 36, + 33, 37, 38, 34, 38, 39, 36, 36, 39, 36, 37, 39, + 37, 38, 40, 37, 41, 42, 38, 42, 42, 41, 45, 45, + -37, -42, -45, 44, 53, 55, 5, 3, 4, -95, -97, -90, + -89, -92, -94, -86, -85, -82, -38, -62, -62, 63, 35, 26, + -19, -19, -23, 2, -25, -28, 69, 26, 27, 64, 21, 21, + 65, 21, 23, 62, 24, 21, 62, 26, 24, 56, 32, 29, + 44, 46, 44, 42, 50, 51, 43, 49, 49, 42, 48, 48, + 31, 33, 35, 32, 37, 35, 32, 37, 35, 33, 38, 39, + 37, 38, 39, 38, 39, 41, 39, 41, 41, 38, 40, 43, + 37, 41, 43, 38, 43, 45, 38, 46, 45, 45, 50, 50, + 10, -1, -5, 2, 9, 5, 42, 34, 34, 2, -19, -25, + -64, -73, -88, -39, -34, -37, 37, -13, -14, 3, -22, -20, + -15, -28, -28, 67, 21, 24, 63, 26, 28, 61, 
23, 28, + 60, 22, 23, 62, 24, 22, 61, 25, 23, 64, 26, 26, + 65, 23, 20, 45, 41, 37, 42, 49, 50, 42, 49, 48, + 31, 32, 34, 32, 33, 35, 32, 35, 34, 31, 38, 38, + 43, 45, 44, 41, 46, 44, 37, 44, 42, 39, 34, 32, + 45, 31, 27, 50, 30, 22, 46, 20, 13, 38, 5, -4, + 33, -9, -14, -2, -42, -49, -41, -50, -49, -44, -54, -51, + -71, -82, -96, -57, -56, -66, -18, -17, -10, -25, -46, -49, + 28, -20, -15, 36, -6, -5, 55, 18, 22, 56, 13, 17, + 56, 10, 14, 56, 13, 16, 52, 14, 12, 52, 17, 13, + 52, 15, 14, 50, 13, 4, 46, 53, 52, 44, 50, 49, + 27, 34, 35, 28, 36, 33, 37, 37, 31, 62, 50, 45, + -26, -26, -24, -127, -124, -123, 32, -1, 0, 66, 26, 21, + 54, 16, 15, 49, 17, 14, 39, 7, 5, 23, -19, -24, + 0, -43, -49, 17, -15, -11, 43, 24, 32, 42, 31, 48, + 60, 62, 77, 79, 81, 90, 81, 73, 85, 82, 71, 79, + 66, 44, 51, 39, -1, 7, 31, -20, -15, 33, -15, -11, + 39, 0, 1, 40, 1, 1, 43, 2, 3, 45, 7, 6, + 42, -4, -5, 33, -13, -23, 49, 55, 53, 43, 53, 53, + 56, 24, 21, 65, 19, 19, 83, 40, 38, -85, -95, -95, + -127, -127, -127, -34, -30, -21, -59, -60, -59, 50, 16, 19, + 55, 15, 12, 42, 2, -1, 25, -4, -5, 14, 4, 7, + 16, 17, 20, 0, 5, 4, -9, -10, -6, -6, -7, -1, + -12, -13, -8, -1, -1, 5, -4, -3, 3, 11, 10, 13, + 29, 27, 27, 62, 64, 63, 99, 99, 103, 74, 59, 67, + 12, -32, -25, 9, -44, -51, 15, -45, -46, 15, -50, -54, + 19, -39, -49, 38, 31, 26, 45, 48, 48, 50, 34, 31, + 63, 22, 26, 69, 34, 37, 9, -11, -12, -127, -127, -127, + -109, -110, -110, -126, -127, -125, -57, -52, -46, -25, -40, -47, + 66, 26, 27, 4, -7, -8, -12, -6, -7, -40, -39, -42, + -46, -47, -50, -21, -21, -21, -28, -28, -30, -13, -11, -17, + -11, -10, -13, -21, -19, -26, -9, -6, -13, -23, -22, -24, + -8, -1, -6, -4, -1, 1, -7, -7, -7, 25, 21, 19, + 77, 80, 80, 39, 11, 26, -19, -72, -74, 19, -21, -24, + 51, 25, 23, 57, 13, 12, 56, 7, 7, 54, 8, 6, + 54, 22, 26, 55, 25, 30, 19, -25, -28, -51, -107, -114, + -105, -114, -116, -114, -114, -109, -53, -57, -64, 24, 26, 27, + -31, -28, -30, -45, -44, -48, -53, -55, -58, -29, -29, -30, + -23, 
-21, -25, 2, 3, -1, 2, 2, 2, 3, 3, 1, + -1, 1, -1, -22, -20, -22, 24, 27, 22, 0, 3, -4, + 8, 11, 4, -9, -6, -12, -3, 3, -2, -1, 2, 1, + 0, -1, 4, 5, 6, 7, 40, 38, 43, 52, 7, 8, + 60, 15, 19, 56, 6, 8, 48, -3, -2, 45, -5, -6, + 38, -8, -2, 37, -6, 1, 22, -11, -6, -25, -65, -69, + -58, -97, -94, -76, -110, -111, -47, -64, -65, -65, -69, -73, + -47, -49, -51, -37, -35, -38, -20, -20, -20, 29, 32, 28, + 49, 54, 54, 65, 71, 72, 51, 59, 60, 32, 42, 45, + 47, 44, 48, 7, -9, -4, 83, 96, 95, 55, 63, 64, + 59, 58, 56, 43, 47, 44, 15, 16, 13, 2, 3, -1, + 0, 3, -2, -4, 2, 2, -18, -18, -18, 4, 0, 0, + 49, 5, 9, 50, -8, -1, 38, -21, -17, 33, -18, -18, + 18, -34, -41, 20, -25, -28, 23, -7, -10, 20, -9, -11, + -13, -56, -55, -64, -91, -96, -48, -58, -61, -54, -55, -58, + -32, -29, -31, 12, 15, 15, 60, 66, 65, 7, 13, 18, + 45, 57, 57, 62, 16, 21, 69, 58, 60, 47, 29, 33, + 36, 4, 9, 21, 12, 15, 44, 23, 28, 75, 82, 81, + 69, 12, 22, 87, 95, 95, 31, 37, 42, 53, 62, 59, + 22, 24, 19, 1, 1, -1, -10, -6, -8, -14, -20, -17, + -34, -40, -41, 44, -13, -8, 21, -34, -30, 12, -45, -42, + 5, -59, -65, 7, -48, -53, 12, -33, -40, 17, -21, -25, + 0, -37, -36, -53, -68, -70, -54, -60, -62, -11, -6, -5, + 22, 29, 29, 59, 68, 67, 66, 67, 65, 74, 70, 72, + 74, 82, 82, 69, 88, 88, 40, 12, 17, 72, 78, 78, + 51, 50, 51, 11, 0, 4, 84, 95, 95, 47, 22, 26, + 59, 63, 65, 79, 87, 88, 86, 90, 93, 78, 55, 57, + 66, 74, 72, 41, 47, 43, 13, 14, 12, -6, -2, -5, + -28, -35, -26, -23, -42, -44, 19, -39, -33, -4, -68, -64, + 7, -50, -55, -11, -78, -77, -3, -54, -59, 5, -41, -47, + -45, -65, -70, -50, -63, -68, -25, -20, -17, 8, 15, 16, + 36, 41, 45, 30, 38, 43, 66, 40, 40, 59, 32, 37, + 69, 85, 82, 39, 16, 20, 23, 29, 33, 76, 84, 85, + 35, 38, 39, -4, -1, 0, 82, 89, 89, 46, 55, 58, + 45, 27, 31, 74, 84, 83, 76, 82, 83, 73, 45, 48, + 88, 97, 100, 30, 38, 37, 40, 43, 41, 9, 12, 7, + 12, 15, 16, -56, -62, -53, 2, -45, -42, -11, -64, -61, + 39, 3, -3, -37, -103, -104, -28, -88, -87, -32, -83, -84, + -67, -86, -91, -46, 
-52, -49, 11, 18, 21, 53, 59, 58, + 50, 58, 59, 46, 56, 59, 68, 79, 78, 66, 77, 76, + 70, 77, 77, 34, 18, 22, 71, 77, 79, 77, 84, 85, + -30, -27, -29, -73, -76, -82, 83, 90, 90, 77, 85, 86, + 51, 32, 36, 64, 72, 72, 77, 83, 84, 77, 86, 90, + 83, 91, 92, 82, 93, 94, 75, 82, 82, 26, 33, 29, + 18, 21, 20, -27, -29, -14, -47, -75, -76, -25, -77, -78, + 43, 0, 2, 39, 0, -3, -69, -108, -107, -95, -118, -118, + -37, -56, -62, -12, -10, -6, 28, 34, 39, 61, 73, 72, + 70, 52, 50, 57, 15, 20, 63, 76, 77, 63, 73, 71, + 68, 75, 75, 46, 40, 41, 51, 48, 49, 63, 73, 74, + 27, 31, 32, -9, -7, -2, 66, 73, 76, 75, 85, 87, + 37, 16, 19, 77, 86, 85, 81, 85, 86, 80, 88, 87, + 79, 73, 76, 77, 43, 45, 88, 98, 98, 65, 71, 72, + 30, 36, 31, 27, 28, 38, -62, -64, -59, 37, 29, 29, + 34, -12, -13, 32, -14, -20, 24, -19, -22, -52, -76, -81, + -22, -36, -41, 6, 13, 19, 45, 51, 54, 43, 50, 51, + 44, 48, 51, 66, 70, 68, 64, 70, 70, 64, 71, 71, + 66, 72, 72, 67, 77, 76, 40, 29, 32, 21, -2, 3, + 58, 65, 66, -15, -9, -5, 52, 45, 48, 35, 10, 15, + 71, 80, 80, 75, 83, 82, 98, 102, 103, 95, 101, 101, + 93, 100, 99, 94, 103, 100, 81, 88, 88, 71, 77, 77, + 63, 71, 67, 37, 42, 50, -68, -75, -64, 29, -10, -5, + 28, -22, -18, 38, 4, 4, 37, -2, -2, -10, -30, -43, + -23, -35, -37, 37, 44, 48, 40, 46, 48, 34, 41, 43, + 35, 46, 48, 67, 76, 75, 64, 70, 70, 64, 70, 70, + 64, 70, 70, 65, 71, 71, 68, 76, 76, 70, 73, 72, + -21, -30, -30, -66, -74, -87, 50, 41, 42, 71, 82, 81, + 70, 77, 77, 80, 89, 88, 118, 124, 124, 120, 123, 125, + 121, 124, 125, 121, 126, 127, 115, 120, 123, 98, 104, 104, + 85, 93, 91, 45, 50, 54, -70, -72, -58, 56, 15, 19, + 30, -25, -18, 29, -12, -9, 10, -40, -40, 58, 37, 33, + -30, -42, -44, 36, 45, 47, 40, 48, 51, 68, 78, 78, + 56, 12, 16, 57, 43, 49, 65, 72, 72, 63, 71, 70, + 64, 70, 70, 64, 70, 70, 64, 70, 70, 68, 74, 74, + 11, 14, 10, -50, -52, -53, 79, 86, 86, 68, 76, 77, + 67, 75, 76, 81, 84, 89, 116, 124, 123, 121, 126, 126, + 122, 126, 127, 112, 101, 104, 97, 97, 96, 77, 84, 85, + 77, 
83, 82, 37, 44, 46, -74, -78, -62, 52, -3, 0, + 24, -23, -21, 15, -39, -36, -4, -59, -61, 7, -18, -22, + -32, -40, -50, 27, 34, 36, 48, 54, 58, 67, 75, 76, + 59, 61, 59, 62, 69, 67, 62, 69, 69, 64, 70, 70, + 65, 71, 71, 66, 72, 72, 63, 69, 69, 58, 64, 64, + 33, 33, 35, -29, -24, -24, 75, 83, 82, 72, 79, 79, + 70, 76, 79, 71, 76, 80, 78, 83, 86, 79, 84, 86, + 79, 89, 88, 83, 93, 92, 63, 70, 70, 82, 89, 92, + 60, 67, 70, 22, 25, 30, -64, -66, -57, 34, -27, -22, + 14, -45, -46, 18, -29, -30, 9, -29, -29, -15, -52, -50, + -26, -29, -44, -13, -9, -5, 41, 46, 51, 60, 66, 68, + -11, -7, 0, 72, 78, 78, 65, 72, 73, 64, 70, 70, + 66, 72, 72, 68, 74, 74, 66, 75, 75, 13, 18, 24, + 24, 25, 30, 14, 19, 25, 36, 40, 47, 38, 43, 47, + 74, 79, 81, 70, 75, 80, 71, 78, 78, 73, 80, 81, + 70, 62, 63, 80, 81, 82, 53, 59, 60, 66, 68, 72, + 26, 36, 38, -35, -34, -25, -45, -62, -61, 11, -47, -44, + 35, -6, -2, 36, -5, 0, 19, -26, -24, -23, -78, -78, + -44, -62, -73, -38, -47, -47, 45, 51, 59, 58, 62, 67, + 78, 88, 88, 57, 25, 28, 57, 19, 21, 69, 76, 79, + 66, 73, 72, 68, 73, 74, 66, 72, 72, 54, 60, 60, + 45, 50, 54, 46, 50, 54, 45, 47, 52, 45, 50, 54, + 74, 78, 82, 71, 76, 81, 73, 79, 83, 75, 83, 84, + 72, 33, 36, 74, 69, 66, 81, 89, 88, 66, 73, 76, + -12, -6, -2, -73, -71, -67, -13, -58, -59, -7, -67, -66, + 43, 6, 5, 47, 6, 7, 38, -5, -4, 14, -35, -35, + -38, -79, -83, -29, -43, -48, -19, -17, -10, 36, 44, 49, + 58, 64, 68, 73, 84, 84, 33, 42, 47, 70, 76, 76, + 71, 83, 83, 68, 75, 75, 69, 74, 75, 72, 77, 79, + 65, 71, 73, 66, 71, 74, 62, 68, 69, 75, 81, 81, + 69, 78, 78, 71, 77, 79, 70, 70, 74, 70, 72, 76, + 35, 41, 45, 87, 95, 92, 70, 75, 79, 26, 35, 39, + -45, -51, -48, -50, -72, -72, -10, -56, -63, 9, -11, -18, + 66, 27, 30, 67, 28, 31, 56, 19, 21, 33, -13, -11, + -20, -76, -80, -38, -57, -54, -40, -54, -58, 5, 10, 16, + 49, 54, 59, 61, 66, 68, 8, 15, 20, 73, 77, 78, + 49, -31, -18, 77, 88, 87, 67, 74, 74, 70, 82, 81, + 63, 71, 70, 63, 68, 69, 63, 71, 73, 69, 80, 79, + 70, 77, 76, 77, 
75, 76, 59, 4, 9, 75, 85, 83, + 57, 64, 63, 54, 64, 63, 63, 72, 76, -50, -53, -46, + -35, -51, -51, 30, 26, 18, 60, 72, 71, 61, 72, 73, + 62, 24, 28, 54, 12, 14, 50, 6, 9, 31, -19, -13, + -7, -67, -66, -65, -105, -110, -37, -50, -47, -31, -45, -47, + -4, 3, 7, 41, 46, 52, 65, 70, 71, 63, 70, 70, + 76, 88, 83, 19, 25, 30, 74, 85, 85, 56, -10, 0, + 67, 62, 65, 79, 89, 89, 60, 37, 40, 66, 25, 29, + 69, 76, 79, 9, 14, 19, 80, 93, 91, 63, 69, 70, + 70, 78, 80, 57, 67, 71, -58, -58, -52, -21, -40, -41, + 59, 64, 60, 60, 71, 72, 52, 41, 44, 48, 19, 21, + 49, 4, 5, 45, -4, -2, 42, -17, -11, 27, -21, -19, + 4, -45, -45, -28, -74, -77, -61, -105, -105, -36, -50, -47, + -19, -38, -35, 6, 9, 13, 28, 30, 37, 41, 46, 50, + 64, 66, 70, 40, 41, 45, 74, 82, 82, 72, 73, 70, + 76, 79, 80, -20, -18, -9, 76, 83, 81, 74, 81, 79, + 76, 82, 81, 37, 41, 41, 68, 74, 77, 66, 72, 75, + 8, 13, 20, -56, -59, -55, -15, -33, -40, 25, 5, 3, + 40, 15, 15, 42, -1, 0, 46, 1, 4, 49, 1, 6, + 38, -19, -18, 34, -30, -30, 32, -17, -17, 29, -11, -11, + 26, -13, -16, 5, -40, -47, -28, -82, -88, -64, -104, -113, + -51, -67, -63, -13, -24, -23, -3, -9, -5, 29, 34, 38, + 22, 30, 35, 45, 52, 57, 45, 50, 54, 38, 43, 47, + 64, 71, 72, 42, 46, 49, 60, 66, 67, 35, 41, 42, + 55, 60, 64, 54, 64, 65, 11, 16, 23, -34, -30, -28, + -54, -61, -66, -17, -44, -51, 15, -24, -29, 43, 4, 2, + 43, 11, 9, 41, 6, 11, 41, 2, 5, 43, -2, 1, + 30, -31, -32, 33, -12, -13, 45, 13, 11, 48, 18, 17, + 44, 10, 9, 33, -2, -5, 6, -48, -51, -34, -88, -98, + -62, -101, -104, -69, -91, -88, -38, -53, -50, -11, -24, -24, + -7, -16, -13, 8, 9, 13, 30, 34, 38, 28, 35, 38, + 25, 31, 35, 21, 26, 32, 20, 25, 31, 18, 22, 28, + -1, 2, 6, -45, -48, -46, -53, -64, -67, -43, -67, -77, + 9, -22, -30, 25, -15, -18, 40, -7, -7, 39, -7, -5, + 33, -12, -9, 29, -10, -8, 30, -8, -4, 32, -12, -10, + 44, 7, 10, 53, 12, 12, 59, 17, 19, 61, 22, 21, + 54, 15, 14, 51, 8, 9, 39, -5, -7, 7, -41, -50, + -46, -92, -102, -62, -95, -94, -85, -108, -113, -90, -112, -116, + 
-29, -32, -44, 3, -1, -2, -35, -45, -38, -25, -33, -28, + -30, -37, -32, -39, -46, -46, -49, -60, -62, -57, -71, -74, + -67, -87, -93, -39, -65, -75, -8, -36, -42, 25, -9, -13, + 52, 19, 21, 49, 13, 16, 31, -10, -7, 35, -13, -6, + 38, -15, -6, 32, -20, -17, 23, -23, -22, 24, -26, -23, + 63, 25, 22, 66, 30, 30, 64, 25, 28, 62, 22, 28, + 61, 21, 26, 55, 17, 17, 52, 13, 13, 42, 2, 4, + 7, -42, -42, -56, -99, -98, -56, -84, -86, -20, -45, -61, + -39, -79, -90, -57, -99, -102, -49, -96, -102, -47, -91, -94, + -49, -87, -92, -47, -86, -88, -28, -70, -76, -5, -41, -45, + 19, -9, -11, 35, 12, 9, 42, 17, 16, 44, 15, 17, + 40, 13, 15, 27, -10, -7, 5, -53, -51, 3, -51, -56, + 6, -55, -59, 16, -41, -41, 19, -31, -29, 14, -41, -39}; + +// Kernel Conv Test Case: Int8Filter8x3x3x3PerChannelScaleRelu6ShouldMatchGolden +const int8_t kConvFilter8x3x3x3[8 * 3 * 3 * 3] = { + -82, -59, -112, -88, -69, -114, -50, -60, -99, -67, -63, -100, -75, + -86, -124, -51, -46, -94, -94, -88, -127, -77, -71, -109, -28, -41, + -93, -31, 68, -34, -54, 104, -49, -32, 62, -30, -38, 80, -41, + -67, 127, -63, -36, 71, -36, -12, 26, -15, -26, 51, -27, -6, + 18, -12, 114, 86, 55, 109, 80, 49, 103, 75, 50, 112, 84, + 55, 108, 79, 50, 102, 72, 48, 127, 95, 64, 124, 91, 61, + 116, 85, 59, -93, 24, 72, -98, 25, 76, -24, 7, 18, -120, + 34, 87, -127, 35, 94, -29, 10, 21, -49, 21, 27, -54, 17, + 35, 2, -3, 1, 44, 73, 10, 38, 68, 17, -12, -15, 5, + -70, -127, -16, -53, -115, -25, 6, 10, -11, 24, 55, 2, 19, + 54, 10, -11, -16, 2, 97, 114, 86, 99, 113, 87, 105, 123, + 92, 103, 120, 93, 104, 118, 92, 109, 125, 99, 99, 117, 97, + 100, 115, 92, 108, 127, 102, -48, -78, -20, -79, -127, -33, -38, + -56, -13, -13, -9, -8, -29, -24, -10, -16, -14, -2, 22, 28, + 7, 36, 43, 13, 15, 16, 11, -31, -64, -18, -30, -57, -15, + 4, -7, 1, -78, -127, -32, -69, -119, -31, -3, -4, -2, -49, + -61, -15, -42, -63, -18, -2, 8, 0}; + +// Kernel Conv Test Case: Int8Filter8x3x3x3PerChannelScaleRelu6ShouldMatchGolden +const int32_t 
kConvBiasQuantized8[8] = {-4166945, 70595, 203077, 315159, + 55295, 184082, 75855, 233991}; + +// Kernel Conv Test Case: Int8Filter8x3x3x3PerChannelScaleRelu6ShouldMatchGolden +const int8_t kConvGoldenOutput1x16x16x8[1 * 16 * 16 * 8] = { + -128, -21, -81, 67, -20, -109, -29, 4, -128, -19, -81, 68, + -19, -109, -31, 3, -128, -19, -80, 68, -20, -109, -32, 2, + -128, -19, -80, 68, -20, -109, -32, 1, -128, -19, -80, 68, + -19, -109, -33, 1, -128, -18, -80, 69, -18, -109, -36, -1, + -128, -19, -86, 68, -36, -112, -46, 9, -128, -20, -92, 68, + 13, -115, -47, 20, -128, -21, -94, 68, -19, -116, -52, 27, + -128, -20, -87, 68, 29, -112, -43, 18, -128, -17, -80, 69, + -36, -108, -41, 0, -128, -17, -78, 69, -20, -108, -37, -3, + -128, -18, -77, 68, -21, -107, -37, -3, -128, -18, -77, 69, + -20, -107, -38, -4, -128, -18, -77, 69, -22, -107, -38, -4, + -128, -19, -82, 69, -18, -110, -31, -4, -128, -20, -81, 67, + -19, -109, -30, 3, -128, -19, -81, 68, -19, -109, -31, 2, + -128, -19, -80, 68, -20, -109, -31, 2, -128, -19, -80, 68, + -20, -109, -33, 1, -128, -20, -79, 68, -19, -108, -33, 1, + -128, -19, -88, 67, -19, -113, -34, 0, -128, -19, -99, 66, + -13, -118, -1, 26, -128, -19, -120, 66, 32, -128, 2, 64, + -128, -20, -124, 67, 8, -128, 13, 76, -128, -19, -98, 68, + 1, -118, -17, 31, -128, -18, -89, 67, -12, -113, -33, 25, + -128, -17, -76, 69, -22, -107, -37, -5, -128, -17, -77, 68, + -22, -107, -38, -4, -128, -17, -77, 69, -22, -107, -38, -5, + -128, -18, -76, 69, -23, -107, -38, -5, -128, -19, -82, 68, + -18, -110, -31, -5, -128, -19, -81, 68, -20, -109, -31, 2, + -128, -19, -80, 68, -21, -109, -32, 1, -128, -20, -79, 67, + -20, -109, -32, 1, -128, -21, -79, 67, -20, -108, -32, 0, + -128, -20, -79, 67, -21, -108, -33, -1, -128, -20, -86, 67, + -20, -113, -12, -1, -128, -21, -93, 66, -15, -115, -10, 21, + -128, -23, -113, 65, -16, -126, 77, 46, -128, -23, -120, 65, + -9, -128, 74, 71, -128, -26, -97, 60, -26, -118, -12, 29, + -128, -27, -89, 61, -11, -114, 1, 28, -128, -32, 
-77, 58, + -15, -108, -42, -2, -128, -30, -78, 58, -16, -108, -40, 0, + -128, -26, -77, 60, -18, -108, -40, -2, -128, -20, -77, 66, + -24, -107, -41, -4, -128, -19, -82, 68, -17, -110, -32, -5, + -128, -20, -81, 67, -20, -109, -32, 3, -128, -20, -82, 67, + -14, -110, -34, 0, -128, -20, -90, 67, -62, -113, -67, 16, + -128, -24, -80, 64, -25, -109, -38, 1, -128, -25, -80, 61, + -20, -110, -39, 2, -128, -28, -87, 58, -10, -113, -42, 9, + -128, -29, -94, 56, -15, -116, -13, 29, -128, -26, -96, 61, + 37, -118, 24, 31, -128, -23, -94, 62, 30, -116, 55, 40, + -128, -33, -87, 59, 25, -113, 26, 20, -128, -41, -85, 53, + -8, -112, -19, 16, -128, -42, -83, 52, -26, -111, -42, 9, + -128, -40, -82, 52, -20, -111, -38, 8, -128, -38, -82, 52, + -18, -111, -38, 7, -128, -29, -81, 54, -22, -110, -42, 9, + -128, -19, -81, 68, -18, -110, -32, -7, -128, -31, -80, 58, + -17, -109, -33, 1, -128, -24, -107, 60, 0, -123, -46, 32, + -128, -23, -121, 66, -27, -128, 24, 76, -128, -34, -92, 57, + -9, -115, -39, 32, -128, -35, -86, 53, -16, -113, -29, 10, + -128, -30, -97, 58, -25, -117, -17, 24, -128, -27, -97, 62, + -46, -117, -15, 28, -128, -26, -91, 66, -4, -114, -44, 23, + -128, -24, -87, 68, 15, -112, -71, 17, -128, -25, -85, 66, + 10, -111, -71, 13, -128, -29, -83, 63, -38, -110, -39, 5, + -128, -36, -82, 59, -88, -110, -14, -2, -128, -41, -90, 52, + 25, -115, -9, 19, -128, -43, -94, 47, 1, -117, -21, 34, + -128, -32, -84, 52, -13, -111, -16, 15, -128, -24, -83, 63, + -20, -111, -39, -1, -128, -38, -85, 54, -23, -112, -36, 6, + -128, -32, -116, 54, -12, -128, 61, 50, -128, -24, -127, 63, + -12, -128, 68, 87, -128, -24, -106, 63, -47, -122, -6, 44, + -128, -22, -102, 63, 8, -120, -17, 38, -128, -20, -99, 66, + 6, -118, 30, 38, -128, -20, -90, 66, -8, -114, 35, 21, + -128, -20, -89, 66, -16, -114, 18, 17, -128, -20, -89, 65, + -15, -113, 14, 25, -128, -18, -84, 66, -14, -111, 22, 9, + -128, -18, -89, 66, -4, -113, 8, 15, -128, -20, -89, 66, + -16, -113, -27, 20, -128, -27, -90, 65, 9, 
-114, -30, 14, + -128, -37, -90, 56, -63, -114, -5, 20, -128, -43, -84, 50, + -21, -112, -32, 8, -128, -38, -89, 49, -17, -114, -24, 16, + -128, -41, -96, 49, -13, -118, -21, 31, -128, -37, -101, 53, + -33, -120, 23, 28, -128, -33, -116, 54, -24, -127, 56, 61, + -128, -22, -108, 63, -15, -123, 45, 53, -128, -20, -89, 66, + -18, -113, 33, 24, -128, -20, -76, 67, -31, -107, -9, 0, + -128, -25, -72, 65, 2, -105, -45, -12, -128, -26, -76, 64, + -22, -107, -35, -6, -128, -28, -79, 62, -19, -108, -26, 11, + -128, -25, -70, 65, -9, -104, -57, -14, -128, -24, -71, 64, + -22, -105, -30, -14, -128, -20, -78, 66, -30, -108, 1, -4, + -128, -19, -88, 66, -9, -113, 8, 10, -128, -25, -96, 65, + -11, -117, -5, 27, -128, -43, -96, 53, -6, -118, -26, 31, + -128, -41, -97, 48, -13, -118, -13, 34, -128, -46, -106, 44, + -8, -123, 9, 46, -128, -38, -109, 48, -17, -124, -10, 45, + -128, -26, -110, 59, -12, -124, 28, 57, -128, -20, -89, 68, + -22, -113, 15, 23, -128, -21, -74, 69, -14, -106, -41, -4, + -128, -24, -68, 63, -2, -103, -53, -12, -128, -23, -73, 67, + -9, -105, -54, -12, -128, -22, -76, 67, -12, -107, -45, -13, + -128, -21, -84, 67, -44, -110, -51, 21, -128, -23, -69, 67, + -21, -104, -40, -21, -128, -21, -68, 67, -16, -103, -64, -13, + -128, -25, -64, 65, -14, -101, -54, -21, -128, -18, -70, 69, + -18, -104, -28, -19, -128, -20, -86, 66, -19, -112, 5, 5, + -128, -31, -102, 64, -16, -120, 16, 34, -128, -41, -104, 49, + -25, -121, 5, 49, -128, -42, -99, 48, -43, -120, 33, 24, + -128, -36, -116, 51, 23, -128, 52, 69, -128, -24, -98, 63, + -31, -118, 31, 43, -128, -20, -76, 69, -28, -106, -40, -5, + -128, -24, -71, 64, -13, -105, -45, -7, -128, -17, -67, 70, + -22, -102, -59, -20, -128, -23, -72, 67, -21, -105, -38, -13, + -128, -21, -77, 67, -23, -108, -54, -9, -128, -21, -86, 67, + -51, -112, 15, 22, -128, -23, -71, 67, -32, -105, -67, -17, + -128, -22, -67, 66, -15, -102, -46, -13, -128, -20, -60, 69, + -21, -99, -60, -30, -128, -23, -61, 65, 0, -100, -61, -25, + -128, -18, 
-70, 69, -35, -104, -31, -22, -128, -25, -94, 67, + -21, -116, 15, 5, -128, -31, -103, 59, -47, -120, 24, 42, + -128, -41, -92, 50, -21, -116, -12, 21, -128, -30, -99, 53, + -25, -119, 30, 28, -128, -22, -88, 66, -29, -113, -13, 23, + -128, -20, -76, 69, -11, -107, -37, -6, -128, -20, -72, 67, + -30, -105, -55, -9, -128, -19, -68, 69, -22, -103, -55, -18, + -128, -19, -69, 69, -20, -103, -51, -19, -128, -23, -77, 65, + -48, -108, -21, -13, -128, -21, -90, 65, 23, -114, -22, 36, + -128, -23, -69, 66, -31, -104, -30, -13, -128, -18, -60, 69, + -27, -98, -65, -25, -128, -20, -50, 69, -36, -94, -73, -47, + -128, -20, -52, 68, -39, -95, -74, -46, -128, -19, -60, 69, + -44, -99, -62, -36, -128, -22, -86, 69, -22, -113, -24, -15, + -128, -34, -100, 60, -27, -119, 3, 39, -128, -45, -98, 48, + -13, -119, -5, 31, -128, -32, -101, 52, 1, -120, -17, 34, + -128, -20, -90, 66, -23, -113, -28, 27, -128, -22, -75, 69, + -21, -107, -52, -13, -128, -23, -74, 64, -45, -105, -42, -7, + -128, -18, -68, 69, -21, -103, -54, -18, -128, -19, -67, 69, + -23, -103, -53, -19, -128, -19, -75, 69, -23, -107, -54, -13, + -128, -20, -85, 67, -29, -111, -1, 20, -128, -19, -68, 69, + -36, -103, -68, -18, -128, -20, -63, 69, -23, -100, -70, -23, + -128, -19, -59, 69, -6, -98, -99, -31, -128, -20, -61, 68, + -15, -100, -88, -31, -128, -19, -67, 69, -20, -102, -73, -23, + -128, -22, -92, 69, -19, -115, -38, 1, -128, -37, -102, 57, + -17, -120, -2, 45, -128, -42, -91, 50, -29, -116, 10, 20, + -128, -37, -104, 50, -2, -122, 18, 37, -128, -23, -101, 61, + -5, -119, -13, 48, -128, -21, -78, 70, -33, -107, -46, -3, + -128, -26, -73, 65, -31, -106, -37, -11, -128, -22, -69, 66, + -13, -103, -55, -12, -128, -19, -66, 69, -21, -102, -55, -21, + -128, -20, -72, 69, -28, -105, -25, -15, -128, -22, -75, 69, + -29, -106, -16, -6, -128, -21, -70, 69, -19, -104, -35, -9, + -128, -21, -65, 69, -23, -101, -60, -23, -128, -23, -68, 68, + -25, -103, -61, -23, -128, -24, -68, 63, -12, -103, -55, -15, + -128, -20, -78, 
69, -33, -108, -55, -18, -128, -25, -107, 66, + -2, -123, -1, 43, -128, -37, -102, 51, -10, -120, 17, 46, + -128, -40, -81, 52, -33, -110, -21, 3, -128, -43, -96, 50, + -27, -118, 7, 17, -128, -32, -112, 53, -21, -125, 15, 57, + -128, -23, -94, 66, -11, -115, -42, 34, -128, -20, -76, 70, + -27, -107, -58, -8, -128, -24, -72, 68, -10, -105, -46, -9, + -128, -24, -70, 64, 0, -104, -62, -8, -128, -21, -69, 67, + -33, -103, -66, -18, -128, -20, -68, 69, -18, -103, -54, -19, + -128, -20, -69, 67, -34, -103, -65, -16, -128, -23, -69, 67, + -34, -104, -64, -17, -128, -23, -69, 64, -3, -103, -48, -15, + -128, -18, -74, 69, -12, -106, -65, -16, -128, -22, -93, 67, + -18, -116, -31, 20, -128, -26, -90, 62, -16, -114, 49, 23, + -128, -24, -86, 62, -74, -112, 2, 0, -128, -44, -87, 49, + -18, -113, -34, 14, -128, -45, -93, 49, -11, -117, -3, 22, + -128, -40, -110, 48, -18, -125, 49, 48, -128, -30, -113, 57, + -7, -125, 5, 61, -128, -23, -93, 66, -13, -115, -47, 29, + -128, -20, -77, 69, -14, -107, -67, 0, -128, -20, -73, 69, + -27, -105, -48, -8, -128, -26, -70, 64, -47, -104, -43, -14, + -128, -22, -72, 68, 7, -105, -52, -6, -128, -24, -70, 64, + -50, -104, -46, -15, -128, -19, -73, 69, -30, -105, -41, -12, + -128, -19, -81, 70, -40, -109, -85, -9, -128, -22, -97, 67, + 21, -118, -45, 27, -128, -29, -93, 59, -37, -115, 9, 31, + -128, -31, -81, 59, 3, -110, -54, 5, -128, -35, -87, 55, + -9, -113, -29, 10, -128, -44, -88, 47, -11, -114, 1, 21, + -128, -38, -85, 52, -29, -112, -4, 10, -128, -39, -92, 50, + -27, -116, 18, 16, -128, -39, -112, 46, -14, -126, 57, 50, + -128, -31, -117, 57, 9, -128, 4, 67, -128, -25, -105, 63, + -17, -121, -46, 47, -128, -22, -91, 67, -2, -114, -50, 20, + -128, -21, -88, 68, -35, -112, -63, 12, -128, -21, -89, 69, + -25, -113, -70, 12, -128, -21, -94, 68, -30, -116, -71, 18, + -128, -22, -99, 66, 6, -119, -57, 32, -128, -26, -101, 60, + 24, -120, 19, 44, -128, -35, -94, 54, -27, -117, 28, 27, + -128, -41, -89, 50, -24, -114, -16, 19, -128, -41, -89, 
52, + -12, -114, -24, 19, -128, -37, -91, 52, -17, -115, -19, 21, + -128, -37, -85, 55, -39, -112, -37, 8, -128, -38, -84, 55, + -32, -111, -42, 8, -128, -37, -86, 54, -31, -113, -32, 10, + -128, -37, -95, 52, -36, -117, 10, 19, -128, -35, -110, 52, + -10, -125, 71, 51, -128, -28, -110, 56, -21, -124, 71, 55, + -128, -29, -105, 57, 37, -122, 21, 48, -128, -32, -106, 57, + 30, -123, 32, 52, -128, -30, -107, 58, 25, -123, 38, 51, + -128, -30, -105, 57, -9, -122, 49, 45, -128, -30, -98, 57, + -59, -118, 33, 30, -128, -33, -90, 57, -46, -114, -13, 18, + -128, -37, -90, 56, -15, -115, -27, 16, -128, -42, -96, 50, + 12, -118, -6, 33, -128, -43, -95, 49, 7, -117, -4, 32, + -128, -37, -95, 53, -7, -117, -1, 30}; + +// Conv Test Case: Int8Filter1x3x3x1ShouldMatchGolden +const int8_t kConvFilter1x3x3x1[1 * 3 * 3 * 1]{ + 22, -98, 78, -127, 101, 47, 87, 12, -15, +}; + +const int32_t kConvZeroBias[1] = {0}; + +// Kernel Conv Test Case: Int8Filter1x3x3x1ShouldMatchGoldenEvenInput +const int8_t kConvInput1x4x4x1[1 * 4 * 4 * 1]{ + -127, -111, -95, -79, -63, -47, -31, -15, 1, 17, 33, 49, 65, 81, 97, 113, +}; + +// Conv Test Case: Int8Filter1x3x3x1ShouldMatchGoldenOddInput +const int8_t kConvInput1x5x5x1[1 * 5 * 5 * 1]{ + -128, -111, -95, -79, -63, -47, -31, -15, 1, 17, 33, 49, 65, + 81, 97, 113, 127, 100, 80, 60, 40, 20, 0, -20, -40}; + +// Conv Test Case: Int8Filter1x3x3x1ShouldMatchGoldenEvenInputPaddingSame +const int8_t kConvGoldenOutput4x4InputPaddingSame2x2[1 * 2 * 2 * 1] = {38, 24, + 16, -58}; + +// Conv Test Case: Int8Filter1x3x3x1ShouldMatchGoldenOddInputPaddingSame +const int8_t kConvGoldenOutput5x5InputPaddingSame3x3[1 * 3 * 3 * 1] = { + -6, 25, 30, 58, 76, 7, 50, -11, -59}; + +} // namespace tflite diff --git a/tensorflow/lite/micro/kernels/testdata/conv_test_data.h b/tensorflow/lite/micro/kernels/testdata/conv_test_data.h new file mode 100644 index 0000000..bdac510 --- /dev/null +++ b/tensorflow/lite/micro/kernels/testdata/conv_test_data.h @@ -0,0 +1,37 @@ +/* 
Copyright 2021 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#ifndef TENSORFLOW_LITE_MICRO_KERNELS_CONV_TEST_DATA_H_ +#define TENSORFLOW_LITE_MICRO_KERNELS_CONV_TEST_DATA_H_ + +#include "tensorflow/lite/c/common.h" + +namespace tflite { +extern const int8_t kConvInput1x32x32x3[]; +extern const int8_t kConvFilter8x3x3x3[]; +extern const int32_t kConvBiasQuantized8[]; +extern const int8_t kConvGoldenOutput1x16x16x8[]; + +// Kernel Conv Test Cases: Int8Filter1x3x3x1ShouldMatchGolden +extern const int8_t kConvInput1x4x4x1[]; +extern const int8_t kConvInput1x5x5x1[]; +extern const int8_t kConvFilter1x3x3x1[]; +extern const int32_t kConvZeroBias[]; +extern const int8_t kConvGoldenOutput4x4InputPaddingSame2x2[]; +extern const int8_t kConvGoldenOutput5x5InputPaddingSame3x3[]; + +} // namespace tflite + +#endif // TENSORFLOW_LITE_MICRO_KERNELS_CONV_TEST_DATA_H_ diff --git a/tensorflow/lite/micro/kernels/testdata/lstm_test_data.cc b/tensorflow/lite/micro/kernels/testdata/lstm_test_data.cc new file mode 100644 index 0000000..4d7d9d9 --- /dev/null +++ b/tensorflow/lite/micro/kernels/testdata/lstm_test_data.cc @@ -0,0 +1,309 @@ +/* Copyright 2021 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. 
+You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#include "tensorflow/lite/micro/kernels/testdata/lstm_test_data.h" + +#include + +namespace tflite { +namespace testing { + +namespace { +// LSTM internal setting (e.g., nonlinear activation type) +// Only UnidirectionalLSTM is supported now +constexpr TfLiteUnidirectionalSequenceLSTMParams kDefaultBuiltinData = { + /*.activation=*/kTfLiteActTanh, + /*.cell_clip=*/6, + /*.proj_clip=*/3, + /*.time_major=*/false, + /*.asymmetric_quantize_inputs=*/true, + /*diagonal_recurrent_tensors=*/false}; +} // namespace + +GateOutputCheckData<4, 4> Get2X2GateOutputCheckData() { + GateOutputCheckData<4, 4> gate_data; + const float input_data[4] = { + 0.2, 0.3, // batch1 + -0.98, 0.62 // batch2 + }; + std::memcpy(gate_data.input_data, input_data, 4 * sizeof(float)); + + const float hidden_state[4] = { + -0.1, 0.2, // batch1 + -0.3, 0.5 // batch2 + }; + std::memcpy(gate_data.hidden_state, hidden_state, 4 * sizeof(float)); + + const float cell_state[4] = { + -1.3, 6.2, // batch1 + -7.3, 3.5 // batch2 + }; + std::memcpy(gate_data.cell_state, cell_state, 4 * sizeof(float)); + + // Use the forget gate parameters to test small gate outputs + // output = sigmoid(W_i*i+W_h*h+b) = sigmoid([[-10,-10],[-20,-20]][0.2, + // +[[-10,-10],[-20,-20]][-0.1, 0.2]+[1,2]) = sigmoid([-5,-10]) = + // [6.69285092e-03, 4.53978687e-05] (Batch1) + // Similarly, we have [0.93086158 0.9945137 ] for batch 2 + const float expected_forget_gate_output[4] = {6.69285092e-3f, 4.53978687e-5f, + 0.93086158, 0.9945137}; + 
std::memcpy(gate_data.expected_forget_gate_output, + expected_forget_gate_output, 4 * sizeof(float)); + + // Use the input gate parameters to test small gate outputs + // output = sigmoid(W_i*i+W_h*h+b) = sigmoid([[10,10],[20,20]][0.2, 0.3] + // +[[10,10],[20,20]][-0.1, 0.2]+[-1,-2]) = sigmoid([5,10]) = + // [0.99330715, 0.9999546] + // Similarly, we have [0.06913842 0.0054863 ] for batch 2 + const float expected_input_gate_output[4] = {0.99330715, 0.9999546, + 0.06913842, 0.0054863}; + std::memcpy(gate_data.expected_input_gate_output, expected_input_gate_output, + 4 * sizeof(float)); + + // Use the output gate parameters to test normnal gate outputs + // output = sigmoid(W_i*i+W_h*h+b) = sigmoid([[1,1],[1,1]][0.2, 0.3] + // +[[1,1],[1,1]][-0.1, 0.2]+[0,0]) = sigmoid([0.6,0.6]) = + // [0.6456563062257954, 0.6456563062257954] + // Similarly, we have [[0.46008512 0.46008512]] for batch 2 + const float expected_output_gate_output[4] = { + 0.6456563062257954, 0.6456563062257954, 0.46008512, 0.46008512}; + std::memcpy(gate_data.expected_output_gate_output, + expected_output_gate_output, 4 * sizeof(float)); + + // Use the cell(modulation) gate parameters to tanh output + // output = tanh(W_i*i+W_h*h+b) = tanh([[1,1],[1,1]][0.2, 0.3] + // +[[1,1],[1,1]][-0.1, 0.2]+[0,0]) = tanh([0.6,0.6]) = + // [0.6456563062257954, 0.6456563062257954] + // Similarly, we have [-0.1586485 -0.1586485] for batch 2 + const float expected_cell_gate_output[4] = { + 0.5370495669980353, 0.5370495669980353, -0.1586485, -0.1586485}; + std::memcpy(gate_data.expected_cell_gate_output, expected_cell_gate_output, + 4 * sizeof(float)); + + // Cell = forget_gate*cell + input_gate*cell_gate + // Note -6.80625824 is clipped to -6 + const float expected_updated_cell[4] = {0.52475447, 0.53730665, -6, + 3.47992756}; + std::memcpy(gate_data.expected_updated_cell, expected_updated_cell, + 4 * sizeof(float)); + + // Use the updated cell state to update the hidden state + // tanh(expected_updated_cell) * 
expected_output_gate_output + const float expected_updated_hidden[4] = {0.31079388, 0.3169827, -0.46007947, + 0.45921249}; + std::memcpy(gate_data.expected_updated_hidden, expected_updated_hidden, + 4 * sizeof(float)); + return gate_data; +} + +// TODO(b/253466487): document how the golden values are arrived at +LstmEvalCheckData<12, 4, 12> Get2X2LstmEvalCheckData() { + LstmEvalCheckData<12, 4, 12> eval_data; + const float input_data[12] = { + 0.2, 0.3, 0.2, 0.3, 0.2, 0.3, // batch one + -0.98, 0.62, 0.01, 0.99, 0.49, -0.32 // batch two + }; + std::memcpy(eval_data.input_data, input_data, 12 * sizeof(float)); + + // Initialize hidden state as zeros + const float hidden_state[4] = {}; + std::memcpy(eval_data.hidden_state, hidden_state, 4 * sizeof(float)); + + // The expected model output after 3 time steps using the fixed input and + // parameters + const float expected_output[12] = { + 0.26455893, 0.26870455, 0.47935803, + 0.47937014, 0.58013272, 0.58013278, // batch1 + -1.41184672e-3f, -1.43329117e-5f, 0.46887168, + 0.46891281, 0.50054074, 0.50054148 // batch2 + }; + std::memcpy(eval_data.expected_output, expected_output, 12 * sizeof(float)); + + const float expected_hidden_state[4] = { + 0.58013272, 0.58013278, // batch1 + 0.50054074, 0.50054148 // batch2 + }; + std::memcpy(eval_data.expected_hidden_state, expected_hidden_state, + 4 * sizeof(float)); + + const float expected_cell_state[4] = { + 0.89740515, 0.8974053, // batch1 + 0.80327607, 0.80327785 // batch2 + }; + std::memcpy(eval_data.expected_cell_state, expected_cell_state, + 4 * sizeof(float)); + return eval_data; +} + +LstmNodeContent +Create2x3x2X2FloatNodeContents(const float* input_data, + const float* hidden_state_data, + const float* cell_state_data) { + // Parameters for different gates + // negative large weights for forget gate to make it really forget + const GateData forget_gate_data = { + /*.activation_weight=*/{-10, -10, -20, -20}, + /*.recurrent_weight=*/{-10, -10, -20, -20}, + 
/*.fused_bias=*/{1, 2}, + /*activation_zp_folded_bias=*/{0, 0}, + /*recurrent_zp_folded_bias=*/{0, 0}}; + // positive large weights for input gate to make it really remember + const GateData input_gate_data = { + /*.activation_weight=*/{10, 10, 20, 20}, + /*.recurrent_weight=*/{10, 10, 20, 20}, + /*.fused_bias=*/{-1, -2}, + /*activation_zp_folded_bias=*/{0, 0}, + /*recurrent_zp_folded_bias=*/{0, 0}}; + // all ones to test the behavior of tanh at normal range (-1,1) + const GateData cell_gate_data = { + /*.activation_weight=*/{1, 1, 1, 1}, + /*.recurrent_weight=*/{1, 1, 1, 1}, + /*.fused_bias=*/{0, 0}, + /*activation_zp_folded_bias=*/{0, 0}, + /*recurrent_zp_folded_bias=*/{0, 0}}; + // all ones to test the behavior of sigmoid at normal range (-1. 1) + const GateData output_gate_data = { + /*.activation_weight=*/{1, 1, 1, 1}, + /*.recurrent_weight=*/{1, 1, 1, 1}, + /*.fused_bias=*/{0, 0}, + /*activation_zp_folded_bias=*/{0, 0}, + /*recurrent_zp_folded_bias=*/{0, 0}}; + + LstmNodeContent float_node_contents( + kDefaultBuiltinData, forget_gate_data, input_gate_data, cell_gate_data, + output_gate_data); + + if (input_data != nullptr) { + float_node_contents.SetInputData(input_data); + } + if (hidden_state_data != nullptr) { + float_node_contents.SetHiddenStateData(hidden_state_data); + } + if (cell_state_data != nullptr) { + float_node_contents.SetCellStateData(cell_state_data); + } + return float_node_contents; +} + +NodeQuantizationParameters Get2X2Int8LstmQuantizationSettings() { + NodeQuantizationParameters quantization_settings; + quantization_settings.activation_type = kTfLiteInt8; + quantization_settings.weight_type = kTfLiteInt8; + quantization_settings.cell_type = kTfLiteInt16; + quantization_settings.bias_type = kTfLiteInt32; + quantization_settings.nonlinear_activation_input_scale = + 0.00024414062; // std::pow(2.0f, -12.0f) + quantization_settings.nonlinear_activation_output_scale = + 0.00003051757; // std::pow(2.0f, -15.0f) + + // state quantization 
parameters + quantization_settings.input = {/*scale=*/0.00784313725490196, /*zp=*/0, + /*symmetry=*/false}; + quantization_settings.output = {/*scale=*/0.004705882165580988, /*zp=*/-21, + /*symmetry=*/false}; + quantization_settings.hidden_state = {/*scale=*/0.004705882165580988, + /*zp=*/-21, /*symmetry=*/false}; + quantization_settings.cell_state = {/*scale=*/0.00024414062, /*zp=*/0, + /*symmetry=*/true}; + + // gate quantization parameters + quantization_settings.forget_gate = { + {/*scale=*/0.15748031496062992, /*zp=*/0, /*symmetry=*/true}, + {/*scale=*/0.15748031496062992, /*zp=*/0, /*symmetry=*/true}, + {/*scale=*/0.0012351397251814111, /*zp=*/0, /*symmetry=*/true}}; + quantization_settings.input_gate = { + {/*scale=*/0.15748031496062992, /*zp=*/0, /*symmetry=*/true}, + {/*scale=*/0.15748031496062992, /*zp=*/0, /*symmetry=*/true}, + {/*scale=*/0.0012351397251814111, /*zp=*/0, /*symmetry=*/true}}; + quantization_settings.cell_gate = { + {/*scale=*/0.007874015748031496, /*zp=*/0, /*symmetry=*/true}, + {/*scale=*/0.007874015748031496, /*zp=*/0, /*symmetry=*/true}, + {/*scale=*/6.175698625907056e-5, /*zp=*/0, /*symmetry=*/true}}; + quantization_settings.output_gate = { + {/*scale=*/0.007874015748031496, /*zp=*/0, /*symmetry=*/true}, + {/*scale=*/0.007874015748031496, /*zp=*/0, /*symmetry=*/true}, + {/*scale=*/6.175698625907056e-5, /*zp=*/0, /*symmetry=*/true}}; + return quantization_settings; +} + +NodeQuantizationParameters Get2X2Int16LstmQuantizationSettings() { + NodeQuantizationParameters quantization_settings; + quantization_settings.activation_type = kTfLiteInt16; + quantization_settings.weight_type = kTfLiteInt8; + quantization_settings.cell_type = kTfLiteInt16; + quantization_settings.bias_type = kTfLiteInt64; + quantization_settings.nonlinear_activation_input_scale = + 0.00024414062; // std::pow(2.0f, -12.0f) + quantization_settings.nonlinear_activation_output_scale = + 0.00003051757; // std::pow(2.0f, -15.0f) + + // state quantization parameters + 
quantization_settings.input = {/*scale=*/3.0518044e-5, /*zp=*/0, + /*symmetry=*/false}; + quantization_settings.output = {/*scale=*/1.8310826e-5, /*zp=*/-5461, + /*symmetry=*/false}; + quantization_settings.hidden_state = {/*scale=*/1.8310826e-5, /*zp=*/-5461, + /*symmetry=*/false}; + quantization_settings.cell_state = {/*scale=*/0.00024414062, /*zp=*/0, + /*symmetry=*/true}; + + // gate quantization parameters + quantization_settings.forget_gate = { + {/*scale=*/0.15748031496062992, /*zp=*/0, /*symmetry=*/true}, + {/*scale=*/0.15748031496062992, /*zp=*/0, /*symmetry=*/true}, + {/*scale=*/4.8059911474468205e-06, /*zp=*/0, /*symmetry=*/true}}; + quantization_settings.input_gate = { + {/*scale=*/0.15748031496062992, /*zp=*/0, /*symmetry=*/true}, + {/*scale=*/0.15748031496062992, /*zp=*/0, /*symmetry=*/true}, + {/*scale=*/4.8059911474468205e-06, /*zp=*/0, /*symmetry=*/true}}; + quantization_settings.cell_gate = { + {/*scale=*/0.007874015748031496, /*zp=*/0, /*symmetry=*/true}, + {/*scale=*/0.007874015748031496, /*zp=*/0, /*symmetry=*/true}, + {/*scale=*/2.40299557372341e-07, /*zp=*/0, /*symmetry=*/true}}; + quantization_settings.output_gate = { + {/*scale=*/0.007874015748031496, /*zp=*/0, /*symmetry=*/true}, + {/*scale=*/0.007874015748031496, /*zp=*/0, /*symmetry=*/true}, + {/*scale=*/2.40299557372341e-07, /*zp=*/0, /*symmetry=*/true}}; + return quantization_settings; +} + +LstmNodeContent +Create2x3x2X2Int8NodeContents(const float* input_data, + const float* hidden_state, + const float* cell_state) { + auto float_node_content = + Create2x3x2X2FloatNodeContents(input_data, hidden_state, cell_state); + const auto quantization_settings = Get2X2Int8LstmQuantizationSettings(); + return CreateIntegerNodeContents(quantization_settings, + /*fold_zero_point=*/true, + float_node_content); +} + +LstmNodeContent +Create2x3x2X2Int16NodeContents(const float* input_data, + const float* hidden_state, + const float* cell_state) { + auto float_node_content = + 
Create2x3x2X2FloatNodeContents(input_data, hidden_state, cell_state); + const auto quantization_settings = Get2X2Int16LstmQuantizationSettings(); + return CreateIntegerNodeContents(quantization_settings, + /*fold_zero_point=*/false, + float_node_content); +} + +} // namespace testing +} // namespace tflite diff --git a/tensorflow/lite/micro/kernels/testdata/lstm_test_data.h b/tensorflow/lite/micro/kernels/testdata/lstm_test_data.h new file mode 100644 index 0000000..3edf420 --- /dev/null +++ b/tensorflow/lite/micro/kernels/testdata/lstm_test_data.h @@ -0,0 +1,579 @@ +/* Copyright 2022 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/ + +#ifndef TENSORFLOW_LITE_MICRO_KERNELS_TESTDATA_LSTM_TEST_DATA_H_ +#define TENSORFLOW_LITE_MICRO_KERNELS_TESTDATA_LSTM_TEST_DATA_H_ +#include + +#include "tensorflow/lite/c/common.h" +#include "tensorflow/lite/kernels/internal/portable_tensor_utils.h" +#include "tensorflow/lite/micro/kernels/lstm_shared.h" +#include "tensorflow/lite/micro/test_helpers.h" + +namespace tflite { +namespace testing { +// Data structure to store all the data used to check output of internal gates +// of one time step +// input_size = batch_size*input_dimension (size of the input array) +// gate_output_size = batch_size*state_dimension (size of the gate output) +template +struct GateOutputCheckData { + float input_data[input_size]; + float hidden_state[gate_output_size]; + float cell_state[gate_output_size]; + float expected_forget_gate_output[gate_output_size]; + float expected_input_gate_output[gate_output_size]; + float expected_output_gate_output[gate_output_size]; + float expected_cell_gate_output[gate_output_size]; + float expected_updated_cell[gate_output_size]; + float expected_updated_hidden[gate_output_size]; +}; + +// Data structure to store all the data used to check the output of the kernel +// of multiple batch, multiple timesteps +// input_size = batch_size*time_steps*input_dimension (size of the input array) +// gate_output_size = batch_size*state_dimension (size of the gate output) +// output_size = time_steps*gate_output_size (size of the output from the +// kernel) +template +struct LstmEvalCheckData { + float input_data[input_size]; + float hidden_state[gate_output_size]; + float expected_output[output_size]; + float expected_hidden_state[gate_output_size]; + float expected_cell_state[gate_output_size]; +}; + +// Struct that holds the weight/bias information for a standard gate (i.e. no +// modification such as layer normalization, peephole, etc.) 
+// Every gate is defined by the type and size of the weights (bias included) +// inside. +// Specifically, types are weight type and bias type (normally the same +// type of MatMul accumulator). +// activation_weight has shape (hidden state dimension * input tensor dimension) +// recurrent_weight has shape (hidden state dimension * hidden state dimension) +// bias has shape (hidden state dimension, 1) +template +struct GateData { + WeightType activation_weight[state_dimension * input_dimension]; + WeightType recurrent_weight[state_dimension * state_dimension]; + BiasType fused_bias[state_dimension]; + // Quantized model folded the zero point of activations into biases: + // bias + zero_point * weight. + // Note: folded bias is only required for the legacy 8x8->16 pass. Therefore + // the data type is fixed here to avoid compilation errors (the computation of + // folding does not support other types) + int32_t activation_zp_folded_bias[state_dimension]; + int32_t recurrent_zp_folded_bias[state_dimension]; +}; + +// A struct that holds quantization parameters for a LSTM Tensor +struct TensorQuantizationParameters { + double scale; + int zero_point; + bool symmetry; +}; + +// A struct that holds quantization parameters for an internal gate, which is +// defined by activation/recurrent weight and bias (assuming no internal layer +// normalization) +struct GateQuantizationParameters { + TensorQuantizationParameters activation_weight; + TensorQuantizationParameters recurrent_weight; + TensorQuantizationParameters bias; +}; + +// A struct that holds the quantization settings for the LSTM node. Data +// members can be grouped into five parts. +// 1. Data types (activation,weight, cell, bias) +// 2. Non-linear activation (i.e., tanh and sigmoid) fixed point +// calculation settings +// 3. Input/output tensor quantization settings +// 4. Internal state (hidden and cell) quantization settings +// 5. 
Internal gate (forget, input, cell, output) settings +struct NodeQuantizationParameters { + TfLiteType activation_type; + TfLiteType weight_type; + TfLiteType cell_type; + TfLiteType bias_type; + // Fixed point setting for integer nonlinear activation calculation + double nonlinear_activation_input_scale; + double nonlinear_activation_output_scale; + // Quantization parameters for input/output + TensorQuantizationParameters input; + TensorQuantizationParameters output; + // Quantization parameters for internal states + TensorQuantizationParameters hidden_state; + TensorQuantizationParameters cell_state; + // Quantization parameters for gates + GateQuantizationParameters forget_gate; + GateQuantizationParameters input_gate; + GateQuantizationParameters cell_gate; + GateQuantizationParameters output_gate; +}; + +// Data structure that holds all the information to evaluate a LSTM kernel +// (mimic the LSTM node). +// Tensor Types: +// ActivationType defines the data type of input/output of the layer. The hidden +// state has the ActivationType as well since it is the layer output of the +// previous time. +// WeightType defines the weight data type inside the internal gates. +// BiasType defines the bias data type inside the internal gates. (normally the +// same type of MatMul accumulator). +// Tensor Shapes: +// The input to the layer has shape (batch_size,time_steps,input_dimension). +// Both the hidden state and cell state has shape (state_dimension, 1) +// The output of the layer has shape (batch_size,time_steps,state_dimension) +// Note: state values can change through calls (stateful) +template +class LstmNodeContent { + public: + LstmNodeContent(const LstmNodeContent& other) = default; + LstmNodeContent& operator=(const LstmNodeContent& other) = default; + // Use the general model setting (builtin data) and the four gates data to + // construct the node content. 
Note the input, hidden state, and cell state + // data is provided later for flexible testing (initialize as zero now) + LstmNodeContent( + const TfLiteUnidirectionalSequenceLSTMParams builtin_data, + const GateData + forget_gate_params, + const GateData + input_gate_params, + const GateData + cell_gate_params, + const GateData + output_gate_params) + : builtin_data_(builtin_data), + forget_gate_data_(forget_gate_params), + input_gate_data_(input_gate_params), + cell_gate_data_(cell_gate_params), + output_gate_data_(output_gate_params) { + InitializeTensors(); + } + + // Add quantization parameters (scale, zero point) to tensors + // Only required for the integer kernel + void AddQuantizationParameters( + const NodeQuantizationParameters& quantization_params) { + quantization_settings_ = quantization_params; + // Input Tensor + SetTensorQuantizationParam(kLstmInputTensor, quantization_params.input); + // Forget Gate Tensors + const auto& forget_gate_quant_param = quantization_params.forget_gate; + SetTensorQuantizationParam(kLstmInputToForgetWeightsTensor, + forget_gate_quant_param.activation_weight); + SetTensorQuantizationParam(kLstmRecurrentToForgetWeightsTensor, + forget_gate_quant_param.recurrent_weight); + SetTensorQuantizationParam(kLstmForgetGateBiasTensor, + forget_gate_quant_param.bias); + // Input Gate Tensors + const auto& input_gate_quant_param = quantization_params.input_gate; + SetTensorQuantizationParam(kLstmInputToInputWeightsTensor, + input_gate_quant_param.activation_weight); + SetTensorQuantizationParam(kLstmRecurrentToInputWeightsTensor, + input_gate_quant_param.recurrent_weight); + SetTensorQuantizationParam(kLstmInputGateBiasTensor, + input_gate_quant_param.bias); + // Cell Gate Tensors + const auto& cell_gate_quant_param = quantization_params.cell_gate; + SetTensorQuantizationParam(kLstmInputToCellWeightsTensor, + cell_gate_quant_param.activation_weight); + SetTensorQuantizationParam(kLstmRecurrentToCellWeightsTensor, + 
cell_gate_quant_param.recurrent_weight); + SetTensorQuantizationParam(kLstmCellGateBiasTensor, + cell_gate_quant_param.bias); + // Output Gate Tensors + const auto& output_gate_quant_param = quantization_params.output_gate; + SetTensorQuantizationParam(kLstmInputToOutputWeightsTensor, + output_gate_quant_param.activation_weight); + SetTensorQuantizationParam(kLstmRecurrentToOutputWeightsTensor, + output_gate_quant_param.recurrent_weight); + SetTensorQuantizationParam(kLstmOutputGateBiasTensor, + output_gate_quant_param.bias); + // State Tensors + SetTensorQuantizationParam(kLstmOutputStateTensor, + quantization_params.hidden_state); + SetTensorQuantizationParam(kLstmCellStateTensor, + quantization_params.cell_state); + // Output Tensor + SetTensorQuantizationParam(24, quantization_params.output); + } + + // Provide interface to set the input tensor values for flexible testing + void SetInputData(const ActivationType* data) { + std::memcpy( + input_, data, + batch_size * input_dimension * time_steps * sizeof(ActivationType)); + SetTensor(kLstmInputTensor, input_, input_size_); + } + const ActivationType* GetInputData() const { return input_; } + + // Provide interface to set the hidden state tensor values for flexible + // testing + void SetHiddenStateData(const ActivationType* data) { + std::memcpy(hidden_state_, data, + batch_size * state_dimension * sizeof(ActivationType)); + } + ActivationType* GetHiddenStateData() { return hidden_state_; } + + // Provide interface to set the cell state tensor values for flexible + // testing + void SetCellStateData(const CellType* data) { + std::memcpy(cell_state_, data, + batch_size * state_dimension * sizeof(CellType)); + } + CellType* GetCellStateData() { return cell_state_; } + ActivationType* GetOutputData() { return output_; } + + // Internal tensors, see lstm_shared.h for tensor names + TfLiteEvalTensor* GetEvalTensor(const int tensor_index) { + auto valid_index = input_tensor_indices_[tensor_index + 1]; + if 
(valid_index < 0) { + return nullptr; + } + return &eval_tensors_[tensor_index]; + } + + TfLiteTensor* GetTensors() { return tensors_; } + + // Required by the kernel runner + TfLiteIntArray* KernelInputs() { + return IntArrayFromInts(input_tensor_indices_); + } + // Required by the kernel runner + TfLiteIntArray* KernelOutputs() { + return IntArrayFromInts(output_tensor_indices_); + } + + // Variable tensors (will be changed, can not be const) + TfLiteEvalTensor* HiddenStateEvalTensor() { + return &eval_tensors_[kLstmOutputStateTensor]; + } + TfLiteEvalTensor* CellStateEvalTensor() { + return &eval_tensors_[kLstmCellStateTensor]; + } + TfLiteEvalTensor* OutputEvalTensor() { return &eval_tensors_[24]; } + + const GateData& + ForgetGateData() const { + return forget_gate_data_; + } + const GateData& + InputGateData() const { + return input_gate_data_; + } + const GateData& + CellGateData() const { + return cell_gate_data_; + } + const GateData& + OutputGateData() const { + return output_gate_data_; + } + + const TfLiteUnidirectionalSequenceLSTMParams& BuiltinData() const { + return builtin_data_; + } + + const NodeQuantizationParameters& QuantizationSettings() const { + return quantization_settings_; + } + + private: + void InitializeTensors() { + // Invalid all the input tensors untill we set it + input_tensor_indices_[0] = 24; // tot elements + for (size_t i = 1; i < 25; i++) { + input_tensor_indices_[i] = kTfLiteOptionalTensor; + } + // Input Tensor + SetTensor(kLstmInputTensor, input_, input_size_); + // Forget Gate Tensors + SetTensor(kLstmInputToForgetWeightsTensor, + forget_gate_data_.activation_weight, activation_weight_size_); + SetTensor(kLstmRecurrentToForgetWeightsTensor, + forget_gate_data_.recurrent_weight, recurrent_weight_size_); + SetTensor(kLstmForgetGateBiasTensor, forget_gate_data_.fused_bias, + bias_size_); + // Input Gate Tensors + SetTensor(kLstmInputToInputWeightsTensor, + input_gate_data_.activation_weight, activation_weight_size_); + 
SetTensor(kLstmRecurrentToInputWeightsTensor, + input_gate_data_.recurrent_weight, recurrent_weight_size_); + SetTensor(kLstmInputGateBiasTensor, input_gate_data_.fused_bias, + bias_size_); + // Cell Gate Tensors + SetTensor(kLstmInputToCellWeightsTensor, cell_gate_data_.activation_weight, + activation_weight_size_); + SetTensor(kLstmRecurrentToCellWeightsTensor, + cell_gate_data_.recurrent_weight, recurrent_weight_size_); + SetTensor(kLstmCellGateBiasTensor, cell_gate_data_.fused_bias, bias_size_); + // Output Gate Tensors + SetTensor(kLstmInputToOutputWeightsTensor, + output_gate_data_.activation_weight, activation_weight_size_); + SetTensor(kLstmRecurrentToOutputWeightsTensor, + output_gate_data_.recurrent_weight, recurrent_weight_size_); + SetTensor(kLstmOutputGateBiasTensor, output_gate_data_.fused_bias, + bias_size_); + // State Tensors + SetTensor(kLstmOutputStateTensor, hidden_state_, state_size_, + /*is_variable=*/true); + SetTensor(kLstmCellStateTensor, cell_state_, state_size_, + /*is_variable=*/true); + // // Output Tensor + SetTensor(24, output_, output_size_, /*is_variable=*/true); + } + + template + void SetTensor(const int index, const T* data, int* dims, + const bool is_variable = false) { + // Lite tensors for kernel level testing + tensors_[index].data.data = const_cast(data); + tensors_[index].dims = IntArrayFromInts(dims); + tensors_[index].type = typeToTfLiteType(); + tensors_[index].is_variable = is_variable; + // Eval tensors for internal computation testing + eval_tensors_[index].data.data = const_cast(data); + eval_tensors_[index].dims = IntArrayFromInts(dims); + eval_tensors_[index].type = typeToTfLiteType(); + // update the index + if (index < 24) { + input_tensor_indices_[index + 1] = index; + } + } + + void SetTensorQuantizationParam( + const int index, const TensorQuantizationParameters& quant_param) { + tensors_[index].params.scale = quant_param.scale; + tensors_[index].params.zero_point = quant_param.zero_point; + } + + const 
TfLiteUnidirectionalSequenceLSTMParams builtin_data_; + GateData + forget_gate_data_; + GateData + input_gate_data_; + GateData + cell_gate_data_; + GateData + output_gate_data_; + + // Keep to ease the testing process (although all quantization information can + // be obtained from individual tensors, they are well organized here and light + // weighted) + NodeQuantizationParameters quantization_settings_; + + // Not const since IntArrayFromInts takes int *; the first element of the + // array must be the size of the array + int input_size_[4] = {3, batch_size, time_steps, input_dimension}; + int output_size_[4] = {3, batch_size, time_steps, state_dimension}; + // weight tensor has C-style "row-major" memory ordering + int activation_weight_size_[3] = {2, state_dimension, input_dimension}; + int recurrent_weight_size_[3] = {2, state_dimension, state_dimension}; + int bias_size_[2] = {1, state_dimension}; + int state_size_[3] = {2, batch_size, state_dimension}; + + // see lstm_shared.h for tensor names, the last tensor is the output tensor + TfLiteTensor tensors_[24 + 1]; + // Use for internel kernel testing + TfLiteEvalTensor eval_tensors_[24 + 1]; + // indices for the tensors inside the node (required by kernel runner) + int input_tensor_indices_[1 + 24] = {}; + // single output (last in the tensors array) + int output_tensor_indices_[2] = {1, 24}; + + // tennsor data + // states are initialized to zero + ActivationType hidden_state_[batch_size * state_dimension] = {}; + CellType cell_state_[batch_size * state_dimension] = {}; + // input is defined in the ModelContent (const across all derived models) + ActivationType input_[batch_size * input_dimension * time_steps] = {}; + ActivationType output_[batch_size * state_dimension * time_steps] = {}; +}; + +// Converts floating point gate parameters to the corresponding quantized +// version +template +GateData +CreateQuantizedGateData( + const GateData& + gate_parameters, + const TensorQuantizationParameters& 
input_quantization_params, + const TensorQuantizationParameters& output_quantization_params, + const GateQuantizationParameters& gate_quantization_params, + const bool fold_zero_point) { + GateData + quantized_gate_params; + tflite::SymmetricQuantize(gate_parameters.activation_weight, + quantized_gate_params.activation_weight, + state_dimension * input_dimension, + gate_quantization_params.activation_weight.scale); + tflite::SymmetricQuantize(gate_parameters.recurrent_weight, + quantized_gate_params.recurrent_weight, + state_dimension * state_dimension, + gate_quantization_params.recurrent_weight.scale); + tflite::SymmetricQuantize(gate_parameters.fused_bias, + quantized_gate_params.fused_bias, state_dimension, + gate_quantization_params.bias.scale); + // Note: steps below are not required for the generalized LSTM evaluation + // (e.g., 16bits activation) + if (fold_zero_point) { + // Copy the bias values to prepare zero_point folded + // bias precomputation. bias has same scale as + // input_scale*input_weight_scale) + std::memcpy(quantized_gate_params.activation_zp_folded_bias, + quantized_gate_params.fused_bias, 2 * sizeof(int32_t)); + // Pre-calculate bias - zero_point * weight (a constant). 
+ tflite::tensor_utils::MatrixScalarMultiplyAccumulate( + quantized_gate_params.activation_weight, + -1 * input_quantization_params.zero_point, 2, 2, + quantized_gate_params.activation_zp_folded_bias); + + // Initialize the folded bias to zeros for accumulation + for (size_t i = 0; i < 2; i++) { + quantized_gate_params.recurrent_zp_folded_bias[i] = 0; + } + // Calculate : -zero_point * weight since it is a constant + tflite::tensor_utils::MatrixScalarMultiplyAccumulate( + quantized_gate_params.recurrent_weight, + -1 * output_quantization_params.zero_point, 2, 2, + quantized_gate_params.recurrent_zp_folded_bias); + } + return quantized_gate_params; +} + +// Create integer LSTM node content from the float node contents and +// quantization settings +// Note: fold_zero_point folds the zero point into the bias (precomputation), +// which is not required for the generalized integer inference (16 bits act +// LSTM). +template +LstmNodeContent +CreateIntegerNodeContents( + const NodeQuantizationParameters& quantization_settings, + const bool fold_zero_point, + LstmNodeContent& float_node_contents) { + const auto quantized_forget_gate_data = + CreateQuantizedGateData( + float_node_contents.ForgetGateData(), quantization_settings.input, + quantization_settings.output, quantization_settings.forget_gate, + fold_zero_point); + const auto quantized_input_gate_data = + CreateQuantizedGateData( + float_node_contents.InputGateData(), quantization_settings.input, + quantization_settings.output, quantization_settings.input_gate, + fold_zero_point); + const auto quantized_cell_gate_data = + CreateQuantizedGateData( + float_node_contents.CellGateData(), quantization_settings.input, + quantization_settings.output, quantization_settings.cell_gate, + fold_zero_point); + const auto quantized_output_gate_params = + CreateQuantizedGateData( + float_node_contents.OutputGateData(), quantization_settings.input, + quantization_settings.output, quantization_settings.output_gate, + 
fold_zero_point); + LstmNodeContent + quantized_node_content( + float_node_contents.BuiltinData(), quantized_forget_gate_data, + quantized_input_gate_data, quantized_cell_gate_data, + quantized_output_gate_params); + + // Quantize the floating point input + ActivationType quantized_input[batch_size * input_dimension * time_steps] = + {}; + Quantize(float_node_contents.GetInputData(), quantized_input, + batch_size * input_dimension * time_steps, + quantization_settings.input.scale, + quantization_settings.input.zero_point); + quantized_node_content.SetInputData(quantized_input); + // Quantize the floating point hidden state + ActivationType quantized_hidden_state[batch_size * state_dimension] = {}; + Quantize(float_node_contents.GetHiddenStateData(), quantized_hidden_state, + batch_size * state_dimension, + quantization_settings.hidden_state.scale, + quantization_settings.hidden_state.zero_point); + quantized_node_content.SetHiddenStateData(quantized_hidden_state); + // Quantize the floating point cell state + CellType quantized_cell_state[batch_size * state_dimension] = {}; + Quantize(float_node_contents.GetCellStateData(), quantized_cell_state, + batch_size * state_dimension, quantization_settings.cell_state.scale, + quantization_settings.cell_state.zero_point); + quantized_node_content.SetCellStateData(quantized_cell_state); + + // Add scale and zero point to tensors + quantized_node_content.AddQuantizationParameters(quantization_settings); + return quantized_node_content; +} + +// Get the gate output data (one time step) for a simple 2X2 model +// batch_size = 2; time_steps = 1; input_dimension = 2; state_dimension = 2 +// input_size = batch_size*time_steps*input_dimension = 4 +// gate_output_size = batch_size*state_dimension = 4 +GateOutputCheckData<4, 4> Get2X2GateOutputCheckData(); + +// Get the kernel output data for a simple 2X2 model +// batch_size = 2; time_steps = 3; input_dimension = 2; state_dimension = 2 +// input_size = 
batch_size*time_steps*input_dimension = 12 +// gate_output_size = batch_size*state_dimension = 4 +// output_size = time_steps*gate_output_size = 12 +LstmEvalCheckData<12, 4, 12> Get2X2LstmEvalCheckData(); + +// Create a 2x2 float node content +// batch_size = 2; time_steps = 3; input_dimension = 2; state_dimension = 2 +LstmNodeContent +Create2x3x2X2FloatNodeContents(const float* input_data = nullptr, + const float* hidden_state = nullptr, + const float* cell_state = nullptr); + +// Get the quantization settings for the 2X2 model +NodeQuantizationParameters Get2X2Int8LstmQuantizationSettings(); + +// Create int8 (activation) x int8 (weight) -> int16 (cell) node +// batch_size = 2; time_steps = 3; input_dimension = 2; state_dimension = 2 +// input is in float format since the source of truth is always the float +// configuration +LstmNodeContent +Create2x3x2X2Int8NodeContents(const float* input_data = nullptr, + const float* hidden_state = nullptr, + const float* cell_state = nullptr); + +// Create int16 (activation) x int8 (weight) -> int16 (cell) node +// batch_size = 2; time_steps = 3; input_dimension = 2; state_dimension = 2 +// input is in float format since the source of truth is always the float +// configuration +LstmNodeContent +Create2x3x2X2Int16NodeContents(const float* input_data = nullptr, + const float* hidden_state = nullptr, + const float* cell_state = nullptr); + +} // namespace testing +} // namespace tflite + +#endif // TENSORFLOW_LITE_MICRO_KERNELS_TESTDATA_LSTM_TEST_DATA_H_ diff --git a/tensorflow/lite/micro/kernels/testdata/lstm_test_data_generator.py b/tensorflow/lite/micro/kernels/testdata/lstm_test_data_generator.py new file mode 100644 index 0000000..97c8798 --- /dev/null +++ b/tensorflow/lite/micro/kernels/testdata/lstm_test_data_generator.py @@ -0,0 +1,192 @@ +# Copyright 2023 The TensorFlow Authors. All Rights Reserved. 
+# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================= +""" Generate the LSTM kernel test data settings in lstm_test_data.cc +1. Print the quantization settings for the test model (Get2X2Int8LstmQuantizationSettings in .cc) +2. Print the intermediate step outputs inside the LSTM for a single step LSTM invocation (Get2X2GateOutputCheckData in .cc) +3. Print the outputs for multi-step LSTM invocation (Get2X2LstmEvalCheckData in .cc) + +Every invocation gives three types information: +1. Quantized output: kernel output in integer +2. Dequantized output: Quantized output in floating point representation +3. Float output: output from the floating point computation (i.e., float kernel) + +Note: +1. Change quantization settings in _KERNEL_CONFIG to see the outcomes from various quantization schema (e.g., 8x8 Vs. 16x8) +2. Only single batch inference is supporte here. Change _GATE_TEST_DATA or _MULTISTEP_TEST_DATA to see kernel outputs on different input data +3. The quantization computation here is not the exact as the c++ implementation. The integer calculation is mimiced here using floating point. +No fixed point math is implemented here. The purpose is to illustrate the computation procedure and possible quantization error accumulation, not for bit exactness. 
+""" +from absl import app +import numpy as np + +from tflite_micro.tensorflow.lite.micro.kernels.testdata import lstm_test_data_utils + +# Basic kernel information (defaul a 2x2 model with int8 quantization) +# change activation_bits to 16 for 16x8 case +_KERNEL_CONFIG = { + 'quantization_settings': { + 'weight_bits': 8, + 'activation_bits': 8, + 'bias_bits': 32, + 'cell_bits': 16, + }, + 'shape_info': { + 'input_dim': 2, + 'state_dim': 2 + } +} + +# Kernel data setting (weight data for every gate). Corresponds to Create2x3x2X2FloatNodeContents in .cc +_KERNEL_PARAMETERS = { + 'forget_gate_data': { + 'activation_weight_data': [-10, -10, -20, -20], + 'recurrent_weight_data': [-10, -10, -20, -20], + 'bias_data': [1, 2], + }, + 'input_gate_data': { + 'activation_weight_data': [10, 10, 20, 20], + 'recurrent_weight_data': [10, 10, 20, 20], + 'bias_data': [-1, -2], + }, + 'cell_gate_data': { + 'activation_weight_data': [1, 1, 1, 1], + 'recurrent_weight_data': [1, 1, 1, 1], + 'bias_data': [0, 0], + }, + 'output_gate_data': { + 'activation_weight_data': [1, 1, 1, 1], + 'recurrent_weight_data': [1, 1, 1, 1], + 'bias_data': [0, 0], + }, +} + +# Input and states setting for gate level testing (Get2X2GateOutputCheckData in .cc) +# Only single batch inference is supported (default as batch1 in .cc) +_GATE_TEST_DATA = { + 'init_hidden_state_vals': [-0.1, 0.2], + 'init_cell_state_vals': [-1.3, 6.2], + 'input_data': [0.2, 0.3], + 'hidden_state_range': (-0.5, 0.7), + 'cell_state_range': [-8, 8], + 'input_data_range': [-1, 1] +} + +# Input and states setting for multi-step kernel testing (Get2X2LstmEvalCheckData in .cc) +# Only single batch inference is supported (default as batch1 in .cc) +_MULTISTEP_TEST_DATA = { + 'init_hidden_state_vals': [0, 0], + 'init_cell_state_vals': [0, 0], + 'input_data': [0.2, 0.3, 0.2, 0.3, 0.2, 0.3], # three time steps + 'hidden_state_range': (-0.5, 0.7), + 'cell_state_range': [-8, 8], + 'input_data_range': [-1, 1] +} + + +def 
print_tensor_quantization_params(tensor_name, tensor): + """Print the tensor quantization information (scale and zero point)""" + print(f"{tensor_name}, scale: {tensor.scale}, zero_point:" + f" {tensor.zero_point}") + + +def print_gate_tensor_params(gate_name, gate): + """Print the quantization information for a gate (input/forget/cell/output gate)""" + print(f"###### Quantization settings for {gate_name} ######") + print_tensor_quantization_params("activation weight", gate.activation_weight) + print_tensor_quantization_params("recurrent weight", gate.activation_weight) + + +def print_quantization_settings(lstm_debugger): + """Print the quantization information for a LSTM kernel""" + print_gate_tensor_params("forget gate", lstm_debugger.forget_gate_params) + print_gate_tensor_params("input gate", lstm_debugger.input_gate_params) + print_gate_tensor_params("cell gate", lstm_debugger.modulation_gate_params) + print_gate_tensor_params("output gate", lstm_debugger.output_gate_params) + print("###### State Tensors ######") + print_tensor_quantization_params("Hidden State Tensor", + lstm_debugger.hidden_state_tensor) + print_tensor_quantization_params("Cell State Tensor", + lstm_debugger.cell_state_tensor) + + +def print_one_step(lstm_debugger): + """Print the intermediate calculation results for one step LSTM invocation (Get2X2GateOutputCheckData in .cc)""" + test_data = np.array(_GATE_TEST_DATA['input_data']).reshape((-1, 1)) + input_data_range = _GATE_TEST_DATA['input_data_range'] + input_tensor = lstm_test_data_utils.assemble_quantized_tensor( + test_data, + input_data_range[0], + input_data_range[1], + symmetry=False, + num_bits=_KERNEL_CONFIG['quantization_settings']['activation_bits']) + lstm_debugger.invoke(input_tensor, debug=True) + + +def print_multi_step(lstm_debugger, debug=False): + """Print the output of every step for multi step LSTM invocation (Get2X2LstmEvalCheckData in .cc)""" + input_data = _MULTISTEP_TEST_DATA['input_data'] + input_data_range = 
_MULTISTEP_TEST_DATA['input_data_range'] + input_data_size = _KERNEL_CONFIG['shape_info']['input_dim'] + input_start_pos = 0 + steps = 0 + while input_start_pos < len(input_data): + one_step_data = np.array(input_data[input_start_pos:input_start_pos + + input_data_size]).reshape((-1, 1)) + input_tensor = lstm_test_data_utils.assemble_quantized_tensor( + one_step_data, + input_data_range[0], + input_data_range[1], + symmetry=False, + num_bits=_KERNEL_CONFIG['quantization_settings']['activation_bits']) + output_quant, output_float = lstm_debugger.invoke(input_tensor, + debug=debug) + print(f"##### Step: {steps} #####") + print(f"Quantized Output: {output_quant.flatten()}") + print( + f"Dequantized Output: {lstm_debugger.hidden_state_tensor.dequantized_data.flatten().flatten()}" + ) + print(f"Float Output: {output_float.flatten()}") + input_start_pos += input_data_size + steps += 1 + + +def main(_): + one_step_lstm_debugger = lstm_test_data_utils.QuantizedLSTMDebugger( + _KERNEL_CONFIG, + _KERNEL_PARAMETERS, + _GATE_TEST_DATA['init_hidden_state_vals'], + _GATE_TEST_DATA['hidden_state_range'], + _GATE_TEST_DATA['init_cell_state_vals'], + _GATE_TEST_DATA['cell_state_range'], + ) + print("========== Quantization Settings for the Test Kernal ========== ") + print_quantization_settings(one_step_lstm_debugger) + print("========== Single Step Invocation Intermediates ========== ") + print_one_step(one_step_lstm_debugger) + + multi_step_lstm_debugger = lstm_test_data_utils.QuantizedLSTMDebugger( + _KERNEL_CONFIG, + _KERNEL_PARAMETERS, + _MULTISTEP_TEST_DATA['init_hidden_state_vals'], + _MULTISTEP_TEST_DATA['hidden_state_range'], + _MULTISTEP_TEST_DATA['init_cell_state_vals'], + _MULTISTEP_TEST_DATA['cell_state_range'], + ) + print("========== Multi Step Invocation Intermediates ========== ") + print_multi_step(multi_step_lstm_debugger) + + +if __name__ == "__main__": + app.run(main) diff --git a/tensorflow/lite/micro/kernels/testdata/lstm_test_data_generator_test.py 
b/tensorflow/lite/micro/kernels/testdata/lstm_test_data_generator_test.py new file mode 100644 index 0000000..cb5c21d --- /dev/null +++ b/tensorflow/lite/micro/kernels/testdata/lstm_test_data_generator_test.py @@ -0,0 +1,108 @@ +# Copyright 2023 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================= +import numpy as np +import tensorflow as tf + +from tensorflow.python.framework import test_util +from tensorflow.python.platform import test +from tflite_micro.tensorflow.lite.micro.kernels.testdata import lstm_test_data_utils + +_KERNEL_CONFIG = { + 'quantization_settings': { + 'weight_bits': 8, + 'activation_bits': 8, + 'bias_bits': 32, + 'cell_bits': 16, + }, + 'shape_info': { + 'input_dim': 2, + 'state_dim': 2 + } +} + +_KERNEL_PARAMETERS = { + 'forget_gate_data': { + 'activation_weight_data': [1, 1, 1, 1], + 'recurrent_weight_data': [1, 1, 1, 1], + 'bias_data': [0, 0], + }, + 'input_gate_data': { + 'activation_weight_data': [1, 1, 1, 1], + 'recurrent_weight_data': [1, 1, 1, 1], + 'bias_data': [0, 0], + }, + 'cell_gate_data': { + 'activation_weight_data': [1, 1, 1, 1], + 'recurrent_weight_data': [1, 1, 1, 1], + 'bias_data': [0, 0], + }, + 'output_gate_data': { + 'activation_weight_data': [1, 1, 1, 1], + 'recurrent_weight_data': [1, 1, 1, 1], + 'bias_data': [0, 0], + }, +} + +_KERNEL_INITIALIZATION_SETTINGS = { + 'init_hidden_state_vals': [0, 0], + 
'init_cell_state_vals': [0, 0], + 'hidden_state_range': (-1, 1), + 'cell_state_range': [-8, 8], +} + + +def create_keras_lstm(stateful=True): + """Create a keras model with LSTM layer only for testing""" + input_layer = tf.keras.layers.Input(shape=(1, 2), batch_size=1, name="input") + lstm_output = tf.keras.layers.LSTM(units=2, + return_sequences=True, + stateful=stateful, + unit_forget_bias=False, + return_state=True, + kernel_initializer="ones", + recurrent_initializer="ones", + bias_initializer="zeros")(input_layer) + return tf.keras.Model(input_layer, lstm_output, name="LSTM") + + +class QuantizedLSTMDebuggerTest(test_util.TensorFlowTestCase): + + # only the float output from the debugger is used to setup the test data in .cc + def testFloatCompareWithKeras(self): + keras_lstm = create_keras_lstm() + lstm_debugger = lstm_test_data_utils.QuantizedLSTMDebugger( + _KERNEL_CONFIG, + _KERNEL_PARAMETERS, + _KERNEL_INITIALIZATION_SETTINGS['init_hidden_state_vals'], + _KERNEL_INITIALIZATION_SETTINGS['hidden_state_range'], + _KERNEL_INITIALIZATION_SETTINGS['init_cell_state_vals'], + _KERNEL_INITIALIZATION_SETTINGS['cell_state_range'], + ) + + num_steps = 20 + for _ in range(num_steps): + # debugger has input shape (input_dim, 1) + test_data = np.random.rand(2, 1) + input_tensor = lstm_test_data_utils.assemble_quantized_tensor( + test_data, -1, 1, False) + _, output_float = lstm_debugger.invoke(input_tensor) + output_keras, _, _ = keras_lstm.predict(test_data.reshape(1, 1, 2)) + + diff = abs(output_float.flatten() - output_keras.flatten()) + self.assertAllLess(diff, 1e-6) + + +if __name__ == "__main__": + test.main() \ No newline at end of file diff --git a/tensorflow/lite/micro/kernels/testdata/lstm_test_data_utils.py b/tensorflow/lite/micro/kernels/testdata/lstm_test_data_utils.py new file mode 100644 index 0000000..345b143 --- /dev/null +++ b/tensorflow/lite/micro/kernels/testdata/lstm_test_data_utils.py @@ -0,0 +1,531 @@ +# Copyright 2023 The TensorFlow Authors. 
All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================= +"""Utils to lstm_test_data_generator.py that helps to generate the test data for lstm kernel (lstm_test_data.cc)""" + +import numpy as np +from copy import deepcopy + + +def clip_range(vals, bit_width): + """Mimic integer calculation. + Clip the range of vals based on bit width. + e.g., clip_range([300], 8) = [127] since int8 have range [-128, 127] + Args: + vals (np.array): float representation of the integer values + bit_width (int): number of desired bits for vals + Returns: + np.array : clipped vals + """ + # Numpy integer calculation does not do saturation. Implement here + min_val = -2**(bit_width - 1) + max_val = 2**(bit_width - 1) - 1 + if vals.max() > max_val or vals.min() < min_val: + print(f"WARNING: integer overflow!") + return np.clip(vals, min_val, max_val) + + +def quantize_data(data, scale, zero_point=0, bit_width=8): + """Quantize the data to integer type with desired bit width. 
+ The quantized data is represented using float since integer calculation in + numpy may differ from other implementations (e.g., no integer saturation + protection in numpy) + Args: + data (np.array): float data + scale (float): quantization scale of the data + zero_point (integer): quantization zero point of the data + bit_width (int): number of representative bits for vals + Returns: + np.array : quantized data in float but clipped range + """ + vals = np.round(data / scale) + zero_point + return clip_range(vals, bit_width) + + +def dequantize_data(quantized_data, scale, zero_point=0): + """Dequantize the data to integer type with desired bit width. + Args: + quantized_data (np.array): quantized data + scale (float): quantization scale of the data + zero_point (integer): quantization zero point of the data + Returns: + np.array : dequantized data + """ + return scale * (quantized_data - zero_point) + + +def rescale(data, effective_scale, zero_point, num_bits): + """Rescale the data to the effective scale """ + # q = r/s + z + rescaled = np.round(data * effective_scale) + zero_point + return clip_range(rescaled, num_bits) + + +def calculate_scale(min_val, max_val, num_bits=8, symmetry=False): + """Calculate quantization scale from the range and bit width""" + num_bins = np.power(2, num_bits) - 1 + if symmetry: + return max(abs(min_val), abs(max_val)) / int(num_bins / 2) + return np.array((max_val - min_val) / num_bins, dtype=np.float32) + + +def calculate_zp(min_val, scale, num_bits=8): + """Calculate the zero point from the minimal value""" + quantized_floor = -np.power(2, num_bits) / 2 + return int(quantized_floor - min_val / scale) + + +def sigmoid(x): + """Sigmoid (floating point)""" + return 1 / (1 + np.exp(-x)) + + +def quantized_sigmoid(input, input_scale, output_scale, num_bits=16): + """Sigmoid (interger)""" + float_input = input * input_scale + float_result = sigmoid(float_input) + return quantize_data(float_result, output_scale, bit_width=num_bits) + + 
+def quantized_tanh(input, input_scale, output_scale, num_bits=16): + """Tanh (interger)""" + float_input = input * input_scale + float_result = np.tanh(float_input) + return quantize_data(float_result, output_scale, bit_width=num_bits) + + +class QuantizedTensor: + """Data structure for a quantized tensor""" + + def __init__(self, float_data, scale, zero_point, symmetry, num_bits=8): + """Tensor is initialized using the floating point data""" + self.float_data = float_data + self.scale = scale + self.zero_point = int(zero_point) + self.symmetry = symmetry + self.num_bits = num_bits + self.quantized_data = quantize_data(float_data, scale, zero_point, + num_bits) + + @property + def dequantized_data(self): + """Dequantize the quantized tensor data back to floating point""" + return dequantize_data(self.quantized_data, self.scale, + self.zero_point).flatten() + + +class QuantizedGateParams: + """Hold the quantization data and corresponding information for a LSTM gate (forget/input/cell/output gate) """ + + def __init__( + self, + quantized_activation_weight, + quantized_recurrent_weight, + bias_data_float, + shape_info, + bias_num_bits=32, + cell_num_bits=16, + modulation=False, + ): + self.shape_info = shape_info + self.activation_weight = quantized_activation_weight + self.recurrent_weight = quantized_recurrent_weight + self.bias_data_float = bias_data_float + self.modulation = modulation + self.bias_num_bits = bias_num_bits + self.cell_num_bits = cell_num_bits + # For INT16 cell state, the input scale is Q3.12 + self.nonlinear_input_scale = np.power(2.0, -(cell_num_bits - 4)) + # For INT16 cell state, the output scale is Q0.15 + self.nonlinear_output_scale = np.power(2.0, -(cell_num_bits - 1)) + + def quantize_bias_data(self, input_scale): + bias_scale = self.activation_weight.scale * input_scale + return quantize_data(self.bias_data_float, bias_scale, 0, + self.bias_num_bits) + + def fold_zeropoint(self, weight, zero_point): + # W*real = W*(quant-zero_pt) = 
Wquant - Wzero_pt + # Wzero_pt is precomputed here as a constant (implemented in TFLM) + zp_vector = zero_point * np.ones(shape=(self.shape_info['input_dim'], 1)) + zero_folded_vector = np.dot(weight, zp_vector) + return -1 * clip_range(zero_folded_vector, self.bias_num_bits) + + def compute_activation_bias(self, input_scale, input_zp): + # Wz is precomputed here and added it to the original bias (same scale) + zero_folded_vector = self.fold_zeropoint( + self.activation_weight.quantized_data, input_zp) + quantized_bias = self.quantize_bias_data(input_scale) + return zero_folded_vector + quantized_bias + + def compute_recurrent_bias(self, recurrent_zp): + # Wz is precomputed here + return self.fold_zeropoint(self.recurrent_weight.quantized_data, + recurrent_zp) + + def effective_activation_scale(self, input_scale): + # Combine input scale with output scale. Used for fc calculation + return (self.activation_weight.scale * input_scale / + self.nonlinear_input_scale) + + def effective_recurrence_scale(self, recurrent_scale): + # Combine input scale with output scale. 
Used for fc calculation + return (self.recurrent_weight.scale * recurrent_scale / + self.nonlinear_input_scale) + + +def assemble_quantized_tensor(float_data, + min_val, + max_val, + symmetry, + num_bits=8): + """Create a QuantizedTensor using floating point data, range information, and bit width""" + scale = calculate_scale(min_val, max_val, num_bits, symmetry) + zp = 0 + if not symmetry: + zp = calculate_zp(min_val, scale, num_bits) + return QuantizedTensor(float_data, + scale, + zp, + symmetry=symmetry, + num_bits=num_bits) + + +def create_gate_params(gate_parameters, model_config, modulation=False): + """Create a QuantizedGateParams using the gate paramater information and the model configuration""" + shape_info = model_config['shape_info'] + quantization_settings = model_config['quantization_settings'] + + activation_weight_data = np.array( + gate_parameters['activation_weight_data']).reshape( + (shape_info['input_dim'], shape_info['state_dim'])) + activation_weight = assemble_quantized_tensor( + activation_weight_data, + activation_weight_data.min(), + activation_weight_data.max(), + True, + quantization_settings['weight_bits'], + ) + + recurrent_weight_data = np.array( + gate_parameters['recurrent_weight_data']).reshape( + (shape_info['input_dim'], shape_info['state_dim'])) + + recurrent_weight = assemble_quantized_tensor( + recurrent_weight_data, + recurrent_weight_data.min(), + recurrent_weight_data.max(), + True, + quantization_settings['weight_bits'], + ) + + bias_data_float = np.array(gate_parameters['bias_data']).reshape( + (shape_info['input_dim'], 1)) + gate_params = QuantizedGateParams( + activation_weight, + recurrent_weight, + bias_data_float, + shape_info, + bias_num_bits=quantization_settings['bias_bits'], + cell_num_bits=quantization_settings['cell_bits'], + modulation=modulation, + ) + return gate_params + + +def gate_calculation(input, hidden_state, gate_params, debug=False): + """ + A gate calculation is tanh(FC(activation, activation 
weight) + FC(recurrent, recurrent weight)). + For the modulation (cell) gate, tanh is used; the other gates use sigmoid. + + Note: for debugging purpose, floating point calculation is conducted in parallel with the integer calculation + """ + # Quantized Version + input_fc = np.dot(gate_params.activation_weight.quantized_data, + input.quantized_data) + input_fc += gate_params.compute_activation_bias(input.scale, + input.zero_point) + input_fc = rescale(input_fc, + gate_params.effective_activation_scale(input.scale), 0, + gate_params.cell_num_bits) + recurrent_fc = np.dot(gate_params.recurrent_weight.quantized_data, + hidden_state.quantized_data) + recurrent_fc += gate_params.compute_recurrent_bias(hidden_state.zero_point) + recurrent_fc = rescale( + recurrent_fc, gate_params.effective_recurrence_scale(hidden_state.scale), + 0, gate_params.cell_num_bits) + + before_activation = clip_range(input_fc + recurrent_fc, + gate_params.cell_num_bits) + + # Float Version + float_result = np.dot(gate_params.activation_weight.float_data, + input.float_data) + float_result += np.dot(gate_params.recurrent_weight.float_data, + hidden_state.float_data) + float_result += gate_params.bias_data_float + + if debug: + print(f'input fc: {input_fc.flatten()}') + print(f'recurrent fc: {recurrent_fc.flatten()}') + + dequantized_res = dequantize_data(before_activation, + gate_params.nonlinear_input_scale) + print(f'Intermediate before activation: {before_activation.flatten()}') + print(f'dequantized :{dequantized_res.flatten()} ') + print(f'float computation result: {float_result.flatten()} ') + + diff = dequantized_res - float_result + print(f'diff percentage (%): {abs(diff/float_result).flatten()*100}') + + if gate_params.modulation: + activated = quantized_tanh(before_activation, + gate_params.nonlinear_input_scale, + gate_params.nonlinear_output_scale, + gate_params.cell_num_bits) + float_result = np.tanh(float_result) + else: + activated = quantized_sigmoid(before_activation, + 
gate_params.nonlinear_input_scale, + gate_params.nonlinear_output_scale, + gate_params.cell_num_bits) + float_result = sigmoid(float_result) + + if debug: + dequantized_res = dequantize_data(activated, + gate_params.nonlinear_output_scale) + print(f'Gate result: {activated.flatten()} ') + print(f'Dequantized: {dequantized_res.flatten()} ') + print(f'float computation result: {float_result.flatten()} ') + diff = dequantized_res - float_result + print(f'diff percentage (%): {abs(diff/float_result).flatten()*100}') + + return activated, float_result + + +# The LSTM class +class QuantizedLSTMDebugger(object): + """Help the debugging process of the LSTM kernel implementation by + 1. Exposing the kernel internal computation + 2. Run floating point calculation in parallel with the integer version + """ + + def __init__( + self, + kernel_config, + kernel_params, + init_hidden_state_vals, + hiddens_state_range, + init_cell_state_vals, + cell_state_range, + cell_clip=8, + ): + self.kernel_config = kernel_config + self.forget_gate_params = create_gate_params( + kernel_params['forget_gate_data'], kernel_config) + self.input_gate_params = create_gate_params( + kernel_params['input_gate_data'], kernel_config) + self.modulation_gate_params = create_gate_params( + kernel_params['cell_gate_data'], kernel_config, modulation=True) + self.output_gate_params = create_gate_params( + kernel_params['output_gate_data'], kernel_config) + self.quantization_settings = kernel_config['quantization_settings'] + + self.hidden_state_tensor = assemble_quantized_tensor( + np.array(init_hidden_state_vals).reshape((-1, 1)), + hiddens_state_range[0], + hiddens_state_range[1], + False, + self.quantization_settings['activation_bits'], + ) + self.cell_state_tensor = assemble_quantized_tensor( + np.array(init_cell_state_vals).reshape((-1, 1)), + cell_state_range[0], + cell_state_range[1], + True, + self.quantization_settings['cell_bits'], + ) + + self.quantized_cell_clip = quantize_data( + cell_clip, + 
self.cell_state_tensor.scale, + self.cell_state_tensor.zero_point, + self.quantization_settings['cell_bits'], + ) + + def invoke(self, input_tensor, debug=False): + assert ( + input_tensor.num_bits == self.quantization_settings['activation_bits']) + + prev_hidden_state_tensor = deepcopy(self.hidden_state_tensor) + prev_cell_state_tensor = deepcopy(self.cell_state_tensor) + + prev_hidden_state_float = prev_hidden_state_tensor.float_data + prev_cell_state_float = prev_cell_state_tensor.float_data + + # forget gate + forget_gate_quant, forget_gate_float = gate_calculation( + input_tensor, prev_hidden_state_tensor, self.forget_gate_params) + + self.cell_state_tensor.quantized_data = rescale( + prev_cell_state_tensor.quantized_data * forget_gate_quant, + self.forget_gate_params.nonlinear_output_scale, + 0, + self.quantization_settings['cell_bits'], + ) + self.cell_state_tensor.float_data = (prev_cell_state_float * + forget_gate_float) + + # input gate + input_gate_quant, input_gate_float = gate_calculation( + input_tensor, prev_hidden_state_tensor, self.input_gate_params) + + modulation_gate_quant, modulation_gate_float = gate_calculation( + input_tensor, prev_hidden_state_tensor, self.modulation_gate_params) + + gated_input_quant = rescale( + input_gate_quant * modulation_gate_quant, + self._calculate_effective_cell_scale(), + 0, + self.quantization_settings['cell_bits'], + ) + gated_input_float = input_gate_float * modulation_gate_float + + if ( + debug + ): # Hidden/cell state will be updated, break up the debug to record the intermediate state + print('======================One Step LSTM======================') + print('###### Forget Gate Output: ######') + print(f'Quantized: {forget_gate_quant.flatten()}') + dequantized_val = dequantize_data( + forget_gate_quant, self.forget_gate_params.nonlinear_output_scale, 0) + print(f'Dequantized : {dequantized_val.flatten()}') + print(f'Float : {forget_gate_float.flatten()}') + + print('###### Cell state after forgetting: 
######') + print(f'Quantized: {self.cell_state_tensor.quantized_data.flatten()}') + print( + f'Dequantized: {self.cell_state_tensor.dequantized_data.flatten()}') + print(f'Float : {self.cell_state_tensor.float_data.flatten()}') + + print('###### Input gate output: ######') + print(f'Quantized: {input_gate_quant.flatten()}') + dequantized_val = dequantize_data( + input_gate_quant, self.input_gate_params.nonlinear_output_scale, 0) + print(f'Dequantized: {dequantized_val.flatten()}') + print(f'Float : {input_gate_float.flatten()}') + + print('###### cell gate output: ######') + print(f'Quantized: {modulation_gate_quant.flatten()}') + dequantized_val = dequantize_data( + modulation_gate_quant, + self.modulation_gate_params.nonlinear_output_scale, + 0, + ) + print(f'Dequantized: {dequantized_val.flatten()}') + print(f'Float : {modulation_gate_float.flatten()}') + + print('###### Gated input (input_gate * cell_gate): ######') + print(f'Quantized: {gated_input_quant.flatten()}') + dequantized_val = dequantize_data(gated_input_quant, + self.cell_state_tensor.scale, 0) + print(f'Dequantized: {dequantized_val.flatten()}') + print(f'Float : {gated_input_float.flatten()}') + + # Update the cell state + self.cell_state_tensor.quantized_data += gated_input_quant + self._apply_cell_clip() + self.cell_state_tensor.float_data += gated_input_float + + # output gate + output_gate_quant, output_gate_float = gate_calculation( + input_tensor, prev_hidden_state_tensor, self.output_gate_params) + + # Update the hidden state + transformed_cell_quant = quantized_tanh( + self.cell_state_tensor.quantized_data, + self.output_gate_params.nonlinear_input_scale, + self.output_gate_params.nonlinear_output_scale, + self.cell_state_tensor.num_bits, + ) + + transformed_cell_float = np.tanh(self.cell_state_tensor.float_data) + + gated_output_quant = rescale( + output_gate_quant * transformed_cell_quant, + self._calculate_effective_output_scale(), + self.hidden_state_tensor.zero_point, + 
self.hidden_state_tensor.num_bits, + ) + gated_output_float = output_gate_float * transformed_cell_float + + self.hidden_state_tensor.quantized_data = gated_output_quant + self.hidden_state_tensor.float_data = gated_output_float + + if debug: + print('###### Updated cell state): ######') + print(f'Quantized: {self.cell_state_tensor.quantized_data.flatten()}') + print( + f'Dequantized: {self.cell_state_tensor.dequantized_data.flatten()}') + print(f'Float : {self.cell_state_tensor.float_data.flatten()}') + + print('###### Output gate: ######') + print(f'Quantized : {output_gate_quant.flatten()}') + dequantized_val = dequantize_data( + output_gate_quant, self.output_gate_params.nonlinear_output_scale, 0) + print(f'Dequantized: {dequantized_val.flatten()}') + print(f'Float : {output_gate_float.flatten()}') + + print('###### Tanh transformed cell: ######') + print(f'Quantized: {transformed_cell_quant.flatten()}') + dequantized_val = dequantize_data( + transformed_cell_quant, + self.output_gate_params.nonlinear_output_scale, + 0, + ) + print(f'Dequantized: {dequantized_val.flatten()}') + print(f'Float : {transformed_cell_float.flatten()}') + + print('###### Updated hidden state: ######') + print(f'Quantized: {gated_output_quant.flatten()}') + print( + f'Dequantized: {self.hidden_state_tensor.dequantized_data.flatten()}' + ) + print(f'Float : {gated_output_float.flatten()}') + + diff = abs(self.hidden_state_tensor.dequantized_data - + gated_output_float.flatten()) + max_diff_perc = diff / gated_output_float.flatten() * 100 + print(f'Max diff perc (%): {max_diff_perc}') + return gated_output_quant, gated_output_float + + def _calculate_effective_output_scale(self): + return (self.output_gate_params.nonlinear_output_scale * + self.modulation_gate_params.nonlinear_output_scale / + self.hidden_state_tensor.scale) + + def _calculate_effective_cell_scale(self): + return (self.input_gate_params.nonlinear_output_scale * + self.modulation_gate_params.nonlinear_output_scale / + 
self.cell_state_tensor.scale) + + def _apply_cell_clip(self): + cell_vals = self.cell_state_tensor.quantized_data + if (cell_vals.max() > self.quantized_cell_clip + or cell_vals.min() < -self.quantized_cell_clip): + print(f'WARNING: cell values clip to {self.quantized_cell_clip}!') + + self.cell_state_tensor.quantized_data = np.round( + np.clip(cell_vals, -self.quantized_cell_clip, + self.quantized_cell_clip)) diff --git a/tensorflow/lite/micro/kernels/transpose.cc b/tensorflow/lite/micro/kernels/transpose.cc new file mode 100644 index 0000000..710bfca --- /dev/null +++ b/tensorflow/lite/micro/kernels/transpose.cc @@ -0,0 +1,122 @@ +/* Copyright 2020 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/ +#include "tensorflow/lite/kernels/internal/reference/transpose.h" + +#include "tensorflow/lite/c/common.h" +#include "tensorflow/lite/kernels/internal/tensor_ctypes.h" +#include "tensorflow/lite/kernels/internal/types.h" +#include "tensorflow/lite/kernels/kernel_util.h" +#include "tensorflow/lite/micro/kernels/kernel_util.h" +#include "tensorflow/lite/micro/micro_log.h" + +namespace tflite { +namespace { + +constexpr int kInputTensor = 0; +constexpr int kPermTensor = 1; +constexpr int kOutputTensor = 0; + +struct TransposeContext { + TransposeContext(TfLiteContext* context, TfLiteNode* node) { + micro_context = GetMicroContext(context); + input = micro_context->AllocateTempInputTensor(node, kInputTensor); + perm = micro_context->AllocateTempInputTensor(node, kPermTensor); + output = micro_context->AllocateTempOutputTensor(node, kOutputTensor); + } + ~TransposeContext() { + micro_context->DeallocateTempTfLiteTensor(input); + micro_context->DeallocateTempTfLiteTensor(perm); + micro_context->DeallocateTempTfLiteTensor(output); + } + MicroContext* micro_context; + TfLiteTensor* input; + TfLiteTensor* perm; + TfLiteTensor* output; +}; + +TfLiteStatus Prepare(TfLiteContext* context, TfLiteNode* node) { + TF_LITE_ENSURE_EQ(context, NumInputs(node), 2); + TF_LITE_ENSURE_EQ(context, NumOutputs(node), 1); + + TransposeContext op_context(context, node); + + // Ensure validity of input tensor. + TF_LITE_ENSURE_MSG(context, NumDimensions(op_context.input) <= 5, + "Transpose op only supports 1D-5D input arrays."); + TF_LITE_ENSURE_TYPES_EQ(context, op_context.input->type, + op_context.output->type); + + int dims = NumDimensions(op_context.input); + const int32_t* perm_data = GetTensorData(op_context.perm); + + // Ensure validity of the permutations tensor as a 1D tensor. 
+ TF_LITE_ENSURE_EQ(context, NumDimensions(op_context.perm), 1); + TF_LITE_ENSURE_EQ(context, op_context.perm->dims->data[0], dims); + for (int idx = 0; idx < dims; ++idx) { + TF_LITE_ENSURE_MSG(context, (perm_data[idx] >= 0 && perm_data[idx] < dims), + "Transpose op permutations array is out of bounds."); + } + + return kTfLiteOk; +} + +TfLiteStatus Eval(TfLiteContext* context, TfLiteNode* node) { + const TfLiteEvalTensor* perm_tensor = + tflite::micro::GetEvalInput(context, node, kPermTensor); + const int32_t* perm_data = perm_tensor->data.i32; + const int size = perm_tensor->dims->data[0]; + TransposeParams params; + params.perm_count = size; + for (int i = 0; i < size; ++i) { + params.perm[i] = perm_data[i]; + } + + // Transpose kernel only does rearranging values not numeric evaluations + // on each cell. It's safe to implement per size of scalar type and this + // trick keeps the total code size in a reasonable range. + const TfLiteEvalTensor* input = + tflite::micro::GetEvalInput(context, node, kInputTensor); + TfLiteEvalTensor* output = + tflite::micro::GetEvalOutput(context, node, kOutputTensor); + switch (input->type) { + case kTfLiteFloat32: + reference_ops::Transpose(params, tflite::micro::GetTensorShape(input), + tflite::micro::GetTensorData(input), + tflite::micro::GetTensorShape(output), + tflite::micro::GetTensorData(output)); + break; + case kTfLiteInt8: + reference_ops::Transpose(params, tflite::micro::GetTensorShape(input), + tflite::micro::GetTensorData(input), + tflite::micro::GetTensorShape(output), + tflite::micro::GetTensorData(output)); + break; + default: + MicroPrintf( + "Type %s is currently not supported by Transpose. 
" + "Only float32 and int8 is supported", + TfLiteTypeGetName(input->type)); + return kTfLiteError; + } + + return kTfLiteOk; +} + +} // namespace + +TFLMRegistration Register_TRANSPOSE() { + return tflite::micro::RegisterOp(nullptr, Prepare, Eval); +} +} // namespace tflite diff --git a/tensorflow/lite/micro/kernels/transpose_conv.cc b/tensorflow/lite/micro/kernels/transpose_conv.cc new file mode 100644 index 0000000..a2ac2b4 --- /dev/null +++ b/tensorflow/lite/micro/kernels/transpose_conv.cc @@ -0,0 +1,352 @@ +/* Copyright 2021 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#include "tensorflow/lite/kernels/internal/reference/transpose_conv.h" + +#include "tensorflow/lite/c/builtin_op_data.h" +#include "tensorflow/lite/c/common.h" +#include "tensorflow/lite/kernels/internal/common.h" +#include "tensorflow/lite/kernels/internal/quantization_util.h" +#include "tensorflow/lite/kernels/internal/reference/integer_ops/transpose_conv.h" +#include "tensorflow/lite/kernels/internal/tensor_ctypes.h" +#include "tensorflow/lite/kernels/kernel_util.h" +#include "tensorflow/lite/kernels/padding.h" +#include "tensorflow/lite/micro/kernels/kernel_util.h" +#include "tensorflow/lite/micro/micro_log.h" + +namespace tflite { +namespace { + +// For the TfLite transpose_conv implementation, input tensor 0 corresponds to +// the OutputShapeTensor. 
However, since TFLM does not support dynamic tensors, +// the TFLM implementation ignores input tensor 0 and the only inputs we care +// about are kFilterTensor, kInputTensor and kBiasTensor. +constexpr int kFilterTensor = 1; +constexpr int kInputTensor = 2; +constexpr int kBiasTensor = 3; +constexpr int kOutputTensor = 0; + +// Conv is quantized along dimension 0: +// https://www.tensorflow.org/lite/performance/quantization_spec +constexpr int kConvQuantizedDimension = 0; + +struct OpData { + ConvParams params; + + // A scratch buffer is required for quantized implementations. + int scratch_buffer_index; + + // TODO(b/192090531): Remove this once all 8x16 transpose conv models use + // 64-bit biases. + int bias_converted_buffer_index; + + // Multiplier and shift arrays are required for the int8 implementation. + int32_t* per_channel_output_multiplier; + int32_t* per_channel_output_shift; +}; + +inline PaddingType RuntimePaddingType(TfLitePadding padding) { + switch (padding) { + case TfLitePadding::kTfLitePaddingSame: + return PaddingType::kSame; + case TfLitePadding::kTfLitePaddingValid: + return PaddingType::kValid; + case TfLitePadding::kTfLitePaddingUnknown: + default: + return PaddingType::kNone; + } +} + +TfLiteStatus CalculateOpData(TfLiteContext* context, TfLiteNode* node, + const TfLiteTransposeConvParams* params, int width, + int height, int filter_width, int filter_height, + const TfLiteType data_type, OpData* data) { + bool has_bias = node->inputs->size == 4; + // Check number of inputs/outputs + TF_LITE_ENSURE(context, has_bias || node->inputs->size == 3); + TF_LITE_ENSURE_EQ(context, node->outputs->size, 1); + + // Matching GetWindowedOutputSize in TensorFlow. + auto padding = params->padding; + int unused_output_width; + int unused_output_height; + TfLitePaddingValues padding_values = ComputePaddingHeightWidth( + params->stride_height, params->stride_width, 1, + 1, // Dilation height and width are always 1 for transpose_conv. 
+ height, width, filter_height, filter_width, padding, + &unused_output_height, &unused_output_width); + + data->params.padding_type = RuntimePaddingType(padding); + data->params.padding_values.width = padding_values.width; + data->params.padding_values.height = padding_values.height; + + // Note that quantized inference requires that all tensors have their + // parameters set. This is usually done during quantized training. + if (data_type != kTfLiteFloat32) { + MicroContext* micro_context = GetMicroContext(context); + + TfLiteTensor* input = + micro_context->AllocateTempInputTensor(node, kInputTensor); + TF_LITE_ENSURE(context, input != nullptr); + TfLiteTensor* filter = + micro_context->AllocateTempInputTensor(node, kFilterTensor); + TF_LITE_ENSURE(context, filter != nullptr); + TfLiteTensor* bias = + micro_context->AllocateTempInputTensor(node, kBiasTensor); + TfLiteTensor* output = + micro_context->AllocateTempOutputTensor(node, kOutputTensor); + TF_LITE_ENSURE(context, output != nullptr); + int output_channels = filter->dims->data[kConvQuantizedDimension]; + + TF_LITE_ENSURE_STATUS(tflite::PopulateConvolutionQuantizationParams( + context, input, filter, bias, output, kTfLiteActNone, + &data->params.output_multiplier, &data->params.output_shift, + &data->params.quantized_activation_min, + &data->params.quantized_activation_max, + data->per_channel_output_multiplier, data->per_channel_output_shift, + output_channels)); + + // TODO(b/192090531): Remove this once all 8x16 transpose conv models use + // 64-bit biases. 
+ if (input->type == kTfLiteInt16) { + TFLITE_DCHECK(filter->type == kTfLiteInt8); + TFLITE_DCHECK(output->type == kTfLiteInt16); + if (bias->type == kTfLiteInt16) { + TFLITE_DCHECK( + context->RequestScratchBufferInArena( + context, GetTensorShape(bias).FlatSize() * sizeof(std::int64_t), + &(data->bias_converted_buffer_index)) == kTfLiteOk); + } + } + + micro_context->DeallocateTempTfLiteTensor(input); + micro_context->DeallocateTempTfLiteTensor(filter); + micro_context->DeallocateTempTfLiteTensor(output); + if (bias != nullptr) { + micro_context->DeallocateTempTfLiteTensor(bias); + } + } + return kTfLiteOk; +} + +void* Init(TfLiteContext* context, const char* buffer, size_t length) { + TFLITE_DCHECK(context->AllocatePersistentBuffer != nullptr); + return context->AllocatePersistentBuffer(context, sizeof(OpData)); +} + +TfLiteStatus Prepare(TfLiteContext* context, TfLiteNode* node) { + TFLITE_DCHECK(node->user_data != nullptr); + TFLITE_DCHECK(node->builtin_data != nullptr); + + OpData* data = static_cast(node->user_data); + const auto params = + static_cast(node->builtin_data); + + MicroContext* micro_context = GetMicroContext(context); + + TfLiteTensor* output = + micro_context->AllocateTempOutputTensor(node, kOutputTensor); + TF_LITE_ENSURE(context, output != nullptr); + TfLiteTensor* input = + micro_context->AllocateTempInputTensor(node, kInputTensor); + TF_LITE_ENSURE(context, input != nullptr); + TfLiteTensor* filter = + micro_context->AllocateTempInputTensor(node, kFilterTensor); + TF_LITE_ENSURE(context, filter != nullptr); + + TF_LITE_ENSURE_MSG( + context, + input->type == filter->type || + (input->type == kTfLiteInt16 && filter->type == kTfLiteInt8), + "Hybrid models are not supported on TFLite Micro."); + + // Get height and width of the output. 
+ const int width = SizeOfDimension(output, 2); + const int height = SizeOfDimension(output, 1); + const int filter_width = SizeOfDimension(filter, 2); + const int filter_height = SizeOfDimension(filter, 1); + + // Dynamically allocate per-channel quantization parameters. + const int num_channels = filter->dims->data[kConvQuantizedDimension]; + data->per_channel_output_multiplier = + static_cast(context->AllocatePersistentBuffer( + context, num_channels * sizeof(int32_t))); + data->per_channel_output_shift = + static_cast(context->AllocatePersistentBuffer( + context, num_channels * sizeof(int32_t))); + + // Quantized kernels use an int32 scratch buffer. + if (input->type == kTfLiteInt8) { + TFLITE_DCHECK(context->RequestScratchBufferInArena != nullptr); + TFLITE_DCHECK(context->RequestScratchBufferInArena( + context, + GetTensorShape(output).FlatSize() * sizeof(int32_t), + &(data->scratch_buffer_index)) == kTfLiteOk); + } + + // Quantized 16x8 kernels use an int64 scratch buffer. + if (input->type == kTfLiteInt16) { + TFLITE_DCHECK(context->RequestScratchBufferInArena != nullptr); + TFLITE_DCHECK(context->RequestScratchBufferInArena( + context, + GetTensorShape(output).FlatSize() * sizeof(std::int64_t), + &(data->scratch_buffer_index)) == kTfLiteOk); + } + + // All per-channel quantized tensors need valid zero point and scale arrays. 
+ if (input->type == kTfLiteInt8 || input->type == kTfLiteInt16) { + TF_LITE_ENSURE_EQ(context, filter->quantization.type, + kTfLiteAffineQuantization); + + const auto* affine_quantization = + static_cast(filter->quantization.params); + TF_LITE_ENSURE(context, affine_quantization); + TF_LITE_ENSURE(context, affine_quantization->scale); + TF_LITE_ENSURE(context, affine_quantization->zero_point); + + TF_LITE_ENSURE(context, + affine_quantization->scale->size == 1 || + affine_quantization->scale->size == + filter->dims->data[kConvQuantizedDimension]); + TF_LITE_ENSURE_EQ(context, affine_quantization->scale->size, + affine_quantization->zero_point->size); + } + + TF_LITE_ENSURE_STATUS(CalculateOpData(context, node, params, width, height, + filter_width, filter_height, + input->type, data)); + + // Offsets (zero points) + data->params.input_offset = -input->params.zero_point; + data->params.weights_offset = -filter->params.zero_point; + data->params.output_offset = output->params.zero_point; + + // Stride + data->params.stride_width = params->stride_width; + data->params.stride_height = params->stride_height; + + micro_context->DeallocateTempTfLiteTensor(output); + micro_context->DeallocateTempTfLiteTensor(input); + micro_context->DeallocateTempTfLiteTensor(filter); + return kTfLiteOk; +} + +TfLiteStatus Eval(TfLiteContext* context, TfLiteNode* node) { + const TfLiteEvalTensor* input = + tflite::micro::GetEvalInput(context, node, kInputTensor); + const TfLiteEvalTensor* filter = + tflite::micro::GetEvalInput(context, node, kFilterTensor); + const TfLiteEvalTensor* bias = + (NumInputs(node) == 4) + ? 
tflite::micro::GetEvalInput(context, node, kBiasTensor) + : nullptr; + TfLiteEvalTensor* output = + tflite::micro::GetEvalOutput(context, node, kOutputTensor); + + TFLITE_DCHECK(node->user_data != nullptr); + const OpData& data = *(static_cast(node->user_data)); + + TF_LITE_ENSURE_EQ(context, input->type, output->type); + + switch (input->type) { // Already know in/out types are same. + case kTfLiteFloat32: { + const auto& params = + *(reinterpret_cast(node->builtin_data)); + ConvParams op_params = data.params; + CalculateActivationRange(params.activation, + &op_params.float_activation_min, + &op_params.float_activation_max); + + reference_ops::TransposeConv( + op_params, tflite::micro::GetTensorShape(input), + tflite::micro::GetTensorData(input), + tflite::micro::GetTensorShape(filter), + tflite::micro::GetTensorData(filter), + tflite::micro::GetTensorShape(bias), + tflite::micro::GetOptionalTensorData(bias), + tflite::micro::GetTensorShape(output), + tflite::micro::GetTensorData(output), + tflite::micro::GetTensorShape(nullptr), nullptr); + break; + } + case kTfLiteInt8: { + int32_t* scratch_buffer = static_cast( + context->GetScratchBuffer(context, data.scratch_buffer_index)); + reference_integer_ops::TransposeConv( + data.params, data.per_channel_output_multiplier, + data.per_channel_output_shift, tflite::micro::GetTensorShape(input), + tflite::micro::GetTensorData(input), + tflite::micro::GetTensorShape(filter), + tflite::micro::GetTensorData(filter), + tflite::micro::GetTensorShape(bias), + tflite::micro::GetOptionalTensorData(bias), + tflite::micro::GetTensorShape(output), + tflite::micro::GetTensorData(output), + tflite::micro::GetTensorShape(nullptr), nullptr, scratch_buffer); + break; + } + case kTfLiteInt16: { + std::int64_t* scratch_buffer = static_cast( + context->GetScratchBuffer(context, data.scratch_buffer_index)); + // TODO(b/192090531): Remove this once all 8x16 transpose conv models use + // 64-bit biases. 
+ if (bias != nullptr && bias->type == kTfLiteInt16) { + std::int64_t* bias_converted_buffer = + static_cast(context->GetScratchBuffer( + context, data.bias_converted_buffer_index)); + for (int i = 0; i < tflite::micro::GetTensorShape(bias).FlatSize(); + i++) { + bias_converted_buffer[i] = bias->data.i16[i]; + } + reference_integer_ops::TransposeConv( + data.params, data.per_channel_output_multiplier, + data.per_channel_output_shift, tflite::micro::GetTensorShape(input), + tflite::micro::GetTensorData(input), + tflite::micro::GetTensorShape(filter), + tflite::micro::GetTensorData(filter), + tflite::micro::GetTensorShape(bias), bias_converted_buffer, + tflite::micro::GetTensorShape(output), + tflite::micro::GetTensorData(output), + tflite::micro::GetTensorShape(nullptr), nullptr, scratch_buffer); + } else { + reference_integer_ops::TransposeConv( + data.params, data.per_channel_output_multiplier, + data.per_channel_output_shift, tflite::micro::GetTensorShape(input), + tflite::micro::GetTensorData(input), + tflite::micro::GetTensorShape(filter), + tflite::micro::GetTensorData(filter), + tflite::micro::GetTensorShape(bias), + tflite::micro::GetOptionalTensorData(bias), + tflite::micro::GetTensorShape(output), + tflite::micro::GetTensorData(output), + tflite::micro::GetTensorShape(nullptr), nullptr, scratch_buffer); + } + break; + } + default: + MicroPrintf("Type %s (%d) not supported.", TfLiteTypeGetName(input->type), + input->type); + return kTfLiteError; + } + return kTfLiteOk; +} + +} // namespace + +TFLMRegistration Register_TRANSPOSE_CONV() { + return tflite::micro::RegisterOp(Init, Prepare, Eval); +} + +} // namespace tflite diff --git a/tensorflow/lite/micro/kernels/transpose_conv_test.cc b/tensorflow/lite/micro/kernels/transpose_conv_test.cc new file mode 100644 index 0000000..0ddb3b2 --- /dev/null +++ b/tensorflow/lite/micro/kernels/transpose_conv_test.cc @@ -0,0 +1,475 @@ +/* Copyright 2023 The TensorFlow Authors. All Rights Reserved. 
+ +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#include "tensorflow/lite/c/builtin_op_data.h" +#include "tensorflow/lite/c/common.h" +#include "tensorflow/lite/micro/kernels/conv_test.h" +#include "tensorflow/lite/micro/kernels/kernel_runner.h" +#include "tensorflow/lite/micro/micro_utils.h" +#include "tensorflow/lite/micro/test_helpers.h" +#include "tensorflow/lite/micro/testing/micro_test.h" + +namespace tflite { +namespace testing { +namespace { + +// Common inputs and outputs. +constexpr int kInputElements = 32; +static int kInputShape[] = {4, 1, 4, 4, 2}; +static const float kInputData[kInputElements] = { + 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, + 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32}; + +constexpr int kFilterElements = 18; +static int kFilterShape[] = {4, 1, 3, 3, 2}; +static const float kFilterData[kFilterElements] = { + 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18}; + +constexpr int kBiasElements = 1; +static int kBiasShape[] = {4, 1, 1, 1, 1}; +static const float kBiasData[kBiasElements] = {0}; + +constexpr int kOutputElements = 16; +static int kOutputShape[] = {4, 1, 4, 4, 1}; +static const float kGoldenData[kOutputElements] = { + 184, 412, 568, 528, 678, 1347, 1689, 1434, + 1494, 2715, 3057, 2442, 1968, 3352, 3652, 2760}; + +// Transpose conv uses TfLiteConvParams. 
+static TfLiteConvParams common_conv_params = {kTfLitePaddingSame, // padding + 1, // stride_width + 1, // stride_height + kTfLiteActNone, + 1, + 1}; + +template +TfLiteStatus InvokeTransposeConv(TfLiteTensor* tensors, int tensors_size, + int output_length, + TfLiteConvParams* conv_params, + T* output_data) { + int inputs_array_data[] = {4, 0, 1, 2, 3}; + TfLiteIntArray* inputs_array = IntArrayFromInts(inputs_array_data); + int outputs_array_data[] = {1, 4}; + TfLiteIntArray* outputs_array = IntArrayFromInts(outputs_array_data); + + const TFLMRegistration registration = tflite::Register_TRANSPOSE_CONV(); + micro::KernelRunner runner(registration, tensors, tensors_size, inputs_array, + outputs_array, conv_params); + + const char* init_data = reinterpret_cast(conv_params); + TfLiteStatus status = runner.InitAndPrepare(init_data); + if (status != kTfLiteOk) { + return status; + } + return runner.Invoke(); +} + +template +TfLiteStatus ValidateTransposeConvGoldens(TfLiteTensor* tensors, + int tensors_size, + const T* expected_output_data, + int output_length, + TfLiteConvParams* conv_params, + T* output_data, float tolerance) { + TfLiteStatus status = InvokeTransposeConv( + tensors, tensors_size, output_length, conv_params, output_data); + if (status != kTfLiteOk) { + return status; + } + for (int i = 0; i < output_length; ++i) { + TF_LITE_MICRO_EXPECT_NEAR(expected_output_data[i], output_data[i], + tolerance); + } + return kTfLiteOk; +} + +TfLiteStatus TestTransposeConvFloat( + int* input_dims_data, const float* input_data, int* filter_dims_data, + const float* filter_data, int* bias_dims_data, const float* bias_data, + int* output_dims_data, const float* expected_output_data, + TfLiteConvParams* conv_params, float* output_data) { + TfLiteIntArray* input_dims = IntArrayFromInts(input_dims_data); + TfLiteIntArray* filter_dims = IntArrayFromInts(filter_dims_data); + TfLiteIntArray* bias_dims = IntArrayFromInts(bias_dims_data); + TfLiteIntArray* output_dims = 
IntArrayFromInts(output_dims_data); + const int output_dims_count = ElementCount(*output_dims); + + int output_shape_dims_data[] = {1, 0}; + int32_t* output_shape = nullptr; + TfLiteIntArray* output_shape_dims = IntArrayFromInts(output_shape_dims_data); + + constexpr int inputs_size = 4; + constexpr int outputs_size = 1; + constexpr int tensors_size = inputs_size + outputs_size; + TfLiteTensor tensors[tensors_size] = { + CreateTensor(output_shape, output_shape_dims), + CreateTensor(filter_data, filter_dims), + CreateTensor(input_data, input_dims), + CreateTensor(bias_data, bias_dims), + CreateTensor(output_data, output_dims), + }; + + return ValidateTransposeConvGoldens(tensors, tensors_size, + expected_output_data, output_dims_count, + conv_params, output_data, 0.001f); +} + +TfLiteStatus TestTransposeConvQuantized( + int* input_dims_data, const float* input_data, int8_t* input_quantized, + float input_scale, int input_zero_point, int* filter_dims_data, + const float* filter_data, int8_t* filter_quantized, float filter_scale, + int* bias_dims_data, const float* bias_data, int32_t* bias_quantized, + float* bias_scales, int* bias_zero_points, int* output_dims_data, + const float* expected_output_data, int8_t* expected_output_quantized, + float output_scale, int output_zero_point, TfLiteConvParams* conv_params, + int8_t* output_data) { + TfLiteIntArray* input_dims = IntArrayFromInts(input_dims_data); + TfLiteIntArray* filter_dims = IntArrayFromInts(filter_dims_data); + TfLiteIntArray* bias_dims = IntArrayFromInts(bias_dims_data); + TfLiteIntArray* output_dims = IntArrayFromInts(output_dims_data); + const int output_dims_count = ElementCount(*output_dims); + + int filter_zero_points[5]; + float filter_scales[5]; + TfLiteAffineQuantization filter_quant; + TfLiteTensor filter_tensor = CreateSymmetricPerChannelQuantizedTensor( + filter_data, filter_quantized, filter_dims, filter_scales, + filter_zero_points, &filter_quant, 0 /* quantized dimension */); + 
tflite::Quantize(expected_output_data, expected_output_quantized, + output_dims_count, output_scale, 0); + + int output_shape_dims_data[] = {1, 0}; + int32_t* output_shape = nullptr; + TfLiteIntArray* output_shape_dims = IntArrayFromInts(output_shape_dims_data); + + constexpr int inputs_size = 4; + constexpr int outputs_size = 1; + constexpr int tensors_size = inputs_size + outputs_size; + TfLiteTensor tensors[tensors_size] = { + CreateTensor(output_shape, output_shape_dims), filter_tensor, + CreateQuantizedTensor(input_data, input_quantized, input_dims, + input_scale, input_zero_point), + CreateQuantizedBiasTensor(bias_data, bias_quantized, bias_dims, + input_scale, filter_scale), + CreateQuantizedTensor(output_data, output_dims, output_scale, + output_zero_point)}; + + return ValidateTransposeConvGoldens( + tensors, tensors_size, expected_output_quantized, output_dims_count, + conv_params, output_data, 1.0f); +} + +template +TfLiteStatus TestTransposeConvQuantized( + int* input_dims_data, const float* input_data, int16_t* input_quantized, + float input_scale, int input_zero_point, int* filter_dims_data, + const float* filter_data, int8_t* filter_quantized, float filter_scale, + int* bias_dims_data, const float* bias_data, T* bias_quantized, + float* bias_scales, int* bias_zero_points, int* output_dims_data, + const float* expected_output_data, int16_t* expected_output_quantized, + float output_scale, int output_zero_point, TfLiteConvParams* conv_params, + int16_t* output_data) { + TfLiteIntArray* input_dims = IntArrayFromInts(input_dims_data); + TfLiteIntArray* filter_dims = IntArrayFromInts(filter_dims_data); + TfLiteIntArray* bias_dims = IntArrayFromInts(bias_dims_data); + TfLiteIntArray* output_dims = IntArrayFromInts(output_dims_data); + const int output_dims_count = ElementCount(*output_dims); + + int filter_zero_points[5]; + float filter_scales[5]; + TfLiteAffineQuantization filter_quant; + TfLiteTensor filter_tensor = 
CreateSymmetricPerChannelQuantizedTensor( + filter_data, filter_quantized, filter_dims, filter_scales, + filter_zero_points, &filter_quant, 0 /* quantized dimension */); + tflite::Quantize(expected_output_data, expected_output_quantized, + output_dims_count, output_scale, 0); + + int output_shape_dims_data[] = {1, 0}; + int32_t* output_shape = nullptr; + TfLiteIntArray* output_shape_dims = IntArrayFromInts(output_shape_dims_data); + + constexpr int inputs_size = 4; + constexpr int outputs_size = 1; + constexpr int tensors_size = inputs_size + outputs_size; + TfLiteTensor tensors[tensors_size] = { + CreateTensor(output_shape, output_shape_dims), filter_tensor, + CreateQuantizedTensor(input_data, input_quantized, input_dims, + input_scale, input_zero_point), + CreateQuantizedBiasTensor(bias_data, bias_quantized, bias_dims, + input_scale, filter_scale), + CreateQuantizedTensor(output_data, output_dims, output_scale, + output_zero_point)}; + + // Tolerance is slightly looser for 8x16 compared with float, since quant + // error is more pronounced on the finer-grained 16-bit output. 
+ return ValidateTransposeConvGoldens( + tensors, tensors_size, expected_output_quantized, output_dims_count, + conv_params, output_data, 4.0f); +} + +} // namespace +} // namespace testing +} // namespace tflite + +TF_LITE_MICRO_TESTS_BEGIN + +TF_LITE_MICRO_TEST(SimpleTestFloat) { + float output_data[tflite::testing::kOutputElements]; + + TF_LITE_MICRO_EXPECT_EQ( + kTfLiteOk, + tflite::testing::TestTransposeConvFloat( + tflite::testing::kInputShape, tflite::testing::kInputData, + tflite::testing::kFilterShape, tflite::testing::kFilterData, + tflite::testing::kBiasShape, tflite::testing::kBiasData, + tflite::testing::kOutputShape, tflite::testing::kGoldenData, + &tflite::testing::common_conv_params, output_data)); +} + +TF_LITE_MICRO_TEST(fusedRELUTest) { + float output_data[tflite::testing::kOutputElements]; + float golden_data[] = {29, 24, 0, 0, 99, 72, 0, 0, + 207, 186, 0, 0, 263, 292, 141, 0}; + int filter_shape[] = {4, 1, 3, 3, 1}; + float filter_data[] = {1, 2, 3, 4, 5, 6, 7, 8, 9}; + int input_shape[] = {4, 1, 4, 4, 1}; + float input_data[] = {1, 2, -3, -4, 5, 6, -7, -8, + 9, 10, -11, -12, 13, 14, 15, 16}; + TfLiteConvParams conv_params = {kTfLitePaddingSame, // padding + 1, // stride_width + 1, // stride_height + kTfLiteActRelu, + 1, + 1}; + + TF_LITE_MICRO_EXPECT_EQ( + kTfLiteOk, tflite::testing::TestTransposeConvFloat( + input_shape, input_data, filter_shape, filter_data, + tflite::testing::kBiasShape, tflite::testing::kBiasData, + tflite::testing::kOutputShape, golden_data, &conv_params, + output_data)); +} + +TF_LITE_MICRO_TEST(AccuracyWithFusedActivationTest) { + int output_shape[] = {4, 1, 3, 4, 1}; + float output_data[tflite::testing::kOutputElements]; + float golden_data[] = {1615, 1938, 0, 0, 2584, 1615, 0, 0, 323, 1292, 0, 0}; + int filter_shape[] = {4, 1, 3, 3, 1}; + float filter_data[] = {9, 5, 6, 9, 8, 5, 3, 1, 4}; + int input_shape[] = {4, 1, 1, 2, 1}; + float input_data[] = {323, -521}; + TfLiteConvParams conv_params = {kTfLitePaddingSame, // 
padding + 3, // stride_width + 3, // stride_height + kTfLiteActRelu, + 1, + 1}; + + TF_LITE_MICRO_EXPECT_EQ( + kTfLiteOk, tflite::testing::TestTransposeConvFloat( + input_shape, input_data, filter_shape, filter_data, + tflite::testing::kBiasShape, tflite::testing::kBiasData, + output_shape, golden_data, &conv_params, output_data)); +} + +TF_LITE_MICRO_TEST(MultiChannelBiasWithFusedActivationTest) { + int output_shape[] = {4, 1, 5, 5, 2}; + float output_data[50]; + float golden_data[] = {4, 6, 6, 8, 10, 14, 9, 12, 13, 16, 10, 12, 12, + 14, 28, 32, 21, 24, 25, 28, 13, 12, 9, 8, 35, 40, + 45, 52, 57, 64, 0, 0, 0, 0, 0, 0, 39, 44, 47, + 52, 0, 0, 0, 0, 4, 6, 63, 68, 71, 76}; + int filter_shape[] = {4, 2, 3, 3, 1}; + float filter_data[] = {1, 3, 5, 7, 9, 11, 13, 15, 17, + 2, 4, 6, 8, 10, 12, 14, 16, 18}; + int input_shape[] = {4, 1, 2, 2, 1}; + float input_data[] = {1, 2, -3, 4}; + int bias_shape[] = {4, 2, 1, 1, 1}; + float bias_data[] = {3, 4}; + TfLiteConvParams conv_params = {kTfLitePaddingValid, // padding + 2, // stride_width + 2, // stride_height + kTfLiteActRelu, + 1, + 1}; + + TF_LITE_MICRO_EXPECT_EQ( + kTfLiteOk, + tflite::testing::TestTransposeConvFloat( + input_shape, input_data, filter_shape, filter_data, bias_shape, + bias_data, output_shape, golden_data, &conv_params, output_data)); +} + +TF_LITE_MICRO_TEST(SimpleTestQuantizedPerChannel) { + int8_t output_data[tflite::testing::kOutputElements]; + + const float input_scale = 0.5f; + const float output_scale = 30.0f; + const float filter_scale = 1.0f; + const int input_zero_point = 0; + const int output_zero_point = 0; + + int8_t input_quantized[tflite::testing::kInputElements]; + int8_t filter_quantized[tflite::testing::kFilterElements]; + int32_t bias_quantized[tflite::testing::kBiasElements]; + int8_t golden_quantized[tflite::testing::kOutputElements]; + int zero_points[tflite::testing::kBiasElements + 1]; + float scales[tflite::testing::kBiasElements + 1]; + + TF_LITE_MICRO_EXPECT_EQ( + kTfLiteOk, + 
tflite::testing::TestTransposeConvQuantized( + tflite::testing::kInputShape, tflite::testing::kInputData, + input_quantized, input_scale, input_zero_point, + tflite::testing::kFilterShape, tflite::testing::kFilterData, + filter_quantized, filter_scale, tflite::testing::kBiasShape, + tflite::testing::kBiasData, bias_quantized, scales, zero_points, + tflite::testing::kOutputShape, tflite::testing::kGoldenData, + golden_quantized, output_scale, output_zero_point, + &tflite::testing::common_conv_params, output_data)); +} + +TF_LITE_MICRO_TEST(SimpleTestQuantized16x8PerChannel) { + int16_t output_data[tflite::testing::kOutputElements]; + + const float input_scale = 1.0f; + const float output_scale = 1.0f; + const float filter_scale = 1.0f; + const int input_zero_point = 0; + const int output_zero_point = 0; + + int16_t input_quantized[tflite::testing::kInputElements]; + int8_t filter_quantized[tflite::testing::kFilterElements]; + std::int64_t bias_quantized[tflite::testing::kBiasElements]; + int16_t golden_quantized[tflite::testing::kOutputElements]; + int zero_points[tflite::testing::kBiasElements + 1]; + float scales[tflite::testing::kBiasElements + 1]; + + TF_LITE_MICRO_EXPECT_EQ( + kTfLiteOk, + tflite::testing::TestTransposeConvQuantized( + tflite::testing::kInputShape, tflite::testing::kInputData, + input_quantized, input_scale, input_zero_point, + tflite::testing::kFilterShape, tflite::testing::kFilterData, + filter_quantized, filter_scale, tflite::testing::kBiasShape, + tflite::testing::kBiasData, bias_quantized, scales, zero_points, + tflite::testing::kOutputShape, tflite::testing::kGoldenData, + golden_quantized, output_scale, output_zero_point, + &tflite::testing::common_conv_params, output_data)); +} + +TF_LITE_MICRO_TEST(SimpleTestQuantized16x8PerChannelWithInt16Bias) { + int16_t output_data[tflite::testing::kOutputElements]; + + const float input_scale = 1.0f; + const float output_scale = 1.0f; + const float filter_scale = 1.0f; + const int input_zero_point 
= 0; + const int output_zero_point = 0; + + int16_t input_quantized[tflite::testing::kInputElements]; + int8_t filter_quantized[tflite::testing::kFilterElements]; + int16_t bias_quantized[tflite::testing::kBiasElements]; + int16_t golden_quantized[tflite::testing::kOutputElements]; + int zero_points[tflite::testing::kBiasElements + 1]; + float scales[tflite::testing::kBiasElements + 1]; + + TF_LITE_MICRO_EXPECT_EQ( + kTfLiteOk, + tflite::testing::TestTransposeConvQuantized( + tflite::testing::kInputShape, tflite::testing::kInputData, + input_quantized, input_scale, input_zero_point, + tflite::testing::kFilterShape, tflite::testing::kFilterData, + filter_quantized, filter_scale, tflite::testing::kBiasShape, + tflite::testing::kBiasData, bias_quantized, scales, zero_points, + tflite::testing::kOutputShape, tflite::testing::kGoldenData, + golden_quantized, output_scale, output_zero_point, + &tflite::testing::common_conv_params, output_data)); +} + +TF_LITE_MICRO_TEST(InputOutputDifferentTypeIsError) { + using tflite::testing::CreateQuantizedTensor; + using tflite::testing::CreateTensor; + using tflite::testing::IntArrayFromInts; + + TfLiteIntArray* input_dims = IntArrayFromInts(tflite::testing::kInputShape); + TfLiteIntArray* filter_dims = IntArrayFromInts(tflite::testing::kFilterShape); + TfLiteIntArray* bias_dims = IntArrayFromInts(tflite::testing::kBiasShape); + TfLiteIntArray* output_dims = IntArrayFromInts(tflite::testing::kOutputShape); + const int output_dims_count = tflite::ElementCount(*output_dims); + constexpr int inputs_size = 4; + constexpr int outputs_size = 1; + constexpr int tensors_size = inputs_size + outputs_size; + + int8_t output_data[tflite::testing::kOutputElements]; + + int output_shape_dims_data[] = {1, 0}; + int32_t* output_shape = nullptr; + TfLiteIntArray* output_shape_dims = IntArrayFromInts(output_shape_dims_data); + + TfLiteTensor tensors[tensors_size] = { + CreateTensor(output_shape, output_shape_dims), + 
CreateTensor(tflite::testing::kInputData, input_dims), + CreateTensor(tflite::testing::kFilterData, filter_dims), + CreateTensor(tflite::testing::kBiasData, bias_dims), + CreateQuantizedTensor(output_data, output_dims, /*scale=*/1.0f, + /*zero_point=*/0), + }; + TF_LITE_MICRO_EXPECT_EQ( + kTfLiteError, tflite::testing::InvokeTransposeConv( + tensors, tensors_size, output_dims_count, + &tflite::testing::common_conv_params, output_data)); +} + +TF_LITE_MICRO_TEST(HybridModeIsError) { + using tflite::testing::CreateQuantizedTensor; + using tflite::testing::CreateTensor; + using tflite::testing::IntArrayFromInts; + + TfLiteIntArray* input_dims = IntArrayFromInts(tflite::testing::kInputShape); + TfLiteIntArray* filter_dims = IntArrayFromInts(tflite::testing::kFilterShape); + TfLiteIntArray* bias_dims = IntArrayFromInts(tflite::testing::kBiasShape); + TfLiteIntArray* output_dims = IntArrayFromInts(tflite::testing::kOutputShape); + const int output_dims_count = tflite::ElementCount(*output_dims); + + constexpr int inputs_size = 4; + constexpr int outputs_size = 1; + constexpr int tensors_size = inputs_size + outputs_size; + + int8_t filter_data[tflite::testing::kFilterElements] = {}; + float output_data[tflite::testing::kOutputElements]; + + int output_shape_dims_data[] = {1, 0}; + int32_t* output_shape = nullptr; + TfLiteIntArray* output_shape_dims = IntArrayFromInts(output_shape_dims_data); + + TfLiteTensor tensors[tensors_size] = { + CreateTensor(output_shape, output_shape_dims), + CreateTensor(tflite::testing::kInputData, input_dims), + CreateQuantizedTensor(filter_data, filter_dims, + /*scale=*/1.0f, + /*zero_point=*/0), + CreateTensor(tflite::testing::kBiasData, bias_dims), + CreateTensor(output_data, output_dims), + }; + + TF_LITE_MICRO_EXPECT_EQ( + kTfLiteError, tflite::testing::InvokeTransposeConv( + tensors, tensors_size, output_dims_count, + &tflite::testing::common_conv_params, output_data)); +} + +TF_LITE_MICRO_TESTS_END diff --git 
a/tensorflow/lite/micro/kernels/transpose_test.cc b/tensorflow/lite/micro/kernels/transpose_test.cc new file mode 100644 index 0000000..12bc431 --- /dev/null +++ b/tensorflow/lite/micro/kernels/transpose_test.cc @@ -0,0 +1,613 @@ +/* Copyright 2021 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#include "tensorflow/lite/kernels/internal/reference/transpose.h" + +#include "tensorflow/lite/c/builtin_op_data.h" +#include "tensorflow/lite/c/common.h" +#include "tensorflow/lite/micro/kernels/kernel_runner.h" +#include "tensorflow/lite/micro/micro_utils.h" +#include "tensorflow/lite/micro/test_helpers.h" +#include "tensorflow/lite/micro/testing/micro_test.h" + +namespace tflite { +namespace testing { +namespace { + +template +void RunTestPermutation(int num_dims, const int32_t* shape, + const int32_t* perms, T* input, T* input_transposed) { + // Count elements and allocate output. + int count = 1; + for (int i = 0; i < num_dims; i++) { + count *= shape[i]; + } + + // Create the dummy data + for (int i = 0; i < count; i++) { + input[i] = i; + } + + // Make input and output shapes. 
+ const RuntimeShape input_shape = RuntimeShape(num_dims, shape); + RuntimeShape output_shape(num_dims); + + for (int i = 0; i < num_dims; i++) { + output_shape.SetDim(i, shape[perms[i]]); + } + + TransposeParams params; + params.perm_count = num_dims; + for (int i = 0; i < num_dims; ++i) { + params.perm[i] = perms[i]; + } + + reference_ops::Transpose(params, input_shape, input, output_shape, + input_transposed); +} + +template +TfLiteStatus InvokeTranspose(TfLiteTensor* tensors, int tensors_size, + T* output_data, int output_length, + TransposeParams* params) { + int inputs_array_data[] = {2, 0, 1}; + TfLiteIntArray* inputs_array = IntArrayFromInts(inputs_array_data); + int outputs_array_data[] = {1, 2}; + TfLiteIntArray* outputs_array = IntArrayFromInts(outputs_array_data); + + const TFLMRegistration registration = Register_TRANSPOSE(); + micro::KernelRunner runner(registration, tensors, tensors_size, inputs_array, + outputs_array, reinterpret_cast(params)); + + const char* init_data = reinterpret_cast(params); + TfLiteStatus status = runner.InitAndPrepare(init_data); + if (status != kTfLiteOk) { + return status; + } + return runner.Invoke(); +} + +template +TfLiteStatus ValidateTranspose(TfLiteTensor* tensors, int tensors_size, + const T* expected_output_data, T* output_data, + int output_length, + tflite::TransposeParams* params, + float tolerance = 1e-5) { + TfLiteStatus status = InvokeTranspose(tensors, tensors_size, output_data, + output_length, params); + if (status != kTfLiteOk) { + return status; + } + + for (int i = 0; i < output_length; ++i) { + TF_LITE_MICRO_EXPECT_EQ(expected_output_data[i], output_data[i]); + } + return kTfLiteOk; +} + +template +void TestTranspose(int* input_dims_data, T* input_data, int* output_dims_data, + const T* expected_output_data, T* output_data, + TransposeParams* params) { + TfLiteIntArray* input_dims = IntArrayFromInts(input_dims_data); + TfLiteIntArray* output_dims = IntArrayFromInts(output_dims_data); + const int 
input_size = ElementCount(*input_dims); + for (int i = 0; i < input_size; i++) { + input_data[i] = i; + } + + for (int i = 0; i < input_dims->size; i++) { + output_dims->data[i] = input_dims->data[params->perm[i]]; + } + + int perm_dims_data[] = {1, params->perm_count}; + TfLiteIntArray* perm_dims = IntArrayFromInts(perm_dims_data); + const int output_dims_count = ElementCount(*output_dims); + constexpr int inputs_size = 2; + constexpr int outputs_size = 1; + constexpr int tensors_size = inputs_size + outputs_size; + TfLiteTensor tensors[tensors_size] = { + CreateTensor(input_data, input_dims), + CreateTensor(params->perm, perm_dims), + CreateTensor(output_data, output_dims), + }; + + TF_LITE_MICRO_EXPECT_EQ( + kTfLiteOk, ValidateTranspose(tensors, tensors_size, expected_output_data, + output_data, output_dims_count, params)); +} + +} // namespace +} // namespace testing +} // namespace tflite + +TF_LITE_MICRO_TESTS_BEGIN + +TF_LITE_MICRO_TEST(1D) { + int input_dims_data[] = {1, 3}; + int output_dims_data[] = {1, 3}; + + int8_t input_data[3]; + int8_t output_data[3]; + const int8_t expected_output_data[] = {0, 1, 2}; + + tflite::TransposeParams params = {1, {0}}; + + tflite::testing::TestTranspose(input_dims_data, input_data, output_dims_data, + expected_output_data, output_data, ¶ms); +} + +TF_LITE_MICRO_TEST(2DPerm1) { + int input_dims_data[] = {2, 3, 2}; + int output_dims_data[] = {2, 3, 2}; + + int8_t input_data[6]; + int8_t output_data[6]; + const int8_t expected_output_data[] = {0, 2, 4, 1, 3, 5}; + + tflite::TransposeParams params = {2, {1, 0}}; + + tflite::testing::TestTranspose(input_dims_data, input_data, output_dims_data, + expected_output_data, output_data, ¶ms); +} + +TF_LITE_MICRO_TEST(2D4x4KernelLeftOverRightSide) { + int input_dims_data[] = {2, 4, 6}; + int output_dims_data[] = {2, 4, 6}; + + int8_t input_data[24]; + int8_t output_data[24]; + const int8_t expected_output_data[] = {0, 6, 12, 18, 1, 7, 13, 19, + 2, 8, 14, 20, 3, 9, 15, 21, + 4, 10, 
16, 22, 5, 11, 17, 23}; + + tflite::TransposeParams params = {2, {1, 0}}; + + tflite::testing::TestTranspose(input_dims_data, input_data, output_dims_data, + expected_output_data, output_data, ¶ms); +} + +TF_LITE_MICRO_TEST(2D4x4KernelLeftOverBottomSide) { + int input_dims_data[] = {2, 6, 4}; + int output_dims_data[] = {2, 4, 6}; + + int8_t input_data[24]; + int8_t output_data[24]; + const int8_t expected_output_data[] = {0, 4, 8, 12, 16, 20, 1, 5, + 9, 13, 17, 21, 2, 6, 10, 14, + 18, 22, 3, 7, 11, 15, 19, 23}; + + tflite::TransposeParams params = {2, {1, 0}}; + + tflite::testing::TestTranspose(input_dims_data, input_data, output_dims_data, + expected_output_data, output_data, ¶ms); +} + +TF_LITE_MICRO_TEST(3D) { + int input_dims_data[] = {3, 2, 3, 4}; + int output_dims_data[] = {3, 2, 3, 4}; + + int8_t input_data[24]; + int8_t output_data[24]; + const int8_t expected_output_data[] = {0, 4, 8, 12, 16, 20, 1, 5, + 9, 13, 17, 21, 2, 6, 10, 14, + 18, 22, 3, 7, 11, 15, 19, 23}; + + tflite::TransposeParams params = {3, {2, 0, 1}}; + + tflite::testing::TestTranspose(input_dims_data, input_data, output_dims_data, + expected_output_data, output_data, ¶ms); +} + +TF_LITE_MICRO_TEST(1DNotShrinked) { + int input_dims_data[] = {1, 1}; + int output_dims_data[] = {1, 1}; + + float input_data[1]; + float output_data[1]; + const float expected_output_data[] = {0}; + + tflite::TransposeParams params = {1, {0}}; + + tflite::testing::TestTranspose(input_dims_data, input_data, output_dims_data, + expected_output_data, output_data, ¶ms); +} + +TF_LITE_MICRO_TEST(2DShrinkedOneTime) { + int input_dims_data[] = {2, 2, 1}; + int output_dims_data[] = {2, 2, 1}; + + float input_data[2]; + float output_data[2]; + const float expected_output_data[] = {0, 1}; + + tflite::TransposeParams params = {2, {1, 0}}; + + tflite::testing::TestTranspose(input_dims_data, input_data, output_dims_data, + expected_output_data, output_data, ¶ms); +} + +TF_LITE_MICRO_TEST(2DShrinkedTwoTimes) { + int 
input_dims_data[] = {2, 1, 1}; + int output_dims_data[] = {2, 1, 1}; + + float input_data[1]; + float output_data[1]; + const float expected_output_data[] = {0}; + + tflite::TransposeParams params = {2, {1, 0}}; + + tflite::testing::TestTranspose(input_dims_data, input_data, output_dims_data, + expected_output_data, output_data, ¶ms); +} + +TF_LITE_MICRO_TEST(3DShrinkedOneTime) { + int input_dims_data[] = {3, 2, 1, 3}; + int output_dims_data[] = {3, 2, 1, 3}; + + float input_data[6]; + float output_data[6]; + const float expected_output_data[] = {0, 1, 2, 3, 4, 5}; + + tflite::TransposeParams params = {3, {0, 2, 1}}; + + tflite::testing::TestTranspose(input_dims_data, input_data, output_dims_data, + expected_output_data, output_data, ¶ms); +} + +TF_LITE_MICRO_TEST(3DShrinkedTwoTimes) { + int input_dims_data[] = {3, 1, 1, 3}; + int output_dims_data[] = {3, 1, 1, 3}; + + float input_data[3]; + float output_data[3]; + const float expected_output_data[] = {0, 1, 2}; + + tflite::TransposeParams params = {3, {1, 2, 0}}; + + tflite::testing::TestTranspose(input_dims_data, input_data, output_dims_data, + expected_output_data, output_data, ¶ms); +} + +TF_LITE_MICRO_TEST(3DShrinkedAll) { + int input_dims_data[] = {3, 1, 1, 1}; + int output_dims_data[] = {3, 1, 1, 1}; + + float input_data[1]; + float output_data[1]; + const float expected_output_data[] = {0}; + + tflite::TransposeParams params = {3, {1, 2, 0}}; + + tflite::testing::TestTranspose(input_dims_data, input_data, output_dims_data, + expected_output_data, output_data, ¶ms); +} + +TF_LITE_MICRO_TEST(4DShrinkedOneTimes) { + int input_dims_data[] = {4, 2, 2, 3, 1}; + int output_dims_data[] = {4, 2, 2, 3, 1}; + + float input_data[12]; + float output_data[12]; + const float expected_output_data[] = {0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11}; + + tflite::TransposeParams params = {4, {3, 0, 1, 2}}; + + tflite::testing::TestTranspose(input_dims_data, input_data, output_dims_data, + expected_output_data, output_data, ¶ms); +} + 
+TF_LITE_MICRO_TEST(4DShrinkedTwoTimes) { + int input_dims_data[] = {4, 2, 1, 3, 1}; + int output_dims_data[] = {4, 2, 1, 3, 1}; + + float input_data[6]; + float output_data[6]; + const float expected_output_data[] = {0, 1, 2, 3, 4, 5}; + + tflite::TransposeParams params = {4, {0, 3, 1, 2}}; + + tflite::testing::TestTranspose(input_dims_data, input_data, output_dims_data, + expected_output_data, output_data, ¶ms); +} + +TF_LITE_MICRO_TEST(4DShrinkedThreeTimes) { + int input_dims_data[] = {4, 2, 1, 1, 1}; + int output_dims_data[] = {4, 2, 1, 1, 1}; + + float input_data[2]; + float output_data[2]; + const float expected_output_data[] = {0, 1}; + + tflite::TransposeParams params = {4, {3, 2, 1, 0}}; + + tflite::testing::TestTranspose(input_dims_data, input_data, output_dims_data, + expected_output_data, output_data, ¶ms); +} + +TF_LITE_MICRO_TEST(4DShrinkedFourTimes) { + int input_dims_data[] = {4, 1, 1, 1, 1}; + int output_dims_data[] = {4, 1, 1, 1, 1}; + + float input_data[1]; + float output_data[1]; + const float expected_output_data[] = {0}; + + tflite::TransposeParams params = {4, {2, 3, 1, 0}}; + + tflite::testing::TestTranspose(input_dims_data, input_data, output_dims_data, + expected_output_data, output_data, ¶ms); +} + +TF_LITE_MICRO_TEST(3DFlatten) { + int input_dims_data[] = {3, 2, 2, 3}; + int output_dims_data[] = {3, 2, 2, 3}; + + float input_data[12]; + float output_data[12]; + const float expected_output_data[] = {0, 3, 1, 4, 2, 5, 6, 9, 7, 10, 8, 11}; + + tflite::TransposeParams params = {3, {0, 2, 1}}; + + tflite::testing::TestTranspose(input_dims_data, input_data, output_dims_data, + expected_output_data, output_data, ¶ms); +} + +TF_LITE_MICRO_TEST(4DFlatten) { + int input_dims_data[] = {4, 2, 2, 2, 2}; + int output_dims_data[] = {4, 2, 2, 2, 2}; + + float input_data[16]; + float output_data[16]; + const float expected_output_data[] = {0, 2, 1, 3, 4, 6, 5, 7, + 8, 10, 9, 11, 12, 14, 13, 15}; + + tflite::TransposeParams params = {4, {0, 1, 3, 2}}; + + 
tflite::testing::TestTranspose(input_dims_data, input_data, output_dims_data, + expected_output_data, output_data, ¶ms); +} + +TF_LITE_MICRO_TEST(4DFlattenTwo) { + int input_dims_data[] = {4, 2, 2, 2, 2}; + int output_dims_data[] = {4, 2, 2, 2, 2}; + + float input_data[16]; + float output_data[16]; + const float expected_output_data[] = {0, 4, 1, 5, 2, 6, 3, 7, + 8, 12, 9, 13, 10, 14, 11, 15}; + + tflite::TransposeParams params = {4, {0, 2, 3, 1}}; + + tflite::testing::TestTranspose(input_dims_data, input_data, output_dims_data, + expected_output_data, output_data, ¶ms); +} + +TF_LITE_MICRO_TEST(3DDividedIntoTwo2DsOne) { + float input_data[24]; + float expected_output_data[24]; + int32_t shape[] = {2, 3, 4}; + int32_t perms[] = {1, 2, 0}; + tflite::testing::RunTestPermutation(3, shape, perms, input_data, + expected_output_data); + int input_dims_data[] = {3, 2, 3, 4}; + int output_dims_data[] = {3, 2, 3, 4}; + + float output_data[24]; + + tflite::TransposeParams params = {3, {1, 2, 0}}; + + tflite::testing::TestTranspose(input_dims_data, input_data, output_dims_data, + expected_output_data, output_data, ¶ms); +} + +TF_LITE_MICRO_TEST(3DDividedIntoTwo2DsTwo) { + float input_data[24]; + float expected_output_data[24]; + int32_t shape[] = {2, 3, 4}; + int32_t perms[] = {2, 0, 1}; + tflite::testing::RunTestPermutation(3, shape, perms, input_data, + expected_output_data); + int input_dims_data[] = {3, 2, 3, 4}; + int output_dims_data[] = {3, 2, 3, 4}; + + float output_data[24]; + + tflite::TransposeParams params = {3, {2, 0, 1}}; + + tflite::testing::TestTranspose(input_dims_data, input_data, output_dims_data, + expected_output_data, output_data, ¶ms); +} + +TF_LITE_MICRO_TEST(4DDividedIntoTwo2DsOne) { + int32_t shape[] = {2, 3, 4, 2}; + int32_t perms[] = {1, 2, 3, 0}; + float input_data[48]; + float expected_output_data[48]; + tflite::testing::RunTestPermutation(4, shape, perms, input_data, + expected_output_data); + int input_dims_data[] = {4, 2, 3, 4, 2}; + int 
output_dims_data[] = {4, 2, 3, 4, 2}; + + float output_data[48]; + + tflite::TransposeParams params = {4, {1, 2, 3, 0}}; + + tflite::testing::TestTranspose(input_dims_data, input_data, output_dims_data, + expected_output_data, output_data, ¶ms); +} +TF_LITE_MICRO_TEST(4DDividedIntoTwo2DsTwo) { + int32_t shape[] = {2, 3, 4, 2}; + int32_t perms[] = {2, 3, 0, 1}; + float input_data[48]; + float expected_output_data[48]; + tflite::testing::RunTestPermutation(4, shape, perms, input_data, + expected_output_data); + int input_dims_data[] = {4, 2, 3, 4, 2}; + int output_dims_data[] = {4, 2, 3, 4, 2}; + + float output_data[48]; + + tflite::TransposeParams params = {4, {2, 3, 0, 1}}; + + tflite::testing::TestTranspose(input_dims_data, input_data, output_dims_data, + expected_output_data, output_data, ¶ms); +} + +TF_LITE_MICRO_TEST(4DDividedIntoTwo2DsThree) { + int32_t shape[] = {2, 3, 4, 2}; + int32_t perms[] = {3, 0, 1, 2}; + float input_data[48]; + float expected_output_data[48]; + tflite::testing::RunTestPermutation(4, shape, perms, input_data, + expected_output_data); + int input_dims_data[] = {4, 2, 3, 4, 2}; + int output_dims_data[] = {4, 2, 3, 4, 2}; + + float output_data[48]; + + tflite::TransposeParams params = {4, {3, 0, 1, 2}}; + + tflite::testing::TestTranspose(input_dims_data, input_data, output_dims_data, + expected_output_data, output_data, ¶ms); +} + +TF_LITE_MICRO_TEST(5DDividedIntoTwo2DsOne) { + int32_t shape[] = {2, 3, 2, 2, 2}; + int32_t perms[] = {1, 4, 2, 3, 0}; + float input_data[48]; + float expected_output_data[48]; + tflite::testing::RunTestPermutation(5, shape, perms, input_data, + expected_output_data); + int input_dims_data[] = {5, 2, 3, 2, 2, 2}; + int output_dims_data[] = {5, 2, 3, 2, 2, 2}; + + float output_data[48]; + + tflite::TransposeParams params = {5, {1, 4, 2, 3, 0}}; + + tflite::testing::TestTranspose(input_dims_data, input_data, output_dims_data, + expected_output_data, output_data, ¶ms); +} + 
+TF_LITE_MICRO_TEST(5DDividedIntoTwo2DsTwo) { + int32_t shape[] = {2, 3, 2, 2, 2}; + int32_t perms[] = {2, 3, 0, 4, 1}; + float input_data[48]; + float expected_output_data[48]; + tflite::testing::RunTestPermutation(5, shape, perms, input_data, + expected_output_data); + int input_dims_data[] = {5, 2, 3, 2, 2, 2}; + int output_dims_data[] = {5, 2, 3, 2, 2, 2}; + + float output_data[48]; + + tflite::TransposeParams params = {5, {2, 3, 0, 4, 1}}; + + tflite::testing::TestTranspose(input_dims_data, input_data, output_dims_data, + expected_output_data, output_data, ¶ms); +} + +TF_LITE_MICRO_TEST(5DDividedIntoTwo2DsThree) { + int32_t shape[] = {2, 3, 2, 2, 2}; + int32_t perms[] = {3, 0, 4, 1, 2}; + float input_data[48]; + float expected_output_data[48]; + tflite::testing::RunTestPermutation(5, shape, perms, input_data, + expected_output_data); + int input_dims_data[] = {5, 2, 3, 2, 2, 2}; + int output_dims_data[] = {5, 2, 3, 2, 2, 2}; + + float output_data[48]; + + tflite::TransposeParams params = {5, {3, 0, 4, 1, 2}}; + + tflite::testing::TestTranspose(input_dims_data, input_data, output_dims_data, + expected_output_data, output_data, ¶ms); +} + +TF_LITE_MICRO_TEST(SimpleTestNoReorder) { + int input_dims_data[] = {4, 1, 2, 3, 1}; + int output_dims_data[] = {4, 1, 2, 3, 1}; + + float input_data[6]; + float output_data[6]; + const float expected_output_data[] = {0, 1, 2, 3, 4, 5}; + + tflite::TransposeParams params = {4, {0, 1, 2, 3}}; + + tflite::testing::TestTranspose(input_dims_data, input_data, output_dims_data, + expected_output_data, output_data, ¶ms); +} + +TF_LITE_MICRO_TEST(SimpleTestWithReorder) { + int input_dims_data[] = {4, 1, 2, 3, 1}; + int output_dims_data[] = {4, 1, 2, 3, 1}; + + float input_data[6]; + float output_data[6]; + const float expected_output_data[] = {0, 3, 1, 4, 2, 5}; + + tflite::TransposeParams params = {4, {2, 1, 3, 0}}; + + tflite::testing::TestTranspose(input_dims_data, input_data, output_dims_data, + expected_output_data, output_data, 
¶ms); +} + +TF_LITE_MICRO_TEST(ComplexTestWithReorder) { + int input_dims_data[] = {4, 2, 3, 4, 5}; + int output_dims_data[] = {4, 2, 3, 4, 5}; + + float input_data[120]; + float output_data[120]; + const float expected_output_data[] = { + 0, 1, 2, 3, 4, 20, 21, 22, 23, 24, 40, 41, 42, 43, 44, + 60, 61, 62, 63, 64, 80, 81, 82, 83, 84, 100, 101, 102, 103, 104, + 5, 6, 7, 8, 9, 25, 26, 27, 28, 29, 45, 46, 47, 48, 49, + 65, 66, 67, 68, 69, 85, 86, 87, 88, 89, 105, 106, 107, 108, 109, + 10, 11, 12, 13, 14, 30, 31, 32, 33, 34, 50, 51, 52, 53, 54, + 70, 71, 72, 73, 74, 90, 91, 92, 93, 94, 110, 111, 112, 113, 114, + 15, 16, 17, 18, 19, 35, 36, 37, 38, 39, 55, 56, 57, 58, 59, + 75, 76, 77, 78, 79, 95, 96, 97, 98, 99, 115, 116, 117, 118, 119}; + + tflite::TransposeParams params = {4, {2, 0, 1, 3}}; + + tflite::testing::TestTranspose(input_dims_data, input_data, output_dims_data, + expected_output_data, output_data, ¶ms); +} + +TF_LITE_MICRO_TEST(Complex5DTestWithReorder) { + int input_dims_data[] = {5, 2, 3, 2, 2, 5}; + int output_dims_data[] = {5, 2, 3, 2, 2, 5}; + + float input_data[120]; + float output_data[120]; + const float expected_output_data[] = { + 0, 5, 1, 6, 2, 7, 3, 8, 4, 9, 20, 25, 21, 26, 22, + 27, 23, 28, 24, 29, 40, 45, 41, 46, 42, 47, 43, 48, 44, 49, + 60, 65, 61, 66, 62, 67, 63, 68, 64, 69, 80, 85, 81, 86, 82, + 87, 83, 88, 84, 89, 100, 105, 101, 106, 102, 107, 103, 108, 104, 109, + 10, 15, 11, 16, 12, 17, 13, 18, 14, 19, 30, 35, 31, 36, 32, + 37, 33, 38, 34, 39, 50, 55, 51, 56, 52, 57, 53, 58, 54, 59, + 70, 75, 71, 76, 72, 77, 73, 78, 74, 79, 90, 95, 91, 96, 92, + 97, 93, 98, 94, 99, 110, 115, 111, 116, 112, 117, 113, 118, 114, 119}; + + tflite::TransposeParams params = {5, {2, 0, 1, 4, 3}}; + + tflite::testing::TestTranspose(input_dims_data, input_data, output_dims_data, + expected_output_data, output_data, ¶ms); +} + +TF_LITE_MICRO_TESTS_END diff --git a/tensorflow/lite/micro/kernels/unidirectional_sequence_lstm.cc 
b/tensorflow/lite/micro/kernels/unidirectional_sequence_lstm.cc new file mode 100644 index 0000000..b296680 --- /dev/null +++ b/tensorflow/lite/micro/kernels/unidirectional_sequence_lstm.cc @@ -0,0 +1,168 @@ +/* Copyright 2023 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +// Integer version of unidirectional sequence lstm. Only the standard LSTM +// (defined in the keras LSTM layer, e.g., no peephole etc.) is supported here. 
+// Currently used by the 16 bits activation case only + +#include +#include + +#include "tensorflow/lite/kernels/internal/quantization_util.h" +#include "tensorflow/lite/kernels/kernel_util.h" +#include "tensorflow/lite/micro/kernels/fully_connected.h" +#include "tensorflow/lite/micro/kernels/kernel_util.h" +#include "tensorflow/lite/micro/kernels/lstm_eval.h" +#include "tensorflow/lite/micro/kernels/lstm_shared.h" + +namespace tflite { + +namespace { +/*Helper Functions*/ + +/*Kernel functions*/ + +void* UnidirectionalSequenceLstmInit(TfLiteContext* context, const char* buffer, + size_t length) { + TFLITE_DCHECK(context->AllocatePersistentBuffer != nullptr); + return context->AllocatePersistentBuffer(context, sizeof(OpDataLSTM)); +} + +TfLiteStatus UnidirectionalSequenceLstmPrepare(TfLiteContext* context, + TfLiteNode* node) { + TF_LITE_ENSURE_EQ(context, node->outputs->size, 1); + TF_LITE_ENSURE_EQ(context, node->inputs->size, 24); + + TFLITE_DCHECK(node->builtin_data != nullptr); + TFLITE_DCHECK(node->user_data != nullptr); + + OpDataLSTM* op_data = reinterpret_cast(node->user_data); + const auto* builtin_data = + static_cast(node->builtin_data); + // All TempTfLiteTensors will be deallocated through the destructor. 
+ LstmTensors lstm_tensors(context, node); + TF_LITE_ENSURE_OK(context, lstm_tensors.ValidateTensorStatus(context)); + + op_data->cell_gate_nonlinear_type = builtin_data->activation; + op_data->size_info = + CreateLstmSizeInfo(builtin_data->time_major, + lstm_tensors.GetInternalTensor(kLstmInputTensor)->dims, + lstm_tensors.HiddenStateTensor()->dims); + TF_LITE_ENSURE_OK( + context, ValidateTensorSize(context, lstm_tensors, op_data->size_info)); + + // Create cell state information and gate parameters (Fully Connected and Mul) + auto cell_state_type = + lstm_tensors.GetInternalTensor(kLstmCellStateTensor)->type; + if (cell_state_type == kTfLiteFloat32) { + op_data->cell_state_info = + CreateLstmCellStateInfoFloat(builtin_data->cell_clip); + TF_LITE_ENSURE_OK( + context, PrepareGateParametersFloat(context, lstm_tensors, op_data)); + } else if (cell_state_type == kTfLiteInt16) { + op_data->cell_state_info = CreateLstmCellStateInfo( + lstm_tensors.CellStateTensor()->params.scale, builtin_data->cell_clip); + TF_LITE_ENSURE_OK( + context, PrepareGateParametersInteger(context, lstm_tensors, op_data)); + } else { + MicroPrintf( + "Cell state type %s (%d) not supported. 
The quantized Unidirectional " + "Sequence LSTM Op only support int16 cell state", + TfLiteTypeGetName(cell_state_type), cell_state_type); + return kTfLiteError; + } + // request buffers (four buffers) + for (size_t i = 0; i < 4; i++) { + TF_LITE_ENSURE_OK(context, context->RequestScratchBufferInArena( + context, + op_data->size_info.batch_size * + op_data->size_info.state_dimension * + TfLiteTypeGetSize(cell_state_type), + &(op_data->buffer_indices[i]))); + } + return kTfLiteOk; +} + +TfLiteStatus UnidirectionalSequenceLstmEval(TfLiteContext* context, + TfLiteNode* node) { + TFLITE_DCHECK(node->user_data != nullptr); + const OpDataLSTM& op_data = *reinterpret_cast(node->user_data); + auto kernel_content = CreateLSTMKernelContent(context, node); + + const auto activation_type = + kernel_content.internal_tensors[kLstmInputTensor]->type; + const auto weight_type = + kernel_content.internal_tensors[kLstmInputToInputWeightsTensor]->type; + + switch (activation_type) { + case kTfLiteFloat32: { + LSTMBuffers buffers = + CreateLSTMBuffers(context, op_data.buffer_indices); + EvalLstm(op_data, kernel_content, buffers); + break; + } + case kTfLiteInt8: { + switch (weight_type) { + case kTfLiteInt8: { + // 8(activation)x8(weight)->16(cell) LSTM with 32 bits bias + LSTMBuffers buffers = + CreateLSTMBuffers(context, op_data.buffer_indices); + EvalLstm(op_data, kernel_content, + buffers); + break; + } + default: { + MicroPrintf("Filter type %s (%d) not supported.", + TfLiteTypeGetName(weight_type), activation_type); + return kTfLiteError; + } + } + break; + } + case kTfLiteInt16: { + switch (weight_type) { + case kTfLiteInt8: { + // 16(activation)x8(weight)->16(cell) LSTM with 64 bits bias + LSTMBuffers buffers = + CreateLSTMBuffers(context, op_data.buffer_indices); + EvalLstm(op_data, kernel_content, + buffers); + break; + } + default: { + MicroPrintf("Filter type %s (%d) not supported.", + TfLiteTypeGetName(weight_type), weight_type); + return kTfLiteError; + } + } + break; + 
} + default: { + MicroPrintf("Input type %s (%d) not supported.", + TfLiteTypeGetName(activation_type), activation_type); + return kTfLiteError; + } + } + return kTfLiteOk; +} + +} // namespace + +TFLMRegistration Register_UNIDIRECTIONAL_SEQUENCE_LSTM() { + return tflite::micro::RegisterOp(UnidirectionalSequenceLstmInit, + UnidirectionalSequenceLstmPrepare, + UnidirectionalSequenceLstmEval); +} +} // namespace tflite diff --git a/tensorflow/lite/micro/kernels/unidirectional_sequence_lstm.h b/tensorflow/lite/micro/kernels/unidirectional_sequence_lstm.h new file mode 100644 index 0000000..16aa23b --- /dev/null +++ b/tensorflow/lite/micro/kernels/unidirectional_sequence_lstm.h @@ -0,0 +1,47 @@ +/* Copyright 2023 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#ifndef TENSORFLOW_LITE_MICRO_KERNELS_UNIDIRECTIONAL_SEQUENCE_LSTM_H_ +#define TENSORFLOW_LITE_MICRO_KERNELS_UNIDIRECTIONAL_SEQUENCE_LSTM_H_ + +#include + +#include "tensorflow/lite/c/builtin_op_data.h" +#include "tensorflow/lite/c/common.h" +#include "tensorflow/lite/kernels/internal/types.h" + +namespace tflite { + +// This is the most generic TFLMRegistration. The actual supported types +// may still be target dependent. The only requirement is that every +// implementation (reference or optimized) must define this function. 
+// TODO(b/230666079): resolve conflict with xtensa implementation +TFLMRegistration Register_UNIDIRECTIONAL_SEQUENCE_LSTM(); + +#if defined(CMSIS_NN) +// Returns a TFLMRegistration struct for kernel variant that only supports +// int8 activations and int8 weights and uses the latency optimized +// implementations. +TFLMRegistration Register_UNIDIRECTIONAL_SEQUENCE_LSTM_INT8(); + +#else +inline TFLMRegistration Register_UNIDIRECTIONAL_SEQUENCE_LSTM_INT8() { + return Register_UNIDIRECTIONAL_SEQUENCE_LSTM(); +} +#endif + +} // namespace tflite + +#endif // TENSORFLOW_LITE_MICRO_KERNELS_UNIDIRECTIONAL_SEQUENCE_LSTM_H_ diff --git a/tensorflow/lite/micro/kernels/unidirectional_sequence_lstm_test.cc b/tensorflow/lite/micro/kernels/unidirectional_sequence_lstm_test.cc new file mode 100644 index 0000000..c85e56f --- /dev/null +++ b/tensorflow/lite/micro/kernels/unidirectional_sequence_lstm_test.cc @@ -0,0 +1,197 @@ +/* Copyright 2023 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/ +#include "tensorflow/lite/c/builtin_op_data.h" +#include "tensorflow/lite/c/common.h" +#include "tensorflow/lite/kernels/internal/portable_tensor_utils.h" +#include "tensorflow/lite/kernels/internal/types.h" +#include "tensorflow/lite/micro/kernels/kernel_runner.h" +#include "tensorflow/lite/micro/kernels/lstm_shared.h" +#include "tensorflow/lite/micro/kernels/micro_ops.h" +#include "tensorflow/lite/micro/kernels/testdata/lstm_test_data.h" +#include "tensorflow/lite/micro/test_helpers.h" +#include "tensorflow/lite/micro/testing/micro_test.h" + +namespace tflite { +namespace testing { +namespace { + +constexpr int kLstmMaxNumInputOutputTensors = 24 + 1; +constexpr int kLstmIntermediateTensorBase = kLstmMaxNumInputOutputTensors + 1; + +// Validate the output result array with golden values +template +void ValidateResultGoldens(const T* golden, const T* output_data, + const int output_len, const float tolerance) { + for (int i = 0; i < output_len; ++i) { + TF_LITE_MICRO_EXPECT_NEAR(golden[i], output_data[i], tolerance); + } +} + +template +void TestUnidirectionalLSTMInteger( + const LstmEvalCheckData< + batch_size * time_steps * input_dimension, batch_size * state_dimension, + batch_size * state_dimension * time_steps>& eval_check_data, + const float hidden_state_tolerance, const float cell_state_tolerance, + LstmNodeContent& + node_contents) { + TfLiteTensor tensors[kLstmMaxNumInputOutputTensors + 1 + 5]; + memcpy(tensors, node_contents.GetTensors(), + kLstmMaxNumInputOutputTensors * sizeof(TfLiteTensor)); + + // Provide also intermediate tensors needed by older LSTM implementations + int intermediate_array_data[6] = {5, + kLstmIntermediateTensorBase, + kLstmIntermediateTensorBase + 1, + kLstmIntermediateTensorBase + 2, + kLstmIntermediateTensorBase + 3, + kLstmIntermediateTensorBase + 4}; + int input_zero_points[2] = {1, -21}; + float input_scales[2] = {1, 0.004705882165580988}; + 
TfLiteAffineQuantization input_quant = { + tflite::testing::FloatArrayFromFloats(input_scales), + tflite::testing::IntArrayFromInts(input_zero_points), 0}; + int intermediate_dim[2] = {1, 0}; + for (int i = 0; i < 5; ++i) { + tensors[kLstmIntermediateTensorBase + i] = + CreateTensor(nullptr, IntArrayFromInts(intermediate_dim)); + tensors[kLstmIntermediateTensorBase + i].quantization = { + kTfLiteAffineQuantization, &input_quant}; + } + + const TFLMRegistration registration = Register_UNIDIRECTIONAL_SEQUENCE_LSTM(); + auto buildin_data = node_contents.BuiltinData(); + micro::KernelRunner runner( + registration, tensors, kLstmMaxNumInputOutputTensors + 1 + 5, + node_contents.KernelInputs(), node_contents.KernelOutputs(), + reinterpret_cast(&buildin_data), + IntArrayFromInts(intermediate_array_data)); + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, runner.InitAndPrepare()); + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, runner.Invoke()); + + const auto& quantization_settings = node_contents.QuantizationSettings(); + + float dequantized_hidden_state[batch_size * state_dimension] = {}; + Dequantize(node_contents.GetHiddenStateData(), batch_size * state_dimension, + quantization_settings.hidden_state.scale, + quantization_settings.hidden_state.zero_point, + dequantized_hidden_state); + + ValidateResultGoldens(eval_check_data.expected_hidden_state, + dequantized_hidden_state, batch_size * state_dimension, + hidden_state_tolerance); + + float dequantized_cell_state[batch_size * state_dimension] = {}; + Dequantize(node_contents.GetCellStateData(), batch_size * state_dimension, + quantization_settings.cell_state.scale, + quantization_settings.cell_state.zero_point, + dequantized_cell_state); + ValidateResultGoldens(eval_check_data.expected_cell_state, + dequantized_cell_state, batch_size * state_dimension, + cell_state_tolerance); + + float dequantized_output[batch_size * state_dimension * time_steps] = {}; + Dequantize(node_contents.GetOutputData(), + batch_size * state_dimension * time_steps, 
+ quantization_settings.output.scale, + quantization_settings.output.zero_point, dequantized_output); + ValidateResultGoldens(eval_check_data.expected_output, dequantized_output, + batch_size * state_dimension, hidden_state_tolerance); +} + +template +void TestUnidirectionalLSTMFloat( + const LstmEvalCheckData< + batch_size * time_steps * input_dimension, batch_size * state_dimension, + batch_size * state_dimension * time_steps>& eval_check_data, + const float hidden_state_tolerance, const float cell_state_tolerance, + LstmNodeContent& node_contents) { + const TFLMRegistration registration = Register_UNIDIRECTIONAL_SEQUENCE_LSTM(); + auto buildin_data = node_contents.BuiltinData(); + micro::KernelRunner runner( + registration, node_contents.GetTensors(), kLstmMaxNumInputOutputTensors, + node_contents.KernelInputs(), node_contents.KernelOutputs(), + reinterpret_cast(&buildin_data)); + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, runner.InitAndPrepare()); + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, runner.Invoke()); + + ValidateResultGoldens(eval_check_data.expected_hidden_state, + node_contents.GetHiddenStateData(), + batch_size * state_dimension, hidden_state_tolerance); + ValidateResultGoldens(eval_check_data.expected_cell_state, + node_contents.GetCellStateData(), + batch_size * state_dimension, cell_state_tolerance); + ValidateResultGoldens(eval_check_data.expected_output, + node_contents.GetOutputData(), + batch_size * state_dimension, hidden_state_tolerance); +} + +} // namespace +} // namespace testing +} // namespace tflite + +TF_LITE_MICRO_TESTS_BEGIN +// TODO(b/230666079) enable below tests for xtensa when the xtensa +// kernel is reconciled with reference kernel +#if !defined(XTENSA) +TF_LITE_MICRO_TEST(TestUnidirectionalLSTMFloat) { + const tflite::testing::LstmEvalCheckData<12, 4, 12> kernel_eval_data = + tflite::testing::Get2X2LstmEvalCheckData(); + tflite::testing::LstmNodeContent + float_node_contents = tflite::testing::Create2x3x2X2FloatNodeContents( + 
kernel_eval_data.input_data, kernel_eval_data.hidden_state); + + const float tolerance = 1e-6; + tflite::testing::TestUnidirectionalLSTMFloat(kernel_eval_data, tolerance, + tolerance, float_node_contents); +} + +TF_LITE_MICRO_TEST(TestUnidirectionalLSTMInt8) { + const tflite::testing::LstmEvalCheckData<12, 4, 12> kernel_eval_data = + tflite::testing::Get2X2LstmEvalCheckData(); + tflite::testing::LstmNodeContent + int8_node_contents = tflite::testing::Create2x3x2X2Int8NodeContents( + kernel_eval_data.input_data, kernel_eval_data.hidden_state); + + const float hidden_state_tolerance = 1e-2; + // cell state degrade due to integer overflow + const float cell_state_tolerance = 1e-2; + tflite::testing::TestUnidirectionalLSTMInteger( + kernel_eval_data, hidden_state_tolerance, cell_state_tolerance, + int8_node_contents); +} + +TF_LITE_MICRO_TEST(TestUnidirectionalLSTMInt16) { + const tflite::testing::LstmEvalCheckData<12, 4, 12> kernel_eval_data = + tflite::testing::Get2X2LstmEvalCheckData(); + tflite::testing::LstmNodeContent + int16_node_contents = tflite::testing::Create2x3x2X2Int16NodeContents( + kernel_eval_data.input_data, kernel_eval_data.hidden_state); + + const float hidden_state_tolerance = 1e-3; // actually very close to 1e-4 + // cell state degrade due to integer overflow + const float cell_state_tolerance = 1e-2; + tflite::testing::TestUnidirectionalLSTMInteger( + kernel_eval_data, hidden_state_tolerance, cell_state_tolerance, + int16_node_contents); +} +#endif // !defined(XTENSA) +TF_LITE_MICRO_TESTS_END diff --git a/tensorflow/lite/micro/kernels/unpack.cc b/tensorflow/lite/micro/kernels/unpack.cc new file mode 100644 index 0000000..3ce4c33 --- /dev/null +++ b/tensorflow/lite/micro/kernels/unpack.cc @@ -0,0 +1,108 @@ +/* Copyright 2023 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. 
+You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#include "tensorflow/lite/c/builtin_op_data.h" +#include "tensorflow/lite/c/common.h" +#include "tensorflow/lite/kernels/internal/tensor_ctypes.h" +#include "tensorflow/lite/kernels/kernel_util.h" +#include "tensorflow/lite/micro/kernels/kernel_util.h" +#include "tensorflow/lite/micro/micro_log.h" + +namespace tflite { + +namespace { + +constexpr int kInputTensor = 0; + +template +TfLiteStatus UnpackImpl(TfLiteContext* context, TfLiteNode* node, + const TfLiteEvalTensor* input, int output_count, + int axis) { + const TfLiteEvalTensor* output0 = + tflite::micro::GetEvalOutput(context, node, 0); + const TfLiteIntArray* input_dims = input->dims; + const TfLiteIntArray* output_dims = output0->dims; + const int dimensions = input_dims->size; + + if (axis < 0) { + axis += input->dims->size; + } + + TFLITE_DCHECK_LT(axis, dimensions); + + int outer_size = 1; + for (int i = 0; i < axis; ++i) { + outer_size *= input_dims->data[i]; + } + int copy_size = 1; + for (int i = axis + 1; i < dimensions; ++i) { + copy_size *= input_dims->data[i]; + } + int output_size = 1; + for (int i = 0; i < output_dims->size; ++i) { + output_size *= output_dims->data[i]; + } + TFLITE_DCHECK_EQ(output_size, copy_size * outer_size); + + const T* input_data = tflite::micro::GetTensorData(input); + + for (int i = 0; i < output_count; ++i) { + TfLiteEvalTensor* t = tflite::micro::GetEvalOutput(context, node, i); + T* output_data = tflite::micro::GetTensorData(t); + for (int k = 0; k < outer_size; ++k) { + T* output_ptr = 
output_data + copy_size * k; + int loc = k * output_count * copy_size + i * copy_size; + const T* input_ptr = input_data + loc; + for (int j = 0; j < copy_size; ++j) output_ptr[j] = input_ptr[j]; + } + } + + return kTfLiteOk; +} + +TfLiteStatus Eval(TfLiteContext* context, TfLiteNode* node) { + TfLiteUnpackParams* data = + reinterpret_cast(node->builtin_data); + + const TfLiteEvalTensor* input = + tflite::micro::GetEvalInput(context, node, kInputTensor); + + switch (input->type) { + case kTfLiteFloat32: { + return UnpackImpl(context, node, input, data->num, data->axis); + } + case kTfLiteInt32: { + return UnpackImpl(context, node, input, data->num, data->axis); + } + case kTfLiteInt8: { + return UnpackImpl(context, node, input, data->num, data->axis); + } + default: { + MicroPrintf("Type '%s' is not supported by unpack.", + TfLiteTypeGetName(input->type)); + return kTfLiteError; + } + } + + return kTfLiteOk; +} + +} // namespace + +TFLMRegistration Register_UNPACK() { + return tflite::micro::RegisterOp(nullptr, nullptr, Eval); +} + +} // namespace tflite diff --git a/tensorflow/lite/micro/kernels/unpack_test.cc b/tensorflow/lite/micro/kernels/unpack_test.cc new file mode 100644 index 0000000..2e16822 --- /dev/null +++ b/tensorflow/lite/micro/kernels/unpack_test.cc @@ -0,0 +1,281 @@ +/* Copyright 2023 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/ + +#include "tensorflow/lite/c/builtin_op_data.h" +#include "tensorflow/lite/c/common.h" +#include "tensorflow/lite/micro/debug_log.h" +#include "tensorflow/lite/micro/kernels/kernel_runner.h" +#include "tensorflow/lite/micro/test_helpers.h" +#include "tensorflow/lite/micro/testing/micro_test.h" + +namespace tflite { +namespace testing { + +void TestUnpackThreeOutputsFloat( + int* input_dims_data, const float* input_data, int axis, + int* output1_dims_data, const float* expected_output1_data, + int* output2_dims_data, const float* expected_output2_data, + int* output3_dims_data, const float* expected_output3_data, + float* output1_data, float* output2_data, float* output3_data) { + TfLiteIntArray* input_dims = IntArrayFromInts(input_dims_data); + TfLiteIntArray* output1_dims = IntArrayFromInts(output1_dims_data); + TfLiteIntArray* output2_dims = IntArrayFromInts(output2_dims_data); + TfLiteIntArray* output3_dims = IntArrayFromInts(output3_dims_data); + const int output1_dims_count = ElementCount(*output1_dims); + const int output2_dims_count = ElementCount(*output2_dims); + const int output3_dims_count = ElementCount(*output3_dims); + + constexpr int input_size = 1; + constexpr int output_size = 3; + constexpr int tensors_size = input_size + output_size; + TfLiteTensor tensors[tensors_size] = { + CreateTensor(input_data, input_dims), + CreateTensor(output1_data, output1_dims), + CreateTensor(output2_data, output2_dims), + CreateTensor(output3_data, output3_dims)}; + + // Place a unique value in the uninitialized output buffer. 
+ for (int i = 0; i < output1_dims_count; ++i) { + output1_data[i] = 23; + } + + for (int i = 0; i < output2_dims_count; ++i) { + output2_data[i] = 23; + } + + for (int i = 0; i < output3_dims_count; ++i) { + output3_data[i] = 23; + } + + TfLiteUnpackParams builtin_data = { + .num = 3, + .axis = axis, + }; + + int inputs_array_data[] = {1, 0}; + TfLiteIntArray* inputs_array = IntArrayFromInts(inputs_array_data); + int outputs_array_data[] = {3, 1, 2, 3}; + TfLiteIntArray* outputs_array = IntArrayFromInts(outputs_array_data); + + const TFLMRegistration registration = tflite::Register_UNPACK(); + micro::KernelRunner runner(registration, tensors, tensors_size, inputs_array, + outputs_array, + reinterpret_cast(&builtin_data)); + + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, runner.InitAndPrepare()); + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, runner.Invoke()); + + for (int i = 0; i < output1_dims_count; ++i) { + TF_LITE_MICRO_EXPECT_NEAR(expected_output1_data[i], output1_data[i], 1e-5f); + } + + for (int i = 0; i < output2_dims_count; ++i) { + TF_LITE_MICRO_EXPECT_NEAR(expected_output2_data[i], output2_data[i], 1e-5f); + } + + for (int i = 0; i < output3_dims_count; ++i) { + TF_LITE_MICRO_EXPECT_NEAR(expected_output3_data[i], output3_data[i], 1e-5f); + } +} + +void TestUnpackOneOutputFloat(int* input_dims_data, const float* input_data, + int axis, int* output_dims_data, + const float* expected_output_data, + float* output_data) { + TfLiteIntArray* input_dims = IntArrayFromInts(input_dims_data); + TfLiteIntArray* output_dims = IntArrayFromInts(output_dims_data); + const int output_dims_count = ElementCount(*output_dims); + + constexpr int input_size = 1; + constexpr int output_size = 1; + constexpr int tensors_size = input_size + output_size; + TfLiteTensor tensors[tensors_size] = {CreateTensor(input_data, input_dims), + CreateTensor(output_data, output_dims)}; + + // Place a unique value in the uninitialized output buffer. 
+ for (int i = 0; i < output_dims_count; ++i) { + output_data[i] = 23; + } + + TfLiteUnpackParams builtin_data = { + .num = 1, + .axis = axis, + }; + + int inputs_array_data[] = {1, 0}; + TfLiteIntArray* inputs_array = IntArrayFromInts(inputs_array_data); + int outputs_array_data[] = {1, 1}; + TfLiteIntArray* outputs_array = IntArrayFromInts(outputs_array_data); + + const TFLMRegistration registration = tflite::Register_UNPACK(); + micro::KernelRunner runner(registration, tensors, tensors_size, inputs_array, + outputs_array, + reinterpret_cast(&builtin_data)); + + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, runner.InitAndPrepare()); + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, runner.Invoke()); + + for (int i = 0; i < output_dims_count; ++i) { + TF_LITE_MICRO_EXPECT_NEAR(expected_output_data[i], output_data[i], 1e-5f); + } +} + +void TestUnpackThreeOutputsQuantized32( + int* input_dims_data, const int32_t* input_data, int axis, + int* output1_dims_data, const int32_t* expected_output1_data, + int* output2_dims_data, const int32_t* expected_output2_data, + int* output3_dims_data, const int32_t* expected_output3_data, + int32_t* output1_data, int32_t* output2_data, int32_t* output3_data) { + TfLiteIntArray* input_dims = IntArrayFromInts(input_dims_data); + TfLiteIntArray* output1_dims = IntArrayFromInts(output1_dims_data); + TfLiteIntArray* output2_dims = IntArrayFromInts(output2_dims_data); + TfLiteIntArray* output3_dims = IntArrayFromInts(output3_dims_data); + const int output1_dims_count = ElementCount(*output1_dims); + const int output2_dims_count = ElementCount(*output2_dims); + const int output3_dims_count = ElementCount(*output3_dims); + + constexpr int input_size = 1; + constexpr int output_size = 3; + constexpr int tensors_size = input_size + output_size; + TfLiteTensor tensors[tensors_size] = { + CreateTensor(input_data, input_dims), + CreateTensor(output1_data, output1_dims), + CreateTensor(output2_data, output2_dims), + CreateTensor(output3_data, output3_dims)}; + + // 
Place a unique value in the uninitialized output buffer. + for (int i = 0; i < output1_dims_count; ++i) { + output1_data[i] = 23; + } + + for (int i = 0; i < output2_dims_count; ++i) { + output2_data[i] = 23; + } + + for (int i = 0; i < output3_dims_count; ++i) { + output3_data[i] = 23; + } + + TfLiteUnpackParams builtin_data = { + .num = 3, + .axis = axis, + }; + + int inputs_array_data[] = {1, 0}; + TfLiteIntArray* inputs_array = IntArrayFromInts(inputs_array_data); + int outputs_array_data[] = {3, 1, 2, 3}; + TfLiteIntArray* outputs_array = IntArrayFromInts(outputs_array_data); + + const TFLMRegistration registration = tflite::Register_UNPACK(); + micro::KernelRunner runner(registration, tensors, tensors_size, inputs_array, + outputs_array, + reinterpret_cast(&builtin_data)); + + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, runner.InitAndPrepare()); + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, runner.Invoke()); + + for (int i = 0; i < output1_dims_count; ++i) { + TF_LITE_MICRO_EXPECT_EQ(expected_output1_data[i], output1_data[i]); + } + + for (int i = 0; i < output2_dims_count; ++i) { + TF_LITE_MICRO_EXPECT_EQ(expected_output2_data[i], output2_data[i]); + } + + for (int i = 0; i < output3_dims_count; ++i) { + TF_LITE_MICRO_EXPECT_EQ(expected_output3_data[i], output3_data[i]); + } +} + +} // namespace testing +} // namespace tflite + +TF_LITE_MICRO_TESTS_BEGIN + +TF_LITE_MICRO_TEST(UnpackFloatThreeOutputs) { + int input_shape[] = {2, 3, 2}; + const float input_values[] = {1, 2, 3, 4, 5, 6}; + int output1_shape[] = {1, 2}; + const float output1_golden[] = {1, 2}; + int output2_shape[] = {1, 2}; + const float output2_golden[] = {3, 4}; + int output3_shape[] = {1, 2}; + const float output3_golden[] = {5, 6}; + constexpr int output1_dims_count = 2; + constexpr int output2_dims_count = 2; + constexpr int output3_dims_count = 2; + float output1_data[output1_dims_count]; + float output2_data[output2_dims_count]; + float output3_data[output3_dims_count]; + 
tflite::testing::TestUnpackThreeOutputsFloat( + input_shape, input_values, 0, output1_shape, output1_golden, + output2_shape, output2_golden, output3_shape, output3_golden, + output1_data, output2_data, output3_data); +} + +TF_LITE_MICRO_TEST(UnpackFloatThreeOutputsNegativeAxisTwo) { + int input_shape[] = {2, 3, 2}; + const float input_values[] = {1, 2, 3, 4, 5, 6}; + int output1_shape[] = {1, 2}; + const float output1_golden[] = {1, 2}; + int output2_shape[] = {1, 2}; + const float output2_golden[] = {3, 4}; + int output3_shape[] = {1, 2}; + const float output3_golden[] = {5, 6}; + constexpr int output1_dims_count = 2; + constexpr int output2_dims_count = 2; + constexpr int output3_dims_count = 2; + float output1_data[output1_dims_count]; + float output2_data[output2_dims_count]; + float output3_data[output3_dims_count]; + tflite::testing::TestUnpackThreeOutputsFloat( + input_shape, input_values, -2, output1_shape, output1_golden, + output2_shape, output2_golden, output3_shape, output3_golden, + output1_data, output2_data, output3_data); +} + +TF_LITE_MICRO_TEST(UnpackFloatOneOutput) { + int input_shape[] = {2, 1, 6}; + const float input_values[] = {1, 2, 3, 4, 5, 6}; + int output_shape[] = {1, 6}; + const float golden[] = {1, 2, 3, 4, 5, 6}; + constexpr int output_dims_count = 6; + float output_data[output_dims_count]; + tflite::testing::TestUnpackOneOutputFloat(input_shape, input_values, 0, + output_shape, golden, output_data); +} + +TF_LITE_MICRO_TEST(UnpackQuantized32ThreeOutputs) { + int input_shape[] = {2, 3, 2}; + const int32_t input_values[] = {1, 2, 3, 4, 5, 6}; + int output1_shape[] = {1, 2}; + const int32_t output1_golden[] = {1, 2}; + int output2_shape[] = {1, 2}; + const int32_t output2_golden[] = {3, 4}; + int output3_shape[] = {1, 2}; + const int32_t output3_golden[] = {5, 6}; + constexpr int output1_dims_count = 2; + constexpr int output2_dims_count = 2; + constexpr int output3_dims_count = 2; + int32_t output1_data[output1_dims_count]; + int32_t 
output2_data[output2_dims_count]; + int32_t output3_data[output3_dims_count]; + tflite::testing::TestUnpackThreeOutputsQuantized32( + input_shape, input_values, 0, output1_shape, output1_golden, + output2_shape, output2_golden, output3_shape, output3_golden, + output1_data, output2_data, output3_data); +} + +TF_LITE_MICRO_TESTS_END diff --git a/tensorflow/lite/micro/kernels/var_handle.cc b/tensorflow/lite/micro/kernels/var_handle.cc new file mode 100644 index 0000000..06087f7 --- /dev/null +++ b/tensorflow/lite/micro/kernels/var_handle.cc @@ -0,0 +1,93 @@ +/* Copyright 2021 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/ + +#include + +#include + +#include "tensorflow/lite/c/builtin_op_data.h" +#include "tensorflow/lite/c/common.h" +#include "tensorflow/lite/kernels/internal/compatibility.h" +#include "tensorflow/lite/kernels/kernel_util.h" +#include "tensorflow/lite/micro/kernels/kernel_util.h" +#include "tensorflow/lite/micro/memory_helpers.h" +#include "tensorflow/lite/micro/micro_graph.h" +#include "tensorflow/lite/micro/micro_log.h" +#include "tensorflow/lite/micro/micro_resource_variable.h" +#include "tensorflow/lite/schema/schema_generated.h" + +namespace tflite { + +namespace { + +struct OpData { + int32_t resource_id; +}; + +void* Init(TfLiteContext* context, const char* buffer, size_t length) { + TFLITE_DCHECK(context->AllocatePersistentBuffer != nullptr); + return context->AllocatePersistentBuffer(context, sizeof(OpData)); +} + +TfLiteStatus Prepare(TfLiteContext* context, TfLiteNode* node) { + OpData* op_data = reinterpret_cast(node->user_data); + const auto* params = + reinterpret_cast(node->builtin_data); + + tflite::MicroContext* micro_context = tflite::GetMicroContext(context); + MicroGraph& graph_info = micro_context->graph(); + + MicroResourceVariables* resources = graph_info.GetResourceVariables(); + if (resources == nullptr) { + MicroPrintf( + "VAR_HANDLE requires resource variables. Please create " + "ResourceVariables and pass it to the interpreter."); + return kTfLiteError; + } + op_data->resource_id = + resources->CreateIdIfNoneFound(params->container, params->shared_name); + if (op_data->resource_id < 0) { + return kTfLiteError; + } + + TfLiteEvalTensor* output = tflite::micro::GetEvalOutput(context, node, 0); + TFLITE_DCHECK(output != nullptr); + + // Assign saved resource_id so this output tensor will always return the + // correct resource id. 
+ output->data.i32 = &op_data->resource_id; + + return kTfLiteOk; +} + +TfLiteStatus Eval(TfLiteContext* context, TfLiteNode* node) { + OpData* op_data = reinterpret_cast(node->user_data); + + TfLiteEvalTensor* output = tflite::micro::GetEvalOutput(context, node, 0); + TFLITE_DCHECK(output != nullptr); + + // Assign saved resource_id so this output tensor will always return the + // correct resource id. + output->data.i32 = &op_data->resource_id; + return kTfLiteOk; +} + +} // namespace. + +TFLMRegistration Register_VAR_HANDLE() { + return tflite::micro::RegisterOp(Init, Prepare, Eval); +} + +} // namespace tflite diff --git a/tensorflow/lite/micro/kernels/while.cc b/tensorflow/lite/micro/kernels/while.cc new file mode 100644 index 0000000..097a342 --- /dev/null +++ b/tensorflow/lite/micro/kernels/while.cc @@ -0,0 +1,133 @@ +/* Copyright 2022 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/ + +#include + +#include + +#include "tensorflow/lite/c/builtin_op_data.h" +#include "tensorflow/lite/c/common.h" +#include "tensorflow/lite/kernels/internal/compatibility.h" +#include "tensorflow/lite/kernels/kernel_util.h" +#include "tensorflow/lite/micro/kernels/kernel_util.h" +#include "tensorflow/lite/micro/memory_helpers.h" +#include "tensorflow/lite/micro/micro_context.h" +#include "tensorflow/lite/micro/micro_graph.h" +#include "tensorflow/lite/micro/micro_utils.h" + +namespace tflite { + +namespace { + +struct OpData { + int cond_subgraph_index; + int body_subgraph_index; +}; + +void* Init(TfLiteContext* context, const char* buffer, size_t length) { + TFLITE_DCHECK(context->AllocatePersistentBuffer != nullptr); + return context->AllocatePersistentBuffer(context, sizeof(OpData)); +} + +TfLiteStatus Prepare(TfLiteContext* context, TfLiteNode* node) { + OpData* op_data = reinterpret_cast(node->user_data); + const auto* params = + reinterpret_cast(node->builtin_data); + + op_data->cond_subgraph_index = params->cond_subgraph_index; + op_data->body_subgraph_index = params->body_subgraph_index; + + // The first input is the condition. 
+ tflite::MicroContext* micro_context = tflite::GetMicroContext(context); + + size_t num_inputs = node->inputs->size; + size_t num_outputs = node->outputs->size; + + MicroGraph& graph_info = micro_context->graph(); + + TF_LITE_ENSURE(context, + op_data->cond_subgraph_index < graph_info.NumSubgraphs()); + TF_LITE_ENSURE(context, + op_data->body_subgraph_index < graph_info.NumSubgraphs()); + + TF_LITE_ENSURE_EQ(context, num_inputs, + graph_info.NumSubgraphInputs(op_data->cond_subgraph_index)); + TF_LITE_ENSURE_EQ(context, num_inputs, + graph_info.NumSubgraphInputs(op_data->body_subgraph_index)); + TF_LITE_ENSURE_EQ(context, num_inputs, num_outputs); + TF_LITE_ENSURE_EQ( + context, num_outputs, + graph_info.NumSubgraphOutputs(op_data->body_subgraph_index)); + + return kTfLiteOk; +} + +TfLiteStatus Eval(TfLiteContext* context, TfLiteNode* node) { + const OpData* op_data = reinterpret_cast(node->user_data); + + tflite::MicroContext* micro_context = tflite::GetMicroContext(context); + MicroGraph* graph_info = µ_context->graph(); + + TF_LITE_ENSURE_OK(context, + tflite::micro::CopyOpInputsToSubgraphInputs( + context, node, graph_info, op_data->cond_subgraph_index, + /*first_tensor_idx=*/0)); + + TF_LITE_ENSURE_OK(context, + graph_info->InvokeSubgraph(op_data->cond_subgraph_index)); + + TfLiteEvalTensor* cond_subgraph_output = graph_info->GetSubgraphOutput( + op_data->cond_subgraph_index, /*tensor_idx=*/0); + bool cond_value = cond_subgraph_output->data.b[0]; + + TF_LITE_ENSURE_OK(context, + tflite::micro::CopyOpInputsToSubgraphInputs( + context, node, graph_info, op_data->body_subgraph_index, + /*first_tensor_idx=*/0)); + TF_LITE_ENSURE_OK(context, + tflite::micro::CopyOpInputsToOpOutputs(context, node)); + + while (cond_value == true) { + // Copy output of this iteration back to the body input. 
+ TF_LITE_ENSURE_OK( + context, tflite::micro::CopyOpOutputsToSubgraphInputs( + context, node, graph_info, op_data->body_subgraph_index)); + TF_LITE_ENSURE_OK(context, + graph_info->InvokeSubgraph(op_data->body_subgraph_index)); + + TF_LITE_ENSURE_OK( + context, tflite::micro::CopySubgraphOutputsToOpOutputs( + context, node, graph_info, op_data->body_subgraph_index)); + TF_LITE_ENSURE_OK( + context, tflite::micro::CopyOpOutputsToSubgraphInputs( + context, node, graph_info, op_data->cond_subgraph_index)); + TF_LITE_ENSURE_OK(context, + graph_info->InvokeSubgraph(op_data->cond_subgraph_index)); + + cond_subgraph_output = graph_info->GetSubgraphOutput( + op_data->cond_subgraph_index, /*tensor_idx=*/0); + cond_value = cond_subgraph_output->data.b[0]; + } + + return kTfLiteOk; +} + +} // namespace. + +TFLMRegistration Register_WHILE() { + return tflite::micro::RegisterOp(Init, Prepare, Eval); +} + +} // namespace tflite diff --git a/tensorflow/lite/micro/kernels/while_test.cc b/tensorflow/lite/micro/kernels/while_test.cc new file mode 100644 index 0000000..536f903 --- /dev/null +++ b/tensorflow/lite/micro/kernels/while_test.cc @@ -0,0 +1,78 @@ +/* Copyright 2022 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/ + +#include "tensorflow/lite/c/builtin_op_data.h" +#include "tensorflow/lite/c/common.h" +#include "tensorflow/lite/micro/kernels/kernel_runner.h" +#include "tensorflow/lite/micro/micro_arena_constants.h" +#include "tensorflow/lite/micro/micro_interpreter.h" +#include "tensorflow/lite/micro/mock_micro_graph.h" +#include "tensorflow/lite/micro/test_helper_custom_ops.h" +#include "tensorflow/lite/micro/test_helpers.h" +#include "tensorflow/lite/micro/testing/micro_test.h" + +TF_LITE_MICRO_TESTS_BEGIN + +TF_LITE_MICRO_TEST(WhileShouldNeverInvokeConditionFalse) { + constexpr int kArenaSize = 5000; + uint8_t arena[kArenaSize]; + + const tflite::Model* model = + tflite::testing::GetSimpleModelWithSubgraphsAndWhile(); + tflite::MicroMutableOpResolver<3> resolver; + resolver.AddWhile(); + resolver.AddAdd(); + resolver.AddLess(); + tflite::MicroInterpreter interpreter(model, resolver, arena, kArenaSize); + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, interpreter.AllocateTensors()); + TfLiteTensor* input0 = interpreter.input(0); + TfLiteTensor* input1 = interpreter.input(1); + TfLiteTensor* output0 = interpreter.output(0); + TfLiteTensor* output1 = interpreter.output(1); + input0->data.f[0] = 3.0f; + input1->data.f[0] = 2.0f; + + interpreter.Invoke(); + + TF_LITE_MICRO_EXPECT_EQ(output0->data.f[0], 3.0f); + TF_LITE_MICRO_EXPECT_EQ(output1->data.f[0], 2.0f); +} + +TF_LITE_MICRO_TEST(WhileShouldInvokeOnce) { + constexpr int kArenaSize = 5000; + uint8_t arena[kArenaSize]; + + const tflite::Model* model = + tflite::testing::GetSimpleModelWithSubgraphsAndWhile(); + tflite::MicroMutableOpResolver<3> resolver; + resolver.AddWhile(); + resolver.AddAdd(); + resolver.AddLess(); + tflite::MicroInterpreter interpreter(model, resolver, arena, kArenaSize); + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, interpreter.AllocateTensors()); + TfLiteTensor* input0 = interpreter.input(0); + TfLiteTensor* input1 = interpreter.input(1); + 
TfLiteTensor* output0 = interpreter.output(0); + TfLiteTensor* output1 = interpreter.output(1); + input0->data.f[0] = 2.0f; + input1->data.f[0] = 3.0f; + + interpreter.Invoke(); + + TF_LITE_MICRO_EXPECT_EQ(output0->data.f[0], 5.0f); + TF_LITE_MICRO_EXPECT_EQ(output1->data.f[0], 3.0f); +} + +TF_LITE_MICRO_TESTS_END diff --git a/tensorflow/lite/micro/kernels/xtensa/add.cc b/tensorflow/lite/micro/kernels/xtensa/add.cc new file mode 100644 index 0000000..4e4f805 --- /dev/null +++ b/tensorflow/lite/micro/kernels/xtensa/add.cc @@ -0,0 +1,275 @@ +/* Copyright 2022 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/ +#include "tensorflow/lite/kernels/internal/reference/add.h" + +#include "tensorflow/lite/c/builtin_op_data.h" +#include "tensorflow/lite/c/common.h" +#include "tensorflow/lite/kernels/internal/quantization_util.h" +#include "tensorflow/lite/kernels/internal/reference/integer_ops/add.h" +#include "tensorflow/lite/kernels/internal/reference/process_broadcast_shapes.h" +#include "tensorflow/lite/kernels/internal/tensor_ctypes.h" +#include "tensorflow/lite/kernels/kernel_util.h" +#include "tensorflow/lite/kernels/op_macros.h" +#include "tensorflow/lite/micro/kernels/add.h" +#include "tensorflow/lite/micro/kernels/kernel_util.h" +#include "tensorflow/lite/micro/kernels/xtensa/xtensa.h" +#include "tensorflow/lite/micro/kernels/xtensa/xtensa_add.h" +#include "tensorflow/lite/micro/memory_helpers.h" +#include "tensorflow/lite/micro/micro_log.h" + +namespace tflite { + +TfLiteStatus Prepare(TfLiteContext* context, TfLiteNode* node) { + TF_LITE_ENSURE_OK(context, AddPrepare(context, node)); + +#if defined(VISION_P6) + TF_LITE_ENSURE_OK(context, AddPrepareVision(context, node)); +#endif // VISION_P6 + return kTfLiteOk; +} + +TfLiteStatus EvalAdd(TfLiteContext* context, TfLiteNode* node, + TfLiteAddParams* params, const OpDataAdd* data, + const TfLiteEvalTensor* input1, + const TfLiteEvalTensor* input2, TfLiteEvalTensor* output) { + switch (output->type) { + case kTfLiteFloat32: { + tflite::ArithmeticParams op_params; + SetActivationParams(data->output_activation_min_f32, + data->output_activation_max_f32, &op_params); + if (data->requires_broadcast) { + reference_ops::BroadcastAdd4DSlow( + op_params, tflite::micro::GetTensorShape(input1), + tflite::micro::GetTensorData(input1), + tflite::micro::GetTensorShape(input2), + tflite::micro::GetTensorData(input2), + tflite::micro::GetTensorShape(output), + tflite::micro::GetTensorData(output)); + } else { + reference_ops::Add(op_params, 
tflite::micro::GetTensorShape(input1), + tflite::micro::GetTensorData(input1), + tflite::micro::GetTensorShape(input2), + tflite::micro::GetTensorData(input2), + tflite::micro::GetTensorShape(output), + tflite::micro::GetTensorData(output)); + } + } break; + case kTfLiteInt32: { + tflite::ArithmeticParams op_params; + SetActivationParams(std::numeric_limits::lowest(), + std::numeric_limits::max(), &op_params); + if (data->requires_broadcast) { + reference_ops::BroadcastAdd4DSlow( + op_params, tflite::micro::GetTensorShape(input1), + tflite::micro::GetTensorData(input1), + tflite::micro::GetTensorShape(input2), + tflite::micro::GetTensorData(input2), + tflite::micro::GetTensorShape(output), + tflite::micro::GetTensorData(output)); + } else { + reference_ops::Add(op_params, tflite::micro::GetTensorShape(input1), + tflite::micro::GetTensorData(input1), + tflite::micro::GetTensorShape(input2), + tflite::micro::GetTensorData(input2), + tflite::micro::GetTensorShape(output), + tflite::micro::GetTensorData(output)); + } + } break; + default: + MicroPrintf("Type %s (%d) not supported.", + TfLiteTypeGetName(output->type), output->type); + return kTfLiteError; + } + return kTfLiteOk; +} + +TfLiteStatus EvalAddQuantized(TfLiteContext* context, TfLiteNode* node, + TfLiteAddParams* params, const OpDataAdd* data, + const TfLiteEvalTensor* input1, + const TfLiteEvalTensor* input2, + TfLiteEvalTensor* output) { + tflite::ArithmeticParams op_params; + op_params.left_shift = data->left_shift; + op_params.input1_offset = data->input1_offset; + op_params.input1_multiplier = data->input1_multiplier; + op_params.input1_shift = data->input1_shift; + op_params.input2_offset = data->input2_offset; + op_params.input2_multiplier = data->input2_multiplier; + op_params.input2_shift = data->input2_shift; + op_params.output_offset = data->output_offset; + op_params.output_multiplier = data->output_multiplier; + op_params.output_shift = data->output_shift; + 
SetActivationParams(data->output_activation_min, data->output_activation_max, + &op_params); +#if !(defined(HIFI4)) + bool need_broadcast = reference_ops::ProcessBroadcastShapes( + tflite::micro::GetTensorShape(input1), + tflite::micro::GetTensorShape(input2), &op_params); +#endif // !defined(HIFI4) + + switch (output->type) { + case kTfLiteInt8: { +#if defined(VISION_P6) + const auto& op_data = + *(reinterpret_cast(node->user_data)); + AddEvalQuantizedVision(context, node, *params, op_data, input1, input2, + output); +#elif defined(HIFI4) // defined(VISION_P6) + int err; + const RuntimeShape extended_input1_shape = + RuntimeShape::ExtendedShape(4, tflite::micro::GetTensorShape(input1)); + const RuntimeShape extended_input2_shape = + RuntimeShape::ExtendedShape(4, tflite::micro::GetTensorShape(input2)); + const RuntimeShape extended_output_shape = + RuntimeShape::ExtendedShape(4, tflite::micro::GetTensorShape(output)); + + err = xa_nn_elm_add_broadcast_4D_asym8sxasym8s_asym8s( + tflite::micro::GetTensorData(output), + extended_output_shape.DimsData(), op_params.output_offset, + op_params.output_shift, op_params.output_multiplier, + op_params.quantized_activation_min, + op_params.quantized_activation_max, + tflite::micro::GetTensorData(input1), + extended_input1_shape.DimsData(), op_params.input1_offset, + op_params.input1_shift, op_params.input1_multiplier, + tflite::micro::GetTensorData(input2), + extended_input2_shape.DimsData(), op_params.input2_offset, + op_params.input2_shift, op_params.input2_multiplier, + op_params.left_shift); + + TF_LITE_ENSURE(context, err == 0); +#else // defined(VISION_P6) + if (need_broadcast) { + reference_integer_ops::BroadcastAdd4DSlow( + op_params, tflite::micro::GetTensorShape(input1), + tflite::micro::GetTensorData(input1), + tflite::micro::GetTensorShape(input2), + tflite::micro::GetTensorData(input2), + tflite::micro::GetTensorShape(output), + tflite::micro::GetTensorData(output)); + } else { + reference_integer_ops::Add( + 
op_params, tflite::micro::GetTensorShape(input1), + tflite::micro::GetTensorData(input1), + tflite::micro::GetTensorShape(input2), + tflite::micro::GetTensorData(input2), + tflite::micro::GetTensorShape(output), + tflite::micro::GetTensorData(output)); + } +#endif // defined(VISION_P6) + break; + } + case kTfLiteInt16: { +#if defined(HIFI4) + int err; + const RuntimeShape extended_input1_shape = + RuntimeShape::ExtendedShape(4, tflite::micro::GetTensorShape(input1)); + const RuntimeShape extended_input2_shape = + RuntimeShape::ExtendedShape(4, tflite::micro::GetTensorShape(input2)); + const RuntimeShape extended_output_shape = + RuntimeShape::ExtendedShape(4, tflite::micro::GetTensorShape(output)); + + err = xa_nn_elm_add_broadcast_4D_asym16sxasym16s_asym16s( + tflite::micro::GetTensorData(output), + extended_output_shape.DimsData(), op_params.output_offset, + op_params.output_shift, op_params.output_multiplier, + op_params.quantized_activation_min, + op_params.quantized_activation_max, + tflite::micro::GetTensorData(input1), + extended_input1_shape.DimsData(), op_params.input1_offset, + op_params.input1_shift, op_params.input1_multiplier, + tflite::micro::GetTensorData(input2), + extended_input2_shape.DimsData(), op_params.input2_offset, + op_params.input2_shift, op_params.input2_multiplier, + op_params.left_shift); + + TF_LITE_ENSURE(context, err == 0); +#else // defined(HIFI4) + if (need_broadcast) { + reference_ops::BroadcastAdd4DSlow( + op_params, tflite::micro::GetTensorShape(input1), + tflite::micro::GetTensorData(input1), + tflite::micro::GetTensorShape(input2), + tflite::micro::GetTensorData(input2), + tflite::micro::GetTensorShape(output), + tflite::micro::GetTensorData(output)); + } else { + reference_ops::Add(op_params, tflite::micro::GetTensorShape(input1), + tflite::micro::GetTensorData(input1), + tflite::micro::GetTensorShape(input2), + tflite::micro::GetTensorData(input2), + tflite::micro::GetTensorShape(output), + 
tflite::micro::GetTensorData(output), + false); + } +#endif // defined(HIFI4) + break; + } + default: + MicroPrintf("Type %s (%d) not supported.", + TfLiteTypeGetName(output->type), output->type); + return kTfLiteError; + } + + return kTfLiteOk; +} + +void* AddInit(TfLiteContext* context, const char* buffer, size_t length) { + TFLITE_DCHECK(context->AllocatePersistentBuffer != nullptr); + void* data; +#if defined(VISION_P6) + data = context->AllocatePersistentBuffer(context, sizeof(XtensaAddOpData)); + if (InitXtensaContext()) { + return nullptr; + } +#else + data = context->AllocatePersistentBuffer(context, sizeof(OpDataAdd)); +#endif // defined(VISION_P6) + return data; +} + +TfLiteStatus AddEval(TfLiteContext* context, TfLiteNode* node) { + auto* params = reinterpret_cast(node->builtin_data); + + TFLITE_DCHECK(node->user_data != nullptr); + const OpDataAdd* data = static_cast(node->user_data); + + const TfLiteEvalTensor* input1 = + tflite::micro::GetEvalInput(context, node, kAddInputTensor1); + const TfLiteEvalTensor* input2 = + tflite::micro::GetEvalInput(context, node, kAddInputTensor2); + TfLiteEvalTensor* output = + tflite::micro::GetEvalOutput(context, node, kAddOutputTensor); + + if (output->type == kTfLiteFloat32 || output->type == kTfLiteInt32) { + TF_LITE_ENSURE_OK( + context, EvalAdd(context, node, params, data, input1, input2, output)); + } else if (output->type == kTfLiteInt8 || output->type == kTfLiteInt16) { + TF_LITE_ENSURE_OK(context, EvalAddQuantized(context, node, params, data, + input1, input2, output)); + } else { + MicroPrintf("Type %s (%d) not supported.", TfLiteTypeGetName(output->type), + output->type); + return kTfLiteError; + } + + return kTfLiteOk; +} + +TFLMRegistration Register_ADD() { + return tflite::micro::RegisterOp(AddInit, Prepare, AddEval); +} + +} // namespace tflite diff --git a/tensorflow/lite/micro/kernels/xtensa/add_vision.cc b/tensorflow/lite/micro/kernels/xtensa/add_vision.cc new file mode 100644 index 0000000..4e20713 
--- /dev/null +++ b/tensorflow/lite/micro/kernels/xtensa/add_vision.cc @@ -0,0 +1,121 @@ +/* Copyright 2022 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#if defined(VISION_P6) + +#include + +#include "tensorflow/lite/c/builtin_op_data.h" +#include "tensorflow/lite/c/common.h" +#include "tensorflow/lite/kernels/internal/common.h" +#include "tensorflow/lite/kernels/internal/tensor_ctypes.h" +#include "tensorflow/lite/kernels/kernel_util.h" +#include "tensorflow/lite/micro/kernels/add.h" +#include "tensorflow/lite/micro/kernels/kernel_util.h" +#include "tensorflow/lite/micro/kernels/xtensa/xtensa.h" +#include "tensorflow/lite/micro/kernels/xtensa/xtensa_add.h" + +namespace tflite { + +TfLiteStatus AddPrepareVision(TfLiteContext* context, TfLiteNode* node) { + TFLITE_DCHECK(node->user_data != nullptr); + TFLITE_DCHECK(node->builtin_data != nullptr); + + XtensaAddOpData* data = reinterpret_cast(node->user_data); + + MicroContext* micro_context = GetMicroContext(context); + TfLiteTensor* output = + micro_context->AllocateTempOutputTensor(node, kAddOutputTensor); + TF_LITE_ENSURE(context, output != nullptr); + TfLiteTensor* input1 = + micro_context->AllocateTempInputTensor(node, kAddInputTensor1); + TF_LITE_ENSURE(context, input1 != nullptr); + TfLiteTensor* input2 = + micro_context->AllocateTempInputTensor(node, kAddInputTensor2); + TF_LITE_ENSURE(context, input2 
!= nullptr); + + uint32_t context_size = 0; + uint32_t status = xiAddGetMemReqd_Context(&context_size); + TFLITE_DCHECK(status == 0); + if (context_size) { + void* context_data = + context->AllocatePersistentBuffer(context, context_size); + if (context_data == nullptr) { + return kTfLiteError; + } + data->p_context = reinterpret_cast(context_data); + data->context_size = context_size; + } + + uint32_t input1_dims[4] = {1, 1, 1, 1}; + uint32_t input2_dims[4] = {1, 1, 1, 1}; + uint32_t output_dims[4] = {1, 1, 1, 1}; + for (int i = 0; i < NumDimensions(input1); i++) { + input1_dims[i] = + std::max(1, SizeOfDimension(input1, NumDimensions(input1) - 1 - i)); + } + for (int i = 0; i < NumDimensions(input2); i++) { + input2_dims[i] = + std::max(1, SizeOfDimension(input2, NumDimensions(input2) - 1 - i)); + } + for (int i = 0; i < NumDimensions(output); i++) { + output_dims[i] = + std::max(1, SizeOfDimension(output, NumDimensions(output) - 1 - i)); + } + + status = xiAddSetContext( + data->p_context, data->context_size, input1_dims[0], input1_dims[1], + input1_dims[2], input1_dims[3], input2_dims[0], input2_dims[1], + input2_dims[2], input2_dims[3], output_dims[0], output_dims[1], + output_dims[2], output_dims[3], input1->params.zero_point, + input2->params.zero_point, output->params.zero_point, + data->reference_op_data.input1_multiplier, + data->reference_op_data.input2_multiplier, + data->reference_op_data.output_multiplier, + data->reference_op_data.input1_shift, + data->reference_op_data.input2_shift, + data->reference_op_data.output_shift, + data->reference_op_data.output_activation_min, + data->reference_op_data.output_activation_max); + if (status) { + return kTfLiteError; + } + + micro_context->DeallocateTempTfLiteTensor(output); + micro_context->DeallocateTempTfLiteTensor(input1); + micro_context->DeallocateTempTfLiteTensor(input2); + + return kTfLiteOk; +} + +TfLiteStatus AddEvalQuantizedVision(TfLiteContext* context, TfLiteNode* node, + const TfLiteAddParams& 
params, + const XtensaAddOpData& data, + const TfLiteEvalTensor* input1, + const TfLiteEvalTensor* input2, + TfLiteEvalTensor* output) { + const uint32_t input1_size = NumElements(input1->dims); + const uint32_t input2_size = NumElements(input2->dims); + const uint32_t output_size = NumElements(output->dims); + + xiAdd(data.p_context, data.context_size, + const_cast(tflite::micro::GetTensorData(input1)), + input1_size, + const_cast(tflite::micro::GetTensorData(input2)), + input2_size, tflite::micro::GetTensorData(output), output_size); + return kTfLiteOk; +} +} // namespace tflite +#endif // defined(VISION_P6) diff --git a/tensorflow/lite/micro/kernels/xtensa/conv.cc b/tensorflow/lite/micro/kernels/xtensa/conv.cc new file mode 100644 index 0000000..59e576c --- /dev/null +++ b/tensorflow/lite/micro/kernels/xtensa/conv.cc @@ -0,0 +1,140 @@ +/* Copyright 2021 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/ + +#include "tensorflow/lite/micro/kernels/conv.h" + +#include "tensorflow/lite/c/builtin_op_data.h" +#include "tensorflow/lite/c/common.h" +#include "tensorflow/lite/kernels/internal/common.h" +#include "tensorflow/lite/kernels/internal/quantization_util.h" +#include "tensorflow/lite/kernels/internal/reference/conv.h" +#include "tensorflow/lite/kernels/internal/reference/integer_ops/conv.h" +#include "tensorflow/lite/kernels/internal/tensor_ctypes.h" +#include "tensorflow/lite/kernels/kernel_util.h" +#include "tensorflow/lite/kernels/padding.h" +#include "tensorflow/lite/micro/kernels/kernel_util.h" +#include "tensorflow/lite/micro/kernels/xtensa/xtensa.h" +#include "tensorflow/lite/micro/kernels/xtensa/xtensa_conv.h" +#include "tensorflow/lite/micro/micro_log.h" + +namespace tflite { +namespace { + +void* Init(TfLiteContext* context, const char* buffer, size_t length) { + TFLITE_DCHECK(context->AllocatePersistentBuffer != nullptr); + void* data = + context->AllocatePersistentBuffer(context, sizeof(XtensaConvOpData)); +#if defined(VISION_P6) + if (InitXtensaContext()) { + return nullptr; + } +#endif // defined(VISION_P6) + + return data; +} + +TfLiteStatus Prepare(TfLiteContext* context, TfLiteNode* node) { + TF_LITE_ENSURE_OK(context, ConvPrepare(context, node)); + +#if defined(HIFI4) || defined(HIFI5) + TF_LITE_ENSURE_OK(context, ConvPrepareHifi(context, node)); +#endif +#if defined(VISION_P6) + TF_LITE_ENSURE_OK(context, ConvPrepareVision(context, node)); +#endif // VISION_P6 + return kTfLiteOk; +} + +TfLiteStatus Eval(TfLiteContext* context, TfLiteNode* node) { + TFLITE_DCHECK(node->user_data != nullptr); + TFLITE_DCHECK(node->builtin_data != nullptr); + + const TfLiteEvalTensor* input = + tflite::micro::GetEvalInput(context, node, kConvInputTensor); + + const auto& params = + *(reinterpret_cast(node->builtin_data)); + const auto& op_data = *(reinterpret_cast(node->user_data)); + 
+ TfLiteEvalTensor* output = + tflite::micro::GetEvalOutput(context, node, kConvOutputTensor); + const TfLiteEvalTensor* filter = + tflite::micro::GetEvalInput(context, node, kConvWeightsTensor); + const TfLiteEvalTensor* bias = + (NumInputs(node) == 3) + ? tflite::micro::GetEvalInput(context, node, kConvBiasTensor) + : nullptr; + + TfLiteEvalTensor filter_int8 = tflite::micro::MakeUnpackedInt4Tensor( + context, op_data.reference_op_data.filter_buffer_index, filter); + + switch (input->type) { + case kTfLiteInt8: { + switch (filter_int8.type) { + case kTfLiteInt8: { +#if defined(HIFI4) || defined(HIFI5) + ConvEvalHifi(context, node, params, op_data, input, &filter_int8, + bias, output); +#elif defined(VISION_P6) + return ConvEvalVision(context, node, params, op_data, input, + &filter_int8, bias, output); +#else + reference_integer_ops::ConvPerChannel( + ConvParamsQuantized(params, op_data.reference_op_data), + op_data.reference_op_data.per_channel_output_multiplier, + op_data.reference_op_data.per_channel_output_shift, + tflite::micro::GetTensorShape(input), + tflite::micro::GetTensorData(input), + tflite::micro::GetTensorShape(filter), + tflite::micro::GetTensorData(&filter_int8), + tflite::micro::GetTensorShape(bias), + tflite::micro::GetOptionalTensorData(bias), + tflite::micro::GetTensorShape(output), + tflite::micro::GetTensorData(output)); + return kTfLiteOk; +#endif + break; + } + + default: + MicroPrintf("Filter type %s (%d) not supported.", + TfLiteTypeGetName(filter->type), filter->type); + return kTfLiteError; + } + return kTfLiteOk; + } + case kTfLiteInt16: { +#if defined(HIFI4) + ConvEvalHifi16(context, node, params, op_data, input, filter, bias, + output); +#else + return ConvReferenceEvalInt16(context, node); +#endif // defined(HIFI4) + break; + } + default: + MicroPrintf("Type %s (%d) not supported.", TfLiteTypeGetName(input->type), + input->type); + return kTfLiteError; + } + return kTfLiteOk; +} +} // namespace + +TFLMRegistration 
Register_CONV_2D() { + return tflite::micro::RegisterOp(Init, Prepare, Eval); +} + +} // namespace tflite diff --git a/tensorflow/lite/micro/kernels/xtensa/conv_hifi.cc b/tensorflow/lite/micro/kernels/xtensa/conv_hifi.cc new file mode 100644 index 0000000..487c84a --- /dev/null +++ b/tensorflow/lite/micro/kernels/xtensa/conv_hifi.cc @@ -0,0 +1,327 @@ +/* Copyright 2021 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#if defined(HIFI4) || defined(HIFI5) + +#include + +#include "tensorflow/lite/c/builtin_op_data.h" +#include "tensorflow/lite/c/common.h" +#include "tensorflow/lite/kernels/internal/common.h" +#include "tensorflow/lite/kernels/internal/reference/integer_ops/conv.h" +#include "tensorflow/lite/kernels/internal/tensor_ctypes.h" +#include "tensorflow/lite/kernels/kernel_util.h" +#include "tensorflow/lite/micro/kernels/conv.h" +#include "tensorflow/lite/micro/kernels/kernel_util.h" +#include "tensorflow/lite/micro/kernels/xtensa/xtensa.h" +#include "tensorflow/lite/micro/kernels/xtensa/xtensa_conv.h" + +namespace tflite { + +TfLiteStatus ConvPrepareHifi(TfLiteContext* context, TfLiteNode* node) { + XtensaConvOpData* data = static_cast(node->user_data); + const auto params = static_cast(node->builtin_data); + + MicroContext* micro_context = GetMicroContext(context); + + // Calculate scratch memory requirements and request scratch buffer + TfLiteTensor* 
output = + micro_context->AllocateTempOutputTensor(node, kConvOutputTensor); + TF_LITE_ENSURE(context, output != nullptr); + TfLiteTensor* input = + micro_context->AllocateTempInputTensor(node, kConvInputTensor); + TF_LITE_ENSURE(context, input != nullptr); + TfLiteTensor* filter = + micro_context->AllocateTempInputTensor(node, kConvWeightsTensor); + TF_LITE_ENSURE(context, filter != nullptr); + + const RuntimeShape& input_shape = GetTensorShape(input); + const RuntimeShape& filter_shape = GetTensorShape(filter); + const RuntimeShape& output_shape = GetTensorShape(output); + const int input_height = input_shape.Dims(1); + const int input_depth = MatchingDim(input_shape, 3, filter_shape, 3); + const int filter_height = filter_shape.Dims(1); + const int filter_width = filter_shape.Dims(2); + const int output_height = output_shape.Dims(1); + const int output_channels = output_shape.Dims(3); + const int stride_height = params->stride_height; + const int pad_height = data->reference_op_data.padding.height; + + int required_scratch = 0; + // Dilation is currently not supported on HiFi 4 NN Library + if ((params->dilation_width_factor == 1) && + (params->dilation_height_factor == 1)) { + if (input->type == kTfLiteInt8) { + required_scratch = xa_nn_conv2d_std_getsize( + input_height, input_depth, filter_height, filter_width, stride_height, + pad_height, output_height, output_channels, PREC_ASYM8S); + TF_LITE_ENSURE(context, required_scratch > 0); + } + if (input->type == kTfLiteInt16) { + required_scratch = xa_nn_conv2d_std_getsize( + input_height, input_depth, filter_height, filter_width, stride_height, + pad_height, output_height, output_channels, PREC_SYM16S); + TF_LITE_ENSURE(context, required_scratch > 0); + } + } + TF_LITE_ENSURE_OK( + context, context->RequestScratchBufferInArena( + context, required_scratch, &data->scratch_tensor_index)); + + micro_context->DeallocateTempTfLiteTensor(input); + micro_context->DeallocateTempTfLiteTensor(filter); + 
micro_context->DeallocateTempTfLiteTensor(output); + return kTfLiteOk; +} + +#if defined(HIFI4) +TfLiteStatus ConvEvalHifi16(TfLiteContext* context, TfLiteNode* node, + const TfLiteConvParams& params, + const XtensaConvOpData& data, + const TfLiteEvalTensor* input, + const TfLiteEvalTensor* filter, + const TfLiteEvalTensor* bias, + TfLiteEvalTensor* output) { + const RuntimeShape& input_shape = tflite::micro::GetTensorShape(input); + const RuntimeShape& filter_shape = tflite::micro::GetTensorShape(filter); + /* TODO(b/277112516):Dilation is currently not supported on HiFi 4 NN Library + */ + if ((params.dilation_width_factor == 1) && + (params.dilation_height_factor == 1) && + input_shape.Dims(1) >= filter_shape.Dims(1) && + input_shape.Dims(2) >= filter_shape.Dims(2)) { + const int stride_width = params.stride_width; + const int stride_height = params.stride_height; + const int pad_width = data.reference_op_data.padding.width; + const int pad_height = data.reference_op_data.padding.height; + const int32_t output_activation_min = + data.reference_op_data.output_activation_min; + const int32_t output_activation_max = + data.reference_op_data.output_activation_max; + + const RuntimeShape& output_shape = tflite::micro::GetTensorShape(output); + const int batches = MatchingDim(input_shape, 0, output_shape, 0); + const int input_depth = MatchingDim(input_shape, 3, filter_shape, 3); + const int output_depth = MatchingDim(filter_shape, 0, output_shape, 3); + const int input_height = input_shape.Dims(1); + const int input_width = input_shape.Dims(2); + const int filter_height = filter_shape.Dims(1); + const int filter_width = filter_shape.Dims(2); + const int output_height = output_shape.Dims(1); + const int output_width = output_shape.Dims(2); + + const int16_t* input_data = tflite::micro::GetTensorData(input); + const int8_t* filter_data = tflite::micro::GetTensorData(filter); + const int64_t* bias_data = tflite::micro::GetTensorData(bias); + int16_t* output_data = 
tflite::micro::GetTensorData(output); + + int output_data_format = 0; + int out_length = output_height * output_width * output_depth; + if (filter_height == 1 && filter_width == 1) { + for (int batch = 0; batch < batches; ++batch) { + int16_t* p_out_temp; + p_out_temp = &output_data[batch * out_length]; + + TF_LITE_ENSURE_EQ( + context, + xa_nn_conv2d_pointwise_per_chan_sym8sxsym16s( + p_out_temp, const_cast(filter_data), + const_cast(&input_data[batch * input_height * + input_width * input_depth]), + const_cast(bias_data), input_height, input_width, + input_depth, output_depth, 0, + data.reference_op_data.per_channel_output_multiplier, + data.reference_op_data.per_channel_output_shift, 0, + output_data_format), + 0); + + TF_LITE_ENSURE_EQ(context, + xa_nn_vec_activation_min_max_16_16( + p_out_temp, p_out_temp, output_activation_min, + output_activation_max, out_length), + 0); + } + } else { + void* p_scratch = static_cast( + context->GetScratchBuffer(context, data.scratch_tensor_index)); + + for (int batch = 0; batch < batches; ++batch) { + int16_t* p_out_temp; + p_out_temp = &output_data[batch * out_length]; + + { + TF_LITE_ENSURE_EQ( + context, + xa_nn_conv2d_std_per_chan_sym8sxsym16s( + p_out_temp, + &input_data[batch * input_height * input_width * input_depth], + const_cast(filter_data), // filter_data, + bias_data, input_height, input_width, input_depth, + filter_height, filter_width, output_depth, stride_width, + stride_height, pad_width, pad_height, output_height, + output_width, 0, + data.reference_op_data.per_channel_output_multiplier, + data.reference_op_data.per_channel_output_shift, 0, + output_data_format, static_cast(p_scratch)), + 0); + } + TF_LITE_ENSURE_EQ(context, + xa_nn_vec_activation_min_max_16_16( + p_out_temp, p_out_temp, output_activation_min, + output_activation_max, out_length), + 0); + } + } + return kTfLiteOk; + } + reference_integer_ops::ConvPerChannel( + ConvParamsQuantized(params, data.reference_op_data), + 
data.reference_op_data.per_channel_output_multiplier, + data.reference_op_data.per_channel_output_shift, + tflite::micro::GetTensorShape(input), + tflite::micro::GetTensorData(input), + tflite::micro::GetTensorShape(filter), + tflite::micro::GetTensorData(filter), + tflite::micro::GetTensorShape(bias), + tflite::micro::GetTensorData(bias), + tflite::micro::GetTensorShape(output), + tflite::micro::GetTensorData(output)); + return kTfLiteOk; +} +#endif // defined(HIFI4) + +TfLiteStatus ConvEvalHifi(TfLiteContext* context, TfLiteNode* node, + const TfLiteConvParams& params, + const XtensaConvOpData& data, + const TfLiteEvalTensor* input, + const TfLiteEvalTensor* filter, + const TfLiteEvalTensor* bias, + TfLiteEvalTensor* output) { + const RuntimeShape& input_shape = tflite::micro::GetTensorShape(input); + const RuntimeShape& filter_shape = tflite::micro::GetTensorShape(filter); + /* TODO(b/277112516):Dilation is currently not supported on HiFi 4 NN + Library */ + if ((params.dilation_width_factor == 1) && + (params.dilation_height_factor == 1) && + input_shape.Dims(1) >= filter_shape.Dims(1) && + input_shape.Dims(2) >= filter_shape.Dims(2)) { + const int32_t input_offset = -data.reference_op_data.input_zero_point; + const int32_t output_offset = data.reference_op_data.output_zero_point; + const int stride_width = params.stride_width; + const int stride_height = params.stride_height; + const int pad_width = data.reference_op_data.padding.width; + const int pad_height = data.reference_op_data.padding.height; + const int32_t output_activation_min = + data.reference_op_data.output_activation_min; + const int32_t output_activation_max = + data.reference_op_data.output_activation_max; + + const RuntimeShape& output_shape = tflite::micro::GetTensorShape(output); + const int batches = MatchingDim(input_shape, 0, output_shape, 0); + const int input_depth = MatchingDim(input_shape, 3, filter_shape, 3); + const int output_depth = MatchingDim(filter_shape, 0, output_shape, 3); + 
const int input_height = input_shape.Dims(1); + const int input_width = input_shape.Dims(2); + const int filter_height = filter_shape.Dims(1); + const int filter_width = filter_shape.Dims(2); + const int output_height = output_shape.Dims(1); + const int output_width = output_shape.Dims(2); + + const int8_t* input_data = tflite::micro::GetTensorData(input); + const int8_t* filter_data = tflite::micro::GetTensorData(filter); + const int32_t* bias_data = tflite::micro::GetTensorData(bias); + int8_t* output_data = tflite::micro::GetTensorData(output); + + int output_data_format = 0; + int out_length = output_height * output_width * output_depth; + + if (filter_height == 1 && filter_width == 1) { + for (int batch = 0; batch < batches; ++batch) { + int8_t* p_out_temp; + p_out_temp = &output_data[batch * out_length]; + + TF_LITE_ENSURE_EQ( + context, + + xa_nn_conv2d_pointwise_per_chan_sym8sxasym8s( + p_out_temp, const_cast(filter_data), + const_cast(&input_data[batch * input_height * + input_width * input_depth]), + const_cast(bias_data), input_height, input_width, + input_depth, output_depth, input_offset, + data.reference_op_data.per_channel_output_multiplier, + data.reference_op_data.per_channel_output_shift, output_offset, + output_data_format), + 0); + + TF_LITE_ENSURE_EQ(context, + xa_nn_vec_activation_min_max_8_8( + p_out_temp, p_out_temp, output_activation_min, + output_activation_max, out_length), + 0); + } + } else { + void* p_scratch = static_cast( + context->GetScratchBuffer(context, data.scratch_tensor_index)); + + for (int batch = 0; batch < batches; ++batch) { + int8_t* p_out_temp; + p_out_temp = &output_data[batch * out_length]; + + { + TF_LITE_ENSURE_EQ( + context, + xa_nn_conv2d_std_per_chan_sym8sxasym8s( + p_out_temp, + &input_data[batch * input_height * input_width * input_depth], + const_cast(filter_data), // filter_data, + bias_data, input_height, input_width, input_depth, + filter_height, filter_width, output_depth, stride_width, + stride_height, 
pad_width, pad_height, output_height, + output_width, input_offset, + data.reference_op_data.per_channel_output_multiplier, + data.reference_op_data.per_channel_output_shift, + output_offset, output_data_format, + static_cast(p_scratch)), + 0); + } + + TF_LITE_ENSURE_EQ(context, + xa_nn_vec_activation_min_max_8_8( + p_out_temp, p_out_temp, output_activation_min, + output_activation_max, out_length), + 0); + } + } + return kTfLiteOk; + } + + reference_integer_ops::ConvPerChannel( + ConvParamsQuantized(params, data.reference_op_data), + data.reference_op_data.per_channel_output_multiplier, + data.reference_op_data.per_channel_output_shift, + tflite::micro::GetTensorShape(input), + tflite::micro::GetTensorData(input), + tflite::micro::GetTensorShape(filter), + tflite::micro::GetTensorData(filter), + tflite::micro::GetTensorShape(bias), + tflite::micro::GetTensorData(bias), + tflite::micro::GetTensorShape(output), + tflite::micro::GetTensorData(output)); + return kTfLiteOk; +} + +} // namespace tflite +#endif // defined(HIFI4) || defined(HIFI5) diff --git a/tensorflow/lite/micro/kernels/xtensa/conv_int16_reference.cc b/tensorflow/lite/micro/kernels/xtensa/conv_int16_reference.cc new file mode 100644 index 0000000..0d3c4a3 --- /dev/null +++ b/tensorflow/lite/micro/kernels/xtensa/conv_int16_reference.cc @@ -0,0 +1,76 @@ +/* Copyright 2021 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/ + +#include "tensorflow/lite/c/builtin_op_data.h" +#include "tensorflow/lite/c/common.h" +#include "tensorflow/lite/kernels/internal/common.h" +#include "tensorflow/lite/kernels/internal/quantization_util.h" +#include "tensorflow/lite/kernels/internal/reference/integer_ops/conv.h" +#include "tensorflow/lite/kernels/internal/tensor_ctypes.h" +#include "tensorflow/lite/kernels/kernel_util.h" +#include "tensorflow/lite/kernels/padding.h" +#include "tensorflow/lite/micro/kernels/conv.h" +#include "tensorflow/lite/micro/kernels/kernel_util.h" + +namespace tflite { +namespace { + +void* Init(TfLiteContext* context, const char* buffer, size_t length) { + TFLITE_DCHECK(context->AllocatePersistentBuffer != nullptr); + return context->AllocatePersistentBuffer(context, sizeof(OpDataConv)); +} + +} // namespace. + +TfLiteStatus ConvReferenceEvalInt16(TfLiteContext* context, TfLiteNode* node) { + TFLITE_DCHECK(node->user_data != nullptr); + TFLITE_DCHECK(node->builtin_data != nullptr); + const auto& params = + *(reinterpret_cast(node->builtin_data)); + const auto& op_data = *(reinterpret_cast(node->user_data)); + + TfLiteEvalTensor* output = + tflite::micro::GetEvalOutput(context, node, kConvOutputTensor); + const TfLiteEvalTensor* input = + tflite::micro::GetEvalInput(context, node, kConvInputTensor); + const TfLiteEvalTensor* filter = + tflite::micro::GetEvalInput(context, node, kConvWeightsTensor); + const TfLiteEvalTensor* bias = + (NumInputs(node) == 3) + ? 
tflite::micro::GetEvalInput(context, node, kConvBiasTensor) + : nullptr; + + reference_integer_ops::ConvPerChannel( + ConvParamsQuantized(params, op_data), + op_data.per_channel_output_multiplier, op_data.per_channel_output_shift, + tflite::micro::GetTensorShape(input), + tflite::micro::GetTensorData(input), + tflite::micro::GetTensorShape(filter), + tflite::micro::GetTensorData(filter), + tflite::micro::GetTensorShape(bias), + tflite::micro::GetTensorData(bias), + tflite::micro::GetTensorShape(output), + tflite::micro::GetTensorData(output)); + return kTfLiteOk; +} + +// TODO(b/189981943): This variant can be used for a smaller binary +// since the optimized conv implementation currently adds a lot to +// the binary size (~30KB to text section). +TFLMRegistration Register_CONV_2D_INT16REF() { + return tflite::micro::RegisterOp(Init, ConvPrepare, ConvReferenceEvalInt16); +} + +} // namespace tflite diff --git a/tensorflow/lite/micro/kernels/xtensa/conv_int8_reference.cc b/tensorflow/lite/micro/kernels/xtensa/conv_int8_reference.cc new file mode 100644 index 0000000..80a42d9 --- /dev/null +++ b/tensorflow/lite/micro/kernels/xtensa/conv_int8_reference.cc @@ -0,0 +1,76 @@ +/* Copyright 2021 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/ + +#include "tensorflow/lite/c/builtin_op_data.h" +#include "tensorflow/lite/c/common.h" +#include "tensorflow/lite/kernels/internal/common.h" +#include "tensorflow/lite/kernels/internal/quantization_util.h" +#include "tensorflow/lite/kernels/internal/reference/integer_ops/conv.h" +#include "tensorflow/lite/kernels/internal/tensor_ctypes.h" +#include "tensorflow/lite/kernels/kernel_util.h" +#include "tensorflow/lite/kernels/padding.h" +#include "tensorflow/lite/micro/kernels/conv.h" +#include "tensorflow/lite/micro/kernels/kernel_util.h" + +namespace tflite { +namespace { + +void* Init(TfLiteContext* context, const char* buffer, size_t length) { + TFLITE_DCHECK(context->AllocatePersistentBuffer != nullptr); + return context->AllocatePersistentBuffer(context, sizeof(OpDataConv)); +} + +} // namespace. + +TfLiteStatus ConvReferenceEvalInt8(TfLiteContext* context, TfLiteNode* node) { + TFLITE_DCHECK(node->user_data != nullptr); + TFLITE_DCHECK(node->builtin_data != nullptr); + const auto& params = + *(reinterpret_cast(node->builtin_data)); + const auto& op_data = *(reinterpret_cast(node->user_data)); + + TfLiteEvalTensor* output = + tflite::micro::GetEvalOutput(context, node, kConvOutputTensor); + const TfLiteEvalTensor* input = + tflite::micro::GetEvalInput(context, node, kConvInputTensor); + const TfLiteEvalTensor* filter = + tflite::micro::GetEvalInput(context, node, kConvWeightsTensor); + const TfLiteEvalTensor* bias = + (NumInputs(node) == 3) + ? 
tflite::micro::GetEvalInput(context, node, kConvBiasTensor) + : nullptr; + + reference_integer_ops::ConvPerChannel( + ConvParamsQuantized(params, op_data), + op_data.per_channel_output_multiplier, op_data.per_channel_output_shift, + tflite::micro::GetTensorShape(input), + tflite::micro::GetTensorData(input), + tflite::micro::GetTensorShape(filter), + tflite::micro::GetTensorData(filter), + tflite::micro::GetTensorShape(bias), + tflite::micro::GetTensorData(bias), + tflite::micro::GetTensorShape(output), + tflite::micro::GetTensorData(output)); + return kTfLiteOk; +} + +// TODO(b/189981943): This variant can be used for a smaller binary +// since the optimized conv implementation currently adds a lot to +// the binary size (~30KB to text section). +TFLMRegistration Register_CONV_2D_INT8REF() { + return tflite::micro::RegisterOp(Init, ConvPrepare, ConvReferenceEvalInt8); +} + +} // namespace tflite diff --git a/tensorflow/lite/micro/kernels/xtensa/conv_vision.cc b/tensorflow/lite/micro/kernels/xtensa/conv_vision.cc new file mode 100644 index 0000000..e4f0d49 --- /dev/null +++ b/tensorflow/lite/micro/kernels/xtensa/conv_vision.cc @@ -0,0 +1,176 @@ +/* Copyright 2021 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/ + +#if defined(VISION_P6) + +#include + +#include "tensorflow/lite/c/builtin_op_data.h" +#include "tensorflow/lite/c/common.h" +#include "tensorflow/lite/kernels/internal/common.h" +#include "tensorflow/lite/kernels/internal/portable_tensor_utils.h" +#include "tensorflow/lite/kernels/internal/tensor_ctypes.h" +#include "tensorflow/lite/kernels/kernel_util.h" +#include "tensorflow/lite/micro/kernels/conv.h" +#include "tensorflow/lite/micro/kernels/kernel_util.h" +#include "tensorflow/lite/micro/kernels/xtensa/xtensa.h" +#include "tensorflow/lite/micro/kernels/xtensa/xtensa_conv.h" +#include "tensorflow/lite/micro/micro_arena_constants.h" + +namespace tflite { + +TfLiteStatus ConvPrepareVision(TfLiteContext* context, TfLiteNode* node) { + TFLITE_DCHECK(node->user_data != nullptr); + TFLITE_DCHECK(node->builtin_data != nullptr); + + XtensaConvOpData* data = reinterpret_cast(node->user_data); + const auto& params = + *(reinterpret_cast(node->builtin_data)); + + MicroContext* micro_context = GetMicroContext(context); + TfLiteTensor* output = + micro_context->AllocateTempOutputTensor(node, kConvOutputTensor); + TF_LITE_ENSURE(context, output != nullptr); + TfLiteTensor* input = + micro_context->AllocateTempInputTensor(node, kConvInputTensor); + TF_LITE_ENSURE(context, input != nullptr); + TfLiteTensor* filter = + micro_context->AllocateTempInputTensor(node, kConvWeightsTensor); + TF_LITE_ENSURE(context, filter != nullptr); + TfLiteTensor* bias = + micro_context->AllocateTempInputTensor(node, kConvBiasTensor); + TF_LITE_ENSURE(context, bias != nullptr); + + const uint32_t input_height = SizeOfDimension(input, 1); + const uint32_t input_width = SizeOfDimension(input, 2); + + const uint32_t output_height = SizeOfDimension(output, 1); + const uint32_t output_width = SizeOfDimension(output, 2); + + const uint32_t filter_height = SizeOfDimension(filter, 1); + const uint32_t filter_width = 
SizeOfDimension(filter, 2); + + // Dynamically allocate per-channel quantization parameters. + const int num_channels = SizeOfDimension(filter, kConvQuantizedDimension); + data->per_channel_output_shift_int8 = static_cast( + context->AllocatePersistentBuffer(context, num_channels)); + + for (int i = 0; i < num_channels; i++) { + data->per_channel_output_shift_int8[i] = static_cast( + -1 * data->reference_op_data.per_channel_output_shift[i]); + } + + uint32_t context_size = 0; + uint32_t status = xiConvGetMemReqd_Context(&context_size); + if (!status && context_size) { + void* context_data = + context->AllocatePersistentBuffer(context, context_size); + if (context_data == nullptr) { + return kTfLiteError; + } + data->p_context = reinterpret_cast(context_data); + data->context_size = context_size; + } + + const uint32_t input_depth = SizeOfDimension(input, 3); + const uint32_t output_depth = SizeOfDimension(output, 3); + TfLiteTensor filter_int8; + + if (filter->type == kTfLiteInt4) { + const size_t bytes_unpacked = filter->bytes * 2; + filter_int8.data.data = micro_context->AllocateTempBuffer( + bytes_unpacked, tflite::MicroArenaBufferAlignment()); + filter_int8.dims = filter->dims; + filter_int8.type = kTfLiteInt8; + tflite::tensor_utils::UnpackDenseInt4IntoInt8( + GetTensorData(filter), GetTensorShape(filter).FlatSize(), + GetTensorData(&filter_int8)); + + } else { + filter_int8 = *filter; + } + + status = xiConvSetContext( + data->p_context, data->context_size, input_depth, input_width, + input_height, output_depth, output_width, output_height, filter_width, + filter_height, params.stride_width, input->params.zero_point, + filter->params.zero_point, output->params.zero_point, + data->reference_op_data.output_multiplier, + data->reference_op_data.output_shift, + data->reference_op_data.output_activation_min, + data->reference_op_data.output_activation_max, + (uint8_t*)GetTensorData(&filter_int8), + data->reference_op_data.padding.width, + 
data->reference_op_data.padding.height); + if (status) { + return kTfLiteError; + } + + uint32_t coefficient_size = 0; + status = xiConvGetMemReqd_Coeff(data->p_context, data->context_size, + &coefficient_size); + if (status || coefficient_size == 0) { + return kTfLiteError; + } + + void* coefficient_data = + context->AllocatePersistentBuffer(context, coefficient_size); + if (coefficient_data == nullptr) { + return kTfLiteError; + } + data->reorder_coefficient_bias = reinterpret_cast(coefficient_data); + data->reorder_coefficient_bias_size = coefficient_size; + + status = xiConvDoCoeffReorder( + data->p_context, data->context_size, + reinterpret_cast(data->reorder_coefficient_bias), + data->reorder_coefficient_bias_size, + const_cast(GetTensorData(&filter_int8)), + const_cast(GetTensorData(bias))); + if (status) { + return kTfLiteError; + } + if (filter->type == kTfLiteInt4) { + micro_context->DeallocateTempBuffer(GetTensorData(&filter_int8)); + } + micro_context->DeallocateTempTfLiteTensor(output); + micro_context->DeallocateTempTfLiteTensor(input); + micro_context->DeallocateTempTfLiteTensor(filter); + micro_context->DeallocateTempTfLiteTensor(bias); + + return kTfLiteOk; +} + +TfLiteStatus ConvEvalVision(TfLiteContext* context, TfLiteNode* node, + const TfLiteConvParams& params, + const XtensaConvOpData& data, + const TfLiteEvalTensor* input, + const TfLiteEvalTensor* filter, + const TfLiteEvalTensor* bias, + TfLiteEvalTensor* output) { + const uint32_t input_size = NumElements(input->dims); + const uint32_t output_size = NumElements(output->dims); + const int num_channels = filter->dims->data[kConvQuantizedDimension]; + + xiConv(data.p_context, data.context_size, + const_cast(tflite::micro::GetTensorData(input)), + input_size, tflite::micro::GetTensorData(output), output_size, + data.reorder_coefficient_bias, data.reorder_coefficient_bias_size, + data.reference_op_data.per_channel_output_multiplier, + data.per_channel_output_shift_int8, num_channels); + return 
kTfLiteOk; +} +} // namespace tflite +#endif // defined(VISION_P6) diff --git a/tensorflow/lite/micro/kernels/xtensa/depthwise_conv.cc b/tensorflow/lite/micro/kernels/xtensa/depthwise_conv.cc new file mode 100644 index 0000000..02ea871 --- /dev/null +++ b/tensorflow/lite/micro/kernels/xtensa/depthwise_conv.cc @@ -0,0 +1,131 @@ + +/* Copyright 2021 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#include "tensorflow/lite/micro/kernels/depthwise_conv.h" + +#include "tensorflow/lite/c/builtin_op_data.h" +#include "tensorflow/lite/c/common.h" +#include "tensorflow/lite/kernels/internal/common.h" +#include "tensorflow/lite/kernels/internal/quantization_util.h" +#include "tensorflow/lite/kernels/internal/reference/depthwiseconv_float.h" +#include "tensorflow/lite/kernels/internal/reference/depthwiseconv_uint8.h" +#include "tensorflow/lite/kernels/internal/reference/integer_ops/depthwise_conv.h" +#include "tensorflow/lite/kernels/internal/tensor_ctypes.h" +#include "tensorflow/lite/kernels/kernel_util.h" +#include "tensorflow/lite/kernels/padding.h" +#include "tensorflow/lite/micro/kernels/kernel_util.h" +#include "tensorflow/lite/micro/kernels/xtensa/xtensa.h" +#include "tensorflow/lite/micro/kernels/xtensa/xtensa_depthwise_conv.h" +#include "tensorflow/lite/micro/micro_log.h" + +namespace tflite { +namespace { + +void* Init(TfLiteContext* context, const char* 
buffer, size_t length) { + TFLITE_DCHECK(context->AllocatePersistentBuffer != nullptr); + void* data = context->AllocatePersistentBuffer( + context, sizeof(XtensaDepthwiseConvOpData)); +#if defined(VISION_P6) + if (InitXtensaContext()) { + return nullptr; + } +#endif // defined(VISION_P6) + return data; +} + +TfLiteStatus Prepare(TfLiteContext* context, TfLiteNode* node) { + TF_LITE_ENSURE_OK(context, DepthwiseConvPrepare(context, node)); + +#if defined(HIFI4) || defined(HIFI5) + TF_LITE_ENSURE_OK(context, DepthwiseConvPrepareHifi(context, node)); +#endif // defined(HIFI4) || defined(HIFI5) + +#if defined(VISION_P6) + TF_LITE_ENSURE_OK(context, DepthwiseConvPrepareVision(context, node)); +#endif // VISION_P6 + return kTfLiteOk; +} + +TfLiteStatus Eval(TfLiteContext* context, TfLiteNode* node) { + TFLITE_DCHECK(node->user_data != nullptr); + TFLITE_DCHECK(node->builtin_data != nullptr); + const auto& params = + *(reinterpret_cast(node->builtin_data)); + const auto& op_data = + *(reinterpret_cast(node->user_data)); + + TfLiteEvalTensor* output = + tflite::micro::GetEvalOutput(context, node, kDepthwiseConvOutputTensor); + const TfLiteEvalTensor* input = + tflite::micro::GetEvalInput(context, node, kDepthwiseConvInputTensor); + const TfLiteEvalTensor* filter = + tflite::micro::GetEvalInput(context, node, kDepthwiseConvWeightsTensor); + const TfLiteEvalTensor* bias = + (NumInputs(node) == 3) + ? tflite::micro::GetEvalInput(context, node, kDepthwiseConvBiasTensor) + : nullptr; + + TfLiteEvalTensor filter_int8 = tflite::micro::MakeUnpackedInt4Tensor( + context, op_data.reference_op_data.filter_buffer_index, filter); + + switch (input->type) { // Already know in/out types are same. 
+ case kTfLiteInt8: { + switch (filter_int8.type) { + case kTfLiteInt8: { +#if defined(HIFI4) || defined(HIFI5) + DepthwiseConvEvalHifi(context, node, params, op_data, input, + &filter_int8, bias, output); +#elif defined(VISION_P6) + DepthwiseConvEvalVision(context, node, params, op_data, input, + &filter_int8, bias, output); +#else + reference_integer_ops::DepthwiseConvPerChannel( + DepthwiseConvParamsQuantized(params, op_data.reference_op_data), + op_data.reference_op_data.per_channel_output_multiplier, + op_data.reference_op_data.per_channel_output_shift, + tflite::micro::GetTensorShape(input), + tflite::micro::GetTensorData(input), + tflite::micro::GetTensorShape(filter), + tflite::micro::GetTensorData(&filter_int8), + tflite::micro::GetTensorShape(bias), + tflite::micro::GetOptionalTensorData(bias), + tflite::micro::GetTensorShape(output), + tflite::micro::GetTensorData(output)); +#endif // defined(HIFI4) || defined(HIFI5) + break; + } + default: + MicroPrintf("Filter type %s (%d) not supported.", + TfLiteTypeGetName(filter->type), filter->type); + return kTfLiteError; + } + break; + } + default: + MicroPrintf("Type %s (%d) not supported.", TfLiteTypeGetName(input->type), + input->type); + return kTfLiteError; + } + return kTfLiteOk; +} + +} // namespace + +TFLMRegistration Register_DEPTHWISE_CONV_2D() { + return tflite::micro::RegisterOp(Init, Prepare, Eval); +} + +} // namespace tflite diff --git a/tensorflow/lite/micro/kernels/xtensa/depthwise_conv_hifi.cc b/tensorflow/lite/micro/kernels/xtensa/depthwise_conv_hifi.cc new file mode 100644 index 0000000..05dab48 --- /dev/null +++ b/tensorflow/lite/micro/kernels/xtensa/depthwise_conv_hifi.cc @@ -0,0 +1,190 @@ +/* Copyright 2021 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. 
+You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#include "tensorflow/lite/c/builtin_op_data.h" +#include "tensorflow/lite/c/common.h" +#include "tensorflow/lite/kernels/internal/common.h" +#include "tensorflow/lite/kernels/internal/quantization_util.h" +#include "tensorflow/lite/kernels/internal/reference/depthwiseconv_float.h" +#include "tensorflow/lite/kernels/internal/reference/depthwiseconv_uint8.h" +#include "tensorflow/lite/kernels/internal/reference/integer_ops/depthwise_conv.h" +#include "tensorflow/lite/kernels/internal/tensor_ctypes.h" +#include "tensorflow/lite/kernels/kernel_util.h" +#include "tensorflow/lite/kernels/padding.h" +#include "tensorflow/lite/micro/kernels/depthwise_conv.h" +#include "tensorflow/lite/micro/kernels/kernel_util.h" +#include "tensorflow/lite/micro/kernels/xtensa/xtensa.h" +#include "tensorflow/lite/micro/kernels/xtensa/xtensa_depthwise_conv.h" + +#if defined(HIFI4) || defined(HIFI5) +namespace tflite { +TfLiteStatus DepthwiseConvPrepareHifi(TfLiteContext* context, + TfLiteNode* node) { + XtensaDepthwiseConvOpData* data = + static_cast(node->user_data); + const auto& params = + *(static_cast(node->builtin_data)); + + MicroContext* micro_context = GetMicroContext(context); + + // Calculate scratch memory requirements and request scratch buffer + TfLiteTensor* output = + micro_context->AllocateTempOutputTensor(node, kConvOutputTensor); + TF_LITE_ENSURE(context, output != nullptr); + TfLiteTensor* input = + micro_context->AllocateTempInputTensor(node, kConvInputTensor); + TF_LITE_ENSURE(context, input 
!= nullptr); + TfLiteTensor* filter = + micro_context->AllocateTempInputTensor(node, kConvWeightsTensor); + TF_LITE_ENSURE(context, filter != nullptr); + + TF_LITE_ENSURE_EQ(context, input->type, kTfLiteInt8); + + const RuntimeShape& input_shape = GetTensorShape(input); + const RuntimeShape& filter_shape = GetTensorShape(filter); + const RuntimeShape& output_shape = GetTensorShape(output); + + const int input_height = input_shape.Dims(1); + const int input_width = input_shape.Dims(2); + const int input_depth = input_shape.Dims(3); + const int filter_height = filter_shape.Dims(1); + const int filter_width = filter_shape.Dims(2); + const int output_height = output_shape.Dims(1); + const int output_width = output_shape.Dims(2); + + const int depth_multiplier = params.depth_multiplier; + const int stride_height = params.stride_height; + const int stride_width = params.stride_width; + const int pad_width = data->reference_op_data.padding.width; + const int pad_height = data->reference_op_data.padding.height; + + int required_scratch = 0; + // Dilation is currently not supported on HiFi 4 NN Library + if ((params.dilation_width_factor == 1) && + (params.dilation_height_factor == 1)) { + required_scratch = xa_nn_conv2d_depthwise_getsize( + input_height, input_width, input_depth, filter_height, filter_width, + depth_multiplier, stride_width, stride_height, pad_width, pad_height, + output_height, output_width, PREC_ASYM8S, 0 /* NHWC */); + TF_LITE_ENSURE(context, required_scratch > 0); + } + TF_LITE_ENSURE_OK( + context, context->RequestScratchBufferInArena( + context, required_scratch, &data->scratch_tensor_index)); + + micro_context->DeallocateTempTfLiteTensor(input); + micro_context->DeallocateTempTfLiteTensor(filter); + micro_context->DeallocateTempTfLiteTensor(output); + return kTfLiteOk; +} + +TfLiteStatus DepthwiseConvEvalHifi(TfLiteContext* context, TfLiteNode* node, + const TfLiteDepthwiseConvParams& params, + const XtensaDepthwiseConvOpData& data, + const 
TfLiteEvalTensor* input, + const TfLiteEvalTensor* filter, + const TfLiteEvalTensor* bias, + TfLiteEvalTensor* output) { + // If dilation is not required use the optimized NN Library kernel. + // Otherwise call the reference implementation. + if ((params.dilation_width_factor == 1) && + (params.dilation_height_factor == 1)) { + const int stride_width = params.stride_width; + const int stride_height = params.stride_height; + const int pad_width = data.reference_op_data.padding.width; + const int pad_height = data.reference_op_data.padding.height; + const int depth_multiplier = params.depth_multiplier; + const int32_t output_activation_min = + data.reference_op_data.output_activation_min; + const int32_t output_activation_max = + data.reference_op_data.output_activation_max; + TFLITE_DCHECK_LE(output_activation_min, output_activation_max); + + const RuntimeShape& input_shape = tflite::micro::GetTensorShape(input); + const RuntimeShape& filter_shape = tflite::micro::GetTensorShape(filter); + const RuntimeShape& output_shape = tflite::micro::GetTensorShape(output); + const RuntimeShape& bias_shape = tflite::micro::GetTensorShape(bias); + TFLITE_DCHECK_EQ(input_shape.DimensionsCount(), 4); + TFLITE_DCHECK_EQ(filter_shape.DimensionsCount(), 4); + TFLITE_DCHECK_EQ(output_shape.DimensionsCount(), 4); + + const int batches = MatchingDim(input_shape, 0, output_shape, 0); + const int output_depth = MatchingDim(filter_shape, 3, output_shape, 3); + const int input_height = input_shape.Dims(1); + const int input_width = input_shape.Dims(2); + const int input_depth = input_shape.Dims(3); + const int filter_height = filter_shape.Dims(1); + const int filter_width = filter_shape.Dims(2); + const int output_height = output_shape.Dims(1); + const int output_width = output_shape.Dims(2); + TFLITE_DCHECK_EQ(output_depth, input_depth * depth_multiplier); + TFLITE_DCHECK_EQ(bias_shape.FlatSize(), output_depth); + + const int8_t* input_data = tflite::micro::GetTensorData(input); + const 
int8_t* filter_data = tflite::micro::GetTensorData(filter); + const int32_t* bias_data = tflite::micro::GetTensorData(bias); + int8_t* output_data = tflite::micro::GetTensorData(output); + + int32_t input_data_format = 0; + int32_t output_data_format = 0; + + uint8_t* p_scratch = static_cast( + context->GetScratchBuffer(context, data.scratch_tensor_index)); + + for (int i = 0; i < batches; i++) { + TF_LITE_ENSURE_EQ( + context, + xa_nn_conv2d_depthwise_per_chan_sym8sxasym8s( + &output_data[i * output_height * output_width * output_depth], + filter_data, + &input_data[i * input_height * input_width * input_depth], + bias_data, input_height, input_width, input_depth, filter_height, + filter_width, depth_multiplier, stride_width, stride_height, + pad_width, pad_height, output_height, output_width, + -data.reference_op_data.input_zero_point, + data.reference_op_data.per_channel_output_multiplier, + data.reference_op_data.per_channel_output_shift, + data.reference_op_data.output_zero_point, input_data_format, + output_data_format, p_scratch), + 0); + } + + int out_length = batches * output_height * output_width * output_depth; + TF_LITE_ENSURE_EQ(context, + xa_nn_vec_activation_min_max_8_8( + output_data, output_data, output_activation_min, + output_activation_max, out_length), + 0); + + return kTfLiteOk; + } + + reference_integer_ops::DepthwiseConvPerChannel( + DepthwiseConvParamsQuantized(params, data.reference_op_data), + data.reference_op_data.per_channel_output_multiplier, + data.reference_op_data.per_channel_output_shift, + tflite::micro::GetTensorShape(input), + tflite::micro::GetTensorData(input), + tflite::micro::GetTensorShape(filter), + tflite::micro::GetTensorData(filter), + tflite::micro::GetTensorShape(bias), + tflite::micro::GetTensorData(bias), + tflite::micro::GetTensorShape(output), + tflite::micro::GetTensorData(output)); + + return kTfLiteOk; +} +} // namespace tflite +#endif // defined(HIFI4) || defined(HIFI5) diff --git 
a/tensorflow/lite/micro/kernels/xtensa/depthwise_conv_vision.cc b/tensorflow/lite/micro/kernels/xtensa/depthwise_conv_vision.cc new file mode 100644 index 0000000..35fa8cf --- /dev/null +++ b/tensorflow/lite/micro/kernels/xtensa/depthwise_conv_vision.cc @@ -0,0 +1,180 @@ +/* Copyright 2021 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#if defined(VISION_P6) + +#include + +#include "tensorflow/lite/c/builtin_op_data.h" +#include "tensorflow/lite/c/common.h" +#include "tensorflow/lite/kernels/internal/common.h" +#include "tensorflow/lite/kernels/internal/portable_tensor_utils.h" +#include "tensorflow/lite/kernels/internal/tensor_ctypes.h" +#include "tensorflow/lite/kernels/kernel_util.h" +#include "tensorflow/lite/micro/kernels/depthwise_conv.h" +#include "tensorflow/lite/micro/kernels/kernel_util.h" +#include "tensorflow/lite/micro/kernels/xtensa/xtensa.h" +#include "tensorflow/lite/micro/kernels/xtensa/xtensa_depthwise_conv.h" +#include "tensorflow/lite/micro/micro_arena_constants.h" + +namespace tflite { + +TfLiteStatus DepthwiseConvPrepareVision(TfLiteContext* context, + TfLiteNode* node) { + TFLITE_DCHECK(node->user_data != nullptr); + TFLITE_DCHECK(node->builtin_data != nullptr); + + XtensaDepthwiseConvOpData* data = + reinterpret_cast(node->user_data); + const auto& params = + *(reinterpret_cast(node->builtin_data)); + + MicroContext* micro_context = 
GetMicroContext(context); + TfLiteTensor* output = + micro_context->AllocateTempOutputTensor(node, kDepthwiseConvOutputTensor); + TF_LITE_ENSURE(context, output != nullptr); + TfLiteTensor* input = + micro_context->AllocateTempInputTensor(node, kDepthwiseConvInputTensor); + TF_LITE_ENSURE(context, input != nullptr); + TfLiteTensor* filter = + micro_context->AllocateTempInputTensor(node, kDepthwiseConvWeightsTensor); + TF_LITE_ENSURE(context, filter != nullptr); + TfLiteTensor* bias = + micro_context->AllocateTempInputTensor(node, kDepthwiseConvBiasTensor); + TF_LITE_ENSURE(context, bias != nullptr); + + // Dynamically allocate per-channel quantization parameters. + const int num_channels = + SizeOfDimension(filter, kDepthwiseConvQuantizedDimension); + data->per_channel_output_shift_int8 = static_cast<int8_t*>( + context->AllocatePersistentBuffer(context, num_channels)); + + for (int i = 0; i < num_channels; i++) { + data->per_channel_output_shift_int8[i] = static_cast<int8_t>( + -1 * data->reference_op_data.per_channel_output_shift[i]); + } + + uint32_t context_size = 0; + uint32_t status = xiDepthwiseConvGetMemReqd_Context(&context_size); + if (!status && context_size) { + void* context_data = + context->AllocatePersistentBuffer(context, context_size); + if (context_data == nullptr) { + return kTfLiteError; + } + data->p_context = (uint8_t*)context_data; + data->context_size = context_size; + } + + const uint32_t input_height = SizeOfDimension(input, 1); + const uint32_t input_width = SizeOfDimension(input, 2); + const uint32_t input_depth = SizeOfDimension(input, 3); + + const uint32_t output_height = SizeOfDimension(output, 1); + const uint32_t output_width = SizeOfDimension(output, 2); + const uint32_t output_depth = SizeOfDimension(output, 3); + + const uint32_t filter_height = SizeOfDimension(filter, 1); + const uint32_t filter_width = SizeOfDimension(filter, 2); + + status = xiDepthwiseConvSetContext( + data->p_context, data->context_size, input_depth, input_width, +
input_height, output_depth, output_width, output_height, filter_width, + filter_height, params.stride_width, input->params.zero_point, + filter->params.zero_point, output->params.zero_point, + data->reference_op_data.output_multiplier, + data->reference_op_data.output_shift, + data->reference_op_data.output_activation_min, + data->reference_op_data.output_activation_max, + data->reference_op_data.padding.width, + data->reference_op_data.padding.height); + if (status) { + return kTfLiteError; + } + + uint32_t coefficent_size = 0; + status = xiDepthwiseConvGetMemReqd_Coeff(data->p_context, data->context_size, + &coefficent_size); + if (status || coefficent_size == 0) { + return kTfLiteError; + } + + void* coeff_data = + context->AllocatePersistentBuffer(context, coefficent_size); + if (coeff_data == nullptr) { + return kTfLiteError; + } + data->reorder_coefficient_bias = reinterpret_cast(coeff_data); + data->reorder_coefficient_bias_size = coefficent_size; + + TfLiteTensor filter_int8; + + if (filter->type == kTfLiteInt4) { + const size_t bytes_unpacked = filter->bytes * 2; + filter_int8.data.data = micro_context->AllocateTempBuffer( + bytes_unpacked, tflite::MicroArenaBufferAlignment()); + filter_int8.dims = filter->dims; + filter_int8.type = kTfLiteInt8; + tflite::tensor_utils::UnpackDenseInt4IntoInt8( + GetTensorData(filter), GetTensorShape(filter).FlatSize(), + GetTensorData(&filter_int8)); + + } else { + filter_int8 = *filter; + } + + status = xiDepthwiseConvDoCoeffReorder( + data->p_context, data->context_size, + reinterpret_cast(data->reorder_coefficient_bias), + data->reorder_coefficient_bias_size, + const_cast(GetTensorData(&filter_int8)), + const_cast(GetTensorData(bias))); + if (status) { + return kTfLiteError; + } + if (filter->type == kTfLiteInt4) { + micro_context->DeallocateTempBuffer(GetTensorData(&filter_int8)); + } + micro_context->DeallocateTempTfLiteTensor(output); + micro_context->DeallocateTempTfLiteTensor(input); + 
micro_context->DeallocateTempTfLiteTensor(filter); + micro_context->DeallocateTempTfLiteTensor(bias); + + return kTfLiteOk; +} + +TfLiteStatus DepthwiseConvEvalVision(TfLiteContext* context, TfLiteNode* node, + const TfLiteDepthwiseConvParams& params, + const XtensaDepthwiseConvOpData& data, + const TfLiteEvalTensor* input, + const TfLiteEvalTensor* filter, + const TfLiteEvalTensor* bias, + TfLiteEvalTensor* output) { + const uint32_t input_size = NumElements(input->dims); + const uint32_t output_size = NumElements(output->dims); + const int num_channels = filter->dims->data[kDepthwiseConvQuantizedDimension]; + xiDepthwiseConv( + data.p_context, data.context_size, + const_cast(tflite::micro::GetTensorData(input)), + input_size, tflite::micro::GetTensorData(output), output_size, + data.reorder_coefficient_bias, data.reorder_coefficient_bias_size, + data.reference_op_data.per_channel_output_multiplier, + data.per_channel_output_shift_int8, num_channels, + data.reference_op_data.padding.width, + data.reference_op_data.padding.height); + return kTfLiteOk; +} +} // namespace tflite +#endif // defined(VISION_P6) diff --git a/tensorflow/lite/micro/kernels/xtensa/fully_connected.cc b/tensorflow/lite/micro/kernels/xtensa/fully_connected.cc new file mode 100644 index 0000000..1395fc3 --- /dev/null +++ b/tensorflow/lite/micro/kernels/xtensa/fully_connected.cc @@ -0,0 +1,128 @@ +/* Copyright 2023 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/ + +#include "tensorflow/lite/micro/kernels/fully_connected.h" + +#include "tensorflow/lite/c/builtin_op_data.h" +#include "tensorflow/lite/c/common.h" +#include "tensorflow/lite/kernels/internal/common.h" +#include "tensorflow/lite/kernels/internal/quantization_util.h" +#include "tensorflow/lite/kernels/internal/reference/fully_connected.h" +#include "tensorflow/lite/kernels/internal/reference/integer_ops/fully_connected.h" +#include "tensorflow/lite/kernels/internal/tensor_ctypes.h" +#include "tensorflow/lite/kernels/kernel_util.h" +#include "tensorflow/lite/micro/kernels/kernel_util.h" +#include "tensorflow/lite/micro/kernels/xtensa/xtensa_fully_connected.h" +#include "tensorflow/lite/micro/micro_log.h" + +namespace tflite { + +namespace { + +TfLiteStatus Eval(TfLiteContext* context, TfLiteNode* node) { + TFLITE_DCHECK(node->builtin_data != nullptr); + const auto* params = + static_cast(node->builtin_data); + + const TfLiteEvalTensor* input = + tflite::micro::GetEvalInput(context, node, kFullyConnectedInputTensor); + const TfLiteEvalTensor* filter = + tflite::micro::GetEvalInput(context, node, kFullyConnectedWeightsTensor); + const TfLiteEvalTensor* bias = + tflite::micro::GetEvalInput(context, node, kFullyConnectedBiasTensor); + TfLiteEvalTensor* output = + tflite::micro::GetEvalOutput(context, node, kFullyConnectedOutputTensor); + + TFLITE_DCHECK(node->user_data != nullptr); + + const auto& data = + *(static_cast(node->user_data)); + + // Checks in Prepare ensure input, output and filter types are all the same. 
+ switch (input->type) { + case kTfLiteFloat32: { + tflite::reference_ops::FullyConnected( + FullyConnectedParamsFloat(params->activation), + tflite::micro::GetTensorShape(input), + tflite::micro::GetTensorData<float>(input), + tflite::micro::GetTensorShape(filter), + tflite::micro::GetTensorData<float>(filter), + tflite::micro::GetTensorShape(bias), + tflite::micro::GetOptionalTensorData<float>(bias), + tflite::micro::GetTensorShape(output), + tflite::micro::GetTensorData<float>(output)); + break; + } + + case kTfLiteInt8: { + switch (filter->type) { + case kTfLiteInt8: { + return XtensaEvalFullyConnectedQuantizedInt8( + context, node, data, input, filter, bias, output); + } + case kTfLiteInt4: { + return XtensaEvalFullyConnectedQuantizedInt8( + context, node, data, input, filter, bias, output); + } + default: { + MicroPrintf("Filter type %s (%d) not supported.", + TfLiteTypeGetName(filter->type), filter->type); + return kTfLiteError; + } + } + break; + } + + case kTfLiteInt16: { + switch (filter->type) { + case kTfLiteInt8: { + tflite::reference_integer_ops::FullyConnected( + FullyConnectedParamsQuantized(data), + tflite::micro::GetTensorShape(input), + tflite::micro::GetTensorData<int16_t>(input), + tflite::micro::GetTensorShape(filter), + tflite::micro::GetTensorData<int8_t>(filter), + tflite::micro::GetTensorShape(bias), + tflite::micro::GetOptionalTensorData<int64_t>(bias), + tflite::micro::GetTensorShape(output), + tflite::micro::GetTensorData<int16_t>(output)); + break; + } + default: { + MicroPrintf("Filter type %s (%d) not supported.", + TfLiteTypeGetName(filter->type), filter->type); + return kTfLiteError; + } + } + break; + } + + default: { + MicroPrintf("Input type %s (%d) not supported.", + TfLiteTypeGetName(input->type), input->type); + return kTfLiteError; + } + } + return kTfLiteOk; +} + +} // namespace + +TFLMRegistration Register_FULLY_CONNECTED() { + return tflite::micro::RegisterOp(XtensaInitFullyConnected, + XtensaPrepareFullyConnected, Eval); +} + +} // namespace tflite diff --git
a/tensorflow/lite/micro/kernels/xtensa/fully_connected_common_xtensa.cc b/tensorflow/lite/micro/kernels/xtensa/fully_connected_common_xtensa.cc new file mode 100644 index 0000000..cf87c5f --- /dev/null +++ b/tensorflow/lite/micro/kernels/xtensa/fully_connected_common_xtensa.cc @@ -0,0 +1,136 @@ +/* Copyright 2023 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#include "tensorflow/lite/c/builtin_op_data.h" +#include "tensorflow/lite/c/common.h" +#include "tensorflow/lite/kernels/internal/common.h" +#include "tensorflow/lite/kernels/internal/quantization_util.h" +#include "tensorflow/lite/kernels/kernel_util.h" +#include "tensorflow/lite/micro/kernels/kernel_util.h" +#include "tensorflow/lite/micro/kernels/xtensa/xtensa.h" +#include "tensorflow/lite/micro/kernels/xtensa/xtensa_fully_connected.h" + +namespace tflite { + +void* XtensaInitFullyConnected(TfLiteContext* context, const char* buffer, + size_t length) { + TFLITE_DCHECK(context->AllocatePersistentBuffer != nullptr); +#if !defined(VISION_P6) + return context->AllocatePersistentBuffer(context, + sizeof(OpDataFullyConnected)); +#else + void* data = context->AllocatePersistentBuffer( + context, sizeof(XtensaFullyConnectedOpData)); +#if !defined(HIFIMINI) + if (InitXtensaContext()) { + return nullptr; + } +#endif + return data; +#endif // defined(VISION_P6) +} + +TfLiteStatus 
XtensaCalculateOpDataFullyConnected( + TfLiteContext* context, TfLiteFusedActivation activation, + TfLiteType data_type, const TfLiteTensor* input, const TfLiteTensor* filter, + const TfLiteTensor* bias, TfLiteTensor* output, + OpDataFullyConnected* data) { + if (data_type != kTfLiteFloat32) { + double real_multiplier = 0.0; + TF_LITE_ENSURE_STATUS(GetQuantizedConvolutionMultipler( + context, input, filter, bias, output, &real_multiplier)); +#if defined(HIFIMINI) + if (input->type == kTfLiteInt8) { + QuantizeMultiplierForInt24(real_multiplier, &data->output_multiplier, + &data->output_shift); + } else { + QuantizeMultiplier(real_multiplier, &data->output_multiplier, + &data->output_shift); + } +#else + QuantizeMultiplier(real_multiplier, &data->output_multiplier, + &data->output_shift); +#endif + + // Filter weights will always be symmetric quantized since we only support + // int8 quantization. See + // https://github.com/tensorflow/tensorflow/issues/44912 for additional + // context. + TFLITE_DCHECK(filter->params.zero_point == 0); + + data->input_zero_point = input->params.zero_point; + data->filter_zero_point = filter->params.zero_point; + data->output_zero_point = output->params.zero_point; + + return CalculateActivationRangeQuantized(context, activation, output, + &data->output_activation_min, + &data->output_activation_max); + } + return kTfLiteOk; +} + +TfLiteStatus XtensaPrepareFullyConnected(TfLiteContext* context, + TfLiteNode* node) { + TFLITE_DCHECK(node->user_data != nullptr); + TFLITE_DCHECK(node->builtin_data != nullptr); + + auto* data = static_cast(node->user_data); + const auto params = + static_cast(node->builtin_data); + + MicroContext* micro_context = GetMicroContext(context); + + TfLiteTensor* input = + micro_context->AllocateTempInputTensor(node, kFullyConnectedInputTensor); + TF_LITE_ENSURE(context, input != nullptr); + TfLiteTensor* filter = micro_context->AllocateTempInputTensor( + node, kFullyConnectedWeightsTensor); + 
TF_LITE_ENSURE(context, filter != nullptr); + TfLiteTensor* bias = + micro_context->AllocateTempInputTensor(node, kFullyConnectedBiasTensor); + TfLiteTensor* output = micro_context->AllocateTempOutputTensor( + node, kFullyConnectedOutputTensor); + TF_LITE_ENSURE(context, output != nullptr); + TF_LITE_ENSURE_TYPES_EQ(context, input->type, output->type); + + if (filter->type == kTfLiteInt4) { + int filter_size = + RuntimeShape(filter->dims->size, + reinterpret_cast(filter->dims->data)) + .FlatSize(); + context->RequestScratchBufferInArena(context, filter_size, + &data->filter_buffer_index); + } + + TFLITE_DCHECK_GE(GetTensorShape(output).DimensionsCount(), 1); + + TF_LITE_ENSURE_OK(context, XtensaCalculateOpDataFullyConnected( + context, params->activation, input->type, + input, filter, bias, output, data)); + + micro_context->DeallocateTempTfLiteTensor(input); + micro_context->DeallocateTempTfLiteTensor(filter); + if (bias != nullptr) { + micro_context->DeallocateTempTfLiteTensor(bias); + } + micro_context->DeallocateTempTfLiteTensor(output); +#if defined(VISION_P6) + TF_LITE_ENSURE_OK(context, FullyConnectedPrepareVision(context, node)); +#endif // defined(VISION_P6) + + return kTfLiteOk; +} + +} // namespace tflite diff --git a/tensorflow/lite/micro/kernels/xtensa/fully_connected_int8.cc b/tensorflow/lite/micro/kernels/xtensa/fully_connected_int8.cc new file mode 100644 index 0000000..b53afa4 --- /dev/null +++ b/tensorflow/lite/micro/kernels/xtensa/fully_connected_int8.cc @@ -0,0 +1,139 @@ +/* Copyright 2023 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. 
+You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#include "tensorflow/lite/c/builtin_op_data.h" +#include "tensorflow/lite/c/common.h" +#include "tensorflow/lite/kernels/internal/common.h" +#include "tensorflow/lite/kernels/internal/portable_tensor_utils.h" +#include "tensorflow/lite/kernels/internal/quantization_util.h" +#include "tensorflow/lite/kernels/internal/reference/integer_ops/fully_connected.h" +#include "tensorflow/lite/kernels/internal/tensor_ctypes.h" +#include "tensorflow/lite/kernels/kernel_util.h" +#include "tensorflow/lite/micro/kernels/kernel_util.h" +#include "tensorflow/lite/micro/kernels/xtensa/xtensa.h" +#include "tensorflow/lite/micro/kernels/xtensa/xtensa_fully_connected.h" + +namespace tflite { + +TfLiteStatus XtensaEvalFullyConnectedQuantizedInt8( + TfLiteContext* context, TfLiteNode* node, const OpDataFullyConnected& data, + const TfLiteEvalTensor* input, const TfLiteEvalTensor* filter, + const TfLiteEvalTensor* bias, TfLiteEvalTensor* output) { +#if !defined(VISION_P6) + const int32_t* bias_data = + tflite::micro::GetOptionalTensorData(bias); + + // P6 Vision will handle INT4 filters as a reference operation. + // For all other architectures, unpack INT4 here. 
+ const int8_t* filter_data = tflite::micro::GetTensorData(filter); + if (filter->type == kTfLiteInt4) { + int8_t* unpacked_filter_data = static_cast( + context->GetScratchBuffer(context, data.filter_buffer_index)); + + tflite::tensor_utils::UnpackDenseInt4IntoInt8( + tflite::micro::GetTensorData(filter), + tflite::micro::GetTensorShape(filter).FlatSize(), unpacked_filter_data); + filter_data = unpacked_filter_data; + } +#endif // !defined(VISION_P6) + +#if defined(HIFIMINI) + FullyConnectedEvalHifimini(FullyConnectedParamsQuantized(data), + tflite::micro::GetTensorShape(input), + tflite::micro::GetTensorData(input), + tflite::micro::GetTensorShape(filter), filter_data, + tflite::micro::GetTensorShape(bias), bias_data, + tflite::micro::GetTensorShape(output), + tflite::micro::GetTensorData(output)); +#elif defined(HIFI4) || defined(HIFI5) + const RuntimeShape& output_shape = tflite::micro::GetTensorShape(output); + const int num_batches = + FlatSizeSkipDim(output_shape, output_shape.DimensionsCount() - 1); + const int output_depth = + output_shape.Dims(output_shape.DimensionsCount() - 1); + + const RuntimeShape& filter_shape = tflite::micro::GetTensorShape(filter); + const int filter_dim_count = filter_shape.DimensionsCount(); + const int accum_depth = filter_shape.Dims(filter_dim_count - 1); + + FullyConnectedParams op_params = FullyConnectedParamsQuantized(data); + for (int b = 0; b < num_batches; ++b) { + TF_LITE_ENSURE_EQ( + context, + xa_nn_fully_connected_sym8sxasym8s_asym8s( + (tflite::micro::GetTensorData(output) + b * output_depth), + filter_data, + (tflite::micro::GetTensorData(input) + b * accum_depth), + bias_data, accum_depth, output_depth, op_params.input_offset, + op_params.output_multiplier, op_params.output_shift, + op_params.output_offset), + 0); + } + + int8_t* output_arr = tflite::micro::GetTensorData(output); + TF_LITE_ENSURE_EQ(context, + xa_nn_vec_activation_min_max_8_8( + output_arr, output_arr, data.output_activation_min, + 
data.output_activation_max, num_batches * output_depth), + 0); +#elif defined(VISION_P6) + const auto& params = + *(reinterpret_cast(node->builtin_data)); + const auto& op_data = + *(reinterpret_cast(node->user_data)); + FullyConnectedEvalVision(context, node, params, op_data, input, filter, bias, + output); +#else + reference_integer_ops::FullyConnected( + FullyConnectedParamsQuantized(data), tflite::micro::GetTensorShape(input), + tflite::micro::GetTensorData(input), + tflite::micro::GetTensorShape(filter), filter_data, + tflite::micro::GetTensorShape(bias), bias_data, + tflite::micro::GetTensorShape(output), + tflite::micro::GetTensorData(output)); +#endif // defined(HIFI4) || defined(HIFI5) + + return kTfLiteOk; +} + +namespace { + +TfLiteStatus EvalInt8(TfLiteContext* context, TfLiteNode* node) { + TFLITE_DCHECK(node->user_data != nullptr); + const auto& data = + *(static_cast(node->user_data)); + + const TfLiteEvalTensor* input = + tflite::micro::GetEvalInput(context, node, kFullyConnectedInputTensor); + const TfLiteEvalTensor* filter = + tflite::micro::GetEvalInput(context, node, kFullyConnectedWeightsTensor); + const TfLiteEvalTensor* bias = + tflite::micro::GetEvalInput(context, node, kFullyConnectedBiasTensor); + + TfLiteEvalTensor* output = + tflite::micro::GetEvalOutput(context, node, kFullyConnectedOutputTensor); + + return XtensaEvalFullyConnectedQuantizedInt8(context, node, data, input, + filter, bias, output); +} + +} // namespace + +TFLMRegistration Register_FULLY_CONNECTED_INT8() { + return tflite::micro::RegisterOp(XtensaInitFullyConnected, + XtensaPrepareFullyConnected, EvalInt8); +} + +} // namespace tflite diff --git a/tensorflow/lite/micro/kernels/xtensa/fully_connected_vision.cc b/tensorflow/lite/micro/kernels/xtensa/fully_connected_vision.cc new file mode 100644 index 0000000..14bb9a1 --- /dev/null +++ b/tensorflow/lite/micro/kernels/xtensa/fully_connected_vision.cc @@ -0,0 +1,181 @@ +/* Copyright 2021 The TensorFlow Authors. 
All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#if defined(VISION_P6) + +#include + +#include "tensorflow/lite/c/builtin_op_data.h" +#include "tensorflow/lite/c/common.h" +#include "tensorflow/lite/kernels/internal/common.h" +#include "tensorflow/lite/kernels/internal/portable_tensor_utils.h" +#include "tensorflow/lite/kernels/internal/tensor_ctypes.h" +#include "tensorflow/lite/kernels/kernel_util.h" +#include "tensorflow/lite/micro/kernels/conv.h" +#include "tensorflow/lite/micro/kernels/kernel_util.h" +#include "tensorflow/lite/micro/kernels/xtensa/xtensa.h" +#include "tensorflow/lite/micro/kernels/xtensa/xtensa_fully_connected.h" +#include "tensorflow/lite/micro/micro_arena_constants.h" + +namespace tflite { + +void NormalizeFCDims(uint32_t* dims, int rank) { + if (rank < 4) { + dims[3] = dims[rank - 1]; + dims[rank - 1] = 1; + } + dims[0] *= dims[1] * dims[2]; + dims[1] = 1; + dims[2] = 1; + return; +} + +inline void OperandDims4D(uint32_t* dims, const TfLiteTensor* opnd) { + for (int i = NumDimensions(opnd) - 1, j = 0; i >= 0; i--, j++) { + dims[j] = SizeOfDimension(opnd, i); + } + return; +} +TfLiteStatus FullyConnectedPrepareVision(TfLiteContext* context, + TfLiteNode* node) { + TFLITE_DCHECK(node->user_data != nullptr); + TFLITE_DCHECK(node->builtin_data != nullptr); + XtensaFullyConnectedOpData* data = + reinterpret_cast(node->user_data); + + MicroContext* micro_context = 
GetMicroContext(context); + TfLiteTensor* output = micro_context->AllocateTempOutputTensor( + node, kFullyConnectedOutputTensor); + TF_LITE_ENSURE(context, output != nullptr); + TfLiteTensor* input = + micro_context->AllocateTempInputTensor(node, kFullyConnectedInputTensor); + TF_LITE_ENSURE(context, input != nullptr); + TfLiteTensor* filter = micro_context->AllocateTempInputTensor( + node, kFullyConnectedWeightsTensor); + TF_LITE_ENSURE(context, filter != nullptr); + TfLiteTensor* bias = + micro_context->AllocateTempInputTensor(node, kFullyConnectedBiasTensor); + + uint32_t inputDims[4] = {1, 1, 1, 1}; + uint32_t outputDims[4] = {1, 1, 1, 1}; + uint32_t filterDims[4] = {1, 1, 1, 1}; + + OperandDims4D(inputDims, input); + OperandDims4D(outputDims, output); + OperandDims4D(filterDims, filter); + + NormalizeFCDims(inputDims, NumDimensions(input)); + NormalizeFCDims(filterDims, NumDimensions(filter)); + NormalizeFCDims(outputDims, NumDimensions(output)); + + uint32_t context_size = 0; + uint32_t status = xiFullyConnectedGetMemReqd_Context(&context_size); + if (!status && context_size) { + void* context_data = + context->AllocatePersistentBuffer(context, context_size); + if (context_data == nullptr) { + return kTfLiteError; + } + data->p_context = reinterpret_cast(context_data); + data->context_size = context_size; + } + + TfLiteTensor filter_int8; + + if (filter->type == kTfLiteInt4) { + const size_t bytes_unpacked = filter->bytes * 2; + filter_int8.data.data = micro_context->AllocateTempBuffer( + bytes_unpacked, tflite::MicroArenaBufferAlignment()); + filter_int8.dims = filter->dims; + filter_int8.type = kTfLiteInt8; + tflite::tensor_utils::UnpackDenseInt4IntoInt8( + GetTensorData(filter), GetTensorShape(filter).FlatSize(), + GetTensorData(&filter_int8)); + + } else { + filter_int8 = *filter; + } + + status = xiFullyConnectedSetContext( + data->p_context, data->context_size, inputDims, outputDims, filterDims, 1, + input->params.zero_point, filter->params.zero_point, 
+ output->params.zero_point, data->reference_op_data.output_multiplier, + data->reference_op_data.output_shift, + data->reference_op_data.output_activation_min, + data->reference_op_data.output_activation_max, + (uint8_t*)GetTensorData(&filter_int8)); + + if (status) { + return kTfLiteError; + } + + uint32_t coefficient_size = 0; + status = xiFullyConnectedGetMemReqd_Coeff(data->p_context, data->context_size, + &coefficient_size); + if (status || coefficient_size == 0) { + return kTfLiteError; + } + + void* coefficient_data = + context->AllocatePersistentBuffer(context, coefficient_size); + if (coefficient_data == nullptr) { + return kTfLiteError; + } + data->reorder_coefficient_bias = reinterpret_cast(coefficient_data); + data->reorder_coefficient_bias_size = coefficient_size; + + status = xiFullyConnectedDoCoeffReorder( + data->p_context, data->context_size, + reinterpret_cast(data->reorder_coefficient_bias), + data->reorder_coefficient_bias_size, + const_cast(GetTensorData(&filter_int8)), + const_cast(GetTensorData(bias))); + if (status) { + return kTfLiteError; + } + + if (filter->type == kTfLiteInt4) { + micro_context->DeallocateTempBuffer(GetTensorData(&filter_int8)); + } + micro_context->DeallocateTempTfLiteTensor(output); + micro_context->DeallocateTempTfLiteTensor(input); + micro_context->DeallocateTempTfLiteTensor(filter); + if (bias != nullptr) { + micro_context->DeallocateTempTfLiteTensor(bias); + } + return kTfLiteOk; +} + +TfLiteStatus FullyConnectedEvalVision(TfLiteContext* context, TfLiteNode* node, + const TfLiteConvParams& params, + const XtensaFullyConnectedOpData& data, + const TfLiteEvalTensor* input, + const TfLiteEvalTensor* filter, + const TfLiteEvalTensor* bias, + TfLiteEvalTensor* output) { + const uint32_t input_size = NumElements(input->dims); + const uint32_t output_size = NumElements(output->dims); + const int num_channels = filter->dims->data[kConvQuantizedDimension]; + + xiFullyConnected( + data.p_context, data.context_size, + 
const_cast(tflite::micro::GetTensorData(input)), + input_size, tflite::micro::GetTensorData(output), output_size, + data.reorder_coefficient_bias, data.reorder_coefficient_bias_size, NULL, + NULL, num_channels); + return kTfLiteOk; +} +} // namespace tflite +#endif // defined(VISION_P6) diff --git a/tensorflow/lite/micro/kernels/xtensa/hifimini/fixedpoint_utils.h b/tensorflow/lite/micro/kernels/xtensa/hifimini/fixedpoint_utils.h new file mode 100644 index 0000000..42bf971 --- /dev/null +++ b/tensorflow/lite/micro/kernels/xtensa/hifimini/fixedpoint_utils.h @@ -0,0 +1,139 @@ +/* Copyright 2023 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#ifndef TENSORFLOW_LITE_MICRO_KERNELS_XTENSA_HIFIMINI_FIXEDPOINT_UTILS_H_ +#define TENSORFLOW_LITE_MICRO_KERNELS_XTENSA_HIFIMINI_FIXEDPOINT_UTILS_H_ + +#if defined(HIFIMINI) +#include + +#include +#include +#include + +#include "tensorflow/lite/kernels/internal/compatibility.h" +#include "tensorflow/lite/micro/kernels/xtensa/xtensa.h" + +namespace tflite { + +// INT24 MIN/MAX +#define INT24_MIN -8388608 +#define INT24_MAX 8388607 + +// Multiply 24bit value by a quantized multiplier (w/ shift) and returns a 48bit +// aligned value in the QR register. 
+inline ae_q56s MultiplyByQuantizedMultiplier(ae_p24x2s x_24x2, + int32_t quantized_multiplier, + int shift) { + // A value with 1 sign bit, N integer bits and M fractional bits is + // represented as QN+1.M since the sign bit is included in the integer bits. + // + // The Q notation in this method explains the values represented in each + // variable, along with an implicit division since the quantized_multiplier + // represents a value between 0.5 and 1.0 (Q1.X-1 where X is the bit precision + // of the type). + // + // Load the quantized multiplier into the PR register. + // NOTE: This method assumes that this param has been calculated for 24bit + // space - not 32bits. + // Q32.0 / 2^23 -> Q24.0 / 2^23 representing a Q1.23 multiplier. + ae_p24x2s quantized_multiplier_24x2 = AE_MOVPA24(quantized_multiplier); + // Shift right by 23 - 16 bits minus the specified shift. This is because we + // keep 16 fractional bits until the end to perform rounding. Subtract shift + // since shift is a left shift, and the 23-16 is a right shift. + int shift_amount = 7 - shift; + + // Find the product of x and the quantized_multiplier. + // Q24.0 / 2^23 * Q24.0 = Q48.0 / 2^23 + // Q48.0 / 2^23 >> 7 = Q48.0 / 2^16 + ae_q56s result_56 = AE_MULP24S_HH(x_24x2, quantized_multiplier_24x2); + + // Shift right if shift amount is positive, left if shift amount is negative. + if (shift_amount >= 0) { + result_56 = AE_Q56S_SRA(result_56, shift_amount); + } else { + result_56 = AE_Q56S_SLA(result_56, -shift_amount); + } + + // Round off the bottom 16 bits. + // Q48.0 / 2^16 -> Q32.0 aligned to 48 bits. + result_56 = AE_ROUNDSQ32SYM(result_56); + return result_56; +} + +// Multiply 32bit value by a quantized multiplier (w/ shift) and returns a 48bit +// aligned value in the QR register. +inline ae_q56s MultiplyByQuantizedMultiplierResult48Bit( + int32_t x, int32_t quantized_multiplier, int shift) { + // Convert x into a 2x24bit PR register file. 
If x is outside the numerical + // limits of a 24bit integer, the "fractional" or lower 8bits are discarded. + // If x is within the range of a 24 bit integer, the "signed" or upper 8bits + // are discarded. + ae_p24x2s x_24x2; + if (x > INT24_MIN && x < INT24_MAX) { + x_24x2 = AE_MOVPA24(x); + } else { + x_24x2 = static_cast(*reinterpret_cast(&x)); + shift += 8; + } + + return MultiplyByQuantizedMultiplier(x_24x2, quantized_multiplier, shift); +} + +// Calculate quantization params for 24bit runtimes. +inline void QuantizeMultiplierForInt24(float multiplier, + int32_t* quantized_multiplier, + int* shift) { + if (multiplier == 0.0f) { + *quantized_multiplier = 0; + *shift = 0; + return; + } + + // Special cased to 24bit: + const float q = std::frexp(multiplier, shift); + auto q_fixed = static_cast(std::round(q * (1 << 23))); + + TFLITE_CHECK(q_fixed <= (1 << 23)); + if (q_fixed == (1 << 23)) { + q_fixed /= 2; + ++*shift; + } + TFLITE_CHECK_LE(q_fixed, INT24_MAX); + + // Ensure shift does not exceed 24-bit range. + TFLITE_CHECK_LE(*shift, 23); + if (*shift < -23) { + *shift = 0; + q_fixed = 0; + } + *quantized_multiplier = static_cast(q_fixed); +} + +// Convert a floating point number to a Q representation for 24 bit integers. 
+inline int CreateQConstantForInt24(int integer_bits, float f) { + const float min_bounds = static_cast(INT24_MIN); + const float max_bounds = static_cast(INT24_MAX); + + int fractional_bits = 23 - integer_bits; + float raw = std::round(f * static_cast(1 << fractional_bits)); + raw = std::max(raw, min_bounds); + raw = std::min(raw, max_bounds); + return static_cast(raw); +} + +} // namespace tflite +#endif // defined(HIFIMINI) +#endif // TENSORFLOW_LITE_MICRO_KERNELS_XTENSA_HIFIMINI_FIXEDPOINT_UTILS_H_ diff --git a/tensorflow/lite/micro/kernels/xtensa/hifimini/fully_connected.cc b/tensorflow/lite/micro/kernels/xtensa/hifimini/fully_connected.cc new file mode 100644 index 0000000..b63c500 --- /dev/null +++ b/tensorflow/lite/micro/kernels/xtensa/hifimini/fully_connected.cc @@ -0,0 +1,118 @@ +/* Copyright 2023 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/ + +#if defined(HIFIMINI) +#include "tensorflow/lite/micro/kernels/fully_connected.h" + +#include "tensorflow/lite/c/builtin_op_data.h" +#include "tensorflow/lite/c/common.h" +#include "tensorflow/lite/kernels/internal/common.h" +#include "tensorflow/lite/kernels/internal/quantization_util.h" +#include "tensorflow/lite/kernels/internal/reference/fully_connected.h" +#include "tensorflow/lite/kernels/internal/reference/integer_ops/fully_connected.h" +#include "tensorflow/lite/kernels/internal/tensor_ctypes.h" +#include "tensorflow/lite/kernels/kernel_util.h" +#include "tensorflow/lite/micro/kernels/kernel_util.h" +#include "tensorflow/lite/micro/kernels/xtensa/hifimini/fixedpoint_utils.h" +#include "tensorflow/lite/micro/kernels/xtensa/xtensa.h" + +namespace tflite { + +void FullyConnectedEvalHifimini( + const FullyConnectedParams& params, const RuntimeShape& input_shape, + const int8_t* input_data, const RuntimeShape& filter_shape, + const int8_t* filter_data, const RuntimeShape& bias_shape, + const int32_t* bias_data, const RuntimeShape& output_shape, + int8_t* output_data) { + const int32_t input_offset = params.input_offset; + const int32_t filter_offset = params.weights_offset; + const int32_t output_offset = params.output_offset; + const int32_t output_multiplier = params.output_multiplier; + const int output_shift = params.output_shift; + const int32_t output_activation_min = params.quantized_activation_min; + const int32_t output_activation_max = params.quantized_activation_max; + + const int filter_dim_count = filter_shape.DimensionsCount(); + const int batches = output_shape.Dims(0); + const int output_depth = output_shape.Dims(1); + const int accum_depth = filter_shape.Dims(filter_dim_count - 1); + const int accum_depth_iters = accum_depth / 2; + + ae_p24x2s offsets_input_24x2 = AE_MOVPA24(input_offset); + ae_p24x2s offsets_filter_24x2 = AE_MOVPA24(filter_offset); + ae_q56s 
output_offset_56 = AE_CVTQ48A32S(output_offset); + ae_q56s output_activation_max_56 = AE_CVTQ48A32S(output_activation_max); + ae_q56s output_activation_min_56 = AE_CVTQ48A32S(output_activation_min); + + for (int b = 0; b < batches; ++b) { + for (int out_c = 0; out_c < output_depth; ++out_c) { + // Load intrinsics advance pointer before loading so backoff data pointers + // by two before loading: + const int8_t* input_ptr = (input_data + b * accum_depth) - 2; + const int8_t* filter_ptr = (filter_data + out_c * accum_depth) - 2; + + // Main accumulator register entry for loop: + ae_q56s sum_56 = AE_ZEROQ56(); + + for (int d = 0; d < accum_depth_iters; d++) { + // Load the signed 8bit values into the PR register: + ae_p24x2s input_24x2; + ae_p24x2s filter_24x2; + AE_LP8X2F_IU(input_24x2, input_ptr, 2); + AE_LP8X2F_IU(filter_24x2, filter_ptr, 2); + + // Right shift the signed 8bit values to expand to signed 24bit values: + input_24x2 = AE_P24X2S_SRAI(input_24x2, 16); + filter_24x2 = AE_P24X2S_SRAI(filter_24x2, 16); + + // Add offsets to data values (24 bit aligned): + input_24x2 = AE_P24S_ADDS_P24X2S(offsets_input_24x2, input_24x2); + filter_24x2 = AE_P24S_ADDS_P24X2S(offsets_filter_24x2, filter_24x2); + + // 24x2 signed integer dual MAC w/ addition into 56bit accumulator (48 + // bit aligned): + AE_MULAAP24S_HH_LL(sum_56, input_24x2, filter_24x2); + } + + // Left shift to get back into 32bit space (right padded to 48bit): + sum_56 = AE_Q56S_SLAI(sum_56, 16); + + // Add bias data if needed: + if (bias_data) { + ae_q56s bias_56 = AE_CVTQ48A32S(bias_data[out_c]); + sum_56 = AE_ADDQ56(sum_56, bias_56); + } + + // Shift left into 24bit space and place back on PR register: + sum_56 = AE_Q56S_SLAI(sum_56, 8); + ae_p24x2s sum_24x2 = AE_TRUNCP24Q48(sum_56); + + // MultiplyByQuantizedMultiplier returns a 48bit aligned value + sum_56 = MultiplyByQuantizedMultiplier(sum_24x2, output_multiplier, + output_shift); + + // Add output_offset and cap min/max values: + sum_56 = 
AE_ADDQ56(sum_56, output_offset_56); + sum_56 = AE_MINQ56S(sum_56, output_activation_max_56); + sum_56 = AE_MAXQ56S(sum_56, output_activation_min_56); + + output_data[out_c + output_depth * b] = + static_cast(AE_TRUNCA32Q48(sum_56)); + } + } +} + +} // namespace tflite +#endif // defined(HIFIMINI) diff --git a/tensorflow/lite/micro/kernels/xtensa/hifimini/svdf.cc b/tensorflow/lite/micro/kernels/xtensa/hifimini/svdf.cc new file mode 100644 index 0000000..08ef4d9 --- /dev/null +++ b/tensorflow/lite/micro/kernels/xtensa/hifimini/svdf.cc @@ -0,0 +1,237 @@ +/* Copyright 2023 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/ + +#if defined(HIFIMINI) +#include "tensorflow/lite/micro/kernels/svdf.h" + +#include + +#include "tensorflow/lite/c/builtin_op_data.h" +#include "tensorflow/lite/c/common.h" +#include "tensorflow/lite/kernels/internal/common.h" +#include "tensorflow/lite/kernels/internal/quantization_util.h" +#include "tensorflow/lite/kernels/internal/tensor_ctypes.h" +#include "tensorflow/lite/kernels/kernel_util.h" +#include "tensorflow/lite/kernels/op_macros.h" +#include "tensorflow/lite/micro/kernels/activation_utils.h" +#include "tensorflow/lite/micro/kernels/kernel_util.h" +#include "tensorflow/lite/micro/kernels/xtensa/hifimini/fixedpoint_utils.h" +#include "tensorflow/lite/micro/kernels/xtensa/xtensa.h" +#include "tensorflow/lite/micro/kernels/xtensa/xtensa_svdf.h" + +namespace tflite { + +/** + * This version of SVDF is specific to TFLite Micro. It contains only a full + * integer receipe with optimizations for the Xtensa HiFiMini platform. + * + * Note: passing OpDataSvdf by value might seem like an oversight but it helps + * reduce the latency. See b/155656675 for more details. 
+ */ +TfLiteStatus EvalIntegerSvdfHifimini( + TfLiteContext* context, TfLiteNode* node, + const TfLiteEvalTensor* input_tensor, + const TfLiteEvalTensor* weights_feature_tensor, + const TfLiteEvalTensor* weights_time_tensor, + const TfLiteEvalTensor* bias_tensor, const TfLiteSVDFParams* params, + TfLiteEvalTensor* activation_state_tensor, TfLiteEvalTensor* output_tensor, + OpDataSvdf data) { + const int n_rank = params->rank; + const int n_batch = input_tensor->dims->data[0]; + const int n_input = input_tensor->dims->data[1]; + const int n_filter = weights_feature_tensor->dims->data[0]; + const int n_unit = n_filter / n_rank; + const int n_memory = weights_time_tensor->dims->data[1]; + + TFLITE_DCHECK(context != nullptr); + TFLITE_DCHECK(context->GetScratchBuffer != nullptr); + + int32_t* scratch_tensor = static_cast( + context->GetScratchBuffer(context, data.scratch_tensor_index)); + TFLITE_DCHECK(scratch_tensor != nullptr); + int32_t* scratch_output_tensor = static_cast( + context->GetScratchBuffer(context, data.scratch_output_tensor_index)); + TFLITE_DCHECK(scratch_output_tensor != nullptr); + + // Shift states. + int16_t* const state_ptr = + tflite::micro::GetTensorData(activation_state_tensor); + + // Left shift the activation_state. + { + int16_t* new_state_start = state_ptr; + const int16_t* old_state_start = state_ptr + 1; + const int16_t* old_state_end = state_ptr + n_batch * n_filter * n_memory; + while (old_state_start != old_state_end) { + *new_state_start++ = *old_state_start++; + } + } + + // Note: no need to clear the latest activation, matmul is not accumulative. + + // Feature matmul. 
+ { + const int8_t* input = tflite::micro::GetTensorData(input_tensor); + const int8_t* weight_feature = + tflite::micro::GetTensorData(weights_feature_tensor); + int16_t* result_in_batch = state_ptr + (n_memory - 1); + + ae_q56s output_int16_max_56 = AE_CVTQ48A32S(INT16_MAX); + ae_q56s output_int16_min_56 = AE_CVTQ48A32S(INT16_MIN); + ae_p24x2s input_zp_24x2 = AE_MOVPA24(data.input_zero_point); + + for (int b = 0; b < n_batch; b++) { + const int8_t* weight_feature_ptr = weight_feature - 2; + + for (int r = 0; r < n_filter; r++) { + ae_q56s dot_prod_56 = AE_ZEROQ56(); + + const int8_t* input_batch_ptr = input + b * n_input; + const int8_t* offset_input_batch_ptr = input_batch_ptr - 2; + + int num_iters = n_input / 2; + for (int c = 0; c < num_iters; c++) { + // Load 2 sets of values: + ae_p24x2s weight_feature_ptr_24x2; + ae_p24x2s input_batch_ptr_24x2; + AE_LP8X2F_IU(weight_feature_ptr_24x2, weight_feature_ptr, 2); + AE_LP8X2F_IU(input_batch_ptr_24x2, offset_input_batch_ptr, 2); + + // Right shift the signed 8bit values to expand to signed 24bit + // values: + weight_feature_ptr_24x2 = AE_P24X2S_SRAI(weight_feature_ptr_24x2, 16); + input_batch_ptr_24x2 = AE_P24X2S_SRAI(input_batch_ptr_24x2, 16); + + // First subtract input_zp from input_batch_ptr_24x2: + input_batch_ptr_24x2 = + AE_SUBSP24S(input_batch_ptr_24x2, input_zp_24x2); + + // Multiply accum: + AE_MULAAP24S_HH_LL(dot_prod_56, weight_feature_ptr_24x2, + input_batch_ptr_24x2); + } + + // Left shift 48bit value into 24bit space and place on the PR register: + dot_prod_56 = AE_Q56S_SLAI(dot_prod_56, 24); + ae_p24x2s dot_prod_24x2 = AE_TRUNCP24Q48(dot_prod_56); + + dot_prod_56 = MultiplyByQuantizedMultiplier( + dot_prod_24x2, data.effective_scale_1_a, data.effective_scale_1_b); + + // Cap min/max and convert to int32_t: + dot_prod_56 = AE_MAXQ56S(dot_prod_56, output_int16_min_56); + dot_prod_56 = AE_MINQ56S(dot_prod_56, output_int16_max_56); + // Truncate immediately since the QR register is already 32 bit 
aligned: + // This assumes state is symmetrically quantized. Otherwise last bit of + // state should be initialized to its zero point and accumulate the + // dot_prod. + // Equivalent as the following: + // result_in_batch = zero point, which happens to be zero. + // result_in_batch += dot_prod_56. + *result_in_batch = AE_TRUNCA32Q48(dot_prod_56); + result_in_batch += n_memory; + } + } + } + + // Time. + { + for (int b = 0; b < n_batch; ++b) { + int32_t* scratch_ptr_batch = scratch_tensor + b * n_filter; + + // Perform batched vector dot product: + const int16_t* vector1_ptr = + tflite::micro::GetTensorData(weights_time_tensor); + const int16_t* vector2_ptr = state_ptr + b * n_memory * n_filter; + + const ae_p16x2s* offset_vector1 = + reinterpret_cast(vector1_ptr - 2); + const ae_p16x2s* offset_vector2 = + reinterpret_cast(vector2_ptr - 2); + + for (int i = 0; i < n_filter; i++) { + *scratch_ptr_batch = 0; + + ae_q56s sum_56 = AE_ZEROQ56(); + int num_iters = n_memory / 2; + for (int j = 0; j < num_iters; j++) { + ae_p24x2s vector1_24x2; + ae_p24x2s vector2_24x2; + AE_LP16X2F_IU(vector1_24x2, offset_vector1, 4); + AE_LP16X2F_IU(vector2_24x2, offset_vector2, 4); + AE_MULAAP24S_HH_LL(sum_56, vector1_24x2, vector2_24x2); + } + // Truncate directly since values are already 32bit aligned: + *scratch_ptr_batch = AE_TRUNCA32Q48(sum_56); + scratch_ptr_batch++; + } + } + } + + // Reduce, add bias, rescale, activation. + { + // Add bias. + if (bias_tensor) { + // Vector batch assign: + const int32_t* bias_data = + tflite::micro::GetTensorData(bias_tensor); + for (int i = 0; i < n_batch; ++i) { + int32_t* output_ptr = scratch_output_tensor + i * n_unit; + const int32_t* bias_ptr = bias_data; + for (int j = 0; j < n_unit; ++j) { + *output_ptr++ = *bias_ptr++; + } + } + } else { + int32_t* output_ptr = scratch_output_tensor; + for (int i = 0; i < n_batch * n_unit; ++i) { + *output_ptr++ = 0; + } + } + + // Reduce. 
+ for (int b = 0; b < n_batch; ++b) { + int32_t* output_temp_ptr = scratch_output_tensor + b * n_unit; + int32_t* scratch_ptr_batch = scratch_tensor + b * n_filter; + + // Reduction sum vector + for (int i = 0; i < n_unit; ++i) { + for (int j = 0; j < n_rank; ++j) { + output_temp_ptr[i] += *scratch_ptr_batch++; + } + } + } + + // Rescale. + ae_q56s output_int8_max_56 = AE_CVTQ48A32S(INT8_MAX); + ae_q56s output_int8_min_56 = AE_CVTQ48A32S(INT8_MIN); + ae_q56s output_zp_56 = AE_CVTQ48A32S(data.output_zero_point); + for (int i = 0; i < n_batch * n_unit; ++i) { + ae_q56s x_56 = MultiplyByQuantizedMultiplierResult48Bit( + scratch_output_tensor[i], data.effective_scale_2_a, + data.effective_scale_2_b); + // Add output adjustment: + x_56 = AE_ADDQ56(x_56, output_zp_56); + // Cap min/max and convert to int32_t (already aligned to 32bit): + x_56 = AE_MAXQ56S(x_56, output_int8_min_56); + x_56 = AE_MINQ56S(x_56, output_int8_max_56); + tflite::micro::GetTensorData(output_tensor)[i] = + static_cast(AE_TRUNCA32Q48(x_56)); + } + } + return kTfLiteOk; +} +} // namespace tflite +#endif // defined(HIFIMINI) diff --git a/tensorflow/lite/micro/kernels/xtensa/leaky_relu.cc b/tensorflow/lite/micro/kernels/xtensa/leaky_relu.cc new file mode 100644 index 0000000..857a488 --- /dev/null +++ b/tensorflow/lite/micro/kernels/xtensa/leaky_relu.cc @@ -0,0 +1,109 @@ +/* Copyright 2021 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/ + +#include "tensorflow/lite/kernels/internal/reference/leaky_relu.h" + +#include "tensorflow/lite/c/common.h" +#include "tensorflow/lite/kernels/internal/quantization_util.h" +#include "tensorflow/lite/kernels/internal/reference/process_broadcast_shapes.h" +#include "tensorflow/lite/kernels/internal/types.h" +#include "tensorflow/lite/kernels/kernel_util.h" +#include "tensorflow/lite/micro/kernels/kernel_util.h" +#include "tensorflow/lite/micro/kernels/leaky_relu.h" +#include "tensorflow/lite/micro/kernels/xtensa/xtensa.h" +#include "tensorflow/lite/micro/micro_log.h" + +namespace tflite { + +template +void QuantizeLeakyRelu(const LeakyReluOpData& data, + const TfLiteEvalTensor* input, + TfLiteEvalTensor* output) { + LeakyReluParams op_params = {}; + + op_params.input_offset = data.input_zero_point; + op_params.output_offset = data.output_zero_point; + op_params.output_multiplier_alpha = data.output_multiplier_alpha; + op_params.output_shift_alpha = data.output_shift_alpha; + op_params.output_multiplier_identity = data.output_multiplier_identity; + op_params.output_shift_identity = data.output_shift_identity; + reference_ops::QuantizeLeakyRelu(op_params, + tflite::micro::GetTensorShape(input), + tflite::micro::GetTensorData(input), + tflite::micro::GetTensorShape(output), + tflite::micro::GetTensorData(output)); +} + +void* LeakyReluInit(TfLiteContext* context, const char* buffer, size_t length) { + TFLITE_DCHECK(context->AllocatePersistentBuffer != nullptr); + return context->AllocatePersistentBuffer(context, sizeof(LeakyReluOpData)); +} + +TfLiteStatus LeakyReluEval(TfLiteContext* context, TfLiteNode* node) { + const TfLiteEvalTensor* input = + tflite::micro::GetEvalInput(context, node, kInputTensor); + TfLiteEvalTensor* output = + tflite::micro::GetEvalOutput(context, node, kOutputTensor); + const LeakyReluOpData& data = *static_cast(node->user_data); + + switch (input->type) { + 
case kTfLiteFloat32: { + LeakyReluParams op_params = {}; + const auto* params = + static_cast(node->builtin_data); + + op_params.alpha = params->alpha; + reference_ops::LeakyRelu(op_params, tflite::micro::GetTensorShape(input), + tflite::micro::GetTensorData(input), + tflite::micro::GetTensorShape(output), + tflite::micro::GetTensorData(output)); + return kTfLiteOk; + } break; + case kTfLiteInt8: { + QuantizeLeakyRelu(data, input, output); + return kTfLiteOk; + } break; + case kTfLiteInt16: { +#if defined(HIFI4) + const RuntimeShape& input_shape = tflite::micro::GetTensorShape(input); + const RuntimeShape& output_shape = tflite::micro::GetTensorShape(output); + const int flat_size = MatchingFlatSize(input_shape, output_shape); + int32_t err = xa_nn_vec_leaky_relu_asym16s_asym16s( + tflite::micro::GetTensorData(output), + tflite::micro::GetTensorData(input), data.input_zero_point, + data.output_multiplier_alpha, data.output_shift_alpha, + data.output_multiplier_identity, data.output_shift_identity, + data.output_zero_point, flat_size); + if (err != 0) return kTfLiteError; +#else + QuantizeLeakyRelu(data, input, output); +#endif // defined(HIFI4) + return kTfLiteOk; + } break; + default: + MicroPrintf("Only float32, int8 are supported by LEAKY_RELU, got %s.", + TfLiteTypeGetName(input->type)); + return kTfLiteError; + } + + return kTfLiteError; +} + +TFLMRegistration Register_LEAKY_RELU() { + return tflite::micro::RegisterOp(LeakyReluInit, LeakyReluPrepare, + LeakyReluEval); +} + +} // namespace tflite diff --git a/tensorflow/lite/micro/kernels/xtensa/logistic.cc b/tensorflow/lite/micro/kernels/xtensa/logistic.cc new file mode 100644 index 0000000..41e6f3d --- /dev/null +++ b/tensorflow/lite/micro/kernels/xtensa/logistic.cc @@ -0,0 +1,134 @@ +/* Copyright 2021 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. 
+You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#include "tensorflow/lite/kernels/internal/reference/integer_ops/logistic.h" + +#include "tensorflow/lite/c/builtin_op_data.h" +#include "tensorflow/lite/c/common.h" +#include "tensorflow/lite/kernels/internal/common.h" +#include "tensorflow/lite/kernels/internal/quantization_util.h" +#include "tensorflow/lite/kernels/internal/reference/logistic.h" +#include "tensorflow/lite/kernels/internal/tensor_ctypes.h" +#include "tensorflow/lite/kernels/kernel_util.h" +#include "tensorflow/lite/kernels/op_macros.h" +#include "tensorflow/lite/micro/kernels/kernel_util.h" +#include "tensorflow/lite/micro/kernels/logistic.h" +#include "tensorflow/lite/micro/kernels/xtensa/xtensa.h" +#include "tensorflow/lite/micro/micro_log.h" + +namespace tflite { +namespace { + +void* LogisticInit(TfLiteContext* context, const char* buffer, size_t length) { + TFLITE_DCHECK(context->AllocatePersistentBuffer != nullptr); + return context->AllocatePersistentBuffer(context, sizeof(OpDataLogistic)); +} + +TfLiteStatus LogisticEval(TfLiteContext* context, TfLiteNode* node) { + const TfLiteEvalTensor* input = + tflite::micro::GetEvalInput(context, node, kLogisticInputTensor); + TfLiteEvalTensor* output = + tflite::micro::GetEvalOutput(context, node, kLogisticOutputTensor); + + TFLITE_DCHECK(node->user_data != nullptr); + OpDataLogistic* data = static_cast(node->user_data); + + if (input->type != output->type) { + MicroPrintf( + "Input and output types must be identical. 
Input %s, output %s.", + TfLiteTypeGetName(input->type), TfLiteTypeGetName(output->type)); + return kTfLiteError; + } + + switch (input->type) { + case kTfLiteFloat32: { +#if HIFI_VFPU && (defined(HIFI4) || defined(HIFI5)) + const RuntimeShape& input_shape = tflite::micro::GetTensorShape(input); + const RuntimeShape& output_shape = tflite::micro::GetTensorShape(output); + const int flat_size = MatchingFlatSize(input_shape, output_shape); + + const float* inp_data_ptr = tflite::micro::GetTensorData(input); + float* out_data_ptr = tflite::micro::GetTensorData(output); + + TF_LITE_ENSURE_EQ( + context, + xa_nn_vec_sigmoid_f32_f32(out_data_ptr, inp_data_ptr, flat_size), 0); +#else + reference_ops::Logistic(tflite::micro::GetTensorShape(input), + tflite::micro::GetTensorData(input), + tflite::micro::GetTensorShape(output), + tflite::micro::GetTensorData(output)); +#endif // HIFI_VFPU && (defined(HIFI4) || defined(HIFI5)) + break; + } + case kTfLiteInt8: { +#if defined(HIFI4) || defined(HIFI5) + const RuntimeShape& input_shape = tflite::micro::GetTensorShape(input); + const RuntimeShape& output_shape = tflite::micro::GetTensorShape(output); + const int flat_size = MatchingFlatSize(input_shape, output_shape); + + const int8_t* input_data_ptr = + tflite::micro::GetTensorData(input); + int8_t* output_data_ptr = tflite::micro::GetTensorData(output); + + TF_LITE_ENSURE_EQ( + context, + xa_nn_vec_sigmoid_asym8s_asym8s( + output_data_ptr, input_data_ptr, data->input_zero_point, + data->input_range_radius, data->input_multiplier, + data->input_left_shift, flat_size), + 0); +#else + reference_integer_ops::Logistic( + data->input_zero_point, data->input_range_radius, + data->input_multiplier, data->input_left_shift, + NumElements(input->dims), tflite::micro::GetTensorData(input), + tflite::micro::GetTensorData(output)); +#endif // defined(HIFI4) || defined(HIFI5) + break; + } + case kTfLiteInt16: { + switch (output->type) { + case kTfLiteInt16: + reference_integer_ops::Logistic( + 
data->input_multiplier, data->input_left_shift, + NumElements(input->dims), + tflite::micro::GetTensorData(input), + tflite::micro::GetTensorData(output)); + break; + default: + MicroPrintf("Input %s, output %s not supported.", + TfLiteTypeGetName(input->type), + TfLiteTypeGetName(output->type)); + return kTfLiteError; + } + break; + } + default: { + MicroPrintf("Input %s, output %s not supported.", + TfLiteTypeGetName(input->type), + TfLiteTypeGetName(output->type)); + return kTfLiteError; + } + } + return kTfLiteOk; +} + +} // namespace + +TFLMRegistration Register_LOGISTIC() { + return tflite::micro::RegisterOp(LogisticInit, LogisticPrepare, LogisticEval); +} +} // namespace tflite diff --git a/tensorflow/lite/micro/kernels/xtensa/lstm_eval.cc b/tensorflow/lite/micro/kernels/xtensa/lstm_eval.cc new file mode 100644 index 0000000..9065388 --- /dev/null +++ b/tensorflow/lite/micro/kernels/xtensa/lstm_eval.cc @@ -0,0 +1,1217 @@ +/* Copyright 2021 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/ +#include "tensorflow/lite/micro/kernels/xtensa/lstm_eval.h" + +#include +#include + +#include +#include +#include +#include + +#include "tensorflow/lite/c/builtin_op_data.h" +#include "tensorflow/lite/c/common.h" +#include "tensorflow/lite/kernels/internal/compatibility.h" +#include "tensorflow/lite/kernels/internal/portable_tensor_utils.h" +#include "tensorflow/lite/kernels/internal/tensor_ctypes.h" +#include "tensorflow/lite/kernels/op_macros.h" +#include "tensorflow/lite/micro/kernels/xtensa/xtensa.h" + +namespace tflite { +namespace ops { +namespace micro { +namespace lstm_eval { +namespace { + +// Calculates a single LSTM gate, int8x8_16 version. +// Implements the same functionality as CalculateLstmGateFloat. +void CalculateLstmGateInteger8x8_16( + // Input and weights + const int8_t* input, const int8_t* input_to_gate_weights, + const int32_t* input_to_gate_bias, const int32_t input_to_gate_scale_a, + const int32_t input_to_gate_scale_b, + // Output state and weights + const int8_t* output_state, const int8_t* recurrent_to_gate_weights, + const int32_t* recurrent_to_gate_bias, + const int32_t recurrent_to_gate_scale_a, + const int32_t recurrent_to_gate_scale_b, + // Cell state and weights + const int16_t* cell_state, const int16_t* cell_to_gate_weights, + const int32_t cell_to_gate_scale_a, const int32_t cell_to_gate_scale_b, + // Layer normalization parameters (layer norm LSTM) + const int16_t* layer_norm_coefficients, const int32_t* layer_norm_bias, + const int32_t layer_norm_input_scale_a, + const int32_t layer_norm_input_scale_b, + const int32_t layer_norm_variance_guard, + // Array sizes + const int n_batch, const int n_input, const int n_output, const int n_cell, + const TfLiteFusedActivation activation, + // Output + int16_t* gate, + // Parameters for performance optimizations + // CpuBackendContext* context, + // Scratch arrays + int32_t* scratch5) { + const bool 
use_peephole = (cell_to_gate_weights != nullptr); + const bool use_layer_norm = (layer_norm_coefficients != nullptr); + + // Initialize scratch buffers with zeros. Note that unlike float and hybrid + // versions, bias is only used in layer normalization. + std::fill_n(gate, n_batch * n_cell, 0); +#if !defined(HIFI5) + // For each batch and cell: compute input_weight * input. + tensor_utils::PortableMatrixBatchVectorMultiplyAccumulate( + input, input_to_gate_bias, input_to_gate_weights, input_to_gate_scale_a, + input_to_gate_scale_b, n_batch, n_input, n_cell, 0, scratch5, gate, NULL); +#else + { + xa_nn_matXvec_acc_batch_sym8sx8_asym16s( + gate, input_to_gate_weights, input, input_to_gate_bias, n_cell, n_input, + n_input, input_to_gate_scale_a, input_to_gate_scale_b, 0, n_batch); + } +#endif // !defined(HIFI5) +// Note: no aux_input. + +// For each batch and cell: compute recurrent_weight * output_state. +#if !defined(HIFI5) + tensor_utils::PortableMatrixBatchVectorMultiplyAccumulate( + output_state, recurrent_to_gate_bias, recurrent_to_gate_weights, + recurrent_to_gate_scale_a, recurrent_to_gate_scale_b, n_batch, n_output, + n_cell, 0, scratch5, gate, NULL); +#else + { + xa_nn_matXvec_acc_batch_sym8sx8_asym16s( + gate, recurrent_to_gate_weights, output_state, recurrent_to_gate_bias, + n_cell, n_output, n_output, recurrent_to_gate_scale_a, + recurrent_to_gate_scale_b, 0, n_batch); + } +#endif // !defined(HIFI5) + // For each batch and cell: compute cell_weight * cell_state (peephole LSTM) + if (use_peephole) { + tensor_utils::PortableVectorBatchVectorCwiseProductAccumulate( + cell_to_gate_weights, n_output, cell_state, n_batch, + cell_to_gate_scale_a, cell_to_gate_scale_b, gate); + } + // Do layer normalization (if layer norm LSTM) + if (use_layer_norm) { + tensor_utils::PortableApplyLayerNorm( + gate, layer_norm_coefficients, layer_norm_bias, + layer_norm_input_scale_a, layer_norm_input_scale_b, + layer_norm_variance_guard, n_batch, n_cell, gate); + } + // Apply 
activation + switch (activation) { + case kTfLiteActSigmoid: +#if !defined(HIFI5) + tensor_utils::PortableApplySigmoid(gate, n_batch, n_cell, gate); +#else + xa_nn_vec_sigmoid_16_16(gate, gate, n_batch * n_cell); +#endif // !defined(HIFI5) + break; + case kTfLiteActTanh: +#if !defined(HIFI5) + tensor_utils::PortableApplyTanh(3, gate, n_batch, n_cell, gate); +#else + xa_nn_vec_tanh_16_16(gate, gate, 3, n_batch * n_cell); +#endif // !defined(HIFI5) + break; + default: + // Only Sigmoid or Tanh is used. + TFLITE_ASSERT_FALSE; + } +} + +// Updates the LSTM cell state, used by both integer LSTM versions. +// Also see UpdateLstmCellFloat. +// +// Parameters: +// - n_batch, n_cell: sizes of vectors +// - cell_state: input/output vector, size n_batch*n_cell +// - cell_state_scale: scaling factor of cell state. +// - input_gate: input vector, size n_batch*n_cell. +// - forget_gate: input/scratch vector, size n_batch*n_cell, always modified. +// - cell_gate: input vector, size n_batch*n_cell. +// - use_cifg: use 1-forget_gate instead of input_gate. +// - clip: if > 0, clip the resulting cell state to [-clip, +clip]. +void UpdateLstmCellInteger(int n_batch, int n_cell, int16_t* cell_state, + int32_t cell_state_scale, const int16_t* input_gate, + int16_t* forget_gate, const int16_t* cell_gate, + bool use_cifg, int16_t clip) { +#if !defined(HIFI5) + // Use the forget_gate array as scratch, as input_gate array is not allocated + // in CIFG case. (Be careful not to write to the scratch before reading the + // forget gate data.) 
+ int16_t* scratch = forget_gate; + + tensor_utils::PortableCwiseMul(forget_gate, cell_state, n_batch, n_cell, 15, + cell_state); + if (use_cifg) { + tensor_utils::PortableSub1Vector(forget_gate, n_batch * n_cell, scratch); + tensor_utils::PortableCwiseMul(scratch, cell_gate, n_batch, n_cell, + 30 + cell_state_scale, scratch); + } else { + tensor_utils::PortableCwiseMul(input_gate, cell_gate, n_batch, n_cell, + 30 + cell_state_scale, scratch); + } + tensor_utils::PortableCwiseAdd(cell_state, scratch, n_batch, n_cell, + cell_state); + + if (clip > 0) { + tensor_utils::PortableCwiseClipping(cell_state, n_batch * n_cell, clip); + } +#else + if (use_cifg) { + calc_cell_state_with_cifg(cell_state, forget_gate, cell_gate, 15, + 30 + cell_state_scale, clip, n_batch * n_cell); + } else { + calc_cell_state_without_cifg(cell_state, forget_gate, cell_gate, input_gate, + 15, 30 + cell_state_scale, clip, + n_batch * n_cell); + } + +#endif // !defined(HIFI5) +} + +// Calculates the output state tensor of an LSTM step. See Float and hybrid +// versions as well. +// +// Parameters: +// - n_batch: batches: the number of distinct vectors in each array. +// - n_cell, n_output: sizes of vectors. +// - cell_state, output_gate: input vectors, size n_batch*n_cell. +// - cell_state_scale: scaling of cell_state. +// - hidden_scale_[a|b]: effective scale of cell_state.*output_gate +// - hidden_zp: zero_point for cell_state.*output_gate +// - projection_weights, proj_scale_[a|b], projection_bias: +// constant inputs, describing projection matrix and bias. +// - output_state_zp: zero point of output_state. (Input, calibrated value.) +// - quantized_proj_clip: if > 0, clip the output of the projection. +// - output_state: output vector, size n_batch*n_output. Must be contiguous. +// - context: data for optimized MatrixBatchVectorMultiplyAccumulate. 
+// - scratch0: scratch area of size n_batch*n_cell +// - scratch1: scratch area of size n_batch*n_cell +// - scratch2: scratch area used by MatrixBatchVectorMultiplyAccumulate +void CalculateLstmOutputInteger8x8_16( + int n_batch, int n_cell, int n_output, const int16_t* cell_state, + int32_t cell_state_scale, const int16_t* output_gate, + int32_t hidden_scale_a, int32_t hidden_scale_b, int32_t hidden_zp, + const int8_t* projection_weights, int32_t proj_scale_a, + int32_t proj_scale_b, const int32_t* projection_bias, + int32_t output_state_zp, int8_t quantized_proj_clip, int8_t* output_state, + int16_t* scratch0, int8_t* scratch1, int32_t* scratch2) { +// Note: unlike float/hybrid, the activation is always Tanh. +#if !defined(HIFI5) + tensor_utils::PortableApplyTanh(15 + cell_state_scale, cell_state, n_batch, + n_cell, scratch0); +#else + xa_nn_vec_tanh_16_16(scratch0, cell_state, (15 + cell_state_scale), + n_batch * n_cell); +#endif // !defined(HIFI5) + +#if !defined(HIFI5) + tensor_utils::PortableCwiseMul(output_gate, scratch0, hidden_scale_a, + hidden_scale_b, n_batch, n_cell, hidden_zp, + scratch1); +#else + xa_nn_elm_mul_16x16_asym8s(scratch1, output_gate, scratch0, hidden_scale_a, + hidden_scale_b, hidden_zp, n_batch * n_cell); +#endif // !defined(HIFI5) + + const bool use_projection = (projection_weights != nullptr); + + if (use_projection) { + // Note: no bias like in float/hybrid + std::fill_n(output_state, n_batch * n_output, 0); + tensor_utils::PortableMatrixBatchVectorMultiplyAccumulate( + scratch1, projection_bias, projection_weights, proj_scale_a, + proj_scale_b, n_batch, n_cell, n_output, output_state_zp, scratch2, + output_state, NULL); + if (quantized_proj_clip > 0) { + tensor_utils::PortableCwiseClipping(output_state, n_batch * n_output, + quantized_proj_clip); + } + } else { + std::copy_n(scratch1, n_batch * n_output, output_state); + } +} + +// Calculates a single LSTM gate, int8x8_8 version. 
+// Implements the same functionality as CalculateLstmGateFloat. +void CalculateLstmGateInteger8x8_8( + // Inputs and weights + const int8_t* input, int32_t input_zp, const int8_t* input_to_gate_weight, + const int32_t input_to_gate_scale_a, const int32_t input_to_gate_scale_b, + const int32_t input_times_weights_scale_a, + const int32_t input_times_weights_scale_b, + const int32_t input_times_weights_zp, + // Output state and weights + const int8_t* output_state, const int32_t output_state_zp, + const int8_t* recurrent_to_gate_weight, + const int32_t recurrent_to_gate_scale_a, + const int32_t recurrent_to_gate_scale_b, + const int32_t output_state_times_weights_scale_a, + const int32_t output_state_times_weights_scale_b, + const int32_t output_state_times_weights_zp, + // Layer normalization parameters (layer norm LSTM) + const int16_t* layer_norm_gate_weight, + const int32_t layer_norm_gate_scale_a, + const int32_t layer_norm_gate_scale_b, const int32_t* gate_bias, + // Array sizes + const int n_batch, const int n_input, const int n_output, const int n_cell, + const TfLiteFusedActivation activation, + // Output + int16_t* gate, + // Scratch arrays, both sized n_batch*n_cell + int8_t* scratch0, int8_t* scratch1) { + // Multiply input * input_weights => scratch0 + tensor_utils::PortableMatrixBatchVectorMultiply( + input, input_zp, input_to_gate_weight, input_to_gate_scale_a, + input_to_gate_scale_b, n_batch, n_input, n_cell, scratch0, + input_times_weights_zp); + // Multiply output_state * recurrent_weights => scratch1 + tensor_utils::PortableMatrixBatchVectorMultiply( + output_state, output_state_zp, recurrent_to_gate_weight, + recurrent_to_gate_scale_a, recurrent_to_gate_scale_b, n_batch, n_output, + n_cell, scratch1, output_state_times_weights_zp); + // Add scratch0 + scratch1 => gate + tensor_utils::PortableTwoGateSaturatingAdd( + scratch0, input_times_weights_zp, scratch1, output_state_times_weights_zp, + input_times_weights_scale_a, 
input_times_weights_scale_b, + output_state_times_weights_scale_a, output_state_times_weights_scale_b, + n_batch, n_cell, gate); + // Apply layer normalization. + tensor_utils::PortableApplyLayerNormFloat( + gate, layer_norm_gate_weight, layer_norm_gate_scale_a, + layer_norm_gate_scale_b, gate_bias, n_batch, n_cell, gate); + // Apply activation. + switch (activation) { + case kTfLiteActSigmoid: + tensor_utils::PortableApplySigmoidFloat(gate, n_batch, n_cell, gate); + break; + case kTfLiteActTanh: + tensor_utils::PortableApplyTanhFloat(gate, n_batch, n_cell, -12, gate); + break; + default: + // Only Sigmoid or Tanh is used. + TFLITE_ASSERT_FALSE; + } +} + +// Calculates the output state tensor of an LSTM step. See Float and hybrid +// versions as well. +// +// Parameters: +// - n_batch: batches: the number of distinct vectors in each array. +// - n_cell, n_output: sizes of vectors. +// - cell_state, output_gate: input vectors, size n_batch*n_cell. +// - projection_weights, proj_scale_[a|b], projection_bias: +// constant inputs, describing projection matrix and bias. +// - output_state_zp: zero point of the output state. +// - quantized_proj_clip: if > 0, clip the output of the projection. +// - output_state: output vector, size n_batch*n_output. Must be contiguous. +// - scratch: scratch area of size n_batch*n_cell +void CalculateLstmOutputInteger8x8_8( + int n_batch, int n_cell, int n_output, const int16_t* cell_state, + const int16_t* output_gate, const int8_t* projection_weights, + int32_t proj_scale_a, int32_t proj_scale_b, const int32_t* projection_bias, + int32_t output_state_zp, int32_t quantized_proj_clip, int8_t* output_state, + int16_t* scratch) { + // Note: unlike float/hybrid, the activation is always Tanh. 
+ tensor_utils::PortableApplyTanhFloat(cell_state, n_batch, n_cell, -15, + scratch); + tensor_utils::PortableCwiseMul(output_gate, scratch, n_batch, n_cell, + 15 + 15 - 15, scratch); + // Note: no bias like in float/hybrid + tensor_utils::PortableMatrixBatchVectorMultiply( + scratch, projection_weights, proj_scale_a, proj_scale_b, projection_bias, + n_batch, n_cell, n_output, output_state_zp, output_state); + if (quantized_proj_clip > 0) { + tensor_utils::PortableCwiseClipping(output_state, n_batch * n_output, + (int8_t)quantized_proj_clip); + } +} + +// Fully quantized lstm kernel for 16 bit gate matmul output. +// +// Input tensor of size n_batch * n_input: +// input_ptr +// +// LSTM weights: +// Quantized input weights of size 'n_cell * n_input': +// input_to_input_weight_ptr - optional +// input_to_forget_weight_ptr - optional +// input_to_cell_weight_ptr - optional +// input_to_output_weight_ptr - optional +// +// Quantized recurrent weights of size 'n_cell * n_output': +// recurrent_to_input_weight_ptr - optional +// recurrent_to_forget_weights_ptr +// recurrent_to_cell_weights_ptr +// recurrent_to_input_weights_ptr +// +// Quantized peephole weights of size 'n_cell', representing diagonal matrices. +// cell_to_input_weights - optional +// cell_to_cell_weights - optional +// cell_to_output_weights - optional +// +// Quantized projection weights of size 'n_output * n_cell' +// projection_weight_ptr - optional +// +// Weight scales (scalars) for each of the weights above. 
+// effective_input_to_input_scale_a - optional +// effective_input_to_input_scale_b - optional +// effective_input_to_forget_scale_a +// effective_input_to_forget_scale_b +// effective_input_to_cell_scale_a +// effective_input_to_cell_scale_b +// effective_input_to_output_scale_a +// effective_input_to_output_scale_b +// effective_recurrent_to_input_scale_a - optional +// effective_recurrent_to_input_scale_b - optional +// effective_recurrent_to_forget_scale_a +// effective_recurrent_to_forget_scale_b +// effective_recurrent_to_cell_scale_a +// effective_recurrent_to_cell_scale_b +// effective_recurrent_to_output_scale_a +// effective_recurrent_to_output_scale_b +// effective_proj_scale_a - optional +// effective_proj_scale_b - optional +// +// Gate biases of size 'n_cell': +// input_gate_bias_ptr - optional +// forget_gate_bias_ptr +// cell_gate_bias_ptr +// output_gate_bias_ptr +// +// Layer norm coefficients of size 'n_cell', representing diagonal matrices. +// layer_norm_input_weight_ptr - optional +// layer_norm_forget_weight_ptr - optional +// layer_norm_cell_weight_ptr - optional +// layer_norm_output_weight_ptr - optional +// +// Layer norm scales of size 'n_cell'. +// layer_norm_input_scale_a - optional +// layer_norm_input_scale_b - optional +// layer_norm_forget_scale_a - optional +// layer_norm_forget_scale_b - optional +// layer_norm_cell_scale_a - optional +// layer_norm_cell_scale_b - optional +// layer_norm_output_scale_a - optional +// layer_norm_output_scale_b - optional +// +// Scalar values: +// quantized_cell_clip: quantized clip value for cell. +// quantized_proj_clip: quantized clip value for projection. +// cell_state_scale: the power of two scale for cell state. +// +// Zero points: +// output_state_zp: zero point of output state +// hidden_zp: zero point for hidden state. +// +// Temporary pre-allocated storage for the calculation. Each is of size n_cell * +// n_batch. 
+// scratch0 +// scratch1 +// scratch2 +// scratch3 +// scratch4 +// scratch5: this scratch buffer is created purely for optimizing the +// MatrixBatchVectorMultiplyAccumulate. +// +// Outputs: +// output_state_ptr - size 'n_batch * n_output' +// cell_state_ptr - size 'n_batch * n_cell' +// output_ptr - size 'n_batch * n_output' +// TODO(b/159947023): scratch0 is not used if (!cifg). Don't allocate then. +inline void LstmStepInteger8x8_16( + const int8_t* input_ptr, const int8_t* input_to_input_weight_ptr, + int32_t effective_input_to_input_scale_a, + int32_t effective_input_to_input_scale_b, + const int8_t* input_to_forget_weight_ptr, + int32_t effective_input_to_forget_scale_a, + int32_t effective_input_to_forget_scale_b, + const int8_t* input_to_cell_weight_ptr, + int32_t effective_input_to_cell_scale_a, + int32_t effective_input_to_cell_scale_b, + const int8_t* input_to_output_weight_ptr, + int32_t effective_input_to_output_scale_a, + int32_t effective_input_to_output_scale_b, + const int8_t* recurrent_to_input_weight_ptr, + int32_t effective_recurrent_to_input_scale_a, + int32_t effective_recurrent_to_input_scale_b, + const int8_t* recurrent_to_forget_weight_ptr, + int32_t effective_recurrent_to_forget_scale_a, + int32_t effective_recurrent_to_forget_scale_b, + const int8_t* recurrent_to_cell_weight_ptr, + int32_t effective_recurrent_to_cell_scale_a, + int32_t effective_recurrent_to_cell_scale_b, + const int8_t* recurrent_to_output_weight_ptr, + int32_t effective_recurrent_to_output_scale_a, + int32_t effective_recurrent_to_output_scale_b, + const int16_t* cell_to_input_weight_ptr, + int32_t effective_cell_to_input_scale_a, + int32_t effective_cell_to_input_scale_b, + const int16_t* cell_to_forget_weight_ptr, + int32_t effective_cell_to_forget_scale_a, + int32_t effective_cell_to_forget_scale_b, + const int16_t* cell_to_output_weight_ptr, + int32_t effective_cell_to_output_scale_a, + int32_t effective_cell_to_output_scale_b, + const int8_t* 
projection_weight_ptr, int32_t effective_proj_scale_a, + int32_t effective_proj_scale_b, int32_t hidden_zp, + int32_t effective_hidden_scale_a, int32_t effective_hidden_scale_b, + const int16_t* layer_norm_input_weight_ptr, + int32_t layer_norm_input_scale_a, int32_t layer_norm_input_scale_b, + const int16_t* layer_norm_forget_weight_ptr, + int32_t layer_norm_forget_scale_a, int32_t layer_norm_forget_scale_b, + const int16_t* layer_norm_cell_weight_ptr, int32_t layer_norm_cell_scale_a, + int32_t layer_norm_cell_scale_b, + const int16_t* layer_norm_output_weight_ptr, + int32_t layer_norm_output_scale_a, int32_t layer_norm_output_scale_b, + const int32_t* input_gate_bias_ptr, const int32_t* forget_gate_bias_ptr, + const int32_t* cell_gate_bias_ptr, const int32_t* output_gate_bias_ptr, + int16_t quantized_cell_clip, int8_t quantized_proj_clip, + int32_t cell_state_scale, int32_t input_variance_guard, + int32_t forget_variance_guard, int32_t cell_variance_guard, + int32_t output_variance_guard, + const int32_t* input_to_forget_effective_bias, + const int32_t* recurrent_to_forget_effective_bias, + const int32_t* input_to_cell_effective_bias, + const int32_t* recurrent_to_cell_effective_bias, + const int32_t* input_to_output_effective_bias, + const int32_t* recurrent_to_output_effective_bias, + const int32_t* input_to_input_effective_bias, + const int32_t* recurrent_to_input_effective_bias, + const int32_t* projection_effective_bias, int n_batch, int n_cell, + int n_input, int n_output, int8_t* output_state_ptr, + int32_t output_state_zp, int16_t* cell_state_ptr, int8_t* output_ptr, + int16_t* scratch0, int16_t* scratch1, int16_t* scratch2, int16_t* scratch3, + int8_t* scratch4, int32_t* scratch5) { + // ruy::profiler::ScopeLabel label("LstmStepInteger8x8_16"); + // Make named scratch buffers for the different gates. 
+ int16_t* input_gate_scratch = scratch0; + int16_t* forget_gate_scratch = scratch1; + int16_t* cell_gate_scratch = scratch2; + int16_t* output_gate_scratch = scratch3; + + // Since we have already checked that weights are all there or none, we + // can check the existence of only one to the get the condition. + const bool use_cifg = (input_to_input_weight_ptr == nullptr); + + // Check for nullptrs. + TFLITE_DCHECK(input_to_forget_effective_bias); + TFLITE_DCHECK(recurrent_to_forget_effective_bias); + TFLITE_DCHECK(input_to_cell_effective_bias); + TFLITE_DCHECK(recurrent_to_cell_effective_bias); + TFLITE_DCHECK(input_to_output_effective_bias); + TFLITE_DCHECK(recurrent_to_output_effective_bias); + if (!use_cifg) { + TFLITE_DCHECK(input_to_input_effective_bias); + TFLITE_DCHECK(recurrent_to_input_effective_bias); + } + const bool use_projection = (projection_weight_ptr != nullptr); + if (use_projection) { + TFLITE_DCHECK(projection_effective_bias); + } + if (!use_cifg) { + // Calculate the input gate. (If not CIFG.) + CalculateLstmGateInteger8x8_16( + input_ptr, input_to_input_weight_ptr, input_to_input_effective_bias, + effective_input_to_input_scale_a, effective_input_to_input_scale_b, + output_state_ptr, recurrent_to_input_weight_ptr, + recurrent_to_input_effective_bias, effective_recurrent_to_input_scale_a, + effective_recurrent_to_input_scale_b, cell_state_ptr, + cell_to_input_weight_ptr, effective_cell_to_input_scale_a, + effective_cell_to_input_scale_b, layer_norm_input_weight_ptr, + input_gate_bias_ptr, layer_norm_input_scale_a, layer_norm_input_scale_b, + input_variance_guard, n_batch, n_input, n_output, n_cell, + kTfLiteActSigmoid, input_gate_scratch, scratch5); + } + // Calculate the forget gate. 
+ CalculateLstmGateInteger8x8_16( + input_ptr, input_to_forget_weight_ptr, input_to_forget_effective_bias, + effective_input_to_forget_scale_a, effective_input_to_forget_scale_b, + output_state_ptr, recurrent_to_forget_weight_ptr, + recurrent_to_forget_effective_bias, effective_recurrent_to_forget_scale_a, + effective_recurrent_to_forget_scale_b, cell_state_ptr, + cell_to_forget_weight_ptr, effective_cell_to_forget_scale_a, + effective_cell_to_forget_scale_b, layer_norm_forget_weight_ptr, + forget_gate_bias_ptr, layer_norm_forget_scale_a, + layer_norm_forget_scale_b, forget_variance_guard, n_batch, n_input, + n_output, n_cell, kTfLiteActSigmoid, forget_gate_scratch, scratch5); + // Calculate the cell update gate. + CalculateLstmGateInteger8x8_16( + input_ptr, input_to_cell_weight_ptr, input_to_cell_effective_bias, + effective_input_to_cell_scale_a, effective_input_to_cell_scale_b, + output_state_ptr, recurrent_to_cell_weight_ptr, + recurrent_to_cell_effective_bias, effective_recurrent_to_cell_scale_a, + effective_recurrent_to_cell_scale_b, cell_state_ptr, + /*cell_to_gate_weights=*/nullptr, /*cell_to_gate_scale_a=*/0, + /*cell_to_gate_scale_b=*/0, layer_norm_cell_weight_ptr, + cell_gate_bias_ptr, layer_norm_cell_scale_a, layer_norm_cell_scale_b, + cell_variance_guard, n_batch, n_input, n_output, n_cell, kTfLiteActTanh, + cell_gate_scratch, scratch5); + // Update the cell state. + UpdateLstmCellInteger(n_batch, n_cell, cell_state_ptr, cell_state_scale, + input_gate_scratch, forget_gate_scratch, + cell_gate_scratch, use_cifg, quantized_cell_clip); + // Calculate the output gate. 
+ CalculateLstmGateInteger8x8_16( + input_ptr, input_to_output_weight_ptr, input_to_output_effective_bias, + effective_input_to_output_scale_a, effective_input_to_output_scale_b, + output_state_ptr, recurrent_to_output_weight_ptr, + recurrent_to_output_effective_bias, effective_recurrent_to_output_scale_a, + effective_recurrent_to_output_scale_b, cell_state_ptr, + cell_to_output_weight_ptr, effective_cell_to_output_scale_a, + effective_cell_to_output_scale_b, layer_norm_output_weight_ptr, + output_gate_bias_ptr, layer_norm_output_scale_a, + layer_norm_output_scale_b, output_variance_guard, n_batch, n_input, + n_output, n_cell, kTfLiteActSigmoid, output_gate_scratch, scratch5); + // Update the output state. + CalculateLstmOutputInteger8x8_16( + n_batch, n_cell, n_output, cell_state_ptr, cell_state_scale, + output_gate_scratch, effective_hidden_scale_a, effective_hidden_scale_b, + hidden_zp, projection_weight_ptr, effective_proj_scale_a, + effective_proj_scale_b, projection_effective_bias, output_state_zp, + quantized_proj_clip, output_state_ptr, scratch0, scratch4, scratch5); + // Copy output state to the output. Note that unlike float or hybrid, output + // is always contiguous. + std::copy_n(output_state_ptr, n_batch * n_output, output_ptr); +} + +// Fully quantized lstm kernel for 8 bit gate matmul output. +// +// Input tensor of size n_batch * n_input: +// input_ptr +// +// LSTM weights: +// Quantized input weights of size 'n_cell * n_input': +// input_to_input_weight_ptr - optional +// input_to_forget_weight_ptr - optional +// input_to_cell_weight_ptr - optional +// input_to_output_weight_ptr - optional +// +// Quantized recurrent weights of size 'n_cell * n_output': +// recurrent_to_input_weight_ptr - optional +// recurrent_to_forget_weights_ptr +// recurrent_to_cell_weights_ptr +// recurrent_to_input_weights_ptr +// +// Quantized peephole weights of size 'n_cell', representing diagonal matrices. 
+// cell_to_input_weights - optional +// cell_to_cell_weights - optional +// cell_to_output_weights - optional +// +// Quantized projection weights of size 'n_output * n_cell' +// projection_weight_ptr - optional +// +// Weight scales (scalars) for each of the weights above. +// effective_input_to_input_scale_a - optional +// effective_input_to_input_scale_b - optional +// effective_input_to_forget_scale_a +// effective_input_to_forget_scale_b +// effective_input_to_cell_scale_a +// effective_input_to_cell_scale_b +// effective_input_to_output_scale_a +// effective_input_to_output_scale_b +// effective_recurrent_to_input_scale_a - optional +// effective_recurrent_to_input_scale_b - optional +// effective_recurrent_to_forget_scale_a +// effective_recurrent_to_forget_scale_b +// effective_recurrent_to_cell_scale_a +// effective_recurrent_to_cell_scale_b +// effective_recurrent_to_output_scale_a +// effective_recurrent_to_output_scale_b +// effective_proj_scale_a - optional +// effective_proj_scale_b - optional +// +// Gate biases of size 'n_cell': +// input_gate_bias_ptr - optional +// forget_gate_bias_ptr +// cell_gate_bias_ptr +// output_gate_bias_ptr +// +// Layer norm coefficients of size 'n_cell', representing diagonal matrices. +// layer_norm_input_weight_ptr - optional +// layer_norm_forget_weight_ptr - optional +// layer_norm_cell_weight_ptr - optional +// layer_norm_output_weight_ptr - optional +// +// Layer norm scales of size 'n_cell'. +// layer_norm_input_scale_a - optional +// layer_norm_input_scale_b - optional +// layer_norm_forget_scale_a - optional +// layer_norm_forget_scale_b - optional +// layer_norm_cell_scale_a - optional +// layer_norm_cell_scale_b - optional +// layer_norm_output_scale_a - optional +// layer_norm_output_scale_b - optional +// +// Scalar values: +// quantized_cell_clip: quantized clip value for cell. +// quantized_proj_clip: quantized clip value for projection. +// cell_state_scale: the power of two scale for cell state. 
+// +// Zero points: +// output_state_zp: zero point of output state. +// hidden_zp: zero point for hidden state. +// +// Temporary pre-allocated storage for the calculation. Each is of size n_cell * +// n_batch. +// scratch0 +// scratch1 +// scratch2 +// scratch3 +// scratch4 +// scratch5 +// scratch6 +// scratch7 +// +// Outputs: +// output_state_ptr - size 'n_batch * n_output' +// cell_state_ptr - size 'n_batch * n_cell' +// output_ptr - size 'n_batch * n_output' +// TODO(b/148688698): Move zero point calculation into Prepare(). +// TODO(b/159947023): scratch5 is unused, remove. +inline void LstmStepInteger8x8_8( + const int8_t* input_ptr, int32_t input_zp, + const int8_t* input_to_input_weight_ptr, + int32_t effective_input_to_input_scale_a, + int32_t effective_input_to_input_scale_b, + const int8_t* input_to_forget_weight_ptr, + int32_t effective_input_to_forget_scale_a, + int32_t effective_input_to_forget_scale_b, + const int8_t* input_to_cell_weight_ptr, + int32_t effective_input_to_cell_scale_a, + int32_t effective_input_to_cell_scale_b, + const int8_t* input_to_output_weight_ptr, + int32_t effective_input_to_output_scale_a, + int32_t effective_input_to_output_scale_b, + const int8_t* recurrent_to_input_weight_ptr, + int32_t effective_recurrent_to_input_scale_a, + int32_t effective_recurrent_to_input_scale_b, + const int8_t* recurrent_to_forget_weight_ptr, + int32_t effective_recurrent_to_forget_scale_a, + int32_t effective_recurrent_to_forget_scale_b, + const int8_t* recurrent_to_cell_weight_ptr, + int32_t effective_recurrent_to_cell_scale_a, + int32_t effective_recurrent_to_cell_scale_b, + const int8_t* recurrent_to_output_weight_ptr, + int32_t effective_recurrent_to_output_scale_a, + int32_t effective_recurrent_to_output_scale_b, + const int8_t* cell_to_input_weight_ptr, + int32_t effective_cell_to_input_scale_a, + int32_t effective_cell_to_input_scale_b, + const int8_t* cell_to_forget_weight_ptr, + int32_t effective_cell_to_forget_scale_a, + int32_t 
effective_cell_to_forget_scale_b, + const int8_t* cell_to_output_weight_ptr, + int32_t effective_cell_to_output_scale_a, + int32_t effective_cell_to_output_scale_b, + const int8_t* projection_weight_ptr, int32_t effective_proj_scale_a, + int32_t effective_proj_scale_b, const int16_t* layer_norm_input_weight_ptr, + int32_t layer_norm_input_scale_a, int32_t layer_norm_input_scale_b, + const int16_t* layer_norm_forget_weight_ptr, + int32_t layer_norm_forget_scale_a, int32_t layer_norm_forget_scale_b, + const int16_t* layer_norm_cell_weight_ptr, int32_t layer_norm_cell_scale_a, + int32_t layer_norm_cell_scale_b, + const int16_t* layer_norm_output_weight_ptr, + int32_t layer_norm_output_scale_a, int32_t layer_norm_output_scale_b, + const int32_t* input_gate_bias_ptr, const int32_t* forget_gate_bias_ptr, + const int32_t* cell_gate_bias_ptr, const int32_t* output_gate_bias_ptr, + const int32_t* projection_bias_ptr, const TfLiteLSTMParams* params, + const int32_t* intermediate_scale_a, const int32_t* intermediate_scale_b, + const int32_t* intermediate_zp, int16_t quantized_cell_clip, + int8_t quantized_proj_clip, int n_batch, int n_cell, int n_input, + int n_output, int output_batch_leading_dim, int8_t* output_state_ptr, + int32_t output_state_zp, int16_t* cell_state_ptr, int8_t* output_ptr, + int8_t* scratch0, int8_t* scratch1, int16_t* scratch2, int16_t* scratch3, + int16_t* scratch4, int16_t* scratch5, int16_t* scratch6, + int16_t* scratch7) { + // TODO(b/159066113): scratch5 is unused, remove. + + // ruy::profiler::ScopeLabel label("LstmStepInteger8x8_8"); + // Make named scratch buffers for the different gates. + int16_t* forget_gate_scratch = scratch2; + int16_t* cell_gate_scratch = scratch3; + int16_t* output_gate_scratch = scratch4; + // no-CIFG is not supported here + + // Calculate the forget gate. 
+  CalculateLstmGateInteger8x8_8(
+      input_ptr, input_zp, input_to_forget_weight_ptr,
+      effective_input_to_forget_scale_a, effective_input_to_forget_scale_b,
+      intermediate_scale_a[2], intermediate_scale_b[2], intermediate_zp[4],
+      output_state_ptr, output_state_zp, recurrent_to_forget_weight_ptr,
+      effective_recurrent_to_forget_scale_a,
+      effective_recurrent_to_forget_scale_b, intermediate_scale_a[3],
+      intermediate_scale_b[3], intermediate_zp[5], layer_norm_forget_weight_ptr,
+      layer_norm_forget_scale_a, layer_norm_forget_scale_b,
+      forget_gate_bias_ptr, n_batch, n_input, n_output, n_cell,
+      kTfLiteActSigmoid, forget_gate_scratch, scratch0, scratch1);
+  // Calculate the cell update gate.
+  CalculateLstmGateInteger8x8_8(
+      input_ptr, input_zp, input_to_cell_weight_ptr,
+      effective_input_to_cell_scale_a, effective_input_to_cell_scale_b,
+      intermediate_scale_a[4], intermediate_scale_b[4], intermediate_zp[7],
+      output_state_ptr, output_state_zp, recurrent_to_cell_weight_ptr,
+      effective_recurrent_to_cell_scale_a, effective_recurrent_to_cell_scale_b,
+      intermediate_scale_a[5], intermediate_scale_b[5], intermediate_zp[8],
+      layer_norm_cell_weight_ptr, layer_norm_cell_scale_a,
+      layer_norm_cell_scale_b, cell_gate_bias_ptr, n_batch, n_input, n_output,
+      n_cell, kTfLiteActTanh, cell_gate_scratch, scratch0, scratch1);
+  // Update the cell state.
+  UpdateLstmCellInteger(n_batch, n_cell, cell_state_ptr,
+                        /*cell_state_scale=*/-15, /*input_gate=*/nullptr,
+                        forget_gate_scratch, cell_gate_scratch,
+                        /*use_cifg=*/true, quantized_cell_clip);
+  // Calculate the output gate.
+  // NOTE(review): fixed two index bugs here relative to the previous draft:
+  // the recurrent-to-output intermediate scale was read as
+  // intermediate_scale_a[11], an out-of-bounds access (the array is declared
+  // as int32_t intermediate_scale_a[8] in IntegerLstmParameter), and the
+  // recurrent zero point was intermediate_zp[7] (the cell gate's slot).
+  // The output gate uses slots {6, 7} of the scale arrays and {10, 11} of
+  // intermediate_zp, matching the forget (2,3 / 4,5) and cell (4,5 / 7,8)
+  // gate indexing pattern above.
+  CalculateLstmGateInteger8x8_8(
+      input_ptr, input_zp, input_to_output_weight_ptr,
+      effective_input_to_output_scale_a, effective_input_to_output_scale_b,
+      intermediate_scale_a[6], intermediate_scale_b[6], intermediate_zp[10],
+      output_state_ptr, output_state_zp, recurrent_to_output_weight_ptr,
+      effective_recurrent_to_output_scale_a,
+      effective_recurrent_to_output_scale_b, intermediate_scale_a[7],
+      intermediate_scale_b[7], intermediate_zp[11],
+      layer_norm_output_weight_ptr, layer_norm_output_scale_a,
+      layer_norm_output_scale_b, output_gate_bias_ptr, n_batch, n_input,
+      n_output, n_cell, kTfLiteActSigmoid, output_gate_scratch, scratch0,
+      scratch1);
+  // Update the output state.
+  CalculateLstmOutputInteger8x8_8(
+      n_batch, n_cell, n_output, cell_state_ptr, output_gate_scratch,
+      projection_weight_ptr, effective_proj_scale_a, effective_proj_scale_b,
+      projection_bias_ptr, output_state_zp, quantized_proj_clip,
+      output_state_ptr, scratch2);
+  // Copy output state to the output. Note that unlike float or hybrid, output
+  // is always contiguous.
+ std::copy_n(output_state_ptr, n_batch * n_output, output_ptr); +} + +} // namespace + +// LINT.ThenChange(//tensorflow/lite/tools/optimize/calibration/builtin_logging_ops/lstm.cc) +TfLiteStatus EvalInteger8x8_16( + TfLiteContext* context, TfLiteNode* node, const TfLiteEvalTensor* input, + const TfLiteEvalTensor* input_to_input_weights, + const TfLiteEvalTensor* input_to_forget_weights, + const TfLiteEvalTensor* input_to_cell_weights, + const TfLiteEvalTensor* input_to_output_weights, + const TfLiteEvalTensor* recurrent_to_input_weights, + const TfLiteEvalTensor* recurrent_to_forget_weights, + const TfLiteEvalTensor* recurrent_to_cell_weights, + const TfLiteEvalTensor* recurrent_to_output_weights, + const TfLiteEvalTensor* cell_to_input_weights, + const TfLiteEvalTensor* cell_to_forget_weights, + const TfLiteEvalTensor* cell_to_output_weights, + const TfLiteEvalTensor* input_layer_norm_coefficients, + const TfLiteEvalTensor* forget_layer_norm_coefficients, + const TfLiteEvalTensor* cell_layer_norm_coefficients, + const TfLiteEvalTensor* output_layer_norm_coefficients, + const TfLiteEvalTensor* input_gate_bias, + const TfLiteEvalTensor* forget_gate_bias, + const TfLiteEvalTensor* cell_gate_bias, + const TfLiteEvalTensor* output_gate_bias, + const TfLiteEvalTensor* projection_weights, + const TfLiteEvalTensor* projection_bias, const TfLiteLSTMParams* params, + bool forward_sequence, bool time_major, + const lstm_eval::IntegerLstmParameter* integer_lstm_param, + TfLiteEvalTensor* output_state, TfLiteEvalTensor* cell_state, + TfLiteEvalTensor* output, TfLiteEvalTensor* scratch0, + TfLiteEvalTensor* scratch1, TfLiteEvalTensor* scratch2, + TfLiteEvalTensor* scratch3, TfLiteEvalTensor* scratch4, + TfLiteEvalTensor* scratch5) { + TFLITE_DCHECK(input->dims->size >= 2 && input->dims->size <= 3); + const int n_input = input->dims->data[input->dims->size - 1]; + int max_time, n_batch; + if (input->dims->size == 2) { + max_time = 1; + n_batch = input->dims->data[0]; + } else { 
+ max_time = (time_major) ? input->dims->data[0] : input->dims->data[1]; + n_batch = (time_major) ? input->dims->data[1] : input->dims->data[0]; + } + + // n_cell and n_output will be the same size when there is no projection. + const int n_cell = input_to_output_weights->dims->data[0]; + const int n_output = recurrent_to_output_weights->dims->data[1]; + + // Activation zero point + // TODO@is data.output_zero_point equal to output_state->params.zero_point + // int output_state_zp = output_state->params.zero_point; + int output_state_zp = 0; + + // Get params for time/batch/sequence. + const int output_batch_leading_dim = + output->dims->data[output->dims->size - 1]; + + if (time_major) { + const int input_step = n_batch * n_input; + const int output_step = n_batch * output_batch_leading_dim; + for (int t = 0; t < max_time; t++) { + const int t_rel = t; + int8_t* output_ptr = + tflite::micro::GetTensorData(output) + t_rel * output_step; + const int8_t* input_ptr = + tflite::micro::GetTensorData(input) + t_rel * input_step; + LstmStepInteger8x8_16( + input_ptr, + tflite::micro::GetTensorData(input_to_input_weights), + integer_lstm_param->effective_input_to_input_scale_a, + integer_lstm_param->effective_input_to_input_scale_b, + tflite::micro::GetTensorData(input_to_forget_weights), + integer_lstm_param->effective_input_to_forget_scale_a, + integer_lstm_param->effective_input_to_forget_scale_b, + tflite::micro::GetTensorData(input_to_cell_weights), + integer_lstm_param->effective_input_to_cell_scale_a, + integer_lstm_param->effective_input_to_cell_scale_b, + tflite::micro::GetTensorData(input_to_output_weights), + integer_lstm_param->effective_input_to_output_scale_a, + integer_lstm_param->effective_input_to_output_scale_b, + tflite::micro::GetTensorData(recurrent_to_input_weights), + integer_lstm_param->effective_recurrent_to_input_scale_a, + integer_lstm_param->effective_recurrent_to_input_scale_b, + tflite::micro::GetTensorData(recurrent_to_forget_weights), + 
integer_lstm_param->effective_recurrent_to_forget_scale_a, + integer_lstm_param->effective_recurrent_to_forget_scale_b, + tflite::micro::GetTensorData(recurrent_to_cell_weights), + integer_lstm_param->effective_recurrent_to_cell_scale_a, + integer_lstm_param->effective_recurrent_to_cell_scale_b, + tflite::micro::GetTensorData(recurrent_to_output_weights), + integer_lstm_param->effective_recurrent_to_output_scale_a, + integer_lstm_param->effective_recurrent_to_output_scale_b, + tflite::micro::GetTensorData(cell_to_input_weights), + integer_lstm_param->effective_cell_to_input_scale_a, + integer_lstm_param->effective_cell_to_input_scale_b, + tflite::micro::GetTensorData(cell_to_forget_weights), + integer_lstm_param->effective_cell_to_forget_scale_a, + integer_lstm_param->effective_cell_to_forget_scale_b, + tflite::micro::GetTensorData(cell_to_output_weights), + integer_lstm_param->effective_cell_to_output_scale_a, + integer_lstm_param->effective_cell_to_output_scale_b, + tflite::micro::GetTensorData(projection_weights), + integer_lstm_param->effective_proj_scale_a, + integer_lstm_param->effective_proj_scale_b, + integer_lstm_param->hidden_zp, + integer_lstm_param->effective_hidden_scale_a, + integer_lstm_param->effective_hidden_scale_b, + tflite::micro::GetTensorData(input_layer_norm_coefficients), + integer_lstm_param->layer_norm_input_scale_a, + integer_lstm_param->layer_norm_input_scale_b, + tflite::micro::GetTensorData(forget_layer_norm_coefficients), + integer_lstm_param->layer_norm_forget_scale_a, + integer_lstm_param->layer_norm_forget_scale_b, + tflite::micro::GetTensorData(cell_layer_norm_coefficients), + integer_lstm_param->layer_norm_cell_scale_a, + integer_lstm_param->layer_norm_cell_scale_b, + tflite::micro::GetTensorData(output_layer_norm_coefficients), + integer_lstm_param->layer_norm_output_scale_a, + integer_lstm_param->layer_norm_output_scale_b, + tflite::micro::GetTensorData(input_gate_bias), + tflite::micro::GetTensorData(forget_gate_bias), + 
tflite::micro::GetTensorData(cell_gate_bias), + tflite::micro::GetTensorData(output_gate_bias), + integer_lstm_param->quantized_cell_clip, + integer_lstm_param->quantized_proj_clip, + integer_lstm_param->cell_scale, + integer_lstm_param->input_variance_guard, + integer_lstm_param->forget_variance_guard, + integer_lstm_param->cell_variance_guard, + integer_lstm_param->output_variance_guard, + integer_lstm_param->input_to_forget_effective_bias.get(), + integer_lstm_param->recurrent_to_forget_effective_bias.get(), + integer_lstm_param->input_to_cell_effective_bias.get(), + integer_lstm_param->recurrent_to_cell_effective_bias.get(), + integer_lstm_param->input_to_output_effective_bias.get(), + integer_lstm_param->recurrent_to_output_effective_bias.get(), + integer_lstm_param->input_to_input_effective_bias.get(), + integer_lstm_param->recurrent_to_input_effective_bias.get(), + integer_lstm_param->projection_effective_bias.get(), n_batch, n_cell, + n_input, n_output, tflite::micro::GetTensorData(output_state), + output_state_zp, tflite::micro::GetTensorData(cell_state), + output_ptr, (int16_t*)(scratch0), (int16_t*)(scratch1), + (int16_t*)(scratch2), (int16_t*)(scratch3), (int8_t*)(scratch4), + (int32_t*)(scratch5)); + } + } else { + for (int b = 0; b < n_batch; b++) { + const int input_step = n_input; + const int output_step = output_batch_leading_dim; + for (int t = 0; t < max_time; t++) { + // If this is the forward_sequence, step forward, otherwise step + // backwards. + const int t_rel = forward_sequence ? t : max_time - t - 1; + const int time_offset = b * max_time + t_rel; + const int8_t* input_ptr = tflite::micro::GetTensorData(input) + + time_offset * input_step; + int8_t* output_ptr = tflite::micro::GetTensorData(output) + + time_offset * output_step; + + // Offset the {output,cell}_state pointers to the right batch. 
+ int8_t* output_state_ptr = + tflite::micro::GetTensorData(output_state) + + b * output_batch_leading_dim; + int16_t* cell_state_ptr = + tflite::micro::GetTensorData(cell_state) + b * n_cell; + + LstmStepInteger8x8_16( + input_ptr, + tflite::micro::GetTensorData(input_to_input_weights), + integer_lstm_param->effective_input_to_input_scale_a, + integer_lstm_param->effective_input_to_input_scale_b, + tflite::micro::GetTensorData(input_to_forget_weights), + integer_lstm_param->effective_input_to_forget_scale_a, + integer_lstm_param->effective_input_to_forget_scale_b, + tflite::micro::GetTensorData(input_to_cell_weights), + integer_lstm_param->effective_input_to_cell_scale_a, + integer_lstm_param->effective_input_to_cell_scale_b, + tflite::micro::GetTensorData(input_to_output_weights), + integer_lstm_param->effective_input_to_output_scale_a, + integer_lstm_param->effective_input_to_output_scale_b, + tflite::micro::GetTensorData(recurrent_to_input_weights), + integer_lstm_param->effective_recurrent_to_input_scale_a, + integer_lstm_param->effective_recurrent_to_input_scale_b, + tflite::micro::GetTensorData(recurrent_to_forget_weights), + integer_lstm_param->effective_recurrent_to_forget_scale_a, + integer_lstm_param->effective_recurrent_to_forget_scale_b, + tflite::micro::GetTensorData(recurrent_to_cell_weights), + integer_lstm_param->effective_recurrent_to_cell_scale_a, + integer_lstm_param->effective_recurrent_to_cell_scale_b, + tflite::micro::GetTensorData(recurrent_to_output_weights), + integer_lstm_param->effective_recurrent_to_output_scale_a, + integer_lstm_param->effective_recurrent_to_output_scale_b, + tflite::micro::GetTensorData(cell_to_input_weights), + integer_lstm_param->effective_cell_to_input_scale_a, + integer_lstm_param->effective_cell_to_input_scale_b, + tflite::micro::GetTensorData(cell_to_forget_weights), + integer_lstm_param->effective_cell_to_forget_scale_a, + integer_lstm_param->effective_cell_to_forget_scale_b, + 
tflite::micro::GetTensorData(cell_to_output_weights), + integer_lstm_param->effective_cell_to_output_scale_a, + integer_lstm_param->effective_cell_to_output_scale_b, + tflite::micro::GetTensorData(projection_weights), + integer_lstm_param->effective_proj_scale_a, + integer_lstm_param->effective_proj_scale_b, + integer_lstm_param->hidden_zp, + integer_lstm_param->effective_hidden_scale_a, + integer_lstm_param->effective_hidden_scale_b, + tflite::micro::GetTensorData( + input_layer_norm_coefficients), + integer_lstm_param->layer_norm_input_scale_a, + integer_lstm_param->layer_norm_input_scale_b, + tflite::micro::GetTensorData( + forget_layer_norm_coefficients), + integer_lstm_param->layer_norm_forget_scale_a, + integer_lstm_param->layer_norm_forget_scale_b, + tflite::micro::GetTensorData(cell_layer_norm_coefficients), + integer_lstm_param->layer_norm_cell_scale_a, + integer_lstm_param->layer_norm_cell_scale_b, + tflite::micro::GetTensorData( + output_layer_norm_coefficients), + integer_lstm_param->layer_norm_output_scale_a, + integer_lstm_param->layer_norm_output_scale_b, + tflite::micro::GetTensorData(input_gate_bias), + tflite::micro::GetTensorData(forget_gate_bias), + tflite::micro::GetTensorData(cell_gate_bias), + tflite::micro::GetTensorData(output_gate_bias), + integer_lstm_param->quantized_cell_clip, + integer_lstm_param->quantized_proj_clip, + integer_lstm_param->cell_scale, + integer_lstm_param->input_variance_guard, + integer_lstm_param->forget_variance_guard, + integer_lstm_param->cell_variance_guard, + integer_lstm_param->output_variance_guard, + integer_lstm_param->input_to_forget_effective_bias.get(), + integer_lstm_param->recurrent_to_forget_effective_bias.get(), + integer_lstm_param->input_to_cell_effective_bias.get(), + integer_lstm_param->recurrent_to_cell_effective_bias.get(), + integer_lstm_param->input_to_output_effective_bias.get(), + integer_lstm_param->recurrent_to_output_effective_bias.get(), + 
integer_lstm_param->input_to_input_effective_bias.get(), + integer_lstm_param->recurrent_to_input_effective_bias.get(), + integer_lstm_param->projection_effective_bias.get(), /*n_batch=*/1, + n_cell, n_input, n_output, output_state_ptr, output_state_zp, + cell_state_ptr, output_ptr, (int16_t*)(scratch0), + (int16_t*)(scratch1), (int16_t*)(scratch2), (int16_t*)(scratch3), + (int8_t*)(scratch4), (int32_t*)(scratch5)); + } + } + } + + return kTfLiteOk; +} + +TfLiteStatus EvalInteger8x8_8( + const TfLiteEvalTensor* input, + const TfLiteEvalTensor* input_to_input_weights, + const TfLiteEvalTensor* input_to_forget_weights, + const TfLiteEvalTensor* input_to_cell_weights, + const TfLiteEvalTensor* input_to_output_weights, + const TfLiteEvalTensor* recurrent_to_input_weights, + const TfLiteEvalTensor* recurrent_to_forget_weights, + const TfLiteEvalTensor* recurrent_to_cell_weights, + const TfLiteEvalTensor* recurrent_to_output_weights, + const TfLiteEvalTensor* cell_to_input_weights, + const TfLiteEvalTensor* cell_to_forget_weights, + const TfLiteEvalTensor* cell_to_output_weights, + const TfLiteEvalTensor* input_layer_norm_coefficients, + const TfLiteEvalTensor* forget_layer_norm_coefficients, + const TfLiteEvalTensor* cell_layer_norm_coefficients, + const TfLiteEvalTensor* output_layer_norm_coefficients, + const TfLiteEvalTensor* input_gate_bias, + const TfLiteEvalTensor* forget_gate_bias, + const TfLiteEvalTensor* cell_gate_bias, + const TfLiteEvalTensor* output_gate_bias, + const TfLiteEvalTensor* projection_weights, + const TfLiteEvalTensor* projection_bias, const TfLiteLSTMParams* params, + TfLiteEvalTensor* output_state, TfLiteEvalTensor* cell_state, + TfLiteEvalTensor* output, + const lstm_eval::IntegerLstmParameter* integer_lstm_param, + TfLiteEvalTensor* scratch0, TfLiteEvalTensor* scratch1, + TfLiteEvalTensor* scratch2, TfLiteEvalTensor* scratch3, + TfLiteEvalTensor* scratch4, TfLiteEvalTensor* scratch5, + TfLiteEvalTensor* scratch6, TfLiteEvalTensor* scratch7) 
{ + TFLITE_DCHECK(input->dims->size >= 2 && input->dims->size <= 3); + const int n_input = input->dims->data[input->dims->size - 1]; + int max_time, n_batch; + if (input->dims->size == 2) { + max_time = 1; + n_batch = input->dims->data[0]; + } else { + max_time = input->dims->data[0]; + n_batch = input->dims->data[1]; + } + + // n_cell and n_output will be the same size when there is no projection. + const int n_cell = input_to_output_weights->dims->data[0]; + const int n_output = recurrent_to_output_weights->dims->data[1]; + //@TODO input zero point and output zeropoint + // const int32_t input_zp = input->params.zero_point; + /// const int32_t output_state_zp = output_state->params.zero_point; + const int32_t input_zp = 0; + const int32_t output_state_zp = 0; + + // Get params for time/batch/sequence. + const int output_batch_leading_dim = + output->dims->data[output->dims->size - 1]; + const int input_step = n_batch * n_input; + const int output_step = n_batch * output_batch_leading_dim; + + for (int t = 0; t < max_time; t++) { + const int t_rel = t; + int8_t* output_ptr = + tflite::micro::GetTensorData(output) + t_rel * output_step; + // Input can be int8 asymmetric or int16 symmetric. 
+ const int8_t* input_ptr = + tflite::micro::GetTensorData(input) + t_rel * input_step; + lstm_eval::LstmStepInteger8x8_8( + input_ptr, input_zp, + + tflite::micro::GetTensorData(input_to_input_weights), + integer_lstm_param->effective_input_to_input_scale_a, + integer_lstm_param->effective_input_to_input_scale_b, + + tflite::micro::GetTensorData(input_to_forget_weights), + integer_lstm_param->effective_input_to_forget_scale_a, + integer_lstm_param->effective_input_to_forget_scale_b, + + tflite::micro::GetTensorData(input_to_cell_weights), + integer_lstm_param->effective_input_to_cell_scale_a, + integer_lstm_param->effective_input_to_cell_scale_b, + + tflite::micro::GetTensorData(input_to_output_weights), + integer_lstm_param->effective_input_to_output_scale_a, + integer_lstm_param->effective_input_to_output_scale_b, + + tflite::micro::GetTensorData(recurrent_to_input_weights), + integer_lstm_param->effective_recurrent_to_input_scale_a, + integer_lstm_param->effective_recurrent_to_input_scale_b, + + tflite::micro::GetTensorData(recurrent_to_forget_weights), + integer_lstm_param->effective_recurrent_to_forget_scale_a, + integer_lstm_param->effective_recurrent_to_forget_scale_b, + + tflite::micro::GetTensorData(recurrent_to_cell_weights), + integer_lstm_param->effective_recurrent_to_cell_scale_a, + integer_lstm_param->effective_recurrent_to_cell_scale_b, + + tflite::micro::GetTensorData(recurrent_to_output_weights), + integer_lstm_param->effective_recurrent_to_output_scale_a, + integer_lstm_param->effective_recurrent_to_output_scale_b, + + tflite::micro::GetTensorData(cell_to_input_weights), + integer_lstm_param->effective_cell_to_input_scale_a, + integer_lstm_param->effective_cell_to_input_scale_b, + + tflite::micro::GetTensorData(cell_to_forget_weights), + integer_lstm_param->effective_cell_to_forget_scale_a, + integer_lstm_param->effective_cell_to_forget_scale_b, + + tflite::micro::GetTensorData(cell_to_output_weights), + 
integer_lstm_param->effective_cell_to_output_scale_a, + integer_lstm_param->effective_cell_to_output_scale_b, + + tflite::micro::GetTensorData(projection_weights), + integer_lstm_param->effective_proj_scale_a, + integer_lstm_param->effective_proj_scale_b, + + tflite::micro::GetTensorData(input_layer_norm_coefficients), + integer_lstm_param->layer_norm_input_scale_a, + integer_lstm_param->layer_norm_input_scale_b, + + tflite::micro::GetTensorData(forget_layer_norm_coefficients), + integer_lstm_param->layer_norm_forget_scale_a, + integer_lstm_param->layer_norm_forget_scale_b, + + tflite::micro::GetTensorData(cell_layer_norm_coefficients), + integer_lstm_param->layer_norm_cell_scale_a, + integer_lstm_param->layer_norm_cell_scale_b, + + tflite::micro::GetTensorData(output_layer_norm_coefficients), + integer_lstm_param->layer_norm_output_scale_a, + integer_lstm_param->layer_norm_output_scale_b, + + tflite::micro::GetTensorData(input_gate_bias), + tflite::micro::GetTensorData(forget_gate_bias), + tflite::micro::GetTensorData(cell_gate_bias), + tflite::micro::GetTensorData(output_gate_bias), + tflite::micro::GetTensorData(projection_bias), + + params, integer_lstm_param->intermediate_scale_a, + integer_lstm_param->intermediate_scale_b, + integer_lstm_param->intermediate_zp, + integer_lstm_param->quantized_cell_clip, + integer_lstm_param->quantized_proj_clip, n_batch, n_cell, n_input, + n_output, output_batch_leading_dim, + tflite::micro::GetTensorData(output_state), output_state_zp, + tflite::micro::GetTensorData(cell_state), output_ptr, + tflite::micro::GetTensorData(scratch0), + tflite::micro::GetTensorData(scratch1), + tflite::micro::GetTensorData(scratch2), + tflite::micro::GetTensorData(scratch3), + tflite::micro::GetTensorData(scratch4), + tflite::micro::GetTensorData(scratch5), + tflite::micro::GetTensorData(scratch6), + tflite::micro::GetTensorData(scratch7)); + } + + return kTfLiteOk; +} + +} // namespace lstm_eval +} // namespace micro +} // namespace ops +} // 
namespace tflite diff --git a/tensorflow/lite/micro/kernels/xtensa/lstm_eval.h b/tensorflow/lite/micro/kernels/xtensa/lstm_eval.h new file mode 100644 index 0000000..5dd746a --- /dev/null +++ b/tensorflow/lite/micro/kernels/xtensa/lstm_eval.h @@ -0,0 +1,216 @@ +/* Copyright 2021 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ +#ifndef TENSORFLOW_LITE_KERNELS_LSTM_EVAL_H_ +#define TENSORFLOW_LITE_KERNELS_LSTM_EVAL_H_ + +#include +#include + +#include "tensorflow/lite/c/builtin_op_data.h" +#include "tensorflow/lite/c/common.h" +#include "tensorflow/lite/kernels/internal/portable_tensor_utils.h" +#include "tensorflow/lite/kernels/internal/reference/portable_tensor_utils_impl.h" +#include "tensorflow/lite/micro/kernels/kernel_util.h" + +namespace tflite { +namespace ops { +namespace micro { +namespace lstm_eval { + +#if defined(HIFI5) +void calc_cell_state_without_cifg(int16_t* cell_state, + const int16_t* forget_gate, + const int16_t* cell_gate, + const int16_t* input_gate, int shift1, + int shift2, int clip, int num_elms); + +void calc_cell_state_with_cifg(int16_t* cell_state, const int16_t* forget_gate, + const int16_t* cell_gate, int shift1, int shift2, + int clip, int num_elms); + +void xa_nn_elm_mul_16x16_asym8s(int8_t* output, const int16_t* input_1, + const int16_t* input_2, int32_t multiplier, + int32_t shift, int32_t zero_point, + int num_elms); +#endif // 
defined(HIFI5) + +// Pamameters for integer LSTM. +// Consider split this into two Integer Parameters if more fields are added. +struct IntegerLstmParameter { + int32_t effective_input_to_input_scale_a; + int effective_input_to_input_scale_b; + int32_t effective_recurrent_to_input_scale_a; + int effective_recurrent_to_input_scale_b; + int32_t effective_cell_to_input_scale_a; + int effective_cell_to_input_scale_b; + int32_t effective_input_to_forget_scale_a; + int effective_input_to_forget_scale_b; + int32_t effective_recurrent_to_forget_scale_a; + int effective_recurrent_to_forget_scale_b; + int32_t effective_cell_to_forget_scale_a; + int effective_cell_to_forget_scale_b; + int32_t effective_input_to_cell_scale_a; + int effective_input_to_cell_scale_b; + int32_t effective_recurrent_to_cell_scale_a; + int effective_recurrent_to_cell_scale_b; + int32_t effective_input_to_output_scale_a; + int effective_input_to_output_scale_b; + int32_t effective_recurrent_to_output_scale_a; + int effective_recurrent_to_output_scale_b; + int32_t effective_cell_to_output_scale_a; + int effective_cell_to_output_scale_b; + int32_t effective_proj_scale_a; + int effective_proj_scale_b; + int32_t effective_hidden_scale_a; + int effective_hidden_scale_b; + int32_t layer_norm_input_scale_a; + int layer_norm_input_scale_b; + int32_t layer_norm_forget_scale_a; + int layer_norm_forget_scale_b; + int32_t layer_norm_cell_scale_a; + int layer_norm_cell_scale_b; + int32_t layer_norm_output_scale_a; + int layer_norm_output_scale_b; + // Quantized clip value for cell and projection. Zero value means no clipping. + int16_t quantized_cell_clip; + int8_t quantized_proj_clip; + int32_t hidden_zp; + int32_t cell_scale; + + int32_t input_variance_guard; + int32_t forget_variance_guard; + int32_t cell_variance_guard; + int32_t output_variance_guard; + + // Pre-calculate bias + zero_point * weight. 
+ // Unabled to use temporary tensors since those are used in Prepare() and + // scratch buffer is only allocated after Preapre(). + std::unique_ptr input_to_forget_effective_bias; + std::unique_ptr recurrent_to_forget_effective_bias; + std::unique_ptr input_to_cell_effective_bias; + std::unique_ptr recurrent_to_cell_effective_bias; + std::unique_ptr input_to_output_effective_bias; + std::unique_ptr recurrent_to_output_effective_bias; + std::unique_ptr input_to_input_effective_bias; + std::unique_ptr recurrent_to_input_effective_bias; + std::unique_ptr projection_effective_bias; + + // Scale and zero point for intermediate tensors. + // Used only in the 8x8_8 case. + int32_t intermediate_scale_a[8]; + int32_t intermediate_scale_b[8]; + int32_t intermediate_zp[12]; +}; + +TfLiteStatus EvalFloat(const TfLiteEvalTensor* input, + const TfLiteEvalTensor* input_to_input_weights, + const TfLiteEvalTensor* input_to_forget_weights, + const TfLiteEvalTensor* input_to_cell_weights, + const TfLiteEvalTensor* input_to_output_weights, + const TfLiteEvalTensor* recurrent_to_input_weights, + const TfLiteEvalTensor* recurrent_to_forget_weights, + const TfLiteEvalTensor* recurrent_to_cell_weights, + const TfLiteEvalTensor* recurrent_to_output_weights, + const TfLiteEvalTensor* cell_to_input_weights, + const TfLiteEvalTensor* cell_to_forget_weights, + const TfLiteEvalTensor* cell_to_output_weights, + const TfLiteEvalTensor* input_layer_norm_coefficients, + const TfLiteEvalTensor* forget_layer_norm_coefficients, + const TfLiteEvalTensor* cell_layer_norm_coefficients, + const TfLiteEvalTensor* output_layer_norm_coefficients, + const TfLiteEvalTensor* aux_input, + const TfLiteEvalTensor* aux_input_to_input_weights, + const TfLiteEvalTensor* aux_input_to_forget_weights, + const TfLiteEvalTensor* aux_input_to_cell_weights, + const TfLiteEvalTensor* aux_input_to_output_weights, + const TfLiteEvalTensor* input_gate_bias, + const TfLiteEvalTensor* forget_gate_bias, + const TfLiteEvalTensor* 
cell_gate_bias, + const TfLiteEvalTensor* output_gate_bias, + const TfLiteEvalTensor* projection_weights, + const TfLiteEvalTensor* projection_bias, + const TfLiteLSTMParams* params, bool forward_sequence, + bool time_major, int output_offset, + TfLiteEvalTensor* scratch_buffer, + TfLiteEvalTensor* output_state, + TfLiteEvalTensor* cell_state, TfLiteEvalTensor* output); + +TfLiteStatus EvalInteger8x8_16( + TfLiteContext* context, TfLiteNode* node, const TfLiteEvalTensor* input, + const TfLiteEvalTensor* input_to_input_weights, + const TfLiteEvalTensor* input_to_forget_weights, + const TfLiteEvalTensor* input_to_cell_weights, + const TfLiteEvalTensor* input_to_output_weights, + const TfLiteEvalTensor* recurrent_to_input_weights, + const TfLiteEvalTensor* recurrent_to_forget_weights, + const TfLiteEvalTensor* recurrent_to_cell_weights, + const TfLiteEvalTensor* recurrent_to_output_weights, + const TfLiteEvalTensor* cell_to_input_weights, + const TfLiteEvalTensor* cell_to_forget_weights, + const TfLiteEvalTensor* cell_to_output_weights, + const TfLiteEvalTensor* input_layer_norm_coefficients, + const TfLiteEvalTensor* forget_layer_norm_coefficients, + const TfLiteEvalTensor* cell_layer_norm_coefficients, + const TfLiteEvalTensor* output_layer_norm_coefficients, + const TfLiteEvalTensor* input_gate_bias, + const TfLiteEvalTensor* forget_gate_bias, + const TfLiteEvalTensor* cell_gate_bias, + const TfLiteEvalTensor* output_gate_bias, + const TfLiteEvalTensor* projection_weights, + const TfLiteEvalTensor* projection_bias, const TfLiteLSTMParams* params, + bool forward_sequence, bool time_major, + const lstm_eval::IntegerLstmParameter* integer_lstm_param, + TfLiteEvalTensor* output_state, TfLiteEvalTensor* cell_state, + TfLiteEvalTensor* output, TfLiteEvalTensor* scratch0, + TfLiteEvalTensor* scratch1, TfLiteEvalTensor* scratch2, + TfLiteEvalTensor* scratch3, TfLiteEvalTensor* scratch4, + TfLiteEvalTensor* scratch5); + +TfLiteStatus EvalInteger8x8_8( + const 
TfLiteEvalTensor* input, + const TfLiteEvalTensor* input_to_input_weights, + const TfLiteEvalTensor* input_to_forget_weights, + const TfLiteEvalTensor* input_to_cell_weights, + const TfLiteEvalTensor* input_to_output_weights, + const TfLiteEvalTensor* recurrent_to_input_weights, + const TfLiteEvalTensor* recurrent_to_forget_weights, + const TfLiteEvalTensor* recurrent_to_cell_weights, + const TfLiteEvalTensor* recurrent_to_output_weights, + const TfLiteEvalTensor* cell_to_input_weights, + const TfLiteEvalTensor* cell_to_forget_weights, + const TfLiteEvalTensor* cell_to_output_weights, + const TfLiteEvalTensor* input_layer_norm_coefficients, + const TfLiteEvalTensor* forget_layer_norm_coefficients, + const TfLiteEvalTensor* cell_layer_norm_coefficients, + const TfLiteEvalTensor* output_layer_norm_coefficients, + const TfLiteEvalTensor* input_gate_bias, + const TfLiteEvalTensor* forget_gate_bias, + const TfLiteEvalTensor* cell_gate_bias, + const TfLiteEvalTensor* output_gate_bias, + const TfLiteEvalTensor* projection_weights, + const TfLiteEvalTensor* projection_bias, const TfLiteLSTMParams* params, + TfLiteEvalTensor* output_state, TfLiteEvalTensor* cell_state, + TfLiteEvalTensor* output, + const lstm_eval::IntegerLstmParameter* integer_lstm_param, + TfLiteEvalTensor* scratch0, TfLiteEvalTensor* scratch1, + TfLiteEvalTensor* scratch2, TfLiteEvalTensor* scratch3, + TfLiteEvalTensor* scratch4, TfLiteEvalTensor* scratch5, + TfLiteEvalTensor* scratch6, TfLiteEvalTensor* scratch7); + +} // namespace lstm_eval +} // namespace micro +} // namespace ops +} // namespace tflite +#endif // TENSORFLOW_LITE_KERNELS_LSTM_EVAL_H_ diff --git a/tensorflow/lite/micro/kernels/xtensa/lstm_eval_hifi.cc b/tensorflow/lite/micro/kernels/xtensa/lstm_eval_hifi.cc new file mode 100644 index 0000000..2b49f26 --- /dev/null +++ b/tensorflow/lite/micro/kernels/xtensa/lstm_eval_hifi.cc @@ -0,0 +1,462 @@ +/* Copyright 2021 The TensorFlow Authors. All Rights Reserved. 
+ +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ +#include "tensorflow/lite/c/builtin_op_data.h" +#include "tensorflow/lite/c/common.h" +#include "tensorflow/lite/micro/kernels/xtensa/lstm_eval.h" +#include "tensorflow/lite/micro/kernels/xtensa/xtensa.h" + +namespace tflite { +namespace ops { +namespace micro { +namespace lstm_eval { + +#if defined(HIFI5) +void calc_cell_state_without_cifg(int16_t* cell_state, + const int16_t* forget_gate, + const int16_t* cell_gate, + const int16_t* input_gate, int shift1, + int shift2, int clip, int num_elms) { + const ae_int16x8 *p16x8_cs_r, *p16x8_fg_r; + const ae_int16x8 *p16x8_cg_r, *p16x8_ig_r; + + ae_int16x8* p16x8_cs_w; + + ae_valignx2 align_cs_r, align_fg_r; + ae_valignx2 align_cg_r, align_ig_r; + ae_valignx2 align_cs_w; + + ae_int16x4 d_cs_r_0, d_cs_r_1; + ae_int16x4 d_fg_0, d_fg_1; + ae_int16x4 d_cg_0, d_cg_1; + ae_int16x4 d_ig_0, d_ig_1; + ae_int16x4 d_cs_w_0, d_cs_w_1; + ae_int32x2 d_mul_0, d_mul_1, d_mul_2, d_mul_3; + ae_int32x2 d_mul_4, d_mul_5, d_mul_6, d_mul_7; + + ae_int16x4 d_min, d_max; + + int i = 0; + p16x8_cs_r = (const ae_int16x8*)cell_state; + p16x8_fg_r = (const ae_int16x8*)forget_gate; + p16x8_cg_r = (const ae_int16x8*)cell_gate; + p16x8_ig_r = (const ae_int16x8*)input_gate; + + p16x8_cs_w = (ae_int16x8*)cell_state; + + align_cs_r = AE_LA128_PP(p16x8_cs_r); + align_fg_r = AE_LA128_PP(p16x8_fg_r); + align_cg_r = AE_LA128_PP(p16x8_cg_r); + align_ig_r 
= AE_LA128_PP(p16x8_ig_r); + + align_cs_w = AE_ZALIGN128(); + + if (clip > 0) { + d_min = AE_MOVDA16(-clip); + d_max = AE_MOVDA16(clip); + } else { + d_min = AE_MOVDA16(-32768); + d_max = AE_MOVDA16(32767); + } + +#pragma concurrent + if (shift1 == 15) { + for (i = 0; i < (num_elms >> 3); i++) { + AE_LA16X4X2_IP(d_cs_r_0, d_cs_r_1, align_cs_r, p16x8_cs_r); + AE_LA16X4X2_IP(d_fg_0, d_fg_1, align_fg_r, p16x8_fg_r); + AE_LA16X4X2_IP(d_cg_0, d_cg_1, align_cg_r, p16x8_cg_r); + AE_LA16X4X2_IP(d_ig_0, d_ig_1, align_ig_r, p16x8_ig_r); + + d_cs_w_0 = AE_MULFP16X4RS(d_cs_r_0, d_fg_0); + d_cs_w_1 = AE_MULFP16X4RS(d_cs_r_1, d_fg_1); + + AE_MUL16X4(d_mul_4, d_mul_5, d_cg_0, d_ig_0); + AE_MUL16X4(d_mul_6, d_mul_7, d_cg_1, d_ig_1); + d_mul_4 = AE_SRAA32SYMS(d_mul_4, shift2); + d_mul_5 = AE_SRAA32SYMS(d_mul_5, shift2); + d_mul_6 = AE_SRAA32SYMS(d_mul_6, shift2); + d_mul_7 = AE_SRAA32SYMS(d_mul_7, shift2); + d_cg_0 = AE_SAT16X4(d_mul_4, d_mul_5); + d_cg_1 = AE_SAT16X4(d_mul_6, d_mul_7); + + d_cs_w_0 = AE_ADD16S(d_cs_w_0, d_cg_0); + d_cs_w_1 = AE_ADD16S(d_cs_w_1, d_cg_1); + + AE_MINMAX16(d_cs_w_0, d_min, d_max); + AE_MINMAX16(d_cs_w_1, d_min, d_max); + + AE_SA16X4X2_IP(d_cs_w_0, d_cs_w_1, align_cs_w, p16x8_cs_w); + } + AE_SA128POS_FP(align_cs_w, p16x8_cs_w); // finalize the stream + + const ae_int16 *p16_cs_r, *p16_fg_r; + const ae_int16 *p16_cg_r, *p16_ig_r; + + ae_int16* p16_cs_w; + + p16_cs_r = (const ae_int16*)p16x8_cs_r; + p16_fg_r = (const ae_int16*)p16x8_fg_r; + p16_cg_r = (const ae_int16*)p16x8_cg_r; + p16_ig_r = (const ae_int16*)p16x8_ig_r; + + p16_cs_w = (ae_int16*)p16x8_cs_w; +// residue iterations +#pragma concurrent +#pragma loop_count max = 7 + for (i = 0; i < ((num_elms)&7); i++) { + d_cs_r_0 = p16_cs_r[i]; + d_fg_0 = p16_fg_r[i]; + d_cg_0 = p16_cg_r[i]; + d_ig_0 = p16_ig_r[i]; + + d_cs_w_0 = AE_MULFP16X4RS(d_cs_r_0, d_fg_0); + + AE_MUL16X4(d_mul_0, d_mul_1, d_cg_0, d_ig_0); + d_mul_0 = AE_SRAA32SYMS(d_mul_0, shift2); + d_cg_0 = AE_SAT16X4(d_mul_0, d_mul_1); + + 
d_cs_w_0 = AE_ADD16S(d_cs_w_0, d_cg_0); + AE_MINMAX16(d_cs_w_0, d_min, d_max); + p16_cs_w[i] = d_cs_w_0; + } + } else { + for (i = 0; i < (num_elms >> 3); i++) { + AE_LA16X4X2_IP(d_cs_r_0, d_cs_r_1, align_cs_r, p16x8_cs_r); + AE_LA16X4X2_IP(d_fg_0, d_fg_1, align_fg_r, p16x8_fg_r); + AE_LA16X4X2_IP(d_cg_0, d_cg_1, align_cg_r, p16x8_cg_r); + AE_LA16X4X2_IP(d_ig_0, d_ig_1, align_ig_r, p16x8_ig_r); + + AE_MUL16X4(d_mul_0, d_mul_1, d_cs_r_0, d_fg_0); + AE_MUL16X4(d_mul_2, d_mul_3, d_cs_r_1, d_fg_1); + d_mul_0 = AE_SRAA32SYMS(d_mul_0, shift1); + d_mul_1 = AE_SRAA32SYMS(d_mul_1, shift1); + d_mul_2 = AE_SRAA32SYMS(d_mul_2, shift1); + d_mul_3 = AE_SRAA32SYMS(d_mul_3, shift1); + d_cs_w_0 = AE_SAT16X4(d_mul_0, d_mul_1); + d_cs_w_1 = AE_SAT16X4(d_mul_2, d_mul_3); + + AE_MUL16X4(d_mul_4, d_mul_5, d_cg_0, d_ig_0); + AE_MUL16X4(d_mul_6, d_mul_7, d_cg_1, d_ig_1); + d_mul_4 = AE_SRAA32SYMS(d_mul_4, shift2); + d_mul_5 = AE_SRAA32SYMS(d_mul_5, shift2); + d_mul_6 = AE_SRAA32SYMS(d_mul_6, shift2); + d_mul_7 = AE_SRAA32SYMS(d_mul_7, shift2); + d_cg_0 = AE_SAT16X4(d_mul_4, d_mul_5); + d_cg_1 = AE_SAT16X4(d_mul_6, d_mul_7); + + d_cs_w_0 = AE_ADD16S(d_cs_w_0, d_cg_0); + d_cs_w_1 = AE_ADD16S(d_cs_w_1, d_cg_1); + + AE_MINMAX16(d_cs_w_0, d_min, d_max); + AE_MINMAX16(d_cs_w_1, d_min, d_max); + + AE_SA16X4X2_IP(d_cs_w_0, d_cs_w_1, align_cs_w, p16x8_cs_w); + } + AE_SA128POS_FP(align_cs_w, p16x8_cs_w); // finalize the stream + + const ae_int16 *p16_cs_r, *p16_fg_r; + const ae_int16 *p16_cg_r, *p16_ig_r; + + ae_int16* p16_cs_w; + + p16_cs_r = (const ae_int16*)p16x8_cs_r; + p16_fg_r = (const ae_int16*)p16x8_fg_r; + p16_cg_r = (const ae_int16*)p16x8_cg_r; + p16_ig_r = (const ae_int16*)p16x8_ig_r; + + p16_cs_w = (ae_int16*)p16x8_cs_w; +// residue iterations +#pragma concurrent +#pragma loop_count max = 7 + for (i = 0; i < ((num_elms)&7); i++) { + d_cs_r_0 = p16_cs_r[i]; + d_fg_0 = p16_fg_r[i]; + d_cg_0 = p16_cg_r[i]; + d_ig_0 = p16_ig_r[i]; + + AE_MUL16X4(d_mul_0, d_mul_1, d_cs_r_0, d_fg_0); + 
d_mul_0 = AE_SRAA32SYMS(d_mul_0, shift1); + d_cs_w_0 = AE_SAT16X4(d_mul_0, d_mul_1); + + AE_MUL16X4(d_mul_0, d_mul_1, d_cg_0, d_ig_0); + d_mul_0 = AE_SRAA32SYMS(d_mul_0, shift2); + d_cg_0 = AE_SAT16X4(d_mul_0, d_mul_1); + + d_cs_w_0 = AE_ADD16S(d_cs_w_0, d_cg_0); + AE_MINMAX16(d_cs_w_0, d_min, d_max); + p16_cs_w[i] = d_cs_w_0; + } + } +} + +void calc_cell_state_with_cifg(int16_t* cell_state, const int16_t* forget_gate, + const int16_t* cell_gate, int shift1, int shift2, + int clip, int num_elms) { + const ae_int16x8 *p16x8_cs_r, *p16x8_fg_r; + const ae_int16x8* p16x8_cg_r; + + ae_int16x8* p16x8_cs_w; + + ae_valignx2 align_cs_r, align_fg_r; + ae_valignx2 align_cg_r; + ae_valignx2 align_cs_w; + + ae_int16x4 d_cs_r_0, d_cs_r_1; + ae_int16x4 d_fg_0, d_fg_1; + ae_int16x4 d_cg_0, d_cg_1; + ae_int16x4 d_1mfg_0, d_1mfg_1; + ae_int16x4 d_cs_w_0, d_cs_w_1; + ae_int32x2 d_mul_0, d_mul_1, d_mul_2, d_mul_3; + ae_int32x2 d_mul_4, d_mul_5, d_mul_6, d_mul_7; + + ae_int16x4 d_min, d_max, d_one; + + int i = 0; + p16x8_cs_r = (const ae_int16x8*)cell_state; + p16x8_fg_r = (const ae_int16x8*)forget_gate; + p16x8_cg_r = (const ae_int16x8*)cell_gate; + + p16x8_cs_w = (ae_int16x8*)cell_state; + + align_cs_r = AE_LA128_PP(p16x8_cs_r); + align_fg_r = AE_LA128_PP(p16x8_fg_r); + align_cg_r = AE_LA128_PP(p16x8_cg_r); + + align_cs_w = AE_ZALIGN128(); + + if (clip > 0) { + d_min = AE_MOVDA16(-clip); + d_max = AE_MOVDA16(clip); + } else { + d_min = AE_MOVDA16(-32768); + d_max = AE_MOVDA16(32767); + } + d_one = AE_MOVDA16(32767); + +#pragma concurrent + if (shift1 == 15) { + for (i = 0; i < (num_elms >> 3); i++) { + AE_LA16X4X2_IP(d_cs_r_0, d_cs_r_1, align_cs_r, p16x8_cs_r); + AE_LA16X4X2_IP(d_fg_0, d_fg_1, align_fg_r, p16x8_fg_r); + AE_LA16X4X2_IP(d_cg_0, d_cg_1, align_cg_r, p16x8_cg_r); + + d_cs_w_0 = AE_MULFP16X4RS(d_cs_r_0, d_fg_0); + d_cs_w_1 = AE_MULFP16X4RS(d_cs_r_1, d_fg_1); + + d_1mfg_0 = AE_SUB16S(d_one, d_fg_0); + d_1mfg_1 = AE_SUB16S(d_one, d_fg_1); + AE_MUL16X4(d_mul_4, d_mul_5, 
d_cg_0, d_1mfg_0); + AE_MUL16X4(d_mul_6, d_mul_7, d_cg_1, d_1mfg_1); + d_mul_4 = AE_SRAA32SYMS(d_mul_4, shift2); + d_mul_5 = AE_SRAA32SYMS(d_mul_5, shift2); + d_mul_6 = AE_SRAA32SYMS(d_mul_6, shift2); + d_mul_7 = AE_SRAA32SYMS(d_mul_7, shift2); + d_cg_0 = AE_SAT16X4(d_mul_4, d_mul_5); + d_cg_1 = AE_SAT16X4(d_mul_6, d_mul_7); + + d_cs_w_0 = AE_ADD16S(d_cs_w_0, d_cg_0); + d_cs_w_1 = AE_ADD16S(d_cs_w_1, d_cg_1); + + AE_MINMAX16(d_cs_w_0, d_min, d_max); + AE_MINMAX16(d_cs_w_1, d_min, d_max); + + AE_SA16X4X2_IP(d_cs_w_0, d_cs_w_1, align_cs_w, p16x8_cs_w); + } + AE_SA128POS_FP(align_cs_w, p16x8_cs_w); // finalize the stream + + const ae_int16 *p16_cs_r, *p16_fg_r; + const ae_int16* p16_cg_r; + + ae_int16* p16_cs_w; + + p16_cs_r = (const ae_int16*)p16x8_cs_r; + p16_fg_r = (const ae_int16*)p16x8_fg_r; + p16_cg_r = (const ae_int16*)p16x8_cg_r; + + p16_cs_w = (ae_int16*)p16x8_cs_w; +// residue iterations +#pragma concurrent +#pragma loop_count max = 7 + for (i = 0; i < ((num_elms)&7); i++) { + d_cs_r_0 = p16_cs_r[i]; + d_fg_0 = p16_fg_r[i]; + d_cg_0 = p16_cg_r[i]; + + d_cs_w_0 = AE_MULFP16X4RS(d_cs_r_0, d_fg_0); + + d_1mfg_0 = AE_SUB16S(d_one, d_fg_0); + AE_MUL16X4(d_mul_0, d_mul_1, d_cg_0, d_1mfg_0); + d_mul_0 = AE_SRAA32SYMS(d_mul_0, shift2); + d_cg_0 = AE_SAT16X4(d_mul_0, d_mul_1); + + d_cs_w_0 = AE_ADD16S(d_cs_w_0, d_cg_0); + AE_MINMAX16(d_cs_w_0, d_min, d_max); + p16_cs_w[i] = d_cs_w_0; + } + } else { + for (i = 0; i < (num_elms >> 3); i++) { + AE_LA16X4X2_IP(d_cs_r_0, d_cs_r_1, align_cs_r, p16x8_cs_r); + AE_LA16X4X2_IP(d_fg_0, d_fg_1, align_fg_r, p16x8_fg_r); + AE_LA16X4X2_IP(d_cg_0, d_cg_1, align_cg_r, p16x8_cg_r); + + AE_MUL16X4(d_mul_0, d_mul_1, d_cs_r_0, d_fg_0); + AE_MUL16X4(d_mul_2, d_mul_3, d_cs_r_1, d_fg_1); + d_mul_0 = AE_SRAA32SYMS(d_mul_0, shift1); + d_mul_1 = AE_SRAA32SYMS(d_mul_1, shift1); + d_mul_2 = AE_SRAA32SYMS(d_mul_2, shift1); + d_mul_3 = AE_SRAA32SYMS(d_mul_3, shift1); + d_cs_w_0 = AE_SAT16X4(d_mul_0, d_mul_1); + d_cs_w_1 = AE_SAT16X4(d_mul_2, 
d_mul_3); + + d_1mfg_0 = AE_SUB16S(d_one, d_fg_0); + d_1mfg_1 = AE_SUB16S(d_one, d_fg_1); + AE_MUL16X4(d_mul_4, d_mul_5, d_cg_0, d_1mfg_0); + AE_MUL16X4(d_mul_6, d_mul_7, d_cg_1, d_1mfg_1); + d_mul_4 = AE_SRAA32SYMS(d_mul_4, shift2); + d_mul_5 = AE_SRAA32SYMS(d_mul_5, shift2); + d_mul_6 = AE_SRAA32SYMS(d_mul_6, shift2); + d_mul_7 = AE_SRAA32SYMS(d_mul_7, shift2); + d_cg_0 = AE_SAT16X4(d_mul_4, d_mul_5); + d_cg_1 = AE_SAT16X4(d_mul_6, d_mul_7); + + d_cs_w_0 = AE_ADD16S(d_cs_w_0, d_cg_0); + d_cs_w_1 = AE_ADD16S(d_cs_w_1, d_cg_1); + + AE_MINMAX16(d_cs_w_0, d_min, d_max); + AE_MINMAX16(d_cs_w_1, d_min, d_max); + + AE_SA16X4X2_IP(d_cs_w_0, d_cs_w_1, align_cs_w, p16x8_cs_w); + } + AE_SA128POS_FP(align_cs_w, p16x8_cs_w); // finalize the stream + + const ae_int16 *p16_cs_r, *p16_fg_r; + const ae_int16* p16_cg_r; + + ae_int16* p16_cs_w; + + p16_cs_r = (const ae_int16*)p16x8_cs_r; + p16_fg_r = (const ae_int16*)p16x8_fg_r; + p16_cg_r = (const ae_int16*)p16x8_cg_r; + + p16_cs_w = (ae_int16*)p16x8_cs_w; +// residue iterations +#pragma concurrent +#pragma loop_count max = 7 + for (i = 0; i < ((num_elms)&7); i++) { + d_cs_r_0 = p16_cs_r[i]; + d_fg_0 = p16_fg_r[i]; + d_cg_0 = p16_cg_r[i]; + + AE_MUL16X4(d_mul_0, d_mul_1, d_cs_r_0, d_fg_0); + d_mul_0 = AE_SRAA32SYMS(d_mul_0, shift1); + d_cs_w_0 = AE_SAT16X4(d_mul_0, d_mul_1); + + d_1mfg_0 = AE_SUB16S(d_one, d_fg_0); + AE_MUL16X4(d_mul_0, d_mul_1, d_cg_0, d_1mfg_0); + d_mul_0 = AE_SRAA32SYMS(d_mul_0, shift2); + d_cg_0 = AE_SAT16X4(d_mul_0, d_mul_1); + + d_cs_w_0 = AE_ADD16S(d_cs_w_0, d_cg_0); + AE_MINMAX16(d_cs_w_0, d_min, d_max); + p16_cs_w[i] = d_cs_w_0; + } + } +} + +void xa_nn_elm_mul_16x16_asym8s(int8_t* output, const int16_t* input_1, + const int16_t* input_2, int32_t multiplier, + int32_t shift, int32_t zero_point, + int num_elms) { + ae_int16x8* tmp_input_1; + ae_int16x8* tmp_input_2; + + ae_valignx2 align_src_input_1, align_src_input_2; + ae_valign align_dst_output; + + ae_int16x4 data_a_0, data_a_1; + ae_int16x4 data_b_0, 
data_b_1; + ae_int32x2 data_ab_0, data_ab_1, data_ab_2, data_ab_3; + ae_int32x2 d_multiplier, d_left_shift; + ae_int16x4 d_zp; + ae_int16x4 data_c_0, data_c_1; + ae_int8x8 data_c; + + int i = 0; + int left_shift, right_shift; + tmp_input_1 = (ae_int16x8*)(input_1); + tmp_input_2 = (ae_int16x8*)(input_2); + + align_src_input_1 = AE_LA128_PP((ae_int16x8*)tmp_input_1); + align_src_input_2 = AE_LA128_PP((ae_int16x8*)tmp_input_2); + align_dst_output = AE_ZALIGN64(); // zero alignment reg + + d_multiplier = AE_MOVDA32(multiplier); + d_zp = AE_MOVDA16(zero_point); + + left_shift = shift < 0 ? 0 : shift; + right_shift = shift > 0 ? 0 : -shift; + + d_left_shift = AE_MOVDA32(1 << left_shift); +#pragma concurrent + for (i = 0; i < (num_elms >> 3); i++) { + AE_LA16X4X2_IP(data_a_0, data_a_1, align_src_input_1, tmp_input_1); + AE_LA16X4X2_IP(data_b_0, data_b_1, align_src_input_2, tmp_input_2); + + AE_MUL16X4(data_ab_0, data_ab_1, data_a_0, data_b_0); + AE_MUL16X4(data_ab_2, data_ab_3, data_a_1, data_b_1); + AE_MUL2P32X4(data_ab_0, data_ab_1, data_ab_0, data_ab_1, d_left_shift, + d_left_shift); + AE_MUL2P32X4(data_ab_2, data_ab_3, data_ab_2, data_ab_3, d_left_shift, + d_left_shift); + AE_MULF2P32X4RAS(data_ab_0, data_ab_1, data_ab_0, data_ab_1, d_multiplier, + d_multiplier); + AE_MULF2P32X4RAS(data_ab_2, data_ab_3, data_ab_2, data_ab_3, d_multiplier, + d_multiplier); + data_ab_0 = AE_SRAA32SYMS(data_ab_0, right_shift); + data_ab_1 = AE_SRAA32SYMS(data_ab_1, right_shift); + data_ab_2 = AE_SRAA32SYMS(data_ab_2, right_shift); + data_ab_3 = AE_SRAA32SYMS(data_ab_3, right_shift); + data_c_0 = AE_SAT16X4(data_ab_0, data_ab_1); + data_c_1 = AE_SAT16X4(data_ab_2, data_ab_3); + data_c_0 = AE_SUB16S(data_c_0, d_zp); + data_c_1 = AE_SUB16S(data_c_1, d_zp); + data_c = AE_SAT8X8X16(data_c_0, data_c_1); + AE_SA8X8_IP(data_c, align_dst_output, (ae_int8x8*)output); + } + + AE_SA64POS_FP(align_dst_output, output); // finalize the stream + +// residue iterations +#pragma concurrent +#pragma 
loop_count max = 7 + for (int j = 0; j < ((num_elms)&7); j++) { + AE_L16_IP(data_a_0, (ae_int16*)tmp_input_1, 2); + AE_L16_IP(data_b_0, (ae_int16*)tmp_input_2, 2); + + AE_MUL16X4(data_ab_0, data_ab_1, data_a_0, data_b_0); + data_ab_0 = AE_MULP32X2(data_ab_0, d_left_shift); + data_ab_0 = AE_MULFP32X2RAS(data_ab_0, d_multiplier); + data_ab_0 = AE_SRAA32SYMS(data_ab_0, right_shift); + data_c_0 = AE_SAT16X4(data_ab_0, data_ab_1); + data_c_0 = AE_SUB16S(data_c_0, d_zp); + data_c = AE_SAT8X8X16(data_c_0, data_c_0); + AE_S8_0_IP(data_c, (ae_int8*)output, 1); + } +} +#endif // defined(HIFI5) + +} // namespace lstm_eval +} // namespace micro +} // namespace ops +} // namespace tflite diff --git a/tensorflow/lite/micro/kernels/xtensa/lstm_shared.h b/tensorflow/lite/micro/kernels/xtensa/lstm_shared.h new file mode 100644 index 0000000..4bcff1a --- /dev/null +++ b/tensorflow/lite/micro/kernels/xtensa/lstm_shared.h @@ -0,0 +1,78 @@ +/* Copyright 2021 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ +#ifndef TENSORFLOW_LITE_KERNELS_LSTM_SHARED_H_ +#define TENSORFLOW_LITE_KERNELS_LSTM_SHARED_H_ + +namespace tflite { +namespace ops { +namespace micro { +namespace lstm { +// For full inputs kernel (24-inputs). +// Please note the 20-input full kernel is deprecated and only kept +// here for backward compatibility. 
+namespace full { + +// Input Tensors of size {n_batch, n_input} +constexpr int kInputTensor = 0; + +// Input weight tensors of size: {n_cell, n_input} +constexpr int kInputToInputWeightsTensor = 1; // Optional +constexpr int kInputToForgetWeightsTensor = 2; +constexpr int kInputToCellWeightsTensor = 3; +constexpr int kInputToOutputWeightsTensor = 4; + +// Recurrent weight tensors of size {n_cell, n_output} +constexpr int kRecurrentToInputWeightsTensor = 5; // Optional +constexpr int kRecurrentToForgetWeightsTensor = 6; +constexpr int kRecurrentToCellWeightsTensor = 7; +constexpr int kRecurrentToOutputWeightsTensor = 8; + +// Peephole weights tensors of size {n_cell}, representing a diagonal matrix. +constexpr int kCellToInputWeightsTensor = 9; // Optional +constexpr int kCellToForgetWeightsTensor = 10; // Optional +constexpr int kCellToOutputWeightsTensor = 11; // Optional + +// Gates bias tensors of size {n_cell} +constexpr int kInputGateBiasTensor = 12; // Optional +constexpr int kForgetGateBiasTensor = 13; +constexpr int kCellGateBiasTensor = 14; +constexpr int kOutputGateBiasTensor = 15; + +// Projection weight tensor of size {n_output, n_cell} +constexpr int kProjectionWeightsTensor = 16; // Optional +// Projection bias tensor of size {n_output} +constexpr int kProjectionBiasTensor = 17; // Optional + +// These state tensors are defined as variable tensors, and will be modified by +// this op. +constexpr int kOutputStateTensor = 18; +constexpr int kCellStateTensor = 19; + +// Layer norm coefficient tensors of size {n_cell}, representing a diagonal +// matrix. +constexpr int kInputLayerNormCoefficientsTensor = 20; // Optional +constexpr int kForgetLayerNormCoefficientsTensor = 21; // Optional +constexpr int kCellLayerNormCoefficientsTensor = 22; // Optional +constexpr int kOutputLayerNormCoefficientsTensor = 23; // Optional + +// Output tensors. 
+constexpr int kOutputTensor = 0; +} // namespace full + +} // namespace lstm +} // namespace micro +} // namespace ops +} // namespace tflite +#endif // TENSORFLOW_LITE_KERNELS_LSTM_SHARED_H_ diff --git a/tensorflow/lite/micro/kernels/xtensa/pad.cc b/tensorflow/lite/micro/kernels/xtensa/pad.cc new file mode 100644 index 0000000..bb00edb --- /dev/null +++ b/tensorflow/lite/micro/kernels/xtensa/pad.cc @@ -0,0 +1,275 @@ +/* Copyright 2022 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/ +#include "tensorflow/lite/kernels/internal/reference/pad.h" + +#include + +#include "tensorflow/lite/c/builtin_op_data.h" +#include "tensorflow/lite/c/common.h" +#include "tensorflow/lite/kernels/internal/types.h" +#include "tensorflow/lite/kernels/kernel_util.h" +#include "tensorflow/lite/kernels/op_macros.h" +#include "tensorflow/lite/micro/kernels/kernel_util.h" +#include "tensorflow/lite/micro/kernels/xtensa/xtensa.h" +#include "tensorflow/lite/micro/kernels/xtensa/xtensa_pad.h" +#include "tensorflow/lite/micro/micro_log.h" + +namespace tflite { + +namespace { + +void* Init(TfLiteContext* context, const char* buffer, size_t length) { + TFLITE_DCHECK(context->AllocatePersistentBuffer != nullptr); +#if !defined(VISION_P6) + return context->AllocatePersistentBuffer(context, sizeof(OpDataPad)); +#else + void* data = + context->AllocatePersistentBuffer(context, sizeof(XtensaPadData)); + if (InitXtensaContext()) { + return nullptr; + } + return data; +#endif // defined(VISION_P6) +} + +TfLiteStatus Prepare(TfLiteContext* context, TfLiteNode* node) { + MicroContext* micro_context = GetMicroContext(context); + + TFLITE_DCHECK(node->user_data != nullptr); +#if defined(VISION_P6) + XtensaPadData* op_data_xtensa = static_cast(node->user_data); + OpDataPad* data = &op_data_xtensa->reference_op_data; +#else + OpDataPad* data = static_cast(node->user_data); +#endif + + TF_LITE_ENSURE(context, NumInputs(node) == 2 || NumInputs(node) == 3); + TF_LITE_ENSURE_EQ(context, NumOutputs(node), 1); + + TfLiteTensor* input = + micro_context->AllocateTempInputTensor(node, /*index=*/0); + TF_LITE_ENSURE(context, input != nullptr); + TfLiteTensor* paddings = + micro_context->AllocateTempInputTensor(node, /*index=*/1); + TF_LITE_ENSURE(context, paddings != nullptr); + TfLiteTensor* constant_values = + NumInputs(node) == 3 + ? 
micro_context->AllocateTempInputTensor(node, /*index=*/2) + : nullptr; + TfLiteTensor* output = + micro_context->AllocateTempOutputTensor(node, /*index=*/0); + TF_LITE_ENSURE(context, output != nullptr); + + TF_LITE_ENSURE_EQ(context, input->type, output->type); + + // Current implementations rely on the inputs being <= 4D. + TF_LITE_ENSURE(context, NumDimensions(input) <= + reference_ops::PadKernelMaxDimensionCount()); + + if (constant_values != nullptr) { + TF_LITE_ENSURE_EQ(context, input->type, constant_values->type); + // Ensure that constant_values is a scalar. + TF_LITE_ENSURE_EQ(context, NumElements(constant_values), 1); + } + + // There must be a pair of paddings for each output dimension. + TF_LITE_ENSURE_EQ(context, GetTensorShape(paddings).FlatSize(), + output->dims->size * 2); + + // On Micro, outputs must be properly sized by the converter. + // NOTE: This data is only available because the paddings buffer is stored in + // the flatbuffer: + TF_LITE_ENSURE(context, IsConstantTensor(paddings)); + const int32_t* paddings_data = GetTensorData(paddings); + for (int i = 0; i < output->dims->size; i++) { + int output_dim = output->dims->data[i]; + int expected_dim = + input->dims->data[i] + paddings_data[i * 2] + paddings_data[i * 2 + 1]; + TF_LITE_ENSURE_EQ(context, output_dim, expected_dim); + } + + // Calculate OpDataPad: + data->params.resizing_category = ResizingCategory::kGenericResize; + const int paddings_total = GetTensorShape(paddings).FlatSize(); + if (paddings_total == 8 && (paddings_data[0] == 0 && paddings_data[1] == 0) && + (paddings_data[6] == 0 && paddings_data[7] == 0)) { + data->params.resizing_category = ResizingCategory::kImageStyle; + } + + const int num_input_dimensions = NumDimensions(input); + data->params.left_padding_count = num_input_dimensions; + data->params.right_padding_count = num_input_dimensions; + + for (int idx = num_input_dimensions - 1; idx >= 0; --idx) { + data->params.left_padding[idx] = paddings_data[idx * 2]; + 
data->params.right_padding[idx] = paddings_data[idx * 2 + 1]; + } + + if (input->type == kTfLiteInt8) { + if (constant_values == nullptr) { + // Quantized Pad requires that 0 is represented in the quantized + // range. + TF_LITE_ENSURE(context, output->params.zero_point >= + std::numeric_limits::min()); + TF_LITE_ENSURE(context, output->params.zero_point <= + std::numeric_limits::max()); + } else { + // Quantized Pad requires that 'constant_values' is represented in the + // same quantized range as the input and output tensors. + TF_LITE_ENSURE_EQ(context, output->params.zero_point, + constant_values->params.zero_point); + TF_LITE_ENSURE_EQ(context, static_cast(output->params.scale), + static_cast(constant_values->params.scale)); + } + data->output_zero_point = output->params.zero_point; + } + + micro_context->DeallocateTempTfLiteTensor(input); + micro_context->DeallocateTempTfLiteTensor(paddings); + if (constant_values != nullptr) { + micro_context->DeallocateTempTfLiteTensor(constant_values); + } + micro_context->DeallocateTempTfLiteTensor(output); +#if defined(VISION_P6) + TF_LITE_ENSURE_OK(context, PadPrepareVision(context, node)); +#endif // VISION_P6 + + return kTfLiteOk; +} + +TfLiteStatus Eval(TfLiteContext* context, TfLiteNode* node) { + TFLITE_DCHECK(node->user_data != nullptr); +#if defined(VISION_P6) + XtensaPadData* op_data_xtensa = static_cast(node->user_data); + OpDataPad* data = &op_data_xtensa->reference_op_data; +#else + OpDataPad* data = static_cast(node->user_data); +#endif + + const TfLiteEvalTensor* input = + tflite::micro::GetEvalInput(context, node, /*index=*/0); + const TfLiteEvalTensor* constant_values = + NumInputs(node) == 3 + ? tflite::micro::GetEvalInput(context, node, /*index=*/2) + : nullptr; + TfLiteEvalTensor* output = + tflite::micro::GetEvalOutput(context, node, /*index=*/0); + + switch (input->type) { + case kTfLiteFloat32: { + float pad_value = + constant_values == nullptr + ? 
0.f + : *tflite::micro::GetTensorData(constant_values); + if (data->params.resizing_category == ResizingCategory::kImageStyle) { + reference_ops::PadImageStyle( + data->params, tflite::micro::GetTensorShape(input), + tflite::micro::GetTensorData(input), &pad_value, + tflite::micro::GetTensorShape(output), + tflite::micro::GetTensorData(output)); + } else { + reference_ops::Pad(data->params, tflite::micro::GetTensorShape(input), + tflite::micro::GetTensorData(input), + &pad_value, tflite::micro::GetTensorShape(output), + tflite::micro::GetTensorData(output)); + } + } break; + case kTfLiteInt8: { +#if defined(VISION_P6) + PadEvalVision(*op_data_xtensa, input, output); +#else + int8_t pad_value; + if (constant_values == nullptr) { + pad_value = static_cast(data->output_zero_point); + } else { + pad_value = *tflite::micro::GetTensorData(constant_values); + } + if (data->params.resizing_category == ResizingCategory::kImageStyle) { + reference_ops::PadImageStyle( + data->params, tflite::micro::GetTensorShape(input), + tflite::micro::GetTensorData(input), &pad_value, + tflite::micro::GetTensorShape(output), + tflite::micro::GetTensorData(output)); + } else { + reference_ops::Pad(data->params, tflite::micro::GetTensorShape(input), + tflite::micro::GetTensorData(input), + &pad_value, tflite::micro::GetTensorShape(output), + tflite::micro::GetTensorData(output)); + } +#endif + } break; + case kTfLiteInt16: { + int16_t pad_value = + constant_values == nullptr + ? 
0 + : *tflite::micro::GetTensorData(constant_values); +#if defined(HIFI4) + /* NNLib currently only supports up to 4D input tensors */ + if (tflite::micro::GetTensorShape(input).DimensionsCount() == 4) { + const TfLiteEvalTensor* paddings = + tflite::micro::GetEvalInput(context, node, /*index=*/1); + int32_t err = xa_nn_pad_16_16( + tflite::micro::GetTensorData(output), + tflite::micro::GetTensorShape(output).DimsData(), + tflite::micro::GetTensorData(input), + tflite::micro::GetTensorShape(input).DimsData(), + tflite::micro::GetTensorData(paddings), + tflite::micro::GetTensorShape(paddings).DimsData(), + tflite::micro::GetTensorShape(output).DimensionsCount(), + tflite::micro::GetTensorShape(input).DimensionsCount(), + tflite::micro::GetTensorShape(paddings).DimensionsCount(), + pad_value); + if (err != 0) return kTfLiteError; + } else { +#endif // defined(HIFI4) + reference_ops::Pad(data->params, tflite::micro::GetTensorShape(input), + tflite::micro::GetTensorData(input), + &pad_value, tflite::micro::GetTensorShape(output), + tflite::micro::GetTensorData(output)); +#if defined(HIFI4) + } +#endif // defined(HIFI4) + } break; + case kTfLiteInt32: { + int32_t pad_value = + constant_values == nullptr + ? 0 + : *tflite::micro::GetTensorData(constant_values); + reference_ops::Pad(data->params, tflite::micro::GetTensorShape(input), + tflite::micro::GetTensorData(input), + &pad_value, tflite::micro::GetTensorShape(output), + tflite::micro::GetTensorData(output)); + } break; + default: + + MicroPrintf("Type %s not currently supported by Pad.", + TfLiteTypeGetName(input->type)); + return kTfLiteError; + } + return kTfLiteOk; +} + +} // namespace + +TFLMRegistration Register_PAD() { + return tflite::micro::RegisterOp(Init, Prepare, Eval); +} + +// Also register Pad as PadV2. 
+TFLMRegistration Register_PADV2() { + return tflite::micro::RegisterOp(Init, Prepare, Eval); +} + +} // namespace tflite diff --git a/tensorflow/lite/micro/kernels/xtensa/pad_vision.cc b/tensorflow/lite/micro/kernels/xtensa/pad_vision.cc new file mode 100644 index 0000000..f15e2f2 --- /dev/null +++ b/tensorflow/lite/micro/kernels/xtensa/pad_vision.cc @@ -0,0 +1,107 @@ +/* Copyright 2021 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#if defined(VISION_P6) + +#include + +#include "tensorflow/lite/c/builtin_op_data.h" +#include "tensorflow/lite/c/common.h" +#include "tensorflow/lite/kernels/internal/common.h" +#include "tensorflow/lite/kernels/internal/reference/reduce.h" +#include "tensorflow/lite/kernels/internal/tensor_ctypes.h" +#include "tensorflow/lite/kernels/kernel_util.h" +#include "tensorflow/lite/micro/kernels/kernel_util.h" +#include "tensorflow/lite/micro/kernels/xtensa/xtensa.h" +#include "tensorflow/lite/micro/kernels/xtensa/xtensa_pad.h" + +namespace tflite { + +inline void OperandDims4D(uint32_t* dims, TfLiteTensor* opnd) { + for (int i = NumDimensions(opnd) - 1, j = 0; i >= 0; i--, j++) { + dims[j] = SizeOfDimension(opnd, i); + } + return; +} + +TfLiteStatus PadPrepareVision(TfLiteContext* context, TfLiteNode* node) { + TFLITE_DCHECK(node->user_data != nullptr); + XtensaPadData* data = reinterpret_cast(node->user_data); + + MicroContext* 
micro_context = GetMicroContext(context); + TfLiteTensor* input = + micro_context->AllocateTempInputTensor(node, /*index=*/0); + TfLiteTensor* paddings = + micro_context->AllocateTempInputTensor(node, /*index=*/1); + TfLiteTensor* constant_values = + NumInputs(node) == 3 + ? micro_context->AllocateTempInputTensor(node, /*index=*/2) + : nullptr; + TfLiteTensor* output = + micro_context->AllocateTempOutputTensor(node, /*index=*/0); + + uint32_t inputDims[4] = {1, 1, 1, 1}; + OperandDims4D(inputDims, input); + + const int32_t* paddings_data = GetTensorData(paddings); + uint32_t inputRank = NumDimensions(input); + + uint32_t context_size = 0; + uint32_t status = xiPadGetMemReqd_Context(&context_size); + TFLITE_DCHECK(status == 0); + if (context_size) { + void* context_data = + context->AllocatePersistentBuffer(context, context_size); + if (context_data == nullptr) { + return kTfLiteError; + } + data->p_context = reinterpret_cast(context_data); + data->context_size = context_size; + } + int8_t pad_value; + if (constant_values == nullptr) { + pad_value = static_cast(data->reference_op_data.output_zero_point); + } else { + pad_value = *constant_values->data.int8; + } + + status = xiPadSetContext(data->p_context, data->context_size, inputDims, + paddings_data, pad_value, inputRank); + + if (status) { + return kTfLiteError; + } + + micro_context->DeallocateTempTfLiteTensor(input); + micro_context->DeallocateTempTfLiteTensor(paddings); + if (constant_values != nullptr) { + micro_context->DeallocateTempTfLiteTensor(constant_values); + } + micro_context->DeallocateTempTfLiteTensor(output); + return kTfLiteOk; +} +TfLiteStatus PadEvalVision(const XtensaPadData& data, + const TfLiteEvalTensor* input, + TfLiteEvalTensor* output) { + const uint32_t input_size = NumElements(input->dims); + const uint32_t output_size = NumElements(output->dims); + + xiPad(data.p_context, data.context_size, + const_cast(tflite::micro::GetTensorData(input)), + input_size, 
tflite::micro::GetTensorData(output), output_size); + return kTfLiteOk; +} +} // namespace tflite +#endif // defined(VISION_P6) diff --git a/tensorflow/lite/micro/kernels/xtensa/pooling.cc b/tensorflow/lite/micro/kernels/xtensa/pooling.cc new file mode 100644 index 0000000..172d058 --- /dev/null +++ b/tensorflow/lite/micro/kernels/xtensa/pooling.cc @@ -0,0 +1,165 @@ +/* Copyright 2023 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#include "tensorflow/lite/micro/kernels/pooling.h" + +#include "tensorflow/lite/c/builtin_op_data.h" +#include "tensorflow/lite/kernels/internal/tensor_ctypes.h" +#include "tensorflow/lite/kernels/kernel_util.h" +#include "tensorflow/lite/micro/kernels/kernel_util.h" +#include "tensorflow/lite/micro/kernels/xtensa/xtensa.h" +#include "tensorflow/lite/micro/kernels/xtensa/xtensa_pooling.h" +#include "tensorflow/lite/micro/micro_log.h" + +namespace tflite { + +namespace { + +TfLiteStatus AverageEval(TfLiteContext* context, TfLiteNode* node) { + TFLITE_DCHECK(node->builtin_data != nullptr); + auto* params = reinterpret_cast(node->builtin_data); + + TFLITE_DCHECK(node->user_data != nullptr); +#if defined(HIFI5) + auto* op_data = static_cast(node->user_data); + const OpDataPooling* reference_op_data = &(op_data->reference_op_data); +#else + const OpDataPooling* reference_op_data = + static_cast(node->user_data); +#endif + + const 
TfLiteEvalTensor* input = + micro::GetEvalInput(context, node, kPoolingInputTensor); + TfLiteEvalTensor* output = + micro::GetEvalOutput(context, node, kPoolingOutputTensor); + + // Inputs and outputs share the same type, guaranteed by the converter. + switch (input->type) { + case kTfLiteFloat32: { + AveragePoolingEvalFloat(context, node, params, reference_op_data, input, + output); + break; + } + case kTfLiteInt8: { +#if defined(HIFI5) + AverageEvalQuantizedHifi(context, node, params, op_data, input, output); +#elif defined(VISION_P6) + const auto& op_data = + *(reinterpret_cast(node->user_data)); + PoolEvalVision(context, node, *params, op_data, input, output); +#else + AveragePoolingEvalQuantized(context, node, params, + reference_op_data, input, output); +#endif + break; + } + case kTfLiteInt16: { + AveragePoolingEvalQuantized(context, node, params, + reference_op_data, input, output); + break; + } + default: { + MicroPrintf("Input type %s is not currently supported", + TfLiteTypeGetName(input->type)); + return kTfLiteError; + } + } + return kTfLiteOk; +} + +TfLiteStatus MaxEval(TfLiteContext* context, TfLiteNode* node) { + TFLITE_DCHECK(node->builtin_data != nullptr); + auto* params = reinterpret_cast(node->builtin_data); + + TFLITE_DCHECK(node->user_data != nullptr); +#if defined(HIFI5) + auto* op_data = static_cast(node->user_data); + const OpDataPooling* reference_op_data = &(op_data->reference_op_data); +#else + const OpDataPooling* reference_op_data = + static_cast(node->user_data); +#endif + + const TfLiteEvalTensor* input = + micro::GetEvalInput(context, node, kPoolingInputTensor); + TfLiteEvalTensor* output = + micro::GetEvalOutput(context, node, kPoolingOutputTensor); + + switch (input->type) { + case kTfLiteFloat32: { + MaxPoolingEvalFloat(context, node, params, reference_op_data, input, + output); + break; + } + case kTfLiteInt8: { +#if defined(HIFI5) + MaxEvalQuantizedHifi(context, node, params, op_data, input, output); +#elif defined(VISION_P6) + 
const auto& op_data = + *(reinterpret_cast(node->user_data)); + PoolEvalVision(context, node, *params, op_data, input, output); +#else + MaxPoolingEvalQuantized(context, node, params, reference_op_data, + input, output); +#endif + break; + } + case kTfLiteInt16: { + MaxPoolingEvalQuantized(context, node, params, reference_op_data, + input, output); + break; + } + default: { + MicroPrintf("Type %s not currently supported.", + TfLiteTypeGetName(input->type)); + return kTfLiteError; + } + } + return kTfLiteOk; +} + +} // namespace + +TFLMRegistration Register_AVERAGE_POOL_2D() { +#if defined(HIFI5) + return tflite::micro::RegisterOp(XtensaPoolingInit, AveragePrepareHifi, + AverageEval); +#elif defined(VISION_P6) + return tflite::micro::RegisterOp(XtensaPoolingInit, AvgPoolingPrepareVision, + AverageEval); +#else + return tflite::micro::RegisterOp(XtensaPoolingInit, PoolingPrepare, + AverageEval); +#endif +} + +TFLMRegistration Register_MAX_POOL_2D() { +#if defined(HIFI5) + return tflite::micro::RegisterOp(XtensaPoolingInit, MaxPrepareHifi, MaxEval); +#elif defined(VISION_P6) + return tflite::micro::RegisterOp(XtensaPoolingInit, MaxPoolingPrepareVision, + MaxEval); +#else + return tflite::micro::RegisterOp(XtensaPoolingInit, PoolingPrepare, MaxEval); +#endif +} + +TFLMRegistration Register_AVERAGE_POOL_2D_INT16() { + return Register_AVERAGE_POOL_2D(); +} + +TFLMRegistration Register_MAX_POOL_2D_INT16() { return Register_MAX_POOL_2D(); } + +} // namespace tflite diff --git a/tensorflow/lite/micro/kernels/xtensa/pooling_int8.cc b/tensorflow/lite/micro/kernels/xtensa/pooling_int8.cc new file mode 100644 index 0000000..84246d6 --- /dev/null +++ b/tensorflow/lite/micro/kernels/xtensa/pooling_int8.cc @@ -0,0 +1,338 @@ +/* Copyright 2023 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. 
+You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#include "tensorflow/lite/c/builtin_op_data.h" +#include "tensorflow/lite/kernels/internal/tensor_ctypes.h" +#include "tensorflow/lite/kernels/kernel_util.h" +#include "tensorflow/lite/micro/kernels/kernel_util.h" +#include "tensorflow/lite/micro/kernels/pooling.h" +#include "tensorflow/lite/micro/kernels/xtensa/xtensa.h" +#include "tensorflow/lite/micro/kernels/xtensa/xtensa_pooling.h" +#include "tensorflow/lite/micro/micro_log.h" + +namespace tflite { + +namespace { + +TfLiteStatus AverageEvalInt8(TfLiteContext* context, TfLiteNode* node) { + TFLITE_DCHECK(node->builtin_data != nullptr); + auto* params = reinterpret_cast(node->builtin_data); + + TFLITE_DCHECK(node->user_data != nullptr); + + const TfLiteEvalTensor* input = + micro::GetEvalInput(context, node, kPoolingInputTensor); + TfLiteEvalTensor* output = + micro::GetEvalOutput(context, node, kPoolingOutputTensor); + + // Inputs and outputs share the same type, guaranteed by the converter. 
+ switch (input->type) { + case kTfLiteInt8: { +#if defined(HIFI5) + auto* op_data = static_cast(node->user_data); + AverageEvalQuantizedHifi(context, node, params, op_data, input, output); +#elif defined(VISION_P6) + const auto& op_data = + *(reinterpret_cast(node->user_data)); + PoolEvalVision(context, node, *params, op_data, input, output); +#else + const OpDataPooling* reference_op_data = + static_cast(node->user_data); + AveragePoolingEvalQuantized(context, node, params, + reference_op_data, input, output); +#endif + break; + } + default: { + MicroPrintf("Input type %s is not currently supported", + TfLiteTypeGetName(input->type)); + return kTfLiteError; + } + } + return kTfLiteOk; +} + +TfLiteStatus MaxEvalInt8(TfLiteContext* context, TfLiteNode* node) { + TFLITE_DCHECK(node->builtin_data != nullptr); + auto* params = reinterpret_cast(node->builtin_data); + + TFLITE_DCHECK(node->user_data != nullptr); + + const TfLiteEvalTensor* input = + micro::GetEvalInput(context, node, kPoolingInputTensor); + TfLiteEvalTensor* output = + micro::GetEvalOutput(context, node, kPoolingOutputTensor); + + switch (input->type) { + case kTfLiteInt8: { +#if defined(HIFI5) + auto* op_data = static_cast(node->user_data); + MaxEvalQuantizedHifi(context, node, params, op_data, input, output); +#elif defined(VISION_P6) + const auto& op_data = + *(reinterpret_cast(node->user_data)); + PoolEvalVision(context, node, *params, op_data, input, output); +#else + const OpDataPooling* reference_op_data = + static_cast(node->user_data); + MaxPoolingEvalQuantized(context, node, params, reference_op_data, + input, output); +#endif + break; + } + default: { + MicroPrintf("Type %s not currently supported.", + TfLiteTypeGetName(input->type)); + return kTfLiteError; + } + } + return kTfLiteOk; +} + +} // namespace + +#if defined(HIFI5) + +TfLiteStatus AveragePrepareHifi(TfLiteContext* context, TfLiteNode* node) { + TF_LITE_ENSURE_STATUS(PoolingPrepare(context, node)); + MicroContext* micro_context = 
GetMicroContext(context); + TfLiteTensor* input = + micro_context->AllocateTempInputTensor(node, kPoolingInputTensor); + + if (input->type == kTfLiteInt8) { + const RuntimeShape& input_shape = GetTensorShape(input); + TfLiteTensor* output = + micro_context->AllocateTempInputTensor(node, kPoolingOutputTensor); + const RuntimeShape& output_shape = GetTensorShape(output); + micro_context->DeallocateTempTfLiteTensor(output); + + const int depth = MatchingDim(input_shape, 3, output_shape, 3); + const int input_height = input_shape.Dims(1); + const int input_width = input_shape.Dims(2); + const int output_height = output_shape.Dims(1); + const int output_width = output_shape.Dims(2); + + auto* params = reinterpret_cast(node->builtin_data); + auto* data = static_cast(node->user_data); + + int required_scratch = xa_nn_avgpool_getsize( + depth, PREC_8, PREC_8, input_height, input_width, params->filter_height, + params->filter_width, + params->stride_width, // x_stride, + params->stride_height, // y_stride, + data->reference_op_data.padding.width, // x_padding, + data->reference_op_data.padding.height, // y_padding, + output_height, output_width, 0 /*NHWC input */, 0 /* NHWC output */); + + if (required_scratch <= 0) { + MicroPrintf("Averagepool: xa_nn_avgpool_getsize failed"); + return kTfLiteError; + } + + TF_LITE_ENSURE_STATUS(context->RequestScratchBufferInArena( + context, required_scratch, &(data->scratch_tensor_index))); + } + + micro_context->DeallocateTempTfLiteTensor(input); + return kTfLiteOk; +} + +TfLiteStatus AverageEvalQuantizedHifi(TfLiteContext* context, + const TfLiteNode* node, + const TfLitePoolParams* params, + const XtensaOpDataPooling* data, + const TfLiteEvalTensor* input, + TfLiteEvalTensor* output) { + TFLITE_DCHECK(input->type == kTfLiteInt8); + + const RuntimeShape& input_shape = tflite::micro::GetTensorShape(input); + const RuntimeShape& output_shape = tflite::micro::GetTensorShape(output); + const int batches = MatchingDim(input_shape, 0, 
output_shape, 0); + const int depth = MatchingDim(input_shape, 3, output_shape, 3); + const int input_height = input_shape.Dims(1); + const int input_width = input_shape.Dims(2); + const int output_height = output_shape.Dims(1); + const int output_width = output_shape.Dims(2); + + void* p_scratch = static_cast( + context->GetScratchBuffer(context, data->scratch_tensor_index)); + + const int8_t* inp_data_ptr = tflite::micro::GetTensorData(input); + int8_t* out_data_ptr = tflite::micro::GetTensorData(output); + + for (int batch = 0; batch < batches; ++batch) { + TF_LITE_ENSURE_EQ( + context, + xa_nn_avgpool_8( + &out_data_ptr[output_height * output_width * depth * batch], + const_cast( + &inp_data_ptr[output_height * output_width * depth * batch]), + input_height, input_width, depth, params->filter_height, + params->filter_width, params->stride_width, params->stride_height, + data->reference_op_data.padding.width, + data->reference_op_data.padding.height, output_height, output_width, + 0, 0, p_scratch), + 0); + } + + const int out_length = batches * output_height * output_width * depth; + TF_LITE_ENSURE_EQ( + context, + xa_nn_vec_activation_min_max_8_8( + out_data_ptr, out_data_ptr, data->reference_op_data.activation_min, + data->reference_op_data.activation_max, out_length), + 0); + + return kTfLiteOk; +} + +TfLiteStatus MaxPrepareHifi(TfLiteContext* context, TfLiteNode* node) { + TF_LITE_ENSURE_STATUS(PoolingPrepare(context, node)); + + MicroContext* micro_context = GetMicroContext(context); + + TfLiteTensor* input = + micro_context->AllocateTempInputTensor(node, kPoolingInputTensor); + + if (input->type == kTfLiteInt8) { + auto* params = reinterpret_cast(node->builtin_data); + auto* data = static_cast(node->user_data); + + const RuntimeShape& input_shape = GetTensorShape(input); + TfLiteTensor* output = + micro_context->AllocateTempOutputTensor(node, kPoolingOutputTensor); + const RuntimeShape& output_shape = GetTensorShape(output); + 
micro_context->DeallocateTempTfLiteTensor(output); + + const int depth = MatchingDim(input_shape, 3, output_shape, 3); + const int input_height = input_shape.Dims(1); + const int input_width = input_shape.Dims(2); + const int output_height = output_shape.Dims(1); + const int output_width = output_shape.Dims(2); + + int required_scratch = xa_nn_maxpool_getsize( + depth, PREC_8, PREC_8, input_height, input_width, params->filter_height, + params->filter_width, + params->stride_width, // x_stride, + params->stride_height, // y_stride, + data->reference_op_data.padding.width, // x_padding, + data->reference_op_data.padding.height, // y_padding, + output_height, output_width, 0 /* NHWC inpput */, 0 /* NHWC output */); + + if (required_scratch <= 0) { + MicroPrintf("Maxpool: xa_nn_maxpool_getsize failed"); + return kTfLiteError; + } + + TF_LITE_ENSURE_STATUS(context->RequestScratchBufferInArena( + context, required_scratch, &(data->scratch_tensor_index))); + } + + micro_context->DeallocateTempTfLiteTensor(input); + return kTfLiteOk; +} + +TfLiteStatus MaxEvalQuantizedHifi(TfLiteContext* context, TfLiteNode* node, + TfLitePoolParams* params, + const XtensaOpDataPooling* data, + const TfLiteEvalTensor* input, + TfLiteEvalTensor* output) { + const RuntimeShape& input_shape = tflite::micro::GetTensorShape(input); + const RuntimeShape& output_shape = tflite::micro::GetTensorShape(output); + const int batches = MatchingDim(input_shape, 0, output_shape, 0); + const int depth = MatchingDim(input_shape, 3, output_shape, 3); + const int input_height = input_shape.Dims(1); + const int input_width = input_shape.Dims(2); + const int output_height = output_shape.Dims(1); + const int output_width = output_shape.Dims(2); + + void* p_scratch = static_cast( + context->GetScratchBuffer(context, data->scratch_tensor_index)); + + const int8_t* inp_data_ptr = tflite::micro::GetTensorData(input); + int8_t* out_data_ptr = tflite::micro::GetTensorData(output); + + for (int batch = 0; batch < 
batches; ++batch) { + TF_LITE_ENSURE_EQ( + context, + xa_nn_maxpool_8( + &out_data_ptr[output_height * output_width * depth * batch], + const_cast( + &inp_data_ptr[output_height * output_width * depth * batch]), + input_height, input_width, depth, params->filter_height, + params->filter_width, params->stride_width, params->stride_height, + data->reference_op_data.padding.width, + data->reference_op_data.padding.height, output_height, output_width, + 0, 0, p_scratch), + 0); + } + + const int out_length = batches * output_height * output_width * depth; + TF_LITE_ENSURE_EQ( + context, + xa_nn_vec_activation_min_max_8_8( + out_data_ptr, out_data_ptr, data->reference_op_data.activation_min, + data->reference_op_data.activation_max, out_length), + 0); + + return kTfLiteOk; +} + +#endif // defined(HIFI5) + +void* XtensaPoolingInit(TfLiteContext* context, const char* buffer, + size_t length) { + TFLITE_DCHECK(context->AllocatePersistentBuffer != nullptr); +#if defined(HIFI5) + return context->AllocatePersistentBuffer(context, + sizeof(XtensaOpDataPooling)); +#elif defined(VISION_P6) + if (InitXtensaContext()) { + return nullptr; + } + return context->AllocatePersistentBuffer(context, + sizeof(XtensaOpDataPooling)); +#else + return context->AllocatePersistentBuffer(context, sizeof(OpDataPooling)); +#endif +} + +TFLMRegistration Register_AVERAGE_POOL_2D_INT8() { +#if defined(HIFI5) + return tflite::micro::RegisterOp(XtensaPoolingInit, AveragePrepareHifi, + AverageEvalInt8); +#elif defined(VISION_P6) + return tflite::micro::RegisterOp(XtensaPoolingInit, AvgPoolingPrepareVision, + AverageEvalInt8); +#else + return tflite::micro::RegisterOp(XtensaPoolingInit, PoolingPrepare, + AverageEvalInt8); +#endif +} + +TFLMRegistration Register_MAX_POOL_2D_INT8() { +#if defined(HIFI5) + return tflite::micro::RegisterOp(XtensaPoolingInit, MaxPrepareHifi, + MaxEvalInt8); +#elif defined(VISION_P6) + return tflite::micro::RegisterOp(XtensaPoolingInit, MaxPoolingPrepareVision, + MaxEvalInt8); 
+#else + return tflite::micro::RegisterOp(XtensaPoolingInit, PoolingPrepare, + MaxEvalInt8); +#endif +} + +} // namespace tflite diff --git a/tensorflow/lite/micro/kernels/xtensa/pooling_vision.cc b/tensorflow/lite/micro/kernels/xtensa/pooling_vision.cc new file mode 100644 index 0000000..b0186f1 --- /dev/null +++ b/tensorflow/lite/micro/kernels/xtensa/pooling_vision.cc @@ -0,0 +1,117 @@ +/* Copyright 2023 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ +#if defined(VISION_P6) + +#include "tensorflow/lite/c/builtin_op_data.h" +#include "tensorflow/lite/kernels/internal/reference/pooling.h" +#include "tensorflow/lite/kernels/kernel_util.h" +#include "tensorflow/lite/micro/kernels/kernel_util.h" +#include "tensorflow/lite/micro/kernels/pooling.h" +#include "tensorflow/lite/micro/kernels/xtensa/xtensa.h" +#include "tensorflow/lite/micro/kernels/xtensa/xtensa_pooling.h" + +#define MAX_POOLING 0 +#define AVG_POOLING 1 + +namespace tflite { + +TfLiteStatus PoolingPrepareVision(TfLiteContext* context, TfLiteNode* node, + uint8_t pool_type) { + TF_LITE_ENSURE_STATUS(PoolingPrepare(context, node)); + TFLITE_DCHECK(node->user_data != nullptr); + TFLITE_DCHECK(node->builtin_data != nullptr); + + XtensaOpDataPooling* data = + reinterpret_cast(node->user_data); + const auto& params = + *(reinterpret_cast(node->builtin_data)); + + MicroContext* micro_context = 
GetMicroContext(context); + TfLiteTensor* output = + micro_context->AllocateTempOutputTensor(node, kPoolingOutputTensor); + TF_LITE_ENSURE(context, output != nullptr); + TfLiteTensor* input = + micro_context->AllocateTempInputTensor(node, kPoolingInputTensor); + TF_LITE_ENSURE(context, input != nullptr); + + if (input->type == kTfLiteInt8) { + uint32_t context_size = 0; + uint32_t status = xiPoolGetMemReqd_Context(&context_size); + TFLITE_DCHECK(status == 0); + if (context_size) { + void* context_data = + context->AllocatePersistentBuffer(context, context_size); + if (context_data == nullptr) { + return kTfLiteError; + } + data->p_context = reinterpret_cast(context_data); + data->context_size = context_size; + } + + uint32_t input_dims[4] = {1, 1, 1, 1}; + uint32_t output_dims[4] = {1, 1, 1, 1}; + for (int i = 0; i < NumDimensions(input); i++) { + input_dims[i] = + std::max(1, SizeOfDimension(input, NumDimensions(input) - 1 - i)); + } + for (int i = 0; i < NumDimensions(output); i++) { + output_dims[i] = + std::max(1, SizeOfDimension(output, NumDimensions(output) - 1 - i)); + } + + status = xiPoolSetContext( + data->p_context, data->context_size, input_dims[0], input_dims[1], + input_dims[2], input_dims[3], output_dims[0], output_dims[1], + output_dims[2], params.filter_width, params.filter_height, + params.stride_width, params.stride_height, + data->reference_op_data.padding.width, + data->reference_op_data.padding.height, input->params.zero_point, + output->params.zero_point, data->reference_op_data.activation_min, + data->reference_op_data.activation_max, pool_type); + if (status) { + return kTfLiteError; + } + } + + micro_context->DeallocateTempTfLiteTensor(output); + micro_context->DeallocateTempTfLiteTensor(input); + + return kTfLiteOk; +} + +TfLiteStatus AvgPoolingPrepareVision(TfLiteContext* context, TfLiteNode* node) { + return PoolingPrepareVision(context, node, AVG_POOLING); +} + +TfLiteStatus MaxPoolingPrepareVision(TfLiteContext* context, TfLiteNode* 
node) { + return PoolingPrepareVision(context, node, MAX_POOLING); +} + +TfLiteStatus PoolEvalVision(TfLiteContext* context, TfLiteNode* node, + const TfLitePoolParams& params, + const XtensaOpDataPooling& data, + const TfLiteEvalTensor* input, + TfLiteEvalTensor* output) { + const uint32_t input_size = NumElements(input->dims); + const uint32_t output_size = NumElements(output->dims); + + xiPool(data.p_context, data.context_size, + const_cast(tflite::micro::GetTensorData(input)), + input_size, tflite::micro::GetTensorData(output), output_size); + return kTfLiteOk; +} +} // namespace tflite + +#endif // VISIONP6 diff --git a/tensorflow/lite/micro/kernels/xtensa/quantize.cc b/tensorflow/lite/micro/kernels/xtensa/quantize.cc new file mode 100644 index 0000000..e849108 --- /dev/null +++ b/tensorflow/lite/micro/kernels/xtensa/quantize.cc @@ -0,0 +1,317 @@ +/* Copyright 2021 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/ + +#include "tensorflow/lite/kernels/internal/reference/quantize.h" + +#include "tensorflow/lite/c/common.h" +#include "tensorflow/lite/kernels/internal/quantization_util.h" +#include "tensorflow/lite/kernels/internal/reference/requantize.h" +#include "tensorflow/lite/kernels/internal/tensor_ctypes.h" +#include "tensorflow/lite/kernels/kernel_util.h" +#include "tensorflow/lite/micro/kernels/kernel_util.h" +#include "tensorflow/lite/micro/kernels/quantize.h" +#include "tensorflow/lite/micro/kernels/xtensa/xtensa.h" +#include "tensorflow/lite/micro/micro_log.h" +#include "tensorflow/lite/micro/micro_utils.h" + +namespace tflite { +namespace { + +#if defined(HIFI4) || defined(HIFI5) +TfLiteStatus EvalXtensa(TfLiteContext* context, TfLiteNode* node) { + TFLITE_DCHECK(node->user_data != nullptr); + auto* op_data = static_cast(node->user_data); + + const TfLiteEvalTensor* input = tflite::micro::GetEvalInput(context, node, 0); + TfLiteEvalTensor* output = tflite::micro::GetEvalOutput(context, node, 0); + + switch (input->type) { + case kTfLiteUInt8: { + switch (output->type) { + case kTfLiteInt8: { + int size = ElementCount(*input->dims); + reference_ops::Requantize( + tflite::micro::GetTensorData(input), size, + op_data->requantize_output_multiplier, + op_data->requantize_output_shift, op_data->input_zero_point, + op_data->quantization_params.zero_point, + tflite::micro::GetTensorData(output)); + break; + } + + default: + MicroPrintf("Input %s, output %s not supported.", + TfLiteTypeGetName(input->type), + TfLiteTypeGetName(output->type)); + return kTfLiteError; + } + break; + } + + case kTfLiteInt8: { + switch (output->type) { + case kTfLiteUInt8: { + int size = ElementCount(*input->dims); + reference_ops::Requantize( + tflite::micro::GetTensorData(input), size, + op_data->requantize_output_multiplier, + op_data->requantize_output_shift, op_data->input_zero_point, + 
op_data->quantization_params.zero_point, + tflite::micro::GetTensorData(output)); + break; + } + + case kTfLiteInt8: { + int size = ElementCount(*input->dims); + reference_ops::Requantize( + tflite::micro::GetTensorData(input), size, + op_data->requantize_output_multiplier, + op_data->requantize_output_shift, op_data->input_zero_point, + op_data->quantization_params.zero_point, + tflite::micro::GetTensorData(output)); + break; + } + + case kTfLiteInt16: { + int size = ElementCount(*input->dims); + int32_t zero_point = op_data->quantization_params.zero_point; + reference_ops::Requantize( + tflite::micro::GetTensorData(input), size, + op_data->requantize_output_multiplier, + op_data->requantize_output_shift, op_data->input_zero_point, + zero_point, tflite::micro::GetTensorData(output)); + break; + } + + case kTfLiteInt32: { + int size = ElementCount(*input->dims); + int32_t zero_point = op_data->quantization_params.zero_point; +#if defined(HIFI5) + const int8_t* input_data_ptr; + int32_t* output_data_ptr; + input_data_ptr = tflite::micro::GetTensorData(input); + output_data_ptr = tflite::micro::GetTensorData(output); + + TF_LITE_ENSURE_EQ( + context, + xa_nn_elm_requantize_asym8s_asym32s( + output_data_ptr, input_data_ptr, op_data->input_zero_point, + zero_point, op_data->requantize_output_shift, + op_data->requantize_output_multiplier, size), + 0); +#else + reference_ops::Requantize( + tflite::micro::GetTensorData(input), size, + op_data->requantize_output_multiplier, + op_data->requantize_output_shift, op_data->input_zero_point, + zero_point, tflite::micro::GetTensorData(output)); +#endif // defined(HIFI5) + break; + } + + default: { + MicroPrintf("Input %s, output %s not supported.", + TfLiteTypeGetName(input->type), + TfLiteTypeGetName(output->type)); + return kTfLiteError; + } + } + break; + } + + case kTfLiteInt16: { + switch (output->type) { + case kTfLiteInt8: { + int size = ElementCount(*input->dims); + TF_LITE_ENSURE_EQ(context, + 
xa_nn_elm_requantize_asym16s_asym8s( + tflite::micro::GetTensorData(output), + tflite::micro::GetTensorData(input), + op_data->input_zero_point, + op_data->quantization_params.zero_point, + op_data->requantize_output_shift, + op_data->requantize_output_multiplier, size), + 0); + break; + } + + case kTfLiteInt16: { + int size = ElementCount(*input->dims); + reference_ops::Requantize( + tflite::micro::GetTensorData(input), size, + op_data->requantize_output_multiplier, + op_data->requantize_output_shift, op_data->input_zero_point, + op_data->quantization_params.zero_point, + tflite::micro::GetTensorData(output)); + break; + } + + case kTfLiteInt32: { + int size = ElementCount(*input->dims); +#if defined(HIFI5) + TF_LITE_ENSURE_EQ(context, + xa_nn_elm_requantize_asym16s_asym32s( + tflite::micro::GetTensorData(output), + tflite::micro::GetTensorData(input), + op_data->input_zero_point, + op_data->quantization_params.zero_point, + op_data->requantize_output_shift, + op_data->requantize_output_multiplier, size), + 0); +#else + int32_t zero_point = op_data->quantization_params.zero_point; + reference_ops::Requantize( + tflite::micro::GetTensorData(input), size, + op_data->requantize_output_multiplier, + op_data->requantize_output_shift, op_data->input_zero_point, + zero_point, tflite::micro::GetTensorData(output)); +#endif // defined(HIFI5) + break; + } + + default: { + MicroPrintf("Input %s, output %s not supported.", + TfLiteTypeGetName(input->type), + TfLiteTypeGetName(output->type)); + return kTfLiteError; + } + } + break; + } + + case kTfLiteInt32: { + switch (output->type) { + case kTfLiteInt8: { + int size = ElementCount(*input->dims); + reference_ops::Requantize( + tflite::micro::GetTensorData(input), size, + op_data->requantize_output_multiplier, + op_data->requantize_output_shift, op_data->input_zero_point, + op_data->quantization_params.zero_point, + tflite::micro::GetTensorData(output)); + break; + } + + case kTfLiteInt16: { + int size = 
ElementCount(*input->dims); + int32_t zero_point = op_data->quantization_params.zero_point; + reference_ops::Requantize( + tflite::micro::GetTensorData(input), size, + op_data->requantize_output_multiplier, + op_data->requantize_output_shift, op_data->input_zero_point, + zero_point, tflite::micro::GetTensorData(output)); + break; + } + + default: { + MicroPrintf("Input %s, output %s not supported.", + TfLiteTypeGetName(input->type), + TfLiteTypeGetName(output->type)); + return kTfLiteError; + } + } + break; + } + + case kTfLiteFloat32: { + switch (output->type) { + case kTfLiteInt8: { + reference_ops::AffineQuantize( + op_data->quantization_params, + tflite::micro::GetTensorShape(input), + tflite::micro::GetTensorData(input), + tflite::micro::GetTensorShape(output), + tflite::micro::GetTensorData(output)); + break; + } + + case kTfLiteInt16: { + reference_ops::AffineQuantize( + op_data->quantization_params, + tflite::micro::GetTensorShape(input), + tflite::micro::GetTensorData(input), + tflite::micro::GetTensorShape(output), + tflite::micro::GetTensorData(output)); + break; + } + + default: { + MicroPrintf("Input %s, output %s not supported.", + TfLiteTypeGetName(input->type), + TfLiteTypeGetName(output->type)); + return kTfLiteError; + } + } + break; + } + + default: { + MicroPrintf("Input %s, output %s not supported.", + TfLiteTypeGetName(input->type), + TfLiteTypeGetName(output->type)); + return kTfLiteError; + } + } + + return kTfLiteOk; +} +#endif // defined(HIFI4) || defined(HIFI5) + +void* Init(TfLiteContext* context, const char* buffer, size_t length) { + TFLITE_DCHECK(context->AllocatePersistentBuffer != nullptr); + return context->AllocatePersistentBuffer(context, + sizeof(OpDataQuantizeReference)); +} + +TfLiteStatus Prepare(TfLiteContext* context, TfLiteNode* node) { + TFLITE_DCHECK(node->user_data != nullptr); + MicroContext* micro_context = GetMicroContext(context); + + TfLiteTensor* output = micro_context->AllocateTempOutputTensor(node, 0); + 
TfLiteTensor* input = micro_context->AllocateTempInputTensor(node, 0); + + auto* op_data = static_cast(node->user_data); + op_data->quantization_params.zero_point = output->params.zero_point; + op_data->quantization_params.scale = + static_cast(output->params.scale); + + op_data->input_zero_point = input->params.zero_point; + + double effective_scale = static_cast(input->params.scale) / + static_cast(output->params.scale); + QuantizeMultiplier(effective_scale, &op_data->requantize_output_multiplier, + &op_data->requantize_output_shift); + + micro_context->DeallocateTempTfLiteTensor(input); + micro_context->DeallocateTempTfLiteTensor(output); + + return kTfLiteOk; +} + +TfLiteStatus Eval(TfLiteContext* context, TfLiteNode* node) { +#if defined(HIFI4) || defined(HIFI5) + return EvalXtensa(context, node); +#else + return EvalQuantizeReference(context, node); +#endif // defined(HIFI4) || defined(HIFI5) +} + +} // namespace + +TFLMRegistration Register_QUANTIZE() { + return tflite::micro::RegisterOp(Init, Prepare, Eval); +} + +} // namespace tflite diff --git a/tensorflow/lite/micro/kernels/xtensa/reduce.cc b/tensorflow/lite/micro/kernels/xtensa/reduce.cc new file mode 100644 index 0000000..c7c507f --- /dev/null +++ b/tensorflow/lite/micro/kernels/xtensa/reduce.cc @@ -0,0 +1,118 @@ +/* Copyright 2019 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/ + +#include "tensorflow/lite/kernels/internal/reference/reduce.h" + +#include "tensorflow/lite/c/builtin_op_data.h" +#include "tensorflow/lite/c/common.h" +#include "tensorflow/lite/kernels/internal/quantization_util.h" +#include "tensorflow/lite/kernels/internal/reference/integer_ops/mean.h" +#include "tensorflow/lite/kernels/internal/tensor_ctypes.h" +#include "tensorflow/lite/kernels/internal/types.h" +#include "tensorflow/lite/kernels/kernel_util.h" +#include "tensorflow/lite/micro/kernels/kernel_util.h" +#include "tensorflow/lite/micro/kernels/xtensa/xtensa.h" +#include "tensorflow/lite/micro/kernels/xtensa/xtensa_reduce.h" +#include "tensorflow/lite/micro/micro_utils.h" + +namespace tflite { + +void* XtensaInitReduce(TfLiteContext* context, const char* buffer, + size_t length) { + TFLITE_DCHECK(context->AllocatePersistentBuffer != nullptr); + void* data = + context->AllocatePersistentBuffer(context, sizeof(XtensaReduceOpData)); + +#if defined(VISION_P6) + if (InitXtensaContext() != 0) { + return nullptr; + } +#endif + return data; +} + +TfLiteStatus XtensaPrepareMax(TfLiteContext* context, TfLiteNode* node) { + OpDataReduce* op_data = + &(static_cast(node->user_data)->reference_op_data); + TF_LITE_ENSURE_OK(context, PrepareMaxHelper(context, node, op_data)); +#if defined(VISION_P6) + TF_LITE_ENSURE_OK(context, ReducePrepareVision(context, node)); +#endif // VISION_P6 + return kTfLiteOk; +} + +TfLiteStatus XtensaPrepareMeanOrSum(TfLiteContext* context, TfLiteNode* node) { + OpDataReduce* op_data = + &(static_cast(node->user_data)->reference_op_data); + return PrepareMeanOrSumHelper(context, node, op_data); +} + +TfLiteStatus XtensaEvalMean(TfLiteContext* context, TfLiteNode* node) { + OpDataReduce* op_data = + &(static_cast(node->user_data)->reference_op_data); + return EvalMeanHelper(context, node, op_data); +} + +TfLiteStatus XtensaEvalMax(TfLiteContext* context, TfLiteNode* 
node) { + XtensaReduceOpData* op_data_xtensa = + static_cast(node->user_data); + OpDataReduce* op_data = &(op_data_xtensa->reference_op_data); + +#if defined(VISION_P6) + const TfLiteEvalTensor* input = tflite::micro::GetEvalInput(context, node, 0); + TfLiteEvalTensor* output = tflite::micro::GetEvalOutput(context, node, 0); + TF_LITE_ENSURE_TYPES_EQ(context, input->type, output->type); + + switch (input->type) { + case kTfLiteInt8: { + TF_LITE_ENSURE_EQ(context, static_cast(op_data->input_scale), + static_cast(op_data->output_scale)); + TF_LITE_ENSURE_EQ(context, op_data->input_zp, op_data->output_zp); + ReduceEvalVision(*op_data_xtensa, input, output); + break; + } + default: { + // Use the reference EvalMax for all other cases. + return EvalMaxHelper(context, node, op_data); + } + } + return kTfLiteOk; +#else + return EvalMaxHelper(context, node, op_data); +#endif +} + +TfLiteStatus XtensaEvalSum(TfLiteContext* context, TfLiteNode* node) { + OpDataReduce* op_data = + &(static_cast(node->user_data)->reference_op_data); + return EvalSumHelper(context, node, op_data); +} + +TFLMRegistration Register_MEAN() { + return tflite::micro::RegisterOp(XtensaInitReduce, XtensaPrepareMeanOrSum, + XtensaEvalMean); +} + +TFLMRegistration Register_REDUCE_MAX() { + return tflite::micro::RegisterOp(XtensaInitReduce, XtensaPrepareMax, + XtensaEvalMax); +} + +TFLMRegistration Register_SUM() { + return tflite::micro::RegisterOp(XtensaInitReduce, XtensaPrepareMeanOrSum, + XtensaEvalSum); +} + +} // namespace tflite diff --git a/tensorflow/lite/micro/kernels/xtensa/reduce_vision.cc b/tensorflow/lite/micro/kernels/xtensa/reduce_vision.cc new file mode 100644 index 0000000..c76525e --- /dev/null +++ b/tensorflow/lite/micro/kernels/xtensa/reduce_vision.cc @@ -0,0 +1,152 @@ +/* Copyright 2021 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. 
+You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#if defined(VISION_P6) + +#include + +#include "tensorflow/lite/c/builtin_op_data.h" +#include "tensorflow/lite/c/common.h" +#include "tensorflow/lite/kernels/internal/common.h" +#include "tensorflow/lite/kernels/internal/reference/reduce.h" +#include "tensorflow/lite/kernels/internal/tensor_ctypes.h" +#include "tensorflow/lite/kernels/kernel_util.h" +#include "tensorflow/lite/micro/kernels/kernel_util.h" +#include "tensorflow/lite/micro/kernels/xtensa/xtensa.h" +#include "tensorflow/lite/micro/kernels/xtensa/xtensa_reduce.h" +#include "tensorflow/lite/micro/micro_utils.h" + +namespace tflite { + +inline void OperandDims4D(uint32_t* dims, TfLiteTensor* opnd) { + for (int i = NumDimensions(opnd) - 1, j = 0; i >= 0; i--, j++) { + dims[j] = SizeOfDimension(opnd, i); + } + return; +} + +// This function is duplicated from reference/reduce.h +// This method parses the input 'axis' to remove duplicates and handle negative +// values, and returns a valid 'out_axis' +inline bool ResolveAxis(const int num_dims, const int* axis, + const int64_t num_axis, int* out_axis, + int* out_num_axis) { + *out_num_axis = 0; // Just in case. + // Short-circuit axis resolution for scalars; the axis will go unused. + if (num_dims == 0) { + return true; + } + // o(n^2) is fine since out_num_axis should be really small, mostly <= 4 + for (int64_t idx = 0; idx < num_axis; ++idx) { + // Handle negative index. 
A positive index 'p_idx' can be represented as a + // negative index 'n_idx' as: n_idx = p_idx-num_dims + // eg: For num_dims=3, [0, 1, 2] is the same as [-3, -2, -1] */ + int current = axis[idx] < 0 ? (axis[idx] + num_dims) : axis[idx]; + TFLITE_DCHECK(current >= 0 && current < num_dims); + if (current < 0 || current >= num_dims) { + return false; + } + bool is_dup = false; + for (int j = 0; j < *out_num_axis; ++j) { + if (out_axis[j] == current) { + is_dup = true; + break; + } + } + if (!is_dup) { + out_axis[*out_num_axis] = current; + *out_num_axis += 1; + } + } + return true; +} +TfLiteStatus ReducePrepareVision(TfLiteContext* context, TfLiteNode* node) { + TFLITE_DCHECK(node->user_data != nullptr); + TFLITE_DCHECK(node->builtin_data != nullptr); + XtensaReduceOpData* data = + reinterpret_cast(node->user_data); + + MicroContext* micro_context = GetMicroContext(context); + TfLiteTensor* input = micro_context->AllocateTempInputTensor(node, 0); + TfLiteTensor* output = micro_context->AllocateTempOutputTensor(node, 0); + TfLiteTensor* axis = micro_context->AllocateTempInputTensor(node, 1); + + uint32_t inputDims[4] = {1, 1, 1, 1}; + uint32_t outputDims[4] = {1, 1, 1, 1}; + uint32_t shouldReduceR[4] = {0, 0, 0, 0}; + int32_t resolved_axis[4] = {0, 0, 0, 0}; + OperandDims4D(inputDims, input); + OperandDims4D(outputDims, output); + + uint32_t inputRank = NumDimensions(input); + // Interpret an axis tensor with null dimensions as a scalar + int num_axis = static_cast(ElementCount(*axis->dims)); + // Resolve axis. 
+ int num_resolved_axis = 0; + if (!ResolveAxis(input->dims->size, axis->data.i32, num_axis, resolved_axis, + &num_resolved_axis)) { + return kTfLiteError; + } + std::vector shouldReduce(inputRank); + + for (int32_t i = 0; i < num_axis; ++i) { + int32_t axisD = resolved_axis[i]; + shouldReduce[axisD] = true; + } + + // reverse axes and align it to dimension 0 as OperandDims4D + for (uint32_t axisI = 0; axisI < inputRank; ++axisI) { + shouldReduceR[inputRank - 1 - axisI] = (uint32_t)shouldReduce[axisI]; + } + + uint32_t context_size = 0; + uint32_t status = xiReduceGetMemReqd_Context(&context_size); + if (!status && context_size) { + void* context_data = + context->AllocatePersistentBuffer(context, context_size); + if (context_data == nullptr) { + return kTfLiteError; + } + data->p_context = reinterpret_cast(context_data); + data->context_size = context_size; + } + + status = xiReduceSetContext(data->p_context, data->context_size, inputDims, + outputDims, shouldReduceR); + + if (status) { + return kTfLiteError; + } + + micro_context->DeallocateTempTfLiteTensor(input); + micro_context->DeallocateTempTfLiteTensor(output); + micro_context->DeallocateTempTfLiteTensor(axis); + return kTfLiteOk; +} + +TfLiteStatus ReduceEvalVision(const XtensaReduceOpData& data, + const TfLiteEvalTensor* input, + TfLiteEvalTensor* output) { + const uint32_t input_size = NumElements(input->dims); + const uint32_t output_size = NumElements(output->dims); + + xiReduce(data.p_context, data.context_size, + const_cast(tflite::micro::GetTensorData(input)), + input_size, tflite::micro::GetTensorData(output), + output_size); + return kTfLiteOk; +} +} // namespace tflite +#endif // defined(VISION_P6) diff --git a/tensorflow/lite/micro/kernels/xtensa/reshape.cc b/tensorflow/lite/micro/kernels/xtensa/reshape.cc new file mode 100644 index 0000000..292b47c --- /dev/null +++ b/tensorflow/lite/micro/kernels/xtensa/reshape.cc @@ -0,0 +1,163 @@ +/* Copyright 2023 The TensorFlow Authors. 
All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#include + +#include "tensorflow/lite/c/builtin_op_data.h" +#include "tensorflow/lite/c/common.h" +#include "tensorflow/lite/kernels/internal/tensor_ctypes.h" +#include "tensorflow/lite/kernels/kernel_util.h" +#include "tensorflow/lite/kernels/op_macros.h" +#include "tensorflow/lite/micro/kernels/kernel_util.h" +#include "tensorflow/lite/micro/kernels/xtensa/xtensa.h" +#include "tensorflow/lite/micro/kernels/xtensa/xtensa_reshape.h" +#include "tensorflow/lite/micro/memory_helpers.h" +#include "tensorflow/lite/micro/micro_utils.h" + +namespace tflite { +namespace ops { +namespace micro { +namespace reshape { + +#if defined(VISION_P6) +void* Init(TfLiteContext* context, const char* buffer, size_t length) { + TFLITE_DCHECK(context->AllocatePersistentBuffer != nullptr); + void* data = + context->AllocatePersistentBuffer(context, sizeof(XtensaReshapeData)); + if (InitXtensaContext()) { + return nullptr; + } + return data; +} +#endif // defined(VISION_P6) + +TfLiteStatus ReshapeOutput(TfLiteContext* context, TfLiteNode* node) { + MicroContext* micro_context = GetMicroContext(context); + + TfLiteTensor* input = + micro_context->AllocateTempInputTensor(node, kReshapeInputTensor); + TF_LITE_ENSURE(context, input != nullptr); + TfLiteTensor* output = + micro_context->AllocateTempOutputTensor(node, kReshapeOutputTensor); + TF_LITE_ENSURE(context, 
output != nullptr); + // Tensorflow's Reshape allows one of the shape components to have the + // special -1 value, meaning it will be calculated automatically based on the + // input. Here we calculate what that dimension should be so that the number + // of output elements in the same as the number of input elements. + int num_input_elements = NumElements(input); + TfLiteIntArray* output_shape = output->dims; + + if (NumInputs(node) == 1 && // Legacy scalar supported with params. + output_shape->size == 1 && output_shape->data[0] == 0) { + // Legacy tflite models use a shape parameter of [0] to indicate scalars, + // so adjust accordingly. TODO(b/111614235): Allow zero-sized buffers during + // toco conversion. + output_shape->size = 0; + } + + int num_output_elements = 1; + int stretch_dim = -1; + for (int i = 0; i < output_shape->size; ++i) { + int value = output_shape->data[i]; + if (value == -1) { + TF_LITE_ENSURE_EQ(context, stretch_dim, -1); + stretch_dim = i; + } else { + num_output_elements *= value; + } + } + if (stretch_dim != -1) { + TfLiteEvalTensor* output_eval = + tflite::micro::GetEvalOutput(context, node, kReshapeOutputTensor); + TF_LITE_ENSURE_STATUS(tflite::micro::CreateWritableTensorDimsWithCopy( + context, output, output_eval)); + output_shape = output->dims; // output tensor dims were moved + output_shape->data[stretch_dim] = num_input_elements / num_output_elements; + num_output_elements *= output_shape->data[stretch_dim]; + } + + TF_LITE_ENSURE_TYPES_EQ(context, input->type, output->type); + TF_LITE_ENSURE_EQ(context, num_input_elements, num_output_elements); + + micro_context->DeallocateTempTfLiteTensor(input); + micro_context->DeallocateTempTfLiteTensor(output); + return kTfLiteOk; +} + +TfLiteStatus Prepare(TfLiteContext* context, TfLiteNode* node) { + TF_LITE_ENSURE(context, NumInputs(node) == 1 || NumInputs(node) == 2); + TF_LITE_ENSURE_EQ(context, NumOutputs(node), 1); + TF_LITE_ENSURE_EQ(context, ReshapeOutput(context, node), 
kTfLiteOk); +#if defined(VISION_P6) + { + MicroContext* micro_context = GetMicroContext(context); + TfLiteTensor* input = + micro_context->AllocateTempInputTensor(node, kReshapeInputTensor); + // Vision P6 currently only supports up to 4D int8 input tensors + if (NumDimensions(input) <= 4 && input->type == kTfLiteInt8) { + TF_LITE_ENSURE_OK(context, ReshapePrepareVision(context, node)); + } + micro_context->DeallocateTempTfLiteTensor(input); + } +#endif // VISION_P6 + return kTfLiteOk; +} + +TfLiteStatus Eval(TfLiteContext* context, TfLiteNode* node) { + const TfLiteEvalTensor* input = + tflite::micro::GetEvalInput(context, node, kReshapeInputTensor); + TfLiteEvalTensor* output = + tflite::micro::GetEvalOutput(context, node, kReshapeOutputTensor); + + // TODO(b/162522304): storing input bytes in OpData increases some models + // significantly, possibly due to alignment issues. + size_t input_bytes; + TF_LITE_ENSURE_STATUS(TfLiteTypeSizeOf(input->type, &input_bytes)); + input_bytes *= ElementCount(*input->dims); + + // Do nothing for in-place reshape. + if (input->data.raw != output->data.raw) { + // Otherwise perform reshape with copy. 
+#if defined(VISION_P6) + // Vision P6 currently only supports up to 4D int8 input tensors + if (tflite::micro::GetTensorShape(input).DimensionsCount() <= 4 && + input->type == kTfLiteInt8) { + XtensaReshapeData* op_data_xtensa = + static_cast(node->user_data); + ReshapeEvalVision(*op_data_xtensa, input, output); + } else { +#endif // VISION_P6 + memcpy(output->data.raw, input->data.raw, input_bytes); +#if defined(VISION_P6) + } +#endif // VISION_P6 + } + return kTfLiteOk; +} + +} // namespace reshape + +TFLMRegistration Register_RESHAPE() { +#if defined(VISION_P6) + return tflite::micro::RegisterOp(reshape::Init, reshape::Prepare, + reshape::Eval); +#else + return tflite::micro::RegisterOp(nullptr, reshape::Prepare, reshape::Eval); +#endif +} + +} // namespace micro +} // namespace ops +} // namespace tflite diff --git a/tensorflow/lite/micro/kernels/xtensa/reshape_vision.cc b/tensorflow/lite/micro/kernels/xtensa/reshape_vision.cc new file mode 100644 index 0000000..a43ca17 --- /dev/null +++ b/tensorflow/lite/micro/kernels/xtensa/reshape_vision.cc @@ -0,0 +1,87 @@ +/* Copyright 2022 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/ + +#if defined(VISION_P6) + +#include + +#include "tensorflow/lite/c/builtin_op_data.h" +#include "tensorflow/lite/c/common.h" +#include "tensorflow/lite/kernels/internal/common.h" +#include "tensorflow/lite/kernels/internal/reference/reduce.h" +#include "tensorflow/lite/kernels/internal/tensor_ctypes.h" +#include "tensorflow/lite/kernels/kernel_util.h" +#include "tensorflow/lite/micro/kernels/kernel_util.h" +#include "tensorflow/lite/micro/kernels/xtensa/xtensa.h" +#include "tensorflow/lite/micro/kernels/xtensa/xtensa_reshape.h" + +namespace tflite { + +inline void OperandDims4D(uint32_t* dims, TfLiteTensor* opnd) { + for (int i = NumDimensions(opnd) - 1, j = 0; i >= 0; i--, j++) { + dims[j] = SizeOfDimension(opnd, i); + } + return; +} + +TfLiteStatus ReshapePrepareVision(TfLiteContext* context, TfLiteNode* node) { + TFLITE_DCHECK(node->user_data != nullptr); + XtensaReshapeData* data = + reinterpret_cast(node->user_data); + + MicroContext* micro_context = GetMicroContext(context); + TfLiteTensor* input = + micro_context->AllocateTempInputTensor(node, kReshapeInputTensor); + + uint32_t inputRank = NumDimensions(input); + uint32_t inputDims[4] = {1, 1, 1, 1}; + OperandDims4D(inputDims, input); + uint32_t context_size = 0; + uint32_t status = xiReshapeGetMemReqd_Context(&context_size); + TFLITE_DCHECK(status == 0); + if (context_size) { + void* context_data = + context->AllocatePersistentBuffer(context, context_size); + if (context_data == nullptr) { + return kTfLiteError; + } + data->p_context = reinterpret_cast(context_data); + data->context_size = context_size; + } + + status = xiReshapeSetContext(data->p_context, data->context_size, inputDims, + inputRank); + + if (status) { + return kTfLiteError; + } + + micro_context->DeallocateTempTfLiteTensor(input); + return kTfLiteOk; +} +TfLiteStatus ReshapeEvalVision(const XtensaReshapeData& data, + const TfLiteEvalTensor* input, + 
TfLiteEvalTensor* output) { + const uint32_t input_size = NumElements(input->dims); + const uint32_t output_size = NumElements(output->dims); + + xiReshape(data.p_context, data.context_size, + const_cast(tflite::micro::GetTensorData(input)), + input_size, tflite::micro::GetTensorData(output), + output_size); + return kTfLiteOk; +} +} // namespace tflite +#endif // defined(VISION_P6) diff --git a/tensorflow/lite/micro/kernels/xtensa/softmax.cc b/tensorflow/lite/micro/kernels/xtensa/softmax.cc new file mode 100644 index 0000000..76c380f --- /dev/null +++ b/tensorflow/lite/micro/kernels/xtensa/softmax.cc @@ -0,0 +1,126 @@ +/* Copyright 2018 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/ + +#include "tensorflow/lite/micro/kernels/softmax.h" + +#include "tensorflow/lite/c/builtin_op_data.h" +#include "tensorflow/lite/c/common.h" +#include "tensorflow/lite/kernels/internal/common.h" +#include "tensorflow/lite/kernels/internal/quantization_util.h" +#include "tensorflow/lite/kernels/internal/reference/softmax.h" +#include "tensorflow/lite/kernels/internal/tensor_ctypes.h" +#include "tensorflow/lite/kernels/kernel_util.h" +#include "tensorflow/lite/kernels/op_macros.h" +#include "tensorflow/lite/micro/kernels/kernel_util.h" +#include "tensorflow/lite/micro/kernels/xtensa/xtensa.h" +#include "tensorflow/lite/micro/kernels/xtensa/xtensa_softmax.h" +#include "tensorflow/lite/micro/micro_log.h" + +namespace tflite { +namespace { + +#if defined(HIFI4) || defined(HIFI5) +TfLiteStatus EvalHifiInt8(const XtensaSoftmaxOpData* op_data, + const TfLiteEvalTensor* input, + TfLiteEvalTensor* output, TfLiteContext* context) { + const RuntimeShape& input_shape = tflite::micro::GetTensorShape(input); + const int8_t* input_data = tflite::micro::GetTensorData(input); + const RuntimeShape& output_shape = tflite::micro::GetTensorShape(output); + int8_t* output_data = tflite::micro::GetTensorData(output); + const int trailing_dim = input_shape.DimensionsCount() - 1; + const int outer_size = + MatchingFlatSizeSkipDim(input_shape, trailing_dim, output_shape); + const int depth = + MatchingDim(input_shape, trailing_dim, output_shape, trailing_dim); + + void* p_scratch = static_cast( + context->GetScratchBuffer(context, op_data->scratch_tensor_index)); + for (int i = 0; i < outer_size; ++i) { + int err = xa_nn_vec_softmax_asym8s_asym8s( + &output_data[i * depth], &input_data[i * depth], + op_data->params.diff_min, op_data->params.input_left_shift, + op_data->params.input_multiplier, depth, p_scratch); + TF_LITE_ENSURE(context, err == 0); + } + return kTfLiteOk; +} +#endif // defined(HIFI4) || 
defined(HIFI5) + +TfLiteStatus Eval(TfLiteContext* context, TfLiteNode* node) { + const TfLiteEvalTensor* input = tflite::micro::GetEvalInput(context, node, 0); + TfLiteEvalTensor* output = tflite::micro::GetEvalOutput(context, node, 0); + + if (input->type == kTfLiteInt8 && output->type == kTfLiteInt16) { + return XtensaEvalSoftmaxInt8Int16(context, node); + } + + TFLITE_DCHECK(node->user_data != nullptr); + +#if defined(HIFI4) || defined(HIFI5) + XtensaSoftmaxOpData op_data = + *static_cast(node->user_data); + SoftmaxParams params = op_data.params; +#else + SoftmaxParams params = *static_cast(node->user_data); +#endif + + if (input->type == kTfLiteInt8 && output->type == kTfLiteInt8) { +#if defined(HIFI4) || defined(HIFI5) + return EvalHifiInt8(static_cast(node->user_data), + input, output, context); +#elif defined(VISION_P6) + return SoftmaxEvalVision( + context, node, *(static_cast(node->user_data)), + input, output); +#else + tflite::reference_ops::Softmax( + params, tflite::micro::GetTensorShape(input), + tflite::micro::GetTensorData(input), + tflite::micro::GetTensorShape(output), + tflite::micro::GetTensorData(output)); + return kTfLiteOk; +#endif // defined(HIFI4) || defined(HIFI5) + } + + if (input->type == kTfLiteInt16 && output->type == kTfLiteInt16) { + tflite::reference_ops::SoftmaxInt16( + params, tflite::micro::GetTensorShape(input), + tflite::micro::GetTensorData(input), + tflite::micro::GetTensorShape(output), + tflite::micro::GetTensorData(output)); + return kTfLiteOk; + } + + if (input->type == kTfLiteFloat32) { + tflite::reference_ops::Softmax(params, tflite::micro::GetTensorShape(input), + tflite::micro::GetTensorData(input), + tflite::micro::GetTensorShape(output), + tflite::micro::GetTensorData(output)); + return kTfLiteOk; + } + + MicroPrintf("Type %s (%d) not supported.", TfLiteTypeGetName(input->type), + input->type); + return kTfLiteError; +} + +} // namespace + +TFLMRegistration Register_SOFTMAX() { + return 
tflite::micro::RegisterOp(XtensaInitSoftmax, XtensaPrepareSoftmax, + Eval); +} + +} // namespace tflite diff --git a/tensorflow/lite/micro/kernels/xtensa/softmax_int8_int16.cc b/tensorflow/lite/micro/kernels/xtensa/softmax_int8_int16.cc new file mode 100644 index 0000000..b23a9f7 --- /dev/null +++ b/tensorflow/lite/micro/kernels/xtensa/softmax_int8_int16.cc @@ -0,0 +1,154 @@ +/* Copyright 2018 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#include "tensorflow/lite/c/builtin_op_data.h" +#include "tensorflow/lite/c/common.h" +#include "tensorflow/lite/kernels/internal/common.h" +#include "tensorflow/lite/kernels/internal/quantization_util.h" +#include "tensorflow/lite/kernels/internal/reference/softmax.h" +#include "tensorflow/lite/kernels/internal/tensor_ctypes.h" +#include "tensorflow/lite/kernels/kernel_util.h" +#include "tensorflow/lite/kernels/op_macros.h" +#include "tensorflow/lite/micro/kernels/kernel_util.h" +#include "tensorflow/lite/micro/kernels/softmax.h" +#include "tensorflow/lite/micro/kernels/xtensa/xtensa.h" +#include "tensorflow/lite/micro/kernels/xtensa/xtensa_softmax.h" +#include "tensorflow/lite/micro/micro_log.h" + +namespace tflite { +namespace { + +#if defined(HIFI4) || defined(HIFI5) +TfLiteStatus PrepareHifi(TfLiteContext* context, TfLiteNode* node) { + TF_LITE_ENSURE_OK(context, SoftmaxPrepare(context, node)); + + MicroContext* 
micro_context = GetMicroContext(context); + // Calculate scratch memory requirements and request scratch buffer + TfLiteTensor* input = micro_context->AllocateTempInputTensor(node, 0); + TfLiteTensor* output = micro_context->AllocateTempOutputTensor(node, 0); + + const RuntimeShape& input_shape = GetTensorShape(input); + const RuntimeShape& output_shape = GetTensorShape(output); + const int trailing_dim = input_shape.DimensionsCount() - 1; + const int depth = + MatchingDim(input_shape, trailing_dim, output_shape, trailing_dim); + + if (input->type == kTfLiteInt8) { + int required_scratch = + get_softmax_scratch_size(PREC_ASYM8S, PREC_ASYM8S, depth); + TF_LITE_ENSURE(context, required_scratch > 0); + + auto* data = static_cast(node->user_data); + TF_LITE_ENSURE_OK( + context, context->RequestScratchBufferInArena( + context, required_scratch, &(data->scratch_tensor_index))); + } + + micro_context->DeallocateTempTfLiteTensor(input); + micro_context->DeallocateTempTfLiteTensor(output); + return kTfLiteOk; +} + +TfLiteStatus EvalHifi(const XtensaSoftmaxOpData* op_data, + const TfLiteEvalTensor* input, TfLiteEvalTensor* output, + TfLiteContext* context) { + const RuntimeShape& input_shape = tflite::micro::GetTensorShape(input); + const int8_t* input_data = tflite::micro::GetTensorData(input); + const RuntimeShape& output_shape = tflite::micro::GetTensorShape(output); + int16_t* output_data = tflite::micro::GetTensorData(output); + const int trailing_dim = input_shape.DimensionsCount() - 1; + const int outer_size = + MatchingFlatSizeSkipDim(input_shape, trailing_dim, output_shape); + const int depth = + MatchingDim(input_shape, trailing_dim, output_shape, trailing_dim); + + void* p_scratch = static_cast( + context->GetScratchBuffer(context, op_data->scratch_tensor_index)); + + for (int i = 0; i < outer_size; ++i) { + int err = xa_nn_vec_softmax_asym8s_16( + &output_data[i * depth], &input_data[i * depth], + op_data->params.diff_min, op_data->params.input_left_shift, + 
op_data->params.input_multiplier, depth, p_scratch); + TF_LITE_ENSURE(context, err == 0); + } + return kTfLiteOk; +} +#endif // defined(HIFI4) || defined(HIFI5) + +} // namespace + +void* XtensaInitSoftmax(TfLiteContext* context, const char* buffer, + size_t length) { +#if defined(HIFI4) || defined(HIFI5) + TFLITE_DCHECK(context->AllocatePersistentBuffer != nullptr); + return context->AllocatePersistentBuffer(context, + sizeof(XtensaSoftmaxOpData)); +#elif defined(VISION_P6) + TFLITE_DCHECK(context->AllocatePersistentBuffer != nullptr); + if (InitXtensaContext()) { + return nullptr; + } + return context->AllocatePersistentBuffer(context, + sizeof(XtensaSoftmaxOpData)); +#else + return SoftmaxInit(context, buffer, length); +#endif // defined(HIFI4) || defined(HIFI5) +} + +TfLiteStatus XtensaPrepareSoftmax(TfLiteContext* context, TfLiteNode* node) { +#if defined(HIFI4) || defined(HIFI5) + return PrepareHifi(context, node); +#else + TF_LITE_ENSURE_OK(context, SoftmaxPrepare(context, node)); +#if defined(VISION_P6) + TF_LITE_ENSURE_OK(context, SoftmaxPrepareVision(context, node)); +#endif + return kTfLiteOk; +#endif // defined(HIFI4) || defined(HIFI5) +} + +TfLiteStatus XtensaEvalSoftmaxInt8Int16(TfLiteContext* context, + TfLiteNode* node) { + const TfLiteEvalTensor* input = tflite::micro::GetEvalInput(context, node, 0); + TfLiteEvalTensor* output = tflite::micro::GetEvalOutput(context, node, 0); + TFLITE_DCHECK(node->user_data != nullptr); + + if (input->type == kTfLiteInt8 && output->type == kTfLiteInt16) { +#if defined(HIFI4) || defined(HIFI5) + return EvalHifi(static_cast(node->user_data), input, + output, context); +#else + SoftmaxParams op_data = *static_cast(node->user_data); + tflite::reference_ops::Softmax( + op_data, tflite::micro::GetTensorShape(input), + tflite::micro::GetTensorData(input), + tflite::micro::GetTensorShape(output), + tflite::micro::GetTensorData(output)); + return kTfLiteOk; +#endif // defined(HIFI4) || defined(HIFI5) + } else { + 
MicroPrintf("Type %s (%d) not supported.", TfLiteTypeGetName(input->type), + input->type); + return kTfLiteError; + } +} + +TFLMRegistration Register_SOFTMAX_INT8_INT16() { + return tflite::micro::RegisterOp(XtensaInitSoftmax, XtensaPrepareSoftmax, + XtensaEvalSoftmaxInt8Int16); +} + +} // namespace tflite diff --git a/tensorflow/lite/micro/kernels/xtensa/softmax_vision.cc b/tensorflow/lite/micro/kernels/xtensa/softmax_vision.cc new file mode 100644 index 0000000..0c78327 --- /dev/null +++ b/tensorflow/lite/micro/kernels/xtensa/softmax_vision.cc @@ -0,0 +1,100 @@ +/* Copyright 2022 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/ + +#if defined(VISION_P6) + +#include + +#include "tensorflow/lite/c/builtin_op_data.h" +#include "tensorflow/lite/c/common.h" +#include "tensorflow/lite/kernels/internal/common.h" +#include "tensorflow/lite/kernels/internal/tensor_ctypes.h" +#include "tensorflow/lite/kernels/kernel_util.h" +#include "tensorflow/lite/micro/kernels/kernel_util.h" +#include "tensorflow/lite/micro/kernels/softmax.h" +#include "tensorflow/lite/micro/kernels/xtensa/xtensa.h" +#include "tensorflow/lite/micro/kernels/xtensa/xtensa_softmax.h" + +namespace tflite { + +TfLiteStatus SoftmaxPrepareVision(TfLiteContext* context, TfLiteNode* node) { + TFLITE_DCHECK(node->user_data != nullptr); + TFLITE_DCHECK(node->builtin_data != nullptr); + + XtensaSoftmaxOpData* data = + reinterpret_cast(node->user_data); + + MicroContext* micro_context = GetMicroContext(context); + TfLiteTensor* output = micro_context->AllocateTempOutputTensor(node, 0); + TF_LITE_ENSURE(context, output != nullptr); + TfLiteTensor* input = micro_context->AllocateTempInputTensor(node, 0); + TF_LITE_ENSURE(context, input != nullptr); + + uint32_t context_size = 0; + uint32_t status = xiSoftmaxGetMemReqd_Context(&context_size); + TFLITE_DCHECK(status == 0); + if (context_size) { + void* context_data = + context->AllocatePersistentBuffer(context, context_size); + if (context_data == nullptr) { + return kTfLiteError; + } + data->p_context = reinterpret_cast(context_data); + data->context_size = context_size; + } + + uint32_t input_dims[4] = {1, 1, 1, 1}; + uint32_t output_dims[4] = {1, 1, 1, 1}; + for (int i = 0; i < NumDimensions(input); i++) { + input_dims[i] = + std::max(1, SizeOfDimension(input, NumDimensions(input) - 1 - i)); + } + for (int i = 0; i < NumDimensions(output); i++) { + output_dims[i] = + std::max(1, SizeOfDimension(output, NumDimensions(output) - 1 - i)); + } + + status = xiSoftmaxSetContext( + data->p_context, data->context_size, 
input_dims[0], input_dims[1], + input_dims[2], input_dims[3], data->params.input_multiplier, + data->params.input_left_shift, data->params.diff_min); + + if (status) { + return kTfLiteError; + } + + micro_context->DeallocateTempTfLiteTensor(output); + micro_context->DeallocateTempTfLiteTensor(input); + + return kTfLiteOk; +} + +TfLiteStatus SoftmaxEvalVision(TfLiteContext* context, TfLiteNode* node, + const XtensaSoftmaxOpData& data, + const TfLiteEvalTensor* input, + TfLiteEvalTensor* output) { + const uint32_t input_size = NumElements(input->dims); + const uint32_t output_size = NumElements(output->dims); + + xiSoftmax(data.p_context, data.context_size, + const_cast(tflite::micro::GetTensorData(input)), + input_size, tflite::micro::GetTensorData(output), + output_size); + + return kTfLiteOk; +} + +} // namespace tflite +#endif // defined(VISION_P6) diff --git a/tensorflow/lite/micro/kernels/xtensa/strided_slice.cc b/tensorflow/lite/micro/kernels/xtensa/strided_slice.cc new file mode 100644 index 0000000..0440cfc --- /dev/null +++ b/tensorflow/lite/micro/kernels/xtensa/strided_slice.cc @@ -0,0 +1,263 @@ +/* Copyright 2023 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/ +#include "tensorflow/lite/kernels/internal/reference/strided_slice.h" + +#include +#include + +#include "tensorflow/lite/c/builtin_op_data.h" +#include "tensorflow/lite/c/common.h" +#include "tensorflow/lite/kernels/internal/tensor_ctypes.h" +#include "tensorflow/lite/kernels/kernel_util.h" +#include "tensorflow/lite/kernels/op_macros.h" +#include "tensorflow/lite/micro/kernels/kernel_util.h" +#include "tensorflow/lite/micro/kernels/xtensa/xtensa.h" +#include "tensorflow/lite/micro/micro_log.h" + +namespace tflite { +namespace { + +constexpr int kInputTensor = 0; +constexpr int kBeginTensor = 1; +constexpr int kEndTensor = 2; +constexpr int kStridesTensor = 3; +constexpr int kOutputTensor = 0; + +struct StridedSliceContext { + StridedSliceContext(TfLiteContext* context, TfLiteNode* node) { + params = reinterpret_cast(node->builtin_data); + micro_context = GetMicroContext(context); + input = micro_context->AllocateTempInputTensor(node, kInputTensor); + begin = micro_context->AllocateTempInputTensor(node, kBeginTensor); + end = micro_context->AllocateTempInputTensor(node, kEndTensor); + strides = micro_context->AllocateTempInputTensor(node, kStridesTensor); + output = micro_context->AllocateTempOutputTensor(node, kOutputTensor); + dims = NumDimensions(input); + } + ~StridedSliceContext() { + micro_context->DeallocateTempTfLiteTensor(input); + micro_context->DeallocateTempTfLiteTensor(begin); + micro_context->DeallocateTempTfLiteTensor(end); + micro_context->DeallocateTempTfLiteTensor(strides); + micro_context->DeallocateTempTfLiteTensor(output); + } + const TfLiteStridedSliceParams* params; + MicroContext* micro_context; + TfLiteTensor* input; + TfLiteTensor* begin; + TfLiteTensor* end; + TfLiteTensor* strides; + TfLiteTensor* output; + int dims; +}; + +// This Op only supports 1-4D cases and since we use the reference 4D +// implementation, the 1-3D tensors are mapped to 4D. 
+const int kMaxDim = 4; + +tflite::StridedSliceParams BuildStridedSliceParams( + StridedSliceContext* op_context) { + tflite::StridedSliceParams op_params; + op_params.start_indices_count = op_context->dims; + op_params.stop_indices_count = op_context->dims; + op_params.strides_count = op_context->dims; + + for (int i = 0; i < op_context->dims; ++i) { + op_params.start_indices[i] = GetTensorData(op_context->begin)[i]; + op_params.stop_indices[i] = GetTensorData(op_context->end)[i]; + op_params.strides[i] = GetTensorData(op_context->strides)[i]; + } + + op_params.begin_mask = op_context->params->begin_mask; + op_params.ellipsis_mask = 0; + op_params.end_mask = op_context->params->end_mask; + op_params.new_axis_mask = 0; + op_params.shrink_axis_mask = op_context->params->shrink_axis_mask; + return op_params; +} + +// Processes the indexing tensors (begin, end and strides) to resize the +// output tensor. This function is callable from both Prepare() and Eval() as +// long as the caller ensures the indexing tensors are present. +TfLiteStatus CheckOutputSize(TfLiteContext* context, + StridedSliceContext* op_context) { + using ::tflite::strided_slice::StartForAxis; + using ::tflite::strided_slice::StopForAxis; + TfLiteIntArray* output_shape = op_context->output->dims; + int shape_size = 0; + auto op_params = BuildStridedSliceParams(op_context); + auto input_shape = GetTensorShape(op_context->input); + for (int idx = 0; idx < op_context->dims; ++idx) { + int32_t stride = GetTensorData(op_context->strides)[idx]; + TF_LITE_ENSURE_MSG(context, stride != 0, "stride value has to be non-zero"); + int32_t begin = StartForAxis(op_params, input_shape, idx); + int32_t end = StopForAxis(op_params, input_shape, idx, begin); + + // When shrinking an axis, the end position does not matter (and can be + // incorrect when negative indexing is used, see Issue #19260). 
Always use + // begin + 1 to generate a length 1 slice, since begin has + // already been adjusted for negative indices by StartForAxis. + const bool shrink_axis = op_context->params->shrink_axis_mask & (1 << idx); + if (shrink_axis) { + end = begin + 1; + } + + // This is valid for both positive and negative strides + int32_t dim_shape = std::ceil((end - begin) / static_cast(stride)); + dim_shape = dim_shape < 0 ? 0 : dim_shape; + if (!shrink_axis) { + TF_LITE_ENSURE_EQ(context, output_shape->data[shape_size], dim_shape); + shape_size++; + } + } + TF_LITE_ENSURE_EQ(context, output_shape->size, shape_size); + return kTfLiteOk; +} + +void* Init(TfLiteContext* context, const char* buffer, size_t length) { + TFLITE_DCHECK(context->AllocatePersistentBuffer != nullptr); + return context->AllocatePersistentBuffer(context, sizeof(StridedSliceParams)); +} + +TfLiteStatus Prepare(TfLiteContext* context, TfLiteNode* node) { + TFLITE_DCHECK(node->user_data != nullptr); + StridedSliceParams* op_params = + static_cast(node->user_data); + TF_LITE_ENSURE_EQ(context, NumInputs(node), 4); + TF_LITE_ENSURE_EQ(context, NumOutputs(node), 1); + StridedSliceContext op_context(context, node); + TF_LITE_ENSURE_MSG(context, op_context.dims <= kMaxDim, + "input dim should not exceed 4"); + auto params = BuildStridedSliceParams(&op_context); + memcpy(op_params, ¶ms, sizeof(StridedSliceParams)); + return CheckOutputSize(context, &op_context); +} + +#if defined(HIFI4) +void StridedSlice_int16_hifi4opt(const tflite::StridedSliceParams& op_params, + const RuntimeShape& unextended_input_shape, + const int16_t* input_data, + const RuntimeShape& unextended_output_shape, + int16_t* output_data) { + using ::tflite::strided_slice::StartForAxis; + using ::tflite::strided_slice::StopForAxis; + + ruy::profiler::ScopeLabel label("StridedSlice"); + + // Note that the output_shape is not used herein. 
+ tflite::StridedSliceParams params_copy = op_params; + + TFLITE_DCHECK_LE(unextended_input_shape.DimensionsCount(), 5); + TFLITE_DCHECK_LE(unextended_output_shape.DimensionsCount(), 5); + const RuntimeShape input_shape = + RuntimeShape::ExtendedShape(5, unextended_input_shape); + const RuntimeShape output_shape = + RuntimeShape::ExtendedShape(5, unextended_output_shape); + + // Reverse and pad to 5 dimensions because that is what the runtime code + // requires (ie. all shapes must be 5D and are given backwards). + ::tflite::strided_slice::StridedSlicePadIndices(¶ms_copy, 5); + + const int start_0 = StartForAxis(params_copy, input_shape, 0); + const int stop_0 = StopForAxis(params_copy, input_shape, 0, start_0); + const int start_1 = StartForAxis(params_copy, input_shape, 1); + const int stop_1 = StopForAxis(params_copy, input_shape, 1, start_1); + const int start_2 = StartForAxis(params_copy, input_shape, 2); + const int stop_2 = StopForAxis(params_copy, input_shape, 2, start_2); + const int start_3 = StartForAxis(params_copy, input_shape, 3); + const int stop_3 = StopForAxis(params_copy, input_shape, 3, start_3); + const int start_4 = StartForAxis(params_copy, input_shape, 4); + const int stop_4 = StopForAxis(params_copy, input_shape, 4, start_4); + + xa_nn_strided_slice_int16(output_data, input_data, static_cast(start_0), + static_cast(stop_0), static_cast(start_1), + static_cast(stop_1), static_cast(start_2), + static_cast(stop_2), static_cast(start_3), + static_cast(stop_3), static_cast(start_4), + static_cast(stop_4), params_copy.strides[0], + params_copy.strides[1], params_copy.strides[2], + params_copy.strides[3], params_copy.strides[4], + input_shape.Dims(1), input_shape.Dims(2), + input_shape.Dims(3), input_shape.Dims(4)); +} +#endif // defined(HIFI4) + +TfLiteStatus Eval(TfLiteContext* context, TfLiteNode* node) { + TFLITE_DCHECK(node->user_data != nullptr); + const StridedSliceParams& op_params = + *(static_cast(node->user_data)); + + const 
TfLiteEvalTensor* input = + tflite::micro::GetEvalInput(context, node, kInputTensor); + TfLiteEvalTensor* output = + tflite::micro::GetEvalOutput(context, node, kOutputTensor); + switch (output->type) { + case kTfLiteFloat32: + reference_ops::StridedSlice(op_params, + tflite::micro::GetTensorShape(input), + tflite::micro::GetTensorData(input), + tflite::micro::GetTensorShape(output), + tflite::micro::GetTensorData(output)); + break; + case kTfLiteInt8: + reference_ops::StridedSlice(op_params, + tflite::micro::GetTensorShape(input), + tflite::micro::GetTensorData(input), + tflite::micro::GetTensorShape(output), + tflite::micro::GetTensorData(output)); + break; + case kTfLiteInt16: +#if defined(HIFI4) + StridedSlice_int16_hifi4opt( + op_params, tflite::micro::GetTensorShape(input), + tflite::micro::GetTensorData(input), + tflite::micro::GetTensorShape(output), + tflite::micro::GetTensorData(output)); +#else + reference_ops::StridedSlice( + op_params, tflite::micro::GetTensorShape(input), + tflite::micro::GetTensorData(input), + tflite::micro::GetTensorShape(output), + tflite::micro::GetTensorData(output)); +#endif // defined(HIFI4) + break; + case kTfLiteInt32: + reference_ops::StridedSlice( + op_params, tflite::micro::GetTensorShape(input), + tflite::micro::GetTensorData(input), + tflite::micro::GetTensorShape(output), + tflite::micro::GetTensorData(output)); + break; + case kTfLiteBool: + reference_ops::StridedSlice(op_params, + tflite::micro::GetTensorShape(input), + tflite::micro::GetTensorData(input), + tflite::micro::GetTensorShape(output), + tflite::micro::GetTensorData(output)); + break; + default: + MicroPrintf("Type %s (%d) not supported.", TfLiteTypeGetName(input->type), + input->type); + return kTfLiteError; + } + return kTfLiteOk; +} +} // namespace + +TFLMRegistration Register_STRIDED_SLICE() { + return tflite::micro::RegisterOp(Init, Prepare, Eval); +} + +} // namespace tflite diff --git a/tensorflow/lite/micro/kernels/xtensa/sub.cc 
b/tensorflow/lite/micro/kernels/xtensa/sub.cc new file mode 100644 index 0000000..c4f0984 --- /dev/null +++ b/tensorflow/lite/micro/kernels/xtensa/sub.cc @@ -0,0 +1,259 @@ +/* Copyright 2022 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ +#include "tensorflow/lite/micro/kernels/sub.h" + +#include "tensorflow/lite/c/builtin_op_data.h" +#include "tensorflow/lite/c/common.h" +#include "tensorflow/lite/kernels/internal/common.h" +#include "tensorflow/lite/kernels/internal/quantization_util.h" +#include "tensorflow/lite/kernels/internal/reference/add.h" +#include "tensorflow/lite/kernels/internal/reference/process_broadcast_shapes.h" +#include "tensorflow/lite/kernels/internal/reference/sub.h" +#include "tensorflow/lite/kernels/internal/tensor_ctypes.h" +#include "tensorflow/lite/kernels/internal/types.h" +#include "tensorflow/lite/kernels/kernel_util.h" +#include "tensorflow/lite/kernels/op_macros.h" +#include "tensorflow/lite/micro/kernels/kernel_util.h" +#include "tensorflow/lite/micro/kernels/xtensa/xtensa.h" +#include "tensorflow/lite/micro/micro_log.h" + +namespace tflite { + +void* SubInit(TfLiteContext* context, const char* buffer, size_t length) { + TFLITE_DCHECK(context->AllocatePersistentBuffer != nullptr); + return context->AllocatePersistentBuffer(context, sizeof(OpDataSub)); +} + +void EvalSub(TfLiteContext* context, TfLiteNode* node, TfLiteSubParams* 
params, + const OpDataSub* data, const TfLiteEvalTensor* input1, + const TfLiteEvalTensor* input2, TfLiteEvalTensor* output) { + float output_activation_min, output_activation_max; + CalculateActivationRange(params->activation, &output_activation_min, + &output_activation_max); + tflite::ArithmeticParams op_params; + SetActivationParams(output_activation_min, output_activation_max, &op_params); + if (data->requires_broadcast) { + tflite::reference_ops::BroadcastSubSlow( + op_params, tflite::micro::GetTensorShape(input1), + tflite::micro::GetTensorData(input1), + tflite::micro::GetTensorShape(input2), + tflite::micro::GetTensorData(input2), + tflite::micro::GetTensorShape(output), + tflite::micro::GetTensorData(output)); + } else { + tflite::reference_ops::SubWithActivation( + op_params, tflite::micro::GetTensorShape(input1), + tflite::micro::GetTensorData(input1), + tflite::micro::GetTensorShape(input2), + tflite::micro::GetTensorData(input2), + tflite::micro::GetTensorShape(output), + tflite::micro::GetTensorData(output)); + } +} + +TfLiteStatus EvalSubQuantized(TfLiteContext* context, TfLiteNode* node, + TfLiteSubParams* params, const OpDataSub* data, + const TfLiteEvalTensor* input1, + const TfLiteEvalTensor* input2, + TfLiteEvalTensor* output) { + tflite::ArithmeticParams op_params; + op_params.left_shift = data->left_shift; + op_params.input1_offset = data->input1_offset; + op_params.input1_multiplier = data->input1_multiplier; + op_params.input1_shift = data->input1_shift; + op_params.input2_offset = data->input2_offset; + op_params.input2_multiplier = data->input2_multiplier; + op_params.input2_shift = data->input2_shift; + op_params.output_offset = data->output_offset; + op_params.output_multiplier = data->output_multiplier; + op_params.output_shift = data->output_shift; + SetActivationParams(data->output_activation_min, data->output_activation_max, + &op_params); + // TODO(b/259724572): vision_p6 and hifi code path is getting very confusing. 
+ // Let's separate them into two different files. +#if !(defined(HIFI4)) + bool need_broadcast = reference_ops::ProcessBroadcastShapes( + tflite::micro::GetTensorShape(input1), + tflite::micro::GetTensorShape(input2), &op_params); +#endif // !(defined(HIFI4)) + + switch (output->type) { + case kTfLiteInt8: { +#if defined(HIFI4) + int err; + const RuntimeShape extended_input1_shape = + RuntimeShape::ExtendedShape(5, tflite::micro::GetTensorShape(input1)); + const RuntimeShape extended_input2_shape = + RuntimeShape::ExtendedShape(5, tflite::micro::GetTensorShape(input2)); + const RuntimeShape extended_output_shape = + RuntimeShape::ExtendedShape(5, tflite::micro::GetTensorShape(output)); + const int* input1_dims = extended_input1_shape.DimsData(); + const int* input2_dims = extended_input2_shape.DimsData(); + const int* output_dims = extended_output_shape.DimsData(); + // TODO(b/259724572): Refactor the following block of code. + int b; + int inp1_off = 0; + int inp2_off = 0; + int out_off; + out_off = + output_dims[1] * output_dims[2] * output_dims[3] * output_dims[4]; + if (input1_dims[0] > 1) { + inp1_off = + input1_dims[1] * input1_dims[2] * input1_dims[3] * input1_dims[4]; + } + if (input2_dims[0] > 1) { + inp2_off = + input2_dims[1] * input2_dims[2] * input2_dims[3] * input2_dims[4]; + } + + for (b = 0; b < output_dims[0]; b++) { + err = xa_nn_elm_sub_broadcast_4D_asym8sxasym8s_asym8s( + tflite::micro::GetTensorData(output) + b * out_off, + output_dims + 1, op_params.output_offset, op_params.output_shift, + op_params.output_multiplier, op_params.quantized_activation_min, + op_params.quantized_activation_max, + tflite::micro::GetTensorData(input1) + b * inp1_off, + input1_dims + 1, op_params.input1_offset, op_params.input1_shift, + op_params.input1_multiplier, + tflite::micro::GetTensorData(input2), input2_dims + 1, + op_params.input2_offset, op_params.input2_shift, + op_params.input2_multiplier, op_params.left_shift); + + TF_LITE_ENSURE(context, err == 0); + } 
+#else // defined(HIFI4) + if (need_broadcast) { + tflite::reference_ops::BroadcastQuantSubSlow( + op_params, tflite::micro::GetTensorShape(input1), + tflite::micro::GetTensorData(input1), + tflite::micro::GetTensorShape(input2), + tflite::micro::GetTensorData(input2), + tflite::micro::GetTensorShape(output), + tflite::micro::GetTensorData(output)); + } else { + tflite::reference_ops::Sub( + op_params, tflite::micro::GetTensorShape(input1), + tflite::micro::GetTensorData(input1), + tflite::micro::GetTensorShape(input2), + tflite::micro::GetTensorData(input2), + tflite::micro::GetTensorShape(output), + tflite::micro::GetTensorData(output)); + } +#endif // defined(HIFI4) + break; + } + case kTfLiteInt16: { +#if defined(HIFI4) + int err; + const RuntimeShape extended_input1_shape = + RuntimeShape::ExtendedShape(5, tflite::micro::GetTensorShape(input1)); + const RuntimeShape extended_input2_shape = + RuntimeShape::ExtendedShape(5, tflite::micro::GetTensorShape(input2)); + const RuntimeShape extended_output_shape = + RuntimeShape::ExtendedShape(5, tflite::micro::GetTensorShape(output)); + const int* input1_dims = extended_input1_shape.DimsData(); + const int* input2_dims = extended_input2_shape.DimsData(); + const int* output_dims = extended_output_shape.DimsData(); + int b; + int inp1_off = 0; + int inp2_off = 0; + int out_off; + out_off = + output_dims[1] * output_dims[2] * output_dims[3] * output_dims[4]; + if (input1_dims[0] > 1) { + inp1_off = + input1_dims[1] * input1_dims[2] * input1_dims[3] * input1_dims[4]; + } + if (input2_dims[0] > 1) { + inp2_off = + input2_dims[1] * input2_dims[2] * input2_dims[3] * input2_dims[4]; + } + + for (b = 0; b < output_dims[0]; b++) { + err = xa_nn_elm_sub_broadcast_4D_asym16sxasym16s_asym16s( + tflite::micro::GetTensorData(output) + b * out_off, + output_dims + 1, op_params.output_offset, op_params.output_shift, + op_params.output_multiplier, op_params.quantized_activation_min, + op_params.quantized_activation_max, + 
tflite::micro::GetTensorData(input1) + b * inp1_off, + input1_dims + 1, op_params.input1_offset, op_params.input1_shift, + op_params.input1_multiplier, + tflite::micro::GetTensorData(input2), input2_dims + 1, + op_params.input2_offset, op_params.input2_shift, + op_params.input2_multiplier, op_params.left_shift); + + TF_LITE_ENSURE(context, err == 0); + } +#else // defined(HIFI4) + if (need_broadcast) { + tflite::reference_ops::BroadcastQuantSubSlow( + op_params, tflite::micro::GetTensorShape(input1), + tflite::micro::GetTensorData(input1), + tflite::micro::GetTensorShape(input2), + tflite::micro::GetTensorData(input2), + tflite::micro::GetTensorShape(output), + tflite::micro::GetTensorData(output)); + } else { + tflite::reference_ops::Sub( + op_params, tflite::micro::GetTensorShape(input1), + tflite::micro::GetTensorData(input1), + tflite::micro::GetTensorShape(input2), + tflite::micro::GetTensorData(input2), + tflite::micro::GetTensorShape(output), + tflite::micro::GetTensorData(output)); + } +#endif // defined(HIFI4) + break; + } + default: + MicroPrintf("Quantized type %s not currently supported.", + TfLiteTypeGetName(output->type)); + return kTfLiteError; + } + return kTfLiteOk; +} + +TfLiteStatus SubEval(TfLiteContext* context, TfLiteNode* node) { + auto* params = reinterpret_cast(node->builtin_data); + + const TfLiteEvalTensor* input1 = + tflite::micro::GetEvalInput(context, node, kSubInputTensor1); + const TfLiteEvalTensor* input2 = + tflite::micro::GetEvalInput(context, node, kSubInputTensor2); + TfLiteEvalTensor* output = + tflite::micro::GetEvalOutput(context, node, kSubOutputTensor); + + TFLITE_DCHECK(node->user_data != nullptr); + const OpDataSub& data = *(static_cast(node->user_data)); + + if (output->type == kTfLiteFloat32) { + EvalSub(context, node, params, &data, input1, input2, output); + } else if (output->type == kTfLiteInt8 || output->type == kTfLiteInt16) { + TF_LITE_ENSURE_OK(context, EvalSubQuantized(context, node, params, &data, + input1, 
input2, output)); + } else { + MicroPrintf("Type %s (%d) not supported.", TfLiteTypeGetName(output->type), + output->type); + return kTfLiteError; + } + + return kTfLiteOk; +} + +TFLMRegistration Register_SUB() { + return tflite::micro::RegisterOp(SubInit, SubPrepare, SubEval); +} + +} // namespace tflite \ No newline at end of file diff --git a/tensorflow/lite/micro/kernels/xtensa/svdf.cc b/tensorflow/lite/micro/kernels/xtensa/svdf.cc new file mode 100644 index 0000000..c1dac3b --- /dev/null +++ b/tensorflow/lite/micro/kernels/xtensa/svdf.cc @@ -0,0 +1,392 @@ +/* Copyright 2023 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/ + +#include "tensorflow/lite/micro/kernels/svdf.h" + +#include + +#include "tensorflow/lite/c/builtin_op_data.h" +#include "tensorflow/lite/c/common.h" +#include "tensorflow/lite/kernels/internal/common.h" +#include "tensorflow/lite/kernels/internal/quantization_util.h" +#include "tensorflow/lite/kernels/internal/tensor_ctypes.h" +#include "tensorflow/lite/kernels/kernel_util.h" +#include "tensorflow/lite/kernels/op_macros.h" +#include "tensorflow/lite/micro/kernels/activation_utils.h" +#include "tensorflow/lite/micro/kernels/kernel_util.h" +#include "tensorflow/lite/micro/kernels/xtensa/xtensa.h" +#include "tensorflow/lite/micro/kernels/xtensa/xtensa_svdf.h" +#include "tensorflow/lite/micro/micro_log.h" + +namespace tflite { +namespace { + +#if defined(HIFI4) || defined(HIFI5) + +TfLiteStatus EvalIntegerSvdfHifi(TfLiteContext* context, TfLiteNode* node, + const TfLiteEvalTensor* input_tensor, + const TfLiteEvalTensor* weights_feature_tensor, + const TfLiteEvalTensor* weights_time_tensor, + const TfLiteEvalTensor* bias_tensor, + const TfLiteSVDFParams* params, + TfLiteEvalTensor* activation_state_tensor, + TfLiteEvalTensor* output_tensor, + const OpDataSvdf& data) { + const int n_rank = params->rank; + const int n_batch = input_tensor->dims->data[0]; + const int n_input = input_tensor->dims->data[1]; + const int n_filter = weights_feature_tensor->dims->data[0]; + const int n_unit = n_filter / n_rank; + const int n_memory = weights_time_tensor->dims->data[1]; + + TFLITE_DCHECK(context != nullptr); + TFLITE_DCHECK(context->GetScratchBuffer != nullptr); + + // Shift states. + int16_t* const state_ptr = + tflite::micro::GetTensorData(activation_state_tensor); + + // Left shift the activation_state. 
+ int num_bytes = sizeof(*state_ptr) * (n_batch * n_filter * n_memory - 1); +#if defined(HIFI5) + memcpy(state_ptr, state_ptr + 1, num_bytes); +#else + xa_nn_memmove_16(state_ptr, state_ptr + 1, num_bytes); +#endif // defined(HIFI5) + + // Note: no need to clear the latest activation, matmul is not accumulative. + + // Feature matmul. + const int8_t* input = tflite::micro::GetTensorData(input_tensor); + const int8_t* weight_feature = + tflite::micro::GetTensorData(weights_feature_tensor); + int16_t* result_in_batch = state_ptr + (n_memory - 1); + + for (int b = 0; b < n_batch; b++) { + TF_LITE_ENSURE_EQ(context, + xa_nn_matXvec_out_stride_sym8sxasym8s_16( + &result_in_batch[b * n_filter * n_memory], + weight_feature, &input[b * n_input], NULL, n_filter, + n_input, n_input, n_memory, -data.input_zero_point, + (data.effective_scale_1_a), data.effective_scale_1_b), + 0); + } + + // Time weights dot product + activation + for (int b = 0; b < n_batch; ++b) { + const int16_t* vector1_ptr = + tflite::micro::GetTensorData(weights_time_tensor); + const int16_t* vector2_ptr = + tflite::micro::GetTensorData(activation_state_tensor) + + b * n_memory * n_filter; + // TODO(#1751): account for optional bias tensor + const int32_t* bias_ptr = + tflite::micro::GetTensorData(bias_tensor); + int8_t* output_ptr = + tflite::micro::GetTensorData(output_tensor) + b * n_unit; + + // TODO(#1751): account for optional bias tensor + TF_LITE_ENSURE_EQ( + context, + xa_nn_dot_prod_16x16_asym8s( + output_ptr, vector1_ptr, vector2_ptr, bias_ptr, n_memory * n_rank, + (data.effective_scale_2_a), data.effective_scale_2_b, + data.output_zero_point, n_unit), + 0); + } + return kTfLiteOk; +} +#endif // defined(HIFI4) || defined(HIFI5) + +void* Init(TfLiteContext* context, const char* buffer, size_t length) { + TFLITE_DCHECK(context != nullptr); + return context->AllocatePersistentBuffer(context, sizeof(OpDataSvdf)); +} + +TfLiteStatus PrepareInt8(TfLiteContext* context, TfLiteNode* node) { +#if 
defined(HIFIMINI) || defined(HIFI4) || defined(HIFI5) + TFLITE_DCHECK(node->builtin_data != nullptr); + const auto* params = static_cast(node->builtin_data); + + // Validate Tensor Inputs (dtype depends on quantization): + // [0] = Input, {2, batch_size, input_size} + // [1] = Weights Feature, {2, num_filters, input_size} + // [2] = Weights Time, {2, num_filters, memory_size} + // [3] = Bias (optional), {1, num_units} + // [4] = Activation State (variable), + // {2, batch_size, memory_size * num_filters} + MicroContext* micro_context = GetMicroContext(context); + TfLiteTensor* input = + micro_context->AllocateTempInputTensor(node, kSvdfInputTensor); + TfLiteTensor* weights_feature = + micro_context->AllocateTempInputTensor(node, kSvdfWeightsFeatureTensor); + TfLiteTensor* weights_time = + micro_context->AllocateTempInputTensor(node, kSvdfWeightsTimeTensor); + TfLiteTensor* bias = + micro_context->AllocateTempInputTensor(node, kSvdfBiasTensor); + TfLiteTensor* activation_state = micro_context->AllocateTempInputTensor( + node, kSvdfInputActivationStateTensor); + + // Define input constants based on input tensor definition above: + const int rank = params->rank; + const int input_size = input->dims->data[1]; + const int batch_size = input->dims->data[0]; + +#if defined(HIFIMINI) + // Ensure the input size is a multiple of two. This is necessary since + // optimized kernels access the memory in chunks of two, and all accesses + // must be aligned to 16 bits. + // TODO(b/153202598): Remove when padding is allowed in TFLite tensors. 
+ TF_LITE_ENSURE_EQ(context, input_size % 2, 0); +#endif // defined(HIFIMINI) + + const int num_filters = weights_feature->dims->data[0]; + TF_LITE_ENSURE_EQ(context, num_filters % rank, 0); + const int num_units = num_filters / rank; + const int memory_size = weights_time->dims->data[1]; + + // Validate Input Tensor: + TF_LITE_ENSURE(context, input->type == kTfLiteInt8); + TF_LITE_ENSURE_EQ(context, NumDimensions(input), 2); + + // Validate Tensor Output: + // [0] = float/int8_t, {2, batch_size, num_units} + TF_LITE_ENSURE_EQ(context, node->outputs->size, 1); + TfLiteTensor* output = + micro_context->AllocateTempOutputTensor(node, kSvdfOutputTensor); + TF_LITE_ENSURE_EQ(context, NumDimensions(output), 2); + TF_LITE_ENSURE_EQ(context, output->dims->data[0], batch_size); + TF_LITE_ENSURE_EQ(context, output->dims->data[1], num_units); + + // Validate Weights Feature Input Tensor: + TF_LITE_ENSURE_EQ(context, NumDimensions(weights_feature), 2); + TF_LITE_ENSURE_EQ(context, weights_feature->dims->data[1], input_size); + + // Validate Weights Time Input Tensor: + TF_LITE_ENSURE_EQ(context, NumDimensions(weights_time), 2); + TF_LITE_ENSURE_EQ(context, weights_time->dims->data[0], num_filters); + TF_LITE_ENSURE_EQ(context, weights_time->dims->data[1], memory_size); + + // Validate Optional Bias Input Tensor: + if (bias != nullptr) { + TF_LITE_ENSURE_EQ(context, bias->dims->data[0], num_units); + TF_LITE_ENSURE_EQ(context, bias->type, kTfLiteInt32); + } + + // Validate Activation State Input Tensor: + TF_LITE_ENSURE_EQ(context, NumDimensions(activation_state), 2); + TF_LITE_ENSURE_EQ(context, activation_state->dims->data[0], batch_size); + TF_LITE_ENSURE_EQ(context, activation_state->dims->data[1], + memory_size * num_filters); + + TF_LITE_ENSURE_EQ(context, node->inputs->size, 5); + TF_LITE_ENSURE_EQ(context, weights_feature->type, kTfLiteInt8); + TF_LITE_ENSURE_EQ(context, weights_time->type, kTfLiteInt16); + TF_LITE_ENSURE_EQ(context, activation_state->type, 
kTfLiteInt16); + + // Validate output tensor: + TF_LITE_ENSURE_TYPES_EQ(context, output->type, kTfLiteInt8); + + const double effective_scale_1 = + static_cast(input->params.scale * weights_feature->params.scale / + activation_state->params.scale); + const double effective_scale_2 = + static_cast(activation_state->params.scale * + weights_time->params.scale / output->params.scale); + + // TODO(#1751): account for optional bias tensor + TF_LITE_ENSURE_NEAR(context, static_cast(bias->params.scale), + static_cast(activation_state->params.scale * + weights_time->params.scale), + 1e-5); + + TFLITE_DCHECK(node->user_data != nullptr); + OpDataSvdf* data = static_cast(node->user_data); + +#if defined(HIFIMINI) + QuantizeMultiplierForInt24(effective_scale_1, &data->effective_scale_1_a, + &data->effective_scale_1_b); + QuantizeMultiplierForInt24(effective_scale_2, &data->effective_scale_2_a, + &data->effective_scale_2_b); +#else + QuantizeMultiplier(effective_scale_1, &(data->effective_scale_1_a), + &(data->effective_scale_1_b)); + QuantizeMultiplier(effective_scale_2, &(data->effective_scale_2_a), + &(data->effective_scale_2_b)); +#endif // defined(HIFIMINI) + + data->input_zero_point = input->params.zero_point; + data->output_zero_point = output->params.zero_point; + + const TfLiteStatus scratch_status = context->RequestScratchBufferInArena( + context, batch_size * num_filters * sizeof(int32_t), + &(data->scratch_tensor_index)); + TF_LITE_ENSURE_OK(context, scratch_status); + const TfLiteStatus scratch_output_status = + context->RequestScratchBufferInArena( + context, batch_size * num_units * sizeof(int32_t), + &(data->scratch_output_tensor_index)); + TF_LITE_ENSURE_OK(context, scratch_output_status); + + micro_context->DeallocateTempTfLiteTensor(input); + micro_context->DeallocateTempTfLiteTensor(weights_time); + micro_context->DeallocateTempTfLiteTensor(weights_feature); + if (bias != nullptr) { + micro_context->DeallocateTempTfLiteTensor(bias); + } + 
micro_context->DeallocateTempTfLiteTensor(activation_state); + micro_context->DeallocateTempTfLiteTensor(output); + + return kTfLiteOk; +#else + return PrepareSvdf(context, node); +#endif // defined(HIFIMINI) || defined(HIFI4) || defined(HIFI5) +} + +TfLiteStatus Prepare(TfLiteContext* context, TfLiteNode* node) { +#if defined(HIFIMINI) || defined(HIFI4) || defined(HIFI5) + + MicroContext* micro_context = GetMicroContext(context); + TfLiteTensor* input = + micro_context->AllocateTempInputTensor(node, kSvdfInputTensor); + TfLiteTensor* weights_time = + micro_context->AllocateTempInputTensor(node, kSvdfWeightsTimeTensor); + + TfLiteStatus status; + if (input->type == kTfLiteInt8 && weights_time->type == kTfLiteInt16) { + status = PrepareInt8(context, node); + } else { + status = PrepareSvdf(context, node); + } + + micro_context->DeallocateTempTfLiteTensor(input); + micro_context->DeallocateTempTfLiteTensor(weights_time); + + return status; +#else + return PrepareSvdf(context, node); +#endif // defined(HIFIMINI) || defined(HIFI4) || defined(HIFI5) +} + +TfLiteStatus EvalInt8(TfLiteContext* context, TfLiteNode* node) { + auto* params = static_cast(node->builtin_data); + + const TfLiteEvalTensor* input = + tflite::micro::GetEvalInput(context, node, kSvdfInputTensor); + const TfLiteEvalTensor* weights_feature = + tflite::micro::GetEvalInput(context, node, kSvdfWeightsFeatureTensor); + const TfLiteEvalTensor* weights_time = + tflite::micro::GetEvalInput(context, node, kSvdfWeightsTimeTensor); + // TODO(#1751): account for optional bias tensor + const TfLiteEvalTensor* bias = + (NumInputs(node) == 5) + ? 
tflite::micro::GetEvalInput(context, node, kSvdfBiasTensor) + : nullptr; + TfLiteEvalTensor* activation_state = tflite::micro::GetMutableEvalInput( + context, node, kSvdfInputActivationStateTensor); + TfLiteEvalTensor* output = + tflite::micro::GetEvalOutput(context, node, kSvdfOutputTensor); + + TFLITE_DCHECK(node->user_data != nullptr); + const OpDataSvdf& data = *(static_cast(node->user_data)); + +#if defined(HIFIMINI) + return EvalIntegerSvdfHifimini(context, node, input, weights_feature, + weights_time, bias, params, activation_state, + output, data); +#elif defined(HIFI4) || defined(HIFI5) + return EvalIntegerSvdfHifi(context, node, input, weights_feature, + weights_time, bias, params, activation_state, + output, data); +#else + EvalInt16SvdfReference(context, node, input, weights_feature, weights_time, + bias, params, activation_state, output, data); + return kTfLiteOk; +#endif // defined(HIFI4) || defined(HIFI5) +} + +TfLiteStatus Eval(TfLiteContext* context, TfLiteNode* node) { + auto* params = static_cast(node->builtin_data); + + const TfLiteEvalTensor* input = + tflite::micro::GetEvalInput(context, node, kSvdfInputTensor); + const TfLiteEvalTensor* weights_feature = + tflite::micro::GetEvalInput(context, node, kSvdfWeightsFeatureTensor); + const TfLiteEvalTensor* weights_time = + tflite::micro::GetEvalInput(context, node, kSvdfWeightsTimeTensor); + // TODO(#1751): account for optional bias tensor + const TfLiteEvalTensor* bias = + (NumInputs(node) == 5) + ? 
tflite::micro::GetEvalInput(context, node, kSvdfBiasTensor) + : nullptr; + TfLiteEvalTensor* activation_state = tflite::micro::GetMutableEvalInput( + context, node, kSvdfInputActivationStateTensor); + TfLiteEvalTensor* output = + tflite::micro::GetEvalOutput(context, node, kSvdfOutputTensor); + + TFLITE_DCHECK(node->user_data != nullptr); + const OpDataSvdf& data = *(static_cast(node->user_data)); + + switch (weights_feature->type) { + case kTfLiteFloat32: { + EvalFloatSvdfReference( + context, node, input, weights_feature, weights_time, bias, params, + data.scratch_tensor_index, activation_state, output); + break; + } + + case kTfLiteInt8: { + switch (weights_time->type) { + case kTfLiteInt16: { + return EvalInt8(context, node); + } + + case kTfLiteInt8: { + EvalInt8SvdfReference(context, node, input, weights_feature, + weights_time, bias, params, activation_state, + output, data); + break; + } + + default: { + MicroPrintf("Type %s not currently supported.", + TfLiteTypeGetName(weights_time->type)); + return kTfLiteError; + } + } + break; + } + + default: { + MicroPrintf("Type %s not currently supported.", + TfLiteTypeGetName(weights_feature->type)); + return kTfLiteError; + } + } + + return kTfLiteOk; +} + +} // namespace + +TFLMRegistration Register_SVDF() { + return tflite::micro::RegisterOp(Init, Prepare, Eval); +} + +TFLMRegistration Register_SVDF_INT8() { + return tflite::micro::RegisterOp(Init, PrepareInt8, EvalInt8); +} + +} // namespace tflite diff --git a/tensorflow/lite/micro/kernels/xtensa/transpose_conv.cc b/tensorflow/lite/micro/kernels/xtensa/transpose_conv.cc new file mode 100644 index 0000000..826e168 --- /dev/null +++ b/tensorflow/lite/micro/kernels/xtensa/transpose_conv.cc @@ -0,0 +1,394 @@ +/* Copyright 2021 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. 
+You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#include "tensorflow/lite/kernels/internal/reference/transpose_conv.h" + +#include "tensorflow/lite/c/builtin_op_data.h" +#include "tensorflow/lite/c/common.h" +#include "tensorflow/lite/kernels/internal/common.h" +#include "tensorflow/lite/kernels/internal/quantization_util.h" +#include "tensorflow/lite/kernels/internal/reference/integer_ops/transpose_conv.h" +#include "tensorflow/lite/kernels/internal/tensor_ctypes.h" +#include "tensorflow/lite/kernels/kernel_util.h" +#include "tensorflow/lite/kernels/padding.h" +#include "tensorflow/lite/micro/kernels/kernel_util.h" +#include "tensorflow/lite/micro/kernels/xtensa/xtensa.h" +#include "tensorflow/lite/micro/micro_log.h" + +namespace tflite { +namespace { + +// For the TfLite transpose_conv implementation, input tensor 0 corresponds to +// the OutputShapeTensor. However, since TFLM does not support dynamic tensors, +// the TFLM implementation ignores input tensor 0 and the only inputs we care +// about are kFilterTensor, kInputTensor and kBiasTensor. +constexpr int kFilterTensor = 1; +constexpr int kInputTensor = 2; +constexpr int kBiasTensor = 3; +constexpr int kOutputTensor = 0; + +// Conv is quantized along dimension 0: +// https://www.tensorflow.org/lite/performance/quantization_spec +constexpr int kConvQuantizedDimension = 0; + +struct OpData { + ConvParams params; + + // A scratch buffer is required for quantized implementations. 
+ int scratch_buffer_index; + + // TODO(b/192090531): Remove this once all 8x16 transpose conv models use + // 64-bit biases. + int bias_converted_buffer_index; + + // Multiplier and shift arrays are required for the int8 implementation. + int32_t* per_channel_output_multiplier; + int32_t* per_channel_output_shift; +}; + +inline PaddingType RuntimePaddingType(TfLitePadding padding) { + switch (padding) { + case TfLitePadding::kTfLitePaddingSame: + return PaddingType::kSame; + case TfLitePadding::kTfLitePaddingValid: + return PaddingType::kValid; + case TfLitePadding::kTfLitePaddingUnknown: + default: + return PaddingType::kNone; + } +} + +TfLiteStatus CalculateOpData(TfLiteContext* context, TfLiteNode* node, + const TfLiteTransposeConvParams* params, int width, + int height, int filter_width, int filter_height, + const TfLiteType data_type, OpData* data) { + bool has_bias = node->inputs->size == 4; + // Check number of inputs/outputs + TF_LITE_ENSURE(context, has_bias || node->inputs->size == 3); + TF_LITE_ENSURE_EQ(context, node->outputs->size, 1); + + // Matching GetWindowedOutputSize in TensorFlow. + auto padding = params->padding; + int unused_output_width; + int unused_output_height; + TfLitePaddingValues padding_values = ComputePaddingHeightWidth( + params->stride_height, params->stride_width, 1, + 1, // Dilation height and width are always 1 for transpose_conv. + height, width, filter_height, filter_width, padding, + &unused_output_height, &unused_output_width); + + data->params.padding_type = RuntimePaddingType(padding); + data->params.padding_values.width = padding_values.width; + data->params.padding_values.height = padding_values.height; + + // Note that quantized inference requires that all tensors have their + // parameters set. This is usually done during quantized training. 
+ if (data_type != kTfLiteFloat32) { + MicroContext* micro_context = GetMicroContext(context); + TfLiteTensor* input = + micro_context->AllocateTempInputTensor(node, kInputTensor); + TF_LITE_ENSURE(context, input != nullptr); + TfLiteTensor* filter = + micro_context->AllocateTempInputTensor(node, kFilterTensor); + TF_LITE_ENSURE(context, filter != nullptr); + TfLiteTensor* bias = + micro_context->AllocateTempInputTensor(node, kBiasTensor); + TfLiteTensor* output = + micro_context->AllocateTempOutputTensor(node, kOutputTensor); + TF_LITE_ENSURE(context, output != nullptr); + int output_channels = filter->dims->data[kConvQuantizedDimension]; + + TF_LITE_ENSURE_STATUS(tflite::PopulateConvolutionQuantizationParams( + context, input, filter, bias, output, kTfLiteActNone, + &data->params.output_multiplier, &data->params.output_shift, + &data->params.quantized_activation_min, + &data->params.quantized_activation_max, + data->per_channel_output_multiplier, data->per_channel_output_shift, + output_channels)); + + // TODO(b/192090531): Remove this once all 8x16 transpose conv models use + // 64-bit biases. 
+ if (input->type == kTfLiteInt16) { + TFLITE_DCHECK(filter->type == kTfLiteInt8); + TFLITE_DCHECK(output->type == kTfLiteInt16); + if (bias->type == kTfLiteInt16) { + TFLITE_DCHECK( + context->RequestScratchBufferInArena( + context, GetTensorShape(bias).FlatSize() * sizeof(std::int64_t), + &(data->bias_converted_buffer_index)) == kTfLiteOk); + } + } + micro_context->DeallocateTempTfLiteTensor(input); + micro_context->DeallocateTempTfLiteTensor(output); + micro_context->DeallocateTempTfLiteTensor(filter); + if (bias != nullptr) { + micro_context->DeallocateTempTfLiteTensor(bias); + } + } + return kTfLiteOk; +} + +void* Init(TfLiteContext* context, const char* buffer, size_t length) { + TFLITE_DCHECK(context->AllocatePersistentBuffer != nullptr); + return context->AllocatePersistentBuffer(context, sizeof(OpData)); +} + +TfLiteStatus Prepare(TfLiteContext* context, TfLiteNode* node) { + TFLITE_DCHECK(node->user_data != nullptr); + TFLITE_DCHECK(node->builtin_data != nullptr); + + OpData* data = static_cast(node->user_data); + const auto params = + static_cast(node->builtin_data); + + MicroContext* micro_context = GetMicroContext(context); + + TfLiteTensor* output = + micro_context->AllocateTempOutputTensor(node, kOutputTensor); + TF_LITE_ENSURE(context, output != nullptr); + TfLiteTensor* input = + micro_context->AllocateTempInputTensor(node, kInputTensor); + TF_LITE_ENSURE(context, input != nullptr); + TfLiteTensor* filter = + micro_context->AllocateTempInputTensor(node, kFilterTensor); + TF_LITE_ENSURE(context, filter != nullptr); + + // Get height and width of the output. + const int width = SizeOfDimension(output, 2); + const int height = SizeOfDimension(output, 1); + const int filter_width = SizeOfDimension(filter, 2); + const int filter_height = SizeOfDimension(filter, 1); + + // Dynamically allocate per-channel quantization parameters. 
+ const int num_channels = filter->dims->data[kConvQuantizedDimension]; + data->per_channel_output_multiplier = + static_cast(context->AllocatePersistentBuffer( + context, num_channels * sizeof(int32_t))); + data->per_channel_output_shift = + static_cast(context->AllocatePersistentBuffer( + context, num_channels * sizeof(int32_t))); + + // Quantized kernels use an int32 scratch buffer. + if (input->type == kTfLiteInt8) { + TFLITE_DCHECK(context->RequestScratchBufferInArena != nullptr); + TFLITE_DCHECK(context->RequestScratchBufferInArena( + context, + GetTensorShape(output).FlatSize() * sizeof(int32_t), + &(data->scratch_buffer_index)) == kTfLiteOk); + } + + // Quantized 16x8 kernels use an int64 scratch buffer. + if (input->type == kTfLiteInt16) { + TFLITE_DCHECK(context->RequestScratchBufferInArena != nullptr); + TFLITE_DCHECK(context->RequestScratchBufferInArena( + context, + GetTensorShape(output).FlatSize() * sizeof(std::int64_t), + &(data->scratch_buffer_index)) == kTfLiteOk); + } + + // All per-channel quantized tensors need valid zero point and scale arrays. 
+ if (input->type == kTfLiteInt8 || input->type == kTfLiteInt16) { + TF_LITE_ENSURE_EQ(context, filter->quantization.type, + kTfLiteAffineQuantization); + + const auto* affine_quantization = + static_cast(filter->quantization.params); + TF_LITE_ENSURE(context, affine_quantization); + TF_LITE_ENSURE(context, affine_quantization->scale); + TF_LITE_ENSURE(context, affine_quantization->zero_point); + + TF_LITE_ENSURE(context, + affine_quantization->scale->size == 1 || + affine_quantization->scale->size == + filter->dims->data[kConvQuantizedDimension]); + TF_LITE_ENSURE_EQ(context, affine_quantization->scale->size, + affine_quantization->zero_point->size); + } + + TF_LITE_ENSURE_STATUS(CalculateOpData(context, node, params, width, height, + filter_width, filter_height, + input->type, data)); + + // Offsets (zero points) + data->params.input_offset = -input->params.zero_point; + data->params.weights_offset = -filter->params.zero_point; + data->params.output_offset = output->params.zero_point; + + // Stride + data->params.stride_width = params->stride_width; + data->params.stride_height = params->stride_height; + + micro_context->DeallocateTempTfLiteTensor(output); + micro_context->DeallocateTempTfLiteTensor(input); + micro_context->DeallocateTempTfLiteTensor(filter); + return kTfLiteOk; +} + +TfLiteStatus Eval(TfLiteContext* context, TfLiteNode* node) { + const TfLiteEvalTensor* input = + tflite::micro::GetEvalInput(context, node, kInputTensor); + const TfLiteEvalTensor* filter = + tflite::micro::GetEvalInput(context, node, kFilterTensor); + const TfLiteEvalTensor* bias = + (NumInputs(node) == 4) + ? 
tflite::micro::GetEvalInput(context, node, kBiasTensor) + : nullptr; + TfLiteEvalTensor* output = + tflite::micro::GetEvalOutput(context, node, kOutputTensor); + + TFLITE_DCHECK(node->user_data != nullptr); + const OpData& data = *(static_cast(node->user_data)); + + TF_LITE_ENSURE_EQ(context, input->type, output->type); + TF_LITE_ENSURE_MSG( + context, + input->type == filter->type || + (input->type == kTfLiteInt16 && filter->type == kTfLiteInt8), + "Hybrid models are not supported on TFLite Micro."); + + switch (input->type) { // Already know in/out types are same. + case kTfLiteFloat32: { + const auto& params = + *(reinterpret_cast(node->builtin_data)); + ConvParams op_params = data.params; + CalculateActivationRange(params.activation, + &op_params.float_activation_min, + &op_params.float_activation_max); + + reference_ops::TransposeConv( + op_params, tflite::micro::GetTensorShape(input), + tflite::micro::GetTensorData(input), + tflite::micro::GetTensorShape(filter), + tflite::micro::GetTensorData(filter), + tflite::micro::GetTensorShape(bias), + tflite::micro::GetTensorData(bias), + tflite::micro::GetTensorShape(output), + tflite::micro::GetTensorData(output), + tflite::micro::GetTensorShape(nullptr), nullptr); + break; + } + case kTfLiteInt8: { + int32_t* scratch_buffer = static_cast( + context->GetScratchBuffer(context, data.scratch_buffer_index)); + reference_integer_ops::TransposeConv( + data.params, data.per_channel_output_multiplier, + data.per_channel_output_shift, tflite::micro::GetTensorShape(input), + tflite::micro::GetTensorData(input), + tflite::micro::GetTensorShape(filter), + tflite::micro::GetTensorData(filter), + tflite::micro::GetTensorShape(bias), + tflite::micro::GetTensorData(bias), + tflite::micro::GetTensorShape(output), + tflite::micro::GetTensorData(output), + tflite::micro::GetTensorShape(nullptr), nullptr, scratch_buffer); + break; + } + case kTfLiteInt16: { + std::int64_t* scratch_buffer = static_cast( + 
context->GetScratchBuffer(context, data.scratch_buffer_index)); + // TODO(b/192090531): Remove this once all 8x16 transpose conv models use + // 64-bit biases. + if (bias->type == kTfLiteInt16) { + std::int64_t* bias_converted_buffer = + static_cast(context->GetScratchBuffer( + context, data.bias_converted_buffer_index)); + for (int i = 0; i < tflite::micro::GetTensorShape(bias).FlatSize(); + i++) { + bias_converted_buffer[i] = bias->data.i16[i]; + } + reference_integer_ops::TransposeConv( + data.params, data.per_channel_output_multiplier, + data.per_channel_output_shift, tflite::micro::GetTensorShape(input), + tflite::micro::GetTensorData(input), + tflite::micro::GetTensorShape(filter), + tflite::micro::GetTensorData(filter), + tflite::micro::GetTensorShape(bias), bias_converted_buffer, + tflite::micro::GetTensorShape(output), + tflite::micro::GetTensorData(output), + tflite::micro::GetTensorShape(nullptr), nullptr, scratch_buffer); + } else { +#if defined(HIFI4) + const RuntimeShape& input_shape = tflite::micro::GetTensorShape(input); + const RuntimeShape& filter_shape = + tflite::micro::GetTensorShape(filter); + const RuntimeShape& output_shape = + tflite::micro::GetTensorShape(output); + const int stride_width = data.params.stride_width; + const int stride_height = data.params.stride_height; + const int pad_width = data.params.padding_values.width; + const int pad_height = data.params.padding_values.height; + + const int batches = MatchingDim(input_shape, 0, output_shape, 0); + const int input_depth = MatchingDim(input_shape, 3, filter_shape, 3); + const int output_depth = MatchingDim(filter_shape, 0, output_shape, 3); + + const int input_height = input_shape.Dims(1); + const int input_width = input_shape.Dims(2); + const int filter_height = filter_shape.Dims(1); + const int filter_width = filter_shape.Dims(2); + const int output_height = output_shape.Dims(1); + const int output_width = output_shape.Dims(2); + const int16_t* input_data = + 
tflite::micro::GetTensorData(input); + const int8_t* filter_data = + tflite::micro::GetTensorData(filter); + const int64_t* bias_data = tflite::micro::GetTensorData(bias); + int16_t* output_data = tflite::micro::GetTensorData(output); + + const int num_elements = output_shape.FlatSize(); + + for (int b = 0; b < batches; b++) { + xa_nn_transpose_conv_sym8sxsym16s( + &output_data[b * output_height * output_width * output_depth], + const_cast( + &input_data[b * input_height * input_width * input_depth]), + const_cast(filter_data), const_cast(bias_data), + stride_width, stride_height, pad_width, pad_height, input_depth, + output_depth, input_height, input_width, filter_height, + filter_width, output_height, output_width, num_elements / batches, + data.per_channel_output_shift, data.per_channel_output_multiplier, + &scratch_buffer[b * output_height * output_width * output_depth]); + } +#else + reference_integer_ops::TransposeConv( + data.params, data.per_channel_output_multiplier, + data.per_channel_output_shift, tflite::micro::GetTensorShape(input), + tflite::micro::GetTensorData(input), + tflite::micro::GetTensorShape(filter), + tflite::micro::GetTensorData(filter), + tflite::micro::GetTensorShape(bias), + tflite::micro::GetTensorData(bias), + tflite::micro::GetTensorShape(output), + tflite::micro::GetTensorData(output), + tflite::micro::GetTensorShape(nullptr), nullptr, scratch_buffer); +#endif // defined(HIFI4) + } + break; + } + default: + MicroPrintf("Type %s (%d) not supported.", TfLiteTypeGetName(input->type), + input->type); + return kTfLiteError; + } + return kTfLiteOk; +} + +} // namespace + +TFLMRegistration Register_TRANSPOSE_CONV() { + return tflite::micro::RegisterOp(Init, Prepare, Eval); +} + +} // namespace tflite diff --git a/tensorflow/lite/micro/kernels/xtensa/unidirectional_sequence_lstm.cc b/tensorflow/lite/micro/kernels/xtensa/unidirectional_sequence_lstm.cc new file mode 100644 index 0000000..cbce1e1 --- /dev/null +++ 
b/tensorflow/lite/micro/kernels/xtensa/unidirectional_sequence_lstm.cc @@ -0,0 +1,1121 @@ +/* Copyright 2021 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#include +#include + +#include + +#include "tensorflow/lite/c/builtin_op_data.h" +#include "tensorflow/lite/c/common.h" +#include "tensorflow/lite/kernels/internal/compatibility.h" +#include "tensorflow/lite/kernels/internal/quantization_util.h" +#include "tensorflow/lite/kernels/internal/tensor_ctypes.h" +#include "tensorflow/lite/kernels/kernel_util.h" +#include "tensorflow/lite/micro/kernels/xtensa/lstm_eval.h" +#include "tensorflow/lite/micro/kernels/xtensa/lstm_shared.h" +#include "tensorflow/lite/micro/micro_log.h" + +// TODO(b/230666079): Flatten the namespace to match the builtin kernel +// implementation +namespace tflite { +namespace ops { +namespace micro { +// namespace unidirectional_sequence_lstm { +namespace { + +struct OpData { + // If the lstm is layer norm. + bool use_layer_norm; + // The scratch tensor index. + int scratch_tensor_index; + bool compute_row_sums = false; + + lstm_eval::IntegerLstmParameter integer_lstm_param; +}; + +TfLiteStatus PopulateQuantizedLstmParams8x8_16( + TfLiteContext* context, TfLiteNode* node, + lstm_eval::IntegerLstmParameter* integer_lstm_param) { + // Calculate quantized clip for projection and cell. 
+ const auto* params = + static_cast(node->builtin_data); + const float cell_clip = static_cast(params->cell_clip); + const float proj_clip = static_cast(params->proj_clip); + + const TfLiteTensor* cell_state = + GetVariableInput(context, node, micro::lstm::full::kCellStateTensor); + TF_LITE_ENSURE(context, cell_state != nullptr); + TfLiteTensor* output_tensor; + TF_LITE_ENSURE_OK( + context, GetOutputSafe(context, node, micro::lstm::full::kOutputTensor, + &output_tensor)); + + auto* cell_state_params = + static_cast(cell_state->quantization.params); + auto* proj_params = static_cast( + output_tensor->quantization.params); + if (cell_clip > static_cast(0.0)) { + integer_lstm_param->quantized_cell_clip = static_cast(std::min( + std::max(cell_clip / cell_state_params->scale->data[0], -32768.0f), + 32767.0f)); + } else { + integer_lstm_param->quantized_cell_clip = 0; + } + if (proj_clip > static_cast(0.0)) { + integer_lstm_param->quantized_proj_clip = static_cast(std::min( + std::max(proj_clip / proj_params->scale->data[0], -128.0f), 127.0f)); + } else { + integer_lstm_param->quantized_proj_clip = 0; + } + + // Calculate effective scales. 
+ OpData* op_data = static_cast(node->user_data); + const bool use_layer_norm = op_data->use_layer_norm; + + const TfLiteTensor* input; + TF_LITE_ENSURE_OK( + context, + GetInputSafe(context, node, micro::lstm::full::kInputTensor, &input)); + + const TfLiteTensor* input_to_input_weights = GetOptionalInputTensor( + context, node, micro::lstm::full::kInputToInputWeightsTensor); + const TfLiteTensor* input_to_forget_weights; + TF_LITE_ENSURE_OK(context, + GetInputSafe(context, node, + micro::lstm::full::kInputToForgetWeightsTensor, + &input_to_forget_weights)); + const TfLiteTensor* input_to_cell_weights; + TF_LITE_ENSURE_OK( + context, + GetInputSafe(context, node, micro::lstm::full::kInputToCellWeightsTensor, + &input_to_cell_weights)); + const TfLiteTensor* input_to_output_weights; + TF_LITE_ENSURE_OK(context, + GetInputSafe(context, node, + micro::lstm::full::kInputToOutputWeightsTensor, + &input_to_output_weights)); + + const TfLiteTensor* recurrent_to_input_weights = GetOptionalInputTensor( + context, node, micro::lstm::full::kRecurrentToInputWeightsTensor); + const TfLiteTensor* recurrent_to_forget_weights; + TF_LITE_ENSURE_OK( + context, GetInputSafe(context, node, + micro::lstm::full::kRecurrentToForgetWeightsTensor, + &recurrent_to_forget_weights)); + const TfLiteTensor* recurrent_to_cell_weights; + TF_LITE_ENSURE_OK( + context, GetInputSafe(context, node, + micro::lstm::full::kRecurrentToCellWeightsTensor, + &recurrent_to_cell_weights)); + const TfLiteTensor* recurrent_to_output_weights; + TF_LITE_ENSURE_OK( + context, GetInputSafe(context, node, + micro::lstm::full::kRecurrentToOutputWeightsTensor, + &recurrent_to_output_weights)); + + const TfLiteTensor* cell_to_input_weights = GetOptionalInputTensor( + context, node, micro::lstm::full::kCellToInputWeightsTensor); + const TfLiteTensor* cell_to_forget_weights = GetOptionalInputTensor( + context, node, micro::lstm::full::kCellToForgetWeightsTensor); + const TfLiteTensor* cell_to_output_weights = 
GetOptionalInputTensor( + context, node, micro::lstm::full::kCellToOutputWeightsTensor); + + const TfLiteTensor* input_layer_norm_coefficients = GetOptionalInputTensor( + context, node, micro::lstm::full::kInputLayerNormCoefficientsTensor); + const TfLiteTensor* forget_layer_norm_coefficients = GetOptionalInputTensor( + context, node, micro::lstm::full::kForgetLayerNormCoefficientsTensor); + const TfLiteTensor* cell_layer_norm_coefficients = GetOptionalInputTensor( + context, node, micro::lstm::full::kCellLayerNormCoefficientsTensor); + const TfLiteTensor* output_layer_norm_coefficients = GetOptionalInputTensor( + context, node, micro::lstm::full::kOutputLayerNormCoefficientsTensor); + + const TfLiteTensor* projection_weights = GetOptionalInputTensor( + context, node, micro::lstm::full::kProjectionWeightsTensor); + + TfLiteTensor* output_state = + GetVariableInput(context, node, micro::lstm::full::kOutputStateTensor); + TF_LITE_ENSURE(context, output_state != nullptr); + + // Since we have already checked that weights are all there or none, we can + // check the existence of only one to get the condition. + const bool use_cifg = (input_to_input_weights == nullptr); + const bool use_peephole = (cell_to_output_weights != nullptr); + const bool use_projection = (projection_weights != nullptr); + + // Get intermediate scales and zero points. + constexpr size_t kIntermediateCount = 5; + float intermediate_scale[kIntermediateCount]; + int32_t intermediate_zp[kIntermediateCount]; + for (int i = 0; i < 4; ++i) { + if (use_layer_norm) { + TfLiteTensor* intermediate = + context->GetTensor(context, node->intermediates->data[i]); + auto* tmp_params = static_cast( + intermediate->quantization.params); + intermediate_scale[i] = tmp_params->scale->data[0]; + intermediate_zp[i] = tmp_params->zero_point->data[0]; + } else { + // Q3.12 for activation functions. 
+ // In the absence of projection, hidden becomes output and this intermediate
+ float effective_input_to_input_scale = default_scale; + float effective_recurrent_to_input_scale = default_scale; + float effective_cell_to_input_scale = default_scale; + float effective_input_to_forget_scale = default_scale; + float effective_recurrent_to_forget_scale = default_scale; + float effective_cell_to_forget_scale = default_scale; + float effective_input_to_cell_scale = default_scale; + float effective_recurrent_to_cell_scale = default_scale; + float effective_input_to_output_scale = default_scale; + float effective_recurrent_to_output_scale = default_scale; + float effective_cell_to_output_scale = default_scale; + float effective_proj_scale = default_scale; + float effective_hidden_scale = default_scale; + + // Populate scales. + if (!use_cifg) { + input_to_input_weight_scale = input_to_input_weights->params.scale; + recurrent_to_input_weight_scale = recurrent_to_input_weights->params.scale; + } + + if (use_peephole) { + if (!use_cifg) { + cell_to_input_weight_scale = cell_to_input_weights->params.scale; + } + cell_to_forget_weight_scale = cell_to_forget_weights->params.scale; + cell_to_output_weight_scale = cell_to_output_weights->params.scale; + } + + if (use_layer_norm) { + if (!use_cifg) { + layer_norm_input_scale = input_layer_norm_coefficients->params.scale; + } + layer_norm_forget_scale = forget_layer_norm_coefficients->params.scale; + layer_norm_cell_scale = cell_layer_norm_coefficients->params.scale; + layer_norm_output_scale = output_layer_norm_coefficients->params.scale; + } + + if (use_projection) { + projection_weight_scale = projection_weights->params.scale; + } + output_state_scale = output_state->params.scale; + + input_to_forget_weight_scale = input_to_forget_weights->params.scale; + input_to_cell_weight_scale = input_to_cell_weights->params.scale; + input_to_output_weight_scale = input_to_output_weights->params.scale; + recurrent_to_forget_weight_scale = recurrent_to_forget_weights->params.scale; + recurrent_to_cell_weight_scale = 
recurrent_to_cell_weights->params.scale; + recurrent_to_output_weight_scale = recurrent_to_output_weights->params.scale; + + // Check cell state (already used above) + TF_LITE_ENSURE(context, CheckedLog2(cell_state->params.scale, &cell_scale)); + // TF_LITE_ENSURE(context, cell_scale <= -9); + integer_lstm_param->cell_scale = cell_scale; + input_scale = input->params.scale; + + // Calculate effective scales. + if (!use_cifg) { + effective_input_to_input_scale = + input_to_input_weight_scale * input_scale / intermediate_scale[0]; + effective_recurrent_to_input_scale = recurrent_to_input_weight_scale * + output_state_scale / + intermediate_scale[0]; + } + effective_input_to_forget_scale = + input_to_forget_weight_scale * input_scale / intermediate_scale[1]; + effective_recurrent_to_forget_scale = recurrent_to_forget_weight_scale * + output_state_scale / + intermediate_scale[1]; + + effective_input_to_cell_scale = + input_to_cell_weight_scale * input_scale / intermediate_scale[2]; + effective_recurrent_to_cell_scale = recurrent_to_cell_weight_scale * + output_state_scale / + intermediate_scale[2]; + + effective_input_to_output_scale = + input_to_output_weight_scale * input_scale / intermediate_scale[3]; + effective_recurrent_to_output_scale = recurrent_to_output_weight_scale * + output_state_scale / + intermediate_scale[3]; + + effective_hidden_scale = std::pow((float)2, (float)-15) / + intermediate_scale[4] * + std::pow((float)2, (float)-15); + + effective_proj_scale = + projection_weight_scale * intermediate_scale[4] / output_state_scale; + + if (use_peephole) { + if (!use_cifg) { + effective_cell_to_input_scale = + std::pow((float)(2), (float)cell_scale) * // NOLINT + (float)(cell_to_input_weight_scale) / intermediate_scale[0]; + } + effective_cell_to_forget_scale = + std::pow((float)2, (float)cell_scale) * // NOLINT + (float)cell_to_forget_weight_scale / intermediate_scale[1]; + effective_cell_to_output_scale = + std::pow((float)2, (float)cell_scale) * // NOLINT + 
(float)cell_to_output_weight_scale / intermediate_scale[3]; + } + + // Decompose scales. + QuantizeMultiplier(static_cast(effective_input_to_input_scale), + &integer_lstm_param->effective_input_to_input_scale_a, + &integer_lstm_param->effective_input_to_input_scale_b); + QuantizeMultiplier(static_cast(effective_recurrent_to_input_scale), + &integer_lstm_param->effective_recurrent_to_input_scale_a, + &integer_lstm_param->effective_recurrent_to_input_scale_b); + QuantizeMultiplier(static_cast(effective_cell_to_input_scale), + &integer_lstm_param->effective_cell_to_input_scale_a, + &integer_lstm_param->effective_cell_to_input_scale_b); + QuantizeMultiplier(static_cast(effective_input_to_forget_scale), + &integer_lstm_param->effective_input_to_forget_scale_a, + &integer_lstm_param->effective_input_to_forget_scale_b); + QuantizeMultiplier( + static_cast(effective_recurrent_to_forget_scale), + &integer_lstm_param->effective_recurrent_to_forget_scale_a, + &integer_lstm_param->effective_recurrent_to_forget_scale_b); + QuantizeMultiplier(static_cast(effective_cell_to_forget_scale), + &integer_lstm_param->effective_cell_to_forget_scale_a, + &integer_lstm_param->effective_cell_to_forget_scale_b); + QuantizeMultiplier(static_cast(effective_input_to_cell_scale), + &integer_lstm_param->effective_input_to_cell_scale_a, + &integer_lstm_param->effective_input_to_cell_scale_b); + QuantizeMultiplier(static_cast(effective_recurrent_to_cell_scale), + &integer_lstm_param->effective_recurrent_to_cell_scale_a, + &integer_lstm_param->effective_recurrent_to_cell_scale_b); + QuantizeMultiplier(static_cast(effective_input_to_output_scale), + &integer_lstm_param->effective_input_to_output_scale_a, + &integer_lstm_param->effective_input_to_output_scale_b); + QuantizeMultiplier( + static_cast(effective_recurrent_to_output_scale), + &integer_lstm_param->effective_recurrent_to_output_scale_a, + &integer_lstm_param->effective_recurrent_to_output_scale_b); + 
QuantizeMultiplier(static_cast(effective_cell_to_output_scale), + &integer_lstm_param->effective_cell_to_output_scale_a, + &integer_lstm_param->effective_cell_to_output_scale_b); + QuantizeMultiplier(static_cast(effective_proj_scale), + &integer_lstm_param->effective_proj_scale_a, + &integer_lstm_param->effective_proj_scale_b); + QuantizeMultiplier(static_cast(effective_hidden_scale), + &integer_lstm_param->effective_hidden_scale_a, + &integer_lstm_param->effective_hidden_scale_b); + QuantizeMultiplier(static_cast(layer_norm_input_scale), + &integer_lstm_param->layer_norm_input_scale_a, + &integer_lstm_param->layer_norm_input_scale_b); + QuantizeMultiplier(static_cast(layer_norm_forget_scale), + &integer_lstm_param->layer_norm_forget_scale_a, + &integer_lstm_param->layer_norm_forget_scale_b); + QuantizeMultiplier(static_cast(layer_norm_cell_scale), + &integer_lstm_param->layer_norm_cell_scale_a, + &integer_lstm_param->layer_norm_cell_scale_b); + QuantizeMultiplier(static_cast(layer_norm_output_scale), + &integer_lstm_param->layer_norm_output_scale_a, + &integer_lstm_param->layer_norm_output_scale_b); + + integer_lstm_param->hidden_zp = intermediate_zp[4]; + + // 10000 is used to make sure the kernel logic does not overflow. 
+ if (!use_cifg) { + integer_lstm_param->input_variance_guard = + std::max(static_cast(1), + static_cast(10000 * layer_norm_input_scale)); + } + integer_lstm_param->forget_variance_guard = + std::max(static_cast(1), + static_cast(10000 * layer_norm_forget_scale)); + integer_lstm_param->cell_variance_guard = + std::max(static_cast(1), + static_cast(10000 * layer_norm_cell_scale)); + integer_lstm_param->output_variance_guard = + std::max(static_cast(1), + static_cast(10000 * layer_norm_output_scale)); + + return kTfLiteOk; +} + +} // namespace + +// Temporary tensors +enum TemporaryTensor { + kScratchBuffer = 0, + kInputQuantized = 1, + kOutputStateQuantized = 2, + kCellStateQuantized = 3, + kInputScalingFactors = 4, + kOutputStateScalingFactors = 5, + kProductScalingFactors = 6, + kRecoveredCellWeights = 7, + kAccumScratch = 8, + kInputZeroPoints = 9, + kOutputStateZeroPoints = 10, + kRowSums = 11, + kNumTemporaryTensors = 12, +}; + +void* Init(TfLiteContext* context, const char* buffer, size_t length) { + OpData* op_data = reinterpret_cast( + context->AllocatePersistentBuffer(context, sizeof(OpData))); + + return op_data; +} + +// Check that input tensor dimensions matches with each other. +TfLiteStatus CheckInputTensorDimensions(TfLiteContext* context, + TfLiteNode* node, int n_input, + int n_output, int n_cell, + bool use_layer_norm, bool is_integer) { + const auto* params = reinterpret_cast(node->builtin_data); + + // Making sure clipping parameters have valid values. 
+ // == 0 means no clipping + // > 0 means clipping + TF_LITE_ENSURE(context, params->cell_clip >= 0); + TF_LITE_ENSURE(context, params->proj_clip >= 0); + const TfLiteEvalTensor* input_to_input_weights = tflite::micro::GetEvalInput( + context, node, micro::lstm::full::kInputToInputWeightsTensor); + if (input_to_input_weights != nullptr) { + TF_LITE_ENSURE_EQ(context, input_to_input_weights->dims->size, 2); + TF_LITE_ENSURE_EQ(context, input_to_input_weights->dims->data[0], n_cell); + TF_LITE_ENSURE_EQ(context, input_to_input_weights->dims->data[1], n_input); + } + const TfLiteEvalTensor* input_to_forget_weights = tflite::micro::GetEvalInput( + context, node, micro::lstm::full::kInputToForgetWeightsTensor); + + TF_LITE_ENSURE_EQ(context, input_to_forget_weights->dims->size, 2); + TF_LITE_ENSURE_EQ(context, input_to_forget_weights->dims->data[0], n_cell); + TF_LITE_ENSURE_EQ(context, input_to_forget_weights->dims->data[1], n_input); + const TfLiteEvalTensor* input_to_cell_weights = tflite::micro::GetEvalInput( + context, node, micro::lstm::full::kInputToCellWeightsTensor); + + TF_LITE_ENSURE_EQ(context, input_to_cell_weights->dims->size, 2); + TF_LITE_ENSURE_EQ(context, input_to_cell_weights->dims->data[0], n_cell); + TF_LITE_ENSURE_EQ(context, input_to_cell_weights->dims->data[1], n_input); + const TfLiteEvalTensor* recurrent_to_input_weights = + tflite::micro::GetEvalInput( + context, node, micro::lstm::full::kRecurrentToInputWeightsTensor); + if (recurrent_to_input_weights != nullptr) { + TF_LITE_ENSURE_EQ(context, recurrent_to_input_weights->dims->size, 2); + TF_LITE_ENSURE_EQ(context, recurrent_to_input_weights->dims->data[0], + n_cell); + TF_LITE_ENSURE_EQ(context, recurrent_to_input_weights->dims->data[1], + n_output); + } + const TfLiteEvalTensor* recurrent_to_forget_weights = + tflite::micro::GetEvalInput( + context, node, micro::lstm::full::kRecurrentToForgetWeightsTensor); + + TF_LITE_ENSURE_EQ(context, recurrent_to_forget_weights->dims->size, 2); + 
TF_LITE_ENSURE_EQ(context, recurrent_to_forget_weights->dims->data[0], + n_cell); + TF_LITE_ENSURE_EQ(context, recurrent_to_forget_weights->dims->data[1], + n_output); + const TfLiteEvalTensor* recurrent_to_cell_weights = + tflite::micro::GetEvalInput( + context, node, micro::lstm::full::kRecurrentToCellWeightsTensor); + + TF_LITE_ENSURE_EQ(context, recurrent_to_cell_weights->dims->size, 2); + TF_LITE_ENSURE_EQ(context, recurrent_to_cell_weights->dims->data[0], n_cell); + TF_LITE_ENSURE_EQ(context, recurrent_to_cell_weights->dims->data[1], + n_output); + + // We make sure the input-gate's parameters are either both present (regular + // LSTM) or not at all (CIFG-LSTM). + const bool cifg_weights_all_or_none = + ((input_to_input_weights != nullptr) && + (recurrent_to_input_weights != nullptr)) || + ((input_to_input_weights == nullptr) && + (recurrent_to_input_weights == nullptr)); + TF_LITE_ENSURE(context, cifg_weights_all_or_none == true); + + const TfLiteTensor* cell_to_input_weights = GetOptionalInputTensor( + context, node, micro::lstm::full::kCellToInputWeightsTensor); + if (cell_to_input_weights != nullptr) { + TF_LITE_ENSURE_EQ(context, cell_to_input_weights->dims->size, 1); + TF_LITE_ENSURE_EQ(context, cell_to_input_weights->dims->data[0], n_cell); + TF_LITE_ENSURE_TYPES_EQ( + context, cell_to_input_weights->type, + is_integer ? kTfLiteInt16 : input_to_forget_weights->type); + } + + const TfLiteTensor* cell_to_forget_weights = GetOptionalInputTensor( + context, node, lstm::full::kCellToForgetWeightsTensor); + if (cell_to_forget_weights != nullptr) { + TF_LITE_ENSURE_EQ(context, cell_to_forget_weights->dims->size, 1); + TF_LITE_ENSURE_EQ(context, cell_to_forget_weights->dims->data[0], n_cell); + TF_LITE_ENSURE_TYPES_EQ( + context, cell_to_forget_weights->type, + is_integer ? 
kTfLiteInt16 : input_to_forget_weights->type); + } + + const TfLiteTensor* cell_to_output_weights = GetOptionalInputTensor( + context, node, micro::lstm::full::kCellToOutputWeightsTensor); + if (cell_to_output_weights != nullptr) { + TF_LITE_ENSURE_EQ(context, cell_to_output_weights->dims->size, 1); + TF_LITE_ENSURE_EQ(context, cell_to_output_weights->dims->data[0], n_cell); + TF_LITE_ENSURE_TYPES_EQ( + context, cell_to_output_weights->type, + is_integer ? kTfLiteInt16 : input_to_forget_weights->type); + } + + // Making sure the peephole weights are there all or none. + const bool use_cifg = (input_to_input_weights == nullptr); + const bool peephole_weights_all_or_none = + ((cell_to_input_weights != nullptr || use_cifg) && + (cell_to_forget_weights != nullptr) && + (cell_to_output_weights != nullptr)) || + ((cell_to_input_weights == nullptr) && + (cell_to_forget_weights == nullptr) && + (cell_to_output_weights == nullptr)); + TF_LITE_ENSURE(context, peephole_weights_all_or_none == true); + const TfLiteEvalTensor* input_gate_bias = tflite::micro::GetEvalInput( + context, node, micro::lstm::full::kInputGateBiasTensor); + + if (use_cifg) { + TF_LITE_ENSURE_EQ(context, input_gate_bias, nullptr); + } else { + TF_LITE_ENSURE_EQ(context, input_gate_bias->dims->size, 1); + TF_LITE_ENSURE_EQ(context, input_gate_bias->dims->data[0], n_cell); + if (is_integer) { + TF_LITE_ENSURE_TYPES_EQ(context, input_gate_bias->type, kTfLiteInt32); + } else { + TF_LITE_ENSURE_TYPES_EQ(context, input_gate_bias->type, kTfLiteFloat32); + } + } + const TfLiteEvalTensor* forget_gate_bias = tflite::micro::GetEvalInput( + context, node, micro::lstm::full::kForgetGateBiasTensor); + + TF_LITE_ENSURE_EQ(context, forget_gate_bias->dims->size, 1); + TF_LITE_ENSURE_EQ(context, forget_gate_bias->dims->data[0], n_cell); + if (is_integer) { + TF_LITE_ENSURE_TYPES_EQ(context, forget_gate_bias->type, kTfLiteInt32); + } else { + TF_LITE_ENSURE_TYPES_EQ(context, forget_gate_bias->type, kTfLiteFloat32); + } + 
const TfLiteEvalTensor* cell_gate_bias = tflite::micro::GetEvalInput( + context, node, micro::lstm::full::kCellGateBiasTensor); + + TF_LITE_ENSURE_EQ(context, cell_gate_bias->dims->size, 1); + TF_LITE_ENSURE_EQ(context, cell_gate_bias->dims->data[0], n_cell); + if (is_integer) { + TF_LITE_ENSURE_TYPES_EQ(context, cell_gate_bias->type, kTfLiteInt32); + } else { + TF_LITE_ENSURE_TYPES_EQ(context, cell_gate_bias->type, kTfLiteFloat32); + } + const TfLiteEvalTensor* output_gate_bias = tflite::micro::GetEvalInput( + context, node, micro::lstm::full::kOutputGateBiasTensor); + TF_LITE_ENSURE_EQ(context, output_gate_bias->dims->size, 1); + TF_LITE_ENSURE_EQ(context, output_gate_bias->dims->data[0], n_cell); + if (is_integer) { + TF_LITE_ENSURE_TYPES_EQ(context, output_gate_bias->type, kTfLiteInt32); + } else { + TF_LITE_ENSURE_TYPES_EQ(context, output_gate_bias->type, kTfLiteFloat32); + } + + const TfLiteTensor* projection_weights = GetOptionalInputTensor( + context, node, micro::lstm::full::kProjectionWeightsTensor); + if (projection_weights != nullptr) { + TF_LITE_ENSURE_EQ(context, projection_weights->dims->size, 2); + TF_LITE_ENSURE_EQ(context, projection_weights->dims->data[0], n_output); + TF_LITE_ENSURE_EQ(context, projection_weights->dims->data[1], n_cell); + } + + const TfLiteTensor* projection_bias = GetOptionalInputTensor( + context, node, micro::lstm::full::kProjectionBiasTensor); + if (projection_bias != nullptr) { + TF_LITE_ENSURE_EQ(context, projection_bias->dims->size, 1); + TF_LITE_ENSURE_EQ(context, projection_bias->dims->data[0], n_output); + if (is_integer) { + TF_LITE_ENSURE_TYPES_EQ(context, projection_bias->type, kTfLiteInt32); + } else { + TF_LITE_ENSURE_TYPES_EQ(context, projection_bias->type, kTfLiteFloat32); + } + } + + // Making sure the projection tensors are consistent: + // 1) If projection weight is not present, then projection bias should not be + // present. + // 2) If projection weight is present, then projection bias is optional. 
+ const bool projecton_tensors_consistent = + ((projection_weights != nullptr) || (projection_bias == nullptr)); + TF_LITE_ENSURE(context, projecton_tensors_consistent == true); + + if (use_layer_norm) { + const TfLiteEvalTensor* input_layer_norm_coefficients = + tflite::micro::GetEvalInput( + context, node, + micro::lstm::full::kInputLayerNormCoefficientsTensor); + if (use_cifg) { + TF_LITE_ENSURE_EQ(context, input_layer_norm_coefficients, nullptr); + } else { + TF_LITE_ENSURE(context, input_layer_norm_coefficients != nullptr); + TF_LITE_ENSURE_EQ(context, input_layer_norm_coefficients->dims->size, 1); + TF_LITE_ENSURE_EQ(context, input_layer_norm_coefficients->dims->data[0], + n_cell); + if (is_integer) { + TF_LITE_ENSURE_TYPES_EQ(context, input_layer_norm_coefficients->type, + kTfLiteInt16); + } else { + TF_LITE_ENSURE_TYPES_EQ(context, input_layer_norm_coefficients->type, + kTfLiteFloat32); + } + } + const TfLiteEvalTensor* forget_layer_norm_coefficients = + tflite::micro::GetEvalInput( + context, node, + micro::lstm::full::kForgetLayerNormCoefficientsTensor); + TF_LITE_ENSURE_EQ(context, forget_layer_norm_coefficients->dims->size, 1); + TF_LITE_ENSURE_EQ(context, forget_layer_norm_coefficients->dims->data[0], + n_cell); + if (is_integer) { + TF_LITE_ENSURE_TYPES_EQ(context, forget_layer_norm_coefficients->type, + kTfLiteInt16); + } else { + TF_LITE_ENSURE_TYPES_EQ(context, forget_layer_norm_coefficients->type, + kTfLiteFloat32); + } + const TfLiteEvalTensor* cell_layer_norm_coefficients = + tflite::micro::GetEvalInput( + context, node, micro::lstm::full::kCellLayerNormCoefficientsTensor); + TF_LITE_ENSURE_EQ(context, cell_layer_norm_coefficients->dims->size, 1); + TF_LITE_ENSURE_EQ(context, cell_layer_norm_coefficients->dims->data[0], + n_cell); + if (is_integer) { + TF_LITE_ENSURE_TYPES_EQ(context, cell_layer_norm_coefficients->type, + kTfLiteInt16); + } else { + TF_LITE_ENSURE_TYPES_EQ(context, cell_layer_norm_coefficients->type, + kTfLiteFloat32); + } + 
const TfLiteEvalTensor* output_layer_norm_coefficients = + tflite::micro::GetEvalInput( + context, node, + micro::lstm::full::kOutputLayerNormCoefficientsTensor); + + TF_LITE_ENSURE_EQ(context, output_layer_norm_coefficients->dims->size, 1); + TF_LITE_ENSURE_EQ(context, output_layer_norm_coefficients->dims->data[0], + n_cell); + if (is_integer) { + TF_LITE_ENSURE_TYPES_EQ(context, output_layer_norm_coefficients->type, + kTfLiteInt16); + } else { + TF_LITE_ENSURE_TYPES_EQ(context, output_layer_norm_coefficients->type, + kTfLiteFloat32); + } + } + + return kTfLiteOk; +} + +TfLiteStatus PrecomputeZeroPointTimesWeightWithBias( + TfLiteContext* context, int32_t zero_point, + const TfLiteTensor* weight_tensor, const TfLiteTensor* bias_tensor, + std::unique_ptr* output) { + if (weight_tensor == nullptr) { + return kTfLiteOk; + } + + const RuntimeShape& weight_shape = GetTensorShape(weight_tensor); + TF_LITE_ENSURE_EQ(context, weight_shape.DimensionsCount(), 2); + const int row = weight_shape.Dims(0); + const int col = weight_shape.Dims(1); + output->reset(new int32_t[row]); + if (bias_tensor == nullptr) { + memset(output->get(), 0, row * sizeof(int32_t)); + } else { + const int32_t* bias = GetTensorData(bias_tensor); + memcpy(output->get(), bias, row * sizeof(int32_t)); + } + if (zero_point != 0) { + const int8_t* weight = GetTensorData(weight_tensor); + tensor_utils::PortableMatrixScalarMultiplyAccumulate( + weight, zero_point, row, col, output->get()); + } + return kTfLiteOk; +} + +TfLiteStatus PopulatePrecomputedZPTimesWeightsWithBias(TfLiteContext* context, + OpData* op_data, + TfLiteNode* node) { + const TfLiteTensor* input; + TF_LITE_ENSURE_OK( + context, + GetInputSafe(context, node, micro::lstm::full::kInputTensor, &input)); + const TfLiteTensor* output_state = + GetVariableInput(context, node, micro::lstm::full::kOutputStateTensor); + TF_LITE_ENSURE(context, output_state != nullptr); + + const int32_t input_zero_point = -input->params.zero_point; + const int32_t 
output_state_zero_point = -output_state->params.zero_point; + + const TfLiteTensor* input_to_input_weights = GetOptionalInputTensor( + context, node, micro::lstm::full::kInputToInputWeightsTensor); + const TfLiteTensor* input_to_forget_weights; + TF_LITE_ENSURE_OK(context, + GetInputSafe(context, node, + micro::lstm::full::kInputToForgetWeightsTensor, + &input_to_forget_weights)); + const TfLiteTensor* input_to_cell_weights; + TF_LITE_ENSURE_OK( + context, + GetInputSafe(context, node, micro::lstm::full::kInputToCellWeightsTensor, + &input_to_cell_weights)); + const TfLiteTensor* input_to_output_weights; + TF_LITE_ENSURE_OK(context, + GetInputSafe(context, node, + micro::lstm::full::kInputToOutputWeightsTensor, + &input_to_output_weights)); + + const TfLiteTensor* recurrent_to_input_weights = GetOptionalInputTensor( + context, node, micro::lstm::full::kRecurrentToInputWeightsTensor); + const TfLiteTensor* recurrent_to_forget_weights; + TF_LITE_ENSURE_OK( + context, GetInputSafe(context, node, + micro::lstm::full::kRecurrentToForgetWeightsTensor, + &recurrent_to_forget_weights)); + const TfLiteTensor* recurrent_to_cell_weights; + TF_LITE_ENSURE_OK( + context, GetInputSafe(context, node, + micro::lstm::full::kRecurrentToCellWeightsTensor, + &recurrent_to_cell_weights)); + const TfLiteTensor* recurrent_to_output_weights; + TF_LITE_ENSURE_OK( + context, GetInputSafe(context, node, + micro::lstm::full::kRecurrentToOutputWeightsTensor, + &recurrent_to_output_weights)); + + const TfLiteTensor* projection_weights = GetOptionalInputTensor( + context, node, lstm::full::kProjectionWeightsTensor); + const TfLiteTensor* projection_bias = GetOptionalInputTensor( + context, node, micro::lstm::full::kProjectionBiasTensor); + + lstm_eval::IntegerLstmParameter* integer_lstm_params = + &op_data->integer_lstm_param; + + TfLiteTensor* intermediate = + context->GetTensor(context, node->intermediates->data[4]); + const auto* params = + static_cast(intermediate->quantization.params); + 
const int32_t hidden_zp = params->zero_point->data[0]; + + // Get bias and perform zero point calculation. + // When there is layer normalization, the gate bias does not apply to matmul + // directly: + // y = ln(w * x + w * r + w * c) + b. + const bool is_layer_norm = op_data->use_layer_norm; + + // Forget gate. + const TfLiteTensor* forget_gate_bias = + is_layer_norm + ? nullptr + : GetInput(context, node, micro::lstm::full::kForgetGateBiasTensor); + TF_LITE_ENSURE_OK( + context, + PrecomputeZeroPointTimesWeightWithBias( + context, input_zero_point, input_to_forget_weights, forget_gate_bias, + &(integer_lstm_params->input_to_forget_effective_bias))); + + TF_LITE_ENSURE_OK( + context, + PrecomputeZeroPointTimesWeightWithBias( + context, output_state_zero_point, recurrent_to_forget_weights, + nullptr, &(integer_lstm_params->recurrent_to_forget_effective_bias))); + + // Modulation gate. + const TfLiteTensor* cell_gate_bias = + is_layer_norm + ? nullptr + : GetInput(context, node, micro::lstm::full::kCellGateBiasTensor); + TF_LITE_ENSURE_OK( + context, + PrecomputeZeroPointTimesWeightWithBias( + context, input_zero_point, input_to_cell_weights, cell_gate_bias, + &(integer_lstm_params->input_to_cell_effective_bias))); + TF_LITE_ENSURE_OK( + context, + PrecomputeZeroPointTimesWeightWithBias( + context, output_state_zero_point, recurrent_to_cell_weights, nullptr, + &(integer_lstm_params->recurrent_to_cell_effective_bias))); + + // Output gate. + const TfLiteTensor* output_gate_bias = + is_layer_norm + ? 
nullptr + : GetInput(context, node, micro::lstm::full::kOutputGateBiasTensor); + TF_LITE_ENSURE_OK( + context, + PrecomputeZeroPointTimesWeightWithBias( + context, input_zero_point, input_to_output_weights, output_gate_bias, + &(integer_lstm_params->input_to_output_effective_bias))); + + TF_LITE_ENSURE_OK( + context, + PrecomputeZeroPointTimesWeightWithBias( + context, output_state_zero_point, recurrent_to_output_weights, + nullptr, &(integer_lstm_params->recurrent_to_output_effective_bias))); + + // Input gate. The calculation is only meaningful for non-cifg case. + const TfLiteTensor* input_gate_bias = + is_layer_norm + ? nullptr + : GetInput(context, node, micro::lstm::full::kInputGateBiasTensor); + TF_LITE_ENSURE_OK( + context, + PrecomputeZeroPointTimesWeightWithBias( + context, input_zero_point, input_to_input_weights, input_gate_bias, + &(integer_lstm_params->input_to_input_effective_bias))); + TF_LITE_ENSURE_OK( + context, + PrecomputeZeroPointTimesWeightWithBias( + context, output_state_zero_point, recurrent_to_input_weights, nullptr, + &(integer_lstm_params->recurrent_to_input_effective_bias))); + + // Projection bias. The calculation is only meaningful for with projection. + TF_LITE_ENSURE_OK(context, + PrecomputeZeroPointTimesWeightWithBias( + context, hidden_zp, projection_weights, projection_bias, + &(integer_lstm_params->projection_effective_bias))); + return kTfLiteOk; +} + +// Resize the output and state tensors based on the sizes of the input tensors. +// Allocate a temporary scratch tensor. Also check that the sizes of the input +// tensors match each other. +TfLiteStatus Prepare(TfLiteContext* context, TfLiteNode* node) { + OpData* op_data = reinterpret_cast(node->user_data); + // const int scratch_tensor_index = op_data->scratch_tensor_index; + + // Check we have all the inputs and outputs we need. 
+ bool use_layer_norm = false; + if (node->inputs->size == 24) { + const TfLiteTensor* forget_layer_norm_coefficients = GetOptionalInputTensor( + context, node, micro::lstm::full::kForgetLayerNormCoefficientsTensor); + if (forget_layer_norm_coefficients == nullptr) { + use_layer_norm = false; + } else { + use_layer_norm = true; + } + } else if (node->inputs->size == 20) { + // This is deprecated and is only kept here for backward compatibility. + use_layer_norm = false; + } else { + MicroPrintf("The LSTM Full kernel expects 20 or 24 inputs. Got %d inputs", + node->inputs->size); + return kTfLiteError; + } + TF_LITE_ENSURE_EQ(context, node->outputs->size, 1); + op_data->use_layer_norm = use_layer_norm; + + // Inferring batch size, number of outputs and sequence length and + // number of cells from the input tensors. + const TfLiteEvalTensor* input = tflite::micro::GetEvalInput( + context, node, micro::lstm::full::kInputTensor); + const bool is_integer = input->type == kTfLiteInt8; + TF_LITE_ENSURE(context, input->dims->size > 1); + const auto* params = + reinterpret_cast( + node->builtin_data); + const bool time_major = params->time_major; + const int n_batch = time_major ? 
input->dims->data[1] : input->dims->data[0]; + const int n_input = input->dims->data[2]; + const TfLiteEvalTensor* input_to_output_weights = tflite::micro::GetEvalInput( + context, node, micro::lstm::full::kInputToOutputWeightsTensor); + const int n_cell = input_to_output_weights->dims->data[0]; + TF_LITE_ENSURE_EQ(context, input_to_output_weights->dims->size, 2); + TF_LITE_ENSURE_EQ(context, input_to_output_weights->dims->data[1], n_input); + const TfLiteEvalTensor* recurrent_to_output_weights = + tflite::micro::GetEvalInput( + context, node, micro::lstm::full::kRecurrentToOutputWeightsTensor); + + TF_LITE_ENSURE_EQ(context, recurrent_to_output_weights->dims->size, 2); + TF_LITE_ENSURE_EQ(context, recurrent_to_output_weights->dims->data[0], + n_cell); + const int n_output = recurrent_to_output_weights->dims->data[1]; + + // Check that input tensor dimensions matches with each other. + TF_LITE_ENSURE_OK( + context, CheckInputTensorDimensions(context, node, n_input, n_output, + n_cell, use_layer_norm, is_integer)); + // Get the pointer to output, output_state and cell_state buffer tensors. + // TfLiteEvalTensor* output = + // tflite::micro::GetEvalOutput(context, node, + // micro::lstm::full::kOutputTensor); + TfLiteEvalTensor* output_state = tflite::micro::GetMutableEvalInput( + context, node, micro::lstm::full::kOutputStateTensor); + TFLITE_DCHECK(output_state != nullptr); + TfLiteEvalTensor* cell_state = tflite::micro::GetMutableEvalInput( + context, node, micro::lstm::full::kCellStateTensor); + TFLITE_DCHECK(cell_state != nullptr); + // Check the shape of input state tensors. + // These tensor may be 1D or 2D. It's fine as long as the total size is + // correct. 
+ TF_LITE_ENSURE_EQ(context, NumElements(output_state->dims), + n_batch * n_output); + TF_LITE_ENSURE_EQ(context, NumElements(cell_state->dims), n_batch * n_cell); + + if (is_integer) { + const int num_intermediate_tensors = node->intermediates->size; + TF_LITE_ENSURE(context, num_intermediate_tensors == 5); + } + + if (is_integer) { + // Integer UnidirectionalSequenceLSTM prepare function for 8x8->16. + // This code path needs 5 intermediate tensors per Op. + // Populate quantization parameters. + PopulateQuantizedLstmParams8x8_16(context, node, + &op_data->integer_lstm_param); + // Allocate scratch buffer. Need 6 16bit buffer with size n_batch * n_cell + // and 1 8bit buffer with size n_batch * n_cell. We also need 1 32 bit + // buffer with size n_batch * n_cell. + // + // Handle cifg case as well, which might save one buffer. + + int scratch_idx = 0; + + context->RequestScratchBufferInArena( + context, n_batch * n_cell * sizeof(int32_t), &(scratch_idx)); + op_data->scratch_tensor_index = scratch_idx; + + for (int scratch_index = 1; scratch_index < 6; ++scratch_index) { + // node->temporaries->data[scratch_index] = op_data->scratch_tensor_index + // + scratch_index; + context->RequestScratchBufferInArena( + context, n_batch * n_cell * sizeof(int32_t), &(scratch_idx)); + TFLITE_DCHECK(scratch_idx == + (op_data->scratch_tensor_index + scratch_index)); + } + + // Populate precomputed zp * weight. 
+ TF_LITE_ENSURE_OK(context, PopulatePrecomputedZPTimesWeightsWithBias( + context, op_data, node)); + } + + return kTfLiteOk; +} + +TfLiteStatus Eval(TfLiteContext* context, TfLiteNode* node) { + const auto* params = + reinterpret_cast( + node->builtin_data); + const OpData* op_data = reinterpret_cast(node->user_data); + // const bool use_layer_norm = op_data->use_layer_norm; + // const bool time_major = params->time_major; + + const TfLiteEvalTensor* input = tflite::micro::GetEvalInput( + context, node, micro::lstm::full::kInputTensor); + const TfLiteEvalTensor* input_to_input_weights = tflite::micro::GetEvalInput( + context, node, micro::lstm::full::kInputToInputWeightsTensor); + const TfLiteEvalTensor* input_to_forget_weights = tflite::micro::GetEvalInput( + context, node, micro::lstm::full::kInputToForgetWeightsTensor); + const TfLiteEvalTensor* input_to_cell_weights = tflite::micro::GetEvalInput( + context, node, micro::lstm::full::kInputToCellWeightsTensor); + const TfLiteEvalTensor* input_to_output_weights = tflite::micro::GetEvalInput( + context, node, micro::lstm::full::kInputToOutputWeightsTensor); + const TfLiteEvalTensor* recurrent_to_input_weights = + tflite::micro::GetEvalInput( + context, node, micro::lstm::full::kRecurrentToInputWeightsTensor); + const TfLiteEvalTensor* recurrent_to_forget_weights = + tflite::micro::GetEvalInput( + context, node, micro::lstm::full::kRecurrentToForgetWeightsTensor); + const TfLiteEvalTensor* recurrent_to_cell_weights = + tflite::micro::GetEvalInput( + context, node, micro::lstm::full::kRecurrentToCellWeightsTensor); + const TfLiteEvalTensor* recurrent_to_output_weights = + tflite::micro::GetEvalInput( + context, node, micro::lstm::full::kRecurrentToOutputWeightsTensor); + const TfLiteEvalTensor* cell_to_input_weights = context->GetEvalTensor( + context, + node->inputs->data[micro::lstm::full::kCellToInputWeightsTensor]); + const TfLiteEvalTensor* cell_to_forget_weights = context->GetEvalTensor( + context, + 
node->inputs->data[micro::lstm::full::kCellToForgetWeightsTensor]); + const TfLiteEvalTensor* cell_to_output_weights = context->GetEvalTensor( + context, + node->inputs->data[micro::lstm::full::kCellToOutputWeightsTensor]); + const TfLiteEvalTensor* input_gate_bias = context->GetEvalTensor( + context, node->inputs->data[micro::lstm::full::kInputGateBiasTensor]); + + const TfLiteEvalTensor* forget_gate_bias = context->GetEvalTensor( + context, node->inputs->data[micro::lstm::full::kForgetGateBiasTensor]); + const TfLiteEvalTensor* cell_gate_bias = context->GetEvalTensor( + context, node->inputs->data[micro::lstm::full::kCellGateBiasTensor]); + const TfLiteEvalTensor* output_gate_bias = context->GetEvalTensor( + context, node->inputs->data[micro::lstm::full::kOutputGateBiasTensor]); + + const TfLiteEvalTensor* projection_weights = context->GetEvalTensor( + context, node->inputs->data[micro::lstm::full::kProjectionWeightsTensor]); + const TfLiteEvalTensor* projection_bias = context->GetEvalTensor( + context, node->inputs->data[micro::lstm::full::kProjectionBiasTensor]); + + TfLiteEvalTensor* output_state = context->GetEvalTensor( + context, node->inputs->data[micro::lstm::full::kOutputStateTensor]); + TFLITE_DCHECK(output_state != nullptr); + TfLiteEvalTensor* cell_state = context->GetEvalTensor( + context, node->inputs->data[micro::lstm::full::kCellStateTensor]); + TFLITE_DCHECK(cell_state != nullptr); + const TfLiteEvalTensor* input_layer_norm_coefficients = + context->GetEvalTensor( + context, + node->inputs + ->data[micro::lstm::full::kInputLayerNormCoefficientsTensor]); + + const TfLiteEvalTensor* forget_layer_norm_coefficients = + context->GetEvalTensor( + context, + node->inputs + ->data[micro::lstm::full::kForgetLayerNormCoefficientsTensor]); + const TfLiteEvalTensor* cell_layer_norm_coefficients = context->GetEvalTensor( + context, + node->inputs->data[micro::lstm::full::kCellLayerNormCoefficientsTensor]); + + const TfLiteEvalTensor* 
output_layer_norm_coefficients = + context->GetEvalTensor( + context, + node->inputs + ->data[micro::lstm::full::kOutputLayerNormCoefficientsTensor]); + + TfLiteEvalTensor* output = tflite::micro::GetEvalOutput( + context, node, micro::lstm::full::kOutputTensor); + + // Copy out the LSTM specific params so they can be passed in the function. + TfLiteLSTMParams lstm_params; + lstm_params.activation = params->activation; + lstm_params.cell_clip = params->cell_clip; + lstm_params.proj_clip = params->proj_clip; + lstm_params.asymmetric_quantize_inputs = params->asymmetric_quantize_inputs; + switch (input_to_output_weights->type) { + case kTfLiteInt8: { + const bool is_hybrid = input->type == kTfLiteFloat32; + if (is_hybrid) { + MicroPrintf(" hybrid type is not supported."); + return kTfLiteError; + + } else { + TfLiteEvalTensor* scratch[6]; + // Allocate scratch buffer. Need 6 16bit buffer with size n_batch * + // n_cell + // and 1 8bit buffer with size n_batch * n_cell. We also need 1 32 bit + // buffer with size n_batch * n_cell. + // + // Handle cifg case as well, which might save one buffer. 
+ + const auto* tmp_params = + reinterpret_cast( + node->builtin_data); + const bool time_major = tmp_params->time_major; + for (int scratch_index = 0; scratch_index < 6; ++scratch_index) { + TFLITE_DCHECK(context != nullptr); + TFLITE_DCHECK(context->GetScratchBuffer != nullptr); + int32_t* scratch_tensor = + static_cast(context->GetScratchBuffer( + context, op_data->scratch_tensor_index + scratch_index)); + scratch[scratch_index] = (TfLiteEvalTensor*)scratch_tensor; + } + /* + TF_LITE_ENSURE_OK(context, + GetScratchSafe(context, node, 0, + &scratch0)); + + TF_LITE_ENSURE_OK(context, + GetScratchSafe(context, node, 1, + &scratch1)); + + TF_LITE_ENSURE_OK(context, + GetScratchSafe(context, node, 2, + &scratch2)); + + TF_LITE_ENSURE_OK(context, + GetScratchSafe(context, node, 3, + &scratch3)); + + TF_LITE_ENSURE_OK(context, + GetScratchSafe(context, node, 4, + &scratch4)); + + TF_LITE_ENSURE_OK(context, + GetScratchSafe(context, node, 5, + &scratch5)); + */ + return lstm_eval::EvalInteger8x8_16( + context, node, input, input_to_input_weights, + input_to_forget_weights, input_to_cell_weights, + input_to_output_weights, recurrent_to_input_weights, + recurrent_to_forget_weights, recurrent_to_cell_weights, + recurrent_to_output_weights, cell_to_input_weights, + cell_to_forget_weights, cell_to_output_weights, + input_layer_norm_coefficients, forget_layer_norm_coefficients, + cell_layer_norm_coefficients, output_layer_norm_coefficients, + input_gate_bias, forget_gate_bias, cell_gate_bias, output_gate_bias, + projection_weights, projection_bias, &lstm_params, + /*forward_sequence=*/true, time_major, &op_data->integer_lstm_param, + output_state, cell_state, output, scratch[0], scratch[1], + scratch[2], scratch[3], scratch[4], scratch[5]); + } + } + + default: + MicroPrintf("Type %s is not currently supported.", + TfLiteTypeGetName(input_to_output_weights->type)); + return kTfLiteError; + } + return kTfLiteOk; +} +//} // namespace unidirectional_sequence_lstm + +} // 
namespace micro +} // namespace ops + +TFLMRegistration Register_UNIDIRECTIONAL_SEQUENCE_LSTM() { + return tflite::micro::RegisterOp(ops::micro::Init, ops::micro::Prepare, + ops::micro::Eval); +} +} // namespace tflite diff --git a/tensorflow/lite/micro/kernels/xtensa/xtensa.h b/tensorflow/lite/micro/kernels/xtensa/xtensa.h new file mode 100644 index 0000000..47820d3 --- /dev/null +++ b/tensorflow/lite/micro/kernels/xtensa/xtensa.h @@ -0,0 +1,38 @@ +/* Copyright 2020 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/ + +#ifndef TENSORFLOW_LITE_MICRO_KERNELS_XTENSA_XTENSA_H_ +#define TENSORFLOW_LITE_MICRO_KERNELS_XTENSA_XTENSA_H_ + +#if defined(HIFIMINI) +#include <xtensa/tie/xt_hifi2.h> + +#include "tensorflow/lite/micro/kernels/xtensa/hifimini/fixedpoint_utils.h" +#endif  // defined(HIFIMINI) + +#if defined(HIFI4) || defined(HIFI5) +#include "include/nnlib/xa_nnlib_api.h" +#include "include/nnlib/xa_nnlib_standards.h" + +#define ALIGNED_SIZE(x, bytes) (((x) + (bytes - 1)) & (~(bytes - 1))) +#define ALIGN_PTR(x, bytes) ((((unsigned)(x)) + (bytes - 1)) & (~(bytes - 1))) +#endif  // defined(HIFI4) || defined(HIFI5) + +#if defined(VISION_P6) +#include "utils.h" +#include "vision_api.h" +#endif  // defined(VISION_P6) + +#endif  // TENSORFLOW_LITE_MICRO_KERNELS_XTENSA_XTENSA_H_ diff --git a/tensorflow/lite/micro/kernels/xtensa/xtensa_add.h b/tensorflow/lite/micro/kernels/xtensa/xtensa_add.h new file mode 100644 index 0000000..a221339 --- /dev/null +++ b/tensorflow/lite/micro/kernels/xtensa/xtensa_add.h @@ -0,0 +1,48 @@ +/* Copyright 2022 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/ + +#ifndef TENSORFLOW_LITE_MICRO_KERNELS_XTENSA_XTENSA_ADD_H_ +#define TENSORFLOW_LITE_MICRO_KERNELS_XTENSA_XTENSA_ADD_H_ + +#include + +#include "tensorflow/lite/c/builtin_op_data.h" +#include "tensorflow/lite/c/common.h" +#include "tensorflow/lite/micro/kernels/add.h" +namespace tflite { + +struct XtensaAddOpData { + OpDataAdd reference_op_data; +#if defined(VISION_P6) + uint8_t* p_context; // persistent lib context for this instance saved here + uint32_t context_size; +#endif // VISION_P6 +}; + +#if defined(VISION_P6) + +TfLiteStatus AddPrepareVision(TfLiteContext* context, TfLiteNode* node); +TfLiteStatus AddEvalQuantizedVision(TfLiteContext* context, TfLiteNode* node, + const TfLiteAddParams& params, + const XtensaAddOpData& data, + const TfLiteEvalTensor* input1, + const TfLiteEvalTensor* input2, + TfLiteEvalTensor* output); + +#endif // VISION_P6 + +} // namespace tflite + +#endif // TENSORFLOW_LITE_MICRO_KERNELS_XTENSA_XTENSA_ADD_H_ diff --git a/tensorflow/lite/micro/kernels/xtensa/xtensa_conv.h b/tensorflow/lite/micro/kernels/xtensa/xtensa_conv.h new file mode 100644 index 0000000..355f022 --- /dev/null +++ b/tensorflow/lite/micro/kernels/xtensa/xtensa_conv.h @@ -0,0 +1,84 @@ +/* Copyright 2021 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/ +#ifndef TENSORFLOW_LITE_MICRO_KERNELS_XTENSA_XTENSA_CONV_H_ +#define TENSORFLOW_LITE_MICRO_KERNELS_XTENSA_XTENSA_CONV_H_ + +#include + +#include "tensorflow/lite/c/common.h" +#include "tensorflow/lite/kernels/internal/types.h" +#include "tensorflow/lite/micro/kernels/conv.h" + +namespace tflite { +struct XtensaConvOpData { + OpDataConv reference_op_data; + +#if defined(HIFI4) || defined(HIFI5) + int scratch_tensor_index; +#endif // defined(HIFI4) || defined(HIFI5) + +#if defined(VISION_P6) + int8_t* reorder_coefficient_bias; // buffers used to keep reordered coeff and + // biases. + uint32_t reorder_coefficient_bias_size; + int8_t* per_channel_output_shift_int8; + uint8_t* p_context; // persistent lib context for this instance saved here + uint32_t context_size; +#endif // VISION_P6 +}; + +#if defined(HIFI4) || defined(HIFI5) +TfLiteStatus ConvPrepareHifi(TfLiteContext* context, TfLiteNode* node); + +TfLiteStatus ConvEvalHifi(TfLiteContext* context, TfLiteNode* node, + const TfLiteConvParams& params, + const XtensaConvOpData& data, + const TfLiteEvalTensor* input, + const TfLiteEvalTensor* filter, + const TfLiteEvalTensor* bias, + TfLiteEvalTensor* output); +#endif // defined(HIFI4) || defined(HIFI5) + +#if defined(HIFI4) +TfLiteStatus ConvEvalHifi16(TfLiteContext* context, TfLiteNode* node, + const TfLiteConvParams& params, + const XtensaConvOpData& data, + const TfLiteEvalTensor* input, + const TfLiteEvalTensor* filter, + const TfLiteEvalTensor* bias, + TfLiteEvalTensor* output); +#endif // defined(HIFI4) + +#if defined(VISION_P6) + +TfLiteStatus ConvPrepareVision(TfLiteContext* context, TfLiteNode* node); + +TfLiteStatus ConvEvalVision(TfLiteContext* context, TfLiteNode* node, + const TfLiteConvParams& params, + const XtensaConvOpData& data, + const TfLiteEvalTensor* input, + const TfLiteEvalTensor* filter, + const TfLiteEvalTensor* bias, + TfLiteEvalTensor* output); + +#endif // 
VISION_P6 + +TfLiteStatus ConvReferenceEvalInt8(TfLiteContext* context, TfLiteNode* node); + +TfLiteStatus ConvReferenceEvalInt16(TfLiteContext* context, TfLiteNode* node); + +} // namespace tflite + +#endif // TENSORFLOW_LITE_MICRO_KERNELS_XTENSA_XTENSA_CONV_H_ diff --git a/tensorflow/lite/micro/kernels/xtensa/xtensa_depthwise_conv.h b/tensorflow/lite/micro/kernels/xtensa/xtensa_depthwise_conv.h new file mode 100644 index 0000000..ca15719 --- /dev/null +++ b/tensorflow/lite/micro/kernels/xtensa/xtensa_depthwise_conv.h @@ -0,0 +1,74 @@ +/* Copyright 2021 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ +#ifndef TENSORFLOW_LITE_MICRO_KERNELS_XTENSA_XTENSA_DEPTHWISE_CONV_H_ +#define TENSORFLOW_LITE_MICRO_KERNELS_XTENSA_XTENSA_DEPTHWISE_CONV_H_ + +#include + +#include "tensorflow/lite/c/common.h" +#include "tensorflow/lite/kernels/internal/types.h" +#include "tensorflow/lite/micro/kernels/depthwise_conv.h" + +namespace tflite { +struct XtensaDepthwiseConvOpData { + OpDataConv reference_op_data; + +#if defined(HIFI4) || defined(HIFI5) + int scratch_tensor_index; +#endif // defined(HIFI4) || defined(HIFI5) + +#if defined(VISION_P6) + int8_t* reorder_coefficient_bias; // buffers used to keep reordered coeff and + // biases. 
+ uint32_t reorder_coefficient_bias_size; + int8_t* per_channel_output_shift_int8; + uint8_t* p_context; // persistent lib context for this instance saved here + uint32_t context_size; +#endif // VISION_P6 +}; + +#if defined(HIFI4) || defined(HIFI5) +TfLiteStatus DepthwiseConvPrepareHifi(TfLiteContext* context, TfLiteNode* node); + +TfLiteStatus DepthwiseConvEvalHifi(TfLiteContext* context, TfLiteNode* node, + const TfLiteDepthwiseConvParams& params, + const XtensaDepthwiseConvOpData& data, + const TfLiteEvalTensor* input, + const TfLiteEvalTensor* filter, + const TfLiteEvalTensor* bias, + TfLiteEvalTensor* output); + +TfLiteStatus DepthwiseConvReferenceEvalInt8(TfLiteContext* context, + TfLiteNode* node); +#endif // defined(HIFI4) || defined(HIFI5) + +#if defined(VISION_P6) + +TfLiteStatus DepthwiseConvPrepareVision(TfLiteContext* context, + TfLiteNode* node); + +TfLiteStatus DepthwiseConvEvalVision(TfLiteContext* context, TfLiteNode* node, + const TfLiteDepthwiseConvParams& params, + const XtensaDepthwiseConvOpData& data, + const TfLiteEvalTensor* input, + const TfLiteEvalTensor* filter, + const TfLiteEvalTensor* bias, + TfLiteEvalTensor* output); + +#endif // VISION_P6 + +} // namespace tflite + +#endif // TENSORFLOW_LITE_MICRO_KERNELS_XTENSA_XTENSA_DEPTHWISE_CONV_H_ diff --git a/tensorflow/lite/micro/kernels/xtensa/xtensa_fully_connected.h b/tensorflow/lite/micro/kernels/xtensa/xtensa_fully_connected.h new file mode 100644 index 0000000..e030f0b --- /dev/null +++ b/tensorflow/lite/micro/kernels/xtensa/xtensa_fully_connected.h @@ -0,0 +1,78 @@ +/* Copyright 2023 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. 
+You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ +#ifndef TENSORFLOW_LITE_MICRO_KERNELS_XTENSA_XTENSA_FULLY_CONNECTED_H_ +#define TENSORFLOW_LITE_MICRO_KERNELS_XTENSA_XTENSA_FULLY_CONNECTED_H_ + +#include + +#include "tensorflow/lite/c/common.h" +#include "tensorflow/lite/kernels/internal/types.h" +#include "tensorflow/lite/micro/kernels/fully_connected.h" +#include "tensorflow/lite/micro/micro_log.h" + +namespace tflite { +struct XtensaFullyConnectedOpData { + OpDataFullyConnected reference_op_data; + +#if defined(VISION_P6) + int8_t* reorder_coefficient_bias; // buffers used to keep reordered coeff and + // biases. 
+ uint32_t reorder_coefficient_bias_size; + uint8_t* p_context; // persistent lib context for this instance saved here + uint32_t context_size; +#endif // VISION_P6 +}; + +#if defined(HIFIMINI) +void FullyConnectedEvalHifimini( + const FullyConnectedParams& params, const RuntimeShape& input_shape, + const int8_t* input_data, const RuntimeShape& filter_shape, + const int8_t* filter_data, const RuntimeShape& bias_shape, + const int32_t* bias_data, const RuntimeShape& output_shape, + int8_t* output_data); +#endif // defined(HIFIMINI) + +#if defined(VISION_P6) +TfLiteStatus FullyConnectedPrepareVision(TfLiteContext* context, + TfLiteNode* node); + +TfLiteStatus FullyConnectedEvalVision(TfLiteContext* context, TfLiteNode* node, + const TfLiteConvParams& params, + const XtensaFullyConnectedOpData& data, + const TfLiteEvalTensor* input, + const TfLiteEvalTensor* filter, + const TfLiteEvalTensor* bias, + TfLiteEvalTensor* output); +#endif // VISION_P6 + +void* XtensaInitFullyConnected(TfLiteContext* context, const char* buffer, + size_t length); + +TfLiteStatus XtensaEvalFullyConnectedQuantizedInt8( + TfLiteContext* context, TfLiteNode* node, const OpDataFullyConnected& data, + const TfLiteEvalTensor* input, const TfLiteEvalTensor* filter, + const TfLiteEvalTensor* bias, TfLiteEvalTensor* output); + +TfLiteStatus XtensaCalculateOpDataFullyConnected( + TfLiteContext* context, TfLiteFusedActivation activation, + TfLiteType data_type, const TfLiteTensor* input, const TfLiteTensor* filter, + const TfLiteTensor* bias, TfLiteTensor* output, OpDataFullyConnected* data); + +TfLiteStatus XtensaPrepareFullyConnected(TfLiteContext* context, + TfLiteNode* node); + +} // namespace tflite + +#endif // TENSORFLOW_LITE_MICRO_KERNELS_XTENSA_XTENSA_FULLY_CONNECTED_H_ diff --git a/tensorflow/lite/micro/kernels/xtensa/xtensa_pad.h b/tensorflow/lite/micro/kernels/xtensa/xtensa_pad.h new file mode 100644 index 0000000..12e386d --- /dev/null +++ 
b/tensorflow/lite/micro/kernels/xtensa/xtensa_pad.h @@ -0,0 +1,49 @@ +/* Copyright 2021 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ +#ifndef TENSORFLOW_LITE_MICRO_KERNELS_XTENSA_XTENSA_PAD_H_ +#define TENSORFLOW_LITE_MICRO_KERNELS_XTENSA_XTENSA_PAD_H_ + +#include + +#include "tensorflow/lite/c/common.h" +#include "tensorflow/lite/kernels/internal/types.h" +namespace tflite { + +struct OpDataPad { + PadParams params; + int32_t output_zero_point; +}; + +struct XtensaPadData { + OpDataPad reference_op_data; + +#if defined(VISION_P6) + uint8_t* p_context; // persistent lib context for this instance saved here + uint32_t context_size; +#endif // VISION_P6 +}; + +#if defined(VISION_P6) + +TfLiteStatus PadPrepareVision(TfLiteContext* context, TfLiteNode* node); + +TfLiteStatus PadEvalVision(const XtensaPadData& data, + const TfLiteEvalTensor* input, + TfLiteEvalTensor* output); +#endif // VISION_P6 + +} // namespace tflite + +#endif // TENSORFLOW_LITE_MICRO_KERNELS_XTENSA_XTENSA_PAD_H_ diff --git a/tensorflow/lite/micro/kernels/xtensa/xtensa_pooling.h b/tensorflow/lite/micro/kernels/xtensa/xtensa_pooling.h new file mode 100644 index 0000000..a2346e3 --- /dev/null +++ b/tensorflow/lite/micro/kernels/xtensa/xtensa_pooling.h @@ -0,0 +1,76 @@ +/* Copyright 2023 The TensorFlow Authors. All Rights Reserved. 
+ +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#ifndef TENSORFLOW_LITE_MICRO_KERNELS_XTENSA_XTENSA_POOLING_H_ +#define TENSORFLOW_LITE_MICRO_KERNELS_XTENSA_XTENSA_POOLING_H_ + +#include + +#include "tensorflow/lite/c/builtin_op_data.h" +#include "tensorflow/lite/c/common.h" +#include "tensorflow/lite/micro/kernels/pooling.h" +namespace tflite { + +struct XtensaOpDataPooling { + OpDataPooling reference_op_data; + +#if defined(VISION_P6) + uint8_t* p_context; // persistent lib context for this instance saved here + uint32_t context_size; +#endif // defined(VISION_P6) + +#if defined(HIFI5) + int scratch_tensor_index; +#endif // defined(HIFI5) +}; + +#if defined(VISION_P6) + +TfLiteStatus AvgPoolingPrepareVision(TfLiteContext* context, TfLiteNode* node); + +TfLiteStatus MaxPoolingPrepareVision(TfLiteContext* context, TfLiteNode* node); + +TfLiteStatus PoolEvalVision(TfLiteContext* context, TfLiteNode* node, + const TfLitePoolParams& params, + const XtensaOpDataPooling& data, + const TfLiteEvalTensor* input, + TfLiteEvalTensor* output); +#endif + +#if defined(HIFI5) + +TfLiteStatus AveragePrepareHifi(TfLiteContext* context, TfLiteNode* node); +TfLiteStatus AverageEvalQuantizedHifi(TfLiteContext* context, + const TfLiteNode* node, + const TfLitePoolParams* params, + const XtensaOpDataPooling* data, + const TfLiteEvalTensor* input, + TfLiteEvalTensor* output); + +TfLiteStatus MaxPrepareHifi(TfLiteContext* 
context, TfLiteNode* node); +TfLiteStatus MaxEvalQuantizedHifi(TfLiteContext* context, TfLiteNode* node, + TfLitePoolParams* params, + const XtensaOpDataPooling* data, + const TfLiteEvalTensor* input, + TfLiteEvalTensor* output); + +#endif // defined(HIFI5) + +void* XtensaPoolingInit(TfLiteContext* context, const char* buffer, + size_t length); + +} // namespace tflite + +#endif // TENSORFLOW_LITE_MICRO_KERNELS_XTENSA_XTENSA_POOLING_H_ diff --git a/tensorflow/lite/micro/kernels/xtensa/xtensa_reduce.h b/tensorflow/lite/micro/kernels/xtensa/xtensa_reduce.h new file mode 100644 index 0000000..6f5f65a --- /dev/null +++ b/tensorflow/lite/micro/kernels/xtensa/xtensa_reduce.h @@ -0,0 +1,47 @@ +/* Copyright 2021 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/ +#ifndef TENSORFLOW_LITE_MICRO_KERNELS_XTENSA_XTENSA_REDUCE_H_ +#define TENSORFLOW_LITE_MICRO_KERNELS_XTENSA_XTENSA_REDUCE_H_ + +#include + +#include "tensorflow/lite/c/common.h" +#include "tensorflow/lite/kernels/internal/types.h" +#include "tensorflow/lite/micro/kernels/reduce.h" + +namespace tflite { + +struct XtensaReduceOpData { + OpDataReduce reference_op_data; + +#if defined(VISION_P6) + uint8_t* p_context; // persistent lib context for this instance saved here + uint32_t context_size; +#endif // VISION_P6 +}; + +#if defined(VISION_P6) + +TfLiteStatus ReducePrepareVision(TfLiteContext* context, TfLiteNode* node); + +TfLiteStatus ReduceEvalVision(const XtensaReduceOpData& data, + const TfLiteEvalTensor* input, + TfLiteEvalTensor* output); + +#endif // VISION_P6 + +} // namespace tflite + +#endif // TENSORFLOW_LITE_MICRO_KERNELS_XTENSA_XTENSA_REDUCE_H_ diff --git a/tensorflow/lite/micro/kernels/xtensa/xtensa_reshape.h b/tensorflow/lite/micro/kernels/xtensa/xtensa_reshape.h new file mode 100644 index 0000000..cc8ffc7 --- /dev/null +++ b/tensorflow/lite/micro/kernels/xtensa/xtensa_reshape.h @@ -0,0 +1,46 @@ +/* Copyright 2022 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/ +#ifndef TENSORFLOW_LITE_MICRO_KERNELS_XTENSA_XTENSA_RESHAPE_H_ +#define TENSORFLOW_LITE_MICRO_KERNELS_XTENSA_XTENSA_RESHAPE_H_ + +#include + +#include "tensorflow/lite/c/common.h" +#include "tensorflow/lite/kernels/internal/types.h" +namespace tflite { + +constexpr int kReshapeInputTensor = 0; +constexpr int kReshapeOutputTensor = 0; + +#if defined(VISION_P6) + +struct XtensaReshapeData { + uint8_t* p_context; // persistent lib context for this instance saved here + uint32_t context_size; +}; +#endif // VISION_P6 + +#if defined(VISION_P6) + +TfLiteStatus ReshapePrepareVision(TfLiteContext* context, TfLiteNode* node); + +TfLiteStatus ReshapeEvalVision(const XtensaReshapeData& data, + const TfLiteEvalTensor* input, + TfLiteEvalTensor* output); +#endif // VISION_P6 + +} // namespace tflite + +#endif // TENSORFLOW_LITE_MICRO_KERNELS_XTENSA_XTENSA_RESHAPE_H_ diff --git a/tensorflow/lite/micro/kernels/xtensa/xtensa_softmax.h b/tensorflow/lite/micro/kernels/xtensa/xtensa_softmax.h new file mode 100644 index 0000000..7d0d461 --- /dev/null +++ b/tensorflow/lite/micro/kernels/xtensa/xtensa_softmax.h @@ -0,0 +1,58 @@ +/* Copyright 2022 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/ +#ifndef TENSORFLOW_LITE_MICRO_KERNELS_XTENSA_XTENSA_SOFTMAX_H_ +#define TENSORFLOW_LITE_MICRO_KERNELS_XTENSA_XTENSA_SOFTMAX_H_ + +#include "tensorflow/lite/c/builtin_op_data.h" +#include "tensorflow/lite/c/common.h" +#include "tensorflow/lite/kernels/internal/types.h" +#include "tensorflow/lite/micro/kernels/softmax.h" + +namespace tflite { + +#if defined(HIFI4) || defined(HIFI5) +struct XtensaSoftmaxOpData { + SoftmaxParams params; + int scratch_tensor_index; +}; +#endif // defined(HIFI4) || defined(HIFI5) + +#if defined(VISION_P6) +struct XtensaSoftmaxOpData { + SoftmaxParams params; + uint8_t* p_context; // persistent lib context for this instance saved here + uint32_t context_size; +}; +#endif // defined(VISION_P6) + +void* XtensaInitSoftmax(TfLiteContext* context, const char* buffer, + size_t length); + +TfLiteStatus XtensaPrepareSoftmax(TfLiteContext* context, TfLiteNode* node); + +TfLiteStatus XtensaEvalSoftmaxInt8Int16(TfLiteContext* context, + TfLiteNode* node); + +#if defined(VISION_P6) +TfLiteStatus SoftmaxPrepareVision(TfLiteContext* context, TfLiteNode* node); +TfLiteStatus SoftmaxEvalVision(TfLiteContext* context, TfLiteNode* node, + const XtensaSoftmaxOpData& data, + const TfLiteEvalTensor* input, + TfLiteEvalTensor* output); +#endif // defined(VISION_P6) + +} // namespace tflite + +#endif // TENSORFLOW_LITE_MICRO_KERNELS_XTENSA_XTENSA_SOFTMAX_H_ diff --git a/tensorflow/lite/micro/kernels/xtensa/xtensa_svdf.h b/tensorflow/lite/micro/kernels/xtensa/xtensa_svdf.h new file mode 100644 index 0000000..3c257ce --- /dev/null +++ b/tensorflow/lite/micro/kernels/xtensa/xtensa_svdf.h @@ -0,0 +1,39 @@ +/* Copyright 2023 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. 
+You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#ifndef TENSORFLOW_LITE_MICRO_KERNELS_XTENSA_XTENSA_SVDF_H_ +#define TENSORFLOW_LITE_MICRO_KERNELS_XTENSA_XTENSA_SVDF_H_ + +#include + +#include "tensorflow/lite/c/common.h" +#include "tensorflow/lite/kernels/internal/types.h" +#include "tensorflow/lite/micro/kernels/svdf.h" + +namespace tflite { +#if defined(HIFIMINI) +TfLiteStatus EvalIntegerSvdfHifimini( + TfLiteContext* context, TfLiteNode* node, + const TfLiteEvalTensor* input_tensor, + const TfLiteEvalTensor* weights_feature_tensor, + const TfLiteEvalTensor* weights_time_tensor, + const TfLiteEvalTensor* bias_tensor, const TfLiteSVDFParams* params, + TfLiteEvalTensor* activation_state_tensor, TfLiteEvalTensor* output_tensor, + OpDataSvdf data); +#endif // HIFIMINI + +} // namespace tflite + +#endif // TENSORFLOW_LITE_MICRO_KERNELS_XTENSA_XTENSA_SVDF_H_ diff --git a/tensorflow/lite/micro/kernels/zeros_like.cc b/tensorflow/lite/micro/kernels/zeros_like.cc new file mode 100644 index 0000000..597e50e --- /dev/null +++ b/tensorflow/lite/micro/kernels/zeros_like.cc @@ -0,0 +1,88 @@ +/* Copyright 2021 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. 
+You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#include "tensorflow/lite/c/common.h" +#include "tensorflow/lite/kernels/internal/tensor_ctypes.h" +#include "tensorflow/lite/kernels/kernel_util.h" +#include "tensorflow/lite/micro/kernels/kernel_util.h" +#include "tensorflow/lite/micro/micro_log.h" + +namespace tflite { +namespace { + +constexpr int kInputTensor = 0; +constexpr int kOutputTensor = 0; + +TfLiteStatus Prepare(TfLiteContext* context, TfLiteNode* node) { + MicroContext* micro_context = GetMicroContext(context); + + TF_LITE_ENSURE_EQ(context, NumInputs(node), 1); + TF_LITE_ENSURE_EQ(context, NumOutputs(node), 1); + TfLiteTensor* input = + micro_context->AllocateTempInputTensor(node, kInputTensor); + TF_LITE_ENSURE(context, input != nullptr); + TfLiteTensor* output = + micro_context->AllocateTempOutputTensor(node, kOutputTensor); + TF_LITE_ENSURE(context, output != nullptr); + output->type = input->type; + + micro_context->DeallocateTempTfLiteTensor(input); + micro_context->DeallocateTempTfLiteTensor(output); + return kTfLiteOk; +} + +template +void resetZeros(T* out, const int num_elements) { + for (int i = 0; i < num_elements; ++i) { + out[i] = static_cast(0); + } +} + +TfLiteStatus Eval(TfLiteContext* context, TfLiteNode* node) { + const TfLiteEvalTensor* input = + tflite::micro::GetEvalInput(context, node, kInputTensor); + TfLiteEvalTensor* output = + tflite::micro::GetEvalOutput(context, node, kOutputTensor); + int flat_size = MatchingFlatSize(tflite::micro::GetTensorShape(input), + 
tflite::micro::GetTensorShape(output)); + switch (input->type) { + case kTfLiteInt64: + resetZeros(tflite::micro::GetTensorData(output), flat_size); + break; + case kTfLiteInt32: + resetZeros(tflite::micro::GetTensorData(output), flat_size); + break; + case kTfLiteInt8: + resetZeros(tflite::micro::GetTensorData(output), flat_size); + break; + case kTfLiteFloat32: + resetZeros(tflite::micro::GetTensorData(output), flat_size); + break; + default: + MicroPrintf( + "ZerosLike only currently supports int64, int32, " + "and float32, got %d.", + input->type); + return kTfLiteError; + } + return kTfLiteOk; +} +} // namespace + +TFLMRegistration Register_ZEROS_LIKE() { + return tflite::micro::RegisterOp(nullptr, Prepare, Eval); +} + +} // namespace tflite diff --git a/tensorflow/lite/micro/kernels/zeros_like_test.cc b/tensorflow/lite/micro/kernels/zeros_like_test.cc new file mode 100644 index 0000000..9a28cf4 --- /dev/null +++ b/tensorflow/lite/micro/kernels/zeros_like_test.cc @@ -0,0 +1,100 @@ +/* Copyright 2021 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/ + +#include "tensorflow/lite/c/builtin_op_data.h" +#include "tensorflow/lite/c/common.h" +#include "tensorflow/lite/micro/kernels/kernel_runner.h" +#include "tensorflow/lite/micro/test_helpers.h" +#include "tensorflow/lite/micro/testing/micro_test.h" + +namespace tflite { +namespace testing { +namespace { + +template +void TestZerosLike(int* input_dims_data, const T* input_data, + const T* expected_output_data, T* output_data) { + TfLiteIntArray* input_dims = IntArrayFromInts(input_dims_data); + TfLiteIntArray* output_dims = IntArrayFromInts(input_dims_data); + const int output_dims_count = ElementCount(*output_dims); + constexpr int inputs_size = 1; + constexpr int outputs_size = 1; + constexpr int tensors_size = inputs_size + outputs_size; + TfLiteTensor tensors[tensors_size] = { + CreateTensor(input_data, input_dims), + CreateTensor(output_data, output_dims), + }; + + int inputs_array_data[] = {1, 0}; + TfLiteIntArray* inputs_array = IntArrayFromInts(inputs_array_data); + int outputs_array_data[] = {1, 1}; + TfLiteIntArray* outputs_array = IntArrayFromInts(outputs_array_data); + + const TFLMRegistration registration = Register_ZEROS_LIKE(); + micro::KernelRunner runner(registration, tensors, tensors_size, inputs_array, + outputs_array, + /*builtin_data=*/nullptr); + + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, runner.InitAndPrepare()); + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, runner.Invoke()); + + for (int i = 0; i < output_dims_count; ++i) { + TF_LITE_MICRO_EXPECT_EQ(expected_output_data[i], output_data[i]); + } +} + +} // namespace +} // namespace testing +} // namespace tflite + +TF_LITE_MICRO_TESTS_BEGIN + +TF_LITE_MICRO_TEST(TestZerosLikeFloat) { + float output_data[6]; + int input_dims[] = {2, 2, 3}; + const float input_values[] = {-2.0, -1.0, 0.0, 1.0, 2.0, 3.0}; + const float golden[] = {0.0, 0.0, 0.0, 0.0, 0.0, 0.0}; + tflite::testing::TestZerosLike(input_dims, input_values, golden, + 
output_data); +} + +TF_LITE_MICRO_TEST(TestZerosLikeInt8) { + int8_t output_data[6]; + int input_dims[] = {3, 1, 2, 3}; + const int8_t input_values[] = {-2, -1, 0, 1, 2, 3}; + const int8_t golden[] = {0, 0, 0, 0, 0, 0}; + tflite::testing::TestZerosLike(input_dims, input_values, golden, + output_data); +} + +TF_LITE_MICRO_TEST(TestZerosLikeInt32) { + int32_t output_data[4]; + int input_dims[] = {4, 1, 2, 2, 1}; + const int32_t input_values[] = {-2, -1, 0, 3}; + const int32_t golden[] = {0, 0, 0, 0}; + tflite::testing::TestZerosLike(input_dims, input_values, golden, + output_data); +} + +TF_LITE_MICRO_TEST(TestZerosLikeInt64) { + int64_t output_data[4]; + int input_dims[] = {4, 1, 2, 2, 1}; + const int64_t input_values[] = {-2, -1, 0, 3}; + const int64_t golden[] = {0, 0, 0, 0}; + tflite::testing::TestZerosLike(input_dims, input_values, golden, + output_data); +} + +TF_LITE_MICRO_TESTS_END diff --git a/tensorflow/lite/micro/memory_arena_threshold_test.cc b/tensorflow/lite/micro/memory_arena_threshold_test.cc new file mode 100644 index 0000000..f6bb24f --- /dev/null +++ b/tensorflow/lite/micro/memory_arena_threshold_test.cc @@ -0,0 +1,301 @@ +/* Copyright 2020 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/ + +#include + +#include "tensorflow/lite/micro/kernels/svdf.h" +#include "tensorflow/lite/micro/memory_helpers.h" +#include "tensorflow/lite/micro/memory_planner/greedy_memory_planner.h" +#include "tensorflow/lite/micro/micro_mutable_op_resolver.h" +#include "tensorflow/lite/micro/models/keyword_scrambled_model_data.h" +#include "tensorflow/lite/micro/recording_micro_allocator.h" +#include "tensorflow/lite/micro/recording_micro_interpreter.h" +#include "tensorflow/lite/micro/testing/micro_test.h" +#include "tensorflow/lite/micro/testing/test_conv_model.h" + +/** + * Tests to ensure arena memory allocation does not regress by more than 3%. + */ + +namespace { + +// Ensure memory doesn't expand more that 3%: +constexpr float kAllocationThreshold = 0.03; + +// TODO(b/160617245): Record persistent allocations to provide a more accurate +// number here. +constexpr float kAllocationTailMiscCeiling = 2 * 1024; + +const bool kIs64BitSystem = (sizeof(void*) == 8); + +constexpr int kKeywordModelTensorArenaSize = 22 * 1024; +uint8_t keyword_model_tensor_arena[kKeywordModelTensorArenaSize]; + +constexpr int kKeywordModelTensorCount = 54; +constexpr int kKeywordModelNodeAndRegistrationCount = 15; + +// NOTE: These values are measured on x86-64: +// TODO(b/158651472): Consider auditing these values on non-64 bit systems. +// TODO(b/199414774): use expression for hardcoded constants such as +// kKeywordModelTotalSize. 
+// +// Run this test with '--copt=-DTF_LITE_STATIC_MEMORY' to get optimized memory +// runtime values: +#ifdef TF_LITE_STATIC_MEMORY +// Total size contributed by the keyword model excluding the +// RecordingMicroAllocator's overhead +// TODO(b/207157610): replace magic number that depends on OPs +constexpr int kKeywordModelOnlyTotalSize = 14304; +// Tail size contributed by the kdyword model excluding the +// RecordingMicroAllocator's overhead +// TODO(b/207157610): replace magic number that depends on OPs +constexpr int kKeywordModelOnlyTailSize = 13632; +constexpr int kKeywordModelPersistentTfLiteTensorDataSize = 128; +constexpr int kKeywordModelPersistentBufferDataSize = 756; +#else +// Total size contributed by the keyword model excluding the +// RecordingMicroAllocator's overhead. +// TODO(b/207157610): replace magic number that depends on OPs +constexpr int kKeywordModelOnlyTotalSize = 14752; +// Tail size contributed by the keyword model excluding the +// RecordingMicroAllocator's overhead +// TODO(b/207157610): replace magic number that depends on OPs +constexpr int kKeywordModelOnlyTailSize = 14080; +constexpr int kKeywordModelPersistentTfLiteTensorDataSize = 224; +constexpr int kKeywordModelPersistentBufferDataSize = 764; +#endif +constexpr int kKeywordModelHeadSize = 672; +constexpr int kKeywordModelTfLiteTensorVariableBufferDataSize = 10240; +constexpr int kKeywordModelPersistentTfLiteTensorQuantizationData = 64; +constexpr int kKeywordModelOpRuntimeDataSize = 148; + +constexpr int kTestConvModelArenaSize = 12 * 1024; +uint8_t test_conv_tensor_arena[kTestConvModelArenaSize]; + +constexpr int kTestConvModelTensorCount = 15; +constexpr int kTestConvModelNodeAndRegistrationCount = 7; + +// NOTE: These values are measured on x86-64: +// TODO(b/158651472): Consider auditing these values on non-64 bit systems. 
+#ifdef TF_LITE_STATIC_MEMORY +// Total size contributed by the conv model excluding the +// RecordingMicroAllocator's overhead +// TODO(b/207157610): replace magic number that depends on OPs +constexpr int kTestConvModelOnlyTotalSize = 9488; +// Tail size contributed by the conv model excluding the +// RecordingMicroAllocator's overhead +// TODO(b/207157610): replace magic number that depends on OPs +constexpr int kTestConvModelOnlyTailSize = 1744; +constexpr int kTestConvModelPersistentTfLiteTensorDataSize = 128; +constexpr int kTestConvModelPersistentBufferDataSize = 728; +#else +// Total size contributed by the conv model excluding the +// RecordingMicroAllocator's overhead +// TODO(b/207157610): replace magic number that depends on OPs +constexpr int kTestConvModelOnlyTotalSize = 9760; +// Tail size contributed by the conv model excluding the +// RecordingMicroAllocator's overhead +// TODO(b/207157610): replace magic number that depends on OPs +constexpr int kTestConvModelOnlyTailSize = 2016; +constexpr int kTestConvModelPersistentTfLiteTensorDataSize = 224; +constexpr int kTestConvModelPersistentBufferDataSize = 720; +#endif +constexpr int kTestConvModelHeadSize = 7744; +constexpr int kTestConvModelOpRuntimeDataSize = 136; +constexpr int kTestConvModelPersistentTfLiteTensorQuantizationData = 0; + +struct ModelAllocationThresholds { + size_t tensor_count = 0; + size_t node_and_registration_count = 0; + size_t total_alloc_size = 0; + size_t head_alloc_size = 0; + size_t tail_alloc_size = 0; + size_t tensor_variable_buffer_data_size = 0; + size_t persistent_tflite_tensor_data_size = 0; + size_t persistent_tflite_tensor_quantization_data_size = 0; + size_t op_runtime_data_size = 0; + size_t persistent_buffer_data = 0; +}; + +void EnsureAllocatedSizeThreshold(const char* allocation_type, size_t actual, + size_t expected) { + // TODO(b/158651472): Better auditing of non-64 bit systems: + if (kIs64BitSystem) { + // 64-bit systems should check floor and ceiling to 
catch memory savings: + TF_LITE_MICRO_EXPECT_NEAR(actual, expected, + expected * kAllocationThreshold); + if (actual != expected) { + MicroPrintf("%s threshold failed: %d != %d", allocation_type, actual, + expected); + } + } else { + // Non-64 bit systems should just expect allocation does not exceed the + // ceiling: + TF_LITE_MICRO_EXPECT_LE(actual, expected + expected * kAllocationThreshold); + } +} + +void ValidateModelAllocationThresholds( + const tflite::RecordingMicroAllocator& allocator, + const ModelAllocationThresholds& thresholds) { + MicroPrintf("Overhead from RecordingMicroAllocator is %d", + tflite::RecordingMicroAllocator::GetDefaultTailUsage()); + allocator.PrintAllocations(); + + EnsureAllocatedSizeThreshold( + "Total", allocator.GetSimpleMemoryAllocator()->GetUsedBytes(), + thresholds.total_alloc_size); + EnsureAllocatedSizeThreshold( + "Head", allocator.GetSimpleMemoryAllocator()->GetNonPersistentUsedBytes(), + thresholds.head_alloc_size); + EnsureAllocatedSizeThreshold( + "Tail", allocator.GetSimpleMemoryAllocator()->GetPersistentUsedBytes(), + thresholds.tail_alloc_size); + EnsureAllocatedSizeThreshold( + "TfLiteEvalTensor", + allocator + .GetRecordedAllocation( + tflite::RecordedAllocationType::kTfLiteEvalTensorData) + .used_bytes, + sizeof(TfLiteEvalTensor) * thresholds.tensor_count); + EnsureAllocatedSizeThreshold( + "VariableBufferData", + allocator + .GetRecordedAllocation( + tflite::RecordedAllocationType::kTfLiteTensorVariableBufferData) + .used_bytes, + thresholds.tensor_variable_buffer_data_size); + EnsureAllocatedSizeThreshold( + "PersistentTfLiteTensor", + allocator + .GetRecordedAllocation( + tflite::RecordedAllocationType::kPersistentTfLiteTensorData) + .used_bytes, + thresholds.persistent_tflite_tensor_data_size); + EnsureAllocatedSizeThreshold( + "PersistentTfliteTensorQuantizationData", + allocator + .GetRecordedAllocation(tflite::RecordedAllocationType:: + kPersistentTfLiteTensorQuantizationData) + .used_bytes, + 
thresholds.persistent_tflite_tensor_quantization_data_size); + EnsureAllocatedSizeThreshold( + "PersistentBufferData", + allocator + .GetRecordedAllocation( + tflite::RecordedAllocationType::kPersistentBufferData) + .used_bytes, + thresholds.persistent_buffer_data); + EnsureAllocatedSizeThreshold( + "NodeAndRegistration", + allocator + .GetRecordedAllocation( + tflite::RecordedAllocationType::kNodeAndRegistrationArray) + .used_bytes, + sizeof(tflite::NodeAndRegistration) * + thresholds.node_and_registration_count); + + // Ensure tail allocation recording is not missing any large chunks: + size_t tail_est_length = sizeof(TfLiteEvalTensor) * thresholds.tensor_count + + thresholds.tensor_variable_buffer_data_size + + sizeof(tflite::NodeAndRegistration) * + thresholds.node_and_registration_count + + thresholds.op_runtime_data_size; + TF_LITE_MICRO_EXPECT_LE(thresholds.tail_alloc_size - tail_est_length, + kAllocationTailMiscCeiling); +} + +} // namespace + +TF_LITE_MICRO_TESTS_BEGIN + +TF_LITE_MICRO_TEST(TestKeywordModelMemoryThreshold) { + tflite::MicroMutableOpResolver<4> op_resolver; + TF_LITE_MICRO_EXPECT_EQ( + op_resolver.AddFullyConnected(tflite::Register_FULLY_CONNECTED_INT8()), + kTfLiteOk); + TF_LITE_MICRO_EXPECT_EQ(op_resolver.AddQuantize(), kTfLiteOk); + TF_LITE_MICRO_EXPECT_EQ( + op_resolver.AddSoftmax(tflite::Register_SOFTMAX_INT8_INT16()), kTfLiteOk); + TF_LITE_MICRO_EXPECT_EQ(op_resolver.AddSvdf(tflite::Register_SVDF_INT8()), + kTfLiteOk); + tflite::RecordingMicroInterpreter interpreter( + tflite::GetModel(g_keyword_scrambled_model_data), op_resolver, + keyword_model_tensor_arena, kKeywordModelTensorArenaSize); + + interpreter.AllocateTensors(); + + ModelAllocationThresholds thresholds; + thresholds.tensor_count = kKeywordModelTensorCount; + thresholds.node_and_registration_count = + kKeywordModelNodeAndRegistrationCount; + thresholds.total_alloc_size = + kKeywordModelOnlyTotalSize + + tflite::RecordingMicroAllocator::GetDefaultTailUsage(); + 
thresholds.head_alloc_size = kKeywordModelHeadSize; + thresholds.tail_alloc_size = + kKeywordModelOnlyTailSize + + tflite::RecordingMicroAllocator::GetDefaultTailUsage(); + thresholds.tensor_variable_buffer_data_size = + kKeywordModelTfLiteTensorVariableBufferDataSize; + thresholds.op_runtime_data_size = kKeywordModelOpRuntimeDataSize; + thresholds.persistent_buffer_data = kKeywordModelPersistentBufferDataSize; + thresholds.persistent_tflite_tensor_data_size = + kKeywordModelPersistentTfLiteTensorDataSize; + thresholds.persistent_tflite_tensor_quantization_data_size = + kKeywordModelPersistentTfLiteTensorQuantizationData; + + ValidateModelAllocationThresholds(interpreter.GetMicroAllocator(), + thresholds); +} + +TF_LITE_MICRO_TEST(TestConvModelMemoryThreshold) { + tflite::MicroMutableOpResolver<6> op_resolver; + TF_LITE_MICRO_EXPECT_EQ(op_resolver.AddConv2D(), kTfLiteOk); + TF_LITE_MICRO_EXPECT_EQ(op_resolver.AddQuantize(), kTfLiteOk); + TF_LITE_MICRO_EXPECT_EQ(op_resolver.AddMaxPool2D(), kTfLiteOk); + TF_LITE_MICRO_EXPECT_EQ(op_resolver.AddReshape(), kTfLiteOk); + TF_LITE_MICRO_EXPECT_EQ(op_resolver.AddFullyConnected(), kTfLiteOk); + TF_LITE_MICRO_EXPECT_EQ(op_resolver.AddDequantize(), kTfLiteOk); + + tflite::RecordingMicroInterpreter interpreter( + tflite::GetModel(kTestConvModelData), op_resolver, test_conv_tensor_arena, + kTestConvModelArenaSize); + + interpreter.AllocateTensors(); + + ModelAllocationThresholds thresholds; + thresholds.tensor_count = kTestConvModelTensorCount; + thresholds.node_and_registration_count = + kTestConvModelNodeAndRegistrationCount; + thresholds.total_alloc_size = + kTestConvModelOnlyTotalSize + + tflite::RecordingMicroAllocator::GetDefaultTailUsage(); + thresholds.head_alloc_size = kTestConvModelHeadSize; + thresholds.tail_alloc_size = + kTestConvModelOnlyTailSize + + tflite::RecordingMicroAllocator::GetDefaultTailUsage(); + thresholds.op_runtime_data_size = kTestConvModelOpRuntimeDataSize; + thresholds.persistent_buffer_data = 
kTestConvModelPersistentBufferDataSize; + thresholds.persistent_tflite_tensor_data_size = + kTestConvModelPersistentTfLiteTensorDataSize; + thresholds.persistent_tflite_tensor_quantization_data_size = + kTestConvModelPersistentTfLiteTensorQuantizationData; + + ValidateModelAllocationThresholds(interpreter.GetMicroAllocator(), + thresholds); +} + +TF_LITE_MICRO_TESTS_END diff --git a/tensorflow/lite/micro/memory_helpers.cc b/tensorflow/lite/micro/memory_helpers.cc new file mode 100644 index 0000000..685f04b --- /dev/null +++ b/tensorflow/lite/micro/memory_helpers.cc @@ -0,0 +1,171 @@ +/* Copyright 2023 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/ + +#include "tensorflow/lite/micro/memory_helpers.h" + +#include +#include + +#include "flatbuffers/flatbuffers.h" // from @flatbuffers +#include "tensorflow/lite/c/common.h" +#include "tensorflow/lite/kernels/internal/tensor_ctypes.h" +#include "tensorflow/lite/micro/tflite_bridge/flatbuffer_conversions_bridge.h" +#include "tensorflow/lite/schema/schema_generated.h" + +namespace tflite { + +uint8_t* AlignPointerUp(uint8_t* data, size_t alignment) { + std::uintptr_t data_as_uintptr_t = reinterpret_cast(data); + uint8_t* aligned_result = reinterpret_cast( + ((data_as_uintptr_t + (alignment - 1)) / alignment) * alignment); + return aligned_result; +} + +uint8_t* AlignPointerDown(uint8_t* data, size_t alignment) { + std::uintptr_t data_as_uintptr_t = reinterpret_cast(data); + uint8_t* aligned_result = + reinterpret_cast((data_as_uintptr_t / alignment) * alignment); + return aligned_result; +} + +size_t AlignSizeUp(size_t size, size_t alignment) { + size_t aligned_size = (((size + (alignment - 1)) / alignment) * alignment); + return aligned_size; +} + +TfLiteStatus TfLiteTypeSizeOf(TfLiteType type, size_t* size) { + switch (type) { + case kTfLiteFloat16: + *size = sizeof(int16_t); + break; + case kTfLiteFloat32: + *size = sizeof(float); + break; + case kTfLiteFloat64: + *size = sizeof(double); + break; + case kTfLiteInt16: + *size = sizeof(int16_t); + break; + case kTfLiteInt32: + *size = sizeof(int32_t); + break; + case kTfLiteUInt32: + *size = sizeof(uint32_t); + break; + case kTfLiteUInt8: + *size = sizeof(uint8_t); + break; + case kTfLiteInt8: + *size = sizeof(int8_t); + break; + case kTfLiteInt64: + *size = sizeof(int64_t); + break; + case kTfLiteUInt64: + *size = sizeof(uint64_t); + break; + case kTfLiteBool: + *size = sizeof(bool); + break; + case kTfLiteResource: + *size = sizeof(int32_t); + break; + case kTfLiteComplex64: + *size = sizeof(float) * 2; + break; + case 
kTfLiteComplex128: + *size = sizeof(double) * 2; + break; + case kTfLiteInt4: + *size = sizeof(int8_t); + break; + default: + return kTfLiteError; + } + return kTfLiteOk; +} + +TfLiteStatus BytesRequiredForTensor(const tflite::Tensor& flatbuffer_tensor, + size_t* bytes, size_t* type_size) { + int element_count = 1; + // If flatbuffer_tensor.shape == nullptr, then flatbuffer_tensor is a scalar + // so has 1 element. + if (flatbuffer_tensor.shape() != nullptr) { + for (size_t n = 0; n < flatbuffer_tensor.shape()->size(); ++n) { + element_count *= flatbuffer_tensor.shape()->Get(n); + } + } + + TfLiteType tf_lite_type; + TF_LITE_ENSURE_STATUS( + ConvertTensorType(flatbuffer_tensor.type(), &tf_lite_type)); + TF_LITE_ENSURE_STATUS(TfLiteTypeSizeOf(tf_lite_type, type_size)); + *bytes = element_count * (*type_size); + return kTfLiteOk; +} + +TfLiteStatus TfLiteEvalTensorByteLength(const TfLiteEvalTensor* eval_tensor, + size_t* out_bytes) { + TFLITE_DCHECK(out_bytes != nullptr); + + int element_count = 1; + // If eval_tensor->dims == nullptr, then tensor is a scalar so has 1 element. + if (eval_tensor->dims != nullptr) { + for (int n = 0; n < eval_tensor->dims->size; ++n) { + element_count *= eval_tensor->dims->data[n]; + } + } + size_t type_size; + TF_LITE_ENSURE_STATUS(TfLiteTypeSizeOf(eval_tensor->type, &type_size)); + *out_bytes = element_count * type_size; + return kTfLiteOk; +} + +TfLiteStatus AllocateOutputDimensionsFromInput(TfLiteContext* context, + const TfLiteTensor* input1, + const TfLiteTensor* input2, + TfLiteTensor* output) { + const TfLiteTensor* input = nullptr; + + TF_LITE_ENSURE(context, input1->dims != nullptr); + TF_LITE_ENSURE(context, input2->dims != nullptr); + TF_LITE_ENSURE(context, output->dims->size == 0); + + input = input1->dims->size > input2->dims->size ? 
input1 : input2; + TF_LITE_ENSURE(context, output->type == input->type); + + size_t size = 0; + TfLiteTypeSizeOf(input->type, &size); + const int dimensions_count = tflite::GetTensorShape(input).DimensionsCount(); + for (int i = 0; i < dimensions_count; i++) { + size *= input->dims->data[i]; + } + + output->bytes = size; + + output->dims = + reinterpret_cast(context->AllocatePersistentBuffer( + context, TfLiteIntArrayGetSizeInBytes(size))); + + output->dims->size = input->dims->size; + for (int i = 0; i < dimensions_count; i++) { + output->dims->data[i] = input->dims->data[i]; + } + + return kTfLiteOk; +} + +} // namespace tflite diff --git a/tensorflow/lite/micro/memory_helpers.h b/tensorflow/lite/micro/memory_helpers.h new file mode 100644 index 0000000..f3392e4 --- /dev/null +++ b/tensorflow/lite/micro/memory_helpers.h @@ -0,0 +1,64 @@ +/* Copyright 2019 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ +#ifndef TENSORFLOW_LITE_MICRO_MEMORY_HELPERS_H_ +#define TENSORFLOW_LITE_MICRO_MEMORY_HELPERS_H_ + +#include +#include + +#include "tensorflow/lite/c/common.h" +#include "tensorflow/lite/schema/schema_generated.h" + +namespace tflite { + +// Returns the next pointer address aligned to the given alignment. +uint8_t* AlignPointerUp(uint8_t* data, size_t alignment); + +// Returns the previous pointer address aligned to the given alignment. 
+uint8_t* AlignPointerDown(uint8_t* data, size_t alignment); + +// Returns an increased size that's a multiple of alignment. +size_t AlignSizeUp(size_t size, size_t alignment); + +// Templated version of AlignSizeUp +// Returns an increased size that's a multiple of alignment. +template +size_t AlignSizeUp(size_t count = 1) { + return AlignSizeUp(sizeof(T) * count, alignof(T)); +} + +// Returns size in bytes for a given TfLiteType. +TfLiteStatus TfLiteTypeSizeOf(TfLiteType type, size_t* size); + +// How many bytes are needed to hold a tensor's contents. +TfLiteStatus BytesRequiredForTensor(const tflite::Tensor& flatbuffer_tensor, + size_t* bytes, size_t* type_size); + +// How many bytes are used in a TfLiteEvalTensor instance. The byte length is +// returned in out_bytes. +TfLiteStatus TfLiteEvalTensorByteLength(const TfLiteEvalTensor* eval_tensor, + size_t* out_bytes); + +// Deduce output dimensions from input and allocate given size. +// Useful for operators with two inputs where the largest input should equal the +// output dimension. +TfLiteStatus AllocateOutputDimensionsFromInput(TfLiteContext* context, + const TfLiteTensor* input1, + const TfLiteTensor* input2, + TfLiteTensor* output); + +} // namespace tflite + +#endif // TENSORFLOW_LITE_MICRO_MEMORY_HELPERS_H_ diff --git a/tensorflow/lite/micro/memory_helpers_test.cc b/tensorflow/lite/micro/memory_helpers_test.cc new file mode 100644 index 0000000..e44c586 --- /dev/null +++ b/tensorflow/lite/micro/memory_helpers_test.cc @@ -0,0 +1,243 @@ +/* Copyright 2019 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. 
+You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#include "tensorflow/lite/micro/memory_helpers.h" + +#include "tensorflow/lite/micro/test_helpers.h" +#include "tensorflow/lite/micro/testing/micro_test.h" + +namespace { + +// This just needs to be big enough to handle the array of 5 ints allocated +// in TestAllocateOutputDimensionsFromInput below. +const int kGlobalPersistentBufferLength = 100; +char global_persistent_buffer[kGlobalPersistentBufferLength]; + +// Only need to handle a single allocation at a time for output dimensions +// in TestAllocateOutputDimensionsFromInput. 
+void* FakeAllocatePersistentBuffer(TfLiteContext* context, size_t bytes) { + return reinterpret_cast(global_persistent_buffer); +} + +} // namespace + +TF_LITE_MICRO_TESTS_BEGIN + +TF_LITE_MICRO_TEST(TestAlignPointerUp) { + uint8_t* input0 = reinterpret_cast(0); + + uint8_t* input0_aligned1 = tflite::AlignPointerUp(input0, 1); + TF_LITE_MICRO_EXPECT(input0 == input0_aligned1); + + uint8_t* input0_aligned2 = tflite::AlignPointerUp(input0, 2); + TF_LITE_MICRO_EXPECT(input0 == input0_aligned2); + + uint8_t* input0_aligned3 = tflite::AlignPointerUp(input0, 3); + TF_LITE_MICRO_EXPECT(input0 == input0_aligned3); + + uint8_t* input0_aligned16 = tflite::AlignPointerUp(input0, 16); + TF_LITE_MICRO_EXPECT(input0 == input0_aligned16); + + uint8_t* input23 = reinterpret_cast(23); + + uint8_t* input23_aligned1 = tflite::AlignPointerUp(input23, 1); + TF_LITE_MICRO_EXPECT(input23 == input23_aligned1); + + uint8_t* input23_aligned2 = tflite::AlignPointerUp(input23, 2); + uint8_t* expected23_aligned2 = reinterpret_cast(24); + TF_LITE_MICRO_EXPECT(expected23_aligned2 == input23_aligned2); + + uint8_t* input23_aligned3 = tflite::AlignPointerUp(input23, 3); + uint8_t* expected23_aligned3 = reinterpret_cast(24); + TF_LITE_MICRO_EXPECT(expected23_aligned3 == input23_aligned3); + + uint8_t* input23_aligned16 = tflite::AlignPointerUp(input23, 16); + uint8_t* expected23_aligned16 = reinterpret_cast(32); + TF_LITE_MICRO_EXPECT(expected23_aligned16 == input23_aligned16); +} + +TF_LITE_MICRO_TEST(TestAlignPointerDown) { + uint8_t* input0 = reinterpret_cast(0); + + uint8_t* input0_aligned1 = tflite::AlignPointerDown(input0, 1); + TF_LITE_MICRO_EXPECT(input0 == input0_aligned1); + + uint8_t* input0_aligned2 = tflite::AlignPointerDown(input0, 2); + TF_LITE_MICRO_EXPECT(input0 == input0_aligned2); + + uint8_t* input0_aligned3 = tflite::AlignPointerDown(input0, 3); + TF_LITE_MICRO_EXPECT(input0 == input0_aligned3); + + uint8_t* input0_aligned16 = tflite::AlignPointerDown(input0, 16); + 
TF_LITE_MICRO_EXPECT(input0 == input0_aligned16); + + uint8_t* input23 = reinterpret_cast(23); + + uint8_t* input23_aligned1 = tflite::AlignPointerDown(input23, 1); + TF_LITE_MICRO_EXPECT(input23 == input23_aligned1); + + uint8_t* input23_aligned2 = tflite::AlignPointerDown(input23, 2); + uint8_t* expected23_aligned2 = reinterpret_cast(22); + TF_LITE_MICRO_EXPECT(expected23_aligned2 == input23_aligned2); + + uint8_t* input23_aligned3 = tflite::AlignPointerDown(input23, 3); + uint8_t* expected23_aligned3 = reinterpret_cast(21); + TF_LITE_MICRO_EXPECT(expected23_aligned3 == input23_aligned3); + + uint8_t* input23_aligned16 = tflite::AlignPointerDown(input23, 16); + uint8_t* expected23_aligned16 = reinterpret_cast(16); + TF_LITE_MICRO_EXPECT(expected23_aligned16 == input23_aligned16); +} + +TF_LITE_MICRO_TEST(TestAlignSizeUp) { + TF_LITE_MICRO_EXPECT_EQ(static_cast(1), tflite::AlignSizeUp(1, 1)); + TF_LITE_MICRO_EXPECT_EQ(static_cast(2), tflite::AlignSizeUp(1, 2)); + TF_LITE_MICRO_EXPECT_EQ(static_cast(3), tflite::AlignSizeUp(1, 3)); + TF_LITE_MICRO_EXPECT_EQ(static_cast(16), tflite::AlignSizeUp(1, 16)); + + TF_LITE_MICRO_EXPECT_EQ(static_cast(23), tflite::AlignSizeUp(23, 1)); + TF_LITE_MICRO_EXPECT_EQ(static_cast(24), tflite::AlignSizeUp(23, 2)); + TF_LITE_MICRO_EXPECT_EQ(static_cast(24), tflite::AlignSizeUp(23, 3)); + TF_LITE_MICRO_EXPECT_EQ(static_cast(32), tflite::AlignSizeUp(23, 16)); +} + +TF_LITE_MICRO_TEST(TestTemplatedAlignSizeUp) { + // Test structure to test AlignSizeUp. 
+ struct alignas(32) TestAlignSizeUp { + // Opaque blob + float blob_data[4]; + }; + + TF_LITE_MICRO_EXPECT_EQ(static_cast(32), + tflite::AlignSizeUp()); + TF_LITE_MICRO_EXPECT_EQ(static_cast(64), + tflite::AlignSizeUp(2)); +} + +TF_LITE_MICRO_TEST(TestTypeSizeOf) { + size_t size; + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, + tflite::TfLiteTypeSizeOf(kTfLiteFloat16, &size)); + TF_LITE_MICRO_EXPECT_EQ(sizeof(int16_t), size); + + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, + tflite::TfLiteTypeSizeOf(kTfLiteFloat32, &size)); + TF_LITE_MICRO_EXPECT_EQ(sizeof(float), size); + + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, + tflite::TfLiteTypeSizeOf(kTfLiteFloat64, &size)); + TF_LITE_MICRO_EXPECT_EQ(sizeof(double), size); + + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, + tflite::TfLiteTypeSizeOf(kTfLiteInt16, &size)); + TF_LITE_MICRO_EXPECT_EQ(sizeof(int16_t), size); + + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, + tflite::TfLiteTypeSizeOf(kTfLiteInt32, &size)); + TF_LITE_MICRO_EXPECT_EQ(sizeof(int32_t), size); + + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, + tflite::TfLiteTypeSizeOf(kTfLiteUInt32, &size)); + TF_LITE_MICRO_EXPECT_EQ(sizeof(uint32_t), size); + + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, + tflite::TfLiteTypeSizeOf(kTfLiteUInt8, &size)); + TF_LITE_MICRO_EXPECT_EQ(sizeof(uint8_t), size); + + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, + tflite::TfLiteTypeSizeOf(kTfLiteInt8, &size)); + TF_LITE_MICRO_EXPECT_EQ(sizeof(int8_t), size); + + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, + tflite::TfLiteTypeSizeOf(kTfLiteInt64, &size)); + TF_LITE_MICRO_EXPECT_EQ(sizeof(int64_t), size); + + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, + tflite::TfLiteTypeSizeOf(kTfLiteUInt64, &size)); + TF_LITE_MICRO_EXPECT_EQ(sizeof(uint64_t), size); + + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, + tflite::TfLiteTypeSizeOf(kTfLiteBool, &size)); + TF_LITE_MICRO_EXPECT_EQ(sizeof(bool), size); + + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, + tflite::TfLiteTypeSizeOf(kTfLiteComplex64, &size)); + TF_LITE_MICRO_EXPECT_EQ(sizeof(float) * 2, size); + + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, + 
tflite::TfLiteTypeSizeOf(kTfLiteComplex128, &size)); + TF_LITE_MICRO_EXPECT_EQ(sizeof(double) * 2, size); + + TF_LITE_MICRO_EXPECT_NE( + kTfLiteOk, tflite::TfLiteTypeSizeOf(static_cast(-1), &size)); +} + +TF_LITE_MICRO_TEST(TestBytesRequiredForTensor) { + const tflite::Tensor* tensor100 = + tflite::testing::Create1dFlatbufferTensor(100); + size_t bytes; + size_t type_size; + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, tflite::BytesRequiredForTensor( + *tensor100, &bytes, &type_size)); + TF_LITE_MICRO_EXPECT_EQ(static_cast(400), bytes); + TF_LITE_MICRO_EXPECT_EQ(static_cast(4), type_size); + + const tflite::Tensor* tensor200 = + tflite::testing::Create1dFlatbufferTensor(200); + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, tflite::BytesRequiredForTensor( + *tensor200, &bytes, &type_size)); + TF_LITE_MICRO_EXPECT_EQ(static_cast(800), bytes); + TF_LITE_MICRO_EXPECT_EQ(static_cast(4), type_size); +} + +TF_LITE_MICRO_TEST(TestAllocateOutputDimensionsFromInput) { + constexpr int kDimsLen = 4; + int input1_dims[] = {1, 1}; + int input2_dims[] = {kDimsLen, 5, 5, 5, 5}; + int output_dims[] = {0, 0, 0, 0, 0}; + TfLiteTensor input_tensor1 = tflite::testing::CreateTensor( + nullptr, tflite::testing::IntArrayFromInts(input1_dims)); + TfLiteTensor input_tensor2 = tflite::testing::CreateTensor( + nullptr, tflite::testing::IntArrayFromInts(input2_dims)); + TfLiteTensor output_tensor = tflite::testing::CreateTensor( + nullptr, tflite::testing::IntArrayFromInts(output_dims)); + TfLiteContext context; + // Only need to allocate space for output_tensor.dims. Use a simple + // fake allocator. 
+ context.AllocatePersistentBuffer = FakeAllocatePersistentBuffer; + + TF_LITE_MICRO_EXPECT_EQ( + kTfLiteOk, tflite::AllocateOutputDimensionsFromInput( + &context, &input_tensor1, &input_tensor2, &output_tensor)); + + TF_LITE_MICRO_EXPECT_EQ(output_tensor.bytes, input_tensor2.bytes); + for (int i = 0; i < kDimsLen; i++) { + TF_LITE_MICRO_EXPECT_EQ(input_tensor2.dims->data[i], + output_tensor.dims->data[i]); + // Reset output dims for next iteration. + output_tensor.dims->data[i] = 0; + } + // Output tensor size must be 0 to allocate output dimensions from input. + output_tensor.dims->size = 0; + TF_LITE_MICRO_EXPECT_EQ( + kTfLiteOk, tflite::AllocateOutputDimensionsFromInput( + &context, &input_tensor2, &input_tensor1, &output_tensor)); + for (int i = 0; i < kDimsLen; i++) { + TF_LITE_MICRO_EXPECT_EQ(input_tensor2.dims->data[i], + output_tensor.dims->data[i]); + } + TF_LITE_MICRO_EXPECT_EQ(output_tensor.bytes, input_tensor2.bytes); +} +TF_LITE_MICRO_TESTS_END diff --git a/tensorflow/lite/micro/memory_planner/BUILD b/tensorflow/lite/micro/memory_planner/BUILD new file mode 100644 index 0000000..0329e73 --- /dev/null +++ b/tensorflow/lite/micro/memory_planner/BUILD @@ -0,0 +1,120 @@ +load( + "//tensorflow/lite/micro:build_def.bzl", + "micro_copts", +) + +package( + default_visibility = ["//visibility:public"], + # Disabling layering_check because of http://b/177257332 + features = ["-layering_check"], + licenses = ["notice"], +) + +cc_library( + name = "micro_memory_planner", + hdrs = [ + "micro_memory_planner.h", + ], + copts = micro_copts(), + deps = [ + "//tensorflow/lite/c:common", + "//tensorflow/lite/core/api", + ], +) + +cc_library( + name = "linear_memory_planner", + srcs = [ + "linear_memory_planner.cc", + ], + hdrs = [ + "linear_memory_planner.h", + ], + copts = micro_copts(), + deps = [ + ":micro_memory_planner", + "//tensorflow/lite/c:common", + "//tensorflow/lite/micro:micro_compatibility", + "//tensorflow/lite/micro:micro_log", + ], +) + +cc_library( + 
name = "greedy_memory_planner", + srcs = [ + "greedy_memory_planner.cc", + ], + hdrs = [ + "greedy_memory_planner.h", + ], + copts = micro_copts(), + deps = [ + ":micro_memory_planner", + "//tensorflow/lite/micro:micro_compatibility", + "//tensorflow/lite/micro:micro_log", + "//tensorflow/lite/micro:micro_string", + ], +) + +cc_test( + name = "linear_memory_planner_test", + srcs = [ + "linear_memory_planner_test.cc", + ], + deps = [ + ":linear_memory_planner", + "//tensorflow/lite/micro/testing:micro_test", + ], +) + +cc_test( + name = "greedy_memory_planner_test", + srcs = [ + "greedy_memory_planner_test.cc", + ], + deps = [ + ":greedy_memory_planner", + "//tensorflow/lite/micro/testing:micro_test", + ], +) + +cc_library( + name = "memory_plan_struct", + hdrs = [ + "memory_plan_struct.h", + ], + copts = micro_copts(), + deps = [ + "//tensorflow/lite/c:common", + "//tensorflow/lite/core/api", + "//tensorflow/lite/micro:micro_compatibility", + "//tensorflow/lite/micro:micro_utils", + ], +) + +cc_library( + name = "non_persistent_buffer_planner_shim", + srcs = ["non_persistent_buffer_planner_shim.cc"], + hdrs = [ + "non_persistent_buffer_planner_shim.h", + ], + copts = micro_copts(), + deps = [ + ":memory_plan_struct", + ":micro_memory_planner", + "//tensorflow/lite/c:common", + "//tensorflow/lite/core/api", + "//tensorflow/lite/micro:micro_compatibility", + ], +) + +cc_test( + name = "non_persistent_buffer_planner_shim_test", + srcs = [ + "non_persistent_buffer_planner_shim_test.cc", + ], + deps = [ + ":non_persistent_buffer_planner_shim", + "//tensorflow/lite/micro/testing:micro_test", + ], +) diff --git a/tensorflow/lite/micro/memory_planner/greedy_memory_planner.cc b/tensorflow/lite/micro/memory_planner/greedy_memory_planner.cc new file mode 100644 index 0000000..471a5b2 --- /dev/null +++ b/tensorflow/lite/micro/memory_planner/greedy_memory_planner.cc @@ -0,0 +1,448 @@ +/* Copyright 2019 The TensorFlow Authors. All Rights Reserved. 

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==============================================================================*/

#include "tensorflow/lite/micro/memory_planner/greedy_memory_planner.h"

#include "tensorflow/lite/micro/micro_log.h"
#include "tensorflow/lite/micro/micro_string.h"

namespace tflite {

namespace {

// Returns a single printable character identifying buffer number `i` in the
// ascii-art output of GreedyMemoryPlanner::PrintMemoryPlan():
// '0'-'9' for 0-9, 'a'-'z' for 10-35, 'A'-'Z' for 36-61, '*' otherwise.
char GetOrdinalCharacter(int i) {
  if (i < 10) {
    return '0' + i;
  } else if (i < 36) {
    return 'a' + (i - 10);
  } else if (i < 62) {
    return 'A' + (i - 36);
  }
  return '*';
}

}  // namespace

// Simple stable in-place sort function. Not time-efficient for large arrays.
// Would normally be in an anonymous namespace to keep it private, but we want
// to be able to test it externally.
+void ReverseSortInPlace(int* values, int* ids, int size) { + bool any_swapped; + do { + any_swapped = false; + for (int i = 1; i < size; ++i) { + if (values[i - 1] < values[i]) { + const int value_temp = values[i - 1]; + values[i - 1] = values[i]; + values[i] = value_temp; + const int id_temp = ids[i - 1]; + ids[i - 1] = ids[i]; + ids[i] = id_temp; + any_swapped = true; + } + } + } while (any_swapped); +} + +GreedyMemoryPlanner::GreedyMemoryPlanner() {} + +TfLiteStatus GreedyMemoryPlanner::Init(unsigned char* scratch_buffer, + int scratch_buffer_size) { + // Reset internal states + buffer_count_ = 0; + need_to_calculate_offsets_ = true; + + // Allocate the arrays we need within the scratch buffer arena. + max_buffer_count_ = scratch_buffer_size / per_buffer_size(); + + unsigned char* next_free = scratch_buffer; + requirements_ = reinterpret_cast(next_free); + next_free += sizeof(BufferRequirements) * max_buffer_count_; + + buffer_sizes_sorted_ = reinterpret_cast(next_free); + next_free += sizeof(int) * max_buffer_count_; + + buffer_ids_sorted_ = reinterpret_cast(next_free); + next_free += sizeof(int) * max_buffer_count_; + + buffers_sorted_by_offset_ = reinterpret_cast(next_free); + next_free += sizeof(ListEntry) * max_buffer_count_; + + buffer_offsets_ = reinterpret_cast(next_free); + return kTfLiteOk; +} + +GreedyMemoryPlanner::~GreedyMemoryPlanner() { + // We don't own the scratch buffer, so don't deallocate anything. 
+} + +TfLiteStatus GreedyMemoryPlanner::AddBuffer(int size, int first_time_used, + int last_time_used) { + if (buffer_count_ >= max_buffer_count_) { + MicroPrintf("Too many buffers (max is %d)", max_buffer_count_); + return kTfLiteError; + } + BufferRequirements* current = &requirements_[buffer_count_]; + current->size = size; + current->first_time_used = first_time_used; + current->last_time_used = last_time_used; + current->offline_offset = kOnlinePlannedBuffer; + ++buffer_count_; + need_to_calculate_offsets_ = true; + return kTfLiteOk; +} + +TfLiteStatus GreedyMemoryPlanner::AddBuffer(int size, int first_time_used, + int last_time_used, + int offline_offset) { + BufferRequirements* current = &requirements_[buffer_count_]; + if (AddBuffer(size, first_time_used, last_time_used) != kTfLiteOk) { + return kTfLiteError; + } + current->offline_offset = offline_offset; + return kTfLiteOk; +} + +bool GreedyMemoryPlanner::DoesEntryOverlapInTime( + const GreedyMemoryPlanner::ListEntry* entry, const int first_time_used, + const int last_time_used) const { + const BufferRequirements* entry_requirements = + &requirements_[entry->requirements_index]; + if (entry_requirements->first_time_used > last_time_used) { + return false; + } + if (first_time_used > entry_requirements->last_time_used) { + return false; + } + return true; +} + +GreedyMemoryPlanner::ListEntry* +GreedyMemoryPlanner::NextSimultaneouslyActiveBuffer( + const GreedyMemoryPlanner::ListEntry* start, const int first_time_used, + const int last_time_used) { + ListEntry* result = nullptr; + ListEntry* candidate_next_entry; + if (start == nullptr) { + candidate_next_entry = &buffers_sorted_by_offset_[first_entry_index_]; + } else { + if (start->next_entry_index == -1) { + return nullptr; + } + candidate_next_entry = &buffers_sorted_by_offset_[start->next_entry_index]; + } + do { + if (DoesEntryOverlapInTime(candidate_next_entry, first_time_used, + last_time_used)) { + result = candidate_next_entry; + break; + } + if 
(candidate_next_entry->next_entry_index == -1) { + break; + } + candidate_next_entry = + &buffers_sorted_by_offset_[candidate_next_entry->next_entry_index]; + } while (true); + return result; +} + +void GreedyMemoryPlanner::CalculateOffsetsIfNeeded() { + if (!need_to_calculate_offsets_ || (buffer_count_ == 0)) { + return; + } + need_to_calculate_offsets_ = false; + + // Start off by ordering the buffers in descending order of size. + // This helps find a more compact layout. Intuitively, you can think + // about putting the large buffers in place first, and then the + // smaller buffers can fit in the gaps, rather than fragmenting the + // gaps with small buffers at the beginning. Add offline planned offsets + // first in the list, since they have a predetermined offset. + int idx_from_tail = buffer_count_; + int idx_from_head = 0; + for (int i = 0; i < buffer_count_; ++i) { + if (requirements_[i].offline_offset == kOnlinePlannedBuffer) { + idx_from_tail--; + buffer_sizes_sorted_[idx_from_tail] = requirements_[i].size; + buffer_ids_sorted_[idx_from_tail] = i; + buffer_offsets_[i] = -1; + } else { + buffer_sizes_sorted_[idx_from_head] = requirements_[i].size; + buffer_ids_sorted_[idx_from_head] = i; + buffer_offsets_[i] = requirements_[i].offline_offset; + idx_from_head++; + } + } + + // This sorting algorithm is naive, and may end up taking a very long time + // with hundreds of buffers. Do not sort the offline planned offsets. + ReverseSortInPlace(&buffer_sizes_sorted_[idx_from_head], + &buffer_ids_sorted_[idx_from_head], + buffer_count_ - idx_from_head); + + // Initialize the first entry to the first buffer in + // buffer_ids_sorted_. + // - If there are no offline planned offsets, the largest buffer will be + // first, and the buffers will be handled in size order. + // - If offline offsets are present, these will be handled first in order + // for the greedy algorithm to utilized gaps in the offline plan. 
  // (continuation of GreedyMemoryPlanner::CalculateOffsetsIfNeeded)
  first_entry_index_ = 0;
  next_free_entry_ = 1;
  ListEntry* first_entry = &buffers_sorted_by_offset_[first_entry_index_];
  first_entry->next_entry_index = -1;  // to mark the entry as end of list
  int buffer_id = buffer_ids_sorted_[0];
  first_entry->requirements_index = buffer_id;
  if (requirements_[buffer_id].offline_offset == kOnlinePlannedBuffer) {
    buffer_offsets_[buffer_id] = 0;
  }
  first_entry->offset = buffer_offsets_[buffer_id];

  // Work through the rest of the buffers to find a good gap to place each one.
  for (int i = 1; i < buffer_count_; ++i) {
    // The id is the order the buffer was originally added by the client.
    buffer_id = buffer_ids_sorted_[i];
    // Look at what size and time range the buffer needs to be active.
    BufferRequirements* wanted_requirements = &requirements_[buffer_id];
    const int wanted_size = wanted_requirements->size;
    const int wanted_first_time_used = wanted_requirements->first_time_used;
    const int wanted_last_time_used = wanted_requirements->last_time_used;

    // Find the first buffer that's active in our time range. All placed
    // buffers are stored in the order of their starting position in the arena
    // so that it's easy to find the next buffer in memory, and so the gap.
    // The candidate_entry variable holds the buffer that we're considering
    // placing the current buffer after.

    int candidate_offset = 0;
    // Loop through the offset-ordered list of buffers, looking for gaps.
    if (wanted_requirements->offline_offset == kOnlinePlannedBuffer) {
      ListEntry* prior_entry = nullptr;
      while (true) {
        // Find out what the next active buffer is.
        ListEntry* next_entry = NextSimultaneouslyActiveBuffer(
            prior_entry, wanted_first_time_used, wanted_last_time_used);

        if (prior_entry) {
          // The candidate offset must lie past the end of the buffer we are
          // considering placing ourselves after.
          BufferRequirements* candidate_requirements =
              &requirements_[prior_entry->requirements_index];
          const int prior_entry_offset =
              prior_entry->offset + candidate_requirements->size;
          if (prior_entry_offset > candidate_offset) {
            candidate_offset = prior_entry_offset;
          }
        }
        if (next_entry == nullptr) {
          // We're at the end of the list, so we can always append the buffer
          // here.
          break;
        }
        // Find out how much space there is between us and the next buffer.
        const int gap = next_entry->offset - candidate_offset;
        if (gap >= wanted_size) {
          // This entry has a big enough gap between it and the next, so
          // use it!
          break;
        }
        // The gap wasn't big enough, so move on to another candidate.
        prior_entry = next_entry;
      }
    } else {
      // Offline planned offsets are treated as constant: honor them as-is.
      candidate_offset = wanted_requirements->offline_offset;
    }
    // At this point, we've either found a gap (possibly at the end of the
    // list) and want to place the buffer there, or there are no other active
    // buffers in this time range and so we can put it at offset zero.
    // Record the buffer's offset in our plan.
    buffer_offsets_[buffer_id] = candidate_offset;
    // Add the newly-placed buffer to our offset-ordered list, so that
    // subsequent passes can fit in their buffers around it.
    ListEntry* new_entry = &buffers_sorted_by_offset_[next_free_entry_];
    new_entry->offset = candidate_offset;
    new_entry->requirements_index = buffer_id;
    const int new_entry_index = next_free_entry_;
    ++next_free_entry_;

    if (first_entry->offset > candidate_offset) {
      // The new entry offset is smaller than the first entry offset =>
      // replace the first entry
      first_entry = new_entry;
      first_entry->next_entry_index = first_entry_index_;
      first_entry_index_ = new_entry_index;
    } else {
      ListEntry* current_entry = first_entry;
      // Make sure that we insert the buffer at the correct place in the
      // buffer-offset-ordered list
      while (true) {
        const int next_entry_index = current_entry->next_entry_index;
        if (next_entry_index == -1) {
          // We're at the end of the list, so just add the new entry here.
          current_entry->next_entry_index = new_entry_index;
          new_entry->next_entry_index = -1;
          break;
        }
        // not at the end of the list -> take a look at next entry
        ListEntry* next_entry = &buffers_sorted_by_offset_[next_entry_index];
        if (next_entry->offset > candidate_offset) {
          // We're at the right spot to do an insertion and retain the sorting
          // order, so place the new entry here.
          new_entry->next_entry_index = current_entry->next_entry_index;
          current_entry->next_entry_index = new_entry_index;
          break;
        }
        current_entry = next_entry;
      }
    }
  }
}

// Returns the arena high-water mark: the largest (offset + size) over all
// placed buffers, i.e. the minimum arena size that fits this plan.
size_t GreedyMemoryPlanner::GetMaximumMemorySize() {
  CalculateOffsetsIfNeeded();
  if (buffer_count_ == 0) {
    return 0;
  }
  ListEntry* entry = &buffers_sorted_by_offset_[first_entry_index_];
  size_t max_size = 0;
  while (entry) {
    BufferRequirements* requirements =
        &requirements_[entry->requirements_index];
    const size_t current_size = entry->offset + requirements->size;
    if (current_size > max_size) {
      max_size = current_size;
    }
    if (entry->next_entry_index == -1) {
      break;
    }
    entry = &buffers_sorted_by_offset_[entry->next_entry_index];
  }
  return max_size;
}

// Logs a per-buffer summary followed by an 80-column ascii-art timeline of
// the arena layout, one row per time step.
void GreedyMemoryPlanner::PrintMemoryPlan() {
  CalculateOffsetsIfNeeded();

  for (int i = 0; i < buffer_count_; ++i) {
    MicroPrintf("%c (id=%d): size=%d, offset=%d, first_used=%d last_used=%d",
                GetOrdinalCharacter(i), i, requirements_[i].size,
                buffer_offsets_[i], requirements_[i].first_time_used,
                requirements_[i].last_time_used);
  }

  constexpr int kLineWidth = 80;
  int max_size = kLineWidth;
  int max_time = 0;
  for (int i = 0; i < buffer_count_; ++i) {
    BufferRequirements* requirements = &requirements_[i];
    const int offset = buffer_offsets_[i];
    const int last_time_used = requirements->last_time_used;
    const int size = offset + requirements->size;
    if (size > max_size) {
      max_size = size;
    }
    if (last_time_used > max_time) {
      max_time = last_time_used;
    }
  }

  char line[kLineWidth + 1];
  for (int t = 0; t <= max_time; ++t) {
    for (int c = 0; c < kLineWidth; ++c) {
      line[c] = '.';
    }
    int memory_use = 0;
    for (int i = 0; i < buffer_count_; ++i) {
      BufferRequirements* requirements = &requirements_[i];
      // Skip buffers that aren't live at time t.
      if ((t < requirements->first_time_used) ||
          (t > requirements->last_time_used)) {
        continue;
      }
      const int offset = buffer_offsets_[i];
      if (offset == -1) {
        // (continuation of GreedyMemoryPlanner::PrintMemoryPlan)
        continue;
      }
      const int size = requirements->size;
      memory_use += size;
      // Scale the buffer's [offset, offset + size) span onto the 80-column
      // line; '!' marks columns claimed by more than one buffer.
      const int line_start = (offset * kLineWidth) / max_size;
      const int line_end = ((offset + size) * kLineWidth) / max_size;
      for (int n = line_start; n < line_end; ++n) {
        if (line[n] == '.') {
          line[n] = GetOrdinalCharacter(i);
        } else {
          line[n] = '!';
        }
      }
    }
    line[kLineWidth] = 0;

    MicroPrintf("%s%d: %s (%dk)", t < 10 ? " " : "", t, (const char*)line,
                (memory_use + 1023) / 1024);
  }
}

int GreedyMemoryPlanner::GetBufferCount() { return buffer_count_; }

// Writes the planned arena offset of `buffer_index` into *offset.
// Returns kTfLiteError for out-of-range indices.
TfLiteStatus GreedyMemoryPlanner::GetOffsetForBuffer(int buffer_index,
                                                     int* offset) {
  CalculateOffsetsIfNeeded();
  if ((buffer_index < 0) || (buffer_index >= buffer_count_)) {
    MicroPrintf("buffer index %d is outside range 0 to %d", buffer_index,
                buffer_count_);
    return kTfLiteError;
  }
  *offset = buffer_offsets_[buffer_index];
  return kTfLiteOk;
}

// Testing aid: O(N^2) pairwise check that no two buffers which are live at
// the same time were assigned overlapping memory ranges.
bool GreedyMemoryPlanner::DoAnyBuffersOverlap() {
  CalculateOffsetsIfNeeded();
  bool were_overlaps_found = false;
  for (int i = 0; i < buffer_count_; ++i) {
    BufferRequirements* a_requirements = &requirements_[i];
    const int a_start_offset = buffer_offsets_[i];
    const int a_first_time_used = a_requirements->first_time_used;
    const int a_last_time_used = a_requirements->last_time_used;
    const int a_end_offset = a_start_offset + a_requirements->size;
    for (int j = 0; j < buffer_count_; ++j) {
      if (i == j) {
        continue;
      }
      BufferRequirements* b_requirements = &requirements_[j];
      const int b_start_offset = buffer_offsets_[j];
      const int b_first_time_used = b_requirements->first_time_used;
      const int b_last_time_used = b_requirements->last_time_used;
      const int b_end_offset = b_start_offset + b_requirements->size;
      if ((a_first_time_used > b_last_time_used) ||
          (b_first_time_used > a_last_time_used)) {
        // Buffers don't overlap in time.
        continue;
      }
      if ((a_start_offset >= b_end_offset) ||
          (b_start_offset >= a_end_offset)) {
        // No overlap in memory.
        continue;
      }
      were_overlaps_found = true;
      MicroPrintf("Overlap: %d (%d=>%d, %d->%d) vs %d (%d=>%d, %d->%d)", i,
                  a_first_time_used, a_last_time_used, a_start_offset,
                  a_end_offset, j, b_first_time_used, b_last_time_used,
                  b_start_offset, b_end_offset);
    }
  }
  return were_overlaps_found;
}

}  // namespace tflite

// ==== new file in this patch:
// tensorflow/lite/micro/memory_planner/greedy_memory_planner.h ====
/* Copyright 2019 The TensorFlow Authors. All Rights Reserved.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==============================================================================*/

#ifndef TENSORFLOW_LITE_MICRO_MEMORY_PLANNER_GREEDY_MEMORY_PLANNER_H_
#define TENSORFLOW_LITE_MICRO_MEMORY_PLANNER_GREEDY_MEMORY_PLANNER_H_

#include "tensorflow/lite/micro/compatibility.h"
#include "tensorflow/lite/micro/memory_planner/micro_memory_planner.h"

namespace tflite {

// Sentinel offline_offset value meaning "place this buffer online, greedily".
constexpr int kOnlinePlannedBuffer = -1;

// A memory planner that uses a greedy algorithm to arrange buffers in memory
// to minimize the overall arena size needed.
//
// The algorithm works like this:
//  - The client enters the buffer information through AddBuffer().
//  - When a function like GetOffsetForBuffer() is called, the
//    CalculateOffsetsIfNeeded() method is invoked.
//  - If an up to date plan is not already present, one will be calculated.
//  - The buffers are sorted in descending order of size.
//  - The largest buffer is placed at offset zero.
//  - The rest of the buffers are looped through in descending size order.
//  - The other buffers that need to be in memory at the same time are found.
//  - The first gap between simultaneously active buffers that the current
//    buffer fits into will be used.
//  - If no large-enough gap is found, the current buffer is placed after the
//    last buffer that's simultaneously active.
//  - This continues until all buffers are placed, and the offsets stored.
//
// This is not guaranteed to produce the best placement, since that's an
// NP-Complete problem, but in practice it should produce one that's decent.
class GreedyMemoryPlanner : public MicroMemoryPlanner {
 public:
  GreedyMemoryPlanner();
  ~GreedyMemoryPlanner() override;

  // You need to pass in an area of memory to be used for planning. The client
  // should ensure the validity of the memory when it needs to use this object.
  // This memory isn't owned by this object, so management should be handled by
  // the client. This is so it can be stack or globally allocated if necessary
  // on devices without dynamic memory allocation. How many buffers can be
  // planned for will depend on the size of this scratch memory, so you should
  // enlarge it if you see an error when calling AddBuffer(). The memory can be
  // reused once you're done with the planner, as long as you copy the
  // calculated offsets to another location. Each buffer requires about 36 bytes
  // of scratch.
  TfLiteStatus Init(unsigned char* scratch_buffer,
                    int scratch_buffer_size) override;

  // Record details of a buffer we want to place.
  TfLiteStatus AddBuffer(int size, int first_time_used,
                         int last_time_used) override;

  // Record details of an offline planned buffer offset we want to place.
  // offline_offset is the buffer offset from the start of the arena.
  TfLiteStatus AddBuffer(int size, int first_time_used, int last_time_used,
                         int offline_offset) override;

  // Returns the high-water mark of used memory. This is the minimum size of a
  // memory arena you'd need to allocate to hold these buffers.
  size_t GetMaximumMemorySize() override;

  // How many buffers have been recorded.
  int GetBufferCount() override;

  // Where a given buffer should be placed in the memory arena.
  // This information is stored in the memory arena itself, so once the arena
  // is used for inference, it will be overwritten.
  TfLiteStatus GetOffsetForBuffer(int buffer_index, int* offset) override;

  // Prints an ascii-art diagram of the buffer layout plan.
  void PrintMemoryPlan() override;

  // Debug method to check whether any buffer allocations are overlapping. This
  // is an O(N^2) complexity operation, so only use for testing.
  bool DoAnyBuffersOverlap();

  // Used to store a list of buffers ordered by their offset.
  struct ListEntry {
    int offset;
    int requirements_index;
    int next_entry_index;
  };

  // Number of bytes required in order to plan a buffer.
  static size_t per_buffer_size() {
    const int per_buffer_size =
        sizeof(BufferRequirements) +  // requirements_
        sizeof(int) +                 // buffer_sizes_sorted_
        sizeof(int) +                 // buffer_ids_sorted_
        sizeof(ListEntry) +           // buffers_sorted_by_offset_
        sizeof(int);                  // buffer_offsets_;
    return per_buffer_size;
  }

 private:
  // Whether a buffer is active in a given time range.
  bool DoesEntryOverlapInTime(const ListEntry* entry, const int first_time_used,
                              const int last_time_used) const;

  // Walks the list to return the next buffer that is active in a given time
  // range, or a null pointer if there are none.
  ListEntry* NextSimultaneouslyActiveBuffer(const ListEntry* start,
                                            const int first_time_used,
                                            const int last_time_used);

  // If there isn't an up to date plan, calculate a new one.
  void CalculateOffsetsIfNeeded();

  // How many buffers we can plan for, based on the arena size we're given in
  // the constructor.
  int max_buffer_count_;

  // The number of buffers added so far.
  int buffer_count_;

  // Records the client-provided information about each buffer.
  struct BufferRequirements {
    int size;
    int offline_offset;
    int first_time_used;
    int last_time_used;
  };

  // Working arrays used during the layout algorithm.
  BufferRequirements* requirements_;
  // buffer_sizes_sorted_ and buffer_ids_sorted_ are sorted according to:
  // {
  //   offline planned buffers,
  //   online planned buffers sorted by size
  // }
  int* buffer_sizes_sorted_;
  int* buffer_ids_sorted_;
  ListEntry* buffers_sorted_by_offset_;
  int next_free_entry_;    // Index of the next free entry of
                           // buffers_sorted_by_offset_
  int first_entry_index_;  // Index of the first entry (smallest offset) of
                           // buffers_sorted_by_offset_

  // Stores the outcome of the plan, the location of each buffer in the arena.
  int* buffer_offsets_;

  // Whether buffers have been added since the last plan was calculated.
  bool need_to_calculate_offsets_;

  TF_LITE_REMOVE_VIRTUAL_DELETE
};

}  // namespace tflite

#endif  // TENSORFLOW_LITE_MICRO_MEMORY_PLANNER_GREEDY_MEMORY_PLANNER_H_

// ==== new file in this patch:
// tensorflow/lite/micro/memory_planner/greedy_memory_planner_test.cc ====
/* Copyright 2022 The TensorFlow Authors. All Rights Reserved.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
+You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#include "tensorflow/lite/micro/memory_planner/greedy_memory_planner.h" + +#include "tensorflow/lite/micro/testing/micro_test.h" + +namespace tflite { +// We don't declare this in the header since it's not a public interface, but we +// need to call it to test it, so declare it here instead. +void ReverseSortInPlace(int* values, int* ids, int size); +} // namespace tflite + +namespace { +constexpr int kScratchBufferSize = 4096; +alignas(4) unsigned char g_scratch_buffer[kScratchBufferSize]; +} // namespace + +TF_LITE_MICRO_TESTS_BEGIN + +TF_LITE_MICRO_TEST(TestReverseSortInPlace) { + constexpr int a_size = 10; + int a_values[a_size] = {10, 9, 8, 7, 6, 5, 4, 3, 2, 1}; + int a_ids[a_size] = {0, 1, 2, 3, 4, 5, 6, 7, 8, 9}; + const int a_expected_values[a_size] = {10, 9, 8, 7, 6, 5, 4, 3, 2, 1}; + const int a_expected_ids[a_size] = {0, 1, 2, 3, 4, 5, 6, 7, 8, 9}; + tflite::ReverseSortInPlace(a_values, a_ids, a_size); + for (int i = 0; i < a_size; ++i) { + TF_LITE_MICRO_EXPECT_EQ(a_expected_values[i], a_values[i]); + TF_LITE_MICRO_EXPECT_EQ(a_expected_ids[i], a_ids[i]); + } + + constexpr int b_size = 10; + int b_values[b_size] = {1, 2, 3, 4, 5, 6, 7, 8, 9, 10}; + int b_ids[b_size] = {0, 1, 2, 3, 4, 5, 6, 7, 8, 9}; + const int b_expected_values[b_size] = {10, 9, 8, 7, 6, 5, 4, 3, 2, 1}; + const int b_expected_ids[b_size] = {9, 8, 7, 6, 5, 4, 3, 2, 1, 0}; + tflite::ReverseSortInPlace(b_values, b_ids, b_size); + for (int i = 0; i < b_size; ++i) { + TF_LITE_MICRO_EXPECT_EQ(b_expected_values[i], 
b_values[i]); + TF_LITE_MICRO_EXPECT_EQ(b_expected_ids[i], b_ids[i]); + } + + constexpr int c_size = 100; + int c_values[c_size] = { + 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, + 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, + 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, + 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, + 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10}; + int c_ids[c_size] = { + 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, + 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, + 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, + 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, + 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, + 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99}; + const int c_expected_values[c_size] = { + 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, + 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, + 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, + 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, + 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1}; + const int c_expected_ids[c_size] = { + 9, 19, 29, 39, 49, 59, 69, 79, 89, 99, 8, 18, 28, 38, 48, 58, 68, + 78, 88, 98, 7, 17, 27, 37, 47, 57, 67, 77, 87, 97, 6, 16, 26, 36, + 46, 56, 66, 76, 86, 96, 5, 15, 25, 35, 45, 55, 65, 75, 85, 95, 4, + 14, 24, 34, 44, 54, 64, 74, 84, 94, 3, 13, 23, 33, 43, 53, 63, 73, + 83, 93, 2, 12, 22, 32, 42, 52, 62, 72, 82, 92, 1, 11, 21, 31, 41, + 51, 61, 71, 81, 91, 0, 10, 20, 30, 40, 50, 60, 70, 80, 90}; + tflite::ReverseSortInPlace(c_values, c_ids, c_size); + for (int i = 0; i < c_size; ++i) { + TF_LITE_MICRO_EXPECT_EQ(c_expected_values[i], c_values[i]); + TF_LITE_MICRO_EXPECT_EQ(c_expected_ids[i], c_ids[i]); + } +} + +TF_LITE_MICRO_TEST(TestGreedyBasics) { + tflite::GreedyMemoryPlanner planner; + 
planner.Init(g_scratch_buffer, kScratchBufferSize); + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, planner.AddBuffer(10, 0, 1)); + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, planner.AddBuffer(20, 2, 3)); + + TF_LITE_MICRO_EXPECT_EQ(false, planner.DoAnyBuffersOverlap()); + + TF_LITE_MICRO_EXPECT_EQ(static_cast(20), + planner.GetMaximumMemorySize()); + + int offset = -1; + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, planner.GetOffsetForBuffer(0, &offset)); + TF_LITE_MICRO_EXPECT_EQ(0, offset); + + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, planner.GetOffsetForBuffer(1, &offset)); + TF_LITE_MICRO_EXPECT_EQ(0, offset); +} + +TF_LITE_MICRO_TEST(TestGreedyMedium) { + tflite::GreedyMemoryPlanner planner; + planner.Init(g_scratch_buffer, kScratchBufferSize); + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, planner.AddBuffer(10, 0, 1)); + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, planner.AddBuffer(20, 1, 2)); + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, planner.AddBuffer(30, 2, 3)); + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, planner.AddBuffer(40, 3, 4)); + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, planner.AddBuffer(50, 0, 1)); + + int offset = -1; + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, planner.GetOffsetForBuffer(0, &offset)); + TF_LITE_MICRO_EXPECT_EQ(50, offset); + + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, planner.GetOffsetForBuffer(1, &offset)); + TF_LITE_MICRO_EXPECT_EQ(70, offset); + + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, planner.GetOffsetForBuffer(2, &offset)); + TF_LITE_MICRO_EXPECT_EQ(40, offset); + + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, planner.GetOffsetForBuffer(3, &offset)); + TF_LITE_MICRO_EXPECT_EQ(0, offset); + + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, planner.GetOffsetForBuffer(4, &offset)); + TF_LITE_MICRO_EXPECT_EQ(0, offset); + + planner.PrintMemoryPlan(); + + TF_LITE_MICRO_EXPECT_EQ(false, planner.DoAnyBuffersOverlap()); + + TF_LITE_MICRO_EXPECT_EQ(static_cast(90), + planner.GetMaximumMemorySize()); +} + +TF_LITE_MICRO_TEST(TestPersonDetectionModel) { + tflite::GreedyMemoryPlanner planner; + planner.Init(g_scratch_buffer, kScratchBufferSize); + 
// These buffer sizes and time ranges are taken from the 250KB MobileNet model + // used in the person detection example. + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, planner.AddBuffer(9216, 0, 29)); + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, planner.AddBuffer(3, 28, 29)); + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, planner.AddBuffer(256, 27, 28)); + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, planner.AddBuffer(2304, 26, 27)); + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, planner.AddBuffer(2304, 25, 26)); + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, planner.AddBuffer(2304, 24, 25)); + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, planner.AddBuffer(1152, 23, 24)); + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, planner.AddBuffer(4608, 22, 23)); + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, planner.AddBuffer(4608, 21, 22)); + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, planner.AddBuffer(4608, 20, 21)); + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, planner.AddBuffer(4608, 19, 20)); + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, planner.AddBuffer(4608, 18, 19)); + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, planner.AddBuffer(4608, 17, 18)); + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, planner.AddBuffer(4608, 16, 17)); + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, planner.AddBuffer(4608, 15, 16)); + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, planner.AddBuffer(4608, 14, 15)); + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, planner.AddBuffer(4608, 13, 14)); + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, planner.AddBuffer(4608, 12, 13)); + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, planner.AddBuffer(2304, 11, 12)); + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, planner.AddBuffer(9216, 10, 11)); + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, planner.AddBuffer(9216, 9, 10)); + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, planner.AddBuffer(9216, 8, 9)); + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, planner.AddBuffer(4608, 7, 8)); + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, planner.AddBuffer(18432, 6, 7)); + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, planner.AddBuffer(18432, 5, 6)); + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, planner.AddBuffer(18432, 4, 5)); + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, 
planner.AddBuffer(9216, 3, 4)); + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, planner.AddBuffer(36864, 2, 3)); + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, planner.AddBuffer(18432, 1, 2)); + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, planner.AddBuffer(18432, 0, 1)); + + planner.PrintMemoryPlan(); + + TF_LITE_MICRO_EXPECT_EQ(false, planner.DoAnyBuffersOverlap()); + + // The sum of all the buffers is 241,027 bytes, so we at least expect the plan + // to come up with something smaller than this. + TF_LITE_MICRO_EXPECT_GT(static_cast(241027), + planner.GetMaximumMemorySize()); +} + +TF_LITE_MICRO_TEST(TestOverlapCase) { + tflite::GreedyMemoryPlanner planner; + planner.Init(g_scratch_buffer, kScratchBufferSize); + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, planner.AddBuffer(100, 0, 1)); + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, planner.AddBuffer(50, 2, 3)); + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, planner.AddBuffer(20, 1, 2)); + + planner.PrintMemoryPlan(); + + TF_LITE_MICRO_EXPECT_EQ(false, planner.DoAnyBuffersOverlap()); + + TF_LITE_MICRO_EXPECT_EQ(static_cast(120), + planner.GetMaximumMemorySize()); +} + +TF_LITE_MICRO_TEST(TestSmallScratch) { + constexpr int scratch_buffer_size = 40; + unsigned char scratch_buffer[scratch_buffer_size]; + tflite::GreedyMemoryPlanner planner; + planner.Init(scratch_buffer, scratch_buffer_size); + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, planner.AddBuffer(100, 0, 1)); + TF_LITE_MICRO_EXPECT_EQ(kTfLiteError, planner.AddBuffer(50, 2, 3)); +} + +TF_LITE_MICRO_TESTS_END diff --git a/tensorflow/lite/micro/memory_planner/linear_memory_planner.cc b/tensorflow/lite/micro/memory_planner/linear_memory_planner.cc new file mode 100644 index 0000000..5c6afb5 --- /dev/null +++ b/tensorflow/lite/micro/memory_planner/linear_memory_planner.cc @@ -0,0 +1,53 @@ +/* Copyright 2019 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. 
+You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#include "tensorflow/lite/micro/memory_planner/linear_memory_planner.h" + +#include "tensorflow/lite/micro/micro_log.h" + +namespace tflite { + +LinearMemoryPlanner::LinearMemoryPlanner() + : current_buffer_count_(0), next_free_offset_(0) {} +LinearMemoryPlanner::~LinearMemoryPlanner() {} + +TfLiteStatus LinearMemoryPlanner::AddBuffer(int size, int first_time_used, + int last_time_used) { + if (current_buffer_count_ >= kMaxBufferCount) { + MicroPrintf("Too many buffers (max is %d)", kMaxBufferCount); + return kTfLiteError; + } + buffer_offsets_[current_buffer_count_] = next_free_offset_; + next_free_offset_ += size; + ++current_buffer_count_; + return kTfLiteOk; +} + +size_t LinearMemoryPlanner::GetMaximumMemorySize() { return next_free_offset_; } + +int LinearMemoryPlanner::GetBufferCount() { return current_buffer_count_; } + +TfLiteStatus LinearMemoryPlanner::GetOffsetForBuffer(int buffer_index, + int* offset) { + if ((buffer_index < 0) || (buffer_index >= current_buffer_count_)) { + MicroPrintf("buffer index %d is outside range 0 to %d", buffer_index, + current_buffer_count_); + return kTfLiteError; + } + *offset = buffer_offsets_[buffer_index]; + return kTfLiteOk; +} + +} // namespace tflite diff --git a/tensorflow/lite/micro/memory_planner/linear_memory_planner.h b/tensorflow/lite/micro/memory_planner/linear_memory_planner.h new file mode 100644 index 0000000..d4938dd --- /dev/null +++ b/tensorflow/lite/micro/memory_planner/linear_memory_planner.h @@ -0,0 +1,49 @@ +/* Copyright 2019 
The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#ifndef TENSORFLOW_LITE_MICRO_MEMORY_PLANNER_LINEAR_MEMORY_PLANNER_H_ +#define TENSORFLOW_LITE_MICRO_MEMORY_PLANNER_LINEAR_MEMORY_PLANNER_H_ + +#include "tensorflow/lite/micro/compatibility.h" +#include "tensorflow/lite/micro/memory_planner/micro_memory_planner.h" + +namespace tflite { + +// The simplest possible memory planner that just lays out all buffers at +// increasing offsets without trying to reuse memory. 
+class LinearMemoryPlanner : public MicroMemoryPlanner { + public: + LinearMemoryPlanner(); + ~LinearMemoryPlanner() override; + + TfLiteStatus AddBuffer(int size, int first_time_used, + int last_time_used) override; + + size_t GetMaximumMemorySize() override; + int GetBufferCount() override; + TfLiteStatus GetOffsetForBuffer(int buffer_index, int* offset) override; + + private: + static constexpr int kMaxBufferCount = 1024; + size_t buffer_offsets_[kMaxBufferCount]; + int current_buffer_count_; + size_t next_free_offset_; + + TF_LITE_REMOVE_VIRTUAL_DELETE +}; + +} // namespace tflite + +#endif // TENSORFLOW_LITE_MICRO_MEMORY_PLANNER_LINEAR_MEMORY_PLANNER_H_ diff --git a/tensorflow/lite/micro/memory_planner/linear_memory_planner_test.cc b/tensorflow/lite/micro/memory_planner/linear_memory_planner_test.cc new file mode 100644 index 0000000..cf4c438 --- /dev/null +++ b/tensorflow/lite/micro/memory_planner/linear_memory_planner_test.cc @@ -0,0 +1,81 @@ +/* Copyright 2019 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/ + +#include "tensorflow/lite/micro/memory_planner/linear_memory_planner.h" + +#include "tensorflow/lite/micro/testing/micro_test.h" + +TF_LITE_MICRO_TESTS_BEGIN + +TF_LITE_MICRO_TEST(TestBasics) { + tflite::LinearMemoryPlanner planner; + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, planner.AddBuffer(10, 0, 1)); + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, planner.AddBuffer(20, 1, 2)); + TF_LITE_MICRO_EXPECT_EQ(static_cast(30), + planner.GetMaximumMemorySize()); + + int offset = -1; + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, planner.GetOffsetForBuffer(0, &offset)); + TF_LITE_MICRO_EXPECT_EQ(0, offset); + + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, planner.GetOffsetForBuffer(1, &offset)); + TF_LITE_MICRO_EXPECT_EQ(10, offset); +} + +TF_LITE_MICRO_TEST(TestErrorHandling) { + tflite::LinearMemoryPlanner planner; + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, planner.AddBuffer(10, 0, 1)); + + int offset = -1; + TF_LITE_MICRO_EXPECT_EQ(kTfLiteError, planner.GetOffsetForBuffer(1, &offset)); +} + +TF_LITE_MICRO_TEST(TestPersonDetectionModel) { + tflite::LinearMemoryPlanner planner; + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, planner.AddBuffer(9216, 0, 29)); + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, planner.AddBuffer(3, 28, 29)); + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, planner.AddBuffer(256, 27, 28)); + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, planner.AddBuffer(2304, 26, 27)); + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, planner.AddBuffer(2304, 25, 26)); + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, planner.AddBuffer(2304, 24, 25)); + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, planner.AddBuffer(1152, 23, 24)); + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, planner.AddBuffer(4608, 22, 23)); + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, planner.AddBuffer(4608, 21, 22)); + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, planner.AddBuffer(4608, 20, 21)); + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, planner.AddBuffer(4608, 19, 20)); + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, planner.AddBuffer(4608, 18, 19)); + 
TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, planner.AddBuffer(4608, 17, 18)); + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, planner.AddBuffer(4608, 16, 17)); + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, planner.AddBuffer(4608, 15, 16)); + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, planner.AddBuffer(4608, 14, 15)); + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, planner.AddBuffer(4608, 13, 14)); + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, planner.AddBuffer(4608, 12, 13)); + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, planner.AddBuffer(2304, 11, 12)); + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, planner.AddBuffer(9216, 10, 11)); + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, planner.AddBuffer(9216, 9, 10)); + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, planner.AddBuffer(9216, 8, 9)); + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, planner.AddBuffer(4608, 7, 8)); + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, planner.AddBuffer(18432, 6, 7)); + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, planner.AddBuffer(18432, 5, 6)); + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, planner.AddBuffer(18432, 4, 5)); + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, planner.AddBuffer(9216, 3, 4)); + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, planner.AddBuffer(36864, 2, 3)); + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, planner.AddBuffer(18432, 1, 2)); + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, planner.AddBuffer(18432, 0, 1)); + TF_LITE_MICRO_EXPECT_EQ(static_cast(241027), + planner.GetMaximumMemorySize()); +} + +TF_LITE_MICRO_TESTS_END diff --git a/tensorflow/lite/micro/memory_planner/memory_plan_struct.h b/tensorflow/lite/micro/memory_planner/memory_plan_struct.h new file mode 100644 index 0000000..c8c431c --- /dev/null +++ b/tensorflow/lite/micro/memory_planner/memory_plan_struct.h @@ -0,0 +1,73 @@ +/* Copyright 2021 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. 
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// =============================================================================

#ifndef TENSORFLOW_LITE_MICRO_MEMORY_PLANNER_MEMORY_PLAN_STRUCT_H_
#define TENSORFLOW_LITE_MICRO_MEMORY_PLANNER_MEMORY_PLAN_STRUCT_H_

// NOTE(review): the header names were stripped in transport; size_t and
// int32_t below require these two — confirm against the original file.
#include <stddef.h>
#include <stdint.h>

namespace tflite {

// This is an experimental feature and subjected to change.
// More description is available at
// tensorflow/lite/micro/docs/offline_memory_plan.md.

// Describes a buffer's layout inside an arena. This struct should be kept as
// small as possible for memory footprint sensitive applications and should use
// only primitive fields, making it easy to adjust offline.
struct BufferDescriptor {
  // Starting offset inside an arena for this buffer.
  // Offset is the minimum information needed for the buffer. The user knows
  // the model and the size of each buffer in order to lay out a valid buffer
  // plan.
  int32_t offset;
};

// A structure describing the lay out of buffers inside an arena.
struct BufferPlan {
  // Number of buffers described in this plan.
  int32_t buffer_count;

  // Each element describes one buffer.
  // Buffer index is implicit by the order of AddBuffer() call.
  // Specifically, indices of activation tensors are 0 ... N-1 where N is the
  // number of activation tensors.
  // The rest are based on the order of OP requests.
  //
  // This is a flexible array member and should ideally be
  // `BufferDescriptor buffer_plan_entries[];`. However, in order to support a
  // variety of compilers (and without needing to add ifdef's), we are
  // implementing the flexible array member with an array of length 1 as the
  // last member of the struct. When the size of a BufferPlan is needed, use
  // the provided SizeOfBufferPlan(buffer_count) that accounts for this
  // implementation caveat.
  BufferDescriptor buffer_plan_entries[1];
};

// Returns size of a BufferPlan given a buffer count. This size is compile time
// known if buffer_count is a compile time constant.
constexpr size_t SizeOfBufferPlan(int32_t buffer_count) {
  // Minus 1 because a BufferPlan struct has a BufferDescriptor already.
  // Clamp at 0 to provide a lower bound for the corner case of
  // buffer_count = 0. (Inlined the max so this header stays self-contained.)
  return sizeof(BufferPlan) +
         sizeof(BufferDescriptor) *
             static_cast<size_t>(buffer_count > 1 ? buffer_count - 1 : 0);
}

}  // namespace tflite

#endif  // TENSORFLOW_LITE_MICRO_MEMORY_PLANNER_MEMORY_PLAN_STRUCT_H_

// ==== new file: tensorflow/lite/micro/memory_planner/micro_memory_planner.h
// Apache 2.0 license header (continues in the next chunk):
// Copyright 2019 The TensorFlow Authors. All Rights Reserved.
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
+==============================================================================*/ + +#ifndef TENSORFLOW_LITE_MICRO_MICRO_MEMORY_PLANNER_MEMORY_PLANNER_H_ +#define TENSORFLOW_LITE_MICRO_MICRO_MEMORY_PLANNER_MEMORY_PLANNER_H_ + +#include "tensorflow/lite/c/common.h" + +namespace tflite { + +// Interface class for planning the layout of memory buffers during the +// execution of a graph. +// It's designed to be used by a client that iterates in any order through the +// buffers it wants to lay out, and then calls the getter functions for +// information about the calculated layout. For example: +// +// SomeMemoryPlanner planner; +// planner.AddBuffer(100, 0, 1); // Buffer 0 +// planner.AddBuffer(50, 2, 3); // Buffer 1 +// planner.AddBuffer(50, 2, 3); // Buffer 2 +// +// int offset0; +// TF_EXPECT_OK(planner.GetOffsetForBuffer(0, &offset0)); +// int offset1; +// TF_EXPECT_OK(planner.GetOffsetForBuffer(1, &offset1)); +// int offset2; +// TF_EXPECT_OK(planner.GetOffsetForBuffer(2, &offset2)); +// const int arena_size_needed = planner.GetMaximumMemorySize(); +// +// The goal is for applications to be able to experiment with different layout +// strategies without changing their client code, by swapping out classes that +// implement this interface.= +class MicroMemoryPlanner { + public: + MicroMemoryPlanner() {} + virtual ~MicroMemoryPlanner() {} + + // Pass information about a buffer's size and lifetime to the layout + // algorithm. The order this is called implicitly assigns an index to the + // result, so the buffer information that's passed into the N-th call of + // this method will be used as the buffer_index argument to + // GetOffsetForBuffer(). + virtual TfLiteStatus AddBuffer(int size, int first_time_used, + int last_time_used) = 0; + + // Record details of an offline planned buffer offset we want to place. + // offline_offset is the buffer offset from the start of the arena. + // This is to support offline memory planning from the flatbuffer metadata. 
+ // By default, it returns an error. + virtual TfLiteStatus AddBuffer(int size, int first_time_used, + int last_time_used, int offline_offset) { + return kTfLiteError; + } + + // The largest contiguous block of memory that's needed to hold the layout. + virtual size_t GetMaximumMemorySize() = 0; + // How many buffers have been added to the planner. + virtual int GetBufferCount() = 0; + // Calculated layout offset for the N-th buffer added to the planner. + virtual TfLiteStatus GetOffsetForBuffer(int buffer_index, int* offset) = 0; + + // Provides the scratch buffer in case that the memory planner needs it. + // The lifetime of scratch buffers lifetime lasts until the static memory plan + // is committed. + // The default implementation is for the memory planner that does not need + // scratch buffer and simply returns ok. + virtual TfLiteStatus Init(unsigned char* scratch_buffer, + int scratch_buffer_size) { + return kTfLiteOk; + } + + virtual void PrintMemoryPlan() { + // Default does nothing. + } +}; + +} // namespace tflite + +#endif // TENSORFLOW_LITE_MICRO_MICRO_MEMORY_PLANNER_MEMORY_PLANNER_H_ diff --git a/tensorflow/lite/micro/memory_planner/non_persistent_buffer_planner_shim.cc b/tensorflow/lite/micro/memory_planner/non_persistent_buffer_planner_shim.cc new file mode 100644 index 0000000..9bcb80c --- /dev/null +++ b/tensorflow/lite/micro/memory_planner/non_persistent_buffer_planner_shim.cc @@ -0,0 +1,66 @@ +/* Copyright 2021 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#include "tensorflow/lite/micro/memory_planner/non_persistent_buffer_planner_shim.h" + +#include "tensorflow/lite/micro/micro_log.h" + +namespace tflite { + +NonPersistentMemoryPlannerShim::NonPersistentMemoryPlannerShim( + const BufferPlan* buffer_plan) + : buffer_plan_(buffer_plan), buffer_request_count_(0) {} + +NonPersistentMemoryPlannerShim::~NonPersistentMemoryPlannerShim() {} + +TfLiteStatus NonPersistentMemoryPlannerShim::AddBuffer(int size, + int first_time_used, + int last_time_used) { + buffer_request_count_++; + if (buffer_request_count_ > buffer_plan_->buffer_count) { + MicroPrintf( + "Attempting to add buffer %d, but only %d buffers in given buffer " + "plan.", + buffer_request_count_, buffer_plan_->buffer_count); + return kTfLiteError; + } + return kTfLiteOk; +} + +size_t NonPersistentMemoryPlannerShim::GetMaximumMemorySize() { + // Simply return 0 to let the framework accept this memory plan + // because the client ensure validity of the memory plan. + return 0; +} + +// How many buffers are in the given memory plan. 
+int NonPersistentMemoryPlannerShim::GetBufferCount() { + return buffer_plan_->buffer_count; +} + +TfLiteStatus NonPersistentMemoryPlannerShim::GetOffsetForBuffer( + int buffer_request_index, int* offset) { + if (buffer_request_index >= buffer_plan_->buffer_count) { + MicroPrintf( + "Attempting to get offset for buffer %d, but only %d buffers in given " + "buffer plan.", + buffer_request_index, buffer_plan_->buffer_count); + return kTfLiteError; + } + *offset = buffer_plan_->buffer_plan_entries[buffer_request_index].offset; + return kTfLiteOk; +} + +} // namespace tflite diff --git a/tensorflow/lite/micro/memory_planner/non_persistent_buffer_planner_shim.h b/tensorflow/lite/micro/memory_planner/non_persistent_buffer_planner_shim.h new file mode 100644 index 0000000..8f9bb26 --- /dev/null +++ b/tensorflow/lite/micro/memory_planner/non_persistent_buffer_planner_shim.h @@ -0,0 +1,129 @@ +/* Copyright 2021 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/ + +#ifndef TENSORFLOW_LITE_MICRO_MEMORY_PLANNER_NON_PERSISTENT_MEMORY_PLANNER_SHIM_H__ +#define TENSORFLOW_LITE_MICRO_MEMORY_PLANNER_NON_PERSISTENT_MEMORY_PLANNER_SHIM_H__ + +#include "tensorflow/lite/micro/compatibility.h" +#include "tensorflow/lite/micro/memory_planner/memory_plan_struct.h" +#include "tensorflow/lite/micro/memory_planner/micro_memory_planner.h" + +namespace tflite { + +/* This is an experimental feature and subjected to change. + * +The NonPersistentMemoryPlannerShim enables TFLM to work with an external tooling +that can plan the offset of each non persistent buffer for the Model within the +TFLM arena. + +If the NonPersistentMemoryPlannerShim is used, then the final binary does not +have any of the symbols associated with the GreedyMemoryPlanner which results in +a reduced memory footprint. + +Additionally, the offline planning of the non-persistent buffers can be used to +have a more efficient utilization compared to the GreedyMemoryPlanner. 
+ +For example, consider the following hypothetical model: + +A1(400) A2(401) +──┬─────────┐ ┌─────────── + │ │ │ + │ │ │ + │ ▼ ▼ + │ ┌────────┐ + │ │ OP1 │ + │ └───┬────┘ A4(201) + │ A3(10) │ │ + │ │ │ + │ │ │ + │ ┌───┴────┐ │ + │ │ OP2 │◄────────┤ + │ └───┬────┘ │ + │ A5(11) │ A6(202) │ + │ │ │ │ + │ ▼ │ │ + │ ┌────────┐ │ │ + │ │ OP3 │◄─┘ │ + │ └───┬────┘ │ + │ │ A8(200) │ + │ A7(12) │ │ │ + │ │ │ │ + │ ┌───┴────┐◄──┘ │ + └──────►│ OP4 │ │ + └───┬────┘◄────────┘ + │ + A9(13) │ + ▼ + +The GreedyMemoryPlanner will give the following memory layout that requires 1012 +bytes of scratch arena size: + +┌─────────────────────────────────────────┬──────────────────────────┬────────┬───────┐ +│ A2(401) │ A1(400) │ A4(201)│ +A3(10)│ +└─────────────────────────────────────────┴──────────────────────────┴────────┴───────┘ + +┌───────────┬──────┬──────┐ +│ A6(202) │A5(11)│A7(12)│ +└───────────┴──────┴──────┘ + +┌──────────┬───────┐ +│ A8(200) │A9(13) │ +└──────────┴───────┘ + +But a more efficient offline memory plan that requires only 826 bytes of scratch +arena size can be + +┌──────────────────────────────────────┬─────────────────────────────┬───────┬──────┐ +│ A1(400) │ A2(401) │ +A3(10)│A5(11)│ +└──────────────────────────────────────┴─────────────────────────────┴───────┴──────┘ + + ┌────────────────┬────────────┬────────┬───────┐ + │A4(201) │ A8(200) │A9(13) +│A7(12) │ └────────────────┴────────────┴────────┴───────┘ + + ┌─────────────┐ + │ A6(202) │ + └─────────────┘ + +*/ +class NonPersistentMemoryPlannerShim : public MicroMemoryPlanner { + public: + // Does not take ownership of buffer_plan, which must refer to a valid + // BufferPlan that outlives this object. 
+ explicit NonPersistentMemoryPlannerShim(const BufferPlan* buffer_plan); + ~NonPersistentMemoryPlannerShim() override; + + TfLiteStatus GetOffsetForBuffer(int buffer_request_index, + int* offset) override; + + TfLiteStatus AddBuffer(int size, int first_time_used, + int last_time_used) override; + size_t GetMaximumMemorySize() override; + int GetBufferCount() override; + + private: + const BufferPlan* buffer_plan_; // not owned, can't be null + + // The number of buffers requested so far. Used for error checking. + int buffer_request_count_; + + TF_LITE_REMOVE_VIRTUAL_DELETE +}; + +} // namespace tflite + +#endif // TENSORFLOW_LITE_MICRO_MEMORY_PLANNER_NON_PERSISTENT_MEMORY_PLANNER_SHIM_H__ diff --git a/tensorflow/lite/micro/memory_planner/non_persistent_buffer_planner_shim_test.cc b/tensorflow/lite/micro/memory_planner/non_persistent_buffer_planner_shim_test.cc new file mode 100644 index 0000000..5ccd469 --- /dev/null +++ b/tensorflow/lite/micro/memory_planner/non_persistent_buffer_planner_shim_test.cc @@ -0,0 +1,103 @@ +/* Copyright 2021 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/ + +#include "tensorflow/lite/micro/memory_planner/non_persistent_buffer_planner_shim.h" + +#include "tensorflow/lite/micro/testing/micro_test.h" + +namespace { +constexpr int32_t kBufferCnt = 2; +constexpr int32_t kBuffer0Offset = 0; +constexpr int32_t kBuffer1Offset = 10; + +// Our c++ convention disallow us to use designated initializers which would +// have simplify the below code to a more readable kBufferPlan = { +// .buffer_count = 2, +// .buffer_plan_entries = { +// [0] = { .offset = 0 }, +// [1] = { .offset = 10} +// } +// }; +tflite::BufferPlan* CreateBufferPlan() { + // Some targets do not support dynamic memory (i.e., no malloc or new), thus, + // the test need to place non-transitent memories in static variables. This is + // safe because tests are guarateed to run serially. + static int8_t buffer_plan_buffer[tflite::SizeOfBufferPlan(kBufferCnt)]; + tflite::BufferPlan* buffer_plan_ptr = + reinterpret_cast(buffer_plan_buffer); + buffer_plan_ptr->buffer_count = kBufferCnt; + buffer_plan_ptr->buffer_plan_entries[0].offset = kBuffer0Offset; + buffer_plan_ptr->buffer_plan_entries[1].offset = kBuffer1Offset; + return buffer_plan_ptr; +} + +} // namespace + +TF_LITE_MICRO_TESTS_BEGIN + +TF_LITE_MICRO_TEST(TestGetOffsetForBuffer) { + tflite::NonPersistentMemoryPlannerShim planner(CreateBufferPlan()); + + int offset0 = -1; + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, planner.GetOffsetForBuffer(0, &offset0)); + TF_LITE_MICRO_EXPECT_EQ(kBuffer0Offset, offset0); + + int offset1 = -1; + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, planner.GetOffsetForBuffer(1, &offset1)); + TF_LITE_MICRO_EXPECT_EQ(kBuffer1Offset, offset1); +} + +TF_LITE_MICRO_TEST(TestErrorGetOffsetForBuffer) { + tflite::NonPersistentMemoryPlannerShim planner(CreateBufferPlan()); + + int offset = -1; + TF_LITE_MICRO_EXPECT_EQ(kTfLiteError, + planner.GetOffsetForBuffer(kBufferCnt, &offset)); +} + +TF_LITE_MICRO_TEST(TestAddBufferSuccess) 
{ + tflite::NonPersistentMemoryPlannerShim planner(CreateBufferPlan()); + + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, planner.AddBuffer(/*size=*/10, + /*first_time_used=*/0, + /*last_time_used=*/1)); + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, planner.AddBuffer(/*size=*/20, + /*first_time_used=*/0, + /*last_time_used=*/1)); +} + +TF_LITE_MICRO_TEST(TestAddBufferFailWhenExceedRange) { + tflite::NonPersistentMemoryPlannerShim planner(CreateBufferPlan()); + + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, planner.AddBuffer(/*size=*/10, + /*first_time_used=*/0, + /*last_time_used=*/1)); + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, planner.AddBuffer(/*size=*/20, + /*first_time_used=*/0, + /*last_time_used=*/1)); + TF_LITE_MICRO_EXPECT_EQ( + kTfLiteError, + planner.AddBuffer(/*size=*/10, + /*first_time_used=*/0, /*last_time_used=*/1)); +} + +TF_LITE_MICRO_TEST(TestBasics) { + tflite::NonPersistentMemoryPlannerShim planner(CreateBufferPlan()); + + TF_LITE_MICRO_EXPECT_EQ(static_cast(0), + planner.GetMaximumMemorySize()); +} + +TF_LITE_MICRO_TESTS_END diff --git a/tensorflow/lite/micro/micro_allocation_info.cc b/tensorflow/lite/micro/micro_allocation_info.cc new file mode 100644 index 0000000..a89a5e6 --- /dev/null +++ b/tensorflow/lite/micro/micro_allocation_info.cc @@ -0,0 +1,375 @@ +/* Copyright 2023 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/ + +#include "tensorflow/lite/micro/micro_allocation_info.h" + +#include + +#include "tensorflow/lite/c/c_api_types.h" +#include "tensorflow/lite/kernels/internal/compatibility.h" +#include "tensorflow/lite/kernels/kernel_util.h" +#include "tensorflow/lite/micro/memory_helpers.h" +#include "tensorflow/lite/micro/memory_planner/greedy_memory_planner.h" +#include "tensorflow/lite/micro/micro_log.h" + +namespace tflite { + +namespace { +constexpr char kOfflineMemAllocMetadata[] = "OfflineMemoryAllocation"; +constexpr int kUninitializedLifetime = -1; +} // namespace + +// Mark the given Allocation info as first created at the specified allocation +// scope count. Only the first creation must be recorded since the allocation +// scope count monotonically increases throughout the lifetime marking process. +void AllocationInfoBuilder::UpdateFirstCreated(AllocationInfo* current, + int allocation_scope_count) { + TFLITE_DCHECK(current->first_created <= allocation_scope_count); + if (current->first_created == kUninitializedLifetime) { + current->first_created = allocation_scope_count; + } +} + +// Mark the given AllocationInfo as last used at the specified allocation scope +// count. Update the last used marker every time, since the allocation scope +// count monotonically increases through the lifetime marking process. 
+void AllocationInfoBuilder::UpdateLastUsed(AllocationInfo* current, + int allocation_scope_count) { + TFLITE_DCHECK(current->last_used <= allocation_scope_count); + current->last_used = allocation_scope_count; +} + +TfLiteStatus AllocationInfoBuilder::MarkSubgraphLifetimesIfNecessary( + const Operator* op, internal::ScratchBufferRequest* scratch_buffer_requests, + ScratchBufferHandle* scratch_buffer_handles, + SubgraphAllocations* allocations) { + int first_subgraph_index = -1; + int second_subgraph_index = -1; + const OperatorCode* opcode = + model_->operator_codes()->Get(op->opcode_index()); + switch (opcode->builtin_code()) { + case BuiltinOperator_IF: { + first_subgraph_index = + op->builtin_options_as_IfOptions()->then_subgraph_index(); + second_subgraph_index = + op->builtin_options_as_IfOptions()->else_subgraph_index(); + break; + } + case BuiltinOperator_CALL_ONCE: { + first_subgraph_index = + op->builtin_options_as_CallOnceOptions()->init_subgraph_index(); + break; + } + case BuiltinOperator_WHILE: { + first_subgraph_index = + op->builtin_options_as_WhileOptions()->cond_subgraph_index(); + second_subgraph_index = + op->builtin_options_as_WhileOptions()->body_subgraph_index(); + break; + } + default: { + break; + } + } + if (first_subgraph_index != -1) { + // Enter a new allocation scope for each subgraph. + allocation_scope_count_++; + TF_LITE_ENSURE_STATUS( + MarkAllocationLifetimes(first_subgraph_index, scratch_buffer_requests, + scratch_buffer_handles, allocations)); + } + if (second_subgraph_index != -1) { + // Enter a new allocation scope for each subgraph. 
+ allocation_scope_count_++; + TF_LITE_ENSURE_STATUS( + MarkAllocationLifetimes(second_subgraph_index, scratch_buffer_requests, + scratch_buffer_handles, allocations)); + } + return kTfLiteOk; +} + +TfLiteStatus AllocationInfoBuilder::CreateAllocationInfo( + int scratch_buffer_request_count) { + size_t subgraph_offsets_length = model_->subgraphs()->size() * sizeof(size_t); + info_.subgraph_offsets = + reinterpret_cast(non_persistent_allocator_->AllocateTemp( + subgraph_offsets_length, alignof(size_t))); + if (info_.subgraph_offsets == nullptr) { + MicroPrintf( + "Failed to allocate memory for memory planning, %d bytes required", + subgraph_offsets_length); + return kTfLiteError; + } + size_t tensor_count = 0; + for (size_t subgraph_idx = 0; subgraph_idx < model_->subgraphs()->size(); + subgraph_idx++) { + // Add all tensors in each subgraph to the AllocationInfo array. Even weight + // tensors are added but marked with needs_allocating = false. Including all + // tensors in the graph here simplifies logic. + info_.subgraph_offsets[subgraph_idx] = tensor_count; + tensor_count += model_->subgraphs()->Get(subgraph_idx)->tensors()->size(); + } + info_.tensor_count = tensor_count; + + // Scratch buffer allocations follow tensor allocations, so the scratch offset + // is equal to the number of tensor allocations. + info_.scratch_offset = tensor_count; + info_.allocation_info_count = tensor_count + scratch_buffer_request_count; + info_.scratch_buffer_count = scratch_buffer_request_count; + size_t bytes = sizeof(AllocationInfo) * info_.allocation_info_count; + + // Allocate an array of AllocationInfo structs from the temp section. This + // struct will be used by AllocationInfoBuilder to find buffer usage. 
+ info_.allocation_info = reinterpret_cast( + non_persistent_allocator_->AllocateTemp(bytes, alignof(AllocationInfo))); + if (info_.allocation_info == nullptr) { + MicroPrintf( + "Failed to allocate memory for memory planning, %d bytes required", + bytes); + return kTfLiteError; + } + return kTfLiteOk; +} + +TfLiteStatus AllocationInfoBuilder::FreeAllocationInfo() { + non_persistent_allocator_->DeallocateTemp( + reinterpret_cast(info_.allocation_info)); + non_persistent_allocator_->DeallocateTemp( + reinterpret_cast(info_.subgraph_offsets)); + return kTfLiteOk; +} + +TfLiteStatus AllocationInfoBuilder::ValidateSubgraph( + const SubGraph* subgraph, TfLiteEvalTensor* eval_tensors) { + uint32_t operators_size = NumSubgraphOperators(subgraph); + + for (uint32_t i = 0; i < operators_size; i++) { + const auto op = subgraph->operators()->Get(i); + for (size_t n = 0; + op->intermediates() != nullptr && n < op->intermediates()->size(); + n++) { + const int tensor_index = op->intermediates()->Get(n); + size_t tensor_size = -1; + TF_LITE_ENSURE_STATUS(TfLiteEvalTensorByteLength( + &eval_tensors[tensor_index], &tensor_size)); + if (tensor_size != 0) { + MicroPrintf( + "Does not support intermediate tensor with non-zero size: %d", + tensor_size); + return kTfLiteError; + } + } + } + return kTfLiteOk; +} + +TfLiteStatus AllocationInfoBuilder::InitializeAllocationInfo( + const int32_t* offline_offsets, SubgraphAllocations* allocations) { + AllocationInfo* allocation_info = info_.allocation_info; + // Initialize allocation info for every tensor in every subgraph. + int offline_index = 0; + for (size_t subgraph_idx = 0; subgraph_idx < model_->subgraphs()->size(); + subgraph_idx++) { + const SubGraph* subgraph = model_->subgraphs()->Get(subgraph_idx); + TfLiteEvalTensor* eval_tensors = allocations[subgraph_idx].tensors; + AllocationInfo* subgraph_allocation_info = + &allocation_info[info_.subgraph_offsets[subgraph_idx]]; + + // Ensure constraints are met. 
+ TF_LITE_ENSURE_STATUS(ValidateSubgraph(subgraph, eval_tensors)); + + for (size_t i = 0; i < subgraph->tensors()->size(); ++i) { + AllocationInfo* current = &subgraph_allocation_info[i]; + current->output_ptr = &(eval_tensors[i].data.data); + + TF_LITE_ENSURE_STATUS( + TfLiteEvalTensorByteLength(&eval_tensors[i], ¤t->bytes)); + + current->first_created = kUninitializedLifetime; + current->last_used = kUninitializedLifetime; + current->needs_allocating = + (eval_tensors[i].data.data == nullptr) && + (!subgraph->tensors()->Get(i)->is_variable()) && + (current->bytes != 0); + if (offline_offsets) { + current->offline_offset = offline_offsets[offline_index++]; + + // Mark offline planned variable tensors so they can get an offline + // offset and be handled offline. + if (subgraph->tensors()->Get(i)->is_variable() && + current->offline_offset != kOnlinePlannedBuffer) { + current->needs_allocating = true; + } + } else { + current->offline_offset = kOnlinePlannedBuffer; + } + } + } + // Initialize allocation info for every scratch buffer. + AllocationInfo* scratch_allocation_info = + &allocation_info[info_.scratch_offset]; + for (size_t i = 0; i < info_.scratch_buffer_count; i++) { + AllocationInfo* current = &scratch_allocation_info[i]; + current->first_created = kUninitializedLifetime; + current->last_used = kUninitializedLifetime; + current->needs_allocating = true; + current->offline_offset = kOnlinePlannedBuffer; + } + return kTfLiteOk; +} + +TfLiteStatus AllocationInfoBuilder::MarkAllocationLifetimes( + int subgraph_idx, internal::ScratchBufferRequest* scratch_buffer_requests, + ScratchBufferHandle* scratch_buffer_handles, + SubgraphAllocations* allocations) { + const SubGraph* subgraph = model_->subgraphs()->Get(subgraph_idx); + + AllocationInfo* allocation_info = info_.allocation_info; + // Each subgraph's tensor allocations are in a contiguous block starting at + // subgraph_offsets_[subgraph index] with one entry per tensor. 
+ AllocationInfo* subgraph_allocation_info = + &allocation_info[info_.subgraph_offsets[subgraph_idx]]; + + uint32_t operators_size = NumSubgraphOperators(subgraph); + // Mark all inputs as created at the start of the subgraph invocation. + for (size_t i = 0; + subgraph->inputs() != nullptr && i < subgraph->inputs()->size(); ++i) { + const int tensor_index = subgraph->inputs()->Get(i); + AllocationInfo* current = &subgraph_allocation_info[tensor_index]; + UpdateFirstCreated(current, allocation_scope_count_); + // This will ensure that the tensors that are inputs to the subgraphs + // but not used in any ops also have a reasonable lifetime. + UpdateLastUsed(current, allocation_scope_count_); + } + + for (uint32_t i = 0; i < operators_size; i++) { + // Each operator has a new allocation scope. + allocation_scope_count_++; + const auto* op = subgraph->operators()->Get(i); + // Figure out when the first creation and use of each tensor is. + for (size_t n = 0; op->outputs() != nullptr && n < op->outputs()->size(); + ++n) { + const int tensor_index = op->outputs()->Get(n); + AllocationInfo* current = &subgraph_allocation_info[tensor_index]; + UpdateFirstCreated(current, allocation_scope_count_); + } + + // Keep track of scope count before any subgraphs, so that scratch buffers' + // lifetime within a control flow op properly overlaps with all subgraphs. + int start_allocation_scope_count = allocation_scope_count_; + + // Control flow operators can invoke subgraphs. Plan these subgraphs + // before continuing on to the rest of the graph. + MarkSubgraphLifetimesIfNecessary(op, scratch_buffer_requests, + scratch_buffer_handles, allocations); + + // Figure out when the last use of each tensor is. + for (size_t n = 0; op->inputs() != nullptr && n < op->inputs()->size(); + ++n) { + const int tensor_index = op->inputs()->Get(n); + // Optional bias tensors can have an index of -1 when they are omitted. 
+ if (tensor_index >= 0) { + AllocationInfo* current = &subgraph_allocation_info[tensor_index]; + // No need to update creation since it is either marked by the subgraph + // or producer op, or it is not part of the memory plan (weight, bias + // tensor). + UpdateLastUsed(current, allocation_scope_count_); + } + } + for (size_t n = 0; op->outputs() != nullptr && n < op->outputs()->size(); + ++n) { + const int tensor_index = op->outputs()->Get(n); + AllocationInfo* current = &subgraph_allocation_info[tensor_index]; + UpdateLastUsed(current, allocation_scope_count_); + } + + // Mark thse lifetime of scratch buffers belonging to the current node. This + // operation is O(N * M) where N is the total number of visited nodes and M + // is the total number of scratch buffers. + // TODO(b/217794030): Optimize this memory planning code. + AllocationInfo* scratch_allocation_info = + &allocation_info[info_.scratch_offset]; + for (size_t scratch_idx = 0; scratch_idx < info_.scratch_buffer_count; + scratch_idx++) { + internal::ScratchBufferRequest request = + scratch_buffer_requests[scratch_idx]; + AllocationInfo* current = &scratch_allocation_info[scratch_idx]; + if (request.node_idx == static_cast(i) && + request.subgraph_idx == static_cast(subgraph_idx)) { + ScratchBufferHandle* current_handle = + &(scratch_buffer_handles[scratch_idx]); + current->output_ptr = reinterpret_cast(¤t_handle->data); + current->bytes = request.bytes; + UpdateFirstCreated(current, start_allocation_scope_count); + UpdateLastUsed(current, allocation_scope_count_); + } + } + } + + // Mark all outputs as persistent to the end of the subgraph invocation. + for (size_t i = 0; + subgraph->outputs() != nullptr && i < subgraph->outputs()->size(); ++i) { + const int tensor_index = subgraph->outputs()->Get(i); + AllocationInfo* current = &subgraph_allocation_info[tensor_index]; + // Make sure to assign the First created value of the subgraph output + // This will handle the case where the subgraph is empty. 
This helps + // ensure all tensors have valid lifetimes before those are used by the + // memory planner. + UpdateFirstCreated(current, allocation_scope_count_); + UpdateLastUsed(current, allocation_scope_count_); + } + return kTfLiteOk; +} + +// Get offline tensors allocation plan. See +// micro/docs/memory_management.md for more info. +TfLiteStatus AllocationInfoBuilder::GetOfflinePlannedOffsets( + const int32_t** offline_planner_offsets) { + if (model_->metadata()) { + for (size_t i = 0; i < model_->metadata()->size(); ++i) { + auto metadata = model_->metadata()->Get(i); + + if (metadata->name()) { + const size_t metadata_name_size = metadata->name()->size(); + + if ((strncmp(metadata->name()->c_str(), kOfflineMemAllocMetadata, + std::min(metadata_name_size, + strlen(kOfflineMemAllocMetadata))) == 0) && + metadata_name_size == strlen(kOfflineMemAllocMetadata)) { + const flatbuffers::Vector>* buffers = + model_->buffers(); + auto* buffer = (*buffers)[metadata->buffer()]; + auto* array = buffer->data(); + const uint32_t* metadata_buffer = + reinterpret_cast(array->data()); + const size_t nbr_tensors = static_cast(metadata_buffer[2]); + *offline_planner_offsets = + reinterpret_cast(&metadata_buffer[3]); + + if (info_.tensor_count != nbr_tensors) { + MicroPrintf( + "Nbr of offline buffer offsets (%d) in metadata " + "not equal nbr tensors (%d)\n", + nbr_tensors, info_.tensor_count); + return kTfLiteError; + } + } + } + } + } + return kTfLiteOk; +} + +} // namespace tflite diff --git a/tensorflow/lite/micro/micro_allocation_info.h b/tensorflow/lite/micro/micro_allocation_info.h new file mode 100644 index 0000000..688d04e --- /dev/null +++ b/tensorflow/lite/micro/micro_allocation_info.h @@ -0,0 +1,138 @@ +/* Copyright 2022 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. 
+You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ +#ifndef TENSORFLOW_LITE_MICRO_MICRO_ALLOCATION_INFO_H_ +#define TENSORFLOW_LITE_MICRO_MICRO_ALLOCATION_INFO_H_ + +#include "tensorflow/lite/c/common.h" +#include "tensorflow/lite/micro/compatibility.h" +#include "tensorflow/lite/micro/flatbuffer_utils.h" +#include "tensorflow/lite/micro/micro_allocator.h" +#include "tensorflow/lite/schema/schema_generated.h" + +namespace tflite { + +// Used to hold information used during allocation calculations. +struct AllocationInfo { + size_t bytes; + void** output_ptr; + int first_created; + int last_used; + int32_t offline_offset; + bool needs_allocating; +}; + +// Used to hold the allocation info list and related metadata for the entire +// graph (including subgraphs). Since all subgraphs are planned together, the +// allocation info list contains allocations for all subgraphs. Track the offset +// into this list for each subgraph then reserve space to track all allocations. +// +// The AllocationInfo list is a contiguous list of allocations across all +// subgraphs and scratch buffers. Each element here is marked as +// st. The following is a possible +// AllocationInfo list: +// [s0t0, s0t1, s1t0, s2t1, s1t2, s3t0, s3t1, scratch0, scratch1, scratch2] +// +// For this example, the subgraph offsets would be [0, 2, 5] and the scratch +// offset would be 7. 
+struct GraphAllocationInfo { + AllocationInfo* allocation_info; + size_t allocation_info_count; + size_t* subgraph_offsets; + size_t scratch_offset; + size_t tensor_count; + size_t scratch_buffer_count; +}; + +// A helper class to construct AllocationInfo array. This array contains the +// lifetime of tensors / scratch_buffer and will be used to calculate the memory +// plan. Methods need to be called in order from `Create`, Init`, `Add*`, to +// `Finish`. +class AllocationInfoBuilder { + public: + AllocationInfoBuilder(const Model* model, + INonPersistentBufferAllocator* non_persistent_allocator) + : model_(model), non_persistent_allocator_(non_persistent_allocator) {} + + // Check if model contains offline planned buffer offsets. + // - If there's no metadata available, offline_planner_offsets is not set + // - If there's metadata available, offline_planner_offsets will point to the + // first offset in the metadata buffer list. + TfLiteStatus GetOfflinePlannedOffsets( + const int32_t** offline_planner_offsets); + + // Allocate memory for the allocation info array as well as offsets into that + // array for each subgraph. + TfLiteStatus CreateAllocationInfo(int scratch_buffer_request_count); + + // Release memory used for the allocation info array. + TfLiteStatus FreeAllocationInfo(); + + // Initialize AllocationInfo for all tensors and scratch buffers in the graph. + TfLiteStatus InitializeAllocationInfo(const int32_t* offline_offsets, + SubgraphAllocations* allocations); + + // Mark the scope of each tensor and scratch buffer across the graph. Enter + // all possible subgraphs invoked by each control flow operator. This method + // marks the maximum lifetime of each buffer so that tensors are correctly + // planned for all valid invocation flows. 
+ TfLiteStatus MarkAllocationLifetimes( + int subgraph_idx, internal::ScratchBufferRequest* scratch_buffer_request, + ScratchBufferHandle* scratch_buffer_handles, + SubgraphAllocations* allocations); + + // Identify control flow operators and recursively mark all subgraphs which + // that operator can invoke. The lifetime of all tensors within a subgraph + // can only be extended. The order of subgraph invocation does not matter + // since subgraphs within the same control flow operator are executed + // within their own allocation scope (planned buffers in a subgraph cannot + // persist beyond the end of that subgraph's invocation). + TfLiteStatus MarkSubgraphLifetimesIfNecessary( + const Operator* op, + internal::ScratchBufferRequest* scratch_buffer_requests, + ScratchBufferHandle* scratch_buffer_handles, + SubgraphAllocations* allocations); + + // Returns the number of allocations. + int AllocationCount() const { return info_.allocation_info_count; } + + // Returns a pointer to the built AllocationInfo array. + AllocationInfo* Finish() const { return info_.allocation_info; } + + private: + // Mark the given Allocation info as first created at the specified allocation + // scope count. Only the first creation must be recorded since the allocation + // scope count monotonically increases throughout the lifetime marking + // process. + void UpdateFirstCreated(AllocationInfo* current, int allocation_scope_count); + + // Mark the given AllocationInfo as last used at the specified allocation + // scope + // count. Update the last used marker every time, since the allocation scope + // count monotonically increases through the lifetime marking process. + void UpdateLastUsed(AllocationInfo* current, int allocation_scope_count); + + // Validate if a subgraph satisfies assumptions. 
+ TfLiteStatus ValidateSubgraph(const SubGraph* subgraph, + TfLiteEvalTensor* eval_tensors); + + const tflite::Model* model_ = nullptr; + INonPersistentBufferAllocator* non_persistent_allocator_ = nullptr; + GraphAllocationInfo info_; + int allocation_scope_count_ = 0; +}; + +} // namespace tflite + +#endif // TENSORFLOW_LITE_MICRO_MICRO_ALLOCATION_INFO_H_ diff --git a/tensorflow/lite/micro/micro_allocation_info_test.cc b/tensorflow/lite/micro/micro_allocation_info_test.cc new file mode 100644 index 0000000..5dca2b6 --- /dev/null +++ b/tensorflow/lite/micro/micro_allocation_info_test.cc @@ -0,0 +1,194 @@ +/* Copyright 2022 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/ + +#include "tensorflow/lite/micro/micro_allocation_info.h" + +#include "tensorflow/lite/micro/arena_allocator/single_arena_buffer_allocator.h" +#include "tensorflow/lite/micro/test_helpers.h" +#include "tensorflow/lite/micro/testing/micro_test.h" + +TF_LITE_MICRO_TESTS_BEGIN + +TF_LITE_MICRO_TEST(TestSingleSubgraph) { + constexpr int kArenaSize = 1024; + uint8_t arena[kArenaSize]; + const tflite::Model* model = tflite::testing::GetSimpleMockModel(); + tflite::SingleArenaBufferAllocator allocator(arena, kArenaSize); + tflite::AllocationInfoBuilder builder(model, &allocator); + builder.CreateAllocationInfo(0); + tflite::MicroAllocator* micro_allocator = + tflite::MicroAllocator::Create(arena, kArenaSize); + tflite::SubgraphAllocations* subgraph_allocations = + micro_allocator->StartModelAllocation(model); + builder.InitializeAllocationInfo(nullptr, subgraph_allocations); + builder.MarkAllocationLifetimes(0, nullptr, nullptr, subgraph_allocations); + TF_LITE_MICRO_EXPECT_EQ(builder.AllocationCount(), 4); + tflite::AllocationInfo* allocation_info = builder.Finish(); + TF_LITE_MICRO_EXPECT_EQ(allocation_info[0].first_created, 0); + TF_LITE_MICRO_EXPECT_EQ(allocation_info[0].last_used, 2); + TF_LITE_MICRO_EXPECT_EQ(allocation_info[1].first_created, -1); + TF_LITE_MICRO_EXPECT_EQ(allocation_info[1].last_used, 2); + TF_LITE_MICRO_EXPECT_EQ(allocation_info[2].first_created, 1); + TF_LITE_MICRO_EXPECT_EQ(allocation_info[2].last_used, 2); + TF_LITE_MICRO_EXPECT_EQ(allocation_info[3].first_created, 2); + TF_LITE_MICRO_EXPECT_EQ(allocation_info[3].last_used, 2); +} + +TF_LITE_MICRO_TEST(TestSingleSubgraphWithIntermediates) { + constexpr int kArenaSize = 1024; + uint8_t arena[kArenaSize]; + const tflite::Model* model = tflite::testing::GetSimpleStatefulModel(); + tflite::SingleArenaBufferAllocator allocator(arena, kArenaSize); + tflite::AllocationInfoBuilder builder(model, &allocator); + 
builder.CreateAllocationInfo(0); + tflite::MicroAllocator* micro_allocator = + tflite::MicroAllocator::Create(arena, kArenaSize); + tflite::SubgraphAllocations* subgraph_allocations = + micro_allocator->StartModelAllocation(model); + builder.InitializeAllocationInfo(nullptr, subgraph_allocations); + builder.MarkAllocationLifetimes(0, nullptr, nullptr, subgraph_allocations); + TF_LITE_MICRO_EXPECT_EQ(builder.AllocationCount(), 4); + tflite::AllocationInfo* allocation_info = builder.Finish(); + TF_LITE_MICRO_EXPECT_EQ(allocation_info[0].first_created, 0); + TF_LITE_MICRO_EXPECT_EQ(allocation_info[0].last_used, 1); + TF_LITE_MICRO_EXPECT_EQ(allocation_info[0].needs_allocating, true); + TF_LITE_MICRO_EXPECT_EQ(allocation_info[1].first_created, 1); + TF_LITE_MICRO_EXPECT_EQ(allocation_info[1].last_used, 1); + TF_LITE_MICRO_EXPECT_EQ(allocation_info[1].needs_allocating, true); + TF_LITE_MICRO_EXPECT_EQ(allocation_info[2].first_created, 1); + TF_LITE_MICRO_EXPECT_EQ(allocation_info[2].last_used, 1); + TF_LITE_MICRO_EXPECT_EQ(allocation_info[2].needs_allocating, true); + TF_LITE_MICRO_EXPECT_EQ(allocation_info[3].first_created, -1); + TF_LITE_MICRO_EXPECT_EQ(allocation_info[3].last_used, -1); + TF_LITE_MICRO_EXPECT_EQ(allocation_info[3].needs_allocating, false); +} + +TF_LITE_MICRO_TEST(TestMultiSubgraphWithIf) { + constexpr int kArenaSize = 1024; + uint8_t arena[kArenaSize]; + const tflite::Model* model = + tflite::testing::GetSimpleModelWithSubgraphsAndIf(); + tflite::SingleArenaBufferAllocator allocator(arena, kArenaSize); + tflite::AllocationInfoBuilder builder(model, &allocator); + builder.CreateAllocationInfo(0); + tflite::MicroAllocator* micro_allocator = + tflite::MicroAllocator::Create(arena, kArenaSize); + tflite::SubgraphAllocations* subgraph_allocations = + micro_allocator->StartModelAllocation(model); + builder.InitializeAllocationInfo(nullptr, subgraph_allocations); + builder.MarkAllocationLifetimes(0, nullptr, nullptr, subgraph_allocations); + 
TF_LITE_MICRO_EXPECT_EQ(builder.AllocationCount(), 10); + tflite::AllocationInfo* allocation_info = builder.Finish(); + TF_LITE_MICRO_EXPECT_EQ(allocation_info[0].first_created, 0); + TF_LITE_MICRO_EXPECT_EQ(allocation_info[0].last_used, 5); + TF_LITE_MICRO_EXPECT_EQ(allocation_info[1].first_created, 0); + TF_LITE_MICRO_EXPECT_EQ(allocation_info[1].last_used, 5); + TF_LITE_MICRO_EXPECT_EQ(allocation_info[2].first_created, 0); + TF_LITE_MICRO_EXPECT_EQ(allocation_info[2].last_used, 5); + TF_LITE_MICRO_EXPECT_EQ(allocation_info[3].first_created, 1); + TF_LITE_MICRO_EXPECT_EQ(allocation_info[3].last_used, 5); + TF_LITE_MICRO_EXPECT_EQ(allocation_info[4].first_created, 2); + TF_LITE_MICRO_EXPECT_EQ(allocation_info[4].last_used, 3); + TF_LITE_MICRO_EXPECT_EQ(allocation_info[5].first_created, 2); + TF_LITE_MICRO_EXPECT_EQ(allocation_info[5].last_used, 3); + TF_LITE_MICRO_EXPECT_EQ(allocation_info[6].first_created, 3); + TF_LITE_MICRO_EXPECT_EQ(allocation_info[6].last_used, 3); + TF_LITE_MICRO_EXPECT_EQ(allocation_info[7].first_created, 4); + TF_LITE_MICRO_EXPECT_EQ(allocation_info[7].last_used, 5); + TF_LITE_MICRO_EXPECT_EQ(allocation_info[8].first_created, 4); + TF_LITE_MICRO_EXPECT_EQ(allocation_info[8].last_used, 5); + TF_LITE_MICRO_EXPECT_EQ(allocation_info[9].first_created, 5); + TF_LITE_MICRO_EXPECT_EQ(allocation_info[9].last_used, 5); +} + +TF_LITE_MICRO_TEST(TestMultiSubgraphWithIfAndEmptySubgraph) { + constexpr int kArenaSize = 1024; + uint8_t arena[kArenaSize]; + const tflite::Model* model = + tflite::testing::GetSimpleModelWithIfAndEmptySubgraph(); + tflite::SingleArenaBufferAllocator allocator(arena, kArenaSize); + tflite::AllocationInfoBuilder builder(model, &allocator); + builder.CreateAllocationInfo(0); + tflite::MicroAllocator* micro_allocator = + tflite::MicroAllocator::Create(arena, kArenaSize); + tflite::SubgraphAllocations* subgraph_allocations = + micro_allocator->StartModelAllocation(model); + builder.InitializeAllocationInfo(nullptr, 
subgraph_allocations); + builder.MarkAllocationLifetimes(0, nullptr, nullptr, subgraph_allocations); + TF_LITE_MICRO_EXPECT_EQ(builder.AllocationCount(), 10); + tflite::AllocationInfo* allocation_info = builder.Finish(); + TF_LITE_MICRO_EXPECT_EQ(allocation_info[0].first_created, 0); + TF_LITE_MICRO_EXPECT_EQ(allocation_info[0].last_used, 4); + TF_LITE_MICRO_EXPECT_EQ(allocation_info[1].first_created, 0); + TF_LITE_MICRO_EXPECT_EQ(allocation_info[1].last_used, 4); + TF_LITE_MICRO_EXPECT_EQ(allocation_info[2].first_created, 0); + TF_LITE_MICRO_EXPECT_EQ(allocation_info[2].last_used, 4); + TF_LITE_MICRO_EXPECT_EQ(allocation_info[3].first_created, 1); + TF_LITE_MICRO_EXPECT_EQ(allocation_info[3].last_used, 4); + TF_LITE_MICRO_EXPECT_EQ(allocation_info[4].first_created, 2); + TF_LITE_MICRO_EXPECT_EQ(allocation_info[4].last_used, 3); + TF_LITE_MICRO_EXPECT_EQ(allocation_info[5].first_created, 2); + TF_LITE_MICRO_EXPECT_EQ(allocation_info[5].last_used, 3); + TF_LITE_MICRO_EXPECT_EQ(allocation_info[6].first_created, 3); + TF_LITE_MICRO_EXPECT_EQ(allocation_info[6].last_used, 3); + TF_LITE_MICRO_EXPECT_EQ(allocation_info[7].first_created, 4); + TF_LITE_MICRO_EXPECT_EQ(allocation_info[7].last_used, 4); + TF_LITE_MICRO_EXPECT_EQ(allocation_info[8].first_created, 4); + TF_LITE_MICRO_EXPECT_EQ(allocation_info[8].last_used, 4); + TF_LITE_MICRO_EXPECT_EQ(allocation_info[9].first_created, 4); + TF_LITE_MICRO_EXPECT_EQ(allocation_info[9].last_used, 4); +} + +TF_LITE_MICRO_TEST(TestMultiSubgraphWithIfAndInputSubgraphOverlap) { + constexpr int kArenaSize = 2048; + uint8_t arena[kArenaSize]; + const tflite::Model* model = + tflite::testing::GetModelWithIfAndSubgraphInputTensorOverlap(); + tflite::SingleArenaBufferAllocator allocator(arena, kArenaSize); + tflite::AllocationInfoBuilder builder(model, &allocator); + builder.CreateAllocationInfo(0); + tflite::MicroAllocator* micro_allocator = + tflite::MicroAllocator::Create(arena, kArenaSize); + tflite::SubgraphAllocations* 
subgraph_allocations = + micro_allocator->StartModelAllocation(model); + builder.InitializeAllocationInfo(nullptr, subgraph_allocations); + builder.MarkAllocationLifetimes(0, nullptr, nullptr, subgraph_allocations); + TF_LITE_MICRO_EXPECT_EQ(builder.AllocationCount(), 11); + tflite::AllocationInfo* allocation_info = builder.Finish(); + TF_LITE_MICRO_EXPECT_EQ(allocation_info[0].first_created, 0); + TF_LITE_MICRO_EXPECT_EQ(allocation_info[0].last_used, 5); + TF_LITE_MICRO_EXPECT_EQ(allocation_info[1].first_created, 0); + TF_LITE_MICRO_EXPECT_EQ(allocation_info[1].last_used, 5); + TF_LITE_MICRO_EXPECT_EQ(allocation_info[2].first_created, 0); + TF_LITE_MICRO_EXPECT_EQ(allocation_info[2].last_used, 6); + TF_LITE_MICRO_EXPECT_EQ(allocation_info[3].first_created, 1); + TF_LITE_MICRO_EXPECT_EQ(allocation_info[3].last_used, 6); + TF_LITE_MICRO_EXPECT_EQ(allocation_info[4].first_created, 6); + TF_LITE_MICRO_EXPECT_EQ(allocation_info[4].last_used, 6); + TF_LITE_MICRO_EXPECT_EQ(allocation_info[5].first_created, 2); + TF_LITE_MICRO_EXPECT_EQ(allocation_info[5].last_used, 3); + TF_LITE_MICRO_EXPECT_EQ(allocation_info[6].first_created, 2); + TF_LITE_MICRO_EXPECT_EQ(allocation_info[6].last_used, 3); + TF_LITE_MICRO_EXPECT_EQ(allocation_info[7].first_created, 3); + TF_LITE_MICRO_EXPECT_EQ(allocation_info[7].last_used, 3); + TF_LITE_MICRO_EXPECT_EQ(allocation_info[8].first_created, 4); + TF_LITE_MICRO_EXPECT_EQ(allocation_info[8].last_used, 5); + TF_LITE_MICRO_EXPECT_EQ(allocation_info[9].first_created, 4); + TF_LITE_MICRO_EXPECT_EQ(allocation_info[9].last_used, 5); + TF_LITE_MICRO_EXPECT_EQ(allocation_info[10].first_created, 5); + TF_LITE_MICRO_EXPECT_EQ(allocation_info[10].last_used, 5); +} + +TF_LITE_MICRO_TESTS_END diff --git a/tensorflow/lite/micro/micro_allocator.cc b/tensorflow/lite/micro/micro_allocator.cc new file mode 100644 index 0000000..ba7cb66 --- /dev/null +++ b/tensorflow/lite/micro/micro_allocator.cc @@ -0,0 +1,951 @@ +/* Copyright 2023 The TensorFlow Authors. 
All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#include "tensorflow/lite/micro/micro_allocator.h" + +#include +#include + +#include "flatbuffers/flatbuffers.h" // from @flatbuffers +#include "tensorflow/lite/c/common.h" +#include "tensorflow/lite/kernels/internal/compatibility.h" +#include "tensorflow/lite/micro/arena_allocator/non_persistent_arena_buffer_allocator.h" +#include "tensorflow/lite/micro/arena_allocator/persistent_arena_buffer_allocator.h" +#include "tensorflow/lite/micro/arena_allocator/single_arena_buffer_allocator.h" +#include "tensorflow/lite/micro/compatibility.h" +#include "tensorflow/lite/micro/flatbuffer_utils.h" +#include "tensorflow/lite/micro/memory_helpers.h" +#include "tensorflow/lite/micro/memory_planner/greedy_memory_planner.h" +#include "tensorflow/lite/micro/memory_planner/micro_memory_planner.h" +#include "tensorflow/lite/micro/micro_allocation_info.h" +#include "tensorflow/lite/micro/micro_arena_constants.h" +#include "tensorflow/lite/micro/micro_log.h" +#include "tensorflow/lite/micro/tflite_bridge/flatbuffer_conversions_bridge.h" +#include "tensorflow/lite/schema/schema_generated.h" +#include "tensorflow/lite/schema/schema_utils.h" + +namespace tflite { + +namespace { + +// Maximum number of scratch buffer requests per operator. Operator kernels that +// request more than this value will receive an exception. 
+constexpr size_t kMaxScratchBuffersPerOp = 12; + +// Sentinel value used as a placeholder to mark a ScratchBufferRequest request +// needs a node id assignment. +constexpr int kUnassignedScratchBufferRequestIndex = -1; + +const TfLiteIntArray kZeroLengthIntArray = {}; + +class MicroBuiltinDataAllocator : public TfLiteBridgeBuiltinDataAllocator { + public: + explicit MicroBuiltinDataAllocator( + IPersistentBufferAllocator* persistent_allocator) + : persistent_allocator_(persistent_allocator) {} + + void* Allocate(size_t size, size_t alignment_hint) override { + return persistent_allocator_->AllocatePersistentBuffer(size, + alignment_hint); + } + void Deallocate(void* data) override { + // Do not deallocate, builtin data needs to be available for the life time + // of the model. + } + + TF_LITE_REMOVE_VIRTUAL_DELETE + + private: + IPersistentBufferAllocator* persistent_allocator_; +}; + +TfLiteStatus CreatePlan(MicroMemoryPlanner* planner, + const AllocationInfo* allocation_info, + size_t allocation_info_size) { + // Add the tensors to our allocation plan. + for (size_t i = 0; i < allocation_info_size; ++i) { + const AllocationInfo* current = &allocation_info[i]; + if (current->needs_allocating) { + size_t aligned_bytes_required = + AlignSizeUp(current->bytes, MicroArenaBufferAlignment()); + if (current->offline_offset == kOnlinePlannedBuffer) { + TF_LITE_ENSURE_STATUS(planner->AddBuffer(aligned_bytes_required, + current->first_created, + current->last_used)); + } else { + TF_LITE_ENSURE_STATUS( + planner->AddBuffer(aligned_bytes_required, current->first_created, + current->last_used, current->offline_offset)); + } + } + } + return kTfLiteOk; +} + +TfLiteStatus CommitPlan(MicroMemoryPlanner* planner, uint8_t* starting_point, + const AllocationInfo* allocation_info, + size_t allocation_info_size) { + // Figure out the actual memory addresses for each buffer, based on the plan. 
+ int planner_index = 0; + for (size_t i = 0; i < allocation_info_size; ++i) { + const AllocationInfo* current = &allocation_info[i]; + if (current->needs_allocating) { + int offset = -1; + TF_LITE_ENSURE_STATUS( + planner->GetOffsetForBuffer(planner_index, &offset)); + *current->output_ptr = reinterpret_cast(starting_point + offset); + ++planner_index; + } + } + return kTfLiteOk; +} + +IPersistentBufferAllocator* CreatePersistentArenaAllocator(uint8_t* buffer_head, + size_t buffer_size) { + // Align the actually used area by the tail because persistent buffer grows + // from the bottom to top. + uint8_t* aligned_buffer_tail = + AlignPointerDown(buffer_head + buffer_size, MicroArenaBufferAlignment()); + size_t aligned_buffer_size = aligned_buffer_tail - buffer_head; + PersistentArenaBufferAllocator tmp = + PersistentArenaBufferAllocator(buffer_head, aligned_buffer_size); + + // Allocate enough bytes from the buffer to create a + // SingleArenaBufferAllocator. The new instance will use the current adjusted + // tail buffer from the tmp allocator instance. + uint8_t* allocator_buffer = + tmp.AllocatePersistentBuffer(sizeof(PersistentArenaBufferAllocator), + alignof(PersistentArenaBufferAllocator)); + // Use the default copy constructor to populate internal states. + return new (allocator_buffer) PersistentArenaBufferAllocator(tmp); +} + +// NonPersistentBufferAllocator instance is created in the persistent buffer +// because it has to be persistent to keep track of the non-persistent buffer +// information. +INonPersistentBufferAllocator* CreateNonPersistentArenaAllocator( + uint8_t* buffer_head, size_t buffer_size, + IPersistentBufferAllocator* persistent_buffer_allocator) { + uint8_t* allocator_buffer = + persistent_buffer_allocator->AllocatePersistentBuffer( + sizeof(NonPersistentArenaBufferAllocator), + alignof(NonPersistentArenaBufferAllocator)); + // Align the actually used area by the head because persistent buffer grows + // from the head to bottom. 
+ uint8_t* aligned_buffer_head = + AlignPointerUp(buffer_head, MicroArenaBufferAlignment()); + size_t aligned_buffer_size = buffer_head + buffer_size - aligned_buffer_head; + + INonPersistentBufferAllocator* non_persistent_buffer_allocator = + new (allocator_buffer) NonPersistentArenaBufferAllocator( + aligned_buffer_head, aligned_buffer_size); + return non_persistent_buffer_allocator; +} + +} // namespace + +namespace internal { + +// Returns a pointer to any buffer associated with the flatbuffer tensor. Can +// return nullptr if no buffer is found. +void* GetFlatbufferTensorBuffer( + const tflite::Tensor& flatbuffer_tensor, + const flatbuffers::Vector>* buffers) { + // We need to figure out where the actual contents of this tensor are stored + // in memory. We'll check to see if there's a serialized buffer (pretty much + // the same as a constant op in TensorFlow) associated with this tensor first, + // and if there is update the runtime structure to point to its location in + // memory. + // First see if there's any buffer information in the serialized tensor. + // TODO(b/170379532): Add better unit tests to validate flatbuffer values. + void* out_buffer = nullptr; + if (auto* buffer = (*buffers)[flatbuffer_tensor.buffer()]) { + // If we've found a buffer, does it have any data? + if (auto* array = buffer->data()) { + // If it has any data, is the data size larger than zero? + if (array->size()) { + // We've found a buffer with valid data, so update the runtime tensor + // data structure to point to it. + out_buffer = const_cast(static_cast(array->data())); + } + } + // TODO(petewarden): It's not clear in what circumstances we could have a + // buffer in the serialized tensor, but it doesn't have any data in it. Is + // that a validly-generated file, and if so what does it mean, or is it an + // error condition? It would be good to tighten up the specification to make + // it less ambiguous. 
+ } + return out_buffer; +} + +TfLiteStatus InitializeTfLiteTensorFromFlatbuffer( + IPersistentBufferAllocator* persistent_buffer_allocator, + INonPersistentBufferAllocator* non_persistent_buffer_allocator, + bool allocate_temp, const tflite::Tensor& flatbuffer_tensor, + const flatbuffers::Vector>* buffers, + TfLiteTensor* result) { + TFLITE_DCHECK(result != nullptr); + + *result = {}; + // Make sure the serialized type is one we know how to deal with, and convert + // it from a flatbuffer enum into a constant used by the kernel C API. + TF_LITE_ENSURE_STATUS( + tflite::ConvertTensorType(flatbuffer_tensor.type(), &result->type)); + // Make sure we remember if the serialized tensor is designated as a variable. + result->is_variable = flatbuffer_tensor.is_variable(); + + result->data.data = GetFlatbufferTensorBuffer(flatbuffer_tensor, buffers); + + // TODO(petewarden): Some of these paths aren't getting enough testing + // coverage, so we should figure out some tests that exercise them. + if (result->data.data == nullptr) { + // The tensor contents haven't been set from a serialized buffer, so + // make a note that they will be allocated from memory. The actual + // allocation won't happen until later. + result->allocation_type = kTfLiteArenaRw; + } else { + // We set the data from a serialized buffer, so record tha. + result->allocation_type = kTfLiteMmapRo; + } + + // Figure out what the size in bytes of the buffer is and store it. + size_t type_size; + TF_LITE_ENSURE_STATUS( + BytesRequiredForTensor(flatbuffer_tensor, &result->bytes, &type_size)); + + if (flatbuffer_tensor.shape() == nullptr) { + // flatbuffer_tensor.shape() can return a nullptr in the case of a scalar + // tensor. + // TODO(b/188459715): figure out why const_cast is required here. + result->dims = const_cast(&kZeroLengthIntArray); + } else { + // TFLM doesn't allow reshaping the tensor which requires dynamic memory + // allocation so it is safe to drop the const qualifier. 
In the future, if + // we really want to update the tensor shape, we can always pass in a new + // TfLiteIntArray - especially we have to do so if the dimension is + result->dims = FlatBufferVectorToTfLiteTypeArray(flatbuffer_tensor.shape()); + } + + // Copy the quantization information from the serialized data. + const auto* src_quantization = flatbuffer_tensor.quantization(); + if (src_quantization && src_quantization->scale() && + (src_quantization->scale()->size() > 0) && + src_quantization->zero_point() && + (src_quantization->zero_point()->size() > 0)) { + // Always populate the TfLiteTensor.params field, even if there are + // per-channel quantization parameters. + result->params.scale = src_quantization->scale()->Get(0); + // Note that the zero_point field in the FlatBuffers schema is a 64-bit + // integer, but the zero_point field in the TfLiteQuantizationParams struct + // is a 32-bit integer. + result->params.zero_point = + static_cast(src_quantization->zero_point()->Get(0)); + + // Populate per-channel quantization params. + int channels = src_quantization->scale()->size(); + TfLiteAffineQuantization* quantization = + allocate_temp + ? reinterpret_cast( + non_persistent_buffer_allocator->AllocateTemp( + sizeof(TfLiteAffineQuantization), + alignof(TfLiteAffineQuantization))) + : reinterpret_cast( + persistent_buffer_allocator->AllocatePersistentBuffer( + sizeof(TfLiteAffineQuantization), + alignof(TfLiteAffineQuantization))); + if (quantization == nullptr) { + MicroPrintf("Unable to allocate TfLiteAffineQuantization.\n"); + return kTfLiteError; + } + + // TODO(b/153688719): Reduce tail allocation by using a global zero-point + // buffer. This value can not be reused from the flatbuffer since the + // zero_point is stored as a int64_t. + quantization->zero_point = + allocate_temp + ? 
reinterpret_cast( + non_persistent_buffer_allocator->AllocateTemp( + TfLiteIntArrayGetSizeInBytes(channels), + alignof(TfLiteIntArray))) + : reinterpret_cast( + persistent_buffer_allocator->AllocatePersistentBuffer( + TfLiteIntArrayGetSizeInBytes(channels), + alignof(TfLiteIntArray))); + if (quantization->zero_point == nullptr) { + MicroPrintf("Unable to allocate quantization->zero_point.\n"); + return kTfLiteError; + } + + quantization->scale = + FlatBufferVectorToTfLiteTypeArray(src_quantization->scale()); + + quantization->zero_point->size = channels; + int* zero_point_data = quantization->zero_point->data; + for (int i = 0; i < channels; i++) { + // As a space-saving optimization, zero point arrays for weights can be + // reduced to a single value, since all zero points for weights are 0. + zero_point_data[i] = src_quantization->zero_point()->size() == + src_quantization->scale()->size() + ? src_quantization->zero_point()->Get(i) + : src_quantization->zero_point()->Get(0); + } + // TODO(rocky): Need to add a micro_allocator test case that fails when + // this is not copied: + quantization->quantized_dimension = src_quantization->quantized_dimension(); + + result->quantization = {kTfLiteAffineQuantization, quantization}; + } + return kTfLiteOk; +} + +TfLiteStatus InitializeTfLiteEvalTensorFromFlatbuffer( + const tflite::Tensor& flatbuffer_tensor, + const flatbuffers::Vector>* buffers, + TfLiteEvalTensor* result) { + *result = {}; + // Make sure the serialized type is one we know how to deal with, and convert + // it from a flatbuffer enum into a constant used by the kernel C API. + TF_LITE_ENSURE_STATUS( + tflite::ConvertTensorType(flatbuffer_tensor.type(), &result->type)); + + result->data.data = GetFlatbufferTensorBuffer(flatbuffer_tensor, buffers); + + if (flatbuffer_tensor.shape() == nullptr) { + // flatbuffer_tensor.shape() can return a nullptr in the case of a scalar + // tensor. 
+ result->dims = const_cast(&kZeroLengthIntArray); + } else { + result->dims = FlatBufferVectorToTfLiteTypeArray(flatbuffer_tensor.shape()); + } + return kTfLiteOk; +} + +} // namespace internal + +size_t MicroAllocator::GetDefaultTailUsage(bool is_memory_planner_given) { + size_t total_size = AlignSizeUp() + + AlignSizeUp() + + AlignSizeUp() + + AlignSizeUp(); + if (!is_memory_planner_given) { + total_size += AlignSizeUp(); + } + return total_size; +} + +MicroAllocator::MicroAllocator(SingleArenaBufferAllocator* memory_allocator, + MicroMemoryPlanner* memory_planner) + : non_persistent_buffer_allocator_(memory_allocator), + persistent_buffer_allocator_(memory_allocator), + memory_planner_(memory_planner), + model_is_allocating_(false) {} + +MicroAllocator::MicroAllocator( + IPersistentBufferAllocator* persistent_buffer_allocator, + INonPersistentBufferAllocator* non_persistent_buffer_allocator, + MicroMemoryPlanner* memory_planner) + : non_persistent_buffer_allocator_(non_persistent_buffer_allocator), + persistent_buffer_allocator_(persistent_buffer_allocator), + memory_planner_(memory_planner), + model_is_allocating_(false) {} + +MicroAllocator::~MicroAllocator() {} + +MicroAllocator* MicroAllocator::Create(uint8_t* tensor_arena, size_t arena_size, + MicroMemoryPlanner* memory_planner) { + uint8_t* aligned_arena = + AlignPointerUp(tensor_arena, MicroArenaBufferAlignment()); + size_t aligned_arena_size = tensor_arena + arena_size - aligned_arena; + SingleArenaBufferAllocator* memory_allocator = + SingleArenaBufferAllocator::Create(aligned_arena, aligned_arena_size); + + return Create(memory_allocator, memory_planner); +} + +MicroAllocator* MicroAllocator::Create(uint8_t* tensor_arena, + size_t arena_size) { + uint8_t* aligned_arena = + AlignPointerUp(tensor_arena, MicroArenaBufferAlignment()); + size_t aligned_arena_size = tensor_arena + arena_size - aligned_arena; + SingleArenaBufferAllocator* memory_allocator = + SingleArenaBufferAllocator::Create(aligned_arena, 
aligned_arena_size); + + // By default create GreedyMemoryPlanner. + // If a different MemoryPlanner is needed, use the other api. + uint8_t* memory_planner_buffer = memory_allocator->AllocatePersistentBuffer( + sizeof(GreedyMemoryPlanner), alignof(GreedyMemoryPlanner)); + GreedyMemoryPlanner* memory_planner = + new (memory_planner_buffer) GreedyMemoryPlanner(); + + return Create(memory_allocator, memory_planner); +} + +MicroAllocator* MicroAllocator::Create( + SingleArenaBufferAllocator* memory_allocator, + MicroMemoryPlanner* memory_planner) { + TFLITE_DCHECK(memory_allocator != nullptr); + TFLITE_DCHECK(memory_planner != nullptr); + + uint8_t* allocator_buffer = memory_allocator->AllocatePersistentBuffer( + sizeof(MicroAllocator), alignof(MicroAllocator)); + MicroAllocator* allocator = new (allocator_buffer) + MicroAllocator(memory_allocator, memory_allocator, memory_planner); + return allocator; +} + +MicroAllocator* MicroAllocator::Create(uint8_t* persistent_tensor_arena, + size_t persistent_arena_size, + uint8_t* non_persistent_tensor_arena, + size_t non_persistent_arena_size) { + TFLITE_DCHECK(persistent_tensor_arena != nullptr); + TFLITE_DCHECK(non_persistent_tensor_arena != nullptr); + TFLITE_DCHECK(persistent_tensor_arena != non_persistent_tensor_arena); + + IPersistentBufferAllocator* persistent_buffer_allocator = + CreatePersistentArenaAllocator(persistent_tensor_arena, + persistent_arena_size); + INonPersistentBufferAllocator* non_persistent_buffer_allocator = + CreateNonPersistentArenaAllocator(non_persistent_tensor_arena, + non_persistent_arena_size, + persistent_buffer_allocator); + + uint8_t* memory_planner_buffer = + persistent_buffer_allocator->AllocatePersistentBuffer( + sizeof(GreedyMemoryPlanner), alignof(GreedyMemoryPlanner)); + GreedyMemoryPlanner* memory_planner = + new (memory_planner_buffer) GreedyMemoryPlanner(); + + uint8_t* micro_allocator_buffer = + persistent_buffer_allocator->AllocatePersistentBuffer( + sizeof(MicroAllocator), 
alignof(MicroAllocator)); + MicroAllocator* allocator = new (micro_allocator_buffer) + MicroAllocator(persistent_buffer_allocator, + non_persistent_buffer_allocator, memory_planner); + return allocator; +} + +SubgraphAllocations* MicroAllocator::StartModelAllocation(const Model* model) { + TFLITE_DCHECK(model != nullptr); + + if (model_is_allocating_) { + MicroPrintf( + "MicroAllocator: Model allocation started before " + "finishing previously allocated model"); + return nullptr; + } + + model_is_allocating_ = true; + + uint8_t* data_allocator_buffer = + persistent_buffer_allocator_->AllocatePersistentBuffer( + sizeof(MicroBuiltinDataAllocator), + alignof(MicroBuiltinDataAllocator)); + builtin_data_allocator_ = new (data_allocator_buffer) + MicroBuiltinDataAllocator(persistent_buffer_allocator_); + + if (InitScratchBufferData() != kTfLiteOk) { + return nullptr; + } + + // Allocate struct to store eval tensors, nodes and registrations. + SubgraphAllocations* output = reinterpret_cast( + persistent_buffer_allocator_->AllocatePersistentBuffer( + sizeof(SubgraphAllocations) * model->subgraphs()->size(), + alignof(SubgraphAllocations))); + if (output == nullptr) { + MicroPrintf("Failed to allocate memory for model metadata."); + return nullptr; + } + + if (AllocateTfLiteEvalTensors(model, output) != kTfLiteOk || + AllocateNodeAndRegistrations(model, output) != kTfLiteOk) { + return nullptr; + } + return output; +} + +TfLiteStatus MicroAllocator::FinishModelAllocation( + const Model* model, SubgraphAllocations* subgraph_allocations, + ScratchBufferHandle** scratch_buffer_handles) { + if (!model_is_allocating_) { + MicroPrintf( + "MicroAllocator: Model allocation finished before " + "starting allocating model"); + return kTfLiteError; + } + + // Allocate scratch buffer metadata. + TF_LITE_ENSURE_STATUS(AllocateScratchBufferHandles( + scratch_buffer_handles, scratch_buffer_request_count_)); + + // Plan all subgraphs and scratch buffers together. 
+ TF_LITE_ENSURE_STATUS(CommitStaticMemoryPlan(model, subgraph_allocations, + *scratch_buffer_handles)); + model_is_allocating_ = false; + return kTfLiteOk; +} + +void* MicroAllocator::AllocatePersistentBuffer(size_t bytes) { + return persistent_buffer_allocator_->AllocatePersistentBuffer( + bytes, MicroArenaBufferAlignment()); +} + +TfLiteStatus MicroAllocator::RequestScratchBufferInArena(size_t bytes, + int subgraph_idx, + int* buffer_idx) { + // All scratch buffer requests are stored in the head section of the arena + // when a model is in the prepare phase. First align a scratch buffer request + // pointer to the start of the head: + internal::ScratchBufferRequest* requests = GetScratchBufferRequests(); + + // Count the number of requested scratch buffers for the current node: + size_t current_node_request_count = 0; + for (size_t i = 0; i < scratch_buffer_request_count_; ++i) { + if (requests[i].node_idx == kUnassignedScratchBufferRequestIndex) { + ++current_node_request_count; + } + } + + // First, ensure that the per-kernel request has not exceeded the limit: + if (current_node_request_count >= kMaxScratchBuffersPerOp) { + MicroPrintf("Scratch buffer request exeeds limit per operator (%d)", + kMaxScratchBuffersPerOp); + return kTfLiteError; + } + + // Initialize and assign values for the request at the current index: + internal::ScratchBufferRequest* current_request = + &requests[scratch_buffer_request_count_]; + *current_request = {}; + // Assign -1 as a sentinel value that will be updated when the node finishes + // allocating: + current_request->bytes = bytes; + current_request->node_idx = kUnassignedScratchBufferRequestIndex; + current_request->subgraph_idx = subgraph_idx; + + // Assign the current request index to the out-param: + *buffer_idx = scratch_buffer_request_count_; + + // Bump the request count to prepare for the next request: + ++scratch_buffer_request_count_; + return kTfLiteOk; +} + +TfLiteStatus 
MicroAllocator::FinishPrepareNodeAllocations(int node_id) { + // When a node has finished preparing, all temp allocations performed by the + // kernel should be cleaned up: + TF_LITE_ENSURE_STATUS(ResetTempAllocations()); + + // Find and update any new scratch buffer requests for the current node: + internal::ScratchBufferRequest* requests = GetScratchBufferRequests(); + + for (size_t i = 0; i < scratch_buffer_request_count_; ++i) { + // A request with a node_idx of -1 is a sentinel value used to indicate this + // was a new request for the current node. The allocator finally knows the + // node index at this point. Assign the value and update the list of new + // requests so the head section can be adjusted to allow for the next kernel + // to allocate at most kMaxScratchBuffersPerOp requests: + if (requests[i].node_idx == kUnassignedScratchBufferRequestIndex) { + requests[i].node_idx = node_id; + } + } + + // Ensure that the head is re-adjusted to allow for another at-most + // kMaxScratchBuffersPerOp scratch buffer requests in the next operator: + TF_LITE_ENSURE_STATUS(non_persistent_buffer_allocator_->ResizeBuffer( + scratch_buffer_head_, + sizeof(internal::ScratchBufferRequest) * + (scratch_buffer_request_count_ + kMaxScratchBuffersPerOp), + alignof(internal::ScratchBufferRequest))); + + return kTfLiteOk; +} + +size_t MicroAllocator::used_bytes() const { + return non_persistent_buffer_allocator_->GetNonPersistentUsedBytes() + + persistent_buffer_allocator_->GetPersistentUsedBytes(); +} + +TfLiteStatus MicroAllocator::AllocateNodeAndRegistrations( + const Model* model, SubgraphAllocations* subgraph_allocations) { + TFLITE_DCHECK(subgraph_allocations != nullptr); + + for (size_t subgraph_idx = 0; subgraph_idx < model->subgraphs()->size(); + subgraph_idx++) { + const SubGraph* subgraph = model->subgraphs()->Get(subgraph_idx); + TFLITE_DCHECK(subgraph != nullptr); + + uint32_t operators_size = NumSubgraphOperators(subgraph); + + // Initialize NodeAndRegistrations 
for the subgraph. + NodeAndRegistration* output = reinterpret_cast( + persistent_buffer_allocator_->AllocatePersistentBuffer( + sizeof(NodeAndRegistration) * operators_size, + alignof(NodeAndRegistration))); + if (output == nullptr) { + MicroPrintf("Failed to allocate memory for node_and_registrations."); + return kTfLiteError; + } + subgraph_allocations[subgraph_idx].node_and_registrations = output; + } + return kTfLiteOk; +} + +TfLiteTensor* MicroAllocator::AllocatePersistentTfLiteTensor( + const Model* model, const SubgraphAllocations* subgraph_allocations, + int tensor_index, int subgraph_index) { + const SubGraph* subgraph = model->subgraphs()->Get(subgraph_index); + TFLITE_DCHECK(subgraph != nullptr); + + // This value is allocated from persistent arena space. It is guaranteed to be + // around for the lifetime of the application. + TfLiteTensor* tensor = AllocatePersistentTfLiteTensorInternal(); + + if (tensor == nullptr) { + MicroPrintf("Failed to allocate memory for persistent TfLiteTensor"); + return nullptr; + } + + // Populate any fields from the flatbuffer, since this TfLiteTensor struct is + // allocated in the persistent section of the arena, ensure that additional + // allocations also take place in that section of the arena. + if (PopulateTfLiteTensorFromFlatbuffer( + model, tensor, tensor_index, subgraph_index, + /*allocate_temp=*/false) != kTfLiteOk) { + MicroPrintf( + "Failed to populate a persistent TfLiteTensor struct " + "from flatbuffer data!"); + return nullptr; + } + + if (subgraph_allocations != nullptr) { + // Tensor buffers that are allocated at runtime (e.g. non-weight buffers) + // and not located in the flatbuffer are stored on the pre-allocated list of + // TfLiteEvalTensors structs. These structs are the source of truth, simply + // point the corresponding buffer to the new TfLiteTensor data value. 
+ tensor->data.data = + subgraph_allocations[subgraph_index].tensors[tensor_index].data.data; + // TfLiteEvalTensor structs must also be the source of truth for the + // TfLiteTensor dims. + tensor->dims = + subgraph_allocations[subgraph_index].tensors[tensor_index].dims; + } + return tensor; +} + +void MicroAllocator::DeallocateTempTfLiteTensor(TfLiteTensor* tensor) { + TFLITE_DCHECK(tensor != nullptr); + + if (tensor->quantization.type == kTfLiteAffineQuantization) { + TFLITE_DCHECK(tensor->quantization.params != nullptr); + TfLiteAffineQuantization* quantization = + reinterpret_cast( + tensor->quantization.params); + + non_persistent_buffer_allocator_->DeallocateTemp( + reinterpret_cast(quantization->zero_point)); + non_persistent_buffer_allocator_->DeallocateTemp( + reinterpret_cast(quantization)); + } + + // Clear the data in case someone still access tensor arena by mistake + tensor->quantization.type = kTfLiteNoQuantization; + tensor->quantization.params = nullptr; + tensor->data.data = nullptr; + tensor->dims = nullptr; + non_persistent_buffer_allocator_->DeallocateTemp( + reinterpret_cast(tensor)); +} + +TfLiteTensor* MicroAllocator::AllocateTempTfLiteTensor( + const Model* model, const SubgraphAllocations* subgraph_allocations, + int tensor_index, int subgraph_index) { + const SubGraph* subgraph = model->subgraphs()->Get(subgraph_index); + TFLITE_DCHECK(subgraph != nullptr); + + // This value is allocated from temporary arena space. It is guaranteed to be + // around for at least the scope of the calling function. Since this struct + // allocation takes place in temp space, no need to own or cleanup. + TfLiteTensor* tensor = reinterpret_cast( + non_persistent_buffer_allocator_->AllocateTemp(sizeof(TfLiteTensor), + alignof(TfLiteTensor))); + + // Populate any fields from the flatbuffer, since this TfLiteTensor struct is + // allocated in the temp section of the arena, ensure that additional + // allocations also take place in that section of the arena. 
+ if (PopulateTfLiteTensorFromFlatbuffer(model, tensor, tensor_index, + subgraph_index, + /*allocate_temp=*/true) != kTfLiteOk) { + MicroPrintf( + "Failed to populate a temp TfLiteTensor struct from flatbuffer data!"); + return nullptr; + } + + if (subgraph_allocations != nullptr) { + // Tensor buffers that are allocated at runtime (e.g. non-weight buffers) + // and not located in the flatbuffer are stored on the pre-allocated list of + // TfLiteEvalTensors structs. These structs are the source of truth, simply + // point the corresponding buffer to the new TfLiteTensor data value. + tensor->data.data = + subgraph_allocations[subgraph_index].tensors[tensor_index].data.data; + // TfLiteEvalTensor structs must also be the source of truth for the + // TfLiteTensor dims. + tensor->dims = + subgraph_allocations[subgraph_index].tensors[tensor_index].dims; + } + return tensor; +} + +uint8_t* MicroAllocator::AllocateTempBuffer(size_t size, size_t alignment) { + return non_persistent_buffer_allocator_->AllocateTemp(size, alignment); +} + +void MicroAllocator::DeallocateTempBuffer(uint8_t* buffer) { + non_persistent_buffer_allocator_->DeallocateTemp(buffer); +} + +TfLiteStatus MicroAllocator::ResetTempAllocations() { + return non_persistent_buffer_allocator_->ResetTempAllocations(); +} + +bool MicroAllocator::IsAllTempDeallocated() { + return non_persistent_buffer_allocator_->IsAllTempDeallocated(); +} + +TfLiteStatus MicroAllocator::AllocateTfLiteEvalTensors( + const Model* model, SubgraphAllocations* subgraph_allocations) { + TFLITE_DCHECK(subgraph_allocations != nullptr); + + for (size_t subgraph_idx = 0; subgraph_idx < model->subgraphs()->size(); + subgraph_idx++) { + const SubGraph* subgraph = model->subgraphs()->Get(subgraph_idx); + TFLITE_DCHECK(subgraph != nullptr); + + size_t alloc_count = subgraph->tensors()->size(); + TfLiteEvalTensor* tensors = reinterpret_cast( + persistent_buffer_allocator_->AllocatePersistentBuffer( + sizeof(TfLiteEvalTensor) * alloc_count, 
alignof(TfLiteEvalTensor))); + if (tensors == nullptr) { + MicroPrintf( + "Failed to allocate memory for context->eval_tensors, " + "%d bytes required", + sizeof(TfLiteEvalTensor) * alloc_count); + return kTfLiteError; + } + + for (size_t i = 0; i < alloc_count; ++i) { + TfLiteStatus status = internal::InitializeTfLiteEvalTensorFromFlatbuffer( + *subgraph->tensors()->Get(i), model->buffers(), &tensors[i]); + if (status != kTfLiteOk) { + MicroPrintf("Failed to initialize tensor %d", i); + return kTfLiteError; + } + } + subgraph_allocations[subgraph_idx].tensors = tensors; + } + return kTfLiteOk; +} + +TfLiteStatus MicroAllocator::AllocateVariables( + const SubGraph* subgraph, TfLiteEvalTensor* eval_tensors, + const int32_t* offline_planner_offsets) { + for (size_t i = 0; i < subgraph->tensors()->size(); ++i) { + auto* tensor = subgraph->tensors()->Get(i); + if (tensor->is_variable()) { + if (offline_planner_offsets == nullptr || + offline_planner_offsets[i] == kOnlinePlannedBuffer) { + size_t buffer_size; + TF_LITE_ENSURE_STATUS( + TfLiteEvalTensorByteLength(&eval_tensors[i], &buffer_size)); + + eval_tensors[i].data.data = + persistent_buffer_allocator_->AllocatePersistentBuffer( + buffer_size, MicroArenaBufferAlignment()); + + if (eval_tensors[i].data.data == nullptr) { + MicroPrintf("Failed to allocate variable tensor of size %d", + buffer_size); + return kTfLiteError; + } + } + } + } + return kTfLiteOk; +} + +TfLiteTensor* MicroAllocator::AllocatePersistentTfLiteTensorInternal() { + return reinterpret_cast( + persistent_buffer_allocator_->AllocatePersistentBuffer( + sizeof(TfLiteTensor), alignof(TfLiteTensor))); +} + +TfLiteStatus MicroAllocator::PopulateTfLiteTensorFromFlatbuffer( + const Model* model, TfLiteTensor* tensor, int tensor_index, + int subgraph_idx, bool allocate_temp) { + // TODO(b/162311891): This method serves as a stub to ensure quantized + // allocations in the tail can be recorded. 
Once the interpreter has APIs for + // accessing buffers on TfLiteEvalTensor this method can be dropped. + return internal::InitializeTfLiteTensorFromFlatbuffer( + persistent_buffer_allocator_, non_persistent_buffer_allocator_, + allocate_temp, + *model->subgraphs()->Get(subgraph_idx)->tensors()->Get(tensor_index), + model->buffers(), tensor); +} + +TfLiteStatus MicroAllocator::CommitStaticMemoryPlan( + const Model* model, SubgraphAllocations* allocations, + ScratchBufferHandle* scratch_buffer_handles) { + size_t head_usage = 0; + // Create static memory plan + // 1. Calculate AllocationInfo to know the lifetime of each tensor/buffer. + // 2. Add them into the planner (such as the GreedyMemoryPlanner). + // 3. Static memory planning using the planner. + // 4. Set tensor/buffer pointers based on the offsets from the previous step. + // + // Note that AllocationInfo is only needed for creating the plan. It will be + // allocated from the temp section and cleaned up at the bottom of this + // function. + + // Use the AllocationInfoBuilder class to help determine where buffers are + // used in the subgraph. + AllocationInfoBuilder builder(model, non_persistent_buffer_allocator_); + TF_LITE_ENSURE_STATUS( + builder.CreateAllocationInfo(scratch_buffer_request_count_)); + + const int32_t* offline_planner_offsets = nullptr; + TF_LITE_ENSURE_STATUS( + builder.GetOfflinePlannedOffsets(&offline_planner_offsets)); + + // We allocate buffers for variable tensors here since the offline planner + // offsets are conviently available here. 
+ for (size_t subgraph_idx = 0; subgraph_idx < model->subgraphs()->size(); + subgraph_idx++) { + const SubGraph* subgraph = model->subgraphs()->Get(subgraph_idx); + TFLITE_DCHECK(subgraph != nullptr); + TF_LITE_ENSURE_STATUS(AllocateVariables( + subgraph, allocations[subgraph_idx].tensors, offline_planner_offsets)); + } + + TF_LITE_ENSURE_STATUS( + builder.InitializeAllocationInfo(offline_planner_offsets, allocations)); + + internal::ScratchBufferRequest* scratch_buffer_requests = + GetScratchBufferRequests(); + TF_LITE_ENSURE_STATUS(builder.MarkAllocationLifetimes( + 0, scratch_buffer_requests, scratch_buffer_handles, allocations)); + int allocation_info_count = builder.AllocationCount(); + AllocationInfo* allocation_info = builder.Finish(); + + // Remaining arena size that memory planner can use for calculating offsets. + size_t remaining_arena_size = + non_persistent_buffer_allocator_->GetAvailableMemory( + MicroArenaBufferAlignment()); + uint8_t* planner_arena = non_persistent_buffer_allocator_->AllocateTemp( + remaining_arena_size, MicroArenaBufferAlignment()); + + if (planner_arena == nullptr) { + return kTfLiteError; + } + + memory_planner_->Init(planner_arena, remaining_arena_size); + TF_LITE_ENSURE_STATUS( + CreatePlan(memory_planner_, allocation_info, allocation_info_count)); + + // Commit the plan. 
+ TF_LITE_ENSURE_STATUS( + CommitPlan(memory_planner_, + non_persistent_buffer_allocator_->GetOverlayMemoryAddress(), + allocation_info, allocation_info_count)); + + // Reset all temp allocations used above: + builder.FreeAllocationInfo(); + non_persistent_buffer_allocator_->DeallocateTemp(planner_arena); + TF_LITE_ENSURE_STATUS( + non_persistent_buffer_allocator_->ResetTempAllocations()); + TF_LITE_ENSURE_STATUS( + non_persistent_buffer_allocator_->DeallocateResizableBuffer( + scratch_buffer_head_)); + +#ifdef TF_LITE_SHOW_MEMORY_USE + memory_planner_->PrintMemoryPlan(); +#endif + head_usage = memory_planner_->GetMaximumMemorySize(); + + // The head is used to store memory plans for one model at a time during the + // model preparation stage, and is re-purposed to store scratch buffer handles + // during model invocation. The head must be as large as the greater of the + // largest model memory plan's size and the total space required for all + // scratch buffer handles. + if (max_head_buffer_usage_ < head_usage) { + max_head_buffer_usage_ = head_usage; + } + + // The head is used for storing scratch buffer allocations before finalizing a + // memory plan in this function. Ensure that the head is set to the largest + // memory plan sent through the allocator: + TF_LITE_ENSURE_STATUS( + non_persistent_buffer_allocator_->ReserveNonPersistentOverlayMemory( + max_head_buffer_usage_, MicroArenaBufferAlignment())); + return kTfLiteOk; +} + +TfLiteStatus MicroAllocator::AllocateScratchBufferHandles( + ScratchBufferHandle** scratch_buffer_handles, size_t handle_count) { + TFLITE_DCHECK(scratch_buffer_handles != nullptr); + + if (scratch_buffer_request_count_ == 0) { + // No scratch buffer requests were requested during model allocation. + return kTfLiteOk; + } + + // Allocate a consecutive block of memory store the scratch buffer handles. 
+ // This alignment ensures quick lookup during inference time for the model: + *scratch_buffer_handles = reinterpret_cast( + persistent_buffer_allocator_->AllocatePersistentBuffer( + sizeof(ScratchBufferHandle) * handle_count, + alignof(ScratchBufferHandle))); + + return kTfLiteOk; +} + +TfLiteStatus MicroAllocator::InitScratchBufferData() { + // A model is preparing to allocate resources, ensure that scratch buffer + // request counter is cleared: + scratch_buffer_request_count_ = 0; + + // All requests will be stored in the head section. Each kernel is allowed at + // most kMaxScratchBuffersPerOp requests. Adjust the head to reserve at most + // that many requests to begin: + scratch_buffer_head_ = + non_persistent_buffer_allocator_->AllocateResizableBuffer( + sizeof(internal::ScratchBufferRequest) * kMaxScratchBuffersPerOp, + alignof(internal::ScratchBufferRequest)); + if (scratch_buffer_head_ == nullptr) { + return kTfLiteError; + } + + return kTfLiteOk; +} + +internal::ScratchBufferRequest* MicroAllocator::GetScratchBufferRequests() { + return reinterpret_cast(AlignPointerUp( + scratch_buffer_head_, alignof(internal::ScratchBufferRequest))); +} + +TfLiteBridgeBuiltinDataAllocator* MicroAllocator::GetBuiltinDataAllocator() { + return builtin_data_allocator_; +} + +} // namespace tflite diff --git a/tensorflow/lite/micro/micro_allocator.h b/tensorflow/lite/micro/micro_allocator.h new file mode 100644 index 0000000..3532577 --- /dev/null +++ b/tensorflow/lite/micro/micro_allocator.h @@ -0,0 +1,332 @@ +/* Copyright 2023 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. 
+You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ +#ifndef TENSORFLOW_LITE_MICRO_MICRO_ALLOCATOR_H_ +#define TENSORFLOW_LITE_MICRO_MICRO_ALLOCATOR_H_ + +#include +#include + +#include "tensorflow/lite/micro/arena_allocator/single_arena_buffer_allocator.h" +#include "tensorflow/lite/micro/compatibility.h" +#include "tensorflow/lite/micro/flatbuffer_utils.h" +#include "tensorflow/lite/micro/memory_planner/micro_memory_planner.h" +#include "tensorflow/lite/micro/micro_common.h" +#include "tensorflow/lite/micro/tflite_bridge/flatbuffer_conversions_bridge.h" +#include "tensorflow/lite/schema/schema_generated.h" + +namespace tflite { + +// TODO(b/199402574): rename to tflite_internal or just remove internal +// namespace. +namespace internal { + +// Sets up all of the data structure members for a TfLiteTensor based on the +// contents of a serialized tensor in the flatbuffer. +// TODO(b/162311891): Drop this method when the interpreter has an API for +// returning buffers on TfLiteEvalTensor. +TfLiteStatus InitializeTfLiteTensorFromFlatbuffer( + IPersistentBufferAllocator* persistent_buffer_allocator, + INonPersistentBufferAllocator* non_persistent_buffer_allocator, + bool allocate_temp, const tflite::Tensor& flatbuffer_tensor, + const flatbuffers::Vector>* buffers, + TfLiteTensor* result); + +// Holds placeholder information for a scratch buffer request from a kernel. +// This struct is only used during the model prepare stage. Each request from a +// kernel is stored in the head section. 
During the prepare stage, the head +// section will at least hold kMaxScratchBuffersPerOp number of requests plus +// any requests from previous kernel requests. +// +// When the memory plan is finalized, these structs are no longer used in favor +// of a sequential, array of ScratchBufferHandle allocations in the tail +// section. These allocations are indexed by the request API defined in the +// TfLiteContext struct. +struct ScratchBufferRequest { + // Number of bytes required by the buffer. The actual allocated size might be + // greater than `bytes` due to buffer alignment. + size_t bytes; + // Node where the buffer is allocated for. This provides useful information to + // determine the lifetime of the buffer. In AllocationInfo, this buffer will + // have `before` = node_idx and `after` = node_idx. + int node_idx; + int subgraph_idx; +}; + +} // namespace internal + +struct NodeAndRegistration { + TfLiteNode node; + const TFLMRegistration* registration; +}; + +// Holds a pointer to a buffer for a scratch buffer requested by a kernel during +// the model prepare stage. This struct is allocated in-place and allows for +// quick pointer-indexed lookup for speed during model inference. +struct ScratchBufferHandle { + // Pointer to location of the scratch buffer: + uint8_t* data; +}; + +// Stores all per-subgraph allocations. This includes the node and registration +// array, and tensor list for each subgraph. +struct SubgraphAllocations { + NodeAndRegistration* node_and_registrations; + TfLiteEvalTensor* tensors; +}; + +// Allocator responsible for allocating memory for all intermediate tensors +// necessary to invoke a model. +// +// The lifetime of the model, tensor arena and error reporter must be at +// least as long as that of the allocator object, since the allocator needs +// them to be accessible during its entire lifetime. 
+// +// The MicroAllocator simply plans out additional allocations that are required +// to standup a model for inference in TF Micro. This class currently relies on +// an additional allocator - SingleArenaBufferAllocator - for all allocations +// from an arena. These allocations are divided into head (non-persistent) and +// tail (persistent) regions: +// +// Memory layout to help understand how it works +// This information could change in the future version. +// ************** .memory_allocator->GetBuffer() +// Tensors/Scratch buffers (head) +// ************** .head_watermark +// unused memory +// ************** .memory_allocator->GetBuffer() + ->GetMaxBufferSize() +// - ->GetDataSize() +// persistent area (tail) +// ************** .memory_allocator->GetBuffer() + ->GetMaxBufferSize() +class MicroAllocator { + public: + // Creates a MicroAllocator instance from a given tensor arena. This arena + // will be managed by the created instance. The GreedyMemoryPlanner will + // by default be used and created on the arena. + // Note: Please use alignas(16) to make sure tensor_arena is 16 + // bytes aligned, otherwise some head room will be wasted. + // TODO(b/157615197): Cleanup constructor + factory usage. + static MicroAllocator* Create(uint8_t* tensor_arena, size_t arena_size); + + // Creates a MicroAllocator instance from a given tensor arena and a given + // MemoryPlanner. This arena will be managed by the created instance. Note: + // Please use alignas(16) to make sure tensor_arena is 16 bytes + // aligned, otherwise some head room will be wasted. + static MicroAllocator* Create(uint8_t* tensor_arena, size_t arena_size, + MicroMemoryPlanner* memory_planner); + + // Creates a MicroAllocator instance using the provided + // SingleArenaBufferAllocator instance and the MemoryPlanner. This allocator + // instance will use the SingleArenaBufferAllocator instance to manage + // allocations internally. 
+ static MicroAllocator* Create(SingleArenaBufferAllocator* memory_allocator, + MicroMemoryPlanner* memory_planner); + + // Creates a MicroAllocator instance using the provided + // SingleArenaBufferAllocator instance and the MemoryPlanner. This allocator + // instance will use the SingleArenaBufferAllocator instance to manage + // allocations internally. + static MicroAllocator* Create(uint8_t* persistent_tensor_arena, + size_t persistent_arena_size, + uint8_t* non_persistent_tensor_arena, + size_t non_persistent_arena_size); + + // Returns the fixed amount of memory overhead of MicroAllocator. + static size_t GetDefaultTailUsage(bool is_memory_planner_given); + + // Allocates internal resources required for model inference for each subgraph + // from the arena. + // + // This method will run through the flatbuffer data supplied in the model to + // properly allocate tensor, node, and op registration data. This method is + // expected to be followed with a call to FinishModelAllocation() Returns a + // pointer to an array of SubgraphAllocations (also stored in the tail of the + // arena) where each index corresponds to a different subgraph in the model. + // Return value is nullptr if the allocations failed. + SubgraphAllocations* StartModelAllocation(const Model* model); + + // Finish allocating internal resources required for model inference. + // + // -Plan the memory for activation tensors and scratch buffers. + // -Update eval tensors for each subgraph based on planned offsets. + // -Allocate scratch buffer handles array and update based on planned offsets. + // + // This method should be called after assigning model resources + // in StartModelAllocation(). The subgraph_allocations pointer should be the + // value passed into this class during StartModelAllocation(). Scratch buffer + // handles are stored in the out-param `scratch_buffer_handles` array which is + // allocated in this method. 
This value will be used in `GetScratchBuffer` + // call to retrieve scratch buffers. + TfLiteStatus FinishModelAllocation( + const Model* model, SubgraphAllocations* subgraph_allocations, + ScratchBufferHandle** scratch_buffer_handles); + + // Allocates a TfLiteTensor struct and populates the returned value with + // properties from the model flatbuffer. This struct is allocated from + // persistent arena memory is only guaranteed for the lifetime of the + // application. The eval_tensors pointer should be the value passed into this + // class during StartModelAllocation() and contains the source-of-truth for + // buffers. + virtual TfLiteTensor* AllocatePersistentTfLiteTensor( + const Model* model, const SubgraphAllocations* subgraph_allocations, + int tensor_index, int subgraph_index); + + // Allocates a TfLiteTensor struct and populates the returned value with + // properties from the model flatbuffer. This struct is allocated from + // temporary arena memory is only guaranteed until a call is made to + // ResetTempAllocations(). Subgraph_allocations contains the array of + // TfLiteEvalTensors. If the newly allocated temp at the specified subgraph + // and tensor index is already present int the TfLiteEvalTensor array, its + // data buffer will be re-used. + virtual TfLiteTensor* AllocateTempTfLiteTensor( + const Model* model, const SubgraphAllocations* subgraph_allocations, + int tensor_index, int subgraph_index); + + virtual void DeallocateTempTfLiteTensor(TfLiteTensor*); + + // Returns a pointer to a buffer from the temporary arena memory and is only + // guaranteed until a call is made to ResetTempAllocations(). + virtual uint8_t* AllocateTempBuffer(size_t size, size_t alignment); + + // Signals that the temporary buffer no longer needed. + virtual void DeallocateTempBuffer(uint8_t* buffer); + + // Resets all temporary allocations. This method should be called after a + // chain of temp allocations (e.g. 
chain of TfLiteTensor objects via + // AllocateTfLiteTensor()). + virtual TfLiteStatus ResetTempAllocations(); + + // Returns true if all temporary buffers including temp TfLiteTensor are + // already deallocated. + virtual bool IsAllTempDeallocated(); + + // Allocates persistent buffer which has the same life time as the allocator. + // The memory is immediately available and is allocated from the tail of the + // arena. + virtual void* AllocatePersistentBuffer(size_t bytes); + + // Register a scratch buffer of size `bytes` for Node with `node_id`. + // This method only requests a buffer with a given size to be used after a + // model has finished allocation via FinishModelAllocation(). All requested + // buffers will be accessible by the out-param in that method. + TfLiteStatus RequestScratchBufferInArena(size_t bytes, int subgraph_idx, + int* buffer_idx); + + // Finish allocating a specific NodeAndRegistration prepare block (kernel + // entry for a model) with a given node ID. This call ensures that any scratch + // buffer requests and temporary allocations are handled and ready for the + // next node prepare block. + TfLiteStatus FinishPrepareNodeAllocations(int node_id); + + // Returns the arena usage in bytes, only available after + // `FinishModelAllocation`. Otherwise, it will return 0. + size_t used_bytes() const; + + TfLiteBridgeBuiltinDataAllocator* GetBuiltinDataAllocator(); + + protected: + MicroAllocator(SingleArenaBufferAllocator* memory_allocator, + MicroMemoryPlanner* memory_planner); + MicroAllocator(IPersistentBufferAllocator* persistent_buffer_allocator, + INonPersistentBufferAllocator* non_persistent_buffer_allocator, + MicroMemoryPlanner* memory_planner); + virtual ~MicroAllocator(); + + // Allocates an array in the arena to hold pointers to the node and + // registration pointers required to represent the inference graph of the + // model. 
+ virtual TfLiteStatus AllocateNodeAndRegistrations( + const Model* model, SubgraphAllocations* subgraph_allocations); + + // Allocates the list of persistent TfLiteEvalTensors that are used for the + // "eval" phase of model inference. These structs will be the source of truth + // for all tensor buffers. + virtual TfLiteStatus AllocateTfLiteEvalTensors( + const Model* model, SubgraphAllocations* subgraph_allocations); + + // Allocates persistent tensor buffers for variable tensors in the subgraph. + // Online and offline variable tensors are handled differently hence the + // offline_planner_offsets parameter is needed. + virtual TfLiteStatus AllocateVariables( + const SubGraph* subgraph, TfLiteEvalTensor* eval_tensors, + const int32_t* offline_planner_offsets); + + // Allocate and return a persistent TfLiteTensor. + // TODO(b/162311891): Drop this method when the interpreter has an API for + // accessing TfLiteEvalTensor structs. + virtual TfLiteTensor* AllocatePersistentTfLiteTensorInternal(); + + // Populates a TfLiteTensor struct with data from the model flatbuffer. Any + // quantization data is allocated from either the tail (persistent) or temp + // sections of the arena based on the allocation flag. + virtual TfLiteStatus PopulateTfLiteTensorFromFlatbuffer(const Model* model, + TfLiteTensor* tensor, + int tensor_index, + int subgraph_idx, + bool allocate_temp); + + private: + // Commits a memory plan for all non-persistent buffer allocations in the + // 'head' section of the memory arena. The eval_tensors pointer is the list of + // pre-allocated TfLiteEvalTensor structs that will point to the buffers that + // will be allocated into the head section in this function call. The + // scratch_buffer_handles pointer is the array of pre-allocated + // ScratchBufferHandle structs that will point to allocated buffers also in + // the head section. 
+ virtual TfLiteStatus CommitStaticMemoryPlan( + const Model* model, SubgraphAllocations* allocations, + ScratchBufferHandle* scratch_buffer_handles); + + // Allocates an array of ScratchBufferHandle structs in the tail section for a + // given number of handles. + virtual TfLiteStatus AllocateScratchBufferHandles( + ScratchBufferHandle** scratch_buffer_handles, size_t handle_count); + + // Clears all internal scratch buffer request counts and resets the head to + // prepare for kernels to request scratch buffer data when a model is + // preparing. + TfLiteStatus InitScratchBufferData(); + + // Returns the pointer for the array of ScratchBufferRequest allocations in + // the head section. + internal::ScratchBufferRequest* GetScratchBufferRequests(); + + // A simple memory allocator that always allocate from the arena tail or head. + INonPersistentBufferAllocator* non_persistent_buffer_allocator_; + IPersistentBufferAllocator* persistent_buffer_allocator_; + + // Allocator used to allocate persistent builtin data. + TfLiteBridgeBuiltinDataAllocator* builtin_data_allocator_; + + // Activation buffer memory planner. + MicroMemoryPlanner* memory_planner_; + + bool model_is_allocating_; + + // Holds the number of ScratchBufferRequest instances stored in the head + // section when a model is allocating. + size_t scratch_buffer_request_count_ = 0; + + // Holds ScratchBufferRequest when a model is allocating + uint8_t* scratch_buffer_head_ = nullptr; + + // Holds the byte length of the memory plan with the largest head usage. Used + // to ensure that multi-tenant allocations can share the head for buffers. 
+ size_t max_head_buffer_usage_ = 0; + + TF_LITE_REMOVE_VIRTUAL_DELETE +}; + +} // namespace tflite +#endif // TENSORFLOW_LITE_MICRO_MICRO_ALLOCATOR_H_ diff --git a/tensorflow/lite/micro/micro_allocator_test.cc b/tensorflow/lite/micro/micro_allocator_test.cc new file mode 100644 index 0000000..0c4878d --- /dev/null +++ b/tensorflow/lite/micro/micro_allocator_test.cc @@ -0,0 +1,1362 @@ +/* Copyright 2020 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#include "tensorflow/lite/micro/micro_allocator.h" + +#include + +#include "tensorflow/lite/c/common.h" +#include "tensorflow/lite/micro/arena_allocator/single_arena_buffer_allocator.h" +#include "tensorflow/lite/micro/kernels/kernel_util.h" +#include "tensorflow/lite/micro/memory_helpers.h" +#include "tensorflow/lite/micro/memory_planner/greedy_memory_planner.h" +#include "tensorflow/lite/micro/memory_planner/non_persistent_buffer_planner_shim.h" +#include "tensorflow/lite/micro/micro_arena_constants.h" +#include "tensorflow/lite/micro/test_helpers.h" +#include "tensorflow/lite/micro/testing/micro_test.h" +#include "tensorflow/lite/micro/testing/test_conv_model.h" + +namespace tflite { +namespace testing { +namespace { + +constexpr int t0 = 0; +constexpr int t1 = 1; +constexpr int t2 = 2; +constexpr int t3 = 3; +constexpr int t4 = 4; +constexpr int t5 = 5; + +void VerifyMockTfLiteTensor(TfLiteTensor* tensor, bool 
is_variable = false) { + TF_LITE_MICRO_EXPECT_EQ(kTfLiteInt32, tensor->type); + TF_LITE_MICRO_EXPECT_EQ(1, tensor->dims->size); + TF_LITE_MICRO_EXPECT_EQ(1, tensor->dims->data[0]); + TF_LITE_MICRO_EXPECT_EQ(is_variable, tensor->is_variable); + TF_LITE_MICRO_EXPECT_EQ(static_cast(4), tensor->bytes); + TF_LITE_MICRO_EXPECT(nullptr != tensor->data.raw); + TF_LITE_MICRO_EXPECT_EQ(static_cast(0), + (reinterpret_cast(tensor->data.raw) % + MicroArenaBufferAlignment())); +} + +// TODO(b/203663932): remove the usage of uint8 weight, which is deprecated. +void VerifyMockWeightTfLiteTensor(TfLiteTensor* tensor) { + TF_LITE_MICRO_EXPECT_EQ(kTfLiteInt8, tensor->type); + TF_LITE_MICRO_EXPECT_EQ(1, tensor->dims->size); + TF_LITE_MICRO_EXPECT_EQ(1, tensor->dims->data[0]); + TF_LITE_MICRO_EXPECT_EQ(static_cast(1), tensor->bytes); + TF_LITE_MICRO_EXPECT(nullptr != tensor->data.raw); +} + +void VerifyMockTfLiteEvalTensor(TfLiteEvalTensor* tensor) { + TF_LITE_MICRO_EXPECT_EQ(kTfLiteInt32, tensor->type); + TF_LITE_MICRO_EXPECT_EQ(1, tensor->dims->size); + TF_LITE_MICRO_EXPECT_EQ(1, tensor->dims->data[0]); + size_t buffer_size; + TF_LITE_MICRO_EXPECT_EQ( + kTfLiteOk, tflite::TfLiteEvalTensorByteLength(tensor, &buffer_size)); + TF_LITE_MICRO_EXPECT_EQ(static_cast(4), buffer_size); + TF_LITE_MICRO_EXPECT(nullptr != tensor->data.raw); + TF_LITE_MICRO_EXPECT_EQ(static_cast(0), + (reinterpret_cast(tensor->data.raw) % + MicroArenaBufferAlignment())); +} + +void VerifyMockWeightTfLiteEvalTensor(TfLiteEvalTensor* tensor) { + TF_LITE_MICRO_EXPECT_EQ(kTfLiteInt8, tensor->type); + TF_LITE_MICRO_EXPECT_EQ(1, tensor->dims->size); + TF_LITE_MICRO_EXPECT_EQ(1, tensor->dims->data[0]); + size_t buffer_size; + TF_LITE_MICRO_EXPECT_EQ( + kTfLiteOk, tflite::TfLiteEvalTensorByteLength(tensor, &buffer_size)); + TF_LITE_MICRO_EXPECT_EQ(static_cast(1), buffer_size); + TF_LITE_MICRO_EXPECT(nullptr != tensor->data.raw); +} + +// TODO(b/203664378): rename to reflect the function does more than just verify. 
+void AllocateAndVerifyMockTensor(const Model* model, MicroAllocator* allocator, + SubgraphAllocations* subgraph_allocations, + int tensor_idx, bool is_variable = false) { + for (size_t subgraph_idx = 0; subgraph_idx < model->subgraphs()->size(); + subgraph_idx++) { + VerifyMockTfLiteTensor( + allocator->AllocatePersistentTfLiteTensor(model, subgraph_allocations, + tensor_idx, subgraph_idx), + is_variable); + VerifyMockTfLiteEvalTensor( + &subgraph_allocations[subgraph_idx].tensors[tensor_idx]); + } +} + +void AllocateAndVerifyMockWeightTensor( + const Model* model, MicroAllocator* allocator, + SubgraphAllocations* subgraph_allocations, int tensor_idx) { + for (size_t subgraph_idx = 0; subgraph_idx < model->subgraphs()->size(); + subgraph_idx++) { + VerifyMockWeightTfLiteTensor(allocator->AllocatePersistentTfLiteTensor( + model, subgraph_allocations, tensor_idx, subgraph_idx)); + VerifyMockWeightTfLiteEvalTensor( + &subgraph_allocations[subgraph_idx].tensors[tensor_idx]); + } +} + +void EnsureUniqueVariableTensorBuffer(const Model* model, + TfLiteEvalTensor* eval_tensors, + const int variable_tensor_idx) { + for (size_t i = 0; i < GetModelTensorCount(model); i++) { + if (i != static_cast(variable_tensor_idx)) { + TF_LITE_MICRO_EXPECT_NE(eval_tensors[variable_tensor_idx].data.raw, + eval_tensors[i].data.raw); + } + } +} + +void VerifyRegistrationAndNodeAllocation( + SubgraphAllocations* subgraph_allocations, size_t count, + int num_subgraphs) { + for (int subgraph_idx = 0; subgraph_idx < num_subgraphs; subgraph_idx++) { + for (size_t i = 0; i < count; i++) { + TF_LITE_MICRO_EXPECT(&subgraph_allocations[subgraph_idx] + .node_and_registrations[i] + .registration); + } + } +} + +size_t GetArenaUsedBytesBySimpleMockModel(bool is_memory_planner_injected) { + const int tensor_count = 4; + const int node_count = 2; + size_t eval_tensor_size = AlignSizeUp(tensor_count); + size_t node_registration_size = AlignSizeUp(node_count); + + const int activation_tensor_count = 3; + 
size_t activation_tensor_buffer = + activation_tensor_count * AlignSizeUp(1, MicroArenaBufferAlignment()); + + size_t default_tail_usage = + MicroAllocator::GetDefaultTailUsage(/*is_memory_plan_given=*/false); + if (is_memory_planner_injected) { + default_tail_usage = + MicroAllocator::GetDefaultTailUsage(/*is_memory_plan_given=*/true); + } + + return default_tail_usage + eval_tensor_size + node_registration_size + + activation_tensor_buffer; +} + +} // namespace +} // namespace testing +} // namespace tflite + +TF_LITE_MICRO_TESTS_BEGIN + +TF_LITE_MICRO_TEST(TestInitializeRuntimeTensor) { + constexpr size_t arena_size = 1024; + uint8_t arena[arena_size]; + tflite::SingleArenaBufferAllocator* simple_allocator = + tflite::SingleArenaBufferAllocator::Create(arena, arena_size); + + const tflite::Tensor* tensor = tflite::testing::Create1dFlatbufferTensor(100); + const flatbuffers::Vector>* buffers = + tflite::testing::CreateFlatbufferBuffers(); + + TfLiteTensor allocated_tensor; + TF_LITE_MICRO_EXPECT_EQ( + kTfLiteOk, + tflite::internal::InitializeTfLiteTensorFromFlatbuffer( + simple_allocator, simple_allocator, /*allocate_temp=*/false, *tensor, + buffers, &allocated_tensor)); + TF_LITE_MICRO_EXPECT_EQ(kTfLiteInt32, allocated_tensor.type); + TF_LITE_MICRO_EXPECT_EQ(1, allocated_tensor.dims->size); + TF_LITE_MICRO_EXPECT_EQ(100, allocated_tensor.dims->data[0]); + TF_LITE_MICRO_EXPECT_EQ(static_cast(400), allocated_tensor.bytes); + TF_LITE_MICRO_EXPECT(nullptr == allocated_tensor.data.i32); + TF_LITE_MICRO_EXPECT_EQ(kTfLiteArenaRw, allocated_tensor.allocation_type); + + simple_allocator->~SingleArenaBufferAllocator(); +} + +// TODO(b/162311891): Drop this test when InitializeTfLiteTensorFromFlatbuffer() +// always allocates from temp (interpreter returns buffers from +// TfLiteEvalTensor): +TF_LITE_MICRO_TEST(TestInitializeTempRuntimeTensor) { + constexpr size_t arena_size = 1024; + uint8_t arena[arena_size]; + tflite::SingleArenaBufferAllocator* simple_allocator = + 
tflite::SingleArenaBufferAllocator::Create(arena, arena_size); + + const tflite::Tensor* tensor = tflite::testing::Create1dFlatbufferTensor(100); + const flatbuffers::Vector>* buffers = + tflite::testing::CreateFlatbufferBuffers(); + + TfLiteTensor allocated_temp_tensor; + TF_LITE_MICRO_EXPECT_EQ( + kTfLiteOk, tflite::internal::InitializeTfLiteTensorFromFlatbuffer( + simple_allocator, simple_allocator, /*allocate_temp=*/true, + *tensor, buffers, &allocated_temp_tensor)); + TF_LITE_MICRO_EXPECT_EQ(kTfLiteInt32, allocated_temp_tensor.type); + TF_LITE_MICRO_EXPECT_EQ(1, allocated_temp_tensor.dims->size); + TF_LITE_MICRO_EXPECT_EQ(100, allocated_temp_tensor.dims->data[0]); + TF_LITE_MICRO_EXPECT_EQ(static_cast(400), + allocated_temp_tensor.bytes); + TF_LITE_MICRO_EXPECT(nullptr == allocated_temp_tensor.data.i32); + TF_LITE_MICRO_EXPECT_EQ(kTfLiteArenaRw, + allocated_temp_tensor.allocation_type); + + simple_allocator->~SingleArenaBufferAllocator(); +} + +TF_LITE_MICRO_TEST(TestInitializeQuantizedTensor) { + constexpr size_t arena_size = 1024; + uint8_t arena[arena_size]; + tflite::SingleArenaBufferAllocator* simple_allocator = + tflite::SingleArenaBufferAllocator::Create(arena, arena_size); + + const tflite::Tensor* tensor = + tflite::testing::CreateQuantizedFlatbufferTensor(100); + const flatbuffers::Vector>* buffers = + tflite::testing::CreateFlatbufferBuffers(); + + TfLiteTensor allocated_tensor; + TF_LITE_MICRO_EXPECT_EQ( + kTfLiteOk, + tflite::internal::InitializeTfLiteTensorFromFlatbuffer( + simple_allocator, simple_allocator, /*allocate_temp=*/false, *tensor, + buffers, &allocated_tensor)); + TF_LITE_MICRO_EXPECT_EQ(kTfLiteInt32, allocated_tensor.type); + TF_LITE_MICRO_EXPECT_EQ(1, allocated_tensor.dims->size); + TF_LITE_MICRO_EXPECT_EQ(100, allocated_tensor.dims->data[0]); + TF_LITE_MICRO_EXPECT_EQ(static_cast(400), allocated_tensor.bytes); + TF_LITE_MICRO_EXPECT(nullptr == allocated_tensor.data.i32); + TF_LITE_MICRO_EXPECT_EQ(kTfLiteArenaRw, 
allocated_tensor.allocation_type); + + simple_allocator->~SingleArenaBufferAllocator(); +} + +TF_LITE_MICRO_TEST(TestMissingQuantization) { + constexpr size_t arena_size = 1024; + uint8_t arena[arena_size]; + tflite::SingleArenaBufferAllocator* simple_allocator = + tflite::SingleArenaBufferAllocator::Create(arena, arena_size); + + const tflite::Tensor* tensor = + tflite::testing::CreateMissingQuantizationFlatbufferTensor(100); + const flatbuffers::Vector>* buffers = + tflite::testing::CreateFlatbufferBuffers(); + + TfLiteTensor allocated_tensor; + TF_LITE_MICRO_EXPECT_EQ( + kTfLiteOk, + tflite::internal::InitializeTfLiteTensorFromFlatbuffer( + simple_allocator, simple_allocator, /*allocate_temp=*/false, *tensor, + buffers, &allocated_tensor)); + TF_LITE_MICRO_EXPECT_EQ(kTfLiteInt32, allocated_tensor.type); + TF_LITE_MICRO_EXPECT_EQ(1, allocated_tensor.dims->size); + TF_LITE_MICRO_EXPECT_EQ(100, allocated_tensor.dims->data[0]); + TF_LITE_MICRO_EXPECT_EQ(static_cast(400), allocated_tensor.bytes); + TF_LITE_MICRO_EXPECT(nullptr == allocated_tensor.data.i32); +} + +TF_LITE_MICRO_TEST(TestFailsWhenModelStartsTwice) { + const tflite::Model* model = tflite::testing::GetSimpleMockModel(); + tflite::testing::TestingOpResolver op_resolver; + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, + tflite::testing::GetTestingOpResolver(op_resolver)); + constexpr size_t arena_size = 1024; + uint8_t arena[arena_size]; + tflite::MicroAllocator* allocator = + tflite::MicroAllocator::Create(arena, arena_size); + TF_LITE_MICRO_EXPECT(nullptr != allocator); + TF_LITE_MICRO_EXPECT(nullptr != allocator->StartModelAllocation(model)); + TF_LITE_MICRO_EXPECT(nullptr == allocator->StartModelAllocation(model)); +} + +TF_LITE_MICRO_TEST(TestFailsWithWrongSequence) { + const tflite::Model* model = tflite::testing::GetSimpleMockModel(); + tflite::ScratchBufferHandle* scratch_buffer_handles = nullptr; + tflite::testing::TestingOpResolver op_resolver; + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, + 
tflite::testing::GetTestingOpResolver(op_resolver)); + tflite::SubgraphAllocations* subgraph_allocations = nullptr; + constexpr size_t arena_size = 1024; + uint8_t arena[arena_size]; + tflite::MicroAllocator* allocator = + tflite::MicroAllocator::Create(arena, arena_size); + TF_LITE_MICRO_EXPECT(nullptr != allocator); + + // We can't finish allocation before it ever got started. + TF_LITE_MICRO_EXPECT_EQ( + kTfLiteError, allocator->FinishModelAllocation( + model, subgraph_allocations, &scratch_buffer_handles)); + + // Start twice is not allowed. + TF_LITE_MICRO_EXPECT(nullptr != allocator->StartModelAllocation(model)); + TF_LITE_MICRO_EXPECT(nullptr == allocator->StartModelAllocation(model)); +} + +TF_LITE_MICRO_TEST(TestMockModelAllocation) { + const tflite::Model* model = tflite::testing::GetSimpleMockModel(); + tflite::ScratchBufferHandle* scratch_buffer_handles = nullptr; + tflite::testing::TestingOpResolver op_resolver; + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, + tflite::testing::GetTestingOpResolver(op_resolver)); + constexpr size_t arena_size = 1024 + 16; + uint8_t arena[arena_size]; + tflite::MicroAllocator* allocator = + tflite::MicroAllocator::Create(arena, arena_size); + TF_LITE_MICRO_EXPECT(nullptr != allocator); + tflite::SubgraphAllocations* subgraph_allocations = + allocator->StartModelAllocation(model); + TF_LITE_MICRO_EXPECT(nullptr != subgraph_allocations); + TF_LITE_MICRO_EXPECT_EQ( + kTfLiteOk, allocator->FinishModelAllocation(model, subgraph_allocations, + &scratch_buffer_handles)); + size_t expected_arena_used_bytes = + tflite::testing::GetArenaUsedBytesBySimpleMockModel( + /*is_memory_planner_injected=*/false); + TF_LITE_MICRO_EXPECT_EQ(allocator->used_bytes(), expected_arena_used_bytes); + + size_t model_tensor_size = tflite::testing::GetModelTensorCount(model); + TF_LITE_MICRO_EXPECT_EQ(static_cast(4), model_tensor_size); + + tflite::testing::AllocateAndVerifyMockTensor(model, allocator, + subgraph_allocations, 0); + 
tflite::testing::AllocateAndVerifyMockWeightTensor(model, allocator, + subgraph_allocations, 1); + tflite::testing::AllocateAndVerifyMockTensor(model, allocator, + subgraph_allocations, 2); + tflite::testing::AllocateAndVerifyMockTensor(model, allocator, + subgraph_allocations, 3); + + TfLiteEvalTensor* eval_tensors = subgraph_allocations[0].tensors; + TF_LITE_MICRO_EXPECT_NE(eval_tensors[1].data.raw, eval_tensors[0].data.raw); + TF_LITE_MICRO_EXPECT_NE(eval_tensors[2].data.raw, eval_tensors[0].data.raw); + TF_LITE_MICRO_EXPECT_NE(eval_tensors[1].data.raw, eval_tensors[2].data.raw); + TF_LITE_MICRO_EXPECT_NE(eval_tensors[3].data.raw, eval_tensors[0].data.raw); + TF_LITE_MICRO_EXPECT_NE(eval_tensors[3].data.raw, eval_tensors[1].data.raw); + TF_LITE_MICRO_EXPECT_NE(eval_tensors[3].data.raw, eval_tensors[2].data.raw); + + // SimpleMockModel has 2 operators: + tflite::testing::VerifyRegistrationAndNodeAllocation(subgraph_allocations, + /*count=*/2, + /*num_subgraphs=*/1); +} + +TF_LITE_MICRO_TEST(TestMockModelAllocationInTwoSeparateArenas) { + const tflite::Model* model = tflite::testing::GetSimpleMockModel(); + tflite::ScratchBufferHandle* scratch_buffer_handles = nullptr; + tflite::testing::TestingOpResolver op_resolver; + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, + tflite::testing::GetTestingOpResolver(op_resolver)); + constexpr size_t arena_size = 1024; + uint8_t persistent_arena[arena_size]; + uint8_t non_persistent_arena[arena_size]; + + tflite::MicroAllocator* allocator = tflite::MicroAllocator::Create( + persistent_arena, arena_size, non_persistent_arena, arena_size); + TF_LITE_MICRO_EXPECT(nullptr != allocator); + tflite::SubgraphAllocations* subgraph_allocations = + allocator->StartModelAllocation(model); + TF_LITE_MICRO_EXPECT(nullptr != subgraph_allocations); + TF_LITE_MICRO_EXPECT_EQ( + kTfLiteOk, allocator->FinishModelAllocation(model, subgraph_allocations, + &scratch_buffer_handles)); + + size_t model_tensor_size = tflite::testing::GetModelTensorCount(model); + 
TF_LITE_MICRO_EXPECT_EQ(static_cast(4), model_tensor_size); + + tflite::testing::AllocateAndVerifyMockTensor(model, allocator, + subgraph_allocations, 0); + tflite::testing::AllocateAndVerifyMockWeightTensor(model, allocator, + subgraph_allocations, 1); + tflite::testing::AllocateAndVerifyMockTensor(model, allocator, + subgraph_allocations, 2); + tflite::testing::AllocateAndVerifyMockTensor(model, allocator, + subgraph_allocations, 3); + + TfLiteEvalTensor* eval_tensors = subgraph_allocations[0].tensors; + TF_LITE_MICRO_EXPECT_NE(eval_tensors[1].data.raw, eval_tensors[0].data.raw); + TF_LITE_MICRO_EXPECT_NE(eval_tensors[2].data.raw, eval_tensors[0].data.raw); + TF_LITE_MICRO_EXPECT_NE(eval_tensors[1].data.raw, eval_tensors[2].data.raw); + TF_LITE_MICRO_EXPECT_NE(eval_tensors[3].data.raw, eval_tensors[0].data.raw); + TF_LITE_MICRO_EXPECT_NE(eval_tensors[3].data.raw, eval_tensors[1].data.raw); + TF_LITE_MICRO_EXPECT_NE(eval_tensors[3].data.raw, eval_tensors[2].data.raw); + + // SimpleMockModel has 2 operators: + tflite::testing::VerifyRegistrationAndNodeAllocation(subgraph_allocations, + /*count=*/2, + /*num_subgraphs=*/1); +} + +TF_LITE_MICRO_TEST(TestMockModelAllocationWithGivenMemoryPlanner) { + const tflite::Model* model = tflite::testing::GetSimpleMockModel(); + tflite::ScratchBufferHandle* scratch_buffer_handles = nullptr; + tflite::testing::TestingOpResolver op_resolver; + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, + tflite::testing::GetTestingOpResolver(op_resolver)); + constexpr size_t arena_size = 1024; + uint8_t arena[arena_size]; + tflite::GreedyMemoryPlanner memory_planner; + tflite::MicroAllocator* allocator = + tflite::MicroAllocator::Create(arena, arena_size, &memory_planner); + TF_LITE_MICRO_EXPECT(nullptr != allocator); + tflite::SubgraphAllocations* subgraph_allocations = + allocator->StartModelAllocation(model); + TF_LITE_MICRO_EXPECT(nullptr != subgraph_allocations); + TF_LITE_MICRO_EXPECT_EQ( + kTfLiteOk, allocator->FinishModelAllocation(model, 
subgraph_allocations, + &scratch_buffer_handles)); + size_t expected_arena_used_bytes = + tflite::testing::GetArenaUsedBytesBySimpleMockModel( + /*is_memory_planner_injected=*/true); + TF_LITE_MICRO_EXPECT_EQ(allocator->used_bytes(), expected_arena_used_bytes); + + size_t model_tensor_size = tflite::testing::GetModelTensorCount(model); + TF_LITE_MICRO_EXPECT_EQ(static_cast(4), model_tensor_size); + + tflite::testing::AllocateAndVerifyMockTensor(model, allocator, + subgraph_allocations, 0); + tflite::testing::AllocateAndVerifyMockWeightTensor(model, allocator, + subgraph_allocations, 1); + tflite::testing::AllocateAndVerifyMockTensor(model, allocator, + subgraph_allocations, 2); + tflite::testing::AllocateAndVerifyMockTensor(model, allocator, + subgraph_allocations, 3); + + TfLiteEvalTensor* eval_tensors = subgraph_allocations[0].tensors; + TF_LITE_MICRO_EXPECT_NE(eval_tensors[1].data.raw, eval_tensors[0].data.raw); + TF_LITE_MICRO_EXPECT_NE(eval_tensors[2].data.raw, eval_tensors[0].data.raw); + TF_LITE_MICRO_EXPECT_NE(eval_tensors[1].data.raw, eval_tensors[2].data.raw); + TF_LITE_MICRO_EXPECT_NE(eval_tensors[3].data.raw, eval_tensors[0].data.raw); + TF_LITE_MICRO_EXPECT_NE(eval_tensors[3].data.raw, eval_tensors[1].data.raw); + TF_LITE_MICRO_EXPECT_NE(eval_tensors[3].data.raw, eval_tensors[2].data.raw); + + // SimpleMockModel has 2 operators: + tflite::testing::VerifyRegistrationAndNodeAllocation(subgraph_allocations, + /*count=*/2, + /*num_subgraphs=*/1); +} + +TF_LITE_MICRO_TEST(TestMultiTenantAllocation) { + // The `OpResolver` is shared among different models in this test for + // simplicity but in practice you could have different `OpResolver`. + tflite::testing::TestingOpResolver op_resolver; + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, + tflite::testing::GetTestingOpResolver(op_resolver)); + + // Create a shared allocator. 
+ constexpr size_t arena_size = 4096; + uint8_t arena[arena_size]; + tflite::MicroAllocator* allocator = + tflite::MicroAllocator::Create(arena, arena_size); + TF_LITE_MICRO_EXPECT(nullptr != allocator); + + tflite::ScratchBufferHandle* scratch_buffer_handles = nullptr; + + // Allocate for model 1. We use ComplexMockModel here to cover the code path + // allocatig variables. + const tflite::Model* model1 = tflite::testing::GetComplexMockModel(); + tflite::SubgraphAllocations* subgraph_allocations1 = + allocator->StartModelAllocation(model1); + TF_LITE_MICRO_EXPECT(nullptr != subgraph_allocations1); + TF_LITE_MICRO_EXPECT_EQ( + kTfLiteOk, allocator->FinishModelAllocation(model1, subgraph_allocations1, + &scratch_buffer_handles)); + const size_t single_model_used_bytes = allocator->used_bytes(); + + // Allocate for model 2. + const tflite::Model* model2 = tflite::testing::GetComplexMockModel(); + tflite::SubgraphAllocations* subgraph_allocations2 = + allocator->StartModelAllocation(model2); + TF_LITE_MICRO_EXPECT(nullptr != subgraph_allocations2); + TF_LITE_MICRO_EXPECT_EQ( + kTfLiteOk, allocator->FinishModelAllocation(model2, subgraph_allocations2, + &scratch_buffer_handles)); + + // Allocation for two instances of the same model takes less memory as `head` + // of the arena is reused. + TF_LITE_MICRO_EXPECT_LE(allocator->used_bytes(), 2 * single_model_used_bytes); +} + +TF_LITE_MICRO_TEST(TestMultiTenantAllocationInTwoSeparateArenas) { + // The `OpResolver` is shared among different models in this test for + // simplicity but in practice you could have different `OpResolver`. + tflite::testing::TestingOpResolver op_resolver; + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, + tflite::testing::GetTestingOpResolver(op_resolver)); + + // Create a shared allocator. 
+ constexpr size_t arena_size = 4096; + uint8_t persistent_arena[arena_size]; + uint8_t non_persistent_arena[arena_size]; + + tflite::MicroAllocator* allocator = tflite::MicroAllocator::Create( + persistent_arena, arena_size, non_persistent_arena, arena_size); + TF_LITE_MICRO_EXPECT(nullptr != allocator); + tflite::ScratchBufferHandle* scratch_buffer_handles = nullptr; + + // Allocate for model 1. We use ComplexMockModel here to cover the code path + // allocatig variables. + const tflite::Model* model1 = tflite::testing::GetComplexMockModel(); + tflite::SubgraphAllocations* subgraph_allocations1 = + allocator->StartModelAllocation(model1); + TF_LITE_MICRO_EXPECT(nullptr != subgraph_allocations1); + TF_LITE_MICRO_EXPECT_EQ( + kTfLiteOk, allocator->FinishModelAllocation(model1, subgraph_allocations1, + &scratch_buffer_handles)); + const size_t single_model_used_bytes = allocator->used_bytes(); + + // Allocate for model 2. + const tflite::Model* model2 = tflite::testing::GetComplexMockModel(); + tflite::SubgraphAllocations* subgraph_allocations2 = + allocator->StartModelAllocation(model2); + TF_LITE_MICRO_EXPECT(nullptr != subgraph_allocations2); + TF_LITE_MICRO_EXPECT_EQ( + kTfLiteOk, allocator->FinishModelAllocation(model2, subgraph_allocations2, + &scratch_buffer_handles)); + + // Allocation for two instances of the same model takes less memory as `head` + // of the arena is reused. 
+ TF_LITE_MICRO_EXPECT_LE(allocator->used_bytes(), 2 * single_model_used_bytes); +} + +TF_LITE_MICRO_TEST(TestAllocationForModelsWithBranches) { + const tflite::Model* model = tflite::testing::GetSimpleModelWithBranch(); + tflite::ScratchBufferHandle* scratch_buffer_handles = nullptr; + tflite::testing::TestingOpResolver op_resolver; + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, + tflite::testing::GetTestingOpResolver(op_resolver)); + constexpr size_t arena_size = 4096; + uint8_t arena[arena_size]; + tflite::MicroAllocator* allocator = + tflite::MicroAllocator::Create(arena, arena_size); + TF_LITE_MICRO_EXPECT(nullptr != allocator); + tflite::SubgraphAllocations* subgraph_allocations = + allocator->StartModelAllocation(model); + TF_LITE_MICRO_EXPECT(nullptr != subgraph_allocations); + TF_LITE_MICRO_EXPECT_EQ( + kTfLiteOk, allocator->FinishModelAllocation(model, subgraph_allocations, + &scratch_buffer_handles)); + + int8_t* start = + tflite::micro::GetTensorData(&subgraph_allocations[0].tensors[0]); + // Check test_helpers.cc BuildSimpleModelWithBranch for model structure. + // t0 is the first tensor, so place it in offset 0. + TF_LITE_MICRO_EXPECT_EQ(0, tflite::micro::GetTensorData( + &subgraph_allocations[0].tensors[0]) - + start); + // bytes = 2 * 2 * 3 * sizeof(float32) = 48, same for other tensors. + size_t buffer_size; + TF_LITE_MICRO_EXPECT_EQ( + kTfLiteOk, tflite::TfLiteEvalTensorByteLength( + &subgraph_allocations[0].tensors[0], &buffer_size)); + TF_LITE_MICRO_EXPECT_EQ(static_cast(48), buffer_size); + // t1 can't reuse any memory, as n0 requires both t0 and t1. + TF_LITE_MICRO_EXPECT_EQ(96, tflite::micro::GetTensorData( + &subgraph_allocations[0].tensors[1]) - + start); + // t2 can't reuse any memory, as n1 requires both t0 and t2. Also n2 requires + // both t1 and t2. + TF_LITE_MICRO_EXPECT_EQ(48, tflite::micro::GetTensorData( + &subgraph_allocations[0].tensors[2]) - + start); + // t3 reuses the same memory from t0 as t0 is not an input to any node. 
+ TF_LITE_MICRO_EXPECT_EQ(0, tflite::micro::GetTensorData( + &subgraph_allocations[0].tensors[3]) - + start); + + // SimpleModelWithBranch has 3 operators: + tflite::testing::VerifyRegistrationAndNodeAllocation(subgraph_allocations, + /*count=*/3, + /*num_subgraphs=*/1); +} + +TF_LITE_MICRO_TEST(TestAllocationForComplexModelAllocation) { + const tflite::Model* model = tflite::testing::GetComplexMockModel(); + tflite::ScratchBufferHandle* scratch_buffer_handles = nullptr; + tflite::testing::TestingOpResolver op_resolver; + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, + tflite::testing::GetTestingOpResolver(op_resolver)); + constexpr size_t arena_size = 2048; + uint8_t arena[arena_size]; + tflite::MicroAllocator* allocator = + tflite::MicroAllocator::Create(arena, arena_size); + TF_LITE_MICRO_EXPECT(nullptr != allocator); + tflite::SubgraphAllocations* subgraph_allocations = + allocator->StartModelAllocation(model); + TF_LITE_MICRO_EXPECT(nullptr != subgraph_allocations); + TF_LITE_MICRO_EXPECT_EQ( + kTfLiteOk, allocator->FinishModelAllocation(model, subgraph_allocations, + &scratch_buffer_handles)); + + size_t model_tensor_size = tflite::testing::GetModelTensorCount(model); + TF_LITE_MICRO_EXPECT_EQ(static_cast(10), model_tensor_size); + + // NOTE: Tensor indexes match the values in GetComplexMockModel(). 
+ tflite::testing::AllocateAndVerifyMockTensor(model, allocator, + subgraph_allocations, 0); + tflite::testing::AllocateAndVerifyMockTensor(model, allocator, + subgraph_allocations, 1, + /*is_variable=*/ + true); + tflite::testing::AllocateAndVerifyMockWeightTensor(model, allocator, + subgraph_allocations, 2); + tflite::testing::AllocateAndVerifyMockTensor(model, allocator, + subgraph_allocations, 3); + tflite::testing::AllocateAndVerifyMockTensor(model, allocator, + subgraph_allocations, 4, + /*is_variable=*/ + true); + tflite::testing::AllocateAndVerifyMockWeightTensor(model, allocator, + subgraph_allocations, 5); + tflite::testing::AllocateAndVerifyMockTensor(model, allocator, + subgraph_allocations, 6); + tflite::testing::AllocateAndVerifyMockTensor(model, allocator, + subgraph_allocations, 7, + /*is_variable=*/ + true); + tflite::testing::AllocateAndVerifyMockWeightTensor(model, allocator, + subgraph_allocations, 8); + tflite::testing::AllocateAndVerifyMockTensor(model, allocator, + subgraph_allocations, 9); + + // // Ensure that variable tensors have unique address + tflite::testing::EnsureUniqueVariableTensorBuffer( + model, subgraph_allocations[0].tensors, 1); + tflite::testing::EnsureUniqueVariableTensorBuffer( + model, subgraph_allocations[0].tensors, 4); + tflite::testing::EnsureUniqueVariableTensorBuffer( + model, subgraph_allocations[0].tensors, 7); + + // ComplexMockModel has 3 operators: + tflite::testing::VerifyRegistrationAndNodeAllocation(subgraph_allocations, + /*count=*/3, + /*num_subgraphs=*/1); +} + +TF_LITE_MICRO_TEST(OfflinePlannerBranchesAllOnline) { + int version = 1; + int subgraph = 0; + constexpr int number_tensors = 4; + tflite::testing::TestingOpResolver op_resolver; + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, + tflite::testing::GetTestingOpResolver(op_resolver)); + const int32_t metadata_buffer[tflite::testing::kOfflinePlannerHeaderSize + + number_tensors] = {version, subgraph, + number_tensors, // header + // memory offsets: + -1, -1, -1, 
-1}; + + // The structure is identical to the one in + // TestAllocationForModelsWithBranches + int number_connections = 3; + tflite::testing::NodeConnection node_list[3] = {{ + {0}, // input + {1} // output + }, + { + {0}, // input + {2} // output + }, + { + {1, 2}, // input1, input2 + {3} // output + }}; + + const tflite::Model* model = tflite::testing::GetModelWithOfflinePlanning( + number_tensors, metadata_buffer, node_list, number_connections); + + tflite::ScratchBufferHandle* scratch_buffer_handles = nullptr; + + constexpr size_t arena_size = 4096; + uint8_t arena[arena_size]; + tflite::MicroAllocator* allocator = + tflite::MicroAllocator::Create(arena, arena_size); + + tflite::SubgraphAllocations* subgraph_allocations = + allocator->StartModelAllocation(model); + TF_LITE_MICRO_EXPECT(nullptr != subgraph_allocations); + TF_LITE_MICRO_EXPECT_EQ( + kTfLiteOk, allocator->FinishModelAllocation(model, subgraph_allocations, + &scratch_buffer_handles)); + + // Since all of the tensors are online planned and the model structure is + // identical to that in TestAllocationForModelsWithBranches, + // the offsets be should identical to that test. 
+ int8_t* start = + tflite::micro::GetTensorData(&subgraph_allocations[0].tensors[0]); + TF_LITE_MICRO_EXPECT_EQ(0, tflite::micro::GetTensorData( + &subgraph_allocations[0].tensors[0]) - + start); + + size_t buffer_size; + TF_LITE_MICRO_EXPECT_EQ( + kTfLiteOk, tflite::TfLiteEvalTensorByteLength( + &subgraph_allocations[0].tensors[0], &buffer_size)); + TF_LITE_MICRO_EXPECT_EQ(static_cast(48), buffer_size); + TF_LITE_MICRO_EXPECT_EQ(96, tflite::micro::GetTensorData( + &subgraph_allocations[0].tensors[1]) - + start); + TF_LITE_MICRO_EXPECT_EQ(48, tflite::micro::GetTensorData( + &subgraph_allocations[0].tensors[2]) - + start); + TF_LITE_MICRO_EXPECT_EQ(0, tflite::micro::GetTensorData( + &subgraph_allocations[0].tensors[3]) - + start); +} + +TF_LITE_MICRO_TEST(OfflinePlannerBasic) { + constexpr int number_tensors = 4; + tflite::testing::TestingOpResolver op_resolver; + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, + tflite::testing::GetTestingOpResolver(op_resolver)); + const int32_t metadata_buffer[tflite::testing::kOfflinePlannerHeaderSize + + number_tensors] = {1, 0, number_tensors, + /*t0=*/0, + /*t1=*/48, + /*t2=*/0, + /*t3=*/48}; + constexpr int number_connections = 3; + tflite::testing::NodeConnection node_list[number_connections] = { + {/*input=*/{tflite::testing::t0}, + /*output=*/{tflite::testing::t1}}, + {/*input=*/{tflite::testing::t1}, + /*output=*/{tflite::testing::t2}}, + {/*input=*/{tflite::testing::t2}, + /*output=*/{tflite::testing::t3}}}; + + const tflite::Model* model = tflite::testing::GetModelWithOfflinePlanning( + number_tensors, metadata_buffer, node_list, number_connections); + + tflite::ScratchBufferHandle* scratch_buffer_handles = nullptr; + constexpr size_t arena_size = 4096; + uint8_t arena[arena_size]; + tflite::MicroAllocator* allocator = + tflite::MicroAllocator::Create(arena, arena_size); + + tflite::SubgraphAllocations* subgraph_allocations = + allocator->StartModelAllocation(model); + TF_LITE_MICRO_EXPECT(nullptr != subgraph_allocations); + 
TF_LITE_MICRO_EXPECT_EQ( + kTfLiteOk, allocator->FinishModelAllocation(model, subgraph_allocations, + &scratch_buffer_handles)); + + int8_t* start = + tflite::micro::GetTensorData(&subgraph_allocations[0].tensors[0]); + TF_LITE_MICRO_EXPECT_EQ(0, tflite::micro::GetTensorData( + &subgraph_allocations[0].tensors[0]) - + start); + TF_LITE_MICRO_EXPECT_EQ(48, tflite::micro::GetTensorData( + &subgraph_allocations[0].tensors[1]) - + start); + TF_LITE_MICRO_EXPECT_EQ(0, tflite::micro::GetTensorData( + &subgraph_allocations[0].tensors[2]) - + start); + TF_LITE_MICRO_EXPECT_EQ(48, tflite::micro::GetTensorData( + &subgraph_allocations[0].tensors[3]) - + start); +} + +TF_LITE_MICRO_TEST(OfflinePlannerOverlappingAllocation) { + constexpr int number_tensors = 4; + tflite::testing::TestingOpResolver op_resolver; + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, + tflite::testing::GetTestingOpResolver(op_resolver)); + const int32_t metadata_buffer[tflite::testing::kOfflinePlannerHeaderSize + + number_tensors] = {/*version=*/1, + /*subgraph=*/0, + number_tensors, + /*t0=*/0, + /*t1=*/0, + /*t2=*/48, + /*t3=*/-1}; + + int number_connections = 2; + tflite::testing::NodeConnection node_list[2] = { + {/*input, scratch=*/{tflite::testing::t0, tflite::testing::t1}, + /*output=*/{tflite::testing::t2}}, + {/*input=*/{tflite::testing::t2}, + /*output=*/{tflite::testing::t3}}, + }; + + const tflite::Model* model = tflite::testing::GetModelWithOfflinePlanning( + number_tensors, metadata_buffer, node_list, number_connections); + + tflite::ScratchBufferHandle* scratch_buffer_handles = nullptr; + constexpr size_t arena_size = 4096; + uint8_t arena[arena_size]; + tflite::MicroAllocator* allocator = + tflite::MicroAllocator::Create(arena, arena_size); + + tflite::SubgraphAllocations* subgraph_allocations = + allocator->StartModelAllocation(model); + TF_LITE_MICRO_EXPECT(nullptr != subgraph_allocations); + TF_LITE_MICRO_EXPECT_EQ( + kTfLiteOk, allocator->FinishModelAllocation(model, subgraph_allocations, + 
&scratch_buffer_handles)); + + int8_t* start = + tflite::micro::GetTensorData(&subgraph_allocations[0].tensors[0]); + TF_LITE_MICRO_EXPECT_EQ(0, tflite::micro::GetTensorData( + &subgraph_allocations[0].tensors[0]) - + start); + TF_LITE_MICRO_EXPECT_EQ(0, tflite::micro::GetTensorData( + &subgraph_allocations[0].tensors[1]) - + start); + TF_LITE_MICRO_EXPECT_EQ(48, tflite::micro::GetTensorData( + &subgraph_allocations[0].tensors[2]) - + start); + TF_LITE_MICRO_EXPECT_EQ(0, tflite::micro::GetTensorData( + &subgraph_allocations[0].tensors[3]) - + start); + // TF_LITE_MICRO_EXPECT_EQ(static_cast(48), context.tensors[0].bytes); +} + +TF_LITE_MICRO_TEST(OfflinePlannerOfflineOnline) { + constexpr int number_tensors = 5; + tflite::testing::TestingOpResolver op_resolver; + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, + tflite::testing::GetTestingOpResolver(op_resolver)); + const int32_t metadata_buffer[tflite::testing::kOfflinePlannerHeaderSize + + number_tensors] = {/*version=*/1, + /*subgraph=*/0, + number_tensors, + /*t0=*/0, + /*t1=*/48, + /*t2=*/-1, + /*t3=*/0, + /*t4=*/-1}; + + constexpr int number_connections = 2; + tflite::testing::NodeConnection node_list[number_connections] = { + { + /*input, scratch=*/{tflite::testing::t0, tflite::testing::t1}, + /*output=*/{tflite::testing::t2}, + }, + { + /*input=*/{tflite::testing::t2}, + /*output1, output2=*/{tflite::testing::t3, tflite::testing::t4}, + }, + }; + + const tflite::Model* model = tflite::testing::GetModelWithOfflinePlanning( + number_tensors, metadata_buffer, node_list, number_connections); + + tflite::ScratchBufferHandle* scratch_buffer_handles = nullptr; + constexpr size_t arena_size = 4096; + uint8_t arena[arena_size]; + tflite::MicroAllocator* allocator = + tflite::MicroAllocator::Create(arena, arena_size); + + tflite::SubgraphAllocations* subgraph_allocations = + allocator->StartModelAllocation(model); + TF_LITE_MICRO_EXPECT(nullptr != subgraph_allocations); + TF_LITE_MICRO_EXPECT_EQ( + kTfLiteOk, 
allocator->FinishModelAllocation(model, subgraph_allocations, + &scratch_buffer_handles)); + + int8_t* start = + tflite::micro::GetTensorData(&subgraph_allocations[0].tensors[0]); + TF_LITE_MICRO_EXPECT_EQ(0, tflite::micro::GetTensorData( + &subgraph_allocations[0].tensors[0]) - + start); + TF_LITE_MICRO_EXPECT_EQ(48, tflite::micro::GetTensorData( + &subgraph_allocations[0].tensors[1]) - + start); + TF_LITE_MICRO_EXPECT_EQ(96, tflite::micro::GetTensorData( + &subgraph_allocations[0].tensors[2]) - + start); + TF_LITE_MICRO_EXPECT_EQ(48, tflite::micro::GetTensorData( + &subgraph_allocations[0].tensors[4]) - + start); + TF_LITE_MICRO_EXPECT_EQ(0, tflite::micro::GetTensorData( + &subgraph_allocations[0].tensors[3]) - + start); +} + +TF_LITE_MICRO_TEST(TestAllocatePersistentTfLiteTensor) { + const tflite::Model* model = tflite::GetModel(kTestConvModelData); + constexpr size_t arena_size = 1024 * 12; + uint8_t arena[arena_size]; + tflite::MicroAllocator* allocator = + tflite::MicroAllocator::Create(arena, arena_size); + TF_LITE_MICRO_EXPECT(allocator != nullptr); + + TfLiteTensor* tensor1 = allocator->AllocatePersistentTfLiteTensor( + model, /*subgraph_allocations=*/nullptr, /*tensor_index=*/1, + /*subgraph_index=*/0); + TF_LITE_MICRO_EXPECT(tensor1 != nullptr); + TF_LITE_MICRO_EXPECT(tensor1->quantization.params != nullptr); + TF_LITE_MICRO_EXPECT_FALSE(tensor1->is_variable); + + TfLiteTensor* tensor2 = allocator->AllocatePersistentTfLiteTensor( + model, /*subgraph_allocations=*/nullptr, /*tensor_index=*/2, + /*subgraph_index=*/0); + TF_LITE_MICRO_EXPECT(tensor2 != nullptr); + TF_LITE_MICRO_EXPECT(tensor2->quantization.params != nullptr); + TF_LITE_MICRO_EXPECT_FALSE(tensor2->is_variable); + + // The address of tensor1 should be higher than the address of tensor2 since + // persistent allocations take place in the tail which grows downward. 
+ TF_LITE_MICRO_EXPECT_GT(tensor1, tensor2); +} + +TF_LITE_MICRO_TEST(TestAllocateSingleTempTfLiteTensor) { + const tflite::Model* model = tflite::testing::GetSimpleMockModel(); + constexpr size_t arena_size = 1024; + uint8_t arena[arena_size]; + tflite::MicroAllocator* allocator = + tflite::MicroAllocator::Create(arena, arena_size); + TF_LITE_MICRO_EXPECT(allocator != nullptr); + + TfLiteTensor* tensor1 = allocator->AllocateTempTfLiteTensor( + model, /*subgraph_allocations=*/nullptr, /*tensor_index=*/1, + /*subgraph_index=*/0); + TF_LITE_MICRO_EXPECT(tensor1 != nullptr); +} + +TF_LITE_MICRO_TEST(TestAllocateChainOfTfLiteTensor) { + const tflite::Model* model = tflite::testing::GetSimpleMockModel(); + constexpr size_t arena_size = 1024; + uint8_t arena[arena_size]; + tflite::MicroAllocator* allocator = + tflite::MicroAllocator::Create(arena, arena_size); + TF_LITE_MICRO_EXPECT(allocator != nullptr); + + TfLiteTensor* tensor1 = allocator->AllocateTempTfLiteTensor( + model, /*subgraph_allocations=*/nullptr, /*tensor_index=*/1, + /*subgraph_index=*/0); + TF_LITE_MICRO_EXPECT(tensor1 != nullptr); + + TfLiteTensor* tensor2 = allocator->AllocateTempTfLiteTensor( + model, /*subgraph_allocations=*/nullptr, /*tensor_index=*/2, + /*subgraph_index=*/0); + TF_LITE_MICRO_EXPECT(tensor2 != nullptr); + + // The address of tensor2 should be higher than the address of tensor1 + // (chained allocations): + TF_LITE_MICRO_EXPECT_GT(tensor2, tensor1); +} + +TF_LITE_MICRO_TEST(TestAllocateAndDeallocateChainOfTfLiteTensor) { + const tflite::Model* model = tflite::testing::GetSimpleMockModel(); + constexpr size_t arena_size = 1024; + uint8_t arena[arena_size]; + tflite::MicroAllocator* allocator = + tflite::MicroAllocator::Create(arena, arena_size); + TF_LITE_MICRO_EXPECT(allocator != nullptr); + + TfLiteTensor* tensor1 = allocator->AllocateTempTfLiteTensor( + model, /*subgraph_allocations=*/nullptr, /*tensor_index=*/1, + /*subgraph_index=*/0); + TF_LITE_MICRO_EXPECT(tensor1 != nullptr); 
+ + TfLiteTensor* tensor2 = allocator->AllocateTempTfLiteTensor( + model, /*subgraph_allocations=*/nullptr, /*tensor_index=*/2, + /*subgraph_index=*/0); + TF_LITE_MICRO_EXPECT(tensor2 != nullptr); + + // The address of tensor2 should be higher than the address of tensor1 + // (chained allocations): + TF_LITE_MICRO_EXPECT_GT(tensor2, tensor1); + + // Deallocate only one temp TfLiteTensor does not deallocate all temp buffers. + allocator->DeallocateTempTfLiteTensor(tensor1); + TF_LITE_MICRO_EXPECT_FALSE(allocator->IsAllTempDeallocated()); + + // Deallocate both temp TfLiteTensor deallocate all temp buffers. + allocator->DeallocateTempTfLiteTensor(tensor2); + TF_LITE_MICRO_EXPECT_TRUE(allocator->IsAllTempDeallocated()); +} + +TF_LITE_MICRO_TEST(TestAllocateAndDeallocateTempBuffer) { + constexpr size_t arena_size = 1024; + uint8_t arena[arena_size]; + tflite::MicroAllocator* allocator = + tflite::MicroAllocator::Create(arena, arena_size); + TF_LITE_MICRO_EXPECT(allocator != nullptr); + TF_LITE_MICRO_EXPECT_TRUE(allocator->IsAllTempDeallocated()); + uint8_t* buffer1 = + allocator->AllocateTempBuffer(10, tflite::MicroArenaBufferAlignment()); + TF_LITE_MICRO_EXPECT(buffer1 != nullptr); + TF_LITE_MICRO_EXPECT_FALSE(allocator->IsAllTempDeallocated()); + allocator->DeallocateTempBuffer(buffer1); + TF_LITE_MICRO_EXPECT_TRUE(allocator->IsAllTempDeallocated()); +} + +TF_LITE_MICRO_TEST(TestAllocateTfLiteTensorWithReset) { + const tflite::Model* model = tflite::testing::GetSimpleMockModel(); + constexpr size_t arena_size = 1024; + uint8_t arena[arena_size]; + tflite::MicroAllocator* allocator = + tflite::MicroAllocator::Create(arena, arena_size); + TF_LITE_MICRO_EXPECT(allocator != nullptr); + + TfLiteTensor* tensor1 = allocator->AllocateTempTfLiteTensor( + model, /*subgraph_allocations=*/nullptr, /*tensor_index=*/1, + /*subgraph_index=*/0); + TF_LITE_MICRO_EXPECT(tensor1 != nullptr); + + allocator->DeallocateTempTfLiteTensor(tensor1); + + allocator->ResetTempAllocations(); + + 
TfLiteTensor* tensor2 = allocator->AllocateTempTfLiteTensor( + model, /*subgraph_allocations=*/nullptr, /*tensor_index=*/2, + /*subgraph_index=*/0); + TF_LITE_MICRO_EXPECT(tensor2 != nullptr); + + // The address of tensor2 should be equal than the address of tensor1 since + // allocations were not chained: + TF_LITE_MICRO_EXPECT(tensor2 == tensor1); +} + +TF_LITE_MICRO_TEST(TestOperatorInputsNotInSubgraphInputs) { + constexpr int number_tensors = 5; + tflite::testing::TestingOpResolver op_resolver; + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, + tflite::testing::GetTestingOpResolver(op_resolver)); + const int32_t metadata_buffer[tflite::testing::kOfflinePlannerHeaderSize + + number_tensors] = {/*version=*/1, + /*subgraph=*/0, + number_tensors, + /*t0=*/0, + /*t1=*/0, + /*t2=*/0, + /*t3=*/48, + /*t4=*/-1}; + + constexpr int number_connections = 2; + tflite::testing::NodeConnection node_list[number_connections] = { + {// t0: input (actual input part of subgraph inputs as + // well as operator inputs) + // t1: scratch1 (only in operator inputs) + // t2: scratch2 (only in operator inputs) + {tflite::testing::t0, tflite::testing::t1, tflite::testing::t2}, + /*t3: output=*/{tflite::testing::t3}}, + {/*t3: input=*/{tflite::testing::t3}, + /*t4: output=*/{tflite::testing::t4}}, + }; + + const tflite::Model* model = tflite::testing::GetModelWithOfflinePlanning( + number_tensors, metadata_buffer, node_list, number_connections, + /*Only first tensor (t0) is in subgraph input list=*/1); + + tflite::ScratchBufferHandle* scratch_buffer_handles = nullptr; + constexpr size_t arena_size = 4096; + uint8_t arena[arena_size]; + tflite::MicroAllocator* allocator = + tflite::MicroAllocator::Create(arena, arena_size); + + tflite::SubgraphAllocations* subgraph_allocations = + allocator->StartModelAllocation(model); + TF_LITE_MICRO_EXPECT(nullptr != subgraph_allocations); + TF_LITE_MICRO_EXPECT_EQ( + kTfLiteOk, allocator->FinishModelAllocation(model, subgraph_allocations, + 
&scratch_buffer_handles)); + + int8_t* start = + tflite::micro::GetTensorData(&subgraph_allocations[0].tensors[0]); + TF_LITE_MICRO_EXPECT_EQ(0, tflite::micro::GetTensorData( + &subgraph_allocations[0].tensors[0]) - + start); + TF_LITE_MICRO_EXPECT_EQ(0, tflite::micro::GetTensorData( + &subgraph_allocations[0].tensors[1]) - + start); + TF_LITE_MICRO_EXPECT_EQ(0, tflite::micro::GetTensorData( + &subgraph_allocations[0].tensors[2]) - + start); + TF_LITE_MICRO_EXPECT_EQ(48, tflite::micro::GetTensorData( + &subgraph_allocations[0].tensors[3]) - + start); + TF_LITE_MICRO_EXPECT_EQ(0, tflite::micro::GetTensorData( + &subgraph_allocations[0].tensors[4]) - + start); +} + +TF_LITE_MICRO_TEST(TestTypicalFirstOpAndSecondOpWithScratchTensors) { + constexpr int number_tensors = 6; + tflite::testing::TestingOpResolver op_resolver; + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, + tflite::testing::GetTestingOpResolver(op_resolver)); + const int32_t metadata_buffer[tflite::testing::kOfflinePlannerHeaderSize + + number_tensors] = {/*version=*/1, + /*subgraph=*/0, + number_tensors, + /*t0=*/0, + /*t1=*/0, + /*t2=*/0, + /*t3=*/0, + /*t4=*/48, + /*t5=*/-1}; + + constexpr int number_connections = 3; + tflite::testing::NodeConnection node_list[number_connections] = { + {/*t0: input (subgraph and operator input)=*/{tflite::testing::t0}, + /*t1: output=*/{tflite::testing::t1}}, + {// t1: input + // t2: scratch1 (only in operator inputs) + // t3: scratch2 (only in operator inputs) + {tflite::testing::t1, tflite::testing::t2, tflite::testing::t3}, + + /*t4: output=*/{tflite::testing::t4}}, + {/*t4: input=*/{tflite::testing::t4}, + /*t5: output=*/{tflite::testing::t5}}, + }; + + const tflite::Model* model = tflite::testing::GetModelWithOfflinePlanning( + number_tensors, metadata_buffer, node_list, number_connections, + /*Only first tensor (t0) is in subgraph input list=*/1); + + tflite::ScratchBufferHandle* scratch_buffer_handles = nullptr; + constexpr size_t arena_size = 4096; + uint8_t 
arena[arena_size]; + tflite::MicroAllocator* allocator = + tflite::MicroAllocator::Create(arena, arena_size); + + tflite::SubgraphAllocations* subgraph_allocations = + allocator->StartModelAllocation(model); + TF_LITE_MICRO_EXPECT(nullptr != subgraph_allocations); + TF_LITE_MICRO_EXPECT_EQ( + kTfLiteOk, allocator->FinishModelAllocation(model, subgraph_allocations, + &scratch_buffer_handles)); + + int8_t* start = + tflite::micro::GetTensorData(&subgraph_allocations[0].tensors[0]); + TF_LITE_MICRO_EXPECT_EQ(0, tflite::micro::GetTensorData( + &subgraph_allocations[0].tensors[0]) - + start); + TF_LITE_MICRO_EXPECT_EQ(0, tflite::micro::GetTensorData( + &subgraph_allocations[0].tensors[1]) - + start); + TF_LITE_MICRO_EXPECT_EQ(0, tflite::micro::GetTensorData( + &subgraph_allocations[0].tensors[2]) - + start); + TF_LITE_MICRO_EXPECT_EQ(0, tflite::micro::GetTensorData( + &subgraph_allocations[0].tensors[3]) - + start); + TF_LITE_MICRO_EXPECT_EQ(48, tflite::micro::GetTensorData( + &subgraph_allocations[0].tensors[4]) - + start); + TF_LITE_MICRO_EXPECT_EQ(0, tflite::micro::GetTensorData( + &subgraph_allocations[0].tensors[5]) - + start); +} + +TF_LITE_MICRO_TEST(TestModelWithUnusedTensors) { + tflite::testing::TestingOpResolver op_resolver; + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, + tflite::testing::GetTestingOpResolver(op_resolver)); + + const tflite::Model* model = tflite::testing::GetModelWithUnusedInputs(); + + tflite::ScratchBufferHandle* scratch_buffer_handles = nullptr; + constexpr size_t arena_size = 4096; + uint8_t arena[arena_size]; + tflite::MicroAllocator* allocator = + tflite::MicroAllocator::Create(arena, arena_size); + + tflite::SubgraphAllocations* subgraph_allocations = + allocator->StartModelAllocation(model); + TF_LITE_MICRO_EXPECT(nullptr != subgraph_allocations); + TF_LITE_MICRO_EXPECT_EQ( + kTfLiteOk, allocator->FinishModelAllocation(model, subgraph_allocations, + &scratch_buffer_handles)); + + // Unused input tensor should not occupy any space. 
+ int8_t* start = + tflite::micro::GetTensorData(&subgraph_allocations[0].tensors[2]); + TF_LITE_MICRO_EXPECT_EQ(64, tflite::micro::GetTensorData( + &subgraph_allocations[0].tensors[0]) - + start); + TF_LITE_MICRO_EXPECT_EQ(0, tflite::micro::GetTensorData( + &subgraph_allocations[0].tensors[1]) - + start); + TF_LITE_MICRO_EXPECT_EQ(0, tflite::micro::GetTensorData( + &subgraph_allocations[0].tensors[2]) - + start); + // Unused tensor should not occupy any space. + TF_LITE_MICRO_EXPECT_EQ(0, tflite::micro::GetTensorData( + &subgraph_allocations[0].tensors[3]) - + start); +} + +TF_LITE_MICRO_TEST(TestModelWithUnusedOperatorOutputs) { + tflite::testing::TestingOpResolver op_resolver; + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, + tflite::testing::GetTestingOpResolver(op_resolver)); + + const tflite::Model* model = + tflite::testing::GetModelWithUnusedOperatorOutputs(); + + tflite::ScratchBufferHandle* scratch_buffer_handles = nullptr; + constexpr size_t arena_size = 4096; + uint8_t arena[arena_size]; + tflite::MicroAllocator* allocator = + tflite::MicroAllocator::Create(arena, arena_size); + + tflite::SubgraphAllocations* subgraph_allocations = + allocator->StartModelAllocation(model); + TF_LITE_MICRO_EXPECT(nullptr != subgraph_allocations); + TF_LITE_MICRO_EXPECT_EQ( + kTfLiteOk, allocator->FinishModelAllocation(model, subgraph_allocations, + &scratch_buffer_handles)); + + // Unused output tensor should have its own allocation. + int8_t* start = + tflite::micro::GetTensorData(&subgraph_allocations[0].tensors[1]); + TF_LITE_MICRO_EXPECT_EQ(64, tflite::micro::GetTensorData( + &subgraph_allocations[0].tensors[0]) - + start); + TF_LITE_MICRO_EXPECT_EQ(0, tflite::micro::GetTensorData( + &subgraph_allocations[0].tensors[1]) - + start); +} + +// Manually create an offline plan for the SimpleMockModel. Pass that into the +// interpreter and confirm that the eval tensors' offsets are exactly what was +// specified in the offline plan. 
+TF_LITE_MICRO_TEST(TestMockModelAllocationByNonPersistentMemoryPlannerShim) { + const tflite::Model* model = tflite::testing::GetSimpleMockModel(); + + // The simple model has three activation tensors 0, 2, 3, which corresponding + // to buffer request 0, 1, 2. + constexpr size_t kBufferEntriesCount = 3; + constexpr size_t kBufferPlanSize = + sizeof(tflite::BufferPlan) + + (kBufferEntriesCount) * sizeof(tflite::BufferDescriptor); + // The offsets of buffers are chosen to be very different from what the + // default greedy memory planner would select to reflect that buffers does NOT + // need to start at offset 0 and in contiguous order. The offsets in a given + // memory plan just need to meet the 4-byte buffer alignment requirement from + // the framework side. The memory plan provider guarantees the correctness + // of the plan for the model. + constexpr int32_t kOffset0 = 200; + constexpr int32_t kOffset1 = 64; + constexpr int32_t kOffset2 = 120; + // Allocate a memory plan buffer first b/c the struct BufferPlan has a + // flexible member array. 
+ uint8_t buffer_plan_arena[kBufferPlanSize]; + tflite::BufferPlan* non_persistent_buffer_plan = + reinterpret_cast(buffer_plan_arena); + non_persistent_buffer_plan->buffer_count = kBufferEntriesCount; + non_persistent_buffer_plan->buffer_plan_entries[0].offset = kOffset0; + non_persistent_buffer_plan->buffer_plan_entries[1].offset = kOffset1; + non_persistent_buffer_plan->buffer_plan_entries[2].offset = kOffset2; + + tflite::NonPersistentMemoryPlannerShim planner(non_persistent_buffer_plan); + + tflite::ScratchBufferHandle* scratch_buffer_handles = nullptr; + tflite::testing::TestingOpResolver op_resolver; + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, + tflite::testing::GetTestingOpResolver(op_resolver)); + constexpr size_t arena_size = 1024; + uint8_t arena[arena_size]; + tflite::MicroAllocator* allocator = + tflite::MicroAllocator::Create(arena, arena_size, &planner); + TF_LITE_MICRO_EXPECT(allocator != nullptr); + tflite::SubgraphAllocations* subgraph_allocations = + allocator->StartModelAllocation(model); + TF_LITE_MICRO_EXPECT(nullptr != subgraph_allocations); + TF_LITE_MICRO_EXPECT_EQ( + kTfLiteOk, allocator->FinishModelAllocation(model, subgraph_allocations, + &scratch_buffer_handles)); + + size_t model_tensor_size = tflite::testing::GetModelTensorCount(model); + TF_LITE_MICRO_EXPECT_EQ(static_cast(4), model_tensor_size); + tflite::testing::AllocateAndVerifyMockWeightTensor(model, allocator, + subgraph_allocations, 1); + + TfLiteEvalTensor* eval_tensors = subgraph_allocations[0].tensors; + + // Offset is relative to the arena after the buffer alignment adjustment which + // happens when MicroAllocator is created. 
+ uint8_t* aligned_arena = + tflite::AlignPointerUp(arena, tflite::MicroArenaBufferAlignment()); + + TF_LITE_MICRO_EXPECT_TRUE(static_cast(eval_tensors[0].data.data) == + (aligned_arena + kOffset0)); + TF_LITE_MICRO_EXPECT_TRUE(static_cast(eval_tensors[2].data.data) == + (aligned_arena + kOffset1)); + TF_LITE_MICRO_EXPECT_TRUE(static_cast(eval_tensors[3].data.data) == + (aligned_arena + kOffset2)); + + // SimpleMockModel has 2 operators: + tflite::testing::VerifyRegistrationAndNodeAllocation(subgraph_allocations, + /*count=*/2, + /*num_subgraphs=*/1); +} + +TF_LITE_MICRO_TEST(TestMultiSubgraphNumScratchAllocations) { + // Any test model with multiple subgraphs will suffice + const tflite::Model* model = + tflite::testing::GetSimpleModelWithNullInputsAndOutputs(); + + constexpr size_t arena_size = 2048 * 2; + uint8_t arena[arena_size]; + + tflite::MicroAllocator* allocator = nullptr; + tflite::SubgraphAllocations* subgraph_allocations = nullptr; + tflite::ScratchBufferHandle* scratch_buffer_handles = nullptr; + + // First iteration: no scratch buffers + allocator = tflite::MicroAllocator::Create(arena, arena_size); + TF_LITE_MICRO_EXPECT(nullptr != allocator); + + subgraph_allocations = allocator->StartModelAllocation(model); + TF_LITE_MICRO_EXPECT(nullptr != subgraph_allocations); + + TF_LITE_MICRO_EXPECT_EQ( + kTfLiteOk, allocator->FinishModelAllocation(model, subgraph_allocations, + &scratch_buffer_handles)); + + size_t used_bytes = allocator->used_bytes(); + + // Second iteration: the same but request two scratch buffers + // Re-assign the freshly created allocator; discarding the return value + // would leave `allocator` pointing at the stale first-iteration instance, + // so the nullptr check below would not be testing the new allocator. + allocator = tflite::MicroAllocator::Create(arena, arena_size); + TF_LITE_MICRO_EXPECT(nullptr != allocator); + + subgraph_allocations = allocator->StartModelAllocation(model); + TF_LITE_MICRO_EXPECT(nullptr != subgraph_allocations); + + // Request two scratch buffers.
+ // They have size 0 because we do not want to affect the memory plan + const int scratch_subgraph_idx = 0; + const int scratch_node_idx = 0; + const size_t scratch_size = 0; + int buffer_idx1 = -1; + TF_LITE_MICRO_EXPECT_EQ( + kTfLiteOk, allocator->RequestScratchBufferInArena( + scratch_size, scratch_subgraph_idx, &buffer_idx1)); + int buffer_idx2 = -1; + TF_LITE_MICRO_EXPECT_EQ( + kTfLiteOk, allocator->RequestScratchBufferInArena( + scratch_size, scratch_subgraph_idx, &buffer_idx2)); + TF_LITE_MICRO_EXPECT_EQ( + kTfLiteOk, allocator->FinishPrepareNodeAllocations(scratch_node_idx)); + + TF_LITE_MICRO_EXPECT_EQ( + kTfLiteOk, allocator->FinishModelAllocation(model, subgraph_allocations, + &scratch_buffer_handles)); + + // Check that AllocateScratchBufferHandles was only called once, i.e. only two + // tflite::ScratchBufferHandle should have been allocated. + size_t used_bytes_with_scratch = allocator->used_bytes(); + + TF_LITE_MICRO_EXPECT_EQ(used_bytes_with_scratch, + used_bytes + sizeof(tflite::ScratchBufferHandle) * 2); +} + +TF_LITE_MICRO_TESTS_END diff --git a/tensorflow/lite/micro/micro_arena_constants.h b/tensorflow/lite/micro/micro_arena_constants.h new file mode 100644 index 0000000..8282817 --- /dev/null +++ b/tensorflow/lite/micro/micro_arena_constants.h @@ -0,0 +1,28 @@ +/* Copyright 2021 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/ + +#ifndef TENSORFLOW_LITE_MICRO_MICRO_ARENA_CONSTANTS_H_ +#define TENSORFLOW_LITE_MICRO_MICRO_ARENA_CONSTANTS_H_ + +namespace tflite { + +// The default buffer alignment requirement. +// We align tensor buffers to 16-byte boundaries, since this is a common +// requirement for SIMD extensions. +constexpr int MicroArenaBufferAlignment() { return 16; } + +} // namespace tflite + +#endif // TENSORFLOW_LITE_MICRO_MICRO_ARENA_CONSTANTS_H_ diff --git a/tensorflow/lite/micro/micro_common.h b/tensorflow/lite/micro/micro_common.h new file mode 100644 index 0000000..dc0bc08 --- /dev/null +++ b/tensorflow/lite/micro/micro_common.h @@ -0,0 +1,33 @@ +/* Copyright 2023 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ +#ifndef THIRD_PARTY_TFLITE_MICRO_TENSORFLOW_LITE_MICRO_MICRO_COMMON_H_ +#define THIRD_PARTY_TFLITE_MICRO_TENSORFLOW_LITE_MICRO_MICRO_COMMON_H_ + +#include "tensorflow/lite/c/common.h" + +// TFLMRegistration defines the API that TFLM kernels need to implement. +// This will be replacing the current TfLiteRegistration_V1 struct with +// something more compatible Embedded enviroment TFLM is used in. 
+struct TFLMRegistration { + void* (*init)(TfLiteContext* context, const char* buffer, size_t length); + void (*free)(TfLiteContext* context, void* buffer); + TfLiteStatus (*prepare)(TfLiteContext* context, TfLiteNode* node); + TfLiteStatus (*invoke)(TfLiteContext* context, TfLiteNode* node); + void (*reset)(TfLiteContext* context, void* buffer); + int32_t builtin_code; + const char* custom_name; +}; + +#endif // THIRD_PARTY_TFLITE_MICRO_TENSORFLOW_LITE_MICRO_MICRO_COMMON_H_ diff --git a/tensorflow/lite/micro/micro_context.cc b/tensorflow/lite/micro/micro_context.cc new file mode 100644 index 0000000..b06252a --- /dev/null +++ b/tensorflow/lite/micro/micro_context.cc @@ -0,0 +1,157 @@ +/* Copyright 2021 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/ + +#include "tensorflow/lite/micro/micro_context.h" + +#include +#include +#include + +#include "tensorflow/lite/kernels/internal/compatibility.h" +#include "tensorflow/lite/micro/micro_log.h" + +namespace tflite { +MicroContext::MicroContext(MicroAllocator* allocator, const Model* model, + MicroGraph* graph) + : allocator_(*allocator), + graph_(*graph), + model_(model), + state_(InterpreterState::kInit) {} + +MicroContext::~MicroContext() {} + +void* MicroContext::AllocatePersistentBuffer(size_t bytes) { + TFLITE_DCHECK(state_ == InterpreterState::kPrepare || + state_ == InterpreterState::kInit); + return allocator_.AllocatePersistentBuffer(bytes); +} + +TfLiteStatus MicroContext::RequestScratchBufferInArena(size_t bytes, + int* buffer_idx) { + TFLITE_DCHECK(state_ == InterpreterState::kPrepare); + return allocator_.RequestScratchBufferInArena( + bytes, graph_.GetCurrentSubgraphIndex(), buffer_idx); +} + +void* MicroContext::GetScratchBuffer(int buffer_idx) { + TFLITE_DCHECK(state_ == InterpreterState::kInvoke); + ScratchBufferHandle* handle = scratch_buffer_handles_ + buffer_idx; + return handle->data; +} + +TfLiteTensor* MicroContext::AllocateTempTfLiteTensor(int tensor_idx) { + return allocator_.AllocateTempTfLiteTensor(model_, graph_.GetAllocations(), + tensor_idx, + graph_.GetCurrentSubgraphIndex()); +} + +int MicroContext::GetTensorIndex(int index, int max_size, + const int* tensor_indices) { + if (index >= 0 && index < max_size) { + const int tensor_index = tensor_indices[index]; + if (tensor_index != kTfLiteOptionalTensor) { + return tensor_index; + } + } + return -1; +} + +TfLiteTensor* MicroContext::AllocateTempInputTensor(const TfLiteNode* node, + int index) { + const int tensor_index = + GetTensorIndex(index, node->inputs->size, node->inputs->data); + if (tensor_index < 0) { + return nullptr; + } + return AllocateTempTfLiteTensor(tensor_index); +} + +TfLiteTensor* 
MicroContext::AllocateTempOutputTensor(const TfLiteNode* node, + int index) { + const int tensor_index = + GetTensorIndex(index, node->outputs->size, node->outputs->data); + if (tensor_index < 0) { + return nullptr; + } + return AllocateTempTfLiteTensor(tensor_index); +} + +TfLiteTensor* MicroContext::AllocateTempIntermediateTensor( + const TfLiteNode* node, int index) { + const int tensor_index = GetTensorIndex(index, node->intermediates->size, + node->intermediates->data); + if (tensor_index < 0) { + return nullptr; + } + return AllocateTempTfLiteTensor(tensor_index); +} + +void MicroContext::DeallocateTempTfLiteTensor(TfLiteTensor* tensor) { + return allocator_.DeallocateTempTfLiteTensor(tensor); +} + +uint8_t* MicroContext::AllocateTempBuffer(size_t size, size_t alignment) { + TFLITE_DCHECK(state_ == InterpreterState::kPrepare); + return allocator_.AllocateTempBuffer(size, alignment); +} + +void MicroContext::DeallocateTempBuffer(uint8_t* buffer) { + TFLITE_DCHECK(state_ == InterpreterState::kPrepare); + allocator_.DeallocateTempBuffer(buffer); +} + +TfLiteEvalTensor* MicroContext::GetEvalTensor(int tensor_idx) { + return &graph_.GetAllocations()[graph_.GetCurrentSubgraphIndex()] + .tensors[tensor_idx]; +} + +void MicroContext::SetScratchBufferHandles( + ScratchBufferHandle* scratch_buffer_handles) { + scratch_buffer_handles_ = scratch_buffer_handles; +} + +TfLiteStatus MicroContext::set_external_context( + void* external_context_payload) { + TFLITE_DCHECK(state_ == InterpreterState::kPrepare || + state_ == InterpreterState::kInvoke); + if (external_context_payload == nullptr || + external_context_payload_ != nullptr) { + MicroPrintf( + "Attempting to set external context to %x but it was %x already", + external_context_payload, external_context_payload_); + return kTfLiteError; + } + + external_context_payload_ = external_context_payload; + return kTfLiteOk; +} + +void MicroContextReportOpError(struct TfLiteContext* context, + const char* format, ...) 
{ + va_list args; + va_start(args, format); + Log(format, args); + va_end(args); +} + +void MicroContext::SetInterpreterState(MicroContext::InterpreterState state) { + state_ = state; +} + +MicroContext::InterpreterState MicroContext::GetInterpreterState() const { + return state_; +} + +} // namespace tflite diff --git a/tensorflow/lite/micro/micro_context.h b/tensorflow/lite/micro/micro_context.h new file mode 100644 index 0000000..63b4b7d --- /dev/null +++ b/tensorflow/lite/micro/micro_context.h @@ -0,0 +1,187 @@ +/* Copyright 2021 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#ifndef TENSORFLOW_LITE_MICRO_MICRO_CONTEXT_H_ +#define TENSORFLOW_LITE_MICRO_MICRO_CONTEXT_H_ + +#include "tensorflow/lite/c/common.h" +#include "tensorflow/lite/micro/micro_allocator.h" +#include "tensorflow/lite/micro/micro_graph.h" + +namespace tflite { +// MicroContext is eventually going to become the API between TFLM and the +// kernels, replacing all the functions in TfLiteContext. The end state is code +// kernels to have code like: +// +// MicroContext* micro_context = GetMicroContext(context); +// micro_context-> +class MicroContext { + public: + // Enum that allows MicroContext to keep track of the stages different memory + // planning APIs are available to kernels. 
+ enum class InterpreterState { + kInit, + kPrepare, + kMemoryPlanning, + kInvoke, + }; + + // Does not take any ownership, and all pointers must refer to valid objects + // that outlive the one constructed. + explicit MicroContext(MicroAllocator* allocator, const Model* model, + MicroGraph* graph); + virtual ~MicroContext(); + + // Allocate persistent buffer which has the same life time as the interpreter. + // Returns nullptr on failure. + // The memory is allocated from the tail. + // This method is only available in Init or Prepare stage. + // Virtual so that it can be faked for kernel tests. + virtual void* AllocatePersistentBuffer(size_t bytes); + + // Request a scratch buffer in the arena through static memory planning. + // This method is only available in Prepare stage and the buffer is allocated + // by the interpreter between Prepare and Eval stage. In Eval stage, + // GetScratchBuffer API can be used to fetch the address. + // Virtual so that it can be faked for kernel tests. + virtual TfLiteStatus RequestScratchBufferInArena(size_t bytes, + int* buffer_idx); + + // Get the scratch buffer pointer. + // This method is only available in Eval stage. + // Virtual so that it can be faked for kernel tests. + virtual void* GetScratchBuffer(int buffer_idx); + + // Returns a temporary TfLiteTensor struct for a given index. + // Virtual so that it can be faked for kernel tests. + virtual TfLiteTensor* AllocateTempTfLiteTensor(int tensor_idx); + + // Returns a temporary TfLiteTensor struct for the specified input tensor of a + // given mode. This is the recommended API over the deprecated + // GetInput/GetInputSafe to get a temp input tensor. The returned tensor shall + // be freed via calling DeallocateTempTfLiteTensor. + virtual TfLiteTensor* AllocateTempInputTensor(const TfLiteNode* node, + int index); + + // Returns a temporary TfLiteTensor struct for the specified output tensor of + // a given mode. 
This is the recommended API over the deprecated + // GetOutput/GetOutputSafe to get a temp output tensor. The returned tensor + // shall be freed via calling DeallocateTempTfLiteTensor. + virtual TfLiteTensor* AllocateTempOutputTensor(const TfLiteNode* node, + int index); + + // Returns a temporary TfLiteTensor struct for the specified intermediate + // tensor of a given mode. This is the recommended API over the deprecated + // GetIntermediates/GetIntermediatesSafe to get a temp intermediate tensor. + // The returned tensor shall be freed via calling DeallocateTempTfLiteTensor. + virtual TfLiteTensor* AllocateTempIntermediateTensor(const TfLiteNode* node, + int index); + + // Deallocates a temp TfLiteTensor. + // Virtual so that it can be faked for kernel tests. + virtual void DeallocateTempTfLiteTensor(TfLiteTensor* tensor); + + // Returns a pointer to a temporary buffer (from the arena). + // This API is only valid from the kernel's Prepare function and + // the buffer's lifetime is also that of the Prepare function. + // Virtual so that it can be faked for kernel tests. + virtual uint8_t* AllocateTempBuffer(size_t size, size_t alignment); + + // Signals that the temporary buffer is no longer needed. + // Virtual so that it can be faked for kernel tests. + virtual void DeallocateTempBuffer(uint8_t* buffer); + + // Returns a TfLiteEvalTensor struct for a given index. + // Virtual so that it can be faked for kernel tests. + virtual TfLiteEvalTensor* GetEvalTensor(int tensor_idx); + + // Sets the State of MemoryPlanning MicroContext + void SetInterpreterState(MicroContext::InterpreterState state); + + // Sets the State of MemoryPlanning MicroContext + MicroContext::InterpreterState GetInterpreterState() const; + + // Does not take ownership of the pointer and the pointer must refer to valid + // an object that outlive this class instance. + // This can only be called once to set one external context. 
+ TfLiteStatus set_external_context(void* external_context_payload); + + void* external_context() { return external_context_payload_; } + + MicroGraph& graph() { return graph_; } + + // Sets the pointer to a list of ScratchBufferHandle instances. + // Not API between TFLM and kernels. Primarily used by the framework for + // housekeeping in MicroContext. + void SetScratchBufferHandles(ScratchBufferHandle* scratch_buffer_handles); + + private: + // Return the tensor index as tensor_indices[index]. tensor_indices is of + // max_size. Return -1 if index is not in the valid range of tensor_indices. + int GetTensorIndex(int index, int max_size, const int* tensor_indices); + + MicroAllocator& allocator_; + MicroGraph& graph_; + const Model* model_; + InterpreterState state_; + + ScratchBufferHandle* scratch_buffer_handles_ = nullptr; + void* external_context_payload_ = nullptr; + + TF_LITE_REMOVE_VIRTUAL_DELETE +}; + +inline MicroContext* GetMicroContext(const struct TfLiteContext* context) { + return reinterpret_cast(context->impl_); +} + +// Deprecated API. Prefer to using the MicroContext API directly from the +// kernels. +// TODO(b/213010668): migrate all existing kernels to use MicroContext, delete +// these functions, and remove corresponding members from the TfLiteContext +// struct for TFLM. 
+inline void* MicroContextAllocatePersistentBuffer(TfLiteContext* ctx, + size_t bytes) { + return GetMicroContext(ctx)->AllocatePersistentBuffer(bytes); +} +inline TfLiteStatus MicroContextRequestScratchBufferInArena(TfLiteContext* ctx, + size_t bytes, + int* buffer_idx) { + return GetMicroContext(ctx)->RequestScratchBufferInArena(bytes, buffer_idx); +} +inline void* MicroContextGetScratchBuffer(TfLiteContext* ctx, int buffer_idx) { + return GetMicroContext(ctx)->GetScratchBuffer(buffer_idx); +} +inline TfLiteTensor* MicroContextGetTensor(const struct TfLiteContext* context, + int tensor_idx) { + return GetMicroContext(context)->AllocateTempTfLiteTensor(tensor_idx); +} +inline TfLiteEvalTensor* MicroContextGetEvalTensor( + const struct TfLiteContext* context, int tensor_idx) { + return GetMicroContext(context)->GetEvalTensor(tensor_idx); +} +inline TfLiteExternalContext* MicroContextGetExternalContext( + TfLiteContext* context, TfLiteExternalContextType unused) { + return reinterpret_cast( + GetMicroContext(context)->external_context()); +} + +// Requests that an error be reported with format string msg. +void MicroContextReportOpError(struct TfLiteContext* context, + const char* format, ...); + +} // namespace tflite + +#endif // TENSORFLOW_LITE_MICRO_MICRO_CONTEXT_H_ diff --git a/tensorflow/lite/micro/micro_context_test.cc b/tensorflow/lite/micro/micro_context_test.cc new file mode 100644 index 0000000..e01d387 --- /dev/null +++ b/tensorflow/lite/micro/micro_context_test.cc @@ -0,0 +1,170 @@ +/* Copyright 2022 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. 
+You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ +#include "tensorflow/lite/micro/micro_context.h" + +#include + +#include "tensorflow/lite/micro/micro_allocator.h" +#include "tensorflow/lite/micro/micro_arena_constants.h" +#include "tensorflow/lite/micro/test_helpers.h" +#include "tensorflow/lite/micro/testing/micro_test.h" + +using ::tflite::testing::IntArrayFromInts; + +namespace tflite { +namespace { + +tflite::MicroContext CreateMicroContext() { + // Some targets do not support dynamic memory (i.e., no malloc or new), thus, + // the test need to place non-transient memories in static variables. This is + // safe because tests are guaranteed to run serially. + constexpr size_t kMicroGraphPlacementBufferSize = 1024; + alignas(4) static uint8_t + micro_graph_placement_buffer[kMicroGraphPlacementBufferSize]; + constexpr size_t kArenaSize = 1024; + static uint8_t tensor_arena[kArenaSize]; + + const tflite::Model* model = tflite::testing::GetSimpleMockModel(); + MicroAllocator* micro_allocator = + MicroAllocator::Create(tensor_arena, kArenaSize); + MicroGraph* micro_graph = new (micro_graph_placement_buffer) + MicroGraph(nullptr, nullptr, nullptr, nullptr); + + tflite::MicroContext micro_context(micro_allocator, model, micro_graph); + return micro_context; +} + +// Test structure for external context payload. +struct TestExternalContextPayloadData { + // Opaque blob + alignas(4) uint8_t blob_data[128]; +}; +} // namespace +} // namespace tflite + +TF_LITE_MICRO_TESTS_BEGIN + +// Ensures that a regular set and get pair works ok. 
+TF_LITE_MICRO_TEST(TestSetGetExternalContextSuccess) { + tflite::MicroContext micro_context = tflite::CreateMicroContext(); + micro_context.SetInterpreterState( + tflite::MicroContext::InterpreterState::kInvoke); + + tflite::TestExternalContextPayloadData payload; + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, + micro_context.set_external_context(&payload)); + + tflite::TestExternalContextPayloadData* returned_external_context = + reinterpret_cast( + micro_context.external_context()); + + // What is returned should be the same as what is set. + TF_LITE_MICRO_EXPECT((void*)returned_external_context == (void*)(&payload)); +} + +TF_LITE_MICRO_TEST(TestGetExternalContextWithoutSetShouldReturnNull) { + tflite::MicroContext micro_context = tflite::CreateMicroContext(); + + tflite::TestExternalContextPayloadData* returned_external_context = + reinterpret_cast( + micro_context.external_context()); + + // Return a null if nothing is set before. + TF_LITE_MICRO_EXPECT((void*)returned_external_context == (nullptr)); +} + +TF_LITE_MICRO_TEST(TestSetExternalContextCanOnlyBeCalledOnce) { + tflite::MicroContext micro_context = tflite::CreateMicroContext(); + micro_context.SetInterpreterState( + tflite::MicroContext::InterpreterState::kPrepare); + tflite::TestExternalContextPayloadData payload; + + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, + micro_context.set_external_context(&payload)); + + // Another set should fail. 
+ TF_LITE_MICRO_EXPECT_EQ(kTfLiteError, + micro_context.set_external_context(&payload)); +} + +TF_LITE_MICRO_TEST(TestSetExternalContextToNullShouldFail) { + tflite::MicroContext micro_context = tflite::CreateMicroContext(); + micro_context.SetInterpreterState( + tflite::MicroContext::InterpreterState::kPrepare); + TF_LITE_MICRO_EXPECT_EQ(kTfLiteError, + micro_context.set_external_context(nullptr)); +} + +TF_LITE_MICRO_TEST(TestGetTempInputTensor) { + tflite::MicroContext micro_context = tflite::CreateMicroContext(); + + TfLiteNode node; + int input_data[] = {2, 0, 1}; + node.inputs = IntArrayFromInts(input_data); + + TfLiteTensor* input1 = micro_context.AllocateTempInputTensor(&node, 0); + TF_LITE_MICRO_EXPECT_TRUE(input1 != nullptr); + micro_context.DeallocateTempTfLiteTensor(input1); + + TfLiteTensor* input2 = micro_context.AllocateTempInputTensor(&node, 1); + TF_LITE_MICRO_EXPECT_TRUE(input2 != nullptr); + micro_context.DeallocateTempTfLiteTensor(input2); + + TfLiteTensor* invalid_input = micro_context.AllocateTempInputTensor(&node, 2); + TF_LITE_MICRO_EXPECT_TRUE(invalid_input == nullptr); +} + +TF_LITE_MICRO_TEST(TestGetTempOutputTensor) { + tflite::MicroContext micro_context = tflite::CreateMicroContext(); + + TfLiteNode node; + int output_data[] = {1, 0}; + node.outputs = IntArrayFromInts(output_data); + + TfLiteTensor* output = micro_context.AllocateTempOutputTensor(&node, 0); + TF_LITE_MICRO_EXPECT_TRUE(output != nullptr); + micro_context.DeallocateTempTfLiteTensor(output); + + TfLiteTensor* invalid_output = + micro_context.AllocateTempOutputTensor(&node, 1); + TF_LITE_MICRO_EXPECT_TRUE(invalid_output == nullptr); +} + +TF_LITE_MICRO_TEST(TestAllocateTempBuffer) { + tflite::MicroContext micro_context = tflite::CreateMicroContext(); + micro_context.SetInterpreterState( + tflite::MicroContext::InterpreterState::kPrepare); + uint8_t* buffer1 = + micro_context.AllocateTempBuffer(10, tflite::MicroArenaBufferAlignment()); + TF_LITE_MICRO_EXPECT(buffer1 != 
nullptr); +} + +TF_LITE_MICRO_TEST(TestGetTempIntermediateTensor) { + tflite::MicroContext micro_context = tflite::CreateMicroContext(); + + TfLiteNode node; + int intermediate_data[] = {1, 0}; + node.intermediates = IntArrayFromInts(intermediate_data); + + TfLiteTensor* output = micro_context.AllocateTempIntermediateTensor(&node, 0); + TF_LITE_MICRO_EXPECT_TRUE(output != nullptr); + micro_context.DeallocateTempTfLiteTensor(output); + + TfLiteTensor* invalid_output = + micro_context.AllocateTempIntermediateTensor(&node, 1); + TF_LITE_MICRO_EXPECT_TRUE(invalid_output == nullptr); +} + +TF_LITE_MICRO_TESTS_END diff --git a/tensorflow/lite/micro/micro_graph.cc b/tensorflow/lite/micro/micro_graph.cc new file mode 100644 index 0000000..35c6c1f --- /dev/null +++ b/tensorflow/lite/micro/micro_graph.cc @@ -0,0 +1,270 @@ +/* Copyright 2021 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/ + +#include "tensorflow/lite/micro/micro_graph.h" + +#include "flatbuffers/flatbuffers.h" // from @flatbuffers +#include "tensorflow/lite/c/common.h" +#include "tensorflow/lite/kernels/internal/compatibility.h" +#include "tensorflow/lite/micro/flatbuffer_utils.h" +#include "tensorflow/lite/micro/memory_helpers.h" +#include "tensorflow/lite/micro/micro_log.h" +#include "tensorflow/lite/micro/micro_profiler.h" +#include "tensorflow/lite/schema/schema_generated.h" + +namespace tflite { +namespace { + +const char* OpNameFromRegistration(const TFLMRegistration* registration) { + if (registration->builtin_code == BuiltinOperator_CUSTOM) { + return registration->custom_name; + } else { + return EnumNameBuiltinOperator(BuiltinOperator(registration->builtin_code)); + } +} + +} // namespace + +MicroGraph::MicroGraph(TfLiteContext* context, const Model* model, + MicroAllocator* allocator, + MicroResourceVariables* resource_variables) + : context_(context), + model_(model), + allocator_(allocator), + current_subgraph_index_(0), + resource_variables_(resource_variables) { + if (model != nullptr) { + subgraphs_ = model->subgraphs(); + } +} + +MicroGraph::~MicroGraph() {} + +TfLiteStatus MicroGraph::InitSubgraphs() { + int previous_subgraph_idx = current_subgraph_index_; + + for (size_t subgraph_idx = 0; subgraph_idx < subgraphs_->size(); + subgraph_idx++) { + current_subgraph_index_ = subgraph_idx; + uint32_t operators_size = NumSubgraphOperators(model_, subgraph_idx); + for (size_t i = 0; i < operators_size; ++i) { + TfLiteNode* node = + &(subgraph_allocations_[subgraph_idx].node_and_registrations[i].node); + const TFLMRegistration* registration = subgraph_allocations_[subgraph_idx] + .node_and_registrations[i] + .registration; + size_t init_data_size; + const char* init_data; + if (registration->builtin_code == BuiltinOperator_CUSTOM) { + init_data = reinterpret_cast(node->custom_initial_data); + 
init_data_size = node->custom_initial_data_size; + } else { + init_data = reinterpret_cast(node->builtin_data); + init_data_size = 0; + } + if (registration->init) { + node->user_data = + registration->init(context_, init_data, init_data_size); + } + } + } + current_subgraph_index_ = previous_subgraph_idx; + + return kTfLiteOk; +} + +TfLiteStatus MicroGraph::PrepareSubgraphs() { + int previous_subgraph_idx = current_subgraph_index_; + + for (size_t subgraph_idx = 0; subgraph_idx < subgraphs_->size(); + subgraph_idx++) { + current_subgraph_index_ = subgraph_idx; + uint32_t operators_size = NumSubgraphOperators(model_, subgraph_idx); + for (size_t i = 0; i < operators_size; ++i) { + TfLiteNode* node = + &(subgraph_allocations_[subgraph_idx].node_and_registrations[i].node); + const TFLMRegistration* registration = subgraph_allocations_[subgraph_idx] + .node_and_registrations[i] + .registration; + if (registration->prepare != nullptr) { + TfLiteStatus prepare_status = registration->prepare(context_, node); + if (prepare_status != kTfLiteOk) { + MicroPrintf("Node %s (number %d) failed to prepare with status %d", + OpNameFromRegistration(registration), i, prepare_status); + return kTfLiteError; + } + } + // Propagate allocation failures instead of silently discarding the + // returned TfLiteStatus. + TF_LITE_ENSURE_STATUS( + allocator_->FinishPrepareNodeAllocations(/*node_id=*/i)); + } + } + current_subgraph_index_ = previous_subgraph_idx; + + return kTfLiteOk; +} + +TfLiteStatus MicroGraph::ResetSubgraphs() { + int previous_subgraph_idx = current_subgraph_index_; + + for (size_t subgraph_idx = 0; subgraph_idx < subgraphs_->size(); + subgraph_idx++) { + current_subgraph_index_ = subgraph_idx; + uint32_t operators_size = NumSubgraphOperators(model_, subgraph_idx); + for (size_t i = 0; i < operators_size; ++i) { + TfLiteNode* node = + &(subgraph_allocations_[subgraph_idx].node_and_registrations[i].node); + const TFLMRegistration* registration = subgraph_allocations_[subgraph_idx] + .node_and_registrations[i] + .registration; + // registration is allocated outside the interpreter, so double
check to + // make sure it's not nullptr; + if (registration != nullptr && registration->reset != nullptr) { + registration->reset(context_, node->user_data); + } + } + } + current_subgraph_index_ = previous_subgraph_idx; + + return kTfLiteOk; +} + +TfLiteStatus MicroGraph::FreeSubgraphs() { + int previous_subgraph_idx = current_subgraph_index_; + + for (size_t subgraph_idx = 0; subgraph_idx < subgraphs_->size(); + subgraph_idx++) { + current_subgraph_index_ = subgraph_idx; + uint32_t operators_size = NumSubgraphOperators(model_, subgraph_idx); + for (size_t i = 0; i < operators_size; ++i) { + TfLiteNode* node = + &(subgraph_allocations_[subgraph_idx].node_and_registrations[i].node); + const TFLMRegistration* registration = subgraph_allocations_[subgraph_idx] + .node_and_registrations[i] + .registration; + // registration is allocated outside the interpreter, so double check to + // make sure it's not nullptr; + if (registration != nullptr && registration->free != nullptr) { + registration->free(context_, node->user_data); + } + } + } + current_subgraph_index_ = previous_subgraph_idx; + + return kTfLiteOk; +} + +TfLiteStatus MicroGraph::InvokeSubgraph(int subgraph_idx) { + int previous_subgraph_idx = current_subgraph_index_; + current_subgraph_index_ = subgraph_idx; + + if (static_cast(subgraph_idx) >= subgraphs_->size()) { + MicroPrintf("Accessing subgraph %d but only %d subgraphs found", + subgraph_idx, subgraphs_->size()); + return kTfLiteError; + } + uint32_t operators_size = NumSubgraphOperators(model_, subgraph_idx); + for (size_t i = 0; i < operators_size; ++i) { + TfLiteNode* node = + &(subgraph_allocations_[subgraph_idx].node_and_registrations[i].node); + const TFLMRegistration* registration = subgraph_allocations_[subgraph_idx] + .node_and_registrations[i] + .registration; + +// This ifdef is needed (even though ScopedMicroProfiler itself is a no-op with +// -DTF_LITE_STRIP_ERROR_STRINGS) because the function OpNameFromRegistration is +// only defined for 
builds with the error strings. +#if !defined(TF_LITE_STRIP_ERROR_STRINGS) + ScopedMicroProfiler scoped_profiler( + OpNameFromRegistration(registration), + reinterpret_cast(context_->profiler)); +#endif + + TFLITE_DCHECK(registration->invoke); + TfLiteStatus invoke_status = registration->invoke(context_, node); + + // All TfLiteTensor structs used in the kernel are allocated from temp + // memory in the allocator. This creates a chain of allocations in the + // temp section. The call below resets the chain of allocations to + // prepare for the next call. + allocator_->ResetTempAllocations(); + + if (invoke_status == kTfLiteError) { + MicroPrintf("Node %s (number %d) failed to invoke with status %d", + OpNameFromRegistration(registration), i, invoke_status); + return kTfLiteError; + } else if (invoke_status != kTfLiteOk) { + return invoke_status; + } + } + current_subgraph_index_ = previous_subgraph_idx; + return kTfLiteOk; +} + +TfLiteStatus MicroGraph::ResetVariableTensors() { + for (size_t subgraph_idx = 0; subgraph_idx < subgraphs_->size(); + subgraph_idx++) { + const SubGraph* subgraph = (*subgraphs_)[subgraph_idx]; + for (size_t i = 0; i < subgraph->tensors()->size(); ++i) { + auto* tensor = subgraph->tensors()->Get(i); + if (tensor->is_variable()) { + size_t buffer_size; + TF_LITE_ENSURE_STATUS(TfLiteEvalTensorByteLength( + &subgraph_allocations_[subgraph_idx].tensors[i], &buffer_size)); + + int value = 0; + if (tensor->type() == tflite::TensorType_INT8) { + value = tensor->quantization()->zero_point()->Get(0); + } + memset(subgraph_allocations_[subgraph_idx].tensors[i].data.raw, value, + buffer_size); + } + } + } + if (resource_variables_ != nullptr) { + resource_variables_->ResetAll(); + } + + return kTfLiteOk; +} + +int MicroGraph::NumSubgraphs() { return model_->subgraphs()->size(); } + +void MicroGraph::SetSubgraphAllocations( + SubgraphAllocations* subgraph_allocations) { + subgraph_allocations_ = subgraph_allocations; +} + +size_t 
MicroGraph::NumSubgraphInputs(int subgraph_idx) { + return model_->subgraphs()->Get(subgraph_idx)->inputs()->size(); +} + +TfLiteEvalTensor* MicroGraph::GetSubgraphInput(int subgraph_idx, + int input_idx) { + int tensor_idx = + model_->subgraphs()->Get(subgraph_idx)->inputs()->Get(input_idx); + return &subgraph_allocations_[subgraph_idx].tensors[tensor_idx]; +} + +size_t MicroGraph::NumSubgraphOutputs(int subgraph_idx) { + return model_->subgraphs()->Get(subgraph_idx)->outputs()->size(); +} + +TfLiteEvalTensor* MicroGraph::GetSubgraphOutput(int subgraph_idx, + int output_idx) { + int tensor_idx = + model_->subgraphs()->Get(subgraph_idx)->outputs()->Get(output_idx); + return &subgraph_allocations_[subgraph_idx].tensors[tensor_idx]; +} + +} // namespace tflite diff --git a/tensorflow/lite/micro/micro_graph.h b/tensorflow/lite/micro/micro_graph.h new file mode 100644 index 0000000..ca8c40e --- /dev/null +++ b/tensorflow/lite/micro/micro_graph.h @@ -0,0 +1,108 @@ +/* Copyright 2021 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/ + +#ifndef TENSORFLOW_LITE_MICRO_MICRO_GRAPH_H_ +#define TENSORFLOW_LITE_MICRO_MICRO_GRAPH_H_ + +#include "tensorflow/lite/micro/micro_allocator.h" +#include "tensorflow/lite/micro/micro_common.h" +#include "tensorflow/lite/micro/micro_resource_variable.h" +#include "tensorflow/lite/schema/schema_generated.h" + +namespace tflite { + +// Abstracts the details of interacting with the tflite::Model. +// +// Provides methods to access, initialize, prepare, invoke and free any +// subgraph in the tflite::Graph. +class MicroGraph { + public: + // The lifetime of the context, model, allocator and resource_variables must + // be at least as long as that of the graph object, since the this class may + // need to access them at any time. If resource_variables is a nullptr, + // GetResourceVariables will return a nullptr. + MicroGraph(TfLiteContext* context, const Model* model, + MicroAllocator* allocator, + MicroResourceVariables* resource_variables); + virtual ~MicroGraph(); + + // Sets up builtin data and calls TFLMRegistration->Init for every + // operator in every subgraph in the model. + virtual TfLiteStatus InitSubgraphs(); + + // Calls TFLMRegistration->Prepare for every operator in every subgraph + // in the model. + virtual TfLiteStatus PrepareSubgraphs(); + + // Calls TFLMRegistration->Reset for every operator in every subgraph in + // the model. + virtual TfLiteStatus ResetSubgraphs(); + + // Calls TFLMRegistration->Free for every operator in every subgraph in + // the model. + virtual TfLiteStatus FreeSubgraphs(); + + // Calls TFLMRegistration->Invoke for every operator in a single subgraph + // in the model. + virtual TfLiteStatus InvokeSubgraph(int subgraph_idx); + + // Zeros out all variable tensors in all subgraphs in the model. + virtual TfLiteStatus ResetVariableTensors(); + + // Number of tensor inputs to a specified subgraph in the model. 
+ virtual size_t NumSubgraphInputs(int subgraph_idx); + + // Get the specified input tensor of a specified subgraph in the model. + virtual TfLiteEvalTensor* GetSubgraphInput(int subgraph_idx, int input_idx); + + // Number of tensor outputs from a specified subgraph in the model. + virtual size_t NumSubgraphOutputs(int subgraph_idx); + + // Get the specified output tensor of a specified subgraph in the model. + virtual TfLiteEvalTensor* GetSubgraphOutput(int subgraph_idx, int output_idx); + + // Number of subgraphs in the model. + virtual int NumSubgraphs(); + + // Hook to pass in subgraph allocations tracked within the interpreter, + // allowing MicroGraph to init / prepare / invoke subgraphs in the model. + void SetSubgraphAllocations(SubgraphAllocations* subgraph_allocations); + + // Get the current subgraph index. Within an on operator, this is guaranteed + // to be the subgraph of that operator. + int GetCurrentSubgraphIndex() { return current_subgraph_index_; } + + // Gets the list of alloctions for each subgraph. This is the source of truth + // for all per-subgraph allocation data. + SubgraphAllocations* GetAllocations() { return subgraph_allocations_; } + + // Get the resource variables for this TFLM graph. + MicroResourceVariables* GetResourceVariables() { return resource_variables_; } + + private: + TfLiteContext* context_; + const Model* model_; + MicroAllocator* allocator_; + SubgraphAllocations* subgraph_allocations_ = nullptr; + int current_subgraph_index_; + MicroResourceVariables* resource_variables_; + const flatbuffers::Vector>* subgraphs_; + + TF_LITE_REMOVE_VIRTUAL_DELETE +}; + +} // namespace tflite + +#endif // TENSORFLOW_LITE_MICRO_MICRO_GRAPH_H_ diff --git a/tensorflow/lite/micro/micro_interpreter.cc b/tensorflow/lite/micro/micro_interpreter.cc new file mode 100644 index 0000000..c6917b4 --- /dev/null +++ b/tensorflow/lite/micro/micro_interpreter.cc @@ -0,0 +1,313 @@ +/* Copyright 2022 The TensorFlow Authors. All Rights Reserved. 
+ +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ +#include "tensorflow/lite/micro/micro_interpreter.h" + +#include +#include +#include + +#include "flatbuffers/flatbuffers.h" // from @flatbuffers +#include "tensorflow/lite/c/c_api_types.h" +#include "tensorflow/lite/c/common.h" +#include "tensorflow/lite/micro/flatbuffer_utils.h" +#include "tensorflow/lite/micro/memory_helpers.h" +#include "tensorflow/lite/micro/micro_allocator.h" +#include "tensorflow/lite/micro/micro_context.h" +#include "tensorflow/lite/micro/micro_log.h" +#include "tensorflow/lite/micro/micro_op_resolver.h" +#include "tensorflow/lite/micro/micro_profiler_interface.h" +#include "tensorflow/lite/micro/tflite_bridge/flatbuffer_conversions_bridge.h" +#include "tensorflow/lite/schema/schema_generated.h" +#include "tensorflow/lite/schema/schema_utils.h" + +namespace tflite { + +MicroInterpreter::MicroInterpreter(const Model* model, + const MicroOpResolver& op_resolver, + uint8_t* tensor_arena, + size_t tensor_arena_size, + MicroResourceVariables* resource_variables, + MicroProfilerInterface* profiler) + : model_(model), + op_resolver_(op_resolver), + allocator_(*MicroAllocator::Create(tensor_arena, tensor_arena_size)), + + graph_(&context_, model, &allocator_, resource_variables), + tensors_allocated_(false), + initialization_status_(kTfLiteError), + input_tensors_(nullptr), + output_tensors_(nullptr), + micro_context_(&allocator_, model_, 
&graph_) { + Init(profiler); +} + +MicroInterpreter::MicroInterpreter(const Model* model, + const MicroOpResolver& op_resolver, + MicroAllocator* allocator, + MicroResourceVariables* resource_variables, + MicroProfilerInterface* profiler) + : model_(model), + op_resolver_(op_resolver), + allocator_(*allocator), + graph_(&context_, model, allocator, resource_variables), + tensors_allocated_(false), + initialization_status_(kTfLiteError), + input_tensors_(nullptr), + output_tensors_(nullptr), + micro_context_(&allocator_, model_, &graph_) { + Init(profiler); +} + +MicroInterpreter::~MicroInterpreter() { + if (graph_.GetAllocations() != nullptr) { + graph_.FreeSubgraphs(); + } +} + +void MicroInterpreter::Init(MicroProfilerInterface* profiler) { + micro_context_.SetInterpreterState(MicroContext::InterpreterState::kInit); + context_.impl_ = static_cast(µ_context_); + context_.ReportError = MicroContextReportOpError; + context_.GetTensor = MicroContextGetTensor; + context_.GetEvalTensor = MicroContextGetEvalTensor; + context_.profiler = profiler; + context_.RequestScratchBufferInArena = + MicroContextRequestScratchBufferInArena; + context_.GetExternalContext = MicroContextGetExternalContext; + context_.AllocatePersistentBuffer = MicroContextAllocatePersistentBuffer; + context_.GetScratchBuffer = MicroContextGetScratchBuffer; + + initialization_status_ = kTfLiteOk; +} + +TfLiteStatus MicroInterpreter::PrepareNodeAndRegistrationDataFromFlatbuffer() { + for (int subgraph_idx = 0; subgraph_idx < graph_.NumSubgraphs(); + subgraph_idx++) { + const SubGraph* subgraph = model_->subgraphs()->Get(subgraph_idx); + TFLITE_DCHECK(subgraph != nullptr); + + auto* opcodes = model_->operator_codes(); + TfLiteBridgeBuiltinDataAllocator* builtin_data_allocator = + allocator_.GetBuiltinDataAllocator(); + uint32_t operators_size = NumSubgraphOperators(subgraph); + for (size_t i = 0; i < operators_size; ++i) { + const auto* op = subgraph->operators()->Get(i); + const size_t index = 
op->opcode_index(); + if (index >= opcodes->size()) { + MicroPrintf("Missing registration for opcode_index %d\n", index); + return kTfLiteError; + } + const auto* opcode = opcodes->Get(index); + TfLiteStatus status = + GetRegistrationFromOpCode(opcode, op_resolver_, + &(graph_.GetAllocations()[subgraph_idx] + .node_and_registrations[i] + .registration)); + if (status != kTfLiteOk) { + MicroPrintf("Failed to get registration from op code %s\n ", + EnumNameBuiltinOperator(GetBuiltinCode(opcode))); + return status; + } + const auto* registration = graph_.GetAllocations()[subgraph_idx] + .node_and_registrations[i] + .registration; + if (registration == nullptr) { + MicroPrintf("Skipping op for opcode_index %d\n", index); + return kTfLiteError; + } + BuiltinOperator op_type = + static_cast(registration->builtin_code); + + const char* custom_data = nullptr; + size_t custom_data_size = 0; + unsigned char* builtin_data = nullptr; + + if (op_type == BuiltinOperator_CUSTOM) { + // Custom Ops may or may not have a non-null custom_options field. 
+ if (op->custom_options() != nullptr) { + custom_data = + reinterpret_cast(op->custom_options()->data()); + custom_data_size = op->custom_options()->size(); + } + } else { + if (op->custom_options() != nullptr) { + MicroPrintf( + "Unsupported behavior: found builtin operator %s with custom " + "options.\n", + EnumNameBuiltinOperator(op_type)); + return kTfLiteError; + } + + TfLiteBridgeBuiltinParseFunction parser = + op_resolver_.GetOpDataParser(op_type); + if (parser == nullptr) { + MicroPrintf("Did not find a parser for %s", + EnumNameBuiltinOperator(op_type)); + + return kTfLiteError; + } + TF_LITE_ENSURE_STATUS(CallBuiltinParseFunction( + parser, op, builtin_data_allocator, (void**)(&builtin_data))); + } + + TfLiteIntArray* inputs_array = + FlatBufferVectorToTfLiteTypeArray(op->inputs()); + TfLiteIntArray* outputs_array = + FlatBufferVectorToTfLiteTypeArray(op->outputs()); + + TfLiteNode* node = &( + graph_.GetAllocations()[subgraph_idx].node_and_registrations[i].node); + *node = {}; + node->inputs = inputs_array; + node->outputs = outputs_array; + node->builtin_data = reinterpret_cast(builtin_data); + node->custom_initial_data = custom_data; + node->custom_initial_data_size = custom_data_size; + + if (op->intermediates() && (op->intermediates()->size() > 0)) { + node->intermediates = + FlatBufferVectorToTfLiteTypeArray(op->intermediates()); + } + } + } + return kTfLiteOk; +} + +TfLiteStatus MicroInterpreter::AllocateTensors() { + SubgraphAllocations* allocations = allocator_.StartModelAllocation(model_); + + if (allocations == nullptr) { + MicroPrintf("Failed starting model allocation.\n"); + initialization_status_ = kTfLiteError; + return kTfLiteError; + } + + graph_.SetSubgraphAllocations(allocations); + + TF_LITE_ENSURE_STATUS(PrepareNodeAndRegistrationDataFromFlatbuffer()); + + micro_context_.SetInterpreterState(MicroContext::InterpreterState::kInit); + TF_LITE_ENSURE_STATUS(graph_.InitSubgraphs()); + + 
micro_context_.SetInterpreterState(MicroContext::InterpreterState::kPrepare); + + TF_LITE_ENSURE_STATUS(graph_.PrepareSubgraphs()); + + micro_context_.SetInterpreterState( + MicroContext::InterpreterState::kMemoryPlanning); + + TF_LITE_ENSURE_OK(&context_, allocator_.FinishModelAllocation( + model_, graph_.GetAllocations(), + &scratch_buffer_handles_)); + + micro_context_.SetScratchBufferHandles(scratch_buffer_handles_); + + // TODO(b/162311891): Drop these allocations when the interpreter supports + // handling buffers from TfLiteEvalTensor. + input_tensors_ = + reinterpret_cast(allocator_.AllocatePersistentBuffer( + sizeof(TfLiteTensor*) * inputs_size())); + if (input_tensors_ == nullptr) { + MicroPrintf( + "Failed to allocate memory for context->input_tensors_, " + "%d bytes required", + sizeof(TfLiteTensor*) * inputs_size()); + return kTfLiteError; + } + + for (size_t i = 0; i < inputs_size(); ++i) { + input_tensors_[i] = allocator_.AllocatePersistentTfLiteTensor( + model_, graph_.GetAllocations(), inputs().Get(i), 0); + if (input_tensors_[i] == nullptr) { + MicroPrintf("Failed to initialize input tensor %d", i); + return kTfLiteError; + } + } + + // TODO(b/162311891): Drop these allocations when the interpreter supports + // handling buffers from TfLiteEvalTensor. 
+ output_tensors_ = + reinterpret_cast(allocator_.AllocatePersistentBuffer( + sizeof(TfLiteTensor*) * outputs_size())); + if (output_tensors_ == nullptr) { + MicroPrintf( + "Failed to allocate memory for context->output_tensors_, " + "%d bytes required", + sizeof(TfLiteTensor*) * outputs_size()); + return kTfLiteError; + } + + for (size_t i = 0; i < outputs_size(); ++i) { + output_tensors_[i] = allocator_.AllocatePersistentTfLiteTensor( + model_, graph_.GetAllocations(), outputs().Get(i), 0); + if (output_tensors_[i] == nullptr) { + MicroPrintf("Failed to initialize output tensor %d", i); + return kTfLiteError; + } + } + + TF_LITE_ENSURE_STATUS(Reset()); + + tensors_allocated_ = true; + micro_context_.SetInterpreterState(MicroContext::InterpreterState::kInvoke); + return kTfLiteOk; +} + +TfLiteStatus MicroInterpreter::Invoke() { + if (initialization_status_ != kTfLiteOk) { + MicroPrintf("Invoke() called after initialization failed\n"); + return kTfLiteError; + } + + // Ensure tensors are allocated before the interpreter is invoked to avoid + // difficult to debug segfaults. 
+ if (!tensors_allocated_) { + TF_LITE_ENSURE_OK(&context_, AllocateTensors()); + } + return graph_.InvokeSubgraph(0); +} + +TfLiteTensor* MicroInterpreter::input(size_t index) { + const size_t length = inputs_size(); + if (index >= length) { + MicroPrintf("Input index %d out of range (length is %d)", index, length); + return nullptr; + } + return input_tensors_[index]; +} + +TfLiteTensor* MicroInterpreter::output(size_t index) { + const size_t length = outputs_size(); + if (index >= length) { + MicroPrintf("Output index %d out of range (length is %d)", index, length); + return nullptr; + } + return output_tensors_[index]; +} + +TfLiteStatus MicroInterpreter::Reset() { + TfLiteStatus status = graph_.ResetSubgraphs(); + if (status != kTfLiteOk) { + return status; + } + return graph_.ResetVariableTensors(); +} + +TfLiteStatus MicroInterpreter::SetMicroExternalContext( + void* external_context_payload) { + return micro_context_.set_external_context(external_context_payload); +} + +} // namespace tflite diff --git a/tensorflow/lite/micro/micro_interpreter.h b/tensorflow/lite/micro/micro_interpreter.h new file mode 100644 index 0000000..a77b0e0 --- /dev/null +++ b/tensorflow/lite/micro/micro_interpreter.h @@ -0,0 +1,171 @@ +/* Copyright 2022 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/ +#ifndef TENSORFLOW_LITE_MICRO_MICRO_INTERPRETER_H_ +#define TENSORFLOW_LITE_MICRO_MICRO_INTERPRETER_H_ + +#include +#include + +#include "flatbuffers/flatbuffers.h" // from @flatbuffers +#include "tensorflow/lite/c/c_api_types.h" +#include "tensorflow/lite/c/common.h" +#include "tensorflow/lite/core/api/error_reporter.h" +#include "tensorflow/lite/kernels/internal/tensor_ctypes.h" +#include "tensorflow/lite/micro/micro_allocator.h" +#include "tensorflow/lite/micro/micro_context.h" +#include "tensorflow/lite/micro/micro_graph.h" +#include "tensorflow/lite/micro/micro_op_resolver.h" +#include "tensorflow/lite/micro/micro_profiler_interface.h" +#include "tensorflow/lite/portable_type_to_tflitetype.h" +#include "tensorflow/lite/schema/schema_generated.h" + +/// Copied from tensorflow/lite/version.h to avoid a dependency chain into +// tensorflow/core. +#define TFLITE_SCHEMA_VERSION (3) + +namespace tflite { + +class MicroInterpreter { + public: + // The lifetime of the model, op resolver, tensor arena, error reporter, + // resource variables, and profiler must be at least as long as that of the + // interpreter object, since the interpreter may need to access them at any + // time. This means that you should usually create them with the same scope as + // each other, for example having them all allocated on the stack as local + // variables through a top-level function. The interpreter doesn't do any + // deallocation of any of the pointed-to objects, ownership remains with the + // caller. + MicroInterpreter(const Model* model, const MicroOpResolver& op_resolver, + uint8_t* tensor_arena, size_t tensor_arena_size, + MicroResourceVariables* resource_variables = nullptr, + MicroProfilerInterface* profiler = nullptr); + + // Create an interpreter instance using an existing MicroAllocator instance. 
+ // This constructor should be used when creating an allocator that needs to + // have allocation handled in more than one interpreter or for recording + // allocations inside the interpreter. The lifetime of the allocator must be + // as long as that of the interpreter object. + MicroInterpreter(const Model* model, const MicroOpResolver& op_resolver, + MicroAllocator* allocator, + MicroResourceVariables* resource_variables = nullptr, + MicroProfilerInterface* profiler = nullptr); + + ~MicroInterpreter(); + + // Runs through the model and allocates all necessary input, output and + // intermediate tensors. + TfLiteStatus AllocateTensors(); + + // In order to support partial graph runs for strided models, this can return + // values other than kTfLiteOk and kTfLiteError. + // TODO(b/149795762): Add this to the TfLiteStatus enum. + TfLiteStatus Invoke(); + + // This is the recommended API for an application to pass an external payload + // pointer as an external context to kernels. The life time of the payload + // pointer should be at least as long as this interpreter. TFLM supports only + // one external context. 
+ TfLiteStatus SetMicroExternalContext(void* external_context_payload); + + TfLiteTensor* input(size_t index); + size_t inputs_size() const { + return model_->subgraphs()->Get(0)->inputs()->size(); + } + const flatbuffers::Vector& inputs() const { + return *model_->subgraphs()->Get(0)->inputs(); + } + TfLiteTensor* input_tensor(size_t index) { return input(index); } + template + T* typed_input_tensor(int tensor_index) { + if (TfLiteTensor* tensor_ptr = input_tensor(tensor_index)) { + if (tensor_ptr->type == typeToTfLiteType()) { + return GetTensorData(tensor_ptr); + } + } + return nullptr; + } + + TfLiteTensor* output(size_t index); + size_t outputs_size() const { + return model_->subgraphs()->Get(0)->outputs()->size(); + } + const flatbuffers::Vector& outputs() const { + return *model_->subgraphs()->Get(0)->outputs(); + } + TfLiteTensor* output_tensor(size_t index) { return output(index); } + template + T* typed_output_tensor(int tensor_index) { + if (TfLiteTensor* tensor_ptr = output_tensor(tensor_index)) { + if (tensor_ptr->type == typeToTfLiteType()) { + return GetTensorData(tensor_ptr); + } + } + return nullptr; + } + + // Reset the state to be what you would expect when the interpreter is first + // created. i.e. after Init and Prepare is called for the very first time. + TfLiteStatus Reset(); + + TfLiteStatus initialization_status() const { return initialization_status_; } + + // Populates node and registration pointers representing the inference graph + // of the model from values inside the flatbuffer (loaded from the TfLiteModel + // instance). Persistent data (e.g. operator data) is allocated from the + // arena. + TfLiteStatus PrepareNodeAndRegistrationDataFromFlatbuffer(); + + // For debugging only. + // Returns the actual used arena in bytes. This method gives the optimal arena + // size. It's only available after `AllocateTensors` has been called. + // Note that normally `tensor_arena` requires 16 bytes alignment to fully + // utilize the space. 
If it's not the case, the optimial arena size would be + // arena_used_bytes() + 16. + size_t arena_used_bytes() const { return allocator_.used_bytes(); } + + protected: + const MicroAllocator& allocator() const { return allocator_; } + const TfLiteContext& context() const { return context_; } + + private: + // TODO(b/158263161): Consider switching to Create() function to enable better + // error reporting during initialization. + void Init(MicroProfilerInterface* profiler); + + // Gets the current subgraph index used from within context methods. + int get_subgraph_index() { return graph_.GetCurrentSubgraphIndex(); } + + const Model* model_; + const MicroOpResolver& op_resolver_; + TfLiteContext context_ = {}; + MicroAllocator& allocator_; + MicroGraph graph_; + bool tensors_allocated_; + + TfLiteStatus initialization_status_; + + ScratchBufferHandle* scratch_buffer_handles_ = nullptr; + + // TODO(b/162311891): Clean these pointers up when this class supports buffers + // from TfLiteEvalTensor. + TfLiteTensor** input_tensors_; + TfLiteTensor** output_tensors_; + + MicroContext micro_context_; +}; + +} // namespace tflite + +#endif // TENSORFLOW_LITE_MICRO_MICRO_INTERPRETER_H_ diff --git a/tensorflow/lite/micro/micro_interpreter_test.cc b/tensorflow/lite/micro/micro_interpreter_test.cc new file mode 100644 index 0000000..0ba31c4 --- /dev/null +++ b/tensorflow/lite/micro/micro_interpreter_test.cc @@ -0,0 +1,551 @@ +/* Copyright 2022 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#include "tensorflow/lite/micro/micro_interpreter.h" + +#include + +#include "tensorflow/lite/micro/arena_allocator/recording_single_arena_buffer_allocator.h" +#include "tensorflow/lite/micro/compatibility.h" +#include "tensorflow/lite/micro/micro_arena_constants.h" +#include "tensorflow/lite/micro/micro_profiler_interface.h" +#include "tensorflow/lite/micro/recording_micro_allocator.h" +#include "tensorflow/lite/micro/test_helpers.h" +#include "tensorflow/lite/micro/testing/micro_test.h" + +namespace tflite { +namespace { + +constexpr size_t buffer_arena_size = 256 * 1024; +uint8_t arena_buffer[buffer_arena_size]; +class MockProfiler : public MicroProfilerInterface { + public: + MockProfiler() : event_starts_(0), event_ends_(0) {} + + uint32_t BeginEvent(const char* tag) override { + event_starts_++; + return 0; + } + + void EndEvent(uint32_t event_handle) override { event_ends_++; } + + int event_starts() { return event_starts_; } + int event_ends() { return event_ends_; } + + private: + int event_starts_; + int event_ends_; + + TF_LITE_REMOVE_VIRTUAL_DELETE +}; + +} // namespace +} // namespace tflite + +TF_LITE_MICRO_TESTS_BEGIN + +TF_LITE_MICRO_TEST(TestInterpreter) { + const tflite::Model* model = tflite::testing::GetSimpleMockModel(); + TF_LITE_MICRO_EXPECT(nullptr != model); + tflite::testing::TestingOpResolver op_resolver; + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, + tflite::testing::GetTestingOpResolver(op_resolver)); + + constexpr size_t allocator_buffer_size = 2000; + uint8_t allocator_buffer[allocator_buffer_size]; + + // Create a new scope so that we can test the destructor. 
+ { + tflite::MicroInterpreter interpreter(model, op_resolver, allocator_buffer, + allocator_buffer_size); + TF_LITE_MICRO_EXPECT_EQ(interpreter.AllocateTensors(), kTfLiteOk); + TF_LITE_MICRO_EXPECT_LE(interpreter.arena_used_bytes(), 928 + 100); + TF_LITE_MICRO_EXPECT_EQ(static_cast(1), interpreter.inputs_size()); + TF_LITE_MICRO_EXPECT_EQ(static_cast(2), interpreter.outputs_size()); + + TfLiteTensor* input = interpreter.input(0); + TF_LITE_MICRO_EXPECT(nullptr != input); + TF_LITE_MICRO_EXPECT_EQ(kTfLiteInt32, input->type); + TF_LITE_MICRO_EXPECT_EQ(1, input->dims->size); + TF_LITE_MICRO_EXPECT_EQ(1, input->dims->data[0]); + TF_LITE_MICRO_EXPECT_EQ(static_cast(4), input->bytes); + TF_LITE_MICRO_EXPECT(nullptr != input->data.i32); + input->data.i32[0] = 21; + + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, interpreter.Invoke()); + + TfLiteTensor* output = interpreter.output(0); + TF_LITE_MICRO_EXPECT(nullptr != output); + TF_LITE_MICRO_EXPECT_EQ(kTfLiteInt32, output->type); + TF_LITE_MICRO_EXPECT_EQ(1, output->dims->size); + TF_LITE_MICRO_EXPECT_EQ(1, output->dims->data[0]); + TF_LITE_MICRO_EXPECT_EQ(static_cast(4), output->bytes); + TF_LITE_MICRO_EXPECT(nullptr != output->data.i32); + TF_LITE_MICRO_EXPECT_EQ(42, output->data.i32[0]); + + output = interpreter.output(1); + TF_LITE_MICRO_EXPECT(nullptr != output); + TF_LITE_MICRO_EXPECT_EQ(kTfLiteInt32, output->type); + TF_LITE_MICRO_EXPECT_EQ(1, output->dims->size); + TF_LITE_MICRO_EXPECT_EQ(1, output->dims->data[0]); + TF_LITE_MICRO_EXPECT_EQ(static_cast(4), output->bytes); + TF_LITE_MICRO_EXPECT(nullptr != output->data.i32); + TF_LITE_MICRO_EXPECT_EQ(42, output->data.i32[0]); + } + + TF_LITE_MICRO_EXPECT_EQ(tflite::testing::MockCustom::freed_, true); +} + +TF_LITE_MICRO_TEST(TestMultiTenantInterpreter) { + tflite::testing::TestingOpResolver op_resolver; + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, + tflite::testing::GetTestingOpResolver(op_resolver)); + constexpr size_t arena_size = 8192; + uint8_t arena[arena_size]; + + size_t 
simple_model_head_usage = 0, complex_model_head_usage = 0; + + // Get simple_model_head_usage. + { + tflite::RecordingMicroAllocator* allocator = + tflite::RecordingMicroAllocator::Create(arena, arena_size); + const tflite::Model* model0 = tflite::testing::GetSimpleMockModel(); + tflite::MicroInterpreter interpreter0(model0, op_resolver, allocator); + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, interpreter0.AllocateTensors()); + simple_model_head_usage = + allocator->GetSimpleMemoryAllocator()->GetNonPersistentUsedBytes(); + + TfLiteTensor* input = interpreter0.input(0); + TfLiteTensor* output = interpreter0.output(0); + input->data.i32[0] = 21; + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, interpreter0.Invoke()); + TF_LITE_MICRO_EXPECT_EQ(42, output->data.i32[0]); + } + + // Shared allocator for various models. + tflite::RecordingMicroAllocator* allocator = + tflite::RecordingMicroAllocator::Create(arena, arena_size); + + // Get complex_model_head_usage. No head space reuse since it's the first + // model allocated in the `allocator`. + const tflite::Model* model1 = tflite::testing::GetComplexMockModel(); + tflite::MicroInterpreter interpreter1(model1, op_resolver, allocator); + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, interpreter1.AllocateTensors()); + TfLiteTensor* input1 = interpreter1.input(0); + TfLiteTensor* output1 = interpreter1.output(0); + complex_model_head_usage = + allocator->GetSimpleMemoryAllocator()->GetNonPersistentUsedBytes(); + + // Allocate simple model from the same `allocator`. Some head space will + // be reused thanks to multi-tenant TFLM support. Also makes sure that + // the output is correct. + const tflite::Model* model2 = tflite::testing::GetSimpleMockModel(); + tflite::MicroInterpreter interpreter2(model2, op_resolver, allocator); + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, interpreter2.AllocateTensors()); + TfLiteTensor* input2 = interpreter2.input(0); + TfLiteTensor* output2 = interpreter2.output(0); + // Verify that 1 + 1 < 2. 
+ size_t multi_tenant_head_usage = + allocator->GetSimpleMemoryAllocator()->GetNonPersistentUsedBytes(); + TF_LITE_MICRO_EXPECT_LE(multi_tenant_head_usage, + complex_model_head_usage + simple_model_head_usage); + + // Now we have model1 and model2 sharing the same `allocator`. + // Let's make sure that they can produce correct results. + TF_LITE_MICRO_EXPECT_EQ(kTfLiteInt32, input1->type); + input1->data.i32[0] = 10; + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, interpreter1.Invoke()); + // Output tensor for the first model. + TF_LITE_MICRO_EXPECT_EQ(kTfLiteInt32, output1->type); + TF_LITE_MICRO_EXPECT_EQ(10, output1->data.i32[0]); + + TF_LITE_MICRO_EXPECT_EQ(kTfLiteInt32, input2->type); + input2->data.i32[0] = 21; + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, interpreter2.Invoke()); + // Output for the second model. + TF_LITE_MICRO_EXPECT_EQ(kTfLiteInt32, output2->type); + TF_LITE_MICRO_EXPECT_EQ(42, output2->data.i32[0]); + + // Allocate another complex model from the `allocator` will not increase + // head space usage. + const tflite::Model* model3 = tflite::testing::GetComplexMockModel(); + tflite::MicroInterpreter interpreter3(model3, op_resolver, allocator); + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, interpreter3.AllocateTensors()); + TfLiteTensor* input3 = interpreter3.input(0); + TfLiteTensor* output3 = interpreter3.output(0); + TF_LITE_MICRO_EXPECT_EQ(kTfLiteInt32, input3->type); + input3->data.i32[0] = 10; + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, interpreter3.Invoke()); + // Output tensor for the third model. + TF_LITE_MICRO_EXPECT_EQ(kTfLiteInt32, output3->type); + TF_LITE_MICRO_EXPECT_EQ(10, output3->data.i32[0]); + // No increase on the head usage as we're reusing the space. 
+ TF_LITE_MICRO_EXPECT_EQ( + multi_tenant_head_usage, + allocator->GetSimpleMemoryAllocator()->GetNonPersistentUsedBytes()); +} + +TF_LITE_MICRO_TEST(TestKernelMemoryPlanning) { + const tflite::Model* model = tflite::testing::GetSimpleStatefulModel(); + TF_LITE_MICRO_EXPECT(nullptr != model); + + tflite::testing::TestingOpResolver op_resolver; + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, + tflite::testing::GetTestingOpResolver(op_resolver)); + + constexpr size_t allocator_buffer_size = 4096 + 1024; + uint8_t allocator_buffer[allocator_buffer_size]; + + tflite::RecordingMicroAllocator* allocator = + tflite::RecordingMicroAllocator::Create(allocator_buffer, + allocator_buffer_size); + + // Make sure kernel memory planning works in multi-tenant context. + for (int i = 0; i < 3; i++) { + tflite::MicroInterpreter interpreter(model, op_resolver, allocator); + TF_LITE_MICRO_EXPECT_EQ(interpreter.AllocateTensors(), kTfLiteOk); + TF_LITE_MICRO_EXPECT_EQ(static_cast(1), interpreter.inputs_size()); + TF_LITE_MICRO_EXPECT_EQ(static_cast(2), interpreter.outputs_size()); + + TfLiteTensor* input = interpreter.input(0); + TF_LITE_MICRO_EXPECT_EQ(1, input->dims->size); + TF_LITE_MICRO_EXPECT_EQ(3, input->dims->data[0]); + input->data.uint8[0] = 2; + input->data.uint8[1] = 3; + input->data.uint8[2] = 1; + + uint8_t expected_median = 2; + + { + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, interpreter.Invoke()); + TfLiteTensor* median = interpreter.output(0); + TF_LITE_MICRO_EXPECT_EQ(expected_median, median->data.uint8[0]); + TfLiteTensor* invoke_count = interpreter.output(1); + TF_LITE_MICRO_EXPECT_EQ(1, invoke_count->data.i32[0]); + } + + { + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, interpreter.Invoke()); + TfLiteTensor* median = interpreter.output(0); + TF_LITE_MICRO_EXPECT_EQ(expected_median, median->data.uint8[0]); + TfLiteTensor* invoke_count = interpreter.output(1); + TF_LITE_MICRO_EXPECT_EQ(2, invoke_count->data.i32[0]); + } + } +} + +// The interpreter initialization requires multiple steps and 
this test case +// ensures that simply creating and destructing an interpreter object is ok. +// b/147830765 has one example of a change that caused trouble for this simple +// case. +TF_LITE_MICRO_TEST(TestIncompleteInitialization) { + const tflite::Model* model = tflite::testing::GetComplexMockModel(); + TF_LITE_MICRO_EXPECT(nullptr != model); + + tflite::testing::TestingOpResolver op_resolver; + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, + tflite::testing::GetTestingOpResolver(op_resolver)); + + constexpr size_t allocator_buffer_size = 2048; + uint8_t allocator_buffer[allocator_buffer_size]; + + tflite::MicroInterpreter interpreter(model, op_resolver, allocator_buffer, + allocator_buffer_size); +} + +// Test that an interpreter with a supplied profiler correctly calls the +// profiler each time an operator is invoked. +TF_LITE_MICRO_TEST(InterpreterWithProfilerShouldProfileOps) { + const tflite::Model* model = tflite::testing::GetComplexMockModel(); + TF_LITE_MICRO_EXPECT(nullptr != model); + + tflite::testing::TestingOpResolver op_resolver; + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, + tflite::testing::GetTestingOpResolver(op_resolver)); + + constexpr size_t allocator_buffer_size = 2048; + uint8_t allocator_buffer[allocator_buffer_size]; + tflite::MockProfiler profiler; + tflite::MicroInterpreter interpreter(model, op_resolver, allocator_buffer, + allocator_buffer_size, nullptr, + &profiler); + + TF_LITE_MICRO_EXPECT_EQ(profiler.event_starts(), 0); + TF_LITE_MICRO_EXPECT_EQ(profiler.event_ends(), 0); + TF_LITE_MICRO_EXPECT_EQ(interpreter.AllocateTensors(), kTfLiteOk); + TF_LITE_MICRO_EXPECT_EQ(interpreter.Invoke(), kTfLiteOk); +#ifndef TF_LITE_STRIP_ERROR_STRINGS + TF_LITE_MICRO_EXPECT_EQ(profiler.event_starts(), 3); + TF_LITE_MICRO_EXPECT_EQ(profiler.event_ends(), 3); +#else + TF_LITE_MICRO_EXPECT_EQ(profiler.event_starts(), 0); + TF_LITE_MICRO_EXPECT_EQ(profiler.event_ends(), 0); +#endif +} + +TF_LITE_MICRO_TEST(TestIncompleteInitializationAllocationsWithSmallArena) { + 
const tflite::Model* model = tflite::testing::GetComplexMockModel(); + TF_LITE_MICRO_EXPECT(nullptr != model); + + tflite::testing::TestingOpResolver op_resolver; + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, + tflite::testing::GetTestingOpResolver(op_resolver)); + + // This test is designed to create the following classes/buffers successfully + // on the arena: + // + // From tail: RecordingSingleArenaBufferAllocator, RecordingMicroAllocator, + // RecordingMicroAllocator. + // + // From head:ScratchBufferRequest buffer. + // + // Since sizes of the above classes vary between architecture, we use sizeof + // for whatever is visible from this test file. For those that are not visible + // from this test file, we use the upper bound for x86 architecture since it + // is not ideal to expose definitions for test only. + constexpr size_t max_scratch_buffer_request_size = 192; + constexpr size_t max_micro_builtin_data_allocator_size = 16; + constexpr size_t allocator_buffer_size = + sizeof(tflite::RecordingSingleArenaBufferAllocator) + + sizeof(tflite::RecordingMicroAllocator) + + max_micro_builtin_data_allocator_size + max_scratch_buffer_request_size; + uint8_t allocator_buffer[allocator_buffer_size]; + + tflite::RecordingMicroAllocator* allocator = + tflite::RecordingMicroAllocator::Create(allocator_buffer, + allocator_buffer_size); + TF_LITE_MICRO_EXPECT(nullptr != allocator); + + tflite::MicroInterpreter interpreter(model, op_resolver, allocator); + + // Interpreter fails because arena is too small: + TF_LITE_MICRO_EXPECT_EQ(interpreter.Invoke(), kTfLiteError); + + // The head buffer use cannot exceed the upper bound from x86. 
+ TF_LITE_MICRO_EXPECT_LE( + allocator->GetSimpleMemoryAllocator()->GetNonPersistentUsedBytes(), + max_scratch_buffer_request_size); + + // Ensure allocations are zero (ignore tail since some internal structs are + // initialized with this space): + TF_LITE_MICRO_EXPECT_EQ( + static_cast(0), + allocator + ->GetRecordedAllocation( + tflite::RecordedAllocationType::kTfLiteEvalTensorData) + .used_bytes); + TF_LITE_MICRO_EXPECT_EQ( + static_cast(0), + allocator + ->GetRecordedAllocation( + tflite::RecordedAllocationType::kTfLiteTensorVariableBufferData) + .used_bytes); + TF_LITE_MICRO_EXPECT_EQ( + static_cast(0), + allocator->GetRecordedAllocation(tflite::RecordedAllocationType::kOpData) + .used_bytes); +} + +TF_LITE_MICRO_TEST(TestInterpreterDoesNotAllocateUntilInvoke) { + const tflite::Model* model = tflite::testing::GetComplexMockModel(); + TF_LITE_MICRO_EXPECT(nullptr != model); + + tflite::testing::TestingOpResolver op_resolver; + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, + tflite::testing::GetTestingOpResolver(op_resolver)); + + constexpr size_t allocator_buffer_size = 1024 * 10; + uint8_t allocator_buffer[allocator_buffer_size]; + + tflite::RecordingMicroAllocator* allocator = + tflite::RecordingMicroAllocator::Create(allocator_buffer, + allocator_buffer_size); + TF_LITE_MICRO_EXPECT(nullptr != allocator); + + tflite::MicroInterpreter interpreter(model, op_resolver, allocator); + + // Ensure allocations are zero (ignore tail since some internal structs are + // initialized with this space): + TF_LITE_MICRO_EXPECT_EQ( + static_cast(0), + allocator->GetSimpleMemoryAllocator()->GetNonPersistentUsedBytes()); + TF_LITE_MICRO_EXPECT_EQ( + static_cast(0), + allocator + ->GetRecordedAllocation( + tflite::RecordedAllocationType::kTfLiteTensorVariableBufferData) + .used_bytes); + TF_LITE_MICRO_EXPECT_EQ( + static_cast(0), + allocator + ->GetRecordedAllocation( + tflite::RecordedAllocationType::kTfLiteEvalTensorData) + .used_bytes); + TF_LITE_MICRO_EXPECT_EQ( + static_cast(0), + 
allocator->GetRecordedAllocation(tflite::RecordedAllocationType::kOpData) + .used_bytes); + + TF_LITE_MICRO_EXPECT_EQ(interpreter.Invoke(), kTfLiteOk); + allocator->PrintAllocations(); + + // Allocation sizes vary based on platform - check that allocations are now + // non-zero: + TF_LITE_MICRO_EXPECT_GT( + allocator->GetSimpleMemoryAllocator()->GetNonPersistentUsedBytes(), + static_cast(0)); + TF_LITE_MICRO_EXPECT_GT( + allocator + ->GetRecordedAllocation( + tflite::RecordedAllocationType::kTfLiteEvalTensorData) + .used_bytes, + 0); + + TF_LITE_MICRO_EXPECT_GT( + allocator + ->GetRecordedAllocation( + tflite::RecordedAllocationType::kTfLiteTensorVariableBufferData) + .used_bytes, + static_cast(0)); + + // TODO(b/160160549): This check is mostly meaningless right now because the + // operator creation in our mock models is inconsistent. Revisit what + // this check should be once the mock models are properly created. + TF_LITE_MICRO_EXPECT_EQ( + allocator->GetRecordedAllocation(tflite::RecordedAllocationType::kOpData) + .used_bytes, + static_cast(0)); +} + +TF_LITE_MICRO_TEST(TestInterpreterMultipleInputs) { + const tflite::Model* model = tflite::testing::GetSimpleMultipleInputsModel(); + TF_LITE_MICRO_EXPECT(nullptr != model); + + tflite::testing::TestingOpResolver op_resolver; + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, + tflite::testing::GetTestingOpResolver(op_resolver)); + + constexpr size_t allocator_buffer_size = 2000; + uint8_t allocator_buffer[allocator_buffer_size]; + + // Create a new scope so that we can test the destructor. 
+ { + tflite::MicroInterpreter interpreter(model, op_resolver, allocator_buffer, + allocator_buffer_size); + + TF_LITE_MICRO_EXPECT_EQ(interpreter.AllocateTensors(), kTfLiteOk); + TF_LITE_MICRO_EXPECT_LE(interpreter.arena_used_bytes(), 928 + 100); + + TF_LITE_MICRO_EXPECT_EQ(static_cast(3), interpreter.inputs_size()); + TF_LITE_MICRO_EXPECT_EQ(static_cast(1), interpreter.outputs_size()); + + TfLiteTensor* input = interpreter.input(0); + TF_LITE_MICRO_EXPECT(nullptr != input); + TF_LITE_MICRO_EXPECT_EQ(kTfLiteInt32, input->type); + TF_LITE_MICRO_EXPECT_EQ(1, input->dims->size); + TF_LITE_MICRO_EXPECT_EQ(1, input->dims->data[0]); + TF_LITE_MICRO_EXPECT_EQ(static_cast(4), input->bytes); + TF_LITE_MICRO_EXPECT(nullptr != input->data.i32); + input->data.i32[0] = 21; + + TfLiteTensor* input1 = interpreter.input(1); + TF_LITE_MICRO_EXPECT(nullptr != input1); + TF_LITE_MICRO_EXPECT_EQ(kTfLiteInt8, input1->type); + TF_LITE_MICRO_EXPECT_EQ(1, input1->dims->size); + TF_LITE_MICRO_EXPECT_EQ(1, input1->dims->data[0]); + TF_LITE_MICRO_EXPECT_EQ(static_cast(1), input1->bytes); + TF_LITE_MICRO_EXPECT(nullptr != input1->data.i32); + input1->data.i32[0] = 21; + + TfLiteTensor* input2 = interpreter.input(2); + TF_LITE_MICRO_EXPECT(nullptr != input2); + TF_LITE_MICRO_EXPECT_EQ(kTfLiteInt32, input2->type); + TF_LITE_MICRO_EXPECT_EQ(1, input2->dims->size); + TF_LITE_MICRO_EXPECT_EQ(1, input2->dims->data[0]); + TF_LITE_MICRO_EXPECT_EQ(static_cast(4), input2->bytes); + TF_LITE_MICRO_EXPECT(nullptr != input2->data.i32); + input2->data.i32[0] = 24; + + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, interpreter.Invoke()); + + TfLiteTensor* output = interpreter.output(0); + TF_LITE_MICRO_EXPECT(nullptr != output); + TF_LITE_MICRO_EXPECT_EQ(kTfLiteInt32, output->type); + TF_LITE_MICRO_EXPECT_EQ(1, output->dims->size); + TF_LITE_MICRO_EXPECT_EQ(1, output->dims->data[0]); + TF_LITE_MICRO_EXPECT_EQ(static_cast(4), output->bytes); + TF_LITE_MICRO_EXPECT(nullptr != output->data.i32); + 
TF_LITE_MICRO_EXPECT_EQ(66, output->data.i32[0]); + } + + TF_LITE_MICRO_EXPECT_EQ(tflite::testing::MultipleInputs::freed_, true); +} + +TF_LITE_MICRO_TEST(TestInterpreterNullInputsAndOutputs) { + const tflite::Model* model = + tflite::testing::GetSimpleModelWithNullInputsAndOutputs(); + TF_LITE_MICRO_EXPECT(nullptr != model); + + tflite::testing::TestingOpResolver op_resolver; + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, + tflite::testing::GetTestingOpResolver(op_resolver)); + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, op_resolver.AddCallOnce()); + + constexpr size_t allocator_buffer_size = 2000; + uint8_t allocator_buffer[allocator_buffer_size]; + + tflite::MicroInterpreter interpreter(model, op_resolver, allocator_buffer, + allocator_buffer_size); + + TF_LITE_MICRO_EXPECT_EQ(interpreter.AllocateTensors(), kTfLiteOk); + + TF_LITE_MICRO_EXPECT_EQ(static_cast(1), interpreter.inputs_size()); + TF_LITE_MICRO_EXPECT_EQ(static_cast(1), interpreter.outputs_size()); + + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, interpreter.Invoke()); +} + +// This test is disabled from Bluepill platform because it requires more SRAM +// than what our Bluepill simulation platform specifies. +TF_LITE_MICRO_TEST(TestArenaUsedBytes) { + const tflite::Model* model = tflite::testing::GetModelWith256x256Tensor(); + TF_LITE_MICRO_EXPECT(nullptr != model); + + tflite::testing::TestingOpResolver op_resolver; + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, + tflite::testing::GetTestingOpResolver(op_resolver)); + tflite::MicroInterpreter interpreter(model, op_resolver, tflite::arena_buffer, + tflite::buffer_arena_size); + TF_LITE_MICRO_EXPECT_EQ(interpreter.AllocateTensors(), kTfLiteOk); + + // Store the required arena size before Invoke() because this is what this + // api might be used. + size_t used_arena_size = interpreter.arena_used_bytes(); + + TF_LITE_MICRO_EXPECT_EQ(interpreter.Invoke(), kTfLiteOk); + + // The reported used_arena_size plus alignment padding is sufficient for this + // model to run. 
Plus alignment padding is because SingleArenaBufferAllocator + // is given the arena after the alignment. + size_t required_arena_size = + used_arena_size + tflite::MicroArenaBufferAlignment(); + tflite::MicroInterpreter interpreter2( + model, op_resolver, tflite::arena_buffer, required_arena_size); + TF_LITE_MICRO_EXPECT_EQ(interpreter2.AllocateTensors(), kTfLiteOk); + + TF_LITE_MICRO_EXPECT_EQ(interpreter2.Invoke(), kTfLiteOk); +} + +TF_LITE_MICRO_TESTS_END diff --git a/tensorflow/lite/micro/micro_log.cc b/tensorflow/lite/micro/micro_log.cc new file mode 100644 index 0000000..9c8ccaa --- /dev/null +++ b/tensorflow/lite/micro/micro_log.cc @@ -0,0 +1,47 @@ +/* Copyright 2018 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#include "tensorflow/lite/micro/micro_log.h" + +#include +#include +#include + +#if !defined(TF_LITE_STRIP_ERROR_STRINGS) +#include "tensorflow/lite/micro/debug_log.h" +#include "tensorflow/lite/micro/micro_string.h" +#endif + +void Log(const char* format, va_list args) { +#if !defined(TF_LITE_STRIP_ERROR_STRINGS) + // Only pulling in the implementation of this function for builds where we + // expect to make use of it to be extra cautious about not increasing the code + // size. 
+ static constexpr int kMaxLogLen = 256; + char log_buffer[kMaxLogLen]; + MicroVsnprintf(log_buffer, kMaxLogLen, format, args); + DebugLog(log_buffer); + DebugLog("\r\n"); +#endif +} + +#if !defined(TF_LITE_STRIP_ERROR_STRINGS) +void MicroPrintf(const char* format, ...) { + va_list args; + va_start(args, format); + Log(format, args); + va_end(args); +} +#endif diff --git a/tensorflow/lite/micro/micro_log.h b/tensorflow/lite/micro/micro_log.h new file mode 100644 index 0000000..d9cfbe8 --- /dev/null +++ b/tensorflow/lite/micro/micro_log.h @@ -0,0 +1,44 @@ +/* Copyright 2018 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ +#ifndef TENSORFLOW_LITE_MICRO_MICRO_LOG_H_ +#define TENSORFLOW_LITE_MICRO_MICRO_LOG_H_ + +#include + +// This is a free function used to perform the actual logging. +// This function will be used by MicroPrintf and MicroErrorReporter::Report() +void Log(const char* format, va_list args); + +#if !defined(TF_LITE_STRIP_ERROR_STRINGS) +// This function can be used independent of the MicroErrorReporter to get +// printf-like functionalitys and are common to all target platforms. +void MicroPrintf(const char* format, ...); +#else +// We use a #define to ensure that the strings are completely stripped, to +// prevent an unnecessary increase in the binary size. +#define MicroPrintf(...) 
tflite::Unused(__VA_ARGS__) +#endif + +namespace tflite { + +// From +// https://stackoverflow.com/questions/23235910/variadic-unused-function-macro +template +void Unused(Args&&... args) { + (void)(sizeof...(args)); +} +} // namespace tflite + +#endif // TENSORFLOW_LITE_MICRO_MICRO_LOG_H_ diff --git a/tensorflow/lite/micro/micro_log_test.cc b/tensorflow/lite/micro/micro_log_test.cc new file mode 100644 index 0000000..97ac8be --- /dev/null +++ b/tensorflow/lite/micro/micro_log_test.cc @@ -0,0 +1,32 @@ +/* Copyright 2021 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/ + +#include "tensorflow/lite/micro/micro_log.h" + +#include "tensorflow/lite/micro/system_setup.h" + +namespace tflite { +inline void InitializeTest() { InitializeTarget(); } +} // namespace tflite + +int main(int argc, char** argv) { + tflite::InitializeTest(); +#ifndef TF_LITE_STRIP_ERROR_STRINGS + MicroPrintf("Number: %d", 42); + MicroPrintf("Badly-formed format string %"); + MicroPrintf("Another % badly-formed %% format string"); + MicroPrintf("~~~%s~~~", "ALL TESTS PASSED"); +#endif // !defined(TF_LITE_STRIP_ERROR_STRINGS) +} diff --git a/tensorflow/lite/micro/micro_mutable_op_resolver.h b/tensorflow/lite/micro/micro_mutable_op_resolver.h new file mode 100644 index 0000000..3fe9c94 --- /dev/null +++ b/tensorflow/lite/micro/micro_mutable_op_resolver.h @@ -0,0 +1,624 @@ +/* Copyright 2023 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/ +#ifndef TENSORFLOW_LITE_MICRO_MICRO_MUTABLE_OP_RESOLVER_H_ +#define TENSORFLOW_LITE_MICRO_MICRO_MUTABLE_OP_RESOLVER_H_ + +#include +#include + +#include "tensorflow/lite/c/common.h" +#include "tensorflow/lite/core/api/flatbuffer_conversions.h" +#include "tensorflow/lite/kernels/internal/compatibility.h" +#include "tensorflow/lite/kernels/op_macros.h" +#include "tensorflow/lite/micro/compatibility.h" +#include "tensorflow/lite/micro/kernels/add.h" +#include "tensorflow/lite/micro/kernels/conv.h" +#include "tensorflow/lite/micro/kernels/depthwise_conv.h" +#include "tensorflow/lite/micro/kernels/ethosu.h" +#include "tensorflow/lite/micro/kernels/fully_connected.h" +#include "tensorflow/lite/micro/kernels/micro_ops.h" +#include "tensorflow/lite/micro/kernels/pooling.h" +#include "tensorflow/lite/micro/kernels/reduce.h" +#include "tensorflow/lite/micro/kernels/softmax.h" +#include "tensorflow/lite/micro/micro_log.h" +#include "tensorflow/lite/micro/micro_op_resolver.h" +#include "tensorflow/lite/schema/schema_generated.h" + +namespace tflite { +TFLMRegistration* Register_DETECTION_POSTPROCESS(); + +template +class MicroMutableOpResolver : public MicroOpResolver { + public: + TF_LITE_REMOVE_VIRTUAL_DELETE + + explicit MicroMutableOpResolver() {} + + const TFLMRegistration* FindOp(tflite::BuiltinOperator op) const override { + if (op == BuiltinOperator_CUSTOM) return nullptr; + + for (unsigned int i = 0; i < registrations_len_; ++i) { + const TFLMRegistration& registration = registrations_[i]; + if (registration.builtin_code == op) { + return ®istration; + } + } + return nullptr; + } + + const TFLMRegistration* FindOp(const char* op) const override { + for (unsigned int i = 0; i < registrations_len_; ++i) { + const TFLMRegistration& registration = registrations_[i]; + if ((registration.builtin_code == BuiltinOperator_CUSTOM) && + (strcmp(registration.custom_name, op) == 0)) { + return 
®istration; + } + } + return nullptr; + } + + TfLiteBridgeBuiltinParseFunction GetOpDataParser( + BuiltinOperator op) const override { + TFLITE_DCHECK(num_buitin_ops_ <= tOpCount); + for (unsigned int i = 0; i < num_buitin_ops_; ++i) { + if (builtin_codes_[i] == op) return builtin_parsers_[i]; + } + return nullptr; + } + + // Registers a Custom Operator with the MicroOpResolver. + // + // Only the first call for a given name will be successful. i.e. if this + // function is called again for a previously added Custom Operator, the + // MicroOpResolver will be unchanged and this function will return + // kTfLiteError. + TfLiteStatus AddCustom(const char* name, TFLMRegistration* registration) { + if (registrations_len_ >= tOpCount) { + MicroPrintf( + "Couldn't register custom op '%s', resolver size is too" + "small (%d)", + name, tOpCount); + return kTfLiteError; + } + + if (FindOp(name) != nullptr) { + MicroPrintf("Calling AddCustom for the same op more than once "); + MicroPrintf("is not supported (Op: %s).", name); + return kTfLiteError; + } + + TFLMRegistration* new_registration = ®istrations_[registrations_len_]; + registrations_len_ += 1; + + *new_registration = *registration; + new_registration->builtin_code = BuiltinOperator_CUSTOM; + new_registration->custom_name = name; + return kTfLiteOk; + } + + // The Add* functions below add the various Builtin operators to the + // MicroMutableOpResolver object. 
+ + TfLiteStatus AddAbs() { + return AddBuiltin(BuiltinOperator_ABS, Register_ABS(), ParseAbs); + } + + TfLiteStatus AddAdd(const TFLMRegistration& registration = Register_ADD()) { + return AddBuiltin(BuiltinOperator_ADD, registration, ParseAdd); + } + + TfLiteStatus AddAddN() { + return AddBuiltin(BuiltinOperator_ADD_N, tflite::Register_ADD_N(), + ParseAddN); + } + + TfLiteStatus AddArgMax() { + return AddBuiltin(BuiltinOperator_ARG_MAX, Register_ARG_MAX(), ParseArgMax); + } + + TfLiteStatus AddArgMin() { + return AddBuiltin(BuiltinOperator_ARG_MIN, Register_ARG_MIN(), ParseArgMin); + } + + TfLiteStatus AddAssignVariable() { + return AddBuiltin(BuiltinOperator_ASSIGN_VARIABLE, + tflite::Register_ASSIGN_VARIABLE(), ParseAssignVariable); + } + + TfLiteStatus AddAveragePool2D( + const TFLMRegistration& registration = Register_AVERAGE_POOL_2D()) { + return AddBuiltin(BuiltinOperator_AVERAGE_POOL_2D, registration, ParsePool); + } + + TfLiteStatus AddBatchToSpaceNd() { + return AddBuiltin(BuiltinOperator_BATCH_TO_SPACE_ND, + Register_BATCH_TO_SPACE_ND(), ParseBatchToSpaceNd); + } + + TfLiteStatus AddBroadcastArgs() { + return AddBuiltin(BuiltinOperator_BROADCAST_ARGS, Register_BROADCAST_ARGS(), + ParseBroadcastArgs); + } + + TfLiteStatus AddBroadcastTo() { + return AddBuiltin(BuiltinOperator_BROADCAST_TO, Register_BROADCAST_TO(), + ParseBroadcastTo); + } + + TfLiteStatus AddCallOnce() { + return AddBuiltin(BuiltinOperator_CALL_ONCE, Register_CALL_ONCE(), + ParseCallOnce); + } + + TfLiteStatus AddCast() { + return AddBuiltin(BuiltinOperator_CAST, Register_CAST(), ParseCast); + } + + TfLiteStatus AddCeil() { + return AddBuiltin(BuiltinOperator_CEIL, Register_CEIL(), ParseCeil); + } + + TfLiteStatus AddCircularBuffer() { + return AddCustom("CIRCULAR_BUFFER", tflite::Register_CIRCULAR_BUFFER()); + } + + TfLiteStatus AddConcatenation() { + return AddBuiltin(BuiltinOperator_CONCATENATION, Register_CONCATENATION(), + ParseConcatenation); + } + + TfLiteStatus AddConv2D( + const 
TFLMRegistration& registration = Register_CONV_2D()) { + return AddBuiltin(BuiltinOperator_CONV_2D, registration, ParseConv2D); + } + + TfLiteStatus AddCos() { + return AddBuiltin(BuiltinOperator_COS, tflite::Register_COS(), ParseCos); + } + + TfLiteStatus AddCumSum() { + return AddBuiltin(BuiltinOperator_CUMSUM, tflite::Register_CUMSUM(), + ParseCumsum); + } + + TfLiteStatus AddDepthToSpace() { + return AddBuiltin(BuiltinOperator_DEPTH_TO_SPACE, + tflite::Register_DEPTH_TO_SPACE(), ParseDepthToSpace); + } + + TfLiteStatus AddDepthwiseConv2D( + const TFLMRegistration& registration = Register_DEPTHWISE_CONV_2D()) { + return AddBuiltin(BuiltinOperator_DEPTHWISE_CONV_2D, registration, + ParseDepthwiseConv2D); + } + + TfLiteStatus AddDequantize() { + return AddBuiltin(BuiltinOperator_DEQUANTIZE, tflite::Register_DEQUANTIZE(), + ParseDequantize); + } + + TfLiteStatus AddDetectionPostprocess() { + return AddCustom("TFLite_Detection_PostProcess", + tflite::Register_DETECTION_POSTPROCESS()); + } + + TfLiteStatus AddDiv() { + return AddBuiltin(BuiltinOperator_DIV, tflite::Register_DIV(), ParseDiv); + } + + TfLiteStatus AddElu() { + return AddBuiltin(BuiltinOperator_ELU, tflite::Register_ELU(), ParseElu); + } + + TfLiteStatus AddEqual() { + return AddBuiltin(BuiltinOperator_EQUAL, Register_EQUAL(), ParseEqual); + } + + TfLiteStatus AddEthosU() { + TFLMRegistration* registration = tflite::Register_ETHOSU(); + if (registration) { + return AddCustom(tflite::GetString_ETHOSU(), registration); + } + return kTfLiteOk; + } + + TfLiteStatus AddExp() { + return AddBuiltin(BuiltinOperator_EXP, Register_EXP(), ParseExp); + } + + TfLiteStatus AddExpandDims() { + return AddBuiltin(BuiltinOperator_EXPAND_DIMS, Register_EXPAND_DIMS(), + ParseExpandDims); + } + + TfLiteStatus AddFill() { + return AddBuiltin(BuiltinOperator_FILL, tflite::Register_FILL(), ParseFill); + } + + TfLiteStatus AddFloor() { + return AddBuiltin(BuiltinOperator_FLOOR, Register_FLOOR(), ParseFloor); + } + + 
TfLiteStatus AddFloorDiv() { + return AddBuiltin(BuiltinOperator_FLOOR_DIV, tflite::Register_FLOOR_DIV(), + ParseFloorDiv); + } + + TfLiteStatus AddFloorMod() { + return AddBuiltin(BuiltinOperator_FLOOR_MOD, tflite::Register_FLOOR_MOD(), + ParseFloorMod); + } + + TfLiteStatus AddFullyConnected( + const TFLMRegistration& registration = Register_FULLY_CONNECTED()) { + return AddBuiltin(BuiltinOperator_FULLY_CONNECTED, registration, + ParseFullyConnected); + } + + TfLiteStatus AddGather() { + return AddBuiltin(BuiltinOperator_GATHER, tflite::Register_GATHER(), + ParseGather); + } + + TfLiteStatus AddGatherNd() { + return AddBuiltin(BuiltinOperator_GATHER_ND, tflite::Register_GATHER_ND(), + ParseGatherNd); + } + + TfLiteStatus AddGreater() { + return AddBuiltin(BuiltinOperator_GREATER, Register_GREATER(), + ParseGreater); + } + + TfLiteStatus AddGreaterEqual() { + return AddBuiltin(BuiltinOperator_GREATER_EQUAL, Register_GREATER_EQUAL(), + ParseGreaterEqual); + } + + TfLiteStatus AddHardSwish() { + return AddBuiltin(BuiltinOperator_HARD_SWISH, tflite::Register_HARD_SWISH(), + ParseHardSwish); + } + + TfLiteStatus AddIf() { + return AddBuiltin(BuiltinOperator_IF, tflite::Register_IF(), ParseIf); + } + + TfLiteStatus AddL2Normalization() { + return AddBuiltin(BuiltinOperator_L2_NORMALIZATION, + Register_L2_NORMALIZATION(), ParseL2Normalization); + } + + TfLiteStatus AddL2Pool2D() { + return AddBuiltin(BuiltinOperator_L2_POOL_2D, tflite::Register_L2_POOL_2D(), + ParsePool); + } + + TfLiteStatus AddLeakyRelu() { + return AddBuiltin(BuiltinOperator_LEAKY_RELU, tflite::Register_LEAKY_RELU(), + ParseLeakyRelu); + } + + TfLiteStatus AddLess() { + return AddBuiltin(BuiltinOperator_LESS, Register_LESS(), ParseLess); + } + + TfLiteStatus AddLessEqual() { + return AddBuiltin(BuiltinOperator_LESS_EQUAL, Register_LESS_EQUAL(), + ParseLessEqual); + } + + TfLiteStatus AddLog() { + return AddBuiltin(BuiltinOperator_LOG, Register_LOG(), ParseLog); + } + + TfLiteStatus AddLogicalAnd() { 
+ return AddBuiltin(BuiltinOperator_LOGICAL_AND, + tflite::Register_LOGICAL_AND(), ParseLogicalAnd); + } + + TfLiteStatus AddLogicalNot() { + return AddBuiltin(BuiltinOperator_LOGICAL_NOT, Register_LOGICAL_NOT(), + ParseLogicalNot); + } + + TfLiteStatus AddLogicalOr() { + return AddBuiltin(BuiltinOperator_LOGICAL_OR, tflite::Register_LOGICAL_OR(), + ParseLogicalOr); + } + + TfLiteStatus AddLogistic() { + return AddBuiltin(BuiltinOperator_LOGISTIC, tflite::Register_LOGISTIC(), + ParseLogistic); + } + + TfLiteStatus AddLogSoftmax() { + return AddBuiltin(BuiltinOperator_LOG_SOFTMAX, + tflite::Register_LOG_SOFTMAX(), ParseLogSoftmax); + } + + TfLiteStatus AddMaximum() { + return AddBuiltin(BuiltinOperator_MAXIMUM, Register_MAXIMUM(), + ParseMaximum); + } + + TfLiteStatus AddMaxPool2D( + const TFLMRegistration& registration = Register_MAX_POOL_2D()) { + return AddBuiltin(BuiltinOperator_MAX_POOL_2D, registration, ParsePool); + } + + TfLiteStatus AddMirrorPad() { + return AddBuiltin(BuiltinOperator_MIRROR_PAD, tflite::Register_MIRROR_PAD(), + ParseMirrorPad); + } + + TfLiteStatus AddMean() { + return AddBuiltin(BuiltinOperator_MEAN, Register_MEAN(), ParseReducer); + } + + TfLiteStatus AddMinimum() { + return AddBuiltin(BuiltinOperator_MINIMUM, Register_MINIMUM(), + ParseMinimum); + } + + TfLiteStatus AddMul(const TFLMRegistration& registration = Register_MUL()) { + return AddBuiltin(BuiltinOperator_MUL, registration, ParseMul); + } + + TfLiteStatus AddNeg() { + return AddBuiltin(BuiltinOperator_NEG, Register_NEG(), ParseNeg); + } + + TfLiteStatus AddNotEqual() { + return AddBuiltin(BuiltinOperator_NOT_EQUAL, Register_NOT_EQUAL(), + ParseNotEqual); + } + + TfLiteStatus AddPack() { + return AddBuiltin(BuiltinOperator_PACK, Register_PACK(), ParsePack); + } + + TfLiteStatus AddPad(const TFLMRegistration& registration = Register_PAD()) { + return AddBuiltin(BuiltinOperator_PAD, registration, ParsePad); + } + + TfLiteStatus AddPadV2() { + return 
AddBuiltin(BuiltinOperator_PADV2, Register_PADV2(), ParsePadV2); + } + + TfLiteStatus AddPrelu() { + return AddBuiltin(BuiltinOperator_PRELU, tflite::Register_PRELU(), + ParsePrelu); + } + + TfLiteStatus AddQuantize() { + return AddBuiltin(BuiltinOperator_QUANTIZE, Register_QUANTIZE(), + ParseQuantize); + } + + TfLiteStatus AddReadVariable() { + return AddBuiltin(BuiltinOperator_READ_VARIABLE, + tflite::Register_READ_VARIABLE(), ParseReadVariable); + } + + TfLiteStatus AddReduceMax() { + return AddBuiltin(BuiltinOperator_REDUCE_MAX, Register_REDUCE_MAX(), + ParseReducer); + } + + TfLiteStatus AddRelu() { + return AddBuiltin(BuiltinOperator_RELU, tflite::Register_RELU(), ParseRelu); + } + + TfLiteStatus AddRelu6() { + return AddBuiltin(BuiltinOperator_RELU6, tflite::Register_RELU6(), + ParseRelu6); + } + + TfLiteStatus AddReshape() { + return AddBuiltin(BuiltinOperator_RESHAPE, + tflite::ops::micro::Register_RESHAPE(), ParseReshape); + } + + TfLiteStatus AddResizeBilinear() { + return AddBuiltin(BuiltinOperator_RESIZE_BILINEAR, + Register_RESIZE_BILINEAR(), ParseResizeBilinear); + } + + TfLiteStatus AddResizeNearestNeighbor() { + return AddBuiltin(BuiltinOperator_RESIZE_NEAREST_NEIGHBOR, + Register_RESIZE_NEAREST_NEIGHBOR(), + ParseResizeNearestNeighbor); + } + + TfLiteStatus AddRound() { + return AddBuiltin(BuiltinOperator_ROUND, + tflite::ops::micro::Register_ROUND(), ParseRound); + } + + TfLiteStatus AddRsqrt() { + return AddBuiltin(BuiltinOperator_RSQRT, Register_RSQRT(), ParseRsqrt); + } + + TfLiteStatus AddSelectV2() { + return AddBuiltin(BuiltinOperator_SELECT_V2, Register_SELECT_V2(), + ParseSelectV2); + } + + TfLiteStatus AddShape() { + return AddBuiltin(BuiltinOperator_SHAPE, Register_SHAPE(), ParseShape); + } + + TfLiteStatus AddSin() { + return AddBuiltin(BuiltinOperator_SIN, Register_SIN(), ParseSin); + } + + TfLiteStatus AddSlice() { + return AddBuiltin(BuiltinOperator_SLICE, Register_SLICE(), ParseSlice); + } + + TfLiteStatus AddSoftmax( + const 
TFLMRegistration& registration = Register_SOFTMAX()) { + return AddBuiltin(BuiltinOperator_SOFTMAX, registration, ParseSoftmax); + } + + TfLiteStatus AddSpaceToBatchNd() { + return AddBuiltin(BuiltinOperator_SPACE_TO_BATCH_ND, + Register_SPACE_TO_BATCH_ND(), ParseSpaceToBatchNd); + } + + TfLiteStatus AddSpaceToDepth() { + return AddBuiltin(BuiltinOperator_SPACE_TO_DEPTH, Register_SPACE_TO_DEPTH(), + ParseSpaceToDepth); + } + + TfLiteStatus AddSplit() { + return AddBuiltin(BuiltinOperator_SPLIT, Register_SPLIT(), ParseSplit); + } + + TfLiteStatus AddSplitV() { + return AddBuiltin(BuiltinOperator_SPLIT_V, Register_SPLIT_V(), ParseSplitV); + } + + TfLiteStatus AddSqueeze() { + return AddBuiltin(BuiltinOperator_SQUEEZE, Register_SQUEEZE(), + ParseSqueeze); + } + + TfLiteStatus AddSqrt() { + return AddBuiltin(BuiltinOperator_SQRT, Register_SQRT(), ParseSqrt); + } + + TfLiteStatus AddSquare() { + return AddBuiltin(BuiltinOperator_SQUARE, Register_SQUARE(), ParseSquare); + } + + TfLiteStatus AddSquaredDifference() { + return AddBuiltin(BuiltinOperator_SQUARED_DIFFERENCE, + tflite::Register_SQUARED_DIFFERENCE(), + ParseSquaredDifference); + } + + TfLiteStatus AddStridedSlice() { + return AddBuiltin(BuiltinOperator_STRIDED_SLICE, Register_STRIDED_SLICE(), + ParseStridedSlice); + } + + TfLiteStatus AddSub() { + return AddBuiltin(BuiltinOperator_SUB, tflite::Register_SUB(), ParseSub); + } + + TfLiteStatus AddSum() { + return AddBuiltin(BuiltinOperator_SUM, Register_SUM(), ParseReducer); + } + + TfLiteStatus AddSvdf(const TFLMRegistration& registration = Register_SVDF()) { + return AddBuiltin(BuiltinOperator_SVDF, registration, ParseSvdf); + } + + TfLiteStatus AddTanh() { + return AddBuiltin(BuiltinOperator_TANH, Register_TANH(), ParseTanh); + } + + TfLiteStatus AddTransposeConv() { + return AddBuiltin(BuiltinOperator_TRANSPOSE_CONV, + tflite::Register_TRANSPOSE_CONV(), ParseTransposeConv); + } + + TfLiteStatus AddTranspose() { + return AddBuiltin(BuiltinOperator_TRANSPOSE, 
Register_TRANSPOSE(), + ParseTranspose); + } + + TfLiteStatus AddUnpack() { + return AddBuiltin(BuiltinOperator_UNPACK, Register_UNPACK(), ParseUnpack); + } + + TfLiteStatus AddUnidirectionalSequenceLSTM( + const TFLMRegistration& registration = + Register_UNIDIRECTIONAL_SEQUENCE_LSTM()) { + return AddBuiltin(BuiltinOperator_UNIDIRECTIONAL_SEQUENCE_LSTM, + registration, ParseUnidirectionalSequenceLSTM); + } + + TfLiteStatus AddVarHandle() { + return AddBuiltin(BuiltinOperator_VAR_HANDLE, Register_VAR_HANDLE(), + ParseVarHandle); + } + + TfLiteStatus AddWhile() { + return AddBuiltin(BuiltinOperator_WHILE, Register_WHILE(), ParseWhile); + } + + TfLiteStatus AddWindow() { + // TODO(b/286250473): change back name to "Window" and remove namespace + return AddCustom("SignalWindow", tflite::tflm_signal::Register_WINDOW()); + } + + TfLiteStatus AddZerosLike() { + return AddBuiltin(BuiltinOperator_ZEROS_LIKE, Register_ZEROS_LIKE(), + ParseZerosLike); + } + + unsigned int GetRegistrationLength() { return registrations_len_; } + + private: + TfLiteStatus AddBuiltin(tflite::BuiltinOperator op, + const TFLMRegistration& registration, + TfLiteBridgeBuiltinParseFunction parser) { + if (op == BuiltinOperator_CUSTOM) { + MicroPrintf("Invalid parameter BuiltinOperator_CUSTOM to the "); + MicroPrintf("AddBuiltin function."); + return kTfLiteError; + } + + if (FindOp(op) != nullptr) { + MicroPrintf("Calling AddBuiltin with the same op more than "); + MicroPrintf("once is not supported (Op: #%d).", op); + return kTfLiteError; + } + + if (registrations_len_ >= tOpCount) { + MicroPrintf("Couldn't register builtin op #%d, resolver size ", op); + MicroPrintf("is too small (%d).", tOpCount); + return kTfLiteError; + } + + registrations_[registrations_len_] = registration; + // Strictly speaking, the builtin_code is not necessary for TFLM but filling + // it in regardless. 
+ registrations_[registrations_len_].builtin_code = op; + registrations_len_++; + + builtin_codes_[num_buitin_ops_] = op; + builtin_parsers_[num_buitin_ops_] = parser; + num_buitin_ops_++; + + return kTfLiteOk; + } + + TFLMRegistration registrations_[tOpCount]; + unsigned int registrations_len_ = 0; + + // Arrays (and counter) to store the builtin codes and their corresponding + // parse functions as these are registered with the Op Resolver. + BuiltinOperator builtin_codes_[tOpCount]; + TfLiteBridgeBuiltinParseFunction builtin_parsers_[tOpCount]; + unsigned int num_buitin_ops_ = 0; +}; + +}; // namespace tflite + +#endif // TENSORFLOW_LITE_MICRO_MICRO_MUTABLE_OP_RESOLVER_H_ diff --git a/tensorflow/lite/micro/micro_mutable_op_resolver_test.cc b/tensorflow/lite/micro/micro_mutable_op_resolver_test.cc new file mode 100644 index 0000000..dbbb872 --- /dev/null +++ b/tensorflow/lite/micro/micro_mutable_op_resolver_test.cc @@ -0,0 +1,98 @@ +/* Copyright 2018 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#include "tensorflow/lite/micro/micro_mutable_op_resolver.h" + +#include "tensorflow/lite/micro/micro_op_resolver.h" +#include "tensorflow/lite/micro/testing/micro_test.h" + +namespace tflite { +namespace { +void* MockInit(TfLiteContext* context, const char* buffer, size_t length) { + // Do nothing. 
+ return nullptr; +} + +void MockFree(TfLiteContext* context, void* buffer) { + // Do nothing. +} + +TfLiteStatus MockPrepare(TfLiteContext* context, TfLiteNode* node) { + return kTfLiteOk; +} + +TfLiteStatus MockInvoke(TfLiteContext* context, TfLiteNode* node) { + return kTfLiteOk; +} + +class MockErrorReporter : public ErrorReporter { + public: + MockErrorReporter() : has_been_called_(false) {} + int Report(const char* format, va_list args) override { + has_been_called_ = true; + return 0; + }; + + bool HasBeenCalled() { return has_been_called_; } + + void ResetState() { has_been_called_ = false; } + + private: + bool has_been_called_; + TF_LITE_REMOVE_VIRTUAL_DELETE +}; + +} // namespace +} // namespace tflite + +TF_LITE_MICRO_TESTS_BEGIN + +TF_LITE_MICRO_TEST(TestOperations) { + using tflite::BuiltinOperator_CONV_2D; + using tflite::BuiltinOperator_RELU; + using tflite::MicroMutableOpResolver; + + static TFLMRegistration r = {}; + r.init = tflite::MockInit; + r.free = tflite::MockFree; + r.prepare = tflite::MockPrepare; + r.invoke = tflite::MockInvoke; + + MicroMutableOpResolver<1> micro_op_resolver; + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, + micro_op_resolver.AddCustom("mock_custom", &r)); + + // Only one AddCustom per operator should return kTfLiteOk. 
+ TF_LITE_MICRO_EXPECT_EQ(kTfLiteError, + micro_op_resolver.AddCustom("mock_custom", &r)); + + tflite::MicroOpResolver* resolver = &micro_op_resolver; + + TF_LITE_MICRO_EXPECT_EQ(static_cast<unsigned int>(1), + micro_op_resolver.GetRegistrationLength()); + + const TFLMRegistration* registration = resolver->FindOp(BuiltinOperator_RELU); + TF_LITE_MICRO_EXPECT(nullptr == registration); + + registration = resolver->FindOp("mock_custom"); + TF_LITE_MICRO_EXPECT(nullptr != registration); + TF_LITE_MICRO_EXPECT(nullptr == registration->init(nullptr, nullptr, 0)); + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, registration->prepare(nullptr, nullptr)); + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, registration->invoke(nullptr, nullptr)); + + registration = resolver->FindOp("nonexistent_custom"); + TF_LITE_MICRO_EXPECT(nullptr == registration); +} +TF_LITE_MICRO_TESTS_END diff --git a/tensorflow/lite/micro/micro_op_resolver.cc b/tensorflow/lite/micro/micro_op_resolver.cc new file mode 100644 index 0000000..8d9603c --- /dev/null +++ b/tensorflow/lite/micro/micro_op_resolver.cc @@ -0,0 +1,55 @@ +/* Copyright 2023 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License.
+==============================================================================*/ + +#include "tensorflow/lite/micro/micro_op_resolver.h" + +#include "tensorflow/lite/c/common.h" +#include "tensorflow/lite/micro/micro_log.h" +#include "tensorflow/lite/schema/schema_utils.h" + +namespace tflite { + +TfLiteStatus GetRegistrationFromOpCode(const OperatorCode* opcode, + const MicroOpResolver& op_resolver, + const TFLMRegistration** registration) { + TfLiteStatus status = kTfLiteOk; + *registration = nullptr; + auto builtin_code = GetBuiltinCode(opcode); + + if (builtin_code > BuiltinOperator_MAX) { + MicroPrintf("Op builtin_code out of range: %d.", builtin_code); + status = kTfLiteError; + } else if (builtin_code != BuiltinOperator_CUSTOM) { + *registration = op_resolver.FindOp(builtin_code); + if (*registration == nullptr) { + MicroPrintf("Didn't find op for builtin opcode '%s'", + EnumNameBuiltinOperator(builtin_code)); + status = kTfLiteError; + } + } else if (!opcode->custom_code()) { + MicroPrintf("Operator with CUSTOM builtin_code has no custom_code.\n"); + status = kTfLiteError; + } else { + const char* name = opcode->custom_code()->c_str(); + *registration = op_resolver.FindOp(name); + if (*registration == nullptr) { + // Do not report error for unresolved custom op, we do the final check + // while preparing ops. + status = kTfLiteError; + } + } + return status; +} +} // namespace tflite diff --git a/tensorflow/lite/micro/micro_op_resolver.h b/tensorflow/lite/micro/micro_op_resolver.h new file mode 100644 index 0000000..e9aac38 --- /dev/null +++ b/tensorflow/lite/micro/micro_op_resolver.h @@ -0,0 +1,62 @@ +/* Copyright 2020 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. 
+You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ +#ifndef TENSORFLOW_LITE_MICRO_MICRO_OP_RESOLVER_H_ +#define TENSORFLOW_LITE_MICRO_MICRO_OP_RESOLVER_H_ + +#include "tensorflow/lite/c/common.h" +#include "tensorflow/lite/micro/micro_common.h" +#include "tensorflow/lite/micro/tflite_bridge/flatbuffer_conversions_bridge.h" +#include "tensorflow/lite/schema/schema_generated.h" + +namespace tflite { + +// This is an interface for the OpResolver for TFLiteMicro. The differences from +// the TFLite OpResolver base class are to: +// * explicitly remove support for Op versions +// * allow for finer grained registration of the Builtin Ops to reduce code +// size for TFLiteMicro. +// +// We need an interface class instead of directly using MicroMutableOpResolver +// because MicroMutableOpResolver is a class template with the number of +// registered Ops as the template parameter. +class MicroOpResolver { + public: + // Returns the Op registration struct corresponding to the enum code from the + // flatbuffer schema. Returns nullptr if the op is not found or if op == + // BuiltinOperator_CUSTOM. + virtual const TFLMRegistration* FindOp(BuiltinOperator op) const = 0; + + // Returns the Op registration struct corresponding to the custom operator by + // name. + virtual const TFLMRegistration* FindOp(const char* op) const = 0; + + // Returns the operator specific parsing function for the OpData for a + // BuiltinOperator (if registered), else nullptr. 
+ virtual TfLiteBridgeBuiltinParseFunction GetOpDataParser( + BuiltinOperator op) const = 0; + + virtual ~MicroOpResolver() {} +}; + +// Handles the logic for converting between an OperatorCode structure extracted +// from a flatbuffer and information about a registered operator +// implementation. +TfLiteStatus GetRegistrationFromOpCode(const OperatorCode* opcode, + const MicroOpResolver& op_resolver, + const TFLMRegistration** registration); + +} // namespace tflite + +#endif // TENSORFLOW_LITE_MICRO_MICRO_OP_RESOLVER_H_ diff --git a/tensorflow/lite/micro/micro_profiler.cc b/tensorflow/lite/micro/micro_profiler.cc new file mode 100644 index 0000000..c3f0f4f --- /dev/null +++ b/tensorflow/lite/micro/micro_profiler.cc @@ -0,0 +1,118 @@ +/* Copyright 2020 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/ +#include "tensorflow/lite/micro/micro_profiler.h" + +#include +#include +#include + +#include "tensorflow/lite/kernels/internal/compatibility.h" +#include "tensorflow/lite/micro/micro_log.h" +#include "tensorflow/lite/micro/micro_time.h" + +namespace tflite { + +uint32_t MicroProfiler::BeginEvent(const char* tag) { + if (num_events_ == kMaxEvents) { + MicroPrintf( + "MicroProfiler errored out because total number of events exceeded the " + "maximum of %d.", + kMaxEvents); + TFLITE_ASSERT_FALSE; + } + + tags_[num_events_] = tag; + start_ticks_[num_events_] = GetCurrentTimeTicks(); + end_ticks_[num_events_] = start_ticks_[num_events_] - 1; + return num_events_++; +} + +void MicroProfiler::EndEvent(uint32_t event_handle) { + TFLITE_DCHECK(event_handle < kMaxEvents); + end_ticks_[event_handle] = GetCurrentTimeTicks(); +} + +uint32_t MicroProfiler::GetTotalTicks() const { + int32_t ticks = 0; + for (int i = 0; i < num_events_; ++i) { + ticks += end_ticks_[i] - start_ticks_[i]; + } + return ticks; +} + +void MicroProfiler::Log() const { +#if !defined(TF_LITE_STRIP_ERROR_STRINGS) + for (int i = 0; i < num_events_; ++i) { + uint32_t ticks = end_ticks_[i] - start_ticks_[i]; + MicroPrintf("%s took %u ticks (%d ms).", tags_[i], ticks, TicksToMs(ticks)); + } +#endif +} + +void MicroProfiler::LogCsv() const { +#if !defined(TF_LITE_STRIP_ERROR_STRINGS) + MicroPrintf("\"Event\",\"Tag\",\"Ticks\""); + for (int i = 0; i < num_events_; ++i) { + uint32_t ticks = end_ticks_[i] - start_ticks_[i]; + MicroPrintf("%d,%s,%" PRIu32, i, tags_[i], ticks); + } +#endif +} + +void MicroProfiler::LogTicksPerTagCsv() { +#if !defined(TF_LITE_STRIP_ERROR_STRINGS) + MicroPrintf( + "\"Unique Tag\",\"Total ticks across all events with that tag.\""); + int total_ticks = 0; + for (int i = 0; i < num_events_; ++i) { + uint32_t ticks = end_ticks_[i] - start_ticks_[i]; + TFLITE_DCHECK(tags_[i] != nullptr); + int position = 
FindExistingOrNextPosition(tags_[i]); + TFLITE_DCHECK(position >= 0); + total_ticks_per_tag[position].tag = tags_[i]; + total_ticks_per_tag[position].ticks = + total_ticks_per_tag[position].ticks + ticks; + total_ticks += ticks; + } + + for (int i = 0; i < num_events_; ++i) { + TicksPerTag each_tag_entry = total_ticks_per_tag[i]; + if (each_tag_entry.tag == nullptr) { + break; + } + MicroPrintf("%s, %d", each_tag_entry.tag, each_tag_entry.ticks); + } + MicroPrintf("total number of ticks, %d", total_ticks); +#endif +} + +// This method finds a particular array element in the total_ticks_per_tag array +// with the matching tag_name passed in the method. If it can find a +// matching array element that has the same tag_name, then it will return the +// position of the matching element. But if it unable to find a matching element +// with the given tag_name, it will return the next available empty position +// from the array. +int MicroProfiler::FindExistingOrNextPosition(const char* tag_name) { + int pos = 0; + for (; pos < num_events_; pos++) { + TicksPerTag each_tag_entry = total_ticks_per_tag[pos]; + if (each_tag_entry.tag == nullptr || + strcmp(each_tag_entry.tag, tag_name) == 0) { + return pos; + } + } + return pos < num_events_ ? pos : -1; +} +} // namespace tflite diff --git a/tensorflow/lite/micro/micro_profiler.h b/tensorflow/lite/micro/micro_profiler.h new file mode 100644 index 0000000..1c39ea1 --- /dev/null +++ b/tensorflow/lite/micro/micro_profiler.h @@ -0,0 +1,140 @@ +/* Copyright 2022 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#ifndef TENSORFLOW_LITE_MICRO_MICRO_PROFILER_H_ +#define TENSORFLOW_LITE_MICRO_MICRO_PROFILER_H_ + +#include "tensorflow/lite/micro/compatibility.h" +#include "tensorflow/lite/micro/micro_profiler_interface.h" + +namespace tflite { + +// MicroProfiler creates a common way to gain fine-grained insight into runtime +// performance. Bottleneck operators can be identified along with slow code +// sections. This can be used in conjunction with running the relevant micro +// benchmark to evaluate end-to-end performance. +class MicroProfiler : public MicroProfilerInterface { + public: + MicroProfiler() = default; + virtual ~MicroProfiler() = default; + + // Marks the start of a new event and returns an event handle that can be used +// to mark the end of the event via EndEvent. The lifetime of the tag + // parameter must exceed that of the MicroProfiler. + virtual uint32_t BeginEvent(const char* tag) override; + + // Marks the end of an event associated with event_handle. It is the + // responsibility of the caller to ensure that EndEvent is called once and + // only once per event_handle. + // + // If EndEvent is called more than once for the same event_handle, the last + // call will be used as the end of event marker. If EndEvent is called 0 times + // for a particular event_handle, the duration of that event will be 0 ticks. + virtual void EndEvent(uint32_t event_handle) override; + + // Clears all the events that have been currently profiled. + void ClearEvents() { num_events_ = 0; } + + // Returns the sum of the ticks taken across all the events. This number + // is only meaningful if all of the events are disjoint (the end time of + // event[i] <= start time of event[i+1]).
+ uint32_t GetTotalTicks() const; + + // Prints the profiling information of each of the events in human readable + // form. + void Log() const; + + // Prints the profiling information of each of the events in CSV (Comma + // Separated Value) form. + void LogCsv() const; + + // Prints total ticks for each unique tag in CSV format. + // Output will have one row for each unique tag along with the + // total ticks summed across all events with that particular tag. + void LogTicksPerTagCsv(); + + private: + // Maximum number of events that this class can keep track of. If BeginEvent + // is called more than kMaxEvents times, the profiler logs an error and + // asserts. + static constexpr int kMaxEvents = 4096; + + const char* tags_[kMaxEvents]; + uint32_t start_ticks_[kMaxEvents]; + uint32_t end_ticks_[kMaxEvents]; + int num_events_ = 0; + + struct TicksPerTag { + const char* tag; + uint32_t ticks; + }; + // In practice, the number of tags will be much lower than the number of + // events. But it is theoretically possible that each event may be unique and + // hence we allow total_ticks_per_tag to have kMaxEvents entries. + TicksPerTag total_ticks_per_tag[kMaxEvents] = {}; + + int FindExistingOrNextPosition(const char* tag_name); + + TF_LITE_REMOVE_VIRTUAL_DELETE; +}; + +#if defined(TF_LITE_STRIP_ERROR_STRINGS) +// For release builds, the ScopedMicroProfiler is a noop. +// +// This is done because the ScopedMicroProfiler is used as part of the +// MicroInterpreter and we want to ensure zero overhead for the release builds. +class ScopedMicroProfiler { + public: + explicit ScopedMicroProfiler(const char* tag, + MicroProfilerInterface* profiler) {} +}; + +#else + +// This class can be used to add events to a MicroProfiler object that span the +// lifetime of the ScopedMicroProfiler object. +// Usage example: +// +// MicroProfiler profiler; +// ...
+// { +// ScopedMicroProfiler scoped_profiler("custom_tag", &profiler); +// work_to_profile(); +// } +class ScopedMicroProfiler { + public: + explicit ScopedMicroProfiler(const char* tag, + MicroProfilerInterface* profiler) + : profiler_(profiler) { + if (profiler_ != nullptr) { + event_handle_ = profiler_->BeginEvent(tag); + } + } + + ~ScopedMicroProfiler() { + if (profiler_ != nullptr) { + profiler_->EndEvent(event_handle_); + } + } + + private: + uint32_t event_handle_ = 0; + MicroProfilerInterface* profiler_ = nullptr; +}; +#endif  // !defined(TF_LITE_STRIP_ERROR_STRINGS) + +}  // namespace tflite + +#endif  // TENSORFLOW_LITE_MICRO_MICRO_PROFILER_H_ diff --git a/tensorflow/lite/micro/micro_profiler_interface.h b/tensorflow/lite/micro/micro_profiler_interface.h new file mode 100644 index 0000000..f839a74 --- /dev/null +++ b/tensorflow/lite/micro/micro_profiler_interface.h @@ -0,0 +1,38 @@ +/* Copyright 2022 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#ifndef TENSORFLOW_LITE_MICRO_MICRO_PROFILER_INTERFACE_H_ +#define TENSORFLOW_LITE_MICRO_MICRO_PROFILER_INTERFACE_H_ + +#include <cstdint> + +namespace tflite { + +// Interface class that the TFLM framework relies on for profiling.
+class MicroProfilerInterface { + public: + virtual ~MicroProfilerInterface() {} + + // Marks the start of a new event and returns an event handle that can be used + // to mark the end of the event via EndEvent. + virtual uint32_t BeginEvent(const char* tag) = 0; + + // Marks the end of an event associated with event_handle. + virtual void EndEvent(uint32_t event_handle) = 0; +}; + +} // namespace tflite + +#endif // TENSORFLOW_LITE_MICRO_MICRO_PROFILER_INTERFACE_H_ diff --git a/tensorflow/lite/micro/micro_resource_variable.cc b/tensorflow/lite/micro/micro_resource_variable.cc new file mode 100644 index 0000000..767e7d1 --- /dev/null +++ b/tensorflow/lite/micro/micro_resource_variable.cc @@ -0,0 +1,158 @@ +/* Copyright 2020 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/ + +#include "tensorflow/lite/micro/micro_resource_variable.h" + +#include + +#include "tensorflow/lite/c/common.h" +#include "tensorflow/lite/kernels/internal/compatibility.h" +#include "tensorflow/lite/micro/memory_helpers.h" +#include "tensorflow/lite/micro/micro_log.h" +#include "tensorflow/lite/micro/micro_utils.h" + +namespace tflite { + +namespace {} // namespace + +MicroResourceVariables* MicroResourceVariables::Create( + MicroAllocator* allocator, int max_num_variables) { + TFLITE_DCHECK(allocator != nullptr); + + uint8_t* allocator_buffer = static_cast( + allocator->AllocatePersistentBuffer(sizeof(MicroResourceVariables))); + MicroResourceVariable* variable_array = + static_cast(allocator->AllocatePersistentBuffer( + sizeof(MicroResourceVariable) * max_num_variables)); + MicroResourceVariables* variables = new (allocator_buffer) + MicroResourceVariables(variable_array, max_num_variables); + return variables; +} + +int MicroResourceVariables::CreateIdIfNoneFound(const char* container, + const char* shared_name) { + int resource_id = FindId(container, shared_name); + if (resource_id >= 0) { + return resource_id; + } + + // no existing variable found for the given container and shared name pair. + if (num_resource_variables_ >= max_variable_count_) { + MicroPrintf( + "Failed to allocate resource variable. 
Maximum resource variable count " + "(%d) " + "reached.", + max_variable_count_); + return -1; + } + + resource_id = num_resource_variables_++; + resource_variables_[resource_id].container = container; + resource_variables_[resource_id].shared_name = shared_name; + resource_variables_[resource_id].resource_buffer = nullptr; + resource_variables_[resource_id].bytes = 0; + resource_variables_[resource_id].default_value = 0; + return resource_id; +} + +TfLiteStatus MicroResourceVariables::Read(int id, + const TfLiteEvalTensor* tensor) { + if (id < 0 || id >= num_resource_variables_) { + MicroPrintf("Attempting to read non-existent resource variable %d", id); + return kTfLiteError; + } + MicroResourceVariable variable = resource_variables_[id]; + TFLITE_DCHECK(EvalTensorBytes(tensor) == variable.bytes); + TFLITE_DCHECK(variable.resource_buffer != nullptr); + memcpy(tensor->data.raw, variable.resource_buffer, variable.bytes); + return kTfLiteOk; +} + +TfLiteStatus MicroResourceVariables::Allocate(int id, TfLiteContext* context, + const TfLiteTensor* tensor) { + if (id < 0 || id >= num_resource_variables_) { + MicroPrintf("Attempting to read non-existent resource variable %d", id); + return kTfLiteError; + } + + MicroResourceVariable& variable = resource_variables_[id]; + + if (variable.resource_buffer == nullptr) { + variable.bytes = tensor->bytes; + variable.resource_buffer = + context->AllocatePersistentBuffer(context, tensor->bytes); + if (variable.resource_buffer == nullptr) { + MicroPrintf("Failed to allocate resource buffer."); + return kTfLiteError; + } + // Set resource buffers to the zero_point by default. Buffers can be + // initialized to nonzero values using ASSIGN_VARIABLE. + // See comment#2 in b/269648474 for more details why we use zero_point. 
+ if (tensor->quantization.params != nullptr) { + auto* quantization_data = reinterpret_cast( + tensor->quantization.params); + int8_t zero_point = quantization_data->zero_point[0].data[0]; + variable.default_value = zero_point; + } + // TODO(b/269669735): Explains why casting zero_point to int8 and memset. + memset(variable.resource_buffer, variable.default_value, variable.bytes); + } + + return kTfLiteOk; +} + +TfLiteStatus MicroResourceVariables::Assign(int id, + const TfLiteEvalTensor* tensor) { + if (id < 0 || id >= num_resource_variables_) { + MicroPrintf("Attempting to read non-existent resource variable %d", id); + return kTfLiteError; + } + MicroResourceVariable variable = resource_variables_[id]; + + if (variable.resource_buffer == nullptr) { + MicroPrintf( + "Attempting to assign from a TfLiteEvalTensor before the resource " + "buffer has been allocated. Make sure to call AssignResourceVariable " + "with a TfLiteTensor first."); + return kTfLiteError; + } + TFLITE_DCHECK(EvalTensorBytes(tensor) == variable.bytes); + memcpy(variable.resource_buffer, tensor->data.raw, variable.bytes); + return kTfLiteOk; +} + +TfLiteStatus MicroResourceVariables::ResetAll() { + for (int i = 0; i < num_resource_variables_; i++) { + MicroResourceVariable variable = resource_variables_[i]; + // TODO(b/269669735): Explains why casting zero_point to int8 and memset. + memset(variable.resource_buffer, variable.default_value, variable.bytes); + } + return kTfLiteOk; +} + +int MicroResourceVariables::FindId(const char* container, + const char* shared_name) { + for (int i = 0; i < num_resource_variables_; i++) { + // Some TFLite flatbuffers contain null container names to save space. 
+ if ((container == nullptr || + !strcmp(container, resource_variables_[i].container)) && + !strcmp(shared_name, resource_variables_[i].shared_name)) { + return i; + } + } + return -1; +} + +}  // namespace tflite diff --git a/tensorflow/lite/micro/micro_resource_variable.h b/tensorflow/lite/micro/micro_resource_variable.h new file mode 100644 index 0000000..fb9917d --- /dev/null +++ b/tensorflow/lite/micro/micro_resource_variable.h @@ -0,0 +1,89 @@ +/* Copyright 2021 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#ifndef TFLITE_MICRO_TENSORFLOW_LITE_MICRO_MICRO_RESOURCE_H_ +#define TFLITE_MICRO_TENSORFLOW_LITE_MICRO_MICRO_RESOURCE_H_ + +#include + +#include "tensorflow/lite/c/common.h" +#include "tensorflow/lite/micro/micro_allocator.h" + +namespace tflite { + +class MicroResourceVariables { + public: + // Creates a MicroResourceVariables instance with storage for up to + // num_variables resource variables, allocated from the given allocator. + static MicroResourceVariables* Create(MicroAllocator* allocator, + int num_variables); + + // Creates a resource variable if none is available for the given container + // and shared name pair. Returns the resource ID corresponding to the + // container and shared name pair. If allocation fails, the returned resource + // ID will be negative. The container and shared_name must outlive this + // class.
+ int CreateIdIfNoneFound(const char* container, const char* shared_name); + + // Read the resource buffer associated with the given ID into the given + // tensor. + TfLiteStatus Read(int id, const TfLiteEvalTensor* tensor); + + // Allocates the resource buffer if none has been allocated, based on the + // length of the input tensor. Copies input tensor contents to the resource + // buffer. + TfLiteStatus Allocate(int id, TfLiteContext* context, + const TfLiteTensor* tensor); + + // Copies input tensor contents to the resource buffer. + // AllocateResourceVariable with a TFLite tensor must have been called first + // in order to allocate the resource buffer. + TfLiteStatus Assign(int id, const TfLiteEvalTensor* tensor); + + // Zeros out all resource buffers. + TfLiteStatus ResetAll(); + + private: + int FindId(const char* container, const char* shared_name); + + // Micro resource contains the mapping between resource container/name strings + // and resouce IDs. Each resource ID corresponds to a resource buffer pointer. + // The resouce ID is created during the VAR_HANDLE operator preparation stage. + // The resource buffer pointer is created during ASSIGN_VARIABLE preparation + // stage based on the size of the TFLiteTensor being assigned. + struct MicroResourceVariable { + const char* container; + const char* shared_name; + void* resource_buffer; + + // This is only for verifying read size. 
+ size_t bytes; + // Initialization default value + int8_t default_value; + }; + + MicroResourceVariables(MicroResourceVariable* variables, + int max_variable_count) + : resource_variables_(variables), + max_variable_count_(max_variable_count), + num_resource_variables_(0) {} + + MicroResourceVariable* resource_variables_; + int max_variable_count_; + int num_resource_variables_; +}; + +} // namespace tflite + +#endif // TFLITE_MICRO_TENSORFLOW_LITE_MICRO_MICRO_RESOURCE_H_ diff --git a/tensorflow/lite/micro/micro_resource_variable_test.cc b/tensorflow/lite/micro/micro_resource_variable_test.cc new file mode 100644 index 0000000..13868bb --- /dev/null +++ b/tensorflow/lite/micro/micro_resource_variable_test.cc @@ -0,0 +1,155 @@ +/* Copyright 2020 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/ + +#include "tensorflow/lite/micro/micro_resource_variable.h" + +#include "tensorflow/lite/c/common.h" +#include "tensorflow/lite/micro/test_helpers.h" +#include "tensorflow/lite/micro/testing/micro_test.h" + +namespace tflite { +namespace { + +constexpr int kMaxBufferSize = 1024; +uint8_t buffer_[kMaxBufferSize]; +int last_allocation_size_; + +void* AllocateMockBuffer(TfLiteContext* context, size_t size) { + last_allocation_size_ = size; + return buffer_; +} + +TfLiteContext* GetMockContext() { + static TfLiteContext mock_context = {}; + mock_context.AllocatePersistentBuffer = AllocateMockBuffer; + return &mock_context; +} + +} // namespace +} // namespace tflite + +TF_LITE_MICRO_TESTS_BEGIN + +TF_LITE_MICRO_TEST(CreateVariables) { + tflite::MicroResourceVariables* resource_variables = + tflite::MicroResourceVariables::Create( + tflite::MicroAllocator::Create(tflite::buffer_, + tflite::kMaxBufferSize), + 4); + int id1 = resource_variables->CreateIdIfNoneFound("", "var1"); + TF_LITE_MICRO_EXPECT_GE(id1, 0); + + int id2 = resource_variables->CreateIdIfNoneFound("", "var2"); + TF_LITE_MICRO_EXPECT_NE(id1, id2); + + int id3 = resource_variables->CreateIdIfNoneFound("foo", "var1"); + TF_LITE_MICRO_EXPECT_NE(id1, id3); + TF_LITE_MICRO_EXPECT_NE(id2, id3); + + int id4 = resource_variables->CreateIdIfNoneFound("foo", "var2"); + TF_LITE_MICRO_EXPECT_NE(id1, id4); + TF_LITE_MICRO_EXPECT_NE(id2, id4); + TF_LITE_MICRO_EXPECT_NE(id3, id4); + + TF_LITE_MICRO_EXPECT_EQ(id2, + resource_variables->CreateIdIfNoneFound("", "var2")); + TF_LITE_MICRO_EXPECT_EQ(id1, + resource_variables->CreateIdIfNoneFound("", "var1")); + TF_LITE_MICRO_EXPECT_EQ( + id4, resource_variables->CreateIdIfNoneFound("foo", "var2")); + TF_LITE_MICRO_EXPECT_EQ( + id3, resource_variables->CreateIdIfNoneFound("foo", "var1")); +} + +TF_LITE_MICRO_TEST(AllocateResourceBuffers) { + tflite::MicroResourceVariables* resource_variables = 
+ tflite::MicroResourceVariables::Create( + tflite::MicroAllocator::Create(tflite::buffer_, + tflite::kMaxBufferSize), + 2); + int id1 = resource_variables->CreateIdIfNoneFound("", "var1"); + TF_LITE_MICRO_EXPECT_GE(id1, 0); + + int id2 = resource_variables->CreateIdIfNoneFound("", "var2"); + TF_LITE_MICRO_EXPECT_NE(id1, id2); + + TfLiteTensor tensor = {}; + tensor.bytes = 42; + resource_variables->Allocate(id1, tflite::GetMockContext(), &tensor); + TF_LITE_MICRO_EXPECT_EQ(42, tflite::last_allocation_size_); + + tensor.bytes = 100; + resource_variables->Allocate(id2, tflite::GetMockContext(), &tensor); + TF_LITE_MICRO_EXPECT_EQ(100, tflite::last_allocation_size_); +} + +TF_LITE_MICRO_TEST(VerifyAssignAndReadResourceBuffer) { + tflite::MicroResourceVariables* resource_variables = + tflite::MicroResourceVariables::Create( + tflite::MicroAllocator::Create(tflite::buffer_, + tflite::kMaxBufferSize), + 1); + int id = resource_variables->CreateIdIfNoneFound("", "var1"); + TF_LITE_MICRO_EXPECT_GE(id, 0); + + TfLiteTensor tensor = {}; + const int bytes = 32 * sizeof(int32_t); + tensor.bytes = bytes; + resource_variables->Allocate(id, tflite::GetMockContext(), &tensor); + TF_LITE_MICRO_EXPECT_EQ(bytes, tflite::last_allocation_size_); + + int32_t golden[32] = {1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, + 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, + 23, 24, 25, 26, 27, 28, 29, 30, 31, 32}; + int dims[] = {1, 32}; + TfLiteEvalTensor assign_tensor = { + .data = {golden}, + .dims = tflite::testing::IntArrayFromInts(dims), + + .type = kTfLiteFloat32, + }; + resource_variables->Assign(id, &assign_tensor); + + int32_t buffer[32]; + TfLiteEvalTensor read_tensor = { + .data = {buffer}, + .dims = tflite::testing::IntArrayFromInts(dims), + .type = kTfLiteInt32, + }; + resource_variables->Read(id, &read_tensor); + for (int i = 0; i < 32; i++) { + TF_LITE_MICRO_EXPECT_EQ(buffer[i], golden[i]); + } +} + +TF_LITE_MICRO_TEST(CreateVariablesNullContainer) { + tflite::MicroResourceVariables* 
resource_variables = + tflite::MicroResourceVariables::Create( + tflite::MicroAllocator::Create(tflite::buffer_, + tflite::kMaxBufferSize), + 4); + int id1 = resource_variables->CreateIdIfNoneFound(nullptr, "var1"); + TF_LITE_MICRO_EXPECT_GE(id1, 0); + + int id2 = resource_variables->CreateIdIfNoneFound(nullptr, "var2"); + TF_LITE_MICRO_EXPECT_NE(id1, id2); + + TF_LITE_MICRO_EXPECT_EQ( + id2, resource_variables->CreateIdIfNoneFound(nullptr, "var2")); + TF_LITE_MICRO_EXPECT_EQ( + id1, resource_variables->CreateIdIfNoneFound(nullptr, "var1")); +} + +TF_LITE_MICRO_TESTS_END diff --git a/tensorflow/lite/micro/micro_string.cc b/tensorflow/lite/micro/micro_string.cc new file mode 100644 index 0000000..bb41a9e --- /dev/null +++ b/tensorflow/lite/micro/micro_string.cc @@ -0,0 +1,317 @@ +/* Copyright 2018 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +// Implements debug logging for numbers by converting them into strings and then +// calling the main DebugLog(char*) function. These are separated into a +// different file so that platforms can just implement the string output version +// of DebugLog() and then get the numerical variations without requiring any +// more code. + +#include "tensorflow/lite/micro/micro_string.h" + +#include +#include +#include + +namespace { + +// Int formats can need up to 10 bytes for the value plus a single byte for the +// sign. 
+constexpr int kMaxIntCharsNeeded = 10 + 1; +// Hex formats can need up to 8 bytes for the value plus two bytes for the "0x". +constexpr int kMaxHexCharsNeeded = 8 + 2; + +// Float formats can need up to 7 bytes for the fraction plus 3 bytes for "x2^" +// plus 3 bytes for the exponent and a single sign bit. +constexpr float kMaxFloatCharsNeeded = 7 + 3 + 3 + 1; + +// All input buffers to the number conversion functions must be this long. +const int kFastToBufferSize = 48; + +// Reverses a zero-terminated string in-place. +char* ReverseStringInPlace(char* start, char* end) { + char* p1 = start; + char* p2 = end - 1; + while (p1 < p2) { + char tmp = *p1; + *p1++ = *p2; + *p2-- = tmp; + } + return start; +} + +// Appends a string to a string, in-place. You need to pass in the maximum +// string length as the second argument. +char* StrCatStr(char* main, int main_max_length, const char* to_append) { + char* current = main; + while (*current != 0) { + ++current; + } + char* current_end = main + (main_max_length - 1); + while ((*to_append != 0) && (current < current_end)) { + *current = *to_append; + ++current; + ++to_append; + } + *current = 0; + return current; +} + +// Populates the provided buffer with an ASCII representation of the number. +char* FastUInt32ToBufferLeft(uint32_t i, char* buffer, int base) { + char* start = buffer; + do { + int32_t digit = i % base; + char character; + if (digit < 10) { + character = '0' + digit; + } else { + character = 'a' + (digit - 10); + } + *buffer++ = character; + i /= base; + } while (i > 0); + *buffer = 0; + ReverseStringInPlace(start, buffer); + return buffer; +} + +// Populates the provided buffer with an ASCII representation of the number. +char* FastInt32ToBufferLeft(int32_t i, char* buffer) { + uint32_t u = i; + if (i < 0) { + *buffer++ = '-'; + u = -u; + } + return FastUInt32ToBufferLeft(u, buffer, 10); +} + +// Converts a number to a string and appends it to another. 
+char* StrCatInt32(char* main, int main_max_length, int32_t number) { + char number_string[kFastToBufferSize]; + FastInt32ToBufferLeft(number, number_string); + return StrCatStr(main, main_max_length, number_string); +} + +// Converts a number to a string and appends it to another. +char* StrCatUInt32(char* main, int main_max_length, uint32_t number, int base) { + char number_string[kFastToBufferSize]; + FastUInt32ToBufferLeft(number, number_string, base); + return StrCatStr(main, main_max_length, number_string); +} + +// Populates the provided buffer with ASCII representation of the float number. +// Avoids the use of any floating point instructions (since these aren't +// supported on many microcontrollers) and as a consequence prints values with +// power-of-two exponents. +char* FastFloatToBufferLeft(float f, char* buffer) { + char* current = buffer; + char* current_end = buffer + (kFastToBufferSize - 1); + // Access the bit fields of the floating point value to avoid requiring any + // float instructions. These constants are derived from IEEE 754. + const uint32_t sign_mask = 0x80000000; + const uint32_t exponent_mask = 0x7f800000; + const int32_t exponent_shift = 23; + const int32_t exponent_bias = 127; + const uint32_t fraction_mask = 0x007fffff; + uint32_t u; + memcpy(&u, &f, sizeof(int32_t)); + const int32_t exponent = + ((u & exponent_mask) >> exponent_shift) - exponent_bias; + const uint32_t fraction = (u & fraction_mask); + // Expect ~0x2B1B9D3 for fraction. + if (u & sign_mask) { + *current = '-'; + current += 1; + } + *current = 0; + // These are special cases for infinities and not-a-numbers. + if (exponent == 128) { + if (fraction == 0) { + current = StrCatStr(current, (current_end - current), "Inf"); + return current; + } else { + current = StrCatStr(current, (current_end - current), "NaN"); + return current; + } + } + // 0x007fffff (8388607) represents 0.99... 
for the fraction, so to print the + // correct decimal digits we need to scale our value before passing it to the + // conversion function. This scale should be 10000000/8388608 = 1.1920928955. + // We can approximate this using multiply-adds and right-shifts using the + // values in this array. The 1. portion of the number string is printed out + // in a fixed way before the fraction, below. + const int32_t scale_shifts_size = 13; + const int8_t scale_shifts[13] = {3, 4, 8, 11, 13, 14, 17, + 18, 19, 20, 21, 22, 23}; + uint32_t scaled_fraction = fraction; + for (int i = 0; i < scale_shifts_size; ++i) { + scaled_fraction += (fraction >> scale_shifts[i]); + } + *current = '1'; + current += 1; + *current = '.'; + current += 1; + *current = 0; + + // Prepend leading zeros to fill in all 7 bytes of the fraction. Truncate + // zeros off the end of the fraction. Every fractional value takes 7 bytes. + // For example, 2500 would be written into the buffer as 0002500 since it + // represents .00025. + constexpr int kMaxFractionalDigits = 7; + + // Abort early if there is not enough space in the buffer. + if (current_end - current <= kMaxFractionalDigits) { + return current; + } + + // Pre-fill buffer with zeros to ensure zero-truncation works properly. + for (int i = 1; i < kMaxFractionalDigits; i++) { + *(current + i) = '0'; + } + + // Track how large the fraction is to add leading zeros. + char* previous = current; + current = StrCatUInt32(current, (current_end - current), scaled_fraction, 10); + int fraction_digits = current - previous; + int leading_zeros = kMaxFractionalDigits - fraction_digits; + + // Overwrite the null terminator from StrCatUInt32 to ensure zero-trunctaion + // works properly. + *current = '0'; + + // Shift fraction values and prepend zeros if necessary. 
+ if (leading_zeros != 0) { + for (int i = 0; i < fraction_digits; i++) { + current--; + *(current + leading_zeros) = *current; + *current = '0'; + } + current += kMaxFractionalDigits; + } + + // Truncate trailing zeros for cleaner logs. Ensure we leave at least one + // fractional character for the case when scaled_fraction is 0. + while (*(current - 1) == '0' && (current - 1) > previous) { + current--; + } + *current = 0; + current = StrCatStr(current, (current_end - current), "*2^"); + current = StrCatInt32(current, (current_end - current), exponent); + return current; +} + +int FormatInt32(char* output, int32_t i) { + return static_cast(FastInt32ToBufferLeft(i, output) - output); +} + +int FormatUInt32(char* output, uint32_t i) { + return static_cast(FastUInt32ToBufferLeft(i, output, 10) - output); +} + +int FormatHex(char* output, uint32_t i) { + return static_cast(FastUInt32ToBufferLeft(i, output, 16) - output); +} + +int FormatFloat(char* output, float i) { + return static_cast(FastFloatToBufferLeft(i, output) - output); +} + +} // namespace + +extern "C" int MicroVsnprintf(char* output, int len, const char* format, + va_list args) { + int output_index = 0; + const char* current = format; + // One extra character must be left for the null terminator. + const int usable_length = len - 1; + while (*current != '\0' && output_index < usable_length) { + if (*current == '%') { + current++; + switch (*current) { + case 'd': + // Cut off log message if format could exceed log buffer length. 
+ if (usable_length - output_index < kMaxIntCharsNeeded) { + output[output_index++] = '\0'; + return output_index; + } + output_index += + FormatInt32(&output[output_index], va_arg(args, int32_t)); + current++; + break; + case 'u': + if (usable_length - output_index < kMaxIntCharsNeeded) { + output[output_index++] = '\0'; + return output_index; + } + output_index += + FormatUInt32(&output[output_index], va_arg(args, uint32_t)); + current++; + break; + case 'x': + if (usable_length - output_index < kMaxHexCharsNeeded) { + output[output_index++] = '\0'; + return output_index; + } + output[output_index++] = '0'; + output[output_index++] = 'x'; + output_index += + FormatHex(&output[output_index], va_arg(args, uint32_t)); + current++; + break; + case 'f': + if (usable_length - output_index < kMaxFloatCharsNeeded) { + output[output_index++] = '\0'; + return output_index; + } + output_index += + FormatFloat(&output[output_index], va_arg(args, double)); + current++; + break; + case '%': + output[output_index++] = *current++; + break; + case 'c': + if (usable_length - output_index < 1) { + output[output_index++] = '\0'; + return output_index; + } + output[output_index++] = va_arg(args, int32_t); + current++; + break; + case 's': + char* string = va_arg(args, char*); + int string_idx = 0; + while (string_idx + output_index < usable_length && + string[string_idx] != '\0') { + output[output_index++] = string[string_idx++]; + } + current++; + } + } else { + output[output_index++] = *current++; + } + } + output[output_index++] = '\0'; + return output_index; +} + +extern "C" int MicroSnprintf(char* output, int len, const char* format, ...) 
{ + va_list args; + va_start(args, format); + int bytes_written = MicroVsnprintf(output, len, format, args); + va_end(args); + return bytes_written; +} diff --git a/tensorflow/lite/micro/micro_string.h b/tensorflow/lite/micro/micro_string.h new file mode 100644 index 0000000..59303e8 --- /dev/null +++ b/tensorflow/lite/micro/micro_string.h @@ -0,0 +1,33 @@ +/* Copyright 2018 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ +#ifndef TENSORFLOW_LITE_MICRO_MICRO_STRING_H_ +#define TENSORFLOW_LITE_MICRO_MICRO_STRING_H_ + +#include <cstdarg> + +// Implements simple string formatting for numeric types. Returns the number of +// bytes written to output. +extern "C" { +// Functionally equivalent to vsnprintf, trimmed down for TFLite Micro. +// MicroSnprintf() is implemented using MicroVsnprintf(). +int MicroVsnprintf(char* output, int len, const char* format, va_list args); +// Functionally equivalent to snprintf, trimmed down for TFLite Micro. +// For example, MicroSnprintf(buffer, 10, "int %d", 10) will put the string +// "int 10" in the buffer. +// Floating point values are logged in exponent notation (1.XXX*2^N). 
+int MicroSnprintf(char* output, int len, const char* format, ...); +} + +#endif // TENSORFLOW_LITE_MICRO_MICRO_STRING_H_ diff --git a/tensorflow/lite/micro/micro_string_test.cc b/tensorflow/lite/micro/micro_string_test.cc new file mode 100644 index 0000000..3c1d8e9 --- /dev/null +++ b/tensorflow/lite/micro/micro_string_test.cc @@ -0,0 +1,161 @@ +/* Copyright 2020 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#include "tensorflow/lite/micro/micro_string.h" + +#include "tensorflow/lite/micro/testing/micro_test.h" + +TF_LITE_MICRO_TESTS_BEGIN + +TF_LITE_MICRO_TEST(FormatPositiveIntShouldMatchExpected) { + const int kBufferLen = 32; + char buffer[kBufferLen]; + const char golden[] = "Int: 55"; + int bytes_written = MicroSnprintf(buffer, kBufferLen, "Int: %d", 55); + TF_LITE_MICRO_EXPECT_EQ(static_cast(sizeof(golden)), bytes_written); + TF_LITE_MICRO_EXPECT_STRING_EQ(golden, buffer); +} + +TF_LITE_MICRO_TEST(FormatNegativeIntShouldMatchExpected) { + const int kBufferLen = 32; + char buffer[kBufferLen]; + const char golden[] = "Int: -55"; + int bytes_written = MicroSnprintf(buffer, kBufferLen, "Int: %d", -55); + TF_LITE_MICRO_EXPECT_EQ(static_cast(sizeof(golden)), bytes_written); + TF_LITE_MICRO_EXPECT_STRING_EQ(golden, buffer); +} + +TF_LITE_MICRO_TEST(FormatUnsignedIntShouldMatchExpected) { + const int kBufferLen = 32; + char buffer[kBufferLen]; + const char 
golden[] = "UInt: 12345"; + int bytes_written = MicroSnprintf(buffer, kBufferLen, "UInt: %u", 12345); + TF_LITE_MICRO_EXPECT_EQ(static_cast(sizeof(golden)), bytes_written); + TF_LITE_MICRO_EXPECT_STRING_EQ(golden, buffer); +} + +TF_LITE_MICRO_TEST(FormatHexShouldMatchExpected) { + const int kBufferLen = 32; + char buffer[kBufferLen]; + const char golden[] = "Hex: 0x12345"; + int bytes_written = MicroSnprintf(buffer, kBufferLen, "Hex: %x", 0x12345); + TF_LITE_MICRO_EXPECT_EQ(static_cast(sizeof(golden)), bytes_written); + TF_LITE_MICRO_EXPECT_STRING_EQ(golden, buffer); +} + +TF_LITE_MICRO_TEST(FormatFloatShouldMatchExpected) { + const int kBufferLen = 32; + char buffer[kBufferLen]; + const char golden[] = "Float: 1.0*2^4"; + int bytes_written = MicroSnprintf(buffer, kBufferLen, "Float: %f", 16.); + TF_LITE_MICRO_EXPECT_EQ(static_cast(sizeof(golden)), bytes_written); + TF_LITE_MICRO_EXPECT_STRING_EQ(golden, buffer); +} + +TF_LITE_MICRO_TEST(FormatCharShouldMatchExpected) { + const int kBufferLen = 32; + char buffer[kBufferLen]; + const char golden[] = "Chars: @,Z"; + int bytes_written = + MicroSnprintf(buffer, kBufferLen, "Chars: %c,%c", 64, 'Z'); + TF_LITE_MICRO_EXPECT_EQ(static_cast(sizeof(golden)), bytes_written); + TF_LITE_MICRO_EXPECT_STRING_EQ(golden, buffer); +} + +TF_LITE_MICRO_TEST(BadlyFormattedStringShouldProduceReasonableString) { + const int kBufferLen = 32; + char buffer[kBufferLen]; + const char golden[] = "Test Badly % formated % string"; + int bytes_written = + MicroSnprintf(buffer, kBufferLen, "Test Badly %% formated %% string%"); + TF_LITE_MICRO_EXPECT_EQ(static_cast(sizeof(golden)), bytes_written); + TF_LITE_MICRO_EXPECT_STRING_EQ(golden, buffer); +} + +TF_LITE_MICRO_TEST(IntFormatOverrunShouldTruncate) { + const int kBufferLen = 8; + char buffer[kBufferLen]; + const char golden[] = "Int: "; + int bytes_written = MicroSnprintf(buffer, kBufferLen, "Int: %d", 12345); + TF_LITE_MICRO_EXPECT_EQ(static_cast(sizeof(golden)), bytes_written); + 
TF_LITE_MICRO_EXPECT_STRING_EQ(golden, buffer); +} + +TF_LITE_MICRO_TEST(UnsignedIntFormatOverrunShouldTruncate) { + const int kBufferLen = 8; + char buffer[kBufferLen]; + const char golden[] = "UInt: "; + int bytes_written = MicroSnprintf(buffer, kBufferLen, "UInt: %u", 12345); + TF_LITE_MICRO_EXPECT_EQ(static_cast(sizeof(golden)), bytes_written); + TF_LITE_MICRO_EXPECT_STRING_EQ(golden, buffer); +} + +TF_LITE_MICRO_TEST(HexFormatOverrunShouldTruncate) { + const int kBufferLen = 8; + char buffer[kBufferLen]; + const char golden[] = "Hex: "; + int bytes_written = MicroSnprintf(buffer, kBufferLen, "Hex: %x", 0x12345); + TF_LITE_MICRO_EXPECT_EQ(static_cast(sizeof(golden)), bytes_written); + TF_LITE_MICRO_EXPECT_STRING_EQ(golden, buffer); +} + +TF_LITE_MICRO_TEST(FloatFormatOverrunShouldTruncate) { + const int kBufferLen = 12; + char buffer[kBufferLen]; + const char golden[] = "Float: "; + int bytes_written = MicroSnprintf(buffer, kBufferLen, "Float: %x", 12345.); + TF_LITE_MICRO_EXPECT_EQ(static_cast(sizeof(golden)), bytes_written); + TF_LITE_MICRO_EXPECT_STRING_EQ(golden, buffer); +} + +TF_LITE_MICRO_TEST(FloatFormatShouldPrintFractionCorrectly) { + const int kBufferLen = 24; + char buffer[kBufferLen]; + const char golden[] = "Float: 1.0625*2^0"; + // Add small offset to float value to account for float rounding error. 
+ int bytes_written = MicroSnprintf(buffer, kBufferLen, "Float: %f", 1.0625001); + TF_LITE_MICRO_EXPECT_EQ(static_cast(sizeof(golden)), bytes_written); + TF_LITE_MICRO_EXPECT_STRING_EQ(golden, buffer); +} + +TF_LITE_MICRO_TEST(FloatFormatShouldPrintFractionCorrectlyNoLeadingZeros) { + const int kBufferLen = 24; + char buffer[kBufferLen]; + const char golden[] = "Float: 1.6332993*2^-1"; + int bytes_written = MicroSnprintf(buffer, kBufferLen, "Float: %f", 0.816650); + TF_LITE_MICRO_EXPECT_EQ(static_cast(sizeof(golden)), bytes_written); + TF_LITE_MICRO_EXPECT_STRING_EQ(golden, buffer); +} + +TF_LITE_MICRO_TEST(StringFormatOverrunShouldTruncate) { + const int kBufferLen = 10; + char buffer[kBufferLen]; + const char golden[] = "String: h"; + int bytes_written = + MicroSnprintf(buffer, kBufferLen, "String: %s", "hello world"); + TF_LITE_MICRO_EXPECT_EQ(static_cast(sizeof(golden)), bytes_written); + TF_LITE_MICRO_EXPECT_STRING_EQ(golden, buffer); +} + +TF_LITE_MICRO_TEST(StringFormatWithExactOutputSizeOverrunShouldTruncate) { + const int kBufferLen = 10; + char buffer[kBufferLen]; + const char golden[] = "format st"; + int bytes_written = MicroSnprintf(buffer, kBufferLen, "format str"); + TF_LITE_MICRO_EXPECT_EQ(static_cast(sizeof(golden)), bytes_written); + TF_LITE_MICRO_EXPECT_STRING_EQ(golden, buffer); +} + +TF_LITE_MICRO_TESTS_END diff --git a/tensorflow/lite/micro/micro_time.cc b/tensorflow/lite/micro/micro_time.cc new file mode 100644 index 0000000..2d74fdb --- /dev/null +++ b/tensorflow/lite/micro/micro_time.cc @@ -0,0 +1,58 @@ +/* Copyright 2020 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. 
+You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +// Reference implementation of timer functions. Platforms are not required to +// implement these timer methods, but they are required to enable profiling. + +// On platforms that have a POSIX stack or C library, it can be written using +// methods from <sys/time.h> or clock() from <time.h>. + +// To add an equivalent function for your own platform, create your own +// implementation file, and place it in a subfolder with named after the OS +// you're targeting. For example, see the Cortex M bare metal version in +// tensorflow/lite/micro/bluepill/micro_time.cc + +#include "tensorflow/lite/micro/micro_time.h" + +#if defined(TF_LITE_USE_CTIME) +#include <ctime> +#endif + +namespace tflite { + +#if !defined(TF_LITE_USE_CTIME) + +// Reference implementation of the ticks_per_second() function that's required +// for a platform to support Tensorflow Lite for Microcontrollers profiling. +// This returns 0 by default because timing is an optional feature that builds +// without errors on platforms that do not need it. 
+uint32_t GetCurrentTimeTicks() { return 0; } + +#else  // defined(TF_LITE_USE_CTIME) + +// For platforms that support ctime, we implement the micro_time interface in +// this central location. +uint32_t ticks_per_second() { return CLOCKS_PER_SEC; } + +uint32_t GetCurrentTimeTicks() { return clock(); } +#endif + +}  // namespace tflite diff --git a/tensorflow/lite/micro/micro_time.h b/tensorflow/lite/micro/micro_time.h new file mode 100644 index 0000000..7a8ab45 --- /dev/null +++ b/tensorflow/lite/micro/micro_time.h @@ -0,0 +1,36 @@ +/* Copyright 2020 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ +#ifndef TENSORFLOW_LITE_MICRO_MICRO_TIME_H_ +#define TENSORFLOW_LITE_MICRO_MICRO_TIME_H_ + +#include <cstdint> + +namespace tflite { + +// These functions should be implemented by each target platform, and provide an +// accurate tick count along with how many ticks there are per second. +uint32_t ticks_per_second(); + +// Return time in ticks. The meaning of a tick varies per platform. 
+uint32_t GetCurrentTimeTicks(); + +inline uint32_t TicksToMs(int32_t ticks) { + return static_cast(1000.0f * static_cast(ticks) / + static_cast(ticks_per_second())); +} + +} // namespace tflite + +#endif // TENSORFLOW_LITE_MICRO_MICRO_TIME_H_ diff --git a/tensorflow/lite/micro/micro_time_test.cc b/tensorflow/lite/micro/micro_time_test.cc new file mode 100644 index 0000000..7e3a02d --- /dev/null +++ b/tensorflow/lite/micro/micro_time_test.cc @@ -0,0 +1,48 @@ +/* Copyright 2020 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#include "tensorflow/lite/micro/micro_time.h" + +#include "tensorflow/lite/micro/testing/micro_test.h" + +TF_LITE_MICRO_TESTS_BEGIN + +TF_LITE_MICRO_TEST(TestBasicTimerFunctionality) { + uint32_t ticks_per_second = tflite::ticks_per_second(); + + // Retry enough times to guarantee a tick advance, while not taking too long + // to complete. With 1e6 retries, assuming each loop takes tens of cycles, + // this will retry for less than 10 seconds on a 10MHz platform. + constexpr int kMaxRetries = 1e6; + unsigned int start_time = tflite::GetCurrentTimeTicks(); + + if (ticks_per_second != 0) { + for (int i = 0; i < kMaxRetries; i++) { + if (tflite::GetCurrentTimeTicks() - start_time > 0) { + break; + } + } + } + + // Ensure the timer is increasing. This works for the overflow case too, since + // (MIN_INT + x) - (MAX_INT - y) == x + y + 1. 
For example, + // 0x80000001(min int + 1) - 0x7FFFFFFE(max int - 1) = 0x00000003 == 3. + // GetTicksPerSecond() == 0 means the timer is not implemented on this + // platform. + TF_LITE_MICRO_EXPECT(ticks_per_second == 0 || + tflite::GetCurrentTimeTicks() - start_time > 0); +} + +TF_LITE_MICRO_TESTS_END diff --git a/tensorflow/lite/micro/micro_utils.cc b/tensorflow/lite/micro/micro_utils.cc new file mode 100644 index 0000000..7b0c9cf --- /dev/null +++ b/tensorflow/lite/micro/micro_utils.cc @@ -0,0 +1,90 @@ +/* Copyright 2019 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/ + +#include "tensorflow/lite/micro/micro_utils.h" + +#include +#include +#include + +#include "tensorflow/lite/c/common.h" +#include "tensorflow/lite/kernels/internal/compatibility.h" +#include "tensorflow/lite/kernels/op_macros.h" +#include "tensorflow/lite/micro/memory_helpers.h" +#include "tensorflow/lite/micro/micro_log.h" + +namespace tflite { + +int ElementCount(const TfLiteIntArray& dims) { + int result = 1; + for (int i = 0; i < dims.size; ++i) { + result *= dims.data[i]; + } + return result; +} + +size_t EvalTensorBytes(const TfLiteEvalTensor* tensor) { + size_t bytes_per_element; + TFLITE_DCHECK(kTfLiteOk == + TfLiteTypeSizeOf(tensor->type, &bytes_per_element)); + return ElementCount(*tensor->dims) * bytes_per_element; +} + +void SignedSymmetricPerChannelQuantize( + const float* values, TfLiteIntArray* dims, int quantized_dimension, + int8_t* quantized_values, float* scaling_factors, TfLiteType type) { + int input_size = ElementCount(*dims); + int channel_count = dims->data[quantized_dimension]; + int per_channel_size = input_size / channel_count; + + int stride; + int channel_stride; + + int qmin = QMinFromTfLiteType(type); + int qmax = QMaxFromTfLiteType(type); + + if (quantized_dimension == 0) { + stride = 1; + channel_stride = per_channel_size; + } else if (quantized_dimension == 3) { + stride = channel_count; + channel_stride = 1; + } else { + MicroPrintf("quantized dimension must be 0 or 3"); + TFLITE_ABORT; + } + + // Calculate scales for each channel. 
+ for (int channel = 0; channel < channel_count; channel++) { + float min = 0; + float max = 0; + + for (int i = 0; i < per_channel_size; i++) { + int idx = channel * channel_stride + i * stride; + min = fminf(min, values[idx]); + max = fmaxf(max, values[idx]); + } + scaling_factors[channel] = fmaxf(fabs(min), fabs(max)) / qmax; + for (int i = 0; i < per_channel_size; i++) { + int idx = channel * channel_stride + i * stride; + const int32_t quantized_value = + static_cast(roundf(values[idx] / scaling_factors[channel])); + // Clamp: just in case some odd numeric offset. + quantized_values[idx] = fminf(qmax, fmaxf(qmin + 1, quantized_value)); + } + } +} + +} // namespace tflite diff --git a/tensorflow/lite/micro/micro_utils.h b/tensorflow/lite/micro/micro_utils.h new file mode 100644 index 0000000..98ef81d --- /dev/null +++ b/tensorflow/lite/micro/micro_utils.h @@ -0,0 +1,162 @@ +/* Copyright 2019 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#ifndef TENSORFLOW_LITE_MICRO_MICRO_UTILS_H_ +#define TENSORFLOW_LITE_MICRO_MICRO_UTILS_H_ + +#include +#include +#include +#include + +#include "tensorflow/lite/c/common.h" + +namespace tflite { + +// Returns number of elements in the shape array. 
+ +int ElementCount(const TfLiteIntArray& dims); + +size_t EvalTensorBytes(const TfLiteEvalTensor* tensor); + +// C++11 does not support constexpr max; hence, use ternary conditional to +// create our own constexpr Max function. +constexpr int Max(int a, int b) { return a >= b ? a : b; } + +// Converts a float value into a quantized value. Note that large values (close +// to max int and min int) may see significant error due to a lack of floating +// point granularity for large values. +template +T FloatToQuantizedType(const float value, const float scale, int zero_point) { + int32_t result = round(value / scale) + zero_point; + result = + std::max(static_cast(std::numeric_limits::min()), result); + result = + std::min(static_cast(std::numeric_limits::max()), result); + return result; +} + +template +T FloatToSymmetricQuantizedType(const float value, const float scale) { + // 64-bit values are required since 8x16 conv accumulates to int64, meaning + // an int64 bias is required. + std::int64_t result = round(value / scale); + result = std::max( + static_cast(std::numeric_limits::min() + 1), result); + result = std::min(static_cast(std::numeric_limits::max()), + result); + return result; +} + +// Helper methods to quantize arrays of floats to the desired format. 
+// +// There are several key flavors of quantization in TfLite: +// asymmetric symmetric per channel +// int8_t | X | X | X | +// uint8_t | X | X | | +// int16_t | X | | | +// int32_t | | X | X | +// +// The per-op quantization spec can be found here: +// https://www.tensorflow.org/lite/performance/quantization_spec +template +void Quantize(const float* input, T* output, int num_elements, float scale, + int zero_point) { + for (int i = 0; i < num_elements; i++) { + output[i] = FloatToQuantizedType(input[i], scale, zero_point); + } +} + +template +void SymmetricQuantize(const float* input, T* output, int num_elements, + float scale) { + for (int i = 0; i < num_elements; i++) { + output[i] = FloatToSymmetricQuantizedType(input[i], scale); + } +} + +template +void SymmetricPerChannelQuantize(const float* input, T* output, + int num_elements, int num_channels, + float* scales) { + int elements_per_channel = num_elements / num_channels; + for (int i = 0; i < num_channels; i++) { + for (int j = 0; j < elements_per_channel; j++) { + output[i * elements_per_channel + j] = FloatToSymmetricQuantizedType( + input[i * elements_per_channel + j], scales[i]); + } + } +} + +void SignedSymmetricPerChannelQuantize(const float* values, + TfLiteIntArray* dims, + int quantized_dimension, + int8_t* quantized_values, + float* scaling_factor, + TfLiteType type = kTfLiteNoType); + +// Quantizes inputs based on the values provided, choosing the smallest range +// which includes all input values. 
+template +void SymmetricQuantizeCalculateScales(const float* values, TfLiteIntArray* dims, + T* output, float* scale) { + int input_size = ElementCount(*dims); + + float min = 0; + float max = 0; + for (int i = 0; i < input_size; i++) { + min = fminf(min, values[i]); + max = fmaxf(max, values[i]); + } + *scale = fmaxf(std::abs(min), std::abs(max)) / std::numeric_limits::max(); + for (int i = 0; i < input_size; i++) { + const int32_t quantized_value = + static_cast(roundf(values[i] / *scale)); + // Clamp: just in case some odd numeric offset. + quantized_value = fminf(std::numeric_limits::max(), quantized_value); + quantized_value = fmaxf(std::numeric_limits::min() + 1, quantized_value); + output[i] = quantized_value; + } +} + +template +void Dequantize(const T* values, const int size, const float scale, + int zero_point, float* dequantized_values) { + for (int i = 0; i < size; ++i) { + dequantized_values[i] = (values[i] - zero_point) * scale; + } +} + +// based on TfLiteType passed in to these functions the corresponding max / min +// int for that type are returned +inline int QMinFromTfLiteType(TfLiteType type) { + if (type == kTfLiteInt4) { + return -8; + } else { + return std::numeric_limits::min(); + } +} + +inline int QMaxFromTfLiteType(TfLiteType type) { + if (type == kTfLiteInt4) { + return 7; + } else { + return std::numeric_limits::max(); + } +} + +} // namespace tflite + +#endif // TENSORFLOW_LITE_MICRO_MICRO_UTILS_H_ diff --git a/tensorflow/lite/micro/micro_utils_test.cc b/tensorflow/lite/micro/micro_utils_test.cc new file mode 100644 index 0000000..0df9f2a --- /dev/null +++ b/tensorflow/lite/micro/micro_utils_test.cc @@ -0,0 +1,129 @@ +/* Copyright 2019 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. 
+You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#include "tensorflow/lite/micro/micro_utils.h" + +#include "tensorflow/lite/micro/testing/micro_test.h" + +TF_LITE_MICRO_TESTS_BEGIN + +TF_LITE_MICRO_TEST(FloatToAsymmetricQuantizedUInt8Test) { + using tflite::FloatToQuantizedType; + // [0, 127.5] -> zero_point=0, scale=0.5 + TF_LITE_MICRO_EXPECT_EQ(0, FloatToQuantizedType(0, 0.5, 0)); + TF_LITE_MICRO_EXPECT_EQ(254, FloatToQuantizedType(127, 0.5, 0)); + TF_LITE_MICRO_EXPECT_EQ(255, FloatToQuantizedType(127.5, 0.5, 0)); + // [-10, 245] -> zero_point=10, scale=1.0 + TF_LITE_MICRO_EXPECT_EQ(0, FloatToQuantizedType(-10, 1.0, 10)); + TF_LITE_MICRO_EXPECT_EQ(1, FloatToQuantizedType(-9, 1.0, 10)); + TF_LITE_MICRO_EXPECT_EQ(128, FloatToQuantizedType(118, 1.0, 10)); + TF_LITE_MICRO_EXPECT_EQ(253, FloatToQuantizedType(243, 1.0, 10)); + TF_LITE_MICRO_EXPECT_EQ(254, FloatToQuantizedType(244, 1.0, 10)); + TF_LITE_MICRO_EXPECT_EQ(255, FloatToQuantizedType(245, 1.0, 10)); +} + +TF_LITE_MICRO_TEST(FloatToAsymmetricQuantizedInt8Test) { + using tflite::FloatToQuantizedType; + // [-64, 63.5] -> zero_point=0, scale=0.5 + TF_LITE_MICRO_EXPECT_EQ(2, FloatToQuantizedType(1, 0.5, 0)); + TF_LITE_MICRO_EXPECT_EQ(4, FloatToQuantizedType(2, 0.5, 0)); + TF_LITE_MICRO_EXPECT_EQ(6, FloatToQuantizedType(3, 0.5, 0)); + TF_LITE_MICRO_EXPECT_EQ(-10, FloatToQuantizedType(-5, 0.5, 0)); + TF_LITE_MICRO_EXPECT_EQ(-128, FloatToQuantizedType(-64, 0.5, 0)); + TF_LITE_MICRO_EXPECT_EQ(127, FloatToQuantizedType(63.5, 0.5, 0)); + // [-127, 128] -> zero_point=-1, scale=1.0 + 
TF_LITE_MICRO_EXPECT_EQ(0, FloatToQuantizedType(1, 1.0, -1)); + TF_LITE_MICRO_EXPECT_EQ(-1, FloatToQuantizedType(0, 1.0, -1)); + TF_LITE_MICRO_EXPECT_EQ(126, FloatToQuantizedType(127, 1.0, -1)); + TF_LITE_MICRO_EXPECT_EQ(127, FloatToQuantizedType(128, 1.0, -1)); + TF_LITE_MICRO_EXPECT_EQ(-127, FloatToQuantizedType(-126, 1.0, -1)); + TF_LITE_MICRO_EXPECT_EQ(-128, FloatToQuantizedType(-127, 1.0, -1)); +} + +TF_LITE_MICRO_TEST(FloatToSymmetricQuantizedInt8Test) { + using tflite::FloatToSymmetricQuantizedType; + // [-64, 63.5] -> zero_point=0, scale=0.5 + TF_LITE_MICRO_EXPECT_EQ(2, FloatToSymmetricQuantizedType(1, 0.5)); + TF_LITE_MICRO_EXPECT_EQ(4, FloatToSymmetricQuantizedType(2, 0.5)); + TF_LITE_MICRO_EXPECT_EQ(6, FloatToSymmetricQuantizedType(3, 0.5)); + TF_LITE_MICRO_EXPECT_EQ(-10, FloatToSymmetricQuantizedType(-5, 0.5)); + TF_LITE_MICRO_EXPECT_EQ(-127, + FloatToSymmetricQuantizedType(-64, 0.5)); + TF_LITE_MICRO_EXPECT_EQ(127, + FloatToSymmetricQuantizedType(63.5, 0.5)); + // [-127, 128] -> zero_point=-1, scale=1.0 + TF_LITE_MICRO_EXPECT_EQ(1, FloatToSymmetricQuantizedType(1, 1.0)); + TF_LITE_MICRO_EXPECT_EQ(0, FloatToSymmetricQuantizedType(0, 1.0)); + TF_LITE_MICRO_EXPECT_EQ(127, FloatToSymmetricQuantizedType(127, 1.0)); + TF_LITE_MICRO_EXPECT_EQ(127, FloatToSymmetricQuantizedType(128, 1.0)); + TF_LITE_MICRO_EXPECT_EQ(-126, + FloatToSymmetricQuantizedType(-126, 1.0)); + TF_LITE_MICRO_EXPECT_EQ(-127, + FloatToSymmetricQuantizedType(-127, 1.0)); +} + +TF_LITE_MICRO_TEST(FloatToAsymmetricQuantizedInt32Test) { + using tflite::FloatToSymmetricQuantizedType; + TF_LITE_MICRO_EXPECT_EQ(0, FloatToSymmetricQuantizedType(0, 0.5)); + TF_LITE_MICRO_EXPECT_EQ(2, FloatToSymmetricQuantizedType(1, 0.5)); + TF_LITE_MICRO_EXPECT_EQ(-2, FloatToSymmetricQuantizedType(-1, 0.5)); + TF_LITE_MICRO_EXPECT_EQ(-100, + FloatToSymmetricQuantizedType(-50, 0.5)); + TF_LITE_MICRO_EXPECT_EQ(100, FloatToSymmetricQuantizedType(50, 0.5)); +} + +TF_LITE_MICRO_TEST(AsymmetricQuantizeInt8) { + float 
values[] = {-10.3, -3.1, -2.1, -1.9, -0.9, 0.1, 0.9, 1.85, 2.9, 4.1}; + int8_t goldens[] = {-20, -5, -3, -3, -1, 1, 3, 5, 7, 9}; + constexpr int length = sizeof(values) / sizeof(float); + int8_t quantized[length]; + tflite::Quantize(values, quantized, length, 0.5, 1); + for (int i = 0; i < length; i++) { + TF_LITE_MICRO_EXPECT_EQ(quantized[i], goldens[i]); + } +} + +TF_LITE_MICRO_TEST(AsymmetricQuantizeUInt8) { + float values[] = {-10.3, -3.1, -2.1, -1.9, -0.9, 0.1, 0.9, 1.85, 2.9, 4.1}; + uint8_t goldens[] = {106, 121, 123, 123, 125, 127, 129, 131, 133, 135}; + constexpr int length = sizeof(values) / sizeof(float); + uint8_t quantized[length]; + tflite::Quantize(values, quantized, length, 0.5, 127); + for (int i = 0; i < length; i++) { + TF_LITE_MICRO_EXPECT_EQ(quantized[i], goldens[i]); + } +} + +TF_LITE_MICRO_TEST(SymmetricQuantizeInt32) { + float values[] = {-10.3, -3.1, -2.1, -1.9, -0.9, 0.1, 0.9, 1.85, 2.9, 4.1}; + int32_t goldens[] = {-21, -6, -4, -4, -2, 0, 2, 4, 6, 8}; + constexpr int length = sizeof(values) / sizeof(float); + int32_t quantized[length]; + tflite::SymmetricQuantize(values, quantized, length, 0.5); + for (int i = 0; i < length; i++) { + TF_LITE_MICRO_EXPECT_EQ(quantized[i], goldens[i]); + } +} + +// Verify Max function works as expected. +TF_LITE_MICRO_TEST(Max) { + using tflite::Max; + // [0, 127.5] -> zero_point=0, scale=0.5 + TF_LITE_MICRO_EXPECT_EQ(3, Max(2, 3)); + TF_LITE_MICRO_EXPECT_EQ(2, Max(2, 2)); + TF_LITE_MICRO_EXPECT_EQ(4, Max(3, Max(4, 2))); +} + +TF_LITE_MICRO_TESTS_END diff --git a/tensorflow/lite/micro/mock_micro_graph.cc b/tensorflow/lite/micro/mock_micro_graph.cc new file mode 100644 index 0000000..438a406 --- /dev/null +++ b/tensorflow/lite/micro/mock_micro_graph.cc @@ -0,0 +1,66 @@ +/* Copyright 2021 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. 
+You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#include "tensorflow/lite/micro/mock_micro_graph.h" + +#include "tensorflow/lite/micro/test_helpers.h" + +namespace tflite { + +MockMicroGraph::MockMicroGraph(SingleArenaBufferAllocator* allocator) + : MicroGraph(nullptr, nullptr, nullptr, nullptr), + allocator_(allocator), + init_count_(0), + prepare_count_(0), + free_count_(0) { + memset(invoke_counts_, 0, sizeof(invoke_counts_)); + mock_tensor_ = + reinterpret_cast(allocator_->AllocatePersistentBuffer( + sizeof(TfLiteEvalTensor), alignof(TfLiteEvalTensor))); + int* dims_array = reinterpret_cast( + allocator_->AllocatePersistentBuffer(3 * sizeof(int), alignof(int))); + float* data_array = reinterpret_cast( + allocator_->AllocatePersistentBuffer(2 * sizeof(float), alignof(float))); + int dims[] = {2, 1, 2}; + memcpy(dims_array, dims, 3 * sizeof(int)); + mock_tensor_->dims = testing::IntArrayFromInts(dims_array); + mock_tensor_->data.f = data_array; + mock_tensor_->type = kTfLiteFloat32; +} + +TfLiteStatus MockMicroGraph::InvokeSubgraph(int subgraph_idx) { + invoke_counts_[subgraph_idx]++; + return kTfLiteOk; +} + +TfLiteStatus MockMicroGraph::ResetVariableTensors() { return kTfLiteOk; } + +size_t MockMicroGraph::NumSubgraphInputs(int subgraph_idx) { return 1; } + +TfLiteEvalTensor* MockMicroGraph::GetSubgraphInput(int subgraph_idx, + int tensor_idx) { + return mock_tensor_; +} + +size_t MockMicroGraph::NumSubgraphOutputs(int subgraph_idx) { return 1; } + +TfLiteEvalTensor* MockMicroGraph::GetSubgraphOutput(int subgraph_idx, + int 
tensor_idx) { + return mock_tensor_; +} + +int MockMicroGraph::NumSubgraphs() { return kMaxSubgraphs; } + +} // namespace tflite diff --git a/tensorflow/lite/micro/mock_micro_graph.h b/tensorflow/lite/micro/mock_micro_graph.h new file mode 100644 index 0000000..3ae7d7c --- /dev/null +++ b/tensorflow/lite/micro/mock_micro_graph.h @@ -0,0 +1,60 @@ +/* Copyright 2021 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#ifndef TENSORFLOW_LITE_MICRO_MOCK_MICRO_GRAPH_H_ +#define TENSORFLOW_LITE_MICRO_MOCK_MICRO_GRAPH_H_ + +#include "tensorflow/lite/c/common.h" +#include "tensorflow/lite/micro/micro_allocator.h" +#include "tensorflow/lite/micro/micro_graph.h" +#include "tensorflow/lite/schema/schema_generated.h" + +namespace tflite { + +// MockMicroGraph stubs out all MicroGraph methods used during invoke. A count +// of the number of calls to invoke for each subgraph is maintained for +// validation of control flow operators. 
+class MockMicroGraph : public MicroGraph { + public: + explicit MockMicroGraph(SingleArenaBufferAllocator* allocator); + TfLiteStatus InvokeSubgraph(int subgraph_idx) override; + TfLiteStatus ResetVariableTensors() override; + size_t NumSubgraphInputs(int subgraph_idx) override; + TfLiteEvalTensor* GetSubgraphInput(int subgraph_idx, int tensor_idx) override; + size_t NumSubgraphOutputs(int subgraph_idx) override; + TfLiteEvalTensor* GetSubgraphOutput(int subgraph_idx, + int tensor_idx) override; + int NumSubgraphs() override; + int get_init_count() const { return init_count_; } + int get_prepare_count() const { return prepare_count_; } + int get_free_count() const { return free_count_; } + int get_invoke_count(int subgraph_idx) const { + return invoke_counts_[subgraph_idx]; + } + + private: + static constexpr int kMaxSubgraphs = 10; + SingleArenaBufferAllocator* allocator_; + TfLiteEvalTensor* mock_tensor_; + int init_count_; + int prepare_count_; + int free_count_; + int invoke_counts_[kMaxSubgraphs]; + TF_LITE_REMOVE_VIRTUAL_DELETE +}; + +} // namespace tflite + +#endif // TENSORFLOW_LITE_MICRO_MOCK_MICRO_GRAPH_H_ diff --git a/tensorflow/lite/micro/models/BUILD b/tensorflow/lite/micro/models/BUILD new file mode 100644 index 0000000..76108c8 --- /dev/null +++ b/tensorflow/lite/micro/models/BUILD @@ -0,0 +1,51 @@ +# Description: +# TensorFlow Lite for Microcontrollers Vision Example. 
+load("//tensorflow/lite/micro:build_def.bzl", "generate_cc_arrays") + +package( + default_visibility = ["//visibility:public"], + # Disabling layering_check because of http://b/177257332 + features = ["-layering_check"], + licenses = ["notice"], +) + +filegroup( + name = "models", + data = glob(["*.tflite"]), +) + +generate_cc_arrays( + name = "generated_person_detect_model_cc", + src = "person_detect.tflite", + out = "person_detect_model_data.cc", +) + +generate_cc_arrays( + name = "generated_person_detect_model_hdr", + src = "person_detect.tflite", + out = "person_detect_model_data.h", +) + +generate_cc_arrays( + name = "generated_keyword_scrambled_model_hdr", + src = "keyword_scrambled.tflite", + out = "keyword_scrambled_model_data.h", +) + +generate_cc_arrays( + name = "generated_keyword_scrambled_model_cc", + src = "keyword_scrambled.tflite", + out = "keyword_scrambled_model_data.cc", +) + +generate_cc_arrays( + name = "generated_keyword_scrambled_8bit_model_hdr", + src = "keyword_scrambled_8bit.tflite", + out = "keyword_scrambled_8bit_model_data.h", +) + +generate_cc_arrays( + name = "generated_keyword_scrambled_8bit_model_cc", + src = "keyword_scrambled_8bit.tflite", + out = "keyword_scrambled_8bit_model_data.cc", +) diff --git a/tensorflow/lite/micro/models/keyword_scrambled.tflite b/tensorflow/lite/micro/models/keyword_scrambled.tflite new file mode 100644 index 0000000..4ff7787 Binary files /dev/null and b/tensorflow/lite/micro/models/keyword_scrambled.tflite differ diff --git a/tensorflow/lite/micro/models/keyword_scrambled_8bit.tflite b/tensorflow/lite/micro/models/keyword_scrambled_8bit.tflite new file mode 100644 index 0000000..6cf7636 Binary files /dev/null and b/tensorflow/lite/micro/models/keyword_scrambled_8bit.tflite differ diff --git a/tensorflow/lite/micro/models/person_detect.tflite b/tensorflow/lite/micro/models/person_detect.tflite new file mode 100644 index 0000000..8159d01 Binary files /dev/null and 
b/tensorflow/lite/micro/models/person_detect.tflite differ diff --git a/tensorflow/lite/micro/python/BUILD b/tensorflow/lite/micro/python/BUILD new file mode 100644 index 0000000..e69de29 diff --git a/tensorflow/lite/micro/python/interpreter/BUILD b/tensorflow/lite/micro/python/interpreter/BUILD new file mode 100644 index 0000000..e69de29 diff --git a/tensorflow/lite/micro/python/interpreter/src/BUILD b/tensorflow/lite/micro/python/interpreter/src/BUILD new file mode 100644 index 0000000..601e3db --- /dev/null +++ b/tensorflow/lite/micro/python/interpreter/src/BUILD @@ -0,0 +1,53 @@ +load( + "//tensorflow/lite/micro:build_def.bzl", + "micro_copts", +) +load( + "//tensorflow:extra_rules.bzl", + "tflm_python_op_resolver_friends", +) + +package( + features = ["-layering_check"], + licenses = ["notice"], +) + +package_group( + name = "op_resolver_friends", + packages = tflm_python_op_resolver_friends(), +) + +# tflm_runtime is deprecated, please use //python/tflite_micro:runtime instead. +# TODO(b/286456378): remove once all usage is changed to the runtime target. +py_library( + name = "tflm_runtime", + srcs = ["tflm_runtime.py"], + visibility = ["//visibility:public"], + deps = ["//python/tflite_micro:runtime"], +) + +# runtime is deprecated, please use //python/tflite_micro:runtime instead. +# TODO(b/286456378): remove once all usage is changed to the runtime target. +py_library( + name = "runtime", + srcs = ["runtime.py"], + visibility = ["//visibility:public"], + deps = ["//python/tflite_micro:runtime"], +) + +# TODO(b/286456378): remove once all internal usage is fixed. 
+cc_library( + name = "python_ops_resolver", + srcs = [], + hdrs = [ + "python_ops_resolver.h", + ], + copts = micro_copts(), + visibility = [ + ":op_resolver_friends", + "//tensorflow/lite/micro/integration_tests:__subpackages__", + ], + deps = [ + "//python/tflite_micro:python_ops_resolver", + ], +) diff --git a/tensorflow/lite/micro/python/interpreter/src/python_ops_resolver.h b/tensorflow/lite/micro/python/interpreter/src/python_ops_resolver.h new file mode 100644 index 0000000..8d27aee --- /dev/null +++ b/tensorflow/lite/micro/python/interpreter/src/python_ops_resolver.h @@ -0,0 +1,21 @@ +/* Copyright 2023 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ +#ifndef TENSORFLOW_LITE_MICRO_PYTHON_INTERPRETER_SRC_PYTHON_OPS_RESOLVER_H_ +#define TENSORFLOW_LITE_MICRO_PYTHON_INTERPRETER_SRC_PYTHON_OPS_RESOLVER_H_ + +// TODO(b/286456378): remove once this shim is no longer needed. +#include "python/tflite_micro/python_ops_resolver.h" + +#endif // TENSORFLOW_LITE_MICRO_PYTHON_INTERPRETER_SRC_PYTHON_OPS_RESOLVER_H_ diff --git a/tensorflow/lite/micro/python/interpreter/src/runtime.py b/tensorflow/lite/micro/python/interpreter/src/runtime.py new file mode 100644 index 0000000..5432d55 --- /dev/null +++ b/tensorflow/lite/micro/python/interpreter/src/runtime.py @@ -0,0 +1,17 @@ +# Copyright 2023 The TensorFlow Authors. All Rights Reserved. 
+# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================== + +# TODO(b/286456378): remove once all usage is switched to runtime +from tflite_micro.python.tflite_micro.runtime import * diff --git a/tensorflow/lite/micro/python/interpreter/src/tflm_runtime.py b/tensorflow/lite/micro/python/interpreter/src/tflm_runtime.py new file mode 100644 index 0000000..5432d55 --- /dev/null +++ b/tensorflow/lite/micro/python/interpreter/src/tflm_runtime.py @@ -0,0 +1,17 @@ +# Copyright 2023 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# ============================================================================== + +# TODO(b/286456378): remove once all usage is switched to runtime +from tflite_micro.python.tflite_micro.runtime import * diff --git a/tensorflow/lite/micro/python/tflite_size/README.md b/tensorflow/lite/micro/python/tflite_size/README.md new file mode 100644 index 0000000..173d287 --- /dev/null +++ b/tensorflow/lite/micro/python/tflite_size/README.md @@ -0,0 +1,27 @@ +This is a experimental tool to generate a visualization of tflite file with size info for +each field. + +The size info of each field is the raw storage size info of each field without +any flatbuffer overhead such as the offset table etc. Hence, the size info +provide a lower bound on the size of data required (such as storing it into a c +struct) instead of storing it as the tflite buffer. + +Here is how you can use a visualization of tflite file + +``` +cd tensorflow/lite/micro/python/tflite_size/src + +bazel run flatbuffer_size -- in_tflite_file out_html_file +``` + +A sample output html looks like this ![sample_output](./sample_output.png). + +It displays each field's name, value and size. The display is composed of +collapsibly list so that you can zoom in/out individual structure based on need. + +## How to update `schema_generated_with_reflective_type.h` + +We generate our own schema_generated_with_reflective, using the build target in +tensorflow/lite/schema:schema_fbs_with_reflection (call with: +bazel build schema_fbs_with_reflection_srcs). 
+ diff --git a/tensorflow/lite/micro/python/tflite_size/sample_output.png b/tensorflow/lite/micro/python/tflite_size/sample_output.png new file mode 100644 index 0000000..ef0819f Binary files /dev/null and b/tensorflow/lite/micro/python/tflite_size/sample_output.png differ diff --git a/tensorflow/lite/micro/python/tflite_size/src/BUILD b/tensorflow/lite/micro/python/tflite_size/src/BUILD new file mode 100644 index 0000000..66d9b50 --- /dev/null +++ b/tensorflow/lite/micro/python/tflite_size/src/BUILD @@ -0,0 +1,60 @@ +load("@pybind11_bazel//:build_defs.bzl", "pybind_extension", "pybind_library") + +package( + default_visibility = ["//visibility:public"], + features = ["-layering_check"], + licenses = ["notice"], +) + +# Append _lib at the end to avoid naming collision with the extension below +# because internal tool appends a _pybind suffix. +pybind_library( + name = "flatbuffer_size_wrapper_lib", + srcs = [ + "flatbuffer_size.cc", + "flatbuffer_size_wrapper.cc", + ], + hdrs = [ + "flatbuffer_size.h", + "flatbuffer_size_wrapper.h", + ], + deps = [ + "//tensorflow/lite/schema:schema_fbs_with_reflection", + "@flatbuffers", + ], +) + +# pybind_extension() appends ".so" to "name" so the actual target name contains +# the ".so" suffix +pybind_extension( + name = "flatbuffer_size_wrapper_pybind", + srcs = [ + "flatbuffer_size_wrapper_pybind.cc", + ], + deps = [ + ":flatbuffer_size_wrapper_lib", + ], +) + +py_library( + name = "flatbuffer_size_lib", + srcs = [ + "flatbuffer_size_graph.py", + "flatbuffer_size_graph_html_converter.py", + ], + data = [ + ":flatbuffer_size_wrapper_pybind.so", + ], + srcs_version = "PY3", +) + +py_binary( + name = "flatbuffer_size", + srcs = [ + "flatbuffer_size.py", + ], + srcs_version = "PY3", + deps = [ + ":flatbuffer_size_lib", + ], +) diff --git a/tensorflow/lite/micro/python/tflite_size/src/flatbuffer_size.cc b/tensorflow/lite/micro/python/tflite_size/src/flatbuffer_size.cc new file mode 100644 index 0000000..5c06dfc --- /dev/null +++ 
b/tensorflow/lite/micro/python/tflite_size/src/flatbuffer_size.cc @@ -0,0 +1,578 @@ +/* Copyright 2022 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +/* This file is adopted from + * flatbuffers/include/flatbuffers/minireflect.h **/ + +#include "flatbuffer_size.h" + +#include + +#include "flatbuffers/flatbuffers.h" +#include "flatbuffers/util.h" + +namespace { + +using flatbuffers::ElementaryType; +using flatbuffers::TypeTable; + +using flatbuffers::EscapeString; +using flatbuffers::soffset_t; +using flatbuffers::String; +using flatbuffers::Table; +using flatbuffers::uoffset_t; +using flatbuffers::Vector; + +using flatbuffers::FieldIndexToOffset; +using flatbuffers::GetRoot; +using flatbuffers::NumToString; +using flatbuffers::ReadScalar; + +using flatbuffers::ET_BOOL; +using flatbuffers::ET_CHAR; +using flatbuffers::ET_DOUBLE; +using flatbuffers::ET_FLOAT; +using flatbuffers::ET_INT; +using flatbuffers::ET_LONG; +using flatbuffers::ET_SEQUENCE; +using flatbuffers::ET_SHORT; +using flatbuffers::ET_STRING; +using flatbuffers::ET_UCHAR; +using flatbuffers::ET_UINT; +using flatbuffers::ET_ULONG; +using flatbuffers::ET_USHORT; +using flatbuffers::ET_UTYPE; + +using flatbuffers::ST_ENUM; +using flatbuffers::ST_STRUCT; +using flatbuffers::ST_TABLE; +using flatbuffers::ST_UNION; +using flatbuffers::voffset_t; + +/* Utilities that petty print a tflite buffer in a json 
format with the +additional size information. Each element is represented by the following json +string: + +field_name: {value: xxxx, total_size: } +field_name: {value: [ ], total_size: } + +where value can be: +1. a dict (a new structure) that is not just value and total_size +2. a list (a new array) +3. a scalar (neither dict nor list). +*/ + +// Returns the storage size of a basic element type +inline size_t InlineSize(ElementaryType type, const TypeTable* type_table) { + switch (type) { + case ET_UTYPE: + case ET_BOOL: + case ET_CHAR: + case ET_UCHAR: + return 1; + case ET_SHORT: + case ET_USHORT: + return 2; + case ET_INT: + case ET_UINT: + case ET_FLOAT: + case ET_STRING: + return 4; + case ET_LONG: + case ET_ULONG: + case ET_DOUBLE: + return 8; + case flatbuffers::ET_SEQUENCE: + switch (type_table->st) { + case ST_TABLE: + case ST_UNION: + return 4; + case ST_STRUCT: + return static_cast(type_table->values[type_table->num_elems]); + default: + FLATBUFFERS_ASSERT(false); + return 1; + } + default: + FLATBUFFERS_ASSERT(false); + return 1; + } +} + +// First, a generic iterator that can be used by multiple algorithms. +struct IterationVisitor { + // These mark the scope of a table or struct. + virtual void StartSequence() {} + virtual void EndSequence(size_t) {} + // Called for each field regardless of whether it is present or not. + // If not present, val == nullptr. set_idx is the index of all set fields. + virtual void Field(size_t /*field_idx*/, size_t /*set_idx*/, + ElementaryType /*type*/, bool /*is_vector*/, + const TypeTable* /*type_table*/, const char* /*name*/, + const uint8_t* /*val*/) {} + // Called for a value that is actually present, after a field, or as part + // of a vector. 
+ virtual size_t UType(uint8_t, const char*) { + return InlineSize(flatbuffers::ET_UTYPE, nullptr); + } + virtual size_t Bool(bool) { return InlineSize(ET_BOOL, nullptr); } + virtual size_t Char(int8_t, const char*) { + return InlineSize(ET_CHAR, nullptr); + } + virtual size_t UChar(uint8_t, const char*) { + return InlineSize(ET_UCHAR, nullptr); + } + virtual size_t Short(int16_t, const char*) { + return InlineSize(ET_SHORT, nullptr); + } + virtual size_t UShort(uint16_t, const char*) { + return InlineSize(ET_USHORT, nullptr); + } + virtual size_t Int(int32_t, const char*) { + return InlineSize(ET_INT, nullptr); + } + virtual size_t UInt(uint32_t, const char*) { + return InlineSize(ET_UINT, nullptr); + } + virtual size_t Long(int64_t) { return InlineSize(ET_LONG, nullptr); } + virtual size_t ULong(uint64_t) { return InlineSize(ET_ULONG, nullptr); } + virtual size_t Float(float) { return InlineSize(ET_FLOAT, nullptr); } + virtual size_t Double(double) { return InlineSize(ET_DOUBLE, nullptr); } + virtual size_t String(const String*) { + return InlineSize(ET_STRING, nullptr); + } + virtual size_t Unknown(const uint8_t*) { + return 1; + } // From a future version. + // These mark the scope of a vector. + virtual void StartVector() {} + virtual void EndVector(size_t vector_size) {} + virtual void Element(size_t /*i*/, ElementaryType /*type*/, + const TypeTable* /*type_table*/, + const uint8_t* /*val*/) {} + virtual ~IterationVisitor() {} +}; + +inline int64_t LookupEnum(int64_t enum_val, const int64_t* values, + size_t num_values) { + if (!values) return enum_val; + for (size_t i = 0; i < num_values; i++) { + if (enum_val == values[i]) return static_cast(i); + } + return -1; // Unknown enum value. 
+} + +template +const char* EnumName(T tval, const TypeTable* type_table) { + if (!type_table || !type_table->names) return nullptr; + auto i = LookupEnum(static_cast(tval), type_table->values, + type_table->num_elems); + if (i >= 0 && i < static_cast(type_table->num_elems)) { + return type_table->names[i]; + } + return nullptr; +} + +size_t IterateObject(const uint8_t* obj, const TypeTable* type_table, + IterationVisitor* visitor); + +inline size_t SizeIterateValue(ElementaryType type, const uint8_t* val, + const TypeTable* type_table, + const uint8_t* prev_val, soffset_t vector_index, + IterationVisitor* visitor) { + size_t value_size = 0; + switch (type) { + case ET_UTYPE: { + auto tval = ReadScalar(val); + value_size += visitor->UType(tval, EnumName(tval, type_table)); + break; + } + case ET_BOOL: { + value_size += visitor->Bool(ReadScalar(val) != 0); + break; + } + case ET_CHAR: { + auto tval = ReadScalar(val); + value_size += visitor->Char(tval, EnumName(tval, type_table)); + break; + } + case ET_UCHAR: { + auto tval = ReadScalar(val); + value_size += visitor->UChar(tval, EnumName(tval, type_table)); + break; + } + case ET_SHORT: { + auto tval = ReadScalar(val); + value_size += visitor->Short(tval, EnumName(tval, type_table)); + break; + } + case ET_USHORT: { + auto tval = ReadScalar(val); + value_size += visitor->UShort(tval, EnumName(tval, type_table)); + break; + } + case ET_INT: { + auto tval = ReadScalar(val); + value_size += visitor->Int(tval, EnumName(tval, type_table)); + break; + } + case ET_UINT: { + auto tval = ReadScalar(val); + value_size += visitor->UInt(tval, EnumName(tval, type_table)); + break; + } + case ET_LONG: { + value_size += visitor->Long(ReadScalar(val)); + break; + } + case ET_ULONG: { + value_size += visitor->ULong(ReadScalar(val)); + break; + } + case ET_FLOAT: { + value_size += visitor->Float(ReadScalar(val)); + break; + } + case ET_DOUBLE: { + value_size += visitor->Double(ReadScalar(val)); + break; + } + case ET_STRING: { + val 
+= ReadScalar(val); + value_size += visitor->String(reinterpret_cast(val)); + break; + } + case ET_SEQUENCE: { + switch (type_table->st) { + case ST_TABLE: + val += ReadScalar(val); + value_size += IterateObject(val, type_table, visitor); + break; + case ST_STRUCT: + value_size += IterateObject(val, type_table, visitor); + break; + case ST_UNION: { + val += ReadScalar(val); + FLATBUFFERS_ASSERT(prev_val); + auto union_type = *prev_val; // Always a uint8_t. + if (vector_index >= 0) { + auto type_vec = reinterpret_cast*>(prev_val); + union_type = type_vec->Get(static_cast(vector_index)); + } + auto type_code_idx = + LookupEnum(union_type, type_table->values, type_table->num_elems); + if (type_code_idx >= 0 && + type_code_idx < static_cast(type_table->num_elems)) { + auto type_code = type_table->type_codes[type_code_idx]; + switch (type_code.base_type) { + case ET_SEQUENCE: { + auto ref = type_table->type_refs[type_code.sequence_ref](); + value_size += IterateObject(val, ref, visitor); + break; + } + case ET_STRING: + value_size += + visitor->String(reinterpret_cast(val)); + break; + default: + value_size += visitor->Unknown(val); + } + } else { + value_size += visitor->Unknown(val); + } + break; + } + case ST_ENUM: + FLATBUFFERS_ASSERT(false); + break; + } + break; + } + default: { + value_size += visitor->Unknown(val); + break; + } + } + return value_size; +} + +inline size_t IterateObject(const uint8_t* obj, const TypeTable* type_table, + IterationVisitor* visitor) { + visitor->StartSequence(); + const uint8_t* prev_val = nullptr; + size_t set_idx = 0; + size_t object_size = 0; + for (size_t i = 0; i < type_table->num_elems; i++) { + auto type_code = type_table->type_codes[i]; + auto type = static_cast(type_code.base_type); + auto is_vector = type_code.is_repeating != 0; + auto ref_idx = type_code.sequence_ref; + const TypeTable* ref = nullptr; + if (ref_idx >= 0) { + ref = type_table->type_refs[ref_idx](); + } + auto name = type_table->names ? 
type_table->names[i] : nullptr; + const uint8_t* val = nullptr; + if (type_table->st == ST_TABLE) { + val = reinterpret_cast(obj)->GetAddressOf( + FieldIndexToOffset(static_cast(i))); + } else { + val = obj + type_table->values[i]; + } + visitor->Field(i, set_idx, type, is_vector, ref, name, val); + if (val) { + set_idx++; + if (is_vector) { + val += ReadScalar(val); + auto vec = reinterpret_cast*>(val); + size_t vector_size = 0; + visitor->StartVector(); + auto elem_ptr = vec->Data(); + for (size_t j = 0; j < vec->size(); j++) { + visitor->Element(j, type, ref, elem_ptr); + size_t element_size = + SizeIterateValue(type, elem_ptr, ref, prev_val, + static_cast(j), visitor); + object_size += element_size; + vector_size += element_size; + elem_ptr += InlineSize(type, ref); + } + visitor->EndVector(vector_size); + } else { + object_size += SizeIterateValue(type, val, ref, prev_val, -1, visitor); + } + } + prev_val = val; + } + visitor->EndSequence(object_size); + return object_size; +} + +inline void IterateFlatBuffer(const uint8_t* buffer, + const TypeTable* type_table, + IterationVisitor* callback) { + IterateObject(GetRoot(buffer), type_table, callback); +} + +// Outputting a Flatbuffer to a string. Tries to conform as close to JSON / +// the output generated by idl_gen_text.cpp. 
+ +struct ToJsonWithSizeInfoVisitor : public IterationVisitor { + std::string s; + std::string d; // delimiter + bool q; // quote + std::string in; // indent + size_t indent_level; + bool vector_delimited; + ToJsonWithSizeInfoVisitor() + : d("\n"), q(true), in(""), indent_level(0), vector_delimited(false) {} + + void append_indent() { + for (size_t i = 0; i < indent_level; i++) { + s += in; + } + } + + void StartSequence() override { + s += "{ \"value\": {"; + s += d; + indent_level++; + } + void EndSequence(size_t object_size) override { + s += d; + indent_level--; + append_indent(); + + s += "}, \"total_size\": "; + s += NumToString(object_size); + s += "}"; + } + void Field(size_t /*field_idx*/, size_t set_idx, ElementaryType /*type*/, + bool /*is_vector*/, const TypeTable* /*type_table*/, + const char* name, const uint8_t* val) override { + if (!val) return; + if (set_idx) { + s += ","; + s += d; + } + append_indent(); + if (name) { + if (q) s += "\""; + s += name; + if (q) s += "\""; + s += ": "; + } + } + template + void Named(T x, const char* name) { + s += "{ \"value\": "; + if (name) { + if (q) s += "\""; + s += name; + if (q) s += "\""; + } else { + s += NumToString(x); + } + s += ", "; + } + + void PrintFieldSize(uint32_t size, const char* name) { + s += "\"total_size\":"; + s += NumToString(size); + s += "}"; + } + + size_t UType(uint8_t x, const char* name) override { + Named(x, name); + size_t size = InlineSize(ET_UTYPE, nullptr); + PrintFieldSize(size, name); + return size; + } + size_t Bool(bool x) override { + s += "{ \"value\": "; + s += x ? 
"true" : "false"; + s += ", "; + + size_t size = InlineSize(ET_BOOL, nullptr); + PrintFieldSize(size, nullptr); + return size; + } + size_t Char(int8_t x, const char* name) override { + Named(x, name); + size_t size = InlineSize(ET_UTYPE, nullptr); + PrintFieldSize(size, name); + return size; + } + size_t UChar(uint8_t x, const char* name) override { + Named(x, name); + size_t size = InlineSize(ET_UCHAR, nullptr); + PrintFieldSize(size, name); + return size; + } + size_t Short(int16_t x, const char* name) override { + Named(x, name); + size_t size = InlineSize(ET_SHORT, nullptr); + PrintFieldSize(size, name); + return size; + } + size_t UShort(uint16_t x, const char* name) override { + Named(x, name); + size_t size = InlineSize(ET_USHORT, nullptr); + PrintFieldSize(size, name); + return size; + } + size_t Int(int32_t x, const char* name) override { + Named(x, name); + size_t size = InlineSize(ET_INT, nullptr); + PrintFieldSize(size, name); + return size; + } + size_t UInt(uint32_t x, const char* name) override { + Named(x, name); + size_t size = InlineSize(ET_UINT, nullptr); + PrintFieldSize(size, name); + return size; + } + size_t Long(int64_t x) override { + Named(x, nullptr); + size_t size = InlineSize(ET_LONG, nullptr); + PrintFieldSize(size, nullptr); + return size; + } + size_t ULong(uint64_t x) override { + Named(x, nullptr); + size_t size = InlineSize(ET_ULONG, nullptr); + PrintFieldSize(size, nullptr); + return size; + } + size_t Float(float x) override { + Named(x, nullptr); + size_t size = InlineSize(ET_FLOAT, nullptr); + PrintFieldSize(size, nullptr); + return size; + } + size_t Double(double x) override { + Named(x, nullptr); + size_t size = InlineSize(ET_DOUBLE, nullptr); + PrintFieldSize(size, nullptr); + + return size; + } + size_t String(const struct String* str) override { + s += "{ \"value\": "; + + EscapeString(str->c_str(), str->size(), &s, true, false); + + s += ", "; + + PrintFieldSize(str->size(), nullptr); + + // s += "}"; + return 
str->size(); + } + size_t Unknown(const uint8_t*) override { + s += "(?)"; + PrintFieldSize(1, nullptr); + return 1; + } + void StartVector() override { + s += "{ \"value\": ["; + if (vector_delimited) { + s += d; + indent_level++; + append_indent(); + } else { + s += " "; + } + } + void EndVector(size_t vector_size) override { + if (vector_delimited) { + s += d; + indent_level--; + append_indent(); + } else { + s += " "; + } + s += "]"; + s += ", \"total_size\": "; + s += NumToString(vector_size); + s += "}"; + } + void Element(size_t i, ElementaryType /*type*/, + const TypeTable* /*type_table*/, + const uint8_t* /*val*/) override { + if (i) { + s += ","; + if (vector_delimited) { + s += d; + append_indent(); + } else { + s += " "; + } + } + } +}; + +} // namespace + +namespace tflite { +std::string FlatBufferSizeToJsonString( + const uint8_t* buffer, const flatbuffers::TypeTable* type_table) { + ToJsonWithSizeInfoVisitor tostring_visitor; + IterateFlatBuffer(buffer, type_table, &tostring_visitor); + return tostring_visitor.s; +} + +} // namespace tflite diff --git a/tensorflow/lite/micro/python/tflite_size/src/flatbuffer_size.h b/tensorflow/lite/micro/python/tflite_size/src/flatbuffer_size.h new file mode 100644 index 0000000..6e4243a --- /dev/null +++ b/tensorflow/lite/micro/python/tflite_size/src/flatbuffer_size.h @@ -0,0 +1,30 @@ +/* Copyright 2022 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/ +#ifndef TENSORFLOW_LITE_MICRO_PYTHON_TFLITE_SIZE_SRC_FLATBUFFERS_SIZE_H_ +#define TENSORFLOW_LITE_MICRO_PYTHON_TFLITE_SIZE_SRC_FLATBUFFERS_SIZE_H_ + +#include + +#include "flatbuffers/flatbuffers.h" +#include "flatbuffers/util.h" + +namespace tflite { + +std::string FlatBufferSizeToJsonString( + const uint8_t* buffer, const flatbuffers::TypeTable* type_table); + +} // namespace tflite + +#endif // TENSORFLOW_LITE_MICRO_PYTHON_TFLITE_SIZE_SRC_FLATBUFFERS_SIZE_H_ diff --git a/tensorflow/lite/micro/python/tflite_size/src/flatbuffer_size.py b/tensorflow/lite/micro/python/tflite_size/src/flatbuffer_size.py new file mode 100644 index 0000000..e4c6987 --- /dev/null +++ b/tensorflow/lite/micro/python/tflite_size/src/flatbuffer_size.py @@ -0,0 +1,57 @@ +# Copyright 2022 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# ============================================================================== +import json +import sys + +from tflite_micro.tensorflow.lite.micro.python.tflite_size.src import flatbuffer_size_wrapper_pybind +from tflite_micro.tensorflow.lite.micro.python.tflite_size.src import flatbuffer_size_graph +from tflite_micro.tensorflow.lite.micro.python.tflite_size.src import flatbuffer_size_graph_html_converter + + +def convert_tflite_to_html(in_flatbuf): + """ Given a input tflite flatbuffer, returns a html and a json with size info""" + size_wrapper = flatbuffer_size_wrapper_pybind.FlatbufferSize() + out_json = size_wrapper.convertToJsonString(in_flatbuf) + json_as_dict = json.loads(out_json) + + formatted_json = json.dumps(json_as_dict) + + graph_builder = flatbuffer_size_graph.FlatbufferSizeGraph() + graph_builder.create_graph(json_as_dict) + + html_converter = flatbuffer_size_graph_html_converter.HtmlConverter() + html_string = graph_builder.display_graph(html_converter) + + return html_string, formatted_json + + +def main(argv): + try: + tflite_input = argv[1] + html_output = argv[2] + except IndexError: + print("Usage: %s " % (argv[0])) + else: + with open(tflite_input, 'rb') as f: + in_flatbuf = f.read() + + html_string = convert_tflite_to_html(in_flatbuf)[0] + + with open(html_output, 'w') as f: + f.write(html_string) + + +if __name__ == '__main__': + main(sys.argv) diff --git a/tensorflow/lite/micro/python/tflite_size/src/flatbuffer_size_graph.py b/tensorflow/lite/micro/python/tflite_size/src/flatbuffer_size_graph.py new file mode 100644 index 0000000..7fb5cca --- /dev/null +++ b/tensorflow/lite/micro/python/tflite_size/src/flatbuffer_size_graph.py @@ -0,0 +1,100 @@ +# Copyright 2022 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================


class FNode:
  """ A node representing a flatbuffer element. """

  def __init__(self):
    # True when this node holds a scalar value (no children).
    self.isLeaf = False
    self.name = ''
    self.children = list()
    # Total storage size in bytes reported for this element.
    self.size = 0
    # Scalar payload; only meaningful when isLeaf is True.
    self.value = 0

  def print(self):
    # Debug helper: one-line summary of this node.
    print("%d, %s, %d, %d, %s" %
          (self.isLeaf, self.name, len(self.children), self.size, self.value))


"""
FlatbufferSizeGraph converts a flatbuffer in json string (with size info ) into a graph of nodes.

A basic node structure corresponds to the following json string:

field_name: {value: xxxx, total_size: }
field_name: {value: [ ], total_size: }

where value can be:
1. a dict (a new structure) that is not just value and total_size
2. a list (a new array)
3. a scalar (neither dict nor list).
"""


class FlatbufferSizeGraph:

  def __init__(self):
    self._root = FNode()
    self._verbose = False

  def _build_node_for_field(self, name, flatbuffer_json):
    """Builds an FNode for one `{value:..., total_size:...}` dict."""
    node = FNode()
    node.name = name

    if self._verbose:
      print("Start processing %s" % flatbuffer_json)
      node.print()

    if "value" in flatbuffer_json.keys(
    ) and "total_size" in flatbuffer_json.keys():
      node.size = flatbuffer_json["total_size"]
      self._process_value(node, flatbuffer_json["value"])
    else:
      # Fixed typo in message: "Filed" -> "Field".
      raise Exception("Field not a dict with value and total size!")

    if self._verbose:
      print("End processing %s" % flatbuffer_json)
      node.print()
    return node

  def _process_value(self, node, value_in_flatbuffer_json):
    """Populates `node` from a value that is a scalar, dict, or list."""
    if type(value_in_flatbuffer_json) is not dict and type(
        value_in_flatbuffer_json) is not list:
      # Scalar: store the payload directly on the leaf.
      node.value = value_in_flatbuffer_json
      node.isLeaf = True

    if type(value_in_flatbuffer_json) is dict:
      # A nested structure: each key is a child field. A bare
      # {value, total_size} dict here would indicate malformed input.
      if "value" in value_in_flatbuffer_json.keys(
      ) and "total_size" in value_in_flatbuffer_json.keys():
        raise Exception(
            "Field is another dict with value and total size again??")

      for name in value_in_flatbuffer_json.keys():
        node.children.append(
            self._build_node_for_field(name, value_in_flatbuffer_json[name]))
    elif type(value_in_flatbuffer_json) is list:
      for nidx, next_obj in enumerate(value_in_flatbuffer_json):
        leaf_name = "%s[%d]" % (node.name, nidx)
        # array: "operator_codes": {"value": [{"value": {"version": {"value": 2, "total_size": 4}}, "total_size": 4}], "total_size": 4}
        # so, each element must be of {"value: { field_name: }", total_size}
        node.children.append(self._build_node_for_field(leaf_name, next_obj))

  def create_graph(self, flatbuffer_in_json_with_size):
    """Builds the node graph from the size-annotated JSON dict."""
    self._root = self._build_node_for_field("ROOT",
                                            flatbuffer_in_json_with_size)

  def display_graph(self, graph_traveser):
    """Renders the graph with the given traverser (e.g. HtmlConverter)."""
    return graph_traveser.display_flatbuffer(self._root)
diff --git
a/tensorflow/lite/micro/python/tflite_size/src/flatbuffer_size_graph_html_converter.py b/tensorflow/lite/micro/python/tflite_size/src/flatbuffer_size_graph_html_converter.py new file mode 100644 index 0000000..2d05dd6 --- /dev/null +++ b/tensorflow/lite/micro/python/tflite_size/src/flatbuffer_size_graph_html_converter.py @@ -0,0 +1,109 @@ +# Copyright 2022 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================== + +HTML_HEAD = """ + + + + + + + + + + +
    +""" + +HTML_TAIL = """ +
+ + + + + + +""" + + +class HtmlConverter: + """ A class to convert the size graph to a tree of collapsible list """ + + def __init__(self): + self._html_body = HTML_HEAD + + def _draw_collapsible_list(self, node): + if node.isLeaf is True or len(node.children) == 0: + self._html_body += "
  • %s: %s (size: %d)
  • \n" % ( + node.name, node.value, node.size) + else: + self._html_body += "
  • %s (size: %d) \n" % ( + node.name, node.size) + self._html_body += "
      \n" + for node in node.children: + self._draw_collapsible_list(node) + self._html_body += "
    \n" + self._html_body += "
  • \n" + + def display_flatbuffer(self, root): + self._html_body = HTML_HEAD + self._draw_collapsible_list(root) + self._html_body += HTML_TAIL + return self._html_body diff --git a/tensorflow/lite/micro/python/tflite_size/src/flatbuffer_size_wrapper.cc b/tensorflow/lite/micro/python/tflite_size/src/flatbuffer_size_wrapper.cc new file mode 100644 index 0000000..816445e --- /dev/null +++ b/tensorflow/lite/micro/python/tflite_size/src/flatbuffer_size_wrapper.cc @@ -0,0 +1,38 @@ +/* Copyright 2022 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/ + +#include "flatbuffer_size_wrapper.h" + +#include + +#include "flatbuffer_size.h" +#include "tensorflow/lite/schema/reflection/schema_generated.h" + +namespace tflite { + +FlatbufferSizeWrapper::~FlatbufferSizeWrapper() {} + +FlatbufferSizeWrapper::FlatbufferSizeWrapper() {} + +std::string FlatbufferSizeWrapper::ConvertToJsonString( + const char* in_flatbuffer) { + std::string output = tflite::FlatBufferSizeToJsonString( + reinterpret_cast(in_flatbuffer), + tflite::ModelTypeTable()); + + return output; +} + +} // namespace tflite diff --git a/tensorflow/lite/micro/python/tflite_size/src/flatbuffer_size_wrapper.h b/tensorflow/lite/micro/python/tflite_size/src/flatbuffer_size_wrapper.h new file mode 100644 index 0000000..fc48f86 --- /dev/null +++ b/tensorflow/lite/micro/python/tflite_size/src/flatbuffer_size_wrapper.h @@ -0,0 +1,33 @@ +/* Copyright 2022 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/ +#ifndef TENSORFLOW_LITE_MICRO_PYTHON_TFLITE_SIZE_SRC_FLATBUFFERS_SIZE_WRAPPER_H_ +#define TENSORFLOW_LITE_MICRO_PYTHON_TFLITE_SIZE_SRC_FLATBUFFERS_SIZE_WRAPPER_H_ + +#include + +#include + +namespace tflite { + +class FlatbufferSizeWrapper { + public: + FlatbufferSizeWrapper(); + ~FlatbufferSizeWrapper(); + + std::string ConvertToJsonString(const char* in_flatbuffer); +}; + +} // namespace tflite +#endif // TENSORFLOW_LITE_MICRO_PYTHON_TFLITE_SIZE_SRC_FLATBUFFERS_SIZE_WRAPPER_H_ diff --git a/tensorflow/lite/micro/python/tflite_size/src/flatbuffer_size_wrapper_pybind.cc b/tensorflow/lite/micro/python/tflite_size/src/flatbuffer_size_wrapper_pybind.cc new file mode 100644 index 0000000..e8786c1 --- /dev/null +++ b/tensorflow/lite/micro/python/tflite_size/src/flatbuffer_size_wrapper_pybind.cc @@ -0,0 +1,32 @@ +/* Copyright 2022 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/ + +#include + +#include "flatbuffer_size_wrapper.h" + +namespace py = pybind11; + +PYBIND11_MODULE(flatbuffer_size_wrapper_pybind, m) { + m.doc() = "FlatbufferSize"; + + py::class_(m, "FlatbufferSize") + .def(py::init([]() { + return std::unique_ptr( + new tflite::FlatbufferSizeWrapper()); + })) + .def("convertToJsonString", + &tflite::FlatbufferSizeWrapper::ConvertToJsonString); +} diff --git a/tensorflow/lite/micro/python/tflite_size/tests/BUILD b/tensorflow/lite/micro/python/tflite_size/tests/BUILD new file mode 100644 index 0000000..1b4c5b2 --- /dev/null +++ b/tensorflow/lite/micro/python/tflite_size/tests/BUILD @@ -0,0 +1,31 @@ +load("@tflm_pip_deps//:requirements.bzl", "requirement") + +licenses(["notice"]) + +filegroup( + name = "test_resources", + srcs = [ + "gold_simple_add_model_html.txt", + "gold_simple_add_model_json.txt", + "simple_add_model.tflite", + ], +) + +py_test( + name = "flatbuffer_size_test", + srcs = ["flatbuffer_size_test.py"], + data = [ + ":test_resources", + ], + main = "flatbuffer_size_test.py", + python_version = "PY3", + tags = [ + "noasan", + "nomsan", # Python doesn't like these symbols from flatbuffer_size_wrapper_pybind.so + "noubsan", + ], + deps = [ + requirement("tensorflow-cpu"), + "//tensorflow/lite/micro/python/tflite_size/src:flatbuffer_size", + ], +) diff --git a/tensorflow/lite/micro/python/tflite_size/tests/flatbuffer_size_test.py b/tensorflow/lite/micro/python/tflite_size/tests/flatbuffer_size_test.py new file mode 100644 index 0000000..c4ed296 --- /dev/null +++ b/tensorflow/lite/micro/python/tflite_size/tests/flatbuffer_size_test.py @@ -0,0 +1,46 @@ +# Copyright 2022 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================== +import os + +from tensorflow.python.framework import test_util +from tensorflow.python.platform import test +from tflite_micro.tensorflow.lite.micro.python.tflite_size.src import flatbuffer_size + + +class FlatbufferSizeTest(test_util.TensorFlowTestCase): + + def _compareFile(self, file1, data2): + with open(file1, 'rb') as f1: + data1 = f1.read() + self.assertEqual(data1, data2.encode()) + + def testCompareWithTFLite(self): + root_dir = os.path.split(os.path.abspath(__file__))[0] + + in_filename = root_dir + '/simple_add_model.tflite' + gold_json_file = root_dir + '/gold_simple_add_model_json.txt' + gold_html_file = root_dir + '/gold_simple_add_model_html.txt' + + with open(in_filename, 'rb') as f: + model = f.read() + + html_string, formatted_json = flatbuffer_size.convert_tflite_to_html(model) + + self._compareFile(gold_json_file, formatted_json) + self._compareFile(gold_html_file, html_string) + + +if __name__ == '__main__': + test.main() diff --git a/tensorflow/lite/micro/python/tflite_size/tests/gold_simple_add_model_html.txt b/tensorflow/lite/micro/python/tflite_size/tests/gold_simple_add_model_html.txt new file mode 100644 index 0000000..29631ca --- /dev/null +++ b/tensorflow/lite/micro/python/tflite_size/tests/gold_simple_add_model_html.txt @@ -0,0 +1,312 @@ + + + + + + + + + + + +
      +
    • ROOT (size: 345) +
        +
      • version: 3 (size: 4)
      • +
      • operator_codes (size: 4) +
          +
        • operator_codes[0] (size: 4) +
            +
          • version: 2 (size: 4)
          • +
          +
        • +
        +
      • +
      • subgraphs (size: 243) +
          +
        • subgraphs[0] (size: 243) +
            +
          • tensors (size: 214) +
              +
            • tensors[0] (size: 74) +
                +
              • shape (size: 16) +
                  +
                • shape[0]: 1 (size: 4)
                • +
                • shape[1]: 128 (size: 4)
                • +
                • shape[2]: 128 (size: 4)
                • +
                • shape[3]: 1 (size: 4)
                • +
                +
              • +
              • type: INT8 (size: 1)
              • +
              • buffer: 1 (size: 4)
              • +
              • name: serving_default_input_1:0 (size: 25)
              • +
              • quantization (size: 12) +
                  +
                • scale (size: 4) +
                    +
                  • scale[0]: 0.040725 (size: 4)
                  • +
                  +
                • +
                • zero_point (size: 8) +
                    +
                  • zero_point[0]: -2 (size: 8)
                  • +
                  +
                • +
                +
              • +
              • shape_signature (size: 16) +
                  +
                • shape_signature[0]: -1 (size: 4)
                • +
                • shape_signature[1]: 128 (size: 4)
                • +
                • shape_signature[2]: 128 (size: 4)
                • +
                • shape_signature[3]: 1 (size: 4)
                • +
                +
              • +
              +
            • +
            • tensors[1] (size: 74) +
                +
              • shape (size: 16) +
                  +
                • shape[0]: 1 (size: 4)
                • +
                • shape[1]: 128 (size: 4)
                • +
                • shape[2]: 128 (size: 4)
                • +
                • shape[3]: 1 (size: 4)
                • +
                +
              • +
              • type: INT8 (size: 1)
              • +
              • buffer: 2 (size: 4)
              • +
              • name: serving_default_input_2:0 (size: 25)
              • +
              • quantization (size: 12) +
                  +
                • scale (size: 4) +
                    +
                  • scale[0]: 0.041246 (size: 4)
                  • +
                  +
                • +
                • zero_point (size: 8) +
                    +
                  • zero_point[0]: -1 (size: 8)
                  • +
                  +
                • +
                +
              • +
              • shape_signature (size: 16) +
                  +
                • shape_signature[0]: -1 (size: 4)
                • +
                • shape_signature[1]: 128 (size: 4)
                • +
                • shape_signature[2]: 128 (size: 4)
                • +
                • shape_signature[3]: 1 (size: 4)
                • +
                +
              • +
              +
            • +
            • tensors[2] (size: 66) +
                +
              • shape (size: 16) +
                  +
                • shape[0]: 1 (size: 4)
                • +
                • shape[1]: 128 (size: 4)
                • +
                • shape[2]: 128 (size: 4)
                • +
                • shape[3]: 1 (size: 4)
                • +
                +
              • +
              • type: INT8 (size: 1)
              • +
              • buffer: 3 (size: 4)
              • +
              • name: PartitionedCall:0 (size: 17)
              • +
              • quantization (size: 12) +
                  +
                • scale (size: 4) +
                    +
                  • scale[0]: 0.058377 (size: 4)
                  • +
                  +
                • +
                • zero_point (size: 8) +
                    +
                  • zero_point[0]: -3 (size: 8)
                  • +
                  +
                • +
                +
              • +
              • shape_signature (size: 16) +
                  +
                • shape_signature[0]: -1 (size: 4)
                • +
                • shape_signature[1]: 128 (size: 4)
                • +
                • shape_signature[2]: 128 (size: 4)
                • +
                • shape_signature[3]: 1 (size: 4)
                • +
                +
              • +
              +
            • +
            +
          • +
          • inputs (size: 8) +
              +
            • inputs[0]: 0 (size: 4)
            • +
            • inputs[1]: 1 (size: 4)
            • +
            +
          • +
          • outputs (size: 4) +
              +
            • outputs[0]: 2 (size: 4)
            • +
            +
          • +
          • operators (size: 13) +
              +
            • operators[0] (size: 13) +
                +
              • inputs (size: 8) +
                  +
                • inputs[0]: 0 (size: 4)
                • +
                • inputs[1]: 1 (size: 4)
                • +
                +
              • +
              • outputs (size: 4) +
                  +
                • outputs[0]: 2 (size: 4)
                • +
                +
              • +
              • builtin_options_type: AddOptions (size: 1)
              • +
              • builtin_options: 0 (size: 0)
              • +
              +
            • +
            +
          • +
          • name: main (size: 4)
          • +
          +
        • +
        +
      • +
      • description: MLIR Converted. (size: 15)
      • +
      • buffers (size: 16) +
          +
        • buffers[0]: 0 (size: 0)
        • +
        • buffers[1]: 0 (size: 0)
        • +
        • buffers[2]: 0 (size: 0)
        • +
        • buffers[3]: 0 (size: 0)
        • +
        • buffers[4] (size: 16) +
            +
          • data (size: 16) +
              +
            • data[0]: 49 (size: 1)
            • +
            • data[1]: 46 (size: 1)
            • +
            • data[2]: 49 (size: 1)
            • +
            • data[3]: 52 (size: 1)
            • +
            • data[4]: 46 (size: 1)
            • +
            • data[5]: 48 (size: 1)
            • +
            • data[6]: 0 (size: 1)
            • +
            • data[7]: 0 (size: 1)
            • +
            • data[8]: 0 (size: 1)
            • +
            • data[9]: 0 (size: 1)
            • +
            • data[10]: 0 (size: 1)
            • +
            • data[11]: 0 (size: 1)
            • +
            • data[12]: 0 (size: 1)
            • +
            • data[13]: 0 (size: 1)
            • +
            • data[14]: 0 (size: 1)
            • +
            • data[15]: 0 (size: 1)
            • +
            +
          • +
          +
        • +
        +
      • +
      • metadata (size: 23) +
          +
        • metadata[0] (size: 23) +
            +
          • name: min_runtime_version (size: 19)
          • +
          • buffer: 4 (size: 4)
          • +
          +
        • +
        +
      • +
      • signature_defs (size: 40) +
          +
        • signature_defs[0] (size: 40) +
            +
          • inputs (size: 18) +
              +
            • inputs[0] (size: 7) +
                +
              • name: input_1 (size: 7)
              • +
              +
            • +
            • inputs[1] (size: 11) +
                +
              • name: input_2 (size: 7)
              • +
              • tensor_index: 1 (size: 4)
              • +
              +
            • +
            +
          • +
          • outputs (size: 7) +
              +
            • outputs[0] (size: 7) +
                +
              • name: add (size: 3)
              • +
              • tensor_index: 2 (size: 4)
              • +
              +
            • +
            +
          • +
          • signature_key: serving_default (size: 15)
          • +
          +
        • +
        +
      • +
      +
    • + +
    + + + + + + diff --git a/tensorflow/lite/micro/python/tflite_size/tests/gold_simple_add_model_json.txt b/tensorflow/lite/micro/python/tflite_size/tests/gold_simple_add_model_json.txt new file mode 100644 index 0000000..ec6e9dd --- /dev/null +++ b/tensorflow/lite/micro/python/tflite_size/tests/gold_simple_add_model_json.txt @@ -0,0 +1 @@ +{"value": {"version": {"value": 3, "total_size": 4}, "operator_codes": {"value": [{"value": {"version": {"value": 2, "total_size": 4}}, "total_size": 4}], "total_size": 4}, "subgraphs": {"value": [{"value": {"tensors": {"value": [{"value": {"shape": {"value": [{"value": 1, "total_size": 4}, {"value": 128, "total_size": 4}, {"value": 128, "total_size": 4}, {"value": 1, "total_size": 4}], "total_size": 16}, "type": {"value": "INT8", "total_size": 1}, "buffer": {"value": 1, "total_size": 4}, "name": {"value": "serving_default_input_1:0", "total_size": 25}, "quantization": {"value": {"scale": {"value": [{"value": 0.040725, "total_size": 4}], "total_size": 4}, "zero_point": {"value": [{"value": -2, "total_size": 8}], "total_size": 8}}, "total_size": 12}, "shape_signature": {"value": [{"value": -1, "total_size": 4}, {"value": 128, "total_size": 4}, {"value": 128, "total_size": 4}, {"value": 1, "total_size": 4}], "total_size": 16}}, "total_size": 74}, {"value": {"shape": {"value": [{"value": 1, "total_size": 4}, {"value": 128, "total_size": 4}, {"value": 128, "total_size": 4}, {"value": 1, "total_size": 4}], "total_size": 16}, "type": {"value": "INT8", "total_size": 1}, "buffer": {"value": 2, "total_size": 4}, "name": {"value": "serving_default_input_2:0", "total_size": 25}, "quantization": {"value": {"scale": {"value": [{"value": 0.041246, "total_size": 4}], "total_size": 4}, "zero_point": {"value": [{"value": -1, "total_size": 8}], "total_size": 8}}, "total_size": 12}, "shape_signature": {"value": [{"value": -1, "total_size": 4}, {"value": 128, "total_size": 4}, {"value": 128, "total_size": 4}, {"value": 1, "total_size": 4}], 
"total_size": 16}}, "total_size": 74}, {"value": {"shape": {"value": [{"value": 1, "total_size": 4}, {"value": 128, "total_size": 4}, {"value": 128, "total_size": 4}, {"value": 1, "total_size": 4}], "total_size": 16}, "type": {"value": "INT8", "total_size": 1}, "buffer": {"value": 3, "total_size": 4}, "name": {"value": "PartitionedCall:0", "total_size": 17}, "quantization": {"value": {"scale": {"value": [{"value": 0.058377, "total_size": 4}], "total_size": 4}, "zero_point": {"value": [{"value": -3, "total_size": 8}], "total_size": 8}}, "total_size": 12}, "shape_signature": {"value": [{"value": -1, "total_size": 4}, {"value": 128, "total_size": 4}, {"value": 128, "total_size": 4}, {"value": 1, "total_size": 4}], "total_size": 16}}, "total_size": 66}], "total_size": 214}, "inputs": {"value": [{"value": 0, "total_size": 4}, {"value": 1, "total_size": 4}], "total_size": 8}, "outputs": {"value": [{"value": 2, "total_size": 4}], "total_size": 4}, "operators": {"value": [{"value": {"inputs": {"value": [{"value": 0, "total_size": 4}, {"value": 1, "total_size": 4}], "total_size": 8}, "outputs": {"value": [{"value": 2, "total_size": 4}], "total_size": 4}, "builtin_options_type": {"value": "AddOptions", "total_size": 1}, "builtin_options": {"value": {}, "total_size": 0}}, "total_size": 13}], "total_size": 13}, "name": {"value": "main", "total_size": 4}}, "total_size": 243}], "total_size": 243}, "description": {"value": "MLIR Converted.", "total_size": 15}, "buffers": {"value": [{"value": {}, "total_size": 0}, {"value": {}, "total_size": 0}, {"value": {}, "total_size": 0}, {"value": {}, "total_size": 0}, {"value": {"data": {"value": [{"value": 49, "total_size": 1}, {"value": 46, "total_size": 1}, {"value": 49, "total_size": 1}, {"value": 52, "total_size": 1}, {"value": 46, "total_size": 1}, {"value": 48, "total_size": 1}, {"value": 0, "total_size": 1}, {"value": 0, "total_size": 1}, {"value": 0, "total_size": 1}, {"value": 0, "total_size": 1}, {"value": 0, "total_size": 1}, 
{"value": 0, "total_size": 1}, {"value": 0, "total_size": 1}, {"value": 0, "total_size": 1}, {"value": 0, "total_size": 1}, {"value": 0, "total_size": 1}], "total_size": 16}}, "total_size": 16}], "total_size": 16}, "metadata": {"value": [{"value": {"name": {"value": "min_runtime_version", "total_size": 19}, "buffer": {"value": 4, "total_size": 4}}, "total_size": 23}], "total_size": 23}, "signature_defs": {"value": [{"value": {"inputs": {"value": [{"value": {"name": {"value": "input_1", "total_size": 7}}, "total_size": 7}, {"value": {"name": {"value": "input_2", "total_size": 7}, "tensor_index": {"value": 1, "total_size": 4}}, "total_size": 11}], "total_size": 18}, "outputs": {"value": [{"value": {"name": {"value": "add", "total_size": 3}, "tensor_index": {"value": 2, "total_size": 4}}, "total_size": 7}], "total_size": 7}, "signature_key": {"value": "serving_default", "total_size": 15}}, "total_size": 40}], "total_size": 40}}, "total_size": 345} \ No newline at end of file diff --git a/tensorflow/lite/micro/python/tflite_size/tests/simple_add_model.tflite b/tensorflow/lite/micro/python/tflite_size/tests/simple_add_model.tflite new file mode 100644 index 0000000..2a80c50 Binary files /dev/null and b/tensorflow/lite/micro/python/tflite_size/tests/simple_add_model.tflite differ diff --git a/tensorflow/lite/micro/recording_micro_allocator.cc b/tensorflow/lite/micro/recording_micro_allocator.cc new file mode 100644 index 0000000..f41dba6 --- /dev/null +++ b/tensorflow/lite/micro/recording_micro_allocator.cc @@ -0,0 +1,251 @@ +/* Copyright 2023 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. 
+You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#include "tensorflow/lite/micro/recording_micro_allocator.h" + +#include "tensorflow/lite/kernels/internal/compatibility.h" +#include "tensorflow/lite/micro/arena_allocator/recording_single_arena_buffer_allocator.h" +#include "tensorflow/lite/micro/compatibility.h" +#include "tensorflow/lite/micro/memory_helpers.h" +#include "tensorflow/lite/micro/memory_planner/greedy_memory_planner.h" +#include "tensorflow/lite/micro/micro_allocator.h" +#include "tensorflow/lite/micro/micro_log.h" + +namespace tflite { + +size_t RecordingMicroAllocator::GetDefaultTailUsage() { + // RecordingMicroAllocator inherits from MicroAllocator and its tail usage is + // similar with MicroAllocator with SingleArenaBufferAllocator and + // MicroAllocator being replaced. 
+ return MicroAllocator::GetDefaultTailUsage( + /*is_memory_planner_given=*/false) + + AlignSizeUp() - + AlignSizeUp() + + AlignSizeUp() - AlignSizeUp(); +} + +RecordingMicroAllocator::RecordingMicroAllocator( + RecordingSingleArenaBufferAllocator* recording_memory_allocator, + MicroMemoryPlanner* memory_planner) + : MicroAllocator(recording_memory_allocator, memory_planner), + recording_memory_allocator_(recording_memory_allocator) {} + +RecordingMicroAllocator* RecordingMicroAllocator::Create(uint8_t* tensor_arena, + size_t arena_size) { + RecordingSingleArenaBufferAllocator* simple_memory_allocator = + RecordingSingleArenaBufferAllocator::Create(tensor_arena, arena_size); + TFLITE_DCHECK(simple_memory_allocator != nullptr); + + uint8_t* memory_planner_buffer = + simple_memory_allocator->AllocatePersistentBuffer( + sizeof(GreedyMemoryPlanner), alignof(GreedyMemoryPlanner)); + GreedyMemoryPlanner* memory_planner = + new (memory_planner_buffer) GreedyMemoryPlanner(); + + uint8_t* allocator_buffer = simple_memory_allocator->AllocatePersistentBuffer( + sizeof(RecordingMicroAllocator), alignof(RecordingMicroAllocator)); + RecordingMicroAllocator* allocator = new (allocator_buffer) + RecordingMicroAllocator(simple_memory_allocator, memory_planner); + return allocator; +} + +RecordedAllocation RecordingMicroAllocator::GetRecordedAllocation( + RecordedAllocationType allocation_type) const { + switch (allocation_type) { + case RecordedAllocationType::kTfLiteEvalTensorData: + return recorded_tflite_eval_tensor_data_; + case RecordedAllocationType::kPersistentTfLiteTensorData: + return recorded_persistent_tflite_tensor_data_; + case RecordedAllocationType::kPersistentTfLiteTensorQuantizationData: + return recorded_persistent_tflite_tensor_quantization_data_; + case RecordedAllocationType::kPersistentBufferData: + return recorded_persistent_buffer_data_; + case RecordedAllocationType::kTfLiteTensorVariableBufferData: + return recorded_tflite_tensor_variable_buffer_data_; + 
case RecordedAllocationType::kNodeAndRegistrationArray: + return recorded_node_and_registration_array_data_; + case RecordedAllocationType::kOpData: + return recorded_op_data_; + } + MicroPrintf("Invalid allocation type supplied: %d", allocation_type); + return RecordedAllocation(); +} + +const RecordingSingleArenaBufferAllocator* +RecordingMicroAllocator::GetSimpleMemoryAllocator() const { + return recording_memory_allocator_; +} + +void RecordingMicroAllocator::PrintAllocations() const { + MicroPrintf("[RecordingMicroAllocator] Arena allocation total %d bytes", + recording_memory_allocator_->GetUsedBytes()); + MicroPrintf("[RecordingMicroAllocator] Arena allocation head %d bytes", + recording_memory_allocator_->GetNonPersistentUsedBytes()); + MicroPrintf("[RecordingMicroAllocator] Arena allocation tail %d bytes", + recording_memory_allocator_->GetPersistentUsedBytes()); + PrintRecordedAllocation(RecordedAllocationType::kTfLiteEvalTensorData, + "TfLiteEvalTensor data", "allocations"); + PrintRecordedAllocation(RecordedAllocationType::kPersistentTfLiteTensorData, + "Persistent TfLiteTensor data", "tensors"); + PrintRecordedAllocation( + RecordedAllocationType::kPersistentTfLiteTensorQuantizationData, + "Persistent TfLiteTensor quantization data", "allocations"); + PrintRecordedAllocation(RecordedAllocationType::kPersistentBufferData, + "Persistent buffer data", "allocations"); + PrintRecordedAllocation( + RecordedAllocationType::kTfLiteTensorVariableBufferData, + "TfLiteTensor variable buffer data", "allocations"); + PrintRecordedAllocation(RecordedAllocationType::kNodeAndRegistrationArray, + "NodeAndRegistration struct", + "NodeAndRegistration structs"); + PrintRecordedAllocation(RecordedAllocationType::kOpData, + "Operator runtime data", "OpData structs"); +} + +void* RecordingMicroAllocator::AllocatePersistentBuffer(size_t bytes) { + RecordedAllocation allocations = SnapshotAllocationUsage(); + void* buffer = MicroAllocator::AllocatePersistentBuffer(bytes); + 
RecordAllocationUsage(allocations, recorded_persistent_buffer_data_); + + return buffer; +} + +void RecordingMicroAllocator::PrintRecordedAllocation( + RecordedAllocationType allocation_type, const char* allocation_name, + const char* allocation_description) const { +#ifndef TF_LITE_STRIP_ERROR_STRINGS + RecordedAllocation allocation = GetRecordedAllocation(allocation_type); + if (allocation.used_bytes > 0 || allocation.requested_bytes > 0) { + MicroPrintf( + "[RecordingMicroAllocator] '%s' used %d bytes with alignment overhead " + "(requested %d bytes for %d %s)", + allocation_name, allocation.used_bytes, allocation.requested_bytes, + allocation.count, allocation_description); + } +#endif +} + +TfLiteStatus RecordingMicroAllocator::AllocateNodeAndRegistrations( + const Model* model, SubgraphAllocations* subgraph_allocations) { + RecordedAllocation allocations = SnapshotAllocationUsage(); + + TfLiteStatus status = + MicroAllocator::AllocateNodeAndRegistrations(model, subgraph_allocations); + + RecordAllocationUsage(allocations, + recorded_node_and_registration_array_data_); + + for (size_t subgraph_idx = 0; subgraph_idx < model->subgraphs()->size(); + subgraph_idx++) { + // The allocation count in SingleArenaBufferAllocator will only be 1. To + // provide better logging, decrement by 1 and add in the actual number of + // operators used in the graph: The allocation for this recording will + // always be 1. This is because the parent class mallocs one large + // allocation for the number of nodes in the graph (e.g. + // sizeof(NodeAndRegistration) * num_nodes). 
To prevent extra overhead and + // potential for fragmentation, manually adjust the accounting by + // decrementing by 1 and adding the actual number of nodes used in the + // graph: + if (model->subgraphs()->Get(subgraph_idx)->operators()) { + recorded_node_and_registration_array_data_.count += + model->subgraphs()->Get(subgraph_idx)->operators()->size() - 1; + } else { + recorded_node_and_registration_array_data_.count -= 1; + } + } + return status; +} + +TfLiteStatus RecordingMicroAllocator::AllocateTfLiteEvalTensors( + const Model* model, SubgraphAllocations* subgraph_allocations) { + RecordedAllocation allocations = SnapshotAllocationUsage(); + + TfLiteStatus status = + MicroAllocator::AllocateTfLiteEvalTensors(model, subgraph_allocations); + + RecordAllocationUsage(allocations, recorded_tflite_eval_tensor_data_); + + for (size_t subgraph_idx = 0; subgraph_idx < model->subgraphs()->size(); + subgraph_idx++) { + // The allocation for this recording will always be 1. This is because the + // parent class mallocs one large allocation for the number of tensors in + // the graph (e.g. sizeof(TfLiteEvalTensor) * num_tensors). 
To prevent extra + // overhead and potential for fragmentation, manually adjust the accounting + // by decrementing by 1 and adding the actual number of tensors used in the + // graph: + recorded_tflite_eval_tensor_data_.count += + model->subgraphs()->Get(subgraph_idx)->tensors()->size() - 1; + } + return status; +} + +TfLiteStatus RecordingMicroAllocator::AllocateVariables( + const SubGraph* subgraph, TfLiteEvalTensor* eval_tensors, + const int32_t* offline_planner_offsets) { + RecordedAllocation allocations = SnapshotAllocationUsage(); + + TfLiteStatus status = MicroAllocator::AllocateVariables( + subgraph, eval_tensors, offline_planner_offsets); + + RecordAllocationUsage(allocations, + recorded_tflite_tensor_variable_buffer_data_); + return status; +} + +TfLiteTensor* +RecordingMicroAllocator::AllocatePersistentTfLiteTensorInternal() { + RecordedAllocation allocations = SnapshotAllocationUsage(); + + TfLiteTensor* result = + MicroAllocator::AllocatePersistentTfLiteTensorInternal(); + + RecordAllocationUsage(allocations, recorded_persistent_tflite_tensor_data_); + return result; +} + +TfLiteStatus RecordingMicroAllocator::PopulateTfLiteTensorFromFlatbuffer( + const Model* model, TfLiteTensor* tensor, int tensor_index, + int subgraph_index, bool allocate_temp) { + RecordedAllocation allocations = SnapshotAllocationUsage(); + + TfLiteStatus status = MicroAllocator::PopulateTfLiteTensorFromFlatbuffer( + model, tensor, tensor_index, subgraph_index, allocate_temp); + + RecordAllocationUsage(allocations, + recorded_persistent_tflite_tensor_quantization_data_); + return status; +} + +RecordedAllocation RecordingMicroAllocator::SnapshotAllocationUsage() const { + return {/*requested_bytes=*/recording_memory_allocator_->GetRequestedBytes(), + /*used_bytes=*/recording_memory_allocator_->GetUsedBytes(), + /*count=*/recording_memory_allocator_->GetAllocatedCount()}; +} + +void RecordingMicroAllocator::RecordAllocationUsage( + const RecordedAllocation& snapshotted_allocation, 
+ RecordedAllocation& recorded_allocation) { + recorded_allocation.requested_bytes += + recording_memory_allocator_->GetRequestedBytes() - + snapshotted_allocation.requested_bytes; + recorded_allocation.used_bytes += + recording_memory_allocator_->GetUsedBytes() - + snapshotted_allocation.used_bytes; + recorded_allocation.count += + recording_memory_allocator_->GetAllocatedCount() - + snapshotted_allocation.count; +} + +} // namespace tflite diff --git a/tensorflow/lite/micro/recording_micro_allocator.h b/tensorflow/lite/micro/recording_micro_allocator.h new file mode 100644 index 0000000..b6f6926 --- /dev/null +++ b/tensorflow/lite/micro/recording_micro_allocator.h @@ -0,0 +1,125 @@ +/* Copyright 2023 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#ifndef TENSORFLOW_LITE_MICRO_RECORDING_MICRO_ALLOCATOR_H_ +#define TENSORFLOW_LITE_MICRO_RECORDING_MICRO_ALLOCATOR_H_ + +#include "tensorflow/lite/micro/arena_allocator/recording_single_arena_buffer_allocator.h" +#include "tensorflow/lite/micro/compatibility.h" +#include "tensorflow/lite/micro/micro_allocator.h" + +namespace tflite { + +// List of buckets currently recorded by this class. Each type keeps a list of +// allocated information during model initialization. +// TODO(b/169834511): Add tracking for scratch buffer allocations. 
+enum class RecordedAllocationType { + kTfLiteEvalTensorData, + kPersistentTfLiteTensorData, + kPersistentTfLiteTensorQuantizationData, + kPersistentBufferData, + kTfLiteTensorVariableBufferData, + kNodeAndRegistrationArray, + kOpData, +}; + +// Container for holding information about allocation recordings by a given +// type. Each recording contains the number of bytes requested, the actual bytes +// allocated (can defer from requested by alignment), and the number of items +// allocated. +struct RecordedAllocation { + size_t requested_bytes; + size_t used_bytes; + size_t count; +}; + +// Utility subclass of MicroAllocator that records all allocations +// inside the arena. A summary of allocations can be logged through the +// ErrorReporter by invoking LogAllocations(). This special allocator requires +// an instance of RecordingSingleArenaBufferAllocator to capture allocations in +// the head and tail. Arena allocation recording can be retrieved by type +// through the GetRecordedAllocation() function. This class should only be used +// for auditing memory usage or integration testing. +class RecordingMicroAllocator : public MicroAllocator { + public: + static RecordingMicroAllocator* Create(uint8_t* tensor_arena, + size_t arena_size); + + // Returns the fixed amount of memory overhead of RecordingMicroAllocator. + static size_t GetDefaultTailUsage(); + + // Returns the recorded allocations information for a given allocation type. + RecordedAllocation GetRecordedAllocation( + RecordedAllocationType allocation_type) const; + + const RecordingSingleArenaBufferAllocator* GetSimpleMemoryAllocator() const; + + // Logs out through the ErrorReporter all allocation recordings by type + // defined in RecordedAllocationType. 
+ void PrintAllocations() const; + + void* AllocatePersistentBuffer(size_t bytes) override; + + protected: + TfLiteStatus AllocateNodeAndRegistrations( + const Model* model, SubgraphAllocations* subgraph_allocations) override; + TfLiteStatus AllocateTfLiteEvalTensors( + const Model* model, SubgraphAllocations* subgraph_allocations) override; + TfLiteStatus AllocateVariables( + const SubGraph* subgraph, TfLiteEvalTensor* eval_tensors, + const int32_t* offline_planner_offsets) override; + // TODO(b/162311891): Once all kernels have been updated to the new API drop + // this method. It is only used to record TfLiteTensor persistent allocations. + TfLiteTensor* AllocatePersistentTfLiteTensorInternal() override; + + // TODO(b/162311891): Once all kernels have been updated to the new API drop + // this function since all allocations for quantized data will take place in + // the temp section. + TfLiteStatus PopulateTfLiteTensorFromFlatbuffer(const Model* model, + TfLiteTensor* tensor, + int tensor_index, + int subgraph_index, + bool allocate_temp) override; + + private: + RecordingMicroAllocator(RecordingSingleArenaBufferAllocator* memory_allocator, + MicroMemoryPlanner* memory_planner); + + void PrintRecordedAllocation(RecordedAllocationType allocation_type, + const char* allocation_name, + const char* allocation_description) const; + + RecordedAllocation SnapshotAllocationUsage() const; + void RecordAllocationUsage(const RecordedAllocation& snapshotted_allocation, + RecordedAllocation& recorded_allocation); + + const RecordingSingleArenaBufferAllocator* recording_memory_allocator_; + + RecordedAllocation recorded_tflite_eval_tensor_data_ = {}; + RecordedAllocation recorded_persistent_tflite_tensor_data_ = {}; + RecordedAllocation recorded_persistent_tflite_tensor_quantization_data_ = {}; + RecordedAllocation recorded_persistent_buffer_data_ = {}; + RecordedAllocation recorded_tflite_tensor_variable_buffer_data_ = {}; + RecordedAllocation 
recorded_node_and_registration_array_data_ = {}; + + // TODO(b/187993291): Re-enable OpData allocating tracking. + RecordedAllocation recorded_op_data_ = {}; + + TF_LITE_REMOVE_VIRTUAL_DELETE +}; + +} // namespace tflite + +#endif // TENSORFLOW_LITE_MICRO_RECORDING_MICRO_ALLOCATOR_H_ diff --git a/tensorflow/lite/micro/recording_micro_allocator_test.cc b/tensorflow/lite/micro/recording_micro_allocator_test.cc new file mode 100644 index 0000000..9d3a596 --- /dev/null +++ b/tensorflow/lite/micro/recording_micro_allocator_test.cc @@ -0,0 +1,323 @@ +/* Copyright 2020 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#include "tensorflow/lite/micro/recording_micro_allocator.h" + +#include "tensorflow/lite/micro/micro_allocator.h" +#include "tensorflow/lite/micro/test_helpers.h" +#include "tensorflow/lite/micro/testing/micro_test.h" +#include "tensorflow/lite/micro/testing/test_conv_model.h" + +#define TF_LITE_TENSOR_STRUCT_SIZE sizeof(TfLiteTensor) +#define TF_LITE_EVAL_TENSOR_STRUCT_SIZE sizeof(TfLiteEvalTensor) +#define TF_LITE_AFFINE_QUANTIZATION_SIZE sizeof(TfLiteAffineQuantization) +#define NODE_AND_REGISTRATION_STRUCT_SIZE sizeof(tflite::NodeAndRegistration) + +// TODO(b/158303868): Move tests into anonymous namespace. 
+namespace { + +constexpr int kTestConvArenaSize = 1024 * 12; + +} // namespace + +TF_LITE_MICRO_TESTS_BEGIN + +TF_LITE_MICRO_TEST(TestRecordsTfLiteEvalTensorArrayData) { + tflite::ScratchBufferHandle* scratch_buffer_handles = nullptr; + tflite::testing::TestingOpResolver ops_resolver; + const tflite::Model* model = tflite::GetModel(kTestConvModelData); + uint8_t arena[kTestConvArenaSize]; + + tflite::RecordingMicroAllocator* micro_allocator = + tflite::RecordingMicroAllocator::Create(arena, kTestConvArenaSize); + // TODO(b/158102673): ugly workaround for not having fatal assertions. Same + // throughout this file. + TF_LITE_MICRO_EXPECT(micro_allocator != nullptr); + if (micro_allocator == nullptr) return 1; + + tflite::SubgraphAllocations* subgraph_allocations = + micro_allocator->StartModelAllocation(model); + TF_LITE_MICRO_EXPECT(nullptr != subgraph_allocations); + if (subgraph_allocations == nullptr) return 1; + + TfLiteStatus status = micro_allocator->FinishModelAllocation( + model, subgraph_allocations, &scratch_buffer_handles); + TF_LITE_MICRO_EXPECT_EQ(status, kTfLiteOk); + if (status != kTfLiteOk) return 1; + + micro_allocator->PrintAllocations(); + + tflite::RecordedAllocation recorded_allocation = + micro_allocator->GetRecordedAllocation( + tflite::RecordedAllocationType::kTfLiteEvalTensorData); + + micro_allocator->PrintAllocations(); + + size_t tensors_count = tflite::testing::GetModelTensorCount(model); + + TF_LITE_MICRO_EXPECT_EQ(recorded_allocation.count, tensors_count); + TF_LITE_MICRO_EXPECT_EQ(recorded_allocation.requested_bytes, + tensors_count * TF_LITE_EVAL_TENSOR_STRUCT_SIZE); + TF_LITE_MICRO_EXPECT_GE(recorded_allocation.used_bytes, + tensors_count * TF_LITE_EVAL_TENSOR_STRUCT_SIZE); +} + +TF_LITE_MICRO_TEST(TestRecordsNodeAndRegistrationArrayData) { + tflite::ScratchBufferHandle* scratch_buffer_handles = nullptr; + tflite::testing::TestingOpResolver ops_resolver; + const tflite::Model* model = tflite::GetModel(kTestConvModelData); + 
uint8_t arena[kTestConvArenaSize]; + + tflite::RecordingMicroAllocator* micro_allocator = + tflite::RecordingMicroAllocator::Create(arena, kTestConvArenaSize); + TF_LITE_MICRO_EXPECT(micro_allocator != nullptr); + if (micro_allocator == nullptr) return 1; + + tflite::SubgraphAllocations* subgraph_allocations = + micro_allocator->StartModelAllocation(model); + TF_LITE_MICRO_EXPECT(nullptr != subgraph_allocations); + if (subgraph_allocations == nullptr) return 1; + + TfLiteStatus status = micro_allocator->FinishModelAllocation( + model, subgraph_allocations, &scratch_buffer_handles); + TF_LITE_MICRO_EXPECT_EQ(status, kTfLiteOk); + if (status != kTfLiteOk) return 1; + + size_t num_ops = model->subgraphs()->Get(0)->operators()->size(); + tflite::RecordedAllocation recorded_allocation = + micro_allocator->GetRecordedAllocation( + tflite::RecordedAllocationType::kNodeAndRegistrationArray); + + TF_LITE_MICRO_EXPECT_EQ(recorded_allocation.count, num_ops); + TF_LITE_MICRO_EXPECT_EQ(recorded_allocation.requested_bytes, + num_ops * NODE_AND_REGISTRATION_STRUCT_SIZE); + TF_LITE_MICRO_EXPECT_GE(recorded_allocation.used_bytes, + num_ops * NODE_AND_REGISTRATION_STRUCT_SIZE); +} + +TF_LITE_MICRO_TEST(TestRecordsMultiTenantAllocations) { + tflite::ScratchBufferHandle* scratch_buffer_handles = nullptr; + tflite::testing::TestingOpResolver ops_resolver; + const tflite::Model* model = tflite::GetModel(kTestConvModelData); + + // Double the arena size to allocate two models inside of it: + uint8_t arena[kTestConvArenaSize * 2]; + + TfLiteStatus status; + + tflite::RecordingMicroAllocator* micro_allocator = + tflite::RecordingMicroAllocator::Create(arena, kTestConvArenaSize * 2); + TF_LITE_MICRO_EXPECT(micro_allocator != nullptr); + if (micro_allocator == nullptr) return 1; + + // First allocation with the model in the arena: + tflite::SubgraphAllocations* subgraph_allocations = + micro_allocator->StartModelAllocation(model); + TF_LITE_MICRO_EXPECT(nullptr != subgraph_allocations); + if 
(subgraph_allocations == nullptr) return 1; + + status = micro_allocator->FinishModelAllocation(model, subgraph_allocations, + &scratch_buffer_handles); + TF_LITE_MICRO_EXPECT_EQ(status, kTfLiteOk); + if (status != kTfLiteOk) return 1; + + // Second allocation with the same model in the arena: + subgraph_allocations = micro_allocator->StartModelAllocation(model); + TF_LITE_MICRO_EXPECT(nullptr != subgraph_allocations); + if (subgraph_allocations == nullptr) return 1; + + status = kTfLiteOk, micro_allocator->FinishModelAllocation( + model, subgraph_allocations, &scratch_buffer_handles); + TF_LITE_MICRO_EXPECT_EQ(status, kTfLiteOk); + if (status != kTfLiteOk) return 1; + + size_t tensors_count = tflite::testing::GetModelTensorCount(model); + + tflite::RecordedAllocation recorded_allocation = + micro_allocator->GetRecordedAllocation( + tflite::RecordedAllocationType::kTfLiteEvalTensorData); + + // Node and tensor arrays must be allocated as well as each node and tensor. + TF_LITE_MICRO_EXPECT_EQ(recorded_allocation.count, tensors_count * 2); + TF_LITE_MICRO_EXPECT_EQ(recorded_allocation.requested_bytes, + tensors_count * TF_LITE_EVAL_TENSOR_STRUCT_SIZE * 2); + TF_LITE_MICRO_EXPECT_GE(recorded_allocation.used_bytes, + tensors_count * TF_LITE_EVAL_TENSOR_STRUCT_SIZE * 2); +} + +TF_LITE_MICRO_TEST(TestRecordsPersistentTfLiteTensorData) { + const tflite::Model* model = tflite::GetModel(kTestConvModelData); + uint8_t arena[kTestConvArenaSize]; + + tflite::RecordingMicroAllocator* micro_allocator = + tflite::RecordingMicroAllocator::Create(arena, kTestConvArenaSize); + TF_LITE_MICRO_EXPECT(micro_allocator != nullptr); + if (micro_allocator == nullptr) return 1; + + TfLiteTensor* tensor = micro_allocator->AllocatePersistentTfLiteTensor( + model, /*subgraph_allocations=*/nullptr, 0, 0); + TF_LITE_MICRO_EXPECT(tensor != nullptr); + if (tensor == nullptr) return 1; + + tflite::RecordedAllocation recorded_allocation = + micro_allocator->GetRecordedAllocation( + 
tflite::RecordedAllocationType::kPersistentTfLiteTensorData); + + TF_LITE_MICRO_EXPECT_EQ(recorded_allocation.count, static_cast(1)); + TF_LITE_MICRO_EXPECT_EQ(recorded_allocation.requested_bytes, + TF_LITE_TENSOR_STRUCT_SIZE); + TF_LITE_MICRO_EXPECT_GE(recorded_allocation.used_bytes, + TF_LITE_TENSOR_STRUCT_SIZE); +} + +TF_LITE_MICRO_TEST(TestRecordsPersistentTfLiteTensorQuantizationData) { + const tflite::Model* model = tflite::GetModel(kTestConvModelData); + uint8_t arena[kTestConvArenaSize]; + + tflite::RecordingMicroAllocator* micro_allocator = + tflite::RecordingMicroAllocator::Create(arena, kTestConvArenaSize); + TF_LITE_MICRO_EXPECT(micro_allocator != nullptr); + if (micro_allocator == nullptr) return 1; + + TfLiteTensor* tensor = micro_allocator->AllocatePersistentTfLiteTensor( + model, /*subgraph_allocations=*/nullptr, 0, 0); + TF_LITE_MICRO_EXPECT(tensor != nullptr); + if (tensor == nullptr) return 1; + + // Walk the model subgraph to find all tensors with quantization params and + // keep a tally. 
+ size_t quantized_channel_bytes = 0; + const tflite::Tensor* cur_tensor = + model->subgraphs()->Get(0)->tensors()->Get(0); + const tflite::QuantizationParameters* quantization_params = + cur_tensor->quantization(); + if (quantization_params && quantization_params->scale() && + quantization_params->scale()->size() > 0 && + quantization_params->zero_point() && + quantization_params->zero_point()->size() > 0) { + size_t num_channels = quantization_params->scale()->size(); + quantized_channel_bytes += TfLiteIntArrayGetSizeInBytes(num_channels); + } + + // Calculate the expected allocation bytes with subgraph quantization data: + size_t expected_requested_bytes = + TF_LITE_AFFINE_QUANTIZATION_SIZE + quantized_channel_bytes; + + tflite::RecordedAllocation recorded_allocation = + micro_allocator->GetRecordedAllocation( + tflite::RecordedAllocationType:: + kPersistentTfLiteTensorQuantizationData); + + // Each quantized tensors has 2 mallocs (quant struct, zero point dimensions): + TF_LITE_MICRO_EXPECT_EQ(recorded_allocation.count, static_cast(2)); + TF_LITE_MICRO_EXPECT_EQ(recorded_allocation.requested_bytes, + expected_requested_bytes); + TF_LITE_MICRO_EXPECT_GE(recorded_allocation.used_bytes, + expected_requested_bytes); +} + +TF_LITE_MICRO_TEST(TestRecordsPersistentBufferData) { + uint8_t arena[kTestConvArenaSize]; + + tflite::RecordingMicroAllocator* micro_allocator = + tflite::RecordingMicroAllocator::Create(arena, kTestConvArenaSize); + TF_LITE_MICRO_EXPECT(micro_allocator != nullptr); + if (micro_allocator == nullptr) return 1; + + void* buffer = micro_allocator->AllocatePersistentBuffer(/*bytes=*/100); + TF_LITE_MICRO_EXPECT(buffer != nullptr); + if (buffer == nullptr) return 1; + + tflite::RecordedAllocation recorded_allocation = + micro_allocator->GetRecordedAllocation( + tflite::RecordedAllocationType::kPersistentBufferData); + + TF_LITE_MICRO_EXPECT_EQ(recorded_allocation.count, static_cast(1)); + TF_LITE_MICRO_EXPECT_EQ(recorded_allocation.requested_bytes, + 
static_cast(100)); + TF_LITE_MICRO_EXPECT_GE(recorded_allocation.used_bytes, + static_cast(100)); + + buffer = micro_allocator->AllocatePersistentBuffer(/*bytes=*/50); + TF_LITE_MICRO_EXPECT(buffer != nullptr); + if (buffer == nullptr) return 1; + + recorded_allocation = micro_allocator->GetRecordedAllocation( + tflite::RecordedAllocationType::kPersistentBufferData); + + TF_LITE_MICRO_EXPECT_EQ(recorded_allocation.count, static_cast(2)); + TF_LITE_MICRO_EXPECT_EQ(recorded_allocation.requested_bytes, + static_cast(150)); + TF_LITE_MICRO_EXPECT_GE(recorded_allocation.used_bytes, + static_cast(150)); +} + +TF_LITE_MICRO_TEST(TestMultiSubgraphModel) { + tflite::ScratchBufferHandle* scratch_buffer_handles = nullptr; + tflite::testing::TestingOpResolver ops_resolver; + const tflite::Model* model = + tflite::testing::GetSimpleModelWithNullInputsAndOutputs(); + const int arena_size = 2048; + + uint8_t arena[arena_size]; + + tflite::RecordingMicroAllocator* micro_allocator = + tflite::RecordingMicroAllocator::Create(arena, arena_size); + TF_LITE_MICRO_EXPECT(micro_allocator != nullptr); + if (micro_allocator == nullptr) return 1; + + tflite::SubgraphAllocations* subgraph_allocations = + micro_allocator->StartModelAllocation(model); + TF_LITE_MICRO_EXPECT(nullptr != subgraph_allocations); + if (subgraph_allocations == nullptr) return 1; + + TfLiteStatus status = micro_allocator->FinishModelAllocation( + model, subgraph_allocations, &scratch_buffer_handles); + TF_LITE_MICRO_EXPECT_EQ(status, kTfLiteOk); + if (status != kTfLiteOk) return 1; + + size_t num_ops = 0; + size_t num_tensors = 0; + for (size_t subgraph_idx = 0; subgraph_idx < model->subgraphs()->size(); + subgraph_idx++) { + const tflite::SubGraph* subgraph = model->subgraphs()->Get(subgraph_idx); + num_ops += subgraph->operators()->size(); + num_tensors += subgraph->tensors()->size(); + } + + tflite::RecordedAllocation recorded_allocation = + micro_allocator->GetRecordedAllocation( + 
tflite::RecordedAllocationType::kNodeAndRegistrationArray); + + TF_LITE_MICRO_EXPECT_EQ(recorded_allocation.count, num_ops); + TF_LITE_MICRO_EXPECT_EQ(recorded_allocation.requested_bytes, + num_ops * NODE_AND_REGISTRATION_STRUCT_SIZE); + TF_LITE_MICRO_EXPECT_GE(recorded_allocation.used_bytes, + num_ops * NODE_AND_REGISTRATION_STRUCT_SIZE); + + recorded_allocation = micro_allocator->GetRecordedAllocation( + tflite::RecordedAllocationType::kTfLiteEvalTensorData); + + TF_LITE_MICRO_EXPECT_EQ(recorded_allocation.count, num_tensors); + TF_LITE_MICRO_EXPECT_EQ(recorded_allocation.requested_bytes, + num_tensors * TF_LITE_EVAL_TENSOR_STRUCT_SIZE); + TF_LITE_MICRO_EXPECT_GE(recorded_allocation.used_bytes, + num_tensors * TF_LITE_EVAL_TENSOR_STRUCT_SIZE); +} + +// TODO(b/158124094): Find a way to audit OpData allocations on +// cross-architectures. + +TF_LITE_MICRO_TESTS_END diff --git a/tensorflow/lite/micro/recording_micro_interpreter.h b/tensorflow/lite/micro/recording_micro_interpreter.h new file mode 100644 index 0000000..24987c2 --- /dev/null +++ b/tensorflow/lite/micro/recording_micro_interpreter.h @@ -0,0 +1,69 @@ +/* Copyright 2022 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/ + +#ifndef TENSORFLOW_LITE_MICRO_RECORDING_MICRO_INTERPRETER_H_ +#define TENSORFLOW_LITE_MICRO_RECORDING_MICRO_INTERPRETER_H_ + +#include "tensorflow/lite/micro/micro_interpreter.h" +#include "tensorflow/lite/micro/micro_profiler_interface.h" +#include "tensorflow/lite/micro/recording_micro_allocator.h" + +namespace tflite { + +// Utility subclass that enables internal recordings of the MicroInterpreter. +// This class should be used to audit and analyze memory arena usage for a given +// model and interpreter. +// +// After construction and the first Invoke() or AllocateTensors() call - the +// memory usage is recorded and available through the GetMicroAllocator() +// function. See RecordingMicroAlloctor for more details on what is currently +// recorded from arena allocations. +// +// It is recommended for users to increase the tensor arena size by at least 1kb +// to ensure enough additional memory is available for internal recordings. 
+class RecordingMicroInterpreter : public MicroInterpreter { + public: + RecordingMicroInterpreter(const Model* model, + const MicroOpResolver& op_resolver, + uint8_t* tensor_arena, size_t tensor_arena_size, + MicroResourceVariables* resource_variable = nullptr, + MicroProfilerInterface* profiler = nullptr) + : MicroInterpreter( + model, op_resolver, + RecordingMicroAllocator::Create(tensor_arena, tensor_arena_size), + resource_variable, profiler), + recording_micro_allocator_( + static_cast(allocator())) {} + + RecordingMicroInterpreter(const Model* model, + const MicroOpResolver& op_resolver, + RecordingMicroAllocator* allocator, + MicroResourceVariables* resource_variable = nullptr, + MicroProfilerInterface* profiler = nullptr) + : MicroInterpreter(model, op_resolver, allocator, resource_variable, + profiler), + recording_micro_allocator_(*allocator) {} + + const RecordingMicroAllocator& GetMicroAllocator() const { + return recording_micro_allocator_; + } + + private: + const RecordingMicroAllocator& recording_micro_allocator_; +}; + +} // namespace tflite + +#endif // TENSORFLOW_LITE_MICRO_RECORDING_MICRO_INTERPRETER_H_ diff --git a/tensorflow/lite/micro/riscv32_mcu/README.md b/tensorflow/lite/micro/riscv32_mcu/README.md new file mode 100644 index 0000000..5477d7a --- /dev/null +++ b/tensorflow/lite/micro/riscv32_mcu/README.md @@ -0,0 +1,7 @@ +# RISC-V MCU + +This folder contains TFLite kernel operations optimized for RISC-V micro +controllers. + +It is designed to be portable even to 'bare metal', so it follows the same +design goals as the micro experimental port. diff --git a/tensorflow/lite/micro/riscv32_mcu/debug_log.cc b/tensorflow/lite/micro/riscv32_mcu/debug_log.cc new file mode 100644 index 0000000..f9459b8 --- /dev/null +++ b/tensorflow/lite/micro/riscv32_mcu/debug_log.cc @@ -0,0 +1,21 @@ +/* Copyright 2018 The TensorFlow Authors. All Rights Reserved. 
// TODO(b/121324430): Add test for DebugLog functions
// TODO(b/121275099): Remove dependency on debug_log once the platform supports
// printf

// BUG FIX (extraction damage): the include's header name was stripped;
// <cstdio> is required for puts().
#include <cstdio>

// Writes the NUL-terminated string `s` to stdout (puts appends a newline).
// Declared with C linkage so the core TFLM runtime can call it.
extern "C" void DebugLog(const char* s) { puts(s); }
// To add an equivalent function for your own platform, create your own
// implementation file, and place it in a subfolder named after the target. See
// tensorflow/lite/micro/debug_log.cc for a similar example.
// Generic fallback: deliberately a no-op, since host/reference targets need
// no hardware-specific setup.
void InitializeTarget() {}
// Returns a const view of the PackerOp registration.
const TFLMRegistration* PackerOp::getRegistration() {
  return GetMutableRegistration();
}

// Builds (once) and returns the singleton registration wiring the op's
// lifecycle callbacks: Init/Prepare/Invoke/Free.
TFLMRegistration* PackerOp::GetMutableRegistration() {
  static TFLMRegistration r;
  r.init = Init;
  r.prepare = Prepare;
  r.invoke = Invoke;
  r.free = Free;
  return &r;
}

// Clears the static `freed_` flag so a later Free() call can be observed by
// tests; allocates nothing.
void* PackerOp::Init(TfLiteContext* context, const char* buffer,
                     size_t length) {
  freed_ = false;
  // Do nothing.
  return nullptr;
}

// Records that Free() ran; tests inspect the static flag.
void PackerOp::Free(TfLiteContext* context, void* buffer) { freed_ = true; }

// No preparation needed for this test-only op.
TfLiteStatus PackerOp::Prepare(TfLiteContext* context, TfLiteNode* node) {
  return kTfLiteOk;
}

// Concatenates the two 1-D int32 inputs into the 1-D int32 output: input1
// first, then input2, truncating at the output length and zero-filling any
// remaining output elements.
TfLiteStatus PackerOp::Invoke(TfLiteContext* context, TfLiteNode* node) {
  const TfLiteEvalTensor* input1 =
      tflite::micro::GetEvalInput(context, node, 0);
  TF_LITE_ENSURE(context, input1 != nullptr);
  const int32_t* input1_data = input1->data.i32;
  TF_LITE_ENSURE_EQ(context, input1->dims->size, 1);
  const int32_t input1_len = input1->dims->data[0];

  const TfLiteEvalTensor* input2 =
      tflite::micro::GetEvalInput(context, node, 1);
  TF_LITE_ENSURE(context, input2 != nullptr);
  const int32_t* input2_data = input2->data.i32;
  TF_LITE_ENSURE_EQ(context, input2->dims->size, 1);
  const int32_t input2_len = input2->dims->data[0];

  TfLiteEvalTensor* output = tflite::micro::GetEvalOutput(context, node, 0);
  TF_LITE_ENSURE(context, output != nullptr);
  int32_t* output_data = output->data.i32;
  int32_t output_len = output->dims->data[0];

  // Fill output with input: first with the first tensor, then with the second
  // tensor up to the size of the output tensor.
  int cnt = 0;
  int i;
  for (i = 0; i < input1_len && cnt < output_len; i++, cnt++) {
    output_data[cnt] = input1_data[i];
  }
  if (cnt >= output_len) {
    return kTfLiteOk;
  }

  for (i = 0; i < input2_len && cnt < output_len; i++, cnt++) {
    output_data[cnt] = input2_data[i];
  }
  if (cnt >= output_len) {
    return kTfLiteOk;
  }

  // Zero-pad the rest of the output if both inputs were exhausted early.
  for (; cnt < output_len; cnt++) {
    output_data[cnt] = 0;
  }
  return kTfLiteOk;
}

bool PackerOp::freed_ = false;
// Test-only custom op that packs two 1-D int32 input tensors into the output
// tensor (see test_helper_custom_ops.cc for the implementation). All members
// are static; the class acts purely as a namespace for the op's callbacks.
class PackerOp {
 public:
  static const TFLMRegistration* getRegistration();
  static TFLMRegistration* GetMutableRegistration();
  static void* Init(TfLiteContext* context, const char* buffer, size_t length);
  static void Free(TfLiteContext* context, void* buffer);
  static TfLiteStatus Prepare(TfLiteContext* context, TfLiteNode* node);
  static TfLiteStatus Invoke(TfLiteContext* context, TfLiteNode* node);

 private:
  // Set by Free(), cleared by Init(); lets tests verify that Free() ran.
  static bool freed_;
};
Copyright 2019 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#include "tensorflow/lite/micro/test_helpers.h" + +#include +#include +#include +#include +#include + +#include "flatbuffers/flatbuffers.h" // from @flatbuffers +#include "tensorflow/lite/c/common.h" +#include "tensorflow/lite/kernels/internal/compatibility.h" +#include "tensorflow/lite/kernels/internal/tensor_ctypes.h" +#include "tensorflow/lite/kernels/kernel_util.h" +#include "tensorflow/lite/micro/kernels/kernel_util.h" +#include "tensorflow/lite/micro/memory_helpers.h" +#include "tensorflow/lite/micro/micro_arena_constants.h" +#include "tensorflow/lite/micro/micro_utils.h" +#include "tensorflow/lite/micro/test_helper_custom_ops.h" +#include "tensorflow/lite/schema/schema_generated.h" + +// TODO(b/170464050): Use TFLM test only version of schema_utils. 
// FlatBuffers allocator that hands out chunks from a fixed static backing
// buffer, so building test models never touches the heap (bare-metal
// friendly). Bump-style: memory is never individually reclaimed.
class StackAllocator : public flatbuffers::Allocator {
 public:
  StackAllocator(size_t alignment) : data_size_(0) {
    data_ = AlignPointerUp(data_backing_, alignment);
  }

  uint8_t* allocate(size_t size) override {
    TFLITE_DCHECK((data_size_ + size) <= kStackAllocatorSize);
    uint8_t* result = data_;
    data_ += size;
    data_size_ += size;
    return result;
  }

  // Bump allocator: individual deallocation is intentionally a no-op.
  void deallocate(uint8_t* p, size_t) override {}

  static StackAllocator& instance(size_t alignment = 1) {
    // Avoid using true dynamic memory allocation to be portable to bare metal.
    static char inst_memory[sizeof(StackAllocator)];
    static StackAllocator* inst = new (inst_memory) StackAllocator(alignment);
    return *inst;
  }

  static constexpr size_t kStackAllocatorSize = 8192;

 private:
  uint8_t data_backing_[kStackAllocatorSize];
  uint8_t* data_;
  int data_size_;

  TF_LITE_REMOVE_VIRTUAL_DELETE
};

// Returns the process-wide FlatBufferBuilder used by the test model builders,
// placement-constructed on first use over StackAllocator storage (no heap).
flatbuffers::FlatBufferBuilder* BuilderInstance() {
  static char inst_memory[sizeof(flatbuffers::FlatBufferBuilder)];
  static flatbuffers::FlatBufferBuilder* inst =
      new (inst_memory) flatbuffers::FlatBufferBuilder(
          StackAllocator::kStackAllocatorSize,
          &StackAllocator::instance(MicroArenaBufferAlignment()));
  return inst;
}
+ Tensor AddVariableTensor(TensorType type, + std::initializer_list shape) { + return AddTensorImpl(type, /* is_variable */ true, shape); + } + + // Adds a node to the model with given input and output Tensors. + Node AddNode(Operator op, std::initializer_list inputs, + std::initializer_list outputs, + std::initializer_list intermediates = + std::initializer_list{}); + + void AddMetadata(const char* description_string, + const int32_t* metadata_buffer_data, size_t num_elements); + + // Constructs the flatbuffer model using `builder_` and return a pointer to + // it. The returned model has the same lifetime as `builder_`. + // Note the default value of 0 for num_subgraph_inputs means all tensor inputs + // are in subgraph input list. + const Model* BuildModel(std::initializer_list inputs, + std::initializer_list outputs, + size_t num_subgraph_inputs = 0); + + private: + // Adds a tensor to the model. + Tensor AddTensorImpl(TensorType type, bool is_variable, + std::initializer_list shape); + + flatbuffers::FlatBufferBuilder* builder_; + + static constexpr int kMaxOperatorCodes = 10; + flatbuffers::Offset operator_codes_[kMaxOperatorCodes]; + int next_operator_code_id_ = 0; + + static constexpr int kMaxOperators = 50; + flatbuffers::Offset operators_[kMaxOperators]; + int next_operator_id_ = 0; + + static constexpr int kMaxTensors = 50; + flatbuffers::Offset tensors_[kMaxTensors]; + + static constexpr int kMaxMetadataBuffers = 10; + + static constexpr int kMaxMetadatas = 10; + flatbuffers::Offset metadata_[kMaxMetadatas]; + + flatbuffers::Offset metadata_buffers_[kMaxMetadataBuffers]; + + int nbr_of_metadata_buffers_ = 0; + + int next_tensor_id_ = 0; +}; + +ModelBuilder::Operator ModelBuilder::RegisterOp(BuiltinOperator op, + const char* custom_code) { + TFLITE_DCHECK(next_operator_code_id_ <= kMaxOperatorCodes); + operator_codes_[next_operator_code_id_] = tflite::CreateOperatorCodeDirect( + *builder_, /*deprecated_builtin_code=*/0, custom_code, /*version=*/0, op); + 
next_operator_code_id_++; + return next_operator_code_id_ - 1; +} + +ModelBuilder::Node ModelBuilder::AddNode( + ModelBuilder::Operator op, + std::initializer_list inputs, + std::initializer_list outputs, + std::initializer_list intermediates) { + TFLITE_DCHECK(next_operator_id_ <= kMaxOperators); + operators_[next_operator_id_] = tflite::CreateOperator( + *builder_, op, builder_->CreateVector(inputs.begin(), inputs.size()), + builder_->CreateVector(outputs.begin(), outputs.size()), + BuiltinOptions_NONE, + /*builtin_options=*/0, + /*custom_options=*/0, tflite::CustomOptionsFormat_FLEXBUFFERS, + /*mutating_variable_inputs =*/0, + builder_->CreateVector(intermediates.begin(), intermediates.size())); + next_operator_id_++; + return next_operator_id_ - 1; +} + +void ModelBuilder::AddMetadata(const char* description_string, + const int32_t* metadata_buffer_data, + size_t num_elements) { + metadata_[ModelBuilder::nbr_of_metadata_buffers_] = + CreateMetadata(*builder_, builder_->CreateString(description_string), + 1 + ModelBuilder::nbr_of_metadata_buffers_); + + metadata_buffers_[nbr_of_metadata_buffers_] = tflite::CreateBuffer( + *builder_, builder_->CreateVector((uint8_t*)metadata_buffer_data, + sizeof(uint32_t) * num_elements)); + + ModelBuilder::nbr_of_metadata_buffers_++; +} + +const Model* ModelBuilder::BuildModel( + std::initializer_list inputs, + std::initializer_list outputs, + size_t num_subgraph_inputs) { + // Model schema requires an empty buffer at idx 0. + size_t buffer_size = 1 + ModelBuilder::nbr_of_metadata_buffers_; + flatbuffers::Offset buffers[kMaxMetadataBuffers]; + buffers[0] = tflite::CreateBuffer(*builder_); + + // Place the metadata buffers first in the buffer since the indices for them + // have already been set in AddMetadata() + for (int i = 1; i < ModelBuilder::nbr_of_metadata_buffers_ + 1; ++i) { + buffers[i] = metadata_buffers_[i - 1]; + } + + // Default to single subgraph model. 
+ constexpr size_t subgraphs_size = 1; + + // Find out number of subgraph inputs. + if (num_subgraph_inputs == 0) { + // This is the default case. + num_subgraph_inputs = inputs.size(); + } else { + // A non-zero value of num_subgraph_inputs means that some of + // the operator input tensors are not subgraph inputs. + TFLITE_DCHECK(num_subgraph_inputs <= inputs.size()); + } + + const flatbuffers::Offset subgraphs[subgraphs_size] = { + tflite::CreateSubGraph( + *builder_, builder_->CreateVector(tensors_, next_tensor_id_), + builder_->CreateVector(inputs.begin(), num_subgraph_inputs), + builder_->CreateVector(outputs.begin(), outputs.size()), + builder_->CreateVector(operators_, next_operator_id_), + builder_->CreateString("test_subgraph"))}; + + flatbuffers::Offset model_offset; + if (ModelBuilder::nbr_of_metadata_buffers_ > 0) { + model_offset = tflite::CreateModel( + *builder_, 0, + builder_->CreateVector(operator_codes_, next_operator_code_id_), + builder_->CreateVector(subgraphs, subgraphs_size), + builder_->CreateString("teset_model"), + builder_->CreateVector(buffers, buffer_size), 0, + builder_->CreateVector(metadata_, + ModelBuilder::nbr_of_metadata_buffers_)); + } else { + model_offset = tflite::CreateModel( + *builder_, 0, + builder_->CreateVector(operator_codes_, next_operator_code_id_), + builder_->CreateVector(subgraphs, subgraphs_size), + builder_->CreateString("teset_model"), + builder_->CreateVector(buffers, buffer_size)); + } + + tflite::FinishModelBuffer(*builder_, model_offset); + void* model_pointer = builder_->GetBufferPointer(); + const Model* model = flatbuffers::GetRoot(model_pointer); + return model; +} + +ModelBuilder::Tensor ModelBuilder::AddTensorImpl( + TensorType type, bool is_variable, std::initializer_list shape) { + TFLITE_DCHECK(next_tensor_id_ <= kMaxTensors); + tensors_[next_tensor_id_] = tflite::CreateTensor( + *builder_, builder_->CreateVector(shape.begin(), shape.size()), type, + /* buffer */ 0, /* name */ 0, /* quantization */ 
0, + /* is_variable */ is_variable, + /* sparsity */ 0); + next_tensor_id_++; + return next_tensor_id_ - 1; +} + +const Model* BuildSimpleStatefulModel() { + using flatbuffers::Offset; + flatbuffers::FlatBufferBuilder* fb_builder = BuilderInstance(); + + ModelBuilder model_builder(fb_builder); + + const int op_id = + model_builder.RegisterOp(BuiltinOperator_CUSTOM, "simple_stateful_op"); + const int input_tensor = model_builder.AddTensor(TensorType_INT8, {3}); + const int median_tensor = model_builder.AddTensor(TensorType_INT8, {3}); + const int invoke_count_tensor = + model_builder.AddTensor(TensorType_INT32, {1}); + const int intermediate_tensor = + model_builder.AddTensor(TensorType_FLOAT32, {0}); + + model_builder.AddNode(op_id, {input_tensor}, + {median_tensor, invoke_count_tensor}, + {intermediate_tensor}); + return model_builder.BuildModel({input_tensor}, + {median_tensor, invoke_count_tensor}); +} + +const Model* BuildSimpleModelWithBranch() { + using flatbuffers::Offset; + flatbuffers::FlatBufferBuilder* fb_builder = BuilderInstance(); + + ModelBuilder model_builder(fb_builder); + /* Model structure + | t0 + +------| + | v + | +---------+ + | | n0 | + | | | + | +---------+ + v + + | + +---------+ | t1 + | n1 | | + | | | + +---------+ | + | | + t2 | v + | +---------+ + +-->| n2 | + | | + +-------|-+ + |t3 + v + */ + const int op_id = + model_builder.RegisterOp(BuiltinOperator_CUSTOM, "mock_custom"); + const int t0 = model_builder.AddTensor(TensorType_FLOAT32, {2, 2, 3}); + const int t1 = model_builder.AddTensor(TensorType_FLOAT32, {2, 2, 3}); + const int t2 = model_builder.AddTensor(TensorType_FLOAT32, {2, 2, 3}); + const int t3 = model_builder.AddTensor(TensorType_FLOAT32, {2, 2, 3}); + model_builder.AddNode(op_id, {t0}, {t1}); // n0 + model_builder.AddNode(op_id, {t0}, {t2}); // n1 + model_builder.AddNode(op_id, {t1, t2}, {t3}); // n2 + return model_builder.BuildModel({t0}, {t3}); +} + +const Model* BuildModelWithOfflinePlanning(int number_of_tensors, + 
const int32_t* metadata_buffer, + NodeConnection* node_conn, + int num_conns, + int num_subgraph_inputs) { + using flatbuffers::Offset; + flatbuffers::FlatBufferBuilder* fb_builder = BuilderInstance(); + + ModelBuilder model_builder(fb_builder); + + const int op_id = + model_builder.RegisterOp(BuiltinOperator_CUSTOM, "mock_custom"); + + for (int i = 0; i < number_of_tensors; ++i) { + model_builder.AddTensor(TensorType_FLOAT32, {2, 2, 3}); + } + + for (int i = 0; i < num_conns; ++i) { + model_builder.AddNode(op_id, node_conn[i].input, node_conn[i].output); + } + + model_builder.AddMetadata( + "OfflineMemoryAllocation", metadata_buffer, + number_of_tensors + tflite::testing::kOfflinePlannerHeaderSize); + + return model_builder.BuildModel( + node_conn[0].input, node_conn[num_conns - 1].output, num_subgraph_inputs); +} + +const Model* BuildModelWithUnusedInputs() { + using flatbuffers::Offset; + flatbuffers::FlatBufferBuilder* builder = BuilderInstance(); + + constexpr size_t buffers_size = 1; + const Offset buffers[buffers_size] = {CreateBuffer(*builder)}; + constexpr size_t tensor_shape_size = 2; + const int32_t tensor_shape[tensor_shape_size] = {1, 64}; + constexpr size_t tensors_size = 4; + const Offset tensors[tensors_size] = { + CreateTensor(*builder, + builder->CreateVector(tensor_shape, tensor_shape_size), + TensorType_INT8, 0, + builder->CreateString("test_input_tensor"), 0, false), + CreateTensor(*builder, + builder->CreateVector(tensor_shape, tensor_shape_size), + TensorType_INT8, 0, + builder->CreateString("test_unused_input_tensor"), 0, false), + CreateTensor(*builder, + builder->CreateVector(tensor_shape, tensor_shape_size), + TensorType_INT8, 0, + builder->CreateString("test_output_tensor"), 0, false), + CreateTensor(*builder, + builder->CreateVector(tensor_shape, tensor_shape_size), + TensorType_INT8, 0, + builder->CreateString("test_unused_tensor"), 0, false), + }; + constexpr size_t inputs_size = 2; + const int32_t inputs[inputs_size] = {0, 1}; + 
constexpr size_t outputs_size = 1; + const int32_t outputs[outputs_size] = {2}; + constexpr size_t operator_inputs_size = 1; + const int32_t operator_inputs[operator_inputs_size] = {0}; + constexpr size_t operator_outputs_size = 1; + const int32_t operator_outputs[operator_outputs_size] = {2}; + constexpr size_t operators_size = 1; + const Offset operators[operators_size] = { + CreateOperator( + *builder, 0, + builder->CreateVector(operator_inputs, operator_inputs_size), + builder->CreateVector(operator_outputs, operator_outputs_size), + BuiltinOptions_NONE), + }; + constexpr size_t subgraphs_size = 1; + const Offset subgraphs[subgraphs_size] = { + CreateSubGraph(*builder, builder->CreateVector(tensors, tensors_size), + builder->CreateVector(inputs, inputs_size), + builder->CreateVector(outputs, outputs_size), + builder->CreateVector(operators, operators_size), + builder->CreateString("test_subgraph"))}; + constexpr size_t operator_codes_size = 1; + const Offset operator_codes[operator_codes_size] = { + CreateOperatorCodeDirect(*builder, /*deprecated_builtin_code=*/0, + "mock_custom", + /*version=*/0, BuiltinOperator_CUSTOM)}; + const Offset model_offset = CreateModel( + *builder, 0, builder->CreateVector(operator_codes, operator_codes_size), + builder->CreateVector(subgraphs, subgraphs_size), + builder->CreateString("test_model"), + builder->CreateVector(buffers, buffers_size)); + FinishModelBuffer(*builder, model_offset); + void* model_pointer = builder->GetBufferPointer(); + const Model* model = flatbuffers::GetRoot(model_pointer); + return model; +} + +const Model* BuildModelWithUnusedOperatorOutputs() { + using flatbuffers::Offset; + flatbuffers::FlatBufferBuilder* builder = BuilderInstance(); + + constexpr size_t buffers_size = 1; + const Offset buffers[buffers_size] = {CreateBuffer(*builder)}; + constexpr size_t tensor_shape_size = 2; + const int32_t tensor_shape[tensor_shape_size] = {1, 64}; + constexpr size_t tensors_size = 2; + const Offset 
tensors[tensors_size] = { + CreateTensor(*builder, + builder->CreateVector(tensor_shape, tensor_shape_size), + TensorType_INT8, 0, + builder->CreateString("test_input_tensor"), 0, false), + CreateTensor( + *builder, builder->CreateVector(tensor_shape, tensor_shape_size), + TensorType_INT8, 0, + builder->CreateString("test_unused_output_tensor"), 0, false)}; + constexpr size_t inputs_size = 0; + const int32_t inputs[inputs_size] = {}; + constexpr size_t outputs_size = 1; + const int32_t outputs[outputs_size] = {0}; + constexpr size_t operator_inputs_size = 0; + const int32_t operator_inputs[operator_inputs_size] = {}; + constexpr size_t operator_outputs_size = 2; + const int32_t operator_outputs[operator_outputs_size] = {0, 1}; + constexpr size_t operators_size = 1; + const Offset operators[operators_size] = { + CreateOperator( + *builder, 0, + builder->CreateVector(operator_inputs, operator_inputs_size), + builder->CreateVector(operator_outputs, operator_outputs_size), + BuiltinOptions_NONE), + }; + constexpr size_t subgraphs_size = 1; + const Offset subgraphs[subgraphs_size] = { + CreateSubGraph(*builder, builder->CreateVector(tensors, tensors_size), + builder->CreateVector(inputs, inputs_size), + builder->CreateVector(outputs, outputs_size), + builder->CreateVector(operators, operators_size), + builder->CreateString("test_subgraph"))}; + constexpr size_t operator_codes_size = 1; + const Offset operator_codes[operator_codes_size] = { + CreateOperatorCodeDirect(*builder, /*deprecated_builtin_code=*/0, + "mock_custom", + /*version=*/0, BuiltinOperator_CUSTOM)}; + const Offset model_offset = CreateModel( + *builder, 0, builder->CreateVector(operator_codes, operator_codes_size), + builder->CreateVector(subgraphs, subgraphs_size), + builder->CreateString("test_model"), + builder->CreateVector(buffers, buffers_size)); + FinishModelBuffer(*builder, model_offset); + void* model_pointer = builder->GetBufferPointer(); + const Model* model = 
flatbuffers::GetRoot(model_pointer); + return model; +} + +const Model* BuildModelWith256x256Tensor() { + using flatbuffers::Offset; + flatbuffers::FlatBufferBuilder* fb_builder = BuilderInstance(); + + ModelBuilder model_builder(fb_builder); + + const int op_id = + model_builder.RegisterOp(BuiltinOperator_CUSTOM, "mock_custom"); + const int input1_tensor = + model_builder.AddTensor(TensorType_INT8, {256, 256}); + const int input2_tensor = + model_builder.AddTensor(TensorType_INT8, {256, 256}); + const int output_tensor = + model_builder.AddTensor(TensorType_INT8, {256, 256}); + + model_builder.AddNode(op_id, {input1_tensor, input2_tensor}, {output_tensor}); + return model_builder.BuildModel({input1_tensor, input2_tensor}, + {output_tensor}); +} + +const Model* BuildSimpleMockModel() { + using flatbuffers::Offset; + flatbuffers::FlatBufferBuilder* builder = BuilderInstance(); + + constexpr size_t buffer_data_size = 1; + const uint8_t buffer_data[buffer_data_size] = {21}; + constexpr size_t buffers_size = 2; + const Offset buffers[buffers_size] = { + CreateBuffer(*builder), + CreateBuffer(*builder, + builder->CreateVector(buffer_data, buffer_data_size))}; + constexpr size_t tensor_shape_size = 1; + const int32_t tensor_shape[tensor_shape_size] = {1}; + constexpr size_t tensors_size = 4; + const Offset tensors[tensors_size] = { + CreateTensor(*builder, + builder->CreateVector(tensor_shape, tensor_shape_size), + TensorType_INT32, 0, + builder->CreateString("test_input_tensor"), 0, false), + CreateTensor(*builder, + builder->CreateVector(tensor_shape, tensor_shape_size), + TensorType_INT8, 1, + builder->CreateString("test_weight_tensor"), 0, false), + CreateTensor(*builder, + builder->CreateVector(tensor_shape, tensor_shape_size), + TensorType_INT32, 0, + builder->CreateString("test_output_tensor"), 0, false), + CreateTensor(*builder, + builder->CreateVector(tensor_shape, tensor_shape_size), + TensorType_INT32, 0, + builder->CreateString("test_output2_tensor"), 0, 
false), + }; + constexpr size_t inputs_size = 1; + const int32_t inputs[inputs_size] = {0}; + constexpr size_t outputs_size = 2; + const int32_t outputs[outputs_size] = {2, 3}; + constexpr size_t operator_inputs_size = 2; + const int32_t operator_inputs[operator_inputs_size] = {0, 1}; + constexpr size_t operator_outputs_size = 1; + const int32_t operator_outputs[operator_outputs_size] = {2}; + const int32_t operator2_outputs[operator_outputs_size] = {3}; + constexpr size_t operators_size = 2; + const Offset operators[operators_size] = { + CreateOperator( + *builder, 0, + builder->CreateVector(operator_inputs, operator_inputs_size), + builder->CreateVector(operator_outputs, operator_outputs_size), + BuiltinOptions_NONE), + CreateOperator( + *builder, 0, + builder->CreateVector(operator_inputs, operator_inputs_size), + builder->CreateVector(operator2_outputs, operator_outputs_size), + BuiltinOptions_NONE), + }; + constexpr size_t subgraphs_size = 1; + const Offset subgraphs[subgraphs_size] = { + CreateSubGraph(*builder, builder->CreateVector(tensors, tensors_size), + builder->CreateVector(inputs, inputs_size), + builder->CreateVector(outputs, outputs_size), + builder->CreateVector(operators, operators_size), + builder->CreateString("test_subgraph"))}; + constexpr size_t operator_codes_size = 1; + const Offset operator_codes[operator_codes_size] = { + CreateOperatorCodeDirect(*builder, /*deprecated_builtin_code=*/0, + "mock_custom", + /*version=*/0, BuiltinOperator_CUSTOM)}; + const Offset model_offset = CreateModel( + *builder, 0, builder->CreateVector(operator_codes, operator_codes_size), + builder->CreateVector(subgraphs, subgraphs_size), + builder->CreateString("test_model"), + builder->CreateVector(buffers, buffers_size)); + FinishModelBuffer(*builder, model_offset); + void* model_pointer = builder->GetBufferPointer(); + const Model* model = flatbuffers::GetRoot(model_pointer); + return model; +} + +const Model* BuildComplexMockModel() { + using 
flatbuffers::Offset; + flatbuffers::FlatBufferBuilder* builder = BuilderInstance(); + + constexpr size_t buffer_data_size = 1; + const uint8_t buffer_data_1[buffer_data_size] = {21}; + const uint8_t buffer_data_2[buffer_data_size] = {21}; + const uint8_t buffer_data_3[buffer_data_size] = {21}; + constexpr size_t buffers_size = 7; + const Offset buffers[buffers_size] = { + // Op 1 buffers: + CreateBuffer(*builder), + CreateBuffer(*builder), + CreateBuffer(*builder, + builder->CreateVector(buffer_data_1, buffer_data_size)), + // Op 2 buffers: + CreateBuffer(*builder), + CreateBuffer(*builder, + builder->CreateVector(buffer_data_2, buffer_data_size)), + // Op 3 buffers: + CreateBuffer(*builder), + CreateBuffer(*builder, + builder->CreateVector(buffer_data_3, buffer_data_size)), + }; + constexpr size_t tensor_shape_size = 1; + const int32_t tensor_shape[tensor_shape_size] = {1}; + + constexpr size_t tensors_size = 10; + const Offset tensors[tensors_size] = { + // Op 1 inputs: + CreateTensor( + *builder, builder->CreateVector(tensor_shape, tensor_shape_size), + TensorType_INT32, 0, builder->CreateString("test_input_tensor_1"), 0, + false /* is_variable */), + CreateTensor( + *builder, builder->CreateVector(tensor_shape, tensor_shape_size), + TensorType_INT32, 1, builder->CreateString("test_variable_tensor_1"), + 0, true /* is_variable */), + CreateTensor( + *builder, builder->CreateVector(tensor_shape, tensor_shape_size), + TensorType_INT8, 2, builder->CreateString("test_weight_tensor_1"), 0, + false /* is_variable */), + // Op 1 output / Op 2 input: + CreateTensor( + *builder, builder->CreateVector(tensor_shape, tensor_shape_size), + TensorType_INT32, 0, builder->CreateString("test_output_tensor_1"), 0, + false /* is_variable */), + // Op 2 inputs: + CreateTensor( + *builder, builder->CreateVector(tensor_shape, tensor_shape_size), + TensorType_INT32, 1, builder->CreateString("test_variable_tensor_2"), + 0, true /* is_variable */), + CreateTensor( + *builder, 
builder->CreateVector(tensor_shape, tensor_shape_size), + TensorType_INT8, 2, builder->CreateString("test_weight_tensor_2"), 0, + false /* is_variable */), + // Op 2 output / Op 3 input: + CreateTensor( + *builder, builder->CreateVector(tensor_shape, tensor_shape_size), + TensorType_INT32, 0, builder->CreateString("test_output_tensor_2"), 0, + false /* is_variable */), + // Op 3 inputs: + CreateTensor( + *builder, builder->CreateVector(tensor_shape, tensor_shape_size), + TensorType_INT32, 1, builder->CreateString("test_variable_tensor_3"), + 0, true /* is_variable */), + CreateTensor( + *builder, builder->CreateVector(tensor_shape, tensor_shape_size), + TensorType_INT8, 2, builder->CreateString("test_weight_tensor_3"), 0, + false /* is_variable */), + // Op 3 output: + CreateTensor( + *builder, builder->CreateVector(tensor_shape, tensor_shape_size), + TensorType_INT32, 0, builder->CreateString("test_output_tensor_3"), 0, + false /* is_variable */), + }; + + constexpr size_t operators_size = 3; + Offset operators[operators_size]; + { + // Set Op 1 attributes: + constexpr size_t operator_inputs_size = 3; + const int32_t operator_inputs[operator_inputs_size] = {0, 1, 2}; + constexpr size_t operator_outputs_size = 1; + const int32_t operator_outputs[operator_outputs_size] = {3}; + + operators[0] = {CreateOperator( + *builder, 0, + builder->CreateVector(operator_inputs, operator_inputs_size), + builder->CreateVector(operator_outputs, operator_outputs_size), + BuiltinOptions_NONE)}; + } + + { + // Set Op 2 attributes + constexpr size_t operator_inputs_size = 3; + const int32_t operator_inputs[operator_inputs_size] = {3, 4, 5}; + constexpr size_t operator_outputs_size = 1; + const int32_t operator_outputs[operator_outputs_size] = {6}; + + operators[1] = {CreateOperator( + *builder, 0, + builder->CreateVector(operator_inputs, operator_inputs_size), + builder->CreateVector(operator_outputs, operator_outputs_size), + BuiltinOptions_NONE)}; + } + + { + // Set Op 3 attributes 
+ constexpr size_t operator_inputs_size = 3; + const int32_t operator_inputs[operator_inputs_size] = {6, 7, 8}; + constexpr size_t operator_outputs_size = 1; + const int32_t operator_outputs[operator_outputs_size] = {9}; + + operators[2] = {CreateOperator( + *builder, 0, + builder->CreateVector(operator_inputs, operator_inputs_size), + builder->CreateVector(operator_outputs, operator_outputs_size), + BuiltinOptions_NONE)}; + } + + constexpr size_t inputs_size = 1; + const int32_t inputs[inputs_size] = {0}; + constexpr size_t outputs_size = 1; + const int32_t outputs[outputs_size] = {9}; + + constexpr size_t subgraphs_size = 1; + const Offset subgraphs[subgraphs_size] = { + CreateSubGraph(*builder, builder->CreateVector(tensors, tensors_size), + builder->CreateVector(inputs, inputs_size), + builder->CreateVector(outputs, outputs_size), + builder->CreateVector(operators, operators_size), + builder->CreateString("test_subgraph"))}; + + constexpr size_t operator_codes_size = 1; + const Offset operator_codes[operator_codes_size] = { + CreateOperatorCodeDirect(*builder, /*deprecated_builtin_code=*/0, + "mock_custom", + /*version=*/0, BuiltinOperator_CUSTOM)}; + + const Offset model_offset = CreateModel( + *builder, 0, builder->CreateVector(operator_codes, operator_codes_size), + builder->CreateVector(subgraphs, subgraphs_size), + builder->CreateString("test_model"), + builder->CreateVector(buffers, buffers_size)); + + FinishModelBuffer(*builder, model_offset); + void* model_pointer = builder->GetBufferPointer(); + const Model* model = flatbuffers::GetRoot(model_pointer); + return model; +} + +const Model* BuildSimpleMultipleInputsModel() { + using flatbuffers::Offset; + flatbuffers::FlatBufferBuilder* builder = BuilderInstance(); + + constexpr size_t buffers_size = 1; + const Offset buffers[buffers_size] = { + CreateBuffer(*builder), + }; + constexpr size_t tensor_shape_size = 1; + const int32_t tensor_shape[tensor_shape_size] = {1}; + constexpr size_t tensors_size = 4; 
+ const Offset tensors[tensors_size] = { + CreateTensor(*builder, + builder->CreateVector(tensor_shape, tensor_shape_size), + TensorType_INT32, 0, + builder->CreateString("test_input_tensor1"), 0, false), + CreateTensor(*builder, + builder->CreateVector(tensor_shape, tensor_shape_size), + TensorType_INT8, 0, + builder->CreateString("test_input_tensor2"), 0, false), + CreateTensor(*builder, + builder->CreateVector(tensor_shape, tensor_shape_size), + TensorType_INT32, 0, + builder->CreateString("test_input_tensor3"), 0, false), + CreateTensor(*builder, + builder->CreateVector(tensor_shape, tensor_shape_size), + TensorType_INT32, 0, + builder->CreateString("test_output_tensor"), 0, false), + }; + constexpr size_t inputs_size = 3; + const int32_t inputs[inputs_size] = {0, 1, 2}; + constexpr size_t outputs_size = 1; + const int32_t outputs[outputs_size] = {3}; + constexpr size_t operator_inputs_size = 3; + const int32_t operator_inputs[operator_inputs_size] = {0, 1, 2}; + constexpr size_t operator_outputs_size = 1; + const int32_t operator_outputs[operator_outputs_size] = {3}; + constexpr size_t operators_size = 1; + const Offset operators[operators_size] = { + CreateOperator( + *builder, 0, + builder->CreateVector(operator_inputs, operator_inputs_size), + builder->CreateVector(operator_outputs, operator_outputs_size), + BuiltinOptions_NONE), + }; + constexpr size_t subgraphs_size = 1; + const Offset subgraphs[subgraphs_size] = { + CreateSubGraph(*builder, builder->CreateVector(tensors, tensors_size), + builder->CreateVector(inputs, inputs_size), + builder->CreateVector(outputs, outputs_size), + builder->CreateVector(operators, operators_size), + builder->CreateString("test_subgraph"))}; + constexpr size_t operator_codes_size = 1; + const Offset operator_codes[operator_codes_size] = { + CreateOperatorCodeDirect(*builder, /*deprecated_builtin_code=*/0, + "multiple_inputs_op", + /*version=*/0, BuiltinOperator_CUSTOM)}; + const Offset model_offset = CreateModel( + 
*builder, 0, builder->CreateVector(operator_codes, operator_codes_size), + builder->CreateVector(subgraphs, subgraphs_size), + builder->CreateString("test_model"), + builder->CreateVector(buffers, buffers_size)); + FinishModelBuffer(*builder, model_offset); + void* model_pointer = builder->GetBufferPointer(); + const Model* model = flatbuffers::GetRoot(model_pointer); + return model; +} + +const Model* BuildSimpleModelWithSubgraphsAndIf() { + using flatbuffers::Offset; + flatbuffers::FlatBufferBuilder* builder = BuilderInstance(); + + constexpr size_t buffers_size = 1; + const Offset buffers[buffers_size] = { + CreateBuffer(*builder), + }; + const int32_t condition_tensor_shape[] = {1}; + const int32_t data_tensor_shape[] = {1, 2}; + constexpr size_t tensors_size = 4; + const Offset subgraph1_tensors[tensors_size] = { + CreateTensor(*builder, builder->CreateVector(condition_tensor_shape, 1), + TensorType_BOOL, 0, + builder->CreateString("condition tensor"), 0, false), + CreateTensor(*builder, builder->CreateVector(data_tensor_shape, 2), + TensorType_FLOAT32, 0, + builder->CreateString("input_tensor1"), 0, false), + CreateTensor(*builder, builder->CreateVector(data_tensor_shape, 2), + TensorType_FLOAT32, 0, + builder->CreateString("input_tensor2"), 0, false), + CreateTensor(*builder, builder->CreateVector(data_tensor_shape, 2), + TensorType_FLOAT32, 0, + builder->CreateString("output_tensor"), 0, false), + }; + const Offset subgraph2_tensors[tensors_size] = { + CreateTensor(*builder, builder->CreateVector(data_tensor_shape, 2), + TensorType_FLOAT32, 0, + builder->CreateString("input_tensor1"), 0, false), + CreateTensor(*builder, builder->CreateVector(data_tensor_shape, 2), + TensorType_FLOAT32, 0, + builder->CreateString("input_tensor2"), 0, false), + CreateTensor(*builder, builder->CreateVector(data_tensor_shape, 2), + TensorType_FLOAT32, 0, + builder->CreateString("output_tensor"), 0, false), + }; + const Offset subgraph3_tensors[tensors_size] = { + 
CreateTensor(*builder, builder->CreateVector(data_tensor_shape, 2), + TensorType_FLOAT32, 0, + builder->CreateString("input_tensor1"), 0, false), + CreateTensor(*builder, builder->CreateVector(data_tensor_shape, 2), + TensorType_FLOAT32, 0, + builder->CreateString("input_tensor2"), 0, false), + CreateTensor(*builder, builder->CreateVector(data_tensor_shape, 2), + TensorType_FLOAT32, 0, + builder->CreateString("output_tensor"), 0, false), + }; + + constexpr size_t if_inputs_size = 3; + const int32_t if_inputs[if_inputs_size] = {0, 1, 2}; + constexpr size_t outputs_size = 1; + const int32_t if_outputs[outputs_size] = {3}; + constexpr size_t operator_inputs_size = 2; + const int32_t operator_inputs[operator_inputs_size] = {0, 1}; + const int32_t operator_outputs[outputs_size] = {2}; + constexpr size_t operators_size = 1; + const Offset subgraph1_operators[operators_size] = { + CreateOperator( + *builder, 0, builder->CreateVector(if_inputs, if_inputs_size), + builder->CreateVector(if_outputs, outputs_size), + BuiltinOptions_IfOptions, CreateIfOptions(*builder, 1, 2).Union()), + }; + const Offset subgraph2_operators[operators_size] = { + CreateOperator( + *builder, 1, + builder->CreateVector(operator_inputs, operator_inputs_size), + builder->CreateVector(operator_outputs, outputs_size), + BuiltinOptions_NONE), + }; + const Offset subgraph3_operators[operators_size] = { + CreateOperator( + *builder, 2, + builder->CreateVector(operator_inputs, operator_inputs_size), + builder->CreateVector(operator_outputs, outputs_size), + BuiltinOptions_NONE), + }; + constexpr size_t subgraphs_size = 3; + const Offset subgraphs[subgraphs_size] = { + CreateSubGraph(*builder, builder->CreateVector(subgraph1_tensors, 4), + builder->CreateVector(if_inputs, if_inputs_size), + builder->CreateVector(if_outputs, outputs_size), + builder->CreateVector(subgraph1_operators, operators_size), + builder->CreateString("if_subgraph")), + CreateSubGraph( + *builder, 
builder->CreateVector(subgraph2_tensors, 3), + builder->CreateVector(operator_inputs, operator_inputs_size), + builder->CreateVector(operator_outputs, outputs_size), + builder->CreateVector(subgraph2_operators, operators_size), + builder->CreateString("then_subgraph")), + CreateSubGraph( + *builder, builder->CreateVector(subgraph3_tensors, 3), + builder->CreateVector(operator_inputs, operator_inputs_size), + builder->CreateVector(operator_outputs, outputs_size), + builder->CreateVector(subgraph3_operators, operators_size), + builder->CreateString("else_subgraph")), + }; + constexpr size_t operator_codes_size = 3; + const Offset operator_codes[operator_codes_size] = { + CreateOperatorCodeDirect(*builder, /*deprecated_builtin_code=*/0, + "multiple_inputs_op", + /*version=*/0, BuiltinOperator_IF), + CreateOperatorCodeDirect(*builder, /*deprecated_builtin_code=*/0, + "multiple_inputs_op", + /*version=*/0, BuiltinOperator_ADD), + CreateOperatorCodeDirect(*builder, /*deprecated_builtin_code=*/0, + "multiple_inputs_op", + /*version=*/0, BuiltinOperator_MUL), + }; + const Offset model_offset = CreateModel( + *builder, 0, builder->CreateVector(operator_codes, operator_codes_size), + builder->CreateVector(subgraphs, subgraphs_size), + builder->CreateString("test_model"), + builder->CreateVector(buffers, buffers_size)); + FinishModelBuffer(*builder, model_offset); + void* model_pointer = builder->GetBufferPointer(); + const Model* model = flatbuffers::GetRoot(model_pointer); + return model; +} + +const Model* BuildSimpleModelWithIfAndEmptySubgraph() { + using flatbuffers::Offset; + flatbuffers::FlatBufferBuilder* builder = BuilderInstance(); + + constexpr size_t buffers_size = 1; + const Offset buffers[buffers_size] = { + CreateBuffer(*builder), + }; + const int32_t condition_tensor_shape[] = {1}; + const int32_t data_tensor_shape[] = {1, 2}; + constexpr size_t tensors_size = 4; + const Offset subgraph1_tensors[tensors_size] = { + CreateTensor(*builder, 
builder->CreateVector(condition_tensor_shape, 1), + TensorType_BOOL, 0, + builder->CreateString("condition tensor"), 0, false), + CreateTensor(*builder, builder->CreateVector(data_tensor_shape, 2), + TensorType_FLOAT32, 0, + builder->CreateString("input_tensor1"), 0, false), + CreateTensor(*builder, builder->CreateVector(data_tensor_shape, 2), + TensorType_FLOAT32, 0, + builder->CreateString("input_tensor2"), 0, false), + CreateTensor(*builder, builder->CreateVector(data_tensor_shape, 2), + TensorType_FLOAT32, 0, + builder->CreateString("output_tensor"), 0, false), + }; + const Offset subgraph2_tensors[tensors_size] = { + CreateTensor(*builder, builder->CreateVector(data_tensor_shape, 2), + TensorType_FLOAT32, 0, + builder->CreateString("input_tensor1"), 0, false), + CreateTensor(*builder, builder->CreateVector(data_tensor_shape, 2), + TensorType_FLOAT32, 0, + builder->CreateString("input_tensor2"), 0, false), + CreateTensor(*builder, builder->CreateVector(data_tensor_shape, 2), + TensorType_FLOAT32, 0, + builder->CreateString("output_tensor"), 0, false), + }; + const Offset subgraph3_tensors[tensors_size] = { + CreateTensor(*builder, builder->CreateVector(data_tensor_shape, 2), + TensorType_FLOAT32, 0, + builder->CreateString("input_tensor1"), 0, false), + CreateTensor(*builder, builder->CreateVector(data_tensor_shape, 2), + TensorType_FLOAT32, 0, + builder->CreateString("input_tensor2"), 0, false), + CreateTensor(*builder, builder->CreateVector(data_tensor_shape, 2), + TensorType_FLOAT32, 0, + builder->CreateString("output_tensor"), 0, false), + }; + + constexpr size_t if_inputs_size = 3; + const int32_t if_inputs[if_inputs_size] = {0, 1, 2}; + constexpr size_t outputs_size = 1; + const int32_t if_outputs[outputs_size] = {3}; + constexpr size_t operator_inputs_size = 2; + const int32_t operator_inputs[operator_inputs_size] = {0, 1}; + const int32_t operator_outputs[outputs_size] = {2}; + constexpr size_t operators_size = 1; + const Offset 
subgraph1_operators[operators_size] = { + CreateOperator( + *builder, 0, builder->CreateVector(if_inputs, if_inputs_size), + builder->CreateVector(if_outputs, outputs_size), + BuiltinOptions_IfOptions, CreateIfOptions(*builder, 1, 2).Union()), + }; + const Offset subgraph2_operators[operators_size] = { + CreateOperator( + *builder, 1, + builder->CreateVector(operator_inputs, operator_inputs_size), + builder->CreateVector(operator_outputs, outputs_size), + BuiltinOptions_NONE), + }; + constexpr size_t subgraphs_size = 3; + const Offset subgraphs[subgraphs_size] = { + CreateSubGraph(*builder, builder->CreateVector(subgraph1_tensors, 4), + builder->CreateVector(if_inputs, if_inputs_size), + builder->CreateVector(if_outputs, outputs_size), + builder->CreateVector(subgraph1_operators, operators_size), + builder->CreateString("if_subgraph")), + CreateSubGraph( + *builder, builder->CreateVector(subgraph2_tensors, 3), + builder->CreateVector(operator_inputs, operator_inputs_size), + builder->CreateVector(operator_outputs, outputs_size), + builder->CreateVector(subgraph2_operators, operators_size), + builder->CreateString("then_subgraph")), + CreateSubGraph( + *builder, builder->CreateVector(subgraph3_tensors, 3), + builder->CreateVector(operator_inputs, operator_inputs_size), + builder->CreateVector(operator_outputs, outputs_size), 0, + builder->CreateString("else_subgraph")), + }; + constexpr size_t operator_codes_size = 3; + const Offset operator_codes[operator_codes_size] = { + CreateOperatorCodeDirect(*builder, /*deprecated_builtin_code=*/0, + "multiple_inputs_op", + /*version=*/0, BuiltinOperator_IF), + CreateOperatorCodeDirect(*builder, /*deprecated_builtin_code=*/0, + "multiple_inputs_op", + /*version=*/0, BuiltinOperator_ADD), + CreateOperatorCodeDirect(*builder, /*deprecated_builtin_code=*/0, + "multiple_inputs_op", + /*version=*/0, BuiltinOperator_MUL), + }; + const Offset model_offset = CreateModel( + *builder, 0, builder->CreateVector(operator_codes, 
operator_codes_size), + builder->CreateVector(subgraphs, subgraphs_size), + builder->CreateString("test_model"), + builder->CreateVector(buffers, buffers_size)); + FinishModelBuffer(*builder, model_offset); + void* model_pointer = builder->GetBufferPointer(); + const Model* model = flatbuffers::GetRoot(model_pointer); + return model; +} + +const Model* BuildSimpleModelWithSubgraphsAndWhile() { + using flatbuffers::Offset; + flatbuffers::FlatBufferBuilder* builder = BuilderInstance(); + + constexpr size_t buffers_size = 1; + const Offset buffers[buffers_size] = { + CreateBuffer(*builder), + }; + const int32_t data_tensor_shape[] = {1, 1}; + constexpr size_t while_tensors_size = 4; + constexpr size_t op_tensors_size = 3; + const Offset subgraph0_tensors[while_tensors_size] = { + CreateTensor(*builder, builder->CreateVector(data_tensor_shape, 1), + TensorType_FLOAT32, 0, + builder->CreateString("input_tensor0"), 0, false), + CreateTensor(*builder, builder->CreateVector(data_tensor_shape, 1), + TensorType_FLOAT32, 0, + builder->CreateString("input_tensor1"), 0, false), + CreateTensor(*builder, builder->CreateVector(data_tensor_shape, 1), + TensorType_FLOAT32, 0, + builder->CreateString("output_tensor0"), 0, false), + CreateTensor(*builder, builder->CreateVector(data_tensor_shape, 1), + TensorType_FLOAT32, 0, + builder->CreateString("output_tensor1"), 0, false), + }; + const Offset subgraph1_tensors[op_tensors_size] = { + CreateTensor(*builder, builder->CreateVector(data_tensor_shape, 1), + TensorType_FLOAT32, 0, + builder->CreateString("input_tensor1"), 0, false), + CreateTensor(*builder, builder->CreateVector(data_tensor_shape, 1), + TensorType_FLOAT32, 0, + builder->CreateString("input_tensor2"), 0, false), + CreateTensor(*builder, builder->CreateVector(data_tensor_shape, 1), + TensorType_BOOL, 0, + builder->CreateString("condition_tensor"), 0, false), + }; + const Offset subgraph2_tensors[op_tensors_size] = { + CreateTensor(*builder, 
builder->CreateVector(data_tensor_shape, 1), + TensorType_FLOAT32, 0, + builder->CreateString("input_tensor0"), 0, false), + CreateTensor(*builder, builder->CreateVector(data_tensor_shape, 1), + TensorType_FLOAT32, 0, + builder->CreateString("input_tensor1"), 0, false), + CreateTensor(*builder, builder->CreateVector(data_tensor_shape, 1), + TensorType_FLOAT32, 0, + builder->CreateString("output_tensor0"), 0, false), + }; + + constexpr size_t inputs_size = 2; + const int32_t inputs[inputs_size] = {0, 1}; + constexpr size_t while_outputs_size = 2; + const int32_t while_outputs[while_outputs_size] = {2, 3}; + constexpr size_t cond_outputs_size = 1; + const int32_t cond_outputs[cond_outputs_size] = {2}; + constexpr size_t add_outputs_size = 1; + const int32_t add_outputs[add_outputs_size] = {2}; + constexpr size_t add_subgraph_outputs_size = 2; + const int32_t add_subgraph_outputs[add_subgraph_outputs_size] = {2, 1}; + constexpr size_t operators_size = 1; + const Offset subgraph0_operators[operators_size] = { + CreateOperator(*builder, 0, builder->CreateVector(inputs, inputs_size), + builder->CreateVector(while_outputs, while_outputs_size), + BuiltinOptions_WhileOptions, + CreateWhileOptions(*builder, 1, 2).Union()), + }; + const Offset subgraph1_operators[operators_size] = { + CreateOperator(*builder, 1, builder->CreateVector(inputs, inputs_size), + builder->CreateVector(cond_outputs, cond_outputs_size), + BuiltinOptions_NONE), + }; + const Offset subgraph2_operators[operators_size] = { + CreateOperator(*builder, 2, builder->CreateVector(inputs, inputs_size), + builder->CreateVector(add_outputs, add_outputs_size), + BuiltinOptions_NONE), + }; + constexpr size_t subgraphs_size = 3; + const Offset subgraphs[subgraphs_size] = { + CreateSubGraph(*builder, builder->CreateVector(subgraph0_tensors, 4), + builder->CreateVector(inputs, inputs_size), + builder->CreateVector(while_outputs, while_outputs_size), + builder->CreateVector(subgraph0_operators, operators_size), + 
builder->CreateString("while_subgraph")), + CreateSubGraph(*builder, builder->CreateVector(subgraph1_tensors, 3), + builder->CreateVector(inputs, inputs_size), + builder->CreateVector(cond_outputs, cond_outputs_size), + builder->CreateVector(subgraph1_operators, operators_size), + builder->CreateString("cond_subgraph")), + CreateSubGraph(*builder, builder->CreateVector(subgraph2_tensors, 3), + builder->CreateVector(inputs, inputs_size), + builder->CreateVector(add_subgraph_outputs, + add_subgraph_outputs_size), + builder->CreateVector(subgraph2_operators, operators_size), + builder->CreateString("body_subgraph")), + }; + constexpr size_t operator_codes_size = 3; + const Offset operator_codes[operator_codes_size] = { + CreateOperatorCodeDirect(*builder, /*deprecated_builtin_code=*/0, + "multiple_inputs_op", + /*version=*/0, BuiltinOperator_WHILE), + CreateOperatorCodeDirect(*builder, /*deprecated_builtin_code=*/0, + "multiple_inputs_op", + /*version=*/0, BuiltinOperator_LESS), + CreateOperatorCodeDirect(*builder, /*deprecated_builtin_code=*/0, + "multiple_inputs_op", + /*version=*/0, BuiltinOperator_ADD), + }; + const Offset model_offset = CreateModel( + *builder, 0, builder->CreateVector(operator_codes, operator_codes_size), + builder->CreateVector(subgraphs, subgraphs_size), + builder->CreateString("test_model"), + builder->CreateVector(buffers, buffers_size)); + FinishModelBuffer(*builder, model_offset); + void* model_pointer = builder->GetBufferPointer(); + const Model* model = flatbuffers::GetRoot(model_pointer); + return model; +} + +// Build a model with If and two subgraphs: two data tensors A1 of size 2, A2 of +// size 4 are first concatenated, then cut to a new tensor A3 of size 3; the new +// tensor A3 of size 3 is then concatenated with A2 tensor of size 4 to produce +// a final output tensor A4. This model is specially crafted to capture the +// corner case outlined in go/avoid-memory-corruption-in-if-operator. 
+// +// Subgraph0 +// A0(1) A2_0(4) A1_0(2) +// | | | ---+ +// v v v | +// +--------------+ | +// | IF | | +// +------+-------+ | +// | A3_0(3) | +// v | +// +--------------+ | +// | CUSTOM |<---+ +// +------+-------+ +// | +// v +// A4_0(8) +// +// Subgraph1/2 +// A1_1(2) A2_1(4) +// | | +// v v +// +---------------+ +// | CUSTOM | +// +-------+-------+ +// | +// v A3_1(3) +// +// And it leads to memory plan as below +// +// Subgraph0 Layout +// +// +// <------------A4_0 -------------> <----- A2_0-------> <----A3_0 ---> +// +----+----+----+----+----+----+----+----+----+----+----+----+----+----+----+ +// | | | | | | | | | 3 | 4 | 5 | 6 | | | | +// +----+----+----+----+----+----+----+----+----+----+----+----+----+----+----+ +// +// +----+----+----+ +// | 1 | 2 | A0 | +// +----+----+----+ +// <---A1_0--> +// +// Subgraph 1 Layout +// +// +----+----+----+----+----+----+----+----+----+ +// | | | | | | | | | | +// +----+----+----+----+----+----+----+----+----+ +// +// +// <------A2_1 -------><----A3_1 ---><--A1_1---> +// +// +// A1_1 of subgraph 1 will overlap with A2_0 of subgraph 0. +// In a buggy implementation of IF, two overwrite may happen: +// 1. copying input from A1_0 to A1_1 overwrites A2_0 before A2_0 is copied to +// A2_1; thus subgraph 1 produce incorrect output. +// 2. 
copying output from A3_1 to A4_0 overwrites A1_0, which should remain +// intact so that it can be used by the OP after the IF operator in subgraph 0 +// + +const Model* BuildModelWithIfAndSubgraphInputTensorOverlap() { + using flatbuffers::Offset; + flatbuffers::FlatBufferBuilder* builder = BuilderInstance(); + + constexpr TensorType kTensorType = TensorType_INT32; + constexpr int kBlockSize = + tflite::MicroArenaBufferAlignment() / sizeof(int32_t); + constexpr size_t kBuffersCount = 1; + const Offset buffers[kBuffersCount] = { + CreateBuffer(*builder), + }; + const int32_t kConditionTensorShape[] = {1}; + const int32_t kIfInput1TensorShape[] = {2 * kBlockSize}; + const int32_t kIfInput2TensorShape[] = {4 * kBlockSize}; + const int32_t kIfOutputTensorShape[] = {3 * kBlockSize}; + const int32_t kFinalOutputTensorShape[] = {8 * kBlockSize}; + constexpr size_t kSubgraph0TensorsCount = 5; + const Offset kSubgraph0Tensors[kSubgraph0TensorsCount] = { + CreateTensor(*builder, builder->CreateVector(kConditionTensorShape, 1), + TensorType_BOOL, 0, + builder->CreateString("condition tensor"), 0, false), + CreateTensor(*builder, builder->CreateVector(kIfInput1TensorShape, 1), + kTensorType, 0, builder->CreateString("if_input_tensor1"), 0, + false), + CreateTensor(*builder, builder->CreateVector(kIfInput2TensorShape, 1), + kTensorType, 0, builder->CreateString("if_input_tensor2"), 0, + false), + CreateTensor(*builder, builder->CreateVector(kIfOutputTensorShape, 1), + kTensorType, 0, builder->CreateString("if_output_tensor"), 0, + false), + CreateTensor(*builder, builder->CreateVector(kFinalOutputTensorShape, 1), + kTensorType, 0, builder->CreateString("final_output_tensor"), + 0, false), + }; + + // Subgraph 1 is the chosen path if condition tensor in IF is true. 
+ constexpr size_t kSubgraph1TensorsCount = 3; + const Offset kSubgraph1Tensors[kSubgraph1TensorsCount] = { + CreateTensor(*builder, builder->CreateVector(kIfInput1TensorShape, 1), + kTensorType, 0, + builder->CreateString("subgraph1_input_tensor1"), 0, false), + CreateTensor(*builder, builder->CreateVector(kIfInput2TensorShape, 1), + kTensorType, 0, + builder->CreateString("subgraph1_input_tensor2"), 0, false), + CreateTensor(*builder, builder->CreateVector(kIfOutputTensorShape, 1), + kTensorType, 0, + builder->CreateString("subgraph1_output_tensor"), 0, false), + }; + + // Subgraph 2 is the chosen path if condition tensor in IF is false + constexpr size_t kSubgraph2TensorsCount = 3; + const Offset kSubgraph2Tensors[kSubgraph2TensorsCount] = { + CreateTensor(*builder, builder->CreateVector(kIfInput1TensorShape, 1), + kTensorType, 0, builder->CreateString("if_input_tensor1"), 0, + false), + CreateTensor(*builder, builder->CreateVector(kIfInput2TensorShape, 1), + kTensorType, 0, builder->CreateString("if_input_tensor2"), 0, + false), + CreateTensor(*builder, builder->CreateVector(kIfOutputTensorShape, 1), + kTensorType, 0, builder->CreateString("if_output_tensor"), 0, + false), + }; + + constexpr int kIfOpCodeIndex = 0; + constexpr int kCustomOpCodeIndex = 1; + + constexpr size_t kIfInputsCount = 3; + const int32_t kIfInputs[kIfInputsCount] = {0, 1, 2}; + constexpr size_t kOutputsCount = 1; + const int32_t kIfOutputs[kOutputsCount] = {3}; + constexpr size_t kOpAfterIfInputsCount = 2; + const int32_t kOpAfterIfInputs[kOpAfterIfInputsCount] = {3, 2}; + const int32_t kOpAfterIfOutputs[kOutputsCount] = {4}; + constexpr size_t kOperatorsCount = 2; + const Offset kSubgraph0Operators[kOperatorsCount] = { + CreateOperator(*builder, kIfOpCodeIndex, + builder->CreateVector(kIfInputs, kIfInputsCount), + builder->CreateVector(kIfOutputs, kOutputsCount), + BuiltinOptions_IfOptions, + CreateIfOptions(*builder, 1, 2).Union()), + CreateOperator( + *builder, kCustomOpCodeIndex, + 
builder->CreateVector(kOpAfterIfInputs, kOpAfterIfInputsCount), + builder->CreateVector(kOpAfterIfOutputs, kOutputsCount)), + }; + + constexpr size_t kSubgraph1InputsCount = 2; + const int32_t kSubgraph1Inputs[kSubgraph1InputsCount] = {0, 1}; + constexpr size_t kSubgraph1OutputsCount = 1; + const int32_t kSubgraph1Outputs[kSubgraph1OutputsCount] = {2}; + constexpr size_t kSubgraph1OperatorsCount = 1; + const Offset kSubgraph1Operators[kSubgraph1OperatorsCount] = { + CreateOperator( + *builder, kCustomOpCodeIndex, + builder->CreateVector(kSubgraph1Inputs, kSubgraph1InputsCount), + builder->CreateVector(kSubgraph1Outputs, kSubgraph1OutputsCount), + BuiltinOptions_NONE), + }; + + constexpr size_t kSubgraph2InputsCount = 2; + const int32_t kSubgraph2Inputs[kSubgraph2InputsCount] = {0, 1}; + constexpr size_t kSubgraph2OutputsCount = 1; + const int32_t kSubgraph2Outputs[kSubgraph2OutputsCount] = {2}; + constexpr size_t kSubgraph2OperatorsCount = 1; + const Offset kSubgraph2Operators[kSubgraph2OperatorsCount] = { + CreateOperator( + *builder, kCustomOpCodeIndex, + builder->CreateVector(kSubgraph2Inputs, kSubgraph2InputsCount), + builder->CreateVector(kSubgraph2Outputs, kSubgraph2OutputsCount), + BuiltinOptions_NONE), + }; + + constexpr size_t kSubgraphsCount = 3; + const Offset kSubgraphs[kSubgraphsCount] = { + CreateSubGraph( + *builder, + builder->CreateVector(kSubgraph0Tensors, kSubgraph0TensorsCount), + builder->CreateVector(kIfInputs, kIfInputsCount), + builder->CreateVector(kOpAfterIfOutputs, kOutputsCount), + builder->CreateVector(kSubgraph0Operators, kOperatorsCount), + builder->CreateString("if_subgraph")), + CreateSubGraph( + *builder, + builder->CreateVector(kSubgraph1Tensors, kSubgraph1TensorsCount), + builder->CreateVector(kSubgraph1Inputs, kSubgraph1InputsCount), + builder->CreateVector(kSubgraph1Outputs, kSubgraph1OutputsCount), + builder->CreateVector(kSubgraph1Operators, kSubgraph1OperatorsCount), + builder->CreateString("then_subgraph")), + 
CreateSubGraph( + *builder, + builder->CreateVector(kSubgraph2Tensors, kSubgraph2TensorsCount), + builder->CreateVector(kSubgraph2Inputs, kSubgraph2InputsCount), + builder->CreateVector(kSubgraph2Outputs, kSubgraph2OutputsCount), + builder->CreateVector(kSubgraph2Operators, kSubgraph2OperatorsCount), + builder->CreateString("else_subgraph")), + }; + + constexpr size_t kOperatorCodesCount = 2; + const Offset kOperatorCodes[kOperatorCodesCount] = { + CreateOperatorCodeDirect(*builder, /*deprecated_builtin_code=*/0, "if", + /*version=*/0, BuiltinOperator_IF), + CreateOperatorCodeDirect(*builder, /*deprecated_builtin_code=*/0, + "custom_packer_op", + /*version=*/0, BuiltinOperator_CUSTOM), + }; + const Offset kModelOffset = CreateModel( + *builder, 0, builder->CreateVector(kOperatorCodes, kOperatorCodesCount), + builder->CreateVector(kSubgraphs, kSubgraphsCount), + builder->CreateString("test_model"), + builder->CreateVector(buffers, kBuffersCount)); + FinishModelBuffer(*builder, kModelOffset); + void* model_pointer = builder->GetBufferPointer(); + const Model* model = flatbuffers::GetRoot(model_pointer); + return model; +} + +// Mock model with one main subgraph containing a single CALL_ONCE op (with null +// inputs and outputs) which invokes a second subgraph which has null inputs and +// outputs. 
+const Model* BuildSimpleMockModelWithNullInputsOutputs() { + using flatbuffers::Offset; + flatbuffers::FlatBufferBuilder* builder = BuilderInstance(); + + constexpr size_t buffers_size = 1; + const Offset buffers[buffers_size] = { + CreateBuffer(*builder), + }; + constexpr size_t tensor_shape_size = 1; + const int32_t tensor_shape[tensor_shape_size] = {0}; + constexpr size_t tensors_size = 1; + const Offset tensors[tensors_size] = { + CreateTensor(*builder, + builder->CreateVector(tensor_shape, tensor_shape_size), + TensorType_INT32, 0, + builder->CreateString("test_input_tensor1"), 0, false), + }; + constexpr size_t subgraph0_inputs_size = 1; + const int32_t subgraph0_inputs[subgraph0_inputs_size] = {0}; + constexpr size_t subgraph0_outputs_size = 1; + const int32_t subgraph0_outputs[subgraph0_outputs_size] = {0}; + constexpr size_t operators_size = 1; + const Offset subgraph0_operators[operators_size] = { + CreateOperator(*builder, 0, {}, {}, BuiltinOptions_CallOnceOptions, + CreateCallOnceOptions(*builder, 1).Union()), + }; + const Offset subgraph1_operators[operators_size] = { + CreateOperator(*builder, 1, {}, {}, BuiltinOptions_NONE)}; + constexpr size_t subgraphs_size = 2; + const Offset subgraphs[subgraphs_size] = { + CreateSubGraph( + *builder, builder->CreateVector(tensors, tensors_size), + builder->CreateVector(subgraph0_inputs, subgraph0_inputs_size), + builder->CreateVector(subgraph0_outputs, subgraph0_outputs_size), + builder->CreateVector(subgraph0_operators, operators_size), + builder->CreateString("main_subgraph")), + CreateSubGraph(*builder, builder->CreateVector(tensors, tensors_size), {}, + {}, + builder->CreateVector(subgraph1_operators, operators_size), + builder->CreateString("secondary subgraph")), + }; + constexpr size_t operator_codes_size = 2; + const Offset operator_codes[operator_codes_size] = { + CreateOperatorCodeDirect(*builder, /*deprecated_builtin_code=*/0, + "call_once_op", + /*version=*/0, BuiltinOperator_CALL_ONCE), + 
CreateOperatorCodeDirect(*builder, /*deprecated_builtin_code=*/0, "no_op", + /*version=*/0, BuiltinOperator_CUSTOM)}; + const Offset model_offset = CreateModel( + *builder, 0, builder->CreateVector(operator_codes, operator_codes_size), + builder->CreateVector(subgraphs, subgraphs_size), + builder->CreateString("test_model"), + builder->CreateVector(buffers, buffers_size)); + FinishModelBuffer(*builder, model_offset); + void* model_pointer = builder->GetBufferPointer(); + const Model* model = flatbuffers::GetRoot(model_pointer); + return model; +} + +} // namespace + +const TFLMRegistration* SimpleStatefulOp::getRegistration() { + return GetMutableRegistration(); +} + +TFLMRegistration* SimpleStatefulOp::GetMutableRegistration() { + static TFLMRegistration r; + r.init = Init; + r.prepare = Prepare; + r.invoke = Invoke; + return &r; +} + +void* SimpleStatefulOp::Init(TfLiteContext* context, const char* buffer, + size_t length) { + void* raw = context->AllocatePersistentBuffer(context, sizeof(OpData)); + OpData* data = reinterpret_cast(raw); + *data = {}; + return raw; +} + +TfLiteStatus SimpleStatefulOp::Prepare(TfLiteContext* context, + TfLiteNode* node) { + OpData* data = reinterpret_cast(node->user_data); + + // Make sure that the input is in uint8_t with at least 1 data entry. + MicroContext* micro_context = GetMicroContext(context); + TfLiteTensor* input = + micro_context->AllocateTempInputTensor(node, kInputTensor); + TF_LITE_ENSURE(context, input != nullptr); + + if (input->type != kTfLiteInt8) return kTfLiteError; + if (NumElements(input->dims) == 0) return kTfLiteError; + + // Allocate a temporary buffer with the same size of input for sorting. + TF_LITE_ENSURE_STATUS(context->RequestScratchBufferInArena( + context, sizeof(uint8_t) * NumElements(input->dims), + &data->sorting_buffer)); + // We can interleave scratch / persistent buffer allocation. 
+ data->invoke_count = reinterpret_cast( + context->AllocatePersistentBuffer(context, sizeof(int))); + *data->invoke_count = 0; + + micro_context->DeallocateTempTfLiteTensor(input); + return kTfLiteOk; +} + +TfLiteStatus SimpleStatefulOp::Invoke(TfLiteContext* context, + TfLiteNode* node) { + OpData* data = reinterpret_cast(node->user_data); + *data->invoke_count += 1; + + const TfLiteEvalTensor* input = + tflite::micro::GetEvalInput(context, node, kInputTensor); + TF_LITE_ENSURE(context, input != nullptr); + const uint8_t* input_data = input->data.uint8; + int size = NumElements(input->dims); + + uint8_t* sorting_buffer = reinterpret_cast( + context->GetScratchBuffer(context, data->sorting_buffer)); + // Copy inputs data to the sorting buffer. We don't want to mutate the input + // tensor as it might be used by a another node. + for (int i = 0; i < size; i++) { + sorting_buffer[i] = input_data[i]; + } + + // In place insertion sort on `sorting_buffer`. + for (int i = 1; i < size; i++) { + for (int j = i; j > 0 && sorting_buffer[j] < sorting_buffer[j - 1]; j--) { + std::swap(sorting_buffer[j], sorting_buffer[j - 1]); + } + } + + TfLiteEvalTensor* median = + tflite::micro::GetEvalOutput(context, node, kMedianTensor); + TF_LITE_ENSURE(context, median != nullptr); + uint8_t* median_data = median->data.uint8; + TfLiteEvalTensor* invoke_count = + tflite::micro::GetEvalOutput(context, node, kInvokeCount); + TF_LITE_ENSURE(context, invoke_count != nullptr); + int32_t* invoke_count_data = invoke_count->data.i32; + + median_data[0] = sorting_buffer[size / 2]; + invoke_count_data[0] = *data->invoke_count; + return kTfLiteOk; +} + +const TFLMRegistration* MockCustom::getRegistration() { + return GetMutableRegistration(); +} + +TFLMRegistration* MockCustom::GetMutableRegistration() { + static TFLMRegistration r; + r.init = Init; + r.prepare = Prepare; + r.invoke = Invoke; + r.free = Free; + return &r; +} + +void* MockCustom::Init(TfLiteContext* context, const char* buffer, + 
size_t length) { + // We don't support delegate in TFL micro. This is a weak check to test if + // context struct being zero-initialized. + TFLITE_DCHECK(context->ReplaceNodeSubsetsWithDelegateKernels == nullptr); + freed_ = false; + // Do nothing. + return nullptr; +} + +void MockCustom::Free(TfLiteContext* context, void* buffer) { freed_ = true; } + +TfLiteStatus MockCustom::Prepare(TfLiteContext* context, TfLiteNode* node) { + return kTfLiteOk; +} + +TfLiteStatus MockCustom::Invoke(TfLiteContext* context, TfLiteNode* node) { + const TfLiteEvalTensor* input = tflite::micro::GetEvalInput(context, node, 0); + TF_LITE_ENSURE(context, input != nullptr); + const int32_t* input_data = input->data.i32; + const TfLiteEvalTensor* weight = + tflite::micro::GetEvalInput(context, node, 1); + TF_LITE_ENSURE(context, weight != nullptr); + const uint8_t* weight_data = weight->data.uint8; + TfLiteEvalTensor* output = tflite::micro::GetEvalOutput(context, node, 0); + TF_LITE_ENSURE(context, output != nullptr); + int32_t* output_data = output->data.i32; + output_data[0] = + 0; // Catch output tensor sharing memory with an input tensor + output_data[0] = input_data[0] + weight_data[0]; + return kTfLiteOk; +} + +bool MockCustom::freed_ = false; + +const TFLMRegistration* MultipleInputs::getRegistration() { + return GetMutableRegistration(); +} + +TFLMRegistration* MultipleInputs::GetMutableRegistration() { + static TFLMRegistration r; + r.init = Init; + r.prepare = Prepare; + r.invoke = Invoke; + r.free = Free; + return &r; +} + +void* MultipleInputs::Init(TfLiteContext* context, const char* buffer, + size_t length) { + // We don't support delegate in TFL micro. This is a weak check to test if + // context struct being zero-initialized. + TFLITE_DCHECK(context->ReplaceNodeSubsetsWithDelegateKernels == nullptr); + freed_ = false; + // Do nothing. 
+ return nullptr; +} + +void MultipleInputs::Free(TfLiteContext* context, void* buffer) { + freed_ = true; +} + +TfLiteStatus MultipleInputs::Prepare(TfLiteContext* context, TfLiteNode* node) { + return kTfLiteOk; +} + +TfLiteStatus MultipleInputs::Invoke(TfLiteContext* context, TfLiteNode* node) { + const TfLiteEvalTensor* input = tflite::micro::GetEvalInput(context, node, 0); + TF_LITE_ENSURE(context, input != nullptr); + const int32_t* input_data = input->data.i32; + const TfLiteEvalTensor* input1 = + tflite::micro::GetEvalInput(context, node, 1); + TF_LITE_ENSURE(context, input1 != nullptr); + const int32_t* input_data1 = input1->data.i32; + const TfLiteEvalTensor* input2 = + tflite::micro::GetEvalInput(context, node, 2); + TF_LITE_ENSURE(context, input2 != nullptr); + const int32_t* input_data2 = input2->data.i32; + + TfLiteEvalTensor* output = tflite::micro::GetEvalOutput(context, node, 0); + TF_LITE_ENSURE(context, output != nullptr); + int32_t* output_data = output->data.i32; + output_data[0] = + 0; // Catch output tensor sharing memory with an input tensor + output_data[0] = input_data[0] + input_data1[0] + input_data2[0]; + return kTfLiteOk; +} + +bool MultipleInputs::freed_ = false; + +const TFLMRegistration* NoOp::getRegistration() { + return GetMutableRegistration(); +} + +TFLMRegistration* NoOp::GetMutableRegistration() { + static TFLMRegistration r; + r.init = Init; + r.prepare = Prepare; + r.invoke = Invoke; + r.free = Free; + return &r; +} + +void* NoOp::Init(TfLiteContext* context, const char* buffer, size_t length) { + // We don't support delegate in TFL micro. This is a weak check to test if + // context struct being zero-initialized. + TFLITE_DCHECK(context->ReplaceNodeSubsetsWithDelegateKernels == nullptr); + freed_ = false; + // Do nothing. 
+ return nullptr; +} + +void NoOp::Free(TfLiteContext* context, void* buffer) { freed_ = true; } + +TfLiteStatus NoOp::Prepare(TfLiteContext* context, TfLiteNode* node) { + return kTfLiteOk; +} + +TfLiteStatus NoOp::Invoke(TfLiteContext* context, TfLiteNode* node) { + return kTfLiteOk; +} + +bool NoOp::freed_ = false; + +TfLiteStatus GetTestingOpResolver( + tflite::testing::TestingOpResolver& op_resolver) { + TF_LITE_ENSURE_STATUS(op_resolver.AddCustom( + "mock_custom", MockCustom::GetMutableRegistration())); + TF_LITE_ENSURE_STATUS(op_resolver.AddCustom( + "simple_stateful_op", SimpleStatefulOp::GetMutableRegistration())); + TF_LITE_ENSURE_STATUS(op_resolver.AddCustom( + "multiple_inputs_op", MultipleInputs::GetMutableRegistration())); + TF_LITE_ENSURE_STATUS( + op_resolver.AddCustom("no_op", NoOp::GetMutableRegistration())); + TF_LITE_ENSURE_STATUS(op_resolver.AddCustom( + "custom_packer_op", PackerOp::GetMutableRegistration())); + TF_LITE_ENSURE_STATUS(op_resolver.AddIf()); + return kTfLiteOk; +} + +const Model* GetModelWithUnusedInputs() { + static Model* model = nullptr; + if (!model) { + model = const_cast(BuildModelWithUnusedInputs()); + } + return model; +} + +const Model* GetModelWithUnusedOperatorOutputs() { + static Model* model = nullptr; + if (!model) { + model = const_cast(BuildModelWithUnusedOperatorOutputs()); + } + return model; +} + +const Model* GetModelWith256x256Tensor() { + static const Model* model = BuildModelWith256x256Tensor(); + return model; +} + +const Model* GetSimpleMockModel() { + static Model* model = nullptr; + if (!model) { + model = const_cast(BuildSimpleMockModel()); + } + return model; +} + +const Model* GetSimpleMultipleInputsModel() { + static Model* model = nullptr; + if (!model) { + model = const_cast(BuildSimpleMultipleInputsModel()); + } + return model; +} + +const Model* GetSimpleModelWithSubgraphsAndIf() { + static Model* model = nullptr; + if (!model) { + model = const_cast(BuildSimpleModelWithSubgraphsAndIf()); + } + 
return model; +} + +const Model* GetSimpleModelWithIfAndEmptySubgraph() { + static Model* model = nullptr; + if (!model) { + model = const_cast(BuildSimpleModelWithIfAndEmptySubgraph()); + } + return model; +} + +const Model* GetSimpleModelWithSubgraphsAndWhile() { + static Model* model = nullptr; + if (!model) { + model = const_cast(BuildSimpleModelWithSubgraphsAndWhile()); + } + return model; +} + +const Model* GetModelWithIfAndSubgraphInputTensorOverlap() { + static Model* model = nullptr; + if (!model) { + model = const_cast(BuildModelWithIfAndSubgraphInputTensorOverlap()); + } + return model; +} + +const Model* GetSimpleModelWithNullInputsAndOutputs() { + static Model* model = nullptr; + if (!model) { + model = const_cast(BuildSimpleMockModelWithNullInputsOutputs()); + } + return model; +} + +const Model* GetComplexMockModel() { + static Model* model = nullptr; + if (!model) { + model = const_cast(BuildComplexMockModel()); + } + return model; +} + +const Model* GetSimpleModelWithBranch() { + static Model* model = nullptr; + if (!model) { + model = const_cast(BuildSimpleModelWithBranch()); + } + return model; +} + +const Model* GetModelWithOfflinePlanning(int num_tensors, + const int32_t* metadata_buffer, + NodeConnection* node_conn, + int num_conns, + int num_subgraph_inputs) { + const Model* model = BuildModelWithOfflinePlanning( + num_tensors, metadata_buffer, node_conn, num_conns, num_subgraph_inputs); + return model; +} + +const Model* GetSimpleStatefulModel() { + static Model* model = nullptr; + if (!model) { + model = const_cast(BuildSimpleStatefulModel()); + } + return model; +} + +const Tensor* Create1dFlatbufferTensor(int size, bool is_variable) { + using flatbuffers::Offset; + flatbuffers::FlatBufferBuilder* builder = BuilderInstance(); + constexpr size_t tensor_shape_size = 1; + const int32_t tensor_shape[tensor_shape_size] = {size}; + const Offset tensor_offset = CreateTensor( + *builder, builder->CreateVector(tensor_shape, tensor_shape_size), + 
TensorType_INT32, 0, builder->CreateString("test_tensor"), 0, + is_variable); + builder->Finish(tensor_offset); + void* tensor_pointer = builder->GetBufferPointer(); + const Tensor* tensor = flatbuffers::GetRoot(tensor_pointer); + return tensor; +} + +const Tensor* CreateQuantizedFlatbufferTensor(int size) { + using flatbuffers::Offset; + flatbuffers::FlatBufferBuilder* builder = BuilderInstance(); + constexpr size_t quant_params_size = 1; + const float min_array[quant_params_size] = {0.1f}; + const float max_array[quant_params_size] = {0.2f}; + const float scale_array[quant_params_size] = {0.3f}; + const int64_t zero_point_array[quant_params_size] = {100ll}; + + const Offset quant_params = + CreateQuantizationParameters( + *builder, + /*min=*/builder->CreateVector(min_array, quant_params_size), + /*max=*/builder->CreateVector(max_array, quant_params_size), + /*scale=*/ + builder->CreateVector(scale_array, quant_params_size), + /*zero_point=*/ + builder->CreateVector(zero_point_array, quant_params_size)); + + constexpr size_t tensor_shape_size = 1; + const int32_t tensor_shape[tensor_shape_size] = {size}; + const Offset tensor_offset = CreateTensor( + *builder, builder->CreateVector(tensor_shape, tensor_shape_size), + TensorType_INT32, 0, builder->CreateString("test_tensor"), quant_params, + false); + builder->Finish(tensor_offset); + void* tensor_pointer = builder->GetBufferPointer(); + const Tensor* tensor = flatbuffers::GetRoot(tensor_pointer); + return tensor; +} + +const Tensor* CreateMissingQuantizationFlatbufferTensor(int size) { + using flatbuffers::Offset; + flatbuffers::FlatBufferBuilder* builder = BuilderInstance(); + const Offset quant_params = + CreateQuantizationParameters(*builder, 0, 0, 0, 0, + QuantizationDetails_NONE, 0, 0); + constexpr size_t tensor_shape_size = 1; + const int32_t tensor_shape[tensor_shape_size] = {size}; + const Offset tensor_offset = CreateTensor( + *builder, builder->CreateVector(tensor_shape, tensor_shape_size), + 
TensorType_INT32, 0, builder->CreateString("test_tensor"), quant_params, + false); + builder->Finish(tensor_offset); + void* tensor_pointer = builder->GetBufferPointer(); + const Tensor* tensor = flatbuffers::GetRoot(tensor_pointer); + return tensor; +} + +const flatbuffers::Vector>* +CreateFlatbufferBuffers() { + using flatbuffers::Offset; + flatbuffers::FlatBufferBuilder* builder = BuilderInstance(); + constexpr size_t buffers_size = 1; + const Offset buffers[buffers_size] = { + CreateBuffer(*builder), + }; + const flatbuffers::Offset>> + buffers_offset = builder->CreateVector(buffers, buffers_size); + builder->Finish(buffers_offset); + void* buffers_pointer = builder->GetBufferPointer(); + const flatbuffers::Vector>* result = + flatbuffers::GetRoot>>( + buffers_pointer); + return result; +} + +int TestStrcmp(const char* a, const char* b) { + if ((a == nullptr) || (b == nullptr)) { + return -1; + } + while ((*a != 0) && (*a == *b)) { + a++; + b++; + } + return *reinterpret_cast(a) - + *reinterpret_cast(b); +} + +// Create a TfLiteIntArray from an array of ints. The first element in the +// supplied array must be the size of the array expressed as an int. +TfLiteIntArray* IntArrayFromInts(int* int_array) { + return reinterpret_cast(int_array); +} + +// Create a TfLiteFloatArray from an array of floats. The first element in the +// supplied array must be the size of the array expressed as a float. 
+TfLiteFloatArray* FloatArrayFromFloats(const float* floats) { + static_assert(sizeof(float) == sizeof(int), + "assumes sizeof(float) == sizeof(int) to perform casting"); + int size = static_cast(floats[0]); + *reinterpret_cast(const_cast(floats)) = size; + return reinterpret_cast(const_cast(floats)); +} + +TfLiteTensor CreateQuantizedBiasTensor(const float* data, int16_t* quantized, + TfLiteIntArray* dims, float input_scale, + float weights_scale, bool is_variable) { + float bias_scale = input_scale * weights_scale; + tflite::SymmetricQuantize(data, quantized, ElementCount(*dims), bias_scale); + + // Quantized int16_t tensors always have a zero point of 0, since the range of + // int16_t values is large, and because zero point costs extra cycles during + // processing. + TfLiteTensor result = + CreateQuantizedTensor(quantized, dims, bias_scale, 0, is_variable); + return result; +} + +TfLiteTensor CreateQuantizedBiasTensor(const float* data, int32_t* quantized, + TfLiteIntArray* dims, float input_scale, + float weights_scale, bool is_variable) { + float bias_scale = input_scale * weights_scale; + tflite::SymmetricQuantize(data, quantized, ElementCount(*dims), bias_scale); + + // Quantized int32_t tensors always have a zero point of 0, since the range of + // int32_t values is large, and because zero point costs extra cycles during + // processing. + TfLiteTensor result = + CreateQuantizedTensor(quantized, dims, bias_scale, 0, is_variable); + return result; +} + +TfLiteTensor CreateQuantizedBiasTensor(const float* data, + std::int64_t* quantized, + TfLiteIntArray* dims, float input_scale, + float weights_scale, bool is_variable) { + float bias_scale = input_scale * weights_scale; + tflite::SymmetricQuantize(data, quantized, ElementCount(*dims), bias_scale); + + // Quantized int32_t tensors always have a zero point of 0, since the range of + // int32_t values is large, and because zero point costs extra cycles during + // processing. 
+ TfLiteTensor result = + CreateQuantizedTensor(quantized, dims, bias_scale, 0, is_variable); + return result; +} + +// Quantizes int32_t bias tensor with per-channel weights determined by input +// scale multiplied by weight scale for each channel. +template +TfLiteTensor CreatePerChannelQuantizedBiasTensor( + const float* input, T* quantized, TfLiteIntArray* dims, float input_scale, + float* weight_scales, float* scales, int* zero_points, + TfLiteAffineQuantization* affine_quant, int quantized_dimension, + bool is_variable) { + int input_size = ElementCount(*dims); + int num_channels = dims->data[quantized_dimension]; + // First element is reserved for array length + zero_points[0] = num_channels; + scales[0] = static_cast(num_channels); + float* scales_array = &scales[1]; + for (int i = 0; i < num_channels; i++) { + scales_array[i] = input_scale * weight_scales[i]; + zero_points[i + 1] = 0; + } + + SymmetricPerChannelQuantize(input, quantized, input_size, num_channels, + scales_array); + + affine_quant->scale = FloatArrayFromFloats(scales); + affine_quant->zero_point = IntArrayFromInts(zero_points); + affine_quant->quantized_dimension = quantized_dimension; + + TfLiteTensor result = CreateTensor(quantized, dims, is_variable); + result.quantization = {kTfLiteAffineQuantization, affine_quant}; + return result; +} + +TfLiteTensor CreatePerChannelQuantizedBiasTensor( + const float* input, int32_t* quantized, TfLiteIntArray* dims, + float input_scale, float* weight_scales, float* scales, int* zero_points, + TfLiteAffineQuantization* affine_quant, int quantized_dimension, + bool is_variable) { + return CreatePerChannelQuantizedBiasTensor( + input, quantized, dims, input_scale, weight_scales, scales, zero_points, + affine_quant, quantized_dimension, is_variable); +} + +TfLiteTensor CreatePerChannelQuantizedBiasTensor( + const float* input, std::int64_t* quantized, TfLiteIntArray* dims, + float input_scale, float* weight_scales, float* scales, int* zero_points, + 
TfLiteAffineQuantization* affine_quant, int quantized_dimension, + bool is_variable) { + return CreatePerChannelQuantizedBiasTensor( + input, quantized, dims, input_scale, weight_scales, scales, zero_points, + affine_quant, quantized_dimension, is_variable); +} + +TfLiteTensor CreateSymmetricPerChannelQuantizedTensor( + const float* input, int8_t* quantized, TfLiteIntArray* dims, float* scales, + int* zero_points, TfLiteAffineQuantization* affine_quant, + int quantized_dimension, bool is_variable, TfLiteType tensor_weight_type) { + int channel_count = dims->data[quantized_dimension]; + + scales[0] = static_cast(channel_count); + zero_points[0] = channel_count; + + SignedSymmetricPerChannelQuantize(input, dims, quantized_dimension, quantized, + &scales[1], tensor_weight_type); + + for (int i = 0; i < channel_count; i++) { + zero_points[i + 1] = 0; + } + + affine_quant->scale = FloatArrayFromFloats(scales); + affine_quant->zero_point = IntArrayFromInts(zero_points); + affine_quant->quantized_dimension = quantized_dimension; + TfLiteTensor result = + CreateTensor(quantized, dims, is_variable, tensor_weight_type); + result.quantization = {kTfLiteAffineQuantization, affine_quant}; + return result; +} + +size_t GetModelTensorCount(const Model* model) { + auto* subgraphs = model->subgraphs(); + if (subgraphs) { + return (*subgraphs)[0]->tensors()->size(); + } + return 0; +} + +void PackInt4ValuesDenselyInPlace(uint8_t* src_buffer, int buffer_size) { + for (int i = 0; i < buffer_size; ++i) { + if (i % 2 == 0) { + src_buffer[i / 2] = src_buffer[i] & 0x0F; + } else { + src_buffer[i / 2] |= src_buffer[i] << 4; + } + } + // the rest of the buffer should be empty since half of it is packed with the + // values + memset(src_buffer + (buffer_size + 1) / 2, 0, buffer_size / 2); +} + +} // namespace testing +} // namespace tflite diff --git a/tensorflow/lite/micro/test_helpers.h b/tensorflow/lite/micro/test_helpers.h new file mode 100644 index 0000000..578282e --- /dev/null +++ 
b/tensorflow/lite/micro/test_helpers.h @@ -0,0 +1,334 @@ +/* Copyright 2019 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#ifndef TENSORFLOW_LITE_MICRO_TEST_HELPERS_H_ +#define TENSORFLOW_LITE_MICRO_TEST_HELPERS_H_ + +#include +#include +#include +#include + +#include "flatbuffers/flatbuffers.h" // from @flatbuffers +#include "tensorflow/lite/c/common.h" +#include "tensorflow/lite/kernels/internal/compatibility.h" +#include "tensorflow/lite/kernels/internal/tensor_ctypes.h" +#include "tensorflow/lite/micro/micro_mutable_op_resolver.h" +#include "tensorflow/lite/micro/micro_utils.h" +#include "tensorflow/lite/portable_type_to_tflitetype.h" +#include "tensorflow/lite/schema/schema_generated.h" + +namespace tflite { +namespace testing { + +constexpr int kOfflinePlannerHeaderSize = 3; +using TestingOpResolver = tflite::MicroMutableOpResolver<10>; + +struct NodeConnection_ { + std::initializer_list input; + std::initializer_list output; +}; +typedef struct NodeConnection_ NodeConnection; + +// A simple operator that returns the median of the input with the number of +// times the kernel was invoked. The implementation below is deliberately +// complicated, just to demonstrate how kernel memory planning works. 
+class SimpleStatefulOp { + static constexpr int kBufferNotAllocated = 0; + // Inputs: + static constexpr int kInputTensor = 0; + // Outputs: + static constexpr int kMedianTensor = 0; + static constexpr int kInvokeCount = 1; + struct OpData { + int* invoke_count = nullptr; + int sorting_buffer = kBufferNotAllocated; + }; + + public: + static const TFLMRegistration* getRegistration(); + static TFLMRegistration* GetMutableRegistration(); + static void* Init(TfLiteContext* context, const char* buffer, size_t length); + static TfLiteStatus Prepare(TfLiteContext* context, TfLiteNode* node); + static TfLiteStatus Invoke(TfLiteContext* context, TfLiteNode* node); +}; + +class MockCustom { + public: + static const TFLMRegistration* getRegistration(); + static TFLMRegistration* GetMutableRegistration(); + static void* Init(TfLiteContext* context, const char* buffer, size_t length); + static void Free(TfLiteContext* context, void* buffer); + static TfLiteStatus Prepare(TfLiteContext* context, TfLiteNode* node); + static TfLiteStatus Invoke(TfLiteContext* context, TfLiteNode* node); + + static bool freed_; +}; + +// A simple operator with the purpose of testing multiple inputs. It returns +// the sum of the inputs. +class MultipleInputs { + public: + static const TFLMRegistration* getRegistration(); + static TFLMRegistration* GetMutableRegistration(); + static void* Init(TfLiteContext* context, const char* buffer, size_t length); + static void Free(TfLiteContext* context, void* buffer); + static TfLiteStatus Prepare(TfLiteContext* context, TfLiteNode* node); + static TfLiteStatus Invoke(TfLiteContext* context, TfLiteNode* node); + + static bool freed_; +}; + +// A simple no-op operator. 
+class NoOp { + public: + static const TFLMRegistration* getRegistration(); + static TFLMRegistration* GetMutableRegistration(); + static void* Init(TfLiteContext* context, const char* buffer, size_t length); + static void Free(TfLiteContext* context, void* buffer); + static TfLiteStatus Prepare(TfLiteContext* context, TfLiteNode* node); + static TfLiteStatus Invoke(TfLiteContext* context, TfLiteNode* node); + + static bool freed_; +}; + +// Returns an Op Resolver that can be used in the testing code. +TfLiteStatus GetTestingOpResolver(TestingOpResolver& op_resolver); + +// Returns a simple example flatbuffer TensorFlow Lite model. Contains 1 input, +// 1 layer of weights, 1 output Tensor, and 1 operator. +const Model* GetSimpleMockModel(); + +// Returns a flatbuffer TensorFlow Lite model with more inputs, variable +// tensors, and operators. +const Model* GetComplexMockModel(); + +// Returns a simple example flatbuffer TensorFlow Lite model. Contains 1 input, +// 1 layer of weights, 1 output Tensor, and 1 operator. +// The size of all three tensors is 256 x 256, which is larger than what other +// models provide from this test helper. +const Model* GetModelWith256x256Tensor(); + +// Returns a simple flatbuffer model with two branches. +const Model* GetSimpleModelWithBranch(); + +// Returns a simple example flatbuffer TensorFlow Lite model. Contains 3 inputs, +// 1 output Tensor, and 1 operator. +const Model* GetSimpleMultipleInputsModel(); + +// Returns a simple flatbuffer model with offline planned tensors +// @param[in] num_tensors Number of tensors in the model. +// @param[in] metadata_buffer Metadata for offline planner. +// @param[in] node_con List of connections, i.e. operators +// in the model. +// @param[in] num_conns Number of connections. +// @param[in] num_subgraph_inputs How many of the input tensors are in +// the subgraph inputs. The default value +// of 0 means all of the input tensors +// are in the subgraph input list. 
There +// must be at least 1 input tensor in the +// subgraph input list. +const Model* GetModelWithOfflinePlanning(int num_tensors, + const int32_t* metadata_buffer, + NodeConnection* node_conn, + int num_conns, + int num_subgraph_inputs = 0); + +// Returns a flatbuffer with a single operator, two inputs (one unused) and one +// output. +const Model* GetModelWithUnusedInputs(); + +// Returns a flatbuffer with a single operator, zero inputs and two outputs +// (one unused). +const Model* GetModelWithUnusedOperatorOutputs(); + +// Returns a flatbuffer model with `simple_stateful_op` +const Model* GetSimpleStatefulModel(); + +// Returns a flatbuffer model with "if" and two subgraphs. +const Model* GetSimpleModelWithSubgraphsAndIf(); + +// Returns a flatbuffer model with "if" and two subgraphs one of which is empty. +const Model* GetSimpleModelWithIfAndEmptySubgraph(); + +// Returns a flatbuffer model with "while" and three subgraphs. +const Model* GetSimpleModelWithSubgraphsAndWhile(); + +// Returns a flatbuffer model with "if" and two subgraphs and the input tensor 1 +// of "if" subgraph overlaps with the input tensor 2 of subgraph 1. +const Model* GetModelWithIfAndSubgraphInputTensorOverlap(); + +// Returns a flatbuffer model with null subgraph/operator inputs and outputs. +const Model* GetSimpleModelWithNullInputsAndOutputs(); + +// Builds a one-dimensional flatbuffer tensor of the given size. +const Tensor* Create1dFlatbufferTensor(int size, bool is_variable = false); + +// Builds a one-dimensional flatbuffer tensor of the given size with +// quantization metadata. +const Tensor* CreateQuantizedFlatbufferTensor(int size); + +// Creates a one-dimensional tensor with no quantization metadata. +const Tensor* CreateMissingQuantizationFlatbufferTensor(int size); + +// Creates a vector of flatbuffer buffers. +const flatbuffers::Vector>* +CreateFlatbufferBuffers(); + +// Performs a simple string comparison without requiring standard C library. 
+int TestStrcmp(const char* a, const char* b); + +void PopulateContext(TfLiteTensor* tensors, int tensors_size, + TfLiteContext* context); + +// Create a TfLiteIntArray from an array of ints. The first element in the +// supplied array must be the size of the array expressed as an int. +TfLiteIntArray* IntArrayFromInts(int* int_array); + +// Create a TfLiteFloatArray from an array of floats. The first element in the +// supplied array must be the size of the array expressed as a float. +TfLiteFloatArray* FloatArrayFromFloats(const float* floats); + +// Assumes that `src_tensor` is a buffer where each element is a 4-bit value +// stored in 8-bit. +// Returns a new buffer that is packed densely with 2 4-bit values in a byte. +// The packing format is low-bits-first, i.e. the lower nibble of a byte is +// filled first, followed by the upper nibble. +void PackInt4ValuesDenselyInPlace(uint8_t* src_buffer, int buffer_size); + +template +TfLiteTensor CreateTensor(const T* data, TfLiteIntArray* dims, + const bool is_variable = false, + TfLiteType type = kTfLiteNoType) { + TfLiteTensor result; + result.dims = dims; + result.params = {}; + result.quantization = {kTfLiteNoQuantization, nullptr}; + result.is_variable = is_variable; + result.allocation_type = kTfLiteMemNone; + result.data.data = const_cast(data); + result.bytes = ElementCount(*dims) * sizeof(T); + result.data.data = const_cast(data); + + if (type == kTfLiteInt4) { + result.type = kTfLiteInt4; + PackInt4ValuesDenselyInPlace(tflite::GetTensorData(&result), + ElementCount(*dims)); + result.bytes = ((ElementCount(*dims) + 1) / 2); + } else { + // Const cast is used to allow passing in const and non-const arrays within + // a single CreateTensor method. A Const array should be used for immutable + // input tensors and non-const array should be used for mutable and output + // tensors. 
+ result.type = typeToTfLiteType(); + } + return result; +} + +template +TfLiteTensor CreateQuantizedTensor(const T* data, TfLiteIntArray* dims, + const float scale, const int zero_point = 0, + const bool is_variable = false, + TfLiteType type = kTfLiteNoType) { + TfLiteTensor result = CreateTensor(data, dims, is_variable, type); + result.params = {scale, zero_point}; + result.quantization = {kTfLiteAffineQuantization, nullptr}; + return result; +} + +template +TfLiteTensor CreateQuantizedTensor(const float* input, T* quantized, + TfLiteIntArray* dims, float scale, + int zero_point, bool is_variable = false, + TfLiteType type = kTfLiteNoType) { + int input_size = ElementCount(*dims); + tflite::Quantize(input, quantized, input_size, scale, zero_point); + return CreateQuantizedTensor(quantized, dims, scale, zero_point, is_variable, + type); +} + +TfLiteTensor CreateQuantizedBiasTensor(const float* data, int16_t* quantized, + TfLiteIntArray* dims, float input_scale, + float weights_scale, + bool is_variable = false); + +TfLiteTensor CreateQuantizedBiasTensor(const float* data, int32_t* quantized, + TfLiteIntArray* dims, float input_scale, + float weights_scale, + bool is_variable = false); + +TfLiteTensor CreateQuantizedBiasTensor(const float* data, + std::int64_t* quantized, + TfLiteIntArray* dims, float input_scale, + float weights_scale, + bool is_variable = false); + +// Quantizes int32_t bias tensor with per-channel weights determined by input +// scale multiplied by weight scale for each channel. +TfLiteTensor CreatePerChannelQuantizedBiasTensor( + const float* input, int32_t* quantized, TfLiteIntArray* dims, + float input_scale, float* weight_scales, float* scales, int* zero_points, + TfLiteAffineQuantization* affine_quant, int quantized_dimension, + bool is_variable = false); + +// Quantizes int64_t bias tensor with per-channel weights determined by input +// scale multiplied by weight scale for each channel. 
+TfLiteTensor CreatePerChannelQuantizedBiasTensor( + const float* input, std::int64_t* quantized, TfLiteIntArray* dims, + float input_scale, float* weight_scales, float* scales, int* zero_points, + TfLiteAffineQuantization* affine_quant, int quantized_dimension, + bool is_variable = false); + +TfLiteTensor CreateSymmetricPerChannelQuantizedTensor( + const float* input, int8_t* quantized, TfLiteIntArray* dims, float* scales, + int* zero_points, TfLiteAffineQuantization* affine_quant, + int quantized_dimension, bool is_variable = false, + TfLiteType tensor_weight_type = kTfLiteNoType); + +// Returns the number of tensors in the default subgraph for a tflite::Model. +size_t GetModelTensorCount(const Model* model); + +// Derives the asymmetric quantization scaling factor from a min and max range. +template <typename T> +inline float ScaleFromMinMax(const float min, const float max) { + return (max - min) / + static_cast<float>((std::numeric_limits<T>::max() * 1.0) - + std::numeric_limits<T>::min()); +} + +// Derives the symmetric quantization scaling factor from a min and max range. +template <typename T> +inline float SymmetricScaleFromMinMax(const float min, const float max) { + const int32_t kScale = + std::numeric_limits<typename std::make_signed<T>::type>::max(); + const float range = std::max(std::abs(min), std::abs(max)); + if (range == 0) { + return 1.0f; + } else { + return range / kScale; + } +} + +// Derives the quantization zero point from a min and max range.
+template +inline int ZeroPointFromMinMax(const float min, const float max) { + return static_cast(std::numeric_limits::min()) + + static_cast(-min / ScaleFromMinMax(min, max) + 0.5f); +} + +} // namespace testing +} // namespace tflite + +#endif // TENSORFLOW_LITE_MICRO_TEST_HELPERS_H_ diff --git a/tensorflow/lite/micro/testing/BUILD b/tensorflow/lite/micro/testing/BUILD new file mode 100644 index 0000000..58914bc --- /dev/null +++ b/tensorflow/lite/micro/testing/BUILD @@ -0,0 +1,98 @@ +load("@tflm_pip_deps//:requirements.bzl", "requirement") +load( + "//tensorflow:extra_rules.bzl", + "tflm_kernel_friends", +) + +package( + # Disabling layering_check because of http://b/177257332 + features = ["-layering_check"], + licenses = ["notice"], +) + +package_group( + name = "tflite_micro", + packages = ["//..."], +) + +package_group( + name = "microfrontend", + packages = ["//tensorflow/lite/experimental/microfrontend/..."], +) + +package_group( + name = "kernel_test_friends", + packages = tflm_kernel_friends(), +) + +cc_library( + name = "micro_test", + hdrs = [ + "micro_test.h", + ], + visibility = [ + ":kernel_test_friends", + ":microfrontend", + ":tflite_micro", + ], + deps = [ + "//tensorflow/lite/c:common", + "//tensorflow/lite/core/api", + "//tensorflow/lite/micro:micro_framework", + "//tensorflow/lite/micro:micro_log", + "//tensorflow/lite/micro:micro_utils", + "//tensorflow/lite/micro:system_setup", + "//tensorflow/lite/micro:test_helpers", + ], +) + +cc_test( + name = "util_test", + srcs = [ + "util_test.cc", + ], + deps = [ + ":micro_test", + ], +) + +cc_library( + name = "test_conv_model", + srcs = [ + "test_conv_model.cc", + ], + hdrs = [ + "test_conv_model.h", + ], + visibility = [ + ":tflite_micro", + ], +) + +py_library( + name = "generate_test_models_lib", + srcs = ["generate_test_models.py"], + visibility = [ + ":tflite_micro", + ], + deps = [ + requirement("numpy"), + requirement("tensorflow-cpu"), + ], +) + +py_binary( + name = "generate_test_models", 
+ srcs = ["generate_test_models.py"], + python_version = "PY3", + srcs_version = "PY3ONLY", + tags = [ + "nomicro_static", # TF dep incompatible w/ TF_LITE_STATIC_MEMORY. + "noubsan", # TODO(b/144512025): Fix raw_to_bitmap_test to fix ubsan failure. + ], + deps = [ + "@absl_py//absl:app", + requirement("numpy"), + requirement("tensorflow-cpu"), + ], +) diff --git a/tensorflow/lite/micro/testing/Dockerfile.riscv b/tensorflow/lite/micro/testing/Dockerfile.riscv new file mode 100644 index 0000000..4f7ac55 --- /dev/null +++ b/tensorflow/lite/micro/testing/Dockerfile.riscv @@ -0,0 +1,24 @@ +# Copyright 2018 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================== + +# This docker configuration file lets you emulate a Hifive1 board +# on an x86 desktop or laptop, which can be useful for debugging and +# automated testing. +FROM antmicro/renode:latest + +LABEL maintainer="Pete Warden " + +RUN apt-get update +RUN apt-get install -y curl git unzip make g++ \ No newline at end of file diff --git a/tensorflow/lite/micro/testing/bluepill.resc b/tensorflow/lite/micro/testing/bluepill.resc new file mode 100644 index 0000000..78af665 --- /dev/null +++ b/tensorflow/lite/micro/testing/bluepill.resc @@ -0,0 +1,25 @@ +# Copyright 2020 The TensorFlow Authors. All Rights Reserved. 
+# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================== + +using sysbus + +mach create +machine LoadPlatformDescription @platforms/cpus/stm32f103.repl + +# These lines are needed to show the results of DebugLog calls in the output. +machine LoadPlatformDescriptionFromString "uartSemihosting: UART.SemihostingUart @ cpu" +showAnalyzer cpu.uartSemihosting Antmicro.Renode.Analyzers.LoggingUartAnalyzer +cpu.uartSemihosting CreateFileBackend $logfile true + diff --git a/tensorflow/lite/micro/testing/bluepill.resource.txt b/tensorflow/lite/micro/testing/bluepill.resource.txt new file mode 100644 index 0000000..761a47b --- /dev/null +++ b/tensorflow/lite/micro/testing/bluepill.resource.txt @@ -0,0 +1,2 @@ +*** Variables *** +${UART} sysbus.cpu.uartSemihosting diff --git a/tensorflow/lite/micro/testing/bluepill_nontest.resc b/tensorflow/lite/micro/testing/bluepill_nontest.resc new file mode 100644 index 0000000..c345014 --- /dev/null +++ b/tensorflow/lite/micro/testing/bluepill_nontest.resc @@ -0,0 +1,22 @@ +# Copyright 2020 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================== + +mach create +# Load platform specification +machine LoadPlatformDescription @platforms/cpus/stm32f103.repl +# Create additional semihosting interface peripheral +machine LoadPlatformDescriptionFromString "uartSemihosting: UART.SemihostingUart @ cpu" +showAnalyzer sysbus.cpu.uartSemihosting + diff --git a/tensorflow/lite/micro/testing/generate_test_models.py b/tensorflow/lite/micro/testing/generate_test_models.py new file mode 100644 index 0000000..25902d0 --- /dev/null +++ b/tensorflow/lite/micro/testing/generate_test_models.py @@ -0,0 +1,88 @@ +# Copyright 2020 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================== +"""Python utility script to generate unit test model data.""" + +# Steps to regenerate model test data: +# TODO(b/158011574): Do these steps in the script here instead of manually. +# 1.) Run this script +# 2.) 
Hexdump the model into a .h/.cc file: +# xxd -i /tmp/tf_micro_conv_test_model.tflite > /tmp/temp.cc +# 3.) Copy/replace contents of temp.cc into desired header/source files (e.g. +# test_conv_model.h/.cc + +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function + +from absl import app +import numpy as np +import tensorflow as tf + + +def generate_conv_model(write_to_file=True, + filename="/tmp/tf_micro_conv_test_model.int8.tflite"): + """Creates a basic Keras model and converts to tflite. + + This model does not make any relevant classifications. It only exists to + generate a model that is designed to run on embedded devices. + """ + np.random.seed(0) + input_shape = (16, 16, 1) + + model = tf.keras.models.Sequential() + model.add( + tf.keras.layers.Conv2D(16, 3, activation="relu", + input_shape=input_shape)) + model.add(tf.keras.layers.Conv2D(32, 3, activation="relu")) + model.add(tf.keras.layers.MaxPooling2D(2)) + model.add(tf.keras.layers.Flatten()) + model.add(tf.keras.layers.Dense(10)) + model.compile(optimizer="adam", + loss="categorical_crossentropy", + metrics=["accuracy"]) + model.summary() + + # Test with random data + data_x = np.random.rand(12, 16, 16, 1) + data_y = np.random.randint(2, size=(12, 10)) + model.fit(data_x, data_y, epochs=5) + + def representative_dataset_gen(): + np.random.seed(0) + for _ in range(12): + yield [np.random.rand(16, 16).reshape(1, 16, 16, 1).astype(np.float32)] + + # Now convert to a TFLite model with full int8 quantization: + converter = tf.lite.TFLiteConverter.from_keras_model(model) + converter.optimizations = [tf.lite.Optimize.DEFAULT] + converter.target_spec.supported_ops = [tf.lite.OpsSet.TFLITE_BUILTINS_INT8] + converter.inference_input_type = tf.int8 + converter.inference_output_type = tf.int8 + converter.representative_dataset = representative_dataset_gen + + tflite_model = converter.convert() + if write_to_file: + open(filename, "wb").write(tflite_model) + + 
return tflite_model + + +def main(argv): + del argv # Unused for now + generate_conv_model() + + +if __name__ == "__main__": + app.run(main) diff --git a/tensorflow/lite/micro/testing/micro_test.h b/tensorflow/lite/micro/testing/micro_test.h new file mode 100644 index 0000000..2e119e1 --- /dev/null +++ b/tensorflow/lite/micro/testing/micro_test.h @@ -0,0 +1,266 @@ +/* Copyright 2018 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +// An ultra-lightweight testing framework designed for use with microcontroller +// applications. This is designed to be usable even +// when no standard C or C++ libraries are available, and without any dynamic +// memory allocation or reliance on global constructors. +// +// To build a test, you use syntax similar to gunit, but with some extra +// decoration to create a hidden 'main' function containing each of the tests to +// be run. Your code should look something like: +// ---------------------------------------------------------------------------- +// #include "path/to/this/header" +// +// TF_LITE_MICRO_TESTS_BEGIN +// +// TF_LITE_MICRO_TEST(SomeTest) { +// TF_LITE_LOG_EXPECT_EQ(true, true); +// } +// +// TF_LITE_MICRO_TESTS_END +// ---------------------------------------------------------------------------- +// If you compile this for your platform, you'll get a normal binary that you +// should be able to run. 
Executing it will output logging information like this +// to stderr: +// ---------------------------------------------------------------------------- +// Testing SomeTest +// 1/1 tests passed +// ~~~ALL TESTS PASSED~~~ +// ---------------------------------------------------------------------------- +// This is designed to be human-readable, so you can just run tests manually, +// but the string "~~~ALL TESTS PASSED~~~" should only appear if all of the +// tests do pass. This makes it possible to integrate with automated test +// systems by scanning the output logs and looking for that magic value. +// +// This framework is intended to be a rudimentary alternative to no testing at +// all on systems that struggle to run more conventional approaches, so use with +// caution! + +#ifndef TENSORFLOW_LITE_MICRO_TESTING_MICRO_TEST_H_ +#define TENSORFLOW_LITE_MICRO_TESTING_MICRO_TEST_H_ +#include <limits> +#include <type_traits> + +#include "tensorflow/lite/c/common.h" +#include "tensorflow/lite/micro/micro_log.h" +#include "tensorflow/lite/micro/system_setup.h" + +namespace micro_test { +extern int tests_passed; +extern int tests_failed; +extern bool is_test_complete; +extern bool did_test_fail; +} // namespace micro_test + +namespace tflite { + +// This additional helper function is used (instead of directly calling +// tflite::InitializeTarget from the TF_LITE_MICRO_TESTS_BEGIN macro) to avoid +// adding a dependency from every bazel test target to micro:system_setup (which +// is the target that implements InitializeTarget()). +// +// The underlying issue here is that the use of the macros results in +// dependencies that can be contained within the micro/testing:micro_test +// target bleeding on to all the tests.
+inline void InitializeTest() { InitializeTarget(); } +} // namespace tflite + +#define TF_LITE_MICRO_TESTS_BEGIN \ + namespace micro_test { \ + int tests_passed; \ + int tests_failed; \ + bool is_test_complete; \ + bool did_test_fail; \ + } \ + \ + int main(int argc, char** argv) { \ + micro_test::tests_passed = 0; \ + micro_test::tests_failed = 0; \ + tflite::InitializeTest(); + +#define TF_LITE_MICRO_TESTS_END \ + MicroPrintf("%d/%d tests passed", micro_test::tests_passed, \ + (micro_test::tests_failed + micro_test::tests_passed)); \ + if (micro_test::tests_failed == 0) { \ + MicroPrintf("~~~ALL TESTS PASSED~~~\n"); \ + return kTfLiteOk; \ + } else { \ + MicroPrintf("~~~SOME TESTS FAILED~~~\n"); \ + return kTfLiteError; \ + } \ + } + +// TODO(petewarden): I'm going to hell for what I'm doing to this poor for loop. +#define TF_LITE_MICRO_TEST(name) \ + MicroPrintf("Testing " #name); \ + for (micro_test::is_test_complete = false, \ + micro_test::did_test_fail = false; \ + !micro_test::is_test_complete; micro_test::is_test_complete = true, \ + micro_test::tests_passed += (micro_test::did_test_fail) ? 0 : 1, \ + micro_test::tests_failed += (micro_test::did_test_fail) ? 1 : 0) + +#define TF_LITE_MICRO_EXPECT(x) \ + do { \ + if (!(x)) { \ + MicroPrintf(#x " failed at %s:%d", __FILE__, __LINE__); \ + micro_test::did_test_fail = true; \ + } \ + } while (false) + +#define TF_LITE_MICRO_EXPECT_EQ(x, y) \ + do { \ + auto vx = x; \ + auto vy = y; \ + bool isFloatingX = (std::is_floating_point::value); \ + bool isFloatingY = (std::is_floating_point::value); \ + if (isFloatingX && isFloatingY) { \ + auto delta = ((vx) > (vy)) ? 
((vx) - (vy)) : ((vy) - (vx)); \ + if (delta > std::numeric_limits::epsilon()) { \ + MicroPrintf(#x " == " #y " failed at %s:%d (%f vs %f)", __FILE__, \ + __LINE__, static_cast(vx), \ + static_cast(vy)); \ + micro_test::did_test_fail = true; \ + } \ + } else if ((vx) != (vy)) { \ + MicroPrintf(#x " == " #y " failed at %s:%d (%d vs %d)", __FILE__, \ + __LINE__, static_cast(vx), static_cast(vy)); \ + if (isFloatingX || isFloatingY) { \ + MicroPrintf("-----------WARNING-----------"); \ + MicroPrintf("Only one of the values is floating point value."); \ + } \ + micro_test::did_test_fail = true; \ + } \ + } while (false) + +#define TF_LITE_MICRO_EXPECT_NE(x, y) \ + do { \ + auto vx = x; \ + auto vy = y; \ + bool isFloatingX = (std::is_floating_point::value); \ + bool isFloatingY = (std::is_floating_point::value); \ + if (isFloatingX && isFloatingY) { \ + auto delta = ((vx) > (vy)) ? ((vx) - (vy)) : ((vy) - (vx)); \ + if (delta <= std::numeric_limits::epsilon()) { \ + MicroPrintf(#x " != " #y " failed at %s:%d", __FILE__, __LINE__); \ + micro_test::did_test_fail = true; \ + } \ + } else if ((vx) == (vy)) { \ + MicroPrintf(#x " != " #y " failed at %s:%d", __FILE__, __LINE__); \ + if (isFloatingX || isFloatingY) { \ + MicroPrintf("-----------WARNING-----------"); \ + MicroPrintf("Only one of the values is floating point value."); \ + } \ + micro_test::did_test_fail = true; \ + } \ + } while (false) + +// TODO(wangtz): Making it more generic once needed. +#define TF_LITE_MICRO_ARRAY_ELEMENT_EXPECT_NEAR(arr1, idx1, arr2, idx2, \ + epsilon) \ + do { \ + auto delta = ((arr1)[(idx1)] > (arr2)[(idx2)]) \ + ? 
((arr1)[(idx1)] - (arr2)[(idx2)]) \ + : ((arr2)[(idx2)] - (arr1)[(idx1)]); \ + if (delta > epsilon) { \ + MicroPrintf(#arr1 "[%d] (%f) near " #arr2 "[%d] (%f) failed at %s:%d", \ + static_cast(idx1), static_cast((arr1)[(idx1)]), \ + static_cast(idx2), static_cast((arr2)[(idx2)]), \ + __FILE__, __LINE__); \ + micro_test::did_test_fail = true; \ + } \ + } while (false) + +// The check vx != vy is needed to properly handle the case where both +// x and y evaluate to infinity. See #46960 for more details. +#define TF_LITE_MICRO_EXPECT_NEAR(x, y, epsilon) \ + do { \ + auto vx = (x); \ + auto vy = (y); \ + auto delta = ((vx) > (vy)) ? ((vx) - (vy)) : ((vy) - (vx)); \ + if (vx != vy && delta > epsilon) { \ + MicroPrintf(#x " (%f) near " #y " (%f) failed at %s:%d", \ + static_cast(vx), static_cast(vy), __FILE__, \ + __LINE__); \ + micro_test::did_test_fail = true; \ + } \ + } while (false) + +#define TF_LITE_MICRO_EXPECT_GT(x, y) \ + do { \ + if ((x) <= (y)) { \ + MicroPrintf(#x " > " #y " failed at %s:%d", __FILE__, __LINE__); \ + micro_test::did_test_fail = true; \ + } \ + } while (false) + +#define TF_LITE_MICRO_EXPECT_LT(x, y) \ + do { \ + if ((x) >= (y)) { \ + MicroPrintf(#x " < " #y " failed at %s:%d", __FILE__, __LINE__); \ + micro_test::did_test_fail = true; \ + } \ + } while (false) + +#define TF_LITE_MICRO_EXPECT_GE(x, y) \ + do { \ + if ((x) < (y)) { \ + MicroPrintf(#x " >= " #y " failed at %s:%d", __FILE__, __LINE__); \ + micro_test::did_test_fail = true; \ + } \ + } while (false) + +#define TF_LITE_MICRO_EXPECT_LE(x, y) \ + do { \ + if ((x) > (y)) { \ + MicroPrintf(#x " <= " #y " failed at %s:%d", __FILE__, __LINE__); \ + micro_test::did_test_fail = true; \ + } \ + } while (false) + +#define TF_LITE_MICRO_EXPECT_TRUE(x) \ + do { \ + if (!(x)) { \ + MicroPrintf(#x " was not true failed at %s:%d", __FILE__, __LINE__); \ + micro_test::did_test_fail = true; \ + } \ + } while (false) + +#define TF_LITE_MICRO_EXPECT_FALSE(x) \ + do { \ + if (x) { \ + MicroPrintf(#x 
" was not false failed at %s:%d", __FILE__, __LINE__); \ + micro_test::did_test_fail = true; \ + } \ + } while (false) + +#define TF_LITE_MICRO_FAIL(msg) \ + do { \ + MicroPrintf("FAIL: %s", msg, __FILE__, __LINE__); \ + micro_test::did_test_fail = true; \ + } while (false) + +#define TF_LITE_MICRO_EXPECT_STRING_EQ(string1, string2) \ + do { \ + for (int i = 0; string1[i] != '\0' && string2[i] != '\0'; i++) { \ + if (string1[i] != string2[i]) { \ + MicroPrintf("FAIL: %s did not match %s", string1, string2, __FILE__, \ + __LINE__); \ + micro_test::did_test_fail = true; \ + } \ + } \ + } while (false) + +#endif // TENSORFLOW_LITE_MICRO_TESTING_MICRO_TEST_H_ diff --git a/tensorflow/lite/micro/testing/riscv32_mcu.resc b/tensorflow/lite/micro/testing/riscv32_mcu.resc new file mode 100644 index 0000000..cc3baeb --- /dev/null +++ b/tensorflow/lite/micro/testing/riscv32_mcu.resc @@ -0,0 +1,39 @@ +# Copyright 2021 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================== + +using sysbus + +mach create + +# This is a little hack to increase our ram size from 16k to 256k, since some +# tests require a larger ram size. 
The platform's linker script is also modified +# to account for the larger ram size, see patch_sifive_sdk() in download_and_extract.sh +set platform +""" +using "platforms/cpus/sifive-fe310.repl" + +dtim: + size: 0x40000 +""" + +machine LoadPlatformDescriptionFromString $platform + +sysbus Tag <0x10008000 4> "PRCI_HFROSCCFG" 0xFFFFFFFF +sysbus Tag <0x10008008 4> "PRCI_PLLCFG" 0xFFFFFFFF + +showAnalyzer uart0 Antmicro.Renode.Analyzers.LoggingUartAnalyzer +uart0 CreateFileBackend $logfile true + +cpu PerformanceInMips 320 diff --git a/tensorflow/lite/micro/testing/riscv32_mcu.resource.txt b/tensorflow/lite/micro/testing/riscv32_mcu.resource.txt new file mode 100644 index 0000000..c271384 --- /dev/null +++ b/tensorflow/lite/micro/testing/riscv32_mcu.resource.txt @@ -0,0 +1,2 @@ +*** Variables *** +${UART} sysbus.uart0 diff --git a/tensorflow/lite/micro/testing/robot.resource.txt b/tensorflow/lite/micro/testing/robot.resource.txt new file mode 100644 index 0000000..caa0b88 --- /dev/null +++ b/tensorflow/lite/micro/testing/robot.resource.txt @@ -0,0 +1,23 @@ +*** Keywords *** +Teardown With Custom Message + Test Teardown + [Documentation] Replace robot fail message with whole UART output + ${UART_LOGS} Get File ${UART_LOG} + Set Test Message UART OUTPUT:\n\n${UART_LOGS} + Remove File ${UART_LOG} + +Create Platform + Execute Command $logfile=@${UART_LOG} + Execute Script ${RESC} + Provides ready-platform Reexecution + +Test Binary + [Arguments] ${BIN} + Requires ready-platform + Execute Command sysbus LoadELF ${BIN} + + Create Terminal Tester ${UART} timeout=30 + Start Emulation + + Wait For Line On Uart ${UART_LINE_ON_SUCCESS} + diff --git a/tensorflow/lite/micro/testing/size_hexagon_binary.sh b/tensorflow/lite/micro/testing/size_hexagon_binary.sh new file mode 100755 index 0000000..677ab35 --- /dev/null +++ b/tensorflow/lite/micro/testing/size_hexagon_binary.sh @@ -0,0 +1,66 @@ +#!/bin/bash -e +# Copyright 2020 The TensorFlow Authors. All Rights Reserved. 
+# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================== +# +# Neasures the size of a Qualcomm Hexagon binary by parsing the output of +# hexgaon-size. If an optional list of symbols is provided, the symbols' sizes +# are excluded from the total. This is useful when the binary contains symbols +# that are only used during testing. +# +# First argument is the binary location. +# Second argument is a regular expression for symbols that need to be excluded +# from the measurement + +declare -r TEST_TMPDIR=/tmp/test_hexagon_binary/ +declare -r MICRO_LOG_PATH=${TEST_TMPDIR}/$1 +declare -r MICRO_LOG_FILENAME=${MICRO_LOG_PATH}/logs.txt +mkdir -p ${MICRO_LOG_PATH} + +hexagon-elfcopy $1 $1.elf + +raw_size=$(hexagon-size $1.elf) +# Skip the title row +sizes=$(echo "${raw_size}" | sed -n '2 p') +text_size=$(echo "$sizes" | awk '{print $1}') +data_size=$(echo "$sizes" | awk '{print $2}') +bss_size=$(echo "$sizes" | awk '{print $3}') +total_size=$(echo "$sizes" | awk '{print $4}') + +symbols=$(hexagon-nm -S $1.elf | grep -w $2) + +while IFS= read -r line; do + symbol_size=$((16#$(echo $line | awk '{print $2}'))) + symbol_type=$(echo $line | awk '{print $3}') + symbol_name=$(echo $line | awk '{print $4}') + + total_size=$(("$total_size"-"$symbol_size")) + if [[ "$symbol_type" =~ [DdRr] ]]; then + # Data and readonly symbols are counted as data + data_size=$(("$data_size"-"$symbol_size")) + elif [[ 
"$symbol_type" =~ [Tt] ]]; then + # Text symbols + text_size=$(("$text_size"-"$symbol_size")) + elif [[ "$symbol_type" =~ [Bb] ]]; then + # BSS symbols + bss_size=$(("$bss_size"-"$symbol_size")) + else + echo "The symbol $(symbol_name)'s type isn't recognized" + exit 1 + fi +done <<< "$symbols" +str="text data bss total +$text_size $data_size $bss_size $total_size" +echo "$str" +exit 0 diff --git a/tensorflow/lite/micro/testing/size_riscv32_binary.sh b/tensorflow/lite/micro/testing/size_riscv32_binary.sh new file mode 100755 index 0000000..cc0cb88 --- /dev/null +++ b/tensorflow/lite/micro/testing/size_riscv32_binary.sh @@ -0,0 +1,63 @@ +#!/bin/bash -e +# Copyright 2020 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================== +# +# Neasures the size of a riscv binary by parsing the output of +# the 'size' program. If an optional list of symbols is provided, the symbols' sizes +# are excluded from the total. This is useful when the binary contains symbols +# that are only used during testing. +# +# First argument is the binary location. 
+# Second argument is a regular expression for symbols that need to be excluded +# from the measurement +declare -r TEST_TMPDIR=/tmp/test_${1}_binary/ +declare -r MICRO_LOG_PATH=${TEST_TMPDIR}/$1 +declare -r MICRO_LOG_FILENAME=${MICRO_LOG_PATH}/logs.txt +mkdir -p ${MICRO_LOG_PATH} +raw_size=$(riscv64-unknown-elf-size $1) +# Skip the title row +sizes=$(echo "${raw_size}" | sed -n '2 p') +text_size=$(echo "$sizes" | awk '{print $1}') +data_size=$(echo "$sizes" | awk '{print $2}') +bss_size=$(echo "$sizes" | awk '{print $3}') +total_size=$(echo "$sizes" | awk '{print $4}') +symbols=$(riscv64-unknown-elf-nm -S $1 | grep -w $2) +while IFS= read -r line; do + symbol_size=$((16#$(echo $line | awk '{print $2}'))) + symbol_type=$(echo $line | awk '{print $3}') + symbol_name=$(echo $line | awk '{print $4}') + total_size=$(("$total_size"-"$symbol_size")) + if [[ "$symbol_type" =~ [Dd] ]]; then + data_size=$(("$data_size"-"$symbol_size")) + elif [[ "$symbol_type" =~ [TtRr] ]]; then + # Text symbols + # Readonly symbols are usually counted as text + text_size=$(("$text_size"-"$symbol_size")) + elif [[ "$symbol_type" =~ [Bb] ]]; then + # BSS symbols + bss_size=$(("$bss_size"-"$symbol_size")) + elif [[ "$symbol_type" =~ [Gg] ]]; then + # Data symbols + data_size=$(("$data_size"-"$symbol_size")) + else + echo "The symbol ${symbol_name}'s type isn't recognized" + exit 1 + fi +done <<< "$symbols" +str="text data bss total +$text_size $data_size $bss_size $total_size" +echo "$str" +exit 0 + diff --git a/tensorflow/lite/micro/testing/size_xtensa_binary.sh b/tensorflow/lite/micro/testing/size_xtensa_binary.sh new file mode 100755 index 0000000..24dd936 --- /dev/null +++ b/tensorflow/lite/micro/testing/size_xtensa_binary.sh @@ -0,0 +1,66 @@ +#!/bin/bash -e +# Copyright 2021 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================== +# +# Measures the size of an xtensa binary by parsing the output of +# xt-size. If an optional list of symbols is provided, the symbols' sizes +# are excluded from the total. This is useful when the binary contains symbols +# that are only used during testing. +# +# First argument is the binary location. +# Second argument is a regular expression for symbols that need to be excluded +# from the measurement + +declare -r TEST_TMPDIR=/tmp/test_xtensa_binary/ +declare -r MICRO_LOG_PATH=${TEST_TMPDIR}/$1 +declare -r MICRO_LOG_FILENAME=${MICRO_LOG_PATH}/logs.txt +mkdir -p ${MICRO_LOG_PATH} + +cp $1 $1.elf + +raw_size=$(xt-size $1.elf) +# Skip the title row +sizes=$(echo "${raw_size}" | sed -n '2 p') +text_size=$(echo "$sizes" | awk '{print $1}') +data_size=$(echo "$sizes" | awk '{print $2}') +bss_size=$(echo "$sizes" | awk '{print $3}') +total_size=$(echo "$sizes" | awk '{print $4}') + +symbols=$(xt-nm -S $1.elf | grep -w $2) + +while IFS= read -r line; do + symbol_size=$((16#$(echo $line | awk '{print $2}'))) + symbol_type=$(echo $line | awk '{print $3}') + symbol_name=$(echo $line | awk '{print $4}') + + total_size=$(("$total_size"-"$symbol_size")) + if [[ "$symbol_type" =~ [DdRr] ]]; then + # Data and readonly symbols are counted as data + data_size=$(("$data_size"-"$symbol_size")) + elif [[ "$symbol_type" =~ [Tt] ]]; then + # Text symbols + text_size=$(("$text_size"-"$symbol_size")) + elif [[ "$symbol_type" =~ [Bb] ]]; then + # BSS symbols + 
bss_size=$(("$bss_size"-"$symbol_size")) + else + echo "The symbol $(symbol_name)'s type isn't recognized" + exit 1 + fi +done <<< "$symbols" +str="text data bss total +$text_size $data_size $bss_size $total_size" +echo "$str" +exit 0 diff --git a/tensorflow/lite/micro/testing/test_conv_model.cc b/tensorflow/lite/micro/testing/test_conv_model.cc new file mode 100644 index 0000000..358479c --- /dev/null +++ b/tensorflow/lite/micro/testing/test_conv_model.cc @@ -0,0 +1,1799 @@ +/* Copyright 2020 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/ + +#include "tensorflow/lite/micro/testing/test_conv_model.h" + +extern const unsigned char kTestConvModelData[] = { + 0x24, 0x00, 0x00, 0x00, 0x54, 0x46, 0x4c, 0x33, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x12, 0x00, 0x1c, 0x00, 0x04, 0x00, + 0x08, 0x00, 0x0c, 0x00, 0x10, 0x00, 0x14, 0x00, 0x00, 0x00, 0x18, 0x00, + 0x12, 0x00, 0x00, 0x00, 0x03, 0x00, 0x00, 0x00, 0xb4, 0x52, 0x00, 0x00, + 0x3c, 0x42, 0x00, 0x00, 0x24, 0x42, 0x00, 0x00, 0x3c, 0x00, 0x00, 0x00, + 0x04, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x0c, 0x00, 0x00, 0x00, + 0x08, 0x00, 0x0c, 0x00, 0x04, 0x00, 0x08, 0x00, 0x08, 0x00, 0x00, 0x00, + 0x08, 0x00, 0x00, 0x00, 0x0e, 0x00, 0x00, 0x00, 0x13, 0x00, 0x00, 0x00, + 0x6d, 0x69, 0x6e, 0x5f, 0x72, 0x75, 0x6e, 0x74, 0x69, 0x6d, 0x65, 0x5f, + 0x76, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x00, 0x0f, 0x00, 0x00, 0x00, + 0xd4, 0x41, 0x00, 0x00, 0xc0, 0x41, 0x00, 0x00, 0x64, 0x41, 0x00, 0x00, + 0xc0, 0x40, 0x00, 0x00, 0x7c, 0x40, 0x00, 0x00, 0x58, 0x40, 0x00, 0x00, + 0x44, 0x13, 0x00, 0x00, 0xa0, 0x12, 0x00, 0x00, 0x8c, 0x00, 0x00, 0x00, + 0x80, 0x00, 0x00, 0x00, 0x6c, 0x00, 0x00, 0x00, 0x58, 0x00, 0x00, 0x00, + 0x44, 0x00, 0x00, 0x00, 0x30, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, + 0xd6, 0xbe, 0xff, 0xff, 0x04, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, + 0x31, 0x2e, 0x35, 0x2e, 0x30, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x94, 0xb2, 0xff, 0xff, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xa4, 0xb2, 0xff, 0xff, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0xb4, 0xb2, 0xff, 0xff, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0xc4, 0xb2, 0xff, 0xff, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xd4, 0xb2, 0xff, 0xff, + 0x00, 0x00, 0x00, 
0x00, 0x46, 0xbf, 0xff, 0xff, 0x04, 0x00, 0x00, 0x00, + 0x00, 0x12, 0x00, 0x00, 0x7d, 0x6a, 0x24, 0xa1, 0xf6, 0xca, 0x70, 0x2f, + 0x8e, 0xb1, 0xe8, 0x15, 0x42, 0x08, 0x32, 0xf6, 0xe9, 0xfb, 0xa0, 0xda, + 0xe4, 0xf1, 0x0a, 0x9d, 0x72, 0x66, 0x88, 0x37, 0xe9, 0x9e, 0x08, 0x54, + 0x61, 0x51, 0x40, 0x93, 0x4d, 0xcf, 0xe2, 0x08, 0x36, 0xad, 0xb1, 0x8e, + 0xfc, 0xe4, 0x02, 0xd1, 0x9a, 0x1e, 0x05, 0x67, 0xa3, 0x3b, 0xa6, 0xde, + 0x5d, 0x2a, 0xcc, 0x8c, 0x3c, 0x2e, 0xd2, 0x15, 0xc2, 0x60, 0xab, 0xea, + 0x73, 0xe4, 0x88, 0xc1, 0x66, 0x21, 0xb0, 0xe5, 0x5b, 0x55, 0xda, 0x69, + 0x2d, 0x0c, 0x66, 0x07, 0x74, 0x36, 0xcd, 0x79, 0x81, 0xf9, 0x5c, 0x2c, + 0xb5, 0x93, 0xab, 0x76, 0xa1, 0x1f, 0x20, 0x90, 0x89, 0xe1, 0x41, 0xc7, + 0x32, 0xc2, 0xa3, 0x03, 0x77, 0x86, 0x79, 0xf7, 0x89, 0xc1, 0xb1, 0x42, + 0x2a, 0x75, 0xc7, 0xc1, 0x2f, 0xbb, 0xf6, 0xe8, 0x23, 0x99, 0x9b, 0x74, + 0x9c, 0xe5, 0x91, 0x15, 0xc6, 0x08, 0x0e, 0xae, 0x7c, 0xd3, 0x27, 0x54, + 0xfb, 0xa7, 0x49, 0x65, 0x52, 0x2f, 0x63, 0x33, 0x8b, 0x5f, 0x67, 0x21, + 0x25, 0xe0, 0xcf, 0x95, 0x03, 0x05, 0x19, 0x0c, 0x3d, 0xfc, 0x95, 0x42, + 0xa9, 0x26, 0x27, 0x54, 0xa3, 0x71, 0xb4, 0x70, 0x7a, 0x40, 0x0d, 0xc1, + 0x72, 0x04, 0x81, 0x3b, 0xb9, 0xb7, 0xd2, 0xc1, 0x4e, 0xf8, 0xff, 0xca, + 0x66, 0xc1, 0xbe, 0xb9, 0x09, 0xbd, 0xb9, 0x2c, 0x5b, 0x97, 0xc3, 0xa8, + 0xf6, 0xc4, 0x23, 0x93, 0x2e, 0xf6, 0xce, 0x2e, 0xdb, 0xfb, 0x8f, 0xb0, + 0xc8, 0xba, 0xfa, 0x97, 0xfd, 0xc0, 0x0a, 0xc8, 0x2c, 0xf3, 0x4c, 0x4d, + 0x8b, 0x3b, 0x47, 0x11, 0xfb, 0xe8, 0x96, 0xe3, 0xcc, 0xef, 0xe4, 0xb5, + 0x07, 0xa1, 0xb7, 0xa9, 0xf7, 0x98, 0x71, 0x59, 0x9b, 0x5a, 0x7b, 0x88, + 0xe4, 0xcf, 0x9b, 0x55, 0x26, 0xce, 0x59, 0x73, 0x66, 0x17, 0x9c, 0x74, + 0x02, 0xfc, 0x24, 0x01, 0xde, 0x44, 0x98, 0xe3, 0x8b, 0x18, 0x02, 0x42, + 0xf5, 0x0f, 0xbc, 0xcb, 0xf7, 0x37, 0xb1, 0xd5, 0xb4, 0x7c, 0x0a, 0x6a, + 0x59, 0x59, 0xc9, 0x11, 0xd8, 0x0f, 0xf9, 0xab, 0x40, 0xdd, 0x14, 0xf9, + 0x30, 0xaa, 0xf1, 0x8c, 0x6d, 0xbc, 0x4c, 0x5b, 0x71, 0x95, 0xfd, 0x41, + 0x4c, 0xf3, 0xb4, 
0x7f, 0x1c, 0xb6, 0x4b, 0x12, 0x3b, 0x6e, 0xc1, 0xce, + 0x6f, 0xf8, 0x57, 0xb7, 0x5e, 0x2a, 0x36, 0x32, 0x3d, 0x85, 0xc6, 0xbf, + 0xd7, 0xab, 0x95, 0x45, 0x62, 0xae, 0xb8, 0xa6, 0x03, 0xcc, 0x21, 0x25, + 0x18, 0x5a, 0xa8, 0x03, 0x27, 0x33, 0x47, 0xb1, 0x7e, 0x0e, 0xbd, 0xc3, + 0x24, 0x25, 0x78, 0x28, 0xa4, 0xe3, 0x5b, 0x08, 0xbf, 0x04, 0xa2, 0xae, + 0x90, 0x4c, 0x96, 0x78, 0xa8, 0xb1, 0xb8, 0x54, 0x89, 0x25, 0x2d, 0x35, + 0x93, 0x95, 0xa5, 0xd3, 0x1a, 0xe6, 0x00, 0x8b, 0xfe, 0x36, 0x0f, 0xd2, + 0x6e, 0xff, 0x86, 0x93, 0x48, 0xb8, 0x08, 0x39, 0x1f, 0x3a, 0x2d, 0xe7, + 0x47, 0x5e, 0x05, 0x66, 0x7a, 0xb8, 0xe4, 0xda, 0xbc, 0x5b, 0x57, 0xdf, + 0xd9, 0x0a, 0xb9, 0x48, 0x5d, 0x0c, 0x57, 0xed, 0x8d, 0xbb, 0x8d, 0x4b, + 0x0e, 0xb8, 0xea, 0x02, 0x06, 0x2f, 0xfd, 0x28, 0x0d, 0x0b, 0xf4, 0xf4, + 0x52, 0x81, 0x77, 0x15, 0x87, 0x53, 0x28, 0xef, 0xbe, 0xc6, 0x4c, 0x45, + 0x3e, 0x1a, 0x6e, 0xbd, 0x10, 0xd8, 0x9a, 0x72, 0x1f, 0x14, 0xe2, 0x37, + 0x08, 0xaf, 0xfa, 0xce, 0xd3, 0x84, 0x23, 0x43, 0x8c, 0x5c, 0xce, 0x1b, + 0xf7, 0xf3, 0xb0, 0x3b, 0xfd, 0x33, 0xf8, 0x09, 0xf1, 0x41, 0xa5, 0xa8, + 0x86, 0x8d, 0x56, 0xde, 0xf6, 0x68, 0xe3, 0x4c, 0x97, 0xa6, 0xc3, 0x66, + 0x9b, 0xa9, 0x8a, 0xbd, 0x59, 0x45, 0xfb, 0xdf, 0xa1, 0x42, 0x10, 0x1c, + 0x55, 0x22, 0x53, 0xe1, 0x32, 0x33, 0xf9, 0xfa, 0xc2, 0x70, 0x0f, 0x49, + 0x15, 0xa7, 0x21, 0xbc, 0x56, 0x35, 0x09, 0x06, 0xe6, 0x5e, 0xc4, 0xc1, + 0x64, 0x93, 0x59, 0x3b, 0x8e, 0xb7, 0x52, 0x6c, 0x4d, 0xa1, 0xb7, 0xee, + 0x14, 0xc2, 0x01, 0x25, 0xbb, 0x5e, 0xe0, 0xc6, 0xa4, 0x4f, 0xb5, 0x20, + 0x88, 0xe0, 0xd7, 0x5e, 0x26, 0x5b, 0x9f, 0xf7, 0xb5, 0x26, 0x5b, 0xfc, + 0xf3, 0x3e, 0xf3, 0x57, 0x6f, 0x9e, 0x9e, 0x51, 0x07, 0x6e, 0xc0, 0x53, + 0x17, 0x89, 0x79, 0xf0, 0x91, 0xb2, 0x54, 0x30, 0x1f, 0x97, 0x95, 0xfc, + 0x02, 0x2d, 0x0c, 0x06, 0xb0, 0x82, 0xad, 0x20, 0xc2, 0xdc, 0x78, 0xbc, + 0xbe, 0x5b, 0x88, 0xa0, 0xdd, 0x45, 0x49, 0x26, 0xec, 0xb4, 0xa5, 0x8b, + 0x7f, 0xdd, 0x40, 0xcf, 0x9e, 0xbe, 0x46, 0x4d, 0x36, 0xab, 0x0a, 0x34, + 0x1a, 0x2a, 0xd0, 
0xd3, 0x83, 0x96, 0xff, 0x88, 0xa4, 0xd8, 0x48, 0x75, + 0x2f, 0xcb, 0x3c, 0xc3, 0xbb, 0xc7, 0x2f, 0xe9, 0xf9, 0xa3, 0xde, 0x9d, + 0xbb, 0x5e, 0x37, 0x29, 0xf6, 0x75, 0xcc, 0x85, 0xeb, 0xf9, 0x73, 0xf7, + 0xdc, 0x31, 0x8c, 0x56, 0x52, 0x4a, 0x44, 0xa4, 0x2a, 0x2a, 0x51, 0x49, + 0x77, 0x6d, 0x35, 0x0a, 0xf9, 0x44, 0xaa, 0x36, 0x05, 0xef, 0x1e, 0x6b, + 0xe5, 0x65, 0x6b, 0xaa, 0xc1, 0x41, 0x9c, 0x62, 0xd0, 0x70, 0x78, 0xff, + 0x88, 0xe8, 0x5f, 0x3c, 0x2e, 0x00, 0x6c, 0xe3, 0xdb, 0xc3, 0x54, 0x66, + 0xa9, 0xf4, 0xe2, 0x4c, 0x91, 0x11, 0xc8, 0x3c, 0x39, 0x9b, 0x31, 0x81, + 0xc7, 0x11, 0x22, 0x62, 0xb7, 0x26, 0xa0, 0x0c, 0x2e, 0x6c, 0xe7, 0x34, + 0x3b, 0x1f, 0x27, 0xb3, 0xe5, 0x4f, 0xc9, 0x71, 0xb2, 0x18, 0x99, 0x59, + 0x95, 0xc6, 0x35, 0x4c, 0x5d, 0xa3, 0x59, 0xd1, 0x8b, 0x71, 0xea, 0xe7, + 0x30, 0x3f, 0xe7, 0x8c, 0x1a, 0x59, 0xeb, 0xc5, 0x5d, 0xbd, 0xe6, 0x00, + 0x67, 0x02, 0xfb, 0xca, 0x8d, 0xdf, 0x71, 0xb6, 0xed, 0xc7, 0xd2, 0xf2, + 0x72, 0x1b, 0xd3, 0x63, 0x51, 0x1f, 0x04, 0xe9, 0xf9, 0xe2, 0x38, 0x13, + 0x48, 0x63, 0x19, 0x66, 0x2b, 0x48, 0xc8, 0x1b, 0x9d, 0x19, 0x5a, 0x57, + 0x44, 0x2d, 0x30, 0xb5, 0xce, 0x3b, 0xcc, 0xae, 0xc4, 0x5e, 0x4e, 0x96, + 0x62, 0x5c, 0x53, 0x1f, 0xbf, 0xbd, 0xc8, 0x9d, 0xcf, 0x81, 0xb3, 0x1e, + 0xb0, 0x22, 0xd5, 0xbe, 0x60, 0x65, 0xd9, 0xeb, 0x11, 0x74, 0x8c, 0x24, + 0x18, 0x67, 0x45, 0xd3, 0xf8, 0x3f, 0xc5, 0xdf, 0xac, 0x65, 0xd4, 0x0c, + 0x82, 0x63, 0xd6, 0x43, 0x94, 0xa0, 0x3b, 0xff, 0x03, 0x0f, 0xbb, 0xe4, + 0x4d, 0x3b, 0x41, 0x9f, 0xf4, 0x1a, 0xa9, 0xdb, 0x15, 0x5b, 0x9a, 0x92, + 0xcb, 0xd5, 0xb8, 0x33, 0x5e, 0xea, 0x28, 0x3d, 0x2d, 0x30, 0x20, 0xcd, + 0xb6, 0x23, 0x18, 0x0e, 0x10, 0x2a, 0xa9, 0xe1, 0xad, 0xbc, 0x96, 0xd1, + 0xf9, 0xf3, 0x95, 0x4f, 0x2a, 0x0b, 0x91, 0xff, 0xf0, 0x96, 0x14, 0x00, + 0xaa, 0xfb, 0x1a, 0x44, 0x21, 0x9b, 0xe8, 0x71, 0x31, 0x9e, 0xd6, 0x58, + 0x7f, 0x02, 0x36, 0x5e, 0x92, 0x8d, 0x93, 0x99, 0xac, 0xb6, 0x87, 0x39, + 0xda, 0x47, 0xef, 0x70, 0xd4, 0xf7, 0x8d, 0x2a, 0xbd, 0x08, 0x40, 0x4d, + 0xec, 0xeb, 0x4e, 
0x1b, 0x85, 0x5d, 0x55, 0x64, 0x4c, 0xf3, 0x5e, 0x8f, + 0x68, 0x1e, 0x5e, 0x64, 0xc3, 0xb8, 0x92, 0x24, 0x41, 0x98, 0x78, 0x09, + 0x85, 0x87, 0x17, 0x2c, 0x88, 0x9e, 0x62, 0x86, 0x4f, 0x44, 0x71, 0x9c, + 0xa8, 0x73, 0xb3, 0x14, 0x1f, 0x3c, 0x96, 0x6b, 0xab, 0xad, 0x43, 0xdf, + 0x67, 0x34, 0x66, 0x30, 0x1d, 0x15, 0xd3, 0xe7, 0xd5, 0x8b, 0x00, 0xaa, + 0x11, 0x77, 0xea, 0x36, 0xc9, 0x49, 0x99, 0x93, 0x01, 0x6e, 0x00, 0x4a, + 0x93, 0x08, 0x2c, 0x44, 0x01, 0x91, 0xe0, 0x91, 0xdd, 0xab, 0x70, 0x4b, + 0xe7, 0xbf, 0x2d, 0x0f, 0xd4, 0x52, 0xa0, 0xf1, 0x5d, 0xa0, 0xcc, 0xb9, + 0x1b, 0xa2, 0x62, 0xeb, 0x23, 0x1e, 0x8e, 0xbb, 0x2b, 0xb6, 0xc5, 0x3a, + 0xdf, 0x32, 0x99, 0xde, 0x2e, 0x94, 0xcf, 0x98, 0x99, 0x34, 0x59, 0x60, + 0xcf, 0x57, 0xe0, 0xb0, 0xd9, 0x89, 0xaa, 0xc2, 0x4f, 0x1e, 0x38, 0x88, + 0xca, 0x32, 0x93, 0x9b, 0xa3, 0x2b, 0x17, 0x0b, 0x40, 0x5e, 0x69, 0xbd, + 0x14, 0x15, 0xca, 0x1a, 0x21, 0xdf, 0xa8, 0x4e, 0x14, 0x5e, 0x18, 0x40, + 0xe3, 0x4e, 0x04, 0x1f, 0xe5, 0x81, 0x53, 0x11, 0xae, 0x5e, 0x30, 0xe5, + 0xda, 0xd7, 0xf1, 0x3b, 0x72, 0x1b, 0xa5, 0xe3, 0x13, 0xad, 0x40, 0x54, + 0xae, 0xf0, 0xbc, 0x2b, 0xc1, 0x1a, 0x9c, 0xdd, 0xe1, 0xd0, 0x12, 0x10, + 0xfd, 0x59, 0xce, 0x36, 0x60, 0x86, 0xa0, 0xa7, 0xee, 0xe1, 0x02, 0xe6, + 0xf8, 0xf0, 0x5c, 0x4f, 0xa3, 0xa4, 0xe4, 0x09, 0xb9, 0xc3, 0x84, 0xe3, + 0x8d, 0x97, 0x21, 0x62, 0xf3, 0x11, 0x47, 0xb1, 0x4a, 0xce, 0x5b, 0x89, + 0xde, 0x86, 0xb5, 0x0e, 0xba, 0xbc, 0x8c, 0xcf, 0x54, 0x38, 0x3a, 0xc6, + 0xaf, 0x8c, 0x4d, 0x9d, 0xff, 0x58, 0x9b, 0xe8, 0x32, 0xb7, 0xa2, 0x29, + 0xad, 0x91, 0x3a, 0xa5, 0xc7, 0x54, 0xff, 0xd8, 0x47, 0x4f, 0x8f, 0x38, + 0x91, 0x12, 0x76, 0xa3, 0x2e, 0xf7, 0xdd, 0xba, 0xa7, 0xd4, 0x49, 0xe5, + 0xd1, 0x74, 0xe9, 0x2a, 0x29, 0xe4, 0x64, 0xb9, 0x58, 0x98, 0x0c, 0xe5, + 0x1f, 0xb2, 0x0e, 0x33, 0xea, 0xf8, 0x2e, 0xb1, 0x22, 0x46, 0xc2, 0x67, + 0x2d, 0xfe, 0x2e, 0xd3, 0xcf, 0xbc, 0x64, 0x7b, 0x75, 0x24, 0x53, 0x1c, + 0x42, 0x8c, 0x0b, 0x99, 0x9e, 0xa7, 0xa6, 0xb9, 0xfb, 0x5d, 0x86, 0x9f, + 0xe9, 0x04, 0x62, 
0xb2, 0x42, 0x81, 0xa2, 0x0d, 0x60, 0x83, 0x40, 0xbb, + 0x21, 0x10, 0xdf, 0xaa, 0xe6, 0x6c, 0x72, 0xc5, 0xb1, 0xad, 0x9f, 0xd2, + 0x91, 0xf8, 0xb6, 0x56, 0xfb, 0x2e, 0xb3, 0xc4, 0x12, 0xd9, 0x86, 0x29, + 0x6c, 0x55, 0x88, 0x72, 0xba, 0xfb, 0x9b, 0xb9, 0x6f, 0x2d, 0x7d, 0x75, + 0xd0, 0x9d, 0xaf, 0x44, 0xb6, 0xbd, 0x7b, 0xec, 0x78, 0xf1, 0xbf, 0x66, + 0xe8, 0x79, 0x66, 0x16, 0x5e, 0xf9, 0x68, 0x89, 0x5b, 0xde, 0x8f, 0xf9, + 0xeb, 0x04, 0x0b, 0x6a, 0x71, 0xa1, 0x3b, 0x46, 0x03, 0xb4, 0x29, 0xa9, + 0x31, 0xf4, 0xc5, 0xd3, 0x43, 0x6d, 0x88, 0x43, 0xa8, 0xef, 0xb7, 0xd7, + 0x75, 0x6b, 0x83, 0x35, 0xb6, 0x2f, 0xe0, 0x5f, 0xf2, 0x14, 0xcd, 0xd0, + 0x06, 0xb3, 0x5e, 0x8b, 0xdb, 0x86, 0x11, 0x94, 0x2f, 0xfb, 0x92, 0x19, + 0x52, 0x7f, 0xcb, 0xe5, 0x22, 0x27, 0x5f, 0xe4, 0x68, 0xb2, 0xcb, 0xc7, + 0xb8, 0xec, 0xfd, 0x9e, 0x39, 0x9c, 0x5b, 0xe4, 0xae, 0xca, 0x83, 0x19, + 0xcf, 0xf0, 0x01, 0xe3, 0xfc, 0xb0, 0x28, 0xda, 0x79, 0x84, 0xfb, 0xfe, + 0xa5, 0xb6, 0xb3, 0xd2, 0x73, 0xd3, 0x11, 0xe5, 0xdf, 0x7a, 0xd7, 0x82, + 0x78, 0x25, 0x06, 0x5b, 0x0f, 0x89, 0x9d, 0x0b, 0x9b, 0xd1, 0x1b, 0xc5, + 0xb7, 0x67, 0xef, 0x7c, 0xa2, 0xa3, 0xca, 0x27, 0xd0, 0x59, 0xb9, 0x99, + 0x86, 0xa9, 0xf6, 0x9a, 0x28, 0xf0, 0xbb, 0x42, 0xd2, 0xa0, 0xa8, 0x01, + 0x29, 0xa1, 0x0c, 0x1b, 0x33, 0x1b, 0x9c, 0xcb, 0xe4, 0x6c, 0x61, 0x0a, + 0xc4, 0xd7, 0x6c, 0xec, 0x86, 0xb3, 0xd2, 0xaa, 0x8c, 0xab, 0x1a, 0xf4, + 0x03, 0x2e, 0x2b, 0x42, 0xbe, 0xc1, 0x31, 0x1d, 0x57, 0x47, 0xdc, 0x7b, + 0xb5, 0x8f, 0x8b, 0xdf, 0x06, 0xad, 0x3f, 0xf4, 0x4f, 0xb5, 0x52, 0x07, + 0x4e, 0x25, 0xb3, 0x73, 0x34, 0x92, 0x6a, 0x89, 0x93, 0x28, 0x8b, 0x96, + 0x9d, 0xdb, 0xb4, 0x77, 0x81, 0x76, 0x86, 0xd2, 0xa5, 0x94, 0x76, 0x35, + 0xc9, 0x66, 0x4e, 0xd8, 0xc5, 0xc3, 0xc9, 0x34, 0xaf, 0xad, 0x4a, 0x7c, + 0x92, 0x24, 0xb1, 0x7d, 0x7d, 0xac, 0xf6, 0xcb, 0x8f, 0x36, 0xc1, 0xb2, + 0x63, 0x78, 0x99, 0x33, 0x23, 0x68, 0x6e, 0x71, 0x6a, 0xcc, 0x05, 0xf9, + 0x41, 0x92, 0x30, 0xf0, 0xb1, 0xb4, 0xa6, 0x46, 0x86, 0x62, 0xd9, 0xd9, + 0x94, 0x8a, 0xb2, 
0x9c, 0x68, 0xff, 0xf4, 0x3a, 0x2e, 0xaf, 0xee, 0xcf, + 0x04, 0x94, 0x53, 0x35, 0x25, 0xf9, 0xaa, 0x74, 0x93, 0xf3, 0x63, 0xc0, + 0xd2, 0x22, 0x30, 0x8c, 0xde, 0xa6, 0xb1, 0xb4, 0xa1, 0x56, 0x07, 0x06, + 0x71, 0xa2, 0x9e, 0x42, 0x31, 0xa3, 0x1e, 0xa6, 0x9a, 0xbc, 0x9f, 0x5b, + 0x12, 0x3c, 0xc2, 0x74, 0xf9, 0x61, 0x71, 0xef, 0x73, 0x86, 0xc2, 0x3b, + 0x25, 0x8a, 0x31, 0x72, 0x27, 0xac, 0xa4, 0x72, 0xf3, 0xbb, 0x78, 0x2c, + 0x94, 0xed, 0xa8, 0x3a, 0x42, 0x98, 0x34, 0xda, 0x3e, 0x60, 0x1c, 0x4a, + 0xec, 0x6b, 0x4e, 0x5f, 0x2a, 0x62, 0xb9, 0xad, 0xc9, 0xd9, 0x38, 0x90, + 0xa7, 0x3b, 0xd3, 0x1a, 0xbb, 0x81, 0x0d, 0x33, 0xd9, 0x16, 0x35, 0x8e, + 0xc3, 0x88, 0x36, 0xfa, 0x3e, 0xa8, 0x4f, 0x30, 0x9d, 0xf1, 0x08, 0xea, + 0x40, 0x1b, 0x87, 0x4d, 0x23, 0x8e, 0x8e, 0xb0, 0xe2, 0xf0, 0x27, 0xc1, + 0xdc, 0x0d, 0xe2, 0x8f, 0x93, 0xef, 0x8b, 0xd1, 0x19, 0xa5, 0xbe, 0xd7, + 0x5a, 0x8a, 0x38, 0x62, 0x43, 0xba, 0x74, 0xf8, 0xae, 0x11, 0x1f, 0x1d, + 0xa4, 0x6e, 0x70, 0x94, 0x91, 0x14, 0xf4, 0xff, 0xbe, 0x39, 0xb4, 0x33, + 0xc2, 0x87, 0x74, 0x1b, 0xfd, 0x9a, 0xa8, 0x64, 0x09, 0x4b, 0x7f, 0x95, + 0x0a, 0xcb, 0x6b, 0x15, 0x54, 0x1d, 0xc6, 0x03, 0x1d, 0x1b, 0x25, 0x56, + 0x15, 0xb5, 0xd7, 0xe5, 0xd6, 0xf3, 0x28, 0xa4, 0xde, 0x1b, 0x39, 0x0d, + 0x59, 0x26, 0x12, 0xe4, 0x32, 0xf2, 0x25, 0xeb, 0xc0, 0xdb, 0x58, 0xe5, + 0xce, 0x64, 0x6f, 0x70, 0x74, 0xc1, 0xc9, 0xbd, 0x75, 0xef, 0x16, 0x02, + 0xdf, 0x27, 0x09, 0xc8, 0xb8, 0x37, 0x8f, 0x44, 0x0d, 0x58, 0x48, 0xf5, + 0xc2, 0x53, 0x21, 0x28, 0x16, 0xa4, 0x56, 0x02, 0xdf, 0xa7, 0x97, 0xa4, + 0x5c, 0x48, 0x75, 0x51, 0x89, 0x0b, 0xa7, 0x4d, 0xd9, 0x9e, 0x04, 0x4e, + 0x5d, 0x6c, 0xe5, 0x1f, 0x68, 0x88, 0xcc, 0xb7, 0x9a, 0x20, 0x05, 0x83, + 0x82, 0x6c, 0xfd, 0xdb, 0x07, 0x6c, 0xec, 0x61, 0xaa, 0x36, 0x57, 0x68, + 0x01, 0xf2, 0x70, 0xfe, 0xe6, 0x4d, 0xe1, 0xa9, 0xb6, 0xb6, 0x52, 0xe6, + 0x20, 0x52, 0x0f, 0x27, 0x9a, 0x1c, 0x2d, 0x20, 0x9b, 0xd4, 0x07, 0xd3, + 0xf6, 0x85, 0x4b, 0xf2, 0x52, 0x4d, 0x4c, 0xd7, 0xf0, 0x32, 0x5d, 0x2e, + 0xef, 0xa2, 0xd0, 
0xcd, 0x48, 0x89, 0xbc, 0x9f, 0xcb, 0x37, 0x02, 0x29, + 0xa5, 0xdb, 0xab, 0xfa, 0x1d, 0xf4, 0x53, 0x78, 0x30, 0xde, 0x2c, 0x5c, + 0x35, 0x7f, 0x3d, 0xe1, 0xe0, 0xce, 0xdb, 0x13, 0xca, 0x2a, 0xae, 0xdf, + 0x1c, 0xb1, 0xb6, 0xb9, 0x6a, 0x9f, 0x28, 0xb0, 0x54, 0x5a, 0x00, 0xdd, + 0x76, 0x14, 0xfb, 0x17, 0xc2, 0x2a, 0x45, 0xa2, 0x18, 0xbb, 0x8a, 0x3e, + 0xbe, 0x0e, 0xa5, 0x1b, 0x3c, 0x70, 0x56, 0x10, 0x98, 0xec, 0xc6, 0x3a, + 0x95, 0x2a, 0x96, 0x6a, 0x44, 0xef, 0xd9, 0x9c, 0x2a, 0x45, 0xb4, 0x15, + 0xf8, 0x2e, 0x03, 0x5d, 0x8c, 0x79, 0xfb, 0xb0, 0x53, 0x71, 0xcd, 0x0d, + 0xf4, 0xe2, 0xfc, 0x3b, 0x71, 0xee, 0x30, 0xf2, 0x29, 0xd3, 0xaa, 0x18, + 0x7a, 0x45, 0x1d, 0x99, 0x6d, 0x2f, 0x1f, 0x2d, 0x32, 0x23, 0x48, 0xc2, + 0x69, 0x33, 0x3d, 0x04, 0xa7, 0xa3, 0x96, 0xb5, 0x76, 0x5b, 0x4e, 0xb7, + 0x3c, 0x10, 0x58, 0x17, 0xf4, 0x5f, 0xec, 0x51, 0x6d, 0x5a, 0x3b, 0x7f, + 0x1e, 0x0e, 0xbb, 0xbf, 0x77, 0x43, 0xf7, 0xa4, 0x57, 0xc0, 0x33, 0xac, + 0xc1, 0xe3, 0x3e, 0x1f, 0x65, 0x3c, 0x62, 0x19, 0x46, 0x2d, 0x7b, 0x2d, + 0x07, 0x44, 0x48, 0xf4, 0x91, 0xdf, 0x59, 0x32, 0x10, 0xf7, 0x12, 0xe2, + 0xe5, 0x39, 0x70, 0x37, 0xa4, 0x79, 0x9a, 0x17, 0x19, 0xe8, 0x90, 0xe7, + 0x37, 0x0d, 0xb6, 0x6d, 0x58, 0xe6, 0x7e, 0x57, 0x76, 0x8a, 0xe8, 0xd0, + 0x76, 0x30, 0x25, 0xda, 0xb6, 0xdf, 0x59, 0x3c, 0x6c, 0x20, 0x65, 0x88, + 0xd2, 0x60, 0x5e, 0x39, 0xb6, 0x6b, 0xac, 0xa2, 0x25, 0xc6, 0xa7, 0xb1, + 0x2f, 0xbb, 0x1d, 0x23, 0xee, 0x02, 0x08, 0x1d, 0xd6, 0x6c, 0x0e, 0xbc, + 0xea, 0xd2, 0xc2, 0x70, 0x34, 0xe9, 0x96, 0xd3, 0xf3, 0xf4, 0x8e, 0x94, + 0x6f, 0x86, 0x76, 0xe7, 0x38, 0x08, 0x6f, 0x47, 0xf5, 0xcd, 0xab, 0xad, + 0x7a, 0x39, 0x10, 0x9a, 0xa8, 0x44, 0xba, 0x2d, 0x7f, 0x05, 0x1e, 0xb7, + 0x44, 0xd8, 0x10, 0x05, 0xd1, 0x8d, 0x98, 0x09, 0x14, 0xbb, 0x6b, 0x2b, + 0xf7, 0xeb, 0x9f, 0xa5, 0x65, 0x4b, 0x21, 0xff, 0xaf, 0xe8, 0x2e, 0x34, + 0x52, 0x38, 0xcf, 0xd5, 0x51, 0x29, 0x2c, 0x91, 0x43, 0x3a, 0x49, 0x42, + 0xdd, 0xfb, 0x0e, 0xd2, 0x77, 0x8f, 0x65, 0x93, 0x3e, 0x52, 0x22, 0x58, + 0xd6, 0xf9, 0xd9, 
0x58, 0xd4, 0x06, 0xa9, 0x0c, 0x79, 0x9f, 0x1b, 0xa5, + 0x45, 0x61, 0xd8, 0x4e, 0xbf, 0x4b, 0x51, 0xe2, 0xfb, 0x6f, 0x58, 0xee, + 0xc5, 0xa5, 0x11, 0xbd, 0x99, 0x25, 0x14, 0xac, 0x94, 0x0e, 0xd1, 0xf7, + 0x54, 0xb6, 0x05, 0x8c, 0xc3, 0x57, 0xa5, 0x3c, 0x3c, 0xa6, 0x83, 0x47, + 0x38, 0xd1, 0x6a, 0xab, 0x12, 0xc0, 0xd3, 0x7f, 0x96, 0x55, 0xd7, 0xf4, + 0x3a, 0xd0, 0x08, 0x85, 0x5f, 0x3d, 0x65, 0x8e, 0xbb, 0xea, 0x34, 0xf3, + 0x53, 0x96, 0x71, 0x08, 0x9b, 0x50, 0xe9, 0x4b, 0xce, 0x8a, 0x2f, 0xef, + 0xe4, 0xb2, 0x72, 0x68, 0xcb, 0x88, 0xa8, 0xd9, 0xd9, 0xa2, 0xfc, 0x62, + 0xe8, 0x8b, 0x23, 0x2b, 0xbc, 0xf0, 0x9e, 0xb4, 0xd0, 0x40, 0x8b, 0x45, + 0xff, 0x6d, 0x37, 0x01, 0xa6, 0x4b, 0x62, 0xe0, 0x3b, 0x4e, 0x18, 0x67, + 0xb3, 0x97, 0x04, 0xa0, 0x2a, 0xf2, 0x11, 0x79, 0x38, 0xb4, 0xb2, 0xed, + 0x64, 0xc1, 0x1e, 0xfe, 0xc4, 0xf4, 0xe2, 0x4d, 0x94, 0xb4, 0x17, 0x52, + 0x1a, 0x63, 0xe6, 0x56, 0x8a, 0x41, 0x0a, 0x5b, 0xa2, 0x1c, 0x59, 0xef, + 0x17, 0x64, 0xf9, 0xf7, 0x2c, 0xa4, 0xfd, 0x66, 0xf7, 0xe3, 0xae, 0xa0, + 0x54, 0x36, 0x64, 0x26, 0x84, 0x51, 0x49, 0xd5, 0x3a, 0x5e, 0x2c, 0xc5, + 0xca, 0xde, 0x8e, 0xe7, 0x25, 0x59, 0xb3, 0x9a, 0xb2, 0xf0, 0xff, 0xf1, + 0x83, 0xe5, 0x70, 0xc3, 0xef, 0x63, 0x66, 0x31, 0x04, 0x4d, 0x42, 0xf1, + 0xd9, 0x4c, 0x5e, 0x29, 0x92, 0x37, 0x8d, 0xd1, 0x18, 0x2a, 0x9e, 0x3c, + 0xcc, 0x05, 0xb9, 0xc4, 0xb6, 0xe7, 0x2a, 0x09, 0x3a, 0x68, 0xb5, 0x61, + 0x60, 0x36, 0x11, 0x02, 0x92, 0xf8, 0xa0, 0x56, 0x9b, 0xe8, 0xfe, 0xac, + 0x87, 0xcc, 0xaf, 0xb9, 0x62, 0xa7, 0x1e, 0x99, 0xb8, 0x9f, 0x47, 0xf7, + 0xa5, 0x12, 0x47, 0x66, 0xeb, 0xd6, 0x3a, 0x6f, 0xb3, 0x26, 0x63, 0xe2, + 0xec, 0x0c, 0xba, 0x7d, 0xc2, 0x9b, 0xb2, 0x10, 0x62, 0x03, 0x3f, 0x20, + 0xed, 0x7a, 0xce, 0x47, 0xd0, 0x50, 0x5b, 0x5c, 0x66, 0xbf, 0x01, 0x09, + 0x84, 0x0b, 0x71, 0xa8, 0x1f, 0x8d, 0xe1, 0x05, 0x09, 0xb4, 0xd5, 0x34, + 0xf1, 0xba, 0x31, 0xc6, 0x76, 0x8e, 0x00, 0x96, 0x3d, 0x6b, 0xe4, 0x66, + 0x3a, 0x22, 0xcd, 0x7f, 0x9d, 0xf8, 0x64, 0xfc, 0x76, 0x42, 0x88, 0x0e, + 0x32, 0xa5, 0xd0, 
0x69, 0x56, 0xe2, 0xa5, 0x6f, 0xbb, 0xfa, 0xd8, 0xde, + 0xb4, 0x23, 0xa9, 0xc7, 0x9a, 0xc1, 0x99, 0xa7, 0x7f, 0x79, 0x58, 0xe1, + 0xe7, 0xc5, 0x56, 0x36, 0xc0, 0xfb, 0x8d, 0x8f, 0xe4, 0x6c, 0x96, 0x89, + 0xcb, 0xb0, 0xb0, 0x6e, 0xee, 0x20, 0x46, 0xd3, 0x43, 0x83, 0xac, 0x39, + 0x7c, 0x25, 0xba, 0x69, 0x3a, 0x58, 0x8a, 0x48, 0x0a, 0xf7, 0xb7, 0xfc, + 0x58, 0x7b, 0x93, 0x8b, 0xcd, 0x81, 0x7e, 0x94, 0xe0, 0xdf, 0xb1, 0xca, + 0xf6, 0x60, 0x54, 0xa9, 0x6e, 0xc6, 0x7f, 0xac, 0xfb, 0x62, 0xfe, 0xd9, + 0xd5, 0xf4, 0x6c, 0x62, 0x65, 0xf6, 0x0b, 0x24, 0x49, 0x1d, 0x55, 0xd6, + 0x4c, 0x0b, 0x5a, 0xf1, 0x2e, 0x78, 0x7a, 0x4e, 0xc1, 0xd0, 0xdb, 0xfe, + 0xd2, 0x84, 0x60, 0x68, 0x51, 0x8e, 0x3f, 0xf1, 0xa8, 0x90, 0xbf, 0xda, + 0x86, 0xda, 0x41, 0xd8, 0x90, 0x7b, 0xc3, 0xc8, 0x9e, 0xa5, 0x77, 0x06, + 0x56, 0x02, 0x13, 0x59, 0xaa, 0x89, 0xf9, 0xd5, 0x3c, 0x1d, 0xe2, 0xa9, + 0xb1, 0xc8, 0x02, 0x5a, 0x1c, 0xae, 0x72, 0x66, 0xdf, 0xb4, 0x1a, 0xb7, + 0xd2, 0x4d, 0xda, 0x4f, 0xc9, 0xed, 0x88, 0x7d, 0x9b, 0xc4, 0x4a, 0x8c, + 0x5e, 0x77, 0xaf, 0xd6, 0xd3, 0xbb, 0x38, 0xd2, 0xfa, 0x85, 0xe4, 0xdd, + 0xe7, 0x6e, 0xcb, 0x0b, 0x34, 0x1e, 0xa8, 0xfd, 0xf4, 0xd2, 0xc3, 0xdd, + 0xe0, 0xa6, 0xb1, 0x78, 0x16, 0x85, 0x2b, 0x1b, 0x22, 0xa6, 0xd5, 0x93, + 0x4f, 0xa1, 0xd5, 0x10, 0x96, 0xab, 0x38, 0xa7, 0x3c, 0xf2, 0xbd, 0xd9, + 0x7c, 0x59, 0x71, 0x25, 0x6f, 0x7c, 0xce, 0x73, 0x8e, 0x4e, 0xfb, 0x5a, + 0x30, 0x24, 0x53, 0xc5, 0xa3, 0x20, 0x13, 0x03, 0xfc, 0x7a, 0xaf, 0x1f, + 0x71, 0x5d, 0x6b, 0xce, 0x2e, 0x92, 0x16, 0x4d, 0xab, 0x96, 0x10, 0xc0, + 0xf6, 0x3c, 0xfe, 0x51, 0x89, 0x4d, 0x39, 0x45, 0x2c, 0x92, 0x5a, 0x86, + 0x24, 0xce, 0xbc, 0x75, 0xc6, 0x7f, 0x0e, 0xc2, 0xd1, 0xe7, 0x6a, 0x75, + 0x30, 0x59, 0xfb, 0xbf, 0x6b, 0xcf, 0x60, 0x90, 0x07, 0x73, 0xb1, 0x47, + 0x6e, 0x5d, 0xcd, 0x44, 0xac, 0xee, 0x2a, 0xdb, 0x16, 0x5a, 0x1a, 0xaf, + 0xba, 0xf8, 0x64, 0xdd, 0xdd, 0xed, 0x46, 0x4b, 0x67, 0xf3, 0xf8, 0x2d, + 0x22, 0xe9, 0x25, 0x74, 0x4c, 0x70, 0xe0, 0x3d, 0xbc, 0x11, 0xd3, 0x56, + 0xec, 0x86, 0x39, 
0x89, 0x4c, 0xf2, 0xbc, 0x39, 0xdc, 0xde, 0x5f, 0x3b, + 0x42, 0xcb, 0xf6, 0x0c, 0x49, 0x8c, 0x66, 0x76, 0x58, 0x28, 0xe8, 0x47, + 0x59, 0x40, 0x11, 0xef, 0xb5, 0x9d, 0x93, 0xe5, 0x39, 0x56, 0x62, 0x0d, + 0xd0, 0xdd, 0xbb, 0x51, 0xff, 0x87, 0xa3, 0xd1, 0x9e, 0x0e, 0x0c, 0xbd, + 0x8e, 0xfc, 0xa5, 0x44, 0xc7, 0x6d, 0x35, 0x1d, 0x69, 0x14, 0x5b, 0x0d, + 0x45, 0xff, 0x85, 0x2d, 0xd1, 0x14, 0xf4, 0x5e, 0x5b, 0x49, 0x85, 0xad, + 0x69, 0xf1, 0x34, 0x9e, 0x7a, 0xf3, 0xed, 0x2d, 0xf2, 0x5f, 0x70, 0x5a, + 0xc1, 0xca, 0x63, 0xb5, 0xec, 0x49, 0xfc, 0x88, 0xcb, 0x0f, 0x81, 0x1d, + 0xd4, 0x2f, 0x18, 0xf6, 0xfe, 0x71, 0x51, 0xe2, 0x25, 0x71, 0x48, 0xa4, + 0xb2, 0x9f, 0x4f, 0xc0, 0xa5, 0x24, 0x12, 0x5b, 0xf8, 0xf2, 0xcf, 0x6e, + 0x52, 0x52, 0x6a, 0xee, 0x7d, 0xa5, 0x9b, 0xdb, 0x9c, 0xc9, 0x35, 0x30, + 0x1a, 0xf0, 0x7d, 0xcc, 0x98, 0x73, 0x09, 0x16, 0x8c, 0x05, 0x8d, 0x70, + 0xa3, 0x15, 0xd6, 0x7a, 0xa0, 0x7c, 0xd5, 0xcc, 0xd3, 0x29, 0x32, 0x2e, + 0xa5, 0xde, 0xf6, 0xd3, 0xa4, 0x03, 0x59, 0x6c, 0x05, 0x2d, 0x0e, 0x8b, + 0xb7, 0x1f, 0xa0, 0x57, 0x5c, 0x76, 0xde, 0x81, 0xcb, 0x64, 0xb9, 0x73, + 0xc1, 0x3b, 0x26, 0xba, 0x16, 0xdb, 0xe6, 0x40, 0x23, 0xa4, 0xe9, 0x24, + 0x48, 0xb8, 0x73, 0x23, 0x67, 0xbf, 0x26, 0xca, 0x95, 0x4f, 0xa0, 0x60, + 0x95, 0xa2, 0x0f, 0x29, 0xed, 0x5d, 0x71, 0x66, 0x94, 0xa3, 0xd0, 0x2a, + 0x4e, 0x17, 0x32, 0x18, 0xe6, 0xd6, 0x75, 0x84, 0xa5, 0x2a, 0x72, 0x18, + 0x60, 0x85, 0xde, 0x66, 0x22, 0x52, 0xf6, 0x45, 0xd6, 0xf0, 0xed, 0x93, + 0x0f, 0x5a, 0xa9, 0x12, 0x2a, 0xc4, 0xa8, 0x3d, 0x97, 0xc9, 0xc7, 0x84, + 0x71, 0x14, 0xb3, 0x54, 0xb6, 0xf7, 0x92, 0x7a, 0xc0, 0x6e, 0x02, 0xf7, + 0x48, 0xdb, 0x7c, 0xc1, 0x45, 0x21, 0xdb, 0x1b, 0x51, 0xc3, 0xea, 0xc0, + 0x19, 0x31, 0xe4, 0x6c, 0x20, 0x5f, 0x08, 0xe7, 0x88, 0xf7, 0xc0, 0x6e, + 0xee, 0x5f, 0x20, 0x33, 0x68, 0xef, 0xc5, 0x33, 0x1b, 0x40, 0x66, 0xc5, + 0xa3, 0x68, 0xdb, 0xbc, 0x8a, 0xb7, 0x54, 0xdb, 0xc7, 0xc5, 0x2c, 0x42, + 0x65, 0x51, 0xab, 0x56, 0x94, 0x73, 0xec, 0xd9, 0x95, 0xfa, 0x6a, 0x56, + 0xef, 0x22, 0x95, 
0xa4, 0x75, 0x46, 0xee, 0x60, 0x8b, 0x25, 0xa6, 0x92, + 0x0a, 0x8e, 0xc1, 0x39, 0x97, 0x69, 0xa9, 0x19, 0x97, 0xf1, 0x0f, 0x61, + 0xc2, 0x40, 0x7d, 0x62, 0xe9, 0x5e, 0x22, 0x1f, 0x27, 0xe5, 0xc7, 0xe7, + 0xa4, 0x35, 0x5d, 0x90, 0xc7, 0x38, 0x38, 0x2d, 0xb0, 0x1e, 0x29, 0x0f, + 0x4f, 0x08, 0x8b, 0xdd, 0x69, 0x3c, 0x5c, 0x03, 0xbe, 0x9a, 0x76, 0xba, + 0x91, 0xf5, 0x57, 0x07, 0x39, 0xfe, 0x09, 0xfc, 0x01, 0x7b, 0x37, 0xc4, + 0x73, 0x7f, 0x76, 0x50, 0x76, 0xae, 0x6e, 0x4b, 0x22, 0x2c, 0x3b, 0xe7, + 0x77, 0x19, 0x9a, 0x92, 0x26, 0xdf, 0xc4, 0xe6, 0xd8, 0x57, 0xc1, 0x7f, + 0x65, 0x0b, 0xfb, 0xfa, 0xdd, 0xd2, 0x8c, 0xc7, 0xb1, 0x72, 0x2a, 0xb2, + 0x5a, 0xfa, 0xb2, 0x84, 0xb1, 0xec, 0x79, 0x9e, 0xde, 0xd8, 0x2f, 0xdf, + 0x3b, 0x39, 0x0b, 0xac, 0xfa, 0xb8, 0x07, 0x38, 0xff, 0x2e, 0x22, 0x2b, + 0xc9, 0x31, 0x3b, 0x09, 0x05, 0xd2, 0x06, 0xc4, 0x2d, 0x22, 0x1c, 0x21, + 0x70, 0x03, 0x93, 0xd1, 0x3a, 0x8d, 0x94, 0x60, 0xfe, 0x99, 0x13, 0xc3, + 0x00, 0x03, 0x41, 0xfa, 0x50, 0x79, 0x31, 0xeb, 0xf0, 0xf4, 0x06, 0x7a, + 0x19, 0xe8, 0x90, 0xdf, 0x61, 0x4d, 0x5f, 0xe3, 0x99, 0x1b, 0xca, 0xbf, + 0xcf, 0xae, 0xca, 0xfa, 0x84, 0x63, 0x88, 0x56, 0x1d, 0x52, 0x5a, 0x21, + 0xf9, 0xcd, 0xa3, 0x30, 0x16, 0xb9, 0x0d, 0xe1, 0x87, 0x08, 0x78, 0xa2, + 0xdb, 0x7e, 0x16, 0x82, 0x48, 0x48, 0x17, 0x1a, 0xa8, 0x3f, 0xc7, 0x4d, + 0xfd, 0x99, 0x2b, 0x36, 0xbf, 0x08, 0xb9, 0xeb, 0xa6, 0xbf, 0xb6, 0xa0, + 0x9e, 0x26, 0x15, 0xac, 0xd2, 0x65, 0xc9, 0x36, 0x41, 0xe3, 0x59, 0x4e, + 0xdc, 0x7b, 0x58, 0x3b, 0x47, 0x0b, 0xc9, 0xf3, 0xb3, 0xf9, 0x81, 0x33, + 0x39, 0xca, 0xf8, 0x97, 0x2d, 0x9b, 0x24, 0x33, 0x69, 0xbe, 0x1b, 0x81, + 0x59, 0x59, 0x17, 0xed, 0x7d, 0x5b, 0xbe, 0xda, 0xeb, 0x4e, 0x5d, 0x5d, + 0x70, 0x13, 0x3c, 0x4b, 0x4a, 0xfc, 0xa4, 0xbe, 0xa0, 0x5d, 0xa2, 0xed, + 0xe8, 0x8d, 0xf8, 0xf2, 0xa5, 0xdd, 0xd4, 0x49, 0x45, 0x04, 0xef, 0x18, + 0x9f, 0xa1, 0xf7, 0xc4, 0x3b, 0xc2, 0x6b, 0xe0, 0x45, 0xa8, 0x76, 0x39, + 0x49, 0x32, 0xec, 0xc3, 0xcb, 0x45, 0x46, 0xd2, 0x4b, 0x3a, 0x55, 0xe5, + 0xce, 0x08, 0xc4, 
0x84, 0xe5, 0xd9, 0xb3, 0xf3, 0xc4, 0xa8, 0xe9, 0x88, + 0x83, 0xd5, 0x56, 0xe1, 0xa6, 0xef, 0x41, 0x55, 0xb0, 0x3f, 0xa3, 0xc1, + 0xbe, 0x3b, 0x83, 0xd6, 0x92, 0x90, 0x38, 0xd3, 0xf3, 0x75, 0xf6, 0x49, + 0x95, 0xee, 0xa9, 0xed, 0xaa, 0xf8, 0xb9, 0x14, 0x0e, 0x6a, 0x48, 0x9d, + 0xc5, 0x48, 0x3b, 0x5e, 0x61, 0xd3, 0x8c, 0x4a, 0x10, 0x12, 0x7c, 0x0a, + 0xf7, 0xaf, 0x62, 0x2d, 0xd3, 0x89, 0x8d, 0x75, 0x19, 0x6b, 0x62, 0x4b, + 0x1a, 0x04, 0xc7, 0xd3, 0x32, 0x17, 0x2f, 0x5f, 0x29, 0xfa, 0xb1, 0x8d, + 0x78, 0xe7, 0x27, 0xf6, 0x67, 0x7e, 0x17, 0xa3, 0x18, 0xdc, 0x13, 0x08, + 0x1e, 0x4b, 0xc7, 0x8e, 0xf6, 0xba, 0x90, 0xb3, 0x32, 0x42, 0x37, 0x6b, + 0x60, 0xa9, 0x23, 0xb5, 0x89, 0x57, 0x7b, 0xdb, 0x98, 0x35, 0x1f, 0x95, + 0x86, 0xa5, 0x83, 0x36, 0xd1, 0x8c, 0x8e, 0xc0, 0x77, 0x5c, 0x40, 0x8e, + 0xec, 0xdf, 0x25, 0x69, 0x0a, 0x83, 0x8f, 0xdf, 0x91, 0x52, 0x31, 0xab, + 0xd5, 0x61, 0x37, 0xbd, 0x83, 0x1d, 0x4c, 0x8b, 0xa1, 0x4a, 0x81, 0x8b, + 0xa0, 0xf4, 0x41, 0xbd, 0x54, 0x36, 0x36, 0x56, 0x6d, 0x4c, 0xe7, 0xd9, + 0xc7, 0x09, 0xd9, 0x4b, 0xf0, 0x54, 0x45, 0x3c, 0x62, 0x47, 0x17, 0x54, + 0x1f, 0x55, 0x2f, 0x74, 0xdc, 0x11, 0xe9, 0xa3, 0xb5, 0x75, 0xe9, 0x10, + 0xde, 0x62, 0xa9, 0x24, 0x39, 0xd4, 0x17, 0xbb, 0x15, 0xe4, 0x48, 0x09, + 0x26, 0x6a, 0xbd, 0x3b, 0x10, 0xa1, 0x55, 0xe5, 0x99, 0x53, 0x1e, 0xd2, + 0xee, 0x7c, 0x54, 0xd8, 0x06, 0x8b, 0x1e, 0xe7, 0x3f, 0x08, 0x38, 0x9b, + 0x2e, 0x41, 0xdf, 0x0b, 0x7e, 0x83, 0x7f, 0x04, 0x38, 0xa5, 0x1f, 0x46, + 0x8b, 0x94, 0x28, 0x9f, 0xb8, 0x8c, 0x41, 0xfe, 0x96, 0xe2, 0x24, 0xd1, + 0x97, 0xa4, 0xcb, 0xba, 0xfa, 0x19, 0xc9, 0x57, 0x30, 0x0f, 0x88, 0x58, + 0xa9, 0x67, 0x31, 0x74, 0x51, 0x34, 0x03, 0xbc, 0xff, 0x3b, 0x12, 0x61, + 0x84, 0x63, 0x74, 0xec, 0x4d, 0xda, 0xa3, 0x56, 0xc3, 0xe5, 0x5e, 0x4a, + 0x03, 0x26, 0x88, 0x1a, 0x1d, 0x7f, 0xe8, 0x3f, 0x61, 0x78, 0xb6, 0xc5, + 0x66, 0xb7, 0xb4, 0xc1, 0xe7, 0x82, 0xc1, 0x44, 0xdf, 0xf9, 0x30, 0x30, + 0xe1, 0xd0, 0xf8, 0xf5, 0x40, 0x5a, 0x72, 0x29, 0xef, 0x30, 0xe1, 0x01, + 0xca, 0x1b, 0xb0, 
0xa6, 0xa3, 0x17, 0x2b, 0x58, 0x03, 0xda, 0x25, 0x0f, + 0xdc, 0x49, 0x7c, 0xc5, 0x8f, 0x2d, 0x83, 0xca, 0x43, 0x08, 0xc0, 0x36, + 0x70, 0x1e, 0x42, 0xfd, 0xac, 0x4d, 0x31, 0xcf, 0x68, 0x4a, 0xda, 0xd8, + 0xcb, 0xee, 0xaa, 0xfc, 0xcf, 0xcc, 0xe6, 0xb2, 0x77, 0x8b, 0x83, 0x5b, + 0xd5, 0x3d, 0x55, 0xba, 0x03, 0x45, 0xce, 0x51, 0x78, 0x36, 0xcb, 0xcd, + 0x9a, 0x0f, 0x58, 0xbe, 0x15, 0x10, 0xdb, 0x3f, 0x1d, 0x28, 0x27, 0x11, + 0x69, 0xca, 0x95, 0x68, 0xa8, 0xc8, 0xff, 0x0c, 0x3f, 0xd5, 0x11, 0x91, + 0x35, 0x45, 0x35, 0x9d, 0x1c, 0x58, 0xa2, 0xe5, 0xab, 0x83, 0x95, 0x10, + 0x44, 0xd4, 0xc0, 0x27, 0xf4, 0xc2, 0x72, 0x0f, 0x1a, 0x3d, 0x1c, 0xf2, + 0x7f, 0xb9, 0x54, 0xf2, 0x41, 0x24, 0xa8, 0x67, 0x30, 0xa0, 0x57, 0x67, + 0x00, 0xa8, 0x06, 0x60, 0xc3, 0x74, 0x6d, 0x54, 0x90, 0x5e, 0xad, 0x71, + 0x41, 0x50, 0xab, 0x9d, 0xba, 0x34, 0x1a, 0xfd, 0x19, 0x21, 0x0e, 0x87, + 0xb7, 0x22, 0xe6, 0xca, 0xb9, 0x0d, 0x3c, 0x4f, 0xad, 0x16, 0xf1, 0xa5, + 0x6d, 0xba, 0x6d, 0x7b, 0xbe, 0x7b, 0xe3, 0x95, 0xec, 0x1b, 0x8b, 0x6e, + 0xb0, 0xdc, 0x5c, 0xfd, 0x31, 0x73, 0x85, 0x02, 0x63, 0xc6, 0xcc, 0x04, + 0x29, 0xa5, 0xf4, 0x1f, 0xcb, 0x90, 0xf7, 0x83, 0x0d, 0x36, 0xbf, 0x31, + 0xc0, 0xfc, 0x26, 0x15, 0x87, 0xc8, 0x15, 0x88, 0xc9, 0x79, 0x11, 0x67, + 0x23, 0x53, 0xca, 0x03, 0x7a, 0x02, 0xe5, 0xfc, 0xb3, 0x38, 0xf3, 0x5d, + 0xfc, 0x91, 0x6f, 0x59, 0x26, 0xae, 0xd8, 0x45, 0xfa, 0xc4, 0x5b, 0xa2, + 0xfb, 0x2c, 0xc5, 0x36, 0xc6, 0x0d, 0x7b, 0x4e, 0xd2, 0x7f, 0x61, 0xc5, + 0xcc, 0x74, 0xd3, 0x41, 0xd4, 0x8a, 0xaf, 0xcb, 0x32, 0x50, 0xca, 0xeb, + 0x59, 0x0a, 0x05, 0x25, 0xe0, 0x5f, 0x30, 0x2b, 0x5d, 0x9b, 0xf7, 0xe8, + 0x14, 0x14, 0xb5, 0xfe, 0xd5, 0x2f, 0x94, 0x84, 0x5b, 0xc7, 0x4f, 0x82, + 0x01, 0x50, 0xbf, 0x54, 0xe2, 0x7d, 0xeb, 0x0c, 0x85, 0xc8, 0x99, 0x45, + 0x50, 0x8e, 0x4e, 0x10, 0x12, 0x01, 0x17, 0x41, 0xf3, 0x21, 0x4a, 0xee, + 0xaf, 0x0f, 0x76, 0x44, 0xe2, 0x8e, 0xf8, 0x36, 0x25, 0xab, 0x0d, 0x8f, + 0xb1, 0x0a, 0xbf, 0x63, 0x0e, 0xf2, 0x0c, 0x9d, 0x39, 0xa1, 0x98, 0x98, + 0x69, 0x91, 0xd1, 
0x9b, 0xe8, 0xcf, 0x16, 0x65, 0x02, 0xc9, 0x67, 0x72, + 0x71, 0x7c, 0xfb, 0x41, 0x2d, 0xe4, 0xd3, 0xfb, 0x44, 0x8a, 0x7a, 0x88, + 0x32, 0x62, 0x26, 0x63, 0xfe, 0x5b, 0x0c, 0x4f, 0x6c, 0xad, 0x2f, 0x64, + 0x6f, 0xc9, 0xda, 0x95, 0x10, 0xbe, 0xd1, 0xfa, 0x8b, 0x67, 0x64, 0x35, + 0x2d, 0xed, 0xca, 0xf3, 0x12, 0xb7, 0x06, 0xc3, 0xa9, 0x8e, 0x3f, 0x09, + 0x4d, 0x1f, 0x50, 0x3a, 0x97, 0xb7, 0xa7, 0xce, 0x4d, 0x46, 0xf1, 0x61, + 0xc1, 0x06, 0x95, 0x0d, 0x07, 0xa2, 0xbc, 0xed, 0xeb, 0x45, 0xb4, 0x69, + 0x05, 0x7a, 0x30, 0x47, 0xa3, 0xbf, 0x81, 0xa9, 0xa7, 0xf0, 0x53, 0x36, + 0x31, 0x37, 0x13, 0xe5, 0x0e, 0xd6, 0xe6, 0xc7, 0x17, 0x17, 0x21, 0x6d, + 0x36, 0xd0, 0xf6, 0x2a, 0xea, 0x2d, 0x32, 0x0e, 0x90, 0x03, 0x30, 0x4d, + 0x30, 0x31, 0xaa, 0x79, 0x2d, 0xae, 0x2e, 0xb0, 0x13, 0xad, 0x63, 0x69, + 0x67, 0xd8, 0xf3, 0x6e, 0xa4, 0x34, 0xcf, 0x02, 0x10, 0xdd, 0x76, 0xfa, + 0xa7, 0xb0, 0x92, 0xea, 0x47, 0xbd, 0xff, 0xf9, 0xac, 0x8a, 0x1f, 0x31, + 0xf8, 0x05, 0xd4, 0xce, 0x23, 0xad, 0x32, 0x8c, 0x6c, 0x92, 0x85, 0xb9, + 0x74, 0xa6, 0xab, 0x6e, 0x76, 0xfd, 0x3e, 0x8a, 0xac, 0xa3, 0xd1, 0xb7, + 0x40, 0x53, 0x87, 0x28, 0xfc, 0xbc, 0x8a, 0x52, 0x8e, 0x2e, 0x59, 0x2c, + 0x5f, 0x3f, 0xcb, 0xd8, 0xbe, 0x37, 0xfd, 0xdc, 0xc0, 0x34, 0x85, 0x67, + 0x28, 0x9f, 0x1d, 0x05, 0x05, 0x94, 0xed, 0x6f, 0x54, 0x7a, 0x51, 0x9a, + 0xaa, 0xca, 0xe1, 0x41, 0x10, 0xf0, 0x9d, 0x38, 0x9c, 0x5e, 0x95, 0xe3, + 0x7e, 0x62, 0xe2, 0x31, 0x81, 0x28, 0x4a, 0x3c, 0x5e, 0x04, 0x11, 0xe2, + 0x6a, 0x45, 0x6f, 0x68, 0x96, 0x5b, 0xbf, 0x22, 0xd8, 0x29, 0x91, 0x76, + 0xe1, 0xb2, 0x5f, 0xfc, 0x89, 0x90, 0x87, 0xf8, 0xb8, 0x3f, 0xd5, 0x11, + 0xe7, 0x36, 0x47, 0x71, 0xb9, 0x52, 0x97, 0x8e, 0x62, 0x8b, 0x05, 0x31, + 0xe5, 0xd9, 0xa2, 0xc3, 0x1a, 0xb5, 0xda, 0xc7, 0xa5, 0x37, 0x06, 0x67, + 0x41, 0x1f, 0x6e, 0xa3, 0xc2, 0xb4, 0x96, 0x64, 0xfc, 0x46, 0x85, 0x95, + 0x4e, 0xd8, 0x2a, 0x4b, 0xaa, 0x1e, 0xec, 0xd5, 0xed, 0x81, 0x23, 0x68, + 0x0f, 0x5d, 0x0b, 0x95, 0x29, 0xd4, 0x36, 0x4d, 0x8c, 0x32, 0x73, 0x6a, + 0xb7, 0xad, 0xb8, 
0x9c, 0xad, 0x76, 0x09, 0xad, 0xb9, 0xea, 0x2d, 0x17, + 0x3c, 0x33, 0x87, 0x7f, 0x62, 0x74, 0x77, 0xc9, 0xd6, 0x3d, 0x17, 0xbc, + 0xff, 0x57, 0x10, 0xec, 0x7a, 0xb7, 0x89, 0x05, 0x26, 0xf1, 0xb2, 0x53, + 0xa1, 0x91, 0xc5, 0x2a, 0xfb, 0x5a, 0xce, 0x5d, 0xd1, 0x6b, 0xbc, 0xb7, + 0x39, 0x09, 0x43, 0xdf, 0x20, 0xd3, 0xc1, 0x74, 0x8d, 0xf4, 0x0b, 0x2a, + 0xc7, 0xe8, 0xa1, 0x5f, 0xb2, 0xfe, 0x1a, 0x96, 0x3a, 0x92, 0xbc, 0x8f, + 0x85, 0xe2, 0x22, 0x73, 0x3f, 0x49, 0xb3, 0x6b, 0x90, 0xbd, 0xcb, 0x3f, + 0x36, 0x6c, 0x3d, 0xe3, 0x00, 0x00, 0x00, 0x00, 0x56, 0xd1, 0xff, 0xff, + 0x04, 0x00, 0x00, 0x00, 0x90, 0x00, 0x00, 0x00, 0x1f, 0x05, 0x81, 0x3f, + 0x25, 0x68, 0xde, 0x72, 0x88, 0x26, 0x66, 0x2d, 0xe4, 0xc8, 0x81, 0xf8, + 0x5d, 0x98, 0xa2, 0xc2, 0x02, 0x62, 0x63, 0x47, 0xe6, 0x61, 0x7f, 0xee, + 0xca, 0x3f, 0x81, 0xd7, 0x1e, 0xa9, 0xbf, 0x66, 0x59, 0x7f, 0xc3, 0x35, + 0x03, 0xae, 0xe5, 0xf2, 0x4d, 0x81, 0x82, 0x78, 0x5e, 0xaf, 0xaa, 0xd1, + 0x27, 0x41, 0x19, 0x93, 0xa8, 0x9b, 0x78, 0x4e, 0x95, 0x89, 0x7f, 0xce, + 0x49, 0xd0, 0x45, 0xb5, 0x7f, 0x1d, 0xe9, 0xee, 0x7f, 0x91, 0xf4, 0x0a, + 0x67, 0x7d, 0x75, 0xff, 0x38, 0x81, 0x27, 0x90, 0x14, 0xa5, 0x99, 0x40, + 0x5b, 0xe6, 0x9a, 0x81, 0x75, 0x22, 0x5f, 0x18, 0x81, 0x34, 0xb7, 0x54, + 0x2e, 0x8d, 0x81, 0x36, 0x0e, 0x5e, 0xc0, 0x5f, 0xd4, 0xc6, 0x34, 0x81, + 0xc8, 0xb9, 0xe2, 0xa9, 0x77, 0x81, 0x44, 0xb4, 0x06, 0x24, 0x81, 0x74, + 0x1c, 0xeb, 0xfb, 0xdd, 0x25, 0x81, 0x14, 0x09, 0x2d, 0xba, 0x11, 0x4b, + 0x07, 0x13, 0xf1, 0xae, 0x81, 0xaf, 0xa3, 0x87, 0x00, 0x00, 0x00, 0x00, + 0xf6, 0xd1, 0xff, 0xff, 0x04, 0x00, 0x00, 0x00, 0x00, 0x2d, 0x00, 0x00, + 0x8a, 0x29, 0x03, 0xe6, 0x24, 0x2a, 0xd6, 0x21, 0xb6, 0xb1, 0x2d, 0x3a, + 0xff, 0xd6, 0x27, 0xd7, 0x18, 0x42, 0xc1, 0xb4, 0xf8, 0xfd, 0xdf, 0x45, + 0x09, 0x91, 0xcb, 0xfe, 0xe9, 0xb5, 0x24, 0xf1, 0xc0, 0x69, 0xd0, 0x64, + 0xa8, 0xeb, 0x12, 0x71, 0xe3, 0xb4, 0xbe, 0xb4, 0x93, 0xbf, 0x8a, 0x8b, + 0xf3, 0x4d, 0x13, 0x3b, 0x6f, 0x6f, 0x32, 0x12, 0x98, 0x95, 0xb9, 0x63, + 0xcd, 0xa5, 0x23, 
0xa4, 0xb8, 0x2e, 0x74, 0x75, 0xbc, 0xe4, 0xc7, 0x46, + 0x96, 0xd4, 0x47, 0xa0, 0x65, 0xec, 0xea, 0xcf, 0xd0, 0xdc, 0xe9, 0x8b, + 0xcc, 0x1d, 0x2f, 0x0d, 0x0a, 0x9c, 0x6e, 0x99, 0x97, 0x97, 0xcc, 0x00, + 0xd2, 0x8e, 0xbc, 0x3c, 0x9a, 0xf1, 0x32, 0x0e, 0xf3, 0xd6, 0x27, 0x1c, + 0xea, 0xab, 0xca, 0x4d, 0x69, 0x32, 0x30, 0x5f, 0x18, 0xd7, 0xb7, 0x4a, + 0xcb, 0x8e, 0xb2, 0x96, 0x39, 0xa3, 0xc7, 0x42, 0xca, 0x60, 0x9b, 0xad, + 0x8e, 0xb7, 0x54, 0x32, 0xea, 0xfd, 0x58, 0xfa, 0xf8, 0x02, 0xef, 0x2f, + 0xec, 0x3c, 0x2a, 0x1a, 0x6a, 0x08, 0xa4, 0x4b, 0xec, 0x30, 0x90, 0xaf, + 0x13, 0x98, 0xcd, 0x48, 0xfd, 0x5f, 0x56, 0x68, 0x17, 0x9e, 0x87, 0xb1, + 0x2b, 0x16, 0xd3, 0x3c, 0xe0, 0xe8, 0x0e, 0xa6, 0xc4, 0x24, 0xd3, 0x05, + 0x75, 0xda, 0x22, 0x44, 0xb5, 0x41, 0xd2, 0xa5, 0x99, 0xf1, 0x5e, 0xbe, + 0x15, 0xb7, 0x33, 0x54, 0x9a, 0x97, 0x5b, 0x35, 0x77, 0x2b, 0x18, 0x46, + 0x2f, 0x92, 0xc5, 0x97, 0x2d, 0x4c, 0xa6, 0xf8, 0x9e, 0xc3, 0xe0, 0x0a, + 0x52, 0xf9, 0x97, 0xc7, 0xd6, 0x36, 0xdd, 0x38, 0xaa, 0xf3, 0x05, 0x30, + 0xc3, 0xe5, 0xaf, 0x54, 0xdc, 0xc4, 0xf2, 0x01, 0x9e, 0xe6, 0xc1, 0x89, + 0xee, 0xd8, 0x5f, 0xfe, 0xf0, 0x70, 0x3c, 0xc4, 0x40, 0xa4, 0xd4, 0xee, + 0xaf, 0x3d, 0xe6, 0xcd, 0x31, 0x16, 0x31, 0x3b, 0xa0, 0x0e, 0xc4, 0x71, + 0xbf, 0xbd, 0x39, 0x89, 0x0f, 0x36, 0xba, 0xd8, 0xa2, 0x49, 0x01, 0xab, + 0xf4, 0x07, 0x99, 0xc7, 0xb1, 0x0c, 0x33, 0x9d, 0x71, 0xf1, 0x15, 0x4b, + 0x60, 0xe0, 0xed, 0x59, 0x0a, 0x34, 0xd9, 0xa2, 0x45, 0x99, 0x4a, 0x60, + 0xd3, 0xdc, 0x37, 0x56, 0x32, 0x4c, 0xea, 0xdc, 0xcf, 0xe6, 0x22, 0x27, + 0x17, 0xea, 0x75, 0x3f, 0x69, 0xd4, 0xcf, 0x53, 0x92, 0x98, 0xf4, 0xfe, + 0x13, 0xa8, 0xe2, 0xb2, 0x48, 0x5f, 0x64, 0xab, 0x2b, 0x61, 0x97, 0xf5, + 0xc5, 0xb6, 0xef, 0x32, 0x4e, 0x47, 0x26, 0x42, 0x48, 0x9c, 0x5b, 0x24, + 0xa3, 0xcb, 0x70, 0xc7, 0x31, 0x6c, 0xc8, 0x4d, 0x5c, 0x02, 0xca, 0x71, + 0x1e, 0x56, 0xdb, 0x27, 0x66, 0x5d, 0x4f, 0x0b, 0x09, 0x57, 0xbe, 0x72, + 0x17, 0x3b, 0xce, 0xdd, 0xd2, 0x20, 0x13, 0x67, 0x32, 0x04, 0xee, 0xc4, + 0x66, 0x23, 0x0e, 
0x97, 0x5e, 0x21, 0x30, 0xb2, 0xe4, 0x16, 0x06, 0x57, + 0xc3, 0x9b, 0x29, 0x5b, 0x76, 0xd0, 0x36, 0xac, 0xe6, 0xa2, 0x91, 0x57, + 0x96, 0x4e, 0x1c, 0x6f, 0x4a, 0x03, 0x50, 0x55, 0x6d, 0xaf, 0x9a, 0x29, + 0xc9, 0x61, 0x6c, 0x18, 0x4c, 0xb9, 0xd5, 0x41, 0xf8, 0x75, 0x2b, 0xc3, + 0x0e, 0x69, 0x9f, 0x45, 0x93, 0x2f, 0xa6, 0xf9, 0x30, 0x65, 0x05, 0x13, + 0xe3, 0x00, 0x54, 0x0e, 0xa4, 0xb5, 0x89, 0x6d, 0x4d, 0x11, 0x3d, 0x2a, + 0x29, 0x99, 0xd9, 0xdf, 0x75, 0xce, 0x01, 0x21, 0xbc, 0x26, 0xb3, 0x22, + 0xf9, 0xb0, 0x45, 0x5c, 0xf8, 0xea, 0xb2, 0x08, 0x1a, 0xf7, 0xa0, 0x70, + 0x65, 0xa8, 0xab, 0xe1, 0x92, 0xcc, 0xcc, 0x1f, 0x0e, 0x36, 0x60, 0xb7, + 0xea, 0xcb, 0x3d, 0xf6, 0x98, 0xbf, 0xcd, 0x00, 0xc9, 0x16, 0x1e, 0xdb, + 0x58, 0x24, 0xb1, 0xd8, 0xaf, 0x01, 0x00, 0xfa, 0x15, 0xf4, 0x37, 0x05, + 0xd7, 0x17, 0x2a, 0xd2, 0xe8, 0xe4, 0x0c, 0x50, 0xfa, 0xe8, 0xd6, 0x99, + 0xa9, 0x58, 0x61, 0x38, 0xee, 0x22, 0x3c, 0x53, 0xcf, 0x64, 0x8e, 0xad, + 0x4d, 0xd6, 0xc3, 0xc3, 0xdd, 0xb0, 0xb3, 0xf7, 0xdd, 0x37, 0xfd, 0xf3, + 0x2b, 0x6a, 0xe2, 0xd4, 0xfc, 0x0c, 0x74, 0xca, 0x37, 0x2f, 0xd2, 0xf8, + 0x5b, 0xf1, 0x8c, 0x32, 0xa0, 0xdc, 0x2c, 0xa8, 0x36, 0x2f, 0xbe, 0x45, + 0x9b, 0x42, 0x95, 0x15, 0x5e, 0x08, 0xb1, 0x61, 0xec, 0xa2, 0xdf, 0x5f, + 0xca, 0xf8, 0x62, 0x73, 0xfd, 0x66, 0xc8, 0x51, 0x2a, 0x69, 0x3c, 0x8f, + 0x75, 0xa4, 0x6f, 0xbe, 0xc1, 0x5c, 0x66, 0xe2, 0x60, 0x92, 0xd7, 0x0e, + 0xee, 0x1b, 0xc7, 0x39, 0x8b, 0x56, 0x6c, 0xc6, 0x20, 0xfa, 0xec, 0x96, + 0xa5, 0x0f, 0x74, 0x42, 0x32, 0x12, 0x11, 0xdf, 0x02, 0xfe, 0x42, 0x1c, + 0xfe, 0xf1, 0x72, 0xaf, 0x47, 0x3b, 0x62, 0xe3, 0x27, 0x29, 0xf0, 0xec, + 0x39, 0xd2, 0xdd, 0xb6, 0xe9, 0xbe, 0x5f, 0x66, 0x67, 0x6c, 0xc9, 0xa1, + 0xf0, 0x25, 0x9a, 0x1b, 0xa8, 0xa0, 0x15, 0xcb, 0x61, 0x98, 0x98, 0xfd, + 0xef, 0xba, 0x74, 0x9b, 0x54, 0xf3, 0x6d, 0xe1, 0xa4, 0xcf, 0xb5, 0xe7, + 0xba, 0x0f, 0xd1, 0x41, 0xd8, 0x63, 0x94, 0x09, 0xcd, 0x4f, 0xb1, 0x31, + 0x49, 0x5e, 0x54, 0xb1, 0x28, 0x39, 0x8e, 0x13, 0x48, 0x2e, 0x20, 0xb0, + 0xf7, 0x18, 0x9a, 
0xea, 0xf2, 0x9b, 0xde, 0x8f, 0x16, 0xc8, 0x9e, 0x31, + 0xca, 0x94, 0x28, 0x26, 0x0d, 0x8c, 0x0f, 0x09, 0x69, 0xc5, 0x2a, 0x38, + 0xae, 0x6b, 0xfb, 0x4f, 0xbb, 0xf4, 0x14, 0xea, 0x8d, 0x13, 0xc0, 0x09, + 0xe2, 0xfb, 0xfb, 0x09, 0xa1, 0xfc, 0x49, 0xff, 0x0f, 0x52, 0x3e, 0xe8, + 0xda, 0xfe, 0xe1, 0x67, 0x8f, 0x21, 0xcf, 0xaf, 0xb7, 0xe2, 0xcf, 0x09, + 0x15, 0x10, 0x51, 0x72, 0x8f, 0x42, 0x09, 0x9d, 0xea, 0x27, 0x2d, 0x25, + 0x9f, 0x54, 0x50, 0xfa, 0xdf, 0x9f, 0x41, 0xe8, 0xd2, 0x66, 0xd8, 0x28, + 0xfb, 0x8b, 0xe4, 0x42, 0x03, 0x92, 0xf9, 0xcd, 0xcc, 0xb0, 0xc0, 0x52, + 0x53, 0x6d, 0xcd, 0xed, 0x16, 0xad, 0x3c, 0x3d, 0xf9, 0x3b, 0x05, 0xbb, + 0xac, 0x9e, 0xa3, 0x4b, 0x17, 0xb4, 0xc7, 0xdd, 0xd4, 0xd3, 0x0c, 0x10, + 0x0d, 0xd8, 0x9c, 0xdb, 0xa4, 0x60, 0x06, 0x89, 0x4b, 0x06, 0x4c, 0x9f, + 0xc4, 0x47, 0xc8, 0xaf, 0xab, 0x02, 0x23, 0x89, 0x6e, 0xf2, 0x9d, 0x2b, + 0x6b, 0x9a, 0xa4, 0xee, 0x16, 0x0b, 0x3c, 0x76, 0xd4, 0xf0, 0x17, 0x90, + 0xca, 0xf5, 0xc8, 0xbf, 0xcb, 0xb1, 0x02, 0x69, 0x34, 0x71, 0x59, 0x5d, + 0x0e, 0x56, 0xd8, 0x41, 0x0a, 0xa5, 0x0a, 0x16, 0xbc, 0x93, 0x63, 0xf9, + 0xd9, 0xab, 0x3e, 0x75, 0x1e, 0xd3, 0xf3, 0x56, 0xf5, 0x14, 0xee, 0x65, + 0xf3, 0x2f, 0x72, 0x03, 0xcb, 0x69, 0x90, 0x91, 0x0d, 0x31, 0x8e, 0x3e, + 0xe9, 0xb0, 0xe6, 0x2e, 0x37, 0x5d, 0xb0, 0x38, 0x52, 0xe6, 0x23, 0x24, + 0x36, 0xb2, 0xe9, 0xa5, 0xa0, 0xae, 0xed, 0xfd, 0x95, 0xa5, 0xcf, 0x4a, + 0xe3, 0xbd, 0xe7, 0x29, 0xd0, 0x57, 0x3e, 0xf1, 0xdf, 0xc8, 0xc7, 0x26, + 0xf6, 0xc7, 0x4b, 0xc8, 0x6a, 0x4a, 0xed, 0x49, 0x60, 0x2d, 0x1c, 0xe3, + 0x8b, 0x10, 0x24, 0xfc, 0xef, 0xbb, 0x1e, 0x24, 0xbb, 0x40, 0xeb, 0x99, + 0xba, 0xe1, 0x4a, 0xd4, 0x1f, 0x69, 0x47, 0xa4, 0x8f, 0x48, 0x05, 0x17, + 0xcb, 0xee, 0x55, 0xca, 0xe5, 0xe3, 0x60, 0xec, 0xfa, 0xe6, 0xd1, 0x28, + 0xc5, 0xa8, 0x04, 0xd8, 0xce, 0x13, 0x2b, 0x99, 0x2b, 0xc7, 0x94, 0x9d, + 0xda, 0xd7, 0x6f, 0x31, 0xfe, 0xee, 0x6c, 0x9b, 0xf1, 0x70, 0xd2, 0xee, + 0xc4, 0xba, 0xb7, 0xbe, 0xd3, 0x37, 0xdc, 0x43, 0x4e, 0x30, 0x4a, 0x67, + 0xf2, 0x45, 0x29, 
0xe1, 0x8b, 0xb8, 0x6d, 0xca, 0xec, 0xb9, 0xd6, 0xd3, + 0xdd, 0xcb, 0xde, 0xdb, 0xa9, 0x4d, 0xdd, 0x3d, 0x41, 0xae, 0x99, 0x89, + 0xce, 0x70, 0x50, 0x61, 0x07, 0xf3, 0xca, 0x24, 0x56, 0x76, 0x3f, 0xe0, + 0x6e, 0xbe, 0xa7, 0xc6, 0xac, 0x6c, 0xf1, 0x8c, 0xa2, 0x0e, 0xc4, 0x2a, + 0x48, 0x30, 0x8b, 0xc9, 0xc0, 0x5a, 0xb2, 0x2b, 0xbd, 0xa2, 0xcc, 0xf7, + 0x25, 0x16, 0xc3, 0xde, 0x1b, 0x8d, 0x23, 0x8c, 0xb6, 0xc4, 0xaa, 0x4a, + 0x0b, 0x66, 0x25, 0x35, 0xb3, 0x9a, 0x74, 0x27, 0x63, 0xea, 0xef, 0x92, + 0x12, 0x8c, 0x58, 0xd9, 0x3a, 0x55, 0xd6, 0x61, 0x29, 0x9f, 0xbc, 0x28, + 0xbd, 0x30, 0xcd, 0x43, 0xe6, 0x36, 0x36, 0x66, 0x20, 0x8c, 0x9e, 0x23, + 0xfe, 0x6d, 0xf0, 0xbc, 0x61, 0xcd, 0x58, 0xd8, 0xe0, 0x2e, 0xe4, 0xcf, + 0x61, 0xf7, 0xd5, 0x6b, 0x54, 0x33, 0xb3, 0x2c, 0x60, 0xa8, 0x59, 0x21, + 0x5d, 0xaa, 0x65, 0x9e, 0xdc, 0xa3, 0xc9, 0xc4, 0x9d, 0x4d, 0x95, 0x29, + 0xf6, 0x2b, 0xcd, 0xc9, 0xb9, 0x9d, 0x46, 0xa0, 0x89, 0xf4, 0x4e, 0x52, + 0x55, 0xe2, 0x13, 0x98, 0xf0, 0xef, 0x27, 0xc3, 0xc9, 0xd1, 0xe1, 0xee, + 0x07, 0x1b, 0x9d, 0x8a, 0x5b, 0x9d, 0x06, 0x26, 0x61, 0x2a, 0x55, 0x6f, + 0x54, 0x22, 0xd5, 0x06, 0x20, 0xed, 0x06, 0x4d, 0xa2, 0xb3, 0xaa, 0x4f, + 0x1f, 0x3e, 0xd2, 0x0d, 0x6a, 0xab, 0x6d, 0xee, 0x8f, 0x09, 0xb2, 0xd9, + 0x39, 0x46, 0x0f, 0xe7, 0x51, 0x70, 0x51, 0xdb, 0x09, 0xf8, 0x8e, 0xbb, + 0x06, 0x98, 0x49, 0x69, 0xb7, 0x9e, 0xa0, 0xbc, 0x16, 0x5f, 0x96, 0xad, + 0xe9, 0x76, 0x9f, 0x71, 0xe2, 0x1b, 0x91, 0x73, 0xd9, 0x74, 0x6a, 0x70, + 0x48, 0x71, 0x47, 0x3b, 0x0c, 0xd5, 0x96, 0xe3, 0x6e, 0xdb, 0xbb, 0x9c, + 0x44, 0x5c, 0xe5, 0x07, 0x73, 0x31, 0xd1, 0x55, 0x07, 0xff, 0x5f, 0xb1, + 0x55, 0x9d, 0x0d, 0xbf, 0x32, 0x53, 0xf9, 0xfe, 0xcd, 0xc8, 0xe0, 0x56, + 0x18, 0x8f, 0x4b, 0x51, 0xd1, 0x23, 0x2e, 0x9f, 0xb9, 0xee, 0xf3, 0xfd, + 0x26, 0x02, 0xf6, 0x54, 0xd5, 0x3e, 0x13, 0xc1, 0xc1, 0xe4, 0xa8, 0xb4, + 0x5f, 0x5c, 0xa0, 0x9f, 0xb5, 0x19, 0xbb, 0x4e, 0xd6, 0xf8, 0x18, 0x9b, + 0xeb, 0x9e, 0x58, 0x9d, 0x00, 0x51, 0x24, 0x28, 0x70, 0x55, 0xf7, 0xb9, + 0x5a, 0x59, 0x50, 
0xc5, 0x72, 0xab, 0x6b, 0x13, 0x95, 0xfb, 0xe4, 0xc2, + 0x05, 0x96, 0xf3, 0x48, 0xef, 0x02, 0x67, 0xd5, 0x8f, 0x5b, 0x8e, 0xb6, + 0xbe, 0xc1, 0x3d, 0x8e, 0x22, 0xee, 0x49, 0xc7, 0xbe, 0xfb, 0x2d, 0x51, + 0x45, 0x44, 0xca, 0x94, 0x8e, 0xce, 0xb5, 0x9a, 0x29, 0xc7, 0x52, 0xde, + 0x2c, 0xdf, 0xcc, 0x43, 0xc7, 0xd7, 0x51, 0xb7, 0x07, 0xf0, 0x9b, 0x9d, + 0x33, 0x98, 0x62, 0xfa, 0xc9, 0x13, 0x0b, 0xcd, 0xdf, 0xbd, 0xff, 0x8e, + 0x13, 0x44, 0xda, 0x62, 0xc0, 0xd1, 0x8d, 0x57, 0x0e, 0xec, 0x53, 0x8a, + 0x04, 0xcf, 0x0f, 0x5a, 0xd7, 0x3c, 0x4b, 0x17, 0xda, 0x3b, 0xf0, 0x30, + 0xbf, 0xea, 0x40, 0xa6, 0x36, 0xed, 0xda, 0xf7, 0x40, 0x6b, 0xf1, 0x1e, + 0x61, 0xa0, 0x8b, 0x5d, 0xfa, 0xa8, 0x6a, 0xca, 0xfd, 0x6a, 0x06, 0xb4, + 0xf5, 0xb6, 0xc7, 0xbe, 0xdf, 0xac, 0x17, 0x00, 0x4a, 0x91, 0x8d, 0x97, + 0x5b, 0xc8, 0xcb, 0xd4, 0xc8, 0x20, 0x0b, 0x53, 0xee, 0x2b, 0x25, 0xb8, + 0xa1, 0x24, 0xa1, 0xa0, 0x17, 0x60, 0xd9, 0xf7, 0x2d, 0x00, 0x6c, 0x70, + 0x44, 0x0d, 0x60, 0xe7, 0x95, 0x1e, 0x8a, 0x1b, 0x29, 0xcf, 0xb5, 0xc1, + 0xbe, 0xd0, 0xe5, 0xeb, 0xd8, 0x71, 0x88, 0x34, 0xcb, 0xbd, 0x32, 0x52, + 0xa7, 0xcf, 0x6d, 0x9b, 0xef, 0xf2, 0xe4, 0x68, 0x6f, 0xfe, 0xb9, 0x17, + 0x31, 0xa0, 0x3e, 0xfc, 0xae, 0xf6, 0x54, 0xe3, 0x33, 0x24, 0xd1, 0xfc, + 0xb7, 0x37, 0x8f, 0xd3, 0x4f, 0xf2, 0x59, 0x53, 0xea, 0xaf, 0x71, 0xc5, + 0xb1, 0xdb, 0xf9, 0xed, 0xc0, 0x46, 0x56, 0xfc, 0x09, 0x90, 0xf7, 0x09, + 0x5a, 0x12, 0x71, 0xad, 0xa6, 0x0f, 0xba, 0x4c, 0x2f, 0xd7, 0x61, 0xcb, + 0xf2, 0xab, 0x44, 0x67, 0x43, 0xd0, 0x41, 0xd5, 0xba, 0xff, 0x26, 0x50, + 0x5b, 0x97, 0x91, 0xc4, 0x8f, 0x2a, 0x64, 0x3c, 0x06, 0x2e, 0x26, 0x8e, + 0x5f, 0xb1, 0xba, 0x74, 0x16, 0xeb, 0xee, 0x6e, 0xe1, 0x68, 0xcc, 0x09, + 0xed, 0xa5, 0x5d, 0xf7, 0xef, 0xd6, 0xfa, 0x9f, 0x39, 0xe1, 0x5c, 0x38, + 0xbd, 0x1b, 0xe6, 0x8a, 0xfa, 0xea, 0xbc, 0x14, 0x4c, 0x31, 0xa8, 0x9d, + 0x64, 0xa6, 0xec, 0xf0, 0xf8, 0xa2, 0x0a, 0x6c, 0xb9, 0xc5, 0x3d, 0x40, + 0x48, 0x41, 0x1d, 0xf2, 0xab, 0xd4, 0xdf, 0xfb, 0x55, 0x9e, 0xa5, 0xac, + 0xe9, 0xf0, 0x46, 
0x96, 0xc5, 0x4d, 0x5f, 0x5f, 0x64, 0x00, 0x69, 0x48, + 0x0e, 0xa3, 0xb5, 0x5d, 0x45, 0xce, 0x57, 0xc4, 0x45, 0xdb, 0xc6, 0x13, + 0x4b, 0xa7, 0xa0, 0xd5, 0x31, 0xb4, 0xd4, 0x0f, 0x4f, 0x29, 0x40, 0xc0, + 0xaa, 0xb7, 0x54, 0x21, 0xd5, 0x3a, 0x01, 0xbc, 0xa8, 0x58, 0xb5, 0x3f, + 0xa6, 0x1a, 0x06, 0xb5, 0x07, 0xd3, 0xb6, 0xff, 0x6e, 0x74, 0x08, 0x16, + 0x45, 0xaf, 0xd9, 0xc5, 0x4a, 0x0d, 0xd2, 0x8a, 0xd1, 0x6c, 0xba, 0x5a, + 0xd0, 0xee, 0x57, 0x10, 0xa4, 0x1a, 0xf4, 0x92, 0x97, 0xe0, 0xd7, 0xa8, + 0xff, 0x47, 0xed, 0x56, 0x6b, 0x91, 0x77, 0x5d, 0xa6, 0xcf, 0xed, 0x96, + 0xc5, 0x5a, 0xe3, 0x0b, 0x1d, 0xc0, 0xcc, 0xa1, 0x71, 0x95, 0xa8, 0xec, + 0xef, 0x33, 0x91, 0xd6, 0x53, 0x1f, 0xef, 0x43, 0xa9, 0x42, 0x2a, 0xc7, + 0xf6, 0x15, 0x60, 0xc2, 0xde, 0xeb, 0xac, 0xf8, 0x55, 0x27, 0x14, 0xf1, + 0xf8, 0x69, 0x55, 0xc8, 0x69, 0x1f, 0xf3, 0xc2, 0x71, 0xe8, 0x75, 0xa9, + 0x1a, 0x91, 0xc5, 0x1e, 0xe3, 0x52, 0x24, 0x5f, 0x60, 0xb5, 0xf1, 0xe6, + 0xdd, 0x4b, 0x1b, 0xdd, 0x3a, 0xad, 0x58, 0x36, 0x9c, 0xb3, 0x25, 0x9e, + 0x28, 0xd4, 0x3b, 0x6a, 0x64, 0xe7, 0x57, 0x54, 0xad, 0x4d, 0x44, 0xfc, + 0x54, 0xd3, 0xa3, 0x96, 0x4e, 0xee, 0xde, 0x23, 0x30, 0x30, 0x1f, 0x57, + 0x2f, 0xd6, 0xb4, 0xfa, 0x5c, 0x1b, 0x4a, 0x1b, 0x96, 0x58, 0x9a, 0xc7, + 0x25, 0xd0, 0x9c, 0xf3, 0x2b, 0x16, 0x58, 0x62, 0x0c, 0x5b, 0x45, 0x96, + 0xb0, 0xc2, 0x3e, 0xca, 0x0a, 0xb5, 0x0f, 0x06, 0xa8, 0xa3, 0xb2, 0x0a, + 0x6a, 0xc5, 0xb7, 0xf8, 0x69, 0xfa, 0xc1, 0xa8, 0xbc, 0x17, 0x6c, 0x92, + 0x06, 0x50, 0x74, 0x4b, 0x02, 0xc8, 0x4d, 0x9c, 0x3e, 0x94, 0x6f, 0xef, + 0x3e, 0xd9, 0x71, 0xa6, 0x3a, 0x70, 0x6a, 0x14, 0x0e, 0x06, 0xbe, 0x40, + 0x2b, 0xa1, 0xbb, 0x05, 0x71, 0x05, 0xbd, 0xd5, 0x2d, 0xd9, 0xe2, 0xf6, + 0xb4, 0x32, 0x33, 0xac, 0x0f, 0x9a, 0xe3, 0xaf, 0xf4, 0x44, 0x21, 0x59, + 0x91, 0x0d, 0xd0, 0xf1, 0x47, 0x9e, 0x00, 0x38, 0xa2, 0x1d, 0x61, 0x54, + 0xd2, 0x18, 0x9d, 0xe4, 0x4f, 0xf3, 0xbd, 0x04, 0xdb, 0x4d, 0x59, 0x8c, + 0xfa, 0x12, 0xdd, 0xe4, 0xb5, 0x32, 0x3b, 0xf8, 0x93, 0xae, 0x3b, 0xa9, + 0xb3, 0xe9, 0x57, 
0x30, 0x49, 0x6d, 0xaa, 0x35, 0x12, 0xce, 0x16, 0x98, + 0x3c, 0xd0, 0xed, 0xe8, 0xa6, 0xbc, 0xa6, 0xe6, 0x66, 0x0f, 0xb3, 0x12, + 0x95, 0x19, 0x56, 0x23, 0xb1, 0x30, 0x5d, 0xb3, 0x4c, 0x5f, 0x0c, 0xef, + 0x24, 0x12, 0xe0, 0x97, 0xf3, 0x3e, 0x9c, 0x49, 0xff, 0xa6, 0x6f, 0xa6, + 0xd2, 0x58, 0xbe, 0x3f, 0x30, 0xdd, 0x65, 0xd0, 0x40, 0xe1, 0xaf, 0x09, + 0xf1, 0xf4, 0x0f, 0x1a, 0xe5, 0xef, 0x51, 0x50, 0x38, 0x5d, 0xb0, 0x1e, + 0xed, 0x19, 0x8d, 0x4e, 0x20, 0xa1, 0x65, 0x07, 0x5b, 0x23, 0x0c, 0x14, + 0xd3, 0x18, 0xa3, 0xda, 0x58, 0x9f, 0x10, 0x00, 0xbd, 0xb5, 0x95, 0x07, + 0x1d, 0x0f, 0xf9, 0x2a, 0xe4, 0x35, 0x3c, 0x60, 0xad, 0xb2, 0x13, 0x3b, + 0xd5, 0x9e, 0xeb, 0xc7, 0x09, 0x6e, 0x53, 0xff, 0x95, 0xf3, 0xc1, 0x9b, + 0xcd, 0x21, 0x15, 0x3b, 0x5f, 0xfe, 0x4e, 0xaf, 0x3f, 0xf8, 0xe3, 0xa8, + 0x35, 0xee, 0x44, 0x33, 0xc7, 0x8c, 0x9c, 0x1c, 0x33, 0x55, 0x3c, 0x4a, + 0xa4, 0x35, 0xf6, 0xf0, 0x32, 0x8e, 0xed, 0x6d, 0x06, 0xff, 0x8d, 0x24, + 0x05, 0x72, 0x4c, 0xa2, 0x97, 0x25, 0x93, 0x3d, 0x79, 0x18, 0x22, 0x15, + 0xec, 0x5c, 0xc4, 0x10, 0x65, 0xec, 0x90, 0x6d, 0x28, 0xba, 0x93, 0xb5, + 0x2f, 0x53, 0xe4, 0x00, 0x9c, 0x39, 0xf5, 0x4c, 0xde, 0x51, 0x39, 0xc3, + 0xd8, 0x03, 0xc3, 0x97, 0xe1, 0xa8, 0x3e, 0x06, 0x26, 0x4d, 0xd9, 0x49, + 0x75, 0xbb, 0xd5, 0x69, 0x20, 0xfb, 0x85, 0x12, 0xc9, 0xac, 0xfc, 0x05, + 0xad, 0x57, 0xa9, 0x58, 0xcd, 0xfd, 0xbe, 0x64, 0x31, 0x50, 0x4d, 0xa4, + 0x93, 0xb6, 0x23, 0x3b, 0xfd, 0xd9, 0xdb, 0x46, 0xdd, 0x1f, 0x07, 0x54, + 0xc2, 0xc2, 0xd6, 0xad, 0xf6, 0x21, 0x39, 0xa1, 0x96, 0x53, 0x12, 0x46, + 0x5a, 0xc8, 0xf3, 0xf8, 0xe2, 0xa3, 0xd0, 0x29, 0x3f, 0x30, 0xca, 0x0b, + 0x57, 0xab, 0xcf, 0x1e, 0x08, 0x59, 0x3d, 0x41, 0x6a, 0xf7, 0xb2, 0xfc, + 0xff, 0x33, 0x46, 0xd1, 0x1a, 0xa6, 0x91, 0x54, 0xca, 0x27, 0x5a, 0x94, + 0x13, 0xf4, 0xf0, 0xcf, 0x58, 0xe0, 0x96, 0x50, 0xda, 0xe6, 0x91, 0xc7, + 0x8d, 0x14, 0x5b, 0xc1, 0xeb, 0x4a, 0x96, 0xf1, 0xa5, 0x43, 0xf6, 0x29, + 0x91, 0xb9, 0xb9, 0x67, 0x3f, 0x31, 0xd7, 0x08, 0xe6, 0x2b, 0xfb, 0x43, + 0x56, 0x39, 0x4e, 
0xf9, 0x02, 0x8e, 0x96, 0x1f, 0xa3, 0x3c, 0xae, 0x55, + 0x03, 0x05, 0x9a, 0x39, 0xbe, 0xf7, 0x67, 0xa1, 0x6b, 0x2f, 0x42, 0x45, + 0x9b, 0x45, 0x8f, 0x53, 0x1f, 0x96, 0x42, 0x54, 0xd2, 0x5b, 0xf0, 0x17, + 0x94, 0x41, 0xaf, 0xd4, 0xc6, 0x37, 0x5f, 0xc0, 0xbd, 0xe3, 0x44, 0x8d, + 0xc1, 0x69, 0x64, 0x2a, 0xe7, 0x08, 0xe5, 0x18, 0x92, 0x53, 0xfc, 0xed, + 0xd3, 0x69, 0x94, 0x6b, 0x10, 0x0b, 0x5e, 0x91, 0x38, 0x4b, 0xa5, 0x19, + 0x3a, 0x6a, 0x2e, 0x5a, 0xa2, 0x6f, 0x34, 0x2c, 0x7b, 0x5d, 0x53, 0x33, + 0x77, 0x46, 0xf8, 0x4a, 0xa2, 0x8d, 0x55, 0x67, 0xa8, 0xbd, 0xc6, 0x3c, + 0x5d, 0x47, 0xeb, 0x99, 0xed, 0xdc, 0xae, 0xcf, 0xec, 0xbe, 0x40, 0x60, + 0xfc, 0x36, 0x5c, 0x93, 0x95, 0x64, 0xd8, 0x47, 0x14, 0xe2, 0x1e, 0xa2, + 0xd4, 0xd4, 0xdf, 0xd9, 0x23, 0x18, 0xf2, 0x99, 0xe8, 0xe4, 0x2a, 0x3b, + 0xec, 0x2e, 0x28, 0xa8, 0x04, 0x74, 0x04, 0xa4, 0x32, 0xa6, 0x49, 0xf9, + 0x33, 0x6c, 0xa8, 0x1d, 0xb2, 0xbb, 0x57, 0xe4, 0xcf, 0xf2, 0x9e, 0x74, + 0x8d, 0xf7, 0x22, 0xaa, 0x0d, 0x8a, 0x2f, 0x34, 0x72, 0x33, 0xec, 0xdf, + 0x46, 0x57, 0x6c, 0x97, 0x94, 0xad, 0x06, 0x88, 0xeb, 0x20, 0xec, 0x79, + 0x44, 0xe1, 0xbc, 0xf8, 0xbd, 0xeb, 0x99, 0xe3, 0xaf, 0xfe, 0xc5, 0xb5, + 0xfa, 0x31, 0x75, 0x62, 0xff, 0x2a, 0x2a, 0x1b, 0xce, 0xad, 0xa8, 0xc8, + 0x3c, 0x54, 0x23, 0xf9, 0x9e, 0x2d, 0xe2, 0xa4, 0x4f, 0x5b, 0x4d, 0xb8, + 0x4f, 0xc6, 0xb3, 0xc6, 0xef, 0x66, 0x54, 0x31, 0xab, 0xd3, 0xf0, 0xb9, + 0xfa, 0xb6, 0x15, 0xe6, 0xdb, 0x4b, 0x51, 0x4d, 0x77, 0xa5, 0x3d, 0x4e, + 0xd9, 0xc9, 0xdb, 0x95, 0x31, 0x1d, 0x4d, 0x37, 0xe0, 0x34, 0xd3, 0xf3, + 0x20, 0x6b, 0xb8, 0x16, 0x0b, 0x4e, 0x55, 0x96, 0x56, 0x1e, 0xa7, 0xe8, + 0xc6, 0x3a, 0x08, 0x49, 0xa1, 0x16, 0x46, 0xc9, 0x43, 0xcb, 0x8f, 0x28, + 0x4a, 0x78, 0xaa, 0xf9, 0x6c, 0x74, 0xc8, 0x0b, 0xce, 0x13, 0x2c, 0xef, + 0xfe, 0x73, 0x42, 0xa7, 0xbc, 0x3d, 0xc9, 0xf2, 0xaf, 0x1c, 0x32, 0xdb, + 0xb2, 0x15, 0x70, 0x6b, 0x9b, 0x6e, 0x6f, 0x6e, 0xf7, 0x95, 0xea, 0x3e, + 0xd0, 0xb1, 0x2a, 0xbe, 0x8c, 0x66, 0x4e, 0xe9, 0x29, 0xe3, 0x35, 0xde, + 0xbf, 0x44, 0xbc, 
0x5e, 0x56, 0x8b, 0xb3, 0xd4, 0xdf, 0xf5, 0x4e, 0x2e, + 0xeb, 0xe6, 0x8e, 0x58, 0xe2, 0xfd, 0xe7, 0x27, 0xff, 0x07, 0x49, 0x20, + 0xdd, 0xcf, 0xe4, 0xd7, 0x5c, 0x5f, 0x1f, 0xcc, 0xeb, 0x29, 0xeb, 0x34, + 0xac, 0xd6, 0xb6, 0xf8, 0xae, 0xdf, 0x11, 0x58, 0xd5, 0xea, 0xf1, 0x76, + 0xe5, 0x4d, 0x51, 0x72, 0xd4, 0x5e, 0x1e, 0x0f, 0xfd, 0x2e, 0xbe, 0x8e, + 0x07, 0x1a, 0x1f, 0x99, 0x4d, 0x73, 0x70, 0xe1, 0x41, 0xb4, 0x20, 0x10, + 0x75, 0x0f, 0xc8, 0x69, 0x5f, 0x6c, 0x20, 0x2b, 0xc8, 0xfd, 0xe9, 0x4c, + 0xf4, 0x6f, 0x6a, 0xe0, 0x1a, 0xb5, 0xec, 0x2e, 0xf5, 0x25, 0x6d, 0x56, + 0x56, 0xb9, 0x42, 0xca, 0x70, 0x72, 0xe5, 0x41, 0x07, 0x4f, 0x41, 0x25, + 0xea, 0x0a, 0x5d, 0xe1, 0x0a, 0xd5, 0x6f, 0x35, 0x50, 0xcc, 0x27, 0x53, + 0x5f, 0x31, 0x1c, 0xee, 0xae, 0x26, 0xc8, 0xc4, 0x4f, 0x9b, 0xf5, 0xf6, + 0x4d, 0x19, 0xb9, 0xc4, 0x55, 0xcd, 0xe5, 0x8a, 0xe9, 0x45, 0xec, 0xf2, + 0xf9, 0x33, 0x4d, 0xba, 0x57, 0x8f, 0xd6, 0xf5, 0xf7, 0x92, 0xb3, 0xd3, + 0x65, 0x39, 0x07, 0x04, 0x92, 0x2f, 0x70, 0x99, 0x97, 0x96, 0x60, 0xe5, + 0x92, 0x60, 0xc3, 0x72, 0x1e, 0xc7, 0xe6, 0x1d, 0xbb, 0x5b, 0xd5, 0x64, + 0x1b, 0x36, 0x45, 0xb8, 0xcb, 0x42, 0xe7, 0x26, 0x45, 0x65, 0xc8, 0x04, + 0x1c, 0x05, 0x9b, 0x48, 0xe3, 0x93, 0x8e, 0xb2, 0x1c, 0x6a, 0xab, 0x60, + 0xc2, 0xa6, 0x1a, 0x71, 0xd5, 0x2c, 0xb8, 0xe9, 0x9e, 0x66, 0x8d, 0xb6, + 0xb1, 0x99, 0x90, 0x9c, 0x1b, 0xc9, 0x44, 0x6d, 0x31, 0xbb, 0x62, 0x6e, + 0x46, 0xcc, 0xd7, 0x47, 0x3a, 0x40, 0x63, 0x33, 0x34, 0x4f, 0x50, 0x3c, + 0x94, 0x97, 0xe9, 0xe8, 0x3a, 0xf7, 0x2d, 0x2d, 0x9c, 0xb6, 0x5d, 0x52, + 0xbd, 0xa9, 0x2d, 0x42, 0xfc, 0xe8, 0x70, 0x09, 0x48, 0xd0, 0x36, 0x0b, + 0x3d, 0x2b, 0x9f, 0xe2, 0x4c, 0xdf, 0xf3, 0x57, 0x73, 0x55, 0xf7, 0x34, + 0xb8, 0x6b, 0x44, 0x6f, 0xf6, 0x6d, 0xcf, 0x93, 0x09, 0x14, 0xac, 0x8f, + 0xde, 0xce, 0x5f, 0x05, 0x04, 0x9f, 0xc7, 0x05, 0x5f, 0xdd, 0x2e, 0xfc, + 0x53, 0xec, 0x9e, 0xdb, 0xa8, 0xa2, 0xc7, 0x53, 0x5c, 0x9a, 0x4d, 0xb6, + 0x6f, 0xa5, 0xc6, 0xf3, 0xc5, 0xa4, 0x56, 0x62, 0xdc, 0x75, 0xe4, 0x0b, + 0xb0, 0xcc, 0x38, 
0xde, 0x2d, 0xbb, 0xbc, 0x0b, 0xc6, 0xab, 0xac, 0xac, + 0x46, 0xce, 0x1e, 0xe6, 0x47, 0x6c, 0x6e, 0x8e, 0x00, 0x00, 0xa0, 0xae, + 0x1e, 0x1d, 0xaa, 0x22, 0xaf, 0x34, 0xc7, 0x26, 0x37, 0x01, 0x46, 0x25, + 0x9c, 0x5f, 0x92, 0xef, 0xda, 0x07, 0x64, 0x62, 0xe4, 0xf7, 0x4c, 0xa2, + 0x41, 0xf1, 0x10, 0xe0, 0xe5, 0x73, 0x72, 0xe1, 0xf8, 0x66, 0x19, 0x58, + 0xa9, 0xdf, 0xb1, 0x41, 0xcb, 0xb3, 0xc4, 0xe6, 0x21, 0xbe, 0x17, 0x26, + 0xa9, 0x68, 0x96, 0xde, 0x5d, 0xba, 0x8f, 0x1b, 0x09, 0x00, 0x39, 0x0e, + 0xc2, 0x8d, 0x31, 0x61, 0xfe, 0x9e, 0x60, 0x05, 0xf3, 0x72, 0xdf, 0x78, + 0x14, 0x5a, 0x1b, 0x74, 0xa1, 0x23, 0xa7, 0x6e, 0x93, 0x76, 0xfa, 0x4a, + 0x73, 0xa1, 0x3b, 0xda, 0x0b, 0x06, 0xdd, 0xfc, 0x2f, 0xef, 0x0a, 0x38, + 0x03, 0xbf, 0xbb, 0x12, 0x29, 0x6b, 0xec, 0x68, 0xc7, 0xa6, 0xf9, 0x72, + 0xbc, 0xdb, 0xeb, 0x4e, 0x8f, 0x5f, 0x3a, 0xa9, 0x06, 0x4e, 0x3c, 0xf4, + 0x3b, 0xe0, 0x98, 0x9b, 0x77, 0x57, 0x0f, 0x39, 0x08, 0x43, 0x3f, 0x9b, + 0x76, 0x11, 0xd3, 0x38, 0xb6, 0x1f, 0x1e, 0xfe, 0xbb, 0x16, 0x37, 0x24, + 0x15, 0xf7, 0x8e, 0x61, 0x3d, 0xf5, 0x60, 0xab, 0x46, 0x49, 0xd6, 0xb2, + 0x8e, 0x35, 0xd5, 0x66, 0x20, 0x1f, 0xad, 0xf5, 0x95, 0xc3, 0x3e, 0xaa, + 0xda, 0x12, 0x1f, 0x33, 0xf4, 0xc0, 0xd9, 0x9e, 0x09, 0x76, 0x8b, 0x2f, + 0x35, 0xe2, 0x58, 0x09, 0x36, 0xf1, 0x03, 0xbc, 0xc2, 0x54, 0x67, 0x29, + 0x00, 0x3b, 0xf0, 0x24, 0xdf, 0xa0, 0x92, 0x71, 0xc3, 0x98, 0xe8, 0x5d, + 0xbe, 0xc7, 0xe8, 0x6f, 0x2f, 0x05, 0x89, 0x9f, 0xa1, 0x63, 0x29, 0x12, + 0x94, 0xff, 0xc7, 0x4c, 0xec, 0x98, 0x0e, 0xb8, 0xeb, 0x9e, 0x6d, 0x1e, + 0x4f, 0x4a, 0x1e, 0x41, 0xb0, 0xf9, 0x40, 0x8b, 0xdd, 0xd9, 0xa6, 0x1b, + 0xd4, 0x6d, 0xaf, 0x5b, 0x14, 0x68, 0xfd, 0x96, 0x5d, 0x0d, 0xad, 0x46, + 0x03, 0xf8, 0xd7, 0x13, 0x1d, 0xf3, 0x47, 0xbe, 0x46, 0x3d, 0xc7, 0xdd, + 0xa9, 0x60, 0x05, 0x15, 0xef, 0x9d, 0xa4, 0xb8, 0xde, 0xf2, 0x41, 0xe2, + 0x07, 0x1d, 0xcb, 0xe8, 0xf3, 0x9c, 0x9c, 0x5e, 0xcd, 0xec, 0x53, 0x39, + 0xf2, 0x62, 0x3b, 0x69, 0x3a, 0x29, 0xc7, 0xb3, 0x57, 0xce, 0x58, 0xd6, + 0x55, 0xf8, 0xc2, 
0xf1, 0x16, 0xf3, 0x33, 0x3f, 0xf2, 0xaa, 0x63, 0x42, + 0x27, 0x01, 0x22, 0x5a, 0x1e, 0x8d, 0xa5, 0x33, 0x34, 0x29, 0x12, 0xf6, + 0x07, 0x22, 0xfd, 0xbb, 0x72, 0x60, 0x2a, 0xf5, 0xec, 0x71, 0xfe, 0xd7, + 0xc1, 0xf5, 0xdf, 0x97, 0x3e, 0x4a, 0x9a, 0x97, 0x6f, 0x56, 0xf1, 0xd4, + 0xba, 0x29, 0x09, 0x46, 0x3f, 0x10, 0xdc, 0x2d, 0xb2, 0x04, 0x32, 0x38, + 0xa3, 0xc7, 0x75, 0x95, 0x16, 0xd6, 0x12, 0x44, 0x7a, 0xd3, 0x18, 0xb3, + 0x51, 0x72, 0x63, 0xb8, 0xae, 0x9b, 0xf1, 0xec, 0x17, 0xe4, 0x2d, 0xed, + 0x29, 0x05, 0x63, 0xd7, 0x01, 0xf4, 0xf5, 0xc1, 0x6d, 0x13, 0x5f, 0x5c, + 0x73, 0x11, 0xc9, 0x53, 0xf4, 0xda, 0x90, 0xa2, 0x1c, 0x0b, 0x1d, 0x37, + 0x28, 0xa1, 0x06, 0x65, 0xd3, 0x49, 0x5d, 0x07, 0x1f, 0x93, 0xa9, 0x98, + 0xc5, 0xa5, 0x13, 0xc5, 0xac, 0xda, 0x64, 0x25, 0x77, 0x9a, 0xd5, 0xa9, + 0xe9, 0x3a, 0x77, 0x62, 0xac, 0xf2, 0x76, 0xf4, 0x03, 0xb6, 0x03, 0x6e, + 0xef, 0x97, 0x13, 0x1c, 0xd1, 0xb9, 0x73, 0x12, 0xf7, 0x10, 0xbd, 0x1c, + 0xa1, 0xe7, 0xed, 0xd7, 0xa0, 0xd7, 0x53, 0xa1, 0x21, 0xf1, 0x5f, 0x1e, + 0xec, 0x36, 0x0d, 0x2c, 0xce, 0x74, 0x4a, 0x0c, 0x97, 0x5a, 0x76, 0x62, + 0x18, 0x9c, 0xc3, 0xc1, 0xc4, 0x5e, 0xf1, 0xfa, 0xe6, 0x4b, 0x15, 0xda, + 0xfa, 0xfd, 0xe9, 0x98, 0x09, 0xc3, 0x67, 0x63, 0x1f, 0x28, 0x37, 0xf0, + 0x59, 0x4b, 0x4b, 0xa3, 0xd1, 0x41, 0x94, 0xa6, 0x05, 0xb0, 0x93, 0xee, + 0x41, 0xa4, 0xce, 0xee, 0xea, 0xc4, 0x43, 0x6e, 0xab, 0x65, 0x70, 0xe3, + 0x4d, 0xf1, 0x02, 0xf5, 0x0f, 0xd5, 0x5e, 0xfd, 0x03, 0xcd, 0x22, 0x27, + 0x90, 0xf4, 0x98, 0xa2, 0xc0, 0xb4, 0xd5, 0x04, 0xfa, 0x75, 0x22, 0x4c, + 0xe7, 0xdd, 0xef, 0x3a, 0x1d, 0xb6, 0x00, 0x58, 0xcd, 0x5a, 0xbc, 0x12, + 0xea, 0x5a, 0xda, 0xa9, 0x18, 0x0e, 0xff, 0x51, 0xc4, 0xaf, 0xc8, 0x95, + 0xfb, 0x92, 0xdf, 0x99, 0xc9, 0x4e, 0xfe, 0xb1, 0xb0, 0xca, 0xa1, 0xba, + 0x90, 0xc8, 0x07, 0x34, 0x52, 0x6d, 0xd8, 0x05, 0x72, 0x2e, 0xee, 0x98, + 0xc0, 0x1e, 0x25, 0xb3, 0xa2, 0xb4, 0x9c, 0xa5, 0xdc, 0xd3, 0xb1, 0xdf, + 0x17, 0xd9, 0xda, 0xe9, 0x5d, 0x41, 0xca, 0xc7, 0xe4, 0x94, 0x0d, 0x67, + 0xba, 0x9c, 0xcf, 
0x52, 0xf0, 0x00, 0x54, 0xe0, 0xbd, 0x3c, 0xc7, 0xb9, + 0x6a, 0x11, 0xc6, 0xd1, 0x62, 0xc3, 0xcf, 0xc2, 0x6a, 0x44, 0xeb, 0x41, + 0x43, 0x54, 0xe2, 0xf5, 0xc4, 0x11, 0xd7, 0x6a, 0xf2, 0x76, 0xa9, 0x16, + 0xae, 0xe2, 0x11, 0xfb, 0x04, 0x3d, 0xee, 0xd1, 0x98, 0x30, 0x0b, 0x6b, + 0x8a, 0x6f, 0x45, 0xb7, 0x01, 0x64, 0x46, 0x32, 0x61, 0xd5, 0x05, 0xfa, + 0xb1, 0x14, 0x54, 0x39, 0x13, 0x9b, 0xd5, 0x1d, 0x5c, 0xad, 0xd0, 0x5e, + 0x6d, 0xb3, 0xa1, 0xb3, 0xc5, 0x8d, 0xf8, 0x12, 0xd9, 0x5f, 0x94, 0x27, + 0xdf, 0x30, 0xc8, 0x0e, 0x3a, 0x46, 0x70, 0x5c, 0x4c, 0xaa, 0x24, 0xc3, + 0x50, 0x62, 0x52, 0xc8, 0x63, 0x64, 0xc9, 0x49, 0x74, 0x1c, 0xd2, 0x49, + 0x0f, 0x20, 0x69, 0x53, 0x97, 0x34, 0xc0, 0x92, 0x48, 0x28, 0x7b, 0x64, + 0xca, 0xea, 0x07, 0x6c, 0x63, 0x3e, 0xb6, 0xdb, 0xd5, 0x52, 0x9d, 0x7a, + 0x5f, 0x46, 0xc1, 0xb9, 0x3e, 0xe2, 0xe9, 0xeb, 0x04, 0x65, 0xc0, 0x74, + 0x4b, 0x07, 0x6a, 0x19, 0x4a, 0x9d, 0x05, 0xa0, 0xba, 0xae, 0x74, 0xef, + 0x62, 0x09, 0x57, 0x36, 0xe5, 0x9c, 0x54, 0x59, 0x3d, 0x04, 0xf0, 0xfb, + 0x6f, 0x89, 0x13, 0x1f, 0x1f, 0x88, 0x03, 0x6b, 0x0c, 0xeb, 0x53, 0xac, + 0x3a, 0x18, 0xa4, 0x93, 0xcc, 0x4f, 0xf5, 0x92, 0x44, 0x23, 0x9e, 0x67, + 0xf0, 0xf5, 0x2f, 0xb9, 0xc9, 0x34, 0x76, 0x97, 0x1d, 0x94, 0x75, 0x3f, + 0x47, 0x97, 0xe0, 0x30, 0xcc, 0xff, 0xd2, 0x7a, 0x3b, 0x04, 0xa7, 0xa5, + 0x62, 0x9e, 0xe4, 0x8f, 0xd8, 0x62, 0xee, 0x1d, 0x1c, 0xff, 0xad, 0x18, + 0xc9, 0x66, 0x47, 0x36, 0xfb, 0x2e, 0x74, 0x2a, 0xe7, 0x5f, 0xb2, 0x12, + 0xd2, 0x9e, 0xae, 0x2b, 0x92, 0xb8, 0x53, 0x66, 0x22, 0x5c, 0xa8, 0xaf, + 0x4f, 0x29, 0xab, 0x64, 0x50, 0x09, 0xe9, 0x2f, 0x2e, 0x62, 0x2e, 0x0e, + 0x8a, 0xd6, 0xeb, 0xa7, 0x5d, 0x3e, 0x9e, 0xe1, 0x39, 0x52, 0x13, 0x57, + 0x54, 0x5c, 0x78, 0xed, 0xb3, 0xfc, 0x5f, 0xa1, 0xf3, 0x2a, 0x77, 0x90, + 0xa9, 0x09, 0xa1, 0x05, 0x3b, 0xa9, 0x6a, 0xf5, 0xc4, 0xfa, 0x97, 0x79, + 0x64, 0x57, 0x1a, 0xf1, 0x74, 0xe5, 0x16, 0x93, 0xa9, 0xef, 0xe6, 0xdf, + 0x36, 0xd2, 0xd0, 0xe6, 0xb8, 0xdd, 0xe9, 0x13, 0x4c, 0xcd, 0x22, 0x98, + 0xc1, 0x94, 0xbb, 
0x04, 0x2a, 0x4a, 0x69, 0x10, 0x5a, 0xcb, 0x1d, 0x9e, + 0xc4, 0x3d, 0x6d, 0x0e, 0xe0, 0x12, 0xb4, 0xe1, 0x6c, 0x55, 0x6f, 0xa3, + 0xf5, 0x1b, 0x0c, 0xe5, 0x1c, 0x99, 0x8b, 0x23, 0x23, 0xbc, 0x33, 0xe4, + 0xd4, 0x15, 0xfd, 0xcc, 0x90, 0x87, 0xb5, 0x0e, 0x24, 0xba, 0x20, 0x1b, + 0xcf, 0x67, 0x98, 0x1a, 0x35, 0xe7, 0xc3, 0x95, 0x29, 0xd6, 0xd2, 0x4f, + 0xe4, 0x14, 0xd5, 0xa1, 0x93, 0xff, 0x24, 0x0e, 0xfc, 0xb7, 0xd6, 0xde, + 0x05, 0xc5, 0x2f, 0xaa, 0x92, 0xd4, 0xd8, 0xac, 0x8f, 0x67, 0x45, 0xdb, + 0x36, 0x19, 0x15, 0x09, 0x9a, 0x3f, 0x2a, 0x56, 0xd5, 0xa9, 0x26, 0xb6, + 0xcb, 0x19, 0xf3, 0x6a, 0xbb, 0xba, 0xba, 0xa3, 0x68, 0x90, 0x0f, 0xb1, + 0x98, 0x14, 0x33, 0xd8, 0x12, 0xdf, 0xef, 0xe5, 0x01, 0x93, 0xab, 0xf8, + 0x93, 0x40, 0xbd, 0xa0, 0x01, 0x34, 0x54, 0xfd, 0xa0, 0xc4, 0xc3, 0xf3, + 0x6b, 0x90, 0x30, 0xc1, 0xbe, 0xd8, 0xbb, 0xab, 0x71, 0xaa, 0xe5, 0x3b, + 0x2d, 0x5d, 0x6e, 0x00, 0x34, 0xa8, 0x02, 0x34, 0xa9, 0x67, 0x95, 0xcd, + 0xed, 0xa2, 0x25, 0x55, 0xc9, 0x03, 0x1c, 0x30, 0xe7, 0xdf, 0xe6, 0xe7, + 0x2b, 0x5a, 0x9a, 0xcd, 0xa8, 0xf0, 0x4e, 0xe4, 0xd7, 0x90, 0x5f, 0x4e, + 0xbf, 0x5d, 0x68, 0x12, 0x1c, 0x4c, 0x68, 0x03, 0x9c, 0x49, 0xcb, 0xe6, + 0xc4, 0xfd, 0xad, 0xd5, 0xa8, 0xd8, 0xda, 0x2f, 0x13, 0xbc, 0x42, 0x61, + 0xa5, 0x0a, 0x1a, 0xe9, 0x5e, 0x5c, 0x01, 0x7c, 0xca, 0x73, 0x6f, 0x32, + 0xc1, 0x96, 0x24, 0x9d, 0x12, 0x20, 0x11, 0x6a, 0xf6, 0xbc, 0xff, 0x6a, + 0xc1, 0x58, 0x0d, 0xb9, 0xad, 0xc5, 0xde, 0x69, 0x37, 0xbe, 0xd9, 0x93, + 0xcc, 0x2b, 0xe9, 0x13, 0x45, 0xa0, 0x6c, 0x3f, 0x44, 0x34, 0xaf, 0x43, + 0x6d, 0xae, 0xef, 0xb2, 0x65, 0x03, 0xc1, 0xef, 0x10, 0x1e, 0xd8, 0x6e, + 0xb5, 0xb9, 0x03, 0xd8, 0x6e, 0x2f, 0x53, 0xe6, 0xc0, 0xaf, 0x44, 0xd2, + 0xd8, 0x15, 0x56, 0x15, 0x59, 0xd6, 0xd4, 0xe4, 0x1a, 0x25, 0xd5, 0xcf, + 0xe7, 0x6a, 0x55, 0xd4, 0xf8, 0x42, 0x4c, 0xcb, 0x9a, 0x48, 0x4d, 0x27, + 0x61, 0x4c, 0x36, 0x2b, 0xcb, 0x10, 0xba, 0xf7, 0xe3, 0x23, 0x27, 0xc5, + 0x6a, 0x1b, 0x94, 0x69, 0x64, 0xb1, 0x8c, 0xdb, 0xd4, 0x0d, 0x32, 0x3e, + 0x58, 0x73, 0xa8, 
0x2f, 0x3d, 0x22, 0xd9, 0x0d, 0x2a, 0x52, 0xf0, 0xdd, + 0xeb, 0x21, 0x42, 0xc7, 0x59, 0x96, 0x09, 0x93, 0x5a, 0x70, 0xc3, 0x21, + 0x5f, 0xce, 0xc2, 0xdd, 0xcf, 0x61, 0xed, 0x1c, 0xfb, 0x2f, 0x57, 0xf7, + 0x31, 0xb8, 0x3e, 0x92, 0x29, 0xd4, 0x47, 0x6a, 0x19, 0x66, 0x00, 0xc2, + 0xc4, 0x6c, 0xb5, 0xc5, 0x68, 0x24, 0xa8, 0x64, 0x26, 0x72, 0x43, 0x20, + 0x9f, 0xf1, 0x3f, 0xac, 0x64, 0xb5, 0x12, 0x26, 0x13, 0x76, 0x52, 0x05, + 0xda, 0x57, 0xe3, 0x53, 0x73, 0x30, 0x21, 0x27, 0x75, 0x8d, 0x37, 0xd1, + 0x77, 0x40, 0x97, 0x2a, 0xb7, 0x0b, 0x2e, 0x9e, 0x4c, 0x36, 0x75, 0x44, + 0x15, 0xdb, 0x96, 0x70, 0xf9, 0x33, 0x9a, 0x1e, 0x6e, 0x13, 0x05, 0x38, + 0x2c, 0xbf, 0x0a, 0xdd, 0x2b, 0x2b, 0x38, 0x77, 0xa9, 0x00, 0x2d, 0x5e, + 0xee, 0x4b, 0xf3, 0x20, 0x7a, 0x90, 0x97, 0x44, 0xdf, 0x55, 0xfd, 0x50, + 0xe3, 0x24, 0x25, 0xa9, 0xd9, 0x3f, 0x6d, 0x09, 0x32, 0x67, 0xb5, 0x43, + 0xf1, 0xc7, 0xa7, 0xfb, 0x92, 0xde, 0xc3, 0xbf, 0x64, 0x6b, 0x35, 0xda, + 0x08, 0x94, 0x68, 0xb0, 0xc8, 0x3f, 0xb5, 0x9f, 0x15, 0x05, 0xff, 0x6c, + 0xbc, 0x22, 0x61, 0xf4, 0x67, 0xf8, 0x1f, 0x2e, 0x91, 0xc8, 0x12, 0xdc, + 0xcb, 0x22, 0x05, 0xb8, 0xab, 0x0d, 0x0e, 0xd7, 0x04, 0x8e, 0x32, 0x0e, + 0xfe, 0x72, 0x79, 0xc3, 0xba, 0xd8, 0x68, 0x3e, 0x5d, 0xab, 0xa0, 0xf8, + 0x26, 0x57, 0xe4, 0x20, 0x91, 0x0a, 0xde, 0x52, 0x95, 0xbc, 0xb7, 0x71, + 0x50, 0xe4, 0x3f, 0x07, 0x4c, 0xa8, 0x6a, 0xb6, 0xa0, 0x95, 0xe2, 0x31, + 0x8f, 0x5f, 0xfa, 0xdd, 0xee, 0x02, 0x23, 0x56, 0xf1, 0xdd, 0x1a, 0xa6, + 0xa0, 0x2d, 0x46, 0x36, 0x6c, 0x79, 0xe8, 0x67, 0x43, 0xdd, 0xe7, 0x2e, + 0x25, 0xda, 0x35, 0x6f, 0x63, 0xf1, 0x2c, 0x6c, 0x61, 0xaa, 0xb7, 0x51, + 0x91, 0xa1, 0x7c, 0x54, 0x9a, 0xf6, 0x3c, 0x3f, 0xa8, 0xba, 0x4d, 0xee, + 0xb6, 0xab, 0xa5, 0x05, 0xc6, 0xb6, 0xe8, 0x2f, 0x1b, 0x99, 0xb0, 0x45, + 0x3e, 0xc3, 0x50, 0x26, 0x0b, 0x10, 0x61, 0x5a, 0xc6, 0x25, 0x2d, 0x07, + 0xb6, 0x28, 0x59, 0xf3, 0xb4, 0x02, 0x61, 0xa0, 0xd0, 0x0a, 0xae, 0xd6, + 0x3c, 0xcc, 0x5f, 0xfb, 0xc0, 0xfd, 0xeb, 0x7b, 0xe2, 0x66, 0xc5, 0x98, + 0x70, 0x50, 0x31, 
0x3a, 0x12, 0x45, 0xf4, 0x1c, 0xba, 0xa6, 0x92, 0x51, + 0xae, 0x68, 0xec, 0xb0, 0x1a, 0xd9, 0x45, 0x00, 0xd6, 0x9e, 0xad, 0x64, + 0xfe, 0xd9, 0xfb, 0xcc, 0x57, 0xff, 0x9e, 0xa3, 0x71, 0xe7, 0x7a, 0xaf, + 0x26, 0x31, 0x31, 0x6a, 0x41, 0xa4, 0x4d, 0x68, 0xbc, 0xcb, 0xfa, 0xb4, + 0x3a, 0x1c, 0x3a, 0x8f, 0xcd, 0xc1, 0x95, 0xb2, 0x46, 0x72, 0xf7, 0xfc, + 0x20, 0xe2, 0x2f, 0x0f, 0xbd, 0x74, 0xe1, 0x2a, 0xd5, 0xf6, 0xe9, 0xe1, + 0x45, 0x7d, 0x95, 0xb0, 0x49, 0xce, 0xe8, 0x53, 0x69, 0x46, 0x9d, 0x03, + 0x5f, 0x15, 0x2e, 0x92, 0x4c, 0xb7, 0xf1, 0x43, 0x67, 0x8a, 0x43, 0xc6, + 0x90, 0xec, 0xb5, 0x5d, 0xd5, 0x64, 0x16, 0x6e, 0xf0, 0xad, 0x4e, 0xf0, + 0x56, 0xe8, 0x77, 0xd5, 0x47, 0x47, 0x41, 0xc9, 0x98, 0x3a, 0xcb, 0xe0, + 0x01, 0x77, 0x93, 0x15, 0xe0, 0xd3, 0x93, 0xbe, 0xe1, 0x97, 0xe0, 0x21, + 0x60, 0x2b, 0xf1, 0x4a, 0x62, 0x29, 0x11, 0xe9, 0x61, 0x55, 0xc4, 0x57, + 0x04, 0xa8, 0xb3, 0xb3, 0x61, 0xd7, 0xa6, 0xce, 0x50, 0xd2, 0xc3, 0x38, + 0xda, 0xc2, 0x23, 0x67, 0x37, 0x09, 0xa7, 0xfd, 0x29, 0xdc, 0xcc, 0x52, + 0x65, 0xea, 0x3f, 0xcc, 0x67, 0x5e, 0x3b, 0xd4, 0x59, 0x59, 0x12, 0x9b, + 0xf1, 0xd2, 0x43, 0x46, 0x54, 0xcd, 0xb9, 0xbe, 0x71, 0xb6, 0x6d, 0x6a, + 0x62, 0xc5, 0x59, 0xc1, 0x21, 0xf7, 0x4c, 0x91, 0x64, 0xe0, 0xd7, 0xd9, + 0x34, 0x60, 0x0d, 0xb2, 0x93, 0xd8, 0xd3, 0x01, 0x8b, 0xf3, 0x9c, 0x6c, + 0xff, 0x63, 0xca, 0xd2, 0xf4, 0x76, 0xe3, 0x60, 0x52, 0x5c, 0x0e, 0xa3, + 0x13, 0xc8, 0xd9, 0xa7, 0x13, 0x6d, 0x1b, 0x29, 0xc0, 0xb1, 0x54, 0x31, + 0x33, 0x55, 0x44, 0x0a, 0x0a, 0x96, 0x3f, 0xf0, 0xb2, 0x64, 0x23, 0xa1, + 0xc8, 0x08, 0x01, 0x94, 0x2f, 0xc8, 0x0a, 0xfb, 0x93, 0x38, 0xe4, 0xc1, + 0xd9, 0xea, 0x46, 0x96, 0xdd, 0x5d, 0x62, 0xfc, 0xb0, 0x4d, 0x17, 0xe8, + 0xa0, 0xd4, 0x35, 0x98, 0x65, 0xb0, 0x27, 0x97, 0xbc, 0xe8, 0x48, 0x38, + 0x90, 0x9b, 0x6e, 0xf1, 0xd2, 0x17, 0x1b, 0xbf, 0x03, 0xc6, 0xa3, 0x42, + 0xaf, 0xdc, 0x44, 0x9d, 0x9e, 0x69, 0x67, 0x33, 0x61, 0xfb, 0x96, 0xfa, + 0xff, 0xf4, 0xa8, 0x3c, 0xb6, 0x42, 0xd2, 0x4c, 0xc0, 0xa8, 0x2a, 0x4b, + 0x37, 0x78, 0x41, 
0x94, 0xf6, 0x04, 0xb9, 0x54, 0xe4, 0x2b, 0xfc, 0xed, + 0xf5, 0xf7, 0x62, 0x23, 0x44, 0xc4, 0xd7, 0x5a, 0xeb, 0xc2, 0x3d, 0x4c, + 0x41, 0x22, 0xa0, 0xe3, 0x22, 0xbc, 0x91, 0x69, 0x37, 0x3f, 0x94, 0xfd, + 0x07, 0xa7, 0x6e, 0x53, 0x27, 0xdc, 0xb0, 0x14, 0x8d, 0x0a, 0x08, 0x31, + 0xba, 0xf0, 0xd0, 0xda, 0xa6, 0x7a, 0xc0, 0x4c, 0x9d, 0x3b, 0x8f, 0xee, + 0x11, 0xc7, 0x9f, 0xc9, 0xcc, 0x4c, 0x26, 0x51, 0xb4, 0x10, 0xde, 0xc2, + 0xa3, 0xe0, 0xaa, 0x7c, 0x9c, 0x27, 0x8d, 0x04, 0x8e, 0xfc, 0xe4, 0x68, + 0x93, 0xf9, 0x67, 0x28, 0xa0, 0xe6, 0xca, 0xbd, 0x5a, 0x64, 0x98, 0x9f, + 0xe3, 0x7b, 0x16, 0x5d, 0x61, 0xcc, 0x4c, 0x64, 0x04, 0x1b, 0xcc, 0xa6, + 0xa2, 0x31, 0x28, 0xa2, 0xac, 0xd0, 0xce, 0x40, 0x19, 0xe7, 0xf9, 0xea, + 0xc5, 0x98, 0x50, 0x16, 0x38, 0xad, 0x58, 0x21, 0x2e, 0x10, 0x48, 0x4f, + 0xe7, 0xc0, 0xc0, 0x6c, 0xcd, 0xe2, 0xc3, 0xcd, 0xc5, 0xfc, 0x26, 0x91, + 0xea, 0xcf, 0x52, 0x97, 0x9f, 0xdc, 0x2c, 0x45, 0xd8, 0x50, 0xf8, 0x75, + 0xa2, 0x93, 0x52, 0x2b, 0x23, 0xd3, 0x30, 0x9d, 0xa7, 0xf7, 0xbb, 0xc2, + 0xd2, 0xb7, 0x9d, 0xec, 0xf9, 0x9a, 0xec, 0x3e, 0xc0, 0xce, 0x64, 0xb8, + 0xf5, 0x41, 0x4e, 0x06, 0xa1, 0x25, 0xf2, 0x40, 0xee, 0x07, 0xec, 0x6d, + 0x9a, 0xd0, 0x5c, 0xdd, 0xe9, 0xf5, 0x56, 0xf9, 0x2e, 0xf5, 0xdb, 0x69, + 0xc9, 0x3e, 0xb5, 0x0c, 0xbc, 0x29, 0xa4, 0xa9, 0x55, 0x9b, 0xf6, 0xab, + 0x1f, 0x55, 0x9d, 0x25, 0xd2, 0xde, 0x3f, 0xa0, 0xe5, 0x1c, 0xb3, 0x90, + 0x2f, 0x6c, 0xaf, 0xb5, 0x6d, 0x23, 0x15, 0xab, 0x91, 0x55, 0x5f, 0x02, + 0x20, 0x22, 0x8e, 0xc1, 0x4a, 0x63, 0xa6, 0x5e, 0x85, 0x99, 0x58, 0xdc, + 0xde, 0xb0, 0x76, 0x9f, 0x21, 0x4d, 0xe9, 0x47, 0xcc, 0x3f, 0x02, 0x91, + 0x75, 0x67, 0xe5, 0x6a, 0x2c, 0xc3, 0x69, 0x95, 0x2d, 0x74, 0x77, 0xf7, + 0x1d, 0xe1, 0x12, 0x2b, 0xcf, 0x4c, 0x7b, 0xcf, 0xbe, 0x24, 0x1d, 0x07, + 0x34, 0xd3, 0x67, 0xa8, 0xb9, 0x76, 0x2a, 0x3e, 0xfd, 0xb5, 0xcd, 0xf6, + 0x29, 0x07, 0x4e, 0x17, 0xcf, 0x28, 0xdd, 0x90, 0x4b, 0x17, 0x24, 0x55, + 0xdc, 0x78, 0xe5, 0xf4, 0x97, 0x31, 0x3d, 0xfa, 0x96, 0xe2, 0x99, 0x61, + 0xb1, 0xcb, 0xa4, 
0x7b, 0x4e, 0x5d, 0x6a, 0xf8, 0xb2, 0x79, 0xfc, 0xa9, + 0xd9, 0x27, 0x46, 0xdd, 0x52, 0xdf, 0x24, 0x66, 0x1c, 0xa6, 0xbc, 0x18, + 0x13, 0x72, 0x38, 0x53, 0xac, 0x1b, 0x67, 0x1f, 0x30, 0xae, 0x5a, 0xf3, + 0x55, 0xd0, 0xe1, 0x23, 0x9a, 0x46, 0xa4, 0xbb, 0x68, 0x73, 0x30, 0xda, + 0xb7, 0x3b, 0xff, 0xd1, 0x0d, 0xe0, 0xf7, 0xda, 0x36, 0x3a, 0x7a, 0x19, + 0xf5, 0x2e, 0xf4, 0xda, 0xa4, 0x09, 0x94, 0xb8, 0x18, 0xad, 0x6b, 0xf6, + 0x64, 0xbf, 0x2a, 0x04, 0xc6, 0xde, 0x0f, 0x45, 0x27, 0x3a, 0x3d, 0x61, + 0xf5, 0xde, 0x38, 0x1d, 0x23, 0x23, 0x70, 0x00, 0xfc, 0x0c, 0x5c, 0x96, + 0xc1, 0x21, 0x78, 0x25, 0x24, 0x71, 0xd1, 0xe2, 0xe9, 0x1a, 0x2f, 0x48, + 0x4d, 0x09, 0x24, 0x27, 0xe4, 0xe7, 0x42, 0x76, 0x92, 0x93, 0x7a, 0x62, + 0x76, 0xc6, 0xd7, 0xdf, 0xe4, 0x5e, 0x0e, 0xfc, 0x4e, 0x0a, 0x65, 0x63, + 0x51, 0x90, 0xfd, 0x92, 0x5f, 0x9a, 0x49, 0xa9, 0x6c, 0xb1, 0xb6, 0xe6, + 0xab, 0xf7, 0xb9, 0x39, 0xc0, 0xed, 0x1d, 0x65, 0x9c, 0x24, 0x21, 0xc1, + 0x0d, 0xd6, 0x9a, 0xbe, 0xd4, 0x74, 0xa2, 0x70, 0xab, 0x0b, 0x45, 0xf0, + 0xc9, 0xaa, 0xf1, 0x49, 0x0b, 0x6c, 0x20, 0xdc, 0x37, 0x2b, 0x13, 0x68, + 0x48, 0x0e, 0xd8, 0xd1, 0x67, 0xd8, 0xa3, 0x7e, 0xd7, 0xb7, 0x50, 0xc8, + 0x14, 0x58, 0x6a, 0x04, 0xa5, 0x70, 0x22, 0x2d, 0x41, 0xea, 0x28, 0xb7, + 0xf0, 0xde, 0xc4, 0xe4, 0x5b, 0x4d, 0xc1, 0x33, 0x9e, 0x14, 0x32, 0xa8, + 0x9b, 0xc8, 0xd9, 0x5b, 0x95, 0x2a, 0x91, 0x9d, 0xe8, 0x15, 0x19, 0x9b, + 0x38, 0xf3, 0x35, 0x69, 0x3e, 0xd3, 0x4b, 0xcc, 0xf2, 0x94, 0x5a, 0xaf, + 0x91, 0xa4, 0xa1, 0x03, 0x48, 0x5f, 0x6d, 0x16, 0x56, 0x03, 0x5a, 0xcb, + 0x99, 0x19, 0x45, 0x9c, 0xba, 0xc9, 0xbc, 0x5b, 0x0f, 0xf5, 0xde, 0x70, + 0xa3, 0x70, 0x0d, 0x3f, 0x3e, 0x5c, 0x4d, 0x5a, 0x1a, 0x46, 0x1b, 0x44, + 0x4a, 0x73, 0xfa, 0xb1, 0xc4, 0x42, 0x7b, 0x0c, 0x15, 0x0d, 0x35, 0xc4, + 0xa3, 0xea, 0x17, 0xa0, 0x0b, 0xfb, 0x4d, 0x1b, 0x2f, 0x96, 0x1f, 0xaa, + 0xc0, 0xad, 0xdc, 0xf3, 0xb2, 0xb1, 0x44, 0x1f, 0x39, 0xc7, 0x33, 0x18, + 0xad, 0xe1, 0x50, 0x7d, 0xf9, 0x2a, 0x90, 0xf2, 0x06, 0xce, 0x07, 0xae, + 0x9f, 0xbc, 0x4d, 
0xae, 0x30, 0xdd, 0x47, 0xa2, 0xd3, 0x6d, 0x0c, 0xc6, + 0xb7, 0xae, 0xf5, 0x38, 0xa3, 0x00, 0x59, 0x6a, 0x00, 0x04, 0xd2, 0x77, + 0x0a, 0x58, 0xc9, 0xaf, 0x1b, 0x59, 0x29, 0xf3, 0xdd, 0x58, 0xcf, 0xa1, + 0x6d, 0xb4, 0x66, 0x23, 0x9f, 0x9b, 0x41, 0x2a, 0xc8, 0x28, 0x34, 0x77, + 0x3a, 0x1f, 0xa5, 0xde, 0x4b, 0x3f, 0xc7, 0x19, 0xf5, 0xdb, 0x98, 0xc4, + 0x6c, 0x2f, 0x34, 0x20, 0xc9, 0x52, 0x16, 0x60, 0xbc, 0x04, 0xd5, 0xff, + 0x4b, 0x07, 0x28, 0x5a, 0x3a, 0x48, 0x5b, 0x96, 0xee, 0x1f, 0xf1, 0xb4, + 0x9b, 0xb5, 0x64, 0xde, 0x1c, 0xd5, 0x3c, 0x1b, 0x98, 0x11, 0xc7, 0x0b, + 0x97, 0x00, 0x2f, 0x8f, 0xf9, 0x24, 0x4d, 0xba, 0x75, 0x6a, 0xce, 0xd8, + 0x7a, 0xee, 0x02, 0xd5, 0x19, 0xd6, 0x26, 0x40, 0xa7, 0x78, 0x76, 0x1a, + 0x17, 0xc2, 0xe6, 0x5a, 0x6e, 0x24, 0xb1, 0x17, 0xf8, 0x9f, 0xdc, 0x64, + 0xf0, 0x59, 0xc5, 0xfc, 0x4c, 0xbb, 0x3d, 0x3f, 0x70, 0x2c, 0x0d, 0xf5, + 0x6c, 0x96, 0x46, 0x1a, 0x1e, 0x5f, 0xd1, 0x3a, 0x00, 0x9a, 0x9d, 0x63, + 0xe6, 0xd1, 0xa2, 0x5a, 0x4a, 0x50, 0xa8, 0xd5, 0x91, 0x90, 0x69, 0x58, + 0x65, 0x00, 0xc7, 0xf1, 0xa6, 0x45, 0xfd, 0x5a, 0xe6, 0x05, 0x4b, 0xb2, + 0x3a, 0xdf, 0xa9, 0xd9, 0xe5, 0xa6, 0xe5, 0xe2, 0x5b, 0x3b, 0x2f, 0x57, + 0x6c, 0xc4, 0x06, 0xe1, 0x8e, 0x15, 0x98, 0xc8, 0x5e, 0x63, 0xba, 0x37, + 0xe6, 0x91, 0x5f, 0x1c, 0x5b, 0x77, 0xb5, 0x91, 0x07, 0x3a, 0xa6, 0x67, + 0x6d, 0xdf, 0x15, 0x62, 0x6b, 0x3b, 0xed, 0xa2, 0xc7, 0x46, 0x52, 0x8f, + 0xf2, 0x9f, 0x69, 0x00, 0xb8, 0x49, 0xcf, 0xd4, 0xf0, 0x95, 0x51, 0xda, + 0x0f, 0x4e, 0x0d, 0x11, 0x2f, 0x27, 0x73, 0xe9, 0x13, 0xcb, 0xa1, 0xfc, + 0x6b, 0x45, 0xf0, 0xfd, 0xc7, 0x17, 0xaa, 0x0c, 0xac, 0x98, 0xc4, 0x6c, + 0xf0, 0x32, 0x45, 0x67, 0xfe, 0x6f, 0x2e, 0xfb, 0xec, 0x19, 0xda, 0xbd, + 0x93, 0x5f, 0x50, 0xc2, 0x22, 0x9a, 0x3a, 0x5b, 0x31, 0xf5, 0x4e, 0x91, + 0xa6, 0xea, 0x67, 0xdd, 0x69, 0xf4, 0xd7, 0xea, 0x02, 0xbe, 0x55, 0x52, + 0xb9, 0x30, 0x21, 0xe5, 0xfc, 0x9a, 0x93, 0xd6, 0x6c, 0x33, 0x06, 0xb9, + 0xe3, 0xb0, 0x6a, 0xff, 0x9e, 0xc2, 0x5e, 0x1d, 0xd6, 0xdb, 0xa1, 0x60, + 0x34, 0x5d, 0x08, 
0xf9, 0xeb, 0xd6, 0x1f, 0x90, 0xf1, 0xf4, 0x07, 0x47, + 0xbf, 0xd9, 0xc9, 0xe8, 0xcf, 0xce, 0xa5, 0x1d, 0xb0, 0xd9, 0xbe, 0xc7, + 0xfb, 0xcc, 0xac, 0x3e, 0x92, 0x59, 0x0d, 0x1d, 0x65, 0x16, 0xa3, 0xdc, + 0x9b, 0x72, 0x22, 0x46, 0x04, 0xca, 0xb3, 0x5a, 0x2f, 0x3d, 0x99, 0x5c, + 0xb5, 0xb9, 0x30, 0xe3, 0xde, 0x8c, 0xba, 0xc7, 0x4c, 0xe5, 0x34, 0x6e, + 0xf4, 0x75, 0xf4, 0x38, 0x01, 0xf1, 0x61, 0xb8, 0x2b, 0xc3, 0x6f, 0xae, + 0xd1, 0x0a, 0x9d, 0x48, 0xc9, 0xe7, 0xc3, 0xe7, 0xc9, 0xe1, 0x6f, 0x96, + 0xa0, 0xc2, 0x91, 0xfd, 0xad, 0x99, 0x48, 0xde, 0xfc, 0xa3, 0x6e, 0xe3, + 0x94, 0x0e, 0xb5, 0xf6, 0x24, 0x8b, 0xce, 0x70, 0x3c, 0xdc, 0xe2, 0x66, + 0x9f, 0xe3, 0x6b, 0xc5, 0xd1, 0x97, 0x38, 0x12, 0x46, 0x37, 0xd6, 0x9a, + 0x4c, 0x6d, 0x4a, 0x2d, 0xc3, 0x28, 0x20, 0x2f, 0x55, 0x67, 0x17, 0x71, + 0xd3, 0x5c, 0xdc, 0xa3, 0x23, 0x60, 0x25, 0x2d, 0xe0, 0xc2, 0xed, 0xee, + 0x67, 0x9f, 0x26, 0xfb, 0x2f, 0x63, 0xf2, 0x6a, 0x23, 0x45, 0x26, 0x2c, + 0x33, 0x8a, 0xf2, 0xd1, 0xb2, 0x77, 0x99, 0x98, 0xd6, 0x18, 0xfe, 0xf3, + 0xff, 0xa4, 0x36, 0x03, 0xf4, 0xf5, 0xb1, 0xca, 0xa3, 0x5f, 0xe2, 0xc6, + 0xb2, 0x55, 0x2c, 0xaa, 0x64, 0xef, 0x28, 0x3a, 0x9e, 0x98, 0x01, 0x57, + 0x49, 0x98, 0x61, 0x4f, 0x42, 0x57, 0x00, 0x19, 0xb9, 0xa8, 0xec, 0xed, + 0x2b, 0x63, 0xf3, 0x0c, 0x3a, 0x1f, 0x10, 0xab, 0xe9, 0x6e, 0x61, 0x69, + 0xd1, 0x2d, 0xf3, 0x1f, 0xaa, 0x00, 0x57, 0xe2, 0xab, 0x74, 0xcd, 0xff, + 0x97, 0x2c, 0x3b, 0x67, 0xae, 0xa3, 0xfc, 0x69, 0xa9, 0x4e, 0x42, 0x07, + 0xfc, 0xbf, 0x36, 0x1a, 0xef, 0x6d, 0x6d, 0x14, 0x61, 0x30, 0x27, 0x98, + 0xfa, 0xf8, 0xc9, 0x70, 0xb4, 0xaa, 0x53, 0x48, 0x72, 0x3f, 0x58, 0x69, + 0x8d, 0x08, 0xc8, 0x09, 0x2b, 0xfc, 0x1d, 0xa1, 0x92, 0xae, 0x62, 0xa0, + 0xea, 0x05, 0x40, 0xac, 0x9c, 0xaf, 0x0e, 0xf4, 0x1e, 0x45, 0x33, 0xee, + 0x31, 0x39, 0x08, 0x4b, 0x54, 0x02, 0x2d, 0x03, 0x1c, 0xe6, 0x2d, 0x0c, + 0xd0, 0x92, 0x44, 0xd6, 0xa1, 0x57, 0x4e, 0x17, 0xde, 0xe6, 0x4f, 0x6a, + 0x07, 0x9f, 0x58, 0xe2, 0x27, 0xdb, 0xa9, 0x0c, 0x19, 0x56, 0xa3, 0xb4, + 0xc4, 0xe8, 0xa3, 
0x52, 0x9f, 0x6a, 0xc9, 0xb1, 0xda, 0xe9, 0xef, 0x12, + 0xc1, 0x6d, 0x5b, 0x04, 0x20, 0x93, 0xac, 0xf4, 0x38, 0x95, 0xdb, 0x50, + 0xa6, 0x2e, 0x5c, 0x3f, 0x2d, 0x32, 0x50, 0x03, 0x73, 0x64, 0x3a, 0xd5, + 0xfd, 0x98, 0x1c, 0x57, 0xc3, 0xe7, 0xf7, 0x14, 0x13, 0x15, 0x2a, 0xa2, + 0x5f, 0xa0, 0x67, 0xdd, 0x67, 0x00, 0x09, 0xc6, 0xfe, 0xad, 0x06, 0x4c, + 0x5e, 0x9a, 0x5b, 0x55, 0x06, 0x8c, 0x9a, 0x2a, 0x51, 0x0e, 0x4f, 0x15, + 0xcc, 0xe1, 0x53, 0x9c, 0x43, 0x37, 0xc1, 0x3e, 0x02, 0x4b, 0x98, 0x6f, + 0x9b, 0x60, 0x31, 0x2c, 0x2b, 0x9d, 0xda, 0xe0, 0x1d, 0xe4, 0x49, 0x66, + 0x65, 0x18, 0xfb, 0x24, 0x97, 0xe0, 0x2d, 0xf5, 0x44, 0x23, 0x09, 0x01, + 0xf9, 0xf5, 0x29, 0xff, 0x01, 0x36, 0xb9, 0x0e, 0x9b, 0xb3, 0x23, 0x1e, + 0xe5, 0x12, 0xbb, 0x3a, 0x04, 0x14, 0xb8, 0x23, 0x43, 0x95, 0xc1, 0x9d, + 0x57, 0x45, 0x46, 0x4c, 0x8f, 0x35, 0x25, 0x5f, 0x2b, 0xd9, 0xc6, 0xdd, + 0x61, 0xb8, 0xbb, 0x4d, 0x49, 0xef, 0x6e, 0x0c, 0x50, 0x07, 0xc9, 0x9b, + 0x2e, 0xb7, 0xbe, 0x23, 0xc3, 0xcf, 0x9d, 0xeb, 0x13, 0xc8, 0xeb, 0x72, + 0x51, 0x71, 0x69, 0x35, 0xf3, 0xce, 0x35, 0x45, 0x02, 0xba, 0x44, 0x5d, + 0xaf, 0xd0, 0xe5, 0x1d, 0x9b, 0x18, 0xbb, 0x62, 0xce, 0xaf, 0x40, 0x48, + 0x40, 0x2a, 0x5d, 0xcd, 0xa7, 0x2b, 0x8f, 0xf4, 0x4a, 0x4c, 0xe1, 0x59, + 0x40, 0x63, 0x33, 0xae, 0xd8, 0x9d, 0x4d, 0x11, 0x3d, 0x2d, 0x11, 0xc6, + 0x8c, 0xa9, 0xab, 0xa2, 0x08, 0xb8, 0xbf, 0x09, 0x66, 0xbc, 0xd7, 0xab, + 0xce, 0x0d, 0xe0, 0x9e, 0x51, 0x2f, 0x5c, 0xc7, 0x21, 0xb9, 0xcf, 0xc4, + 0x8b, 0xc0, 0x4b, 0x04, 0x1b, 0xfd, 0x43, 0xcf, 0xa4, 0x72, 0x62, 0x04, + 0x0b, 0x1f, 0x9f, 0x35, 0x9d, 0xa9, 0x19, 0x71, 0x06, 0xda, 0x03, 0x0f, + 0xcc, 0x3a, 0xf4, 0x3a, 0xaf, 0x07, 0x0f, 0xf2, 0x3e, 0x4a, 0xd3, 0x41, + 0x6a, 0x90, 0x35, 0x39, 0x4c, 0x1d, 0x2f, 0x05, 0xff, 0xcf, 0xc0, 0xbe, + 0x0f, 0xaf, 0x90, 0x4e, 0x45, 0x8c, 0x78, 0x4d, 0x6b, 0xf2, 0x47, 0x26, + 0xe9, 0x0d, 0xee, 0xd3, 0x97, 0x44, 0xaf, 0x6f, 0x95, 0x30, 0x9c, 0x08, + 0xe5, 0x18, 0x9e, 0xad, 0xd2, 0x2a, 0x0c, 0x21, 0x67, 0x50, 0x28, 0x4f, + 0x31, 0x9c, 0xee, 
0xb2, 0x95, 0xbd, 0xef, 0xc0, 0xd0, 0x0d, 0xd4, 0x6e, + 0xff, 0x93, 0x12, 0xc3, 0x51, 0x41, 0xe4, 0x6c, 0x19, 0x09, 0xd7, 0x0a, + 0xe0, 0xea, 0x0a, 0xe7, 0xa8, 0x4b, 0x60, 0xd6, 0x0c, 0x4d, 0xb5, 0x29, + 0x01, 0x74, 0xf9, 0x40, 0x8c, 0x6b, 0x11, 0xf6, 0xe4, 0xc9, 0x3c, 0x1a, + 0xf7, 0xce, 0x2c, 0xd8, 0xe3, 0x0e, 0xc5, 0xb9, 0x6c, 0x40, 0x44, 0xc9, + 0x04, 0xf6, 0x5c, 0xe1, 0x9f, 0xc7, 0xe0, 0x68, 0xe7, 0x6a, 0x92, 0xe7, + 0xb2, 0x12, 0x72, 0x3f, 0xfd, 0xc3, 0x06, 0xeb, 0x0a, 0xab, 0x6d, 0xad, + 0x03, 0x0b, 0x5d, 0xcc, 0x49, 0x04, 0x52, 0x19, 0xd4, 0x9d, 0x67, 0xbf, + 0xd3, 0xf4, 0x22, 0x76, 0x99, 0x52, 0xf5, 0xb5, 0x15, 0x38, 0x58, 0x57, + 0x9a, 0xa2, 0xd1, 0xbb, 0x3a, 0x07, 0xe2, 0xd6, 0x8d, 0x69, 0x9e, 0x5c, + 0xf4, 0xba, 0xda, 0x4a, 0x4d, 0x73, 0xdc, 0x32, 0xfd, 0xe1, 0x3a, 0x16, + 0xf1, 0x09, 0x26, 0x3b, 0x2a, 0xa9, 0xa7, 0x2c, 0xd3, 0xcf, 0x6b, 0xc5, + 0xb5, 0xbc, 0x71, 0xb6, 0x9e, 0xa0, 0x6a, 0x69, 0xa5, 0xeb, 0x54, 0x87, + 0xe9, 0x4f, 0x69, 0x39, 0xc5, 0x54, 0x28, 0x55, 0xb9, 0xff, 0x5d, 0x9e, + 0x17, 0x8e, 0x8c, 0xd5, 0x14, 0x5c, 0xa7, 0x33, 0x5a, 0x2f, 0x2d, 0x37, + 0x0e, 0xf2, 0x54, 0x64, 0x9d, 0xdf, 0x49, 0xab, 0xd3, 0x0f, 0xbd, 0xad, + 0x19, 0xb9, 0xcf, 0x0f, 0x40, 0x62, 0x4b, 0x93, 0xd7, 0xf4, 0x3b, 0xee, + 0x2b, 0x97, 0xe3, 0x55, 0xb3, 0x5b, 0x3f, 0x93, 0xa5, 0xf1, 0x40, 0x99, + 0xa1, 0x69, 0xbd, 0xf3, 0xf0, 0xb1, 0x6e, 0x5c, 0xba, 0x4a, 0xc4, 0x51, + 0x8e, 0xe1, 0x5c, 0xb8, 0x92, 0xb5, 0x43, 0xc4, 0x9e, 0x38, 0x0d, 0xfb, + 0x60, 0xb3, 0xe6, 0x0f, 0x55, 0x94, 0x01, 0xaf, 0xaa, 0xc3, 0x6d, 0xea, + 0xb2, 0xfc, 0xb0, 0x06, 0x29, 0x0f, 0xd3, 0x95, 0xb9, 0xf1, 0x8b, 0xce, + 0xd3, 0x5d, 0x16, 0xbf, 0x5c, 0x24, 0xc5, 0x36, 0x98, 0x8c, 0x5b, 0x43, + 0xe7, 0xfe, 0x77, 0xda, 0xc5, 0xd8, 0xf6, 0x72, 0xba, 0xcf, 0x9c, 0x18, + 0x58, 0xb8, 0xe4, 0x1d, 0xf6, 0xfb, 0x3b, 0xb4, 0x1f, 0xea, 0xa3, 0xe3, + 0xd5, 0xbe, 0x3f, 0xd5, 0xf9, 0xc4, 0x00, 0x8e, 0x17, 0x22, 0x3d, 0x96, + 0xd8, 0xb6, 0xa5, 0xf6, 0xcd, 0x55, 0x48, 0x8b, 0x1b, 0x38, 0x9c, 0xd7, + 0x6d, 0x40, 0x2a, 
0x5f, 0xcf, 0xcb, 0x67, 0xa4, 0x8c, 0xf4, 0x8f, 0x70, + 0x34, 0xeb, 0x70, 0xcd, 0xee, 0x1c, 0xbd, 0xae, 0xd1, 0xc1, 0xf8, 0x62, + 0x45, 0xb5, 0x5d, 0xe6, 0x0b, 0xd4, 0x3d, 0x23, 0xf0, 0x27, 0x44, 0x56, + 0x32, 0x4d, 0xb1, 0x6c, 0x5d, 0x33, 0x94, 0x77, 0xe3, 0xac, 0x54, 0x56, + 0x24, 0x05, 0x26, 0x4a, 0xf0, 0x59, 0xfb, 0x1f, 0xa4, 0x0f, 0xbe, 0x9e, + 0xbc, 0x76, 0x9d, 0x5a, 0xed, 0x15, 0x97, 0x4e, 0x05, 0x8a, 0x8b, 0xff, + 0xc7, 0x9b, 0x67, 0x32, 0x12, 0x41, 0x04, 0xcb, 0x24, 0xae, 0x9e, 0xcc, + 0xd6, 0xc6, 0x67, 0x53, 0xfa, 0x29, 0x37, 0x73, 0xc6, 0xdf, 0xf2, 0x56, + 0x72, 0x06, 0x03, 0xaa, 0x5d, 0x07, 0xac, 0x38, 0xb9, 0x2a, 0x61, 0x02, + 0x24, 0xcf, 0x54, 0x3f, 0x98, 0xb0, 0x5c, 0xba, 0xe3, 0x15, 0x27, 0x52, + 0x63, 0x43, 0x12, 0x62, 0x33, 0x02, 0xb8, 0x69, 0x52, 0x70, 0x6c, 0xc0, + 0x23, 0x37, 0x65, 0x4b, 0xc9, 0xea, 0x98, 0x06, 0xde, 0x3d, 0x59, 0x72, + 0x94, 0x48, 0x60, 0xeb, 0xe7, 0xaa, 0x68, 0x72, 0x22, 0x15, 0x39, 0xf0, + 0x47, 0x43, 0xeb, 0x37, 0xb1, 0x3b, 0x9e, 0x05, 0x12, 0xdb, 0x74, 0x18, + 0xfe, 0x11, 0xcb, 0xae, 0xe0, 0xed, 0x1c, 0xe3, 0x19, 0x71, 0x56, 0xa6, + 0x04, 0xe6, 0x20, 0x62, 0xfd, 0xb1, 0x57, 0x44, 0xca, 0x3f, 0xdf, 0x51, + 0x23, 0x76, 0x3b, 0x70, 0x27, 0x33, 0x62, 0x74, 0x94, 0xff, 0x70, 0xcc, + 0xd4, 0xbf, 0x67, 0x12, 0x17, 0x5f, 0x71, 0xf8, 0x8f, 0x09, 0xca, 0xb5, + 0x49, 0x38, 0xcf, 0x1f, 0x94, 0x9a, 0xe6, 0x76, 0x0e, 0xa6, 0x5a, 0x2c, + 0x36, 0x61, 0x41, 0x2d, 0x14, 0x2f, 0x35, 0xa2, 0xaa, 0x2d, 0xd5, 0x54, + 0x3c, 0x4e, 0xa0, 0x63, 0xa9, 0x9e, 0xe9, 0x65, 0x62, 0xcf, 0x5a, 0x1a, + 0xb9, 0x70, 0xf7, 0xf1, 0x8a, 0xc7, 0x19, 0x6e, 0x34, 0xa0, 0xbb, 0x1b, + 0x76, 0x9b, 0x60, 0x20, 0xfd, 0xff, 0xe1, 0x40, 0x5e, 0xd7, 0x49, 0xd3, + 0x3c, 0x0f, 0x52, 0xae, 0x37, 0x38, 0x1d, 0xd5, 0xd0, 0xe7, 0xd6, 0xfc, + 0x06, 0x3b, 0x50, 0x06, 0x9c, 0xb4, 0x37, 0x9a, 0x53, 0x09, 0x56, 0xa4, + 0xa8, 0x64, 0x70, 0xa7, 0xaf, 0xb9, 0xd9, 0x19, 0xbc, 0x5b, 0x04, 0x07, + 0x68, 0xc0, 0xa4, 0xc0, 0x3d, 0x32, 0x36, 0x94, 0x24, 0xd3, 0x36, 0x1f, + 0xfc, 0xd8, 0x26, 
0x49, 0x94, 0xd2, 0x1e, 0x8b, 0x0c, 0x70, 0x6e, 0xd7, + 0xd2, 0x37, 0x8f, 0x13, 0xef, 0x41, 0xdb, 0x53, 0xb5, 0xba, 0xe5, 0xe3, + 0x0c, 0xcd, 0xa3, 0xfa, 0x74, 0x16, 0xd9, 0x42, 0x10, 0xa3, 0xe6, 0x26, + 0xd6, 0x74, 0xbc, 0x17, 0x9b, 0x2e, 0x4c, 0xe2, 0x13, 0x49, 0x0f, 0xc9, + 0xc2, 0x34, 0xae, 0x5b, 0x6b, 0x46, 0xbc, 0xc4, 0x62, 0xa0, 0x4a, 0x18, + 0x62, 0x69, 0x1c, 0xc3, 0x78, 0x36, 0xfa, 0xd9, 0x8d, 0xd0, 0xf9, 0x4f, + 0x56, 0x90, 0x4b, 0xca, 0xc4, 0xdd, 0x64, 0x2c, 0xd1, 0x3c, 0xa8, 0xbe, + 0x62, 0x8f, 0x2a, 0x11, 0x93, 0x71, 0x75, 0x70, 0x43, 0xd0, 0x5f, 0xfb, + 0x36, 0x2b, 0x35, 0x26, 0xda, 0xda, 0x25, 0x3c, 0x17, 0xf2, 0xb7, 0x36, + 0xd7, 0x8d, 0xd1, 0xbc, 0x2f, 0xe7, 0xf8, 0x55, 0x42, 0x2e, 0xe1, 0xc0, + 0x4a, 0xee, 0x3d, 0x5b, 0xc9, 0x69, 0x15, 0xc5, 0x42, 0x03, 0x2c, 0x46, + 0x02, 0x94, 0x91, 0xfb, 0x0f, 0x98, 0x8d, 0x32, 0xdf, 0x0b, 0x19, 0xda, + 0x9f, 0x96, 0x6e, 0x2d, 0xc4, 0xa1, 0x92, 0xc1, 0x73, 0x2f, 0x23, 0x9f, + 0x55, 0xc5, 0xb4, 0x8c, 0xef, 0xf3, 0xa2, 0x94, 0x8f, 0x6c, 0xd8, 0xb1, + 0x9d, 0x0d, 0x17, 0x93, 0x21, 0xd7, 0xae, 0xa8, 0x41, 0xd3, 0xf1, 0x9a, + 0xe3, 0x36, 0xca, 0x5f, 0xa4, 0xd9, 0xaf, 0x34, 0xbf, 0xe6, 0x9e, 0x4c, + 0xf0, 0xd1, 0xb0, 0x8c, 0x8e, 0x76, 0x3d, 0xb3, 0xf7, 0xd9, 0xfb, 0xbf, + 0x72, 0xae, 0xa8, 0x39, 0x00, 0xe5, 0x53, 0x17, 0x6c, 0x4e, 0x06, 0x22, + 0xc0, 0x10, 0xe7, 0x4d, 0xff, 0x75, 0x03, 0x01, 0x18, 0x46, 0xfd, 0xde, + 0x1e, 0x95, 0x46, 0xb8, 0x5b, 0x36, 0xbc, 0x1d, 0x95, 0x05, 0x8f, 0x5d, + 0x38, 0x41, 0x25, 0x2c, 0x9b, 0x34, 0x75, 0x9b, 0xf0, 0x8b, 0xaf, 0x0d, + 0x2e, 0xc2, 0x1a, 0x03, 0x61, 0xbe, 0xe8, 0x49, 0xbc, 0x9b, 0x45, 0xfb, + 0x35, 0x2b, 0x6c, 0xa1, 0x96, 0xa0, 0x08, 0x0e, 0xca, 0x01, 0xc0, 0x97, + 0xfa, 0xdf, 0x11, 0x1a, 0x0d, 0xf9, 0xc2, 0x5a, 0xe1, 0x4c, 0xb5, 0x37, + 0xff, 0x91, 0xb6, 0x96, 0xbf, 0x62, 0x04, 0x59, 0x69, 0x01, 0x68, 0x66, + 0x52, 0x66, 0x4a, 0x49, 0xe9, 0xe6, 0xe4, 0x44, 0x92, 0x5e, 0xaf, 0xf5, + 0x24, 0xdb, 0x6f, 0x21, 0xf9, 0x21, 0x58, 0x5f, 0xc4, 0xf0, 0x30, 0x90, + 0x68, 0xff, 0x58, 
0x5c, 0xbd, 0x6f, 0x58, 0x77, 0xe0, 0x03, 0x68, 0x2a, + 0x1a, 0xa4, 0xd6, 0x9d, 0xd0, 0x38, 0x5a, 0xbd, 0x52, 0xa8, 0xc5, 0xf0, + 0xbc, 0xf2, 0x04, 0x49, 0x0e, 0x1b, 0x1b, 0x93, 0xc0, 0x65, 0xca, 0x05, + 0x42, 0x11, 0x03, 0xd6, 0xd5, 0x2c, 0x4c, 0xcd, 0xed, 0xb4, 0x54, 0xa4, + 0x3d, 0x46, 0x64, 0x4c, 0xc4, 0x8f, 0x0a, 0x95, 0x6a, 0x4f, 0xfb, 0x2e, + 0x1d, 0x5a, 0x8a, 0xcb, 0x31, 0x94, 0x21, 0x54, 0x51, 0xf5, 0x4e, 0x3e, + 0x32, 0x00, 0x12, 0x8e, 0x4c, 0x8c, 0x17, 0x90, 0xea, 0x8d, 0xfe, 0xc3, + 0xfe, 0x69, 0x10, 0xd9, 0x1c, 0x60, 0x91, 0xb6, 0xbb, 0x11, 0xb7, 0x77, + 0x1c, 0x69, 0xec, 0xb5, 0x28, 0x1e, 0x4b, 0xc8, 0xac, 0xe2, 0xe7, 0xe4, + 0xca, 0x1c, 0x6a, 0x16, 0xb8, 0x0a, 0x1c, 0xcb, 0xbd, 0x0e, 0x61, 0xf6, + 0x30, 0xa0, 0xb0, 0x11, 0x57, 0xd0, 0xa0, 0xe5, 0x63, 0xb4, 0x5e, 0x65, + 0x54, 0xbd, 0x2b, 0xcf, 0x92, 0xb3, 0xe2, 0xad, 0xba, 0x6b, 0xd8, 0x8b, + 0xd4, 0xc9, 0x49, 0x6b, 0xe9, 0x6f, 0x30, 0x9a, 0x8d, 0x1a, 0xd2, 0x73, + 0xed, 0x01, 0x20, 0x76, 0x59, 0x3b, 0x63, 0x15, 0xf7, 0x4a, 0x93, 0xf5, + 0xe8, 0xaa, 0x77, 0xf7, 0xee, 0x16, 0x26, 0x6d, 0x6d, 0x1e, 0xb3, 0x04, + 0xd1, 0x36, 0x6d, 0xdb, 0xe1, 0xee, 0xdf, 0x69, 0x0e, 0x28, 0x3b, 0x5a, + 0x37, 0x51, 0x61, 0x10, 0x58, 0xd0, 0x58, 0x75, 0x63, 0x5b, 0x76, 0x3e, + 0x55, 0x0a, 0x07, 0x3e, 0xfe, 0xb9, 0x6e, 0x4c, 0xfc, 0x1b, 0x8a, 0xa5, + 0x03, 0x1a, 0xb9, 0x04, 0x22, 0x60, 0x33, 0x66, 0xda, 0xb7, 0x1c, 0x3a, + 0xb6, 0x92, 0x45, 0x01, 0xc2, 0x73, 0x49, 0x6a, 0x9a, 0x54, 0x10, 0xe2, + 0x36, 0x45, 0xbd, 0x1d, 0x33, 0x2a, 0xd2, 0xc9, 0x70, 0x63, 0x39, 0xcf, + 0xf7, 0x76, 0x70, 0x37, 0xde, 0x23, 0x4c, 0xd2, 0xa1, 0x37, 0x2c, 0x52, + 0xae, 0xa3, 0xfb, 0x45, 0xd0, 0xb9, 0x46, 0x3e, 0x2a, 0xe8, 0xe9, 0x64, + 0xe1, 0x16, 0x30, 0x08, 0x36, 0xcd, 0x9e, 0x15, 0x44, 0xdd, 0x27, 0xa9, + 0x1c, 0x29, 0xf1, 0xa7, 0x20, 0x21, 0x59, 0x61, 0x4c, 0xbe, 0x5e, 0x20, + 0x36, 0xca, 0xb8, 0x6d, 0xb5, 0x0c, 0x29, 0x41, 0xa1, 0xd3, 0x8a, 0x2b, + 0x34, 0xd2, 0x5b, 0x92, 0x12, 0x1f, 0x36, 0x9f, 0x5d, 0x02, 0x2a, 0xca, + 0xac, 0x5b, 0x29, 
0x8b, 0x51, 0x3a, 0x65, 0xf5, 0xdf, 0x60, 0x6c, 0x0c, + 0xa7, 0x95, 0x3d, 0x52, 0x13, 0xb4, 0xbd, 0x8c, 0xf1, 0xac, 0xba, 0x3c, + 0x24, 0x6c, 0xc0, 0xdb, 0xa8, 0x5b, 0xd4, 0xdb, 0xf5, 0xcd, 0xaf, 0xdf, + 0x2f, 0xe2, 0x71, 0xcc, 0x00, 0x3a, 0x87, 0xdc, 0x23, 0xdf, 0xa7, 0xb0, + 0xb6, 0xcb, 0xff, 0x1c, 0xe7, 0xfe, 0xa8, 0xa8, 0xea, 0xad, 0x37, 0x58, + 0xfd, 0x58, 0x01, 0xa5, 0xe4, 0x5d, 0xdf, 0x4a, 0x10, 0x0b, 0xc3, 0x5e, + 0xd1, 0x0d, 0x4c, 0x21, 0x0e, 0x51, 0x95, 0x99, 0x58, 0xdf, 0x6d, 0xa8, + 0x8e, 0xf7, 0x51, 0xa6, 0x53, 0x44, 0x6b, 0xb3, 0x00, 0x64, 0xe1, 0x6f, + 0x3d, 0x19, 0x40, 0x30, 0x46, 0x95, 0x9b, 0x39, 0xa5, 0x0d, 0x77, 0xaa, + 0xb1, 0x57, 0x57, 0x08, 0xe0, 0xab, 0xd1, 0xd5, 0x25, 0x59, 0x11, 0x2f, + 0x62, 0xbf, 0x50, 0x95, 0x02, 0x18, 0xdb, 0x2d, 0xbc, 0xdb, 0xfa, 0x3d, + 0x45, 0xab, 0xb5, 0x2e, 0x8e, 0x9b, 0x49, 0xe5, 0x50, 0xbd, 0x1f, 0x1c, + 0x64, 0xd8, 0x9d, 0x0c, 0x0c, 0xe8, 0xf3, 0x54, 0x49, 0x95, 0x3d, 0x71, + 0xa1, 0x16, 0x98, 0x08, 0x16, 0x37, 0x6a, 0x95, 0xa3, 0xaa, 0xb6, 0xf7, + 0x0e, 0x99, 0x2a, 0x0b, 0x68, 0x49, 0xd1, 0xa4, 0x33, 0x3e, 0x57, 0xfc, + 0xc3, 0x5a, 0xa9, 0x1e, 0xbf, 0xf1, 0x19, 0x2d, 0xee, 0xfa, 0x01, 0xa8, + 0x64, 0x0d, 0x74, 0x54, 0xed, 0x4d, 0xab, 0xad, 0x23, 0x25, 0xde, 0xef, + 0xb4, 0x54, 0xfe, 0x3f, 0xba, 0xe0, 0x0e, 0x76, 0x1b, 0x1a, 0xa9, 0xe3, + 0x53, 0xbd, 0xde, 0x65, 0x6b, 0x08, 0x6d, 0x71, 0x45, 0xb4, 0xf8, 0x9a, + 0x06, 0x3d, 0xae, 0x87, 0x25, 0x51, 0x9d, 0x46, 0x33, 0xf3, 0x77, 0x6d, + 0xb6, 0x5d, 0xbe, 0x08, 0xfc, 0xf5, 0x31, 0xa1, 0xd5, 0x22, 0x19, 0xcd, + 0x66, 0x82, 0x19, 0xf5, 0xf5, 0x29, 0x28, 0x83, 0xa5, 0xa3, 0x30, 0x50, + 0xa1, 0xfb, 0xf6, 0x36, 0x31, 0xbf, 0xb5, 0xc4, 0xe7, 0x99, 0xd5, 0x4f, + 0xf5, 0xb0, 0xf5, 0x9a, 0x12, 0x4e, 0x1b, 0xdb, 0x4d, 0x21, 0x6d, 0xda, + 0xeb, 0x6a, 0x11, 0x55, 0xa2, 0xe2, 0x6a, 0xe9, 0xe8, 0x01, 0xa1, 0x97, + 0x68, 0xc2, 0x30, 0xd2, 0xfa, 0x60, 0xec, 0x4d, 0x54, 0x5b, 0x9e, 0x2d, + 0x97, 0xca, 0x1b, 0xc2, 0xb2, 0x14, 0x3f, 0xaf, 0x23, 0x54, 0xe8, 0x0c, + 0x3c, 0xed, 0x50, 
0x32, 0xff, 0x3a, 0x8c, 0xe6, 0xdc, 0x17, 0xad, 0x65, + 0x05, 0x35, 0x28, 0xc9, 0x77, 0x21, 0xb1, 0x9a, 0xec, 0xf1, 0xd6, 0x53, + 0xb9, 0xb3, 0xe0, 0x41, 0x11, 0x85, 0x2e, 0x1a, 0xb5, 0xad, 0xab, 0x9b, + 0xae, 0x69, 0xa0, 0xb1, 0xa0, 0x07, 0x72, 0x8f, 0x4a, 0xd9, 0x5e, 0x1f, + 0x29, 0x9e, 0x4d, 0x0b, 0x9a, 0x82, 0xfe, 0x26, 0xc5, 0x17, 0x5b, 0x51, + 0x46, 0xf2, 0xf7, 0x27, 0xba, 0x06, 0x91, 0x0e, 0xc2, 0x07, 0xb3, 0x1b, + 0x54, 0xad, 0xb5, 0xf5, 0x02, 0xc1, 0x39, 0x6a, 0x2a, 0xd7, 0x46, 0xbf, + 0x3d, 0x39, 0x4e, 0x8e, 0xb1, 0x58, 0xf4, 0x90, 0xa7, 0x08, 0x0e, 0x99, + 0x64, 0x33, 0x3e, 0x1e, 0x09, 0xb7, 0x88, 0xa0, 0x29, 0xb2, 0x0b, 0x5c, + 0x15, 0xd4, 0x36, 0x55, 0x42, 0x48, 0xe7, 0x47, 0xf9, 0xb5, 0x05, 0xcd, + 0x40, 0xde, 0x92, 0x27, 0x11, 0x3b, 0xad, 0x3e, 0x9b, 0x95, 0x38, 0xad, + 0x11, 0xd5, 0x9d, 0x1d, 0x38, 0x60, 0xde, 0x31, 0xe3, 0x40, 0xb2, 0xf2, + 0x8e, 0xb4, 0x03, 0xaa, 0x51, 0x15, 0xe4, 0x36, 0x4d, 0x43, 0x05, 0xbc, + 0x36, 0x82, 0xdf, 0xfc, 0xfd, 0x23, 0x4d, 0xad, 0x9f, 0xf4, 0xce, 0xfb, + 0xaf, 0x46, 0xb3, 0x59, 0x98, 0x91, 0x85, 0x4a, 0xa7, 0x67, 0x70, 0xbd, + 0xca, 0x12, 0x9b, 0x6b, 0x00, 0xe5, 0x82, 0x3c, 0x37, 0x99, 0x8d, 0x6b, + 0x32, 0xaf, 0x08, 0x05, 0x36, 0xd6, 0xd7, 0xfb, 0x65, 0xce, 0x4e, 0x9f, + 0xd5, 0xd1, 0x3a, 0x42, 0xb0, 0x31, 0x62, 0xd0, 0xe2, 0xe5, 0x37, 0xc1, + 0x6d, 0x8a, 0x24, 0xa4, 0x19, 0xc2, 0x59, 0x3c, 0x44, 0xef, 0x96, 0xf6, + 0x35, 0x00, 0xe7, 0xe6, 0x2e, 0x82, 0xa5, 0x4a, 0x2f, 0xa2, 0xfe, 0x1f, + 0x53, 0x52, 0x31, 0x97, 0x47, 0x37, 0x15, 0x26, 0xa7, 0x8d, 0xd3, 0x21, + 0x6a, 0x98, 0x6d, 0xf1, 0xe6, 0x29, 0xf8, 0x9d, 0xaf, 0x5f, 0x3e, 0x3a, + 0xbc, 0x65, 0xb2, 0xd8, 0x41, 0xbc, 0xd6, 0x39, 0x3c, 0xc7, 0x2f, 0x2e, + 0xa3, 0x08, 0x9a, 0x21, 0x05, 0xe0, 0x4c, 0x06, 0x4d, 0x82, 0x68, 0x5d, + 0x4a, 0x9e, 0xca, 0xee, 0x3d, 0x28, 0x45, 0x0e, 0xff, 0xdd, 0xe6, 0x46, + 0xbc, 0xf8, 0x19, 0x5b, 0xda, 0xf4, 0x14, 0xd1, 0x4f, 0x02, 0x6e, 0xf6, + 0x01, 0x2d, 0xd6, 0xb6, 0x8b, 0xf5, 0x9c, 0x4e, 0xee, 0xe7, 0xc8, 0x10, + 0x05, 0xb6, 0x6d, 
0x8d, 0x49, 0xe2, 0x04, 0xec, 0x4d, 0x61, 0x67, 0xc2, + 0x19, 0x27, 0xab, 0xe1, 0x0d, 0x29, 0xab, 0xf2, 0xa0, 0xf9, 0x69, 0x0d, + 0x81, 0x29, 0x4d, 0x40, 0x6d, 0xd7, 0xda, 0xb7, 0x9e, 0x0b, 0x90, 0x9c, + 0x9b, 0xeb, 0x59, 0x2c, 0xc9, 0xa4, 0x85, 0x95, 0xe2, 0xda, 0x2d, 0xe4, + 0x60, 0x9a, 0x64, 0x21, 0xbf, 0x1d, 0x57, 0x4d, 0x3e, 0xa0, 0x35, 0x0f, + 0xce, 0xd7, 0xe1, 0x44, 0x63, 0x9e, 0xe8, 0x8e, 0xbd, 0xc8, 0xc1, 0x65, + 0xe1, 0xd2, 0x09, 0x45, 0xd3, 0xbd, 0x13, 0xb2, 0x1f, 0x46, 0x32, 0xa6, + 0xcd, 0xa3, 0x44, 0x4c, 0x52, 0xa7, 0xe7, 0x54, 0xea, 0xe6, 0xa0, 0xce, + 0x02, 0x8b, 0x69, 0xdb, 0xde, 0xef, 0x5f, 0xcb, 0x6f, 0x6e, 0x0f, 0xf5, + 0x68, 0x42, 0xf4, 0x37, 0x08, 0x1f, 0x87, 0x55, 0xb4, 0xbc, 0x8a, 0x84, + 0x84, 0x10, 0xc6, 0x36, 0x3e, 0x8a, 0x6b, 0x4e, 0xd5, 0xc8, 0x64, 0xcb, + 0xb5, 0xc0, 0xfe, 0x99, 0x66, 0xaa, 0xb1, 0x50, 0xa7, 0x70, 0xd9, 0xa6, + 0x17, 0x2d, 0xd4, 0xad, 0xdf, 0xf2, 0x2f, 0xac, 0xae, 0xae, 0x12, 0xcf, + 0x5b, 0x09, 0xf2, 0x2d, 0xb4, 0x21, 0xc9, 0xd1, 0x58, 0xdb, 0x4e, 0x9b, + 0xe0, 0x32, 0x08, 0xe4, 0x4a, 0xe6, 0x9c, 0x61, 0x25, 0x90, 0x08, 0xf2, + 0xb1, 0xc1, 0x3c, 0x25, 0x0b, 0x5a, 0x03, 0x40, 0xdb, 0x06, 0x5f, 0xd2, + 0x60, 0x8e, 0x0a, 0x5b, 0xc8, 0xa2, 0xcd, 0xac, 0xb3, 0x54, 0x0b, 0xb6, + 0x05, 0x45, 0xd7, 0xa8, 0x8a, 0xfa, 0x8a, 0xba, 0x09, 0x53, 0x81, 0xd7, + 0xf5, 0x40, 0x61, 0x46, 0xf2, 0x22, 0xe4, 0x21, 0xb4, 0x26, 0x41, 0x10, + 0x25, 0x4d, 0x93, 0xc2, 0xa2, 0xae, 0xc3, 0xaa, 0xbe, 0x71, 0xa6, 0xaa, + 0xf7, 0xb1, 0xbf, 0x02, 0x22, 0xe9, 0xd7, 0xfb, 0xaa, 0x1d, 0x5d, 0xf5, + 0xe7, 0x5b, 0x63, 0xf2, 0xe6, 0x5c, 0xd6, 0x24, 0x6d, 0xb5, 0xca, 0xa3, + 0xe7, 0x57, 0x1a, 0xa5, 0xf7, 0x95, 0xc5, 0x92, 0x51, 0x65, 0x68, 0xc5, + 0xe6, 0x27, 0xa9, 0x94, 0x8a, 0xb6, 0xec, 0x0d, 0x9c, 0x51, 0xdf, 0x22, + 0xca, 0xdf, 0x5a, 0xf5, 0xe4, 0xad, 0xf4, 0xfc, 0x1f, 0x68, 0x9f, 0xdb, + 0x40, 0x4e, 0x6a, 0x1e, 0x5a, 0xd8, 0x6c, 0xd6, 0xef, 0xad, 0x64, 0xe7, + 0xcb, 0xfc, 0x44, 0xae, 0xa5, 0x62, 0x65, 0xad, 0x2e, 0x6a, 0x46, 0xcf, + 0x0d, 0xd0, 0x46, 
0x5e, 0x87, 0x37, 0xb6, 0xab, 0x70, 0x52, 0xee, 0x5a, + 0xa7, 0x13, 0xa3, 0xc3, 0x4b, 0x62, 0xe7, 0x31, 0x10, 0xed, 0x39, 0x1c, + 0x4a, 0xe3, 0xc1, 0x57, 0xcb, 0x45, 0xe4, 0x89, 0xee, 0x0e, 0x24, 0xc1, + 0xa6, 0xac, 0xd4, 0x0e, 0x9b, 0xe0, 0x26, 0x28, 0x08, 0x2b, 0xe1, 0xc9, + 0x42, 0x37, 0xa3, 0x46, 0xcc, 0x5d, 0x89, 0x10, 0x1f, 0x23, 0xcb, 0x1c, + 0x67, 0xe2, 0x6d, 0xaa, 0x66, 0xa5, 0xf5, 0xea, 0x94, 0x2b, 0x8c, 0xf6, + 0xf4, 0xd3, 0xfb, 0x9c, 0x96, 0x0a, 0x87, 0xaf, 0x5c, 0x19, 0xb4, 0x3b, + 0x26, 0xb2, 0x48, 0x55, 0x97, 0xfd, 0x3a, 0xec, 0x06, 0xe4, 0x58, 0x99, + 0x9a, 0x26, 0x4f, 0xe0, 0x9c, 0x67, 0x09, 0x05, 0x5b, 0x72, 0x8e, 0xd6, + 0xe4, 0x4e, 0xe2, 0x63, 0xb0, 0x9c, 0xf6, 0x92, 0xd3, 0x05, 0x3f, 0xb0, + 0x04, 0x5f, 0x02, 0x97, 0xf4, 0x42, 0x1d, 0x3b, 0x5c, 0x44, 0x00, 0x95, + 0x8b, 0xf5, 0x06, 0x40, 0xbd, 0xb8, 0xf7, 0x4b, 0x4a, 0xfa, 0xf0, 0x04, + 0x04, 0xd0, 0xa5, 0xb9, 0x3a, 0xa0, 0x2d, 0x0c, 0x1b, 0xec, 0x5a, 0x14, + 0xc8, 0x1d, 0x93, 0x86, 0xfd, 0x16, 0x68, 0xf8, 0x16, 0x9b, 0xb4, 0x88, + 0x99, 0x63, 0x0e, 0xd5, 0x20, 0x07, 0x43, 0x28, 0x26, 0xba, 0xf9, 0x97, + 0xed, 0x6b, 0x40, 0xb8, 0x07, 0x73, 0x59, 0xd5, 0x55, 0xa8, 0x64, 0x14, + 0x1c, 0xc5, 0xc0, 0x1f, 0x8d, 0x09, 0xae, 0x9c, 0x66, 0xa1, 0x94, 0xca, + 0x14, 0x46, 0xed, 0x46, 0x46, 0x25, 0x63, 0x5b, 0x2b, 0x95, 0x85, 0x05, + 0xc2, 0xb7, 0xeb, 0x06, 0x30, 0x5a, 0xf6, 0x22, 0x4e, 0x47, 0x1e, 0x0e, + 0x0c, 0xad, 0xd5, 0x11, 0xa8, 0x6a, 0x89, 0xd5, 0x49, 0xd4, 0xfa, 0x43, + 0xb0, 0x32, 0xb0, 0xb9, 0xb3, 0xda, 0x3f, 0x4f, 0xac, 0x4c, 0xc1, 0xa7, + 0x9f, 0xc2, 0xc2, 0x04, 0x70, 0xa2, 0x08, 0x01, 0xeb, 0x10, 0xa4, 0xa5, + 0x4c, 0xcd, 0xb3, 0x81, 0x4e, 0xbe, 0x6c, 0x51, 0x44, 0xf8, 0x82, 0xbd, + 0x42, 0x34, 0xfb, 0xdb, 0xb4, 0x32, 0xd2, 0x93, 0x63, 0x5e, 0xf6, 0x07, + 0x6e, 0x2c, 0xc2, 0xcf, 0xf4, 0x5d, 0x84, 0xe9, 0x5e, 0x5c, 0xa8, 0x39, + 0x28, 0x4a, 0xed, 0x15, 0x1b, 0xea, 0xe6, 0xde, 0x85, 0x92, 0x86, 0xe7, + 0x83, 0x4b, 0x87, 0xf7, 0x23, 0x60, 0xe2, 0x22, 0xd3, 0x32, 0x16, 0x4e, + 0x2f, 0xde, 0x01, 
0x8b, 0x48, 0xea, 0xcd, 0x8a, 0x8b, 0xbc, 0xc6, 0x64, + 0xb2, 0x67, 0x47, 0xf5, 0x98, 0xf8, 0xca, 0xf1, 0x83, 0x66, 0xd7, 0x9a, + 0xef, 0xca, 0x20, 0xc2, 0xec, 0x8c, 0x38, 0xb1, 0x37, 0x13, 0x93, 0x92, + 0xba, 0xa1, 0xee, 0x6a, 0x57, 0x43, 0xaa, 0xdc, 0xdf, 0xa4, 0x3f, 0xc6, + 0xb6, 0xd6, 0x68, 0x54, 0xab, 0x36, 0xe9, 0x0f, 0x6f, 0xd5, 0xa1, 0x1b, + 0xa1, 0x02, 0xc9, 0x41, 0xef, 0x4f, 0x86, 0xcc, 0x1a, 0xfa, 0xd2, 0xdd, + 0x87, 0x04, 0xe0, 0x27, 0x38, 0xcf, 0x91, 0x95, 0xb4, 0x02, 0x10, 0x1d, + 0xc3, 0xcc, 0x6f, 0xaf, 0xbc, 0x94, 0x64, 0x47, 0xbc, 0x37, 0xde, 0xe3, + 0x2e, 0x89, 0x03, 0xb6, 0xd3, 0x28, 0x4a, 0x5e, 0x6d, 0x1e, 0xc5, 0x1a, + 0xa5, 0x0c, 0x92, 0xf7, 0xe2, 0x19, 0xe7, 0x39, 0xf0, 0xf2, 0x49, 0x8b, + 0xe6, 0x99, 0xd8, 0x4b, 0x0d, 0x6e, 0x3f, 0x57, 0x89, 0x9e, 0x0d, 0x34, + 0x4b, 0x52, 0xcd, 0x18, 0x57, 0xc7, 0x8e, 0x48, 0x03, 0x65, 0xd4, 0xdd, + 0xdf, 0x04, 0xf5, 0x39, 0x5e, 0x97, 0xbc, 0xc0, 0xc5, 0x91, 0xe7, 0x9d, + 0xbe, 0x28, 0x4c, 0xe7, 0xf4, 0xa0, 0x34, 0xee, 0xba, 0xa7, 0x8d, 0x52, + 0xc4, 0x07, 0x14, 0xd2, 0x93, 0xb0, 0x1d, 0x61, 0x53, 0x23, 0xc3, 0xe1, + 0xd2, 0xbf, 0xe1, 0xd6, 0x1f, 0x27, 0xcc, 0x8c, 0xe7, 0x0b, 0x09, 0x4f, + 0xe6, 0xa2, 0x41, 0xf4, 0x31, 0xbe, 0x95, 0x17, 0xfb, 0x50, 0xa4, 0xa4, + 0x51, 0x3c, 0x6f, 0xf8, 0x6a, 0xba, 0xac, 0xe4, 0x1e, 0x38, 0x78, 0x18, + 0x58, 0x31, 0x69, 0xc9, 0x52, 0xb0, 0xfc, 0x71, 0x54, 0xad, 0xe2, 0x8e, + 0xa2, 0xf2, 0x8e, 0x58, 0x11, 0x1d, 0xcc, 0x30, 0x74, 0x55, 0x41, 0x02, + 0x9b, 0x2a, 0x2f, 0x17, 0x97, 0xe4, 0x1a, 0xd0, 0xd5, 0x8f, 0x60, 0x10, + 0xdb, 0xc2, 0x69, 0x94, 0x0d, 0xaf, 0x44, 0xd0, 0x95, 0x3d, 0x50, 0xf4, + 0x27, 0x5e, 0xdc, 0x56, 0x5f, 0xa7, 0x4c, 0x41, 0xe5, 0x9e, 0xc8, 0x31, + 0xb0, 0x8e, 0x3f, 0xde, 0xdc, 0x42, 0x24, 0x93, 0x98, 0xce, 0x69, 0x90, + 0x98, 0x73, 0x06, 0xb9, 0x8e, 0xa4, 0x8d, 0x97, 0xb1, 0x41, 0x33, 0x64, + 0x5a, 0xae, 0xe8, 0x2f, 0x5f, 0x99, 0x64, 0x3e, 0xea, 0xd4, 0xbe, 0xa2, + 0x52, 0x2d, 0xc7, 0x56, 0x46, 0xfb, 0x33, 0xd8, 0xde, 0xe6, 0x74, 0xf6, + 0x2e, 0x2a, 0x26, 
0xa1, 0x07, 0xcd, 0x3c, 0xca, 0x39, 0x74, 0x61, 0x4a, + 0x53, 0xf7, 0x8c, 0xd7, 0x3c, 0x4f, 0x4f, 0xd9, 0x14, 0x74, 0x56, 0xa8, + 0x3b, 0x3b, 0xe4, 0xe5, 0x70, 0x2e, 0xda, 0xde, 0xcd, 0x65, 0x4f, 0x2e, + 0xb6, 0x76, 0x17, 0x59, 0x6a, 0xaf, 0x0a, 0x24, 0x8c, 0x99, 0x0b, 0x2a, + 0xac, 0x46, 0x74, 0x2c, 0x3b, 0x40, 0x20, 0xad, 0x30, 0xab, 0x63, 0x34, + 0x8f, 0x30, 0x22, 0x50, 0x5c, 0xf8, 0x73, 0x21, 0x3e, 0xeb, 0x16, 0x44, + 0x30, 0xb9, 0x59, 0x0f, 0xf0, 0xe5, 0xb6, 0x6a, 0xde, 0x32, 0x03, 0x28, + 0x3c, 0xc8, 0xc2, 0x8d, 0x6b, 0x72, 0x2f, 0x3e, 0x2b, 0x99, 0xc1, 0xa6, + 0xdf, 0x5a, 0x91, 0x2d, 0x40, 0x39, 0xb2, 0x24, 0x27, 0x25, 0x26, 0x51, + 0xbb, 0xb5, 0x6a, 0x47, 0x38, 0x94, 0x2c, 0x3e, 0xa0, 0x96, 0x19, 0xf7, + 0x99, 0x0c, 0x34, 0x41, 0xb9, 0x0d, 0xad, 0x37, 0xa6, 0x0c, 0x38, 0x9c, + 0xee, 0x03, 0x68, 0x62, 0x76, 0x64, 0x18, 0x63, 0x62, 0x10, 0xd6, 0x2a, + 0xca, 0xdb, 0x73, 0x9b, 0x93, 0x35, 0x29, 0xb0, 0xec, 0x6c, 0xa8, 0x1f, + 0xa6, 0xac, 0xf8, 0xd8, 0xfa, 0x98, 0xc3, 0x02, 0xf0, 0xf5, 0x66, 0x2c, + 0xfc, 0x75, 0xc7, 0xb0, 0x76, 0xfe, 0x0f, 0x92, 0x9b, 0xce, 0xc5, 0xe8, + 0x9a, 0x5e, 0x8f, 0x16, 0x26, 0x8c, 0x97, 0x20, 0x97, 0x36, 0xca, 0x56, + 0xed, 0xf2, 0x05, 0x53, 0xf7, 0x9f, 0x23, 0xbb, 0x1e, 0xdc, 0x5a, 0x94, + 0x0b, 0x1d, 0x0e, 0x55, 0xc7, 0x34, 0xff, 0xd9, 0xa3, 0x37, 0x69, 0x63, + 0x9f, 0x00, 0x0f, 0xa1, 0x5c, 0x1f, 0x50, 0x56, 0x25, 0xf0, 0xb8, 0x0e, + 0x92, 0x70, 0xcd, 0xa0, 0xca, 0x2a, 0xce, 0xa5, 0x21, 0xe7, 0x5b, 0x10, + 0x13, 0xd5, 0x9b, 0x9f, 0x60, 0x1b, 0x3f, 0x21, 0xa9, 0x27, 0xd9, 0xeb, + 0xdc, 0xe8, 0x05, 0x8e, 0x09, 0x27, 0x4b, 0x8b, 0xb1, 0x3b, 0x07, 0xb1, + 0xe9, 0x55, 0xc4, 0xab, 0x5d, 0x74, 0x11, 0xcf, 0x98, 0x5d, 0x47, 0x58, + 0x9d, 0x08, 0xec, 0x0b, 0x31, 0x69, 0x98, 0xad, 0xd0, 0x93, 0x09, 0xc7, + 0xcc, 0xe3, 0x64, 0x67, 0xef, 0xce, 0x98, 0xf3, 0xc2, 0x69, 0xd4, 0x47, + 0x4d, 0xf7, 0x1a, 0x10, 0xa9, 0x18, 0x35, 0x94, 0xc8, 0xe1, 0xd2, 0xf5, + 0xb5, 0xb4, 0x0b, 0xd7, 0x28, 0xa8, 0x97, 0x9b, 0xbf, 0x90, 0xe5, 0xc6, + 0xde, 0xf7, 0x4f, 
0x33, 0xaf, 0x36, 0xe2, 0xa8, 0x65, 0x56, 0xdd, 0xe8, + 0x79, 0xae, 0x68, 0xc1, 0xf3, 0x5b, 0x26, 0x59, 0x53, 0x00, 0x43, 0x4c, + 0x3e, 0xf9, 0x24, 0xc4, 0x8d, 0x73, 0x00, 0x6c, 0xb2, 0x97, 0x56, 0x90, + 0x42, 0xde, 0xba, 0xd6, 0x3a, 0x6d, 0x39, 0x9d, 0xbe, 0x1c, 0xca, 0x24, + 0xbb, 0xba, 0x06, 0xf0, 0x59, 0x74, 0x32, 0x99, 0x1b, 0x02, 0xad, 0xc1, + 0x8b, 0xd4, 0x0b, 0xd8, 0xb7, 0xe7, 0xbd, 0xbd, 0x68, 0x56, 0xc1, 0x1e, + 0xda, 0xa4, 0xfe, 0x6b, 0x94, 0xf3, 0xda, 0x9a, 0x33, 0x01, 0x97, 0xb6, + 0x39, 0xc4, 0xe7, 0x57, 0xee, 0xcf, 0x0e, 0xce, 0x40, 0x7a, 0xd4, 0x4d, + 0x30, 0x6a, 0x57, 0x8f, 0x97, 0x92, 0x59, 0xeb, 0xf2, 0x18, 0x8c, 0x77, + 0xd9, 0x8f, 0x72, 0xff, 0xd5, 0xb2, 0x1f, 0x2e, 0xba, 0xb6, 0x46, 0x1a, + 0x33, 0xe0, 0x74, 0x2a, 0xd7, 0xdb, 0xc7, 0x07, 0x37, 0x2f, 0x55, 0xe2, + 0x70, 0x43, 0xc2, 0xbc, 0x33, 0x03, 0xc9, 0xd4, 0x4e, 0x6e, 0x3e, 0xc9, + 0x67, 0x55, 0xf8, 0x6d, 0x63, 0x9f, 0x6b, 0x3f, 0x5b, 0xc7, 0xe9, 0xb8, + 0x31, 0x04, 0x0b, 0x71, 0x15, 0xcd, 0x34, 0xe4, 0xaf, 0x74, 0x73, 0xea, + 0xbf, 0x20, 0x00, 0x75, 0xd7, 0xa7, 0xf7, 0x9c, 0xf5, 0xa1, 0x28, 0xc7, + 0xfe, 0x6b, 0xa2, 0x36, 0xdc, 0xd4, 0xf0, 0xd7, 0x42, 0x4e, 0xe4, 0x3f, + 0x00, 0x09, 0x3c, 0x5e, 0x1f, 0xc8, 0xfd, 0xb9, 0xd8, 0x90, 0xdb, 0xf4, + 0x41, 0x0b, 0xda, 0x68, 0xe1, 0xe4, 0xb9, 0xfb, 0x36, 0x37, 0xa9, 0x5f, + 0xc9, 0xb6, 0xb8, 0xa4, 0xda, 0x41, 0xaa, 0xab, 0xa8, 0xc8, 0xd3, 0xc6, + 0x6a, 0xbe, 0x03, 0x77, 0xcc, 0x1a, 0x8d, 0x0d, 0xe8, 0xcc, 0x58, 0x46, + 0x71, 0x33, 0x19, 0x62, 0xe5, 0xc4, 0xe3, 0x4a, 0x1d, 0xf7, 0x96, 0xd4, + 0x08, 0xe5, 0xa8, 0x18, 0x40, 0x2d, 0xc5, 0xd7, 0xa7, 0x31, 0xa2, 0x5f, + 0x60, 0xde, 0x21, 0xe5, 0xaa, 0x65, 0x93, 0x0d, 0xdb, 0x55, 0x54, 0x88, + 0xbd, 0x53, 0x8e, 0xe0, 0xa6, 0x23, 0xcd, 0x1d, 0xb7, 0xbd, 0x2a, 0x8c, + 0x0e, 0x67, 0x65, 0xab, 0xda, 0xe9, 0x3b, 0x12, 0xf6, 0x97, 0x4b, 0xe8, + 0x16, 0xf7, 0x09, 0xb6, 0x45, 0x97, 0x16, 0xec, 0xd9, 0xdc, 0x8d, 0x01, + 0xba, 0xb0, 0xb6, 0xdd, 0x59, 0x60, 0xbf, 0x92, 0x92, 0xc3, 0x21, 0x41, + 0x46, 0xcb, 0x5e, 
0x6e, 0x99, 0x10, 0x41, 0x45, 0x9a, 0xb9, 0xe0, 0x6d, + 0x22, 0x68, 0xd3, 0x5a, 0xaa, 0x6e, 0xb4, 0xc6, 0x42, 0xa2, 0xad, 0xf1, + 0xf7, 0x0b, 0x3d, 0x29, 0x38, 0xa2, 0x11, 0xf8, 0x57, 0x25, 0xb8, 0x8f, + 0xbc, 0x65, 0xac, 0x0d, 0xf0, 0xb7, 0x5c, 0x95, 0xfb, 0x5d, 0xdb, 0x54, + 0x3d, 0x3e, 0xd6, 0x4f, 0x2a, 0xfe, 0x43, 0xfc, 0x1c, 0xca, 0xb9, 0xb3, + 0x95, 0x06, 0x90, 0xd9, 0x5d, 0x43, 0xc4, 0xe9, 0xbb, 0x17, 0xd6, 0xaf, + 0xf2, 0xb0, 0x24, 0x9d, 0x27, 0xdf, 0xaf, 0xf7, 0x6f, 0xd1, 0x4c, 0xbe, + 0xd0, 0x1d, 0x16, 0x3f, 0xf5, 0x23, 0xdb, 0x52, 0xc4, 0x3b, 0x99, 0x3d, + 0xd5, 0xdc, 0x0b, 0x54, 0x3b, 0xfd, 0x9d, 0x36, 0xf6, 0xd9, 0x63, 0xd4, + 0xc0, 0x8f, 0x9d, 0x00, 0xa6, 0x1e, 0x41, 0x72, 0x18, 0xa6, 0xc5, 0xd0, + 0xb6, 0xdd, 0x10, 0x61, 0x45, 0xe0, 0xdc, 0xcc, 0x92, 0xd3, 0x05, 0x54, + 0x26, 0x2c, 0xcf, 0x94, 0x67, 0xa5, 0xae, 0x62, 0x97, 0x4e, 0x10, 0x2b, + 0xf4, 0x65, 0x89, 0x21, 0x98, 0xad, 0x25, 0x6a, 0x01, 0xa9, 0x4f, 0x57, + 0x2b, 0xbe, 0x3b, 0xcc, 0x34, 0x89, 0xc3, 0xd2, 0xa0, 0xc5, 0x72, 0xd9, + 0x39, 0x3f, 0x45, 0x62, 0x73, 0xda, 0xf3, 0xe7, 0xbf, 0xfd, 0xfe, 0x5b, + 0xe0, 0xc5, 0x9f, 0xf9, 0xbe, 0x2b, 0x9a, 0xf7, 0xc2, 0xe9, 0x59, 0x73, + 0xc4, 0x0a, 0xfe, 0x73, 0x5b, 0x34, 0xb9, 0xfc, 0x45, 0xb7, 0x4d, 0x39, + 0xc2, 0xcd, 0x5f, 0x33, 0x91, 0xab, 0x48, 0x57, 0x0a, 0x27, 0xf3, 0xd4, + 0xf3, 0xb4, 0x57, 0x04, 0xeb, 0x8a, 0xb2, 0xd4, 0x06, 0x60, 0x09, 0x48, + 0x58, 0xf8, 0x1f, 0x06, 0x8c, 0x2d, 0x55, 0x2b, 0x8d, 0xbb, 0x37, 0xbb, + 0xc5, 0xa3, 0x05, 0x38, 0xf7, 0x47, 0x0a, 0xd9, 0xa8, 0x5a, 0x5b, 0x75, + 0x58, 0xa3, 0x35, 0x01, 0x1a, 0x5c, 0xe3, 0x97, 0xef, 0x04, 0xd9, 0x28, + 0x93, 0xc9, 0x59, 0xfc, 0xc1, 0x9b, 0x25, 0xe8, 0x44, 0x05, 0x17, 0xdc, + 0xe1, 0xb2, 0x06, 0xd6, 0x08, 0xe0, 0x00, 0xe0, 0x06, 0xaf, 0xb6, 0xf8, + 0x63, 0x6c, 0x54, 0x29, 0x7a, 0x25, 0x0c, 0xc4, 0xe7, 0x6c, 0x2b, 0xe8, + 0xe9, 0x06, 0xa4, 0x9e, 0xb0, 0x38, 0xd4, 0xf1, 0x46, 0xb3, 0x93, 0x54, + 0xa7, 0xa1, 0xcd, 0x65, 0x43, 0xe8, 0xc3, 0x03, 0x60, 0x9c, 0x39, 0x02, + 0xea, 0xc5, 0x0c, 
0x96, 0xd2, 0x05, 0x0d, 0x1f, 0xc7, 0x04, 0xc4, 0xa3, + 0xc4, 0xc0, 0xa9, 0x0b, 0xc7, 0xa1, 0x3f, 0xdc, 0x35, 0x51, 0x4d, 0xc8, + 0xc2, 0x87, 0x99, 0x3c, 0x46, 0xb3, 0x4e, 0xc9, 0xbf, 0xb3, 0x34, 0x8b, + 0xb7, 0x6f, 0xe5, 0x95, 0x9b, 0x17, 0x20, 0x56, 0xa6, 0x64, 0x4c, 0x77, + 0xdc, 0x0e, 0x28, 0xc3, 0xef, 0xf4, 0x28, 0x47, 0xd4, 0x0c, 0x6a, 0xe1, + 0x75, 0x63, 0xc9, 0xae, 0xe9, 0x36, 0x57, 0xfd, 0x08, 0x2f, 0xb2, 0x0b, + 0x48, 0xd4, 0x04, 0x24, 0x2f, 0x17, 0x03, 0x9e, 0xfe, 0xfd, 0x67, 0x0e, + 0xbe, 0x66, 0xcf, 0x2c, 0xaa, 0x4f, 0x1c, 0x32, 0x2e, 0xa0, 0xfb, 0x55, + 0x40, 0x15, 0x5d, 0x51, 0xca, 0xbe, 0xff, 0xb2, 0xb2, 0x2b, 0x47, 0xee, + 0x37, 0xc8, 0x65, 0xad, 0xda, 0xb9, 0x3a, 0x75, 0x3a, 0x98, 0x1f, 0xcf, + 0xd7, 0x48, 0x56, 0xa2, 0xed, 0xb4, 0x46, 0x60, 0x30, 0x6a, 0x19, 0x5b, + 0x38, 0xc8, 0x0d, 0x3a, 0xc3, 0xe1, 0x34, 0x6e, 0x39, 0x5f, 0xf2, 0x4d, + 0x78, 0x02, 0xba, 0x3c, 0x71, 0x70, 0x75, 0x6c, 0xb0, 0xfa, 0x38, 0xe3, + 0x6b, 0x42, 0x1e, 0x23, 0xcd, 0xe6, 0xf8, 0xc5, 0x9c, 0x24, 0x3d, 0x98, + 0xa8, 0xbb, 0x4a, 0x07, 0x8c, 0xb6, 0xfa, 0x13, 0xd0, 0xfc, 0xc5, 0xdc, + 0xb2, 0xcd, 0x65, 0x59, 0xc2, 0x3a, 0x24, 0x47, 0x1c, 0x53, 0x92, 0x57, + 0x21, 0xf3, 0x26, 0x9b, 0xe9, 0xa5, 0x95, 0x9a, 0xd6, 0xa5, 0xe2, 0xda, + 0x0e, 0xb7, 0xab, 0x9e, 0xee, 0xe3, 0xef, 0x59, 0xd2, 0x88, 0x32, 0x1f, + 0x0d, 0xbf, 0xf2, 0xa4, 0x3b, 0xd7, 0xd5, 0xf2, 0xa4, 0xae, 0x65, 0xab, + 0xb3, 0x72, 0xf6, 0x3b, 0xe8, 0xc5, 0x2b, 0xad, 0xcc, 0xbe, 0x02, 0x95, + 0x63, 0x95, 0x2c, 0x22, 0x74, 0x3a, 0x1b, 0xd5, 0xd1, 0x1d, 0xf8, 0x69, + 0x03, 0x98, 0x70, 0x66, 0x43, 0xb5, 0x6d, 0xd0, 0x27, 0x6a, 0x1c, 0xfc, + 0xf9, 0xaf, 0x71, 0x9b, 0x8c, 0xcb, 0xf8, 0xbd, 0x18, 0xad, 0x5f, 0xb7, + 0xbc, 0xfb, 0xbd, 0xde, 0xb9, 0xdc, 0x54, 0x65, 0x3b, 0xaf, 0xa7, 0x92, + 0xbe, 0x62, 0xdc, 0x25, 0x50, 0x48, 0x78, 0xd4, 0xed, 0xed, 0x96, 0x3f, + 0x53, 0xc5, 0xb5, 0x5f, 0xac, 0xa7, 0x5c, 0x92, 0xd9, 0xfe, 0x3b, 0xcd, + 0xbb, 0x29, 0xa0, 0xe0, 0x1e, 0xb0, 0x92, 0xad, 0x6b, 0x45, 0x29, 0x59, + 0xff, 0x5d, 0x5a, 
0xfe, 0x8f, 0x63, 0x86, 0x6d, 0xa4, 0x4a, 0x53, 0xc4, + 0x3e, 0x39, 0xbf, 0xe5, 0x20, 0xbc, 0xd1, 0xdf, 0x59, 0x9c, 0x3a, 0x72, + 0x3b, 0x8f, 0xb2, 0x40, 0xe5, 0x9e, 0xa5, 0x02, 0x35, 0xd0, 0x4d, 0x6f, + 0x7d, 0xd5, 0x4c, 0xde, 0x51, 0x0a, 0x9a, 0x57, 0x43, 0x43, 0xe5, 0x97, + 0x95, 0x4b, 0xb2, 0x6c, 0xaf, 0x92, 0x4e, 0x52, 0x06, 0x0b, 0x72, 0x60, + 0x9e, 0x5c, 0xa1, 0xe3, 0x9b, 0xb3, 0x8c, 0x32, 0xcd, 0xc1, 0x4a, 0x88, + 0xd6, 0x3d, 0xed, 0xe8, 0x42, 0x5d, 0x53, 0xdd, 0x00, 0x52, 0x26, 0x2e, + 0xd5, 0x41, 0xf2, 0xfc, 0x51, 0x40, 0x45, 0xe4, 0x00, 0xe3, 0x1c, 0xfb, + 0x32, 0x33, 0x22, 0xed, 0x15, 0x12, 0x9b, 0xc4, 0x89, 0xd0, 0x0e, 0x95, + 0xad, 0xfd, 0x04, 0x2e, 0xee, 0x73, 0x06, 0xee, 0x23, 0xe2, 0xd3, 0x3d, + 0x44, 0x62, 0x35, 0xdc, 0x18, 0x9d, 0xf4, 0x9d, 0x92, 0x00, 0x4e, 0x8e, + 0x4e, 0x24, 0xa1, 0x2c, 0xb2, 0xb2, 0x3f, 0xfc, 0xe4, 0x27, 0x43, 0x3b, + 0x59, 0xb4, 0x13, 0xff, 0x57, 0xdf, 0x3d, 0xee, 0x1a, 0xab, 0x8c, 0x51, + 0xd9, 0x96, 0x1f, 0x2b, 0x66, 0x67, 0x42, 0xb6, 0x91, 0xfe, 0x8f, 0x4d, + 0xa6, 0xd3, 0x3b, 0x51, 0x45, 0x35, 0xab, 0xe5, 0x6e, 0x07, 0xed, 0x24, + 0x95, 0x3d, 0x6a, 0x47, 0x3f, 0x4e, 0xe4, 0x13, 0x5f, 0xfc, 0x19, 0xe8, + 0x09, 0x4b, 0x3d, 0xdf, 0x4f, 0xb4, 0xb4, 0xc1, 0x74, 0x31, 0xff, 0x13, + 0x00, 0xaf, 0x07, 0x16, 0xb6, 0x57, 0xfe, 0x6a, 0x37, 0x05, 0x62, 0x01, + 0xa0, 0xfa, 0xe2, 0xe5, 0x57, 0xcb, 0xa4, 0x5a, 0x57, 0xee, 0xd1, 0x5f, + 0x14, 0x23, 0xbe, 0xef, 0x9b, 0x91, 0x0f, 0x97, 0xa8, 0xf2, 0x36, 0xf7, + 0xc3, 0xb6, 0xbe, 0xe5, 0x59, 0x2b, 0x3c, 0xb3, 0x5d, 0x9f, 0x1e, 0x3b, + 0xd3, 0xf7, 0xee, 0x2e, 0xc0, 0x73, 0x6f, 0x2e, 0xfd, 0xc7, 0x3f, 0xfd, + 0x9c, 0xac, 0xbd, 0xa1, 0x8e, 0xcc, 0x59, 0x41, 0xa4, 0x41, 0xd3, 0x39, + 0x28, 0x67, 0x96, 0x14, 0x42, 0xc3, 0x38, 0x96, 0x0d, 0xfc, 0x68, 0x3d, + 0x2e, 0x2f, 0x46, 0x24, 0x66, 0x0d, 0xa6, 0x72, 0xc7, 0x27, 0x66, 0x3c, + 0xad, 0x55, 0xae, 0xbd, 0x34, 0xb4, 0x3b, 0x60, 0x73, 0xa5, 0xaa, 0xd4, + 0x56, 0x0b, 0x61, 0xf5, 0x5c, 0x66, 0x2e, 0x9d, 0x33, 0xfe, 0xfe, 0x7b, + 0x21, 0xbc, 0x36, 
0xec, 0x0f, 0x03, 0x28, 0xa4, 0xd6, 0x05, 0x21, 0x30, + 0xf8, 0x3c, 0xd9, 0x3b, 0xaf, 0x5d, 0x92, 0x25, 0xce, 0xac, 0x28, 0xe1, + 0xd1, 0x02, 0x3c, 0x49, 0xe6, 0xed, 0xb7, 0x0e, 0xe7, 0xe7, 0x1e, 0x56, + 0xbf, 0x5d, 0xfd, 0xed, 0xdb, 0x4d, 0x63, 0x03, 0x8c, 0x06, 0x30, 0xfa, + 0x62, 0x78, 0x3f, 0x6e, 0x63, 0x1e, 0xa6, 0x4b, 0x96, 0xe9, 0xe4, 0x2d, + 0x16, 0x51, 0xf2, 0xf1, 0xa7, 0x2a, 0xeb, 0x15, 0xb5, 0xb1, 0x04, 0x9a, + 0xde, 0x77, 0xde, 0xcf, 0xcc, 0x21, 0xd9, 0x30, 0xf1, 0xea, 0xb9, 0xb0, + 0x39, 0xe1, 0x6f, 0xc7, 0x0a, 0xbd, 0x64, 0x75, 0x59, 0xbf, 0x3c, 0xbf, + 0xd0, 0xdb, 0x00, 0xfa, 0x2e, 0x36, 0xcc, 0xb5, 0xd1, 0x20, 0x46, 0xb0, + 0xd7, 0xfc, 0xb1, 0x5b, 0x54, 0x9f, 0xe2, 0xe1, 0xd0, 0x18, 0xa3, 0x51, + 0x62, 0x24, 0x0f, 0xa1, 0xa1, 0x9a, 0x47, 0x33, 0xca, 0xb9, 0x26, 0xb6, + 0x0b, 0x46, 0xd4, 0xb5, 0xc6, 0xbb, 0x72, 0x1e, 0x60, 0xeb, 0xb4, 0x9d, + 0x9f, 0x09, 0x10, 0x12, 0xce, 0x68, 0xa3, 0xb6, 0x8c, 0xce, 0xd7, 0x26, + 0x55, 0xb5, 0x90, 0x08, 0x9f, 0xf2, 0xa8, 0xc0, 0x56, 0xd8, 0xf6, 0x29, + 0x60, 0xe0, 0x73, 0x52, 0x22, 0x6f, 0x35, 0x4e, 0xe7, 0xc5, 0xa3, 0x95, + 0xcd, 0xd0, 0x8e, 0xd3, 0x95, 0xe3, 0x03, 0x04, 0x00, 0x54, 0xeb, 0xef, + 0x27, 0x11, 0xef, 0x38, 0x56, 0x6f, 0xa0, 0xe5, 0x72, 0x2a, 0x97, 0x23, + 0x56, 0xe2, 0x93, 0x21, 0x3f, 0xe2, 0xd6, 0x12, 0xcd, 0x61, 0x50, 0x44, + 0xd3, 0xe3, 0x8d, 0x3f, 0x24, 0x90, 0x6c, 0x53, 0xad, 0x1c, 0xad, 0x03, + 0x0f, 0x89, 0x63, 0xf9, 0xb9, 0xbc, 0xe2, 0x56, 0xdd, 0x16, 0xcf, 0x2d, + 0xa1, 0xda, 0xf9, 0x3f, 0xec, 0xbf, 0xb1, 0xb6, 0xe1, 0xdf, 0x3f, 0x11, + 0x02, 0x76, 0xe9, 0xe2, 0x9f, 0xa2, 0x02, 0xce, 0x3e, 0xf9, 0xcf, 0x4f, + 0xd9, 0x5f, 0x72, 0x5d, 0x51, 0xa7, 0x1d, 0x98, 0xeb, 0x8e, 0x97, 0x98, + 0x39, 0x58, 0x52, 0x11, 0xed, 0x95, 0x3c, 0x94, 0xf0, 0x6c, 0xa2, 0x3e, + 0x5f, 0x5f, 0x05, 0x98, 0xf1, 0x73, 0xab, 0xc7, 0xa8, 0x4b, 0x92, 0x73, + 0xda, 0x59, 0x1d, 0x56, 0x11, 0xc2, 0x38, 0x43, 0xdb, 0x4b, 0xbe, 0x08, + 0xdd, 0xf2, 0x5d, 0x47, 0x26, 0xdc, 0x16, 0xf9, 0x62, 0xf8, 0x92, 0x19, + 0x5c, 0x6f, 0x2b, 
0xe1, 0x15, 0x66, 0xfa, 0xdb, 0x3a, 0xe0, 0x92, 0x9c, + 0x70, 0x91, 0x3f, 0xb8, 0xb0, 0x01, 0xc1, 0x44, 0xf6, 0x62, 0x47, 0x37, + 0xe9, 0xd9, 0x4c, 0x0f, 0x99, 0x6a, 0xc4, 0x60, 0x26, 0x2f, 0xc6, 0x43, + 0x50, 0x62, 0xee, 0x44, 0x21, 0xbd, 0xad, 0x50, 0x2d, 0x58, 0x78, 0xea, + 0x5a, 0x5f, 0x5c, 0xf7, 0x28, 0xa9, 0xdf, 0x0e, 0xd3, 0x67, 0xdf, 0x1f, + 0x4c, 0xd3, 0xe9, 0x5e, 0x0f, 0xa3, 0xb7, 0x56, 0xa5, 0x4e, 0x5f, 0x2a, + 0xb6, 0x14, 0x5e, 0x2f, 0x16, 0x71, 0x48, 0x59, 0x77, 0x6b, 0xf9, 0x6c, + 0x79, 0xba, 0xc4, 0x26, 0x30, 0x44, 0x61, 0x62, 0x60, 0xef, 0x35, 0x95, + 0xe3, 0x77, 0xd5, 0xc8, 0x44, 0xa4, 0xf8, 0x95, 0xba, 0xd1, 0x73, 0x6f, + 0x92, 0xf2, 0xd3, 0x98, 0x4c, 0x8f, 0xe0, 0x2e, 0x27, 0xaa, 0x2f, 0x63, + 0x00, 0x00, 0x00, 0x00, 0x06, 0xff, 0xff, 0xff, 0x04, 0x00, 0x00, 0x00, + 0x08, 0x00, 0x00, 0x00, 0xff, 0xff, 0xff, 0xff, 0x80, 0x04, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x26, 0xff, 0xff, 0xff, 0x04, 0x00, 0x00, 0x00, 0x28, 0x00, 0x00, 0x00, + 0x0e, 0xfe, 0xff, 0xff, 0xbb, 0xfd, 0xff, 0xff, 0xe1, 0x05, 0x00, 0x00, + 0x4b, 0x0f, 0x00, 0x00, 0x8e, 0x15, 0x00, 0x00, 0x7f, 0x04, 0x00, 0x00, + 0x02, 0x02, 0x00, 0x00, 0x53, 0xe6, 0xff, 0xff, 0xa6, 0x04, 0x00, 0x00, + 0xdf, 0x07, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x66, 0xff, 0xff, 0xff, 0x04, 0x00, 0x00, 0x00, + 0x80, 0x00, 0x00, 0x00, 0x7f, 0xfd, 0xff, 0xff, 0x3e, 0xf8, 0xff, 0xff, + 0xae, 0x03, 0x00, 0x00, 0x5c, 0xfe, 0xff, 0xff, 0x82, 0xfa, 0xff, 0xff, + 0xbd, 0xf8, 0xff, 0xff, 0x04, 0xfe, 0xff, 0xff, 0x8c, 0xfe, 0xff, 0xff, + 0x9b, 0xf8, 0xff, 0xff, 0x51, 0x02, 0x00, 0x00, 0x19, 0xfe, 0xff, 0xff, + 0x54, 0xfe, 0xff, 0xff, 0x8f, 0xff, 0xff, 0xff, 0xe7, 0xfd, 0xff, 0xff, + 0xc2, 0x07, 0x00, 0x00, 0x36, 0x06, 0x00, 0x00, 0x57, 0xfd, 0xff, 0xff, + 0xa3, 0x03, 0x00, 0x00, 0x3c, 0x00, 0x00, 0x00, 0x79, 0x03, 0x00, 0x00, + 0x9b, 0xf7, 0xff, 0xff, 0xc7, 0x04, 0x00, 0x00, 0xbf, 0x06, 0x00, 0x00, + 0x86, 0xfe, 0xff, 
0xff, 0x20, 0xfb, 0xff, 0xff, 0x90, 0xfc, 0xff, 0xff, + 0x16, 0x00, 0x00, 0x00, 0x8e, 0xff, 0xff, 0xff, 0xa0, 0x03, 0x00, 0x00, + 0xc7, 0xff, 0xff, 0xff, 0x51, 0x01, 0x00, 0x00, 0x24, 0xf8, 0xff, 0xff, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x06, 0x00, 0x08, 0x00, 0x04, 0x00, 0x06, 0x00, 0x00, 0x00, + 0x04, 0x00, 0x00, 0x00, 0x40, 0x00, 0x00, 0x00, 0xee, 0x01, 0x00, 0x00, + 0xda, 0x02, 0x00, 0x00, 0xa9, 0x00, 0x00, 0x00, 0x40, 0x00, 0x00, 0x00, + 0xc4, 0xfe, 0xff, 0xff, 0xfa, 0xfc, 0xff, 0xff, 0xc0, 0xff, 0xff, 0xff, + 0x6a, 0xff, 0xff, 0xff, 0x92, 0x02, 0x00, 0x00, 0xa4, 0xff, 0xff, 0xff, + 0xfd, 0xfe, 0xff, 0xff, 0x4e, 0xfd, 0xff, 0xff, 0x87, 0x00, 0x00, 0x00, + 0x19, 0xfe, 0xff, 0xff, 0x17, 0xff, 0xff, 0xff, 0xa0, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0xf4, 0xf3, 0xff, 0xff, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x04, 0xf4, 0xff, 0xff, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x0f, 0x00, 0x00, 0x00, 0x4d, 0x4c, 0x49, 0x52, + 0x20, 0x43, 0x6f, 0x6e, 0x76, 0x65, 0x72, 0x74, 0x65, 0x64, 0x2e, 0x00, + 0x01, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0x00, 0x00, 0x0e, 0x00, + 0x18, 0x00, 0x04, 0x00, 0x08, 0x00, 0x0c, 0x00, 0x10, 0x00, 0x14, 0x00, + 0x0e, 0x00, 0x00, 0x00, 0xf8, 0x01, 0x00, 0x00, 0xec, 0x01, 0x00, 0x00, + 0xe0, 0x01, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, + 0x04, 0x00, 0x00, 0x00, 0x6d, 0x61, 0x69, 0x6e, 0x00, 0x00, 0x00, 0x00, + 0x07, 0x00, 0x00, 0x00, 0xa4, 0x01, 0x00, 0x00, 0x4c, 0x01, 0x00, 0x00, + 0xfc, 0x00, 0x00, 0x00, 0xa8, 0x00, 0x00, 0x00, 0x70, 0x00, 0x00, 0x00, + 0x38, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x82, 0xfe, 0xff, 0xff, + 0x05, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, + 0x01, 0x00, 0x00, 0x00, 0x0e, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, + 0x0c, 0x00, 0x00, 0x00, 0x00, 0x00, 0x0e, 0x00, 0x18, 0x00, 0x08, 0x00, + 0x0c, 0x00, 0x10, 
0x00, 0x07, 0x00, 0x14, 0x00, 0x0e, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x08, 0x03, 0x00, 0x00, 0x00, 0x18, 0x00, 0x00, 0x00, + 0x0c, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0xc8, 0xf4, 0xff, 0xff, + 0x01, 0x00, 0x00, 0x00, 0x0c, 0x00, 0x00, 0x00, 0x03, 0x00, 0x00, 0x00, + 0x0b, 0x00, 0x00, 0x00, 0x05, 0x00, 0x00, 0x00, 0x03, 0x00, 0x00, 0x00, + 0xe6, 0xfe, 0xff, 0xff, 0x02, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, + 0x04, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x0b, 0x00, 0x00, 0x00, + 0x02, 0x00, 0x00, 0x00, 0x0a, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x0e, 0x00, 0x1a, 0x00, 0x08, 0x00, 0x0c, 0x00, 0x10, 0x00, + 0x07, 0x00, 0x14, 0x00, 0x0e, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x05, + 0x01, 0x00, 0x00, 0x00, 0x3c, 0x00, 0x00, 0x00, 0x30, 0x00, 0x00, 0x00, + 0x14, 0x00, 0x00, 0x00, 0x00, 0x00, 0x0e, 0x00, 0x18, 0x00, 0x07, 0x00, + 0x08, 0x00, 0x0c, 0x00, 0x10, 0x00, 0x14, 0x00, 0x0e, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x01, 0x02, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, + 0x02, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, + 0x0a, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x09, 0x00, 0x00, 0x00, + 0xc2, 0xff, 0xff, 0xff, 0x00, 0x00, 0x00, 0x01, 0x24, 0x00, 0x00, 0x00, + 0x18, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0xb4, 0xff, 0xff, 0xff, + 0x00, 0x00, 0x01, 0x01, 0x01, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, + 0x01, 0x00, 0x00, 0x00, 0x09, 0x00, 0x00, 0x00, 0x03, 0x00, 0x00, 0x00, + 0x08, 0x00, 0x00, 0x00, 0x07, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x0e, 0x00, 0x14, 0x00, 0x00, 0x00, 0x08, 0x00, 0x0c, 0x00, + 0x07, 0x00, 0x10, 0x00, 0x0e, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, + 0x30, 0x00, 0x00, 0x00, 0x24, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, + 0x0c, 0x00, 0x10, 0x00, 0x06, 0x00, 0x08, 0x00, 0x0c, 0x00, 0x07, 0x00, + 0x0c, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x01, 0x01, 0x00, 0x00, 0x00, + 0x01, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x08, 0x00, 0x00, 0x00, + 0x03, 0x00, 0x00, 
0x00, 0x00, 0x00, 0x00, 0x00, 0x06, 0x00, 0x00, 0x00, + 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x0a, 0x00, 0x10, 0x00, 0x04, 0x00, + 0x08, 0x00, 0x0c, 0x00, 0x0a, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, + 0x10, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x0d, 0x00, 0x00, 0x00, + 0x01, 0x00, 0x00, 0x00, 0x0e, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, + 0x0d, 0x00, 0x00, 0x00, 0x0f, 0x00, 0x00, 0x00, 0xb8, 0x0d, 0x00, 0x00, + 0x64, 0x0c, 0x00, 0x00, 0x64, 0x0a, 0x00, 0x00, 0xe8, 0x09, 0x00, 0x00, + 0x9c, 0x09, 0x00, 0x00, 0x20, 0x09, 0x00, 0x00, 0x6c, 0x07, 0x00, 0x00, + 0x78, 0x04, 0x00, 0x00, 0x74, 0x03, 0x00, 0x00, 0x68, 0x02, 0x00, 0x00, + 0xbc, 0x01, 0x00, 0x00, 0x28, 0x01, 0x00, 0x00, 0xa4, 0x00, 0x00, 0x00, + 0x54, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0xc8, 0xff, 0xff, 0xff, + 0x28, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, + 0x02, 0x00, 0x00, 0x00, 0xff, 0xff, 0xff, 0xff, 0x0a, 0x00, 0x00, 0x00, + 0x08, 0x00, 0x00, 0x00, 0x49, 0x64, 0x65, 0x6e, 0x74, 0x69, 0x74, 0x79, + 0x00, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, + 0x0a, 0x00, 0x00, 0x00, 0x14, 0x00, 0x10, 0x00, 0x04, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x0c, 0x00, + 0x14, 0x00, 0x00, 0x00, 0x34, 0x00, 0x00, 0x00, 0x1c, 0x00, 0x00, 0x00, + 0x04, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0xff, 0xff, 0xff, 0xff, + 0x10, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, + 0x0c, 0x00, 0x00, 0x00, 0x63, 0x6f, 0x6e, 0x76, 0x32, 0x64, 0x5f, 0x69, + 0x6e, 0x70, 0x75, 0x74, 0x00, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, + 0x01, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, + 0x01, 0x00, 0x00, 0x00, 0x30, 0xf3, 0xff, 0xff, 0x00, 0x00, 0x00, 0x09, + 0x6c, 0x00, 0x00, 0x00, 0x0d, 0x00, 0x00, 0x00, 0x50, 0x00, 0x00, 0x00, + 0x14, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, + 0xff, 0xff, 0xff, 
0xff, 0x0a, 0x00, 0x00, 0x00, 0x14, 0xf3, 0xff, 0xff, + 0x2c, 0x00, 0x00, 0x00, 0x20, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, + 0x04, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x13, 0xc2, 0x47, 0x3b, + 0x01, 0x00, 0x00, 0x00, 0x8d, 0xf4, 0xad, 0x3e, 0x01, 0x00, 0x00, 0x00, + 0x15, 0x00, 0xe0, 0xbe, 0x0d, 0x00, 0x00, 0x00, 0x49, 0x64, 0x65, 0x6e, + 0x74, 0x69, 0x74, 0x79, 0x5f, 0x69, 0x6e, 0x74, 0x38, 0x00, 0x00, 0x00, + 0x02, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x0a, 0x00, 0x00, 0x00, + 0xb0, 0xf3, 0xff, 0xff, 0x00, 0x00, 0x00, 0x09, 0x7c, 0x00, 0x00, 0x00, + 0x0c, 0x00, 0x00, 0x00, 0x54, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, + 0x04, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0xff, 0xff, 0xff, 0xff, + 0x80, 0x04, 0x00, 0x00, 0x94, 0xf3, 0xff, 0xff, 0x30, 0x00, 0x00, 0x00, + 0x24, 0x00, 0x00, 0x00, 0x18, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, + 0x01, 0x00, 0x00, 0x00, 0x80, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, + 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x6c, 0x02, 0xa5, 0x3a, + 0x01, 0x00, 0x00, 0x00, 0x6a, 0x5d, 0xa4, 0x3e, 0x01, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x1a, 0x00, 0x00, 0x00, 0x73, 0x65, 0x71, 0x75, + 0x65, 0x6e, 0x74, 0x69, 0x61, 0x6c, 0x2f, 0x66, 0x6c, 0x61, 0x74, 0x74, + 0x65, 0x6e, 0x2f, 0x52, 0x65, 0x73, 0x68, 0x61, 0x70, 0x65, 0x00, 0x00, + 0x02, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x80, 0x04, 0x00, 0x00, + 0x40, 0xf4, 0xff, 0xff, 0x00, 0x00, 0x00, 0x09, 0x8c, 0x00, 0x00, 0x00, + 0x0b, 0x00, 0x00, 0x00, 0x5c, 0x00, 0x00, 0x00, 0x1c, 0x00, 0x00, 0x00, + 0x04, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0xff, 0xff, 0xff, 0xff, + 0x06, 0x00, 0x00, 0x00, 0x06, 0x00, 0x00, 0x00, 0x20, 0x00, 0x00, 0x00, + 0x2c, 0xf4, 0xff, 0xff, 0x30, 0x00, 0x00, 0x00, 0x24, 0x00, 0x00, 0x00, + 0x18, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, + 0x80, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0x00, 0x00, 0x00, 0x00, + 0x01, 0x00, 0x00, 
0x00, 0x6c, 0x02, 0xa5, 0x3a, 0x01, 0x00, 0x00, 0x00, + 0x6a, 0x5d, 0xa4, 0x3e, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x20, 0x00, 0x00, 0x00, 0x73, 0x65, 0x71, 0x75, 0x65, 0x6e, 0x74, 0x69, + 0x61, 0x6c, 0x2f, 0x6d, 0x61, 0x78, 0x5f, 0x70, 0x6f, 0x6f, 0x6c, 0x69, + 0x6e, 0x67, 0x32, 0x64, 0x2f, 0x4d, 0x61, 0x78, 0x50, 0x6f, 0x6f, 0x6c, + 0x00, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, + 0x06, 0x00, 0x00, 0x00, 0x06, 0x00, 0x00, 0x00, 0x20, 0x00, 0x00, 0x00, + 0xe8, 0xf4, 0xff, 0xff, 0x00, 0x00, 0x00, 0x09, 0xec, 0x00, 0x00, 0x00, + 0x0a, 0x00, 0x00, 0x00, 0x5c, 0x00, 0x00, 0x00, 0x1c, 0x00, 0x00, 0x00, + 0x04, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0xff, 0xff, 0xff, 0xff, + 0x0c, 0x00, 0x00, 0x00, 0x0c, 0x00, 0x00, 0x00, 0x20, 0x00, 0x00, 0x00, + 0xd4, 0xf4, 0xff, 0xff, 0x30, 0x00, 0x00, 0x00, 0x24, 0x00, 0x00, 0x00, + 0x18, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, + 0x80, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0x00, 0x00, 0x00, 0x00, + 0x01, 0x00, 0x00, 0x00, 0x6c, 0x02, 0xa5, 0x3a, 0x01, 0x00, 0x00, 0x00, + 0x6a, 0x5d, 0xa4, 0x3e, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x83, 0x00, 0x00, 0x00, 0x73, 0x65, 0x71, 0x75, 0x65, 0x6e, 0x74, 0x69, + 0x61, 0x6c, 0x2f, 0x63, 0x6f, 0x6e, 0x76, 0x32, 0x64, 0x5f, 0x31, 0x2f, + 0x52, 0x65, 0x6c, 0x75, 0x3b, 0x73, 0x65, 0x71, 0x75, 0x65, 0x6e, 0x74, + 0x69, 0x61, 0x6c, 0x2f, 0x63, 0x6f, 0x6e, 0x76, 0x32, 0x64, 0x5f, 0x31, + 0x2f, 0x42, 0x69, 0x61, 0x73, 0x41, 0x64, 0x64, 0x3b, 0x73, 0x65, 0x71, + 0x75, 0x65, 0x6e, 0x74, 0x69, 0x61, 0x6c, 0x2f, 0x63, 0x6f, 0x6e, 0x76, + 0x32, 0x64, 0x5f, 0x31, 0x2f, 0x43, 0x6f, 0x6e, 0x76, 0x32, 0x44, 0x3b, + 0x73, 0x65, 0x71, 0x75, 0x65, 0x6e, 0x74, 0x69, 0x61, 0x6c, 0x2f, 0x63, + 0x6f, 0x6e, 0x76, 0x32, 0x64, 0x5f, 0x31, 0x2f, 0x42, 0x69, 0x61, 0x73, + 0x41, 0x64, 0x64, 0x2f, 0x52, 0x65, 0x61, 0x64, 0x56, 0x61, 0x72, 0x69, + 0x61, 0x62, 0x6c, 0x65, 0x4f, 0x70, 0x2f, 0x72, 0x65, 0x73, 0x6f, 0x75, + 0x72, 0x63, 0x65, 
0x00, 0x04, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, + 0x0c, 0x00, 0x00, 0x00, 0x0c, 0x00, 0x00, 0x00, 0x20, 0x00, 0x00, 0x00, + 0xf0, 0xf5, 0xff, 0xff, 0x00, 0x00, 0x00, 0x09, 0xe4, 0x00, 0x00, 0x00, + 0x09, 0x00, 0x00, 0x00, 0x5c, 0x00, 0x00, 0x00, 0x1c, 0x00, 0x00, 0x00, + 0x04, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0xff, 0xff, 0xff, 0xff, + 0x0e, 0x00, 0x00, 0x00, 0x0e, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, + 0xdc, 0xf5, 0xff, 0xff, 0x30, 0x00, 0x00, 0x00, 0x24, 0x00, 0x00, 0x00, + 0x18, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, + 0x80, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0x00, 0x00, 0x00, 0x00, + 0x01, 0x00, 0x00, 0x00, 0x15, 0xa1, 0x10, 0x3b, 0x01, 0x00, 0x00, 0x00, + 0x74, 0x10, 0x10, 0x3f, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x7b, 0x00, 0x00, 0x00, 0x73, 0x65, 0x71, 0x75, 0x65, 0x6e, 0x74, 0x69, + 0x61, 0x6c, 0x2f, 0x63, 0x6f, 0x6e, 0x76, 0x32, 0x64, 0x2f, 0x52, 0x65, + 0x6c, 0x75, 0x3b, 0x73, 0x65, 0x71, 0x75, 0x65, 0x6e, 0x74, 0x69, 0x61, + 0x6c, 0x2f, 0x63, 0x6f, 0x6e, 0x76, 0x32, 0x64, 0x2f, 0x42, 0x69, 0x61, + 0x73, 0x41, 0x64, 0x64, 0x3b, 0x73, 0x65, 0x71, 0x75, 0x65, 0x6e, 0x74, + 0x69, 0x61, 0x6c, 0x2f, 0x63, 0x6f, 0x6e, 0x76, 0x32, 0x64, 0x2f, 0x43, + 0x6f, 0x6e, 0x76, 0x32, 0x44, 0x3b, 0x73, 0x65, 0x71, 0x75, 0x65, 0x6e, + 0x74, 0x69, 0x61, 0x6c, 0x2f, 0x63, 0x6f, 0x6e, 0x76, 0x32, 0x64, 0x2f, + 0x42, 0x69, 0x61, 0x73, 0x41, 0x64, 0x64, 0x2f, 0x52, 0x65, 0x61, 0x64, + 0x56, 0x61, 0x72, 0x69, 0x61, 0x62, 0x6c, 0x65, 0x4f, 0x70, 0x2f, 0x72, + 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x00, 0x04, 0x00, 0x00, 0x00, + 0x01, 0x00, 0x00, 0x00, 0x0e, 0x00, 0x00, 0x00, 0x0e, 0x00, 0x00, 0x00, + 0x10, 0x00, 0x00, 0x00, 0x3a, 0xf8, 0xff, 0xff, 0x00, 0x00, 0x00, 0x09, + 0xd4, 0x02, 0x00, 0x00, 0x08, 0x00, 0x00, 0x00, 0xac, 0x02, 0x00, 0x00, + 0x04, 0x00, 0x00, 0x00, 0xc4, 0xf6, 0xff, 0xff, 0x1c, 0x02, 0x00, 0x00, + 0x94, 0x01, 0x00, 0x00, 0x0c, 0x01, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, + 0x20, 0x00, 0x00, 
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x20, 0x00, 0x00, 0x00, + 0xb9, 0x37, 0x74, 0x3a, 0x8b, 0xfe, 0x77, 0x3a, 0x54, 0xc7, 0x75, 0x3a, + 0xc4, 0x11, 0x78, 0x3a, 0xb9, 0x90, 0x74, 0x3a, 0x3b, 0x97, 0x7b, 0x3a, + 0xe8, 0x57, 0x75, 0x3a, 0x0c, 0x0e, 0x74, 0x3a, 0x76, 0x8b, 0x79, 0x3a, + 0x2b, 0x7b, 0x6d, 0x3a, 0x17, 0xad, 0x71, 0x3a, 0xe4, 0x9b, 0x77, 0x3a, + 0x0b, 0xab, 0x7a, 0x3a, 0x9e, 0x12, 0x75, 0x3a, 0x8c, 0xcf, 0x79, 0x3a, + 0xa0, 0x5a, 0x79, 
0x3a, 0x74, 0xc3, 0x78, 0x3a, 0x0e, 0xa9, 0x74, 0x3a, + 0x6b, 0xf8, 0x6f, 0x3a, 0x53, 0xeb, 0x72, 0x3a, 0xff, 0xe2, 0x73, 0x3a, + 0x3b, 0x38, 0x78, 0x3a, 0xed, 0x9e, 0x76, 0x3a, 0x77, 0xbc, 0x6d, 0x3a, + 0x4f, 0xf5, 0x71, 0x3a, 0x17, 0xc9, 0x74, 0x3a, 0x87, 0x84, 0x6b, 0x3a, + 0x4b, 0xc5, 0x78, 0x3a, 0xdd, 0x02, 0x75, 0x3a, 0x0e, 0xcf, 0x78, 0x3a, + 0x14, 0x40, 0x75, 0x3a, 0x2e, 0xca, 0x72, 0x3a, 0x20, 0x00, 0x00, 0x00, + 0x95, 0x2f, 0xef, 0x3d, 0x47, 0x1c, 0xf0, 0x3d, 0xc5, 0xdb, 0xf3, 0x3d, + 0x2e, 0x57, 0xe7, 0x3d, 0x98, 0xa7, 0xf2, 0x3d, 0x98, 0x89, 0xe4, 0x3d, + 0x38, 0x6d, 0xf3, 0x3d, 0x3f, 0x38, 0xe2, 0x3d, 0x91, 0x6f, 0xf0, 0x3d, + 0x35, 0xa0, 0xeb, 0x3d, 0x42, 0x3d, 0xeb, 0x3d, 0xed, 0x89, 0xe7, 0x3d, + 0xb5, 0xb5, 0xf8, 0x3d, 0x79, 0x28, 0xf3, 0x3d, 0xed, 0xdb, 0xf7, 0x3d, + 0xeb, 0x67, 0xf7, 0x3d, 0xed, 0xd1, 0xf6, 0x3d, 0xbc, 0xbf, 0xf2, 0x3d, + 0x7a, 0x18, 0xee, 0x3d, 0x7c, 0x05, 0xf1, 0x3d, 0x63, 0x69, 0xe8, 0x3d, + 0xbb, 0xc0, 0xf1, 0x3d, 0xaf, 0xb1, 0xf4, 0x3d, 0xfe, 0xe0, 0xeb, 0x3d, + 0xb6, 0x60, 0xec, 0x3d, 0x8c, 0x32, 0xf0, 0x3d, 0x7e, 0xad, 0xe9, 0x3d, + 0xc0, 0xd3, 0xf6, 0x3d, 0xd7, 0x18, 0xf3, 0x3d, 0x40, 0x53, 0xf0, 0x3d, + 0x2c, 0xdc, 0xf1, 0x3d, 0x9a, 0xe4, 0xf0, 0x3d, 0x20, 0x00, 0x00, 0x00, + 0x4a, 0x4f, 0xf2, 0xbd, 0x8e, 0x0e, 0xf6, 0xbd, 0x74, 0x46, 0xec, 0xbd, + 0xa0, 0x21, 0xf6, 0xbd, 0x8e, 0x27, 0xf0, 0xbd, 0x0d, 0xa0, 0xf9, 0xbd, + 0x0c, 0x97, 0xec, 0xbd, 0xf0, 0x25, 0xf2, 0xbd, 0x5f, 0x98, 0xf7, 0xbd, + 0x27, 0x8d, 0xe8, 0xbd, 0xbd, 0xc9, 0xef, 0xbd, 0xac, 0xac, 0xf5, 0xbd, + 0x5a, 0x94, 0xed, 0xbd, 0x5a, 0x64, 0xf1, 0xbd, 0x2a, 0xa7, 0xe9, 0xbd, + 0x3c, 0x93, 0xf3, 0xbd, 0xf8, 0x2b, 0xf3, 0xbd, 0xf6, 0x35, 0xed, 0xbd, + 0x94, 0xf4, 0xed, 0xbd, 0x70, 0x94, 0xe9, 0xbd, 0x39, 0xfb, 0xf1, 0xbd, + 0xcb, 0x47, 0xf6, 0xbd, 0x88, 0xb9, 0xe7, 0xbd, 0x49, 0x62, 0xe9, 0xbd, + 0x64, 0x11, 0xf0, 0xbd, 0x85, 0xdf, 0xf2, 0xbd, 0x5c, 0x61, 0xe8, 0xbd, + 0x22, 0x46, 0xf3, 0xbd, 0x5a, 0x8e, 0xf0, 0xbd, 0x70, 0xdd, 0xf6, 0xbd, + 0x94, 0x55, 0xf3, 
0xbd, 0x57, 0xba, 0xf0, 0xbd, 0x1a, 0x00, 0x00, 0x00, + 0x73, 0x65, 0x71, 0x75, 0x65, 0x6e, 0x74, 0x69, 0x61, 0x6c, 0x2f, 0x63, + 0x6f, 0x6e, 0x76, 0x32, 0x64, 0x5f, 0x31, 0x2f, 0x43, 0x6f, 0x6e, 0x76, + 0x32, 0x44, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x20, 0x00, 0x00, 0x00, + 0x03, 0x00, 0x00, 0x00, 0x03, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, + 0x2a, 0xfb, 0xff, 0xff, 0x00, 0x00, 0x00, 0x09, 0x94, 0x01, 0x00, 0x00, + 0x07, 0x00, 0x00, 0x00, 0x6c, 0x01, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, + 0xb4, 0xf9, 0xff, 0xff, 0x1c, 0x01, 0x00, 0x00, 0xd4, 0x00, 0x00, 0x00, + 0x8c, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, + 0xe6, 0x69, 0xc5, 0x3a, 0xa0, 0x8d, 0xa8, 0x3a, 0xfe, 0x5c, 0xc1, 0x3a, + 0x84, 0x01, 0xcb, 0x3a, 0xa2, 0xc2, 0xb5, 0x3a, 0x42, 0x01, 0xd1, 0x3a, + 0xd7, 0x01, 0xcc, 0x3a, 0x20, 0xd8, 0xc7, 0x3a, 0x28, 0x80, 0xa4, 0x3a, + 0xd9, 0x25, 0xbe, 0x3a, 0x39, 0x6f, 0xc4, 0x3a, 0x59, 0x6c, 0xcb, 0x3a, + 0xb8, 0x0a, 0xc2, 0x3a, 0x73, 0x3f, 0xca, 0x3a, 0xb9, 0xed, 0xc5, 0x3a, + 0xe9, 0x9f, 0xc1, 0x3a, 0x10, 0x00, 0x00, 0x00, 0x5b, 0x2e, 0x2f, 0x3e, + 0x3e, 0xd9, 0x06, 0x3e, 0x44, 0xda, 0x3f, 0x3e, 0xd3, 0x09, 0x22, 0x3e, + 0x1d, 0x57, 0x34, 
0x3e, 0xa4, 0xb6, 0x44, 0x3e, 0xd3, 0x69, 0x4a, 0x3e, + 0x70, 0x48, 0x46, 0x3e, 0x28, 0x37, 0x23, 0x3e, 0xe6, 0xdb, 0x06, 0x3e, + 0x3c, 0x1d, 0x34, 0x3e, 0x36, 0xba, 0x16, 0x3e, 0x24, 0xa4, 0x34, 0x3e, + 0xf4, 0xfb, 0x37, 0x3e, 0xd6, 0x7b, 0x8a, 0x3d, 0x00, 0x85, 0xe3, 0x3d, + 0x10, 0x00, 0x00, 0x00, 0x12, 0xdf, 0x43, 0xbe, 0x85, 0x3c, 0x27, 0xbe, + 0x54, 0xcd, 0x0d, 0xbe, 0x81, 0x6b, 0x49, 0xbe, 0x33, 0xb1, 0xe7, 0xbd, + 0x3f, 0x5f, 0x4f, 0xbe, 0xa1, 0x63, 0x3e, 0xbe, 0xbb, 0xa7, 0xea, 0xbd, + 0x2d, 0x8c, 0x0e, 0xbe, 0x8d, 0xa9, 0x3c, 0xbe, 0x5b, 0xe6, 0x42, 0xbe, + 0x80, 0xd5, 0x49, 0xbe, 0xa3, 0x86, 0x40, 0xbe, 0xf4, 0xaa, 0x48, 0xbe, + 0xde, 0x61, 0x44, 0xbe, 0xa9, 0x1c, 0x40, 0xbe, 0x18, 0x00, 0x00, 0x00, + 0x73, 0x65, 0x71, 0x75, 0x65, 0x6e, 0x74, 0x69, 0x61, 0x6c, 0x2f, 0x63, + 0x6f, 0x6e, 0x76, 0x32, 0x64, 0x2f, 0x43, 0x6f, 0x6e, 0x76, 0x32, 0x44, + 0x00, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, + 0x03, 0x00, 0x00, 0x00, 0x03, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, + 0xda, 0xfc, 0xff, 0xff, 0x00, 0x00, 0x00, 0x09, 0x64, 0x00, 0x00, 0x00, + 0x06, 0x00, 0x00, 0x00, 0x40, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, + 0x64, 0xfb, 0xff, 0xff, 0x2c, 0x00, 0x00, 0x00, 0x20, 0x00, 0x00, 0x00, + 0x14, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, + 0x07, 0x72, 0x1e, 0x3a, 0x01, 0x00, 0x00, 0x00, 0x32, 0xe2, 0x9b, 0x3d, + 0x01, 0x00, 0x00, 0x00, 0x23, 0x35, 0x9d, 0xbd, 0x17, 0x00, 0x00, 0x00, + 0x73, 0x65, 0x71, 0x75, 0x65, 0x6e, 0x74, 0x69, 0x61, 0x6c, 0x2f, 0x64, + 0x65, 0x6e, 0x73, 0x65, 0x2f, 0x4d, 0x61, 0x74, 0x4d, 0x75, 0x6c, 0x00, + 0x02, 0x00, 0x00, 0x00, 0x0a, 0x00, 0x00, 0x00, 0x80, 0x04, 0x00, 0x00, + 0x52, 0xfd, 0xff, 0xff, 0x00, 0x00, 0x00, 0x02, 0x38, 0x00, 0x00, 0x00, + 0x05, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x08, 0x00, 0x00, 0x00, + 0x04, 0x00, 0x04, 0x00, 0x04, 0x00, 0x00, 0x00, 0x18, 0x00, 0x00, 0x00, + 0x73, 0x65, 0x71, 
0x75, 0x65, 0x6e, 0x74, 0x69, 0x61, 0x6c, 0x2f, 0x66, + 0x6c, 0x61, 0x74, 0x74, 0x65, 0x6e, 0x2f, 0x43, 0x6f, 0x6e, 0x73, 0x74, + 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, + 0x9a, 0xfd, 0xff, 0xff, 0x00, 0x00, 0x00, 0x02, 0x68, 0x00, 0x00, 0x00, + 0x04, 0x00, 0x00, 0x00, 0x28, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, + 0x8c, 0xfd, 0xff, 0xff, 0x14, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, + 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x01, 0x00, 0x00, 0x00, 0xfc, 0x41, 0x4c, 0x35, 0x30, 0x00, 0x00, 0x00, + 0x73, 0x65, 0x71, 0x75, 0x65, 0x6e, 0x74, 0x69, 0x61, 0x6c, 0x2f, 0x64, + 0x65, 0x6e, 0x73, 0x65, 0x2f, 0x42, 0x69, 0x61, 0x73, 0x41, 0x64, 0x64, + 0x2f, 0x52, 0x65, 0x61, 0x64, 0x56, 0x61, 0x72, 0x69, 0x61, 0x62, 0x6c, + 0x65, 0x4f, 0x70, 0x2f, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, + 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x0a, 0x00, 0x00, 0x00, + 0x12, 0xfe, 0xff, 0xff, 0x00, 0x00, 0x00, 0x02, 0xdc, 0x01, 0x00, 0x00, + 0x03, 0x00, 0x00, 0x00, 0x9c, 0x01, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, + 0x04, 0xfe, 0xff, 0xff, 0x0c, 0x01, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, + 0x20, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x20, 0x00, 0x00, 0x00, + 0x03, 0xf9, 0x09, 0x36, 0x3a, 0x1b, 0x0c, 0x36, 0xc6, 0xda, 0x0a, 0x36, + 0x16, 0x26, 0x0c, 0x36, 0x4b, 0x2b, 0x0a, 0x36, 0x60, 0x23, 0x0e, 0x36, + 0xd3, 0x9b, 0x0a, 0x36, 0x78, 0xe1, 0x09, 0x36, 0x78, 0xfb, 0x0c, 0x36, + 0xb6, 0x2a, 0x06, 0x36, 0x6f, 0x89, 0x08, 0x36, 0x7e, 0xe3, 0x0b, 0x36, + 0xf0, 0x9d, 0x0d, 0x36, 0xae, 0x74, 0x0a, 0x36, 0xef, 0x21, 0x0d, 0x36, + 0xe0, 0xdf, 0x0c, 0x36, 0x79, 0x8a, 0x0c, 0x36, 0x0a, 0x39, 0x0a, 0x36, + 0xbb, 0x92, 0x07, 0x36, 0x39, 0x3d, 0x09, 0x36, 0x25, 0xc9, 0x09, 0x36, + 0xd1, 0x3b, 0x0c, 0x36, 0x93, 0x54, 0x0b, 0x36, 0x9a, 0x4f, 0x06, 0x36, + 0x3c, 0xb2, 0x08, 0x36, 0x23, 0x4b, 0x0a, 0x36, 0xbe, 0x0e, 0x05, 0x36, + 0x83, 0x8b, 0x0c, 0x36, 0xc7, 0x6b, 0x0a, 0x36, 0x07, 0x91, 0x0c, 0x36, + 0x5d, 0x8e, 0x0a, 0x36, 0x7f, 0x2a, 0x09, 0x36, 0x33, 0x00, 0x00, 0x00, + 0x73, 0x65, 0x71, 0x75, 0x65, 0x6e, 0x74, 0x69, 0x61, 0x6c, 0x2f, 0x63, + 0x6f, 0x6e, 0x76, 0x32, 0x64, 0x5f, 0x31, 0x2f, 0x42, 0x69, 0x61, 0x73, + 0x41, 0x64, 0x64, 0x2f, 0x52, 0x65, 0x61, 0x64, 0x56, 0x61, 0x72, 0x69, + 0x61, 0x62, 0x6c, 0x65, 0x4f, 0x70, 0x2f, 0x72, 0x65, 0x73, 0x6f, 0x75, + 0x72, 0x63, 0x65, 0x00, 0x01, 0x00, 0x00, 0x00, 0x20, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x0e, 
0x00, 0x18, 0x00, 0x08, 0x00, 0x07, 0x00, 0x0c, 0x00, + 0x10, 0x00, 0x14, 0x00, 0x0e, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, + 0x2c, 0x01, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0xec, 0x00, 0x00, 0x00, + 0x10, 0x00, 0x00, 0x00, 0x0c, 0x00, 0x0c, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x04, 0x00, 0x08, 0x00, 0x0c, 0x00, 0x00, 0x00, 0x90, 0x00, 0x00, 0x00, + 0x04, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, + 0xe1, 0x22, 0xc6, 0x36, 0x90, 0x2b, 0xa9, 0x36, 0x2d, 0x12, 0xc2, 0x36, + 0xbc, 0xbf, 0xcb, 0x36, 0xf2, 0x6c, 0xb6, 0x36, 0x19, 0xc5, 0xd1, 0x36, + 0xff, 0xc0, 0xcc, 0x36, 0x62, 0x93, 0xc8, 0x36, 0x4c, 0x1a, 0xa5, 0x36, + 0x05, 0xd8, 0xbe, 0x36, 0x49, 0x27, 0xc5, 0x36, 0xf5, 0x2a, 0xcc, 0x36, + 0x8a, 0xc0, 0xc2, 0x36, 0xf5, 0xfc, 0xca, 0x36, 0x2f, 0xa7, 0xc6, 0x36, + 0x57, 0x55, 0xc2, 0x36, 0x31, 0x00, 0x00, 0x00, 0x73, 0x65, 0x71, 0x75, + 0x65, 0x6e, 0x74, 0x69, 0x61, 0x6c, 0x2f, 0x63, 0x6f, 0x6e, 0x76, 0x32, + 0x64, 0x2f, 0x42, 0x69, 0x61, 0x73, 0x41, 0x64, 0x64, 0x2f, 0x52, 0x65, + 0x61, 0x64, 0x56, 0x61, 0x72, 0x69, 0x61, 0x62, 0x6c, 0x65, 0x4f, 0x70, + 0x2f, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x00, 0x00, 0x00, + 0x01, 0x00, 0x00, 
0x00, 0x10, 0x00, 0x00, 0x00, 0x14, 0x00, 0x1c, 0x00, + 0x08, 0x00, 0x07, 0x00, 0x0c, 0x00, 0x10, 0x00, 0x14, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x18, 0x00, 0x14, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x09, + 0x88, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x68, 0x00, 0x00, 0x00, + 0x28, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, + 0xff, 0xff, 0xff, 0xff, 0x10, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, + 0x01, 0x00, 0x00, 0x00, 0x0c, 0x00, 0x14, 0x00, 0x04, 0x00, 0x08, 0x00, + 0x0c, 0x00, 0x10, 0x00, 0x0c, 0x00, 0x00, 0x00, 0x30, 0x00, 0x00, 0x00, + 0x24, 0x00, 0x00, 0x00, 0x18, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, + 0x01, 0x00, 0x00, 0x00, 0x80, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, + 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0xf0, 0x77, 0x80, 0x3b, + 0x01, 0x00, 0x00, 0x00, 0xf0, 0xee, 0x7f, 0x3f, 0x01, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x11, 0x00, 0x00, 0x00, 0x63, 0x6f, 0x6e, 0x76, + 0x32, 0x64, 0x5f, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x5f, 0x69, 0x6e, 0x74, + 0x38, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, + 0x10, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, + 0x06, 0x00, 0x00, 0x00, 0x70, 0x00, 0x00, 0x00, 0x54, 0x00, 0x00, 0x00, + 0x40, 0x00, 0x00, 0x00, 0x28, 0x00, 0x00, 0x00, 0x1c, 0x00, 0x00, 0x00, + 0x04, 0x00, 0x00, 0x00, 0xca, 0xff, 0xff, 0xff, 0x00, 0x00, 0x00, 0x06, + 0x02, 0x00, 0x00, 0x00, 0x00, 0x00, 0x06, 0x00, 0x08, 0x00, 0x07, 0x00, + 0x06, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x72, 0xe6, 0xff, 0xff, 0xff, + 0x00, 0x00, 0x00, 0x09, 0x04, 0x00, 0x00, 0x00, 0x00, 0x00, 0x06, 0x00, + 0x06, 0x00, 0x05, 0x00, 0x06, 0x00, 0x00, 0x00, 0x00, 0x16, 0x0a, 0x00, + 0x0e, 0x00, 0x07, 0x00, 0x00, 0x00, 0x08, 0x00, 0x0a, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x11, 0x02, 0x00, 0x00, 0x00, 0x00, 0x00, 0x0a, 0x00, + 0x0c, 0x00, 0x07, 0x00, 0x00, 0x00, 0x08, 0x00, 0x0a, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x03, 0x03, 0x00, 0x00, 0x00}; + +const unsigned int kTestConvModelDataSize 
= 21344; diff --git a/tensorflow/lite/micro/testing/test_conv_model.h b/tensorflow/lite/micro/testing/test_conv_model.h new file mode 100644 index 0000000..2103196 --- /dev/null +++ b/tensorflow/lite/micro/testing/test_conv_model.h @@ -0,0 +1,23 @@ +/* Copyright 2020 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#ifndef TENSORFLOW_LITE_MICRO_TESTING_TEST_CONV_MODEL_H_ +#define TENSORFLOW_LITE_MICRO_TESTING_TEST_CONV_MODEL_H_ + +// See generate_test_models.py for updating the contents of this model: +extern const unsigned char kTestConvModelData[]; +extern const unsigned int kTestConvModelDataSize; + +#endif // TENSORFLOW_LITE_MICRO_TESTING_TEST_CONV_MODEL_H_ diff --git a/tensorflow/lite/micro/testing/test_hexagon_binary.sh b/tensorflow/lite/micro/testing/test_hexagon_binary.sh new file mode 100755 index 0000000..a245743 --- /dev/null +++ b/tensorflow/lite/micro/testing/test_hexagon_binary.sh @@ -0,0 +1,40 @@ +#!/bin/bash -e +# Copyright 2020 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================== +# +# Tests a Qualcomm Hexagon binary by parsing the log output. +# +# First argument is the binary location. +# Second argument is a regular expression that's required to be in the output +# logs for the test to pass. + +declare -r TEST_TMPDIR=/tmp/test_hexagon_binary/ +declare -r MICRO_LOG_PATH=${TEST_TMPDIR}/$1 +declare -r MICRO_LOG_FILENAME=${MICRO_LOG_PATH}/logs.txt +mkdir -p ${MICRO_LOG_PATH} + +hexagon-sim $1 2>&1 | tee ${MICRO_LOG_FILENAME} + +if [[ ${2} != "non_test_binary" ]] +then + if grep -q "$2" ${MICRO_LOG_FILENAME} + then + echo "$1: PASS" + exit 0 + else + echo "$1: FAIL - '$2' not found in logs." + exit 1 + fi +fi diff --git a/tensorflow/lite/micro/testing/test_with_arc_mdb.sh b/tensorflow/lite/micro/testing/test_with_arc_mdb.sh new file mode 100644 index 0000000..15b6b83 --- /dev/null +++ b/tensorflow/lite/micro/testing/test_with_arc_mdb.sh @@ -0,0 +1,39 @@ +#!/bin/bash -e +# Copyright 2021 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================== +# +# +# Parameters: +# ${1} - test binary +# ${2} - tcf file location. +# ${3} - string that is checked for pass/fail. + +set -e + +TEST_BINARY=${1} +TCF_FILE=${2} +PASS_STRING=${3} + +# Running test using MDB. If "non_test_binary" is passed as PASS_STRING, skip check. Otherwise, check if test passed. +mdb -run -tcf=${TCF_FILE} ${TEST_BINARY} 2>&1 | tee /dev/stderr | grep "${PASS_STRING}" &>/dev/null || [[ "${PASS_STRING}" == "non_test_binary" ]] + +if [ $? == 0 ]; then + exit 0 +else + exit 1 +fi + +set +e + diff --git a/tensorflow/lite/micro/testing/test_with_arm_corstone_300.sh b/tensorflow/lite/micro/testing/test_with_arm_corstone_300.sh new file mode 100755 index 0000000..9b39ee4 --- /dev/null +++ b/tensorflow/lite/micro/testing/test_with_arm_corstone_300.sh @@ -0,0 +1,52 @@ +#!/bin/bash -e +# Copyright 2021 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================== +# +# +# Parameters: +# ${1} - path to a binary to test or directory (all *_test will be run). +# ${2} - String that is checked for pass/fail. +# ${3} - target (e.g. cortex_m_generic.) 
+ +set -e + +BINARY_TO_TEST=${1} +PASS_STRING=${2} +TARGET=${3} + +RESULTS_DIRECTORY=/tmp/${TARGET}_logs +MICRO_LOG_FILENAME=${RESULTS_DIRECTORY}/logs.txt +mkdir -p ${RESULTS_DIRECTORY} + +FVP="FVP_Corstone_SSE-300_Ethos-U55 " +FVP+="-C ethosu.num_macs=256 " +FVP+="-C mps3_board.visualisation.disable-visualisation=1 " +FVP+="-C mps3_board.telnetterminal0.start_telnet=0 " +FVP+='-C mps3_board.uart0.out_file="-" ' +FVP+='-C mps3_board.uart0.unbuffered_output=1 ' +FVP+='-C mps3_board.uart0.shutdown_on_eot=1' +${FVP} ${BINARY_TO_TEST} | tee ${MICRO_LOG_FILENAME} + +if [[ ${2} != "non_test_binary" ]] +then + if grep -q "$PASS_STRING" ${MICRO_LOG_FILENAME} + then + echo "$BINARY_TO_TEST: PASS" + exit 0 + else + echo "$BINARY_TO_TEST: FAIL - '$PASS_STRING' not found in logs." + exit 1 + fi +fi diff --git a/tensorflow/lite/micro/testing/test_with_qemu.sh b/tensorflow/lite/micro/testing/test_with_qemu.sh new file mode 100755 index 0000000..84947a2 --- /dev/null +++ b/tensorflow/lite/micro/testing/test_with_qemu.sh @@ -0,0 +1,44 @@ +#!/bin/bash -e +# Copyright 2023 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================== +# +# Tests a binary with QEMU by parsing the log output. +# Parameters: +# ${1} suffix for qemu binary (e.g. to use qemu-arm ${1} should be arm +# ${2} architecture to pass to qemu (e.g. 
cortex-m3) +# ${3} cross-compiled binary to be emulated +# ${4} - String that is checked for pass/fail. +# ${5} - target (cortex_m_qemu etc.) + +SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)" +TFLM_ROOT_DIR=${SCRIPT_DIR}/../../../../ + +TEST_TMPDIR=/tmp/test_${5} +MICRO_LOG_PATH=${TEST_TMPDIR}/${3} +MICRO_LOG_FILENAME=${MICRO_LOG_PATH}/logs.txt + +mkdir -p ${MICRO_LOG_PATH} +qemu-${1} -cpu ${2} ${3} 2>&1 | tee ${MICRO_LOG_FILENAME} +if [[ ${4} != "non_test_binary" ]] +then + if grep -q "${4}" ${MICRO_LOG_FILENAME} + then + echo "Pass" + exit 0 + else + echo "Fail" + exit 1 + fi +fi diff --git a/tensorflow/lite/micro/testing/test_with_renode.sh b/tensorflow/lite/micro/testing/test_with_renode.sh new file mode 100755 index 0000000..9e87f05 --- /dev/null +++ b/tensorflow/lite/micro/testing/test_with_renode.sh @@ -0,0 +1,112 @@ +#!/bin/bash -e +# Copyright 2020 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================== +# +# +# Parameters: +# ${1} - space-separated list of binaries to test +# ${2} - String that is checked for pass/fail. +# ${3} - target (bluepill etc.) + +set -e + +FILES="${1}" +PASS_STRING=${2} +TARGET=${3} + +SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)" +TFLM_ROOT_DIR=${SCRIPT_DIR}/.. + +# The renode script for the board being emulated. 
+RESC_PATH=${TFLM_ROOT_DIR}/testing/${TARGET}.resc + +# Robot file with definition of custom keywords used in test suite. +ROBOT_RESOURCE=${TFLM_ROOT_DIR}/testing/robot.resource.txt + +# Robot file with definitions of target-specific variables +TARGET_RESOURCE=${TFLM_ROOT_DIR}/testing/${TARGET}.resource.txt + +# Renode's entrypoint for using the Robot Framework. +RENODE_TEST_SCRIPT=${TFLM_ROOT_DIR}/tools/make/downloads/renode/renode-test + +if [ ! -f "${RENODE_TEST_SCRIPT}" ]; then + echo "The renode test script: ${RENODE_TEST_SCRIPT} does not exist. Please " \ + "make sure that you have correctly installed Renode for TFLM. See " \ + "tensorflow/lite/micro/docs/renode.md for more details." + exit 1 +fi + +if ! ${RENODE_TEST_SCRIPT} &> /dev/null +then + echo "The following command failed: ${RENODE_TEST_SCRIPT}. Please " \ + "make sure that you have correctly installed Renode for TFLM. See " \ + "tensorflow/lite/micro/docs/renode.md for more details." + exit 1 +fi + +# Files generated by this script will go in the RESULTS_DIRECTORY. These include: +# 1. UART_LOG: Output log from the renode uart. +# 2. html and xml files generated by the Robot Framework. +# 3. ROBOT_SCRIPT: Generated test suite. +# +# Note that with the current approach (in generated ROBOT_SCRIPT), multiple test +# binaries are run in a the same test suite and UART_LOG only has logs from the last test +# binary since it is deleted prior to running each test binary. If some test fails +# the UART_LOG will be printed to console log before being deleted. 
+RESULTS_DIRECTORY=/tmp/renode_${TARGET}_logs +mkdir -p ${RESULTS_DIRECTORY} + +UART_LOG=${RESULTS_DIRECTORY}/uart_log.txt + +ROBOT_SCRIPT=${RESULTS_DIRECTORY}/${TARGET}.robot + +cat > $ROBOT_SCRIPT <> ${ROBOT_SCRIPT} <&1 | tee ${MICRO_LOG_FILENAME} + +if [[ ${2} != "non_test_binary" ]] +then + if grep -q "$2" ${MICRO_LOG_FILENAME} + then + exit 0 + else + exit 1 + fi +fi + diff --git a/tensorflow/lite/micro/testing/util_test.cc b/tensorflow/lite/micro/testing/util_test.cc new file mode 100644 index 0000000..f359bea --- /dev/null +++ b/tensorflow/lite/micro/testing/util_test.cc @@ -0,0 +1,50 @@ +/* Copyright 2020 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#include "tensorflow/lite/micro/testing/micro_test.h" + +TF_LITE_MICRO_TESTS_BEGIN + +TF_LITE_MICRO_TEST(ArgumentsExecutedOnlyOnce) { + float count = 0.; + // Make sure either argument is executed once after macro expansion. 
+ TF_LITE_MICRO_EXPECT_NEAR(0, count++, 0.1f); + TF_LITE_MICRO_EXPECT_NEAR(1, count++, 0.1f); + TF_LITE_MICRO_EXPECT_NEAR(count++, 2, 0.1f); + TF_LITE_MICRO_EXPECT_NEAR(count++, 3, 0.1f); +} + +TF_LITE_MICRO_TEST(TestExpectEQ) { + // test TF_LITE_EXPECT_EQ for expected behavior + double a = 2.1; + TF_LITE_MICRO_EXPECT_EQ(0, 0); + TF_LITE_MICRO_EXPECT_EQ(true, true); + TF_LITE_MICRO_EXPECT_EQ(false, false); + TF_LITE_MICRO_EXPECT_EQ(2.1, a); + TF_LITE_MICRO_EXPECT_EQ(1.0, true); + TF_LITE_MICRO_EXPECT_EQ(1.0, 1); +} + +TF_LITE_MICRO_TEST(TestExpectNE) { + // test TF_LITE_EXPECT_NE for expected behavior + float b = 2.1f; + double a = 2.1; + TF_LITE_MICRO_EXPECT_NE(0, 1); + TF_LITE_MICRO_EXPECT_NE(true, false); + TF_LITE_MICRO_EXPECT_NE(2.10005f, b); + TF_LITE_MICRO_EXPECT_NE(2.2, a); +} + +TF_LITE_MICRO_TESTS_END diff --git a/tensorflow/lite/micro/testing_helpers_test.cc b/tensorflow/lite/micro/testing_helpers_test.cc new file mode 100644 index 0000000..95b2bcd --- /dev/null +++ b/tensorflow/lite/micro/testing_helpers_test.cc @@ -0,0 +1,132 @@ +/* Copyright 2019 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/ + +#include "tensorflow/lite/micro/test_helpers.h" +#include "tensorflow/lite/micro/testing/micro_test.h" + +TF_LITE_MICRO_TESTS_BEGIN + +TF_LITE_MICRO_TEST(CreateQuantizedBiasTensor) { + float input_scale = 0.5; + float weight_scale = 0.5; + constexpr int tensor_size = 12; + int dims_arr[] = {4, 2, 3, 2, 1}; + int32_t quantized[tensor_size]; + float pre_quantized[] = {-10, -5, -4, -3, -2, -1, 0, 1, 2, 3, 4, 10}; + int32_t expected_quantized_values[] = {-40, -20, -16, -12, -8, -4, + 0, 4, 8, 12, 16, 40}; + TfLiteIntArray* dims = tflite::testing::IntArrayFromInts(dims_arr); + + TfLiteTensor result = tflite::testing::CreateQuantizedBiasTensor( + pre_quantized, quantized, dims, input_scale, weight_scale); + + TF_LITE_MICRO_EXPECT_EQ(result.bytes, tensor_size * sizeof(int32_t)); + TF_LITE_MICRO_EXPECT(result.dims == dims); + TF_LITE_MICRO_EXPECT_EQ(result.params.scale, input_scale * weight_scale); + for (int i = 0; i < tensor_size; i++) { + TF_LITE_MICRO_EXPECT_EQ(expected_quantized_values[i], result.data.i32[i]); + } +} + +TF_LITE_MICRO_TEST(PackInt4Basic) { + int8_t input[4] = {7, 3, 2, 5}; + int input_size = 4; + const int8_t expect_output[2] = {0x37, 0x52}; + int output_size = 2; + + tflite::testing::PackInt4ValuesDenselyInPlace( + reinterpret_cast(input), input_size); + for (int i = 0; i < output_size; i++) { + TF_LITE_MICRO_EXPECT_EQ(expect_output[i], input[i]); + } +} + +TF_LITE_MICRO_TEST(PackInt4BasicOddLength) { + int8_t input[4] = {1, 3, 2}; + const int8_t expect_output[2] = {0x31, 0x2}; + int output_size = 2; + int input_size = 3; + + tflite::testing::PackInt4ValuesDenselyInPlace( + reinterpret_cast(input), input_size); + for (int i = 0; i < output_size; i++) { + TF_LITE_MICRO_EXPECT_EQ(expect_output[i], input[i]); + } +} + +TF_LITE_MICRO_TEST(CreatePerChannelQuantizedBiasTensor) { + float input_scale = 0.5; + float weight_scales[] = {0.5, 1, 2, 4}; + constexpr int tensor_size 
= 12; + const int channels = 4; + int dims_arr[] = {4, 4, 3, 1, 1}; + int32_t quantized[tensor_size]; + float scales[channels + 1]; + int zero_points[] = {4, 0, 0, 0, 0}; + float pre_quantized[] = {-10, -5, -4, -3, -2, -1, 0, 1, 2, 3, 4, 10}; + int32_t expected_quantized_values[] = {-40, -20, -16, -6, -4, -2, + 0, 1, 2, 2, 2, 5}; + TfLiteIntArray* dims = tflite::testing::IntArrayFromInts(dims_arr); + + TfLiteAffineQuantization quant; + TfLiteTensor result = tflite::testing::CreatePerChannelQuantizedBiasTensor( + pre_quantized, quantized, dims, input_scale, weight_scales, scales, + zero_points, &quant, 0); + + // Values in scales array start at index 1 since index 0 is dedicated to + // tracking the tensor size. + for (int i = 0; i < channels; i++) { + TF_LITE_MICRO_EXPECT_EQ(scales[i + 1], input_scale * weight_scales[i]); + } + + TF_LITE_MICRO_EXPECT_EQ(result.bytes, tensor_size * sizeof(int32_t)); + TF_LITE_MICRO_EXPECT(result.dims == dims); + for (int i = 0; i < tensor_size; i++) { + TF_LITE_MICRO_EXPECT_EQ(expected_quantized_values[i], result.data.i32[i]); + } +} + +TF_LITE_MICRO_TEST(CreateSymmetricPerChannelQuantizedTensor) { + const int tensor_size = 12; + constexpr int channels = 2; + int dims_arr[] = {4, channels, 3, 2, 1}; + int8_t quantized[12]; + const float pre_quantized[] = {-127, -55, -4, -3, -2, -1, + 0, 1, 2, 3, 4, 63.5}; + const int8_t expected_quantized_values[] = {-127, -55, -4, -3, -2, -1, + 0, 2, 4, 6, 8, 127}; + float expected_scales[] = {1.0, 0.5}; + TfLiteIntArray* dims = tflite::testing::IntArrayFromInts(dims_arr); + + int zero_points[channels + 1]; + float scales[channels + 1]; + TfLiteAffineQuantization quant; + TfLiteTensor result = + tflite::testing::CreateSymmetricPerChannelQuantizedTensor( + pre_quantized, quantized, dims, scales, zero_points, &quant, 0); + + TF_LITE_MICRO_EXPECT_EQ(result.bytes, tensor_size * sizeof(int8_t)); + TF_LITE_MICRO_EXPECT(result.dims == dims); + TfLiteFloatArray* result_scales = + 
static_cast(result.quantization.params)->scale; + for (int i = 0; i < channels; i++) { + TF_LITE_MICRO_EXPECT_EQ(result_scales->data[i], expected_scales[i]); + } + for (int i = 0; i < tensor_size; i++) { + TF_LITE_MICRO_EXPECT_EQ(expected_quantized_values[i], result.data.int8[i]); + } +} + +TF_LITE_MICRO_TESTS_END diff --git a/tensorflow/lite/micro/tflite_bridge/BUILD b/tensorflow/lite/micro/tflite_bridge/BUILD new file mode 100644 index 0000000..518015a --- /dev/null +++ b/tensorflow/lite/micro/tflite_bridge/BUILD @@ -0,0 +1,49 @@ +load( + "//tensorflow/lite/micro:build_def.bzl", + "micro_copts", +) + +package( + # Disabling layering_check because of http://b/177257332 + features = ["-layering_check"], + licenses = ["notice"], +) + +cc_library( + name = "flatbuffer_conversions_bridge", + srcs = [ + "flatbuffer_conversions_bridge.cc", + ], + hdrs = [ + "flatbuffer_conversions_bridge.h", + ], + copts = micro_copts(), + visibility = [ + "//tensorflow/lite/micro:__pkg__", + ], + deps = [ + ":micro_error_reporter", + "//tensorflow/lite/c:common", + "//tensorflow/lite/core/api", + "//tensorflow/lite/schema:schema_fbs", + ], +) + +cc_library( + name = "micro_error_reporter", + srcs = [ + "micro_error_reporter.cc", + ], + hdrs = [ + "micro_error_reporter.h", + ], + copts = micro_copts(), + visibility = [ + "//tensorflow/lite/micro/tflite_bridge:__pkg__", + ], + deps = [ + "//tensorflow/lite/core/api:error_reporter", + "//tensorflow/lite/micro:micro_compatibility", + "//tensorflow/lite/micro:micro_log", + ], +) diff --git a/tensorflow/lite/micro/tflite_bridge/flatbuffer_conversions_bridge.cc b/tensorflow/lite/micro/tflite_bridge/flatbuffer_conversions_bridge.cc new file mode 100644 index 0000000..20e4ae4 --- /dev/null +++ b/tensorflow/lite/micro/tflite_bridge/flatbuffer_conversions_bridge.cc @@ -0,0 +1,34 @@ +/* Copyright 2021 The TensorFlow Authors. All Rights Reserved. 
+ +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ +#include "tensorflow/lite/micro/tflite_bridge/flatbuffer_conversions_bridge.h" + +#include "tensorflow/lite/c/c_api_types.h" +#include "tensorflow/lite/core/api/error_reporter.h" +#include "tensorflow/lite/core/api/flatbuffer_conversions.h" +#include "tensorflow/lite/micro/tflite_bridge/micro_error_reporter.h" +#include "tensorflow/lite/schema/schema_generated.h" + +namespace tflite { +TfLiteStatus ConvertTensorType(TensorType tensor_type, TfLiteType* type) { + return ConvertTensorType(tensor_type, type, tflite::GetMicroErrorReporter()); +} + +TfLiteStatus CallBuiltinParseFunction(TfLiteBridgeBuiltinParseFunction parser, + const Operator* op, + BuiltinDataAllocator* allocator, + void** builtin_data) { + return parser(op, tflite::GetMicroErrorReporter(), allocator, builtin_data); +} +} // namespace tflite diff --git a/tensorflow/lite/micro/tflite_bridge/flatbuffer_conversions_bridge.h b/tensorflow/lite/micro/tflite_bridge/flatbuffer_conversions_bridge.h new file mode 100644 index 0000000..2c0f369 --- /dev/null +++ b/tensorflow/lite/micro/tflite_bridge/flatbuffer_conversions_bridge.h @@ -0,0 +1,45 @@ +/* Copyright 2021 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. 
+You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ +#ifndef TENSORFLOW_LITE_MICRO_TFLITE_BRIDGE_FLATBUFFER_CONVERSIONS_BRIDGE_H_ +#define TENSORFLOW_LITE_MICRO_TFLITE_BRIDGE_FLATBUFFER_CONVERSIONS_BRIDGE_H_ + +#include "tensorflow/lite/c/c_api_types.h" +#include "tensorflow/lite/core/api/flatbuffer_conversions.h" +#include "tensorflow/lite/schema/schema_generated.h" + +namespace tflite { + +// Forward declaration of the ErrorReporter class to hide it from the TFLM code. +class ErrorReporter; + +using TfLiteBridgeBuiltinDataAllocator = BuiltinDataAllocator; + +using TfLiteBridgeBuiltinParseFunction = + TfLiteStatus (*)(const Operator* op, ErrorReporter* error_reporter, + BuiltinDataAllocator* allocator, void** builtin_data); + +// Converts the tensor data type used in the flatbuffer to the representation +// used by the runtime. 
+TfLiteStatus ConvertTensorType(TensorType tensor_type, TfLiteType* type); + +// CallBuiltinParseFunction is a wrapper function to wrap the parser function +// calls to Call parser(op, allocator, builtin_data) +TfLiteStatus CallBuiltinParseFunction(TfLiteBridgeBuiltinParseFunction parser, + const Operator* op, + BuiltinDataAllocator* allocator, + void** builtin_data); +} // namespace tflite + +#endif // TENSORFLOW_LITE_MICRO_TFLITE_BRIDGE_FLATBUFFER_CONVERSIONS_BRIDGE_H_ diff --git a/tensorflow/lite/micro/tflite_bridge/micro_error_reporter.cc b/tensorflow/lite/micro/tflite_bridge/micro_error_reporter.cc new file mode 100644 index 0000000..63cc42e --- /dev/null +++ b/tensorflow/lite/micro/tflite_bridge/micro_error_reporter.cc @@ -0,0 +1,43 @@ +/* Copyright 2018 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/ + +#include "tensorflow/lite/micro/tflite_bridge/micro_error_reporter.h" + +#include +#include +#include + +#include "tensorflow/lite/micro/micro_log.h" + +namespace { +uint8_t micro_error_reporter_buffer[sizeof(tflite::MicroErrorReporter)]; +tflite::MicroErrorReporter* error_reporter_ = nullptr; + +} // namespace + +namespace tflite { +ErrorReporter* GetMicroErrorReporter() { + if (error_reporter_ == nullptr) { + error_reporter_ = new (micro_error_reporter_buffer) MicroErrorReporter(); + } + return error_reporter_; +} + +int MicroErrorReporter::Report(const char* format, va_list args) { + Log(format, args); + return 0; +} + +} // namespace tflite diff --git a/tensorflow/lite/micro/tflite_bridge/micro_error_reporter.h b/tensorflow/lite/micro/tflite_bridge/micro_error_reporter.h new file mode 100644 index 0000000..d3702f4 --- /dev/null +++ b/tensorflow/lite/micro/tflite_bridge/micro_error_reporter.h @@ -0,0 +1,37 @@ +/* Copyright 2018 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/ +#ifndef TENSORFLOW_LITE_MICRO_TFLITE_BRIDGE_MICRO_ERROR_REPORTER_H_ +#define TENSORFLOW_LITE_MICRO_TFLITE_BRIDGE_MICRO_ERROR_REPORTER_H_ + +#include <cstdarg> + +#include "tensorflow/lite/core/api/error_reporter.h" +#include "tensorflow/lite/micro/compatibility.h" + +namespace tflite { +// Get a pointer to a singleton global error reporter. +ErrorReporter* GetMicroErrorReporter(); +class MicroErrorReporter : public ErrorReporter { + public: + ~MicroErrorReporter() override {} + int Report(const char* format, va_list args) override; + + private: + TF_LITE_REMOVE_VIRTUAL_DELETE +}; + +} // namespace tflite + +#endif // TENSORFLOW_LITE_MICRO_TFLITE_BRIDGE_MICRO_ERROR_REPORTER_H_ diff --git a/tensorflow/lite/micro/tools/BUILD b/tensorflow/lite/micro/tools/BUILD new file mode 100644 index 0000000..1051241 --- /dev/null +++ b/tensorflow/lite/micro/tools/BUILD @@ -0,0 +1,174 @@ +load("@tflm_pip_deps//:requirements.bzl", "requirement") +load("@pybind11_bazel//:build_defs.bzl", "pybind_extension") +load("//tensorflow:extra_rules.bzl", "tflm_application_friends") + +package( + default_visibility = ["//:__subpackages__"], + licenses = ["notice"], +) + +package_group( + name = "tflm_tools", + packages = ["//tensorflow/lite/micro/tools/..."], +) + +package_group( + name = "application_friends", + packages = tflm_application_friends(), +) + +py_library( + name = "generate_cc_arrays_lib", + srcs = ["generate_cc_arrays.py"], + deps = [ + requirement("numpy"), + requirement("pillow"), + ], +) + +py_library( + name = "generate_test_for_model", + srcs = ["generate_test_for_model.py"], + srcs_version = "PY3", + deps = [ + "//tensorflow/lite/python:schema_py", + ], +) + +py_binary( + name = "generate_cc_arrays", + srcs = ["generate_cc_arrays.py"], + deps = [ + requirement("numpy"), + requirement("pillow"), + ], +) + +py_binary( + name = "requantize_flatbuffer", + srcs = [ + "requantize_flatbuffer.py", + 
"requantize_flatbuffer_utils.py", + ], + srcs_version = "PY3", + deps = [ + "//tensorflow/lite/python:schema_py", + "//tensorflow/lite/tools:flatbuffer_utils", + "@absl_py//absl:app", + ], +) + +py_test( + name = "requantize_flatbuffer_test", + srcs = ["requantize_flatbuffer_test.py"], + main = "requantize_flatbuffer_test.py", + python_version = "PY3", + tags = [ + "noasan", + "nomsan", # Python doesn't like these symbols + "noubsan", + ], + deps = [ + ":requantize_flatbuffer", + "//python/tflite_micro:runtime", + requirement("numpy"), + requirement("tensorflow-cpu"), + ], +) + +pybind_extension( + name = "tflite_flatbuffer_align_wrapper", # :tflite_flatbuffer_align_wrapper.so + srcs = [ + "tflite_flatbuffer_align_wrapper.cc", + ], + deps = [ + "//tensorflow/lite/schema:schema_fbs", + "@flatbuffers", + ], +) + +py_binary( + name = "tflite_flatbuffer_align", + srcs = [ + "tflite_flatbuffer_align.py", + ], + data = [ + ":tflite_flatbuffer_align_wrapper.so", + ], + deps = [ + "@absl_py//absl:app", + ], +) + +py_library( + name = "model_transforms_utils", + srcs = ["model_transforms_utils.py"], + data = [ + ":tflite_flatbuffer_align_wrapper.so", + ], + srcs_version = "PY3", + visibility = [ + ":application_friends", + ":tflm_tools", + ], + deps = [ + "//tensorflow/lite/python:schema_py", + "//tensorflow/lite/python:schema_util", + ], +) + +py_library( + name = "tflm_model_transforms_lib", + srcs = ["tflm_model_transforms_lib.py"], + data = [ + ":tflite_flatbuffer_align", + ], + srcs_version = "PY3", + visibility = [ + ":application_friends", + ":tflm_tools", + "//:__subpackages__", + ], + deps = [ + ":model_transforms_utils", + "//tensorflow/lite/micro/python/interpreter/src:runtime", + "//tensorflow/lite/tools:flatbuffer_utils", + "@absl_py//absl/logging", + requirement("numpy"), + requirement("tensorflow-cpu"), + ], +) + +py_binary( + name = "tflm_model_transforms", + srcs = ["tflm_model_transforms.py"], + python_version = "PY3", + srcs_version = "PY3", + deps = [ + 
":tflm_model_transforms_lib", + "@absl_py//absl:app", + "@absl_py//absl/flags", + "@absl_py//absl/logging", + ], +) + +py_test( + name = "tflm_model_transforms_test", + srcs = ["tflm_model_transforms_test.py"], + data = [ + "//tensorflow/lite/micro/models", + ], + main = "tflm_model_transforms_test.py", + python_version = "PY3", + tags = [ + "noasan", + "nomsan", + "noubsan", + ], + deps = [ + ":tflm_model_transforms_lib", + "//tensorflow/lite/micro/examples/recipes:resource_variables_lib", + "@absl_py//absl/testing:parameterized", + requirement("tensorflow-cpu"), + ], +) diff --git a/tensorflow/lite/micro/tools/ci_build/binary_size_history/binary_size.json b/tensorflow/lite/micro/tools/ci_build/binary_size_history/binary_size.json new file mode 100644 index 0000000..7485f93 --- /dev/null +++ b/tensorflow/lite/micro/tools/ci_build/binary_size_history/binary_size.json @@ -0,0 +1 @@ +{"text": "64680", "data": "41304", "bss": "24888", "dec": "130872"} diff --git a/tensorflow/lite/micro/tools/ci_build/binary_size_test/Makefile.inc b/tensorflow/lite/micro/tools/ci_build/binary_size_test/Makefile.inc new file mode 100644 index 0000000..c412d8d --- /dev/null +++ b/tensorflow/lite/micro/tools/ci_build/binary_size_test/Makefile.inc @@ -0,0 +1,19 @@ +# Copyright 2021 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# ============================================================================== + + +# This is used to test binary size workflow. +$(eval $(call microlite_test,binary_size_test,\ + $(TENSORFLOW_ROOT)tensorflow/lite/micro/tools/ci_build/binary_size_test/binary_size_test.cc)) diff --git a/tensorflow/lite/micro/tools/ci_build/binary_size_test/binary_size_test.cc b/tensorflow/lite/micro/tools/ci_build/binary_size_test/binary_size_test.cc new file mode 100644 index 0000000..fc65cc9 --- /dev/null +++ b/tensorflow/lite/micro/tools/ci_build/binary_size_test/binary_size_test.cc @@ -0,0 +1,39 @@ +/* Copyright 2021 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#include "tensorflow/lite/micro/testing/micro_test.h" + +namespace { +// Change this number to have a binary with a different +// size of data section. +constexpr int kSize = 64; +// Initialize this global array so that it goes to data section, not bss. +long random_array[kSize] = {1, 2, 3, 4, 5, 6, 7, 8}; +} // namespace + +TF_LITE_MICRO_TESTS_BEGIN + +TF_LITE_MICRO_TEST(BinarySizeChangeBykSize) { + // Just some code to create a binary and keep data section. 
+ for (int i = 0; i < kSize; i++) { + random_array[i] = i + 1; + } + + for (int i = 0; i < kSize; i++) { + TF_LITE_MICRO_EXPECT_EQ(random_array[i], i + 1); + } +} + +TF_LITE_MICRO_TESTS_END diff --git a/tensorflow/lite/micro/tools/ci_build/helper_functions.sh b/tensorflow/lite/micro/tools/ci_build/helper_functions.sh new file mode 100644 index 0000000..9c25736 --- /dev/null +++ b/tensorflow/lite/micro/tools/ci_build/helper_functions.sh @@ -0,0 +1,42 @@ +#!/usr/bin/env bash +# Copyright 2019 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================== + + +# Collection of helper functions that can be used in the different continuous +# integration scripts. + +# A small utility to run a command, streaming its combined stdout/stderr, and +# then print a timestamped completion message. This helps to keep track of +# progress in the CI logs and makes debugging easier. +function readable_run { + "$@" 2>&1 + echo "Command completed successfully at $(date)" +} + +# Check if the regex ${1} is to be found in the pathspec ${2}. +# An optional error message can be passed with ${3} +function check_contents() { + GREP_OUTPUT=$(git grep -E -rn ${1} -- ${2}) + + if [ "${GREP_OUTPUT}" ]; then + echo "==============================================" + echo "Found matches for ${1} that are not permitted." 
+ echo "${3}" + echo "==============================================" + echo "${GREP_OUTPUT}" + return 1 + fi +} diff --git a/tensorflow/lite/micro/tools/ci_build/size_comp.py b/tensorflow/lite/micro/tools/ci_build/size_comp.py new file mode 100755 index 0000000..def4210 --- /dev/null +++ b/tensorflow/lite/micro/tools/ci_build/size_comp.py @@ -0,0 +1,130 @@ +#!/usr/bin/env python3 +# Copyright 2021 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================== +import argparse +import json +import sys + + +def berkeley_size_format_to_dict(berkeley_size_format): + lines = berkeley_size_format.split('\n') + labels = lines[0].split() + values = lines[1].split() + outdict = {labels[i]: values[i] for i in range(len(labels) - 2)} + return (outdict) + + +def json_to_dict(some_json): + outdict = json.loads(some_json) + return (outdict) + + +def file_to_dict(a_file): + with open(a_file) as the_file: + contents = the_file.read() + if contents[0] == "{": + retdict = json_to_dict(contents) + else: + retdict = berkeley_size_format_to_dict(contents) + + return (retdict) + + +def compare_val_in_files(old_file, new_file, val='bss'): + old_dict = file_to_dict(old_file) + new_dict = file_to_dict(new_file) + + if int(new_dict[val]) > int(old_dict[val]): + print(val, " larger than previous value") + print("old: ", old_dict[val]) + print("new: ", new_dict[val]) + 
print("=====Check failed=====") + sys.exit(1) + + print(val) + print("old: ", old_dict[val]) + print("new: ", new_dict[val]) + print("Check Passed") + + return () + + +def compare_all_val_in_files(old_file, new_file, error_on_mem_increase): + old_dict = file_to_dict(old_file) + new_dict = file_to_dict(new_file) + any_mem_increase = False + for section, val in old_dict.items(): + if int(new_dict[section]) > int(old_dict[section]): + print(section, " larger than previous value") + print("old: ", old_dict[section]) + print("new: ", new_dict[section]) + any_mem_increase = True + else: + print(section) + print("old: ", old_dict[section]) + print("new: ", new_dict[section]) + + if any_mem_increase: + print("Warning: memory footprint increases!") + if error_on_mem_increase: + print("Error on memory footprint increase!!") + sys.exit(1) + + return () + + +def berkeley_size_format_to_json_file(input_file, output_file): + output_dict = file_to_dict(input_file) + with open(output_file, 'w') as outfile: + json.dump(output_dict, outfile) + + return () + + +if __name__ == "__main__": + parser = argparse.ArgumentParser() + parser.add_argument( + "-t", + "--transform", + help="transform a berkeley size format file to a json file", + nargs=2) + parser.add_argument("-c", + "--compare", + help="compare value in old file to new file", + nargs=2) + parser.add_argument("-v", + "--value", + default="bss", + help="value to be compared") + parser.add_argument("-a", + "--compare_all", + help="compare all value in old file to new file", + nargs=2) + parser.add_argument("-e", + "--error_on_mem_increase", + default=False, + action="store_true", + help="error exit on memory footprint increase") + args = parser.parse_args() + + if args.transform: + berkeley_size_format_to_json_file(args.transform[0], args.transform[1]) + + if args.compare: + compare_val_in_files(args.compare[0], args.compare[1], args.value) + + if args.compare_all: + compare_all_val_in_files(args.compare_all[0], 
args.compare_all[1], + args.error_on_mem_increase) diff --git a/tensorflow/lite/micro/tools/ci_build/test_arc.sh b/tensorflow/lite/micro/tools/ci_build/test_arc.sh new file mode 100644 index 0000000..5738e46 --- /dev/null +++ b/tensorflow/lite/micro/tools/ci_build/test_arc.sh @@ -0,0 +1,44 @@ +#!/usr/bin/env bash +# Copyright 2021 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================== +# +# Tests the microcontroller code using ARC platform. +# These tests require a MetaWare C/C++ Compiler. + +set -e + +SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)" +ROOT_DIR=${SCRIPT_DIR}/../../../../.. 
+cd "${ROOT_DIR}" + +source tensorflow/lite/micro/tools/ci_build/helper_functions.sh + +readable_run make -f tensorflow/lite/micro/tools/make/Makefile clean + +TARGET_ARCH=arc +TARGET=arc_custom +OPTIMIZED_KERNEL_DIR=arc_mli + +readable_run make -f tensorflow/lite/micro/tools/make/Makefile \ + TARGET=${TARGET} \ + TARGET_ARCH=${TARGET_ARCH} \ + OPTIMIZED_KERNEL_DIR=${OPTIMIZED_KERNEL_DIR} \ + build -j$(nproc) + +readable_run make -f tensorflow/lite/micro/tools/make/Makefile \ + TARGET=${TARGET} \ + TARGET_ARCH=${TARGET_ARCH} \ + OPTIMIZED_KERNEL_DIR=${OPTIMIZED_KERNEL_DIR} \ + test -j$(nproc) diff --git a/tensorflow/lite/micro/tools/ci_build/test_bazel.sh b/tensorflow/lite/micro/tools/ci_build/test_bazel.sh new file mode 100755 index 0000000..b76ba6e --- /dev/null +++ b/tensorflow/lite/micro/tools/ci_build/test_bazel.sh @@ -0,0 +1,37 @@ +#!/usr/bin/env bash +# Copyright 2019 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================== + +set -e + +SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)" +ROOT_DIR=${SCRIPT_DIR}/../../../../.. +cd "${ROOT_DIR}" + +source tensorflow/lite/micro/tools/ci_build/helper_functions.sh + +# We are using a bazel build followed by bazel test to make sure that the CI +# covers non-test binary targets as well. These were previously covered by +# having build_test but that was removed with #194. 
+ +CC=clang readable_run bazel build ... \ + --build_tag_filters=-no_oss +CC=clang readable_run bazel test ... \ + --test_tag_filters=-no_oss --build_tag_filters=-no_oss \ + --test_output=errors + +# TODO(b/178621680): enable ubsan once bazel + clang + ubsan errors are fixed. +#CC=clang readable_run bazel test tensorflow/lite/micro/... --config=ubsan --test_tag_filters=-no_oss,-noubsan --build_tag_filters=-no_oss,-noubsan + diff --git a/tensorflow/lite/micro/tools/ci_build/test_bazel_asan.sh b/tensorflow/lite/micro/tools/ci_build/test_bazel_asan.sh new file mode 100755 index 0000000..9e025f5 --- /dev/null +++ b/tensorflow/lite/micro/tools/ci_build/test_bazel_asan.sh @@ -0,0 +1,34 @@ +#!/usr/bin/env bash +# Copyright 2019 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================== + +set -e + +SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)" +ROOT_DIR=${SCRIPT_DIR}/../../../../.. +cd "${ROOT_DIR}" + +source tensorflow/lite/micro/tools/ci_build/helper_functions.sh + +# We are using a bazel build followed by bazel test to make sure that the CI +# covers non-test binary targets as well. These were previously covered by +# having build_test but that was removed with #194. + +CC=clang readable_run bazel build tensorflow/lite/micro/... \ + --config=asan --build_tag_filters=-no_oss,-noasan +CC=clang readable_run bazel test tensorflow/lite/micro/... 
\ + --config=asan \ + --test_tag_filters=-no_oss,-noasan --build_tag_filters=-no_oss,-noasan \ + --test_output=errors diff --git a/tensorflow/lite/micro/tools/ci_build/test_bazel_msan.sh b/tensorflow/lite/micro/tools/ci_build/test_bazel_msan.sh new file mode 100755 index 0000000..a0b355a --- /dev/null +++ b/tensorflow/lite/micro/tools/ci_build/test_bazel_msan.sh @@ -0,0 +1,34 @@ +#!/usr/bin/env bash +# Copyright 2019 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================== + +set -e + +SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)" +ROOT_DIR=${SCRIPT_DIR}/../../../../.. +cd "${ROOT_DIR}" + +source tensorflow/lite/micro/tools/ci_build/helper_functions.sh + +# We are using a bazel build followed by bazel test to make sure that the CI +# covers non-test binary targets as well. These were previously covered by +# having build_test but that was removed with #194. + +CC=clang readable_run bazel build tensorflow/lite/micro/... \ + --config=msan --build_tag_filters=-no_oss,-nomsan +CC=clang readable_run bazel test tensorflow/lite/micro/... 
\ + --config=msan \ + --test_tag_filters=-no_oss,-nomsan --build_tag_filters=-no_oss,-nomsan \ + --test_output=errors diff --git a/tensorflow/lite/micro/tools/ci_build/test_bazel_tflite_tools.sh b/tensorflow/lite/micro/tools/ci_build/test_bazel_tflite_tools.sh new file mode 100755 index 0000000..9556cff --- /dev/null +++ b/tensorflow/lite/micro/tools/ci_build/test_bazel_tflite_tools.sh @@ -0,0 +1,26 @@ +#!/usr/bin/env bash +# Copyright 2019 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================== + +set -e + +SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)" +ROOT_DIR=${SCRIPT_DIR}/../../../../.. +cd "${ROOT_DIR}" + +source tensorflow/lite/micro/tools/ci_build/helper_functions.sh + +readable_run bazel test tensorflow/lite/tools/... \ + --test_output=errors diff --git a/tensorflow/lite/micro/tools/ci_build/test_bluepill_no_release.sh b/tensorflow/lite/micro/tools/ci_build/test_bluepill_no_release.sh new file mode 100755 index 0000000..e4922f0 --- /dev/null +++ b/tensorflow/lite/micro/tools/ci_build/test_bluepill_no_release.sh @@ -0,0 +1,39 @@ +#!/usr/bin/env bash +# Copyright 2020 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================== +# Called with following arguments: +# 1 - (optional) TENSORFLOW_ROOT: path to root of the TFLM tree (relative to directory from where the script is called). +# 2 - (optional) EXTERNAL_DIR: Path to the external directory that contains external code +# Tests the microcontroller code for bluepill + +set -e + +TARGET=bluepill +OPTIMIZED_KERNEL_DIR=cmsis_nn +TENSORFLOW_ROOT=${1} +EXTERNAL_DIR=${2} + +source ${TENSORFLOW_ROOT}tensorflow/lite/micro/tools/ci_build/helper_functions.sh + +readable_run make -f ${TENSORFLOW_ROOT}tensorflow/lite/micro/tools/make/Makefile clean TENSORFLOW_ROOT=${TENSORFLOW_ROOT} EXTERNAL_DIR=${EXTERNAL_DIR} + +# TODO(b/143715361): downloading first to allow for parallel builds. +readable_run make -f ${TENSORFLOW_ROOT}tensorflow/lite/micro/tools/make/Makefile OPTIMIZED_KERNEL_DIR=${OPTIMIZED_KERNEL_DIR} TARGET=${TARGET} third_party_downloads TENSORFLOW_ROOT=${TENSORFLOW_ROOT} EXTERNAL_DIR=${EXTERNAL_DIR} + +# Build w/o release so that we can run the tests and get additional +# debugging info on failures. 
+readable_run make -f ${TENSORFLOW_ROOT}tensorflow/lite/micro/tools/make/Makefile clean TENSORFLOW_ROOT=${TENSORFLOW_ROOT} EXTERNAL_DIR=${EXTERNAL_DIR} +readable_run make -j8 -f ${TENSORFLOW_ROOT}tensorflow/lite/micro/tools/make/Makefile OPTIMIZED_KERNEL_DIR=${OPTIMIZED_KERNEL_DIR} TARGET=${TARGET} build TENSORFLOW_ROOT=${TENSORFLOW_ROOT} EXTERNAL_DIR=${EXTERNAL_DIR} +readable_run make -j8 -f ${TENSORFLOW_ROOT}tensorflow/lite/micro/tools/make/Makefile OPTIMIZED_KERNEL_DIR=${OPTIMIZED_KERNEL_DIR} TARGET=${TARGET} test TENSORFLOW_ROOT=${TENSORFLOW_ROOT} EXTERNAL_DIR=${EXTERNAL_DIR} diff --git a/tensorflow/lite/micro/tools/ci_build/test_bluepill_release.sh b/tensorflow/lite/micro/tools/ci_build/test_bluepill_release.sh new file mode 100755 index 0000000..9ec781c --- /dev/null +++ b/tensorflow/lite/micro/tools/ci_build/test_bluepill_release.sh @@ -0,0 +1,37 @@ +#!/usr/bin/env bash +# Copyright 2020 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================== +# Called with following arguments: +# 1 - (optional) TENSORFLOW_ROOT: path to root of the TFLM tree (relative to directory from where the script is called). 
+# 2 - (optional) EXTERNAL_DIR: Path to the external directory that contains external code +# Tests the microcontroller code for bluepill + +set -e + +TARGET=bluepill +OPTIMIZED_KERNEL_DIR=cmsis_nn +TENSORFLOW_ROOT=${1} +EXTERNAL_DIR=${2} + +source ${TENSORFLOW_ROOT}tensorflow/lite/micro/tools/ci_build/helper_functions.sh + +readable_run make -f ${TENSORFLOW_ROOT}tensorflow/lite/micro/tools/make/Makefile clean TENSORFLOW_ROOT=${TENSORFLOW_ROOT} EXTERNAL_DIR=${EXTERNAL_DIR} + +# TODO(b/143715361): downloading first to allow for parallel builds. +readable_run make -f ${TENSORFLOW_ROOT}tensorflow/lite/micro/tools/make/Makefile OPTIMIZED_KERNEL_DIR=${OPTIMIZED_KERNEL_DIR} TARGET=${TARGET} third_party_downloads TENSORFLOW_ROOT=${TENSORFLOW_ROOT} EXTERNAL_DIR=${EXTERNAL_DIR} + +# Make sure that the release build succeeds. +readable_run make -f ${TENSORFLOW_ROOT}tensorflow/lite/micro/tools/make/Makefile clean TENSORFLOW_ROOT=${TENSORFLOW_ROOT} EXTERNAL_DIR=${EXTERNAL_DIR} +readable_run make -j8 -f ${TENSORFLOW_ROOT}tensorflow/lite/micro/tools/make/Makefile BUILD_TYPE=release OPTIMIZED_KERNEL_DIR=${OPTIMIZED_KERNEL_DIR} TARGET=${TARGET} build TENSORFLOW_ROOT=${TENSORFLOW_ROOT} EXTERNAL_DIR=${EXTERNAL_DIR} diff --git a/tensorflow/lite/micro/tools/ci_build/test_bluepill_renode.sh b/tensorflow/lite/micro/tools/ci_build/test_bluepill_renode.sh new file mode 100755 index 0000000..ec7a68f --- /dev/null +++ b/tensorflow/lite/micro/tools/ci_build/test_bluepill_renode.sh @@ -0,0 +1,39 @@ +#!/usr/bin/env bash +# Copyright 2019 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================== +# Called with following arguments: +# 1 - (optional) TENSORFLOW_ROOT: path to root of the TFLM tree (relative to directory from where the script is called). +# 2 - (optional) EXTERNAL_DIR: Path to the external directory that contains external code +# Tests the microcontroller code for bluepill platform + +set -e +pwd + +TENSORFLOW_ROOT=${1} +EXTERNAL_DIR=${2} + +source ${TENSORFLOW_ROOT}tensorflow/lite/micro/tools/ci_build/helper_functions.sh + +readable_run make -f ${TENSORFLOW_ROOT}tensorflow/lite/micro/tools/make/Makefile clean TENSORFLOW_ROOT=${TENSORFLOW_ROOT} EXTERNAL_DIR=${EXTERNAL_DIR} + +TARGET=bluepill + +# TODO(b/143715361): downloading first to allow for parallel builds. +readable_run make -f ${TENSORFLOW_ROOT}tensorflow/lite/micro/tools/make/Makefile TARGET=${TARGET} third_party_downloads TENSORFLOW_ROOT=${TENSORFLOW_ROOT} EXTERNAL_DIR=${EXTERNAL_DIR} + +# We use Renode differently when running the full test suite (make test) vs an +# individual test. So, we test only one of the kernels individually as well to have +# both of the Renode variations be part of the CI. 
+readable_run make -j8 -f ${TENSORFLOW_ROOT}tensorflow/lite/micro/tools/make/Makefile TARGET=${TARGET} test_kernel_add_test TENSORFLOW_ROOT=${TENSORFLOW_ROOT} EXTERNAL_DIR=${EXTERNAL_DIR} diff --git a/tensorflow/lite/micro/tools/ci_build/test_code_style.sh b/tensorflow/lite/micro/tools/ci_build/test_code_style.sh new file mode 100755 index 0000000..0d25c9f --- /dev/null +++ b/tensorflow/lite/micro/tools/ci_build/test_code_style.sh @@ -0,0 +1,173 @@ +#!/usr/bin/env bash +# Copyright 2019 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================== + +set -e + +SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)" +ROOT_DIR=${SCRIPT_DIR}/../../../../.. +cd "${ROOT_DIR}" + +source tensorflow/lite/micro/tools/ci_build/helper_functions.sh + +# explicitly call third_party_downloads since we need pigweed for the license +# and clang-format checks. +make -f tensorflow/lite/micro/tools/make/Makefile third_party_downloads + +# Explicitly disable exit on error so that we can report all the style errors in +# one pass and clean up the temporary git repository even when one of the +# scripts fail with an error code. +set +e + +# --fix_formatting to let the script fix both code and build file format error. 
+FIX_FORMAT_FLAG=${1} + +############################################################ +# License Check +############################################################ +tensorflow/lite/micro/tools/make/downloads/pigweed/pw_presubmit/py/pw_presubmit/pigweed_presubmit.py \ + tensorflow/lite/kernels/internal/reference/ \ + tensorflow/lite/micro/ \ + third_party/ \ + -p copyright_notice \ + -e kernels/internal/reference/integer_ops/ \ + -e kernels/internal/reference/reference_ops.h \ + -e python/schema_py_generated.py \ + -e python_requirements.in \ + -e tools/make/downloads \ + -e tools/make/targets/ecm3531 \ + -e BUILD\ + -e leon_commands \ + -e "\.bmp" \ + -e "\.bzl" \ + -e "\.csv" \ + -e "\.h5" \ + -e "\.inc" \ + -e "\.ipynb" \ + -e "\.patch" \ + -e "\.properties" \ + -e "\.tflite" \ + -e "\.tpl" \ + -e "\.txt" \ + -e "\.wav" \ + --output-directory /tmp + +LICENSE_CHECK_RESULT=$? + +############################################################ +# Code Formatting Check +############################################################ + +if [[ ${FIX_FORMAT_FLAG} == "--fix_formatting" ]] +then + FIX_FORMAT_OPTIONS="--fix" +else + FIX_FORMAT_OPTIONS="" +fi + +tensorflow/lite/micro/tools/make/downloads/pigweed/pw_presubmit/py/pw_presubmit/format_code.py \ + ${FIX_FORMAT_OPTIONS} \ + -e "\.github" \ + -e third_party/hexagon \ + -e third_party/xtensa \ + -e ci \ + -e c/common.c \ + -e core/api/error_reporter.cc \ + -e kernels/internal/reference/integer_ops/ \ + -e kernels/internal/reference/reference_ops.h \ + -e kernels/internal/types.h \ + -e lite/python \ + -e lite/tools \ + -e experimental \ + -e schema/schema_generated.h \ + -e schema/schema_utils.h \ + -e "\.inc" \ + -e "\.md" + +CODE_FORMAT_RESULT=$? 
+ +############################################################ +# Build Formatting Check +############################################################ + +BUILDIFIER_MODE="diff" +if [[ ${FIX_FORMAT_FLAG} == "--fix_formatting" ]] +then + BUILDIFIER_MODE="fix" +fi + +BUILD_FILES=$(find . -name BUILD -o -name "*.bzl" -not -path "./tensorflow/lite/micro/tools/make/downloads/*") +buildifier --mode=${BUILDIFIER_MODE} --diff_command="diff -u" ${BUILD_FILES} +BUILD_FORMAT_RESULT=$? + +############################################################################# +# Avoided specific-code snippets for TFLM +############################################################################# +pushd tensorflow/lite/ + +CHECK_CONTENTS_PATHSPEC=\ +"micro"\ +" :(exclude)micro/tools/ci_build/test_code_style.sh"\ +" :(exclude)*\.md" + +# See https://github.com/tensorflow/tensorflow/issues/46297 for more context. +check_contents "gtest|gmock" "${CHECK_CONTENTS_PATHSPEC}" \ + "These matches can likely be deleted." +GTEST_RESULT=$? + +# See http://b/175657165 for more context. +ERROR_REPORTER_MESSAGE=\ +"TF_LITE_REPORT_ERROR should be used instead, so that log strings can be "\ +"removed to save space, if needed." + +check_contents "error_reporter.*Report\(|context->ReportError\(" \ + "${CHECK_CONTENTS_PATHSPEC}" "${ERROR_REPORTER_MESSAGE}" +ERROR_REPORTER_RESULT=$? + +# See http://b/175657165 for more context. +ASSERT_PATHSPEC=\ +"${CHECK_CONTENTS_PATHSPEC}"\ +" :(exclude)micro/examples/micro_speech/esp/ringbuf.c"\ +" :(exclude)*\.ipynb"\ +" :(exclude)*\.py"\ + +check_contents "\" "${ASSERT_PATHSPEC}" \ + "assert should not be used in TFLM code.." +ASSERT_RESULT=$? + +popd + +########################################################################### +# All checks are complete, clean up. +########################################################################### + + +# Re-enable exit on error now that we are done with the temporary git repo. 
+set -e + +if [[ ${CODE_FORMAT_RESULT} != 0 || ${BUILD_FORMAT_RESULT} != 0 ]] +then + echo "The formatting errors can be fixed with tensorflow/lite/micro/tools/ci_build/test_code_style.sh --fix_formatting" +fi +if [[ ${LICENSE_CHECK_RESULT} != 0 || \ + ${CODE_FORMAT_RESULT} != 0 || \ + ${BUILD_FORMAT_RESULT} != 0 || \ + ${GTEST_RESULT} != 0 || \ + ${ERROR_REPORTER_RESULT} != 0 || \ + ${ASSERT_RESULT} != 0 \ + ]] +then + exit 1 +fi diff --git a/tensorflow/lite/micro/tools/ci_build/test_cortex_m_corstone_300.sh b/tensorflow/lite/micro/tools/ci_build/test_cortex_m_corstone_300.sh new file mode 100755 index 0000000..516c181 --- /dev/null +++ b/tensorflow/lite/micro/tools/ci_build/test_cortex_m_corstone_300.sh @@ -0,0 +1,44 @@ +#!/usr/bin/env bash +# Copyright 2022 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================== +# +# Tests Arm Cortex-M55 microprocessor code with CMSIS-NN optimizied kernels using FVP based on Arm Corstone-300 software. + +set -e + +SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)" +ROOT_DIR=${SCRIPT_DIR}/../../../../.. 
+cd "${ROOT_DIR}" + +source tensorflow/lite/micro/tools/ci_build/helper_functions.sh + +if [[ $1 = "armclang" ]]; then + TOOLCHAIN=armclang +else + TOOLCHAIN=gcc +fi + +TARGET=cortex_m_corstone_300 +TARGET_ARCH=cortex-m55 +OPTIMIZED_KERNEL_DIR=cmsis_nn +TOOLCHAINS=(gcc armclang) + +# TODO(b/143715361): downloading first to allow for parallel builds. +readable_run make -f tensorflow/lite/micro/tools/make/Makefile CO_PROCESSOR=ethos_u OPTIMIZED_KERNEL_DIR=${OPTIMIZED_KERNEL_DIR} TARGET=${TARGET} TARGET_ARCH=${TARGET_ARCH} TOOLCHAIN=${TOOLCHAIN} third_party_downloads + +# Avoid running tests in parallel. +readable_run make -f tensorflow/lite/micro/tools/make/Makefile clean +readable_run make -j$(nproc) -f tensorflow/lite/micro/tools/make/Makefile CO_PROCESSOR=ethos_u OPTIMIZED_KERNEL_DIR=${OPTIMIZED_KERNEL_DIR} TARGET=${TARGET} TARGET_ARCH=${TARGET_ARCH} TOOLCHAIN=${TOOLCHAIN} build +readable_run make -f tensorflow/lite/micro/tools/make/Makefile CO_PROCESSOR=ethos_u OPTIMIZED_KERNEL_DIR=${OPTIMIZED_KERNEL_DIR} TARGET=${TARGET} TARGET_ARCH=${TARGET_ARCH} TOOLCHAIN=${TOOLCHAIN} test diff --git a/tensorflow/lite/micro/tools/ci_build/test_cortex_m_generic.sh b/tensorflow/lite/micro/tools/ci_build/test_cortex_m_generic.sh new file mode 100755 index 0000000..35f5ae6 --- /dev/null +++ b/tensorflow/lite/micro/tools/ci_build/test_cortex_m_generic.sh @@ -0,0 +1,53 @@ +#!/usr/bin/env bash +# Copyright 2020 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================== +# +# Tests the microcontroller code using a Cortex-M4/M4F platform. + +set -e + +SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)" +ROOT_DIR=${SCRIPT_DIR}/../../../../.. +cd "${ROOT_DIR}" + +source tensorflow/lite/micro/tools/ci_build/helper_functions.sh + +if [ $1 = "armclang" ]; then + TOOLCHAIN=armclang +else + TOOLCHAIN=gcc +fi + +TARGET=cortex_m_generic +OPTIMIZED_KERNEL_DIR=cmsis_nn + +# TODO(b/143715361): downloading first to allow for parallel builds. +readable_run make -f tensorflow/lite/micro/tools/make/Makefile OPTIMIZED_KERNEL_DIR=${OPTIMIZED_KERNEL_DIR} TARGET=${TARGET} TARGET_ARCH=cortex-m4 TOOLCHAIN=${TOOLCHAIN} third_party_downloads + +# Build for Cortex-M4 (no FPU) without CMSIS +readable_run make -f tensorflow/lite/micro/tools/make/Makefile clean +readable_run make -j$(nproc) -f tensorflow/lite/micro/tools/make/Makefile TARGET=${TARGET} TARGET_ARCH=cortex-m4 TOOLCHAIN=${TOOLCHAIN} microlite + +# Build for Cortex-M4F (FPU present) without CMSIS +readable_run make -f tensorflow/lite/micro/tools/make/Makefile clean +readable_run make -j$(nproc) -f tensorflow/lite/micro/tools/make/Makefile TARGET=${TARGET} TARGET_ARCH=cortex-m4+fp TOOLCHAIN=${TOOLCHAIN} microlite + +# Build for Cortex-M4 (no FPU) with CMSIS +readable_run make -f tensorflow/lite/micro/tools/make/Makefile clean +readable_run make -j$(nproc) -f tensorflow/lite/micro/tools/make/Makefile OPTIMIZED_KERNEL_DIR=${OPTIMIZED_KERNEL_DIR} TARGET=${TARGET} TARGET_ARCH=cortex-m4 TOOLCHAIN=${TOOLCHAIN} microlite + +# Build for Cortex-M4 (FPU present) with CMSIS +readable_run make -f tensorflow/lite/micro/tools/make/Makefile clean +readable_run make -j$(nproc) -f tensorflow/lite/micro/tools/make/Makefile OPTIMIZED_KERNEL_DIR=${OPTIMIZED_KERNEL_DIR} TARGET=${TARGET} TARGET_ARCH=cortex-m4+fp 
TOOLCHAIN=${TOOLCHAIN} microlite diff --git a/tensorflow/lite/micro/tools/ci_build/test_cortex_m_qemu.sh b/tensorflow/lite/micro/tools/ci_build/test_cortex_m_qemu.sh new file mode 100755 index 0000000..cf98933 --- /dev/null +++ b/tensorflow/lite/micro/tools/ci_build/test_cortex_m_qemu.sh @@ -0,0 +1,53 @@ +#!/usr/bin/env bash +# Copyright 2019 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================== +# Called with following arguments: +# 1 - (optional) TENSORFLOW_ROOT: path to root of the TFLM tree (relative to directory from where the script is called). +# 2 - (optional) EXTERNAL_DIR: Path to the external directory that contains external code +# Tests the microcontroller code with QEMU emulator + +set -e +pwd + +TENSORFLOW_ROOT=${1} +EXTERNAL_DIR=${2} +TARGET=cortex_m_qemu +TARGET_ARCH=${3:-cortex-m3} +OPTIMIZED_KERNEL_DIR=cmsis_nn + +source ${TENSORFLOW_ROOT}tensorflow/lite/micro/tools/ci_build/helper_functions.sh + +readable_run make -f ${TENSORFLOW_ROOT}tensorflow/lite/micro/tools/make/Makefile \ + TENSORFLOW_ROOT=${TENSORFLOW_ROOT} \ + EXTERNAL_DIR=${EXTERNAL_DIR} \ + clean + +# TODO(b/143715361): downloading first to allow for parallel builds. 
+readable_run make -f ${TENSORFLOW_ROOT}tensorflow/lite/micro/tools/make/Makefile \ + TARGET=${TARGET} \ + TARGET_ARCH=${TARGET_ARCH} \ + OPTIMIZED_KERNEL_DIR=${OPTIMIZED_KERNEL_DIR} \ + TENSORFLOW_ROOT=${TENSORFLOW_ROOT} \ + EXTERNAL_DIR=${EXTERNAL_DIR} \ + third_party_downloads + +readable_run make -f ${TENSORFLOW_ROOT}tensorflow/lite/micro/tools/make/Makefile \ + TARGET=${TARGET} \ + TARGET_ARCH=${TARGET_ARCH} \ + OPTIMIZED_KERNEL_DIR=${OPTIMIZED_KERNEL_DIR} \ + TENSORFLOW_ROOT=${TENSORFLOW_ROOT} \ + EXTERNAL_DIR=${EXTERNAL_DIR} \ + test -j$(nproc) + diff --git a/tensorflow/lite/micro/tools/ci_build/test_generate_integration_tests.sh b/tensorflow/lite/micro/tools/ci_build/test_generate_integration_tests.sh new file mode 100755 index 0000000..c432fad --- /dev/null +++ b/tensorflow/lite/micro/tools/ci_build/test_generate_integration_tests.sh @@ -0,0 +1,37 @@ +#!/usr/bin/env bash +# Copyright 2021 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================== + +set -e + +SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)" +ROOT_DIR=${SCRIPT_DIR}/../../../../.. 
+cd "${ROOT_DIR}" + +source tensorflow/lite/micro/tools/ci_build/helper_functions.sh + +KERNEL=conv + +TEST_TFLITE_FILE="$(realpath ${ROOT_DIR}/tensorflow/lite/micro/models/person_detect.tflite)" +TEST_OUTPUT_DIR=${ROOT_DIR}/tensorflow/lite/micro/integration_tests/person_detect/${KERNEL} +mkdir -p ${TEST_OUTPUT_DIR} +TEST_OUTPUT_DIR_REALPATH="$(realpath ${TEST_OUTPUT_DIR})" + +readable_run bazel run tensorflow/lite/micro/integration_tests:generate_per_layer_tests -- --input_tflite_file=${TEST_TFLITE_FILE} --output_dir=${TEST_OUTPUT_DIR_REALPATH} + +readable_run bazel test tensorflow/lite/micro/integration_tests/person_detect/${KERNEL}:integration_test \ + --test_output=errors + +readable_run make -j8 -f tensorflow/lite/micro/tools/make/Makefile test_integration_tests_person_detect_${KERNEL}_test diff --git a/tensorflow/lite/micro/tools/ci_build/test_generate_micro_mutable_op_resolver_tests.sh b/tensorflow/lite/micro/tools/ci_build/test_generate_micro_mutable_op_resolver_tests.sh new file mode 100755 index 0000000..803f015 --- /dev/null +++ b/tensorflow/lite/micro/tools/ci_build/test_generate_micro_mutable_op_resolver_tests.sh @@ -0,0 +1,46 @@ +#!/usr/bin/env bash +# Copyright 2022 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================== + +set -e + +SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)" +ROOT_DIR=${SCRIPT_DIR}/../../../../.. 
+cd "${ROOT_DIR}" + +source tensorflow/lite/micro/tools/ci_build/helper_functions.sh + +MODEL="person_detect" +TEST_TFLITE_PATH="$(realpath ${ROOT_DIR}/tensorflow/lite/micro/models)" +TEST_TFLITE_NAME="${MODEL}.tflite" +TEST_TFLITE_FILE="${TEST_TFLITE_PATH}/${TEST_TFLITE_NAME}" +MODEL_BASENAME=$(basename ${TEST_TFLITE_FILE} .tflite) +TEST_OUTPUT_DIR_RELATIVE=tensorflow/lite/micro/tools/gen_micro_mutable_op_resolver_test +TEST_OUTPUT_DIR=${ROOT_DIR}/${TEST_OUTPUT_DIR_RELATIVE} +mkdir -p ${TEST_OUTPUT_DIR} +TEST_OUTPUT_DIR_REALPATH="$(realpath ${TEST_OUTPUT_DIR})" +TEST_OUTPUT_MODEL_DIR_REALPATH="$(realpath ${TEST_OUTPUT_DIR})/${MODEL_BASENAME}" +GEN_TEST_OUTPUT_DIR_RELATIVE=${TEST_OUTPUT_DIR_RELATIVE}/${MODEL} + +readable_run bazel run tensorflow/lite/micro/tools/gen_micro_mutable_op_resolver:generate_micro_mutable_op_resolver_from_model -- \ + --common_tflite_path=${TEST_TFLITE_PATH} --input_tflite_files=${TEST_TFLITE_NAME} --output_dir=${TEST_OUTPUT_MODEL_DIR_REALPATH} + +readable_run bazel run tensorflow/lite/micro/tools/gen_micro_mutable_op_resolver:generate_micro_mutable_op_resolver_from_model_test -- \ + --input_tflite_file=${TEST_TFLITE_FILE} -output_dir=${TEST_OUTPUT_DIR_REALPATH} + +readable_run bazel run ${GEN_TEST_OUTPUT_DIR_RELATIVE}:micro_mutable_op_resolver_test + +readable_run make -j8 -f tensorflow/lite/micro/tools/make/Makefile \ + test_generated_micro_mutable_op_resolver_person_detect_test diff --git a/tensorflow/lite/micro/tools/ci_build/test_hexagon.sh b/tensorflow/lite/micro/tools/ci_build/test_hexagon.sh new file mode 100755 index 0000000..fb652eb --- /dev/null +++ b/tensorflow/lite/micro/tools/ci_build/test_hexagon.sh @@ -0,0 +1,59 @@ +#!/usr/bin/env bash +# Copyright 2021 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================== +# Called with following arguments: +# 1 - (optional) TENSORFLOW_ROOT: path to root of the TFLM tree (relative to directory from where the script is called). +# 2 - (optional) EXTERNAL_DIR: Path to the external directory that contains external code +# 3 - (optional) Path to the HEXAGON TFLM Lib + +set -e + +# Default prebulit core library on docker +HEXAGON_TFLM_LIB=/root/Qualcomm/hexagon_tflm_core.a + +TENSORFLOW_ROOT=${1} +EXTERNAL_DIR=${2} + +if [[ $# -ge 3 ]]; +then + HEXAGON_TFLM_LIB=$3 +fi + +source ${TENSORFLOW_ROOT}tensorflow/lite/micro/tools/ci_build/helper_functions.sh + +readable_run make -f ${TENSORFLOW_ROOT}tensorflow/lite/micro/tools/make/Makefile clean TENSORFLOW_ROOT=${TENSORFLOW_ROOT} EXTERNAL_DIR=${EXTERNAL_DIR} + +# TODO(b/143904317): downloading first to allow for parallel builds. 
+readable_run make -f ${TENSORFLOW_ROOT}tensorflow/lite/micro/tools/make/Makefile third_party_downloads TENSORFLOW_ROOT=${TENSORFLOW_ROOT} EXTERNAL_DIR=${EXTERNAL_DIR} + +readable_run make -f ${TENSORFLOW_ROOT}tensorflow/lite/micro/tools/make/Makefile \ + TARGET=hexagon \ + OPTIMIZED_KERNEL_DIR=hexagon \ + OPTIMIZED_KERNEL_DIR_PREFIX=${TENSORFLOW_ROOT}third_party \ + HEXAGON_TFLM_LIB=${HEXAGON_TFLM_LIB} \ + TENSORFLOW_ROOT=${TENSORFLOW_ROOT} \ + EXTERNAL_DIR=${EXTERNAL_DIR} \ + build -j$(nproc) + +readable_run make -f ${TENSORFLOW_ROOT}tensorflow/lite/micro/tools/make/Makefile \ + TARGET=hexagon \ + OPTIMIZED_KERNEL_DIR=hexagon \ + OPTIMIZED_KERNEL_DIR_PREFIX=${TENSORFLOW_ROOT}third_party \ + HEXAGON_TFLM_LIB=${HEXAGON_TFLM_LIB} \ + TENSORFLOW_ROOT=${TENSORFLOW_ROOT} \ + EXTERNAL_DIR=${EXTERNAL_DIR} \ + test -j$(nproc) + + diff --git a/tensorflow/lite/micro/tools/ci_build/test_makefile.sh b/tensorflow/lite/micro/tools/ci_build/test_makefile.sh new file mode 100755 index 0000000..8a5a4d2 --- /dev/null +++ b/tensorflow/lite/micro/tools/ci_build/test_makefile.sh @@ -0,0 +1,37 @@ +#!/usr/bin/env bash +# Copyright 2021 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================== + +set -e + +SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)" +ROOT_DIR=${SCRIPT_DIR}/../../../../.. 
+cd "${ROOT_DIR}" +pwd + +# Check that an incorrect optimized kernel directory results in an error. +# Without such an error, an incorrect optimized kernel directory can result in +# an unexpected fallback to reference kernels and which can be hard to debug. We +# add some complexity to the CI to make sure that we do not repeat the same +# mistake as described in http://b/183546742. +INCORRECT_CMD="make -f tensorflow/lite/micro/tools/make/Makefile OPTIMIZED_KERNEL_DIR=does_not_exist clean" +EXT_LIBS_INC=tensorflow/lite/micro/tools/make/ext_libs/does_not_exist.inc +touch ${EXT_LIBS_INC} +if ${INCORRECT_CMD} &> /dev/null ; then + echo "'${INCORRECT_CMD}' should have failed but it did not have any errors." + rm -f ${EXT_LIBS_INC} + exit 1 +fi +rm -f ${EXT_LIBS_INC} diff --git a/tensorflow/lite/micro/tools/ci_build/test_project_generation.sh b/tensorflow/lite/micro/tools/ci_build/test_project_generation.sh new file mode 100755 index 0000000..63339d7 --- /dev/null +++ b/tensorflow/lite/micro/tools/ci_build/test_project_generation.sh @@ -0,0 +1,141 @@ +#!/usr/bin/env bash +# Copyright 2022 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================== +# Called with following arguments: +# 1 - (optional) TENSORFLOW_ROOT: path to root of the TFLM tree (relative to directory from where the script is called). 
+# 2 - (optional) EXTERNAL_DIR: Path to the external directory that contains external code +set -e + +TENSORFLOW_ROOT=${1} +EXTERNAL_DIR=${2} + +ROOT_DIR="$(pwd)/${TENSORFLOW_ROOT}" + +source ${TENSORFLOW_ROOT}tensorflow/lite/micro/tools/ci_build/helper_functions.sh + +# TODO(b/261685878): re-enable once all the issues with the bazel project +# generation CI are sorted out. +# +# # First, we test that create_tflm_tree without any examples can be used to build a +# # static library with bazel. Bazel can help catch errors that are not caught by +# # a simple makefile (e.g. http://b/261106859). +# TEST_OUTPUT_DIR="$(mktemp -d)" +# +# # We currently run the bazel build from TENSORFLOW_ROOT. +# pushd "${ROOT_DIR}" > /dev/null +# readable_run \ +# python3 tensorflow/lite/micro/tools/project_generation/create_tflm_tree.py \ +# "${TEST_OUTPUT_DIR}" +# +# readable_run cp tensorflow/lite/micro/tools/project_generation/BUILD.testing "${TEST_OUTPUT_DIR}/BUILD" +# popd > /dev/null +# +# pushd "${TEST_OUTPUT_DIR}" > /dev/null +# readable_run touch WORKSPACE +# readable_run bazel build :libtflm +# popd > /dev/null +# +# rm -rf "${TEST_OUTPUT_DIR}" + +# Next, we test that create_tflm_tree can be used to build example binaries. We +# perform this test with a Makefile (instead of bazel) because make is more +# commonly understood and because we use make for cross-compilation. +EXAMPLES="-e hello_world -e micro_speech -e person_detection" + +TEST_OUTPUT_DIR="$(mktemp -d)" + +readable_run \ + python3 ${TENSORFLOW_ROOT}tensorflow/lite/micro/tools/project_generation/create_tflm_tree.py \ + --makefile_options="TENSORFLOW_ROOT=${TENSORFLOW_ROOT} EXTERNAL_DIR=${EXTERNAL_DIR}" \ + "${TEST_OUTPUT_DIR}" \ + ${EXAMPLES} + +# Confirm that print_src_files and print_dest_files output valid paths (and +# nothing else). 
+set +x +FILES="$(python3 ${TENSORFLOW_ROOT}tensorflow/lite/micro/tools/project_generation/create_tflm_tree.py \ + --makefile_options="TENSORFLOW_ROOT=${TENSORFLOW_ROOT} EXTERNAL_DIR=${EXTERNAL_DIR}" \ + ${TEST_OUTPUT_DIR} \ + --print_src_files --print_dest_files --no_copy)" + +readable_run ls ${FILES} > /dev/null + +# Next, make sure that the output tree has all the files needed buld the +# examples. +readable_run cp ${TENSORFLOW_ROOT}tensorflow/lite/micro/tools/project_generation/Makefile "${TEST_OUTPUT_DIR}" +pushd "${TEST_OUTPUT_DIR}" > /dev/null +readable_run make -j8 examples TENSORFLOW_ROOT=${TENSORFLOW_ROOT} +popd > /dev/null + +rm -rf "${TEST_OUTPUT_DIR}" + +# Remove existing state prior to testing project generation for cortex-m target. +make -f ${TENSORFLOW_ROOT}tensorflow/lite/micro/tools/make/Makefile clean clean_downloads TENSORFLOW_ROOT=${TENSORFLOW_ROOT} + +TEST_OUTPUT_DIR_CMSIS="$(mktemp -d)" + +readable_run \ + python3 ${TENSORFLOW_ROOT}tensorflow/lite/micro/tools/project_generation/create_tflm_tree.py \ + --makefile_options="TARGET=cortex_m_generic OPTIMIZED_KERNEL_DIR=cmsis_nn TARGET_ARCH=project_generation TENSORFLOW_ROOT=${TENSORFLOW_ROOT} EXTERNAL_DIR=${EXTERNAL_DIR}" \ + "${TEST_OUTPUT_DIR_CMSIS}" \ + ${EXAMPLES} + +readable_run \ + cp ${TENSORFLOW_ROOT}tensorflow/lite/micro/tools/project_generation/Makefile "${TEST_OUTPUT_DIR_CMSIS}" + +pushd "${TEST_OUTPUT_DIR_CMSIS}" > /dev/null + +PATH="${PATH}:${ROOT_DIR}tensorflow/lite/micro/tools/make/downloads/gcc_embedded/bin" \ + readable_run \ + make -j8 BUILD_TYPE=cmsis_nn TENSORFLOW_ROOT=${TENSORFLOW_ROOT} + +popd > /dev/null + +rm -rf "${TEST_OUTPUT_DIR_CMSIS}" + +# Test that C++ files are renamed to .cpp +TEST_OUTPUT_DIR_RENAME_CC="$(mktemp -d)" + +readable_run \ + python3 ${TENSORFLOW_ROOT}tensorflow/lite/micro/tools/project_generation/create_tflm_tree.py \ + --rename_cc_to_cpp \ + --makefile_options="TENSORFLOW_ROOT=${TENSORFLOW_ROOT} EXTERNAL_DIR=${EXTERNAL_DIR}" \ + 
"${TEST_OUTPUT_DIR_RENAME_CC}" + +CC_FILES="$(find ${TEST_OUTPUT_DIR_RENAME_CC} -name "*.cc" | head)" +CPP_FILES="$(find ${TEST_OUTPUT_DIR_RENAME_CC} -name "*.cpp" | head)" + +if test -n "${CC_FILES}"; then + echo "Expected no .cc file to exist" + echo "${CC_FILES}" + exit 1; +fi + +if test -z "${CPP_FILES}"; then + echo "Expected a .cpp file to exist" + echo "${CPP_FILES}}}" + exit 1; +fi + +# Test the tflm tree creation works even inside from TENSORFLOW_ROOT directory. +pushd "${TENSORFLOW_ROOT}" > /dev/null +TEST_OUTPUT_DIR="$(mktemp -d)" +readable_run \ +python3 tensorflow/lite/micro/tools/project_generation/create_tflm_tree.py \ +--makefile_options="TARGET=cortex_m_generic OPTIMIZED_KERNEL_DIR=cmsis_nn TARGET_ARCH=cortex-m4" \ +"${TEST_OUTPUT_DIR}" +rm -rf "${TEST_OUTPUT_DIR}" +popd > /dev/null + diff --git a/tensorflow/lite/micro/tools/ci_build/test_riscv.sh b/tensorflow/lite/micro/tools/ci_build/test_riscv.sh new file mode 100755 index 0000000..d511bd9 --- /dev/null +++ b/tensorflow/lite/micro/tools/ci_build/test_riscv.sh @@ -0,0 +1,36 @@ +#!/usr/bin/env bash +# Copyright 2021 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================== +# +# Tests the RISC-V MCU platform for the SiFive FE310. 
+ +set -e + +TENSORFLOW_ROOT=${1} +EXTERNAL_DIR=${2} +source ${TENSORFLOW_ROOT}tensorflow/lite/micro/tools/ci_build/helper_functions.sh + +TARGET=riscv32_generic + +readable_run make -f ${TENSORFLOW_ROOT}tensorflow/lite/micro/tools/make/Makefile TARGET=${TARGET} third_party_downloads TENSORFLOW_ROOT=${TENSORFLOW_ROOT} EXTERNAL_DIR=${EXTERNAL_DIR} + +# check that the release build is ok. +readable_run make -f ${TENSORFLOW_ROOT}tensorflow/lite/micro/tools/make/Makefile clean TENSORFLOW_ROOT=${TENSORFLOW_ROOT} EXTERNAL_DIR=${EXTERNAL_DIR} +readable_run make -j8 -f ${TENSORFLOW_ROOT}tensorflow/lite/micro/tools/make/Makefile TARGET=${TARGET} BUILD_TYPE=release build TENSORFLOW_ROOT=${TENSORFLOW_ROOT} EXTERNAL_DIR=${EXTERNAL_DIR} + +# Next, build w/o release so that we can run the tests and get additional +# debugging info on failures. +readable_run make -f ${TENSORFLOW_ROOT}tensorflow/lite/micro/tools/make/Makefile clean TENSORFLOW_ROOT=${TENSORFLOW_ROOT} EXTERNAL_DIR=${EXTERNAL_DIR} +readable_run make -j8 -f ${TENSORFLOW_ROOT}tensorflow/lite/micro/tools/make/Makefile TARGET=${TARGET} BUILD_TYPE=debug test TENSORFLOW_ROOT=${TENSORFLOW_ROOT} EXTERNAL_DIR=${EXTERNAL_DIR} diff --git a/tensorflow/lite/micro/tools/ci_build/test_size.sh b/tensorflow/lite/micro/tools/ci_build/test_size.sh new file mode 100755 index 0000000..7e6950c --- /dev/null +++ b/tensorflow/lite/micro/tools/ci_build/test_size.sh @@ -0,0 +1,77 @@ +#!/usr/bin/env bash +# Copyright 2021 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================== +# +# This script builds a TFLite micro test binary and compare the size difference +# between this binary and that same binary from the main repo. +# If the optional argument string "error_on_memory_increase" is provided as the +# script input, the script will error exit on any memory increase. +# If no argument is provided, the script produce a size comparison report. +set -e + +source tensorflow/lite/micro/tools/ci_build/helper_functions.sh + +# Utility function to build a target and return its path back to caller through +# a global variable __BINARY_TARGET_PATH. +# The caller is expected to store this __BINARY_TARGET_PATH back to its local +# variable if it needs to use the generated binary target with path later on. +__BINARY_TARGET_PATH= +function build_target() { + local binary_target=$1 + local build_type=$2 + local target=$3 + local target_arch=$4 + readable_run make -f tensorflow/lite/micro/tools/make/Makefile third_party_downloads + readable_run make -j8 -f tensorflow/lite/micro/tools/make/Makefile build build_type=${build_type} TARGET=${target} TARGET_ARCH=${target_arch} ${binary_target} + + # Return the relative binary with path and name. + __BINARY_TARGET_PATH="gen/${target}_${target_arch}_${build_type}/bin/${binary_target}" +} + +FLAG_ERROR_ON_MEM_INCREASE=$1 +# TODO(b/196637015): change this to a real benchmark binary after the experiment +# is complete. +BENCHMARK_TARGET=binary_size_test + +SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)" +ROOT_DIR=${SCRIPT_DIR}/../../../../.. + +# Build a binary for the current repo +cd "${ROOT_DIR}" +# Clean once. 
+readable_run make -f tensorflow/lite/micro/tools/make/Makefile clean + +build_target ${BENCHMARK_TARGET} default linux x86_64 +CURRENT_BINARY=${__BINARY_TARGET_PATH} +size ${CURRENT_BINARY} > ${ROOT_DIR}/ci/size_log.txt + +# Get a clone of the main repo as the reference. +REF_ROOT_DIR="$(mktemp -d ${ROOT_DIR}/../main_ref.XXXXXX)" +git clone https://github.com/tensorflow/tflite-micro.git ${REF_ROOT_DIR} + +# Build a binary for the main repo. +cd ${REF_ROOT_DIR} +build_target ${BENCHMARK_TARGET} default linux x86_64 +REF_BINARY=${__BINARY_TARGET_PATH} +size ${REF_BINARY} > ${REF_ROOT_DIR}/ci/size_log.txt + +# Compare the two files at th root of current repo. +cd ${ROOT_DIR} +if [ "${FLAG_ERROR_ON_MEM_INCREASE}" = "error_on_mem_increase" ] +then + tensorflow/lite/micro/tools/ci_build/size_comp.py -a ${REF_ROOT_DIR}/ci/size_log.txt ${ROOT_DIR}/ci/size_log.txt --error_on_mem_increase +else + tensorflow/lite/micro/tools/ci_build/size_comp.py -a ${REF_ROOT_DIR}/ci/size_log.txt ${ROOT_DIR}/ci/size_log.txt +fi \ No newline at end of file diff --git a/tensorflow/lite/micro/tools/ci_build/test_x86_default.sh b/tensorflow/lite/micro/tools/ci_build/test_x86_default.sh new file mode 100755 index 0000000..623238e --- /dev/null +++ b/tensorflow/lite/micro/tools/ci_build/test_x86_default.sh @@ -0,0 +1,42 @@ +#!/usr/bin/env bash +# Copyright 2019 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# ============================================================================== +# +# Called with following arguments: +# 1 - (optional) TENSORFLOW_ROOT: path to root of the TFLM tree (relative to directory from where the script is called). +# 2 - (optional) EXTERNAL_DIR: Path to the external directory that contains external code +# Tests the microcontroller code using native x86 execution. +# +# This file is a subset of the tests in test_x86.sh. It is for parallelizing the test +# suite on github actions. + +set -e + +TENSORFLOW_ROOT=${1} +EXTERNAL_DIR=${2} + +source ${TENSORFLOW_ROOT}tensorflow/lite/micro/tools/ci_build/helper_functions.sh + +readable_run make -f ${TENSORFLOW_ROOT}tensorflow/lite/micro/tools/make/Makefile clean TENSORFLOW_ROOT=${TENSORFLOW_ROOT} EXTERNAL_DIR=${EXTERNAL_DIR} + +# TODO(b/143715361): downloading first to allow for parallel builds. +readable_run make -f ${TENSORFLOW_ROOT}tensorflow/lite/micro/tools/make/Makefile third_party_downloads TENSORFLOW_ROOT=${TENSORFLOW_ROOT} EXTERNAL_DIR=${EXTERNAL_DIR} + +# Build w/o release so that we can run the tests and get additional +# debugging info on failures. 
+readable_run make -f ${TENSORFLOW_ROOT}tensorflow/lite/micro/tools/make/Makefile clean TENSORFLOW_ROOT=${TENSORFLOW_ROOT} EXTERNAL_DIR=${EXTERNAL_DIR} +readable_run make -s -j8 -f ${TENSORFLOW_ROOT}tensorflow/lite/micro/tools/make/Makefile build TENSORFLOW_ROOT=${TENSORFLOW_ROOT} EXTERNAL_DIR=${EXTERNAL_DIR} +readable_run make -s -j8 -f ${TENSORFLOW_ROOT}tensorflow/lite/micro/tools/make/Makefile test TENSORFLOW_ROOT=${TENSORFLOW_ROOT} EXTERNAL_DIR=${EXTERNAL_DIR} +readable_run make -s -j8 -f ${TENSORFLOW_ROOT}tensorflow/lite/micro/tools/make/Makefile integration_tests TENSORFLOW_ROOT=${TENSORFLOW_ROOT} EXTERNAL_DIR=${EXTERNAL_DIR} diff --git a/tensorflow/lite/micro/tools/ci_build/test_x86_no_tflite_static_memory.sh b/tensorflow/lite/micro/tools/ci_build/test_x86_no_tflite_static_memory.sh new file mode 100755 index 0000000..9d63a26 --- /dev/null +++ b/tensorflow/lite/micro/tools/ci_build/test_x86_no_tflite_static_memory.sh @@ -0,0 +1,42 @@ +#!/usr/bin/env bash +# Copyright 2019 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================== +# +# Called with following arguments: +# 1 - (optional) TENSORFLOW_ROOT: path to root of the TFLM tree (relative to directory from where the script is called). +# 2 - (optional) EXTERNAL_DIR: Path to the external directory that contains external code +# Tests the microcontroller code using native x86 execution. 
+# +# This file is a subset of the tests in test_x86.sh. It is for parallelizing the test +# suite on github actions. + +set -e + +TENSORFLOW_ROOT=${1} +EXTERNAL_DIR=${2} + +source ${TENSORFLOW_ROOT}tensorflow/lite/micro/tools/ci_build/helper_functions.sh + +readable_run make -f ${TENSORFLOW_ROOT}tensorflow/lite/micro/tools/make/Makefile clean TENSORFLOW_ROOT=${TENSORFLOW_ROOT} EXTERNAL_DIR=${EXTERNAL_DIR} + +# TODO(b/143715361): downloading first to allow for parallel builds. +readable_run make -f ${TENSORFLOW_ROOT}tensorflow/lite/micro/tools/make/Makefile third_party_downloads TENSORFLOW_ROOT=${TENSORFLOW_ROOT} EXTERNAL_DIR=${EXTERNAL_DIR} + +# Build w/o TF_LITE_STATIC_MEMORY to catch additional errors. +# TODO(b/160955687): We run the tests w/o TF_LITE_STATIC_MEMORY to make the +# internal and open source CI consistent. See b/160955687#comment7 for more +# details. +readable_run make -f ${TENSORFLOW_ROOT}tensorflow/lite/micro/tools/make/Makefile clean TENSORFLOW_ROOT=${TENSORFLOW_ROOT} EXTERNAL_DIR=${EXTERNAL_DIR} +readable_run make -j8 -f ${TENSORFLOW_ROOT}tensorflow/lite/micro/tools/make/Makefile BUILD_TYPE=no_tf_lite_static_memory test TENSORFLOW_ROOT=${TENSORFLOW_ROOT} EXTERNAL_DIR=${EXTERNAL_DIR} diff --git a/tensorflow/lite/micro/tools/ci_build/test_x86_out_of_tree.sh b/tensorflow/lite/micro/tools/ci_build/test_x86_out_of_tree.sh new file mode 100755 index 0000000..6699db1 --- /dev/null +++ b/tensorflow/lite/micro/tools/ci_build/test_x86_out_of_tree.sh @@ -0,0 +1,46 @@ +#!/usr/bin/env bash +# Copyright 2019 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================== +# +# Called with following arguments: +# 1 - (optional) TENSORFLOW_ROOT: path to root of the TFLM tree (relative to directory from where the script is called). +# 2 - (optional) EXTERNAL_DIR: Path to the external directory that contains external code +# Tests the microcontroller code using native x86 execution. +# +# This file is a subset of the tests in test_x86.sh. It is for parallelizing the test +# suite on github actions. + +set -e + +TENSORFLOW_ROOT=${1} +EXTERNAL_DIR=${2} + +source ${TENSORFLOW_ROOT}tensorflow/lite/micro/tools/ci_build/helper_functions.sh + +readable_run make -f ${TENSORFLOW_ROOT}tensorflow/lite/micro/tools/make/Makefile clean TENSORFLOW_ROOT=${TENSORFLOW_ROOT} EXTERNAL_DIR=${EXTERNAL_DIR} + +# TODO(b/143715361): downloading first to allow for parallel builds. +readable_run make -f ${TENSORFLOW_ROOT}tensorflow/lite/micro/tools/make/Makefile third_party_downloads TENSORFLOW_ROOT=${TENSORFLOW_ROOT} EXTERNAL_DIR=${EXTERNAL_DIR} + +# Test the hello_world as an example outside of the github repo. 
+readable_run make -f ${TENSORFLOW_ROOT}tensorflow/lite/micro/tools/make/Makefile clean TENSORFLOW_ROOT=${TENSORFLOW_ROOT} EXTERNAL_DIR=${EXTERNAL_DIR} +cp -r ${TENSORFLOW_ROOT}tensorflow/lite/micro/examples/hello_world ./ +sed -i 's/tensorflow\/lite\/micro\/examples\///g' hello_world/Makefile.inc +sed -i 's/$(TENSORFLOW_ROOT)//g' hello_world/Makefile.inc +mv hello_world/Makefile.inc hello_world/Makefile_internal.inc +sed -i 's/tensorflow\/lite\/micro\/examples\///g' hello_world/hello_world_test.cc +readable_run make -s -j8 -f ${TENSORFLOW_ROOT}tensorflow/lite/micro/tools/make/Makefile test_hello_world_test TENSORFLOW_ROOT=${TENSORFLOW_ROOT} EXTERNAL_DIR=hello_world/ +readable_run make -f ${TENSORFLOW_ROOT}tensorflow/lite/micro/tools/make/Makefile clean TENSORFLOW_ROOT=${TENSORFLOW_ROOT} EXTERNAL_DIR=hello_world/ +rm -rf hello_world \ No newline at end of file diff --git a/tensorflow/lite/micro/tools/ci_build/test_x86_release.sh b/tensorflow/lite/micro/tools/ci_build/test_x86_release.sh new file mode 100755 index 0000000..ec96f99 --- /dev/null +++ b/tensorflow/lite/micro/tools/ci_build/test_x86_release.sh @@ -0,0 +1,46 @@ +#!/usr/bin/env bash +# Copyright 2019 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# ============================================================================== +# +# Called with following arguments: +# 1 - (optional) TENSORFLOW_ROOT: path to root of the TFLM tree (relative to directory from where the script is called). +# 2 - (optional) EXTERNAL_DIR: Path to the external directory that contains external code +# Tests the microcontroller code using native x86 execution. +# +# This file is a subset of the tests in test_x86.sh. It is for parallelizing the test +# suite on github actions. + +set -e + +TENSORFLOW_ROOT=${1} +EXTERNAL_DIR=${2} + +source ${TENSORFLOW_ROOT}tensorflow/lite/micro/tools/ci_build/helper_functions.sh + +readable_run make -f ${TENSORFLOW_ROOT}tensorflow/lite/micro/tools/make/Makefile clean TENSORFLOW_ROOT=${TENSORFLOW_ROOT} EXTERNAL_DIR=${EXTERNAL_DIR} + +# TODO(b/143715361): downloading first to allow for parallel builds. +readable_run make -f ${TENSORFLOW_ROOT}tensorflow/lite/micro/tools/make/Makefile third_party_downloads TENSORFLOW_ROOT=${TENSORFLOW_ROOT} EXTERNAL_DIR=${EXTERNAL_DIR} + +# Build with release and logs so that we can run the tests and get +# additional debugging info on failures. +readable_run make -f ${TENSORFLOW_ROOT}tensorflow/lite/micro/tools/make/Makefile clean TENSORFLOW_ROOT=${TENSORFLOW_ROOT} EXTERNAL_DIR=${EXTERNAL_DIR} +readable_run make -s -j8 -f ${TENSORFLOW_ROOT}tensorflow/lite/micro/tools/make/Makefile BUILD_TYPE=release_with_logs build TENSORFLOW_ROOT=${TENSORFLOW_ROOT} EXTERNAL_DIR=${EXTERNAL_DIR} +readable_run make -s -j8 -f ${TENSORFLOW_ROOT}tensorflow/lite/micro/tools/make/Makefile BUILD_TYPE=release_with_logs test TENSORFLOW_ROOT=${TENSORFLOW_ROOT} EXTERNAL_DIR=${EXTERNAL_DIR} +readable_run make -s -j8 -f ${TENSORFLOW_ROOT}tensorflow/lite/micro/tools/make/Makefile BUILD_TYPE=release_with_logs integration_tests TENSORFLOW_ROOT=${TENSORFLOW_ROOT} EXTERNAL_DIR=${EXTERNAL_DIR} + +# Next, make sure that the release build succeeds. 
+readable_run make -f ${TENSORFLOW_ROOT}tensorflow/lite/micro/tools/make/Makefile clean TENSORFLOW_ROOT=${TENSORFLOW_ROOT} EXTERNAL_DIR=${EXTERNAL_DIR} +readable_run make -j8 -f ${TENSORFLOW_ROOT}tensorflow/lite/micro/tools/make/Makefile BUILD_TYPE=release build TENSORFLOW_ROOT=${TENSORFLOW_ROOT} EXTERNAL_DIR=${EXTERNAL_DIR} \ No newline at end of file diff --git a/tensorflow/lite/micro/tools/ci_build/test_xtensa_fusion_f1.sh b/tensorflow/lite/micro/tools/ci_build/test_xtensa_fusion_f1.sh new file mode 100755 index 0000000..8416792 --- /dev/null +++ b/tensorflow/lite/micro/tools/ci_build/test_xtensa_fusion_f1.sh @@ -0,0 +1,71 @@ +#!/usr/bin/env bash +# Copyright 2021 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================== +# Called with following arguments: +# 1 - EXTERNAL or INTERNAL to signal how to run the script +# 2 - (optional) TENSORFLOW_ROOT: path to root of the TFLM tree (relative to directory from where the script is called). 
+# 3 - (optional) EXTERNAL_DIR: Path to the external directory that contains external code + +set -e +pwd + +TENSORFLOW_ROOT=${2} +EXTERNAL_DIR=${3} + +source ${TENSORFLOW_ROOT}tensorflow/lite/micro/tools/ci_build/helper_functions.sh + +readable_run make -f ${TENSORFLOW_ROOT}tensorflow/lite/micro/tools/make/Makefile clean TENSORFLOW_ROOT=${TENSORFLOW_ROOT} EXTERNAL_DIR=${EXTERNAL_DIR} + +# TODO(b/143904317): downloading first to allow for parallel builds. +readable_run make -f ${TENSORFLOW_ROOT}tensorflow/lite/micro/tools/make/Makefile third_party_downloads TENSORFLOW_ROOT=${TENSORFLOW_ROOT} EXTERNAL_DIR=${EXTERNAL_DIR} + +# optional command line parameter "INTERNAL" uses internal test code +if [[ ${1} == "INTERNAL" ]]; then +readable_run make -f ${TENSORFLOW_ROOT}tensorflow/lite/micro/tools/make/Makefile \ + TARGET=xtensa \ + TARGET_ARCH=hifi4 \ + OPTIMIZED_KERNEL_DIR=xtensa \ + XTENSA_CORE=F1_190305_swupgrade \ + TENSORFLOW_ROOT=${TENSORFLOW_ROOT} \ + EXTERNAL_DIR=${EXTERNAL_DIR} \ + build -j$(nproc) + +readable_run make -f ${TENSORFLOW_ROOT}tensorflow/lite/micro/tools/make/Makefile \ + TARGET=xtensa \ + TARGET_ARCH=hifi4 \ + OPTIMIZED_KERNEL_DIR=xtensa \ + XTENSA_CORE=F1_190305_swupgrade \ + TENSORFLOW_ROOT=${TENSORFLOW_ROOT} \ + EXTERNAL_DIR=${EXTERNAL_DIR} \ + test -j$(nproc) +else +readable_run make -f ${TENSORFLOW_ROOT}tensorflow/lite/micro/tools/make/Makefile \ + TARGET=xtensa \ + TARGET_ARCH=hifi4 \ + OPTIMIZED_KERNEL_DIR=xtensa \ + XTENSA_CORE=F1_190305_swupgrade \ + TENSORFLOW_ROOT=${TENSORFLOW_ROOT} \ + EXTERNAL_DIR=${EXTERNAL_DIR} \ + build -j$(nproc) + +readable_run make -f ${TENSORFLOW_ROOT}tensorflow/lite/micro/tools/make/Makefile \ + TARGET=xtensa \ + TARGET_ARCH=hifi4 \ + OPTIMIZED_KERNEL_DIR=xtensa \ + XTENSA_CORE=F1_190305_swupgrade \ + TENSORFLOW_ROOT=${TENSORFLOW_ROOT} \ + EXTERNAL_DIR=${EXTERNAL_DIR} \ + test -j$(nproc) +fi diff --git a/tensorflow/lite/micro/tools/ci_build/test_xtensa_hifi3z.sh 
b/tensorflow/lite/micro/tools/ci_build/test_xtensa_hifi3z.sh new file mode 100755 index 0000000..072087b --- /dev/null +++ b/tensorflow/lite/micro/tools/ci_build/test_xtensa_hifi3z.sh @@ -0,0 +1,98 @@ +#!/usr/bin/env bash +# Copyright 2021 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================== +# Called with following arguments: +# 1 - EXTERNAL or INTERNAL to signal how to run the script +# 2 - (optional) TENSORFLOW_ROOT: path to root of the TFLM tree (relative to directory from where the script is called). +# 3 - (optional) EXTERNAL_DIR: Path to the external directory that contains external code + +set -e +pwd + +TENSORFLOW_ROOT=${2} +EXTERNAL_DIR=${3} + +source ${TENSORFLOW_ROOT}tensorflow/lite/micro/tools/ci_build/helper_functions.sh + +readable_run make -f ${TENSORFLOW_ROOT}tensorflow/lite/micro/tools/make/Makefile clean TENSORFLOW_ROOT=${TENSORFLOW_ROOT} EXTERNAL_DIR=${EXTERNAL_DIR} + +# TODO(b/143904317): downloading first to allow for parallel builds. 
+readable_run make -f ${TENSORFLOW_ROOT}tensorflow/lite/micro/tools/make/Makefile third_party_downloads TENSORFLOW_ROOT=${TENSORFLOW_ROOT} EXTERNAL_DIR=${EXTERNAL_DIR} + +# optional command line parameter "INTERNAL" uses internal test code +if [[ ${1} == "INTERNAL" ]]; then + readable_run make -f ${TENSORFLOW_ROOT}tensorflow/lite/micro/tools/make/Makefile \ + TARGET=xtensa \ + TARGET_ARCH=hifi4 \ + OPTIMIZED_KERNEL_DIR=xtensa \ + XTENSA_CORE=HIFI_190304_swupgrade \ + TENSORFLOW_ROOT=${TENSORFLOW_ROOT} \ + EXTERNAL_DIR=${EXTERNAL_DIR} \ + build -j$(nproc) + + readable_run make -f ${TENSORFLOW_ROOT}tensorflow/lite/micro/tools/make/Makefile \ + TARGET=xtensa \ + TARGET_ARCH=hifi4 \ + OPTIMIZED_KERNEL_DIR=xtensa \ + XTENSA_CORE=HIFI_190304_swupgrade \ + TENSORFLOW_ROOT=${TENSORFLOW_ROOT} \ + EXTERNAL_DIR=${EXTERNAL_DIR} \ + test -j$(nproc) + + readable_run make -f ${TENSORFLOW_ROOT}tensorflow/lite/micro/tools/make/Makefile \ + TARGET=xtensa \ + TARGET_ARCH=hifi4 \ + OPTIMIZED_KERNEL_DIR=xtensa \ + XTENSA_CORE=HIFI_190304_swupgrade \ + TENSORFLOW_ROOT=${TENSORFLOW_ROOT} \ + EXTERNAL_DIR=${EXTERNAL_DIR} \ + test_integration_tests_seanet_conv -j$(nproc) + + readable_run make -f ${TENSORFLOW_ROOT}tensorflow/lite/micro/tools/make/Makefile \ + TARGET=xtensa \ + TARGET_ARCH=hifi4 \ + OPTIMIZED_KERNEL_DIR=xtensa \ + XTENSA_CORE=HIFI_190304_swupgrade \ + TENSORFLOW_ROOT=${TENSORFLOW_ROOT} \ + EXTERNAL_DIR=${EXTERNAL_DIR} \ + test_integration_tests_seanet_add_test -j$(nproc) + + readable_run make -f ${TENSORFLOW_ROOT}tensorflow/lite/micro/tools/make/Makefile \ + TARGET=xtensa \ + TARGET_ARCH=hifi4 \ + OPTIMIZED_KERNEL_DIR=xtensa \ + XTENSA_CORE=HIFI_190304_swupgrade \ + TENSORFLOW_ROOT=${TENSORFLOW_ROOT} \ + EXTERNAL_DIR=${EXTERNAL_DIR} \ + test_integration_tests_seanet_leaky_relu_test -j$(nproc) +else + readable_run make -f ${TENSORFLOW_ROOT}tensorflow/lite/micro/tools/make/Makefile \ + TARGET=xtensa \ + TARGET_ARCH=hifi4 \ + OPTIMIZED_KERNEL_DIR=xtensa \ + 
XTENSA_CORE=HIFI_190304_swupgrade \ + TENSORFLOW_ROOT=${TENSORFLOW_ROOT} \ + EXTERNAL_DIR=${EXTERNAL_DIR} \ + build -j$(nproc) + + readable_run make -f ${TENSORFLOW_ROOT}tensorflow/lite/micro/tools/make/Makefile \ + TARGET=xtensa \ + TARGET_ARCH=hifi4 \ + OPTIMIZED_KERNEL_DIR=xtensa \ + XTENSA_CORE=HIFI_190304_swupgrade \ + TENSORFLOW_ROOT=${TENSORFLOW_ROOT} \ + EXTERNAL_DIR=${EXTERNAL_DIR} \ + test -j$(nproc) +fi \ No newline at end of file diff --git a/tensorflow/lite/micro/tools/ci_build/test_xtensa_hifi5.sh b/tensorflow/lite/micro/tools/ci_build/test_xtensa_hifi5.sh new file mode 100755 index 0000000..82a04a9 --- /dev/null +++ b/tensorflow/lite/micro/tools/ci_build/test_xtensa_hifi5.sh @@ -0,0 +1,49 @@ +#!/usr/bin/env bash +# Copyright 2021 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================== +# Called with following arguments: +# 1 - (optional) TENSORFLOW_ROOT: path to root of the TFLM tree (relative to directory from where the script is called). 
+# 2 - (optional) EXTERNAL_DIR: Path to the external directory that contains external code + +set -e +pwd + +TENSORFLOW_ROOT=${1} +EXTERNAL_DIR=${2} + +source ${TENSORFLOW_ROOT}tensorflow/lite/micro/tools/ci_build/helper_functions.sh + +readable_run make -f ${TENSORFLOW_ROOT}tensorflow/lite/micro/tools/make/Makefile clean TENSORFLOW_ROOT=${TENSORFLOW_ROOT} EXTERNAL_DIR=${EXTERNAL_DIR} + +# TODO(b/143904317): downloading first to allow for parallel builds. +readable_run make -f ${TENSORFLOW_ROOT}tensorflow/lite/micro/tools/make/Makefile third_party_downloads TENSORFLOW_ROOT=${TENSORFLOW_ROOT} EXTERNAL_DIR=${EXTERNAL_DIR} + +readable_run make -f ${TENSORFLOW_ROOT}tensorflow/lite/micro/tools/make/Makefile \ + TARGET=xtensa \ + TARGET_ARCH=hifi5 \ + OPTIMIZED_KERNEL_DIR=xtensa \ + XTENSA_CORE=PRD_H5_RDO_07_01_2022 \ + TENSORFLOW_ROOT=${TENSORFLOW_ROOT} \ + EXTERNAL_DIR=${EXTERNAL_DIR} \ + build -j$(nproc) + +readable_run make -f ${TENSORFLOW_ROOT}tensorflow/lite/micro/tools/make/Makefile \ + TARGET=xtensa \ + TARGET_ARCH=hifi5 \ + OPTIMIZED_KERNEL_DIR=xtensa \ + XTENSA_CORE=PRD_H5_RDO_07_01_2022 \ + TENSORFLOW_ROOT=${TENSORFLOW_ROOT} \ + EXTERNAL_DIR=${EXTERNAL_DIR} \ + test -j$(nproc) diff --git a/tensorflow/lite/micro/tools/ci_build/test_xtensa_hifimini.sh b/tensorflow/lite/micro/tools/ci_build/test_xtensa_hifimini.sh new file mode 100755 index 0000000..abfe651 --- /dev/null +++ b/tensorflow/lite/micro/tools/ci_build/test_xtensa_hifimini.sh @@ -0,0 +1,50 @@ +#!/usr/bin/env bash +# Copyright 2021 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================== +# Called with following arguments: +# 1 - EXTERNAL or INTERNAL to signal how to run the script +# 2 - (optional) TENSORFLOW_ROOT: path to root of the TFLM tree (relative to directory from where the script is called). +# 3 - (optional) EXTERNAL_DIR: Path to the external directory that contains external code + +set -e +pwd + +TENSORFLOW_ROOT=${1} +EXTERNAL_DIR=${2} + +source ${TENSORFLOW_ROOT}tensorflow/lite/micro/tools/ci_build/helper_functions.sh + +readable_run make -f ${TENSORFLOW_ROOT}tensorflow/lite/micro/tools/make/Makefile clean TENSORFLOW_ROOT=${TENSORFLOW_ROOT} EXTERNAL_DIR=${EXTERNAL_DIR} + +# TODO(b/143904317): downloading first to allow for parallel builds. 
+readable_run make -f ${TENSORFLOW_ROOT}tensorflow/lite/micro/tools/make/Makefile third_party_downloads TENSORFLOW_ROOT=${TENSORFLOW_ROOT} EXTERNAL_DIR=${EXTERNAL_DIR} + +readable_run make -f ${TENSORFLOW_ROOT}tensorflow/lite/micro/tools/make/Makefile \ + TARGET=xtensa \ + TARGET_ARCH=hifimini \ + OPTIMIZED_KERNEL_DIR=xtensa \ + XTENSA_CORE=mini1m1m_RG \ + TENSORFLOW_ROOT=${TENSORFLOW_ROOT} \ + EXTERNAL_DIR=${EXTERNAL_DIR} \ + build -j$(nproc) + +readable_run make -f ${TENSORFLOW_ROOT}tensorflow/lite/micro/tools/make/Makefile \ + TARGET=xtensa \ + TARGET_ARCH=hifimini \ + OPTIMIZED_KERNEL_DIR=xtensa \ + XTENSA_CORE=mini1m1m_RG \ + TENSORFLOW_ROOT=${TENSORFLOW_ROOT} \ + EXTERNAL_DIR=${EXTERNAL_DIR} \ + test -j$(nproc) \ No newline at end of file diff --git a/tensorflow/lite/micro/tools/ci_build/test_xtensa_vision_p6.sh b/tensorflow/lite/micro/tools/ci_build/test_xtensa_vision_p6.sh new file mode 100755 index 0000000..a2744b5 --- /dev/null +++ b/tensorflow/lite/micro/tools/ci_build/test_xtensa_vision_p6.sh @@ -0,0 +1,57 @@ +#!/usr/bin/env bash +# Copyright 2021 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================== +# Called with following arguments: +# 1 - RUN_TESTS/RUN_NO_TESTS: To signal +# 2 - (optional) TENSORFLOW_ROOT: path to root of the TFLM tree (relative to directory from where the script is called). 
+# 3 - (optional) EXTERNAL_DIR: Path to the external directory that contains external code + +set -e +pwd + +TENSORFLOW_ROOT=${2} +EXTERNAL_DIR=${3} + +source ${TENSORFLOW_ROOT}tensorflow/lite/micro/tools/ci_build/helper_functions.sh + +readable_run make -f ${TENSORFLOW_ROOT}tensorflow/lite/micro/tools/make/Makefile clean TENSORFLOW_ROOT=${TENSORFLOW_ROOT} EXTERNAL_DIR=${EXTERNAL_DIR} + +# TODO(b/143904317): downloading first to allow for parallel builds. +readable_run make -f ${TENSORFLOW_ROOT}tensorflow/lite/micro/tools/make/Makefile third_party_downloads TENSORFLOW_ROOT=${TENSORFLOW_ROOT} EXTERNAL_DIR=${EXTERNAL_DIR} + +readable_run make -f ${TENSORFLOW_ROOT}tensorflow/lite/micro/tools/make/Makefile \ + TARGET=xtensa \ + TARGET_ARCH=vision_p6 \ + OPTIMIZED_KERNEL_DIR=xtensa \ + XTENSA_CORE=P6_200528 \ + TENSORFLOW_ROOT=${TENSORFLOW_ROOT} \ + EXTERNAL_DIR=${EXTERNAL_DIR} \ + build -j$(nproc) + + +# Since we currently do not have optimized kernel implementations for vision_p6, +# running the tests (in particular person_detection_int8) takes a very long +# time. So, we have changed the default for this script to only perform a build +# and added an option to run all the tests when that is feasible. +if [[ ${1} == "RUN_TESTS" ]]; then + readable_run make -f ${TENSORFLOW_ROOT}tensorflow/lite/micro/tools/make/Makefile \ + TARGET=xtensa \ + TARGET_ARCH=vision_p6 \ + OPTIMIZED_KERNEL_DIR=xtensa \ + XTENSA_CORE=P6_200528 \ + TENSORFLOW_ROOT=${TENSORFLOW_ROOT} \ + EXTERNAL_DIR=${EXTERNAL_DIR} \ + test -j$(nproc) +fi diff --git a/tensorflow/lite/micro/tools/dev_setup/pre-push.tflm b/tensorflow/lite/micro/tools/dev_setup/pre-push.tflm new file mode 100755 index 0000000..140f1aa --- /dev/null +++ b/tensorflow/lite/micro/tools/dev_setup/pre-push.tflm @@ -0,0 +1,17 @@ +#!/bin/sh +# Copyright 2019 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================== + +tensorflow/lite/micro/tools/ci_build/test_code_style.sh diff --git a/tensorflow/lite/micro/tools/gen_micro_mutable_op_resolver/BUILD b/tensorflow/lite/micro/tools/gen_micro_mutable_op_resolver/BUILD new file mode 100644 index 0000000..276e9c2 --- /dev/null +++ b/tensorflow/lite/micro/tools/gen_micro_mutable_op_resolver/BUILD @@ -0,0 +1,52 @@ +load("@tflm_pip_deps//:requirements.bzl", "requirement") + +package( + default_visibility = ["//:__subpackages__"], + licenses = ["notice"], +) + +py_binary( + name = "generate_micro_mutable_op_resolver_from_model", + srcs = [ + "generate_micro_mutable_op_resolver_from_model.py", + ], + data = [ + "templates/micro_mutable_op_resolver.h.mako", + ], + python_version = "PY3", + srcs_version = "PY3", + deps = [ + "@absl_py//absl:app", + "@absl_py//absl/flags", + requirement("tensorflow-cpu"), + requirement("mako"), + "//tensorflow/lite/python:schema_py", + "//tensorflow/lite/python:schema_util", + "//tensorflow/lite/tools:flatbuffer_utils", + "//tensorflow/lite/tools:visualize", + ], +) + +py_binary( + name = "generate_micro_mutable_op_resolver_from_model_test", + srcs = [ + "generate_micro_mutable_op_resolver_from_model_test.py", + ], + data = [ + "templates/BUILD.mako", + "templates/micro_mutable_op_resolver.h.mako", + "templates/micro_mutable_op_resolver_test.cc.mako", + ], + python_version = "PY3", + srcs_version = "PY3", + deps = [ + "@absl_py//absl:app", + "@absl_py//absl/flags", + requirement("tensorflow-cpu"), + 
requirement("mako"), + "//tensorflow/lite/micro/tools:generate_test_for_model", + "//tensorflow/lite/python:schema_py", + "//tensorflow/lite/python:schema_util", + "//tensorflow/lite/tools:flatbuffer_utils", + ], +) diff --git a/tensorflow/lite/micro/tools/gen_micro_mutable_op_resolver/README.md b/tensorflow/lite/micro/tools/gen_micro_mutable_op_resolver/README.md new file mode 100644 index 0000000..95a0c43 --- /dev/null +++ b/tensorflow/lite/micro/tools/gen_micro_mutable_op_resolver/README.md @@ -0,0 +1,84 @@ +# Generate Micro Mutable Op Resolver from a model + +The MicroMutableOpResolver includes the operators explicitly specified in source code. +This generally requires manually finding out which operators are used in the model through the use of a visualization tool, which may be impractical in some cases. +This script will automatically generate a MicroMutableOpResolver with only the used operators for a given model or set of models. + +## How to run + +bazel run tensorflow/lite/micro/tools/gen_micro_mutable_op_resolver:generate_micro_mutable_op_resolver_from_model -- \ + --common_tflite_path= \ + --input_tflite_files= --output_dir= + +Note that if having only one tflite as input, the final output directory will be /. + +Example: + +``` +bazel run tensorflow/lite/micro/tools/gen_micro_mutable_op_resolver:generate_micro_mutable_op_resolver_from_model -- \ + --common_tflite_path=/tmp/model_dir \ + --input_tflite_files=person_detect.tflite --output_dir=/tmp/gen_dir +``` + +A header file called gen_micro_mutable_op_resolver.h will be created in /tmp/gen_dir/person_detect. + +Example: + +``` +bazel run tensorflow/lite/micro/tools/gen_micro_mutable_op_resolver:generate_micro_mutable_op_resolver_from_model -- \ + --common_tflite_path=/tmp/model_dir \ + --input_tflite_files=person_detect.tflite,keyword_scrambled.tflite --output_dir=/tmp/gen_dir +``` +A header file called gen_micro_mutable_op_resolver.h will be created in /tmp/gen_dir.
+ +Note that with multiple tflite files as input, the files must be placed in the same common directory. + +The generated header file can then be included in the application and used like below: + +``` +tflite::MicroMutableOpResolver op_resolver = get_resolver(); +``` + +## Verifying the content of the generated header file + +This is just to test the actual script that generates the micro mutable ops resolver header for a given model. +So that the actual list of operators corresponds to a given model and that the syntax of the header is correct. + +For this another script can be used to verify the generated header file: + +``` +bazel run tensorflow/lite/micro/tools/gen_micro_mutable_op_resolver:generate_micro_mutable_op_resolver_from_model_test -- \ + --input_tflite_file= --output_dir= +``` + +This script verifies a single model at a time. It will generate a small inference testing app that is using the generated header file, which can then be executed and tested as a final step. +Because of this the specified output path will be appended with the name of the model so that the generated test is named after the model. +In other words the final output directory will be /. + +The essence of this is that different output paths need to be specified for the actual header script and the actual test script. + +So there will be 3 steps, +1) Generate the micro mutable specifying e.g. output path gen_dir/ +2) Generate the micro mutable specifying e.g. 
output path gen_dir +3) Run the generated test + +Example assuming /tmp/my_model.tflite exists: + +``` +# Step 1 generates header to gen_dir/my_model +bazel run tensorflow/lite/micro/tools/gen_micro_mutable_op_resolver:generate_micro_mutable_op_resolver_from_model -- \ + --common_tflite_path=/tmp/ \ + --input_tflite_files=my_model.tflite --output_dir=$(realpath gen_dir/my_model) + +# Step 2 generates test app using header from step 1 to gen_dir/my_model since my_model is appended +bazel run tensorflow/lite/micro/tools/gen_micro_mutable_op_resolver:generate_micro_mutable_op_resolver_from_model_test -- \ + --input_tflite_file=/tmp/my_model.tflite --output_dir=$(realpath gen_dir) --verify_output=1 + +# Step 3 runs the generated my_model test +bazel run gen_dir/my_model:micro_mutable_op_resolver_test + +``` + +Note1: Bazel expects absolute paths. +Note2: By default the inference model test will run without any generated input or verifying the output. Verifying output can be done with --verify_output=1, which is done in the example above. +Note3: Depending on the size of the model the arena size may need to be increased. Arena size can be set with --arena_size=. diff --git a/tensorflow/lite/micro/tools/gen_micro_mutable_op_resolver/generate_micro_mutable_op_resolver_from_model.py b/tensorflow/lite/micro/tools/gen_micro_mutable_op_resolver/generate_micro_mutable_op_resolver_from_model.py new file mode 100644 index 0000000..de583da --- /dev/null +++ b/tensorflow/lite/micro/tools/gen_micro_mutable_op_resolver/generate_micro_mutable_op_resolver_from_model.py @@ -0,0 +1,186 @@ +# Copyright 2023 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================== +"""This tool generates a header with Micro Mutable Op Resolver code for a given + model. See README.md for more info. +""" + +import os +import re + +from absl import app +from absl import flags +from mako import template + +from tflite_micro.tensorflow.lite.tools import visualize as visualize + +TEMPLATE_DIR = os.path.join(os.path.dirname(__file__), 'templates') +TEMPLATE_DIR = os.path.abspath(TEMPLATE_DIR) + +FLAGS = flags.FLAGS +flags.DEFINE_string( + 'common_tflite_path', None, + 'Common path to tflite files. This need to be an absolute path.' + 'This would typically be the path to the directory where the models reside.' +) +flags.DEFINE_list( + 'input_tflite_files', None, + 'Relative path name list of the input TFLite files.' + 'This would be relative to the common path.' + 'This would typically be the name(s) of the tflite file(s).') +flags.DEFINE_string('output_dir', None, 'Directory to output generated files.') +flags.DEFINE_string( + 'verify_op_list_against_header', None, + 'Take micro_mutable_op_resolver.h as input and verifies that all generated operator calls are there.' +) + +flags.mark_flag_as_required('common_tflite_path') +flags.mark_flag_as_required('input_tflite_files') +flags.mark_flag_as_required('output_dir') + + +def ParseString(word): + """Converts a flatbuffer operator string to a format suitable for Micro + Mutable Op Resolver. Example: CONV_2D --> AddConv2D.""" + + # Edge case for AddDetectionPostprocess(). 
+ # The custom code is TFLite_Detection_PostProcess. + word = word.replace('TFLite', '') + + word_split = re.split('_|-', word) + formated_op_string = '' + for part in word_split: + if len(part) > 1: + if part[0].isalpha(): + formated_op_string += part[0].upper() + part[1:].lower() + else: + formated_op_string += part.upper() + else: + formated_op_string += part.upper() + + # Edge case for AddUnidirectionalSequenceLSTM(). + formated_op_string = formated_op_string.replace('Lstm', 'LSTM') + + return 'Add' + formated_op_string + + +def GenerateMicroMutableOpsResolverHeaderFile(operators, name_of_model, + output_dir): + """Generates Micro Mutable Op Resolver code based on a template.""" + + number_of_ops = len(operators) + outfile = 'micro_mutable_op_resolver.h' + + template_file_path = os.path.join(TEMPLATE_DIR, outfile + '.mako') + build_template = template.Template(filename=template_file_path) + with open(output_dir + '/gen_' + outfile, 'w') as file_obj: + key_values_in_template = { + 'model': name_of_model, + 'number_of_ops': number_of_ops, + 'operators': operators + } + file_obj.write(build_template.render(**key_values_in_template)) + + +def GetModelOperatorsAndActivation(model_path): + """Extracts a set of operators from a tflite model.""" + + custom_op_found = False + operators_and_activations = set() + + with open(model_path, 'rb') as f: + data_bytes = bytearray(f.read()) + + data = visualize.CreateDictFromFlatbuffer(data_bytes) + + for op_code in data["operator_codes"]: + if op_code['custom_code'] is None: + op_code["builtin_code"] = max(op_code["builtin_code"], + op_code["deprecated_builtin_code"]) + else: + custom_op_found = True + operators_and_activations.add( + visualize.NameListToString(op_code['custom_code'])) + + for op_code in data["operator_codes"]: + # Custom operator already added. 
+ if custom_op_found and visualize.BuiltinCodeToName( + op_code['builtin_code']) == "CUSTOM": + continue + + operators_and_activations.add( + visualize.BuiltinCodeToName(op_code['builtin_code'])) + + return operators_and_activations + + +def VerifyOpList(op_list, header): + """Make sure operators in list are not missing in header file .""" + + supported_op_list = [] + with open(header, 'r') as f: + for l in f.readlines(): + if "TfLiteStatus Add" in l: + op = l.strip().split(' ')[1].split('(')[0] + supported_op_list.append(op) + + for op in op_list: + if op not in supported_op_list: + print(f'{op} not supported by TFLM') + return True + + return False + + +def main(_): + model_names = [] + final_operator_list = [] + merged_operator_list = [] + + common_model_path = FLAGS.common_tflite_path + relative_model_paths = FLAGS.input_tflite_files + + for relative_model_path in relative_model_paths: + full_model_path = f"{common_model_path}/{relative_model_path}" + operators = GetModelOperatorsAndActivation(full_model_path) + model_name = full_model_path.split('/')[-1] + model_names.append(model_name) + + parsed_operator_list = [] + for op in sorted(list(operators)): + parsed_operator_list.append(ParseString(op)) + + merged_operator_list = merged_operator_list + parsed_operator_list + + number_models = len(model_names) + if number_models > 1: + model_name = ", ".join(model_names) + + [ + final_operator_list.append(operator) for operator in merged_operator_list + if operator not in final_operator_list + ] + + if FLAGS.verify_op_list_against_header and VerifyOpList( + final_operator_list, FLAGS.verify_op_list_against_header): + return True + + os.makedirs(FLAGS.output_dir, exist_ok=True) + GenerateMicroMutableOpsResolverHeaderFile(final_operator_list, model_name, + FLAGS.output_dir) + return False + + +if __name__ == '__main__': + app.run(main) diff --git a/tensorflow/lite/micro/tools/gen_micro_mutable_op_resolver/generate_micro_mutable_op_resolver_from_model_test.py 
b/tensorflow/lite/micro/tools/gen_micro_mutable_op_resolver/generate_micro_mutable_op_resolver_from_model_test.py new file mode 100644 index 0000000..5e97c63 --- /dev/null +++ b/tensorflow/lite/micro/tools/gen_micro_mutable_op_resolver/generate_micro_mutable_op_resolver_from_model_test.py @@ -0,0 +1,122 @@ +# Copyright 2022 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================== + +import os +import shutil + +from absl import app +from absl import flags +from mako import template +from tflite_micro.tensorflow.lite.micro.tools import generate_test_for_model + +TEMPLATE_DIR = os.path.join(os.path.dirname(__file__), 'templates') +TEMPLATE_DIR = os.path.abspath(TEMPLATE_DIR) + +FLAGS = flags.FLAGS + +flags.DEFINE_string('input_tflite_file', None, + 'Full path name to the input TFLite file.') +flags.DEFINE_string( + 'output_dir', None, 'Directory to output generated files. \ + Note that final output will be in FLAGS.output_dir/. 
\ + Where will come from FLAGS.input_tflite_file.') +flags.DEFINE_integer('arena_size', 1024 * 136, 'Size of arena') +flags.DEFINE_boolean('verify_output', False, + 'Verify output or just run model.') + +flags.mark_flag_as_required('input_tflite_file') +flags.mark_flag_as_required('output_dir') + + +class MicroMutableOpTestGenerator(generate_test_for_model.TestDataGenerator): + + def __init__(self, output_dir, model_path, verify_output, arena_size): + super().__init__(output_dir, [model_path], [0]) # Third argument not used. + self.verify_output = verify_output + self.arena_size = arena_size + + self.target = model_path.split('/')[-1].split('.')[0] + self.target_with_path = model_path.split('tflite_micro/')[-1]. \ + split('tflite-micro/')[-1].split('.')[0] + + # Only int8 models supported + self.input_type = 'int8' + self.output_type = 'int8' + self.input_types = [self.input_type] + + def generate_golden(self): + if not self.verify_output: + return + super().generate_golden_single_in_single_out() + + def generate_test(self, template_dir, template_file, out_file): + template_file_path = os.path.join(template_dir, template_file) + build_template = template.Template(filename=template_file_path) + path_to_target = self.target_with_path.split('/' + self.target)[0] + \ + '/' + self.target + with open(self.output_dir + '/' + out_file, 'w') as file_obj: + key_values_in_template = { + 'arena_size': self.arena_size, + 'verify_output': int(self.verify_output), + 'path_to_target': path_to_target, + 'target': self.target, + 'target_with_path': self.target_with_path, + 'input_dtype': self.input_type, + 'output_dtype': self.output_type + } + file_obj.write(build_template.render(**key_values_in_template)) + + def generate_build_file(self, template_dir): + template_file_path = os.path.join(template_dir, 'BUILD.mako') + build_template = template.Template(filename=template_file_path) + with open(self.output_dir + '/BUILD', 'w') as file_obj: + key_values_in_template = { + 
'verify_output': self.verify_output, + 'target': self.target, + 'input_dtype': self.input_type, + 'output_dtype': self.output_type + } + file_obj.write(build_template.render(**key_values_in_template)) + + +def main(_): + model_path = FLAGS.input_tflite_file + model_name = model_path.split('/')[-1] + base_model_name = model_name.split('.')[0] + name_of_make_target = 'generated_micro_mutable_op_resolver_' + base_model_name + + out_dir = FLAGS.output_dir + '/' + base_model_name + os.makedirs(out_dir, exist_ok=True) + + # Copy model to out dir to get the Mako generation right + new_model_path = out_dir + '/' + model_name + shutil.copyfile(model_path, new_model_path) + + data_generator = MicroMutableOpTestGenerator(out_dir, new_model_path, + FLAGS.verify_output, + FLAGS.arena_size) + data_generator.generate_golden() + data_generator.generate_build_file(TEMPLATE_DIR) + data_generator.generate_makefile( + test_file='micro_mutable_op_resolver_test.cc', + src_prefix=name_of_make_target) + data_generator.generate_test( + TEMPLATE_DIR, + template_file='micro_mutable_op_resolver_test.cc.mako', + out_file='micro_mutable_op_resolver_test.cc') + + +if __name__ == '__main__': + app.run(main) diff --git a/tensorflow/lite/micro/tools/gen_micro_mutable_op_resolver/templates/BUILD.mako b/tensorflow/lite/micro/tools/gen_micro_mutable_op_resolver/templates/BUILD.mako new file mode 100644 index 0000000..1b8302e --- /dev/null +++ b/tensorflow/lite/micro/tools/gen_micro_mutable_op_resolver/templates/BUILD.mako @@ -0,0 +1,87 @@ +# Description: +# generated micro mutable op resolver test for a given model +load( + "//tensorflow/lite/micro:build_def.bzl", + "generate_cc_arrays", + "micro_copts", +) + +package( + default_visibility = ["//visibility:public"], + # Disabling layering_check because of http://b/177257332 + features = ["-layering_check"], + licenses = ["notice"], +) + + +generate_cc_arrays(name = "generated_${target}_model_data_cc",src = "${target}.tflite",out = 
"${target}_model_data.cc",) +generate_cc_arrays(name = "generated_${target}_model_data_hdr",src = "${target}.tflite",out = "${target}_model_data.h",) + +% if verify_output: +generate_cc_arrays( + name = "generated_${target}_input_${input_dtype}_test_data_cc", + src = "${target}_input0_${input_dtype}.csv", + out = "${target}_input_${input_dtype}_test_data.cc", +) + +generate_cc_arrays( + name = "generated_${target}_input_${input_dtype}_test_data_hdr", + src = "${target}_input0_${input_dtype}.csv", + out = "${target}_input_${input_dtype}_test_data.h", +) +% endif + +generate_cc_arrays( + name = "generated_${target}_golden_${output_dtype}_test_data_cc", + src = "${target}_golden_${output_dtype}.csv", + out = "${target}_golden_${output_dtype}_test_data.cc", +) + +generate_cc_arrays( + name = "generated_${target}_golden_${output_dtype}_test_data_hdr", + src = "${target}_golden_${output_dtype}.csv", + out = "${target}_golden_${output_dtype}_test_data.h", +) + +cc_library( + name = "models_and_testdata", + srcs = [ + "generated_${target}_model_data_cc", +% if verify_output: + "generated_${target}_input_${input_dtype}_test_data_cc", + "generated_${target}_golden_${output_dtype}_test_data_cc", +% endif + ], + hdrs = [ + "generated_${target}_model_data_hdr", +% if verify_output: + "generated_${target}_input_${input_dtype}_test_data_hdr", + "generated_${target}_golden_${output_dtype}_test_data_hdr", +% endif + ], + copts = micro_copts(), +) + +cc_library( + name = "gen_micro_op_resolver", + hdrs = ["gen_micro_mutable_op_resolver.h",], + visibility = ["//visibility:public"], +) + +cc_test( + name = "micro_mutable_op_resolver_test", + srcs = [ + "micro_mutable_op_resolver_test.cc", + ], + copts = micro_copts(), + deps = [ + ":gen_micro_op_resolver", + ":models_and_testdata", + "//tensorflow/lite/micro:micro_framework", + "//tensorflow/lite/micro:micro_log", + "//tensorflow/lite/micro:micro_resource_variable", + "//tensorflow/lite/micro:op_resolvers", + 
"//tensorflow/lite/micro:recording_allocators", + "//tensorflow/lite/micro/testing:micro_test", + ], +) diff --git a/tensorflow/lite/micro/tools/gen_micro_mutable_op_resolver/templates/micro_mutable_op_resolver.h.mako b/tensorflow/lite/micro/tools/gen_micro_mutable_op_resolver/templates/micro_mutable_op_resolver.h.mako new file mode 100644 index 0000000..5bbab04 --- /dev/null +++ b/tensorflow/lite/micro/tools/gen_micro_mutable_op_resolver/templates/micro_mutable_op_resolver.h.mako @@ -0,0 +1,33 @@ +/* Copyright 2023 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +// Generated based on ${model}. 
+ +#pragma once + +#include "tensorflow/lite/micro/micro_mutable_op_resolver.h" + +constexpr int kNumberOperators = ${number_of_ops}; + +inline tflite::MicroMutableOpResolver get_resolver() +{ + tflite::MicroMutableOpResolver micro_op_resolver; + +% for operator in operators: + micro_op_resolver.${operator}(); +% endfor + + return micro_op_resolver; +} diff --git a/tensorflow/lite/micro/tools/gen_micro_mutable_op_resolver/templates/micro_mutable_op_resolver_test.cc.mako b/tensorflow/lite/micro/tools/gen_micro_mutable_op_resolver/templates/micro_mutable_op_resolver_test.cc.mako new file mode 100644 index 0000000..8f67ff3 --- /dev/null +++ b/tensorflow/lite/micro/tools/gen_micro_mutable_op_resolver/templates/micro_mutable_op_resolver_test.cc.mako @@ -0,0 +1,112 @@ +/* Copyright 2023 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/ + +#define VERIFY_OUTPUT ${verify_output} + +#include + +#include "tensorflow/lite/c/common.h" +#include "tensorflow/lite/micro/micro_log.h" +#include "tensorflow/lite/micro/micro_profiler.h" +#include "tensorflow/lite/micro/recording_micro_allocator.h" +#include "tensorflow/lite/micro/recording_micro_interpreter.h" +#include "tensorflow/lite/micro/system_setup.h" +#include "tensorflow/lite/micro/testing/micro_test.h" + +#include "${path_to_target}/gen_micro_mutable_op_resolver.h" + +#include "${target_with_path}_model_data.h" +#if VERIFY_OUTPUT +#include "${target_with_path}_input_${input_dtype}_test_data.h" +#include "${target_with_path}_golden_${output_dtype}_test_data.h" +#endif + +constexpr size_t kTensorArenaSize = ${arena_size}; +uint8_t tensor_arena[kTensorArenaSize]; + +namespace tflite { +namespace micro { +namespace { + +void RunModel(const uint8_t* model, + const int8_t* input, + const uint32_t input_size, + const int8_t* golden, + const uint32_t golden_size, + const char* name) { + InitializeTarget(); + MicroProfiler profiler; + tflite::MicroMutableOpResolver op_resolver = get_resolver(); + + MicroInterpreter interpreter(GetModel(model), op_resolver, tensor_arena, + kTensorArenaSize, + nullptr, &profiler); + interpreter.AllocateTensors(); +#if VERIFY_OUTPUT + TfLiteTensor* input_tensor0 = interpreter.input(0); + TF_LITE_MICRO_EXPECT_EQ(input_tensor0->bytes, + input_size * sizeof( + int8_t)); + memcpy(interpreter.input(0)->data.raw, + input, + input_tensor0->bytes); + if (kTfLiteOk != interpreter.Invoke()) { + TF_LITE_MICRO_EXPECT(false); + return; + } +#endif + profiler.Log(); + MicroPrintf(""); + +#if VERIFY_OUTPUT + TfLiteTensor* output_tensor = interpreter.output(0); + TF_LITE_MICRO_EXPECT_EQ(output_tensor->bytes, + golden_size * sizeof(int8_t)); + int8_t* output = ::tflite::GetTensorData(output_tensor); + for (uint32_t i = 0; i < golden_size; i++) { + // 
TODO(b/205046520): Better understand why TfLite and TFLM can sometimes be + // off by 1. + TF_LITE_MICRO_EXPECT_NEAR(golden[i], output[i], 1); + } +#endif +} + +} // namespace +} // namespace micro +} // namespace tflite + +TF_LITE_MICRO_TESTS_BEGIN + +TF_LITE_MICRO_TEST(gen_micro_mutable_from_${target}_test) { +#if VERIFY_OUTPUT +tflite::micro::RunModel( +g_${target}_model_data, +g_${target}_input0_${input_dtype}_test_data, +g_${target}_input0_${input_dtype}_test_data_size, +g_${target}_golden_${output_dtype}_test_data, +g_${target}_golden_${output_dtype}_test_data_size, +"${target} test"); +#else +tflite::micro::RunModel( +g_${target}_model_data, +nullptr, +0, +nullptr, +0, +"${target} test"); +#endif +} + +TF_LITE_MICRO_TESTS_END diff --git a/tensorflow/lite/micro/tools/generate_cc_arrays.py b/tensorflow/lite/micro/tools/generate_cc_arrays.py new file mode 100644 index 0000000..4d1e54c --- /dev/null +++ b/tensorflow/lite/micro/tools/generate_cc_arrays.py @@ -0,0 +1,175 @@ +# Copyright 2021 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# ============================================================================== +"""Library for converting .tflite, .bmp and .wav files to cc arrays.""" + +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function + +import argparse +import os +import struct +import wave +import numpy as np + +from PIL import Image + + +def generate_file(out_fname, array_name, array_type, array_contents, size): + """Write an array of values to a CC or header file.""" + os.makedirs(os.path.dirname(out_fname), exist_ok=True) + if out_fname.endswith('.cc'): + out_cc_file = open(out_fname, 'w') + out_cc_file.write('#include \n\n') + out_cc_file.write('#include "{}"\n\n'.format( + out_fname.split('genfiles/')[-1].replace('.cc', '.h'))) + out_cc_file.write('const unsigned int {}_size = {};\n'.format( + array_name, str(size))) + out_cc_file.write('alignas(16) const {} {}[] = {{'.format( + array_type, array_name)) + out_cc_file.write(array_contents) + out_cc_file.write('};\n') + out_cc_file.close() + elif out_fname.endswith('.h'): + out_hdr_file = open(out_fname, 'w') + out_hdr_file.write('#include \n\n') + out_hdr_file.write( + 'extern const unsigned int {}_size;\n'.format(array_name)) + out_hdr_file.write('extern const {} {}[];\n'.format( + array_type, array_name)) + out_hdr_file.close() + else: + raise ValueError('generated file must be end with .cc or .h') + + +def bytes_to_hexstring(buffer): + """Convert a byte array to a hex string.""" + hex_values = [hex(buffer[i]) for i in range(len(buffer))] + out_string = ','.join(hex_values) + return out_string + + +def generate_array(input_fname): + """Return array size and array of data from the input file.""" + if input_fname.endswith('.tflite'): + with open(input_fname, 'rb') as input_file: + buffer = input_file.read() + size = len(buffer) + out_string = bytes_to_hexstring(buffer) + return [size, out_string] + elif input_fname.endswith('.bmp'): + img = Image.open(input_fname, mode='r') + 
image_bytes = img.tobytes() + size = len(image_bytes) + out_string = bytes_to_hexstring(image_bytes) + return [size, out_string] + elif input_fname.endswith('.wav'): + wav_file = wave.open(input_fname, mode='r') + num_channels = wav_file.getnchannels() + n_frames = wav_file.getnframes() + frames = wav_file.readframes(n_frames) + samples = struct.unpack('<%dh' % (num_channels * n_frames), frames) + out_string = ','.join(map(str, samples)) + wav_file.close() + return [wav_file.getnframes(), out_string] + elif input_fname.endswith('.csv'): + with open(input_fname, 'r') as input_file: + # Assume one array per csv file. + elements = input_file.readline() + return [len(elements.split(',')), elements] + elif input_fname.endswith('.npy'): + data = np.float32(np.load(input_fname, allow_pickle=False)) + data_1d = data.flatten() + out_string = ','.join([str(x) for x in data_1d]) + return [len(data_1d), out_string] + + else: + raise ValueError('input file must be .tflite, .bmp, .wav or .csv') + + +def get_array_name(input_fname): + # Normalize potential relative path to remove additional dot. 
+ abs_fname = os.path.abspath(input_fname) + base_array_name = 'g_' + abs_fname.split('.')[-2].split('/')[-1] + if input_fname.endswith('.tflite'): + return [base_array_name + '_model_data', 'unsigned char'] + elif input_fname.endswith('.bmp'): + return [base_array_name + '_image_data', 'unsigned char'] + elif input_fname.endswith('.wav'): + return [base_array_name + '_audio_data', 'int16_t'] + elif input_fname.endswith('_int32.csv'): + return [base_array_name + '_test_data', 'int32_t'] + elif input_fname.endswith('_int16.csv'): + return [base_array_name + '_test_data', 'int16_t'] + elif input_fname.endswith('_int8.csv'): + return [base_array_name + '_test_data', 'int8_t'] + elif input_fname.endswith('_float.csv'): + return [base_array_name + '_test_data', 'float'] + elif input_fname.endswith('npy'): + return [base_array_name + '_test_data', 'float'] + + +def main(): + """Create cc sources with c arrays with data from each .tflite or .bmp.""" + parser = argparse.ArgumentParser() + parser.add_argument( + 'output', + help='base directory for all outputs or a cc or header to generate.') + parser.add_argument( + 'inputs', + nargs='+', + help='input wav, bmp or tflite files to convert. ' + 'If output is a cc or header only one input may be specified.') + args = parser.parse_args() + + if args.output.endswith('.cc') or args.output.endswith('.h'): + assert len(args.inputs) == 1 + size, cc_array = generate_array(args.inputs[0]) + generated_array_name, array_type = get_array_name(args.inputs[0]) + generate_file(args.output, generated_array_name, array_type, cc_array, + size) + else: + # Deduplicate inputs to prevent duplicate generated files (ODR issue). 
+ for input_file in list(dict.fromkeys(args.inputs)): + output_base_fname = os.path.join(args.output, + os.path.splitext(input_file)[0]) + if input_file.endswith('.tflite'): + output_base_fname = output_base_fname + '_model_data' + elif input_file.endswith('.bmp'): + output_base_fname = output_base_fname + '_image_data' + elif input_file.endswith('.wav'): + output_base_fname = output_base_fname + '_audio_data' + elif input_file.endswith('.csv'): + output_base_fname = output_base_fname + '_test_data' + elif input_file.endswith('.npy'): + output_base_fname = output_base_fname + '_test_data' + else: + raise ValueError( + 'input file must be .tflite, .bmp, .wav , .npy or .csv') + + output_cc_fname = output_base_fname + '.cc' + # Print output cc filename for Make to include it in the build. + print(output_cc_fname) + output_hdr_fname = output_base_fname + '.h' + size, cc_array = generate_array(input_file) + generated_array_name, array_type = get_array_name(input_file) + generate_file(output_cc_fname, generated_array_name, array_type, + cc_array, size) + generate_file(output_hdr_fname, generated_array_name, array_type, + cc_array, size) + + +if __name__ == '__main__': + main() diff --git a/tensorflow/lite/micro/tools/generate_test_for_model.py b/tensorflow/lite/micro/tools/generate_test_for_model.py new file mode 100644 index 0000000..8c5b407 --- /dev/null +++ b/tensorflow/lite/micro/tools/generate_test_for_model.py @@ -0,0 +1,243 @@ +# Copyright 2022 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================== + +import csv + +import numpy as np +import tensorflow as tf + +from tflite_micro.tensorflow.lite.python import schema_py_generated as schema_fb + + +class TestDataGenerator: + """ Generate test input/output for given model(s). A list of model(s) are taken as input. + The generated input and output files are in csv format and created in given output folder. """ + + def __init__(self, output_dir, model_paths, inputs): + self.output_dir = output_dir + self.model_paths = model_paths + self.csv_filenames = [] + self.inputs = inputs + self.input_types = {} + self.cc_srcs = [] + self.cc_hdrs = [] + self.includes = [] + + def _generate_inputs_single(self, interpreter, dtype): + input_tensor = interpreter.tensor( + interpreter.get_input_details()[0]['index']) + return [ + np.random.randint(low=np.iinfo(dtype).min, + high=np.iinfo(dtype).max, + dtype=dtype, + size=input_tensor().shape), + ] + + def _generate_inputs_add_sub(self, interpreter, dtype): + input_tensor0 = interpreter.tensor( + interpreter.get_input_details()[0]['index']) + input_tensor1 = interpreter.tensor( + interpreter.get_input_details()[1]['index']) + return [ + np.random.randint(low=np.iinfo(dtype).min, + high=np.iinfo(dtype).max, + dtype=dtype, + size=input_tensor0().shape), + np.random.randint(low=np.iinfo(dtype).min, + high=np.iinfo(dtype).max, + dtype=dtype, + size=input_tensor1().shape) + ] + + def _generate_inputs_transpose_conv(self, interpreter, dtype): + input_tensor0 = interpreter.tensor(0) + filter_tensor = interpreter.tensor(1) + input_tensor1 = interpreter.tensor(2) + + output_shape = interpreter.get_output_details()[0]['shape_signature'] + output_height = output_shape[1] + output_width = output_shape[2] + + output_shape = np.array( + [1, output_height, output_width, + filter_tensor().shape[0]], + 
dtype=np.int32) + if dtype == float or dtype == np.float32 or dtype == np.float64: + random = np.random.uniform(low=1, high=100, size=input_tensor1().shape) + return [output_shape, random.astype(np.float32)] + else: + return [ + output_shape, + np.random.randint(low=np.iinfo(dtype).min, + high=np.iinfo(dtype).max, + dtype=dtype, + size=input_tensor1().shape) + ] + + def _GetTypeStringFromTensor(self, tensor): + if tensor.dtype == np.int8: + return 'int8' + if tensor.dtype == np.int16: + return 'int16' + if tensor.dtype == np.int32: + return 'int32' + if tensor.dtype == float or tensor.dtype == np.float32: + return 'float' + + def generate_golden_single_in_single_out(self): + """ Takes a single model as input. It is expecting a list with one model. + It then generates input and output in CSV format for that model. """ + + if (len(self.model_paths) != 1): + raise RuntimeError(f'Single model expected') + model_path = self.model_paths[0] + interpreter = tf.lite.Interpreter(model_path=model_path, + experimental_op_resolver_type=\ + tf.lite.experimental.OpResolverType.BUILTIN_REF) + + interpreter.allocate_tensors() + + input_details = interpreter.get_input_details() + if len(input_details) > 1: + raise RuntimeError(f'Only models with one input supported') + input_tensor = interpreter.tensor( + interpreter.get_input_details()[0]['index']) + output_tensor = interpreter.tensor( + interpreter.get_output_details()[0]['index']) + + input_type = interpreter.get_input_details()[0]['dtype'] + output_type = interpreter.get_output_details()[0]['dtype'] + if input_type != np.int8 or output_type != np.int8: + raise RuntimeError(f'Only int8 models supported') + + generated_inputs = self._generate_inputs_single(interpreter, + input_tensor().dtype) + for i, _input_detail in enumerate(input_details): + interpreter.set_tensor(input_details[i]["index"], generated_inputs[i]) + + interpreter.invoke() + + self._write_golden(generated_inputs, model_path, output_tensor) + + def 
generate_goldens(self, builtin_operator): + """ Takes a list of one or more models as input. + It also takes a built in operator as input because the generated input depends + on what type of operator it is, and it supports a limited number of operators. + All models in the list assumes the operator as the first operator. It generates + input and output in CSV format for the corresponding models. """ + + for model_path in self.model_paths: + # Load model and run a single inference with random inputs. + interpreter = tf.lite.Interpreter( + model_path=model_path, + experimental_op_resolver_type=\ + tf.lite.experimental.OpResolverType.BUILTIN_REF) + interpreter.allocate_tensors() + input_tensor = interpreter.tensor( + interpreter.get_input_details()[0]['index']) + output_tensor = interpreter.tensor( + interpreter.get_output_details()[0]['index']) + + if builtin_operator in (schema_fb.BuiltinOperator.CONV_2D, + schema_fb.BuiltinOperator.DEPTHWISE_CONV_2D, + schema_fb.BuiltinOperator.STRIDED_SLICE, + schema_fb.BuiltinOperator.PAD, + schema_fb.BuiltinOperator.LEAKY_RELU): + generated_inputs = self._generate_inputs_single( + interpreter, + input_tensor().dtype) + elif builtin_operator in (schema_fb.BuiltinOperator.ADD, + schema_fb.BuiltinOperator.SUB): + generated_inputs = self._generate_inputs_add_sub( + interpreter, + input_tensor().dtype) + elif builtin_operator == schema_fb.BuiltinOperator.TRANSPOSE_CONV: + input_tensor = interpreter.tensor( + interpreter.get_input_details()[1]['index']) + generated_inputs = self._generate_inputs_transpose_conv( + interpreter, + input_tensor().dtype) + else: + raise RuntimeError(f'Unsupported BuiltinOperator: {builtin_operator}') + + for idx, input_tensor_idx in enumerate(self.inputs): + interpreter.set_tensor(input_tensor_idx, generated_inputs[idx]) + interpreter.invoke() + + self._write_golden(generated_inputs, model_path, output_tensor) + + def _write_golden(self, generated_inputs, model_path, output_tensor): + """ Generates input 
and ouputs in CSV format for given model. """ + + # Write input to CSV file. + for input_idx, input_tensor_data in enumerate(generated_inputs): + input_type = self._GetTypeStringFromTensor(input_tensor_data) + self.input_types[input_idx] = input_type + input_flat = input_tensor_data.flatten().tolist() + csv_input_filename = \ + f"{model_path.split('.')[0]}_input{input_idx}_{input_type}.csv" + input_csvfile = open(csv_input_filename, 'w', newline='') + input_csvwriter = csv.writer(input_csvfile) + input_csvwriter.writerow(input_flat) + self.csv_filenames.append(csv_input_filename) + + output_flat = output_tensor().flatten().tolist() + + # Write golden to CSV file. + output_type = self._GetTypeStringFromTensor(output_tensor()) + self.output_type = output_type + csv_golden_filename = f"{model_path.split('.')[0]}_golden_{output_type}.csv" + golden_csvfile = open(csv_golden_filename, 'w', newline='') + golden_csvwriter = csv.writer(golden_csvfile) + np.set_printoptions(threshold=np.inf) + golden_csvwriter.writerow(output_flat) + self.csv_filenames.append(csv_golden_filename) + + def generate_makefile(self, + test_file='integration_tests.cc', + src_prefix=None): + """ Generates a makefile which takes the the given input model(s) as input and also the + corresponding generated input(s) and ouput(s) in csv format. It also take the name of a test file as input. + For example usage see: tensorflow/lite/micro/integration_tests/generate_per_layer_tests.py. 
""" + + makefile = open(self.output_dir + '/Makefile.inc', 'w') + output_dir_list = self.output_dir.split('/') + if src_prefix is None: + src_prefix = output_dir_list[-3] + '_' + output_dir_list[ + -2] + '_' + output_dir_list[-1] + makefile.write(src_prefix + '_GENERATOR_INPUTS := \\\n') + for model_path in self.model_paths: + makefile.write('$(TENSORFLOW_ROOT)' + + model_path.split('third_party/tflite_micro/')[-1] + + ' \\\n') + for csv_input in self.csv_filenames: + makefile.write('$(TENSORFLOW_ROOT)' + + csv_input.split('third_party/tflite_micro/')[-1] + + ' \\\n') + makefile.write('\n') + makefile.write(src_prefix + '_SRCS := \\\n') + makefile.write('$(TENSORFLOW_ROOT)' + + self.output_dir.split('third_party/tflite_micro/')[-1] + + '/' + test_file + ' \\\n') + makefile.write( + "$(TENSORFLOW_ROOT)python/tflite_micro/python_ops_resolver.cc \\\n") + makefile.write('\n\n') + makefile.write(src_prefix + '_HDR := \\\n') + makefile.write( + "$(TENSORFLOW_ROOT)python/tflite_micro/python_ops_resolver.h \\\n") + makefile.write('\n\n') + makefile.write('$(eval $(call microlite_test,' + src_prefix + '_test,\\\n') + makefile.write('$(' + src_prefix + '_SRCS),$(' + src_prefix + '_HDR),$(' + + src_prefix + '_GENERATOR_INPUTS)))') diff --git a/tensorflow/lite/micro/tools/github/arm_virtual_hardware/cortex_m_corstone_300_avh.yml b/tensorflow/lite/micro/tools/github/arm_virtual_hardware/cortex_m_corstone_300_avh.yml new file mode 100644 index 0000000..3849d3e --- /dev/null +++ b/tensorflow/lite/micro/tools/github/arm_virtual_hardware/cortex_m_corstone_300_avh.yml @@ -0,0 +1,32 @@ +# Copyright 2022 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================== + +#! YAML file to drive client for Arm Virtual Hardware on AWS +# Schema https://raw.githubusercontent.com/ARM-software/avhclient/main/schema/avh.schema.json + + +name: "TensorFlow Lite Micro Corstone-300 ArmClang" +workdir: ./ +backend: + aws: + ami-version: ~=1.1 + instance-type: t2.xlarge +steps: + - run: | + git clone https://github.com/tensorflow/tflite-micro.git + mv ./tflite-micro/tensorflow/ . + tensorflow/lite/micro/tools/ci_build/test_cortex_m_corstone_300.sh armclang &> ./corstone300.log +download: + - corstone300.log diff --git a/tensorflow/lite/micro/tools/github/arm_virtual_hardware/cortex_m_generic_avh.yml b/tensorflow/lite/micro/tools/github/arm_virtual_hardware/cortex_m_generic_avh.yml new file mode 100644 index 0000000..33862a0 --- /dev/null +++ b/tensorflow/lite/micro/tools/github/arm_virtual_hardware/cortex_m_generic_avh.yml @@ -0,0 +1,31 @@ +# Copyright 2022 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# ============================================================================== + +#! YAML file to drive client for Arm Virtual Hardware on AWS +# Schema https://raw.githubusercontent.com/ARM-software/avhclient/main/schema/avh.schema.json + +name: "TensorFlow Lite Micro Cortex-M Generic ArmClang" +workdir: ./ +backend: + aws: + ami-version: ~=1.1 + instance-type: t2.xlarge +steps: + - run: | + git clone https://github.com/tensorflow/tflite-micro.git + mv ./tflite-micro/tensorflow/ . + tensorflow/lite/micro/tools/ci_build/test_cortex_m_generic.sh armclang &> ./cortex_m_generic.log +download: + - cortex_m_generic.log diff --git a/tensorflow/lite/micro/tools/make/.gitignore b/tensorflow/lite/micro/tools/make/.gitignore new file mode 100644 index 0000000..cacbbb5 --- /dev/null +++ b/tensorflow/lite/micro/tools/make/.gitignore @@ -0,0 +1,2 @@ +downloads + diff --git a/tensorflow/lite/micro/tools/make/Makefile b/tensorflow/lite/micro/tools/make/Makefile new file mode 100644 index 0000000..91b32a5 --- /dev/null +++ b/tensorflow/lite/micro/tools/make/Makefile @@ -0,0 +1,846 @@ +# Copyright 2023 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# ============================================================================== + +ifneq (3.82,$(firstword $(sort $(MAKE_VERSION) 3.82))) + $(error "Requires make version 3.82 or later (current is $(MAKE_VERSION))") +endif + +# root directory of tensorflow +TENSORFLOW_ROOT := +RELATIVE_MAKEFILE_DIR := tensorflow/lite/micro/tools/make +MAKEFILE_DIR := $(TENSORFLOW_ROOT)$(RELATIVE_MAKEFILE_DIR) + +# Pull in some convenience functions. +include $(MAKEFILE_DIR)/helper_functions.inc + +# Try to figure out the host system +HOST_OS := +ifeq ($(OS),Windows_NT) + HOST_OS = windows +else + UNAME_S := $(shell uname -s) + ifeq ($(UNAME_S),Linux) + HOST_OS := linux + endif + ifeq ($(UNAME_S),Darwin) + HOST_OS := osx + endif +endif + +# Determine the host architecture, with any ix86 architecture being labelled x86_32 +HOST_ARCH := $(shell if uname -m | grep -Eq 'i[345678]86'; then echo x86_32; else echo $(shell uname -m); fi) + +# Override these on the make command line to target a specific architecture. For example: +# make -f tensorflow/lite/Makefile TARGET=rpi TARGET_ARCH=armv7l +TARGET := $(HOST_OS) +TARGET_ARCH := $(HOST_ARCH) + +# Default compiler and tool names: +TOOLCHAIN:=gcc +CXX_TOOL := g++ +CC_TOOL := gcc +AR_TOOL := ar + +ifneq ($(TAGS),) + $(error The TAGS command line option is no longer supported in the TFLM Makefile.) +endif + +# Specify which specialized kernel implementation should be pulled in. +OPTIMIZED_KERNEL_DIR := + +# Override this variable from the command line in case the optimized kernels are +# in a different directory. +OPTIMIZED_KERNEL_DIR_PREFIX := $(TENSORFLOW_ROOT)tensorflow/lite/micro/kernels + +# Specify which co-processor's kernel implementation should be pulled in. +# If the same kernel is implemented in both kernels/OPTIMIZED_KERNEL_DIR and +# kernels/CO_PROCESSOR, then the implementation from kernels/CO_PROCESSOR will +# be used. 
+CO_PROCESSOR := + +# This is the way to specify any code path that we want to include in the build +# process. This will help us to include external code (code that is not part of +# github repo) to be tested. +EXTERNAL_DIR := + +# This is the downloads directory inside the makefiles directory +DOWNLOADS_DIR := $(MAKEFILE_DIR)/downloads + +INCLUDES := \ +-I. \ +-I$(DOWNLOADS_DIR)/gemmlowp \ +-I$(DOWNLOADS_DIR)/flatbuffers/include \ +-I$(DOWNLOADS_DIR)/ruy + +ifneq ($(TENSORFLOW_ROOT),) + INCLUDES += -I$(TENSORFLOW_ROOT) +endif + +ifneq ($(EXTERNAL_DIR),) + INCLUDES += -I$(EXTERNAL_DIR) +endif + +TEST_SCRIPT := + +MICROLITE_LIBS := -lm + +# For the optimized_kernel_dir, and co-processor as specified on the +# command line we add -D to the cflags to allow for #ifdefs in the code. +# +# We apply the following transformations (via the tr command): +# 1. Convert to uppercase (OPTIMIZED_KERNEL_DIR=xtensa -> -DXTENSA) +ADDITIONAL_DEFINES := +ifneq ($(OPTIMIZED_KERNEL_DIR),) + ADDITIONAL_DEFINES += -D$(shell echo $(OPTIMIZED_KERNEL_DIR) | tr [a-z] [A-Z]) +endif + +ifneq ($(CO_PROCESSOR),) + ADDITIONAL_DEFINES += -D$(shell echo $(CO_PROCESSOR) | tr [a-z] [A-Z]) +endif + +ifeq ($(TOOLCHAIN), armclang) + CORE_OPTIMIZATION_LEVEL := -Oz +else + CORE_OPTIMIZATION_LEVEL := -Os +endif +KERNEL_OPTIMIZATION_LEVEL := -O2 +THIRD_PARTY_KERNEL_OPTIMIZATION_LEVEL := -O2 + +# Warn if deprecated optimization level is set.
+OPTIMIZATION_LEVEL := +ifneq ($(OPTIMIZATION_LEVEL),) +$(error "OPTIMIZATION_LEVEL is no longer used.") +endif + + +CC_WARNINGS := \ + -Wsign-compare \ + -Wdouble-promotion \ + -Wshadow \ + -Wunused-variable \ + -Wunused-function \ + -Wswitch \ + -Wvla \ + -Wall \ + -Wextra \ + -Wmissing-field-initializers \ + -Wstrict-aliasing \ + -Wno-unused-parameter + +COMMON_FLAGS := \ + -Werror \ + -fno-unwind-tables \ + -ffunction-sections \ + -fdata-sections \ + -fmessage-length=0 \ + -DTF_LITE_STATIC_MEMORY \ + -DTF_LITE_DISABLE_X86_NEON \ + $(CC_WARNINGS) \ + $(ADDITIONAL_DEFINES) + +ifeq ($(TARGET), $(HOST_OS)) + # If we are not doing a cross-compilation then -DTF_LITE_USE_CTIME is what we + # want to have by default. + COMMON_FLAGS += -DTF_LITE_USE_CTIME +endif + +CXXFLAGS := \ + -std=c++11 \ + -fno-rtti \ + -fno-exceptions \ + -fno-threadsafe-statics \ + -Wnon-virtual-dtor \ + $(COMMON_FLAGS) + +CCFLAGS := \ + -Wimplicit-function-declaration \ + -std=c11 \ + $(COMMON_FLAGS) + +ARFLAGS := -r + +ifeq ($(TOOLCHAIN), gcc) + ifneq ($(TARGET), osx) + # GCC on MacOS uses an LLVM backend so we avoid the additional linker flags + # that are unsupported with LLVM. + LDFLAGS += \ + -Wl,--fatal-warnings \ + -Wl,--gc-sections + endif +endif + +# override these in the makefile.inc for specific compiler targets +TARGET_TOOLCHAIN_PREFIX := +TARGET_TOOLCHAIN_ROOT := + +# Specifying BUILD_TYPE= as part of the make command gives us a few +# options to choose from. +# +# If BUILD_TYPE is not specified, the default build (which should be suitable +# most of the time) has all of the error checking logic at the expense of a +# latency increase of ~5-10% relative to BUILD_TYPE=release_with_logs. 
+# +# This default build is most suited for usual development and testing as is +# highlighted by the discussion on this github pull request: +# https://github.com/tensorflow/tensorflow/pull/42314#issuecomment-694360567 +BUILD_TYPE := default +ifeq ($(BUILD_TYPE), debug) + # Specifying BUILD_TYPE=debug adds debug symbols to the binary (and makes it + # larger) and should be used to run a binary with gdb. + CXXFLAGS += -g + CCFLAGS += -g +else ifeq ($(BUILD_TYPE), release) + # The 'release' build results in the smallest binary (by virtue of removing + # strings from log messages, DCHECKs ...). + # + # The down-side is that we currently do not have a good mechanism to allow + # for logging that is not related to errors (e.g. profiling information, or + # logs that help determine if tests pass or fail). As a result, we are unable + # to run tests or benchmarks with BUILD_TYPE=release (which is a bit + # counter-intuitive). TODO(b/158205789): A global error reporter might help. + # + # For a close approximation of the release build use + # BUILD_TYPE=release_with_logs. + CXXFLAGS += -DNDEBUG -DTF_LITE_STRIP_ERROR_STRINGS + CCFLAGS += -DNDEBUG -DTF_LITE_STRIP_ERROR_STRINGS +else ifeq ($(BUILD_TYPE), release_with_logs) + # The latency with BUILD_TYPE=release_with_logs will be close to the 'release' + # build and there will still be error logs. This build type may be preferable + # for profiling and benchmarking. + CXXFLAGS += -DNDEBUG + CCFLAGS += -DNDEBUG +else ifeq ($(BUILD_TYPE), no_tf_lite_static_memory) + # TODO(b/287320282): remove the no_tf_lite_static_memory build. + # + # This build should not be used to run any binaries/tests since + # TF_LITE_STATIC_MEMORY should be defined for all micro builds. However, + # having a build without TF_LITE_STATIC_MEMORY is useful to catch errors in + # code that is shared between TfLite Mobile and TfLite Micro. 
See this issue + # for more details: + # https://github.com/tensorflow/tensorflow/issues/43076 + CXXFLAGS := $(filter-out -DTF_LITE_STATIC_MEMORY, $(CXXFLAGS)) + CCFLAGS := $(filter-out -DTF_LITE_STATIC_MEMORY, $(CCFLAGS)) + + # We are using C++17 for the no_tf_lite_static_memory_build to make it close + # to the TfLite bazel build. + CXXFLAGS := $(filter-out -std=c++11, $(CXXFLAGS)) + CXXFLAGS += -std=c++17 + CCFLAGS := $(filter-out -std=c11, $(CCFLAGS)) + CCFLAGS += -std=c17 +endif + +# This library is the main target for this makefile. It will contain a minimal +# runtime that can be linked in to other programs. +MICROLITE_LIB_NAME := libtensorflow-microlite.a + +# Where compiled objects are stored. +BASE_GENDIR := gen +GENDIR := $(BASE_GENDIR)/$(TARGET)_$(TARGET_ARCH)_$(BUILD_TYPE)/ +CORE_OBJDIR := $(GENDIR)obj/core/ +KERNEL_OBJDIR := $(GENDIR)obj/kernels/ +THIRD_PARTY_KERNEL_OBJDIR := $(GENDIR)obj/third_party_kernels/ +THIRD_PARTY_OBJDIR := $(GENDIR)obj/third_party/ +GENERATED_SRCS_DIR := $(GENDIR)genfiles/ +BINDIR := $(GENDIR)bin/ +LIBDIR := $(GENDIR)lib/ +PRJDIR := $(GENDIR)prj/ + +# These two must be defined before we include the target specific Makefile.inc +# because we filter out the examples that are not supported for those targets. +# See targets/xtensa_xpg_makefile.inc for an example. +# +# We limit max depth of directories to search to not include target specific +# Makefiles that are included directly by the main example Makefile. See +# examples/micro_speech/Makefile.inc for an example. At the same time, we +# search till an arbitrary depth for files named Makefile_internal.inc as a way +# to bypass this check and allow for deeper directory structures. +MICRO_LITE_EXAMPLE_TESTS := $(shell find $(TENSORFLOW_ROOT)tensorflow/lite/micro/examples/ -maxdepth 2 -name Makefile.inc) + +# Internal examples are copied outside the TFLM repo in google.
+ifneq ($(EXTERNAL_DIR),) + MICRO_LITE_EXAMPLE_TESTS += $(shell find $(EXTERNAL_DIR) -name Makefile_internal.inc) +endif + +# Kernel integration tests must be excluded on certain targets. +MICRO_LITE_INTEGRATION_TESTS += $(shell find $(TENSORFLOW_ROOT)tensorflow/lite/micro/integration_tests -name Makefile.inc) + +MICRO_LITE_GEN_MUTABLE_OP_RESOLVER_TEST += \ + $(wildcard $(TENSORFLOW_ROOT)tensorflow/lite/micro/tools/gen_micro_mutable_op_resolver_test/person_detect/Makefile.inc) + +MICRO_LITE_BENCHMARKS := $(wildcard $(TENSORFLOW_ROOT)tensorflow/lite/micro/benchmarks/Makefile.inc) + +# TODO(b/152645559): move all benchmarks to benchmarks directory. +MICROLITE_BENCHMARK_SRCS := \ +$(wildcard $(TENSORFLOW_ROOT)tensorflow/lite/micro/benchmarks/*benchmark.cc) + +MICROLITE_TEST_SRCS := \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/fake_micro_context_test.cc \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/flatbuffer_utils_test.cc \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/memory_arena_threshold_test.cc \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/memory_helpers_test.cc \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/micro_allocator_test.cc \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/micro_allocation_info_test.cc \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/micro_context_test.cc \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/micro_log_test.cc \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/micro_interpreter_test.cc \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/micro_mutable_op_resolver_test.cc \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/micro_resource_variable_test.cc \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/micro_string_test.cc \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/micro_time_test.cc \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/micro_utils_test.cc \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/recording_micro_allocator_test.cc \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/arena_allocator/non_persistent_arena_buffer_allocator_test.cc \ 
+$(TENSORFLOW_ROOT)tensorflow/lite/micro/arena_allocator/persistent_arena_buffer_allocator_test.cc \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/arena_allocator/recording_single_arena_buffer_allocator_test.cc \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/arena_allocator/single_arena_buffer_allocator_test.cc \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/testing_helpers_test.cc \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/memory_planner/greedy_memory_planner_test.cc \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/memory_planner/linear_memory_planner_test.cc \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/memory_planner/non_persistent_buffer_planner_shim_test.cc + +MICROLITE_CC_KERNEL_SRCS := \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/kernels/activations.cc \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/kernels/activations_common.cc \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/kernels/add.cc \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/kernels/add_common.cc \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/kernels/add_n.cc \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/kernels/arg_min_max.cc \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/kernels/assign_variable.cc \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/kernels/batch_to_space_nd.cc \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/kernels/broadcast_args.cc \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/kernels/broadcast_to.cc \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/kernels/call_once.cc \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/kernels/cast.cc \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/kernels/ceil.cc \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/kernels/circular_buffer.cc \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/kernels/circular_buffer_common.cc \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/kernels/comparisons.cc \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/kernels/concatenation.cc \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/kernels/conv.cc \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/kernels/conv_common.cc \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/kernels/cumsum.cc \ 
+$(TENSORFLOW_ROOT)tensorflow/lite/micro/kernels/depth_to_space.cc \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/kernels/depthwise_conv.cc \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/kernels/depthwise_conv_common.cc \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/kernels/dequantize.cc \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/kernels/dequantize_common.cc \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/kernels/detection_postprocess.cc \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/kernels/div.cc \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/kernels/elementwise.cc \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/kernels/elu.cc \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/kernels/embedding_lookup.cc \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/kernels/ethosu.cc \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/kernels/exp.cc \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/kernels/expand_dims.cc \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/kernels/fill.cc \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/kernels/floor.cc \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/kernels/floor_div.cc \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/kernels/floor_mod.cc \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/kernels/fully_connected.cc \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/kernels/fully_connected_common.cc \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/kernels/gather.cc \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/kernels/gather_nd.cc \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/kernels/hard_swish.cc \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/kernels/hard_swish_common.cc \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/kernels/if.cc \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/kernels/kernel_runner.cc \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/kernels/kernel_util.cc \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/kernels/l2norm.cc \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/kernels/l2_pool_2d.cc \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/kernels/leaky_relu.cc \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/kernels/leaky_relu_common.cc \ 
+$(TENSORFLOW_ROOT)tensorflow/lite/micro/kernels/logical.cc \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/kernels/logical_common.cc \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/kernels/logistic.cc \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/kernels/logistic_common.cc \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/kernels/log_softmax.cc \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/kernels/lstm_eval.cc \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/kernels/lstm_eval_common.cc \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/kernels/maximum_minimum.cc \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/kernels/micro_tensor_utils.cc \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/kernels/mirror_pad.cc \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/kernels/mul.cc \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/kernels/mul_common.cc \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/kernels/neg.cc \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/kernels/pack.cc \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/kernels/pad.cc \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/kernels/pooling.cc \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/kernels/pooling_common.cc \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/kernels/prelu.cc \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/kernels/prelu_common.cc \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/kernels/quantize.cc \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/kernels/quantize_common.cc \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/kernels/read_variable.cc \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/kernels/reduce.cc \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/kernels/reduce_common.cc \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/kernels/reshape.cc \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/kernels/resize_bilinear.cc \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/kernels/resize_nearest_neighbor.cc \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/kernels/round.cc \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/kernels/select.cc \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/kernels/shape.cc \ 
+$(TENSORFLOW_ROOT)tensorflow/lite/micro/kernels/slice.cc \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/kernels/softmax.cc \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/kernels/softmax_common.cc \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/kernels/space_to_batch_nd.cc \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/kernels/space_to_depth.cc \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/kernels/split.cc \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/kernels/split_v.cc \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/kernels/squared_difference.cc \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/kernels/squeeze.cc \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/kernels/strided_slice.cc \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/kernels/sub.cc \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/kernels/sub_common.cc \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/kernels/svdf.cc \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/kernels/svdf_common.cc \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/kernels/tanh.cc \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/kernels/transpose.cc \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/kernels/transpose_conv.cc \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/kernels/unidirectional_sequence_lstm.cc \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/kernels/unpack.cc \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/kernels/var_handle.cc \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/kernels/while.cc \ +$(TENSORFLOW_ROOT)signal/micro/kernels/window.cc \ +$(TENSORFLOW_ROOT)signal/src/window.cc \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/kernels/zeros_like.cc + +MICROLITE_TEST_HDRS := \ +$(wildcard $(TENSORFLOW_ROOT)tensorflow/lite/micro/testing/*.h) + +# The explicitly specified list of sources and headers that are shared between +# TfLite and TFLM are in the ci/sync_from_upstream_tf.sh script. 
+TFL_CC_SRCS := \ +$(shell find $(TENSORFLOW_ROOT)tensorflow/lite -type d \( -path $(TENSORFLOW_ROOT)tensorflow/lite/experimental -o -path $(TENSORFLOW_ROOT)tensorflow/lite/micro \) -prune -false -o -name "*.cc" -o -name "*.c") + +TFL_CC_HDRS := \ +$(shell find $(TENSORFLOW_ROOT)tensorflow/lite -type d \( -path $(TENSORFLOW_ROOT)tensorflow/lite/experimental -o -path $(TENSORFLOW_ROOT)tensorflow/lite/micro \) -prune -false -o -name "*.h") + +ifneq ($(BUILD_TYPE), no_tf_lite_static_memory) + EXCLUDED_TFL_CC_SRCS := \ + $(TENSORFLOW_ROOT)tensorflow/lite/array.cc + TFL_CC_SRCS := $(filter-out $(EXCLUDED_TFL_CC_SRCS), $(TFL_CC_SRCS)) + + EXCLUDED_TFL_CC_HDRS := \ + $(TENSORFLOW_ROOT)tensorflow/lite/array.h + TFL_CC_HDRS := $(filter-out $(EXCLUDED_TFL_CC_HDRS), $(TFL_CC_HDRS)) +endif + +MICROLITE_CC_BASE_SRCS := \ +$(wildcard $(TENSORFLOW_ROOT)tensorflow/lite/micro/*.cc) \ +$(wildcard $(TENSORFLOW_ROOT)tensorflow/lite/micro/arena_allocator/*.cc) \ +$(wildcard $(TENSORFLOW_ROOT)tensorflow/lite/micro/memory_planner/*.cc) \ +$(wildcard $(TENSORFLOW_ROOT)tensorflow/lite/micro/tflite_bridge/*.cc) \ +$(TFL_CC_SRCS) + +MICROLITE_CC_HDRS := \ +$(wildcard $(TENSORFLOW_ROOT)tensorflow/lite/micro/*.h) \ +$(wildcard $(TENSORFLOW_ROOT)tensorflow/lite/micro/benchmarks/*model_data.h) \ +$(wildcard $(TENSORFLOW_ROOT)tensorflow/lite/micro/kernels/*.h) \ +$(wildcard $(TENSORFLOW_ROOT)tensorflow/lite/micro/arena_allocator/*.h) \ +$(wildcard $(TENSORFLOW_ROOT)tensorflow/lite/micro/memory_planner/*.h) \ +$(wildcard $(TENSORFLOW_ROOT)tensorflow/lite/micro/tflite_bridge/*.h) \ +$(wildcard $(TENSORFLOW_ROOT)signal/micro/kernels/*.h) \ +$(wildcard $(TENSORFLOW_ROOT)signal/src/*.h) \ +$(TENSORFLOW_ROOT)LICENSE \ +$(TFL_CC_HDRS) + +# TODO(b/165940489): Figure out how to avoid including fixed point +# platform-specific headers. +# some kiss fft source file has to be included in header sections because +# the implemenation of three different resolution fft from one single c file. 
+# See http://b/201319430 for additional context. +THIRD_PARTY_CC_HDRS := \ +$(DOWNLOADS_DIR)/flatbuffers/include/flatbuffers/allocator.h \ +$(DOWNLOADS_DIR)/flatbuffers/include/flatbuffers/array.h \ +$(DOWNLOADS_DIR)/flatbuffers/include/flatbuffers/base.h \ +$(DOWNLOADS_DIR)/flatbuffers/include/flatbuffers/buffer.h \ +$(DOWNLOADS_DIR)/flatbuffers/include/flatbuffers/buffer_ref.h \ +$(DOWNLOADS_DIR)/flatbuffers/include/flatbuffers/default_allocator.h \ +$(DOWNLOADS_DIR)/flatbuffers/include/flatbuffers/detached_buffer.h \ +$(DOWNLOADS_DIR)/flatbuffers/include/flatbuffers/flatbuffer_builder.h \ +$(DOWNLOADS_DIR)/flatbuffers/include/flatbuffers/flatbuffers.h \ +$(DOWNLOADS_DIR)/flatbuffers/include/flatbuffers/flexbuffers.h \ +$(DOWNLOADS_DIR)/flatbuffers/include/flatbuffers/stl_emulation.h \ +$(DOWNLOADS_DIR)/flatbuffers/include/flatbuffers/string.h \ +$(DOWNLOADS_DIR)/flatbuffers/include/flatbuffers/struct.h \ +$(DOWNLOADS_DIR)/flatbuffers/include/flatbuffers/table.h \ +$(DOWNLOADS_DIR)/flatbuffers/include/flatbuffers/vector.h \ +$(DOWNLOADS_DIR)/flatbuffers/include/flatbuffers/vector_downward.h \ +$(DOWNLOADS_DIR)/flatbuffers/include/flatbuffers/verifier.h \ +$(DOWNLOADS_DIR)/flatbuffers/include/flatbuffers/util.h \ +$(DOWNLOADS_DIR)/flatbuffers/LICENSE.txt \ +$(DOWNLOADS_DIR)/gemmlowp/fixedpoint/fixedpoint.h \ +$(DOWNLOADS_DIR)/gemmlowp/fixedpoint/fixedpoint_neon.h \ +$(DOWNLOADS_DIR)/gemmlowp/fixedpoint/fixedpoint_sse.h \ +$(DOWNLOADS_DIR)/gemmlowp/internal/detect_platform.h \ +$(DOWNLOADS_DIR)/gemmlowp/LICENSE \ +$(DOWNLOADS_DIR)/kissfft/COPYING \ +$(DOWNLOADS_DIR)/kissfft/kiss_fft.c \ +$(DOWNLOADS_DIR)/kissfft/kiss_fft.h \ +$(DOWNLOADS_DIR)/kissfft/_kiss_fft_guts.h \ +$(DOWNLOADS_DIR)/kissfft/tools/kiss_fftr.c \ +$(DOWNLOADS_DIR)/kissfft/tools/kiss_fftr.h \ +$(DOWNLOADS_DIR)/ruy/ruy/profiler/instrumentation.h + +THIRD_PARTY_CC_SRCS := +THIRD_PARTY_KERNEL_CC_SRCS := + +# Load custom kernels. 
+include $(MAKEFILE_DIR)/additional_kernels.inc + +MICROLITE_CC_SRCS := $(filter-out $(MICROLITE_TEST_SRCS), $(MICROLITE_CC_BASE_SRCS)) +MICROLITE_CC_SRCS := $(filter-out $(MICROLITE_BENCHMARK_SRCS), $(MICROLITE_CC_SRCS)) + + + +# The download scripts require that the downloads directory already exist for +# improved error checking. To accommodate that, we first create a downloads +# directory. +$(shell mkdir -p ${DOWNLOADS_DIR}) + +# Directly download the flatbuffers library. +DOWNLOAD_RESULT := $(shell $(MAKEFILE_DIR)/flatbuffers_download.sh ${DOWNLOADS_DIR} $(TENSORFLOW_ROOT)) +ifneq ($(DOWNLOAD_RESULT), SUCCESS) + $(error Something went wrong with the flatbuffers download: $(DOWNLOAD_RESULT)) +endif + +DOWNLOAD_RESULT := $(shell $(MAKEFILE_DIR)/kissfft_download.sh ${DOWNLOADS_DIR} $(TENSORFLOW_ROOT)) +ifneq ($(DOWNLOAD_RESULT), SUCCESS) + $(error Something went wrong with the kissfft download: $(DOWNLOAD_RESULT)) +endif + +DOWNLOAD_RESULT := $(shell $(MAKEFILE_DIR)/pigweed_download.sh ${DOWNLOADS_DIR} $(TENSORFLOW_ROOT)) +ifneq ($(DOWNLOAD_RESULT), SUCCESS) + $(error Something went wrong with the pigweed download: $(DOWNLOAD_RESULT)) +endif + +include $(MAKEFILE_DIR)/third_party_downloads.inc +THIRD_PARTY_DOWNLOADS := +$(eval $(call add_third_party_download,$(GEMMLOWP_URL),$(GEMMLOWP_MD5),gemmlowp,)) +$(eval $(call add_third_party_download,$(RUY_URL),$(RUY_MD5),ruy,)) + +# The target-specific makefile must have a name that is exactly +# TARGET_makefile.inc and is only needed for cross-compilation (i.e. when TARGET +# is different from the HOST_OS). +TARGETS_WITHOUT_MAKEFILES := \ +$(HOST_OS) + +# This specific string needs to be outputted for a test to be recognized as +# having passed. +TEST_PASS_STRING:='~~~ALL TESTS PASSED~~~' + +# ${TARGET}_makefile.inc can set this to true to allow it to define a custom +# implementation for `make test`. See bluepill_makefile as an example.
+TARGET_SPECIFIC_MAKE_TEST:=0 + +ifeq ($(findstring $(TARGET),$(TARGETS_WITHOUT_MAKEFILES)),) + include $(MAKEFILE_DIR)/targets/$(TARGET)_makefile.inc +endif + +ifneq ($(OPTIMIZED_KERNEL_DIR),) + PATH_TO_OPTIMIZED_KERNELS := $(OPTIMIZED_KERNEL_DIR_PREFIX)/$(OPTIMIZED_KERNEL_DIR) + + # Check that OPTIMIZED_KERNEL_DIR is valid to avoid unexpected fallback to + # reference kernels. See http://b/183546742 for more context. + RESULT := $(shell $(MAKEFILE_DIR)/check_optimized_kernel_dir.sh $(PATH_TO_OPTIMIZED_KERNELS)) + ifneq ($(RESULT), SUCCESS) + $(error Incorrect OPTIMIZED_KERNEL_DIR: $(RESULT)) + endif + + include $(MAKEFILE_DIR)/ext_libs/$(OPTIMIZED_KERNEL_DIR).inc + # Specialize for the optimized kernels + MICROLITE_CC_KERNEL_SRCS := $(shell python3 $(MAKEFILE_DIR)/specialize_files.py \ + --base_files "$(MICROLITE_CC_KERNEL_SRCS)" \ + --specialize_directory $(PATH_TO_OPTIMIZED_KERNELS)) + + # The first ifneq is needed to be compatible with make versions prior to 4.2 + # which do not support .SHELLSTATUS. While make 4.2 was released in 2016, + # Ubuntu 18.04 only has version 4.1 + ifneq ($(.SHELLSTATUS),) + ifneq ($(.SHELLSTATUS),0) + $(error Error with specialize_files.py $(MICROLITE_CC_KERNEL_SRCS)) + endif + endif + + # Optimized kernel directories can have their own header files which need to + # be included in MICROLITE_CC_HDRS for project generation to have a complete + # list of headers. + MICROLITE_CC_HDRS += $(wildcard $(PATH_TO_OPTIMIZED_KERNELS)/*.h) +endif + +# If a co-processor is specified on the command line with +# CO_PROCESSOR= then we will include ext_libs/.inc +# and find additional kernel sources in kernels// +# +# That the co-processor specialization of the kernel sources happens after the +# optimized_kernel_dir means that if there is an implementation of the same +# kernel in both directories, the one from co_processor will be used. 
+ifneq ($(CO_PROCESSOR),) + include $(MAKEFILE_DIR)/ext_libs/$(CO_PROCESSOR).inc + # Specialize for the coprocessor kernels. + PATH_TO_COPROCESSOR_KERNELS := $(TENSORFLOW_ROOT)tensorflow/lite/micro/kernels/$(CO_PROCESSOR) + MICROLITE_CC_KERNEL_SRCS := $(shell python3 $(MAKEFILE_DIR)/specialize_files.py \ + --base_files "$(MICROLITE_CC_KERNEL_SRCS)" \ + --specialize_directory $(PATH_TO_COPROCESSOR_KERNELS)) + + # The first ifneq is needed to be compatible with make versions prior to 4.2 + # which do not support .SHELLSTATUS. While make 4.2 was released in 2016, + # Ubuntu 18.04 only has version 4.1 + ifneq ($(.SHELLSTATUS),) + ifneq ($(.SHELLSTATUS),0) + $(error Error with specialize_files.py $(MICROLITE_CC_KERNEL_SRCS)) + endif + endif +endif + +# Specialize for debug_log. micro_time etc. +PATH_TO_TARGET_SRCS := $(TENSORFLOW_ROOT)tensorflow/lite/micro/$(TARGET) +MICROLITE_CC_SRCS := $(shell python3 $(MAKEFILE_DIR)/specialize_files.py \ + --base_files "$(MICROLITE_CC_SRCS)" \ + --specialize_directory $(PATH_TO_TARGET_SRCS)) + +# The first ifneq is needed to be compatible with make versions prior to 4.2 +# which do not support .SHELLSTATUS. While make 4.2 was released in 2016, +# Ubuntu 18.04 only has version 4.1 +ifneq ($(.SHELLSTATUS),) + ifneq ($(.SHELLSTATUS),0) + $(error Error with specialize_files.py $(MICROLITE_CC_SRCS)) + endif +endif + +ALL_SRCS := \ + $(MICROLITE_CC_SRCS) \ + $(MICROLITE_CC_KERNEL_SRCS) \ + $(MICROLITE_TEST_SRCS) + +MICROLITE_LIB_PATH := $(LIBDIR)$(MICROLITE_LIB_NAME) + +CXX := $(TARGET_TOOLCHAIN_ROOT)${TARGET_TOOLCHAIN_PREFIX}${CXX_TOOL} +CC := $(TARGET_TOOLCHAIN_ROOT)${TARGET_TOOLCHAIN_PREFIX}${CC_TOOL} +AR := $(TARGET_TOOLCHAIN_ROOT)${TARGET_TOOLCHAIN_PREFIX}${AR_TOOL} + +# The default Makefile target(all) must appear before any target, +# which is compiled if there's no command-line arguments. +all: $(MICROLITE_LIB_PATH) + +# Include output directory since example cc files depend on generated headers. 
+INCLUDES += -I$(GENERATED_SRCS_DIR) +INCLUDES += -I$(GENERATED_SRCS_DIR)$(TENSORFLOW_ROOT) + +# Load the examples. +include $(MICRO_LITE_EXAMPLE_TESTS) + +# Load the integration tests. +include $(MICRO_LITE_INTEGRATION_TESTS) + +# Load generated micro mutable op resolver test. +include ${MICRO_LITE_GEN_MUTABLE_OP_RESOLVER_TEST} + +# Load the benchmarks. +include $(MICRO_LITE_BENCHMARKS) + +# Load custom kernel tests. +include $(MAKEFILE_DIR)/additional_tests.inc + +# Create rules for downloading third-party dependencies. +THIRD_PARTY_TARGETS := +$(foreach DOWNLOAD,$(THIRD_PARTY_DOWNLOADS),$(eval $(call create_download_rule,$(DOWNLOAD)))) +third_party_downloads: $(THIRD_PARTY_TARGETS) + +MICROLITE_LIB_OBJS := $(addprefix $(CORE_OBJDIR), \ +$(patsubst %.S,%.o,$(patsubst %.cc,%.o,$(patsubst %.c,%.o,$(MICROLITE_CC_SRCS))))) + +MICROLITE_THIRD_PARTY_OBJS := $(addprefix $(THIRD_PARTY_OBJDIR), \ +$(patsubst %.S,%.o,$(patsubst %.cc,%.o,$(patsubst %.c,%.o,$(THIRD_PARTY_CC_SRCS))))) + +MICROLITE_THIRD_PARTY_KERNEL_OBJS := $(addprefix $(THIRD_PARTY_KERNEL_OBJDIR), \ +$(patsubst %.S,%.o,$(patsubst %.cc,%.o,$(patsubst %.c,%.o,$(THIRD_PARTY_KERNEL_CC_SRCS))))) + +MICROLITE_KERNEL_OBJS := $(addprefix $(KERNEL_OBJDIR), \ +$(patsubst %.S,%.o,$(patsubst %.cc,%.o,$(patsubst %.c,%.o,$(MICROLITE_CC_KERNEL_SRCS))))) + +$(CORE_OBJDIR)%.o: %.cc $(THIRD_PARTY_TARGETS) + @mkdir -p $(dir $@) + $(CXX) $(CXXFLAGS) $(CORE_OPTIMIZATION_LEVEL) $(INCLUDES) -c $< -o $@ + +$(CORE_OBJDIR)%.o: %.c $(THIRD_PARTY_TARGETS) + @mkdir -p $(dir $@) + $(CC) $(CCFLAGS) $(CORE_OPTIMIZATION_LEVEL) $(INCLUDES) -c $< -o $@ + +$(CORE_OBJDIR)%.o: %.S $(THIRD_PARTY_TARGETS) + @mkdir -p $(dir $@) + $(CC) $(CCFLAGS) $(CORE_OPTIMIZATION_LEVEL) $(INCLUDES) -c $< -o $@ + +$(THIRD_PARTY_OBJDIR)%.o: %.cc $(THIRD_PARTY_TARGETS) + @mkdir -p $(dir $@) + $(CXX) $(CXXFLAGS) $(CORE_OPTIMIZATION_LEVEL) $(INCLUDES) -c $< -o $@ + +$(THIRD_PARTY_OBJDIR)%.o: %.c $(THIRD_PARTY_TARGETS) + @mkdir -p $(dir $@) + $(CC) $(CCFLAGS) 
$(CORE_OPTIMIZATION_LEVEL) $(INCLUDES) -c $< -o $@ + +$(THIRD_PARTY_OBJDIR)%.o: %.S $(THIRD_PARTY_TARGETS) + @mkdir -p $(dir $@) + $(CC) $(CCFLAGS) $(CORE_OPTIMIZATION_LEVEL) $(INCLUDES) -c $< -o $@ + +$(THIRD_PARTY_KERNEL_OBJDIR)%.o: %.cc $(THIRD_PARTY_KERNEL_TARGETS) + @mkdir -p $(dir $@) + $(CXX) $(CXXFLAGS) $(THIRD_PARTY_KERNEL_OPTIMIZATION_LEVEL) $(INCLUDES) -c $< -o $@ + +$(THIRD_PARTY_KERNEL_OBJDIR)%.o: %.c $(THIRD_PARTY_KERNEL_TARGETS) + @mkdir -p $(dir $@) + $(CC) $(CCFLAGS) $(THIRD_PARTY_KERNEL_OPTIMIZATION_LEVEL) $(INCLUDES) -c $< -o $@ + +$(THIRD_PARTY_KERNEL_OBJDIR)%.o: %.S $(THIRD_PARTY_KERNEL_TARGETS) + @mkdir -p $(dir $@) + $(CC) $(CCFLAGS) $(THIRD_PARTY_KERNEL_OPTIMIZATION_LEVEL) $(INCLUDES) -c $< -o $@ + +$(KERNEL_OBJDIR)%.o: %.cc $(THIRD_PARTY_TARGETS) + @mkdir -p $(dir $@) + $(CXX) $(CXXFLAGS) $(KERNEL_OPTIMIZATION_LEVEL) $(INCLUDES) -c $< -o $@ + +$(KERNEL_OBJDIR)%.o: %.c $(THIRD_PARTY_TARGETS) + @mkdir -p $(dir $@) + $(CC) $(CCFLAGS) $(KERNEL_OPTIMIZATION_LEVEL) $(INCLUDES) -c $< -o $@ + +$(KERNEL_OBJDIR)%.o: %.S $(THIRD_PARTY_TARGETS) + @mkdir -p $(dir $@) + $(CC) $(CCFLAGS) $(KERNEL_OPTIMIZATION_LEVEL) $(INCLUDES) -c $< -o $@ + +microlite: $(MICROLITE_LIB_PATH) + +# Gathers together all the objects we've compiled into a single '.a' archive. +$(MICROLITE_LIB_PATH): $(MICROLITE_LIB_OBJS) $(MICROLITE_KERNEL_OBJS) $(MICROLITE_THIRD_PARTY_OBJS) $(MICROLITE_THIRD_PARTY_KERNEL_OBJS) $(MICROLITE_CUSTOM_OP_OBJS) + @mkdir -p $(dir $@) + $(AR) $(ARFLAGS) $(MICROLITE_LIB_PATH) $(MICROLITE_LIB_OBJS) \ + $(MICROLITE_KERNEL_OBJS) $(MICROLITE_THIRD_PARTY_OBJS) $(MICROLITE_THIRD_PARTY_KERNEL_OBJS) $(MICROLITE_CUSTOM_OP_OBJS) + +$(BINDIR)%_test : $(CORE_OBJDIR)%_test.o $(MICROLITE_LIB_PATH) + @mkdir -p $(dir $@) + $(CXX) $(CXXFLAGS) $(INCLUDES) \ + -o $@ $< \ + $(MICROLITE_LIB_PATH) $(LDFLAGS) $(MICROLITE_LIBS) + +$(BINDIR)%.test_target: $(BINDIR)%_test + @test -f $(TEST_SCRIPT) || (echo 'Unable to find the test script. 
Is the software emulation available in $(TARGET)?'; exit 1) + $(TEST_SCRIPT) $< $(TEST_PASS_STRING) + +# snease: Add %.bin rule here since BINDIR is now defined +# These are microcontroller-specific rules for converting the ELF output +# of the linker into a binary image that can be loaded directly. +ifeq ($(TOOLCHAIN), armclang) + FROMELF := ${TARGET_TOOLCHAIN_ROOT}$(TARGET_TOOLCHAIN_PREFIX)fromelf + $(BINDIR)%.bin: $(BINDIR)% + @mkdir -p $(dir $@) + $(FROMELF) --bin --output=$@ $< +else + OBJCOPY := ${TARGET_TOOLCHAIN_ROOT}$(TARGET_TOOLCHAIN_PREFIX)objcopy + $(BINDIR)%.bin: $(BINDIR)% + @mkdir -p $(dir $@) + $(OBJCOPY) $< $@ -O binary +endif + +# Create kernel test targets. +include $(TENSORFLOW_ROOT)tensorflow/lite/micro/kernels/Makefile.inc + +# Create binary size test target. +include $(TENSORFLOW_ROOT)tensorflow/lite/micro/tools/ci_build/binary_size_test/Makefile.inc + +# Some tests have additional dependencies (beyond libtensorflow-microlite.a) and +# those need to be explicitly specified with their own individual call to the +# microlite_test helper function. For these tests, we also need to make sure to +# not add targets for them if they have been excluded as part of the target +# specific Makefile. 
+EXPLICITLY_SPECIFIED_TEST:= $(TENSORFLOW_ROOT)tensorflow/lite/micro/memory_arena_threshold_test.cc +ifneq ($(findstring $(EXPLICITLY_SPECIFIED_TEST),$(MICROLITE_TEST_SRCS)),) + MICROLITE_TEST_SRCS := $(filter-out $(EXPLICITLY_SPECIFIED_TEST), $(MICROLITE_TEST_SRCS)) + EXPLICITLY_SPECIFIED_TEST_SRCS := \ + $(EXPLICITLY_SPECIFIED_TEST) \ + $(TENSORFLOW_ROOT)tensorflow/lite/micro/testing/test_conv_model.cc + EXPLICITLY_SPECIFIED_TEST_HDRS := \ + $(TENSORFLOW_ROOT)tensorflow/lite/micro/testing/test_conv_model.h + EXPLICITLY_SPECIFIED_TEST_GENERATOR_INPUTS := \ + $(TENSORFLOW_ROOT)tensorflow/lite/micro/models/keyword_scrambled.tflite + $(eval $(call microlite_test,memory_arena_threshold_test,\ + $(EXPLICITLY_SPECIFIED_TEST_SRCS),$(EXPLICITLY_SPECIFIED_TEST_HDRS), \ + $(EXPLICITLY_SPECIFIED_TEST_GENERATOR_INPUTS))) +endif + +EXPLICITLY_SPECIFIED_TEST:= $(TENSORFLOW_ROOT)tensorflow/lite/micro/micro_allocator_test.cc +ifneq ($(findstring $(EXPLICITLY_SPECIFIED_TEST),$(MICROLITE_TEST_SRCS)),) + MICROLITE_TEST_SRCS := $(filter-out $(EXPLICITLY_SPECIFIED_TEST), $(MICROLITE_TEST_SRCS)) + EXPLICITLY_SPECIFIED_TEST_SRCS := \ + $(EXPLICITLY_SPECIFIED_TEST) \ + $(TENSORFLOW_ROOT)tensorflow/lite/micro/testing/test_conv_model.cc + EXPLICITLY_SPECIFIED_TEST_HDRS := \ + $(TENSORFLOW_ROOT)tensorflow/lite/micro/testing/test_conv_model.h + $(eval $(call microlite_test,micro_allocator_test,\ + $(EXPLICITLY_SPECIFIED_TEST_SRCS),$(EXPLICITLY_SPECIFIED_TEST_HDRS))) +endif + +EXPLICITLY_SPECIFIED_TEST:= $(TENSORFLOW_ROOT)tensorflow/lite/micro/recording_micro_allocator_test.cc +ifneq ($(findstring $(EXPLICITLY_SPECIFIED_TEST),$(MICROLITE_TEST_SRCS)),) + MICROLITE_TEST_SRCS := $(filter-out $(EXPLICITLY_SPECIFIED_TEST), $(MICROLITE_TEST_SRCS)) + EXPLICITLY_SPECIFIED_TEST_SRCS := \ + $(EXPLICITLY_SPECIFIED_TEST) \ + $(TENSORFLOW_ROOT)tensorflow/lite/micro/testing/test_conv_model.cc + EXPLICITLY_SPECIFIED_TEST_HDRS := \ + $(TENSORFLOW_ROOT)tensorflow/lite/micro/testing/test_conv_model.h + $(eval 
$(call microlite_test,recording_micro_allocator_test,\ + $(EXPLICITLY_SPECIFIED_TEST_SRCS),$(EXPLICITLY_SPECIFIED_TEST_HDRS))) +endif + +# For all the tests that do not have any additional dependencies, we can +# add a make target in a common way. +$(foreach TEST_TARGET,$(filter-out $(TENSORFLOW_ROOT)tensorflow/lite/micro/kernels/%,$(MICROLITE_TEST_SRCS)),\ +$(eval $(call microlite_test,$(notdir $(basename $(TEST_TARGET))),$(TEST_TARGET)))) + +ifeq ($(TARGET_SPECIFIC_MAKE_TEST),0) +test: $(MICROLITE_TEST_TARGETS) +integration_tests: $(MICROLITE_INTEGRATION_TEST_TARGETS) +generated_micro_mutable_op_resolver: $(MICROLITE_GEN_OP_RESOLVER_TEST_TARGETS) +endif + +# Just build the test targets +build: $(MICROLITE_BUILD_TARGETS) + +list_library_sources: + @echo $(MICROLITE_CC_SRCS) $(MICROLITE_CC_KERNEL_SRCS) + +list_library_headers: + @echo $(MICROLITE_CC_HDRS) + +list_third_party_sources: + @echo $(THIRD_PARTY_CC_SRCS) $(THIRD_PARTY_KERNEL_CC_SRCS) + +list_third_party_headers: + @echo $(THIRD_PARTY_CC_HDRS) + +list_generator_dir: + @echo $(GENERATED_SRCS_DIR) + +# Gets rid of all generated files. +clean: + rm -rf $(BASE_GENDIR) + +# Removes third-party downloads. 
+clean_downloads: + rm -rf $(DOWNLOADS_DIR) + +$(DEPDIR)/%.d: ; +.PRECIOUS: $(DEPDIR)/%.d +.PRECIOUS: $(BINDIR)%_test + +-include $(patsubst %,$(DEPDIR)/%.d,$(basename $(ALL_SRCS))) diff --git a/tensorflow/lite/micro/tools/make/additional_kernels.inc b/tensorflow/lite/micro/tools/make/additional_kernels.inc new file mode 100644 index 0000000..e69de29 diff --git a/tensorflow/lite/micro/tools/make/additional_tests.inc b/tensorflow/lite/micro/tools/make/additional_tests.inc new file mode 100644 index 0000000..e69de29 diff --git a/tensorflow/lite/micro/tools/make/arm_gcc_download.sh b/tensorflow/lite/micro/tools/make/arm_gcc_download.sh new file mode 100755 index 0000000..8e6d632 --- /dev/null +++ b/tensorflow/lite/micro/tools/make/arm_gcc_download.sh @@ -0,0 +1,100 @@ +#!/bin/bash +# Copyright 2022 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================== +# +# Called with following arguments: +# 1 - Path to the downloads folder which is typically +# ${TENSORFLOW_ROOT}/tensorflow/lite/micro/tools/make/downloads +# 2 - (optional) TENSORFLOW_ROOT: path to root of the TFLM tree (relative to directory from where the script is called). +# +# This script is called from the Makefile and uses the following convention to +# enable determination of sucess/failure: +# +# - If the script is successful, the only output on stdout should be SUCCESS. 
+# The makefile checks for this particular string. +# +# - Any string on stdout that is not SUCCESS will be shown in the makefile as +# the cause for the script to have failed. +# +# - Any other informational prints should be on stderr. + +set -e + +TENSORFLOW_ROOT=${2} +source ${TENSORFLOW_ROOT}tensorflow/lite/micro/tools/make/bash_helpers.sh + +DOWNLOADS_DIR=${1} +if [ ! -d ${DOWNLOADS_DIR} ]; then + echo "The top-level downloads directory: ${DOWNLOADS_DIR} does not exist." + exit 1 +fi + +DOWNLOADED_GCC_PATH=${DOWNLOADS_DIR}/gcc_embedded + +if [ -d ${DOWNLOADED_GCC_PATH} ]; then + echo >&2 "${DOWNLOADED_GCC_PATH} already exists, skipping the download." +else + + HOST_OS= + if [ "${OS}" == "Windows_NT" ]; then + HOST_OS=windows + else + UNAME_S=`uname -s` + if [ "${UNAME_S}" == "Linux" ]; then + HOST_OS=linux + elif [ "${UNAME_S}" == "Darwin" ]; then + HOST_OS=osx + fi + fi + + if [ "${HOST_OS}" == "linux" ]; then + # host architecture + UNAME_M=`uname -m` + if [ "${UNAME_M}" == "x86_64" ]; then + GCC_URL="https://developer.arm.com/-/media/Files/downloads/gnu-rm/10.3-2021.10/gcc-arm-none-eabi-10.3-2021.10-x86_64-linux.tar.bz2" + EXPECTED_MD5="2383e4eb4ea23f248d33adc70dc3227e" + elif [ "${UNAME_M}" == "aarch64" ]; then + GCC_URL="https://developer.arm.com/-/media/Files/downloads/gnu-rm/10.3-2021.10/gcc-arm-none-eabi-10.3-2021.10-aarch64-linux.tar.bz2" + EXPECTED_MD5="3fe3d8bb693bd0a6e4615b6569443d0d" + fi + + elif [ "${HOST_OS}" == "osx" ]; then + GCC_URL="https://developer.arm.com/-/media/Files/downloads/gnu-rm/10.3-2021.10/gcc-arm-none-eabi-10.3-2021.10-mac.tar.bz2" + EXPECTED_MD5="7f2a7b7b23797302a9d6182c6e482449" + elif [ "${HOST_OS}" == "windows" ]; then + GCC_URL="https://developer.arm.com/-/media/Files/downloads/gnu-rm/10.3-2021.10/gcc-arm-none-eabi-10.3-2021.10-win32.zip" + EXPECTED_MD5="2bc8f0c4c4659f8259c8176223eeafc1" + else + echo "OS type ${HOST_OS} not supported." 
+ exit 1 + fi + + TEMPDIR=$(mktemp -d) + TEMPFILE=${TEMPDIR}/temp_file + wget ${GCC_URL} -O ${TEMPFILE} >&2 + check_md5 ${TEMPFILE} ${EXPECTED_MD5} + + mkdir ${DOWNLOADED_GCC_PATH} + + if [ "${HOST_OS}" == "windows" ]; then + unzip -q ${TEMPFILE} -d ${TEMPDIR} >&2 + mv ${TEMPDIR}/*/* ${DOWNLOADED_GCC_PATH} + else + tar -C ${DOWNLOADED_GCC_PATH} --strip-components=1 -xjf ${TEMPFILE} >&2 + fi + echo >&2 "Unpacked to directory: ${DOWNLOADED_GCC_PATH}" +fi + +echo "SUCCESS" diff --git a/tensorflow/lite/micro/tools/make/bash_helpers.sh b/tensorflow/lite/micro/tools/make/bash_helpers.sh new file mode 100755 index 0000000..f29a641 --- /dev/null +++ b/tensorflow/lite/micro/tools/make/bash_helpers.sh @@ -0,0 +1,79 @@ +#!/bin/bash +# Copyright 2019 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================== + + +# Compute the MD5 sum. +# +# Parameter(s): +# ${1} - path to the file +function compute_md5() { + UNAME_S=`uname -s` + if [ ${UNAME_S} == Linux ]; then + tflm_md5sum=md5sum + elif [ ${UNAME_S} == Darwin ]; then + tflm_md5sum='md5 -r' + else + tflm_md5sum=md5sum + fi + ${tflm_md5sum} ${1} | awk '{print $1}' +} + +# Check that MD5 sum matches expected value. +# +# Parameter(s): +# ${1} - path to the file +# ${2} - expected md5 +function check_md5() { + MD5=`compute_md5 ${1}` + + if [[ ${MD5} != ${2} ]] + then + echo "Bad checksum. 
Expected: ${2}, Got: ${MD5}" + exit 1 + fi + +} + +# Create a git repo in a folder. +# +# Parameter(s): +# ${1} - relative path to folder +create_git_repo() { + pushd ${1} > /dev/null + git init . > /dev/null + git config user.email "tflm@google.com" --local + git config user.name "TFLM" --local + git add . >&2 2> /dev/null + git commit -a -m "Commit for a temporary repository." > /dev/null + git checkout -b tflm > /dev/null + popd > /dev/null +} + +# Create a new commit with a patch in a folder that has a git repo. +# +# Parameter(s): +# ${1} - relative path to folder +# ${2} - path to patch file (relative to ${1}) +# ${3} - commit message for the patch +function apply_patch_to_folder() { + pushd ${1} > /dev/null + echo >&2 "Applying ${PWD}/${1}/${2} to ${PWD}/${1}" + git apply ${2} + git commit -a -m "${3}" > /dev/null + popd > /dev/null +} + + diff --git a/tensorflow/lite/micro/tools/make/check_optimized_kernel_dir.sh b/tensorflow/lite/micro/tools/make/check_optimized_kernel_dir.sh new file mode 100755 index 0000000..24a40b8 --- /dev/null +++ b/tensorflow/lite/micro/tools/make/check_optimized_kernel_dir.sh @@ -0,0 +1,40 @@ +#!/bin/bash +# Copyright 2019 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# ============================================================================== +# +# Called with following arguments: +# 1 - OPTIMIZED_KERNEL_PATH (relative to the location from which the script is invoked) +# to the optimized kernel implementations. +# +# This script is called from the Makefile and uses the following convention to +# enable determination of success/failure: +# +# - If the script is successful, the only output on stdout should be SUCCESS. +# The makefile checks for this particular string. +# +# - Any string on stdout that is not SUCCESS will be shown in the makefile as +# the cause for the script to have failed. +# +# - Any other informational prints should be on stderr. + +set -e + +OPTIMIZED_KERNEL_PATH=${1} +if [ ! -d ${OPTIMIZED_KERNEL_PATH} ]; then + echo "The optimized kernel directory: ${OPTIMIZED_KERNEL_PATH} does not exist." + exit 1 +fi + +echo "SUCCESS" diff --git a/tensorflow/lite/micro/tools/make/corstone_300_download.sh b/tensorflow/lite/micro/tools/make/corstone_300_download.sh new file mode 100755 index 0000000..aa0a762 --- /dev/null +++ b/tensorflow/lite/micro/tools/make/corstone_300_download.sh @@ -0,0 +1,70 @@ +#!/bin/bash +# Copyright 2022 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# ============================================================================== +# +# Called with following arguments: +# 1 - Path to the downloads folder which is typically +# tensorflow/lite/micro/tools/make/downloads +# +# This script is called from the Makefile and uses the following convention to +# enable determination of sucess/failure: +# +# - If the script is successful, the only output on stdout should be SUCCESS. +# The makefile checks for this particular string. +# +# - Any string on stdout that is not SUCCESS will be shown in the makefile as +# the cause for the script to have failed. +# +# - Any other informational prints should be on stderr. + +set -e + +SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)" +ROOT_DIR=${SCRIPT_DIR}/../../../../.. +cd "${ROOT_DIR}" + +source tensorflow/lite/micro/tools/make/bash_helpers.sh + +DOWNLOADS_DIR=${1} +if [ ! -d ${DOWNLOADS_DIR} ]; then + echo "The top-level downloads directory: ${DOWNLOADS_DIR} does not exist." + exit 1 +fi + +DOWNLOADED_CORSTONE_PATH=${DOWNLOADS_DIR}/corstone300 + +if [ -d ${DOWNLOADED_CORSTONE_PATH} ]; then + echo >&2 "${DOWNLOADED_CORSTONE_PATH} already exists, skipping the download." +else + UNAME_S=`uname -s` + if [ ${UNAME_S} == Linux ]; then + CORSTONE_URL=https://developer.arm.com/-/media/Arm%20Developer%20Community/Downloads/OSS/FVP/Corstone-300/FVP_Corstone_SSE-300_11.16_26.tgz + EXPECTED_MD5=29d9208127b24a0d83356efb8343162d + else + echo "OS type ${UNAME_S} not supported." 
+ exit 1 + fi + + TEMPFILE=$(mktemp -d)/temp_file + wget ${CORSTONE_URL} -O ${TEMPFILE} >&2 + check_md5 ${TEMPFILE} ${EXPECTED_MD5} + + TEMPDIR=$(mktemp -d) + tar -C ${TEMPDIR} -xvzf ${TEMPFILE} >&2 + mkdir ${DOWNLOADED_CORSTONE_PATH} + ${TEMPDIR}/FVP_Corstone_SSE-300.sh --i-agree-to-the-contained-eula --no-interactive -d ${DOWNLOADED_CORSTONE_PATH} >&2 +fi + +echo "SUCCESS" diff --git a/tensorflow/lite/micro/tools/make/download_and_extract.sh b/tensorflow/lite/micro/tools/make/download_and_extract.sh new file mode 100755 index 0000000..974dca1 --- /dev/null +++ b/tensorflow/lite/micro/tools/make/download_and_extract.sh @@ -0,0 +1,180 @@ +#!/bin/bash +# Copyright 2021 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================== + +# Utility script that handles downloading, extracting, and patching third-party +# library dependencies for TensorFlow Lite for Microcontrollers. +# Called with four arguments: +# 1 - URL to download from. +# 2 - MD5 checksum to verify the package's integrity. Use md5sum to create one. +# 3 - Path to new folder to unpack the library into. +# 4 - Optional patching action name. + +set -e + +# Patches the Ambiq Micro SDK to work around build issues. +patch_am_sdk() { + local am_dir="${1}" + if [ ! 
-f ${am_dir}/VERSION.txt ]; then + echo "Could not find ${am_dir}, skipping AmbiqMicro SDK patch"; + return; + fi + + local src_dir=${am_dir}/boards/apollo3_evb/examples/hello_world/gcc + local dest_dir=${am_dir}/boards/apollo3_evb/examples/hello_world/gcc_patched + + rm -rf ${dest_dir} + mkdir ${dest_dir} + + cp "${src_dir}/startup_gcc.c" "${dest_dir}/startup_gcc.c" + cp "${src_dir}/hello_world.ld" "${dest_dir}/apollo3evb.ld" + + sed -i -e '114s/1024/1024\*20/g' "${dest_dir}/startup_gcc.c" + #sed -i -e 's/main/_main/g' "${dest_dir}/startup_gcc.c" + + sed -i -e '3s/hello_world.ld/apollo3evb.ld/g' "${dest_dir}/apollo3evb.ld" + sed -i -e '3s/startup_gnu/startup_gcc/g' "${dest_dir}/apollo3evb.ld" + sed -i -e $'22s/\*(.text\*)/\*(.text\*)\\\n\\\n\\\t\/\* These are the C++ global constructors. Stick them all here and\\\n\\\t \* then walk through the array in main() calling them all.\\\n\\\t \*\/\\\n\\\t_init_array_start = .;\\\n\\\tKEEP (\*(SORT(.init_array\*)))\\\n\\\t_init_array_end = .;\\\n\\\n\\\t\/\* XXX Currently not doing anything for global destructors. 
\*\/\\\n/g' "${dest_dir}/apollo3evb.ld" + sed -i -e $'70s/} > SRAM/} > SRAM\\\n \/\* Add this to satisfy reference to symbol "end" from libnosys.a(sbrk.o)\\\n \* to denote the HEAP start.\\\n \*\/\\\n end = .;/g' "${dest_dir}/apollo3evb.ld" + + # Add a delay after establishing serial connection + sed -ir -E $'s/ with serial\.Serial\(args\.port, args\.baud, timeout=12\) as ser:/ with serial.Serial(args.port, args.baud, timeout=12) as ser:\\\n # Patched.\\\n import time\\\n time.sleep(0.25)\\\n # End patch./g' "${am_dir}/tools/apollo3_scripts/uart_wired_update.py" + + # Add CPP include guards to "am_hal_iom.h" + sed -i -e '57a\ + #ifdef __cplusplus // Patch\ + extern "C" {\ + #endif // End patch + ' "${am_dir}/mcu/apollo3/hal/am_hal_iom.h" + + sed -i -e '836a\ + #ifdef __cplusplus // Patch\ + }\ + #endif // End patch + ' "${am_dir}/mcu/apollo3/hal/am_hal_iom.h" + + echo "Finished preparing Apollo3 files" +} + +build_embarc_mli() { + if [[ ${ARC_TAGS} =~ "mli20_experimental" ]]; then + make -C ${1}/lib/make build TCF_FILE=${2} BUILDLIB_DIR=${BUILD_LIB_DIR} GEN_EXAMPLES=0 JOBS=4 + else + make -j 4 -C ${1}/lib/make TCF_FILE=${2} + fi +} + +# Main function handling the download, verify, extract, and patch process. +download_and_extract() { + local usage="Usage: download_and_extract URL MD5 DIR [ACTION] [ACTION_PARAM]" + local url="${1:?${usage}}" + local expected_md5="${2:?${usage}}" + local dir="${3:?${usage}}" + local action=${4} + local action_param1=${5} # optional action parameter + local tempdir=$(mktemp -d) + local tempdir2=$(mktemp -d) + local tempfile=${tempdir}/temp_file + local curl_retries=5 + + # Destionation already downloaded. + if [ -d ${dir} ]; then + exit 0 + fi + + command -v curl >/dev/null 2>&1 || { + echo >&2 "The required 'curl' tool isn't installed. 
Try 'apt-get install curl'."; exit 1; + } + + echo "downloading ${url}" >&2 + mkdir -p "${dir}" + # We've been seeing occasional 56 errors from valid URLs, so set up a retry + # loop to attempt to recover from them. + for (( i=1; i<=$curl_retries; ++i )); do + # We have to use this approach because we normally halt the script when + # there's an error, and instead we want to catch errors so we can retry. + set +ex + curl -LsS --fail --retry 5 "${url}" > ${tempfile} + CURL_RESULT=$? + set -ex + + # Was the command successful? If so, continue. + if [[ $CURL_RESULT -eq 0 ]]; then + break + fi + + # Keep trying if we see the '56' error code. + if [[ ( $CURL_RESULT -ne 56 ) || ( $i -eq $curl_retries ) ]]; then + echo "Error $CURL_RESULT downloading '${url}'" + exit 1 + fi + sleep 2 + done + + # Check that the file was downloaded correctly using a checksum. + DOWNLOADED_MD5=$(openssl dgst -md5 ${tempfile} | sed 's/.* //g') + if [ ${expected_md5} != ${DOWNLOADED_MD5} ]; then + echo "Checksum error for '${url}'. Expected ${expected_md5} but found ${DOWNLOADED_MD5}" + exit 1 + fi + + # delete anything after the '?' in a url that may mask true file extension + url=$(echo "${url}" | sed "s/\?.*//") + + if [[ "${url}" == *gz ]]; then + tar -C "${dir}" --strip-components=1 -xzf ${tempfile} + elif [[ "${url}" == *tar.xz ]]; then + tar -C "${dir}" --strip-components=1 -xf ${tempfile} + elif [[ "${url}" == *bz2 ]]; then + curl -Ls "${url}" > ${tempdir}/tarred.bz2 + tar -C "${dir}" --strip-components=1 -xjf ${tempfile} + elif [[ "${url}" == *zip ]]; then + unzip ${tempfile} -d ${tempdir2} 2>&1 1>/dev/null + # If the zip file contains nested directories, extract the files from the + # inner directory. + if [ $(find $tempdir2/* -maxdepth 0 | wc -l) = 1 ] && [ -d $tempdir2/* ]; then + # unzip has no strip components, so unzip to a temp dir, and move the + # files we want from the tempdir to destination. 
+ cp -R ${tempdir2}/*/* ${dir}/ + else + cp -R ${tempdir2}/* ${dir}/ + fi + else + echo "Error unsupported archive type. Failed to extract tool after download." + exit 1 + fi + rm -rf ${tempdir2} ${tempdir} + + # Delete any potential BUILD files, which would interfere with Bazel builds. + find "${dir}" -type f -name '*BUILD' -delete + + if [[ ${action} == "patch_am_sdk" ]]; then + patch_am_sdk ${dir} + elif [[ ${action} == "patch_cifar10_dataset" ]]; then + patch_cifar10_dataset ${dir} + elif [[ ${action} == "build_embarc_mli" ]]; then + if [[ "${action_param1}" == *.tcf ]]; then + cp ${action_param1} ${dir}/hw/arc.tcf + build_embarc_mli ${dir} ../../hw/arc.tcf + else + build_embarc_mli ${dir} ${action_param1} + fi + elif [[ ${action} ]]; then + echo "Unknown action '${action}'" + exit 1 + fi +} + +download_and_extract "$1" "$2" "$3" "$4" "$5" diff --git a/tensorflow/lite/micro/tools/make/ethos_u_core_platform_download.sh b/tensorflow/lite/micro/tools/make/ethos_u_core_platform_download.sh new file mode 100755 index 0000000..76223db --- /dev/null +++ b/tensorflow/lite/micro/tools/make/ethos_u_core_platform_download.sh @@ -0,0 +1,82 @@ +#!/bin/bash +# Copyright 2022 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# ============================================================================== +# +# Called with following arguments: +# 1 - Path to the downloads folder which is typically +# tensorflow/lite/micro/tools/make/downloads +# +# This script is called from the Makefile and uses the following convention to +# enable determination of sucess/failure: +# +# - If the script is successful, the only output on stdout should be SUCCESS. +# The makefile checks for this particular string. +# +# - Any string on stdout that is not SUCCESS will be shown in the makefile as +# the cause for the script to have failed. +# +# - Any other informational prints should be on stderr. + +set -e + +SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)" +ROOT_DIR=${SCRIPT_DIR}/../../../../.. +cd "${ROOT_DIR}" + +source tensorflow/lite/micro/tools/make/bash_helpers.sh + +DOWNLOADS_DIR=${1} +if [ ! -d ${DOWNLOADS_DIR} ]; then + echo "The top-level downloads directory: ${DOWNLOADS_DIR} does not exist." + exit 1 +fi + +DOWNLOADED_ETHOS_U_CORE_PLATFORM_PATH=${DOWNLOADS_DIR}/ethos_u_core_platform + +if [ -d ${DOWNLOADED_ETHOS_U_CORE_PLATFORM_PATH} ]; then + echo >&2 "${DOWNLOADED_ETHOS_U_CORE_PLATFORM_PATH} already exists, skipping the download." +else + UNAME_S=`uname -s` + if [ ${UNAME_S} != Linux ]; then + echo "OS type ${UNAME_S} not supported." + exit 1 + fi + + git clone https://git.mlplatform.org/ml/ethos-u/ethos-u-core-platform.git ${DOWNLOADED_ETHOS_U_CORE_PLATFORM_PATH} >&2 + cd ${DOWNLOADED_ETHOS_U_CORE_PLATFORM_PATH} + git checkout e25a89dec1cf990f3168dbd6c565e3b0d51cb151 >&2 + rm -rf .git + create_git_repo ./ + + apply_patch_to_folder ./ ../../increase-stack-size-and-switch-DTCM-SRAM.patch "TFLM patch" + + cd "${ROOT_DIR}" + + LINKER_PATH=${DOWNLOADED_ETHOS_U_CORE_PLATFORM_PATH}/targets/corstone-300 + + # Run C preprocessor on linker file to get rid of ifdefs and make sure compiler is downloaded first. + COMPILER=${DOWNLOADS_DIR}/gcc_embedded/bin/arm-none-eabi-gcc + if [ ! 
-f ${COMPILER} ]; then + RETURN_VALUE=`./tensorflow/lite/micro/tools/make/arm_gcc_download.sh ${DOWNLOADS_DIR}` + if [ "SUCCESS" != "${RETURN_VALUE}" ]; then + echo "The script ./tensorflow/lite/micro/tools/make/arm_gcc_download.sh failed." + exit 1 + fi + fi + ${COMPILER} -E -x c -P -o ${LINKER_PATH}/platform_parsed.ld ${LINKER_PATH}/platform.ld + +fi + +echo "SUCCESS" diff --git a/tensorflow/lite/micro/tools/make/ext_libs/arc_mli.inc b/tensorflow/lite/micro/tools/make/ext_libs/arc_mli.inc new file mode 100644 index 0000000..3213e4d --- /dev/null +++ b/tensorflow/lite/micro/tools/make/ext_libs/arc_mli.inc @@ -0,0 +1,112 @@ +# Copyright 2021 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Settings for embARC MLI library for ARC platform. 
+ +ifeq ($(TARGET_ARCH), arc) + +ifeq ($(BUILD_ARC_MLI),true) + MLI_LIB_DIR ?= arc_mli_$(notdir $(basename $(TCF_FILE_NAME))) + +ifneq ($(filter $(ARC_TAGS), mli20_experimental),) + MLI_LIB_DIR := $(MLI_LIB_DIR)_mli20 +endif + +ifneq ($(findstring test, $(MAKECMDGOALS)),) + $(eval $(call add_third_party_download,$(EMBARC_MLI_URL),$(EMBARC_MLI_MD5),$(MLI_LIB_DIR),build_embarc_mli,$(TCF_FILE))) +else + $(eval $(call add_third_party_download,$(EMBARC_MLI_URL),$(EMBARC_MLI_MD5),$(MLI_LIB_DIR))) +endif + + MLI_INCLUDE_FOLDER = $(MLI_LIB_DIR)/include + +ifneq ($(filter $(ARC_TAGS), mli20_experimental),) + MICROLITE_LIBS += $(MAKEFILE_DIR)/downloads/$(MLI_LIB_DIR)/bin/arc/libmli.a +else + MICROLITE_LIBS += $(MAKEFILE_DIR)/downloads/$(MLI_LIB_DIR)/bin/libmli.a +endif + +else +ifneq ($(ARC_MLI_PRE_COMPILED_TARGET),) + MLI_LIB_DIR ?= arc_mli_package + $(eval $(call add_third_party_download,$(EMBARC_MLI_PRE_COMPILED_URL),$(EMBARC_MLI_PRE_COMPILED_MD5),$(MLI_LIB_DIR),)) + + MLI_INCLUDE_FOLDER = $(MLI_LIB_DIR)/include + MICROLITE_LIBS += $(MAKEFILE_DIR)/downloads/$(MLI_LIB_DIR)/bin/$(ARC_MLI_PRE_COMPILED_TARGET)/release/libmli.a + +else +$(error Target for pre compiled ARC MLI library is not defined) +endif # ARC_MLI_PRE_COMPILED_TARGET +endif # BUILD_ARC_MLI + +ifeq ($(filter $(ARC_TAGS), project_generation),) + + THIRD_PARTY_CC_HDRS += \ + $(MAKEFILE_DIR)/downloads/$(MLI_LIB_DIR)/LICENSE + + THIRD_PARTY_CC_HDRS += $(MLI_LIB) + GENERATED_PROJECT_LIBS += $(MLI_LIB) + + INCLUDES += \ + -I$(MAKEFILE_DIR)/downloads/$(MLI_INCLUDE_FOLDER) \ + -I$(MAKEFILE_DIR)/downloads/$(MLI_INCLUDE_FOLDER)/api + + GENERATED_PROJECT_INCLUDES += \ + -I. 
\ + -I./third_party/$(MLI_INCLUDE_FOLDER) \ + -I./third_party/$(MLI_INCLUDE_FOLDER)/api + +ifneq ($(filter $(ARC_TAGS), mli20_experimental),) + THIRD_PARTY_CC_HDRS += \ + $(MAKEFILE_DIR)/downloads/$(MLI_INCLUDE_FOLDER)/mli_api.h \ + $(MAKEFILE_DIR)/downloads/$(MLI_INCLUDE_FOLDER)/mli_config.h \ + $(MAKEFILE_DIR)/downloads/$(MLI_INCLUDE_FOLDER)/mli_types.h \ + $(MAKEFILE_DIR)/downloads/$(MLI_INCLUDE_FOLDER)/api/mli_helpers_api.h \ + $(MAKEFILE_DIR)/downloads/$(MLI_INCLUDE_FOLDER)/api/mli_kernels_api.h \ + $(MAKEFILE_DIR)/downloads/$(MLI_INCLUDE_FOLDER)/api/mli_mov_api.h +else + THIRD_PARTY_CC_HDRS += \ + $(MAKEFILE_DIR)/downloads/$(MLI_INCLUDE_FOLDER)/mli_api.h \ + $(MAKEFILE_DIR)/downloads/$(MLI_INCLUDE_FOLDER)/mli_config.h \ + $(MAKEFILE_DIR)/downloads/$(MLI_INCLUDE_FOLDER)/mli_types.h \ + $(MAKEFILE_DIR)/downloads/$(MLI_INCLUDE_FOLDER)/api/mli_helpers_api.h \ + $(MAKEFILE_DIR)/downloads/$(MLI_INCLUDE_FOLDER)/api/mli_kernels_api.h \ + $(MAKEFILE_DIR)/downloads/$(MLI_INCLUDE_FOLDER)/api/mli_krn_avepool_spec_api.h \ + $(MAKEFILE_DIR)/downloads/$(MLI_INCLUDE_FOLDER)/api/mli_krn_conv2d_spec_api.h \ + $(MAKEFILE_DIR)/downloads/$(MLI_INCLUDE_FOLDER)/api/mli_krn_depthwise_conv2d_spec_api.h \ + $(MAKEFILE_DIR)/downloads/$(MLI_INCLUDE_FOLDER)/api/mli_krn_maxpool_spec_api.h \ + $(MAKEFILE_DIR)/downloads/$(MLI_INCLUDE_FOLDER)/api/mli_mov_api.h +endif + +endif # project_generation + + MICROLITE_CC_HDRS += tensorflow/lite/micro/kernels/arc_mli/scratch_buffers.h + MICROLITE_CC_SRCS += tensorflow/lite/micro/kernels/arc_mli/scratch_buffers.cc + MICROLITE_CC_HDRS += tensorflow/lite/micro/kernels/arc_mli/scratch_buf_mgr.h + MICROLITE_CC_SRCS += tensorflow/lite/micro/kernels/arc_mli/scratch_buf_mgr.cc + MICROLITE_CC_HDRS += tensorflow/lite/micro/kernels/arc_mli/mli_slicers.h + MICROLITE_CC_SRCS += tensorflow/lite/micro/kernels/arc_mli/mli_slicers.cc + MICROLITE_CC_HDRS += tensorflow/lite/micro/kernels/arc_mli/mli_tf_utils.h + MICROLITE_CC_HDRS += 
tensorflow/lite/micro/kernels/arc_mli/mli_interface.h + MICROLITE_CC_HDRS += tensorflow/lite/micro/kernels/arc_mli/mli_function_specializations.h +ifneq ($(filter $(ARC_TAGS), project_generation),) + MICROLITE_CC_SRCS += tensorflow/lite/micro/kernels/arc_mli/mli_interface_mli_20.cc + MICROLITE_CC_SRCS += tensorflow/lite/micro/kernels/arc_mli/mli_interface.cc +else ifneq ($(filter $(ARC_TAGS), mli20_experimental),) + MICROLITE_CC_SRCS += tensorflow/lite/micro/kernels/arc_mli/mli_interface_mli_20.cc +else + MICROLITE_CC_SRCS += tensorflow/lite/micro/kernels/arc_mli/mli_interface.cc +endif + +endif # TARGET_ARCH diff --git a/tensorflow/lite/micro/tools/make/ext_libs/ceva.inc b/tensorflow/lite/micro/tools/make/ext_libs/ceva.inc new file mode 100644 index 0000000..e69de29 diff --git a/tensorflow/lite/micro/tools/make/ext_libs/cmsis_download.sh b/tensorflow/lite/micro/tools/make/ext_libs/cmsis_download.sh new file mode 100755 index 0000000..c943d27 --- /dev/null +++ b/tensorflow/lite/micro/tools/make/ext_libs/cmsis_download.sh @@ -0,0 +1,63 @@ +#!/bin/bash +# Copyright 2023 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# ==============================================================================
+#
+# Called with following arguments:
+# 1 - Path to the downloads folder which is typically
+#     ${TENSORFLOW_ROOT}/tensorflow/lite/micro/tools/make/downloads
+# 2 - (optional) TENSORFLOW_ROOT: path to root of the TFLM tree (relative to directory from where the script is called).
+#
+# This script is called from the Makefile and uses the following convention to
+# enable determination of success/failure:
+#
+# - If the script is successful, the only output on stdout should be SUCCESS.
+#   The makefile checks for this particular string.
+#
+# - Any string on stdout that is not SUCCESS will be shown in the makefile as
+#   the cause for the script to have failed.
+#
+# - Any other informational prints should be on stderr.
+
+set -e
+
+TENSORFLOW_ROOT=${2}
+source ${TENSORFLOW_ROOT}tensorflow/lite/micro/tools/make/bash_helpers.sh
+
+DOWNLOADS_DIR=${1}
+if [ ! -d ${DOWNLOADS_DIR} ]; then
+  echo "The top-level downloads directory: ${DOWNLOADS_DIR} does not exist."
+  exit 1
+fi
+
+DOWNLOADED_CMSIS_PATH=${DOWNLOADS_DIR}/cmsis
+
+if [ -d ${DOWNLOADED_CMSIS_PATH} ]; then
+  echo >&2 "${DOWNLOADED_CMSIS_PATH} already exists, skipping the download."
+else
+
+  ZIP_PREFIX="e94a96201a97be3e84d3d6ef081d2f0f7db9b5fd"
+  CMSIS_URL="http://github.com/ARM-software/CMSIS_5/archive/${ZIP_PREFIX}.zip"
+  CMSIS_MD5="e72a40716ca8adca690b91819c69d83e"
+
+  # wget is much faster than git clone of the entire repo. So we wget a specific
+  # version and can then apply a patch, as needed.
+  wget ${CMSIS_URL} -O /tmp/${ZIP_PREFIX}.zip >&2
+  check_md5 /tmp/${ZIP_PREFIX}.zip ${CMSIS_MD5}
+
+  unzip -qo /tmp/${ZIP_PREFIX}.zip -d /tmp >&2
+  mv /tmp/CMSIS_5-${ZIP_PREFIX} ${DOWNLOADED_CMSIS_PATH}
+fi
+
+echo "SUCCESS"
diff --git a/tensorflow/lite/micro/tools/make/ext_libs/cmsis_nn.inc b/tensorflow/lite/micro/tools/make/ext_libs/cmsis_nn.inc
new file mode 100644
index 0000000..e9ae5fc
--- /dev/null
+++ b/tensorflow/lite/micro/tools/make/ext_libs/cmsis_nn.inc
@@ -0,0 +1,60 @@
+# Enable u-arch specific behaviours
+ifneq (,$(filter $(TARGET_ARCH), x86_64))
+  # CMSIS-NN optimizations not supported
+endif
+
+ifneq (,$(CMSIS_NN_LIBS))
+  ifeq (,$(CMSIS_PATH))
+    $(error CMSIS_NN_LIBS provided but not CMSIS_PATH)
+  endif
+  ifeq (,$(CMSIS_NN_PATH))
+    $(error CMSIS_NN_LIBS provided but not CMSIS_NN_PATH)
+  endif
+endif
+
+# Unless an external path is provided we force a download during the first
+# phase of make.
+CMSIS_DEFAULT_DOWNLOAD_PATH := $(DOWNLOADS_DIR)/cmsis
+CMSIS_PATH := $(CMSIS_DEFAULT_DOWNLOAD_PATH)
+ifeq ($(CMSIS_PATH), $(CMSIS_DEFAULT_DOWNLOAD_PATH))
+  DOWNLOAD_RESULT := $(shell $(MAKEFILE_DIR)/ext_libs/cmsis_download.sh ${DOWNLOADS_DIR} ${TENSORFLOW_ROOT})
+  ifneq ($(DOWNLOAD_RESULT), SUCCESS)
+    $(error Something went wrong with the CMSIS download: $(DOWNLOAD_RESULT))
+  endif
+endif
+CMSIS_NN_DEFAULT_DOWNLOAD_PATH := $(DOWNLOADS_DIR)/cmsis_nn
+CMSIS_NN_PATH := $(CMSIS_NN_DEFAULT_DOWNLOAD_PATH)
+ifeq ($(CMSIS_NN_PATH), $(CMSIS_NN_DEFAULT_DOWNLOAD_PATH))
+  DOWNLOAD_RESULT := $(shell $(MAKEFILE_DIR)/ext_libs/cmsis_nn_download.sh ${DOWNLOADS_DIR} ${TENSORFLOW_ROOT})
+  ifneq ($(DOWNLOAD_RESULT), SUCCESS)
+    $(error Something went wrong with the CMSIS-NN download: $(DOWNLOAD_RESULT))
+  endif
+endif
+
+ifeq (,$(CMSIS_NN_LIBS))
+  THIRD_PARTY_KERNEL_CC_SRCS += $(shell find $(CMSIS_NN_PATH)/Source -name "*.c")
+else
+  MICROLITE_LIBS += $(CMSIS_NN_LIBS)
+endif
+THIRD_PARTY_CC_HDRS += $(shell find $(CMSIS_NN_PATH)/Include -name "*.h")
+
+# Note all the headers from CMSIS/Core/Include are needed to ensure that the
+# project generation scripts copy over the compiler specific implementations of
+# the various intrinsics.
+THIRD_PARTY_CC_HDRS += \
+  $(CMSIS_PATH)/LICENSE.txt \
+  $(CMSIS_NN_PATH)/LICENSE.txt \
+  $(wildcard $(CMSIS_PATH)/CMSIS/Core/Include/*.h)
+
+# We add -I$(CMSIS_PATH) to enable the code in the TFLM repo (mostly in the
+# tensorflow/lite/micro/kernels/cmsis_nn) to use include paths relative to
+# the CMSIS code-base.
+#
+# The CMSIS code itself uses includes such as #include "arm_math.h" and so
+# we add $(CMSIS_PATH)/CMSIS/Core/Include etc. to be able to build the CMSIS
+# code without any modifications.
+INCLUDES += \
+  -I$(CMSIS_PATH) \
+  -I$(CMSIS_NN_PATH) \
+  -I$(CMSIS_PATH)/CMSIS/Core/Include \
+  -I$(CMSIS_NN_PATH)/Include
diff --git a/tensorflow/lite/micro/tools/make/ext_libs/cmsis_nn_download.sh b/tensorflow/lite/micro/tools/make/ext_libs/cmsis_nn_download.sh
new file mode 100755
index 0000000..bc8e87b
--- /dev/null
+++ b/tensorflow/lite/micro/tools/make/ext_libs/cmsis_nn_download.sh
@@ -0,0 +1,63 @@
+#!/bin/bash
+# Copyright 2023 The TensorFlow Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ==============================================================================
+#
+# Called with following arguments:
+# 1 - Path to the downloads folder which is typically
+#     ${TENSORFLOW_ROOT}/tensorflow/lite/micro/tools/make/downloads
+# 2 - (optional) TENSORFLOW_ROOT: path to root of the TFLM tree (relative to directory from where the script is called).
+#
+# This script is called from the Makefile and uses the following convention to
+# enable determination of success/failure:
+#
+# - If the script is successful, the only output on stdout should be SUCCESS.
+#   The makefile checks for this particular string.
+#
+# - Any string on stdout that is not SUCCESS will be shown in the makefile as
+#   the cause for the script to have failed.
+#
+# - Any other informational prints should be on stderr.
+
+set -e
+
+TENSORFLOW_ROOT=${2}
+source ${TENSORFLOW_ROOT}tensorflow/lite/micro/tools/make/bash_helpers.sh
+
+DOWNLOADS_DIR=${1}
+if [ ! -d ${DOWNLOADS_DIR} ]; then
+  echo "The top-level downloads directory: ${DOWNLOADS_DIR} does not exist."
+  exit 1
+fi
+
+DOWNLOADED_CMSIS_NN_PATH=${DOWNLOADS_DIR}/cmsis_nn
+
+if [ -d ${DOWNLOADED_CMSIS_NN_PATH} ]; then
+  echo >&2 "${DOWNLOADED_CMSIS_NN_PATH} already exists, skipping the download."
+else
+
+  ZIP_PREFIX_NN="dc64e488f6655aa2792d2aceca316c896f78b4db"
+  CMSIS_NN_URL="http://github.com/ARM-software/CMSIS-NN/archive/${ZIP_PREFIX_NN}.zip"
+  CMSIS_NN_MD5="80f9cf0bcc10a4aefb6531ae53942044"
+
+  # wget is much faster than git clone of the entire repo. So we wget a specific
+  # version and can then apply a patch, as needed.
+ wget ${CMSIS_NN_URL} -O /tmp/${ZIP_PREFIX_NN}.zip >&2 + check_md5 /tmp/${ZIP_PREFIX_NN}.zip ${CMSIS_NN_MD5} + + unzip -qo /tmp/${ZIP_PREFIX_NN}.zip -d /tmp >&2 + mv /tmp/CMSIS-NN-${ZIP_PREFIX_NN} ${DOWNLOADED_CMSIS_NN_PATH} +fi + +echo "SUCCESS" diff --git a/tensorflow/lite/micro/tools/make/ext_libs/ethos_u.inc b/tensorflow/lite/micro/tools/make/ext_libs/ethos_u.inc new file mode 100644 index 0000000..c61aaff --- /dev/null +++ b/tensorflow/lite/micro/tools/make/ext_libs/ethos_u.inc @@ -0,0 +1,75 @@ +# Copyright 2022 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================== + +# Arm Compiler will not link the Math library (see below), therefore we're filtering it out. +# See Fatal error: L6450U: Cannot find library m: +# "Arm Compiler is designed to run in a bare metal environment, +# and automatically includes implementations of these functions, +# and so no such flag is necessary." +# https://developer.arm.com/documentation/100891/0611/troubleshooting/general-troubleshooting-advice +MICROLITE_LIBS := $(filter-out -lm,$(MICROLITE_LIBS)) + +ifneq (,$(filter $(TARGET_ARCH), x86_64)) + $(error target architecture x86_64 not supported) +endif + +# Unless an external path is provided we force a download during the first phase of make so +# that the files exist prior to the call to find below. 
+ETHOSU_DEFAULT_DOWNLOAD_DRIVER_PATH := $(MAKEFILE_DIR)/downloads/ethos_u_core_driver +ETHOSU_DRIVER_PATH := $(ETHOSU_DEFAULT_DOWNLOAD_DRIVER_PATH) +ifeq ($(ETHOSU_DRIVER_PATH), $(ETHOSU_DEFAULT_DOWNLOAD_DRIVER_PATH)) + $(call $(or $(shell $(DOWNLOAD_SCRIPT) $(ETHOSU_URL) $(ETHOSU_MD5) $(ETHOSU_DRIVER_PATH) >&2 && echo SUCCESS), $(error $(DOWNLOAD_SCRIPT) failed))) +endif + +THIRD_PARTY_CC_HDRS += $(shell find $(ETHOSU_DRIVER_PATH)/include -name "*.h") +ifeq (,$(ETHOSU_DRIVER_LIBS)) + THIRD_PARTY_CC_SRCS += $(shell find $(ETHOSU_DRIVER_PATH)/src -name "*.c") +else + MICROLITE_LIBS += $(ETHOSU_DRIVER_LIBS) +endif + +# Currently there is a dependency to CMSIS even without OPTIMIZED_KERNEL_DIR=cmsis_nn. +CMSIS_DEFAULT_DOWNLOAD_PATH := $(DOWNLOADS_DIR)/cmsis +CMSIS_PATH := $(CMSIS_DEFAULT_DOWNLOAD_PATH) +ifeq ($(CMSIS_PATH), $(CMSIS_DEFAULT_DOWNLOAD_PATH)) + DOWNLOAD_RESULT := $(shell $(MAKEFILE_DIR)/ext_libs/cmsis_download.sh ${DOWNLOADS_DIR} ${TENSORFLOW_ROOT}) + ifneq ($(DOWNLOAD_RESULT), SUCCESS) + $(error Something went wrong with the CMSIS download: $(DOWNLOAD_RESULT)) + endif +endif + +THIRD_PARTY_CC_HDRS += $(CMSIS_PATH)/CMSIS/Core/Include/cmsis_compiler.h + +INCLUDES += -I$(ETHOSU_DRIVER_PATH)/include \ + -I$(CMSIS_PATH)/CMSIS/Core/Include + +ETHOSU_FLAGS := -DETHOSU_LOG_SEVERITY=ETHOSU_LOG_WARN +ifeq ($(ETHOSU_ARCH), u55) + ETHOSU_FLAGS += \ + -DETHOSU_ARCH=u55 \ + -DETHOSU55 +else ifeq ($(ETHOSU_ARCH), u65) + ETHOSU_FLAGS += \ + -DETHOSU_ARCH=u65 \ + -DETHOSU65 +else + $(error "ETHOSU_ARCH=$(ETHOSU_ARCH) is not supported") +endif +CCFLAGS += ${ETHOSU_FLAGS} +CXXFLAGS += ${ETHOSU_FLAGS} + +# Convert downloaded person detect int8 model. 
+$(GENERATED_SRCS_DIR)tensorflow/lite/micro/models/person_detect_model_data_vela.cc: + $(info Result of person detect int8 model conversion: $(shell $(MAKEFILE_DIR)/ext_libs/person_detection_int8_vela_convert.sh ${DOWNLOADS_DIR} $(GENERATED_SRCS_DIR) $(TENSORFLOW_ROOT))) diff --git a/tensorflow/lite/micro/tools/make/ext_libs/hexagon.inc b/tensorflow/lite/micro/tools/make/ext_libs/hexagon.inc new file mode 100644 index 0000000..2ab78ba --- /dev/null +++ b/tensorflow/lite/micro/tools/make/ext_libs/hexagon.inc @@ -0,0 +1,14 @@ +MICROLITE_CC_KERNEL_SRCS += \ +tensorflow/lite/micro/kernels/hexagon/fully_connected_int8.cc \ +tensorflow/lite/micro/kernels/hexagon/svdf_int8.cc + + +THIRD_PARTY_CC_SRCS += \ + $(wildcard $(HEXAGON_ROOT)/$(HEXAGON_TOOL_VER)/Examples/libcore/SigProc/rFFT/asm_src/*.S) + +THIRD_PARTY_CC_HDRS += \ + $(wildcard $(HEXAGON_ROOT)/$(HEXAGON_TOOL_VER)/Examples/libcore/include/*.h) + +INCLUDES += \ + -I$(HEXAGON_ROOT)/$(HEXAGON_TOOL_VER)/Examples/libcore/include \ + -I$(HEXAGON_ROOT)/$(HEXAGON_TOOL_VER)/Examples/libcore/SigProc/rFFT/include diff --git a/tensorflow/lite/micro/tools/make/ext_libs/person_detection_int8_vela_convert.sh b/tensorflow/lite/micro/tools/make/ext_libs/person_detection_int8_vela_convert.sh new file mode 100755 index 0000000..da51d75 --- /dev/null +++ b/tensorflow/lite/micro/tools/make/ext_libs/person_detection_int8_vela_convert.sh @@ -0,0 +1,76 @@ +#!/bin/bash +# Copyright 2022 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================== +# +# Called with following arguments: +# 1 - Path to the downloads folder which is typically +# ${TENSORFLOW_ROOT}/tensorflow/lite/micro/tools/make/downloads +# 2 - Generated source directory +# 3 - (optional) TENSORFLOW_ROOT: path to root of the TFLM tree (relative to directory from where the script is called). +# +# This script is called from the Makefile and uses the following convention to +# enable determination of sucess/failure: +# +# - If the script is successful, the only output on stdout should be SUCCESS. +# The makefile checks for this particular string. +# +# - Any string on stdout that is not SUCCESS will be shown in the makefile as +# the cause for the script to have failed. +# +# - Any other informational prints should be on stderr. + +set -e + +source ${3}tensorflow/lite/micro/tools/make/bash_helpers.sh + +DOWNLOADS_DIR=${1} +GENERATED_SRCS_DIR=${2} + +if [ ! -d ${DOWNLOADS_DIR} ]; then + echo "The top-level downloads directory: ${DOWNLOADS_DIR} does not exist." + exit 1 +fi + +# Optimize downloaded model with Vela for Ethos-U. +# See tensorflow/lite/micro/kernels/ethos_u/README.md for more info. +MODEL_DIR=${GENERATED_SRCS_DIR}tensorflow/lite/micro/models +CONVERTED_PERSON_MODEL_INT8=${MODEL_DIR}/person_detect_model_data_vela.cc + +if [ ! -f ${CONVERTED_PERSON_MODEL_INT8} ]; then + # Compile an optimized .tflite version for Ethos-U. + TEMPFILE=$(mktemp -d)/ + python3 -m venv $TEMPFILE + source $TEMPFILE/bin/activate + python3 -m pip install --upgrade pip >&2 + pip install --upgrade cython >&2 + pip install --prefer-binary ethos-u-vela >&2 + vela --accelerator-config=ethos-u55-256 ${DOWNLOADS_DIR}/../../../models/person_detect.tflite \ + --output-dir ${MODEL_DIR} >&2 + deactivate + + # Convert .tflite back to C array. + echo "// This file is generated by $0." 
> ${CONVERTED_PERSON_MODEL_INT8} + echo '#include "tensorflow/lite/micro/models/person_detect_model_data.h"' >> \ + ${CONVERTED_PERSON_MODEL_INT8} + echo -n "const " >> ${CONVERTED_PERSON_MODEL_INT8} + xxd -i ${MODEL_DIR}/person_detect_vela.tflite >> ${CONVERTED_PERSON_MODEL_INT8} + sed -i 's/gen_cortex_m_corstone_300_cortex_m55_default_genfiles_tensorflow_lite_micro_models_person_detect_vela_tflite/g_person_detect_model_data/' \ + ${CONVERTED_PERSON_MODEL_INT8} + sed -i 's/^const unsigned char g_person_detect_model_data/alignas\(16\) &/' ${CONVERTED_PERSON_MODEL_INT8} + sed -i 's/g_person_detect_model_data_len/g_person_detect_model_data_size/' ${CONVERTED_PERSON_MODEL_INT8} + sed -i 's/unsigned int/const unsigned int/' ${CONVERTED_PERSON_MODEL_INT8} +fi + +echo "SUCCESS" diff --git a/tensorflow/lite/micro/tools/make/ext_libs/stm32_bare_lib.patch b/tensorflow/lite/micro/tools/make/ext_libs/stm32_bare_lib.patch new file mode 100644 index 0000000..d4e0573 --- /dev/null +++ b/tensorflow/lite/micro/tools/make/ext_libs/stm32_bare_lib.patch @@ -0,0 +1,85 @@ +diff --git a/include/strings.h b/include/strings.h +index 00e7c5d..3b9bb5a 100644 +--- a/include/strings.h ++++ b/include/strings.h +@@ -33,12 +33,6 @@ static const int kFastToBufferSize = 48; + // Populates the provided buffer with an ASCII representation of the number. + char* FastInt32ToBufferLeft(int32_t i, char* buffer); + +-// Populates the provided buffer with ASCII representation of the float number. +-// Avoids the use of any floating point instructions (since these aren't +-// supported on many microcontrollers) and as a consequence prints values with +-// power-of-two exponents. +-char* FastFloatToBufferLeft(float i, char* buffer); +- + // Appends a string to a string, in-place. You need to pass in the maximum + // string length as the second argument. 
+ char* StrCatStr(char* main, int main_max_length, char* to_append); +diff --git a/source/strings.c b/source/strings.c +index 4701d35..10d5137 100644 +--- a/source/strings.c ++++ b/source/strings.c +@@ -55,63 +55,6 @@ char* FastInt32ToBufferLeft(int32_t i, char* buffer) { + return FastUInt32ToBufferLeft(u, buffer, 10); + } + +-// Populates the provided buffer with ASCII representation of the float number. +-// Avoids the use of any floating point instructions (since these aren't +-// supported on many microcontrollers) and as a consequence prints values with +-// power-of-two exponents. +-char* FastFloatToBufferLeft(float i, char* buffer) { +- char* current = buffer; +- char* current_end = buffer + (kFastToBufferSize - 1); +- // Access the bit fields of the floating point value to avoid requiring any +- // float instructions. These constants are derived from IEEE 754. +- const uint32_t sign_mask = 0x80000000; +- const uint32_t exponent_mask = 0x7f800000; +- const int32_t exponent_shift = 23; +- const int32_t exponent_bias = 127; +- const uint32_t fraction_mask = 0x007fffff; +- const uint32_t u = *(uint32_t*)(&i); +- const int32_t exponent = +- ((u & exponent_mask) >> exponent_shift) - exponent_bias; +- const uint32_t fraction = (u & fraction_mask); +- // Expect ~0x2B1B9D3 for fraction. +- if (u & sign_mask) { +- *current = '-'; +- current += 1; +- } +- *current = 0; +- // These are special cases for infinities and not-a-numbers. +- if (exponent == 128) { +- if (fraction == 0) { +- current = StrCatStr(current, (current_end - current), "Inf"); +- return current; +- } else { +- current = StrCatStr(current, (current_end - current), "NaN"); +- return current; +- } +- } +- // 0x007fffff represents 0.99... for the fraction, so to print the correct +- // decimal digits we need to scale our value before passing it to the +- // conversion function. This scale should be 10000000/8388608 = 1.1920928955. 
+- // We can approximate this using multipy-adds and right-shifts using the +- // values in this array. +- const int32_t scale_shifts_size = 13; +- const int8_t scale_shifts[13] = {3, 4, 8, 11, 13, 14, 17, +- 18, 19, 20, 21, 22, 23}; +- uint32_t scaled_fraction = fraction; +- for (int i = 0; i < scale_shifts_size; ++i) { +- scaled_fraction += (fraction >> scale_shifts[i]); +- } +- *current = '1'; +- current += 1; +- *current = '.'; +- current += 1; +- *current = 0; +- current = StrCatUInt32(current, (current_end - current), scaled_fraction, 10); +- current = StrCatStr(current, (current_end - current), "*2^"); +- current = StrCatInt32(current, (current_end - current), exponent); +- return current; +-} +- + // Appends a string to a string, in-place. You need to pass in the maximum + // string length as the second argument. + char* StrCatStr(char* main, int main_max_length, char* to_append) { diff --git a/tensorflow/lite/micro/tools/make/ext_libs/stm32_bare_lib_download.sh b/tensorflow/lite/micro/tools/make/ext_libs/stm32_bare_lib_download.sh new file mode 100755 index 0000000..c7e4c78 --- /dev/null +++ b/tensorflow/lite/micro/tools/make/ext_libs/stm32_bare_lib_download.sh @@ -0,0 +1,52 @@ +#!/bin/bash +# Copyright 2019 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# ============================================================================== +# +# Called with following arguments: +# 1 - Path to the downloads folder which is typically +# ${TENSORFLOW_ROOT}/tensorflow/lite/micro/tools/make/downloads +# +# This script is called from the Makefile and uses the following convention to +# enable determination of sucess/failure: +# +# - If the script is successful, the only output on stdout should be SUCCESS. +# The makefile checks for this particular string. +# +# - Any string on stdout that is not SUCCESS will be shown in the makefile as +# the cause for the script to have failed. +# +# - Any other informational prints should be on stderr. + +set -e + +DOWNLOADS_DIR=${1} +if [ ! -d ${DOWNLOADS_DIR} ]; then + echo "The top-level downloads directory: ${DOWNLOADS_DIR} does not exist." + exit 1 +fi + +DOWNLOADED_STM32_BARE_LIB_PATH=${DOWNLOADS_DIR}/stm32_bare_lib + +if [ -d ${DOWNLOADED_STM32_BARE_LIB_PATH} ]; then + echo >&2 "${DOWNLOADED_STM32_BARE_LIB_PATH} already exists, skipping the download." 
+else + git clone https://github.com/google/stm32_bare_lib.git ${DOWNLOADED_STM32_BARE_LIB_PATH} >&2 + pushd ${DOWNLOADED_STM32_BARE_LIB_PATH} > /dev/null + git checkout aaabdeb0d6098322a0874b29f6ed547a39b3929f >&2 + git apply ../../ext_libs/stm32_bare_lib.patch + popd > /dev/null +fi + +echo "SUCCESS" diff --git a/tensorflow/lite/micro/tools/make/ext_libs/vexriscv.inc b/tensorflow/lite/micro/tools/make/ext_libs/vexriscv.inc new file mode 100644 index 0000000..e69de29 diff --git a/tensorflow/lite/micro/tools/make/ext_libs/xa_nnlib_hifi4.patch b/tensorflow/lite/micro/tools/make/ext_libs/xa_nnlib_hifi4.patch new file mode 100644 index 0000000..227ee92 --- /dev/null +++ b/tensorflow/lite/micro/tools/make/ext_libs/xa_nnlib_hifi4.patch @@ -0,0 +1,492 @@ +From 0a68f2ffa640d1b52314278cec838384722eb1d0 Mon Sep 17 00:00:00 2001 +From: William Huang +Date: Tue, 16 May 2023 09:18:55 +0000 +Subject: [PATCH] Optimize Xtensa transpose convolution for more kernel sizes + and input channels. + +Previously, there were three code paths, in decreasing performance: + +1. Kernel size (H*W) multiple of 4, input channels multiple of 16 +2. Kernel size (H*W) multiple of 4, input channels multiple of 4 +3. Others (unoptimized case) + +This patch reduces them to the follow two cases: + +1. Input channels multiple of 4 +2. Others (unoptimized case) + +Original CL=cl/516144094 + +BUG=227374718 + +Signed-off-by: William Huang + +Optimize Xtensa CONV2D circular buffer copy. + +In Xtensa's CONV2D kernel, data is shuffled around and padded so the 2D +convolution turns into sequential vector products. Unfortunately, this +process is somewhat slow, and the overhead is especially high for small +vector lengths. 
+ +This patch introduces the following: + +- Faster code path for no padding (since our models use VALID padding, + i.e., no padding at all) +- Manual loop if array is small and memcpy if array is large +- Skip memset on padded channels as the corresponding kernels are + already zero + +BUG=249796929 + +Signed-off-by: William Huang + +Add implementation for zero-copy CONV2D kernels. + +The previous `xa_nn_conv2d_std_sym8sxsym16s` implementation shuffles the +input tensor into a circular buffer, flattening the dimensions, so that +the 2D convolution turns into sequential vector products. However, this +created significant overhead for layers where the resulting vector +lengths are small. + +This patch implements an alternative zero-copy method that takes +advantage of two facts: + +1. If `x_padding == 0`, the width dimension is automatically flattened + with the channel dimension, and we need only `kernel_height` + sequential vector products, even without the data shuffling +2. Similar to the loop tiling done in + `xa_nn_matXvec_sym8sxsym16s_sym16s_circ`, we can tile the `out_width` + and `out_channels` dimensions, achieving the throughput of + `_xa_nn_dot_product_2row_4vec_mat_vecs_4bytes_aligned` (i.e., 1.6 + MULAAAAQs/cycle), even when `out_height < 2` + +As a result, the patch significantly benefits layers where the kernel +and output heights are small, leading to 25%+ cycle reductions in some +use cases. 
+ +Signed-off-by: William Huang +--- + .../cnn/hifi4/xa_nn_conv2d_std_circ_buf.c | 84 +++++++- + .../cnn/hifi4/xa_nn_conv2d_std_state.h | 15 ++ + .../cnn/hifi4/xa_nn_conv2d_std_sym8sxsym16s.c | 203 +++++++++++++++--- + .../hifi4/xa_nn_transpose_conv_sym8sxsym16s.c | 36 +--- + 4 files changed, 275 insertions(+), 63 deletions(-) + +diff --git a/algo/kernels/cnn/hifi4/xa_nn_conv2d_std_circ_buf.c b/algo/kernels/cnn/hifi4/xa_nn_conv2d_std_circ_buf.c +index f8adba2..1a5f186 100644 +--- a/algo/kernels/cnn/hifi4/xa_nn_conv2d_std_circ_buf.c ++++ b/algo/kernels/cnn/hifi4/xa_nn_conv2d_std_circ_buf.c +@@ -642,7 +642,8 @@ VOID conv2d_std_init_cir_buf( + } + + // Add x_stride (but not more than kernel_width) x (input_height x input_channels) new planes to circular buffer +-VOID conv2d_std_update_cir_buf( ++// Slow version of conv2d_std_update_cir_buf with fewer requirements ++VOID conv2d_std_update_cir_buf_slow( + WORD32 input_channels, + WORD32 input_channels_pad, + WORD32 input_bytewidth, +@@ -742,6 +743,87 @@ VOID conv2d_std_update_cir_buf( + *pp_inp = (VOID *)p_inp; + } + ++// Add x_stride (but not more than kernel_width) x (input_height x input_channels) new planes to circular buffer ++VOID conv2d_std_update_cir_buf( ++ WORD32 input_channels, ++ WORD32 input_channels_pad, ++ WORD32 input_bytewidth, ++ WORD32 input_width, ++ WORD32 input_height, ++ WORD32 y_padding, ++ WORD32 y_b_pad, ++ WORD32 x_padding, ++ WORD32 kernel_width, ++ WORD32 x_stride, ++ VOID **pp_inp, ++ WORD32 idx_beg_inp_width_pad, ++ xa_nn_conv_state_t *p_state) ++{ ++ if (y_padding != 0 || y_b_pad != 0 || x_padding != 0) { ++ conv2d_std_update_cir_buf_slow( ++ input_channels, ++ input_channels_pad, ++ input_bytewidth, ++ input_width, ++ input_height, ++ y_padding, ++ y_b_pad, ++ x_padding, ++ kernel_width, ++ x_stride, ++ pp_inp, ++ idx_beg_inp_width_pad, ++ p_state ++ ); ++ return; ++ } ++ ++ WORD32 i,k; ++ WORD8 *p_inp = (WORD8 *)*pp_inp; ++ WORD32 planes_to_add = x_stride > kernel_width ? 
kernel_width : x_stride; ++ WORD32 planes_to_keep = kernel_width - planes_to_add; ++ ++ // Copy 'planes_to_add' planes of data to circular buffer ++ AE_ADDCIRC16X4_XC((ae_int16x4 *)p_state->cir_buf.p_curr, planes_to_add * input_channels_pad * input_bytewidth); ++ WORD8 *p_dst = (WORD8 *)p_state->cir_buf.p_curr; ++ AE_ADDCIRC16X4_XC((ae_int16x4 *)p_dst, planes_to_keep * input_channels_pad * input_bytewidth); ++ ++ WORD32 copy_inp_width = planes_to_add; ++ WORD32 to_skip_inp_width = x_stride - planes_to_add; // Non-zero for x_stride > kernel_width ++ ++ int size = input_channels * input_bytewidth; ++ if (size <= 32) { ++ for(i=0;icir_buf.p_curr/* matrix: rows x cols */ +- ,p_state->p_kernel_padded /* vec: cols */ +- ,p_bias /* bias */ +- ,out_height /* rows */ +- ,input_channels_pad * kernel_width * kernel_height /* cols */ +- ,input_channels_pad * kernel_width * y_stride/* row_offset */ +- ,out_channels /* vec_count */ +- ,input_channels_pad * kernel_width * kernel_height /* vec_stride */ +- ,out_channels_offset /* out_col_offset */ +- ,out_height_offset /* out_row_offset */ +- ,input_zero_bias +- ,p_out_multiplier +- ,p_out_shift +- ,out_zero_bias +- ); +- p_out += out_width_offset; ++ // Convolution using matXvec with matrix as circular buffer ++ xa_nn_matXvec_sym8sxsym16s_sym16s_circ ++ (p_out /* output */ ++ ,p_state->cir_buf.p_curr/* matrix: rows x cols */ ++ ,p_state->p_kernel_padded /* vec: cols */ ++ ,p_bias /* bias */ ++ ,out_height /* rows */ ++ ,input_channels_pad * kernel_width * kernel_height /* cols */ ++ ,input_channels_pad * kernel_width * y_stride/* row_offset */ ++ ,out_channels /* vec_count */ ++ ,input_channels_pad * kernel_width * kernel_height /* vec_stride */ ++ ,out_channels_offset /* out_col_offset */ ++ ,out_height_offset /* out_row_offset */ ++ ,input_zero_bias ++ ,p_out_multiplier ++ ,p_out_shift ++ ,out_zero_bias ++ ); ++ p_out += out_width_offset; ++ } ++ } else { ++ const WORD16 *p_dst0_0 = p_out + 0; ++ const WORD16 *p_dst0_1 = p_out 
+ 1; ++ const WORD16 *p_dst0_2 = p_out + 2; ++ const WORD16 *p_dst0_3 = p_out + 3; ++ const WORD16 *p_dst1_0 = p_out + out_channels + 0; ++ const WORD16 *p_dst1_1 = p_out + out_channels + 1; ++ const WORD16 *p_dst1_2 = p_out + out_channels + 2; ++ const WORD16 *p_dst1_3 = p_out + out_channels + 3; ++ int kernel_out_ch_offset = kernel_height * kernel_width * input_channels; ++ int input_x_offset = input_channels * x_stride / 4; ++ int p_inp_vec_stride = input_width * input_channels / 4; ++ int p_kern_vec_stride = kernel_width * input_channels; ++ int vec_len = kernel_width * input_channels; ++ for (int out_y = 0; out_y < out_height; ++out_y) { ++ for (int out_x = 0; out_x < out_width; out_x += 2) { ++ for (int out_ch = 0; out_ch < out_channels; out_ch += 4) { ++ ae_int64 out0_0 = p_bias[out_ch + 0]; ++ ae_int64 out0_1 = p_bias[out_ch + 1]; ++ ae_int64 out0_2 = p_bias[out_ch + 2]; ++ ae_int64 out0_3 = p_bias[out_ch + 3]; ++ ae_int64 out1_0 = p_bias[out_ch + 0]; ++ ae_int64 out1_1 = p_bias[out_ch + 1]; ++ ae_int64 out1_2 = p_bias[out_ch + 2]; ++ ae_int64 out1_3 = p_bias[out_ch + 3]; ++ out0_0 = AE_SLAI64(out0_0, 8); ++ out0_1 = AE_SLAI64(out0_1, 8); ++ out0_2 = AE_SLAI64(out0_2, 8); ++ out0_3 = AE_SLAI64(out0_3, 8); ++ out1_0 = AE_SLAI64(out1_0, 8); ++ out1_1 = AE_SLAI64(out1_1, 8); ++ out1_2 = AE_SLAI64(out1_2, 8); ++ out1_3 = AE_SLAI64(out1_3, 8); ++ int in_x_o = out_x * x_stride; ++ int in_y_o = out_y * y_stride - y_padding; ++ int k_y_min = -in_y_o; ++ int k_y_max = input_width - in_y_o; ++ k_y_min = (k_y_min < 0) ? 0 : k_y_min; ++ k_y_min = (k_y_min < kernel_height) ? k_y_min : kernel_height; ++ k_y_max = (k_y_max < 0) ? 0 : k_y_max; ++ k_y_max = (k_y_max < kernel_height) ? 
k_y_max : kernel_height; ++ const ae_int16x4 *p_inp_vec = ++ (ae_int16x4 *)&p_inp[((in_y_o + k_y_min) * input_width + in_x_o) * ++ input_channels + ++ 0]; ++ const WORD8 *p_kern_vec = ++ &p_kernel[(((out_ch + 0) * kernel_height + k_y_min) * kernel_width + ++ 0) * ++ input_channels + ++ 0]; ++ for (int k_y = k_y_min; k_y < k_y_max; ++k_y) { ++ const ae_int16x4 *p_inp_vec0 = p_inp_vec; ++ const ae_int16x4 *p_inp_vec1 = p_inp_vec + input_x_offset; ++ const WORD8 *p_kern_vec0 = p_kern_vec; ++ const WORD8 *p_kern_vec1 = p_kern_vec0 + kernel_out_ch_offset; ++ const WORD8 *p_kern_vec2 = p_kern_vec1 + kernel_out_ch_offset; ++ const WORD8 *p_kern_vec3 = p_kern_vec2 + kernel_out_ch_offset; ++ p_inp_vec += p_inp_vec_stride; ++ p_kern_vec += p_kern_vec_stride; ++ ae_int16x4 d_inp0; ++ ae_int16x4 d_inp1; ++ ae_int16x4 d_kern0; ++ ae_int16x4 d_kern1; ++ ae_int16x4 d_kern2; ++ ae_int16x4 d_kern3; ++ for (int i = 0; i < vec_len; i += 4) { ++ AE_L16X4_IP(d_inp0, p_inp_vec0, 8); ++ AE_L16X4_IP(d_inp1, p_inp_vec1, 8); ++ AE_L8X4F_IP(d_kern0, p_kern_vec0, 4); ++ AE_L8X4F_IP(d_kern1, p_kern_vec1, 4); ++ AE_L8X4F_IP(d_kern2, p_kern_vec2, 4); ++ AE_L8X4F_IP(d_kern3, p_kern_vec3, 4); ++ AE_MULAAAAQ16(out0_0, d_inp0, d_kern0); ++ AE_MULAAAAQ16(out0_1, d_inp0, d_kern1); ++ AE_MULAAAAQ16(out0_2, d_inp0, d_kern2); ++ AE_MULAAAAQ16(out0_3, d_inp0, d_kern3); ++ AE_MULAAAAQ16(out1_0, d_inp1, d_kern0); ++ AE_MULAAAAQ16(out1_1, d_inp1, d_kern1); ++ AE_MULAAAAQ16(out1_2, d_inp1, d_kern2); ++ AE_MULAAAAQ16(out1_3, d_inp1, d_kern3); ++ } ++ } ++ out0_0 = AE_SRAI64(out0_0, 8); ++ out0_1 = AE_SRAI64(out0_1, 8); ++ out0_2 = AE_SRAI64(out0_2, 8); ++ out0_3 = AE_SRAI64(out0_3, 8); ++ out1_0 = AE_SRAI64(out1_0, 8); ++ out1_1 = AE_SRAI64(out1_1, 8); ++ out1_2 = AE_SRAI64(out1_2, 8); ++ out1_3 = AE_SRAI64(out1_3, 8); ++ ae_int32x2 acc_vec0 = MultiplyByQuantizedMultiplier_x2_opt( ++ out0_0, out1_0, p_out_multiplier[out_ch + 0], ++ p_out_shift[out_ch + 0]); ++ ae_int32x2 acc_vec1 = 
MultiplyByQuantizedMultiplier_x2_opt( ++ out0_1, out1_1, p_out_multiplier[out_ch + 1], ++ p_out_shift[out_ch + 1]); ++ ae_int32x2 acc_vec2 = MultiplyByQuantizedMultiplier_x2_opt( ++ out0_2, out1_2, p_out_multiplier[out_ch + 2], ++ p_out_shift[out_ch + 2]); ++ ae_int32x2 acc_vec3 = MultiplyByQuantizedMultiplier_x2_opt( ++ out0_3, out1_3, p_out_multiplier[out_ch + 3], ++ p_out_shift[out_ch + 3]); ++ ae_int16x4 d1 = AE_SAT16X4(acc_vec0, acc_vec1); ++ ae_int16x4 d2 = AE_SAT16X4(acc_vec2, acc_vec3); ++ AE_S16_0_XP(AE_SEL16_6543(d1, d1), (ae_int16 *)p_dst0_0, 8); ++ AE_S16_0_XP(AE_SEL16_5432(d1, d1), (ae_int16 *)p_dst1_0, 8); ++ AE_S16_0_XP(AE_SEL16_4321(d1, d1), (ae_int16 *)p_dst0_1, 8); ++ AE_S16_0_XP(d1, (ae_int16 *)p_dst1_1, 8); ++ AE_S16_0_XP(AE_SEL16_6543(d2, d2), (ae_int16 *)p_dst0_2, 8); ++ AE_S16_0_XP(AE_SEL16_5432(d2, d2), (ae_int16 *)p_dst1_2, 8); ++ AE_S16_0_XP(AE_SEL16_4321(d2, d2), (ae_int16 *)p_dst0_3, 8); ++ AE_S16_0_XP(d2, (ae_int16 *)p_dst1_3, 8); ++ } ++ p_dst0_0 += out_channels; ++ p_dst0_1 += out_channels; ++ p_dst0_2 += out_channels; ++ p_dst0_3 += out_channels; ++ p_dst1_0 += out_channels; ++ p_dst1_1 += out_channels; ++ p_dst1_2 += out_channels; ++ p_dst1_3 += out_channels; ++ } ++ } + } + + return 0; +diff --git a/algo/kernels/cnn/hifi4/xa_nn_transpose_conv_sym8sxsym16s.c b/algo/kernels/cnn/hifi4/xa_nn_transpose_conv_sym8sxsym16s.c +index 7f31b75..a010d45 100644 +--- a/algo/kernels/cnn/hifi4/xa_nn_transpose_conv_sym8sxsym16s.c ++++ b/algo/kernels/cnn/hifi4/xa_nn_transpose_conv_sym8sxsym16s.c +@@ -157,7 +157,7 @@ int xa_nn_transpose_conv_sym8sxsym16s(WORD16* output_data, + */ + if(input_data && filter_data && output_data && scratch_buffer && + (((unsigned int)input_data&0x7)==0) && (((unsigned int)filter_data&0x3)==0) && (((unsigned int)output_data&0x7) == 0) && +- (((unsigned int)scratch_buffer&0x7) == 0) && ((input_depth&0xF)==0) && ((filter_height*filter_width&0x3)==0)) ++ (((unsigned int)scratch_buffer&0x7) == 0) && ((input_depth&0x3)==0)) + { 
+ { + //tbd : batch = 1, need to handle other values and in_x_min/max= 0 .. need toc heck for other values +@@ -180,7 +180,8 @@ int xa_nn_transpose_conv_sym8sxsym16s(WORD16* output_data, + filt_y_max = (filt_y_max < filter_height) ? filt_y_max : filter_height; + filt_y_max = (filt_y_max < 0) ? 0 : filt_y_max; + pinp = (WORD16*)&input_data[in_y*input_width*input_depth+in_x*input_depth]; +- for (int in_channel = 0; in_channel < input_depth; in_channel+=16) ++ int in_channel = 0; ++ for (; in_channel + 15 < input_depth; in_channel+=16) + { + ae_int16x4 d_inp, d_inp1, d_inp2, d_inp3; + AE_L16X4_IP(d_inp, (ae_int16x4*)pinp, sizeof(WORD64)); +@@ -235,36 +236,7 @@ int xa_nn_transpose_conv_sym8sxsym16s(WORD16* output_data, + } + } + } +- } +- } +- } +- } +- else if(input_data && filter_data && output_data && scratch_buffer && +- (((unsigned int)input_data&0x7)==0) && (((unsigned int)filter_data&0x3)==0) && (((unsigned int)output_data&0x7) == 0) && +- (((unsigned int)scratch_buffer&0x7) == 0) && ((input_depth&0x3)==0) && ((filter_height*filter_width&0x3)==0)) +- { +- { +- //tbd : batch = 1, need to handle other values and in_x_min/max= 0 .. need toc heck for other values +- for (int in_y = 0; in_y < input_height; ++in_y) +- { +- for (int in_x = 0; in_x < input_width; ++in_x) +- { +- const int out_x_orig = in_x*stride_width - pad_width; +- const int out_y_orig = in_y*stride_height - pad_height; +- int filt_x_min = -out_x_orig; +- int filt_x_max = output_width - out_x_orig; +- int filt_y_min = -out_y_orig; +- int filt_y_max = output_height - out_y_orig; +- filt_x_min = (filt_x_min < filter_width) ? filt_x_min : filter_width; +- filt_x_min = (filt_x_min < 0) ? 0 : filt_x_min; +- filt_x_max = (filt_x_max < filter_width) ? filt_x_max : filter_width; +- filt_x_max = (filt_x_max < 0) ? 0 : filt_x_max; +- filt_y_min = (filt_y_min < filter_height) ? filt_y_min : filter_height; +- filt_y_min = (filt_y_min < 0) ? 0 : filt_y_min; +- filt_y_max = (filt_y_max < filter_height) ? 
filt_y_max : filter_height; +- filt_y_max = (filt_y_max < 0) ? 0 : filt_y_max; +- pinp = (WORD16*)&input_data[in_y*input_width*input_depth+in_x*input_depth]; +- for (int in_channel = 0; in_channel < input_depth; in_channel+=4) ++ for (; in_channel + 3 < input_depth; in_channel+=4) + { + ae_int16x4 d_inp; + AE_L16X4_IP(d_inp, (ae_int16x4*)pinp, sizeof(WORD64)); +-- +2.41.0.162.gfafddb0af9-goog + diff --git a/tensorflow/lite/micro/tools/make/ext_libs/xa_nnlib_hifi5.patch b/tensorflow/lite/micro/tools/make/ext_libs/xa_nnlib_hifi5.patch new file mode 100644 index 0000000..9d95c63 --- /dev/null +++ b/tensorflow/lite/micro/tools/make/ext_libs/xa_nnlib_hifi5.patch @@ -0,0 +1,36 @@ +diff --git a/algo/kernels/fc/hifi4/xa_nn_fully_connected.c b/algo/kernels/fc/hifi4/xa_nn_fully_connected.c +index 26a2b73..61f0a64 100644 +--- a/algo/kernels/fc/hifi4/xa_nn_fully_connected.c ++++ b/algo/kernels/fc/hifi4/xa_nn_fully_connected.c +@@ -298,7 +298,6 @@ WORD32 xa_nn_fully_connected_sym8sxasym8s_asym8s + XA_NNLIB_ARG_CHK_PTR(p_out, -1); + XA_NNLIB_ARG_CHK_PTR(p_weight, -1); + XA_NNLIB_ARG_CHK_PTR(p_inp, -1); +- XA_NNLIB_ARG_CHK_PTR(p_bias, -1); + /* Pointer alignment checks */ + #if 0 + XA_NNLIB_ARG_CHK_ALIGN(p_out, ALIGNMENT, -1); +@@ -310,7 +309,8 @@ WORD32 xa_nn_fully_connected_sym8sxasym8s_asym8s + XA_NNLIB_ARG_CHK_ALIGN(p_out, sizeof(WORD8), -1); + XA_NNLIB_ARG_CHK_ALIGN(p_weight, sizeof(WORD8), -1); + XA_NNLIB_ARG_CHK_ALIGN(p_inp, sizeof(WORD8), -1); +- XA_NNLIB_ARG_CHK_ALIGN(p_bias, sizeof(WORD32), -1); ++ if (p_bias != NULL) ++ XA_NNLIB_ARG_CHK_ALIGN(p_bias, sizeof(WORD32), -1); + #endif + /* Basic Parameter checks */ + XA_NNLIB_ARG_CHK_COND((out_depth <= 0), -1); +diff --git a/algo/kernels/matXvec/hifi5/xa_nn_matXvec_sym8sxasym8s.c b/algo/kernels/matXvec/hifi5/xa_nn_matXvec_sym8sxasym8s.c +index 5350cbe..a91e043 100644 +--- a/algo/kernels/matXvec/hifi5/xa_nn_matXvec_sym8sxasym8s.c ++++ b/algo/kernels/matXvec/hifi5/xa_nn_matXvec_sym8sxasym8s.c +@@ -704,7 +704,8 @@ WORD32 
xa_nn_matXvec_sym8sxasym8s_asym8s( + XA_NNLIB_ARG_CHK_PTR(p_mat1, -1); + XA_NNLIB_ARG_CHK_PTR(p_vec1, -1); + /* Pointer alignment checks */ +- XA_NNLIB_ARG_CHK_ALIGN(p_bias, sizeof(WORD32), -1); ++ if (p_bias != NULL) ++ XA_NNLIB_ARG_CHK_ALIGN(p_bias, sizeof(WORD32), -1); + /* Basic Parameter checks */ + XA_NNLIB_ARG_CHK_COND((rows <= 0), -1); + XA_NNLIB_ARG_CHK_COND((cols1 <= 0), -1); diff --git a/tensorflow/lite/micro/tools/make/ext_libs/xtensa.inc b/tensorflow/lite/micro/tools/make/ext_libs/xtensa.inc new file mode 100644 index 0000000..3b28267 --- /dev/null +++ b/tensorflow/lite/micro/tools/make/ext_libs/xtensa.inc @@ -0,0 +1,159 @@ +# Explicitly add kernel sources specific to the Xtensa optimized +# implementations. +MICROLITE_CC_KERNEL_SRCS += \ + $(TENSORFLOW_ROOT)tensorflow/lite/micro/kernels/xtensa/add_vision.cc \ + $(TENSORFLOW_ROOT)tensorflow/lite/micro/kernels/xtensa/conv_hifi.cc \ + $(TENSORFLOW_ROOT)tensorflow/lite/micro/kernels/xtensa/conv_int16_reference.cc \ + $(TENSORFLOW_ROOT)tensorflow/lite/micro/kernels/xtensa/conv_int8_reference.cc \ + $(TENSORFLOW_ROOT)tensorflow/lite/micro/kernels/xtensa/conv_vision.cc \ + $(TENSORFLOW_ROOT)tensorflow/lite/micro/kernels/xtensa/depthwise_conv_hifi.cc \ + $(TENSORFLOW_ROOT)tensorflow/lite/micro/kernels/xtensa/depthwise_conv_vision.cc \ + $(TENSORFLOW_ROOT)tensorflow/lite/micro/kernels/xtensa/fully_connected_common_xtensa.cc \ + $(TENSORFLOW_ROOT)tensorflow/lite/micro/kernels/xtensa/fully_connected_int8.cc \ + $(TENSORFLOW_ROOT)tensorflow/lite/micro/kernels/xtensa/fully_connected_vision.cc \ + $(TENSORFLOW_ROOT)tensorflow/lite/micro/kernels/xtensa/pad_vision.cc \ + $(TENSORFLOW_ROOT)tensorflow/lite/micro/kernels/xtensa/pooling_int8.cc \ + $(TENSORFLOW_ROOT)tensorflow/lite/micro/kernels/xtensa/pooling_vision.cc \ + $(TENSORFLOW_ROOT)tensorflow/lite/micro/kernels/xtensa/reduce_vision.cc \ + $(TENSORFLOW_ROOT)tensorflow/lite/micro/kernels/xtensa/reshape_vision.cc \ + 
$(TENSORFLOW_ROOT)tensorflow/lite/micro/kernels/xtensa/softmax_int8_int16.cc \ + $(TENSORFLOW_ROOT)tensorflow/lite/micro/kernels/xtensa/softmax_vision.cc + +ifeq ($(TARGET_ARCH), hifimini) + # hifimini optimizations are implemented in the TFLM repository itself. + THIRD_PARTY_KERNEL_CC_SRCS += \ + $(TENSORFLOW_ROOT)tensorflow/lite/micro/kernels/xtensa/hifimini/svdf.cc \ + $(TENSORFLOW_ROOT)tensorflow/lite/micro/kernels/xtensa/hifimini/fully_connected.cc + +else ifeq ($(TARGET_ARCH), hifi5) + DOWNLOAD_RESULT := $(shell $(MAKEFILE_DIR)/ext_libs/xtensa_download.sh ${DOWNLOADS_DIR} hifi5 $(TENSORFLOW_ROOT)) + ifneq ($(DOWNLOAD_RESULT), SUCCESS) + $(error Something went wrong with the xtensa download: $(DOWNLOAD_RESULT)) + endif + + # TODO(b/161489252): -Wno-shadow is only needed for xannlib. But since we do + # not have separate cflags (or the concept of modular build targets) with the + # Makefile, -Wno-shadow will be used for everything. + + PLATFORM_FLAGS = \ + -DNNLIB_HIFI5 \ + -Wno-shadow + + CCFLAGS += $(PLATFORM_FLAGS) + CXXFLAGS += $(PLATFORM_FLAGS) + + NNLIB_PATH := $(MAKEFILE_DIR)/downloads/xa_nnlib_hifi5 + + THIRD_PARTY_KERNEL_CC_SRCS += \ + $(shell find $(NNLIB_PATH) -name "*.c") + + EXCLUDED_NNLIB_SRCS = \ + $(NNLIB_PATH)/algo/layers/cnn/src/xa_nn_cnn_api.c \ + $(NNLIB_PATH)/algo/layers/gru/src/xa_nn_gru_api.c \ + $(NNLIB_PATH)/algo/layers/lstm/src/xa_nn_lstm_api.c + + THIRD_PARTY_KERNEL_CC_SRCS := $(filter-out $(EXCLUDED_NNLIB_SRCS), $(THIRD_PARTY_KERNEL_CC_SRCS)) + + THIRD_PARTY_CC_HDRS += \ + $(shell find $(NNLIB_PATH) -name "*.h") + + INCLUDES += \ + -I$(NNLIB_PATH)/ \ + -I$(NNLIB_PATH)/algo/kernels/ \ + -I$(NNLIB_PATH)/include/nnlib/ \ + -I$(NNLIB_PATH)/include/ \ + -I$(NNLIB_PATH)/algo/common/include/ \ + -I$(NNLIB_PATH)/algo/ndsp/hifi5/include/ + +else ifeq ($(TARGET_ARCH), hifi4) + + DOWNLOAD_RESULT := $(shell $(MAKEFILE_DIR)/ext_libs/xtensa_download.sh ${DOWNLOADS_DIR} hifi4 $(TENSORFLOW_ROOT)) + ifneq ($(DOWNLOAD_RESULT), SUCCESS) + $(error 
Something went wrong with the xtensa download: $(DOWNLOAD_RESULT)) + endif + + # TODO(b/161489252): -Wno-shadow is only needed for xannlib. But since we do + # not have separate cflags (or the concept of modular build targets) with the + # Makefile, -Wno-shadow will be used for everything. + + PLATFORM_FLAGS = \ + -DNNLIB_V2 \ + -Wno-shadow + + CCFLAGS += $(PLATFORM_FLAGS) + CXXFLAGS += $(PLATFORM_FLAGS) + + NNLIB_PATH := $(MAKEFILE_DIR)/downloads/xa_nnlib_hifi4 + + THIRD_PARTY_KERNEL_CC_SRCS += \ + $(shell find $(NNLIB_PATH) -name "*.c") + + EXCLUDED_NNLIB_SRCS = \ + $(NNLIB_PATH)/algo/layers/cnn/src/xa_nn_cnn_api.c \ + $(NNLIB_PATH)/algo/layers/gru/src/xa_nn_gru_api.c \ + $(NNLIB_PATH)/algo/layers/lstm/src/xa_nn_lstm_api.c + + THIRD_PARTY_KERNEL_CC_SRCS := $(filter-out $(EXCLUDED_NNLIB_SRCS), $(THIRD_PARTY_KERNEL_CC_SRCS)) + + THIRD_PARTY_CC_HDRS += \ + $(shell find $(NNLIB_PATH) -name "*.h") + + INCLUDES += \ + -I$(NNLIB_PATH)/ \ + -I$(NNLIB_PATH)/algo/kernels/ \ + -I$(NNLIB_PATH)/include/nnlib/ \ + -I$(NNLIB_PATH)/include/ \ + -I$(NNLIB_PATH)/algo/common/include/ \ + -I$(NNLIB_PATH)/algo/ndsp/hifi4/include/ + +else ifeq ($(TARGET_ARCH), vision_p6) + DOWNLOAD_RESULT := $(shell $(MAKEFILE_DIR)/ext_libs/xtensa_download.sh ${DOWNLOADS_DIR} vision_p6 $(TENSORFLOW_ROOT)) + ifneq ($(DOWNLOAD_RESULT), SUCCESS) + $(error Something went wrong with the xtensa download: $(DOWNLOAD_RESULT)) + endif + + # TODO(b/161489252): -Wno-shadow is only needed for xannlib. But since we do + # not have separate cflags (or the concept of modular build targets) with the + # Makefile, -Wno-shadow will be used for everything. 
+ + PLATFORM_FLAGS = \ + -DXI_ERROR_LEVEL=XI_ERROR_LEVEL_NO_ERROR \ + -DCNNRT_PERF_LEVEL=CNNRT_PERF_LEVEL_NONE \ + -DINCLUDE_XI_CNN \ + -Wno-shadow + + CCFLAGS += $(PLATFORM_FLAGS) + CXXFLAGS += $(PLATFORM_FLAGS) + + NNLIB_PATH := $(MAKEFILE_DIR)/downloads/xi_tflmlib_vision_p6 + + THIRD_PARTY_CC_SRCS += \ + $(shell find $(NNLIB_PATH) -name "*.cc") + + INCLUDES += \ + -I$(NNLIB_PATH)/flk/include \ + -I$(NNLIB_PATH)/kernels/include/ \ + -I$(NNLIB_PATH)/runtime/include/ + + LDFLAGS += -lidma +else + $(error Unsupported TARGET_ARCH=$(TARGET_ARCH)) +endif + +FFT_PATH := $(MAKEFILE_DIR)/downloads/hifi_fft + +INCLUDES += -I$(FFT_PATH)/ + +ifeq ($(TARGET_ARCH), $(filter $(TARGET_ARCH), hifi3 hifi4 hifi5)) +THIRD_PARTY_KERNEL_CC_SRCS += \ + $(shell find $(FFT_PATH)/hifi3_fft -name "*.c") + +THIRD_PARTY_CC_HDRS += \ + $(shell find $(FFT_PATH)/hifi3_fft -name "*.h") +else ifeq ($(TARGET_ARCH), hifimini) +THIRD_PARTY_KERNEL_CC_SRCS += \ + $(shell find $(FFT_PATH)/hifi2_fft -name "*.c") + +THIRD_PARTY_CC_HDRS += \ + $(shell find $(FFT_PATH)/hifi2_fft -name "*.h") +endif diff --git a/tensorflow/lite/micro/tools/make/ext_libs/xtensa_download.sh b/tensorflow/lite/micro/tools/make/ext_libs/xtensa_download.sh new file mode 100755 index 0000000..fb45123 --- /dev/null +++ b/tensorflow/lite/micro/tools/make/ext_libs/xtensa_download.sh @@ -0,0 +1,97 @@ +#!/bin/bash +# Copyright 2019 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# ============================================================================== +# +# Downloads necessary to build with OPTIMIZED_KERNEL_DIR=xtensa. +# +# Called with three arguments: +# 1 - Path to the downloads folder which is typically +# ${TENSORFLOW_ROOT}/tensorflow/lite/micro/tools/make/downloads +# 2 - Xtensa variant to download for (e.g. hifi4) +# 3 - (optional) TENSORFLOW_ROOT: path to root of the TFLM tree (relative to directory from where the script is called). +# +# This script is called from the Makefile and uses the following convention to +# enable determination of success/failure: +# +# - If the script is successful, the only output on stdout should be SUCCESS. +# The makefile checks for this particular string. +# +# - Any string on stdout that is not SUCCESS will be shown in the makefile as +# the cause for the script to have failed. +# +# - Any other informational prints should be on stderr. + +set -e + +source ${3}tensorflow/lite/micro/tools/make/bash_helpers.sh + +DOWNLOADS_DIR=${1} +if [ ! -d ${DOWNLOADS_DIR} ]; then + echo "The top-level downloads directory: ${DOWNLOADS_DIR} does not exist." 
+ exit 1 +fi + +if [[ ${2} == "hifi4" ]]; then + LIBRARY_URL="http://github.com/foss-xtensa/nnlib-hifi4/raw/master/archive/xa_nnlib_hifi4_10_14_2022.zip" + LIBRARY_DIRNAME="xa_nnlib_hifi4" + LIBRARY_MD5="2bf3c1c7fd5a23f157babc8e24fd2c55" +elif [[ ${2} == "hifi5" ]]; then + LIBRARY_URL="http://github.com/foss-xtensa/nnlib-hifi5/raw/master/archive/xa_nnlib_hifi5_12_19_2022.zip" + LIBRARY_DIRNAME="xa_nnlib_hifi5" + LIBRARY_MD5="83306809191f42a064bde688b94e1eb1" +elif [[ ${2} == "vision_p6" ]]; then + LIBRARY_URL="https://github.com/foss-xtensa/tflmlib_vision/raw/main/archive/xi_tflmlib_vision_p6_22_06_29.zip" + LIBRARY_DIRNAME="xi_tflmlib_vision_p6" + LIBRARY_MD5="fea3720d76fdb3a5a337ace7b6081b56" +else + echo "Attempting to download an unsupported xtensa variant: ${2}" + exit 1 +fi + +LIBRARY_INSTALL_PATH=${DOWNLOADS_DIR}/${LIBRARY_DIRNAME} + +if [ -d ${LIBRARY_INSTALL_PATH} ]; then + echo >&2 "${LIBRARY_INSTALL_PATH} already exists, skipping the download." +else + TEMPDIR="$(mktemp -d)" + TEMPFILE="${TEMPDIR}/${LIBRARY_DIRNAME}.zip" + wget ${LIBRARY_URL} -O "$TEMPFILE" >&2 + MD5=`md5sum "$TEMPFILE" | awk '{print $1}'` + + if [[ ${MD5} != ${LIBRARY_MD5} ]] + then + echo "Bad checksum. Expected: ${LIBRARY_MD5}, Got: ${MD5}" + exit 1 + fi + + # Check if another make process has already extracted the downloaded files. + # If so, skip extracting and patching. + if [ -d ${LIBRARY_INSTALL_PATH} ]; then + echo >&2 "${LIBRARY_INSTALL_PATH} already exists, skipping the extraction." 
+ else + unzip -qo "$TEMPFILE" -d ${DOWNLOADS_DIR} >&2 + + rm -rf "${TEMPDIR}" + + pushd "${LIBRARY_INSTALL_PATH}" > /dev/null + chmod -R +w ./ + if [[ -f "../../ext_libs/xa_nnlib_${2}.patch" ]]; then + create_git_repo ./ + apply_patch_to_folder ./ "../../ext_libs/xa_nnlib_${2}.patch" "TFLM patch" + fi + fi +fi + +echo "SUCCESS" diff --git a/tensorflow/lite/micro/tools/make/flatbuffers.patch b/tensorflow/lite/micro/tools/make/flatbuffers.patch new file mode 100644 index 0000000..cb22cf0 --- /dev/null +++ b/tensorflow/lite/micro/tools/make/flatbuffers.patch @@ -0,0 +1,105 @@ +diff --git a/include/flatbuffers/base.h b/include/flatbuffers/base.h +index a5ac10d..371b6fd 100644 +--- a/include/flatbuffers/base.h ++++ b/include/flatbuffers/base.h +@@ -1,6 +1,16 @@ + #ifndef FLATBUFFERS_BASE_H_ + #define FLATBUFFERS_BASE_H_ + ++// For TFLM, we always want FLATBUFFERS_LOCALE_INDEPENDENT to be defined as 0. ++// We could achieve this by adding -DFLATBUFFERS_LOCALE_INDEPENDENT=0 to the ++// TFLM Makefile. However, for (at least) the Arduino, adding additional build ++// flags during the compilation can be a bit awkward. As such, we have instead ++// made a decision to change the default to be FLATBUFFERS_LOCALE_INDEPENDENT=0 ++// for TFLM to make it easier for external IDE integration. ++#ifndef FLATBUFFERS_LOCALE_INDEPENDENT ++#define FLATBUFFERS_LOCALE_INDEPENDENT 0 ++#endif ++ + // clang-format off + + // If activate should be declared and included first. +diff --git a/include/flatbuffers/default_allocator.h b/include/flatbuffers/default_allocator.h +index 8b173af..975d938 100644 +--- a/include/flatbuffers/default_allocator.h ++++ b/include/flatbuffers/default_allocator.h +@@ -39,26 +39,20 @@ class DefaultAllocator : public Allocator { + // This is to avoid having a statically or dynamically allocated default + // allocator, or having to move it between the classes that may own it. + inline uint8_t *Allocate(Allocator *allocator, size_t size) { +- return allocator ? 
allocator->allocate(size) +- : DefaultAllocator().allocate(size); ++ return allocator->allocate(size); + } + + inline void Deallocate(Allocator *allocator, uint8_t *p, size_t size) { +- if (allocator) +- allocator->deallocate(p, size); +- else +- DefaultAllocator().deallocate(p, size); ++ allocator->deallocate(p, size); + } + + inline uint8_t *ReallocateDownward(Allocator *allocator, uint8_t *old_p, + size_t old_size, size_t new_size, + size_t in_use_back, size_t in_use_front) { +- return allocator ? allocator->reallocate_downward(old_p, old_size, new_size, +- in_use_back, in_use_front) +- : DefaultAllocator().reallocate_downward( +- old_p, old_size, new_size, in_use_back, in_use_front); ++ return allocator->reallocate_downward(old_p, old_size, new_size, in_use_back, ++ in_use_front); + } + + } // namespace flatbuffers + +-#endif // FLATBUFFERS_DEFAULT_ALLOCATOR_H_ +\ No newline at end of file ++#endif // FLATBUFFERS_DEFAULT_ALLOCATOR_H_ +diff --git a/include/flatbuffers/flexbuffers.h b/include/flatbuffers/flexbuffers.h +index 89f3f30..6e6d0b3 100644 +--- a/include/flatbuffers/flexbuffers.h ++++ b/include/flatbuffers/flexbuffers.h +@@ -496,9 +496,24 @@ class Reference { + return static_cast(ReadUInt64(Indirect(), byte_width_)); + case FBT_NULL: return 0.0; + case FBT_STRING: { ++#if 1 ++#if !defined( _MSC_VER) ++#pragma GCC diagnostic push ++#pragma GCC diagnostic ignored "-Wnull-dereference" ++#endif ++ // See b/173239141 for additional context. Patched via ++ // micro/tools/make/flexbuffers_download.sh ++ // Introduce a segfault for an unsupported code path for TFLM. 
++ return *(static_cast(nullptr)); ++#if !defined( _MSC_VER) ++#pragma GCC diagnostic pop ++#endif ++#else ++ // This is the original code + double d; + flatbuffers::StringToNumber(AsString().c_str(), &d); + return d; ++#endif + } + case FBT_VECTOR: return static_cast(AsVector().size()); + case FBT_BOOL: +diff --git a/include/flatbuffers/util.h b/include/flatbuffers/util.h +index 93a39de..1cd4e8f 100644 +--- a/include/flatbuffers/util.h ++++ b/include/flatbuffers/util.h +@@ -24,6 +24,12 @@ + #include "flatbuffers/base.h" + #include "flatbuffers/stl_emulation.h" + ++// For TFLM we always want to use FLATBUFFERS_PREFER_PRINTF=1. See ++// http://b/211811553 for more context. ++#ifndef FLATBUFFERS_PREFER_PRINTF ++#define FLATBUFFERS_PREFER_PRINTF 1 ++#endif ++ + #ifndef FLATBUFFERS_PREFER_PRINTF + # include + # include + \ No newline at end of file diff --git a/tensorflow/lite/micro/tools/make/flatbuffers_download.sh b/tensorflow/lite/micro/tools/make/flatbuffers_download.sh new file mode 100755 index 0000000..af5e80f --- /dev/null +++ b/tensorflow/lite/micro/tools/make/flatbuffers_download.sh @@ -0,0 +1,78 @@ +#!/bin/bash +# Copyright 2021 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# ============================================================================== +# +# Called with the following arguments: +# 1 - Path to the downloads folder which is typically +# ${TENSORFLOW_ROOT}/tensorflow/lite/micro/tools/make/downloads +# 2 - (optional) TENSORFLOW_ROOT: path to root of the TFLM tree (relative to directory from where the script is called). +# +# This script is called from the Makefile and uses the following convention to +# enable determination of success/failure: +# +# - If the script is successful, the only output on stdout should be SUCCESS. +# The makefile checks for this particular string. +# +# - Any string on stdout that is not SUCCESS will be shown in the makefile as +# the cause for the script to have failed. +# +# - Any other informational prints should be on stderr. + +set -e + +source ${2}tensorflow/lite/micro/tools/make/bash_helpers.sh + +DOWNLOADS_DIR=${1} +if [ ! -d ${DOWNLOADS_DIR} ]; then + echo "The top-level downloads directory: ${DOWNLOADS_DIR} does not exist." + exit 1 +fi + +# The BUILD files in the downloaded folder result in an error with: +# bazel build tensorflow/lite/micro/... +# +# Parameters: +# $1 - path to the downloaded flatbuffers code. +function delete_build_files() { + rm -f `find . -name BUILD -o -name BUILD.bazel` +} + +DOWNLOADED_FLATBUFFERS_PATH=${DOWNLOADS_DIR}/flatbuffers + +if [ -d ${DOWNLOADED_FLATBUFFERS_PATH} ]; then + echo >&2 "${DOWNLOADED_FLATBUFFERS_PATH} already exists, skipping the download." 
+else + ZIP_PREFIX="a66de58af9565586832c276fbb4251fc416bf07f" + FLATBUFFERS_URL="https://github.com/google/flatbuffers/archive/${ZIP_PREFIX}.zip" + FLATBUFFERS_MD5="51a7a96747e1c33eb4aac6d52513a02f" + + TEMPDIR="$(mktemp -d)" + TEMPFILE="${TEMPDIR}/${ZIP_PREFIX}.zip" + wget ${FLATBUFFERS_URL} -O "$TEMPFILE" >&2 + check_md5 "${TEMPFILE}" ${FLATBUFFERS_MD5} + + unzip -qo "$TEMPFILE" -d "${TEMPDIR}" >&2 + mv "${TEMPDIR}/flatbuffers-${ZIP_PREFIX}" ${DOWNLOADED_FLATBUFFERS_PATH} + rm -rf "${TEMPDIR}" + + pushd ${DOWNLOADED_FLATBUFFERS_PATH} > /dev/null + delete_build_files ${DOWNLOADED_FLATBUFFERS_PATH} + create_git_repo ./ + apply_patch_to_folder ./ ../../flatbuffers.patch "TFLM patch" + + popd > /dev/null +fi + +echo "SUCCESS" diff --git a/tensorflow/lite/micro/tools/make/helper_functions.inc b/tensorflow/lite/micro/tools/make/helper_functions.inc new file mode 100644 index 0000000..ad3d44c --- /dev/null +++ b/tensorflow/lite/micro/tools/make/helper_functions.inc @@ -0,0 +1,119 @@ +DOWNLOAD_SCRIPT := $(MAKEFILE_DIR)/download_and_extract.sh + +#Handles the details of calculating the size of a binary target. +# +#Arguments are: +# 1 - Name of target. +# 2 - Regular expression for symbols to remove from the size calculation. +#Calling eval on the output will create the targets that you need. +define microlite_size +size_$(1): $$($(1)_BINARY) + $$(SIZE_SCRIPT) $$($(1)_BINARY) $2 +endef + +# Handles the details of generating a binary target, including specializing +# for the current platform, and generating project file targets. +# +# Note that while the function is called microlite_test, it is used for both +# test and non-test binaries. + +# Files that end with _test are added as test targets (i.e. can be executed with +# make test_. All others can be executed with make run_ +# +# Arguments are: +# 1 - Name of target. +# 2 - C/C++ source files +# 3 - C/C++ header files +# 4 - Model sources and model test inputs in.tflite, .wav, .bmp or .csv format. 
+# Calling eval on the output will create the targets that you need. +define microlite_test +$(1)_LOCAL_SRCS := $(2) + +ifneq ($(4),) + # Generate cc files and headers for all models and bitmaps in the test. + GEN_RESULT := $$(shell python3 $(TENSORFLOW_ROOT)tensorflow/lite/micro/tools/generate_cc_arrays.py $$(GENERATED_SRCS_DIR) $(4)) + + # The first ifneq is needed to be compatible with make versions prior to 4.2 + # which do not support .SHELLSTATUS. While make 4.2 was released in 2016, + # Ubuntu 18.04 only has version 4.1 + ifneq ($(.SHELLSTATUS),) + ifneq ($$(.SHELLSTATUS),0) + $$(error Something went wrong: $$(GEN_RESULT)) + endif + endif + + $(1)_LOCAL_SRCS += $$(GEN_RESULT) +endif + +ALL_SRCS += $$($(1)_LOCAL_SRCS) +$(1)_LOCAL_HDRS := $(3) +$(1)_LOCAL_OBJS := $$(addprefix $$(CORE_OBJDIR), \ +$$(patsubst %.S,%.o,$$(patsubst %.cc,%.o,$$(patsubst %.c,%.o,$$($(1)_LOCAL_SRCS))))) +$(1)_BINARY := $$(BINDIR)$(1) +$$($(1)_BINARY): $$($(1)_LOCAL_OBJS) $$(MICROLITE_LIB_PATH) + @mkdir -p $$(dir $$@) + $$(CXX) $$(CXXFLAGS) $$(INCLUDES) \ + -o $$($(1)_BINARY) $$($(1)_LOCAL_OBJS) \ + $$(MICROLITE_LIB_PATH) $$(LDFLAGS) $$(MICROLITE_LIBS) +$(1): $$($(1)_BINARY) +$(1)_bin: $$($(1)_BINARY).bin + +MICROLITE_BUILD_TARGETS += $$($(1)_BINARY) + +ifneq (,$(findstring _test,$(1))) +ifneq (,$(findstring integration_tests,$(1))) + MICROLITE_INTEGRATION_TEST_TARGETS += test_$(1) +else ifneq (,$(findstring generated_micro_mutable_op_resolver,$(1))) + MICROLITE_GEN_OP_RESOLVER_TEST_TARGETS += test_$(1) +else + MICROLITE_TEST_TARGETS += test_$(1) +endif + +# For bluepill, the CI build is failing due to introduction of the +# introduction of test_run_latency.sh script. Looks at +# https://b.corp.google.com/issues/268565399#comment11 for more details. 
+ifneq ($(TARGET), bluepill) +test_$(1):$$($(1)_BINARY) + $(MAKEFILE_DIR)/test_latency_log.sh $(1) $$(TEST_SCRIPT) $$($(1)_BINARY) $$(TEST_PASS_STRING) $$(TARGET) +else +test_$(1):$$($(1)_BINARY) + $$(TEST_SCRIPT) $$($(1)_BINARY) $$(TEST_PASS_STRING) $$(TARGET) +endif + +else +run_$(1): $$($(1)_BINARY) + $$(TEST_SCRIPT) $$($(1)_BINARY) non_test_binary $$(TARGET) +endif + +endef + +# Adds a dependency for a third-party library that needs to be downloaded from +# an external source. +# Arguments are: +# 1 - URL to download archive file from (can be .zip, .tgz, or .bz). +# 2 - MD5 sum of archive, to check integrity. Use md5sum tool to generate. +# 3 - Folder name to unpack library into, inside tf/l/x/m/t/downloads root. +# 4 - Optional patching action, must match clause in download_and_extract.sh. +# 5 - Optional patching action parameter +# These arguments are packed into a single '!' separated string, so no element +# can contain a '!'. +define add_third_party_download +THIRD_PARTY_DOWNLOADS += $(1)!$(2)!$(TENSORFLOW_ROOT)tensorflow/lite/micro/tools/make/downloads/$(3)!$(4)!$(5) +endef + +# Unpacks an entry in a list of strings created by add_third_party_download, and +# defines a dependency rule to download the library. The download_and_extract.sh +# script is used to handle to downloading and unpacking. +# 1 - Information about the library, separated by '!'s. 
+define create_download_rule +$(word 3, $(subst !, ,$(1))): + $(DOWNLOAD_SCRIPT) $(subst !, ,$(1)) +THIRD_PARTY_TARGETS += $(word 3, $(subst !, ,$(1))) +endef + +# Recursively find all files of given pattern +# Arguments are: +# 1 - Starting path +# 2 - File pattern, e.g: *.h +recursive_find = $(wildcard $(1)$(2)) $(foreach dir,$(wildcard $(1)*),$(call recursive_find,$(dir)/,$(2))) + diff --git a/tensorflow/lite/micro/tools/make/increase-stack-size-and-switch-DTCM-SRAM.patch b/tensorflow/lite/micro/tools/make/increase-stack-size-and-switch-DTCM-SRAM.patch new file mode 100644 index 0000000..57c50c1 --- /dev/null +++ b/tensorflow/lite/micro/tools/make/increase-stack-size-and-switch-DTCM-SRAM.patch @@ -0,0 +1,119 @@ +From 470dee13bffc0adb9a778d56fab3028031f71e80 Mon Sep 17 00:00:00 2001 +From: TFLM +Date: Fri, 28 Oct 2022 11:01:15 +0200 +Subject: [PATCH] TFLM patch + +--- + targets/corstone-300/platform.ld | 8 +++----- + targets/corstone-300/platform.scatter | 5 +++-- + targets/corstone-300/retarget.c | 16 ++++++++-------- + 3 files changed, 14 insertions(+), 15 deletions(-) + +diff --git a/targets/corstone-300/platform.ld b/targets/corstone-300/platform.ld +index ec58acc..21316a4 100644 +--- a/targets/corstone-300/platform.ld ++++ b/targets/corstone-300/platform.ld +@@ -75,7 +75,7 @@ + #define ETHOSU_ARENA 1 + #endif + +-__STACK_SIZE = 0x00008000; ++__STACK_SIZE = 0x00030000; + __HEAP_SIZE = 0x00008000; + + MEMORY +@@ -150,9 +150,6 @@ SECTIONS + *(EXCLUDE_FILE(*crtend?.o *crtend.o) .dtors) + *(SORT(.dtors.*)) + *(.dtors) +- +- *(.rodata*) +- + KEEP(*(.eh_frame*)) + } > ITCM :rom_exec + +@@ -275,6 +272,7 @@ SECTIONS + *(network_model_sec) + #endif + * (expected_output_data_sec) ++ *(.rodata*) + } > DDR :rom_dram + + __eddr_data = ALIGN (16) ; +@@ -293,7 +291,7 @@ SECTIONS + *(COMMON) + . 
= ALIGN(4); + __bss_end__ = .; +- } > DTCM :null ++ } > SRAM :null + + .heap (COPY) : + { +diff --git a/targets/corstone-300/platform.scatter b/targets/corstone-300/platform.scatter +index fab12d1..be5c227 100644 +--- a/targets/corstone-300/platform.scatter ++++ b/targets/corstone-300/platform.scatter +@@ -1,3 +1,4 @@ ++#!cpp + /* + * Copyright (c) 2019-2021 Arm Limited. All rights reserved. + * +@@ -76,7 +77,7 @@ + #endif + + #ifndef STACK_SIZE +-#define STACK_SIZE 0x8000 ++#define STACK_SIZE 0x20000 + #endif + + #ifndef HEAP_SIZE +@@ -136,7 +137,6 @@ APP_IMAGE LR_START LR_SIZE + ; Make sure reset_handler ends up in root segment, when split across + ; ITCM and DTCM + startup_ARMCM55.o +- .ANY (+RO) + } + + #if defined(USE_TRUSTZONE) && defined(TRUSTZONE_SECURE) +@@ -209,6 +209,7 @@ LOAD_REGION_1 DDR_START DDR_SIZE + * (input_data_sec) + * (expected_output_data_sec) + * (output_data_sec) ++ .ANY (+RO) + } + + #if (ETHOSU_ARENA == 1) +diff --git a/targets/corstone-300/retarget.c b/targets/corstone-300/retarget.c +index 4bde44d..b510ad8 100644 +--- a/targets/corstone-300/retarget.c ++++ b/targets/corstone-300/retarget.c +@@ -172,14 +172,6 @@ long RETARGET(_flen)(FILEHANDLE fh) { + return -1; + } + +-int RETARGET(_tmpnam)(char *name, int sig, unsigned maxlen) { +- (void)name; +- (void)sig; +- (void)maxlen; +- +- return 1; +-} +- + char *RETARGET(_command_string)(char *cmd, int len) { + (void)len; + +@@ -274,3 +266,11 @@ int ferror(FILE *f) { + return EOF; + } + #endif ++ ++#if defined(__ARMCC_VERSION) && (__ARMCC_VERSION >= 6100100) ++#else ++void RETARGET(exit)(int return_code) { ++ RETARGET(_exit)(return_code); ++ while (1) {} ++} ++#endif +-- +2.17.1 + diff --git a/tensorflow/lite/micro/tools/make/kissfft_download.sh b/tensorflow/lite/micro/tools/make/kissfft_download.sh new file mode 100755 index 0000000..342ae45 --- /dev/null +++ b/tensorflow/lite/micro/tools/make/kissfft_download.sh @@ -0,0 +1,66 @@ +#!/bin/bash +# Copyright 2021 The TensorFlow Authors. 
All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================== +# +# Called with the following arguments: +# 1 - Path to the downloads folder which is typically +# ${TENSORFLOW_ROOT}tensorflow/lite/micro/tools/make/downloads +# 2 - (optional) TENSORFLOW_ROOT: path to root of the TFLM tree (relative to directory from where the script is called). +# This script is called from the Makefile and uses the following convention to +# enable determination of success/failure: +# +# - If the script is successful, the only output on stdout should be SUCCESS. +# The makefile checks for this particular string. +# +# - Any string on stdout that is not SUCCESS will be shown in the makefile as +# the cause for the script to have failed. +# +# - Any other informational prints should be on stderr. + +set -e + +source ${2}tensorflow/lite/micro/tools/make/bash_helpers.sh + +DOWNLOADS_DIR=${1} +if [ ! -d ${DOWNLOADS_DIR} ]; then + echo "The top-level downloads directory: ${DOWNLOADS_DIR} does not exist." + exit 1 +fi + +DOWNLOADED_KISSFFT_PATH=${DOWNLOADS_DIR}/kissfft + +if [ -d ${DOWNLOADED_KISSFFT_PATH} ]; then + echo >&2 "${DOWNLOADED_KISSFFT_PATH} already exists, skipping the download." 
+else + + KISSFFT_URL="https://github.com/mborgerding/kissfft/archive/refs/tags/v130.zip" + KISSFFT_MD5="438ba1fef5783cc5f5f201395cc477ca" + + TEMPDIR="$(mktemp -d)" + TEMPFILE="${TEMPDIR}/v130.zip" + wget ${KISSFFT_URL} -O "${TEMPFILE}" >&2 + check_md5 "${TEMPFILE}" ${KISSFFT_MD5} + + unzip -qo "$TEMPFILE" -d "${TEMPDIR}" >&2 + mv "${TEMPDIR}/kissfft-130" ${DOWNLOADED_KISSFFT_PATH} + rm -rf "${TEMPDIR}" + + pushd ${DOWNLOADED_KISSFFT_PATH} > /dev/null + create_git_repo ./ + apply_patch_to_folder ./ ../../../../../../../third_party/kissfft/kissfft.patch "TFLM patch" + popd > /dev/null +fi + +echo "SUCCESS" diff --git a/tensorflow/lite/micro/tools/make/pigweed.patch b/tensorflow/lite/micro/tools/make/pigweed.patch new file mode 100644 index 0000000..d1e2930 --- /dev/null +++ b/tensorflow/lite/micro/tools/make/pigweed.patch @@ -0,0 +1,128 @@ +diff --git a/pw_presubmit/py/pw_presubmit/build.py b/pw_presubmit/py/pw_presubmit/build.py +index 4a370e33..224ad9c6 100644 +--- a/pw_presubmit/py/pw_presubmit/build.py ++++ b/pw_presubmit/py/pw_presubmit/build.py +@@ -20,7 +20,6 @@ from pathlib import Path + import re + from typing import Container, Dict, Iterable, List, Mapping, Set, Tuple + +-from pw_package import package_manager + from pw_presubmit import call, log_run, plural, PresubmitFailure, tools + + _LOG = logging.getLogger(__name__) +diff --git a/pw_presubmit/py/pw_presubmit/format_code.py b/pw_presubmit/py/pw_presubmit/format_code.py +index 19d09546..c1ff6b5a 100755 +--- a/pw_presubmit/py/pw_presubmit/format_code.py ++++ b/pw_presubmit/py/pw_presubmit/format_code.py +@@ -142,7 +142,7 @@ def fix_go_format(files: Iterable[Path]) -> None: + + + def _yapf(*args, **kwargs) -> subprocess.CompletedProcess: +- return log_run(['python', '-m', 'yapf', '--parallel', *args], ++ return log_run(['python', '-m', 'yapf', '--style', '{based_on_style:pep8,indent_width:2}', '--parallel', *args], + capture_output=True, + **kwargs) + +@@ -229,11 +229,6 @@ def print_format_check(errors: 
Dict[Path, str], + except ValueError: + return Path(path).resolve() + +- message = (f' pw format --fix {path_relative_to_cwd(path)}' +- for path in errors) +- _LOG.warning('To fix formatting, run:\n\n%s\n', '\n'.join(message)) +- +- + class CodeFormat(NamedTuple): + language: str + extensions: Collection[str] +diff --git a/pw_presubmit/py/pw_presubmit/pigweed_presubmit.py b/pw_presubmit/py/pw_presubmit/pigweed_presubmit.py +index 794967db..061db7ea 100755 +--- a/pw_presubmit/py/pw_presubmit/pigweed_presubmit.py ++++ b/pw_presubmit/py/pw_presubmit/pigweed_presubmit.py +@@ -220,8 +220,8 @@ def clang_tidy(ctx: PresubmitContext): + + + # The first line must be regex because of the '20\d\d' date +-COPYRIGHT_FIRST_LINE = r'Copyright 20\d\d The Pigweed Authors' +-COPYRIGHT_COMMENTS = r'(#|//| \*|REM|::)' ++COPYRIGHT_FIRST_LINE = r'Copyright 20\d\d The TensorFlow Authors. All Rights Reserved.' ++COPYRIGHT_COMMENTS = r'(#|//|\*|REM|::|/\*)' + COPYRIGHT_BLOCK_COMMENTS = ( + # HTML comments + (r''), ) +@@ -232,21 +232,23 @@ COPYRIGHT_FIRST_LINE_EXCEPTIONS = ( + '@echo off', + '# -*-', + ':', ++ '# Lint as', ++ '# coding=utf-8' + ) + + COPYRIGHT_LINES = tuple("""\ + +-Licensed under the Apache License, Version 2.0 (the "License"); you may not +-use this file except in compliance with the License. You may obtain a copy of +-the License at ++Licensed under the Apache License, Version 2.0 (the "License"); ++you may not use this file except in compliance with the License. ++You may obtain a copy of the License at + +- https://www.apache.org/licenses/LICENSE-2.0 ++ http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software +-distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +-WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +-License for the specific language governing permissions and limitations under +-the License. 
++distributed under the License is distributed on an "AS IS" BASIS, ++WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. ++See the License for the specific language governing permissions and ++limitations under the License. + """.splitlines()) + + _EXCLUDE_FROM_COPYRIGHT_NOTICE: Sequence[str] = ( +@@ -344,6 +346,11 @@ def copyright_notice(ctx: PresubmitContext): + errors.append(path) + continue + ++ # Special handling for TFLM style of copyright+license in the cc ++ # files. ++ if comment == '/*': ++ comment = '' ++ + if end_block_comment: + expected_lines = COPYRIGHT_LINES + (end_block_comment, ) + else: +@@ -354,6 +361,10 @@ def copyright_notice(ctx: PresubmitContext): + expected_line = expected + '\n' + elif comment: + expected_line = (comment + ' ' + expected).rstrip() + '\n' ++ else: ++ # Special handling for TFLM style of copyright+license in ++ # the cc files. ++ expected_line = (expected).rstrip() + '\n' + + if expected_line != actual: + _LOG.warning(' bad line: %r', actual) +@@ -475,6 +486,10 @@ BROKEN = ( + gn_nanopb_build, + ) + ++COPYRIGHT_NOTICE = ( ++ copyright_notice, ++) ++ + QUICK = ( + commit_message_format, + init_cipd, +@@ -509,7 +524,8 @@ FULL = ( + build_env_setup, + ) + +-PROGRAMS = Programs(broken=BROKEN, quick=QUICK, full=FULL) ++PROGRAMS = Programs(broken=BROKEN, quick=QUICK, full=FULL, ++ copyright_notice=COPYRIGHT_NOTICE) + + + def parse_args() -> argparse.Namespace: diff --git a/tensorflow/lite/micro/tools/make/pigweed_download.sh b/tensorflow/lite/micro/tools/make/pigweed_download.sh new file mode 100755 index 0000000..b71fdc8 --- /dev/null +++ b/tensorflow/lite/micro/tools/make/pigweed_download.sh @@ -0,0 +1,61 @@ +#!/bin/bash +# Copyright 2019 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================== +# +# Called with following arguments: +# 1 - Path to the downloads folder which is typically +# tensorflow/lite/micro/tools/make/downloads +# 2 - (optional) TENSORFLOW_ROOT: path to root of the TFLM tree (relative to directory from where the script is called). +# +# This script is called from the Makefile and uses the following convention to +# enable determination of sucess/failure: +# +# - If the script is successful, the only output on stdout should be SUCCESS. +# The makefile checks for this particular string. +# +# - Any string on stdout that is not SUCCESS will be shown in the makefile as +# the cause for the script to have failed. +# +# - Any other informational prints should be on stderr. + +set -e + +source ${2}tensorflow/lite/micro/tools/make/bash_helpers.sh + +DOWNLOADS_DIR=${1} +if [ ! -d ${DOWNLOADS_DIR} ]; then + echo "The top-level downloads directory: ${DOWNLOADS_DIR} does not exist." + exit 1 +fi + +DOWNLOADED_PIGWEED_PATH=${DOWNLOADS_DIR}/pigweed + +if [ -d ${DOWNLOADED_PIGWEED_PATH} ]; then + echo >&2 "${DOWNLOADED_PIGWEED_PATH} already exists, skipping the download." +else + git clone https://pigweed.googlesource.com/pigweed/pigweed ${DOWNLOADED_PIGWEED_PATH} >&2 + pushd ${DOWNLOADED_PIGWEED_PATH} > /dev/null + + git checkout 47268dff45019863e20438ca3746c6c62df6ef09 >&2 + rm -rf ${DOWNLOADED_PIGWEED_PATH}/.git + rm -f `find . 
-name BUILD` + + create_git_repo ./ + apply_patch_to_folder ./ ../../pigweed.patch "TFLM patch" + + popd > /dev/null +fi + +echo "SUCCESS" diff --git a/tensorflow/lite/micro/tools/make/renode_download.sh b/tensorflow/lite/micro/tools/make/renode_download.sh new file mode 100755 index 0000000..f780387 --- /dev/null +++ b/tensorflow/lite/micro/tools/make/renode_download.sh @@ -0,0 +1,65 @@ +#!/bin/bash +# Copyright 2020 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================== +# +# Called with following arguments: +# 1 - Path to the downloads folder which is typically +# ${TENSORFLOW_ROOT}/tensorflow/lite/micro/tools/make/downloads +# 2 - (optional) TENSORFLOW_ROOT: path to root of the TFLM tree (relative to directory from where the script is called). +# +# This script is called from the Makefile and uses the following convention to +# enable determination of sucess/failure: +# +# - If the script is successful, the only output on stdout should be SUCCESS. +# The makefile checks for this particular string. +# +# - Any string on stdout that is not SUCCESS will be shown in the makefile as +# the cause for the script to have failed. +# +# - Any other informational prints should be on stderr. + +set -e + +TENSORFLOW_ROOT=${2} +source ${TENSORFLOW_ROOT}tensorflow/lite/micro/tools/make/bash_helpers.sh + +DOWNLOADS_DIR=${1} +if [ ! 
-d ${DOWNLOADS_DIR} ]; then + echo "The top-level downloads directory: ${DOWNLOADS_DIR} does not exist." + exit 1 +fi + +DOWNLOADED_RENODE_PATH=${DOWNLOADS_DIR}/renode + +if [ -d ${DOWNLOADED_RENODE_PATH} ]; then + echo >&2 "${DOWNLOADED_RENODE_PATH} already exists, skipping the download." +else + LINUX_PORTABLE_URL="https://github.com/renode/renode/releases/download/v1.13.2/renode-1.13.2.linux-portable.tar.gz" + TEMP_ARCHIVE="/tmp/renode.tar.gz" + + echo >&2 "Downloading from url: ${LINUX_PORTABLE_URL}" + wget ${LINUX_PORTABLE_URL} -O ${TEMP_ARCHIVE} >&2 + + EXPECTED_MD5="cf940256fd32597975f10f9146925d9b" + check_md5 ${TEMP_ARCHIVE} ${EXPECTED_MD5} + + mkdir ${DOWNLOADED_RENODE_PATH} + tar xzf ${TEMP_ARCHIVE} --strip-components=1 --directory "${DOWNLOADED_RENODE_PATH}" >&2 + echo >&2 "Unpacked to directory: ${DOWNLOADED_RENODE_PATH}" + + pip3 install -r ${DOWNLOADED_RENODE_PATH}/tests/requirements.txt >&2 +fi + +echo "SUCCESS" diff --git a/tensorflow/lite/micro/tools/make/specialize_files.py b/tensorflow/lite/micro/tools/make/specialize_files.py new file mode 100644 index 0000000..a7323df --- /dev/null +++ b/tensorflow/lite/micro/tools/make/specialize_files.py @@ -0,0 +1,58 @@ +# Copyright 2021 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# ============================================================================== + +import argparse +import os + + +# Selects the more specialized files in directory in favor of the file with the +# same name in base_file_list and returns a list containing all the files as a +# result of this specialization merge. +def _specialize_files(base_file_list, directory): + # If the specialized directory is not a valid path, then return the + # base_file_list. + if not os.path.isdir(directory): + return base_file_list + + specialize_files = os.listdir(directory) + specialized_list = [] + for fpath in base_file_list: + fname = os.path.basename(fpath) + if fname in specialize_files: + specialized_list.append(os.path.join(directory, fname)) + else: + specialized_list.append(fpath) + return specialized_list + + +if __name__ == "__main__": + parser = argparse.ArgumentParser( + description="Helper functions used during the Makefile build") + + parser.add_argument( + "--base_files", + default="", + help="String with (space separated) list of all the files " + "to attempt to specialize.") + + parser.add_argument("--specialize_directory", + default="", + help="Directory containing the more specialized files.") + + args = parser.parse_args() + + if args.base_files != "" and args.specialize_directory != "": + print(" ".join( + _specialize_files(args.base_files.split(), args.specialize_directory))) diff --git a/tensorflow/lite/micro/tools/make/targets/arc/README.md b/tensorflow/lite/micro/tools/make/targets/arc/README.md new file mode 100644 index 0000000..a958830 --- /dev/null +++ b/tensorflow/lite/micro/tools/make/targets/arc/README.md @@ -0,0 +1,268 @@ +# Building TensorFlow Lite for Microcontrollers for Synopsys DesignWare ARC VPX and EM/HS Processors + +## Maintainers + +* [dzakhar](https://github.com/dzakhar) +* [JaccovG](https://github.com/JaccovG) +* [gerbauz](https://github.com/gerbauz) + +## Introduction + +This document contains the general information on building and 
running +TensorFlow Lite Micro for targets based on the Synopsys ARC VPX and EM/HS Processors. + +## Table of Contents + +- [Install the Synopsys DesignWare ARC MetaWare Development Toolkit](#install-the-synopsys-designware-arc-metaWare-development-toolkit) +- [ARC EM Software Development Platform (ARC EM SDP)](#ARC-EM-Software-Development-Platform-ARC-EM-SDP) +- [Using EmbARC MLI Library 2.0 (experimental feature)](#Using-EmbARC-MLI-Library-2.0-experimental-feature) +- [Model Adaptation Tool (experimental feature)](#Model-Adaptation-Tool-experimental-feature) +- [Custom ARC EM/HS/VPX Platform](#Custom-ARC-EMHSVPX-Platform) + +## Install the Synopsys DesignWare ARC MetaWare Development Toolkit + +The Synopsys DesignWare ARC MetaWare Development Toolkit (MWDT) is required to +build and run Tensorflow Lite Micro applications for all ARC VPX and EM/HS targets. + +To license MWDT, please see further details +[here](https://www.synopsys.com/dw/ipdir.php?ds=sw_metaware) + +To request an evaluation version of MWDT, please use the +[Synopsys Eval Portal](https://eval.synopsys.com/) and follow the link for the +MetaWare Development Toolkit (Important: Do not confuse this with MetaWare EV +Development Toolkit or MetaWare Lite options also available on this page) + +Run the downloaded installer and follow the instructions to set up the toolchain +on your platform. + +TensorFlow Lite for Microcontrollers builds are divided into two phases: +Application Project Generation and Application Project Building/Running. The +former phase requires \*nix environment while the latter does not. + +For basic project generation targeting +[ARC EM Software Development Platform](#ARC-EM-Software-Development-Platform-ARC-EM-SDP), +MetaWare is NOT required for the Project Generation Phase. 
However, it is +required in case the following: - For project generation for custom (not EM SDP) +targets - To build microlib target library with all required TFLM objects for +external use + +Please consider the above when choosing whether to install Windows or Linux or +both versions of MWDT + +## ARC EM Software Development Platform (ARC EM SDP) + +This section describes how to deploy on an +[ARC EM SDP board](https://www.synopsys.com/dw/ipdir.php?ds=arc-em-software-development-platform) + +### Initial Setup + +To use the EM SDP, you need the following hardware and software: + +#### ARC EM SDP + +More information on the platform, including ordering information, can be found +[here](https://www.synopsys.com/dw/ipdir.php?ds=arc-em-software-development-platform). + +#### MetaWare Development Toolkit + +See +[Install the Synopsys DesignWare ARC MetaWare Development Toolkit](#install-the-synopsys-designware-arc-metaWare-development-toolkit) +section for instructions on toolchain installation. + +#### Digilent Adept 2 System Software Package + +If you wish to use the MetaWare Debugger to debug your code, you need to also +install the Digilent Adept 2 software, which includes the necessary drivers for +connecting to the targets. This is available from official +[Digilent site](https://reference.digilentinc.com/reference/software/adept/start?redirect=1#software_downloads). +You should install the “System” component, and Runtime. Utilities and SDK are +NOT required. + +Digilent installation is NOT required if you plan to deploy to EM SDP via the SD +card instead of using the debugger. + +#### Make Tool + +A `'make'` tool is required for both phases of deploying Tensorflow Lite Micro +applications on ARC EM SDP: +1. Test binaries generation. +2. TFLM static library generation. + +For the first phase you need an environment and make tool compatible with +Tensorflow Lite for Micro build system. 
At the moment of this writing, this +requires make >=3.82 and a *nix-like environment which supports shell and native +commands for file manipulations. MWDT toolkit is not required for this phase. + +For the second phase, requirements are less strict. The gmake version delivered +with MetaWare Development Toolkit is sufficient. There are no shell and *nix +command dependencies, so Windows can be used + +#### Serial Terminal Emulation Application + +The Debug UART port of the EM SDP is used to print application output. The USB +connection provides both the debug channel and RS232 transport. You can use any +terminal emulation program (like [PuTTY](https://www.putty.org/)) to view UART +output from the EM SDP. + +#### microSD Card + +If you want to self-boot your application (start it independently from a +debugger connection), you also need a microSD card with a minimum size of 512 MB +and a way to write to the card from your development host. Note that the card +must be formatted as FAT32 with default cluster size (but less than 32 Kbytes) + +### Connect the Board + +1. Make sure Boot switches of the board (S3) are configured in the next way: + +Switch # | Switch position +:------: | :-------------: +1 | Low (0) +2 | Low (0) +3 | High (1) +4 | Low (0) + +1. Connect the power supply included in the product package to the ARC EM SDP. +2. Connect the USB cable to connector J10 on the ARC EM SDP (near the RST and + CFG buttons) and to an available USB port on your development host. +3. Determine the COM port assigned to the USB Serial Port (on Windows, using + Device Manager is an easy way to do this) +4. Execute the serial terminal application you installed in the previous step + and open the serial connection with the early defined COM port (speed 115200 + baud; 8 bits; 1 stop bit; no parity). +5. Push the CFG button on the board. 
After a few seconds you should see the + boot log in the terminal which begins as follows: + +``` +U-Boot + +CPU: ARC EM11D v5.0 at 40 MHz +Subsys:ARC Data Fusion IP Subsystem +Model: snps,emsdp +Board: ARC EM Software Development Platform v1.0 +… +``` + +### Generate TFLM as Static Library for ARC EM SDP + +If you want to use TensorFlow Lite Micro framework in your own application, you need to generate TFLM as a static library. +Next command can be used to generate TFLM library for ARC EM SDP: + +``` +make -f tensorflow/lite/micro/tools/make/Makefile clean +make -f tensorflow/lite/micro/tools/make/Makefile TARGET=arc_emsdp \ +TARGET_ARCH=arc \ +OPTIMIZED_KERNEL_DIR=arc_mli \ +microlite +``` + +Generated library *libtensorflow-microlite.a* can be found in *gen/{target}/lib*. + +### Example Applications for ARC EM SDP + +Example applications can be found on ARC examples repository. + +## Using EmbARC MLI Library 2.0 (experimental feature) + +This section describes how to build TFLM using [embARC MLI Library 2.0](https://github.com/foss-for-synopsys-dwc-arc-processors/embarc_mli/tree/Release_2.0_EA). + +The EmbARC MLI Library 2.0 can be used to build TFLM library and run applications (especially for VPX processors). + +Because of difference in weights layout, TFLM models must be pre-adapted using a Model Adaptation Tool. For native TFLM examples (person detection, micro speech) Model Adaptation Tool is applied automatically when MLI 2.0 is used, so there is no need to run it maually. + +To use the embARC MLI Library 2.0 in all cases (including native examples), you will also need extra dependencies for the Model Adaptation Tool. Please check the [Model Adaptation Tool](#​Model-Adaptation-Tool-experimental-​feature) section for more information. + +To build TFLM using the embARC MLI Library 2.0, add the following tag to the command: +``` +ARC_TAGS=mli20_experimental +``` +Also, some of configurations may require custom BUILD_LIB. 
Please, check MLI Library 2.0 [documentation](https://github.com/foss-for-synopsys-dwc-arc-processors/embarc_mli/tree/Release_2.0_EA#build-configuration-options) for more details. Following option can be added: +``` +BUILD_LIB_DIR= +``` +Example of command to build TFLM lib for VPX5: +``` +make -f tensorflow/lite/micro/tools/make/Makefile \ +TARGET=arc_custom \ +TCF= \ +BUILD_LIB_DIR=vpx5_integer_full \ +ARC_TAGS=mli20_experimental microlite +``` +## Model Adaptation Tool (experimental feature) + +Models in TFLM format need to be pre-adapted before being used with MLI 2.0 due to differences in weights' tensor layout in some kernels. Adaptation is done automatically during TFLM project generation, but requires TensorFlow to be installed. + +To use the Model Adaptation Tool, you need the following tools in addition to common requirments: +* [Python](https://www.python.org/downloads/) 3.7 or higher +* [TensorFlow for Python](https://www.tensorflow.org/install/pip) version 2.5 or higher + +If you want to use your own model, exported from TensorFlow in **.tflite** or **.cc** format, you will need to adapt it manually using the Model Adaptation Tool from the current folder, using the following command: + +``` +python adaptation_tool.py \ + +``` + +## Custom ARC EM/HS/VPX Platform + +This section describes how to deploy on a Custom ARC VPX or EM/HS platform defined only by a TCF (Tool onfiguration File, created at CPU configuration time) and optional LCF (Linker Command File). In this case, the real hardware is unknown, and applications can be run only in the nSIM simulator included with the MetaWare toolkit. + +VPX support is presented as an experimental feature of supporting embARC MLI Library version 2.0 and model adaptation. Read more about embARC MLI Library 2.0 support in the [related section](#Using-EmbARC-MLI-Library-2.0-experimental-feature). 
+ +### Initial Setup + +To use a custom ARC EM/HS/VPX platform, you need the following : +* Synopsys MetaWare +Development Toolkit version 2019.12 or higher (2021.06 or higher for MLI Library 2.0) +* Make tool (make or gmake) +* CMake 3.18 or higher\ +If you are using the [Model Adaptation Tool](#Model-Adaptation-Tool-experimental-feature), you will also need to install: +* [Python](https://www.python.org/downloads/) 3.7 or higher +* [TensorFlow for Python](https://www.tensorflow.org/install/pip) version 2.5 or higher + +See +[Install the Synopsys DesignWare ARC MetaWare Development Toolkit](#install-the-synopsys-designware-arc-metaWare-development-toolkit) +section for instructions on toolchain installation. See +[MetaWare Development Toolkit](#MetaWare-Development-Toolkit) and +[Make Tool](#Make-Tool) sections for instructions on toolchain installation and +comments about make versions. + +### Generate TFLM as Static Library + +If you want to use TensorFlow Lite Micro framework in your own application, you need to generate TFLM as a static library. +Next command can be used to generate TFLM library: + +``` +make -f tensorflow/lite/micro/tools/make/Makefile clean +make -f tensorflow/lite/micro/tools/make/Makefile \ +TARGET_ARCH=arc \ +TARGET=arc_custom \ +OPTIMIZED_KERNEL_DIR=arc_mli \ +TCF_FILE= \ +LCF_FILE= \ +microlite +``` +For MLI Library 2.0 (experimental feature): +``` +make -f tensorflow/lite/micro/tools/make/Makefile clean +make -f tensorflow/lite/micro/tools/make/Makefile \ +TARGET_ARCH=arc \ +TARGET=arc_custom \ +OPTIMIZED_KERNEL_DIR=arc_mli \ +ARC_TAGS=mli20_experimental \ +BUILD_LIB_DIR= \ +TCF_FILE= \ +microlite +``` + +Generated library *libtensorflow-microlite.a* can be found in *gen/{target}/lib*. + +### Example Applications for ARC EM/HS/VPX custom configuration. + +Example applications can be found on ARC examples repository. 
+ +## License + +TensorFlow's code is covered by the Apache2 License included in the repository, +and third-party dependencies are covered by their respective licenses, in the +third_party folder of this package. diff --git a/tensorflow/lite/micro/tools/make/targets/arc/adaptation_tool.py b/tensorflow/lite/micro/tools/make/targets/arc/adaptation_tool.py new file mode 100644 index 0000000..32d7a51 --- /dev/null +++ b/tensorflow/lite/micro/tools/make/targets/arc/adaptation_tool.py @@ -0,0 +1,236 @@ +# Copyright 2021 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================== +"""embARC MLI model adaptation tool""" + +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function + +import os +import sys +import re +import shutil + +try: + from tensorflow.lite.python.util import convert_bytes_to_c_source, _convert_model_from_object_to_bytearray, \ + _convert_model_from_bytearray_to_object +except ImportError: + print('Install TensorFlow package first to use MLI adaptation tool.') + sys.exit(1) + + +# Model conversion functions +def convert_c_source_to_bytes(input_cc_file): + """Converts C++ source file to bytes (immutable). + + Args: + input_cc_file: A .cc file to process. + + Returns: + A bytearray corresponding to the input cc file array. 
+ """ + pattern = re.compile(r'(((0x[0-9a-fA-F]+), ?)+)') + model_bytearray = bytearray() + + with open(input_cc_file) as file_handle: + for line in file_handle: + values_match = pattern.search(line) + + if values_match is None: + continue + + list_text = values_match.group(1) + values_text = filter(None, list_text.split(',')) + + values = [int(x, base=16) for x in values_text] + model_bytearray.extend(values) + + return bytes(model_bytearray) + + +def convert_c_source_to_object(input_cc_file): + """Converts C++ source file to an object for parsing.""" + with open(input_cc_file, 'r') as model_file: + include_path, array_name = None, None + for line in model_file: + if '#include' in line and not include_path: + include_path = line.strip('#include ').strip('"\n') + if re.search(r"\[\].*[=]|\[[1-9][0-9]*\].*[=]", line) and not array_name: + array_name = re.search(r"\w*(?=\[)", line).group() + if include_path and array_name: + break + + model_bytes = convert_c_source_to_bytes(input_cc_file) + return _convert_model_from_bytearray_to_object(model_bytes), \ + include_path, array_name + + +def read_model(input_tflite_file): + """Reads a tflite model as a python object.""" + + with open(input_tflite_file, 'rb') as model_file: + model_bytearray = bytearray(model_file.read()) + return _convert_model_from_bytearray_to_object(model_bytearray) + + +def write_model(model_object, output_tflite_file, include_path, array_name): + """Writes the tflite model, a python object, into the output file. + + Args: + model_object: A tflite model as a python object + output_tflite_file: Full path name to the output tflite file. 
+ include_path: Path to model header file + array_name: name of the array for .cc output + + Raises: + ValueError: If file is not formatted in .cc or .tflite + """ + model_bytearray = _convert_model_from_object_to_bytearray(model_object) + if output_tflite_file.endswith('.cc'): + mode = 'w' + converted_model = convert_bytes_to_c_source(data=model_bytearray, + array_name=array_name, + include_path=include_path, + use_tensorflow_license=True)[0] + elif output_tflite_file.endswith('.tflite'): + mode = 'wb' + converted_model = model_bytearray + else: + raise ValueError('File format not supported') + + with open(output_tflite_file, mode) as output_file: + output_file.write(converted_model) + + +# Helper functions +def transpose_weights(tensor, buffer, transpose_shape): + """Transposes weights to embARC MLI format according to transpose_shape + + Args: + tensor: A tensor to process + buffer: A buffer relevant to the tensor + transpose_shape: Target shape. + """ + buffer.data = buffer.data \ + .reshape(tensor.shape) \ + .transpose(transpose_shape) \ + .flatten() + + tensor.shape = tensor.shape[transpose_shape] + + tensor.quantization.quantizedDimension = \ + transpose_shape.index(tensor.quantization.quantizedDimension) + + +# Layer-specific adaptation functions +def adapt_conv(operator, tensors, buffers): + """Adapts weights tensors of convolution layers + + Args: + operator: Operator index + tensors: Model tensors dict + buffers: Model buffers dict + """ + transpose_weights(tensors[operator.inputs[1]], + buffers[tensors[operator.inputs[1]].buffer], [1, 2, 3, 0]) + + +def adapt_dw(operator, tensors, _buffers): + """Adapts weights tensors of depthwise convolution layers + + Args: + operator: Operator index + tensors: Model tensors dict + _buffers: Model buffers dict + """ + tensors[operator.inputs[1]].shape = \ + tensors[operator.inputs[1]].shape[[1, 2, 0, 3]] + + +def adapt_fc(operator, tensors, buffers): + """Adapts weights tensors of fully connected layers + + Args: + 
operator: Operator index + tensors: Model tensors dict + buffers: Model buffers dict + """ + transpose_weights(tensors[operator.inputs[1]], + buffers[tensors[operator.inputs[1]].buffer], [1, 0]) + + +# Op_codes that require additional adaptation for MLI +adapt_op_codes = { + 3: adapt_conv, # CONV_2D + 4: adapt_dw, # DEPTHWISE_CONV_2D + 9: adapt_fc # FULLY_CONNECTED +} + + +def adapt_model_to_mli(model): + """Adapts weights of the model to embARC MLI layout + + Args: + model: TFLite model object + """ + op_codes = [ + op_code.builtinCode + if op_code.builtinCode != 0 else op_code.deprecatedBuiltinCode + for op_code in model.operatorCodes + ] + for subgraph in model.subgraphs: + for operator in subgraph.operators: + try: + adapt_op_codes[op_codes[operator.opcodeIndex]] \ + (operator, subgraph.tensors, model.buffers) + except KeyError: + continue + + +def main(argv): + try: + if len(sys.argv) == 3: + tflite_input = argv[1] + tflite_output = argv[2] + elif len(sys.argv) == 2: + tflite_input = argv[1] + tflite_output = argv[1] + except IndexError: + print("Usage: %s " % (argv[0])) + else: + if tflite_input == tflite_output: + path, filename = os.path.split(tflite_input) + try: + shutil.copyfile(tflite_input, path + '/orig_' + filename) + except OSError as err: + print('Error while creating backup file:', err) + if tflite_input.endswith('.cc'): + model, include_path, array_name = convert_c_source_to_object( + tflite_input) + elif tflite_input.endswith('.tflite'): + model = read_model(tflite_input) + include_path = '' + array_name = os.path.split(tflite_output)[1].split('.')[0] + else: + raise ValueError('File format not supported') + + adapt_model_to_mli(model) + write_model(model, tflite_output, include_path, array_name) + + print('Model was adapted to be used with embARC MLI.') + + +if __name__ == "__main__": + main(sys.argv) \ No newline at end of file diff --git a/tensorflow/lite/micro/tools/make/targets/arc/arc_common.inc 
b/tensorflow/lite/micro/tools/make/targets/arc/arc_common.inc new file mode 100644 index 0000000..abc45c3 --- /dev/null +++ b/tensorflow/lite/micro/tools/make/targets/arc/arc_common.inc @@ -0,0 +1,86 @@ +# Copyright 2021 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Common Settings for ARC platform and its projects. +# Might be reused across different targets + +ifeq ($(TARGET_ARCH), arc) + +ifeq ($(ARC_TOOLCHAIN), mwdt) + CC_TOOL := ccac + AR_TOOL := arac + CXX_TOOL := ccac + LD_TOOL := ccac + + ARC_APP_RUN_CMD = mdb -run -tcf=$(TCF_FILE_NAME) $(DLR)\(DBG_ARGS\) + ARC_APP_DEBUG_CMD = mdb -OK -tcf=$(TCF_FILE_NAME) $(DLR)\(DBG_ARGS\) + + # The variable TCF_FILE stores path to Tool Configuration File (*.tcf). + # This file is used by MWDT toolchain to properly compile/run code + TCF_FILE ?= + + LCF_FILE ?= + + EXT_CFLAGS ?= + + BUILD_ARC_MLI ?= true + +# The variable TCF_FILE_NAME stores the TCF file name (including .tcf extension), +# this variable is used later to add the option to the compiler flags. +# This condition also handles the case when the user/makefile specifies +# the configuration bundled with MWDT (usually without .tcf extension). 
+ TCF_FILE_NAME = $(TCF_FILE) + + PLATFORM_FLAGS = -tcf=$(TCF_FILE_NAME) -tcf_core_config + + PLATFORM_FLAGS += -Hnocopyr -Hpurge -Hdense_prologue -fslp-vectorize-aggressive -ffunction-sections -fdata-sections + +ifeq ($(filter $(ARC_TAGS), mli20_experimental),) + PLATFORM_FLAGS += -Hon=Long_enums +endif + + # Use compact CRT. It requires pre-defined heap size + PLATFORM_FLAGS += -Hcl -Hcrt_fast_memcpy -Hcrt_fast_memset + + PLATFORM_LDFLAGS = -tcf=$(TCF_FILE_NAME) + + PLATFORM_LDFLAGS += -Hnocopyr -m -Hldopt=-Bgrouplib -Hheap=24K + +# Fix for kernel_util for ARC target + PLATFORM_LDFLAGS += -Hldopt=-u,TfLiteIntArrayEqual -Hldopt=-u,TfLiteIntArrayGetSizeInBytes + +ifneq ($(LCF_FILE), ) + PLATFORM_LDFLAGS += $(notdir $(LCF_FILE)) +endif + + CXXFLAGS := $(filter-out -std=c++11,$(CXXFLAGS)) + CCFLAGS := $(filter-out -std=c11,$(CCFLAGS)) + + ldflags_to_remove = -Wl,--fatal-warnings -Wl,--gc-sections + LDFLAGS := $(filter-out $(ldflags_to_remove),$(LDFLAGS)) + + MICROLITE_LIBS := $(filter-out -lm,$(MICROLITE_LIBS)) + + CXXFLAGS += $(PLATFORM_FLAGS) + CXXFLAGS += $(EXT_CFLAGS) + CCFLAGS += $(PLATFORM_FLAGS) + CCFLAGS += $(EXT_CFLAGS) + LDFLAGS += $(PLATFORM_LDFLAGS) + +endif # ARC_TOOLCHAIN + +else + $(error "Only ARC target architecture supported (TARGET_ARCH=arc)") + +endif # TARGET_ARCH diff --git a/tensorflow/lite/micro/tools/make/targets/arc_custom_makefile.inc b/tensorflow/lite/micro/tools/make/targets/arc_custom_makefile.inc new file mode 100644 index 0000000..84e2d03 --- /dev/null +++ b/tensorflow/lite/micro/tools/make/targets/arc_custom_makefile.inc @@ -0,0 +1,88 @@ +# Copyright 2020 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Settings for not pre-defined ARC processors. +# User need to specify ARC target with Tool Configuration File (*.tcf). +# Path to this file must be passed through TCF_FILE variable. +# Otherwise, default em7d_voice_audio configuration is used + +TARGET_ARCH := arc +ARC_TOOLCHAIN := mwdt + +# Overriding TARGET variable to change name of project folder according +# to specified Tool Configuration File (*.tcf) passed through TCF_FILE variable +# or default em7d_voice_audio configuration. +ifneq ($(TCF_FILE),) + override TARGET = $(basename $(notdir $(TCF_FILE))) +else + $(warning TCF_FILE variable is not specified. Use default em7d_voice_audio configuration) + override TARGET = em7d_voice_audio + TCF_FILE = em7d_voice_audio +endif + +ifneq ($(filter $(ARC_TAGS), mli20_experimental),) + override TARGET := $(TARGET)_mli20 +endif + +include $(MAKEFILE_DIR)/targets/arc/arc_common.inc + +ifneq ($(filter $(ARC_TAGS), mli20_experimental),) + CXXFLAGS += -DMLI_2_0 + CCFLAGS += -DMLI_2_0 + +# If kernel tests running - using define to activate online permutation. +ifneq ($(findstring test, $(MAKECMDGOALS)),) + CXXFLAGS += -DMLI_2_0_KRNL_TEST + CCFLAGS += -DMLI_2_0_KRNL_TEST +endif + +ifneq ($(BUILD_LIB_DIR), ) + LDFLAGS += -Hlib=$(BUILD_LIB_DIR) +else + $(warning BUILD_LIB_DIR variable is not specified. Default will be used.) 
+endif + +endif # ARC_TAGS + +EXCLUDED_TESTS := \ + $(TENSORFLOW_ROOT)tensorflow/lite/micro/memory_arena_threshold_test.cc + +MICROLITE_TEST_SRCS := $(filter-out $(EXCLUDED_TESTS), $(MICROLITE_TEST_SRCS)) + +EXCLUDED_EXAMPLE_TESTS := \ + $(TENSORFLOW_ROOT)tensorflow/lite/micro/examples/hello_world/Makefile.inc \ + $(TENSORFLOW_ROOT)tensorflow/lite/micro/examples/memory_footprint/Makefile.inc \ + $(TENSORFLOW_ROOT)tensorflow/lite/micro/examples/micro_speech/Makefile.inc \ + $(TENSORFLOW_ROOT)tensorflow/lite/micro/examples/network_tester/Makefile.inc \ + $(TENSORFLOW_ROOT)tensorflow/lite/micro/examples/person_detection/Makefile.inc + +MICRO_LITE_EXAMPLE_TESTS := $(filter-out $(EXCLUDED_EXAMPLE_TESTS), $(MICRO_LITE_EXAMPLE_TESTS)) + +# Removing integration tests for ARC due to issues. +MICRO_LITE_INTEGRATION_TESTS := + +TEST_SCRIPT := $(TENSORFLOW_ROOT)tensorflow/lite/micro/testing/test_with_arc_mdb.sh + +TARGET_SPECIFIC_MAKE_TEST := 1 + +TEST_TARGET_BINARIES = $(shell ls -1 $(BINDIR)*_test) + +# Removing conv test due to bug in one of cases. +EXCLUDED_TEST_BINARIES := \ + $(BINDIR)kernel_conv_test + +TEST_TARGET_BINARIES := $(filter-out $(EXCLUDED_TEST_BINARIES), $(TEST_TARGET_BINARIES)) + +test: build + $(foreach test,$(TEST_TARGET_BINARIES),$(TEST_SCRIPT) $(test) $(TCF_FILE) $(TEST_PASS_STRING) || exit;) diff --git a/tensorflow/lite/micro/tools/make/targets/arc_emsdp_makefile.inc b/tensorflow/lite/micro/tools/make/targets/arc_emsdp_makefile.inc new file mode 100644 index 0000000..f7c3066 --- /dev/null +++ b/tensorflow/lite/micro/tools/make/targets/arc_emsdp_makefile.inc @@ -0,0 +1,29 @@ +# Copyright 2021 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Settings for EMSDP target (ARC processor) + +TARGET_ARCH := arc +ARC_TOOLCHAIN := mwdt + + +BUILD_ARC_MLI := false +ARC_MLI_PRE_COMPILED_TARGET := emsdp_em11d_em9d_dfss + +MLI_LIB_DIR = arc_mli_package +$(eval $(call add_third_party_download,$(EMBARC_MLI_PRE_COMPILED_URL),$(EMBARC_MLI_PRE_COMPILED_MD5),$(MLI_LIB_DIR),)) + +TCF_FILE = $(PWD)/$(MAKEFILE_DIR)/downloads/$(MLI_LIB_DIR)/hw/emsdp_em11d_em9d_dfss.tcf + +include $(MAKEFILE_DIR)/targets/arc/arc_common.inc \ No newline at end of file diff --git a/tensorflow/lite/micro/tools/make/targets/bluepill/bluepill.lds b/tensorflow/lite/micro/tools/make/targets/bluepill/bluepill.lds new file mode 100644 index 0000000..cfa9b39 --- /dev/null +++ b/tensorflow/lite/micro/tools/make/targets/bluepill/bluepill.lds @@ -0,0 +1,108 @@ +/* Copyright 2020 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/ + +/* Copied and modified from: + https://github.com/google/stm32_bare_lib/blob/master/stm32_linker_layout.lds + + Modifications: + * increased the flash and RAM to 16MB (which far exceeds the actual + hardware) + +The primary purpose of using Renode in the TFLM repository is to be +able to run tests on a variety of models in simulation without being limited by +the constraints of the hardware. +*/ + +/* + * 0x00000000 - 0x07ffffff - aliased to flash or sys memory depending on BOOT jumpers. + * 0x08000000 - 0x08ffffff - Flash. + * 0x1ffff000 - 0x1ffff7ff - Boot firmware in system memory. + * 0x1ffff800 - 0x1fffffff - Option bytes. + * 0x20000000 - 0x20ffffff - SRAM. + * 0x40000000 - 0x40023400 - Peripherals + */ + +/* Define main entry point */ +ENTRY(_main) + +MEMORY { +RAM (xrw) : ORIGIN = 0x20000000, LENGTH = 16384K +FLASH (rx) : ORIGIN = 0x8000000, LENGTH = 16384K +} + +/* Compute where the stack ends rather than hard coding it */ +_ld_stack_end_addr = ORIGIN(RAM) + LENGTH(RAM); +_ld_min_stack_size = 0x200; + +SECTIONS { + +/* interrupt vector goes to top of flash */ + +.interrupt_vector : { + . = ALIGN(4); + KEEP(*(.interrupt_vector)) + . = ALIGN(4); +} >FLASH + +/* read only .text and .rodata go to flash */ + +.text : { + . = ALIGN(4); + KEEP(*(.text.interrupt_handler)) + *(.text*) +} >FLASH + +.rodata : { + . = ALIGN(4); + *(.rodata*) + . = ALIGN(4); +} >FLASH + +/* read mwrite data needs to be stored in flash but copied to ram */ +.data : { + . = ALIGN(4); + _ld_data_load_dest_start = .; /* export where to load from */ + *(.data*) + . = ALIGN(4); + _ld_data_load_dest_stop = .; /* export where to load from */ +} >RAM AT> FLASH +_ld_data_load_source = LOADADDR(.data); + +/* unitialized data section needs zero initialization */ +.bss : +{ + . = ALIGN(4); + _ld_bss_data_start = .; + *(.bss*) + . = ALIGN(4); + _ld_bss_data_stop = .; +} >RAM + +._user_heap_stack : +{ + . 
= ALIGN(8); + . += _ld_min_stack_size; + PROVIDE(end = .); + . = ALIGN(8); +} >RAM + +/DISCARD/ : +{ + libc.a (*) + libm.a (*) + libgcc.a (*) +} + +} /* SECTIONS */ diff --git a/tensorflow/lite/micro/tools/make/targets/bluepill_makefile.inc b/tensorflow/lite/micro/tools/make/targets/bluepill_makefile.inc new file mode 100644 index 0000000..c14bda4 --- /dev/null +++ b/tensorflow/lite/micro/tools/make/targets/bluepill_makefile.inc @@ -0,0 +1,89 @@ +export PATH := $(DOWNLOADS_DIR)/gcc_embedded/bin/:$(PATH) +TARGET_ARCH := cortex-m3 +TARGET_TOOLCHAIN_PREFIX := arm-none-eabi- + +DOWNLOAD_RESULT := $(shell $(MAKEFILE_DIR)/arm_gcc_download.sh ${DOWNLOADS_DIR} $(TENSORFLOW_ROOT)) +ifneq ($(DOWNLOAD_RESULT), SUCCESS) + $(error Something went wrong with the GCC download: $(DOWNLOAD_RESULT)) +endif + +DOWNLOAD_RESULT := $(shell $(MAKEFILE_DIR)/renode_download.sh ${DOWNLOADS_DIR} $(TENSORFLOW_ROOT)) +ifneq ($(DOWNLOAD_RESULT), SUCCESS) + $(error Something went wrong with the renode download: $(DOWNLOAD_RESULT)) +endif + +DOWNLOAD_RESULT := $(shell $(MAKEFILE_DIR)/ext_libs/cmsis_download.sh ${DOWNLOADS_DIR} $(TENSORFLOW_ROOT)) +ifneq ($(DOWNLOAD_RESULT), SUCCESS) + $(error Something went wrong with the CMSIS download: $(DOWNLOAD_RESULT)) +endif + +DOWNLOAD_RESULT := $(shell $(MAKEFILE_DIR)/ext_libs/stm32_bare_lib_download.sh ${DOWNLOADS_DIR} $(TENSORFLOW_ROOT)) +ifneq ($(DOWNLOAD_RESULT), SUCCESS) + $(error Something went wrong with the STM32 Bare Lib download: $(DOWNLOAD_RESULT)) +endif + +PLATFORM_FLAGS = \ + -DTF_LITE_MCU_DEBUG_LOG \ + -mcpu=cortex-m3 \ + -mthumb \ + -Wno-vla \ + -Wno-shadow \ + -fomit-frame-pointer \ + -nostdlib + +# TODO(b/168334217): Currently we always add -DNDEBUG because the build is +# broken w/o it. Remove this workaround once the issue is resolved. +PLATFORM_FLAGS += -DNDEBUG + +# TODO(#46937): Remove once initialization of global variables is sorted out. 
+PLATFORM_FLAGS += -DRENODE + +CXXFLAGS += $(PLATFORM_FLAGS) -fno-use-cxa-atexit +CCFLAGS += $(PLATFORM_FLAGS) + +LDFLAGS += \ + -T $(MAKEFILE_DIR)/targets/bluepill/bluepill.lds \ + -Wl,-Map=gen/$(TARGET).map,--cref + +# Additional include paths needed for the stm_32_bare_lib only. +INCLUDES += \ + -isystem$(DOWNLOADS_DIR)/cmsis/CMSIS/Core/Include/ \ + -I$(DOWNLOADS_DIR)/stm32_bare_lib/include + +MICROLITE_CC_SRCS += \ + $(wildcard $(DOWNLOADS_DIR)/stm32_bare_lib/source/*.c) \ + $(wildcard $(DOWNLOADS_DIR)/stm32_bare_lib/source/*.cc) +EXCLUDED_SRCS := \ + $(DOWNLOADS_DIR)/stm32_bare_lib/source/debug_log.c +MICROLITE_CC_SRCS := $(filter-out $(EXCLUDED_SRCS), $(MICROLITE_CC_SRCS)) + +# Excludes micro_allocator_test because it calls CreateQuantizedFlatbufferTensor, +# which use std::vector constructor which then invokes new. +# Excludes memory_arena_threshold_test because the size difference of some +# allocator classes between different architectures. +# TODO(b/158651472): Fix the memory_arena_threshold_test +EXCLUDED_TESTS := \ + $(TENSORFLOW_ROOT)tensorflow/lite/micro/micro_allocator_test.cc \ + $(TENSORFLOW_ROOT)tensorflow/lite/micro/memory_arena_threshold_test.cc + +# flatbuffer_utils_test is intentionaly disabled because the flexbuffer builder +# uses dynamic memory. 
+EXCLUDED_TESTS += $(TENSORFLOW_ROOT)tensorflow/lite/micro/flatbuffer_utils_test.cc + +MICROLITE_TEST_SRCS := $(filter-out $(EXCLUDED_TESTS), $(MICROLITE_TEST_SRCS)) + +EXCLUDED_EXAMPLE_TESTS := \ + $(TENSORFLOW_ROOT)tensorflow/lite/micro/examples/micro_speech/Makefile.inc + +MICRO_LITE_EXAMPLE_TESTS := $(filter-out $(EXCLUDED_EXAMPLE_TESTS), $(MICRO_LITE_EXAMPLE_TESTS)) + +TEST_SCRIPT := $(TENSORFLOW_ROOT)tensorflow/lite/micro/testing/test_with_renode.sh + +# We are setting this variable to non-zero to allow us to have a custom +# implementation of `make test` for bluepill +TARGET_SPECIFIC_MAKE_TEST := 1 + +TEST_TARGET_BINARIES = $(shell ls -1 $(BINDIR)/*_test) + +test: build + $(TEST_SCRIPT) "$(TEST_TARGET_BINARIES)" $(TEST_PASS_STRING) $(TARGET) diff --git a/tensorflow/lite/micro/tools/make/targets/ceva/CEVA_BX1_TFLM.ld b/tensorflow/lite/micro/tools/make/targets/ceva/CEVA_BX1_TFLM.ld new file mode 100755 index 0000000..666c59a --- /dev/null +++ b/tensorflow/lite/micro/tools/make/targets/ceva/CEVA_BX1_TFLM.ld @@ -0,0 +1,234 @@ +/* Copyright 2019 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +OUTPUT(a.elf) + +/* By default, program starts from reset address (the default location of the interrupt table) */ +ENTRY(__cxd_inttbl_start) + +/** Memory configuration parameters. 
+ * The parameters become application symbols and can be referred from application + */ +__internal_code_start = DEFINED(__internal_code_start) ? __internal_code_start : 0x00000000; +__internal_code_size = DEFINED(__internal_code_size ) ? __internal_code_size : 256k; +__internal_data_start = DEFINED(__internal_data_start) ? __internal_data_start : 0x00000000; +__internal_data_size = DEFINED(__internal_data_size ) ? __internal_data_size : 512k; +__external_start = DEFINED(__external_start ) ? __external_start : 0x40000000; +__external_size = DEFINED(__external_size ) ? __external_size : 0x40000000; +__rom_start = DEFINED(__rom_start ) ? __rom_start : 0xC0000000; +__rom_size = DEFINED(__rom_size ) ? __rom_size : 1024M; + +__malloc_size = DEFINED(__malloc_size ) ? __malloc_size : 32k; +__stack_size = DEFINED(__stack_size ) ? __stack_size : 32k; +__arg_sect_size = DEFINED(__arg_sect_size ) ? __arg_sect_size : 512; + +MEMORY { + INTERNAL_CODE (rx) : ORIGIN = __internal_code_start, LENGTH = __internal_code_size + INTERNAL_DATA (rw) : ORIGIN = __internal_data_start, LENGTH = __internal_data_size + EXTERNAL (rwx) : ORIGIN = __external_start , LENGTH = __external_size + ROM (rx) : ORIGIN = __rom_start , LENGTH = __rom_size +} + +SECTIONS { + .inttbl : ALIGN(0x20) { + /** The interrupt vector table. Contains the NMI + * and maskable interrupt handlers + */ + . = 0x0; + KEEP(*(.inttbl)) + . = ALIGN(0x20); + KEEP(*(.sinttbl)) + } >INTERNAL_CODE + + .data.internal : ALIGN(0x20) { + PROVIDE(__data_internal_start = ABSOLUTE(.)); + /* Don't map any data at address zero to avoid issues with C NULL + * pointer checks + */ + . 
+= 0x4; + + PROVIDE(__data_start = ABSOLUTE(.)); + *(.data .data.*) + PROVIDE(__data_end = ABSOLUTE(.)); + PROVIDE(__data_size = ABSOLUTE(+__data_end - __data_start)); + + PROVIDE(__sdata_start = ABSOLUTE(.)); + *(.sdata .sdata.*) + PROVIDE(__sdata_end = ABSOLUTE(.)); + PROVIDE(__sdata_size = ABSOLUTE(+__sdata_end - __sdata_start)); + + PROVIDE(__data_internal_end = ABSOLUTE(.)); + PROVIDE(__data_internal_size = ABSOLUTE(__data_internal_end - __data_internal_start)); + } >INTERNAL_DATA + + .data.internal.clone (NOLOAD) : ALIGN(0x20) { + PROVIDE(__data_internal_clone_start = ABSOLUTE(.)); + . = ABSOLUTE(. + __data_internal_size); + } >INTERNAL_DATA + + .data.internal.ro : ALIGN(0x20) { + PROVIDE(__data_internal_ro_start = ABSOLUTE(.)); + PROVIDE(__rodata_start = ABSOLUTE(.)); + *(.rodata .rodata.*) + PROVIDE(__rodata_end = ABSOLUTE(.)); + PROVIDE(__rodata_size = ABSOLUTE(+__rodata_end - __rodata_start)); + + PROVIDE(__data_internal_ro_end = ABSOLUTE(.)); + PROVIDE(__data_internal_ro_size = ABSOLUTE(__data_internal_ro_end - __data_internal_ro_start)); + } >INTERNAL_DATA + + .cst.call : ALIGN(4) { + PROVIDE(__cst_call_start = ABSOLUTE(.)); + *(.cst.call) + PROVIDE(__cst_call_end = ABSOLUTE(.)); + } >INTERNAL_DATA + + .cst.mov : ALIGN(4) { + PROVIDE(__cst_mov_start = ABSOLUTE(.)); + *(.cst.mov) + PROVIDE(__cst_mov_end = ABSOLUTE(.)); + } >INTERNAL_DATA + + .bss (NOLOAD) : ALIGN(0x20) { + PROVIDE(__bss_start = ABSOLUTE(.)); + *(.bss .bss.*) + PROVIDE(__common_start = ABSOLUTE(.)); + *(COMMON) + PROVIDE(__common_end = ABSOLUTE(.)); + PROVIDE(__common_size = ABSOLUTE(+__common_end - __common_start)); + PROVIDE(__bss_end = ABSOLUTE(.)); + PROVIDE(__bss_size = ABSOLUTE(+__bss_end - __bss_start)); + } >INTERNAL_DATA + + __STACK_SECT (NOLOAD) : ALIGN(0x10) { + __stack_start = ABSOLUTE(.); + . = . 
+ __stack_size; + __stack_end = ABSOLUTE(.); + } >INTERNAL_DATA + + .text : ALIGN(0x20) { + PROVIDE(__text_start = ABSOLUTE(.)); + /* The __call_saved* functions need to be placed at low addresses for + * calling with absolute call instructions + */ + *(.text.__call_saved*) + *(.text .text.*) + PROVIDE(__text_end = ABSOLUTE(.)); + } >EXTERNAL + + .data.external : ALIGN(0x20) { + /** .data1, .rodata1, .sdata1 are all for large symbols which cannot + * fit in limited internal memory. We put them in external memory by + * default. */ + PROVIDE(__data_external_start = ABSOLUTE(.)); + + PROVIDE(__data1_start = ABSOLUTE(.)); + *(.data1 .data1.*) + PROVIDE(__data1_end = ABSOLUTE(.)); + + PROVIDE(__sdata1_start = ABSOLUTE(.)); + *(.sdata1 .sdata1.*) + PROVIDE(__sdata1_end = ABSOLUTE(.)); + PROVIDE(__sdata1_size = ABSOLUTE(+__sdata1_end - __sdata1_start)); + + PROVIDE(__data_external_end = ABSOLUTE(.)); + PROVIDE(__data_external_size = ABSOLUTE(__data_external_end - __data_external_start)); + } >EXTERNAL + + .data.external.clone (NOLOAD) : ALIGN(0x20) { + PROVIDE(__data_external_clone_start = ABSOLUTE(.)); + . = ABSOLUTE(. + __data_external_size); + } >EXTERNAL + + .data.external.ro : ALIGN(0x20) { + /** .data1, .rodata1, .sdata1 are all for large symbols which cannot + * fit in limited internal memory. We put them in external memory by + * default. */ + PROVIDE(__data_external_ro_start = ABSOLUTE(.)); + + PROVIDE(__rodata1_start = ABSOLUTE(.)); + *(.rodata1 .rodata1.*) + PROVIDE(__rodata1_end = ABSOLUTE(.)); + PROVIDE(__rodata1_size = ABSOLUTE(+__rodata1_end - __rodata1_start)); + + /* Constructors and destructors are called once per program invocation, + * so are never in the hot path; they shouldn't waste space in limited + * internal memory so we place them in slower, external memory */ + + . 
= ALIGN(4); /* constructors must be aligned on a word boundary */ + PROVIDE(__init_array_start = ABSOLUTE(.)); + KEEP(*(SORT_BY_INIT_PRIORITY(.init_array.*) SORT_BY_INIT_PRIORITY(.ctors.*))); + KEEP(*(SORT_BY_INIT_PRIORITY(.init_array*) SORT_BY_INIT_PRIORITY(.ctors*))); + PROVIDE(__init_array_end = ABSOLUTE(.)); + + PROVIDE(__fini_array_start = ABSOLUTE(.)); + /* destructors are run in reverse order of their priority */ + KEEP(*(SORT_BY_INIT_PRIORITY(.fini_array.*) SORT_BY_INIT_PRIORITY(.dtors.*))); + KEEP(*(SORT_BY_INIT_PRIORITY(.fini_array*) SORT_BY_INIT_PRIORITY(.dtors*))); + PROVIDE(__fini_array_end = ABSOLUTE(.)); + + PROVIDE(__data_external_ro_end = ABSOLUTE(.)); + PROVIDE(__data_external_ro_size = ABSOLUTE(__data_external_ro_end - __data_external_ro_start)); + } >EXTERNAL + + .bss1 (NOLOAD) : ALIGN(0x20) { + /** + * `.bss1` is for large zero-initialized symbols that do not fit in + * internal data + */ + PROVIDE(__bss1_start = ABSOLUTE(.)); + *(.bss1 .bss1.*) + PROVIDE(__large_common_start = ABSOLUTE(.)); + *(LARGE_COMMON) + PROVIDE(__large_common_end = ABSOLUTE(.)); + PROVIDE(__large_common_size = ABSOLUTE(+__large_common_end - __large_common_start)); + PROVIDE(__bss1_end = ABSOLUTE(.)); + PROVIDE(__bss1_size = ABSOLUTE(+__bss1_end - __bss1_start)); + } >EXTERNAL + + /* Program arguments are loaded by `_start` routine from `__arg_sect_start`. + * When the user has set a zero size for the section, argc, and argv + * will be zero and NULL, respectively. + * Although likely small, they are on the slow path so by default they + * go at the end of external memory + */ + __ARG_SECT (NOLOAD) : ALIGN(0x4) { + __arg_sect_start = .; + . = . + (__arg_sect_size ? __arg_sect_size + 4 : 0); + __arg_sect_end = .; + } >EXTERNAL + + __MALLOC_SECT (NOLOAD) : ALIGN(0x10) { + PROVIDE(__malloc_start = ABSOLUTE(.)); + . = . 
+ __malloc_size; + PROVIDE(__malloc_end = ABSOLUTE(.)); + } >EXTERNAL + + data_internal_loadable_addr = __data_internal_clone_start; + data_external_loadable_addr = __data_external_clone_start; + + /DISCARD/ : { + /* Note: The CEVA Debugger and Restriction Checker use information + * stored in the ".note.CEVA-arch" section. Do NOT discard this section + * for projects in development phase. This section has no effect on the + * applications footprint */ + *(.comment) + *(.note.GNU-stack) + /* The X-DSP ABI uses a custom relocation format stored in its own + * section. These are left in the binary by default but are unneeded. */ + *(.ceva_reloc) + } + +} diff --git a/tensorflow/lite/micro/tools/make/targets/ceva/CEVA_BX1_TFLM_18.0.2.ld b/tensorflow/lite/micro/tools/make/targets/ceva/CEVA_BX1_TFLM_18.0.2.ld new file mode 100755 index 0000000..dce5330 --- /dev/null +++ b/tensorflow/lite/micro/tools/make/targets/ceva/CEVA_BX1_TFLM_18.0.2.ld @@ -0,0 +1,205 @@ +/* Copyright 2019 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +OUTPUT(a.elf) + +/* By default, program starts from reset address (the default location of the interrupt table) */ +ENTRY(__cxd_inttbl_start) + +/** Memory configuration parameters. + * The parameters become application symbols and can be referred from application + */ +__internal_code_start = DEFINED(__internal_code_start) ? 
__internal_code_start : 0x00000000; +__internal_code_size = DEFINED(__internal_code_size ) ? __internal_code_size : 256k; +__internal_data_start = DEFINED(__internal_data_start) ? __internal_data_start : 0x00000000; +__internal_data_size = DEFINED(__internal_data_size ) ? __internal_data_size : 512k; +__external_start = DEFINED(__external_start ) ? __external_start : 0x40000000; +__external_size = DEFINED(__external_size ) ? __external_size : 0x40000000; +__rom_start = DEFINED(__rom_start ) ? __rom_start : 0xC0000000; +__rom_size = DEFINED(__rom_size ) ? __rom_size : 1024M; + +__malloc_size = DEFINED(__malloc_size ) ? __malloc_size : 64k; +__stack_size = DEFINED(__stack_size ) ? __stack_size : 64k; +__arg_sect_size = DEFINED(__arg_sect_size ) ? __arg_sect_size : 512; + +MEMORY { + INTERNAL_CODE (rx) : ORIGIN = __internal_code_start, LENGTH = __internal_code_size + INTERNAL_DATA (rw) : ORIGIN = __internal_data_start, LENGTH = __internal_data_size + EXTERNAL (rwx) : ORIGIN = __external_start , LENGTH = __external_size + ROM (rx) : ORIGIN = __rom_start , LENGTH = __rom_size +} + +SECTIONS { + .inttbl : ALIGN(0x20) { + /** The interrupt vector resides at address zero and contains the NMI + * and maskable interrupt handlers + */ + . = 0x0; + KEEP(*(.inttbl)) + . = ALIGN(0x20); + KEEP(*(.sinttbl)) + } >INTERNAL_CODE AT>ROM + + .data.internal : ALIGN(0x20) { + PROVIDE(__data_internal_start = ABSOLUTE(.)); + /* Don't map any data at address zero to avoid issues with C NULL + * pointer checks + */ + . 
+= 0x4; + + PROVIDE(__data_start = ABSOLUTE(.)); + *(.data .data.*) + PROVIDE(__data_end = ABSOLUTE(.)); + PROVIDE(__data_size = +__data_end - __data_start); + + PROVIDE(__sdata_start = ABSOLUTE(.)); + *(.sdata .sdata.*) + PROVIDE(__sdata_end = ABSOLUTE(.)); + PROVIDE(__sdata_size = +__sdata_end - __sdata_start); + + PROVIDE(__rodata_start = ABSOLUTE(.)); + *(.rodata .rodata.*) + PROVIDE(__rodata_end = ABSOLUTE(.)); + PROVIDE(__rodata_size = +__rodata_end - __rodata_start); + + PROVIDE(__data_internal_end = ABSOLUTE(.)); + PROVIDE(__data_internal_size = __data_internal_end - __data_internal_start); + } >INTERNAL_DATA AT>ROM + + .cst.call : ALIGN(4) { + PROVIDE(__cst_call_start = ABSOLUTE(.)); + *(.cst.call) + PROVIDE(__cst_call_end = ABSOLUTE(.)); + } >INTERNAL_DATA AT>ROM + + .cst.mov : ALIGN(4) { + PROVIDE(__cst_mov_start = ABSOLUTE(.)); + *(.cst.mov) + PROVIDE(__cst_mov_end = ABSOLUTE(.)); + } >INTERNAL_DATA AT>ROM + + .bss (NOLOAD) : ALIGN(0x20) { + PROVIDE(__bss_start = ABSOLUTE(.)); + *(.bss .bss.*) + PROVIDE(__common_start = ABSOLUTE(.)); + *(COMMON) + PROVIDE(__common_end = ABSOLUTE(.)); + PROVIDE(__common_size = +__common_end - __common_start); + PROVIDE(__bss_end = ABSOLUTE(.)); + PROVIDE(__bss_size = +__bss_end - __bss_start); + } >INTERNAL_DATA + + __STACK_SECT (NOLOAD) : ALIGN(0x10) { + __stack_start = ABSOLUTE(.); + . = . + __stack_size; + __stack_end = ABSOLUTE(.); + } >INTERNAL_DATA + + .text : ALIGN(0x20) { + PROVIDE(__text_start = ABSOLUTE(.)); + /* The __call_saved* functions need to be placed at low addresses for + * calling with absolute call instructions + */ + *(.text.__call_saved*) + *(.text .text.*) + PROVIDE(__text_end = ABSOLUTE(.)); + } >EXTERNAL AT>ROM + + .data.external : ALIGN(0x20) { + /** .data1, .rodata1, .sdata1 are all for large symbols which cannot + * fit in limited internal memory. We put them in external memory by + * default. 
*/ + PROVIDE(__data_external_start = ABSOLUTE(.)); + + PROVIDE(__data1_start = ABSOLUTE(.)); + *(.data1 .data1.*) + PROVIDE(__data1_end = ABSOLUTE(.)); + + PROVIDE(__sdata1_start = ABSOLUTE(.)); + *(.sdata1 .sdata1.*) + PROVIDE(__sdata1_end = ABSOLUTE(.)); + PROVIDE(__sdata1_size = +__sdata1_end - __sdata1_start); + + PROVIDE(__rodata1_start = ABSOLUTE(.)); + *(.rodata1 .rodata1.*) + PROVIDE(__rodata1_end = ABSOLUTE(.)); + PROVIDE(__rodata1_size = +__rodata1_end - __rodata1_start); + + /* Constructors and destructors are called once per program invocation, + * so are never in the hot path; they shouldn't waste space in limited + * internal memory so we place them in slower, external memory + */ + + . = ALIGN(4); /* constructors must be aligned on a word boundary */ + PROVIDE(__init_array_start = ABSOLUTE(.)); + KEEP(*(SORT_BY_INIT_PRIORITY(.init_array.*) SORT_BY_INIT_PRIORITY(.ctors.*))); + KEEP(*(SORT_BY_INIT_PRIORITY(.init_array*) SORT_BY_INIT_PRIORITY(.ctors*))); + PROVIDE(__init_array_end = ABSOLUTE(.)); + + PROVIDE(__fini_array_start = ABSOLUTE(.)); + /* destructors are run in reverse order of their priority */ + KEEP(*(SORT_BY_INIT_PRIORITY(.fini_array.*) SORT_BY_INIT_PRIORITY(.dtors.*))); + KEEP(*(SORT_BY_INIT_PRIORITY(.fini_array*) SORT_BY_INIT_PRIORITY(.dtors*))); + PROVIDE(__fini_array_end = ABSOLUTE(.)); + + PROVIDE(__data_external_end = ABSOLUTE(.)); + PROVIDE(__data_external_size = __data_external_end - __data_external_start); + } >EXTERNAL AT>ROM + + .bss1 (NOLOAD) : ALIGN(0x20) { + /** + * `.bss1` is for large zero-initialized symbols that do not fit in + * internal data + */ + PROVIDE(__bss1_start = ABSOLUTE(.)); + *(.bss1 .bss1.*) + PROVIDE(__large_common_start = ABSOLUTE(.)); + *(LARGE_COMMON) + PROVIDE(__large_common_end = ABSOLUTE(.)); + PROVIDE(__large_common_size = +__large_common_end - __large_common_start); + PROVIDE(__bss1_end = ABSOLUTE(.)); + PROVIDE(__bss1_size = +__bss1_end - __bss1_start); + } >EXTERNAL + + /* Program arguments are 
loaded by `_start` routine from `__arg_sect_start`. + * When the user has set a zero size for the section, argc, and argv + * will be zero and NULL, respectively. + * Although likely small, they are on the slow path so by default they + * go at the end of external memory + */ + __ARG_SECT (NOLOAD) : ALIGN(0x4) { + __arg_sect_start = .; + . = . + (__arg_sect_size ? __arg_sect_size + 4 : 0); + __arg_sect_end = .; + } >EXTERNAL + + __MALLOC_SECT (NOLOAD) : ALIGN(0x10) { + PROVIDE(__malloc_start = ABSOLUTE(.)); + . = . + __malloc_size; + PROVIDE(__malloc_end = ABSOLUTE(.)); + } >EXTERNAL + + /DISCARD/ : { + /* Discarding .note.CEVA-arch saves a fair amount of space but + * confounds the restriction checker. YMMV */ + /* *(.note.CEVA-arch) */ + *(.comment) + *(.note.GNU-stack) + /* The X-DSP ABI uses a custom relocation format stored in its own + * section. These are left in the binary by default but are unneeded. */ + *(.ceva_reloc) + } + +} diff --git a/tensorflow/lite/micro/tools/make/targets/ceva/CEVA_BX1_TFLM_18.0.3.ld b/tensorflow/lite/micro/tools/make/targets/ceva/CEVA_BX1_TFLM_18.0.3.ld new file mode 100755 index 0000000..0fa2044 --- /dev/null +++ b/tensorflow/lite/micro/tools/make/targets/ceva/CEVA_BX1_TFLM_18.0.3.ld @@ -0,0 +1,235 @@ + +/* Copyright 2019 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/ + +OUTPUT(a.elf) + +/* By default, program starts from reset address (the default location of the interrupt table) */ +ENTRY(__cxd_inttbl_start) + +/** Memory configuration parameters. + * The parameters become application symbols and can be referred from application + */ +__internal_code_start = DEFINED(__internal_code_start) ? __internal_code_start : 0x00000000; +__internal_code_size = DEFINED(__internal_code_size ) ? __internal_code_size : 256k; +__internal_data_start = DEFINED(__internal_data_start) ? __internal_data_start : 0x00000000; +__internal_data_size = DEFINED(__internal_data_size ) ? __internal_data_size : 512k; +__external_start = DEFINED(__external_start ) ? __external_start : 0x40000000; +__external_size = DEFINED(__external_size ) ? __external_size : 0x40000000; +__rom_start = DEFINED(__rom_start ) ? __rom_start : 0xC0000000; +__rom_size = DEFINED(__rom_size ) ? __rom_size : 1024M; + +__malloc_size = DEFINED(__malloc_size ) ? __malloc_size : 32k; +__stack_size = DEFINED(__stack_size ) ? __stack_size : 32k; +__arg_sect_size = DEFINED(__arg_sect_size ) ? __arg_sect_size : 512; + +MEMORY { + INTERNAL_CODE (rx) : ORIGIN = __internal_code_start, LENGTH = __internal_code_size + INTERNAL_DATA (rw) : ORIGIN = __internal_data_start, LENGTH = __internal_data_size + EXTERNAL (rwx) : ORIGIN = __external_start , LENGTH = __external_size + ROM (rx) : ORIGIN = __rom_start , LENGTH = __rom_size +} + +SECTIONS { + .inttbl : ALIGN(0x20) { + /** The interrupt vector table. Contains the NMI + * and maskable interrupt handlers + */ + . = 0x0; + KEEP(*(.inttbl)) + . = ALIGN(0x20); + KEEP(*(.sinttbl)) + } >INTERNAL_CODE + + .data.internal : ALIGN(0x20) { + PROVIDE(__data_internal_start = ABSOLUTE(.)); + /* Don't map any data at address zero to avoid issues with C NULL + * pointer checks + */ + . 
+= 0x4; + + PROVIDE(__data_start = ABSOLUTE(.)); + *(.data .data.*) + PROVIDE(__data_end = ABSOLUTE(.)); + PROVIDE(__data_size = ABSOLUTE(+__data_end - __data_start)); + + PROVIDE(__sdata_start = ABSOLUTE(.)); + *(.sdata .sdata.*) + PROVIDE(__sdata_end = ABSOLUTE(.)); + PROVIDE(__sdata_size = ABSOLUTE(+__sdata_end - __sdata_start)); + + PROVIDE(__data_internal_end = ABSOLUTE(.)); + PROVIDE(__data_internal_size = ABSOLUTE(__data_internal_end - __data_internal_start)); + } >INTERNAL_DATA + + .data.internal.clone (NOLOAD) : ALIGN(0x20) { + PROVIDE(__data_internal_clone_start = ABSOLUTE(.)); + . = ABSOLUTE(. + __data_internal_size); + } >INTERNAL_DATA + + .data.internal.ro : ALIGN(0x20) { + PROVIDE(__data_internal_ro_start = ABSOLUTE(.)); + PROVIDE(__rodata_start = ABSOLUTE(.)); + *(.rodata .rodata.*) + PROVIDE(__rodata_end = ABSOLUTE(.)); + PROVIDE(__rodata_size = ABSOLUTE(+__rodata_end - __rodata_start)); + + PROVIDE(__data_internal_ro_end = ABSOLUTE(.)); + PROVIDE(__data_internal_ro_size = ABSOLUTE(__data_internal_ro_end - __data_internal_ro_start)); + } >INTERNAL_DATA + + .cst.call : ALIGN(4) { + PROVIDE(__cst_call_start = ABSOLUTE(.)); + *(.cst.call) + PROVIDE(__cst_call_end = ABSOLUTE(.)); + } >INTERNAL_DATA + + .cst.mov : ALIGN(4) { + PROVIDE(__cst_mov_start = ABSOLUTE(.)); + *(.cst.mov) + PROVIDE(__cst_mov_end = ABSOLUTE(.)); + } >INTERNAL_DATA + + .bss (NOLOAD) : ALIGN(0x20) { + PROVIDE(__bss_start = ABSOLUTE(.)); + *(.bss .bss.*) + PROVIDE(__common_start = ABSOLUTE(.)); + *(COMMON) + PROVIDE(__common_end = ABSOLUTE(.)); + PROVIDE(__common_size = ABSOLUTE(+__common_end - __common_start)); + PROVIDE(__bss_end = ABSOLUTE(.)); + PROVIDE(__bss_size = ABSOLUTE(+__bss_end - __bss_start)); + } >INTERNAL_DATA + + __STACK_SECT (NOLOAD) : ALIGN(0x10) { + __stack_start = ABSOLUTE(.); + . = . 
+ __stack_size; + __stack_end = ABSOLUTE(.); + } >INTERNAL_DATA + + .text : ALIGN(0x20) { + PROVIDE(__text_start = ABSOLUTE(.)); + /* The __call_saved* functions need to be placed at low addresses for + * calling with absolute call instructions + */ + *(.text.__call_saved*) + *(.text .text.*) + PROVIDE(__text_end = ABSOLUTE(.)); + } >EXTERNAL + + .data.external : ALIGN(0x20) { + /** .data1, .rodata1, .sdata1 are all for large symbols which cannot + * fit in limited internal memory. We put them in external memory by + * default. */ + PROVIDE(__data_external_start = ABSOLUTE(.)); + + PROVIDE(__data1_start = ABSOLUTE(.)); + *(.data1 .data1.*) + PROVIDE(__data1_end = ABSOLUTE(.)); + + PROVIDE(__sdata1_start = ABSOLUTE(.)); + *(.sdata1 .sdata1.*) + PROVIDE(__sdata1_end = ABSOLUTE(.)); + PROVIDE(__sdata1_size = ABSOLUTE(+__sdata1_end - __sdata1_start)); + + PROVIDE(__data_external_end = ABSOLUTE(.)); + PROVIDE(__data_external_size = ABSOLUTE(__data_external_end - __data_external_start)); + } >EXTERNAL + + .data.external.clone (NOLOAD) : ALIGN(0x20) { + PROVIDE(__data_external_clone_start = ABSOLUTE(.)); + . = ABSOLUTE(. + __data_external_size); + } >EXTERNAL + + .data.external.ro : ALIGN(0x20) { + /** .data1, .rodata1, .sdata1 are all for large symbols which cannot + * fit in limited internal memory. We put them in external memory by + * default. */ + PROVIDE(__data_external_ro_start = ABSOLUTE(.)); + + PROVIDE(__rodata1_start = ABSOLUTE(.)); + *(.rodata1 .rodata1.*) + PROVIDE(__rodata1_end = ABSOLUTE(.)); + PROVIDE(__rodata1_size = ABSOLUTE(+__rodata1_end - __rodata1_start)); + + /* Constructors and destructors are called once per program invocation, + * so are never in the hot path; they shouldn't waste space in limited + * internal memory so we place them in slower, external memory */ + + . 
= ALIGN(4); /* constructors must be aligned on a word boundary */ + PROVIDE(__init_array_start = ABSOLUTE(.)); + KEEP(*(SORT_BY_INIT_PRIORITY(.init_array.*) SORT_BY_INIT_PRIORITY(.ctors.*))); + KEEP(*(SORT_BY_INIT_PRIORITY(.init_array*) SORT_BY_INIT_PRIORITY(.ctors*))); + PROVIDE(__init_array_end = ABSOLUTE(.)); + + PROVIDE(__fini_array_start = ABSOLUTE(.)); + /* destructors are run in reverse order of their priority */ + KEEP(*(SORT_BY_INIT_PRIORITY(.fini_array.*) SORT_BY_INIT_PRIORITY(.dtors.*))); + KEEP(*(SORT_BY_INIT_PRIORITY(.fini_array*) SORT_BY_INIT_PRIORITY(.dtors*))); + PROVIDE(__fini_array_end = ABSOLUTE(.)); + + PROVIDE(__data_external_ro_end = ABSOLUTE(.)); + PROVIDE(__data_external_ro_size = ABSOLUTE(__data_external_ro_end - __data_external_ro_start)); + } >EXTERNAL + + .bss1 (NOLOAD) : ALIGN(0x20) { + /** + * `.bss1` is for large zero-initialized symbols that do not fit in + * internal data + */ + PROVIDE(__bss1_start = ABSOLUTE(.)); + *(.bss1 .bss1.*) + PROVIDE(__large_common_start = ABSOLUTE(.)); + *(LARGE_COMMON) + PROVIDE(__large_common_end = ABSOLUTE(.)); + PROVIDE(__large_common_size = ABSOLUTE(+__large_common_end - __large_common_start)); + PROVIDE(__bss1_end = ABSOLUTE(.)); + PROVIDE(__bss1_size = ABSOLUTE(+__bss1_end - __bss1_start)); + } >EXTERNAL + + /* Program arguments are loaded by `_start` routine from `__arg_sect_start`. + * When the user has set a zero size for the section, argc and argv + * will be zero and NULL, respectively. + * Although likely small, they are on the slow path so by default they + * go at the end of external memory + */ + __ARG_SECT (NOLOAD) : ALIGN(0x4) { + __arg_sect_start = .; + . = . + (__arg_sect_size ? __arg_sect_size + 4 : 0); + __arg_sect_end = .; + } >EXTERNAL + + __MALLOC_SECT (NOLOAD) : ALIGN(0x10) { + PROVIDE(__malloc_start = ABSOLUTE(.)); + . = . 
+ __malloc_size; + PROVIDE(__malloc_end = ABSOLUTE(.)); + } >EXTERNAL + + data_internal_loadable_addr = __data_internal_clone_start; + data_external_loadable_addr = __data_external_clone_start; + + /DISCARD/ : { + /* Note: The CEVA Debugger and Restriction Checker use information + * stored in the ".note.CEVA-arch" section. Do NOT discard this section + * for projects in development phase. This section has no effect on the + * application's footprint */ + *(.comment) + *(.note.GNU-stack) + /* The X-DSP ABI uses a custom relocation format stored in its own + * section. These are left in the binary by default but are unneeded. */ + *(.ceva_reloc) + } + +} diff --git a/tensorflow/lite/micro/tools/make/targets/ceva/CEVA_BX1_TFLM_18.0.5.ld b/tensorflow/lite/micro/tools/make/targets/ceva/CEVA_BX1_TFLM_18.0.5.ld new file mode 100755 index 0000000..127ed82 --- /dev/null +++ b/tensorflow/lite/micro/tools/make/targets/ceva/CEVA_BX1_TFLM_18.0.5.ld @@ -0,0 +1,235 @@ + +/* Copyright 2021 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +OUTPUT(a.elf) + +/* By default, program starts from reset address (the default location of the interrupt table) */ +ENTRY(__cxd_inttbl_start) + +/** Memory configuration parameters. + * The parameters become application symbols and can be referred from application + */ +__internal_code_start = DEFINED(__internal_code_start) ? 
__internal_code_start : 0x00000000; +__internal_code_size = DEFINED(__internal_code_size ) ? __internal_code_size : 256k; +__internal_data_start = DEFINED(__internal_data_start) ? __internal_data_start : 0x00000000; +__internal_data_size = DEFINED(__internal_data_size ) ? __internal_data_size : 512k; +__external_start = DEFINED(__external_start ) ? __external_start : 0x40000000; +__external_size = DEFINED(__external_size ) ? __external_size : 0x40000000; +__rom_start = DEFINED(__rom_start ) ? __rom_start : 0xC0000000; +__rom_size = DEFINED(__rom_size ) ? __rom_size : 1024M; + +__malloc_size = DEFINED(__malloc_size ) ? __malloc_size : 32k; +__stack_size = DEFINED(__stack_size ) ? __stack_size : 32k; +__arg_sect_size = DEFINED(__arg_sect_size ) ? __arg_sect_size : 512; + +MEMORY { + INTERNAL_CODE (rx) : ORIGIN = __internal_code_start, LENGTH = __internal_code_size + INTERNAL_DATA (rw) : ORIGIN = __internal_data_start, LENGTH = __internal_data_size + EXTERNAL (rwx) : ORIGIN = __external_start , LENGTH = __external_size + ROM (rx) : ORIGIN = __rom_start , LENGTH = __rom_size +} + +SECTIONS { + .inttbl : ALIGN(0x20) { + /** The interrupt vector table. Contains the NMI + * and maskable interrupt handlers + */ + . = 0x0; + KEEP(*(.inttbl)) + . = ALIGN(0x20); + KEEP(*(.sinttbl)) + } >INTERNAL_CODE + + .data.internal : ALIGN(0x20) { + PROVIDE(__data_internal_start = ABSOLUTE(.)); + /* Don't map any data at address zero to avoid issues with C NULL + * pointer checks + */ + . 
+= 0x4; + + PROVIDE(__data_start = ABSOLUTE(.)); + *(.data .data.*) + PROVIDE(__data_end = ABSOLUTE(.)); + PROVIDE(__data_size = ABSOLUTE(+__data_end - __data_start)); + + PROVIDE(__sdata_start = ABSOLUTE(.)); + *(.sdata .sdata.*) + PROVIDE(__sdata_end = ABSOLUTE(.)); + PROVIDE(__sdata_size = ABSOLUTE(+__sdata_end - __sdata_start)); + + PROVIDE(__data_internal_end = ABSOLUTE(.)); + PROVIDE(__data_internal_size = ABSOLUTE(__data_internal_end - __data_internal_start)); + } >INTERNAL_DATA + + .data.internal.clone (NOLOAD) : ALIGN(0x20) { + PROVIDE(__data_internal_clone_start = ABSOLUTE(.)); + . = ABSOLUTE(. + __data_internal_size); + } >INTERNAL_DATA + + .data.internal.ro : ALIGN(0x20) { + PROVIDE(__data_internal_ro_start = ABSOLUTE(.)); + PROVIDE(__rodata_start = ABSOLUTE(.)); + *(.rodata .rodata.*) + PROVIDE(__rodata_end = ABSOLUTE(.)); + PROVIDE(__rodata_size = ABSOLUTE(+__rodata_end - __rodata_start)); + + PROVIDE(__data_internal_ro_end = ABSOLUTE(.)); + PROVIDE(__data_internal_ro_size = ABSOLUTE(__data_internal_ro_end - __data_internal_ro_start)); + } >INTERNAL_DATA + + .cst.call : ALIGN(4) { + PROVIDE(__cst_call_start = ABSOLUTE(.)); + *(.cst.call) + PROVIDE(__cst_call_end = ABSOLUTE(.)); + } >INTERNAL_DATA + + .cst.mov : ALIGN(4) { + PROVIDE(__cst_mov_start = ABSOLUTE(.)); + *(.cst.mov) + PROVIDE(__cst_mov_end = ABSOLUTE(.)); + } >INTERNAL_DATA + + .bss (NOLOAD) : ALIGN(0x20) { + PROVIDE(__bss_start = ABSOLUTE(.)); + *(.bss .bss.*) + PROVIDE(__common_start = ABSOLUTE(.)); + *(COMMON) + PROVIDE(__common_end = ABSOLUTE(.)); + PROVIDE(__common_size = ABSOLUTE(+__common_end - __common_start)); + PROVIDE(__bss_end = ABSOLUTE(.)); + PROVIDE(__bss_size = ABSOLUTE(+__bss_end - __bss_start)); + } >INTERNAL_DATA + + __STACK_SECT (NOLOAD) : ALIGN(0x10) { + __stack_start = ABSOLUTE(.); + . = . 
+ __stack_size; + __stack_end = ABSOLUTE(.); + } >INTERNAL_DATA + + .text : ALIGN(0x20) { + PROVIDE(__text_start = ABSOLUTE(.)); + /* The __call_saved* functions need to be placed at low addresses for + * calling with absolute call instructions + */ + *(.text.__call_saved*) + *(.text .text.*) + PROVIDE(__text_end = ABSOLUTE(.)); + } >EXTERNAL + + .data.external : ALIGN(0x20) { + /** .data1, .rodata1, .sdata1 are all for large symbols which cannot + * fit in limited internal memory. We put them in external memory by + * default. */ + PROVIDE(__data_external_start = ABSOLUTE(.)); + + PROVIDE(__data1_start = ABSOLUTE(.)); + *(.data1 .data1.*) + PROVIDE(__data1_end = ABSOLUTE(.)); + + PROVIDE(__sdata1_start = ABSOLUTE(.)); + *(.sdata1 .sdata1.*) + PROVIDE(__sdata1_end = ABSOLUTE(.)); + PROVIDE(__sdata1_size = ABSOLUTE(+__sdata1_end - __sdata1_start)); + + PROVIDE(__data_external_end = ABSOLUTE(.)); + PROVIDE(__data_external_size = ABSOLUTE(__data_external_end - __data_external_start)); + } >EXTERNAL + + .data.external.clone (NOLOAD) : ALIGN(0x20) { + PROVIDE(__data_external_clone_start = ABSOLUTE(.)); + . = ABSOLUTE(. + __data_external_size); + } >EXTERNAL + + .data.external.ro : ALIGN(0x20) { + /** .data1, .rodata1, .sdata1 are all for large symbols which cannot + * fit in limited internal memory. We put them in external memory by + * default. */ + PROVIDE(__data_external_ro_start = ABSOLUTE(.)); + + PROVIDE(__rodata1_start = ABSOLUTE(.)); + *(.rodata1 .rodata1.*) + PROVIDE(__rodata1_end = ABSOLUTE(.)); + PROVIDE(__rodata1_size = ABSOLUTE(+__rodata1_end - __rodata1_start)); + + /* Constructors and destructors are called once per program invocation, + * so are never in the hot path; they shouldn't waste space in limited + * internal memory so we place them in slower, external memory */ + + . 
= ALIGN(4); /* constructors must be aligned on a word boundary */ + PROVIDE(__init_array_start = ABSOLUTE(.)); + KEEP(*(SORT_BY_INIT_PRIORITY(.init_array.*) SORT_BY_INIT_PRIORITY(.ctors.*))); + KEEP(*(SORT_BY_INIT_PRIORITY(.init_array*) SORT_BY_INIT_PRIORITY(.ctors*))); + PROVIDE(__init_array_end = ABSOLUTE(.)); + + PROVIDE(__fini_array_start = ABSOLUTE(.)); + /* destructors are run in reverse order of their priority */ + KEEP(*(SORT_BY_INIT_PRIORITY(.fini_array.*) SORT_BY_INIT_PRIORITY(.dtors.*))); + KEEP(*(SORT_BY_INIT_PRIORITY(.fini_array*) SORT_BY_INIT_PRIORITY(.dtors*))); + PROVIDE(__fini_array_end = ABSOLUTE(.)); + + PROVIDE(__data_external_ro_end = ABSOLUTE(.)); + PROVIDE(__data_external_ro_size = ABSOLUTE(__data_external_ro_end - __data_external_ro_start)); + } >EXTERNAL + + .bss1 (NOLOAD) : ALIGN(0x20) { + /** + * `.bss1` is for large zero-initialized symbols that do not fit in + * internal data + */ + PROVIDE(__bss1_start = ABSOLUTE(.)); + *(.bss1 .bss1.*) + PROVIDE(__large_common_start = ABSOLUTE(.)); + *(LARGE_COMMON) + PROVIDE(__large_common_end = ABSOLUTE(.)); + PROVIDE(__large_common_size = ABSOLUTE(+__large_common_end - __large_common_start)); + PROVIDE(__bss1_end = ABSOLUTE(.)); + PROVIDE(__bss1_size = ABSOLUTE(+__bss1_end - __bss1_start)); + } >EXTERNAL + + /* Program arguments are loaded by `_start` routine from `__arg_sect_start`. + * When the user has set a zero size for the section, argc and argv + * will be zero and NULL, respectively. + * Although likely small, they are on the slow path so by default they + * go at the end of external memory + */ + __ARG_SECT (NOLOAD) : ALIGN(0x4) { + __arg_sect_start = .; + . = . + (__arg_sect_size ? __arg_sect_size + 4 : 0); + __arg_sect_end = .; + } >EXTERNAL + + __MALLOC_SECT (NOLOAD) : ALIGN(0x10) { + PROVIDE(__malloc_start = ABSOLUTE(.)); + . = . 
+ __malloc_size; + PROVIDE(__malloc_end = ABSOLUTE(.)); + } >EXTERNAL + + data_internal_loadable_addr = __data_internal_clone_start; + data_external_loadable_addr = __data_external_clone_start; + + /DISCARD/ : { + /* Note: The CEVA Debugger and Restriction Checker use information + * stored in the ".note.CEVA-arch" section. Do NOT discard this section + * for projects in development phase. This section has no effect on the + * application's footprint */ + *(.comment) + *(.note.GNU-stack) + /* The X-DSP ABI uses a custom relocation format stored in its own + * section. These are left in the binary by default but are unneeded. */ + *(.ceva_reloc) + } + +} diff --git a/tensorflow/lite/micro/tools/make/targets/ceva/CEVA_SP500_TFLM.ld b/tensorflow/lite/micro/tools/make/targets/ceva/CEVA_SP500_TFLM.ld new file mode 100755 index 0000000..244859a --- /dev/null +++ b/tensorflow/lite/micro/tools/make/targets/ceva/CEVA_SP500_TFLM.ld @@ -0,0 +1,235 @@ +/* Copyright 2019 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + + +OUTPUT(a.elf) + +/* By default, program starts from reset address (the default location of the interrupt table) */ +ENTRY(__cxd_inttbl_start) + +/** Memory configuration parameters. + * The parameters become application symbols and can be referred from application + */ +__internal_data_start = DEFINED(__internal_data_start) ? 
__internal_data_start : 0x00000000; +__internal_data_size = DEFINED(__internal_data_size ) ? __internal_data_size : 256k; +__external_start = DEFINED(__external_start ) ? __external_start : 0x20000000; +__external_size = DEFINED(__external_size ) ? __external_size : 0x60000000; +__rom_start = DEFINED(__rom_start ) ? __rom_start : 0xC0000000; +__rom_size = DEFINED(__rom_size ) ? __rom_size : 1024M; + +__malloc_size = DEFINED(__malloc_size ) ? __malloc_size : 16k; +__stack_size = DEFINED(__stack_size ) ? __stack_size : 16k; +__arg_sect_size = DEFINED(__arg_sect_size ) ? __arg_sect_size : 512; + +MEMORY { + INTERNAL_DATA (rw) : ORIGIN = __internal_data_start, LENGTH = __internal_data_size + EXTERNAL (rwx) : ORIGIN = __external_start , LENGTH = __external_size + ROM (rx) : ORIGIN = __rom_start , LENGTH = __rom_size +} + +SECTIONS { + .inttbl : ALIGN(0x20) { + /** The interrupt vector table. Contains the NMI + * and maskable interrupt handlers + */ + . = 0x0; + KEEP(*(.inttbl)) + . = ALIGN(0x20); + KEEP(*(.sinttbl)) + } >EXTERNAL + + .data.internal : ALIGN(0x20) { + PROVIDE(__data_internal_start = ABSOLUTE(.)); + /* Don't map any data at address zero to avoid issues with C NULL + * pointer checks + */ + . += 0x4; + + PROVIDE(__data_start = ABSOLUTE(.)); + *(.data .data.*) + PROVIDE(__data_end = ABSOLUTE(.)); + PROVIDE(__data_size = ABSOLUTE(+__data_end - __data_start)); + + PROVIDE(__sdata_start = ABSOLUTE(.)); + *(.sdata .sdata.*) + PROVIDE(__sdata_end = ABSOLUTE(.)); + PROVIDE(__sdata_size = ABSOLUTE(+__sdata_end - __sdata_start)); + + PROVIDE(__data_internal_end = ABSOLUTE(.)); + PROVIDE(__data_internal_size = ABSOLUTE(__data_internal_end - __data_internal_start)); + } >INTERNAL_DATA + + .data.internal.clone (NOLOAD) : ALIGN(0x20) { + PROVIDE(__data_internal_clone_start = ABSOLUTE(.)); + . = ABSOLUTE(. 
+ __data_internal_size); + } >INTERNAL_DATA + + .data.internal.ro : ALIGN(0x20) { + PROVIDE(__data_internal_ro_start = ABSOLUTE(.)); + PROVIDE(__rodata_start = ABSOLUTE(.)); + *(.rodata .rodata.*) + PROVIDE(__rodata_end = ABSOLUTE(.)); + PROVIDE(__rodata_size = ABSOLUTE(+__rodata_end - __rodata_start)); + + PROVIDE(__data_internal_ro_end = ABSOLUTE(.)); + PROVIDE(__data_internal_ro_size = ABSOLUTE(__data_internal_ro_end - __data_internal_ro_start)); + } >INTERNAL_DATA + + .cst.call : ALIGN(4) { + PROVIDE(__cst_call_start = ABSOLUTE(.)); + *(.cst.call) + PROVIDE(__cst_call_end = ABSOLUTE(.)); + } >INTERNAL_DATA + + .cst.mov : ALIGN(4) { + PROVIDE(__cst_mov_start = ABSOLUTE(.)); + *(.cst.mov) + PROVIDE(__cst_mov_end = ABSOLUTE(.)); + } >INTERNAL_DATA + + .bss (NOLOAD) : ALIGN(0x20) { + PROVIDE(__bss_start = ABSOLUTE(.)); + *(.bss .bss.*) + PROVIDE(__common_start = ABSOLUTE(.)); + *(COMMON) + PROVIDE(__common_end = ABSOLUTE(.)); + PROVIDE(__common_size = ABSOLUTE(+__common_end - __common_start)); + PROVIDE(__bss_end = ABSOLUTE(.)); + PROVIDE(__bss_size = ABSOLUTE(+__bss_end - __bss_start)); + } >INTERNAL_DATA + + __STACK_SECT (NOLOAD) : ALIGN(0x10) { + __stack_start = ABSOLUTE(.); + . = . + __stack_size; + __stack_end = ABSOLUTE(.); + } >INTERNAL_DATA + + .text : ALIGN(0x20) { + PROVIDE(__text_start = ABSOLUTE(.)); + /* The __call_saved* functions need to be placed at low addresses for + * calling with absolute call instructions + */ + *(.text.__call_saved*) + *(.text .text.*) + /* Program sections in external memory should be aligned to the fetch line width + */ + . = ALIGN(0x20); + PROVIDE(__text_end = ABSOLUTE(.)); + } >EXTERNAL + + .data.external : ALIGN(0x20) { + /** .data1, .rodata1, .sdata1 are all for large symbols which cannot + * fit in limited internal memory. We put them in external memory by + * default. 
*/ + PROVIDE(__data_external_start = ABSOLUTE(.)); + + PROVIDE(__data1_start = ABSOLUTE(.)); + *(.data1 .data1.*) + PROVIDE(__data1_end = ABSOLUTE(.)); + + PROVIDE(__sdata1_start = ABSOLUTE(.)); + *(.sdata1 .sdata1.*) + PROVIDE(__sdata1_end = ABSOLUTE(.)); + PROVIDE(__sdata1_size = ABSOLUTE(+__sdata1_end - __sdata1_start)); + + PROVIDE(__data_external_end = ABSOLUTE(.)); + PROVIDE(__data_external_size = ABSOLUTE(__data_external_end - __data_external_start)); + } >EXTERNAL + + .data.external.clone (NOLOAD) : ALIGN(0x20) { + PROVIDE(__data_external_clone_start = ABSOLUTE(.)); + . = ABSOLUTE(. + __data_external_size); + } >EXTERNAL + + .data.external.ro : ALIGN(0x20) { + /** .data1, .rodata1, .sdata1 are all for large symbols which cannot + * fit in limited internal memory. We put them in external memory by + * default. */ + PROVIDE(__data_external_ro_start = ABSOLUTE(.)); + + PROVIDE(__rodata1_start = ABSOLUTE(.)); + *(.rodata1 .rodata1.*) + PROVIDE(__rodata1_end = ABSOLUTE(.)); + PROVIDE(__rodata1_size = ABSOLUTE(+__rodata1_end - __rodata1_start)); + + /* Constructors and destructors are called once per program invocation, + * so are never in the hot path; they shouldn't waste space in limited + * internal memory so we place them in slower, external memory */ + + . 
= ALIGN(4); /* constructors must be aligned on a word boundary */ + PROVIDE(__init_array_start = ABSOLUTE(.)); + KEEP(*(SORT_BY_INIT_PRIORITY(.init_array.*) SORT_BY_INIT_PRIORITY(.ctors.*))); + KEEP(*(SORT_BY_INIT_PRIORITY(.init_array*) SORT_BY_INIT_PRIORITY(.ctors*))); + PROVIDE(__init_array_end = ABSOLUTE(.)); + + PROVIDE(__fini_array_start = ABSOLUTE(.)); + /* destructors are run in reverse order of their priority */ + KEEP(*(SORT_BY_INIT_PRIORITY(.fini_array.*) SORT_BY_INIT_PRIORITY(.dtors.*))); + KEEP(*(SORT_BY_INIT_PRIORITY(.fini_array*) SORT_BY_INIT_PRIORITY(.dtors*))); + PROVIDE(__fini_array_end = ABSOLUTE(.)); + + PROVIDE(__data_external_ro_end = ABSOLUTE(.)); + PROVIDE(__data_external_ro_size = ABSOLUTE(__data_external_ro_end - __data_external_ro_start)); + } >EXTERNAL + + .bss1 (NOLOAD) : ALIGN(0x20) { + /** + * `.bss1` is for large zero-initialized symbols that do not fit in + * internal data + */ + PROVIDE(__bss1_start = ABSOLUTE(.)); + *(.bss1 .bss1.*) + PROVIDE(__large_common_start = ABSOLUTE(.)); + *(LARGE_COMMON) + PROVIDE(__large_common_end = ABSOLUTE(.)); + PROVIDE(__large_common_size = ABSOLUTE(+__large_common_end - __large_common_start)); + PROVIDE(__bss1_end = ABSOLUTE(.)); + PROVIDE(__bss1_size = ABSOLUTE(+__bss1_end - __bss1_start)); + } >EXTERNAL + + /* Program arguments are loaded by `_start` routine from `__arg_sect_start`. + * When the user has set a zero size for the section, argc and argv + * will be zero and NULL, respectively. + * Although likely small, they are on the slow path so by default they + * go at the end of external memory + */ + __ARG_SECT (NOLOAD) : ALIGN(0x4) { + __arg_sect_start = .; + . = . + (__arg_sect_size ? __arg_sect_size + 4 : 0); + __arg_sect_end = .; + } >EXTERNAL + + __MALLOC_SECT (NOLOAD) : ALIGN(0x10) { + PROVIDE(__malloc_start = ABSOLUTE(.)); + . = . 
+ __malloc_size; + PROVIDE(__malloc_end = ABSOLUTE(.)); + } >EXTERNAL + + data_internal_loadable_addr = __data_internal_clone_start; + data_external_loadable_addr = __data_external_clone_start; + + /DISCARD/ : { + /* Note: The CEVA Debugger and Restriction Checker use information + * stored in the ".note.CEVA-arch" section. Do NOT discard this section + * for projects in development phase. This section has no effect on the + * application's footprint */ + *(.comment) + *(.note.GNU-stack) + /* The X-DSP ABI uses a custom relocation format stored in its own + * section. These are left in the binary by default but are unneeded. */ + *(.ceva_reloc) + } + +} diff --git a/tensorflow/lite/micro/tools/make/targets/ceva_makefile.inc b/tensorflow/lite/micro/tools/make/targets/ceva_makefile.inc new file mode 100755 index 0000000..5cc8ad1 --- /dev/null +++ b/tensorflow/lite/micro/tools/make/targets/ceva_makefile.inc @@ -0,0 +1,96 @@ +TARGET_ARCH := + +ifeq ($(TARGET_ARCH), ) + $(error TARGET_ARCH must be specified on the command line) +endif + +# Create a cflag based on the specified TARGET_ARCH. 
For example: +# TARGET_ARCH=CEVA_BX1 --> -DCEVA_BX1 +# TARGET_ARCH=CEVA_SP500 --> -DCEVA_SP500 +TARGET_ARCH_DEFINES := -D$(shell echo $(TARGET_ARCH) | tr [a-z] [A-Z]) + +CORE_OPTIMIZATION_LEVEL := -O4 + +#no need for -lm: +MICROLITE_LIBS := + +TARGET_TOOLCHAIN_PREFIX := +CXX_TOOL = clang++ +CC_TOOL = clang +LD_TOOL = ceva-elf-ld +LD = ceva-elf-ld +AR = ceva-elf-ar + +PLATFORM_FLAGS = \ +$(TARGET_ARCH_DEFINES) \ + -fmessage-length=0 \ + -fpermissive \ + -O4 \ + -g3 \ + -Wall \ + -pedantic \ + -D_LIBCPP_INLINE_VISIBILITY="" \ + -D_LIBCPP_EXTERN_TEMPLATE_INLINE_VISIBILITY="" + + +CXXFLAGS := -std=c++11 -DTF_LITE_STATIC_MEMORY +CCFLAGS := -std=c11 -DTF_LITE_STATIC_MEMORY + +ifeq ($(TARGET_ARCH), CEVA_BX1) +PLATFORM_FLAGS += \ + --target=cevabx1-elf \ + -mcpu=cevabx1v1.0.0 \ + -m32x32 \ + -mgetbits \ + -mloop-buffer-size=10 \ + -mfp=1 \ + -mdpfp=1 + + +LDFLAGS += \ + -T \ + tensorflow/lite/micro/tools/make/targets/ceva/CEVA_BX1_TFLM_18.0.5.ld \ + -L${TARGET_TOOLCHAIN_ROOT}_cevatools/lib/clang/9.0.1/cevabx1-unknown-unknown-elf/rtlv1.0.0-fp1-dpfp1/lib/ \ + -lc++ -lc++abi -lc -lcompiler-rt -lCEVA_TFLM_lib -lceva_dsp_lib \ + --for-linker --no-relax \ + --for-linker --no-gc-sections \ + --for-linker -defsym \ + --for-linker __internal_data_size=2048k \ + --for-linker -defsym \ + --for-linker __internal_code_size=256k \ + +endif + +ifeq ($(TARGET_ARCH), CEVA_SP500) +PLATFORM_FLAGS = \ + -pedantic \ + -Wa,--no-rstr-check \ + --target=senspro-elf \ + -mcpu=sensprov1.0.0 \ + -mvu=1 \ + -mno-vld2 \ + -mvmpyv5 \ + -mvmpyext -mnonlinear=1 -mno-vbnn -mvhist \ + -mlvu=1 \ + -mfp=2 \ + -mdpfp=2 \ + -mvfp=1 + + LDFLAGS += \ +--no-relax --no-gc-sections \ + -defsym __internal_code_size=0k \ + -defsym __internal_data_size=512k + +endif + +CXXFLAGS += $(PLATFORM_FLAGS) +CCFLAGS += $(PLATFORM_FLAGS) + +MICROLITE_CC_HDRS += \ + tensorflow/lite/micro/kernels/ceva/ceva_tflm_lib.h \ + tensorflow/lite/micro/kernels/ceva/types.h \ + tensorflow/lite/micro/kernels/ceva/ceva_common.h + + 
+MICROLITE_CC_SRCS += \ + tensorflow/lite/micro/kernels/ceva/ceva_common.cc diff --git a/tensorflow/lite/micro/tools/make/targets/chre_makefile.inc b/tensorflow/lite/micro/tools/make/targets/chre_makefile.inc new file mode 100644 index 0000000..3665b26 --- /dev/null +++ b/tensorflow/lite/micro/tools/make/targets/chre_makefile.inc @@ -0,0 +1,34 @@ +# Copyright 2020 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Remove flexbuffers library and detection postprocess kernel from chre build +# due to string dependencies. 
+EXCLUDED_CC_SRCS := \ + tensorflow/lite/micro/kernels/circular_buffer.cc \ + tensorflow/lite/micro/kernels/detection_postprocess.cc \ + tensorflow/lite/micro/kernels/flexbuffers_generated_data.cc + +EXCLUDED_TESTS := \ + tensorflow/lite/micro/kernels/detection_postprocess_test.cc + +EXCLUDED_HDRS := \ + third_party/flatbuffers/include/flatbuffers/flexbuffers.h + +EXCLUDED_KERNEL_HDRS := \ + tensorflow/lite/micro/kernels/flexbuffers_generated_data.h + +MICROLITE_CC_KERNEL_SRCS := $(filter-out $(EXCLUDED_CC_SRCS),$(MICROLITE_CC_KERNEL_SRCS)) +MICROLITE_TEST_SRCS := $(filter-out $(EXCLUDED_TESTS),$(MICROLITE_TEST_SRCS)) +THIRD_PARTY_CC_HDRS := $(filter-out $(EXCLUDED_HDRS),$(THIRD_PARTY_CC_HDRS)) +MICROLITE_CC_HDRS := $(filter-out $(EXCLUDED_KERNEL_HDRS),$(MICROLITE_CC_HDRS)) diff --git a/tensorflow/lite/micro/tools/make/targets/cortex_m_corstone_300_makefile.inc b/tensorflow/lite/micro/tools/make/targets/cortex_m_corstone_300_makefile.inc new file mode 100644 index 0000000..0ffe5a3 --- /dev/null +++ b/tensorflow/lite/micro/tools/make/targets/cortex_m_corstone_300_makefile.inc @@ -0,0 +1,204 @@ +# Copyright 2023 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================== + +# ARM Cortex M makefile targeted for a FVP based on Arm Corstone-300 software. 
+# For more info see: tensorflow/lite/micro/cortex_m_corstone_300/README.md + +export PATH := $(MAKEFILE_DIR)/downloads/corstone300/models/Linux64_GCC-6.4:$(PATH) +DOWNLOAD_RESULT := $(shell $(MAKEFILE_DIR)/corstone_300_download.sh ${MAKEFILE_DIR}/downloads) +ifneq ($(DOWNLOAD_RESULT), SUCCESS) + $(error Something went wrong with the Arm Corstone-300 software download: $(DOWNLOAD_RESULT)) +endif + +ETHOS_U_CORE_PLATFORM := ${PWD}/$(MAKEFILE_DIR)/downloads/ethos_u_core_platform/targets/corstone-300 +DOWNLOAD_RESULT := $(shell $(MAKEFILE_DIR)/ethos_u_core_platform_download.sh ${MAKEFILE_DIR}/downloads) +ifneq ($(DOWNLOAD_RESULT), SUCCESS) + $(error Something went wrong with the Ethos-U Core Platform software download: $(DOWNLOAD_RESULT)) +endif + +# This target has dependencies to CMSIS-Device so just in case running without OPTIMIZED_KERNEL_DIR=cmsis_nn. +CMSIS_DEFAULT_DOWNLOAD_PATH := $(MAKEFILE_DIR)/downloads/cmsis +CMSIS_PATH := $(CMSIS_DEFAULT_DOWNLOAD_PATH) +ifeq ($(CMSIS_PATH), $(CMSIS_DEFAULT_DOWNLOAD_PATH)) + DOWNLOAD_RESULT := $(shell $(MAKEFILE_DIR)/ext_libs/cmsis_download.sh ${MAKEFILE_DIR}/downloads) + ifneq ($(DOWNLOAD_RESULT), SUCCESS) + $(error Something went wrong with the CMSIS download: $(DOWNLOAD_RESULT)) + endif +endif + +FLOAT := soft +MCPU_OPTION := $(TARGET_ARCH) + +# Linker and targets must match according to: +# https://www.keil.com/support/man/docs/armclang_mig/armclang_mig_aya1488905345341.htm +ifeq ($(TARGET_ARCH), cortex-m0) + ARMC6_LDFLAGS += -Wl,--cpu=Cortex-M0 + +else ifeq ($(TARGET_ARCH), cortex-m3) + ARMC6_LDFLAGS += -Wl,--cpu=Cortex-M3 + +else ifeq ($(TARGET_ARCH), cortex-m4) + ARMC6_LDFLAGS += -Wl,--cpu=Cortex-M4.no_fp + MCPU_OPTION := cortex-m4+nofp + +else ifeq ($(TARGET_ARCH), cortex-m4+fp) + ARMC6_LDFLAGS += -Wl,--cpu=Cortex-M4 + FLOAT=hard + MCPU_OPTION := cortex-m4 + CMSIS_ARM_FEATURES := _FP + +else ifeq ($(TARGET_ARCH), cortex-m55) + ARMC6_LDFLAGS += -Wl,--cpu=8.1-M.Main.mve.fp + FLOAT=hard + +else ifeq ($(TARGET_ARCH), 
cortex-m7) + ARMC6_LDFLAGS += -Wl,--cpu=Cortex-M7.no_fp + MCPU_OPTION := cortex-m7+nofp + +else ifeq ($(TARGET_ARCH), cortex-m7+fp) + ARMC6_LDFLAGS += -Wl,--cpu=Cortex-M7 + FLOAT=hard + MCPU_OPTION := cortex-m7 + CMSIS_ARM_FEATURES := _DP + +else + $(error "TARGET_ARCH=$(TARGET_ARCH) is not supported") +endif + +ifneq ($(filter cortex-m55%,$(TARGET_ARCH)),) + # soft-abi=soft disables MVE - use softfp instead for M55. + ifeq ($(FLOAT),soft) + FLOAT=softfp + endif +endif + +# Filter out part of mcpu string for choosing the correct startup files +ARM_CPU := $(subst cortex-m,ARMCM,$(MCPU_OPTION)) +ARM_CPU := $(subst +nofp,,$(ARM_CPU)) + +ifeq ($(TOOLCHAIN), armclang) + CXX_TOOL := armclang + CC_TOOL := armclang + AR_TOOL := armar + LD := armlink + + FLAGS_ARMC = \ + --target=arm-arm-none-eabi \ + -Wno-unused-private-field \ + -mcpu=$(MCPU_OPTION) \ + -ffp-mode=full + + # Pass comma separated linker options to armlink + ARMC6_LDFLAGS += -Wl,--strict,--summary_stderr,--info,summarysizes,--map + ARMC6_LDFLAGS += -Wl,--load_addr_map_info,--xref,--callgraph,--symbols + ARMC6_LDFLAGS += -Wl,--info,sizes,--info,totals,--info,unused,--info,veneers + ARMC6_LDFLAGS += -Wl,--list=${TENSORFLOW_ROOT}gen/$(TARGET).map + ARMC6_LDFLAGS += -Wl,--entry=Reset_Handler --verbose + ARMC6_LDFLAGS += -Wl,--scatter=$(ETHOS_U_CORE_PLATFORM)/platform.scatter + + # Pass a hint to the linker where to find the entry point. This needs to be + # done since the startup object file (containing the entry point) is inside + # the TFLM library. 
See: + # https://developer.arm.com/documentation/ka003125/latest + ARMC6_LDFLAGS += -Wl,$(LIBDIR)/$(MICROLITE_LIB_NAME)\(startup_$(ARM_CPU).o\) + + CXXFLAGS += $(FLAGS_ARMC) + CCFLAGS += $(FLAGS_ARMC) + LDFLAGS := $(ARMC6_LDFLAGS) + + MICROLITE_CC_KERNEL_SRCS := $(filter-out $(EXCLUDED_CC_SRCS),$(MICROLITE_CC_KERNEL_SRCS)) + THIRD_PARTY_CC_HDRS := $(filter-out $(EXCLUDED_HDRS),$(THIRD_PARTY_CC_HDRS)) + MICROLITE_CC_HDRS := $(filter-out $(EXCLUDED_KERNEL_HDRS),$(MICROLITE_CC_HDRS)) + + # Arm Compiler will not link the Math library (see below), therefore we're filtering it out. + # See Fatal error: L6450U: Cannot find library m: + # "Arm Compiler is designed to run in a bare metal environment, + # and automatically includes implementations of these functions, + # and so no such flag is necessary." + # https://developer.arm.com/documentation/100891/0611/troubleshooting/general-troubleshooting-advice + MICROLITE_LIBS := $(filter-out -lm,$(MICROLITE_LIBS)) + +else ifeq ($(TOOLCHAIN), gcc) + TARGET_DEFAULT_TOOLCHAIN_ROOT := $(MAKEFILE_DIR)/downloads/gcc_embedded/bin/ + TARGET_TOOLCHAIN_ROOT := $(TARGET_DEFAULT_TOOLCHAIN_ROOT) + ifeq ($(TARGET_TOOLCHAIN_ROOT), $(TARGET_DEFAULT_TOOLCHAIN_ROOT)) + DOWNLOAD_RESULT := $(shell $(MAKEFILE_DIR)/arm_gcc_download.sh ${MAKEFILE_DIR}/downloads) + ifneq ($(DOWNLOAD_RESULT), SUCCESS) + $(error Something went wrong with the GCC download: $(DOWNLOAD_RESULT)) + endif + endif + TARGET_TOOLCHAIN_PREFIX := arm-none-eabi- + + FLAGS_GCC = -mcpu=$(MCPU_OPTION) -mfpu=auto + CXXFLAGS += $(FLAGS_GCC) + CCFLAGS += $(FLAGS_GCC) + + LDFLAGS += \ + --specs=nosys.specs \ + -T $(ETHOS_U_CORE_PLATFORM)/platform_parsed.ld \ + -Wl,-Map=${TENSORFLOW_ROOT}gen/$(TARGET).map,--cref \ + -Wl,--gc-sections \ + --entry Reset_Handler + + ldflags_to_remove = -Wl,--fatal-warnings + LDFLAGS := $(filter-out $(ldflags_to_remove),$(LDFLAGS)) + +else + $(error "TOOLCHAIN=$(TOOLCHAIN) is not supported.") +endif + +PLATFORM_FLAGS = \ + -DTF_LITE_MCU_DEBUG_LOG \ + -mthumb \ 
+ -mfloat-abi=$(FLOAT) \ + -funsigned-char \ + -mlittle-endian \ + -fomit-frame-pointer \ + -MD + +# Common + C/C++ flags +CXXFLAGS += $(PLATFORM_FLAGS) +CCFLAGS += $(PLATFORM_FLAGS) + +CXXFLAGS += -D$(ARM_CPU)$(CMSIS_ARM_FEATURES) +CCFLAGS += -D$(ARM_CPU)$(CMSIS_ARM_FEATURES) + +# For Ethos-U Core Driver. Header file name is depending on target architecture. +CXXFLAGS += -DCMSIS_DEVICE_ARM_CORTEX_M_XX_HEADER_FILE=\"$(ARM_CPU)$(CMSIS_ARM_FEATURES).h\" + +THIRD_PARTY_CC_SRCS += \ + $(ETHOS_U_CORE_PLATFORM)/retarget.c \ + $(ETHOS_U_CORE_PLATFORM)/uart.c + +ifeq ($(CO_PROCESSOR), ethos_u) + ETHOSU_ARCH=u55 +endif + +CMSIS_DEFAULT_DOWNLOAD_PATH := $(MAKEFILE_DIR)/downloads/cmsis +CMSIS_PATH := $(CMSIS_DEFAULT_DOWNLOAD_PATH) +THIRD_PARTY_CC_SRCS += \ + $(CMSIS_PATH)/Device/ARM/$(ARM_CPU)/Source/system_$(ARM_CPU).c \ + $(CMSIS_PATH)/Device/ARM/$(ARM_CPU)/Source/startup_$(ARM_CPU).c +INCLUDES += \ + -I$(CMSIS_PATH)/Device/ARM/$(ARM_CPU)/Include \ + -I$(CMSIS_PATH)/CMSIS/Core/Include + +# TODO(#274): Examine why some tests fail here. +EXCLUDED_TESTS := \ + tensorflow/lite/micro/memory_arena_threshold_test.cc \ + tensorflow/lite/micro/recording_micro_allocator_test.cc +MICROLITE_TEST_SRCS := $(filter-out $(EXCLUDED_TESTS), $(MICROLITE_TEST_SRCS)) + +TEST_SCRIPT := tensorflow/lite/micro/testing/test_with_arm_corstone_300.sh diff --git a/tensorflow/lite/micro/tools/make/targets/cortex_m_generic_makefile.inc b/tensorflow/lite/micro/tools/make/targets/cortex_m_generic_makefile.inc new file mode 100644 index 0000000..0ed14fc --- /dev/null +++ b/tensorflow/lite/micro/tools/make/targets/cortex_m_generic_makefile.inc @@ -0,0 +1,206 @@ +# Copyright 2022 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================== + +# Generic Makefile target for ARM Cortex M builds. +# For more info see: tensorflow/lite/micro/cortex_m_generic/README.md + +FLOAT := soft +GCC_TARGET_ARCH := $(TARGET_ARCH) + +# Explicitly set this to true to include the kissfft symbols. +INCLUDE_MICRO_SPEECH := false + +ifeq ($(TARGET_ARCH), cortex-m0) + CORE=M0 + ARM_LDFLAGS := -Wl,--cpu=Cortex-M0 + +else ifeq ($(TARGET_ARCH), cortex-m0plus) + CORE=M0plus + ARM_LDFLAGS := -Wl,--cpu=Cortex-M0plus + +else ifeq ($(TARGET_ARCH), cortex-m3) + CORE=M3 + ARM_LDFLAGS := -Wl,--cpu=Cortex-M3 + +else ifeq ($(TARGET_ARCH), cortex-m33) + CORE=M33 + ARM_LDFLAGS := -Wl,--cpu=Cortex-M33 + CMSIS_ARM_FEATURES := _DSP_FP + FLOAT=hard + +else ifeq ($(TARGET_ARCH), cortex-m33+nodsp) + CORE=M33 + ARM_LDFLAGS := -Wl,--cpu=Cortex-M33.no_dsp.no_fp + +else ifeq ($(TARGET_ARCH), cortex-m4) + CORE=M4 + ARM_LDFLAGS := -Wl,--cpu=Cortex-M4.no_fp + GCC_TARGET_ARCH := cortex-m4+nofp + +else ifeq ($(TARGET_ARCH), cortex-m4+fp) + CORE=M4 + ARM_LDFLAGS := -Wl,--cpu=Cortex-M4 + CMSIS_ARM_FEATURES := _FP + FLOAT=hard + GCC_TARGET_ARCH := cortex-m4 + +else ifeq ($(TARGET_ARCH), cortex-m4+sfp) + CORE=M4 + ARM_LDFLAGS := -Wl,--cpu=Cortex-M4 + CMSIS_ARM_FEATURES := _FP + FLOAT=softfp + GCC_TARGET_ARCH := cortex-m4 + +else ifeq ($(TARGET_ARCH), cortex-m55) + CORE=M55 + ARM_LDFLAGS := -Wl,--cpu=8.1-M.Main.mve.fp + FLOAT=hard + +else ifeq ($(TARGET_ARCH), cortex-m55+nodsp+nofp) + CORE=M55 + ARM_LDFLAGS := -Wl,--cpu=8.1-M.Main.mve.no_dsp.no_fp + +else ifeq 
($(TARGET_ARCH), cortex-m55+nofp) + CORE=M55 + ARM_LDFLAGS := -Wl,--cpu=8.1-M.Main.mve.no_fp + +else ifeq ($(TARGET_ARCH), cortex-m7) + CORE=M7 + ARM_LDFLAGS := -Wl,--cpu=Cortex-M7.no_fp + GCC_TARGET_ARCH := cortex-m7+nofp + +else ifeq ($(TARGET_ARCH), cortex-m7+fp) + CORE=M7 + ARM_LDFLAGS := -Wl,--cpu=Cortex-M7 + FLOAT=hard + GCC_TARGET_ARCH := cortex-m7 + CMSIS_ARM_FEATURES := _DP + +else ifeq ($(TARGET_ARCH), cortex-m85) + CORE=M85 + ARM_LDFLAGS := -Wl,--cpu=8.1-M.Main.mve.fp + FLOAT=hard + # GCC does not yet support cortex-m85 option hence go with cortex-m55 for now. + GCC_TARGET_ARCH := cortex-m55 + +else ifeq ($(TARGET_ARCH), project_generation) + # No flags needed here as project_generation does not build anything. +else + $(error "TARGET_ARCH=$(TARGET_ARCH) is not supported") +endif + +# Dependency to CMSIS-Device for DWT/PMU counters. +ARM_CPU := "ARMC$(CORE)" +CMSIS_DEFAULT_DOWNLOAD_PATH := $(MAKEFILE_DIR)/downloads/cmsis +CMSIS_PATH := $(CMSIS_DEFAULT_DOWNLOAD_PATH) +INCLUDES += \ + -I$(CMSIS_PATH)/Device/ARM/$(ARM_CPU)/Include \ + -I$(CMSIS_PATH)/CMSIS/Core/Include + +ifneq ($(filter cortex-m55%,$(TARGET_ARCH)),) + # soft-abi=soft disables MVE - use softfp instead for M55. + ifeq ($(FLOAT),soft) + FLOAT=softfp + endif +endif + +# Toolchain specfic flags +ifeq ($(TOOLCHAIN), armclang) + CXX_TOOL := armclang + CC_TOOL := armclang + AR_TOOL := armar + LD := armlink + + FLAGS_ARMC = \ + --target=arm-arm-none-eabi \ + -mcpu=$(TARGET_ARCH) + + CXXFLAGS += $(FLAGS_ARMC) + CCFLAGS += $(FLAGS_ARMC) + LDFLAGS += $(ARM_LDFLAGS) + + # Arm Compiler will not link the Math library (see below), therefore we're filtering it out. + # See Fatal error: L6450U: Cannot find library m: + # "Arm Compiler is designed to run in a bare metal environment, + # and automatically includes implementations of these functions, + # and so no such flag is necessary." 
+ # https://developer.arm.com/documentation/100891/0611/troubleshooting/general-troubleshooting-advice + MICROLITE_LIBS := $(filter-out -lm,$(MICROLITE_LIBS)) + +else ifeq ($(TOOLCHAIN), gcc) + TARGET_DEFAULT_TOOLCHAIN_ROOT := $(DOWNLOADS_DIR)/gcc_embedded/bin/ + TARGET_TOOLCHAIN_ROOT := $(TARGET_DEFAULT_TOOLCHAIN_ROOT) + ifeq ($(TARGET_TOOLCHAIN_ROOT), $(TARGET_DEFAULT_TOOLCHAIN_ROOT)) + DOWNLOAD_RESULT := $(shell $(MAKEFILE_DIR)/arm_gcc_download.sh ${DOWNLOADS_DIR} ${TENSORFLOW_ROOT}) + ifneq ($(DOWNLOAD_RESULT), SUCCESS) + $(error Something went wrong with the GCC download: $(DOWNLOAD_RESULT)) + endif + endif + + TARGET_TOOLCHAIN_PREFIX := arm-none-eabi- + + FLAGS_GCC = -mcpu=$(GCC_TARGET_ARCH) + ifeq ($(TARGET_ARCH), cortex-m4) + FLAGS_GCC += -mfpu=fpv4-sp-d16 + else ifeq ($(TARGET_ARCH), cortex-m7) + FLAGS_GCC += -mfpu=fpv4-sp-d16 + else + FLAGS_GCC += -mfpu=auto + endif + CXXFLAGS += $(FLAGS_GCC) + CCFLAGS += $(FLAGS_GCC) + +else + $(error "TOOLCHAIN=$(TOOLCHAIN) is not supported.") +endif + +PLATFORM_FLAGS = \ + -DTF_LITE_MCU_DEBUG_LOG \ + -mthumb \ + -mfloat-abi=$(FLOAT) \ + -funsigned-char \ + -mlittle-endian \ + -Wno-type-limits \ + -Wno-unused-private-field \ + -fomit-frame-pointer \ + -MD \ + -DCPU_$(CORE)=1 + +# For DWT/PMU counters. Header file name is depending on target architecture. +PLATFORM_FLAGS += -DCMSIS_DEVICE_ARM_CORTEX_M_XX_HEADER_FILE=\"$(ARM_CPU)$(CMSIS_ARM_FEATURES).h\" +PLATFORM_FLAGS += -D$(ARM_CPU) + +# Arm Cortex-M55 and Cortex-M85 use PMU counters. +ifneq ($(filter "ARMCM55" "ARMCM85",$(ARM_CPU)),) + PLATFORM_FLAGS += -DARM_MODEL_USE_PMU_COUNTERS +endif + +# Common + C/C++ flags +CXXFLAGS += $(PLATFORM_FLAGS) +CCFLAGS += $(PLATFORM_FLAGS) + +# Needed for the project generation interface. +MICROLITE_CC_HDRS += \ + $(TENSORFLOW_ROOT)tensorflow/lite/micro/cortex_m_generic/debug_log_callback.h + +# We only include micro_speech for project generation to allow for all the files +# to be downloaded. 
We do not include it for an actual build with the +# cortex_m_generic target to prevent kissfft symbols from getting included in +# libtensorflow-microlite.a which can result in symbol collision. +ifneq ($(TARGET_ARCH), project_generation) + EXCLUDED_EXAMPLE_TESTS := \ + $(TENSORFLOW_ROOT)tensorflow/lite/micro/examples/micro_speech/Makefile.inc + MICRO_LITE_EXAMPLE_TESTS := $(filter-out $(EXCLUDED_EXAMPLE_TESTS), $(MICRO_LITE_EXAMPLE_TESTS)) +endif diff --git a/tensorflow/lite/micro/tools/make/targets/cortex_m_qemu_makefile.inc b/tensorflow/lite/micro/tools/make/targets/cortex_m_qemu_makefile.inc new file mode 100644 index 0000000..a83fc18 --- /dev/null +++ b/tensorflow/lite/micro/tools/make/targets/cortex_m_qemu_makefile.inc @@ -0,0 +1,46 @@ +# Copyright 2023 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# ============================================================================== + +export PATH := $(DOWNLOADS_DIR)/gcc_embedded/bin/:$(PATH) +TARGET_TOOLCHAIN_PREFIX := arm-none-eabi- + +DOWNLOAD_RESULT := $(shell $(MAKEFILE_DIR)/arm_gcc_download.sh ${DOWNLOADS_DIR} $(TENSORFLOW_ROOT)) +ifneq ($(DOWNLOAD_RESULT), SUCCESS) + $(error Something went wrong with the GCC download: $(DOWNLOAD_RESULT)) +endif + +PLATFORM_FLAGS = \ + -DTF_LITE_MCU_DEBUG_LOG \ + -mcpu=$(TARGET_ARCH) \ + -mthumb \ + -mfloat-abi=soft \ + -mfpu=auto \ + -funsigned-char \ + -mlittle-endian \ + -fomit-frame-pointer + +# Enable semihosting for QEMU to enable host system calls for debug_log and micro_time. +PLATFORM_FLAGS += --specs=rdimon.specs + +CXXFLAGS += $(PLATFORM_FLAGS) +CCFLAGS += $(PLATFORM_FLAGS) + +# TODO(b/158651472): Fix the memory_arena_threshold_test +EXCLUDED_TESTS := \ + $(TENSORFLOW_ROOT)tensorflow/lite/micro/memory_arena_threshold_test.cc + +MICROLITE_TEST_SRCS := $(filter-out $(EXCLUDED_TESTS), $(MICROLITE_TEST_SRCS)) + +TEST_SCRIPT := $(TENSORFLOW_ROOT)tensorflow/lite/micro/testing/test_with_qemu.sh arm $(TARGET_ARCH) diff --git a/tensorflow/lite/micro/tools/make/targets/hexagon/download_hexagon.sh b/tensorflow/lite/micro/tools/make/targets/hexagon/download_hexagon.sh new file mode 100755 index 0000000..7d079f2 --- /dev/null +++ b/tensorflow/lite/micro/tools/make/targets/hexagon/download_hexagon.sh @@ -0,0 +1,42 @@ +#!/bin/bash + +# Copyright 2020 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================== + +# TODO(b/190754463): Delete this script once we have the Hexagon kernels checked +# in and integrated for all the workflows. +# +# Explanation and background can be found in: +# https://docs.google.com/document/d/1SlU5OcHEjdgs02ZCupo21mlLBJ6tE6D46FxUrQl8xUc/edit#heading=h.fshpxalu2qt4 + +# Usage: ./tensorflow/lite/micro/tools/make/targets/hexagon/download_hexagon.sh + +# Clone hexagon kernels to temp directory and check out known-good commit. +HEXAGON_DIR=/tmp/hexagon_optimized + +if [ ! -d ${HEXAGON_DIR} ]; then + mkdir -p ${HEXAGON_DIR} + git clone -b release_v2 https://source.codeaurora.org/quic/embedded_ai/tensorflow ${HEXAGON_DIR} +fi + +pushd ${HEXAGON_DIR} > /dev/null +git checkout 2d052806c211144875c89315a4fc6f1393064cf6 +popd > /dev/null + +# Copy optimized kernels from checkout, copy prebuilt lib. +rm -rf tensorflow/lite/micro/kernels/hexagon +cp -R ${HEXAGON_DIR}/tensorflow/lite/micro/kernels/hexagon tensorflow/lite/micro/kernels/hexagon +mkdir tensorflow/lite/micro/kernels/hexagon/lib +cp ${1} tensorflow/lite/micro/kernels/hexagon/lib/ diff --git a/tensorflow/lite/micro/tools/make/targets/hexagon_makefile.inc b/tensorflow/lite/micro/tools/make/targets/hexagon_makefile.inc new file mode 100644 index 0000000..dcd2ace --- /dev/null +++ b/tensorflow/lite/micro/tools/make/targets/hexagon_makefile.inc @@ -0,0 +1,118 @@ +# Settings for Hexagon toolchain. +# REQUIRED: +# - Hexagon SDK 3.5 Toolkit (for qurt, posix libs). +# HEXAGON_SDK_ROOT environment variable must be set to location of +# Hexagon_SDK// on your machine. +# - Hexagon Tools root (for hexagon-clang++, hexagon-sim). +# The tool folder may be a part of the Hexagon SDK +# (e.g. $(HEXAGON_SDK_ROOT)/tools/HEXAGON_Tools) or installed +# separately. 
+# HEXAGON_ROOT environment variable must be set to location of +# HEXAGON_Tools on your machine. +# - HEXAGON_TOOL_VER: The Hexagon tool version (installed under HEXAGON_ROOT). +# For example: 8.3.07 +# - HEXAGON_CPU_VER: The CPU version to use, will cause a compiler exception +# without providing a version. Valid values may vary depending on tools +# version, but generally in the range: v55-v67 +# +# Unlike other targets, there is not currently a way to automatically download +# the Hexagon SDK. For this reason, users are required to manually download +# and configure the SDK. + +TARGET_ARCH := hexagon + +ifndef HEXAGON_SDK_ROOT + $(error HEXAGON_SDK_ROOT is undefined) +endif + +ifndef HEXAGON_TOOL_VER + $(error HEXAGON_TOOL_VER is undefined) +endif + +ifndef HEXAGON_ROOT + $(error HEXAGON_ROOT is undefined) +endif + +ifndef HEXAGON_CPU_VER + $(error HEXAGON_CPU_VER is undefined) +endif + +ifneq ($(OPTIMIZED_KERNEL_DIR), ) + ifeq ($(HEXAGON_TFLM_LIB), ) + $(error HEXAGON_TFLM_LIB is undefined) + endif +endif + +HEXAGON_LPI_BUILD := + +PLATFORM_ARGS = \ + -DTF_LITE_MCU_DEBUG_LOG \ + -DTF_LITE_USE_CTIME \ + -DHEXAGON_ASM \ + -DMALLOC_IN_STDLIB \ + -DPTHREAD_STUBS \ + -DUSE_PREALLOCATED_BUFFER \ + -D_HAS_C9X \ + -MMD \ + -DHEXAGON \ + -Wall \ + -Wextra \ + -Wno-missing-field-initializers \ + -Wno-sign-compare \ + -Wno-unused-parameter \ + -Wno-write-strings \ + -Wunused-function \ + -Wno-unused-private-field \ + -Wvla \ + -fdata-sections \ + -ffunction-sections \ + -fmessage-length=0 \ + -fno-delete-null-pointer-checks \ + -fno-exceptions \ + -fno-register-global-dtors-with-atexit \ + -fno-rtti \ + -fno-short-enums \ + -fno-threadsafe-statics \ + -fno-unwind-tables \ + -fno-use-cxa-atexit \ + -fomit-frame-pointer \ + -fpermissive \ + -funsigned-char \ + -mcpu=$(HEXAGON_CPU_VER) \ + -m$(HEXAGON_CPU_VER) + +# See http://b/183462077 for more details on why we need -G0 for an LPI build.
+ifeq ($(HEXAGON_LPI_BUILD), true) + PLATFORM_ARGS += -G0 +endif + +export PATH := $(HEXAGON_ROOT)/$(HEXAGON_TOOL_VER)/Tools/bin:$(PATH) +TARGET_TOOLCHAIN_PREFIX := hexagon- +CXX_TOOL := clang++ +CC_TOOL := clang + +CXXFLAGS += $(PLATFORM_ARGS) +CCFLAGS += $(PLATFORM_ARGS) +LDFLAGS += \ + -Wl,--gc-sections -lhexagon \ + $(HEXAGON_ROOT)/$(HEXAGON_TOOL_VER)/Tools/target/hexagon/lib/v66/libstdc++.a \ + $(HEXAGON_TFLM_LIB) + +# TODO(b/190754463): Remove include path once download_hexagon is removed. +INCLUDES += \ + -I$(TENSORFLOW_ROOT)tensorflow/lite/micro/kernels/hexagon/inc \ + -I$(HEXAGON_SDK_ROOT)/libs/common/qurt/computev66/include/posix \ + -I$(HEXAGON_SDK_ROOT)/libs/common/qurt/computev66/include/qurt \ + -I${HEXAGON_SDK_ROOT}/rtos/qurt/computev66/include/posix \ + -I${HEXAGON_SDK_ROOT}/rtos/qurt/computev66/include/qurt + +# Excludes memory_arena_threshold_test because of the size difference between +# reference OP and optimized OP. +EXCLUDED_TESTS := \ + $(TENSORFLOW_ROOT)tensorflow/lite/micro/memory_arena_threshold_test.cc + +MICROLITE_TEST_SRCS := $(filter-out $(EXCLUDED_TESTS), $(MICROLITE_TEST_SRCS)) + +TEST_SCRIPT := $(TENSORFLOW_ROOT)tensorflow/lite/micro/testing/test_hexagon_binary.sh +SIZE_SCRIPT := $(TENSORFLOW_ROOT)tensorflow/lite/micro/testing/size_hexagon_binary.sh diff --git a/tensorflow/lite/micro/tools/make/targets/riscv32_generic_makefile.inc b/tensorflow/lite/micro/tools/make/targets/riscv32_generic_makefile.inc new file mode 100644 index 0000000..ce5f0eb --- /dev/null +++ b/tensorflow/lite/micro/tools/make/targets/riscv32_generic_makefile.inc @@ -0,0 +1,48 @@ +# Settings for RISCV 32-bit toolchain. 
+TARGET_ARCH := riscv32 +TARGET_TOOLCHAIN_PREFIX := riscv64-unknown-elf- + +TARGET_DEFAULT_TOOLCHAIN_ROOT := $(DOWNLOADS_DIR)/riscv_toolchain/bin/ +TARGET_TOOLCHAIN_ROOT := $(TARGET_DEFAULT_TOOLCHAIN_ROOT) +ifeq ($(TARGET_TOOLCHAIN_ROOT), $(TARGET_DEFAULT_TOOLCHAIN_ROOT)) + $(eval $(call add_third_party_download,$(RISCV_TOOLCHAIN_URL),$(RISCV_TOOLCHAIN_MD5),riscv_toolchain,)) +endif + +export PATH := $(TARGET_TOOLCHAIN_ROOT):$(PATH) + +PLATFORM_FLAGS = \ + -march=rv32imac \ + -mabi=ilp32 \ + -mcmodel=medany \ + -mexplicit-relocs \ + -fno-builtin-printf \ + -DTF_LITE_MCU_DEBUG_LOG \ + -DTF_LITE_USE_GLOBAL_CMATH_FUNCTIONS \ + -funsigned-char \ + -fno-delete-null-pointer-checks \ + -fomit-frame-pointer + +CXXFLAGS += $(PLATFORM_FLAGS) \ + -fpermissive \ + -fno-use-cxa-atexit \ + -DTF_LITE_USE_GLOBAL_MIN \ + -DTF_LITE_USE_GLOBAL_MAX + +CCFLAGS += $(PLATFORM_FLAGS) + +BUILD_TYPE := micro + +LDFLAGS += --specs=nano.specs + +# See http://b/158651472 for why memory arena threshold test is disabled. +EXCLUDED_TESTS := \ + $(TENSORFLOW_ROOT)tensorflow/lite/micro/memory_arena_threshold_test.cc + +MICROLITE_TEST_SRCS := $(filter-out $(EXCLUDED_TESTS), $(MICROLITE_TEST_SRCS)) + +# This disables the "linker relaxation" optimization, which produced incorrect code. +# TODO(b/279805615): Check whether this is fixed in newer versions of the toolchain. +LDFLAGS += -mno-relax +TEST_SCRIPT := $(TENSORFLOW_ROOT)tensorflow/lite/micro/testing/test_with_qemu.sh riscv32 rv32 +SIZE_SCRIPT := ${TENSORFLOW_ROOT}tensorflow/lite/micro/testing/size_riscv32_binary.sh + diff --git a/tensorflow/lite/micro/tools/make/targets/xtensa_makefile.inc b/tensorflow/lite/micro/tools/make/targets/xtensa_makefile.inc new file mode 100644 index 0000000..8d970c7 --- /dev/null +++ b/tensorflow/lite/micro/tools/make/targets/xtensa_makefile.inc @@ -0,0 +1,101 @@ +# Settings for Xtensa toolchain for the kernels. 
+# REQUIRED: +# Environment variables: +# - XTENSA_BASE must be set to location of +# the Xtensa developer tools installation directory. +# Command line arguments: +# - XTENSA_TOOLS_VERSION: For example: RI-2019.2-linux +# - XTENSA_CORE: The name of the Xtensa core to use +# For example: HIFI_190304_swupgrade + +TARGET_ARCH := +XTENSA_USE_LIBC := + +# Allow additional flags on the command line for debugging. +XTENSA_EXTRA_CFLAGS := + +ifndef XTENSA_BASE + $(error XTENSA_BASE is undefined) +endif + +ifndef XTENSA_TOOLS_VERSION + $(error XTENSA_TOOLS_VERSION is undefined) +endif + +ifndef XTENSA_CORE + $(error XTENSA_CORE is undefined) +endif + +ifeq ($(TARGET_ARCH), ) + $(error TARGET_ARCH must be specified on the command line) +endif + +# Create a cflag based on the specified TARGET_ARCH. For example: +# TARGET_ARCH=hifi4 --> -DHIFI4 +TARGET_ARCH_DEFINES := -D$(shell echo $(TARGET_ARCH) | tr '[a-z]' '[A-Z]') + +PLATFORM_FLAGS = \ + -DTF_LITE_MCU_DEBUG_LOG \ + -DTF_LITE_USE_CTIME \ + --xtensa-core=$(XTENSA_CORE) \ + -mcoproc \ + -DMAX_RFFT_PWR=9 \ + -DMIN_RFFT_PWR=MAX_RFFT_PWR \ + $(TARGET_ARCH_DEFINES) \ + -mlongcalls + +export PATH := $(XTENSA_BASE)/tools/$(XTENSA_TOOLS_VERSION)/XtensaTools/bin:$(PATH) +TARGET_TOOLCHAIN_PREFIX := xt- +CXX_TOOL := clang++ +CC_TOOL := clang + +# Unused exception related symbols make their way into a binary that links +# against TFLM as described in https://github.com/tensorflow/tensorflow/issues/47575. +# We have two options to avoid this. The first involves using -stdlib=libc++ and +# the second involves stubbing out and modifying some of the files in the Xtensa +# toolchain to prevent inclusion of the exception handling code +# (http://b/182209217#comment3). This Makefile supports building TFLM in a way +# that is compatible with either of the two approaches. +ifeq ($(XTENSA_USE_LIBC), true) + PLATFORM_FLAGS += -stdlib=libc++ +else + # TODO(b/150240249): Do not filter-out -fno-rtti once that works for the + # Xtensa toolchain.
+ CXXFLAGS := $(filter-out -fno-rtti, $(CXXFLAGS)) +endif + +CXXFLAGS += $(PLATFORM_FLAGS) +CCFLAGS += $(PLATFORM_FLAGS) + +CCFLAGS += $(XTENSA_EXTRA_CFLAGS) +CXXFLAGS += $(XTENSA_EXTRA_CFLAGS) + +TEST_SCRIPT := $(TENSORFLOW_ROOT)tensorflow/lite/micro/testing/test_xtensa_binary.sh +SIZE_SCRIPT := $(TENSORFLOW_ROOT)tensorflow/lite/micro/testing/size_xtensa_binary.sh + +# TODO(b/158651472): Fix the memory_arena_threshold_test +# TODO(b/174707181): Fix the micro_interpreter_test +EXCLUDED_TESTS := \ + $(TENSORFLOW_ROOT)tensorflow/lite/micro/memory_arena_threshold_test.cc +MICROLITE_TEST_SRCS := $(filter-out $(EXCLUDED_TESTS), $(MICROLITE_TEST_SRCS)) + +# TODO(b/156962140): This manually maintained list of excluded examples is +# quite error prone. +EXCLUDED_EXAMPLE_TESTS := \ + $(TENSORFLOW_ROOT)tensorflow/lite/micro/examples/hello_world/Makefile.inc \ + $(TENSORFLOW_ROOT)tensorflow/lite/micro/examples/image_recognition_experimental/Makefile.inc \ + $(TENSORFLOW_ROOT)tensorflow/lite/micro/examples/network_tester/Makefile.inc +MICRO_LITE_EXAMPLE_TESTS := $(filter-out $(EXCLUDED_EXAMPLE_TESTS), $(MICRO_LITE_EXAMPLE_TESTS)) +MICRO_LITE_EXAMPLE_TESTS += $(shell find $(TENSORFLOW_ROOT)third_party/xtensa/examples/ -name Makefile.inc) + +# Needed for LSTM support. 
+MICROLITE_CC_KERNEL_SRCS := $(MICROLITE_CC_KERNEL_SRCS) \ +$(TENSORFLOW_ROOT)tensorflow/lite/kernels/internal/reference/portable_tensor_utils.cc \ +$(TENSORFLOW_ROOT)tensorflow/lite/kernels/kernel_util.cc + +ifeq ($(OPTIMIZED_KERNEL_DIR), xtensa) + MICROLITE_CC_KERNEL_SRCS := $(MICROLITE_CC_KERNEL_SRCS) \ + $(TENSORFLOW_ROOT)tensorflow/lite/micro/kernels/xtensa/lstm_eval.cc \ + $(TENSORFLOW_ROOT)tensorflow/lite/micro/kernels/xtensa/lstm_eval_hifi.cc \ + $(TENSORFLOW_ROOT)tensorflow/lite/micro/kernels/xtensa/unidirectional_sequence_lstm.cc +endif diff --git a/tensorflow/lite/micro/tools/make/test_latency_log.sh b/tensorflow/lite/micro/tools/make/test_latency_log.sh new file mode 100755 index 0000000..20b36d3 --- /dev/null +++ b/tensorflow/lite/micro/tools/make/test_latency_log.sh @@ -0,0 +1,53 @@ +#!/bin/bash +# Copyright 2023 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================== +# This script is responsible for running the tests and also to log out the +# time (in seconds) it took to run the test file. It is using the linux time +# command to measure the latency. Setting the TIMEFORMAT to '%R' is providing +# us the real time latency. +# +# Called with following arguments: +# 1 - Name of the test file +# 2 - Name of the test script +# + +set -e + +# The TEST_SCRIPT can have a variable number of arguments. 
So, we remove the +# arguments that are only needed for test_latency_log.sh and pass all the +# remaining ones to the TEST_SCRIPT. +ARGS=("${@}") +TEST_FILE_NAME=${ARGS[0]} +unset ARGS[0] +TEST_SCRIPT=${ARGS[0]} +unset ARGS[0] + +# Output to stdout and stderr go to their normal places: +# Here we are opening 2 file descriptor, 3 and 4. FD 3 will redirect all the +# contents to stdout and 4 will redirect all the contents to stderr. Now when +# executing the TEST_SCRIPT command, we are redirecting all the stdout output of +# the command to FD 3 which will redirect everything to FD 1 (stdout) and all +# the stderr output of the command to FD 4 which will redirect everything to FD +# 2 (stderr). The output of the time command is captured in the time_log +# variable with the redirection of FD 2 (stderr) to FD 1 (stdout). Finally we +# are closing the FD 3 and 4. +# +# For more info +# https://stackoverflow.com/questions/4617489/get-values-from-time-command-via-bash-script +exec 3>&1 4>&2 +time_log=$( { TIMEFORMAT="%R"; time ${TEST_SCRIPT} "${ARGS[@]}" 1>&3 2>&4; } 2>&1 ) # Captures time output only. +exec 3>&- 4>&- + +echo "Running ${TEST_FILE_NAME} took ${time_log} seconds" diff --git a/tensorflow/lite/micro/tools/make/third_party_downloads.inc b/tensorflow/lite/micro/tools/make/third_party_downloads.inc new file mode 100644 index 0000000..a8e63e1 --- /dev/null +++ b/tensorflow/lite/micro/tools/make/third_party_downloads.inc @@ -0,0 +1,43 @@ +# Add URLs and MD5 checksums for third-party libraries here. +# We use mirror.tensorflow.org to cache copies of third-party files, +# but this is just an optimization applied manually by TensorFlow +# engineers, so add non-mirrored URLs if you need to update this +# in a pull request and we'll periodically copy them and update +# the URL. 
+ +GEMMLOWP_URL := "https://github.com/google/gemmlowp/archive/719139ce755a0f31cbf1c37f7f98adcc7fc9f425.zip" +GEMMLOWP_MD5 := "7e8191b24853d75de2af87622ad293ba" + +LEON_BCC2_URL := "http://mirror.tensorflow.org/www.gaisler.com/anonftp/bcc2/bin/bcc-2.0.7-gcc-linux64.tar.xz" +LEON_BCC2_MD5 := "cdf78082be4882da2a92c9baa82fe765" + +TSIM_URL := "http://mirror.tensorflow.org/www.gaisler.com/anonftp/tsim/tsim-eval-2.0.63.tar.gz" +TSIM_MD5 := "afa0095d3ed989a949e1467f94e41d2f" + +ifeq ($(HOST_OS),osx) + RISCV_TOOLCHAIN_URL := "http://mirror.tensorflow.org/static.dev.sifive.com/dev-tools/riscv64-unknown-elf-gcc-8.1.0-2019.01.0-x86_64-apple-darwin.tar.gz" + RISCV_TOOLCHAIN_MD5 := "2ac2fa00618b9ab7fa0c7d0ec173de94" +else + RISCV_TOOLCHAIN_URL := "http://mirror.tensorflow.org/static.dev.sifive.com/dev-tools/riscv64-unknown-elf-gcc-20181030-x86_64-linux-ubuntu14.tar.gz" + RISCV_TOOLCHAIN_MD5 := "2366b7afe36a54dc94fb0ff8a0830934" +endif + +RUY_URL := "https://github.com/google/ruy/archive/d37128311b445e758136b8602d1bbd2a755e115d.zip" +RUY_MD5 := "abf7a91eb90d195f016ebe0be885bb6e" + +PERSON_MODEL_INT8_URL := "https://storage.googleapis.com/download.tensorflow.org/data/tf_lite_micro_person_data_int8_grayscale_2020_12_1.zip" +PERSON_MODEL_INT8_MD5 := "e765cc76889db8640cfe876a37e4ec00" + +ifneq ($(filter $(ARC_TAGS), mli20_experimental),) +EMBARC_MLI_URL := "https://github.com/foss-for-synopsys-dwc-arc-processors/embarc_mli/archive/refs/tags/Release_2.0.zip" +EMBARC_MLI_MD5 := "13dcc1ea81ed836326a616e7e842ae4d" +else +EMBARC_MLI_URL := "https://github.com/foss-for-synopsys-dwc-arc-processors/embarc_mli/archive/refs/tags/Release_1.1.zip" +EMBARC_MLI_MD5 := "22555d76097727b00e731563b42cb098" +endif + +EMBARC_MLI_PRE_COMPILED_URL := "https://github.com/foss-for-synopsys-dwc-arc-processors/embarc_mli/releases/download/Release_1.1/embARC_MLI_package.zip" +EMBARC_MLI_PRE_COMPILED_MD5 := "173990c2dde4efef6a2c95b92d1f0244" + +ETHOSU_URL :=
"https://git.mlplatform.org/ml/ethos-u/ethos-u-core-driver.git/snapshot/ethos-u-core-driver-24455eedb9e8939f8a28ca0101a6f2d171e1b2f9.tar.gz" +ETHOSU_MD5 := "14b5712525d4af612d35217f0bc53fcc" diff --git a/tensorflow/lite/micro/tools/metrics/create_size_log.py b/tensorflow/lite/micro/tools/metrics/create_size_log.py new file mode 100644 index 0000000..84cab37 --- /dev/null +++ b/tensorflow/lite/micro/tools/metrics/create_size_log.py @@ -0,0 +1,144 @@ +# Copyright 2021 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================== +"""Script to build the required binaries, profile their size and generate log. 
+""" + +import argparse +import datetime +import os +import pandas as pd +import subprocess + + +def _build_a_binary(root_dir, binary_name, makefile_options): + os.chdir(root_dir) + + params_list = [ + "make", "-f", "tensorflow/lite/micro/tools/make/Makefile", binary_name + ] + ["%s=%s" % (key, value) for (key, value) in makefile_options.items()] + + process = subprocess.Popen(params_list, + stdout=subprocess.PIPE, + stderr=subprocess.PIPE) + stdout, stderr = process.communicate() + if process.returncode != 0: + raise RuntimeError("Building %s failed with \n\n %s" % + (" ".join(params_list), stderr.decode())) + + +def _profile_a_binary(root_dir, binary_name, makefile_options, build_info): + target_dir = "%s_%s_%s" % (makefile_options["TARGET"], + makefile_options["TARGET_ARCH"], + makefile_options["BUILD_TYPE"]) + binary_path = os.path.join(root_dir, 'gen/', target_dir, 'bin', binary_name) + csv_path = os.path.join(root_dir, 'data/continuous_builds/size_profiling', + target_dir, "%s.csv" % binary_name) + + # Run size command and extract the output + process = subprocess.Popen(["size", binary_path], + stdout=subprocess.PIPE, + stderr=subprocess.PIPE) + stdout, stderr = process.communicate() + if process.returncode != 0: + raise RuntimeError("size %s failed with \n\n %s" % + (binary_name, stderr.decode())) + + output_str = stdout.decode() + df = pd.DataFrame([line.split() for line in output_str.split('\n')[1:]], + columns=list(output_str.split('\n')[0].split())) + + # Append the output from the size to the CSV file + report = _create_or_read_csv(csv_path) + report.loc[len(report.index)] = [ + build_info["date"], build_info['sha'], df['text'][0], df['data'][0], + df['bss'][0], df['dec'][0] + ] + + report.to_csv(csv_path, index=False, header=False, mode='a') + + +def _create_or_read_csv(csv_file_name): + if os.path.exists(csv_file_name) is not True: + csv_df = pd.DataFrame( + columns=['date', 'sha', 'text', 'data', 'bss', 'total']) + csv_df.to_csv(csv_file_name, 
index=False, mode='w') + + csv_head = pd.read_csv(csv_file_name, index_col=False, nrows=0) + return csv_head + + +def _get_build_info(root_dir): + os.chdir(root_dir) + + current_time = str(datetime.datetime.now()) + + git_process = subprocess.Popen(["git", "rev-parse", "HEAD"], + stdout=subprocess.PIPE, + cwd=root_dir) + sha, err = git_process.communicate() + if git_process.returncode != 0: + raise RuntimeError("Git failed with %s" % err.decode()) + + return {'date': current_time, 'sha': sha.decode().strip('\n')} + + +def _build_and_profile(root_dir, makefile_options, binary_names): + build_info = _get_build_info(root_dir) + + for binary_name in binary_names: + _build_a_binary(root_dir, binary_name, makefile_options) + _profile_a_binary(root_dir, binary_name, makefile_options, build_info) + + +if __name__ == '__main__': + + parser = argparse.ArgumentParser() + default_binary_list_string = 'keyword_benchmark,baseline_memory_footprint,interpreter_memory_footprint' + parser.add_argument( + '--binary_list', + nargs='?', + const=default_binary_list_string, + default=default_binary_list_string, + help= + 'binary list separated by comma (e.g. keyword_benchmark,baseline_memory_footprint)' + ) + parser.add_argument('--build_type', + nargs='?', + const='release', + default='release', + help='build type (e.g. release)') + parser.add_argument('--target', + nargs='?', + const='linux', + default='linux', + help='host target (e.g. 
linux)') + parser.add_argument('--target_arch', + nargs='?', + const='x86_64', + default='x86_64', + help='target architecture (e.g x86_64)') + args = parser.parse_args() + + makefile_options = { + "BUILD_TYPE": args.build_type, + "TARGET": args.target, + "TARGET_ARCH": args.target_arch + } + binary_names = args.binary_list.split(',') + + script_path = os.path.dirname(os.path.realpath(__file__)) + root_dir = os.path.join(script_path, '../../../../..') + + _build_and_profile(root_dir, makefile_options, binary_names) diff --git a/tensorflow/lite/micro/tools/metrics/create_size_log_x86.sh b/tensorflow/lite/micro/tools/metrics/create_size_log_x86.sh new file mode 100755 index 0000000..7c0d1b3 --- /dev/null +++ b/tensorflow/lite/micro/tools/metrics/create_size_log_x86.sh @@ -0,0 +1,55 @@ +#!/bin/bash -e +# Copyright 2021 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================== +# +# Measures the size of specified binaries and append the report to a log. + +SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)" +ROOT_DIR=${SCRIPT_DIR}/../../../../.. 
+ +cd "${ROOT_DIR}" +source tensorflow/lite/micro/tools/ci_build/helper_functions.sh + +TARGET="linux" +TARGET_ARCH="x86_64" +BUILD_TYPE="release" + +# Clean the own build and download third party +readable_run make -f tensorflow/lite/micro/tools/make/Makefile clean clean_downloads +readable_run make -f tensorflow/lite/micro/tools/make/Makefile third_party_downloads + +BINARY_LIST="keyword_benchmark,baseline_memory_footprint,interpreter_memory_footprint" +python3 tensorflow/lite/micro/tools/metrics/create_size_log.py --build_type=${BUILD_TYPE} --target=${TARGET} --target_arch=${TARGET_ARCH} --binary_list=${BINARY_LIST} +LOG_GENERATION_STATUS=$? + +if [[ ${LOG_GENERATION_STATUS} != 0 ]] +then + echo "Failure in profiling." + exit -1 +fi + +echo "Success in size log generation" + +LOG_DIR="${ROOT_DIR}/data/continuous_builds/size_profiling/${TARGET}_${TARGET_ARCH}_${BUILD_TYPE}" +python3 tensorflow/lite/micro/tools/metrics/detect_size_increase_and_plot_history.py --input_dir=${LOG_DIR} --output_dir=${LOG_DIR} --binary_list=${BINARY_LIST} +SIZE_ALERT_STATUS=$? + +if [[ ${SIZE_ALERT_STATUS} != 0 ]] +then + echo "Size increase may exceed threshold" + exit -1 +fi + +echo "Size does not increase or size increase does not exceed threshold" \ No newline at end of file diff --git a/tensorflow/lite/micro/tools/metrics/detect_size_increase_and_plot_history.py b/tensorflow/lite/micro/tools/metrics/detect_size_increase_and_plot_history.py new file mode 100644 index 0000000..7549662 --- /dev/null +++ b/tensorflow/lite/micro/tools/metrics/detect_size_increase_and_plot_history.py @@ -0,0 +1,111 @@ +# Copyright 2021 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================== +"""Script to check whether the size exceeds a threshold and also plot size history as a graph. +""" + +import argparse +import pandas as pd +from matplotlib import pyplot as plt + +# Limit the size history check for the past 60 days +SIZE_HISTORY_DEPTH = 60 +# If a section of size log exceeds the below threshold, an error will be raised +SIZE_THRESHOLD_SETTING = { + "text": 512, + "total": 512, +} + + +def _plot_and_detect_size_increase_for_binary(input_dir, output_dir, + binary_name, threshold): + csv_path = '%s/%s.csv' % (input_dir, binary_name) + size_log = pd.read_csv(csv_path, index_col=False).iloc[-SIZE_HISTORY_DEPTH:] + size_log.reset_index(drop=True, inplace=True) + start_date = size_log.iloc[0, 0][0:10] + end_date = size_log.iloc[-1, 0][0:10] + + fig, axs = plt.subplots(3, 2) + fig.suptitle('Source: %s\n%s - %s' % (binary_name, start_date, end_date)) + + threshold_messages = [] + + for index, name in enumerate(['text', 'data', 'total']): + err_msg_or_none = _subplot_and_detect_size_increase( + axs, size_log, name, index, threshold) + if err_msg_or_none is not None: + threshold_messages.append('%s failure: %s' % + (binary_name, err_msg_or_none)) + + fig_path = '%s/%s.png' % (output_dir, binary_name) + fig.tight_layout() + plt.savefig(fig_path) + plt.clf() + + return threshold_messages + + +def _subplot_and_detect_size_increase(subplot_axs, size_log, section_name, row, + threshold): + subplot_axs[row, 0].set_title(section_name) + subplot_axs[row, 
0].plot(size_log[section_name], 'o-') + subplot_axs[row, 0].set_ylabel('Abs Sz(bytes)') + increased_size = size_log[section_name].diff() + subplot_axs[row, 1].plot(increased_size, 'o-') + subplot_axs[row, 1].set_ylabel('Incr Sz (bytes)') + + if section_name in threshold and len(increased_size) > 1: + if increased_size[1] > threshold[section_name]: + return '%s size increases by %d and exceeds threshold %d' % ( + section_name, increased_size[1], threshold[section_name]) + + # By default there is no size increase that exceeds the threshold + return None + + +def _detect_size_increase_and_plot_history(input_dir, output_dir, binary_list, + threshold_setting): + threshold_messages = [] + + for binary_name in binary_list: + threshold_messages += _plot_and_detect_size_increase_for_binary( + input_dir, output_dir, binary_name, threshold_setting) + + if len(threshold_messages) != 0: + raise RuntimeError(str(threshold_messages)) + + +if __name__ == '__main__': + + parser = argparse.ArgumentParser() + + default_binary_list_string = 'keyword_benchmark,baseline_memory_footprint,interpreter_memory_footprint' + parser.add_argument( + '--binary_list', + nargs='?', + const=default_binary_list_string, + default=default_binary_list_string, + help= + 'binary list separated by comma (e.g. keyword_benchmark,baseline_memory_footprint)' + ) + parser.add_argument('--input_dir', + help='Path to the size log file (e.g. ~/size_log') + parser.add_argument('--output_dir', help='Path to save plot to (e.g. 
/tmp/)') + + args = parser.parse_args() + + binary_names = args.binary_list.split(',') + + _detect_size_increase_and_plot_history(args.input_dir, args.output_dir, + binary_names, SIZE_THRESHOLD_SETTING) diff --git a/tensorflow/lite/micro/tools/model_transforms_utils.py b/tensorflow/lite/micro/tools/model_transforms_utils.py new file mode 100644 index 0000000..2b5c6a7 --- /dev/null +++ b/tensorflow/lite/micro/tools/model_transforms_utils.py @@ -0,0 +1,382 @@ +# Copyright 2023 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================== +"""Flatbuffer utility functions that are specific to TensorFlow Lite Micro. + +This module has a collection of utility functions that perform flatbuffer +manipulation that is unsupported in the TfLite converter but are still needed +for Micro-specific use cases. 
+ +Transformation functions breakdown: +go/tflm-flatbuffer-reduction-breakdown +""" + +import numpy as np + +from tflite_micro.tensorflow.lite.python import schema_py_generated as schema_fb +from tflite_micro.tensorflow.lite.python import schema_util +from tflite_micro.tensorflow.lite.micro.tools import tflite_flatbuffer_align_wrapper + + +def remove_extraneous_quantization_data(model): + """Remove min/max quantization data and shrink zero point arrays from weight/bias tensors.""" + for subgraph in model.subgraphs: + for tensor in subgraph.tensors: + if tensor.quantization is not None: + # Remove unused min and max arrays from all tensors. + tensor.quantization.max = [] + tensor.quantization.min = [] + + # ensure a zero point is present for this tensor (non-quantized models + # have quantization info = None) + if tensor.quantization.zeroPoint is None: + continue + + # We are only looking at reducing repeated zero points for the case of + # per-channel quantized tensor. So if the zero point is already a scalar + # (i.e. per tensor quantization) then we can exit early. + if len(tensor.quantization.zeroPoint) == 1: + continue + + # Only weight/bias tensors (where tensor.buffer != None) + # are per-channel quantized + if tensor.buffer is None: + continue + + # Only the first zero_point value is used in the per-channel quantized + # kernel implementation, which is assumed to be 0 anyways (TFLM only + # has support for symmetric quantization). 
+ if all(value == 0 for value in tensor.quantization.zeroPoint): + tensor.quantization.zeroPoint = [tensor.quantization.zeroPoint[0]] + else: + raise ValueError("TFLM only supports zero_point==0") + + +def shorten_variable_shared_names(model): + """Replaces shared names with a shorter string corresponding to a unique index.""" + unique_shared_names = [] + for subgraph in model.subgraphs: + for op in subgraph.operators: + builtin_code = schema_util.get_builtin_code_from_operator_code( + model.operatorCodes[op.opcodeIndex]) + if builtin_code == schema_fb.BuiltinOperator.VAR_HANDLE: + shared_name = op.builtinOptions.sharedName + if shared_name not in unique_shared_names: + unique_shared_names.append(shared_name) + op.builtinOptions.sharedName = str( + unique_shared_names.index(shared_name)) + + +def _remove_initialization_subgraph(model): + """Removes the resource variable initialization subgraph entirely from the flatbuffer for additional memory savings.""" + # assumption is made that subgraph indexed=1 is the resource variable + # initialization subgraph (subgraph containing only pairs of VAR_HANDLE + # and ASSIGN_VARIABLE OPs) + non_initialization_subgraphs_list = [model.subgraphs[0] + ] + model.subgraphs[2:] + + # TODO(b/279035671): add more documentation for why this is needed. 
+ # Make sure there is a proper VAR_HANDLE, ASSIGN_VARIABLE pair to allocate + # each resource variable + + # global (across model) dict to store each unique shared_name with a boolean + # conditional (True in the case it has an VAR_HANDLE/ASSIGN_VARIABLE pair in + # the same subgraph anywhere in the model) + shared_name_to_allocated_pair = {} + for subgraph in non_initialization_subgraphs_list: + # dict local to each subgraph matching a resource_id tensor with the unique + # resource variable shared_name (Tensor indices are specific to a subgraph) + id_to_shared_name_pairs = {} + for op in subgraph.operators: + builtin_code = schema_util.get_builtin_code_from_operator_code( + model.operatorCodes[op.opcodeIndex]) + + if builtin_code == schema_fb.BuiltinOperator.VAR_HANDLE: + shared_name = op.builtinOptions.sharedName + shared_name_to_allocated_pair.setdefault(shared_name, False) + resource_id_tensor = subgraph.tensors[op.outputs[0]] + id_to_shared_name_pairs.setdefault(resource_id_tensor, shared_name) + + elif builtin_code == schema_fb.BuiltinOperator.ASSIGN_VARIABLE: + resource_id_tensor = subgraph.tensors[op.inputs[0]] + shared_name = id_to_shared_name_pairs.get(resource_id_tensor) + shared_name_to_allocated_pair[shared_name] = True + + # We can not remove subgraph 1 if there are any resource variables that don't + # have a VAR_HANDLE/ASSIGN_VARIABLE pair. This is due to the specifics of how + # resource variable buffers are allocated in the TFLM runtime. + # See b/279035671 for more details. + if any(val == False for val in shared_name_to_allocated_pair.values()): + return + + # In preparation for removing subgraph 1 (resource variable initialization + # subgraph) from the flatbuffer, any subgraph indices used by other OPs will + # need to be updated to reflect the new change of having one fewer subgraph. 
+ for subgraph in model.subgraphs[2:]: + for op in subgraph.operators: + builtin_code = schema_util.get_builtin_code_from_operator_code( + model.operatorCodes[op.opcodeIndex]) + if builtin_code == schema_fb.BuiltinOperator.CALL_ONCE: + op.builtinOptions.initSubgraphIndex -= 1 + if builtin_code == schema_fb.BuiltinOperator.IF: + op.builtinOptions.thenSubgraphIndex -= 1 + op.builtinOptions.elseSubgraphIndex -= 1 + elif builtin_code == schema_fb.BuiltinOperator.WHILE: + op.builtinOptions.condSubgraphIndex -= 1 + op.builtinOptions.bodySubgraphIndex -= 1 + + # safe to remove subgraph 1 from the flatbuffer + model.subgraphs = non_initialization_subgraphs_list + + +def _remove_call_once_op(model): + """Removes CALL_ONCE op for the resource variable initialization subgraph.""" + updated_op_list = [] + for op in model.subgraphs[0].operators: + is_call_once = (schema_util.get_builtin_code_from_operator_code( + model.operatorCodes[op.opcodeIndex]) == + schema_fb.BuiltinOperator.CALL_ONCE) + + if is_call_once and op.builtinOptions.initSubgraphIndex == 1: + # We make the assumption that subgraph indexed 1 is the resource variable + # initialization subgraph, and as a result of the transformations, we no + # longer need to execute the subgraph during runtime. + continue + + updated_op_list.append(op) + + model.subgraphs[0].operators = updated_op_list + + +def _zero_bias_buffer(model, buffer_idx, zero_point): + # Only clear buffer if its all zero_points + # Ensure buffers are still present, but empty. This prevents the memory + # planner from allocating arrays for the ASSIGN_VARIABLE input tensors in + # subgraph 1. + buffer = model.buffers[buffer_idx] + if buffer.data is None: + buffer.data = [] + return + if buffer.data == []: + return + + # For now this assumes that zero_point is int8 and hence all the buffer + # data is as well. future work should update this to check for tensor.type + # match to numpy type to load the data properly. 
+ buffer_data = np.frombuffer(buffer.data, dtype=np.int8) + if all(value == zero_point for value in buffer_data): + buffer.data = [] + + +def _zero_resource_buffers(model): + """Zero out resource buffers. + + Ignores buffers which are used in multiple subgraphs (b/266017172). + Args: + model: The model to operate on, a schema_fb.ModelT object. + + Returns: + multi_subgraph_resource_buffers: list of resource variable buffers that are + in multiple subgraphs. + """ + multi_subgraph_resource_buffers = [] + # Each element in subgraph_buffers is a set containing the buffer index to + # all the corresponding tensors of that subgraph. + subgraph_buffers = [set() for _ in range(len(model.subgraphs))] + for i, buffer_set in enumerate(subgraph_buffers): + for tensor in model.subgraphs[i].tensors: + buffer_set.add(tensor.buffer) + + for subgraph in model.subgraphs: + for op in subgraph.operators: + builtin_code = schema_util.get_builtin_code_from_operator_code( + model.operatorCodes[op.opcodeIndex]) + if builtin_code == schema_fb.BuiltinOperator.ASSIGN_VARIABLE: + tensor = subgraph.tensors[op.inputs[1]] + buffer_idx = tensor.buffer + # List of subgraphs that use the buffer corresponding to the Op tensor + buffer_in_subgraph = [ + buffer_idx in buffer_set for buffer_set in subgraph_buffers + ] + # If the buffer was only in one subgraph, it implies that it is used + # for initialization only, and can be replaced with an empty array. 
+ if buffer_in_subgraph.count(True) == 1: + zero_point = 0 + if tensor.quantization.zeroPoint: + zero_point = tensor.quantization.zeroPoint[0] + _zero_bias_buffer(model, buffer_idx, zero_point) + else: + multi_subgraph_resource_buffers.append(buffer_idx) + return multi_subgraph_resource_buffers + + +def clear_resource_variable_buffers(model): + """Clear resource variable buffers, removes assocaited CALL_ONCE op, and the resource buffer initialization subgraph.""" + multi_subgraph_resource_buffers = _zero_resource_buffers(model) + + # * We are assuming the resource variable initializaiton subgraph index is 1. + if len(model.subgraphs) == 1: + return + found_non_resource_var_op = False + for op in model.subgraphs[1].operators: + builtin_code = schema_util.get_builtin_code_from_operator_code( + model.operatorCodes[op.opcodeIndex]) + if (builtin_code != schema_fb.BuiltinOperator.VAR_HANDLE + and builtin_code != schema_fb.BuiltinOperator.ASSIGN_VARIABLE): + found_non_resource_var_op = True + break + + if found_non_resource_var_op: + # since subgraph 1 has OPs other than those associated with initializing + # resource variables, we can't make any additional changes to the flatbuffer + return + + for tensor in model.subgraphs[1].tensors: + buffer_idx = tensor.buffer + if (tensor.type != schema_fb.TensorType.RESOURCE + and buffer_idx not in multi_subgraph_resource_buffers + and model.buffers[buffer_idx].data != []): + # if the entire initialization subgraph has not been cleared, we cannot + # make any additional changes to the flatbuffer + return + + # remove resource variable initialization subgraph + _remove_call_once_op(model) + _remove_initialization_subgraph(model) + + +def _numpy_from_tensor_type(tensor_type_idx): + """Gives the equivalent numpy dtype based on TensorType class (schema) number.""" + tensor_type_idx_to_numpy = { + schema_fb.TensorType.FLOAT32: np.float32, + schema_fb.TensorType.FLOAT16: np.float16, + schema_fb.TensorType.INT32: np.int32, + 
schema_fb.TensorType.UINT8: np.uint8, + schema_fb.TensorType.INT64: np.int64, + schema_fb.TensorType.STRING: np.string_, + schema_fb.TensorType.BOOL: np.bool_, + schema_fb.TensorType.INT16: np.int16, + schema_fb.TensorType.COMPLEX64: np.complex64, + schema_fb.TensorType.INT8: np.int8, + schema_fb.TensorType.FLOAT64: np.float64, + schema_fb.TensorType.COMPLEX128: np.complex128, + schema_fb.TensorType.UINT64: np.uint64, + schema_fb.TensorType.RESOURCE: "RESORCE", + schema_fb.TensorType.VARIANT: "VARIANT", + schema_fb.TensorType.UINT32: np.uint32, + schema_fb.TensorType.UINT16: np.uint16, + # INT4 is mapped to INT8, b/246806634 + schema_fb.TensorType.INT4: np.int8, + } + return tensor_type_idx_to_numpy.get(tensor_type_idx) + + +def _get_minmax_range_int(dtype): + """Returns the minimum and maximum range for an INT dtype.""" + return np.iinfo(dtype).min, np.iinfo(dtype).max + + +def _get_minmax_range_float(model, input_tensor): + """Returns the minimum and maximum range for a FLOAT input_tensor. + + Assumes only one subgraph. + If the tensor has an associated QUANTIZE Op, uses the quantization information + to determine a more accurate range for random values. + + Args: + model: schema_fb.ModelT model object (tflite model) + input_tensor: a FLOAT dtype schema_fb.TensorT input tensor which's range to + return + + Returns: + range_min, range_max: the min/max values the input could have. default to + [0, 1] + """ + if _numpy_from_tensor_type(input_tensor.type) != np.float32: + return + if not any(input_tensor == model.subgraphs[0].tensors[input_idx] + for input_idx in model.subgraphs[0].inputs): + return + # get associated quantize tensor + # if there are multiple FLOAT32 inputs that get quantized, we assume + # that each has their own quantize op, since quantize.cc ensures that + # NumInputs and NumOutput == 1. 
+ for op in model.subgraphs[0].operators: + if (schema_util.get_builtin_code_from_operator_code( + model.operatorCodes[op.opcodeIndex]) + == schema_fb.BuiltinOperator.QUANTIZE + and input_tensor == model.subgraphs[0].tensors[op.inputs[0]]): + # use quantized tensor information for a more accurate F32 range + quant_tensor = model.subgraphs[0].tensors[op.outputs[0]] + dtype = _numpy_from_tensor_type(quant_tensor.type) + scale = quant_tensor.quantization.scale[0] + zero_point = quant_tensor.quantization.zeroPoint[0] + # We add 1 to q_min to more accurately represent symmetrical + # quantization (for INT16) + r_min = float(np.iinfo(dtype).min + 1 - zero_point) * scale + r_max = float(np.iinfo(dtype).max - zero_point) * scale + return r_min, r_max + + return 0, 1 + + +def generate_random_input_data(model, input_tensor, random_number_generator): + """Generates random input data based on the tensor parameters (data_type and related quantization information). + + Not all input types are supported. RuntimeError is raised on unsupported type. + Assumes a single subgraph model. + + Args: + model: a tflite schema ModelT object + input_tensor: the TensorT object whose parameters are matched to generate + the random values. + random_number_generator: a numpy.random number generator to get random + values from + + Returns: + array of input_tensor.shape of random data according to the input dtype. + + Raises: + RuntimeError: for unsupported dtypes of input tensor. 
+ """ + dtype = _numpy_from_tensor_type(input_tensor.type) + + if dtype in (np.int8, np.int16): + range_min, range_max = _get_minmax_range_int(dtype) + return random_number_generator.integers( + low=range_min, + high=range_max, + size=input_tensor.shape, + dtype=dtype, + ) + elif dtype == np.float32: + range_min, range_max = _get_minmax_range_float(model, input_tensor) + return (range_max - range_min) * random_number_generator.random( + input_tensor.shape, dtype=dtype) + range_min + elif dtype == np.bool_: + range_min, range_max = 0, 1 + return random_number_generator.integers( + low=range_min, + high=range_max, + size=input_tensor.shape, + dtype=np.int8, + ).astype(bool) + else: + raise RuntimeError( + "Unsupported data type for generating data for input tensor.") + + +def tflite_flatbuffer_align(input_model_path, output_model_path): + tflite_flatbuffer_align_wrapper.align_tflite_model(input_model_path, + output_model_path) diff --git a/tensorflow/lite/micro/tools/project_generation/BUILD.testing b/tensorflow/lite/micro/tools/project_generation/BUILD.testing new file mode 100644 index 0000000..088b19c --- /dev/null +++ b/tensorflow/lite/micro/tools/project_generation/BUILD.testing @@ -0,0 +1,15 @@ +# standalone BUILD file used to test project generation with bazel. + +cc_library( + name = "libtflm", + srcs = glob(["tensorflow/**/*.cc", "tensorflow/**/*.c", "third_party/**/*.cc", "third_party/**/*.c"]), + hdrs = glob(["tensorflow/**/*.h", "third_party/**/*.h"]), + copts = [ + "-Ithird_party/gemmlowp", + "-Ithird_party/flatbuffers/include", + "-Ithird_party/kissfft", + "-Ithird_party/kissfft/tools", + "-Ithird_party/ruy", + ] +) + diff --git a/tensorflow/lite/micro/tools/project_generation/Makefile b/tensorflow/lite/micro/tools/project_generation/Makefile new file mode 100644 index 0000000..092ba7e --- /dev/null +++ b/tensorflow/lite/micro/tools/project_generation/Makefile @@ -0,0 +1,137 @@ +# Copyright 2022 The TensorFlow Authors. All Rights Reserved. 
+# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================== +# +# Simple Makefile that serves as a smokes-check for project generation on x86. +# +# Execute the following command after copying this Makefile to the root of the +# TFLM tree created with the project generation script: +# make -j8 examples + +BUILD_TYPE := + +COMMON_FLAGS := \ + -DTF_LITE_STATIC_MEMORY \ + -fno-unwind-tables \ + -ffunction-sections \ + -fdata-sections \ + -fmessage-length=0 + +CXX := clang++ +CC := clang +AR := ar + +INCLUDES := \ + -I. 
\ + -I./third_party/gemmlowp \ + -I./third_party/flatbuffers/include \ + -I./third_party/kissfft \ + -I./third_party/kissfft/tools \ + -I./third_party/ruy + +ifneq ($(TENSORFLOW_ROOT),) + INCLUDES += -I$(TENSORFLOW_ROOT) +endif + +ifneq ($(EXTERNAL_DIR),) + INCLUDES += -I$(EXTERNAL_DIR) +endif + +ifeq ($(BUILD_TYPE), cmsis_nn) + CXX := arm-none-eabi-g++ + CC := arm-none-eabi-gcc + AR := arm-none-eabi-ar + + INCLUDES += \ + -I./third_party/cmsis \ + -I./third_party/cmsis_nn \ + -I./third_party/cmsis_nn/Include \ + -I./third_party/cmsis/CMSIS/Core/Include + + COMMON_FLAGS += \ + -DTF_LITE_MCU_DEBUG_LOG \ + -DPROJECT_GENERATION \ + -mthumb \ + -mlittle-endian \ + -funsigned-char \ + -fomit-frame-pointer \ + -MD \ + -DCMSIS_NN + +endif + +CXXFLAGS := \ + -std=c++11 \ + -fno-rtti \ + -fno-exceptions \ + -fno-threadsafe-statics \ + $(COMMON_FLAGS) + +CCFLAGS := \ + -std=c11 \ + $(COMMON_FLAGS) + +ARFLAGS := -r + +GENDIR := gen +OBJDIR := $(GENDIR)/obj +BINDIR := $(GENDIR)/bin +LIB := $(GENDIR)/libtflm.a + +TFLM_CC_SRCS := $(shell find $(TENSORFLOW_ROOT)tensorflow -name "*.cc" -o -name "*.c") +OBJS := $(addprefix $(OBJDIR)/, $(patsubst %.c,%.o,$(patsubst %.cc,%.o,$(TFLM_CC_SRCS)))) + +$(OBJDIR)/%.o: %.cc + @mkdir -p $(dir $@) + $(CXX) $(CXXFLAGS) $(INCLUDES) -c $< -o $@ + +$(OBJDIR)/%.o: %.c + @mkdir -p $(dir $@) + $(CC) $(CCFLAGS) $(INCLUDES) -c $< -o $@ + +$(LIB): $(OBJS) + @mkdir -p $(dir $@) + $(AR) $(ARFLAGS) $(LIB) $(OBJS) + +clean: + rm -rf $(GENDIR) + +libtflm: $(LIB) + +HELLO_WORLD_SRCS := $(wildcard examples/hello_world/*.cc) +HELLO_WORLD_SRCS += $(wildcard examples/hello_world/models/*.cc) +HELLO_WORLD_INCLUDES := $(INCLUDES) -I./examples/hello_world + +hello_world: libtflm + @mkdir -p $(BINDIR) + $(CXX) $(CXXFLAGS) $(HELLO_WORLD_SRCS) $(HELLO_WORLD_INCLUDES) $(LIB) -o $(BINDIR)/$@ + +MICRO_SPEECH_SRCS := $(wildcard examples/micro_speech/*.cc) +MICRO_SPEECH_SRCS += $(wildcard examples/micro_speech/*/*.cc) +MICRO_SPEECH_THIRD_PARTY_SRCS := +MICRO_SPEECH_INCLUDES 
:= $(INCLUDES) -I./examples/micro_speech + +micro_speech: libtflm + @mkdir -p $(BINDIR) + $(CXX) $(CXXFLAGS) $(MICRO_SPEECH_SRCS) $(MICRO_SPEECH_THIRD_PARTY_SRCS) $(MICRO_SPEECH_INCLUDES) $(LIB) -o $(BINDIR)/$@ + +PERSON_DETECTION_SRCS := $(wildcard examples/person_detection/*.cc) +PERSON_DETECTION_THIRD_PARTY_SRCS := $(wildcard third_party/person_model_int8/*.cc) +PERSON_DETECTION_INCLUDES := $(INCLUDES) -I./examples/person_detection + +person_detection: libtflm + @mkdir -p $(BINDIR) + $(CXX) $(CXXFLAGS) $(PERSON_DETECTION_SRCS) $(PERSON_DETECTION_THIRD_PARTY_SRCS) $(PERSON_DETECTION_INCLUDES) $(LIB) -o $(BINDIR)/$@ + +examples: hello_world micro_speech person_detection diff --git a/tensorflow/lite/micro/tools/project_generation/create_tflm_tree.py b/tensorflow/lite/micro/tools/project_generation/create_tflm_tree.py new file mode 100644 index 0000000..e964a12 --- /dev/null +++ b/tensorflow/lite/micro/tools/project_generation/create_tflm_tree.py @@ -0,0 +1,303 @@ +# Copyright 2022 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================== +"""Starting point for writing scripts to integrate TFLM with external IDEs. + +This script can be used to output a tree containing only the sources and headers +needed to use TFLM for a specific configuration (e.g. target and +optimized_kernel_implementation). 
This should serve as a starting +point to integrate TFLM with external IDEs. + +The goal is for this script to be an interface that is maintained by the TFLM +team and any additional scripting needed for integration with a particular IDE +should be written external to the TFLM repository and built to work on top of +the output tree generated with this script. + +We will add more documentation for a desired end-to-end integration workflow as +we get further along in our prototyping. See this github issue for more details: + https://github.com/tensorflow/tensorflow/issues/47413 +""" + +import argparse +import fileinput +import os +import re +import shutil +import subprocess + + +def _get_dirs(file_list): + dirs = set() + for filepath in file_list: + dirs.add(os.path.dirname(filepath)) + return dirs + + +def _get_file_list(key, makefile_options, tensorflow_root): + params_list = [ + "make", "-f", + tensorflow_root + "tensorflow/lite/micro/tools/make/Makefile", key + ] + makefile_options.split() + process = subprocess.Popen(params_list, + stdout=subprocess.PIPE, + stderr=subprocess.PIPE) + stdout, stderr = process.communicate() + + if process.returncode != 0: + raise RuntimeError("%s failed with \n\n %s" % + (" ".join(params_list), stderr.decode())) + + return [bytepath.decode() for bytepath in stdout.split()] + + +def _third_party_src_and_dest_files(prefix_dir, makefile_options, + tensorflow_root): + src_files = [] + src_files.extend( + _get_file_list("list_third_party_sources", makefile_options, + tensorflow_root)) + src_files.extend( + _get_file_list("list_third_party_headers", makefile_options, + tensorflow_root)) + + # The list_third_party_* rules give path relative to the root of the git repo. + # However, in the output tree, we would like for the third_party code to be a + # tree under prefix_dir/third_party, with the path to the tflm_download + # directory removed. 
The path manipulation logic that follows removes the + # downloads directory prefix, and adds the third_party prefix to create a + # list of destination directories for each of the third party files. + # The only exception are third party files outside the downloads folder + # with absolute paths. + tflm_download_path = tensorflow_root + "tensorflow/lite/micro/tools/make/downloads" + dest_files = [] + third_party_path = os.path.join(prefix_dir, "third_party") + for f in src_files: + if os.path.isabs(f): + dest_files.append(os.path.normpath(third_party_path + f)) + else: + dest_files.append( + os.path.join(third_party_path, + os.path.relpath(f, tflm_download_path))) + + return src_files, dest_files + + +def _tflm_src_and_dest_files(prefix_dir, makefile_options, tensorflow_root): + src_files = [] + src_files.extend( + _get_file_list("list_library_sources", makefile_options, + tensorflow_root)) + src_files.extend( + _get_file_list("list_library_headers", makefile_options, + tensorflow_root)) + dest_files = [os.path.join(prefix_dir, src) for src in src_files] + return src_files, dest_files + + +def _get_src_and_dest_files(prefix_dir, makefile_options, tensorflow_root): + tflm_src_files, tflm_dest_files = _tflm_src_and_dest_files( + prefix_dir, makefile_options, tensorflow_root) + third_party_srcs, third_party_dests = _third_party_src_and_dest_files( + prefix_dir, makefile_options, tensorflow_root) + + all_src_files = tflm_src_files + third_party_srcs + all_dest_files = tflm_dest_files + third_party_dests + return all_src_files, all_dest_files + + +def _copy(src_files, dest_files): + for dirname in _get_dirs(dest_files): + os.makedirs(dirname, exist_ok=True) + + for src, dst in zip(src_files, dest_files): + shutil.copy(src, dst) + + +def _get_tflm_generator_path(tensorflow_root): + return _get_file_list("list_generator_dir", + "TENSORFLOW_ROOT=" + tensorflow_root, + tensorflow_root)[0] + + +# For examples, we are explicitly making a deicision to not have any source +# 
specialization based on the TARGET and OPTIMIZED_KERNEL_DIR. The thinking +# here is that any target-specific sources should not be part of the TFLM +# tree. Rather, this function will return an examples directory structure for +# x86 and it will be the responsibility of the target-specific examples +# repository to provide all the additional sources (and remove the unnecessary +# sources) for the examples to run on that specific target. +def _create_examples_tree(prefix_dir, examples_list, tensorflow_root): + files = [] + for e in examples_list: + files.extend( + _get_file_list("list_%s_example_sources" % (e), + "TENSORFLOW_ROOT=" + tensorflow_root, tensorflow_root)) + files.extend( + _get_file_list("list_%s_example_headers" % (e), + "TENSORFLOW_ROOT=" + tensorflow_root, tensorflow_root)) + + # The get_file_list gives path relative to the root of the git repo (where the + # examples are in tensorflow/lite/micro/examples). However, in the output + # tree, we would like for the examples to be under prefix_dir/examples. + tflm_examples_path = tensorflow_root + "tensorflow/lite/micro/examples" + tflm_downloads_path = tensorflow_root + "tensorflow/lite/micro/tools/make/downloads" + tflm_generator_path = _get_tflm_generator_path(tensorflow_root) + + # Some non-example source and headers will be in the {files} list. They need + # special handling or they will end up outside the {prefix_dir} tree. + dest_file_list = [] + for f in files: + if tflm_generator_path in f: + # file is generated during the build. + relative_path = os.path.relpath(f, tflm_generator_path) + full_filename = os.path.join(prefix_dir, relative_path) + # Allow generated example sources to be placed with their example. 
+ f = relative_path + if tflm_examples_path in f: + # file is in examples tree + relative_path = os.path.relpath(f, tflm_examples_path) + full_filename = os.path.join(prefix_dir, "examples", relative_path) + elif tflm_downloads_path in f: + # is third-party file + relative_path = os.path.relpath(f, tflm_downloads_path) + full_filename = os.path.join(prefix_dir, "third_party", relative_path) + else: + # not third-party and not examples, don't modify file name + # ex. tensorflow/lite/experimental/microfrontend + full_filename = os.path.join(prefix_dir, f) + dest_file_list.append(full_filename) + + for dest_file, filepath in zip(dest_file_list, files): + dest_dir = os.path.dirname(dest_file) + os.makedirs(dest_dir, exist_ok=True) + shutil.copy(filepath, dest_dir) + + # Since we are changing the directory structure for the examples, we will also + # need to modify the paths in the code. + tflm_examples_include_path = "tensorflow/lite/micro/examples" + examples_gen_include_path = tensorflow_root + "tensorflow/lite/micro/examples" + for filepath in dest_file_list: + with fileinput.FileInput(filepath, inplace=True) as f: + for line in f: + include_match = re.match( + r'.*#include.*"' + tflm_examples_include_path + r'/([^/]+)/.*"', + line) + examples_gen_include_match = re.match( + r'.*#include.*"' + examples_gen_include_path + r'/([^/]+)/.*"', + line) + if include_match: + # We need a trailing forward slash because what we care about is + # replacing the include paths. + text_to_replace = os.path.join(tflm_examples_include_path, + include_match.group(1)) + "/" + line = line.replace(text_to_replace, "") + elif examples_gen_include_match: + # We need a trailing forward slash because what we care about is + # replacing the include paths. 
+ text_to_replace_1 = os.path.join( + examples_gen_include_path, + examples_gen_include_match.group(1)) + "/" + line = line.replace(text_to_replace_1, "") + # end="" prevents an extra newline from getting added as part of the + # in-place find and replace. + print(line, end="") + + +def _rename_cc_to_cpp(output_dir): + for path, _, files in os.walk(output_dir): + for name in files: + if name.endswith(".cc"): + base_name_with_path = os.path.join(path, os.path.splitext(name)[0]) + os.rename(base_name_with_path + ".cc", base_name_with_path + ".cpp") + + +def main(): + parser = argparse.ArgumentParser( + description="Starting script for TFLM project generation") + parser.add_argument("output_dir", + help="Output directory for generated TFLM tree") + parser.add_argument("--no_copy", + action="store_true", + help="Do not copy files to output directory") + parser.add_argument("--print_src_files", + action="store_true", + help="Print the src files (i.e. files in the TFLM tree)") + parser.add_argument( + "--print_dest_files", + action="store_true", + help="Print the dest files (i.e. files in the output tree)") + parser.add_argument("--makefile_options", + default="", + help="Additional TFLM Makefile options. For example: " + "--makefile_options=\"TARGET= " + "OPTIMIZED_KERNEL_DIR= " + "TARGET_ARCH=corex-m4\"") + parser.add_argument("--examples", + "-e", + action="append", + help="Examples to add to the output tree. For example: " + "-e hello_world -e micro_speech") + parser.add_argument( + "--rename_cc_to_cpp", + action="store_true", + help="Rename all .cc files to .cpp in the destination files location.") + + args = parser.parse_args() + + makefile_options = args.makefile_options + + make_entries = makefile_options.split() + tensorflow_root = "" + for make_entry in make_entries: + key_value = make_entry.split("=") + if key_value[0] == "TENSORFLOW_ROOT": + tensorflow_root = key_value[1] + + # TODO(b/143904317): Explicitly call make third_party_downloads. 
This will + # no longer be needed once all the downloads are switched over to bash + # scripts. + params_list = [ + "make", "-f", tensorflow_root + + "tensorflow/lite/micro/tools/make/Makefile", "third_party_downloads" + ] + makefile_options.split() + process = subprocess.Popen(params_list, + stdout=subprocess.PIPE, + stderr=subprocess.PIPE) + _, stderr = process.communicate() + if process.returncode != 0: + raise RuntimeError("%s failed with \n\n %s" % + (" ".join(params_list), stderr.decode())) + + src_files, dest_files = _get_src_and_dest_files(args.output_dir, + makefile_options, + tensorflow_root) + + if args.print_src_files: + print(" ".join(src_files)) + + if args.print_dest_files: + print(" ".join(dest_files)) + + if args.no_copy is False: + _copy(src_files, dest_files) + + if args.examples is not None: + _create_examples_tree(args.output_dir, args.examples, tensorflow_root) + + if args.rename_cc_to_cpp: + _rename_cc_to_cpp(args.output_dir) + + +if __name__ == "__main__": + main() diff --git a/tensorflow/lite/micro/tools/requantize_flatbuffer.py b/tensorflow/lite/micro/tools/requantize_flatbuffer.py new file mode 100644 index 0000000..ed9f454 --- /dev/null +++ b/tensorflow/lite/micro/tools/requantize_flatbuffer.py @@ -0,0 +1,224 @@ +# Copyright 2023 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# ============================================================================= +"""An experimental tool to requantize a int8 activation, int8 weight LSTM based model to int16 activation, int8 weight + +Steps: +1. Convert the trained model to int8 using the TFLite converter. See https://www.tensorflow.org/lite/performance/post_training_quantization#full_integer_quantization +2. Use this tool to requantize the int8 model to int16. +3. Check if the requantized model match the expectation (e.g., read the conversion printout, perform inference tests) + +The conversion process: +1. Requantize the ops specified in _COMPLEX_OP_REQUANTIZE_REGISTRATION using the registered function. Bias type conversion (int32 to int64) only happens here. +2. Requantize all non-constant tensors with int8 type to int16 (and fix the quantization parameters) + +Run: +bazel build tensorflow/lite/micro/tools:requantize_flatbuffer +bazel-bin/tensorflow/lite/micro/tools/requantize_flatbuffer --int8_model_path=".tflite file path"` --save_path="save path" + +CAVEAT: +1. Use this tool ONLY for models that contain the LSTM layer. All other models should use the standard tflite conversion process. +2. This is an experimental tool. ALWAYS check if the converted model matches your expectation +3. Add the custom op requantization function for complex ops (e.g., convolution). +4. We assume ops not in _COMPLEX_OP_REQUANTIZE_REGISTRATION only have activation tensors (i.e. no weights and bias). 
Check the quantized model performance if you add additional ops to _TESTED_SIMPLE_OPS + +""" +import os + +import numpy as np +from absl import app +from absl import flags +from absl import logging + +from tflite_micro.tensorflow.lite.tools import flatbuffer_utils +from tflite_micro.tensorflow.lite.micro.tools import requantize_flatbuffer_utils +from tflite_micro.tensorflow.lite.python import schema_py_generated + +FLAGS = flags.FLAGS + +flags.DEFINE_string("int8_model_path", + default=None, + help="the int8 model path.") +flags.DEFINE_string("save_path", + default=None, + help="path to save the requantized model.") + +# key: BuiltinOperator (see tensorflow/lite/schema/schema.fbs) +# Val: the requantize function defined in requantize_flatbuffer_utils.py +# FULLY_CONNECTED, CONV_2D, DEPTHWISE_CONV_2D share the same requantize function +# since they all share the same input/weight/bias configuration. +_COMPLEX_OP_REQUANTIZE_REGISTRATION = { + schema_py_generated.BuiltinOperator.FULLY_CONNECTED: + requantize_flatbuffer_utils.requantize_fully_connected, + schema_py_generated.BuiltinOperator.UNIDIRECTIONAL_SEQUENCE_LSTM: + requantize_flatbuffer_utils.requantize_unidirectional_sequence_lstm, + schema_py_generated.BuiltinOperator.SOFTMAX: + requantize_flatbuffer_utils.requantize_softmax, + schema_py_generated.BuiltinOperator.CONV_2D: + requantize_flatbuffer_utils.requantize_fully_connected, + schema_py_generated.BuiltinOperator.DEPTHWISE_CONV_2D: + requantize_flatbuffer_utils.requantize_fully_connected, + schema_py_generated.BuiltinOperator.TRANSPOSE_CONV: + requantize_flatbuffer_utils.requantize_transpose_conv, +} + +# List of tested simple operators (no weight and bias, e.g., reshape) see tensorflow/lite/schema/schema.fbs for op code names +_TESTED_SIMPLE_OPS = [ + schema_py_generated.BuiltinOperator.ADD, + schema_py_generated.BuiltinOperator.CONCATENATION, + schema_py_generated.BuiltinOperator.DEQUANTIZE, + schema_py_generated.BuiltinOperator.LEAKY_RELU, + 
schema_py_generated.BuiltinOperator.LOGISTIC, + schema_py_generated.BuiltinOperator.MEAN, + schema_py_generated.BuiltinOperator.MUL, + schema_py_generated.BuiltinOperator.PAD, + schema_py_generated.BuiltinOperator.QUANTIZE, + schema_py_generated.BuiltinOperator.RESHAPE, + schema_py_generated.BuiltinOperator.RSQRT, + schema_py_generated.BuiltinOperator.SQUARED_DIFFERENCE, + schema_py_generated.BuiltinOperator.STRIDED_SLICE, + schema_py_generated.BuiltinOperator.SUB, +] + +_SUPPORTED_OPS = set( + list(_COMPLEX_OP_REQUANTIZE_REGISTRATION.keys()) + _TESTED_SIMPLE_OPS) + + +class Requantizer: + """Requantize an int8 activation model to int16""" + + def __init__(self, int8_model): + """Initialize the int8 to int16 converter. + + Args: + int8_model: flatbuffer python object + """ + self.model = int8_model + self.remaining_tensors = set() + for subgraph in self.model.subgraphs: + for tensor in subgraph.tensors: + self.remaining_tensors.add(tensor) + + @classmethod + def from_file(self, model_path): + """Instantiates a converter from a int8 quantized .tflite filepath. + + Args: + model_path: Filepath to the .tflite model + + Returns: + An Int8ToInt16Converter instance + """ + int8_model = flatbuffer_utils.read_model(model_path) + return Requantizer(int8_model) + + @classmethod + def from_bytes(self, bytearray): + """Instantiates a converter from a int8 quantized .tflite bytearray. 
+ + Args: + bytearray: Content of the .tflite model + + Returns: + An Int8ToInt16Converter instance + """ + int8_model = flatbuffer_utils.convert_bytearray_to_object(bytearray) + return Requantizer(int8_model) + + def _remove_tensor(self, tensor): + """Remove tensor from the tensor pool""" + if tensor in self.remaining_tensors: + self.remaining_tensors.remove(tensor) + + def _remove_op_tensors(self, tensors, op): + """Remove tensors in an operator from the tensor pool + + Args: + tensors: tensors in the subgraph + op : the operator + """ + for id in op.inputs: + # -1 means non-used tensor + if id != -1: + self._remove_tensor(tensors[id]) + for id in op.outputs: + if id != -1: + self._remove_tensor(tensors[id]) + + def _convert_ops(self): + """Convert all ops registered in _OP_CONVERSION_REGISTRATION from int8 to int16 (activation type)""" + op_codes = self.model.operatorCodes + for subgraph in self.model.subgraphs: + tensors = subgraph.tensors + for op in subgraph.operators: + op_code = op_codes[op.opcodeIndex].builtinCode + op_name = flatbuffer_utils.opcode_to_name(self.model, op.opcodeIndex) + if op_code not in _SUPPORTED_OPS: + raise RuntimeError( + f"Operator {op_name} is not supported. If the operator contains weight/bias, develop and register the corresponding requantize function in _COMPLEX_OP_CONVERSION_REGISTRATION. 
Otherwise, try add the op code to _TESTED_SIMPLE_OPS and validate the requantized model " + ) + if op_code in _COMPLEX_OP_REQUANTIZE_REGISTRATION: + logging.info(f"Convert operator {op_name}") + _COMPLEX_OP_REQUANTIZE_REGISTRATION[op_code](tensors, + self.model.buffers, op) + self._remove_op_tensors(tensors, op) + + def _change_tensor_activation_type(self): + """Change all remaining tensor types from int8 to int16""" + for subgraph in self.model.subgraphs: + for tensor in subgraph.tensors: + if ((tensor in self.remaining_tensors) + and (requantize_flatbuffer_utils.TENSOR_CODE_TYPE[tensor.type] + == np.int8) and ("const" not in str(tensor.name))): + requantize_flatbuffer_utils.change_activation_tensor_8to16( + tensor, self.model.buffers) + self._remove_tensor(tensor) + + def requantize_8to16(self): + ''' + The requantize process has two phase: + 1. Go through the registered ops and perform the custom op transformation + 2. Go through the rest of tensors and convert int8 non-const tensor to int16 + ''' + + logging.info("Reset Operators") + self._convert_ops() + logging.info("Set Remaining Activation Types") + self._change_tensor_activation_type() + logging.info("Remaining Tensors:") + for tensor in self.remaining_tensors: + logging.info( + f"{tensor.name}, tensor type {flatbuffer_utils.type_to_name(tensor.type)}" + ) + + def save_model(self, output_path): + """Save the requantized model to a specificed location.""" + flatbuffer_utils.write_model(self.model, output_path) + + def model_bytearray(self): + """Get the flatbuffer bytearray""" + return flatbuffer_utils.convert_object_to_bytearray(self.model) + + +def main(_): + if not os.path.exists(FLAGS.int8_model_path): + raise ValueError( + "Model file does not exist. 
Please check the .tflite model path.") + requantizer = Requantizer.from_file(FLAGS.int8_model_path) + requantizer.requantize_8to16() + requantizer.save_model(FLAGS.save_path) + + +if __name__ == "__main__": + app.run(main) diff --git a/tensorflow/lite/micro/tools/requantize_flatbuffer_test.py b/tensorflow/lite/micro/tools/requantize_flatbuffer_test.py new file mode 100644 index 0000000..4d80991 --- /dev/null +++ b/tensorflow/lite/micro/tools/requantize_flatbuffer_test.py @@ -0,0 +1,115 @@ +# Copyright 2023 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# ============================================================================= +import os + +import numpy as np +import tensorflow as tf + +from tensorflow.python.framework import test_util +from tensorflow.python.platform import test +from tflite_micro.tensorflow.lite.micro.tools import requantize_flatbuffer +from tflite_micro.python.tflite_micro import runtime +from tflite_micro.tensorflow.lite.tools import flatbuffer_utils + + +#TODO(b/248061370): replace the keras model creation process with flatbuffer manipulation to speed up test +def create_simple_fc_model(): + '''Create a simple model with two fully connected(fc) layers''' + model = tf.keras.models.Sequential([ + tf.keras.layers.InputLayer(input_shape=(28, 28)), + tf.keras.layers.Flatten(), + tf.keras.layers.Dense(50, activation=tf.nn.relu), + tf.keras.layers.Dense(10, activation=tf.nn.softmax, name="output") + ]) + fixed_input = tf.keras.layers.Input(shape=[28, 28], + batch_size=1, + dtype=model.inputs[0].dtype, + name="fixed_input") + fixed_output = model(fixed_input) + return tf.keras.models.Model(fixed_input, fixed_output) + + +def representative_dataset_gen(num_samples=100): + np.random.seed(42) #Seed the random number generator + for _ in range(num_samples): + yield [np.random.random((1, 28, 28)).astype(np.float32)] + + +def convert_tfl_converter(keras_model, + representative_dataset_gen, + int16=False): + '''Convert and quantize the keras model using the standard tflite converter''' + converter = tf.lite.TFLiteConverter.from_keras_model(keras_model) + converter.optimizations = [tf.lite.Optimize.DEFAULT] + converter.target_spec.supported_ops = [tf.lite.OpsSet.TFLITE_BUILTINS_INT8] + if int16: + converter.target_spec.supported_ops = [ + tf.lite.OpsSet. 
+ EXPERIMENTAL_TFLITE_BUILTINS_ACTIVATIONS_INT16_WEIGHTS_INT8 + ] + converter.representative_dataset = representative_dataset_gen + return converter.convert() + + +def convert_8to16_requantizer(keras_model, representative_dataset_gen): + '''Convert and quantize the keras model using the int8 to int16 conversion tool''' + # Convert to int8 first + int8_model = convert_tfl_converter(keras_model, + representative_dataset_gen, + int16=False) + int8_model = flatbuffer_utils.convert_bytearray_to_object(int8_model) + # Use the tool to convert to int16 + requantizer = requantize_flatbuffer.Requantizer(int8_model) + requantizer.requantize_8to16() + return flatbuffer_utils.convert_object_to_bytearray(requantizer.model) + + +class SimpleFCModelTest(test_util.TensorFlowTestCase): + + def testCompareWithStandardConversion(self): + + def inference(tflm_interpreter, data_x): + tflm_interpreter.set_input(data_x, 0) + tflm_interpreter.invoke() + return tflm_interpreter.get_output(0) + + keras_model = create_simple_fc_model( + ) # int16 fc is supported in tflite converter + tfl_converted_int16_model = convert_tfl_converter( + keras_model, representative_dataset_gen, int16=True) + int8_converted_int16_model = convert_8to16_requantizer( + keras_model, representative_dataset_gen) + + interpreter_tfl_converted = runtime.Interpreter.from_bytes( + tfl_converted_int16_model) + interpreter_tool_converted = runtime.Interpreter.from_bytes( + int8_converted_int16_model) + + num_steps = 10 + # Give the same (random) input to both interpreters to confirm that the outputs are similar. 
+ for _ in range(0, num_steps): + data_x = np.random.random((1, 28, 28)).astype("float32") + + tfl_converted_result = inference(interpreter_tfl_converted, data_x)[0] + tool_converted_result = inference(interpreter_tool_converted, data_x)[0] + + max_diff = max(abs(tool_converted_result - tfl_converted_result)) + self.assertLess( + max_diff, 1e-4 + ) # can not be the same since int8 model loses some range information + + +if __name__ == "__main__": + test.main() diff --git a/tensorflow/lite/micro/tools/requantize_flatbuffer_utils.py b/tensorflow/lite/micro/tools/requantize_flatbuffer_utils.py new file mode 100644 index 0000000..5709ff2 --- /dev/null +++ b/tensorflow/lite/micro/tools/requantize_flatbuffer_utils.py @@ -0,0 +1,325 @@ +# Copyright 2023 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================= +import numpy as np +from absl import logging +from tflite_micro.tensorflow.lite.python.schema_py_generated import TensorType + +# Map flatbuffer tensor type code to numpy data type. 
see Table TensorType in tensorflow/lite/schema/schema.fbs +# TODO(b/269487423): use a common util function instead +TENSOR_CODE_TYPE = { + TensorType.FLOAT32: np.float32, + TensorType.FLOAT16: np.float16, + TensorType.INT32: np.int32, + TensorType.UINT8: np.uint8, + TensorType.INT64: np.int64, + TensorType.STRING: np.string_, + TensorType.BOOL: np.bool_, + TensorType.INT16: np.int16, + TensorType.COMPLEX64: np.complex64, + TensorType.INT8: np.int8, + TensorType.FLOAT64: np.float64, + TensorType.COMPLEX128: np.complex128, + TensorType.UINT64: np.uint64, + TensorType.RESOURCE: "RESOURCE", + TensorType.VARIANT: "VARIANT", + TensorType.UINT32: np.uint32, + TensorType.UINT16: np.uint16, + TensorType.INT4: "INT4", +} + +# TODO(b/269487423): use a common util function instead +TENSOR_TYPE_CODE = dict((reversed(item) for item in TENSOR_CODE_TYPE.items())) + + +def clip_range(vals, bit_width): + """Mimic integer calculation. + + Clip the range of vals based on bit width. + + e.g., clip_range([300], 8) = [127] since int8 have range [-128, 127] + + Args: + vals (np.array): float representation of the integer values + bit_width (int): number of desired bits for vals + + Returns: + np.array : clipped vals + """ + # Numpy integer calculation does not do saturation. Implement here + min_val = -2**(bit_width - 1) + max_val = 2**(bit_width - 1) - 1 + if vals.max() > max_val or vals.min() < min_val: + logging.info(f"WARNING: integer overflow!") + return np.clip(vals, min_val, max_val) + + +def quantize_data(data, scale, zero_point=0, bit_width=8): + """Quantize the data to integer type with desired bit width. 
+ + The quantized data is represented using float since integer calculation in + numpy may differ from other implementations (e.g., no integer saturation + protection in numpy) + + Args: + data (np.array): float data + scale (float): quantization scale of the data + zero_point (integer): quantization zero point of the data + bit_width (int): number of representative bits for vals + + Returns: + np.array : quantized data in float but clipped range + """ + vals = np.round(data / scale) + zero_point + return clip_range(vals, bit_width) + + +def dequantize_data(quantized_data, scale, zero_point=0): + """Dequantize the data to integer type with desired bit width. + + Args: + quantized_data (np.array): quantized data + scale (float): quantization scale of the data + zero_point (integer): quantization zero point of the data + + Returns: + np.array : dequantized data + """ + return scale * (quantized_data - zero_point) + + +def change_quantization_settings_8to16(tensor, buffers): + """Change the quantization seeting of the tensor from int8 to int16""" + + if (tensor.quantization.quantizedDimension != 0): + raise RuntimeError( + "Only layer level quantization is supported. 
Per channel quantization is not supported now" + ) + + scale = tensor.quantization.scale[0] + zero_point = tensor.quantization.zeroPoint[0] + + # Set MAX_INT8 from 127 to 128 to compromise the range precision loss due to int8 quantization + MIN_INT8, MAX_INT8 = -128, 128 + # Narrow range (-min == max) is used for symmetrical quantization + MIN_INT16, MAX_INT16 = -32767, 32767 + + # Asymmertical quantized: scale * (qmax - zero_point) = rmax + rmax = scale * (MAX_INT8 - zero_point) + rmin = scale * (MIN_INT8 - zero_point) + # symmertical quantized: scale * qmax = rmax + scale_16 = max(abs(rmax), abs(rmin)) / abs(MIN_INT16) + # Change scale: Symmetrical Quantized + tensor.quantization.scale = [scale_16] + tensor.quantization.zeroPoint = [0] + + # requantize the buffer data to int16 if necessary + tensor_buffer = buffers[tensor.buffer] + if type(tensor_buffer.data) != type(None): + expected_buffer_size = np.prod(tensor.shape) + data = np.frombuffer(tensor_buffer.data, dtype=np.int8) + # Different ops may share one buffer. 
No need to requantize the buffer + # if the buffer has already been processed to int16 (2 bytes) + if data.nbytes == expected_buffer_size * 2: + return + elif data.nbytes != expected_buffer_size: + raise RuntimeError( + f"Bias buffer size {data.nbytes} does not match the expected size {expected_buffer_size * 4}" + ) + dequantized_data = dequantize_data(data, tensor.quantization.scale, + tensor.quantization.zeroPoint) + int16_data = quantize_data(dequantized_data, scale_16, 0, + 16).astype(np.int16) + tensor_buffer.data = int16_data.tobytes() + + +def change_activation_tensor_8to16(tensor, buffers): + """Change the quantization setting of a activation tensor from int8 to int16""" + if tensor.type == TENSOR_TYPE_CODE[np.int8]: + change_quantization_settings_8to16(tensor, buffers) + tensor.type = TENSOR_TYPE_CODE[np.int16] + logging.info(f"Set {tensor.name} from int8 to int16 ") + + +def requantize_bias_perlayer(buffers, input, weight, bias): + """Bias is layer wise quantized """ + bias_buffer = buffers[bias.buffer] + bias_scale = bias.quantization.scale[0] + bias_zero_pt = bias.quantization.zeroPoint[0] + data = np.frombuffer(bias_buffer.data, dtype=np.int32) + + # change scale and zero point + bias_scale_int64 = (input.quantization.scale[0] * + weight.quantization.scale[0]) + bias_zero_pt_int64 = 0 # symmetrical quantized + bias.type = TENSOR_TYPE_CODE[np.int64] + bias.quantization.scale = [bias_scale_int64] + bias.quantization.zeroPoint = [bias_zero_pt_int64] + + expected_buffer_size = bias.shape[0] # bias has only one dimension + # Different ops may share one buffer. 
No need to requantize the buffer + # if the buffer has already been processed to int64 (8 bytes) + if data.nbytes == expected_buffer_size * 8: + return + elif data.nbytes != expected_buffer_size * 4: + raise RuntimeError( + f"Bias buffer size {data.nbytes} does not match the expected size {expected_buffer_size * 4}" + ) + dequantized_data = dequantize_data(data, bias_scale, bias_zero_pt) + int64_data = quantize_data(dequantized_data, bias_scale_int64, + bias_zero_pt_int64, 64).astype(np.int64) + bias_buffer.data = int64_data.tobytes() + + +def requantize_bias_perchannel(buffers, input, weight, bias): + """Bias is channel wise quantized. Requantize bias one by one """ + bias_buffer = buffers[bias.buffer] + data = np.frombuffer(bias_buffer.data, dtype=np.int32) + expected_buffer_size = bias.shape[0] # bias has only one dimension + # whether to requantize the bias buffer, False if the buffer has already been requantized + requantize_buffer = True + # Different ops may share one buffer. No need to requantize the buffer + # if the buffer has already been processed to int64 (8 bytes) + if data.nbytes == expected_buffer_size * 8: + requantize_buffer = False + elif data.nbytes != expected_buffer_size * 4: + raise RuntimeError( + f"Bias buffer size {data.nbytes} does not match the expected size {expected_buffer_size * 4}" + ) + if len(bias.quantization.scale) != len(weight.quantization.scale): + raise RuntimeError( + f" Per channel quantization requires number of bias scales ({len(bias.quantization.scale)}),\ + equals to number of weight scales ({len(weight.quantization.scale)}) " + ) + requantized_data = [] + requantized_scales = [] + requantized_zero_points = [] + for element_data, bias_scale, weight_scale, bias_zero_point in zip( + data, bias.quantization.scale, weight.quantization.scale, + bias.quantization.zeroPoint): + bias_scale_int64 = (input.quantization.scale[0] * weight_scale) + bias_zero_pt_int64 = 0 # symmetrical quantized + 
requantized_scales.append(bias_scale_int64) + requantized_zero_points.append(bias_zero_pt_int64) + + if requantize_buffer: + dequantized_data = dequantize_data(element_data, bias_scale, + bias_zero_point) + int64_data = quantize_data(dequantized_data, bias_scale_int64, + bias_zero_pt_int64, 64).astype(np.int64) + requantized_data.append(int64_data) + + bias.type = TENSOR_TYPE_CODE[np.int64] + bias.quantization.scale = requantized_scales + bias.quantization.zeroPoint = requantized_zero_points + if requantize_buffer: + bias_buffer.data = np.array(requantized_data).tobytes() + + +def set_bias_type_int64(buffers, input, weight, bias): + """Set the bias tensor quantization setting from int32 to int64 + + Args: + buffers (list): buffers for the model + input (Tensor): the corresponding input tensor for the bias + weight (Tensor): the corresponding weight tensor for the bias + bias (Tensor): the bias tensor that need to be modified + """ + if bias.type == TENSOR_TYPE_CODE[np.int32]: + if len(bias.quantization.scale) == 1: + requantize_bias_perlayer(buffers, input, weight, bias) + else: + requantize_bias_perchannel(buffers, input, weight, bias) + + +def requantize_fully_connected(tensors, buffers, op): + """Requantize the fully connected op from int8 to int16 + + Note: CONV_2D and DEPTHWISE_CONV_2D also use this requantize function since they all share the same input/weight/bias configuration. 
+ See tensorflow/lite/micro/kernels/fully_connected_common.cc + tflite_micro/tensorflow/lite/micro/kernels/depthwise_conv_common.cc + tflite_micro/tensorflow/lite/micro/kernels/conv_common.cc + """ + # Indices are from tensorflow/lite/micro/kernels/fully_connected_common.cc + input_tensor = tensors[op.inputs[0]] + # weight stays the same, no change needed + weight_tensor = tensors[op.inputs[1]] + output_tensor = tensors[op.outputs[0]] + + change_activation_tensor_8to16(input_tensor, buffers) + change_activation_tensor_8to16(output_tensor, buffers) + # if the bias does not exist, op.inputs[2] == -1 + if op.inputs[2] != -1: + bias_tensor = tensors[op.inputs[2]] + set_bias_type_int64(buffers, input_tensor, weight_tensor, bias_tensor) + + +def requantize_unidirectional_sequence_lstm(tensors, buffers, op): + """Requantize the unidirectonal sequance lstm op from int8 to int16 """ + input_tensor = tensors[op.inputs[0]] + hidden_state_tensor = tensors[op.inputs[18]] + output_tensor = tensors[op.outputs[0]] + + # Indices are from tensorflow/lite/micro/kernels/lstm_shared.h + input_weights_idx = [1, 2, 3, 4] + recurrent_weights_idx = [5, 6, 7, 8] + bias_idx = [12, 13, 14, 15] + + change_activation_tensor_8to16(input_tensor, buffers) + change_activation_tensor_8to16(hidden_state_tensor, buffers) + change_activation_tensor_8to16(output_tensor, buffers) + + for weight_id, bias_id in zip(input_weights_idx, bias_idx): + weight_tensor = tensors[op.inputs[weight_id]] + bias_tensor = tensors[op.inputs[bias_id]] + set_bias_type_int64(buffers, input_tensor, weight_tensor, bias_tensor) + + # recurrent weights have no associated biases + for weight_id in recurrent_weights_idx: + weight_tensor = tensors[op.inputs[weight_id]] + + +def requantize_softmax(tensors, buffers, op): + """Requantize the softmax op from int8 to int16""" + input_tensor = tensors[op.inputs[0]] + output_tensor = tensors[op.outputs[0]] + + # Change input type + change_activation_tensor_8to16(input_tensor, buffers) + + 
# Output range is always [0,1] + if output_tensor.type == TENSOR_TYPE_CODE[np.int8]: + # change quantization settings + output_tensor.quantization.scale = [1 / 32768] + output_tensor.quantization.zeroPoint = [0] + # Set tensor type + output_tensor.type = TENSOR_TYPE_CODE[np.int16] + logging.info(f"Set {output_tensor.name} from int8 to int16 ") + + +def requantize_transpose_conv(tensors, buffers, op): + """Requantize the transpose conv op from int8 to int16""" + # Indices are from tensorflow/lite/micro/kernels/transpose_conv.cc + input_tensor = tensors[op.inputs[2]] + # weight stays the same, no change needed + weight_tensor = tensors[op.inputs[1]] + output_tensor = tensors[op.outputs[0]] + + change_activation_tensor_8to16(input_tensor, buffers) + change_activation_tensor_8to16(output_tensor, buffers) + # if the bias does not exist, op.inputs[2] == -1 + if len(op.inputs) > 3: + if op.inputs[3] != -1: + bias_tensor = tensors[op.inputs[3]] + set_bias_type_int64(buffers, input_tensor, weight_tensor, bias_tensor) \ No newline at end of file diff --git a/tensorflow/lite/micro/tools/tflite_flatbuffer_align.py b/tensorflow/lite/micro/tools/tflite_flatbuffer_align.py new file mode 100644 index 0000000..02cb14f --- /dev/null +++ b/tensorflow/lite/micro/tools/tflite_flatbuffer_align.py @@ -0,0 +1,34 @@ +# Copyright 2023 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# ============================================================================== +"""Tool to re-align the tflite flatbuffer via the C++ flatbuffer api.""" + +from absl import app + +from tflite_micro.tensorflow.lite.micro.tools import tflite_flatbuffer_align_wrapper + + +def main(argv): + try: + input_model_path = argv[1] + output_model_path = argv[2] + except IndexError: + print('usage: ', argv[0], ' \n') + else: + tflite_flatbuffer_align_wrapper.align_tflite_model(input_model_path, + output_model_path) + + +if __name__ == '__main__': + app.run(main) diff --git a/tensorflow/lite/micro/tools/tflite_flatbuffer_align_wrapper.cc b/tensorflow/lite/micro/tools/tflite_flatbuffer_align_wrapper.cc new file mode 100644 index 0000000..842dcee --- /dev/null +++ b/tensorflow/lite/micro/tools/tflite_flatbuffer_align_wrapper.cc @@ -0,0 +1,51 @@ +/* Copyright 2023 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#include +#include + +#include "flatbuffers/util.h" +#include "tensorflow/lite/schema/schema_generated.h" + +namespace py = pybind11; + +void align_tflite_model(const char* input_file_name, + const char* output_file_name) { + std::string model_file; + // Read the file into a string using the included util API call: + flatbuffers::LoadFile(input_file_name, false, &model_file); + // Parse the string into a C++ class. 
Model is the root object of a tflite + // flatbuffer file. + const tflite::Model* model = tflite::GetModel(model_file.c_str()); + // A packed model is basically the file format mmaped into memory. + // Unpacking it and then packing it with the C++ API should yield + // a file with the force_align attributes respected. + // ModelT is just the unpacked version of the model file. + tflite::ModelT* unpacked_model = model->UnPack(); + flatbuffers::FlatBufferBuilder fbb; + auto new_model = tflite::Model::Pack(fbb, unpacked_model); + fbb.Finish(new_model, tflite::ModelIdentifier()); + flatbuffers::SaveFile(output_file_name, + reinterpret_cast(fbb.GetBufferPointer()), + fbb.GetSize(), /*binary*/ true); +} + +PYBIND11_MODULE(tflite_flatbuffer_align_wrapper, m) { + m.doc() = "tflite_flatbuffer_align_wrapper"; + m.def("align_tflite_model", &align_tflite_model, + "Aligns the tflite flatbuffer to (16), by unpacking and repacking via " + "the flatbuffer C++ API.", + py::arg("input_file_name"), py::arg("output_file_name")); +} diff --git a/tensorflow/lite/micro/tools/tflm_model_transforms.py b/tensorflow/lite/micro/tools/tflm_model_transforms.py new file mode 100644 index 0000000..0daae0f --- /dev/null +++ b/tensorflow/lite/micro/tools/tflm_model_transforms.py @@ -0,0 +1,72 @@ +# Copyright 2023 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
# ==============================================================================
"""Runs TFLM specific transformations to reduce model size on a .tflite model."""

from absl import app
from absl import flags
from absl import logging

from tflite_micro.tensorflow.lite.micro.tools import tflm_model_transforms_lib

# Usage information:
# Default:
#   `bazel run tensorflow/lite/micro/tools:tflm_model_transforms -- \
#     --input_model_path=</path/to/my_model.tflite>`
# output will be located at: /path/to/my_model_tflm_optimized.tflite

# Required: path of the .tflite model to optimize.
_INPUT_MODEL_PATH = flags.DEFINE_string(
    "input_model_path",
    None,
    ".tflite input model path",
    required=True,
)

# Optional: when set, each intermediate (per-transform) model is written to a
# /tmp/ directory and verified individually.
_SAVE_INTERMEDIATE_MODELS = flags.DEFINE_bool(
    "save_intermediate_models",
    False,
    "optional config to save models between different transforms. Models are"
    " saved to a /tmp/ directory and tested at each stage.",
)

# Optional: verify input and transformed models produce identical outputs on
# random data.
_TEST_TRANSFORMED_MODELS = flags.DEFINE_bool(
    "test_transformed_model",
    True,
    "optional config to enable/disable testing models on random data and"
    " asserting equivalent output.",
)

# Optional: explicit output path; defaults to <input>_tflm_optimized.tflite.
_OUTPUT_MODEL_PATH = flags.DEFINE_string(
    "output_model_path",
    None,
    ".tflite output path. Leave blank if same as input+_tflm_optimized.tflite",
)


def main(_) -> None:
  # Derive the default output path from the input path when none is given.
  output_model_path = _OUTPUT_MODEL_PATH.value or (
      _INPUT_MODEL_PATH.value.split(".tflite")[0] + "_tflm_optimized.tflite")

  logging.info("\n--Running TFLM optimizations on: %s",
               _INPUT_MODEL_PATH.value)
  tflm_model_transforms_lib.run_all_transformations(
      _INPUT_MODEL_PATH.value,
      output_model_path,
      _SAVE_INTERMEDIATE_MODELS.value,
      _TEST_TRANSFORMED_MODELS.value,
  )


if __name__ == "__main__":
  app.run(main)
diff --git a/tensorflow/lite/micro/tools/tflm_model_transforms_lib.py b/tensorflow/lite/micro/tools/tflm_model_transforms_lib.py
new file mode 100644
index 0000000..60530ff
--- /dev/null
+++ b/tensorflow/lite/micro/tools/tflm_model_transforms_lib.py
@@ -0,0 +1,224 @@
# Copyright 2023 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""TFLM specific flatbuffer model transformations, to reduce model size.

go/tflm-flatbuffer-reduction
We take advantage of the TFLM infrastructure to remove information in the
flatbuffer which we do not precisely need for inference of a model.
The methods used here require the assumptions made from the TFLM framework to
properly work.
+""" + +import os +import tempfile +from absl import logging +import numpy as np +from tensorflow.python.platform import gfile + +from tflite_micro.tensorflow.lite.tools import flatbuffer_utils +from tflite_micro.tensorflow.lite.micro.tools import model_transforms_utils +from tflite_micro.tensorflow.lite.micro.python.interpreter.src import runtime + + +def _save_and_align_flatbuffer(model, model_path): + flatbuffer_utils.write_model(model, model_path) + model_transforms_utils.tflite_flatbuffer_align(model_path, model_path) + + +def log_size_difference(input_path, transformed_model_path): + initial_binary_size = gfile.Stat(input_path).length + final_binary_size = gfile.Stat(transformed_model_path).length + logging.info("Initial file size: %d %s", initial_binary_size, "bytes.") + logging.info("Final file size: %d %s", final_binary_size, "bytes.") + logging.info("Savings = %d %s", initial_binary_size - final_binary_size, + "bytes.") + logging.info( + " (%.2f %s", + round((1 - (final_binary_size / initial_binary_size)) * 100, 2), + "% reduction )", + ) + + +def check_models_equivalent(initial_model_path: str = None, + secondary_model_path: str = None, + test_vector_count: int = 1, + seed: int = 42, + custom_op_registerers=[]): + """Checks that the two models are equivalent by testing that the same set of random inputs produce the same outputs using the TFLM interpreter. + + Note that this function does not test the correctness of the inference. It + only serves to confirm that the two models are equivalent. + The dimensions of the models inputs and outputs must be identical. + + Args: + initial_model_path: first model full path (str) + secondary_model_path: second model full path (str) + test_vector_count: number of different (random) input vectors to use to test + for equivalence. 
+ seed: optionally provide a custom seed value for random number generator + custom_op_registerers: if your model makes use of custom ops + + Raises: + AssertionError if outputs of TFLM invocations are not equal + """ + with gfile.Open(initial_model_path, "rb") as input_model_file: + initial_model_interpreter = runtime.Interpreter.from_bytes( + input_model_file.read(), + custom_op_registerers=custom_op_registerers, + ) + + with gfile.Open(secondary_model_path, "rb") as secondary_model_file: + secondary_model_interpreter = runtime.Interpreter.from_bytes( + secondary_model_file.read(), + custom_op_registerers=custom_op_registerers, + ) + + initial_model_object = flatbuffer_utils.read_model(initial_model_path) + rng = np.random.default_rng(seed=seed) + + for _ in range(test_vector_count): + for idx, input_tensor_idx in enumerate( + initial_model_object.subgraphs[0].inputs): + input_tensor = initial_model_object.subgraphs[0].tensors[ + input_tensor_idx] + rand_data = model_transforms_utils.generate_random_input_data( + initial_model_object, input_tensor, rng) + initial_model_interpreter.set_input(rand_data, idx) + secondary_model_interpreter.set_input(rand_data, idx) + + initial_model_interpreter.invoke() + secondary_model_interpreter.invoke() + + for idx, _ in enumerate(initial_model_object.subgraphs[0].outputs): + np.testing.assert_array_equal( + initial_model_interpreter.get_output(idx), + secondary_model_interpreter.get_output(idx), + ) + + initial_model_interpreter.reset() + secondary_model_interpreter.reset() + + +def apply_transform_and_log( + transform_func, + model, + log_string, + save_model, + output_dir, + filepath, +): + """Calls transform_func(model) and logs transformed model to output_dir/filepath. 
+ + Args: + transform_func: the transformation function to apply + model: tflite flatbuffer model + log_string: information string about the transformation + save_model: boolean whether to save the model + output_dir: directory to write the model to + filepath: name to save the model as + + Returns: + transformed model object + """ + logging.info("Applying transform: %s", log_string) + transform_func(model) + if not save_model: + return model + output_path = os.path.join(output_dir, filepath) + _save_and_align_flatbuffer(model, output_path) + logging.info("Output of this transform located at: %s", output_path) + return model + + +def run_all_transformations( + input_path, + transformed_model_path, + save_intermediates=False, + test_transformed_model=True, + custom_save_dir=None, + custom_op_registerers=[], +): + """Apply all current transform methods on an input .tflite file, and optionally save the models between methods. + + Args: + input_path: the input .tflite model path + transformed_model_path: output model path if not saving intermediates. 
+ save_intermediates: whether to save intermediate models to a tmp folder + test_transformed_model: optional flag to enable/disable testing of + input/transformed models on random data + custom_save_dir: optionally pass the directory path for saving files + custom_op_registerers: if your model makes use of custom ops + + Raises: + AssertionError if outputs of TFLM invocations on input and transformed + models are not equal + """ + output_dir = None + # We only use output_dir for the case of saving intermediate models + if save_intermediates: + output_dir = custom_save_dir or tempfile.mkdtemp() + logging.info("Saving models to: %s", output_dir) + + model = flatbuffer_utils.read_model(input_path) + pre_transform_model_path = input_path + + transforms_list = [ + model_transforms_utils.clear_resource_variable_buffers, + model_transforms_utils.remove_extraneous_quantization_data, + flatbuffer_utils.strip_strings, + model_transforms_utils.shorten_variable_shared_names, + ] + transform_names = [ + "Clear Resource Variable Buffers", + "Remove Extra Quantization Data", + "Strip Strings", + "Shorten Variable Shared Names", + ] + intermediate_file_names = [ + "resource_buffer_cleared.tflite", + "quant_data_removed.tflite", + "string_stripped.tflite", + "variable_shared_names_shortened.tflite", + ] + + for transform, name, file_name in zip(transforms_list, transform_names, + intermediate_file_names): + model = apply_transform_and_log(transform, model, name, save_intermediates, + output_dir, file_name) + + # Testing will only work if the file has been saved to output path. + # The "final" stage of a transformation is after it has been flatbuffer + # aligned, hence this function only works on file paths, instead of objects. 
+ if test_transformed_model and save_intermediates: + output_path = os.path.join(output_dir, file_name) + check_models_equivalent( + initial_model_path=pre_transform_model_path, + secondary_model_path=output_path, + custom_op_registerers=custom_op_registerers, + ) + pre_transform_model_path = output_path + + gfile.MakeDirs(os.path.dirname(transformed_model_path)) + _save_and_align_flatbuffer(model, transformed_model_path) + logging.info("Transformed model located at: %s", transformed_model_path) + + if test_transformed_model: + check_models_equivalent( + initial_model_path=input_path, + secondary_model_path=transformed_model_path, + custom_op_registerers=custom_op_registerers, + ) + + log_size_difference(input_path, transformed_model_path) diff --git a/tensorflow/lite/micro/tools/tflm_model_transforms_test.py b/tensorflow/lite/micro/tools/tflm_model_transforms_test.py new file mode 100644 index 0000000..83f805e --- /dev/null +++ b/tensorflow/lite/micro/tools/tflm_model_transforms_test.py @@ -0,0 +1,84 @@ +# Copyright 2023 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================== +"""Testing for the tflm_model_transforms functions. + +Applies all transforms on various models, and uses +check_models_equivalent() to assert results. 
+""" +import os + +from absl.testing import parameterized +from tensorflow.python.platform import resource_loader +from tensorflow.python.framework import test_util +from tensorflow.python.platform import test + +from tflite_micro.tensorflow.lite.micro.tools import tflm_model_transforms_lib +from tflite_micro.tensorflow.lite.micro.examples.recipes import resource_variables_lib +from tflite_micro.tensorflow.lite.tools import flatbuffer_utils + + +class TflmModelTransformsTest(test_util.TensorFlowTestCase, + parameterized.TestCase): + + @parameterized.named_parameters( + ("person_detect", "person_detect.tflite"), + ("keyword_scrambled", "keyword_scrambled.tflite"), + ) + def test_model_transforms(self, input_file_name): + test_tmpdir = self.get_temp_dir() + prefix_path = resource_loader.get_path_to_datafile("../models") + input_file_name = os.path.join(prefix_path, input_file_name) + transformed_model_path = test_tmpdir + "/transformed.tflite" + + tflm_model_transforms_lib.run_all_transformations( + input_path=input_file_name, + transformed_model_path=transformed_model_path, + save_intermediates=True, + test_transformed_model=True, + custom_save_dir=test_tmpdir) + + tflm_model_transforms_lib.check_models_equivalent( + initial_model_path=input_file_name, + secondary_model_path=transformed_model_path, + test_vector_count=5, + ) + + # TODO(b/274635545): refactor functions to take in flatbuffer objects instead + # of writing to files here + def test_resource_model(self): + test_tmpdir = self.get_temp_dir() + resource_model = resource_variables_lib.get_model_from_keras() + input_file_name = test_tmpdir + "/resource.tflite" + flatbuffer_utils.write_model( + flatbuffer_utils.convert_bytearray_to_object(resource_model), + input_file_name) + transformed_model_path = test_tmpdir + "/transformed.tflite" + + tflm_model_transforms_lib.run_all_transformations( + input_path=input_file_name, + transformed_model_path=transformed_model_path, + save_intermediates=True, + 
test_transformed_model=True, + custom_save_dir=test_tmpdir) + + tflm_model_transforms_lib.check_models_equivalent( + initial_model_path=input_file_name, + secondary_model_path=transformed_model_path, + test_vector_count=5, + ) + + +if __name__ == "__main__": + test.main() diff --git a/tensorflow/lite/portable_type_to_tflitetype.h b/tensorflow/lite/portable_type_to_tflitetype.h new file mode 100644 index 0000000..b600585 --- /dev/null +++ b/tensorflow/lite/portable_type_to_tflitetype.h @@ -0,0 +1,75 @@ +/* Copyright 2019 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ +#ifndef TENSORFLOW_LITE_PORTABLE_TYPE_TO_TFLITETYPE_H_ +#define TENSORFLOW_LITE_PORTABLE_TYPE_TO_TFLITETYPE_H_ + +// Most of the definitions have been moved to this subheader so that Micro +// can include it without relying on and , which isn't +// available on all platforms. + +// Arduino build defines abs as a macro here. That is invalid C++, and breaks +// libc++'s header, undefine it. +#ifdef abs +#undef abs +#endif + +#include + +#include "tensorflow/lite/core/c/common.h" + +namespace tflite { + +// Map statically from a C++ type to a TfLiteType. Used in interpreter for +// safe casts. +// Example: +// typeToTfLiteType() -> kTfLiteBool +template +constexpr TfLiteType typeToTfLiteType() { + return kTfLiteNoType; +} +// Map from TfLiteType to the corresponding C++ type. 
+// Example: +// TfLiteTypeToType::Type -> bool +template +struct TfLiteTypeToType {}; // Specializations below + +// Template specialization for both typeToTfLiteType and TfLiteTypeToType. +#define MATCH_TYPE_AND_TFLITE_TYPE(CPP_TYPE, TFLITE_TYPE_ENUM) \ + template <> \ + constexpr TfLiteType typeToTfLiteType() { \ + return TFLITE_TYPE_ENUM; \ + } \ + template <> \ + struct TfLiteTypeToType { \ + using Type = CPP_TYPE; \ + } + +// No string mapping is included here, since the TF Lite packed representation +// doesn't correspond to a C++ type well. +MATCH_TYPE_AND_TFLITE_TYPE(int32_t, kTfLiteInt32); +MATCH_TYPE_AND_TFLITE_TYPE(uint32_t, kTfLiteUInt32); +MATCH_TYPE_AND_TFLITE_TYPE(int16_t, kTfLiteInt16); +MATCH_TYPE_AND_TFLITE_TYPE(uint16_t, kTfLiteUInt16); +MATCH_TYPE_AND_TFLITE_TYPE(int64_t, kTfLiteInt64); +MATCH_TYPE_AND_TFLITE_TYPE(float, kTfLiteFloat32); +MATCH_TYPE_AND_TFLITE_TYPE(unsigned char, kTfLiteUInt8); +MATCH_TYPE_AND_TFLITE_TYPE(int8_t, kTfLiteInt8); +MATCH_TYPE_AND_TFLITE_TYPE(bool, kTfLiteBool); +MATCH_TYPE_AND_TFLITE_TYPE(TfLiteFloat16, kTfLiteFloat16); +MATCH_TYPE_AND_TFLITE_TYPE(double, kTfLiteFloat64); +MATCH_TYPE_AND_TFLITE_TYPE(uint64_t, kTfLiteUInt64); + +} // namespace tflite +#endif // TENSORFLOW_LITE_PORTABLE_TYPE_TO_TFLITETYPE_H_ diff --git a/tensorflow/lite/python/BUILD b/tensorflow/lite/python/BUILD new file mode 100644 index 0000000..3dc7232 --- /dev/null +++ b/tensorflow/lite/python/BUILD @@ -0,0 +1,25 @@ +load("@flatbuffers//:build_defs.bzl", "flatbuffer_py_library") +load("@tflm_pip_deps//:requirements.bzl", "requirement") + +package( + default_visibility = [ + "//visibility:public", + ], + licenses = ["notice"], +) + +flatbuffer_py_library( + name = "schema_py", + srcs = ["//tensorflow/lite/schema:schema.fbs"], +) + +py_library( + name = "schema_util", + srcs = ["schema_util.py"], + srcs_version = "PY3", + visibility = ["//:__subpackages__"], + deps = [ + requirement("flatbuffers"), + requirement("tensorflow-cpu"), + ], +) diff 
--git a/tensorflow/lite/python/schema_py_generated.py b/tensorflow/lite/python/schema_py_generated.py
new file mode 100755
index 0000000..2fa90a6
--- /dev/null
+++ b/tensorflow/lite/python/schema_py_generated.py
@@ -0,0 +1,14141 @@
# NOTE(review): everything below is flatc-generated code (see the
# "automatically generated" banners). Do not hand-edit; regenerate from
# tensorflow/lite/schema/schema.fbs instead.
import flatbuffers

# automatically generated by the FlatBuffers compiler, do not modify

# namespace: tflite

from flatbuffers.compat import import_numpy
np = import_numpy()

class ATan2Options(object):
    __slots__ = ['_tab']

    @classmethod
    def GetRootAs(cls, buf, offset=0):
        n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset)
        x = ATan2Options()
        x.Init(buf, n + offset)
        return x

    @classmethod
    def GetRootAsATan2Options(cls, buf, offset=0):
        """This method is deprecated. Please switch to GetRootAs."""
        return cls.GetRootAs(buf, offset)
    @classmethod
    def ATan2OptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False):
        return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed)

    # ATan2Options
    def Init(self, buf, pos):
        self._tab = flatbuffers.table.Table(buf, pos)

def ATan2OptionsStart(builder): builder.StartObject(0)
def Start(builder):
    return ATan2OptionsStart(builder)
def ATan2OptionsEnd(builder): return builder.EndObject()
def End(builder):
    return ATan2OptionsEnd(builder)

class ATan2OptionsT(object):

    # ATan2OptionsT
    def __init__(self):
        pass

    @classmethod
    def InitFromBuf(cls, buf, pos):
        atan2options = ATan2Options()
        atan2options.Init(buf, pos)
        return cls.InitFromObj(atan2options)

    @classmethod
    def InitFromObj(cls, atan2options):
        x = ATan2OptionsT()
        x._UnPack(atan2options)
        return x

    # ATan2OptionsT
    def _UnPack(self, atan2options):
        if atan2options is None:
            return

    # ATan2OptionsT
    def Pack(self, builder):
        ATan2OptionsStart(builder)
        atan2options = ATan2OptionsEnd(builder)
        return atan2options
# automatically generated by the FlatBuffers compiler, do not modify

# namespace: tflite

from flatbuffers.compat import import_numpy
np = import_numpy()

class AbsOptions(object):
    __slots__ = ['_tab']

    @classmethod
    def GetRootAs(cls, buf, offset=0):
        n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset)
        x = AbsOptions()
        x.Init(buf, n + offset)
        return x

    @classmethod
    def GetRootAsAbsOptions(cls, buf, offset=0):
        """This method is deprecated. Please switch to GetRootAs."""
        return cls.GetRootAs(buf, offset)
    @classmethod
    def AbsOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False):
        return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed)

    # AbsOptions
    def Init(self, buf, pos):
        self._tab = flatbuffers.table.Table(buf, pos)

def AbsOptionsStart(builder): builder.StartObject(0)
def Start(builder):
    return AbsOptionsStart(builder)
def AbsOptionsEnd(builder): return builder.EndObject()
def End(builder):
    return AbsOptionsEnd(builder)

class AbsOptionsT(object):

    # AbsOptionsT
    def __init__(self):
        pass

    @classmethod
    def InitFromBuf(cls, buf, pos):
        absOptions = AbsOptions()
        absOptions.Init(buf, pos)
        return cls.InitFromObj(absOptions)

    @classmethod
    def InitFromObj(cls, absOptions):
        x = AbsOptionsT()
        x._UnPack(absOptions)
        return x

    # AbsOptionsT
    def _UnPack(self, absOptions):
        if absOptions is None:
            return

    # AbsOptionsT
    def Pack(self, builder):
        AbsOptionsStart(builder)
        absOptions = AbsOptionsEnd(builder)
        return absOptions
# automatically generated by the FlatBuffers compiler, do not modify

# namespace: tflite

class ActivationFunctionType(object):
    NONE = 0
    RELU = 1
    RELU_N1_TO_1 = 2
    RELU6 = 3
    TANH = 4
    SIGN_BIT = 5
# automatically generated by the FlatBuffers compiler, do not modify

# namespace: tflite

from flatbuffers.compat import import_numpy
np = import_numpy()

class AddNOptions(object):
    __slots__ = ['_tab']

    @classmethod
    def GetRootAs(cls, buf, offset=0):
        n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset)
        x = AddNOptions()
        x.Init(buf, n + offset)
        return x

    @classmethod
    def GetRootAsAddNOptions(cls, buf, offset=0):
        """This method is deprecated. Please switch to GetRootAs."""
        return cls.GetRootAs(buf, offset)
    @classmethod
    def AddNOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False):
        return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed)

    # AddNOptions
    def Init(self, buf, pos):
        self._tab = flatbuffers.table.Table(buf, pos)

def AddNOptionsStart(builder): builder.StartObject(0)
def Start(builder):
    return AddNOptionsStart(builder)
def AddNOptionsEnd(builder): return builder.EndObject()
def End(builder):
    return AddNOptionsEnd(builder)

class AddNOptionsT(object):

    # AddNOptionsT
    def __init__(self):
        pass

    @classmethod
    def InitFromBuf(cls, buf, pos):
        addNoptions = AddNOptions()
        addNoptions.Init(buf, pos)
        return cls.InitFromObj(addNoptions)

    @classmethod
    def InitFromObj(cls, addNoptions):
        x = AddNOptionsT()
        x._UnPack(addNoptions)
        return x

    # AddNOptionsT
    def _UnPack(self, addNoptions):
        if addNoptions is None:
            return

    # AddNOptionsT
    def Pack(self, builder):
        AddNOptionsStart(builder)
        addNoptions = AddNOptionsEnd(builder)
        return addNoptions
# automatically generated by the FlatBuffers compiler, do not modify

# namespace: tflite

from flatbuffers.compat import import_numpy
np = import_numpy()

class AddOptions(object):
    __slots__ = ['_tab']

    @classmethod
    def GetRootAs(cls, buf, offset=0):
        n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset)
        x = AddOptions()
        x.Init(buf, n + offset)
        return x

    @classmethod
    def GetRootAsAddOptions(cls, buf, offset=0):
        """This method is deprecated. Please switch to GetRootAs."""
        return cls.GetRootAs(buf, offset)
    @classmethod
    def AddOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False):
        return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed)

    # AddOptions
    def Init(self, buf, pos):
        self._tab = flatbuffers.table.Table(buf, pos)

    # AddOptions
    def FusedActivationFunction(self):
        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4))
        if o != 0:
            return self._tab.Get(flatbuffers.number_types.Int8Flags, o + self._tab.Pos)
        return 0

    # AddOptions
    def PotScaleInt16(self):
        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6))
        if o != 0:
            return bool(self._tab.Get(flatbuffers.number_types.BoolFlags, o + self._tab.Pos))
        return True

def AddOptionsStart(builder): builder.StartObject(2)
def Start(builder):
    return AddOptionsStart(builder)
def AddOptionsAddFusedActivationFunction(builder, fusedActivationFunction): builder.PrependInt8Slot(0, fusedActivationFunction, 0)
def AddFusedActivationFunction(builder, fusedActivationFunction):
    return AddOptionsAddFusedActivationFunction(builder, fusedActivationFunction)
def AddOptionsAddPotScaleInt16(builder, potScaleInt16): builder.PrependBoolSlot(1, potScaleInt16, 1)
def AddPotScaleInt16(builder, potScaleInt16):
    return AddOptionsAddPotScaleInt16(builder, potScaleInt16)
def AddOptionsEnd(builder): return builder.EndObject()
def End(builder):
    return AddOptionsEnd(builder)

class AddOptionsT(object):

    # AddOptionsT
    def __init__(self):
        self.fusedActivationFunction = 0  # type: int
        self.potScaleInt16 = True  # type: bool

    @classmethod
    def InitFromBuf(cls, buf, pos):
        addOptions = AddOptions()
        addOptions.Init(buf, pos)
        return cls.InitFromObj(addOptions)

    @classmethod
    def InitFromObj(cls, addOptions):
        x = AddOptionsT()
        x._UnPack(addOptions)
        return x

    # AddOptionsT
    def _UnPack(self, addOptions):
        if addOptions is None:
            return
        self.fusedActivationFunction = addOptions.FusedActivationFunction()
        self.potScaleInt16 = addOptions.PotScaleInt16()

    # AddOptionsT
    def Pack(self, builder):
        AddOptionsStart(builder)
        AddOptionsAddFusedActivationFunction(builder, self.fusedActivationFunction)
        AddOptionsAddPotScaleInt16(builder, self.potScaleInt16)
        addOptions = AddOptionsEnd(builder)
        return addOptions
# automatically generated by the FlatBuffers compiler, do not modify

# namespace: tflite

from flatbuffers.compat import import_numpy
np = import_numpy()

class ArgMaxOptions(object):
    __slots__ = ['_tab']

    @classmethod
    def GetRootAs(cls, buf, offset=0):
        n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset)
        x = ArgMaxOptions()
        x.Init(buf, n + offset)
        return x

    @classmethod
    def GetRootAsArgMaxOptions(cls, buf, offset=0):
        """This method is deprecated. Please switch to GetRootAs."""
        return cls.GetRootAs(buf, offset)
    @classmethod
    def ArgMaxOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False):
        return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed)

    # ArgMaxOptions
    def Init(self, buf, pos):
        self._tab = flatbuffers.table.Table(buf, pos)

    # ArgMaxOptions
    def OutputType(self):
        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4))
        if o != 0:
            return self._tab.Get(flatbuffers.number_types.Int8Flags, o + self._tab.Pos)
        return 0

def ArgMaxOptionsStart(builder): builder.StartObject(1)
def Start(builder):
    return ArgMaxOptionsStart(builder)
def ArgMaxOptionsAddOutputType(builder, outputType): builder.PrependInt8Slot(0, outputType, 0)
def AddOutputType(builder, outputType):
    return ArgMaxOptionsAddOutputType(builder, outputType)
def ArgMaxOptionsEnd(builder): return builder.EndObject()
def End(builder):
    return ArgMaxOptionsEnd(builder)

class ArgMaxOptionsT(object):

    # ArgMaxOptionsT
    def __init__(self):
+ self.outputType = 0 # type: int + + @classmethod + def InitFromBuf(cls, buf, pos): + argMaxOptions = ArgMaxOptions() + argMaxOptions.Init(buf, pos) + return cls.InitFromObj(argMaxOptions) + + @classmethod + def InitFromObj(cls, argMaxOptions): + x = ArgMaxOptionsT() + x._UnPack(argMaxOptions) + return x + + # ArgMaxOptionsT + def _UnPack(self, argMaxOptions): + if argMaxOptions is None: + return + self.outputType = argMaxOptions.OutputType() + + # ArgMaxOptionsT + def Pack(self, builder): + ArgMaxOptionsStart(builder) + ArgMaxOptionsAddOutputType(builder, self.outputType) + argMaxOptions = ArgMaxOptionsEnd(builder) + return argMaxOptions +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: tflite + +from flatbuffers.compat import import_numpy +np = import_numpy() + +class ArgMinOptions(object): + __slots__ = ['_tab'] + + @classmethod + def GetRootAs(cls, buf, offset=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = ArgMinOptions() + x.Init(buf, n + offset) + return x + + @classmethod + def GetRootAsArgMinOptions(cls, buf, offset=0): + """This method is deprecated. 
Please switch to GetRootAs.""" + return cls.GetRootAs(buf, offset) + @classmethod + def ArgMinOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed) + + # ArgMinOptions + def Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + + # ArgMinOptions + def OutputType(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.Get(flatbuffers.number_types.Int8Flags, o + self._tab.Pos) + return 0 + +def ArgMinOptionsStart(builder): builder.StartObject(1) +def Start(builder): + return ArgMinOptionsStart(builder) +def ArgMinOptionsAddOutputType(builder, outputType): builder.PrependInt8Slot(0, outputType, 0) +def AddOutputType(builder, outputType): + return ArgMinOptionsAddOutputType(builder, outputType) +def ArgMinOptionsEnd(builder): return builder.EndObject() +def End(builder): + return ArgMinOptionsEnd(builder) + +class ArgMinOptionsT(object): + + # ArgMinOptionsT + def __init__(self): + self.outputType = 0 # type: int + + @classmethod + def InitFromBuf(cls, buf, pos): + argMinOptions = ArgMinOptions() + argMinOptions.Init(buf, pos) + return cls.InitFromObj(argMinOptions) + + @classmethod + def InitFromObj(cls, argMinOptions): + x = ArgMinOptionsT() + x._UnPack(argMinOptions) + return x + + # ArgMinOptionsT + def _UnPack(self, argMinOptions): + if argMinOptions is None: + return + self.outputType = argMinOptions.OutputType() + + # ArgMinOptionsT + def Pack(self, builder): + ArgMinOptionsStart(builder) + ArgMinOptionsAddOutputType(builder, self.outputType) + argMinOptions = ArgMinOptionsEnd(builder) + return argMinOptions +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: tflite + +from flatbuffers.compat import import_numpy +np = import_numpy() + +class AssignVariableOptions(object): + __slots__ = ['_tab'] + + @classmethod + def GetRootAs(cls, 
buf, offset=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = AssignVariableOptions() + x.Init(buf, n + offset) + return x + + @classmethod + def GetRootAsAssignVariableOptions(cls, buf, offset=0): + """This method is deprecated. Please switch to GetRootAs.""" + return cls.GetRootAs(buf, offset) + @classmethod + def AssignVariableOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed) + + # AssignVariableOptions + def Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + +def AssignVariableOptionsStart(builder): builder.StartObject(0) +def Start(builder): + return AssignVariableOptionsStart(builder) +def AssignVariableOptionsEnd(builder): return builder.EndObject() +def End(builder): + return AssignVariableOptionsEnd(builder) + +class AssignVariableOptionsT(object): + + # AssignVariableOptionsT + def __init__(self): + pass + + @classmethod + def InitFromBuf(cls, buf, pos): + assignVariableOptions = AssignVariableOptions() + assignVariableOptions.Init(buf, pos) + return cls.InitFromObj(assignVariableOptions) + + @classmethod + def InitFromObj(cls, assignVariableOptions): + x = AssignVariableOptionsT() + x._UnPack(assignVariableOptions) + return x + + # AssignVariableOptionsT + def _UnPack(self, assignVariableOptions): + if assignVariableOptions is None: + return + + # AssignVariableOptionsT + def Pack(self, builder): + AssignVariableOptionsStart(builder) + assignVariableOptions = AssignVariableOptionsEnd(builder) + return assignVariableOptions +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: tflite + +from flatbuffers.compat import import_numpy +np = import_numpy() + +class BatchMatMulOptions(object): + __slots__ = ['_tab'] + + @classmethod + def GetRootAs(cls, buf, offset=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = 
BatchMatMulOptions() + x.Init(buf, n + offset) + return x + + @classmethod + def GetRootAsBatchMatMulOptions(cls, buf, offset=0): + """This method is deprecated. Please switch to GetRootAs.""" + return cls.GetRootAs(buf, offset) + @classmethod + def BatchMatMulOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed) + + # BatchMatMulOptions + def Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + + # BatchMatMulOptions + def AdjX(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return bool(self._tab.Get(flatbuffers.number_types.BoolFlags, o + self._tab.Pos)) + return False + + # BatchMatMulOptions + def AdjY(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) + if o != 0: + return bool(self._tab.Get(flatbuffers.number_types.BoolFlags, o + self._tab.Pos)) + return False + + # BatchMatMulOptions + def AsymmetricQuantizeInputs(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8)) + if o != 0: + return bool(self._tab.Get(flatbuffers.number_types.BoolFlags, o + self._tab.Pos)) + return False + +def BatchMatMulOptionsStart(builder): builder.StartObject(3) +def Start(builder): + return BatchMatMulOptionsStart(builder) +def BatchMatMulOptionsAddAdjX(builder, adjX): builder.PrependBoolSlot(0, adjX, 0) +def AddAdjX(builder, adjX): + return BatchMatMulOptionsAddAdjX(builder, adjX) +def BatchMatMulOptionsAddAdjY(builder, adjY): builder.PrependBoolSlot(1, adjY, 0) +def AddAdjY(builder, adjY): + return BatchMatMulOptionsAddAdjY(builder, adjY) +def BatchMatMulOptionsAddAsymmetricQuantizeInputs(builder, asymmetricQuantizeInputs): builder.PrependBoolSlot(2, asymmetricQuantizeInputs, 0) +def AddAsymmetricQuantizeInputs(builder, asymmetricQuantizeInputs): + return BatchMatMulOptionsAddAsymmetricQuantizeInputs(builder, asymmetricQuantizeInputs) 
+def BatchMatMulOptionsEnd(builder): return builder.EndObject() +def End(builder): + return BatchMatMulOptionsEnd(builder) + +class BatchMatMulOptionsT(object): + + # BatchMatMulOptionsT + def __init__(self): + self.adjX = False # type: bool + self.adjY = False # type: bool + self.asymmetricQuantizeInputs = False # type: bool + + @classmethod + def InitFromBuf(cls, buf, pos): + batchMatMulOptions = BatchMatMulOptions() + batchMatMulOptions.Init(buf, pos) + return cls.InitFromObj(batchMatMulOptions) + + @classmethod + def InitFromObj(cls, batchMatMulOptions): + x = BatchMatMulOptionsT() + x._UnPack(batchMatMulOptions) + return x + + # BatchMatMulOptionsT + def _UnPack(self, batchMatMulOptions): + if batchMatMulOptions is None: + return + self.adjX = batchMatMulOptions.AdjX() + self.adjY = batchMatMulOptions.AdjY() + self.asymmetricQuantizeInputs = batchMatMulOptions.AsymmetricQuantizeInputs() + + # BatchMatMulOptionsT + def Pack(self, builder): + BatchMatMulOptionsStart(builder) + BatchMatMulOptionsAddAdjX(builder, self.adjX) + BatchMatMulOptionsAddAdjY(builder, self.adjY) + BatchMatMulOptionsAddAsymmetricQuantizeInputs(builder, self.asymmetricQuantizeInputs) + batchMatMulOptions = BatchMatMulOptionsEnd(builder) + return batchMatMulOptions +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: tflite + +from flatbuffers.compat import import_numpy +np = import_numpy() + +class BatchToSpaceNDOptions(object): + __slots__ = ['_tab'] + + @classmethod + def GetRootAs(cls, buf, offset=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = BatchToSpaceNDOptions() + x.Init(buf, n + offset) + return x + + @classmethod + def GetRootAsBatchToSpaceNDOptions(cls, buf, offset=0): + """This method is deprecated. 
Please switch to GetRootAs.""" + return cls.GetRootAs(buf, offset) + @classmethod + def BatchToSpaceNDOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed) + + # BatchToSpaceNDOptions + def Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + +def BatchToSpaceNDOptionsStart(builder): builder.StartObject(0) +def Start(builder): + return BatchToSpaceNDOptionsStart(builder) +def BatchToSpaceNDOptionsEnd(builder): return builder.EndObject() +def End(builder): + return BatchToSpaceNDOptionsEnd(builder) + +class BatchToSpaceNDOptionsT(object): + + # BatchToSpaceNDOptionsT + def __init__(self): + pass + + @classmethod + def InitFromBuf(cls, buf, pos): + batchToSpaceNdoptions = BatchToSpaceNDOptions() + batchToSpaceNdoptions.Init(buf, pos) + return cls.InitFromObj(batchToSpaceNdoptions) + + @classmethod + def InitFromObj(cls, batchToSpaceNdoptions): + x = BatchToSpaceNDOptionsT() + x._UnPack(batchToSpaceNdoptions) + return x + + # BatchToSpaceNDOptionsT + def _UnPack(self, batchToSpaceNdoptions): + if batchToSpaceNdoptions is None: + return + + # BatchToSpaceNDOptionsT + def Pack(self, builder): + BatchToSpaceNDOptionsStart(builder) + batchToSpaceNdoptions = BatchToSpaceNDOptionsEnd(builder) + return batchToSpaceNdoptions +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: tflite + +from flatbuffers.compat import import_numpy +np = import_numpy() + +class BidirectionalSequenceLSTMOptions(object): + __slots__ = ['_tab'] + + @classmethod + def GetRootAs(cls, buf, offset=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = BidirectionalSequenceLSTMOptions() + x.Init(buf, n + offset) + return x + + @classmethod + def GetRootAsBidirectionalSequenceLSTMOptions(cls, buf, offset=0): + """This method is deprecated. 
Please switch to GetRootAs.""" + return cls.GetRootAs(buf, offset) + @classmethod + def BidirectionalSequenceLSTMOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed) + + # BidirectionalSequenceLSTMOptions + def Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + + # BidirectionalSequenceLSTMOptions + def FusedActivationFunction(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.Get(flatbuffers.number_types.Int8Flags, o + self._tab.Pos) + return 0 + + # BidirectionalSequenceLSTMOptions + def CellClip(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) + if o != 0: + return self._tab.Get(flatbuffers.number_types.Float32Flags, o + self._tab.Pos) + return 0.0 + + # BidirectionalSequenceLSTMOptions + def ProjClip(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8)) + if o != 0: + return self._tab.Get(flatbuffers.number_types.Float32Flags, o + self._tab.Pos) + return 0.0 + + # BidirectionalSequenceLSTMOptions + def MergeOutputs(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(10)) + if o != 0: + return bool(self._tab.Get(flatbuffers.number_types.BoolFlags, o + self._tab.Pos)) + return False + + # BidirectionalSequenceLSTMOptions + def TimeMajor(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(12)) + if o != 0: + return bool(self._tab.Get(flatbuffers.number_types.BoolFlags, o + self._tab.Pos)) + return True + + # BidirectionalSequenceLSTMOptions + def AsymmetricQuantizeInputs(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(14)) + if o != 0: + return bool(self._tab.Get(flatbuffers.number_types.BoolFlags, o + self._tab.Pos)) + return False + +def BidirectionalSequenceLSTMOptionsStart(builder): builder.StartObject(6) +def 
Start(builder): + return BidirectionalSequenceLSTMOptionsStart(builder) +def BidirectionalSequenceLSTMOptionsAddFusedActivationFunction(builder, fusedActivationFunction): builder.PrependInt8Slot(0, fusedActivationFunction, 0) +def AddFusedActivationFunction(builder, fusedActivationFunction): + return BidirectionalSequenceLSTMOptionsAddFusedActivationFunction(builder, fusedActivationFunction) +def BidirectionalSequenceLSTMOptionsAddCellClip(builder, cellClip): builder.PrependFloat32Slot(1, cellClip, 0.0) +def AddCellClip(builder, cellClip): + return BidirectionalSequenceLSTMOptionsAddCellClip(builder, cellClip) +def BidirectionalSequenceLSTMOptionsAddProjClip(builder, projClip): builder.PrependFloat32Slot(2, projClip, 0.0) +def AddProjClip(builder, projClip): + return BidirectionalSequenceLSTMOptionsAddProjClip(builder, projClip) +def BidirectionalSequenceLSTMOptionsAddMergeOutputs(builder, mergeOutputs): builder.PrependBoolSlot(3, mergeOutputs, 0) +def AddMergeOutputs(builder, mergeOutputs): + return BidirectionalSequenceLSTMOptionsAddMergeOutputs(builder, mergeOutputs) +def BidirectionalSequenceLSTMOptionsAddTimeMajor(builder, timeMajor): builder.PrependBoolSlot(4, timeMajor, 1) +def AddTimeMajor(builder, timeMajor): + return BidirectionalSequenceLSTMOptionsAddTimeMajor(builder, timeMajor) +def BidirectionalSequenceLSTMOptionsAddAsymmetricQuantizeInputs(builder, asymmetricQuantizeInputs): builder.PrependBoolSlot(5, asymmetricQuantizeInputs, 0) +def AddAsymmetricQuantizeInputs(builder, asymmetricQuantizeInputs): + return BidirectionalSequenceLSTMOptionsAddAsymmetricQuantizeInputs(builder, asymmetricQuantizeInputs) +def BidirectionalSequenceLSTMOptionsEnd(builder): return builder.EndObject() +def End(builder): + return BidirectionalSequenceLSTMOptionsEnd(builder) + +class BidirectionalSequenceLSTMOptionsT(object): + + # BidirectionalSequenceLSTMOptionsT + def __init__(self): + self.fusedActivationFunction = 0 # type: int + self.cellClip = 0.0 # type: float + 
self.projClip = 0.0 # type: float + self.mergeOutputs = False # type: bool + self.timeMajor = True # type: bool + self.asymmetricQuantizeInputs = False # type: bool + + @classmethod + def InitFromBuf(cls, buf, pos): + bidirectionalSequenceLstmoptions = BidirectionalSequenceLSTMOptions() + bidirectionalSequenceLstmoptions.Init(buf, pos) + return cls.InitFromObj(bidirectionalSequenceLstmoptions) + + @classmethod + def InitFromObj(cls, bidirectionalSequenceLstmoptions): + x = BidirectionalSequenceLSTMOptionsT() + x._UnPack(bidirectionalSequenceLstmoptions) + return x + + # BidirectionalSequenceLSTMOptionsT + def _UnPack(self, bidirectionalSequenceLstmoptions): + if bidirectionalSequenceLstmoptions is None: + return + self.fusedActivationFunction = bidirectionalSequenceLstmoptions.FusedActivationFunction() + self.cellClip = bidirectionalSequenceLstmoptions.CellClip() + self.projClip = bidirectionalSequenceLstmoptions.ProjClip() + self.mergeOutputs = bidirectionalSequenceLstmoptions.MergeOutputs() + self.timeMajor = bidirectionalSequenceLstmoptions.TimeMajor() + self.asymmetricQuantizeInputs = bidirectionalSequenceLstmoptions.AsymmetricQuantizeInputs() + + # BidirectionalSequenceLSTMOptionsT + def Pack(self, builder): + BidirectionalSequenceLSTMOptionsStart(builder) + BidirectionalSequenceLSTMOptionsAddFusedActivationFunction(builder, self.fusedActivationFunction) + BidirectionalSequenceLSTMOptionsAddCellClip(builder, self.cellClip) + BidirectionalSequenceLSTMOptionsAddProjClip(builder, self.projClip) + BidirectionalSequenceLSTMOptionsAddMergeOutputs(builder, self.mergeOutputs) + BidirectionalSequenceLSTMOptionsAddTimeMajor(builder, self.timeMajor) + BidirectionalSequenceLSTMOptionsAddAsymmetricQuantizeInputs(builder, self.asymmetricQuantizeInputs) + bidirectionalSequenceLstmoptions = BidirectionalSequenceLSTMOptionsEnd(builder) + return bidirectionalSequenceLstmoptions +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: tflite + +from 
flatbuffers.compat import import_numpy +np = import_numpy() + +class BidirectionalSequenceRNNOptions(object): + __slots__ = ['_tab'] + + @classmethod + def GetRootAs(cls, buf, offset=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = BidirectionalSequenceRNNOptions() + x.Init(buf, n + offset) + return x + + @classmethod + def GetRootAsBidirectionalSequenceRNNOptions(cls, buf, offset=0): + """This method is deprecated. Please switch to GetRootAs.""" + return cls.GetRootAs(buf, offset) + @classmethod + def BidirectionalSequenceRNNOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed) + + # BidirectionalSequenceRNNOptions + def Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + + # BidirectionalSequenceRNNOptions + def TimeMajor(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return bool(self._tab.Get(flatbuffers.number_types.BoolFlags, o + self._tab.Pos)) + return False + + # BidirectionalSequenceRNNOptions + def FusedActivationFunction(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) + if o != 0: + return self._tab.Get(flatbuffers.number_types.Int8Flags, o + self._tab.Pos) + return 0 + + # BidirectionalSequenceRNNOptions + def MergeOutputs(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8)) + if o != 0: + return bool(self._tab.Get(flatbuffers.number_types.BoolFlags, o + self._tab.Pos)) + return False + + # BidirectionalSequenceRNNOptions + def AsymmetricQuantizeInputs(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(10)) + if o != 0: + return bool(self._tab.Get(flatbuffers.number_types.BoolFlags, o + self._tab.Pos)) + return False + +def BidirectionalSequenceRNNOptionsStart(builder): builder.StartObject(4) +def Start(builder): + return 
BidirectionalSequenceRNNOptionsStart(builder) +def BidirectionalSequenceRNNOptionsAddTimeMajor(builder, timeMajor): builder.PrependBoolSlot(0, timeMajor, 0) +def AddTimeMajor(builder, timeMajor): + return BidirectionalSequenceRNNOptionsAddTimeMajor(builder, timeMajor) +def BidirectionalSequenceRNNOptionsAddFusedActivationFunction(builder, fusedActivationFunction): builder.PrependInt8Slot(1, fusedActivationFunction, 0) +def AddFusedActivationFunction(builder, fusedActivationFunction): + return BidirectionalSequenceRNNOptionsAddFusedActivationFunction(builder, fusedActivationFunction) +def BidirectionalSequenceRNNOptionsAddMergeOutputs(builder, mergeOutputs): builder.PrependBoolSlot(2, mergeOutputs, 0) +def AddMergeOutputs(builder, mergeOutputs): + return BidirectionalSequenceRNNOptionsAddMergeOutputs(builder, mergeOutputs) +def BidirectionalSequenceRNNOptionsAddAsymmetricQuantizeInputs(builder, asymmetricQuantizeInputs): builder.PrependBoolSlot(3, asymmetricQuantizeInputs, 0) +def AddAsymmetricQuantizeInputs(builder, asymmetricQuantizeInputs): + return BidirectionalSequenceRNNOptionsAddAsymmetricQuantizeInputs(builder, asymmetricQuantizeInputs) +def BidirectionalSequenceRNNOptionsEnd(builder): return builder.EndObject() +def End(builder): + return BidirectionalSequenceRNNOptionsEnd(builder) + +class BidirectionalSequenceRNNOptionsT(object): + + # BidirectionalSequenceRNNOptionsT + def __init__(self): + self.timeMajor = False # type: bool + self.fusedActivationFunction = 0 # type: int + self.mergeOutputs = False # type: bool + self.asymmetricQuantizeInputs = False # type: bool + + @classmethod + def InitFromBuf(cls, buf, pos): + bidirectionalSequenceRnnoptions = BidirectionalSequenceRNNOptions() + bidirectionalSequenceRnnoptions.Init(buf, pos) + return cls.InitFromObj(bidirectionalSequenceRnnoptions) + + @classmethod + def InitFromObj(cls, bidirectionalSequenceRnnoptions): + x = BidirectionalSequenceRNNOptionsT() + x._UnPack(bidirectionalSequenceRnnoptions) + return 
x + + # BidirectionalSequenceRNNOptionsT + def _UnPack(self, bidirectionalSequenceRnnoptions): + if bidirectionalSequenceRnnoptions is None: + return + self.timeMajor = bidirectionalSequenceRnnoptions.TimeMajor() + self.fusedActivationFunction = bidirectionalSequenceRnnoptions.FusedActivationFunction() + self.mergeOutputs = bidirectionalSequenceRnnoptions.MergeOutputs() + self.asymmetricQuantizeInputs = bidirectionalSequenceRnnoptions.AsymmetricQuantizeInputs() + + # BidirectionalSequenceRNNOptionsT + def Pack(self, builder): + BidirectionalSequenceRNNOptionsStart(builder) + BidirectionalSequenceRNNOptionsAddTimeMajor(builder, self.timeMajor) + BidirectionalSequenceRNNOptionsAddFusedActivationFunction(builder, self.fusedActivationFunction) + BidirectionalSequenceRNNOptionsAddMergeOutputs(builder, self.mergeOutputs) + BidirectionalSequenceRNNOptionsAddAsymmetricQuantizeInputs(builder, self.asymmetricQuantizeInputs) + bidirectionalSequenceRnnoptions = BidirectionalSequenceRNNOptionsEnd(builder) + return bidirectionalSequenceRnnoptions +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: tflite + +from flatbuffers.compat import import_numpy +np = import_numpy() + +class BitcastOptions(object): + __slots__ = ['_tab'] + + @classmethod + def GetRootAs(cls, buf, offset=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = BitcastOptions() + x.Init(buf, n + offset) + return x + + @classmethod + def GetRootAsBitcastOptions(cls, buf, offset=0): + """This method is deprecated. 
Please switch to GetRootAs.""" + return cls.GetRootAs(buf, offset) + @classmethod + def BitcastOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed) + + # BitcastOptions + def Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + +def BitcastOptionsStart(builder): builder.StartObject(0) +def Start(builder): + return BitcastOptionsStart(builder) +def BitcastOptionsEnd(builder): return builder.EndObject() +def End(builder): + return BitcastOptionsEnd(builder) + +class BitcastOptionsT(object): + + # BitcastOptionsT + def __init__(self): + pass + + @classmethod + def InitFromBuf(cls, buf, pos): + bitcastOptions = BitcastOptions() + bitcastOptions.Init(buf, pos) + return cls.InitFromObj(bitcastOptions) + + @classmethod + def InitFromObj(cls, bitcastOptions): + x = BitcastOptionsT() + x._UnPack(bitcastOptions) + return x + + # BitcastOptionsT + def _UnPack(self, bitcastOptions): + if bitcastOptions is None: + return + + # BitcastOptionsT + def Pack(self, builder): + BitcastOptionsStart(builder) + bitcastOptions = BitcastOptionsEnd(builder) + return bitcastOptions +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: tflite + +from flatbuffers.compat import import_numpy +np = import_numpy() + +class BitwiseXorOptions(object): + __slots__ = ['_tab'] + + @classmethod + def GetRootAs(cls, buf, offset=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = BitwiseXorOptions() + x.Init(buf, n + offset) + return x + + @classmethod + def GetRootAsBitwiseXorOptions(cls, buf, offset=0): + """This method is deprecated. 
Please switch to GetRootAs.""" + return cls.GetRootAs(buf, offset) + @classmethod + def BitwiseXorOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed) + + # BitwiseXorOptions + def Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + +def BitwiseXorOptionsStart(builder): builder.StartObject(0) +def Start(builder): + return BitwiseXorOptionsStart(builder) +def BitwiseXorOptionsEnd(builder): return builder.EndObject() +def End(builder): + return BitwiseXorOptionsEnd(builder) + +class BitwiseXorOptionsT(object): + + # BitwiseXorOptionsT + def __init__(self): + pass + + @classmethod + def InitFromBuf(cls, buf, pos): + bitwiseXorOptions = BitwiseXorOptions() + bitwiseXorOptions.Init(buf, pos) + return cls.InitFromObj(bitwiseXorOptions) + + @classmethod + def InitFromObj(cls, bitwiseXorOptions): + x = BitwiseXorOptionsT() + x._UnPack(bitwiseXorOptions) + return x + + # BitwiseXorOptionsT + def _UnPack(self, bitwiseXorOptions): + if bitwiseXorOptions is None: + return + + # BitwiseXorOptionsT + def Pack(self, builder): + BitwiseXorOptionsStart(builder) + bitwiseXorOptions = BitwiseXorOptionsEnd(builder) + return bitwiseXorOptions +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: tflite + +from flatbuffers.compat import import_numpy +np = import_numpy() + +class BroadcastToOptions(object): + __slots__ = ['_tab'] + + @classmethod + def GetRootAs(cls, buf, offset=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = BroadcastToOptions() + x.Init(buf, n + offset) + return x + + @classmethod + def GetRootAsBroadcastToOptions(cls, buf, offset=0): + """This method is deprecated. 
Please switch to GetRootAs.""" + return cls.GetRootAs(buf, offset) + @classmethod + def BroadcastToOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed) + + # BroadcastToOptions + def Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + +def BroadcastToOptionsStart(builder): builder.StartObject(0) +def Start(builder): + return BroadcastToOptionsStart(builder) +def BroadcastToOptionsEnd(builder): return builder.EndObject() +def End(builder): + return BroadcastToOptionsEnd(builder) + +class BroadcastToOptionsT(object): + + # BroadcastToOptionsT + def __init__(self): + pass + + @classmethod + def InitFromBuf(cls, buf, pos): + broadcastToOptions = BroadcastToOptions() + broadcastToOptions.Init(buf, pos) + return cls.InitFromObj(broadcastToOptions) + + @classmethod + def InitFromObj(cls, broadcastToOptions): + x = BroadcastToOptionsT() + x._UnPack(broadcastToOptions) + return x + + # BroadcastToOptionsT + def _UnPack(self, broadcastToOptions): + if broadcastToOptions is None: + return + + # BroadcastToOptionsT + def Pack(self, builder): + BroadcastToOptionsStart(builder) + broadcastToOptions = BroadcastToOptionsEnd(builder) + return broadcastToOptions +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: tflite + +from flatbuffers.compat import import_numpy +np = import_numpy() + +class BucketizeOptions(object): + __slots__ = ['_tab'] + + @classmethod + def GetRootAs(cls, buf, offset=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = BucketizeOptions() + x.Init(buf, n + offset) + return x + + @classmethod + def GetRootAsBucketizeOptions(cls, buf, offset=0): + """This method is deprecated. 
Please switch to GetRootAs.""" + return cls.GetRootAs(buf, offset) + @classmethod + def BucketizeOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed) + + # BucketizeOptions + def Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + + # BucketizeOptions + def Boundaries(self, j): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + a = self._tab.Vector(o) + return self._tab.Get(flatbuffers.number_types.Float32Flags, a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 4)) + return 0 + + # BucketizeOptions + def BoundariesAsNumpy(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Float32Flags, o) + return 0 + + # BucketizeOptions + def BoundariesLength(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.VectorLen(o) + return 0 + + # BucketizeOptions + def BoundariesIsNone(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + return o == 0 + +def BucketizeOptionsStart(builder): builder.StartObject(1) +def Start(builder): + return BucketizeOptionsStart(builder) +def BucketizeOptionsAddBoundaries(builder, boundaries): builder.PrependUOffsetTRelativeSlot(0, flatbuffers.number_types.UOffsetTFlags.py_type(boundaries), 0) +def AddBoundaries(builder, boundaries): + return BucketizeOptionsAddBoundaries(builder, boundaries) +def BucketizeOptionsStartBoundariesVector(builder, numElems): return builder.StartVector(4, numElems, 4) +def StartBoundariesVector(builder, numElems): + return BucketizeOptionsStartBoundariesVector(builder, numElems) +def BucketizeOptionsEnd(builder): return builder.EndObject() +def End(builder): + return BucketizeOptionsEnd(builder) +try: + from typing import List +except: + pass + 
+class BucketizeOptionsT(object): + + # BucketizeOptionsT + def __init__(self): + self.boundaries = None # type: List[float] + + @classmethod + def InitFromBuf(cls, buf, pos): + bucketizeOptions = BucketizeOptions() + bucketizeOptions.Init(buf, pos) + return cls.InitFromObj(bucketizeOptions) + + @classmethod + def InitFromObj(cls, bucketizeOptions): + x = BucketizeOptionsT() + x._UnPack(bucketizeOptions) + return x + + # BucketizeOptionsT + def _UnPack(self, bucketizeOptions): + if bucketizeOptions is None: + return + if not bucketizeOptions.BoundariesIsNone(): + if np is None: + self.boundaries = [] + for i in range(bucketizeOptions.BoundariesLength()): + self.boundaries.append(bucketizeOptions.Boundaries(i)) + else: + self.boundaries = bucketizeOptions.BoundariesAsNumpy() + + # BucketizeOptionsT + def Pack(self, builder): + if self.boundaries is not None: + if np is not None and type(self.boundaries) is np.ndarray: + boundaries = builder.CreateNumpyVector(self.boundaries) + else: + BucketizeOptionsStartBoundariesVector(builder, len(self.boundaries)) + for i in reversed(range(len(self.boundaries))): + builder.PrependFloat32(self.boundaries[i]) + boundaries = builder.EndVector() + BucketizeOptionsStart(builder) + if self.boundaries is not None: + BucketizeOptionsAddBoundaries(builder, boundaries) + bucketizeOptions = BucketizeOptionsEnd(builder) + return bucketizeOptions +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: tflite + +from flatbuffers.compat import import_numpy +np = import_numpy() + +class Buffer(object): + __slots__ = ['_tab'] + + @classmethod + def GetRootAs(cls, buf, offset=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = Buffer() + x.Init(buf, n + offset) + return x + + @classmethod + def GetRootAsBuffer(cls, buf, offset=0): + """This method is deprecated. 
Please switch to GetRootAs.""" + return cls.GetRootAs(buf, offset) + @classmethod + def BufferBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed) + + # Buffer + def Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + + # Buffer + def Data(self, j): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + a = self._tab.Vector(o) + return self._tab.Get(flatbuffers.number_types.Uint8Flags, a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 1)) + return 0 + + # Buffer + def DataAsNumpy(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Uint8Flags, o) + return 0 + + # Buffer + def DataLength(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.VectorLen(o) + return 0 + + # Buffer + def DataIsNone(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + return o == 0 + + # Buffer + def Offset(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) + if o != 0: + return self._tab.Get(flatbuffers.number_types.Uint64Flags, o + self._tab.Pos) + return 0 + + # Buffer + def Size(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8)) + if o != 0: + return self._tab.Get(flatbuffers.number_types.Uint64Flags, o + self._tab.Pos) + return 0 + +def BufferStart(builder): builder.StartObject(3) +def Start(builder): + return BufferStart(builder) +def BufferAddData(builder, data): builder.PrependUOffsetTRelativeSlot(0, flatbuffers.number_types.UOffsetTFlags.py_type(data), 0) +def AddData(builder, data): + return BufferAddData(builder, data) +def BufferStartDataVector(builder, numElems): return builder.StartVector(1, numElems, 1) +def StartDataVector(builder, numElems): + return 
BufferStartDataVector(builder, numElems) +def BufferAddOffset(builder, offset): builder.PrependUint64Slot(1, offset, 0) +def AddOffset(builder, offset): + return BufferAddOffset(builder, offset) +def BufferAddSize(builder, size): builder.PrependUint64Slot(2, size, 0) +def AddSize(builder, size): + return BufferAddSize(builder, size) +def BufferEnd(builder): return builder.EndObject() +def End(builder): + return BufferEnd(builder) +try: + from typing import List +except: + pass + +class BufferT(object): + + # BufferT + def __init__(self): + self.data = None # type: List[int] + self.offset = 0 # type: int + self.size = 0 # type: int + + @classmethod + def InitFromBuf(cls, buf, pos): + buffer = Buffer() + buffer.Init(buf, pos) + return cls.InitFromObj(buffer) + + @classmethod + def InitFromObj(cls, buffer): + x = BufferT() + x._UnPack(buffer) + return x + + # BufferT + def _UnPack(self, buffer): + if buffer is None: + return + if not buffer.DataIsNone(): + if np is None: + self.data = [] + for i in range(buffer.DataLength()): + self.data.append(buffer.Data(i)) + else: + self.data = buffer.DataAsNumpy() + self.offset = buffer.Offset() + self.size = buffer.Size() + + # BufferT + def Pack(self, builder): + if self.data is not None: + if np is not None and type(self.data) is np.ndarray: + data = builder.CreateNumpyVector(self.data) + else: + BufferStartDataVector(builder, len(self.data)) + for i in reversed(range(len(self.data))): + builder.PrependUint8(self.data[i]) + data = builder.EndVector() + BufferStart(builder) + if self.data is not None: + BufferAddData(builder, data) + BufferAddOffset(builder, self.offset) + BufferAddSize(builder, self.size) + buffer = BufferEnd(builder) + return buffer +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: tflite + +class BuiltinOperator(object): + ADD = 0 + AVERAGE_POOL_2D = 1 + CONCATENATION = 2 + CONV_2D = 3 + DEPTHWISE_CONV_2D = 4 + DEPTH_TO_SPACE = 5 + DEQUANTIZE = 6 + EMBEDDING_LOOKUP = 7 + 
FLOOR = 8 + FULLY_CONNECTED = 9 + HASHTABLE_LOOKUP = 10 + L2_NORMALIZATION = 11 + L2_POOL_2D = 12 + LOCAL_RESPONSE_NORMALIZATION = 13 + LOGISTIC = 14 + LSH_PROJECTION = 15 + LSTM = 16 + MAX_POOL_2D = 17 + MUL = 18 + RELU = 19 + RELU_N1_TO_1 = 20 + RELU6 = 21 + RESHAPE = 22 + RESIZE_BILINEAR = 23 + RNN = 24 + SOFTMAX = 25 + SPACE_TO_DEPTH = 26 + SVDF = 27 + TANH = 28 + CONCAT_EMBEDDINGS = 29 + SKIP_GRAM = 30 + CALL = 31 + CUSTOM = 32 + EMBEDDING_LOOKUP_SPARSE = 33 + PAD = 34 + UNIDIRECTIONAL_SEQUENCE_RNN = 35 + GATHER = 36 + BATCH_TO_SPACE_ND = 37 + SPACE_TO_BATCH_ND = 38 + TRANSPOSE = 39 + MEAN = 40 + SUB = 41 + DIV = 42 + SQUEEZE = 43 + UNIDIRECTIONAL_SEQUENCE_LSTM = 44 + STRIDED_SLICE = 45 + BIDIRECTIONAL_SEQUENCE_RNN = 46 + EXP = 47 + TOPK_V2 = 48 + SPLIT = 49 + LOG_SOFTMAX = 50 + DELEGATE = 51 + BIDIRECTIONAL_SEQUENCE_LSTM = 52 + CAST = 53 + PRELU = 54 + MAXIMUM = 55 + ARG_MAX = 56 + MINIMUM = 57 + LESS = 58 + NEG = 59 + PADV2 = 60 + GREATER = 61 + GREATER_EQUAL = 62 + LESS_EQUAL = 63 + SELECT = 64 + SLICE = 65 + SIN = 66 + TRANSPOSE_CONV = 67 + SPARSE_TO_DENSE = 68 + TILE = 69 + EXPAND_DIMS = 70 + EQUAL = 71 + NOT_EQUAL = 72 + LOG = 73 + SUM = 74 + SQRT = 75 + RSQRT = 76 + SHAPE = 77 + POW = 78 + ARG_MIN = 79 + FAKE_QUANT = 80 + REDUCE_PROD = 81 + REDUCE_MAX = 82 + PACK = 83 + LOGICAL_OR = 84 + ONE_HOT = 85 + LOGICAL_AND = 86 + LOGICAL_NOT = 87 + UNPACK = 88 + REDUCE_MIN = 89 + FLOOR_DIV = 90 + REDUCE_ANY = 91 + SQUARE = 92 + ZEROS_LIKE = 93 + FILL = 94 + FLOOR_MOD = 95 + RANGE = 96 + RESIZE_NEAREST_NEIGHBOR = 97 + LEAKY_RELU = 98 + SQUARED_DIFFERENCE = 99 + MIRROR_PAD = 100 + ABS = 101 + SPLIT_V = 102 + UNIQUE = 103 + CEIL = 104 + REVERSE_V2 = 105 + ADD_N = 106 + GATHER_ND = 107 + COS = 108 + WHERE = 109 + RANK = 110 + ELU = 111 + REVERSE_SEQUENCE = 112 + MATRIX_DIAG = 113 + QUANTIZE = 114 + MATRIX_SET_DIAG = 115 + ROUND = 116 + HARD_SWISH = 117 + IF = 118 + WHILE = 119 + NON_MAX_SUPPRESSION_V4 = 120 + NON_MAX_SUPPRESSION_V5 = 121 + SCATTER_ND = 122 + 
SELECT_V2 = 123 + DENSIFY = 124 + SEGMENT_SUM = 125 + BATCH_MATMUL = 126 + PLACEHOLDER_FOR_GREATER_OP_CODES = 127 + CUMSUM = 128 + CALL_ONCE = 129 + BROADCAST_TO = 130 + RFFT2D = 131 + CONV_3D = 132 + IMAG = 133 + REAL = 134 + COMPLEX_ABS = 135 + HASHTABLE = 136 + HASHTABLE_FIND = 137 + HASHTABLE_IMPORT = 138 + HASHTABLE_SIZE = 139 + REDUCE_ALL = 140 + CONV_3D_TRANSPOSE = 141 + VAR_HANDLE = 142 + READ_VARIABLE = 143 + ASSIGN_VARIABLE = 144 + BROADCAST_ARGS = 145 + RANDOM_STANDARD_NORMAL = 146 + BUCKETIZE = 147 + RANDOM_UNIFORM = 148 + MULTINOMIAL = 149 + GELU = 150 + DYNAMIC_UPDATE_SLICE = 151 + RELU_0_TO_1 = 152 + UNSORTED_SEGMENT_PROD = 153 + UNSORTED_SEGMENT_MAX = 154 + UNSORTED_SEGMENT_SUM = 155 + ATAN2 = 156 + UNSORTED_SEGMENT_MIN = 157 + SIGN = 158 + BITCAST = 159 + BITWISE_XOR = 160 + RIGHT_SHIFT = 161 +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: tflite + +class BuiltinOptions(object): + NONE = 0 + Conv2DOptions = 1 + DepthwiseConv2DOptions = 2 + ConcatEmbeddingsOptions = 3 + LSHProjectionOptions = 4 + Pool2DOptions = 5 + SVDFOptions = 6 + RNNOptions = 7 + FullyConnectedOptions = 8 + SoftmaxOptions = 9 + ConcatenationOptions = 10 + AddOptions = 11 + L2NormOptions = 12 + LocalResponseNormalizationOptions = 13 + LSTMOptions = 14 + ResizeBilinearOptions = 15 + CallOptions = 16 + ReshapeOptions = 17 + SkipGramOptions = 18 + SpaceToDepthOptions = 19 + EmbeddingLookupSparseOptions = 20 + MulOptions = 21 + PadOptions = 22 + GatherOptions = 23 + BatchToSpaceNDOptions = 24 + SpaceToBatchNDOptions = 25 + TransposeOptions = 26 + ReducerOptions = 27 + SubOptions = 28 + DivOptions = 29 + SqueezeOptions = 30 + SequenceRNNOptions = 31 + StridedSliceOptions = 32 + ExpOptions = 33 + TopKV2Options = 34 + SplitOptions = 35 + LogSoftmaxOptions = 36 + CastOptions = 37 + DequantizeOptions = 38 + MaximumMinimumOptions = 39 + ArgMaxOptions = 40 + LessOptions = 41 + NegOptions = 42 + PadV2Options = 43 + GreaterOptions = 44 + 
GreaterEqualOptions = 45 + LessEqualOptions = 46 + SelectOptions = 47 + SliceOptions = 48 + TransposeConvOptions = 49 + SparseToDenseOptions = 50 + TileOptions = 51 + ExpandDimsOptions = 52 + EqualOptions = 53 + NotEqualOptions = 54 + ShapeOptions = 55 + PowOptions = 56 + ArgMinOptions = 57 + FakeQuantOptions = 58 + PackOptions = 59 + LogicalOrOptions = 60 + OneHotOptions = 61 + LogicalAndOptions = 62 + LogicalNotOptions = 63 + UnpackOptions = 64 + FloorDivOptions = 65 + SquareOptions = 66 + ZerosLikeOptions = 67 + FillOptions = 68 + BidirectionalSequenceLSTMOptions = 69 + BidirectionalSequenceRNNOptions = 70 + UnidirectionalSequenceLSTMOptions = 71 + FloorModOptions = 72 + RangeOptions = 73 + ResizeNearestNeighborOptions = 74 + LeakyReluOptions = 75 + SquaredDifferenceOptions = 76 + MirrorPadOptions = 77 + AbsOptions = 78 + SplitVOptions = 79 + UniqueOptions = 80 + ReverseV2Options = 81 + AddNOptions = 82 + GatherNdOptions = 83 + CosOptions = 84 + WhereOptions = 85 + RankOptions = 86 + ReverseSequenceOptions = 87 + MatrixDiagOptions = 88 + QuantizeOptions = 89 + MatrixSetDiagOptions = 90 + HardSwishOptions = 91 + IfOptions = 92 + WhileOptions = 93 + DepthToSpaceOptions = 94 + NonMaxSuppressionV4Options = 95 + NonMaxSuppressionV5Options = 96 + ScatterNdOptions = 97 + SelectV2Options = 98 + DensifyOptions = 99 + SegmentSumOptions = 100 + BatchMatMulOptions = 101 + CumsumOptions = 102 + CallOnceOptions = 103 + BroadcastToOptions = 104 + Rfft2dOptions = 105 + Conv3DOptions = 106 + HashtableOptions = 107 + HashtableFindOptions = 108 + HashtableImportOptions = 109 + HashtableSizeOptions = 110 + VarHandleOptions = 111 + ReadVariableOptions = 112 + AssignVariableOptions = 113 + RandomOptions = 114 + BucketizeOptions = 115 + GeluOptions = 116 + DynamicUpdateSliceOptions = 117 + UnsortedSegmentProdOptions = 118 + UnsortedSegmentMaxOptions = 119 + UnsortedSegmentMinOptions = 120 + UnsortedSegmentSumOptions = 121 + ATan2Options = 122 + SignOptions = 123 + BitcastOptions = 124 
+ BitwiseXorOptions = 125 + RightShiftOptions = 126 + +def BuiltinOptionsCreator(unionType, table): + from flatbuffers.table import Table + if not isinstance(table, Table): + return None + if unionType == BuiltinOptions().Conv2DOptions: + return Conv2DOptionsT.InitFromBuf(table.Bytes, table.Pos) + if unionType == BuiltinOptions().DepthwiseConv2DOptions: + return DepthwiseConv2DOptionsT.InitFromBuf(table.Bytes, table.Pos) + if unionType == BuiltinOptions().ConcatEmbeddingsOptions: + return ConcatEmbeddingsOptionsT.InitFromBuf(table.Bytes, table.Pos) + if unionType == BuiltinOptions().LSHProjectionOptions: + return LSHProjectionOptionsT.InitFromBuf(table.Bytes, table.Pos) + if unionType == BuiltinOptions().Pool2DOptions: + return Pool2DOptionsT.InitFromBuf(table.Bytes, table.Pos) + if unionType == BuiltinOptions().SVDFOptions: + return SVDFOptionsT.InitFromBuf(table.Bytes, table.Pos) + if unionType == BuiltinOptions().RNNOptions: + return RNNOptionsT.InitFromBuf(table.Bytes, table.Pos) + if unionType == BuiltinOptions().FullyConnectedOptions: + return FullyConnectedOptionsT.InitFromBuf(table.Bytes, table.Pos) + if unionType == BuiltinOptions().SoftmaxOptions: + return SoftmaxOptionsT.InitFromBuf(table.Bytes, table.Pos) + if unionType == BuiltinOptions().ConcatenationOptions: + return ConcatenationOptionsT.InitFromBuf(table.Bytes, table.Pos) + if unionType == BuiltinOptions().AddOptions: + return AddOptionsT.InitFromBuf(table.Bytes, table.Pos) + if unionType == BuiltinOptions().L2NormOptions: + return L2NormOptionsT.InitFromBuf(table.Bytes, table.Pos) + if unionType == BuiltinOptions().LocalResponseNormalizationOptions: + return LocalResponseNormalizationOptionsT.InitFromBuf(table.Bytes, table.Pos) + if unionType == BuiltinOptions().LSTMOptions: + return LSTMOptionsT.InitFromBuf(table.Bytes, table.Pos) + if unionType == BuiltinOptions().ResizeBilinearOptions: + return ResizeBilinearOptionsT.InitFromBuf(table.Bytes, table.Pos) + if unionType == 
BuiltinOptions().CallOptions: + return CallOptionsT.InitFromBuf(table.Bytes, table.Pos) + if unionType == BuiltinOptions().ReshapeOptions: + return ReshapeOptionsT.InitFromBuf(table.Bytes, table.Pos) + if unionType == BuiltinOptions().SkipGramOptions: + return SkipGramOptionsT.InitFromBuf(table.Bytes, table.Pos) + if unionType == BuiltinOptions().SpaceToDepthOptions: + return SpaceToDepthOptionsT.InitFromBuf(table.Bytes, table.Pos) + if unionType == BuiltinOptions().EmbeddingLookupSparseOptions: + return EmbeddingLookupSparseOptionsT.InitFromBuf(table.Bytes, table.Pos) + if unionType == BuiltinOptions().MulOptions: + return MulOptionsT.InitFromBuf(table.Bytes, table.Pos) + if unionType == BuiltinOptions().PadOptions: + return PadOptionsT.InitFromBuf(table.Bytes, table.Pos) + if unionType == BuiltinOptions().GatherOptions: + return GatherOptionsT.InitFromBuf(table.Bytes, table.Pos) + if unionType == BuiltinOptions().BatchToSpaceNDOptions: + return BatchToSpaceNDOptionsT.InitFromBuf(table.Bytes, table.Pos) + if unionType == BuiltinOptions().SpaceToBatchNDOptions: + return SpaceToBatchNDOptionsT.InitFromBuf(table.Bytes, table.Pos) + if unionType == BuiltinOptions().TransposeOptions: + return TransposeOptionsT.InitFromBuf(table.Bytes, table.Pos) + if unionType == BuiltinOptions().ReducerOptions: + return ReducerOptionsT.InitFromBuf(table.Bytes, table.Pos) + if unionType == BuiltinOptions().SubOptions: + return SubOptionsT.InitFromBuf(table.Bytes, table.Pos) + if unionType == BuiltinOptions().DivOptions: + return DivOptionsT.InitFromBuf(table.Bytes, table.Pos) + if unionType == BuiltinOptions().SqueezeOptions: + return SqueezeOptionsT.InitFromBuf(table.Bytes, table.Pos) + if unionType == BuiltinOptions().SequenceRNNOptions: + return SequenceRNNOptionsT.InitFromBuf(table.Bytes, table.Pos) + if unionType == BuiltinOptions().StridedSliceOptions: + return StridedSliceOptionsT.InitFromBuf(table.Bytes, table.Pos) + if unionType == BuiltinOptions().ExpOptions: + return 
ExpOptionsT.InitFromBuf(table.Bytes, table.Pos) + if unionType == BuiltinOptions().TopKV2Options: + return TopKV2OptionsT.InitFromBuf(table.Bytes, table.Pos) + if unionType == BuiltinOptions().SplitOptions: + return SplitOptionsT.InitFromBuf(table.Bytes, table.Pos) + if unionType == BuiltinOptions().LogSoftmaxOptions: + return LogSoftmaxOptionsT.InitFromBuf(table.Bytes, table.Pos) + if unionType == BuiltinOptions().CastOptions: + return CastOptionsT.InitFromBuf(table.Bytes, table.Pos) + if unionType == BuiltinOptions().DequantizeOptions: + return DequantizeOptionsT.InitFromBuf(table.Bytes, table.Pos) + if unionType == BuiltinOptions().MaximumMinimumOptions: + return MaximumMinimumOptionsT.InitFromBuf(table.Bytes, table.Pos) + if unionType == BuiltinOptions().ArgMaxOptions: + return ArgMaxOptionsT.InitFromBuf(table.Bytes, table.Pos) + if unionType == BuiltinOptions().LessOptions: + return LessOptionsT.InitFromBuf(table.Bytes, table.Pos) + if unionType == BuiltinOptions().NegOptions: + return NegOptionsT.InitFromBuf(table.Bytes, table.Pos) + if unionType == BuiltinOptions().PadV2Options: + return PadV2OptionsT.InitFromBuf(table.Bytes, table.Pos) + if unionType == BuiltinOptions().GreaterOptions: + return GreaterOptionsT.InitFromBuf(table.Bytes, table.Pos) + if unionType == BuiltinOptions().GreaterEqualOptions: + return GreaterEqualOptionsT.InitFromBuf(table.Bytes, table.Pos) + if unionType == BuiltinOptions().LessEqualOptions: + return LessEqualOptionsT.InitFromBuf(table.Bytes, table.Pos) + if unionType == BuiltinOptions().SelectOptions: + return SelectOptionsT.InitFromBuf(table.Bytes, table.Pos) + if unionType == BuiltinOptions().SliceOptions: + return SliceOptionsT.InitFromBuf(table.Bytes, table.Pos) + if unionType == BuiltinOptions().TransposeConvOptions: + return TransposeConvOptionsT.InitFromBuf(table.Bytes, table.Pos) + if unionType == BuiltinOptions().SparseToDenseOptions: + return SparseToDenseOptionsT.InitFromBuf(table.Bytes, table.Pos) + if unionType == 
BuiltinOptions().TileOptions: + return TileOptionsT.InitFromBuf(table.Bytes, table.Pos) + if unionType == BuiltinOptions().ExpandDimsOptions: + return ExpandDimsOptionsT.InitFromBuf(table.Bytes, table.Pos) + if unionType == BuiltinOptions().EqualOptions: + return EqualOptionsT.InitFromBuf(table.Bytes, table.Pos) + if unionType == BuiltinOptions().NotEqualOptions: + return NotEqualOptionsT.InitFromBuf(table.Bytes, table.Pos) + if unionType == BuiltinOptions().ShapeOptions: + return ShapeOptionsT.InitFromBuf(table.Bytes, table.Pos) + if unionType == BuiltinOptions().PowOptions: + return PowOptionsT.InitFromBuf(table.Bytes, table.Pos) + if unionType == BuiltinOptions().ArgMinOptions: + return ArgMinOptionsT.InitFromBuf(table.Bytes, table.Pos) + if unionType == BuiltinOptions().FakeQuantOptions: + return FakeQuantOptionsT.InitFromBuf(table.Bytes, table.Pos) + if unionType == BuiltinOptions().PackOptions: + return PackOptionsT.InitFromBuf(table.Bytes, table.Pos) + if unionType == BuiltinOptions().LogicalOrOptions: + return LogicalOrOptionsT.InitFromBuf(table.Bytes, table.Pos) + if unionType == BuiltinOptions().OneHotOptions: + return OneHotOptionsT.InitFromBuf(table.Bytes, table.Pos) + if unionType == BuiltinOptions().LogicalAndOptions: + return LogicalAndOptionsT.InitFromBuf(table.Bytes, table.Pos) + if unionType == BuiltinOptions().LogicalNotOptions: + return LogicalNotOptionsT.InitFromBuf(table.Bytes, table.Pos) + if unionType == BuiltinOptions().UnpackOptions: + return UnpackOptionsT.InitFromBuf(table.Bytes, table.Pos) + if unionType == BuiltinOptions().FloorDivOptions: + return FloorDivOptionsT.InitFromBuf(table.Bytes, table.Pos) + if unionType == BuiltinOptions().SquareOptions: + return SquareOptionsT.InitFromBuf(table.Bytes, table.Pos) + if unionType == BuiltinOptions().ZerosLikeOptions: + return ZerosLikeOptionsT.InitFromBuf(table.Bytes, table.Pos) + if unionType == BuiltinOptions().FillOptions: + return FillOptionsT.InitFromBuf(table.Bytes, table.Pos) + if 
unionType == BuiltinOptions().BidirectionalSequenceLSTMOptions: + return BidirectionalSequenceLSTMOptionsT.InitFromBuf(table.Bytes, table.Pos) + if unionType == BuiltinOptions().BidirectionalSequenceRNNOptions: + return BidirectionalSequenceRNNOptionsT.InitFromBuf(table.Bytes, table.Pos) + if unionType == BuiltinOptions().UnidirectionalSequenceLSTMOptions: + return UnidirectionalSequenceLSTMOptionsT.InitFromBuf(table.Bytes, table.Pos) + if unionType == BuiltinOptions().FloorModOptions: + return FloorModOptionsT.InitFromBuf(table.Bytes, table.Pos) + if unionType == BuiltinOptions().RangeOptions: + return RangeOptionsT.InitFromBuf(table.Bytes, table.Pos) + if unionType == BuiltinOptions().ResizeNearestNeighborOptions: + return ResizeNearestNeighborOptionsT.InitFromBuf(table.Bytes, table.Pos) + if unionType == BuiltinOptions().LeakyReluOptions: + return LeakyReluOptionsT.InitFromBuf(table.Bytes, table.Pos) + if unionType == BuiltinOptions().SquaredDifferenceOptions: + return SquaredDifferenceOptionsT.InitFromBuf(table.Bytes, table.Pos) + if unionType == BuiltinOptions().MirrorPadOptions: + return MirrorPadOptionsT.InitFromBuf(table.Bytes, table.Pos) + if unionType == BuiltinOptions().AbsOptions: + return AbsOptionsT.InitFromBuf(table.Bytes, table.Pos) + if unionType == BuiltinOptions().SplitVOptions: + return SplitVOptionsT.InitFromBuf(table.Bytes, table.Pos) + if unionType == BuiltinOptions().UniqueOptions: + return UniqueOptionsT.InitFromBuf(table.Bytes, table.Pos) + if unionType == BuiltinOptions().ReverseV2Options: + return ReverseV2OptionsT.InitFromBuf(table.Bytes, table.Pos) + if unionType == BuiltinOptions().AddNOptions: + return AddNOptionsT.InitFromBuf(table.Bytes, table.Pos) + if unionType == BuiltinOptions().GatherNdOptions: + return GatherNdOptionsT.InitFromBuf(table.Bytes, table.Pos) + if unionType == BuiltinOptions().CosOptions: + return CosOptionsT.InitFromBuf(table.Bytes, table.Pos) + if unionType == BuiltinOptions().WhereOptions: + return 
WhereOptionsT.InitFromBuf(table.Bytes, table.Pos) + if unionType == BuiltinOptions().RankOptions: + return RankOptionsT.InitFromBuf(table.Bytes, table.Pos) + if unionType == BuiltinOptions().ReverseSequenceOptions: + return ReverseSequenceOptionsT.InitFromBuf(table.Bytes, table.Pos) + if unionType == BuiltinOptions().MatrixDiagOptions: + return MatrixDiagOptionsT.InitFromBuf(table.Bytes, table.Pos) + if unionType == BuiltinOptions().QuantizeOptions: + return QuantizeOptionsT.InitFromBuf(table.Bytes, table.Pos) + if unionType == BuiltinOptions().MatrixSetDiagOptions: + return MatrixSetDiagOptionsT.InitFromBuf(table.Bytes, table.Pos) + if unionType == BuiltinOptions().HardSwishOptions: + return HardSwishOptionsT.InitFromBuf(table.Bytes, table.Pos) + if unionType == BuiltinOptions().IfOptions: + return IfOptionsT.InitFromBuf(table.Bytes, table.Pos) + if unionType == BuiltinOptions().WhileOptions: + return WhileOptionsT.InitFromBuf(table.Bytes, table.Pos) + if unionType == BuiltinOptions().DepthToSpaceOptions: + return DepthToSpaceOptionsT.InitFromBuf(table.Bytes, table.Pos) + if unionType == BuiltinOptions().NonMaxSuppressionV4Options: + return NonMaxSuppressionV4OptionsT.InitFromBuf(table.Bytes, table.Pos) + if unionType == BuiltinOptions().NonMaxSuppressionV5Options: + return NonMaxSuppressionV5OptionsT.InitFromBuf(table.Bytes, table.Pos) + if unionType == BuiltinOptions().ScatterNdOptions: + return ScatterNdOptionsT.InitFromBuf(table.Bytes, table.Pos) + if unionType == BuiltinOptions().SelectV2Options: + return SelectV2OptionsT.InitFromBuf(table.Bytes, table.Pos) + if unionType == BuiltinOptions().DensifyOptions: + return DensifyOptionsT.InitFromBuf(table.Bytes, table.Pos) + if unionType == BuiltinOptions().SegmentSumOptions: + return SegmentSumOptionsT.InitFromBuf(table.Bytes, table.Pos) + if unionType == BuiltinOptions().BatchMatMulOptions: + return BatchMatMulOptionsT.InitFromBuf(table.Bytes, table.Pos) + if unionType == BuiltinOptions().CumsumOptions: + return 
CumsumOptionsT.InitFromBuf(table.Bytes, table.Pos) + if unionType == BuiltinOptions().CallOnceOptions: + return CallOnceOptionsT.InitFromBuf(table.Bytes, table.Pos) + if unionType == BuiltinOptions().BroadcastToOptions: + return BroadcastToOptionsT.InitFromBuf(table.Bytes, table.Pos) + if unionType == BuiltinOptions().Rfft2dOptions: + return Rfft2dOptionsT.InitFromBuf(table.Bytes, table.Pos) + if unionType == BuiltinOptions().Conv3DOptions: + return Conv3DOptionsT.InitFromBuf(table.Bytes, table.Pos) + if unionType == BuiltinOptions().HashtableOptions: + return HashtableOptionsT.InitFromBuf(table.Bytes, table.Pos) + if unionType == BuiltinOptions().HashtableFindOptions: + return HashtableFindOptionsT.InitFromBuf(table.Bytes, table.Pos) + if unionType == BuiltinOptions().HashtableImportOptions: + return HashtableImportOptionsT.InitFromBuf(table.Bytes, table.Pos) + if unionType == BuiltinOptions().HashtableSizeOptions: + return HashtableSizeOptionsT.InitFromBuf(table.Bytes, table.Pos) + if unionType == BuiltinOptions().VarHandleOptions: + return VarHandleOptionsT.InitFromBuf(table.Bytes, table.Pos) + if unionType == BuiltinOptions().ReadVariableOptions: + return ReadVariableOptionsT.InitFromBuf(table.Bytes, table.Pos) + if unionType == BuiltinOptions().AssignVariableOptions: + return AssignVariableOptionsT.InitFromBuf(table.Bytes, table.Pos) + if unionType == BuiltinOptions().RandomOptions: + return RandomOptionsT.InitFromBuf(table.Bytes, table.Pos) + if unionType == BuiltinOptions().BucketizeOptions: + return BucketizeOptionsT.InitFromBuf(table.Bytes, table.Pos) + if unionType == BuiltinOptions().GeluOptions: + return GeluOptionsT.InitFromBuf(table.Bytes, table.Pos) + if unionType == BuiltinOptions().DynamicUpdateSliceOptions: + return DynamicUpdateSliceOptionsT.InitFromBuf(table.Bytes, table.Pos) + if unionType == BuiltinOptions().UnsortedSegmentProdOptions: + return UnsortedSegmentProdOptionsT.InitFromBuf(table.Bytes, table.Pos) + if unionType == 
BuiltinOptions().UnsortedSegmentMaxOptions: + return UnsortedSegmentMaxOptionsT.InitFromBuf(table.Bytes, table.Pos) + if unionType == BuiltinOptions().UnsortedSegmentMinOptions: + return UnsortedSegmentMinOptionsT.InitFromBuf(table.Bytes, table.Pos) + if unionType == BuiltinOptions().UnsortedSegmentSumOptions: + return UnsortedSegmentSumOptionsT.InitFromBuf(table.Bytes, table.Pos) + if unionType == BuiltinOptions().ATan2Options: + return ATan2OptionsT.InitFromBuf(table.Bytes, table.Pos) + if unionType == BuiltinOptions().SignOptions: + return SignOptionsT.InitFromBuf(table.Bytes, table.Pos) + if unionType == BuiltinOptions().BitcastOptions: + return BitcastOptionsT.InitFromBuf(table.Bytes, table.Pos) + if unionType == BuiltinOptions().BitwiseXorOptions: + return BitwiseXorOptionsT.InitFromBuf(table.Bytes, table.Pos) + if unionType == BuiltinOptions().RightShiftOptions: + return RightShiftOptionsT.InitFromBuf(table.Bytes, table.Pos) + return None +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: tflite + +from flatbuffers.compat import import_numpy +np = import_numpy() + +class CallOnceOptions(object): + __slots__ = ['_tab'] + + @classmethod + def GetRootAs(cls, buf, offset=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = CallOnceOptions() + x.Init(buf, n + offset) + return x + + @classmethod + def GetRootAsCallOnceOptions(cls, buf, offset=0): + """This method is deprecated. 
Please switch to GetRootAs.""" + return cls.GetRootAs(buf, offset) + @classmethod + def CallOnceOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed) + + # CallOnceOptions + def Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + + # CallOnceOptions + def InitSubgraphIndex(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.Get(flatbuffers.number_types.Int32Flags, o + self._tab.Pos) + return 0 + +def CallOnceOptionsStart(builder): builder.StartObject(1) +def Start(builder): + return CallOnceOptionsStart(builder) +def CallOnceOptionsAddInitSubgraphIndex(builder, initSubgraphIndex): builder.PrependInt32Slot(0, initSubgraphIndex, 0) +def AddInitSubgraphIndex(builder, initSubgraphIndex): + return CallOnceOptionsAddInitSubgraphIndex(builder, initSubgraphIndex) +def CallOnceOptionsEnd(builder): return builder.EndObject() +def End(builder): + return CallOnceOptionsEnd(builder) + +class CallOnceOptionsT(object): + + # CallOnceOptionsT + def __init__(self): + self.initSubgraphIndex = 0 # type: int + + @classmethod + def InitFromBuf(cls, buf, pos): + callOnceOptions = CallOnceOptions() + callOnceOptions.Init(buf, pos) + return cls.InitFromObj(callOnceOptions) + + @classmethod + def InitFromObj(cls, callOnceOptions): + x = CallOnceOptionsT() + x._UnPack(callOnceOptions) + return x + + # CallOnceOptionsT + def _UnPack(self, callOnceOptions): + if callOnceOptions is None: + return + self.initSubgraphIndex = callOnceOptions.InitSubgraphIndex() + + # CallOnceOptionsT + def Pack(self, builder): + CallOnceOptionsStart(builder) + CallOnceOptionsAddInitSubgraphIndex(builder, self.initSubgraphIndex) + callOnceOptions = CallOnceOptionsEnd(builder) + return callOnceOptions +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: tflite + +from 
flatbuffers.compat import import_numpy +np = import_numpy() + +class CallOptions(object): + __slots__ = ['_tab'] + + @classmethod + def GetRootAs(cls, buf, offset=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = CallOptions() + x.Init(buf, n + offset) + return x + + @classmethod + def GetRootAsCallOptions(cls, buf, offset=0): + """This method is deprecated. Please switch to GetRootAs.""" + return cls.GetRootAs(buf, offset) + @classmethod + def CallOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed) + + # CallOptions + def Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + + # CallOptions + def Subgraph(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.Get(flatbuffers.number_types.Uint32Flags, o + self._tab.Pos) + return 0 + +def CallOptionsStart(builder): builder.StartObject(1) +def Start(builder): + return CallOptionsStart(builder) +def CallOptionsAddSubgraph(builder, subgraph): builder.PrependUint32Slot(0, subgraph, 0) +def AddSubgraph(builder, subgraph): + return CallOptionsAddSubgraph(builder, subgraph) +def CallOptionsEnd(builder): return builder.EndObject() +def End(builder): + return CallOptionsEnd(builder) + +class CallOptionsT(object): + + # CallOptionsT + def __init__(self): + self.subgraph = 0 # type: int + + @classmethod + def InitFromBuf(cls, buf, pos): + callOptions = CallOptions() + callOptions.Init(buf, pos) + return cls.InitFromObj(callOptions) + + @classmethod + def InitFromObj(cls, callOptions): + x = CallOptionsT() + x._UnPack(callOptions) + return x + + # CallOptionsT + def _UnPack(self, callOptions): + if callOptions is None: + return + self.subgraph = callOptions.Subgraph() + + # CallOptionsT + def Pack(self, builder): + CallOptionsStart(builder) + CallOptionsAddSubgraph(builder, self.subgraph) + callOptions 
= CallOptionsEnd(builder) + return callOptions +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: tflite + +from flatbuffers.compat import import_numpy +np = import_numpy() + +class CastOptions(object): + __slots__ = ['_tab'] + + @classmethod + def GetRootAs(cls, buf, offset=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = CastOptions() + x.Init(buf, n + offset) + return x + + @classmethod + def GetRootAsCastOptions(cls, buf, offset=0): + """This method is deprecated. Please switch to GetRootAs.""" + return cls.GetRootAs(buf, offset) + @classmethod + def CastOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed) + + # CastOptions + def Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + + # CastOptions + def InDataType(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.Get(flatbuffers.number_types.Int8Flags, o + self._tab.Pos) + return 0 + + # CastOptions + def OutDataType(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) + if o != 0: + return self._tab.Get(flatbuffers.number_types.Int8Flags, o + self._tab.Pos) + return 0 + +def CastOptionsStart(builder): builder.StartObject(2) +def Start(builder): + return CastOptionsStart(builder) +def CastOptionsAddInDataType(builder, inDataType): builder.PrependInt8Slot(0, inDataType, 0) +def AddInDataType(builder, inDataType): + return CastOptionsAddInDataType(builder, inDataType) +def CastOptionsAddOutDataType(builder, outDataType): builder.PrependInt8Slot(1, outDataType, 0) +def AddOutDataType(builder, outDataType): + return CastOptionsAddOutDataType(builder, outDataType) +def CastOptionsEnd(builder): return builder.EndObject() +def End(builder): + return CastOptionsEnd(builder) + +class CastOptionsT(object): + + # CastOptionsT + 
def __init__(self): + self.inDataType = 0 # type: int + self.outDataType = 0 # type: int + + @classmethod + def InitFromBuf(cls, buf, pos): + castOptions = CastOptions() + castOptions.Init(buf, pos) + return cls.InitFromObj(castOptions) + + @classmethod + def InitFromObj(cls, castOptions): + x = CastOptionsT() + x._UnPack(castOptions) + return x + + # CastOptionsT + def _UnPack(self, castOptions): + if castOptions is None: + return + self.inDataType = castOptions.InDataType() + self.outDataType = castOptions.OutDataType() + + # CastOptionsT + def Pack(self, builder): + CastOptionsStart(builder) + CastOptionsAddInDataType(builder, self.inDataType) + CastOptionsAddOutDataType(builder, self.outDataType) + castOptions = CastOptionsEnd(builder) + return castOptions +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: tflite + +class CombinerType(object): + SUM = 0 + MEAN = 1 + SQRTN = 2 +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: tflite + +from flatbuffers.compat import import_numpy +np = import_numpy() + +class ConcatEmbeddingsOptions(object): + __slots__ = ['_tab'] + + @classmethod + def GetRootAs(cls, buf, offset=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = ConcatEmbeddingsOptions() + x.Init(buf, n + offset) + return x + + @classmethod + def GetRootAsConcatEmbeddingsOptions(cls, buf, offset=0): + """This method is deprecated. 
Please switch to GetRootAs.""" + return cls.GetRootAs(buf, offset) + @classmethod + def ConcatEmbeddingsOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed) + + # ConcatEmbeddingsOptions + def Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + + # ConcatEmbeddingsOptions + def NumChannels(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.Get(flatbuffers.number_types.Int32Flags, o + self._tab.Pos) + return 0 + + # ConcatEmbeddingsOptions + def NumColumnsPerChannel(self, j): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) + if o != 0: + a = self._tab.Vector(o) + return self._tab.Get(flatbuffers.number_types.Int32Flags, a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 4)) + return 0 + + # ConcatEmbeddingsOptions + def NumColumnsPerChannelAsNumpy(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) + if o != 0: + return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Int32Flags, o) + return 0 + + # ConcatEmbeddingsOptions + def NumColumnsPerChannelLength(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) + if o != 0: + return self._tab.VectorLen(o) + return 0 + + # ConcatEmbeddingsOptions + def NumColumnsPerChannelIsNone(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) + return o == 0 + + # ConcatEmbeddingsOptions + def EmbeddingDimPerChannel(self, j): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8)) + if o != 0: + a = self._tab.Vector(o) + return self._tab.Get(flatbuffers.number_types.Int32Flags, a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 4)) + return 0 + + # ConcatEmbeddingsOptions + def EmbeddingDimPerChannelAsNumpy(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8)) + if o 
!= 0: + return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Int32Flags, o) + return 0 + + # ConcatEmbeddingsOptions + def EmbeddingDimPerChannelLength(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8)) + if o != 0: + return self._tab.VectorLen(o) + return 0 + + # ConcatEmbeddingsOptions + def EmbeddingDimPerChannelIsNone(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8)) + return o == 0 + +def ConcatEmbeddingsOptionsStart(builder): builder.StartObject(3) +def Start(builder): + return ConcatEmbeddingsOptionsStart(builder) +def ConcatEmbeddingsOptionsAddNumChannels(builder, numChannels): builder.PrependInt32Slot(0, numChannels, 0) +def AddNumChannels(builder, numChannels): + return ConcatEmbeddingsOptionsAddNumChannels(builder, numChannels) +def ConcatEmbeddingsOptionsAddNumColumnsPerChannel(builder, numColumnsPerChannel): builder.PrependUOffsetTRelativeSlot(1, flatbuffers.number_types.UOffsetTFlags.py_type(numColumnsPerChannel), 0) +def AddNumColumnsPerChannel(builder, numColumnsPerChannel): + return ConcatEmbeddingsOptionsAddNumColumnsPerChannel(builder, numColumnsPerChannel) +def ConcatEmbeddingsOptionsStartNumColumnsPerChannelVector(builder, numElems): return builder.StartVector(4, numElems, 4) +def StartNumColumnsPerChannelVector(builder, numElems): + return ConcatEmbeddingsOptionsStartNumColumnsPerChannelVector(builder, numElems) +def ConcatEmbeddingsOptionsAddEmbeddingDimPerChannel(builder, embeddingDimPerChannel): builder.PrependUOffsetTRelativeSlot(2, flatbuffers.number_types.UOffsetTFlags.py_type(embeddingDimPerChannel), 0) +def AddEmbeddingDimPerChannel(builder, embeddingDimPerChannel): + return ConcatEmbeddingsOptionsAddEmbeddingDimPerChannel(builder, embeddingDimPerChannel) +def ConcatEmbeddingsOptionsStartEmbeddingDimPerChannelVector(builder, numElems): return builder.StartVector(4, numElems, 4) +def StartEmbeddingDimPerChannelVector(builder, numElems): + return 
ConcatEmbeddingsOptionsStartEmbeddingDimPerChannelVector(builder, numElems) +def ConcatEmbeddingsOptionsEnd(builder): return builder.EndObject() +def End(builder): + return ConcatEmbeddingsOptionsEnd(builder) +try: + from typing import List +except: + pass + +class ConcatEmbeddingsOptionsT(object): + + # ConcatEmbeddingsOptionsT + def __init__(self): + self.numChannels = 0 # type: int + self.numColumnsPerChannel = None # type: List[int] + self.embeddingDimPerChannel = None # type: List[int] + + @classmethod + def InitFromBuf(cls, buf, pos): + concatEmbeddingsOptions = ConcatEmbeddingsOptions() + concatEmbeddingsOptions.Init(buf, pos) + return cls.InitFromObj(concatEmbeddingsOptions) + + @classmethod + def InitFromObj(cls, concatEmbeddingsOptions): + x = ConcatEmbeddingsOptionsT() + x._UnPack(concatEmbeddingsOptions) + return x + + # ConcatEmbeddingsOptionsT + def _UnPack(self, concatEmbeddingsOptions): + if concatEmbeddingsOptions is None: + return + self.numChannels = concatEmbeddingsOptions.NumChannels() + if not concatEmbeddingsOptions.NumColumnsPerChannelIsNone(): + if np is None: + self.numColumnsPerChannel = [] + for i in range(concatEmbeddingsOptions.NumColumnsPerChannelLength()): + self.numColumnsPerChannel.append(concatEmbeddingsOptions.NumColumnsPerChannel(i)) + else: + self.numColumnsPerChannel = concatEmbeddingsOptions.NumColumnsPerChannelAsNumpy() + if not concatEmbeddingsOptions.EmbeddingDimPerChannelIsNone(): + if np is None: + self.embeddingDimPerChannel = [] + for i in range(concatEmbeddingsOptions.EmbeddingDimPerChannelLength()): + self.embeddingDimPerChannel.append(concatEmbeddingsOptions.EmbeddingDimPerChannel(i)) + else: + self.embeddingDimPerChannel = concatEmbeddingsOptions.EmbeddingDimPerChannelAsNumpy() + + # ConcatEmbeddingsOptionsT + def Pack(self, builder): + if self.numColumnsPerChannel is not None: + if np is not None and type(self.numColumnsPerChannel) is np.ndarray: + numColumnsPerChannel = 
builder.CreateNumpyVector(self.numColumnsPerChannel) + else: + ConcatEmbeddingsOptionsStartNumColumnsPerChannelVector(builder, len(self.numColumnsPerChannel)) + for i in reversed(range(len(self.numColumnsPerChannel))): + builder.PrependInt32(self.numColumnsPerChannel[i]) + numColumnsPerChannel = builder.EndVector() + if self.embeddingDimPerChannel is not None: + if np is not None and type(self.embeddingDimPerChannel) is np.ndarray: + embeddingDimPerChannel = builder.CreateNumpyVector(self.embeddingDimPerChannel) + else: + ConcatEmbeddingsOptionsStartEmbeddingDimPerChannelVector(builder, len(self.embeddingDimPerChannel)) + for i in reversed(range(len(self.embeddingDimPerChannel))): + builder.PrependInt32(self.embeddingDimPerChannel[i]) + embeddingDimPerChannel = builder.EndVector() + ConcatEmbeddingsOptionsStart(builder) + ConcatEmbeddingsOptionsAddNumChannels(builder, self.numChannels) + if self.numColumnsPerChannel is not None: + ConcatEmbeddingsOptionsAddNumColumnsPerChannel(builder, numColumnsPerChannel) + if self.embeddingDimPerChannel is not None: + ConcatEmbeddingsOptionsAddEmbeddingDimPerChannel(builder, embeddingDimPerChannel) + concatEmbeddingsOptions = ConcatEmbeddingsOptionsEnd(builder) + return concatEmbeddingsOptions +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: tflite + +from flatbuffers.compat import import_numpy +np = import_numpy() + +class ConcatenationOptions(object): + __slots__ = ['_tab'] + + @classmethod + def GetRootAs(cls, buf, offset=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = ConcatenationOptions() + x.Init(buf, n + offset) + return x + + @classmethod + def GetRootAsConcatenationOptions(cls, buf, offset=0): + """This method is deprecated. 
Please switch to GetRootAs.""" + return cls.GetRootAs(buf, offset) + @classmethod + def ConcatenationOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed) + + # ConcatenationOptions + def Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + + # ConcatenationOptions + def Axis(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.Get(flatbuffers.number_types.Int32Flags, o + self._tab.Pos) + return 0 + + # ConcatenationOptions + def FusedActivationFunction(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) + if o != 0: + return self._tab.Get(flatbuffers.number_types.Int8Flags, o + self._tab.Pos) + return 0 + +def ConcatenationOptionsStart(builder): builder.StartObject(2) +def Start(builder): + return ConcatenationOptionsStart(builder) +def ConcatenationOptionsAddAxis(builder, axis): builder.PrependInt32Slot(0, axis, 0) +def AddAxis(builder, axis): + return ConcatenationOptionsAddAxis(builder, axis) +def ConcatenationOptionsAddFusedActivationFunction(builder, fusedActivationFunction): builder.PrependInt8Slot(1, fusedActivationFunction, 0) +def AddFusedActivationFunction(builder, fusedActivationFunction): + return ConcatenationOptionsAddFusedActivationFunction(builder, fusedActivationFunction) +def ConcatenationOptionsEnd(builder): return builder.EndObject() +def End(builder): + return ConcatenationOptionsEnd(builder) + +class ConcatenationOptionsT(object): + + # ConcatenationOptionsT + def __init__(self): + self.axis = 0 # type: int + self.fusedActivationFunction = 0 # type: int + + @classmethod + def InitFromBuf(cls, buf, pos): + concatenationOptions = ConcatenationOptions() + concatenationOptions.Init(buf, pos) + return cls.InitFromObj(concatenationOptions) + + @classmethod + def InitFromObj(cls, concatenationOptions): + x = 
ConcatenationOptionsT() + x._UnPack(concatenationOptions) + return x + + # ConcatenationOptionsT + def _UnPack(self, concatenationOptions): + if concatenationOptions is None: + return + self.axis = concatenationOptions.Axis() + self.fusedActivationFunction = concatenationOptions.FusedActivationFunction() + + # ConcatenationOptionsT + def Pack(self, builder): + ConcatenationOptionsStart(builder) + ConcatenationOptionsAddAxis(builder, self.axis) + ConcatenationOptionsAddFusedActivationFunction(builder, self.fusedActivationFunction) + concatenationOptions = ConcatenationOptionsEnd(builder) + return concatenationOptions +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: tflite + +from flatbuffers.compat import import_numpy +np = import_numpy() + +class Conv2DOptions(object): + __slots__ = ['_tab'] + + @classmethod + def GetRootAs(cls, buf, offset=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = Conv2DOptions() + x.Init(buf, n + offset) + return x + + @classmethod + def GetRootAsConv2DOptions(cls, buf, offset=0): + """This method is deprecated. 
Please switch to GetRootAs.""" + return cls.GetRootAs(buf, offset) + @classmethod + def Conv2DOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed) + + # Conv2DOptions + def Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + + # Conv2DOptions + def Padding(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.Get(flatbuffers.number_types.Int8Flags, o + self._tab.Pos) + return 0 + + # Conv2DOptions + def StrideW(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) + if o != 0: + return self._tab.Get(flatbuffers.number_types.Int32Flags, o + self._tab.Pos) + return 0 + + # Conv2DOptions + def StrideH(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8)) + if o != 0: + return self._tab.Get(flatbuffers.number_types.Int32Flags, o + self._tab.Pos) + return 0 + + # Conv2DOptions + def FusedActivationFunction(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(10)) + if o != 0: + return self._tab.Get(flatbuffers.number_types.Int8Flags, o + self._tab.Pos) + return 0 + + # Conv2DOptions + def DilationWFactor(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(12)) + if o != 0: + return self._tab.Get(flatbuffers.number_types.Int32Flags, o + self._tab.Pos) + return 1 + + # Conv2DOptions + def DilationHFactor(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(14)) + if o != 0: + return self._tab.Get(flatbuffers.number_types.Int32Flags, o + self._tab.Pos) + return 1 + +def Conv2DOptionsStart(builder): builder.StartObject(6) +def Start(builder): + return Conv2DOptionsStart(builder) +def Conv2DOptionsAddPadding(builder, padding): builder.PrependInt8Slot(0, padding, 0) +def AddPadding(builder, padding): + return Conv2DOptionsAddPadding(builder, padding) 
+def Conv2DOptionsAddStrideW(builder, strideW): builder.PrependInt32Slot(1, strideW, 0) +def AddStrideW(builder, strideW): + return Conv2DOptionsAddStrideW(builder, strideW) +def Conv2DOptionsAddStrideH(builder, strideH): builder.PrependInt32Slot(2, strideH, 0) +def AddStrideH(builder, strideH): + return Conv2DOptionsAddStrideH(builder, strideH) +def Conv2DOptionsAddFusedActivationFunction(builder, fusedActivationFunction): builder.PrependInt8Slot(3, fusedActivationFunction, 0) +def AddFusedActivationFunction(builder, fusedActivationFunction): + return Conv2DOptionsAddFusedActivationFunction(builder, fusedActivationFunction) +def Conv2DOptionsAddDilationWFactor(builder, dilationWFactor): builder.PrependInt32Slot(4, dilationWFactor, 1) +def AddDilationWFactor(builder, dilationWFactor): + return Conv2DOptionsAddDilationWFactor(builder, dilationWFactor) +def Conv2DOptionsAddDilationHFactor(builder, dilationHFactor): builder.PrependInt32Slot(5, dilationHFactor, 1) +def AddDilationHFactor(builder, dilationHFactor): + return Conv2DOptionsAddDilationHFactor(builder, dilationHFactor) +def Conv2DOptionsEnd(builder): return builder.EndObject() +def End(builder): + return Conv2DOptionsEnd(builder) + +class Conv2DOptionsT(object): + + # Conv2DOptionsT + def __init__(self): + self.padding = 0 # type: int + self.strideW = 0 # type: int + self.strideH = 0 # type: int + self.fusedActivationFunction = 0 # type: int + self.dilationWFactor = 1 # type: int + self.dilationHFactor = 1 # type: int + + @classmethod + def InitFromBuf(cls, buf, pos): + conv2doptions = Conv2DOptions() + conv2doptions.Init(buf, pos) + return cls.InitFromObj(conv2doptions) + + @classmethod + def InitFromObj(cls, conv2doptions): + x = Conv2DOptionsT() + x._UnPack(conv2doptions) + return x + + # Conv2DOptionsT + def _UnPack(self, conv2doptions): + if conv2doptions is None: + return + self.padding = conv2doptions.Padding() + self.strideW = conv2doptions.StrideW() + self.strideH = conv2doptions.StrideH() + 
self.fusedActivationFunction = conv2doptions.FusedActivationFunction() + self.dilationWFactor = conv2doptions.DilationWFactor() + self.dilationHFactor = conv2doptions.DilationHFactor() + + # Conv2DOptionsT + def Pack(self, builder): + Conv2DOptionsStart(builder) + Conv2DOptionsAddPadding(builder, self.padding) + Conv2DOptionsAddStrideW(builder, self.strideW) + Conv2DOptionsAddStrideH(builder, self.strideH) + Conv2DOptionsAddFusedActivationFunction(builder, self.fusedActivationFunction) + Conv2DOptionsAddDilationWFactor(builder, self.dilationWFactor) + Conv2DOptionsAddDilationHFactor(builder, self.dilationHFactor) + conv2doptions = Conv2DOptionsEnd(builder) + return conv2doptions +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: tflite + +from flatbuffers.compat import import_numpy +np = import_numpy() + +class Conv3DOptions(object): + __slots__ = ['_tab'] + + @classmethod + def GetRootAs(cls, buf, offset=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = Conv3DOptions() + x.Init(buf, n + offset) + return x + + @classmethod + def GetRootAsConv3DOptions(cls, buf, offset=0): + """This method is deprecated. 
Please switch to GetRootAs.""" + return cls.GetRootAs(buf, offset) + @classmethod + def Conv3DOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed) + + # Conv3DOptions + def Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + + # Conv3DOptions + def Padding(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.Get(flatbuffers.number_types.Int8Flags, o + self._tab.Pos) + return 0 + + # Conv3DOptions + def StrideD(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) + if o != 0: + return self._tab.Get(flatbuffers.number_types.Int32Flags, o + self._tab.Pos) + return 0 + + # Conv3DOptions + def StrideW(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8)) + if o != 0: + return self._tab.Get(flatbuffers.number_types.Int32Flags, o + self._tab.Pos) + return 0 + + # Conv3DOptions + def StrideH(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(10)) + if o != 0: + return self._tab.Get(flatbuffers.number_types.Int32Flags, o + self._tab.Pos) + return 0 + + # Conv3DOptions + def FusedActivationFunction(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(12)) + if o != 0: + return self._tab.Get(flatbuffers.number_types.Int8Flags, o + self._tab.Pos) + return 0 + + # Conv3DOptions + def DilationDFactor(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(14)) + if o != 0: + return self._tab.Get(flatbuffers.number_types.Int32Flags, o + self._tab.Pos) + return 1 + + # Conv3DOptions + def DilationWFactor(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(16)) + if o != 0: + return self._tab.Get(flatbuffers.number_types.Int32Flags, o + self._tab.Pos) + return 1 + + # Conv3DOptions + def DilationHFactor(self): + o = 
flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(18)) + if o != 0: + return self._tab.Get(flatbuffers.number_types.Int32Flags, o + self._tab.Pos) + return 1 + +def Conv3DOptionsStart(builder): builder.StartObject(8) +def Start(builder): + return Conv3DOptionsStart(builder) +def Conv3DOptionsAddPadding(builder, padding): builder.PrependInt8Slot(0, padding, 0) +def AddPadding(builder, padding): + return Conv3DOptionsAddPadding(builder, padding) +def Conv3DOptionsAddStrideD(builder, strideD): builder.PrependInt32Slot(1, strideD, 0) +def AddStrideD(builder, strideD): + return Conv3DOptionsAddStrideD(builder, strideD) +def Conv3DOptionsAddStrideW(builder, strideW): builder.PrependInt32Slot(2, strideW, 0) +def AddStrideW(builder, strideW): + return Conv3DOptionsAddStrideW(builder, strideW) +def Conv3DOptionsAddStrideH(builder, strideH): builder.PrependInt32Slot(3, strideH, 0) +def AddStrideH(builder, strideH): + return Conv3DOptionsAddStrideH(builder, strideH) +def Conv3DOptionsAddFusedActivationFunction(builder, fusedActivationFunction): builder.PrependInt8Slot(4, fusedActivationFunction, 0) +def AddFusedActivationFunction(builder, fusedActivationFunction): + return Conv3DOptionsAddFusedActivationFunction(builder, fusedActivationFunction) +def Conv3DOptionsAddDilationDFactor(builder, dilationDFactor): builder.PrependInt32Slot(5, dilationDFactor, 1) +def AddDilationDFactor(builder, dilationDFactor): + return Conv3DOptionsAddDilationDFactor(builder, dilationDFactor) +def Conv3DOptionsAddDilationWFactor(builder, dilationWFactor): builder.PrependInt32Slot(6, dilationWFactor, 1) +def AddDilationWFactor(builder, dilationWFactor): + return Conv3DOptionsAddDilationWFactor(builder, dilationWFactor) +def Conv3DOptionsAddDilationHFactor(builder, dilationHFactor): builder.PrependInt32Slot(7, dilationHFactor, 1) +def AddDilationHFactor(builder, dilationHFactor): + return Conv3DOptionsAddDilationHFactor(builder, dilationHFactor) +def Conv3DOptionsEnd(builder): return 
builder.EndObject() +def End(builder): + return Conv3DOptionsEnd(builder) + +class Conv3DOptionsT(object): + + # Conv3DOptionsT + def __init__(self): + self.padding = 0 # type: int + self.strideD = 0 # type: int + self.strideW = 0 # type: int + self.strideH = 0 # type: int + self.fusedActivationFunction = 0 # type: int + self.dilationDFactor = 1 # type: int + self.dilationWFactor = 1 # type: int + self.dilationHFactor = 1 # type: int + + @classmethod + def InitFromBuf(cls, buf, pos): + conv3doptions = Conv3DOptions() + conv3doptions.Init(buf, pos) + return cls.InitFromObj(conv3doptions) + + @classmethod + def InitFromObj(cls, conv3doptions): + x = Conv3DOptionsT() + x._UnPack(conv3doptions) + return x + + # Conv3DOptionsT + def _UnPack(self, conv3doptions): + if conv3doptions is None: + return + self.padding = conv3doptions.Padding() + self.strideD = conv3doptions.StrideD() + self.strideW = conv3doptions.StrideW() + self.strideH = conv3doptions.StrideH() + self.fusedActivationFunction = conv3doptions.FusedActivationFunction() + self.dilationDFactor = conv3doptions.DilationDFactor() + self.dilationWFactor = conv3doptions.DilationWFactor() + self.dilationHFactor = conv3doptions.DilationHFactor() + + # Conv3DOptionsT + def Pack(self, builder): + Conv3DOptionsStart(builder) + Conv3DOptionsAddPadding(builder, self.padding) + Conv3DOptionsAddStrideD(builder, self.strideD) + Conv3DOptionsAddStrideW(builder, self.strideW) + Conv3DOptionsAddStrideH(builder, self.strideH) + Conv3DOptionsAddFusedActivationFunction(builder, self.fusedActivationFunction) + Conv3DOptionsAddDilationDFactor(builder, self.dilationDFactor) + Conv3DOptionsAddDilationWFactor(builder, self.dilationWFactor) + Conv3DOptionsAddDilationHFactor(builder, self.dilationHFactor) + conv3doptions = Conv3DOptionsEnd(builder) + return conv3doptions +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: tflite + +from flatbuffers.compat import import_numpy +np = import_numpy() + +class 
CosOptions(object): + __slots__ = ['_tab'] + + @classmethod + def GetRootAs(cls, buf, offset=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = CosOptions() + x.Init(buf, n + offset) + return x + + @classmethod + def GetRootAsCosOptions(cls, buf, offset=0): + """This method is deprecated. Please switch to GetRootAs.""" + return cls.GetRootAs(buf, offset) + @classmethod + def CosOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed) + + # CosOptions + def Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + +def CosOptionsStart(builder): builder.StartObject(0) +def Start(builder): + return CosOptionsStart(builder) +def CosOptionsEnd(builder): return builder.EndObject() +def End(builder): + return CosOptionsEnd(builder) + +class CosOptionsT(object): + + # CosOptionsT + def __init__(self): + pass + + @classmethod + def InitFromBuf(cls, buf, pos): + cosOptions = CosOptions() + cosOptions.Init(buf, pos) + return cls.InitFromObj(cosOptions) + + @classmethod + def InitFromObj(cls, cosOptions): + x = CosOptionsT() + x._UnPack(cosOptions) + return x + + # CosOptionsT + def _UnPack(self, cosOptions): + if cosOptions is None: + return + + # CosOptionsT + def Pack(self, builder): + CosOptionsStart(builder) + cosOptions = CosOptionsEnd(builder) + return cosOptions +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: tflite + +from flatbuffers.compat import import_numpy +np = import_numpy() + +class CumsumOptions(object): + __slots__ = ['_tab'] + + @classmethod + def GetRootAs(cls, buf, offset=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = CumsumOptions() + x.Init(buf, n + offset) + return x + + @classmethod + def GetRootAsCumsumOptions(cls, buf, offset=0): + """This method is deprecated. 
Please switch to GetRootAs.""" + return cls.GetRootAs(buf, offset) + @classmethod + def CumsumOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed) + + # CumsumOptions + def Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + + # CumsumOptions + def Exclusive(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return bool(self._tab.Get(flatbuffers.number_types.BoolFlags, o + self._tab.Pos)) + return False + + # CumsumOptions + def Reverse(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) + if o != 0: + return bool(self._tab.Get(flatbuffers.number_types.BoolFlags, o + self._tab.Pos)) + return False + +def CumsumOptionsStart(builder): builder.StartObject(2) +def Start(builder): + return CumsumOptionsStart(builder) +def CumsumOptionsAddExclusive(builder, exclusive): builder.PrependBoolSlot(0, exclusive, 0) +def AddExclusive(builder, exclusive): + return CumsumOptionsAddExclusive(builder, exclusive) +def CumsumOptionsAddReverse(builder, reverse): builder.PrependBoolSlot(1, reverse, 0) +def AddReverse(builder, reverse): + return CumsumOptionsAddReverse(builder, reverse) +def CumsumOptionsEnd(builder): return builder.EndObject() +def End(builder): + return CumsumOptionsEnd(builder) + +class CumsumOptionsT(object): + + # CumsumOptionsT + def __init__(self): + self.exclusive = False # type: bool + self.reverse = False # type: bool + + @classmethod + def InitFromBuf(cls, buf, pos): + cumsumOptions = CumsumOptions() + cumsumOptions.Init(buf, pos) + return cls.InitFromObj(cumsumOptions) + + @classmethod + def InitFromObj(cls, cumsumOptions): + x = CumsumOptionsT() + x._UnPack(cumsumOptions) + return x + + # CumsumOptionsT + def _UnPack(self, cumsumOptions): + if cumsumOptions is None: + return + self.exclusive = cumsumOptions.Exclusive() + self.reverse = 
cumsumOptions.Reverse() + + # CumsumOptionsT + def Pack(self, builder): + CumsumOptionsStart(builder) + CumsumOptionsAddExclusive(builder, self.exclusive) + CumsumOptionsAddReverse(builder, self.reverse) + cumsumOptions = CumsumOptionsEnd(builder) + return cumsumOptions +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: tflite + +class CustomOptionsFormat(object): + FLEXBUFFERS = 0 +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: tflite + +from flatbuffers.compat import import_numpy +np = import_numpy() + +class CustomQuantization(object): + __slots__ = ['_tab'] + + @classmethod + def GetRootAs(cls, buf, offset=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = CustomQuantization() + x.Init(buf, n + offset) + return x + + @classmethod + def GetRootAsCustomQuantization(cls, buf, offset=0): + """This method is deprecated. Please switch to GetRootAs.""" + return cls.GetRootAs(buf, offset) + @classmethod + def CustomQuantizationBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed) + + # CustomQuantization + def Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + + # CustomQuantization + def Custom(self, j): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + a = self._tab.Vector(o) + return self._tab.Get(flatbuffers.number_types.Uint8Flags, a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 1)) + return 0 + + # CustomQuantization + def CustomAsNumpy(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Uint8Flags, o) + return 0 + + # CustomQuantization + def CustomLength(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.VectorLen(o) + 
return 0 + + # CustomQuantization + def CustomIsNone(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + return o == 0 + +def CustomQuantizationStart(builder): builder.StartObject(1) +def Start(builder): + return CustomQuantizationStart(builder) +def CustomQuantizationAddCustom(builder, custom): builder.PrependUOffsetTRelativeSlot(0, flatbuffers.number_types.UOffsetTFlags.py_type(custom), 0) +def AddCustom(builder, custom): + return CustomQuantizationAddCustom(builder, custom) +def CustomQuantizationStartCustomVector(builder, numElems): return builder.StartVector(1, numElems, 1) +def StartCustomVector(builder, numElems): + return CustomQuantizationStartCustomVector(builder, numElems) +def CustomQuantizationEnd(builder): return builder.EndObject() +def End(builder): + return CustomQuantizationEnd(builder) +try: + from typing import List +except: + pass + +class CustomQuantizationT(object): + + # CustomQuantizationT + def __init__(self): + self.custom = None # type: List[int] + + @classmethod + def InitFromBuf(cls, buf, pos): + customQuantization = CustomQuantization() + customQuantization.Init(buf, pos) + return cls.InitFromObj(customQuantization) + + @classmethod + def InitFromObj(cls, customQuantization): + x = CustomQuantizationT() + x._UnPack(customQuantization) + return x + + # CustomQuantizationT + def _UnPack(self, customQuantization): + if customQuantization is None: + return + if not customQuantization.CustomIsNone(): + if np is None: + self.custom = [] + for i in range(customQuantization.CustomLength()): + self.custom.append(customQuantization.Custom(i)) + else: + self.custom = customQuantization.CustomAsNumpy() + + # CustomQuantizationT + def Pack(self, builder): + if self.custom is not None: + if np is not None and type(self.custom) is np.ndarray: + custom = builder.CreateNumpyVector(self.custom) + else: + CustomQuantizationStartCustomVector(builder, len(self.custom)) + for i in reversed(range(len(self.custom))): + 
builder.PrependUint8(self.custom[i]) + custom = builder.EndVector() + CustomQuantizationStart(builder) + if self.custom is not None: + CustomQuantizationAddCustom(builder, custom) + customQuantization = CustomQuantizationEnd(builder) + return customQuantization +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: tflite + +from flatbuffers.compat import import_numpy +np = import_numpy() + +class DensifyOptions(object): + __slots__ = ['_tab'] + + @classmethod + def GetRootAs(cls, buf, offset=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = DensifyOptions() + x.Init(buf, n + offset) + return x + + @classmethod + def GetRootAsDensifyOptions(cls, buf, offset=0): + """This method is deprecated. Please switch to GetRootAs.""" + return cls.GetRootAs(buf, offset) + @classmethod + def DensifyOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed) + + # DensifyOptions + def Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + +def DensifyOptionsStart(builder): builder.StartObject(0) +def Start(builder): + return DensifyOptionsStart(builder) +def DensifyOptionsEnd(builder): return builder.EndObject() +def End(builder): + return DensifyOptionsEnd(builder) + +class DensifyOptionsT(object): + + # DensifyOptionsT + def __init__(self): + pass + + @classmethod + def InitFromBuf(cls, buf, pos): + densifyOptions = DensifyOptions() + densifyOptions.Init(buf, pos) + return cls.InitFromObj(densifyOptions) + + @classmethod + def InitFromObj(cls, densifyOptions): + x = DensifyOptionsT() + x._UnPack(densifyOptions) + return x + + # DensifyOptionsT + def _UnPack(self, densifyOptions): + if densifyOptions is None: + return + + # DensifyOptionsT + def Pack(self, builder): + DensifyOptionsStart(builder) + densifyOptions = DensifyOptionsEnd(builder) + return densifyOptions +# automatically 
generated by the FlatBuffers compiler, do not modify + +# namespace: tflite + +from flatbuffers.compat import import_numpy +np = import_numpy() + +class DepthToSpaceOptions(object): + __slots__ = ['_tab'] + + @classmethod + def GetRootAs(cls, buf, offset=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = DepthToSpaceOptions() + x.Init(buf, n + offset) + return x + + @classmethod + def GetRootAsDepthToSpaceOptions(cls, buf, offset=0): + """This method is deprecated. Please switch to GetRootAs.""" + return cls.GetRootAs(buf, offset) + @classmethod + def DepthToSpaceOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed) + + # DepthToSpaceOptions + def Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + + # DepthToSpaceOptions + def BlockSize(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.Get(flatbuffers.number_types.Int32Flags, o + self._tab.Pos) + return 0 + +def DepthToSpaceOptionsStart(builder): builder.StartObject(1) +def Start(builder): + return DepthToSpaceOptionsStart(builder) +def DepthToSpaceOptionsAddBlockSize(builder, blockSize): builder.PrependInt32Slot(0, blockSize, 0) +def AddBlockSize(builder, blockSize): + return DepthToSpaceOptionsAddBlockSize(builder, blockSize) +def DepthToSpaceOptionsEnd(builder): return builder.EndObject() +def End(builder): + return DepthToSpaceOptionsEnd(builder) + +class DepthToSpaceOptionsT(object): + + # DepthToSpaceOptionsT + def __init__(self): + self.blockSize = 0 # type: int + + @classmethod + def InitFromBuf(cls, buf, pos): + depthToSpaceOptions = DepthToSpaceOptions() + depthToSpaceOptions.Init(buf, pos) + return cls.InitFromObj(depthToSpaceOptions) + + @classmethod + def InitFromObj(cls, depthToSpaceOptions): + x = DepthToSpaceOptionsT() + x._UnPack(depthToSpaceOptions) + return x + + # 
DepthToSpaceOptionsT + def _UnPack(self, depthToSpaceOptions): + if depthToSpaceOptions is None: + return + self.blockSize = depthToSpaceOptions.BlockSize() + + # DepthToSpaceOptionsT + def Pack(self, builder): + DepthToSpaceOptionsStart(builder) + DepthToSpaceOptionsAddBlockSize(builder, self.blockSize) + depthToSpaceOptions = DepthToSpaceOptionsEnd(builder) + return depthToSpaceOptions +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: tflite + +from flatbuffers.compat import import_numpy +np = import_numpy() + +class DepthwiseConv2DOptions(object): + __slots__ = ['_tab'] + + @classmethod + def GetRootAs(cls, buf, offset=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = DepthwiseConv2DOptions() + x.Init(buf, n + offset) + return x + + @classmethod + def GetRootAsDepthwiseConv2DOptions(cls, buf, offset=0): + """This method is deprecated. Please switch to GetRootAs.""" + return cls.GetRootAs(buf, offset) + @classmethod + def DepthwiseConv2DOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed) + + # DepthwiseConv2DOptions + def Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + + # DepthwiseConv2DOptions + def Padding(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.Get(flatbuffers.number_types.Int8Flags, o + self._tab.Pos) + return 0 + + # DepthwiseConv2DOptions + def StrideW(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) + if o != 0: + return self._tab.Get(flatbuffers.number_types.Int32Flags, o + self._tab.Pos) + return 0 + + # DepthwiseConv2DOptions + def StrideH(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8)) + if o != 0: + return self._tab.Get(flatbuffers.number_types.Int32Flags, o + self._tab.Pos) + return 0 + + # 
DepthwiseConv2DOptions + def DepthMultiplier(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(10)) + if o != 0: + return self._tab.Get(flatbuffers.number_types.Int32Flags, o + self._tab.Pos) + return 0 + + # DepthwiseConv2DOptions + def FusedActivationFunction(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(12)) + if o != 0: + return self._tab.Get(flatbuffers.number_types.Int8Flags, o + self._tab.Pos) + return 0 + + # DepthwiseConv2DOptions + def DilationWFactor(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(14)) + if o != 0: + return self._tab.Get(flatbuffers.number_types.Int32Flags, o + self._tab.Pos) + return 1 + + # DepthwiseConv2DOptions + def DilationHFactor(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(16)) + if o != 0: + return self._tab.Get(flatbuffers.number_types.Int32Flags, o + self._tab.Pos) + return 1 + +def DepthwiseConv2DOptionsStart(builder): builder.StartObject(7) +def Start(builder): + return DepthwiseConv2DOptionsStart(builder) +def DepthwiseConv2DOptionsAddPadding(builder, padding): builder.PrependInt8Slot(0, padding, 0) +def AddPadding(builder, padding): + return DepthwiseConv2DOptionsAddPadding(builder, padding) +def DepthwiseConv2DOptionsAddStrideW(builder, strideW): builder.PrependInt32Slot(1, strideW, 0) +def AddStrideW(builder, strideW): + return DepthwiseConv2DOptionsAddStrideW(builder, strideW) +def DepthwiseConv2DOptionsAddStrideH(builder, strideH): builder.PrependInt32Slot(2, strideH, 0) +def AddStrideH(builder, strideH): + return DepthwiseConv2DOptionsAddStrideH(builder, strideH) +def DepthwiseConv2DOptionsAddDepthMultiplier(builder, depthMultiplier): builder.PrependInt32Slot(3, depthMultiplier, 0) +def AddDepthMultiplier(builder, depthMultiplier): + return DepthwiseConv2DOptionsAddDepthMultiplier(builder, depthMultiplier) +def DepthwiseConv2DOptionsAddFusedActivationFunction(builder, fusedActivationFunction): 
builder.PrependInt8Slot(4, fusedActivationFunction, 0) +def AddFusedActivationFunction(builder, fusedActivationFunction): + return DepthwiseConv2DOptionsAddFusedActivationFunction(builder, fusedActivationFunction) +def DepthwiseConv2DOptionsAddDilationWFactor(builder, dilationWFactor): builder.PrependInt32Slot(5, dilationWFactor, 1) +def AddDilationWFactor(builder, dilationWFactor): + return DepthwiseConv2DOptionsAddDilationWFactor(builder, dilationWFactor) +def DepthwiseConv2DOptionsAddDilationHFactor(builder, dilationHFactor): builder.PrependInt32Slot(6, dilationHFactor, 1) +def AddDilationHFactor(builder, dilationHFactor): + return DepthwiseConv2DOptionsAddDilationHFactor(builder, dilationHFactor) +def DepthwiseConv2DOptionsEnd(builder): return builder.EndObject() +def End(builder): + return DepthwiseConv2DOptionsEnd(builder) + +class DepthwiseConv2DOptionsT(object): + + # DepthwiseConv2DOptionsT + def __init__(self): + self.padding = 0 # type: int + self.strideW = 0 # type: int + self.strideH = 0 # type: int + self.depthMultiplier = 0 # type: int + self.fusedActivationFunction = 0 # type: int + self.dilationWFactor = 1 # type: int + self.dilationHFactor = 1 # type: int + + @classmethod + def InitFromBuf(cls, buf, pos): + depthwiseConv2doptions = DepthwiseConv2DOptions() + depthwiseConv2doptions.Init(buf, pos) + return cls.InitFromObj(depthwiseConv2doptions) + + @classmethod + def InitFromObj(cls, depthwiseConv2doptions): + x = DepthwiseConv2DOptionsT() + x._UnPack(depthwiseConv2doptions) + return x + + # DepthwiseConv2DOptionsT + def _UnPack(self, depthwiseConv2doptions): + if depthwiseConv2doptions is None: + return + self.padding = depthwiseConv2doptions.Padding() + self.strideW = depthwiseConv2doptions.StrideW() + self.strideH = depthwiseConv2doptions.StrideH() + self.depthMultiplier = depthwiseConv2doptions.DepthMultiplier() + self.fusedActivationFunction = depthwiseConv2doptions.FusedActivationFunction() + self.dilationWFactor = 
depthwiseConv2doptions.DilationWFactor() + self.dilationHFactor = depthwiseConv2doptions.DilationHFactor() + + # DepthwiseConv2DOptionsT + def Pack(self, builder): + DepthwiseConv2DOptionsStart(builder) + DepthwiseConv2DOptionsAddPadding(builder, self.padding) + DepthwiseConv2DOptionsAddStrideW(builder, self.strideW) + DepthwiseConv2DOptionsAddStrideH(builder, self.strideH) + DepthwiseConv2DOptionsAddDepthMultiplier(builder, self.depthMultiplier) + DepthwiseConv2DOptionsAddFusedActivationFunction(builder, self.fusedActivationFunction) + DepthwiseConv2DOptionsAddDilationWFactor(builder, self.dilationWFactor) + DepthwiseConv2DOptionsAddDilationHFactor(builder, self.dilationHFactor) + depthwiseConv2doptions = DepthwiseConv2DOptionsEnd(builder) + return depthwiseConv2doptions +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: tflite + +from flatbuffers.compat import import_numpy +np = import_numpy() + +class DequantizeOptions(object): + __slots__ = ['_tab'] + + @classmethod + def GetRootAs(cls, buf, offset=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = DequantizeOptions() + x.Init(buf, n + offset) + return x + + @classmethod + def GetRootAsDequantizeOptions(cls, buf, offset=0): + """This method is deprecated. 
Please switch to GetRootAs.""" + return cls.GetRootAs(buf, offset) + @classmethod + def DequantizeOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed) + + # DequantizeOptions + def Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + +def DequantizeOptionsStart(builder): builder.StartObject(0) +def Start(builder): + return DequantizeOptionsStart(builder) +def DequantizeOptionsEnd(builder): return builder.EndObject() +def End(builder): + return DequantizeOptionsEnd(builder) + +class DequantizeOptionsT(object): + + # DequantizeOptionsT + def __init__(self): + pass + + @classmethod + def InitFromBuf(cls, buf, pos): + dequantizeOptions = DequantizeOptions() + dequantizeOptions.Init(buf, pos) + return cls.InitFromObj(dequantizeOptions) + + @classmethod + def InitFromObj(cls, dequantizeOptions): + x = DequantizeOptionsT() + x._UnPack(dequantizeOptions) + return x + + # DequantizeOptionsT + def _UnPack(self, dequantizeOptions): + if dequantizeOptions is None: + return + + # DequantizeOptionsT + def Pack(self, builder): + DequantizeOptionsStart(builder) + dequantizeOptions = DequantizeOptionsEnd(builder) + return dequantizeOptions +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: tflite + +from flatbuffers.compat import import_numpy +np = import_numpy() + +class DimensionMetadata(object): + __slots__ = ['_tab'] + + @classmethod + def GetRootAs(cls, buf, offset=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = DimensionMetadata() + x.Init(buf, n + offset) + return x + + @classmethod + def GetRootAsDimensionMetadata(cls, buf, offset=0): + """This method is deprecated. 
Please switch to GetRootAs.""" + return cls.GetRootAs(buf, offset) + @classmethod + def DimensionMetadataBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed) + + # DimensionMetadata + def Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + + # DimensionMetadata + def Format(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.Get(flatbuffers.number_types.Int8Flags, o + self._tab.Pos) + return 0 + + # DimensionMetadata + def DenseSize(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) + if o != 0: + return self._tab.Get(flatbuffers.number_types.Int32Flags, o + self._tab.Pos) + return 0 + + # DimensionMetadata + def ArraySegmentsType(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8)) + if o != 0: + return self._tab.Get(flatbuffers.number_types.Uint8Flags, o + self._tab.Pos) + return 0 + + # DimensionMetadata + def ArraySegments(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(10)) + if o != 0: + from flatbuffers.table import Table + obj = Table(bytearray(), 0) + self._tab.Union(obj, o) + return obj + return None + + # DimensionMetadata + def ArrayIndicesType(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(12)) + if o != 0: + return self._tab.Get(flatbuffers.number_types.Uint8Flags, o + self._tab.Pos) + return 0 + + # DimensionMetadata + def ArrayIndices(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(14)) + if o != 0: + from flatbuffers.table import Table + obj = Table(bytearray(), 0) + self._tab.Union(obj, o) + return obj + return None + +def DimensionMetadataStart(builder): builder.StartObject(6) +def Start(builder): + return DimensionMetadataStart(builder) +def DimensionMetadataAddFormat(builder, format): 
builder.PrependInt8Slot(0, format, 0) +def AddFormat(builder, format): + return DimensionMetadataAddFormat(builder, format) +def DimensionMetadataAddDenseSize(builder, denseSize): builder.PrependInt32Slot(1, denseSize, 0) +def AddDenseSize(builder, denseSize): + return DimensionMetadataAddDenseSize(builder, denseSize) +def DimensionMetadataAddArraySegmentsType(builder, arraySegmentsType): builder.PrependUint8Slot(2, arraySegmentsType, 0) +def AddArraySegmentsType(builder, arraySegmentsType): + return DimensionMetadataAddArraySegmentsType(builder, arraySegmentsType) +def DimensionMetadataAddArraySegments(builder, arraySegments): builder.PrependUOffsetTRelativeSlot(3, flatbuffers.number_types.UOffsetTFlags.py_type(arraySegments), 0) +def AddArraySegments(builder, arraySegments): + return DimensionMetadataAddArraySegments(builder, arraySegments) +def DimensionMetadataAddArrayIndicesType(builder, arrayIndicesType): builder.PrependUint8Slot(4, arrayIndicesType, 0) +def AddArrayIndicesType(builder, arrayIndicesType): + return DimensionMetadataAddArrayIndicesType(builder, arrayIndicesType) +def DimensionMetadataAddArrayIndices(builder, arrayIndices): builder.PrependUOffsetTRelativeSlot(5, flatbuffers.number_types.UOffsetTFlags.py_type(arrayIndices), 0) +def AddArrayIndices(builder, arrayIndices): + return DimensionMetadataAddArrayIndices(builder, arrayIndices) +def DimensionMetadataEnd(builder): return builder.EndObject() +def End(builder): + return DimensionMetadataEnd(builder) +try: + from typing import Union +except: + pass + +class DimensionMetadataT(object): + + # DimensionMetadataT + def __init__(self): + self.format = 0 # type: int + self.denseSize = 0 # type: int + self.arraySegmentsType = 0 # type: int + self.arraySegments = None # type: Union[None, Int32VectorT, Uint16VectorT, Uint8VectorT] + self.arrayIndicesType = 0 # type: int + self.arrayIndices = None # type: Union[None, Int32VectorT, Uint16VectorT, Uint8VectorT] + + @classmethod + def InitFromBuf(cls, buf, 
pos): + dimensionMetadata = DimensionMetadata() + dimensionMetadata.Init(buf, pos) + return cls.InitFromObj(dimensionMetadata) + + @classmethod + def InitFromObj(cls, dimensionMetadata): + x = DimensionMetadataT() + x._UnPack(dimensionMetadata) + return x + + # DimensionMetadataT + def _UnPack(self, dimensionMetadata): + if dimensionMetadata is None: + return + self.format = dimensionMetadata.Format() + self.denseSize = dimensionMetadata.DenseSize() + self.arraySegmentsType = dimensionMetadata.ArraySegmentsType() + self.arraySegments = SparseIndexVectorCreator(self.arraySegmentsType, dimensionMetadata.ArraySegments()) + self.arrayIndicesType = dimensionMetadata.ArrayIndicesType() + self.arrayIndices = SparseIndexVectorCreator(self.arrayIndicesType, dimensionMetadata.ArrayIndices()) + + # DimensionMetadataT + def Pack(self, builder): + if self.arraySegments is not None: + arraySegments = self.arraySegments.Pack(builder) + if self.arrayIndices is not None: + arrayIndices = self.arrayIndices.Pack(builder) + DimensionMetadataStart(builder) + DimensionMetadataAddFormat(builder, self.format) + DimensionMetadataAddDenseSize(builder, self.denseSize) + DimensionMetadataAddArraySegmentsType(builder, self.arraySegmentsType) + if self.arraySegments is not None: + DimensionMetadataAddArraySegments(builder, arraySegments) + DimensionMetadataAddArrayIndicesType(builder, self.arrayIndicesType) + if self.arrayIndices is not None: + DimensionMetadataAddArrayIndices(builder, arrayIndices) + dimensionMetadata = DimensionMetadataEnd(builder) + return dimensionMetadata +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: tflite + +class DimensionType(object): + DENSE = 0 + SPARSE_CSR = 1 +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: tflite + +from flatbuffers.compat import import_numpy +np = import_numpy() + +class DivOptions(object): + __slots__ = ['_tab'] + + @classmethod + def GetRootAs(cls, buf, offset=0): + n 
= flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = DivOptions() + x.Init(buf, n + offset) + return x + + @classmethod + def GetRootAsDivOptions(cls, buf, offset=0): + """This method is deprecated. Please switch to GetRootAs.""" + return cls.GetRootAs(buf, offset) + @classmethod + def DivOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed) + + # DivOptions + def Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + + # DivOptions + def FusedActivationFunction(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.Get(flatbuffers.number_types.Int8Flags, o + self._tab.Pos) + return 0 + +def DivOptionsStart(builder): builder.StartObject(1) +def Start(builder): + return DivOptionsStart(builder) +def DivOptionsAddFusedActivationFunction(builder, fusedActivationFunction): builder.PrependInt8Slot(0, fusedActivationFunction, 0) +def AddFusedActivationFunction(builder, fusedActivationFunction): + return DivOptionsAddFusedActivationFunction(builder, fusedActivationFunction) +def DivOptionsEnd(builder): return builder.EndObject() +def End(builder): + return DivOptionsEnd(builder) + +class DivOptionsT(object): + + # DivOptionsT + def __init__(self): + self.fusedActivationFunction = 0 # type: int + + @classmethod + def InitFromBuf(cls, buf, pos): + divOptions = DivOptions() + divOptions.Init(buf, pos) + return cls.InitFromObj(divOptions) + + @classmethod + def InitFromObj(cls, divOptions): + x = DivOptionsT() + x._UnPack(divOptions) + return x + + # DivOptionsT + def _UnPack(self, divOptions): + if divOptions is None: + return + self.fusedActivationFunction = divOptions.FusedActivationFunction() + + # DivOptionsT + def Pack(self, builder): + DivOptionsStart(builder) + DivOptionsAddFusedActivationFunction(builder, self.fusedActivationFunction) + divOptions = 
DivOptionsEnd(builder) + return divOptions +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: tflite + +from flatbuffers.compat import import_numpy +np = import_numpy() + +class DynamicUpdateSliceOptions(object): + __slots__ = ['_tab'] + + @classmethod + def GetRootAs(cls, buf, offset=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = DynamicUpdateSliceOptions() + x.Init(buf, n + offset) + return x + + @classmethod + def GetRootAsDynamicUpdateSliceOptions(cls, buf, offset=0): + """This method is deprecated. Please switch to GetRootAs.""" + return cls.GetRootAs(buf, offset) + @classmethod + def DynamicUpdateSliceOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed) + + # DynamicUpdateSliceOptions + def Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + +def DynamicUpdateSliceOptionsStart(builder): builder.StartObject(0) +def Start(builder): + return DynamicUpdateSliceOptionsStart(builder) +def DynamicUpdateSliceOptionsEnd(builder): return builder.EndObject() +def End(builder): + return DynamicUpdateSliceOptionsEnd(builder) + +class DynamicUpdateSliceOptionsT(object): + + # DynamicUpdateSliceOptionsT + def __init__(self): + pass + + @classmethod + def InitFromBuf(cls, buf, pos): + dynamicUpdateSliceOptions = DynamicUpdateSliceOptions() + dynamicUpdateSliceOptions.Init(buf, pos) + return cls.InitFromObj(dynamicUpdateSliceOptions) + + @classmethod + def InitFromObj(cls, dynamicUpdateSliceOptions): + x = DynamicUpdateSliceOptionsT() + x._UnPack(dynamicUpdateSliceOptions) + return x + + # DynamicUpdateSliceOptionsT + def _UnPack(self, dynamicUpdateSliceOptions): + if dynamicUpdateSliceOptions is None: + return + + # DynamicUpdateSliceOptionsT + def Pack(self, builder): + DynamicUpdateSliceOptionsStart(builder) + dynamicUpdateSliceOptions = 
DynamicUpdateSliceOptionsEnd(builder) + return dynamicUpdateSliceOptions +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: tflite + +from flatbuffers.compat import import_numpy +np = import_numpy() + +class EmbeddingLookupSparseOptions(object): + __slots__ = ['_tab'] + + @classmethod + def GetRootAs(cls, buf, offset=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = EmbeddingLookupSparseOptions() + x.Init(buf, n + offset) + return x + + @classmethod + def GetRootAsEmbeddingLookupSparseOptions(cls, buf, offset=0): + """This method is deprecated. Please switch to GetRootAs.""" + return cls.GetRootAs(buf, offset) + @classmethod + def EmbeddingLookupSparseOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed) + + # EmbeddingLookupSparseOptions + def Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + + # EmbeddingLookupSparseOptions + def Combiner(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.Get(flatbuffers.number_types.Int8Flags, o + self._tab.Pos) + return 0 + +def EmbeddingLookupSparseOptionsStart(builder): builder.StartObject(1) +def Start(builder): + return EmbeddingLookupSparseOptionsStart(builder) +def EmbeddingLookupSparseOptionsAddCombiner(builder, combiner): builder.PrependInt8Slot(0, combiner, 0) +def AddCombiner(builder, combiner): + return EmbeddingLookupSparseOptionsAddCombiner(builder, combiner) +def EmbeddingLookupSparseOptionsEnd(builder): return builder.EndObject() +def End(builder): + return EmbeddingLookupSparseOptionsEnd(builder) + +class EmbeddingLookupSparseOptionsT(object): + + # EmbeddingLookupSparseOptionsT + def __init__(self): + self.combiner = 0 # type: int + + @classmethod + def InitFromBuf(cls, buf, pos): + embeddingLookupSparseOptions = EmbeddingLookupSparseOptions() + 
embeddingLookupSparseOptions.Init(buf, pos) + return cls.InitFromObj(embeddingLookupSparseOptions) + + @classmethod + def InitFromObj(cls, embeddingLookupSparseOptions): + x = EmbeddingLookupSparseOptionsT() + x._UnPack(embeddingLookupSparseOptions) + return x + + # EmbeddingLookupSparseOptionsT + def _UnPack(self, embeddingLookupSparseOptions): + if embeddingLookupSparseOptions is None: + return + self.combiner = embeddingLookupSparseOptions.Combiner() + + # EmbeddingLookupSparseOptionsT + def Pack(self, builder): + EmbeddingLookupSparseOptionsStart(builder) + EmbeddingLookupSparseOptionsAddCombiner(builder, self.combiner) + embeddingLookupSparseOptions = EmbeddingLookupSparseOptionsEnd(builder) + return embeddingLookupSparseOptions +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: tflite + +from flatbuffers.compat import import_numpy +np = import_numpy() + +class EqualOptions(object): + __slots__ = ['_tab'] + + @classmethod + def GetRootAs(cls, buf, offset=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = EqualOptions() + x.Init(buf, n + offset) + return x + + @classmethod + def GetRootAsEqualOptions(cls, buf, offset=0): + """This method is deprecated. 
Please switch to GetRootAs.""" + return cls.GetRootAs(buf, offset) + @classmethod + def EqualOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed) + + # EqualOptions + def Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + +def EqualOptionsStart(builder): builder.StartObject(0) +def Start(builder): + return EqualOptionsStart(builder) +def EqualOptionsEnd(builder): return builder.EndObject() +def End(builder): + return EqualOptionsEnd(builder) + +class EqualOptionsT(object): + + # EqualOptionsT + def __init__(self): + pass + + @classmethod + def InitFromBuf(cls, buf, pos): + equalOptions = EqualOptions() + equalOptions.Init(buf, pos) + return cls.InitFromObj(equalOptions) + + @classmethod + def InitFromObj(cls, equalOptions): + x = EqualOptionsT() + x._UnPack(equalOptions) + return x + + # EqualOptionsT + def _UnPack(self, equalOptions): + if equalOptions is None: + return + + # EqualOptionsT + def Pack(self, builder): + EqualOptionsStart(builder) + equalOptions = EqualOptionsEnd(builder) + return equalOptions +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: tflite + +from flatbuffers.compat import import_numpy +np = import_numpy() + +class ExpOptions(object): + __slots__ = ['_tab'] + + @classmethod + def GetRootAs(cls, buf, offset=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = ExpOptions() + x.Init(buf, n + offset) + return x + + @classmethod + def GetRootAsExpOptions(cls, buf, offset=0): + """This method is deprecated. 
Please switch to GetRootAs.""" + return cls.GetRootAs(buf, offset) + @classmethod + def ExpOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed) + + # ExpOptions + def Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + +def ExpOptionsStart(builder): builder.StartObject(0) +def Start(builder): + return ExpOptionsStart(builder) +def ExpOptionsEnd(builder): return builder.EndObject() +def End(builder): + return ExpOptionsEnd(builder) + +class ExpOptionsT(object): + + # ExpOptionsT + def __init__(self): + pass + + @classmethod + def InitFromBuf(cls, buf, pos): + expOptions = ExpOptions() + expOptions.Init(buf, pos) + return cls.InitFromObj(expOptions) + + @classmethod + def InitFromObj(cls, expOptions): + x = ExpOptionsT() + x._UnPack(expOptions) + return x + + # ExpOptionsT + def _UnPack(self, expOptions): + if expOptions is None: + return + + # ExpOptionsT + def Pack(self, builder): + ExpOptionsStart(builder) + expOptions = ExpOptionsEnd(builder) + return expOptions +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: tflite + +from flatbuffers.compat import import_numpy +np = import_numpy() + +class ExpandDimsOptions(object): + __slots__ = ['_tab'] + + @classmethod + def GetRootAs(cls, buf, offset=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = ExpandDimsOptions() + x.Init(buf, n + offset) + return x + + @classmethod + def GetRootAsExpandDimsOptions(cls, buf, offset=0): + """This method is deprecated. 
Please switch to GetRootAs.""" + return cls.GetRootAs(buf, offset) + @classmethod + def ExpandDimsOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed) + + # ExpandDimsOptions + def Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + +def ExpandDimsOptionsStart(builder): builder.StartObject(0) +def Start(builder): + return ExpandDimsOptionsStart(builder) +def ExpandDimsOptionsEnd(builder): return builder.EndObject() +def End(builder): + return ExpandDimsOptionsEnd(builder) + +class ExpandDimsOptionsT(object): + + # ExpandDimsOptionsT + def __init__(self): + pass + + @classmethod + def InitFromBuf(cls, buf, pos): + expandDimsOptions = ExpandDimsOptions() + expandDimsOptions.Init(buf, pos) + return cls.InitFromObj(expandDimsOptions) + + @classmethod + def InitFromObj(cls, expandDimsOptions): + x = ExpandDimsOptionsT() + x._UnPack(expandDimsOptions) + return x + + # ExpandDimsOptionsT + def _UnPack(self, expandDimsOptions): + if expandDimsOptions is None: + return + + # ExpandDimsOptionsT + def Pack(self, builder): + ExpandDimsOptionsStart(builder) + expandDimsOptions = ExpandDimsOptionsEnd(builder) + return expandDimsOptions +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: tflite + +from flatbuffers.compat import import_numpy +np = import_numpy() + +class FakeQuantOptions(object): + __slots__ = ['_tab'] + + @classmethod + def GetRootAs(cls, buf, offset=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = FakeQuantOptions() + x.Init(buf, n + offset) + return x + + @classmethod + def GetRootAsFakeQuantOptions(cls, buf, offset=0): + """This method is deprecated. 
Please switch to GetRootAs.""" + return cls.GetRootAs(buf, offset) + @classmethod + def FakeQuantOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed) + + # FakeQuantOptions + def Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + + # FakeQuantOptions + def Min(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.Get(flatbuffers.number_types.Float32Flags, o + self._tab.Pos) + return 0.0 + + # FakeQuantOptions + def Max(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) + if o != 0: + return self._tab.Get(flatbuffers.number_types.Float32Flags, o + self._tab.Pos) + return 0.0 + + # FakeQuantOptions + def NumBits(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8)) + if o != 0: + return self._tab.Get(flatbuffers.number_types.Int32Flags, o + self._tab.Pos) + return 0 + + # FakeQuantOptions + def NarrowRange(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(10)) + if o != 0: + return bool(self._tab.Get(flatbuffers.number_types.BoolFlags, o + self._tab.Pos)) + return False + +def FakeQuantOptionsStart(builder): builder.StartObject(4) +def Start(builder): + return FakeQuantOptionsStart(builder) +def FakeQuantOptionsAddMin(builder, min): builder.PrependFloat32Slot(0, min, 0.0) +def AddMin(builder, min): + return FakeQuantOptionsAddMin(builder, min) +def FakeQuantOptionsAddMax(builder, max): builder.PrependFloat32Slot(1, max, 0.0) +def AddMax(builder, max): + return FakeQuantOptionsAddMax(builder, max) +def FakeQuantOptionsAddNumBits(builder, numBits): builder.PrependInt32Slot(2, numBits, 0) +def AddNumBits(builder, numBits): + return FakeQuantOptionsAddNumBits(builder, numBits) +def FakeQuantOptionsAddNarrowRange(builder, narrowRange): builder.PrependBoolSlot(3, narrowRange, 0) +def 
AddNarrowRange(builder, narrowRange): + return FakeQuantOptionsAddNarrowRange(builder, narrowRange) +def FakeQuantOptionsEnd(builder): return builder.EndObject() +def End(builder): + return FakeQuantOptionsEnd(builder) + +class FakeQuantOptionsT(object): + + # FakeQuantOptionsT + def __init__(self): + self.min = 0.0 # type: float + self.max = 0.0 # type: float + self.numBits = 0 # type: int + self.narrowRange = False # type: bool + + @classmethod + def InitFromBuf(cls, buf, pos): + fakeQuantOptions = FakeQuantOptions() + fakeQuantOptions.Init(buf, pos) + return cls.InitFromObj(fakeQuantOptions) + + @classmethod + def InitFromObj(cls, fakeQuantOptions): + x = FakeQuantOptionsT() + x._UnPack(fakeQuantOptions) + return x + + # FakeQuantOptionsT + def _UnPack(self, fakeQuantOptions): + if fakeQuantOptions is None: + return + self.min = fakeQuantOptions.Min() + self.max = fakeQuantOptions.Max() + self.numBits = fakeQuantOptions.NumBits() + self.narrowRange = fakeQuantOptions.NarrowRange() + + # FakeQuantOptionsT + def Pack(self, builder): + FakeQuantOptionsStart(builder) + FakeQuantOptionsAddMin(builder, self.min) + FakeQuantOptionsAddMax(builder, self.max) + FakeQuantOptionsAddNumBits(builder, self.numBits) + FakeQuantOptionsAddNarrowRange(builder, self.narrowRange) + fakeQuantOptions = FakeQuantOptionsEnd(builder) + return fakeQuantOptions +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: tflite + +from flatbuffers.compat import import_numpy +np = import_numpy() + +class FillOptions(object): + __slots__ = ['_tab'] + + @classmethod + def GetRootAs(cls, buf, offset=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = FillOptions() + x.Init(buf, n + offset) + return x + + @classmethod + def GetRootAsFillOptions(cls, buf, offset=0): + """This method is deprecated. 
Please switch to GetRootAs.""" + return cls.GetRootAs(buf, offset) + @classmethod + def FillOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed) + + # FillOptions + def Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + +def FillOptionsStart(builder): builder.StartObject(0) +def Start(builder): + return FillOptionsStart(builder) +def FillOptionsEnd(builder): return builder.EndObject() +def End(builder): + return FillOptionsEnd(builder) + +class FillOptionsT(object): + + # FillOptionsT + def __init__(self): + pass + + @classmethod + def InitFromBuf(cls, buf, pos): + fillOptions = FillOptions() + fillOptions.Init(buf, pos) + return cls.InitFromObj(fillOptions) + + @classmethod + def InitFromObj(cls, fillOptions): + x = FillOptionsT() + x._UnPack(fillOptions) + return x + + # FillOptionsT + def _UnPack(self, fillOptions): + if fillOptions is None: + return + + # FillOptionsT + def Pack(self, builder): + FillOptionsStart(builder) + fillOptions = FillOptionsEnd(builder) + return fillOptions +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: tflite + +from flatbuffers.compat import import_numpy +np = import_numpy() + +class FloorDivOptions(object): + __slots__ = ['_tab'] + + @classmethod + def GetRootAs(cls, buf, offset=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = FloorDivOptions() + x.Init(buf, n + offset) + return x + + @classmethod + def GetRootAsFloorDivOptions(cls, buf, offset=0): + """This method is deprecated. 
Please switch to GetRootAs.""" + return cls.GetRootAs(buf, offset) + @classmethod + def FloorDivOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed) + + # FloorDivOptions + def Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + +def FloorDivOptionsStart(builder): builder.StartObject(0) +def Start(builder): + return FloorDivOptionsStart(builder) +def FloorDivOptionsEnd(builder): return builder.EndObject() +def End(builder): + return FloorDivOptionsEnd(builder) + +class FloorDivOptionsT(object): + + # FloorDivOptionsT + def __init__(self): + pass + + @classmethod + def InitFromBuf(cls, buf, pos): + floorDivOptions = FloorDivOptions() + floorDivOptions.Init(buf, pos) + return cls.InitFromObj(floorDivOptions) + + @classmethod + def InitFromObj(cls, floorDivOptions): + x = FloorDivOptionsT() + x._UnPack(floorDivOptions) + return x + + # FloorDivOptionsT + def _UnPack(self, floorDivOptions): + if floorDivOptions is None: + return + + # FloorDivOptionsT + def Pack(self, builder): + FloorDivOptionsStart(builder) + floorDivOptions = FloorDivOptionsEnd(builder) + return floorDivOptions +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: tflite + +from flatbuffers.compat import import_numpy +np = import_numpy() + +class FloorModOptions(object): + __slots__ = ['_tab'] + + @classmethod + def GetRootAs(cls, buf, offset=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = FloorModOptions() + x.Init(buf, n + offset) + return x + + @classmethod + def GetRootAsFloorModOptions(cls, buf, offset=0): + """This method is deprecated. 
Please switch to GetRootAs.""" + return cls.GetRootAs(buf, offset) + @classmethod + def FloorModOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed) + + # FloorModOptions + def Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + +def FloorModOptionsStart(builder): builder.StartObject(0) +def Start(builder): + return FloorModOptionsStart(builder) +def FloorModOptionsEnd(builder): return builder.EndObject() +def End(builder): + return FloorModOptionsEnd(builder) + +class FloorModOptionsT(object): + + # FloorModOptionsT + def __init__(self): + pass + + @classmethod + def InitFromBuf(cls, buf, pos): + floorModOptions = FloorModOptions() + floorModOptions.Init(buf, pos) + return cls.InitFromObj(floorModOptions) + + @classmethod + def InitFromObj(cls, floorModOptions): + x = FloorModOptionsT() + x._UnPack(floorModOptions) + return x + + # FloorModOptionsT + def _UnPack(self, floorModOptions): + if floorModOptions is None: + return + + # FloorModOptionsT + def Pack(self, builder): + FloorModOptionsStart(builder) + floorModOptions = FloorModOptionsEnd(builder) + return floorModOptions +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: tflite + +from flatbuffers.compat import import_numpy +np = import_numpy() + +class FullyConnectedOptions(object): + __slots__ = ['_tab'] + + @classmethod + def GetRootAs(cls, buf, offset=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = FullyConnectedOptions() + x.Init(buf, n + offset) + return x + + @classmethod + def GetRootAsFullyConnectedOptions(cls, buf, offset=0): + """This method is deprecated. 
Please switch to GetRootAs.""" + return cls.GetRootAs(buf, offset) + @classmethod + def FullyConnectedOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed) + + # FullyConnectedOptions + def Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + + # FullyConnectedOptions + def FusedActivationFunction(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.Get(flatbuffers.number_types.Int8Flags, o + self._tab.Pos) + return 0 + + # FullyConnectedOptions + def WeightsFormat(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) + if o != 0: + return self._tab.Get(flatbuffers.number_types.Int8Flags, o + self._tab.Pos) + return 0 + + # FullyConnectedOptions + def KeepNumDims(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8)) + if o != 0: + return bool(self._tab.Get(flatbuffers.number_types.BoolFlags, o + self._tab.Pos)) + return False + + # FullyConnectedOptions + def AsymmetricQuantizeInputs(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(10)) + if o != 0: + return bool(self._tab.Get(flatbuffers.number_types.BoolFlags, o + self._tab.Pos)) + return False + +def FullyConnectedOptionsStart(builder): builder.StartObject(4) +def Start(builder): + return FullyConnectedOptionsStart(builder) +def FullyConnectedOptionsAddFusedActivationFunction(builder, fusedActivationFunction): builder.PrependInt8Slot(0, fusedActivationFunction, 0) +def AddFusedActivationFunction(builder, fusedActivationFunction): + return FullyConnectedOptionsAddFusedActivationFunction(builder, fusedActivationFunction) +def FullyConnectedOptionsAddWeightsFormat(builder, weightsFormat): builder.PrependInt8Slot(1, weightsFormat, 0) +def AddWeightsFormat(builder, weightsFormat): + return FullyConnectedOptionsAddWeightsFormat(builder, 
weightsFormat) +def FullyConnectedOptionsAddKeepNumDims(builder, keepNumDims): builder.PrependBoolSlot(2, keepNumDims, 0) +def AddKeepNumDims(builder, keepNumDims): + return FullyConnectedOptionsAddKeepNumDims(builder, keepNumDims) +def FullyConnectedOptionsAddAsymmetricQuantizeInputs(builder, asymmetricQuantizeInputs): builder.PrependBoolSlot(3, asymmetricQuantizeInputs, 0) +def AddAsymmetricQuantizeInputs(builder, asymmetricQuantizeInputs): + return FullyConnectedOptionsAddAsymmetricQuantizeInputs(builder, asymmetricQuantizeInputs) +def FullyConnectedOptionsEnd(builder): return builder.EndObject() +def End(builder): + return FullyConnectedOptionsEnd(builder) + +class FullyConnectedOptionsT(object): + + # FullyConnectedOptionsT + def __init__(self): + self.fusedActivationFunction = 0 # type: int + self.weightsFormat = 0 # type: int + self.keepNumDims = False # type: bool + self.asymmetricQuantizeInputs = False # type: bool + + @classmethod + def InitFromBuf(cls, buf, pos): + fullyConnectedOptions = FullyConnectedOptions() + fullyConnectedOptions.Init(buf, pos) + return cls.InitFromObj(fullyConnectedOptions) + + @classmethod + def InitFromObj(cls, fullyConnectedOptions): + x = FullyConnectedOptionsT() + x._UnPack(fullyConnectedOptions) + return x + + # FullyConnectedOptionsT + def _UnPack(self, fullyConnectedOptions): + if fullyConnectedOptions is None: + return + self.fusedActivationFunction = fullyConnectedOptions.FusedActivationFunction() + self.weightsFormat = fullyConnectedOptions.WeightsFormat() + self.keepNumDims = fullyConnectedOptions.KeepNumDims() + self.asymmetricQuantizeInputs = fullyConnectedOptions.AsymmetricQuantizeInputs() + + # FullyConnectedOptionsT + def Pack(self, builder): + FullyConnectedOptionsStart(builder) + FullyConnectedOptionsAddFusedActivationFunction(builder, self.fusedActivationFunction) + FullyConnectedOptionsAddWeightsFormat(builder, self.weightsFormat) + FullyConnectedOptionsAddKeepNumDims(builder, self.keepNumDims) + 
FullyConnectedOptionsAddAsymmetricQuantizeInputs(builder, self.asymmetricQuantizeInputs) + fullyConnectedOptions = FullyConnectedOptionsEnd(builder) + return fullyConnectedOptions +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: tflite + +class FullyConnectedOptionsWeightsFormat(object): + DEFAULT = 0 + SHUFFLED4x16INT8 = 1 +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: tflite + +from flatbuffers.compat import import_numpy +np = import_numpy() + +class GatherNdOptions(object): + __slots__ = ['_tab'] + + @classmethod + def GetRootAs(cls, buf, offset=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = GatherNdOptions() + x.Init(buf, n + offset) + return x + + @classmethod + def GetRootAsGatherNdOptions(cls, buf, offset=0): + """This method is deprecated. Please switch to GetRootAs.""" + return cls.GetRootAs(buf, offset) + @classmethod + def GatherNdOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed) + + # GatherNdOptions + def Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + +def GatherNdOptionsStart(builder): builder.StartObject(0) +def Start(builder): + return GatherNdOptionsStart(builder) +def GatherNdOptionsEnd(builder): return builder.EndObject() +def End(builder): + return GatherNdOptionsEnd(builder) + +class GatherNdOptionsT(object): + + # GatherNdOptionsT + def __init__(self): + pass + + @classmethod + def InitFromBuf(cls, buf, pos): + gatherNdOptions = GatherNdOptions() + gatherNdOptions.Init(buf, pos) + return cls.InitFromObj(gatherNdOptions) + + @classmethod + def InitFromObj(cls, gatherNdOptions): + x = GatherNdOptionsT() + x._UnPack(gatherNdOptions) + return x + + # GatherNdOptionsT + def _UnPack(self, gatherNdOptions): + if gatherNdOptions is None: + return + + # GatherNdOptionsT + def Pack(self, builder): 
+ GatherNdOptionsStart(builder) + gatherNdOptions = GatherNdOptionsEnd(builder) + return gatherNdOptions +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: tflite + +from flatbuffers.compat import import_numpy +np = import_numpy() + +class GatherOptions(object): + __slots__ = ['_tab'] + + @classmethod + def GetRootAs(cls, buf, offset=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = GatherOptions() + x.Init(buf, n + offset) + return x + + @classmethod + def GetRootAsGatherOptions(cls, buf, offset=0): + """This method is deprecated. Please switch to GetRootAs.""" + return cls.GetRootAs(buf, offset) + @classmethod + def GatherOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed) + + # GatherOptions + def Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + + # GatherOptions + def Axis(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.Get(flatbuffers.number_types.Int32Flags, o + self._tab.Pos) + return 0 + + # GatherOptions + def BatchDims(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) + if o != 0: + return self._tab.Get(flatbuffers.number_types.Int32Flags, o + self._tab.Pos) + return 0 + +def GatherOptionsStart(builder): builder.StartObject(2) +def Start(builder): + return GatherOptionsStart(builder) +def GatherOptionsAddAxis(builder, axis): builder.PrependInt32Slot(0, axis, 0) +def AddAxis(builder, axis): + return GatherOptionsAddAxis(builder, axis) +def GatherOptionsAddBatchDims(builder, batchDims): builder.PrependInt32Slot(1, batchDims, 0) +def AddBatchDims(builder, batchDims): + return GatherOptionsAddBatchDims(builder, batchDims) +def GatherOptionsEnd(builder): return builder.EndObject() +def End(builder): + return GatherOptionsEnd(builder) + +class 
GatherOptionsT(object): + + # GatherOptionsT + def __init__(self): + self.axis = 0 # type: int + self.batchDims = 0 # type: int + + @classmethod + def InitFromBuf(cls, buf, pos): + gatherOptions = GatherOptions() + gatherOptions.Init(buf, pos) + return cls.InitFromObj(gatherOptions) + + @classmethod + def InitFromObj(cls, gatherOptions): + x = GatherOptionsT() + x._UnPack(gatherOptions) + return x + + # GatherOptionsT + def _UnPack(self, gatherOptions): + if gatherOptions is None: + return + self.axis = gatherOptions.Axis() + self.batchDims = gatherOptions.BatchDims() + + # GatherOptionsT + def Pack(self, builder): + GatherOptionsStart(builder) + GatherOptionsAddAxis(builder, self.axis) + GatherOptionsAddBatchDims(builder, self.batchDims) + gatherOptions = GatherOptionsEnd(builder) + return gatherOptions +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: tflite + +from flatbuffers.compat import import_numpy +np = import_numpy() + +class GeluOptions(object): + __slots__ = ['_tab'] + + @classmethod + def GetRootAs(cls, buf, offset=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = GeluOptions() + x.Init(buf, n + offset) + return x + + @classmethod + def GetRootAsGeluOptions(cls, buf, offset=0): + """This method is deprecated. 
Please switch to GetRootAs.""" + return cls.GetRootAs(buf, offset) + @classmethod + def GeluOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed) + + # GeluOptions + def Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + + # GeluOptions + def Approximate(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return bool(self._tab.Get(flatbuffers.number_types.BoolFlags, o + self._tab.Pos)) + return False + +def GeluOptionsStart(builder): builder.StartObject(1) +def Start(builder): + return GeluOptionsStart(builder) +def GeluOptionsAddApproximate(builder, approximate): builder.PrependBoolSlot(0, approximate, 0) +def AddApproximate(builder, approximate): + return GeluOptionsAddApproximate(builder, approximate) +def GeluOptionsEnd(builder): return builder.EndObject() +def End(builder): + return GeluOptionsEnd(builder) + +class GeluOptionsT(object): + + # GeluOptionsT + def __init__(self): + self.approximate = False # type: bool + + @classmethod + def InitFromBuf(cls, buf, pos): + geluOptions = GeluOptions() + geluOptions.Init(buf, pos) + return cls.InitFromObj(geluOptions) + + @classmethod + def InitFromObj(cls, geluOptions): + x = GeluOptionsT() + x._UnPack(geluOptions) + return x + + # GeluOptionsT + def _UnPack(self, geluOptions): + if geluOptions is None: + return + self.approximate = geluOptions.Approximate() + + # GeluOptionsT + def Pack(self, builder): + GeluOptionsStart(builder) + GeluOptionsAddApproximate(builder, self.approximate) + geluOptions = GeluOptionsEnd(builder) + return geluOptions +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: tflite + +from flatbuffers.compat import import_numpy +np = import_numpy() + +class GreaterEqualOptions(object): + __slots__ = ['_tab'] + + @classmethod + def GetRootAs(cls, buf, offset=0): + n = 
flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = GreaterEqualOptions() + x.Init(buf, n + offset) + return x + + @classmethod + def GetRootAsGreaterEqualOptions(cls, buf, offset=0): + """This method is deprecated. Please switch to GetRootAs.""" + return cls.GetRootAs(buf, offset) + @classmethod + def GreaterEqualOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed) + + # GreaterEqualOptions + def Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + +def GreaterEqualOptionsStart(builder): builder.StartObject(0) +def Start(builder): + return GreaterEqualOptionsStart(builder) +def GreaterEqualOptionsEnd(builder): return builder.EndObject() +def End(builder): + return GreaterEqualOptionsEnd(builder) + +class GreaterEqualOptionsT(object): + + # GreaterEqualOptionsT + def __init__(self): + pass + + @classmethod + def InitFromBuf(cls, buf, pos): + greaterEqualOptions = GreaterEqualOptions() + greaterEqualOptions.Init(buf, pos) + return cls.InitFromObj(greaterEqualOptions) + + @classmethod + def InitFromObj(cls, greaterEqualOptions): + x = GreaterEqualOptionsT() + x._UnPack(greaterEqualOptions) + return x + + # GreaterEqualOptionsT + def _UnPack(self, greaterEqualOptions): + if greaterEqualOptions is None: + return + + # GreaterEqualOptionsT + def Pack(self, builder): + GreaterEqualOptionsStart(builder) + greaterEqualOptions = GreaterEqualOptionsEnd(builder) + return greaterEqualOptions +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: tflite + +from flatbuffers.compat import import_numpy +np = import_numpy() + +class GreaterOptions(object): + __slots__ = ['_tab'] + + @classmethod + def GetRootAs(cls, buf, offset=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = GreaterOptions() + x.Init(buf, n + offset) + return x + + @classmethod + def 
GetRootAsGreaterOptions(cls, buf, offset=0): + """This method is deprecated. Please switch to GetRootAs.""" + return cls.GetRootAs(buf, offset) + @classmethod + def GreaterOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed) + + # GreaterOptions + def Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + +def GreaterOptionsStart(builder): builder.StartObject(0) +def Start(builder): + return GreaterOptionsStart(builder) +def GreaterOptionsEnd(builder): return builder.EndObject() +def End(builder): + return GreaterOptionsEnd(builder) + +class GreaterOptionsT(object): + + # GreaterOptionsT + def __init__(self): + pass + + @classmethod + def InitFromBuf(cls, buf, pos): + greaterOptions = GreaterOptions() + greaterOptions.Init(buf, pos) + return cls.InitFromObj(greaterOptions) + + @classmethod + def InitFromObj(cls, greaterOptions): + x = GreaterOptionsT() + x._UnPack(greaterOptions) + return x + + # GreaterOptionsT + def _UnPack(self, greaterOptions): + if greaterOptions is None: + return + + # GreaterOptionsT + def Pack(self, builder): + GreaterOptionsStart(builder) + greaterOptions = GreaterOptionsEnd(builder) + return greaterOptions +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: tflite + +from flatbuffers.compat import import_numpy +np = import_numpy() + +class HardSwishOptions(object): + __slots__ = ['_tab'] + + @classmethod + def GetRootAs(cls, buf, offset=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = HardSwishOptions() + x.Init(buf, n + offset) + return x + + @classmethod + def GetRootAsHardSwishOptions(cls, buf, offset=0): + """This method is deprecated. 
Please switch to GetRootAs.""" + return cls.GetRootAs(buf, offset) + @classmethod + def HardSwishOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed) + + # HardSwishOptions + def Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + +def HardSwishOptionsStart(builder): builder.StartObject(0) +def Start(builder): + return HardSwishOptionsStart(builder) +def HardSwishOptionsEnd(builder): return builder.EndObject() +def End(builder): + return HardSwishOptionsEnd(builder) + +class HardSwishOptionsT(object): + + # HardSwishOptionsT + def __init__(self): + pass + + @classmethod + def InitFromBuf(cls, buf, pos): + hardSwishOptions = HardSwishOptions() + hardSwishOptions.Init(buf, pos) + return cls.InitFromObj(hardSwishOptions) + + @classmethod + def InitFromObj(cls, hardSwishOptions): + x = HardSwishOptionsT() + x._UnPack(hardSwishOptions) + return x + + # HardSwishOptionsT + def _UnPack(self, hardSwishOptions): + if hardSwishOptions is None: + return + + # HardSwishOptionsT + def Pack(self, builder): + HardSwishOptionsStart(builder) + hardSwishOptions = HardSwishOptionsEnd(builder) + return hardSwishOptions +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: tflite + +from flatbuffers.compat import import_numpy +np = import_numpy() + +class HashtableFindOptions(object): + __slots__ = ['_tab'] + + @classmethod + def GetRootAs(cls, buf, offset=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = HashtableFindOptions() + x.Init(buf, n + offset) + return x + + @classmethod + def GetRootAsHashtableFindOptions(cls, buf, offset=0): + """This method is deprecated. 
Please switch to GetRootAs.""" + return cls.GetRootAs(buf, offset) + @classmethod + def HashtableFindOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed) + + # HashtableFindOptions + def Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + +def HashtableFindOptionsStart(builder): builder.StartObject(0) +def Start(builder): + return HashtableFindOptionsStart(builder) +def HashtableFindOptionsEnd(builder): return builder.EndObject() +def End(builder): + return HashtableFindOptionsEnd(builder) + +class HashtableFindOptionsT(object): + + # HashtableFindOptionsT + def __init__(self): + pass + + @classmethod + def InitFromBuf(cls, buf, pos): + hashtableFindOptions = HashtableFindOptions() + hashtableFindOptions.Init(buf, pos) + return cls.InitFromObj(hashtableFindOptions) + + @classmethod + def InitFromObj(cls, hashtableFindOptions): + x = HashtableFindOptionsT() + x._UnPack(hashtableFindOptions) + return x + + # HashtableFindOptionsT + def _UnPack(self, hashtableFindOptions): + if hashtableFindOptions is None: + return + + # HashtableFindOptionsT + def Pack(self, builder): + HashtableFindOptionsStart(builder) + hashtableFindOptions = HashtableFindOptionsEnd(builder) + return hashtableFindOptions +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: tflite + +from flatbuffers.compat import import_numpy +np = import_numpy() + +class HashtableImportOptions(object): + __slots__ = ['_tab'] + + @classmethod + def GetRootAs(cls, buf, offset=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = HashtableImportOptions() + x.Init(buf, n + offset) + return x + + @classmethod + def GetRootAsHashtableImportOptions(cls, buf, offset=0): + """This method is deprecated. 
Please switch to GetRootAs.""" + return cls.GetRootAs(buf, offset) + @classmethod + def HashtableImportOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed) + + # HashtableImportOptions + def Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + +def HashtableImportOptionsStart(builder): builder.StartObject(0) +def Start(builder): + return HashtableImportOptionsStart(builder) +def HashtableImportOptionsEnd(builder): return builder.EndObject() +def End(builder): + return HashtableImportOptionsEnd(builder) + +class HashtableImportOptionsT(object): + + # HashtableImportOptionsT + def __init__(self): + pass + + @classmethod + def InitFromBuf(cls, buf, pos): + hashtableImportOptions = HashtableImportOptions() + hashtableImportOptions.Init(buf, pos) + return cls.InitFromObj(hashtableImportOptions) + + @classmethod + def InitFromObj(cls, hashtableImportOptions): + x = HashtableImportOptionsT() + x._UnPack(hashtableImportOptions) + return x + + # HashtableImportOptionsT + def _UnPack(self, hashtableImportOptions): + if hashtableImportOptions is None: + return + + # HashtableImportOptionsT + def Pack(self, builder): + HashtableImportOptionsStart(builder) + hashtableImportOptions = HashtableImportOptionsEnd(builder) + return hashtableImportOptions +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: tflite + +from flatbuffers.compat import import_numpy +np = import_numpy() + +class HashtableOptions(object): + __slots__ = ['_tab'] + + @classmethod + def GetRootAs(cls, buf, offset=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = HashtableOptions() + x.Init(buf, n + offset) + return x + + @classmethod + def GetRootAsHashtableOptions(cls, buf, offset=0): + """This method is deprecated. 
Please switch to GetRootAs.""" + return cls.GetRootAs(buf, offset) + @classmethod + def HashtableOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed) + + # HashtableOptions + def Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + + # HashtableOptions + def TableId(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.Get(flatbuffers.number_types.Int32Flags, o + self._tab.Pos) + return 0 + + # HashtableOptions + def KeyDtype(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) + if o != 0: + return self._tab.Get(flatbuffers.number_types.Int8Flags, o + self._tab.Pos) + return 0 + + # HashtableOptions + def ValueDtype(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8)) + if o != 0: + return self._tab.Get(flatbuffers.number_types.Int8Flags, o + self._tab.Pos) + return 0 + +def HashtableOptionsStart(builder): builder.StartObject(3) +def Start(builder): + return HashtableOptionsStart(builder) +def HashtableOptionsAddTableId(builder, tableId): builder.PrependInt32Slot(0, tableId, 0) +def AddTableId(builder, tableId): + return HashtableOptionsAddTableId(builder, tableId) +def HashtableOptionsAddKeyDtype(builder, keyDtype): builder.PrependInt8Slot(1, keyDtype, 0) +def AddKeyDtype(builder, keyDtype): + return HashtableOptionsAddKeyDtype(builder, keyDtype) +def HashtableOptionsAddValueDtype(builder, valueDtype): builder.PrependInt8Slot(2, valueDtype, 0) +def AddValueDtype(builder, valueDtype): + return HashtableOptionsAddValueDtype(builder, valueDtype) +def HashtableOptionsEnd(builder): return builder.EndObject() +def End(builder): + return HashtableOptionsEnd(builder) + +class HashtableOptionsT(object): + + # HashtableOptionsT + def __init__(self): + self.tableId = 0 # type: int + self.keyDtype = 0 # type: int + 
self.valueDtype = 0 # type: int + + @classmethod + def InitFromBuf(cls, buf, pos): + hashtableOptions = HashtableOptions() + hashtableOptions.Init(buf, pos) + return cls.InitFromObj(hashtableOptions) + + @classmethod + def InitFromObj(cls, hashtableOptions): + x = HashtableOptionsT() + x._UnPack(hashtableOptions) + return x + + # HashtableOptionsT + def _UnPack(self, hashtableOptions): + if hashtableOptions is None: + return + self.tableId = hashtableOptions.TableId() + self.keyDtype = hashtableOptions.KeyDtype() + self.valueDtype = hashtableOptions.ValueDtype() + + # HashtableOptionsT + def Pack(self, builder): + HashtableOptionsStart(builder) + HashtableOptionsAddTableId(builder, self.tableId) + HashtableOptionsAddKeyDtype(builder, self.keyDtype) + HashtableOptionsAddValueDtype(builder, self.valueDtype) + hashtableOptions = HashtableOptionsEnd(builder) + return hashtableOptions +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: tflite + +from flatbuffers.compat import import_numpy +np = import_numpy() + +class HashtableSizeOptions(object): + __slots__ = ['_tab'] + + @classmethod + def GetRootAs(cls, buf, offset=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = HashtableSizeOptions() + x.Init(buf, n + offset) + return x + + @classmethod + def GetRootAsHashtableSizeOptions(cls, buf, offset=0): + """This method is deprecated. 
Please switch to GetRootAs.""" + return cls.GetRootAs(buf, offset) + @classmethod + def HashtableSizeOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed) + + # HashtableSizeOptions + def Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + +def HashtableSizeOptionsStart(builder): builder.StartObject(0) +def Start(builder): + return HashtableSizeOptionsStart(builder) +def HashtableSizeOptionsEnd(builder): return builder.EndObject() +def End(builder): + return HashtableSizeOptionsEnd(builder) + +class HashtableSizeOptionsT(object): + + # HashtableSizeOptionsT + def __init__(self): + pass + + @classmethod + def InitFromBuf(cls, buf, pos): + hashtableSizeOptions = HashtableSizeOptions() + hashtableSizeOptions.Init(buf, pos) + return cls.InitFromObj(hashtableSizeOptions) + + @classmethod + def InitFromObj(cls, hashtableSizeOptions): + x = HashtableSizeOptionsT() + x._UnPack(hashtableSizeOptions) + return x + + # HashtableSizeOptionsT + def _UnPack(self, hashtableSizeOptions): + if hashtableSizeOptions is None: + return + + # HashtableSizeOptionsT + def Pack(self, builder): + HashtableSizeOptionsStart(builder) + hashtableSizeOptions = HashtableSizeOptionsEnd(builder) + return hashtableSizeOptions +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: tflite + +from flatbuffers.compat import import_numpy +np = import_numpy() + +class IfOptions(object): + __slots__ = ['_tab'] + + @classmethod + def GetRootAs(cls, buf, offset=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = IfOptions() + x.Init(buf, n + offset) + return x + + @classmethod + def GetRootAsIfOptions(cls, buf, offset=0): + """This method is deprecated. 
Please switch to GetRootAs.""" + return cls.GetRootAs(buf, offset) + @classmethod + def IfOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed) + + # IfOptions + def Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + + # IfOptions + def ThenSubgraphIndex(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.Get(flatbuffers.number_types.Int32Flags, o + self._tab.Pos) + return 0 + + # IfOptions + def ElseSubgraphIndex(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) + if o != 0: + return self._tab.Get(flatbuffers.number_types.Int32Flags, o + self._tab.Pos) + return 0 + +def IfOptionsStart(builder): builder.StartObject(2) +def Start(builder): + return IfOptionsStart(builder) +def IfOptionsAddThenSubgraphIndex(builder, thenSubgraphIndex): builder.PrependInt32Slot(0, thenSubgraphIndex, 0) +def AddThenSubgraphIndex(builder, thenSubgraphIndex): + return IfOptionsAddThenSubgraphIndex(builder, thenSubgraphIndex) +def IfOptionsAddElseSubgraphIndex(builder, elseSubgraphIndex): builder.PrependInt32Slot(1, elseSubgraphIndex, 0) +def AddElseSubgraphIndex(builder, elseSubgraphIndex): + return IfOptionsAddElseSubgraphIndex(builder, elseSubgraphIndex) +def IfOptionsEnd(builder): return builder.EndObject() +def End(builder): + return IfOptionsEnd(builder) + +class IfOptionsT(object): + + # IfOptionsT + def __init__(self): + self.thenSubgraphIndex = 0 # type: int + self.elseSubgraphIndex = 0 # type: int + + @classmethod + def InitFromBuf(cls, buf, pos): + ifOptions = IfOptions() + ifOptions.Init(buf, pos) + return cls.InitFromObj(ifOptions) + + @classmethod + def InitFromObj(cls, ifOptions): + x = IfOptionsT() + x._UnPack(ifOptions) + return x + + # IfOptionsT + def _UnPack(self, ifOptions): + if ifOptions is None: + return + self.thenSubgraphIndex = 
ifOptions.ThenSubgraphIndex() + self.elseSubgraphIndex = ifOptions.ElseSubgraphIndex() + + # IfOptionsT + def Pack(self, builder): + IfOptionsStart(builder) + IfOptionsAddThenSubgraphIndex(builder, self.thenSubgraphIndex) + IfOptionsAddElseSubgraphIndex(builder, self.elseSubgraphIndex) + ifOptions = IfOptionsEnd(builder) + return ifOptions +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: tflite + +from flatbuffers.compat import import_numpy +np = import_numpy() + +class Int32Vector(object): + __slots__ = ['_tab'] + + @classmethod + def GetRootAs(cls, buf, offset=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = Int32Vector() + x.Init(buf, n + offset) + return x + + @classmethod + def GetRootAsInt32Vector(cls, buf, offset=0): + """This method is deprecated. Please switch to GetRootAs.""" + return cls.GetRootAs(buf, offset) + @classmethod + def Int32VectorBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed) + + # Int32Vector + def Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + + # Int32Vector + def Values(self, j): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + a = self._tab.Vector(o) + return self._tab.Get(flatbuffers.number_types.Int32Flags, a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 4)) + return 0 + + # Int32Vector + def ValuesAsNumpy(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Int32Flags, o) + return 0 + + # Int32Vector + def ValuesLength(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.VectorLen(o) + return 0 + + # Int32Vector + def ValuesIsNone(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + return 
o == 0 + +def Int32VectorStart(builder): builder.StartObject(1) +def Start(builder): + return Int32VectorStart(builder) +def Int32VectorAddValues(builder, values): builder.PrependUOffsetTRelativeSlot(0, flatbuffers.number_types.UOffsetTFlags.py_type(values), 0) +def AddValues(builder, values): + return Int32VectorAddValues(builder, values) +def Int32VectorStartValuesVector(builder, numElems): return builder.StartVector(4, numElems, 4) +def StartValuesVector(builder, numElems): + return Int32VectorStartValuesVector(builder, numElems) +def Int32VectorEnd(builder): return builder.EndObject() +def End(builder): + return Int32VectorEnd(builder) +try: + from typing import List +except: + pass + +class Int32VectorT(object): + + # Int32VectorT + def __init__(self): + self.values = None # type: List[int] + + @classmethod + def InitFromBuf(cls, buf, pos): + int32vector = Int32Vector() + int32vector.Init(buf, pos) + return cls.InitFromObj(int32vector) + + @classmethod + def InitFromObj(cls, int32vector): + x = Int32VectorT() + x._UnPack(int32vector) + return x + + # Int32VectorT + def _UnPack(self, int32vector): + if int32vector is None: + return + if not int32vector.ValuesIsNone(): + if np is None: + self.values = [] + for i in range(int32vector.ValuesLength()): + self.values.append(int32vector.Values(i)) + else: + self.values = int32vector.ValuesAsNumpy() + + # Int32VectorT + def Pack(self, builder): + if self.values is not None: + if np is not None and type(self.values) is np.ndarray: + values = builder.CreateNumpyVector(self.values) + else: + Int32VectorStartValuesVector(builder, len(self.values)) + for i in reversed(range(len(self.values))): + builder.PrependInt32(self.values[i]) + values = builder.EndVector() + Int32VectorStart(builder) + if self.values is not None: + Int32VectorAddValues(builder, values) + int32vector = Int32VectorEnd(builder) + return int32vector +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: tflite + +from 
flatbuffers.compat import import_numpy +np = import_numpy() + +class L2NormOptions(object): + __slots__ = ['_tab'] + + @classmethod + def GetRootAs(cls, buf, offset=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = L2NormOptions() + x.Init(buf, n + offset) + return x + + @classmethod + def GetRootAsL2NormOptions(cls, buf, offset=0): + """This method is deprecated. Please switch to GetRootAs.""" + return cls.GetRootAs(buf, offset) + @classmethod + def L2NormOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed) + + # L2NormOptions + def Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + + # L2NormOptions + def FusedActivationFunction(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.Get(flatbuffers.number_types.Int8Flags, o + self._tab.Pos) + return 0 + +def L2NormOptionsStart(builder): builder.StartObject(1) +def Start(builder): + return L2NormOptionsStart(builder) +def L2NormOptionsAddFusedActivationFunction(builder, fusedActivationFunction): builder.PrependInt8Slot(0, fusedActivationFunction, 0) +def AddFusedActivationFunction(builder, fusedActivationFunction): + return L2NormOptionsAddFusedActivationFunction(builder, fusedActivationFunction) +def L2NormOptionsEnd(builder): return builder.EndObject() +def End(builder): + return L2NormOptionsEnd(builder) + +class L2NormOptionsT(object): + + # L2NormOptionsT + def __init__(self): + self.fusedActivationFunction = 0 # type: int + + @classmethod + def InitFromBuf(cls, buf, pos): + l2normOptions = L2NormOptions() + l2normOptions.Init(buf, pos) + return cls.InitFromObj(l2normOptions) + + @classmethod + def InitFromObj(cls, l2normOptions): + x = L2NormOptionsT() + x._UnPack(l2normOptions) + return x + + # L2NormOptionsT + def _UnPack(self, l2normOptions): + if l2normOptions is None: + 
return + self.fusedActivationFunction = l2normOptions.FusedActivationFunction() + + # L2NormOptionsT + def Pack(self, builder): + L2NormOptionsStart(builder) + L2NormOptionsAddFusedActivationFunction(builder, self.fusedActivationFunction) + l2normOptions = L2NormOptionsEnd(builder) + return l2normOptions +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: tflite + +from flatbuffers.compat import import_numpy +np = import_numpy() + +class LSHProjectionOptions(object): + __slots__ = ['_tab'] + + @classmethod + def GetRootAs(cls, buf, offset=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = LSHProjectionOptions() + x.Init(buf, n + offset) + return x + + @classmethod + def GetRootAsLSHProjectionOptions(cls, buf, offset=0): + """This method is deprecated. Please switch to GetRootAs.""" + return cls.GetRootAs(buf, offset) + @classmethod + def LSHProjectionOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed) + + # LSHProjectionOptions + def Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + + # LSHProjectionOptions + def Type(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.Get(flatbuffers.number_types.Int8Flags, o + self._tab.Pos) + return 0 + +def LSHProjectionOptionsStart(builder): builder.StartObject(1) +def Start(builder): + return LSHProjectionOptionsStart(builder) +def LSHProjectionOptionsAddType(builder, type): builder.PrependInt8Slot(0, type, 0) +def AddType(builder, type): + return LSHProjectionOptionsAddType(builder, type) +def LSHProjectionOptionsEnd(builder): return builder.EndObject() +def End(builder): + return LSHProjectionOptionsEnd(builder) + +class LSHProjectionOptionsT(object): + + # LSHProjectionOptionsT + def __init__(self): + self.type = 0 # type: int + + @classmethod + def 
InitFromBuf(cls, buf, pos): + lshprojectionOptions = LSHProjectionOptions() + lshprojectionOptions.Init(buf, pos) + return cls.InitFromObj(lshprojectionOptions) + + @classmethod + def InitFromObj(cls, lshprojectionOptions): + x = LSHProjectionOptionsT() + x._UnPack(lshprojectionOptions) + return x + + # LSHProjectionOptionsT + def _UnPack(self, lshprojectionOptions): + if lshprojectionOptions is None: + return + self.type = lshprojectionOptions.Type() + + # LSHProjectionOptionsT + def Pack(self, builder): + LSHProjectionOptionsStart(builder) + LSHProjectionOptionsAddType(builder, self.type) + lshprojectionOptions = LSHProjectionOptionsEnd(builder) + return lshprojectionOptions +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: tflite + +class LSHProjectionType(object): + UNKNOWN = 0 + SPARSE = 1 + DENSE = 2 +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: tflite + +class LSTMKernelType(object): + FULL = 0 + BASIC = 1 +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: tflite + +from flatbuffers.compat import import_numpy +np = import_numpy() + +class LSTMOptions(object): + __slots__ = ['_tab'] + + @classmethod + def GetRootAs(cls, buf, offset=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = LSTMOptions() + x.Init(buf, n + offset) + return x + + @classmethod + def GetRootAsLSTMOptions(cls, buf, offset=0): + """This method is deprecated. 
Please switch to GetRootAs.""" + return cls.GetRootAs(buf, offset) + @classmethod + def LSTMOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed) + + # LSTMOptions + def Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + + # LSTMOptions + def FusedActivationFunction(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.Get(flatbuffers.number_types.Int8Flags, o + self._tab.Pos) + return 0 + + # LSTMOptions + def CellClip(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) + if o != 0: + return self._tab.Get(flatbuffers.number_types.Float32Flags, o + self._tab.Pos) + return 0.0 + + # LSTMOptions + def ProjClip(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8)) + if o != 0: + return self._tab.Get(flatbuffers.number_types.Float32Flags, o + self._tab.Pos) + return 0.0 + + # LSTMOptions + def KernelType(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(10)) + if o != 0: + return self._tab.Get(flatbuffers.number_types.Int8Flags, o + self._tab.Pos) + return 0 + + # LSTMOptions + def AsymmetricQuantizeInputs(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(12)) + if o != 0: + return bool(self._tab.Get(flatbuffers.number_types.BoolFlags, o + self._tab.Pos)) + return False + +def LSTMOptionsStart(builder): builder.StartObject(5) +def Start(builder): + return LSTMOptionsStart(builder) +def LSTMOptionsAddFusedActivationFunction(builder, fusedActivationFunction): builder.PrependInt8Slot(0, fusedActivationFunction, 0) +def AddFusedActivationFunction(builder, fusedActivationFunction): + return LSTMOptionsAddFusedActivationFunction(builder, fusedActivationFunction) +def LSTMOptionsAddCellClip(builder, cellClip): builder.PrependFloat32Slot(1, cellClip, 0.0) +def 
AddCellClip(builder, cellClip): + return LSTMOptionsAddCellClip(builder, cellClip) +def LSTMOptionsAddProjClip(builder, projClip): builder.PrependFloat32Slot(2, projClip, 0.0) +def AddProjClip(builder, projClip): + return LSTMOptionsAddProjClip(builder, projClip) +def LSTMOptionsAddKernelType(builder, kernelType): builder.PrependInt8Slot(3, kernelType, 0) +def AddKernelType(builder, kernelType): + return LSTMOptionsAddKernelType(builder, kernelType) +def LSTMOptionsAddAsymmetricQuantizeInputs(builder, asymmetricQuantizeInputs): builder.PrependBoolSlot(4, asymmetricQuantizeInputs, 0) +def AddAsymmetricQuantizeInputs(builder, asymmetricQuantizeInputs): + return LSTMOptionsAddAsymmetricQuantizeInputs(builder, asymmetricQuantizeInputs) +def LSTMOptionsEnd(builder): return builder.EndObject() +def End(builder): + return LSTMOptionsEnd(builder) + +class LSTMOptionsT(object): + + # LSTMOptionsT + def __init__(self): + self.fusedActivationFunction = 0 # type: int + self.cellClip = 0.0 # type: float + self.projClip = 0.0 # type: float + self.kernelType = 0 # type: int + self.asymmetricQuantizeInputs = False # type: bool + + @classmethod + def InitFromBuf(cls, buf, pos): + lstmoptions = LSTMOptions() + lstmoptions.Init(buf, pos) + return cls.InitFromObj(lstmoptions) + + @classmethod + def InitFromObj(cls, lstmoptions): + x = LSTMOptionsT() + x._UnPack(lstmoptions) + return x + + # LSTMOptionsT + def _UnPack(self, lstmoptions): + if lstmoptions is None: + return + self.fusedActivationFunction = lstmoptions.FusedActivationFunction() + self.cellClip = lstmoptions.CellClip() + self.projClip = lstmoptions.ProjClip() + self.kernelType = lstmoptions.KernelType() + self.asymmetricQuantizeInputs = lstmoptions.AsymmetricQuantizeInputs() + + # LSTMOptionsT + def Pack(self, builder): + LSTMOptionsStart(builder) + LSTMOptionsAddFusedActivationFunction(builder, self.fusedActivationFunction) + LSTMOptionsAddCellClip(builder, self.cellClip) + LSTMOptionsAddProjClip(builder, self.projClip) + 
LSTMOptionsAddKernelType(builder, self.kernelType) + LSTMOptionsAddAsymmetricQuantizeInputs(builder, self.asymmetricQuantizeInputs) + lstmoptions = LSTMOptionsEnd(builder) + return lstmoptions +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: tflite + +from flatbuffers.compat import import_numpy +np = import_numpy() + +class LeakyReluOptions(object): + __slots__ = ['_tab'] + + @classmethod + def GetRootAs(cls, buf, offset=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = LeakyReluOptions() + x.Init(buf, n + offset) + return x + + @classmethod + def GetRootAsLeakyReluOptions(cls, buf, offset=0): + """This method is deprecated. Please switch to GetRootAs.""" + return cls.GetRootAs(buf, offset) + @classmethod + def LeakyReluOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed) + + # LeakyReluOptions + def Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + + # LeakyReluOptions + def Alpha(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.Get(flatbuffers.number_types.Float32Flags, o + self._tab.Pos) + return 0.0 + +def LeakyReluOptionsStart(builder): builder.StartObject(1) +def Start(builder): + return LeakyReluOptionsStart(builder) +def LeakyReluOptionsAddAlpha(builder, alpha): builder.PrependFloat32Slot(0, alpha, 0.0) +def AddAlpha(builder, alpha): + return LeakyReluOptionsAddAlpha(builder, alpha) +def LeakyReluOptionsEnd(builder): return builder.EndObject() +def End(builder): + return LeakyReluOptionsEnd(builder) + +class LeakyReluOptionsT(object): + + # LeakyReluOptionsT + def __init__(self): + self.alpha = 0.0 # type: float + + @classmethod + def InitFromBuf(cls, buf, pos): + leakyReluOptions = LeakyReluOptions() + leakyReluOptions.Init(buf, pos) + return cls.InitFromObj(leakyReluOptions) + + 
@classmethod + def InitFromObj(cls, leakyReluOptions): + x = LeakyReluOptionsT() + x._UnPack(leakyReluOptions) + return x + + # LeakyReluOptionsT + def _UnPack(self, leakyReluOptions): + if leakyReluOptions is None: + return + self.alpha = leakyReluOptions.Alpha() + + # LeakyReluOptionsT + def Pack(self, builder): + LeakyReluOptionsStart(builder) + LeakyReluOptionsAddAlpha(builder, self.alpha) + leakyReluOptions = LeakyReluOptionsEnd(builder) + return leakyReluOptions +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: tflite + +from flatbuffers.compat import import_numpy +np = import_numpy() + +class LessEqualOptions(object): + __slots__ = ['_tab'] + + @classmethod + def GetRootAs(cls, buf, offset=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = LessEqualOptions() + x.Init(buf, n + offset) + return x + + @classmethod + def GetRootAsLessEqualOptions(cls, buf, offset=0): + """This method is deprecated. Please switch to GetRootAs.""" + return cls.GetRootAs(buf, offset) + @classmethod + def LessEqualOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed) + + # LessEqualOptions + def Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + +def LessEqualOptionsStart(builder): builder.StartObject(0) +def Start(builder): + return LessEqualOptionsStart(builder) +def LessEqualOptionsEnd(builder): return builder.EndObject() +def End(builder): + return LessEqualOptionsEnd(builder) + +class LessEqualOptionsT(object): + + # LessEqualOptionsT + def __init__(self): + pass + + @classmethod + def InitFromBuf(cls, buf, pos): + lessEqualOptions = LessEqualOptions() + lessEqualOptions.Init(buf, pos) + return cls.InitFromObj(lessEqualOptions) + + @classmethod + def InitFromObj(cls, lessEqualOptions): + x = LessEqualOptionsT() + x._UnPack(lessEqualOptions) + return x + + # 
LessEqualOptionsT + def _UnPack(self, lessEqualOptions): + if lessEqualOptions is None: + return + + # LessEqualOptionsT + def Pack(self, builder): + LessEqualOptionsStart(builder) + lessEqualOptions = LessEqualOptionsEnd(builder) + return lessEqualOptions +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: tflite + +from flatbuffers.compat import import_numpy +np = import_numpy() + +class LessOptions(object): + __slots__ = ['_tab'] + + @classmethod + def GetRootAs(cls, buf, offset=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = LessOptions() + x.Init(buf, n + offset) + return x + + @classmethod + def GetRootAsLessOptions(cls, buf, offset=0): + """This method is deprecated. Please switch to GetRootAs.""" + return cls.GetRootAs(buf, offset) + @classmethod + def LessOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed) + + # LessOptions + def Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + +def LessOptionsStart(builder): builder.StartObject(0) +def Start(builder): + return LessOptionsStart(builder) +def LessOptionsEnd(builder): return builder.EndObject() +def End(builder): + return LessOptionsEnd(builder) + +class LessOptionsT(object): + + # LessOptionsT + def __init__(self): + pass + + @classmethod + def InitFromBuf(cls, buf, pos): + lessOptions = LessOptions() + lessOptions.Init(buf, pos) + return cls.InitFromObj(lessOptions) + + @classmethod + def InitFromObj(cls, lessOptions): + x = LessOptionsT() + x._UnPack(lessOptions) + return x + + # LessOptionsT + def _UnPack(self, lessOptions): + if lessOptions is None: + return + + # LessOptionsT + def Pack(self, builder): + LessOptionsStart(builder) + lessOptions = LessOptionsEnd(builder) + return lessOptions +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: tflite + +from 
flatbuffers.compat import import_numpy +np = import_numpy() + +class LocalResponseNormalizationOptions(object): + __slots__ = ['_tab'] + + @classmethod + def GetRootAs(cls, buf, offset=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = LocalResponseNormalizationOptions() + x.Init(buf, n + offset) + return x + + @classmethod + def GetRootAsLocalResponseNormalizationOptions(cls, buf, offset=0): + """This method is deprecated. Please switch to GetRootAs.""" + return cls.GetRootAs(buf, offset) + @classmethod + def LocalResponseNormalizationOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed) + + # LocalResponseNormalizationOptions + def Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + + # LocalResponseNormalizationOptions + def Radius(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.Get(flatbuffers.number_types.Int32Flags, o + self._tab.Pos) + return 0 + + # LocalResponseNormalizationOptions + def Bias(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) + if o != 0: + return self._tab.Get(flatbuffers.number_types.Float32Flags, o + self._tab.Pos) + return 0.0 + + # LocalResponseNormalizationOptions + def Alpha(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8)) + if o != 0: + return self._tab.Get(flatbuffers.number_types.Float32Flags, o + self._tab.Pos) + return 0.0 + + # LocalResponseNormalizationOptions + def Beta(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(10)) + if o != 0: + return self._tab.Get(flatbuffers.number_types.Float32Flags, o + self._tab.Pos) + return 0.0 + +def LocalResponseNormalizationOptionsStart(builder): builder.StartObject(4) +def Start(builder): + return LocalResponseNormalizationOptionsStart(builder) +def 
LocalResponseNormalizationOptionsAddRadius(builder, radius): builder.PrependInt32Slot(0, radius, 0) +def AddRadius(builder, radius): + return LocalResponseNormalizationOptionsAddRadius(builder, radius) +def LocalResponseNormalizationOptionsAddBias(builder, bias): builder.PrependFloat32Slot(1, bias, 0.0) +def AddBias(builder, bias): + return LocalResponseNormalizationOptionsAddBias(builder, bias) +def LocalResponseNormalizationOptionsAddAlpha(builder, alpha): builder.PrependFloat32Slot(2, alpha, 0.0) +def AddAlpha(builder, alpha): + return LocalResponseNormalizationOptionsAddAlpha(builder, alpha) +def LocalResponseNormalizationOptionsAddBeta(builder, beta): builder.PrependFloat32Slot(3, beta, 0.0) +def AddBeta(builder, beta): + return LocalResponseNormalizationOptionsAddBeta(builder, beta) +def LocalResponseNormalizationOptionsEnd(builder): return builder.EndObject() +def End(builder): + return LocalResponseNormalizationOptionsEnd(builder) + +class LocalResponseNormalizationOptionsT(object): + + # LocalResponseNormalizationOptionsT + def __init__(self): + self.radius = 0 # type: int + self.bias = 0.0 # type: float + self.alpha = 0.0 # type: float + self.beta = 0.0 # type: float + + @classmethod + def InitFromBuf(cls, buf, pos): + localResponseNormalizationOptions = LocalResponseNormalizationOptions() + localResponseNormalizationOptions.Init(buf, pos) + return cls.InitFromObj(localResponseNormalizationOptions) + + @classmethod + def InitFromObj(cls, localResponseNormalizationOptions): + x = LocalResponseNormalizationOptionsT() + x._UnPack(localResponseNormalizationOptions) + return x + + # LocalResponseNormalizationOptionsT + def _UnPack(self, localResponseNormalizationOptions): + if localResponseNormalizationOptions is None: + return + self.radius = localResponseNormalizationOptions.Radius() + self.bias = localResponseNormalizationOptions.Bias() + self.alpha = localResponseNormalizationOptions.Alpha() + self.beta = localResponseNormalizationOptions.Beta() + + # 
LocalResponseNormalizationOptionsT + def Pack(self, builder): + LocalResponseNormalizationOptionsStart(builder) + LocalResponseNormalizationOptionsAddRadius(builder, self.radius) + LocalResponseNormalizationOptionsAddBias(builder, self.bias) + LocalResponseNormalizationOptionsAddAlpha(builder, self.alpha) + LocalResponseNormalizationOptionsAddBeta(builder, self.beta) + localResponseNormalizationOptions = LocalResponseNormalizationOptionsEnd(builder) + return localResponseNormalizationOptions +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: tflite + +from flatbuffers.compat import import_numpy +np = import_numpy() + +class LogSoftmaxOptions(object): + __slots__ = ['_tab'] + + @classmethod + def GetRootAs(cls, buf, offset=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = LogSoftmaxOptions() + x.Init(buf, n + offset) + return x + + @classmethod + def GetRootAsLogSoftmaxOptions(cls, buf, offset=0): + """This method is deprecated. 
Please switch to GetRootAs.""" + return cls.GetRootAs(buf, offset) + @classmethod + def LogSoftmaxOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed) + + # LogSoftmaxOptions + def Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + +def LogSoftmaxOptionsStart(builder): builder.StartObject(0) +def Start(builder): + return LogSoftmaxOptionsStart(builder) +def LogSoftmaxOptionsEnd(builder): return builder.EndObject() +def End(builder): + return LogSoftmaxOptionsEnd(builder) + +class LogSoftmaxOptionsT(object): + + # LogSoftmaxOptionsT + def __init__(self): + pass + + @classmethod + def InitFromBuf(cls, buf, pos): + logSoftmaxOptions = LogSoftmaxOptions() + logSoftmaxOptions.Init(buf, pos) + return cls.InitFromObj(logSoftmaxOptions) + + @classmethod + def InitFromObj(cls, logSoftmaxOptions): + x = LogSoftmaxOptionsT() + x._UnPack(logSoftmaxOptions) + return x + + # LogSoftmaxOptionsT + def _UnPack(self, logSoftmaxOptions): + if logSoftmaxOptions is None: + return + + # LogSoftmaxOptionsT + def Pack(self, builder): + LogSoftmaxOptionsStart(builder) + logSoftmaxOptions = LogSoftmaxOptionsEnd(builder) + return logSoftmaxOptions +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: tflite + +from flatbuffers.compat import import_numpy +np = import_numpy() + +class LogicalAndOptions(object): + __slots__ = ['_tab'] + + @classmethod + def GetRootAs(cls, buf, offset=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = LogicalAndOptions() + x.Init(buf, n + offset) + return x + + @classmethod + def GetRootAsLogicalAndOptions(cls, buf, offset=0): + """This method is deprecated. 
Please switch to GetRootAs.""" + return cls.GetRootAs(buf, offset) + @classmethod + def LogicalAndOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed) + + # LogicalAndOptions + def Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + +def LogicalAndOptionsStart(builder): builder.StartObject(0) +def Start(builder): + return LogicalAndOptionsStart(builder) +def LogicalAndOptionsEnd(builder): return builder.EndObject() +def End(builder): + return LogicalAndOptionsEnd(builder) + +class LogicalAndOptionsT(object): + + # LogicalAndOptionsT + def __init__(self): + pass + + @classmethod + def InitFromBuf(cls, buf, pos): + logicalAndOptions = LogicalAndOptions() + logicalAndOptions.Init(buf, pos) + return cls.InitFromObj(logicalAndOptions) + + @classmethod + def InitFromObj(cls, logicalAndOptions): + x = LogicalAndOptionsT() + x._UnPack(logicalAndOptions) + return x + + # LogicalAndOptionsT + def _UnPack(self, logicalAndOptions): + if logicalAndOptions is None: + return + + # LogicalAndOptionsT + def Pack(self, builder): + LogicalAndOptionsStart(builder) + logicalAndOptions = LogicalAndOptionsEnd(builder) + return logicalAndOptions +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: tflite + +from flatbuffers.compat import import_numpy +np = import_numpy() + +class LogicalNotOptions(object): + __slots__ = ['_tab'] + + @classmethod + def GetRootAs(cls, buf, offset=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = LogicalNotOptions() + x.Init(buf, n + offset) + return x + + @classmethod + def GetRootAsLogicalNotOptions(cls, buf, offset=0): + """This method is deprecated. 
Please switch to GetRootAs.""" + return cls.GetRootAs(buf, offset) + @classmethod + def LogicalNotOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed) + + # LogicalNotOptions + def Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + +def LogicalNotOptionsStart(builder): builder.StartObject(0) +def Start(builder): + return LogicalNotOptionsStart(builder) +def LogicalNotOptionsEnd(builder): return builder.EndObject() +def End(builder): + return LogicalNotOptionsEnd(builder) + +class LogicalNotOptionsT(object): + + # LogicalNotOptionsT + def __init__(self): + pass + + @classmethod + def InitFromBuf(cls, buf, pos): + logicalNotOptions = LogicalNotOptions() + logicalNotOptions.Init(buf, pos) + return cls.InitFromObj(logicalNotOptions) + + @classmethod + def InitFromObj(cls, logicalNotOptions): + x = LogicalNotOptionsT() + x._UnPack(logicalNotOptions) + return x + + # LogicalNotOptionsT + def _UnPack(self, logicalNotOptions): + if logicalNotOptions is None: + return + + # LogicalNotOptionsT + def Pack(self, builder): + LogicalNotOptionsStart(builder) + logicalNotOptions = LogicalNotOptionsEnd(builder) + return logicalNotOptions +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: tflite + +from flatbuffers.compat import import_numpy +np = import_numpy() + +class LogicalOrOptions(object): + __slots__ = ['_tab'] + + @classmethod + def GetRootAs(cls, buf, offset=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = LogicalOrOptions() + x.Init(buf, n + offset) + return x + + @classmethod + def GetRootAsLogicalOrOptions(cls, buf, offset=0): + """This method is deprecated. 
Please switch to GetRootAs.""" + return cls.GetRootAs(buf, offset) + @classmethod + def LogicalOrOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed) + + # LogicalOrOptions + def Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + +def LogicalOrOptionsStart(builder): builder.StartObject(0) +def Start(builder): + return LogicalOrOptionsStart(builder) +def LogicalOrOptionsEnd(builder): return builder.EndObject() +def End(builder): + return LogicalOrOptionsEnd(builder) + +class LogicalOrOptionsT(object): + + # LogicalOrOptionsT + def __init__(self): + pass + + @classmethod + def InitFromBuf(cls, buf, pos): + logicalOrOptions = LogicalOrOptions() + logicalOrOptions.Init(buf, pos) + return cls.InitFromObj(logicalOrOptions) + + @classmethod + def InitFromObj(cls, logicalOrOptions): + x = LogicalOrOptionsT() + x._UnPack(logicalOrOptions) + return x + + # LogicalOrOptionsT + def _UnPack(self, logicalOrOptions): + if logicalOrOptions is None: + return + + # LogicalOrOptionsT + def Pack(self, builder): + LogicalOrOptionsStart(builder) + logicalOrOptions = LogicalOrOptionsEnd(builder) + return logicalOrOptions +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: tflite + +from flatbuffers.compat import import_numpy +np = import_numpy() + +class MatrixDiagOptions(object): + __slots__ = ['_tab'] + + @classmethod + def GetRootAs(cls, buf, offset=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = MatrixDiagOptions() + x.Init(buf, n + offset) + return x + + @classmethod + def GetRootAsMatrixDiagOptions(cls, buf, offset=0): + """This method is deprecated. 
Please switch to GetRootAs.""" + return cls.GetRootAs(buf, offset) + @classmethod + def MatrixDiagOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed) + + # MatrixDiagOptions + def Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + +def MatrixDiagOptionsStart(builder): builder.StartObject(0) +def Start(builder): + return MatrixDiagOptionsStart(builder) +def MatrixDiagOptionsEnd(builder): return builder.EndObject() +def End(builder): + return MatrixDiagOptionsEnd(builder) + +class MatrixDiagOptionsT(object): + + # MatrixDiagOptionsT + def __init__(self): + pass + + @classmethod + def InitFromBuf(cls, buf, pos): + matrixDiagOptions = MatrixDiagOptions() + matrixDiagOptions.Init(buf, pos) + return cls.InitFromObj(matrixDiagOptions) + + @classmethod + def InitFromObj(cls, matrixDiagOptions): + x = MatrixDiagOptionsT() + x._UnPack(matrixDiagOptions) + return x + + # MatrixDiagOptionsT + def _UnPack(self, matrixDiagOptions): + if matrixDiagOptions is None: + return + + # MatrixDiagOptionsT + def Pack(self, builder): + MatrixDiagOptionsStart(builder) + matrixDiagOptions = MatrixDiagOptionsEnd(builder) + return matrixDiagOptions +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: tflite + +from flatbuffers.compat import import_numpy +np = import_numpy() + +class MatrixSetDiagOptions(object): + __slots__ = ['_tab'] + + @classmethod + def GetRootAs(cls, buf, offset=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = MatrixSetDiagOptions() + x.Init(buf, n + offset) + return x + + @classmethod + def GetRootAsMatrixSetDiagOptions(cls, buf, offset=0): + """This method is deprecated. 
Please switch to GetRootAs.""" + return cls.GetRootAs(buf, offset) + @classmethod + def MatrixSetDiagOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed) + + # MatrixSetDiagOptions + def Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + +def MatrixSetDiagOptionsStart(builder): builder.StartObject(0) +def Start(builder): + return MatrixSetDiagOptionsStart(builder) +def MatrixSetDiagOptionsEnd(builder): return builder.EndObject() +def End(builder): + return MatrixSetDiagOptionsEnd(builder) + +class MatrixSetDiagOptionsT(object): + + # MatrixSetDiagOptionsT + def __init__(self): + pass + + @classmethod + def InitFromBuf(cls, buf, pos): + matrixSetDiagOptions = MatrixSetDiagOptions() + matrixSetDiagOptions.Init(buf, pos) + return cls.InitFromObj(matrixSetDiagOptions) + + @classmethod + def InitFromObj(cls, matrixSetDiagOptions): + x = MatrixSetDiagOptionsT() + x._UnPack(matrixSetDiagOptions) + return x + + # MatrixSetDiagOptionsT + def _UnPack(self, matrixSetDiagOptions): + if matrixSetDiagOptions is None: + return + + # MatrixSetDiagOptionsT + def Pack(self, builder): + MatrixSetDiagOptionsStart(builder) + matrixSetDiagOptions = MatrixSetDiagOptionsEnd(builder) + return matrixSetDiagOptions +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: tflite + +from flatbuffers.compat import import_numpy +np = import_numpy() + +class MaximumMinimumOptions(object): + __slots__ = ['_tab'] + + @classmethod + def GetRootAs(cls, buf, offset=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = MaximumMinimumOptions() + x.Init(buf, n + offset) + return x + + @classmethod + def GetRootAsMaximumMinimumOptions(cls, buf, offset=0): + """This method is deprecated. 
Please switch to GetRootAs.""" + return cls.GetRootAs(buf, offset) + @classmethod + def MaximumMinimumOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed) + + # MaximumMinimumOptions + def Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + +def MaximumMinimumOptionsStart(builder): builder.StartObject(0) +def Start(builder): + return MaximumMinimumOptionsStart(builder) +def MaximumMinimumOptionsEnd(builder): return builder.EndObject() +def End(builder): + return MaximumMinimumOptionsEnd(builder) + +class MaximumMinimumOptionsT(object): + + # MaximumMinimumOptionsT + def __init__(self): + pass + + @classmethod + def InitFromBuf(cls, buf, pos): + maximumMinimumOptions = MaximumMinimumOptions() + maximumMinimumOptions.Init(buf, pos) + return cls.InitFromObj(maximumMinimumOptions) + + @classmethod + def InitFromObj(cls, maximumMinimumOptions): + x = MaximumMinimumOptionsT() + x._UnPack(maximumMinimumOptions) + return x + + # MaximumMinimumOptionsT + def _UnPack(self, maximumMinimumOptions): + if maximumMinimumOptions is None: + return + + # MaximumMinimumOptionsT + def Pack(self, builder): + MaximumMinimumOptionsStart(builder) + maximumMinimumOptions = MaximumMinimumOptionsEnd(builder) + return maximumMinimumOptions +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: tflite + +from flatbuffers.compat import import_numpy +np = import_numpy() + +class Metadata(object): + __slots__ = ['_tab'] + + @classmethod + def GetRootAs(cls, buf, offset=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = Metadata() + x.Init(buf, n + offset) + return x + + @classmethod + def GetRootAsMetadata(cls, buf, offset=0): + """This method is deprecated. 
Please switch to GetRootAs.""" + return cls.GetRootAs(buf, offset) + @classmethod + def MetadataBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed) + + # Metadata + def Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + + # Metadata + def Name(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.String(o + self._tab.Pos) + return None + + # Metadata + def Buffer(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) + if o != 0: + return self._tab.Get(flatbuffers.number_types.Uint32Flags, o + self._tab.Pos) + return 0 + +def MetadataStart(builder): builder.StartObject(2) +def Start(builder): + return MetadataStart(builder) +def MetadataAddName(builder, name): builder.PrependUOffsetTRelativeSlot(0, flatbuffers.number_types.UOffsetTFlags.py_type(name), 0) +def AddName(builder, name): + return MetadataAddName(builder, name) +def MetadataAddBuffer(builder, buffer): builder.PrependUint32Slot(1, buffer, 0) +def AddBuffer(builder, buffer): + return MetadataAddBuffer(builder, buffer) +def MetadataEnd(builder): return builder.EndObject() +def End(builder): + return MetadataEnd(builder) + +class MetadataT(object): + + # MetadataT + def __init__(self): + self.name = None # type: str + self.buffer = 0 # type: int + + @classmethod + def InitFromBuf(cls, buf, pos): + metadata = Metadata() + metadata.Init(buf, pos) + return cls.InitFromObj(metadata) + + @classmethod + def InitFromObj(cls, metadata): + x = MetadataT() + x._UnPack(metadata) + return x + + # MetadataT + def _UnPack(self, metadata): + if metadata is None: + return + self.name = metadata.Name() + self.buffer = metadata.Buffer() + + # MetadataT + def Pack(self, builder): + if self.name is not None: + name = builder.CreateString(self.name) + MetadataStart(builder) + if self.name is not None: + 
MetadataAddName(builder, name) + MetadataAddBuffer(builder, self.buffer) + metadata = MetadataEnd(builder) + return metadata +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: tflite + +class MirrorPadMode(object): + REFLECT = 0 + SYMMETRIC = 1 +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: tflite + +from flatbuffers.compat import import_numpy +np = import_numpy() + +class MirrorPadOptions(object): + __slots__ = ['_tab'] + + @classmethod + def GetRootAs(cls, buf, offset=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = MirrorPadOptions() + x.Init(buf, n + offset) + return x + + @classmethod + def GetRootAsMirrorPadOptions(cls, buf, offset=0): + """This method is deprecated. Please switch to GetRootAs.""" + return cls.GetRootAs(buf, offset) + @classmethod + def MirrorPadOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed) + + # MirrorPadOptions + def Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + + # MirrorPadOptions + def Mode(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.Get(flatbuffers.number_types.Int8Flags, o + self._tab.Pos) + return 0 + +def MirrorPadOptionsStart(builder): builder.StartObject(1) +def Start(builder): + return MirrorPadOptionsStart(builder) +def MirrorPadOptionsAddMode(builder, mode): builder.PrependInt8Slot(0, mode, 0) +def AddMode(builder, mode): + return MirrorPadOptionsAddMode(builder, mode) +def MirrorPadOptionsEnd(builder): return builder.EndObject() +def End(builder): + return MirrorPadOptionsEnd(builder) + +class MirrorPadOptionsT(object): + + # MirrorPadOptionsT + def __init__(self): + self.mode = 0 # type: int + + @classmethod + def InitFromBuf(cls, buf, pos): + mirrorPadOptions = MirrorPadOptions() + 
mirrorPadOptions.Init(buf, pos) + return cls.InitFromObj(mirrorPadOptions) + + @classmethod + def InitFromObj(cls, mirrorPadOptions): + x = MirrorPadOptionsT() + x._UnPack(mirrorPadOptions) + return x + + # MirrorPadOptionsT + def _UnPack(self, mirrorPadOptions): + if mirrorPadOptions is None: + return + self.mode = mirrorPadOptions.Mode() + + # MirrorPadOptionsT + def Pack(self, builder): + MirrorPadOptionsStart(builder) + MirrorPadOptionsAddMode(builder, self.mode) + mirrorPadOptions = MirrorPadOptionsEnd(builder) + return mirrorPadOptions +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: tflite + +from flatbuffers.compat import import_numpy +np = import_numpy() + +class Model(object): + __slots__ = ['_tab'] + + @classmethod + def GetRootAs(cls, buf, offset=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = Model() + x.Init(buf, n + offset) + return x + + @classmethod + def GetRootAsModel(cls, buf, offset=0): + """This method is deprecated. 
Please switch to GetRootAs.""" + return cls.GetRootAs(buf, offset) + @classmethod + def ModelBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed) + + # Model + def Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + + # Model + def Version(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.Get(flatbuffers.number_types.Uint32Flags, o + self._tab.Pos) + return 0 + + # Model + def OperatorCodes(self, j): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) + if o != 0: + x = self._tab.Vector(o) + x += flatbuffers.number_types.UOffsetTFlags.py_type(j) * 4 + x = self._tab.Indirect(x) + obj = OperatorCode() + obj.Init(self._tab.Bytes, x) + return obj + return None + + # Model + def OperatorCodesLength(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) + if o != 0: + return self._tab.VectorLen(o) + return 0 + + # Model + def OperatorCodesIsNone(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) + return o == 0 + + # Model + def Subgraphs(self, j): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8)) + if o != 0: + x = self._tab.Vector(o) + x += flatbuffers.number_types.UOffsetTFlags.py_type(j) * 4 + x = self._tab.Indirect(x) + obj = SubGraph() + obj.Init(self._tab.Bytes, x) + return obj + return None + + # Model + def SubgraphsLength(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8)) + if o != 0: + return self._tab.VectorLen(o) + return 0 + + # Model + def SubgraphsIsNone(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8)) + return o == 0 + + # Model + def Description(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(10)) + if o != 0: + return self._tab.String(o + self._tab.Pos) + return None 
+ + # Model + def Buffers(self, j): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(12)) + if o != 0: + x = self._tab.Vector(o) + x += flatbuffers.number_types.UOffsetTFlags.py_type(j) * 4 + x = self._tab.Indirect(x) + obj = Buffer() + obj.Init(self._tab.Bytes, x) + return obj + return None + + # Model + def BuffersLength(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(12)) + if o != 0: + return self._tab.VectorLen(o) + return 0 + + # Model + def BuffersIsNone(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(12)) + return o == 0 + + # Model + def MetadataBuffer(self, j): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(14)) + if o != 0: + a = self._tab.Vector(o) + return self._tab.Get(flatbuffers.number_types.Int32Flags, a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 4)) + return 0 + + # Model + def MetadataBufferAsNumpy(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(14)) + if o != 0: + return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Int32Flags, o) + return 0 + + # Model + def MetadataBufferLength(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(14)) + if o != 0: + return self._tab.VectorLen(o) + return 0 + + # Model + def MetadataBufferIsNone(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(14)) + return o == 0 + + # Model + def Metadata(self, j): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(16)) + if o != 0: + x = self._tab.Vector(o) + x += flatbuffers.number_types.UOffsetTFlags.py_type(j) * 4 + x = self._tab.Indirect(x) + obj = Metadata() + obj.Init(self._tab.Bytes, x) + return obj + return None + + # Model + def MetadataLength(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(16)) + if o != 0: + return self._tab.VectorLen(o) + return 0 + + # Model + def MetadataIsNone(self): + o = 
flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(16)) + return o == 0 + + # Model + def SignatureDefs(self, j): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(18)) + if o != 0: + x = self._tab.Vector(o) + x += flatbuffers.number_types.UOffsetTFlags.py_type(j) * 4 + x = self._tab.Indirect(x) + obj = SignatureDef() + obj.Init(self._tab.Bytes, x) + return obj + return None + + # Model + def SignatureDefsLength(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(18)) + if o != 0: + return self._tab.VectorLen(o) + return 0 + + # Model + def SignatureDefsIsNone(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(18)) + return o == 0 + +def ModelStart(builder): builder.StartObject(8) +def Start(builder): + return ModelStart(builder) +def ModelAddVersion(builder, version): builder.PrependUint32Slot(0, version, 0) +def AddVersion(builder, version): + return ModelAddVersion(builder, version) +def ModelAddOperatorCodes(builder, operatorCodes): builder.PrependUOffsetTRelativeSlot(1, flatbuffers.number_types.UOffsetTFlags.py_type(operatorCodes), 0) +def AddOperatorCodes(builder, operatorCodes): + return ModelAddOperatorCodes(builder, operatorCodes) +def ModelStartOperatorCodesVector(builder, numElems): return builder.StartVector(4, numElems, 4) +def StartOperatorCodesVector(builder, numElems): + return ModelStartOperatorCodesVector(builder, numElems) +def ModelAddSubgraphs(builder, subgraphs): builder.PrependUOffsetTRelativeSlot(2, flatbuffers.number_types.UOffsetTFlags.py_type(subgraphs), 0) +def AddSubgraphs(builder, subgraphs): + return ModelAddSubgraphs(builder, subgraphs) +def ModelStartSubgraphsVector(builder, numElems): return builder.StartVector(4, numElems, 4) +def StartSubgraphsVector(builder, numElems): + return ModelStartSubgraphsVector(builder, numElems) +def ModelAddDescription(builder, description): builder.PrependUOffsetTRelativeSlot(3, 
flatbuffers.number_types.UOffsetTFlags.py_type(description), 0) +def AddDescription(builder, description): + return ModelAddDescription(builder, description) +def ModelAddBuffers(builder, buffers): builder.PrependUOffsetTRelativeSlot(4, flatbuffers.number_types.UOffsetTFlags.py_type(buffers), 0) +def AddBuffers(builder, buffers): + return ModelAddBuffers(builder, buffers) +def ModelStartBuffersVector(builder, numElems): return builder.StartVector(4, numElems, 4) +def StartBuffersVector(builder, numElems): + return ModelStartBuffersVector(builder, numElems) +def ModelAddMetadataBuffer(builder, metadataBuffer): builder.PrependUOffsetTRelativeSlot(5, flatbuffers.number_types.UOffsetTFlags.py_type(metadataBuffer), 0) +def AddMetadataBuffer(builder, metadataBuffer): + return ModelAddMetadataBuffer(builder, metadataBuffer) +def ModelStartMetadataBufferVector(builder, numElems): return builder.StartVector(4, numElems, 4) +def StartMetadataBufferVector(builder, numElems): + return ModelStartMetadataBufferVector(builder, numElems) +def ModelAddMetadata(builder, metadata): builder.PrependUOffsetTRelativeSlot(6, flatbuffers.number_types.UOffsetTFlags.py_type(metadata), 0) +def AddMetadata(builder, metadata): + return ModelAddMetadata(builder, metadata) +def ModelStartMetadataVector(builder, numElems): return builder.StartVector(4, numElems, 4) +def StartMetadataVector(builder, numElems): + return ModelStartMetadataVector(builder, numElems) +def ModelAddSignatureDefs(builder, signatureDefs): builder.PrependUOffsetTRelativeSlot(7, flatbuffers.number_types.UOffsetTFlags.py_type(signatureDefs), 0) +def AddSignatureDefs(builder, signatureDefs): + return ModelAddSignatureDefs(builder, signatureDefs) +def ModelStartSignatureDefsVector(builder, numElems): return builder.StartVector(4, numElems, 4) +def StartSignatureDefsVector(builder, numElems): + return ModelStartSignatureDefsVector(builder, numElems) +def ModelEnd(builder): return builder.EndObject() +def End(builder): + return 
ModelEnd(builder) +try: + from typing import List +except: + pass + +class ModelT(object): + + # ModelT + def __init__(self): + self.version = 0 # type: int + self.operatorCodes = None # type: List[OperatorCodeT] + self.subgraphs = None # type: List[SubGraphT] + self.description = None # type: str + self.buffers = None # type: List[BufferT] + self.metadataBuffer = None # type: List[int] + self.metadata = None # type: List[MetadataT] + self.signatureDefs = None # type: List[SignatureDefT] + + @classmethod + def InitFromBuf(cls, buf, pos): + model = Model() + model.Init(buf, pos) + return cls.InitFromObj(model) + + @classmethod + def InitFromObj(cls, model): + x = ModelT() + x._UnPack(model) + return x + + # ModelT + def _UnPack(self, model): + if model is None: + return + self.version = model.Version() + if not model.OperatorCodesIsNone(): + self.operatorCodes = [] + for i in range(model.OperatorCodesLength()): + if model.OperatorCodes(i) is None: + self.operatorCodes.append(None) + else: + operatorCode_ = OperatorCodeT.InitFromObj(model.OperatorCodes(i)) + self.operatorCodes.append(operatorCode_) + if not model.SubgraphsIsNone(): + self.subgraphs = [] + for i in range(model.SubgraphsLength()): + if model.Subgraphs(i) is None: + self.subgraphs.append(None) + else: + subGraph_ = SubGraphT.InitFromObj(model.Subgraphs(i)) + self.subgraphs.append(subGraph_) + self.description = model.Description() + if not model.BuffersIsNone(): + self.buffers = [] + for i in range(model.BuffersLength()): + if model.Buffers(i) is None: + self.buffers.append(None) + else: + buffer_ = BufferT.InitFromObj(model.Buffers(i)) + self.buffers.append(buffer_) + if not model.MetadataBufferIsNone(): + if np is None: + self.metadataBuffer = [] + for i in range(model.MetadataBufferLength()): + self.metadataBuffer.append(model.MetadataBuffer(i)) + else: + self.metadataBuffer = model.MetadataBufferAsNumpy() + if not model.MetadataIsNone(): + self.metadata = [] + for i in range(model.MetadataLength()): 
+ if model.Metadata(i) is None: + self.metadata.append(None) + else: + metadata_ = MetadataT.InitFromObj(model.Metadata(i)) + self.metadata.append(metadata_) + if not model.SignatureDefsIsNone(): + self.signatureDefs = [] + for i in range(model.SignatureDefsLength()): + if model.SignatureDefs(i) is None: + self.signatureDefs.append(None) + else: + signatureDef_ = SignatureDefT.InitFromObj(model.SignatureDefs(i)) + self.signatureDefs.append(signatureDef_) + + # ModelT + def Pack(self, builder): + if self.operatorCodes is not None: + operatorCodeslist = [] + for i in range(len(self.operatorCodes)): + operatorCodeslist.append(self.operatorCodes[i].Pack(builder)) + ModelStartOperatorCodesVector(builder, len(self.operatorCodes)) + for i in reversed(range(len(self.operatorCodes))): + builder.PrependUOffsetTRelative(operatorCodeslist[i]) + operatorCodes = builder.EndVector() + if self.subgraphs is not None: + subgraphslist = [] + for i in range(len(self.subgraphs)): + subgraphslist.append(self.subgraphs[i].Pack(builder)) + ModelStartSubgraphsVector(builder, len(self.subgraphs)) + for i in reversed(range(len(self.subgraphs))): + builder.PrependUOffsetTRelative(subgraphslist[i]) + subgraphs = builder.EndVector() + if self.description is not None: + description = builder.CreateString(self.description) + if self.buffers is not None: + bufferslist = [] + for i in range(len(self.buffers)): + bufferslist.append(self.buffers[i].Pack(builder)) + ModelStartBuffersVector(builder, len(self.buffers)) + for i in reversed(range(len(self.buffers))): + builder.PrependUOffsetTRelative(bufferslist[i]) + buffers = builder.EndVector() + if self.metadataBuffer is not None: + if np is not None and type(self.metadataBuffer) is np.ndarray: + metadataBuffer = builder.CreateNumpyVector(self.metadataBuffer) + else: + ModelStartMetadataBufferVector(builder, len(self.metadataBuffer)) + for i in reversed(range(len(self.metadataBuffer))): + builder.PrependInt32(self.metadataBuffer[i]) + metadataBuffer = 
builder.EndVector() + if self.metadata is not None: + metadatalist = [] + for i in range(len(self.metadata)): + metadatalist.append(self.metadata[i].Pack(builder)) + ModelStartMetadataVector(builder, len(self.metadata)) + for i in reversed(range(len(self.metadata))): + builder.PrependUOffsetTRelative(metadatalist[i]) + metadata = builder.EndVector() + if self.signatureDefs is not None: + signatureDefslist = [] + for i in range(len(self.signatureDefs)): + signatureDefslist.append(self.signatureDefs[i].Pack(builder)) + ModelStartSignatureDefsVector(builder, len(self.signatureDefs)) + for i in reversed(range(len(self.signatureDefs))): + builder.PrependUOffsetTRelative(signatureDefslist[i]) + signatureDefs = builder.EndVector() + ModelStart(builder) + ModelAddVersion(builder, self.version) + if self.operatorCodes is not None: + ModelAddOperatorCodes(builder, operatorCodes) + if self.subgraphs is not None: + ModelAddSubgraphs(builder, subgraphs) + if self.description is not None: + ModelAddDescription(builder, description) + if self.buffers is not None: + ModelAddBuffers(builder, buffers) + if self.metadataBuffer is not None: + ModelAddMetadataBuffer(builder, metadataBuffer) + if self.metadata is not None: + ModelAddMetadata(builder, metadata) + if self.signatureDefs is not None: + ModelAddSignatureDefs(builder, signatureDefs) + model = ModelEnd(builder) + return model +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: tflite + +from flatbuffers.compat import import_numpy +np = import_numpy() + +class MulOptions(object): + __slots__ = ['_tab'] + + @classmethod + def GetRootAs(cls, buf, offset=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = MulOptions() + x.Init(buf, n + offset) + return x + + @classmethod + def GetRootAsMulOptions(cls, buf, offset=0): + """This method is deprecated. 
Please switch to GetRootAs.""" + return cls.GetRootAs(buf, offset) + @classmethod + def MulOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed) + + # MulOptions + def Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + + # MulOptions + def FusedActivationFunction(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.Get(flatbuffers.number_types.Int8Flags, o + self._tab.Pos) + return 0 + +def MulOptionsStart(builder): builder.StartObject(1) +def Start(builder): + return MulOptionsStart(builder) +def MulOptionsAddFusedActivationFunction(builder, fusedActivationFunction): builder.PrependInt8Slot(0, fusedActivationFunction, 0) +def AddFusedActivationFunction(builder, fusedActivationFunction): + return MulOptionsAddFusedActivationFunction(builder, fusedActivationFunction) +def MulOptionsEnd(builder): return builder.EndObject() +def End(builder): + return MulOptionsEnd(builder) + +class MulOptionsT(object): + + # MulOptionsT + def __init__(self): + self.fusedActivationFunction = 0 # type: int + + @classmethod + def InitFromBuf(cls, buf, pos): + mulOptions = MulOptions() + mulOptions.Init(buf, pos) + return cls.InitFromObj(mulOptions) + + @classmethod + def InitFromObj(cls, mulOptions): + x = MulOptionsT() + x._UnPack(mulOptions) + return x + + # MulOptionsT + def _UnPack(self, mulOptions): + if mulOptions is None: + return + self.fusedActivationFunction = mulOptions.FusedActivationFunction() + + # MulOptionsT + def Pack(self, builder): + MulOptionsStart(builder) + MulOptionsAddFusedActivationFunction(builder, self.fusedActivationFunction) + mulOptions = MulOptionsEnd(builder) + return mulOptions +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: tflite + +from flatbuffers.compat import import_numpy +np = import_numpy() + +class 
NegOptions(object): + __slots__ = ['_tab'] + + @classmethod + def GetRootAs(cls, buf, offset=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = NegOptions() + x.Init(buf, n + offset) + return x + + @classmethod + def GetRootAsNegOptions(cls, buf, offset=0): + """This method is deprecated. Please switch to GetRootAs.""" + return cls.GetRootAs(buf, offset) + @classmethod + def NegOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed) + + # NegOptions + def Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + +def NegOptionsStart(builder): builder.StartObject(0) +def Start(builder): + return NegOptionsStart(builder) +def NegOptionsEnd(builder): return builder.EndObject() +def End(builder): + return NegOptionsEnd(builder) + +class NegOptionsT(object): + + # NegOptionsT + def __init__(self): + pass + + @classmethod + def InitFromBuf(cls, buf, pos): + negOptions = NegOptions() + negOptions.Init(buf, pos) + return cls.InitFromObj(negOptions) + + @classmethod + def InitFromObj(cls, negOptions): + x = NegOptionsT() + x._UnPack(negOptions) + return x + + # NegOptionsT + def _UnPack(self, negOptions): + if negOptions is None: + return + + # NegOptionsT + def Pack(self, builder): + NegOptionsStart(builder) + negOptions = NegOptionsEnd(builder) + return negOptions +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: tflite + +from flatbuffers.compat import import_numpy +np = import_numpy() + +class NonMaxSuppressionV4Options(object): + __slots__ = ['_tab'] + + @classmethod + def GetRootAs(cls, buf, offset=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = NonMaxSuppressionV4Options() + x.Init(buf, n + offset) + return x + + @classmethod + def GetRootAsNonMaxSuppressionV4Options(cls, buf, offset=0): + """This method is deprecated. 
Please switch to GetRootAs.""" + return cls.GetRootAs(buf, offset) + @classmethod + def NonMaxSuppressionV4OptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed) + + # NonMaxSuppressionV4Options + def Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + +def NonMaxSuppressionV4OptionsStart(builder): builder.StartObject(0) +def Start(builder): + return NonMaxSuppressionV4OptionsStart(builder) +def NonMaxSuppressionV4OptionsEnd(builder): return builder.EndObject() +def End(builder): + return NonMaxSuppressionV4OptionsEnd(builder) + +class NonMaxSuppressionV4OptionsT(object): + + # NonMaxSuppressionV4OptionsT + def __init__(self): + pass + + @classmethod + def InitFromBuf(cls, buf, pos): + nonMaxSuppressionV4options = NonMaxSuppressionV4Options() + nonMaxSuppressionV4options.Init(buf, pos) + return cls.InitFromObj(nonMaxSuppressionV4options) + + @classmethod + def InitFromObj(cls, nonMaxSuppressionV4options): + x = NonMaxSuppressionV4OptionsT() + x._UnPack(nonMaxSuppressionV4options) + return x + + # NonMaxSuppressionV4OptionsT + def _UnPack(self, nonMaxSuppressionV4options): + if nonMaxSuppressionV4options is None: + return + + # NonMaxSuppressionV4OptionsT + def Pack(self, builder): + NonMaxSuppressionV4OptionsStart(builder) + nonMaxSuppressionV4options = NonMaxSuppressionV4OptionsEnd(builder) + return nonMaxSuppressionV4options +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: tflite + +from flatbuffers.compat import import_numpy +np = import_numpy() + +class NonMaxSuppressionV5Options(object): + __slots__ = ['_tab'] + + @classmethod + def GetRootAs(cls, buf, offset=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = NonMaxSuppressionV5Options() + x.Init(buf, n + offset) + return x + + @classmethod + def GetRootAsNonMaxSuppressionV5Options(cls, buf, offset=0): + 
"""This method is deprecated. Please switch to GetRootAs.""" + return cls.GetRootAs(buf, offset) + @classmethod + def NonMaxSuppressionV5OptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed) + + # NonMaxSuppressionV5Options + def Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + +def NonMaxSuppressionV5OptionsStart(builder): builder.StartObject(0) +def Start(builder): + return NonMaxSuppressionV5OptionsStart(builder) +def NonMaxSuppressionV5OptionsEnd(builder): return builder.EndObject() +def End(builder): + return NonMaxSuppressionV5OptionsEnd(builder) + +class NonMaxSuppressionV5OptionsT(object): + + # NonMaxSuppressionV5OptionsT + def __init__(self): + pass + + @classmethod + def InitFromBuf(cls, buf, pos): + nonMaxSuppressionV5options = NonMaxSuppressionV5Options() + nonMaxSuppressionV5options.Init(buf, pos) + return cls.InitFromObj(nonMaxSuppressionV5options) + + @classmethod + def InitFromObj(cls, nonMaxSuppressionV5options): + x = NonMaxSuppressionV5OptionsT() + x._UnPack(nonMaxSuppressionV5options) + return x + + # NonMaxSuppressionV5OptionsT + def _UnPack(self, nonMaxSuppressionV5options): + if nonMaxSuppressionV5options is None: + return + + # NonMaxSuppressionV5OptionsT + def Pack(self, builder): + NonMaxSuppressionV5OptionsStart(builder) + nonMaxSuppressionV5options = NonMaxSuppressionV5OptionsEnd(builder) + return nonMaxSuppressionV5options +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: tflite + +from flatbuffers.compat import import_numpy +np = import_numpy() + +class NotEqualOptions(object): + __slots__ = ['_tab'] + + @classmethod + def GetRootAs(cls, buf, offset=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = NotEqualOptions() + x.Init(buf, n + offset) + return x + + @classmethod + def GetRootAsNotEqualOptions(cls, buf, offset=0): + 
"""This method is deprecated. Please switch to GetRootAs.""" + return cls.GetRootAs(buf, offset) + @classmethod + def NotEqualOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed) + + # NotEqualOptions + def Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + +def NotEqualOptionsStart(builder): builder.StartObject(0) +def Start(builder): + return NotEqualOptionsStart(builder) +def NotEqualOptionsEnd(builder): return builder.EndObject() +def End(builder): + return NotEqualOptionsEnd(builder) + +class NotEqualOptionsT(object): + + # NotEqualOptionsT + def __init__(self): + pass + + @classmethod + def InitFromBuf(cls, buf, pos): + notEqualOptions = NotEqualOptions() + notEqualOptions.Init(buf, pos) + return cls.InitFromObj(notEqualOptions) + + @classmethod + def InitFromObj(cls, notEqualOptions): + x = NotEqualOptionsT() + x._UnPack(notEqualOptions) + return x + + # NotEqualOptionsT + def _UnPack(self, notEqualOptions): + if notEqualOptions is None: + return + + # NotEqualOptionsT + def Pack(self, builder): + NotEqualOptionsStart(builder) + notEqualOptions = NotEqualOptionsEnd(builder) + return notEqualOptions +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: tflite + +from flatbuffers.compat import import_numpy +np = import_numpy() + +class OneHotOptions(object): + __slots__ = ['_tab'] + + @classmethod + def GetRootAs(cls, buf, offset=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = OneHotOptions() + x.Init(buf, n + offset) + return x + + @classmethod + def GetRootAsOneHotOptions(cls, buf, offset=0): + """This method is deprecated. 
Please switch to GetRootAs.""" + return cls.GetRootAs(buf, offset) + @classmethod + def OneHotOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed) + + # OneHotOptions + def Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + + # OneHotOptions + def Axis(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.Get(flatbuffers.number_types.Int32Flags, o + self._tab.Pos) + return 0 + +def OneHotOptionsStart(builder): builder.StartObject(1) +def Start(builder): + return OneHotOptionsStart(builder) +def OneHotOptionsAddAxis(builder, axis): builder.PrependInt32Slot(0, axis, 0) +def AddAxis(builder, axis): + return OneHotOptionsAddAxis(builder, axis) +def OneHotOptionsEnd(builder): return builder.EndObject() +def End(builder): + return OneHotOptionsEnd(builder) + +class OneHotOptionsT(object): + + # OneHotOptionsT + def __init__(self): + self.axis = 0 # type: int + + @classmethod + def InitFromBuf(cls, buf, pos): + oneHotOptions = OneHotOptions() + oneHotOptions.Init(buf, pos) + return cls.InitFromObj(oneHotOptions) + + @classmethod + def InitFromObj(cls, oneHotOptions): + x = OneHotOptionsT() + x._UnPack(oneHotOptions) + return x + + # OneHotOptionsT + def _UnPack(self, oneHotOptions): + if oneHotOptions is None: + return + self.axis = oneHotOptions.Axis() + + # OneHotOptionsT + def Pack(self, builder): + OneHotOptionsStart(builder) + OneHotOptionsAddAxis(builder, self.axis) + oneHotOptions = OneHotOptionsEnd(builder) + return oneHotOptions +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: tflite + +from flatbuffers.compat import import_numpy +np = import_numpy() + +class Operator(object): + __slots__ = ['_tab'] + + @classmethod + def GetRootAs(cls, buf, offset=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = 
Operator() + x.Init(buf, n + offset) + return x + + @classmethod + def GetRootAsOperator(cls, buf, offset=0): + """This method is deprecated. Please switch to GetRootAs.""" + return cls.GetRootAs(buf, offset) + @classmethod + def OperatorBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed) + + # Operator + def Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + + # Operator + def OpcodeIndex(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.Get(flatbuffers.number_types.Uint32Flags, o + self._tab.Pos) + return 0 + + # Operator + def Inputs(self, j): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) + if o != 0: + a = self._tab.Vector(o) + return self._tab.Get(flatbuffers.number_types.Int32Flags, a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 4)) + return 0 + + # Operator + def InputsAsNumpy(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) + if o != 0: + return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Int32Flags, o) + return 0 + + # Operator + def InputsLength(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) + if o != 0: + return self._tab.VectorLen(o) + return 0 + + # Operator + def InputsIsNone(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) + return o == 0 + + # Operator + def Outputs(self, j): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8)) + if o != 0: + a = self._tab.Vector(o) + return self._tab.Get(flatbuffers.number_types.Int32Flags, a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 4)) + return 0 + + # Operator + def OutputsAsNumpy(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8)) + if o != 0: + return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Int32Flags, 
o) + return 0 + + # Operator + def OutputsLength(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8)) + if o != 0: + return self._tab.VectorLen(o) + return 0 + + # Operator + def OutputsIsNone(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8)) + return o == 0 + + # Operator + def BuiltinOptionsType(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(10)) + if o != 0: + return self._tab.Get(flatbuffers.number_types.Uint8Flags, o + self._tab.Pos) + return 0 + + # Operator + def BuiltinOptions(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(12)) + if o != 0: + from flatbuffers.table import Table + obj = Table(bytearray(), 0) + self._tab.Union(obj, o) + return obj + return None + + # Operator + def CustomOptions(self, j): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(14)) + if o != 0: + a = self._tab.Vector(o) + return self._tab.Get(flatbuffers.number_types.Uint8Flags, a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 1)) + return 0 + + # Operator + def CustomOptionsAsNumpy(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(14)) + if o != 0: + return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Uint8Flags, o) + return 0 + + # Operator + def CustomOptionsLength(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(14)) + if o != 0: + return self._tab.VectorLen(o) + return 0 + + # Operator + def CustomOptionsIsNone(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(14)) + return o == 0 + + # Operator + def CustomOptionsFormat(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(16)) + if o != 0: + return self._tab.Get(flatbuffers.number_types.Int8Flags, o + self._tab.Pos) + return 0 + + # Operator + def MutatingVariableInputs(self, j): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(18)) + if o != 0: + a = 
self._tab.Vector(o) + return self._tab.Get(flatbuffers.number_types.BoolFlags, a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 1)) + return 0 + + # Operator + def MutatingVariableInputsAsNumpy(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(18)) + if o != 0: + return self._tab.GetVectorAsNumpy(flatbuffers.number_types.BoolFlags, o) + return 0 + + # Operator + def MutatingVariableInputsLength(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(18)) + if o != 0: + return self._tab.VectorLen(o) + return 0 + + # Operator + def MutatingVariableInputsIsNone(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(18)) + return o == 0 + + # Operator + def Intermediates(self, j): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(20)) + if o != 0: + a = self._tab.Vector(o) + return self._tab.Get(flatbuffers.number_types.Int32Flags, a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 4)) + return 0 + + # Operator + def IntermediatesAsNumpy(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(20)) + if o != 0: + return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Int32Flags, o) + return 0 + + # Operator + def IntermediatesLength(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(20)) + if o != 0: + return self._tab.VectorLen(o) + return 0 + + # Operator + def IntermediatesIsNone(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(20)) + return o == 0 + + # Operator + def CustomOptionsOffset(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(22)) + if o != 0: + return self._tab.Get(flatbuffers.number_types.Uint64Flags, o + self._tab.Pos) + return 0 + + # Operator + def CustomOptionsSize(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(24)) + if o != 0: + return self._tab.Get(flatbuffers.number_types.Uint64Flags, o + self._tab.Pos) + return 0 
+ +def OperatorStart(builder): builder.StartObject(11) +def Start(builder): + return OperatorStart(builder) +def OperatorAddOpcodeIndex(builder, opcodeIndex): builder.PrependUint32Slot(0, opcodeIndex, 0) +def AddOpcodeIndex(builder, opcodeIndex): + return OperatorAddOpcodeIndex(builder, opcodeIndex) +def OperatorAddInputs(builder, inputs): builder.PrependUOffsetTRelativeSlot(1, flatbuffers.number_types.UOffsetTFlags.py_type(inputs), 0) +def AddInputs(builder, inputs): + return OperatorAddInputs(builder, inputs) +def OperatorStartInputsVector(builder, numElems): return builder.StartVector(4, numElems, 4) +def StartInputsVector(builder, numElems): + return OperatorStartInputsVector(builder, numElems) +def OperatorAddOutputs(builder, outputs): builder.PrependUOffsetTRelativeSlot(2, flatbuffers.number_types.UOffsetTFlags.py_type(outputs), 0) +def AddOutputs(builder, outputs): + return OperatorAddOutputs(builder, outputs) +def OperatorStartOutputsVector(builder, numElems): return builder.StartVector(4, numElems, 4) +def StartOutputsVector(builder, numElems): + return OperatorStartOutputsVector(builder, numElems) +def OperatorAddBuiltinOptionsType(builder, builtinOptionsType): builder.PrependUint8Slot(3, builtinOptionsType, 0) +def AddBuiltinOptionsType(builder, builtinOptionsType): + return OperatorAddBuiltinOptionsType(builder, builtinOptionsType) +def OperatorAddBuiltinOptions(builder, builtinOptions): builder.PrependUOffsetTRelativeSlot(4, flatbuffers.number_types.UOffsetTFlags.py_type(builtinOptions), 0) +def AddBuiltinOptions(builder, builtinOptions): + return OperatorAddBuiltinOptions(builder, builtinOptions) +def OperatorAddCustomOptions(builder, customOptions): builder.PrependUOffsetTRelativeSlot(5, flatbuffers.number_types.UOffsetTFlags.py_type(customOptions), 0) +def AddCustomOptions(builder, customOptions): + return OperatorAddCustomOptions(builder, customOptions) +def OperatorStartCustomOptionsVector(builder, numElems): return builder.StartVector(1, 
numElems, 1) +def StartCustomOptionsVector(builder, numElems): + return OperatorStartCustomOptionsVector(builder, numElems) +def OperatorAddCustomOptionsFormat(builder, customOptionsFormat): builder.PrependInt8Slot(6, customOptionsFormat, 0) +def AddCustomOptionsFormat(builder, customOptionsFormat): + return OperatorAddCustomOptionsFormat(builder, customOptionsFormat) +def OperatorAddMutatingVariableInputs(builder, mutatingVariableInputs): builder.PrependUOffsetTRelativeSlot(7, flatbuffers.number_types.UOffsetTFlags.py_type(mutatingVariableInputs), 0) +def AddMutatingVariableInputs(builder, mutatingVariableInputs): + return OperatorAddMutatingVariableInputs(builder, mutatingVariableInputs) +def OperatorStartMutatingVariableInputsVector(builder, numElems): return builder.StartVector(1, numElems, 1) +def StartMutatingVariableInputsVector(builder, numElems): + return OperatorStartMutatingVariableInputsVector(builder, numElems) +def OperatorAddIntermediates(builder, intermediates): builder.PrependUOffsetTRelativeSlot(8, flatbuffers.number_types.UOffsetTFlags.py_type(intermediates), 0) +def AddIntermediates(builder, intermediates): + return OperatorAddIntermediates(builder, intermediates) +def OperatorStartIntermediatesVector(builder, numElems): return builder.StartVector(4, numElems, 4) +def StartIntermediatesVector(builder, numElems): + return OperatorStartIntermediatesVector(builder, numElems) +def OperatorAddCustomOptionsOffset(builder, customOptionsOffset): builder.PrependUint64Slot(9, customOptionsOffset, 0) +def AddCustomOptionsOffset(builder, customOptionsOffset): + return OperatorAddCustomOptionsOffset(builder, customOptionsOffset) +def OperatorAddCustomOptionsSize(builder, customOptionsSize): builder.PrependUint64Slot(10, customOptionsSize, 0) +def AddCustomOptionsSize(builder, customOptionsSize): + return OperatorAddCustomOptionsSize(builder, customOptionsSize) +def OperatorEnd(builder): return builder.EndObject() +def End(builder): + return 
OperatorEnd(builder) +try: + from typing import List, Union +except: + pass + +class OperatorT(object): + + # OperatorT + def __init__(self): + self.opcodeIndex = 0 # type: int + self.inputs = None # type: List[int] + self.outputs = None # type: List[int] + self.builtinOptionsType = 0 # type: int + self.builtinOptions = None # type: Union[None, Conv2DOptionsT, DepthwiseConv2DOptionsT, ConcatEmbeddingsOptionsT, LSHProjectionOptionsT, Pool2DOptionsT, SVDFOptionsT, RNNOptionsT, FullyConnectedOptionsT, SoftmaxOptionsT, ConcatenationOptionsT, AddOptionsT, L2NormOptionsT, LocalResponseNormalizationOptionsT, LSTMOptionsT, ResizeBilinearOptionsT, CallOptionsT, ReshapeOptionsT, SkipGramOptionsT, SpaceToDepthOptionsT, EmbeddingLookupSparseOptionsT, MulOptionsT, PadOptionsT, GatherOptionsT, BatchToSpaceNDOptionsT, SpaceToBatchNDOptionsT, TransposeOptionsT, ReducerOptionsT, SubOptionsT, DivOptionsT, SqueezeOptionsT, SequenceRNNOptionsT, StridedSliceOptionsT, ExpOptionsT, TopKV2OptionsT, SplitOptionsT, LogSoftmaxOptionsT, CastOptionsT, DequantizeOptionsT, MaximumMinimumOptionsT, ArgMaxOptionsT, LessOptionsT, NegOptionsT, PadV2OptionsT, GreaterOptionsT, GreaterEqualOptionsT, LessEqualOptionsT, SelectOptionsT, SliceOptionsT, TransposeConvOptionsT, SparseToDenseOptionsT, TileOptionsT, ExpandDimsOptionsT, EqualOptionsT, NotEqualOptionsT, ShapeOptionsT, PowOptionsT, ArgMinOptionsT, FakeQuantOptionsT, PackOptionsT, LogicalOrOptionsT, OneHotOptionsT, LogicalAndOptionsT, LogicalNotOptionsT, UnpackOptionsT, FloorDivOptionsT, SquareOptionsT, ZerosLikeOptionsT, FillOptionsT, BidirectionalSequenceLSTMOptionsT, BidirectionalSequenceRNNOptionsT, UnidirectionalSequenceLSTMOptionsT, FloorModOptionsT, RangeOptionsT, ResizeNearestNeighborOptionsT, LeakyReluOptionsT, SquaredDifferenceOptionsT, MirrorPadOptionsT, AbsOptionsT, SplitVOptionsT, UniqueOptionsT, ReverseV2OptionsT, AddNOptionsT, GatherNdOptionsT, CosOptionsT, WhereOptionsT, RankOptionsT, ReverseSequenceOptionsT, MatrixDiagOptionsT, 
QuantizeOptionsT, MatrixSetDiagOptionsT, HardSwishOptionsT, IfOptionsT, WhileOptionsT, DepthToSpaceOptionsT, NonMaxSuppressionV4OptionsT, NonMaxSuppressionV5OptionsT, ScatterNdOptionsT, SelectV2OptionsT, DensifyOptionsT, SegmentSumOptionsT, BatchMatMulOptionsT, CumsumOptionsT, CallOnceOptionsT, BroadcastToOptionsT, Rfft2dOptionsT, Conv3DOptionsT, HashtableOptionsT, HashtableFindOptionsT, HashtableImportOptionsT, HashtableSizeOptionsT, VarHandleOptionsT, ReadVariableOptionsT, AssignVariableOptionsT, RandomOptionsT, BucketizeOptionsT, GeluOptionsT, DynamicUpdateSliceOptionsT, UnsortedSegmentProdOptionsT, UnsortedSegmentMaxOptionsT, UnsortedSegmentMinOptionsT, UnsortedSegmentSumOptionsT, ATan2OptionsT, SignOptionsT, BitcastOptionsT, BitwiseXorOptionsT, RightShiftOptionsT] + self.customOptions = None # type: List[int] + self.customOptionsFormat = 0 # type: int + self.mutatingVariableInputs = None # type: List[bool] + self.intermediates = None # type: List[int] + self.customOptionsOffset = 0 # type: int + self.customOptionsSize = 0 # type: int + + @classmethod + def InitFromBuf(cls, buf, pos): + operator = Operator() + operator.Init(buf, pos) + return cls.InitFromObj(operator) + + @classmethod + def InitFromObj(cls, operator): + x = OperatorT() + x._UnPack(operator) + return x + + # OperatorT + def _UnPack(self, operator): + if operator is None: + return + self.opcodeIndex = operator.OpcodeIndex() + if not operator.InputsIsNone(): + if np is None: + self.inputs = [] + for i in range(operator.InputsLength()): + self.inputs.append(operator.Inputs(i)) + else: + self.inputs = operator.InputsAsNumpy() + if not operator.OutputsIsNone(): + if np is None: + self.outputs = [] + for i in range(operator.OutputsLength()): + self.outputs.append(operator.Outputs(i)) + else: + self.outputs = operator.OutputsAsNumpy() + self.builtinOptionsType = operator.BuiltinOptionsType() + self.builtinOptions = BuiltinOptionsCreator(self.builtinOptionsType, operator.BuiltinOptions()) + if not 
operator.CustomOptionsIsNone(): + if np is None: + self.customOptions = [] + for i in range(operator.CustomOptionsLength()): + self.customOptions.append(operator.CustomOptions(i)) + else: + self.customOptions = operator.CustomOptionsAsNumpy() + self.customOptionsFormat = operator.CustomOptionsFormat() + if not operator.MutatingVariableInputsIsNone(): + if np is None: + self.mutatingVariableInputs = [] + for i in range(operator.MutatingVariableInputsLength()): + self.mutatingVariableInputs.append(operator.MutatingVariableInputs(i)) + else: + self.mutatingVariableInputs = operator.MutatingVariableInputsAsNumpy() + if not operator.IntermediatesIsNone(): + if np is None: + self.intermediates = [] + for i in range(operator.IntermediatesLength()): + self.intermediates.append(operator.Intermediates(i)) + else: + self.intermediates = operator.IntermediatesAsNumpy() + self.customOptionsOffset = operator.CustomOptionsOffset() + self.customOptionsSize = operator.CustomOptionsSize() + + # OperatorT + def Pack(self, builder): + if self.inputs is not None: + if np is not None and type(self.inputs) is np.ndarray: + inputs = builder.CreateNumpyVector(self.inputs) + else: + OperatorStartInputsVector(builder, len(self.inputs)) + for i in reversed(range(len(self.inputs))): + builder.PrependInt32(self.inputs[i]) + inputs = builder.EndVector() + if self.outputs is not None: + if np is not None and type(self.outputs) is np.ndarray: + outputs = builder.CreateNumpyVector(self.outputs) + else: + OperatorStartOutputsVector(builder, len(self.outputs)) + for i in reversed(range(len(self.outputs))): + builder.PrependInt32(self.outputs[i]) + outputs = builder.EndVector() + if self.builtinOptions is not None: + builtinOptions = self.builtinOptions.Pack(builder) + if self.customOptions is not None: + if np is not None and type(self.customOptions) is np.ndarray: + customOptions = builder.CreateNumpyVector(self.customOptions) + else: + OperatorStartCustomOptionsVector(builder, 
len(self.customOptions)) + for i in reversed(range(len(self.customOptions))): + builder.PrependUint8(self.customOptions[i]) + customOptions = builder.EndVector() + if self.mutatingVariableInputs is not None: + if np is not None and type(self.mutatingVariableInputs) is np.ndarray: + mutatingVariableInputs = builder.CreateNumpyVector(self.mutatingVariableInputs) + else: + OperatorStartMutatingVariableInputsVector(builder, len(self.mutatingVariableInputs)) + for i in reversed(range(len(self.mutatingVariableInputs))): + builder.PrependBool(self.mutatingVariableInputs[i]) + mutatingVariableInputs = builder.EndVector() + if self.intermediates is not None: + if np is not None and type(self.intermediates) is np.ndarray: + intermediates = builder.CreateNumpyVector(self.intermediates) + else: + OperatorStartIntermediatesVector(builder, len(self.intermediates)) + for i in reversed(range(len(self.intermediates))): + builder.PrependInt32(self.intermediates[i]) + intermediates = builder.EndVector() + OperatorStart(builder) + OperatorAddOpcodeIndex(builder, self.opcodeIndex) + if self.inputs is not None: + OperatorAddInputs(builder, inputs) + if self.outputs is not None: + OperatorAddOutputs(builder, outputs) + OperatorAddBuiltinOptionsType(builder, self.builtinOptionsType) + if self.builtinOptions is not None: + OperatorAddBuiltinOptions(builder, builtinOptions) + if self.customOptions is not None: + OperatorAddCustomOptions(builder, customOptions) + OperatorAddCustomOptionsFormat(builder, self.customOptionsFormat) + if self.mutatingVariableInputs is not None: + OperatorAddMutatingVariableInputs(builder, mutatingVariableInputs) + if self.intermediates is not None: + OperatorAddIntermediates(builder, intermediates) + OperatorAddCustomOptionsOffset(builder, self.customOptionsOffset) + OperatorAddCustomOptionsSize(builder, self.customOptionsSize) + operator = OperatorEnd(builder) + return operator +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: 
tflite + +from flatbuffers.compat import import_numpy +np = import_numpy() + +class OperatorCode(object): + __slots__ = ['_tab'] + + @classmethod + def GetRootAs(cls, buf, offset=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = OperatorCode() + x.Init(buf, n + offset) + return x + + @classmethod + def GetRootAsOperatorCode(cls, buf, offset=0): + """This method is deprecated. Please switch to GetRootAs.""" + return cls.GetRootAs(buf, offset) + @classmethod + def OperatorCodeBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed) + + # OperatorCode + def Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + + # OperatorCode + def DeprecatedBuiltinCode(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.Get(flatbuffers.number_types.Int8Flags, o + self._tab.Pos) + return 0 + + # OperatorCode + def CustomCode(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) + if o != 0: + return self._tab.String(o + self._tab.Pos) + return None + + # OperatorCode + def Version(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8)) + if o != 0: + return self._tab.Get(flatbuffers.number_types.Int32Flags, o + self._tab.Pos) + return 1 + + # OperatorCode + def BuiltinCode(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(10)) + if o != 0: + return self._tab.Get(flatbuffers.number_types.Int32Flags, o + self._tab.Pos) + return 0 + +def OperatorCodeStart(builder): builder.StartObject(4) +def Start(builder): + return OperatorCodeStart(builder) +def OperatorCodeAddDeprecatedBuiltinCode(builder, deprecatedBuiltinCode): builder.PrependInt8Slot(0, deprecatedBuiltinCode, 0) +def AddDeprecatedBuiltinCode(builder, deprecatedBuiltinCode): + return OperatorCodeAddDeprecatedBuiltinCode(builder, 
deprecatedBuiltinCode) +def OperatorCodeAddCustomCode(builder, customCode): builder.PrependUOffsetTRelativeSlot(1, flatbuffers.number_types.UOffsetTFlags.py_type(customCode), 0) +def AddCustomCode(builder, customCode): + return OperatorCodeAddCustomCode(builder, customCode) +def OperatorCodeAddVersion(builder, version): builder.PrependInt32Slot(2, version, 1) +def AddVersion(builder, version): + return OperatorCodeAddVersion(builder, version) +def OperatorCodeAddBuiltinCode(builder, builtinCode): builder.PrependInt32Slot(3, builtinCode, 0) +def AddBuiltinCode(builder, builtinCode): + return OperatorCodeAddBuiltinCode(builder, builtinCode) +def OperatorCodeEnd(builder): return builder.EndObject() +def End(builder): + return OperatorCodeEnd(builder) + +class OperatorCodeT(object): + + # OperatorCodeT + def __init__(self): + self.deprecatedBuiltinCode = 0 # type: int + self.customCode = None # type: str + self.version = 1 # type: int + self.builtinCode = 0 # type: int + + @classmethod + def InitFromBuf(cls, buf, pos): + operatorCode = OperatorCode() + operatorCode.Init(buf, pos) + return cls.InitFromObj(operatorCode) + + @classmethod + def InitFromObj(cls, operatorCode): + x = OperatorCodeT() + x._UnPack(operatorCode) + return x + + # OperatorCodeT + def _UnPack(self, operatorCode): + if operatorCode is None: + return + self.deprecatedBuiltinCode = operatorCode.DeprecatedBuiltinCode() + self.customCode = operatorCode.CustomCode() + self.version = operatorCode.Version() + self.builtinCode = operatorCode.BuiltinCode() + + # OperatorCodeT + def Pack(self, builder): + if self.customCode is not None: + customCode = builder.CreateString(self.customCode) + OperatorCodeStart(builder) + OperatorCodeAddDeprecatedBuiltinCode(builder, self.deprecatedBuiltinCode) + if self.customCode is not None: + OperatorCodeAddCustomCode(builder, customCode) + OperatorCodeAddVersion(builder, self.version) + OperatorCodeAddBuiltinCode(builder, self.builtinCode) + operatorCode = 
OperatorCodeEnd(builder) + return operatorCode +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: tflite + +from flatbuffers.compat import import_numpy +np = import_numpy() + +class PackOptions(object): + __slots__ = ['_tab'] + + @classmethod + def GetRootAs(cls, buf, offset=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = PackOptions() + x.Init(buf, n + offset) + return x + + @classmethod + def GetRootAsPackOptions(cls, buf, offset=0): + """This method is deprecated. Please switch to GetRootAs.""" + return cls.GetRootAs(buf, offset) + @classmethod + def PackOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed) + + # PackOptions + def Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + + # PackOptions + def ValuesCount(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.Get(flatbuffers.number_types.Int32Flags, o + self._tab.Pos) + return 0 + + # PackOptions + def Axis(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) + if o != 0: + return self._tab.Get(flatbuffers.number_types.Int32Flags, o + self._tab.Pos) + return 0 + +def PackOptionsStart(builder): builder.StartObject(2) +def Start(builder): + return PackOptionsStart(builder) +def PackOptionsAddValuesCount(builder, valuesCount): builder.PrependInt32Slot(0, valuesCount, 0) +def AddValuesCount(builder, valuesCount): + return PackOptionsAddValuesCount(builder, valuesCount) +def PackOptionsAddAxis(builder, axis): builder.PrependInt32Slot(1, axis, 0) +def AddAxis(builder, axis): + return PackOptionsAddAxis(builder, axis) +def PackOptionsEnd(builder): return builder.EndObject() +def End(builder): + return PackOptionsEnd(builder) + +class PackOptionsT(object): + + # PackOptionsT + def __init__(self): + self.valuesCount = 0 # 
type: int + self.axis = 0 # type: int + + @classmethod + def InitFromBuf(cls, buf, pos): + packOptions = PackOptions() + packOptions.Init(buf, pos) + return cls.InitFromObj(packOptions) + + @classmethod + def InitFromObj(cls, packOptions): + x = PackOptionsT() + x._UnPack(packOptions) + return x + + # PackOptionsT + def _UnPack(self, packOptions): + if packOptions is None: + return + self.valuesCount = packOptions.ValuesCount() + self.axis = packOptions.Axis() + + # PackOptionsT + def Pack(self, builder): + PackOptionsStart(builder) + PackOptionsAddValuesCount(builder, self.valuesCount) + PackOptionsAddAxis(builder, self.axis) + packOptions = PackOptionsEnd(builder) + return packOptions +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: tflite + +from flatbuffers.compat import import_numpy +np = import_numpy() + +class PadOptions(object): + __slots__ = ['_tab'] + + @classmethod + def GetRootAs(cls, buf, offset=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = PadOptions() + x.Init(buf, n + offset) + return x + + @classmethod + def GetRootAsPadOptions(cls, buf, offset=0): + """This method is deprecated. 
Please switch to GetRootAs.""" + return cls.GetRootAs(buf, offset) + @classmethod + def PadOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed) + + # PadOptions + def Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + +def PadOptionsStart(builder): builder.StartObject(0) +def Start(builder): + return PadOptionsStart(builder) +def PadOptionsEnd(builder): return builder.EndObject() +def End(builder): + return PadOptionsEnd(builder) + +class PadOptionsT(object): + + # PadOptionsT + def __init__(self): + pass + + @classmethod + def InitFromBuf(cls, buf, pos): + padOptions = PadOptions() + padOptions.Init(buf, pos) + return cls.InitFromObj(padOptions) + + @classmethod + def InitFromObj(cls, padOptions): + x = PadOptionsT() + x._UnPack(padOptions) + return x + + # PadOptionsT + def _UnPack(self, padOptions): + if padOptions is None: + return + + # PadOptionsT + def Pack(self, builder): + PadOptionsStart(builder) + padOptions = PadOptionsEnd(builder) + return padOptions +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: tflite + +from flatbuffers.compat import import_numpy +np = import_numpy() + +class PadV2Options(object): + __slots__ = ['_tab'] + + @classmethod + def GetRootAs(cls, buf, offset=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = PadV2Options() + x.Init(buf, n + offset) + return x + + @classmethod + def GetRootAsPadV2Options(cls, buf, offset=0): + """This method is deprecated. 
Please switch to GetRootAs.""" + return cls.GetRootAs(buf, offset) + @classmethod + def PadV2OptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed) + + # PadV2Options + def Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + +def PadV2OptionsStart(builder): builder.StartObject(0) +def Start(builder): + return PadV2OptionsStart(builder) +def PadV2OptionsEnd(builder): return builder.EndObject() +def End(builder): + return PadV2OptionsEnd(builder) + +class PadV2OptionsT(object): + + # PadV2OptionsT + def __init__(self): + pass + + @classmethod + def InitFromBuf(cls, buf, pos): + padV2options = PadV2Options() + padV2options.Init(buf, pos) + return cls.InitFromObj(padV2options) + + @classmethod + def InitFromObj(cls, padV2options): + x = PadV2OptionsT() + x._UnPack(padV2options) + return x + + # PadV2OptionsT + def _UnPack(self, padV2options): + if padV2options is None: + return + + # PadV2OptionsT + def Pack(self, builder): + PadV2OptionsStart(builder) + padV2options = PadV2OptionsEnd(builder) + return padV2options +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: tflite + +class Padding(object): + SAME = 0 + VALID = 1 +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: tflite + +from flatbuffers.compat import import_numpy +np = import_numpy() + +class Pool2DOptions(object): + __slots__ = ['_tab'] + + @classmethod + def GetRootAs(cls, buf, offset=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = Pool2DOptions() + x.Init(buf, n + offset) + return x + + @classmethod + def GetRootAsPool2DOptions(cls, buf, offset=0): + """This method is deprecated. 
Please switch to GetRootAs.""" + return cls.GetRootAs(buf, offset) + @classmethod + def Pool2DOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed) + + # Pool2DOptions + def Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + + # Pool2DOptions + def Padding(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.Get(flatbuffers.number_types.Int8Flags, o + self._tab.Pos) + return 0 + + # Pool2DOptions + def StrideW(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) + if o != 0: + return self._tab.Get(flatbuffers.number_types.Int32Flags, o + self._tab.Pos) + return 0 + + # Pool2DOptions + def StrideH(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8)) + if o != 0: + return self._tab.Get(flatbuffers.number_types.Int32Flags, o + self._tab.Pos) + return 0 + + # Pool2DOptions + def FilterWidth(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(10)) + if o != 0: + return self._tab.Get(flatbuffers.number_types.Int32Flags, o + self._tab.Pos) + return 0 + + # Pool2DOptions + def FilterHeight(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(12)) + if o != 0: + return self._tab.Get(flatbuffers.number_types.Int32Flags, o + self._tab.Pos) + return 0 + + # Pool2DOptions + def FusedActivationFunction(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(14)) + if o != 0: + return self._tab.Get(flatbuffers.number_types.Int8Flags, o + self._tab.Pos) + return 0 + +def Pool2DOptionsStart(builder): builder.StartObject(6) +def Start(builder): + return Pool2DOptionsStart(builder) +def Pool2DOptionsAddPadding(builder, padding): builder.PrependInt8Slot(0, padding, 0) +def AddPadding(builder, padding): + return Pool2DOptionsAddPadding(builder, padding) +def 
Pool2DOptionsAddStrideW(builder, strideW): builder.PrependInt32Slot(1, strideW, 0) +def AddStrideW(builder, strideW): + return Pool2DOptionsAddStrideW(builder, strideW) +def Pool2DOptionsAddStrideH(builder, strideH): builder.PrependInt32Slot(2, strideH, 0) +def AddStrideH(builder, strideH): + return Pool2DOptionsAddStrideH(builder, strideH) +def Pool2DOptionsAddFilterWidth(builder, filterWidth): builder.PrependInt32Slot(3, filterWidth, 0) +def AddFilterWidth(builder, filterWidth): + return Pool2DOptionsAddFilterWidth(builder, filterWidth) +def Pool2DOptionsAddFilterHeight(builder, filterHeight): builder.PrependInt32Slot(4, filterHeight, 0) +def AddFilterHeight(builder, filterHeight): + return Pool2DOptionsAddFilterHeight(builder, filterHeight) +def Pool2DOptionsAddFusedActivationFunction(builder, fusedActivationFunction): builder.PrependInt8Slot(5, fusedActivationFunction, 0) +def AddFusedActivationFunction(builder, fusedActivationFunction): + return Pool2DOptionsAddFusedActivationFunction(builder, fusedActivationFunction) +def Pool2DOptionsEnd(builder): return builder.EndObject() +def End(builder): + return Pool2DOptionsEnd(builder) + +class Pool2DOptionsT(object): + + # Pool2DOptionsT + def __init__(self): + self.padding = 0 # type: int + self.strideW = 0 # type: int + self.strideH = 0 # type: int + self.filterWidth = 0 # type: int + self.filterHeight = 0 # type: int + self.fusedActivationFunction = 0 # type: int + + @classmethod + def InitFromBuf(cls, buf, pos): + pool2doptions = Pool2DOptions() + pool2doptions.Init(buf, pos) + return cls.InitFromObj(pool2doptions) + + @classmethod + def InitFromObj(cls, pool2doptions): + x = Pool2DOptionsT() + x._UnPack(pool2doptions) + return x + + # Pool2DOptionsT + def _UnPack(self, pool2doptions): + if pool2doptions is None: + return + self.padding = pool2doptions.Padding() + self.strideW = pool2doptions.StrideW() + self.strideH = pool2doptions.StrideH() + self.filterWidth = pool2doptions.FilterWidth() + self.filterHeight = 
pool2doptions.FilterHeight() + self.fusedActivationFunction = pool2doptions.FusedActivationFunction() + + # Pool2DOptionsT + def Pack(self, builder): + Pool2DOptionsStart(builder) + Pool2DOptionsAddPadding(builder, self.padding) + Pool2DOptionsAddStrideW(builder, self.strideW) + Pool2DOptionsAddStrideH(builder, self.strideH) + Pool2DOptionsAddFilterWidth(builder, self.filterWidth) + Pool2DOptionsAddFilterHeight(builder, self.filterHeight) + Pool2DOptionsAddFusedActivationFunction(builder, self.fusedActivationFunction) + pool2doptions = Pool2DOptionsEnd(builder) + return pool2doptions +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: tflite + +from flatbuffers.compat import import_numpy +np = import_numpy() + +class PowOptions(object): + __slots__ = ['_tab'] + + @classmethod + def GetRootAs(cls, buf, offset=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = PowOptions() + x.Init(buf, n + offset) + return x + + @classmethod + def GetRootAsPowOptions(cls, buf, offset=0): + """This method is deprecated. 
Please switch to GetRootAs.""" + return cls.GetRootAs(buf, offset) + @classmethod + def PowOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed) + + # PowOptions + def Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + +def PowOptionsStart(builder): builder.StartObject(0) +def Start(builder): + return PowOptionsStart(builder) +def PowOptionsEnd(builder): return builder.EndObject() +def End(builder): + return PowOptionsEnd(builder) + +class PowOptionsT(object): + + # PowOptionsT + def __init__(self): + pass + + @classmethod + def InitFromBuf(cls, buf, pos): + powOptions = PowOptions() + powOptions.Init(buf, pos) + return cls.InitFromObj(powOptions) + + @classmethod + def InitFromObj(cls, powOptions): + x = PowOptionsT() + x._UnPack(powOptions) + return x + + # PowOptionsT + def _UnPack(self, powOptions): + if powOptions is None: + return + + # PowOptionsT + def Pack(self, builder): + PowOptionsStart(builder) + powOptions = PowOptionsEnd(builder) + return powOptions +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: tflite + +class QuantizationDetails(object): + NONE = 0 + CustomQuantization = 1 + +def QuantizationDetailsCreator(unionType, table): + from flatbuffers.table import Table + if not isinstance(table, Table): + return None + if unionType == QuantizationDetails().CustomQuantization: + return CustomQuantizationT.InitFromBuf(table.Bytes, table.Pos) + return None +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: tflite + +from flatbuffers.compat import import_numpy +np = import_numpy() + +class QuantizationParameters(object): + __slots__ = ['_tab'] + + @classmethod + def GetRootAs(cls, buf, offset=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = QuantizationParameters() + x.Init(buf, n + offset) + return x + + 
@classmethod + def GetRootAsQuantizationParameters(cls, buf, offset=0): + """This method is deprecated. Please switch to GetRootAs.""" + return cls.GetRootAs(buf, offset) + @classmethod + def QuantizationParametersBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed) + + # QuantizationParameters + def Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + + # QuantizationParameters + def Min(self, j): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + a = self._tab.Vector(o) + return self._tab.Get(flatbuffers.number_types.Float32Flags, a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 4)) + return 0 + + # QuantizationParameters + def MinAsNumpy(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Float32Flags, o) + return 0 + + # QuantizationParameters + def MinLength(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.VectorLen(o) + return 0 + + # QuantizationParameters + def MinIsNone(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + return o == 0 + + # QuantizationParameters + def Max(self, j): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) + if o != 0: + a = self._tab.Vector(o) + return self._tab.Get(flatbuffers.number_types.Float32Flags, a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 4)) + return 0 + + # QuantizationParameters + def MaxAsNumpy(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) + if o != 0: + return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Float32Flags, o) + return 0 + + # QuantizationParameters + def MaxLength(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) + if o != 0: + 
return self._tab.VectorLen(o) + return 0 + + # QuantizationParameters + def MaxIsNone(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) + return o == 0 + + # QuantizationParameters + def Scale(self, j): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8)) + if o != 0: + a = self._tab.Vector(o) + return self._tab.Get(flatbuffers.number_types.Float32Flags, a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 4)) + return 0 + + # QuantizationParameters + def ScaleAsNumpy(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8)) + if o != 0: + return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Float32Flags, o) + return 0 + + # QuantizationParameters + def ScaleLength(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8)) + if o != 0: + return self._tab.VectorLen(o) + return 0 + + # QuantizationParameters + def ScaleIsNone(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8)) + return o == 0 + + # QuantizationParameters + def ZeroPoint(self, j): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(10)) + if o != 0: + a = self._tab.Vector(o) + return self._tab.Get(flatbuffers.number_types.Int64Flags, a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 8)) + return 0 + + # QuantizationParameters + def ZeroPointAsNumpy(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(10)) + if o != 0: + return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Int64Flags, o) + return 0 + + # QuantizationParameters + def ZeroPointLength(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(10)) + if o != 0: + return self._tab.VectorLen(o) + return 0 + + # QuantizationParameters + def ZeroPointIsNone(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(10)) + return o == 0 + + # QuantizationParameters + def DetailsType(self): + o = 
flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(12)) + if o != 0: + return self._tab.Get(flatbuffers.number_types.Uint8Flags, o + self._tab.Pos) + return 0 + + # QuantizationParameters + def Details(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(14)) + if o != 0: + from flatbuffers.table import Table + obj = Table(bytearray(), 0) + self._tab.Union(obj, o) + return obj + return None + + # QuantizationParameters + def QuantizedDimension(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(16)) + if o != 0: + return self._tab.Get(flatbuffers.number_types.Int32Flags, o + self._tab.Pos) + return 0 + +def QuantizationParametersStart(builder): builder.StartObject(7) +def Start(builder): + return QuantizationParametersStart(builder) +def QuantizationParametersAddMin(builder, min): builder.PrependUOffsetTRelativeSlot(0, flatbuffers.number_types.UOffsetTFlags.py_type(min), 0) +def AddMin(builder, min): + return QuantizationParametersAddMin(builder, min) +def QuantizationParametersStartMinVector(builder, numElems): return builder.StartVector(4, numElems, 4) +def StartMinVector(builder, numElems): + return QuantizationParametersStartMinVector(builder, numElems) +def QuantizationParametersAddMax(builder, max): builder.PrependUOffsetTRelativeSlot(1, flatbuffers.number_types.UOffsetTFlags.py_type(max), 0) +def AddMax(builder, max): + return QuantizationParametersAddMax(builder, max) +def QuantizationParametersStartMaxVector(builder, numElems): return builder.StartVector(4, numElems, 4) +def StartMaxVector(builder, numElems): + return QuantizationParametersStartMaxVector(builder, numElems) +def QuantizationParametersAddScale(builder, scale): builder.PrependUOffsetTRelativeSlot(2, flatbuffers.number_types.UOffsetTFlags.py_type(scale), 0) +def AddScale(builder, scale): + return QuantizationParametersAddScale(builder, scale) +def QuantizationParametersStartScaleVector(builder, numElems): return 
builder.StartVector(4, numElems, 4) +def StartScaleVector(builder, numElems): + return QuantizationParametersStartScaleVector(builder, numElems) +def QuantizationParametersAddZeroPoint(builder, zeroPoint): builder.PrependUOffsetTRelativeSlot(3, flatbuffers.number_types.UOffsetTFlags.py_type(zeroPoint), 0) +def AddZeroPoint(builder, zeroPoint): + return QuantizationParametersAddZeroPoint(builder, zeroPoint) +def QuantizationParametersStartZeroPointVector(builder, numElems): return builder.StartVector(8, numElems, 8) +def StartZeroPointVector(builder, numElems): + return QuantizationParametersStartZeroPointVector(builder, numElems) +def QuantizationParametersAddDetailsType(builder, detailsType): builder.PrependUint8Slot(4, detailsType, 0) +def AddDetailsType(builder, detailsType): + return QuantizationParametersAddDetailsType(builder, detailsType) +def QuantizationParametersAddDetails(builder, details): builder.PrependUOffsetTRelativeSlot(5, flatbuffers.number_types.UOffsetTFlags.py_type(details), 0) +def AddDetails(builder, details): + return QuantizationParametersAddDetails(builder, details) +def QuantizationParametersAddQuantizedDimension(builder, quantizedDimension): builder.PrependInt32Slot(6, quantizedDimension, 0) +def AddQuantizedDimension(builder, quantizedDimension): + return QuantizationParametersAddQuantizedDimension(builder, quantizedDimension) +def QuantizationParametersEnd(builder): return builder.EndObject() +def End(builder): + return QuantizationParametersEnd(builder) +try: + from typing import List, Union +except: + pass + +class QuantizationParametersT(object): + + # QuantizationParametersT + def __init__(self): + self.min = None # type: List[float] + self.max = None # type: List[float] + self.scale = None # type: List[float] + self.zeroPoint = None # type: List[int] + self.detailsType = 0 # type: int + self.details = None # type: Union[None, CustomQuantizationT] + self.quantizedDimension = 0 # type: int + + @classmethod + def InitFromBuf(cls, 
buf, pos): + quantizationParameters = QuantizationParameters() + quantizationParameters.Init(buf, pos) + return cls.InitFromObj(quantizationParameters) + + @classmethod + def InitFromObj(cls, quantizationParameters): + x = QuantizationParametersT() + x._UnPack(quantizationParameters) + return x + + # QuantizationParametersT + def _UnPack(self, quantizationParameters): + if quantizationParameters is None: + return + if not quantizationParameters.MinIsNone(): + if np is None: + self.min = [] + for i in range(quantizationParameters.MinLength()): + self.min.append(quantizationParameters.Min(i)) + else: + self.min = quantizationParameters.MinAsNumpy() + if not quantizationParameters.MaxIsNone(): + if np is None: + self.max = [] + for i in range(quantizationParameters.MaxLength()): + self.max.append(quantizationParameters.Max(i)) + else: + self.max = quantizationParameters.MaxAsNumpy() + if not quantizationParameters.ScaleIsNone(): + if np is None: + self.scale = [] + for i in range(quantizationParameters.ScaleLength()): + self.scale.append(quantizationParameters.Scale(i)) + else: + self.scale = quantizationParameters.ScaleAsNumpy() + if not quantizationParameters.ZeroPointIsNone(): + if np is None: + self.zeroPoint = [] + for i in range(quantizationParameters.ZeroPointLength()): + self.zeroPoint.append(quantizationParameters.ZeroPoint(i)) + else: + self.zeroPoint = quantizationParameters.ZeroPointAsNumpy() + self.detailsType = quantizationParameters.DetailsType() + self.details = QuantizationDetailsCreator(self.detailsType, quantizationParameters.Details()) + self.quantizedDimension = quantizationParameters.QuantizedDimension() + + # QuantizationParametersT + def Pack(self, builder): + if self.min is not None: + if np is not None and type(self.min) is np.ndarray: + min = builder.CreateNumpyVector(self.min) + else: + QuantizationParametersStartMinVector(builder, len(self.min)) + for i in reversed(range(len(self.min))): + builder.PrependFloat32(self.min[i]) + min = 
builder.EndVector() + if self.max is not None: + if np is not None and type(self.max) is np.ndarray: + max = builder.CreateNumpyVector(self.max) + else: + QuantizationParametersStartMaxVector(builder, len(self.max)) + for i in reversed(range(len(self.max))): + builder.PrependFloat32(self.max[i]) + max = builder.EndVector() + if self.scale is not None: + if np is not None and type(self.scale) is np.ndarray: + scale = builder.CreateNumpyVector(self.scale) + else: + QuantizationParametersStartScaleVector(builder, len(self.scale)) + for i in reversed(range(len(self.scale))): + builder.PrependFloat32(self.scale[i]) + scale = builder.EndVector() + if self.zeroPoint is not None: + if np is not None and type(self.zeroPoint) is np.ndarray: + zeroPoint = builder.CreateNumpyVector(self.zeroPoint) + else: + QuantizationParametersStartZeroPointVector(builder, len(self.zeroPoint)) + for i in reversed(range(len(self.zeroPoint))): + builder.PrependInt64(self.zeroPoint[i]) + zeroPoint = builder.EndVector() + if self.details is not None: + details = self.details.Pack(builder) + QuantizationParametersStart(builder) + if self.min is not None: + QuantizationParametersAddMin(builder, min) + if self.max is not None: + QuantizationParametersAddMax(builder, max) + if self.scale is not None: + QuantizationParametersAddScale(builder, scale) + if self.zeroPoint is not None: + QuantizationParametersAddZeroPoint(builder, zeroPoint) + QuantizationParametersAddDetailsType(builder, self.detailsType) + if self.details is not None: + QuantizationParametersAddDetails(builder, details) + QuantizationParametersAddQuantizedDimension(builder, self.quantizedDimension) + quantizationParameters = QuantizationParametersEnd(builder) + return quantizationParameters +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: tflite + +from flatbuffers.compat import import_numpy +np = import_numpy() + +class QuantizeOptions(object): + __slots__ = ['_tab'] + + @classmethod + def 
GetRootAs(cls, buf, offset=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = QuantizeOptions() + x.Init(buf, n + offset) + return x + + @classmethod + def GetRootAsQuantizeOptions(cls, buf, offset=0): + """This method is deprecated. Please switch to GetRootAs.""" + return cls.GetRootAs(buf, offset) + @classmethod + def QuantizeOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed) + + # QuantizeOptions + def Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + +def QuantizeOptionsStart(builder): builder.StartObject(0) +def Start(builder): + return QuantizeOptionsStart(builder) +def QuantizeOptionsEnd(builder): return builder.EndObject() +def End(builder): + return QuantizeOptionsEnd(builder) + +class QuantizeOptionsT(object): + + # QuantizeOptionsT + def __init__(self): + pass + + @classmethod + def InitFromBuf(cls, buf, pos): + quantizeOptions = QuantizeOptions() + quantizeOptions.Init(buf, pos) + return cls.InitFromObj(quantizeOptions) + + @classmethod + def InitFromObj(cls, quantizeOptions): + x = QuantizeOptionsT() + x._UnPack(quantizeOptions) + return x + + # QuantizeOptionsT + def _UnPack(self, quantizeOptions): + if quantizeOptions is None: + return + + # QuantizeOptionsT + def Pack(self, builder): + QuantizeOptionsStart(builder) + quantizeOptions = QuantizeOptionsEnd(builder) + return quantizeOptions +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: tflite + +from flatbuffers.compat import import_numpy +np = import_numpy() + +class RNNOptions(object): + __slots__ = ['_tab'] + + @classmethod + def GetRootAs(cls, buf, offset=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = RNNOptions() + x.Init(buf, n + offset) + return x + + @classmethod + def GetRootAsRNNOptions(cls, buf, offset=0): + """This method is deprecated. 
Please switch to GetRootAs.""" + return cls.GetRootAs(buf, offset) + @classmethod + def RNNOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed) + + # RNNOptions + def Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + + # RNNOptions + def FusedActivationFunction(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.Get(flatbuffers.number_types.Int8Flags, o + self._tab.Pos) + return 0 + + # RNNOptions + def AsymmetricQuantizeInputs(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) + if o != 0: + return bool(self._tab.Get(flatbuffers.number_types.BoolFlags, o + self._tab.Pos)) + return False + +def RNNOptionsStart(builder): builder.StartObject(2) +def Start(builder): + return RNNOptionsStart(builder) +def RNNOptionsAddFusedActivationFunction(builder, fusedActivationFunction): builder.PrependInt8Slot(0, fusedActivationFunction, 0) +def AddFusedActivationFunction(builder, fusedActivationFunction): + return RNNOptionsAddFusedActivationFunction(builder, fusedActivationFunction) +def RNNOptionsAddAsymmetricQuantizeInputs(builder, asymmetricQuantizeInputs): builder.PrependBoolSlot(1, asymmetricQuantizeInputs, 0) +def AddAsymmetricQuantizeInputs(builder, asymmetricQuantizeInputs): + return RNNOptionsAddAsymmetricQuantizeInputs(builder, asymmetricQuantizeInputs) +def RNNOptionsEnd(builder): return builder.EndObject() +def End(builder): + return RNNOptionsEnd(builder) + +class RNNOptionsT(object): + + # RNNOptionsT + def __init__(self): + self.fusedActivationFunction = 0 # type: int + self.asymmetricQuantizeInputs = False # type: bool + + @classmethod + def InitFromBuf(cls, buf, pos): + rnnoptions = RNNOptions() + rnnoptions.Init(buf, pos) + return cls.InitFromObj(rnnoptions) + + @classmethod + def InitFromObj(cls, rnnoptions): + x = 
RNNOptionsT() + x._UnPack(rnnoptions) + return x + + # RNNOptionsT + def _UnPack(self, rnnoptions): + if rnnoptions is None: + return + self.fusedActivationFunction = rnnoptions.FusedActivationFunction() + self.asymmetricQuantizeInputs = rnnoptions.AsymmetricQuantizeInputs() + + # RNNOptionsT + def Pack(self, builder): + RNNOptionsStart(builder) + RNNOptionsAddFusedActivationFunction(builder, self.fusedActivationFunction) + RNNOptionsAddAsymmetricQuantizeInputs(builder, self.asymmetricQuantizeInputs) + rnnoptions = RNNOptionsEnd(builder) + return rnnoptions +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: tflite + +from flatbuffers.compat import import_numpy +np = import_numpy() + +class RandomOptions(object): + __slots__ = ['_tab'] + + @classmethod + def GetRootAs(cls, buf, offset=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = RandomOptions() + x.Init(buf, n + offset) + return x + + @classmethod + def GetRootAsRandomOptions(cls, buf, offset=0): + """This method is deprecated. 
Please switch to GetRootAs.""" + return cls.GetRootAs(buf, offset) + @classmethod + def RandomOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed) + + # RandomOptions + def Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + + # RandomOptions + def Seed(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.Get(flatbuffers.number_types.Int64Flags, o + self._tab.Pos) + return 0 + + # RandomOptions + def Seed2(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) + if o != 0: + return self._tab.Get(flatbuffers.number_types.Int64Flags, o + self._tab.Pos) + return 0 + +def RandomOptionsStart(builder): builder.StartObject(2) +def Start(builder): + return RandomOptionsStart(builder) +def RandomOptionsAddSeed(builder, seed): builder.PrependInt64Slot(0, seed, 0) +def AddSeed(builder, seed): + return RandomOptionsAddSeed(builder, seed) +def RandomOptionsAddSeed2(builder, seed2): builder.PrependInt64Slot(1, seed2, 0) +def AddSeed2(builder, seed2): + return RandomOptionsAddSeed2(builder, seed2) +def RandomOptionsEnd(builder): return builder.EndObject() +def End(builder): + return RandomOptionsEnd(builder) + +class RandomOptionsT(object): + + # RandomOptionsT + def __init__(self): + self.seed = 0 # type: int + self.seed2 = 0 # type: int + + @classmethod + def InitFromBuf(cls, buf, pos): + randomOptions = RandomOptions() + randomOptions.Init(buf, pos) + return cls.InitFromObj(randomOptions) + + @classmethod + def InitFromObj(cls, randomOptions): + x = RandomOptionsT() + x._UnPack(randomOptions) + return x + + # RandomOptionsT + def _UnPack(self, randomOptions): + if randomOptions is None: + return + self.seed = randomOptions.Seed() + self.seed2 = randomOptions.Seed2() + + # RandomOptionsT + def Pack(self, builder): + RandomOptionsStart(builder) + 
RandomOptionsAddSeed(builder, self.seed) + RandomOptionsAddSeed2(builder, self.seed2) + randomOptions = RandomOptionsEnd(builder) + return randomOptions +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: tflite + +from flatbuffers.compat import import_numpy +np = import_numpy() + +class RangeOptions(object): + __slots__ = ['_tab'] + + @classmethod + def GetRootAs(cls, buf, offset=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = RangeOptions() + x.Init(buf, n + offset) + return x + + @classmethod + def GetRootAsRangeOptions(cls, buf, offset=0): + """This method is deprecated. Please switch to GetRootAs.""" + return cls.GetRootAs(buf, offset) + @classmethod + def RangeOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed) + + # RangeOptions + def Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + +def RangeOptionsStart(builder): builder.StartObject(0) +def Start(builder): + return RangeOptionsStart(builder) +def RangeOptionsEnd(builder): return builder.EndObject() +def End(builder): + return RangeOptionsEnd(builder) + +class RangeOptionsT(object): + + # RangeOptionsT + def __init__(self): + pass + + @classmethod + def InitFromBuf(cls, buf, pos): + rangeOptions = RangeOptions() + rangeOptions.Init(buf, pos) + return cls.InitFromObj(rangeOptions) + + @classmethod + def InitFromObj(cls, rangeOptions): + x = RangeOptionsT() + x._UnPack(rangeOptions) + return x + + # RangeOptionsT + def _UnPack(self, rangeOptions): + if rangeOptions is None: + return + + # RangeOptionsT + def Pack(self, builder): + RangeOptionsStart(builder) + rangeOptions = RangeOptionsEnd(builder) + return rangeOptions +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: tflite + +from flatbuffers.compat import import_numpy +np = import_numpy() + +class 
RankOptions(object): + __slots__ = ['_tab'] + + @classmethod + def GetRootAs(cls, buf, offset=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = RankOptions() + x.Init(buf, n + offset) + return x + + @classmethod + def GetRootAsRankOptions(cls, buf, offset=0): + """This method is deprecated. Please switch to GetRootAs.""" + return cls.GetRootAs(buf, offset) + @classmethod + def RankOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed) + + # RankOptions + def Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + +def RankOptionsStart(builder): builder.StartObject(0) +def Start(builder): + return RankOptionsStart(builder) +def RankOptionsEnd(builder): return builder.EndObject() +def End(builder): + return RankOptionsEnd(builder) + +class RankOptionsT(object): + + # RankOptionsT + def __init__(self): + pass + + @classmethod + def InitFromBuf(cls, buf, pos): + rankOptions = RankOptions() + rankOptions.Init(buf, pos) + return cls.InitFromObj(rankOptions) + + @classmethod + def InitFromObj(cls, rankOptions): + x = RankOptionsT() + x._UnPack(rankOptions) + return x + + # RankOptionsT + def _UnPack(self, rankOptions): + if rankOptions is None: + return + + # RankOptionsT + def Pack(self, builder): + RankOptionsStart(builder) + rankOptions = RankOptionsEnd(builder) + return rankOptions +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: tflite + +from flatbuffers.compat import import_numpy +np = import_numpy() + +class ReadVariableOptions(object): + __slots__ = ['_tab'] + + @classmethod + def GetRootAs(cls, buf, offset=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = ReadVariableOptions() + x.Init(buf, n + offset) + return x + + @classmethod + def GetRootAsReadVariableOptions(cls, buf, offset=0): + """This method is deprecated. 
Please switch to GetRootAs.""" + return cls.GetRootAs(buf, offset) + @classmethod + def ReadVariableOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed) + + # ReadVariableOptions + def Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + +def ReadVariableOptionsStart(builder): builder.StartObject(0) +def Start(builder): + return ReadVariableOptionsStart(builder) +def ReadVariableOptionsEnd(builder): return builder.EndObject() +def End(builder): + return ReadVariableOptionsEnd(builder) + +class ReadVariableOptionsT(object): + + # ReadVariableOptionsT + def __init__(self): + pass + + @classmethod + def InitFromBuf(cls, buf, pos): + readVariableOptions = ReadVariableOptions() + readVariableOptions.Init(buf, pos) + return cls.InitFromObj(readVariableOptions) + + @classmethod + def InitFromObj(cls, readVariableOptions): + x = ReadVariableOptionsT() + x._UnPack(readVariableOptions) + return x + + # ReadVariableOptionsT + def _UnPack(self, readVariableOptions): + if readVariableOptions is None: + return + + # ReadVariableOptionsT + def Pack(self, builder): + ReadVariableOptionsStart(builder) + readVariableOptions = ReadVariableOptionsEnd(builder) + return readVariableOptions +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: tflite + +from flatbuffers.compat import import_numpy +np = import_numpy() + +class ReducerOptions(object): + __slots__ = ['_tab'] + + @classmethod + def GetRootAs(cls, buf, offset=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = ReducerOptions() + x.Init(buf, n + offset) + return x + + @classmethod + def GetRootAsReducerOptions(cls, buf, offset=0): + """This method is deprecated. 
Please switch to GetRootAs.""" + return cls.GetRootAs(buf, offset) + @classmethod + def ReducerOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed) + + # ReducerOptions + def Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + + # ReducerOptions + def KeepDims(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return bool(self._tab.Get(flatbuffers.number_types.BoolFlags, o + self._tab.Pos)) + return False + +def ReducerOptionsStart(builder): builder.StartObject(1) +def Start(builder): + return ReducerOptionsStart(builder) +def ReducerOptionsAddKeepDims(builder, keepDims): builder.PrependBoolSlot(0, keepDims, 0) +def AddKeepDims(builder, keepDims): + return ReducerOptionsAddKeepDims(builder, keepDims) +def ReducerOptionsEnd(builder): return builder.EndObject() +def End(builder): + return ReducerOptionsEnd(builder) + +class ReducerOptionsT(object): + + # ReducerOptionsT + def __init__(self): + self.keepDims = False # type: bool + + @classmethod + def InitFromBuf(cls, buf, pos): + reducerOptions = ReducerOptions() + reducerOptions.Init(buf, pos) + return cls.InitFromObj(reducerOptions) + + @classmethod + def InitFromObj(cls, reducerOptions): + x = ReducerOptionsT() + x._UnPack(reducerOptions) + return x + + # ReducerOptionsT + def _UnPack(self, reducerOptions): + if reducerOptions is None: + return + self.keepDims = reducerOptions.KeepDims() + + # ReducerOptionsT + def Pack(self, builder): + ReducerOptionsStart(builder) + ReducerOptionsAddKeepDims(builder, self.keepDims) + reducerOptions = ReducerOptionsEnd(builder) + return reducerOptions +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: tflite + +from flatbuffers.compat import import_numpy +np = import_numpy() + +class ReshapeOptions(object): + __slots__ = ['_tab'] + + @classmethod + def 
GetRootAs(cls, buf, offset=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = ReshapeOptions() + x.Init(buf, n + offset) + return x + + @classmethod + def GetRootAsReshapeOptions(cls, buf, offset=0): + """This method is deprecated. Please switch to GetRootAs.""" + return cls.GetRootAs(buf, offset) + @classmethod + def ReshapeOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed) + + # ReshapeOptions + def Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + + # ReshapeOptions + def NewShape(self, j): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + a = self._tab.Vector(o) + return self._tab.Get(flatbuffers.number_types.Int32Flags, a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 4)) + return 0 + + # ReshapeOptions + def NewShapeAsNumpy(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Int32Flags, o) + return 0 + + # ReshapeOptions + def NewShapeLength(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.VectorLen(o) + return 0 + + # ReshapeOptions + def NewShapeIsNone(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + return o == 0 + +def ReshapeOptionsStart(builder): builder.StartObject(1) +def Start(builder): + return ReshapeOptionsStart(builder) +def ReshapeOptionsAddNewShape(builder, newShape): builder.PrependUOffsetTRelativeSlot(0, flatbuffers.number_types.UOffsetTFlags.py_type(newShape), 0) +def AddNewShape(builder, newShape): + return ReshapeOptionsAddNewShape(builder, newShape) +def ReshapeOptionsStartNewShapeVector(builder, numElems): return builder.StartVector(4, numElems, 4) +def StartNewShapeVector(builder, numElems): + return 
ReshapeOptionsStartNewShapeVector(builder, numElems) +def ReshapeOptionsEnd(builder): return builder.EndObject() +def End(builder): + return ReshapeOptionsEnd(builder) +try: + from typing import List +except: + pass + +class ReshapeOptionsT(object): + + # ReshapeOptionsT + def __init__(self): + self.newShape = None # type: List[int] + + @classmethod + def InitFromBuf(cls, buf, pos): + reshapeOptions = ReshapeOptions() + reshapeOptions.Init(buf, pos) + return cls.InitFromObj(reshapeOptions) + + @classmethod + def InitFromObj(cls, reshapeOptions): + x = ReshapeOptionsT() + x._UnPack(reshapeOptions) + return x + + # ReshapeOptionsT + def _UnPack(self, reshapeOptions): + if reshapeOptions is None: + return + if not reshapeOptions.NewShapeIsNone(): + if np is None: + self.newShape = [] + for i in range(reshapeOptions.NewShapeLength()): + self.newShape.append(reshapeOptions.NewShape(i)) + else: + self.newShape = reshapeOptions.NewShapeAsNumpy() + + # ReshapeOptionsT + def Pack(self, builder): + if self.newShape is not None: + if np is not None and type(self.newShape) is np.ndarray: + newShape = builder.CreateNumpyVector(self.newShape) + else: + ReshapeOptionsStartNewShapeVector(builder, len(self.newShape)) + for i in reversed(range(len(self.newShape))): + builder.PrependInt32(self.newShape[i]) + newShape = builder.EndVector() + ReshapeOptionsStart(builder) + if self.newShape is not None: + ReshapeOptionsAddNewShape(builder, newShape) + reshapeOptions = ReshapeOptionsEnd(builder) + return reshapeOptions +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: tflite + +from flatbuffers.compat import import_numpy +np = import_numpy() + +class ResizeBilinearOptions(object): + __slots__ = ['_tab'] + + @classmethod + def GetRootAs(cls, buf, offset=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = ResizeBilinearOptions() + x.Init(buf, n + offset) + return x + + @classmethod + def GetRootAsResizeBilinearOptions(cls, 
buf, offset=0): + """This method is deprecated. Please switch to GetRootAs.""" + return cls.GetRootAs(buf, offset) + @classmethod + def ResizeBilinearOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed) + + # ResizeBilinearOptions + def Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + + # ResizeBilinearOptions + def AlignCorners(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8)) + if o != 0: + return bool(self._tab.Get(flatbuffers.number_types.BoolFlags, o + self._tab.Pos)) + return False + + # ResizeBilinearOptions + def HalfPixelCenters(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(10)) + if o != 0: + return bool(self._tab.Get(flatbuffers.number_types.BoolFlags, o + self._tab.Pos)) + return False + +def ResizeBilinearOptionsStart(builder): builder.StartObject(4) +def Start(builder): + return ResizeBilinearOptionsStart(builder) +def ResizeBilinearOptionsAddAlignCorners(builder, alignCorners): builder.PrependBoolSlot(2, alignCorners, 0) +def AddAlignCorners(builder, alignCorners): + return ResizeBilinearOptionsAddAlignCorners(builder, alignCorners) +def ResizeBilinearOptionsAddHalfPixelCenters(builder, halfPixelCenters): builder.PrependBoolSlot(3, halfPixelCenters, 0) +def AddHalfPixelCenters(builder, halfPixelCenters): + return ResizeBilinearOptionsAddHalfPixelCenters(builder, halfPixelCenters) +def ResizeBilinearOptionsEnd(builder): return builder.EndObject() +def End(builder): + return ResizeBilinearOptionsEnd(builder) + +class ResizeBilinearOptionsT(object): + + # ResizeBilinearOptionsT + def __init__(self): + self.alignCorners = False # type: bool + self.halfPixelCenters = False # type: bool + + @classmethod + def InitFromBuf(cls, buf, pos): + resizeBilinearOptions = ResizeBilinearOptions() + resizeBilinearOptions.Init(buf, pos) + return 
cls.InitFromObj(resizeBilinearOptions) + + @classmethod + def InitFromObj(cls, resizeBilinearOptions): + x = ResizeBilinearOptionsT() + x._UnPack(resizeBilinearOptions) + return x + + # ResizeBilinearOptionsT + def _UnPack(self, resizeBilinearOptions): + if resizeBilinearOptions is None: + return + self.alignCorners = resizeBilinearOptions.AlignCorners() + self.halfPixelCenters = resizeBilinearOptions.HalfPixelCenters() + + # ResizeBilinearOptionsT + def Pack(self, builder): + ResizeBilinearOptionsStart(builder) + ResizeBilinearOptionsAddAlignCorners(builder, self.alignCorners) + ResizeBilinearOptionsAddHalfPixelCenters(builder, self.halfPixelCenters) + resizeBilinearOptions = ResizeBilinearOptionsEnd(builder) + return resizeBilinearOptions +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: tflite + +from flatbuffers.compat import import_numpy +np = import_numpy() + +class ResizeNearestNeighborOptions(object): + __slots__ = ['_tab'] + + @classmethod + def GetRootAs(cls, buf, offset=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = ResizeNearestNeighborOptions() + x.Init(buf, n + offset) + return x + + @classmethod + def GetRootAsResizeNearestNeighborOptions(cls, buf, offset=0): + """This method is deprecated. 
Please switch to GetRootAs.""" + return cls.GetRootAs(buf, offset) + @classmethod + def ResizeNearestNeighborOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed) + + # ResizeNearestNeighborOptions + def Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + + # ResizeNearestNeighborOptions + def AlignCorners(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return bool(self._tab.Get(flatbuffers.number_types.BoolFlags, o + self._tab.Pos)) + return False + + # ResizeNearestNeighborOptions + def HalfPixelCenters(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) + if o != 0: + return bool(self._tab.Get(flatbuffers.number_types.BoolFlags, o + self._tab.Pos)) + return False + +def ResizeNearestNeighborOptionsStart(builder): builder.StartObject(2) +def Start(builder): + return ResizeNearestNeighborOptionsStart(builder) +def ResizeNearestNeighborOptionsAddAlignCorners(builder, alignCorners): builder.PrependBoolSlot(0, alignCorners, 0) +def AddAlignCorners(builder, alignCorners): + return ResizeNearestNeighborOptionsAddAlignCorners(builder, alignCorners) +def ResizeNearestNeighborOptionsAddHalfPixelCenters(builder, halfPixelCenters): builder.PrependBoolSlot(1, halfPixelCenters, 0) +def AddHalfPixelCenters(builder, halfPixelCenters): + return ResizeNearestNeighborOptionsAddHalfPixelCenters(builder, halfPixelCenters) +def ResizeNearestNeighborOptionsEnd(builder): return builder.EndObject() +def End(builder): + return ResizeNearestNeighborOptionsEnd(builder) + +class ResizeNearestNeighborOptionsT(object): + + # ResizeNearestNeighborOptionsT + def __init__(self): + self.alignCorners = False # type: bool + self.halfPixelCenters = False # type: bool + + @classmethod + def InitFromBuf(cls, buf, pos): + resizeNearestNeighborOptions = ResizeNearestNeighborOptions() + 
resizeNearestNeighborOptions.Init(buf, pos) + return cls.InitFromObj(resizeNearestNeighborOptions) + + @classmethod + def InitFromObj(cls, resizeNearestNeighborOptions): + x = ResizeNearestNeighborOptionsT() + x._UnPack(resizeNearestNeighborOptions) + return x + + # ResizeNearestNeighborOptionsT + def _UnPack(self, resizeNearestNeighborOptions): + if resizeNearestNeighborOptions is None: + return + self.alignCorners = resizeNearestNeighborOptions.AlignCorners() + self.halfPixelCenters = resizeNearestNeighborOptions.HalfPixelCenters() + + # ResizeNearestNeighborOptionsT + def Pack(self, builder): + ResizeNearestNeighborOptionsStart(builder) + ResizeNearestNeighborOptionsAddAlignCorners(builder, self.alignCorners) + ResizeNearestNeighborOptionsAddHalfPixelCenters(builder, self.halfPixelCenters) + resizeNearestNeighborOptions = ResizeNearestNeighborOptionsEnd(builder) + return resizeNearestNeighborOptions +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: tflite + +from flatbuffers.compat import import_numpy +np = import_numpy() + +class ReverseSequenceOptions(object): + __slots__ = ['_tab'] + + @classmethod + def GetRootAs(cls, buf, offset=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = ReverseSequenceOptions() + x.Init(buf, n + offset) + return x + + @classmethod + def GetRootAsReverseSequenceOptions(cls, buf, offset=0): + """This method is deprecated. 
Please switch to GetRootAs.""" + return cls.GetRootAs(buf, offset) + @classmethod + def ReverseSequenceOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed) + + # ReverseSequenceOptions + def Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + + # ReverseSequenceOptions + def SeqDim(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.Get(flatbuffers.number_types.Int32Flags, o + self._tab.Pos) + return 0 + + # ReverseSequenceOptions + def BatchDim(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) + if o != 0: + return self._tab.Get(flatbuffers.number_types.Int32Flags, o + self._tab.Pos) + return 0 + +def ReverseSequenceOptionsStart(builder): builder.StartObject(2) +def Start(builder): + return ReverseSequenceOptionsStart(builder) +def ReverseSequenceOptionsAddSeqDim(builder, seqDim): builder.PrependInt32Slot(0, seqDim, 0) +def AddSeqDim(builder, seqDim): + return ReverseSequenceOptionsAddSeqDim(builder, seqDim) +def ReverseSequenceOptionsAddBatchDim(builder, batchDim): builder.PrependInt32Slot(1, batchDim, 0) +def AddBatchDim(builder, batchDim): + return ReverseSequenceOptionsAddBatchDim(builder, batchDim) +def ReverseSequenceOptionsEnd(builder): return builder.EndObject() +def End(builder): + return ReverseSequenceOptionsEnd(builder) + +class ReverseSequenceOptionsT(object): + + # ReverseSequenceOptionsT + def __init__(self): + self.seqDim = 0 # type: int + self.batchDim = 0 # type: int + + @classmethod + def InitFromBuf(cls, buf, pos): + reverseSequenceOptions = ReverseSequenceOptions() + reverseSequenceOptions.Init(buf, pos) + return cls.InitFromObj(reverseSequenceOptions) + + @classmethod + def InitFromObj(cls, reverseSequenceOptions): + x = ReverseSequenceOptionsT() + x._UnPack(reverseSequenceOptions) + return x + + # 
ReverseSequenceOptionsT + def _UnPack(self, reverseSequenceOptions): + if reverseSequenceOptions is None: + return + self.seqDim = reverseSequenceOptions.SeqDim() + self.batchDim = reverseSequenceOptions.BatchDim() + + # ReverseSequenceOptionsT + def Pack(self, builder): + ReverseSequenceOptionsStart(builder) + ReverseSequenceOptionsAddSeqDim(builder, self.seqDim) + ReverseSequenceOptionsAddBatchDim(builder, self.batchDim) + reverseSequenceOptions = ReverseSequenceOptionsEnd(builder) + return reverseSequenceOptions +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: tflite + +from flatbuffers.compat import import_numpy +np = import_numpy() + +class ReverseV2Options(object): + __slots__ = ['_tab'] + + @classmethod + def GetRootAs(cls, buf, offset=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = ReverseV2Options() + x.Init(buf, n + offset) + return x + + @classmethod + def GetRootAsReverseV2Options(cls, buf, offset=0): + """This method is deprecated. 
Please switch to GetRootAs.""" + return cls.GetRootAs(buf, offset) + @classmethod + def ReverseV2OptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed) + + # ReverseV2Options + def Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + +def ReverseV2OptionsStart(builder): builder.StartObject(0) +def Start(builder): + return ReverseV2OptionsStart(builder) +def ReverseV2OptionsEnd(builder): return builder.EndObject() +def End(builder): + return ReverseV2OptionsEnd(builder) + +class ReverseV2OptionsT(object): + + # ReverseV2OptionsT + def __init__(self): + pass + + @classmethod + def InitFromBuf(cls, buf, pos): + reverseV2options = ReverseV2Options() + reverseV2options.Init(buf, pos) + return cls.InitFromObj(reverseV2options) + + @classmethod + def InitFromObj(cls, reverseV2options): + x = ReverseV2OptionsT() + x._UnPack(reverseV2options) + return x + + # ReverseV2OptionsT + def _UnPack(self, reverseV2options): + if reverseV2options is None: + return + + # ReverseV2OptionsT + def Pack(self, builder): + ReverseV2OptionsStart(builder) + reverseV2options = ReverseV2OptionsEnd(builder) + return reverseV2options +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: tflite + +from flatbuffers.compat import import_numpy +np = import_numpy() + +class Rfft2dOptions(object): + __slots__ = ['_tab'] + + @classmethod + def GetRootAs(cls, buf, offset=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = Rfft2dOptions() + x.Init(buf, n + offset) + return x + + @classmethod + def GetRootAsRfft2dOptions(cls, buf, offset=0): + """This method is deprecated. 
Please switch to GetRootAs.""" + return cls.GetRootAs(buf, offset) + @classmethod + def Rfft2dOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed) + + # Rfft2dOptions + def Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + +def Rfft2dOptionsStart(builder): builder.StartObject(0) +def Start(builder): + return Rfft2dOptionsStart(builder) +def Rfft2dOptionsEnd(builder): return builder.EndObject() +def End(builder): + return Rfft2dOptionsEnd(builder) + +class Rfft2dOptionsT(object): + + # Rfft2dOptionsT + def __init__(self): + pass + + @classmethod + def InitFromBuf(cls, buf, pos): + rfft2dOptions = Rfft2dOptions() + rfft2dOptions.Init(buf, pos) + return cls.InitFromObj(rfft2dOptions) + + @classmethod + def InitFromObj(cls, rfft2dOptions): + x = Rfft2dOptionsT() + x._UnPack(rfft2dOptions) + return x + + # Rfft2dOptionsT + def _UnPack(self, rfft2dOptions): + if rfft2dOptions is None: + return + + # Rfft2dOptionsT + def Pack(self, builder): + Rfft2dOptionsStart(builder) + rfft2dOptions = Rfft2dOptionsEnd(builder) + return rfft2dOptions +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: tflite + +from flatbuffers.compat import import_numpy +np = import_numpy() + +class RightShiftOptions(object): + __slots__ = ['_tab'] + + @classmethod + def GetRootAs(cls, buf, offset=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = RightShiftOptions() + x.Init(buf, n + offset) + return x + + @classmethod + def GetRootAsRightShiftOptions(cls, buf, offset=0): + """This method is deprecated. 
Please switch to GetRootAs.""" + return cls.GetRootAs(buf, offset) + @classmethod + def RightShiftOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed) + + # RightShiftOptions + def Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + +def RightShiftOptionsStart(builder): builder.StartObject(0) +def Start(builder): + return RightShiftOptionsStart(builder) +def RightShiftOptionsEnd(builder): return builder.EndObject() +def End(builder): + return RightShiftOptionsEnd(builder) + +class RightShiftOptionsT(object): + + # RightShiftOptionsT + def __init__(self): + pass + + @classmethod + def InitFromBuf(cls, buf, pos): + rightShiftOptions = RightShiftOptions() + rightShiftOptions.Init(buf, pos) + return cls.InitFromObj(rightShiftOptions) + + @classmethod + def InitFromObj(cls, rightShiftOptions): + x = RightShiftOptionsT() + x._UnPack(rightShiftOptions) + return x + + # RightShiftOptionsT + def _UnPack(self, rightShiftOptions): + if rightShiftOptions is None: + return + + # RightShiftOptionsT + def Pack(self, builder): + RightShiftOptionsStart(builder) + rightShiftOptions = RightShiftOptionsEnd(builder) + return rightShiftOptions +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: tflite + +from flatbuffers.compat import import_numpy +np = import_numpy() + +class SVDFOptions(object): + __slots__ = ['_tab'] + + @classmethod + def GetRootAs(cls, buf, offset=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = SVDFOptions() + x.Init(buf, n + offset) + return x + + @classmethod + def GetRootAsSVDFOptions(cls, buf, offset=0): + """This method is deprecated. 
Please switch to GetRootAs.""" + return cls.GetRootAs(buf, offset) + @classmethod + def SVDFOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed) + + # SVDFOptions + def Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + + # SVDFOptions + def Rank(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.Get(flatbuffers.number_types.Int32Flags, o + self._tab.Pos) + return 0 + + # SVDFOptions + def FusedActivationFunction(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) + if o != 0: + return self._tab.Get(flatbuffers.number_types.Int8Flags, o + self._tab.Pos) + return 0 + + # SVDFOptions + def AsymmetricQuantizeInputs(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8)) + if o != 0: + return bool(self._tab.Get(flatbuffers.number_types.BoolFlags, o + self._tab.Pos)) + return False + +def SVDFOptionsStart(builder): builder.StartObject(3) +def Start(builder): + return SVDFOptionsStart(builder) +def SVDFOptionsAddRank(builder, rank): builder.PrependInt32Slot(0, rank, 0) +def AddRank(builder, rank): + return SVDFOptionsAddRank(builder, rank) +def SVDFOptionsAddFusedActivationFunction(builder, fusedActivationFunction): builder.PrependInt8Slot(1, fusedActivationFunction, 0) +def AddFusedActivationFunction(builder, fusedActivationFunction): + return SVDFOptionsAddFusedActivationFunction(builder, fusedActivationFunction) +def SVDFOptionsAddAsymmetricQuantizeInputs(builder, asymmetricQuantizeInputs): builder.PrependBoolSlot(2, asymmetricQuantizeInputs, 0) +def AddAsymmetricQuantizeInputs(builder, asymmetricQuantizeInputs): + return SVDFOptionsAddAsymmetricQuantizeInputs(builder, asymmetricQuantizeInputs) +def SVDFOptionsEnd(builder): return builder.EndObject() +def End(builder): + return SVDFOptionsEnd(builder) + +class 
SVDFOptionsT(object): + + # SVDFOptionsT + def __init__(self): + self.rank = 0 # type: int + self.fusedActivationFunction = 0 # type: int + self.asymmetricQuantizeInputs = False # type: bool + + @classmethod + def InitFromBuf(cls, buf, pos): + svdfoptions = SVDFOptions() + svdfoptions.Init(buf, pos) + return cls.InitFromObj(svdfoptions) + + @classmethod + def InitFromObj(cls, svdfoptions): + x = SVDFOptionsT() + x._UnPack(svdfoptions) + return x + + # SVDFOptionsT + def _UnPack(self, svdfoptions): + if svdfoptions is None: + return + self.rank = svdfoptions.Rank() + self.fusedActivationFunction = svdfoptions.FusedActivationFunction() + self.asymmetricQuantizeInputs = svdfoptions.AsymmetricQuantizeInputs() + + # SVDFOptionsT + def Pack(self, builder): + SVDFOptionsStart(builder) + SVDFOptionsAddRank(builder, self.rank) + SVDFOptionsAddFusedActivationFunction(builder, self.fusedActivationFunction) + SVDFOptionsAddAsymmetricQuantizeInputs(builder, self.asymmetricQuantizeInputs) + svdfoptions = SVDFOptionsEnd(builder) + return svdfoptions +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: tflite + +from flatbuffers.compat import import_numpy +np = import_numpy() + +class ScatterNdOptions(object): + __slots__ = ['_tab'] + + @classmethod + def GetRootAs(cls, buf, offset=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = ScatterNdOptions() + x.Init(buf, n + offset) + return x + + @classmethod + def GetRootAsScatterNdOptions(cls, buf, offset=0): + """This method is deprecated. 
Please switch to GetRootAs.""" + return cls.GetRootAs(buf, offset) + @classmethod + def ScatterNdOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed) + + # ScatterNdOptions + def Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + +def ScatterNdOptionsStart(builder): builder.StartObject(0) +def Start(builder): + return ScatterNdOptionsStart(builder) +def ScatterNdOptionsEnd(builder): return builder.EndObject() +def End(builder): + return ScatterNdOptionsEnd(builder) + +class ScatterNdOptionsT(object): + + # ScatterNdOptionsT + def __init__(self): + pass + + @classmethod + def InitFromBuf(cls, buf, pos): + scatterNdOptions = ScatterNdOptions() + scatterNdOptions.Init(buf, pos) + return cls.InitFromObj(scatterNdOptions) + + @classmethod + def InitFromObj(cls, scatterNdOptions): + x = ScatterNdOptionsT() + x._UnPack(scatterNdOptions) + return x + + # ScatterNdOptionsT + def _UnPack(self, scatterNdOptions): + if scatterNdOptions is None: + return + + # ScatterNdOptionsT + def Pack(self, builder): + ScatterNdOptionsStart(builder) + scatterNdOptions = ScatterNdOptionsEnd(builder) + return scatterNdOptions +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: tflite + +from flatbuffers.compat import import_numpy +np = import_numpy() + +class SegmentSumOptions(object): + __slots__ = ['_tab'] + + @classmethod + def GetRootAs(cls, buf, offset=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = SegmentSumOptions() + x.Init(buf, n + offset) + return x + + @classmethod + def GetRootAsSegmentSumOptions(cls, buf, offset=0): + """This method is deprecated. 
Please switch to GetRootAs.""" + return cls.GetRootAs(buf, offset) + @classmethod + def SegmentSumOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed) + + # SegmentSumOptions + def Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + +def SegmentSumOptionsStart(builder): builder.StartObject(0) +def Start(builder): + return SegmentSumOptionsStart(builder) +def SegmentSumOptionsEnd(builder): return builder.EndObject() +def End(builder): + return SegmentSumOptionsEnd(builder) + +class SegmentSumOptionsT(object): + + # SegmentSumOptionsT + def __init__(self): + pass + + @classmethod + def InitFromBuf(cls, buf, pos): + segmentSumOptions = SegmentSumOptions() + segmentSumOptions.Init(buf, pos) + return cls.InitFromObj(segmentSumOptions) + + @classmethod + def InitFromObj(cls, segmentSumOptions): + x = SegmentSumOptionsT() + x._UnPack(segmentSumOptions) + return x + + # SegmentSumOptionsT + def _UnPack(self, segmentSumOptions): + if segmentSumOptions is None: + return + + # SegmentSumOptionsT + def Pack(self, builder): + SegmentSumOptionsStart(builder) + segmentSumOptions = SegmentSumOptionsEnd(builder) + return segmentSumOptions +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: tflite + +from flatbuffers.compat import import_numpy +np = import_numpy() + +class SelectOptions(object): + __slots__ = ['_tab'] + + @classmethod + def GetRootAs(cls, buf, offset=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = SelectOptions() + x.Init(buf, n + offset) + return x + + @classmethod + def GetRootAsSelectOptions(cls, buf, offset=0): + """This method is deprecated. 
Please switch to GetRootAs.""" + return cls.GetRootAs(buf, offset) + @classmethod + def SelectOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed) + + # SelectOptions + def Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + +def SelectOptionsStart(builder): builder.StartObject(0) +def Start(builder): + return SelectOptionsStart(builder) +def SelectOptionsEnd(builder): return builder.EndObject() +def End(builder): + return SelectOptionsEnd(builder) + +class SelectOptionsT(object): + + # SelectOptionsT + def __init__(self): + pass + + @classmethod + def InitFromBuf(cls, buf, pos): + selectOptions = SelectOptions() + selectOptions.Init(buf, pos) + return cls.InitFromObj(selectOptions) + + @classmethod + def InitFromObj(cls, selectOptions): + x = SelectOptionsT() + x._UnPack(selectOptions) + return x + + # SelectOptionsT + def _UnPack(self, selectOptions): + if selectOptions is None: + return + + # SelectOptionsT + def Pack(self, builder): + SelectOptionsStart(builder) + selectOptions = SelectOptionsEnd(builder) + return selectOptions +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: tflite + +from flatbuffers.compat import import_numpy +np = import_numpy() + +class SelectV2Options(object): + __slots__ = ['_tab'] + + @classmethod + def GetRootAs(cls, buf, offset=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = SelectV2Options() + x.Init(buf, n + offset) + return x + + @classmethod + def GetRootAsSelectV2Options(cls, buf, offset=0): + """This method is deprecated. 
Please switch to GetRootAs.""" + return cls.GetRootAs(buf, offset) + @classmethod + def SelectV2OptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed) + + # SelectV2Options + def Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + +def SelectV2OptionsStart(builder): builder.StartObject(0) +def Start(builder): + return SelectV2OptionsStart(builder) +def SelectV2OptionsEnd(builder): return builder.EndObject() +def End(builder): + return SelectV2OptionsEnd(builder) + +class SelectV2OptionsT(object): + + # SelectV2OptionsT + def __init__(self): + pass + + @classmethod + def InitFromBuf(cls, buf, pos): + selectV2options = SelectV2Options() + selectV2options.Init(buf, pos) + return cls.InitFromObj(selectV2options) + + @classmethod + def InitFromObj(cls, selectV2options): + x = SelectV2OptionsT() + x._UnPack(selectV2options) + return x + + # SelectV2OptionsT + def _UnPack(self, selectV2options): + if selectV2options is None: + return + + # SelectV2OptionsT + def Pack(self, builder): + SelectV2OptionsStart(builder) + selectV2options = SelectV2OptionsEnd(builder) + return selectV2options +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: tflite + +from flatbuffers.compat import import_numpy +np = import_numpy() + +class SequenceRNNOptions(object): + __slots__ = ['_tab'] + + @classmethod + def GetRootAs(cls, buf, offset=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = SequenceRNNOptions() + x.Init(buf, n + offset) + return x + + @classmethod + def GetRootAsSequenceRNNOptions(cls, buf, offset=0): + """This method is deprecated. 
Please switch to GetRootAs.""" + return cls.GetRootAs(buf, offset) + @classmethod + def SequenceRNNOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed) + + # SequenceRNNOptions + def Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + + # SequenceRNNOptions + def TimeMajor(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return bool(self._tab.Get(flatbuffers.number_types.BoolFlags, o + self._tab.Pos)) + return False + + # SequenceRNNOptions + def FusedActivationFunction(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) + if o != 0: + return self._tab.Get(flatbuffers.number_types.Int8Flags, o + self._tab.Pos) + return 0 + + # SequenceRNNOptions + def AsymmetricQuantizeInputs(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8)) + if o != 0: + return bool(self._tab.Get(flatbuffers.number_types.BoolFlags, o + self._tab.Pos)) + return False + +def SequenceRNNOptionsStart(builder): builder.StartObject(3) +def Start(builder): + return SequenceRNNOptionsStart(builder) +def SequenceRNNOptionsAddTimeMajor(builder, timeMajor): builder.PrependBoolSlot(0, timeMajor, 0) +def AddTimeMajor(builder, timeMajor): + return SequenceRNNOptionsAddTimeMajor(builder, timeMajor) +def SequenceRNNOptionsAddFusedActivationFunction(builder, fusedActivationFunction): builder.PrependInt8Slot(1, fusedActivationFunction, 0) +def AddFusedActivationFunction(builder, fusedActivationFunction): + return SequenceRNNOptionsAddFusedActivationFunction(builder, fusedActivationFunction) +def SequenceRNNOptionsAddAsymmetricQuantizeInputs(builder, asymmetricQuantizeInputs): builder.PrependBoolSlot(2, asymmetricQuantizeInputs, 0) +def AddAsymmetricQuantizeInputs(builder, asymmetricQuantizeInputs): + return SequenceRNNOptionsAddAsymmetricQuantizeInputs(builder, 
asymmetricQuantizeInputs) +def SequenceRNNOptionsEnd(builder): return builder.EndObject() +def End(builder): + return SequenceRNNOptionsEnd(builder) + +class SequenceRNNOptionsT(object): + + # SequenceRNNOptionsT + def __init__(self): + self.timeMajor = False # type: bool + self.fusedActivationFunction = 0 # type: int + self.asymmetricQuantizeInputs = False # type: bool + + @classmethod + def InitFromBuf(cls, buf, pos): + sequenceRnnoptions = SequenceRNNOptions() + sequenceRnnoptions.Init(buf, pos) + return cls.InitFromObj(sequenceRnnoptions) + + @classmethod + def InitFromObj(cls, sequenceRnnoptions): + x = SequenceRNNOptionsT() + x._UnPack(sequenceRnnoptions) + return x + + # SequenceRNNOptionsT + def _UnPack(self, sequenceRnnoptions): + if sequenceRnnoptions is None: + return + self.timeMajor = sequenceRnnoptions.TimeMajor() + self.fusedActivationFunction = sequenceRnnoptions.FusedActivationFunction() + self.asymmetricQuantizeInputs = sequenceRnnoptions.AsymmetricQuantizeInputs() + + # SequenceRNNOptionsT + def Pack(self, builder): + SequenceRNNOptionsStart(builder) + SequenceRNNOptionsAddTimeMajor(builder, self.timeMajor) + SequenceRNNOptionsAddFusedActivationFunction(builder, self.fusedActivationFunction) + SequenceRNNOptionsAddAsymmetricQuantizeInputs(builder, self.asymmetricQuantizeInputs) + sequenceRnnoptions = SequenceRNNOptionsEnd(builder) + return sequenceRnnoptions +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: tflite + +from flatbuffers.compat import import_numpy +np = import_numpy() + +class ShapeOptions(object): + __slots__ = ['_tab'] + + @classmethod + def GetRootAs(cls, buf, offset=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = ShapeOptions() + x.Init(buf, n + offset) + return x + + @classmethod + def GetRootAsShapeOptions(cls, buf, offset=0): + """This method is deprecated. 
Please switch to GetRootAs.""" + return cls.GetRootAs(buf, offset) + @classmethod + def ShapeOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed) + + # ShapeOptions + def Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + + # ShapeOptions + def OutType(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.Get(flatbuffers.number_types.Int8Flags, o + self._tab.Pos) + return 0 + +def ShapeOptionsStart(builder): builder.StartObject(1) +def Start(builder): + return ShapeOptionsStart(builder) +def ShapeOptionsAddOutType(builder, outType): builder.PrependInt8Slot(0, outType, 0) +def AddOutType(builder, outType): + return ShapeOptionsAddOutType(builder, outType) +def ShapeOptionsEnd(builder): return builder.EndObject() +def End(builder): + return ShapeOptionsEnd(builder) + +class ShapeOptionsT(object): + + # ShapeOptionsT + def __init__(self): + self.outType = 0 # type: int + + @classmethod + def InitFromBuf(cls, buf, pos): + shapeOptions = ShapeOptions() + shapeOptions.Init(buf, pos) + return cls.InitFromObj(shapeOptions) + + @classmethod + def InitFromObj(cls, shapeOptions): + x = ShapeOptionsT() + x._UnPack(shapeOptions) + return x + + # ShapeOptionsT + def _UnPack(self, shapeOptions): + if shapeOptions is None: + return + self.outType = shapeOptions.OutType() + + # ShapeOptionsT + def Pack(self, builder): + ShapeOptionsStart(builder) + ShapeOptionsAddOutType(builder, self.outType) + shapeOptions = ShapeOptionsEnd(builder) + return shapeOptions +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: tflite + +from flatbuffers.compat import import_numpy +np = import_numpy() + +class SignOptions(object): + __slots__ = ['_tab'] + + @classmethod + def GetRootAs(cls, buf, offset=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, 
offset) + x = SignOptions() + x.Init(buf, n + offset) + return x + + @classmethod + def GetRootAsSignOptions(cls, buf, offset=0): + """This method is deprecated. Please switch to GetRootAs.""" + return cls.GetRootAs(buf, offset) + @classmethod + def SignOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed) + + # SignOptions + def Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + +def SignOptionsStart(builder): builder.StartObject(0) +def Start(builder): + return SignOptionsStart(builder) +def SignOptionsEnd(builder): return builder.EndObject() +def End(builder): + return SignOptionsEnd(builder) + +class SignOptionsT(object): + + # SignOptionsT + def __init__(self): + pass + + @classmethod + def InitFromBuf(cls, buf, pos): + signOptions = SignOptions() + signOptions.Init(buf, pos) + return cls.InitFromObj(signOptions) + + @classmethod + def InitFromObj(cls, signOptions): + x = SignOptionsT() + x._UnPack(signOptions) + return x + + # SignOptionsT + def _UnPack(self, signOptions): + if signOptions is None: + return + + # SignOptionsT + def Pack(self, builder): + SignOptionsStart(builder) + signOptions = SignOptionsEnd(builder) + return signOptions +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: tflite + +from flatbuffers.compat import import_numpy +np = import_numpy() + +class SignatureDef(object): + __slots__ = ['_tab'] + + @classmethod + def GetRootAs(cls, buf, offset=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = SignatureDef() + x.Init(buf, n + offset) + return x + + @classmethod + def GetRootAsSignatureDef(cls, buf, offset=0): + """This method is deprecated. 
Please switch to GetRootAs.""" + return cls.GetRootAs(buf, offset) + @classmethod + def SignatureDefBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed) + + # SignatureDef + def Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + + # SignatureDef + def Inputs(self, j): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + x = self._tab.Vector(o) + x += flatbuffers.number_types.UOffsetTFlags.py_type(j) * 4 + x = self._tab.Indirect(x) + obj = TensorMap() + obj.Init(self._tab.Bytes, x) + return obj + return None + + # SignatureDef + def InputsLength(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.VectorLen(o) + return 0 + + # SignatureDef + def InputsIsNone(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + return o == 0 + + # SignatureDef + def Outputs(self, j): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) + if o != 0: + x = self._tab.Vector(o) + x += flatbuffers.number_types.UOffsetTFlags.py_type(j) * 4 + x = self._tab.Indirect(x) + obj = TensorMap() + obj.Init(self._tab.Bytes, x) + return obj + return None + + # SignatureDef + def OutputsLength(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) + if o != 0: + return self._tab.VectorLen(o) + return 0 + + # SignatureDef + def OutputsIsNone(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) + return o == 0 + + # SignatureDef + def SignatureKey(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8)) + if o != 0: + return self._tab.String(o + self._tab.Pos) + return None + + # SignatureDef + def SubgraphIndex(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(12)) + if o != 0: + return 
self._tab.Get(flatbuffers.number_types.Uint32Flags, o + self._tab.Pos) + return 0 + +def SignatureDefStart(builder): builder.StartObject(5) +def Start(builder): + return SignatureDefStart(builder) +def SignatureDefAddInputs(builder, inputs): builder.PrependUOffsetTRelativeSlot(0, flatbuffers.number_types.UOffsetTFlags.py_type(inputs), 0) +def AddInputs(builder, inputs): + return SignatureDefAddInputs(builder, inputs) +def SignatureDefStartInputsVector(builder, numElems): return builder.StartVector(4, numElems, 4) +def StartInputsVector(builder, numElems): + return SignatureDefStartInputsVector(builder, numElems) +def SignatureDefAddOutputs(builder, outputs): builder.PrependUOffsetTRelativeSlot(1, flatbuffers.number_types.UOffsetTFlags.py_type(outputs), 0) +def AddOutputs(builder, outputs): + return SignatureDefAddOutputs(builder, outputs) +def SignatureDefStartOutputsVector(builder, numElems): return builder.StartVector(4, numElems, 4) +def StartOutputsVector(builder, numElems): + return SignatureDefStartOutputsVector(builder, numElems) +def SignatureDefAddSignatureKey(builder, signatureKey): builder.PrependUOffsetTRelativeSlot(2, flatbuffers.number_types.UOffsetTFlags.py_type(signatureKey), 0) +def AddSignatureKey(builder, signatureKey): + return SignatureDefAddSignatureKey(builder, signatureKey) +def SignatureDefAddSubgraphIndex(builder, subgraphIndex): builder.PrependUint32Slot(4, subgraphIndex, 0) +def AddSubgraphIndex(builder, subgraphIndex): + return SignatureDefAddSubgraphIndex(builder, subgraphIndex) +def SignatureDefEnd(builder): return builder.EndObject() +def End(builder): + return SignatureDefEnd(builder) +try: + from typing import List +except: + pass + +class SignatureDefT(object): + + # SignatureDefT + def __init__(self): + self.inputs = None # type: List[TensorMapT] + self.outputs = None # type: List[TensorMapT] + self.signatureKey = None # type: str + self.subgraphIndex = 0 # type: int + + @classmethod + def InitFromBuf(cls, buf, pos): + 
signatureDef = SignatureDef() + signatureDef.Init(buf, pos) + return cls.InitFromObj(signatureDef) + + @classmethod + def InitFromObj(cls, signatureDef): + x = SignatureDefT() + x._UnPack(signatureDef) + return x + + # SignatureDefT + def _UnPack(self, signatureDef): + if signatureDef is None: + return + if not signatureDef.InputsIsNone(): + self.inputs = [] + for i in range(signatureDef.InputsLength()): + if signatureDef.Inputs(i) is None: + self.inputs.append(None) + else: + tensorMap_ = TensorMapT.InitFromObj(signatureDef.Inputs(i)) + self.inputs.append(tensorMap_) + if not signatureDef.OutputsIsNone(): + self.outputs = [] + for i in range(signatureDef.OutputsLength()): + if signatureDef.Outputs(i) is None: + self.outputs.append(None) + else: + tensorMap_ = TensorMapT.InitFromObj(signatureDef.Outputs(i)) + self.outputs.append(tensorMap_) + self.signatureKey = signatureDef.SignatureKey() + self.subgraphIndex = signatureDef.SubgraphIndex() + + # SignatureDefT + def Pack(self, builder): + if self.inputs is not None: + inputslist = [] + for i in range(len(self.inputs)): + inputslist.append(self.inputs[i].Pack(builder)) + SignatureDefStartInputsVector(builder, len(self.inputs)) + for i in reversed(range(len(self.inputs))): + builder.PrependUOffsetTRelative(inputslist[i]) + inputs = builder.EndVector() + if self.outputs is not None: + outputslist = [] + for i in range(len(self.outputs)): + outputslist.append(self.outputs[i].Pack(builder)) + SignatureDefStartOutputsVector(builder, len(self.outputs)) + for i in reversed(range(len(self.outputs))): + builder.PrependUOffsetTRelative(outputslist[i]) + outputs = builder.EndVector() + if self.signatureKey is not None: + signatureKey = builder.CreateString(self.signatureKey) + SignatureDefStart(builder) + if self.inputs is not None: + SignatureDefAddInputs(builder, inputs) + if self.outputs is not None: + SignatureDefAddOutputs(builder, outputs) + if self.signatureKey is not None: + SignatureDefAddSignatureKey(builder, 
signatureKey) + SignatureDefAddSubgraphIndex(builder, self.subgraphIndex) + signatureDef = SignatureDefEnd(builder) + return signatureDef +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: tflite + +from flatbuffers.compat import import_numpy +np = import_numpy() + +class SkipGramOptions(object): + __slots__ = ['_tab'] + + @classmethod + def GetRootAs(cls, buf, offset=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = SkipGramOptions() + x.Init(buf, n + offset) + return x + + @classmethod + def GetRootAsSkipGramOptions(cls, buf, offset=0): + """This method is deprecated. Please switch to GetRootAs.""" + return cls.GetRootAs(buf, offset) + @classmethod + def SkipGramOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed) + + # SkipGramOptions + def Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + + # SkipGramOptions + def NgramSize(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.Get(flatbuffers.number_types.Int32Flags, o + self._tab.Pos) + return 0 + + # SkipGramOptions + def MaxSkipSize(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) + if o != 0: + return self._tab.Get(flatbuffers.number_types.Int32Flags, o + self._tab.Pos) + return 0 + + # SkipGramOptions + def IncludeAllNgrams(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8)) + if o != 0: + return bool(self._tab.Get(flatbuffers.number_types.BoolFlags, o + self._tab.Pos)) + return False + +def SkipGramOptionsStart(builder): builder.StartObject(3) +def Start(builder): + return SkipGramOptionsStart(builder) +def SkipGramOptionsAddNgramSize(builder, ngramSize): builder.PrependInt32Slot(0, ngramSize, 0) +def AddNgramSize(builder, ngramSize): + return SkipGramOptionsAddNgramSize(builder, 
ngramSize) +def SkipGramOptionsAddMaxSkipSize(builder, maxSkipSize): builder.PrependInt32Slot(1, maxSkipSize, 0) +def AddMaxSkipSize(builder, maxSkipSize): + return SkipGramOptionsAddMaxSkipSize(builder, maxSkipSize) +def SkipGramOptionsAddIncludeAllNgrams(builder, includeAllNgrams): builder.PrependBoolSlot(2, includeAllNgrams, 0) +def AddIncludeAllNgrams(builder, includeAllNgrams): + return SkipGramOptionsAddIncludeAllNgrams(builder, includeAllNgrams) +def SkipGramOptionsEnd(builder): return builder.EndObject() +def End(builder): + return SkipGramOptionsEnd(builder) + +class SkipGramOptionsT(object): + + # SkipGramOptionsT + def __init__(self): + self.ngramSize = 0 # type: int + self.maxSkipSize = 0 # type: int + self.includeAllNgrams = False # type: bool + + @classmethod + def InitFromBuf(cls, buf, pos): + skipGramOptions = SkipGramOptions() + skipGramOptions.Init(buf, pos) + return cls.InitFromObj(skipGramOptions) + + @classmethod + def InitFromObj(cls, skipGramOptions): + x = SkipGramOptionsT() + x._UnPack(skipGramOptions) + return x + + # SkipGramOptionsT + def _UnPack(self, skipGramOptions): + if skipGramOptions is None: + return + self.ngramSize = skipGramOptions.NgramSize() + self.maxSkipSize = skipGramOptions.MaxSkipSize() + self.includeAllNgrams = skipGramOptions.IncludeAllNgrams() + + # SkipGramOptionsT + def Pack(self, builder): + SkipGramOptionsStart(builder) + SkipGramOptionsAddNgramSize(builder, self.ngramSize) + SkipGramOptionsAddMaxSkipSize(builder, self.maxSkipSize) + SkipGramOptionsAddIncludeAllNgrams(builder, self.includeAllNgrams) + skipGramOptions = SkipGramOptionsEnd(builder) + return skipGramOptions +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: tflite + +from flatbuffers.compat import import_numpy +np = import_numpy() + +class SliceOptions(object): + __slots__ = ['_tab'] + + @classmethod + def GetRootAs(cls, buf, offset=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = 
SliceOptions() + x.Init(buf, n + offset) + return x + + @classmethod + def GetRootAsSliceOptions(cls, buf, offset=0): + """This method is deprecated. Please switch to GetRootAs.""" + return cls.GetRootAs(buf, offset) + @classmethod + def SliceOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed) + + # SliceOptions + def Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + +def SliceOptionsStart(builder): builder.StartObject(0) +def Start(builder): + return SliceOptionsStart(builder) +def SliceOptionsEnd(builder): return builder.EndObject() +def End(builder): + return SliceOptionsEnd(builder) + +class SliceOptionsT(object): + + # SliceOptionsT + def __init__(self): + pass + + @classmethod + def InitFromBuf(cls, buf, pos): + sliceOptions = SliceOptions() + sliceOptions.Init(buf, pos) + return cls.InitFromObj(sliceOptions) + + @classmethod + def InitFromObj(cls, sliceOptions): + x = SliceOptionsT() + x._UnPack(sliceOptions) + return x + + # SliceOptionsT + def _UnPack(self, sliceOptions): + if sliceOptions is None: + return + + # SliceOptionsT + def Pack(self, builder): + SliceOptionsStart(builder) + sliceOptions = SliceOptionsEnd(builder) + return sliceOptions +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: tflite + +from flatbuffers.compat import import_numpy +np = import_numpy() + +class SoftmaxOptions(object): + __slots__ = ['_tab'] + + @classmethod + def GetRootAs(cls, buf, offset=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = SoftmaxOptions() + x.Init(buf, n + offset) + return x + + @classmethod + def GetRootAsSoftmaxOptions(cls, buf, offset=0): + """This method is deprecated. 
Please switch to GetRootAs.""" + return cls.GetRootAs(buf, offset) + @classmethod + def SoftmaxOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed) + + # SoftmaxOptions + def Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + + # SoftmaxOptions + def Beta(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.Get(flatbuffers.number_types.Float32Flags, o + self._tab.Pos) + return 0.0 + +def SoftmaxOptionsStart(builder): builder.StartObject(1) +def Start(builder): + return SoftmaxOptionsStart(builder) +def SoftmaxOptionsAddBeta(builder, beta): builder.PrependFloat32Slot(0, beta, 0.0) +def AddBeta(builder, beta): + return SoftmaxOptionsAddBeta(builder, beta) +def SoftmaxOptionsEnd(builder): return builder.EndObject() +def End(builder): + return SoftmaxOptionsEnd(builder) + +class SoftmaxOptionsT(object): + + # SoftmaxOptionsT + def __init__(self): + self.beta = 0.0 # type: float + + @classmethod + def InitFromBuf(cls, buf, pos): + softmaxOptions = SoftmaxOptions() + softmaxOptions.Init(buf, pos) + return cls.InitFromObj(softmaxOptions) + + @classmethod + def InitFromObj(cls, softmaxOptions): + x = SoftmaxOptionsT() + x._UnPack(softmaxOptions) + return x + + # SoftmaxOptionsT + def _UnPack(self, softmaxOptions): + if softmaxOptions is None: + return + self.beta = softmaxOptions.Beta() + + # SoftmaxOptionsT + def Pack(self, builder): + SoftmaxOptionsStart(builder) + SoftmaxOptionsAddBeta(builder, self.beta) + softmaxOptions = SoftmaxOptionsEnd(builder) + return softmaxOptions +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: tflite + +from flatbuffers.compat import import_numpy +np = import_numpy() + +class SpaceToBatchNDOptions(object): + __slots__ = ['_tab'] + + @classmethod + def GetRootAs(cls, buf, offset=0): + n = 
flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = SpaceToBatchNDOptions() + x.Init(buf, n + offset) + return x + + @classmethod + def GetRootAsSpaceToBatchNDOptions(cls, buf, offset=0): + """This method is deprecated. Please switch to GetRootAs.""" + return cls.GetRootAs(buf, offset) + @classmethod + def SpaceToBatchNDOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed) + + # SpaceToBatchNDOptions + def Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + +def SpaceToBatchNDOptionsStart(builder): builder.StartObject(0) +def Start(builder): + return SpaceToBatchNDOptionsStart(builder) +def SpaceToBatchNDOptionsEnd(builder): return builder.EndObject() +def End(builder): + return SpaceToBatchNDOptionsEnd(builder) + +class SpaceToBatchNDOptionsT(object): + + # SpaceToBatchNDOptionsT + def __init__(self): + pass + + @classmethod + def InitFromBuf(cls, buf, pos): + spaceToBatchNdoptions = SpaceToBatchNDOptions() + spaceToBatchNdoptions.Init(buf, pos) + return cls.InitFromObj(spaceToBatchNdoptions) + + @classmethod + def InitFromObj(cls, spaceToBatchNdoptions): + x = SpaceToBatchNDOptionsT() + x._UnPack(spaceToBatchNdoptions) + return x + + # SpaceToBatchNDOptionsT + def _UnPack(self, spaceToBatchNdoptions): + if spaceToBatchNdoptions is None: + return + + # SpaceToBatchNDOptionsT + def Pack(self, builder): + SpaceToBatchNDOptionsStart(builder) + spaceToBatchNdoptions = SpaceToBatchNDOptionsEnd(builder) + return spaceToBatchNdoptions +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: tflite + +from flatbuffers.compat import import_numpy +np = import_numpy() + +class SpaceToDepthOptions(object): + __slots__ = ['_tab'] + + @classmethod + def GetRootAs(cls, buf, offset=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = SpaceToDepthOptions() + 
x.Init(buf, n + offset) + return x + + @classmethod + def GetRootAsSpaceToDepthOptions(cls, buf, offset=0): + """This method is deprecated. Please switch to GetRootAs.""" + return cls.GetRootAs(buf, offset) + @classmethod + def SpaceToDepthOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed) + + # SpaceToDepthOptions + def Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + + # SpaceToDepthOptions + def BlockSize(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.Get(flatbuffers.number_types.Int32Flags, o + self._tab.Pos) + return 0 + +def SpaceToDepthOptionsStart(builder): builder.StartObject(1) +def Start(builder): + return SpaceToDepthOptionsStart(builder) +def SpaceToDepthOptionsAddBlockSize(builder, blockSize): builder.PrependInt32Slot(0, blockSize, 0) +def AddBlockSize(builder, blockSize): + return SpaceToDepthOptionsAddBlockSize(builder, blockSize) +def SpaceToDepthOptionsEnd(builder): return builder.EndObject() +def End(builder): + return SpaceToDepthOptionsEnd(builder) + +class SpaceToDepthOptionsT(object): + + # SpaceToDepthOptionsT + def __init__(self): + self.blockSize = 0 # type: int + + @classmethod + def InitFromBuf(cls, buf, pos): + spaceToDepthOptions = SpaceToDepthOptions() + spaceToDepthOptions.Init(buf, pos) + return cls.InitFromObj(spaceToDepthOptions) + + @classmethod + def InitFromObj(cls, spaceToDepthOptions): + x = SpaceToDepthOptionsT() + x._UnPack(spaceToDepthOptions) + return x + + # SpaceToDepthOptionsT + def _UnPack(self, spaceToDepthOptions): + if spaceToDepthOptions is None: + return + self.blockSize = spaceToDepthOptions.BlockSize() + + # SpaceToDepthOptionsT + def Pack(self, builder): + SpaceToDepthOptionsStart(builder) + SpaceToDepthOptionsAddBlockSize(builder, self.blockSize) + spaceToDepthOptions = 
SpaceToDepthOptionsEnd(builder) + return spaceToDepthOptions +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: tflite + +class SparseIndexVector(object): + NONE = 0 + Int32Vector = 1 + Uint16Vector = 2 + Uint8Vector = 3 + +def SparseIndexVectorCreator(unionType, table): + from flatbuffers.table import Table + if not isinstance(table, Table): + return None + if unionType == SparseIndexVector().Int32Vector: + return Int32VectorT.InitFromBuf(table.Bytes, table.Pos) + if unionType == SparseIndexVector().Uint16Vector: + return Uint16VectorT.InitFromBuf(table.Bytes, table.Pos) + if unionType == SparseIndexVector().Uint8Vector: + return Uint8VectorT.InitFromBuf(table.Bytes, table.Pos) + return None +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: tflite + +from flatbuffers.compat import import_numpy +np = import_numpy() + +class SparseToDenseOptions(object): + __slots__ = ['_tab'] + + @classmethod + def GetRootAs(cls, buf, offset=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = SparseToDenseOptions() + x.Init(buf, n + offset) + return x + + @classmethod + def GetRootAsSparseToDenseOptions(cls, buf, offset=0): + """This method is deprecated. 
Please switch to GetRootAs.""" + return cls.GetRootAs(buf, offset) + @classmethod + def SparseToDenseOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed) + + # SparseToDenseOptions + def Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + + # SparseToDenseOptions + def ValidateIndices(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return bool(self._tab.Get(flatbuffers.number_types.BoolFlags, o + self._tab.Pos)) + return False + +def SparseToDenseOptionsStart(builder): builder.StartObject(1) +def Start(builder): + return SparseToDenseOptionsStart(builder) +def SparseToDenseOptionsAddValidateIndices(builder, validateIndices): builder.PrependBoolSlot(0, validateIndices, 0) +def AddValidateIndices(builder, validateIndices): + return SparseToDenseOptionsAddValidateIndices(builder, validateIndices) +def SparseToDenseOptionsEnd(builder): return builder.EndObject() +def End(builder): + return SparseToDenseOptionsEnd(builder) + +class SparseToDenseOptionsT(object): + + # SparseToDenseOptionsT + def __init__(self): + self.validateIndices = False # type: bool + + @classmethod + def InitFromBuf(cls, buf, pos): + sparseToDenseOptions = SparseToDenseOptions() + sparseToDenseOptions.Init(buf, pos) + return cls.InitFromObj(sparseToDenseOptions) + + @classmethod + def InitFromObj(cls, sparseToDenseOptions): + x = SparseToDenseOptionsT() + x._UnPack(sparseToDenseOptions) + return x + + # SparseToDenseOptionsT + def _UnPack(self, sparseToDenseOptions): + if sparseToDenseOptions is None: + return + self.validateIndices = sparseToDenseOptions.ValidateIndices() + + # SparseToDenseOptionsT + def Pack(self, builder): + SparseToDenseOptionsStart(builder) + SparseToDenseOptionsAddValidateIndices(builder, self.validateIndices) + sparseToDenseOptions = SparseToDenseOptionsEnd(builder) + return 
sparseToDenseOptions +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: tflite + +from flatbuffers.compat import import_numpy +np = import_numpy() + +class SparsityParameters(object): + __slots__ = ['_tab'] + + @classmethod + def GetRootAs(cls, buf, offset=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = SparsityParameters() + x.Init(buf, n + offset) + return x + + @classmethod + def GetRootAsSparsityParameters(cls, buf, offset=0): + """This method is deprecated. Please switch to GetRootAs.""" + return cls.GetRootAs(buf, offset) + @classmethod + def SparsityParametersBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed) + + # SparsityParameters + def Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + + # SparsityParameters + def TraversalOrder(self, j): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + a = self._tab.Vector(o) + return self._tab.Get(flatbuffers.number_types.Int32Flags, a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 4)) + return 0 + + # SparsityParameters + def TraversalOrderAsNumpy(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Int32Flags, o) + return 0 + + # SparsityParameters + def TraversalOrderLength(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.VectorLen(o) + return 0 + + # SparsityParameters + def TraversalOrderIsNone(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + return o == 0 + + # SparsityParameters + def BlockMap(self, j): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) + if o != 0: + a = self._tab.Vector(o) + return 
self._tab.Get(flatbuffers.number_types.Int32Flags, a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 4)) + return 0 + + # SparsityParameters + def BlockMapAsNumpy(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) + if o != 0: + return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Int32Flags, o) + return 0 + + # SparsityParameters + def BlockMapLength(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) + if o != 0: + return self._tab.VectorLen(o) + return 0 + + # SparsityParameters + def BlockMapIsNone(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) + return o == 0 + + # SparsityParameters + def DimMetadata(self, j): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8)) + if o != 0: + x = self._tab.Vector(o) + x += flatbuffers.number_types.UOffsetTFlags.py_type(j) * 4 + x = self._tab.Indirect(x) + obj = DimensionMetadata() + obj.Init(self._tab.Bytes, x) + return obj + return None + + # SparsityParameters + def DimMetadataLength(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8)) + if o != 0: + return self._tab.VectorLen(o) + return 0 + + # SparsityParameters + def DimMetadataIsNone(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8)) + return o == 0 + +def SparsityParametersStart(builder): builder.StartObject(3) +def Start(builder): + return SparsityParametersStart(builder) +def SparsityParametersAddTraversalOrder(builder, traversalOrder): builder.PrependUOffsetTRelativeSlot(0, flatbuffers.number_types.UOffsetTFlags.py_type(traversalOrder), 0) +def AddTraversalOrder(builder, traversalOrder): + return SparsityParametersAddTraversalOrder(builder, traversalOrder) +def SparsityParametersStartTraversalOrderVector(builder, numElems): return builder.StartVector(4, numElems, 4) +def StartTraversalOrderVector(builder, numElems): + return SparsityParametersStartTraversalOrderVector(builder, 
numElems) +def SparsityParametersAddBlockMap(builder, blockMap): builder.PrependUOffsetTRelativeSlot(1, flatbuffers.number_types.UOffsetTFlags.py_type(blockMap), 0) +def AddBlockMap(builder, blockMap): + return SparsityParametersAddBlockMap(builder, blockMap) +def SparsityParametersStartBlockMapVector(builder, numElems): return builder.StartVector(4, numElems, 4) +def StartBlockMapVector(builder, numElems): + return SparsityParametersStartBlockMapVector(builder, numElems) +def SparsityParametersAddDimMetadata(builder, dimMetadata): builder.PrependUOffsetTRelativeSlot(2, flatbuffers.number_types.UOffsetTFlags.py_type(dimMetadata), 0) +def AddDimMetadata(builder, dimMetadata): + return SparsityParametersAddDimMetadata(builder, dimMetadata) +def SparsityParametersStartDimMetadataVector(builder, numElems): return builder.StartVector(4, numElems, 4) +def StartDimMetadataVector(builder, numElems): + return SparsityParametersStartDimMetadataVector(builder, numElems) +def SparsityParametersEnd(builder): return builder.EndObject() +def End(builder): + return SparsityParametersEnd(builder) +try: + from typing import List +except: + pass + +class SparsityParametersT(object): + + # SparsityParametersT + def __init__(self): + self.traversalOrder = None # type: List[int] + self.blockMap = None # type: List[int] + self.dimMetadata = None # type: List[DimensionMetadataT] + + @classmethod + def InitFromBuf(cls, buf, pos): + sparsityParameters = SparsityParameters() + sparsityParameters.Init(buf, pos) + return cls.InitFromObj(sparsityParameters) + + @classmethod + def InitFromObj(cls, sparsityParameters): + x = SparsityParametersT() + x._UnPack(sparsityParameters) + return x + + # SparsityParametersT + def _UnPack(self, sparsityParameters): + if sparsityParameters is None: + return + if not sparsityParameters.TraversalOrderIsNone(): + if np is None: + self.traversalOrder = [] + for i in range(sparsityParameters.TraversalOrderLength()): + 
self.traversalOrder.append(sparsityParameters.TraversalOrder(i)) + else: + self.traversalOrder = sparsityParameters.TraversalOrderAsNumpy() + if not sparsityParameters.BlockMapIsNone(): + if np is None: + self.blockMap = [] + for i in range(sparsityParameters.BlockMapLength()): + self.blockMap.append(sparsityParameters.BlockMap(i)) + else: + self.blockMap = sparsityParameters.BlockMapAsNumpy() + if not sparsityParameters.DimMetadataIsNone(): + self.dimMetadata = [] + for i in range(sparsityParameters.DimMetadataLength()): + if sparsityParameters.DimMetadata(i) is None: + self.dimMetadata.append(None) + else: + dimensionMetadata_ = DimensionMetadataT.InitFromObj(sparsityParameters.DimMetadata(i)) + self.dimMetadata.append(dimensionMetadata_) + + # SparsityParametersT + def Pack(self, builder): + if self.traversalOrder is not None: + if np is not None and type(self.traversalOrder) is np.ndarray: + traversalOrder = builder.CreateNumpyVector(self.traversalOrder) + else: + SparsityParametersStartTraversalOrderVector(builder, len(self.traversalOrder)) + for i in reversed(range(len(self.traversalOrder))): + builder.PrependInt32(self.traversalOrder[i]) + traversalOrder = builder.EndVector() + if self.blockMap is not None: + if np is not None and type(self.blockMap) is np.ndarray: + blockMap = builder.CreateNumpyVector(self.blockMap) + else: + SparsityParametersStartBlockMapVector(builder, len(self.blockMap)) + for i in reversed(range(len(self.blockMap))): + builder.PrependInt32(self.blockMap[i]) + blockMap = builder.EndVector() + if self.dimMetadata is not None: + dimMetadatalist = [] + for i in range(len(self.dimMetadata)): + dimMetadatalist.append(self.dimMetadata[i].Pack(builder)) + SparsityParametersStartDimMetadataVector(builder, len(self.dimMetadata)) + for i in reversed(range(len(self.dimMetadata))): + builder.PrependUOffsetTRelative(dimMetadatalist[i]) + dimMetadata = builder.EndVector() + SparsityParametersStart(builder) + if self.traversalOrder is not None: + 
SparsityParametersAddTraversalOrder(builder, traversalOrder) + if self.blockMap is not None: + SparsityParametersAddBlockMap(builder, blockMap) + if self.dimMetadata is not None: + SparsityParametersAddDimMetadata(builder, dimMetadata) + sparsityParameters = SparsityParametersEnd(builder) + return sparsityParameters +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: tflite + +from flatbuffers.compat import import_numpy +np = import_numpy() + +class SplitOptions(object): + __slots__ = ['_tab'] + + @classmethod + def GetRootAs(cls, buf, offset=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = SplitOptions() + x.Init(buf, n + offset) + return x + + @classmethod + def GetRootAsSplitOptions(cls, buf, offset=0): + """This method is deprecated. Please switch to GetRootAs.""" + return cls.GetRootAs(buf, offset) + @classmethod + def SplitOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed) + + # SplitOptions + def Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + + # SplitOptions + def NumSplits(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.Get(flatbuffers.number_types.Int32Flags, o + self._tab.Pos) + return 0 + +def SplitOptionsStart(builder): builder.StartObject(1) +def Start(builder): + return SplitOptionsStart(builder) +def SplitOptionsAddNumSplits(builder, numSplits): builder.PrependInt32Slot(0, numSplits, 0) +def AddNumSplits(builder, numSplits): + return SplitOptionsAddNumSplits(builder, numSplits) +def SplitOptionsEnd(builder): return builder.EndObject() +def End(builder): + return SplitOptionsEnd(builder) + +class SplitOptionsT(object): + + # SplitOptionsT + def __init__(self): + self.numSplits = 0 # type: int + + @classmethod + def InitFromBuf(cls, buf, pos): + splitOptions = SplitOptions() + 
splitOptions.Init(buf, pos) + return cls.InitFromObj(splitOptions) + + @classmethod + def InitFromObj(cls, splitOptions): + x = SplitOptionsT() + x._UnPack(splitOptions) + return x + + # SplitOptionsT + def _UnPack(self, splitOptions): + if splitOptions is None: + return + self.numSplits = splitOptions.NumSplits() + + # SplitOptionsT + def Pack(self, builder): + SplitOptionsStart(builder) + SplitOptionsAddNumSplits(builder, self.numSplits) + splitOptions = SplitOptionsEnd(builder) + return splitOptions +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: tflite + +from flatbuffers.compat import import_numpy +np = import_numpy() + +class SplitVOptions(object): + __slots__ = ['_tab'] + + @classmethod + def GetRootAs(cls, buf, offset=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = SplitVOptions() + x.Init(buf, n + offset) + return x + + @classmethod + def GetRootAsSplitVOptions(cls, buf, offset=0): + """This method is deprecated. 
Please switch to GetRootAs.""" + return cls.GetRootAs(buf, offset) + @classmethod + def SplitVOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed) + + # SplitVOptions + def Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + + # SplitVOptions + def NumSplits(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.Get(flatbuffers.number_types.Int32Flags, o + self._tab.Pos) + return 0 + +def SplitVOptionsStart(builder): builder.StartObject(1) +def Start(builder): + return SplitVOptionsStart(builder) +def SplitVOptionsAddNumSplits(builder, numSplits): builder.PrependInt32Slot(0, numSplits, 0) +def AddNumSplits(builder, numSplits): + return SplitVOptionsAddNumSplits(builder, numSplits) +def SplitVOptionsEnd(builder): return builder.EndObject() +def End(builder): + return SplitVOptionsEnd(builder) + +class SplitVOptionsT(object): + + # SplitVOptionsT + def __init__(self): + self.numSplits = 0 # type: int + + @classmethod + def InitFromBuf(cls, buf, pos): + splitVoptions = SplitVOptions() + splitVoptions.Init(buf, pos) + return cls.InitFromObj(splitVoptions) + + @classmethod + def InitFromObj(cls, splitVoptions): + x = SplitVOptionsT() + x._UnPack(splitVoptions) + return x + + # SplitVOptionsT + def _UnPack(self, splitVoptions): + if splitVoptions is None: + return + self.numSplits = splitVoptions.NumSplits() + + # SplitVOptionsT + def Pack(self, builder): + SplitVOptionsStart(builder) + SplitVOptionsAddNumSplits(builder, self.numSplits) + splitVoptions = SplitVOptionsEnd(builder) + return splitVoptions +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: tflite + +from flatbuffers.compat import import_numpy +np = import_numpy() + +class SquareOptions(object): + __slots__ = ['_tab'] + + @classmethod + def GetRootAs(cls, buf, offset=0): + n = 
flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = SquareOptions() + x.Init(buf, n + offset) + return x + + @classmethod + def GetRootAsSquareOptions(cls, buf, offset=0): + """This method is deprecated. Please switch to GetRootAs.""" + return cls.GetRootAs(buf, offset) + @classmethod + def SquareOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed) + + # SquareOptions + def Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + +def SquareOptionsStart(builder): builder.StartObject(0) +def Start(builder): + return SquareOptionsStart(builder) +def SquareOptionsEnd(builder): return builder.EndObject() +def End(builder): + return SquareOptionsEnd(builder) + +class SquareOptionsT(object): + + # SquareOptionsT + def __init__(self): + pass + + @classmethod + def InitFromBuf(cls, buf, pos): + squareOptions = SquareOptions() + squareOptions.Init(buf, pos) + return cls.InitFromObj(squareOptions) + + @classmethod + def InitFromObj(cls, squareOptions): + x = SquareOptionsT() + x._UnPack(squareOptions) + return x + + # SquareOptionsT + def _UnPack(self, squareOptions): + if squareOptions is None: + return + + # SquareOptionsT + def Pack(self, builder): + SquareOptionsStart(builder) + squareOptions = SquareOptionsEnd(builder) + return squareOptions +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: tflite + +from flatbuffers.compat import import_numpy +np = import_numpy() + +class SquaredDifferenceOptions(object): + __slots__ = ['_tab'] + + @classmethod + def GetRootAs(cls, buf, offset=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = SquaredDifferenceOptions() + x.Init(buf, n + offset) + return x + + @classmethod + def GetRootAsSquaredDifferenceOptions(cls, buf, offset=0): + """This method is deprecated. 
Please switch to GetRootAs.""" + return cls.GetRootAs(buf, offset) + @classmethod + def SquaredDifferenceOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed) + + # SquaredDifferenceOptions + def Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + +def SquaredDifferenceOptionsStart(builder): builder.StartObject(0) +def Start(builder): + return SquaredDifferenceOptionsStart(builder) +def SquaredDifferenceOptionsEnd(builder): return builder.EndObject() +def End(builder): + return SquaredDifferenceOptionsEnd(builder) + +class SquaredDifferenceOptionsT(object): + + # SquaredDifferenceOptionsT + def __init__(self): + pass + + @classmethod + def InitFromBuf(cls, buf, pos): + squaredDifferenceOptions = SquaredDifferenceOptions() + squaredDifferenceOptions.Init(buf, pos) + return cls.InitFromObj(squaredDifferenceOptions) + + @classmethod + def InitFromObj(cls, squaredDifferenceOptions): + x = SquaredDifferenceOptionsT() + x._UnPack(squaredDifferenceOptions) + return x + + # SquaredDifferenceOptionsT + def _UnPack(self, squaredDifferenceOptions): + if squaredDifferenceOptions is None: + return + + # SquaredDifferenceOptionsT + def Pack(self, builder): + SquaredDifferenceOptionsStart(builder) + squaredDifferenceOptions = SquaredDifferenceOptionsEnd(builder) + return squaredDifferenceOptions +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: tflite + +from flatbuffers.compat import import_numpy +np = import_numpy() + +class SqueezeOptions(object): + __slots__ = ['_tab'] + + @classmethod + def GetRootAs(cls, buf, offset=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = SqueezeOptions() + x.Init(buf, n + offset) + return x + + @classmethod + def GetRootAsSqueezeOptions(cls, buf, offset=0): + """This method is deprecated. 
Please switch to GetRootAs.""" + return cls.GetRootAs(buf, offset) + @classmethod + def SqueezeOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed) + + # SqueezeOptions + def Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + + # SqueezeOptions + def SqueezeDims(self, j): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + a = self._tab.Vector(o) + return self._tab.Get(flatbuffers.number_types.Int32Flags, a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 4)) + return 0 + + # SqueezeOptions + def SqueezeDimsAsNumpy(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Int32Flags, o) + return 0 + + # SqueezeOptions + def SqueezeDimsLength(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.VectorLen(o) + return 0 + + # SqueezeOptions + def SqueezeDimsIsNone(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + return o == 0 + +def SqueezeOptionsStart(builder): builder.StartObject(1) +def Start(builder): + return SqueezeOptionsStart(builder) +def SqueezeOptionsAddSqueezeDims(builder, squeezeDims): builder.PrependUOffsetTRelativeSlot(0, flatbuffers.number_types.UOffsetTFlags.py_type(squeezeDims), 0) +def AddSqueezeDims(builder, squeezeDims): + return SqueezeOptionsAddSqueezeDims(builder, squeezeDims) +def SqueezeOptionsStartSqueezeDimsVector(builder, numElems): return builder.StartVector(4, numElems, 4) +def StartSqueezeDimsVector(builder, numElems): + return SqueezeOptionsStartSqueezeDimsVector(builder, numElems) +def SqueezeOptionsEnd(builder): return builder.EndObject() +def End(builder): + return SqueezeOptionsEnd(builder) +try: + from typing import List +except: + pass + +class 
SqueezeOptionsT(object): + + # SqueezeOptionsT + def __init__(self): + self.squeezeDims = None # type: List[int] + + @classmethod + def InitFromBuf(cls, buf, pos): + squeezeOptions = SqueezeOptions() + squeezeOptions.Init(buf, pos) + return cls.InitFromObj(squeezeOptions) + + @classmethod + def InitFromObj(cls, squeezeOptions): + x = SqueezeOptionsT() + x._UnPack(squeezeOptions) + return x + + # SqueezeOptionsT + def _UnPack(self, squeezeOptions): + if squeezeOptions is None: + return + if not squeezeOptions.SqueezeDimsIsNone(): + if np is None: + self.squeezeDims = [] + for i in range(squeezeOptions.SqueezeDimsLength()): + self.squeezeDims.append(squeezeOptions.SqueezeDims(i)) + else: + self.squeezeDims = squeezeOptions.SqueezeDimsAsNumpy() + + # SqueezeOptionsT + def Pack(self, builder): + if self.squeezeDims is not None: + if np is not None and type(self.squeezeDims) is np.ndarray: + squeezeDims = builder.CreateNumpyVector(self.squeezeDims) + else: + SqueezeOptionsStartSqueezeDimsVector(builder, len(self.squeezeDims)) + for i in reversed(range(len(self.squeezeDims))): + builder.PrependInt32(self.squeezeDims[i]) + squeezeDims = builder.EndVector() + SqueezeOptionsStart(builder) + if self.squeezeDims is not None: + SqueezeOptionsAddSqueezeDims(builder, squeezeDims) + squeezeOptions = SqueezeOptionsEnd(builder) + return squeezeOptions +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: tflite + +from flatbuffers.compat import import_numpy +np = import_numpy() + +class StridedSliceOptions(object): + __slots__ = ['_tab'] + + @classmethod + def GetRootAs(cls, buf, offset=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = StridedSliceOptions() + x.Init(buf, n + offset) + return x + + @classmethod + def GetRootAsStridedSliceOptions(cls, buf, offset=0): + """This method is deprecated. 
Please switch to GetRootAs.""" + return cls.GetRootAs(buf, offset) + @classmethod + def StridedSliceOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed) + + # StridedSliceOptions + def Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + + # StridedSliceOptions + def BeginMask(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.Get(flatbuffers.number_types.Int32Flags, o + self._tab.Pos) + return 0 + + # StridedSliceOptions + def EndMask(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) + if o != 0: + return self._tab.Get(flatbuffers.number_types.Int32Flags, o + self._tab.Pos) + return 0 + + # StridedSliceOptions + def EllipsisMask(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8)) + if o != 0: + return self._tab.Get(flatbuffers.number_types.Int32Flags, o + self._tab.Pos) + return 0 + + # StridedSliceOptions + def NewAxisMask(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(10)) + if o != 0: + return self._tab.Get(flatbuffers.number_types.Int32Flags, o + self._tab.Pos) + return 0 + + # StridedSliceOptions + def ShrinkAxisMask(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(12)) + if o != 0: + return self._tab.Get(flatbuffers.number_types.Int32Flags, o + self._tab.Pos) + return 0 + + # StridedSliceOptions + def Offset(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(14)) + if o != 0: + return bool(self._tab.Get(flatbuffers.number_types.BoolFlags, o + self._tab.Pos)) + return False + +def StridedSliceOptionsStart(builder): builder.StartObject(6) +def Start(builder): + return StridedSliceOptionsStart(builder) +def StridedSliceOptionsAddBeginMask(builder, beginMask): builder.PrependInt32Slot(0, beginMask, 0) +def 
AddBeginMask(builder, beginMask): + return StridedSliceOptionsAddBeginMask(builder, beginMask) +def StridedSliceOptionsAddEndMask(builder, endMask): builder.PrependInt32Slot(1, endMask, 0) +def AddEndMask(builder, endMask): + return StridedSliceOptionsAddEndMask(builder, endMask) +def StridedSliceOptionsAddEllipsisMask(builder, ellipsisMask): builder.PrependInt32Slot(2, ellipsisMask, 0) +def AddEllipsisMask(builder, ellipsisMask): + return StridedSliceOptionsAddEllipsisMask(builder, ellipsisMask) +def StridedSliceOptionsAddNewAxisMask(builder, newAxisMask): builder.PrependInt32Slot(3, newAxisMask, 0) +def AddNewAxisMask(builder, newAxisMask): + return StridedSliceOptionsAddNewAxisMask(builder, newAxisMask) +def StridedSliceOptionsAddShrinkAxisMask(builder, shrinkAxisMask): builder.PrependInt32Slot(4, shrinkAxisMask, 0) +def AddShrinkAxisMask(builder, shrinkAxisMask): + return StridedSliceOptionsAddShrinkAxisMask(builder, shrinkAxisMask) +def StridedSliceOptionsAddOffset(builder, offset): builder.PrependBoolSlot(5, offset, 0) +def AddOffset(builder, offset): + return StridedSliceOptionsAddOffset(builder, offset) +def StridedSliceOptionsEnd(builder): return builder.EndObject() +def End(builder): + return StridedSliceOptionsEnd(builder) + +class StridedSliceOptionsT(object): + + # StridedSliceOptionsT + def __init__(self): + self.beginMask = 0 # type: int + self.endMask = 0 # type: int + self.ellipsisMask = 0 # type: int + self.newAxisMask = 0 # type: int + self.shrinkAxisMask = 0 # type: int + self.offset = False # type: bool + + @classmethod + def InitFromBuf(cls, buf, pos): + stridedSliceOptions = StridedSliceOptions() + stridedSliceOptions.Init(buf, pos) + return cls.InitFromObj(stridedSliceOptions) + + @classmethod + def InitFromObj(cls, stridedSliceOptions): + x = StridedSliceOptionsT() + x._UnPack(stridedSliceOptions) + return x + + # StridedSliceOptionsT + def _UnPack(self, stridedSliceOptions): + if stridedSliceOptions is None: + return + self.beginMask = 
stridedSliceOptions.BeginMask() + self.endMask = stridedSliceOptions.EndMask() + self.ellipsisMask = stridedSliceOptions.EllipsisMask() + self.newAxisMask = stridedSliceOptions.NewAxisMask() + self.shrinkAxisMask = stridedSliceOptions.ShrinkAxisMask() + self.offset = stridedSliceOptions.Offset() + + # StridedSliceOptionsT + def Pack(self, builder): + StridedSliceOptionsStart(builder) + StridedSliceOptionsAddBeginMask(builder, self.beginMask) + StridedSliceOptionsAddEndMask(builder, self.endMask) + StridedSliceOptionsAddEllipsisMask(builder, self.ellipsisMask) + StridedSliceOptionsAddNewAxisMask(builder, self.newAxisMask) + StridedSliceOptionsAddShrinkAxisMask(builder, self.shrinkAxisMask) + StridedSliceOptionsAddOffset(builder, self.offset) + stridedSliceOptions = StridedSliceOptionsEnd(builder) + return stridedSliceOptions +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: tflite + +from flatbuffers.compat import import_numpy +np = import_numpy() + +class SubGraph(object): + __slots__ = ['_tab'] + + @classmethod + def GetRootAs(cls, buf, offset=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = SubGraph() + x.Init(buf, n + offset) + return x + + @classmethod + def GetRootAsSubGraph(cls, buf, offset=0): + """This method is deprecated. 
Please switch to GetRootAs.""" + return cls.GetRootAs(buf, offset) + @classmethod + def SubGraphBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed) + + # SubGraph + def Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + + # SubGraph + def Tensors(self, j): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + x = self._tab.Vector(o) + x += flatbuffers.number_types.UOffsetTFlags.py_type(j) * 4 + x = self._tab.Indirect(x) + obj = Tensor() + obj.Init(self._tab.Bytes, x) + return obj + return None + + # SubGraph + def TensorsLength(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.VectorLen(o) + return 0 + + # SubGraph + def TensorsIsNone(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + return o == 0 + + # SubGraph + def Inputs(self, j): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) + if o != 0: + a = self._tab.Vector(o) + return self._tab.Get(flatbuffers.number_types.Int32Flags, a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 4)) + return 0 + + # SubGraph + def InputsAsNumpy(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) + if o != 0: + return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Int32Flags, o) + return 0 + + # SubGraph + def InputsLength(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) + if o != 0: + return self._tab.VectorLen(o) + return 0 + + # SubGraph + def InputsIsNone(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) + return o == 0 + + # SubGraph + def Outputs(self, j): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8)) + if o != 0: + a = self._tab.Vector(o) + return self._tab.Get(flatbuffers.number_types.Int32Flags, a + 
flatbuffers.number_types.UOffsetTFlags.py_type(j * 4)) + return 0 + + # SubGraph + def OutputsAsNumpy(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8)) + if o != 0: + return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Int32Flags, o) + return 0 + + # SubGraph + def OutputsLength(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8)) + if o != 0: + return self._tab.VectorLen(o) + return 0 + + # SubGraph + def OutputsIsNone(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8)) + return o == 0 + + # SubGraph + def Operators(self, j): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(10)) + if o != 0: + x = self._tab.Vector(o) + x += flatbuffers.number_types.UOffsetTFlags.py_type(j) * 4 + x = self._tab.Indirect(x) + obj = Operator() + obj.Init(self._tab.Bytes, x) + return obj + return None + + # SubGraph + def OperatorsLength(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(10)) + if o != 0: + return self._tab.VectorLen(o) + return 0 + + # SubGraph + def OperatorsIsNone(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(10)) + return o == 0 + + # SubGraph + def Name(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(12)) + if o != 0: + return self._tab.String(o + self._tab.Pos) + return None + +def SubGraphStart(builder): builder.StartObject(5) +def Start(builder): + return SubGraphStart(builder) +def SubGraphAddTensors(builder, tensors): builder.PrependUOffsetTRelativeSlot(0, flatbuffers.number_types.UOffsetTFlags.py_type(tensors), 0) +def AddTensors(builder, tensors): + return SubGraphAddTensors(builder, tensors) +def SubGraphStartTensorsVector(builder, numElems): return builder.StartVector(4, numElems, 4) +def StartTensorsVector(builder, numElems): + return SubGraphStartTensorsVector(builder, numElems) +def SubGraphAddInputs(builder, inputs): 
builder.PrependUOffsetTRelativeSlot(1, flatbuffers.number_types.UOffsetTFlags.py_type(inputs), 0) +def AddInputs(builder, inputs): + return SubGraphAddInputs(builder, inputs) +def SubGraphStartInputsVector(builder, numElems): return builder.StartVector(4, numElems, 4) +def StartInputsVector(builder, numElems): + return SubGraphStartInputsVector(builder, numElems) +def SubGraphAddOutputs(builder, outputs): builder.PrependUOffsetTRelativeSlot(2, flatbuffers.number_types.UOffsetTFlags.py_type(outputs), 0) +def AddOutputs(builder, outputs): + return SubGraphAddOutputs(builder, outputs) +def SubGraphStartOutputsVector(builder, numElems): return builder.StartVector(4, numElems, 4) +def StartOutputsVector(builder, numElems): + return SubGraphStartOutputsVector(builder, numElems) +def SubGraphAddOperators(builder, operators): builder.PrependUOffsetTRelativeSlot(3, flatbuffers.number_types.UOffsetTFlags.py_type(operators), 0) +def AddOperators(builder, operators): + return SubGraphAddOperators(builder, operators) +def SubGraphStartOperatorsVector(builder, numElems): return builder.StartVector(4, numElems, 4) +def StartOperatorsVector(builder, numElems): + return SubGraphStartOperatorsVector(builder, numElems) +def SubGraphAddName(builder, name): builder.PrependUOffsetTRelativeSlot(4, flatbuffers.number_types.UOffsetTFlags.py_type(name), 0) +def AddName(builder, name): + return SubGraphAddName(builder, name) +def SubGraphEnd(builder): return builder.EndObject() +def End(builder): + return SubGraphEnd(builder) +try: + from typing import List +except: + pass + +class SubGraphT(object): + + # SubGraphT + def __init__(self): + self.tensors = None # type: List[TensorT] + self.inputs = None # type: List[int] + self.outputs = None # type: List[int] + self.operators = None # type: List[OperatorT] + self.name = None # type: str + + @classmethod + def InitFromBuf(cls, buf, pos): + subGraph = SubGraph() + subGraph.Init(buf, pos) + return cls.InitFromObj(subGraph) + + @classmethod + def 
InitFromObj(cls, subGraph): + x = SubGraphT() + x._UnPack(subGraph) + return x + + # SubGraphT + def _UnPack(self, subGraph): + if subGraph is None: + return + if not subGraph.TensorsIsNone(): + self.tensors = [] + for i in range(subGraph.TensorsLength()): + if subGraph.Tensors(i) is None: + self.tensors.append(None) + else: + tensor_ = TensorT.InitFromObj(subGraph.Tensors(i)) + self.tensors.append(tensor_) + if not subGraph.InputsIsNone(): + if np is None: + self.inputs = [] + for i in range(subGraph.InputsLength()): + self.inputs.append(subGraph.Inputs(i)) + else: + self.inputs = subGraph.InputsAsNumpy() + if not subGraph.OutputsIsNone(): + if np is None: + self.outputs = [] + for i in range(subGraph.OutputsLength()): + self.outputs.append(subGraph.Outputs(i)) + else: + self.outputs = subGraph.OutputsAsNumpy() + if not subGraph.OperatorsIsNone(): + self.operators = [] + for i in range(subGraph.OperatorsLength()): + if subGraph.Operators(i) is None: + self.operators.append(None) + else: + operator_ = OperatorT.InitFromObj(subGraph.Operators(i)) + self.operators.append(operator_) + self.name = subGraph.Name() + + # SubGraphT + def Pack(self, builder): + if self.tensors is not None: + tensorslist = [] + for i in range(len(self.tensors)): + tensorslist.append(self.tensors[i].Pack(builder)) + SubGraphStartTensorsVector(builder, len(self.tensors)) + for i in reversed(range(len(self.tensors))): + builder.PrependUOffsetTRelative(tensorslist[i]) + tensors = builder.EndVector() + if self.inputs is not None: + if np is not None and type(self.inputs) is np.ndarray: + inputs = builder.CreateNumpyVector(self.inputs) + else: + SubGraphStartInputsVector(builder, len(self.inputs)) + for i in reversed(range(len(self.inputs))): + builder.PrependInt32(self.inputs[i]) + inputs = builder.EndVector() + if self.outputs is not None: + if np is not None and type(self.outputs) is np.ndarray: + outputs = builder.CreateNumpyVector(self.outputs) + else: + SubGraphStartOutputsVector(builder, 
len(self.outputs)) + for i in reversed(range(len(self.outputs))): + builder.PrependInt32(self.outputs[i]) + outputs = builder.EndVector() + if self.operators is not None: + operatorslist = [] + for i in range(len(self.operators)): + operatorslist.append(self.operators[i].Pack(builder)) + SubGraphStartOperatorsVector(builder, len(self.operators)) + for i in reversed(range(len(self.operators))): + builder.PrependUOffsetTRelative(operatorslist[i]) + operators = builder.EndVector() + if self.name is not None: + name = builder.CreateString(self.name) + SubGraphStart(builder) + if self.tensors is not None: + SubGraphAddTensors(builder, tensors) + if self.inputs is not None: + SubGraphAddInputs(builder, inputs) + if self.outputs is not None: + SubGraphAddOutputs(builder, outputs) + if self.operators is not None: + SubGraphAddOperators(builder, operators) + if self.name is not None: + SubGraphAddName(builder, name) + subGraph = SubGraphEnd(builder) + return subGraph +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: tflite + +from flatbuffers.compat import import_numpy +np = import_numpy() + +class SubOptions(object): + __slots__ = ['_tab'] + + @classmethod + def GetRootAs(cls, buf, offset=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = SubOptions() + x.Init(buf, n + offset) + return x + + @classmethod + def GetRootAsSubOptions(cls, buf, offset=0): + """This method is deprecated. 
Please switch to GetRootAs.""" + return cls.GetRootAs(buf, offset) + @classmethod + def SubOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed) + + # SubOptions + def Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + + # SubOptions + def FusedActivationFunction(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.Get(flatbuffers.number_types.Int8Flags, o + self._tab.Pos) + return 0 + + # SubOptions + def PotScaleInt16(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) + if o != 0: + return bool(self._tab.Get(flatbuffers.number_types.BoolFlags, o + self._tab.Pos)) + return True + +def SubOptionsStart(builder): builder.StartObject(2) +def Start(builder): + return SubOptionsStart(builder) +def SubOptionsAddFusedActivationFunction(builder, fusedActivationFunction): builder.PrependInt8Slot(0, fusedActivationFunction, 0) +def AddFusedActivationFunction(builder, fusedActivationFunction): + return SubOptionsAddFusedActivationFunction(builder, fusedActivationFunction) +def SubOptionsAddPotScaleInt16(builder, potScaleInt16): builder.PrependBoolSlot(1, potScaleInt16, 1) +def AddPotScaleInt16(builder, potScaleInt16): + return SubOptionsAddPotScaleInt16(builder, potScaleInt16) +def SubOptionsEnd(builder): return builder.EndObject() +def End(builder): + return SubOptionsEnd(builder) + +class SubOptionsT(object): + + # SubOptionsT + def __init__(self): + self.fusedActivationFunction = 0 # type: int + self.potScaleInt16 = True # type: bool + + @classmethod + def InitFromBuf(cls, buf, pos): + subOptions = SubOptions() + subOptions.Init(buf, pos) + return cls.InitFromObj(subOptions) + + @classmethod + def InitFromObj(cls, subOptions): + x = SubOptionsT() + x._UnPack(subOptions) + return x + + # SubOptionsT + def _UnPack(self, subOptions): + if 
subOptions is None: + return + self.fusedActivationFunction = subOptions.FusedActivationFunction() + self.potScaleInt16 = subOptions.PotScaleInt16() + + # SubOptionsT + def Pack(self, builder): + SubOptionsStart(builder) + SubOptionsAddFusedActivationFunction(builder, self.fusedActivationFunction) + SubOptionsAddPotScaleInt16(builder, self.potScaleInt16) + subOptions = SubOptionsEnd(builder) + return subOptions +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: tflite + +from flatbuffers.compat import import_numpy +np = import_numpy() + +class Tensor(object): + __slots__ = ['_tab'] + + @classmethod + def GetRootAs(cls, buf, offset=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = Tensor() + x.Init(buf, n + offset) + return x + + @classmethod + def GetRootAsTensor(cls, buf, offset=0): + """This method is deprecated. Please switch to GetRootAs.""" + return cls.GetRootAs(buf, offset) + @classmethod + def TensorBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed) + + # Tensor + def Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + + # Tensor + def Shape(self, j): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + a = self._tab.Vector(o) + return self._tab.Get(flatbuffers.number_types.Int32Flags, a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 4)) + return 0 + + # Tensor + def ShapeAsNumpy(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Int32Flags, o) + return 0 + + # Tensor + def ShapeLength(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.VectorLen(o) + return 0 + + # Tensor + def ShapeIsNone(self): + o = 
# NOTE(review): the next two lines are the tail of a Tensor accessor whose
# `def` line falls in the previous hunk (the Offset(4)/`o == 0` pattern looks
# like the shape-vector *IsNone check — confirm against the full file).
flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4))
        return o == 0

    # Tensor
    # Element type tag (stored as int8; see the TensorType constants below).
    def Type(self):
        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6))
        if o != 0:
            return self._tab.Get(flatbuffers.number_types.Int8Flags, o + self._tab.Pos)
        return 0

    # Tensor
    # Buffer field (uint32); 0 when the slot is absent.
    def Buffer(self):
        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8))
        if o != 0:
            return self._tab.Get(flatbuffers.number_types.Uint32Flags, o + self._tab.Pos)
        return 0

    # Tensor
    # Optional name string; None when the field is absent.
    def Name(self):
        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(10))
        if o != 0:
            return self._tab.String(o + self._tab.Pos)
        return None

    # Tensor
    # QuantizationParameters sub-table, or None when absent.
    def Quantization(self):
        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(12))
        if o != 0:
            x = self._tab.Indirect(o + self._tab.Pos)
            obj = QuantizationParameters()
            obj.Init(self._tab.Bytes, x)
            return obj
        return None

    # Tensor
    def IsVariable(self):
        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(14))
        if o != 0:
            return bool(self._tab.Get(flatbuffers.number_types.BoolFlags, o + self._tab.Pos))
        return False

    # Tensor
    # SparsityParameters sub-table, or None when absent.
    def Sparsity(self):
        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(16))
        if o != 0:
            x = self._tab.Indirect(o + self._tab.Pos)
            obj = SparsityParameters()
            obj.Init(self._tab.Bytes, x)
            return obj
        return None

    # Tensor
    # j-th element of the shape_signature int32 vector.
    def ShapeSignature(self, j):
        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(18))
        if o != 0:
            a = self._tab.Vector(o)
            return self._tab.Get(flatbuffers.number_types.Int32Flags, a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 4))
        return 0

    # Tensor
    # Whole shape_signature vector as a numpy array; 0 when absent.
    def ShapeSignatureAsNumpy(self):
        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(18))
        if o != 0:
            return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Int32Flags, o)
        return 0

    # Tensor
    def ShapeSignatureLength(self):
        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(18))
        if o != 0:
            return self._tab.VectorLen(o)
        return 0

    # Tensor
    def ShapeSignatureIsNone(self):
        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(18))
        return o == 0

    # Tensor
    def HasRank(self):
        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(20))
        if o != 0:
            return bool(self._tab.Get(flatbuffers.number_types.BoolFlags, o + self._tab.Pos))
        return False

    # Tensor
    # j-th VariantSubType sub-table from the variant_tensors vector of
    # table offsets (4 bytes per entry, hence the j * 4).
    def VariantTensors(self, j):
        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(22))
        if o != 0:
            x = self._tab.Vector(o)
            x += flatbuffers.number_types.UOffsetTFlags.py_type(j) * 4
            x = self._tab.Indirect(x)
            obj = VariantSubType()
            obj.Init(self._tab.Bytes, x)
            return obj
        return None

    # Tensor
    def VariantTensorsLength(self):
        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(22))
        if o != 0:
            return self._tab.VectorLen(o)
        return 0

    # Tensor
    def VariantTensorsIsNone(self):
        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(22))
        return o == 0

# Module-level builder helpers for the Tensor table (10 slots).
# The unprefixed aliases (Start, AddShape, ...) delegate to the
# Tensor*-prefixed names; both forms are part of the generated API.
def TensorStart(builder): builder.StartObject(10)
def Start(builder):
    return TensorStart(builder)
def TensorAddShape(builder, shape): builder.PrependUOffsetTRelativeSlot(0, flatbuffers.number_types.UOffsetTFlags.py_type(shape), 0)
def AddShape(builder, shape):
    return TensorAddShape(builder, shape)
def TensorStartShapeVector(builder, numElems): return builder.StartVector(4, numElems, 4)
def StartShapeVector(builder, numElems):
    return TensorStartShapeVector(builder, numElems)
def TensorAddType(builder, type): builder.PrependInt8Slot(1, type, 0)
def AddType(builder, type):
    return TensorAddType(builder, type)
def TensorAddBuffer(builder, buffer): builder.PrependUint32Slot(2, buffer, 0)
def AddBuffer(builder, buffer):
    return TensorAddBuffer(builder, buffer)
def TensorAddName(builder, name): builder.PrependUOffsetTRelativeSlot(3, flatbuffers.number_types.UOffsetTFlags.py_type(name), 0)
def AddName(builder, name):
    return TensorAddName(builder, name)
def TensorAddQuantization(builder, quantization): builder.PrependUOffsetTRelativeSlot(4, flatbuffers.number_types.UOffsetTFlags.py_type(quantization), 0)
def AddQuantization(builder, quantization):
    return TensorAddQuantization(builder, quantization)
def TensorAddIsVariable(builder, isVariable): builder.PrependBoolSlot(5, isVariable, 0)
def AddIsVariable(builder, isVariable):
    return TensorAddIsVariable(builder, isVariable)
def TensorAddSparsity(builder, sparsity): builder.PrependUOffsetTRelativeSlot(6, flatbuffers.number_types.UOffsetTFlags.py_type(sparsity), 0)
def AddSparsity(builder, sparsity):
    return TensorAddSparsity(builder, sparsity)
def TensorAddShapeSignature(builder, shapeSignature): builder.PrependUOffsetTRelativeSlot(7, flatbuffers.number_types.UOffsetTFlags.py_type(shapeSignature), 0)
def AddShapeSignature(builder, shapeSignature):
    return TensorAddShapeSignature(builder, shapeSignature)
def TensorStartShapeSignatureVector(builder, numElems): return builder.StartVector(4, numElems, 4)
def StartShapeSignatureVector(builder, numElems):
    return TensorStartShapeSignatureVector(builder, numElems)
def TensorAddHasRank(builder, hasRank): builder.PrependBoolSlot(8, hasRank, 0)
def AddHasRank(builder, hasRank):
    return TensorAddHasRank(builder, hasRank)
def TensorAddVariantTensors(builder, variantTensors): builder.PrependUOffsetTRelativeSlot(9, flatbuffers.number_types.UOffsetTFlags.py_type(variantTensors), 0)
def AddVariantTensors(builder, variantTensors):
    return TensorAddVariantTensors(builder, variantTensors)
def TensorStartVariantTensorsVector(builder, numElems): return builder.StartVector(4, numElems, 4)
def StartVariantTensorsVector(builder, numElems):
    return TensorStartVariantTensorsVector(builder, numElems)
def TensorEnd(builder): return builder.EndObject()
def End(builder):
    return TensorEnd(builder)
try:
    from typing import List, Optional
except:
    pass

class TensorT(object):
    # Mutable (object-API) mirror of the Tensor table: unpacks a flatbuffer
    # Tensor into plain attributes and packs them back through a Builder.

    # TensorT
    def __init__(self):
        self.shape = None # type: List[int]
        self.type = 0 # type: int
        self.buffer = 0 # type: int
        self.name = None # type: str
        self.quantization = None # type: Optional[QuantizationParametersT]
        self.isVariable = False # type: bool
        self.sparsity = None # type: Optional[SparsityParametersT]
        self.shapeSignature = None # type: List[int]
        self.hasRank = False # type: bool
        self.variantTensors = None # type: List[VariantSubTypeT]

    @classmethod
    def InitFromBuf(cls, buf, pos):
        tensor = Tensor()
        tensor.Init(buf, pos)
        return cls.InitFromObj(tensor)

    @classmethod
    def InitFromObj(cls, tensor):
        x = TensorT()
        x._UnPack(tensor)
        return x

    # TensorT
    def _UnPack(self, tensor):
        # Copy every field out of the accessor object `tensor`; vectors come
        # back as numpy arrays when numpy is available, else Python lists.
        if tensor is None:
            return
        if not tensor.ShapeIsNone():
            if np is None:
                self.shape = []
                for i in range(tensor.ShapeLength()):
                    self.shape.append(tensor.Shape(i))
            else:
                self.shape = tensor.ShapeAsNumpy()
        self.type = tensor.Type()
        self.buffer = tensor.Buffer()
        self.name = tensor.Name()
        if tensor.Quantization() is not None:
            self.quantization = QuantizationParametersT.InitFromObj(tensor.Quantization())
        self.isVariable = tensor.IsVariable()
        if tensor.Sparsity() is not None:
            self.sparsity = SparsityParametersT.InitFromObj(tensor.Sparsity())
        if not tensor.ShapeSignatureIsNone():
            if np is None:
                self.shapeSignature = []
                for i in range(tensor.ShapeSignatureLength()):
                    self.shapeSignature.append(tensor.ShapeSignature(i))
            else:
                self.shapeSignature = tensor.ShapeSignatureAsNumpy()
        self.hasRank = tensor.HasRank()
        if not tensor.VariantTensorsIsNone():
            self.variantTensors = []
            for i in range(tensor.VariantTensorsLength()):
                if tensor.VariantTensors(i) is None:
                    self.variantTensors.append(None)
                else:
                    variantSubType_ = VariantSubTypeT.InitFromObj(tensor.VariantTensors(i))
                    self.variantTensors.append(variantSubType_)

    # TensorT
    def Pack(self, builder):
        # Serialize child vectors/strings/sub-tables first (flatbuffers are
        # built bottom-up), then start the table and fill the field slots.
        if self.shape is not None:
            if np is not None and type(self.shape) is np.ndarray:
                shape = builder.CreateNumpyVector(self.shape)
            else:
                TensorStartShapeVector(builder, len(self.shape))
                for i in reversed(range(len(self.shape))):
                    builder.PrependInt32(self.shape[i])
                shape = builder.EndVector()
        if self.name is not None:
            name = builder.CreateString(self.name)
        if self.quantization is not None:
            quantization = self.quantization.Pack(builder)
        if self.sparsity is not None:
            sparsity = self.sparsity.Pack(builder)
        if self.shapeSignature is not None:
            if np is not None and type(self.shapeSignature) is np.ndarray:
                shapeSignature = builder.CreateNumpyVector(self.shapeSignature)
            else:
                TensorStartShapeSignatureVector(builder, len(self.shapeSignature))
                for i in reversed(range(len(self.shapeSignature))):
                    builder.PrependInt32(self.shapeSignature[i])
                shapeSignature = builder.EndVector()
        if self.variantTensors is not None:
            variantTensorslist = []
            for i in range(len(self.variantTensors)):
                variantTensorslist.append(self.variantTensors[i].Pack(builder))
            TensorStartVariantTensorsVector(builder, len(self.variantTensors))
            for i in reversed(range(len(self.variantTensors))):
                builder.PrependUOffsetTRelative(variantTensorslist[i])
            variantTensors = builder.EndVector()
        TensorStart(builder)
        if self.shape is not None:
            TensorAddShape(builder, shape)
        TensorAddType(builder, self.type)
        TensorAddBuffer(builder, self.buffer)
        if self.name is not None:
            TensorAddName(builder, name)
        if self.quantization is not None:
            TensorAddQuantization(builder, quantization)
        TensorAddIsVariable(builder, self.isVariable)
        if self.sparsity is not None:
            TensorAddSparsity(builder, sparsity)
        if self.shapeSignature is not None:
            TensorAddShapeSignature(builder, shapeSignature)
        TensorAddHasRank(builder, self.hasRank)
        if self.variantTensors is not None:
            TensorAddVariantTensors(builder, variantTensors)
        # (Pack's final two statements — TensorEnd + return — are in the next hunk.)
# (tail of TensorT.Pack, continued from the previous hunk)
        tensor = TensorEnd(builder)
        return tensor
# automatically generated by the FlatBuffers compiler, do not modify

# namespace: tflite

from flatbuffers.compat import import_numpy
np = import_numpy()

class TensorMap(object):
    # Read-only accessor for the TensorMap table: a name paired with a
    # tensor index (uint32).
    __slots__ = ['_tab']

    @classmethod
    def GetRootAs(cls, buf, offset=0):
        n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset)
        x = TensorMap()
        x.Init(buf, n + offset)
        return x

    @classmethod
    def GetRootAsTensorMap(cls, buf, offset=0):
        """This method is deprecated. Please switch to GetRootAs."""
        return cls.GetRootAs(buf, offset)
    @classmethod
    def TensorMapBufferHasIdentifier(cls, buf, offset, size_prefixed=False):
        # b"TFL3" is the TFLite file identifier.
        return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed)

    # TensorMap
    def Init(self, buf, pos):
        self._tab = flatbuffers.table.Table(buf, pos)

    # TensorMap
    def Name(self):
        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4))
        if o != 0:
            return self._tab.String(o + self._tab.Pos)
        return None

    # TensorMap
    def TensorIndex(self):
        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6))
        if o != 0:
            return self._tab.Get(flatbuffers.number_types.Uint32Flags, o + self._tab.Pos)
        return 0

# Module-level builder helpers for the TensorMap table (2 slots).
def TensorMapStart(builder): builder.StartObject(2)
def Start(builder):
    return TensorMapStart(builder)
def TensorMapAddName(builder, name): builder.PrependUOffsetTRelativeSlot(0, flatbuffers.number_types.UOffsetTFlags.py_type(name), 0)
def AddName(builder, name):
    return TensorMapAddName(builder, name)
def TensorMapAddTensorIndex(builder, tensorIndex): builder.PrependUint32Slot(1, tensorIndex, 0)
def AddTensorIndex(builder, tensorIndex):
    return TensorMapAddTensorIndex(builder, tensorIndex)
def TensorMapEnd(builder): return builder.EndObject()
def End(builder):
    return TensorMapEnd(builder)

class TensorMapT(object):
    # Mutable (object-API) mirror of TensorMap.

    # TensorMapT
    def __init__(self):
        self.name = None # type: str
        self.tensorIndex = 0 # type: int

    @classmethod
    def InitFromBuf(cls, buf, pos):
        tensorMap = TensorMap()
        tensorMap.Init(buf, pos)
        return cls.InitFromObj(tensorMap)

    @classmethod
    def InitFromObj(cls, tensorMap):
        x = TensorMapT()
        x._UnPack(tensorMap)
        return x

    # TensorMapT
    def _UnPack(self, tensorMap):
        if tensorMap is None:
            return
        self.name = tensorMap.Name()
        self.tensorIndex = tensorMap.TensorIndex()

    # TensorMapT
    def Pack(self, builder):
        if self.name is not None:
            name = builder.CreateString(self.name)
        TensorMapStart(builder)
        if self.name is not None:
            TensorMapAddName(builder, name)
        TensorMapAddTensorIndex(builder, self.tensorIndex)
        tensorMap = TensorMapEnd(builder)
        return tensorMap
# automatically generated by the FlatBuffers compiler, do not modify

# namespace: tflite

# Integer constants naming tensor element types (presumably the values
# stored in Tensor.type, which is read as int8 — confirm against schema).
class TensorType(object):
    FLOAT32 = 0
    FLOAT16 = 1
    INT32 = 2
    UINT8 = 3
    INT64 = 4
    STRING = 5
    BOOL = 6
    INT16 = 7
    COMPLEX64 = 8
    INT8 = 9
    FLOAT64 = 10
    COMPLEX128 = 11
    UINT64 = 12
    RESOURCE = 13
    VARIANT = 14
    UINT32 = 15
    UINT16 = 16
    INT4 = 17
# automatically generated by the FlatBuffers compiler, do not modify

# namespace: tflite

from flatbuffers.compat import import_numpy
np = import_numpy()

class TileOptions(object):
    # Read-only accessor for the fieldless TileOptions table.
    __slots__ = ['_tab']

    @classmethod
    def GetRootAs(cls, buf, offset=0):
        n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset)
        x = TileOptions()
        x.Init(buf, n + offset)
        return x

    @classmethod
    def GetRootAsTileOptions(cls, buf, offset=0):
        """This method is deprecated. Please switch to GetRootAs."""
        return cls.GetRootAs(buf, offset)
    @classmethod
    def TileOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False):
        return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed)

    # TileOptions
    def Init(self, buf, pos):
        self._tab = flatbuffers.table.Table(buf, pos)

def TileOptionsStart(builder): builder.StartObject(0)
def Start(builder):
    return TileOptionsStart(builder)
def TileOptionsEnd(builder): return builder.EndObject()
def End(builder):
    return TileOptionsEnd(builder)

class TileOptionsT(object):
    # Mutable (object-API) mirror of TileOptions; no fields.

    # TileOptionsT
    def __init__(self):
        pass

    @classmethod
    def InitFromBuf(cls, buf, pos):
        tileOptions = TileOptions()
        tileOptions.Init(buf, pos)
        return cls.InitFromObj(tileOptions)

    @classmethod
    def InitFromObj(cls, tileOptions):
        x = TileOptionsT()
        x._UnPack(tileOptions)
        return x

    # TileOptionsT
    def _UnPack(self, tileOptions):
        if tileOptions is None:
            return

    # TileOptionsT
    def Pack(self, builder):
        TileOptionsStart(builder)
        tileOptions = TileOptionsEnd(builder)
        return tileOptions
# automatically generated by the FlatBuffers compiler, do not modify

# namespace: tflite

from flatbuffers.compat import import_numpy
np = import_numpy()

class TopKV2Options(object):
    # Read-only accessor for the fieldless TopKV2Options table.
    __slots__ = ['_tab']

    @classmethod
    def GetRootAs(cls, buf, offset=0):
        n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset)
        x = TopKV2Options()
        x.Init(buf, n + offset)
        return x

    @classmethod
    def GetRootAsTopKV2Options(cls, buf, offset=0):
        """This method is deprecated.
Please switch to GetRootAs."""
        return cls.GetRootAs(buf, offset)
    @classmethod
    def TopKV2OptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False):
        return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed)

    # TopKV2Options
    def Init(self, buf, pos):
        self._tab = flatbuffers.table.Table(buf, pos)

def TopKV2OptionsStart(builder): builder.StartObject(0)
def Start(builder):
    return TopKV2OptionsStart(builder)
def TopKV2OptionsEnd(builder): return builder.EndObject()
def End(builder):
    return TopKV2OptionsEnd(builder)

class TopKV2OptionsT(object):
    # Mutable (object-API) mirror of TopKV2Options; no fields.

    # TopKV2OptionsT
    def __init__(self):
        pass

    @classmethod
    def InitFromBuf(cls, buf, pos):
        topKv2options = TopKV2Options()
        topKv2options.Init(buf, pos)
        return cls.InitFromObj(topKv2options)

    @classmethod
    def InitFromObj(cls, topKv2options):
        x = TopKV2OptionsT()
        x._UnPack(topKv2options)
        return x

    # TopKV2OptionsT
    def _UnPack(self, topKv2options):
        if topKv2options is None:
            return

    # TopKV2OptionsT
    def Pack(self, builder):
        TopKV2OptionsStart(builder)
        topKv2options = TopKV2OptionsEnd(builder)
        return topKv2options
# automatically generated by the FlatBuffers compiler, do not modify

# namespace: tflite

from flatbuffers.compat import import_numpy
np = import_numpy()

class TransposeConvOptions(object):
    # Read-only accessor for TransposeConvOptions: padding, strides and a
    # fused activation function.
    __slots__ = ['_tab']

    @classmethod
    def GetRootAs(cls, buf, offset=0):
        n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset)
        x = TransposeConvOptions()
        x.Init(buf, n + offset)
        return x

    @classmethod
    def GetRootAsTransposeConvOptions(cls, buf, offset=0):
        """This method is deprecated. Please switch to GetRootAs."""
        return cls.GetRootAs(buf, offset)
    @classmethod
    def TransposeConvOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False):
        return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed)

    # TransposeConvOptions
    def Init(self, buf, pos):
        self._tab = flatbuffers.table.Table(buf, pos)

    # TransposeConvOptions
    # Padding enum value (stored int8).
    def Padding(self):
        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4))
        if o != 0:
            return self._tab.Get(flatbuffers.number_types.Int8Flags, o + self._tab.Pos)
        return 0

    # TransposeConvOptions
    def StrideW(self):
        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6))
        if o != 0:
            return self._tab.Get(flatbuffers.number_types.Int32Flags, o + self._tab.Pos)
        return 0

    # TransposeConvOptions
    def StrideH(self):
        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8))
        if o != 0:
            return self._tab.Get(flatbuffers.number_types.Int32Flags, o + self._tab.Pos)
        return 0

    # TransposeConvOptions
    # Activation-function enum value (stored int8).
    def FusedActivationFunction(self):
        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(10))
        if o != 0:
            return self._tab.Get(flatbuffers.number_types.Int8Flags, o + self._tab.Pos)
        return 0

# Module-level builder helpers for the TransposeConvOptions table (4 slots).
def TransposeConvOptionsStart(builder): builder.StartObject(4)
def Start(builder):
    return TransposeConvOptionsStart(builder)
def TransposeConvOptionsAddPadding(builder, padding): builder.PrependInt8Slot(0, padding, 0)
def AddPadding(builder, padding):
    return TransposeConvOptionsAddPadding(builder, padding)
def TransposeConvOptionsAddStrideW(builder, strideW): builder.PrependInt32Slot(1, strideW, 0)
def AddStrideW(builder, strideW):
    return TransposeConvOptionsAddStrideW(builder, strideW)
def TransposeConvOptionsAddStrideH(builder, strideH): builder.PrependInt32Slot(2, strideH, 0)
def AddStrideH(builder, strideH):
    return TransposeConvOptionsAddStrideH(builder, strideH)
def TransposeConvOptionsAddFusedActivationFunction(builder, fusedActivationFunction): builder.PrependInt8Slot(3, fusedActivationFunction, 0)
def AddFusedActivationFunction(builder, fusedActivationFunction):
    return TransposeConvOptionsAddFusedActivationFunction(builder, fusedActivationFunction)
def TransposeConvOptionsEnd(builder): return builder.EndObject()
def End(builder):
    return TransposeConvOptionsEnd(builder)

class TransposeConvOptionsT(object):
    # Mutable (object-API) mirror of TransposeConvOptions.

    # TransposeConvOptionsT
    def __init__(self):
        self.padding = 0 # type: int
        self.strideW = 0 # type: int
        self.strideH = 0 # type: int
        self.fusedActivationFunction = 0 # type: int

    @classmethod
    def InitFromBuf(cls, buf, pos):
        transposeConvOptions = TransposeConvOptions()
        transposeConvOptions.Init(buf, pos)
        return cls.InitFromObj(transposeConvOptions)

    @classmethod
    def InitFromObj(cls, transposeConvOptions):
        x = TransposeConvOptionsT()
        x._UnPack(transposeConvOptions)
        return x

    # TransposeConvOptionsT
    def _UnPack(self, transposeConvOptions):
        if transposeConvOptions is None:
            return
        self.padding = transposeConvOptions.Padding()
        self.strideW = transposeConvOptions.StrideW()
        self.strideH = transposeConvOptions.StrideH()
        self.fusedActivationFunction = transposeConvOptions.FusedActivationFunction()

    # TransposeConvOptionsT
    def Pack(self, builder):
        TransposeConvOptionsStart(builder)
        TransposeConvOptionsAddPadding(builder, self.padding)
        TransposeConvOptionsAddStrideW(builder, self.strideW)
        TransposeConvOptionsAddStrideH(builder, self.strideH)
        TransposeConvOptionsAddFusedActivationFunction(builder, self.fusedActivationFunction)
        transposeConvOptions = TransposeConvOptionsEnd(builder)
        return transposeConvOptions
# automatically generated by the FlatBuffers compiler, do not modify

# namespace: tflite

from flatbuffers.compat import import_numpy
np = import_numpy()

class TransposeOptions(object):
    # Read-only accessor for the fieldless TransposeOptions table.
    __slots__ = ['_tab']

    @classmethod
    def
GetRootAs(cls, buf, offset=0):
        n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset)
        x = TransposeOptions()
        x.Init(buf, n + offset)
        return x

    @classmethod
    def GetRootAsTransposeOptions(cls, buf, offset=0):
        """This method is deprecated. Please switch to GetRootAs."""
        return cls.GetRootAs(buf, offset)
    @classmethod
    def TransposeOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False):
        return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed)

    # TransposeOptions
    def Init(self, buf, pos):
        self._tab = flatbuffers.table.Table(buf, pos)

def TransposeOptionsStart(builder): builder.StartObject(0)
def Start(builder):
    return TransposeOptionsStart(builder)
def TransposeOptionsEnd(builder): return builder.EndObject()
def End(builder):
    return TransposeOptionsEnd(builder)

class TransposeOptionsT(object):
    # Mutable (object-API) mirror of TransposeOptions; no fields.

    # TransposeOptionsT
    def __init__(self):
        pass

    @classmethod
    def InitFromBuf(cls, buf, pos):
        transposeOptions = TransposeOptions()
        transposeOptions.Init(buf, pos)
        return cls.InitFromObj(transposeOptions)

    @classmethod
    def InitFromObj(cls, transposeOptions):
        x = TransposeOptionsT()
        x._UnPack(transposeOptions)
        return x

    # TransposeOptionsT
    def _UnPack(self, transposeOptions):
        if transposeOptions is None:
            return

    # TransposeOptionsT
    def Pack(self, builder):
        TransposeOptionsStart(builder)
        transposeOptions = TransposeOptionsEnd(builder)
        return transposeOptions
# automatically generated by the FlatBuffers compiler, do not modify

# namespace: tflite

from flatbuffers.compat import import_numpy
np = import_numpy()

class Uint16Vector(object):
    # Read-only accessor for a table wrapping a single uint16 vector.
    __slots__ = ['_tab']

    @classmethod
    def GetRootAs(cls, buf, offset=0):
        n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset)
        x = Uint16Vector()
        x.Init(buf, n + offset)
        return x

    @classmethod
    def GetRootAsUint16Vector(cls, buf, offset=0):
        """This method is deprecated. Please switch to GetRootAs."""
        return cls.GetRootAs(buf, offset)
    @classmethod
    def Uint16VectorBufferHasIdentifier(cls, buf, offset, size_prefixed=False):
        return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed)

    # Uint16Vector
    def Init(self, buf, pos):
        self._tab = flatbuffers.table.Table(buf, pos)

    # Uint16Vector
    # j-th element (2 bytes per entry, hence j * 2).
    def Values(self, j):
        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4))
        if o != 0:
            a = self._tab.Vector(o)
            return self._tab.Get(flatbuffers.number_types.Uint16Flags, a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 2))
        return 0

    # Uint16Vector
    def ValuesAsNumpy(self):
        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4))
        if o != 0:
            return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Uint16Flags, o)
        return 0

    # Uint16Vector
    def ValuesLength(self):
        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4))
        if o != 0:
            return self._tab.VectorLen(o)
        return 0

    # Uint16Vector
    def ValuesIsNone(self):
        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4))
        return o == 0

def Uint16VectorStart(builder): builder.StartObject(1)
def Start(builder):
    return Uint16VectorStart(builder)
def Uint16VectorAddValues(builder, values): builder.PrependUOffsetTRelativeSlot(0, flatbuffers.number_types.UOffsetTFlags.py_type(values), 0)
def AddValues(builder, values):
    return Uint16VectorAddValues(builder, values)
def Uint16VectorStartValuesVector(builder, numElems): return builder.StartVector(2, numElems, 2)
def StartValuesVector(builder, numElems):
    return Uint16VectorStartValuesVector(builder, numElems)
def Uint16VectorEnd(builder): return builder.EndObject()
def End(builder):
    return Uint16VectorEnd(builder)
try:
    from typing import List
except:
    pass

class Uint16VectorT(object):
    # Mutable (object-API) mirror of Uint16Vector.

    # Uint16VectorT
    def __init__(self):
        self.values = None # type: List[int]

    @classmethod
    def InitFromBuf(cls, buf, pos):
        uint16vector = Uint16Vector()
        uint16vector.Init(buf, pos)
        return cls.InitFromObj(uint16vector)

    @classmethod
    def InitFromObj(cls, uint16vector):
        x = Uint16VectorT()
        x._UnPack(uint16vector)
        return x

    # Uint16VectorT
    def _UnPack(self, uint16vector):
        if uint16vector is None:
            return
        if not uint16vector.ValuesIsNone():
            if np is None:
                self.values = []
                for i in range(uint16vector.ValuesLength()):
                    self.values.append(uint16vector.Values(i))
            else:
                self.values = uint16vector.ValuesAsNumpy()

    # Uint16VectorT
    def Pack(self, builder):
        if self.values is not None:
            if np is not None and type(self.values) is np.ndarray:
                values = builder.CreateNumpyVector(self.values)
            else:
                Uint16VectorStartValuesVector(builder, len(self.values))
                for i in reversed(range(len(self.values))):
                    builder.PrependUint16(self.values[i])
                values = builder.EndVector()
        Uint16VectorStart(builder)
        if self.values is not None:
            Uint16VectorAddValues(builder, values)
        uint16vector = Uint16VectorEnd(builder)
        return uint16vector
# automatically generated by the FlatBuffers compiler, do not modify

# namespace: tflite

from flatbuffers.compat import import_numpy
np = import_numpy()

class Uint8Vector(object):
    # Read-only accessor for a table wrapping a single uint8 vector.
    __slots__ = ['_tab']

    @classmethod
    def GetRootAs(cls, buf, offset=0):
        n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset)
        x = Uint8Vector()
        x.Init(buf, n + offset)
        return x

    @classmethod
    def GetRootAsUint8Vector(cls, buf, offset=0):
        """This method is deprecated.
Please switch to GetRootAs."""
        return cls.GetRootAs(buf, offset)
    @classmethod
    def Uint8VectorBufferHasIdentifier(cls, buf, offset, size_prefixed=False):
        return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed)

    # Uint8Vector
    def Init(self, buf, pos):
        self._tab = flatbuffers.table.Table(buf, pos)

    # Uint8Vector
    # j-th element (1 byte per entry, hence j * 1).
    def Values(self, j):
        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4))
        if o != 0:
            a = self._tab.Vector(o)
            return self._tab.Get(flatbuffers.number_types.Uint8Flags, a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 1))
        return 0

    # Uint8Vector
    def ValuesAsNumpy(self):
        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4))
        if o != 0:
            return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Uint8Flags, o)
        return 0

    # Uint8Vector
    def ValuesLength(self):
        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4))
        if o != 0:
            return self._tab.VectorLen(o)
        return 0

    # Uint8Vector
    def ValuesIsNone(self):
        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4))
        return o == 0

def Uint8VectorStart(builder): builder.StartObject(1)
def Start(builder):
    return Uint8VectorStart(builder)
def Uint8VectorAddValues(builder, values): builder.PrependUOffsetTRelativeSlot(0, flatbuffers.number_types.UOffsetTFlags.py_type(values), 0)
def AddValues(builder, values):
    return Uint8VectorAddValues(builder, values)
def Uint8VectorStartValuesVector(builder, numElems): return builder.StartVector(1, numElems, 1)
def StartValuesVector(builder, numElems):
    return Uint8VectorStartValuesVector(builder, numElems)
def Uint8VectorEnd(builder): return builder.EndObject()
def End(builder):
    return Uint8VectorEnd(builder)
try:
    from typing import List
except:
    pass

class Uint8VectorT(object):
    # Mutable (object-API) mirror of Uint8Vector.

    # Uint8VectorT
    def __init__(self):
        self.values = None # type: List[int]

    @classmethod
    def InitFromBuf(cls, buf, pos):
        uint8vector = Uint8Vector()
        uint8vector.Init(buf, pos)
        return cls.InitFromObj(uint8vector)

    @classmethod
    def InitFromObj(cls, uint8vector):
        x = Uint8VectorT()
        x._UnPack(uint8vector)
        return x

    # Uint8VectorT
    def _UnPack(self, uint8vector):
        if uint8vector is None:
            return
        if not uint8vector.ValuesIsNone():
            if np is None:
                self.values = []
                for i in range(uint8vector.ValuesLength()):
                    self.values.append(uint8vector.Values(i))
            else:
                self.values = uint8vector.ValuesAsNumpy()

    # Uint8VectorT
    def Pack(self, builder):
        if self.values is not None:
            if np is not None and type(self.values) is np.ndarray:
                values = builder.CreateNumpyVector(self.values)
            else:
                Uint8VectorStartValuesVector(builder, len(self.values))
                for i in reversed(range(len(self.values))):
                    builder.PrependUint8(self.values[i])
                values = builder.EndVector()
        Uint8VectorStart(builder)
        if self.values is not None:
            Uint8VectorAddValues(builder, values)
        uint8vector = Uint8VectorEnd(builder)
        return uint8vector
# automatically generated by the FlatBuffers compiler, do not modify

# namespace: tflite

from flatbuffers.compat import import_numpy
np = import_numpy()

class UnidirectionalSequenceLSTMOptions(object):
    # Read-only accessor for UnidirectionalSequenceLSTMOptions.
    __slots__ = ['_tab']

    @classmethod
    def GetRootAs(cls, buf, offset=0):
        n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset)
        x = UnidirectionalSequenceLSTMOptions()
        x.Init(buf, n + offset)
        return x

    @classmethod
    def GetRootAsUnidirectionalSequenceLSTMOptions(cls, buf, offset=0):
        """This method is deprecated. Please switch to GetRootAs."""
        return cls.GetRootAs(buf, offset)
    @classmethod
    def UnidirectionalSequenceLSTMOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False):
        return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed)

    # UnidirectionalSequenceLSTMOptions
    def Init(self, buf, pos):
        self._tab = flatbuffers.table.Table(buf, pos)

    # UnidirectionalSequenceLSTMOptions
    # Activation-function enum value (stored int8).
    def FusedActivationFunction(self):
        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4))
        if o != 0:
            return self._tab.Get(flatbuffers.number_types.Int8Flags, o + self._tab.Pos)
        return 0

    # UnidirectionalSequenceLSTMOptions
    def CellClip(self):
        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6))
        if o != 0:
            return self._tab.Get(flatbuffers.number_types.Float32Flags, o + self._tab.Pos)
        return 0.0

    # UnidirectionalSequenceLSTMOptions
    def ProjClip(self):
        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8))
        if o != 0:
            return self._tab.Get(flatbuffers.number_types.Float32Flags, o + self._tab.Pos)
        return 0.0

    # UnidirectionalSequenceLSTMOptions
    def TimeMajor(self):
        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(10))
        if o != 0:
            return bool(self._tab.Get(flatbuffers.number_types.BoolFlags, o + self._tab.Pos))
        return False

    # UnidirectionalSequenceLSTMOptions
    def AsymmetricQuantizeInputs(self):
        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(12))
        if o != 0:
            return bool(self._tab.Get(flatbuffers.number_types.BoolFlags, o + self._tab.Pos))
        return False

    # UnidirectionalSequenceLSTMOptions
    def DiagonalRecurrentTensors(self):
        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(14))
        if o != 0:
            return bool(self._tab.Get(flatbuffers.number_types.BoolFlags, o + self._tab.Pos))
        return False

# (the body of this builder helper continues in the next hunk)
def UnidirectionalSequenceLSTMOptionsStart(builder):
builder.StartObject(6)
def Start(builder):
    return UnidirectionalSequenceLSTMOptionsStart(builder)
def UnidirectionalSequenceLSTMOptionsAddFusedActivationFunction(builder, fusedActivationFunction): builder.PrependInt8Slot(0, fusedActivationFunction, 0)
def AddFusedActivationFunction(builder, fusedActivationFunction):
    return UnidirectionalSequenceLSTMOptionsAddFusedActivationFunction(builder, fusedActivationFunction)
def UnidirectionalSequenceLSTMOptionsAddCellClip(builder, cellClip): builder.PrependFloat32Slot(1, cellClip, 0.0)
def AddCellClip(builder, cellClip):
    return UnidirectionalSequenceLSTMOptionsAddCellClip(builder, cellClip)
def UnidirectionalSequenceLSTMOptionsAddProjClip(builder, projClip): builder.PrependFloat32Slot(2, projClip, 0.0)
def AddProjClip(builder, projClip):
    return UnidirectionalSequenceLSTMOptionsAddProjClip(builder, projClip)
def UnidirectionalSequenceLSTMOptionsAddTimeMajor(builder, timeMajor): builder.PrependBoolSlot(3, timeMajor, 0)
def AddTimeMajor(builder, timeMajor):
    return UnidirectionalSequenceLSTMOptionsAddTimeMajor(builder, timeMajor)
def UnidirectionalSequenceLSTMOptionsAddAsymmetricQuantizeInputs(builder, asymmetricQuantizeInputs): builder.PrependBoolSlot(4, asymmetricQuantizeInputs, 0)
def AddAsymmetricQuantizeInputs(builder, asymmetricQuantizeInputs):
    return UnidirectionalSequenceLSTMOptionsAddAsymmetricQuantizeInputs(builder, asymmetricQuantizeInputs)
def UnidirectionalSequenceLSTMOptionsAddDiagonalRecurrentTensors(builder, diagonalRecurrentTensors): builder.PrependBoolSlot(5, diagonalRecurrentTensors, 0)
def AddDiagonalRecurrentTensors(builder, diagonalRecurrentTensors):
    return UnidirectionalSequenceLSTMOptionsAddDiagonalRecurrentTensors(builder, diagonalRecurrentTensors)
def UnidirectionalSequenceLSTMOptionsEnd(builder): return builder.EndObject()
def End(builder):
    return UnidirectionalSequenceLSTMOptionsEnd(builder)

class UnidirectionalSequenceLSTMOptionsT(object):
    # Mutable (object-API) mirror of UnidirectionalSequenceLSTMOptions.

    # UnidirectionalSequenceLSTMOptionsT
    def __init__(self):
        self.fusedActivationFunction = 0 # type: int
        self.cellClip = 0.0 # type: float
        self.projClip = 0.0 # type: float
        self.timeMajor = False # type: bool
        self.asymmetricQuantizeInputs = False # type: bool
        self.diagonalRecurrentTensors = False # type: bool

    @classmethod
    def InitFromBuf(cls, buf, pos):
        unidirectionalSequenceLstmoptions = UnidirectionalSequenceLSTMOptions()
        unidirectionalSequenceLstmoptions.Init(buf, pos)
        return cls.InitFromObj(unidirectionalSequenceLstmoptions)

    @classmethod
    def InitFromObj(cls, unidirectionalSequenceLstmoptions):
        x = UnidirectionalSequenceLSTMOptionsT()
        x._UnPack(unidirectionalSequenceLstmoptions)
        return x

    # UnidirectionalSequenceLSTMOptionsT
    def _UnPack(self, unidirectionalSequenceLstmoptions):
        if unidirectionalSequenceLstmoptions is None:
            return
        self.fusedActivationFunction = unidirectionalSequenceLstmoptions.FusedActivationFunction()
        self.cellClip = unidirectionalSequenceLstmoptions.CellClip()
        self.projClip = unidirectionalSequenceLstmoptions.ProjClip()
        self.timeMajor = unidirectionalSequenceLstmoptions.TimeMajor()
        self.asymmetricQuantizeInputs = unidirectionalSequenceLstmoptions.AsymmetricQuantizeInputs()
        self.diagonalRecurrentTensors = unidirectionalSequenceLstmoptions.DiagonalRecurrentTensors()

    # UnidirectionalSequenceLSTMOptionsT
    def Pack(self, builder):
        UnidirectionalSequenceLSTMOptionsStart(builder)
        UnidirectionalSequenceLSTMOptionsAddFusedActivationFunction(builder, self.fusedActivationFunction)
        UnidirectionalSequenceLSTMOptionsAddCellClip(builder, self.cellClip)
        UnidirectionalSequenceLSTMOptionsAddProjClip(builder, self.projClip)
        UnidirectionalSequenceLSTMOptionsAddTimeMajor(builder, self.timeMajor)
        UnidirectionalSequenceLSTMOptionsAddAsymmetricQuantizeInputs(builder, self.asymmetricQuantizeInputs)
        UnidirectionalSequenceLSTMOptionsAddDiagonalRecurrentTensors(builder, self.diagonalRecurrentTensors)
        unidirectionalSequenceLstmoptions = UnidirectionalSequenceLSTMOptionsEnd(builder)
        return unidirectionalSequenceLstmoptions
# automatically generated by the FlatBuffers compiler, do not modify

# namespace: tflite

from flatbuffers.compat import import_numpy
np = import_numpy()

class UniqueOptions(object):
    # Read-only accessor for UniqueOptions.
    __slots__ = ['_tab']

    @classmethod
    def GetRootAs(cls, buf, offset=0):
        n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset)
        x = UniqueOptions()
        x.Init(buf, n + offset)
        return x

    @classmethod
    def GetRootAsUniqueOptions(cls, buf, offset=0):
        """This method is deprecated. Please switch to GetRootAs."""
        return cls.GetRootAs(buf, offset)
    @classmethod
    def UniqueOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False):
        return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed)

    # UniqueOptions
    def Init(self, buf, pos):
        self._tab = flatbuffers.table.Table(buf, pos)

    # UniqueOptions
    # Note the non-zero schema default (2) for this field — 2 matches
    # TensorType.INT32 in this module; presumably intentional.
    def IdxOutType(self):
        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4))
        if o != 0:
            return self._tab.Get(flatbuffers.number_types.Int8Flags, o + self._tab.Pos)
        return 2

def UniqueOptionsStart(builder): builder.StartObject(1)
def Start(builder):
    return UniqueOptionsStart(builder)
def UniqueOptionsAddIdxOutType(builder, idxOutType): builder.PrependInt8Slot(0, idxOutType, 2)
def AddIdxOutType(builder, idxOutType):
    return UniqueOptionsAddIdxOutType(builder, idxOutType)
def UniqueOptionsEnd(builder): return builder.EndObject()
def End(builder):
    return UniqueOptionsEnd(builder)

class UniqueOptionsT(object):
    # Mutable (object-API) mirror of UniqueOptions.

    # UniqueOptionsT
    def __init__(self):
        self.idxOutType = 2 # type: int

    @classmethod
    def InitFromBuf(cls, buf, pos):
        uniqueOptions = UniqueOptions()
        uniqueOptions.Init(buf, pos)
        return cls.InitFromObj(uniqueOptions)

    @classmethod
    def InitFromObj(cls, uniqueOptions):
        x = UniqueOptionsT()
        x._UnPack(uniqueOptions)
        return x

    # UniqueOptionsT
    def _UnPack(self, uniqueOptions):
        if uniqueOptions is None:
            return
        self.idxOutType = uniqueOptions.IdxOutType()

    # UniqueOptionsT
    def Pack(self, builder):
        UniqueOptionsStart(builder)
        UniqueOptionsAddIdxOutType(builder, self.idxOutType)
        uniqueOptions = UniqueOptionsEnd(builder)
        return uniqueOptions
# automatically generated by the FlatBuffers compiler, do not modify

# namespace: tflite

from flatbuffers.compat import import_numpy
np = import_numpy()

class UnpackOptions(object):
    # Read-only accessor for UnpackOptions (num, axis).
    __slots__ = ['_tab']

    @classmethod
    def GetRootAs(cls, buf, offset=0):
        n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset)
        x = UnpackOptions()
        x.Init(buf, n + offset)
        return x

    @classmethod
    def GetRootAsUnpackOptions(cls, buf, offset=0):
        """This method is deprecated. Please switch to GetRootAs."""
        return cls.GetRootAs(buf, offset)
    @classmethod
    def UnpackOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False):
        return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed)

    # UnpackOptions
    def Init(self, buf, pos):
        self._tab = flatbuffers.table.Table(buf, pos)

    # UnpackOptions
    def Num(self):
        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4))
        if o != 0:
            return self._tab.Get(flatbuffers.number_types.Int32Flags, o + self._tab.Pos)
        return 0

    # UnpackOptions
    def Axis(self):
        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6))
        if o != 0:
            return self._tab.Get(flatbuffers.number_types.Int32Flags, o + self._tab.Pos)
        return 0

def UnpackOptionsStart(builder): builder.StartObject(2)
def Start(builder):
    return UnpackOptionsStart(builder)
def UnpackOptionsAddNum(builder, num): builder.PrependInt32Slot(0, num, 0)
def AddNum(builder, num):
    return UnpackOptionsAddNum(builder, num)
# (the body of this builder helper continues in the next hunk)
def UnpackOptionsAddAxis(builder, axis):
builder.PrependInt32Slot(1, axis, 0) +def AddAxis(builder, axis): + return UnpackOptionsAddAxis(builder, axis) +def UnpackOptionsEnd(builder): return builder.EndObject() +def End(builder): + return UnpackOptionsEnd(builder) + +class UnpackOptionsT(object): + + # UnpackOptionsT + def __init__(self): + self.num = 0 # type: int + self.axis = 0 # type: int + + @classmethod + def InitFromBuf(cls, buf, pos): + unpackOptions = UnpackOptions() + unpackOptions.Init(buf, pos) + return cls.InitFromObj(unpackOptions) + + @classmethod + def InitFromObj(cls, unpackOptions): + x = UnpackOptionsT() + x._UnPack(unpackOptions) + return x + + # UnpackOptionsT + def _UnPack(self, unpackOptions): + if unpackOptions is None: + return + self.num = unpackOptions.Num() + self.axis = unpackOptions.Axis() + + # UnpackOptionsT + def Pack(self, builder): + UnpackOptionsStart(builder) + UnpackOptionsAddNum(builder, self.num) + UnpackOptionsAddAxis(builder, self.axis) + unpackOptions = UnpackOptionsEnd(builder) + return unpackOptions +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: tflite + +from flatbuffers.compat import import_numpy +np = import_numpy() + +class UnsortedSegmentMaxOptions(object): + __slots__ = ['_tab'] + + @classmethod + def GetRootAs(cls, buf, offset=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = UnsortedSegmentMaxOptions() + x.Init(buf, n + offset) + return x + + @classmethod + def GetRootAsUnsortedSegmentMaxOptions(cls, buf, offset=0): + """This method is deprecated. 
Please switch to GetRootAs.""" + return cls.GetRootAs(buf, offset) + @classmethod + def UnsortedSegmentMaxOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed) + + # UnsortedSegmentMaxOptions + def Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + +def UnsortedSegmentMaxOptionsStart(builder): builder.StartObject(0) +def Start(builder): + return UnsortedSegmentMaxOptionsStart(builder) +def UnsortedSegmentMaxOptionsEnd(builder): return builder.EndObject() +def End(builder): + return UnsortedSegmentMaxOptionsEnd(builder) + +class UnsortedSegmentMaxOptionsT(object): + + # UnsortedSegmentMaxOptionsT + def __init__(self): + pass + + @classmethod + def InitFromBuf(cls, buf, pos): + unsortedSegmentMaxOptions = UnsortedSegmentMaxOptions() + unsortedSegmentMaxOptions.Init(buf, pos) + return cls.InitFromObj(unsortedSegmentMaxOptions) + + @classmethod + def InitFromObj(cls, unsortedSegmentMaxOptions): + x = UnsortedSegmentMaxOptionsT() + x._UnPack(unsortedSegmentMaxOptions) + return x + + # UnsortedSegmentMaxOptionsT + def _UnPack(self, unsortedSegmentMaxOptions): + if unsortedSegmentMaxOptions is None: + return + + # UnsortedSegmentMaxOptionsT + def Pack(self, builder): + UnsortedSegmentMaxOptionsStart(builder) + unsortedSegmentMaxOptions = UnsortedSegmentMaxOptionsEnd(builder) + return unsortedSegmentMaxOptions +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: tflite + +from flatbuffers.compat import import_numpy +np = import_numpy() + +class UnsortedSegmentMinOptions(object): + __slots__ = ['_tab'] + + @classmethod + def GetRootAs(cls, buf, offset=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = UnsortedSegmentMinOptions() + x.Init(buf, n + offset) + return x + + @classmethod + def GetRootAsUnsortedSegmentMinOptions(cls, buf, offset=0): + """This method is 
deprecated. Please switch to GetRootAs.""" + return cls.GetRootAs(buf, offset) + @classmethod + def UnsortedSegmentMinOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed) + + # UnsortedSegmentMinOptions + def Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + +def UnsortedSegmentMinOptionsStart(builder): builder.StartObject(0) +def Start(builder): + return UnsortedSegmentMinOptionsStart(builder) +def UnsortedSegmentMinOptionsEnd(builder): return builder.EndObject() +def End(builder): + return UnsortedSegmentMinOptionsEnd(builder) + +class UnsortedSegmentMinOptionsT(object): + + # UnsortedSegmentMinOptionsT + def __init__(self): + pass + + @classmethod + def InitFromBuf(cls, buf, pos): + unsortedSegmentMinOptions = UnsortedSegmentMinOptions() + unsortedSegmentMinOptions.Init(buf, pos) + return cls.InitFromObj(unsortedSegmentMinOptions) + + @classmethod + def InitFromObj(cls, unsortedSegmentMinOptions): + x = UnsortedSegmentMinOptionsT() + x._UnPack(unsortedSegmentMinOptions) + return x + + # UnsortedSegmentMinOptionsT + def _UnPack(self, unsortedSegmentMinOptions): + if unsortedSegmentMinOptions is None: + return + + # UnsortedSegmentMinOptionsT + def Pack(self, builder): + UnsortedSegmentMinOptionsStart(builder) + unsortedSegmentMinOptions = UnsortedSegmentMinOptionsEnd(builder) + return unsortedSegmentMinOptions +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: tflite + +from flatbuffers.compat import import_numpy +np = import_numpy() + +class UnsortedSegmentProdOptions(object): + __slots__ = ['_tab'] + + @classmethod + def GetRootAs(cls, buf, offset=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = UnsortedSegmentProdOptions() + x.Init(buf, n + offset) + return x + + @classmethod + def GetRootAsUnsortedSegmentProdOptions(cls, buf, offset=0): + """This 
method is deprecated. Please switch to GetRootAs.""" + return cls.GetRootAs(buf, offset) + @classmethod + def UnsortedSegmentProdOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed) + + # UnsortedSegmentProdOptions + def Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + +def UnsortedSegmentProdOptionsStart(builder): builder.StartObject(0) +def Start(builder): + return UnsortedSegmentProdOptionsStart(builder) +def UnsortedSegmentProdOptionsEnd(builder): return builder.EndObject() +def End(builder): + return UnsortedSegmentProdOptionsEnd(builder) + +class UnsortedSegmentProdOptionsT(object): + + # UnsortedSegmentProdOptionsT + def __init__(self): + pass + + @classmethod + def InitFromBuf(cls, buf, pos): + unsortedSegmentProdOptions = UnsortedSegmentProdOptions() + unsortedSegmentProdOptions.Init(buf, pos) + return cls.InitFromObj(unsortedSegmentProdOptions) + + @classmethod + def InitFromObj(cls, unsortedSegmentProdOptions): + x = UnsortedSegmentProdOptionsT() + x._UnPack(unsortedSegmentProdOptions) + return x + + # UnsortedSegmentProdOptionsT + def _UnPack(self, unsortedSegmentProdOptions): + if unsortedSegmentProdOptions is None: + return + + # UnsortedSegmentProdOptionsT + def Pack(self, builder): + UnsortedSegmentProdOptionsStart(builder) + unsortedSegmentProdOptions = UnsortedSegmentProdOptionsEnd(builder) + return unsortedSegmentProdOptions +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: tflite + +from flatbuffers.compat import import_numpy +np = import_numpy() + +class UnsortedSegmentSumOptions(object): + __slots__ = ['_tab'] + + @classmethod + def GetRootAs(cls, buf, offset=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = UnsortedSegmentSumOptions() + x.Init(buf, n + offset) + return x + + @classmethod + def GetRootAsUnsortedSegmentSumOptions(cls, 
buf, offset=0): + """This method is deprecated. Please switch to GetRootAs.""" + return cls.GetRootAs(buf, offset) + @classmethod + def UnsortedSegmentSumOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed) + + # UnsortedSegmentSumOptions + def Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + +def UnsortedSegmentSumOptionsStart(builder): builder.StartObject(0) +def Start(builder): + return UnsortedSegmentSumOptionsStart(builder) +def UnsortedSegmentSumOptionsEnd(builder): return builder.EndObject() +def End(builder): + return UnsortedSegmentSumOptionsEnd(builder) + +class UnsortedSegmentSumOptionsT(object): + + # UnsortedSegmentSumOptionsT + def __init__(self): + pass + + @classmethod + def InitFromBuf(cls, buf, pos): + unsortedSegmentSumOptions = UnsortedSegmentSumOptions() + unsortedSegmentSumOptions.Init(buf, pos) + return cls.InitFromObj(unsortedSegmentSumOptions) + + @classmethod + def InitFromObj(cls, unsortedSegmentSumOptions): + x = UnsortedSegmentSumOptionsT() + x._UnPack(unsortedSegmentSumOptions) + return x + + # UnsortedSegmentSumOptionsT + def _UnPack(self, unsortedSegmentSumOptions): + if unsortedSegmentSumOptions is None: + return + + # UnsortedSegmentSumOptionsT + def Pack(self, builder): + UnsortedSegmentSumOptionsStart(builder) + unsortedSegmentSumOptions = UnsortedSegmentSumOptionsEnd(builder) + return unsortedSegmentSumOptions +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: tflite + +from flatbuffers.compat import import_numpy +np = import_numpy() + +class VarHandleOptions(object): + __slots__ = ['_tab'] + + @classmethod + def GetRootAs(cls, buf, offset=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = VarHandleOptions() + x.Init(buf, n + offset) + return x + + @classmethod + def GetRootAsVarHandleOptions(cls, buf, offset=0): + """This 
method is deprecated. Please switch to GetRootAs.""" + return cls.GetRootAs(buf, offset) + @classmethod + def VarHandleOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed) + + # VarHandleOptions + def Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + + # VarHandleOptions + def Container(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.String(o + self._tab.Pos) + return None + + # VarHandleOptions + def SharedName(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) + if o != 0: + return self._tab.String(o + self._tab.Pos) + return None + +def VarHandleOptionsStart(builder): builder.StartObject(2) +def Start(builder): + return VarHandleOptionsStart(builder) +def VarHandleOptionsAddContainer(builder, container): builder.PrependUOffsetTRelativeSlot(0, flatbuffers.number_types.UOffsetTFlags.py_type(container), 0) +def AddContainer(builder, container): + return VarHandleOptionsAddContainer(builder, container) +def VarHandleOptionsAddSharedName(builder, sharedName): builder.PrependUOffsetTRelativeSlot(1, flatbuffers.number_types.UOffsetTFlags.py_type(sharedName), 0) +def AddSharedName(builder, sharedName): + return VarHandleOptionsAddSharedName(builder, sharedName) +def VarHandleOptionsEnd(builder): return builder.EndObject() +def End(builder): + return VarHandleOptionsEnd(builder) + +class VarHandleOptionsT(object): + + # VarHandleOptionsT + def __init__(self): + self.container = None # type: str + self.sharedName = None # type: str + + @classmethod + def InitFromBuf(cls, buf, pos): + varHandleOptions = VarHandleOptions() + varHandleOptions.Init(buf, pos) + return cls.InitFromObj(varHandleOptions) + + @classmethod + def InitFromObj(cls, varHandleOptions): + x = VarHandleOptionsT() + x._UnPack(varHandleOptions) + return x + + # 
VarHandleOptionsT + def _UnPack(self, varHandleOptions): + if varHandleOptions is None: + return + self.container = varHandleOptions.Container() + self.sharedName = varHandleOptions.SharedName() + + # VarHandleOptionsT + def Pack(self, builder): + if self.container is not None: + container = builder.CreateString(self.container) + if self.sharedName is not None: + sharedName = builder.CreateString(self.sharedName) + VarHandleOptionsStart(builder) + if self.container is not None: + VarHandleOptionsAddContainer(builder, container) + if self.sharedName is not None: + VarHandleOptionsAddSharedName(builder, sharedName) + varHandleOptions = VarHandleOptionsEnd(builder) + return varHandleOptions +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: tflite + +from flatbuffers.compat import import_numpy +np = import_numpy() + +class VariantSubType(object): + __slots__ = ['_tab'] + + @classmethod + def GetRootAs(cls, buf, offset=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = VariantSubType() + x.Init(buf, n + offset) + return x + + @classmethod + def GetRootAsVariantSubType(cls, buf, offset=0): + """This method is deprecated. 
Please switch to GetRootAs.""" + return cls.GetRootAs(buf, offset) + @classmethod + def VariantSubTypeBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed) + + # VariantSubType + def Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + + # VariantSubType + def Shape(self, j): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + a = self._tab.Vector(o) + return self._tab.Get(flatbuffers.number_types.Int32Flags, a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 4)) + return 0 + + # VariantSubType + def ShapeAsNumpy(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Int32Flags, o) + return 0 + + # VariantSubType + def ShapeLength(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.VectorLen(o) + return 0 + + # VariantSubType + def ShapeIsNone(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + return o == 0 + + # VariantSubType + def Type(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) + if o != 0: + return self._tab.Get(flatbuffers.number_types.Int8Flags, o + self._tab.Pos) + return 0 + + # VariantSubType + def HasRank(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8)) + if o != 0: + return bool(self._tab.Get(flatbuffers.number_types.BoolFlags, o + self._tab.Pos)) + return False + +def VariantSubTypeStart(builder): builder.StartObject(3) +def Start(builder): + return VariantSubTypeStart(builder) +def VariantSubTypeAddShape(builder, shape): builder.PrependUOffsetTRelativeSlot(0, flatbuffers.number_types.UOffsetTFlags.py_type(shape), 0) +def AddShape(builder, shape): + return VariantSubTypeAddShape(builder, shape) +def 
VariantSubTypeStartShapeVector(builder, numElems): return builder.StartVector(4, numElems, 4) +def StartShapeVector(builder, numElems): + return VariantSubTypeStartShapeVector(builder, numElems) +def VariantSubTypeAddType(builder, type): builder.PrependInt8Slot(1, type, 0) +def AddType(builder, type): + return VariantSubTypeAddType(builder, type) +def VariantSubTypeAddHasRank(builder, hasRank): builder.PrependBoolSlot(2, hasRank, 0) +def AddHasRank(builder, hasRank): + return VariantSubTypeAddHasRank(builder, hasRank) +def VariantSubTypeEnd(builder): return builder.EndObject() +def End(builder): + return VariantSubTypeEnd(builder) +try: + from typing import List +except: + pass + +class VariantSubTypeT(object): + + # VariantSubTypeT + def __init__(self): + self.shape = None # type: List[int] + self.type = 0 # type: int + self.hasRank = False # type: bool + + @classmethod + def InitFromBuf(cls, buf, pos): + variantSubType = VariantSubType() + variantSubType.Init(buf, pos) + return cls.InitFromObj(variantSubType) + + @classmethod + def InitFromObj(cls, variantSubType): + x = VariantSubTypeT() + x._UnPack(variantSubType) + return x + + # VariantSubTypeT + def _UnPack(self, variantSubType): + if variantSubType is None: + return + if not variantSubType.ShapeIsNone(): + if np is None: + self.shape = [] + for i in range(variantSubType.ShapeLength()): + self.shape.append(variantSubType.Shape(i)) + else: + self.shape = variantSubType.ShapeAsNumpy() + self.type = variantSubType.Type() + self.hasRank = variantSubType.HasRank() + + # VariantSubTypeT + def Pack(self, builder): + if self.shape is not None: + if np is not None and type(self.shape) is np.ndarray: + shape = builder.CreateNumpyVector(self.shape) + else: + VariantSubTypeStartShapeVector(builder, len(self.shape)) + for i in reversed(range(len(self.shape))): + builder.PrependInt32(self.shape[i]) + shape = builder.EndVector() + VariantSubTypeStart(builder) + if self.shape is not None: + VariantSubTypeAddShape(builder, 
shape) + VariantSubTypeAddType(builder, self.type) + VariantSubTypeAddHasRank(builder, self.hasRank) + variantSubType = VariantSubTypeEnd(builder) + return variantSubType +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: tflite + +from flatbuffers.compat import import_numpy +np = import_numpy() + +class WhereOptions(object): + __slots__ = ['_tab'] + + @classmethod + def GetRootAs(cls, buf, offset=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = WhereOptions() + x.Init(buf, n + offset) + return x + + @classmethod + def GetRootAsWhereOptions(cls, buf, offset=0): + """This method is deprecated. Please switch to GetRootAs.""" + return cls.GetRootAs(buf, offset) + @classmethod + def WhereOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed) + + # WhereOptions + def Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + +def WhereOptionsStart(builder): builder.StartObject(0) +def Start(builder): + return WhereOptionsStart(builder) +def WhereOptionsEnd(builder): return builder.EndObject() +def End(builder): + return WhereOptionsEnd(builder) + +class WhereOptionsT(object): + + # WhereOptionsT + def __init__(self): + pass + + @classmethod + def InitFromBuf(cls, buf, pos): + whereOptions = WhereOptions() + whereOptions.Init(buf, pos) + return cls.InitFromObj(whereOptions) + + @classmethod + def InitFromObj(cls, whereOptions): + x = WhereOptionsT() + x._UnPack(whereOptions) + return x + + # WhereOptionsT + def _UnPack(self, whereOptions): + if whereOptions is None: + return + + # WhereOptionsT + def Pack(self, builder): + WhereOptionsStart(builder) + whereOptions = WhereOptionsEnd(builder) + return whereOptions +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: tflite + +from flatbuffers.compat import import_numpy +np = import_numpy() + 
+class WhileOptions(object): + __slots__ = ['_tab'] + + @classmethod + def GetRootAs(cls, buf, offset=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = WhileOptions() + x.Init(buf, n + offset) + return x + + @classmethod + def GetRootAsWhileOptions(cls, buf, offset=0): + """This method is deprecated. Please switch to GetRootAs.""" + return cls.GetRootAs(buf, offset) + @classmethod + def WhileOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed) + + # WhileOptions + def Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + + # WhileOptions + def CondSubgraphIndex(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.Get(flatbuffers.number_types.Int32Flags, o + self._tab.Pos) + return 0 + + # WhileOptions + def BodySubgraphIndex(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) + if o != 0: + return self._tab.Get(flatbuffers.number_types.Int32Flags, o + self._tab.Pos) + return 0 + +def WhileOptionsStart(builder): builder.StartObject(2) +def Start(builder): + return WhileOptionsStart(builder) +def WhileOptionsAddCondSubgraphIndex(builder, condSubgraphIndex): builder.PrependInt32Slot(0, condSubgraphIndex, 0) +def AddCondSubgraphIndex(builder, condSubgraphIndex): + return WhileOptionsAddCondSubgraphIndex(builder, condSubgraphIndex) +def WhileOptionsAddBodySubgraphIndex(builder, bodySubgraphIndex): builder.PrependInt32Slot(1, bodySubgraphIndex, 0) +def AddBodySubgraphIndex(builder, bodySubgraphIndex): + return WhileOptionsAddBodySubgraphIndex(builder, bodySubgraphIndex) +def WhileOptionsEnd(builder): return builder.EndObject() +def End(builder): + return WhileOptionsEnd(builder) + +class WhileOptionsT(object): + + # WhileOptionsT + def __init__(self): + self.condSubgraphIndex = 0 # type: int + self.bodySubgraphIndex 
= 0 # type: int + + @classmethod + def InitFromBuf(cls, buf, pos): + whileOptions = WhileOptions() + whileOptions.Init(buf, pos) + return cls.InitFromObj(whileOptions) + + @classmethod + def InitFromObj(cls, whileOptions): + x = WhileOptionsT() + x._UnPack(whileOptions) + return x + + # WhileOptionsT + def _UnPack(self, whileOptions): + if whileOptions is None: + return + self.condSubgraphIndex = whileOptions.CondSubgraphIndex() + self.bodySubgraphIndex = whileOptions.BodySubgraphIndex() + + # WhileOptionsT + def Pack(self, builder): + WhileOptionsStart(builder) + WhileOptionsAddCondSubgraphIndex(builder, self.condSubgraphIndex) + WhileOptionsAddBodySubgraphIndex(builder, self.bodySubgraphIndex) + whileOptions = WhileOptionsEnd(builder) + return whileOptions +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: tflite + +from flatbuffers.compat import import_numpy +np = import_numpy() + +class ZerosLikeOptions(object): + __slots__ = ['_tab'] + + @classmethod + def GetRootAs(cls, buf, offset=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = ZerosLikeOptions() + x.Init(buf, n + offset) + return x + + @classmethod + def GetRootAsZerosLikeOptions(cls, buf, offset=0): + """This method is deprecated. 
Please switch to GetRootAs.""" + return cls.GetRootAs(buf, offset) + @classmethod + def ZerosLikeOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed) + + # ZerosLikeOptions + def Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + +def ZerosLikeOptionsStart(builder): builder.StartObject(0) +def Start(builder): + return ZerosLikeOptionsStart(builder) +def ZerosLikeOptionsEnd(builder): return builder.EndObject() +def End(builder): + return ZerosLikeOptionsEnd(builder) + +class ZerosLikeOptionsT(object): + + # ZerosLikeOptionsT + def __init__(self): + pass + + @classmethod + def InitFromBuf(cls, buf, pos): + zerosLikeOptions = ZerosLikeOptions() + zerosLikeOptions.Init(buf, pos) + return cls.InitFromObj(zerosLikeOptions) + + @classmethod + def InitFromObj(cls, zerosLikeOptions): + x = ZerosLikeOptionsT() + x._UnPack(zerosLikeOptions) + return x + + # ZerosLikeOptionsT + def _UnPack(self, zerosLikeOptions): + if zerosLikeOptions is None: + return + + # ZerosLikeOptionsT + def Pack(self, builder): + ZerosLikeOptionsStart(builder) + zerosLikeOptions = ZerosLikeOptionsEnd(builder) + return zerosLikeOptions diff --git a/tensorflow/lite/python/schema_util.py b/tensorflow/lite/python/schema_util.py new file mode 100644 index 0000000..e898a47 --- /dev/null +++ b/tensorflow/lite/python/schema_util.py @@ -0,0 +1,45 @@ +# Copyright 2020 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================== +"""Schema utilities to get builtin code from operator code.""" + +from tensorflow.python.util import all_util + + +def get_builtin_code_from_operator_code(opcode): + """Return the builtin code of the given operator code. + + The following method is introduced to resolve the op builtin code shortage + problem. The new builtin operator will be assigned to the extended builtin + code field in the flatbuffer schema. These methods help to hide builtin code + details. + + Args: + opcode: Operator code. + + Returns: + The builtin code of the given operator code. + """ + # Access BuiltinCode() method first if available. + if hasattr(opcode, 'BuiltinCode') and callable(opcode.BuiltinCode): + return max(opcode.BuiltinCode(), opcode.DeprecatedBuiltinCode()) + + return max(opcode.builtinCode, opcode.deprecatedBuiltinCode) + + +_allowed_symbols = [ + 'get_builtin_code_from_operator_code', +] + +all_util.remove_undocumented(__name__, _allowed_symbols) diff --git a/tensorflow/lite/schema/BUILD b/tensorflow/lite/schema/BUILD new file mode 100644 index 0000000..e87375a --- /dev/null +++ b/tensorflow/lite/schema/BUILD @@ -0,0 +1,39 @@ +load("@flatbuffers//:build_defs.bzl", "flatbuffer_cc_library") + +package( + default_visibility = [ + "//visibility:public", + ], + licenses = ["notice"], +) + +# Note: when wanting to generate the schema_generated.h, you must build as: +# bazel build schema_fbs_srcs. +flatbuffer_cc_library( + name = "schema_fbs", + srcs = ["schema.fbs"], +) + +# Generic schema for inference on device (but reflection makes it bigger).
+flatbuffer_cc_library( + name = "schema_fbs_with_reflection", + srcs = ["schema.fbs"], + flatc_args = [ + "--reflect-types", + "--reflect-names", + "--no-union-value-namespacing", + "--gen-object-api", + ], + out_prefix = "reflection/", +) + +cc_library( + name = "schema_utils", + srcs = ["schema_utils.cc"], + hdrs = ["schema_utils.h"], + deps = [ + ":schema_fbs", + "//tensorflow/lite/kernels/internal:compatibility", + "@flatbuffers//:runtime_cc", + ], +) diff --git a/tensorflow/lite/schema/schema.fbs b/tensorflow/lite/schema/schema.fbs new file mode 100644 index 0000000..70c7dd4 --- /dev/null +++ b/tensorflow/lite/schema/schema.fbs @@ -0,0 +1,1373 @@ +// Copyright 2017 The TensorFlow Authors. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// Revision History +// Version 0: Initial version. +// Version 1: Add subgraphs to schema. +// Version 2: Rename operators to conform to NN API. +// Version 3: Move buffer data from Model.Subgraph.Tensors to Model.Buffers. +// Version 3a: Add new builtin op code field. Has backward compatibility with +// version 3. +// Version 3b: Rename fields in SignatureDef. Has backward compatibility with +// version 3 and 3a. +// Version 3c: Move constant tensor buffers & custom op buffers outside from +// Flatbuffers. Has backward compatibility with version 3, 3a and +// 3b. + +namespace tflite; + +// This corresponds to the version. +file_identifier "TFL3"; +// File extension of any written files. 
+file_extension "tflite"; + +// IMPORTANT: All new members of tables, enums and unions must be added at the +// end to ensure backwards compatibility. + +// The type of data stored in a tensor. +enum TensorType : byte { + FLOAT32 = 0, + FLOAT16 = 1, + INT32 = 2, + UINT8 = 3, + INT64 = 4, + STRING = 5, + BOOL = 6, + INT16 = 7, + COMPLEX64 = 8, + INT8 = 9, + FLOAT64 = 10, + COMPLEX128 = 11, + UINT64 = 12, + // Experimental: Resource and variant types are experimental, that are subject + // to change. Do not implement custom kernels using resource & variant types + // now. + RESOURCE = 13, + VARIANT = 14, + UINT32 = 15, + UINT16 = 16, + INT4 = 17, +} + +// Custom quantization parameters for experimenting with new quantization +// techniques. +table CustomQuantization { + custom:[ubyte] (force_align: 16); +} + +// Represents a specific quantization technique's parameters. +union QuantizationDetails { + CustomQuantization, +} + +// Parameters for converting a quantized tensor back to float. +table QuantizationParameters { + // These four parameters are the asymmetric linear quantization parameters. + // Given a quantized value q, the corresponding float value f should be: + // f = scale * (q - zero_point) + // For other quantization types, the QuantizationDetails below is used. + min:[float]; // For importing back into tensorflow. + max:[float]; // For importing back into tensorflow. + scale:[float]; // For dequantizing the tensor's values. + zero_point:[long]; + + // If this is not none, the other quantization parameters (i.e. min, max, + // scale, zero_point fields above) are ignored and the value of the + // QuantizationDetails union should be used. + details:QuantizationDetails; + + // Specifies the dimension of the Tensor's shape that the scales and + // zero_points correspond to. 
For example, a tensor t, with dims=[4, 3, 2, 1] + // with quantization params: + // scale=[1.0, 2.0, 3.0], zero_point=[1, 2, 3], quantization_dimension=1 + // will be quantized across the second dimension of t. + // t[:, 0, :, :] will have scale[0]=1.0, zero_point[0]=1 + // t[:, 1, :, :] will have scale[1]=2.0, zero_point[1]=2 + // t[:, 2, :, :] will have scale[2]=3.0, zero_point[2]=3 + quantized_dimension:int; +} + +// Sparse tensors. +// We use a modification of the TACO format. +// Reference: http://tensor-compiler.org/kjolstad-oopsla17-tensor-compiler.pdf +// +// To encode a conceptual n-dimensional dense tensor with dims (d0, ..., dn-1), +// potentially with a k-dimensional block (0 <= k <= n) with dims +// (dn, ..., dn+k-1), the format needs to specify: +// 1. In what order to traverse these dimensions. For example, to store a 2-D +// matrix in row major order, the traversal order would be (d0, d1), +// whereas to store it in column major order, the traversal order would be +// (d1, d0). If the 2-D matrix has a 2-D inner block, the traversal order +// could be (d0, d1, d2, d3). +// 2. How each block dimension in (dn, ..., dn+k-1) maps to the original +// tensor dimension in (d0, ..., dn-1). +// 3. In the traversal order defined above, the format (dense vs. sparse) and +// index metadata for each dimension. For a dense dimension, this is just +// the size of that dimension. For a sparse dimension, it's the same as +// the compressed index defined in the Compressed Sparse Row (CSR) format. +// (http://scipy-lectures.org/advanced/scipy_sparse/csr_matrix.html) + +// The storage type for a dimension. Currently we support: +// 1. DENSE: each coordinate in this dimension is stored implicitly. +// 2. SPARSE_CSR: only the coordinates with non-zero elements are stored. The +// compression technique is the same as what CSR uses. +// More types like a sparse dimension with a different compression technique +// could be added to the list in the future.
+enum DimensionType : byte { + DENSE = 0, + SPARSE_CSR = 1, +} + +table Int32Vector { + values:[int]; +} + +table Uint16Vector { + values:[ushort] (force_align: 4); +} + +table Uint8Vector { + values:[ubyte] (force_align: 4); +} + +// Variable-typed buffer to store the index metadata for a sparse dimension. +// The widest type is Int32 instead of UInt32 because tensor's shape is a int32 +// vector. We don't want the per-dimensional index to overflow that range. +union SparseIndexVector { + Int32Vector, + Uint16Vector, + Uint8Vector +} + +table DimensionMetadata { + // Whether a dimension is dense or sparse. + format:DimensionType; + // Index metadata used for a dimension. + // - If format is DimensionType.DENSE then we use the dense_size field to + // store the size of that dimension. Each index in that dimension is + // stored implicitly. + // - If format is DimensionType.SPARSE_CSR then we use array_segments and + // array_indices to encode that dimension. array_segments represents how + // to segment the indices array, each segment corresponds to one element + // in the previous dimension. array_indices represents the index of the + // non-zero elements within this dimension (as those in the CSR matrix + // format, where the first array is row pointers and the second array is + // column indices). + dense_size:int; + array_segments:SparseIndexVector; + array_indices:SparseIndexVector; +} + +// Parameters to encode a sparse TfLite tensor. +table SparsityParameters { + // The traversal order of the dimensions defined in the `shape` field of the + // conceptual dense tensor. For a n-dimensional tensors with dims (d0, d1, + // ..., dn-1), + // - if not block sparse, the traversal_order is just a permutation of (d0, + // ..., dn-1). For example, a 2-D matrix stored in row-major order would + // have traversal_order = (d0, d1). + // - if block sparse with a k-dimensional block (0 <= k <= n), the + // traversal_order has n + k elements. 
The first n elements are still a + // permutation of (d0, ..., dn-1). The last k elements are a permutation + // of (dn, ..., dn+k-1), defining how to traverse a block internally. For + // example, a 2-D matrix with 2-D blocks, both stored in row-major order + // would have traversal_order = (d0, d1, d2, d3). + traversal_order:[int]; + // For an n-dimensional tensor with a k-dimensional block (0 <= k <= n), + // stores how a block dimension in (dn, ..., dn+k-1) maps to the original + // tensor dimension in (d0, ..., dn). + // It's stored in the order of (dn, ..., dn+k-1). + // If not block-sparse, this field is NULL. + block_map:[int]; + // In the traversal order defined above, the metadata needed for + // each dimension to locate the non-zero values in the original dense tensor. + // The size of the dim_metadata array = the size of the traversal_order array + // = n + k. + dim_metadata:[DimensionMetadata]; +} + +// The nested tensor type for VARIANT type. +table VariantSubType { + // The tensor shape. + shape:[int]; + type:TensorType; + // If false, the rank or the number of tensor dimensions is unknown. + // If false, "shape" must be []. + has_rank: bool = false; +} + +table Tensor { + // The tensor shape. The meaning of each entry is operator-specific but + // builtin ops use: [batch size, height, width, number of channels] (That's + // Tensorflow's NHWC). + shape:[int]; + type:TensorType; + // An index that refers to the buffers table at the root of the model. Or, + // if there is no data buffer associated (i.e. intermediate results), then + // this is 0 (which refers to an always existent empty buffer). + // + // The data_buffer itself is an opaque container, with the assumption that the + // target device is little-endian. In addition, all builtin operators assume + // the memory is ordered such that if `shape` is [4, 3, 2], then index + // [i, j, k] maps to data_buffer[i*3*2 + j*2 + k].
+ buffer:uint; + name:string; // For debugging and importing back into tensorflow. + quantization:QuantizationParameters; // Optional. + + is_variable:bool = false; + + // Parameters to encode a sparse tensor. See the example in + // tensorflow/lite/testdata/sparse_tensor.json. + sparsity:SparsityParameters; // Optional. + + // Encodes `shape` with unknown dimensions. Unknown dimensions are + // represented with -1. + shape_signature:[int]; // Optional. + + // If false, the rank or the number of tensor dimensions is unknown. + // If false, "shape" must be []. + has_rank: bool = false; + + // The nested Tensor types for VARIANT type. This is always empty for + // non-VARIANT types. This is optional because the nested type can be omitted. + // Currently only 1 subtype is supported. The field is defined as an array for + // flexibility of supporting multiple subtypes in the future. + variant_tensors:[VariantSubType]; +} + +// A list of builtin operators. Builtin operators are slightly faster than custom +// ones, but not by much. Moreover, while custom operators accept an opaque +// object containing configuration parameters, builtins have a predetermined +// set of acceptable options. +// LINT.IfChange +enum BuiltinOperator : int32 { + ADD = 0, + AVERAGE_POOL_2D = 1, + CONCATENATION = 2, + CONV_2D = 3, + DEPTHWISE_CONV_2D = 4, + DEPTH_TO_SPACE = 5, + DEQUANTIZE = 6, + EMBEDDING_LOOKUP = 7, + FLOOR = 8, + FULLY_CONNECTED = 9, + HASHTABLE_LOOKUP = 10, + L2_NORMALIZATION = 11, + L2_POOL_2D = 12, + LOCAL_RESPONSE_NORMALIZATION = 13, + LOGISTIC = 14, + LSH_PROJECTION = 15, + LSTM = 16, + MAX_POOL_2D = 17, + MUL = 18, + RELU = 19, + // NOTE(aselle): RELU_N1_TO_1 used to be called RELU1, but it was renamed + // since different model developers use RELU1 in different ways. Never + // create another op called RELU1. 
+ RELU_N1_TO_1 = 20, + RELU6 = 21, + RESHAPE = 22, + RESIZE_BILINEAR = 23, + RNN = 24, + SOFTMAX = 25, + SPACE_TO_DEPTH = 26, + SVDF = 27, + TANH = 28, + CONCAT_EMBEDDINGS = 29, + SKIP_GRAM = 30, + CALL = 31, + CUSTOM = 32, + EMBEDDING_LOOKUP_SPARSE = 33, + PAD = 34, + UNIDIRECTIONAL_SEQUENCE_RNN = 35, + GATHER = 36, + BATCH_TO_SPACE_ND = 37, + SPACE_TO_BATCH_ND = 38, + TRANSPOSE = 39, + MEAN = 40, + SUB = 41, + DIV = 42, + SQUEEZE = 43, + UNIDIRECTIONAL_SEQUENCE_LSTM = 44, + STRIDED_SLICE = 45, + BIDIRECTIONAL_SEQUENCE_RNN = 46, + EXP = 47, + TOPK_V2 = 48, + SPLIT = 49, + LOG_SOFTMAX = 50, + // DELEGATE is a special op type for the operations which are delegated to + // other backends. + // WARNING: Experimental interface, subject to change + DELEGATE = 51, + BIDIRECTIONAL_SEQUENCE_LSTM = 52, + CAST = 53, + PRELU = 54, + MAXIMUM = 55, + ARG_MAX = 56, + MINIMUM = 57, + LESS = 58, + NEG = 59, + PADV2 = 60, + GREATER = 61, + GREATER_EQUAL = 62, + LESS_EQUAL = 63, + SELECT = 64, + SLICE = 65, + SIN = 66, + TRANSPOSE_CONV = 67, + SPARSE_TO_DENSE = 68, + TILE = 69, + EXPAND_DIMS = 70, + EQUAL = 71, + NOT_EQUAL = 72, + LOG = 73, + SUM = 74, + SQRT = 75, + RSQRT = 76, + SHAPE = 77, + POW = 78, + ARG_MIN = 79, + FAKE_QUANT = 80, + REDUCE_PROD = 81, + REDUCE_MAX = 82, + PACK = 83, + LOGICAL_OR = 84, + ONE_HOT = 85, + LOGICAL_AND = 86, + LOGICAL_NOT = 87, + UNPACK = 88, + REDUCE_MIN = 89, + FLOOR_DIV = 90, + REDUCE_ANY = 91, + SQUARE = 92, + ZEROS_LIKE = 93, + FILL = 94, + FLOOR_MOD = 95, + RANGE = 96, + RESIZE_NEAREST_NEIGHBOR = 97, + LEAKY_RELU = 98, + SQUARED_DIFFERENCE = 99, + MIRROR_PAD = 100, + ABS = 101, + SPLIT_V = 102, + UNIQUE = 103, + CEIL = 104, + REVERSE_V2 = 105, + ADD_N = 106, + GATHER_ND = 107, + COS = 108, + WHERE = 109, + RANK = 110, + ELU = 111, + REVERSE_SEQUENCE = 112, + MATRIX_DIAG = 113, + QUANTIZE = 114, + MATRIX_SET_DIAG = 115, + ROUND = 116, + HARD_SWISH = 117, + IF = 118, + WHILE = 119, + NON_MAX_SUPPRESSION_V4 = 120, + NON_MAX_SUPPRESSION_V5 = 
121, + SCATTER_ND = 122, + SELECT_V2 = 123, + DENSIFY = 124, + SEGMENT_SUM = 125, + BATCH_MATMUL = 126, + PLACEHOLDER_FOR_GREATER_OP_CODES = 127, + CUMSUM = 128, + CALL_ONCE = 129, + BROADCAST_TO = 130, + RFFT2D = 131, + CONV_3D = 132, + IMAG=133, + REAL=134, + COMPLEX_ABS=135, + HASHTABLE = 136, + HASHTABLE_FIND = 137, + HASHTABLE_IMPORT = 138, + HASHTABLE_SIZE = 139, + REDUCE_ALL = 140, + CONV_3D_TRANSPOSE = 141, + VAR_HANDLE = 142, + READ_VARIABLE = 143, + ASSIGN_VARIABLE = 144, + BROADCAST_ARGS = 145, + RANDOM_STANDARD_NORMAL = 146, + BUCKETIZE = 147, + RANDOM_UNIFORM = 148, + MULTINOMIAL = 149, + GELU = 150, + DYNAMIC_UPDATE_SLICE = 151, + RELU_0_TO_1 = 152, + UNSORTED_SEGMENT_PROD = 153, + UNSORTED_SEGMENT_MAX = 154, + UNSORTED_SEGMENT_SUM = 155, + ATAN2 = 156, + UNSORTED_SEGMENT_MIN = 157, + SIGN = 158, + BITCAST = 159, + BITWISE_XOR = 160, + RIGHT_SHIFT = 161, +} +// LINT.ThenChange(nnapi_linter/linter.proto) + +// Options for the builtin operators. +union BuiltinOptions { + Conv2DOptions, + DepthwiseConv2DOptions, + ConcatEmbeddingsOptions, + LSHProjectionOptions, + Pool2DOptions, + SVDFOptions, + RNNOptions, + FullyConnectedOptions, + SoftmaxOptions, + ConcatenationOptions, + AddOptions, + L2NormOptions, + LocalResponseNormalizationOptions, + LSTMOptions, + ResizeBilinearOptions, + CallOptions, + ReshapeOptions, + SkipGramOptions, + SpaceToDepthOptions, + EmbeddingLookupSparseOptions, + MulOptions, + PadOptions, + GatherOptions, + BatchToSpaceNDOptions, + SpaceToBatchNDOptions, + TransposeOptions, + ReducerOptions, + SubOptions, + DivOptions, + SqueezeOptions, + SequenceRNNOptions, + StridedSliceOptions, + ExpOptions, + TopKV2Options, + SplitOptions, + LogSoftmaxOptions, + CastOptions, + DequantizeOptions, + MaximumMinimumOptions, + ArgMaxOptions, + LessOptions, + NegOptions, + PadV2Options, + GreaterOptions, + GreaterEqualOptions, + LessEqualOptions, + SelectOptions, + SliceOptions, + TransposeConvOptions, + SparseToDenseOptions, + TileOptions, + 
ExpandDimsOptions, + EqualOptions, + NotEqualOptions, + ShapeOptions, + PowOptions, + ArgMinOptions, + FakeQuantOptions, + PackOptions, + LogicalOrOptions, + OneHotOptions, + LogicalAndOptions, + LogicalNotOptions, + UnpackOptions, + FloorDivOptions, + SquareOptions, + ZerosLikeOptions, + FillOptions, + BidirectionalSequenceLSTMOptions, + BidirectionalSequenceRNNOptions, + UnidirectionalSequenceLSTMOptions, + FloorModOptions, + RangeOptions, + ResizeNearestNeighborOptions, + LeakyReluOptions, + SquaredDifferenceOptions, + MirrorPadOptions, + AbsOptions, + SplitVOptions, + UniqueOptions, + ReverseV2Options, + AddNOptions, + GatherNdOptions, + CosOptions, + WhereOptions, + RankOptions, + ReverseSequenceOptions, + MatrixDiagOptions, + QuantizeOptions, + MatrixSetDiagOptions, + HardSwishOptions, + IfOptions, + WhileOptions, + DepthToSpaceOptions, + NonMaxSuppressionV4Options, + NonMaxSuppressionV5Options, + ScatterNdOptions, + SelectV2Options, + DensifyOptions, + SegmentSumOptions, + BatchMatMulOptions, + CumsumOptions, + CallOnceOptions, + BroadcastToOptions, + Rfft2dOptions, + Conv3DOptions, + HashtableOptions, + HashtableFindOptions, + HashtableImportOptions, + HashtableSizeOptions, + VarHandleOptions, + ReadVariableOptions, + AssignVariableOptions, + RandomOptions, + BucketizeOptions, + GeluOptions, + DynamicUpdateSliceOptions, + UnsortedSegmentProdOptions, + UnsortedSegmentMaxOptions, + UnsortedSegmentMinOptions, + UnsortedSegmentSumOptions, + ATan2Options, + SignOptions, + BitcastOptions, + BitwiseXorOptions, + RightShiftOptions, +} + +// LINT.IfChange +enum Padding : byte { SAME, VALID } +// LINT.ThenChange(//tensorflow/compiler/mlir/lite/ir/tfl_op_enums.td) + +// LINT.IfChange +enum ActivationFunctionType : byte { + NONE = 0, + RELU = 1, + RELU_N1_TO_1 = 2, + RELU6 = 3, + TANH = 4, + SIGN_BIT = 5, +} +// LINT.ThenChange(//tensorflow/compiler/mlir/lite/ir/tfl_op_enums.td) + +table Conv2DOptions { + padding:Padding; + stride_w:int; + stride_h:int; + 
fused_activation_function:ActivationFunctionType; + dilation_w_factor:int = 1; + dilation_h_factor:int = 1; +} + +// Options for both Conv3D and Conv3DTranspose. +table Conv3DOptions { + padding:Padding; + stride_d:int; + stride_w:int; + stride_h:int; + fused_activation_function:ActivationFunctionType; + dilation_d_factor:int = 1; + dilation_w_factor:int = 1; + dilation_h_factor:int = 1; +} + +table Pool2DOptions { + padding:Padding; + stride_w:int; + stride_h:int; + filter_width:int; + filter_height:int; + fused_activation_function:ActivationFunctionType; +} + +table DepthwiseConv2DOptions { + // Parameters for DepthwiseConv version 1 or above. + padding:Padding; + stride_w:int; + stride_h:int; + // `depth_multiplier` is redundant. It's used by CPU kernels in + // TensorFlow 2.0 or below, but ignored in versions above. + // See comments in lite/c/builtin_op_data.h for more details. + depth_multiplier:int; + fused_activation_function:ActivationFunctionType; + // Parameters for DepthwiseConv version 2 or above. + dilation_w_factor:int = 1; + dilation_h_factor:int = 1; +} + +table ConcatEmbeddingsOptions { + num_channels:int; + num_columns_per_channel:[int]; + embedding_dim_per_channel:[int]; // This could be inferred from parameters. +} + +enum LSHProjectionType: byte { + UNKNOWN = 0, + SPARSE = 1, + DENSE = 2, +} + +table LSHProjectionOptions { + type: LSHProjectionType; +} + +table SVDFOptions { + rank:int; + fused_activation_function:ActivationFunctionType; + // For weights-only quantization, use asymmetric quantization for non + // constant inputs at evaluation time. + asymmetric_quantize_inputs:bool; +} + +// An implementation of TensorFlow RNNCell. +table RNNOptions { + fused_activation_function:ActivationFunctionType; + asymmetric_quantize_inputs:bool; +} + +// An implementation of TensorFlow dynamic_rnn with RNNCell. 
+table SequenceRNNOptions { + time_major:bool; + fused_activation_function:ActivationFunctionType; + asymmetric_quantize_inputs:bool; +} + +// An implementation of TensorFlow bidirectional_dynamic_rnn with RNNCell. +table BidirectionalSequenceRNNOptions { + time_major:bool; + fused_activation_function:ActivationFunctionType; + merge_outputs: bool; + asymmetric_quantize_inputs:bool; +} + +// LINT.IfChange +enum FullyConnectedOptionsWeightsFormat: byte { + DEFAULT = 0, + SHUFFLED4x16INT8 = 1, +} +// LINT.ThenChange(//tensorflow/compiler/mlir/lite/ir/tfl_op_enums.td) + +// An implementation of TensorFlow fully_connected (a.k.a Dense) layer. +table FullyConnectedOptions { + // Parameters for FullyConnected version 1 or above. + fused_activation_function:ActivationFunctionType; + + // Parameters for FullyConnected version 2 or above. + weights_format:FullyConnectedOptionsWeightsFormat = DEFAULT; + + // Parameters for FullyConnected version 5 or above. + // If set to true, then the number of dimensions is preserved. Furthermore, + // all but the last dimension of the input and output shapes will be equal. + keep_num_dims: bool; + + // Parameters for FullyConnected version 7 or above. + // If set to true, then weights-only op will use asymmetric quantization for + // inputs. + asymmetric_quantize_inputs: bool; +} + +table SoftmaxOptions { + beta: float; +} + +// An implementation of TensorFlow concat. +table ConcatenationOptions { + axis:int; + fused_activation_function:ActivationFunctionType; +} + +table AddOptions { + fused_activation_function:ActivationFunctionType; + // Parameters supported by version 3. + pot_scale_int16:bool = true; +} + +table MulOptions { + fused_activation_function:ActivationFunctionType; +} + +table L2NormOptions { + // This field is currently ignored in the L2 Norm Op.
+ fused_activation_function:ActivationFunctionType; +} + +table LocalResponseNormalizationOptions { + radius:int; + bias:float; + alpha:float; + beta:float; +} + +// LINT.IfChange +enum LSTMKernelType : byte { + // Full LSTM kernel which supports peephole and projection. + FULL = 0, + // Basic LSTM kernels. Equivalent to TensorFlow BasicLSTMCell. + BASIC = 1, +} +// LINT.ThenChange(//tensorflow/compiler/mlir/lite/ir/tfl_op_enums.td) + +// An implementation of TensorFlow LSTMCell and CoupledInputForgetGateLSTMCell +table LSTMOptions { + // Parameters for LSTM version 1 or above. + fused_activation_function:ActivationFunctionType; + cell_clip: float; // Optional, 0.0 means no clipping + proj_clip: float; // Optional, 0.0 means no clipping + + // Parameters for LSTM version 2 or above. + // Basic kernel is only supported in version 2 or above. + kernel_type: LSTMKernelType = FULL; + + // Parameters for LSTM version 4 or above. + asymmetric_quantize_inputs: bool; +} + +// An implementation of TensorFlow dynamic_rnn with LSTMCell. +table UnidirectionalSequenceLSTMOptions { + fused_activation_function:ActivationFunctionType; + cell_clip: float; // Optional, 0.0 means no clipping + proj_clip: float; // Optional, 0.0 means no clipping + + // If true then first dimension is sequence, otherwise batch. + time_major:bool; + + // Parameter for Unidirectional Sequence LSTM version 3. + asymmetric_quantize_inputs:bool; + + // Parameter for unidirectional sequence RNN version 4. + diagonal_recurrent_tensors:bool; +} + +table BidirectionalSequenceLSTMOptions { + // Parameters supported by version 1: + fused_activation_function:ActivationFunctionType; + cell_clip: float; // Optional, 0.0 means no clipping + proj_clip: float; // Optional, 0.0 means no clipping + + // If true, store the outputs of both directions into the first output. + merge_outputs: bool; + + // Parameters supported by version 2: + // If true then first dimension is sequence, otherwise batch. 
+ // Version 1 implementations assumed time_major to be true, so this default + // value should never change. + time_major: bool = true; + + // Parameters for version 3 or above. + asymmetric_quantize_inputs:bool; +} + +table ResizeBilinearOptions { + new_height: int (deprecated); + new_width: int (deprecated); + align_corners: bool; + half_pixel_centers: bool; +} + +table ResizeNearestNeighborOptions { + align_corners: bool; + half_pixel_centers: bool; +} + +// A call operation options +table CallOptions { + // The subgraph index that needs to be called. + subgraph:uint; +} + +table PadOptions { +} + +table PadV2Options { +} + +table ReshapeOptions { + new_shape:[int]; +} + +table SpaceToBatchNDOptions { +} + +table BatchToSpaceNDOptions { +} + +table SkipGramOptions { + ngram_size: int; + max_skip_size: int; + include_all_ngrams: bool; +} + +table SpaceToDepthOptions { + block_size: int; +} + +table DepthToSpaceOptions { + block_size: int; +} + +table SubOptions { + fused_activation_function:ActivationFunctionType; + // Parameters supported by version 5 + pot_scale_int16:bool = true; +} + +table DivOptions { + fused_activation_function:ActivationFunctionType; +} + +table TopKV2Options { +} + +enum CombinerType : byte { + SUM = 0, + MEAN = 1, + SQRTN = 2, +} + +table EmbeddingLookupSparseOptions { + combiner:CombinerType; +} + +table GatherOptions { + axis: int; + // Parameters for Gather version 5 or above. + batch_dims: int = 0; +} + +table TransposeOptions { +} + +table ExpOptions { +} + +table CosOptions { +} + +table ReducerOptions { + keep_dims: bool; +} + +table SqueezeOptions { + squeeze_dims:[int]; +} + +table SplitOptions { + num_splits: int; +} + +table SplitVOptions { + num_splits: int; +} + +table StridedSliceOptions { + begin_mask: int; + end_mask: int; + ellipsis_mask: int; + new_axis_mask: int; + shrink_axis_mask: int; + // If true, then the end tensor is an offset of the begin tensor. 
+ offset: bool; +} + +table LogSoftmaxOptions { +} + +table CastOptions { + in_data_type: TensorType; + out_data_type: TensorType; +} + +table DequantizeOptions { +} + +table MaximumMinimumOptions { +} + +table TileOptions { +} + +table ArgMaxOptions { + output_type : TensorType; +} + +table ArgMinOptions { + output_type : TensorType; +} + +table GreaterOptions { +} + +table GreaterEqualOptions { +} + +table LessOptions { +} + +table LessEqualOptions { +} + +table NegOptions { +} + +table SelectOptions { +} + +table SliceOptions { +} + +table TransposeConvOptions { + // Parameters supported by version 1, 2, 3: + padding:Padding; + stride_w:int; + stride_h:int; + + // Parameters supported by version 4: + fused_activation_function:ActivationFunctionType = NONE; +} + +table ExpandDimsOptions { +} + +table SparseToDenseOptions { + validate_indices:bool; +} + +table EqualOptions { +} + +table NotEqualOptions { +} + +table ShapeOptions { + // Optional output type of the operation (int32 or int64). Defaults to int32. + out_type : TensorType; +} + +table RankOptions { +} + +table PowOptions { +} + +table FakeQuantOptions { + // Parameters supported by version 1: + min:float; + max:float; + num_bits:int; + + // Parameters supported by version 2: + narrow_range:bool; +} + +table PackOptions { + values_count:int; + axis:int; +} + +table LogicalOrOptions { +} + +table OneHotOptions { + axis:int; +} + +table AbsOptions { +} + + +table HardSwishOptions { +} + +table LogicalAndOptions { +} + +table LogicalNotOptions { +} + +table UnpackOptions { + num:int; + axis:int; +} + +table FloorDivOptions { +} + +table SquareOptions { +} + +table ZerosLikeOptions { +} + +table FillOptions { +} + +table FloorModOptions { +} + +table RangeOptions { +} + +table LeakyReluOptions { + alpha:float; +} + +table SquaredDifferenceOptions { +} + +// LINT.IfChange +enum MirrorPadMode : byte { + // Doesn't include borders. + REFLECT = 0, + // Includes borders. 
+ SYMMETRIC = 1, +} +// LINT.ThenChange(//tensorflow/compiler/mlir/lite/ir/tfl_op_enums.td) + +table MirrorPadOptions { + mode:MirrorPadMode; +} + +table UniqueOptions { + idx_out_type:TensorType = INT32; +} + +table ReverseV2Options { +} + +table AddNOptions { +} + +table GatherNdOptions { +} + +table WhereOptions { +} + +table ReverseSequenceOptions { + seq_dim:int; + batch_dim:int = 0; +} + +table MatrixDiagOptions { +} + +table QuantizeOptions { +} + +table MatrixSetDiagOptions { +} + +table IfOptions { + then_subgraph_index:int; + else_subgraph_index:int; +} + +table CallOnceOptions { + init_subgraph_index:int; +} + +table WhileOptions { + cond_subgraph_index:int; + body_subgraph_index:int; +} + +table NonMaxSuppressionV4Options { +} + +table NonMaxSuppressionV5Options { +} + +table ScatterNdOptions { +} + +table SelectV2Options { +} + +table DensifyOptions { +} + +table SegmentSumOptions { +} + +table BatchMatMulOptions { + adj_x:bool; + adj_y:bool; + // Parameters for BatchMatMul version 4 or above. + // If set to true, then weights-only op will use asymmetric quantization for + // inputs. + asymmetric_quantize_inputs: bool; +} + +table CumsumOptions { + exclusive:bool; + reverse:bool; +} + +table BroadcastToOptions { +} + +table Rfft2dOptions { +} + +table HashtableOptions { + // The identity of hash tables. This identity will be used across different + // subgraphs in the same interpreter instance. + table_id:int; + key_dtype:TensorType; + value_dtype:TensorType; +} + +table HashtableFindOptions { +} + +table HashtableImportOptions { +} + +table HashtableSizeOptions { +} + +table VarHandleOptions { + container:string; + shared_name:string; +} + +table ReadVariableOptions { +} + +table AssignVariableOptions { +} + +table RandomOptions { + seed: long; + seed2: long; +} + +table BucketizeOptions { + boundaries: [float]; // The bucket boundaries. 
+} + +table GeluOptions { + approximate: bool; +} + +table DynamicUpdateSliceOptions { +} + +table UnsortedSegmentProdOptions { +} + +table UnsortedSegmentMaxOptions { +} + +table UnsortedSegmentSumOptions { +} + +table ATan2Options { +} + +table UnsortedSegmentMinOptions{ +} + +table SignOptions { +} + +table BitcastOptions { +} + +table BitwiseXorOptions { +} + +table RightShiftOptions { +} + +// An OperatorCode can be an enum value (BuiltinOperator) if the operator is a +// builtin, or a string if the operator is custom. +table OperatorCode { + // This field is for backward compatibility. This field will be used when + // the value of the extended builtin_code field is less than + // BuiltinOperator_PLACEHOLDER_FOR_GREATER_OP_CODES. + deprecated_builtin_code:byte; + custom_code:string; + + // The version of the operator. The version needs to be bumped whenever new + // parameters are introduced into an op. + version:int = 1; + + // This field is introduced for resolving op builtin code shortage problem + // (the original BuiltinOperator enum field was represented as a byte). + // This field will be used when the value of the extended builtin_code field + // is greater than BuiltinOperator_PLACEHOLDER_FOR_GREATER_OP_CODES. + builtin_code:BuiltinOperator; +} + +enum CustomOptionsFormat : byte { + FLEXBUFFERS = 0, +} + +// An operator takes tensors as inputs and outputs. The type of operation being +// performed is determined by an index into the list of valid OperatorCodes, +// while the specifics of each operation is configured using builtin_options +// or custom_options. +table Operator { + // Index into the operator_codes array. Using an integer here avoids + // complicated map lookups. + opcode_index:uint; + + // Optional inputs are indicated by -1.
+ inputs:[int]; + outputs:[int]; + + builtin_options:BuiltinOptions; + custom_options:[ubyte]; + custom_options_format:CustomOptionsFormat; + + // A list of booleans indicating the input tensors which are being mutated by + // this operator.(e.g. used by RNN and LSTM). + // For example, if the "inputs" array refers to 5 tensors and the second and + // fifth are mutable variables, then this list will contain + // [false, true, false, false, true]. + // + // If the list is empty, no variable is mutated in this operator. + // The list either has the same length as `inputs`, or is empty. + mutating_variable_inputs:[bool]; + + // A list of indices to the subgraph's "tensors" that are internal to an Op. + // Internal tensors are those that do not flow in or out of the operation, + // but instead are part of internal computation. As such, the operation's + // implementation may manage its memory more efficiently. They are needed + // however (i.e. not just an implementation detail) since they are part of the + // computation, which may require relevant metadata such as quantization + // parameters. + intermediates:[int]; + + // When an op is using custom_options in a model that is larger than 2GB, then + // we instead use the following attributes to find the buffer location which + // is stored outside of flatbuffers, the offset is calculated relative to the + // beginning of the file and is only valid if > 1 + custom_options_offset: ulong; + custom_options_size: ulong; +} + +// The root type, defining a subgraph, which typically represents an entire +// model. +table SubGraph { + // A list of all tensors used in this subgraph. + tensors:[Tensor]; + + // Indices of the tensors that are inputs into this subgraph. Note this is + // the list of non-static tensors that feed into the subgraph for inference. + inputs:[int]; + + // Indices of the tensors that are outputs out of this subgraph. 
Note this is + // the list of output tensors that are considered the product of the + // subgraph's inference. + outputs:[int]; + + // All operators, in execution order. + operators:[Operator]; + + // Name of this subgraph (used for debugging). + name:string; +} + +// Table of raw data buffers (used for constant tensors). Referenced by tensors +// by index. The generous alignment accommodates mmap-friendly data structures. +table Buffer { + data:[ubyte] (force_align: 16); + + // In a model that is larger than 2GB, then buffers instead uses the following + // attributes to find stored data, which is outside of flatbuffers + // the offset is calculated relative to the beginning of the file and is only + // valid if > 1. + offset: ulong; + size: ulong; +} + +table Metadata { + // A human readable string to uniquely identify a Metadata. + name:string; + // An index to the buffers table. + buffer:uint; +} + +// Map from an alias name of tensor to tensor index in the graph. +// This is used in Signature def. +table TensorMap { + // Represents the alias to use for this tensor. + name:string; + + // The actual tensor index in the primary graph, that 'name' corresponds to. + tensor_index:uint; +} + +// This corresponds to SignatureDef in Tensorflow SavedModel. +// The SignatureDef will be part of the SavedModel provided for conversion. +table SignatureDef { + // Named inputs for this signature. + inputs:[TensorMap]; + + // Named outputs for this signature. + outputs:[TensorMap]; + + // Key value which was in the Tensorflow SavedModel SignatureDef map. + signature_key:string; + + // Model tag, deprecated. + deprecated_tag:string (deprecated); + + // Index of subgraphs that corresponds to the exported method. + subgraph_index:uint; +} + +table Model { + // Version of the schema. + version:uint; + + // A list of all operator codes used in this model. This is + // kept in order because operators carry an index into this + // vector. 
+ operator_codes:[OperatorCode]; + + // All the subgraphs of the model. The 0th is assumed to be the main + // model. + subgraphs:[SubGraph]; + + // A description of the model. + description:string; + + // Buffers of the model. + // Note the 0th entry of this array must be an empty buffer (sentinel). + // This is a convention so that tensors without a buffer can provide 0 as + // their buffer. + buffers:[Buffer]; + + // Metadata about the model. Indirects into the existings buffers list. + // Deprecated, prefer to use metadata field. + metadata_buffer:[int]; + + // Metadata about the model. + metadata:[Metadata]; + + // Optional SignatureDefs for the model. + signature_defs:[SignatureDef]; +} + +root_type Model; diff --git a/tensorflow/lite/schema/schema_generated.h b/tensorflow/lite/schema/schema_generated.h new file mode 100755 index 0000000..9bcac3e --- /dev/null +++ b/tensorflow/lite/schema/schema_generated.h @@ -0,0 +1,20593 @@ +// automatically generated by the FlatBuffers compiler, do not modify + + +#ifndef FLATBUFFERS_GENERATED_SCHEMA_TFLITE_H_ +#define FLATBUFFERS_GENERATED_SCHEMA_TFLITE_H_ + +#include "flatbuffers/flatbuffers.h" + +// Ensure the included flatbuffers.h is the same version as when this file was +// generated, otherwise it may not be compatible. 
+static_assert(FLATBUFFERS_VERSION_MAJOR == 2 && + FLATBUFFERS_VERSION_MINOR == 0 && + FLATBUFFERS_VERSION_REVISION == 6, + "Non-compatible flatbuffers version included"); + +namespace tflite { + +struct CustomQuantization; +struct CustomQuantizationBuilder; +struct CustomQuantizationT; + +struct QuantizationParameters; +struct QuantizationParametersBuilder; +struct QuantizationParametersT; + +struct Int32Vector; +struct Int32VectorBuilder; +struct Int32VectorT; + +struct Uint16Vector; +struct Uint16VectorBuilder; +struct Uint16VectorT; + +struct Uint8Vector; +struct Uint8VectorBuilder; +struct Uint8VectorT; + +struct DimensionMetadata; +struct DimensionMetadataBuilder; +struct DimensionMetadataT; + +struct SparsityParameters; +struct SparsityParametersBuilder; +struct SparsityParametersT; + +struct VariantSubType; +struct VariantSubTypeBuilder; +struct VariantSubTypeT; + +struct Tensor; +struct TensorBuilder; +struct TensorT; + +struct Conv2DOptions; +struct Conv2DOptionsBuilder; +struct Conv2DOptionsT; + +struct Conv3DOptions; +struct Conv3DOptionsBuilder; +struct Conv3DOptionsT; + +struct Pool2DOptions; +struct Pool2DOptionsBuilder; +struct Pool2DOptionsT; + +struct DepthwiseConv2DOptions; +struct DepthwiseConv2DOptionsBuilder; +struct DepthwiseConv2DOptionsT; + +struct ConcatEmbeddingsOptions; +struct ConcatEmbeddingsOptionsBuilder; +struct ConcatEmbeddingsOptionsT; + +struct LSHProjectionOptions; +struct LSHProjectionOptionsBuilder; +struct LSHProjectionOptionsT; + +struct SVDFOptions; +struct SVDFOptionsBuilder; +struct SVDFOptionsT; + +struct RNNOptions; +struct RNNOptionsBuilder; +struct RNNOptionsT; + +struct SequenceRNNOptions; +struct SequenceRNNOptionsBuilder; +struct SequenceRNNOptionsT; + +struct BidirectionalSequenceRNNOptions; +struct BidirectionalSequenceRNNOptionsBuilder; +struct BidirectionalSequenceRNNOptionsT; + +struct FullyConnectedOptions; +struct FullyConnectedOptionsBuilder; +struct FullyConnectedOptionsT; + +struct SoftmaxOptions; +struct 
SoftmaxOptionsBuilder; +struct SoftmaxOptionsT; + +struct ConcatenationOptions; +struct ConcatenationOptionsBuilder; +struct ConcatenationOptionsT; + +struct AddOptions; +struct AddOptionsBuilder; +struct AddOptionsT; + +struct MulOptions; +struct MulOptionsBuilder; +struct MulOptionsT; + +struct L2NormOptions; +struct L2NormOptionsBuilder; +struct L2NormOptionsT; + +struct LocalResponseNormalizationOptions; +struct LocalResponseNormalizationOptionsBuilder; +struct LocalResponseNormalizationOptionsT; + +struct LSTMOptions; +struct LSTMOptionsBuilder; +struct LSTMOptionsT; + +struct UnidirectionalSequenceLSTMOptions; +struct UnidirectionalSequenceLSTMOptionsBuilder; +struct UnidirectionalSequenceLSTMOptionsT; + +struct BidirectionalSequenceLSTMOptions; +struct BidirectionalSequenceLSTMOptionsBuilder; +struct BidirectionalSequenceLSTMOptionsT; + +struct ResizeBilinearOptions; +struct ResizeBilinearOptionsBuilder; +struct ResizeBilinearOptionsT; + +struct ResizeNearestNeighborOptions; +struct ResizeNearestNeighborOptionsBuilder; +struct ResizeNearestNeighborOptionsT; + +struct CallOptions; +struct CallOptionsBuilder; +struct CallOptionsT; + +struct PadOptions; +struct PadOptionsBuilder; +struct PadOptionsT; + +struct PadV2Options; +struct PadV2OptionsBuilder; +struct PadV2OptionsT; + +struct ReshapeOptions; +struct ReshapeOptionsBuilder; +struct ReshapeOptionsT; + +struct SpaceToBatchNDOptions; +struct SpaceToBatchNDOptionsBuilder; +struct SpaceToBatchNDOptionsT; + +struct BatchToSpaceNDOptions; +struct BatchToSpaceNDOptionsBuilder; +struct BatchToSpaceNDOptionsT; + +struct SkipGramOptions; +struct SkipGramOptionsBuilder; +struct SkipGramOptionsT; + +struct SpaceToDepthOptions; +struct SpaceToDepthOptionsBuilder; +struct SpaceToDepthOptionsT; + +struct DepthToSpaceOptions; +struct DepthToSpaceOptionsBuilder; +struct DepthToSpaceOptionsT; + +struct SubOptions; +struct SubOptionsBuilder; +struct SubOptionsT; + +struct DivOptions; +struct DivOptionsBuilder; +struct 
DivOptionsT; + +struct TopKV2Options; +struct TopKV2OptionsBuilder; +struct TopKV2OptionsT; + +struct EmbeddingLookupSparseOptions; +struct EmbeddingLookupSparseOptionsBuilder; +struct EmbeddingLookupSparseOptionsT; + +struct GatherOptions; +struct GatherOptionsBuilder; +struct GatherOptionsT; + +struct TransposeOptions; +struct TransposeOptionsBuilder; +struct TransposeOptionsT; + +struct ExpOptions; +struct ExpOptionsBuilder; +struct ExpOptionsT; + +struct CosOptions; +struct CosOptionsBuilder; +struct CosOptionsT; + +struct ReducerOptions; +struct ReducerOptionsBuilder; +struct ReducerOptionsT; + +struct SqueezeOptions; +struct SqueezeOptionsBuilder; +struct SqueezeOptionsT; + +struct SplitOptions; +struct SplitOptionsBuilder; +struct SplitOptionsT; + +struct SplitVOptions; +struct SplitVOptionsBuilder; +struct SplitVOptionsT; + +struct StridedSliceOptions; +struct StridedSliceOptionsBuilder; +struct StridedSliceOptionsT; + +struct LogSoftmaxOptions; +struct LogSoftmaxOptionsBuilder; +struct LogSoftmaxOptionsT; + +struct CastOptions; +struct CastOptionsBuilder; +struct CastOptionsT; + +struct DequantizeOptions; +struct DequantizeOptionsBuilder; +struct DequantizeOptionsT; + +struct MaximumMinimumOptions; +struct MaximumMinimumOptionsBuilder; +struct MaximumMinimumOptionsT; + +struct TileOptions; +struct TileOptionsBuilder; +struct TileOptionsT; + +struct ArgMaxOptions; +struct ArgMaxOptionsBuilder; +struct ArgMaxOptionsT; + +struct ArgMinOptions; +struct ArgMinOptionsBuilder; +struct ArgMinOptionsT; + +struct GreaterOptions; +struct GreaterOptionsBuilder; +struct GreaterOptionsT; + +struct GreaterEqualOptions; +struct GreaterEqualOptionsBuilder; +struct GreaterEqualOptionsT; + +struct LessOptions; +struct LessOptionsBuilder; +struct LessOptionsT; + +struct LessEqualOptions; +struct LessEqualOptionsBuilder; +struct LessEqualOptionsT; + +struct NegOptions; +struct NegOptionsBuilder; +struct NegOptionsT; + +struct SelectOptions; +struct SelectOptionsBuilder; 
+struct SelectOptionsT; + +struct SliceOptions; +struct SliceOptionsBuilder; +struct SliceOptionsT; + +struct TransposeConvOptions; +struct TransposeConvOptionsBuilder; +struct TransposeConvOptionsT; + +struct ExpandDimsOptions; +struct ExpandDimsOptionsBuilder; +struct ExpandDimsOptionsT; + +struct SparseToDenseOptions; +struct SparseToDenseOptionsBuilder; +struct SparseToDenseOptionsT; + +struct EqualOptions; +struct EqualOptionsBuilder; +struct EqualOptionsT; + +struct NotEqualOptions; +struct NotEqualOptionsBuilder; +struct NotEqualOptionsT; + +struct ShapeOptions; +struct ShapeOptionsBuilder; +struct ShapeOptionsT; + +struct RankOptions; +struct RankOptionsBuilder; +struct RankOptionsT; + +struct PowOptions; +struct PowOptionsBuilder; +struct PowOptionsT; + +struct FakeQuantOptions; +struct FakeQuantOptionsBuilder; +struct FakeQuantOptionsT; + +struct PackOptions; +struct PackOptionsBuilder; +struct PackOptionsT; + +struct LogicalOrOptions; +struct LogicalOrOptionsBuilder; +struct LogicalOrOptionsT; + +struct OneHotOptions; +struct OneHotOptionsBuilder; +struct OneHotOptionsT; + +struct AbsOptions; +struct AbsOptionsBuilder; +struct AbsOptionsT; + +struct HardSwishOptions; +struct HardSwishOptionsBuilder; +struct HardSwishOptionsT; + +struct LogicalAndOptions; +struct LogicalAndOptionsBuilder; +struct LogicalAndOptionsT; + +struct LogicalNotOptions; +struct LogicalNotOptionsBuilder; +struct LogicalNotOptionsT; + +struct UnpackOptions; +struct UnpackOptionsBuilder; +struct UnpackOptionsT; + +struct FloorDivOptions; +struct FloorDivOptionsBuilder; +struct FloorDivOptionsT; + +struct SquareOptions; +struct SquareOptionsBuilder; +struct SquareOptionsT; + +struct ZerosLikeOptions; +struct ZerosLikeOptionsBuilder; +struct ZerosLikeOptionsT; + +struct FillOptions; +struct FillOptionsBuilder; +struct FillOptionsT; + +struct FloorModOptions; +struct FloorModOptionsBuilder; +struct FloorModOptionsT; + +struct RangeOptions; +struct RangeOptionsBuilder; +struct 
RangeOptionsT; + +struct LeakyReluOptions; +struct LeakyReluOptionsBuilder; +struct LeakyReluOptionsT; + +struct SquaredDifferenceOptions; +struct SquaredDifferenceOptionsBuilder; +struct SquaredDifferenceOptionsT; + +struct MirrorPadOptions; +struct MirrorPadOptionsBuilder; +struct MirrorPadOptionsT; + +struct UniqueOptions; +struct UniqueOptionsBuilder; +struct UniqueOptionsT; + +struct ReverseV2Options; +struct ReverseV2OptionsBuilder; +struct ReverseV2OptionsT; + +struct AddNOptions; +struct AddNOptionsBuilder; +struct AddNOptionsT; + +struct GatherNdOptions; +struct GatherNdOptionsBuilder; +struct GatherNdOptionsT; + +struct WhereOptions; +struct WhereOptionsBuilder; +struct WhereOptionsT; + +struct ReverseSequenceOptions; +struct ReverseSequenceOptionsBuilder; +struct ReverseSequenceOptionsT; + +struct MatrixDiagOptions; +struct MatrixDiagOptionsBuilder; +struct MatrixDiagOptionsT; + +struct QuantizeOptions; +struct QuantizeOptionsBuilder; +struct QuantizeOptionsT; + +struct MatrixSetDiagOptions; +struct MatrixSetDiagOptionsBuilder; +struct MatrixSetDiagOptionsT; + +struct IfOptions; +struct IfOptionsBuilder; +struct IfOptionsT; + +struct CallOnceOptions; +struct CallOnceOptionsBuilder; +struct CallOnceOptionsT; + +struct WhileOptions; +struct WhileOptionsBuilder; +struct WhileOptionsT; + +struct NonMaxSuppressionV4Options; +struct NonMaxSuppressionV4OptionsBuilder; +struct NonMaxSuppressionV4OptionsT; + +struct NonMaxSuppressionV5Options; +struct NonMaxSuppressionV5OptionsBuilder; +struct NonMaxSuppressionV5OptionsT; + +struct ScatterNdOptions; +struct ScatterNdOptionsBuilder; +struct ScatterNdOptionsT; + +struct SelectV2Options; +struct SelectV2OptionsBuilder; +struct SelectV2OptionsT; + +struct DensifyOptions; +struct DensifyOptionsBuilder; +struct DensifyOptionsT; + +struct SegmentSumOptions; +struct SegmentSumOptionsBuilder; +struct SegmentSumOptionsT; + +struct BatchMatMulOptions; +struct BatchMatMulOptionsBuilder; +struct BatchMatMulOptionsT; + +struct 
CumsumOptions; +struct CumsumOptionsBuilder; +struct CumsumOptionsT; + +struct BroadcastToOptions; +struct BroadcastToOptionsBuilder; +struct BroadcastToOptionsT; + +struct Rfft2dOptions; +struct Rfft2dOptionsBuilder; +struct Rfft2dOptionsT; + +struct HashtableOptions; +struct HashtableOptionsBuilder; +struct HashtableOptionsT; + +struct HashtableFindOptions; +struct HashtableFindOptionsBuilder; +struct HashtableFindOptionsT; + +struct HashtableImportOptions; +struct HashtableImportOptionsBuilder; +struct HashtableImportOptionsT; + +struct HashtableSizeOptions; +struct HashtableSizeOptionsBuilder; +struct HashtableSizeOptionsT; + +struct VarHandleOptions; +struct VarHandleOptionsBuilder; +struct VarHandleOptionsT; + +struct ReadVariableOptions; +struct ReadVariableOptionsBuilder; +struct ReadVariableOptionsT; + +struct AssignVariableOptions; +struct AssignVariableOptionsBuilder; +struct AssignVariableOptionsT; + +struct RandomOptions; +struct RandomOptionsBuilder; +struct RandomOptionsT; + +struct BucketizeOptions; +struct BucketizeOptionsBuilder; +struct BucketizeOptionsT; + +struct GeluOptions; +struct GeluOptionsBuilder; +struct GeluOptionsT; + +struct DynamicUpdateSliceOptions; +struct DynamicUpdateSliceOptionsBuilder; +struct DynamicUpdateSliceOptionsT; + +struct UnsortedSegmentProdOptions; +struct UnsortedSegmentProdOptionsBuilder; +struct UnsortedSegmentProdOptionsT; + +struct UnsortedSegmentMaxOptions; +struct UnsortedSegmentMaxOptionsBuilder; +struct UnsortedSegmentMaxOptionsT; + +struct UnsortedSegmentSumOptions; +struct UnsortedSegmentSumOptionsBuilder; +struct UnsortedSegmentSumOptionsT; + +struct ATan2Options; +struct ATan2OptionsBuilder; +struct ATan2OptionsT; + +struct UnsortedSegmentMinOptions; +struct UnsortedSegmentMinOptionsBuilder; +struct UnsortedSegmentMinOptionsT; + +struct SignOptions; +struct SignOptionsBuilder; +struct SignOptionsT; + +struct BitcastOptions; +struct BitcastOptionsBuilder; +struct BitcastOptionsT; + +struct 
BitwiseXorOptions; +struct BitwiseXorOptionsBuilder; +struct BitwiseXorOptionsT; + +struct RightShiftOptions; +struct RightShiftOptionsBuilder; +struct RightShiftOptionsT; + +struct OperatorCode; +struct OperatorCodeBuilder; +struct OperatorCodeT; + +struct Operator; +struct OperatorBuilder; +struct OperatorT; + +struct SubGraph; +struct SubGraphBuilder; +struct SubGraphT; + +struct Buffer; +struct BufferBuilder; +struct BufferT; + +struct Metadata; +struct MetadataBuilder; +struct MetadataT; + +struct TensorMap; +struct TensorMapBuilder; +struct TensorMapT; + +struct SignatureDef; +struct SignatureDefBuilder; +struct SignatureDefT; + +struct Model; +struct ModelBuilder; +struct ModelT; + +enum TensorType : int8_t { + TensorType_FLOAT32 = 0, + TensorType_FLOAT16 = 1, + TensorType_INT32 = 2, + TensorType_UINT8 = 3, + TensorType_INT64 = 4, + TensorType_STRING = 5, + TensorType_BOOL = 6, + TensorType_INT16 = 7, + TensorType_COMPLEX64 = 8, + TensorType_INT8 = 9, + TensorType_FLOAT64 = 10, + TensorType_COMPLEX128 = 11, + TensorType_UINT64 = 12, + TensorType_RESOURCE = 13, + TensorType_VARIANT = 14, + TensorType_UINT32 = 15, + TensorType_UINT16 = 16, + TensorType_INT4 = 17, + TensorType_MIN = TensorType_FLOAT32, + TensorType_MAX = TensorType_INT4 +}; + +inline const TensorType (&EnumValuesTensorType())[18] { + static const TensorType values[] = { + TensorType_FLOAT32, + TensorType_FLOAT16, + TensorType_INT32, + TensorType_UINT8, + TensorType_INT64, + TensorType_STRING, + TensorType_BOOL, + TensorType_INT16, + TensorType_COMPLEX64, + TensorType_INT8, + TensorType_FLOAT64, + TensorType_COMPLEX128, + TensorType_UINT64, + TensorType_RESOURCE, + TensorType_VARIANT, + TensorType_UINT32, + TensorType_UINT16, + TensorType_INT4 + }; + return values; +} + +inline const char * const *EnumNamesTensorType() { + static const char * const names[19] = { + "FLOAT32", + "FLOAT16", + "INT32", + "UINT8", + "INT64", + "STRING", + "BOOL", + "INT16", + "COMPLEX64", + "INT8", + "FLOAT64", + 
"COMPLEX128", + "UINT64", + "RESOURCE", + "VARIANT", + "UINT32", + "UINT16", + "INT4", + nullptr + }; + return names; +} + +inline const char *EnumNameTensorType(TensorType e) { + if (flatbuffers::IsOutRange(e, TensorType_FLOAT32, TensorType_INT4)) return ""; + const size_t index = static_cast(e); + return EnumNamesTensorType()[index]; +} + +enum QuantizationDetails : uint8_t { + QuantizationDetails_NONE = 0, + QuantizationDetails_CustomQuantization = 1, + QuantizationDetails_MIN = QuantizationDetails_NONE, + QuantizationDetails_MAX = QuantizationDetails_CustomQuantization +}; + +inline const QuantizationDetails (&EnumValuesQuantizationDetails())[2] { + static const QuantizationDetails values[] = { + QuantizationDetails_NONE, + QuantizationDetails_CustomQuantization + }; + return values; +} + +inline const char * const *EnumNamesQuantizationDetails() { + static const char * const names[3] = { + "NONE", + "CustomQuantization", + nullptr + }; + return names; +} + +inline const char *EnumNameQuantizationDetails(QuantizationDetails e) { + if (flatbuffers::IsOutRange(e, QuantizationDetails_NONE, QuantizationDetails_CustomQuantization)) return ""; + const size_t index = static_cast(e); + return EnumNamesQuantizationDetails()[index]; +} + +template struct QuantizationDetailsTraits { + static const QuantizationDetails enum_value = QuantizationDetails_NONE; +}; + +template<> struct QuantizationDetailsTraits { + static const QuantizationDetails enum_value = QuantizationDetails_CustomQuantization; +}; + +template struct QuantizationDetailsUnionTraits { + static const QuantizationDetails enum_value = QuantizationDetails_NONE; +}; + +template<> struct QuantizationDetailsUnionTraits { + static const QuantizationDetails enum_value = QuantizationDetails_CustomQuantization; +}; + +struct QuantizationDetailsUnion { + QuantizationDetails type; + void *value; + + QuantizationDetailsUnion() : type(QuantizationDetails_NONE), value(nullptr) {} + 
QuantizationDetailsUnion(QuantizationDetailsUnion&& u) FLATBUFFERS_NOEXCEPT : + type(QuantizationDetails_NONE), value(nullptr) + { std::swap(type, u.type); std::swap(value, u.value); } + QuantizationDetailsUnion(const QuantizationDetailsUnion &); + QuantizationDetailsUnion &operator=(const QuantizationDetailsUnion &u) + { QuantizationDetailsUnion t(u); std::swap(type, t.type); std::swap(value, t.value); return *this; } + QuantizationDetailsUnion &operator=(QuantizationDetailsUnion &&u) FLATBUFFERS_NOEXCEPT + { std::swap(type, u.type); std::swap(value, u.value); return *this; } + ~QuantizationDetailsUnion() { Reset(); } + + void Reset(); + + template + void Set(T&& val) { + typedef typename std::remove_reference::type RT; + Reset(); + type = QuantizationDetailsUnionTraits::enum_value; + if (type != QuantizationDetails_NONE) { + value = new RT(std::forward(val)); + } + } + + static void *UnPack(const void *obj, QuantizationDetails type, const flatbuffers::resolver_function_t *resolver); + flatbuffers::Offset Pack(flatbuffers::FlatBufferBuilder &_fbb, const flatbuffers::rehasher_function_t *_rehasher = nullptr) const; + + tflite::CustomQuantizationT *AsCustomQuantization() { + return type == QuantizationDetails_CustomQuantization ? + reinterpret_cast(value) : nullptr; + } + const tflite::CustomQuantizationT *AsCustomQuantization() const { + return type == QuantizationDetails_CustomQuantization ? 
+ reinterpret_cast(value) : nullptr; + } +}; + +bool VerifyQuantizationDetails(flatbuffers::Verifier &verifier, const void *obj, QuantizationDetails type); +bool VerifyQuantizationDetailsVector(flatbuffers::Verifier &verifier, const flatbuffers::Vector> *values, const flatbuffers::Vector *types); + +enum DimensionType : int8_t { + DimensionType_DENSE = 0, + DimensionType_SPARSE_CSR = 1, + DimensionType_MIN = DimensionType_DENSE, + DimensionType_MAX = DimensionType_SPARSE_CSR +}; + +inline const DimensionType (&EnumValuesDimensionType())[2] { + static const DimensionType values[] = { + DimensionType_DENSE, + DimensionType_SPARSE_CSR + }; + return values; +} + +inline const char * const *EnumNamesDimensionType() { + static const char * const names[3] = { + "DENSE", + "SPARSE_CSR", + nullptr + }; + return names; +} + +inline const char *EnumNameDimensionType(DimensionType e) { + if (flatbuffers::IsOutRange(e, DimensionType_DENSE, DimensionType_SPARSE_CSR)) return ""; + const size_t index = static_cast(e); + return EnumNamesDimensionType()[index]; +} + +enum SparseIndexVector : uint8_t { + SparseIndexVector_NONE = 0, + SparseIndexVector_Int32Vector = 1, + SparseIndexVector_Uint16Vector = 2, + SparseIndexVector_Uint8Vector = 3, + SparseIndexVector_MIN = SparseIndexVector_NONE, + SparseIndexVector_MAX = SparseIndexVector_Uint8Vector +}; + +inline const SparseIndexVector (&EnumValuesSparseIndexVector())[4] { + static const SparseIndexVector values[] = { + SparseIndexVector_NONE, + SparseIndexVector_Int32Vector, + SparseIndexVector_Uint16Vector, + SparseIndexVector_Uint8Vector + }; + return values; +} + +inline const char * const *EnumNamesSparseIndexVector() { + static const char * const names[5] = { + "NONE", + "Int32Vector", + "Uint16Vector", + "Uint8Vector", + nullptr + }; + return names; +} + +inline const char *EnumNameSparseIndexVector(SparseIndexVector e) { + if (flatbuffers::IsOutRange(e, SparseIndexVector_NONE, SparseIndexVector_Uint8Vector)) return ""; + const 
size_t index = static_cast(e); + return EnumNamesSparseIndexVector()[index]; +} + +template struct SparseIndexVectorTraits { + static const SparseIndexVector enum_value = SparseIndexVector_NONE; +}; + +template<> struct SparseIndexVectorTraits { + static const SparseIndexVector enum_value = SparseIndexVector_Int32Vector; +}; + +template<> struct SparseIndexVectorTraits { + static const SparseIndexVector enum_value = SparseIndexVector_Uint16Vector; +}; + +template<> struct SparseIndexVectorTraits { + static const SparseIndexVector enum_value = SparseIndexVector_Uint8Vector; +}; + +template struct SparseIndexVectorUnionTraits { + static const SparseIndexVector enum_value = SparseIndexVector_NONE; +}; + +template<> struct SparseIndexVectorUnionTraits { + static const SparseIndexVector enum_value = SparseIndexVector_Int32Vector; +}; + +template<> struct SparseIndexVectorUnionTraits { + static const SparseIndexVector enum_value = SparseIndexVector_Uint16Vector; +}; + +template<> struct SparseIndexVectorUnionTraits { + static const SparseIndexVector enum_value = SparseIndexVector_Uint8Vector; +}; + +struct SparseIndexVectorUnion { + SparseIndexVector type; + void *value; + + SparseIndexVectorUnion() : type(SparseIndexVector_NONE), value(nullptr) {} + SparseIndexVectorUnion(SparseIndexVectorUnion&& u) FLATBUFFERS_NOEXCEPT : + type(SparseIndexVector_NONE), value(nullptr) + { std::swap(type, u.type); std::swap(value, u.value); } + SparseIndexVectorUnion(const SparseIndexVectorUnion &); + SparseIndexVectorUnion &operator=(const SparseIndexVectorUnion &u) + { SparseIndexVectorUnion t(u); std::swap(type, t.type); std::swap(value, t.value); return *this; } + SparseIndexVectorUnion &operator=(SparseIndexVectorUnion &&u) FLATBUFFERS_NOEXCEPT + { std::swap(type, u.type); std::swap(value, u.value); return *this; } + ~SparseIndexVectorUnion() { Reset(); } + + void Reset(); + + template + void Set(T&& val) { + typedef typename std::remove_reference::type RT; + Reset(); + type = 
SparseIndexVectorUnionTraits::enum_value; + if (type != SparseIndexVector_NONE) { + value = new RT(std::forward(val)); + } + } + + static void *UnPack(const void *obj, SparseIndexVector type, const flatbuffers::resolver_function_t *resolver); + flatbuffers::Offset Pack(flatbuffers::FlatBufferBuilder &_fbb, const flatbuffers::rehasher_function_t *_rehasher = nullptr) const; + + tflite::Int32VectorT *AsInt32Vector() { + return type == SparseIndexVector_Int32Vector ? + reinterpret_cast(value) : nullptr; + } + const tflite::Int32VectorT *AsInt32Vector() const { + return type == SparseIndexVector_Int32Vector ? + reinterpret_cast(value) : nullptr; + } + tflite::Uint16VectorT *AsUint16Vector() { + return type == SparseIndexVector_Uint16Vector ? + reinterpret_cast(value) : nullptr; + } + const tflite::Uint16VectorT *AsUint16Vector() const { + return type == SparseIndexVector_Uint16Vector ? + reinterpret_cast(value) : nullptr; + } + tflite::Uint8VectorT *AsUint8Vector() { + return type == SparseIndexVector_Uint8Vector ? + reinterpret_cast(value) : nullptr; + } + const tflite::Uint8VectorT *AsUint8Vector() const { + return type == SparseIndexVector_Uint8Vector ? 
+ reinterpret_cast(value) : nullptr; + } +}; + +bool VerifySparseIndexVector(flatbuffers::Verifier &verifier, const void *obj, SparseIndexVector type); +bool VerifySparseIndexVectorVector(flatbuffers::Verifier &verifier, const flatbuffers::Vector> *values, const flatbuffers::Vector *types); + +enum BuiltinOperator : int32_t { + BuiltinOperator_ADD = 0, + BuiltinOperator_AVERAGE_POOL_2D = 1, + BuiltinOperator_CONCATENATION = 2, + BuiltinOperator_CONV_2D = 3, + BuiltinOperator_DEPTHWISE_CONV_2D = 4, + BuiltinOperator_DEPTH_TO_SPACE = 5, + BuiltinOperator_DEQUANTIZE = 6, + BuiltinOperator_EMBEDDING_LOOKUP = 7, + BuiltinOperator_FLOOR = 8, + BuiltinOperator_FULLY_CONNECTED = 9, + BuiltinOperator_HASHTABLE_LOOKUP = 10, + BuiltinOperator_L2_NORMALIZATION = 11, + BuiltinOperator_L2_POOL_2D = 12, + BuiltinOperator_LOCAL_RESPONSE_NORMALIZATION = 13, + BuiltinOperator_LOGISTIC = 14, + BuiltinOperator_LSH_PROJECTION = 15, + BuiltinOperator_LSTM = 16, + BuiltinOperator_MAX_POOL_2D = 17, + BuiltinOperator_MUL = 18, + BuiltinOperator_RELU = 19, + BuiltinOperator_RELU_N1_TO_1 = 20, + BuiltinOperator_RELU6 = 21, + BuiltinOperator_RESHAPE = 22, + BuiltinOperator_RESIZE_BILINEAR = 23, + BuiltinOperator_RNN = 24, + BuiltinOperator_SOFTMAX = 25, + BuiltinOperator_SPACE_TO_DEPTH = 26, + BuiltinOperator_SVDF = 27, + BuiltinOperator_TANH = 28, + BuiltinOperator_CONCAT_EMBEDDINGS = 29, + BuiltinOperator_SKIP_GRAM = 30, + BuiltinOperator_CALL = 31, + BuiltinOperator_CUSTOM = 32, + BuiltinOperator_EMBEDDING_LOOKUP_SPARSE = 33, + BuiltinOperator_PAD = 34, + BuiltinOperator_UNIDIRECTIONAL_SEQUENCE_RNN = 35, + BuiltinOperator_GATHER = 36, + BuiltinOperator_BATCH_TO_SPACE_ND = 37, + BuiltinOperator_SPACE_TO_BATCH_ND = 38, + BuiltinOperator_TRANSPOSE = 39, + BuiltinOperator_MEAN = 40, + BuiltinOperator_SUB = 41, + BuiltinOperator_DIV = 42, + BuiltinOperator_SQUEEZE = 43, + BuiltinOperator_UNIDIRECTIONAL_SEQUENCE_LSTM = 44, + BuiltinOperator_STRIDED_SLICE = 45, + 
BuiltinOperator_BIDIRECTIONAL_SEQUENCE_RNN = 46, + BuiltinOperator_EXP = 47, + BuiltinOperator_TOPK_V2 = 48, + BuiltinOperator_SPLIT = 49, + BuiltinOperator_LOG_SOFTMAX = 50, + BuiltinOperator_DELEGATE = 51, + BuiltinOperator_BIDIRECTIONAL_SEQUENCE_LSTM = 52, + BuiltinOperator_CAST = 53, + BuiltinOperator_PRELU = 54, + BuiltinOperator_MAXIMUM = 55, + BuiltinOperator_ARG_MAX = 56, + BuiltinOperator_MINIMUM = 57, + BuiltinOperator_LESS = 58, + BuiltinOperator_NEG = 59, + BuiltinOperator_PADV2 = 60, + BuiltinOperator_GREATER = 61, + BuiltinOperator_GREATER_EQUAL = 62, + BuiltinOperator_LESS_EQUAL = 63, + BuiltinOperator_SELECT = 64, + BuiltinOperator_SLICE = 65, + BuiltinOperator_SIN = 66, + BuiltinOperator_TRANSPOSE_CONV = 67, + BuiltinOperator_SPARSE_TO_DENSE = 68, + BuiltinOperator_TILE = 69, + BuiltinOperator_EXPAND_DIMS = 70, + BuiltinOperator_EQUAL = 71, + BuiltinOperator_NOT_EQUAL = 72, + BuiltinOperator_LOG = 73, + BuiltinOperator_SUM = 74, + BuiltinOperator_SQRT = 75, + BuiltinOperator_RSQRT = 76, + BuiltinOperator_SHAPE = 77, + BuiltinOperator_POW = 78, + BuiltinOperator_ARG_MIN = 79, + BuiltinOperator_FAKE_QUANT = 80, + BuiltinOperator_REDUCE_PROD = 81, + BuiltinOperator_REDUCE_MAX = 82, + BuiltinOperator_PACK = 83, + BuiltinOperator_LOGICAL_OR = 84, + BuiltinOperator_ONE_HOT = 85, + BuiltinOperator_LOGICAL_AND = 86, + BuiltinOperator_LOGICAL_NOT = 87, + BuiltinOperator_UNPACK = 88, + BuiltinOperator_REDUCE_MIN = 89, + BuiltinOperator_FLOOR_DIV = 90, + BuiltinOperator_REDUCE_ANY = 91, + BuiltinOperator_SQUARE = 92, + BuiltinOperator_ZEROS_LIKE = 93, + BuiltinOperator_FILL = 94, + BuiltinOperator_FLOOR_MOD = 95, + BuiltinOperator_RANGE = 96, + BuiltinOperator_RESIZE_NEAREST_NEIGHBOR = 97, + BuiltinOperator_LEAKY_RELU = 98, + BuiltinOperator_SQUARED_DIFFERENCE = 99, + BuiltinOperator_MIRROR_PAD = 100, + BuiltinOperator_ABS = 101, + BuiltinOperator_SPLIT_V = 102, + BuiltinOperator_UNIQUE = 103, + BuiltinOperator_CEIL = 104, + BuiltinOperator_REVERSE_V2 = 105, 
+ BuiltinOperator_ADD_N = 106, + BuiltinOperator_GATHER_ND = 107, + BuiltinOperator_COS = 108, + BuiltinOperator_WHERE = 109, + BuiltinOperator_RANK = 110, + BuiltinOperator_ELU = 111, + BuiltinOperator_REVERSE_SEQUENCE = 112, + BuiltinOperator_MATRIX_DIAG = 113, + BuiltinOperator_QUANTIZE = 114, + BuiltinOperator_MATRIX_SET_DIAG = 115, + BuiltinOperator_ROUND = 116, + BuiltinOperator_HARD_SWISH = 117, + BuiltinOperator_IF = 118, + BuiltinOperator_WHILE = 119, + BuiltinOperator_NON_MAX_SUPPRESSION_V4 = 120, + BuiltinOperator_NON_MAX_SUPPRESSION_V5 = 121, + BuiltinOperator_SCATTER_ND = 122, + BuiltinOperator_SELECT_V2 = 123, + BuiltinOperator_DENSIFY = 124, + BuiltinOperator_SEGMENT_SUM = 125, + BuiltinOperator_BATCH_MATMUL = 126, + BuiltinOperator_PLACEHOLDER_FOR_GREATER_OP_CODES = 127, + BuiltinOperator_CUMSUM = 128, + BuiltinOperator_CALL_ONCE = 129, + BuiltinOperator_BROADCAST_TO = 130, + BuiltinOperator_RFFT2D = 131, + BuiltinOperator_CONV_3D = 132, + BuiltinOperator_IMAG = 133, + BuiltinOperator_REAL = 134, + BuiltinOperator_COMPLEX_ABS = 135, + BuiltinOperator_HASHTABLE = 136, + BuiltinOperator_HASHTABLE_FIND = 137, + BuiltinOperator_HASHTABLE_IMPORT = 138, + BuiltinOperator_HASHTABLE_SIZE = 139, + BuiltinOperator_REDUCE_ALL = 140, + BuiltinOperator_CONV_3D_TRANSPOSE = 141, + BuiltinOperator_VAR_HANDLE = 142, + BuiltinOperator_READ_VARIABLE = 143, + BuiltinOperator_ASSIGN_VARIABLE = 144, + BuiltinOperator_BROADCAST_ARGS = 145, + BuiltinOperator_RANDOM_STANDARD_NORMAL = 146, + BuiltinOperator_BUCKETIZE = 147, + BuiltinOperator_RANDOM_UNIFORM = 148, + BuiltinOperator_MULTINOMIAL = 149, + BuiltinOperator_GELU = 150, + BuiltinOperator_DYNAMIC_UPDATE_SLICE = 151, + BuiltinOperator_RELU_0_TO_1 = 152, + BuiltinOperator_UNSORTED_SEGMENT_PROD = 153, + BuiltinOperator_UNSORTED_SEGMENT_MAX = 154, + BuiltinOperator_UNSORTED_SEGMENT_SUM = 155, + BuiltinOperator_ATAN2 = 156, + BuiltinOperator_UNSORTED_SEGMENT_MIN = 157, + BuiltinOperator_SIGN = 158, + 
BuiltinOperator_BITCAST = 159, + BuiltinOperator_BITWISE_XOR = 160, + BuiltinOperator_RIGHT_SHIFT = 161, + BuiltinOperator_MIN = BuiltinOperator_ADD, + BuiltinOperator_MAX = BuiltinOperator_RIGHT_SHIFT +}; + +inline const BuiltinOperator (&EnumValuesBuiltinOperator())[162] { + static const BuiltinOperator values[] = { + BuiltinOperator_ADD, + BuiltinOperator_AVERAGE_POOL_2D, + BuiltinOperator_CONCATENATION, + BuiltinOperator_CONV_2D, + BuiltinOperator_DEPTHWISE_CONV_2D, + BuiltinOperator_DEPTH_TO_SPACE, + BuiltinOperator_DEQUANTIZE, + BuiltinOperator_EMBEDDING_LOOKUP, + BuiltinOperator_FLOOR, + BuiltinOperator_FULLY_CONNECTED, + BuiltinOperator_HASHTABLE_LOOKUP, + BuiltinOperator_L2_NORMALIZATION, + BuiltinOperator_L2_POOL_2D, + BuiltinOperator_LOCAL_RESPONSE_NORMALIZATION, + BuiltinOperator_LOGISTIC, + BuiltinOperator_LSH_PROJECTION, + BuiltinOperator_LSTM, + BuiltinOperator_MAX_POOL_2D, + BuiltinOperator_MUL, + BuiltinOperator_RELU, + BuiltinOperator_RELU_N1_TO_1, + BuiltinOperator_RELU6, + BuiltinOperator_RESHAPE, + BuiltinOperator_RESIZE_BILINEAR, + BuiltinOperator_RNN, + BuiltinOperator_SOFTMAX, + BuiltinOperator_SPACE_TO_DEPTH, + BuiltinOperator_SVDF, + BuiltinOperator_TANH, + BuiltinOperator_CONCAT_EMBEDDINGS, + BuiltinOperator_SKIP_GRAM, + BuiltinOperator_CALL, + BuiltinOperator_CUSTOM, + BuiltinOperator_EMBEDDING_LOOKUP_SPARSE, + BuiltinOperator_PAD, + BuiltinOperator_UNIDIRECTIONAL_SEQUENCE_RNN, + BuiltinOperator_GATHER, + BuiltinOperator_BATCH_TO_SPACE_ND, + BuiltinOperator_SPACE_TO_BATCH_ND, + BuiltinOperator_TRANSPOSE, + BuiltinOperator_MEAN, + BuiltinOperator_SUB, + BuiltinOperator_DIV, + BuiltinOperator_SQUEEZE, + BuiltinOperator_UNIDIRECTIONAL_SEQUENCE_LSTM, + BuiltinOperator_STRIDED_SLICE, + BuiltinOperator_BIDIRECTIONAL_SEQUENCE_RNN, + BuiltinOperator_EXP, + BuiltinOperator_TOPK_V2, + BuiltinOperator_SPLIT, + BuiltinOperator_LOG_SOFTMAX, + BuiltinOperator_DELEGATE, + BuiltinOperator_BIDIRECTIONAL_SEQUENCE_LSTM, + BuiltinOperator_CAST, + 
BuiltinOperator_PRELU, + BuiltinOperator_MAXIMUM, + BuiltinOperator_ARG_MAX, + BuiltinOperator_MINIMUM, + BuiltinOperator_LESS, + BuiltinOperator_NEG, + BuiltinOperator_PADV2, + BuiltinOperator_GREATER, + BuiltinOperator_GREATER_EQUAL, + BuiltinOperator_LESS_EQUAL, + BuiltinOperator_SELECT, + BuiltinOperator_SLICE, + BuiltinOperator_SIN, + BuiltinOperator_TRANSPOSE_CONV, + BuiltinOperator_SPARSE_TO_DENSE, + BuiltinOperator_TILE, + BuiltinOperator_EXPAND_DIMS, + BuiltinOperator_EQUAL, + BuiltinOperator_NOT_EQUAL, + BuiltinOperator_LOG, + BuiltinOperator_SUM, + BuiltinOperator_SQRT, + BuiltinOperator_RSQRT, + BuiltinOperator_SHAPE, + BuiltinOperator_POW, + BuiltinOperator_ARG_MIN, + BuiltinOperator_FAKE_QUANT, + BuiltinOperator_REDUCE_PROD, + BuiltinOperator_REDUCE_MAX, + BuiltinOperator_PACK, + BuiltinOperator_LOGICAL_OR, + BuiltinOperator_ONE_HOT, + BuiltinOperator_LOGICAL_AND, + BuiltinOperator_LOGICAL_NOT, + BuiltinOperator_UNPACK, + BuiltinOperator_REDUCE_MIN, + BuiltinOperator_FLOOR_DIV, + BuiltinOperator_REDUCE_ANY, + BuiltinOperator_SQUARE, + BuiltinOperator_ZEROS_LIKE, + BuiltinOperator_FILL, + BuiltinOperator_FLOOR_MOD, + BuiltinOperator_RANGE, + BuiltinOperator_RESIZE_NEAREST_NEIGHBOR, + BuiltinOperator_LEAKY_RELU, + BuiltinOperator_SQUARED_DIFFERENCE, + BuiltinOperator_MIRROR_PAD, + BuiltinOperator_ABS, + BuiltinOperator_SPLIT_V, + BuiltinOperator_UNIQUE, + BuiltinOperator_CEIL, + BuiltinOperator_REVERSE_V2, + BuiltinOperator_ADD_N, + BuiltinOperator_GATHER_ND, + BuiltinOperator_COS, + BuiltinOperator_WHERE, + BuiltinOperator_RANK, + BuiltinOperator_ELU, + BuiltinOperator_REVERSE_SEQUENCE, + BuiltinOperator_MATRIX_DIAG, + BuiltinOperator_QUANTIZE, + BuiltinOperator_MATRIX_SET_DIAG, + BuiltinOperator_ROUND, + BuiltinOperator_HARD_SWISH, + BuiltinOperator_IF, + BuiltinOperator_WHILE, + BuiltinOperator_NON_MAX_SUPPRESSION_V4, + BuiltinOperator_NON_MAX_SUPPRESSION_V5, + BuiltinOperator_SCATTER_ND, + BuiltinOperator_SELECT_V2, + BuiltinOperator_DENSIFY, + 
BuiltinOperator_SEGMENT_SUM, + BuiltinOperator_BATCH_MATMUL, + BuiltinOperator_PLACEHOLDER_FOR_GREATER_OP_CODES, + BuiltinOperator_CUMSUM, + BuiltinOperator_CALL_ONCE, + BuiltinOperator_BROADCAST_TO, + BuiltinOperator_RFFT2D, + BuiltinOperator_CONV_3D, + BuiltinOperator_IMAG, + BuiltinOperator_REAL, + BuiltinOperator_COMPLEX_ABS, + BuiltinOperator_HASHTABLE, + BuiltinOperator_HASHTABLE_FIND, + BuiltinOperator_HASHTABLE_IMPORT, + BuiltinOperator_HASHTABLE_SIZE, + BuiltinOperator_REDUCE_ALL, + BuiltinOperator_CONV_3D_TRANSPOSE, + BuiltinOperator_VAR_HANDLE, + BuiltinOperator_READ_VARIABLE, + BuiltinOperator_ASSIGN_VARIABLE, + BuiltinOperator_BROADCAST_ARGS, + BuiltinOperator_RANDOM_STANDARD_NORMAL, + BuiltinOperator_BUCKETIZE, + BuiltinOperator_RANDOM_UNIFORM, + BuiltinOperator_MULTINOMIAL, + BuiltinOperator_GELU, + BuiltinOperator_DYNAMIC_UPDATE_SLICE, + BuiltinOperator_RELU_0_TO_1, + BuiltinOperator_UNSORTED_SEGMENT_PROD, + BuiltinOperator_UNSORTED_SEGMENT_MAX, + BuiltinOperator_UNSORTED_SEGMENT_SUM, + BuiltinOperator_ATAN2, + BuiltinOperator_UNSORTED_SEGMENT_MIN, + BuiltinOperator_SIGN, + BuiltinOperator_BITCAST, + BuiltinOperator_BITWISE_XOR, + BuiltinOperator_RIGHT_SHIFT + }; + return values; +} + +inline const char * const *EnumNamesBuiltinOperator() { + static const char * const names[163] = { + "ADD", + "AVERAGE_POOL_2D", + "CONCATENATION", + "CONV_2D", + "DEPTHWISE_CONV_2D", + "DEPTH_TO_SPACE", + "DEQUANTIZE", + "EMBEDDING_LOOKUP", + "FLOOR", + "FULLY_CONNECTED", + "HASHTABLE_LOOKUP", + "L2_NORMALIZATION", + "L2_POOL_2D", + "LOCAL_RESPONSE_NORMALIZATION", + "LOGISTIC", + "LSH_PROJECTION", + "LSTM", + "MAX_POOL_2D", + "MUL", + "RELU", + "RELU_N1_TO_1", + "RELU6", + "RESHAPE", + "RESIZE_BILINEAR", + "RNN", + "SOFTMAX", + "SPACE_TO_DEPTH", + "SVDF", + "TANH", + "CONCAT_EMBEDDINGS", + "SKIP_GRAM", + "CALL", + "CUSTOM", + "EMBEDDING_LOOKUP_SPARSE", + "PAD", + "UNIDIRECTIONAL_SEQUENCE_RNN", + "GATHER", + "BATCH_TO_SPACE_ND", + "SPACE_TO_BATCH_ND", + "TRANSPOSE", + 
"MEAN", + "SUB", + "DIV", + "SQUEEZE", + "UNIDIRECTIONAL_SEQUENCE_LSTM", + "STRIDED_SLICE", + "BIDIRECTIONAL_SEQUENCE_RNN", + "EXP", + "TOPK_V2", + "SPLIT", + "LOG_SOFTMAX", + "DELEGATE", + "BIDIRECTIONAL_SEQUENCE_LSTM", + "CAST", + "PRELU", + "MAXIMUM", + "ARG_MAX", + "MINIMUM", + "LESS", + "NEG", + "PADV2", + "GREATER", + "GREATER_EQUAL", + "LESS_EQUAL", + "SELECT", + "SLICE", + "SIN", + "TRANSPOSE_CONV", + "SPARSE_TO_DENSE", + "TILE", + "EXPAND_DIMS", + "EQUAL", + "NOT_EQUAL", + "LOG", + "SUM", + "SQRT", + "RSQRT", + "SHAPE", + "POW", + "ARG_MIN", + "FAKE_QUANT", + "REDUCE_PROD", + "REDUCE_MAX", + "PACK", + "LOGICAL_OR", + "ONE_HOT", + "LOGICAL_AND", + "LOGICAL_NOT", + "UNPACK", + "REDUCE_MIN", + "FLOOR_DIV", + "REDUCE_ANY", + "SQUARE", + "ZEROS_LIKE", + "FILL", + "FLOOR_MOD", + "RANGE", + "RESIZE_NEAREST_NEIGHBOR", + "LEAKY_RELU", + "SQUARED_DIFFERENCE", + "MIRROR_PAD", + "ABS", + "SPLIT_V", + "UNIQUE", + "CEIL", + "REVERSE_V2", + "ADD_N", + "GATHER_ND", + "COS", + "WHERE", + "RANK", + "ELU", + "REVERSE_SEQUENCE", + "MATRIX_DIAG", + "QUANTIZE", + "MATRIX_SET_DIAG", + "ROUND", + "HARD_SWISH", + "IF", + "WHILE", + "NON_MAX_SUPPRESSION_V4", + "NON_MAX_SUPPRESSION_V5", + "SCATTER_ND", + "SELECT_V2", + "DENSIFY", + "SEGMENT_SUM", + "BATCH_MATMUL", + "PLACEHOLDER_FOR_GREATER_OP_CODES", + "CUMSUM", + "CALL_ONCE", + "BROADCAST_TO", + "RFFT2D", + "CONV_3D", + "IMAG", + "REAL", + "COMPLEX_ABS", + "HASHTABLE", + "HASHTABLE_FIND", + "HASHTABLE_IMPORT", + "HASHTABLE_SIZE", + "REDUCE_ALL", + "CONV_3D_TRANSPOSE", + "VAR_HANDLE", + "READ_VARIABLE", + "ASSIGN_VARIABLE", + "BROADCAST_ARGS", + "RANDOM_STANDARD_NORMAL", + "BUCKETIZE", + "RANDOM_UNIFORM", + "MULTINOMIAL", + "GELU", + "DYNAMIC_UPDATE_SLICE", + "RELU_0_TO_1", + "UNSORTED_SEGMENT_PROD", + "UNSORTED_SEGMENT_MAX", + "UNSORTED_SEGMENT_SUM", + "ATAN2", + "UNSORTED_SEGMENT_MIN", + "SIGN", + "BITCAST", + "BITWISE_XOR", + "RIGHT_SHIFT", + nullptr + }; + return names; +} + +inline const char 
*EnumNameBuiltinOperator(BuiltinOperator e) { + if (flatbuffers::IsOutRange(e, BuiltinOperator_ADD, BuiltinOperator_RIGHT_SHIFT)) return ""; + const size_t index = static_cast(e); + return EnumNamesBuiltinOperator()[index]; +} + +enum BuiltinOptions : uint8_t { + BuiltinOptions_NONE = 0, + BuiltinOptions_Conv2DOptions = 1, + BuiltinOptions_DepthwiseConv2DOptions = 2, + BuiltinOptions_ConcatEmbeddingsOptions = 3, + BuiltinOptions_LSHProjectionOptions = 4, + BuiltinOptions_Pool2DOptions = 5, + BuiltinOptions_SVDFOptions = 6, + BuiltinOptions_RNNOptions = 7, + BuiltinOptions_FullyConnectedOptions = 8, + BuiltinOptions_SoftmaxOptions = 9, + BuiltinOptions_ConcatenationOptions = 10, + BuiltinOptions_AddOptions = 11, + BuiltinOptions_L2NormOptions = 12, + BuiltinOptions_LocalResponseNormalizationOptions = 13, + BuiltinOptions_LSTMOptions = 14, + BuiltinOptions_ResizeBilinearOptions = 15, + BuiltinOptions_CallOptions = 16, + BuiltinOptions_ReshapeOptions = 17, + BuiltinOptions_SkipGramOptions = 18, + BuiltinOptions_SpaceToDepthOptions = 19, + BuiltinOptions_EmbeddingLookupSparseOptions = 20, + BuiltinOptions_MulOptions = 21, + BuiltinOptions_PadOptions = 22, + BuiltinOptions_GatherOptions = 23, + BuiltinOptions_BatchToSpaceNDOptions = 24, + BuiltinOptions_SpaceToBatchNDOptions = 25, + BuiltinOptions_TransposeOptions = 26, + BuiltinOptions_ReducerOptions = 27, + BuiltinOptions_SubOptions = 28, + BuiltinOptions_DivOptions = 29, + BuiltinOptions_SqueezeOptions = 30, + BuiltinOptions_SequenceRNNOptions = 31, + BuiltinOptions_StridedSliceOptions = 32, + BuiltinOptions_ExpOptions = 33, + BuiltinOptions_TopKV2Options = 34, + BuiltinOptions_SplitOptions = 35, + BuiltinOptions_LogSoftmaxOptions = 36, + BuiltinOptions_CastOptions = 37, + BuiltinOptions_DequantizeOptions = 38, + BuiltinOptions_MaximumMinimumOptions = 39, + BuiltinOptions_ArgMaxOptions = 40, + BuiltinOptions_LessOptions = 41, + BuiltinOptions_NegOptions = 42, + BuiltinOptions_PadV2Options = 43, + 
BuiltinOptions_GreaterOptions = 44, + BuiltinOptions_GreaterEqualOptions = 45, + BuiltinOptions_LessEqualOptions = 46, + BuiltinOptions_SelectOptions = 47, + BuiltinOptions_SliceOptions = 48, + BuiltinOptions_TransposeConvOptions = 49, + BuiltinOptions_SparseToDenseOptions = 50, + BuiltinOptions_TileOptions = 51, + BuiltinOptions_ExpandDimsOptions = 52, + BuiltinOptions_EqualOptions = 53, + BuiltinOptions_NotEqualOptions = 54, + BuiltinOptions_ShapeOptions = 55, + BuiltinOptions_PowOptions = 56, + BuiltinOptions_ArgMinOptions = 57, + BuiltinOptions_FakeQuantOptions = 58, + BuiltinOptions_PackOptions = 59, + BuiltinOptions_LogicalOrOptions = 60, + BuiltinOptions_OneHotOptions = 61, + BuiltinOptions_LogicalAndOptions = 62, + BuiltinOptions_LogicalNotOptions = 63, + BuiltinOptions_UnpackOptions = 64, + BuiltinOptions_FloorDivOptions = 65, + BuiltinOptions_SquareOptions = 66, + BuiltinOptions_ZerosLikeOptions = 67, + BuiltinOptions_FillOptions = 68, + BuiltinOptions_BidirectionalSequenceLSTMOptions = 69, + BuiltinOptions_BidirectionalSequenceRNNOptions = 70, + BuiltinOptions_UnidirectionalSequenceLSTMOptions = 71, + BuiltinOptions_FloorModOptions = 72, + BuiltinOptions_RangeOptions = 73, + BuiltinOptions_ResizeNearestNeighborOptions = 74, + BuiltinOptions_LeakyReluOptions = 75, + BuiltinOptions_SquaredDifferenceOptions = 76, + BuiltinOptions_MirrorPadOptions = 77, + BuiltinOptions_AbsOptions = 78, + BuiltinOptions_SplitVOptions = 79, + BuiltinOptions_UniqueOptions = 80, + BuiltinOptions_ReverseV2Options = 81, + BuiltinOptions_AddNOptions = 82, + BuiltinOptions_GatherNdOptions = 83, + BuiltinOptions_CosOptions = 84, + BuiltinOptions_WhereOptions = 85, + BuiltinOptions_RankOptions = 86, + BuiltinOptions_ReverseSequenceOptions = 87, + BuiltinOptions_MatrixDiagOptions = 88, + BuiltinOptions_QuantizeOptions = 89, + BuiltinOptions_MatrixSetDiagOptions = 90, + BuiltinOptions_HardSwishOptions = 91, + BuiltinOptions_IfOptions = 92, + BuiltinOptions_WhileOptions = 93, + 
BuiltinOptions_DepthToSpaceOptions = 94, + BuiltinOptions_NonMaxSuppressionV4Options = 95, + BuiltinOptions_NonMaxSuppressionV5Options = 96, + BuiltinOptions_ScatterNdOptions = 97, + BuiltinOptions_SelectV2Options = 98, + BuiltinOptions_DensifyOptions = 99, + BuiltinOptions_SegmentSumOptions = 100, + BuiltinOptions_BatchMatMulOptions = 101, + BuiltinOptions_CumsumOptions = 102, + BuiltinOptions_CallOnceOptions = 103, + BuiltinOptions_BroadcastToOptions = 104, + BuiltinOptions_Rfft2dOptions = 105, + BuiltinOptions_Conv3DOptions = 106, + BuiltinOptions_HashtableOptions = 107, + BuiltinOptions_HashtableFindOptions = 108, + BuiltinOptions_HashtableImportOptions = 109, + BuiltinOptions_HashtableSizeOptions = 110, + BuiltinOptions_VarHandleOptions = 111, + BuiltinOptions_ReadVariableOptions = 112, + BuiltinOptions_AssignVariableOptions = 113, + BuiltinOptions_RandomOptions = 114, + BuiltinOptions_BucketizeOptions = 115, + BuiltinOptions_GeluOptions = 116, + BuiltinOptions_DynamicUpdateSliceOptions = 117, + BuiltinOptions_UnsortedSegmentProdOptions = 118, + BuiltinOptions_UnsortedSegmentMaxOptions = 119, + BuiltinOptions_UnsortedSegmentMinOptions = 120, + BuiltinOptions_UnsortedSegmentSumOptions = 121, + BuiltinOptions_ATan2Options = 122, + BuiltinOptions_SignOptions = 123, + BuiltinOptions_BitcastOptions = 124, + BuiltinOptions_BitwiseXorOptions = 125, + BuiltinOptions_RightShiftOptions = 126, + BuiltinOptions_MIN = BuiltinOptions_NONE, + BuiltinOptions_MAX = BuiltinOptions_RightShiftOptions +}; + +inline const BuiltinOptions (&EnumValuesBuiltinOptions())[127] { + static const BuiltinOptions values[] = { + BuiltinOptions_NONE, + BuiltinOptions_Conv2DOptions, + BuiltinOptions_DepthwiseConv2DOptions, + BuiltinOptions_ConcatEmbeddingsOptions, + BuiltinOptions_LSHProjectionOptions, + BuiltinOptions_Pool2DOptions, + BuiltinOptions_SVDFOptions, + BuiltinOptions_RNNOptions, + BuiltinOptions_FullyConnectedOptions, + BuiltinOptions_SoftmaxOptions, + 
BuiltinOptions_ConcatenationOptions, + BuiltinOptions_AddOptions, + BuiltinOptions_L2NormOptions, + BuiltinOptions_LocalResponseNormalizationOptions, + BuiltinOptions_LSTMOptions, + BuiltinOptions_ResizeBilinearOptions, + BuiltinOptions_CallOptions, + BuiltinOptions_ReshapeOptions, + BuiltinOptions_SkipGramOptions, + BuiltinOptions_SpaceToDepthOptions, + BuiltinOptions_EmbeddingLookupSparseOptions, + BuiltinOptions_MulOptions, + BuiltinOptions_PadOptions, + BuiltinOptions_GatherOptions, + BuiltinOptions_BatchToSpaceNDOptions, + BuiltinOptions_SpaceToBatchNDOptions, + BuiltinOptions_TransposeOptions, + BuiltinOptions_ReducerOptions, + BuiltinOptions_SubOptions, + BuiltinOptions_DivOptions, + BuiltinOptions_SqueezeOptions, + BuiltinOptions_SequenceRNNOptions, + BuiltinOptions_StridedSliceOptions, + BuiltinOptions_ExpOptions, + BuiltinOptions_TopKV2Options, + BuiltinOptions_SplitOptions, + BuiltinOptions_LogSoftmaxOptions, + BuiltinOptions_CastOptions, + BuiltinOptions_DequantizeOptions, + BuiltinOptions_MaximumMinimumOptions, + BuiltinOptions_ArgMaxOptions, + BuiltinOptions_LessOptions, + BuiltinOptions_NegOptions, + BuiltinOptions_PadV2Options, + BuiltinOptions_GreaterOptions, + BuiltinOptions_GreaterEqualOptions, + BuiltinOptions_LessEqualOptions, + BuiltinOptions_SelectOptions, + BuiltinOptions_SliceOptions, + BuiltinOptions_TransposeConvOptions, + BuiltinOptions_SparseToDenseOptions, + BuiltinOptions_TileOptions, + BuiltinOptions_ExpandDimsOptions, + BuiltinOptions_EqualOptions, + BuiltinOptions_NotEqualOptions, + BuiltinOptions_ShapeOptions, + BuiltinOptions_PowOptions, + BuiltinOptions_ArgMinOptions, + BuiltinOptions_FakeQuantOptions, + BuiltinOptions_PackOptions, + BuiltinOptions_LogicalOrOptions, + BuiltinOptions_OneHotOptions, + BuiltinOptions_LogicalAndOptions, + BuiltinOptions_LogicalNotOptions, + BuiltinOptions_UnpackOptions, + BuiltinOptions_FloorDivOptions, + BuiltinOptions_SquareOptions, + BuiltinOptions_ZerosLikeOptions, + BuiltinOptions_FillOptions, 
+ BuiltinOptions_BidirectionalSequenceLSTMOptions, + BuiltinOptions_BidirectionalSequenceRNNOptions, + BuiltinOptions_UnidirectionalSequenceLSTMOptions, + BuiltinOptions_FloorModOptions, + BuiltinOptions_RangeOptions, + BuiltinOptions_ResizeNearestNeighborOptions, + BuiltinOptions_LeakyReluOptions, + BuiltinOptions_SquaredDifferenceOptions, + BuiltinOptions_MirrorPadOptions, + BuiltinOptions_AbsOptions, + BuiltinOptions_SplitVOptions, + BuiltinOptions_UniqueOptions, + BuiltinOptions_ReverseV2Options, + BuiltinOptions_AddNOptions, + BuiltinOptions_GatherNdOptions, + BuiltinOptions_CosOptions, + BuiltinOptions_WhereOptions, + BuiltinOptions_RankOptions, + BuiltinOptions_ReverseSequenceOptions, + BuiltinOptions_MatrixDiagOptions, + BuiltinOptions_QuantizeOptions, + BuiltinOptions_MatrixSetDiagOptions, + BuiltinOptions_HardSwishOptions, + BuiltinOptions_IfOptions, + BuiltinOptions_WhileOptions, + BuiltinOptions_DepthToSpaceOptions, + BuiltinOptions_NonMaxSuppressionV4Options, + BuiltinOptions_NonMaxSuppressionV5Options, + BuiltinOptions_ScatterNdOptions, + BuiltinOptions_SelectV2Options, + BuiltinOptions_DensifyOptions, + BuiltinOptions_SegmentSumOptions, + BuiltinOptions_BatchMatMulOptions, + BuiltinOptions_CumsumOptions, + BuiltinOptions_CallOnceOptions, + BuiltinOptions_BroadcastToOptions, + BuiltinOptions_Rfft2dOptions, + BuiltinOptions_Conv3DOptions, + BuiltinOptions_HashtableOptions, + BuiltinOptions_HashtableFindOptions, + BuiltinOptions_HashtableImportOptions, + BuiltinOptions_HashtableSizeOptions, + BuiltinOptions_VarHandleOptions, + BuiltinOptions_ReadVariableOptions, + BuiltinOptions_AssignVariableOptions, + BuiltinOptions_RandomOptions, + BuiltinOptions_BucketizeOptions, + BuiltinOptions_GeluOptions, + BuiltinOptions_DynamicUpdateSliceOptions, + BuiltinOptions_UnsortedSegmentProdOptions, + BuiltinOptions_UnsortedSegmentMaxOptions, + BuiltinOptions_UnsortedSegmentMinOptions, + BuiltinOptions_UnsortedSegmentSumOptions, + BuiltinOptions_ATan2Options, + 
BuiltinOptions_SignOptions, + BuiltinOptions_BitcastOptions, + BuiltinOptions_BitwiseXorOptions, + BuiltinOptions_RightShiftOptions + }; + return values; +} + +inline const char * const *EnumNamesBuiltinOptions() { + static const char * const names[128] = { + "NONE", + "Conv2DOptions", + "DepthwiseConv2DOptions", + "ConcatEmbeddingsOptions", + "LSHProjectionOptions", + "Pool2DOptions", + "SVDFOptions", + "RNNOptions", + "FullyConnectedOptions", + "SoftmaxOptions", + "ConcatenationOptions", + "AddOptions", + "L2NormOptions", + "LocalResponseNormalizationOptions", + "LSTMOptions", + "ResizeBilinearOptions", + "CallOptions", + "ReshapeOptions", + "SkipGramOptions", + "SpaceToDepthOptions", + "EmbeddingLookupSparseOptions", + "MulOptions", + "PadOptions", + "GatherOptions", + "BatchToSpaceNDOptions", + "SpaceToBatchNDOptions", + "TransposeOptions", + "ReducerOptions", + "SubOptions", + "DivOptions", + "SqueezeOptions", + "SequenceRNNOptions", + "StridedSliceOptions", + "ExpOptions", + "TopKV2Options", + "SplitOptions", + "LogSoftmaxOptions", + "CastOptions", + "DequantizeOptions", + "MaximumMinimumOptions", + "ArgMaxOptions", + "LessOptions", + "NegOptions", + "PadV2Options", + "GreaterOptions", + "GreaterEqualOptions", + "LessEqualOptions", + "SelectOptions", + "SliceOptions", + "TransposeConvOptions", + "SparseToDenseOptions", + "TileOptions", + "ExpandDimsOptions", + "EqualOptions", + "NotEqualOptions", + "ShapeOptions", + "PowOptions", + "ArgMinOptions", + "FakeQuantOptions", + "PackOptions", + "LogicalOrOptions", + "OneHotOptions", + "LogicalAndOptions", + "LogicalNotOptions", + "UnpackOptions", + "FloorDivOptions", + "SquareOptions", + "ZerosLikeOptions", + "FillOptions", + "BidirectionalSequenceLSTMOptions", + "BidirectionalSequenceRNNOptions", + "UnidirectionalSequenceLSTMOptions", + "FloorModOptions", + "RangeOptions", + "ResizeNearestNeighborOptions", + "LeakyReluOptions", + "SquaredDifferenceOptions", + "MirrorPadOptions", + "AbsOptions", + "SplitVOptions", 
+ "UniqueOptions", + "ReverseV2Options", + "AddNOptions", + "GatherNdOptions", + "CosOptions", + "WhereOptions", + "RankOptions", + "ReverseSequenceOptions", + "MatrixDiagOptions", + "QuantizeOptions", + "MatrixSetDiagOptions", + "HardSwishOptions", + "IfOptions", + "WhileOptions", + "DepthToSpaceOptions", + "NonMaxSuppressionV4Options", + "NonMaxSuppressionV5Options", + "ScatterNdOptions", + "SelectV2Options", + "DensifyOptions", + "SegmentSumOptions", + "BatchMatMulOptions", + "CumsumOptions", + "CallOnceOptions", + "BroadcastToOptions", + "Rfft2dOptions", + "Conv3DOptions", + "HashtableOptions", + "HashtableFindOptions", + "HashtableImportOptions", + "HashtableSizeOptions", + "VarHandleOptions", + "ReadVariableOptions", + "AssignVariableOptions", + "RandomOptions", + "BucketizeOptions", + "GeluOptions", + "DynamicUpdateSliceOptions", + "UnsortedSegmentProdOptions", + "UnsortedSegmentMaxOptions", + "UnsortedSegmentMinOptions", + "UnsortedSegmentSumOptions", + "ATan2Options", + "SignOptions", + "BitcastOptions", + "BitwiseXorOptions", + "RightShiftOptions", + nullptr + }; + return names; +} + +inline const char *EnumNameBuiltinOptions(BuiltinOptions e) { + if (flatbuffers::IsOutRange(e, BuiltinOptions_NONE, BuiltinOptions_RightShiftOptions)) return ""; + const size_t index = static_cast(e); + return EnumNamesBuiltinOptions()[index]; +} + +template struct BuiltinOptionsTraits { + static const BuiltinOptions enum_value = BuiltinOptions_NONE; +}; + +template<> struct BuiltinOptionsTraits { + static const BuiltinOptions enum_value = BuiltinOptions_Conv2DOptions; +}; + +template<> struct BuiltinOptionsTraits { + static const BuiltinOptions enum_value = BuiltinOptions_DepthwiseConv2DOptions; +}; + +template<> struct BuiltinOptionsTraits { + static const BuiltinOptions enum_value = BuiltinOptions_ConcatEmbeddingsOptions; +}; + +template<> struct BuiltinOptionsTraits { + static const BuiltinOptions enum_value = BuiltinOptions_LSHProjectionOptions; +}; + +template<> struct 
BuiltinOptionsTraits { + static const BuiltinOptions enum_value = BuiltinOptions_Pool2DOptions; +}; + +template<> struct BuiltinOptionsTraits { + static const BuiltinOptions enum_value = BuiltinOptions_SVDFOptions; +}; + +template<> struct BuiltinOptionsTraits { + static const BuiltinOptions enum_value = BuiltinOptions_RNNOptions; +}; + +template<> struct BuiltinOptionsTraits { + static const BuiltinOptions enum_value = BuiltinOptions_FullyConnectedOptions; +}; + +template<> struct BuiltinOptionsTraits { + static const BuiltinOptions enum_value = BuiltinOptions_SoftmaxOptions; +}; + +template<> struct BuiltinOptionsTraits { + static const BuiltinOptions enum_value = BuiltinOptions_ConcatenationOptions; +}; + +template<> struct BuiltinOptionsTraits { + static const BuiltinOptions enum_value = BuiltinOptions_AddOptions; +}; + +template<> struct BuiltinOptionsTraits { + static const BuiltinOptions enum_value = BuiltinOptions_L2NormOptions; +}; + +template<> struct BuiltinOptionsTraits { + static const BuiltinOptions enum_value = BuiltinOptions_LocalResponseNormalizationOptions; +}; + +template<> struct BuiltinOptionsTraits { + static const BuiltinOptions enum_value = BuiltinOptions_LSTMOptions; +}; + +template<> struct BuiltinOptionsTraits { + static const BuiltinOptions enum_value = BuiltinOptions_ResizeBilinearOptions; +}; + +template<> struct BuiltinOptionsTraits { + static const BuiltinOptions enum_value = BuiltinOptions_CallOptions; +}; + +template<> struct BuiltinOptionsTraits { + static const BuiltinOptions enum_value = BuiltinOptions_ReshapeOptions; +}; + +template<> struct BuiltinOptionsTraits { + static const BuiltinOptions enum_value = BuiltinOptions_SkipGramOptions; +}; + +template<> struct BuiltinOptionsTraits { + static const BuiltinOptions enum_value = BuiltinOptions_SpaceToDepthOptions; +}; + +template<> struct BuiltinOptionsTraits { + static const BuiltinOptions enum_value = BuiltinOptions_EmbeddingLookupSparseOptions; +}; + +template<> struct 
BuiltinOptionsTraits { + static const BuiltinOptions enum_value = BuiltinOptions_MulOptions; +}; + +template<> struct BuiltinOptionsTraits { + static const BuiltinOptions enum_value = BuiltinOptions_PadOptions; +}; + +template<> struct BuiltinOptionsTraits { + static const BuiltinOptions enum_value = BuiltinOptions_GatherOptions; +}; + +template<> struct BuiltinOptionsTraits { + static const BuiltinOptions enum_value = BuiltinOptions_BatchToSpaceNDOptions; +}; + +template<> struct BuiltinOptionsTraits { + static const BuiltinOptions enum_value = BuiltinOptions_SpaceToBatchNDOptions; +}; + +template<> struct BuiltinOptionsTraits { + static const BuiltinOptions enum_value = BuiltinOptions_TransposeOptions; +}; + +template<> struct BuiltinOptionsTraits { + static const BuiltinOptions enum_value = BuiltinOptions_ReducerOptions; +}; + +template<> struct BuiltinOptionsTraits { + static const BuiltinOptions enum_value = BuiltinOptions_SubOptions; +}; + +template<> struct BuiltinOptionsTraits { + static const BuiltinOptions enum_value = BuiltinOptions_DivOptions; +}; + +template<> struct BuiltinOptionsTraits { + static const BuiltinOptions enum_value = BuiltinOptions_SqueezeOptions; +}; + +template<> struct BuiltinOptionsTraits { + static const BuiltinOptions enum_value = BuiltinOptions_SequenceRNNOptions; +}; + +template<> struct BuiltinOptionsTraits { + static const BuiltinOptions enum_value = BuiltinOptions_StridedSliceOptions; +}; + +template<> struct BuiltinOptionsTraits { + static const BuiltinOptions enum_value = BuiltinOptions_ExpOptions; +}; + +template<> struct BuiltinOptionsTraits { + static const BuiltinOptions enum_value = BuiltinOptions_TopKV2Options; +}; + +template<> struct BuiltinOptionsTraits { + static const BuiltinOptions enum_value = BuiltinOptions_SplitOptions; +}; + +template<> struct BuiltinOptionsTraits { + static const BuiltinOptions enum_value = BuiltinOptions_LogSoftmaxOptions; +}; + +template<> struct BuiltinOptionsTraits { + static const 
BuiltinOptions enum_value = BuiltinOptions_CastOptions; +}; + +template<> struct BuiltinOptionsTraits { + static const BuiltinOptions enum_value = BuiltinOptions_DequantizeOptions; +}; + +template<> struct BuiltinOptionsTraits { + static const BuiltinOptions enum_value = BuiltinOptions_MaximumMinimumOptions; +}; + +template<> struct BuiltinOptionsTraits { + static const BuiltinOptions enum_value = BuiltinOptions_ArgMaxOptions; +}; + +template<> struct BuiltinOptionsTraits { + static const BuiltinOptions enum_value = BuiltinOptions_LessOptions; +}; + +template<> struct BuiltinOptionsTraits { + static const BuiltinOptions enum_value = BuiltinOptions_NegOptions; +}; + +template<> struct BuiltinOptionsTraits { + static const BuiltinOptions enum_value = BuiltinOptions_PadV2Options; +}; + +template<> struct BuiltinOptionsTraits { + static const BuiltinOptions enum_value = BuiltinOptions_GreaterOptions; +}; + +template<> struct BuiltinOptionsTraits { + static const BuiltinOptions enum_value = BuiltinOptions_GreaterEqualOptions; +}; + +template<> struct BuiltinOptionsTraits { + static const BuiltinOptions enum_value = BuiltinOptions_LessEqualOptions; +}; + +template<> struct BuiltinOptionsTraits { + static const BuiltinOptions enum_value = BuiltinOptions_SelectOptions; +}; + +template<> struct BuiltinOptionsTraits { + static const BuiltinOptions enum_value = BuiltinOptions_SliceOptions; +}; + +template<> struct BuiltinOptionsTraits { + static const BuiltinOptions enum_value = BuiltinOptions_TransposeConvOptions; +}; + +template<> struct BuiltinOptionsTraits { + static const BuiltinOptions enum_value = BuiltinOptions_SparseToDenseOptions; +}; + +template<> struct BuiltinOptionsTraits { + static const BuiltinOptions enum_value = BuiltinOptions_TileOptions; +}; + +template<> struct BuiltinOptionsTraits { + static const BuiltinOptions enum_value = BuiltinOptions_ExpandDimsOptions; +}; + +template<> struct BuiltinOptionsTraits { + static const BuiltinOptions enum_value = 
BuiltinOptions_EqualOptions; +}; + +template<> struct BuiltinOptionsTraits { + static const BuiltinOptions enum_value = BuiltinOptions_NotEqualOptions; +}; + +template<> struct BuiltinOptionsTraits { + static const BuiltinOptions enum_value = BuiltinOptions_ShapeOptions; +}; + +template<> struct BuiltinOptionsTraits { + static const BuiltinOptions enum_value = BuiltinOptions_PowOptions; +}; + +template<> struct BuiltinOptionsTraits { + static const BuiltinOptions enum_value = BuiltinOptions_ArgMinOptions; +}; + +template<> struct BuiltinOptionsTraits { + static const BuiltinOptions enum_value = BuiltinOptions_FakeQuantOptions; +}; + +template<> struct BuiltinOptionsTraits { + static const BuiltinOptions enum_value = BuiltinOptions_PackOptions; +}; + +template<> struct BuiltinOptionsTraits { + static const BuiltinOptions enum_value = BuiltinOptions_LogicalOrOptions; +}; + +template<> struct BuiltinOptionsTraits { + static const BuiltinOptions enum_value = BuiltinOptions_OneHotOptions; +}; + +template<> struct BuiltinOptionsTraits { + static const BuiltinOptions enum_value = BuiltinOptions_LogicalAndOptions; +}; + +template<> struct BuiltinOptionsTraits { + static const BuiltinOptions enum_value = BuiltinOptions_LogicalNotOptions; +}; + +template<> struct BuiltinOptionsTraits { + static const BuiltinOptions enum_value = BuiltinOptions_UnpackOptions; +}; + +template<> struct BuiltinOptionsTraits { + static const BuiltinOptions enum_value = BuiltinOptions_FloorDivOptions; +}; + +template<> struct BuiltinOptionsTraits { + static const BuiltinOptions enum_value = BuiltinOptions_SquareOptions; +}; + +template<> struct BuiltinOptionsTraits { + static const BuiltinOptions enum_value = BuiltinOptions_ZerosLikeOptions; +}; + +template<> struct BuiltinOptionsTraits { + static const BuiltinOptions enum_value = BuiltinOptions_FillOptions; +}; + +template<> struct BuiltinOptionsTraits { + static const BuiltinOptions enum_value = BuiltinOptions_BidirectionalSequenceLSTMOptions; 
+}; + +template<> struct BuiltinOptionsTraits { + static const BuiltinOptions enum_value = BuiltinOptions_BidirectionalSequenceRNNOptions; +}; + +template<> struct BuiltinOptionsTraits { + static const BuiltinOptions enum_value = BuiltinOptions_UnidirectionalSequenceLSTMOptions; +}; + +template<> struct BuiltinOptionsTraits { + static const BuiltinOptions enum_value = BuiltinOptions_FloorModOptions; +}; + +template<> struct BuiltinOptionsTraits { + static const BuiltinOptions enum_value = BuiltinOptions_RangeOptions; +}; + +template<> struct BuiltinOptionsTraits { + static const BuiltinOptions enum_value = BuiltinOptions_ResizeNearestNeighborOptions; +}; + +template<> struct BuiltinOptionsTraits { + static const BuiltinOptions enum_value = BuiltinOptions_LeakyReluOptions; +}; + +template<> struct BuiltinOptionsTraits { + static const BuiltinOptions enum_value = BuiltinOptions_SquaredDifferenceOptions; +}; + +template<> struct BuiltinOptionsTraits { + static const BuiltinOptions enum_value = BuiltinOptions_MirrorPadOptions; +}; + +template<> struct BuiltinOptionsTraits { + static const BuiltinOptions enum_value = BuiltinOptions_AbsOptions; +}; + +template<> struct BuiltinOptionsTraits { + static const BuiltinOptions enum_value = BuiltinOptions_SplitVOptions; +}; + +template<> struct BuiltinOptionsTraits { + static const BuiltinOptions enum_value = BuiltinOptions_UniqueOptions; +}; + +template<> struct BuiltinOptionsTraits { + static const BuiltinOptions enum_value = BuiltinOptions_ReverseV2Options; +}; + +template<> struct BuiltinOptionsTraits { + static const BuiltinOptions enum_value = BuiltinOptions_AddNOptions; +}; + +template<> struct BuiltinOptionsTraits { + static const BuiltinOptions enum_value = BuiltinOptions_GatherNdOptions; +}; + +template<> struct BuiltinOptionsTraits { + static const BuiltinOptions enum_value = BuiltinOptions_CosOptions; +}; + +template<> struct BuiltinOptionsTraits { + static const BuiltinOptions enum_value = 
BuiltinOptions_WhereOptions; +}; + +template<> struct BuiltinOptionsTraits { + static const BuiltinOptions enum_value = BuiltinOptions_RankOptions; +}; + +template<> struct BuiltinOptionsTraits { + static const BuiltinOptions enum_value = BuiltinOptions_ReverseSequenceOptions; +}; + +template<> struct BuiltinOptionsTraits { + static const BuiltinOptions enum_value = BuiltinOptions_MatrixDiagOptions; +}; + +template<> struct BuiltinOptionsTraits { + static const BuiltinOptions enum_value = BuiltinOptions_QuantizeOptions; +}; + +template<> struct BuiltinOptionsTraits { + static const BuiltinOptions enum_value = BuiltinOptions_MatrixSetDiagOptions; +}; + +template<> struct BuiltinOptionsTraits { + static const BuiltinOptions enum_value = BuiltinOptions_HardSwishOptions; +}; + +template<> struct BuiltinOptionsTraits { + static const BuiltinOptions enum_value = BuiltinOptions_IfOptions; +}; + +template<> struct BuiltinOptionsTraits { + static const BuiltinOptions enum_value = BuiltinOptions_WhileOptions; +}; + +template<> struct BuiltinOptionsTraits { + static const BuiltinOptions enum_value = BuiltinOptions_DepthToSpaceOptions; +}; + +template<> struct BuiltinOptionsTraits { + static const BuiltinOptions enum_value = BuiltinOptions_NonMaxSuppressionV4Options; +}; + +template<> struct BuiltinOptionsTraits { + static const BuiltinOptions enum_value = BuiltinOptions_NonMaxSuppressionV5Options; +}; + +template<> struct BuiltinOptionsTraits { + static const BuiltinOptions enum_value = BuiltinOptions_ScatterNdOptions; +}; + +template<> struct BuiltinOptionsTraits { + static const BuiltinOptions enum_value = BuiltinOptions_SelectV2Options; +}; + +template<> struct BuiltinOptionsTraits { + static const BuiltinOptions enum_value = BuiltinOptions_DensifyOptions; +}; + +template<> struct BuiltinOptionsTraits { + static const BuiltinOptions enum_value = BuiltinOptions_SegmentSumOptions; +}; + +template<> struct BuiltinOptionsTraits { + static const BuiltinOptions enum_value = 
BuiltinOptions_BatchMatMulOptions; +}; + +template<> struct BuiltinOptionsTraits { + static const BuiltinOptions enum_value = BuiltinOptions_CumsumOptions; +}; + +template<> struct BuiltinOptionsTraits { + static const BuiltinOptions enum_value = BuiltinOptions_CallOnceOptions; +}; + +template<> struct BuiltinOptionsTraits { + static const BuiltinOptions enum_value = BuiltinOptions_BroadcastToOptions; +}; + +template<> struct BuiltinOptionsTraits { + static const BuiltinOptions enum_value = BuiltinOptions_Rfft2dOptions; +}; + +template<> struct BuiltinOptionsTraits { + static const BuiltinOptions enum_value = BuiltinOptions_Conv3DOptions; +}; + +template<> struct BuiltinOptionsTraits { + static const BuiltinOptions enum_value = BuiltinOptions_HashtableOptions; +}; + +template<> struct BuiltinOptionsTraits { + static const BuiltinOptions enum_value = BuiltinOptions_HashtableFindOptions; +}; + +template<> struct BuiltinOptionsTraits { + static const BuiltinOptions enum_value = BuiltinOptions_HashtableImportOptions; +}; + +template<> struct BuiltinOptionsTraits { + static const BuiltinOptions enum_value = BuiltinOptions_HashtableSizeOptions; +}; + +template<> struct BuiltinOptionsTraits { + static const BuiltinOptions enum_value = BuiltinOptions_VarHandleOptions; +}; + +template<> struct BuiltinOptionsTraits { + static const BuiltinOptions enum_value = BuiltinOptions_ReadVariableOptions; +}; + +template<> struct BuiltinOptionsTraits { + static const BuiltinOptions enum_value = BuiltinOptions_AssignVariableOptions; +}; + +template<> struct BuiltinOptionsTraits { + static const BuiltinOptions enum_value = BuiltinOptions_RandomOptions; +}; + +template<> struct BuiltinOptionsTraits { + static const BuiltinOptions enum_value = BuiltinOptions_BucketizeOptions; +}; + +template<> struct BuiltinOptionsTraits { + static const BuiltinOptions enum_value = BuiltinOptions_GeluOptions; +}; + +template<> struct BuiltinOptionsTraits { + static const BuiltinOptions enum_value = 
BuiltinOptions_DynamicUpdateSliceOptions; +}; + +template<> struct BuiltinOptionsTraits { + static const BuiltinOptions enum_value = BuiltinOptions_UnsortedSegmentProdOptions; +}; + +template<> struct BuiltinOptionsTraits { + static const BuiltinOptions enum_value = BuiltinOptions_UnsortedSegmentMaxOptions; +}; + +template<> struct BuiltinOptionsTraits { + static const BuiltinOptions enum_value = BuiltinOptions_UnsortedSegmentMinOptions; +}; + +template<> struct BuiltinOptionsTraits { + static const BuiltinOptions enum_value = BuiltinOptions_UnsortedSegmentSumOptions; +}; + +template<> struct BuiltinOptionsTraits { + static const BuiltinOptions enum_value = BuiltinOptions_ATan2Options; +}; + +template<> struct BuiltinOptionsTraits { + static const BuiltinOptions enum_value = BuiltinOptions_SignOptions; +}; + +template<> struct BuiltinOptionsTraits { + static const BuiltinOptions enum_value = BuiltinOptions_BitcastOptions; +}; + +template<> struct BuiltinOptionsTraits { + static const BuiltinOptions enum_value = BuiltinOptions_BitwiseXorOptions; +}; + +template<> struct BuiltinOptionsTraits { + static const BuiltinOptions enum_value = BuiltinOptions_RightShiftOptions; +}; + +template struct BuiltinOptionsUnionTraits { + static const BuiltinOptions enum_value = BuiltinOptions_NONE; +}; + +template<> struct BuiltinOptionsUnionTraits { + static const BuiltinOptions enum_value = BuiltinOptions_Conv2DOptions; +}; + +template<> struct BuiltinOptionsUnionTraits { + static const BuiltinOptions enum_value = BuiltinOptions_DepthwiseConv2DOptions; +}; + +template<> struct BuiltinOptionsUnionTraits { + static const BuiltinOptions enum_value = BuiltinOptions_ConcatEmbeddingsOptions; +}; + +template<> struct BuiltinOptionsUnionTraits { + static const BuiltinOptions enum_value = BuiltinOptions_LSHProjectionOptions; +}; + +template<> struct BuiltinOptionsUnionTraits { + static const BuiltinOptions enum_value = BuiltinOptions_Pool2DOptions; +}; + +template<> struct 
BuiltinOptionsUnionTraits { + static const BuiltinOptions enum_value = BuiltinOptions_SVDFOptions; +}; + +template<> struct BuiltinOptionsUnionTraits { + static const BuiltinOptions enum_value = BuiltinOptions_RNNOptions; +}; + +template<> struct BuiltinOptionsUnionTraits { + static const BuiltinOptions enum_value = BuiltinOptions_FullyConnectedOptions; +}; + +template<> struct BuiltinOptionsUnionTraits { + static const BuiltinOptions enum_value = BuiltinOptions_SoftmaxOptions; +}; + +template<> struct BuiltinOptionsUnionTraits { + static const BuiltinOptions enum_value = BuiltinOptions_ConcatenationOptions; +}; + +template<> struct BuiltinOptionsUnionTraits { + static const BuiltinOptions enum_value = BuiltinOptions_AddOptions; +}; + +template<> struct BuiltinOptionsUnionTraits { + static const BuiltinOptions enum_value = BuiltinOptions_L2NormOptions; +}; + +template<> struct BuiltinOptionsUnionTraits { + static const BuiltinOptions enum_value = BuiltinOptions_LocalResponseNormalizationOptions; +}; + +template<> struct BuiltinOptionsUnionTraits { + static const BuiltinOptions enum_value = BuiltinOptions_LSTMOptions; +}; + +template<> struct BuiltinOptionsUnionTraits { + static const BuiltinOptions enum_value = BuiltinOptions_ResizeBilinearOptions; +}; + +template<> struct BuiltinOptionsUnionTraits { + static const BuiltinOptions enum_value = BuiltinOptions_CallOptions; +}; + +template<> struct BuiltinOptionsUnionTraits { + static const BuiltinOptions enum_value = BuiltinOptions_ReshapeOptions; +}; + +template<> struct BuiltinOptionsUnionTraits { + static const BuiltinOptions enum_value = BuiltinOptions_SkipGramOptions; +}; + +template<> struct BuiltinOptionsUnionTraits { + static const BuiltinOptions enum_value = BuiltinOptions_SpaceToDepthOptions; +}; + +template<> struct BuiltinOptionsUnionTraits { + static const BuiltinOptions enum_value = BuiltinOptions_EmbeddingLookupSparseOptions; +}; + +template<> struct BuiltinOptionsUnionTraits { + static const 
BuiltinOptions enum_value = BuiltinOptions_MulOptions; +}; + +template<> struct BuiltinOptionsUnionTraits { + static const BuiltinOptions enum_value = BuiltinOptions_PadOptions; +}; + +template<> struct BuiltinOptionsUnionTraits { + static const BuiltinOptions enum_value = BuiltinOptions_GatherOptions; +}; + +template<> struct BuiltinOptionsUnionTraits { + static const BuiltinOptions enum_value = BuiltinOptions_BatchToSpaceNDOptions; +}; + +template<> struct BuiltinOptionsUnionTraits { + static const BuiltinOptions enum_value = BuiltinOptions_SpaceToBatchNDOptions; +}; + +template<> struct BuiltinOptionsUnionTraits { + static const BuiltinOptions enum_value = BuiltinOptions_TransposeOptions; +}; + +template<> struct BuiltinOptionsUnionTraits { + static const BuiltinOptions enum_value = BuiltinOptions_ReducerOptions; +}; + +template<> struct BuiltinOptionsUnionTraits { + static const BuiltinOptions enum_value = BuiltinOptions_SubOptions; +}; + +template<> struct BuiltinOptionsUnionTraits { + static const BuiltinOptions enum_value = BuiltinOptions_DivOptions; +}; + +template<> struct BuiltinOptionsUnionTraits { + static const BuiltinOptions enum_value = BuiltinOptions_SqueezeOptions; +}; + +template<> struct BuiltinOptionsUnionTraits { + static const BuiltinOptions enum_value = BuiltinOptions_SequenceRNNOptions; +}; + +template<> struct BuiltinOptionsUnionTraits { + static const BuiltinOptions enum_value = BuiltinOptions_StridedSliceOptions; +}; + +template<> struct BuiltinOptionsUnionTraits { + static const BuiltinOptions enum_value = BuiltinOptions_ExpOptions; +}; + +template<> struct BuiltinOptionsUnionTraits { + static const BuiltinOptions enum_value = BuiltinOptions_TopKV2Options; +}; + +template<> struct BuiltinOptionsUnionTraits { + static const BuiltinOptions enum_value = BuiltinOptions_SplitOptions; +}; + +template<> struct BuiltinOptionsUnionTraits { + static const BuiltinOptions enum_value = BuiltinOptions_LogSoftmaxOptions; +}; + +template<> struct 
BuiltinOptionsUnionTraits { + static const BuiltinOptions enum_value = BuiltinOptions_CastOptions; +}; + +template<> struct BuiltinOptionsUnionTraits { + static const BuiltinOptions enum_value = BuiltinOptions_DequantizeOptions; +}; + +template<> struct BuiltinOptionsUnionTraits { + static const BuiltinOptions enum_value = BuiltinOptions_MaximumMinimumOptions; +}; + +template<> struct BuiltinOptionsUnionTraits { + static const BuiltinOptions enum_value = BuiltinOptions_ArgMaxOptions; +}; + +template<> struct BuiltinOptionsUnionTraits { + static const BuiltinOptions enum_value = BuiltinOptions_LessOptions; +}; + +template<> struct BuiltinOptionsUnionTraits { + static const BuiltinOptions enum_value = BuiltinOptions_NegOptions; +}; + +template<> struct BuiltinOptionsUnionTraits { + static const BuiltinOptions enum_value = BuiltinOptions_PadV2Options; +}; + +template<> struct BuiltinOptionsUnionTraits { + static const BuiltinOptions enum_value = BuiltinOptions_GreaterOptions; +}; + +template<> struct BuiltinOptionsUnionTraits { + static const BuiltinOptions enum_value = BuiltinOptions_GreaterEqualOptions; +}; + +template<> struct BuiltinOptionsUnionTraits { + static const BuiltinOptions enum_value = BuiltinOptions_LessEqualOptions; +}; + +template<> struct BuiltinOptionsUnionTraits { + static const BuiltinOptions enum_value = BuiltinOptions_SelectOptions; +}; + +template<> struct BuiltinOptionsUnionTraits { + static const BuiltinOptions enum_value = BuiltinOptions_SliceOptions; +}; + +template<> struct BuiltinOptionsUnionTraits { + static const BuiltinOptions enum_value = BuiltinOptions_TransposeConvOptions; +}; + +template<> struct BuiltinOptionsUnionTraits { + static const BuiltinOptions enum_value = BuiltinOptions_SparseToDenseOptions; +}; + +template<> struct BuiltinOptionsUnionTraits { + static const BuiltinOptions enum_value = BuiltinOptions_TileOptions; +}; + +template<> struct BuiltinOptionsUnionTraits { + static const BuiltinOptions enum_value = 
BuiltinOptions_ExpandDimsOptions; +}; + +template<> struct BuiltinOptionsUnionTraits { + static const BuiltinOptions enum_value = BuiltinOptions_EqualOptions; +}; + +template<> struct BuiltinOptionsUnionTraits { + static const BuiltinOptions enum_value = BuiltinOptions_NotEqualOptions; +}; + +template<> struct BuiltinOptionsUnionTraits { + static const BuiltinOptions enum_value = BuiltinOptions_ShapeOptions; +}; + +template<> struct BuiltinOptionsUnionTraits { + static const BuiltinOptions enum_value = BuiltinOptions_PowOptions; +}; + +template<> struct BuiltinOptionsUnionTraits { + static const BuiltinOptions enum_value = BuiltinOptions_ArgMinOptions; +}; + +template<> struct BuiltinOptionsUnionTraits { + static const BuiltinOptions enum_value = BuiltinOptions_FakeQuantOptions; +}; + +template<> struct BuiltinOptionsUnionTraits { + static const BuiltinOptions enum_value = BuiltinOptions_PackOptions; +}; + +template<> struct BuiltinOptionsUnionTraits { + static const BuiltinOptions enum_value = BuiltinOptions_LogicalOrOptions; +}; + +template<> struct BuiltinOptionsUnionTraits { + static const BuiltinOptions enum_value = BuiltinOptions_OneHotOptions; +}; + +template<> struct BuiltinOptionsUnionTraits { + static const BuiltinOptions enum_value = BuiltinOptions_LogicalAndOptions; +}; + +template<> struct BuiltinOptionsUnionTraits { + static const BuiltinOptions enum_value = BuiltinOptions_LogicalNotOptions; +}; + +template<> struct BuiltinOptionsUnionTraits { + static const BuiltinOptions enum_value = BuiltinOptions_UnpackOptions; +}; + +template<> struct BuiltinOptionsUnionTraits { + static const BuiltinOptions enum_value = BuiltinOptions_FloorDivOptions; +}; + +template<> struct BuiltinOptionsUnionTraits { + static const BuiltinOptions enum_value = BuiltinOptions_SquareOptions; +}; + +template<> struct BuiltinOptionsUnionTraits { + static const BuiltinOptions enum_value = BuiltinOptions_ZerosLikeOptions; +}; + +template<> struct BuiltinOptionsUnionTraits { + static 
const BuiltinOptions enum_value = BuiltinOptions_FillOptions; +}; + +template<> struct BuiltinOptionsUnionTraits { + static const BuiltinOptions enum_value = BuiltinOptions_BidirectionalSequenceLSTMOptions; +}; + +template<> struct BuiltinOptionsUnionTraits { + static const BuiltinOptions enum_value = BuiltinOptions_BidirectionalSequenceRNNOptions; +}; + +template<> struct BuiltinOptionsUnionTraits { + static const BuiltinOptions enum_value = BuiltinOptions_UnidirectionalSequenceLSTMOptions; +}; + +template<> struct BuiltinOptionsUnionTraits { + static const BuiltinOptions enum_value = BuiltinOptions_FloorModOptions; +}; + +template<> struct BuiltinOptionsUnionTraits { + static const BuiltinOptions enum_value = BuiltinOptions_RangeOptions; +}; + +template<> struct BuiltinOptionsUnionTraits { + static const BuiltinOptions enum_value = BuiltinOptions_ResizeNearestNeighborOptions; +}; + +template<> struct BuiltinOptionsUnionTraits { + static const BuiltinOptions enum_value = BuiltinOptions_LeakyReluOptions; +}; + +template<> struct BuiltinOptionsUnionTraits { + static const BuiltinOptions enum_value = BuiltinOptions_SquaredDifferenceOptions; +}; + +template<> struct BuiltinOptionsUnionTraits { + static const BuiltinOptions enum_value = BuiltinOptions_MirrorPadOptions; +}; + +template<> struct BuiltinOptionsUnionTraits { + static const BuiltinOptions enum_value = BuiltinOptions_AbsOptions; +}; + +template<> struct BuiltinOptionsUnionTraits { + static const BuiltinOptions enum_value = BuiltinOptions_SplitVOptions; +}; + +template<> struct BuiltinOptionsUnionTraits { + static const BuiltinOptions enum_value = BuiltinOptions_UniqueOptions; +}; + +template<> struct BuiltinOptionsUnionTraits { + static const BuiltinOptions enum_value = BuiltinOptions_ReverseV2Options; +}; + +template<> struct BuiltinOptionsUnionTraits { + static const BuiltinOptions enum_value = BuiltinOptions_AddNOptions; +}; + +template<> struct BuiltinOptionsUnionTraits { + static const BuiltinOptions 
enum_value = BuiltinOptions_GatherNdOptions; +}; + +template<> struct BuiltinOptionsUnionTraits { + static const BuiltinOptions enum_value = BuiltinOptions_CosOptions; +}; + +template<> struct BuiltinOptionsUnionTraits { + static const BuiltinOptions enum_value = BuiltinOptions_WhereOptions; +}; + +template<> struct BuiltinOptionsUnionTraits { + static const BuiltinOptions enum_value = BuiltinOptions_RankOptions; +}; + +template<> struct BuiltinOptionsUnionTraits { + static const BuiltinOptions enum_value = BuiltinOptions_ReverseSequenceOptions; +}; + +template<> struct BuiltinOptionsUnionTraits { + static const BuiltinOptions enum_value = BuiltinOptions_MatrixDiagOptions; +}; + +template<> struct BuiltinOptionsUnionTraits { + static const BuiltinOptions enum_value = BuiltinOptions_QuantizeOptions; +}; + +template<> struct BuiltinOptionsUnionTraits { + static const BuiltinOptions enum_value = BuiltinOptions_MatrixSetDiagOptions; +}; + +template<> struct BuiltinOptionsUnionTraits { + static const BuiltinOptions enum_value = BuiltinOptions_HardSwishOptions; +}; + +template<> struct BuiltinOptionsUnionTraits { + static const BuiltinOptions enum_value = BuiltinOptions_IfOptions; +}; + +template<> struct BuiltinOptionsUnionTraits { + static const BuiltinOptions enum_value = BuiltinOptions_WhileOptions; +}; + +template<> struct BuiltinOptionsUnionTraits { + static const BuiltinOptions enum_value = BuiltinOptions_DepthToSpaceOptions; +}; + +template<> struct BuiltinOptionsUnionTraits { + static const BuiltinOptions enum_value = BuiltinOptions_NonMaxSuppressionV4Options; +}; + +template<> struct BuiltinOptionsUnionTraits { + static const BuiltinOptions enum_value = BuiltinOptions_NonMaxSuppressionV5Options; +}; + +template<> struct BuiltinOptionsUnionTraits { + static const BuiltinOptions enum_value = BuiltinOptions_ScatterNdOptions; +}; + +template<> struct BuiltinOptionsUnionTraits { + static const BuiltinOptions enum_value = BuiltinOptions_SelectV2Options; +}; + 
+template<> struct BuiltinOptionsUnionTraits { + static const BuiltinOptions enum_value = BuiltinOptions_DensifyOptions; +}; + +template<> struct BuiltinOptionsUnionTraits { + static const BuiltinOptions enum_value = BuiltinOptions_SegmentSumOptions; +}; + +template<> struct BuiltinOptionsUnionTraits { + static const BuiltinOptions enum_value = BuiltinOptions_BatchMatMulOptions; +}; + +template<> struct BuiltinOptionsUnionTraits { + static const BuiltinOptions enum_value = BuiltinOptions_CumsumOptions; +}; + +template<> struct BuiltinOptionsUnionTraits { + static const BuiltinOptions enum_value = BuiltinOptions_CallOnceOptions; +}; + +template<> struct BuiltinOptionsUnionTraits { + static const BuiltinOptions enum_value = BuiltinOptions_BroadcastToOptions; +}; + +template<> struct BuiltinOptionsUnionTraits { + static const BuiltinOptions enum_value = BuiltinOptions_Rfft2dOptions; +}; + +template<> struct BuiltinOptionsUnionTraits { + static const BuiltinOptions enum_value = BuiltinOptions_Conv3DOptions; +}; + +template<> struct BuiltinOptionsUnionTraits { + static const BuiltinOptions enum_value = BuiltinOptions_HashtableOptions; +}; + +template<> struct BuiltinOptionsUnionTraits { + static const BuiltinOptions enum_value = BuiltinOptions_HashtableFindOptions; +}; + +template<> struct BuiltinOptionsUnionTraits { + static const BuiltinOptions enum_value = BuiltinOptions_HashtableImportOptions; +}; + +template<> struct BuiltinOptionsUnionTraits { + static const BuiltinOptions enum_value = BuiltinOptions_HashtableSizeOptions; +}; + +template<> struct BuiltinOptionsUnionTraits { + static const BuiltinOptions enum_value = BuiltinOptions_VarHandleOptions; +}; + +template<> struct BuiltinOptionsUnionTraits { + static const BuiltinOptions enum_value = BuiltinOptions_ReadVariableOptions; +}; + +template<> struct BuiltinOptionsUnionTraits { + static const BuiltinOptions enum_value = BuiltinOptions_AssignVariableOptions; +}; + +template<> struct BuiltinOptionsUnionTraits { + 
static const BuiltinOptions enum_value = BuiltinOptions_RandomOptions; +}; + +template<> struct BuiltinOptionsUnionTraits { + static const BuiltinOptions enum_value = BuiltinOptions_BucketizeOptions; +}; + +template<> struct BuiltinOptionsUnionTraits { + static const BuiltinOptions enum_value = BuiltinOptions_GeluOptions; +}; + +template<> struct BuiltinOptionsUnionTraits { + static const BuiltinOptions enum_value = BuiltinOptions_DynamicUpdateSliceOptions; +}; + +template<> struct BuiltinOptionsUnionTraits { + static const BuiltinOptions enum_value = BuiltinOptions_UnsortedSegmentProdOptions; +}; + +template<> struct BuiltinOptionsUnionTraits { + static const BuiltinOptions enum_value = BuiltinOptions_UnsortedSegmentMaxOptions; +}; + +template<> struct BuiltinOptionsUnionTraits { + static const BuiltinOptions enum_value = BuiltinOptions_UnsortedSegmentMinOptions; +}; + +template<> struct BuiltinOptionsUnionTraits { + static const BuiltinOptions enum_value = BuiltinOptions_UnsortedSegmentSumOptions; +}; + +template<> struct BuiltinOptionsUnionTraits { + static const BuiltinOptions enum_value = BuiltinOptions_ATan2Options; +}; + +template<> struct BuiltinOptionsUnionTraits { + static const BuiltinOptions enum_value = BuiltinOptions_SignOptions; +}; + +template<> struct BuiltinOptionsUnionTraits { + static const BuiltinOptions enum_value = BuiltinOptions_BitcastOptions; +}; + +template<> struct BuiltinOptionsUnionTraits { + static const BuiltinOptions enum_value = BuiltinOptions_BitwiseXorOptions; +}; + +template<> struct BuiltinOptionsUnionTraits { + static const BuiltinOptions enum_value = BuiltinOptions_RightShiftOptions; +}; + +struct BuiltinOptionsUnion { + BuiltinOptions type; + void *value; + + BuiltinOptionsUnion() : type(BuiltinOptions_NONE), value(nullptr) {} + BuiltinOptionsUnion(BuiltinOptionsUnion&& u) FLATBUFFERS_NOEXCEPT : + type(BuiltinOptions_NONE), value(nullptr) + { std::swap(type, u.type); std::swap(value, u.value); } + BuiltinOptionsUnion(const 
BuiltinOptionsUnion &); + BuiltinOptionsUnion &operator=(const BuiltinOptionsUnion &u) + { BuiltinOptionsUnion t(u); std::swap(type, t.type); std::swap(value, t.value); return *this; } + BuiltinOptionsUnion &operator=(BuiltinOptionsUnion &&u) FLATBUFFERS_NOEXCEPT + { std::swap(type, u.type); std::swap(value, u.value); return *this; } + ~BuiltinOptionsUnion() { Reset(); } + + void Reset(); + + template + void Set(T&& val) { + typedef typename std::remove_reference::type RT; + Reset(); + type = BuiltinOptionsUnionTraits::enum_value; + if (type != BuiltinOptions_NONE) { + value = new RT(std::forward(val)); + } + } + + static void *UnPack(const void *obj, BuiltinOptions type, const flatbuffers::resolver_function_t *resolver); + flatbuffers::Offset Pack(flatbuffers::FlatBufferBuilder &_fbb, const flatbuffers::rehasher_function_t *_rehasher = nullptr) const; + + tflite::Conv2DOptionsT *AsConv2DOptions() { + return type == BuiltinOptions_Conv2DOptions ? + reinterpret_cast(value) : nullptr; + } + const tflite::Conv2DOptionsT *AsConv2DOptions() const { + return type == BuiltinOptions_Conv2DOptions ? + reinterpret_cast(value) : nullptr; + } + tflite::DepthwiseConv2DOptionsT *AsDepthwiseConv2DOptions() { + return type == BuiltinOptions_DepthwiseConv2DOptions ? + reinterpret_cast(value) : nullptr; + } + const tflite::DepthwiseConv2DOptionsT *AsDepthwiseConv2DOptions() const { + return type == BuiltinOptions_DepthwiseConv2DOptions ? + reinterpret_cast(value) : nullptr; + } + tflite::ConcatEmbeddingsOptionsT *AsConcatEmbeddingsOptions() { + return type == BuiltinOptions_ConcatEmbeddingsOptions ? + reinterpret_cast(value) : nullptr; + } + const tflite::ConcatEmbeddingsOptionsT *AsConcatEmbeddingsOptions() const { + return type == BuiltinOptions_ConcatEmbeddingsOptions ? + reinterpret_cast(value) : nullptr; + } + tflite::LSHProjectionOptionsT *AsLSHProjectionOptions() { + return type == BuiltinOptions_LSHProjectionOptions ? 
+ reinterpret_cast(value) : nullptr; + } + const tflite::LSHProjectionOptionsT *AsLSHProjectionOptions() const { + return type == BuiltinOptions_LSHProjectionOptions ? + reinterpret_cast(value) : nullptr; + } + tflite::Pool2DOptionsT *AsPool2DOptions() { + return type == BuiltinOptions_Pool2DOptions ? + reinterpret_cast(value) : nullptr; + } + const tflite::Pool2DOptionsT *AsPool2DOptions() const { + return type == BuiltinOptions_Pool2DOptions ? + reinterpret_cast(value) : nullptr; + } + tflite::SVDFOptionsT *AsSVDFOptions() { + return type == BuiltinOptions_SVDFOptions ? + reinterpret_cast(value) : nullptr; + } + const tflite::SVDFOptionsT *AsSVDFOptions() const { + return type == BuiltinOptions_SVDFOptions ? + reinterpret_cast(value) : nullptr; + } + tflite::RNNOptionsT *AsRNNOptions() { + return type == BuiltinOptions_RNNOptions ? + reinterpret_cast(value) : nullptr; + } + const tflite::RNNOptionsT *AsRNNOptions() const { + return type == BuiltinOptions_RNNOptions ? + reinterpret_cast(value) : nullptr; + } + tflite::FullyConnectedOptionsT *AsFullyConnectedOptions() { + return type == BuiltinOptions_FullyConnectedOptions ? + reinterpret_cast(value) : nullptr; + } + const tflite::FullyConnectedOptionsT *AsFullyConnectedOptions() const { + return type == BuiltinOptions_FullyConnectedOptions ? + reinterpret_cast(value) : nullptr; + } + tflite::SoftmaxOptionsT *AsSoftmaxOptions() { + return type == BuiltinOptions_SoftmaxOptions ? + reinterpret_cast(value) : nullptr; + } + const tflite::SoftmaxOptionsT *AsSoftmaxOptions() const { + return type == BuiltinOptions_SoftmaxOptions ? + reinterpret_cast(value) : nullptr; + } + tflite::ConcatenationOptionsT *AsConcatenationOptions() { + return type == BuiltinOptions_ConcatenationOptions ? + reinterpret_cast(value) : nullptr; + } + const tflite::ConcatenationOptionsT *AsConcatenationOptions() const { + return type == BuiltinOptions_ConcatenationOptions ? 
+ reinterpret_cast(value) : nullptr; + } + tflite::AddOptionsT *AsAddOptions() { + return type == BuiltinOptions_AddOptions ? + reinterpret_cast(value) : nullptr; + } + const tflite::AddOptionsT *AsAddOptions() const { + return type == BuiltinOptions_AddOptions ? + reinterpret_cast(value) : nullptr; + } + tflite::L2NormOptionsT *AsL2NormOptions() { + return type == BuiltinOptions_L2NormOptions ? + reinterpret_cast(value) : nullptr; + } + const tflite::L2NormOptionsT *AsL2NormOptions() const { + return type == BuiltinOptions_L2NormOptions ? + reinterpret_cast(value) : nullptr; + } + tflite::LocalResponseNormalizationOptionsT *AsLocalResponseNormalizationOptions() { + return type == BuiltinOptions_LocalResponseNormalizationOptions ? + reinterpret_cast(value) : nullptr; + } + const tflite::LocalResponseNormalizationOptionsT *AsLocalResponseNormalizationOptions() const { + return type == BuiltinOptions_LocalResponseNormalizationOptions ? + reinterpret_cast(value) : nullptr; + } + tflite::LSTMOptionsT *AsLSTMOptions() { + return type == BuiltinOptions_LSTMOptions ? + reinterpret_cast(value) : nullptr; + } + const tflite::LSTMOptionsT *AsLSTMOptions() const { + return type == BuiltinOptions_LSTMOptions ? + reinterpret_cast(value) : nullptr; + } + tflite::ResizeBilinearOptionsT *AsResizeBilinearOptions() { + return type == BuiltinOptions_ResizeBilinearOptions ? + reinterpret_cast(value) : nullptr; + } + const tflite::ResizeBilinearOptionsT *AsResizeBilinearOptions() const { + return type == BuiltinOptions_ResizeBilinearOptions ? + reinterpret_cast(value) : nullptr; + } + tflite::CallOptionsT *AsCallOptions() { + return type == BuiltinOptions_CallOptions ? + reinterpret_cast(value) : nullptr; + } + const tflite::CallOptionsT *AsCallOptions() const { + return type == BuiltinOptions_CallOptions ? + reinterpret_cast(value) : nullptr; + } + tflite::ReshapeOptionsT *AsReshapeOptions() { + return type == BuiltinOptions_ReshapeOptions ? 
+ reinterpret_cast(value) : nullptr; + } + const tflite::ReshapeOptionsT *AsReshapeOptions() const { + return type == BuiltinOptions_ReshapeOptions ? + reinterpret_cast(value) : nullptr; + } + tflite::SkipGramOptionsT *AsSkipGramOptions() { + return type == BuiltinOptions_SkipGramOptions ? + reinterpret_cast(value) : nullptr; + } + const tflite::SkipGramOptionsT *AsSkipGramOptions() const { + return type == BuiltinOptions_SkipGramOptions ? + reinterpret_cast(value) : nullptr; + } + tflite::SpaceToDepthOptionsT *AsSpaceToDepthOptions() { + return type == BuiltinOptions_SpaceToDepthOptions ? + reinterpret_cast(value) : nullptr; + } + const tflite::SpaceToDepthOptionsT *AsSpaceToDepthOptions() const { + return type == BuiltinOptions_SpaceToDepthOptions ? + reinterpret_cast(value) : nullptr; + } + tflite::EmbeddingLookupSparseOptionsT *AsEmbeddingLookupSparseOptions() { + return type == BuiltinOptions_EmbeddingLookupSparseOptions ? + reinterpret_cast(value) : nullptr; + } + const tflite::EmbeddingLookupSparseOptionsT *AsEmbeddingLookupSparseOptions() const { + return type == BuiltinOptions_EmbeddingLookupSparseOptions ? + reinterpret_cast(value) : nullptr; + } + tflite::MulOptionsT *AsMulOptions() { + return type == BuiltinOptions_MulOptions ? + reinterpret_cast(value) : nullptr; + } + const tflite::MulOptionsT *AsMulOptions() const { + return type == BuiltinOptions_MulOptions ? + reinterpret_cast(value) : nullptr; + } + tflite::PadOptionsT *AsPadOptions() { + return type == BuiltinOptions_PadOptions ? + reinterpret_cast(value) : nullptr; + } + const tflite::PadOptionsT *AsPadOptions() const { + return type == BuiltinOptions_PadOptions ? + reinterpret_cast(value) : nullptr; + } + tflite::GatherOptionsT *AsGatherOptions() { + return type == BuiltinOptions_GatherOptions ? + reinterpret_cast(value) : nullptr; + } + const tflite::GatherOptionsT *AsGatherOptions() const { + return type == BuiltinOptions_GatherOptions ? 
+ reinterpret_cast(value) : nullptr; + } + tflite::BatchToSpaceNDOptionsT *AsBatchToSpaceNDOptions() { + return type == BuiltinOptions_BatchToSpaceNDOptions ? + reinterpret_cast(value) : nullptr; + } + const tflite::BatchToSpaceNDOptionsT *AsBatchToSpaceNDOptions() const { + return type == BuiltinOptions_BatchToSpaceNDOptions ? + reinterpret_cast(value) : nullptr; + } + tflite::SpaceToBatchNDOptionsT *AsSpaceToBatchNDOptions() { + return type == BuiltinOptions_SpaceToBatchNDOptions ? + reinterpret_cast(value) : nullptr; + } + const tflite::SpaceToBatchNDOptionsT *AsSpaceToBatchNDOptions() const { + return type == BuiltinOptions_SpaceToBatchNDOptions ? + reinterpret_cast(value) : nullptr; + } + tflite::TransposeOptionsT *AsTransposeOptions() { + return type == BuiltinOptions_TransposeOptions ? + reinterpret_cast(value) : nullptr; + } + const tflite::TransposeOptionsT *AsTransposeOptions() const { + return type == BuiltinOptions_TransposeOptions ? + reinterpret_cast(value) : nullptr; + } + tflite::ReducerOptionsT *AsReducerOptions() { + return type == BuiltinOptions_ReducerOptions ? + reinterpret_cast(value) : nullptr; + } + const tflite::ReducerOptionsT *AsReducerOptions() const { + return type == BuiltinOptions_ReducerOptions ? + reinterpret_cast(value) : nullptr; + } + tflite::SubOptionsT *AsSubOptions() { + return type == BuiltinOptions_SubOptions ? + reinterpret_cast(value) : nullptr; + } + const tflite::SubOptionsT *AsSubOptions() const { + return type == BuiltinOptions_SubOptions ? + reinterpret_cast(value) : nullptr; + } + tflite::DivOptionsT *AsDivOptions() { + return type == BuiltinOptions_DivOptions ? + reinterpret_cast(value) : nullptr; + } + const tflite::DivOptionsT *AsDivOptions() const { + return type == BuiltinOptions_DivOptions ? + reinterpret_cast(value) : nullptr; + } + tflite::SqueezeOptionsT *AsSqueezeOptions() { + return type == BuiltinOptions_SqueezeOptions ? 
+ reinterpret_cast(value) : nullptr; + } + const tflite::SqueezeOptionsT *AsSqueezeOptions() const { + return type == BuiltinOptions_SqueezeOptions ? + reinterpret_cast(value) : nullptr; + } + tflite::SequenceRNNOptionsT *AsSequenceRNNOptions() { + return type == BuiltinOptions_SequenceRNNOptions ? + reinterpret_cast(value) : nullptr; + } + const tflite::SequenceRNNOptionsT *AsSequenceRNNOptions() const { + return type == BuiltinOptions_SequenceRNNOptions ? + reinterpret_cast(value) : nullptr; + } + tflite::StridedSliceOptionsT *AsStridedSliceOptions() { + return type == BuiltinOptions_StridedSliceOptions ? + reinterpret_cast(value) : nullptr; + } + const tflite::StridedSliceOptionsT *AsStridedSliceOptions() const { + return type == BuiltinOptions_StridedSliceOptions ? + reinterpret_cast(value) : nullptr; + } + tflite::ExpOptionsT *AsExpOptions() { + return type == BuiltinOptions_ExpOptions ? + reinterpret_cast(value) : nullptr; + } + const tflite::ExpOptionsT *AsExpOptions() const { + return type == BuiltinOptions_ExpOptions ? + reinterpret_cast(value) : nullptr; + } + tflite::TopKV2OptionsT *AsTopKV2Options() { + return type == BuiltinOptions_TopKV2Options ? + reinterpret_cast(value) : nullptr; + } + const tflite::TopKV2OptionsT *AsTopKV2Options() const { + return type == BuiltinOptions_TopKV2Options ? + reinterpret_cast(value) : nullptr; + } + tflite::SplitOptionsT *AsSplitOptions() { + return type == BuiltinOptions_SplitOptions ? + reinterpret_cast(value) : nullptr; + } + const tflite::SplitOptionsT *AsSplitOptions() const { + return type == BuiltinOptions_SplitOptions ? + reinterpret_cast(value) : nullptr; + } + tflite::LogSoftmaxOptionsT *AsLogSoftmaxOptions() { + return type == BuiltinOptions_LogSoftmaxOptions ? + reinterpret_cast(value) : nullptr; + } + const tflite::LogSoftmaxOptionsT *AsLogSoftmaxOptions() const { + return type == BuiltinOptions_LogSoftmaxOptions ? 
+ reinterpret_cast(value) : nullptr; + } + tflite::CastOptionsT *AsCastOptions() { + return type == BuiltinOptions_CastOptions ? + reinterpret_cast(value) : nullptr; + } + const tflite::CastOptionsT *AsCastOptions() const { + return type == BuiltinOptions_CastOptions ? + reinterpret_cast(value) : nullptr; + } + tflite::DequantizeOptionsT *AsDequantizeOptions() { + return type == BuiltinOptions_DequantizeOptions ? + reinterpret_cast(value) : nullptr; + } + const tflite::DequantizeOptionsT *AsDequantizeOptions() const { + return type == BuiltinOptions_DequantizeOptions ? + reinterpret_cast(value) : nullptr; + } + tflite::MaximumMinimumOptionsT *AsMaximumMinimumOptions() { + return type == BuiltinOptions_MaximumMinimumOptions ? + reinterpret_cast(value) : nullptr; + } + const tflite::MaximumMinimumOptionsT *AsMaximumMinimumOptions() const { + return type == BuiltinOptions_MaximumMinimumOptions ? + reinterpret_cast(value) : nullptr; + } + tflite::ArgMaxOptionsT *AsArgMaxOptions() { + return type == BuiltinOptions_ArgMaxOptions ? + reinterpret_cast(value) : nullptr; + } + const tflite::ArgMaxOptionsT *AsArgMaxOptions() const { + return type == BuiltinOptions_ArgMaxOptions ? + reinterpret_cast(value) : nullptr; + } + tflite::LessOptionsT *AsLessOptions() { + return type == BuiltinOptions_LessOptions ? + reinterpret_cast(value) : nullptr; + } + const tflite::LessOptionsT *AsLessOptions() const { + return type == BuiltinOptions_LessOptions ? + reinterpret_cast(value) : nullptr; + } + tflite::NegOptionsT *AsNegOptions() { + return type == BuiltinOptions_NegOptions ? + reinterpret_cast(value) : nullptr; + } + const tflite::NegOptionsT *AsNegOptions() const { + return type == BuiltinOptions_NegOptions ? + reinterpret_cast(value) : nullptr; + } + tflite::PadV2OptionsT *AsPadV2Options() { + return type == BuiltinOptions_PadV2Options ? + reinterpret_cast(value) : nullptr; + } + const tflite::PadV2OptionsT *AsPadV2Options() const { + return type == BuiltinOptions_PadV2Options ? 
+ reinterpret_cast(value) : nullptr; + } + tflite::GreaterOptionsT *AsGreaterOptions() { + return type == BuiltinOptions_GreaterOptions ? + reinterpret_cast(value) : nullptr; + } + const tflite::GreaterOptionsT *AsGreaterOptions() const { + return type == BuiltinOptions_GreaterOptions ? + reinterpret_cast(value) : nullptr; + } + tflite::GreaterEqualOptionsT *AsGreaterEqualOptions() { + return type == BuiltinOptions_GreaterEqualOptions ? + reinterpret_cast(value) : nullptr; + } + const tflite::GreaterEqualOptionsT *AsGreaterEqualOptions() const { + return type == BuiltinOptions_GreaterEqualOptions ? + reinterpret_cast(value) : nullptr; + } + tflite::LessEqualOptionsT *AsLessEqualOptions() { + return type == BuiltinOptions_LessEqualOptions ? + reinterpret_cast(value) : nullptr; + } + const tflite::LessEqualOptionsT *AsLessEqualOptions() const { + return type == BuiltinOptions_LessEqualOptions ? + reinterpret_cast(value) : nullptr; + } + tflite::SelectOptionsT *AsSelectOptions() { + return type == BuiltinOptions_SelectOptions ? + reinterpret_cast(value) : nullptr; + } + const tflite::SelectOptionsT *AsSelectOptions() const { + return type == BuiltinOptions_SelectOptions ? + reinterpret_cast(value) : nullptr; + } + tflite::SliceOptionsT *AsSliceOptions() { + return type == BuiltinOptions_SliceOptions ? + reinterpret_cast(value) : nullptr; + } + const tflite::SliceOptionsT *AsSliceOptions() const { + return type == BuiltinOptions_SliceOptions ? + reinterpret_cast(value) : nullptr; + } + tflite::TransposeConvOptionsT *AsTransposeConvOptions() { + return type == BuiltinOptions_TransposeConvOptions ? + reinterpret_cast(value) : nullptr; + } + const tflite::TransposeConvOptionsT *AsTransposeConvOptions() const { + return type == BuiltinOptions_TransposeConvOptions ? + reinterpret_cast(value) : nullptr; + } + tflite::SparseToDenseOptionsT *AsSparseToDenseOptions() { + return type == BuiltinOptions_SparseToDenseOptions ? 
+ reinterpret_cast(value) : nullptr; + } + const tflite::SparseToDenseOptionsT *AsSparseToDenseOptions() const { + return type == BuiltinOptions_SparseToDenseOptions ? + reinterpret_cast(value) : nullptr; + } + tflite::TileOptionsT *AsTileOptions() { + return type == BuiltinOptions_TileOptions ? + reinterpret_cast(value) : nullptr; + } + const tflite::TileOptionsT *AsTileOptions() const { + return type == BuiltinOptions_TileOptions ? + reinterpret_cast(value) : nullptr; + } + tflite::ExpandDimsOptionsT *AsExpandDimsOptions() { + return type == BuiltinOptions_ExpandDimsOptions ? + reinterpret_cast(value) : nullptr; + } + const tflite::ExpandDimsOptionsT *AsExpandDimsOptions() const { + return type == BuiltinOptions_ExpandDimsOptions ? + reinterpret_cast(value) : nullptr; + } + tflite::EqualOptionsT *AsEqualOptions() { + return type == BuiltinOptions_EqualOptions ? + reinterpret_cast(value) : nullptr; + } + const tflite::EqualOptionsT *AsEqualOptions() const { + return type == BuiltinOptions_EqualOptions ? + reinterpret_cast(value) : nullptr; + } + tflite::NotEqualOptionsT *AsNotEqualOptions() { + return type == BuiltinOptions_NotEqualOptions ? + reinterpret_cast(value) : nullptr; + } + const tflite::NotEqualOptionsT *AsNotEqualOptions() const { + return type == BuiltinOptions_NotEqualOptions ? + reinterpret_cast(value) : nullptr; + } + tflite::ShapeOptionsT *AsShapeOptions() { + return type == BuiltinOptions_ShapeOptions ? + reinterpret_cast(value) : nullptr; + } + const tflite::ShapeOptionsT *AsShapeOptions() const { + return type == BuiltinOptions_ShapeOptions ? + reinterpret_cast(value) : nullptr; + } + tflite::PowOptionsT *AsPowOptions() { + return type == BuiltinOptions_PowOptions ? + reinterpret_cast(value) : nullptr; + } + const tflite::PowOptionsT *AsPowOptions() const { + return type == BuiltinOptions_PowOptions ? + reinterpret_cast(value) : nullptr; + } + tflite::ArgMinOptionsT *AsArgMinOptions() { + return type == BuiltinOptions_ArgMinOptions ? 
+ reinterpret_cast(value) : nullptr; + } + const tflite::ArgMinOptionsT *AsArgMinOptions() const { + return type == BuiltinOptions_ArgMinOptions ? + reinterpret_cast(value) : nullptr; + } + tflite::FakeQuantOptionsT *AsFakeQuantOptions() { + return type == BuiltinOptions_FakeQuantOptions ? + reinterpret_cast(value) : nullptr; + } + const tflite::FakeQuantOptionsT *AsFakeQuantOptions() const { + return type == BuiltinOptions_FakeQuantOptions ? + reinterpret_cast(value) : nullptr; + } + tflite::PackOptionsT *AsPackOptions() { + return type == BuiltinOptions_PackOptions ? + reinterpret_cast(value) : nullptr; + } + const tflite::PackOptionsT *AsPackOptions() const { + return type == BuiltinOptions_PackOptions ? + reinterpret_cast(value) : nullptr; + } + tflite::LogicalOrOptionsT *AsLogicalOrOptions() { + return type == BuiltinOptions_LogicalOrOptions ? + reinterpret_cast(value) : nullptr; + } + const tflite::LogicalOrOptionsT *AsLogicalOrOptions() const { + return type == BuiltinOptions_LogicalOrOptions ? + reinterpret_cast(value) : nullptr; + } + tflite::OneHotOptionsT *AsOneHotOptions() { + return type == BuiltinOptions_OneHotOptions ? + reinterpret_cast(value) : nullptr; + } + const tflite::OneHotOptionsT *AsOneHotOptions() const { + return type == BuiltinOptions_OneHotOptions ? + reinterpret_cast(value) : nullptr; + } + tflite::LogicalAndOptionsT *AsLogicalAndOptions() { + return type == BuiltinOptions_LogicalAndOptions ? + reinterpret_cast(value) : nullptr; + } + const tflite::LogicalAndOptionsT *AsLogicalAndOptions() const { + return type == BuiltinOptions_LogicalAndOptions ? + reinterpret_cast(value) : nullptr; + } + tflite::LogicalNotOptionsT *AsLogicalNotOptions() { + return type == BuiltinOptions_LogicalNotOptions ? + reinterpret_cast(value) : nullptr; + } + const tflite::LogicalNotOptionsT *AsLogicalNotOptions() const { + return type == BuiltinOptions_LogicalNotOptions ? 
+ reinterpret_cast(value) : nullptr; + } + tflite::UnpackOptionsT *AsUnpackOptions() { + return type == BuiltinOptions_UnpackOptions ? + reinterpret_cast(value) : nullptr; + } + const tflite::UnpackOptionsT *AsUnpackOptions() const { + return type == BuiltinOptions_UnpackOptions ? + reinterpret_cast(value) : nullptr; + } + tflite::FloorDivOptionsT *AsFloorDivOptions() { + return type == BuiltinOptions_FloorDivOptions ? + reinterpret_cast(value) : nullptr; + } + const tflite::FloorDivOptionsT *AsFloorDivOptions() const { + return type == BuiltinOptions_FloorDivOptions ? + reinterpret_cast(value) : nullptr; + } + tflite::SquareOptionsT *AsSquareOptions() { + return type == BuiltinOptions_SquareOptions ? + reinterpret_cast(value) : nullptr; + } + const tflite::SquareOptionsT *AsSquareOptions() const { + return type == BuiltinOptions_SquareOptions ? + reinterpret_cast(value) : nullptr; + } + tflite::ZerosLikeOptionsT *AsZerosLikeOptions() { + return type == BuiltinOptions_ZerosLikeOptions ? + reinterpret_cast(value) : nullptr; + } + const tflite::ZerosLikeOptionsT *AsZerosLikeOptions() const { + return type == BuiltinOptions_ZerosLikeOptions ? + reinterpret_cast(value) : nullptr; + } + tflite::FillOptionsT *AsFillOptions() { + return type == BuiltinOptions_FillOptions ? + reinterpret_cast(value) : nullptr; + } + const tflite::FillOptionsT *AsFillOptions() const { + return type == BuiltinOptions_FillOptions ? + reinterpret_cast(value) : nullptr; + } + tflite::BidirectionalSequenceLSTMOptionsT *AsBidirectionalSequenceLSTMOptions() { + return type == BuiltinOptions_BidirectionalSequenceLSTMOptions ? + reinterpret_cast(value) : nullptr; + } + const tflite::BidirectionalSequenceLSTMOptionsT *AsBidirectionalSequenceLSTMOptions() const { + return type == BuiltinOptions_BidirectionalSequenceLSTMOptions ? 
+ reinterpret_cast(value) : nullptr; + } + tflite::BidirectionalSequenceRNNOptionsT *AsBidirectionalSequenceRNNOptions() { + return type == BuiltinOptions_BidirectionalSequenceRNNOptions ? + reinterpret_cast(value) : nullptr; + } + const tflite::BidirectionalSequenceRNNOptionsT *AsBidirectionalSequenceRNNOptions() const { + return type == BuiltinOptions_BidirectionalSequenceRNNOptions ? + reinterpret_cast(value) : nullptr; + } + tflite::UnidirectionalSequenceLSTMOptionsT *AsUnidirectionalSequenceLSTMOptions() { + return type == BuiltinOptions_UnidirectionalSequenceLSTMOptions ? + reinterpret_cast(value) : nullptr; + } + const tflite::UnidirectionalSequenceLSTMOptionsT *AsUnidirectionalSequenceLSTMOptions() const { + return type == BuiltinOptions_UnidirectionalSequenceLSTMOptions ? + reinterpret_cast(value) : nullptr; + } + tflite::FloorModOptionsT *AsFloorModOptions() { + return type == BuiltinOptions_FloorModOptions ? + reinterpret_cast(value) : nullptr; + } + const tflite::FloorModOptionsT *AsFloorModOptions() const { + return type == BuiltinOptions_FloorModOptions ? + reinterpret_cast(value) : nullptr; + } + tflite::RangeOptionsT *AsRangeOptions() { + return type == BuiltinOptions_RangeOptions ? + reinterpret_cast(value) : nullptr; + } + const tflite::RangeOptionsT *AsRangeOptions() const { + return type == BuiltinOptions_RangeOptions ? + reinterpret_cast(value) : nullptr; + } + tflite::ResizeNearestNeighborOptionsT *AsResizeNearestNeighborOptions() { + return type == BuiltinOptions_ResizeNearestNeighborOptions ? + reinterpret_cast(value) : nullptr; + } + const tflite::ResizeNearestNeighborOptionsT *AsResizeNearestNeighborOptions() const { + return type == BuiltinOptions_ResizeNearestNeighborOptions ? + reinterpret_cast(value) : nullptr; + } + tflite::LeakyReluOptionsT *AsLeakyReluOptions() { + return type == BuiltinOptions_LeakyReluOptions ? 
+ reinterpret_cast(value) : nullptr; + } + const tflite::LeakyReluOptionsT *AsLeakyReluOptions() const { + return type == BuiltinOptions_LeakyReluOptions ? + reinterpret_cast(value) : nullptr; + } + tflite::SquaredDifferenceOptionsT *AsSquaredDifferenceOptions() { + return type == BuiltinOptions_SquaredDifferenceOptions ? + reinterpret_cast(value) : nullptr; + } + const tflite::SquaredDifferenceOptionsT *AsSquaredDifferenceOptions() const { + return type == BuiltinOptions_SquaredDifferenceOptions ? + reinterpret_cast(value) : nullptr; + } + tflite::MirrorPadOptionsT *AsMirrorPadOptions() { + return type == BuiltinOptions_MirrorPadOptions ? + reinterpret_cast(value) : nullptr; + } + const tflite::MirrorPadOptionsT *AsMirrorPadOptions() const { + return type == BuiltinOptions_MirrorPadOptions ? + reinterpret_cast(value) : nullptr; + } + tflite::AbsOptionsT *AsAbsOptions() { + return type == BuiltinOptions_AbsOptions ? + reinterpret_cast(value) : nullptr; + } + const tflite::AbsOptionsT *AsAbsOptions() const { + return type == BuiltinOptions_AbsOptions ? + reinterpret_cast(value) : nullptr; + } + tflite::SplitVOptionsT *AsSplitVOptions() { + return type == BuiltinOptions_SplitVOptions ? + reinterpret_cast(value) : nullptr; + } + const tflite::SplitVOptionsT *AsSplitVOptions() const { + return type == BuiltinOptions_SplitVOptions ? + reinterpret_cast(value) : nullptr; + } + tflite::UniqueOptionsT *AsUniqueOptions() { + return type == BuiltinOptions_UniqueOptions ? + reinterpret_cast(value) : nullptr; + } + const tflite::UniqueOptionsT *AsUniqueOptions() const { + return type == BuiltinOptions_UniqueOptions ? + reinterpret_cast(value) : nullptr; + } + tflite::ReverseV2OptionsT *AsReverseV2Options() { + return type == BuiltinOptions_ReverseV2Options ? + reinterpret_cast(value) : nullptr; + } + const tflite::ReverseV2OptionsT *AsReverseV2Options() const { + return type == BuiltinOptions_ReverseV2Options ? 
+ reinterpret_cast(value) : nullptr; + } + tflite::AddNOptionsT *AsAddNOptions() { + return type == BuiltinOptions_AddNOptions ? + reinterpret_cast(value) : nullptr; + } + const tflite::AddNOptionsT *AsAddNOptions() const { + return type == BuiltinOptions_AddNOptions ? + reinterpret_cast(value) : nullptr; + } + tflite::GatherNdOptionsT *AsGatherNdOptions() { + return type == BuiltinOptions_GatherNdOptions ? + reinterpret_cast(value) : nullptr; + } + const tflite::GatherNdOptionsT *AsGatherNdOptions() const { + return type == BuiltinOptions_GatherNdOptions ? + reinterpret_cast(value) : nullptr; + } + tflite::CosOptionsT *AsCosOptions() { + return type == BuiltinOptions_CosOptions ? + reinterpret_cast(value) : nullptr; + } + const tflite::CosOptionsT *AsCosOptions() const { + return type == BuiltinOptions_CosOptions ? + reinterpret_cast(value) : nullptr; + } + tflite::WhereOptionsT *AsWhereOptions() { + return type == BuiltinOptions_WhereOptions ? + reinterpret_cast(value) : nullptr; + } + const tflite::WhereOptionsT *AsWhereOptions() const { + return type == BuiltinOptions_WhereOptions ? + reinterpret_cast(value) : nullptr; + } + tflite::RankOptionsT *AsRankOptions() { + return type == BuiltinOptions_RankOptions ? + reinterpret_cast(value) : nullptr; + } + const tflite::RankOptionsT *AsRankOptions() const { + return type == BuiltinOptions_RankOptions ? + reinterpret_cast(value) : nullptr; + } + tflite::ReverseSequenceOptionsT *AsReverseSequenceOptions() { + return type == BuiltinOptions_ReverseSequenceOptions ? + reinterpret_cast(value) : nullptr; + } + const tflite::ReverseSequenceOptionsT *AsReverseSequenceOptions() const { + return type == BuiltinOptions_ReverseSequenceOptions ? + reinterpret_cast(value) : nullptr; + } + tflite::MatrixDiagOptionsT *AsMatrixDiagOptions() { + return type == BuiltinOptions_MatrixDiagOptions ? 
+ reinterpret_cast(value) : nullptr; + } + const tflite::MatrixDiagOptionsT *AsMatrixDiagOptions() const { + return type == BuiltinOptions_MatrixDiagOptions ? + reinterpret_cast(value) : nullptr; + } + tflite::QuantizeOptionsT *AsQuantizeOptions() { + return type == BuiltinOptions_QuantizeOptions ? + reinterpret_cast(value) : nullptr; + } + const tflite::QuantizeOptionsT *AsQuantizeOptions() const { + return type == BuiltinOptions_QuantizeOptions ? + reinterpret_cast(value) : nullptr; + } + tflite::MatrixSetDiagOptionsT *AsMatrixSetDiagOptions() { + return type == BuiltinOptions_MatrixSetDiagOptions ? + reinterpret_cast(value) : nullptr; + } + const tflite::MatrixSetDiagOptionsT *AsMatrixSetDiagOptions() const { + return type == BuiltinOptions_MatrixSetDiagOptions ? + reinterpret_cast(value) : nullptr; + } + tflite::HardSwishOptionsT *AsHardSwishOptions() { + return type == BuiltinOptions_HardSwishOptions ? + reinterpret_cast(value) : nullptr; + } + const tflite::HardSwishOptionsT *AsHardSwishOptions() const { + return type == BuiltinOptions_HardSwishOptions ? + reinterpret_cast(value) : nullptr; + } + tflite::IfOptionsT *AsIfOptions() { + return type == BuiltinOptions_IfOptions ? + reinterpret_cast(value) : nullptr; + } + const tflite::IfOptionsT *AsIfOptions() const { + return type == BuiltinOptions_IfOptions ? + reinterpret_cast(value) : nullptr; + } + tflite::WhileOptionsT *AsWhileOptions() { + return type == BuiltinOptions_WhileOptions ? + reinterpret_cast(value) : nullptr; + } + const tflite::WhileOptionsT *AsWhileOptions() const { + return type == BuiltinOptions_WhileOptions ? + reinterpret_cast(value) : nullptr; + } + tflite::DepthToSpaceOptionsT *AsDepthToSpaceOptions() { + return type == BuiltinOptions_DepthToSpaceOptions ? + reinterpret_cast(value) : nullptr; + } + const tflite::DepthToSpaceOptionsT *AsDepthToSpaceOptions() const { + return type == BuiltinOptions_DepthToSpaceOptions ? 
+ reinterpret_cast(value) : nullptr; + } + tflite::NonMaxSuppressionV4OptionsT *AsNonMaxSuppressionV4Options() { + return type == BuiltinOptions_NonMaxSuppressionV4Options ? + reinterpret_cast(value) : nullptr; + } + const tflite::NonMaxSuppressionV4OptionsT *AsNonMaxSuppressionV4Options() const { + return type == BuiltinOptions_NonMaxSuppressionV4Options ? + reinterpret_cast(value) : nullptr; + } + tflite::NonMaxSuppressionV5OptionsT *AsNonMaxSuppressionV5Options() { + return type == BuiltinOptions_NonMaxSuppressionV5Options ? + reinterpret_cast(value) : nullptr; + } + const tflite::NonMaxSuppressionV5OptionsT *AsNonMaxSuppressionV5Options() const { + return type == BuiltinOptions_NonMaxSuppressionV5Options ? + reinterpret_cast(value) : nullptr; + } + tflite::ScatterNdOptionsT *AsScatterNdOptions() { + return type == BuiltinOptions_ScatterNdOptions ? + reinterpret_cast(value) : nullptr; + } + const tflite::ScatterNdOptionsT *AsScatterNdOptions() const { + return type == BuiltinOptions_ScatterNdOptions ? + reinterpret_cast(value) : nullptr; + } + tflite::SelectV2OptionsT *AsSelectV2Options() { + return type == BuiltinOptions_SelectV2Options ? + reinterpret_cast(value) : nullptr; + } + const tflite::SelectV2OptionsT *AsSelectV2Options() const { + return type == BuiltinOptions_SelectV2Options ? + reinterpret_cast(value) : nullptr; + } + tflite::DensifyOptionsT *AsDensifyOptions() { + return type == BuiltinOptions_DensifyOptions ? + reinterpret_cast(value) : nullptr; + } + const tflite::DensifyOptionsT *AsDensifyOptions() const { + return type == BuiltinOptions_DensifyOptions ? + reinterpret_cast(value) : nullptr; + } + tflite::SegmentSumOptionsT *AsSegmentSumOptions() { + return type == BuiltinOptions_SegmentSumOptions ? + reinterpret_cast(value) : nullptr; + } + const tflite::SegmentSumOptionsT *AsSegmentSumOptions() const { + return type == BuiltinOptions_SegmentSumOptions ? 
+ reinterpret_cast(value) : nullptr; + } + tflite::BatchMatMulOptionsT *AsBatchMatMulOptions() { + return type == BuiltinOptions_BatchMatMulOptions ? + reinterpret_cast(value) : nullptr; + } + const tflite::BatchMatMulOptionsT *AsBatchMatMulOptions() const { + return type == BuiltinOptions_BatchMatMulOptions ? + reinterpret_cast(value) : nullptr; + } + tflite::CumsumOptionsT *AsCumsumOptions() { + return type == BuiltinOptions_CumsumOptions ? + reinterpret_cast(value) : nullptr; + } + const tflite::CumsumOptionsT *AsCumsumOptions() const { + return type == BuiltinOptions_CumsumOptions ? + reinterpret_cast(value) : nullptr; + } + tflite::CallOnceOptionsT *AsCallOnceOptions() { + return type == BuiltinOptions_CallOnceOptions ? + reinterpret_cast(value) : nullptr; + } + const tflite::CallOnceOptionsT *AsCallOnceOptions() const { + return type == BuiltinOptions_CallOnceOptions ? + reinterpret_cast(value) : nullptr; + } + tflite::BroadcastToOptionsT *AsBroadcastToOptions() { + return type == BuiltinOptions_BroadcastToOptions ? + reinterpret_cast(value) : nullptr; + } + const tflite::BroadcastToOptionsT *AsBroadcastToOptions() const { + return type == BuiltinOptions_BroadcastToOptions ? + reinterpret_cast(value) : nullptr; + } + tflite::Rfft2dOptionsT *AsRfft2dOptions() { + return type == BuiltinOptions_Rfft2dOptions ? + reinterpret_cast(value) : nullptr; + } + const tflite::Rfft2dOptionsT *AsRfft2dOptions() const { + return type == BuiltinOptions_Rfft2dOptions ? + reinterpret_cast(value) : nullptr; + } + tflite::Conv3DOptionsT *AsConv3DOptions() { + return type == BuiltinOptions_Conv3DOptions ? + reinterpret_cast(value) : nullptr; + } + const tflite::Conv3DOptionsT *AsConv3DOptions() const { + return type == BuiltinOptions_Conv3DOptions ? + reinterpret_cast(value) : nullptr; + } + tflite::HashtableOptionsT *AsHashtableOptions() { + return type == BuiltinOptions_HashtableOptions ? 
+ reinterpret_cast(value) : nullptr; + } + const tflite::HashtableOptionsT *AsHashtableOptions() const { + return type == BuiltinOptions_HashtableOptions ? + reinterpret_cast(value) : nullptr; + } + tflite::HashtableFindOptionsT *AsHashtableFindOptions() { + return type == BuiltinOptions_HashtableFindOptions ? + reinterpret_cast(value) : nullptr; + } + const tflite::HashtableFindOptionsT *AsHashtableFindOptions() const { + return type == BuiltinOptions_HashtableFindOptions ? + reinterpret_cast(value) : nullptr; + } + tflite::HashtableImportOptionsT *AsHashtableImportOptions() { + return type == BuiltinOptions_HashtableImportOptions ? + reinterpret_cast(value) : nullptr; + } + const tflite::HashtableImportOptionsT *AsHashtableImportOptions() const { + return type == BuiltinOptions_HashtableImportOptions ? + reinterpret_cast(value) : nullptr; + } + tflite::HashtableSizeOptionsT *AsHashtableSizeOptions() { + return type == BuiltinOptions_HashtableSizeOptions ? + reinterpret_cast(value) : nullptr; + } + const tflite::HashtableSizeOptionsT *AsHashtableSizeOptions() const { + return type == BuiltinOptions_HashtableSizeOptions ? + reinterpret_cast(value) : nullptr; + } + tflite::VarHandleOptionsT *AsVarHandleOptions() { + return type == BuiltinOptions_VarHandleOptions ? + reinterpret_cast(value) : nullptr; + } + const tflite::VarHandleOptionsT *AsVarHandleOptions() const { + return type == BuiltinOptions_VarHandleOptions ? + reinterpret_cast(value) : nullptr; + } + tflite::ReadVariableOptionsT *AsReadVariableOptions() { + return type == BuiltinOptions_ReadVariableOptions ? + reinterpret_cast(value) : nullptr; + } + const tflite::ReadVariableOptionsT *AsReadVariableOptions() const { + return type == BuiltinOptions_ReadVariableOptions ? + reinterpret_cast(value) : nullptr; + } + tflite::AssignVariableOptionsT *AsAssignVariableOptions() { + return type == BuiltinOptions_AssignVariableOptions ? 
+ reinterpret_cast(value) : nullptr; + } + const tflite::AssignVariableOptionsT *AsAssignVariableOptions() const { + return type == BuiltinOptions_AssignVariableOptions ? + reinterpret_cast(value) : nullptr; + } + tflite::RandomOptionsT *AsRandomOptions() { + return type == BuiltinOptions_RandomOptions ? + reinterpret_cast(value) : nullptr; + } + const tflite::RandomOptionsT *AsRandomOptions() const { + return type == BuiltinOptions_RandomOptions ? + reinterpret_cast(value) : nullptr; + } + tflite::BucketizeOptionsT *AsBucketizeOptions() { + return type == BuiltinOptions_BucketizeOptions ? + reinterpret_cast(value) : nullptr; + } + const tflite::BucketizeOptionsT *AsBucketizeOptions() const { + return type == BuiltinOptions_BucketizeOptions ? + reinterpret_cast(value) : nullptr; + } + tflite::GeluOptionsT *AsGeluOptions() { + return type == BuiltinOptions_GeluOptions ? + reinterpret_cast(value) : nullptr; + } + const tflite::GeluOptionsT *AsGeluOptions() const { + return type == BuiltinOptions_GeluOptions ? + reinterpret_cast(value) : nullptr; + } + tflite::DynamicUpdateSliceOptionsT *AsDynamicUpdateSliceOptions() { + return type == BuiltinOptions_DynamicUpdateSliceOptions ? + reinterpret_cast(value) : nullptr; + } + const tflite::DynamicUpdateSliceOptionsT *AsDynamicUpdateSliceOptions() const { + return type == BuiltinOptions_DynamicUpdateSliceOptions ? + reinterpret_cast(value) : nullptr; + } + tflite::UnsortedSegmentProdOptionsT *AsUnsortedSegmentProdOptions() { + return type == BuiltinOptions_UnsortedSegmentProdOptions ? + reinterpret_cast(value) : nullptr; + } + const tflite::UnsortedSegmentProdOptionsT *AsUnsortedSegmentProdOptions() const { + return type == BuiltinOptions_UnsortedSegmentProdOptions ? + reinterpret_cast(value) : nullptr; + } + tflite::UnsortedSegmentMaxOptionsT *AsUnsortedSegmentMaxOptions() { + return type == BuiltinOptions_UnsortedSegmentMaxOptions ? 
+ reinterpret_cast(value) : nullptr; + } + const tflite::UnsortedSegmentMaxOptionsT *AsUnsortedSegmentMaxOptions() const { + return type == BuiltinOptions_UnsortedSegmentMaxOptions ? + reinterpret_cast(value) : nullptr; + } + tflite::UnsortedSegmentMinOptionsT *AsUnsortedSegmentMinOptions() { + return type == BuiltinOptions_UnsortedSegmentMinOptions ? + reinterpret_cast(value) : nullptr; + } + const tflite::UnsortedSegmentMinOptionsT *AsUnsortedSegmentMinOptions() const { + return type == BuiltinOptions_UnsortedSegmentMinOptions ? + reinterpret_cast(value) : nullptr; + } + tflite::UnsortedSegmentSumOptionsT *AsUnsortedSegmentSumOptions() { + return type == BuiltinOptions_UnsortedSegmentSumOptions ? + reinterpret_cast(value) : nullptr; + } + const tflite::UnsortedSegmentSumOptionsT *AsUnsortedSegmentSumOptions() const { + return type == BuiltinOptions_UnsortedSegmentSumOptions ? + reinterpret_cast(value) : nullptr; + } + tflite::ATan2OptionsT *AsATan2Options() { + return type == BuiltinOptions_ATan2Options ? + reinterpret_cast(value) : nullptr; + } + const tflite::ATan2OptionsT *AsATan2Options() const { + return type == BuiltinOptions_ATan2Options ? + reinterpret_cast(value) : nullptr; + } + tflite::SignOptionsT *AsSignOptions() { + return type == BuiltinOptions_SignOptions ? + reinterpret_cast(value) : nullptr; + } + const tflite::SignOptionsT *AsSignOptions() const { + return type == BuiltinOptions_SignOptions ? + reinterpret_cast(value) : nullptr; + } + tflite::BitcastOptionsT *AsBitcastOptions() { + return type == BuiltinOptions_BitcastOptions ? + reinterpret_cast(value) : nullptr; + } + const tflite::BitcastOptionsT *AsBitcastOptions() const { + return type == BuiltinOptions_BitcastOptions ? + reinterpret_cast(value) : nullptr; + } + tflite::BitwiseXorOptionsT *AsBitwiseXorOptions() { + return type == BuiltinOptions_BitwiseXorOptions ? 
+ reinterpret_cast(value) : nullptr; + } + const tflite::BitwiseXorOptionsT *AsBitwiseXorOptions() const { + return type == BuiltinOptions_BitwiseXorOptions ? + reinterpret_cast(value) : nullptr; + } + tflite::RightShiftOptionsT *AsRightShiftOptions() { + return type == BuiltinOptions_RightShiftOptions ? + reinterpret_cast(value) : nullptr; + } + const tflite::RightShiftOptionsT *AsRightShiftOptions() const { + return type == BuiltinOptions_RightShiftOptions ? + reinterpret_cast(value) : nullptr; + } +}; + +bool VerifyBuiltinOptions(flatbuffers::Verifier &verifier, const void *obj, BuiltinOptions type); +bool VerifyBuiltinOptionsVector(flatbuffers::Verifier &verifier, const flatbuffers::Vector> *values, const flatbuffers::Vector *types); + +enum Padding : int8_t { + Padding_SAME = 0, + Padding_VALID = 1, + Padding_MIN = Padding_SAME, + Padding_MAX = Padding_VALID +}; + +inline const Padding (&EnumValuesPadding())[2] { + static const Padding values[] = { + Padding_SAME, + Padding_VALID + }; + return values; +} + +inline const char * const *EnumNamesPadding() { + static const char * const names[3] = { + "SAME", + "VALID", + nullptr + }; + return names; +} + +inline const char *EnumNamePadding(Padding e) { + if (flatbuffers::IsOutRange(e, Padding_SAME, Padding_VALID)) return ""; + const size_t index = static_cast(e); + return EnumNamesPadding()[index]; +} + +enum ActivationFunctionType : int8_t { + ActivationFunctionType_NONE = 0, + ActivationFunctionType_RELU = 1, + ActivationFunctionType_RELU_N1_TO_1 = 2, + ActivationFunctionType_RELU6 = 3, + ActivationFunctionType_TANH = 4, + ActivationFunctionType_SIGN_BIT = 5, + ActivationFunctionType_MIN = ActivationFunctionType_NONE, + ActivationFunctionType_MAX = ActivationFunctionType_SIGN_BIT +}; + +inline const ActivationFunctionType (&EnumValuesActivationFunctionType())[6] { + static const ActivationFunctionType values[] = { + ActivationFunctionType_NONE, + ActivationFunctionType_RELU, + 
ActivationFunctionType_RELU_N1_TO_1, + ActivationFunctionType_RELU6, + ActivationFunctionType_TANH, + ActivationFunctionType_SIGN_BIT + }; + return values; +} + +inline const char * const *EnumNamesActivationFunctionType() { + static const char * const names[7] = { + "NONE", + "RELU", + "RELU_N1_TO_1", + "RELU6", + "TANH", + "SIGN_BIT", + nullptr + }; + return names; +} + +inline const char *EnumNameActivationFunctionType(ActivationFunctionType e) { + if (flatbuffers::IsOutRange(e, ActivationFunctionType_NONE, ActivationFunctionType_SIGN_BIT)) return ""; + const size_t index = static_cast(e); + return EnumNamesActivationFunctionType()[index]; +} + +enum LSHProjectionType : int8_t { + LSHProjectionType_UNKNOWN = 0, + LSHProjectionType_SPARSE = 1, + LSHProjectionType_DENSE = 2, + LSHProjectionType_MIN = LSHProjectionType_UNKNOWN, + LSHProjectionType_MAX = LSHProjectionType_DENSE +}; + +inline const LSHProjectionType (&EnumValuesLSHProjectionType())[3] { + static const LSHProjectionType values[] = { + LSHProjectionType_UNKNOWN, + LSHProjectionType_SPARSE, + LSHProjectionType_DENSE + }; + return values; +} + +inline const char * const *EnumNamesLSHProjectionType() { + static const char * const names[4] = { + "UNKNOWN", + "SPARSE", + "DENSE", + nullptr + }; + return names; +} + +inline const char *EnumNameLSHProjectionType(LSHProjectionType e) { + if (flatbuffers::IsOutRange(e, LSHProjectionType_UNKNOWN, LSHProjectionType_DENSE)) return ""; + const size_t index = static_cast(e); + return EnumNamesLSHProjectionType()[index]; +} + +enum FullyConnectedOptionsWeightsFormat : int8_t { + FullyConnectedOptionsWeightsFormat_DEFAULT = 0, + FullyConnectedOptionsWeightsFormat_SHUFFLED4x16INT8 = 1, + FullyConnectedOptionsWeightsFormat_MIN = FullyConnectedOptionsWeightsFormat_DEFAULT, + FullyConnectedOptionsWeightsFormat_MAX = FullyConnectedOptionsWeightsFormat_SHUFFLED4x16INT8 +}; + +inline const FullyConnectedOptionsWeightsFormat 
(&EnumValuesFullyConnectedOptionsWeightsFormat())[2] { + static const FullyConnectedOptionsWeightsFormat values[] = { + FullyConnectedOptionsWeightsFormat_DEFAULT, + FullyConnectedOptionsWeightsFormat_SHUFFLED4x16INT8 + }; + return values; +} + +inline const char * const *EnumNamesFullyConnectedOptionsWeightsFormat() { + static const char * const names[3] = { + "DEFAULT", + "SHUFFLED4x16INT8", + nullptr + }; + return names; +} + +inline const char *EnumNameFullyConnectedOptionsWeightsFormat(FullyConnectedOptionsWeightsFormat e) { + if (flatbuffers::IsOutRange(e, FullyConnectedOptionsWeightsFormat_DEFAULT, FullyConnectedOptionsWeightsFormat_SHUFFLED4x16INT8)) return ""; + const size_t index = static_cast(e); + return EnumNamesFullyConnectedOptionsWeightsFormat()[index]; +} + +enum LSTMKernelType : int8_t { + LSTMKernelType_FULL = 0, + LSTMKernelType_BASIC = 1, + LSTMKernelType_MIN = LSTMKernelType_FULL, + LSTMKernelType_MAX = LSTMKernelType_BASIC +}; + +inline const LSTMKernelType (&EnumValuesLSTMKernelType())[2] { + static const LSTMKernelType values[] = { + LSTMKernelType_FULL, + LSTMKernelType_BASIC + }; + return values; +} + +inline const char * const *EnumNamesLSTMKernelType() { + static const char * const names[3] = { + "FULL", + "BASIC", + nullptr + }; + return names; +} + +inline const char *EnumNameLSTMKernelType(LSTMKernelType e) { + if (flatbuffers::IsOutRange(e, LSTMKernelType_FULL, LSTMKernelType_BASIC)) return ""; + const size_t index = static_cast(e); + return EnumNamesLSTMKernelType()[index]; +} + +enum CombinerType : int8_t { + CombinerType_SUM = 0, + CombinerType_MEAN = 1, + CombinerType_SQRTN = 2, + CombinerType_MIN = CombinerType_SUM, + CombinerType_MAX = CombinerType_SQRTN +}; + +inline const CombinerType (&EnumValuesCombinerType())[3] { + static const CombinerType values[] = { + CombinerType_SUM, + CombinerType_MEAN, + CombinerType_SQRTN + }; + return values; +} + +inline const char * const *EnumNamesCombinerType() { + static const char * const 
names[4] = { + "SUM", + "MEAN", + "SQRTN", + nullptr + }; + return names; +} + +inline const char *EnumNameCombinerType(CombinerType e) { + if (flatbuffers::IsOutRange(e, CombinerType_SUM, CombinerType_SQRTN)) return ""; + const size_t index = static_cast(e); + return EnumNamesCombinerType()[index]; +} + +enum MirrorPadMode : int8_t { + MirrorPadMode_REFLECT = 0, + MirrorPadMode_SYMMETRIC = 1, + MirrorPadMode_MIN = MirrorPadMode_REFLECT, + MirrorPadMode_MAX = MirrorPadMode_SYMMETRIC +}; + +inline const MirrorPadMode (&EnumValuesMirrorPadMode())[2] { + static const MirrorPadMode values[] = { + MirrorPadMode_REFLECT, + MirrorPadMode_SYMMETRIC + }; + return values; +} + +inline const char * const *EnumNamesMirrorPadMode() { + static const char * const names[3] = { + "REFLECT", + "SYMMETRIC", + nullptr + }; + return names; +} + +inline const char *EnumNameMirrorPadMode(MirrorPadMode e) { + if (flatbuffers::IsOutRange(e, MirrorPadMode_REFLECT, MirrorPadMode_SYMMETRIC)) return ""; + const size_t index = static_cast(e); + return EnumNamesMirrorPadMode()[index]; +} + +enum CustomOptionsFormat : int8_t { + CustomOptionsFormat_FLEXBUFFERS = 0, + CustomOptionsFormat_MIN = CustomOptionsFormat_FLEXBUFFERS, + CustomOptionsFormat_MAX = CustomOptionsFormat_FLEXBUFFERS +}; + +inline const CustomOptionsFormat (&EnumValuesCustomOptionsFormat())[1] { + static const CustomOptionsFormat values[] = { + CustomOptionsFormat_FLEXBUFFERS + }; + return values; +} + +inline const char * const *EnumNamesCustomOptionsFormat() { + static const char * const names[2] = { + "FLEXBUFFERS", + nullptr + }; + return names; +} + +inline const char *EnumNameCustomOptionsFormat(CustomOptionsFormat e) { + if (flatbuffers::IsOutRange(e, CustomOptionsFormat_FLEXBUFFERS, CustomOptionsFormat_FLEXBUFFERS)) return ""; + const size_t index = static_cast(e); + return EnumNamesCustomOptionsFormat()[index]; +} + +struct CustomQuantizationT : public flatbuffers::NativeTable { + typedef CustomQuantization TableType; + 
std::vector custom{}; +}; + +struct CustomQuantization FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table { + typedef CustomQuantizationT NativeTableType; + typedef CustomQuantizationBuilder Builder; + enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE { + VT_CUSTOM = 4 + }; + const flatbuffers::Vector *custom() const { + return GetPointer *>(VT_CUSTOM); + } + bool Verify(flatbuffers::Verifier &verifier) const { + return VerifyTableStart(verifier) && + VerifyOffset(verifier, VT_CUSTOM) && + verifier.VerifyVector(custom()) && + verifier.EndTable(); + } + CustomQuantizationT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const; + void UnPackTo(CustomQuantizationT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const; + static flatbuffers::Offset Pack(flatbuffers::FlatBufferBuilder &_fbb, const CustomQuantizationT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); +}; + +struct CustomQuantizationBuilder { + typedef CustomQuantization Table; + flatbuffers::FlatBufferBuilder &fbb_; + flatbuffers::uoffset_t start_; + void add_custom(flatbuffers::Offset> custom) { + fbb_.AddOffset(CustomQuantization::VT_CUSTOM, custom); + } + explicit CustomQuantizationBuilder(flatbuffers::FlatBufferBuilder &_fbb) + : fbb_(_fbb) { + start_ = fbb_.StartTable(); + } + flatbuffers::Offset Finish() { + const auto end = fbb_.EndTable(start_); + auto o = flatbuffers::Offset(end); + return o; + } +}; + +inline flatbuffers::Offset CreateCustomQuantization( + flatbuffers::FlatBufferBuilder &_fbb, + flatbuffers::Offset> custom = 0) { + CustomQuantizationBuilder builder_(_fbb); + builder_.add_custom(custom); + return builder_.Finish(); +} + +inline flatbuffers::Offset CreateCustomQuantizationDirect( + flatbuffers::FlatBufferBuilder &_fbb, + const std::vector *custom = nullptr) { + if (custom) { _fbb.ForceVectorAlignment(custom->size(), sizeof(uint8_t), 16); } + auto custom__ = custom ? 
_fbb.CreateVector(*custom) : 0; + return tflite::CreateCustomQuantization( + _fbb, + custom__); +} + +flatbuffers::Offset CreateCustomQuantization(flatbuffers::FlatBufferBuilder &_fbb, const CustomQuantizationT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); + +struct QuantizationParametersT : public flatbuffers::NativeTable { + typedef QuantizationParameters TableType; + std::vector min{}; + std::vector max{}; + std::vector scale{}; + std::vector zero_point{}; + tflite::QuantizationDetailsUnion details{}; + int32_t quantized_dimension = 0; +}; + +struct QuantizationParameters FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table { + typedef QuantizationParametersT NativeTableType; + typedef QuantizationParametersBuilder Builder; + enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE { + VT_MIN = 4, + VT_MAX = 6, + VT_SCALE = 8, + VT_ZERO_POINT = 10, + VT_DETAILS_TYPE = 12, + VT_DETAILS = 14, + VT_QUANTIZED_DIMENSION = 16 + }; + const flatbuffers::Vector *min() const { + return GetPointer *>(VT_MIN); + } + const flatbuffers::Vector *max() const { + return GetPointer *>(VT_MAX); + } + const flatbuffers::Vector *scale() const { + return GetPointer *>(VT_SCALE); + } + const flatbuffers::Vector *zero_point() const { + return GetPointer *>(VT_ZERO_POINT); + } + tflite::QuantizationDetails details_type() const { + return static_cast(GetField(VT_DETAILS_TYPE, 0)); + } + const void *details() const { + return GetPointer(VT_DETAILS); + } + template const T *details_as() const; + const tflite::CustomQuantization *details_as_CustomQuantization() const { + return details_type() == tflite::QuantizationDetails_CustomQuantization ? 
static_cast(details()) : nullptr; + } + int32_t quantized_dimension() const { + return GetField(VT_QUANTIZED_DIMENSION, 0); + } + bool Verify(flatbuffers::Verifier &verifier) const { + return VerifyTableStart(verifier) && + VerifyOffset(verifier, VT_MIN) && + verifier.VerifyVector(min()) && + VerifyOffset(verifier, VT_MAX) && + verifier.VerifyVector(max()) && + VerifyOffset(verifier, VT_SCALE) && + verifier.VerifyVector(scale()) && + VerifyOffset(verifier, VT_ZERO_POINT) && + verifier.VerifyVector(zero_point()) && + VerifyField(verifier, VT_DETAILS_TYPE, 1) && + VerifyOffset(verifier, VT_DETAILS) && + VerifyQuantizationDetails(verifier, details(), details_type()) && + VerifyField(verifier, VT_QUANTIZED_DIMENSION, 4) && + verifier.EndTable(); + } + QuantizationParametersT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const; + void UnPackTo(QuantizationParametersT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const; + static flatbuffers::Offset Pack(flatbuffers::FlatBufferBuilder &_fbb, const QuantizationParametersT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); +}; + +template<> inline const tflite::CustomQuantization *QuantizationParameters::details_as() const { + return details_as_CustomQuantization(); +} + +struct QuantizationParametersBuilder { + typedef QuantizationParameters Table; + flatbuffers::FlatBufferBuilder &fbb_; + flatbuffers::uoffset_t start_; + void add_min(flatbuffers::Offset> min) { + fbb_.AddOffset(QuantizationParameters::VT_MIN, min); + } + void add_max(flatbuffers::Offset> max) { + fbb_.AddOffset(QuantizationParameters::VT_MAX, max); + } + void add_scale(flatbuffers::Offset> scale) { + fbb_.AddOffset(QuantizationParameters::VT_SCALE, scale); + } + void add_zero_point(flatbuffers::Offset> zero_point) { + fbb_.AddOffset(QuantizationParameters::VT_ZERO_POINT, zero_point); + } + void add_details_type(tflite::QuantizationDetails details_type) { + 
fbb_.AddElement(QuantizationParameters::VT_DETAILS_TYPE, static_cast(details_type), 0); + } + void add_details(flatbuffers::Offset details) { + fbb_.AddOffset(QuantizationParameters::VT_DETAILS, details); + } + void add_quantized_dimension(int32_t quantized_dimension) { + fbb_.AddElement(QuantizationParameters::VT_QUANTIZED_DIMENSION, quantized_dimension, 0); + } + explicit QuantizationParametersBuilder(flatbuffers::FlatBufferBuilder &_fbb) + : fbb_(_fbb) { + start_ = fbb_.StartTable(); + } + flatbuffers::Offset Finish() { + const auto end = fbb_.EndTable(start_); + auto o = flatbuffers::Offset(end); + return o; + } +}; + +inline flatbuffers::Offset CreateQuantizationParameters( + flatbuffers::FlatBufferBuilder &_fbb, + flatbuffers::Offset> min = 0, + flatbuffers::Offset> max = 0, + flatbuffers::Offset> scale = 0, + flatbuffers::Offset> zero_point = 0, + tflite::QuantizationDetails details_type = tflite::QuantizationDetails_NONE, + flatbuffers::Offset details = 0, + int32_t quantized_dimension = 0) { + QuantizationParametersBuilder builder_(_fbb); + builder_.add_quantized_dimension(quantized_dimension); + builder_.add_details(details); + builder_.add_zero_point(zero_point); + builder_.add_scale(scale); + builder_.add_max(max); + builder_.add_min(min); + builder_.add_details_type(details_type); + return builder_.Finish(); +} + +inline flatbuffers::Offset CreateQuantizationParametersDirect( + flatbuffers::FlatBufferBuilder &_fbb, + const std::vector *min = nullptr, + const std::vector *max = nullptr, + const std::vector *scale = nullptr, + const std::vector *zero_point = nullptr, + tflite::QuantizationDetails details_type = tflite::QuantizationDetails_NONE, + flatbuffers::Offset details = 0, + int32_t quantized_dimension = 0) { + auto min__ = min ? _fbb.CreateVector(*min) : 0; + auto max__ = max ? _fbb.CreateVector(*max) : 0; + auto scale__ = scale ? _fbb.CreateVector(*scale) : 0; + auto zero_point__ = zero_point ? 
_fbb.CreateVector(*zero_point) : 0; + return tflite::CreateQuantizationParameters( + _fbb, + min__, + max__, + scale__, + zero_point__, + details_type, + details, + quantized_dimension); +} + +flatbuffers::Offset CreateQuantizationParameters(flatbuffers::FlatBufferBuilder &_fbb, const QuantizationParametersT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); + +struct Int32VectorT : public flatbuffers::NativeTable { + typedef Int32Vector TableType; + std::vector values{}; +}; + +struct Int32Vector FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table { + typedef Int32VectorT NativeTableType; + typedef Int32VectorBuilder Builder; + enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE { + VT_VALUES = 4 + }; + const flatbuffers::Vector *values() const { + return GetPointer *>(VT_VALUES); + } + bool Verify(flatbuffers::Verifier &verifier) const { + return VerifyTableStart(verifier) && + VerifyOffset(verifier, VT_VALUES) && + verifier.VerifyVector(values()) && + verifier.EndTable(); + } + Int32VectorT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const; + void UnPackTo(Int32VectorT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const; + static flatbuffers::Offset Pack(flatbuffers::FlatBufferBuilder &_fbb, const Int32VectorT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); +}; + +struct Int32VectorBuilder { + typedef Int32Vector Table; + flatbuffers::FlatBufferBuilder &fbb_; + flatbuffers::uoffset_t start_; + void add_values(flatbuffers::Offset> values) { + fbb_.AddOffset(Int32Vector::VT_VALUES, values); + } + explicit Int32VectorBuilder(flatbuffers::FlatBufferBuilder &_fbb) + : fbb_(_fbb) { + start_ = fbb_.StartTable(); + } + flatbuffers::Offset Finish() { + const auto end = fbb_.EndTable(start_); + auto o = flatbuffers::Offset(end); + return o; + } +}; + +inline flatbuffers::Offset CreateInt32Vector( + flatbuffers::FlatBufferBuilder &_fbb, + flatbuffers::Offset> values = 0) { + 
Int32VectorBuilder builder_(_fbb); + builder_.add_values(values); + return builder_.Finish(); +} + +inline flatbuffers::Offset CreateInt32VectorDirect( + flatbuffers::FlatBufferBuilder &_fbb, + const std::vector *values = nullptr) { + auto values__ = values ? _fbb.CreateVector(*values) : 0; + return tflite::CreateInt32Vector( + _fbb, + values__); +} + +flatbuffers::Offset CreateInt32Vector(flatbuffers::FlatBufferBuilder &_fbb, const Int32VectorT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); + +struct Uint16VectorT : public flatbuffers::NativeTable { + typedef Uint16Vector TableType; + std::vector values{}; +}; + +struct Uint16Vector FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table { + typedef Uint16VectorT NativeTableType; + typedef Uint16VectorBuilder Builder; + enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE { + VT_VALUES = 4 + }; + const flatbuffers::Vector *values() const { + return GetPointer *>(VT_VALUES); + } + bool Verify(flatbuffers::Verifier &verifier) const { + return VerifyTableStart(verifier) && + VerifyOffset(verifier, VT_VALUES) && + verifier.VerifyVector(values()) && + verifier.EndTable(); + } + Uint16VectorT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const; + void UnPackTo(Uint16VectorT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const; + static flatbuffers::Offset Pack(flatbuffers::FlatBufferBuilder &_fbb, const Uint16VectorT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); +}; + +struct Uint16VectorBuilder { + typedef Uint16Vector Table; + flatbuffers::FlatBufferBuilder &fbb_; + flatbuffers::uoffset_t start_; + void add_values(flatbuffers::Offset> values) { + fbb_.AddOffset(Uint16Vector::VT_VALUES, values); + } + explicit Uint16VectorBuilder(flatbuffers::FlatBufferBuilder &_fbb) + : fbb_(_fbb) { + start_ = fbb_.StartTable(); + } + flatbuffers::Offset Finish() { + const auto end = fbb_.EndTable(start_); + auto o = flatbuffers::Offset(end); + 
return o; + } +}; + +inline flatbuffers::Offset CreateUint16Vector( + flatbuffers::FlatBufferBuilder &_fbb, + flatbuffers::Offset> values = 0) { + Uint16VectorBuilder builder_(_fbb); + builder_.add_values(values); + return builder_.Finish(); +} + +inline flatbuffers::Offset CreateUint16VectorDirect( + flatbuffers::FlatBufferBuilder &_fbb, + const std::vector *values = nullptr) { + if (values) { _fbb.ForceVectorAlignment(values->size(), sizeof(uint16_t), 4); } + auto values__ = values ? _fbb.CreateVector(*values) : 0; + return tflite::CreateUint16Vector( + _fbb, + values__); +} + +flatbuffers::Offset CreateUint16Vector(flatbuffers::FlatBufferBuilder &_fbb, const Uint16VectorT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); + +struct Uint8VectorT : public flatbuffers::NativeTable { + typedef Uint8Vector TableType; + std::vector values{}; +}; + +struct Uint8Vector FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table { + typedef Uint8VectorT NativeTableType; + typedef Uint8VectorBuilder Builder; + enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE { + VT_VALUES = 4 + }; + const flatbuffers::Vector *values() const { + return GetPointer *>(VT_VALUES); + } + bool Verify(flatbuffers::Verifier &verifier) const { + return VerifyTableStart(verifier) && + VerifyOffset(verifier, VT_VALUES) && + verifier.VerifyVector(values()) && + verifier.EndTable(); + } + Uint8VectorT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const; + void UnPackTo(Uint8VectorT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const; + static flatbuffers::Offset Pack(flatbuffers::FlatBufferBuilder &_fbb, const Uint8VectorT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); +}; + +struct Uint8VectorBuilder { + typedef Uint8Vector Table; + flatbuffers::FlatBufferBuilder &fbb_; + flatbuffers::uoffset_t start_; + void add_values(flatbuffers::Offset> values) { + fbb_.AddOffset(Uint8Vector::VT_VALUES, values); + } + explicit 
Uint8VectorBuilder(flatbuffers::FlatBufferBuilder &_fbb) + : fbb_(_fbb) { + start_ = fbb_.StartTable(); + } + flatbuffers::Offset Finish() { + const auto end = fbb_.EndTable(start_); + auto o = flatbuffers::Offset(end); + return o; + } +}; + +inline flatbuffers::Offset CreateUint8Vector( + flatbuffers::FlatBufferBuilder &_fbb, + flatbuffers::Offset> values = 0) { + Uint8VectorBuilder builder_(_fbb); + builder_.add_values(values); + return builder_.Finish(); +} + +inline flatbuffers::Offset CreateUint8VectorDirect( + flatbuffers::FlatBufferBuilder &_fbb, + const std::vector *values = nullptr) { + if (values) { _fbb.ForceVectorAlignment(values->size(), sizeof(uint8_t), 4); } + auto values__ = values ? _fbb.CreateVector(*values) : 0; + return tflite::CreateUint8Vector( + _fbb, + values__); +} + +flatbuffers::Offset CreateUint8Vector(flatbuffers::FlatBufferBuilder &_fbb, const Uint8VectorT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); + +struct DimensionMetadataT : public flatbuffers::NativeTable { + typedef DimensionMetadata TableType; + tflite::DimensionType format = tflite::DimensionType_DENSE; + int32_t dense_size = 0; + tflite::SparseIndexVectorUnion array_segments{}; + tflite::SparseIndexVectorUnion array_indices{}; +}; + +struct DimensionMetadata FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table { + typedef DimensionMetadataT NativeTableType; + typedef DimensionMetadataBuilder Builder; + enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE { + VT_FORMAT = 4, + VT_DENSE_SIZE = 6, + VT_ARRAY_SEGMENTS_TYPE = 8, + VT_ARRAY_SEGMENTS = 10, + VT_ARRAY_INDICES_TYPE = 12, + VT_ARRAY_INDICES = 14 + }; + tflite::DimensionType format() const { + return static_cast(GetField(VT_FORMAT, 0)); + } + int32_t dense_size() const { + return GetField(VT_DENSE_SIZE, 0); + } + tflite::SparseIndexVector array_segments_type() const { + return static_cast(GetField(VT_ARRAY_SEGMENTS_TYPE, 0)); + } + const void *array_segments() const { + return 
GetPointer(VT_ARRAY_SEGMENTS); + } + template const T *array_segments_as() const; + const tflite::Int32Vector *array_segments_as_Int32Vector() const { + return array_segments_type() == tflite::SparseIndexVector_Int32Vector ? static_cast(array_segments()) : nullptr; + } + const tflite::Uint16Vector *array_segments_as_Uint16Vector() const { + return array_segments_type() == tflite::SparseIndexVector_Uint16Vector ? static_cast(array_segments()) : nullptr; + } + const tflite::Uint8Vector *array_segments_as_Uint8Vector() const { + return array_segments_type() == tflite::SparseIndexVector_Uint8Vector ? static_cast(array_segments()) : nullptr; + } + tflite::SparseIndexVector array_indices_type() const { + return static_cast(GetField(VT_ARRAY_INDICES_TYPE, 0)); + } + const void *array_indices() const { + return GetPointer(VT_ARRAY_INDICES); + } + template const T *array_indices_as() const; + const tflite::Int32Vector *array_indices_as_Int32Vector() const { + return array_indices_type() == tflite::SparseIndexVector_Int32Vector ? static_cast(array_indices()) : nullptr; + } + const tflite::Uint16Vector *array_indices_as_Uint16Vector() const { + return array_indices_type() == tflite::SparseIndexVector_Uint16Vector ? static_cast(array_indices()) : nullptr; + } + const tflite::Uint8Vector *array_indices_as_Uint8Vector() const { + return array_indices_type() == tflite::SparseIndexVector_Uint8Vector ? 
static_cast(array_indices()) : nullptr; + } + bool Verify(flatbuffers::Verifier &verifier) const { + return VerifyTableStart(verifier) && + VerifyField(verifier, VT_FORMAT, 1) && + VerifyField(verifier, VT_DENSE_SIZE, 4) && + VerifyField(verifier, VT_ARRAY_SEGMENTS_TYPE, 1) && + VerifyOffset(verifier, VT_ARRAY_SEGMENTS) && + VerifySparseIndexVector(verifier, array_segments(), array_segments_type()) && + VerifyField(verifier, VT_ARRAY_INDICES_TYPE, 1) && + VerifyOffset(verifier, VT_ARRAY_INDICES) && + VerifySparseIndexVector(verifier, array_indices(), array_indices_type()) && + verifier.EndTable(); + } + DimensionMetadataT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const; + void UnPackTo(DimensionMetadataT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const; + static flatbuffers::Offset Pack(flatbuffers::FlatBufferBuilder &_fbb, const DimensionMetadataT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); +}; + +template<> inline const tflite::Int32Vector *DimensionMetadata::array_segments_as() const { + return array_segments_as_Int32Vector(); +} + +template<> inline const tflite::Uint16Vector *DimensionMetadata::array_segments_as() const { + return array_segments_as_Uint16Vector(); +} + +template<> inline const tflite::Uint8Vector *DimensionMetadata::array_segments_as() const { + return array_segments_as_Uint8Vector(); +} + +template<> inline const tflite::Int32Vector *DimensionMetadata::array_indices_as() const { + return array_indices_as_Int32Vector(); +} + +template<> inline const tflite::Uint16Vector *DimensionMetadata::array_indices_as() const { + return array_indices_as_Uint16Vector(); +} + +template<> inline const tflite::Uint8Vector *DimensionMetadata::array_indices_as() const { + return array_indices_as_Uint8Vector(); +} + +struct DimensionMetadataBuilder { + typedef DimensionMetadata Table; + flatbuffers::FlatBufferBuilder &fbb_; + flatbuffers::uoffset_t start_; + void 
add_format(tflite::DimensionType format) { + fbb_.AddElement(DimensionMetadata::VT_FORMAT, static_cast(format), 0); + } + void add_dense_size(int32_t dense_size) { + fbb_.AddElement(DimensionMetadata::VT_DENSE_SIZE, dense_size, 0); + } + void add_array_segments_type(tflite::SparseIndexVector array_segments_type) { + fbb_.AddElement(DimensionMetadata::VT_ARRAY_SEGMENTS_TYPE, static_cast(array_segments_type), 0); + } + void add_array_segments(flatbuffers::Offset array_segments) { + fbb_.AddOffset(DimensionMetadata::VT_ARRAY_SEGMENTS, array_segments); + } + void add_array_indices_type(tflite::SparseIndexVector array_indices_type) { + fbb_.AddElement(DimensionMetadata::VT_ARRAY_INDICES_TYPE, static_cast(array_indices_type), 0); + } + void add_array_indices(flatbuffers::Offset array_indices) { + fbb_.AddOffset(DimensionMetadata::VT_ARRAY_INDICES, array_indices); + } + explicit DimensionMetadataBuilder(flatbuffers::FlatBufferBuilder &_fbb) + : fbb_(_fbb) { + start_ = fbb_.StartTable(); + } + flatbuffers::Offset Finish() { + const auto end = fbb_.EndTable(start_); + auto o = flatbuffers::Offset(end); + return o; + } +}; + +inline flatbuffers::Offset CreateDimensionMetadata( + flatbuffers::FlatBufferBuilder &_fbb, + tflite::DimensionType format = tflite::DimensionType_DENSE, + int32_t dense_size = 0, + tflite::SparseIndexVector array_segments_type = tflite::SparseIndexVector_NONE, + flatbuffers::Offset array_segments = 0, + tflite::SparseIndexVector array_indices_type = tflite::SparseIndexVector_NONE, + flatbuffers::Offset array_indices = 0) { + DimensionMetadataBuilder builder_(_fbb); + builder_.add_array_indices(array_indices); + builder_.add_array_segments(array_segments); + builder_.add_dense_size(dense_size); + builder_.add_array_indices_type(array_indices_type); + builder_.add_array_segments_type(array_segments_type); + builder_.add_format(format); + return builder_.Finish(); +} + +flatbuffers::Offset CreateDimensionMetadata(flatbuffers::FlatBufferBuilder &_fbb, 
const DimensionMetadataT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); + +struct SparsityParametersT : public flatbuffers::NativeTable { + typedef SparsityParameters TableType; + std::vector traversal_order{}; + std::vector block_map{}; + std::vector> dim_metadata{}; + SparsityParametersT() = default; + SparsityParametersT(const SparsityParametersT &o); + SparsityParametersT(SparsityParametersT&&) FLATBUFFERS_NOEXCEPT = default; + SparsityParametersT &operator=(SparsityParametersT o) FLATBUFFERS_NOEXCEPT; +}; + +struct SparsityParameters FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table { + typedef SparsityParametersT NativeTableType; + typedef SparsityParametersBuilder Builder; + enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE { + VT_TRAVERSAL_ORDER = 4, + VT_BLOCK_MAP = 6, + VT_DIM_METADATA = 8 + }; + const flatbuffers::Vector *traversal_order() const { + return GetPointer *>(VT_TRAVERSAL_ORDER); + } + const flatbuffers::Vector *block_map() const { + return GetPointer *>(VT_BLOCK_MAP); + } + const flatbuffers::Vector> *dim_metadata() const { + return GetPointer> *>(VT_DIM_METADATA); + } + bool Verify(flatbuffers::Verifier &verifier) const { + return VerifyTableStart(verifier) && + VerifyOffset(verifier, VT_TRAVERSAL_ORDER) && + verifier.VerifyVector(traversal_order()) && + VerifyOffset(verifier, VT_BLOCK_MAP) && + verifier.VerifyVector(block_map()) && + VerifyOffset(verifier, VT_DIM_METADATA) && + verifier.VerifyVector(dim_metadata()) && + verifier.VerifyVectorOfTables(dim_metadata()) && + verifier.EndTable(); + } + SparsityParametersT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const; + void UnPackTo(SparsityParametersT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const; + static flatbuffers::Offset Pack(flatbuffers::FlatBufferBuilder &_fbb, const SparsityParametersT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); +}; + +struct SparsityParametersBuilder { + 
typedef SparsityParameters Table; + flatbuffers::FlatBufferBuilder &fbb_; + flatbuffers::uoffset_t start_; + void add_traversal_order(flatbuffers::Offset> traversal_order) { + fbb_.AddOffset(SparsityParameters::VT_TRAVERSAL_ORDER, traversal_order); + } + void add_block_map(flatbuffers::Offset> block_map) { + fbb_.AddOffset(SparsityParameters::VT_BLOCK_MAP, block_map); + } + void add_dim_metadata(flatbuffers::Offset>> dim_metadata) { + fbb_.AddOffset(SparsityParameters::VT_DIM_METADATA, dim_metadata); + } + explicit SparsityParametersBuilder(flatbuffers::FlatBufferBuilder &_fbb) + : fbb_(_fbb) { + start_ = fbb_.StartTable(); + } + flatbuffers::Offset Finish() { + const auto end = fbb_.EndTable(start_); + auto o = flatbuffers::Offset(end); + return o; + } +}; + +inline flatbuffers::Offset CreateSparsityParameters( + flatbuffers::FlatBufferBuilder &_fbb, + flatbuffers::Offset> traversal_order = 0, + flatbuffers::Offset> block_map = 0, + flatbuffers::Offset>> dim_metadata = 0) { + SparsityParametersBuilder builder_(_fbb); + builder_.add_dim_metadata(dim_metadata); + builder_.add_block_map(block_map); + builder_.add_traversal_order(traversal_order); + return builder_.Finish(); +} + +inline flatbuffers::Offset CreateSparsityParametersDirect( + flatbuffers::FlatBufferBuilder &_fbb, + const std::vector *traversal_order = nullptr, + const std::vector *block_map = nullptr, + const std::vector> *dim_metadata = nullptr) { + auto traversal_order__ = traversal_order ? _fbb.CreateVector(*traversal_order) : 0; + auto block_map__ = block_map ? _fbb.CreateVector(*block_map) : 0; + auto dim_metadata__ = dim_metadata ? 
_fbb.CreateVector>(*dim_metadata) : 0; + return tflite::CreateSparsityParameters( + _fbb, + traversal_order__, + block_map__, + dim_metadata__); +} + +flatbuffers::Offset CreateSparsityParameters(flatbuffers::FlatBufferBuilder &_fbb, const SparsityParametersT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); + +struct VariantSubTypeT : public flatbuffers::NativeTable { + typedef VariantSubType TableType; + std::vector shape{}; + tflite::TensorType type = tflite::TensorType_FLOAT32; + bool has_rank = false; +}; + +struct VariantSubType FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table { + typedef VariantSubTypeT NativeTableType; + typedef VariantSubTypeBuilder Builder; + enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE { + VT_SHAPE = 4, + VT_TYPE = 6, + VT_HAS_RANK = 8 + }; + const flatbuffers::Vector *shape() const { + return GetPointer *>(VT_SHAPE); + } + tflite::TensorType type() const { + return static_cast(GetField(VT_TYPE, 0)); + } + bool has_rank() const { + return GetField(VT_HAS_RANK, 0) != 0; + } + bool Verify(flatbuffers::Verifier &verifier) const { + return VerifyTableStart(verifier) && + VerifyOffset(verifier, VT_SHAPE) && + verifier.VerifyVector(shape()) && + VerifyField(verifier, VT_TYPE, 1) && + VerifyField(verifier, VT_HAS_RANK, 1) && + verifier.EndTable(); + } + VariantSubTypeT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const; + void UnPackTo(VariantSubTypeT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const; + static flatbuffers::Offset Pack(flatbuffers::FlatBufferBuilder &_fbb, const VariantSubTypeT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); +}; + +struct VariantSubTypeBuilder { + typedef VariantSubType Table; + flatbuffers::FlatBufferBuilder &fbb_; + flatbuffers::uoffset_t start_; + void add_shape(flatbuffers::Offset> shape) { + fbb_.AddOffset(VariantSubType::VT_SHAPE, shape); + } + void add_type(tflite::TensorType type) { + 
fbb_.AddElement(VariantSubType::VT_TYPE, static_cast(type), 0); + } + void add_has_rank(bool has_rank) { + fbb_.AddElement(VariantSubType::VT_HAS_RANK, static_cast(has_rank), 0); + } + explicit VariantSubTypeBuilder(flatbuffers::FlatBufferBuilder &_fbb) + : fbb_(_fbb) { + start_ = fbb_.StartTable(); + } + flatbuffers::Offset Finish() { + const auto end = fbb_.EndTable(start_); + auto o = flatbuffers::Offset(end); + return o; + } +}; + +inline flatbuffers::Offset CreateVariantSubType( + flatbuffers::FlatBufferBuilder &_fbb, + flatbuffers::Offset> shape = 0, + tflite::TensorType type = tflite::TensorType_FLOAT32, + bool has_rank = false) { + VariantSubTypeBuilder builder_(_fbb); + builder_.add_shape(shape); + builder_.add_has_rank(has_rank); + builder_.add_type(type); + return builder_.Finish(); +} + +inline flatbuffers::Offset CreateVariantSubTypeDirect( + flatbuffers::FlatBufferBuilder &_fbb, + const std::vector *shape = nullptr, + tflite::TensorType type = tflite::TensorType_FLOAT32, + bool has_rank = false) { + auto shape__ = shape ? 
_fbb.CreateVector(*shape) : 0; + return tflite::CreateVariantSubType( + _fbb, + shape__, + type, + has_rank); +} + +flatbuffers::Offset CreateVariantSubType(flatbuffers::FlatBufferBuilder &_fbb, const VariantSubTypeT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); + +struct TensorT : public flatbuffers::NativeTable { + typedef Tensor TableType; + std::vector shape{}; + tflite::TensorType type = tflite::TensorType_FLOAT32; + uint32_t buffer = 0; + std::string name{}; + std::unique_ptr quantization{}; + bool is_variable = false; + std::unique_ptr sparsity{}; + std::vector shape_signature{}; + bool has_rank = false; + std::vector> variant_tensors{}; + TensorT() = default; + TensorT(const TensorT &o); + TensorT(TensorT&&) FLATBUFFERS_NOEXCEPT = default; + TensorT &operator=(TensorT o) FLATBUFFERS_NOEXCEPT; +}; + +struct Tensor FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table { + typedef TensorT NativeTableType; + typedef TensorBuilder Builder; + enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE { + VT_SHAPE = 4, + VT_TYPE = 6, + VT_BUFFER = 8, + VT_NAME = 10, + VT_QUANTIZATION = 12, + VT_IS_VARIABLE = 14, + VT_SPARSITY = 16, + VT_SHAPE_SIGNATURE = 18, + VT_HAS_RANK = 20, + VT_VARIANT_TENSORS = 22 + }; + const flatbuffers::Vector *shape() const { + return GetPointer *>(VT_SHAPE); + } + tflite::TensorType type() const { + return static_cast(GetField(VT_TYPE, 0)); + } + uint32_t buffer() const { + return GetField(VT_BUFFER, 0); + } + const flatbuffers::String *name() const { + return GetPointer(VT_NAME); + } + const tflite::QuantizationParameters *quantization() const { + return GetPointer(VT_QUANTIZATION); + } + bool is_variable() const { + return GetField(VT_IS_VARIABLE, 0) != 0; + } + const tflite::SparsityParameters *sparsity() const { + return GetPointer(VT_SPARSITY); + } + const flatbuffers::Vector *shape_signature() const { + return GetPointer *>(VT_SHAPE_SIGNATURE); + } + bool has_rank() const { + return GetField(VT_HAS_RANK, 
0) != 0; + } + const flatbuffers::Vector> *variant_tensors() const { + return GetPointer> *>(VT_VARIANT_TENSORS); + } + bool Verify(flatbuffers::Verifier &verifier) const { + return VerifyTableStart(verifier) && + VerifyOffset(verifier, VT_SHAPE) && + verifier.VerifyVector(shape()) && + VerifyField(verifier, VT_TYPE, 1) && + VerifyField(verifier, VT_BUFFER, 4) && + VerifyOffset(verifier, VT_NAME) && + verifier.VerifyString(name()) && + VerifyOffset(verifier, VT_QUANTIZATION) && + verifier.VerifyTable(quantization()) && + VerifyField(verifier, VT_IS_VARIABLE, 1) && + VerifyOffset(verifier, VT_SPARSITY) && + verifier.VerifyTable(sparsity()) && + VerifyOffset(verifier, VT_SHAPE_SIGNATURE) && + verifier.VerifyVector(shape_signature()) && + VerifyField(verifier, VT_HAS_RANK, 1) && + VerifyOffset(verifier, VT_VARIANT_TENSORS) && + verifier.VerifyVector(variant_tensors()) && + verifier.VerifyVectorOfTables(variant_tensors()) && + verifier.EndTable(); + } + TensorT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const; + void UnPackTo(TensorT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const; + static flatbuffers::Offset Pack(flatbuffers::FlatBufferBuilder &_fbb, const TensorT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); +}; + +struct TensorBuilder { + typedef Tensor Table; + flatbuffers::FlatBufferBuilder &fbb_; + flatbuffers::uoffset_t start_; + void add_shape(flatbuffers::Offset> shape) { + fbb_.AddOffset(Tensor::VT_SHAPE, shape); + } + void add_type(tflite::TensorType type) { + fbb_.AddElement(Tensor::VT_TYPE, static_cast(type), 0); + } + void add_buffer(uint32_t buffer) { + fbb_.AddElement(Tensor::VT_BUFFER, buffer, 0); + } + void add_name(flatbuffers::Offset name) { + fbb_.AddOffset(Tensor::VT_NAME, name); + } + void add_quantization(flatbuffers::Offset quantization) { + fbb_.AddOffset(Tensor::VT_QUANTIZATION, quantization); + } + void add_is_variable(bool is_variable) { + 
fbb_.AddElement(Tensor::VT_IS_VARIABLE, static_cast(is_variable), 0); + } + void add_sparsity(flatbuffers::Offset sparsity) { + fbb_.AddOffset(Tensor::VT_SPARSITY, sparsity); + } + void add_shape_signature(flatbuffers::Offset> shape_signature) { + fbb_.AddOffset(Tensor::VT_SHAPE_SIGNATURE, shape_signature); + } + void add_has_rank(bool has_rank) { + fbb_.AddElement(Tensor::VT_HAS_RANK, static_cast(has_rank), 0); + } + void add_variant_tensors(flatbuffers::Offset>> variant_tensors) { + fbb_.AddOffset(Tensor::VT_VARIANT_TENSORS, variant_tensors); + } + explicit TensorBuilder(flatbuffers::FlatBufferBuilder &_fbb) + : fbb_(_fbb) { + start_ = fbb_.StartTable(); + } + flatbuffers::Offset Finish() { + const auto end = fbb_.EndTable(start_); + auto o = flatbuffers::Offset(end); + return o; + } +}; + +inline flatbuffers::Offset CreateTensor( + flatbuffers::FlatBufferBuilder &_fbb, + flatbuffers::Offset> shape = 0, + tflite::TensorType type = tflite::TensorType_FLOAT32, + uint32_t buffer = 0, + flatbuffers::Offset name = 0, + flatbuffers::Offset quantization = 0, + bool is_variable = false, + flatbuffers::Offset sparsity = 0, + flatbuffers::Offset> shape_signature = 0, + bool has_rank = false, + flatbuffers::Offset>> variant_tensors = 0) { + TensorBuilder builder_(_fbb); + builder_.add_variant_tensors(variant_tensors); + builder_.add_shape_signature(shape_signature); + builder_.add_sparsity(sparsity); + builder_.add_quantization(quantization); + builder_.add_name(name); + builder_.add_buffer(buffer); + builder_.add_shape(shape); + builder_.add_has_rank(has_rank); + builder_.add_is_variable(is_variable); + builder_.add_type(type); + return builder_.Finish(); +} + +inline flatbuffers::Offset CreateTensorDirect( + flatbuffers::FlatBufferBuilder &_fbb, + const std::vector *shape = nullptr, + tflite::TensorType type = tflite::TensorType_FLOAT32, + uint32_t buffer = 0, + const char *name = nullptr, + flatbuffers::Offset quantization = 0, + bool is_variable = false, + 
flatbuffers::Offset sparsity = 0, + const std::vector *shape_signature = nullptr, + bool has_rank = false, + const std::vector> *variant_tensors = nullptr) { + auto shape__ = shape ? _fbb.CreateVector(*shape) : 0; + auto name__ = name ? _fbb.CreateString(name) : 0; + auto shape_signature__ = shape_signature ? _fbb.CreateVector(*shape_signature) : 0; + auto variant_tensors__ = variant_tensors ? _fbb.CreateVector>(*variant_tensors) : 0; + return tflite::CreateTensor( + _fbb, + shape__, + type, + buffer, + name__, + quantization, + is_variable, + sparsity, + shape_signature__, + has_rank, + variant_tensors__); +} + +flatbuffers::Offset CreateTensor(flatbuffers::FlatBufferBuilder &_fbb, const TensorT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); + +struct Conv2DOptionsT : public flatbuffers::NativeTable { + typedef Conv2DOptions TableType; + tflite::Padding padding = tflite::Padding_SAME; + int32_t stride_w = 0; + int32_t stride_h = 0; + tflite::ActivationFunctionType fused_activation_function = tflite::ActivationFunctionType_NONE; + int32_t dilation_w_factor = 1; + int32_t dilation_h_factor = 1; +}; + +struct Conv2DOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table { + typedef Conv2DOptionsT NativeTableType; + typedef Conv2DOptionsBuilder Builder; + enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE { + VT_PADDING = 4, + VT_STRIDE_W = 6, + VT_STRIDE_H = 8, + VT_FUSED_ACTIVATION_FUNCTION = 10, + VT_DILATION_W_FACTOR = 12, + VT_DILATION_H_FACTOR = 14 + }; + tflite::Padding padding() const { + return static_cast(GetField(VT_PADDING, 0)); + } + int32_t stride_w() const { + return GetField(VT_STRIDE_W, 0); + } + int32_t stride_h() const { + return GetField(VT_STRIDE_H, 0); + } + tflite::ActivationFunctionType fused_activation_function() const { + return static_cast(GetField(VT_FUSED_ACTIVATION_FUNCTION, 0)); + } + int32_t dilation_w_factor() const { + return GetField(VT_DILATION_W_FACTOR, 1); + } + int32_t dilation_h_factor() 
const { + return GetField(VT_DILATION_H_FACTOR, 1); + } + bool Verify(flatbuffers::Verifier &verifier) const { + return VerifyTableStart(verifier) && + VerifyField(verifier, VT_PADDING, 1) && + VerifyField(verifier, VT_STRIDE_W, 4) && + VerifyField(verifier, VT_STRIDE_H, 4) && + VerifyField(verifier, VT_FUSED_ACTIVATION_FUNCTION, 1) && + VerifyField(verifier, VT_DILATION_W_FACTOR, 4) && + VerifyField(verifier, VT_DILATION_H_FACTOR, 4) && + verifier.EndTable(); + } + Conv2DOptionsT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const; + void UnPackTo(Conv2DOptionsT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const; + static flatbuffers::Offset Pack(flatbuffers::FlatBufferBuilder &_fbb, const Conv2DOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); +}; + +struct Conv2DOptionsBuilder { + typedef Conv2DOptions Table; + flatbuffers::FlatBufferBuilder &fbb_; + flatbuffers::uoffset_t start_; + void add_padding(tflite::Padding padding) { + fbb_.AddElement(Conv2DOptions::VT_PADDING, static_cast(padding), 0); + } + void add_stride_w(int32_t stride_w) { + fbb_.AddElement(Conv2DOptions::VT_STRIDE_W, stride_w, 0); + } + void add_stride_h(int32_t stride_h) { + fbb_.AddElement(Conv2DOptions::VT_STRIDE_H, stride_h, 0); + } + void add_fused_activation_function(tflite::ActivationFunctionType fused_activation_function) { + fbb_.AddElement(Conv2DOptions::VT_FUSED_ACTIVATION_FUNCTION, static_cast(fused_activation_function), 0); + } + void add_dilation_w_factor(int32_t dilation_w_factor) { + fbb_.AddElement(Conv2DOptions::VT_DILATION_W_FACTOR, dilation_w_factor, 1); + } + void add_dilation_h_factor(int32_t dilation_h_factor) { + fbb_.AddElement(Conv2DOptions::VT_DILATION_H_FACTOR, dilation_h_factor, 1); + } + explicit Conv2DOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb) + : fbb_(_fbb) { + start_ = fbb_.StartTable(); + } + flatbuffers::Offset Finish() { + const auto end = fbb_.EndTable(start_); + auto o = 
flatbuffers::Offset(end); + return o; + } +}; + +inline flatbuffers::Offset CreateConv2DOptions( + flatbuffers::FlatBufferBuilder &_fbb, + tflite::Padding padding = tflite::Padding_SAME, + int32_t stride_w = 0, + int32_t stride_h = 0, + tflite::ActivationFunctionType fused_activation_function = tflite::ActivationFunctionType_NONE, + int32_t dilation_w_factor = 1, + int32_t dilation_h_factor = 1) { + Conv2DOptionsBuilder builder_(_fbb); + builder_.add_dilation_h_factor(dilation_h_factor); + builder_.add_dilation_w_factor(dilation_w_factor); + builder_.add_stride_h(stride_h); + builder_.add_stride_w(stride_w); + builder_.add_fused_activation_function(fused_activation_function); + builder_.add_padding(padding); + return builder_.Finish(); +} + +flatbuffers::Offset CreateConv2DOptions(flatbuffers::FlatBufferBuilder &_fbb, const Conv2DOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); + +struct Conv3DOptionsT : public flatbuffers::NativeTable { + typedef Conv3DOptions TableType; + tflite::Padding padding = tflite::Padding_SAME; + int32_t stride_d = 0; + int32_t stride_w = 0; + int32_t stride_h = 0; + tflite::ActivationFunctionType fused_activation_function = tflite::ActivationFunctionType_NONE; + int32_t dilation_d_factor = 1; + int32_t dilation_w_factor = 1; + int32_t dilation_h_factor = 1; +}; + +struct Conv3DOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table { + typedef Conv3DOptionsT NativeTableType; + typedef Conv3DOptionsBuilder Builder; + enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE { + VT_PADDING = 4, + VT_STRIDE_D = 6, + VT_STRIDE_W = 8, + VT_STRIDE_H = 10, + VT_FUSED_ACTIVATION_FUNCTION = 12, + VT_DILATION_D_FACTOR = 14, + VT_DILATION_W_FACTOR = 16, + VT_DILATION_H_FACTOR = 18 + }; + tflite::Padding padding() const { + return static_cast(GetField(VT_PADDING, 0)); + } + int32_t stride_d() const { + return GetField(VT_STRIDE_D, 0); + } + int32_t stride_w() const { + return GetField(VT_STRIDE_W, 0); + } + 
int32_t stride_h() const { + return GetField(VT_STRIDE_H, 0); + } + tflite::ActivationFunctionType fused_activation_function() const { + return static_cast(GetField(VT_FUSED_ACTIVATION_FUNCTION, 0)); + } + int32_t dilation_d_factor() const { + return GetField(VT_DILATION_D_FACTOR, 1); + } + int32_t dilation_w_factor() const { + return GetField(VT_DILATION_W_FACTOR, 1); + } + int32_t dilation_h_factor() const { + return GetField(VT_DILATION_H_FACTOR, 1); + } + bool Verify(flatbuffers::Verifier &verifier) const { + return VerifyTableStart(verifier) && + VerifyField(verifier, VT_PADDING, 1) && + VerifyField(verifier, VT_STRIDE_D, 4) && + VerifyField(verifier, VT_STRIDE_W, 4) && + VerifyField(verifier, VT_STRIDE_H, 4) && + VerifyField(verifier, VT_FUSED_ACTIVATION_FUNCTION, 1) && + VerifyField(verifier, VT_DILATION_D_FACTOR, 4) && + VerifyField(verifier, VT_DILATION_W_FACTOR, 4) && + VerifyField(verifier, VT_DILATION_H_FACTOR, 4) && + verifier.EndTable(); + } + Conv3DOptionsT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const; + void UnPackTo(Conv3DOptionsT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const; + static flatbuffers::Offset Pack(flatbuffers::FlatBufferBuilder &_fbb, const Conv3DOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); +}; + +struct Conv3DOptionsBuilder { + typedef Conv3DOptions Table; + flatbuffers::FlatBufferBuilder &fbb_; + flatbuffers::uoffset_t start_; + void add_padding(tflite::Padding padding) { + fbb_.AddElement(Conv3DOptions::VT_PADDING, static_cast(padding), 0); + } + void add_stride_d(int32_t stride_d) { + fbb_.AddElement(Conv3DOptions::VT_STRIDE_D, stride_d, 0); + } + void add_stride_w(int32_t stride_w) { + fbb_.AddElement(Conv3DOptions::VT_STRIDE_W, stride_w, 0); + } + void add_stride_h(int32_t stride_h) { + fbb_.AddElement(Conv3DOptions::VT_STRIDE_H, stride_h, 0); + } + void add_fused_activation_function(tflite::ActivationFunctionType fused_activation_function) { + 
fbb_.AddElement(Conv3DOptions::VT_FUSED_ACTIVATION_FUNCTION, static_cast(fused_activation_function), 0); + } + void add_dilation_d_factor(int32_t dilation_d_factor) { + fbb_.AddElement(Conv3DOptions::VT_DILATION_D_FACTOR, dilation_d_factor, 1); + } + void add_dilation_w_factor(int32_t dilation_w_factor) { + fbb_.AddElement(Conv3DOptions::VT_DILATION_W_FACTOR, dilation_w_factor, 1); + } + void add_dilation_h_factor(int32_t dilation_h_factor) { + fbb_.AddElement(Conv3DOptions::VT_DILATION_H_FACTOR, dilation_h_factor, 1); + } + explicit Conv3DOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb) + : fbb_(_fbb) { + start_ = fbb_.StartTable(); + } + flatbuffers::Offset Finish() { + const auto end = fbb_.EndTable(start_); + auto o = flatbuffers::Offset(end); + return o; + } +}; + +inline flatbuffers::Offset CreateConv3DOptions( + flatbuffers::FlatBufferBuilder &_fbb, + tflite::Padding padding = tflite::Padding_SAME, + int32_t stride_d = 0, + int32_t stride_w = 0, + int32_t stride_h = 0, + tflite::ActivationFunctionType fused_activation_function = tflite::ActivationFunctionType_NONE, + int32_t dilation_d_factor = 1, + int32_t dilation_w_factor = 1, + int32_t dilation_h_factor = 1) { + Conv3DOptionsBuilder builder_(_fbb); + builder_.add_dilation_h_factor(dilation_h_factor); + builder_.add_dilation_w_factor(dilation_w_factor); + builder_.add_dilation_d_factor(dilation_d_factor); + builder_.add_stride_h(stride_h); + builder_.add_stride_w(stride_w); + builder_.add_stride_d(stride_d); + builder_.add_fused_activation_function(fused_activation_function); + builder_.add_padding(padding); + return builder_.Finish(); +} + +flatbuffers::Offset CreateConv3DOptions(flatbuffers::FlatBufferBuilder &_fbb, const Conv3DOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); + +struct Pool2DOptionsT : public flatbuffers::NativeTable { + typedef Pool2DOptions TableType; + tflite::Padding padding = tflite::Padding_SAME; + int32_t stride_w = 0; + int32_t stride_h = 0; + 
int32_t filter_width = 0; + int32_t filter_height = 0; + tflite::ActivationFunctionType fused_activation_function = tflite::ActivationFunctionType_NONE; +}; + +struct Pool2DOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table { + typedef Pool2DOptionsT NativeTableType; + typedef Pool2DOptionsBuilder Builder; + enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE { + VT_PADDING = 4, + VT_STRIDE_W = 6, + VT_STRIDE_H = 8, + VT_FILTER_WIDTH = 10, + VT_FILTER_HEIGHT = 12, + VT_FUSED_ACTIVATION_FUNCTION = 14 + }; + tflite::Padding padding() const { + return static_cast(GetField(VT_PADDING, 0)); + } + int32_t stride_w() const { + return GetField(VT_STRIDE_W, 0); + } + int32_t stride_h() const { + return GetField(VT_STRIDE_H, 0); + } + int32_t filter_width() const { + return GetField(VT_FILTER_WIDTH, 0); + } + int32_t filter_height() const { + return GetField(VT_FILTER_HEIGHT, 0); + } + tflite::ActivationFunctionType fused_activation_function() const { + return static_cast(GetField(VT_FUSED_ACTIVATION_FUNCTION, 0)); + } + bool Verify(flatbuffers::Verifier &verifier) const { + return VerifyTableStart(verifier) && + VerifyField(verifier, VT_PADDING, 1) && + VerifyField(verifier, VT_STRIDE_W, 4) && + VerifyField(verifier, VT_STRIDE_H, 4) && + VerifyField(verifier, VT_FILTER_WIDTH, 4) && + VerifyField(verifier, VT_FILTER_HEIGHT, 4) && + VerifyField(verifier, VT_FUSED_ACTIVATION_FUNCTION, 1) && + verifier.EndTable(); + } + Pool2DOptionsT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const; + void UnPackTo(Pool2DOptionsT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const; + static flatbuffers::Offset Pack(flatbuffers::FlatBufferBuilder &_fbb, const Pool2DOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); +}; + +struct Pool2DOptionsBuilder { + typedef Pool2DOptions Table; + flatbuffers::FlatBufferBuilder &fbb_; + flatbuffers::uoffset_t start_; + void add_padding(tflite::Padding padding) { + 
fbb_.AddElement(Pool2DOptions::VT_PADDING, static_cast(padding), 0); + } + void add_stride_w(int32_t stride_w) { + fbb_.AddElement(Pool2DOptions::VT_STRIDE_W, stride_w, 0); + } + void add_stride_h(int32_t stride_h) { + fbb_.AddElement(Pool2DOptions::VT_STRIDE_H, stride_h, 0); + } + void add_filter_width(int32_t filter_width) { + fbb_.AddElement(Pool2DOptions::VT_FILTER_WIDTH, filter_width, 0); + } + void add_filter_height(int32_t filter_height) { + fbb_.AddElement(Pool2DOptions::VT_FILTER_HEIGHT, filter_height, 0); + } + void add_fused_activation_function(tflite::ActivationFunctionType fused_activation_function) { + fbb_.AddElement(Pool2DOptions::VT_FUSED_ACTIVATION_FUNCTION, static_cast(fused_activation_function), 0); + } + explicit Pool2DOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb) + : fbb_(_fbb) { + start_ = fbb_.StartTable(); + } + flatbuffers::Offset Finish() { + const auto end = fbb_.EndTable(start_); + auto o = flatbuffers::Offset(end); + return o; + } +}; + +inline flatbuffers::Offset CreatePool2DOptions( + flatbuffers::FlatBufferBuilder &_fbb, + tflite::Padding padding = tflite::Padding_SAME, + int32_t stride_w = 0, + int32_t stride_h = 0, + int32_t filter_width = 0, + int32_t filter_height = 0, + tflite::ActivationFunctionType fused_activation_function = tflite::ActivationFunctionType_NONE) { + Pool2DOptionsBuilder builder_(_fbb); + builder_.add_filter_height(filter_height); + builder_.add_filter_width(filter_width); + builder_.add_stride_h(stride_h); + builder_.add_stride_w(stride_w); + builder_.add_fused_activation_function(fused_activation_function); + builder_.add_padding(padding); + return builder_.Finish(); +} + +flatbuffers::Offset CreatePool2DOptions(flatbuffers::FlatBufferBuilder &_fbb, const Pool2DOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); + +struct DepthwiseConv2DOptionsT : public flatbuffers::NativeTable { + typedef DepthwiseConv2DOptions TableType; + tflite::Padding padding = tflite::Padding_SAME; + 
int32_t stride_w = 0; + int32_t stride_h = 0; + int32_t depth_multiplier = 0; + tflite::ActivationFunctionType fused_activation_function = tflite::ActivationFunctionType_NONE; + int32_t dilation_w_factor = 1; + int32_t dilation_h_factor = 1; +}; + +struct DepthwiseConv2DOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table { + typedef DepthwiseConv2DOptionsT NativeTableType; + typedef DepthwiseConv2DOptionsBuilder Builder; + enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE { + VT_PADDING = 4, + VT_STRIDE_W = 6, + VT_STRIDE_H = 8, + VT_DEPTH_MULTIPLIER = 10, + VT_FUSED_ACTIVATION_FUNCTION = 12, + VT_DILATION_W_FACTOR = 14, + VT_DILATION_H_FACTOR = 16 + }; + tflite::Padding padding() const { + return static_cast(GetField(VT_PADDING, 0)); + } + int32_t stride_w() const { + return GetField(VT_STRIDE_W, 0); + } + int32_t stride_h() const { + return GetField(VT_STRIDE_H, 0); + } + int32_t depth_multiplier() const { + return GetField(VT_DEPTH_MULTIPLIER, 0); + } + tflite::ActivationFunctionType fused_activation_function() const { + return static_cast(GetField(VT_FUSED_ACTIVATION_FUNCTION, 0)); + } + int32_t dilation_w_factor() const { + return GetField(VT_DILATION_W_FACTOR, 1); + } + int32_t dilation_h_factor() const { + return GetField(VT_DILATION_H_FACTOR, 1); + } + bool Verify(flatbuffers::Verifier &verifier) const { + return VerifyTableStart(verifier) && + VerifyField(verifier, VT_PADDING, 1) && + VerifyField(verifier, VT_STRIDE_W, 4) && + VerifyField(verifier, VT_STRIDE_H, 4) && + VerifyField(verifier, VT_DEPTH_MULTIPLIER, 4) && + VerifyField(verifier, VT_FUSED_ACTIVATION_FUNCTION, 1) && + VerifyField(verifier, VT_DILATION_W_FACTOR, 4) && + VerifyField(verifier, VT_DILATION_H_FACTOR, 4) && + verifier.EndTable(); + } + DepthwiseConv2DOptionsT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const; + void UnPackTo(DepthwiseConv2DOptionsT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const; + static 
flatbuffers::Offset Pack(flatbuffers::FlatBufferBuilder &_fbb, const DepthwiseConv2DOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); +}; + +struct DepthwiseConv2DOptionsBuilder { + typedef DepthwiseConv2DOptions Table; + flatbuffers::FlatBufferBuilder &fbb_; + flatbuffers::uoffset_t start_; + void add_padding(tflite::Padding padding) { + fbb_.AddElement(DepthwiseConv2DOptions::VT_PADDING, static_cast(padding), 0); + } + void add_stride_w(int32_t stride_w) { + fbb_.AddElement(DepthwiseConv2DOptions::VT_STRIDE_W, stride_w, 0); + } + void add_stride_h(int32_t stride_h) { + fbb_.AddElement(DepthwiseConv2DOptions::VT_STRIDE_H, stride_h, 0); + } + void add_depth_multiplier(int32_t depth_multiplier) { + fbb_.AddElement(DepthwiseConv2DOptions::VT_DEPTH_MULTIPLIER, depth_multiplier, 0); + } + void add_fused_activation_function(tflite::ActivationFunctionType fused_activation_function) { + fbb_.AddElement(DepthwiseConv2DOptions::VT_FUSED_ACTIVATION_FUNCTION, static_cast(fused_activation_function), 0); + } + void add_dilation_w_factor(int32_t dilation_w_factor) { + fbb_.AddElement(DepthwiseConv2DOptions::VT_DILATION_W_FACTOR, dilation_w_factor, 1); + } + void add_dilation_h_factor(int32_t dilation_h_factor) { + fbb_.AddElement(DepthwiseConv2DOptions::VT_DILATION_H_FACTOR, dilation_h_factor, 1); + } + explicit DepthwiseConv2DOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb) + : fbb_(_fbb) { + start_ = fbb_.StartTable(); + } + flatbuffers::Offset Finish() { + const auto end = fbb_.EndTable(start_); + auto o = flatbuffers::Offset(end); + return o; + } +}; + +inline flatbuffers::Offset CreateDepthwiseConv2DOptions( + flatbuffers::FlatBufferBuilder &_fbb, + tflite::Padding padding = tflite::Padding_SAME, + int32_t stride_w = 0, + int32_t stride_h = 0, + int32_t depth_multiplier = 0, + tflite::ActivationFunctionType fused_activation_function = tflite::ActivationFunctionType_NONE, + int32_t dilation_w_factor = 1, + int32_t dilation_h_factor = 1) { + 
DepthwiseConv2DOptionsBuilder builder_(_fbb); + builder_.add_dilation_h_factor(dilation_h_factor); + builder_.add_dilation_w_factor(dilation_w_factor); + builder_.add_depth_multiplier(depth_multiplier); + builder_.add_stride_h(stride_h); + builder_.add_stride_w(stride_w); + builder_.add_fused_activation_function(fused_activation_function); + builder_.add_padding(padding); + return builder_.Finish(); +} + +flatbuffers::Offset CreateDepthwiseConv2DOptions(flatbuffers::FlatBufferBuilder &_fbb, const DepthwiseConv2DOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); + +struct ConcatEmbeddingsOptionsT : public flatbuffers::NativeTable { + typedef ConcatEmbeddingsOptions TableType; + int32_t num_channels = 0; + std::vector num_columns_per_channel{}; + std::vector embedding_dim_per_channel{}; +}; + +struct ConcatEmbeddingsOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table { + typedef ConcatEmbeddingsOptionsT NativeTableType; + typedef ConcatEmbeddingsOptionsBuilder Builder; + enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE { + VT_NUM_CHANNELS = 4, + VT_NUM_COLUMNS_PER_CHANNEL = 6, + VT_EMBEDDING_DIM_PER_CHANNEL = 8 + }; + int32_t num_channels() const { + return GetField(VT_NUM_CHANNELS, 0); + } + const flatbuffers::Vector *num_columns_per_channel() const { + return GetPointer *>(VT_NUM_COLUMNS_PER_CHANNEL); + } + const flatbuffers::Vector *embedding_dim_per_channel() const { + return GetPointer *>(VT_EMBEDDING_DIM_PER_CHANNEL); + } + bool Verify(flatbuffers::Verifier &verifier) const { + return VerifyTableStart(verifier) && + VerifyField(verifier, VT_NUM_CHANNELS, 4) && + VerifyOffset(verifier, VT_NUM_COLUMNS_PER_CHANNEL) && + verifier.VerifyVector(num_columns_per_channel()) && + VerifyOffset(verifier, VT_EMBEDDING_DIM_PER_CHANNEL) && + verifier.VerifyVector(embedding_dim_per_channel()) && + verifier.EndTable(); + } + ConcatEmbeddingsOptionsT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const; + 
void UnPackTo(ConcatEmbeddingsOptionsT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const; + static flatbuffers::Offset Pack(flatbuffers::FlatBufferBuilder &_fbb, const ConcatEmbeddingsOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); +}; + +struct ConcatEmbeddingsOptionsBuilder { + typedef ConcatEmbeddingsOptions Table; + flatbuffers::FlatBufferBuilder &fbb_; + flatbuffers::uoffset_t start_; + void add_num_channels(int32_t num_channels) { + fbb_.AddElement(ConcatEmbeddingsOptions::VT_NUM_CHANNELS, num_channels, 0); + } + void add_num_columns_per_channel(flatbuffers::Offset> num_columns_per_channel) { + fbb_.AddOffset(ConcatEmbeddingsOptions::VT_NUM_COLUMNS_PER_CHANNEL, num_columns_per_channel); + } + void add_embedding_dim_per_channel(flatbuffers::Offset> embedding_dim_per_channel) { + fbb_.AddOffset(ConcatEmbeddingsOptions::VT_EMBEDDING_DIM_PER_CHANNEL, embedding_dim_per_channel); + } + explicit ConcatEmbeddingsOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb) + : fbb_(_fbb) { + start_ = fbb_.StartTable(); + } + flatbuffers::Offset Finish() { + const auto end = fbb_.EndTable(start_); + auto o = flatbuffers::Offset(end); + return o; + } +}; + +inline flatbuffers::Offset CreateConcatEmbeddingsOptions( + flatbuffers::FlatBufferBuilder &_fbb, + int32_t num_channels = 0, + flatbuffers::Offset> num_columns_per_channel = 0, + flatbuffers::Offset> embedding_dim_per_channel = 0) { + ConcatEmbeddingsOptionsBuilder builder_(_fbb); + builder_.add_embedding_dim_per_channel(embedding_dim_per_channel); + builder_.add_num_columns_per_channel(num_columns_per_channel); + builder_.add_num_channels(num_channels); + return builder_.Finish(); +} + +inline flatbuffers::Offset CreateConcatEmbeddingsOptionsDirect( + flatbuffers::FlatBufferBuilder &_fbb, + int32_t num_channels = 0, + const std::vector *num_columns_per_channel = nullptr, + const std::vector *embedding_dim_per_channel = nullptr) { + auto num_columns_per_channel__ = 
num_columns_per_channel ? _fbb.CreateVector(*num_columns_per_channel) : 0; + auto embedding_dim_per_channel__ = embedding_dim_per_channel ? _fbb.CreateVector(*embedding_dim_per_channel) : 0; + return tflite::CreateConcatEmbeddingsOptions( + _fbb, + num_channels, + num_columns_per_channel__, + embedding_dim_per_channel__); +} + +flatbuffers::Offset CreateConcatEmbeddingsOptions(flatbuffers::FlatBufferBuilder &_fbb, const ConcatEmbeddingsOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); + +struct LSHProjectionOptionsT : public flatbuffers::NativeTable { + typedef LSHProjectionOptions TableType; + tflite::LSHProjectionType type = tflite::LSHProjectionType_UNKNOWN; +}; + +struct LSHProjectionOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table { + typedef LSHProjectionOptionsT NativeTableType; + typedef LSHProjectionOptionsBuilder Builder; + enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE { + VT_TYPE = 4 + }; + tflite::LSHProjectionType type() const { + return static_cast(GetField(VT_TYPE, 0)); + } + bool Verify(flatbuffers::Verifier &verifier) const { + return VerifyTableStart(verifier) && + VerifyField(verifier, VT_TYPE, 1) && + verifier.EndTable(); + } + LSHProjectionOptionsT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const; + void UnPackTo(LSHProjectionOptionsT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const; + static flatbuffers::Offset Pack(flatbuffers::FlatBufferBuilder &_fbb, const LSHProjectionOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); +}; + +struct LSHProjectionOptionsBuilder { + typedef LSHProjectionOptions Table; + flatbuffers::FlatBufferBuilder &fbb_; + flatbuffers::uoffset_t start_; + void add_type(tflite::LSHProjectionType type) { + fbb_.AddElement(LSHProjectionOptions::VT_TYPE, static_cast(type), 0); + } + explicit LSHProjectionOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb) + : fbb_(_fbb) { + start_ = 
fbb_.StartTable(); + } + flatbuffers::Offset Finish() { + const auto end = fbb_.EndTable(start_); + auto o = flatbuffers::Offset(end); + return o; + } +}; + +inline flatbuffers::Offset CreateLSHProjectionOptions( + flatbuffers::FlatBufferBuilder &_fbb, + tflite::LSHProjectionType type = tflite::LSHProjectionType_UNKNOWN) { + LSHProjectionOptionsBuilder builder_(_fbb); + builder_.add_type(type); + return builder_.Finish(); +} + +flatbuffers::Offset CreateLSHProjectionOptions(flatbuffers::FlatBufferBuilder &_fbb, const LSHProjectionOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); + +struct SVDFOptionsT : public flatbuffers::NativeTable { + typedef SVDFOptions TableType; + int32_t rank = 0; + tflite::ActivationFunctionType fused_activation_function = tflite::ActivationFunctionType_NONE; + bool asymmetric_quantize_inputs = false; +}; + +struct SVDFOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table { + typedef SVDFOptionsT NativeTableType; + typedef SVDFOptionsBuilder Builder; + enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE { + VT_RANK = 4, + VT_FUSED_ACTIVATION_FUNCTION = 6, + VT_ASYMMETRIC_QUANTIZE_INPUTS = 8 + }; + int32_t rank() const { + return GetField(VT_RANK, 0); + } + tflite::ActivationFunctionType fused_activation_function() const { + return static_cast(GetField(VT_FUSED_ACTIVATION_FUNCTION, 0)); + } + bool asymmetric_quantize_inputs() const { + return GetField(VT_ASYMMETRIC_QUANTIZE_INPUTS, 0) != 0; + } + bool Verify(flatbuffers::Verifier &verifier) const { + return VerifyTableStart(verifier) && + VerifyField(verifier, VT_RANK, 4) && + VerifyField(verifier, VT_FUSED_ACTIVATION_FUNCTION, 1) && + VerifyField(verifier, VT_ASYMMETRIC_QUANTIZE_INPUTS, 1) && + verifier.EndTable(); + } + SVDFOptionsT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const; + void UnPackTo(SVDFOptionsT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const; + static flatbuffers::Offset 
Pack(flatbuffers::FlatBufferBuilder &_fbb, const SVDFOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); +}; + +struct SVDFOptionsBuilder { + typedef SVDFOptions Table; + flatbuffers::FlatBufferBuilder &fbb_; + flatbuffers::uoffset_t start_; + void add_rank(int32_t rank) { + fbb_.AddElement(SVDFOptions::VT_RANK, rank, 0); + } + void add_fused_activation_function(tflite::ActivationFunctionType fused_activation_function) { + fbb_.AddElement(SVDFOptions::VT_FUSED_ACTIVATION_FUNCTION, static_cast(fused_activation_function), 0); + } + void add_asymmetric_quantize_inputs(bool asymmetric_quantize_inputs) { + fbb_.AddElement(SVDFOptions::VT_ASYMMETRIC_QUANTIZE_INPUTS, static_cast(asymmetric_quantize_inputs), 0); + } + explicit SVDFOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb) + : fbb_(_fbb) { + start_ = fbb_.StartTable(); + } + flatbuffers::Offset Finish() { + const auto end = fbb_.EndTable(start_); + auto o = flatbuffers::Offset(end); + return o; + } +}; + +inline flatbuffers::Offset CreateSVDFOptions( + flatbuffers::FlatBufferBuilder &_fbb, + int32_t rank = 0, + tflite::ActivationFunctionType fused_activation_function = tflite::ActivationFunctionType_NONE, + bool asymmetric_quantize_inputs = false) { + SVDFOptionsBuilder builder_(_fbb); + builder_.add_rank(rank); + builder_.add_asymmetric_quantize_inputs(asymmetric_quantize_inputs); + builder_.add_fused_activation_function(fused_activation_function); + return builder_.Finish(); +} + +flatbuffers::Offset CreateSVDFOptions(flatbuffers::FlatBufferBuilder &_fbb, const SVDFOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); + +struct RNNOptionsT : public flatbuffers::NativeTable { + typedef RNNOptions TableType; + tflite::ActivationFunctionType fused_activation_function = tflite::ActivationFunctionType_NONE; + bool asymmetric_quantize_inputs = false; +}; + +struct RNNOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table { + typedef RNNOptionsT NativeTableType; + 
typedef RNNOptionsBuilder Builder; + enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE { + VT_FUSED_ACTIVATION_FUNCTION = 4, + VT_ASYMMETRIC_QUANTIZE_INPUTS = 6 + }; + tflite::ActivationFunctionType fused_activation_function() const { + return static_cast(GetField(VT_FUSED_ACTIVATION_FUNCTION, 0)); + } + bool asymmetric_quantize_inputs() const { + return GetField(VT_ASYMMETRIC_QUANTIZE_INPUTS, 0) != 0; + } + bool Verify(flatbuffers::Verifier &verifier) const { + return VerifyTableStart(verifier) && + VerifyField(verifier, VT_FUSED_ACTIVATION_FUNCTION, 1) && + VerifyField(verifier, VT_ASYMMETRIC_QUANTIZE_INPUTS, 1) && + verifier.EndTable(); + } + RNNOptionsT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const; + void UnPackTo(RNNOptionsT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const; + static flatbuffers::Offset Pack(flatbuffers::FlatBufferBuilder &_fbb, const RNNOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); +}; + +struct RNNOptionsBuilder { + typedef RNNOptions Table; + flatbuffers::FlatBufferBuilder &fbb_; + flatbuffers::uoffset_t start_; + void add_fused_activation_function(tflite::ActivationFunctionType fused_activation_function) { + fbb_.AddElement(RNNOptions::VT_FUSED_ACTIVATION_FUNCTION, static_cast(fused_activation_function), 0); + } + void add_asymmetric_quantize_inputs(bool asymmetric_quantize_inputs) { + fbb_.AddElement(RNNOptions::VT_ASYMMETRIC_QUANTIZE_INPUTS, static_cast(asymmetric_quantize_inputs), 0); + } + explicit RNNOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb) + : fbb_(_fbb) { + start_ = fbb_.StartTable(); + } + flatbuffers::Offset Finish() { + const auto end = fbb_.EndTable(start_); + auto o = flatbuffers::Offset(end); + return o; + } +}; + +inline flatbuffers::Offset CreateRNNOptions( + flatbuffers::FlatBufferBuilder &_fbb, + tflite::ActivationFunctionType fused_activation_function = tflite::ActivationFunctionType_NONE, + bool 
asymmetric_quantize_inputs = false) { + RNNOptionsBuilder builder_(_fbb); + builder_.add_asymmetric_quantize_inputs(asymmetric_quantize_inputs); + builder_.add_fused_activation_function(fused_activation_function); + return builder_.Finish(); +} + +flatbuffers::Offset CreateRNNOptions(flatbuffers::FlatBufferBuilder &_fbb, const RNNOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); + +struct SequenceRNNOptionsT : public flatbuffers::NativeTable { + typedef SequenceRNNOptions TableType; + bool time_major = false; + tflite::ActivationFunctionType fused_activation_function = tflite::ActivationFunctionType_NONE; + bool asymmetric_quantize_inputs = false; +}; + +struct SequenceRNNOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table { + typedef SequenceRNNOptionsT NativeTableType; + typedef SequenceRNNOptionsBuilder Builder; + enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE { + VT_TIME_MAJOR = 4, + VT_FUSED_ACTIVATION_FUNCTION = 6, + VT_ASYMMETRIC_QUANTIZE_INPUTS = 8 + }; + bool time_major() const { + return GetField(VT_TIME_MAJOR, 0) != 0; + } + tflite::ActivationFunctionType fused_activation_function() const { + return static_cast(GetField(VT_FUSED_ACTIVATION_FUNCTION, 0)); + } + bool asymmetric_quantize_inputs() const { + return GetField(VT_ASYMMETRIC_QUANTIZE_INPUTS, 0) != 0; + } + bool Verify(flatbuffers::Verifier &verifier) const { + return VerifyTableStart(verifier) && + VerifyField(verifier, VT_TIME_MAJOR, 1) && + VerifyField(verifier, VT_FUSED_ACTIVATION_FUNCTION, 1) && + VerifyField(verifier, VT_ASYMMETRIC_QUANTIZE_INPUTS, 1) && + verifier.EndTable(); + } + SequenceRNNOptionsT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const; + void UnPackTo(SequenceRNNOptionsT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const; + static flatbuffers::Offset Pack(flatbuffers::FlatBufferBuilder &_fbb, const SequenceRNNOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher = 
nullptr); +}; + +struct SequenceRNNOptionsBuilder { + typedef SequenceRNNOptions Table; + flatbuffers::FlatBufferBuilder &fbb_; + flatbuffers::uoffset_t start_; + void add_time_major(bool time_major) { + fbb_.AddElement(SequenceRNNOptions::VT_TIME_MAJOR, static_cast(time_major), 0); + } + void add_fused_activation_function(tflite::ActivationFunctionType fused_activation_function) { + fbb_.AddElement(SequenceRNNOptions::VT_FUSED_ACTIVATION_FUNCTION, static_cast(fused_activation_function), 0); + } + void add_asymmetric_quantize_inputs(bool asymmetric_quantize_inputs) { + fbb_.AddElement(SequenceRNNOptions::VT_ASYMMETRIC_QUANTIZE_INPUTS, static_cast(asymmetric_quantize_inputs), 0); + } + explicit SequenceRNNOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb) + : fbb_(_fbb) { + start_ = fbb_.StartTable(); + } + flatbuffers::Offset Finish() { + const auto end = fbb_.EndTable(start_); + auto o = flatbuffers::Offset(end); + return o; + } +}; + +inline flatbuffers::Offset CreateSequenceRNNOptions( + flatbuffers::FlatBufferBuilder &_fbb, + bool time_major = false, + tflite::ActivationFunctionType fused_activation_function = tflite::ActivationFunctionType_NONE, + bool asymmetric_quantize_inputs = false) { + SequenceRNNOptionsBuilder builder_(_fbb); + builder_.add_asymmetric_quantize_inputs(asymmetric_quantize_inputs); + builder_.add_fused_activation_function(fused_activation_function); + builder_.add_time_major(time_major); + return builder_.Finish(); +} + +flatbuffers::Offset CreateSequenceRNNOptions(flatbuffers::FlatBufferBuilder &_fbb, const SequenceRNNOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); + +struct BidirectionalSequenceRNNOptionsT : public flatbuffers::NativeTable { + typedef BidirectionalSequenceRNNOptions TableType; + bool time_major = false; + tflite::ActivationFunctionType fused_activation_function = tflite::ActivationFunctionType_NONE; + bool merge_outputs = false; + bool asymmetric_quantize_inputs = false; +}; + +struct 
BidirectionalSequenceRNNOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table { + typedef BidirectionalSequenceRNNOptionsT NativeTableType; + typedef BidirectionalSequenceRNNOptionsBuilder Builder; + enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE { + VT_TIME_MAJOR = 4, + VT_FUSED_ACTIVATION_FUNCTION = 6, + VT_MERGE_OUTPUTS = 8, + VT_ASYMMETRIC_QUANTIZE_INPUTS = 10 + }; + bool time_major() const { + return GetField(VT_TIME_MAJOR, 0) != 0; + } + tflite::ActivationFunctionType fused_activation_function() const { + return static_cast(GetField(VT_FUSED_ACTIVATION_FUNCTION, 0)); + } + bool merge_outputs() const { + return GetField(VT_MERGE_OUTPUTS, 0) != 0; + } + bool asymmetric_quantize_inputs() const { + return GetField(VT_ASYMMETRIC_QUANTIZE_INPUTS, 0) != 0; + } + bool Verify(flatbuffers::Verifier &verifier) const { + return VerifyTableStart(verifier) && + VerifyField(verifier, VT_TIME_MAJOR, 1) && + VerifyField(verifier, VT_FUSED_ACTIVATION_FUNCTION, 1) && + VerifyField(verifier, VT_MERGE_OUTPUTS, 1) && + VerifyField(verifier, VT_ASYMMETRIC_QUANTIZE_INPUTS, 1) && + verifier.EndTable(); + } + BidirectionalSequenceRNNOptionsT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const; + void UnPackTo(BidirectionalSequenceRNNOptionsT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const; + static flatbuffers::Offset Pack(flatbuffers::FlatBufferBuilder &_fbb, const BidirectionalSequenceRNNOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); +}; + +struct BidirectionalSequenceRNNOptionsBuilder { + typedef BidirectionalSequenceRNNOptions Table; + flatbuffers::FlatBufferBuilder &fbb_; + flatbuffers::uoffset_t start_; + void add_time_major(bool time_major) { + fbb_.AddElement(BidirectionalSequenceRNNOptions::VT_TIME_MAJOR, static_cast(time_major), 0); + } + void add_fused_activation_function(tflite::ActivationFunctionType fused_activation_function) { + 
fbb_.AddElement(BidirectionalSequenceRNNOptions::VT_FUSED_ACTIVATION_FUNCTION, static_cast(fused_activation_function), 0); + } + void add_merge_outputs(bool merge_outputs) { + fbb_.AddElement(BidirectionalSequenceRNNOptions::VT_MERGE_OUTPUTS, static_cast(merge_outputs), 0); + } + void add_asymmetric_quantize_inputs(bool asymmetric_quantize_inputs) { + fbb_.AddElement(BidirectionalSequenceRNNOptions::VT_ASYMMETRIC_QUANTIZE_INPUTS, static_cast(asymmetric_quantize_inputs), 0); + } + explicit BidirectionalSequenceRNNOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb) + : fbb_(_fbb) { + start_ = fbb_.StartTable(); + } + flatbuffers::Offset Finish() { + const auto end = fbb_.EndTable(start_); + auto o = flatbuffers::Offset(end); + return o; + } +}; + +inline flatbuffers::Offset CreateBidirectionalSequenceRNNOptions( + flatbuffers::FlatBufferBuilder &_fbb, + bool time_major = false, + tflite::ActivationFunctionType fused_activation_function = tflite::ActivationFunctionType_NONE, + bool merge_outputs = false, + bool asymmetric_quantize_inputs = false) { + BidirectionalSequenceRNNOptionsBuilder builder_(_fbb); + builder_.add_asymmetric_quantize_inputs(asymmetric_quantize_inputs); + builder_.add_merge_outputs(merge_outputs); + builder_.add_fused_activation_function(fused_activation_function); + builder_.add_time_major(time_major); + return builder_.Finish(); +} + +flatbuffers::Offset CreateBidirectionalSequenceRNNOptions(flatbuffers::FlatBufferBuilder &_fbb, const BidirectionalSequenceRNNOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); + +struct FullyConnectedOptionsT : public flatbuffers::NativeTable { + typedef FullyConnectedOptions TableType; + tflite::ActivationFunctionType fused_activation_function = tflite::ActivationFunctionType_NONE; + tflite::FullyConnectedOptionsWeightsFormat weights_format = tflite::FullyConnectedOptionsWeightsFormat_DEFAULT; + bool keep_num_dims = false; + bool asymmetric_quantize_inputs = false; +}; + +struct 
FullyConnectedOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table { + typedef FullyConnectedOptionsT NativeTableType; + typedef FullyConnectedOptionsBuilder Builder; + enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE { + VT_FUSED_ACTIVATION_FUNCTION = 4, + VT_WEIGHTS_FORMAT = 6, + VT_KEEP_NUM_DIMS = 8, + VT_ASYMMETRIC_QUANTIZE_INPUTS = 10 + }; + tflite::ActivationFunctionType fused_activation_function() const { + return static_cast(GetField(VT_FUSED_ACTIVATION_FUNCTION, 0)); + } + tflite::FullyConnectedOptionsWeightsFormat weights_format() const { + return static_cast(GetField(VT_WEIGHTS_FORMAT, 0)); + } + bool keep_num_dims() const { + return GetField(VT_KEEP_NUM_DIMS, 0) != 0; + } + bool asymmetric_quantize_inputs() const { + return GetField(VT_ASYMMETRIC_QUANTIZE_INPUTS, 0) != 0; + } + bool Verify(flatbuffers::Verifier &verifier) const { + return VerifyTableStart(verifier) && + VerifyField(verifier, VT_FUSED_ACTIVATION_FUNCTION, 1) && + VerifyField(verifier, VT_WEIGHTS_FORMAT, 1) && + VerifyField(verifier, VT_KEEP_NUM_DIMS, 1) && + VerifyField(verifier, VT_ASYMMETRIC_QUANTIZE_INPUTS, 1) && + verifier.EndTable(); + } + FullyConnectedOptionsT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const; + void UnPackTo(FullyConnectedOptionsT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const; + static flatbuffers::Offset Pack(flatbuffers::FlatBufferBuilder &_fbb, const FullyConnectedOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); +}; + +struct FullyConnectedOptionsBuilder { + typedef FullyConnectedOptions Table; + flatbuffers::FlatBufferBuilder &fbb_; + flatbuffers::uoffset_t start_; + void add_fused_activation_function(tflite::ActivationFunctionType fused_activation_function) { + fbb_.AddElement(FullyConnectedOptions::VT_FUSED_ACTIVATION_FUNCTION, static_cast(fused_activation_function), 0); + } + void add_weights_format(tflite::FullyConnectedOptionsWeightsFormat weights_format) 
{ + fbb_.AddElement(FullyConnectedOptions::VT_WEIGHTS_FORMAT, static_cast(weights_format), 0); + } + void add_keep_num_dims(bool keep_num_dims) { + fbb_.AddElement(FullyConnectedOptions::VT_KEEP_NUM_DIMS, static_cast(keep_num_dims), 0); + } + void add_asymmetric_quantize_inputs(bool asymmetric_quantize_inputs) { + fbb_.AddElement(FullyConnectedOptions::VT_ASYMMETRIC_QUANTIZE_INPUTS, static_cast(asymmetric_quantize_inputs), 0); + } + explicit FullyConnectedOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb) + : fbb_(_fbb) { + start_ = fbb_.StartTable(); + } + flatbuffers::Offset Finish() { + const auto end = fbb_.EndTable(start_); + auto o = flatbuffers::Offset(end); + return o; + } +}; + +inline flatbuffers::Offset CreateFullyConnectedOptions( + flatbuffers::FlatBufferBuilder &_fbb, + tflite::ActivationFunctionType fused_activation_function = tflite::ActivationFunctionType_NONE, + tflite::FullyConnectedOptionsWeightsFormat weights_format = tflite::FullyConnectedOptionsWeightsFormat_DEFAULT, + bool keep_num_dims = false, + bool asymmetric_quantize_inputs = false) { + FullyConnectedOptionsBuilder builder_(_fbb); + builder_.add_asymmetric_quantize_inputs(asymmetric_quantize_inputs); + builder_.add_keep_num_dims(keep_num_dims); + builder_.add_weights_format(weights_format); + builder_.add_fused_activation_function(fused_activation_function); + return builder_.Finish(); +} + +flatbuffers::Offset CreateFullyConnectedOptions(flatbuffers::FlatBufferBuilder &_fbb, const FullyConnectedOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); + +struct SoftmaxOptionsT : public flatbuffers::NativeTable { + typedef SoftmaxOptions TableType; + float beta = 0.0f; +}; + +struct SoftmaxOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table { + typedef SoftmaxOptionsT NativeTableType; + typedef SoftmaxOptionsBuilder Builder; + enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE { + VT_BETA = 4 + }; + float beta() const { + return 
GetField(VT_BETA, 0.0f); + } + bool Verify(flatbuffers::Verifier &verifier) const { + return VerifyTableStart(verifier) && + VerifyField(verifier, VT_BETA, 4) && + verifier.EndTable(); + } + SoftmaxOptionsT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const; + void UnPackTo(SoftmaxOptionsT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const; + static flatbuffers::Offset Pack(flatbuffers::FlatBufferBuilder &_fbb, const SoftmaxOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); +}; + +struct SoftmaxOptionsBuilder { + typedef SoftmaxOptions Table; + flatbuffers::FlatBufferBuilder &fbb_; + flatbuffers::uoffset_t start_; + void add_beta(float beta) { + fbb_.AddElement(SoftmaxOptions::VT_BETA, beta, 0.0f); + } + explicit SoftmaxOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb) + : fbb_(_fbb) { + start_ = fbb_.StartTable(); + } + flatbuffers::Offset Finish() { + const auto end = fbb_.EndTable(start_); + auto o = flatbuffers::Offset(end); + return o; + } +}; + +inline flatbuffers::Offset CreateSoftmaxOptions( + flatbuffers::FlatBufferBuilder &_fbb, + float beta = 0.0f) { + SoftmaxOptionsBuilder builder_(_fbb); + builder_.add_beta(beta); + return builder_.Finish(); +} + +flatbuffers::Offset CreateSoftmaxOptions(flatbuffers::FlatBufferBuilder &_fbb, const SoftmaxOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); + +struct ConcatenationOptionsT : public flatbuffers::NativeTable { + typedef ConcatenationOptions TableType; + int32_t axis = 0; + tflite::ActivationFunctionType fused_activation_function = tflite::ActivationFunctionType_NONE; +}; + +struct ConcatenationOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table { + typedef ConcatenationOptionsT NativeTableType; + typedef ConcatenationOptionsBuilder Builder; + enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE { + VT_AXIS = 4, + VT_FUSED_ACTIVATION_FUNCTION = 6 + }; + int32_t axis() const { + return 
GetField(VT_AXIS, 0); + } + tflite::ActivationFunctionType fused_activation_function() const { + return static_cast(GetField(VT_FUSED_ACTIVATION_FUNCTION, 0)); + } + bool Verify(flatbuffers::Verifier &verifier) const { + return VerifyTableStart(verifier) && + VerifyField(verifier, VT_AXIS, 4) && + VerifyField(verifier, VT_FUSED_ACTIVATION_FUNCTION, 1) && + verifier.EndTable(); + } + ConcatenationOptionsT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const; + void UnPackTo(ConcatenationOptionsT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const; + static flatbuffers::Offset Pack(flatbuffers::FlatBufferBuilder &_fbb, const ConcatenationOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); +}; + +struct ConcatenationOptionsBuilder { + typedef ConcatenationOptions Table; + flatbuffers::FlatBufferBuilder &fbb_; + flatbuffers::uoffset_t start_; + void add_axis(int32_t axis) { + fbb_.AddElement(ConcatenationOptions::VT_AXIS, axis, 0); + } + void add_fused_activation_function(tflite::ActivationFunctionType fused_activation_function) { + fbb_.AddElement(ConcatenationOptions::VT_FUSED_ACTIVATION_FUNCTION, static_cast(fused_activation_function), 0); + } + explicit ConcatenationOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb) + : fbb_(_fbb) { + start_ = fbb_.StartTable(); + } + flatbuffers::Offset Finish() { + const auto end = fbb_.EndTable(start_); + auto o = flatbuffers::Offset(end); + return o; + } +}; + +inline flatbuffers::Offset CreateConcatenationOptions( + flatbuffers::FlatBufferBuilder &_fbb, + int32_t axis = 0, + tflite::ActivationFunctionType fused_activation_function = tflite::ActivationFunctionType_NONE) { + ConcatenationOptionsBuilder builder_(_fbb); + builder_.add_axis(axis); + builder_.add_fused_activation_function(fused_activation_function); + return builder_.Finish(); +} + +flatbuffers::Offset CreateConcatenationOptions(flatbuffers::FlatBufferBuilder &_fbb, const ConcatenationOptionsT *_o, 
const flatbuffers::rehasher_function_t *_rehasher = nullptr); + +struct AddOptionsT : public flatbuffers::NativeTable { + typedef AddOptions TableType; + tflite::ActivationFunctionType fused_activation_function = tflite::ActivationFunctionType_NONE; + bool pot_scale_int16 = true; +}; + +struct AddOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table { + typedef AddOptionsT NativeTableType; + typedef AddOptionsBuilder Builder; + enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE { + VT_FUSED_ACTIVATION_FUNCTION = 4, + VT_POT_SCALE_INT16 = 6 + }; + tflite::ActivationFunctionType fused_activation_function() const { + return static_cast(GetField(VT_FUSED_ACTIVATION_FUNCTION, 0)); + } + bool pot_scale_int16() const { + return GetField(VT_POT_SCALE_INT16, 1) != 0; + } + bool Verify(flatbuffers::Verifier &verifier) const { + return VerifyTableStart(verifier) && + VerifyField(verifier, VT_FUSED_ACTIVATION_FUNCTION, 1) && + VerifyField(verifier, VT_POT_SCALE_INT16, 1) && + verifier.EndTable(); + } + AddOptionsT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const; + void UnPackTo(AddOptionsT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const; + static flatbuffers::Offset Pack(flatbuffers::FlatBufferBuilder &_fbb, const AddOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); +}; + +struct AddOptionsBuilder { + typedef AddOptions Table; + flatbuffers::FlatBufferBuilder &fbb_; + flatbuffers::uoffset_t start_; + void add_fused_activation_function(tflite::ActivationFunctionType fused_activation_function) { + fbb_.AddElement(AddOptions::VT_FUSED_ACTIVATION_FUNCTION, static_cast(fused_activation_function), 0); + } + void add_pot_scale_int16(bool pot_scale_int16) { + fbb_.AddElement(AddOptions::VT_POT_SCALE_INT16, static_cast(pot_scale_int16), 1); + } + explicit AddOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb) + : fbb_(_fbb) { + start_ = fbb_.StartTable(); + } + flatbuffers::Offset Finish() 
{ + const auto end = fbb_.EndTable(start_); + auto o = flatbuffers::Offset(end); + return o; + } +}; + +inline flatbuffers::Offset CreateAddOptions( + flatbuffers::FlatBufferBuilder &_fbb, + tflite::ActivationFunctionType fused_activation_function = tflite::ActivationFunctionType_NONE, + bool pot_scale_int16 = true) { + AddOptionsBuilder builder_(_fbb); + builder_.add_pot_scale_int16(pot_scale_int16); + builder_.add_fused_activation_function(fused_activation_function); + return builder_.Finish(); +} + +flatbuffers::Offset CreateAddOptions(flatbuffers::FlatBufferBuilder &_fbb, const AddOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); + +struct MulOptionsT : public flatbuffers::NativeTable { + typedef MulOptions TableType; + tflite::ActivationFunctionType fused_activation_function = tflite::ActivationFunctionType_NONE; +}; + +struct MulOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table { + typedef MulOptionsT NativeTableType; + typedef MulOptionsBuilder Builder; + enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE { + VT_FUSED_ACTIVATION_FUNCTION = 4 + }; + tflite::ActivationFunctionType fused_activation_function() const { + return static_cast(GetField(VT_FUSED_ACTIVATION_FUNCTION, 0)); + } + bool Verify(flatbuffers::Verifier &verifier) const { + return VerifyTableStart(verifier) && + VerifyField(verifier, VT_FUSED_ACTIVATION_FUNCTION, 1) && + verifier.EndTable(); + } + MulOptionsT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const; + void UnPackTo(MulOptionsT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const; + static flatbuffers::Offset Pack(flatbuffers::FlatBufferBuilder &_fbb, const MulOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); +}; + +struct MulOptionsBuilder { + typedef MulOptions Table; + flatbuffers::FlatBufferBuilder &fbb_; + flatbuffers::uoffset_t start_; + void add_fused_activation_function(tflite::ActivationFunctionType 
fused_activation_function) { + fbb_.AddElement(MulOptions::VT_FUSED_ACTIVATION_FUNCTION, static_cast(fused_activation_function), 0); + } + explicit MulOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb) + : fbb_(_fbb) { + start_ = fbb_.StartTable(); + } + flatbuffers::Offset Finish() { + const auto end = fbb_.EndTable(start_); + auto o = flatbuffers::Offset(end); + return o; + } +}; + +inline flatbuffers::Offset CreateMulOptions( + flatbuffers::FlatBufferBuilder &_fbb, + tflite::ActivationFunctionType fused_activation_function = tflite::ActivationFunctionType_NONE) { + MulOptionsBuilder builder_(_fbb); + builder_.add_fused_activation_function(fused_activation_function); + return builder_.Finish(); +} + +flatbuffers::Offset CreateMulOptions(flatbuffers::FlatBufferBuilder &_fbb, const MulOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); + +struct L2NormOptionsT : public flatbuffers::NativeTable { + typedef L2NormOptions TableType; + tflite::ActivationFunctionType fused_activation_function = tflite::ActivationFunctionType_NONE; +}; + +struct L2NormOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table { + typedef L2NormOptionsT NativeTableType; + typedef L2NormOptionsBuilder Builder; + enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE { + VT_FUSED_ACTIVATION_FUNCTION = 4 + }; + tflite::ActivationFunctionType fused_activation_function() const { + return static_cast(GetField(VT_FUSED_ACTIVATION_FUNCTION, 0)); + } + bool Verify(flatbuffers::Verifier &verifier) const { + return VerifyTableStart(verifier) && + VerifyField(verifier, VT_FUSED_ACTIVATION_FUNCTION, 1) && + verifier.EndTable(); + } + L2NormOptionsT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const; + void UnPackTo(L2NormOptionsT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const; + static flatbuffers::Offset Pack(flatbuffers::FlatBufferBuilder &_fbb, const L2NormOptionsT* _o, const flatbuffers::rehasher_function_t 
*_rehasher = nullptr); +}; + +struct L2NormOptionsBuilder { + typedef L2NormOptions Table; + flatbuffers::FlatBufferBuilder &fbb_; + flatbuffers::uoffset_t start_; + void add_fused_activation_function(tflite::ActivationFunctionType fused_activation_function) { + fbb_.AddElement(L2NormOptions::VT_FUSED_ACTIVATION_FUNCTION, static_cast(fused_activation_function), 0); + } + explicit L2NormOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb) + : fbb_(_fbb) { + start_ = fbb_.StartTable(); + } + flatbuffers::Offset Finish() { + const auto end = fbb_.EndTable(start_); + auto o = flatbuffers::Offset(end); + return o; + } +}; + +inline flatbuffers::Offset CreateL2NormOptions( + flatbuffers::FlatBufferBuilder &_fbb, + tflite::ActivationFunctionType fused_activation_function = tflite::ActivationFunctionType_NONE) { + L2NormOptionsBuilder builder_(_fbb); + builder_.add_fused_activation_function(fused_activation_function); + return builder_.Finish(); +} + +flatbuffers::Offset CreateL2NormOptions(flatbuffers::FlatBufferBuilder &_fbb, const L2NormOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); + +struct LocalResponseNormalizationOptionsT : public flatbuffers::NativeTable { + typedef LocalResponseNormalizationOptions TableType; + int32_t radius = 0; + float bias = 0.0f; + float alpha = 0.0f; + float beta = 0.0f; +}; + +struct LocalResponseNormalizationOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table { + typedef LocalResponseNormalizationOptionsT NativeTableType; + typedef LocalResponseNormalizationOptionsBuilder Builder; + enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE { + VT_RADIUS = 4, + VT_BIAS = 6, + VT_ALPHA = 8, + VT_BETA = 10 + }; + int32_t radius() const { + return GetField(VT_RADIUS, 0); + } + float bias() const { + return GetField(VT_BIAS, 0.0f); + } + float alpha() const { + return GetField(VT_ALPHA, 0.0f); + } + float beta() const { + return GetField(VT_BETA, 0.0f); + } + bool Verify(flatbuffers::Verifier 
&verifier) const { + return VerifyTableStart(verifier) && + VerifyField(verifier, VT_RADIUS, 4) && + VerifyField(verifier, VT_BIAS, 4) && + VerifyField(verifier, VT_ALPHA, 4) && + VerifyField(verifier, VT_BETA, 4) && + verifier.EndTable(); + } + LocalResponseNormalizationOptionsT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const; + void UnPackTo(LocalResponseNormalizationOptionsT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const; + static flatbuffers::Offset Pack(flatbuffers::FlatBufferBuilder &_fbb, const LocalResponseNormalizationOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); +}; + +struct LocalResponseNormalizationOptionsBuilder { + typedef LocalResponseNormalizationOptions Table; + flatbuffers::FlatBufferBuilder &fbb_; + flatbuffers::uoffset_t start_; + void add_radius(int32_t radius) { + fbb_.AddElement(LocalResponseNormalizationOptions::VT_RADIUS, radius, 0); + } + void add_bias(float bias) { + fbb_.AddElement(LocalResponseNormalizationOptions::VT_BIAS, bias, 0.0f); + } + void add_alpha(float alpha) { + fbb_.AddElement(LocalResponseNormalizationOptions::VT_ALPHA, alpha, 0.0f); + } + void add_beta(float beta) { + fbb_.AddElement(LocalResponseNormalizationOptions::VT_BETA, beta, 0.0f); + } + explicit LocalResponseNormalizationOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb) + : fbb_(_fbb) { + start_ = fbb_.StartTable(); + } + flatbuffers::Offset Finish() { + const auto end = fbb_.EndTable(start_); + auto o = flatbuffers::Offset(end); + return o; + } +}; + +inline flatbuffers::Offset CreateLocalResponseNormalizationOptions( + flatbuffers::FlatBufferBuilder &_fbb, + int32_t radius = 0, + float bias = 0.0f, + float alpha = 0.0f, + float beta = 0.0f) { + LocalResponseNormalizationOptionsBuilder builder_(_fbb); + builder_.add_beta(beta); + builder_.add_alpha(alpha); + builder_.add_bias(bias); + builder_.add_radius(radius); + return builder_.Finish(); +} + +flatbuffers::Offset 
CreateLocalResponseNormalizationOptions(flatbuffers::FlatBufferBuilder &_fbb, const LocalResponseNormalizationOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); + +struct LSTMOptionsT : public flatbuffers::NativeTable { + typedef LSTMOptions TableType; + tflite::ActivationFunctionType fused_activation_function = tflite::ActivationFunctionType_NONE; + float cell_clip = 0.0f; + float proj_clip = 0.0f; + tflite::LSTMKernelType kernel_type = tflite::LSTMKernelType_FULL; + bool asymmetric_quantize_inputs = false; +}; + +struct LSTMOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table { + typedef LSTMOptionsT NativeTableType; + typedef LSTMOptionsBuilder Builder; + enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE { + VT_FUSED_ACTIVATION_FUNCTION = 4, + VT_CELL_CLIP = 6, + VT_PROJ_CLIP = 8, + VT_KERNEL_TYPE = 10, + VT_ASYMMETRIC_QUANTIZE_INPUTS = 12 + }; + tflite::ActivationFunctionType fused_activation_function() const { + return static_cast(GetField(VT_FUSED_ACTIVATION_FUNCTION, 0)); + } + float cell_clip() const { + return GetField(VT_CELL_CLIP, 0.0f); + } + float proj_clip() const { + return GetField(VT_PROJ_CLIP, 0.0f); + } + tflite::LSTMKernelType kernel_type() const { + return static_cast(GetField(VT_KERNEL_TYPE, 0)); + } + bool asymmetric_quantize_inputs() const { + return GetField(VT_ASYMMETRIC_QUANTIZE_INPUTS, 0) != 0; + } + bool Verify(flatbuffers::Verifier &verifier) const { + return VerifyTableStart(verifier) && + VerifyField(verifier, VT_FUSED_ACTIVATION_FUNCTION, 1) && + VerifyField(verifier, VT_CELL_CLIP, 4) && + VerifyField(verifier, VT_PROJ_CLIP, 4) && + VerifyField(verifier, VT_KERNEL_TYPE, 1) && + VerifyField(verifier, VT_ASYMMETRIC_QUANTIZE_INPUTS, 1) && + verifier.EndTable(); + } + LSTMOptionsT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const; + void UnPackTo(LSTMOptionsT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const; + static flatbuffers::Offset 
Pack(flatbuffers::FlatBufferBuilder &_fbb, const LSTMOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); +}; + +struct LSTMOptionsBuilder { + typedef LSTMOptions Table; + flatbuffers::FlatBufferBuilder &fbb_; + flatbuffers::uoffset_t start_; + void add_fused_activation_function(tflite::ActivationFunctionType fused_activation_function) { + fbb_.AddElement(LSTMOptions::VT_FUSED_ACTIVATION_FUNCTION, static_cast(fused_activation_function), 0); + } + void add_cell_clip(float cell_clip) { + fbb_.AddElement(LSTMOptions::VT_CELL_CLIP, cell_clip, 0.0f); + } + void add_proj_clip(float proj_clip) { + fbb_.AddElement(LSTMOptions::VT_PROJ_CLIP, proj_clip, 0.0f); + } + void add_kernel_type(tflite::LSTMKernelType kernel_type) { + fbb_.AddElement(LSTMOptions::VT_KERNEL_TYPE, static_cast(kernel_type), 0); + } + void add_asymmetric_quantize_inputs(bool asymmetric_quantize_inputs) { + fbb_.AddElement(LSTMOptions::VT_ASYMMETRIC_QUANTIZE_INPUTS, static_cast(asymmetric_quantize_inputs), 0); + } + explicit LSTMOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb) + : fbb_(_fbb) { + start_ = fbb_.StartTable(); + } + flatbuffers::Offset Finish() { + const auto end = fbb_.EndTable(start_); + auto o = flatbuffers::Offset(end); + return o; + } +}; + +inline flatbuffers::Offset CreateLSTMOptions( + flatbuffers::FlatBufferBuilder &_fbb, + tflite::ActivationFunctionType fused_activation_function = tflite::ActivationFunctionType_NONE, + float cell_clip = 0.0f, + float proj_clip = 0.0f, + tflite::LSTMKernelType kernel_type = tflite::LSTMKernelType_FULL, + bool asymmetric_quantize_inputs = false) { + LSTMOptionsBuilder builder_(_fbb); + builder_.add_proj_clip(proj_clip); + builder_.add_cell_clip(cell_clip); + builder_.add_asymmetric_quantize_inputs(asymmetric_quantize_inputs); + builder_.add_kernel_type(kernel_type); + builder_.add_fused_activation_function(fused_activation_function); + return builder_.Finish(); +} + +flatbuffers::Offset 
CreateLSTMOptions(flatbuffers::FlatBufferBuilder &_fbb, const LSTMOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); + +struct UnidirectionalSequenceLSTMOptionsT : public flatbuffers::NativeTable { + typedef UnidirectionalSequenceLSTMOptions TableType; + tflite::ActivationFunctionType fused_activation_function = tflite::ActivationFunctionType_NONE; + float cell_clip = 0.0f; + float proj_clip = 0.0f; + bool time_major = false; + bool asymmetric_quantize_inputs = false; + bool diagonal_recurrent_tensors = false; +}; + +struct UnidirectionalSequenceLSTMOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table { + typedef UnidirectionalSequenceLSTMOptionsT NativeTableType; + typedef UnidirectionalSequenceLSTMOptionsBuilder Builder; + enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE { + VT_FUSED_ACTIVATION_FUNCTION = 4, + VT_CELL_CLIP = 6, + VT_PROJ_CLIP = 8, + VT_TIME_MAJOR = 10, + VT_ASYMMETRIC_QUANTIZE_INPUTS = 12, + VT_DIAGONAL_RECURRENT_TENSORS = 14 + }; + tflite::ActivationFunctionType fused_activation_function() const { + return static_cast(GetField(VT_FUSED_ACTIVATION_FUNCTION, 0)); + } + float cell_clip() const { + return GetField(VT_CELL_CLIP, 0.0f); + } + float proj_clip() const { + return GetField(VT_PROJ_CLIP, 0.0f); + } + bool time_major() const { + return GetField(VT_TIME_MAJOR, 0) != 0; + } + bool asymmetric_quantize_inputs() const { + return GetField(VT_ASYMMETRIC_QUANTIZE_INPUTS, 0) != 0; + } + bool diagonal_recurrent_tensors() const { + return GetField(VT_DIAGONAL_RECURRENT_TENSORS, 0) != 0; + } + bool Verify(flatbuffers::Verifier &verifier) const { + return VerifyTableStart(verifier) && + VerifyField(verifier, VT_FUSED_ACTIVATION_FUNCTION, 1) && + VerifyField(verifier, VT_CELL_CLIP, 4) && + VerifyField(verifier, VT_PROJ_CLIP, 4) && + VerifyField(verifier, VT_TIME_MAJOR, 1) && + VerifyField(verifier, VT_ASYMMETRIC_QUANTIZE_INPUTS, 1) && + VerifyField(verifier, VT_DIAGONAL_RECURRENT_TENSORS, 1) && + 
verifier.EndTable(); + } + UnidirectionalSequenceLSTMOptionsT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const; + void UnPackTo(UnidirectionalSequenceLSTMOptionsT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const; + static flatbuffers::Offset Pack(flatbuffers::FlatBufferBuilder &_fbb, const UnidirectionalSequenceLSTMOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); +}; + +struct UnidirectionalSequenceLSTMOptionsBuilder { + typedef UnidirectionalSequenceLSTMOptions Table; + flatbuffers::FlatBufferBuilder &fbb_; + flatbuffers::uoffset_t start_; + void add_fused_activation_function(tflite::ActivationFunctionType fused_activation_function) { + fbb_.AddElement(UnidirectionalSequenceLSTMOptions::VT_FUSED_ACTIVATION_FUNCTION, static_cast(fused_activation_function), 0); + } + void add_cell_clip(float cell_clip) { + fbb_.AddElement(UnidirectionalSequenceLSTMOptions::VT_CELL_CLIP, cell_clip, 0.0f); + } + void add_proj_clip(float proj_clip) { + fbb_.AddElement(UnidirectionalSequenceLSTMOptions::VT_PROJ_CLIP, proj_clip, 0.0f); + } + void add_time_major(bool time_major) { + fbb_.AddElement(UnidirectionalSequenceLSTMOptions::VT_TIME_MAJOR, static_cast(time_major), 0); + } + void add_asymmetric_quantize_inputs(bool asymmetric_quantize_inputs) { + fbb_.AddElement(UnidirectionalSequenceLSTMOptions::VT_ASYMMETRIC_QUANTIZE_INPUTS, static_cast(asymmetric_quantize_inputs), 0); + } + void add_diagonal_recurrent_tensors(bool diagonal_recurrent_tensors) { + fbb_.AddElement(UnidirectionalSequenceLSTMOptions::VT_DIAGONAL_RECURRENT_TENSORS, static_cast(diagonal_recurrent_tensors), 0); + } + explicit UnidirectionalSequenceLSTMOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb) + : fbb_(_fbb) { + start_ = fbb_.StartTable(); + } + flatbuffers::Offset Finish() { + const auto end = fbb_.EndTable(start_); + auto o = flatbuffers::Offset(end); + return o; + } +}; + +inline flatbuffers::Offset 
CreateUnidirectionalSequenceLSTMOptions( + flatbuffers::FlatBufferBuilder &_fbb, + tflite::ActivationFunctionType fused_activation_function = tflite::ActivationFunctionType_NONE, + float cell_clip = 0.0f, + float proj_clip = 0.0f, + bool time_major = false, + bool asymmetric_quantize_inputs = false, + bool diagonal_recurrent_tensors = false) { + UnidirectionalSequenceLSTMOptionsBuilder builder_(_fbb); + builder_.add_proj_clip(proj_clip); + builder_.add_cell_clip(cell_clip); + builder_.add_diagonal_recurrent_tensors(diagonal_recurrent_tensors); + builder_.add_asymmetric_quantize_inputs(asymmetric_quantize_inputs); + builder_.add_time_major(time_major); + builder_.add_fused_activation_function(fused_activation_function); + return builder_.Finish(); +} + +flatbuffers::Offset CreateUnidirectionalSequenceLSTMOptions(flatbuffers::FlatBufferBuilder &_fbb, const UnidirectionalSequenceLSTMOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); + +struct BidirectionalSequenceLSTMOptionsT : public flatbuffers::NativeTable { + typedef BidirectionalSequenceLSTMOptions TableType; + tflite::ActivationFunctionType fused_activation_function = tflite::ActivationFunctionType_NONE; + float cell_clip = 0.0f; + float proj_clip = 0.0f; + bool merge_outputs = false; + bool time_major = true; + bool asymmetric_quantize_inputs = false; +}; + +struct BidirectionalSequenceLSTMOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table { + typedef BidirectionalSequenceLSTMOptionsT NativeTableType; + typedef BidirectionalSequenceLSTMOptionsBuilder Builder; + enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE { + VT_FUSED_ACTIVATION_FUNCTION = 4, + VT_CELL_CLIP = 6, + VT_PROJ_CLIP = 8, + VT_MERGE_OUTPUTS = 10, + VT_TIME_MAJOR = 12, + VT_ASYMMETRIC_QUANTIZE_INPUTS = 14 + }; + tflite::ActivationFunctionType fused_activation_function() const { + return static_cast(GetField(VT_FUSED_ACTIVATION_FUNCTION, 0)); + } + float cell_clip() const { + return 
GetField(VT_CELL_CLIP, 0.0f); + } + float proj_clip() const { + return GetField(VT_PROJ_CLIP, 0.0f); + } + bool merge_outputs() const { + return GetField(VT_MERGE_OUTPUTS, 0) != 0; + } + bool time_major() const { + return GetField(VT_TIME_MAJOR, 1) != 0; + } + bool asymmetric_quantize_inputs() const { + return GetField(VT_ASYMMETRIC_QUANTIZE_INPUTS, 0) != 0; + } + bool Verify(flatbuffers::Verifier &verifier) const { + return VerifyTableStart(verifier) && + VerifyField(verifier, VT_FUSED_ACTIVATION_FUNCTION, 1) && + VerifyField(verifier, VT_CELL_CLIP, 4) && + VerifyField(verifier, VT_PROJ_CLIP, 4) && + VerifyField(verifier, VT_MERGE_OUTPUTS, 1) && + VerifyField(verifier, VT_TIME_MAJOR, 1) && + VerifyField(verifier, VT_ASYMMETRIC_QUANTIZE_INPUTS, 1) && + verifier.EndTable(); + } + BidirectionalSequenceLSTMOptionsT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const; + void UnPackTo(BidirectionalSequenceLSTMOptionsT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const; + static flatbuffers::Offset Pack(flatbuffers::FlatBufferBuilder &_fbb, const BidirectionalSequenceLSTMOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); +}; + +struct BidirectionalSequenceLSTMOptionsBuilder { + typedef BidirectionalSequenceLSTMOptions Table; + flatbuffers::FlatBufferBuilder &fbb_; + flatbuffers::uoffset_t start_; + void add_fused_activation_function(tflite::ActivationFunctionType fused_activation_function) { + fbb_.AddElement(BidirectionalSequenceLSTMOptions::VT_FUSED_ACTIVATION_FUNCTION, static_cast(fused_activation_function), 0); + } + void add_cell_clip(float cell_clip) { + fbb_.AddElement(BidirectionalSequenceLSTMOptions::VT_CELL_CLIP, cell_clip, 0.0f); + } + void add_proj_clip(float proj_clip) { + fbb_.AddElement(BidirectionalSequenceLSTMOptions::VT_PROJ_CLIP, proj_clip, 0.0f); + } + void add_merge_outputs(bool merge_outputs) { + fbb_.AddElement(BidirectionalSequenceLSTMOptions::VT_MERGE_OUTPUTS, 
static_cast(merge_outputs), 0); + } + void add_time_major(bool time_major) { + fbb_.AddElement(BidirectionalSequenceLSTMOptions::VT_TIME_MAJOR, static_cast(time_major), 1); + } + void add_asymmetric_quantize_inputs(bool asymmetric_quantize_inputs) { + fbb_.AddElement(BidirectionalSequenceLSTMOptions::VT_ASYMMETRIC_QUANTIZE_INPUTS, static_cast(asymmetric_quantize_inputs), 0); + } + explicit BidirectionalSequenceLSTMOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb) + : fbb_(_fbb) { + start_ = fbb_.StartTable(); + } + flatbuffers::Offset Finish() { + const auto end = fbb_.EndTable(start_); + auto o = flatbuffers::Offset(end); + return o; + } +}; + +inline flatbuffers::Offset CreateBidirectionalSequenceLSTMOptions( + flatbuffers::FlatBufferBuilder &_fbb, + tflite::ActivationFunctionType fused_activation_function = tflite::ActivationFunctionType_NONE, + float cell_clip = 0.0f, + float proj_clip = 0.0f, + bool merge_outputs = false, + bool time_major = true, + bool asymmetric_quantize_inputs = false) { + BidirectionalSequenceLSTMOptionsBuilder builder_(_fbb); + builder_.add_proj_clip(proj_clip); + builder_.add_cell_clip(cell_clip); + builder_.add_asymmetric_quantize_inputs(asymmetric_quantize_inputs); + builder_.add_time_major(time_major); + builder_.add_merge_outputs(merge_outputs); + builder_.add_fused_activation_function(fused_activation_function); + return builder_.Finish(); +} + +flatbuffers::Offset CreateBidirectionalSequenceLSTMOptions(flatbuffers::FlatBufferBuilder &_fbb, const BidirectionalSequenceLSTMOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); + +struct ResizeBilinearOptionsT : public flatbuffers::NativeTable { + typedef ResizeBilinearOptions TableType; + bool align_corners = false; + bool half_pixel_centers = false; +}; + +struct ResizeBilinearOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table { + typedef ResizeBilinearOptionsT NativeTableType; + typedef ResizeBilinearOptionsBuilder Builder; + enum 
FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE { + VT_ALIGN_CORNERS = 8, + VT_HALF_PIXEL_CENTERS = 10 + }; + bool align_corners() const { + return GetField(VT_ALIGN_CORNERS, 0) != 0; + } + bool half_pixel_centers() const { + return GetField(VT_HALF_PIXEL_CENTERS, 0) != 0; + } + bool Verify(flatbuffers::Verifier &verifier) const { + return VerifyTableStart(verifier) && + VerifyField(verifier, VT_ALIGN_CORNERS, 1) && + VerifyField(verifier, VT_HALF_PIXEL_CENTERS, 1) && + verifier.EndTable(); + } + ResizeBilinearOptionsT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const; + void UnPackTo(ResizeBilinearOptionsT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const; + static flatbuffers::Offset Pack(flatbuffers::FlatBufferBuilder &_fbb, const ResizeBilinearOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); +}; + +struct ResizeBilinearOptionsBuilder { + typedef ResizeBilinearOptions Table; + flatbuffers::FlatBufferBuilder &fbb_; + flatbuffers::uoffset_t start_; + void add_align_corners(bool align_corners) { + fbb_.AddElement(ResizeBilinearOptions::VT_ALIGN_CORNERS, static_cast(align_corners), 0); + } + void add_half_pixel_centers(bool half_pixel_centers) { + fbb_.AddElement(ResizeBilinearOptions::VT_HALF_PIXEL_CENTERS, static_cast(half_pixel_centers), 0); + } + explicit ResizeBilinearOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb) + : fbb_(_fbb) { + start_ = fbb_.StartTable(); + } + flatbuffers::Offset Finish() { + const auto end = fbb_.EndTable(start_); + auto o = flatbuffers::Offset(end); + return o; + } +}; + +inline flatbuffers::Offset CreateResizeBilinearOptions( + flatbuffers::FlatBufferBuilder &_fbb, + bool align_corners = false, + bool half_pixel_centers = false) { + ResizeBilinearOptionsBuilder builder_(_fbb); + builder_.add_half_pixel_centers(half_pixel_centers); + builder_.add_align_corners(align_corners); + return builder_.Finish(); +} + +flatbuffers::Offset 
CreateResizeBilinearOptions(flatbuffers::FlatBufferBuilder &_fbb, const ResizeBilinearOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); + +struct ResizeNearestNeighborOptionsT : public flatbuffers::NativeTable { + typedef ResizeNearestNeighborOptions TableType; + bool align_corners = false; + bool half_pixel_centers = false; +}; + +struct ResizeNearestNeighborOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table { + typedef ResizeNearestNeighborOptionsT NativeTableType; + typedef ResizeNearestNeighborOptionsBuilder Builder; + enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE { + VT_ALIGN_CORNERS = 4, + VT_HALF_PIXEL_CENTERS = 6 + }; + bool align_corners() const { + return GetField(VT_ALIGN_CORNERS, 0) != 0; + } + bool half_pixel_centers() const { + return GetField(VT_HALF_PIXEL_CENTERS, 0) != 0; + } + bool Verify(flatbuffers::Verifier &verifier) const { + return VerifyTableStart(verifier) && + VerifyField(verifier, VT_ALIGN_CORNERS, 1) && + VerifyField(verifier, VT_HALF_PIXEL_CENTERS, 1) && + verifier.EndTable(); + } + ResizeNearestNeighborOptionsT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const; + void UnPackTo(ResizeNearestNeighborOptionsT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const; + static flatbuffers::Offset Pack(flatbuffers::FlatBufferBuilder &_fbb, const ResizeNearestNeighborOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); +}; + +struct ResizeNearestNeighborOptionsBuilder { + typedef ResizeNearestNeighborOptions Table; + flatbuffers::FlatBufferBuilder &fbb_; + flatbuffers::uoffset_t start_; + void add_align_corners(bool align_corners) { + fbb_.AddElement(ResizeNearestNeighborOptions::VT_ALIGN_CORNERS, static_cast(align_corners), 0); + } + void add_half_pixel_centers(bool half_pixel_centers) { + fbb_.AddElement(ResizeNearestNeighborOptions::VT_HALF_PIXEL_CENTERS, static_cast(half_pixel_centers), 0); + } + explicit 
ResizeNearestNeighborOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb) + : fbb_(_fbb) { + start_ = fbb_.StartTable(); + } + flatbuffers::Offset Finish() { + const auto end = fbb_.EndTable(start_); + auto o = flatbuffers::Offset(end); + return o; + } +}; + +inline flatbuffers::Offset CreateResizeNearestNeighborOptions( + flatbuffers::FlatBufferBuilder &_fbb, + bool align_corners = false, + bool half_pixel_centers = false) { + ResizeNearestNeighborOptionsBuilder builder_(_fbb); + builder_.add_half_pixel_centers(half_pixel_centers); + builder_.add_align_corners(align_corners); + return builder_.Finish(); +} + +flatbuffers::Offset CreateResizeNearestNeighborOptions(flatbuffers::FlatBufferBuilder &_fbb, const ResizeNearestNeighborOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); + +struct CallOptionsT : public flatbuffers::NativeTable { + typedef CallOptions TableType; + uint32_t subgraph = 0; +}; + +struct CallOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table { + typedef CallOptionsT NativeTableType; + typedef CallOptionsBuilder Builder; + enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE { + VT_SUBGRAPH = 4 + }; + uint32_t subgraph() const { + return GetField(VT_SUBGRAPH, 0); + } + bool Verify(flatbuffers::Verifier &verifier) const { + return VerifyTableStart(verifier) && + VerifyField(verifier, VT_SUBGRAPH, 4) && + verifier.EndTable(); + } + CallOptionsT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const; + void UnPackTo(CallOptionsT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const; + static flatbuffers::Offset Pack(flatbuffers::FlatBufferBuilder &_fbb, const CallOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); +}; + +struct CallOptionsBuilder { + typedef CallOptions Table; + flatbuffers::FlatBufferBuilder &fbb_; + flatbuffers::uoffset_t start_; + void add_subgraph(uint32_t subgraph) { + fbb_.AddElement(CallOptions::VT_SUBGRAPH, subgraph, 
0); + } + explicit CallOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb) + : fbb_(_fbb) { + start_ = fbb_.StartTable(); + } + flatbuffers::Offset Finish() { + const auto end = fbb_.EndTable(start_); + auto o = flatbuffers::Offset(end); + return o; + } +}; + +inline flatbuffers::Offset CreateCallOptions( + flatbuffers::FlatBufferBuilder &_fbb, + uint32_t subgraph = 0) { + CallOptionsBuilder builder_(_fbb); + builder_.add_subgraph(subgraph); + return builder_.Finish(); +} + +flatbuffers::Offset CreateCallOptions(flatbuffers::FlatBufferBuilder &_fbb, const CallOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); + +struct PadOptionsT : public flatbuffers::NativeTable { + typedef PadOptions TableType; +}; + +struct PadOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table { + typedef PadOptionsT NativeTableType; + typedef PadOptionsBuilder Builder; + bool Verify(flatbuffers::Verifier &verifier) const { + return VerifyTableStart(verifier) && + verifier.EndTable(); + } + PadOptionsT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const; + void UnPackTo(PadOptionsT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const; + static flatbuffers::Offset Pack(flatbuffers::FlatBufferBuilder &_fbb, const PadOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); +}; + +struct PadOptionsBuilder { + typedef PadOptions Table; + flatbuffers::FlatBufferBuilder &fbb_; + flatbuffers::uoffset_t start_; + explicit PadOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb) + : fbb_(_fbb) { + start_ = fbb_.StartTable(); + } + flatbuffers::Offset Finish() { + const auto end = fbb_.EndTable(start_); + auto o = flatbuffers::Offset(end); + return o; + } +}; + +inline flatbuffers::Offset CreatePadOptions( + flatbuffers::FlatBufferBuilder &_fbb) { + PadOptionsBuilder builder_(_fbb); + return builder_.Finish(); +} + +flatbuffers::Offset CreatePadOptions(flatbuffers::FlatBufferBuilder &_fbb, const PadOptionsT 
*_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); + +struct PadV2OptionsT : public flatbuffers::NativeTable { + typedef PadV2Options TableType; +}; + +struct PadV2Options FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table { + typedef PadV2OptionsT NativeTableType; + typedef PadV2OptionsBuilder Builder; + bool Verify(flatbuffers::Verifier &verifier) const { + return VerifyTableStart(verifier) && + verifier.EndTable(); + } + PadV2OptionsT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const; + void UnPackTo(PadV2OptionsT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const; + static flatbuffers::Offset Pack(flatbuffers::FlatBufferBuilder &_fbb, const PadV2OptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); +}; + +struct PadV2OptionsBuilder { + typedef PadV2Options Table; + flatbuffers::FlatBufferBuilder &fbb_; + flatbuffers::uoffset_t start_; + explicit PadV2OptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb) + : fbb_(_fbb) { + start_ = fbb_.StartTable(); + } + flatbuffers::Offset Finish() { + const auto end = fbb_.EndTable(start_); + auto o = flatbuffers::Offset(end); + return o; + } +}; + +inline flatbuffers::Offset CreatePadV2Options( + flatbuffers::FlatBufferBuilder &_fbb) { + PadV2OptionsBuilder builder_(_fbb); + return builder_.Finish(); +} + +flatbuffers::Offset CreatePadV2Options(flatbuffers::FlatBufferBuilder &_fbb, const PadV2OptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); + +struct ReshapeOptionsT : public flatbuffers::NativeTable { + typedef ReshapeOptions TableType; + std::vector new_shape{}; +}; + +struct ReshapeOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table { + typedef ReshapeOptionsT NativeTableType; + typedef ReshapeOptionsBuilder Builder; + enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE { + VT_NEW_SHAPE = 4 + }; + const flatbuffers::Vector *new_shape() const { + return GetPointer *>(VT_NEW_SHAPE); + } + bool 
Verify(flatbuffers::Verifier &verifier) const { + return VerifyTableStart(verifier) && + VerifyOffset(verifier, VT_NEW_SHAPE) && + verifier.VerifyVector(new_shape()) && + verifier.EndTable(); + } + ReshapeOptionsT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const; + void UnPackTo(ReshapeOptionsT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const; + static flatbuffers::Offset Pack(flatbuffers::FlatBufferBuilder &_fbb, const ReshapeOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); +}; + +struct ReshapeOptionsBuilder { + typedef ReshapeOptions Table; + flatbuffers::FlatBufferBuilder &fbb_; + flatbuffers::uoffset_t start_; + void add_new_shape(flatbuffers::Offset> new_shape) { + fbb_.AddOffset(ReshapeOptions::VT_NEW_SHAPE, new_shape); + } + explicit ReshapeOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb) + : fbb_(_fbb) { + start_ = fbb_.StartTable(); + } + flatbuffers::Offset Finish() { + const auto end = fbb_.EndTable(start_); + auto o = flatbuffers::Offset(end); + return o; + } +}; + +inline flatbuffers::Offset CreateReshapeOptions( + flatbuffers::FlatBufferBuilder &_fbb, + flatbuffers::Offset> new_shape = 0) { + ReshapeOptionsBuilder builder_(_fbb); + builder_.add_new_shape(new_shape); + return builder_.Finish(); +} + +inline flatbuffers::Offset CreateReshapeOptionsDirect( + flatbuffers::FlatBufferBuilder &_fbb, + const std::vector *new_shape = nullptr) { + auto new_shape__ = new_shape ? 
_fbb.CreateVector(*new_shape) : 0; + return tflite::CreateReshapeOptions( + _fbb, + new_shape__); +} + +flatbuffers::Offset CreateReshapeOptions(flatbuffers::FlatBufferBuilder &_fbb, const ReshapeOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); + +struct SpaceToBatchNDOptionsT : public flatbuffers::NativeTable { + typedef SpaceToBatchNDOptions TableType; +}; + +struct SpaceToBatchNDOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table { + typedef SpaceToBatchNDOptionsT NativeTableType; + typedef SpaceToBatchNDOptionsBuilder Builder; + bool Verify(flatbuffers::Verifier &verifier) const { + return VerifyTableStart(verifier) && + verifier.EndTable(); + } + SpaceToBatchNDOptionsT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const; + void UnPackTo(SpaceToBatchNDOptionsT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const; + static flatbuffers::Offset Pack(flatbuffers::FlatBufferBuilder &_fbb, const SpaceToBatchNDOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); +}; + +struct SpaceToBatchNDOptionsBuilder { + typedef SpaceToBatchNDOptions Table; + flatbuffers::FlatBufferBuilder &fbb_; + flatbuffers::uoffset_t start_; + explicit SpaceToBatchNDOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb) + : fbb_(_fbb) { + start_ = fbb_.StartTable(); + } + flatbuffers::Offset Finish() { + const auto end = fbb_.EndTable(start_); + auto o = flatbuffers::Offset(end); + return o; + } +}; + +inline flatbuffers::Offset CreateSpaceToBatchNDOptions( + flatbuffers::FlatBufferBuilder &_fbb) { + SpaceToBatchNDOptionsBuilder builder_(_fbb); + return builder_.Finish(); +} + +flatbuffers::Offset CreateSpaceToBatchNDOptions(flatbuffers::FlatBufferBuilder &_fbb, const SpaceToBatchNDOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); + +struct BatchToSpaceNDOptionsT : public flatbuffers::NativeTable { + typedef BatchToSpaceNDOptions TableType; +}; + +struct 
BatchToSpaceNDOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table { + typedef BatchToSpaceNDOptionsT NativeTableType; + typedef BatchToSpaceNDOptionsBuilder Builder; + bool Verify(flatbuffers::Verifier &verifier) const { + return VerifyTableStart(verifier) && + verifier.EndTable(); + } + BatchToSpaceNDOptionsT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const; + void UnPackTo(BatchToSpaceNDOptionsT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const; + static flatbuffers::Offset Pack(flatbuffers::FlatBufferBuilder &_fbb, const BatchToSpaceNDOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); +}; + +struct BatchToSpaceNDOptionsBuilder { + typedef BatchToSpaceNDOptions Table; + flatbuffers::FlatBufferBuilder &fbb_; + flatbuffers::uoffset_t start_; + explicit BatchToSpaceNDOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb) + : fbb_(_fbb) { + start_ = fbb_.StartTable(); + } + flatbuffers::Offset Finish() { + const auto end = fbb_.EndTable(start_); + auto o = flatbuffers::Offset(end); + return o; + } +}; + +inline flatbuffers::Offset CreateBatchToSpaceNDOptions( + flatbuffers::FlatBufferBuilder &_fbb) { + BatchToSpaceNDOptionsBuilder builder_(_fbb); + return builder_.Finish(); +} + +flatbuffers::Offset CreateBatchToSpaceNDOptions(flatbuffers::FlatBufferBuilder &_fbb, const BatchToSpaceNDOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); + +struct SkipGramOptionsT : public flatbuffers::NativeTable { + typedef SkipGramOptions TableType; + int32_t ngram_size = 0; + int32_t max_skip_size = 0; + bool include_all_ngrams = false; +}; + +struct SkipGramOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table { + typedef SkipGramOptionsT NativeTableType; + typedef SkipGramOptionsBuilder Builder; + enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE { + VT_NGRAM_SIZE = 4, + VT_MAX_SKIP_SIZE = 6, + VT_INCLUDE_ALL_NGRAMS = 8 + }; + int32_t ngram_size() const { + 
return GetField(VT_NGRAM_SIZE, 0); + } + int32_t max_skip_size() const { + return GetField(VT_MAX_SKIP_SIZE, 0); + } + bool include_all_ngrams() const { + return GetField(VT_INCLUDE_ALL_NGRAMS, 0) != 0; + } + bool Verify(flatbuffers::Verifier &verifier) const { + return VerifyTableStart(verifier) && + VerifyField(verifier, VT_NGRAM_SIZE, 4) && + VerifyField(verifier, VT_MAX_SKIP_SIZE, 4) && + VerifyField(verifier, VT_INCLUDE_ALL_NGRAMS, 1) && + verifier.EndTable(); + } + SkipGramOptionsT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const; + void UnPackTo(SkipGramOptionsT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const; + static flatbuffers::Offset Pack(flatbuffers::FlatBufferBuilder &_fbb, const SkipGramOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); +}; + +struct SkipGramOptionsBuilder { + typedef SkipGramOptions Table; + flatbuffers::FlatBufferBuilder &fbb_; + flatbuffers::uoffset_t start_; + void add_ngram_size(int32_t ngram_size) { + fbb_.AddElement(SkipGramOptions::VT_NGRAM_SIZE, ngram_size, 0); + } + void add_max_skip_size(int32_t max_skip_size) { + fbb_.AddElement(SkipGramOptions::VT_MAX_SKIP_SIZE, max_skip_size, 0); + } + void add_include_all_ngrams(bool include_all_ngrams) { + fbb_.AddElement(SkipGramOptions::VT_INCLUDE_ALL_NGRAMS, static_cast(include_all_ngrams), 0); + } + explicit SkipGramOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb) + : fbb_(_fbb) { + start_ = fbb_.StartTable(); + } + flatbuffers::Offset Finish() { + const auto end = fbb_.EndTable(start_); + auto o = flatbuffers::Offset(end); + return o; + } +}; + +inline flatbuffers::Offset CreateSkipGramOptions( + flatbuffers::FlatBufferBuilder &_fbb, + int32_t ngram_size = 0, + int32_t max_skip_size = 0, + bool include_all_ngrams = false) { + SkipGramOptionsBuilder builder_(_fbb); + builder_.add_max_skip_size(max_skip_size); + builder_.add_ngram_size(ngram_size); + builder_.add_include_all_ngrams(include_all_ngrams); + 
return builder_.Finish(); +} + +flatbuffers::Offset CreateSkipGramOptions(flatbuffers::FlatBufferBuilder &_fbb, const SkipGramOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); + +struct SpaceToDepthOptionsT : public flatbuffers::NativeTable { + typedef SpaceToDepthOptions TableType; + int32_t block_size = 0; +}; + +struct SpaceToDepthOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table { + typedef SpaceToDepthOptionsT NativeTableType; + typedef SpaceToDepthOptionsBuilder Builder; + enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE { + VT_BLOCK_SIZE = 4 + }; + int32_t block_size() const { + return GetField(VT_BLOCK_SIZE, 0); + } + bool Verify(flatbuffers::Verifier &verifier) const { + return VerifyTableStart(verifier) && + VerifyField(verifier, VT_BLOCK_SIZE, 4) && + verifier.EndTable(); + } + SpaceToDepthOptionsT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const; + void UnPackTo(SpaceToDepthOptionsT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const; + static flatbuffers::Offset Pack(flatbuffers::FlatBufferBuilder &_fbb, const SpaceToDepthOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); +}; + +struct SpaceToDepthOptionsBuilder { + typedef SpaceToDepthOptions Table; + flatbuffers::FlatBufferBuilder &fbb_; + flatbuffers::uoffset_t start_; + void add_block_size(int32_t block_size) { + fbb_.AddElement(SpaceToDepthOptions::VT_BLOCK_SIZE, block_size, 0); + } + explicit SpaceToDepthOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb) + : fbb_(_fbb) { + start_ = fbb_.StartTable(); + } + flatbuffers::Offset Finish() { + const auto end = fbb_.EndTable(start_); + auto o = flatbuffers::Offset(end); + return o; + } +}; + +inline flatbuffers::Offset CreateSpaceToDepthOptions( + flatbuffers::FlatBufferBuilder &_fbb, + int32_t block_size = 0) { + SpaceToDepthOptionsBuilder builder_(_fbb); + builder_.add_block_size(block_size); + return builder_.Finish(); +} + 
+flatbuffers::Offset CreateSpaceToDepthOptions(flatbuffers::FlatBufferBuilder &_fbb, const SpaceToDepthOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); + +struct DepthToSpaceOptionsT : public flatbuffers::NativeTable { + typedef DepthToSpaceOptions TableType; + int32_t block_size = 0; +}; + +struct DepthToSpaceOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table { + typedef DepthToSpaceOptionsT NativeTableType; + typedef DepthToSpaceOptionsBuilder Builder; + enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE { + VT_BLOCK_SIZE = 4 + }; + int32_t block_size() const { + return GetField(VT_BLOCK_SIZE, 0); + } + bool Verify(flatbuffers::Verifier &verifier) const { + return VerifyTableStart(verifier) && + VerifyField(verifier, VT_BLOCK_SIZE, 4) && + verifier.EndTable(); + } + DepthToSpaceOptionsT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const; + void UnPackTo(DepthToSpaceOptionsT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const; + static flatbuffers::Offset Pack(flatbuffers::FlatBufferBuilder &_fbb, const DepthToSpaceOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); +}; + +struct DepthToSpaceOptionsBuilder { + typedef DepthToSpaceOptions Table; + flatbuffers::FlatBufferBuilder &fbb_; + flatbuffers::uoffset_t start_; + void add_block_size(int32_t block_size) { + fbb_.AddElement(DepthToSpaceOptions::VT_BLOCK_SIZE, block_size, 0); + } + explicit DepthToSpaceOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb) + : fbb_(_fbb) { + start_ = fbb_.StartTable(); + } + flatbuffers::Offset Finish() { + const auto end = fbb_.EndTable(start_); + auto o = flatbuffers::Offset(end); + return o; + } +}; + +inline flatbuffers::Offset CreateDepthToSpaceOptions( + flatbuffers::FlatBufferBuilder &_fbb, + int32_t block_size = 0) { + DepthToSpaceOptionsBuilder builder_(_fbb); + builder_.add_block_size(block_size); + return builder_.Finish(); +} + +flatbuffers::Offset 
CreateDepthToSpaceOptions(flatbuffers::FlatBufferBuilder &_fbb, const DepthToSpaceOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); + +struct SubOptionsT : public flatbuffers::NativeTable { + typedef SubOptions TableType; + tflite::ActivationFunctionType fused_activation_function = tflite::ActivationFunctionType_NONE; + bool pot_scale_int16 = true; +}; + +struct SubOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table { + typedef SubOptionsT NativeTableType; + typedef SubOptionsBuilder Builder; + enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE { + VT_FUSED_ACTIVATION_FUNCTION = 4, + VT_POT_SCALE_INT16 = 6 + }; + tflite::ActivationFunctionType fused_activation_function() const { + return static_cast(GetField(VT_FUSED_ACTIVATION_FUNCTION, 0)); + } + bool pot_scale_int16() const { + return GetField(VT_POT_SCALE_INT16, 1) != 0; + } + bool Verify(flatbuffers::Verifier &verifier) const { + return VerifyTableStart(verifier) && + VerifyField(verifier, VT_FUSED_ACTIVATION_FUNCTION, 1) && + VerifyField(verifier, VT_POT_SCALE_INT16, 1) && + verifier.EndTable(); + } + SubOptionsT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const; + void UnPackTo(SubOptionsT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const; + static flatbuffers::Offset Pack(flatbuffers::FlatBufferBuilder &_fbb, const SubOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); +}; + +struct SubOptionsBuilder { + typedef SubOptions Table; + flatbuffers::FlatBufferBuilder &fbb_; + flatbuffers::uoffset_t start_; + void add_fused_activation_function(tflite::ActivationFunctionType fused_activation_function) { + fbb_.AddElement(SubOptions::VT_FUSED_ACTIVATION_FUNCTION, static_cast(fused_activation_function), 0); + } + void add_pot_scale_int16(bool pot_scale_int16) { + fbb_.AddElement(SubOptions::VT_POT_SCALE_INT16, static_cast(pot_scale_int16), 1); + } + explicit 
SubOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb) + : fbb_(_fbb) { + start_ = fbb_.StartTable(); + } + flatbuffers::Offset Finish() { + const auto end = fbb_.EndTable(start_); + auto o = flatbuffers::Offset(end); + return o; + } +}; + +inline flatbuffers::Offset CreateSubOptions( + flatbuffers::FlatBufferBuilder &_fbb, + tflite::ActivationFunctionType fused_activation_function = tflite::ActivationFunctionType_NONE, + bool pot_scale_int16 = true) { + SubOptionsBuilder builder_(_fbb); + builder_.add_pot_scale_int16(pot_scale_int16); + builder_.add_fused_activation_function(fused_activation_function); + return builder_.Finish(); +} + +flatbuffers::Offset CreateSubOptions(flatbuffers::FlatBufferBuilder &_fbb, const SubOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); + +struct DivOptionsT : public flatbuffers::NativeTable { + typedef DivOptions TableType; + tflite::ActivationFunctionType fused_activation_function = tflite::ActivationFunctionType_NONE; +}; + +struct DivOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table { + typedef DivOptionsT NativeTableType; + typedef DivOptionsBuilder Builder; + enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE { + VT_FUSED_ACTIVATION_FUNCTION = 4 + }; + tflite::ActivationFunctionType fused_activation_function() const { + return static_cast(GetField(VT_FUSED_ACTIVATION_FUNCTION, 0)); + } + bool Verify(flatbuffers::Verifier &verifier) const { + return VerifyTableStart(verifier) && + VerifyField(verifier, VT_FUSED_ACTIVATION_FUNCTION, 1) && + verifier.EndTable(); + } + DivOptionsT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const; + void UnPackTo(DivOptionsT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const; + static flatbuffers::Offset Pack(flatbuffers::FlatBufferBuilder &_fbb, const DivOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); +}; + +struct DivOptionsBuilder { + typedef DivOptions Table; + 
flatbuffers::FlatBufferBuilder &fbb_; + flatbuffers::uoffset_t start_; + void add_fused_activation_function(tflite::ActivationFunctionType fused_activation_function) { + fbb_.AddElement(DivOptions::VT_FUSED_ACTIVATION_FUNCTION, static_cast(fused_activation_function), 0); + } + explicit DivOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb) + : fbb_(_fbb) { + start_ = fbb_.StartTable(); + } + flatbuffers::Offset Finish() { + const auto end = fbb_.EndTable(start_); + auto o = flatbuffers::Offset(end); + return o; + } +}; + +inline flatbuffers::Offset CreateDivOptions( + flatbuffers::FlatBufferBuilder &_fbb, + tflite::ActivationFunctionType fused_activation_function = tflite::ActivationFunctionType_NONE) { + DivOptionsBuilder builder_(_fbb); + builder_.add_fused_activation_function(fused_activation_function); + return builder_.Finish(); +} + +flatbuffers::Offset CreateDivOptions(flatbuffers::FlatBufferBuilder &_fbb, const DivOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); + +struct TopKV2OptionsT : public flatbuffers::NativeTable { + typedef TopKV2Options TableType; +}; + +struct TopKV2Options FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table { + typedef TopKV2OptionsT NativeTableType; + typedef TopKV2OptionsBuilder Builder; + bool Verify(flatbuffers::Verifier &verifier) const { + return VerifyTableStart(verifier) && + verifier.EndTable(); + } + TopKV2OptionsT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const; + void UnPackTo(TopKV2OptionsT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const; + static flatbuffers::Offset Pack(flatbuffers::FlatBufferBuilder &_fbb, const TopKV2OptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); +}; + +struct TopKV2OptionsBuilder { + typedef TopKV2Options Table; + flatbuffers::FlatBufferBuilder &fbb_; + flatbuffers::uoffset_t start_; + explicit TopKV2OptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb) + : fbb_(_fbb) { + start_ = 
fbb_.StartTable(); + } + flatbuffers::Offset Finish() { + const auto end = fbb_.EndTable(start_); + auto o = flatbuffers::Offset(end); + return o; + } +}; + +inline flatbuffers::Offset CreateTopKV2Options( + flatbuffers::FlatBufferBuilder &_fbb) { + TopKV2OptionsBuilder builder_(_fbb); + return builder_.Finish(); +} + +flatbuffers::Offset CreateTopKV2Options(flatbuffers::FlatBufferBuilder &_fbb, const TopKV2OptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); + +struct EmbeddingLookupSparseOptionsT : public flatbuffers::NativeTable { + typedef EmbeddingLookupSparseOptions TableType; + tflite::CombinerType combiner = tflite::CombinerType_SUM; +}; + +struct EmbeddingLookupSparseOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table { + typedef EmbeddingLookupSparseOptionsT NativeTableType; + typedef EmbeddingLookupSparseOptionsBuilder Builder; + enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE { + VT_COMBINER = 4 + }; + tflite::CombinerType combiner() const { + return static_cast(GetField(VT_COMBINER, 0)); + } + bool Verify(flatbuffers::Verifier &verifier) const { + return VerifyTableStart(verifier) && + VerifyField(verifier, VT_COMBINER, 1) && + verifier.EndTable(); + } + EmbeddingLookupSparseOptionsT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const; + void UnPackTo(EmbeddingLookupSparseOptionsT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const; + static flatbuffers::Offset Pack(flatbuffers::FlatBufferBuilder &_fbb, const EmbeddingLookupSparseOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); +}; + +struct EmbeddingLookupSparseOptionsBuilder { + typedef EmbeddingLookupSparseOptions Table; + flatbuffers::FlatBufferBuilder &fbb_; + flatbuffers::uoffset_t start_; + void add_combiner(tflite::CombinerType combiner) { + fbb_.AddElement(EmbeddingLookupSparseOptions::VT_COMBINER, static_cast(combiner), 0); + } + explicit 
EmbeddingLookupSparseOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb) + : fbb_(_fbb) { + start_ = fbb_.StartTable(); + } + flatbuffers::Offset Finish() { + const auto end = fbb_.EndTable(start_); + auto o = flatbuffers::Offset(end); + return o; + } +}; + +inline flatbuffers::Offset CreateEmbeddingLookupSparseOptions( + flatbuffers::FlatBufferBuilder &_fbb, + tflite::CombinerType combiner = tflite::CombinerType_SUM) { + EmbeddingLookupSparseOptionsBuilder builder_(_fbb); + builder_.add_combiner(combiner); + return builder_.Finish(); +} + +flatbuffers::Offset CreateEmbeddingLookupSparseOptions(flatbuffers::FlatBufferBuilder &_fbb, const EmbeddingLookupSparseOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); + +struct GatherOptionsT : public flatbuffers::NativeTable { + typedef GatherOptions TableType; + int32_t axis = 0; + int32_t batch_dims = 0; +}; + +struct GatherOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table { + typedef GatherOptionsT NativeTableType; + typedef GatherOptionsBuilder Builder; + enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE { + VT_AXIS = 4, + VT_BATCH_DIMS = 6 + }; + int32_t axis() const { + return GetField(VT_AXIS, 0); + } + int32_t batch_dims() const { + return GetField(VT_BATCH_DIMS, 0); + } + bool Verify(flatbuffers::Verifier &verifier) const { + return VerifyTableStart(verifier) && + VerifyField(verifier, VT_AXIS, 4) && + VerifyField(verifier, VT_BATCH_DIMS, 4) && + verifier.EndTable(); + } + GatherOptionsT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const; + void UnPackTo(GatherOptionsT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const; + static flatbuffers::Offset Pack(flatbuffers::FlatBufferBuilder &_fbb, const GatherOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); +}; + +struct GatherOptionsBuilder { + typedef GatherOptions Table; + flatbuffers::FlatBufferBuilder &fbb_; + flatbuffers::uoffset_t start_; + 
void add_axis(int32_t axis) { + fbb_.AddElement(GatherOptions::VT_AXIS, axis, 0); + } + void add_batch_dims(int32_t batch_dims) { + fbb_.AddElement(GatherOptions::VT_BATCH_DIMS, batch_dims, 0); + } + explicit GatherOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb) + : fbb_(_fbb) { + start_ = fbb_.StartTable(); + } + flatbuffers::Offset Finish() { + const auto end = fbb_.EndTable(start_); + auto o = flatbuffers::Offset(end); + return o; + } +}; + +inline flatbuffers::Offset CreateGatherOptions( + flatbuffers::FlatBufferBuilder &_fbb, + int32_t axis = 0, + int32_t batch_dims = 0) { + GatherOptionsBuilder builder_(_fbb); + builder_.add_batch_dims(batch_dims); + builder_.add_axis(axis); + return builder_.Finish(); +} + +flatbuffers::Offset CreateGatherOptions(flatbuffers::FlatBufferBuilder &_fbb, const GatherOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); + +struct TransposeOptionsT : public flatbuffers::NativeTable { + typedef TransposeOptions TableType; +}; + +struct TransposeOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table { + typedef TransposeOptionsT NativeTableType; + typedef TransposeOptionsBuilder Builder; + bool Verify(flatbuffers::Verifier &verifier) const { + return VerifyTableStart(verifier) && + verifier.EndTable(); + } + TransposeOptionsT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const; + void UnPackTo(TransposeOptionsT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const; + static flatbuffers::Offset Pack(flatbuffers::FlatBufferBuilder &_fbb, const TransposeOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); +}; + +struct TransposeOptionsBuilder { + typedef TransposeOptions Table; + flatbuffers::FlatBufferBuilder &fbb_; + flatbuffers::uoffset_t start_; + explicit TransposeOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb) + : fbb_(_fbb) { + start_ = fbb_.StartTable(); + } + flatbuffers::Offset Finish() { + const auto end = 
fbb_.EndTable(start_); + auto o = flatbuffers::Offset(end); + return o; + } +}; + +inline flatbuffers::Offset CreateTransposeOptions( + flatbuffers::FlatBufferBuilder &_fbb) { + TransposeOptionsBuilder builder_(_fbb); + return builder_.Finish(); +} + +flatbuffers::Offset CreateTransposeOptions(flatbuffers::FlatBufferBuilder &_fbb, const TransposeOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); + +struct ExpOptionsT : public flatbuffers::NativeTable { + typedef ExpOptions TableType; +}; + +struct ExpOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table { + typedef ExpOptionsT NativeTableType; + typedef ExpOptionsBuilder Builder; + bool Verify(flatbuffers::Verifier &verifier) const { + return VerifyTableStart(verifier) && + verifier.EndTable(); + } + ExpOptionsT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const; + void UnPackTo(ExpOptionsT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const; + static flatbuffers::Offset Pack(flatbuffers::FlatBufferBuilder &_fbb, const ExpOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); +}; + +struct ExpOptionsBuilder { + typedef ExpOptions Table; + flatbuffers::FlatBufferBuilder &fbb_; + flatbuffers::uoffset_t start_; + explicit ExpOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb) + : fbb_(_fbb) { + start_ = fbb_.StartTable(); + } + flatbuffers::Offset Finish() { + const auto end = fbb_.EndTable(start_); + auto o = flatbuffers::Offset(end); + return o; + } +}; + +inline flatbuffers::Offset CreateExpOptions( + flatbuffers::FlatBufferBuilder &_fbb) { + ExpOptionsBuilder builder_(_fbb); + return builder_.Finish(); +} + +flatbuffers::Offset CreateExpOptions(flatbuffers::FlatBufferBuilder &_fbb, const ExpOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); + +struct CosOptionsT : public flatbuffers::NativeTable { + typedef CosOptions TableType; +}; + +struct CosOptions FLATBUFFERS_FINAL_CLASS : private 
flatbuffers::Table { + typedef CosOptionsT NativeTableType; + typedef CosOptionsBuilder Builder; + bool Verify(flatbuffers::Verifier &verifier) const { + return VerifyTableStart(verifier) && + verifier.EndTable(); + } + CosOptionsT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const; + void UnPackTo(CosOptionsT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const; + static flatbuffers::Offset Pack(flatbuffers::FlatBufferBuilder &_fbb, const CosOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); +}; + +struct CosOptionsBuilder { + typedef CosOptions Table; + flatbuffers::FlatBufferBuilder &fbb_; + flatbuffers::uoffset_t start_; + explicit CosOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb) + : fbb_(_fbb) { + start_ = fbb_.StartTable(); + } + flatbuffers::Offset Finish() { + const auto end = fbb_.EndTable(start_); + auto o = flatbuffers::Offset(end); + return o; + } +}; + +inline flatbuffers::Offset CreateCosOptions( + flatbuffers::FlatBufferBuilder &_fbb) { + CosOptionsBuilder builder_(_fbb); + return builder_.Finish(); +} + +flatbuffers::Offset CreateCosOptions(flatbuffers::FlatBufferBuilder &_fbb, const CosOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); + +struct ReducerOptionsT : public flatbuffers::NativeTable { + typedef ReducerOptions TableType; + bool keep_dims = false; +}; + +struct ReducerOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table { + typedef ReducerOptionsT NativeTableType; + typedef ReducerOptionsBuilder Builder; + enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE { + VT_KEEP_DIMS = 4 + }; + bool keep_dims() const { + return GetField(VT_KEEP_DIMS, 0) != 0; + } + bool Verify(flatbuffers::Verifier &verifier) const { + return VerifyTableStart(verifier) && + VerifyField(verifier, VT_KEEP_DIMS, 1) && + verifier.EndTable(); + } + ReducerOptionsT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const; + void 
UnPackTo(ReducerOptionsT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const; + static flatbuffers::Offset Pack(flatbuffers::FlatBufferBuilder &_fbb, const ReducerOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); +}; + +struct ReducerOptionsBuilder { + typedef ReducerOptions Table; + flatbuffers::FlatBufferBuilder &fbb_; + flatbuffers::uoffset_t start_; + void add_keep_dims(bool keep_dims) { + fbb_.AddElement(ReducerOptions::VT_KEEP_DIMS, static_cast(keep_dims), 0); + } + explicit ReducerOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb) + : fbb_(_fbb) { + start_ = fbb_.StartTable(); + } + flatbuffers::Offset Finish() { + const auto end = fbb_.EndTable(start_); + auto o = flatbuffers::Offset(end); + return o; + } +}; + +inline flatbuffers::Offset CreateReducerOptions( + flatbuffers::FlatBufferBuilder &_fbb, + bool keep_dims = false) { + ReducerOptionsBuilder builder_(_fbb); + builder_.add_keep_dims(keep_dims); + return builder_.Finish(); +} + +flatbuffers::Offset CreateReducerOptions(flatbuffers::FlatBufferBuilder &_fbb, const ReducerOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); + +struct SqueezeOptionsT : public flatbuffers::NativeTable { + typedef SqueezeOptions TableType; + std::vector squeeze_dims{}; +}; + +struct SqueezeOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table { + typedef SqueezeOptionsT NativeTableType; + typedef SqueezeOptionsBuilder Builder; + enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE { + VT_SQUEEZE_DIMS = 4 + }; + const flatbuffers::Vector *squeeze_dims() const { + return GetPointer *>(VT_SQUEEZE_DIMS); + } + bool Verify(flatbuffers::Verifier &verifier) const { + return VerifyTableStart(verifier) && + VerifyOffset(verifier, VT_SQUEEZE_DIMS) && + verifier.VerifyVector(squeeze_dims()) && + verifier.EndTable(); + } + SqueezeOptionsT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const; + void UnPackTo(SqueezeOptionsT 
*_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const; + static flatbuffers::Offset Pack(flatbuffers::FlatBufferBuilder &_fbb, const SqueezeOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); +}; + +struct SqueezeOptionsBuilder { + typedef SqueezeOptions Table; + flatbuffers::FlatBufferBuilder &fbb_; + flatbuffers::uoffset_t start_; + void add_squeeze_dims(flatbuffers::Offset> squeeze_dims) { + fbb_.AddOffset(SqueezeOptions::VT_SQUEEZE_DIMS, squeeze_dims); + } + explicit SqueezeOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb) + : fbb_(_fbb) { + start_ = fbb_.StartTable(); + } + flatbuffers::Offset Finish() { + const auto end = fbb_.EndTable(start_); + auto o = flatbuffers::Offset(end); + return o; + } +}; + +inline flatbuffers::Offset CreateSqueezeOptions( + flatbuffers::FlatBufferBuilder &_fbb, + flatbuffers::Offset> squeeze_dims = 0) { + SqueezeOptionsBuilder builder_(_fbb); + builder_.add_squeeze_dims(squeeze_dims); + return builder_.Finish(); +} + +inline flatbuffers::Offset CreateSqueezeOptionsDirect( + flatbuffers::FlatBufferBuilder &_fbb, + const std::vector *squeeze_dims = nullptr) { + auto squeeze_dims__ = squeeze_dims ? 
_fbb.CreateVector(*squeeze_dims) : 0; + return tflite::CreateSqueezeOptions( + _fbb, + squeeze_dims__); +} + +flatbuffers::Offset CreateSqueezeOptions(flatbuffers::FlatBufferBuilder &_fbb, const SqueezeOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); + +struct SplitOptionsT : public flatbuffers::NativeTable { + typedef SplitOptions TableType; + int32_t num_splits = 0; +}; + +struct SplitOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table { + typedef SplitOptionsT NativeTableType; + typedef SplitOptionsBuilder Builder; + enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE { + VT_NUM_SPLITS = 4 + }; + int32_t num_splits() const { + return GetField(VT_NUM_SPLITS, 0); + } + bool Verify(flatbuffers::Verifier &verifier) const { + return VerifyTableStart(verifier) && + VerifyField(verifier, VT_NUM_SPLITS, 4) && + verifier.EndTable(); + } + SplitOptionsT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const; + void UnPackTo(SplitOptionsT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const; + static flatbuffers::Offset Pack(flatbuffers::FlatBufferBuilder &_fbb, const SplitOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); +}; + +struct SplitOptionsBuilder { + typedef SplitOptions Table; + flatbuffers::FlatBufferBuilder &fbb_; + flatbuffers::uoffset_t start_; + void add_num_splits(int32_t num_splits) { + fbb_.AddElement(SplitOptions::VT_NUM_SPLITS, num_splits, 0); + } + explicit SplitOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb) + : fbb_(_fbb) { + start_ = fbb_.StartTable(); + } + flatbuffers::Offset Finish() { + const auto end = fbb_.EndTable(start_); + auto o = flatbuffers::Offset(end); + return o; + } +}; + +inline flatbuffers::Offset CreateSplitOptions( + flatbuffers::FlatBufferBuilder &_fbb, + int32_t num_splits = 0) { + SplitOptionsBuilder builder_(_fbb); + builder_.add_num_splits(num_splits); + return builder_.Finish(); +} + +flatbuffers::Offset 
CreateSplitOptions(flatbuffers::FlatBufferBuilder &_fbb, const SplitOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); + +struct SplitVOptionsT : public flatbuffers::NativeTable { + typedef SplitVOptions TableType; + int32_t num_splits = 0; +}; + +struct SplitVOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table { + typedef SplitVOptionsT NativeTableType; + typedef SplitVOptionsBuilder Builder; + enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE { + VT_NUM_SPLITS = 4 + }; + int32_t num_splits() const { + return GetField(VT_NUM_SPLITS, 0); + } + bool Verify(flatbuffers::Verifier &verifier) const { + return VerifyTableStart(verifier) && + VerifyField(verifier, VT_NUM_SPLITS, 4) && + verifier.EndTable(); + } + SplitVOptionsT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const; + void UnPackTo(SplitVOptionsT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const; + static flatbuffers::Offset Pack(flatbuffers::FlatBufferBuilder &_fbb, const SplitVOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); +}; + +struct SplitVOptionsBuilder { + typedef SplitVOptions Table; + flatbuffers::FlatBufferBuilder &fbb_; + flatbuffers::uoffset_t start_; + void add_num_splits(int32_t num_splits) { + fbb_.AddElement(SplitVOptions::VT_NUM_SPLITS, num_splits, 0); + } + explicit SplitVOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb) + : fbb_(_fbb) { + start_ = fbb_.StartTable(); + } + flatbuffers::Offset Finish() { + const auto end = fbb_.EndTable(start_); + auto o = flatbuffers::Offset(end); + return o; + } +}; + +inline flatbuffers::Offset CreateSplitVOptions( + flatbuffers::FlatBufferBuilder &_fbb, + int32_t num_splits = 0) { + SplitVOptionsBuilder builder_(_fbb); + builder_.add_num_splits(num_splits); + return builder_.Finish(); +} + +flatbuffers::Offset CreateSplitVOptions(flatbuffers::FlatBufferBuilder &_fbb, const SplitVOptionsT *_o, const flatbuffers::rehasher_function_t 
*_rehasher = nullptr); + +struct StridedSliceOptionsT : public flatbuffers::NativeTable { + typedef StridedSliceOptions TableType; + int32_t begin_mask = 0; + int32_t end_mask = 0; + int32_t ellipsis_mask = 0; + int32_t new_axis_mask = 0; + int32_t shrink_axis_mask = 0; + bool offset = false; +}; + +struct StridedSliceOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table { + typedef StridedSliceOptionsT NativeTableType; + typedef StridedSliceOptionsBuilder Builder; + enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE { + VT_BEGIN_MASK = 4, + VT_END_MASK = 6, + VT_ELLIPSIS_MASK = 8, + VT_NEW_AXIS_MASK = 10, + VT_SHRINK_AXIS_MASK = 12, + VT_OFFSET = 14 + }; + int32_t begin_mask() const { + return GetField(VT_BEGIN_MASK, 0); + } + int32_t end_mask() const { + return GetField(VT_END_MASK, 0); + } + int32_t ellipsis_mask() const { + return GetField(VT_ELLIPSIS_MASK, 0); + } + int32_t new_axis_mask() const { + return GetField(VT_NEW_AXIS_MASK, 0); + } + int32_t shrink_axis_mask() const { + return GetField(VT_SHRINK_AXIS_MASK, 0); + } + bool offset() const { + return GetField(VT_OFFSET, 0) != 0; + } + bool Verify(flatbuffers::Verifier &verifier) const { + return VerifyTableStart(verifier) && + VerifyField(verifier, VT_BEGIN_MASK, 4) && + VerifyField(verifier, VT_END_MASK, 4) && + VerifyField(verifier, VT_ELLIPSIS_MASK, 4) && + VerifyField(verifier, VT_NEW_AXIS_MASK, 4) && + VerifyField(verifier, VT_SHRINK_AXIS_MASK, 4) && + VerifyField(verifier, VT_OFFSET, 1) && + verifier.EndTable(); + } + StridedSliceOptionsT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const; + void UnPackTo(StridedSliceOptionsT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const; + static flatbuffers::Offset Pack(flatbuffers::FlatBufferBuilder &_fbb, const StridedSliceOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); +}; + +struct StridedSliceOptionsBuilder { + typedef StridedSliceOptions Table; + 
flatbuffers::FlatBufferBuilder &fbb_; + flatbuffers::uoffset_t start_; + void add_begin_mask(int32_t begin_mask) { + fbb_.AddElement(StridedSliceOptions::VT_BEGIN_MASK, begin_mask, 0); + } + void add_end_mask(int32_t end_mask) { + fbb_.AddElement(StridedSliceOptions::VT_END_MASK, end_mask, 0); + } + void add_ellipsis_mask(int32_t ellipsis_mask) { + fbb_.AddElement(StridedSliceOptions::VT_ELLIPSIS_MASK, ellipsis_mask, 0); + } + void add_new_axis_mask(int32_t new_axis_mask) { + fbb_.AddElement(StridedSliceOptions::VT_NEW_AXIS_MASK, new_axis_mask, 0); + } + void add_shrink_axis_mask(int32_t shrink_axis_mask) { + fbb_.AddElement(StridedSliceOptions::VT_SHRINK_AXIS_MASK, shrink_axis_mask, 0); + } + void add_offset(bool offset) { + fbb_.AddElement(StridedSliceOptions::VT_OFFSET, static_cast(offset), 0); + } + explicit StridedSliceOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb) + : fbb_(_fbb) { + start_ = fbb_.StartTable(); + } + flatbuffers::Offset Finish() { + const auto end = fbb_.EndTable(start_); + auto o = flatbuffers::Offset(end); + return o; + } +}; + +inline flatbuffers::Offset CreateStridedSliceOptions( + flatbuffers::FlatBufferBuilder &_fbb, + int32_t begin_mask = 0, + int32_t end_mask = 0, + int32_t ellipsis_mask = 0, + int32_t new_axis_mask = 0, + int32_t shrink_axis_mask = 0, + bool offset = false) { + StridedSliceOptionsBuilder builder_(_fbb); + builder_.add_shrink_axis_mask(shrink_axis_mask); + builder_.add_new_axis_mask(new_axis_mask); + builder_.add_ellipsis_mask(ellipsis_mask); + builder_.add_end_mask(end_mask); + builder_.add_begin_mask(begin_mask); + builder_.add_offset(offset); + return builder_.Finish(); +} + +flatbuffers::Offset CreateStridedSliceOptions(flatbuffers::FlatBufferBuilder &_fbb, const StridedSliceOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); + +struct LogSoftmaxOptionsT : public flatbuffers::NativeTable { + typedef LogSoftmaxOptions TableType; +}; + +struct LogSoftmaxOptions FLATBUFFERS_FINAL_CLASS : 
private flatbuffers::Table { + typedef LogSoftmaxOptionsT NativeTableType; + typedef LogSoftmaxOptionsBuilder Builder; + bool Verify(flatbuffers::Verifier &verifier) const { + return VerifyTableStart(verifier) && + verifier.EndTable(); + } + LogSoftmaxOptionsT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const; + void UnPackTo(LogSoftmaxOptionsT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const; + static flatbuffers::Offset Pack(flatbuffers::FlatBufferBuilder &_fbb, const LogSoftmaxOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); +}; + +struct LogSoftmaxOptionsBuilder { + typedef LogSoftmaxOptions Table; + flatbuffers::FlatBufferBuilder &fbb_; + flatbuffers::uoffset_t start_; + explicit LogSoftmaxOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb) + : fbb_(_fbb) { + start_ = fbb_.StartTable(); + } + flatbuffers::Offset Finish() { + const auto end = fbb_.EndTable(start_); + auto o = flatbuffers::Offset(end); + return o; + } +}; + +inline flatbuffers::Offset CreateLogSoftmaxOptions( + flatbuffers::FlatBufferBuilder &_fbb) { + LogSoftmaxOptionsBuilder builder_(_fbb); + return builder_.Finish(); +} + +flatbuffers::Offset CreateLogSoftmaxOptions(flatbuffers::FlatBufferBuilder &_fbb, const LogSoftmaxOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); + +struct CastOptionsT : public flatbuffers::NativeTable { + typedef CastOptions TableType; + tflite::TensorType in_data_type = tflite::TensorType_FLOAT32; + tflite::TensorType out_data_type = tflite::TensorType_FLOAT32; +}; + +struct CastOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table { + typedef CastOptionsT NativeTableType; + typedef CastOptionsBuilder Builder; + enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE { + VT_IN_DATA_TYPE = 4, + VT_OUT_DATA_TYPE = 6 + }; + tflite::TensorType in_data_type() const { + return static_cast(GetField(VT_IN_DATA_TYPE, 0)); + } + tflite::TensorType out_data_type() 
const { + return static_cast(GetField(VT_OUT_DATA_TYPE, 0)); + } + bool Verify(flatbuffers::Verifier &verifier) const { + return VerifyTableStart(verifier) && + VerifyField(verifier, VT_IN_DATA_TYPE, 1) && + VerifyField(verifier, VT_OUT_DATA_TYPE, 1) && + verifier.EndTable(); + } + CastOptionsT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const; + void UnPackTo(CastOptionsT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const; + static flatbuffers::Offset Pack(flatbuffers::FlatBufferBuilder &_fbb, const CastOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); +}; + +struct CastOptionsBuilder { + typedef CastOptions Table; + flatbuffers::FlatBufferBuilder &fbb_; + flatbuffers::uoffset_t start_; + void add_in_data_type(tflite::TensorType in_data_type) { + fbb_.AddElement(CastOptions::VT_IN_DATA_TYPE, static_cast(in_data_type), 0); + } + void add_out_data_type(tflite::TensorType out_data_type) { + fbb_.AddElement(CastOptions::VT_OUT_DATA_TYPE, static_cast(out_data_type), 0); + } + explicit CastOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb) + : fbb_(_fbb) { + start_ = fbb_.StartTable(); + } + flatbuffers::Offset Finish() { + const auto end = fbb_.EndTable(start_); + auto o = flatbuffers::Offset(end); + return o; + } +}; + +inline flatbuffers::Offset CreateCastOptions( + flatbuffers::FlatBufferBuilder &_fbb, + tflite::TensorType in_data_type = tflite::TensorType_FLOAT32, + tflite::TensorType out_data_type = tflite::TensorType_FLOAT32) { + CastOptionsBuilder builder_(_fbb); + builder_.add_out_data_type(out_data_type); + builder_.add_in_data_type(in_data_type); + return builder_.Finish(); +} + +flatbuffers::Offset CreateCastOptions(flatbuffers::FlatBufferBuilder &_fbb, const CastOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); + +struct DequantizeOptionsT : public flatbuffers::NativeTable { + typedef DequantizeOptions TableType; +}; + +struct DequantizeOptions 
FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table { + typedef DequantizeOptionsT NativeTableType; + typedef DequantizeOptionsBuilder Builder; + bool Verify(flatbuffers::Verifier &verifier) const { + return VerifyTableStart(verifier) && + verifier.EndTable(); + } + DequantizeOptionsT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const; + void UnPackTo(DequantizeOptionsT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const; + static flatbuffers::Offset Pack(flatbuffers::FlatBufferBuilder &_fbb, const DequantizeOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); +}; + +struct DequantizeOptionsBuilder { + typedef DequantizeOptions Table; + flatbuffers::FlatBufferBuilder &fbb_; + flatbuffers::uoffset_t start_; + explicit DequantizeOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb) + : fbb_(_fbb) { + start_ = fbb_.StartTable(); + } + flatbuffers::Offset Finish() { + const auto end = fbb_.EndTable(start_); + auto o = flatbuffers::Offset(end); + return o; + } +}; + +inline flatbuffers::Offset CreateDequantizeOptions( + flatbuffers::FlatBufferBuilder &_fbb) { + DequantizeOptionsBuilder builder_(_fbb); + return builder_.Finish(); +} + +flatbuffers::Offset CreateDequantizeOptions(flatbuffers::FlatBufferBuilder &_fbb, const DequantizeOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); + +struct MaximumMinimumOptionsT : public flatbuffers::NativeTable { + typedef MaximumMinimumOptions TableType; +}; + +struct MaximumMinimumOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table { + typedef MaximumMinimumOptionsT NativeTableType; + typedef MaximumMinimumOptionsBuilder Builder; + bool Verify(flatbuffers::Verifier &verifier) const { + return VerifyTableStart(verifier) && + verifier.EndTable(); + } + MaximumMinimumOptionsT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const; + void UnPackTo(MaximumMinimumOptionsT *_o, const flatbuffers::resolver_function_t 
*_resolver = nullptr) const; + static flatbuffers::Offset Pack(flatbuffers::FlatBufferBuilder &_fbb, const MaximumMinimumOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); +}; + +struct MaximumMinimumOptionsBuilder { + typedef MaximumMinimumOptions Table; + flatbuffers::FlatBufferBuilder &fbb_; + flatbuffers::uoffset_t start_; + explicit MaximumMinimumOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb) + : fbb_(_fbb) { + start_ = fbb_.StartTable(); + } + flatbuffers::Offset Finish() { + const auto end = fbb_.EndTable(start_); + auto o = flatbuffers::Offset(end); + return o; + } +}; + +inline flatbuffers::Offset CreateMaximumMinimumOptions( + flatbuffers::FlatBufferBuilder &_fbb) { + MaximumMinimumOptionsBuilder builder_(_fbb); + return builder_.Finish(); +} + +flatbuffers::Offset CreateMaximumMinimumOptions(flatbuffers::FlatBufferBuilder &_fbb, const MaximumMinimumOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); + +struct TileOptionsT : public flatbuffers::NativeTable { + typedef TileOptions TableType; +}; + +struct TileOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table { + typedef TileOptionsT NativeTableType; + typedef TileOptionsBuilder Builder; + bool Verify(flatbuffers::Verifier &verifier) const { + return VerifyTableStart(verifier) && + verifier.EndTable(); + } + TileOptionsT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const; + void UnPackTo(TileOptionsT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const; + static flatbuffers::Offset Pack(flatbuffers::FlatBufferBuilder &_fbb, const TileOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); +}; + +struct TileOptionsBuilder { + typedef TileOptions Table; + flatbuffers::FlatBufferBuilder &fbb_; + flatbuffers::uoffset_t start_; + explicit TileOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb) + : fbb_(_fbb) { + start_ = fbb_.StartTable(); + } + flatbuffers::Offset Finish() { + const 
auto end = fbb_.EndTable(start_); + auto o = flatbuffers::Offset(end); + return o; + } +}; + +inline flatbuffers::Offset CreateTileOptions( + flatbuffers::FlatBufferBuilder &_fbb) { + TileOptionsBuilder builder_(_fbb); + return builder_.Finish(); +} + +flatbuffers::Offset CreateTileOptions(flatbuffers::FlatBufferBuilder &_fbb, const TileOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); + +struct ArgMaxOptionsT : public flatbuffers::NativeTable { + typedef ArgMaxOptions TableType; + tflite::TensorType output_type = tflite::TensorType_FLOAT32; +}; + +struct ArgMaxOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table { + typedef ArgMaxOptionsT NativeTableType; + typedef ArgMaxOptionsBuilder Builder; + enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE { + VT_OUTPUT_TYPE = 4 + }; + tflite::TensorType output_type() const { + return static_cast(GetField(VT_OUTPUT_TYPE, 0)); + } + bool Verify(flatbuffers::Verifier &verifier) const { + return VerifyTableStart(verifier) && + VerifyField(verifier, VT_OUTPUT_TYPE, 1) && + verifier.EndTable(); + } + ArgMaxOptionsT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const; + void UnPackTo(ArgMaxOptionsT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const; + static flatbuffers::Offset Pack(flatbuffers::FlatBufferBuilder &_fbb, const ArgMaxOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); +}; + +struct ArgMaxOptionsBuilder { + typedef ArgMaxOptions Table; + flatbuffers::FlatBufferBuilder &fbb_; + flatbuffers::uoffset_t start_; + void add_output_type(tflite::TensorType output_type) { + fbb_.AddElement(ArgMaxOptions::VT_OUTPUT_TYPE, static_cast(output_type), 0); + } + explicit ArgMaxOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb) + : fbb_(_fbb) { + start_ = fbb_.StartTable(); + } + flatbuffers::Offset Finish() { + const auto end = fbb_.EndTable(start_); + auto o = flatbuffers::Offset(end); + return o; + } +}; + +inline 
flatbuffers::Offset CreateArgMaxOptions( + flatbuffers::FlatBufferBuilder &_fbb, + tflite::TensorType output_type = tflite::TensorType_FLOAT32) { + ArgMaxOptionsBuilder builder_(_fbb); + builder_.add_output_type(output_type); + return builder_.Finish(); +} + +flatbuffers::Offset CreateArgMaxOptions(flatbuffers::FlatBufferBuilder &_fbb, const ArgMaxOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); + +struct ArgMinOptionsT : public flatbuffers::NativeTable { + typedef ArgMinOptions TableType; + tflite::TensorType output_type = tflite::TensorType_FLOAT32; +}; + +struct ArgMinOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table { + typedef ArgMinOptionsT NativeTableType; + typedef ArgMinOptionsBuilder Builder; + enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE { + VT_OUTPUT_TYPE = 4 + }; + tflite::TensorType output_type() const { + return static_cast(GetField(VT_OUTPUT_TYPE, 0)); + } + bool Verify(flatbuffers::Verifier &verifier) const { + return VerifyTableStart(verifier) && + VerifyField(verifier, VT_OUTPUT_TYPE, 1) && + verifier.EndTable(); + } + ArgMinOptionsT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const; + void UnPackTo(ArgMinOptionsT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const; + static flatbuffers::Offset Pack(flatbuffers::FlatBufferBuilder &_fbb, const ArgMinOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); +}; + +struct ArgMinOptionsBuilder { + typedef ArgMinOptions Table; + flatbuffers::FlatBufferBuilder &fbb_; + flatbuffers::uoffset_t start_; + void add_output_type(tflite::TensorType output_type) { + fbb_.AddElement(ArgMinOptions::VT_OUTPUT_TYPE, static_cast(output_type), 0); + } + explicit ArgMinOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb) + : fbb_(_fbb) { + start_ = fbb_.StartTable(); + } + flatbuffers::Offset Finish() { + const auto end = fbb_.EndTable(start_); + auto o = flatbuffers::Offset(end); + return o; + } +}; 
+ +inline flatbuffers::Offset CreateArgMinOptions( + flatbuffers::FlatBufferBuilder &_fbb, + tflite::TensorType output_type = tflite::TensorType_FLOAT32) { + ArgMinOptionsBuilder builder_(_fbb); + builder_.add_output_type(output_type); + return builder_.Finish(); +} + +flatbuffers::Offset CreateArgMinOptions(flatbuffers::FlatBufferBuilder &_fbb, const ArgMinOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); + +struct GreaterOptionsT : public flatbuffers::NativeTable { + typedef GreaterOptions TableType; +}; + +struct GreaterOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table { + typedef GreaterOptionsT NativeTableType; + typedef GreaterOptionsBuilder Builder; + bool Verify(flatbuffers::Verifier &verifier) const { + return VerifyTableStart(verifier) && + verifier.EndTable(); + } + GreaterOptionsT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const; + void UnPackTo(GreaterOptionsT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const; + static flatbuffers::Offset Pack(flatbuffers::FlatBufferBuilder &_fbb, const GreaterOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); +}; + +struct GreaterOptionsBuilder { + typedef GreaterOptions Table; + flatbuffers::FlatBufferBuilder &fbb_; + flatbuffers::uoffset_t start_; + explicit GreaterOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb) + : fbb_(_fbb) { + start_ = fbb_.StartTable(); + } + flatbuffers::Offset Finish() { + const auto end = fbb_.EndTable(start_); + auto o = flatbuffers::Offset(end); + return o; + } +}; + +inline flatbuffers::Offset CreateGreaterOptions( + flatbuffers::FlatBufferBuilder &_fbb) { + GreaterOptionsBuilder builder_(_fbb); + return builder_.Finish(); +} + +flatbuffers::Offset CreateGreaterOptions(flatbuffers::FlatBufferBuilder &_fbb, const GreaterOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); + +struct GreaterEqualOptionsT : public flatbuffers::NativeTable { + typedef 
GreaterEqualOptions TableType; +}; + +struct GreaterEqualOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table { + typedef GreaterEqualOptionsT NativeTableType; + typedef GreaterEqualOptionsBuilder Builder; + bool Verify(flatbuffers::Verifier &verifier) const { + return VerifyTableStart(verifier) && + verifier.EndTable(); + } + GreaterEqualOptionsT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const; + void UnPackTo(GreaterEqualOptionsT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const; + static flatbuffers::Offset Pack(flatbuffers::FlatBufferBuilder &_fbb, const GreaterEqualOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); +}; + +struct GreaterEqualOptionsBuilder { + typedef GreaterEqualOptions Table; + flatbuffers::FlatBufferBuilder &fbb_; + flatbuffers::uoffset_t start_; + explicit GreaterEqualOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb) + : fbb_(_fbb) { + start_ = fbb_.StartTable(); + } + flatbuffers::Offset Finish() { + const auto end = fbb_.EndTable(start_); + auto o = flatbuffers::Offset(end); + return o; + } +}; + +inline flatbuffers::Offset CreateGreaterEqualOptions( + flatbuffers::FlatBufferBuilder &_fbb) { + GreaterEqualOptionsBuilder builder_(_fbb); + return builder_.Finish(); +} + +flatbuffers::Offset CreateGreaterEqualOptions(flatbuffers::FlatBufferBuilder &_fbb, const GreaterEqualOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); + +struct LessOptionsT : public flatbuffers::NativeTable { + typedef LessOptions TableType; +}; + +struct LessOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table { + typedef LessOptionsT NativeTableType; + typedef LessOptionsBuilder Builder; + bool Verify(flatbuffers::Verifier &verifier) const { + return VerifyTableStart(verifier) && + verifier.EndTable(); + } + LessOptionsT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const; + void UnPackTo(LessOptionsT *_o, const 
flatbuffers::resolver_function_t *_resolver = nullptr) const; + static flatbuffers::Offset Pack(flatbuffers::FlatBufferBuilder &_fbb, const LessOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); +}; + +struct LessOptionsBuilder { + typedef LessOptions Table; + flatbuffers::FlatBufferBuilder &fbb_; + flatbuffers::uoffset_t start_; + explicit LessOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb) + : fbb_(_fbb) { + start_ = fbb_.StartTable(); + } + flatbuffers::Offset Finish() { + const auto end = fbb_.EndTable(start_); + auto o = flatbuffers::Offset(end); + return o; + } +}; + +inline flatbuffers::Offset CreateLessOptions( + flatbuffers::FlatBufferBuilder &_fbb) { + LessOptionsBuilder builder_(_fbb); + return builder_.Finish(); +} + +flatbuffers::Offset CreateLessOptions(flatbuffers::FlatBufferBuilder &_fbb, const LessOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); + +struct LessEqualOptionsT : public flatbuffers::NativeTable { + typedef LessEqualOptions TableType; +}; + +struct LessEqualOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table { + typedef LessEqualOptionsT NativeTableType; + typedef LessEqualOptionsBuilder Builder; + bool Verify(flatbuffers::Verifier &verifier) const { + return VerifyTableStart(verifier) && + verifier.EndTable(); + } + LessEqualOptionsT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const; + void UnPackTo(LessEqualOptionsT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const; + static flatbuffers::Offset Pack(flatbuffers::FlatBufferBuilder &_fbb, const LessEqualOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); +}; + +struct LessEqualOptionsBuilder { + typedef LessEqualOptions Table; + flatbuffers::FlatBufferBuilder &fbb_; + flatbuffers::uoffset_t start_; + explicit LessEqualOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb) + : fbb_(_fbb) { + start_ = fbb_.StartTable(); + } + flatbuffers::Offset Finish() { 
+ const auto end = fbb_.EndTable(start_); + auto o = flatbuffers::Offset(end); + return o; + } +}; + +inline flatbuffers::Offset CreateLessEqualOptions( + flatbuffers::FlatBufferBuilder &_fbb) { + LessEqualOptionsBuilder builder_(_fbb); + return builder_.Finish(); +} + +flatbuffers::Offset CreateLessEqualOptions(flatbuffers::FlatBufferBuilder &_fbb, const LessEqualOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); + +struct NegOptionsT : public flatbuffers::NativeTable { + typedef NegOptions TableType; +}; + +struct NegOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table { + typedef NegOptionsT NativeTableType; + typedef NegOptionsBuilder Builder; + bool Verify(flatbuffers::Verifier &verifier) const { + return VerifyTableStart(verifier) && + verifier.EndTable(); + } + NegOptionsT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const; + void UnPackTo(NegOptionsT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const; + static flatbuffers::Offset Pack(flatbuffers::FlatBufferBuilder &_fbb, const NegOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); +}; + +struct NegOptionsBuilder { + typedef NegOptions Table; + flatbuffers::FlatBufferBuilder &fbb_; + flatbuffers::uoffset_t start_; + explicit NegOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb) + : fbb_(_fbb) { + start_ = fbb_.StartTable(); + } + flatbuffers::Offset Finish() { + const auto end = fbb_.EndTable(start_); + auto o = flatbuffers::Offset(end); + return o; + } +}; + +inline flatbuffers::Offset CreateNegOptions( + flatbuffers::FlatBufferBuilder &_fbb) { + NegOptionsBuilder builder_(_fbb); + return builder_.Finish(); +} + +flatbuffers::Offset CreateNegOptions(flatbuffers::FlatBufferBuilder &_fbb, const NegOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); + +struct SelectOptionsT : public flatbuffers::NativeTable { + typedef SelectOptions TableType; +}; + +struct SelectOptions 
FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table { + typedef SelectOptionsT NativeTableType; + typedef SelectOptionsBuilder Builder; + bool Verify(flatbuffers::Verifier &verifier) const { + return VerifyTableStart(verifier) && + verifier.EndTable(); + } + SelectOptionsT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const; + void UnPackTo(SelectOptionsT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const; + static flatbuffers::Offset Pack(flatbuffers::FlatBufferBuilder &_fbb, const SelectOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); +}; + +struct SelectOptionsBuilder { + typedef SelectOptions Table; + flatbuffers::FlatBufferBuilder &fbb_; + flatbuffers::uoffset_t start_; + explicit SelectOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb) + : fbb_(_fbb) { + start_ = fbb_.StartTable(); + } + flatbuffers::Offset Finish() { + const auto end = fbb_.EndTable(start_); + auto o = flatbuffers::Offset(end); + return o; + } +}; + +inline flatbuffers::Offset CreateSelectOptions( + flatbuffers::FlatBufferBuilder &_fbb) { + SelectOptionsBuilder builder_(_fbb); + return builder_.Finish(); +} + +flatbuffers::Offset CreateSelectOptions(flatbuffers::FlatBufferBuilder &_fbb, const SelectOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); + +struct SliceOptionsT : public flatbuffers::NativeTable { + typedef SliceOptions TableType; +}; + +struct SliceOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table { + typedef SliceOptionsT NativeTableType; + typedef SliceOptionsBuilder Builder; + bool Verify(flatbuffers::Verifier &verifier) const { + return VerifyTableStart(verifier) && + verifier.EndTable(); + } + SliceOptionsT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const; + void UnPackTo(SliceOptionsT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const; + static flatbuffers::Offset Pack(flatbuffers::FlatBufferBuilder &_fbb, const 
SliceOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); +}; + +struct SliceOptionsBuilder { + typedef SliceOptions Table; + flatbuffers::FlatBufferBuilder &fbb_; + flatbuffers::uoffset_t start_; + explicit SliceOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb) + : fbb_(_fbb) { + start_ = fbb_.StartTable(); + } + flatbuffers::Offset Finish() { + const auto end = fbb_.EndTable(start_); + auto o = flatbuffers::Offset(end); + return o; + } +}; + +inline flatbuffers::Offset CreateSliceOptions( + flatbuffers::FlatBufferBuilder &_fbb) { + SliceOptionsBuilder builder_(_fbb); + return builder_.Finish(); +} + +flatbuffers::Offset CreateSliceOptions(flatbuffers::FlatBufferBuilder &_fbb, const SliceOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); + +struct TransposeConvOptionsT : public flatbuffers::NativeTable { + typedef TransposeConvOptions TableType; + tflite::Padding padding = tflite::Padding_SAME; + int32_t stride_w = 0; + int32_t stride_h = 0; + tflite::ActivationFunctionType fused_activation_function = tflite::ActivationFunctionType_NONE; +}; + +struct TransposeConvOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table { + typedef TransposeConvOptionsT NativeTableType; + typedef TransposeConvOptionsBuilder Builder; + enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE { + VT_PADDING = 4, + VT_STRIDE_W = 6, + VT_STRIDE_H = 8, + VT_FUSED_ACTIVATION_FUNCTION = 10 + }; + tflite::Padding padding() const { + return static_cast(GetField(VT_PADDING, 0)); + } + int32_t stride_w() const { + return GetField(VT_STRIDE_W, 0); + } + int32_t stride_h() const { + return GetField(VT_STRIDE_H, 0); + } + tflite::ActivationFunctionType fused_activation_function() const { + return static_cast(GetField(VT_FUSED_ACTIVATION_FUNCTION, 0)); + } + bool Verify(flatbuffers::Verifier &verifier) const { + return VerifyTableStart(verifier) && + VerifyField(verifier, VT_PADDING, 1) && + VerifyField(verifier, VT_STRIDE_W, 4) && + 
VerifyField(verifier, VT_STRIDE_H, 4) && + VerifyField(verifier, VT_FUSED_ACTIVATION_FUNCTION, 1) && + verifier.EndTable(); + } + TransposeConvOptionsT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const; + void UnPackTo(TransposeConvOptionsT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const; + static flatbuffers::Offset Pack(flatbuffers::FlatBufferBuilder &_fbb, const TransposeConvOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); +}; + +struct TransposeConvOptionsBuilder { + typedef TransposeConvOptions Table; + flatbuffers::FlatBufferBuilder &fbb_; + flatbuffers::uoffset_t start_; + void add_padding(tflite::Padding padding) { + fbb_.AddElement(TransposeConvOptions::VT_PADDING, static_cast(padding), 0); + } + void add_stride_w(int32_t stride_w) { + fbb_.AddElement(TransposeConvOptions::VT_STRIDE_W, stride_w, 0); + } + void add_stride_h(int32_t stride_h) { + fbb_.AddElement(TransposeConvOptions::VT_STRIDE_H, stride_h, 0); + } + void add_fused_activation_function(tflite::ActivationFunctionType fused_activation_function) { + fbb_.AddElement(TransposeConvOptions::VT_FUSED_ACTIVATION_FUNCTION, static_cast(fused_activation_function), 0); + } + explicit TransposeConvOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb) + : fbb_(_fbb) { + start_ = fbb_.StartTable(); + } + flatbuffers::Offset Finish() { + const auto end = fbb_.EndTable(start_); + auto o = flatbuffers::Offset(end); + return o; + } +}; + +inline flatbuffers::Offset CreateTransposeConvOptions( + flatbuffers::FlatBufferBuilder &_fbb, + tflite::Padding padding = tflite::Padding_SAME, + int32_t stride_w = 0, + int32_t stride_h = 0, + tflite::ActivationFunctionType fused_activation_function = tflite::ActivationFunctionType_NONE) { + TransposeConvOptionsBuilder builder_(_fbb); + builder_.add_stride_h(stride_h); + builder_.add_stride_w(stride_w); + builder_.add_fused_activation_function(fused_activation_function); + builder_.add_padding(padding); 
+ return builder_.Finish(); +} + +flatbuffers::Offset CreateTransposeConvOptions(flatbuffers::FlatBufferBuilder &_fbb, const TransposeConvOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); + +struct ExpandDimsOptionsT : public flatbuffers::NativeTable { + typedef ExpandDimsOptions TableType; +}; + +struct ExpandDimsOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table { + typedef ExpandDimsOptionsT NativeTableType; + typedef ExpandDimsOptionsBuilder Builder; + bool Verify(flatbuffers::Verifier &verifier) const { + return VerifyTableStart(verifier) && + verifier.EndTable(); + } + ExpandDimsOptionsT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const; + void UnPackTo(ExpandDimsOptionsT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const; + static flatbuffers::Offset Pack(flatbuffers::FlatBufferBuilder &_fbb, const ExpandDimsOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); +}; + +struct ExpandDimsOptionsBuilder { + typedef ExpandDimsOptions Table; + flatbuffers::FlatBufferBuilder &fbb_; + flatbuffers::uoffset_t start_; + explicit ExpandDimsOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb) + : fbb_(_fbb) { + start_ = fbb_.StartTable(); + } + flatbuffers::Offset Finish() { + const auto end = fbb_.EndTable(start_); + auto o = flatbuffers::Offset(end); + return o; + } +}; + +inline flatbuffers::Offset CreateExpandDimsOptions( + flatbuffers::FlatBufferBuilder &_fbb) { + ExpandDimsOptionsBuilder builder_(_fbb); + return builder_.Finish(); +} + +flatbuffers::Offset CreateExpandDimsOptions(flatbuffers::FlatBufferBuilder &_fbb, const ExpandDimsOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); + +struct SparseToDenseOptionsT : public flatbuffers::NativeTable { + typedef SparseToDenseOptions TableType; + bool validate_indices = false; +}; + +struct SparseToDenseOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table { + typedef 
SparseToDenseOptionsT NativeTableType; + typedef SparseToDenseOptionsBuilder Builder; + enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE { + VT_VALIDATE_INDICES = 4 + }; + bool validate_indices() const { + return GetField(VT_VALIDATE_INDICES, 0) != 0; + } + bool Verify(flatbuffers::Verifier &verifier) const { + return VerifyTableStart(verifier) && + VerifyField(verifier, VT_VALIDATE_INDICES, 1) && + verifier.EndTable(); + } + SparseToDenseOptionsT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const; + void UnPackTo(SparseToDenseOptionsT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const; + static flatbuffers::Offset Pack(flatbuffers::FlatBufferBuilder &_fbb, const SparseToDenseOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); +}; + +struct SparseToDenseOptionsBuilder { + typedef SparseToDenseOptions Table; + flatbuffers::FlatBufferBuilder &fbb_; + flatbuffers::uoffset_t start_; + void add_validate_indices(bool validate_indices) { + fbb_.AddElement(SparseToDenseOptions::VT_VALIDATE_INDICES, static_cast(validate_indices), 0); + } + explicit SparseToDenseOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb) + : fbb_(_fbb) { + start_ = fbb_.StartTable(); + } + flatbuffers::Offset Finish() { + const auto end = fbb_.EndTable(start_); + auto o = flatbuffers::Offset(end); + return o; + } +}; + +inline flatbuffers::Offset CreateSparseToDenseOptions( + flatbuffers::FlatBufferBuilder &_fbb, + bool validate_indices = false) { + SparseToDenseOptionsBuilder builder_(_fbb); + builder_.add_validate_indices(validate_indices); + return builder_.Finish(); +} + +flatbuffers::Offset CreateSparseToDenseOptions(flatbuffers::FlatBufferBuilder &_fbb, const SparseToDenseOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); + +struct EqualOptionsT : public flatbuffers::NativeTable { + typedef EqualOptions TableType; +}; + +struct EqualOptions FLATBUFFERS_FINAL_CLASS : private 
flatbuffers::Table { + typedef EqualOptionsT NativeTableType; + typedef EqualOptionsBuilder Builder; + bool Verify(flatbuffers::Verifier &verifier) const { + return VerifyTableStart(verifier) && + verifier.EndTable(); + } + EqualOptionsT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const; + void UnPackTo(EqualOptionsT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const; + static flatbuffers::Offset Pack(flatbuffers::FlatBufferBuilder &_fbb, const EqualOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); +}; + +struct EqualOptionsBuilder { + typedef EqualOptions Table; + flatbuffers::FlatBufferBuilder &fbb_; + flatbuffers::uoffset_t start_; + explicit EqualOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb) + : fbb_(_fbb) { + start_ = fbb_.StartTable(); + } + flatbuffers::Offset Finish() { + const auto end = fbb_.EndTable(start_); + auto o = flatbuffers::Offset(end); + return o; + } +}; + +inline flatbuffers::Offset CreateEqualOptions( + flatbuffers::FlatBufferBuilder &_fbb) { + EqualOptionsBuilder builder_(_fbb); + return builder_.Finish(); +} + +flatbuffers::Offset CreateEqualOptions(flatbuffers::FlatBufferBuilder &_fbb, const EqualOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); + +struct NotEqualOptionsT : public flatbuffers::NativeTable { + typedef NotEqualOptions TableType; +}; + +struct NotEqualOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table { + typedef NotEqualOptionsT NativeTableType; + typedef NotEqualOptionsBuilder Builder; + bool Verify(flatbuffers::Verifier &verifier) const { + return VerifyTableStart(verifier) && + verifier.EndTable(); + } + NotEqualOptionsT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const; + void UnPackTo(NotEqualOptionsT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const; + static flatbuffers::Offset Pack(flatbuffers::FlatBufferBuilder &_fbb, const NotEqualOptionsT* _o, const 
flatbuffers::rehasher_function_t *_rehasher = nullptr); +}; + +struct NotEqualOptionsBuilder { + typedef NotEqualOptions Table; + flatbuffers::FlatBufferBuilder &fbb_; + flatbuffers::uoffset_t start_; + explicit NotEqualOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb) + : fbb_(_fbb) { + start_ = fbb_.StartTable(); + } + flatbuffers::Offset Finish() { + const auto end = fbb_.EndTable(start_); + auto o = flatbuffers::Offset(end); + return o; + } +}; + +inline flatbuffers::Offset CreateNotEqualOptions( + flatbuffers::FlatBufferBuilder &_fbb) { + NotEqualOptionsBuilder builder_(_fbb); + return builder_.Finish(); +} + +flatbuffers::Offset CreateNotEqualOptions(flatbuffers::FlatBufferBuilder &_fbb, const NotEqualOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); + +struct ShapeOptionsT : public flatbuffers::NativeTable { + typedef ShapeOptions TableType; + tflite::TensorType out_type = tflite::TensorType_FLOAT32; +}; + +struct ShapeOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table { + typedef ShapeOptionsT NativeTableType; + typedef ShapeOptionsBuilder Builder; + enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE { + VT_OUT_TYPE = 4 + }; + tflite::TensorType out_type() const { + return static_cast(GetField(VT_OUT_TYPE, 0)); + } + bool Verify(flatbuffers::Verifier &verifier) const { + return VerifyTableStart(verifier) && + VerifyField(verifier, VT_OUT_TYPE, 1) && + verifier.EndTable(); + } + ShapeOptionsT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const; + void UnPackTo(ShapeOptionsT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const; + static flatbuffers::Offset Pack(flatbuffers::FlatBufferBuilder &_fbb, const ShapeOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); +}; + +struct ShapeOptionsBuilder { + typedef ShapeOptions Table; + flatbuffers::FlatBufferBuilder &fbb_; + flatbuffers::uoffset_t start_; + void add_out_type(tflite::TensorType 
out_type) { + fbb_.AddElement(ShapeOptions::VT_OUT_TYPE, static_cast(out_type), 0); + } + explicit ShapeOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb) + : fbb_(_fbb) { + start_ = fbb_.StartTable(); + } + flatbuffers::Offset Finish() { + const auto end = fbb_.EndTable(start_); + auto o = flatbuffers::Offset(end); + return o; + } +}; + +inline flatbuffers::Offset CreateShapeOptions( + flatbuffers::FlatBufferBuilder &_fbb, + tflite::TensorType out_type = tflite::TensorType_FLOAT32) { + ShapeOptionsBuilder builder_(_fbb); + builder_.add_out_type(out_type); + return builder_.Finish(); +} + +flatbuffers::Offset CreateShapeOptions(flatbuffers::FlatBufferBuilder &_fbb, const ShapeOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); + +struct RankOptionsT : public flatbuffers::NativeTable { + typedef RankOptions TableType; +}; + +struct RankOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table { + typedef RankOptionsT NativeTableType; + typedef RankOptionsBuilder Builder; + bool Verify(flatbuffers::Verifier &verifier) const { + return VerifyTableStart(verifier) && + verifier.EndTable(); + } + RankOptionsT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const; + void UnPackTo(RankOptionsT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const; + static flatbuffers::Offset Pack(flatbuffers::FlatBufferBuilder &_fbb, const RankOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); +}; + +struct RankOptionsBuilder { + typedef RankOptions Table; + flatbuffers::FlatBufferBuilder &fbb_; + flatbuffers::uoffset_t start_; + explicit RankOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb) + : fbb_(_fbb) { + start_ = fbb_.StartTable(); + } + flatbuffers::Offset Finish() { + const auto end = fbb_.EndTable(start_); + auto o = flatbuffers::Offset(end); + return o; + } +}; + +inline flatbuffers::Offset CreateRankOptions( + flatbuffers::FlatBufferBuilder &_fbb) { + RankOptionsBuilder 
builder_(_fbb); + return builder_.Finish(); +} + +flatbuffers::Offset CreateRankOptions(flatbuffers::FlatBufferBuilder &_fbb, const RankOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); + +struct PowOptionsT : public flatbuffers::NativeTable { + typedef PowOptions TableType; +}; + +struct PowOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table { + typedef PowOptionsT NativeTableType; + typedef PowOptionsBuilder Builder; + bool Verify(flatbuffers::Verifier &verifier) const { + return VerifyTableStart(verifier) && + verifier.EndTable(); + } + PowOptionsT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const; + void UnPackTo(PowOptionsT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const; + static flatbuffers::Offset Pack(flatbuffers::FlatBufferBuilder &_fbb, const PowOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); +}; + +struct PowOptionsBuilder { + typedef PowOptions Table; + flatbuffers::FlatBufferBuilder &fbb_; + flatbuffers::uoffset_t start_; + explicit PowOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb) + : fbb_(_fbb) { + start_ = fbb_.StartTable(); + } + flatbuffers::Offset Finish() { + const auto end = fbb_.EndTable(start_); + auto o = flatbuffers::Offset(end); + return o; + } +}; + +inline flatbuffers::Offset CreatePowOptions( + flatbuffers::FlatBufferBuilder &_fbb) { + PowOptionsBuilder builder_(_fbb); + return builder_.Finish(); +} + +flatbuffers::Offset CreatePowOptions(flatbuffers::FlatBufferBuilder &_fbb, const PowOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); + +struct FakeQuantOptionsT : public flatbuffers::NativeTable { + typedef FakeQuantOptions TableType; + float min = 0.0f; + float max = 0.0f; + int32_t num_bits = 0; + bool narrow_range = false; +}; + +struct FakeQuantOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table { + typedef FakeQuantOptionsT NativeTableType; + typedef FakeQuantOptionsBuilder 
Builder; + enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE { + VT_MIN = 4, + VT_MAX = 6, + VT_NUM_BITS = 8, + VT_NARROW_RANGE = 10 + }; + float min() const { + return GetField(VT_MIN, 0.0f); + } + float max() const { + return GetField(VT_MAX, 0.0f); + } + int32_t num_bits() const { + return GetField(VT_NUM_BITS, 0); + } + bool narrow_range() const { + return GetField(VT_NARROW_RANGE, 0) != 0; + } + bool Verify(flatbuffers::Verifier &verifier) const { + return VerifyTableStart(verifier) && + VerifyField(verifier, VT_MIN, 4) && + VerifyField(verifier, VT_MAX, 4) && + VerifyField(verifier, VT_NUM_BITS, 4) && + VerifyField(verifier, VT_NARROW_RANGE, 1) && + verifier.EndTable(); + } + FakeQuantOptionsT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const; + void UnPackTo(FakeQuantOptionsT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const; + static flatbuffers::Offset Pack(flatbuffers::FlatBufferBuilder &_fbb, const FakeQuantOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); +}; + +struct FakeQuantOptionsBuilder { + typedef FakeQuantOptions Table; + flatbuffers::FlatBufferBuilder &fbb_; + flatbuffers::uoffset_t start_; + void add_min(float min) { + fbb_.AddElement(FakeQuantOptions::VT_MIN, min, 0.0f); + } + void add_max(float max) { + fbb_.AddElement(FakeQuantOptions::VT_MAX, max, 0.0f); + } + void add_num_bits(int32_t num_bits) { + fbb_.AddElement(FakeQuantOptions::VT_NUM_BITS, num_bits, 0); + } + void add_narrow_range(bool narrow_range) { + fbb_.AddElement(FakeQuantOptions::VT_NARROW_RANGE, static_cast(narrow_range), 0); + } + explicit FakeQuantOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb) + : fbb_(_fbb) { + start_ = fbb_.StartTable(); + } + flatbuffers::Offset Finish() { + const auto end = fbb_.EndTable(start_); + auto o = flatbuffers::Offset(end); + return o; + } +}; + +inline flatbuffers::Offset CreateFakeQuantOptions( + flatbuffers::FlatBufferBuilder &_fbb, + float min = 
0.0f, + float max = 0.0f, + int32_t num_bits = 0, + bool narrow_range = false) { + FakeQuantOptionsBuilder builder_(_fbb); + builder_.add_num_bits(num_bits); + builder_.add_max(max); + builder_.add_min(min); + builder_.add_narrow_range(narrow_range); + return builder_.Finish(); +} + +flatbuffers::Offset CreateFakeQuantOptions(flatbuffers::FlatBufferBuilder &_fbb, const FakeQuantOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); + +struct PackOptionsT : public flatbuffers::NativeTable { + typedef PackOptions TableType; + int32_t values_count = 0; + int32_t axis = 0; +}; + +struct PackOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table { + typedef PackOptionsT NativeTableType; + typedef PackOptionsBuilder Builder; + enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE { + VT_VALUES_COUNT = 4, + VT_AXIS = 6 + }; + int32_t values_count() const { + return GetField(VT_VALUES_COUNT, 0); + } + int32_t axis() const { + return GetField(VT_AXIS, 0); + } + bool Verify(flatbuffers::Verifier &verifier) const { + return VerifyTableStart(verifier) && + VerifyField(verifier, VT_VALUES_COUNT, 4) && + VerifyField(verifier, VT_AXIS, 4) && + verifier.EndTable(); + } + PackOptionsT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const; + void UnPackTo(PackOptionsT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const; + static flatbuffers::Offset Pack(flatbuffers::FlatBufferBuilder &_fbb, const PackOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); +}; + +struct PackOptionsBuilder { + typedef PackOptions Table; + flatbuffers::FlatBufferBuilder &fbb_; + flatbuffers::uoffset_t start_; + void add_values_count(int32_t values_count) { + fbb_.AddElement(PackOptions::VT_VALUES_COUNT, values_count, 0); + } + void add_axis(int32_t axis) { + fbb_.AddElement(PackOptions::VT_AXIS, axis, 0); + } + explicit PackOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb) + : fbb_(_fbb) { + start_ = 
fbb_.StartTable(); + } + flatbuffers::Offset Finish() { + const auto end = fbb_.EndTable(start_); + auto o = flatbuffers::Offset(end); + return o; + } +}; + +inline flatbuffers::Offset CreatePackOptions( + flatbuffers::FlatBufferBuilder &_fbb, + int32_t values_count = 0, + int32_t axis = 0) { + PackOptionsBuilder builder_(_fbb); + builder_.add_axis(axis); + builder_.add_values_count(values_count); + return builder_.Finish(); +} + +flatbuffers::Offset CreatePackOptions(flatbuffers::FlatBufferBuilder &_fbb, const PackOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); + +struct LogicalOrOptionsT : public flatbuffers::NativeTable { + typedef LogicalOrOptions TableType; +}; + +struct LogicalOrOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table { + typedef LogicalOrOptionsT NativeTableType; + typedef LogicalOrOptionsBuilder Builder; + bool Verify(flatbuffers::Verifier &verifier) const { + return VerifyTableStart(verifier) && + verifier.EndTable(); + } + LogicalOrOptionsT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const; + void UnPackTo(LogicalOrOptionsT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const; + static flatbuffers::Offset Pack(flatbuffers::FlatBufferBuilder &_fbb, const LogicalOrOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); +}; + +struct LogicalOrOptionsBuilder { + typedef LogicalOrOptions Table; + flatbuffers::FlatBufferBuilder &fbb_; + flatbuffers::uoffset_t start_; + explicit LogicalOrOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb) + : fbb_(_fbb) { + start_ = fbb_.StartTable(); + } + flatbuffers::Offset Finish() { + const auto end = fbb_.EndTable(start_); + auto o = flatbuffers::Offset(end); + return o; + } +}; + +inline flatbuffers::Offset CreateLogicalOrOptions( + flatbuffers::FlatBufferBuilder &_fbb) { + LogicalOrOptionsBuilder builder_(_fbb); + return builder_.Finish(); +} + +flatbuffers::Offset 
CreateLogicalOrOptions(flatbuffers::FlatBufferBuilder &_fbb, const LogicalOrOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); + +struct OneHotOptionsT : public flatbuffers::NativeTable { + typedef OneHotOptions TableType; + int32_t axis = 0; +}; + +struct OneHotOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table { + typedef OneHotOptionsT NativeTableType; + typedef OneHotOptionsBuilder Builder; + enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE { + VT_AXIS = 4 + }; + int32_t axis() const { + return GetField(VT_AXIS, 0); + } + bool Verify(flatbuffers::Verifier &verifier) const { + return VerifyTableStart(verifier) && + VerifyField(verifier, VT_AXIS, 4) && + verifier.EndTable(); + } + OneHotOptionsT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const; + void UnPackTo(OneHotOptionsT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const; + static flatbuffers::Offset Pack(flatbuffers::FlatBufferBuilder &_fbb, const OneHotOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); +}; + +struct OneHotOptionsBuilder { + typedef OneHotOptions Table; + flatbuffers::FlatBufferBuilder &fbb_; + flatbuffers::uoffset_t start_; + void add_axis(int32_t axis) { + fbb_.AddElement(OneHotOptions::VT_AXIS, axis, 0); + } + explicit OneHotOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb) + : fbb_(_fbb) { + start_ = fbb_.StartTable(); + } + flatbuffers::Offset Finish() { + const auto end = fbb_.EndTable(start_); + auto o = flatbuffers::Offset(end); + return o; + } +}; + +inline flatbuffers::Offset CreateOneHotOptions( + flatbuffers::FlatBufferBuilder &_fbb, + int32_t axis = 0) { + OneHotOptionsBuilder builder_(_fbb); + builder_.add_axis(axis); + return builder_.Finish(); +} + +flatbuffers::Offset CreateOneHotOptions(flatbuffers::FlatBufferBuilder &_fbb, const OneHotOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); + +struct AbsOptionsT : public 
flatbuffers::NativeTable { + typedef AbsOptions TableType; +}; + +struct AbsOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table { + typedef AbsOptionsT NativeTableType; + typedef AbsOptionsBuilder Builder; + bool Verify(flatbuffers::Verifier &verifier) const { + return VerifyTableStart(verifier) && + verifier.EndTable(); + } + AbsOptionsT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const; + void UnPackTo(AbsOptionsT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const; + static flatbuffers::Offset Pack(flatbuffers::FlatBufferBuilder &_fbb, const AbsOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); +}; + +struct AbsOptionsBuilder { + typedef AbsOptions Table; + flatbuffers::FlatBufferBuilder &fbb_; + flatbuffers::uoffset_t start_; + explicit AbsOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb) + : fbb_(_fbb) { + start_ = fbb_.StartTable(); + } + flatbuffers::Offset Finish() { + const auto end = fbb_.EndTable(start_); + auto o = flatbuffers::Offset(end); + return o; + } +}; + +inline flatbuffers::Offset CreateAbsOptions( + flatbuffers::FlatBufferBuilder &_fbb) { + AbsOptionsBuilder builder_(_fbb); + return builder_.Finish(); +} + +flatbuffers::Offset CreateAbsOptions(flatbuffers::FlatBufferBuilder &_fbb, const AbsOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); + +struct HardSwishOptionsT : public flatbuffers::NativeTable { + typedef HardSwishOptions TableType; +}; + +struct HardSwishOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table { + typedef HardSwishOptionsT NativeTableType; + typedef HardSwishOptionsBuilder Builder; + bool Verify(flatbuffers::Verifier &verifier) const { + return VerifyTableStart(verifier) && + verifier.EndTable(); + } + HardSwishOptionsT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const; + void UnPackTo(HardSwishOptionsT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const; + static 
flatbuffers::Offset Pack(flatbuffers::FlatBufferBuilder &_fbb, const HardSwishOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); +}; + +struct HardSwishOptionsBuilder { + typedef HardSwishOptions Table; + flatbuffers::FlatBufferBuilder &fbb_; + flatbuffers::uoffset_t start_; + explicit HardSwishOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb) + : fbb_(_fbb) { + start_ = fbb_.StartTable(); + } + flatbuffers::Offset Finish() { + const auto end = fbb_.EndTable(start_); + auto o = flatbuffers::Offset(end); + return o; + } +}; + +inline flatbuffers::Offset CreateHardSwishOptions( + flatbuffers::FlatBufferBuilder &_fbb) { + HardSwishOptionsBuilder builder_(_fbb); + return builder_.Finish(); +} + +flatbuffers::Offset CreateHardSwishOptions(flatbuffers::FlatBufferBuilder &_fbb, const HardSwishOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); + +struct LogicalAndOptionsT : public flatbuffers::NativeTable { + typedef LogicalAndOptions TableType; +}; + +struct LogicalAndOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table { + typedef LogicalAndOptionsT NativeTableType; + typedef LogicalAndOptionsBuilder Builder; + bool Verify(flatbuffers::Verifier &verifier) const { + return VerifyTableStart(verifier) && + verifier.EndTable(); + } + LogicalAndOptionsT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const; + void UnPackTo(LogicalAndOptionsT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const; + static flatbuffers::Offset Pack(flatbuffers::FlatBufferBuilder &_fbb, const LogicalAndOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); +}; + +struct LogicalAndOptionsBuilder { + typedef LogicalAndOptions Table; + flatbuffers::FlatBufferBuilder &fbb_; + flatbuffers::uoffset_t start_; + explicit LogicalAndOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb) + : fbb_(_fbb) { + start_ = fbb_.StartTable(); + } + flatbuffers::Offset Finish() { + const auto end = 
fbb_.EndTable(start_); + auto o = flatbuffers::Offset(end); + return o; + } +}; + +inline flatbuffers::Offset CreateLogicalAndOptions( + flatbuffers::FlatBufferBuilder &_fbb) { + LogicalAndOptionsBuilder builder_(_fbb); + return builder_.Finish(); +} + +flatbuffers::Offset CreateLogicalAndOptions(flatbuffers::FlatBufferBuilder &_fbb, const LogicalAndOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); + +struct LogicalNotOptionsT : public flatbuffers::NativeTable { + typedef LogicalNotOptions TableType; +}; + +struct LogicalNotOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table { + typedef LogicalNotOptionsT NativeTableType; + typedef LogicalNotOptionsBuilder Builder; + bool Verify(flatbuffers::Verifier &verifier) const { + return VerifyTableStart(verifier) && + verifier.EndTable(); + } + LogicalNotOptionsT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const; + void UnPackTo(LogicalNotOptionsT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const; + static flatbuffers::Offset Pack(flatbuffers::FlatBufferBuilder &_fbb, const LogicalNotOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); +}; + +struct LogicalNotOptionsBuilder { + typedef LogicalNotOptions Table; + flatbuffers::FlatBufferBuilder &fbb_; + flatbuffers::uoffset_t start_; + explicit LogicalNotOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb) + : fbb_(_fbb) { + start_ = fbb_.StartTable(); + } + flatbuffers::Offset Finish() { + const auto end = fbb_.EndTable(start_); + auto o = flatbuffers::Offset(end); + return o; + } +}; + +inline flatbuffers::Offset CreateLogicalNotOptions( + flatbuffers::FlatBufferBuilder &_fbb) { + LogicalNotOptionsBuilder builder_(_fbb); + return builder_.Finish(); +} + +flatbuffers::Offset CreateLogicalNotOptions(flatbuffers::FlatBufferBuilder &_fbb, const LogicalNotOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); + +struct UnpackOptionsT : public 
flatbuffers::NativeTable { + typedef UnpackOptions TableType; + int32_t num = 0; + int32_t axis = 0; +}; + +struct UnpackOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table { + typedef UnpackOptionsT NativeTableType; + typedef UnpackOptionsBuilder Builder; + enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE { + VT_NUM = 4, + VT_AXIS = 6 + }; + int32_t num() const { + return GetField(VT_NUM, 0); + } + int32_t axis() const { + return GetField(VT_AXIS, 0); + } + bool Verify(flatbuffers::Verifier &verifier) const { + return VerifyTableStart(verifier) && + VerifyField(verifier, VT_NUM, 4) && + VerifyField(verifier, VT_AXIS, 4) && + verifier.EndTable(); + } + UnpackOptionsT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const; + void UnPackTo(UnpackOptionsT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const; + static flatbuffers::Offset Pack(flatbuffers::FlatBufferBuilder &_fbb, const UnpackOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); +}; + +struct UnpackOptionsBuilder { + typedef UnpackOptions Table; + flatbuffers::FlatBufferBuilder &fbb_; + flatbuffers::uoffset_t start_; + void add_num(int32_t num) { + fbb_.AddElement(UnpackOptions::VT_NUM, num, 0); + } + void add_axis(int32_t axis) { + fbb_.AddElement(UnpackOptions::VT_AXIS, axis, 0); + } + explicit UnpackOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb) + : fbb_(_fbb) { + start_ = fbb_.StartTable(); + } + flatbuffers::Offset Finish() { + const auto end = fbb_.EndTable(start_); + auto o = flatbuffers::Offset(end); + return o; + } +}; + +inline flatbuffers::Offset CreateUnpackOptions( + flatbuffers::FlatBufferBuilder &_fbb, + int32_t num = 0, + int32_t axis = 0) { + UnpackOptionsBuilder builder_(_fbb); + builder_.add_axis(axis); + builder_.add_num(num); + return builder_.Finish(); +} + +flatbuffers::Offset CreateUnpackOptions(flatbuffers::FlatBufferBuilder &_fbb, const UnpackOptionsT *_o, const 
flatbuffers::rehasher_function_t *_rehasher = nullptr); + +struct FloorDivOptionsT : public flatbuffers::NativeTable { + typedef FloorDivOptions TableType; +}; + +struct FloorDivOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table { + typedef FloorDivOptionsT NativeTableType; + typedef FloorDivOptionsBuilder Builder; + bool Verify(flatbuffers::Verifier &verifier) const { + return VerifyTableStart(verifier) && + verifier.EndTable(); + } + FloorDivOptionsT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const; + void UnPackTo(FloorDivOptionsT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const; + static flatbuffers::Offset Pack(flatbuffers::FlatBufferBuilder &_fbb, const FloorDivOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); +}; + +struct FloorDivOptionsBuilder { + typedef FloorDivOptions Table; + flatbuffers::FlatBufferBuilder &fbb_; + flatbuffers::uoffset_t start_; + explicit FloorDivOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb) + : fbb_(_fbb) { + start_ = fbb_.StartTable(); + } + flatbuffers::Offset Finish() { + const auto end = fbb_.EndTable(start_); + auto o = flatbuffers::Offset(end); + return o; + } +}; + +inline flatbuffers::Offset CreateFloorDivOptions( + flatbuffers::FlatBufferBuilder &_fbb) { + FloorDivOptionsBuilder builder_(_fbb); + return builder_.Finish(); +} + +flatbuffers::Offset CreateFloorDivOptions(flatbuffers::FlatBufferBuilder &_fbb, const FloorDivOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); + +struct SquareOptionsT : public flatbuffers::NativeTable { + typedef SquareOptions TableType; +}; + +struct SquareOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table { + typedef SquareOptionsT NativeTableType; + typedef SquareOptionsBuilder Builder; + bool Verify(flatbuffers::Verifier &verifier) const { + return VerifyTableStart(verifier) && + verifier.EndTable(); + } + SquareOptionsT *UnPack(const flatbuffers::resolver_function_t 
*_resolver = nullptr) const; + void UnPackTo(SquareOptionsT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const; + static flatbuffers::Offset Pack(flatbuffers::FlatBufferBuilder &_fbb, const SquareOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); +}; + +struct SquareOptionsBuilder { + typedef SquareOptions Table; + flatbuffers::FlatBufferBuilder &fbb_; + flatbuffers::uoffset_t start_; + explicit SquareOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb) + : fbb_(_fbb) { + start_ = fbb_.StartTable(); + } + flatbuffers::Offset Finish() { + const auto end = fbb_.EndTable(start_); + auto o = flatbuffers::Offset(end); + return o; + } +}; + +inline flatbuffers::Offset CreateSquareOptions( + flatbuffers::FlatBufferBuilder &_fbb) { + SquareOptionsBuilder builder_(_fbb); + return builder_.Finish(); +} + +flatbuffers::Offset CreateSquareOptions(flatbuffers::FlatBufferBuilder &_fbb, const SquareOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); + +struct ZerosLikeOptionsT : public flatbuffers::NativeTable { + typedef ZerosLikeOptions TableType; +}; + +struct ZerosLikeOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table { + typedef ZerosLikeOptionsT NativeTableType; + typedef ZerosLikeOptionsBuilder Builder; + bool Verify(flatbuffers::Verifier &verifier) const { + return VerifyTableStart(verifier) && + verifier.EndTable(); + } + ZerosLikeOptionsT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const; + void UnPackTo(ZerosLikeOptionsT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const; + static flatbuffers::Offset Pack(flatbuffers::FlatBufferBuilder &_fbb, const ZerosLikeOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); +}; + +struct ZerosLikeOptionsBuilder { + typedef ZerosLikeOptions Table; + flatbuffers::FlatBufferBuilder &fbb_; + flatbuffers::uoffset_t start_; + explicit ZerosLikeOptionsBuilder(flatbuffers::FlatBufferBuilder 
&_fbb) + : fbb_(_fbb) { + start_ = fbb_.StartTable(); + } + flatbuffers::Offset Finish() { + const auto end = fbb_.EndTable(start_); + auto o = flatbuffers::Offset(end); + return o; + } +}; + +inline flatbuffers::Offset CreateZerosLikeOptions( + flatbuffers::FlatBufferBuilder &_fbb) { + ZerosLikeOptionsBuilder builder_(_fbb); + return builder_.Finish(); +} + +flatbuffers::Offset CreateZerosLikeOptions(flatbuffers::FlatBufferBuilder &_fbb, const ZerosLikeOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); + +struct FillOptionsT : public flatbuffers::NativeTable { + typedef FillOptions TableType; +}; + +struct FillOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table { + typedef FillOptionsT NativeTableType; + typedef FillOptionsBuilder Builder; + bool Verify(flatbuffers::Verifier &verifier) const { + return VerifyTableStart(verifier) && + verifier.EndTable(); + } + FillOptionsT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const; + void UnPackTo(FillOptionsT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const; + static flatbuffers::Offset Pack(flatbuffers::FlatBufferBuilder &_fbb, const FillOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); +}; + +struct FillOptionsBuilder { + typedef FillOptions Table; + flatbuffers::FlatBufferBuilder &fbb_; + flatbuffers::uoffset_t start_; + explicit FillOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb) + : fbb_(_fbb) { + start_ = fbb_.StartTable(); + } + flatbuffers::Offset Finish() { + const auto end = fbb_.EndTable(start_); + auto o = flatbuffers::Offset(end); + return o; + } +}; + +inline flatbuffers::Offset CreateFillOptions( + flatbuffers::FlatBufferBuilder &_fbb) { + FillOptionsBuilder builder_(_fbb); + return builder_.Finish(); +} + +flatbuffers::Offset CreateFillOptions(flatbuffers::FlatBufferBuilder &_fbb, const FillOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); + +struct FloorModOptionsT : 
public flatbuffers::NativeTable { + typedef FloorModOptions TableType; +}; + +struct FloorModOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table { + typedef FloorModOptionsT NativeTableType; + typedef FloorModOptionsBuilder Builder; + bool Verify(flatbuffers::Verifier &verifier) const { + return VerifyTableStart(verifier) && + verifier.EndTable(); + } + FloorModOptionsT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const; + void UnPackTo(FloorModOptionsT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const; + static flatbuffers::Offset Pack(flatbuffers::FlatBufferBuilder &_fbb, const FloorModOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); +}; + +struct FloorModOptionsBuilder { + typedef FloorModOptions Table; + flatbuffers::FlatBufferBuilder &fbb_; + flatbuffers::uoffset_t start_; + explicit FloorModOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb) + : fbb_(_fbb) { + start_ = fbb_.StartTable(); + } + flatbuffers::Offset Finish() { + const auto end = fbb_.EndTable(start_); + auto o = flatbuffers::Offset(end); + return o; + } +}; + +inline flatbuffers::Offset CreateFloorModOptions( + flatbuffers::FlatBufferBuilder &_fbb) { + FloorModOptionsBuilder builder_(_fbb); + return builder_.Finish(); +} + +flatbuffers::Offset CreateFloorModOptions(flatbuffers::FlatBufferBuilder &_fbb, const FloorModOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); + +struct RangeOptionsT : public flatbuffers::NativeTable { + typedef RangeOptions TableType; +}; + +struct RangeOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table { + typedef RangeOptionsT NativeTableType; + typedef RangeOptionsBuilder Builder; + bool Verify(flatbuffers::Verifier &verifier) const { + return VerifyTableStart(verifier) && + verifier.EndTable(); + } + RangeOptionsT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const; + void UnPackTo(RangeOptionsT *_o, const 
flatbuffers::resolver_function_t *_resolver = nullptr) const; + static flatbuffers::Offset Pack(flatbuffers::FlatBufferBuilder &_fbb, const RangeOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); +}; + +struct RangeOptionsBuilder { + typedef RangeOptions Table; + flatbuffers::FlatBufferBuilder &fbb_; + flatbuffers::uoffset_t start_; + explicit RangeOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb) + : fbb_(_fbb) { + start_ = fbb_.StartTable(); + } + flatbuffers::Offset Finish() { + const auto end = fbb_.EndTable(start_); + auto o = flatbuffers::Offset(end); + return o; + } +}; + +inline flatbuffers::Offset CreateRangeOptions( + flatbuffers::FlatBufferBuilder &_fbb) { + RangeOptionsBuilder builder_(_fbb); + return builder_.Finish(); +} + +flatbuffers::Offset CreateRangeOptions(flatbuffers::FlatBufferBuilder &_fbb, const RangeOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); + +struct LeakyReluOptionsT : public flatbuffers::NativeTable { + typedef LeakyReluOptions TableType; + float alpha = 0.0f; +}; + +struct LeakyReluOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table { + typedef LeakyReluOptionsT NativeTableType; + typedef LeakyReluOptionsBuilder Builder; + enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE { + VT_ALPHA = 4 + }; + float alpha() const { + return GetField(VT_ALPHA, 0.0f); + } + bool Verify(flatbuffers::Verifier &verifier) const { + return VerifyTableStart(verifier) && + VerifyField(verifier, VT_ALPHA, 4) && + verifier.EndTable(); + } + LeakyReluOptionsT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const; + void UnPackTo(LeakyReluOptionsT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const; + static flatbuffers::Offset Pack(flatbuffers::FlatBufferBuilder &_fbb, const LeakyReluOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); +}; + +struct LeakyReluOptionsBuilder { + typedef LeakyReluOptions Table; + 
flatbuffers::FlatBufferBuilder &fbb_; + flatbuffers::uoffset_t start_; + void add_alpha(float alpha) { + fbb_.AddElement(LeakyReluOptions::VT_ALPHA, alpha, 0.0f); + } + explicit LeakyReluOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb) + : fbb_(_fbb) { + start_ = fbb_.StartTable(); + } + flatbuffers::Offset Finish() { + const auto end = fbb_.EndTable(start_); + auto o = flatbuffers::Offset(end); + return o; + } +}; + +inline flatbuffers::Offset CreateLeakyReluOptions( + flatbuffers::FlatBufferBuilder &_fbb, + float alpha = 0.0f) { + LeakyReluOptionsBuilder builder_(_fbb); + builder_.add_alpha(alpha); + return builder_.Finish(); +} + +flatbuffers::Offset CreateLeakyReluOptions(flatbuffers::FlatBufferBuilder &_fbb, const LeakyReluOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); + +struct SquaredDifferenceOptionsT : public flatbuffers::NativeTable { + typedef SquaredDifferenceOptions TableType; +}; + +struct SquaredDifferenceOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table { + typedef SquaredDifferenceOptionsT NativeTableType; + typedef SquaredDifferenceOptionsBuilder Builder; + bool Verify(flatbuffers::Verifier &verifier) const { + return VerifyTableStart(verifier) && + verifier.EndTable(); + } + SquaredDifferenceOptionsT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const; + void UnPackTo(SquaredDifferenceOptionsT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const; + static flatbuffers::Offset Pack(flatbuffers::FlatBufferBuilder &_fbb, const SquaredDifferenceOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); +}; + +struct SquaredDifferenceOptionsBuilder { + typedef SquaredDifferenceOptions Table; + flatbuffers::FlatBufferBuilder &fbb_; + flatbuffers::uoffset_t start_; + explicit SquaredDifferenceOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb) + : fbb_(_fbb) { + start_ = fbb_.StartTable(); + } + flatbuffers::Offset Finish() { + const auto end = 
fbb_.EndTable(start_); + auto o = flatbuffers::Offset(end); + return o; + } +}; + +inline flatbuffers::Offset CreateSquaredDifferenceOptions( + flatbuffers::FlatBufferBuilder &_fbb) { + SquaredDifferenceOptionsBuilder builder_(_fbb); + return builder_.Finish(); +} + +flatbuffers::Offset CreateSquaredDifferenceOptions(flatbuffers::FlatBufferBuilder &_fbb, const SquaredDifferenceOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); + +struct MirrorPadOptionsT : public flatbuffers::NativeTable { + typedef MirrorPadOptions TableType; + tflite::MirrorPadMode mode = tflite::MirrorPadMode_REFLECT; +}; + +struct MirrorPadOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table { + typedef MirrorPadOptionsT NativeTableType; + typedef MirrorPadOptionsBuilder Builder; + enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE { + VT_MODE = 4 + }; + tflite::MirrorPadMode mode() const { + return static_cast(GetField(VT_MODE, 0)); + } + bool Verify(flatbuffers::Verifier &verifier) const { + return VerifyTableStart(verifier) && + VerifyField(verifier, VT_MODE, 1) && + verifier.EndTable(); + } + MirrorPadOptionsT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const; + void UnPackTo(MirrorPadOptionsT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const; + static flatbuffers::Offset Pack(flatbuffers::FlatBufferBuilder &_fbb, const MirrorPadOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); +}; + +struct MirrorPadOptionsBuilder { + typedef MirrorPadOptions Table; + flatbuffers::FlatBufferBuilder &fbb_; + flatbuffers::uoffset_t start_; + void add_mode(tflite::MirrorPadMode mode) { + fbb_.AddElement(MirrorPadOptions::VT_MODE, static_cast(mode), 0); + } + explicit MirrorPadOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb) + : fbb_(_fbb) { + start_ = fbb_.StartTable(); + } + flatbuffers::Offset Finish() { + const auto end = fbb_.EndTable(start_); + auto o = flatbuffers::Offset(end); + 
return o; + } +}; + +inline flatbuffers::Offset CreateMirrorPadOptions( + flatbuffers::FlatBufferBuilder &_fbb, + tflite::MirrorPadMode mode = tflite::MirrorPadMode_REFLECT) { + MirrorPadOptionsBuilder builder_(_fbb); + builder_.add_mode(mode); + return builder_.Finish(); +} + +flatbuffers::Offset CreateMirrorPadOptions(flatbuffers::FlatBufferBuilder &_fbb, const MirrorPadOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); + +struct UniqueOptionsT : public flatbuffers::NativeTable { + typedef UniqueOptions TableType; + tflite::TensorType idx_out_type = tflite::TensorType_INT32; +}; + +struct UniqueOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table { + typedef UniqueOptionsT NativeTableType; + typedef UniqueOptionsBuilder Builder; + enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE { + VT_IDX_OUT_TYPE = 4 + }; + tflite::TensorType idx_out_type() const { + return static_cast(GetField(VT_IDX_OUT_TYPE, 2)); + } + bool Verify(flatbuffers::Verifier &verifier) const { + return VerifyTableStart(verifier) && + VerifyField(verifier, VT_IDX_OUT_TYPE, 1) && + verifier.EndTable(); + } + UniqueOptionsT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const; + void UnPackTo(UniqueOptionsT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const; + static flatbuffers::Offset Pack(flatbuffers::FlatBufferBuilder &_fbb, const UniqueOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); +}; + +struct UniqueOptionsBuilder { + typedef UniqueOptions Table; + flatbuffers::FlatBufferBuilder &fbb_; + flatbuffers::uoffset_t start_; + void add_idx_out_type(tflite::TensorType idx_out_type) { + fbb_.AddElement(UniqueOptions::VT_IDX_OUT_TYPE, static_cast(idx_out_type), 2); + } + explicit UniqueOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb) + : fbb_(_fbb) { + start_ = fbb_.StartTable(); + } + flatbuffers::Offset Finish() { + const auto end = fbb_.EndTable(start_); + auto o = 
flatbuffers::Offset(end); + return o; + } +}; + +inline flatbuffers::Offset CreateUniqueOptions( + flatbuffers::FlatBufferBuilder &_fbb, + tflite::TensorType idx_out_type = tflite::TensorType_INT32) { + UniqueOptionsBuilder builder_(_fbb); + builder_.add_idx_out_type(idx_out_type); + return builder_.Finish(); +} + +flatbuffers::Offset CreateUniqueOptions(flatbuffers::FlatBufferBuilder &_fbb, const UniqueOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); + +struct ReverseV2OptionsT : public flatbuffers::NativeTable { + typedef ReverseV2Options TableType; +}; + +struct ReverseV2Options FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table { + typedef ReverseV2OptionsT NativeTableType; + typedef ReverseV2OptionsBuilder Builder; + bool Verify(flatbuffers::Verifier &verifier) const { + return VerifyTableStart(verifier) && + verifier.EndTable(); + } + ReverseV2OptionsT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const; + void UnPackTo(ReverseV2OptionsT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const; + static flatbuffers::Offset Pack(flatbuffers::FlatBufferBuilder &_fbb, const ReverseV2OptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); +}; + +struct ReverseV2OptionsBuilder { + typedef ReverseV2Options Table; + flatbuffers::FlatBufferBuilder &fbb_; + flatbuffers::uoffset_t start_; + explicit ReverseV2OptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb) + : fbb_(_fbb) { + start_ = fbb_.StartTable(); + } + flatbuffers::Offset Finish() { + const auto end = fbb_.EndTable(start_); + auto o = flatbuffers::Offset(end); + return o; + } +}; + +inline flatbuffers::Offset CreateReverseV2Options( + flatbuffers::FlatBufferBuilder &_fbb) { + ReverseV2OptionsBuilder builder_(_fbb); + return builder_.Finish(); +} + +flatbuffers::Offset CreateReverseV2Options(flatbuffers::FlatBufferBuilder &_fbb, const ReverseV2OptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); + 
+struct AddNOptionsT : public flatbuffers::NativeTable { + typedef AddNOptions TableType; +}; + +struct AddNOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table { + typedef AddNOptionsT NativeTableType; + typedef AddNOptionsBuilder Builder; + bool Verify(flatbuffers::Verifier &verifier) const { + return VerifyTableStart(verifier) && + verifier.EndTable(); + } + AddNOptionsT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const; + void UnPackTo(AddNOptionsT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const; + static flatbuffers::Offset Pack(flatbuffers::FlatBufferBuilder &_fbb, const AddNOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); +}; + +struct AddNOptionsBuilder { + typedef AddNOptions Table; + flatbuffers::FlatBufferBuilder &fbb_; + flatbuffers::uoffset_t start_; + explicit AddNOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb) + : fbb_(_fbb) { + start_ = fbb_.StartTable(); + } + flatbuffers::Offset Finish() { + const auto end = fbb_.EndTable(start_); + auto o = flatbuffers::Offset(end); + return o; + } +}; + +inline flatbuffers::Offset CreateAddNOptions( + flatbuffers::FlatBufferBuilder &_fbb) { + AddNOptionsBuilder builder_(_fbb); + return builder_.Finish(); +} + +flatbuffers::Offset CreateAddNOptions(flatbuffers::FlatBufferBuilder &_fbb, const AddNOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); + +struct GatherNdOptionsT : public flatbuffers::NativeTable { + typedef GatherNdOptions TableType; +}; + +struct GatherNdOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table { + typedef GatherNdOptionsT NativeTableType; + typedef GatherNdOptionsBuilder Builder; + bool Verify(flatbuffers::Verifier &verifier) const { + return VerifyTableStart(verifier) && + verifier.EndTable(); + } + GatherNdOptionsT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const; + void UnPackTo(GatherNdOptionsT *_o, const flatbuffers::resolver_function_t 
*_resolver = nullptr) const; + static flatbuffers::Offset Pack(flatbuffers::FlatBufferBuilder &_fbb, const GatherNdOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); +}; + +struct GatherNdOptionsBuilder { + typedef GatherNdOptions Table; + flatbuffers::FlatBufferBuilder &fbb_; + flatbuffers::uoffset_t start_; + explicit GatherNdOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb) + : fbb_(_fbb) { + start_ = fbb_.StartTable(); + } + flatbuffers::Offset Finish() { + const auto end = fbb_.EndTable(start_); + auto o = flatbuffers::Offset(end); + return o; + } +}; + +inline flatbuffers::Offset CreateGatherNdOptions( + flatbuffers::FlatBufferBuilder &_fbb) { + GatherNdOptionsBuilder builder_(_fbb); + return builder_.Finish(); +} + +flatbuffers::Offset CreateGatherNdOptions(flatbuffers::FlatBufferBuilder &_fbb, const GatherNdOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); + +struct WhereOptionsT : public flatbuffers::NativeTable { + typedef WhereOptions TableType; +}; + +struct WhereOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table { + typedef WhereOptionsT NativeTableType; + typedef WhereOptionsBuilder Builder; + bool Verify(flatbuffers::Verifier &verifier) const { + return VerifyTableStart(verifier) && + verifier.EndTable(); + } + WhereOptionsT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const; + void UnPackTo(WhereOptionsT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const; + static flatbuffers::Offset Pack(flatbuffers::FlatBufferBuilder &_fbb, const WhereOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); +}; + +struct WhereOptionsBuilder { + typedef WhereOptions Table; + flatbuffers::FlatBufferBuilder &fbb_; + flatbuffers::uoffset_t start_; + explicit WhereOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb) + : fbb_(_fbb) { + start_ = fbb_.StartTable(); + } + flatbuffers::Offset Finish() { + const auto end = fbb_.EndTable(start_); + 
auto o = flatbuffers::Offset(end); + return o; + } +}; + +inline flatbuffers::Offset CreateWhereOptions( + flatbuffers::FlatBufferBuilder &_fbb) { + WhereOptionsBuilder builder_(_fbb); + return builder_.Finish(); +} + +flatbuffers::Offset CreateWhereOptions(flatbuffers::FlatBufferBuilder &_fbb, const WhereOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); + +struct ReverseSequenceOptionsT : public flatbuffers::NativeTable { + typedef ReverseSequenceOptions TableType; + int32_t seq_dim = 0; + int32_t batch_dim = 0; +}; + +struct ReverseSequenceOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table { + typedef ReverseSequenceOptionsT NativeTableType; + typedef ReverseSequenceOptionsBuilder Builder; + enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE { + VT_SEQ_DIM = 4, + VT_BATCH_DIM = 6 + }; + int32_t seq_dim() const { + return GetField(VT_SEQ_DIM, 0); + } + int32_t batch_dim() const { + return GetField(VT_BATCH_DIM, 0); + } + bool Verify(flatbuffers::Verifier &verifier) const { + return VerifyTableStart(verifier) && + VerifyField(verifier, VT_SEQ_DIM, 4) && + VerifyField(verifier, VT_BATCH_DIM, 4) && + verifier.EndTable(); + } + ReverseSequenceOptionsT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const; + void UnPackTo(ReverseSequenceOptionsT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const; + static flatbuffers::Offset Pack(flatbuffers::FlatBufferBuilder &_fbb, const ReverseSequenceOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); +}; + +struct ReverseSequenceOptionsBuilder { + typedef ReverseSequenceOptions Table; + flatbuffers::FlatBufferBuilder &fbb_; + flatbuffers::uoffset_t start_; + void add_seq_dim(int32_t seq_dim) { + fbb_.AddElement(ReverseSequenceOptions::VT_SEQ_DIM, seq_dim, 0); + } + void add_batch_dim(int32_t batch_dim) { + fbb_.AddElement(ReverseSequenceOptions::VT_BATCH_DIM, batch_dim, 0); + } + explicit 
ReverseSequenceOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb) + : fbb_(_fbb) { + start_ = fbb_.StartTable(); + } + flatbuffers::Offset Finish() { + const auto end = fbb_.EndTable(start_); + auto o = flatbuffers::Offset(end); + return o; + } +}; + +inline flatbuffers::Offset CreateReverseSequenceOptions( + flatbuffers::FlatBufferBuilder &_fbb, + int32_t seq_dim = 0, + int32_t batch_dim = 0) { + ReverseSequenceOptionsBuilder builder_(_fbb); + builder_.add_batch_dim(batch_dim); + builder_.add_seq_dim(seq_dim); + return builder_.Finish(); +} + +flatbuffers::Offset CreateReverseSequenceOptions(flatbuffers::FlatBufferBuilder &_fbb, const ReverseSequenceOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); + +struct MatrixDiagOptionsT : public flatbuffers::NativeTable { + typedef MatrixDiagOptions TableType; +}; + +struct MatrixDiagOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table { + typedef MatrixDiagOptionsT NativeTableType; + typedef MatrixDiagOptionsBuilder Builder; + bool Verify(flatbuffers::Verifier &verifier) const { + return VerifyTableStart(verifier) && + verifier.EndTable(); + } + MatrixDiagOptionsT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const; + void UnPackTo(MatrixDiagOptionsT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const; + static flatbuffers::Offset Pack(flatbuffers::FlatBufferBuilder &_fbb, const MatrixDiagOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); +}; + +struct MatrixDiagOptionsBuilder { + typedef MatrixDiagOptions Table; + flatbuffers::FlatBufferBuilder &fbb_; + flatbuffers::uoffset_t start_; + explicit MatrixDiagOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb) + : fbb_(_fbb) { + start_ = fbb_.StartTable(); + } + flatbuffers::Offset Finish() { + const auto end = fbb_.EndTable(start_); + auto o = flatbuffers::Offset(end); + return o; + } +}; + +inline flatbuffers::Offset CreateMatrixDiagOptions( + 
flatbuffers::FlatBufferBuilder &_fbb) { + MatrixDiagOptionsBuilder builder_(_fbb); + return builder_.Finish(); +} + +flatbuffers::Offset CreateMatrixDiagOptions(flatbuffers::FlatBufferBuilder &_fbb, const MatrixDiagOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); + +struct QuantizeOptionsT : public flatbuffers::NativeTable { + typedef QuantizeOptions TableType; +}; + +struct QuantizeOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table { + typedef QuantizeOptionsT NativeTableType; + typedef QuantizeOptionsBuilder Builder; + bool Verify(flatbuffers::Verifier &verifier) const { + return VerifyTableStart(verifier) && + verifier.EndTable(); + } + QuantizeOptionsT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const; + void UnPackTo(QuantizeOptionsT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const; + static flatbuffers::Offset Pack(flatbuffers::FlatBufferBuilder &_fbb, const QuantizeOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); +}; + +struct QuantizeOptionsBuilder { + typedef QuantizeOptions Table; + flatbuffers::FlatBufferBuilder &fbb_; + flatbuffers::uoffset_t start_; + explicit QuantizeOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb) + : fbb_(_fbb) { + start_ = fbb_.StartTable(); + } + flatbuffers::Offset Finish() { + const auto end = fbb_.EndTable(start_); + auto o = flatbuffers::Offset(end); + return o; + } +}; + +inline flatbuffers::Offset CreateQuantizeOptions( + flatbuffers::FlatBufferBuilder &_fbb) { + QuantizeOptionsBuilder builder_(_fbb); + return builder_.Finish(); +} + +flatbuffers::Offset CreateQuantizeOptions(flatbuffers::FlatBufferBuilder &_fbb, const QuantizeOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); + +struct MatrixSetDiagOptionsT : public flatbuffers::NativeTable { + typedef MatrixSetDiagOptions TableType; +}; + +struct MatrixSetDiagOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table { + typedef 
MatrixSetDiagOptionsT NativeTableType; + typedef MatrixSetDiagOptionsBuilder Builder; + bool Verify(flatbuffers::Verifier &verifier) const { + return VerifyTableStart(verifier) && + verifier.EndTable(); + } + MatrixSetDiagOptionsT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const; + void UnPackTo(MatrixSetDiagOptionsT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const; + static flatbuffers::Offset Pack(flatbuffers::FlatBufferBuilder &_fbb, const MatrixSetDiagOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); +}; + +struct MatrixSetDiagOptionsBuilder { + typedef MatrixSetDiagOptions Table; + flatbuffers::FlatBufferBuilder &fbb_; + flatbuffers::uoffset_t start_; + explicit MatrixSetDiagOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb) + : fbb_(_fbb) { + start_ = fbb_.StartTable(); + } + flatbuffers::Offset Finish() { + const auto end = fbb_.EndTable(start_); + auto o = flatbuffers::Offset(end); + return o; + } +}; + +inline flatbuffers::Offset CreateMatrixSetDiagOptions( + flatbuffers::FlatBufferBuilder &_fbb) { + MatrixSetDiagOptionsBuilder builder_(_fbb); + return builder_.Finish(); +} + +flatbuffers::Offset CreateMatrixSetDiagOptions(flatbuffers::FlatBufferBuilder &_fbb, const MatrixSetDiagOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); + +struct IfOptionsT : public flatbuffers::NativeTable { + typedef IfOptions TableType; + int32_t then_subgraph_index = 0; + int32_t else_subgraph_index = 0; +}; + +struct IfOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table { + typedef IfOptionsT NativeTableType; + typedef IfOptionsBuilder Builder; + enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE { + VT_THEN_SUBGRAPH_INDEX = 4, + VT_ELSE_SUBGRAPH_INDEX = 6 + }; + int32_t then_subgraph_index() const { + return GetField(VT_THEN_SUBGRAPH_INDEX, 0); + } + int32_t else_subgraph_index() const { + return GetField(VT_ELSE_SUBGRAPH_INDEX, 0); + } + bool 
Verify(flatbuffers::Verifier &verifier) const { + return VerifyTableStart(verifier) && + VerifyField(verifier, VT_THEN_SUBGRAPH_INDEX, 4) && + VerifyField(verifier, VT_ELSE_SUBGRAPH_INDEX, 4) && + verifier.EndTable(); + } + IfOptionsT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const; + void UnPackTo(IfOptionsT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const; + static flatbuffers::Offset Pack(flatbuffers::FlatBufferBuilder &_fbb, const IfOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); +}; + +struct IfOptionsBuilder { + typedef IfOptions Table; + flatbuffers::FlatBufferBuilder &fbb_; + flatbuffers::uoffset_t start_; + void add_then_subgraph_index(int32_t then_subgraph_index) { + fbb_.AddElement(IfOptions::VT_THEN_SUBGRAPH_INDEX, then_subgraph_index, 0); + } + void add_else_subgraph_index(int32_t else_subgraph_index) { + fbb_.AddElement(IfOptions::VT_ELSE_SUBGRAPH_INDEX, else_subgraph_index, 0); + } + explicit IfOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb) + : fbb_(_fbb) { + start_ = fbb_.StartTable(); + } + flatbuffers::Offset Finish() { + const auto end = fbb_.EndTable(start_); + auto o = flatbuffers::Offset(end); + return o; + } +}; + +inline flatbuffers::Offset CreateIfOptions( + flatbuffers::FlatBufferBuilder &_fbb, + int32_t then_subgraph_index = 0, + int32_t else_subgraph_index = 0) { + IfOptionsBuilder builder_(_fbb); + builder_.add_else_subgraph_index(else_subgraph_index); + builder_.add_then_subgraph_index(then_subgraph_index); + return builder_.Finish(); +} + +flatbuffers::Offset CreateIfOptions(flatbuffers::FlatBufferBuilder &_fbb, const IfOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); + +struct CallOnceOptionsT : public flatbuffers::NativeTable { + typedef CallOnceOptions TableType; + int32_t init_subgraph_index = 0; +}; + +struct CallOnceOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table { + typedef CallOnceOptionsT 
NativeTableType; + typedef CallOnceOptionsBuilder Builder; + enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE { + VT_INIT_SUBGRAPH_INDEX = 4 + }; + int32_t init_subgraph_index() const { + return GetField(VT_INIT_SUBGRAPH_INDEX, 0); + } + bool Verify(flatbuffers::Verifier &verifier) const { + return VerifyTableStart(verifier) && + VerifyField(verifier, VT_INIT_SUBGRAPH_INDEX, 4) && + verifier.EndTable(); + } + CallOnceOptionsT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const; + void UnPackTo(CallOnceOptionsT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const; + static flatbuffers::Offset Pack(flatbuffers::FlatBufferBuilder &_fbb, const CallOnceOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); +}; + +struct CallOnceOptionsBuilder { + typedef CallOnceOptions Table; + flatbuffers::FlatBufferBuilder &fbb_; + flatbuffers::uoffset_t start_; + void add_init_subgraph_index(int32_t init_subgraph_index) { + fbb_.AddElement(CallOnceOptions::VT_INIT_SUBGRAPH_INDEX, init_subgraph_index, 0); + } + explicit CallOnceOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb) + : fbb_(_fbb) { + start_ = fbb_.StartTable(); + } + flatbuffers::Offset Finish() { + const auto end = fbb_.EndTable(start_); + auto o = flatbuffers::Offset(end); + return o; + } +}; + +inline flatbuffers::Offset CreateCallOnceOptions( + flatbuffers::FlatBufferBuilder &_fbb, + int32_t init_subgraph_index = 0) { + CallOnceOptionsBuilder builder_(_fbb); + builder_.add_init_subgraph_index(init_subgraph_index); + return builder_.Finish(); +} + +flatbuffers::Offset CreateCallOnceOptions(flatbuffers::FlatBufferBuilder &_fbb, const CallOnceOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); + +struct WhileOptionsT : public flatbuffers::NativeTable { + typedef WhileOptions TableType; + int32_t cond_subgraph_index = 0; + int32_t body_subgraph_index = 0; +}; + +struct WhileOptions FLATBUFFERS_FINAL_CLASS : private 
flatbuffers::Table { + typedef WhileOptionsT NativeTableType; + typedef WhileOptionsBuilder Builder; + enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE { + VT_COND_SUBGRAPH_INDEX = 4, + VT_BODY_SUBGRAPH_INDEX = 6 + }; + int32_t cond_subgraph_index() const { + return GetField(VT_COND_SUBGRAPH_INDEX, 0); + } + int32_t body_subgraph_index() const { + return GetField(VT_BODY_SUBGRAPH_INDEX, 0); + } + bool Verify(flatbuffers::Verifier &verifier) const { + return VerifyTableStart(verifier) && + VerifyField(verifier, VT_COND_SUBGRAPH_INDEX, 4) && + VerifyField(verifier, VT_BODY_SUBGRAPH_INDEX, 4) && + verifier.EndTable(); + } + WhileOptionsT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const; + void UnPackTo(WhileOptionsT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const; + static flatbuffers::Offset Pack(flatbuffers::FlatBufferBuilder &_fbb, const WhileOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); +}; + +struct WhileOptionsBuilder { + typedef WhileOptions Table; + flatbuffers::FlatBufferBuilder &fbb_; + flatbuffers::uoffset_t start_; + void add_cond_subgraph_index(int32_t cond_subgraph_index) { + fbb_.AddElement(WhileOptions::VT_COND_SUBGRAPH_INDEX, cond_subgraph_index, 0); + } + void add_body_subgraph_index(int32_t body_subgraph_index) { + fbb_.AddElement(WhileOptions::VT_BODY_SUBGRAPH_INDEX, body_subgraph_index, 0); + } + explicit WhileOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb) + : fbb_(_fbb) { + start_ = fbb_.StartTable(); + } + flatbuffers::Offset Finish() { + const auto end = fbb_.EndTable(start_); + auto o = flatbuffers::Offset(end); + return o; + } +}; + +inline flatbuffers::Offset CreateWhileOptions( + flatbuffers::FlatBufferBuilder &_fbb, + int32_t cond_subgraph_index = 0, + int32_t body_subgraph_index = 0) { + WhileOptionsBuilder builder_(_fbb); + builder_.add_body_subgraph_index(body_subgraph_index); + builder_.add_cond_subgraph_index(cond_subgraph_index); + 
return builder_.Finish(); +} + +flatbuffers::Offset CreateWhileOptions(flatbuffers::FlatBufferBuilder &_fbb, const WhileOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); + +struct NonMaxSuppressionV4OptionsT : public flatbuffers::NativeTable { + typedef NonMaxSuppressionV4Options TableType; +}; + +struct NonMaxSuppressionV4Options FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table { + typedef NonMaxSuppressionV4OptionsT NativeTableType; + typedef NonMaxSuppressionV4OptionsBuilder Builder; + bool Verify(flatbuffers::Verifier &verifier) const { + return VerifyTableStart(verifier) && + verifier.EndTable(); + } + NonMaxSuppressionV4OptionsT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const; + void UnPackTo(NonMaxSuppressionV4OptionsT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const; + static flatbuffers::Offset Pack(flatbuffers::FlatBufferBuilder &_fbb, const NonMaxSuppressionV4OptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); +}; + +struct NonMaxSuppressionV4OptionsBuilder { + typedef NonMaxSuppressionV4Options Table; + flatbuffers::FlatBufferBuilder &fbb_; + flatbuffers::uoffset_t start_; + explicit NonMaxSuppressionV4OptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb) + : fbb_(_fbb) { + start_ = fbb_.StartTable(); + } + flatbuffers::Offset Finish() { + const auto end = fbb_.EndTable(start_); + auto o = flatbuffers::Offset(end); + return o; + } +}; + +inline flatbuffers::Offset CreateNonMaxSuppressionV4Options( + flatbuffers::FlatBufferBuilder &_fbb) { + NonMaxSuppressionV4OptionsBuilder builder_(_fbb); + return builder_.Finish(); +} + +flatbuffers::Offset CreateNonMaxSuppressionV4Options(flatbuffers::FlatBufferBuilder &_fbb, const NonMaxSuppressionV4OptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); + +struct NonMaxSuppressionV5OptionsT : public flatbuffers::NativeTable { + typedef NonMaxSuppressionV5Options TableType; +}; + +struct 
NonMaxSuppressionV5Options FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table { + typedef NonMaxSuppressionV5OptionsT NativeTableType; + typedef NonMaxSuppressionV5OptionsBuilder Builder; + bool Verify(flatbuffers::Verifier &verifier) const { + return VerifyTableStart(verifier) && + verifier.EndTable(); + } + NonMaxSuppressionV5OptionsT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const; + void UnPackTo(NonMaxSuppressionV5OptionsT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const; + static flatbuffers::Offset Pack(flatbuffers::FlatBufferBuilder &_fbb, const NonMaxSuppressionV5OptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); +}; + +struct NonMaxSuppressionV5OptionsBuilder { + typedef NonMaxSuppressionV5Options Table; + flatbuffers::FlatBufferBuilder &fbb_; + flatbuffers::uoffset_t start_; + explicit NonMaxSuppressionV5OptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb) + : fbb_(_fbb) { + start_ = fbb_.StartTable(); + } + flatbuffers::Offset Finish() { + const auto end = fbb_.EndTable(start_); + auto o = flatbuffers::Offset(end); + return o; + } +}; + +inline flatbuffers::Offset CreateNonMaxSuppressionV5Options( + flatbuffers::FlatBufferBuilder &_fbb) { + NonMaxSuppressionV5OptionsBuilder builder_(_fbb); + return builder_.Finish(); +} + +flatbuffers::Offset CreateNonMaxSuppressionV5Options(flatbuffers::FlatBufferBuilder &_fbb, const NonMaxSuppressionV5OptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); + +struct ScatterNdOptionsT : public flatbuffers::NativeTable { + typedef ScatterNdOptions TableType; +}; + +struct ScatterNdOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table { + typedef ScatterNdOptionsT NativeTableType; + typedef ScatterNdOptionsBuilder Builder; + bool Verify(flatbuffers::Verifier &verifier) const { + return VerifyTableStart(verifier) && + verifier.EndTable(); + } + ScatterNdOptionsT *UnPack(const flatbuffers::resolver_function_t *_resolver 
= nullptr) const; + void UnPackTo(ScatterNdOptionsT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const; + static flatbuffers::Offset Pack(flatbuffers::FlatBufferBuilder &_fbb, const ScatterNdOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); +}; + +struct ScatterNdOptionsBuilder { + typedef ScatterNdOptions Table; + flatbuffers::FlatBufferBuilder &fbb_; + flatbuffers::uoffset_t start_; + explicit ScatterNdOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb) + : fbb_(_fbb) { + start_ = fbb_.StartTable(); + } + flatbuffers::Offset Finish() { + const auto end = fbb_.EndTable(start_); + auto o = flatbuffers::Offset(end); + return o; + } +}; + +inline flatbuffers::Offset CreateScatterNdOptions( + flatbuffers::FlatBufferBuilder &_fbb) { + ScatterNdOptionsBuilder builder_(_fbb); + return builder_.Finish(); +} + +flatbuffers::Offset CreateScatterNdOptions(flatbuffers::FlatBufferBuilder &_fbb, const ScatterNdOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); + +struct SelectV2OptionsT : public flatbuffers::NativeTable { + typedef SelectV2Options TableType; +}; + +struct SelectV2Options FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table { + typedef SelectV2OptionsT NativeTableType; + typedef SelectV2OptionsBuilder Builder; + bool Verify(flatbuffers::Verifier &verifier) const { + return VerifyTableStart(verifier) && + verifier.EndTable(); + } + SelectV2OptionsT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const; + void UnPackTo(SelectV2OptionsT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const; + static flatbuffers::Offset Pack(flatbuffers::FlatBufferBuilder &_fbb, const SelectV2OptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); +}; + +struct SelectV2OptionsBuilder { + typedef SelectV2Options Table; + flatbuffers::FlatBufferBuilder &fbb_; + flatbuffers::uoffset_t start_; + explicit SelectV2OptionsBuilder(flatbuffers::FlatBufferBuilder 
&_fbb) + : fbb_(_fbb) { + start_ = fbb_.StartTable(); + } + flatbuffers::Offset Finish() { + const auto end = fbb_.EndTable(start_); + auto o = flatbuffers::Offset(end); + return o; + } +}; + +inline flatbuffers::Offset CreateSelectV2Options( + flatbuffers::FlatBufferBuilder &_fbb) { + SelectV2OptionsBuilder builder_(_fbb); + return builder_.Finish(); +} + +flatbuffers::Offset CreateSelectV2Options(flatbuffers::FlatBufferBuilder &_fbb, const SelectV2OptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); + +struct DensifyOptionsT : public flatbuffers::NativeTable { + typedef DensifyOptions TableType; +}; + +struct DensifyOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table { + typedef DensifyOptionsT NativeTableType; + typedef DensifyOptionsBuilder Builder; + bool Verify(flatbuffers::Verifier &verifier) const { + return VerifyTableStart(verifier) && + verifier.EndTable(); + } + DensifyOptionsT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const; + void UnPackTo(DensifyOptionsT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const; + static flatbuffers::Offset Pack(flatbuffers::FlatBufferBuilder &_fbb, const DensifyOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); +}; + +struct DensifyOptionsBuilder { + typedef DensifyOptions Table; + flatbuffers::FlatBufferBuilder &fbb_; + flatbuffers::uoffset_t start_; + explicit DensifyOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb) + : fbb_(_fbb) { + start_ = fbb_.StartTable(); + } + flatbuffers::Offset Finish() { + const auto end = fbb_.EndTable(start_); + auto o = flatbuffers::Offset(end); + return o; + } +}; + +inline flatbuffers::Offset CreateDensifyOptions( + flatbuffers::FlatBufferBuilder &_fbb) { + DensifyOptionsBuilder builder_(_fbb); + return builder_.Finish(); +} + +flatbuffers::Offset CreateDensifyOptions(flatbuffers::FlatBufferBuilder &_fbb, const DensifyOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher = 
nullptr); + +struct SegmentSumOptionsT : public flatbuffers::NativeTable { + typedef SegmentSumOptions TableType; +}; + +struct SegmentSumOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table { + typedef SegmentSumOptionsT NativeTableType; + typedef SegmentSumOptionsBuilder Builder; + bool Verify(flatbuffers::Verifier &verifier) const { + return VerifyTableStart(verifier) && + verifier.EndTable(); + } + SegmentSumOptionsT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const; + void UnPackTo(SegmentSumOptionsT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const; + static flatbuffers::Offset Pack(flatbuffers::FlatBufferBuilder &_fbb, const SegmentSumOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); +}; + +struct SegmentSumOptionsBuilder { + typedef SegmentSumOptions Table; + flatbuffers::FlatBufferBuilder &fbb_; + flatbuffers::uoffset_t start_; + explicit SegmentSumOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb) + : fbb_(_fbb) { + start_ = fbb_.StartTable(); + } + flatbuffers::Offset Finish() { + const auto end = fbb_.EndTable(start_); + auto o = flatbuffers::Offset(end); + return o; + } +}; + +inline flatbuffers::Offset CreateSegmentSumOptions( + flatbuffers::FlatBufferBuilder &_fbb) { + SegmentSumOptionsBuilder builder_(_fbb); + return builder_.Finish(); +} + +flatbuffers::Offset CreateSegmentSumOptions(flatbuffers::FlatBufferBuilder &_fbb, const SegmentSumOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); + +struct BatchMatMulOptionsT : public flatbuffers::NativeTable { + typedef BatchMatMulOptions TableType; + bool adj_x = false; + bool adj_y = false; + bool asymmetric_quantize_inputs = false; +}; + +struct BatchMatMulOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table { + typedef BatchMatMulOptionsT NativeTableType; + typedef BatchMatMulOptionsBuilder Builder; + enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE { + VT_ADJ_X = 4, + 
VT_ADJ_Y = 6, + VT_ASYMMETRIC_QUANTIZE_INPUTS = 8 + }; + bool adj_x() const { + return GetField(VT_ADJ_X, 0) != 0; + } + bool adj_y() const { + return GetField(VT_ADJ_Y, 0) != 0; + } + bool asymmetric_quantize_inputs() const { + return GetField(VT_ASYMMETRIC_QUANTIZE_INPUTS, 0) != 0; + } + bool Verify(flatbuffers::Verifier &verifier) const { + return VerifyTableStart(verifier) && + VerifyField(verifier, VT_ADJ_X, 1) && + VerifyField(verifier, VT_ADJ_Y, 1) && + VerifyField(verifier, VT_ASYMMETRIC_QUANTIZE_INPUTS, 1) && + verifier.EndTable(); + } + BatchMatMulOptionsT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const; + void UnPackTo(BatchMatMulOptionsT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const; + static flatbuffers::Offset Pack(flatbuffers::FlatBufferBuilder &_fbb, const BatchMatMulOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); +}; + +struct BatchMatMulOptionsBuilder { + typedef BatchMatMulOptions Table; + flatbuffers::FlatBufferBuilder &fbb_; + flatbuffers::uoffset_t start_; + void add_adj_x(bool adj_x) { + fbb_.AddElement(BatchMatMulOptions::VT_ADJ_X, static_cast(adj_x), 0); + } + void add_adj_y(bool adj_y) { + fbb_.AddElement(BatchMatMulOptions::VT_ADJ_Y, static_cast(adj_y), 0); + } + void add_asymmetric_quantize_inputs(bool asymmetric_quantize_inputs) { + fbb_.AddElement(BatchMatMulOptions::VT_ASYMMETRIC_QUANTIZE_INPUTS, static_cast(asymmetric_quantize_inputs), 0); + } + explicit BatchMatMulOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb) + : fbb_(_fbb) { + start_ = fbb_.StartTable(); + } + flatbuffers::Offset Finish() { + const auto end = fbb_.EndTable(start_); + auto o = flatbuffers::Offset(end); + return o; + } +}; + +inline flatbuffers::Offset CreateBatchMatMulOptions( + flatbuffers::FlatBufferBuilder &_fbb, + bool adj_x = false, + bool adj_y = false, + bool asymmetric_quantize_inputs = false) { + BatchMatMulOptionsBuilder builder_(_fbb); + 
builder_.add_asymmetric_quantize_inputs(asymmetric_quantize_inputs); + builder_.add_adj_y(adj_y); + builder_.add_adj_x(adj_x); + return builder_.Finish(); +} + +flatbuffers::Offset CreateBatchMatMulOptions(flatbuffers::FlatBufferBuilder &_fbb, const BatchMatMulOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); + +struct CumsumOptionsT : public flatbuffers::NativeTable { + typedef CumsumOptions TableType; + bool exclusive = false; + bool reverse = false; +}; + +struct CumsumOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table { + typedef CumsumOptionsT NativeTableType; + typedef CumsumOptionsBuilder Builder; + enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE { + VT_EXCLUSIVE = 4, + VT_REVERSE = 6 + }; + bool exclusive() const { + return GetField(VT_EXCLUSIVE, 0) != 0; + } + bool reverse() const { + return GetField(VT_REVERSE, 0) != 0; + } + bool Verify(flatbuffers::Verifier &verifier) const { + return VerifyTableStart(verifier) && + VerifyField(verifier, VT_EXCLUSIVE, 1) && + VerifyField(verifier, VT_REVERSE, 1) && + verifier.EndTable(); + } + CumsumOptionsT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const; + void UnPackTo(CumsumOptionsT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const; + static flatbuffers::Offset Pack(flatbuffers::FlatBufferBuilder &_fbb, const CumsumOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); +}; + +struct CumsumOptionsBuilder { + typedef CumsumOptions Table; + flatbuffers::FlatBufferBuilder &fbb_; + flatbuffers::uoffset_t start_; + void add_exclusive(bool exclusive) { + fbb_.AddElement(CumsumOptions::VT_EXCLUSIVE, static_cast(exclusive), 0); + } + void add_reverse(bool reverse) { + fbb_.AddElement(CumsumOptions::VT_REVERSE, static_cast(reverse), 0); + } + explicit CumsumOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb) + : fbb_(_fbb) { + start_ = fbb_.StartTable(); + } + flatbuffers::Offset Finish() { + const auto 
end = fbb_.EndTable(start_); + auto o = flatbuffers::Offset(end); + return o; + } +}; + +inline flatbuffers::Offset CreateCumsumOptions( + flatbuffers::FlatBufferBuilder &_fbb, + bool exclusive = false, + bool reverse = false) { + CumsumOptionsBuilder builder_(_fbb); + builder_.add_reverse(reverse); + builder_.add_exclusive(exclusive); + return builder_.Finish(); +} + +flatbuffers::Offset CreateCumsumOptions(flatbuffers::FlatBufferBuilder &_fbb, const CumsumOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); + +struct BroadcastToOptionsT : public flatbuffers::NativeTable { + typedef BroadcastToOptions TableType; +}; + +struct BroadcastToOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table { + typedef BroadcastToOptionsT NativeTableType; + typedef BroadcastToOptionsBuilder Builder; + bool Verify(flatbuffers::Verifier &verifier) const { + return VerifyTableStart(verifier) && + verifier.EndTable(); + } + BroadcastToOptionsT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const; + void UnPackTo(BroadcastToOptionsT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const; + static flatbuffers::Offset Pack(flatbuffers::FlatBufferBuilder &_fbb, const BroadcastToOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); +}; + +struct BroadcastToOptionsBuilder { + typedef BroadcastToOptions Table; + flatbuffers::FlatBufferBuilder &fbb_; + flatbuffers::uoffset_t start_; + explicit BroadcastToOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb) + : fbb_(_fbb) { + start_ = fbb_.StartTable(); + } + flatbuffers::Offset Finish() { + const auto end = fbb_.EndTable(start_); + auto o = flatbuffers::Offset(end); + return o; + } +}; + +inline flatbuffers::Offset CreateBroadcastToOptions( + flatbuffers::FlatBufferBuilder &_fbb) { + BroadcastToOptionsBuilder builder_(_fbb); + return builder_.Finish(); +} + +flatbuffers::Offset CreateBroadcastToOptions(flatbuffers::FlatBufferBuilder &_fbb, const 
BroadcastToOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); + +struct Rfft2dOptionsT : public flatbuffers::NativeTable { + typedef Rfft2dOptions TableType; +}; + +struct Rfft2dOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table { + typedef Rfft2dOptionsT NativeTableType; + typedef Rfft2dOptionsBuilder Builder; + bool Verify(flatbuffers::Verifier &verifier) const { + return VerifyTableStart(verifier) && + verifier.EndTable(); + } + Rfft2dOptionsT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const; + void UnPackTo(Rfft2dOptionsT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const; + static flatbuffers::Offset Pack(flatbuffers::FlatBufferBuilder &_fbb, const Rfft2dOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); +}; + +struct Rfft2dOptionsBuilder { + typedef Rfft2dOptions Table; + flatbuffers::FlatBufferBuilder &fbb_; + flatbuffers::uoffset_t start_; + explicit Rfft2dOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb) + : fbb_(_fbb) { + start_ = fbb_.StartTable(); + } + flatbuffers::Offset Finish() { + const auto end = fbb_.EndTable(start_); + auto o = flatbuffers::Offset(end); + return o; + } +}; + +inline flatbuffers::Offset CreateRfft2dOptions( + flatbuffers::FlatBufferBuilder &_fbb) { + Rfft2dOptionsBuilder builder_(_fbb); + return builder_.Finish(); +} + +flatbuffers::Offset CreateRfft2dOptions(flatbuffers::FlatBufferBuilder &_fbb, const Rfft2dOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); + +struct HashtableOptionsT : public flatbuffers::NativeTable { + typedef HashtableOptions TableType; + int32_t table_id = 0; + tflite::TensorType key_dtype = tflite::TensorType_FLOAT32; + tflite::TensorType value_dtype = tflite::TensorType_FLOAT32; +}; + +struct HashtableOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table { + typedef HashtableOptionsT NativeTableType; + typedef HashtableOptionsBuilder Builder; + enum 
FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE { + VT_TABLE_ID = 4, + VT_KEY_DTYPE = 6, + VT_VALUE_DTYPE = 8 + }; + int32_t table_id() const { + return GetField(VT_TABLE_ID, 0); + } + tflite::TensorType key_dtype() const { + return static_cast(GetField(VT_KEY_DTYPE, 0)); + } + tflite::TensorType value_dtype() const { + return static_cast(GetField(VT_VALUE_DTYPE, 0)); + } + bool Verify(flatbuffers::Verifier &verifier) const { + return VerifyTableStart(verifier) && + VerifyField(verifier, VT_TABLE_ID, 4) && + VerifyField(verifier, VT_KEY_DTYPE, 1) && + VerifyField(verifier, VT_VALUE_DTYPE, 1) && + verifier.EndTable(); + } + HashtableOptionsT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const; + void UnPackTo(HashtableOptionsT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const; + static flatbuffers::Offset Pack(flatbuffers::FlatBufferBuilder &_fbb, const HashtableOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); +}; + +struct HashtableOptionsBuilder { + typedef HashtableOptions Table; + flatbuffers::FlatBufferBuilder &fbb_; + flatbuffers::uoffset_t start_; + void add_table_id(int32_t table_id) { + fbb_.AddElement(HashtableOptions::VT_TABLE_ID, table_id, 0); + } + void add_key_dtype(tflite::TensorType key_dtype) { + fbb_.AddElement(HashtableOptions::VT_KEY_DTYPE, static_cast(key_dtype), 0); + } + void add_value_dtype(tflite::TensorType value_dtype) { + fbb_.AddElement(HashtableOptions::VT_VALUE_DTYPE, static_cast(value_dtype), 0); + } + explicit HashtableOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb) + : fbb_(_fbb) { + start_ = fbb_.StartTable(); + } + flatbuffers::Offset Finish() { + const auto end = fbb_.EndTable(start_); + auto o = flatbuffers::Offset(end); + return o; + } +}; + +inline flatbuffers::Offset CreateHashtableOptions( + flatbuffers::FlatBufferBuilder &_fbb, + int32_t table_id = 0, + tflite::TensorType key_dtype = tflite::TensorType_FLOAT32, + tflite::TensorType 
value_dtype = tflite::TensorType_FLOAT32) { + HashtableOptionsBuilder builder_(_fbb); + builder_.add_table_id(table_id); + builder_.add_value_dtype(value_dtype); + builder_.add_key_dtype(key_dtype); + return builder_.Finish(); +} + +flatbuffers::Offset CreateHashtableOptions(flatbuffers::FlatBufferBuilder &_fbb, const HashtableOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); + +struct HashtableFindOptionsT : public flatbuffers::NativeTable { + typedef HashtableFindOptions TableType; +}; + +struct HashtableFindOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table { + typedef HashtableFindOptionsT NativeTableType; + typedef HashtableFindOptionsBuilder Builder; + bool Verify(flatbuffers::Verifier &verifier) const { + return VerifyTableStart(verifier) && + verifier.EndTable(); + } + HashtableFindOptionsT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const; + void UnPackTo(HashtableFindOptionsT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const; + static flatbuffers::Offset Pack(flatbuffers::FlatBufferBuilder &_fbb, const HashtableFindOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); +}; + +struct HashtableFindOptionsBuilder { + typedef HashtableFindOptions Table; + flatbuffers::FlatBufferBuilder &fbb_; + flatbuffers::uoffset_t start_; + explicit HashtableFindOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb) + : fbb_(_fbb) { + start_ = fbb_.StartTable(); + } + flatbuffers::Offset Finish() { + const auto end = fbb_.EndTable(start_); + auto o = flatbuffers::Offset(end); + return o; + } +}; + +inline flatbuffers::Offset CreateHashtableFindOptions( + flatbuffers::FlatBufferBuilder &_fbb) { + HashtableFindOptionsBuilder builder_(_fbb); + return builder_.Finish(); +} + +flatbuffers::Offset CreateHashtableFindOptions(flatbuffers::FlatBufferBuilder &_fbb, const HashtableFindOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); + +struct 
HashtableImportOptionsT : public flatbuffers::NativeTable { + typedef HashtableImportOptions TableType; +}; + +struct HashtableImportOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table { + typedef HashtableImportOptionsT NativeTableType; + typedef HashtableImportOptionsBuilder Builder; + bool Verify(flatbuffers::Verifier &verifier) const { + return VerifyTableStart(verifier) && + verifier.EndTable(); + } + HashtableImportOptionsT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const; + void UnPackTo(HashtableImportOptionsT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const; + static flatbuffers::Offset Pack(flatbuffers::FlatBufferBuilder &_fbb, const HashtableImportOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); +}; + +struct HashtableImportOptionsBuilder { + typedef HashtableImportOptions Table; + flatbuffers::FlatBufferBuilder &fbb_; + flatbuffers::uoffset_t start_; + explicit HashtableImportOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb) + : fbb_(_fbb) { + start_ = fbb_.StartTable(); + } + flatbuffers::Offset Finish() { + const auto end = fbb_.EndTable(start_); + auto o = flatbuffers::Offset(end); + return o; + } +}; + +inline flatbuffers::Offset CreateHashtableImportOptions( + flatbuffers::FlatBufferBuilder &_fbb) { + HashtableImportOptionsBuilder builder_(_fbb); + return builder_.Finish(); +} + +flatbuffers::Offset CreateHashtableImportOptions(flatbuffers::FlatBufferBuilder &_fbb, const HashtableImportOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); + +struct HashtableSizeOptionsT : public flatbuffers::NativeTable { + typedef HashtableSizeOptions TableType; +}; + +struct HashtableSizeOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table { + typedef HashtableSizeOptionsT NativeTableType; + typedef HashtableSizeOptionsBuilder Builder; + bool Verify(flatbuffers::Verifier &verifier) const { + return VerifyTableStart(verifier) && + 
verifier.EndTable(); + } + HashtableSizeOptionsT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const; + void UnPackTo(HashtableSizeOptionsT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const; + static flatbuffers::Offset Pack(flatbuffers::FlatBufferBuilder &_fbb, const HashtableSizeOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); +}; + +struct HashtableSizeOptionsBuilder { + typedef HashtableSizeOptions Table; + flatbuffers::FlatBufferBuilder &fbb_; + flatbuffers::uoffset_t start_; + explicit HashtableSizeOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb) + : fbb_(_fbb) { + start_ = fbb_.StartTable(); + } + flatbuffers::Offset Finish() { + const auto end = fbb_.EndTable(start_); + auto o = flatbuffers::Offset(end); + return o; + } +}; + +inline flatbuffers::Offset CreateHashtableSizeOptions( + flatbuffers::FlatBufferBuilder &_fbb) { + HashtableSizeOptionsBuilder builder_(_fbb); + return builder_.Finish(); +} + +flatbuffers::Offset CreateHashtableSizeOptions(flatbuffers::FlatBufferBuilder &_fbb, const HashtableSizeOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); + +struct VarHandleOptionsT : public flatbuffers::NativeTable { + typedef VarHandleOptions TableType; + std::string container{}; + std::string shared_name{}; +}; + +struct VarHandleOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table { + typedef VarHandleOptionsT NativeTableType; + typedef VarHandleOptionsBuilder Builder; + enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE { + VT_CONTAINER = 4, + VT_SHARED_NAME = 6 + }; + const flatbuffers::String *container() const { + return GetPointer(VT_CONTAINER); + } + const flatbuffers::String *shared_name() const { + return GetPointer(VT_SHARED_NAME); + } + bool Verify(flatbuffers::Verifier &verifier) const { + return VerifyTableStart(verifier) && + VerifyOffset(verifier, VT_CONTAINER) && + verifier.VerifyString(container()) && + 
VerifyOffset(verifier, VT_SHARED_NAME) && + verifier.VerifyString(shared_name()) && + verifier.EndTable(); + } + VarHandleOptionsT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const; + void UnPackTo(VarHandleOptionsT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const; + static flatbuffers::Offset Pack(flatbuffers::FlatBufferBuilder &_fbb, const VarHandleOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); +}; + +struct VarHandleOptionsBuilder { + typedef VarHandleOptions Table; + flatbuffers::FlatBufferBuilder &fbb_; + flatbuffers::uoffset_t start_; + void add_container(flatbuffers::Offset container) { + fbb_.AddOffset(VarHandleOptions::VT_CONTAINER, container); + } + void add_shared_name(flatbuffers::Offset shared_name) { + fbb_.AddOffset(VarHandleOptions::VT_SHARED_NAME, shared_name); + } + explicit VarHandleOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb) + : fbb_(_fbb) { + start_ = fbb_.StartTable(); + } + flatbuffers::Offset Finish() { + const auto end = fbb_.EndTable(start_); + auto o = flatbuffers::Offset(end); + return o; + } +}; + +inline flatbuffers::Offset CreateVarHandleOptions( + flatbuffers::FlatBufferBuilder &_fbb, + flatbuffers::Offset container = 0, + flatbuffers::Offset shared_name = 0) { + VarHandleOptionsBuilder builder_(_fbb); + builder_.add_shared_name(shared_name); + builder_.add_container(container); + return builder_.Finish(); +} + +inline flatbuffers::Offset CreateVarHandleOptionsDirect( + flatbuffers::FlatBufferBuilder &_fbb, + const char *container = nullptr, + const char *shared_name = nullptr) { + auto container__ = container ? _fbb.CreateString(container) : 0; + auto shared_name__ = shared_name ? 
_fbb.CreateString(shared_name) : 0; + return tflite::CreateVarHandleOptions( + _fbb, + container__, + shared_name__); +} + +flatbuffers::Offset CreateVarHandleOptions(flatbuffers::FlatBufferBuilder &_fbb, const VarHandleOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); + +struct ReadVariableOptionsT : public flatbuffers::NativeTable { + typedef ReadVariableOptions TableType; +}; + +struct ReadVariableOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table { + typedef ReadVariableOptionsT NativeTableType; + typedef ReadVariableOptionsBuilder Builder; + bool Verify(flatbuffers::Verifier &verifier) const { + return VerifyTableStart(verifier) && + verifier.EndTable(); + } + ReadVariableOptionsT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const; + void UnPackTo(ReadVariableOptionsT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const; + static flatbuffers::Offset Pack(flatbuffers::FlatBufferBuilder &_fbb, const ReadVariableOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); +}; + +struct ReadVariableOptionsBuilder { + typedef ReadVariableOptions Table; + flatbuffers::FlatBufferBuilder &fbb_; + flatbuffers::uoffset_t start_; + explicit ReadVariableOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb) + : fbb_(_fbb) { + start_ = fbb_.StartTable(); + } + flatbuffers::Offset Finish() { + const auto end = fbb_.EndTable(start_); + auto o = flatbuffers::Offset(end); + return o; + } +}; + +inline flatbuffers::Offset CreateReadVariableOptions( + flatbuffers::FlatBufferBuilder &_fbb) { + ReadVariableOptionsBuilder builder_(_fbb); + return builder_.Finish(); +} + +flatbuffers::Offset CreateReadVariableOptions(flatbuffers::FlatBufferBuilder &_fbb, const ReadVariableOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); + +struct AssignVariableOptionsT : public flatbuffers::NativeTable { + typedef AssignVariableOptions TableType; +}; + +struct AssignVariableOptions 
FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table { + typedef AssignVariableOptionsT NativeTableType; + typedef AssignVariableOptionsBuilder Builder; + bool Verify(flatbuffers::Verifier &verifier) const { + return VerifyTableStart(verifier) && + verifier.EndTable(); + } + AssignVariableOptionsT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const; + void UnPackTo(AssignVariableOptionsT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const; + static flatbuffers::Offset Pack(flatbuffers::FlatBufferBuilder &_fbb, const AssignVariableOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); +}; + +struct AssignVariableOptionsBuilder { + typedef AssignVariableOptions Table; + flatbuffers::FlatBufferBuilder &fbb_; + flatbuffers::uoffset_t start_; + explicit AssignVariableOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb) + : fbb_(_fbb) { + start_ = fbb_.StartTable(); + } + flatbuffers::Offset Finish() { + const auto end = fbb_.EndTable(start_); + auto o = flatbuffers::Offset(end); + return o; + } +}; + +inline flatbuffers::Offset CreateAssignVariableOptions( + flatbuffers::FlatBufferBuilder &_fbb) { + AssignVariableOptionsBuilder builder_(_fbb); + return builder_.Finish(); +} + +flatbuffers::Offset CreateAssignVariableOptions(flatbuffers::FlatBufferBuilder &_fbb, const AssignVariableOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); + +struct RandomOptionsT : public flatbuffers::NativeTable { + typedef RandomOptions TableType; + int64_t seed = 0; + int64_t seed2 = 0; +}; + +struct RandomOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table { + typedef RandomOptionsT NativeTableType; + typedef RandomOptionsBuilder Builder; + enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE { + VT_SEED = 4, + VT_SEED2 = 6 + }; + int64_t seed() const { + return GetField(VT_SEED, 0); + } + int64_t seed2() const { + return GetField(VT_SEED2, 0); + } + bool Verify(flatbuffers::Verifier 
&verifier) const { + return VerifyTableStart(verifier) && + VerifyField(verifier, VT_SEED, 8) && + VerifyField(verifier, VT_SEED2, 8) && + verifier.EndTable(); + } + RandomOptionsT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const; + void UnPackTo(RandomOptionsT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const; + static flatbuffers::Offset Pack(flatbuffers::FlatBufferBuilder &_fbb, const RandomOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); +}; + +struct RandomOptionsBuilder { + typedef RandomOptions Table; + flatbuffers::FlatBufferBuilder &fbb_; + flatbuffers::uoffset_t start_; + void add_seed(int64_t seed) { + fbb_.AddElement(RandomOptions::VT_SEED, seed, 0); + } + void add_seed2(int64_t seed2) { + fbb_.AddElement(RandomOptions::VT_SEED2, seed2, 0); + } + explicit RandomOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb) + : fbb_(_fbb) { + start_ = fbb_.StartTable(); + } + flatbuffers::Offset Finish() { + const auto end = fbb_.EndTable(start_); + auto o = flatbuffers::Offset(end); + return o; + } +}; + +inline flatbuffers::Offset CreateRandomOptions( + flatbuffers::FlatBufferBuilder &_fbb, + int64_t seed = 0, + int64_t seed2 = 0) { + RandomOptionsBuilder builder_(_fbb); + builder_.add_seed2(seed2); + builder_.add_seed(seed); + return builder_.Finish(); +} + +flatbuffers::Offset CreateRandomOptions(flatbuffers::FlatBufferBuilder &_fbb, const RandomOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); + +struct BucketizeOptionsT : public flatbuffers::NativeTable { + typedef BucketizeOptions TableType; + std::vector boundaries{}; +}; + +struct BucketizeOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table { + typedef BucketizeOptionsT NativeTableType; + typedef BucketizeOptionsBuilder Builder; + enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE { + VT_BOUNDARIES = 4 + }; + const flatbuffers::Vector *boundaries() const { + return GetPointer 
*>(VT_BOUNDARIES); + } + bool Verify(flatbuffers::Verifier &verifier) const { + return VerifyTableStart(verifier) && + VerifyOffset(verifier, VT_BOUNDARIES) && + verifier.VerifyVector(boundaries()) && + verifier.EndTable(); + } + BucketizeOptionsT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const; + void UnPackTo(BucketizeOptionsT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const; + static flatbuffers::Offset Pack(flatbuffers::FlatBufferBuilder &_fbb, const BucketizeOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); +}; + +struct BucketizeOptionsBuilder { + typedef BucketizeOptions Table; + flatbuffers::FlatBufferBuilder &fbb_; + flatbuffers::uoffset_t start_; + void add_boundaries(flatbuffers::Offset> boundaries) { + fbb_.AddOffset(BucketizeOptions::VT_BOUNDARIES, boundaries); + } + explicit BucketizeOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb) + : fbb_(_fbb) { + start_ = fbb_.StartTable(); + } + flatbuffers::Offset Finish() { + const auto end = fbb_.EndTable(start_); + auto o = flatbuffers::Offset(end); + return o; + } +}; + +inline flatbuffers::Offset CreateBucketizeOptions( + flatbuffers::FlatBufferBuilder &_fbb, + flatbuffers::Offset> boundaries = 0) { + BucketizeOptionsBuilder builder_(_fbb); + builder_.add_boundaries(boundaries); + return builder_.Finish(); +} + +inline flatbuffers::Offset CreateBucketizeOptionsDirect( + flatbuffers::FlatBufferBuilder &_fbb, + const std::vector *boundaries = nullptr) { + auto boundaries__ = boundaries ? 
_fbb.CreateVector(*boundaries) : 0; + return tflite::CreateBucketizeOptions( + _fbb, + boundaries__); +} + +flatbuffers::Offset CreateBucketizeOptions(flatbuffers::FlatBufferBuilder &_fbb, const BucketizeOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); + +struct GeluOptionsT : public flatbuffers::NativeTable { + typedef GeluOptions TableType; + bool approximate = false; +}; + +struct GeluOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table { + typedef GeluOptionsT NativeTableType; + typedef GeluOptionsBuilder Builder; + enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE { + VT_APPROXIMATE = 4 + }; + bool approximate() const { + return GetField(VT_APPROXIMATE, 0) != 0; + } + bool Verify(flatbuffers::Verifier &verifier) const { + return VerifyTableStart(verifier) && + VerifyField(verifier, VT_APPROXIMATE, 1) && + verifier.EndTable(); + } + GeluOptionsT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const; + void UnPackTo(GeluOptionsT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const; + static flatbuffers::Offset Pack(flatbuffers::FlatBufferBuilder &_fbb, const GeluOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); +}; + +struct GeluOptionsBuilder { + typedef GeluOptions Table; + flatbuffers::FlatBufferBuilder &fbb_; + flatbuffers::uoffset_t start_; + void add_approximate(bool approximate) { + fbb_.AddElement(GeluOptions::VT_APPROXIMATE, static_cast(approximate), 0); + } + explicit GeluOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb) + : fbb_(_fbb) { + start_ = fbb_.StartTable(); + } + flatbuffers::Offset Finish() { + const auto end = fbb_.EndTable(start_); + auto o = flatbuffers::Offset(end); + return o; + } +}; + +inline flatbuffers::Offset CreateGeluOptions( + flatbuffers::FlatBufferBuilder &_fbb, + bool approximate = false) { + GeluOptionsBuilder builder_(_fbb); + builder_.add_approximate(approximate); + return builder_.Finish(); +} + 
+flatbuffers::Offset CreateGeluOptions(flatbuffers::FlatBufferBuilder &_fbb, const GeluOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); + +struct DynamicUpdateSliceOptionsT : public flatbuffers::NativeTable { + typedef DynamicUpdateSliceOptions TableType; +}; + +struct DynamicUpdateSliceOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table { + typedef DynamicUpdateSliceOptionsT NativeTableType; + typedef DynamicUpdateSliceOptionsBuilder Builder; + bool Verify(flatbuffers::Verifier &verifier) const { + return VerifyTableStart(verifier) && + verifier.EndTable(); + } + DynamicUpdateSliceOptionsT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const; + void UnPackTo(DynamicUpdateSliceOptionsT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const; + static flatbuffers::Offset Pack(flatbuffers::FlatBufferBuilder &_fbb, const DynamicUpdateSliceOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); +}; + +struct DynamicUpdateSliceOptionsBuilder { + typedef DynamicUpdateSliceOptions Table; + flatbuffers::FlatBufferBuilder &fbb_; + flatbuffers::uoffset_t start_; + explicit DynamicUpdateSliceOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb) + : fbb_(_fbb) { + start_ = fbb_.StartTable(); + } + flatbuffers::Offset Finish() { + const auto end = fbb_.EndTable(start_); + auto o = flatbuffers::Offset(end); + return o; + } +}; + +inline flatbuffers::Offset CreateDynamicUpdateSliceOptions( + flatbuffers::FlatBufferBuilder &_fbb) { + DynamicUpdateSliceOptionsBuilder builder_(_fbb); + return builder_.Finish(); +} + +flatbuffers::Offset CreateDynamicUpdateSliceOptions(flatbuffers::FlatBufferBuilder &_fbb, const DynamicUpdateSliceOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); + +struct UnsortedSegmentProdOptionsT : public flatbuffers::NativeTable { + typedef UnsortedSegmentProdOptions TableType; +}; + +struct UnsortedSegmentProdOptions FLATBUFFERS_FINAL_CLASS : 
private flatbuffers::Table { + typedef UnsortedSegmentProdOptionsT NativeTableType; + typedef UnsortedSegmentProdOptionsBuilder Builder; + bool Verify(flatbuffers::Verifier &verifier) const { + return VerifyTableStart(verifier) && + verifier.EndTable(); + } + UnsortedSegmentProdOptionsT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const; + void UnPackTo(UnsortedSegmentProdOptionsT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const; + static flatbuffers::Offset Pack(flatbuffers::FlatBufferBuilder &_fbb, const UnsortedSegmentProdOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); +}; + +struct UnsortedSegmentProdOptionsBuilder { + typedef UnsortedSegmentProdOptions Table; + flatbuffers::FlatBufferBuilder &fbb_; + flatbuffers::uoffset_t start_; + explicit UnsortedSegmentProdOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb) + : fbb_(_fbb) { + start_ = fbb_.StartTable(); + } + flatbuffers::Offset Finish() { + const auto end = fbb_.EndTable(start_); + auto o = flatbuffers::Offset(end); + return o; + } +}; + +inline flatbuffers::Offset CreateUnsortedSegmentProdOptions( + flatbuffers::FlatBufferBuilder &_fbb) { + UnsortedSegmentProdOptionsBuilder builder_(_fbb); + return builder_.Finish(); +} + +flatbuffers::Offset CreateUnsortedSegmentProdOptions(flatbuffers::FlatBufferBuilder &_fbb, const UnsortedSegmentProdOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); + +struct UnsortedSegmentMaxOptionsT : public flatbuffers::NativeTable { + typedef UnsortedSegmentMaxOptions TableType; +}; + +struct UnsortedSegmentMaxOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table { + typedef UnsortedSegmentMaxOptionsT NativeTableType; + typedef UnsortedSegmentMaxOptionsBuilder Builder; + bool Verify(flatbuffers::Verifier &verifier) const { + return VerifyTableStart(verifier) && + verifier.EndTable(); + } + UnsortedSegmentMaxOptionsT *UnPack(const flatbuffers::resolver_function_t *_resolver 
= nullptr) const; + void UnPackTo(UnsortedSegmentMaxOptionsT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const; + static flatbuffers::Offset Pack(flatbuffers::FlatBufferBuilder &_fbb, const UnsortedSegmentMaxOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); +}; + +struct UnsortedSegmentMaxOptionsBuilder { + typedef UnsortedSegmentMaxOptions Table; + flatbuffers::FlatBufferBuilder &fbb_; + flatbuffers::uoffset_t start_; + explicit UnsortedSegmentMaxOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb) + : fbb_(_fbb) { + start_ = fbb_.StartTable(); + } + flatbuffers::Offset Finish() { + const auto end = fbb_.EndTable(start_); + auto o = flatbuffers::Offset(end); + return o; + } +}; + +inline flatbuffers::Offset CreateUnsortedSegmentMaxOptions( + flatbuffers::FlatBufferBuilder &_fbb) { + UnsortedSegmentMaxOptionsBuilder builder_(_fbb); + return builder_.Finish(); +} + +flatbuffers::Offset CreateUnsortedSegmentMaxOptions(flatbuffers::FlatBufferBuilder &_fbb, const UnsortedSegmentMaxOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); + +struct UnsortedSegmentSumOptionsT : public flatbuffers::NativeTable { + typedef UnsortedSegmentSumOptions TableType; +}; + +struct UnsortedSegmentSumOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table { + typedef UnsortedSegmentSumOptionsT NativeTableType; + typedef UnsortedSegmentSumOptionsBuilder Builder; + bool Verify(flatbuffers::Verifier &verifier) const { + return VerifyTableStart(verifier) && + verifier.EndTable(); + } + UnsortedSegmentSumOptionsT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const; + void UnPackTo(UnsortedSegmentSumOptionsT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const; + static flatbuffers::Offset Pack(flatbuffers::FlatBufferBuilder &_fbb, const UnsortedSegmentSumOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); +}; + +struct UnsortedSegmentSumOptionsBuilder { 
+ typedef UnsortedSegmentSumOptions Table; + flatbuffers::FlatBufferBuilder &fbb_; + flatbuffers::uoffset_t start_; + explicit UnsortedSegmentSumOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb) + : fbb_(_fbb) { + start_ = fbb_.StartTable(); + } + flatbuffers::Offset Finish() { + const auto end = fbb_.EndTable(start_); + auto o = flatbuffers::Offset(end); + return o; + } +}; + +inline flatbuffers::Offset CreateUnsortedSegmentSumOptions( + flatbuffers::FlatBufferBuilder &_fbb) { + UnsortedSegmentSumOptionsBuilder builder_(_fbb); + return builder_.Finish(); +} + +flatbuffers::Offset CreateUnsortedSegmentSumOptions(flatbuffers::FlatBufferBuilder &_fbb, const UnsortedSegmentSumOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); + +struct ATan2OptionsT : public flatbuffers::NativeTable { + typedef ATan2Options TableType; +}; + +struct ATan2Options FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table { + typedef ATan2OptionsT NativeTableType; + typedef ATan2OptionsBuilder Builder; + bool Verify(flatbuffers::Verifier &verifier) const { + return VerifyTableStart(verifier) && + verifier.EndTable(); + } + ATan2OptionsT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const; + void UnPackTo(ATan2OptionsT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const; + static flatbuffers::Offset Pack(flatbuffers::FlatBufferBuilder &_fbb, const ATan2OptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); +}; + +struct ATan2OptionsBuilder { + typedef ATan2Options Table; + flatbuffers::FlatBufferBuilder &fbb_; + flatbuffers::uoffset_t start_; + explicit ATan2OptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb) + : fbb_(_fbb) { + start_ = fbb_.StartTable(); + } + flatbuffers::Offset Finish() { + const auto end = fbb_.EndTable(start_); + auto o = flatbuffers::Offset(end); + return o; + } +}; + +inline flatbuffers::Offset CreateATan2Options( + flatbuffers::FlatBufferBuilder &_fbb) { + ATan2OptionsBuilder 
builder_(_fbb); + return builder_.Finish(); +} + +flatbuffers::Offset CreateATan2Options(flatbuffers::FlatBufferBuilder &_fbb, const ATan2OptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); + +struct UnsortedSegmentMinOptionsT : public flatbuffers::NativeTable { + typedef UnsortedSegmentMinOptions TableType; +}; + +struct UnsortedSegmentMinOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table { + typedef UnsortedSegmentMinOptionsT NativeTableType; + typedef UnsortedSegmentMinOptionsBuilder Builder; + bool Verify(flatbuffers::Verifier &verifier) const { + return VerifyTableStart(verifier) && + verifier.EndTable(); + } + UnsortedSegmentMinOptionsT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const; + void UnPackTo(UnsortedSegmentMinOptionsT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const; + static flatbuffers::Offset Pack(flatbuffers::FlatBufferBuilder &_fbb, const UnsortedSegmentMinOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); +}; + +struct UnsortedSegmentMinOptionsBuilder { + typedef UnsortedSegmentMinOptions Table; + flatbuffers::FlatBufferBuilder &fbb_; + flatbuffers::uoffset_t start_; + explicit UnsortedSegmentMinOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb) + : fbb_(_fbb) { + start_ = fbb_.StartTable(); + } + flatbuffers::Offset Finish() { + const auto end = fbb_.EndTable(start_); + auto o = flatbuffers::Offset(end); + return o; + } +}; + +inline flatbuffers::Offset CreateUnsortedSegmentMinOptions( + flatbuffers::FlatBufferBuilder &_fbb) { + UnsortedSegmentMinOptionsBuilder builder_(_fbb); + return builder_.Finish(); +} + +flatbuffers::Offset CreateUnsortedSegmentMinOptions(flatbuffers::FlatBufferBuilder &_fbb, const UnsortedSegmentMinOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); + +struct SignOptionsT : public flatbuffers::NativeTable { + typedef SignOptions TableType; +}; + +struct SignOptions 
FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table { + typedef SignOptionsT NativeTableType; + typedef SignOptionsBuilder Builder; + bool Verify(flatbuffers::Verifier &verifier) const { + return VerifyTableStart(verifier) && + verifier.EndTable(); + } + SignOptionsT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const; + void UnPackTo(SignOptionsT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const; + static flatbuffers::Offset Pack(flatbuffers::FlatBufferBuilder &_fbb, const SignOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); +}; + +struct SignOptionsBuilder { + typedef SignOptions Table; + flatbuffers::FlatBufferBuilder &fbb_; + flatbuffers::uoffset_t start_; + explicit SignOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb) + : fbb_(_fbb) { + start_ = fbb_.StartTable(); + } + flatbuffers::Offset Finish() { + const auto end = fbb_.EndTable(start_); + auto o = flatbuffers::Offset(end); + return o; + } +}; + +inline flatbuffers::Offset CreateSignOptions( + flatbuffers::FlatBufferBuilder &_fbb) { + SignOptionsBuilder builder_(_fbb); + return builder_.Finish(); +} + +flatbuffers::Offset CreateSignOptions(flatbuffers::FlatBufferBuilder &_fbb, const SignOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); + +struct BitcastOptionsT : public flatbuffers::NativeTable { + typedef BitcastOptions TableType; +}; + +struct BitcastOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table { + typedef BitcastOptionsT NativeTableType; + typedef BitcastOptionsBuilder Builder; + bool Verify(flatbuffers::Verifier &verifier) const { + return VerifyTableStart(verifier) && + verifier.EndTable(); + } + BitcastOptionsT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const; + void UnPackTo(BitcastOptionsT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const; + static flatbuffers::Offset Pack(flatbuffers::FlatBufferBuilder &_fbb, const BitcastOptionsT* 
_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); +}; + +struct BitcastOptionsBuilder { + typedef BitcastOptions Table; + flatbuffers::FlatBufferBuilder &fbb_; + flatbuffers::uoffset_t start_; + explicit BitcastOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb) + : fbb_(_fbb) { + start_ = fbb_.StartTable(); + } + flatbuffers::Offset Finish() { + const auto end = fbb_.EndTable(start_); + auto o = flatbuffers::Offset(end); + return o; + } +}; + +inline flatbuffers::Offset CreateBitcastOptions( + flatbuffers::FlatBufferBuilder &_fbb) { + BitcastOptionsBuilder builder_(_fbb); + return builder_.Finish(); +} + +flatbuffers::Offset CreateBitcastOptions(flatbuffers::FlatBufferBuilder &_fbb, const BitcastOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); + +struct BitwiseXorOptionsT : public flatbuffers::NativeTable { + typedef BitwiseXorOptions TableType; +}; + +struct BitwiseXorOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table { + typedef BitwiseXorOptionsT NativeTableType; + typedef BitwiseXorOptionsBuilder Builder; + bool Verify(flatbuffers::Verifier &verifier) const { + return VerifyTableStart(verifier) && + verifier.EndTable(); + } + BitwiseXorOptionsT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const; + void UnPackTo(BitwiseXorOptionsT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const; + static flatbuffers::Offset Pack(flatbuffers::FlatBufferBuilder &_fbb, const BitwiseXorOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); +}; + +struct BitwiseXorOptionsBuilder { + typedef BitwiseXorOptions Table; + flatbuffers::FlatBufferBuilder &fbb_; + flatbuffers::uoffset_t start_; + explicit BitwiseXorOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb) + : fbb_(_fbb) { + start_ = fbb_.StartTable(); + } + flatbuffers::Offset Finish() { + const auto end = fbb_.EndTable(start_); + auto o = flatbuffers::Offset(end); + return o; + } +}; + +inline 
flatbuffers::Offset CreateBitwiseXorOptions( + flatbuffers::FlatBufferBuilder &_fbb) { + BitwiseXorOptionsBuilder builder_(_fbb); + return builder_.Finish(); +} + +flatbuffers::Offset CreateBitwiseXorOptions(flatbuffers::FlatBufferBuilder &_fbb, const BitwiseXorOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); + +struct RightShiftOptionsT : public flatbuffers::NativeTable { + typedef RightShiftOptions TableType; +}; + +struct RightShiftOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table { + typedef RightShiftOptionsT NativeTableType; + typedef RightShiftOptionsBuilder Builder; + bool Verify(flatbuffers::Verifier &verifier) const { + return VerifyTableStart(verifier) && + verifier.EndTable(); + } + RightShiftOptionsT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const; + void UnPackTo(RightShiftOptionsT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const; + static flatbuffers::Offset Pack(flatbuffers::FlatBufferBuilder &_fbb, const RightShiftOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); +}; + +struct RightShiftOptionsBuilder { + typedef RightShiftOptions Table; + flatbuffers::FlatBufferBuilder &fbb_; + flatbuffers::uoffset_t start_; + explicit RightShiftOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb) + : fbb_(_fbb) { + start_ = fbb_.StartTable(); + } + flatbuffers::Offset Finish() { + const auto end = fbb_.EndTable(start_); + auto o = flatbuffers::Offset(end); + return o; + } +}; + +inline flatbuffers::Offset CreateRightShiftOptions( + flatbuffers::FlatBufferBuilder &_fbb) { + RightShiftOptionsBuilder builder_(_fbb); + return builder_.Finish(); +} + +flatbuffers::Offset CreateRightShiftOptions(flatbuffers::FlatBufferBuilder &_fbb, const RightShiftOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); + +struct OperatorCodeT : public flatbuffers::NativeTable { + typedef OperatorCode TableType; + int8_t deprecated_builtin_code = 0; + 
std::string custom_code{}; + int32_t version = 1; + tflite::BuiltinOperator builtin_code = tflite::BuiltinOperator_ADD; +}; + +struct OperatorCode FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table { + typedef OperatorCodeT NativeTableType; + typedef OperatorCodeBuilder Builder; + enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE { + VT_DEPRECATED_BUILTIN_CODE = 4, + VT_CUSTOM_CODE = 6, + VT_VERSION = 8, + VT_BUILTIN_CODE = 10 + }; + int8_t deprecated_builtin_code() const { + return GetField(VT_DEPRECATED_BUILTIN_CODE, 0); + } + const flatbuffers::String *custom_code() const { + return GetPointer(VT_CUSTOM_CODE); + } + int32_t version() const { + return GetField(VT_VERSION, 1); + } + tflite::BuiltinOperator builtin_code() const { + return static_cast(GetField(VT_BUILTIN_CODE, 0)); + } + bool Verify(flatbuffers::Verifier &verifier) const { + return VerifyTableStart(verifier) && + VerifyField(verifier, VT_DEPRECATED_BUILTIN_CODE, 1) && + VerifyOffset(verifier, VT_CUSTOM_CODE) && + verifier.VerifyString(custom_code()) && + VerifyField(verifier, VT_VERSION, 4) && + VerifyField(verifier, VT_BUILTIN_CODE, 4) && + verifier.EndTable(); + } + OperatorCodeT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const; + void UnPackTo(OperatorCodeT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const; + static flatbuffers::Offset Pack(flatbuffers::FlatBufferBuilder &_fbb, const OperatorCodeT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); +}; + +struct OperatorCodeBuilder { + typedef OperatorCode Table; + flatbuffers::FlatBufferBuilder &fbb_; + flatbuffers::uoffset_t start_; + void add_deprecated_builtin_code(int8_t deprecated_builtin_code) { + fbb_.AddElement(OperatorCode::VT_DEPRECATED_BUILTIN_CODE, deprecated_builtin_code, 0); + } + void add_custom_code(flatbuffers::Offset custom_code) { + fbb_.AddOffset(OperatorCode::VT_CUSTOM_CODE, custom_code); + } + void add_version(int32_t version) { + 
fbb_.AddElement(OperatorCode::VT_VERSION, version, 1); + } + void add_builtin_code(tflite::BuiltinOperator builtin_code) { + fbb_.AddElement(OperatorCode::VT_BUILTIN_CODE, static_cast(builtin_code), 0); + } + explicit OperatorCodeBuilder(flatbuffers::FlatBufferBuilder &_fbb) + : fbb_(_fbb) { + start_ = fbb_.StartTable(); + } + flatbuffers::Offset Finish() { + const auto end = fbb_.EndTable(start_); + auto o = flatbuffers::Offset(end); + return o; + } +}; + +inline flatbuffers::Offset CreateOperatorCode( + flatbuffers::FlatBufferBuilder &_fbb, + int8_t deprecated_builtin_code = 0, + flatbuffers::Offset custom_code = 0, + int32_t version = 1, + tflite::BuiltinOperator builtin_code = tflite::BuiltinOperator_ADD) { + OperatorCodeBuilder builder_(_fbb); + builder_.add_builtin_code(builtin_code); + builder_.add_version(version); + builder_.add_custom_code(custom_code); + builder_.add_deprecated_builtin_code(deprecated_builtin_code); + return builder_.Finish(); +} + +inline flatbuffers::Offset CreateOperatorCodeDirect( + flatbuffers::FlatBufferBuilder &_fbb, + int8_t deprecated_builtin_code = 0, + const char *custom_code = nullptr, + int32_t version = 1, + tflite::BuiltinOperator builtin_code = tflite::BuiltinOperator_ADD) { + auto custom_code__ = custom_code ? 
_fbb.CreateString(custom_code) : 0; + return tflite::CreateOperatorCode( + _fbb, + deprecated_builtin_code, + custom_code__, + version, + builtin_code); +} + +flatbuffers::Offset CreateOperatorCode(flatbuffers::FlatBufferBuilder &_fbb, const OperatorCodeT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); + +struct OperatorT : public flatbuffers::NativeTable { + typedef Operator TableType; + uint32_t opcode_index = 0; + std::vector inputs{}; + std::vector outputs{}; + tflite::BuiltinOptionsUnion builtin_options{}; + std::vector custom_options{}; + tflite::CustomOptionsFormat custom_options_format = tflite::CustomOptionsFormat_FLEXBUFFERS; + std::vector mutating_variable_inputs{}; + std::vector intermediates{}; + uint64_t custom_options_offset = 0; + uint64_t custom_options_size = 0; +}; + +struct Operator FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table { + typedef OperatorT NativeTableType; + typedef OperatorBuilder Builder; + enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE { + VT_OPCODE_INDEX = 4, + VT_INPUTS = 6, + VT_OUTPUTS = 8, + VT_BUILTIN_OPTIONS_TYPE = 10, + VT_BUILTIN_OPTIONS = 12, + VT_CUSTOM_OPTIONS = 14, + VT_CUSTOM_OPTIONS_FORMAT = 16, + VT_MUTATING_VARIABLE_INPUTS = 18, + VT_INTERMEDIATES = 20, + VT_CUSTOM_OPTIONS_OFFSET = 22, + VT_CUSTOM_OPTIONS_SIZE = 24 + }; + uint32_t opcode_index() const { + return GetField(VT_OPCODE_INDEX, 0); + } + const flatbuffers::Vector *inputs() const { + return GetPointer *>(VT_INPUTS); + } + const flatbuffers::Vector *outputs() const { + return GetPointer *>(VT_OUTPUTS); + } + tflite::BuiltinOptions builtin_options_type() const { + return static_cast(GetField(VT_BUILTIN_OPTIONS_TYPE, 0)); + } + const void *builtin_options() const { + return GetPointer(VT_BUILTIN_OPTIONS); + } + template const T *builtin_options_as() const; + const tflite::Conv2DOptions *builtin_options_as_Conv2DOptions() const { + return builtin_options_type() == tflite::BuiltinOptions_Conv2DOptions ? 
static_cast(builtin_options()) : nullptr; + } + const tflite::DepthwiseConv2DOptions *builtin_options_as_DepthwiseConv2DOptions() const { + return builtin_options_type() == tflite::BuiltinOptions_DepthwiseConv2DOptions ? static_cast(builtin_options()) : nullptr; + } + const tflite::ConcatEmbeddingsOptions *builtin_options_as_ConcatEmbeddingsOptions() const { + return builtin_options_type() == tflite::BuiltinOptions_ConcatEmbeddingsOptions ? static_cast(builtin_options()) : nullptr; + } + const tflite::LSHProjectionOptions *builtin_options_as_LSHProjectionOptions() const { + return builtin_options_type() == tflite::BuiltinOptions_LSHProjectionOptions ? static_cast(builtin_options()) : nullptr; + } + const tflite::Pool2DOptions *builtin_options_as_Pool2DOptions() const { + return builtin_options_type() == tflite::BuiltinOptions_Pool2DOptions ? static_cast(builtin_options()) : nullptr; + } + const tflite::SVDFOptions *builtin_options_as_SVDFOptions() const { + return builtin_options_type() == tflite::BuiltinOptions_SVDFOptions ? static_cast(builtin_options()) : nullptr; + } + const tflite::RNNOptions *builtin_options_as_RNNOptions() const { + return builtin_options_type() == tflite::BuiltinOptions_RNNOptions ? static_cast(builtin_options()) : nullptr; + } + const tflite::FullyConnectedOptions *builtin_options_as_FullyConnectedOptions() const { + return builtin_options_type() == tflite::BuiltinOptions_FullyConnectedOptions ? static_cast(builtin_options()) : nullptr; + } + const tflite::SoftmaxOptions *builtin_options_as_SoftmaxOptions() const { + return builtin_options_type() == tflite::BuiltinOptions_SoftmaxOptions ? static_cast(builtin_options()) : nullptr; + } + const tflite::ConcatenationOptions *builtin_options_as_ConcatenationOptions() const { + return builtin_options_type() == tflite::BuiltinOptions_ConcatenationOptions ? 
static_cast(builtin_options()) : nullptr; + } + const tflite::AddOptions *builtin_options_as_AddOptions() const { + return builtin_options_type() == tflite::BuiltinOptions_AddOptions ? static_cast(builtin_options()) : nullptr; + } + const tflite::L2NormOptions *builtin_options_as_L2NormOptions() const { + return builtin_options_type() == tflite::BuiltinOptions_L2NormOptions ? static_cast(builtin_options()) : nullptr; + } + const tflite::LocalResponseNormalizationOptions *builtin_options_as_LocalResponseNormalizationOptions() const { + return builtin_options_type() == tflite::BuiltinOptions_LocalResponseNormalizationOptions ? static_cast(builtin_options()) : nullptr; + } + const tflite::LSTMOptions *builtin_options_as_LSTMOptions() const { + return builtin_options_type() == tflite::BuiltinOptions_LSTMOptions ? static_cast(builtin_options()) : nullptr; + } + const tflite::ResizeBilinearOptions *builtin_options_as_ResizeBilinearOptions() const { + return builtin_options_type() == tflite::BuiltinOptions_ResizeBilinearOptions ? static_cast(builtin_options()) : nullptr; + } + const tflite::CallOptions *builtin_options_as_CallOptions() const { + return builtin_options_type() == tflite::BuiltinOptions_CallOptions ? static_cast(builtin_options()) : nullptr; + } + const tflite::ReshapeOptions *builtin_options_as_ReshapeOptions() const { + return builtin_options_type() == tflite::BuiltinOptions_ReshapeOptions ? static_cast(builtin_options()) : nullptr; + } + const tflite::SkipGramOptions *builtin_options_as_SkipGramOptions() const { + return builtin_options_type() == tflite::BuiltinOptions_SkipGramOptions ? static_cast(builtin_options()) : nullptr; + } + const tflite::SpaceToDepthOptions *builtin_options_as_SpaceToDepthOptions() const { + return builtin_options_type() == tflite::BuiltinOptions_SpaceToDepthOptions ? 
static_cast(builtin_options()) : nullptr; + } + const tflite::EmbeddingLookupSparseOptions *builtin_options_as_EmbeddingLookupSparseOptions() const { + return builtin_options_type() == tflite::BuiltinOptions_EmbeddingLookupSparseOptions ? static_cast(builtin_options()) : nullptr; + } + const tflite::MulOptions *builtin_options_as_MulOptions() const { + return builtin_options_type() == tflite::BuiltinOptions_MulOptions ? static_cast(builtin_options()) : nullptr; + } + const tflite::PadOptions *builtin_options_as_PadOptions() const { + return builtin_options_type() == tflite::BuiltinOptions_PadOptions ? static_cast(builtin_options()) : nullptr; + } + const tflite::GatherOptions *builtin_options_as_GatherOptions() const { + return builtin_options_type() == tflite::BuiltinOptions_GatherOptions ? static_cast(builtin_options()) : nullptr; + } + const tflite::BatchToSpaceNDOptions *builtin_options_as_BatchToSpaceNDOptions() const { + return builtin_options_type() == tflite::BuiltinOptions_BatchToSpaceNDOptions ? static_cast(builtin_options()) : nullptr; + } + const tflite::SpaceToBatchNDOptions *builtin_options_as_SpaceToBatchNDOptions() const { + return builtin_options_type() == tflite::BuiltinOptions_SpaceToBatchNDOptions ? static_cast(builtin_options()) : nullptr; + } + const tflite::TransposeOptions *builtin_options_as_TransposeOptions() const { + return builtin_options_type() == tflite::BuiltinOptions_TransposeOptions ? static_cast(builtin_options()) : nullptr; + } + const tflite::ReducerOptions *builtin_options_as_ReducerOptions() const { + return builtin_options_type() == tflite::BuiltinOptions_ReducerOptions ? static_cast(builtin_options()) : nullptr; + } + const tflite::SubOptions *builtin_options_as_SubOptions() const { + return builtin_options_type() == tflite::BuiltinOptions_SubOptions ? 
static_cast(builtin_options()) : nullptr; + } + const tflite::DivOptions *builtin_options_as_DivOptions() const { + return builtin_options_type() == tflite::BuiltinOptions_DivOptions ? static_cast(builtin_options()) : nullptr; + } + const tflite::SqueezeOptions *builtin_options_as_SqueezeOptions() const { + return builtin_options_type() == tflite::BuiltinOptions_SqueezeOptions ? static_cast(builtin_options()) : nullptr; + } + const tflite::SequenceRNNOptions *builtin_options_as_SequenceRNNOptions() const { + return builtin_options_type() == tflite::BuiltinOptions_SequenceRNNOptions ? static_cast(builtin_options()) : nullptr; + } + const tflite::StridedSliceOptions *builtin_options_as_StridedSliceOptions() const { + return builtin_options_type() == tflite::BuiltinOptions_StridedSliceOptions ? static_cast(builtin_options()) : nullptr; + } + const tflite::ExpOptions *builtin_options_as_ExpOptions() const { + return builtin_options_type() == tflite::BuiltinOptions_ExpOptions ? static_cast(builtin_options()) : nullptr; + } + const tflite::TopKV2Options *builtin_options_as_TopKV2Options() const { + return builtin_options_type() == tflite::BuiltinOptions_TopKV2Options ? static_cast(builtin_options()) : nullptr; + } + const tflite::SplitOptions *builtin_options_as_SplitOptions() const { + return builtin_options_type() == tflite::BuiltinOptions_SplitOptions ? static_cast(builtin_options()) : nullptr; + } + const tflite::LogSoftmaxOptions *builtin_options_as_LogSoftmaxOptions() const { + return builtin_options_type() == tflite::BuiltinOptions_LogSoftmaxOptions ? static_cast(builtin_options()) : nullptr; + } + const tflite::CastOptions *builtin_options_as_CastOptions() const { + return builtin_options_type() == tflite::BuiltinOptions_CastOptions ? static_cast(builtin_options()) : nullptr; + } + const tflite::DequantizeOptions *builtin_options_as_DequantizeOptions() const { + return builtin_options_type() == tflite::BuiltinOptions_DequantizeOptions ? 
static_cast(builtin_options()) : nullptr; + } + const tflite::MaximumMinimumOptions *builtin_options_as_MaximumMinimumOptions() const { + return builtin_options_type() == tflite::BuiltinOptions_MaximumMinimumOptions ? static_cast(builtin_options()) : nullptr; + } + const tflite::ArgMaxOptions *builtin_options_as_ArgMaxOptions() const { + return builtin_options_type() == tflite::BuiltinOptions_ArgMaxOptions ? static_cast(builtin_options()) : nullptr; + } + const tflite::LessOptions *builtin_options_as_LessOptions() const { + return builtin_options_type() == tflite::BuiltinOptions_LessOptions ? static_cast(builtin_options()) : nullptr; + } + const tflite::NegOptions *builtin_options_as_NegOptions() const { + return builtin_options_type() == tflite::BuiltinOptions_NegOptions ? static_cast(builtin_options()) : nullptr; + } + const tflite::PadV2Options *builtin_options_as_PadV2Options() const { + return builtin_options_type() == tflite::BuiltinOptions_PadV2Options ? static_cast(builtin_options()) : nullptr; + } + const tflite::GreaterOptions *builtin_options_as_GreaterOptions() const { + return builtin_options_type() == tflite::BuiltinOptions_GreaterOptions ? static_cast(builtin_options()) : nullptr; + } + const tflite::GreaterEqualOptions *builtin_options_as_GreaterEqualOptions() const { + return builtin_options_type() == tflite::BuiltinOptions_GreaterEqualOptions ? static_cast(builtin_options()) : nullptr; + } + const tflite::LessEqualOptions *builtin_options_as_LessEqualOptions() const { + return builtin_options_type() == tflite::BuiltinOptions_LessEqualOptions ? static_cast(builtin_options()) : nullptr; + } + const tflite::SelectOptions *builtin_options_as_SelectOptions() const { + return builtin_options_type() == tflite::BuiltinOptions_SelectOptions ? static_cast(builtin_options()) : nullptr; + } + const tflite::SliceOptions *builtin_options_as_SliceOptions() const { + return builtin_options_type() == tflite::BuiltinOptions_SliceOptions ? 
static_cast(builtin_options()) : nullptr; + } + const tflite::TransposeConvOptions *builtin_options_as_TransposeConvOptions() const { + return builtin_options_type() == tflite::BuiltinOptions_TransposeConvOptions ? static_cast(builtin_options()) : nullptr; + } + const tflite::SparseToDenseOptions *builtin_options_as_SparseToDenseOptions() const { + return builtin_options_type() == tflite::BuiltinOptions_SparseToDenseOptions ? static_cast(builtin_options()) : nullptr; + } + const tflite::TileOptions *builtin_options_as_TileOptions() const { + return builtin_options_type() == tflite::BuiltinOptions_TileOptions ? static_cast(builtin_options()) : nullptr; + } + const tflite::ExpandDimsOptions *builtin_options_as_ExpandDimsOptions() const { + return builtin_options_type() == tflite::BuiltinOptions_ExpandDimsOptions ? static_cast(builtin_options()) : nullptr; + } + const tflite::EqualOptions *builtin_options_as_EqualOptions() const { + return builtin_options_type() == tflite::BuiltinOptions_EqualOptions ? static_cast(builtin_options()) : nullptr; + } + const tflite::NotEqualOptions *builtin_options_as_NotEqualOptions() const { + return builtin_options_type() == tflite::BuiltinOptions_NotEqualOptions ? static_cast(builtin_options()) : nullptr; + } + const tflite::ShapeOptions *builtin_options_as_ShapeOptions() const { + return builtin_options_type() == tflite::BuiltinOptions_ShapeOptions ? static_cast(builtin_options()) : nullptr; + } + const tflite::PowOptions *builtin_options_as_PowOptions() const { + return builtin_options_type() == tflite::BuiltinOptions_PowOptions ? static_cast(builtin_options()) : nullptr; + } + const tflite::ArgMinOptions *builtin_options_as_ArgMinOptions() const { + return builtin_options_type() == tflite::BuiltinOptions_ArgMinOptions ? static_cast(builtin_options()) : nullptr; + } + const tflite::FakeQuantOptions *builtin_options_as_FakeQuantOptions() const { + return builtin_options_type() == tflite::BuiltinOptions_FakeQuantOptions ? 
static_cast(builtin_options()) : nullptr; + } + const tflite::PackOptions *builtin_options_as_PackOptions() const { + return builtin_options_type() == tflite::BuiltinOptions_PackOptions ? static_cast(builtin_options()) : nullptr; + } + const tflite::LogicalOrOptions *builtin_options_as_LogicalOrOptions() const { + return builtin_options_type() == tflite::BuiltinOptions_LogicalOrOptions ? static_cast(builtin_options()) : nullptr; + } + const tflite::OneHotOptions *builtin_options_as_OneHotOptions() const { + return builtin_options_type() == tflite::BuiltinOptions_OneHotOptions ? static_cast(builtin_options()) : nullptr; + } + const tflite::LogicalAndOptions *builtin_options_as_LogicalAndOptions() const { + return builtin_options_type() == tflite::BuiltinOptions_LogicalAndOptions ? static_cast(builtin_options()) : nullptr; + } + const tflite::LogicalNotOptions *builtin_options_as_LogicalNotOptions() const { + return builtin_options_type() == tflite::BuiltinOptions_LogicalNotOptions ? static_cast(builtin_options()) : nullptr; + } + const tflite::UnpackOptions *builtin_options_as_UnpackOptions() const { + return builtin_options_type() == tflite::BuiltinOptions_UnpackOptions ? static_cast(builtin_options()) : nullptr; + } + const tflite::FloorDivOptions *builtin_options_as_FloorDivOptions() const { + return builtin_options_type() == tflite::BuiltinOptions_FloorDivOptions ? static_cast(builtin_options()) : nullptr; + } + const tflite::SquareOptions *builtin_options_as_SquareOptions() const { + return builtin_options_type() == tflite::BuiltinOptions_SquareOptions ? static_cast(builtin_options()) : nullptr; + } + const tflite::ZerosLikeOptions *builtin_options_as_ZerosLikeOptions() const { + return builtin_options_type() == tflite::BuiltinOptions_ZerosLikeOptions ? static_cast(builtin_options()) : nullptr; + } + const tflite::FillOptions *builtin_options_as_FillOptions() const { + return builtin_options_type() == tflite::BuiltinOptions_FillOptions ? 
static_cast(builtin_options()) : nullptr; + } + const tflite::BidirectionalSequenceLSTMOptions *builtin_options_as_BidirectionalSequenceLSTMOptions() const { + return builtin_options_type() == tflite::BuiltinOptions_BidirectionalSequenceLSTMOptions ? static_cast(builtin_options()) : nullptr; + } + const tflite::BidirectionalSequenceRNNOptions *builtin_options_as_BidirectionalSequenceRNNOptions() const { + return builtin_options_type() == tflite::BuiltinOptions_BidirectionalSequenceRNNOptions ? static_cast(builtin_options()) : nullptr; + } + const tflite::UnidirectionalSequenceLSTMOptions *builtin_options_as_UnidirectionalSequenceLSTMOptions() const { + return builtin_options_type() == tflite::BuiltinOptions_UnidirectionalSequenceLSTMOptions ? static_cast(builtin_options()) : nullptr; + } + const tflite::FloorModOptions *builtin_options_as_FloorModOptions() const { + return builtin_options_type() == tflite::BuiltinOptions_FloorModOptions ? static_cast(builtin_options()) : nullptr; + } + const tflite::RangeOptions *builtin_options_as_RangeOptions() const { + return builtin_options_type() == tflite::BuiltinOptions_RangeOptions ? static_cast(builtin_options()) : nullptr; + } + const tflite::ResizeNearestNeighborOptions *builtin_options_as_ResizeNearestNeighborOptions() const { + return builtin_options_type() == tflite::BuiltinOptions_ResizeNearestNeighborOptions ? static_cast(builtin_options()) : nullptr; + } + const tflite::LeakyReluOptions *builtin_options_as_LeakyReluOptions() const { + return builtin_options_type() == tflite::BuiltinOptions_LeakyReluOptions ? static_cast(builtin_options()) : nullptr; + } + const tflite::SquaredDifferenceOptions *builtin_options_as_SquaredDifferenceOptions() const { + return builtin_options_type() == tflite::BuiltinOptions_SquaredDifferenceOptions ? 
static_cast(builtin_options()) : nullptr; + } + const tflite::MirrorPadOptions *builtin_options_as_MirrorPadOptions() const { + return builtin_options_type() == tflite::BuiltinOptions_MirrorPadOptions ? static_cast(builtin_options()) : nullptr; + } + const tflite::AbsOptions *builtin_options_as_AbsOptions() const { + return builtin_options_type() == tflite::BuiltinOptions_AbsOptions ? static_cast(builtin_options()) : nullptr; + } + const tflite::SplitVOptions *builtin_options_as_SplitVOptions() const { + return builtin_options_type() == tflite::BuiltinOptions_SplitVOptions ? static_cast(builtin_options()) : nullptr; + } + const tflite::UniqueOptions *builtin_options_as_UniqueOptions() const { + return builtin_options_type() == tflite::BuiltinOptions_UniqueOptions ? static_cast(builtin_options()) : nullptr; + } + const tflite::ReverseV2Options *builtin_options_as_ReverseV2Options() const { + return builtin_options_type() == tflite::BuiltinOptions_ReverseV2Options ? static_cast(builtin_options()) : nullptr; + } + const tflite::AddNOptions *builtin_options_as_AddNOptions() const { + return builtin_options_type() == tflite::BuiltinOptions_AddNOptions ? static_cast(builtin_options()) : nullptr; + } + const tflite::GatherNdOptions *builtin_options_as_GatherNdOptions() const { + return builtin_options_type() == tflite::BuiltinOptions_GatherNdOptions ? static_cast(builtin_options()) : nullptr; + } + const tflite::CosOptions *builtin_options_as_CosOptions() const { + return builtin_options_type() == tflite::BuiltinOptions_CosOptions ? static_cast(builtin_options()) : nullptr; + } + const tflite::WhereOptions *builtin_options_as_WhereOptions() const { + return builtin_options_type() == tflite::BuiltinOptions_WhereOptions ? static_cast(builtin_options()) : nullptr; + } + const tflite::RankOptions *builtin_options_as_RankOptions() const { + return builtin_options_type() == tflite::BuiltinOptions_RankOptions ? 
static_cast(builtin_options()) : nullptr; + } + const tflite::ReverseSequenceOptions *builtin_options_as_ReverseSequenceOptions() const { + return builtin_options_type() == tflite::BuiltinOptions_ReverseSequenceOptions ? static_cast(builtin_options()) : nullptr; + } + const tflite::MatrixDiagOptions *builtin_options_as_MatrixDiagOptions() const { + return builtin_options_type() == tflite::BuiltinOptions_MatrixDiagOptions ? static_cast(builtin_options()) : nullptr; + } + const tflite::QuantizeOptions *builtin_options_as_QuantizeOptions() const { + return builtin_options_type() == tflite::BuiltinOptions_QuantizeOptions ? static_cast(builtin_options()) : nullptr; + } + const tflite::MatrixSetDiagOptions *builtin_options_as_MatrixSetDiagOptions() const { + return builtin_options_type() == tflite::BuiltinOptions_MatrixSetDiagOptions ? static_cast(builtin_options()) : nullptr; + } + const tflite::HardSwishOptions *builtin_options_as_HardSwishOptions() const { + return builtin_options_type() == tflite::BuiltinOptions_HardSwishOptions ? static_cast(builtin_options()) : nullptr; + } + const tflite::IfOptions *builtin_options_as_IfOptions() const { + return builtin_options_type() == tflite::BuiltinOptions_IfOptions ? static_cast(builtin_options()) : nullptr; + } + const tflite::WhileOptions *builtin_options_as_WhileOptions() const { + return builtin_options_type() == tflite::BuiltinOptions_WhileOptions ? static_cast(builtin_options()) : nullptr; + } + const tflite::DepthToSpaceOptions *builtin_options_as_DepthToSpaceOptions() const { + return builtin_options_type() == tflite::BuiltinOptions_DepthToSpaceOptions ? static_cast(builtin_options()) : nullptr; + } + const tflite::NonMaxSuppressionV4Options *builtin_options_as_NonMaxSuppressionV4Options() const { + return builtin_options_type() == tflite::BuiltinOptions_NonMaxSuppressionV4Options ? 
static_cast(builtin_options()) : nullptr; + } + const tflite::NonMaxSuppressionV5Options *builtin_options_as_NonMaxSuppressionV5Options() const { + return builtin_options_type() == tflite::BuiltinOptions_NonMaxSuppressionV5Options ? static_cast(builtin_options()) : nullptr; + } + const tflite::ScatterNdOptions *builtin_options_as_ScatterNdOptions() const { + return builtin_options_type() == tflite::BuiltinOptions_ScatterNdOptions ? static_cast(builtin_options()) : nullptr; + } + const tflite::SelectV2Options *builtin_options_as_SelectV2Options() const { + return builtin_options_type() == tflite::BuiltinOptions_SelectV2Options ? static_cast(builtin_options()) : nullptr; + } + const tflite::DensifyOptions *builtin_options_as_DensifyOptions() const { + return builtin_options_type() == tflite::BuiltinOptions_DensifyOptions ? static_cast(builtin_options()) : nullptr; + } + const tflite::SegmentSumOptions *builtin_options_as_SegmentSumOptions() const { + return builtin_options_type() == tflite::BuiltinOptions_SegmentSumOptions ? static_cast(builtin_options()) : nullptr; + } + const tflite::BatchMatMulOptions *builtin_options_as_BatchMatMulOptions() const { + return builtin_options_type() == tflite::BuiltinOptions_BatchMatMulOptions ? static_cast(builtin_options()) : nullptr; + } + const tflite::CumsumOptions *builtin_options_as_CumsumOptions() const { + return builtin_options_type() == tflite::BuiltinOptions_CumsumOptions ? static_cast(builtin_options()) : nullptr; + } + const tflite::CallOnceOptions *builtin_options_as_CallOnceOptions() const { + return builtin_options_type() == tflite::BuiltinOptions_CallOnceOptions ? static_cast(builtin_options()) : nullptr; + } + const tflite::BroadcastToOptions *builtin_options_as_BroadcastToOptions() const { + return builtin_options_type() == tflite::BuiltinOptions_BroadcastToOptions ? 
static_cast(builtin_options()) : nullptr; + } + const tflite::Rfft2dOptions *builtin_options_as_Rfft2dOptions() const { + return builtin_options_type() == tflite::BuiltinOptions_Rfft2dOptions ? static_cast(builtin_options()) : nullptr; + } + const tflite::Conv3DOptions *builtin_options_as_Conv3DOptions() const { + return builtin_options_type() == tflite::BuiltinOptions_Conv3DOptions ? static_cast(builtin_options()) : nullptr; + } + const tflite::HashtableOptions *builtin_options_as_HashtableOptions() const { + return builtin_options_type() == tflite::BuiltinOptions_HashtableOptions ? static_cast(builtin_options()) : nullptr; + } + const tflite::HashtableFindOptions *builtin_options_as_HashtableFindOptions() const { + return builtin_options_type() == tflite::BuiltinOptions_HashtableFindOptions ? static_cast(builtin_options()) : nullptr; + } + const tflite::HashtableImportOptions *builtin_options_as_HashtableImportOptions() const { + return builtin_options_type() == tflite::BuiltinOptions_HashtableImportOptions ? static_cast(builtin_options()) : nullptr; + } + const tflite::HashtableSizeOptions *builtin_options_as_HashtableSizeOptions() const { + return builtin_options_type() == tflite::BuiltinOptions_HashtableSizeOptions ? static_cast(builtin_options()) : nullptr; + } + const tflite::VarHandleOptions *builtin_options_as_VarHandleOptions() const { + return builtin_options_type() == tflite::BuiltinOptions_VarHandleOptions ? static_cast(builtin_options()) : nullptr; + } + const tflite::ReadVariableOptions *builtin_options_as_ReadVariableOptions() const { + return builtin_options_type() == tflite::BuiltinOptions_ReadVariableOptions ? static_cast(builtin_options()) : nullptr; + } + const tflite::AssignVariableOptions *builtin_options_as_AssignVariableOptions() const { + return builtin_options_type() == tflite::BuiltinOptions_AssignVariableOptions ? 
static_cast(builtin_options()) : nullptr; + } + const tflite::RandomOptions *builtin_options_as_RandomOptions() const { + return builtin_options_type() == tflite::BuiltinOptions_RandomOptions ? static_cast(builtin_options()) : nullptr; + } + const tflite::BucketizeOptions *builtin_options_as_BucketizeOptions() const { + return builtin_options_type() == tflite::BuiltinOptions_BucketizeOptions ? static_cast(builtin_options()) : nullptr; + } + const tflite::GeluOptions *builtin_options_as_GeluOptions() const { + return builtin_options_type() == tflite::BuiltinOptions_GeluOptions ? static_cast(builtin_options()) : nullptr; + } + const tflite::DynamicUpdateSliceOptions *builtin_options_as_DynamicUpdateSliceOptions() const { + return builtin_options_type() == tflite::BuiltinOptions_DynamicUpdateSliceOptions ? static_cast(builtin_options()) : nullptr; + } + const tflite::UnsortedSegmentProdOptions *builtin_options_as_UnsortedSegmentProdOptions() const { + return builtin_options_type() == tflite::BuiltinOptions_UnsortedSegmentProdOptions ? static_cast(builtin_options()) : nullptr; + } + const tflite::UnsortedSegmentMaxOptions *builtin_options_as_UnsortedSegmentMaxOptions() const { + return builtin_options_type() == tflite::BuiltinOptions_UnsortedSegmentMaxOptions ? static_cast(builtin_options()) : nullptr; + } + const tflite::UnsortedSegmentMinOptions *builtin_options_as_UnsortedSegmentMinOptions() const { + return builtin_options_type() == tflite::BuiltinOptions_UnsortedSegmentMinOptions ? static_cast(builtin_options()) : nullptr; + } + const tflite::UnsortedSegmentSumOptions *builtin_options_as_UnsortedSegmentSumOptions() const { + return builtin_options_type() == tflite::BuiltinOptions_UnsortedSegmentSumOptions ? static_cast(builtin_options()) : nullptr; + } + const tflite::ATan2Options *builtin_options_as_ATan2Options() const { + return builtin_options_type() == tflite::BuiltinOptions_ATan2Options ? 
static_cast(builtin_options()) : nullptr; + } + const tflite::SignOptions *builtin_options_as_SignOptions() const { + return builtin_options_type() == tflite::BuiltinOptions_SignOptions ? static_cast(builtin_options()) : nullptr; + } + const tflite::BitcastOptions *builtin_options_as_BitcastOptions() const { + return builtin_options_type() == tflite::BuiltinOptions_BitcastOptions ? static_cast(builtin_options()) : nullptr; + } + const tflite::BitwiseXorOptions *builtin_options_as_BitwiseXorOptions() const { + return builtin_options_type() == tflite::BuiltinOptions_BitwiseXorOptions ? static_cast(builtin_options()) : nullptr; + } + const tflite::RightShiftOptions *builtin_options_as_RightShiftOptions() const { + return builtin_options_type() == tflite::BuiltinOptions_RightShiftOptions ? static_cast(builtin_options()) : nullptr; + } + const flatbuffers::Vector *custom_options() const { + return GetPointer *>(VT_CUSTOM_OPTIONS); + } + tflite::CustomOptionsFormat custom_options_format() const { + return static_cast(GetField(VT_CUSTOM_OPTIONS_FORMAT, 0)); + } + const flatbuffers::Vector *mutating_variable_inputs() const { + return GetPointer *>(VT_MUTATING_VARIABLE_INPUTS); + } + const flatbuffers::Vector *intermediates() const { + return GetPointer *>(VT_INTERMEDIATES); + } + uint64_t custom_options_offset() const { + return GetField(VT_CUSTOM_OPTIONS_OFFSET, 0); + } + uint64_t custom_options_size() const { + return GetField(VT_CUSTOM_OPTIONS_SIZE, 0); + } + bool Verify(flatbuffers::Verifier &verifier) const { + return VerifyTableStart(verifier) && + VerifyField(verifier, VT_OPCODE_INDEX, 4) && + VerifyOffset(verifier, VT_INPUTS) && + verifier.VerifyVector(inputs()) && + VerifyOffset(verifier, VT_OUTPUTS) && + verifier.VerifyVector(outputs()) && + VerifyField(verifier, VT_BUILTIN_OPTIONS_TYPE, 1) && + VerifyOffset(verifier, VT_BUILTIN_OPTIONS) && + VerifyBuiltinOptions(verifier, builtin_options(), builtin_options_type()) && + VerifyOffset(verifier, VT_CUSTOM_OPTIONS) 
&& + verifier.VerifyVector(custom_options()) && + VerifyField(verifier, VT_CUSTOM_OPTIONS_FORMAT, 1) && + VerifyOffset(verifier, VT_MUTATING_VARIABLE_INPUTS) && + verifier.VerifyVector(mutating_variable_inputs()) && + VerifyOffset(verifier, VT_INTERMEDIATES) && + verifier.VerifyVector(intermediates()) && + VerifyField(verifier, VT_CUSTOM_OPTIONS_OFFSET, 8) && + VerifyField(verifier, VT_CUSTOM_OPTIONS_SIZE, 8) && + verifier.EndTable(); + } + OperatorT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const; + void UnPackTo(OperatorT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const; + static flatbuffers::Offset Pack(flatbuffers::FlatBufferBuilder &_fbb, const OperatorT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); +}; + +template<> inline const tflite::Conv2DOptions *Operator::builtin_options_as() const { + return builtin_options_as_Conv2DOptions(); +} + +template<> inline const tflite::DepthwiseConv2DOptions *Operator::builtin_options_as() const { + return builtin_options_as_DepthwiseConv2DOptions(); +} + +template<> inline const tflite::ConcatEmbeddingsOptions *Operator::builtin_options_as() const { + return builtin_options_as_ConcatEmbeddingsOptions(); +} + +template<> inline const tflite::LSHProjectionOptions *Operator::builtin_options_as() const { + return builtin_options_as_LSHProjectionOptions(); +} + +template<> inline const tflite::Pool2DOptions *Operator::builtin_options_as() const { + return builtin_options_as_Pool2DOptions(); +} + +template<> inline const tflite::SVDFOptions *Operator::builtin_options_as() const { + return builtin_options_as_SVDFOptions(); +} + +template<> inline const tflite::RNNOptions *Operator::builtin_options_as() const { + return builtin_options_as_RNNOptions(); +} + +template<> inline const tflite::FullyConnectedOptions *Operator::builtin_options_as() const { + return builtin_options_as_FullyConnectedOptions(); +} + +template<> inline const tflite::SoftmaxOptions 
*Operator::builtin_options_as() const { + return builtin_options_as_SoftmaxOptions(); +} + +template<> inline const tflite::ConcatenationOptions *Operator::builtin_options_as() const { + return builtin_options_as_ConcatenationOptions(); +} + +template<> inline const tflite::AddOptions *Operator::builtin_options_as() const { + return builtin_options_as_AddOptions(); +} + +template<> inline const tflite::L2NormOptions *Operator::builtin_options_as() const { + return builtin_options_as_L2NormOptions(); +} + +template<> inline const tflite::LocalResponseNormalizationOptions *Operator::builtin_options_as() const { + return builtin_options_as_LocalResponseNormalizationOptions(); +} + +template<> inline const tflite::LSTMOptions *Operator::builtin_options_as() const { + return builtin_options_as_LSTMOptions(); +} + +template<> inline const tflite::ResizeBilinearOptions *Operator::builtin_options_as() const { + return builtin_options_as_ResizeBilinearOptions(); +} + +template<> inline const tflite::CallOptions *Operator::builtin_options_as() const { + return builtin_options_as_CallOptions(); +} + +template<> inline const tflite::ReshapeOptions *Operator::builtin_options_as() const { + return builtin_options_as_ReshapeOptions(); +} + +template<> inline const tflite::SkipGramOptions *Operator::builtin_options_as() const { + return builtin_options_as_SkipGramOptions(); +} + +template<> inline const tflite::SpaceToDepthOptions *Operator::builtin_options_as() const { + return builtin_options_as_SpaceToDepthOptions(); +} + +template<> inline const tflite::EmbeddingLookupSparseOptions *Operator::builtin_options_as() const { + return builtin_options_as_EmbeddingLookupSparseOptions(); +} + +template<> inline const tflite::MulOptions *Operator::builtin_options_as() const { + return builtin_options_as_MulOptions(); +} + +template<> inline const tflite::PadOptions *Operator::builtin_options_as() const { + return builtin_options_as_PadOptions(); +} + +template<> inline const 
tflite::GatherOptions *Operator::builtin_options_as() const { + return builtin_options_as_GatherOptions(); +} + +template<> inline const tflite::BatchToSpaceNDOptions *Operator::builtin_options_as() const { + return builtin_options_as_BatchToSpaceNDOptions(); +} + +template<> inline const tflite::SpaceToBatchNDOptions *Operator::builtin_options_as() const { + return builtin_options_as_SpaceToBatchNDOptions(); +} + +template<> inline const tflite::TransposeOptions *Operator::builtin_options_as() const { + return builtin_options_as_TransposeOptions(); +} + +template<> inline const tflite::ReducerOptions *Operator::builtin_options_as() const { + return builtin_options_as_ReducerOptions(); +} + +template<> inline const tflite::SubOptions *Operator::builtin_options_as() const { + return builtin_options_as_SubOptions(); +} + +template<> inline const tflite::DivOptions *Operator::builtin_options_as() const { + return builtin_options_as_DivOptions(); +} + +template<> inline const tflite::SqueezeOptions *Operator::builtin_options_as() const { + return builtin_options_as_SqueezeOptions(); +} + +template<> inline const tflite::SequenceRNNOptions *Operator::builtin_options_as() const { + return builtin_options_as_SequenceRNNOptions(); +} + +template<> inline const tflite::StridedSliceOptions *Operator::builtin_options_as() const { + return builtin_options_as_StridedSliceOptions(); +} + +template<> inline const tflite::ExpOptions *Operator::builtin_options_as() const { + return builtin_options_as_ExpOptions(); +} + +template<> inline const tflite::TopKV2Options *Operator::builtin_options_as() const { + return builtin_options_as_TopKV2Options(); +} + +template<> inline const tflite::SplitOptions *Operator::builtin_options_as() const { + return builtin_options_as_SplitOptions(); +} + +template<> inline const tflite::LogSoftmaxOptions *Operator::builtin_options_as() const { + return builtin_options_as_LogSoftmaxOptions(); +} + +template<> inline const tflite::CastOptions 
*Operator::builtin_options_as() const { + return builtin_options_as_CastOptions(); +} + +template<> inline const tflite::DequantizeOptions *Operator::builtin_options_as() const { + return builtin_options_as_DequantizeOptions(); +} + +template<> inline const tflite::MaximumMinimumOptions *Operator::builtin_options_as() const { + return builtin_options_as_MaximumMinimumOptions(); +} + +template<> inline const tflite::ArgMaxOptions *Operator::builtin_options_as() const { + return builtin_options_as_ArgMaxOptions(); +} + +template<> inline const tflite::LessOptions *Operator::builtin_options_as() const { + return builtin_options_as_LessOptions(); +} + +template<> inline const tflite::NegOptions *Operator::builtin_options_as() const { + return builtin_options_as_NegOptions(); +} + +template<> inline const tflite::PadV2Options *Operator::builtin_options_as() const { + return builtin_options_as_PadV2Options(); +} + +template<> inline const tflite::GreaterOptions *Operator::builtin_options_as() const { + return builtin_options_as_GreaterOptions(); +} + +template<> inline const tflite::GreaterEqualOptions *Operator::builtin_options_as() const { + return builtin_options_as_GreaterEqualOptions(); +} + +template<> inline const tflite::LessEqualOptions *Operator::builtin_options_as() const { + return builtin_options_as_LessEqualOptions(); +} + +template<> inline const tflite::SelectOptions *Operator::builtin_options_as() const { + return builtin_options_as_SelectOptions(); +} + +template<> inline const tflite::SliceOptions *Operator::builtin_options_as() const { + return builtin_options_as_SliceOptions(); +} + +template<> inline const tflite::TransposeConvOptions *Operator::builtin_options_as() const { + return builtin_options_as_TransposeConvOptions(); +} + +template<> inline const tflite::SparseToDenseOptions *Operator::builtin_options_as() const { + return builtin_options_as_SparseToDenseOptions(); +} + +template<> inline const tflite::TileOptions 
*Operator::builtin_options_as() const { + return builtin_options_as_TileOptions(); +} + +template<> inline const tflite::ExpandDimsOptions *Operator::builtin_options_as() const { + return builtin_options_as_ExpandDimsOptions(); +} + +template<> inline const tflite::EqualOptions *Operator::builtin_options_as() const { + return builtin_options_as_EqualOptions(); +} + +template<> inline const tflite::NotEqualOptions *Operator::builtin_options_as() const { + return builtin_options_as_NotEqualOptions(); +} + +template<> inline const tflite::ShapeOptions *Operator::builtin_options_as() const { + return builtin_options_as_ShapeOptions(); +} + +template<> inline const tflite::PowOptions *Operator::builtin_options_as() const { + return builtin_options_as_PowOptions(); +} + +template<> inline const tflite::ArgMinOptions *Operator::builtin_options_as() const { + return builtin_options_as_ArgMinOptions(); +} + +template<> inline const tflite::FakeQuantOptions *Operator::builtin_options_as() const { + return builtin_options_as_FakeQuantOptions(); +} + +template<> inline const tflite::PackOptions *Operator::builtin_options_as() const { + return builtin_options_as_PackOptions(); +} + +template<> inline const tflite::LogicalOrOptions *Operator::builtin_options_as() const { + return builtin_options_as_LogicalOrOptions(); +} + +template<> inline const tflite::OneHotOptions *Operator::builtin_options_as() const { + return builtin_options_as_OneHotOptions(); +} + +template<> inline const tflite::LogicalAndOptions *Operator::builtin_options_as() const { + return builtin_options_as_LogicalAndOptions(); +} + +template<> inline const tflite::LogicalNotOptions *Operator::builtin_options_as() const { + return builtin_options_as_LogicalNotOptions(); +} + +template<> inline const tflite::UnpackOptions *Operator::builtin_options_as() const { + return builtin_options_as_UnpackOptions(); +} + +template<> inline const tflite::FloorDivOptions *Operator::builtin_options_as() const { + return 
builtin_options_as_FloorDivOptions(); +} + +template<> inline const tflite::SquareOptions *Operator::builtin_options_as() const { + return builtin_options_as_SquareOptions(); +} + +template<> inline const tflite::ZerosLikeOptions *Operator::builtin_options_as() const { + return builtin_options_as_ZerosLikeOptions(); +} + +template<> inline const tflite::FillOptions *Operator::builtin_options_as() const { + return builtin_options_as_FillOptions(); +} + +template<> inline const tflite::BidirectionalSequenceLSTMOptions *Operator::builtin_options_as() const { + return builtin_options_as_BidirectionalSequenceLSTMOptions(); +} + +template<> inline const tflite::BidirectionalSequenceRNNOptions *Operator::builtin_options_as() const { + return builtin_options_as_BidirectionalSequenceRNNOptions(); +} + +template<> inline const tflite::UnidirectionalSequenceLSTMOptions *Operator::builtin_options_as() const { + return builtin_options_as_UnidirectionalSequenceLSTMOptions(); +} + +template<> inline const tflite::FloorModOptions *Operator::builtin_options_as() const { + return builtin_options_as_FloorModOptions(); +} + +template<> inline const tflite::RangeOptions *Operator::builtin_options_as() const { + return builtin_options_as_RangeOptions(); +} + +template<> inline const tflite::ResizeNearestNeighborOptions *Operator::builtin_options_as() const { + return builtin_options_as_ResizeNearestNeighborOptions(); +} + +template<> inline const tflite::LeakyReluOptions *Operator::builtin_options_as() const { + return builtin_options_as_LeakyReluOptions(); +} + +template<> inline const tflite::SquaredDifferenceOptions *Operator::builtin_options_as() const { + return builtin_options_as_SquaredDifferenceOptions(); +} + +template<> inline const tflite::MirrorPadOptions *Operator::builtin_options_as() const { + return builtin_options_as_MirrorPadOptions(); +} + +template<> inline const tflite::AbsOptions *Operator::builtin_options_as() const { + return builtin_options_as_AbsOptions(); +} + 
+template<> inline const tflite::SplitVOptions *Operator::builtin_options_as() const { + return builtin_options_as_SplitVOptions(); +} + +template<> inline const tflite::UniqueOptions *Operator::builtin_options_as() const { + return builtin_options_as_UniqueOptions(); +} + +template<> inline const tflite::ReverseV2Options *Operator::builtin_options_as() const { + return builtin_options_as_ReverseV2Options(); +} + +template<> inline const tflite::AddNOptions *Operator::builtin_options_as() const { + return builtin_options_as_AddNOptions(); +} + +template<> inline const tflite::GatherNdOptions *Operator::builtin_options_as() const { + return builtin_options_as_GatherNdOptions(); +} + +template<> inline const tflite::CosOptions *Operator::builtin_options_as() const { + return builtin_options_as_CosOptions(); +} + +template<> inline const tflite::WhereOptions *Operator::builtin_options_as() const { + return builtin_options_as_WhereOptions(); +} + +template<> inline const tflite::RankOptions *Operator::builtin_options_as() const { + return builtin_options_as_RankOptions(); +} + +template<> inline const tflite::ReverseSequenceOptions *Operator::builtin_options_as() const { + return builtin_options_as_ReverseSequenceOptions(); +} + +template<> inline const tflite::MatrixDiagOptions *Operator::builtin_options_as() const { + return builtin_options_as_MatrixDiagOptions(); +} + +template<> inline const tflite::QuantizeOptions *Operator::builtin_options_as() const { + return builtin_options_as_QuantizeOptions(); +} + +template<> inline const tflite::MatrixSetDiagOptions *Operator::builtin_options_as() const { + return builtin_options_as_MatrixSetDiagOptions(); +} + +template<> inline const tflite::HardSwishOptions *Operator::builtin_options_as() const { + return builtin_options_as_HardSwishOptions(); +} + +template<> inline const tflite::IfOptions *Operator::builtin_options_as() const { + return builtin_options_as_IfOptions(); +} + +template<> inline const tflite::WhileOptions 
*Operator::builtin_options_as() const { + return builtin_options_as_WhileOptions(); +} + +template<> inline const tflite::DepthToSpaceOptions *Operator::builtin_options_as() const { + return builtin_options_as_DepthToSpaceOptions(); +} + +template<> inline const tflite::NonMaxSuppressionV4Options *Operator::builtin_options_as() const { + return builtin_options_as_NonMaxSuppressionV4Options(); +} + +template<> inline const tflite::NonMaxSuppressionV5Options *Operator::builtin_options_as() const { + return builtin_options_as_NonMaxSuppressionV5Options(); +} + +template<> inline const tflite::ScatterNdOptions *Operator::builtin_options_as() const { + return builtin_options_as_ScatterNdOptions(); +} + +template<> inline const tflite::SelectV2Options *Operator::builtin_options_as() const { + return builtin_options_as_SelectV2Options(); +} + +template<> inline const tflite::DensifyOptions *Operator::builtin_options_as() const { + return builtin_options_as_DensifyOptions(); +} + +template<> inline const tflite::SegmentSumOptions *Operator::builtin_options_as() const { + return builtin_options_as_SegmentSumOptions(); +} + +template<> inline const tflite::BatchMatMulOptions *Operator::builtin_options_as() const { + return builtin_options_as_BatchMatMulOptions(); +} + +template<> inline const tflite::CumsumOptions *Operator::builtin_options_as() const { + return builtin_options_as_CumsumOptions(); +} + +template<> inline const tflite::CallOnceOptions *Operator::builtin_options_as() const { + return builtin_options_as_CallOnceOptions(); +} + +template<> inline const tflite::BroadcastToOptions *Operator::builtin_options_as() const { + return builtin_options_as_BroadcastToOptions(); +} + +template<> inline const tflite::Rfft2dOptions *Operator::builtin_options_as() const { + return builtin_options_as_Rfft2dOptions(); +} + +template<> inline const tflite::Conv3DOptions *Operator::builtin_options_as() const { + return builtin_options_as_Conv3DOptions(); +} + +template<> inline 
const tflite::HashtableOptions *Operator::builtin_options_as() const { + return builtin_options_as_HashtableOptions(); +} + +template<> inline const tflite::HashtableFindOptions *Operator::builtin_options_as() const { + return builtin_options_as_HashtableFindOptions(); +} + +template<> inline const tflite::HashtableImportOptions *Operator::builtin_options_as() const { + return builtin_options_as_HashtableImportOptions(); +} + +template<> inline const tflite::HashtableSizeOptions *Operator::builtin_options_as() const { + return builtin_options_as_HashtableSizeOptions(); +} + +template<> inline const tflite::VarHandleOptions *Operator::builtin_options_as() const { + return builtin_options_as_VarHandleOptions(); +} + +template<> inline const tflite::ReadVariableOptions *Operator::builtin_options_as() const { + return builtin_options_as_ReadVariableOptions(); +} + +template<> inline const tflite::AssignVariableOptions *Operator::builtin_options_as() const { + return builtin_options_as_AssignVariableOptions(); +} + +template<> inline const tflite::RandomOptions *Operator::builtin_options_as() const { + return builtin_options_as_RandomOptions(); +} + +template<> inline const tflite::BucketizeOptions *Operator::builtin_options_as() const { + return builtin_options_as_BucketizeOptions(); +} + +template<> inline const tflite::GeluOptions *Operator::builtin_options_as() const { + return builtin_options_as_GeluOptions(); +} + +template<> inline const tflite::DynamicUpdateSliceOptions *Operator::builtin_options_as() const { + return builtin_options_as_DynamicUpdateSliceOptions(); +} + +template<> inline const tflite::UnsortedSegmentProdOptions *Operator::builtin_options_as() const { + return builtin_options_as_UnsortedSegmentProdOptions(); +} + +template<> inline const tflite::UnsortedSegmentMaxOptions *Operator::builtin_options_as() const { + return builtin_options_as_UnsortedSegmentMaxOptions(); +} + +template<> inline const tflite::UnsortedSegmentMinOptions 
*Operator::builtin_options_as() const { + return builtin_options_as_UnsortedSegmentMinOptions(); +} + +template<> inline const tflite::UnsortedSegmentSumOptions *Operator::builtin_options_as() const { + return builtin_options_as_UnsortedSegmentSumOptions(); +} + +template<> inline const tflite::ATan2Options *Operator::builtin_options_as() const { + return builtin_options_as_ATan2Options(); +} + +template<> inline const tflite::SignOptions *Operator::builtin_options_as() const { + return builtin_options_as_SignOptions(); +} + +template<> inline const tflite::BitcastOptions *Operator::builtin_options_as() const { + return builtin_options_as_BitcastOptions(); +} + +template<> inline const tflite::BitwiseXorOptions *Operator::builtin_options_as() const { + return builtin_options_as_BitwiseXorOptions(); +} + +template<> inline const tflite::RightShiftOptions *Operator::builtin_options_as() const { + return builtin_options_as_RightShiftOptions(); +} + +struct OperatorBuilder { + typedef Operator Table; + flatbuffers::FlatBufferBuilder &fbb_; + flatbuffers::uoffset_t start_; + void add_opcode_index(uint32_t opcode_index) { + fbb_.AddElement(Operator::VT_OPCODE_INDEX, opcode_index, 0); + } + void add_inputs(flatbuffers::Offset> inputs) { + fbb_.AddOffset(Operator::VT_INPUTS, inputs); + } + void add_outputs(flatbuffers::Offset> outputs) { + fbb_.AddOffset(Operator::VT_OUTPUTS, outputs); + } + void add_builtin_options_type(tflite::BuiltinOptions builtin_options_type) { + fbb_.AddElement(Operator::VT_BUILTIN_OPTIONS_TYPE, static_cast(builtin_options_type), 0); + } + void add_builtin_options(flatbuffers::Offset builtin_options) { + fbb_.AddOffset(Operator::VT_BUILTIN_OPTIONS, builtin_options); + } + void add_custom_options(flatbuffers::Offset> custom_options) { + fbb_.AddOffset(Operator::VT_CUSTOM_OPTIONS, custom_options); + } + void add_custom_options_format(tflite::CustomOptionsFormat custom_options_format) { + fbb_.AddElement(Operator::VT_CUSTOM_OPTIONS_FORMAT, 
static_cast(custom_options_format), 0); + } + void add_mutating_variable_inputs(flatbuffers::Offset> mutating_variable_inputs) { + fbb_.AddOffset(Operator::VT_MUTATING_VARIABLE_INPUTS, mutating_variable_inputs); + } + void add_intermediates(flatbuffers::Offset> intermediates) { + fbb_.AddOffset(Operator::VT_INTERMEDIATES, intermediates); + } + void add_custom_options_offset(uint64_t custom_options_offset) { + fbb_.AddElement(Operator::VT_CUSTOM_OPTIONS_OFFSET, custom_options_offset, 0); + } + void add_custom_options_size(uint64_t custom_options_size) { + fbb_.AddElement(Operator::VT_CUSTOM_OPTIONS_SIZE, custom_options_size, 0); + } + explicit OperatorBuilder(flatbuffers::FlatBufferBuilder &_fbb) + : fbb_(_fbb) { + start_ = fbb_.StartTable(); + } + flatbuffers::Offset Finish() { + const auto end = fbb_.EndTable(start_); + auto o = flatbuffers::Offset(end); + return o; + } +}; + +inline flatbuffers::Offset CreateOperator( + flatbuffers::FlatBufferBuilder &_fbb, + uint32_t opcode_index = 0, + flatbuffers::Offset> inputs = 0, + flatbuffers::Offset> outputs = 0, + tflite::BuiltinOptions builtin_options_type = tflite::BuiltinOptions_NONE, + flatbuffers::Offset builtin_options = 0, + flatbuffers::Offset> custom_options = 0, + tflite::CustomOptionsFormat custom_options_format = tflite::CustomOptionsFormat_FLEXBUFFERS, + flatbuffers::Offset> mutating_variable_inputs = 0, + flatbuffers::Offset> intermediates = 0, + uint64_t custom_options_offset = 0, + uint64_t custom_options_size = 0) { + OperatorBuilder builder_(_fbb); + builder_.add_custom_options_size(custom_options_size); + builder_.add_custom_options_offset(custom_options_offset); + builder_.add_intermediates(intermediates); + builder_.add_mutating_variable_inputs(mutating_variable_inputs); + builder_.add_custom_options(custom_options); + builder_.add_builtin_options(builtin_options); + builder_.add_outputs(outputs); + builder_.add_inputs(inputs); + builder_.add_opcode_index(opcode_index); + 
builder_.add_custom_options_format(custom_options_format); + builder_.add_builtin_options_type(builtin_options_type); + return builder_.Finish(); +} + +inline flatbuffers::Offset CreateOperatorDirect( + flatbuffers::FlatBufferBuilder &_fbb, + uint32_t opcode_index = 0, + const std::vector *inputs = nullptr, + const std::vector *outputs = nullptr, + tflite::BuiltinOptions builtin_options_type = tflite::BuiltinOptions_NONE, + flatbuffers::Offset builtin_options = 0, + const std::vector *custom_options = nullptr, + tflite::CustomOptionsFormat custom_options_format = tflite::CustomOptionsFormat_FLEXBUFFERS, + const std::vector *mutating_variable_inputs = nullptr, + const std::vector *intermediates = nullptr, + uint64_t custom_options_offset = 0, + uint64_t custom_options_size = 0) { + auto inputs__ = inputs ? _fbb.CreateVector(*inputs) : 0; + auto outputs__ = outputs ? _fbb.CreateVector(*outputs) : 0; + auto custom_options__ = custom_options ? _fbb.CreateVector(*custom_options) : 0; + auto mutating_variable_inputs__ = mutating_variable_inputs ? _fbb.CreateVector(*mutating_variable_inputs) : 0; + auto intermediates__ = intermediates ? 
_fbb.CreateVector(*intermediates) : 0; + return tflite::CreateOperator( + _fbb, + opcode_index, + inputs__, + outputs__, + builtin_options_type, + builtin_options, + custom_options__, + custom_options_format, + mutating_variable_inputs__, + intermediates__, + custom_options_offset, + custom_options_size); +} + +flatbuffers::Offset CreateOperator(flatbuffers::FlatBufferBuilder &_fbb, const OperatorT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); + +struct SubGraphT : public flatbuffers::NativeTable { + typedef SubGraph TableType; + std::vector> tensors{}; + std::vector inputs{}; + std::vector outputs{}; + std::vector> operators{}; + std::string name{}; + SubGraphT() = default; + SubGraphT(const SubGraphT &o); + SubGraphT(SubGraphT&&) FLATBUFFERS_NOEXCEPT = default; + SubGraphT &operator=(SubGraphT o) FLATBUFFERS_NOEXCEPT; +}; + +struct SubGraph FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table { + typedef SubGraphT NativeTableType; + typedef SubGraphBuilder Builder; + enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE { + VT_TENSORS = 4, + VT_INPUTS = 6, + VT_OUTPUTS = 8, + VT_OPERATORS = 10, + VT_NAME = 12 + }; + const flatbuffers::Vector> *tensors() const { + return GetPointer> *>(VT_TENSORS); + } + const flatbuffers::Vector *inputs() const { + return GetPointer *>(VT_INPUTS); + } + const flatbuffers::Vector *outputs() const { + return GetPointer *>(VT_OUTPUTS); + } + const flatbuffers::Vector> *operators() const { + return GetPointer> *>(VT_OPERATORS); + } + const flatbuffers::String *name() const { + return GetPointer(VT_NAME); + } + bool Verify(flatbuffers::Verifier &verifier) const { + return VerifyTableStart(verifier) && + VerifyOffset(verifier, VT_TENSORS) && + verifier.VerifyVector(tensors()) && + verifier.VerifyVectorOfTables(tensors()) && + VerifyOffset(verifier, VT_INPUTS) && + verifier.VerifyVector(inputs()) && + VerifyOffset(verifier, VT_OUTPUTS) && + verifier.VerifyVector(outputs()) && + VerifyOffset(verifier, 
VT_OPERATORS) && + verifier.VerifyVector(operators()) && + verifier.VerifyVectorOfTables(operators()) && + VerifyOffset(verifier, VT_NAME) && + verifier.VerifyString(name()) && + verifier.EndTable(); + } + SubGraphT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const; + void UnPackTo(SubGraphT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const; + static flatbuffers::Offset Pack(flatbuffers::FlatBufferBuilder &_fbb, const SubGraphT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); +}; + +struct SubGraphBuilder { + typedef SubGraph Table; + flatbuffers::FlatBufferBuilder &fbb_; + flatbuffers::uoffset_t start_; + void add_tensors(flatbuffers::Offset>> tensors) { + fbb_.AddOffset(SubGraph::VT_TENSORS, tensors); + } + void add_inputs(flatbuffers::Offset> inputs) { + fbb_.AddOffset(SubGraph::VT_INPUTS, inputs); + } + void add_outputs(flatbuffers::Offset> outputs) { + fbb_.AddOffset(SubGraph::VT_OUTPUTS, outputs); + } + void add_operators(flatbuffers::Offset>> operators) { + fbb_.AddOffset(SubGraph::VT_OPERATORS, operators); + } + void add_name(flatbuffers::Offset name) { + fbb_.AddOffset(SubGraph::VT_NAME, name); + } + explicit SubGraphBuilder(flatbuffers::FlatBufferBuilder &_fbb) + : fbb_(_fbb) { + start_ = fbb_.StartTable(); + } + flatbuffers::Offset Finish() { + const auto end = fbb_.EndTable(start_); + auto o = flatbuffers::Offset(end); + return o; + } +}; + +inline flatbuffers::Offset CreateSubGraph( + flatbuffers::FlatBufferBuilder &_fbb, + flatbuffers::Offset>> tensors = 0, + flatbuffers::Offset> inputs = 0, + flatbuffers::Offset> outputs = 0, + flatbuffers::Offset>> operators = 0, + flatbuffers::Offset name = 0) { + SubGraphBuilder builder_(_fbb); + builder_.add_name(name); + builder_.add_operators(operators); + builder_.add_outputs(outputs); + builder_.add_inputs(inputs); + builder_.add_tensors(tensors); + return builder_.Finish(); +} + +inline flatbuffers::Offset CreateSubGraphDirect( + 
flatbuffers::FlatBufferBuilder &_fbb, + const std::vector> *tensors = nullptr, + const std::vector *inputs = nullptr, + const std::vector *outputs = nullptr, + const std::vector> *operators = nullptr, + const char *name = nullptr) { + auto tensors__ = tensors ? _fbb.CreateVector>(*tensors) : 0; + auto inputs__ = inputs ? _fbb.CreateVector(*inputs) : 0; + auto outputs__ = outputs ? _fbb.CreateVector(*outputs) : 0; + auto operators__ = operators ? _fbb.CreateVector>(*operators) : 0; + auto name__ = name ? _fbb.CreateString(name) : 0; + return tflite::CreateSubGraph( + _fbb, + tensors__, + inputs__, + outputs__, + operators__, + name__); +} + +flatbuffers::Offset CreateSubGraph(flatbuffers::FlatBufferBuilder &_fbb, const SubGraphT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); + +struct BufferT : public flatbuffers::NativeTable { + typedef Buffer TableType; + std::vector data{}; + uint64_t offset = 0; + uint64_t size = 0; +}; + +struct Buffer FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table { + typedef BufferT NativeTableType; + typedef BufferBuilder Builder; + enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE { + VT_DATA = 4, + VT_OFFSET = 6, + VT_SIZE = 8 + }; + const flatbuffers::Vector *data() const { + return GetPointer *>(VT_DATA); + } + uint64_t offset() const { + return GetField(VT_OFFSET, 0); + } + uint64_t size() const { + return GetField(VT_SIZE, 0); + } + bool Verify(flatbuffers::Verifier &verifier) const { + return VerifyTableStart(verifier) && + VerifyOffset(verifier, VT_DATA) && + verifier.VerifyVector(data()) && + VerifyField(verifier, VT_OFFSET, 8) && + VerifyField(verifier, VT_SIZE, 8) && + verifier.EndTable(); + } + BufferT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const; + void UnPackTo(BufferT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const; + static flatbuffers::Offset Pack(flatbuffers::FlatBufferBuilder &_fbb, const BufferT* _o, const 
flatbuffers::rehasher_function_t *_rehasher = nullptr); +}; + +struct BufferBuilder { + typedef Buffer Table; + flatbuffers::FlatBufferBuilder &fbb_; + flatbuffers::uoffset_t start_; + void add_data(flatbuffers::Offset> data) { + fbb_.AddOffset(Buffer::VT_DATA, data); + } + void add_offset(uint64_t offset) { + fbb_.AddElement(Buffer::VT_OFFSET, offset, 0); + } + void add_size(uint64_t size) { + fbb_.AddElement(Buffer::VT_SIZE, size, 0); + } + explicit BufferBuilder(flatbuffers::FlatBufferBuilder &_fbb) + : fbb_(_fbb) { + start_ = fbb_.StartTable(); + } + flatbuffers::Offset Finish() { + const auto end = fbb_.EndTable(start_); + auto o = flatbuffers::Offset(end); + return o; + } +}; + +inline flatbuffers::Offset CreateBuffer( + flatbuffers::FlatBufferBuilder &_fbb, + flatbuffers::Offset> data = 0, + uint64_t offset = 0, + uint64_t size = 0) { + BufferBuilder builder_(_fbb); + builder_.add_size(size); + builder_.add_offset(offset); + builder_.add_data(data); + return builder_.Finish(); +} + +inline flatbuffers::Offset CreateBufferDirect( + flatbuffers::FlatBufferBuilder &_fbb, + const std::vector *data = nullptr, + uint64_t offset = 0, + uint64_t size = 0) { + if (data) { _fbb.ForceVectorAlignment(data->size(), sizeof(uint8_t), 16); } + auto data__ = data ? 
_fbb.CreateVector(*data) : 0; + return tflite::CreateBuffer( + _fbb, + data__, + offset, + size); +} + +flatbuffers::Offset CreateBuffer(flatbuffers::FlatBufferBuilder &_fbb, const BufferT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); + +struct MetadataT : public flatbuffers::NativeTable { + typedef Metadata TableType; + std::string name{}; + uint32_t buffer = 0; +}; + +struct Metadata FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table { + typedef MetadataT NativeTableType; + typedef MetadataBuilder Builder; + enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE { + VT_NAME = 4, + VT_BUFFER = 6 + }; + const flatbuffers::String *name() const { + return GetPointer(VT_NAME); + } + uint32_t buffer() const { + return GetField(VT_BUFFER, 0); + } + bool Verify(flatbuffers::Verifier &verifier) const { + return VerifyTableStart(verifier) && + VerifyOffset(verifier, VT_NAME) && + verifier.VerifyString(name()) && + VerifyField(verifier, VT_BUFFER, 4) && + verifier.EndTable(); + } + MetadataT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const; + void UnPackTo(MetadataT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const; + static flatbuffers::Offset Pack(flatbuffers::FlatBufferBuilder &_fbb, const MetadataT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); +}; + +struct MetadataBuilder { + typedef Metadata Table; + flatbuffers::FlatBufferBuilder &fbb_; + flatbuffers::uoffset_t start_; + void add_name(flatbuffers::Offset name) { + fbb_.AddOffset(Metadata::VT_NAME, name); + } + void add_buffer(uint32_t buffer) { + fbb_.AddElement(Metadata::VT_BUFFER, buffer, 0); + } + explicit MetadataBuilder(flatbuffers::FlatBufferBuilder &_fbb) + : fbb_(_fbb) { + start_ = fbb_.StartTable(); + } + flatbuffers::Offset Finish() { + const auto end = fbb_.EndTable(start_); + auto o = flatbuffers::Offset(end); + return o; + } +}; + +inline flatbuffers::Offset CreateMetadata( + 
flatbuffers::FlatBufferBuilder &_fbb, + flatbuffers::Offset name = 0, + uint32_t buffer = 0) { + MetadataBuilder builder_(_fbb); + builder_.add_buffer(buffer); + builder_.add_name(name); + return builder_.Finish(); +} + +inline flatbuffers::Offset CreateMetadataDirect( + flatbuffers::FlatBufferBuilder &_fbb, + const char *name = nullptr, + uint32_t buffer = 0) { + auto name__ = name ? _fbb.CreateString(name) : 0; + return tflite::CreateMetadata( + _fbb, + name__, + buffer); +} + +flatbuffers::Offset CreateMetadata(flatbuffers::FlatBufferBuilder &_fbb, const MetadataT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); + +struct TensorMapT : public flatbuffers::NativeTable { + typedef TensorMap TableType; + std::string name{}; + uint32_t tensor_index = 0; +}; + +struct TensorMap FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table { + typedef TensorMapT NativeTableType; + typedef TensorMapBuilder Builder; + enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE { + VT_NAME = 4, + VT_TENSOR_INDEX = 6 + }; + const flatbuffers::String *name() const { + return GetPointer(VT_NAME); + } + uint32_t tensor_index() const { + return GetField(VT_TENSOR_INDEX, 0); + } + bool Verify(flatbuffers::Verifier &verifier) const { + return VerifyTableStart(verifier) && + VerifyOffset(verifier, VT_NAME) && + verifier.VerifyString(name()) && + VerifyField(verifier, VT_TENSOR_INDEX, 4) && + verifier.EndTable(); + } + TensorMapT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const; + void UnPackTo(TensorMapT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const; + static flatbuffers::Offset Pack(flatbuffers::FlatBufferBuilder &_fbb, const TensorMapT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); +}; + +struct TensorMapBuilder { + typedef TensorMap Table; + flatbuffers::FlatBufferBuilder &fbb_; + flatbuffers::uoffset_t start_; + void add_name(flatbuffers::Offset name) { + fbb_.AddOffset(TensorMap::VT_NAME, 
name); + } + void add_tensor_index(uint32_t tensor_index) { + fbb_.AddElement(TensorMap::VT_TENSOR_INDEX, tensor_index, 0); + } + explicit TensorMapBuilder(flatbuffers::FlatBufferBuilder &_fbb) + : fbb_(_fbb) { + start_ = fbb_.StartTable(); + } + flatbuffers::Offset Finish() { + const auto end = fbb_.EndTable(start_); + auto o = flatbuffers::Offset(end); + return o; + } +}; + +inline flatbuffers::Offset CreateTensorMap( + flatbuffers::FlatBufferBuilder &_fbb, + flatbuffers::Offset name = 0, + uint32_t tensor_index = 0) { + TensorMapBuilder builder_(_fbb); + builder_.add_tensor_index(tensor_index); + builder_.add_name(name); + return builder_.Finish(); +} + +inline flatbuffers::Offset CreateTensorMapDirect( + flatbuffers::FlatBufferBuilder &_fbb, + const char *name = nullptr, + uint32_t tensor_index = 0) { + auto name__ = name ? _fbb.CreateString(name) : 0; + return tflite::CreateTensorMap( + _fbb, + name__, + tensor_index); +} + +flatbuffers::Offset CreateTensorMap(flatbuffers::FlatBufferBuilder &_fbb, const TensorMapT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); + +struct SignatureDefT : public flatbuffers::NativeTable { + typedef SignatureDef TableType; + std::vector> inputs{}; + std::vector> outputs{}; + std::string signature_key{}; + uint32_t subgraph_index = 0; + SignatureDefT() = default; + SignatureDefT(const SignatureDefT &o); + SignatureDefT(SignatureDefT&&) FLATBUFFERS_NOEXCEPT = default; + SignatureDefT &operator=(SignatureDefT o) FLATBUFFERS_NOEXCEPT; +}; + +struct SignatureDef FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table { + typedef SignatureDefT NativeTableType; + typedef SignatureDefBuilder Builder; + enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE { + VT_INPUTS = 4, + VT_OUTPUTS = 6, + VT_SIGNATURE_KEY = 8, + VT_SUBGRAPH_INDEX = 12 + }; + const flatbuffers::Vector> *inputs() const { + return GetPointer> *>(VT_INPUTS); + } + const flatbuffers::Vector> *outputs() const { + return GetPointer> 
*>(VT_OUTPUTS); + } + const flatbuffers::String *signature_key() const { + return GetPointer(VT_SIGNATURE_KEY); + } + uint32_t subgraph_index() const { + return GetField(VT_SUBGRAPH_INDEX, 0); + } + bool Verify(flatbuffers::Verifier &verifier) const { + return VerifyTableStart(verifier) && + VerifyOffset(verifier, VT_INPUTS) && + verifier.VerifyVector(inputs()) && + verifier.VerifyVectorOfTables(inputs()) && + VerifyOffset(verifier, VT_OUTPUTS) && + verifier.VerifyVector(outputs()) && + verifier.VerifyVectorOfTables(outputs()) && + VerifyOffset(verifier, VT_SIGNATURE_KEY) && + verifier.VerifyString(signature_key()) && + VerifyField(verifier, VT_SUBGRAPH_INDEX, 4) && + verifier.EndTable(); + } + SignatureDefT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const; + void UnPackTo(SignatureDefT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const; + static flatbuffers::Offset Pack(flatbuffers::FlatBufferBuilder &_fbb, const SignatureDefT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); +}; + +struct SignatureDefBuilder { + typedef SignatureDef Table; + flatbuffers::FlatBufferBuilder &fbb_; + flatbuffers::uoffset_t start_; + void add_inputs(flatbuffers::Offset>> inputs) { + fbb_.AddOffset(SignatureDef::VT_INPUTS, inputs); + } + void add_outputs(flatbuffers::Offset>> outputs) { + fbb_.AddOffset(SignatureDef::VT_OUTPUTS, outputs); + } + void add_signature_key(flatbuffers::Offset signature_key) { + fbb_.AddOffset(SignatureDef::VT_SIGNATURE_KEY, signature_key); + } + void add_subgraph_index(uint32_t subgraph_index) { + fbb_.AddElement(SignatureDef::VT_SUBGRAPH_INDEX, subgraph_index, 0); + } + explicit SignatureDefBuilder(flatbuffers::FlatBufferBuilder &_fbb) + : fbb_(_fbb) { + start_ = fbb_.StartTable(); + } + flatbuffers::Offset Finish() { + const auto end = fbb_.EndTable(start_); + auto o = flatbuffers::Offset(end); + return o; + } +}; + +inline flatbuffers::Offset CreateSignatureDef( + 
flatbuffers::FlatBufferBuilder &_fbb, + flatbuffers::Offset>> inputs = 0, + flatbuffers::Offset>> outputs = 0, + flatbuffers::Offset signature_key = 0, + uint32_t subgraph_index = 0) { + SignatureDefBuilder builder_(_fbb); + builder_.add_subgraph_index(subgraph_index); + builder_.add_signature_key(signature_key); + builder_.add_outputs(outputs); + builder_.add_inputs(inputs); + return builder_.Finish(); +} + +inline flatbuffers::Offset CreateSignatureDefDirect( + flatbuffers::FlatBufferBuilder &_fbb, + const std::vector> *inputs = nullptr, + const std::vector> *outputs = nullptr, + const char *signature_key = nullptr, + uint32_t subgraph_index = 0) { + auto inputs__ = inputs ? _fbb.CreateVector>(*inputs) : 0; + auto outputs__ = outputs ? _fbb.CreateVector>(*outputs) : 0; + auto signature_key__ = signature_key ? _fbb.CreateString(signature_key) : 0; + return tflite::CreateSignatureDef( + _fbb, + inputs__, + outputs__, + signature_key__, + subgraph_index); +} + +flatbuffers::Offset CreateSignatureDef(flatbuffers::FlatBufferBuilder &_fbb, const SignatureDefT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); + +struct ModelT : public flatbuffers::NativeTable { + typedef Model TableType; + uint32_t version = 0; + std::vector> operator_codes{}; + std::vector> subgraphs{}; + std::string description{}; + std::vector> buffers{}; + std::vector metadata_buffer{}; + std::vector> metadata{}; + std::vector> signature_defs{}; + ModelT() = default; + ModelT(const ModelT &o); + ModelT(ModelT&&) FLATBUFFERS_NOEXCEPT = default; + ModelT &operator=(ModelT o) FLATBUFFERS_NOEXCEPT; +}; + +struct Model FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table { + typedef ModelT NativeTableType; + typedef ModelBuilder Builder; + enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE { + VT_VERSION = 4, + VT_OPERATOR_CODES = 6, + VT_SUBGRAPHS = 8, + VT_DESCRIPTION = 10, + VT_BUFFERS = 12, + VT_METADATA_BUFFER = 14, + VT_METADATA = 16, + VT_SIGNATURE_DEFS = 18 + }; + 
uint32_t version() const { + return GetField(VT_VERSION, 0); + } + const flatbuffers::Vector> *operator_codes() const { + return GetPointer> *>(VT_OPERATOR_CODES); + } + const flatbuffers::Vector> *subgraphs() const { + return GetPointer> *>(VT_SUBGRAPHS); + } + const flatbuffers::String *description() const { + return GetPointer(VT_DESCRIPTION); + } + const flatbuffers::Vector> *buffers() const { + return GetPointer> *>(VT_BUFFERS); + } + const flatbuffers::Vector *metadata_buffer() const { + return GetPointer *>(VT_METADATA_BUFFER); + } + const flatbuffers::Vector> *metadata() const { + return GetPointer> *>(VT_METADATA); + } + const flatbuffers::Vector> *signature_defs() const { + return GetPointer> *>(VT_SIGNATURE_DEFS); + } + bool Verify(flatbuffers::Verifier &verifier) const { + return VerifyTableStart(verifier) && + VerifyField(verifier, VT_VERSION, 4) && + VerifyOffset(verifier, VT_OPERATOR_CODES) && + verifier.VerifyVector(operator_codes()) && + verifier.VerifyVectorOfTables(operator_codes()) && + VerifyOffset(verifier, VT_SUBGRAPHS) && + verifier.VerifyVector(subgraphs()) && + verifier.VerifyVectorOfTables(subgraphs()) && + VerifyOffset(verifier, VT_DESCRIPTION) && + verifier.VerifyString(description()) && + VerifyOffset(verifier, VT_BUFFERS) && + verifier.VerifyVector(buffers()) && + verifier.VerifyVectorOfTables(buffers()) && + VerifyOffset(verifier, VT_METADATA_BUFFER) && + verifier.VerifyVector(metadata_buffer()) && + VerifyOffset(verifier, VT_METADATA) && + verifier.VerifyVector(metadata()) && + verifier.VerifyVectorOfTables(metadata()) && + VerifyOffset(verifier, VT_SIGNATURE_DEFS) && + verifier.VerifyVector(signature_defs()) && + verifier.VerifyVectorOfTables(signature_defs()) && + verifier.EndTable(); + } + ModelT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const; + void UnPackTo(ModelT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const; + static flatbuffers::Offset Pack(flatbuffers::FlatBufferBuilder 
&_fbb, const ModelT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); +}; + +struct ModelBuilder { + typedef Model Table; + flatbuffers::FlatBufferBuilder &fbb_; + flatbuffers::uoffset_t start_; + void add_version(uint32_t version) { + fbb_.AddElement(Model::VT_VERSION, version, 0); + } + void add_operator_codes(flatbuffers::Offset>> operator_codes) { + fbb_.AddOffset(Model::VT_OPERATOR_CODES, operator_codes); + } + void add_subgraphs(flatbuffers::Offset>> subgraphs) { + fbb_.AddOffset(Model::VT_SUBGRAPHS, subgraphs); + } + void add_description(flatbuffers::Offset description) { + fbb_.AddOffset(Model::VT_DESCRIPTION, description); + } + void add_buffers(flatbuffers::Offset>> buffers) { + fbb_.AddOffset(Model::VT_BUFFERS, buffers); + } + void add_metadata_buffer(flatbuffers::Offset> metadata_buffer) { + fbb_.AddOffset(Model::VT_METADATA_BUFFER, metadata_buffer); + } + void add_metadata(flatbuffers::Offset>> metadata) { + fbb_.AddOffset(Model::VT_METADATA, metadata); + } + void add_signature_defs(flatbuffers::Offset>> signature_defs) { + fbb_.AddOffset(Model::VT_SIGNATURE_DEFS, signature_defs); + } + explicit ModelBuilder(flatbuffers::FlatBufferBuilder &_fbb) + : fbb_(_fbb) { + start_ = fbb_.StartTable(); + } + flatbuffers::Offset Finish() { + const auto end = fbb_.EndTable(start_); + auto o = flatbuffers::Offset(end); + return o; + } +}; + +inline flatbuffers::Offset CreateModel( + flatbuffers::FlatBufferBuilder &_fbb, + uint32_t version = 0, + flatbuffers::Offset>> operator_codes = 0, + flatbuffers::Offset>> subgraphs = 0, + flatbuffers::Offset description = 0, + flatbuffers::Offset>> buffers = 0, + flatbuffers::Offset> metadata_buffer = 0, + flatbuffers::Offset>> metadata = 0, + flatbuffers::Offset>> signature_defs = 0) { + ModelBuilder builder_(_fbb); + builder_.add_signature_defs(signature_defs); + builder_.add_metadata(metadata); + builder_.add_metadata_buffer(metadata_buffer); + builder_.add_buffers(buffers); + 
builder_.add_description(description); + builder_.add_subgraphs(subgraphs); + builder_.add_operator_codes(operator_codes); + builder_.add_version(version); + return builder_.Finish(); +} + +inline flatbuffers::Offset CreateModelDirect( + flatbuffers::FlatBufferBuilder &_fbb, + uint32_t version = 0, + const std::vector> *operator_codes = nullptr, + const std::vector> *subgraphs = nullptr, + const char *description = nullptr, + const std::vector> *buffers = nullptr, + const std::vector *metadata_buffer = nullptr, + const std::vector> *metadata = nullptr, + const std::vector> *signature_defs = nullptr) { + auto operator_codes__ = operator_codes ? _fbb.CreateVector>(*operator_codes) : 0; + auto subgraphs__ = subgraphs ? _fbb.CreateVector>(*subgraphs) : 0; + auto description__ = description ? _fbb.CreateString(description) : 0; + auto buffers__ = buffers ? _fbb.CreateVector>(*buffers) : 0; + auto metadata_buffer__ = metadata_buffer ? _fbb.CreateVector(*metadata_buffer) : 0; + auto metadata__ = metadata ? _fbb.CreateVector>(*metadata) : 0; + auto signature_defs__ = signature_defs ? 
_fbb.CreateVector>(*signature_defs) : 0; + return tflite::CreateModel( + _fbb, + version, + operator_codes__, + subgraphs__, + description__, + buffers__, + metadata_buffer__, + metadata__, + signature_defs__); +} + +flatbuffers::Offset CreateModel(flatbuffers::FlatBufferBuilder &_fbb, const ModelT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); + +inline CustomQuantizationT *CustomQuantization::UnPack(const flatbuffers::resolver_function_t *_resolver) const { + auto _o = std::unique_ptr(new CustomQuantizationT()); + UnPackTo(_o.get(), _resolver); + return _o.release(); +} + +inline void CustomQuantization::UnPackTo(CustomQuantizationT *_o, const flatbuffers::resolver_function_t *_resolver) const { + (void)_o; + (void)_resolver; + { auto _e = custom(); if (_e) { _o->custom.resize(_e->size()); std::copy(_e->begin(), _e->end(), _o->custom.begin()); } } +} + +inline flatbuffers::Offset CustomQuantization::Pack(flatbuffers::FlatBufferBuilder &_fbb, const CustomQuantizationT* _o, const flatbuffers::rehasher_function_t *_rehasher) { + return CreateCustomQuantization(_fbb, _o, _rehasher); +} + +inline flatbuffers::Offset CreateCustomQuantization(flatbuffers::FlatBufferBuilder &_fbb, const CustomQuantizationT *_o, const flatbuffers::rehasher_function_t *_rehasher) { + (void)_rehasher; + (void)_o; + struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const CustomQuantizationT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va; + _fbb.ForceVectorAlignment(_o->custom.size(), sizeof(uint8_t), 16); + auto _custom = _o->custom.size() ? 
_fbb.CreateVector(_o->custom) : 0; + return tflite::CreateCustomQuantization( + _fbb, + _custom); +} + +inline QuantizationParametersT *QuantizationParameters::UnPack(const flatbuffers::resolver_function_t *_resolver) const { + auto _o = std::unique_ptr(new QuantizationParametersT()); + UnPackTo(_o.get(), _resolver); + return _o.release(); +} + +inline void QuantizationParameters::UnPackTo(QuantizationParametersT *_o, const flatbuffers::resolver_function_t *_resolver) const { + (void)_o; + (void)_resolver; + { auto _e = min(); if (_e) { _o->min.resize(_e->size()); for (flatbuffers::uoffset_t _i = 0; _i < _e->size(); _i++) { _o->min[_i] = _e->Get(_i); } } } + { auto _e = max(); if (_e) { _o->max.resize(_e->size()); for (flatbuffers::uoffset_t _i = 0; _i < _e->size(); _i++) { _o->max[_i] = _e->Get(_i); } } } + { auto _e = scale(); if (_e) { _o->scale.resize(_e->size()); for (flatbuffers::uoffset_t _i = 0; _i < _e->size(); _i++) { _o->scale[_i] = _e->Get(_i); } } } + { auto _e = zero_point(); if (_e) { _o->zero_point.resize(_e->size()); for (flatbuffers::uoffset_t _i = 0; _i < _e->size(); _i++) { _o->zero_point[_i] = _e->Get(_i); } } } + { auto _e = details_type(); _o->details.type = _e; } + { auto _e = details(); if (_e) _o->details.value = tflite::QuantizationDetailsUnion::UnPack(_e, details_type(), _resolver); } + { auto _e = quantized_dimension(); _o->quantized_dimension = _e; } +} + +inline flatbuffers::Offset QuantizationParameters::Pack(flatbuffers::FlatBufferBuilder &_fbb, const QuantizationParametersT* _o, const flatbuffers::rehasher_function_t *_rehasher) { + return CreateQuantizationParameters(_fbb, _o, _rehasher); +} + +inline flatbuffers::Offset CreateQuantizationParameters(flatbuffers::FlatBufferBuilder &_fbb, const QuantizationParametersT *_o, const flatbuffers::rehasher_function_t *_rehasher) { + (void)_rehasher; + (void)_o; + struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const QuantizationParametersT* __o; const 
flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va; + auto _min = _o->min.size() ? _fbb.CreateVector(_o->min) : 0; + auto _max = _o->max.size() ? _fbb.CreateVector(_o->max) : 0; + auto _scale = _o->scale.size() ? _fbb.CreateVector(_o->scale) : 0; + auto _zero_point = _o->zero_point.size() ? _fbb.CreateVector(_o->zero_point) : 0; + auto _details_type = _o->details.type; + auto _details = _o->details.Pack(_fbb); + auto _quantized_dimension = _o->quantized_dimension; + return tflite::CreateQuantizationParameters( + _fbb, + _min, + _max, + _scale, + _zero_point, + _details_type, + _details, + _quantized_dimension); +} + +inline Int32VectorT *Int32Vector::UnPack(const flatbuffers::resolver_function_t *_resolver) const { + auto _o = std::unique_ptr(new Int32VectorT()); + UnPackTo(_o.get(), _resolver); + return _o.release(); +} + +inline void Int32Vector::UnPackTo(Int32VectorT *_o, const flatbuffers::resolver_function_t *_resolver) const { + (void)_o; + (void)_resolver; + { auto _e = values(); if (_e) { _o->values.resize(_e->size()); for (flatbuffers::uoffset_t _i = 0; _i < _e->size(); _i++) { _o->values[_i] = _e->Get(_i); } } } +} + +inline flatbuffers::Offset Int32Vector::Pack(flatbuffers::FlatBufferBuilder &_fbb, const Int32VectorT* _o, const flatbuffers::rehasher_function_t *_rehasher) { + return CreateInt32Vector(_fbb, _o, _rehasher); +} + +inline flatbuffers::Offset CreateInt32Vector(flatbuffers::FlatBufferBuilder &_fbb, const Int32VectorT *_o, const flatbuffers::rehasher_function_t *_rehasher) { + (void)_rehasher; + (void)_o; + struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const Int32VectorT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va; + auto _values = _o->values.size() ? 
_fbb.CreateVector(_o->values) : 0; + return tflite::CreateInt32Vector( + _fbb, + _values); +} + +inline Uint16VectorT *Uint16Vector::UnPack(const flatbuffers::resolver_function_t *_resolver) const { + auto _o = std::unique_ptr(new Uint16VectorT()); + UnPackTo(_o.get(), _resolver); + return _o.release(); +} + +inline void Uint16Vector::UnPackTo(Uint16VectorT *_o, const flatbuffers::resolver_function_t *_resolver) const { + (void)_o; + (void)_resolver; + { auto _e = values(); if (_e) { _o->values.resize(_e->size()); for (flatbuffers::uoffset_t _i = 0; _i < _e->size(); _i++) { _o->values[_i] = _e->Get(_i); } } } +} + +inline flatbuffers::Offset Uint16Vector::Pack(flatbuffers::FlatBufferBuilder &_fbb, const Uint16VectorT* _o, const flatbuffers::rehasher_function_t *_rehasher) { + return CreateUint16Vector(_fbb, _o, _rehasher); +} + +inline flatbuffers::Offset CreateUint16Vector(flatbuffers::FlatBufferBuilder &_fbb, const Uint16VectorT *_o, const flatbuffers::rehasher_function_t *_rehasher) { + (void)_rehasher; + (void)_o; + struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const Uint16VectorT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va; + _fbb.ForceVectorAlignment(_o->values.size(), sizeof(uint16_t), 4); + auto _values = _o->values.size() ? 
_fbb.CreateVector(_o->values) : 0; + return tflite::CreateUint16Vector( + _fbb, + _values); +} + +inline Uint8VectorT *Uint8Vector::UnPack(const flatbuffers::resolver_function_t *_resolver) const { + auto _o = std::unique_ptr(new Uint8VectorT()); + UnPackTo(_o.get(), _resolver); + return _o.release(); +} + +inline void Uint8Vector::UnPackTo(Uint8VectorT *_o, const flatbuffers::resolver_function_t *_resolver) const { + (void)_o; + (void)_resolver; + { auto _e = values(); if (_e) { _o->values.resize(_e->size()); std::copy(_e->begin(), _e->end(), _o->values.begin()); } } +} + +inline flatbuffers::Offset Uint8Vector::Pack(flatbuffers::FlatBufferBuilder &_fbb, const Uint8VectorT* _o, const flatbuffers::rehasher_function_t *_rehasher) { + return CreateUint8Vector(_fbb, _o, _rehasher); +} + +inline flatbuffers::Offset CreateUint8Vector(flatbuffers::FlatBufferBuilder &_fbb, const Uint8VectorT *_o, const flatbuffers::rehasher_function_t *_rehasher) { + (void)_rehasher; + (void)_o; + struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const Uint8VectorT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va; + _fbb.ForceVectorAlignment(_o->values.size(), sizeof(uint8_t), 4); + auto _values = _o->values.size() ? 
_fbb.CreateVector(_o->values) : 0; + return tflite::CreateUint8Vector( + _fbb, + _values); +} + +inline DimensionMetadataT *DimensionMetadata::UnPack(const flatbuffers::resolver_function_t *_resolver) const { + auto _o = std::unique_ptr(new DimensionMetadataT()); + UnPackTo(_o.get(), _resolver); + return _o.release(); +} + +inline void DimensionMetadata::UnPackTo(DimensionMetadataT *_o, const flatbuffers::resolver_function_t *_resolver) const { + (void)_o; + (void)_resolver; + { auto _e = format(); _o->format = _e; } + { auto _e = dense_size(); _o->dense_size = _e; } + { auto _e = array_segments_type(); _o->array_segments.type = _e; } + { auto _e = array_segments(); if (_e) _o->array_segments.value = tflite::SparseIndexVectorUnion::UnPack(_e, array_segments_type(), _resolver); } + { auto _e = array_indices_type(); _o->array_indices.type = _e; } + { auto _e = array_indices(); if (_e) _o->array_indices.value = tflite::SparseIndexVectorUnion::UnPack(_e, array_indices_type(), _resolver); } +} + +inline flatbuffers::Offset DimensionMetadata::Pack(flatbuffers::FlatBufferBuilder &_fbb, const DimensionMetadataT* _o, const flatbuffers::rehasher_function_t *_rehasher) { + return CreateDimensionMetadata(_fbb, _o, _rehasher); +} + +inline flatbuffers::Offset CreateDimensionMetadata(flatbuffers::FlatBufferBuilder &_fbb, const DimensionMetadataT *_o, const flatbuffers::rehasher_function_t *_rehasher) { + (void)_rehasher; + (void)_o; + struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const DimensionMetadataT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va; + auto _format = _o->format; + auto _dense_size = _o->dense_size; + auto _array_segments_type = _o->array_segments.type; + auto _array_segments = _o->array_segments.Pack(_fbb); + auto _array_indices_type = _o->array_indices.type; + auto _array_indices = _o->array_indices.Pack(_fbb); + return tflite::CreateDimensionMetadata( + _fbb, + _format, + _dense_size, + 
_array_segments_type, + _array_segments, + _array_indices_type, + _array_indices); +} + +inline SparsityParametersT::SparsityParametersT(const SparsityParametersT &o) + : traversal_order(o.traversal_order), + block_map(o.block_map) { + dim_metadata.reserve(o.dim_metadata.size()); + for (const auto &dim_metadata_ : o.dim_metadata) { dim_metadata.emplace_back((dim_metadata_) ? new tflite::DimensionMetadataT(*dim_metadata_) : nullptr); } +} + +inline SparsityParametersT &SparsityParametersT::operator=(SparsityParametersT o) FLATBUFFERS_NOEXCEPT { + std::swap(traversal_order, o.traversal_order); + std::swap(block_map, o.block_map); + std::swap(dim_metadata, o.dim_metadata); + return *this; +} + +inline SparsityParametersT *SparsityParameters::UnPack(const flatbuffers::resolver_function_t *_resolver) const { + auto _o = std::unique_ptr(new SparsityParametersT()); + UnPackTo(_o.get(), _resolver); + return _o.release(); +} + +inline void SparsityParameters::UnPackTo(SparsityParametersT *_o, const flatbuffers::resolver_function_t *_resolver) const { + (void)_o; + (void)_resolver; + { auto _e = traversal_order(); if (_e) { _o->traversal_order.resize(_e->size()); for (flatbuffers::uoffset_t _i = 0; _i < _e->size(); _i++) { _o->traversal_order[_i] = _e->Get(_i); } } } + { auto _e = block_map(); if (_e) { _o->block_map.resize(_e->size()); for (flatbuffers::uoffset_t _i = 0; _i < _e->size(); _i++) { _o->block_map[_i] = _e->Get(_i); } } } + { auto _e = dim_metadata(); if (_e) { _o->dim_metadata.resize(_e->size()); for (flatbuffers::uoffset_t _i = 0; _i < _e->size(); _i++) { if(_o->dim_metadata[_i]) { _e->Get(_i)->UnPackTo(_o->dim_metadata[_i].get(), _resolver); } else { _o->dim_metadata[_i] = std::unique_ptr(_e->Get(_i)->UnPack(_resolver)); }; } } } +} + +inline flatbuffers::Offset SparsityParameters::Pack(flatbuffers::FlatBufferBuilder &_fbb, const SparsityParametersT* _o, const flatbuffers::rehasher_function_t *_rehasher) { + return CreateSparsityParameters(_fbb, _o, 
_rehasher); +} + +inline flatbuffers::Offset CreateSparsityParameters(flatbuffers::FlatBufferBuilder &_fbb, const SparsityParametersT *_o, const flatbuffers::rehasher_function_t *_rehasher) { + (void)_rehasher; + (void)_o; + struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const SparsityParametersT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va; + auto _traversal_order = _o->traversal_order.size() ? _fbb.CreateVector(_o->traversal_order) : 0; + auto _block_map = _o->block_map.size() ? _fbb.CreateVector(_o->block_map) : 0; + auto _dim_metadata = _o->dim_metadata.size() ? _fbb.CreateVector> (_o->dim_metadata.size(), [](size_t i, _VectorArgs *__va) { return CreateDimensionMetadata(*__va->__fbb, __va->__o->dim_metadata[i].get(), __va->__rehasher); }, &_va ) : 0; + return tflite::CreateSparsityParameters( + _fbb, + _traversal_order, + _block_map, + _dim_metadata); +} + +inline VariantSubTypeT *VariantSubType::UnPack(const flatbuffers::resolver_function_t *_resolver) const { + auto _o = std::unique_ptr(new VariantSubTypeT()); + UnPackTo(_o.get(), _resolver); + return _o.release(); +} + +inline void VariantSubType::UnPackTo(VariantSubTypeT *_o, const flatbuffers::resolver_function_t *_resolver) const { + (void)_o; + (void)_resolver; + { auto _e = shape(); if (_e) { _o->shape.resize(_e->size()); for (flatbuffers::uoffset_t _i = 0; _i < _e->size(); _i++) { _o->shape[_i] = _e->Get(_i); } } } + { auto _e = type(); _o->type = _e; } + { auto _e = has_rank(); _o->has_rank = _e; } +} + +inline flatbuffers::Offset VariantSubType::Pack(flatbuffers::FlatBufferBuilder &_fbb, const VariantSubTypeT* _o, const flatbuffers::rehasher_function_t *_rehasher) { + return CreateVariantSubType(_fbb, _o, _rehasher); +} + +inline flatbuffers::Offset CreateVariantSubType(flatbuffers::FlatBufferBuilder &_fbb, const VariantSubTypeT *_o, const flatbuffers::rehasher_function_t *_rehasher) { + (void)_rehasher; + (void)_o; + struct 
_VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const VariantSubTypeT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va; + auto _shape = _o->shape.size() ? _fbb.CreateVector(_o->shape) : 0; + auto _type = _o->type; + auto _has_rank = _o->has_rank; + return tflite::CreateVariantSubType( + _fbb, + _shape, + _type, + _has_rank); +} + +inline TensorT::TensorT(const TensorT &o) + : shape(o.shape), + type(o.type), + buffer(o.buffer), + name(o.name), + quantization((o.quantization) ? new tflite::QuantizationParametersT(*o.quantization) : nullptr), + is_variable(o.is_variable), + sparsity((o.sparsity) ? new tflite::SparsityParametersT(*o.sparsity) : nullptr), + shape_signature(o.shape_signature), + has_rank(o.has_rank) { + variant_tensors.reserve(o.variant_tensors.size()); + for (const auto &variant_tensors_ : o.variant_tensors) { variant_tensors.emplace_back((variant_tensors_) ? new tflite::VariantSubTypeT(*variant_tensors_) : nullptr); } +} + +inline TensorT &TensorT::operator=(TensorT o) FLATBUFFERS_NOEXCEPT { + std::swap(shape, o.shape); + std::swap(type, o.type); + std::swap(buffer, o.buffer); + std::swap(name, o.name); + std::swap(quantization, o.quantization); + std::swap(is_variable, o.is_variable); + std::swap(sparsity, o.sparsity); + std::swap(shape_signature, o.shape_signature); + std::swap(has_rank, o.has_rank); + std::swap(variant_tensors, o.variant_tensors); + return *this; +} + +inline TensorT *Tensor::UnPack(const flatbuffers::resolver_function_t *_resolver) const { + auto _o = std::unique_ptr(new TensorT()); + UnPackTo(_o.get(), _resolver); + return _o.release(); +} + +inline void Tensor::UnPackTo(TensorT *_o, const flatbuffers::resolver_function_t *_resolver) const { + (void)_o; + (void)_resolver; + { auto _e = shape(); if (_e) { _o->shape.resize(_e->size()); for (flatbuffers::uoffset_t _i = 0; _i < _e->size(); _i++) { _o->shape[_i] = _e->Get(_i); } } } + { auto _e = type(); _o->type = _e; } + { auto 
_e = buffer(); _o->buffer = _e; } + { auto _e = name(); if (_e) _o->name = _e->str(); } + { auto _e = quantization(); if (_e) { if(_o->quantization) { _e->UnPackTo(_o->quantization.get(), _resolver); } else { _o->quantization = std::unique_ptr(_e->UnPack(_resolver)); } } } + { auto _e = is_variable(); _o->is_variable = _e; } + { auto _e = sparsity(); if (_e) { if(_o->sparsity) { _e->UnPackTo(_o->sparsity.get(), _resolver); } else { _o->sparsity = std::unique_ptr(_e->UnPack(_resolver)); } } } + { auto _e = shape_signature(); if (_e) { _o->shape_signature.resize(_e->size()); for (flatbuffers::uoffset_t _i = 0; _i < _e->size(); _i++) { _o->shape_signature[_i] = _e->Get(_i); } } } + { auto _e = has_rank(); _o->has_rank = _e; } + { auto _e = variant_tensors(); if (_e) { _o->variant_tensors.resize(_e->size()); for (flatbuffers::uoffset_t _i = 0; _i < _e->size(); _i++) { if(_o->variant_tensors[_i]) { _e->Get(_i)->UnPackTo(_o->variant_tensors[_i].get(), _resolver); } else { _o->variant_tensors[_i] = std::unique_ptr(_e->Get(_i)->UnPack(_resolver)); }; } } } +} + +inline flatbuffers::Offset Tensor::Pack(flatbuffers::FlatBufferBuilder &_fbb, const TensorT* _o, const flatbuffers::rehasher_function_t *_rehasher) { + return CreateTensor(_fbb, _o, _rehasher); +} + +inline flatbuffers::Offset CreateTensor(flatbuffers::FlatBufferBuilder &_fbb, const TensorT *_o, const flatbuffers::rehasher_function_t *_rehasher) { + (void)_rehasher; + (void)_o; + struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const TensorT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va; + auto _shape = _o->shape.size() ? _fbb.CreateVector(_o->shape) : 0; + auto _type = _o->type; + auto _buffer = _o->buffer; + auto _name = _o->name.empty() ? 0 : _fbb.CreateString(_o->name); + auto _quantization = _o->quantization ? 
CreateQuantizationParameters(_fbb, _o->quantization.get(), _rehasher) : 0; + auto _is_variable = _o->is_variable; + auto _sparsity = _o->sparsity ? CreateSparsityParameters(_fbb, _o->sparsity.get(), _rehasher) : 0; + auto _shape_signature = _o->shape_signature.size() ? _fbb.CreateVector(_o->shape_signature) : 0; + auto _has_rank = _o->has_rank; + auto _variant_tensors = _o->variant_tensors.size() ? _fbb.CreateVector> (_o->variant_tensors.size(), [](size_t i, _VectorArgs *__va) { return CreateVariantSubType(*__va->__fbb, __va->__o->variant_tensors[i].get(), __va->__rehasher); }, &_va ) : 0; + return tflite::CreateTensor( + _fbb, + _shape, + _type, + _buffer, + _name, + _quantization, + _is_variable, + _sparsity, + _shape_signature, + _has_rank, + _variant_tensors); +} + +inline Conv2DOptionsT *Conv2DOptions::UnPack(const flatbuffers::resolver_function_t *_resolver) const { + auto _o = std::unique_ptr(new Conv2DOptionsT()); + UnPackTo(_o.get(), _resolver); + return _o.release(); +} + +inline void Conv2DOptions::UnPackTo(Conv2DOptionsT *_o, const flatbuffers::resolver_function_t *_resolver) const { + (void)_o; + (void)_resolver; + { auto _e = padding(); _o->padding = _e; } + { auto _e = stride_w(); _o->stride_w = _e; } + { auto _e = stride_h(); _o->stride_h = _e; } + { auto _e = fused_activation_function(); _o->fused_activation_function = _e; } + { auto _e = dilation_w_factor(); _o->dilation_w_factor = _e; } + { auto _e = dilation_h_factor(); _o->dilation_h_factor = _e; } +} + +inline flatbuffers::Offset Conv2DOptions::Pack(flatbuffers::FlatBufferBuilder &_fbb, const Conv2DOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher) { + return CreateConv2DOptions(_fbb, _o, _rehasher); +} + +inline flatbuffers::Offset CreateConv2DOptions(flatbuffers::FlatBufferBuilder &_fbb, const Conv2DOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher) { + (void)_rehasher; + (void)_o; + struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const Conv2DOptionsT* 
__o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va; + auto _padding = _o->padding; + auto _stride_w = _o->stride_w; + auto _stride_h = _o->stride_h; + auto _fused_activation_function = _o->fused_activation_function; + auto _dilation_w_factor = _o->dilation_w_factor; + auto _dilation_h_factor = _o->dilation_h_factor; + return tflite::CreateConv2DOptions( + _fbb, + _padding, + _stride_w, + _stride_h, + _fused_activation_function, + _dilation_w_factor, + _dilation_h_factor); +} + +inline Conv3DOptionsT *Conv3DOptions::UnPack(const flatbuffers::resolver_function_t *_resolver) const { + auto _o = std::unique_ptr(new Conv3DOptionsT()); + UnPackTo(_o.get(), _resolver); + return _o.release(); +} + +inline void Conv3DOptions::UnPackTo(Conv3DOptionsT *_o, const flatbuffers::resolver_function_t *_resolver) const { + (void)_o; + (void)_resolver; + { auto _e = padding(); _o->padding = _e; } + { auto _e = stride_d(); _o->stride_d = _e; } + { auto _e = stride_w(); _o->stride_w = _e; } + { auto _e = stride_h(); _o->stride_h = _e; } + { auto _e = fused_activation_function(); _o->fused_activation_function = _e; } + { auto _e = dilation_d_factor(); _o->dilation_d_factor = _e; } + { auto _e = dilation_w_factor(); _o->dilation_w_factor = _e; } + { auto _e = dilation_h_factor(); _o->dilation_h_factor = _e; } +} + +inline flatbuffers::Offset Conv3DOptions::Pack(flatbuffers::FlatBufferBuilder &_fbb, const Conv3DOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher) { + return CreateConv3DOptions(_fbb, _o, _rehasher); +} + +inline flatbuffers::Offset CreateConv3DOptions(flatbuffers::FlatBufferBuilder &_fbb, const Conv3DOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher) { + (void)_rehasher; + (void)_o; + struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const Conv3DOptionsT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va; + auto _padding = _o->padding; + 
auto _stride_d = _o->stride_d; + auto _stride_w = _o->stride_w; + auto _stride_h = _o->stride_h; + auto _fused_activation_function = _o->fused_activation_function; + auto _dilation_d_factor = _o->dilation_d_factor; + auto _dilation_w_factor = _o->dilation_w_factor; + auto _dilation_h_factor = _o->dilation_h_factor; + return tflite::CreateConv3DOptions( + _fbb, + _padding, + _stride_d, + _stride_w, + _stride_h, + _fused_activation_function, + _dilation_d_factor, + _dilation_w_factor, + _dilation_h_factor); +} + +inline Pool2DOptionsT *Pool2DOptions::UnPack(const flatbuffers::resolver_function_t *_resolver) const { + auto _o = std::unique_ptr(new Pool2DOptionsT()); + UnPackTo(_o.get(), _resolver); + return _o.release(); +} + +inline void Pool2DOptions::UnPackTo(Pool2DOptionsT *_o, const flatbuffers::resolver_function_t *_resolver) const { + (void)_o; + (void)_resolver; + { auto _e = padding(); _o->padding = _e; } + { auto _e = stride_w(); _o->stride_w = _e; } + { auto _e = stride_h(); _o->stride_h = _e; } + { auto _e = filter_width(); _o->filter_width = _e; } + { auto _e = filter_height(); _o->filter_height = _e; } + { auto _e = fused_activation_function(); _o->fused_activation_function = _e; } +} + +inline flatbuffers::Offset Pool2DOptions::Pack(flatbuffers::FlatBufferBuilder &_fbb, const Pool2DOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher) { + return CreatePool2DOptions(_fbb, _o, _rehasher); +} + +inline flatbuffers::Offset CreatePool2DOptions(flatbuffers::FlatBufferBuilder &_fbb, const Pool2DOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher) { + (void)_rehasher; + (void)_o; + struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const Pool2DOptionsT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va; + auto _padding = _o->padding; + auto _stride_w = _o->stride_w; + auto _stride_h = _o->stride_h; + auto _filter_width = _o->filter_width; + auto _filter_height = _o->filter_height; 
+ auto _fused_activation_function = _o->fused_activation_function; + return tflite::CreatePool2DOptions( + _fbb, + _padding, + _stride_w, + _stride_h, + _filter_width, + _filter_height, + _fused_activation_function); +} + +inline DepthwiseConv2DOptionsT *DepthwiseConv2DOptions::UnPack(const flatbuffers::resolver_function_t *_resolver) const { + auto _o = std::unique_ptr(new DepthwiseConv2DOptionsT()); + UnPackTo(_o.get(), _resolver); + return _o.release(); +} + +inline void DepthwiseConv2DOptions::UnPackTo(DepthwiseConv2DOptionsT *_o, const flatbuffers::resolver_function_t *_resolver) const { + (void)_o; + (void)_resolver; + { auto _e = padding(); _o->padding = _e; } + { auto _e = stride_w(); _o->stride_w = _e; } + { auto _e = stride_h(); _o->stride_h = _e; } + { auto _e = depth_multiplier(); _o->depth_multiplier = _e; } + { auto _e = fused_activation_function(); _o->fused_activation_function = _e; } + { auto _e = dilation_w_factor(); _o->dilation_w_factor = _e; } + { auto _e = dilation_h_factor(); _o->dilation_h_factor = _e; } +} + +inline flatbuffers::Offset DepthwiseConv2DOptions::Pack(flatbuffers::FlatBufferBuilder &_fbb, const DepthwiseConv2DOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher) { + return CreateDepthwiseConv2DOptions(_fbb, _o, _rehasher); +} + +inline flatbuffers::Offset CreateDepthwiseConv2DOptions(flatbuffers::FlatBufferBuilder &_fbb, const DepthwiseConv2DOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher) { + (void)_rehasher; + (void)_o; + struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const DepthwiseConv2DOptionsT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va; + auto _padding = _o->padding; + auto _stride_w = _o->stride_w; + auto _stride_h = _o->stride_h; + auto _depth_multiplier = _o->depth_multiplier; + auto _fused_activation_function = _o->fused_activation_function; + auto _dilation_w_factor = _o->dilation_w_factor; + auto _dilation_h_factor = 
_o->dilation_h_factor; + return tflite::CreateDepthwiseConv2DOptions( + _fbb, + _padding, + _stride_w, + _stride_h, + _depth_multiplier, + _fused_activation_function, + _dilation_w_factor, + _dilation_h_factor); +} + +inline ConcatEmbeddingsOptionsT *ConcatEmbeddingsOptions::UnPack(const flatbuffers::resolver_function_t *_resolver) const { + auto _o = std::unique_ptr(new ConcatEmbeddingsOptionsT()); + UnPackTo(_o.get(), _resolver); + return _o.release(); +} + +inline void ConcatEmbeddingsOptions::UnPackTo(ConcatEmbeddingsOptionsT *_o, const flatbuffers::resolver_function_t *_resolver) const { + (void)_o; + (void)_resolver; + { auto _e = num_channels(); _o->num_channels = _e; } + { auto _e = num_columns_per_channel(); if (_e) { _o->num_columns_per_channel.resize(_e->size()); for (flatbuffers::uoffset_t _i = 0; _i < _e->size(); _i++) { _o->num_columns_per_channel[_i] = _e->Get(_i); } } } + { auto _e = embedding_dim_per_channel(); if (_e) { _o->embedding_dim_per_channel.resize(_e->size()); for (flatbuffers::uoffset_t _i = 0; _i < _e->size(); _i++) { _o->embedding_dim_per_channel[_i] = _e->Get(_i); } } } +} + +inline flatbuffers::Offset ConcatEmbeddingsOptions::Pack(flatbuffers::FlatBufferBuilder &_fbb, const ConcatEmbeddingsOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher) { + return CreateConcatEmbeddingsOptions(_fbb, _o, _rehasher); +} + +inline flatbuffers::Offset CreateConcatEmbeddingsOptions(flatbuffers::FlatBufferBuilder &_fbb, const ConcatEmbeddingsOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher) { + (void)_rehasher; + (void)_o; + struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const ConcatEmbeddingsOptionsT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va; + auto _num_channels = _o->num_channels; + auto _num_columns_per_channel = _o->num_columns_per_channel.size() ? 
_fbb.CreateVector(_o->num_columns_per_channel) : 0; + auto _embedding_dim_per_channel = _o->embedding_dim_per_channel.size() ? _fbb.CreateVector(_o->embedding_dim_per_channel) : 0; + return tflite::CreateConcatEmbeddingsOptions( + _fbb, + _num_channels, + _num_columns_per_channel, + _embedding_dim_per_channel); +} + +inline LSHProjectionOptionsT *LSHProjectionOptions::UnPack(const flatbuffers::resolver_function_t *_resolver) const { + auto _o = std::unique_ptr(new LSHProjectionOptionsT()); + UnPackTo(_o.get(), _resolver); + return _o.release(); +} + +inline void LSHProjectionOptions::UnPackTo(LSHProjectionOptionsT *_o, const flatbuffers::resolver_function_t *_resolver) const { + (void)_o; + (void)_resolver; + { auto _e = type(); _o->type = _e; } +} + +inline flatbuffers::Offset LSHProjectionOptions::Pack(flatbuffers::FlatBufferBuilder &_fbb, const LSHProjectionOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher) { + return CreateLSHProjectionOptions(_fbb, _o, _rehasher); +} + +inline flatbuffers::Offset CreateLSHProjectionOptions(flatbuffers::FlatBufferBuilder &_fbb, const LSHProjectionOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher) { + (void)_rehasher; + (void)_o; + struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const LSHProjectionOptionsT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va; + auto _type = _o->type; + return tflite::CreateLSHProjectionOptions( + _fbb, + _type); +} + +inline SVDFOptionsT *SVDFOptions::UnPack(const flatbuffers::resolver_function_t *_resolver) const { + auto _o = std::unique_ptr(new SVDFOptionsT()); + UnPackTo(_o.get(), _resolver); + return _o.release(); +} + +inline void SVDFOptions::UnPackTo(SVDFOptionsT *_o, const flatbuffers::resolver_function_t *_resolver) const { + (void)_o; + (void)_resolver; + { auto _e = rank(); _o->rank = _e; } + { auto _e = fused_activation_function(); _o->fused_activation_function = _e; } + { auto _e = 
asymmetric_quantize_inputs(); _o->asymmetric_quantize_inputs = _e; } +} + +inline flatbuffers::Offset SVDFOptions::Pack(flatbuffers::FlatBufferBuilder &_fbb, const SVDFOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher) { + return CreateSVDFOptions(_fbb, _o, _rehasher); +} + +inline flatbuffers::Offset CreateSVDFOptions(flatbuffers::FlatBufferBuilder &_fbb, const SVDFOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher) { + (void)_rehasher; + (void)_o; + struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const SVDFOptionsT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va; + auto _rank = _o->rank; + auto _fused_activation_function = _o->fused_activation_function; + auto _asymmetric_quantize_inputs = _o->asymmetric_quantize_inputs; + return tflite::CreateSVDFOptions( + _fbb, + _rank, + _fused_activation_function, + _asymmetric_quantize_inputs); +} + +inline RNNOptionsT *RNNOptions::UnPack(const flatbuffers::resolver_function_t *_resolver) const { + auto _o = std::unique_ptr(new RNNOptionsT()); + UnPackTo(_o.get(), _resolver); + return _o.release(); +} + +inline void RNNOptions::UnPackTo(RNNOptionsT *_o, const flatbuffers::resolver_function_t *_resolver) const { + (void)_o; + (void)_resolver; + { auto _e = fused_activation_function(); _o->fused_activation_function = _e; } + { auto _e = asymmetric_quantize_inputs(); _o->asymmetric_quantize_inputs = _e; } +} + +inline flatbuffers::Offset RNNOptions::Pack(flatbuffers::FlatBufferBuilder &_fbb, const RNNOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher) { + return CreateRNNOptions(_fbb, _o, _rehasher); +} + +inline flatbuffers::Offset CreateRNNOptions(flatbuffers::FlatBufferBuilder &_fbb, const RNNOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher) { + (void)_rehasher; + (void)_o; + struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const RNNOptionsT* __o; const flatbuffers::rehasher_function_t 
*__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va; + auto _fused_activation_function = _o->fused_activation_function; + auto _asymmetric_quantize_inputs = _o->asymmetric_quantize_inputs; + return tflite::CreateRNNOptions( + _fbb, + _fused_activation_function, + _asymmetric_quantize_inputs); +} + +inline SequenceRNNOptionsT *SequenceRNNOptions::UnPack(const flatbuffers::resolver_function_t *_resolver) const { + auto _o = std::unique_ptr(new SequenceRNNOptionsT()); + UnPackTo(_o.get(), _resolver); + return _o.release(); +} + +inline void SequenceRNNOptions::UnPackTo(SequenceRNNOptionsT *_o, const flatbuffers::resolver_function_t *_resolver) const { + (void)_o; + (void)_resolver; + { auto _e = time_major(); _o->time_major = _e; } + { auto _e = fused_activation_function(); _o->fused_activation_function = _e; } + { auto _e = asymmetric_quantize_inputs(); _o->asymmetric_quantize_inputs = _e; } +} + +inline flatbuffers::Offset SequenceRNNOptions::Pack(flatbuffers::FlatBufferBuilder &_fbb, const SequenceRNNOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher) { + return CreateSequenceRNNOptions(_fbb, _o, _rehasher); +} + +inline flatbuffers::Offset CreateSequenceRNNOptions(flatbuffers::FlatBufferBuilder &_fbb, const SequenceRNNOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher) { + (void)_rehasher; + (void)_o; + struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const SequenceRNNOptionsT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va; + auto _time_major = _o->time_major; + auto _fused_activation_function = _o->fused_activation_function; + auto _asymmetric_quantize_inputs = _o->asymmetric_quantize_inputs; + return tflite::CreateSequenceRNNOptions( + _fbb, + _time_major, + _fused_activation_function, + _asymmetric_quantize_inputs); +} + +inline BidirectionalSequenceRNNOptionsT *BidirectionalSequenceRNNOptions::UnPack(const flatbuffers::resolver_function_t *_resolver) const { + auto 
_o = std::unique_ptr(new BidirectionalSequenceRNNOptionsT()); + UnPackTo(_o.get(), _resolver); + return _o.release(); +} + +inline void BidirectionalSequenceRNNOptions::UnPackTo(BidirectionalSequenceRNNOptionsT *_o, const flatbuffers::resolver_function_t *_resolver) const { + (void)_o; + (void)_resolver; + { auto _e = time_major(); _o->time_major = _e; } + { auto _e = fused_activation_function(); _o->fused_activation_function = _e; } + { auto _e = merge_outputs(); _o->merge_outputs = _e; } + { auto _e = asymmetric_quantize_inputs(); _o->asymmetric_quantize_inputs = _e; } +} + +inline flatbuffers::Offset BidirectionalSequenceRNNOptions::Pack(flatbuffers::FlatBufferBuilder &_fbb, const BidirectionalSequenceRNNOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher) { + return CreateBidirectionalSequenceRNNOptions(_fbb, _o, _rehasher); +} + +inline flatbuffers::Offset CreateBidirectionalSequenceRNNOptions(flatbuffers::FlatBufferBuilder &_fbb, const BidirectionalSequenceRNNOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher) { + (void)_rehasher; + (void)_o; + struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const BidirectionalSequenceRNNOptionsT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va; + auto _time_major = _o->time_major; + auto _fused_activation_function = _o->fused_activation_function; + auto _merge_outputs = _o->merge_outputs; + auto _asymmetric_quantize_inputs = _o->asymmetric_quantize_inputs; + return tflite::CreateBidirectionalSequenceRNNOptions( + _fbb, + _time_major, + _fused_activation_function, + _merge_outputs, + _asymmetric_quantize_inputs); +} + +inline FullyConnectedOptionsT *FullyConnectedOptions::UnPack(const flatbuffers::resolver_function_t *_resolver) const { + auto _o = std::unique_ptr(new FullyConnectedOptionsT()); + UnPackTo(_o.get(), _resolver); + return _o.release(); +} + +inline void FullyConnectedOptions::UnPackTo(FullyConnectedOptionsT *_o, const 
flatbuffers::resolver_function_t *_resolver) const { + (void)_o; + (void)_resolver; + { auto _e = fused_activation_function(); _o->fused_activation_function = _e; } + { auto _e = weights_format(); _o->weights_format = _e; } + { auto _e = keep_num_dims(); _o->keep_num_dims = _e; } + { auto _e = asymmetric_quantize_inputs(); _o->asymmetric_quantize_inputs = _e; } +} + +inline flatbuffers::Offset FullyConnectedOptions::Pack(flatbuffers::FlatBufferBuilder &_fbb, const FullyConnectedOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher) { + return CreateFullyConnectedOptions(_fbb, _o, _rehasher); +} + +inline flatbuffers::Offset CreateFullyConnectedOptions(flatbuffers::FlatBufferBuilder &_fbb, const FullyConnectedOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher) { + (void)_rehasher; + (void)_o; + struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const FullyConnectedOptionsT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va; + auto _fused_activation_function = _o->fused_activation_function; + auto _weights_format = _o->weights_format; + auto _keep_num_dims = _o->keep_num_dims; + auto _asymmetric_quantize_inputs = _o->asymmetric_quantize_inputs; + return tflite::CreateFullyConnectedOptions( + _fbb, + _fused_activation_function, + _weights_format, + _keep_num_dims, + _asymmetric_quantize_inputs); +} + +inline SoftmaxOptionsT *SoftmaxOptions::UnPack(const flatbuffers::resolver_function_t *_resolver) const { + auto _o = std::unique_ptr(new SoftmaxOptionsT()); + UnPackTo(_o.get(), _resolver); + return _o.release(); +} + +inline void SoftmaxOptions::UnPackTo(SoftmaxOptionsT *_o, const flatbuffers::resolver_function_t *_resolver) const { + (void)_o; + (void)_resolver; + { auto _e = beta(); _o->beta = _e; } +} + +inline flatbuffers::Offset SoftmaxOptions::Pack(flatbuffers::FlatBufferBuilder &_fbb, const SoftmaxOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher) { + return 
CreateSoftmaxOptions(_fbb, _o, _rehasher); +} + +inline flatbuffers::Offset CreateSoftmaxOptions(flatbuffers::FlatBufferBuilder &_fbb, const SoftmaxOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher) { + (void)_rehasher; + (void)_o; + struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const SoftmaxOptionsT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va; + auto _beta = _o->beta; + return tflite::CreateSoftmaxOptions( + _fbb, + _beta); +} + +inline ConcatenationOptionsT *ConcatenationOptions::UnPack(const flatbuffers::resolver_function_t *_resolver) const { + auto _o = std::unique_ptr(new ConcatenationOptionsT()); + UnPackTo(_o.get(), _resolver); + return _o.release(); +} + +inline void ConcatenationOptions::UnPackTo(ConcatenationOptionsT *_o, const flatbuffers::resolver_function_t *_resolver) const { + (void)_o; + (void)_resolver; + { auto _e = axis(); _o->axis = _e; } + { auto _e = fused_activation_function(); _o->fused_activation_function = _e; } +} + +inline flatbuffers::Offset ConcatenationOptions::Pack(flatbuffers::FlatBufferBuilder &_fbb, const ConcatenationOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher) { + return CreateConcatenationOptions(_fbb, _o, _rehasher); +} + +inline flatbuffers::Offset CreateConcatenationOptions(flatbuffers::FlatBufferBuilder &_fbb, const ConcatenationOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher) { + (void)_rehasher; + (void)_o; + struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const ConcatenationOptionsT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va; + auto _axis = _o->axis; + auto _fused_activation_function = _o->fused_activation_function; + return tflite::CreateConcatenationOptions( + _fbb, + _axis, + _fused_activation_function); +} + +inline AddOptionsT *AddOptions::UnPack(const flatbuffers::resolver_function_t *_resolver) const { + auto _o = 
std::unique_ptr(new AddOptionsT()); + UnPackTo(_o.get(), _resolver); + return _o.release(); +} + +inline void AddOptions::UnPackTo(AddOptionsT *_o, const flatbuffers::resolver_function_t *_resolver) const { + (void)_o; + (void)_resolver; + { auto _e = fused_activation_function(); _o->fused_activation_function = _e; } + { auto _e = pot_scale_int16(); _o->pot_scale_int16 = _e; } +} + +inline flatbuffers::Offset AddOptions::Pack(flatbuffers::FlatBufferBuilder &_fbb, const AddOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher) { + return CreateAddOptions(_fbb, _o, _rehasher); +} + +inline flatbuffers::Offset CreateAddOptions(flatbuffers::FlatBufferBuilder &_fbb, const AddOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher) { + (void)_rehasher; + (void)_o; + struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const AddOptionsT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va; + auto _fused_activation_function = _o->fused_activation_function; + auto _pot_scale_int16 = _o->pot_scale_int16; + return tflite::CreateAddOptions( + _fbb, + _fused_activation_function, + _pot_scale_int16); +} + +inline MulOptionsT *MulOptions::UnPack(const flatbuffers::resolver_function_t *_resolver) const { + auto _o = std::unique_ptr(new MulOptionsT()); + UnPackTo(_o.get(), _resolver); + return _o.release(); +} + +inline void MulOptions::UnPackTo(MulOptionsT *_o, const flatbuffers::resolver_function_t *_resolver) const { + (void)_o; + (void)_resolver; + { auto _e = fused_activation_function(); _o->fused_activation_function = _e; } +} + +inline flatbuffers::Offset MulOptions::Pack(flatbuffers::FlatBufferBuilder &_fbb, const MulOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher) { + return CreateMulOptions(_fbb, _o, _rehasher); +} + +inline flatbuffers::Offset CreateMulOptions(flatbuffers::FlatBufferBuilder &_fbb, const MulOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher) { + 
(void)_rehasher; + (void)_o; + struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const MulOptionsT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va; + auto _fused_activation_function = _o->fused_activation_function; + return tflite::CreateMulOptions( + _fbb, + _fused_activation_function); +} + +inline L2NormOptionsT *L2NormOptions::UnPack(const flatbuffers::resolver_function_t *_resolver) const { + auto _o = std::unique_ptr(new L2NormOptionsT()); + UnPackTo(_o.get(), _resolver); + return _o.release(); +} + +inline void L2NormOptions::UnPackTo(L2NormOptionsT *_o, const flatbuffers::resolver_function_t *_resolver) const { + (void)_o; + (void)_resolver; + { auto _e = fused_activation_function(); _o->fused_activation_function = _e; } +} + +inline flatbuffers::Offset L2NormOptions::Pack(flatbuffers::FlatBufferBuilder &_fbb, const L2NormOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher) { + return CreateL2NormOptions(_fbb, _o, _rehasher); +} + +inline flatbuffers::Offset CreateL2NormOptions(flatbuffers::FlatBufferBuilder &_fbb, const L2NormOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher) { + (void)_rehasher; + (void)_o; + struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const L2NormOptionsT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va; + auto _fused_activation_function = _o->fused_activation_function; + return tflite::CreateL2NormOptions( + _fbb, + _fused_activation_function); +} + +inline LocalResponseNormalizationOptionsT *LocalResponseNormalizationOptions::UnPack(const flatbuffers::resolver_function_t *_resolver) const { + auto _o = std::unique_ptr(new LocalResponseNormalizationOptionsT()); + UnPackTo(_o.get(), _resolver); + return _o.release(); +} + +inline void LocalResponseNormalizationOptions::UnPackTo(LocalResponseNormalizationOptionsT *_o, const flatbuffers::resolver_function_t *_resolver) const { + 
(void)_o; + (void)_resolver; + { auto _e = radius(); _o->radius = _e; } + { auto _e = bias(); _o->bias = _e; } + { auto _e = alpha(); _o->alpha = _e; } + { auto _e = beta(); _o->beta = _e; } +} + +inline flatbuffers::Offset LocalResponseNormalizationOptions::Pack(flatbuffers::FlatBufferBuilder &_fbb, const LocalResponseNormalizationOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher) { + return CreateLocalResponseNormalizationOptions(_fbb, _o, _rehasher); +} + +inline flatbuffers::Offset CreateLocalResponseNormalizationOptions(flatbuffers::FlatBufferBuilder &_fbb, const LocalResponseNormalizationOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher) { + (void)_rehasher; + (void)_o; + struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const LocalResponseNormalizationOptionsT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va; + auto _radius = _o->radius; + auto _bias = _o->bias; + auto _alpha = _o->alpha; + auto _beta = _o->beta; + return tflite::CreateLocalResponseNormalizationOptions( + _fbb, + _radius, + _bias, + _alpha, + _beta); +} + +inline LSTMOptionsT *LSTMOptions::UnPack(const flatbuffers::resolver_function_t *_resolver) const { + auto _o = std::unique_ptr(new LSTMOptionsT()); + UnPackTo(_o.get(), _resolver); + return _o.release(); +} + +inline void LSTMOptions::UnPackTo(LSTMOptionsT *_o, const flatbuffers::resolver_function_t *_resolver) const { + (void)_o; + (void)_resolver; + { auto _e = fused_activation_function(); _o->fused_activation_function = _e; } + { auto _e = cell_clip(); _o->cell_clip = _e; } + { auto _e = proj_clip(); _o->proj_clip = _e; } + { auto _e = kernel_type(); _o->kernel_type = _e; } + { auto _e = asymmetric_quantize_inputs(); _o->asymmetric_quantize_inputs = _e; } +} + +inline flatbuffers::Offset LSTMOptions::Pack(flatbuffers::FlatBufferBuilder &_fbb, const LSTMOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher) { + return 
CreateLSTMOptions(_fbb, _o, _rehasher); +} + +inline flatbuffers::Offset CreateLSTMOptions(flatbuffers::FlatBufferBuilder &_fbb, const LSTMOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher) { + (void)_rehasher; + (void)_o; + struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const LSTMOptionsT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va; + auto _fused_activation_function = _o->fused_activation_function; + auto _cell_clip = _o->cell_clip; + auto _proj_clip = _o->proj_clip; + auto _kernel_type = _o->kernel_type; + auto _asymmetric_quantize_inputs = _o->asymmetric_quantize_inputs; + return tflite::CreateLSTMOptions( + _fbb, + _fused_activation_function, + _cell_clip, + _proj_clip, + _kernel_type, + _asymmetric_quantize_inputs); +} + +inline UnidirectionalSequenceLSTMOptionsT *UnidirectionalSequenceLSTMOptions::UnPack(const flatbuffers::resolver_function_t *_resolver) const { + auto _o = std::unique_ptr(new UnidirectionalSequenceLSTMOptionsT()); + UnPackTo(_o.get(), _resolver); + return _o.release(); +} + +inline void UnidirectionalSequenceLSTMOptions::UnPackTo(UnidirectionalSequenceLSTMOptionsT *_o, const flatbuffers::resolver_function_t *_resolver) const { + (void)_o; + (void)_resolver; + { auto _e = fused_activation_function(); _o->fused_activation_function = _e; } + { auto _e = cell_clip(); _o->cell_clip = _e; } + { auto _e = proj_clip(); _o->proj_clip = _e; } + { auto _e = time_major(); _o->time_major = _e; } + { auto _e = asymmetric_quantize_inputs(); _o->asymmetric_quantize_inputs = _e; } + { auto _e = diagonal_recurrent_tensors(); _o->diagonal_recurrent_tensors = _e; } +} + +inline flatbuffers::Offset UnidirectionalSequenceLSTMOptions::Pack(flatbuffers::FlatBufferBuilder &_fbb, const UnidirectionalSequenceLSTMOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher) { + return CreateUnidirectionalSequenceLSTMOptions(_fbb, _o, _rehasher); +} + +inline flatbuffers::Offset 
CreateUnidirectionalSequenceLSTMOptions(flatbuffers::FlatBufferBuilder &_fbb, const UnidirectionalSequenceLSTMOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher) { + (void)_rehasher; + (void)_o; + struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const UnidirectionalSequenceLSTMOptionsT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va; + auto _fused_activation_function = _o->fused_activation_function; + auto _cell_clip = _o->cell_clip; + auto _proj_clip = _o->proj_clip; + auto _time_major = _o->time_major; + auto _asymmetric_quantize_inputs = _o->asymmetric_quantize_inputs; + auto _diagonal_recurrent_tensors = _o->diagonal_recurrent_tensors; + return tflite::CreateUnidirectionalSequenceLSTMOptions( + _fbb, + _fused_activation_function, + _cell_clip, + _proj_clip, + _time_major, + _asymmetric_quantize_inputs, + _diagonal_recurrent_tensors); +} + +inline BidirectionalSequenceLSTMOptionsT *BidirectionalSequenceLSTMOptions::UnPack(const flatbuffers::resolver_function_t *_resolver) const { + auto _o = std::unique_ptr(new BidirectionalSequenceLSTMOptionsT()); + UnPackTo(_o.get(), _resolver); + return _o.release(); +} + +inline void BidirectionalSequenceLSTMOptions::UnPackTo(BidirectionalSequenceLSTMOptionsT *_o, const flatbuffers::resolver_function_t *_resolver) const { + (void)_o; + (void)_resolver; + { auto _e = fused_activation_function(); _o->fused_activation_function = _e; } + { auto _e = cell_clip(); _o->cell_clip = _e; } + { auto _e = proj_clip(); _o->proj_clip = _e; } + { auto _e = merge_outputs(); _o->merge_outputs = _e; } + { auto _e = time_major(); _o->time_major = _e; } + { auto _e = asymmetric_quantize_inputs(); _o->asymmetric_quantize_inputs = _e; } +} + +inline flatbuffers::Offset BidirectionalSequenceLSTMOptions::Pack(flatbuffers::FlatBufferBuilder &_fbb, const BidirectionalSequenceLSTMOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher) { + return 
CreateBidirectionalSequenceLSTMOptions(_fbb, _o, _rehasher); +} + +inline flatbuffers::Offset CreateBidirectionalSequenceLSTMOptions(flatbuffers::FlatBufferBuilder &_fbb, const BidirectionalSequenceLSTMOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher) { + (void)_rehasher; + (void)_o; + struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const BidirectionalSequenceLSTMOptionsT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va; + auto _fused_activation_function = _o->fused_activation_function; + auto _cell_clip = _o->cell_clip; + auto _proj_clip = _o->proj_clip; + auto _merge_outputs = _o->merge_outputs; + auto _time_major = _o->time_major; + auto _asymmetric_quantize_inputs = _o->asymmetric_quantize_inputs; + return tflite::CreateBidirectionalSequenceLSTMOptions( + _fbb, + _fused_activation_function, + _cell_clip, + _proj_clip, + _merge_outputs, + _time_major, + _asymmetric_quantize_inputs); +} + +inline ResizeBilinearOptionsT *ResizeBilinearOptions::UnPack(const flatbuffers::resolver_function_t *_resolver) const { + auto _o = std::unique_ptr(new ResizeBilinearOptionsT()); + UnPackTo(_o.get(), _resolver); + return _o.release(); +} + +inline void ResizeBilinearOptions::UnPackTo(ResizeBilinearOptionsT *_o, const flatbuffers::resolver_function_t *_resolver) const { + (void)_o; + (void)_resolver; + { auto _e = align_corners(); _o->align_corners = _e; } + { auto _e = half_pixel_centers(); _o->half_pixel_centers = _e; } +} + +inline flatbuffers::Offset ResizeBilinearOptions::Pack(flatbuffers::FlatBufferBuilder &_fbb, const ResizeBilinearOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher) { + return CreateResizeBilinearOptions(_fbb, _o, _rehasher); +} + +inline flatbuffers::Offset CreateResizeBilinearOptions(flatbuffers::FlatBufferBuilder &_fbb, const ResizeBilinearOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher) { + (void)_rehasher; + (void)_o; + struct _VectorArgs { 
flatbuffers::FlatBufferBuilder *__fbb; const ResizeBilinearOptionsT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va; + auto _align_corners = _o->align_corners; + auto _half_pixel_centers = _o->half_pixel_centers; + return tflite::CreateResizeBilinearOptions( + _fbb, + _align_corners, + _half_pixel_centers); +} + +inline ResizeNearestNeighborOptionsT *ResizeNearestNeighborOptions::UnPack(const flatbuffers::resolver_function_t *_resolver) const { + auto _o = std::unique_ptr(new ResizeNearestNeighborOptionsT()); + UnPackTo(_o.get(), _resolver); + return _o.release(); +} + +inline void ResizeNearestNeighborOptions::UnPackTo(ResizeNearestNeighborOptionsT *_o, const flatbuffers::resolver_function_t *_resolver) const { + (void)_o; + (void)_resolver; + { auto _e = align_corners(); _o->align_corners = _e; } + { auto _e = half_pixel_centers(); _o->half_pixel_centers = _e; } +} + +inline flatbuffers::Offset ResizeNearestNeighborOptions::Pack(flatbuffers::FlatBufferBuilder &_fbb, const ResizeNearestNeighborOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher) { + return CreateResizeNearestNeighborOptions(_fbb, _o, _rehasher); +} + +inline flatbuffers::Offset CreateResizeNearestNeighborOptions(flatbuffers::FlatBufferBuilder &_fbb, const ResizeNearestNeighborOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher) { + (void)_rehasher; + (void)_o; + struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const ResizeNearestNeighborOptionsT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va; + auto _align_corners = _o->align_corners; + auto _half_pixel_centers = _o->half_pixel_centers; + return tflite::CreateResizeNearestNeighborOptions( + _fbb, + _align_corners, + _half_pixel_centers); +} + +inline CallOptionsT *CallOptions::UnPack(const flatbuffers::resolver_function_t *_resolver) const { + auto _o = std::unique_ptr(new CallOptionsT()); + 
UnPackTo(_o.get(), _resolver); + return _o.release(); +} + +inline void CallOptions::UnPackTo(CallOptionsT *_o, const flatbuffers::resolver_function_t *_resolver) const { + (void)_o; + (void)_resolver; + { auto _e = subgraph(); _o->subgraph = _e; } +} + +inline flatbuffers::Offset CallOptions::Pack(flatbuffers::FlatBufferBuilder &_fbb, const CallOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher) { + return CreateCallOptions(_fbb, _o, _rehasher); +} + +inline flatbuffers::Offset CreateCallOptions(flatbuffers::FlatBufferBuilder &_fbb, const CallOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher) { + (void)_rehasher; + (void)_o; + struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const CallOptionsT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va; + auto _subgraph = _o->subgraph; + return tflite::CreateCallOptions( + _fbb, + _subgraph); +} + +inline PadOptionsT *PadOptions::UnPack(const flatbuffers::resolver_function_t *_resolver) const { + auto _o = std::unique_ptr(new PadOptionsT()); + UnPackTo(_o.get(), _resolver); + return _o.release(); +} + +inline void PadOptions::UnPackTo(PadOptionsT *_o, const flatbuffers::resolver_function_t *_resolver) const { + (void)_o; + (void)_resolver; +} + +inline flatbuffers::Offset PadOptions::Pack(flatbuffers::FlatBufferBuilder &_fbb, const PadOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher) { + return CreatePadOptions(_fbb, _o, _rehasher); +} + +inline flatbuffers::Offset CreatePadOptions(flatbuffers::FlatBufferBuilder &_fbb, const PadOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher) { + (void)_rehasher; + (void)_o; + struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const PadOptionsT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va; + return tflite::CreatePadOptions( + _fbb); +} + +inline PadV2OptionsT *PadV2Options::UnPack(const 
flatbuffers::resolver_function_t *_resolver) const { + auto _o = std::unique_ptr(new PadV2OptionsT()); + UnPackTo(_o.get(), _resolver); + return _o.release(); +} + +inline void PadV2Options::UnPackTo(PadV2OptionsT *_o, const flatbuffers::resolver_function_t *_resolver) const { + (void)_o; + (void)_resolver; +} + +inline flatbuffers::Offset PadV2Options::Pack(flatbuffers::FlatBufferBuilder &_fbb, const PadV2OptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher) { + return CreatePadV2Options(_fbb, _o, _rehasher); +} + +inline flatbuffers::Offset CreatePadV2Options(flatbuffers::FlatBufferBuilder &_fbb, const PadV2OptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher) { + (void)_rehasher; + (void)_o; + struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const PadV2OptionsT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va; + return tflite::CreatePadV2Options( + _fbb); +} + +inline ReshapeOptionsT *ReshapeOptions::UnPack(const flatbuffers::resolver_function_t *_resolver) const { + auto _o = std::unique_ptr(new ReshapeOptionsT()); + UnPackTo(_o.get(), _resolver); + return _o.release(); +} + +inline void ReshapeOptions::UnPackTo(ReshapeOptionsT *_o, const flatbuffers::resolver_function_t *_resolver) const { + (void)_o; + (void)_resolver; + { auto _e = new_shape(); if (_e) { _o->new_shape.resize(_e->size()); for (flatbuffers::uoffset_t _i = 0; _i < _e->size(); _i++) { _o->new_shape[_i] = _e->Get(_i); } } } +} + +inline flatbuffers::Offset ReshapeOptions::Pack(flatbuffers::FlatBufferBuilder &_fbb, const ReshapeOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher) { + return CreateReshapeOptions(_fbb, _o, _rehasher); +} + +inline flatbuffers::Offset CreateReshapeOptions(flatbuffers::FlatBufferBuilder &_fbb, const ReshapeOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher) { + (void)_rehasher; + (void)_o; + struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const 
ReshapeOptionsT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va; + auto _new_shape = _o->new_shape.size() ? _fbb.CreateVector(_o->new_shape) : 0; + return tflite::CreateReshapeOptions( + _fbb, + _new_shape); +} + +inline SpaceToBatchNDOptionsT *SpaceToBatchNDOptions::UnPack(const flatbuffers::resolver_function_t *_resolver) const { + auto _o = std::unique_ptr(new SpaceToBatchNDOptionsT()); + UnPackTo(_o.get(), _resolver); + return _o.release(); +} + +inline void SpaceToBatchNDOptions::UnPackTo(SpaceToBatchNDOptionsT *_o, const flatbuffers::resolver_function_t *_resolver) const { + (void)_o; + (void)_resolver; +} + +inline flatbuffers::Offset SpaceToBatchNDOptions::Pack(flatbuffers::FlatBufferBuilder &_fbb, const SpaceToBatchNDOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher) { + return CreateSpaceToBatchNDOptions(_fbb, _o, _rehasher); +} + +inline flatbuffers::Offset CreateSpaceToBatchNDOptions(flatbuffers::FlatBufferBuilder &_fbb, const SpaceToBatchNDOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher) { + (void)_rehasher; + (void)_o; + struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const SpaceToBatchNDOptionsT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va; + return tflite::CreateSpaceToBatchNDOptions( + _fbb); +} + +inline BatchToSpaceNDOptionsT *BatchToSpaceNDOptions::UnPack(const flatbuffers::resolver_function_t *_resolver) const { + auto _o = std::unique_ptr(new BatchToSpaceNDOptionsT()); + UnPackTo(_o.get(), _resolver); + return _o.release(); +} + +inline void BatchToSpaceNDOptions::UnPackTo(BatchToSpaceNDOptionsT *_o, const flatbuffers::resolver_function_t *_resolver) const { + (void)_o; + (void)_resolver; +} + +inline flatbuffers::Offset BatchToSpaceNDOptions::Pack(flatbuffers::FlatBufferBuilder &_fbb, const BatchToSpaceNDOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher) { + return 
CreateBatchToSpaceNDOptions(_fbb, _o, _rehasher); +} + +inline flatbuffers::Offset CreateBatchToSpaceNDOptions(flatbuffers::FlatBufferBuilder &_fbb, const BatchToSpaceNDOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher) { + (void)_rehasher; + (void)_o; + struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const BatchToSpaceNDOptionsT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va; + return tflite::CreateBatchToSpaceNDOptions( + _fbb); +} + +inline SkipGramOptionsT *SkipGramOptions::UnPack(const flatbuffers::resolver_function_t *_resolver) const { + auto _o = std::unique_ptr(new SkipGramOptionsT()); + UnPackTo(_o.get(), _resolver); + return _o.release(); +} + +inline void SkipGramOptions::UnPackTo(SkipGramOptionsT *_o, const flatbuffers::resolver_function_t *_resolver) const { + (void)_o; + (void)_resolver; + { auto _e = ngram_size(); _o->ngram_size = _e; } + { auto _e = max_skip_size(); _o->max_skip_size = _e; } + { auto _e = include_all_ngrams(); _o->include_all_ngrams = _e; } +} + +inline flatbuffers::Offset SkipGramOptions::Pack(flatbuffers::FlatBufferBuilder &_fbb, const SkipGramOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher) { + return CreateSkipGramOptions(_fbb, _o, _rehasher); +} + +inline flatbuffers::Offset CreateSkipGramOptions(flatbuffers::FlatBufferBuilder &_fbb, const SkipGramOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher) { + (void)_rehasher; + (void)_o; + struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const SkipGramOptionsT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va; + auto _ngram_size = _o->ngram_size; + auto _max_skip_size = _o->max_skip_size; + auto _include_all_ngrams = _o->include_all_ngrams; + return tflite::CreateSkipGramOptions( + _fbb, + _ngram_size, + _max_skip_size, + _include_all_ngrams); +} + +inline SpaceToDepthOptionsT *SpaceToDepthOptions::UnPack(const 
flatbuffers::resolver_function_t *_resolver) const { + auto _o = std::unique_ptr(new SpaceToDepthOptionsT()); + UnPackTo(_o.get(), _resolver); + return _o.release(); +} + +inline void SpaceToDepthOptions::UnPackTo(SpaceToDepthOptionsT *_o, const flatbuffers::resolver_function_t *_resolver) const { + (void)_o; + (void)_resolver; + { auto _e = block_size(); _o->block_size = _e; } +} + +inline flatbuffers::Offset SpaceToDepthOptions::Pack(flatbuffers::FlatBufferBuilder &_fbb, const SpaceToDepthOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher) { + return CreateSpaceToDepthOptions(_fbb, _o, _rehasher); +} + +inline flatbuffers::Offset CreateSpaceToDepthOptions(flatbuffers::FlatBufferBuilder &_fbb, const SpaceToDepthOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher) { + (void)_rehasher; + (void)_o; + struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const SpaceToDepthOptionsT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va; + auto _block_size = _o->block_size; + return tflite::CreateSpaceToDepthOptions( + _fbb, + _block_size); +} + +inline DepthToSpaceOptionsT *DepthToSpaceOptions::UnPack(const flatbuffers::resolver_function_t *_resolver) const { + auto _o = std::unique_ptr(new DepthToSpaceOptionsT()); + UnPackTo(_o.get(), _resolver); + return _o.release(); +} + +inline void DepthToSpaceOptions::UnPackTo(DepthToSpaceOptionsT *_o, const flatbuffers::resolver_function_t *_resolver) const { + (void)_o; + (void)_resolver; + { auto _e = block_size(); _o->block_size = _e; } +} + +inline flatbuffers::Offset DepthToSpaceOptions::Pack(flatbuffers::FlatBufferBuilder &_fbb, const DepthToSpaceOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher) { + return CreateDepthToSpaceOptions(_fbb, _o, _rehasher); +} + +inline flatbuffers::Offset CreateDepthToSpaceOptions(flatbuffers::FlatBufferBuilder &_fbb, const DepthToSpaceOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher) { 
+ (void)_rehasher; + (void)_o; + struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const DepthToSpaceOptionsT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va; + auto _block_size = _o->block_size; + return tflite::CreateDepthToSpaceOptions( + _fbb, + _block_size); +} + +inline SubOptionsT *SubOptions::UnPack(const flatbuffers::resolver_function_t *_resolver) const { + auto _o = std::unique_ptr(new SubOptionsT()); + UnPackTo(_o.get(), _resolver); + return _o.release(); +} + +inline void SubOptions::UnPackTo(SubOptionsT *_o, const flatbuffers::resolver_function_t *_resolver) const { + (void)_o; + (void)_resolver; + { auto _e = fused_activation_function(); _o->fused_activation_function = _e; } + { auto _e = pot_scale_int16(); _o->pot_scale_int16 = _e; } +} + +inline flatbuffers::Offset SubOptions::Pack(flatbuffers::FlatBufferBuilder &_fbb, const SubOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher) { + return CreateSubOptions(_fbb, _o, _rehasher); +} + +inline flatbuffers::Offset CreateSubOptions(flatbuffers::FlatBufferBuilder &_fbb, const SubOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher) { + (void)_rehasher; + (void)_o; + struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const SubOptionsT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va; + auto _fused_activation_function = _o->fused_activation_function; + auto _pot_scale_int16 = _o->pot_scale_int16; + return tflite::CreateSubOptions( + _fbb, + _fused_activation_function, + _pot_scale_int16); +} + +inline DivOptionsT *DivOptions::UnPack(const flatbuffers::resolver_function_t *_resolver) const { + auto _o = std::unique_ptr(new DivOptionsT()); + UnPackTo(_o.get(), _resolver); + return _o.release(); +} + +inline void DivOptions::UnPackTo(DivOptionsT *_o, const flatbuffers::resolver_function_t *_resolver) const { + (void)_o; + (void)_resolver; + { auto _e = 
fused_activation_function(); _o->fused_activation_function = _e; } +} + +inline flatbuffers::Offset DivOptions::Pack(flatbuffers::FlatBufferBuilder &_fbb, const DivOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher) { + return CreateDivOptions(_fbb, _o, _rehasher); +} + +inline flatbuffers::Offset CreateDivOptions(flatbuffers::FlatBufferBuilder &_fbb, const DivOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher) { + (void)_rehasher; + (void)_o; + struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const DivOptionsT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va; + auto _fused_activation_function = _o->fused_activation_function; + return tflite::CreateDivOptions( + _fbb, + _fused_activation_function); +} + +inline TopKV2OptionsT *TopKV2Options::UnPack(const flatbuffers::resolver_function_t *_resolver) const { + auto _o = std::unique_ptr(new TopKV2OptionsT()); + UnPackTo(_o.get(), _resolver); + return _o.release(); +} + +inline void TopKV2Options::UnPackTo(TopKV2OptionsT *_o, const flatbuffers::resolver_function_t *_resolver) const { + (void)_o; + (void)_resolver; +} + +inline flatbuffers::Offset TopKV2Options::Pack(flatbuffers::FlatBufferBuilder &_fbb, const TopKV2OptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher) { + return CreateTopKV2Options(_fbb, _o, _rehasher); +} + +inline flatbuffers::Offset CreateTopKV2Options(flatbuffers::FlatBufferBuilder &_fbb, const TopKV2OptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher) { + (void)_rehasher; + (void)_o; + struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const TopKV2OptionsT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va; + return tflite::CreateTopKV2Options( + _fbb); +} + +inline EmbeddingLookupSparseOptionsT *EmbeddingLookupSparseOptions::UnPack(const flatbuffers::resolver_function_t *_resolver) const { + auto _o = std::unique_ptr(new 
EmbeddingLookupSparseOptionsT()); + UnPackTo(_o.get(), _resolver); + return _o.release(); +} + +inline void EmbeddingLookupSparseOptions::UnPackTo(EmbeddingLookupSparseOptionsT *_o, const flatbuffers::resolver_function_t *_resolver) const { + (void)_o; + (void)_resolver; + { auto _e = combiner(); _o->combiner = _e; } +} + +inline flatbuffers::Offset EmbeddingLookupSparseOptions::Pack(flatbuffers::FlatBufferBuilder &_fbb, const EmbeddingLookupSparseOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher) { + return CreateEmbeddingLookupSparseOptions(_fbb, _o, _rehasher); +} + +inline flatbuffers::Offset CreateEmbeddingLookupSparseOptions(flatbuffers::FlatBufferBuilder &_fbb, const EmbeddingLookupSparseOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher) { + (void)_rehasher; + (void)_o; + struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const EmbeddingLookupSparseOptionsT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va; + auto _combiner = _o->combiner; + return tflite::CreateEmbeddingLookupSparseOptions( + _fbb, + _combiner); +} + +inline GatherOptionsT *GatherOptions::UnPack(const flatbuffers::resolver_function_t *_resolver) const { + auto _o = std::unique_ptr(new GatherOptionsT()); + UnPackTo(_o.get(), _resolver); + return _o.release(); +} + +inline void GatherOptions::UnPackTo(GatherOptionsT *_o, const flatbuffers::resolver_function_t *_resolver) const { + (void)_o; + (void)_resolver; + { auto _e = axis(); _o->axis = _e; } + { auto _e = batch_dims(); _o->batch_dims = _e; } +} + +inline flatbuffers::Offset GatherOptions::Pack(flatbuffers::FlatBufferBuilder &_fbb, const GatherOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher) { + return CreateGatherOptions(_fbb, _o, _rehasher); +} + +inline flatbuffers::Offset CreateGatherOptions(flatbuffers::FlatBufferBuilder &_fbb, const GatherOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher) { + (void)_rehasher; + 
(void)_o; + struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const GatherOptionsT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va; + auto _axis = _o->axis; + auto _batch_dims = _o->batch_dims; + return tflite::CreateGatherOptions( + _fbb, + _axis, + _batch_dims); +} + +inline TransposeOptionsT *TransposeOptions::UnPack(const flatbuffers::resolver_function_t *_resolver) const { + auto _o = std::unique_ptr(new TransposeOptionsT()); + UnPackTo(_o.get(), _resolver); + return _o.release(); +} + +inline void TransposeOptions::UnPackTo(TransposeOptionsT *_o, const flatbuffers::resolver_function_t *_resolver) const { + (void)_o; + (void)_resolver; +} + +inline flatbuffers::Offset TransposeOptions::Pack(flatbuffers::FlatBufferBuilder &_fbb, const TransposeOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher) { + return CreateTransposeOptions(_fbb, _o, _rehasher); +} + +inline flatbuffers::Offset CreateTransposeOptions(flatbuffers::FlatBufferBuilder &_fbb, const TransposeOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher) { + (void)_rehasher; + (void)_o; + struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const TransposeOptionsT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va; + return tflite::CreateTransposeOptions( + _fbb); +} + +inline ExpOptionsT *ExpOptions::UnPack(const flatbuffers::resolver_function_t *_resolver) const { + auto _o = std::unique_ptr(new ExpOptionsT()); + UnPackTo(_o.get(), _resolver); + return _o.release(); +} + +inline void ExpOptions::UnPackTo(ExpOptionsT *_o, const flatbuffers::resolver_function_t *_resolver) const { + (void)_o; + (void)_resolver; +} + +inline flatbuffers::Offset ExpOptions::Pack(flatbuffers::FlatBufferBuilder &_fbb, const ExpOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher) { + return CreateExpOptions(_fbb, _o, _rehasher); +} + +inline flatbuffers::Offset 
CreateExpOptions(flatbuffers::FlatBufferBuilder &_fbb, const ExpOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher) { + (void)_rehasher; + (void)_o; + struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const ExpOptionsT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va; + return tflite::CreateExpOptions( + _fbb); +} + +inline CosOptionsT *CosOptions::UnPack(const flatbuffers::resolver_function_t *_resolver) const { + auto _o = std::unique_ptr(new CosOptionsT()); + UnPackTo(_o.get(), _resolver); + return _o.release(); +} + +inline void CosOptions::UnPackTo(CosOptionsT *_o, const flatbuffers::resolver_function_t *_resolver) const { + (void)_o; + (void)_resolver; +} + +inline flatbuffers::Offset CosOptions::Pack(flatbuffers::FlatBufferBuilder &_fbb, const CosOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher) { + return CreateCosOptions(_fbb, _o, _rehasher); +} + +inline flatbuffers::Offset CreateCosOptions(flatbuffers::FlatBufferBuilder &_fbb, const CosOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher) { + (void)_rehasher; + (void)_o; + struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const CosOptionsT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va; + return tflite::CreateCosOptions( + _fbb); +} + +inline ReducerOptionsT *ReducerOptions::UnPack(const flatbuffers::resolver_function_t *_resolver) const { + auto _o = std::unique_ptr(new ReducerOptionsT()); + UnPackTo(_o.get(), _resolver); + return _o.release(); +} + +inline void ReducerOptions::UnPackTo(ReducerOptionsT *_o, const flatbuffers::resolver_function_t *_resolver) const { + (void)_o; + (void)_resolver; + { auto _e = keep_dims(); _o->keep_dims = _e; } +} + +inline flatbuffers::Offset ReducerOptions::Pack(flatbuffers::FlatBufferBuilder &_fbb, const ReducerOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher) { + return 
CreateReducerOptions(_fbb, _o, _rehasher); +} + +inline flatbuffers::Offset CreateReducerOptions(flatbuffers::FlatBufferBuilder &_fbb, const ReducerOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher) { + (void)_rehasher; + (void)_o; + struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const ReducerOptionsT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va; + auto _keep_dims = _o->keep_dims; + return tflite::CreateReducerOptions( + _fbb, + _keep_dims); +} + +inline SqueezeOptionsT *SqueezeOptions::UnPack(const flatbuffers::resolver_function_t *_resolver) const { + auto _o = std::unique_ptr(new SqueezeOptionsT()); + UnPackTo(_o.get(), _resolver); + return _o.release(); +} + +inline void SqueezeOptions::UnPackTo(SqueezeOptionsT *_o, const flatbuffers::resolver_function_t *_resolver) const { + (void)_o; + (void)_resolver; + { auto _e = squeeze_dims(); if (_e) { _o->squeeze_dims.resize(_e->size()); for (flatbuffers::uoffset_t _i = 0; _i < _e->size(); _i++) { _o->squeeze_dims[_i] = _e->Get(_i); } } } +} + +inline flatbuffers::Offset SqueezeOptions::Pack(flatbuffers::FlatBufferBuilder &_fbb, const SqueezeOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher) { + return CreateSqueezeOptions(_fbb, _o, _rehasher); +} + +inline flatbuffers::Offset CreateSqueezeOptions(flatbuffers::FlatBufferBuilder &_fbb, const SqueezeOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher) { + (void)_rehasher; + (void)_o; + struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const SqueezeOptionsT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va; + auto _squeeze_dims = _o->squeeze_dims.size() ? 
_fbb.CreateVector(_o->squeeze_dims) : 0; + return tflite::CreateSqueezeOptions( + _fbb, + _squeeze_dims); +} + +inline SplitOptionsT *SplitOptions::UnPack(const flatbuffers::resolver_function_t *_resolver) const { + auto _o = std::unique_ptr(new SplitOptionsT()); + UnPackTo(_o.get(), _resolver); + return _o.release(); +} + +inline void SplitOptions::UnPackTo(SplitOptionsT *_o, const flatbuffers::resolver_function_t *_resolver) const { + (void)_o; + (void)_resolver; + { auto _e = num_splits(); _o->num_splits = _e; } +} + +inline flatbuffers::Offset SplitOptions::Pack(flatbuffers::FlatBufferBuilder &_fbb, const SplitOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher) { + return CreateSplitOptions(_fbb, _o, _rehasher); +} + +inline flatbuffers::Offset CreateSplitOptions(flatbuffers::FlatBufferBuilder &_fbb, const SplitOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher) { + (void)_rehasher; + (void)_o; + struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const SplitOptionsT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va; + auto _num_splits = _o->num_splits; + return tflite::CreateSplitOptions( + _fbb, + _num_splits); +} + +inline SplitVOptionsT *SplitVOptions::UnPack(const flatbuffers::resolver_function_t *_resolver) const { + auto _o = std::unique_ptr(new SplitVOptionsT()); + UnPackTo(_o.get(), _resolver); + return _o.release(); +} + +inline void SplitVOptions::UnPackTo(SplitVOptionsT *_o, const flatbuffers::resolver_function_t *_resolver) const { + (void)_o; + (void)_resolver; + { auto _e = num_splits(); _o->num_splits = _e; } +} + +inline flatbuffers::Offset SplitVOptions::Pack(flatbuffers::FlatBufferBuilder &_fbb, const SplitVOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher) { + return CreateSplitVOptions(_fbb, _o, _rehasher); +} + +inline flatbuffers::Offset CreateSplitVOptions(flatbuffers::FlatBufferBuilder &_fbb, const SplitVOptionsT *_o, const 
flatbuffers::rehasher_function_t *_rehasher) { + (void)_rehasher; + (void)_o; + struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const SplitVOptionsT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va; + auto _num_splits = _o->num_splits; + return tflite::CreateSplitVOptions( + _fbb, + _num_splits); +} + +inline StridedSliceOptionsT *StridedSliceOptions::UnPack(const flatbuffers::resolver_function_t *_resolver) const { + auto _o = std::unique_ptr(new StridedSliceOptionsT()); + UnPackTo(_o.get(), _resolver); + return _o.release(); +} + +inline void StridedSliceOptions::UnPackTo(StridedSliceOptionsT *_o, const flatbuffers::resolver_function_t *_resolver) const { + (void)_o; + (void)_resolver; + { auto _e = begin_mask(); _o->begin_mask = _e; } + { auto _e = end_mask(); _o->end_mask = _e; } + { auto _e = ellipsis_mask(); _o->ellipsis_mask = _e; } + { auto _e = new_axis_mask(); _o->new_axis_mask = _e; } + { auto _e = shrink_axis_mask(); _o->shrink_axis_mask = _e; } + { auto _e = offset(); _o->offset = _e; } +} + +inline flatbuffers::Offset StridedSliceOptions::Pack(flatbuffers::FlatBufferBuilder &_fbb, const StridedSliceOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher) { + return CreateStridedSliceOptions(_fbb, _o, _rehasher); +} + +inline flatbuffers::Offset CreateStridedSliceOptions(flatbuffers::FlatBufferBuilder &_fbb, const StridedSliceOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher) { + (void)_rehasher; + (void)_o; + struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const StridedSliceOptionsT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va; + auto _begin_mask = _o->begin_mask; + auto _end_mask = _o->end_mask; + auto _ellipsis_mask = _o->ellipsis_mask; + auto _new_axis_mask = _o->new_axis_mask; + auto _shrink_axis_mask = _o->shrink_axis_mask; + auto _offset = _o->offset; + return tflite::CreateStridedSliceOptions( 
+ _fbb, + _begin_mask, + _end_mask, + _ellipsis_mask, + _new_axis_mask, + _shrink_axis_mask, + _offset); +} + +inline LogSoftmaxOptionsT *LogSoftmaxOptions::UnPack(const flatbuffers::resolver_function_t *_resolver) const { + auto _o = std::unique_ptr(new LogSoftmaxOptionsT()); + UnPackTo(_o.get(), _resolver); + return _o.release(); +} + +inline void LogSoftmaxOptions::UnPackTo(LogSoftmaxOptionsT *_o, const flatbuffers::resolver_function_t *_resolver) const { + (void)_o; + (void)_resolver; +} + +inline flatbuffers::Offset LogSoftmaxOptions::Pack(flatbuffers::FlatBufferBuilder &_fbb, const LogSoftmaxOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher) { + return CreateLogSoftmaxOptions(_fbb, _o, _rehasher); +} + +inline flatbuffers::Offset CreateLogSoftmaxOptions(flatbuffers::FlatBufferBuilder &_fbb, const LogSoftmaxOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher) { + (void)_rehasher; + (void)_o; + struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const LogSoftmaxOptionsT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va; + return tflite::CreateLogSoftmaxOptions( + _fbb); +} + +inline CastOptionsT *CastOptions::UnPack(const flatbuffers::resolver_function_t *_resolver) const { + auto _o = std::unique_ptr(new CastOptionsT()); + UnPackTo(_o.get(), _resolver); + return _o.release(); +} + +inline void CastOptions::UnPackTo(CastOptionsT *_o, const flatbuffers::resolver_function_t *_resolver) const { + (void)_o; + (void)_resolver; + { auto _e = in_data_type(); _o->in_data_type = _e; } + { auto _e = out_data_type(); _o->out_data_type = _e; } +} + +inline flatbuffers::Offset CastOptions::Pack(flatbuffers::FlatBufferBuilder &_fbb, const CastOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher) { + return CreateCastOptions(_fbb, _o, _rehasher); +} + +inline flatbuffers::Offset CreateCastOptions(flatbuffers::FlatBufferBuilder &_fbb, const CastOptionsT *_o, const 
flatbuffers::rehasher_function_t *_rehasher) { + (void)_rehasher; + (void)_o; + struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const CastOptionsT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va; + auto _in_data_type = _o->in_data_type; + auto _out_data_type = _o->out_data_type; + return tflite::CreateCastOptions( + _fbb, + _in_data_type, + _out_data_type); +} + +inline DequantizeOptionsT *DequantizeOptions::UnPack(const flatbuffers::resolver_function_t *_resolver) const { + auto _o = std::unique_ptr(new DequantizeOptionsT()); + UnPackTo(_o.get(), _resolver); + return _o.release(); +} + +inline void DequantizeOptions::UnPackTo(DequantizeOptionsT *_o, const flatbuffers::resolver_function_t *_resolver) const { + (void)_o; + (void)_resolver; +} + +inline flatbuffers::Offset DequantizeOptions::Pack(flatbuffers::FlatBufferBuilder &_fbb, const DequantizeOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher) { + return CreateDequantizeOptions(_fbb, _o, _rehasher); +} + +inline flatbuffers::Offset CreateDequantizeOptions(flatbuffers::FlatBufferBuilder &_fbb, const DequantizeOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher) { + (void)_rehasher; + (void)_o; + struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const DequantizeOptionsT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va; + return tflite::CreateDequantizeOptions( + _fbb); +} + +inline MaximumMinimumOptionsT *MaximumMinimumOptions::UnPack(const flatbuffers::resolver_function_t *_resolver) const { + auto _o = std::unique_ptr(new MaximumMinimumOptionsT()); + UnPackTo(_o.get(), _resolver); + return _o.release(); +} + +inline void MaximumMinimumOptions::UnPackTo(MaximumMinimumOptionsT *_o, const flatbuffers::resolver_function_t *_resolver) const { + (void)_o; + (void)_resolver; +} + +inline flatbuffers::Offset MaximumMinimumOptions::Pack(flatbuffers::FlatBufferBuilder 
&_fbb, const MaximumMinimumOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher) { + return CreateMaximumMinimumOptions(_fbb, _o, _rehasher); +} + +inline flatbuffers::Offset CreateMaximumMinimumOptions(flatbuffers::FlatBufferBuilder &_fbb, const MaximumMinimumOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher) { + (void)_rehasher; + (void)_o; + struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const MaximumMinimumOptionsT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va; + return tflite::CreateMaximumMinimumOptions( + _fbb); +} + +inline TileOptionsT *TileOptions::UnPack(const flatbuffers::resolver_function_t *_resolver) const { + auto _o = std::unique_ptr(new TileOptionsT()); + UnPackTo(_o.get(), _resolver); + return _o.release(); +} + +inline void TileOptions::UnPackTo(TileOptionsT *_o, const flatbuffers::resolver_function_t *_resolver) const { + (void)_o; + (void)_resolver; +} + +inline flatbuffers::Offset TileOptions::Pack(flatbuffers::FlatBufferBuilder &_fbb, const TileOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher) { + return CreateTileOptions(_fbb, _o, _rehasher); +} + +inline flatbuffers::Offset CreateTileOptions(flatbuffers::FlatBufferBuilder &_fbb, const TileOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher) { + (void)_rehasher; + (void)_o; + struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const TileOptionsT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va; + return tflite::CreateTileOptions( + _fbb); +} + +inline ArgMaxOptionsT *ArgMaxOptions::UnPack(const flatbuffers::resolver_function_t *_resolver) const { + auto _o = std::unique_ptr(new ArgMaxOptionsT()); + UnPackTo(_o.get(), _resolver); + return _o.release(); +} + +inline void ArgMaxOptions::UnPackTo(ArgMaxOptionsT *_o, const flatbuffers::resolver_function_t *_resolver) const { + (void)_o; + (void)_resolver; + { auto _e = 
output_type(); _o->output_type = _e; } +} + +inline flatbuffers::Offset ArgMaxOptions::Pack(flatbuffers::FlatBufferBuilder &_fbb, const ArgMaxOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher) { + return CreateArgMaxOptions(_fbb, _o, _rehasher); +} + +inline flatbuffers::Offset CreateArgMaxOptions(flatbuffers::FlatBufferBuilder &_fbb, const ArgMaxOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher) { + (void)_rehasher; + (void)_o; + struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const ArgMaxOptionsT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va; + auto _output_type = _o->output_type; + return tflite::CreateArgMaxOptions( + _fbb, + _output_type); +} + +inline ArgMinOptionsT *ArgMinOptions::UnPack(const flatbuffers::resolver_function_t *_resolver) const { + auto _o = std::unique_ptr(new ArgMinOptionsT()); + UnPackTo(_o.get(), _resolver); + return _o.release(); +} + +inline void ArgMinOptions::UnPackTo(ArgMinOptionsT *_o, const flatbuffers::resolver_function_t *_resolver) const { + (void)_o; + (void)_resolver; + { auto _e = output_type(); _o->output_type = _e; } +} + +inline flatbuffers::Offset ArgMinOptions::Pack(flatbuffers::FlatBufferBuilder &_fbb, const ArgMinOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher) { + return CreateArgMinOptions(_fbb, _o, _rehasher); +} + +inline flatbuffers::Offset CreateArgMinOptions(flatbuffers::FlatBufferBuilder &_fbb, const ArgMinOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher) { + (void)_rehasher; + (void)_o; + struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const ArgMinOptionsT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va; + auto _output_type = _o->output_type; + return tflite::CreateArgMinOptions( + _fbb, + _output_type); +} + +inline GreaterOptionsT *GreaterOptions::UnPack(const flatbuffers::resolver_function_t *_resolver) const { + auto 
_o = std::unique_ptr(new GreaterOptionsT()); + UnPackTo(_o.get(), _resolver); + return _o.release(); +} + +inline void GreaterOptions::UnPackTo(GreaterOptionsT *_o, const flatbuffers::resolver_function_t *_resolver) const { + (void)_o; + (void)_resolver; +} + +inline flatbuffers::Offset GreaterOptions::Pack(flatbuffers::FlatBufferBuilder &_fbb, const GreaterOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher) { + return CreateGreaterOptions(_fbb, _o, _rehasher); +} + +inline flatbuffers::Offset CreateGreaterOptions(flatbuffers::FlatBufferBuilder &_fbb, const GreaterOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher) { + (void)_rehasher; + (void)_o; + struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const GreaterOptionsT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va; + return tflite::CreateGreaterOptions( + _fbb); +} + +inline GreaterEqualOptionsT *GreaterEqualOptions::UnPack(const flatbuffers::resolver_function_t *_resolver) const { + auto _o = std::unique_ptr(new GreaterEqualOptionsT()); + UnPackTo(_o.get(), _resolver); + return _o.release(); +} + +inline void GreaterEqualOptions::UnPackTo(GreaterEqualOptionsT *_o, const flatbuffers::resolver_function_t *_resolver) const { + (void)_o; + (void)_resolver; +} + +inline flatbuffers::Offset GreaterEqualOptions::Pack(flatbuffers::FlatBufferBuilder &_fbb, const GreaterEqualOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher) { + return CreateGreaterEqualOptions(_fbb, _o, _rehasher); +} + +inline flatbuffers::Offset CreateGreaterEqualOptions(flatbuffers::FlatBufferBuilder &_fbb, const GreaterEqualOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher) { + (void)_rehasher; + (void)_o; + struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const GreaterEqualOptionsT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va; + return 
tflite::CreateGreaterEqualOptions( + _fbb); +} + +inline LessOptionsT *LessOptions::UnPack(const flatbuffers::resolver_function_t *_resolver) const { + auto _o = std::unique_ptr(new LessOptionsT()); + UnPackTo(_o.get(), _resolver); + return _o.release(); +} + +inline void LessOptions::UnPackTo(LessOptionsT *_o, const flatbuffers::resolver_function_t *_resolver) const { + (void)_o; + (void)_resolver; +} + +inline flatbuffers::Offset LessOptions::Pack(flatbuffers::FlatBufferBuilder &_fbb, const LessOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher) { + return CreateLessOptions(_fbb, _o, _rehasher); +} + +inline flatbuffers::Offset CreateLessOptions(flatbuffers::FlatBufferBuilder &_fbb, const LessOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher) { + (void)_rehasher; + (void)_o; + struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const LessOptionsT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va; + return tflite::CreateLessOptions( + _fbb); +} + +inline LessEqualOptionsT *LessEqualOptions::UnPack(const flatbuffers::resolver_function_t *_resolver) const { + auto _o = std::unique_ptr(new LessEqualOptionsT()); + UnPackTo(_o.get(), _resolver); + return _o.release(); +} + +inline void LessEqualOptions::UnPackTo(LessEqualOptionsT *_o, const flatbuffers::resolver_function_t *_resolver) const { + (void)_o; + (void)_resolver; +} + +inline flatbuffers::Offset LessEqualOptions::Pack(flatbuffers::FlatBufferBuilder &_fbb, const LessEqualOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher) { + return CreateLessEqualOptions(_fbb, _o, _rehasher); +} + +inline flatbuffers::Offset CreateLessEqualOptions(flatbuffers::FlatBufferBuilder &_fbb, const LessEqualOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher) { + (void)_rehasher; + (void)_o; + struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const LessEqualOptionsT* __o; const flatbuffers::rehasher_function_t 
*__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va; + return tflite::CreateLessEqualOptions( + _fbb); +} + +inline NegOptionsT *NegOptions::UnPack(const flatbuffers::resolver_function_t *_resolver) const { + auto _o = std::unique_ptr(new NegOptionsT()); + UnPackTo(_o.get(), _resolver); + return _o.release(); +} + +inline void NegOptions::UnPackTo(NegOptionsT *_o, const flatbuffers::resolver_function_t *_resolver) const { + (void)_o; + (void)_resolver; +} + +inline flatbuffers::Offset NegOptions::Pack(flatbuffers::FlatBufferBuilder &_fbb, const NegOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher) { + return CreateNegOptions(_fbb, _o, _rehasher); +} + +inline flatbuffers::Offset CreateNegOptions(flatbuffers::FlatBufferBuilder &_fbb, const NegOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher) { + (void)_rehasher; + (void)_o; + struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const NegOptionsT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va; + return tflite::CreateNegOptions( + _fbb); +} + +inline SelectOptionsT *SelectOptions::UnPack(const flatbuffers::resolver_function_t *_resolver) const { + auto _o = std::unique_ptr(new SelectOptionsT()); + UnPackTo(_o.get(), _resolver); + return _o.release(); +} + +inline void SelectOptions::UnPackTo(SelectOptionsT *_o, const flatbuffers::resolver_function_t *_resolver) const { + (void)_o; + (void)_resolver; +} + +inline flatbuffers::Offset SelectOptions::Pack(flatbuffers::FlatBufferBuilder &_fbb, const SelectOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher) { + return CreateSelectOptions(_fbb, _o, _rehasher); +} + +inline flatbuffers::Offset CreateSelectOptions(flatbuffers::FlatBufferBuilder &_fbb, const SelectOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher) { + (void)_rehasher; + (void)_o; + struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const SelectOptionsT* __o; const 
flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va; + return tflite::CreateSelectOptions( + _fbb); +} + +inline SliceOptionsT *SliceOptions::UnPack(const flatbuffers::resolver_function_t *_resolver) const { + auto _o = std::unique_ptr(new SliceOptionsT()); + UnPackTo(_o.get(), _resolver); + return _o.release(); +} + +inline void SliceOptions::UnPackTo(SliceOptionsT *_o, const flatbuffers::resolver_function_t *_resolver) const { + (void)_o; + (void)_resolver; +} + +inline flatbuffers::Offset SliceOptions::Pack(flatbuffers::FlatBufferBuilder &_fbb, const SliceOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher) { + return CreateSliceOptions(_fbb, _o, _rehasher); +} + +inline flatbuffers::Offset CreateSliceOptions(flatbuffers::FlatBufferBuilder &_fbb, const SliceOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher) { + (void)_rehasher; + (void)_o; + struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const SliceOptionsT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va; + return tflite::CreateSliceOptions( + _fbb); +} + +inline TransposeConvOptionsT *TransposeConvOptions::UnPack(const flatbuffers::resolver_function_t *_resolver) const { + auto _o = std::unique_ptr(new TransposeConvOptionsT()); + UnPackTo(_o.get(), _resolver); + return _o.release(); +} + +inline void TransposeConvOptions::UnPackTo(TransposeConvOptionsT *_o, const flatbuffers::resolver_function_t *_resolver) const { + (void)_o; + (void)_resolver; + { auto _e = padding(); _o->padding = _e; } + { auto _e = stride_w(); _o->stride_w = _e; } + { auto _e = stride_h(); _o->stride_h = _e; } + { auto _e = fused_activation_function(); _o->fused_activation_function = _e; } +} + +inline flatbuffers::Offset TransposeConvOptions::Pack(flatbuffers::FlatBufferBuilder &_fbb, const TransposeConvOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher) { + return CreateTransposeConvOptions(_fbb, 
_o, _rehasher); +} + +inline flatbuffers::Offset CreateTransposeConvOptions(flatbuffers::FlatBufferBuilder &_fbb, const TransposeConvOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher) { + (void)_rehasher; + (void)_o; + struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const TransposeConvOptionsT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va; + auto _padding = _o->padding; + auto _stride_w = _o->stride_w; + auto _stride_h = _o->stride_h; + auto _fused_activation_function = _o->fused_activation_function; + return tflite::CreateTransposeConvOptions( + _fbb, + _padding, + _stride_w, + _stride_h, + _fused_activation_function); +} + +inline ExpandDimsOptionsT *ExpandDimsOptions::UnPack(const flatbuffers::resolver_function_t *_resolver) const { + auto _o = std::unique_ptr(new ExpandDimsOptionsT()); + UnPackTo(_o.get(), _resolver); + return _o.release(); +} + +inline void ExpandDimsOptions::UnPackTo(ExpandDimsOptionsT *_o, const flatbuffers::resolver_function_t *_resolver) const { + (void)_o; + (void)_resolver; +} + +inline flatbuffers::Offset ExpandDimsOptions::Pack(flatbuffers::FlatBufferBuilder &_fbb, const ExpandDimsOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher) { + return CreateExpandDimsOptions(_fbb, _o, _rehasher); +} + +inline flatbuffers::Offset CreateExpandDimsOptions(flatbuffers::FlatBufferBuilder &_fbb, const ExpandDimsOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher) { + (void)_rehasher; + (void)_o; + struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const ExpandDimsOptionsT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va; + return tflite::CreateExpandDimsOptions( + _fbb); +} + +inline SparseToDenseOptionsT *SparseToDenseOptions::UnPack(const flatbuffers::resolver_function_t *_resolver) const { + auto _o = std::unique_ptr(new SparseToDenseOptionsT()); + UnPackTo(_o.get(), _resolver); + 
return _o.release(); +} + +inline void SparseToDenseOptions::UnPackTo(SparseToDenseOptionsT *_o, const flatbuffers::resolver_function_t *_resolver) const { + (void)_o; + (void)_resolver; + { auto _e = validate_indices(); _o->validate_indices = _e; } +} + +inline flatbuffers::Offset SparseToDenseOptions::Pack(flatbuffers::FlatBufferBuilder &_fbb, const SparseToDenseOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher) { + return CreateSparseToDenseOptions(_fbb, _o, _rehasher); +} + +inline flatbuffers::Offset CreateSparseToDenseOptions(flatbuffers::FlatBufferBuilder &_fbb, const SparseToDenseOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher) { + (void)_rehasher; + (void)_o; + struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const SparseToDenseOptionsT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va; + auto _validate_indices = _o->validate_indices; + return tflite::CreateSparseToDenseOptions( + _fbb, + _validate_indices); +} + +inline EqualOptionsT *EqualOptions::UnPack(const flatbuffers::resolver_function_t *_resolver) const { + auto _o = std::unique_ptr(new EqualOptionsT()); + UnPackTo(_o.get(), _resolver); + return _o.release(); +} + +inline void EqualOptions::UnPackTo(EqualOptionsT *_o, const flatbuffers::resolver_function_t *_resolver) const { + (void)_o; + (void)_resolver; +} + +inline flatbuffers::Offset EqualOptions::Pack(flatbuffers::FlatBufferBuilder &_fbb, const EqualOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher) { + return CreateEqualOptions(_fbb, _o, _rehasher); +} + +inline flatbuffers::Offset CreateEqualOptions(flatbuffers::FlatBufferBuilder &_fbb, const EqualOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher) { + (void)_rehasher; + (void)_o; + struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const EqualOptionsT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va; + return 
tflite::CreateEqualOptions( + _fbb); +} + +inline NotEqualOptionsT *NotEqualOptions::UnPack(const flatbuffers::resolver_function_t *_resolver) const { + auto _o = std::unique_ptr(new NotEqualOptionsT()); + UnPackTo(_o.get(), _resolver); + return _o.release(); +} + +inline void NotEqualOptions::UnPackTo(NotEqualOptionsT *_o, const flatbuffers::resolver_function_t *_resolver) const { + (void)_o; + (void)_resolver; +} + +inline flatbuffers::Offset NotEqualOptions::Pack(flatbuffers::FlatBufferBuilder &_fbb, const NotEqualOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher) { + return CreateNotEqualOptions(_fbb, _o, _rehasher); +} + +inline flatbuffers::Offset CreateNotEqualOptions(flatbuffers::FlatBufferBuilder &_fbb, const NotEqualOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher) { + (void)_rehasher; + (void)_o; + struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const NotEqualOptionsT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va; + return tflite::CreateNotEqualOptions( + _fbb); +} + +inline ShapeOptionsT *ShapeOptions::UnPack(const flatbuffers::resolver_function_t *_resolver) const { + auto _o = std::unique_ptr(new ShapeOptionsT()); + UnPackTo(_o.get(), _resolver); + return _o.release(); +} + +inline void ShapeOptions::UnPackTo(ShapeOptionsT *_o, const flatbuffers::resolver_function_t *_resolver) const { + (void)_o; + (void)_resolver; + { auto _e = out_type(); _o->out_type = _e; } +} + +inline flatbuffers::Offset ShapeOptions::Pack(flatbuffers::FlatBufferBuilder &_fbb, const ShapeOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher) { + return CreateShapeOptions(_fbb, _o, _rehasher); +} + +inline flatbuffers::Offset CreateShapeOptions(flatbuffers::FlatBufferBuilder &_fbb, const ShapeOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher) { + (void)_rehasher; + (void)_o; + struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const ShapeOptionsT* __o; 
const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va; + auto _out_type = _o->out_type; + return tflite::CreateShapeOptions( + _fbb, + _out_type); +} + +inline RankOptionsT *RankOptions::UnPack(const flatbuffers::resolver_function_t *_resolver) const { + auto _o = std::unique_ptr(new RankOptionsT()); + UnPackTo(_o.get(), _resolver); + return _o.release(); +} + +inline void RankOptions::UnPackTo(RankOptionsT *_o, const flatbuffers::resolver_function_t *_resolver) const { + (void)_o; + (void)_resolver; +} + +inline flatbuffers::Offset RankOptions::Pack(flatbuffers::FlatBufferBuilder &_fbb, const RankOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher) { + return CreateRankOptions(_fbb, _o, _rehasher); +} + +inline flatbuffers::Offset CreateRankOptions(flatbuffers::FlatBufferBuilder &_fbb, const RankOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher) { + (void)_rehasher; + (void)_o; + struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const RankOptionsT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va; + return tflite::CreateRankOptions( + _fbb); +} + +inline PowOptionsT *PowOptions::UnPack(const flatbuffers::resolver_function_t *_resolver) const { + auto _o = std::unique_ptr(new PowOptionsT()); + UnPackTo(_o.get(), _resolver); + return _o.release(); +} + +inline void PowOptions::UnPackTo(PowOptionsT *_o, const flatbuffers::resolver_function_t *_resolver) const { + (void)_o; + (void)_resolver; +} + +inline flatbuffers::Offset PowOptions::Pack(flatbuffers::FlatBufferBuilder &_fbb, const PowOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher) { + return CreatePowOptions(_fbb, _o, _rehasher); +} + +inline flatbuffers::Offset CreatePowOptions(flatbuffers::FlatBufferBuilder &_fbb, const PowOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher) { + (void)_rehasher; + (void)_o; + struct _VectorArgs { 
flatbuffers::FlatBufferBuilder *__fbb; const PowOptionsT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va; + return tflite::CreatePowOptions( + _fbb); +} + +inline FakeQuantOptionsT *FakeQuantOptions::UnPack(const flatbuffers::resolver_function_t *_resolver) const { + auto _o = std::unique_ptr(new FakeQuantOptionsT()); + UnPackTo(_o.get(), _resolver); + return _o.release(); +} + +inline void FakeQuantOptions::UnPackTo(FakeQuantOptionsT *_o, const flatbuffers::resolver_function_t *_resolver) const { + (void)_o; + (void)_resolver; + { auto _e = min(); _o->min = _e; } + { auto _e = max(); _o->max = _e; } + { auto _e = num_bits(); _o->num_bits = _e; } + { auto _e = narrow_range(); _o->narrow_range = _e; } +} + +inline flatbuffers::Offset FakeQuantOptions::Pack(flatbuffers::FlatBufferBuilder &_fbb, const FakeQuantOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher) { + return CreateFakeQuantOptions(_fbb, _o, _rehasher); +} + +inline flatbuffers::Offset CreateFakeQuantOptions(flatbuffers::FlatBufferBuilder &_fbb, const FakeQuantOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher) { + (void)_rehasher; + (void)_o; + struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const FakeQuantOptionsT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va; + auto _min = _o->min; + auto _max = _o->max; + auto _num_bits = _o->num_bits; + auto _narrow_range = _o->narrow_range; + return tflite::CreateFakeQuantOptions( + _fbb, + _min, + _max, + _num_bits, + _narrow_range); +} + +inline PackOptionsT *PackOptions::UnPack(const flatbuffers::resolver_function_t *_resolver) const { + auto _o = std::unique_ptr(new PackOptionsT()); + UnPackTo(_o.get(), _resolver); + return _o.release(); +} + +inline void PackOptions::UnPackTo(PackOptionsT *_o, const flatbuffers::resolver_function_t *_resolver) const { + (void)_o; + (void)_resolver; + { auto _e = values_count(); 
_o->values_count = _e; } + { auto _e = axis(); _o->axis = _e; } +} + +inline flatbuffers::Offset PackOptions::Pack(flatbuffers::FlatBufferBuilder &_fbb, const PackOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher) { + return CreatePackOptions(_fbb, _o, _rehasher); +} + +inline flatbuffers::Offset CreatePackOptions(flatbuffers::FlatBufferBuilder &_fbb, const PackOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher) { + (void)_rehasher; + (void)_o; + struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const PackOptionsT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va; + auto _values_count = _o->values_count; + auto _axis = _o->axis; + return tflite::CreatePackOptions( + _fbb, + _values_count, + _axis); +} + +inline LogicalOrOptionsT *LogicalOrOptions::UnPack(const flatbuffers::resolver_function_t *_resolver) const { + auto _o = std::unique_ptr(new LogicalOrOptionsT()); + UnPackTo(_o.get(), _resolver); + return _o.release(); +} + +inline void LogicalOrOptions::UnPackTo(LogicalOrOptionsT *_o, const flatbuffers::resolver_function_t *_resolver) const { + (void)_o; + (void)_resolver; +} + +inline flatbuffers::Offset LogicalOrOptions::Pack(flatbuffers::FlatBufferBuilder &_fbb, const LogicalOrOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher) { + return CreateLogicalOrOptions(_fbb, _o, _rehasher); +} + +inline flatbuffers::Offset CreateLogicalOrOptions(flatbuffers::FlatBufferBuilder &_fbb, const LogicalOrOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher) { + (void)_rehasher; + (void)_o; + struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const LogicalOrOptionsT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va; + return tflite::CreateLogicalOrOptions( + _fbb); +} + +inline OneHotOptionsT *OneHotOptions::UnPack(const flatbuffers::resolver_function_t *_resolver) const { + auto _o = std::unique_ptr(new 
OneHotOptionsT()); + UnPackTo(_o.get(), _resolver); + return _o.release(); +} + +inline void OneHotOptions::UnPackTo(OneHotOptionsT *_o, const flatbuffers::resolver_function_t *_resolver) const { + (void)_o; + (void)_resolver; + { auto _e = axis(); _o->axis = _e; } +} + +inline flatbuffers::Offset OneHotOptions::Pack(flatbuffers::FlatBufferBuilder &_fbb, const OneHotOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher) { + return CreateOneHotOptions(_fbb, _o, _rehasher); +} + +inline flatbuffers::Offset CreateOneHotOptions(flatbuffers::FlatBufferBuilder &_fbb, const OneHotOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher) { + (void)_rehasher; + (void)_o; + struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const OneHotOptionsT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va; + auto _axis = _o->axis; + return tflite::CreateOneHotOptions( + _fbb, + _axis); +} + +inline AbsOptionsT *AbsOptions::UnPack(const flatbuffers::resolver_function_t *_resolver) const { + auto _o = std::unique_ptr(new AbsOptionsT()); + UnPackTo(_o.get(), _resolver); + return _o.release(); +} + +inline void AbsOptions::UnPackTo(AbsOptionsT *_o, const flatbuffers::resolver_function_t *_resolver) const { + (void)_o; + (void)_resolver; +} + +inline flatbuffers::Offset AbsOptions::Pack(flatbuffers::FlatBufferBuilder &_fbb, const AbsOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher) { + return CreateAbsOptions(_fbb, _o, _rehasher); +} + +inline flatbuffers::Offset CreateAbsOptions(flatbuffers::FlatBufferBuilder &_fbb, const AbsOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher) { + (void)_rehasher; + (void)_o; + struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const AbsOptionsT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va; + return tflite::CreateAbsOptions( + _fbb); +} + +inline HardSwishOptionsT 
*HardSwishOptions::UnPack(const flatbuffers::resolver_function_t *_resolver) const { + auto _o = std::unique_ptr(new HardSwishOptionsT()); + UnPackTo(_o.get(), _resolver); + return _o.release(); +} + +inline void HardSwishOptions::UnPackTo(HardSwishOptionsT *_o, const flatbuffers::resolver_function_t *_resolver) const { + (void)_o; + (void)_resolver; +} + +inline flatbuffers::Offset HardSwishOptions::Pack(flatbuffers::FlatBufferBuilder &_fbb, const HardSwishOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher) { + return CreateHardSwishOptions(_fbb, _o, _rehasher); +} + +inline flatbuffers::Offset CreateHardSwishOptions(flatbuffers::FlatBufferBuilder &_fbb, const HardSwishOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher) { + (void)_rehasher; + (void)_o; + struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const HardSwishOptionsT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va; + return tflite::CreateHardSwishOptions( + _fbb); +} + +inline LogicalAndOptionsT *LogicalAndOptions::UnPack(const flatbuffers::resolver_function_t *_resolver) const { + auto _o = std::unique_ptr(new LogicalAndOptionsT()); + UnPackTo(_o.get(), _resolver); + return _o.release(); +} + +inline void LogicalAndOptions::UnPackTo(LogicalAndOptionsT *_o, const flatbuffers::resolver_function_t *_resolver) const { + (void)_o; + (void)_resolver; +} + +inline flatbuffers::Offset LogicalAndOptions::Pack(flatbuffers::FlatBufferBuilder &_fbb, const LogicalAndOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher) { + return CreateLogicalAndOptions(_fbb, _o, _rehasher); +} + +inline flatbuffers::Offset CreateLogicalAndOptions(flatbuffers::FlatBufferBuilder &_fbb, const LogicalAndOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher) { + (void)_rehasher; + (void)_o; + struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const LogicalAndOptionsT* __o; const flatbuffers::rehasher_function_t 
*__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va; + return tflite::CreateLogicalAndOptions( + _fbb); +} + +inline LogicalNotOptionsT *LogicalNotOptions::UnPack(const flatbuffers::resolver_function_t *_resolver) const { + auto _o = std::unique_ptr(new LogicalNotOptionsT()); + UnPackTo(_o.get(), _resolver); + return _o.release(); +} + +inline void LogicalNotOptions::UnPackTo(LogicalNotOptionsT *_o, const flatbuffers::resolver_function_t *_resolver) const { + (void)_o; + (void)_resolver; +} + +inline flatbuffers::Offset LogicalNotOptions::Pack(flatbuffers::FlatBufferBuilder &_fbb, const LogicalNotOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher) { + return CreateLogicalNotOptions(_fbb, _o, _rehasher); +} + +inline flatbuffers::Offset CreateLogicalNotOptions(flatbuffers::FlatBufferBuilder &_fbb, const LogicalNotOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher) { + (void)_rehasher; + (void)_o; + struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const LogicalNotOptionsT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va; + return tflite::CreateLogicalNotOptions( + _fbb); +} + +inline UnpackOptionsT *UnpackOptions::UnPack(const flatbuffers::resolver_function_t *_resolver) const { + auto _o = std::unique_ptr(new UnpackOptionsT()); + UnPackTo(_o.get(), _resolver); + return _o.release(); +} + +inline void UnpackOptions::UnPackTo(UnpackOptionsT *_o, const flatbuffers::resolver_function_t *_resolver) const { + (void)_o; + (void)_resolver; + { auto _e = num(); _o->num = _e; } + { auto _e = axis(); _o->axis = _e; } +} + +inline flatbuffers::Offset UnpackOptions::Pack(flatbuffers::FlatBufferBuilder &_fbb, const UnpackOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher) { + return CreateUnpackOptions(_fbb, _o, _rehasher); +} + +inline flatbuffers::Offset CreateUnpackOptions(flatbuffers::FlatBufferBuilder &_fbb, const UnpackOptionsT *_o, const 
flatbuffers::rehasher_function_t *_rehasher) { + (void)_rehasher; + (void)_o; + struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const UnpackOptionsT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va; + auto _num = _o->num; + auto _axis = _o->axis; + return tflite::CreateUnpackOptions( + _fbb, + _num, + _axis); +} + +inline FloorDivOptionsT *FloorDivOptions::UnPack(const flatbuffers::resolver_function_t *_resolver) const { + auto _o = std::unique_ptr(new FloorDivOptionsT()); + UnPackTo(_o.get(), _resolver); + return _o.release(); +} + +inline void FloorDivOptions::UnPackTo(FloorDivOptionsT *_o, const flatbuffers::resolver_function_t *_resolver) const { + (void)_o; + (void)_resolver; +} + +inline flatbuffers::Offset FloorDivOptions::Pack(flatbuffers::FlatBufferBuilder &_fbb, const FloorDivOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher) { + return CreateFloorDivOptions(_fbb, _o, _rehasher); +} + +inline flatbuffers::Offset CreateFloorDivOptions(flatbuffers::FlatBufferBuilder &_fbb, const FloorDivOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher) { + (void)_rehasher; + (void)_o; + struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const FloorDivOptionsT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va; + return tflite::CreateFloorDivOptions( + _fbb); +} + +inline SquareOptionsT *SquareOptions::UnPack(const flatbuffers::resolver_function_t *_resolver) const { + auto _o = std::unique_ptr(new SquareOptionsT()); + UnPackTo(_o.get(), _resolver); + return _o.release(); +} + +inline void SquareOptions::UnPackTo(SquareOptionsT *_o, const flatbuffers::resolver_function_t *_resolver) const { + (void)_o; + (void)_resolver; +} + +inline flatbuffers::Offset SquareOptions::Pack(flatbuffers::FlatBufferBuilder &_fbb, const SquareOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher) { + return CreateSquareOptions(_fbb, _o, 
_rehasher); +} + +inline flatbuffers::Offset CreateSquareOptions(flatbuffers::FlatBufferBuilder &_fbb, const SquareOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher) { + (void)_rehasher; + (void)_o; + struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const SquareOptionsT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va; + return tflite::CreateSquareOptions( + _fbb); +} + +inline ZerosLikeOptionsT *ZerosLikeOptions::UnPack(const flatbuffers::resolver_function_t *_resolver) const { + auto _o = std::unique_ptr(new ZerosLikeOptionsT()); + UnPackTo(_o.get(), _resolver); + return _o.release(); +} + +inline void ZerosLikeOptions::UnPackTo(ZerosLikeOptionsT *_o, const flatbuffers::resolver_function_t *_resolver) const { + (void)_o; + (void)_resolver; +} + +inline flatbuffers::Offset ZerosLikeOptions::Pack(flatbuffers::FlatBufferBuilder &_fbb, const ZerosLikeOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher) { + return CreateZerosLikeOptions(_fbb, _o, _rehasher); +} + +inline flatbuffers::Offset CreateZerosLikeOptions(flatbuffers::FlatBufferBuilder &_fbb, const ZerosLikeOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher) { + (void)_rehasher; + (void)_o; + struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const ZerosLikeOptionsT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va; + return tflite::CreateZerosLikeOptions( + _fbb); +} + +inline FillOptionsT *FillOptions::UnPack(const flatbuffers::resolver_function_t *_resolver) const { + auto _o = std::unique_ptr(new FillOptionsT()); + UnPackTo(_o.get(), _resolver); + return _o.release(); +} + +inline void FillOptions::UnPackTo(FillOptionsT *_o, const flatbuffers::resolver_function_t *_resolver) const { + (void)_o; + (void)_resolver; +} + +inline flatbuffers::Offset FillOptions::Pack(flatbuffers::FlatBufferBuilder &_fbb, const FillOptionsT* _o, const 
flatbuffers::rehasher_function_t *_rehasher) { + return CreateFillOptions(_fbb, _o, _rehasher); +} + +inline flatbuffers::Offset CreateFillOptions(flatbuffers::FlatBufferBuilder &_fbb, const FillOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher) { + (void)_rehasher; + (void)_o; + struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const FillOptionsT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va; + return tflite::CreateFillOptions( + _fbb); +} + +inline FloorModOptionsT *FloorModOptions::UnPack(const flatbuffers::resolver_function_t *_resolver) const { + auto _o = std::unique_ptr(new FloorModOptionsT()); + UnPackTo(_o.get(), _resolver); + return _o.release(); +} + +inline void FloorModOptions::UnPackTo(FloorModOptionsT *_o, const flatbuffers::resolver_function_t *_resolver) const { + (void)_o; + (void)_resolver; +} + +inline flatbuffers::Offset FloorModOptions::Pack(flatbuffers::FlatBufferBuilder &_fbb, const FloorModOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher) { + return CreateFloorModOptions(_fbb, _o, _rehasher); +} + +inline flatbuffers::Offset CreateFloorModOptions(flatbuffers::FlatBufferBuilder &_fbb, const FloorModOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher) { + (void)_rehasher; + (void)_o; + struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const FloorModOptionsT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va; + return tflite::CreateFloorModOptions( + _fbb); +} + +inline RangeOptionsT *RangeOptions::UnPack(const flatbuffers::resolver_function_t *_resolver) const { + auto _o = std::unique_ptr(new RangeOptionsT()); + UnPackTo(_o.get(), _resolver); + return _o.release(); +} + +inline void RangeOptions::UnPackTo(RangeOptionsT *_o, const flatbuffers::resolver_function_t *_resolver) const { + (void)_o; + (void)_resolver; +} + +inline flatbuffers::Offset 
RangeOptions::Pack(flatbuffers::FlatBufferBuilder &_fbb, const RangeOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher) { + return CreateRangeOptions(_fbb, _o, _rehasher); +} + +inline flatbuffers::Offset CreateRangeOptions(flatbuffers::FlatBufferBuilder &_fbb, const RangeOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher) { + (void)_rehasher; + (void)_o; + struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const RangeOptionsT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va; + return tflite::CreateRangeOptions( + _fbb); +} + +inline LeakyReluOptionsT *LeakyReluOptions::UnPack(const flatbuffers::resolver_function_t *_resolver) const { + auto _o = std::unique_ptr(new LeakyReluOptionsT()); + UnPackTo(_o.get(), _resolver); + return _o.release(); +} + +inline void LeakyReluOptions::UnPackTo(LeakyReluOptionsT *_o, const flatbuffers::resolver_function_t *_resolver) const { + (void)_o; + (void)_resolver; + { auto _e = alpha(); _o->alpha = _e; } +} + +inline flatbuffers::Offset LeakyReluOptions::Pack(flatbuffers::FlatBufferBuilder &_fbb, const LeakyReluOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher) { + return CreateLeakyReluOptions(_fbb, _o, _rehasher); +} + +inline flatbuffers::Offset CreateLeakyReluOptions(flatbuffers::FlatBufferBuilder &_fbb, const LeakyReluOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher) { + (void)_rehasher; + (void)_o; + struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const LeakyReluOptionsT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va; + auto _alpha = _o->alpha; + return tflite::CreateLeakyReluOptions( + _fbb, + _alpha); +} + +inline SquaredDifferenceOptionsT *SquaredDifferenceOptions::UnPack(const flatbuffers::resolver_function_t *_resolver) const { + auto _o = std::unique_ptr(new SquaredDifferenceOptionsT()); + UnPackTo(_o.get(), _resolver); + return 
_o.release(); +} + +inline void SquaredDifferenceOptions::UnPackTo(SquaredDifferenceOptionsT *_o, const flatbuffers::resolver_function_t *_resolver) const { + (void)_o; + (void)_resolver; +} + +inline flatbuffers::Offset SquaredDifferenceOptions::Pack(flatbuffers::FlatBufferBuilder &_fbb, const SquaredDifferenceOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher) { + return CreateSquaredDifferenceOptions(_fbb, _o, _rehasher); +} + +inline flatbuffers::Offset CreateSquaredDifferenceOptions(flatbuffers::FlatBufferBuilder &_fbb, const SquaredDifferenceOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher) { + (void)_rehasher; + (void)_o; + struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const SquaredDifferenceOptionsT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va; + return tflite::CreateSquaredDifferenceOptions( + _fbb); +} + +inline MirrorPadOptionsT *MirrorPadOptions::UnPack(const flatbuffers::resolver_function_t *_resolver) const { + auto _o = std::unique_ptr(new MirrorPadOptionsT()); + UnPackTo(_o.get(), _resolver); + return _o.release(); +} + +inline void MirrorPadOptions::UnPackTo(MirrorPadOptionsT *_o, const flatbuffers::resolver_function_t *_resolver) const { + (void)_o; + (void)_resolver; + { auto _e = mode(); _o->mode = _e; } +} + +inline flatbuffers::Offset MirrorPadOptions::Pack(flatbuffers::FlatBufferBuilder &_fbb, const MirrorPadOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher) { + return CreateMirrorPadOptions(_fbb, _o, _rehasher); +} + +inline flatbuffers::Offset CreateMirrorPadOptions(flatbuffers::FlatBufferBuilder &_fbb, const MirrorPadOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher) { + (void)_rehasher; + (void)_o; + struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const MirrorPadOptionsT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va; + auto _mode = _o->mode; + 
return tflite::CreateMirrorPadOptions( + _fbb, + _mode); +} + +inline UniqueOptionsT *UniqueOptions::UnPack(const flatbuffers::resolver_function_t *_resolver) const { + auto _o = std::unique_ptr(new UniqueOptionsT()); + UnPackTo(_o.get(), _resolver); + return _o.release(); +} + +inline void UniqueOptions::UnPackTo(UniqueOptionsT *_o, const flatbuffers::resolver_function_t *_resolver) const { + (void)_o; + (void)_resolver; + { auto _e = idx_out_type(); _o->idx_out_type = _e; } +} + +inline flatbuffers::Offset UniqueOptions::Pack(flatbuffers::FlatBufferBuilder &_fbb, const UniqueOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher) { + return CreateUniqueOptions(_fbb, _o, _rehasher); +} + +inline flatbuffers::Offset CreateUniqueOptions(flatbuffers::FlatBufferBuilder &_fbb, const UniqueOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher) { + (void)_rehasher; + (void)_o; + struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const UniqueOptionsT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va; + auto _idx_out_type = _o->idx_out_type; + return tflite::CreateUniqueOptions( + _fbb, + _idx_out_type); +} + +inline ReverseV2OptionsT *ReverseV2Options::UnPack(const flatbuffers::resolver_function_t *_resolver) const { + auto _o = std::unique_ptr(new ReverseV2OptionsT()); + UnPackTo(_o.get(), _resolver); + return _o.release(); +} + +inline void ReverseV2Options::UnPackTo(ReverseV2OptionsT *_o, const flatbuffers::resolver_function_t *_resolver) const { + (void)_o; + (void)_resolver; +} + +inline flatbuffers::Offset ReverseV2Options::Pack(flatbuffers::FlatBufferBuilder &_fbb, const ReverseV2OptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher) { + return CreateReverseV2Options(_fbb, _o, _rehasher); +} + +inline flatbuffers::Offset CreateReverseV2Options(flatbuffers::FlatBufferBuilder &_fbb, const ReverseV2OptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher) { + 
(void)_rehasher; + (void)_o; + struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const ReverseV2OptionsT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va; + return tflite::CreateReverseV2Options( + _fbb); +} + +inline AddNOptionsT *AddNOptions::UnPack(const flatbuffers::resolver_function_t *_resolver) const { + auto _o = std::unique_ptr(new AddNOptionsT()); + UnPackTo(_o.get(), _resolver); + return _o.release(); +} + +inline void AddNOptions::UnPackTo(AddNOptionsT *_o, const flatbuffers::resolver_function_t *_resolver) const { + (void)_o; + (void)_resolver; +} + +inline flatbuffers::Offset AddNOptions::Pack(flatbuffers::FlatBufferBuilder &_fbb, const AddNOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher) { + return CreateAddNOptions(_fbb, _o, _rehasher); +} + +inline flatbuffers::Offset CreateAddNOptions(flatbuffers::FlatBufferBuilder &_fbb, const AddNOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher) { + (void)_rehasher; + (void)_o; + struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const AddNOptionsT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va; + return tflite::CreateAddNOptions( + _fbb); +} + +inline GatherNdOptionsT *GatherNdOptions::UnPack(const flatbuffers::resolver_function_t *_resolver) const { + auto _o = std::unique_ptr(new GatherNdOptionsT()); + UnPackTo(_o.get(), _resolver); + return _o.release(); +} + +inline void GatherNdOptions::UnPackTo(GatherNdOptionsT *_o, const flatbuffers::resolver_function_t *_resolver) const { + (void)_o; + (void)_resolver; +} + +inline flatbuffers::Offset GatherNdOptions::Pack(flatbuffers::FlatBufferBuilder &_fbb, const GatherNdOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher) { + return CreateGatherNdOptions(_fbb, _o, _rehasher); +} + +inline flatbuffers::Offset CreateGatherNdOptions(flatbuffers::FlatBufferBuilder &_fbb, const GatherNdOptionsT *_o, const 
flatbuffers::rehasher_function_t *_rehasher) { + (void)_rehasher; + (void)_o; + struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const GatherNdOptionsT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va; + return tflite::CreateGatherNdOptions( + _fbb); +} + +inline WhereOptionsT *WhereOptions::UnPack(const flatbuffers::resolver_function_t *_resolver) const { + auto _o = std::unique_ptr(new WhereOptionsT()); + UnPackTo(_o.get(), _resolver); + return _o.release(); +} + +inline void WhereOptions::UnPackTo(WhereOptionsT *_o, const flatbuffers::resolver_function_t *_resolver) const { + (void)_o; + (void)_resolver; +} + +inline flatbuffers::Offset WhereOptions::Pack(flatbuffers::FlatBufferBuilder &_fbb, const WhereOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher) { + return CreateWhereOptions(_fbb, _o, _rehasher); +} + +inline flatbuffers::Offset CreateWhereOptions(flatbuffers::FlatBufferBuilder &_fbb, const WhereOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher) { + (void)_rehasher; + (void)_o; + struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const WhereOptionsT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va; + return tflite::CreateWhereOptions( + _fbb); +} + +inline ReverseSequenceOptionsT *ReverseSequenceOptions::UnPack(const flatbuffers::resolver_function_t *_resolver) const { + auto _o = std::unique_ptr(new ReverseSequenceOptionsT()); + UnPackTo(_o.get(), _resolver); + return _o.release(); +} + +inline void ReverseSequenceOptions::UnPackTo(ReverseSequenceOptionsT *_o, const flatbuffers::resolver_function_t *_resolver) const { + (void)_o; + (void)_resolver; + { auto _e = seq_dim(); _o->seq_dim = _e; } + { auto _e = batch_dim(); _o->batch_dim = _e; } +} + +inline flatbuffers::Offset ReverseSequenceOptions::Pack(flatbuffers::FlatBufferBuilder &_fbb, const ReverseSequenceOptionsT* _o, const 
flatbuffers::rehasher_function_t *_rehasher) { + return CreateReverseSequenceOptions(_fbb, _o, _rehasher); +} + +inline flatbuffers::Offset CreateReverseSequenceOptions(flatbuffers::FlatBufferBuilder &_fbb, const ReverseSequenceOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher) { + (void)_rehasher; + (void)_o; + struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const ReverseSequenceOptionsT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va; + auto _seq_dim = _o->seq_dim; + auto _batch_dim = _o->batch_dim; + return tflite::CreateReverseSequenceOptions( + _fbb, + _seq_dim, + _batch_dim); +} + +inline MatrixDiagOptionsT *MatrixDiagOptions::UnPack(const flatbuffers::resolver_function_t *_resolver) const { + auto _o = std::unique_ptr(new MatrixDiagOptionsT()); + UnPackTo(_o.get(), _resolver); + return _o.release(); +} + +inline void MatrixDiagOptions::UnPackTo(MatrixDiagOptionsT *_o, const flatbuffers::resolver_function_t *_resolver) const { + (void)_o; + (void)_resolver; +} + +inline flatbuffers::Offset MatrixDiagOptions::Pack(flatbuffers::FlatBufferBuilder &_fbb, const MatrixDiagOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher) { + return CreateMatrixDiagOptions(_fbb, _o, _rehasher); +} + +inline flatbuffers::Offset CreateMatrixDiagOptions(flatbuffers::FlatBufferBuilder &_fbb, const MatrixDiagOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher) { + (void)_rehasher; + (void)_o; + struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const MatrixDiagOptionsT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va; + return tflite::CreateMatrixDiagOptions( + _fbb); +} + +inline QuantizeOptionsT *QuantizeOptions::UnPack(const flatbuffers::resolver_function_t *_resolver) const { + auto _o = std::unique_ptr(new QuantizeOptionsT()); + UnPackTo(_o.get(), _resolver); + return _o.release(); +} + +inline void 
QuantizeOptions::UnPackTo(QuantizeOptionsT *_o, const flatbuffers::resolver_function_t *_resolver) const { + (void)_o; + (void)_resolver; +} + +inline flatbuffers::Offset QuantizeOptions::Pack(flatbuffers::FlatBufferBuilder &_fbb, const QuantizeOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher) { + return CreateQuantizeOptions(_fbb, _o, _rehasher); +} + +inline flatbuffers::Offset CreateQuantizeOptions(flatbuffers::FlatBufferBuilder &_fbb, const QuantizeOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher) { + (void)_rehasher; + (void)_o; + struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const QuantizeOptionsT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va; + return tflite::CreateQuantizeOptions( + _fbb); +} + +inline MatrixSetDiagOptionsT *MatrixSetDiagOptions::UnPack(const flatbuffers::resolver_function_t *_resolver) const { + auto _o = std::unique_ptr(new MatrixSetDiagOptionsT()); + UnPackTo(_o.get(), _resolver); + return _o.release(); +} + +inline void MatrixSetDiagOptions::UnPackTo(MatrixSetDiagOptionsT *_o, const flatbuffers::resolver_function_t *_resolver) const { + (void)_o; + (void)_resolver; +} + +inline flatbuffers::Offset MatrixSetDiagOptions::Pack(flatbuffers::FlatBufferBuilder &_fbb, const MatrixSetDiagOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher) { + return CreateMatrixSetDiagOptions(_fbb, _o, _rehasher); +} + +inline flatbuffers::Offset CreateMatrixSetDiagOptions(flatbuffers::FlatBufferBuilder &_fbb, const MatrixSetDiagOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher) { + (void)_rehasher; + (void)_o; + struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const MatrixSetDiagOptionsT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va; + return tflite::CreateMatrixSetDiagOptions( + _fbb); +} + +inline IfOptionsT *IfOptions::UnPack(const flatbuffers::resolver_function_t 
*_resolver) const { + auto _o = std::unique_ptr(new IfOptionsT()); + UnPackTo(_o.get(), _resolver); + return _o.release(); +} + +inline void IfOptions::UnPackTo(IfOptionsT *_o, const flatbuffers::resolver_function_t *_resolver) const { + (void)_o; + (void)_resolver; + { auto _e = then_subgraph_index(); _o->then_subgraph_index = _e; } + { auto _e = else_subgraph_index(); _o->else_subgraph_index = _e; } +} + +inline flatbuffers::Offset IfOptions::Pack(flatbuffers::FlatBufferBuilder &_fbb, const IfOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher) { + return CreateIfOptions(_fbb, _o, _rehasher); +} + +inline flatbuffers::Offset CreateIfOptions(flatbuffers::FlatBufferBuilder &_fbb, const IfOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher) { + (void)_rehasher; + (void)_o; + struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const IfOptionsT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va; + auto _then_subgraph_index = _o->then_subgraph_index; + auto _else_subgraph_index = _o->else_subgraph_index; + return tflite::CreateIfOptions( + _fbb, + _then_subgraph_index, + _else_subgraph_index); +} + +inline CallOnceOptionsT *CallOnceOptions::UnPack(const flatbuffers::resolver_function_t *_resolver) const { + auto _o = std::unique_ptr(new CallOnceOptionsT()); + UnPackTo(_o.get(), _resolver); + return _o.release(); +} + +inline void CallOnceOptions::UnPackTo(CallOnceOptionsT *_o, const flatbuffers::resolver_function_t *_resolver) const { + (void)_o; + (void)_resolver; + { auto _e = init_subgraph_index(); _o->init_subgraph_index = _e; } +} + +inline flatbuffers::Offset CallOnceOptions::Pack(flatbuffers::FlatBufferBuilder &_fbb, const CallOnceOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher) { + return CreateCallOnceOptions(_fbb, _o, _rehasher); +} + +inline flatbuffers::Offset CreateCallOnceOptions(flatbuffers::FlatBufferBuilder &_fbb, const CallOnceOptionsT *_o, const 
flatbuffers::rehasher_function_t *_rehasher) { + (void)_rehasher; + (void)_o; + struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const CallOnceOptionsT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va; + auto _init_subgraph_index = _o->init_subgraph_index; + return tflite::CreateCallOnceOptions( + _fbb, + _init_subgraph_index); +} + +inline WhileOptionsT *WhileOptions::UnPack(const flatbuffers::resolver_function_t *_resolver) const { + auto _o = std::unique_ptr(new WhileOptionsT()); + UnPackTo(_o.get(), _resolver); + return _o.release(); +} + +inline void WhileOptions::UnPackTo(WhileOptionsT *_o, const flatbuffers::resolver_function_t *_resolver) const { + (void)_o; + (void)_resolver; + { auto _e = cond_subgraph_index(); _o->cond_subgraph_index = _e; } + { auto _e = body_subgraph_index(); _o->body_subgraph_index = _e; } +} + +inline flatbuffers::Offset WhileOptions::Pack(flatbuffers::FlatBufferBuilder &_fbb, const WhileOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher) { + return CreateWhileOptions(_fbb, _o, _rehasher); +} + +inline flatbuffers::Offset CreateWhileOptions(flatbuffers::FlatBufferBuilder &_fbb, const WhileOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher) { + (void)_rehasher; + (void)_o; + struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const WhileOptionsT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va; + auto _cond_subgraph_index = _o->cond_subgraph_index; + auto _body_subgraph_index = _o->body_subgraph_index; + return tflite::CreateWhileOptions( + _fbb, + _cond_subgraph_index, + _body_subgraph_index); +} + +inline NonMaxSuppressionV4OptionsT *NonMaxSuppressionV4Options::UnPack(const flatbuffers::resolver_function_t *_resolver) const { + auto _o = std::unique_ptr(new NonMaxSuppressionV4OptionsT()); + UnPackTo(_o.get(), _resolver); + return _o.release(); +} + +inline void 
NonMaxSuppressionV4Options::UnPackTo(NonMaxSuppressionV4OptionsT *_o, const flatbuffers::resolver_function_t *_resolver) const { + (void)_o; + (void)_resolver; +} + +inline flatbuffers::Offset NonMaxSuppressionV4Options::Pack(flatbuffers::FlatBufferBuilder &_fbb, const NonMaxSuppressionV4OptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher) { + return CreateNonMaxSuppressionV4Options(_fbb, _o, _rehasher); +} + +inline flatbuffers::Offset CreateNonMaxSuppressionV4Options(flatbuffers::FlatBufferBuilder &_fbb, const NonMaxSuppressionV4OptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher) { + (void)_rehasher; + (void)_o; + struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const NonMaxSuppressionV4OptionsT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va; + return tflite::CreateNonMaxSuppressionV4Options( + _fbb); +} + +inline NonMaxSuppressionV5OptionsT *NonMaxSuppressionV5Options::UnPack(const flatbuffers::resolver_function_t *_resolver) const { + auto _o = std::unique_ptr(new NonMaxSuppressionV5OptionsT()); + UnPackTo(_o.get(), _resolver); + return _o.release(); +} + +inline void NonMaxSuppressionV5Options::UnPackTo(NonMaxSuppressionV5OptionsT *_o, const flatbuffers::resolver_function_t *_resolver) const { + (void)_o; + (void)_resolver; +} + +inline flatbuffers::Offset NonMaxSuppressionV5Options::Pack(flatbuffers::FlatBufferBuilder &_fbb, const NonMaxSuppressionV5OptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher) { + return CreateNonMaxSuppressionV5Options(_fbb, _o, _rehasher); +} + +inline flatbuffers::Offset CreateNonMaxSuppressionV5Options(flatbuffers::FlatBufferBuilder &_fbb, const NonMaxSuppressionV5OptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher) { + (void)_rehasher; + (void)_o; + struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const NonMaxSuppressionV5OptionsT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, 
_o, _rehasher}; (void)_va; + return tflite::CreateNonMaxSuppressionV5Options( + _fbb); +} + +inline ScatterNdOptionsT *ScatterNdOptions::UnPack(const flatbuffers::resolver_function_t *_resolver) const { + auto _o = std::unique_ptr(new ScatterNdOptionsT()); + UnPackTo(_o.get(), _resolver); + return _o.release(); +} + +inline void ScatterNdOptions::UnPackTo(ScatterNdOptionsT *_o, const flatbuffers::resolver_function_t *_resolver) const { + (void)_o; + (void)_resolver; +} + +inline flatbuffers::Offset ScatterNdOptions::Pack(flatbuffers::FlatBufferBuilder &_fbb, const ScatterNdOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher) { + return CreateScatterNdOptions(_fbb, _o, _rehasher); +} + +inline flatbuffers::Offset CreateScatterNdOptions(flatbuffers::FlatBufferBuilder &_fbb, const ScatterNdOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher) { + (void)_rehasher; + (void)_o; + struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const ScatterNdOptionsT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va; + return tflite::CreateScatterNdOptions( + _fbb); +} + +inline SelectV2OptionsT *SelectV2Options::UnPack(const flatbuffers::resolver_function_t *_resolver) const { + auto _o = std::unique_ptr(new SelectV2OptionsT()); + UnPackTo(_o.get(), _resolver); + return _o.release(); +} + +inline void SelectV2Options::UnPackTo(SelectV2OptionsT *_o, const flatbuffers::resolver_function_t *_resolver) const { + (void)_o; + (void)_resolver; +} + +inline flatbuffers::Offset SelectV2Options::Pack(flatbuffers::FlatBufferBuilder &_fbb, const SelectV2OptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher) { + return CreateSelectV2Options(_fbb, _o, _rehasher); +} + +inline flatbuffers::Offset CreateSelectV2Options(flatbuffers::FlatBufferBuilder &_fbb, const SelectV2OptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher) { + (void)_rehasher; + (void)_o; + struct _VectorArgs { 
flatbuffers::FlatBufferBuilder *__fbb; const SelectV2OptionsT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va; + return tflite::CreateSelectV2Options( + _fbb); +} + +inline DensifyOptionsT *DensifyOptions::UnPack(const flatbuffers::resolver_function_t *_resolver) const { + auto _o = std::unique_ptr(new DensifyOptionsT()); + UnPackTo(_o.get(), _resolver); + return _o.release(); +} + +inline void DensifyOptions::UnPackTo(DensifyOptionsT *_o, const flatbuffers::resolver_function_t *_resolver) const { + (void)_o; + (void)_resolver; +} + +inline flatbuffers::Offset DensifyOptions::Pack(flatbuffers::FlatBufferBuilder &_fbb, const DensifyOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher) { + return CreateDensifyOptions(_fbb, _o, _rehasher); +} + +inline flatbuffers::Offset CreateDensifyOptions(flatbuffers::FlatBufferBuilder &_fbb, const DensifyOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher) { + (void)_rehasher; + (void)_o; + struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const DensifyOptionsT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va; + return tflite::CreateDensifyOptions( + _fbb); +} + +inline SegmentSumOptionsT *SegmentSumOptions::UnPack(const flatbuffers::resolver_function_t *_resolver) const { + auto _o = std::unique_ptr(new SegmentSumOptionsT()); + UnPackTo(_o.get(), _resolver); + return _o.release(); +} + +inline void SegmentSumOptions::UnPackTo(SegmentSumOptionsT *_o, const flatbuffers::resolver_function_t *_resolver) const { + (void)_o; + (void)_resolver; +} + +inline flatbuffers::Offset SegmentSumOptions::Pack(flatbuffers::FlatBufferBuilder &_fbb, const SegmentSumOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher) { + return CreateSegmentSumOptions(_fbb, _o, _rehasher); +} + +inline flatbuffers::Offset CreateSegmentSumOptions(flatbuffers::FlatBufferBuilder &_fbb, const SegmentSumOptionsT *_o, 
const flatbuffers::rehasher_function_t *_rehasher) { + (void)_rehasher; + (void)_o; + struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const SegmentSumOptionsT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va; + return tflite::CreateSegmentSumOptions( + _fbb); +} + +inline BatchMatMulOptionsT *BatchMatMulOptions::UnPack(const flatbuffers::resolver_function_t *_resolver) const { + auto _o = std::unique_ptr(new BatchMatMulOptionsT()); + UnPackTo(_o.get(), _resolver); + return _o.release(); +} + +inline void BatchMatMulOptions::UnPackTo(BatchMatMulOptionsT *_o, const flatbuffers::resolver_function_t *_resolver) const { + (void)_o; + (void)_resolver; + { auto _e = adj_x(); _o->adj_x = _e; } + { auto _e = adj_y(); _o->adj_y = _e; } + { auto _e = asymmetric_quantize_inputs(); _o->asymmetric_quantize_inputs = _e; } +} + +inline flatbuffers::Offset BatchMatMulOptions::Pack(flatbuffers::FlatBufferBuilder &_fbb, const BatchMatMulOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher) { + return CreateBatchMatMulOptions(_fbb, _o, _rehasher); +} + +inline flatbuffers::Offset CreateBatchMatMulOptions(flatbuffers::FlatBufferBuilder &_fbb, const BatchMatMulOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher) { + (void)_rehasher; + (void)_o; + struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const BatchMatMulOptionsT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va; + auto _adj_x = _o->adj_x; + auto _adj_y = _o->adj_y; + auto _asymmetric_quantize_inputs = _o->asymmetric_quantize_inputs; + return tflite::CreateBatchMatMulOptions( + _fbb, + _adj_x, + _adj_y, + _asymmetric_quantize_inputs); +} + +inline CumsumOptionsT *CumsumOptions::UnPack(const flatbuffers::resolver_function_t *_resolver) const { + auto _o = std::unique_ptr(new CumsumOptionsT()); + UnPackTo(_o.get(), _resolver); + return _o.release(); +} + +inline void 
CumsumOptions::UnPackTo(CumsumOptionsT *_o, const flatbuffers::resolver_function_t *_resolver) const { + (void)_o; + (void)_resolver; + { auto _e = exclusive(); _o->exclusive = _e; } + { auto _e = reverse(); _o->reverse = _e; } +} + +inline flatbuffers::Offset CumsumOptions::Pack(flatbuffers::FlatBufferBuilder &_fbb, const CumsumOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher) { + return CreateCumsumOptions(_fbb, _o, _rehasher); +} + +inline flatbuffers::Offset CreateCumsumOptions(flatbuffers::FlatBufferBuilder &_fbb, const CumsumOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher) { + (void)_rehasher; + (void)_o; + struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const CumsumOptionsT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va; + auto _exclusive = _o->exclusive; + auto _reverse = _o->reverse; + return tflite::CreateCumsumOptions( + _fbb, + _exclusive, + _reverse); +} + +inline BroadcastToOptionsT *BroadcastToOptions::UnPack(const flatbuffers::resolver_function_t *_resolver) const { + auto _o = std::unique_ptr(new BroadcastToOptionsT()); + UnPackTo(_o.get(), _resolver); + return _o.release(); +} + +inline void BroadcastToOptions::UnPackTo(BroadcastToOptionsT *_o, const flatbuffers::resolver_function_t *_resolver) const { + (void)_o; + (void)_resolver; +} + +inline flatbuffers::Offset BroadcastToOptions::Pack(flatbuffers::FlatBufferBuilder &_fbb, const BroadcastToOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher) { + return CreateBroadcastToOptions(_fbb, _o, _rehasher); +} + +inline flatbuffers::Offset CreateBroadcastToOptions(flatbuffers::FlatBufferBuilder &_fbb, const BroadcastToOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher) { + (void)_rehasher; + (void)_o; + struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const BroadcastToOptionsT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; 
(void)_va; + return tflite::CreateBroadcastToOptions( + _fbb); +} + +inline Rfft2dOptionsT *Rfft2dOptions::UnPack(const flatbuffers::resolver_function_t *_resolver) const { + auto _o = std::unique_ptr(new Rfft2dOptionsT()); + UnPackTo(_o.get(), _resolver); + return _o.release(); +} + +inline void Rfft2dOptions::UnPackTo(Rfft2dOptionsT *_o, const flatbuffers::resolver_function_t *_resolver) const { + (void)_o; + (void)_resolver; +} + +inline flatbuffers::Offset Rfft2dOptions::Pack(flatbuffers::FlatBufferBuilder &_fbb, const Rfft2dOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher) { + return CreateRfft2dOptions(_fbb, _o, _rehasher); +} + +inline flatbuffers::Offset CreateRfft2dOptions(flatbuffers::FlatBufferBuilder &_fbb, const Rfft2dOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher) { + (void)_rehasher; + (void)_o; + struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const Rfft2dOptionsT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va; + return tflite::CreateRfft2dOptions( + _fbb); +} + +inline HashtableOptionsT *HashtableOptions::UnPack(const flatbuffers::resolver_function_t *_resolver) const { + auto _o = std::unique_ptr(new HashtableOptionsT()); + UnPackTo(_o.get(), _resolver); + return _o.release(); +} + +inline void HashtableOptions::UnPackTo(HashtableOptionsT *_o, const flatbuffers::resolver_function_t *_resolver) const { + (void)_o; + (void)_resolver; + { auto _e = table_id(); _o->table_id = _e; } + { auto _e = key_dtype(); _o->key_dtype = _e; } + { auto _e = value_dtype(); _o->value_dtype = _e; } +} + +inline flatbuffers::Offset HashtableOptions::Pack(flatbuffers::FlatBufferBuilder &_fbb, const HashtableOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher) { + return CreateHashtableOptions(_fbb, _o, _rehasher); +} + +inline flatbuffers::Offset CreateHashtableOptions(flatbuffers::FlatBufferBuilder &_fbb, const HashtableOptionsT *_o, const 
flatbuffers::rehasher_function_t *_rehasher) { + (void)_rehasher; + (void)_o; + struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const HashtableOptionsT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va; + auto _table_id = _o->table_id; + auto _key_dtype = _o->key_dtype; + auto _value_dtype = _o->value_dtype; + return tflite::CreateHashtableOptions( + _fbb, + _table_id, + _key_dtype, + _value_dtype); +} + +inline HashtableFindOptionsT *HashtableFindOptions::UnPack(const flatbuffers::resolver_function_t *_resolver) const { + auto _o = std::unique_ptr(new HashtableFindOptionsT()); + UnPackTo(_o.get(), _resolver); + return _o.release(); +} + +inline void HashtableFindOptions::UnPackTo(HashtableFindOptionsT *_o, const flatbuffers::resolver_function_t *_resolver) const { + (void)_o; + (void)_resolver; +} + +inline flatbuffers::Offset HashtableFindOptions::Pack(flatbuffers::FlatBufferBuilder &_fbb, const HashtableFindOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher) { + return CreateHashtableFindOptions(_fbb, _o, _rehasher); +} + +inline flatbuffers::Offset CreateHashtableFindOptions(flatbuffers::FlatBufferBuilder &_fbb, const HashtableFindOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher) { + (void)_rehasher; + (void)_o; + struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const HashtableFindOptionsT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va; + return tflite::CreateHashtableFindOptions( + _fbb); +} + +inline HashtableImportOptionsT *HashtableImportOptions::UnPack(const flatbuffers::resolver_function_t *_resolver) const { + auto _o = std::unique_ptr(new HashtableImportOptionsT()); + UnPackTo(_o.get(), _resolver); + return _o.release(); +} + +inline void HashtableImportOptions::UnPackTo(HashtableImportOptionsT *_o, const flatbuffers::resolver_function_t *_resolver) const { + (void)_o; + (void)_resolver; +} + +inline 
flatbuffers::Offset HashtableImportOptions::Pack(flatbuffers::FlatBufferBuilder &_fbb, const HashtableImportOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher) { + return CreateHashtableImportOptions(_fbb, _o, _rehasher); +} + +inline flatbuffers::Offset CreateHashtableImportOptions(flatbuffers::FlatBufferBuilder &_fbb, const HashtableImportOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher) { + (void)_rehasher; + (void)_o; + struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const HashtableImportOptionsT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va; + return tflite::CreateHashtableImportOptions( + _fbb); +} + +inline HashtableSizeOptionsT *HashtableSizeOptions::UnPack(const flatbuffers::resolver_function_t *_resolver) const { + auto _o = std::unique_ptr(new HashtableSizeOptionsT()); + UnPackTo(_o.get(), _resolver); + return _o.release(); +} + +inline void HashtableSizeOptions::UnPackTo(HashtableSizeOptionsT *_o, const flatbuffers::resolver_function_t *_resolver) const { + (void)_o; + (void)_resolver; +} + +inline flatbuffers::Offset HashtableSizeOptions::Pack(flatbuffers::FlatBufferBuilder &_fbb, const HashtableSizeOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher) { + return CreateHashtableSizeOptions(_fbb, _o, _rehasher); +} + +inline flatbuffers::Offset CreateHashtableSizeOptions(flatbuffers::FlatBufferBuilder &_fbb, const HashtableSizeOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher) { + (void)_rehasher; + (void)_o; + struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const HashtableSizeOptionsT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va; + return tflite::CreateHashtableSizeOptions( + _fbb); +} + +inline VarHandleOptionsT *VarHandleOptions::UnPack(const flatbuffers::resolver_function_t *_resolver) const { + auto _o = std::unique_ptr(new VarHandleOptionsT()); + 
UnPackTo(_o.get(), _resolver); + return _o.release(); +} + +inline void VarHandleOptions::UnPackTo(VarHandleOptionsT *_o, const flatbuffers::resolver_function_t *_resolver) const { + (void)_o; + (void)_resolver; + { auto _e = container(); if (_e) _o->container = _e->str(); } + { auto _e = shared_name(); if (_e) _o->shared_name = _e->str(); } +} + +inline flatbuffers::Offset VarHandleOptions::Pack(flatbuffers::FlatBufferBuilder &_fbb, const VarHandleOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher) { + return CreateVarHandleOptions(_fbb, _o, _rehasher); +} + +inline flatbuffers::Offset CreateVarHandleOptions(flatbuffers::FlatBufferBuilder &_fbb, const VarHandleOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher) { + (void)_rehasher; + (void)_o; + struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const VarHandleOptionsT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va; + auto _container = _o->container.empty() ? 0 : _fbb.CreateString(_o->container); + auto _shared_name = _o->shared_name.empty() ? 
0 : _fbb.CreateString(_o->shared_name); + return tflite::CreateVarHandleOptions( + _fbb, + _container, + _shared_name); +} + +inline ReadVariableOptionsT *ReadVariableOptions::UnPack(const flatbuffers::resolver_function_t *_resolver) const { + auto _o = std::unique_ptr(new ReadVariableOptionsT()); + UnPackTo(_o.get(), _resolver); + return _o.release(); +} + +inline void ReadVariableOptions::UnPackTo(ReadVariableOptionsT *_o, const flatbuffers::resolver_function_t *_resolver) const { + (void)_o; + (void)_resolver; +} + +inline flatbuffers::Offset ReadVariableOptions::Pack(flatbuffers::FlatBufferBuilder &_fbb, const ReadVariableOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher) { + return CreateReadVariableOptions(_fbb, _o, _rehasher); +} + +inline flatbuffers::Offset CreateReadVariableOptions(flatbuffers::FlatBufferBuilder &_fbb, const ReadVariableOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher) { + (void)_rehasher; + (void)_o; + struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const ReadVariableOptionsT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va; + return tflite::CreateReadVariableOptions( + _fbb); +} + +inline AssignVariableOptionsT *AssignVariableOptions::UnPack(const flatbuffers::resolver_function_t *_resolver) const { + auto _o = std::unique_ptr(new AssignVariableOptionsT()); + UnPackTo(_o.get(), _resolver); + return _o.release(); +} + +inline void AssignVariableOptions::UnPackTo(AssignVariableOptionsT *_o, const flatbuffers::resolver_function_t *_resolver) const { + (void)_o; + (void)_resolver; +} + +inline flatbuffers::Offset AssignVariableOptions::Pack(flatbuffers::FlatBufferBuilder &_fbb, const AssignVariableOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher) { + return CreateAssignVariableOptions(_fbb, _o, _rehasher); +} + +inline flatbuffers::Offset CreateAssignVariableOptions(flatbuffers::FlatBufferBuilder &_fbb, const AssignVariableOptionsT 
*_o, const flatbuffers::rehasher_function_t *_rehasher) { + (void)_rehasher; + (void)_o; + struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const AssignVariableOptionsT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va; + return tflite::CreateAssignVariableOptions( + _fbb); +} + +inline RandomOptionsT *RandomOptions::UnPack(const flatbuffers::resolver_function_t *_resolver) const { + auto _o = std::unique_ptr(new RandomOptionsT()); + UnPackTo(_o.get(), _resolver); + return _o.release(); +} + +inline void RandomOptions::UnPackTo(RandomOptionsT *_o, const flatbuffers::resolver_function_t *_resolver) const { + (void)_o; + (void)_resolver; + { auto _e = seed(); _o->seed = _e; } + { auto _e = seed2(); _o->seed2 = _e; } +} + +inline flatbuffers::Offset RandomOptions::Pack(flatbuffers::FlatBufferBuilder &_fbb, const RandomOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher) { + return CreateRandomOptions(_fbb, _o, _rehasher); +} + +inline flatbuffers::Offset CreateRandomOptions(flatbuffers::FlatBufferBuilder &_fbb, const RandomOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher) { + (void)_rehasher; + (void)_o; + struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const RandomOptionsT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va; + auto _seed = _o->seed; + auto _seed2 = _o->seed2; + return tflite::CreateRandomOptions( + _fbb, + _seed, + _seed2); +} + +inline BucketizeOptionsT *BucketizeOptions::UnPack(const flatbuffers::resolver_function_t *_resolver) const { + auto _o = std::unique_ptr(new BucketizeOptionsT()); + UnPackTo(_o.get(), _resolver); + return _o.release(); +} + +inline void BucketizeOptions::UnPackTo(BucketizeOptionsT *_o, const flatbuffers::resolver_function_t *_resolver) const { + (void)_o; + (void)_resolver; + { auto _e = boundaries(); if (_e) { _o->boundaries.resize(_e->size()); for (flatbuffers::uoffset_t _i 
= 0; _i < _e->size(); _i++) { _o->boundaries[_i] = _e->Get(_i); } } } +} + +inline flatbuffers::Offset BucketizeOptions::Pack(flatbuffers::FlatBufferBuilder &_fbb, const BucketizeOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher) { + return CreateBucketizeOptions(_fbb, _o, _rehasher); +} + +inline flatbuffers::Offset CreateBucketizeOptions(flatbuffers::FlatBufferBuilder &_fbb, const BucketizeOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher) { + (void)_rehasher; + (void)_o; + struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const BucketizeOptionsT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va; + auto _boundaries = _o->boundaries.size() ? _fbb.CreateVector(_o->boundaries) : 0; + return tflite::CreateBucketizeOptions( + _fbb, + _boundaries); +} + +inline GeluOptionsT *GeluOptions::UnPack(const flatbuffers::resolver_function_t *_resolver) const { + auto _o = std::unique_ptr(new GeluOptionsT()); + UnPackTo(_o.get(), _resolver); + return _o.release(); +} + +inline void GeluOptions::UnPackTo(GeluOptionsT *_o, const flatbuffers::resolver_function_t *_resolver) const { + (void)_o; + (void)_resolver; + { auto _e = approximate(); _o->approximate = _e; } +} + +inline flatbuffers::Offset GeluOptions::Pack(flatbuffers::FlatBufferBuilder &_fbb, const GeluOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher) { + return CreateGeluOptions(_fbb, _o, _rehasher); +} + +inline flatbuffers::Offset CreateGeluOptions(flatbuffers::FlatBufferBuilder &_fbb, const GeluOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher) { + (void)_rehasher; + (void)_o; + struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const GeluOptionsT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va; + auto _approximate = _o->approximate; + return tflite::CreateGeluOptions( + _fbb, + _approximate); +} + +inline DynamicUpdateSliceOptionsT 
*DynamicUpdateSliceOptions::UnPack(const flatbuffers::resolver_function_t *_resolver) const { + auto _o = std::unique_ptr(new DynamicUpdateSliceOptionsT()); + UnPackTo(_o.get(), _resolver); + return _o.release(); +} + +inline void DynamicUpdateSliceOptions::UnPackTo(DynamicUpdateSliceOptionsT *_o, const flatbuffers::resolver_function_t *_resolver) const { + (void)_o; + (void)_resolver; +} + +inline flatbuffers::Offset DynamicUpdateSliceOptions::Pack(flatbuffers::FlatBufferBuilder &_fbb, const DynamicUpdateSliceOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher) { + return CreateDynamicUpdateSliceOptions(_fbb, _o, _rehasher); +} + +inline flatbuffers::Offset CreateDynamicUpdateSliceOptions(flatbuffers::FlatBufferBuilder &_fbb, const DynamicUpdateSliceOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher) { + (void)_rehasher; + (void)_o; + struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const DynamicUpdateSliceOptionsT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va; + return tflite::CreateDynamicUpdateSliceOptions( + _fbb); +} + +inline UnsortedSegmentProdOptionsT *UnsortedSegmentProdOptions::UnPack(const flatbuffers::resolver_function_t *_resolver) const { + auto _o = std::unique_ptr(new UnsortedSegmentProdOptionsT()); + UnPackTo(_o.get(), _resolver); + return _o.release(); +} + +inline void UnsortedSegmentProdOptions::UnPackTo(UnsortedSegmentProdOptionsT *_o, const flatbuffers::resolver_function_t *_resolver) const { + (void)_o; + (void)_resolver; +} + +inline flatbuffers::Offset UnsortedSegmentProdOptions::Pack(flatbuffers::FlatBufferBuilder &_fbb, const UnsortedSegmentProdOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher) { + return CreateUnsortedSegmentProdOptions(_fbb, _o, _rehasher); +} + +inline flatbuffers::Offset CreateUnsortedSegmentProdOptions(flatbuffers::FlatBufferBuilder &_fbb, const UnsortedSegmentProdOptionsT *_o, const 
flatbuffers::rehasher_function_t *_rehasher) { + (void)_rehasher; + (void)_o; + struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const UnsortedSegmentProdOptionsT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va; + return tflite::CreateUnsortedSegmentProdOptions( + _fbb); +} + +inline UnsortedSegmentMaxOptionsT *UnsortedSegmentMaxOptions::UnPack(const flatbuffers::resolver_function_t *_resolver) const { + auto _o = std::unique_ptr(new UnsortedSegmentMaxOptionsT()); + UnPackTo(_o.get(), _resolver); + return _o.release(); +} + +inline void UnsortedSegmentMaxOptions::UnPackTo(UnsortedSegmentMaxOptionsT *_o, const flatbuffers::resolver_function_t *_resolver) const { + (void)_o; + (void)_resolver; +} + +inline flatbuffers::Offset UnsortedSegmentMaxOptions::Pack(flatbuffers::FlatBufferBuilder &_fbb, const UnsortedSegmentMaxOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher) { + return CreateUnsortedSegmentMaxOptions(_fbb, _o, _rehasher); +} + +inline flatbuffers::Offset CreateUnsortedSegmentMaxOptions(flatbuffers::FlatBufferBuilder &_fbb, const UnsortedSegmentMaxOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher) { + (void)_rehasher; + (void)_o; + struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const UnsortedSegmentMaxOptionsT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va; + return tflite::CreateUnsortedSegmentMaxOptions( + _fbb); +} + +inline UnsortedSegmentSumOptionsT *UnsortedSegmentSumOptions::UnPack(const flatbuffers::resolver_function_t *_resolver) const { + auto _o = std::unique_ptr(new UnsortedSegmentSumOptionsT()); + UnPackTo(_o.get(), _resolver); + return _o.release(); +} + +inline void UnsortedSegmentSumOptions::UnPackTo(UnsortedSegmentSumOptionsT *_o, const flatbuffers::resolver_function_t *_resolver) const { + (void)_o; + (void)_resolver; +} + +inline flatbuffers::Offset 
UnsortedSegmentSumOptions::Pack(flatbuffers::FlatBufferBuilder &_fbb, const UnsortedSegmentSumOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher) { + return CreateUnsortedSegmentSumOptions(_fbb, _o, _rehasher); +} + +inline flatbuffers::Offset CreateUnsortedSegmentSumOptions(flatbuffers::FlatBufferBuilder &_fbb, const UnsortedSegmentSumOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher) { + (void)_rehasher; + (void)_o; + struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const UnsortedSegmentSumOptionsT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va; + return tflite::CreateUnsortedSegmentSumOptions( + _fbb); +} + +inline ATan2OptionsT *ATan2Options::UnPack(const flatbuffers::resolver_function_t *_resolver) const { + auto _o = std::unique_ptr(new ATan2OptionsT()); + UnPackTo(_o.get(), _resolver); + return _o.release(); +} + +inline void ATan2Options::UnPackTo(ATan2OptionsT *_o, const flatbuffers::resolver_function_t *_resolver) const { + (void)_o; + (void)_resolver; +} + +inline flatbuffers::Offset ATan2Options::Pack(flatbuffers::FlatBufferBuilder &_fbb, const ATan2OptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher) { + return CreateATan2Options(_fbb, _o, _rehasher); +} + +inline flatbuffers::Offset CreateATan2Options(flatbuffers::FlatBufferBuilder &_fbb, const ATan2OptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher) { + (void)_rehasher; + (void)_o; + struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const ATan2OptionsT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va; + return tflite::CreateATan2Options( + _fbb); +} + +inline UnsortedSegmentMinOptionsT *UnsortedSegmentMinOptions::UnPack(const flatbuffers::resolver_function_t *_resolver) const { + auto _o = std::unique_ptr(new UnsortedSegmentMinOptionsT()); + UnPackTo(_o.get(), _resolver); + return _o.release(); +} + +inline void 
UnsortedSegmentMinOptions::UnPackTo(UnsortedSegmentMinOptionsT *_o, const flatbuffers::resolver_function_t *_resolver) const { + (void)_o; + (void)_resolver; +} + +inline flatbuffers::Offset UnsortedSegmentMinOptions::Pack(flatbuffers::FlatBufferBuilder &_fbb, const UnsortedSegmentMinOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher) { + return CreateUnsortedSegmentMinOptions(_fbb, _o, _rehasher); +} + +inline flatbuffers::Offset CreateUnsortedSegmentMinOptions(flatbuffers::FlatBufferBuilder &_fbb, const UnsortedSegmentMinOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher) { + (void)_rehasher; + (void)_o; + struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const UnsortedSegmentMinOptionsT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va; + return tflite::CreateUnsortedSegmentMinOptions( + _fbb); +} + +inline SignOptionsT *SignOptions::UnPack(const flatbuffers::resolver_function_t *_resolver) const { + auto _o = std::unique_ptr(new SignOptionsT()); + UnPackTo(_o.get(), _resolver); + return _o.release(); +} + +inline void SignOptions::UnPackTo(SignOptionsT *_o, const flatbuffers::resolver_function_t *_resolver) const { + (void)_o; + (void)_resolver; +} + +inline flatbuffers::Offset SignOptions::Pack(flatbuffers::FlatBufferBuilder &_fbb, const SignOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher) { + return CreateSignOptions(_fbb, _o, _rehasher); +} + +inline flatbuffers::Offset CreateSignOptions(flatbuffers::FlatBufferBuilder &_fbb, const SignOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher) { + (void)_rehasher; + (void)_o; + struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const SignOptionsT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va; + return tflite::CreateSignOptions( + _fbb); +} + +inline BitcastOptionsT *BitcastOptions::UnPack(const flatbuffers::resolver_function_t *_resolver) 
const { + auto _o = std::unique_ptr(new BitcastOptionsT()); + UnPackTo(_o.get(), _resolver); + return _o.release(); +} + +inline void BitcastOptions::UnPackTo(BitcastOptionsT *_o, const flatbuffers::resolver_function_t *_resolver) const { + (void)_o; + (void)_resolver; +} + +inline flatbuffers::Offset BitcastOptions::Pack(flatbuffers::FlatBufferBuilder &_fbb, const BitcastOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher) { + return CreateBitcastOptions(_fbb, _o, _rehasher); +} + +inline flatbuffers::Offset CreateBitcastOptions(flatbuffers::FlatBufferBuilder &_fbb, const BitcastOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher) { + (void)_rehasher; + (void)_o; + struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const BitcastOptionsT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va; + return tflite::CreateBitcastOptions( + _fbb); +} + +inline BitwiseXorOptionsT *BitwiseXorOptions::UnPack(const flatbuffers::resolver_function_t *_resolver) const { + auto _o = std::unique_ptr(new BitwiseXorOptionsT()); + UnPackTo(_o.get(), _resolver); + return _o.release(); +} + +inline void BitwiseXorOptions::UnPackTo(BitwiseXorOptionsT *_o, const flatbuffers::resolver_function_t *_resolver) const { + (void)_o; + (void)_resolver; +} + +inline flatbuffers::Offset BitwiseXorOptions::Pack(flatbuffers::FlatBufferBuilder &_fbb, const BitwiseXorOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher) { + return CreateBitwiseXorOptions(_fbb, _o, _rehasher); +} + +inline flatbuffers::Offset CreateBitwiseXorOptions(flatbuffers::FlatBufferBuilder &_fbb, const BitwiseXorOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher) { + (void)_rehasher; + (void)_o; + struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const BitwiseXorOptionsT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va; + return tflite::CreateBitwiseXorOptions( + 
_fbb); +} + +inline RightShiftOptionsT *RightShiftOptions::UnPack(const flatbuffers::resolver_function_t *_resolver) const { + auto _o = std::unique_ptr(new RightShiftOptionsT()); + UnPackTo(_o.get(), _resolver); + return _o.release(); +} + +inline void RightShiftOptions::UnPackTo(RightShiftOptionsT *_o, const flatbuffers::resolver_function_t *_resolver) const { + (void)_o; + (void)_resolver; +} + +inline flatbuffers::Offset RightShiftOptions::Pack(flatbuffers::FlatBufferBuilder &_fbb, const RightShiftOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher) { + return CreateRightShiftOptions(_fbb, _o, _rehasher); +} + +inline flatbuffers::Offset CreateRightShiftOptions(flatbuffers::FlatBufferBuilder &_fbb, const RightShiftOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher) { + (void)_rehasher; + (void)_o; + struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const RightShiftOptionsT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va; + return tflite::CreateRightShiftOptions( + _fbb); +} + +inline OperatorCodeT *OperatorCode::UnPack(const flatbuffers::resolver_function_t *_resolver) const { + auto _o = std::unique_ptr(new OperatorCodeT()); + UnPackTo(_o.get(), _resolver); + return _o.release(); +} + +inline void OperatorCode::UnPackTo(OperatorCodeT *_o, const flatbuffers::resolver_function_t *_resolver) const { + (void)_o; + (void)_resolver; + { auto _e = deprecated_builtin_code(); _o->deprecated_builtin_code = _e; } + { auto _e = custom_code(); if (_e) _o->custom_code = _e->str(); } + { auto _e = version(); _o->version = _e; } + { auto _e = builtin_code(); _o->builtin_code = _e; } +} + +inline flatbuffers::Offset OperatorCode::Pack(flatbuffers::FlatBufferBuilder &_fbb, const OperatorCodeT* _o, const flatbuffers::rehasher_function_t *_rehasher) { + return CreateOperatorCode(_fbb, _o, _rehasher); +} + +inline flatbuffers::Offset CreateOperatorCode(flatbuffers::FlatBufferBuilder &_fbb, 
const OperatorCodeT *_o, const flatbuffers::rehasher_function_t *_rehasher) { + (void)_rehasher; + (void)_o; + struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const OperatorCodeT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va; + auto _deprecated_builtin_code = _o->deprecated_builtin_code; + auto _custom_code = _o->custom_code.empty() ? 0 : _fbb.CreateString(_o->custom_code); + auto _version = _o->version; + auto _builtin_code = _o->builtin_code; + return tflite::CreateOperatorCode( + _fbb, + _deprecated_builtin_code, + _custom_code, + _version, + _builtin_code); +} + +inline OperatorT *Operator::UnPack(const flatbuffers::resolver_function_t *_resolver) const { + auto _o = std::unique_ptr(new OperatorT()); + UnPackTo(_o.get(), _resolver); + return _o.release(); +} + +inline void Operator::UnPackTo(OperatorT *_o, const flatbuffers::resolver_function_t *_resolver) const { + (void)_o; + (void)_resolver; + { auto _e = opcode_index(); _o->opcode_index = _e; } + { auto _e = inputs(); if (_e) { _o->inputs.resize(_e->size()); for (flatbuffers::uoffset_t _i = 0; _i < _e->size(); _i++) { _o->inputs[_i] = _e->Get(_i); } } } + { auto _e = outputs(); if (_e) { _o->outputs.resize(_e->size()); for (flatbuffers::uoffset_t _i = 0; _i < _e->size(); _i++) { _o->outputs[_i] = _e->Get(_i); } } } + { auto _e = builtin_options_type(); _o->builtin_options.type = _e; } + { auto _e = builtin_options(); if (_e) _o->builtin_options.value = tflite::BuiltinOptionsUnion::UnPack(_e, builtin_options_type(), _resolver); } + { auto _e = custom_options(); if (_e) { _o->custom_options.resize(_e->size()); std::copy(_e->begin(), _e->end(), _o->custom_options.begin()); } } + { auto _e = custom_options_format(); _o->custom_options_format = _e; } + { auto _e = mutating_variable_inputs(); if (_e) { _o->mutating_variable_inputs.resize(_e->size()); for (flatbuffers::uoffset_t _i = 0; _i < _e->size(); _i++) { _o->mutating_variable_inputs[_i] = 
_e->Get(_i) != 0; } } } + { auto _e = intermediates(); if (_e) { _o->intermediates.resize(_e->size()); for (flatbuffers::uoffset_t _i = 0; _i < _e->size(); _i++) { _o->intermediates[_i] = _e->Get(_i); } } } + { auto _e = custom_options_offset(); _o->custom_options_offset = _e; } + { auto _e = custom_options_size(); _o->custom_options_size = _e; } +} + +inline flatbuffers::Offset Operator::Pack(flatbuffers::FlatBufferBuilder &_fbb, const OperatorT* _o, const flatbuffers::rehasher_function_t *_rehasher) { + return CreateOperator(_fbb, _o, _rehasher); +} + +inline flatbuffers::Offset CreateOperator(flatbuffers::FlatBufferBuilder &_fbb, const OperatorT *_o, const flatbuffers::rehasher_function_t *_rehasher) { + (void)_rehasher; + (void)_o; + struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const OperatorT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va; + auto _opcode_index = _o->opcode_index; + auto _inputs = _o->inputs.size() ? _fbb.CreateVector(_o->inputs) : 0; + auto _outputs = _o->outputs.size() ? _fbb.CreateVector(_o->outputs) : 0; + auto _builtin_options_type = _o->builtin_options.type; + auto _builtin_options = _o->builtin_options.Pack(_fbb); + auto _custom_options = _o->custom_options.size() ? _fbb.CreateVector(_o->custom_options) : 0; + auto _custom_options_format = _o->custom_options_format; + auto _mutating_variable_inputs = _o->mutating_variable_inputs.size() ? _fbb.CreateVector(_o->mutating_variable_inputs) : 0; + auto _intermediates = _o->intermediates.size() ? 
_fbb.CreateVector(_o->intermediates) : 0; + auto _custom_options_offset = _o->custom_options_offset; + auto _custom_options_size = _o->custom_options_size; + return tflite::CreateOperator( + _fbb, + _opcode_index, + _inputs, + _outputs, + _builtin_options_type, + _builtin_options, + _custom_options, + _custom_options_format, + _mutating_variable_inputs, + _intermediates, + _custom_options_offset, + _custom_options_size); +} + +inline SubGraphT::SubGraphT(const SubGraphT &o) + : inputs(o.inputs), + outputs(o.outputs), + name(o.name) { + tensors.reserve(o.tensors.size()); + for (const auto &tensors_ : o.tensors) { tensors.emplace_back((tensors_) ? new tflite::TensorT(*tensors_) : nullptr); } + operators.reserve(o.operators.size()); + for (const auto &operators_ : o.operators) { operators.emplace_back((operators_) ? new tflite::OperatorT(*operators_) : nullptr); } +} + +inline SubGraphT &SubGraphT::operator=(SubGraphT o) FLATBUFFERS_NOEXCEPT { + std::swap(tensors, o.tensors); + std::swap(inputs, o.inputs); + std::swap(outputs, o.outputs); + std::swap(operators, o.operators); + std::swap(name, o.name); + return *this; +} + +inline SubGraphT *SubGraph::UnPack(const flatbuffers::resolver_function_t *_resolver) const { + auto _o = std::unique_ptr(new SubGraphT()); + UnPackTo(_o.get(), _resolver); + return _o.release(); +} + +inline void SubGraph::UnPackTo(SubGraphT *_o, const flatbuffers::resolver_function_t *_resolver) const { + (void)_o; + (void)_resolver; + { auto _e = tensors(); if (_e) { _o->tensors.resize(_e->size()); for (flatbuffers::uoffset_t _i = 0; _i < _e->size(); _i++) { if(_o->tensors[_i]) { _e->Get(_i)->UnPackTo(_o->tensors[_i].get(), _resolver); } else { _o->tensors[_i] = std::unique_ptr(_e->Get(_i)->UnPack(_resolver)); }; } } } + { auto _e = inputs(); if (_e) { _o->inputs.resize(_e->size()); for (flatbuffers::uoffset_t _i = 0; _i < _e->size(); _i++) { _o->inputs[_i] = _e->Get(_i); } } } + { auto _e = outputs(); if (_e) { _o->outputs.resize(_e->size()); 
for (flatbuffers::uoffset_t _i = 0; _i < _e->size(); _i++) { _o->outputs[_i] = _e->Get(_i); } } } + { auto _e = operators(); if (_e) { _o->operators.resize(_e->size()); for (flatbuffers::uoffset_t _i = 0; _i < _e->size(); _i++) { if(_o->operators[_i]) { _e->Get(_i)->UnPackTo(_o->operators[_i].get(), _resolver); } else { _o->operators[_i] = std::unique_ptr(_e->Get(_i)->UnPack(_resolver)); }; } } } + { auto _e = name(); if (_e) _o->name = _e->str(); } +} + +inline flatbuffers::Offset SubGraph::Pack(flatbuffers::FlatBufferBuilder &_fbb, const SubGraphT* _o, const flatbuffers::rehasher_function_t *_rehasher) { + return CreateSubGraph(_fbb, _o, _rehasher); +} + +inline flatbuffers::Offset CreateSubGraph(flatbuffers::FlatBufferBuilder &_fbb, const SubGraphT *_o, const flatbuffers::rehasher_function_t *_rehasher) { + (void)_rehasher; + (void)_o; + struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const SubGraphT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va; + auto _tensors = _o->tensors.size() ? _fbb.CreateVector> (_o->tensors.size(), [](size_t i, _VectorArgs *__va) { return CreateTensor(*__va->__fbb, __va->__o->tensors[i].get(), __va->__rehasher); }, &_va ) : 0; + auto _inputs = _o->inputs.size() ? _fbb.CreateVector(_o->inputs) : 0; + auto _outputs = _o->outputs.size() ? _fbb.CreateVector(_o->outputs) : 0; + auto _operators = _o->operators.size() ? _fbb.CreateVector> (_o->operators.size(), [](size_t i, _VectorArgs *__va) { return CreateOperator(*__va->__fbb, __va->__o->operators[i].get(), __va->__rehasher); }, &_va ) : 0; + auto _name = _o->name.empty() ? 
0 : _fbb.CreateString(_o->name); + return tflite::CreateSubGraph( + _fbb, + _tensors, + _inputs, + _outputs, + _operators, + _name); +} + +inline BufferT *Buffer::UnPack(const flatbuffers::resolver_function_t *_resolver) const { + auto _o = std::unique_ptr(new BufferT()); + UnPackTo(_o.get(), _resolver); + return _o.release(); +} + +inline void Buffer::UnPackTo(BufferT *_o, const flatbuffers::resolver_function_t *_resolver) const { + (void)_o; + (void)_resolver; + { auto _e = data(); if (_e) { _o->data.resize(_e->size()); std::copy(_e->begin(), _e->end(), _o->data.begin()); } } + { auto _e = offset(); _o->offset = _e; } + { auto _e = size(); _o->size = _e; } +} + +inline flatbuffers::Offset Buffer::Pack(flatbuffers::FlatBufferBuilder &_fbb, const BufferT* _o, const flatbuffers::rehasher_function_t *_rehasher) { + return CreateBuffer(_fbb, _o, _rehasher); +} + +inline flatbuffers::Offset CreateBuffer(flatbuffers::FlatBufferBuilder &_fbb, const BufferT *_o, const flatbuffers::rehasher_function_t *_rehasher) { + (void)_rehasher; + (void)_o; + struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const BufferT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va; + _fbb.ForceVectorAlignment(_o->data.size(), sizeof(uint8_t), 16); + auto _data = _o->data.size() ? 
_fbb.CreateVector(_o->data) : 0; + auto _offset = _o->offset; + auto _size = _o->size; + return tflite::CreateBuffer( + _fbb, + _data, + _offset, + _size); +} + +inline MetadataT *Metadata::UnPack(const flatbuffers::resolver_function_t *_resolver) const { + auto _o = std::unique_ptr(new MetadataT()); + UnPackTo(_o.get(), _resolver); + return _o.release(); +} + +inline void Metadata::UnPackTo(MetadataT *_o, const flatbuffers::resolver_function_t *_resolver) const { + (void)_o; + (void)_resolver; + { auto _e = name(); if (_e) _o->name = _e->str(); } + { auto _e = buffer(); _o->buffer = _e; } +} + +inline flatbuffers::Offset Metadata::Pack(flatbuffers::FlatBufferBuilder &_fbb, const MetadataT* _o, const flatbuffers::rehasher_function_t *_rehasher) { + return CreateMetadata(_fbb, _o, _rehasher); +} + +inline flatbuffers::Offset CreateMetadata(flatbuffers::FlatBufferBuilder &_fbb, const MetadataT *_o, const flatbuffers::rehasher_function_t *_rehasher) { + (void)_rehasher; + (void)_o; + struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const MetadataT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va; + auto _name = _o->name.empty() ? 
0 : _fbb.CreateString(_o->name); + auto _buffer = _o->buffer; + return tflite::CreateMetadata( + _fbb, + _name, + _buffer); +} + +inline TensorMapT *TensorMap::UnPack(const flatbuffers::resolver_function_t *_resolver) const { + auto _o = std::unique_ptr(new TensorMapT()); + UnPackTo(_o.get(), _resolver); + return _o.release(); +} + +inline void TensorMap::UnPackTo(TensorMapT *_o, const flatbuffers::resolver_function_t *_resolver) const { + (void)_o; + (void)_resolver; + { auto _e = name(); if (_e) _o->name = _e->str(); } + { auto _e = tensor_index(); _o->tensor_index = _e; } +} + +inline flatbuffers::Offset TensorMap::Pack(flatbuffers::FlatBufferBuilder &_fbb, const TensorMapT* _o, const flatbuffers::rehasher_function_t *_rehasher) { + return CreateTensorMap(_fbb, _o, _rehasher); +} + +inline flatbuffers::Offset CreateTensorMap(flatbuffers::FlatBufferBuilder &_fbb, const TensorMapT *_o, const flatbuffers::rehasher_function_t *_rehasher) { + (void)_rehasher; + (void)_o; + struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const TensorMapT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va; + auto _name = _o->name.empty() ? 0 : _fbb.CreateString(_o->name); + auto _tensor_index = _o->tensor_index; + return tflite::CreateTensorMap( + _fbb, + _name, + _tensor_index); +} + +inline SignatureDefT::SignatureDefT(const SignatureDefT &o) + : signature_key(o.signature_key), + subgraph_index(o.subgraph_index) { + inputs.reserve(o.inputs.size()); + for (const auto &inputs_ : o.inputs) { inputs.emplace_back((inputs_) ? new tflite::TensorMapT(*inputs_) : nullptr); } + outputs.reserve(o.outputs.size()); + for (const auto &outputs_ : o.outputs) { outputs.emplace_back((outputs_) ? 
new tflite::TensorMapT(*outputs_) : nullptr); } +} + +inline SignatureDefT &SignatureDefT::operator=(SignatureDefT o) FLATBUFFERS_NOEXCEPT { + std::swap(inputs, o.inputs); + std::swap(outputs, o.outputs); + std::swap(signature_key, o.signature_key); + std::swap(subgraph_index, o.subgraph_index); + return *this; +} + +inline SignatureDefT *SignatureDef::UnPack(const flatbuffers::resolver_function_t *_resolver) const { + auto _o = std::unique_ptr(new SignatureDefT()); + UnPackTo(_o.get(), _resolver); + return _o.release(); +} + +inline void SignatureDef::UnPackTo(SignatureDefT *_o, const flatbuffers::resolver_function_t *_resolver) const { + (void)_o; + (void)_resolver; + { auto _e = inputs(); if (_e) { _o->inputs.resize(_e->size()); for (flatbuffers::uoffset_t _i = 0; _i < _e->size(); _i++) { if(_o->inputs[_i]) { _e->Get(_i)->UnPackTo(_o->inputs[_i].get(), _resolver); } else { _o->inputs[_i] = std::unique_ptr(_e->Get(_i)->UnPack(_resolver)); }; } } } + { auto _e = outputs(); if (_e) { _o->outputs.resize(_e->size()); for (flatbuffers::uoffset_t _i = 0; _i < _e->size(); _i++) { if(_o->outputs[_i]) { _e->Get(_i)->UnPackTo(_o->outputs[_i].get(), _resolver); } else { _o->outputs[_i] = std::unique_ptr(_e->Get(_i)->UnPack(_resolver)); }; } } } + { auto _e = signature_key(); if (_e) _o->signature_key = _e->str(); } + { auto _e = subgraph_index(); _o->subgraph_index = _e; } +} + +inline flatbuffers::Offset SignatureDef::Pack(flatbuffers::FlatBufferBuilder &_fbb, const SignatureDefT* _o, const flatbuffers::rehasher_function_t *_rehasher) { + return CreateSignatureDef(_fbb, _o, _rehasher); +} + +inline flatbuffers::Offset CreateSignatureDef(flatbuffers::FlatBufferBuilder &_fbb, const SignatureDefT *_o, const flatbuffers::rehasher_function_t *_rehasher) { + (void)_rehasher; + (void)_o; + struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const SignatureDefT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va; + auto 
_inputs = _o->inputs.size() ? _fbb.CreateVector> (_o->inputs.size(), [](size_t i, _VectorArgs *__va) { return CreateTensorMap(*__va->__fbb, __va->__o->inputs[i].get(), __va->__rehasher); }, &_va ) : 0; + auto _outputs = _o->outputs.size() ? _fbb.CreateVector> (_o->outputs.size(), [](size_t i, _VectorArgs *__va) { return CreateTensorMap(*__va->__fbb, __va->__o->outputs[i].get(), __va->__rehasher); }, &_va ) : 0; + auto _signature_key = _o->signature_key.empty() ? 0 : _fbb.CreateString(_o->signature_key); + auto _subgraph_index = _o->subgraph_index; + return tflite::CreateSignatureDef( + _fbb, + _inputs, + _outputs, + _signature_key, + _subgraph_index); +} + +inline ModelT::ModelT(const ModelT &o) + : version(o.version), + description(o.description), + metadata_buffer(o.metadata_buffer) { + operator_codes.reserve(o.operator_codes.size()); + for (const auto &operator_codes_ : o.operator_codes) { operator_codes.emplace_back((operator_codes_) ? new tflite::OperatorCodeT(*operator_codes_) : nullptr); } + subgraphs.reserve(o.subgraphs.size()); + for (const auto &subgraphs_ : o.subgraphs) { subgraphs.emplace_back((subgraphs_) ? new tflite::SubGraphT(*subgraphs_) : nullptr); } + buffers.reserve(o.buffers.size()); + for (const auto &buffers_ : o.buffers) { buffers.emplace_back((buffers_) ? new tflite::BufferT(*buffers_) : nullptr); } + metadata.reserve(o.metadata.size()); + for (const auto &metadata_ : o.metadata) { metadata.emplace_back((metadata_) ? new tflite::MetadataT(*metadata_) : nullptr); } + signature_defs.reserve(o.signature_defs.size()); + for (const auto &signature_defs_ : o.signature_defs) { signature_defs.emplace_back((signature_defs_) ? 
new tflite::SignatureDefT(*signature_defs_) : nullptr); } +} + +inline ModelT &ModelT::operator=(ModelT o) FLATBUFFERS_NOEXCEPT { + std::swap(version, o.version); + std::swap(operator_codes, o.operator_codes); + std::swap(subgraphs, o.subgraphs); + std::swap(description, o.description); + std::swap(buffers, o.buffers); + std::swap(metadata_buffer, o.metadata_buffer); + std::swap(metadata, o.metadata); + std::swap(signature_defs, o.signature_defs); + return *this; +} + +inline ModelT *Model::UnPack(const flatbuffers::resolver_function_t *_resolver) const { + auto _o = std::unique_ptr(new ModelT()); + UnPackTo(_o.get(), _resolver); + return _o.release(); +} + +inline void Model::UnPackTo(ModelT *_o, const flatbuffers::resolver_function_t *_resolver) const { + (void)_o; + (void)_resolver; + { auto _e = version(); _o->version = _e; } + { auto _e = operator_codes(); if (_e) { _o->operator_codes.resize(_e->size()); for (flatbuffers::uoffset_t _i = 0; _i < _e->size(); _i++) { if(_o->operator_codes[_i]) { _e->Get(_i)->UnPackTo(_o->operator_codes[_i].get(), _resolver); } else { _o->operator_codes[_i] = std::unique_ptr(_e->Get(_i)->UnPack(_resolver)); }; } } } + { auto _e = subgraphs(); if (_e) { _o->subgraphs.resize(_e->size()); for (flatbuffers::uoffset_t _i = 0; _i < _e->size(); _i++) { if(_o->subgraphs[_i]) { _e->Get(_i)->UnPackTo(_o->subgraphs[_i].get(), _resolver); } else { _o->subgraphs[_i] = std::unique_ptr(_e->Get(_i)->UnPack(_resolver)); }; } } } + { auto _e = description(); if (_e) _o->description = _e->str(); } + { auto _e = buffers(); if (_e) { _o->buffers.resize(_e->size()); for (flatbuffers::uoffset_t _i = 0; _i < _e->size(); _i++) { if(_o->buffers[_i]) { _e->Get(_i)->UnPackTo(_o->buffers[_i].get(), _resolver); } else { _o->buffers[_i] = std::unique_ptr(_e->Get(_i)->UnPack(_resolver)); }; } } } + { auto _e = metadata_buffer(); if (_e) { _o->metadata_buffer.resize(_e->size()); for (flatbuffers::uoffset_t _i = 0; _i < _e->size(); _i++) { _o->metadata_buffer[_i] 
= _e->Get(_i); } } } + { auto _e = metadata(); if (_e) { _o->metadata.resize(_e->size()); for (flatbuffers::uoffset_t _i = 0; _i < _e->size(); _i++) { if(_o->metadata[_i]) { _e->Get(_i)->UnPackTo(_o->metadata[_i].get(), _resolver); } else { _o->metadata[_i] = std::unique_ptr(_e->Get(_i)->UnPack(_resolver)); }; } } } + { auto _e = signature_defs(); if (_e) { _o->signature_defs.resize(_e->size()); for (flatbuffers::uoffset_t _i = 0; _i < _e->size(); _i++) { if(_o->signature_defs[_i]) { _e->Get(_i)->UnPackTo(_o->signature_defs[_i].get(), _resolver); } else { _o->signature_defs[_i] = std::unique_ptr(_e->Get(_i)->UnPack(_resolver)); }; } } } +} + +inline flatbuffers::Offset Model::Pack(flatbuffers::FlatBufferBuilder &_fbb, const ModelT* _o, const flatbuffers::rehasher_function_t *_rehasher) { + return CreateModel(_fbb, _o, _rehasher); +} + +inline flatbuffers::Offset CreateModel(flatbuffers::FlatBufferBuilder &_fbb, const ModelT *_o, const flatbuffers::rehasher_function_t *_rehasher) { + (void)_rehasher; + (void)_o; + struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const ModelT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va; + auto _version = _o->version; + auto _operator_codes = _o->operator_codes.size() ? _fbb.CreateVector> (_o->operator_codes.size(), [](size_t i, _VectorArgs *__va) { return CreateOperatorCode(*__va->__fbb, __va->__o->operator_codes[i].get(), __va->__rehasher); }, &_va ) : 0; + auto _subgraphs = _o->subgraphs.size() ? _fbb.CreateVector> (_o->subgraphs.size(), [](size_t i, _VectorArgs *__va) { return CreateSubGraph(*__va->__fbb, __va->__o->subgraphs[i].get(), __va->__rehasher); }, &_va ) : 0; + auto _description = _o->description.empty() ? 0 : _fbb.CreateString(_o->description); + auto _buffers = _o->buffers.size() ? 
_fbb.CreateVector> (_o->buffers.size(), [](size_t i, _VectorArgs *__va) { return CreateBuffer(*__va->__fbb, __va->__o->buffers[i].get(), __va->__rehasher); }, &_va ) : 0; + auto _metadata_buffer = _o->metadata_buffer.size() ? _fbb.CreateVector(_o->metadata_buffer) : 0; + auto _metadata = _o->metadata.size() ? _fbb.CreateVector> (_o->metadata.size(), [](size_t i, _VectorArgs *__va) { return CreateMetadata(*__va->__fbb, __va->__o->metadata[i].get(), __va->__rehasher); }, &_va ) : 0; + auto _signature_defs = _o->signature_defs.size() ? _fbb.CreateVector> (_o->signature_defs.size(), [](size_t i, _VectorArgs *__va) { return CreateSignatureDef(*__va->__fbb, __va->__o->signature_defs[i].get(), __va->__rehasher); }, &_va ) : 0; + return tflite::CreateModel( + _fbb, + _version, + _operator_codes, + _subgraphs, + _description, + _buffers, + _metadata_buffer, + _metadata, + _signature_defs); +} + +inline bool VerifyQuantizationDetails(flatbuffers::Verifier &verifier, const void *obj, QuantizationDetails type) { + switch (type) { + case QuantizationDetails_NONE: { + return true; + } + case QuantizationDetails_CustomQuantization: { + auto ptr = reinterpret_cast(obj); + return verifier.VerifyTable(ptr); + } + default: return true; + } +} + +inline bool VerifyQuantizationDetailsVector(flatbuffers::Verifier &verifier, const flatbuffers::Vector> *values, const flatbuffers::Vector *types) { + if (!values || !types) return !values && !types; + if (values->size() != types->size()) return false; + for (flatbuffers::uoffset_t i = 0; i < values->size(); ++i) { + if (!VerifyQuantizationDetails( + verifier, values->Get(i), types->GetEnum(i))) { + return false; + } + } + return true; +} + +inline void *QuantizationDetailsUnion::UnPack(const void *obj, QuantizationDetails type, const flatbuffers::resolver_function_t *resolver) { + (void)resolver; + switch (type) { + case QuantizationDetails_CustomQuantization: { + auto ptr = reinterpret_cast(obj); + return ptr->UnPack(resolver); + } + 
default: return nullptr; + } +} + +inline flatbuffers::Offset QuantizationDetailsUnion::Pack(flatbuffers::FlatBufferBuilder &_fbb, const flatbuffers::rehasher_function_t *_rehasher) const { + (void)_rehasher; + switch (type) { + case QuantizationDetails_CustomQuantization: { + auto ptr = reinterpret_cast(value); + return CreateCustomQuantization(_fbb, ptr, _rehasher).Union(); + } + default: return 0; + } +} + +inline QuantizationDetailsUnion::QuantizationDetailsUnion(const QuantizationDetailsUnion &u) : type(u.type), value(nullptr) { + switch (type) { + case QuantizationDetails_CustomQuantization: { + value = new tflite::CustomQuantizationT(*reinterpret_cast(u.value)); + break; + } + default: + break; + } +} + +inline void QuantizationDetailsUnion::Reset() { + switch (type) { + case QuantizationDetails_CustomQuantization: { + auto ptr = reinterpret_cast(value); + delete ptr; + break; + } + default: break; + } + value = nullptr; + type = QuantizationDetails_NONE; +} + +inline bool VerifySparseIndexVector(flatbuffers::Verifier &verifier, const void *obj, SparseIndexVector type) { + switch (type) { + case SparseIndexVector_NONE: { + return true; + } + case SparseIndexVector_Int32Vector: { + auto ptr = reinterpret_cast(obj); + return verifier.VerifyTable(ptr); + } + case SparseIndexVector_Uint16Vector: { + auto ptr = reinterpret_cast(obj); + return verifier.VerifyTable(ptr); + } + case SparseIndexVector_Uint8Vector: { + auto ptr = reinterpret_cast(obj); + return verifier.VerifyTable(ptr); + } + default: return true; + } +} + +inline bool VerifySparseIndexVectorVector(flatbuffers::Verifier &verifier, const flatbuffers::Vector> *values, const flatbuffers::Vector *types) { + if (!values || !types) return !values && !types; + if (values->size() != types->size()) return false; + for (flatbuffers::uoffset_t i = 0; i < values->size(); ++i) { + if (!VerifySparseIndexVector( + verifier, values->Get(i), types->GetEnum(i))) { + return false; + } + } + return true; +} + +inline 
void *SparseIndexVectorUnion::UnPack(const void *obj, SparseIndexVector type, const flatbuffers::resolver_function_t *resolver) { + (void)resolver; + switch (type) { + case SparseIndexVector_Int32Vector: { + auto ptr = reinterpret_cast(obj); + return ptr->UnPack(resolver); + } + case SparseIndexVector_Uint16Vector: { + auto ptr = reinterpret_cast(obj); + return ptr->UnPack(resolver); + } + case SparseIndexVector_Uint8Vector: { + auto ptr = reinterpret_cast(obj); + return ptr->UnPack(resolver); + } + default: return nullptr; + } +} + +inline flatbuffers::Offset SparseIndexVectorUnion::Pack(flatbuffers::FlatBufferBuilder &_fbb, const flatbuffers::rehasher_function_t *_rehasher) const { + (void)_rehasher; + switch (type) { + case SparseIndexVector_Int32Vector: { + auto ptr = reinterpret_cast(value); + return CreateInt32Vector(_fbb, ptr, _rehasher).Union(); + } + case SparseIndexVector_Uint16Vector: { + auto ptr = reinterpret_cast(value); + return CreateUint16Vector(_fbb, ptr, _rehasher).Union(); + } + case SparseIndexVector_Uint8Vector: { + auto ptr = reinterpret_cast(value); + return CreateUint8Vector(_fbb, ptr, _rehasher).Union(); + } + default: return 0; + } +} + +inline SparseIndexVectorUnion::SparseIndexVectorUnion(const SparseIndexVectorUnion &u) : type(u.type), value(nullptr) { + switch (type) { + case SparseIndexVector_Int32Vector: { + value = new tflite::Int32VectorT(*reinterpret_cast(u.value)); + break; + } + case SparseIndexVector_Uint16Vector: { + value = new tflite::Uint16VectorT(*reinterpret_cast(u.value)); + break; + } + case SparseIndexVector_Uint8Vector: { + value = new tflite::Uint8VectorT(*reinterpret_cast(u.value)); + break; + } + default: + break; + } +} + +inline void SparseIndexVectorUnion::Reset() { + switch (type) { + case SparseIndexVector_Int32Vector: { + auto ptr = reinterpret_cast(value); + delete ptr; + break; + } + case SparseIndexVector_Uint16Vector: { + auto ptr = reinterpret_cast(value); + delete ptr; + break; + } + case 
SparseIndexVector_Uint8Vector: { + auto ptr = reinterpret_cast(value); + delete ptr; + break; + } + default: break; + } + value = nullptr; + type = SparseIndexVector_NONE; +} + +inline bool VerifyBuiltinOptions(flatbuffers::Verifier &verifier, const void *obj, BuiltinOptions type) { + switch (type) { + case BuiltinOptions_NONE: { + return true; + } + case BuiltinOptions_Conv2DOptions: { + auto ptr = reinterpret_cast(obj); + return verifier.VerifyTable(ptr); + } + case BuiltinOptions_DepthwiseConv2DOptions: { + auto ptr = reinterpret_cast(obj); + return verifier.VerifyTable(ptr); + } + case BuiltinOptions_ConcatEmbeddingsOptions: { + auto ptr = reinterpret_cast(obj); + return verifier.VerifyTable(ptr); + } + case BuiltinOptions_LSHProjectionOptions: { + auto ptr = reinterpret_cast(obj); + return verifier.VerifyTable(ptr); + } + case BuiltinOptions_Pool2DOptions: { + auto ptr = reinterpret_cast(obj); + return verifier.VerifyTable(ptr); + } + case BuiltinOptions_SVDFOptions: { + auto ptr = reinterpret_cast(obj); + return verifier.VerifyTable(ptr); + } + case BuiltinOptions_RNNOptions: { + auto ptr = reinterpret_cast(obj); + return verifier.VerifyTable(ptr); + } + case BuiltinOptions_FullyConnectedOptions: { + auto ptr = reinterpret_cast(obj); + return verifier.VerifyTable(ptr); + } + case BuiltinOptions_SoftmaxOptions: { + auto ptr = reinterpret_cast(obj); + return verifier.VerifyTable(ptr); + } + case BuiltinOptions_ConcatenationOptions: { + auto ptr = reinterpret_cast(obj); + return verifier.VerifyTable(ptr); + } + case BuiltinOptions_AddOptions: { + auto ptr = reinterpret_cast(obj); + return verifier.VerifyTable(ptr); + } + case BuiltinOptions_L2NormOptions: { + auto ptr = reinterpret_cast(obj); + return verifier.VerifyTable(ptr); + } + case BuiltinOptions_LocalResponseNormalizationOptions: { + auto ptr = reinterpret_cast(obj); + return verifier.VerifyTable(ptr); + } + case BuiltinOptions_LSTMOptions: { + auto ptr = reinterpret_cast(obj); + return 
verifier.VerifyTable(ptr); + } + case BuiltinOptions_ResizeBilinearOptions: { + auto ptr = reinterpret_cast(obj); + return verifier.VerifyTable(ptr); + } + case BuiltinOptions_CallOptions: { + auto ptr = reinterpret_cast(obj); + return verifier.VerifyTable(ptr); + } + case BuiltinOptions_ReshapeOptions: { + auto ptr = reinterpret_cast(obj); + return verifier.VerifyTable(ptr); + } + case BuiltinOptions_SkipGramOptions: { + auto ptr = reinterpret_cast(obj); + return verifier.VerifyTable(ptr); + } + case BuiltinOptions_SpaceToDepthOptions: { + auto ptr = reinterpret_cast(obj); + return verifier.VerifyTable(ptr); + } + case BuiltinOptions_EmbeddingLookupSparseOptions: { + auto ptr = reinterpret_cast(obj); + return verifier.VerifyTable(ptr); + } + case BuiltinOptions_MulOptions: { + auto ptr = reinterpret_cast(obj); + return verifier.VerifyTable(ptr); + } + case BuiltinOptions_PadOptions: { + auto ptr = reinterpret_cast(obj); + return verifier.VerifyTable(ptr); + } + case BuiltinOptions_GatherOptions: { + auto ptr = reinterpret_cast(obj); + return verifier.VerifyTable(ptr); + } + case BuiltinOptions_BatchToSpaceNDOptions: { + auto ptr = reinterpret_cast(obj); + return verifier.VerifyTable(ptr); + } + case BuiltinOptions_SpaceToBatchNDOptions: { + auto ptr = reinterpret_cast(obj); + return verifier.VerifyTable(ptr); + } + case BuiltinOptions_TransposeOptions: { + auto ptr = reinterpret_cast(obj); + return verifier.VerifyTable(ptr); + } + case BuiltinOptions_ReducerOptions: { + auto ptr = reinterpret_cast(obj); + return verifier.VerifyTable(ptr); + } + case BuiltinOptions_SubOptions: { + auto ptr = reinterpret_cast(obj); + return verifier.VerifyTable(ptr); + } + case BuiltinOptions_DivOptions: { + auto ptr = reinterpret_cast(obj); + return verifier.VerifyTable(ptr); + } + case BuiltinOptions_SqueezeOptions: { + auto ptr = reinterpret_cast(obj); + return verifier.VerifyTable(ptr); + } + case BuiltinOptions_SequenceRNNOptions: { + auto ptr = reinterpret_cast(obj); + return 
verifier.VerifyTable(ptr); + } + case BuiltinOptions_StridedSliceOptions: { + auto ptr = reinterpret_cast(obj); + return verifier.VerifyTable(ptr); + } + case BuiltinOptions_ExpOptions: { + auto ptr = reinterpret_cast(obj); + return verifier.VerifyTable(ptr); + } + case BuiltinOptions_TopKV2Options: { + auto ptr = reinterpret_cast(obj); + return verifier.VerifyTable(ptr); + } + case BuiltinOptions_SplitOptions: { + auto ptr = reinterpret_cast(obj); + return verifier.VerifyTable(ptr); + } + case BuiltinOptions_LogSoftmaxOptions: { + auto ptr = reinterpret_cast(obj); + return verifier.VerifyTable(ptr); + } + case BuiltinOptions_CastOptions: { + auto ptr = reinterpret_cast(obj); + return verifier.VerifyTable(ptr); + } + case BuiltinOptions_DequantizeOptions: { + auto ptr = reinterpret_cast(obj); + return verifier.VerifyTable(ptr); + } + case BuiltinOptions_MaximumMinimumOptions: { + auto ptr = reinterpret_cast(obj); + return verifier.VerifyTable(ptr); + } + case BuiltinOptions_ArgMaxOptions: { + auto ptr = reinterpret_cast(obj); + return verifier.VerifyTable(ptr); + } + case BuiltinOptions_LessOptions: { + auto ptr = reinterpret_cast(obj); + return verifier.VerifyTable(ptr); + } + case BuiltinOptions_NegOptions: { + auto ptr = reinterpret_cast(obj); + return verifier.VerifyTable(ptr); + } + case BuiltinOptions_PadV2Options: { + auto ptr = reinterpret_cast(obj); + return verifier.VerifyTable(ptr); + } + case BuiltinOptions_GreaterOptions: { + auto ptr = reinterpret_cast(obj); + return verifier.VerifyTable(ptr); + } + case BuiltinOptions_GreaterEqualOptions: { + auto ptr = reinterpret_cast(obj); + return verifier.VerifyTable(ptr); + } + case BuiltinOptions_LessEqualOptions: { + auto ptr = reinterpret_cast(obj); + return verifier.VerifyTable(ptr); + } + case BuiltinOptions_SelectOptions: { + auto ptr = reinterpret_cast(obj); + return verifier.VerifyTable(ptr); + } + case BuiltinOptions_SliceOptions: { + auto ptr = reinterpret_cast(obj); + return 
verifier.VerifyTable(ptr); + } + case BuiltinOptions_TransposeConvOptions: { + auto ptr = reinterpret_cast(obj); + return verifier.VerifyTable(ptr); + } + case BuiltinOptions_SparseToDenseOptions: { + auto ptr = reinterpret_cast(obj); + return verifier.VerifyTable(ptr); + } + case BuiltinOptions_TileOptions: { + auto ptr = reinterpret_cast(obj); + return verifier.VerifyTable(ptr); + } + case BuiltinOptions_ExpandDimsOptions: { + auto ptr = reinterpret_cast(obj); + return verifier.VerifyTable(ptr); + } + case BuiltinOptions_EqualOptions: { + auto ptr = reinterpret_cast(obj); + return verifier.VerifyTable(ptr); + } + case BuiltinOptions_NotEqualOptions: { + auto ptr = reinterpret_cast(obj); + return verifier.VerifyTable(ptr); + } + case BuiltinOptions_ShapeOptions: { + auto ptr = reinterpret_cast(obj); + return verifier.VerifyTable(ptr); + } + case BuiltinOptions_PowOptions: { + auto ptr = reinterpret_cast(obj); + return verifier.VerifyTable(ptr); + } + case BuiltinOptions_ArgMinOptions: { + auto ptr = reinterpret_cast(obj); + return verifier.VerifyTable(ptr); + } + case BuiltinOptions_FakeQuantOptions: { + auto ptr = reinterpret_cast(obj); + return verifier.VerifyTable(ptr); + } + case BuiltinOptions_PackOptions: { + auto ptr = reinterpret_cast(obj); + return verifier.VerifyTable(ptr); + } + case BuiltinOptions_LogicalOrOptions: { + auto ptr = reinterpret_cast(obj); + return verifier.VerifyTable(ptr); + } + case BuiltinOptions_OneHotOptions: { + auto ptr = reinterpret_cast(obj); + return verifier.VerifyTable(ptr); + } + case BuiltinOptions_LogicalAndOptions: { + auto ptr = reinterpret_cast(obj); + return verifier.VerifyTable(ptr); + } + case BuiltinOptions_LogicalNotOptions: { + auto ptr = reinterpret_cast(obj); + return verifier.VerifyTable(ptr); + } + case BuiltinOptions_UnpackOptions: { + auto ptr = reinterpret_cast(obj); + return verifier.VerifyTable(ptr); + } + case BuiltinOptions_FloorDivOptions: { + auto ptr = reinterpret_cast(obj); + return 
verifier.VerifyTable(ptr); + } + case BuiltinOptions_SquareOptions: { + auto ptr = reinterpret_cast(obj); + return verifier.VerifyTable(ptr); + } + case BuiltinOptions_ZerosLikeOptions: { + auto ptr = reinterpret_cast(obj); + return verifier.VerifyTable(ptr); + } + case BuiltinOptions_FillOptions: { + auto ptr = reinterpret_cast(obj); + return verifier.VerifyTable(ptr); + } + case BuiltinOptions_BidirectionalSequenceLSTMOptions: { + auto ptr = reinterpret_cast(obj); + return verifier.VerifyTable(ptr); + } + case BuiltinOptions_BidirectionalSequenceRNNOptions: { + auto ptr = reinterpret_cast(obj); + return verifier.VerifyTable(ptr); + } + case BuiltinOptions_UnidirectionalSequenceLSTMOptions: { + auto ptr = reinterpret_cast(obj); + return verifier.VerifyTable(ptr); + } + case BuiltinOptions_FloorModOptions: { + auto ptr = reinterpret_cast(obj); + return verifier.VerifyTable(ptr); + } + case BuiltinOptions_RangeOptions: { + auto ptr = reinterpret_cast(obj); + return verifier.VerifyTable(ptr); + } + case BuiltinOptions_ResizeNearestNeighborOptions: { + auto ptr = reinterpret_cast(obj); + return verifier.VerifyTable(ptr); + } + case BuiltinOptions_LeakyReluOptions: { + auto ptr = reinterpret_cast(obj); + return verifier.VerifyTable(ptr); + } + case BuiltinOptions_SquaredDifferenceOptions: { + auto ptr = reinterpret_cast(obj); + return verifier.VerifyTable(ptr); + } + case BuiltinOptions_MirrorPadOptions: { + auto ptr = reinterpret_cast(obj); + return verifier.VerifyTable(ptr); + } + case BuiltinOptions_AbsOptions: { + auto ptr = reinterpret_cast(obj); + return verifier.VerifyTable(ptr); + } + case BuiltinOptions_SplitVOptions: { + auto ptr = reinterpret_cast(obj); + return verifier.VerifyTable(ptr); + } + case BuiltinOptions_UniqueOptions: { + auto ptr = reinterpret_cast(obj); + return verifier.VerifyTable(ptr); + } + case BuiltinOptions_ReverseV2Options: { + auto ptr = reinterpret_cast(obj); + return verifier.VerifyTable(ptr); + } + case BuiltinOptions_AddNOptions: { 
+ auto ptr = reinterpret_cast(obj); + return verifier.VerifyTable(ptr); + } + case BuiltinOptions_GatherNdOptions: { + auto ptr = reinterpret_cast(obj); + return verifier.VerifyTable(ptr); + } + case BuiltinOptions_CosOptions: { + auto ptr = reinterpret_cast(obj); + return verifier.VerifyTable(ptr); + } + case BuiltinOptions_WhereOptions: { + auto ptr = reinterpret_cast(obj); + return verifier.VerifyTable(ptr); + } + case BuiltinOptions_RankOptions: { + auto ptr = reinterpret_cast(obj); + return verifier.VerifyTable(ptr); + } + case BuiltinOptions_ReverseSequenceOptions: { + auto ptr = reinterpret_cast(obj); + return verifier.VerifyTable(ptr); + } + case BuiltinOptions_MatrixDiagOptions: { + auto ptr = reinterpret_cast(obj); + return verifier.VerifyTable(ptr); + } + case BuiltinOptions_QuantizeOptions: { + auto ptr = reinterpret_cast(obj); + return verifier.VerifyTable(ptr); + } + case BuiltinOptions_MatrixSetDiagOptions: { + auto ptr = reinterpret_cast(obj); + return verifier.VerifyTable(ptr); + } + case BuiltinOptions_HardSwishOptions: { + auto ptr = reinterpret_cast(obj); + return verifier.VerifyTable(ptr); + } + case BuiltinOptions_IfOptions: { + auto ptr = reinterpret_cast(obj); + return verifier.VerifyTable(ptr); + } + case BuiltinOptions_WhileOptions: { + auto ptr = reinterpret_cast(obj); + return verifier.VerifyTable(ptr); + } + case BuiltinOptions_DepthToSpaceOptions: { + auto ptr = reinterpret_cast(obj); + return verifier.VerifyTable(ptr); + } + case BuiltinOptions_NonMaxSuppressionV4Options: { + auto ptr = reinterpret_cast(obj); + return verifier.VerifyTable(ptr); + } + case BuiltinOptions_NonMaxSuppressionV5Options: { + auto ptr = reinterpret_cast(obj); + return verifier.VerifyTable(ptr); + } + case BuiltinOptions_ScatterNdOptions: { + auto ptr = reinterpret_cast(obj); + return verifier.VerifyTable(ptr); + } + case BuiltinOptions_SelectV2Options: { + auto ptr = reinterpret_cast(obj); + return verifier.VerifyTable(ptr); + } + case 
BuiltinOptions_DensifyOptions: { + auto ptr = reinterpret_cast(obj); + return verifier.VerifyTable(ptr); + } + case BuiltinOptions_SegmentSumOptions: { + auto ptr = reinterpret_cast(obj); + return verifier.VerifyTable(ptr); + } + case BuiltinOptions_BatchMatMulOptions: { + auto ptr = reinterpret_cast(obj); + return verifier.VerifyTable(ptr); + } + case BuiltinOptions_CumsumOptions: { + auto ptr = reinterpret_cast(obj); + return verifier.VerifyTable(ptr); + } + case BuiltinOptions_CallOnceOptions: { + auto ptr = reinterpret_cast(obj); + return verifier.VerifyTable(ptr); + } + case BuiltinOptions_BroadcastToOptions: { + auto ptr = reinterpret_cast(obj); + return verifier.VerifyTable(ptr); + } + case BuiltinOptions_Rfft2dOptions: { + auto ptr = reinterpret_cast(obj); + return verifier.VerifyTable(ptr); + } + case BuiltinOptions_Conv3DOptions: { + auto ptr = reinterpret_cast(obj); + return verifier.VerifyTable(ptr); + } + case BuiltinOptions_HashtableOptions: { + auto ptr = reinterpret_cast(obj); + return verifier.VerifyTable(ptr); + } + case BuiltinOptions_HashtableFindOptions: { + auto ptr = reinterpret_cast(obj); + return verifier.VerifyTable(ptr); + } + case BuiltinOptions_HashtableImportOptions: { + auto ptr = reinterpret_cast(obj); + return verifier.VerifyTable(ptr); + } + case BuiltinOptions_HashtableSizeOptions: { + auto ptr = reinterpret_cast(obj); + return verifier.VerifyTable(ptr); + } + case BuiltinOptions_VarHandleOptions: { + auto ptr = reinterpret_cast(obj); + return verifier.VerifyTable(ptr); + } + case BuiltinOptions_ReadVariableOptions: { + auto ptr = reinterpret_cast(obj); + return verifier.VerifyTable(ptr); + } + case BuiltinOptions_AssignVariableOptions: { + auto ptr = reinterpret_cast(obj); + return verifier.VerifyTable(ptr); + } + case BuiltinOptions_RandomOptions: { + auto ptr = reinterpret_cast(obj); + return verifier.VerifyTable(ptr); + } + case BuiltinOptions_BucketizeOptions: { + auto ptr = reinterpret_cast(obj); + return 
verifier.VerifyTable(ptr); + } + case BuiltinOptions_GeluOptions: { + auto ptr = reinterpret_cast(obj); + return verifier.VerifyTable(ptr); + } + case BuiltinOptions_DynamicUpdateSliceOptions: { + auto ptr = reinterpret_cast(obj); + return verifier.VerifyTable(ptr); + } + case BuiltinOptions_UnsortedSegmentProdOptions: { + auto ptr = reinterpret_cast(obj); + return verifier.VerifyTable(ptr); + } + case BuiltinOptions_UnsortedSegmentMaxOptions: { + auto ptr = reinterpret_cast(obj); + return verifier.VerifyTable(ptr); + } + case BuiltinOptions_UnsortedSegmentMinOptions: { + auto ptr = reinterpret_cast(obj); + return verifier.VerifyTable(ptr); + } + case BuiltinOptions_UnsortedSegmentSumOptions: { + auto ptr = reinterpret_cast(obj); + return verifier.VerifyTable(ptr); + } + case BuiltinOptions_ATan2Options: { + auto ptr = reinterpret_cast(obj); + return verifier.VerifyTable(ptr); + } + case BuiltinOptions_SignOptions: { + auto ptr = reinterpret_cast(obj); + return verifier.VerifyTable(ptr); + } + case BuiltinOptions_BitcastOptions: { + auto ptr = reinterpret_cast(obj); + return verifier.VerifyTable(ptr); + } + case BuiltinOptions_BitwiseXorOptions: { + auto ptr = reinterpret_cast(obj); + return verifier.VerifyTable(ptr); + } + case BuiltinOptions_RightShiftOptions: { + auto ptr = reinterpret_cast(obj); + return verifier.VerifyTable(ptr); + } + default: return true; + } +} + +inline bool VerifyBuiltinOptionsVector(flatbuffers::Verifier &verifier, const flatbuffers::Vector> *values, const flatbuffers::Vector *types) { + if (!values || !types) return !values && !types; + if (values->size() != types->size()) return false; + for (flatbuffers::uoffset_t i = 0; i < values->size(); ++i) { + if (!VerifyBuiltinOptions( + verifier, values->Get(i), types->GetEnum(i))) { + return false; + } + } + return true; +} + +inline void *BuiltinOptionsUnion::UnPack(const void *obj, BuiltinOptions type, const flatbuffers::resolver_function_t *resolver) { + (void)resolver; + switch (type) { + 
case BuiltinOptions_Conv2DOptions: { + auto ptr = reinterpret_cast(obj); + return ptr->UnPack(resolver); + } + case BuiltinOptions_DepthwiseConv2DOptions: { + auto ptr = reinterpret_cast(obj); + return ptr->UnPack(resolver); + } + case BuiltinOptions_ConcatEmbeddingsOptions: { + auto ptr = reinterpret_cast(obj); + return ptr->UnPack(resolver); + } + case BuiltinOptions_LSHProjectionOptions: { + auto ptr = reinterpret_cast(obj); + return ptr->UnPack(resolver); + } + case BuiltinOptions_Pool2DOptions: { + auto ptr = reinterpret_cast(obj); + return ptr->UnPack(resolver); + } + case BuiltinOptions_SVDFOptions: { + auto ptr = reinterpret_cast(obj); + return ptr->UnPack(resolver); + } + case BuiltinOptions_RNNOptions: { + auto ptr = reinterpret_cast(obj); + return ptr->UnPack(resolver); + } + case BuiltinOptions_FullyConnectedOptions: { + auto ptr = reinterpret_cast(obj); + return ptr->UnPack(resolver); + } + case BuiltinOptions_SoftmaxOptions: { + auto ptr = reinterpret_cast(obj); + return ptr->UnPack(resolver); + } + case BuiltinOptions_ConcatenationOptions: { + auto ptr = reinterpret_cast(obj); + return ptr->UnPack(resolver); + } + case BuiltinOptions_AddOptions: { + auto ptr = reinterpret_cast(obj); + return ptr->UnPack(resolver); + } + case BuiltinOptions_L2NormOptions: { + auto ptr = reinterpret_cast(obj); + return ptr->UnPack(resolver); + } + case BuiltinOptions_LocalResponseNormalizationOptions: { + auto ptr = reinterpret_cast(obj); + return ptr->UnPack(resolver); + } + case BuiltinOptions_LSTMOptions: { + auto ptr = reinterpret_cast(obj); + return ptr->UnPack(resolver); + } + case BuiltinOptions_ResizeBilinearOptions: { + auto ptr = reinterpret_cast(obj); + return ptr->UnPack(resolver); + } + case BuiltinOptions_CallOptions: { + auto ptr = reinterpret_cast(obj); + return ptr->UnPack(resolver); + } + case BuiltinOptions_ReshapeOptions: { + auto ptr = reinterpret_cast(obj); + return ptr->UnPack(resolver); + } + case BuiltinOptions_SkipGramOptions: { + auto ptr = 
reinterpret_cast(obj); + return ptr->UnPack(resolver); + } + case BuiltinOptions_SpaceToDepthOptions: { + auto ptr = reinterpret_cast(obj); + return ptr->UnPack(resolver); + } + case BuiltinOptions_EmbeddingLookupSparseOptions: { + auto ptr = reinterpret_cast(obj); + return ptr->UnPack(resolver); + } + case BuiltinOptions_MulOptions: { + auto ptr = reinterpret_cast(obj); + return ptr->UnPack(resolver); + } + case BuiltinOptions_PadOptions: { + auto ptr = reinterpret_cast(obj); + return ptr->UnPack(resolver); + } + case BuiltinOptions_GatherOptions: { + auto ptr = reinterpret_cast(obj); + return ptr->UnPack(resolver); + } + case BuiltinOptions_BatchToSpaceNDOptions: { + auto ptr = reinterpret_cast(obj); + return ptr->UnPack(resolver); + } + case BuiltinOptions_SpaceToBatchNDOptions: { + auto ptr = reinterpret_cast(obj); + return ptr->UnPack(resolver); + } + case BuiltinOptions_TransposeOptions: { + auto ptr = reinterpret_cast(obj); + return ptr->UnPack(resolver); + } + case BuiltinOptions_ReducerOptions: { + auto ptr = reinterpret_cast(obj); + return ptr->UnPack(resolver); + } + case BuiltinOptions_SubOptions: { + auto ptr = reinterpret_cast(obj); + return ptr->UnPack(resolver); + } + case BuiltinOptions_DivOptions: { + auto ptr = reinterpret_cast(obj); + return ptr->UnPack(resolver); + } + case BuiltinOptions_SqueezeOptions: { + auto ptr = reinterpret_cast(obj); + return ptr->UnPack(resolver); + } + case BuiltinOptions_SequenceRNNOptions: { + auto ptr = reinterpret_cast(obj); + return ptr->UnPack(resolver); + } + case BuiltinOptions_StridedSliceOptions: { + auto ptr = reinterpret_cast(obj); + return ptr->UnPack(resolver); + } + case BuiltinOptions_ExpOptions: { + auto ptr = reinterpret_cast(obj); + return ptr->UnPack(resolver); + } + case BuiltinOptions_TopKV2Options: { + auto ptr = reinterpret_cast(obj); + return ptr->UnPack(resolver); + } + case BuiltinOptions_SplitOptions: { + auto ptr = reinterpret_cast(obj); + return ptr->UnPack(resolver); + } + case 
BuiltinOptions_LogSoftmaxOptions: { + auto ptr = reinterpret_cast(obj); + return ptr->UnPack(resolver); + } + case BuiltinOptions_CastOptions: { + auto ptr = reinterpret_cast(obj); + return ptr->UnPack(resolver); + } + case BuiltinOptions_DequantizeOptions: { + auto ptr = reinterpret_cast(obj); + return ptr->UnPack(resolver); + } + case BuiltinOptions_MaximumMinimumOptions: { + auto ptr = reinterpret_cast(obj); + return ptr->UnPack(resolver); + } + case BuiltinOptions_ArgMaxOptions: { + auto ptr = reinterpret_cast(obj); + return ptr->UnPack(resolver); + } + case BuiltinOptions_LessOptions: { + auto ptr = reinterpret_cast(obj); + return ptr->UnPack(resolver); + } + case BuiltinOptions_NegOptions: { + auto ptr = reinterpret_cast(obj); + return ptr->UnPack(resolver); + } + case BuiltinOptions_PadV2Options: { + auto ptr = reinterpret_cast(obj); + return ptr->UnPack(resolver); + } + case BuiltinOptions_GreaterOptions: { + auto ptr = reinterpret_cast(obj); + return ptr->UnPack(resolver); + } + case BuiltinOptions_GreaterEqualOptions: { + auto ptr = reinterpret_cast(obj); + return ptr->UnPack(resolver); + } + case BuiltinOptions_LessEqualOptions: { + auto ptr = reinterpret_cast(obj); + return ptr->UnPack(resolver); + } + case BuiltinOptions_SelectOptions: { + auto ptr = reinterpret_cast(obj); + return ptr->UnPack(resolver); + } + case BuiltinOptions_SliceOptions: { + auto ptr = reinterpret_cast(obj); + return ptr->UnPack(resolver); + } + case BuiltinOptions_TransposeConvOptions: { + auto ptr = reinterpret_cast(obj); + return ptr->UnPack(resolver); + } + case BuiltinOptions_SparseToDenseOptions: { + auto ptr = reinterpret_cast(obj); + return ptr->UnPack(resolver); + } + case BuiltinOptions_TileOptions: { + auto ptr = reinterpret_cast(obj); + return ptr->UnPack(resolver); + } + case BuiltinOptions_ExpandDimsOptions: { + auto ptr = reinterpret_cast(obj); + return ptr->UnPack(resolver); + } + case BuiltinOptions_EqualOptions: { + auto ptr = reinterpret_cast(obj); + return 
ptr->UnPack(resolver); + } + case BuiltinOptions_NotEqualOptions: { + auto ptr = reinterpret_cast(obj); + return ptr->UnPack(resolver); + } + case BuiltinOptions_ShapeOptions: { + auto ptr = reinterpret_cast(obj); + return ptr->UnPack(resolver); + } + case BuiltinOptions_PowOptions: { + auto ptr = reinterpret_cast(obj); + return ptr->UnPack(resolver); + } + case BuiltinOptions_ArgMinOptions: { + auto ptr = reinterpret_cast(obj); + return ptr->UnPack(resolver); + } + case BuiltinOptions_FakeQuantOptions: { + auto ptr = reinterpret_cast(obj); + return ptr->UnPack(resolver); + } + case BuiltinOptions_PackOptions: { + auto ptr = reinterpret_cast(obj); + return ptr->UnPack(resolver); + } + case BuiltinOptions_LogicalOrOptions: { + auto ptr = reinterpret_cast(obj); + return ptr->UnPack(resolver); + } + case BuiltinOptions_OneHotOptions: { + auto ptr = reinterpret_cast(obj); + return ptr->UnPack(resolver); + } + case BuiltinOptions_LogicalAndOptions: { + auto ptr = reinterpret_cast(obj); + return ptr->UnPack(resolver); + } + case BuiltinOptions_LogicalNotOptions: { + auto ptr = reinterpret_cast(obj); + return ptr->UnPack(resolver); + } + case BuiltinOptions_UnpackOptions: { + auto ptr = reinterpret_cast(obj); + return ptr->UnPack(resolver); + } + case BuiltinOptions_FloorDivOptions: { + auto ptr = reinterpret_cast(obj); + return ptr->UnPack(resolver); + } + case BuiltinOptions_SquareOptions: { + auto ptr = reinterpret_cast(obj); + return ptr->UnPack(resolver); + } + case BuiltinOptions_ZerosLikeOptions: { + auto ptr = reinterpret_cast(obj); + return ptr->UnPack(resolver); + } + case BuiltinOptions_FillOptions: { + auto ptr = reinterpret_cast(obj); + return ptr->UnPack(resolver); + } + case BuiltinOptions_BidirectionalSequenceLSTMOptions: { + auto ptr = reinterpret_cast(obj); + return ptr->UnPack(resolver); + } + case BuiltinOptions_BidirectionalSequenceRNNOptions: { + auto ptr = reinterpret_cast(obj); + return ptr->UnPack(resolver); + } + case 
BuiltinOptions_UnidirectionalSequenceLSTMOptions: { + auto ptr = reinterpret_cast(obj); + return ptr->UnPack(resolver); + } + case BuiltinOptions_FloorModOptions: { + auto ptr = reinterpret_cast(obj); + return ptr->UnPack(resolver); + } + case BuiltinOptions_RangeOptions: { + auto ptr = reinterpret_cast(obj); + return ptr->UnPack(resolver); + } + case BuiltinOptions_ResizeNearestNeighborOptions: { + auto ptr = reinterpret_cast(obj); + return ptr->UnPack(resolver); + } + case BuiltinOptions_LeakyReluOptions: { + auto ptr = reinterpret_cast(obj); + return ptr->UnPack(resolver); + } + case BuiltinOptions_SquaredDifferenceOptions: { + auto ptr = reinterpret_cast(obj); + return ptr->UnPack(resolver); + } + case BuiltinOptions_MirrorPadOptions: { + auto ptr = reinterpret_cast(obj); + return ptr->UnPack(resolver); + } + case BuiltinOptions_AbsOptions: { + auto ptr = reinterpret_cast(obj); + return ptr->UnPack(resolver); + } + case BuiltinOptions_SplitVOptions: { + auto ptr = reinterpret_cast(obj); + return ptr->UnPack(resolver); + } + case BuiltinOptions_UniqueOptions: { + auto ptr = reinterpret_cast(obj); + return ptr->UnPack(resolver); + } + case BuiltinOptions_ReverseV2Options: { + auto ptr = reinterpret_cast(obj); + return ptr->UnPack(resolver); + } + case BuiltinOptions_AddNOptions: { + auto ptr = reinterpret_cast(obj); + return ptr->UnPack(resolver); + } + case BuiltinOptions_GatherNdOptions: { + auto ptr = reinterpret_cast(obj); + return ptr->UnPack(resolver); + } + case BuiltinOptions_CosOptions: { + auto ptr = reinterpret_cast(obj); + return ptr->UnPack(resolver); + } + case BuiltinOptions_WhereOptions: { + auto ptr = reinterpret_cast(obj); + return ptr->UnPack(resolver); + } + case BuiltinOptions_RankOptions: { + auto ptr = reinterpret_cast(obj); + return ptr->UnPack(resolver); + } + case BuiltinOptions_ReverseSequenceOptions: { + auto ptr = reinterpret_cast(obj); + return ptr->UnPack(resolver); + } + case BuiltinOptions_MatrixDiagOptions: { + auto ptr = 
reinterpret_cast(obj); + return ptr->UnPack(resolver); + } + case BuiltinOptions_QuantizeOptions: { + auto ptr = reinterpret_cast(obj); + return ptr->UnPack(resolver); + } + case BuiltinOptions_MatrixSetDiagOptions: { + auto ptr = reinterpret_cast(obj); + return ptr->UnPack(resolver); + } + case BuiltinOptions_HardSwishOptions: { + auto ptr = reinterpret_cast(obj); + return ptr->UnPack(resolver); + } + case BuiltinOptions_IfOptions: { + auto ptr = reinterpret_cast(obj); + return ptr->UnPack(resolver); + } + case BuiltinOptions_WhileOptions: { + auto ptr = reinterpret_cast(obj); + return ptr->UnPack(resolver); + } + case BuiltinOptions_DepthToSpaceOptions: { + auto ptr = reinterpret_cast(obj); + return ptr->UnPack(resolver); + } + case BuiltinOptions_NonMaxSuppressionV4Options: { + auto ptr = reinterpret_cast(obj); + return ptr->UnPack(resolver); + } + case BuiltinOptions_NonMaxSuppressionV5Options: { + auto ptr = reinterpret_cast(obj); + return ptr->UnPack(resolver); + } + case BuiltinOptions_ScatterNdOptions: { + auto ptr = reinterpret_cast(obj); + return ptr->UnPack(resolver); + } + case BuiltinOptions_SelectV2Options: { + auto ptr = reinterpret_cast(obj); + return ptr->UnPack(resolver); + } + case BuiltinOptions_DensifyOptions: { + auto ptr = reinterpret_cast(obj); + return ptr->UnPack(resolver); + } + case BuiltinOptions_SegmentSumOptions: { + auto ptr = reinterpret_cast(obj); + return ptr->UnPack(resolver); + } + case BuiltinOptions_BatchMatMulOptions: { + auto ptr = reinterpret_cast(obj); + return ptr->UnPack(resolver); + } + case BuiltinOptions_CumsumOptions: { + auto ptr = reinterpret_cast(obj); + return ptr->UnPack(resolver); + } + case BuiltinOptions_CallOnceOptions: { + auto ptr = reinterpret_cast(obj); + return ptr->UnPack(resolver); + } + case BuiltinOptions_BroadcastToOptions: { + auto ptr = reinterpret_cast(obj); + return ptr->UnPack(resolver); + } + case BuiltinOptions_Rfft2dOptions: { + auto ptr = reinterpret_cast(obj); + return 
ptr->UnPack(resolver); + } + case BuiltinOptions_Conv3DOptions: { + auto ptr = reinterpret_cast(obj); + return ptr->UnPack(resolver); + } + case BuiltinOptions_HashtableOptions: { + auto ptr = reinterpret_cast(obj); + return ptr->UnPack(resolver); + } + case BuiltinOptions_HashtableFindOptions: { + auto ptr = reinterpret_cast(obj); + return ptr->UnPack(resolver); + } + case BuiltinOptions_HashtableImportOptions: { + auto ptr = reinterpret_cast(obj); + return ptr->UnPack(resolver); + } + case BuiltinOptions_HashtableSizeOptions: { + auto ptr = reinterpret_cast(obj); + return ptr->UnPack(resolver); + } + case BuiltinOptions_VarHandleOptions: { + auto ptr = reinterpret_cast(obj); + return ptr->UnPack(resolver); + } + case BuiltinOptions_ReadVariableOptions: { + auto ptr = reinterpret_cast(obj); + return ptr->UnPack(resolver); + } + case BuiltinOptions_AssignVariableOptions: { + auto ptr = reinterpret_cast(obj); + return ptr->UnPack(resolver); + } + case BuiltinOptions_RandomOptions: { + auto ptr = reinterpret_cast(obj); + return ptr->UnPack(resolver); + } + case BuiltinOptions_BucketizeOptions: { + auto ptr = reinterpret_cast(obj); + return ptr->UnPack(resolver); + } + case BuiltinOptions_GeluOptions: { + auto ptr = reinterpret_cast(obj); + return ptr->UnPack(resolver); + } + case BuiltinOptions_DynamicUpdateSliceOptions: { + auto ptr = reinterpret_cast(obj); + return ptr->UnPack(resolver); + } + case BuiltinOptions_UnsortedSegmentProdOptions: { + auto ptr = reinterpret_cast(obj); + return ptr->UnPack(resolver); + } + case BuiltinOptions_UnsortedSegmentMaxOptions: { + auto ptr = reinterpret_cast(obj); + return ptr->UnPack(resolver); + } + case BuiltinOptions_UnsortedSegmentMinOptions: { + auto ptr = reinterpret_cast(obj); + return ptr->UnPack(resolver); + } + case BuiltinOptions_UnsortedSegmentSumOptions: { + auto ptr = reinterpret_cast(obj); + return ptr->UnPack(resolver); + } + case BuiltinOptions_ATan2Options: { + auto ptr = reinterpret_cast(obj); + return 
ptr->UnPack(resolver); + } + case BuiltinOptions_SignOptions: { + auto ptr = reinterpret_cast(obj); + return ptr->UnPack(resolver); + } + case BuiltinOptions_BitcastOptions: { + auto ptr = reinterpret_cast(obj); + return ptr->UnPack(resolver); + } + case BuiltinOptions_BitwiseXorOptions: { + auto ptr = reinterpret_cast(obj); + return ptr->UnPack(resolver); + } + case BuiltinOptions_RightShiftOptions: { + auto ptr = reinterpret_cast(obj); + return ptr->UnPack(resolver); + } + default: return nullptr; + } +} + +inline flatbuffers::Offset BuiltinOptionsUnion::Pack(flatbuffers::FlatBufferBuilder &_fbb, const flatbuffers::rehasher_function_t *_rehasher) const { + (void)_rehasher; + switch (type) { + case BuiltinOptions_Conv2DOptions: { + auto ptr = reinterpret_cast(value); + return CreateConv2DOptions(_fbb, ptr, _rehasher).Union(); + } + case BuiltinOptions_DepthwiseConv2DOptions: { + auto ptr = reinterpret_cast(value); + return CreateDepthwiseConv2DOptions(_fbb, ptr, _rehasher).Union(); + } + case BuiltinOptions_ConcatEmbeddingsOptions: { + auto ptr = reinterpret_cast(value); + return CreateConcatEmbeddingsOptions(_fbb, ptr, _rehasher).Union(); + } + case BuiltinOptions_LSHProjectionOptions: { + auto ptr = reinterpret_cast(value); + return CreateLSHProjectionOptions(_fbb, ptr, _rehasher).Union(); + } + case BuiltinOptions_Pool2DOptions: { + auto ptr = reinterpret_cast(value); + return CreatePool2DOptions(_fbb, ptr, _rehasher).Union(); + } + case BuiltinOptions_SVDFOptions: { + auto ptr = reinterpret_cast(value); + return CreateSVDFOptions(_fbb, ptr, _rehasher).Union(); + } + case BuiltinOptions_RNNOptions: { + auto ptr = reinterpret_cast(value); + return CreateRNNOptions(_fbb, ptr, _rehasher).Union(); + } + case BuiltinOptions_FullyConnectedOptions: { + auto ptr = reinterpret_cast(value); + return CreateFullyConnectedOptions(_fbb, ptr, _rehasher).Union(); + } + case BuiltinOptions_SoftmaxOptions: { + auto ptr = reinterpret_cast(value); + return 
CreateSoftmaxOptions(_fbb, ptr, _rehasher).Union(); + } + case BuiltinOptions_ConcatenationOptions: { + auto ptr = reinterpret_cast(value); + return CreateConcatenationOptions(_fbb, ptr, _rehasher).Union(); + } + case BuiltinOptions_AddOptions: { + auto ptr = reinterpret_cast(value); + return CreateAddOptions(_fbb, ptr, _rehasher).Union(); + } + case BuiltinOptions_L2NormOptions: { + auto ptr = reinterpret_cast(value); + return CreateL2NormOptions(_fbb, ptr, _rehasher).Union(); + } + case BuiltinOptions_LocalResponseNormalizationOptions: { + auto ptr = reinterpret_cast(value); + return CreateLocalResponseNormalizationOptions(_fbb, ptr, _rehasher).Union(); + } + case BuiltinOptions_LSTMOptions: { + auto ptr = reinterpret_cast(value); + return CreateLSTMOptions(_fbb, ptr, _rehasher).Union(); + } + case BuiltinOptions_ResizeBilinearOptions: { + auto ptr = reinterpret_cast(value); + return CreateResizeBilinearOptions(_fbb, ptr, _rehasher).Union(); + } + case BuiltinOptions_CallOptions: { + auto ptr = reinterpret_cast(value); + return CreateCallOptions(_fbb, ptr, _rehasher).Union(); + } + case BuiltinOptions_ReshapeOptions: { + auto ptr = reinterpret_cast(value); + return CreateReshapeOptions(_fbb, ptr, _rehasher).Union(); + } + case BuiltinOptions_SkipGramOptions: { + auto ptr = reinterpret_cast(value); + return CreateSkipGramOptions(_fbb, ptr, _rehasher).Union(); + } + case BuiltinOptions_SpaceToDepthOptions: { + auto ptr = reinterpret_cast(value); + return CreateSpaceToDepthOptions(_fbb, ptr, _rehasher).Union(); + } + case BuiltinOptions_EmbeddingLookupSparseOptions: { + auto ptr = reinterpret_cast(value); + return CreateEmbeddingLookupSparseOptions(_fbb, ptr, _rehasher).Union(); + } + case BuiltinOptions_MulOptions: { + auto ptr = reinterpret_cast(value); + return CreateMulOptions(_fbb, ptr, _rehasher).Union(); + } + case BuiltinOptions_PadOptions: { + auto ptr = reinterpret_cast(value); + return CreatePadOptions(_fbb, ptr, _rehasher).Union(); + } + case 
BuiltinOptions_GatherOptions: { + auto ptr = reinterpret_cast(value); + return CreateGatherOptions(_fbb, ptr, _rehasher).Union(); + } + case BuiltinOptions_BatchToSpaceNDOptions: { + auto ptr = reinterpret_cast(value); + return CreateBatchToSpaceNDOptions(_fbb, ptr, _rehasher).Union(); + } + case BuiltinOptions_SpaceToBatchNDOptions: { + auto ptr = reinterpret_cast(value); + return CreateSpaceToBatchNDOptions(_fbb, ptr, _rehasher).Union(); + } + case BuiltinOptions_TransposeOptions: { + auto ptr = reinterpret_cast(value); + return CreateTransposeOptions(_fbb, ptr, _rehasher).Union(); + } + case BuiltinOptions_ReducerOptions: { + auto ptr = reinterpret_cast(value); + return CreateReducerOptions(_fbb, ptr, _rehasher).Union(); + } + case BuiltinOptions_SubOptions: { + auto ptr = reinterpret_cast(value); + return CreateSubOptions(_fbb, ptr, _rehasher).Union(); + } + case BuiltinOptions_DivOptions: { + auto ptr = reinterpret_cast(value); + return CreateDivOptions(_fbb, ptr, _rehasher).Union(); + } + case BuiltinOptions_SqueezeOptions: { + auto ptr = reinterpret_cast(value); + return CreateSqueezeOptions(_fbb, ptr, _rehasher).Union(); + } + case BuiltinOptions_SequenceRNNOptions: { + auto ptr = reinterpret_cast(value); + return CreateSequenceRNNOptions(_fbb, ptr, _rehasher).Union(); + } + case BuiltinOptions_StridedSliceOptions: { + auto ptr = reinterpret_cast(value); + return CreateStridedSliceOptions(_fbb, ptr, _rehasher).Union(); + } + case BuiltinOptions_ExpOptions: { + auto ptr = reinterpret_cast(value); + return CreateExpOptions(_fbb, ptr, _rehasher).Union(); + } + case BuiltinOptions_TopKV2Options: { + auto ptr = reinterpret_cast(value); + return CreateTopKV2Options(_fbb, ptr, _rehasher).Union(); + } + case BuiltinOptions_SplitOptions: { + auto ptr = reinterpret_cast(value); + return CreateSplitOptions(_fbb, ptr, _rehasher).Union(); + } + case BuiltinOptions_LogSoftmaxOptions: { + auto ptr = reinterpret_cast(value); + return CreateLogSoftmaxOptions(_fbb, ptr, 
_rehasher).Union(); + } + case BuiltinOptions_CastOptions: { + auto ptr = reinterpret_cast(value); + return CreateCastOptions(_fbb, ptr, _rehasher).Union(); + } + case BuiltinOptions_DequantizeOptions: { + auto ptr = reinterpret_cast(value); + return CreateDequantizeOptions(_fbb, ptr, _rehasher).Union(); + } + case BuiltinOptions_MaximumMinimumOptions: { + auto ptr = reinterpret_cast(value); + return CreateMaximumMinimumOptions(_fbb, ptr, _rehasher).Union(); + } + case BuiltinOptions_ArgMaxOptions: { + auto ptr = reinterpret_cast(value); + return CreateArgMaxOptions(_fbb, ptr, _rehasher).Union(); + } + case BuiltinOptions_LessOptions: { + auto ptr = reinterpret_cast(value); + return CreateLessOptions(_fbb, ptr, _rehasher).Union(); + } + case BuiltinOptions_NegOptions: { + auto ptr = reinterpret_cast(value); + return CreateNegOptions(_fbb, ptr, _rehasher).Union(); + } + case BuiltinOptions_PadV2Options: { + auto ptr = reinterpret_cast(value); + return CreatePadV2Options(_fbb, ptr, _rehasher).Union(); + } + case BuiltinOptions_GreaterOptions: { + auto ptr = reinterpret_cast(value); + return CreateGreaterOptions(_fbb, ptr, _rehasher).Union(); + } + case BuiltinOptions_GreaterEqualOptions: { + auto ptr = reinterpret_cast(value); + return CreateGreaterEqualOptions(_fbb, ptr, _rehasher).Union(); + } + case BuiltinOptions_LessEqualOptions: { + auto ptr = reinterpret_cast(value); + return CreateLessEqualOptions(_fbb, ptr, _rehasher).Union(); + } + case BuiltinOptions_SelectOptions: { + auto ptr = reinterpret_cast(value); + return CreateSelectOptions(_fbb, ptr, _rehasher).Union(); + } + case BuiltinOptions_SliceOptions: { + auto ptr = reinterpret_cast(value); + return CreateSliceOptions(_fbb, ptr, _rehasher).Union(); + } + case BuiltinOptions_TransposeConvOptions: { + auto ptr = reinterpret_cast(value); + return CreateTransposeConvOptions(_fbb, ptr, _rehasher).Union(); + } + case BuiltinOptions_SparseToDenseOptions: { + auto ptr = reinterpret_cast(value); + return 
CreateSparseToDenseOptions(_fbb, ptr, _rehasher).Union(); + } + case BuiltinOptions_TileOptions: { + auto ptr = reinterpret_cast(value); + return CreateTileOptions(_fbb, ptr, _rehasher).Union(); + } + case BuiltinOptions_ExpandDimsOptions: { + auto ptr = reinterpret_cast(value); + return CreateExpandDimsOptions(_fbb, ptr, _rehasher).Union(); + } + case BuiltinOptions_EqualOptions: { + auto ptr = reinterpret_cast(value); + return CreateEqualOptions(_fbb, ptr, _rehasher).Union(); + } + case BuiltinOptions_NotEqualOptions: { + auto ptr = reinterpret_cast(value); + return CreateNotEqualOptions(_fbb, ptr, _rehasher).Union(); + } + case BuiltinOptions_ShapeOptions: { + auto ptr = reinterpret_cast(value); + return CreateShapeOptions(_fbb, ptr, _rehasher).Union(); + } + case BuiltinOptions_PowOptions: { + auto ptr = reinterpret_cast(value); + return CreatePowOptions(_fbb, ptr, _rehasher).Union(); + } + case BuiltinOptions_ArgMinOptions: { + auto ptr = reinterpret_cast(value); + return CreateArgMinOptions(_fbb, ptr, _rehasher).Union(); + } + case BuiltinOptions_FakeQuantOptions: { + auto ptr = reinterpret_cast(value); + return CreateFakeQuantOptions(_fbb, ptr, _rehasher).Union(); + } + case BuiltinOptions_PackOptions: { + auto ptr = reinterpret_cast(value); + return CreatePackOptions(_fbb, ptr, _rehasher).Union(); + } + case BuiltinOptions_LogicalOrOptions: { + auto ptr = reinterpret_cast(value); + return CreateLogicalOrOptions(_fbb, ptr, _rehasher).Union(); + } + case BuiltinOptions_OneHotOptions: { + auto ptr = reinterpret_cast(value); + return CreateOneHotOptions(_fbb, ptr, _rehasher).Union(); + } + case BuiltinOptions_LogicalAndOptions: { + auto ptr = reinterpret_cast(value); + return CreateLogicalAndOptions(_fbb, ptr, _rehasher).Union(); + } + case BuiltinOptions_LogicalNotOptions: { + auto ptr = reinterpret_cast(value); + return CreateLogicalNotOptions(_fbb, ptr, _rehasher).Union(); + } + case BuiltinOptions_UnpackOptions: { + auto ptr = reinterpret_cast(value); + 
return CreateUnpackOptions(_fbb, ptr, _rehasher).Union(); + } + case BuiltinOptions_FloorDivOptions: { + auto ptr = reinterpret_cast(value); + return CreateFloorDivOptions(_fbb, ptr, _rehasher).Union(); + } + case BuiltinOptions_SquareOptions: { + auto ptr = reinterpret_cast(value); + return CreateSquareOptions(_fbb, ptr, _rehasher).Union(); + } + case BuiltinOptions_ZerosLikeOptions: { + auto ptr = reinterpret_cast(value); + return CreateZerosLikeOptions(_fbb, ptr, _rehasher).Union(); + } + case BuiltinOptions_FillOptions: { + auto ptr = reinterpret_cast(value); + return CreateFillOptions(_fbb, ptr, _rehasher).Union(); + } + case BuiltinOptions_BidirectionalSequenceLSTMOptions: { + auto ptr = reinterpret_cast(value); + return CreateBidirectionalSequenceLSTMOptions(_fbb, ptr, _rehasher).Union(); + } + case BuiltinOptions_BidirectionalSequenceRNNOptions: { + auto ptr = reinterpret_cast(value); + return CreateBidirectionalSequenceRNNOptions(_fbb, ptr, _rehasher).Union(); + } + case BuiltinOptions_UnidirectionalSequenceLSTMOptions: { + auto ptr = reinterpret_cast(value); + return CreateUnidirectionalSequenceLSTMOptions(_fbb, ptr, _rehasher).Union(); + } + case BuiltinOptions_FloorModOptions: { + auto ptr = reinterpret_cast(value); + return CreateFloorModOptions(_fbb, ptr, _rehasher).Union(); + } + case BuiltinOptions_RangeOptions: { + auto ptr = reinterpret_cast(value); + return CreateRangeOptions(_fbb, ptr, _rehasher).Union(); + } + case BuiltinOptions_ResizeNearestNeighborOptions: { + auto ptr = reinterpret_cast(value); + return CreateResizeNearestNeighborOptions(_fbb, ptr, _rehasher).Union(); + } + case BuiltinOptions_LeakyReluOptions: { + auto ptr = reinterpret_cast(value); + return CreateLeakyReluOptions(_fbb, ptr, _rehasher).Union(); + } + case BuiltinOptions_SquaredDifferenceOptions: { + auto ptr = reinterpret_cast(value); + return CreateSquaredDifferenceOptions(_fbb, ptr, _rehasher).Union(); + } + case BuiltinOptions_MirrorPadOptions: { + auto ptr = 
reinterpret_cast(value); + return CreateMirrorPadOptions(_fbb, ptr, _rehasher).Union(); + } + case BuiltinOptions_AbsOptions: { + auto ptr = reinterpret_cast(value); + return CreateAbsOptions(_fbb, ptr, _rehasher).Union(); + } + case BuiltinOptions_SplitVOptions: { + auto ptr = reinterpret_cast(value); + return CreateSplitVOptions(_fbb, ptr, _rehasher).Union(); + } + case BuiltinOptions_UniqueOptions: { + auto ptr = reinterpret_cast(value); + return CreateUniqueOptions(_fbb, ptr, _rehasher).Union(); + } + case BuiltinOptions_ReverseV2Options: { + auto ptr = reinterpret_cast(value); + return CreateReverseV2Options(_fbb, ptr, _rehasher).Union(); + } + case BuiltinOptions_AddNOptions: { + auto ptr = reinterpret_cast(value); + return CreateAddNOptions(_fbb, ptr, _rehasher).Union(); + } + case BuiltinOptions_GatherNdOptions: { + auto ptr = reinterpret_cast(value); + return CreateGatherNdOptions(_fbb, ptr, _rehasher).Union(); + } + case BuiltinOptions_CosOptions: { + auto ptr = reinterpret_cast(value); + return CreateCosOptions(_fbb, ptr, _rehasher).Union(); + } + case BuiltinOptions_WhereOptions: { + auto ptr = reinterpret_cast(value); + return CreateWhereOptions(_fbb, ptr, _rehasher).Union(); + } + case BuiltinOptions_RankOptions: { + auto ptr = reinterpret_cast(value); + return CreateRankOptions(_fbb, ptr, _rehasher).Union(); + } + case BuiltinOptions_ReverseSequenceOptions: { + auto ptr = reinterpret_cast(value); + return CreateReverseSequenceOptions(_fbb, ptr, _rehasher).Union(); + } + case BuiltinOptions_MatrixDiagOptions: { + auto ptr = reinterpret_cast(value); + return CreateMatrixDiagOptions(_fbb, ptr, _rehasher).Union(); + } + case BuiltinOptions_QuantizeOptions: { + auto ptr = reinterpret_cast(value); + return CreateQuantizeOptions(_fbb, ptr, _rehasher).Union(); + } + case BuiltinOptions_MatrixSetDiagOptions: { + auto ptr = reinterpret_cast(value); + return CreateMatrixSetDiagOptions(_fbb, ptr, _rehasher).Union(); + } + case BuiltinOptions_HardSwishOptions: { 
+ auto ptr = reinterpret_cast(value); + return CreateHardSwishOptions(_fbb, ptr, _rehasher).Union(); + } + case BuiltinOptions_IfOptions: { + auto ptr = reinterpret_cast(value); + return CreateIfOptions(_fbb, ptr, _rehasher).Union(); + } + case BuiltinOptions_WhileOptions: { + auto ptr = reinterpret_cast(value); + return CreateWhileOptions(_fbb, ptr, _rehasher).Union(); + } + case BuiltinOptions_DepthToSpaceOptions: { + auto ptr = reinterpret_cast(value); + return CreateDepthToSpaceOptions(_fbb, ptr, _rehasher).Union(); + } + case BuiltinOptions_NonMaxSuppressionV4Options: { + auto ptr = reinterpret_cast(value); + return CreateNonMaxSuppressionV4Options(_fbb, ptr, _rehasher).Union(); + } + case BuiltinOptions_NonMaxSuppressionV5Options: { + auto ptr = reinterpret_cast(value); + return CreateNonMaxSuppressionV5Options(_fbb, ptr, _rehasher).Union(); + } + case BuiltinOptions_ScatterNdOptions: { + auto ptr = reinterpret_cast(value); + return CreateScatterNdOptions(_fbb, ptr, _rehasher).Union(); + } + case BuiltinOptions_SelectV2Options: { + auto ptr = reinterpret_cast(value); + return CreateSelectV2Options(_fbb, ptr, _rehasher).Union(); + } + case BuiltinOptions_DensifyOptions: { + auto ptr = reinterpret_cast(value); + return CreateDensifyOptions(_fbb, ptr, _rehasher).Union(); + } + case BuiltinOptions_SegmentSumOptions: { + auto ptr = reinterpret_cast(value); + return CreateSegmentSumOptions(_fbb, ptr, _rehasher).Union(); + } + case BuiltinOptions_BatchMatMulOptions: { + auto ptr = reinterpret_cast(value); + return CreateBatchMatMulOptions(_fbb, ptr, _rehasher).Union(); + } + case BuiltinOptions_CumsumOptions: { + auto ptr = reinterpret_cast(value); + return CreateCumsumOptions(_fbb, ptr, _rehasher).Union(); + } + case BuiltinOptions_CallOnceOptions: { + auto ptr = reinterpret_cast(value); + return CreateCallOnceOptions(_fbb, ptr, _rehasher).Union(); + } + case BuiltinOptions_BroadcastToOptions: { + auto ptr = reinterpret_cast(value); + return 
CreateBroadcastToOptions(_fbb, ptr, _rehasher).Union(); + } + case BuiltinOptions_Rfft2dOptions: { + auto ptr = reinterpret_cast(value); + return CreateRfft2dOptions(_fbb, ptr, _rehasher).Union(); + } + case BuiltinOptions_Conv3DOptions: { + auto ptr = reinterpret_cast(value); + return CreateConv3DOptions(_fbb, ptr, _rehasher).Union(); + } + case BuiltinOptions_HashtableOptions: { + auto ptr = reinterpret_cast(value); + return CreateHashtableOptions(_fbb, ptr, _rehasher).Union(); + } + case BuiltinOptions_HashtableFindOptions: { + auto ptr = reinterpret_cast(value); + return CreateHashtableFindOptions(_fbb, ptr, _rehasher).Union(); + } + case BuiltinOptions_HashtableImportOptions: { + auto ptr = reinterpret_cast(value); + return CreateHashtableImportOptions(_fbb, ptr, _rehasher).Union(); + } + case BuiltinOptions_HashtableSizeOptions: { + auto ptr = reinterpret_cast(value); + return CreateHashtableSizeOptions(_fbb, ptr, _rehasher).Union(); + } + case BuiltinOptions_VarHandleOptions: { + auto ptr = reinterpret_cast(value); + return CreateVarHandleOptions(_fbb, ptr, _rehasher).Union(); + } + case BuiltinOptions_ReadVariableOptions: { + auto ptr = reinterpret_cast(value); + return CreateReadVariableOptions(_fbb, ptr, _rehasher).Union(); + } + case BuiltinOptions_AssignVariableOptions: { + auto ptr = reinterpret_cast(value); + return CreateAssignVariableOptions(_fbb, ptr, _rehasher).Union(); + } + case BuiltinOptions_RandomOptions: { + auto ptr = reinterpret_cast(value); + return CreateRandomOptions(_fbb, ptr, _rehasher).Union(); + } + case BuiltinOptions_BucketizeOptions: { + auto ptr = reinterpret_cast(value); + return CreateBucketizeOptions(_fbb, ptr, _rehasher).Union(); + } + case BuiltinOptions_GeluOptions: { + auto ptr = reinterpret_cast(value); + return CreateGeluOptions(_fbb, ptr, _rehasher).Union(); + } + case BuiltinOptions_DynamicUpdateSliceOptions: { + auto ptr = reinterpret_cast(value); + return CreateDynamicUpdateSliceOptions(_fbb, ptr, 
_rehasher).Union(); + } + case BuiltinOptions_UnsortedSegmentProdOptions: { + auto ptr = reinterpret_cast(value); + return CreateUnsortedSegmentProdOptions(_fbb, ptr, _rehasher).Union(); + } + case BuiltinOptions_UnsortedSegmentMaxOptions: { + auto ptr = reinterpret_cast(value); + return CreateUnsortedSegmentMaxOptions(_fbb, ptr, _rehasher).Union(); + } + case BuiltinOptions_UnsortedSegmentMinOptions: { + auto ptr = reinterpret_cast(value); + return CreateUnsortedSegmentMinOptions(_fbb, ptr, _rehasher).Union(); + } + case BuiltinOptions_UnsortedSegmentSumOptions: { + auto ptr = reinterpret_cast(value); + return CreateUnsortedSegmentSumOptions(_fbb, ptr, _rehasher).Union(); + } + case BuiltinOptions_ATan2Options: { + auto ptr = reinterpret_cast(value); + return CreateATan2Options(_fbb, ptr, _rehasher).Union(); + } + case BuiltinOptions_SignOptions: { + auto ptr = reinterpret_cast(value); + return CreateSignOptions(_fbb, ptr, _rehasher).Union(); + } + case BuiltinOptions_BitcastOptions: { + auto ptr = reinterpret_cast(value); + return CreateBitcastOptions(_fbb, ptr, _rehasher).Union(); + } + case BuiltinOptions_BitwiseXorOptions: { + auto ptr = reinterpret_cast(value); + return CreateBitwiseXorOptions(_fbb, ptr, _rehasher).Union(); + } + case BuiltinOptions_RightShiftOptions: { + auto ptr = reinterpret_cast(value); + return CreateRightShiftOptions(_fbb, ptr, _rehasher).Union(); + } + default: return 0; + } +} + +inline BuiltinOptionsUnion::BuiltinOptionsUnion(const BuiltinOptionsUnion &u) : type(u.type), value(nullptr) { + switch (type) { + case BuiltinOptions_Conv2DOptions: { + value = new tflite::Conv2DOptionsT(*reinterpret_cast(u.value)); + break; + } + case BuiltinOptions_DepthwiseConv2DOptions: { + value = new tflite::DepthwiseConv2DOptionsT(*reinterpret_cast(u.value)); + break; + } + case BuiltinOptions_ConcatEmbeddingsOptions: { + value = new tflite::ConcatEmbeddingsOptionsT(*reinterpret_cast(u.value)); + break; + } + case BuiltinOptions_LSHProjectionOptions: 
{ + value = new tflite::LSHProjectionOptionsT(*reinterpret_cast(u.value)); + break; + } + case BuiltinOptions_Pool2DOptions: { + value = new tflite::Pool2DOptionsT(*reinterpret_cast(u.value)); + break; + } + case BuiltinOptions_SVDFOptions: { + value = new tflite::SVDFOptionsT(*reinterpret_cast(u.value)); + break; + } + case BuiltinOptions_RNNOptions: { + value = new tflite::RNNOptionsT(*reinterpret_cast(u.value)); + break; + } + case BuiltinOptions_FullyConnectedOptions: { + value = new tflite::FullyConnectedOptionsT(*reinterpret_cast(u.value)); + break; + } + case BuiltinOptions_SoftmaxOptions: { + value = new tflite::SoftmaxOptionsT(*reinterpret_cast(u.value)); + break; + } + case BuiltinOptions_ConcatenationOptions: { + value = new tflite::ConcatenationOptionsT(*reinterpret_cast(u.value)); + break; + } + case BuiltinOptions_AddOptions: { + value = new tflite::AddOptionsT(*reinterpret_cast(u.value)); + break; + } + case BuiltinOptions_L2NormOptions: { + value = new tflite::L2NormOptionsT(*reinterpret_cast(u.value)); + break; + } + case BuiltinOptions_LocalResponseNormalizationOptions: { + value = new tflite::LocalResponseNormalizationOptionsT(*reinterpret_cast(u.value)); + break; + } + case BuiltinOptions_LSTMOptions: { + value = new tflite::LSTMOptionsT(*reinterpret_cast(u.value)); + break; + } + case BuiltinOptions_ResizeBilinearOptions: { + value = new tflite::ResizeBilinearOptionsT(*reinterpret_cast(u.value)); + break; + } + case BuiltinOptions_CallOptions: { + value = new tflite::CallOptionsT(*reinterpret_cast(u.value)); + break; + } + case BuiltinOptions_ReshapeOptions: { + value = new tflite::ReshapeOptionsT(*reinterpret_cast(u.value)); + break; + } + case BuiltinOptions_SkipGramOptions: { + value = new tflite::SkipGramOptionsT(*reinterpret_cast(u.value)); + break; + } + case BuiltinOptions_SpaceToDepthOptions: { + value = new tflite::SpaceToDepthOptionsT(*reinterpret_cast(u.value)); + break; + } + case BuiltinOptions_EmbeddingLookupSparseOptions: { + 
value = new tflite::EmbeddingLookupSparseOptionsT(*reinterpret_cast(u.value)); + break; + } + case BuiltinOptions_MulOptions: { + value = new tflite::MulOptionsT(*reinterpret_cast(u.value)); + break; + } + case BuiltinOptions_PadOptions: { + value = new tflite::PadOptionsT(*reinterpret_cast(u.value)); + break; + } + case BuiltinOptions_GatherOptions: { + value = new tflite::GatherOptionsT(*reinterpret_cast(u.value)); + break; + } + case BuiltinOptions_BatchToSpaceNDOptions: { + value = new tflite::BatchToSpaceNDOptionsT(*reinterpret_cast(u.value)); + break; + } + case BuiltinOptions_SpaceToBatchNDOptions: { + value = new tflite::SpaceToBatchNDOptionsT(*reinterpret_cast(u.value)); + break; + } + case BuiltinOptions_TransposeOptions: { + value = new tflite::TransposeOptionsT(*reinterpret_cast(u.value)); + break; + } + case BuiltinOptions_ReducerOptions: { + value = new tflite::ReducerOptionsT(*reinterpret_cast(u.value)); + break; + } + case BuiltinOptions_SubOptions: { + value = new tflite::SubOptionsT(*reinterpret_cast(u.value)); + break; + } + case BuiltinOptions_DivOptions: { + value = new tflite::DivOptionsT(*reinterpret_cast(u.value)); + break; + } + case BuiltinOptions_SqueezeOptions: { + value = new tflite::SqueezeOptionsT(*reinterpret_cast(u.value)); + break; + } + case BuiltinOptions_SequenceRNNOptions: { + value = new tflite::SequenceRNNOptionsT(*reinterpret_cast(u.value)); + break; + } + case BuiltinOptions_StridedSliceOptions: { + value = new tflite::StridedSliceOptionsT(*reinterpret_cast(u.value)); + break; + } + case BuiltinOptions_ExpOptions: { + value = new tflite::ExpOptionsT(*reinterpret_cast(u.value)); + break; + } + case BuiltinOptions_TopKV2Options: { + value = new tflite::TopKV2OptionsT(*reinterpret_cast(u.value)); + break; + } + case BuiltinOptions_SplitOptions: { + value = new tflite::SplitOptionsT(*reinterpret_cast(u.value)); + break; + } + case BuiltinOptions_LogSoftmaxOptions: { + value = new 
tflite::LogSoftmaxOptionsT(*reinterpret_cast(u.value)); + break; + } + case BuiltinOptions_CastOptions: { + value = new tflite::CastOptionsT(*reinterpret_cast(u.value)); + break; + } + case BuiltinOptions_DequantizeOptions: { + value = new tflite::DequantizeOptionsT(*reinterpret_cast(u.value)); + break; + } + case BuiltinOptions_MaximumMinimumOptions: { + value = new tflite::MaximumMinimumOptionsT(*reinterpret_cast(u.value)); + break; + } + case BuiltinOptions_ArgMaxOptions: { + value = new tflite::ArgMaxOptionsT(*reinterpret_cast(u.value)); + break; + } + case BuiltinOptions_LessOptions: { + value = new tflite::LessOptionsT(*reinterpret_cast(u.value)); + break; + } + case BuiltinOptions_NegOptions: { + value = new tflite::NegOptionsT(*reinterpret_cast(u.value)); + break; + } + case BuiltinOptions_PadV2Options: { + value = new tflite::PadV2OptionsT(*reinterpret_cast(u.value)); + break; + } + case BuiltinOptions_GreaterOptions: { + value = new tflite::GreaterOptionsT(*reinterpret_cast(u.value)); + break; + } + case BuiltinOptions_GreaterEqualOptions: { + value = new tflite::GreaterEqualOptionsT(*reinterpret_cast(u.value)); + break; + } + case BuiltinOptions_LessEqualOptions: { + value = new tflite::LessEqualOptionsT(*reinterpret_cast(u.value)); + break; + } + case BuiltinOptions_SelectOptions: { + value = new tflite::SelectOptionsT(*reinterpret_cast(u.value)); + break; + } + case BuiltinOptions_SliceOptions: { + value = new tflite::SliceOptionsT(*reinterpret_cast(u.value)); + break; + } + case BuiltinOptions_TransposeConvOptions: { + value = new tflite::TransposeConvOptionsT(*reinterpret_cast(u.value)); + break; + } + case BuiltinOptions_SparseToDenseOptions: { + value = new tflite::SparseToDenseOptionsT(*reinterpret_cast(u.value)); + break; + } + case BuiltinOptions_TileOptions: { + value = new tflite::TileOptionsT(*reinterpret_cast(u.value)); + break; + } + case BuiltinOptions_ExpandDimsOptions: { + value = new 
tflite::ExpandDimsOptionsT(*reinterpret_cast(u.value)); + break; + } + case BuiltinOptions_EqualOptions: { + value = new tflite::EqualOptionsT(*reinterpret_cast(u.value)); + break; + } + case BuiltinOptions_NotEqualOptions: { + value = new tflite::NotEqualOptionsT(*reinterpret_cast(u.value)); + break; + } + case BuiltinOptions_ShapeOptions: { + value = new tflite::ShapeOptionsT(*reinterpret_cast(u.value)); + break; + } + case BuiltinOptions_PowOptions: { + value = new tflite::PowOptionsT(*reinterpret_cast(u.value)); + break; + } + case BuiltinOptions_ArgMinOptions: { + value = new tflite::ArgMinOptionsT(*reinterpret_cast(u.value)); + break; + } + case BuiltinOptions_FakeQuantOptions: { + value = new tflite::FakeQuantOptionsT(*reinterpret_cast(u.value)); + break; + } + case BuiltinOptions_PackOptions: { + value = new tflite::PackOptionsT(*reinterpret_cast(u.value)); + break; + } + case BuiltinOptions_LogicalOrOptions: { + value = new tflite::LogicalOrOptionsT(*reinterpret_cast(u.value)); + break; + } + case BuiltinOptions_OneHotOptions: { + value = new tflite::OneHotOptionsT(*reinterpret_cast(u.value)); + break; + } + case BuiltinOptions_LogicalAndOptions: { + value = new tflite::LogicalAndOptionsT(*reinterpret_cast(u.value)); + break; + } + case BuiltinOptions_LogicalNotOptions: { + value = new tflite::LogicalNotOptionsT(*reinterpret_cast(u.value)); + break; + } + case BuiltinOptions_UnpackOptions: { + value = new tflite::UnpackOptionsT(*reinterpret_cast(u.value)); + break; + } + case BuiltinOptions_FloorDivOptions: { + value = new tflite::FloorDivOptionsT(*reinterpret_cast(u.value)); + break; + } + case BuiltinOptions_SquareOptions: { + value = new tflite::SquareOptionsT(*reinterpret_cast(u.value)); + break; + } + case BuiltinOptions_ZerosLikeOptions: { + value = new tflite::ZerosLikeOptionsT(*reinterpret_cast(u.value)); + break; + } + case BuiltinOptions_FillOptions: { + value = new tflite::FillOptionsT(*reinterpret_cast(u.value)); + break; + } + case 
BuiltinOptions_BidirectionalSequenceLSTMOptions: { + value = new tflite::BidirectionalSequenceLSTMOptionsT(*reinterpret_cast(u.value)); + break; + } + case BuiltinOptions_BidirectionalSequenceRNNOptions: { + value = new tflite::BidirectionalSequenceRNNOptionsT(*reinterpret_cast(u.value)); + break; + } + case BuiltinOptions_UnidirectionalSequenceLSTMOptions: { + value = new tflite::UnidirectionalSequenceLSTMOptionsT(*reinterpret_cast(u.value)); + break; + } + case BuiltinOptions_FloorModOptions: { + value = new tflite::FloorModOptionsT(*reinterpret_cast(u.value)); + break; + } + case BuiltinOptions_RangeOptions: { + value = new tflite::RangeOptionsT(*reinterpret_cast(u.value)); + break; + } + case BuiltinOptions_ResizeNearestNeighborOptions: { + value = new tflite::ResizeNearestNeighborOptionsT(*reinterpret_cast(u.value)); + break; + } + case BuiltinOptions_LeakyReluOptions: { + value = new tflite::LeakyReluOptionsT(*reinterpret_cast(u.value)); + break; + } + case BuiltinOptions_SquaredDifferenceOptions: { + value = new tflite::SquaredDifferenceOptionsT(*reinterpret_cast(u.value)); + break; + } + case BuiltinOptions_MirrorPadOptions: { + value = new tflite::MirrorPadOptionsT(*reinterpret_cast(u.value)); + break; + } + case BuiltinOptions_AbsOptions: { + value = new tflite::AbsOptionsT(*reinterpret_cast(u.value)); + break; + } + case BuiltinOptions_SplitVOptions: { + value = new tflite::SplitVOptionsT(*reinterpret_cast(u.value)); + break; + } + case BuiltinOptions_UniqueOptions: { + value = new tflite::UniqueOptionsT(*reinterpret_cast(u.value)); + break; + } + case BuiltinOptions_ReverseV2Options: { + value = new tflite::ReverseV2OptionsT(*reinterpret_cast(u.value)); + break; + } + case BuiltinOptions_AddNOptions: { + value = new tflite::AddNOptionsT(*reinterpret_cast(u.value)); + break; + } + case BuiltinOptions_GatherNdOptions: { + value = new tflite::GatherNdOptionsT(*reinterpret_cast(u.value)); + break; + } + case BuiltinOptions_CosOptions: { + value = new 
tflite::CosOptionsT(*reinterpret_cast(u.value)); + break; + } + case BuiltinOptions_WhereOptions: { + value = new tflite::WhereOptionsT(*reinterpret_cast(u.value)); + break; + } + case BuiltinOptions_RankOptions: { + value = new tflite::RankOptionsT(*reinterpret_cast(u.value)); + break; + } + case BuiltinOptions_ReverseSequenceOptions: { + value = new tflite::ReverseSequenceOptionsT(*reinterpret_cast(u.value)); + break; + } + case BuiltinOptions_MatrixDiagOptions: { + value = new tflite::MatrixDiagOptionsT(*reinterpret_cast(u.value)); + break; + } + case BuiltinOptions_QuantizeOptions: { + value = new tflite::QuantizeOptionsT(*reinterpret_cast(u.value)); + break; + } + case BuiltinOptions_MatrixSetDiagOptions: { + value = new tflite::MatrixSetDiagOptionsT(*reinterpret_cast(u.value)); + break; + } + case BuiltinOptions_HardSwishOptions: { + value = new tflite::HardSwishOptionsT(*reinterpret_cast(u.value)); + break; + } + case BuiltinOptions_IfOptions: { + value = new tflite::IfOptionsT(*reinterpret_cast(u.value)); + break; + } + case BuiltinOptions_WhileOptions: { + value = new tflite::WhileOptionsT(*reinterpret_cast(u.value)); + break; + } + case BuiltinOptions_DepthToSpaceOptions: { + value = new tflite::DepthToSpaceOptionsT(*reinterpret_cast(u.value)); + break; + } + case BuiltinOptions_NonMaxSuppressionV4Options: { + value = new tflite::NonMaxSuppressionV4OptionsT(*reinterpret_cast(u.value)); + break; + } + case BuiltinOptions_NonMaxSuppressionV5Options: { + value = new tflite::NonMaxSuppressionV5OptionsT(*reinterpret_cast(u.value)); + break; + } + case BuiltinOptions_ScatterNdOptions: { + value = new tflite::ScatterNdOptionsT(*reinterpret_cast(u.value)); + break; + } + case BuiltinOptions_SelectV2Options: { + value = new tflite::SelectV2OptionsT(*reinterpret_cast(u.value)); + break; + } + case BuiltinOptions_DensifyOptions: { + value = new tflite::DensifyOptionsT(*reinterpret_cast(u.value)); + break; + } + case BuiltinOptions_SegmentSumOptions: { + value = new 
tflite::SegmentSumOptionsT(*reinterpret_cast(u.value)); + break; + } + case BuiltinOptions_BatchMatMulOptions: { + value = new tflite::BatchMatMulOptionsT(*reinterpret_cast(u.value)); + break; + } + case BuiltinOptions_CumsumOptions: { + value = new tflite::CumsumOptionsT(*reinterpret_cast(u.value)); + break; + } + case BuiltinOptions_CallOnceOptions: { + value = new tflite::CallOnceOptionsT(*reinterpret_cast(u.value)); + break; + } + case BuiltinOptions_BroadcastToOptions: { + value = new tflite::BroadcastToOptionsT(*reinterpret_cast(u.value)); + break; + } + case BuiltinOptions_Rfft2dOptions: { + value = new tflite::Rfft2dOptionsT(*reinterpret_cast(u.value)); + break; + } + case BuiltinOptions_Conv3DOptions: { + value = new tflite::Conv3DOptionsT(*reinterpret_cast(u.value)); + break; + } + case BuiltinOptions_HashtableOptions: { + value = new tflite::HashtableOptionsT(*reinterpret_cast(u.value)); + break; + } + case BuiltinOptions_HashtableFindOptions: { + value = new tflite::HashtableFindOptionsT(*reinterpret_cast(u.value)); + break; + } + case BuiltinOptions_HashtableImportOptions: { + value = new tflite::HashtableImportOptionsT(*reinterpret_cast(u.value)); + break; + } + case BuiltinOptions_HashtableSizeOptions: { + value = new tflite::HashtableSizeOptionsT(*reinterpret_cast(u.value)); + break; + } + case BuiltinOptions_VarHandleOptions: { + value = new tflite::VarHandleOptionsT(*reinterpret_cast(u.value)); + break; + } + case BuiltinOptions_ReadVariableOptions: { + value = new tflite::ReadVariableOptionsT(*reinterpret_cast(u.value)); + break; + } + case BuiltinOptions_AssignVariableOptions: { + value = new tflite::AssignVariableOptionsT(*reinterpret_cast(u.value)); + break; + } + case BuiltinOptions_RandomOptions: { + value = new tflite::RandomOptionsT(*reinterpret_cast(u.value)); + break; + } + case BuiltinOptions_BucketizeOptions: { + value = new tflite::BucketizeOptionsT(*reinterpret_cast(u.value)); + break; + } + case BuiltinOptions_GeluOptions: { + value 
= new tflite::GeluOptionsT(*reinterpret_cast(u.value)); + break; + } + case BuiltinOptions_DynamicUpdateSliceOptions: { + value = new tflite::DynamicUpdateSliceOptionsT(*reinterpret_cast(u.value)); + break; + } + case BuiltinOptions_UnsortedSegmentProdOptions: { + value = new tflite::UnsortedSegmentProdOptionsT(*reinterpret_cast(u.value)); + break; + } + case BuiltinOptions_UnsortedSegmentMaxOptions: { + value = new tflite::UnsortedSegmentMaxOptionsT(*reinterpret_cast(u.value)); + break; + } + case BuiltinOptions_UnsortedSegmentMinOptions: { + value = new tflite::UnsortedSegmentMinOptionsT(*reinterpret_cast(u.value)); + break; + } + case BuiltinOptions_UnsortedSegmentSumOptions: { + value = new tflite::UnsortedSegmentSumOptionsT(*reinterpret_cast(u.value)); + break; + } + case BuiltinOptions_ATan2Options: { + value = new tflite::ATan2OptionsT(*reinterpret_cast(u.value)); + break; + } + case BuiltinOptions_SignOptions: { + value = new tflite::SignOptionsT(*reinterpret_cast(u.value)); + break; + } + case BuiltinOptions_BitcastOptions: { + value = new tflite::BitcastOptionsT(*reinterpret_cast(u.value)); + break; + } + case BuiltinOptions_BitwiseXorOptions: { + value = new tflite::BitwiseXorOptionsT(*reinterpret_cast(u.value)); + break; + } + case BuiltinOptions_RightShiftOptions: { + value = new tflite::RightShiftOptionsT(*reinterpret_cast(u.value)); + break; + } + default: + break; + } +} + +inline void BuiltinOptionsUnion::Reset() { + switch (type) { + case BuiltinOptions_Conv2DOptions: { + auto ptr = reinterpret_cast(value); + delete ptr; + break; + } + case BuiltinOptions_DepthwiseConv2DOptions: { + auto ptr = reinterpret_cast(value); + delete ptr; + break; + } + case BuiltinOptions_ConcatEmbeddingsOptions: { + auto ptr = reinterpret_cast(value); + delete ptr; + break; + } + case BuiltinOptions_LSHProjectionOptions: { + auto ptr = reinterpret_cast(value); + delete ptr; + break; + } + case BuiltinOptions_Pool2DOptions: { + auto ptr = reinterpret_cast(value); + 
delete ptr; + break; + } + case BuiltinOptions_SVDFOptions: { + auto ptr = reinterpret_cast(value); + delete ptr; + break; + } + case BuiltinOptions_RNNOptions: { + auto ptr = reinterpret_cast(value); + delete ptr; + break; + } + case BuiltinOptions_FullyConnectedOptions: { + auto ptr = reinterpret_cast(value); + delete ptr; + break; + } + case BuiltinOptions_SoftmaxOptions: { + auto ptr = reinterpret_cast(value); + delete ptr; + break; + } + case BuiltinOptions_ConcatenationOptions: { + auto ptr = reinterpret_cast(value); + delete ptr; + break; + } + case BuiltinOptions_AddOptions: { + auto ptr = reinterpret_cast(value); + delete ptr; + break; + } + case BuiltinOptions_L2NormOptions: { + auto ptr = reinterpret_cast(value); + delete ptr; + break; + } + case BuiltinOptions_LocalResponseNormalizationOptions: { + auto ptr = reinterpret_cast(value); + delete ptr; + break; + } + case BuiltinOptions_LSTMOptions: { + auto ptr = reinterpret_cast(value); + delete ptr; + break; + } + case BuiltinOptions_ResizeBilinearOptions: { + auto ptr = reinterpret_cast(value); + delete ptr; + break; + } + case BuiltinOptions_CallOptions: { + auto ptr = reinterpret_cast(value); + delete ptr; + break; + } + case BuiltinOptions_ReshapeOptions: { + auto ptr = reinterpret_cast(value); + delete ptr; + break; + } + case BuiltinOptions_SkipGramOptions: { + auto ptr = reinterpret_cast(value); + delete ptr; + break; + } + case BuiltinOptions_SpaceToDepthOptions: { + auto ptr = reinterpret_cast(value); + delete ptr; + break; + } + case BuiltinOptions_EmbeddingLookupSparseOptions: { + auto ptr = reinterpret_cast(value); + delete ptr; + break; + } + case BuiltinOptions_MulOptions: { + auto ptr = reinterpret_cast(value); + delete ptr; + break; + } + case BuiltinOptions_PadOptions: { + auto ptr = reinterpret_cast(value); + delete ptr; + break; + } + case BuiltinOptions_GatherOptions: { + auto ptr = reinterpret_cast(value); + delete ptr; + break; + } + case BuiltinOptions_BatchToSpaceNDOptions: { + 
auto ptr = reinterpret_cast(value); + delete ptr; + break; + } + case BuiltinOptions_SpaceToBatchNDOptions: { + auto ptr = reinterpret_cast(value); + delete ptr; + break; + } + case BuiltinOptions_TransposeOptions: { + auto ptr = reinterpret_cast(value); + delete ptr; + break; + } + case BuiltinOptions_ReducerOptions: { + auto ptr = reinterpret_cast(value); + delete ptr; + break; + } + case BuiltinOptions_SubOptions: { + auto ptr = reinterpret_cast(value); + delete ptr; + break; + } + case BuiltinOptions_DivOptions: { + auto ptr = reinterpret_cast(value); + delete ptr; + break; + } + case BuiltinOptions_SqueezeOptions: { + auto ptr = reinterpret_cast(value); + delete ptr; + break; + } + case BuiltinOptions_SequenceRNNOptions: { + auto ptr = reinterpret_cast(value); + delete ptr; + break; + } + case BuiltinOptions_StridedSliceOptions: { + auto ptr = reinterpret_cast(value); + delete ptr; + break; + } + case BuiltinOptions_ExpOptions: { + auto ptr = reinterpret_cast(value); + delete ptr; + break; + } + case BuiltinOptions_TopKV2Options: { + auto ptr = reinterpret_cast(value); + delete ptr; + break; + } + case BuiltinOptions_SplitOptions: { + auto ptr = reinterpret_cast(value); + delete ptr; + break; + } + case BuiltinOptions_LogSoftmaxOptions: { + auto ptr = reinterpret_cast(value); + delete ptr; + break; + } + case BuiltinOptions_CastOptions: { + auto ptr = reinterpret_cast(value); + delete ptr; + break; + } + case BuiltinOptions_DequantizeOptions: { + auto ptr = reinterpret_cast(value); + delete ptr; + break; + } + case BuiltinOptions_MaximumMinimumOptions: { + auto ptr = reinterpret_cast(value); + delete ptr; + break; + } + case BuiltinOptions_ArgMaxOptions: { + auto ptr = reinterpret_cast(value); + delete ptr; + break; + } + case BuiltinOptions_LessOptions: { + auto ptr = reinterpret_cast(value); + delete ptr; + break; + } + case BuiltinOptions_NegOptions: { + auto ptr = reinterpret_cast(value); + delete ptr; + break; + } + case BuiltinOptions_PadV2Options: { + 
auto ptr = reinterpret_cast(value); + delete ptr; + break; + } + case BuiltinOptions_GreaterOptions: { + auto ptr = reinterpret_cast(value); + delete ptr; + break; + } + case BuiltinOptions_GreaterEqualOptions: { + auto ptr = reinterpret_cast(value); + delete ptr; + break; + } + case BuiltinOptions_LessEqualOptions: { + auto ptr = reinterpret_cast(value); + delete ptr; + break; + } + case BuiltinOptions_SelectOptions: { + auto ptr = reinterpret_cast(value); + delete ptr; + break; + } + case BuiltinOptions_SliceOptions: { + auto ptr = reinterpret_cast(value); + delete ptr; + break; + } + case BuiltinOptions_TransposeConvOptions: { + auto ptr = reinterpret_cast(value); + delete ptr; + break; + } + case BuiltinOptions_SparseToDenseOptions: { + auto ptr = reinterpret_cast(value); + delete ptr; + break; + } + case BuiltinOptions_TileOptions: { + auto ptr = reinterpret_cast(value); + delete ptr; + break; + } + case BuiltinOptions_ExpandDimsOptions: { + auto ptr = reinterpret_cast(value); + delete ptr; + break; + } + case BuiltinOptions_EqualOptions: { + auto ptr = reinterpret_cast(value); + delete ptr; + break; + } + case BuiltinOptions_NotEqualOptions: { + auto ptr = reinterpret_cast(value); + delete ptr; + break; + } + case BuiltinOptions_ShapeOptions: { + auto ptr = reinterpret_cast(value); + delete ptr; + break; + } + case BuiltinOptions_PowOptions: { + auto ptr = reinterpret_cast(value); + delete ptr; + break; + } + case BuiltinOptions_ArgMinOptions: { + auto ptr = reinterpret_cast(value); + delete ptr; + break; + } + case BuiltinOptions_FakeQuantOptions: { + auto ptr = reinterpret_cast(value); + delete ptr; + break; + } + case BuiltinOptions_PackOptions: { + auto ptr = reinterpret_cast(value); + delete ptr; + break; + } + case BuiltinOptions_LogicalOrOptions: { + auto ptr = reinterpret_cast(value); + delete ptr; + break; + } + case BuiltinOptions_OneHotOptions: { + auto ptr = reinterpret_cast(value); + delete ptr; + break; + } + case 
BuiltinOptions_LogicalAndOptions: { + auto ptr = reinterpret_cast(value); + delete ptr; + break; + } + case BuiltinOptions_LogicalNotOptions: { + auto ptr = reinterpret_cast(value); + delete ptr; + break; + } + case BuiltinOptions_UnpackOptions: { + auto ptr = reinterpret_cast(value); + delete ptr; + break; + } + case BuiltinOptions_FloorDivOptions: { + auto ptr = reinterpret_cast(value); + delete ptr; + break; + } + case BuiltinOptions_SquareOptions: { + auto ptr = reinterpret_cast(value); + delete ptr; + break; + } + case BuiltinOptions_ZerosLikeOptions: { + auto ptr = reinterpret_cast(value); + delete ptr; + break; + } + case BuiltinOptions_FillOptions: { + auto ptr = reinterpret_cast(value); + delete ptr; + break; + } + case BuiltinOptions_BidirectionalSequenceLSTMOptions: { + auto ptr = reinterpret_cast(value); + delete ptr; + break; + } + case BuiltinOptions_BidirectionalSequenceRNNOptions: { + auto ptr = reinterpret_cast(value); + delete ptr; + break; + } + case BuiltinOptions_UnidirectionalSequenceLSTMOptions: { + auto ptr = reinterpret_cast(value); + delete ptr; + break; + } + case BuiltinOptions_FloorModOptions: { + auto ptr = reinterpret_cast(value); + delete ptr; + break; + } + case BuiltinOptions_RangeOptions: { + auto ptr = reinterpret_cast(value); + delete ptr; + break; + } + case BuiltinOptions_ResizeNearestNeighborOptions: { + auto ptr = reinterpret_cast(value); + delete ptr; + break; + } + case BuiltinOptions_LeakyReluOptions: { + auto ptr = reinterpret_cast(value); + delete ptr; + break; + } + case BuiltinOptions_SquaredDifferenceOptions: { + auto ptr = reinterpret_cast(value); + delete ptr; + break; + } + case BuiltinOptions_MirrorPadOptions: { + auto ptr = reinterpret_cast(value); + delete ptr; + break; + } + case BuiltinOptions_AbsOptions: { + auto ptr = reinterpret_cast(value); + delete ptr; + break; + } + case BuiltinOptions_SplitVOptions: { + auto ptr = reinterpret_cast(value); + delete ptr; + break; + } + case BuiltinOptions_UniqueOptions: 
{ + auto ptr = reinterpret_cast(value); + delete ptr; + break; + } + case BuiltinOptions_ReverseV2Options: { + auto ptr = reinterpret_cast(value); + delete ptr; + break; + } + case BuiltinOptions_AddNOptions: { + auto ptr = reinterpret_cast(value); + delete ptr; + break; + } + case BuiltinOptions_GatherNdOptions: { + auto ptr = reinterpret_cast(value); + delete ptr; + break; + } + case BuiltinOptions_CosOptions: { + auto ptr = reinterpret_cast(value); + delete ptr; + break; + } + case BuiltinOptions_WhereOptions: { + auto ptr = reinterpret_cast(value); + delete ptr; + break; + } + case BuiltinOptions_RankOptions: { + auto ptr = reinterpret_cast(value); + delete ptr; + break; + } + case BuiltinOptions_ReverseSequenceOptions: { + auto ptr = reinterpret_cast(value); + delete ptr; + break; + } + case BuiltinOptions_MatrixDiagOptions: { + auto ptr = reinterpret_cast(value); + delete ptr; + break; + } + case BuiltinOptions_QuantizeOptions: { + auto ptr = reinterpret_cast(value); + delete ptr; + break; + } + case BuiltinOptions_MatrixSetDiagOptions: { + auto ptr = reinterpret_cast(value); + delete ptr; + break; + } + case BuiltinOptions_HardSwishOptions: { + auto ptr = reinterpret_cast(value); + delete ptr; + break; + } + case BuiltinOptions_IfOptions: { + auto ptr = reinterpret_cast(value); + delete ptr; + break; + } + case BuiltinOptions_WhileOptions: { + auto ptr = reinterpret_cast(value); + delete ptr; + break; + } + case BuiltinOptions_DepthToSpaceOptions: { + auto ptr = reinterpret_cast(value); + delete ptr; + break; + } + case BuiltinOptions_NonMaxSuppressionV4Options: { + auto ptr = reinterpret_cast(value); + delete ptr; + break; + } + case BuiltinOptions_NonMaxSuppressionV5Options: { + auto ptr = reinterpret_cast(value); + delete ptr; + break; + } + case BuiltinOptions_ScatterNdOptions: { + auto ptr = reinterpret_cast(value); + delete ptr; + break; + } + case BuiltinOptions_SelectV2Options: { + auto ptr = reinterpret_cast(value); + delete ptr; + break; + } + case 
BuiltinOptions_DensifyOptions: { + auto ptr = reinterpret_cast(value); + delete ptr; + break; + } + case BuiltinOptions_SegmentSumOptions: { + auto ptr = reinterpret_cast(value); + delete ptr; + break; + } + case BuiltinOptions_BatchMatMulOptions: { + auto ptr = reinterpret_cast(value); + delete ptr; + break; + } + case BuiltinOptions_CumsumOptions: { + auto ptr = reinterpret_cast(value); + delete ptr; + break; + } + case BuiltinOptions_CallOnceOptions: { + auto ptr = reinterpret_cast(value); + delete ptr; + break; + } + case BuiltinOptions_BroadcastToOptions: { + auto ptr = reinterpret_cast(value); + delete ptr; + break; + } + case BuiltinOptions_Rfft2dOptions: { + auto ptr = reinterpret_cast(value); + delete ptr; + break; + } + case BuiltinOptions_Conv3DOptions: { + auto ptr = reinterpret_cast(value); + delete ptr; + break; + } + case BuiltinOptions_HashtableOptions: { + auto ptr = reinterpret_cast(value); + delete ptr; + break; + } + case BuiltinOptions_HashtableFindOptions: { + auto ptr = reinterpret_cast(value); + delete ptr; + break; + } + case BuiltinOptions_HashtableImportOptions: { + auto ptr = reinterpret_cast(value); + delete ptr; + break; + } + case BuiltinOptions_HashtableSizeOptions: { + auto ptr = reinterpret_cast(value); + delete ptr; + break; + } + case BuiltinOptions_VarHandleOptions: { + auto ptr = reinterpret_cast(value); + delete ptr; + break; + } + case BuiltinOptions_ReadVariableOptions: { + auto ptr = reinterpret_cast(value); + delete ptr; + break; + } + case BuiltinOptions_AssignVariableOptions: { + auto ptr = reinterpret_cast(value); + delete ptr; + break; + } + case BuiltinOptions_RandomOptions: { + auto ptr = reinterpret_cast(value); + delete ptr; + break; + } + case BuiltinOptions_BucketizeOptions: { + auto ptr = reinterpret_cast(value); + delete ptr; + break; + } + case BuiltinOptions_GeluOptions: { + auto ptr = reinterpret_cast(value); + delete ptr; + break; + } + case BuiltinOptions_DynamicUpdateSliceOptions: { + auto ptr = 
reinterpret_cast(value); + delete ptr; + break; + } + case BuiltinOptions_UnsortedSegmentProdOptions: { + auto ptr = reinterpret_cast(value); + delete ptr; + break; + } + case BuiltinOptions_UnsortedSegmentMaxOptions: { + auto ptr = reinterpret_cast(value); + delete ptr; + break; + } + case BuiltinOptions_UnsortedSegmentMinOptions: { + auto ptr = reinterpret_cast(value); + delete ptr; + break; + } + case BuiltinOptions_UnsortedSegmentSumOptions: { + auto ptr = reinterpret_cast(value); + delete ptr; + break; + } + case BuiltinOptions_ATan2Options: { + auto ptr = reinterpret_cast(value); + delete ptr; + break; + } + case BuiltinOptions_SignOptions: { + auto ptr = reinterpret_cast(value); + delete ptr; + break; + } + case BuiltinOptions_BitcastOptions: { + auto ptr = reinterpret_cast(value); + delete ptr; + break; + } + case BuiltinOptions_BitwiseXorOptions: { + auto ptr = reinterpret_cast(value); + delete ptr; + break; + } + case BuiltinOptions_RightShiftOptions: { + auto ptr = reinterpret_cast(value); + delete ptr; + break; + } + default: break; + } + value = nullptr; + type = BuiltinOptions_NONE; +} + +inline const tflite::Model *GetModel(const void *buf) { + return flatbuffers::GetRoot(buf); +} + +inline const tflite::Model *GetSizePrefixedModel(const void *buf) { + return flatbuffers::GetSizePrefixedRoot(buf); +} + +inline const char *ModelIdentifier() { + return "TFL3"; +} + +inline bool ModelBufferHasIdentifier(const void *buf) { + return flatbuffers::BufferHasIdentifier( + buf, ModelIdentifier()); +} + +inline bool SizePrefixedModelBufferHasIdentifier(const void *buf) { + return flatbuffers::BufferHasIdentifier( + buf, ModelIdentifier(), true); +} + +inline bool VerifyModelBuffer( + flatbuffers::Verifier &verifier) { + return verifier.VerifyBuffer(ModelIdentifier()); +} + +inline bool VerifySizePrefixedModelBuffer( + flatbuffers::Verifier &verifier) { + return verifier.VerifySizePrefixedBuffer(ModelIdentifier()); +} + +inline const char *ModelExtension() { + 
return "tflite"; +} + +inline void FinishModelBuffer( + flatbuffers::FlatBufferBuilder &fbb, + flatbuffers::Offset root) { + fbb.Finish(root, ModelIdentifier()); +} + +inline void FinishSizePrefixedModelBuffer( + flatbuffers::FlatBufferBuilder &fbb, + flatbuffers::Offset root) { + fbb.FinishSizePrefixed(root, ModelIdentifier()); +} + +inline std::unique_ptr UnPackModel( + const void *buf, + const flatbuffers::resolver_function_t *res = nullptr) { + return std::unique_ptr(GetModel(buf)->UnPack(res)); +} + +inline std::unique_ptr UnPackSizePrefixedModel( + const void *buf, + const flatbuffers::resolver_function_t *res = nullptr) { + return std::unique_ptr(GetSizePrefixedModel(buf)->UnPack(res)); +} + +} // namespace tflite + +#endif // FLATBUFFERS_GENERATED_SCHEMA_TFLITE_H_ diff --git a/tensorflow/lite/schema/schema_utils.cc b/tensorflow/lite/schema/schema_utils.cc new file mode 100644 index 0000000..fc19290 --- /dev/null +++ b/tensorflow/lite/schema/schema_utils.cc @@ -0,0 +1,62 @@ +/* Copyright 2020 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ +#include "tensorflow/lite/schema/schema_utils.h" + +#include + +#include "tensorflow/lite/kernels/internal/compatibility.h" + +namespace tflite { + +// The following GetBuiltinCode methods are the utility methods for reading +// builtin operatore code, ensuring compatibility issues between v3 and v3a +// schema. 
The maximum value of the two fields will always be the correct +// value as follows: +// +// - Supporting schema version v3 models +// +// The `builtin_code` field is not available in the v3 models. Flatbuffer +// library will feed zero value, which is the default value in the v3a schema. +// The actual builtin operator code value will exist in the +// `deprecated_builtin_code` field. At the same time, it implies that +// `deprecated_builtin_code` >= `builtin_code` and the maximum value of the two +// fields will be the same as `deprecated_builtin_code`. +// +// - Supporting builtin operator codes beyond 127 +// +// New builtin operators, whose operator code is larger than 127, cannot be +// assigned to the `deprecated_builtin_code` field. In such cases, the +// value of the `builtin_code` field should be used for the builtin operator +// code. In that case, the maximum value of the two fields will be the value of +// the `builtin_code` field. + +BuiltinOperator GetBuiltinCode(const OperatorCode* op_code) { + // Caller should guarantee that the given argument value is not a nullptr. + TFLITE_DCHECK(op_code != nullptr); + + return std::max( + op_code->builtin_code(), + static_cast(op_code->deprecated_builtin_code())); +} + +BuiltinOperator GetBuiltinCode(const OperatorCodeT* op_code) { + // Caller should guarantee that the given argument value is not a nullptr. + TFLITE_DCHECK(op_code != nullptr); + + return std::max(op_code->builtin_code, static_cast( + op_code->deprecated_builtin_code)); +} + +} // namespace tflite diff --git a/tensorflow/lite/schema/schema_utils.h b/tensorflow/lite/schema/schema_utils.h new file mode 100644 index 0000000..9cca36c --- /dev/null +++ b/tensorflow/lite/schema/schema_utils.h @@ -0,0 +1,33 @@ +/* Copyright 2020 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License.
+You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ +#ifndef TENSORFLOW_LITE_SCHEMA_SCHEMA_UTILS_H_ +#define TENSORFLOW_LITE_SCHEMA_SCHEMA_UTILS_H_ + +#include "flatbuffers/flatbuffers.h" +#include "tensorflow/lite/schema/schema_generated.h" + +namespace tflite { + +// The following methods are introduced to resolve the op builtin code shortage +// problem. The new builtin operator will be assigned to the extended builtin +// code field in the flatbuffer schema. These methods help to hide builtin code +// details. +BuiltinOperator GetBuiltinCode(const OperatorCode *op_code); + +BuiltinOperator GetBuiltinCode(const OperatorCodeT *op_code); + +} // namespace tflite + +#endif  // TENSORFLOW_LITE_SCHEMA_SCHEMA_UTILS_H_ diff --git a/tensorflow/lite/tools/BUILD b/tensorflow/lite/tools/BUILD new file mode 100644 index 0000000..b5073c9 --- /dev/null +++ b/tensorflow/lite/tools/BUILD @@ -0,0 +1,73 @@ +load("@tflm_pip_deps//:requirements.bzl", "requirement") + +py_library( + name = "flatbuffer_utils", + srcs = ["flatbuffer_utils.py"], + srcs_version = "PY3", + visibility = ["//:__subpackages__"], + deps = [ + "@flatbuffers//:runtime_py", + requirement("tensorflow-cpu"), + "//tensorflow/lite/python:schema_py", + "//tensorflow/lite/python:schema_util", + ], +) + +py_library( + name = "test_utils", + srcs = ["test_utils.py"], + srcs_version = "PY3", + deps = [ + "@flatbuffers//:runtime_py", + requirement("tensorflow-cpu"), + "//tensorflow/lite/python:schema_py", + ], +) + +py_binary( + name = "strip_strings", + srcs = ["strip_strings.py"], + python_version =
"PY3", + srcs_version = "PY3", + deps = [ + ":flatbuffer_utils", + "@absl_py//absl:app", + "@absl_py//absl/flags", + ], +) + +py_binary( + name = "visualize", + srcs = ["visualize.py"], + python_version = "PY3", + srcs_version = "PY3", + visibility = ["//:__subpackages__"], + deps = [ + "//tensorflow/lite/python:schema_py", + requirement("numpy"), + ], +) + +py_test( + name = "flatbuffer_utils_test", + srcs = ["flatbuffer_utils_test.py"], + python_version = "PY3", + srcs_version = "PY3", + deps = [ + ":flatbuffer_utils", + ":test_utils", + requirement("tensorflow-cpu"), + ], +) + +py_test( + name = "visualize_test", + srcs = ["visualize_test.py"], + python_version = "PY3", + srcs_version = "PY3", + deps = [ + ":test_utils", + ":visualize", + requirement("tensorflow-cpu"), + ], +) diff --git a/tensorflow/lite/tools/flatbuffer_utils.py b/tensorflow/lite/tools/flatbuffer_utils.py new file mode 100644 index 0000000..6ea3e30 --- /dev/null +++ b/tensorflow/lite/tools/flatbuffer_utils.py @@ -0,0 +1,399 @@ +# Copyright 2020 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================== +"""Utility functions for FlatBuffers. + +All functions that are commonly used to work with FlatBuffers. 
+ +Refer to the tensorflow lite flatbuffer schema here: +tensorflow/lite/schema/schema.fbs + +""" + +import copy +import random +import re +import struct +import sys + +import flatbuffers +from tflite_micro.tensorflow.lite.python import schema_py_generated as schema_fb +from tflite_micro.tensorflow.lite.python import schema_util +from tensorflow.python.platform import gfile + +_TFLITE_FILE_IDENTIFIER = b'TFL3' + + +def convert_bytearray_to_object(model_bytearray): + """Converts a tflite model from a bytearray to an object for parsing.""" + model_object = schema_fb.Model.GetRootAsModel(model_bytearray, 0) + return schema_fb.ModelT.InitFromObj(model_object) + + +def read_model(input_tflite_file): + """Reads a tflite model as a python object. + + Args: + input_tflite_file: Full path name to the input tflite file + + Raises: + RuntimeError: If input_tflite_file path is invalid. + IOError: If input_tflite_file cannot be opened. + + Returns: + A python object corresponding to the input tflite file. + """ + if not gfile.Exists(input_tflite_file): + raise RuntimeError('Input file not found at %r\n' % input_tflite_file) + with gfile.GFile(input_tflite_file, 'rb') as input_file_handle: + model_bytearray = bytearray(input_file_handle.read()) + model = convert_bytearray_to_object(model_bytearray) + if sys.byteorder == 'big': + byte_swap_tflite_model_obj(model, 'little', 'big') + return model + + +def read_model_with_mutable_tensors(input_tflite_file): + """Reads a tflite model as a python object with mutable tensors. + + Similar to read_model() with the addition that the returned object has + mutable tensors (read_model() returns an object with immutable tensors). + + Args: + input_tflite_file: Full path name to the input tflite file + + Raises: + RuntimeError: If input_tflite_file path is invalid. + IOError: If input_tflite_file cannot be opened. + + Returns: + A mutable python object corresponding to the input tflite file. 
+ """ + return copy.deepcopy(read_model(input_tflite_file)) + + +def convert_object_to_bytearray(model_object): + """Converts a tflite model from an object to a immutable bytearray.""" + # Initial size of the buffer, which will grow automatically if needed + builder = flatbuffers.Builder(1024) + model_offset = model_object.Pack(builder) + builder.Finish(model_offset, file_identifier=_TFLITE_FILE_IDENTIFIER) + model_bytearray = bytes(builder.Output()) + return model_bytearray + + +def write_model(model_object, output_tflite_file): + """Writes the tflite model, a python object, into the output file. + + Args: + model_object: A tflite model as a python object + output_tflite_file: Full path name to the output tflite file. + + Raises: + IOError: If output_tflite_file path is invalid or cannot be opened. + """ + if sys.byteorder == 'big': + model_object = copy.deepcopy(model_object) + byte_swap_tflite_model_obj(model_object, 'big', 'little') + model_bytearray = convert_object_to_bytearray(model_object) + with gfile.GFile(output_tflite_file, 'wb') as output_file_handle: + output_file_handle.write(model_bytearray) + + +def strip_strings(model): + """Strips all nonessential strings from the model to reduce model size. + + We remove the following strings: + (find strings by searching ":string" in the tensorflow lite flatbuffer schema) + 1. Model description + 2. SubGraph name + 3. Tensor names + We retain OperatorCode custom_code and Metadata name. + + Args: + model: The model from which to remove nonessential strings. + """ + + model.description = None + for subgraph in model.subgraphs: + subgraph.name = None + for tensor in subgraph.tensors: + tensor.name = None + # We clear all signature_def structure, since without names it is useless. 
+ model.signatureDefs = None + + +def type_to_name(tensor_type): + """Converts a numerical enum to a readable tensor type.""" + for name, value in schema_fb.TensorType.__dict__.items(): + if value == tensor_type: + return name + return None + + +def randomize_weights(model, random_seed=0, buffers_to_skip=None): + """Randomize weights in a model. + + Args: + model: The model in which to randomize weights. + random_seed: The input to the random number generator (default value is 0). + buffers_to_skip: The list of buffer indices to skip. The weights in these + buffers are left unmodified. + """ + + # The input to the random seed generator. The default value is 0. + random.seed(random_seed) + + # Parse model buffers which store the model weights + buffers = model.buffers + buffer_ids = range(1, len(buffers)) # ignore index 0 as it's always None + if buffers_to_skip is not None: + buffer_ids = [idx for idx in buffer_ids if idx not in buffers_to_skip] + + buffer_types = {} + for graph in model.subgraphs: + for op in graph.operators: + if op.inputs is None: + break + for input_idx in op.inputs: + tensor = graph.tensors[input_idx] + buffer_types[tensor.buffer] = type_to_name(tensor.type) + + for i in buffer_ids: + buffer_i_data = buffers[i].data + buffer_i_size = 0 if buffer_i_data is None else buffer_i_data.size + if buffer_i_size == 0: + continue + + # Raw data buffers are of type ubyte (or uint8) whose values lie in the + # range [0, 255]. Those ubytes (or uint8s) are the underlying + # representation of each datatype. For example, a bias tensor of type + # int32 appears as a buffer 4 times its length of type ubyte (or uint8). + # For floats, we need to generate a valid float and then pack it into + # the raw bytes in place.
+ buffer_type = buffer_types.get(i, 'INT8') + if buffer_type.startswith('FLOAT'): + format_code = 'e' if buffer_type == 'FLOAT16' else 'f' + for offset in range(0, buffer_i_size, struct.calcsize(format_code)): + value = random.uniform(-0.5, 0.5) # See http://b/152324470#comment2 + struct.pack_into(format_code, buffer_i_data, offset, value) + else: + for j in range(buffer_i_size): + buffer_i_data[j] = random.randint(0, 255) + + +def rename_custom_ops(model, map_custom_op_renames): + """Rename custom ops so they use the same naming style as builtin ops. + + Args: + model: The input tflite model. + map_custom_op_renames: A mapping from old to new custom op names. + """ + for op_code in model.operatorCodes: + if op_code.customCode: + op_code_str = op_code.customCode.decode('ascii') + if op_code_str in map_custom_op_renames: + op_code.customCode = map_custom_op_renames[op_code_str].encode('ascii') + + +def opcode_to_name(model, op_code): + """Converts a TFLite op_code to the human readable name. + + Args: + model: The input tflite model. + op_code: The op_code to resolve to a readable name. + + Returns: + A string containing the human readable op name, or None if not resolvable. + """ + op = model.operatorCodes[op_code] + code = max(op.builtinCode, op.deprecatedBuiltinCode) + for name, value in vars(schema_fb.BuiltinOperator).items(): + if value == code: + return name + return None + + +def xxd_output_to_bytes(input_cc_file): + """Converts xxd output C++ source file to bytes (immutable). + + Args: + input_cc_file: Full path name to th C++ source file dumped by xxd + + Raises: + RuntimeError: If input_cc_file path is invalid. + IOError: If input_cc_file cannot be opened. + + Returns: + A bytearray corresponding to the input cc file array. 
+ """ + # Match hex values in the string with comma as separator + pattern = re.compile(r'\W*(0x[0-9a-fA-F,x ]+).*') + + model_bytearray = bytearray() + + with open(input_cc_file) as file_handle: + for line in file_handle: + values_match = pattern.match(line) + + if values_match is None: + continue + + # Match in the parentheses (hex array only) + list_text = values_match.group(1) + + # Extract hex values (text) from the line + # e.g. 0x1c, 0x00, 0x00, 0x00, 0x54, 0x46, 0x4c, + values_text = filter(None, list_text.split(',')) + + # Convert to hex + values = [int(x, base=16) for x in values_text] + model_bytearray.extend(values) + + return bytes(model_bytearray) + + +def xxd_output_to_object(input_cc_file): + """Converts xxd output C++ source file to object. + + Args: + input_cc_file: Full path name to th C++ source file dumped by xxd + + Raises: + RuntimeError: If input_cc_file path is invalid. + IOError: If input_cc_file cannot be opened. + + Returns: + A python object corresponding to the input tflite file. + """ + model_bytes = xxd_output_to_bytes(input_cc_file) + return convert_bytearray_to_object(model_bytes) + + +def byte_swap_buffer_content(buffer, chunksize, from_endiness, to_endiness): + """Helper function for byte-swapping the buffers field.""" + to_swap = [ + buffer.data[i : i + chunksize] + for i in range(0, len(buffer.data), chunksize) + ] + buffer.data = b''.join( + [ + int.from_bytes(byteswap, from_endiness).to_bytes( + chunksize, to_endiness + ) + for byteswap in to_swap + ] + ) + + +def byte_swap_tflite_model_obj(model, from_endiness, to_endiness): + """Byte swaps the buffers field in a TFLite model. + + Args: + model: TFLite model object of from_endiness format. + from_endiness: The original endianness format of the buffers in model. + to_endiness: The destined endianness format of the buffers in model. 
+ """ + if model is None: + return + # Get all the constant buffers, byte swapping them as per their data types + buffer_swapped = [] + types_of_16_bits = [ + schema_fb.TensorType.FLOAT16, + schema_fb.TensorType.INT16, + schema_fb.TensorType.UINT16, + ] + types_of_32_bits = [ + schema_fb.TensorType.FLOAT32, + schema_fb.TensorType.INT32, + schema_fb.TensorType.COMPLEX64, + schema_fb.TensorType.UINT32, + ] + types_of_64_bits = [ + schema_fb.TensorType.INT64, + schema_fb.TensorType.FLOAT64, + schema_fb.TensorType.COMPLEX128, + schema_fb.TensorType.UINT64, + ] + for subgraph in model.subgraphs: + for tensor in subgraph.tensors: + if ( + tensor.buffer > 0 + and tensor.buffer < len(model.buffers) + and tensor.buffer not in buffer_swapped + and model.buffers[tensor.buffer].data is not None + ): + if tensor.type in types_of_16_bits: + byte_swap_buffer_content( + model.buffers[tensor.buffer], 2, from_endiness, to_endiness + ) + elif tensor.type in types_of_32_bits: + byte_swap_buffer_content( + model.buffers[tensor.buffer], 4, from_endiness, to_endiness + ) + elif tensor.type in types_of_64_bits: + byte_swap_buffer_content( + model.buffers[tensor.buffer], 8, from_endiness, to_endiness + ) + else: + continue + buffer_swapped.append(tensor.buffer) + + +def byte_swap_tflite_buffer(tflite_model, from_endiness, to_endiness): + """Generates a new model byte array after byte swapping its buffers field. + + Args: + tflite_model: TFLite flatbuffer in a byte array. + from_endiness: The original endianness format of the buffers in + tflite_model. + to_endiness: The destined endianness format of the buffers in tflite_model. + + Returns: + TFLite flatbuffer in a byte array, after being byte swapped to to_endiness + format. + """ + if tflite_model is None: + return None + # Load TFLite Flatbuffer byte array into an object. 
+ model = convert_bytearray_to_object(tflite_model) + + # Byte swapping the constant buffers as per their data types + byte_swap_tflite_model_obj(model, from_endiness, to_endiness) + + # Return a TFLite flatbuffer as a byte array. + return convert_object_to_bytearray(model) + + +def count_resource_variables(model): + """Calculates the number of unique resource variables in a model. + + Args: + model: the input tflite model, either as bytearray or object. + + Returns: + An integer number representing the number of unique resource variables. + """ + if not isinstance(model, schema_fb.ModelT): + model = convert_bytearray_to_object(model) + unique_shared_names = set() + for subgraph in model.subgraphs: + if subgraph.operators is None: + continue + for op in subgraph.operators: + builtin_code = schema_util.get_builtin_code_from_operator_code( + model.operatorCodes[op.opcodeIndex]) + if builtin_code == schema_fb.BuiltinOperator.VAR_HANDLE: + unique_shared_names.add(op.builtinOptions.sharedName) + return len(unique_shared_names) diff --git a/tensorflow/lite/tools/flatbuffer_utils_test.py b/tensorflow/lite/tools/flatbuffer_utils_test.py new file mode 100644 index 0000000..7a49d9d --- /dev/null +++ b/tensorflow/lite/tools/flatbuffer_utils_test.py @@ -0,0 +1,253 @@ +# Copyright 2020 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# ============================================================================== +"""Tests for flatbuffer_utils.py.""" +import copy +import os +import subprocess +import sys + +from tflite_micro.tensorflow.lite.tools import flatbuffer_utils +from tflite_micro.tensorflow.lite.tools import test_utils +from tensorflow.python.framework import test_util +from tensorflow.python.platform import test + +_SKIPPED_BUFFER_INDEX = 1 + + +class WriteReadModelTest(test_util.TensorFlowTestCase): + + def testWriteReadModel(self): + # 1. SETUP + # Define the initial model + initial_model = test_utils.build_mock_model() + # Define temporary files + tmp_dir = self.get_temp_dir() + model_filename = os.path.join(tmp_dir, 'model.tflite') + + # 2. INVOKE + # Invoke the write_model and read_model functions + flatbuffer_utils.write_model(initial_model, model_filename) + final_model = flatbuffer_utils.read_model(model_filename) + + # 3. VALIDATE + # Validate that the initial and final models are the same + # Validate the description + self.assertEqual(initial_model.description, final_model.description) + # Validate the main subgraph's name, inputs, outputs, operators and tensors + initial_subgraph = initial_model.subgraphs[0] + final_subgraph = final_model.subgraphs[0] + self.assertEqual(initial_subgraph.name, final_subgraph.name) + for i in range(len(initial_subgraph.inputs)): + self.assertEqual(initial_subgraph.inputs[i], final_subgraph.inputs[i]) + for i in range(len(initial_subgraph.outputs)): + self.assertEqual(initial_subgraph.outputs[i], final_subgraph.outputs[i]) + for i in range(len(initial_subgraph.operators)): + self.assertEqual(initial_subgraph.operators[i].opcodeIndex, + final_subgraph.operators[i].opcodeIndex) + initial_tensors = initial_subgraph.tensors + final_tensors = final_subgraph.tensors + for i in range(len(initial_tensors)): + self.assertEqual(initial_tensors[i].name, final_tensors[i].name) + self.assertEqual(initial_tensors[i].type, final_tensors[i].type) + 
self.assertEqual(initial_tensors[i].buffer, final_tensors[i].buffer) + for j in range(len(initial_tensors[i].shape)): + self.assertEqual(initial_tensors[i].shape[j], final_tensors[i].shape[j]) + # Validate the first valid buffer (index 0 is always None) + initial_buffer = initial_model.buffers[1].data + final_buffer = final_model.buffers[1].data + for i in range(initial_buffer.size): + self.assertEqual(initial_buffer.data[i], final_buffer.data[i]) + + +class StripStringsTest(test_util.TensorFlowTestCase): + + def testStripStrings(self): + # 1. SETUP + # Define the initial model + initial_model = test_utils.build_mock_model() + final_model = copy.deepcopy(initial_model) + + # 2. INVOKE + # Invoke the strip_strings function + flatbuffer_utils.strip_strings(final_model) + + # 3. VALIDATE + # Validate that the initial and final models are the same except strings + # Validate the description + self.assertIsNotNone(initial_model.description) + self.assertIsNone(final_model.description) + self.assertIsNotNone(initial_model.signatureDefs) + self.assertIsNone(final_model.signatureDefs) + + # Validate the main subgraph's name, inputs, outputs, operators and tensors + initial_subgraph = initial_model.subgraphs[0] + final_subgraph = final_model.subgraphs[0] + self.assertIsNotNone(initial_model.subgraphs[0].name) + self.assertIsNone(final_model.subgraphs[0].name) + for i in range(len(initial_subgraph.inputs)): + self.assertEqual(initial_subgraph.inputs[i], final_subgraph.inputs[i]) + for i in range(len(initial_subgraph.outputs)): + self.assertEqual(initial_subgraph.outputs[i], final_subgraph.outputs[i]) + for i in range(len(initial_subgraph.operators)): + self.assertEqual(initial_subgraph.operators[i].opcodeIndex, + final_subgraph.operators[i].opcodeIndex) + initial_tensors = initial_subgraph.tensors + final_tensors = final_subgraph.tensors + for i in range(len(initial_tensors)): + self.assertIsNotNone(initial_tensors[i].name) + self.assertIsNone(final_tensors[i].name) + 
self.assertEqual(initial_tensors[i].type, final_tensors[i].type) + self.assertEqual(initial_tensors[i].buffer, final_tensors[i].buffer) + for j in range(len(initial_tensors[i].shape)): + self.assertEqual(initial_tensors[i].shape[j], final_tensors[i].shape[j]) + # Validate the first valid buffer (index 0 is always None) + initial_buffer = initial_model.buffers[1].data + final_buffer = final_model.buffers[1].data + for i in range(initial_buffer.size): + self.assertEqual(initial_buffer.data[i], final_buffer.data[i]) + + +class RandomizeWeightsTest(test_util.TensorFlowTestCase): + + def testRandomizeWeights(self): + # 1. SETUP + # Define the initial model + initial_model = test_utils.build_mock_model() + final_model = copy.deepcopy(initial_model) + + # 2. INVOKE + # Invoke the randomize_weights function + flatbuffer_utils.randomize_weights(final_model) + + # 3. VALIDATE + # Validate that the initial and final models are the same, except that + # the weights in the model buffer have been modified (i.e, randomized) + # Validate the description + self.assertEqual(initial_model.description, final_model.description) + # Validate the main subgraph's name, inputs, outputs, operators and tensors + initial_subgraph = initial_model.subgraphs[0] + final_subgraph = final_model.subgraphs[0] + self.assertEqual(initial_subgraph.name, final_subgraph.name) + for i in range(len(initial_subgraph.inputs)): + self.assertEqual(initial_subgraph.inputs[i], final_subgraph.inputs[i]) + for i in range(len(initial_subgraph.outputs)): + self.assertEqual(initial_subgraph.outputs[i], final_subgraph.outputs[i]) + for i in range(len(initial_subgraph.operators)): + self.assertEqual(initial_subgraph.operators[i].opcodeIndex, + final_subgraph.operators[i].opcodeIndex) + initial_tensors = initial_subgraph.tensors + final_tensors = final_subgraph.tensors + for i in range(len(initial_tensors)): + self.assertEqual(initial_tensors[i].name, final_tensors[i].name) + self.assertEqual(initial_tensors[i].type, 
final_tensors[i].type) + self.assertEqual(initial_tensors[i].buffer, final_tensors[i].buffer) + for j in range(len(initial_tensors[i].shape)): + self.assertEqual(initial_tensors[i].shape[j], final_tensors[i].shape[j]) + # Validate the first valid buffer (index 0 is always None) + initial_buffer = initial_model.buffers[1].data + final_buffer = final_model.buffers[1].data + for j in range(initial_buffer.size): + self.assertNotEqual(initial_buffer.data[j], final_buffer.data[j]) + + def testRandomizeSomeWeights(self): + # 1. SETUP + # Define the initial model + initial_model = test_utils.build_mock_model() + final_model = copy.deepcopy(initial_model) + + # 2. INVOKE + # Invoke the randomize_weights function, but skip the first buffer + flatbuffer_utils.randomize_weights( + final_model, buffers_to_skip=[_SKIPPED_BUFFER_INDEX]) + + # 3. VALIDATE + # Validate that the initial and final models are the same, except that + # the weights in the model buffer have been modified (i.e, randomized) + # Validate the description + self.assertEqual(initial_model.description, final_model.description) + # Validate the main subgraph's name, inputs, outputs, operators and tensors + initial_subgraph = initial_model.subgraphs[0] + final_subgraph = final_model.subgraphs[0] + self.assertEqual(initial_subgraph.name, final_subgraph.name) + for i, _ in enumerate(initial_subgraph.inputs): + self.assertEqual(initial_subgraph.inputs[i], final_subgraph.inputs[i]) + for i, _ in enumerate(initial_subgraph.outputs): + self.assertEqual(initial_subgraph.outputs[i], final_subgraph.outputs[i]) + for i, _ in enumerate(initial_subgraph.operators): + self.assertEqual(initial_subgraph.operators[i].opcodeIndex, + final_subgraph.operators[i].opcodeIndex) + initial_tensors = initial_subgraph.tensors + final_tensors = final_subgraph.tensors + for i, _ in enumerate(initial_tensors): + self.assertEqual(initial_tensors[i].name, final_tensors[i].name) + self.assertEqual(initial_tensors[i].type, final_tensors[i].type) 
+ self.assertEqual(initial_tensors[i].buffer, final_tensors[i].buffer) + for j in range(len(initial_tensors[i].shape)): + self.assertEqual(initial_tensors[i].shape[j], final_tensors[i].shape[j]) + # Validate that the skipped buffer is unchanged. + initial_buffer = initial_model.buffers[_SKIPPED_BUFFER_INDEX].data + final_buffer = final_model.buffers[_SKIPPED_BUFFER_INDEX].data + for j in range(initial_buffer.size): + self.assertEqual(initial_buffer.data[j], final_buffer.data[j]) + + +class XxdOutputToBytesTest(test_util.TensorFlowTestCase): + + def testXxdOutputToBytes(self): + # 1. SETUP + # Define the initial model + initial_model = test_utils.build_mock_model() + initial_bytes = flatbuffer_utils.convert_object_to_bytearray(initial_model) + + # Define temporary files + tmp_dir = self.get_temp_dir() + model_filename = os.path.join(tmp_dir, 'model.tflite') + + # 2. Write model to temporary file (will be used as input for xxd) + flatbuffer_utils.write_model(initial_model, model_filename) + + # 3. DUMP WITH xxd + input_cc_file = os.path.join(tmp_dir, 'model.cc') + + command = 'xxd -i {} > {}'.format(model_filename, input_cc_file) + subprocess.call(command, shell=True) + + # 4. VALIDATE + final_bytes = flatbuffer_utils.xxd_output_to_bytes(input_cc_file) + if sys.byteorder == 'big': + final_bytes = flatbuffer_utils.byte_swap_tflite_buffer( + final_bytes, 'little', 'big' + ) + + # Validate that the initial and final bytearray are the same + self.assertEqual(initial_bytes, final_bytes) + + +class CountResourceVariablesTest(test_util.TensorFlowTestCase): + + def testCountResourceVariables(self): + # 1. SETUP + # Define the initial model + initial_model = test_utils.build_mock_model() + + # 2. Confirm that resource variables for mock model is 1 + # The mock model is created with two VAR HANDLE ops, but with the same + # shared name. 
+ self.assertEqual( + flatbuffer_utils.count_resource_variables(initial_model), 1) + + +if __name__ == '__main__': + test.main() diff --git a/tensorflow/lite/tools/randomize_weights.py b/tensorflow/lite/tools/randomize_weights.py new file mode 100644 index 0000000..2b36fb1 --- /dev/null +++ b/tensorflow/lite/tools/randomize_weights.py @@ -0,0 +1,58 @@ +# Copyright 2020 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================== +r"""Randomize all weights in a tflite file.""" + +from absl import app +from absl import flags + +from tflite_micro.tensorflow.lite.tools import flatbuffer_utils + +FLAGS = flags.FLAGS + +flags.DEFINE_string('input_tflite_file', None, + 'Full path name to the input TFLite file.') +flags.DEFINE_string('output_tflite_file', None, + 'Full path name to the output randomized TFLite file.') +flags.DEFINE_multi_integer( + 'buffers_to_skip', [], 'Buffer indices in the TFLite model to be skipped, ' + 'i.e., to be left unmodified.') +flags.DEFINE_multi_string( + 'ops_to_skip', [], 'Ops in the TFLite model to be skipped / unmodified.') +flags.DEFINE_integer('random_seed', 0, 'Input to the random number generator.') + +flags.mark_flag_as_required('input_tflite_file') +flags.mark_flag_as_required('output_tflite_file') + + +def main(_): + buffers_to_skip = FLAGS.buffers_to_skip + ops_to_skip = [op.upper() for op in FLAGS.ops_to_skip] + model = 
flatbuffer_utils.read_model(FLAGS.input_tflite_file) + + # Add in buffers for ops in ops_to_skip to the list of skipped buffers. + for graph in model.subgraphs: + for op in graph.operators: + op_name = flatbuffer_utils.opcode_to_name(model, op.opcodeIndex) + if op_name.upper() in ops_to_skip: + for input_idx in op.inputs: + buffers_to_skip.append(graph.tensors[input_idx].buffer) + + flatbuffer_utils.randomize_weights(model, FLAGS.random_seed, + FLAGS.buffers_to_skip) + flatbuffer_utils.write_model(model, FLAGS.output_tflite_file) + + +if __name__ == '__main__': + app.run(main) diff --git a/tensorflow/lite/tools/strip_strings.py b/tensorflow/lite/tools/strip_strings.py new file mode 100644 index 0000000..ff769db --- /dev/null +++ b/tensorflow/lite/tools/strip_strings.py @@ -0,0 +1,40 @@ +# Copyright 2020 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# ============================================================================== +r"""Strips all nonessential strings from a TFLite file.""" + +from absl import app +from absl import flags + +from tflite_micro.tensorflow.lite.tools import flatbuffer_utils + +FLAGS = flags.FLAGS + +flags.DEFINE_string('input_tflite_file', None, + 'Full path name to the input TFLite file.') +flags.DEFINE_string('output_tflite_file', None, + 'Full path name to the output stripped TFLite file.') + +flags.mark_flag_as_required('input_tflite_file') +flags.mark_flag_as_required('output_tflite_file') + + +def main(_): + model = flatbuffer_utils.read_model(FLAGS.input_tflite_file) + flatbuffer_utils.strip_strings(model) + flatbuffer_utils.write_model(model, FLAGS.output_tflite_file) + + +if __name__ == '__main__': + app.run(main) diff --git a/tensorflow/lite/tools/test_utils.py b/tensorflow/lite/tools/test_utils.py new file mode 100644 index 0000000..ec1fcb1 --- /dev/null +++ b/tensorflow/lite/tools/test_utils.py @@ -0,0 +1,283 @@ +# Copyright 2020 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================== +"""Utility functions that support testing. + +All functions that can be commonly used by various tests. 
+""" + +import flatbuffers +from tflite_micro.tensorflow.lite.python import schema_py_generated as schema_fb + +TFLITE_SCHEMA_VERSION = 3 + + +def build_mock_flatbuffer_model(): + """Creates a flatbuffer containing an example model.""" + builder = flatbuffers.Builder(1024) + + schema_fb.BufferStart(builder) + buffer0_offset = schema_fb.BufferEnd(builder) + + schema_fb.BufferStartDataVector(builder, 12) + builder.PrependUint8(11) + builder.PrependUint8(10) + builder.PrependUint8(9) + builder.PrependUint8(8) + builder.PrependUint8(7) + builder.PrependUint8(6) + builder.PrependUint8(5) + builder.PrependUint8(4) + builder.PrependUint8(3) + builder.PrependUint8(2) + builder.PrependUint8(1) + builder.PrependUint8(0) + buffer1_data_offset = builder.EndVector() + schema_fb.BufferStart(builder) + schema_fb.BufferAddData(builder, buffer1_data_offset) + buffer1_offset = schema_fb.BufferEnd(builder) + + schema_fb.BufferStart(builder) + buffer2_offset = schema_fb.BufferEnd(builder) + + schema_fb.ModelStartBuffersVector(builder, 3) + builder.PrependUOffsetTRelative(buffer2_offset) + builder.PrependUOffsetTRelative(buffer1_offset) + builder.PrependUOffsetTRelative(buffer0_offset) + buffers_offset = builder.EndVector() + + string0_offset = builder.CreateString('input_tensor') + schema_fb.TensorStartShapeVector(builder, 3) + builder.PrependInt32(1) + builder.PrependInt32(2) + builder.PrependInt32(5) + shape0_offset = builder.EndVector() + schema_fb.TensorStart(builder) + schema_fb.TensorAddName(builder, string0_offset) + schema_fb.TensorAddShape(builder, shape0_offset) + schema_fb.TensorAddType(builder, 0) + schema_fb.TensorAddBuffer(builder, 0) + tensor0_offset = schema_fb.TensorEnd(builder) + + schema_fb.QuantizationParametersStartMinVector(builder, 5) + builder.PrependFloat32(0.5) + builder.PrependFloat32(2.0) + builder.PrependFloat32(5.0) + builder.PrependFloat32(10.0) + builder.PrependFloat32(20.0) + quant1_min_offset = builder.EndVector() + + 
schema_fb.QuantizationParametersStartMaxVector(builder, 5) + builder.PrependFloat32(10.0) + builder.PrependFloat32(20.0) + builder.PrependFloat32(-50.0) + builder.PrependFloat32(1.0) + builder.PrependFloat32(2.0) + quant1_max_offset = builder.EndVector() + + schema_fb.QuantizationParametersStartScaleVector(builder, 5) + builder.PrependFloat32(3.0) + builder.PrependFloat32(4.0) + builder.PrependFloat32(5.0) + builder.PrependFloat32(6.0) + builder.PrependFloat32(7.0) + quant1_scale_offset = builder.EndVector() + + schema_fb.QuantizationParametersStartZeroPointVector(builder, 5) + builder.PrependInt64(1) + builder.PrependInt64(2) + builder.PrependInt64(3) + builder.PrependInt64(-1) + builder.PrependInt64(-2) + quant1_zero_point_offset = builder.EndVector() + + schema_fb.QuantizationParametersStart(builder) + schema_fb.QuantizationParametersAddMin(builder, quant1_min_offset) + schema_fb.QuantizationParametersAddMax(builder, quant1_max_offset) + schema_fb.QuantizationParametersAddScale(builder, quant1_scale_offset) + schema_fb.QuantizationParametersAddZeroPoint(builder, + quant1_zero_point_offset) + quantization1_offset = schema_fb.QuantizationParametersEnd(builder) + + string1_offset = builder.CreateString('constant_tensor') + schema_fb.TensorStartShapeVector(builder, 3) + builder.PrependInt32(1) + builder.PrependInt32(2) + builder.PrependInt32(5) + shape1_offset = builder.EndVector() + schema_fb.TensorStart(builder) + schema_fb.TensorAddName(builder, string1_offset) + schema_fb.TensorAddShape(builder, shape1_offset) + schema_fb.TensorAddType(builder, schema_fb.TensorType.UINT8) + schema_fb.TensorAddBuffer(builder, 1) + schema_fb.TensorAddQuantization(builder, quantization1_offset) + tensor1_offset = schema_fb.TensorEnd(builder) + + string2_offset = builder.CreateString('output_tensor') + schema_fb.TensorStartShapeVector(builder, 3) + builder.PrependInt32(1) + builder.PrependInt32(2) + builder.PrependInt32(5) + shape2_offset = builder.EndVector() + 
schema_fb.TensorStart(builder) + schema_fb.TensorAddName(builder, string2_offset) + schema_fb.TensorAddShape(builder, shape2_offset) + schema_fb.TensorAddType(builder, 0) + schema_fb.TensorAddBuffer(builder, 2) + tensor2_offset = schema_fb.TensorEnd(builder) + + schema_fb.SubGraphStartTensorsVector(builder, 3) + builder.PrependUOffsetTRelative(tensor2_offset) + builder.PrependUOffsetTRelative(tensor1_offset) + builder.PrependUOffsetTRelative(tensor0_offset) + tensors_offset = builder.EndVector() + + schema_fb.SubGraphStartInputsVector(builder, 1) + builder.PrependInt32(0) + inputs_offset = builder.EndVector() + + schema_fb.SubGraphStartOutputsVector(builder, 1) + builder.PrependInt32(2) + outputs_offset = builder.EndVector() + + schema_fb.OperatorCodeStart(builder) + schema_fb.OperatorCodeAddBuiltinCode(builder, schema_fb.BuiltinOperator.ADD) + schema_fb.OperatorCodeAddDeprecatedBuiltinCode(builder, + schema_fb.BuiltinOperator.ADD) + schema_fb.OperatorCodeAddVersion(builder, 1) + code0_offset = schema_fb.OperatorCodeEnd(builder) + + schema_fb.OperatorCodeStart(builder) + schema_fb.OperatorCodeAddBuiltinCode(builder, + schema_fb.BuiltinOperator.VAR_HANDLE) + schema_fb.OperatorCodeAddDeprecatedBuiltinCode( + builder, schema_fb.BuiltinOperator.PLACEHOLDER_FOR_GREATER_OP_CODES) + schema_fb.OperatorCodeAddVersion(builder, 1) + code1_offset = schema_fb.OperatorCodeEnd(builder) + + schema_fb.ModelStartOperatorCodesVector(builder, 2) + builder.PrependUOffsetTRelative(code1_offset) + builder.PrependUOffsetTRelative(code0_offset) + codes_offset = builder.EndVector() + + schema_fb.OperatorStartInputsVector(builder, 2) + builder.PrependInt32(0) + builder.PrependInt32(1) + op_inputs_offset = builder.EndVector() + + schema_fb.OperatorStartOutputsVector(builder, 1) + builder.PrependInt32(2) + op_outputs_offset = builder.EndVector() + + schema_fb.OperatorStart(builder) + schema_fb.OperatorAddOpcodeIndex(builder, 0) + schema_fb.OperatorAddInputs(builder, op_inputs_offset) + 
schema_fb.OperatorAddOutputs(builder, op_outputs_offset) + op0_offset = schema_fb.OperatorEnd(builder) + + shared_name = builder.CreateString('var') + schema_fb.VarHandleOptionsStart(builder) + schema_fb.VarHandleOptionsAddSharedName(builder, shared_name) + var_handle_options_offset = schema_fb.VarHandleOptionsEnd(builder) + + schema_fb.OperatorStart(builder) + schema_fb.OperatorAddOpcodeIndex(builder, 1) + schema_fb.OperatorAddBuiltinOptionsType( + builder, schema_fb.BuiltinOptions.VarHandleOptions) + schema_fb.OperatorAddBuiltinOptions(builder, var_handle_options_offset) + op1_offset = schema_fb.OperatorEnd(builder) + + schema_fb.OperatorStart(builder) + schema_fb.OperatorAddBuiltinOptionsType( + builder, schema_fb.BuiltinOptions.VarHandleOptions) + schema_fb.OperatorAddBuiltinOptions(builder, var_handle_options_offset) + op2_offset = schema_fb.OperatorEnd(builder) + + schema_fb.SubGraphStartOperatorsVector(builder, 3) + builder.PrependUOffsetTRelative(op2_offset) + builder.PrependUOffsetTRelative(op1_offset) + builder.PrependUOffsetTRelative(op0_offset) + ops_offset = builder.EndVector() + + string3_offset = builder.CreateString('subgraph_name') + schema_fb.SubGraphStart(builder) + schema_fb.SubGraphAddName(builder, string3_offset) + schema_fb.SubGraphAddTensors(builder, tensors_offset) + schema_fb.SubGraphAddInputs(builder, inputs_offset) + schema_fb.SubGraphAddOutputs(builder, outputs_offset) + schema_fb.SubGraphAddOperators(builder, ops_offset) + subgraph_offset = schema_fb.SubGraphEnd(builder) + + schema_fb.ModelStartSubgraphsVector(builder, 1) + builder.PrependUOffsetTRelative(subgraph_offset) + subgraphs_offset = builder.EndVector() + + signature_key = builder.CreateString('my_key') + input_tensor_string = builder.CreateString('input_tensor') + output_tensor_string = builder.CreateString('output_tensor') + + # Signature Inputs + schema_fb.TensorMapStart(builder) + schema_fb.TensorMapAddName(builder, input_tensor_string) + 
schema_fb.TensorMapAddTensorIndex(builder, 1) + input_tensor = schema_fb.TensorMapEnd(builder) + + # Signature Outputs + schema_fb.TensorMapStart(builder) + schema_fb.TensorMapAddName(builder, output_tensor_string) + schema_fb.TensorMapAddTensorIndex(builder, 2) + output_tensor = schema_fb.TensorMapEnd(builder) + + schema_fb.SignatureDefStartInputsVector(builder, 1) + builder.PrependUOffsetTRelative(input_tensor) + signature_inputs_offset = builder.EndVector() + schema_fb.SignatureDefStartOutputsVector(builder, 1) + builder.PrependUOffsetTRelative(output_tensor) + signature_outputs_offset = builder.EndVector() + + schema_fb.SignatureDefStart(builder) + schema_fb.SignatureDefAddSignatureKey(builder, signature_key) + schema_fb.SignatureDefAddInputs(builder, signature_inputs_offset) + schema_fb.SignatureDefAddOutputs(builder, signature_outputs_offset) + signature_offset = schema_fb.SignatureDefEnd(builder) + schema_fb.ModelStartSignatureDefsVector(builder, 1) + builder.PrependUOffsetTRelative(signature_offset) + signature_defs_offset = builder.EndVector() + + string4_offset = builder.CreateString('model_description') + schema_fb.ModelStart(builder) + schema_fb.ModelAddVersion(builder, TFLITE_SCHEMA_VERSION) + schema_fb.ModelAddOperatorCodes(builder, codes_offset) + schema_fb.ModelAddSubgraphs(builder, subgraphs_offset) + schema_fb.ModelAddDescription(builder, string4_offset) + schema_fb.ModelAddBuffers(builder, buffers_offset) + schema_fb.ModelAddSignatureDefs(builder, signature_defs_offset) + model_offset = schema_fb.ModelEnd(builder) + builder.Finish(model_offset) + model = builder.Output() + + return model + + +def load_model_from_flatbuffer(flatbuffer_model): + """Loads a model as a python object from a flatbuffer model.""" + model = schema_fb.Model.GetRootAsModel(flatbuffer_model, 0) + model = schema_fb.ModelT.InitFromObj(model) + return model + + +def build_mock_model(): + """Creates an object containing an example model.""" + model = 
build_mock_flatbuffer_model() + return load_model_from_flatbuffer(model) diff --git a/tensorflow/lite/tools/visualize.py b/tensorflow/lite/tools/visualize.py new file mode 100644 index 0000000..15077a6 --- /dev/null +++ b/tensorflow/lite/tools/visualize.py @@ -0,0 +1,549 @@ +#!/usr/bin/env python +# Copyright 2017 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================== +"""This tool creates an html visualization of a TensorFlow Lite graph. + +Example usage: + +python visualize.py foo.tflite foo.html +""" + +import json +import os +import re +import sys +import numpy as np + +# pylint: disable=g-import-not-at-top +if not os.path.splitext(__file__)[0].endswith( + os.path.join("tflite_runtime", "visualize")): + # This file is part of tensorflow package. + from tflite_micro.tensorflow.lite.python import schema_py_generated as schema_fb +else: + # This file is part of tflite_runtime package. 
+ from tflite_runtime import schema_py_generated as schema_fb + +# A CSS description for making the visualizer +_CSS = """ + + + + + + + + +""" + +_D3_HTML_TEMPLATE = """ + +""" + + +def TensorTypeToName(tensor_type): + """Converts a numerical enum to a readable tensor type.""" + for name, value in schema_fb.TensorType.__dict__.items(): + if value == tensor_type: + return name + return None + + +def BuiltinCodeToName(code): + """Converts a builtin op code enum to a readable name.""" + for name, value in schema_fb.BuiltinOperator.__dict__.items(): + if value == code: + return name + return None + + +def NameListToString(name_list): + """Converts a list of integers to the equivalent ASCII string.""" + if isinstance(name_list, str): + return name_list + else: + result = "" + if name_list is not None: + for val in name_list: + result = result + chr(int(val)) + return result + + +class OpCodeMapper: + """Maps an opcode index to an op name.""" + + def __init__(self, data): + self.code_to_name = {} + for idx, d in enumerate(data["operator_codes"]): + self.code_to_name[idx] = BuiltinCodeToName(d["builtin_code"]) + if self.code_to_name[idx] == "CUSTOM": + self.code_to_name[idx] = NameListToString(d["custom_code"]) + + def __call__(self, x): + if x not in self.code_to_name: + s = "" + else: + s = self.code_to_name[x] + return "%s (%d)" % (s, x) + + +class DataSizeMapper: + """For buffers, report the number of bytes.""" + + def __call__(self, x): + if x is not None: + return "%d bytes" % len(x) + else: + return "--" + + +class TensorMapper: + """Maps a list of tensor indices to a tooltip hoverable indicator of more.""" + + def __init__(self, subgraph_data): + self.data = subgraph_data + + def __call__(self, x): + html = "" + if x is None: + return html + + html += "" + for i in x: + tensor = self.data["tensors"][i] + html += str(i) + " " + html += NameListToString(tensor["name"]) + " " + html += TensorTypeToName(tensor["type"]) + " " + html += (repr(tensor["shape"]) if 
"shape" in tensor else "[]") + html += (repr(tensor["shape_signature"]) + if "shape_signature" in tensor else "[]") + "
    " + html += "
    " + html += repr(x) + html += "
    " + return html + + +def GenerateGraph(subgraph_idx, g, opcode_mapper): + """Produces the HTML required to have a d3 visualization of the dag.""" + + def TensorName(idx): + return "t%d" % idx + + def OpName(idx): + return "o%d" % idx + + edges = [] + nodes = [] + first = {} + second = {} + pixel_mult = 200 # TODO(aselle): multiplier for initial placement + width_mult = 170 # TODO(aselle): multiplier for initial placement + for op_index, op in enumerate(g["operators"] or []): + if op["inputs"] is not None: + for tensor_input_position, tensor_index in enumerate(op["inputs"]): + if tensor_index not in first: + first[tensor_index] = ((op_index - 0.5 + 1) * pixel_mult, + (tensor_input_position + 1) * width_mult) + edges.append({ + "source": TensorName(tensor_index), + "target": OpName(op_index) + }) + if op["outputs"] is not None: + for tensor_output_position, tensor_index in enumerate(op["outputs"]): + if tensor_index not in second: + second[tensor_index] = ((op_index + 0.5 + 1) * pixel_mult, + (tensor_output_position + 1) * width_mult) + edges.append({ + "target": TensorName(tensor_index), + "source": OpName(op_index) + }) + + nodes.append({ + "id": OpName(op_index), + "name": opcode_mapper(op["opcode_index"]), + "group": 2, + "x": pixel_mult, + "y": (op_index + 1) * pixel_mult + }) + for tensor_index, tensor in enumerate(g["tensors"]): + initial_y = ( + first[tensor_index] if tensor_index in first else + second[tensor_index] if tensor_index in second else (0, 0)) + + nodes.append({ + "id": TensorName(tensor_index), + "name": "%r (%d)" % (getattr(tensor, "shape", []), tensor_index), + "group": 1, + "x": initial_y[1], + "y": initial_y[0] + }) + graph_str = json.dumps({"nodes": nodes, "edges": edges}) + + html = _D3_HTML_TEMPLATE % (graph_str, subgraph_idx) + return html + + +def GenerateTableHtml(items, keys_to_print, display_index=True): + """Given a list of object values and keys to print, make an HTML table. + + Args: + items: Items to print an array of dicts. 
+ keys_to_print: (key, display_fn). `key` is a key in the object. i.e. + items[0][key] should exist. display_fn is the mapping function on display. + i.e. the displayed html cell will have the string returned by + `mapping_fn(items[0][key])`. + display_index: add a column which is the index of each row in `items`. + + Returns: + An html table. + """ + html = "" + # Print the list of items + html += "\n" + html += "\n" + if display_index: + html += "" + for h, mapper in keys_to_print: + html += "" % h + html += "\n" + for idx, tensor in enumerate(items): + html += "\n" + if display_index: + html += "" % idx + # print tensor.keys() + for h, mapper in keys_to_print: + val = tensor[h] if h in tensor else None + val = val if mapper is None else mapper(val) + html += "\n" % val + + html += "\n" + html += "
    index%s
    %d%s
    \n" + return html + + +def CamelCaseToSnakeCase(camel_case_input): + """Converts an identifier in CamelCase to snake_case.""" + s1 = re.sub("(.)([A-Z][a-z]+)", r"\1_\2", camel_case_input) + return re.sub("([a-z0-9])([A-Z])", r"\1_\2", s1).lower() + + +def FlatbufferToDict(fb, preserve_as_numpy): + """Converts a hierarchy of FB objects into a nested dict. + + We avoid transforming big parts of the flat buffer into python arrays. This + speeds conversion from ten minutes to a few seconds on big graphs. + + Args: + fb: a flat buffer structure. (i.e. ModelT) + preserve_as_numpy: true if all downstream np.arrays should be preserved. + false if all downstream np.array should become python arrays + Returns: + A dictionary representing the flatbuffer rather than a flatbuffer object. + """ + if isinstance(fb, int) or isinstance(fb, float) or isinstance(fb, str): + return fb + elif hasattr(fb, "__dict__"): + result = {} + for attribute_name in dir(fb): + attribute = fb.__getattribute__(attribute_name) + if not callable(attribute) and attribute_name[0] != "_": + snake_name = CamelCaseToSnakeCase(attribute_name) + preserve = True if attribute_name == "buffers" else preserve_as_numpy + result[snake_name] = FlatbufferToDict(attribute, preserve) + return result + elif isinstance(fb, np.ndarray): + return fb if preserve_as_numpy else fb.tolist() + elif hasattr(fb, "__len__"): + return [FlatbufferToDict(entry, preserve_as_numpy) for entry in fb] + else: + return fb + + +def CreateDictFromFlatbuffer(buffer_data): + model_obj = schema_fb.Model.GetRootAsModel(buffer_data, 0) + model = schema_fb.ModelT.InitFromObj(model_obj) + return FlatbufferToDict(model, preserve_as_numpy=False) + + +def create_html(tflite_input, input_is_filepath=True): # pylint: disable=invalid-name + """Returns html description with the given tflite model. + + Args: + tflite_input: TFLite flatbuffer model path or model object. + input_is_filepath: Tells if tflite_input is a model path or a model object. 
+ + Returns: + Dump of the given tflite model in HTML format. + + Raises: + RuntimeError: If the input is not valid. + """ + + # Convert the model into a JSON flatbuffer using flatc (build if doesn't + # exist. + if input_is_filepath: + if not os.path.exists(tflite_input): + raise RuntimeError("Invalid filename %r" % tflite_input) + if tflite_input.endswith(".tflite") or tflite_input.endswith(".bin"): + with open(tflite_input, "rb") as file_handle: + file_data = bytearray(file_handle.read()) + data = CreateDictFromFlatbuffer(file_data) + elif tflite_input.endswith(".json"): + data = json.load(open(tflite_input)) + else: + raise RuntimeError("Input file was not .tflite or .json") + else: + data = CreateDictFromFlatbuffer(tflite_input) + html = "" + html += _CSS + html += "

    TensorFlow Lite Model

    " + + data["filename"] = tflite_input if input_is_filepath else ( + "Null (used model object)") # Avoid special case + + toplevel_stuff = [("filename", None), ("version", None), + ("description", None)] + + html += "\n" + for key, mapping in toplevel_stuff: + if not mapping: + mapping = lambda x: x + html += "\n" % (key, mapping(data.get(key))) + html += "
    %s%s
    \n" + + # Spec on what keys to display + buffer_keys_to_display = [("data", DataSizeMapper())] + operator_keys_to_display = [("builtin_code", BuiltinCodeToName), + ("custom_code", NameListToString), + ("version", None)] + + # Update builtin code fields. + for d in data["operator_codes"]: + d["builtin_code"] = max(d["builtin_code"], d["deprecated_builtin_code"]) + + for subgraph_idx, g in enumerate(data["subgraphs"]): + # Subgraph local specs on what to display + html += "
    " + tensor_mapper = TensorMapper(g) + opcode_mapper = OpCodeMapper(data) + op_keys_to_display = [("inputs", tensor_mapper), ("outputs", tensor_mapper), + ("builtin_options", None), + ("opcode_index", opcode_mapper)] + tensor_keys_to_display = [("name", NameListToString), + ("type", TensorTypeToName), ("shape", None), + ("shape_signature", None), ("buffer", None), + ("quantization", None)] + + html += "

    Subgraph %d

    \n" % subgraph_idx + + # Inputs and outputs. + html += "

    Inputs/Outputs

    \n" + html += GenerateTableHtml([{ + "inputs": g["inputs"], + "outputs": g["outputs"] + }], [("inputs", tensor_mapper), ("outputs", tensor_mapper)], + display_index=False) + + # Print the tensors. + html += "

    Tensors

    \n" + html += GenerateTableHtml(g["tensors"], tensor_keys_to_display) + + # Print the ops. + if g["operators"]: + html += "

    Ops

    \n" + html += GenerateTableHtml(g["operators"], op_keys_to_display) + + # Visual graph. + html += "\n" % ( + subgraph_idx,) + html += GenerateGraph(subgraph_idx, g, opcode_mapper) + html += "
    " + + # Buffers have no data, but maybe in the future they will + html += "

    Buffers

    \n" + html += GenerateTableHtml(data["buffers"], buffer_keys_to_display) + + # Operator codes + html += "

    Operator Codes

    \n" + html += GenerateTableHtml(data["operator_codes"], operator_keys_to_display) + + html += "\n" + + return html + + +def main(argv): + try: + tflite_input = argv[1] + html_output = argv[2] + except IndexError: + print("Usage: %s " % (argv[0])) + else: + html = create_html(tflite_input) + with open(html_output, "w") as output_file: + output_file.write(html) + + +if __name__ == "__main__": + main(sys.argv) diff --git a/tensorflow/lite/tools/visualize_test.py b/tensorflow/lite/tools/visualize_test.py new file mode 100644 index 0000000..68de38c --- /dev/null +++ b/tensorflow/lite/tools/visualize_test.py @@ -0,0 +1,64 @@ +# Copyright 2020 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# ============================================================================== +"""TensorFlow Lite Python Interface: Sanity check.""" +import os +import re + +from tflite_micro.tensorflow.lite.tools import test_utils +from tflite_micro.tensorflow.lite.tools import visualize +from tensorflow.python.framework import test_util +from tensorflow.python.platform import test + + +class VisualizeTest(test_util.TensorFlowTestCase): + + def testTensorTypeToName(self): + self.assertEqual('FLOAT32', visualize.TensorTypeToName(0)) + + def testBuiltinCodeToName(self): + self.assertEqual('HASHTABLE_LOOKUP', visualize.BuiltinCodeToName(10)) + + def testFlatbufferToDict(self): + model = test_utils.build_mock_flatbuffer_model() + model_dict = visualize.CreateDictFromFlatbuffer(model) + self.assertEqual(test_utils.TFLITE_SCHEMA_VERSION, model_dict['version']) + self.assertEqual(1, len(model_dict['subgraphs'])) + self.assertEqual(2, len(model_dict['operator_codes'])) + self.assertEqual(3, len(model_dict['buffers'])) + self.assertEqual(3, len(model_dict['subgraphs'][0]['tensors'])) + self.assertEqual(0, model_dict['subgraphs'][0]['tensors'][0]['buffer']) + + def testVisualize(self): + model = test_utils.build_mock_flatbuffer_model() + tmp_dir = self.get_temp_dir() + model_filename = os.path.join(tmp_dir, 'model.tflite') + with open(model_filename, 'wb') as model_file: + model_file.write(model) + + html_text = visualize.create_html(model_filename) + + # It's hard to test debug output without doing a full HTML parse, + # but at least sanity check that expected identifiers are present. 
+ self.assertRegex( + html_text, re.compile(r'%s' % model_filename, re.MULTILINE | re.DOTALL)) + self.assertRegex(html_text, + re.compile(r'input_tensor', re.MULTILINE | re.DOTALL)) + self.assertRegex(html_text, + re.compile(r'constant_tensor', re.MULTILINE | re.DOTALL)) + self.assertRegex(html_text, re.compile(r'ADD', re.MULTILINE | re.DOTALL)) + + +if __name__ == '__main__': + test.main() diff --git a/tensorflow/workspace.bzl b/tensorflow/workspace.bzl new file mode 100644 index 0000000..b799523 --- /dev/null +++ b/tensorflow/workspace.bzl @@ -0,0 +1,63 @@ +load("//third_party/flatbuffers:workspace.bzl", flatbuffers = "repo") +load("//third_party/kissfft:workspace.bzl", kissfft = "repo") +load("//third_party/ruy:workspace.bzl", ruy = "repo") +load("//third_party:repo.bzl", "tf_http_archive") + +def initialize_third_party(): + """ Load third party repositories. See above load() statements. """ + flatbuffers() + kissfft() + ruy() + +# Sanitize a dependency so that it works correctly from code that includes +# TensorFlow as a submodule. 
+def clean_dep(dep): + return str(Label(dep)) + +def tf_repositories(path_prefix = "", tf_repo_name = ""): + """All external dependencies for TF builds.""" + + # https://github.com/bazelbuild/bazel-skylib/releases + tf_http_archive( + name = "bazel_skylib", + sha256 = "1dde365491125a3db70731e25658dfdd3bc5dbdfd11b840b3e987ecf043c7ca0", + urls = [ + "https://storage.googleapis.com/mirror.tensorflow.org/github.com/bazelbuild/bazel-skylib/releases/download/0.9.0/bazel_skylib-0.9.0.tar.gz", + "https://github.com/bazelbuild/bazel-skylib/releases/download/0.9.0/bazel_skylib-0.9.0.tar.gz", + ], + ) + + tf_http_archive( + name = "gemmlowp", + sha256 = "43146e6f56cb5218a8caaab6b5d1601a083f1f31c06ff474a4378a7d35be9cfb", # SHARED_GEMMLOWP_SHA + strip_prefix = "gemmlowp-fda83bdc38b118cc6b56753bd540caa49e570745", + urls = [ + "https://storage.googleapis.com/mirror.tensorflow.org/github.com/google/gemmlowp/archive/fda83bdc38b118cc6b56753bd540caa49e570745.zip", + "https://github.com/google/gemmlowp/archive/fda83bdc38b118cc6b56753bd540caa49e570745.zip", + ], + ) + + tf_http_archive( + name = "absl_py", + sha256 = "516e83df99fe7c365727ef09c9e1f83b55985afaf23dd1ca572b3e160057f5f8", + strip_prefix = "abseil-py-b188d9080c8e5628bb52a93a04ad930abb1717eb", + urls = [ + "https://github.com/abseil/abseil-py/archive/b188d9080c8e5628bb52a93a04ad930abb1717eb.zip", + ], + ) + + tf_http_archive( + name = "six_archive", + urls = [ + "http://mirror.bazel.build/pypi.python.org/packages/source/s/six/six-1.10.0.tar.gz", + "https://pypi.python.org/packages/source/s/six/six-1.10.0.tar.gz", + ], + sha256 = "105f8d68616f8248e24bf0e9372ef04d3cc10104f1980f54d57b2ce73a5ad56a", + strip_prefix = "six-1.10.0", + build_file = "@//third_party:six.BUILD", + ) + + initialize_third_party() + +def workspace(): + tf_repositories() diff --git a/third_party/BUILD b/third_party/BUILD new file mode 100644 index 0000000..0b71f6f --- /dev/null +++ b/third_party/BUILD @@ -0,0 +1,30 @@ +load("@rules_python//python:pip.bzl", 
"compile_pip_requirements") + +licenses(["notice"]) + +compile_pip_requirements( + # Defines targets which use pip-compile to keep the Python locked + # requirements up-to-date: + # + # :python_requirements.update bazel run this target to update + # ./python_requirements.txt by recursively following + # and locking the dependencies seeded by + # ./python_requirements.in + # + # :python_requirements_test bazel test target which fails if + # ./python_requirements.txt does not match + # that generated from ./python_requirements.in + name = "python_requirements", + extra_args = [ + "--allow-unsafe", + # ^ lets pip-compile include setuptools, recommended by + # `pip-compile -h` as future default behavior + ], + requirements_in = "python_requirements.in", + requirements_txt = "python_requirements.txt", + tags = [ + "manual", + # ^ exclude .update and _test targets from wildcards in, + # e.g., `bazel test ...` + ], +) diff --git a/third_party/flatbuffers/BUILD b/third_party/flatbuffers/BUILD new file mode 100644 index 0000000..82bab3f --- /dev/null +++ b/third_party/flatbuffers/BUILD @@ -0,0 +1 @@ +# This empty BUILD file is required to make Bazel treat this directory as a package. 
diff --git a/third_party/flatbuffers/BUILD.external b/third_party/flatbuffers/BUILD.external new file mode 100644 index 0000000..dfd3a16 --- /dev/null +++ b/third_party/flatbuffers/BUILD.external @@ -0,0 +1,190 @@ +load(":build_defs.bzl", "flatbuffer_py_strip_prefix_srcs") + +package(default_visibility = ["//visibility:public"]) + +licenses(["notice"]) + +exports_files(["LICENSE.txt"]) + +licenses(["notice"]) + +config_setting( + name = "freebsd", + values = {"cpu": "freebsd"}, +) + +config_setting( + name = "windows", + values = {"cpu": "x64_windows"}, +) + +config_setting( + name = "platform_openbsd", + values = {"cpu": "openbsd"}, +) + +config_setting( + name = "platform_freebsd", + values = {"cpu": "freebsd"}, +) + +load("@rules_cc//cc:defs.bzl", "cc_binary", "cc_library") + +# Public flatc library to compile flatbuffer files at runtime. +cc_library( + name = "flatbuffers", + hdrs = ["//:public_headers"], + linkstatic = 1, + strip_include_prefix = "/include", + visibility = ["//visibility:public"], + deps = ["//src:flatbuffers"], +) + +# Public C++ headers for the Flatbuffers library. 
+filegroup( + name = "public_headers", + srcs = [ + "include/flatbuffers/allocator.h", + "include/flatbuffers/array.h", + "include/flatbuffers/base.h", + "include/flatbuffers/bfbs_generator.h", + "include/flatbuffers/buffer.h", + "include/flatbuffers/buffer_ref.h", + "include/flatbuffers/code_generators.h", + "include/flatbuffers/default_allocator.h", + "include/flatbuffers/detached_buffer.h", + "include/flatbuffers/flatbuffer_builder.h", + "include/flatbuffers/flatbuffers.h", + "include/flatbuffers/flex_flat_util.h", + "include/flatbuffers/flexbuffers.h", + "include/flatbuffers/grpc.h", + "include/flatbuffers/hash.h", + "include/flatbuffers/idl.h", + "include/flatbuffers/minireflect.h", + "include/flatbuffers/reflection.h", + "include/flatbuffers/reflection_generated.h", + "include/flatbuffers/registry.h", + "include/flatbuffers/stl_emulation.h", + "include/flatbuffers/string.h", + "include/flatbuffers/struct.h", + "include/flatbuffers/table.h", + "include/flatbuffers/util.h", + "include/flatbuffers/vector.h", + "include/flatbuffers/vector_downward.h", + "include/flatbuffers/verifier.h", + ], + visibility = ["//:__subpackages__"], +) + +# Public flatc compiler library. +cc_library( + name = "flatc_library", + linkstatic = 1, + visibility = ["//visibility:public"], + deps = [ + "@flatbuffers//src:flatc_library", + ], +) + +# Public flatc compiler. +cc_binary( + name = "flatc", + linkopts = select({ + ":freebsd": [ + "-lm", + ], + ":windows": [], + "//conditions:default": [ + "-lm", + "-ldl", + ], + }), + visibility = ["//visibility:public"], + deps = [ + "@flatbuffers//src:flatc", + ], +) + +filegroup( + name = "flatc_headers", + srcs = [ + "include/flatbuffers/flatc.h", + ], + visibility = ["//:__subpackages__"], +) + +# Library used by flatbuffer_cc_library rules. 
+cc_library( + name = "runtime_cc", + hdrs = [ + "include/flatbuffers/allocator.h", + "include/flatbuffers/array.h", + "include/flatbuffers/base.h", + "include/flatbuffers/bfbs_generator.h", + "include/flatbuffers/buffer.h", + "include/flatbuffers/buffer_ref.h", + "include/flatbuffers/code_generators.h", + "include/flatbuffers/default_allocator.h", + "include/flatbuffers/detached_buffer.h", + "include/flatbuffers/flatbuffer_builder.h", + "include/flatbuffers/flatbuffers.h", + "include/flatbuffers/flexbuffers.h", + "include/flatbuffers/grpc.h", + "include/flatbuffers/hash.h", + "include/flatbuffers/idl.h", + "include/flatbuffers/minireflect.h", + "include/flatbuffers/reflection.h", + "include/flatbuffers/reflection_generated.h", + "include/flatbuffers/registry.h", + "include/flatbuffers/stl_emulation.h", + "include/flatbuffers/string.h", + "include/flatbuffers/struct.h", + "include/flatbuffers/table.h", + "include/flatbuffers/util.h", + "include/flatbuffers/vector.h", + "include/flatbuffers/vector_downward.h", + "include/flatbuffers/verifier.h", + ], + linkstatic = 1, + strip_include_prefix = "/include", + visibility = ["//visibility:public"], +) + +flatbuffer_py_strip_prefix_srcs( + name = "flatbuffer_py_strip_prefix", + srcs = [ + "python/flatbuffers/__init__.py", + "python/flatbuffers/_version.py", + "python/flatbuffers/builder.py", + "python/flatbuffers/compat.py", + "python/flatbuffers/encode.py", + "python/flatbuffers/flexbuffers.py", + "python/flatbuffers/number_types.py", + "python/flatbuffers/packer.py", + "python/flatbuffers/table.py", + "python/flatbuffers/util.py", + ], + strip_prefix = "python/flatbuffers/", +) + +filegroup( + name = "runtime_py_srcs", + srcs = [ + "__init__.py", + "_version.py", + "builder.py", + "compat.py", + "encode.py", + "flexbuffers.py", + "number_types.py", + "packer.py", + "table.py", + "util.py", + ], +) + +py_library( + name = "runtime_py", + srcs = [":runtime_py_srcs"], + visibility = ["//visibility:public"], +) \ No newline 
at end of file diff --git a/third_party/flatbuffers/BUILD.system b/third_party/flatbuffers/BUILD.system new file mode 100644 index 0000000..8fe4d7a --- /dev/null +++ b/third_party/flatbuffers/BUILD.system @@ -0,0 +1,43 @@ +licenses(["notice"]) # Apache 2.0 + +filegroup( + name = "LICENSE.txt", + visibility = ["//visibility:public"], +) + +# Public flatc library to compile flatbuffer files at runtime. +cc_library( + name = "flatbuffers", + linkopts = ["-lflatbuffers"], + visibility = ["//visibility:public"], +) + +# Public flatc compiler library. +cc_library( + name = "flatc_library", + linkopts = ["-lflatbuffers"], + visibility = ["//visibility:public"], +) + +genrule( + name = "lnflatc", + outs = ["flatc.bin"], + cmd = "ln -s $$(which flatc) $@", +) + +# Public flatc compiler. +sh_binary( + name = "flatc", + srcs = ["flatc.bin"], + visibility = ["//visibility:public"], +) + +cc_library( + name = "runtime_cc", + visibility = ["//visibility:public"], +) + +py_library( + name = "runtime_py", + visibility = ["//visibility:public"], +) diff --git a/third_party/flatbuffers/build_defs.bzl b/third_party/flatbuffers/build_defs.bzl new file mode 100644 index 0000000..b7f98b9 --- /dev/null +++ b/third_party/flatbuffers/build_defs.bzl @@ -0,0 +1,458 @@ +"""BUILD rules for generating flatbuffer files.""" + +flatc_path = "@flatbuffers//:flatc" +zip_files = "//tensorflow/lite/tools:zip_files" + +DEFAULT_INCLUDE_PATHS = [ + "./", + "$(GENDIR)", + "$(BINDIR)", +] + +DEFAULT_FLATC_ARGS = [ + "--no-union-value-namespacing", + "--gen-object-api", +] + +def flatbuffer_library_public( + name, + srcs, + outs, + language_flag, + out_prefix = "", + includes = [], + include_paths = [], + compatible_with = [], + flatc_args = DEFAULT_FLATC_ARGS, + reflection_name = "", + reflection_visibility = None, + output_to_bindir = False): + """Generates code files for reading/writing the given flatbuffers in the requested language using the public compiler. 
+ + Outs: + filegroup(name): all generated source files. + Fileset([reflection_name]): (Optional) all generated reflection binaries. + + Args: + name: Rule name. + srcs: Source .fbs files. Sent in order to the compiler. + outs: Output files from flatc. + language_flag: Target language flag. One of [-c, -j, -js]. + out_prefix: Prepend this path to the front of all generated files except on + single source targets. Usually is a directory name. + includes: Optional, list of filegroups of schemas that the srcs depend on. + include_paths: Optional, list of paths the includes files can be found in. + compatible_with: Optional, passed to genrule for environments this rule + can be built for. + flatc_args: Optional, list of additional arguments to pass to flatc. + reflection_name: Optional, if set this will generate the flatbuffer + reflection binaries for the schemas. + reflection_visibility: The visibility of the generated reflection Fileset. + output_to_bindir: Passed to genrule for output to bin directory. + """ + include_paths_cmd = ["-I %s" % (s) for s in include_paths] + + # '$(@D)' when given a single source target will give the appropriate + # directory. Appending 'out_prefix' is only necessary when given a build + # target with multiple sources. 
+ output_directory = ( + ("-o $(@D)/%s" % (out_prefix)) if len(srcs) > 1 else ("-o $(@D)") + ) + genrule_cmd = " ".join([ + "for f in $(SRCS); do", + "$(location %s)" % (flatc_path), + " ".join(flatc_args), + " ".join(include_paths_cmd), + language_flag, + output_directory, + "$$f;", + "done", + ]) + native.genrule( + name = name, + srcs = srcs, + outs = outs, + output_to_bindir = output_to_bindir, + compatible_with = compatible_with, + tools = includes + [flatc_path], + cmd = genrule_cmd, + message = "Generating flatbuffer files for %s:" % (name), + ) + if reflection_name: + reflection_genrule_cmd = " ".join([ + "for f in $(SRCS); do", + "$(location %s)" % (flatc_path), + "-b --schema", + " ".join(flatc_args), + " ".join(include_paths_cmd), + language_flag, + output_directory, + "$$f;", + "done", + ]) + reflection_outs = [ + (out_prefix + "%s.bfbs") % (s.replace(".fbs", "").split("/")[-1]) + for s in srcs + ] + native.genrule( + name = "%s_srcs" % reflection_name, + srcs = srcs, + outs = reflection_outs, + output_to_bindir = output_to_bindir, + compatible_with = compatible_with, + tools = includes + [flatc_path], + cmd = reflection_genrule_cmd, + message = "Generating flatbuffer reflection binary for %s:" % (name), + ) + # TODO(b/114456773): Make bazel rules proper and supported by flatbuffer + # Have to comment this since FilesetEntry is not supported in bazel + # skylark. 
+ # native.Fileset( + # name = reflection_name, + # out = "%s_out" % reflection_name, + # entries = [ + # native.FilesetEntry(files = reflection_outs), + # ], + # visibility = reflection_visibility, + # compatible_with = compatible_with, + # ) + +def flatbuffer_cc_library( + name, + srcs, + srcs_filegroup_name = "", + out_prefix = "", + includes = [], + include_paths = [], + compatible_with = [], + flatc_args = DEFAULT_FLATC_ARGS, + visibility = None, + srcs_filegroup_visibility = None, + gen_reflections = False): + '''A cc_library with the generated reader/writers for the given flatbuffer definitions. + + Outs: + filegroup([name]_srcs): all generated .h files. + filegroup(srcs_filegroup_name if specified, or [name]_includes if not): + Other flatbuffer_cc_library's can pass this in for their `includes` + parameter, if they depend on the schemas in this library. + Fileset([name]_reflection): (Optional) all generated reflection binaries. + cc_library([name]): library with sources and flatbuffers deps. + + Remarks: + ** Because the genrule used to call flatc does not have any trivial way of + computing the output list of files transitively generated by includes and + --gen-includes (the default) being defined for flatc, the --gen-includes + flag will not work as expected. The way around this is to add a dependency + to the flatbuffer_cc_library defined alongside the flatc included Fileset. + For example you might define: + + flatbuffer_cc_library( + name = "my_fbs", + srcs = [ "schemas/foo.fbs" ], + includes = [ "//third_party/bazz:bazz_fbs_includes" ], + ) + + In which foo.fbs includes a few files from the Fileset defined at + //third_party/bazz:bazz_fbs_includes. 
When compiling the library that + includes foo_generated.h, and therefore has my_fbs as a dependency, it + will fail to find any of the bazz *_generated.h files unless you also + add bazz's flatbuffer_cc_library to your own dependency list, e.g.: + + cc_library( + name = "my_lib", + deps = [ + ":my_fbs", + "//third_party/bazz:bazz_fbs" + ], + ) + + Happy dependent Flatbuffering! + + Args: + name: Rule name. + srcs: Source .fbs files. Sent in order to the compiler. + srcs_filegroup_name: Name of the output filegroup that holds srcs. Pass this + filegroup into the `includes` parameter of any other + flatbuffer_cc_library that depends on this one's schemas. + out_prefix: Prepend this path to the front of all generated files. Usually + is a directory name. + includes: Optional, list of filegroups of schemas that the srcs depend on. + ** SEE REMARKS BELOW ** + include_paths: Optional, list of paths the includes files can be found in. + compatible_with: Optional, passed to genrule for environments this rule + can be built for + flatc_args: Optional list of additional arguments to pass to flatc + (e.g. --gen-mutable). + visibility: The visibility of the generated cc_library. By default, use the + default visibility of the project. + srcs_filegroup_visibility: The visibility of the generated srcs filegroup. + By default, use the value of the visibility parameter above. + gen_reflections: Optional, if true this will generate the flatbuffer + reflection binaries for the schemas. 
+ ''' + output_headers = [ + (out_prefix + "%s_generated.h") % (s.replace(".fbs", "").split("/")[-1]) + for s in srcs + ] + reflection_name = "%s_reflection" % name if gen_reflections else "" + + flatbuffer_library_public( + name = "%s_srcs" % (name), + srcs = srcs, + outs = output_headers, + language_flag = "-c", + out_prefix = out_prefix, + includes = includes, + include_paths = include_paths, + compatible_with = compatible_with, + flatc_args = flatc_args, + reflection_name = reflection_name, + reflection_visibility = visibility, + ) + native.cc_library( + name = name, + hdrs = output_headers, + srcs = output_headers, + features = [ + "-parse_headers", + ], + deps = [ + "@flatbuffers//:runtime_cc", + ], + includes = ["."], + linkstatic = 1, + visibility = visibility, + compatible_with = compatible_with, + ) + + # A filegroup for the `srcs`. That is, all the schema files for this + # Flatbuffer set. + native.filegroup( + name = srcs_filegroup_name if srcs_filegroup_name else "%s_includes" % (name), + srcs = srcs, + visibility = srcs_filegroup_visibility if srcs_filegroup_visibility != None else visibility, + compatible_with = compatible_with, + ) + +# Custom provider to track dependencies transitively. 
# Custom provider carrying .fbs schema files gathered transitively across deps.
FlatbufferInfo = provider(
    fields = {
        "transitive_srcs": "flatbuffer schema definitions.",
    },
)

def _flatbuffer_schemas_aspect_impl(target, ctx):
    # Collects every .fbs file reachable through `deps` and `srcs` into a
    # single FlatbufferInfo so flatc invocations can see included schemas.
    _ignore = [target]
    transitive_srcs = depset()
    if hasattr(ctx.rule.attr, "deps"):
        for dep in ctx.rule.attr.deps:
            if FlatbufferInfo in dep:
                transitive_srcs = depset(dep[FlatbufferInfo].transitive_srcs, transitive = [transitive_srcs])
    if hasattr(ctx.rule.attr, "srcs"):
        for src in ctx.rule.attr.srcs:
            if FlatbufferInfo in src:
                transitive_srcs = depset(src[FlatbufferInfo].transitive_srcs, transitive = [transitive_srcs])
            # Pick up raw .fbs files listed directly in srcs.
            for f in src.files:
                if f.extension == "fbs":
                    transitive_srcs = depset([f], transitive = [transitive_srcs])
    return [FlatbufferInfo(transitive_srcs = transitive_srcs)]

# An aspect that runs over all dependencies and transitively collects
# flatbuffer schema files.
_flatbuffer_schemas_aspect = aspect(
    attr_aspects = [
        "deps",
        "srcs",
    ],
    implementation = _flatbuffer_schemas_aspect_impl,
)

# Rule to invoke the flatbuffer compiler.
def _gen_flatbuffer_srcs_impl(ctx):
    outputs = ctx.attr.outputs
    include_paths = ctx.attr.include_paths
    if ctx.attr.no_includes:
        no_includes_statement = ["--no-includes"]
    else:
        no_includes_statement = []

    # Need to generate all files in a directory.
    if not outputs:
        # No explicit outputs: declare one output directory and let flatc
        # populate it.
        outputs = [ctx.actions.declare_directory("{}_all".format(ctx.attr.name))]
        output_directory = outputs[0].path
    else:
        outputs = [ctx.actions.declare_file(output) for output in outputs]
        output_directory = outputs[0].dirname

    # Inputs: direct srcs/deps plus every schema collected by the aspect.
    deps = depset(ctx.files.srcs + ctx.files.deps, transitive = [
        dep[FlatbufferInfo].transitive_srcs
        for dep in ctx.attr.deps
        if FlatbufferInfo in dep
    ])

    include_paths_cmd_line = []
    for s in include_paths:
        include_paths_cmd_line.extend(["-I", s])

    # One flatc invocation per schema file.
    for src in ctx.files.srcs:
        ctx.actions.run(
            inputs = deps,
            outputs = outputs,
            executable = ctx.executable._flatc,
            arguments = [
                ctx.attr.language_flag,
                "-o",
                output_directory,
                # Allow for absolute imports and referencing of generated files.
                "-I",
                "./",
                "-I",
                ctx.genfiles_dir.path,
                "-I",
                ctx.bin_dir.path,
            ] + no_includes_statement +
            include_paths_cmd_line + [
                "--no-union-value-namespacing",
                "--gen-object-api",
                src.path,
            ],
            progress_message = "Generating flatbuffer files for {}:".format(src),
            use_default_shell_env = True,
        )
    return [
        DefaultInfo(files = depset(outputs)),
    ]

_gen_flatbuffer_srcs = rule(
    _gen_flatbuffer_srcs_impl,
    attrs = {
        "srcs": attr.label_list(
            allow_files = [".fbs"],
            mandatory = True,
        ),
        "outputs": attr.string_list(
            default = [],
            mandatory = False,
        ),
        "deps": attr.label_list(
            default = [],
            mandatory = False,
            aspects = [_flatbuffer_schemas_aspect],
        ),
        "include_paths": attr.string_list(
            default = [],
            mandatory = False,
        ),
        "language_flag": attr.string(
            mandatory = True,
        ),
        "no_includes": attr.bool(
            default = False,
            mandatory = False,
        ),
        "_flatc": attr.label(
            default = Label("@flatbuffers//:flatc"),
            executable = True,
            cfg = "exec",
        ),
    },
    output_to_genfiles = True,
)

def flatbuffer_py_strip_prefix_srcs(name, srcs = [], strip_prefix = ""):
    """Strips path prefix.

    Args:
      name: Rule name. (required)
      srcs: Source .py files. (required)
      strip_prefix: Path that needs to be stripped from the srcs filepaths. (required)
    """
    # One genrule per file: copies src to the same path minus strip_prefix.
    for src in srcs:
        native.genrule(
            name = name + "_" + src.replace(".", "_").replace("/", "_"),
            srcs = [src],
            outs = [src.replace(strip_prefix, "")],
            cmd = "cp $< $@",
        )

def _concat_flatbuffer_py_srcs_impl(ctx):
    # Merge all generated python files. The files are concatenated and import
    # statements are removed. Finally we import the flatbuffer runtime library.
    # IMPORTANT: Our Windows shell does not support "find ... -exec" properly.
    # If you're changing the commandline below, please build wheels and run smoke
    # tests on all the three operating systems.
    command = "echo 'import flatbuffers\n' > %s; "
    command += "for f in $(find %s -name '*.py' | sort); do cat $f | sed '/import flatbuffers/d' >> %s; done "
    ctx.actions.run_shell(
        inputs = ctx.attr.deps[0].files,
        outputs = [ctx.outputs.out],
        command = command % (
            ctx.outputs.out.path,
            ctx.attr.deps[0].files.to_list()[0].path,
            ctx.outputs.out.path,
        ),
    )

_concat_flatbuffer_py_srcs = rule(
    _concat_flatbuffer_py_srcs_impl,
    attrs = {
        "deps": attr.label_list(mandatory = True),
    },
    output_to_genfiles = True,
    outputs = {"out": "%{name}.py"},
)

def flatbuffer_py_library(
        name,
        srcs,
        deps = [],
        include_paths = []):
    """A py_library with the generated reader/writers for the given schema.

    This rule assumes that the schema files define non-conflicting names, so that
    they can be merged in a single file. This is e.g. the case if only a single
    namespace is used.
    The rule call the flatbuffer compiler for all schema files and merges the
    generated python files into a single file that is wrapped in a py_library.

    Args:
      name: Rule name. (required)
      srcs: List of source .fbs files. (required)
      deps: List of dependencies.
      include_paths: Optional, list of paths the includes files can be found in.
    """
    # Generated sources with includes resolved (kept for dependents).
    all_srcs = "{}_srcs".format(name)
    _gen_flatbuffer_srcs(
        name = all_srcs,
        srcs = srcs,
        language_flag = "--python",
        deps = deps,
        include_paths = include_paths,
    )
    # Second generation pass without includes: these are the files that get
    # concatenated, so included schemas are not duplicated in the merged file.
    all_srcs_no_include = "{}_srcs_no_include".format(name)
    _gen_flatbuffer_srcs(
        name = all_srcs_no_include,
        srcs = srcs,
        language_flag = "--python",
        deps = deps,
        no_includes = True,
        include_paths = include_paths,
    )
    concat_py_srcs = "{}_generated".format(name)
    _concat_flatbuffer_py_srcs(
        name = concat_py_srcs,
        deps = [
            ":{}".format(all_srcs_no_include),
        ],
    )
    native.py_library(
        name = name,
        srcs = [
            ":{}".format(concat_py_srcs),
        ],
        srcs_version = "PY3",
        deps = deps + [
            "@flatbuffers//:runtime_py",
        ],
    )
diff --git a/third_party/flatbuffers/workspace.bzl b/third_party/flatbuffers/workspace.bzl
new file mode 100644
index 0000000..e799a70
--- /dev/null
+++ b/third_party/flatbuffers/workspace.bzl
@@ -0,0 +1,18 @@
"""Loads the Flatbuffers library, used by TF Lite."""

load("//third_party:repo.bzl", "tf_http_archive")

def repo():
    # Pins flatbuffers to a fixed commit; sha256 guards archive integrity.
    tf_http_archive(
        name = "flatbuffers",
        strip_prefix = "flatbuffers-a66de58af9565586832c276fbb4251fc416bf07f",
        sha256 = "da06ac2fc6fed8e38b6392f5a20fa24a4290cecaadd87aef16b6b84960408680",
        urls = [
            "https://github.com/google/flatbuffers/archive/a66de58af9565586832c276fbb4251fc416bf07f.tar.gz",
        ],
        build_file = "//third_party/flatbuffers:BUILD.external",
        system_build_file = "//third_party/flatbuffers:BUILD.system",
        link_files = {
            "//third_party/flatbuffers:build_defs.bzl": "build_defs.bzl",
        },
    )
diff --git a/third_party/hexagon/LICENSE b/third_party/hexagon/LICENSE
new file mode 100644
index 0000000..0353ff4
--- /dev/null
+++ b/third_party/hexagon/LICENSE
@@ -0,0 +1,231 @@
/* Copyright 2020 The Qualcomm Innovation Center, Inc. All Rights Reserved.
+ +Redistribution and use in source and binary forms, with or without +modification, are permitted (subject to the limitations in the disclaimer +below) provided that the following conditions are met: + +* Redistributions of source code must retain the above copyright notice, + this list of conditions and the following disclaimer. +* Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. +* Neither the name of Qualcomm Innovation Center, Inc. nor the names of its + contributors may be used to endorse or promote products derived from this + software without specific prior written permission. + +NO EXPRESS OR IMPLIED LICENSES TO ANY PARTY'S PATENT RIGHTS ARE GRANTED BY +THIS LICENSE. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND +CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT +NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A +PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER +OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, +EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, +PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; +OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, +WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR +OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF +ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +==============================================================================*/ + + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. 
+ + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. 
+ + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of 
the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/third_party/hexagon/fully_connected.cc b/third_party/hexagon/fully_connected.cc new file mode 100644 index 0000000..c27c238 --- /dev/null +++ b/third_party/hexagon/fully_connected.cc @@ -0,0 +1,132 @@ +/* Copyright 2017 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/ + +/* Copyright 2020 The Qualcomm Innovation Center, Inc. All Rights Reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted (subject to the limitations in the disclaimer +below) provided that the following conditions are met: + +* Redistributions of source code must retain the above copyright notice, + this list of conditions and the following disclaimer. +* Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. +* Neither the name of Qualcomm Innovation Center, Inc. nor the names of its + contributors may be used to endorse or promote products derived from this + software without specific prior written permission. + +NO EXPRESS OR IMPLIED LICENSES TO ANY PARTY'S PATENT RIGHTS ARE GRANTED BY +THIS LICENSE. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND +CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT +NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A +PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER +OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, +EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, +PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; +OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, +WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR +OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF +ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+==============================================================================*/ + +#include "tensorflow/lite/micro/kernels/fully_connected.h" + +#include "tensorflow/lite/c/builtin_op_data.h" +#include "tensorflow/lite/c/common.h" +#include "tensorflow/lite/kernels/internal/common.h" +#include "tensorflow/lite/kernels/internal/quantization_util.h" +#include "tensorflow/lite/kernels/internal/reference/fully_connected.h" +#include "tensorflow/lite/kernels/internal/reference/integer_ops/fully_connected.h" +#include "tensorflow/lite/kernels/internal/tensor_ctypes.h" +#include "tensorflow/lite/kernels/kernel_util.h" +#include "tensorflow/lite/micro/kernels/kernel_util.h" +#include "tensorflow/lite/micro/micro_log.h" +#include "third_party/hexagon/hexagon_fully_connected.h" +#include "third_party/hexagon/hexagon_tflm_translation_fully_connected.h" + +namespace tflite { + +namespace { + +TfLiteStatus EvalFloat(TfLiteContext* context, TfLiteNode* node, + TfLiteFusedActivation activation, + const TfLiteEvalTensor* input, + const TfLiteEvalTensor* filter, + const TfLiteEvalTensor* bias, TfLiteEvalTensor* output) { + float output_activation_min, output_activation_max; + CalculateActivationRange(activation, &output_activation_min, + &output_activation_max); + tflite::FullyConnectedParams op_params; + op_params.float_activation_min = output_activation_min; + op_params.float_activation_max = output_activation_max; + + const float* bias_data = + nullptr != bias ? 
tflite::micro::GetTensorData(bias) : nullptr; + + tflite::reference_ops::FullyConnected( + op_params, tflite::micro::GetTensorShape(input), + tflite::micro::GetTensorData(input), + tflite::micro::GetTensorShape(filter), + tflite::micro::GetTensorData(filter), + tflite::micro::GetTensorShape(bias), bias_data, + tflite::micro::GetTensorShape(output), + tflite::micro::GetTensorData(output)); + return kTfLiteOk; +} + +} // namespace + +TfLiteStatus HexagonFullyConnectedEval(TfLiteContext* context, + TfLiteNode* node) { + TFLITE_DCHECK(node->builtin_data != nullptr); + const auto* params = + static_cast(node->builtin_data); + + const TfLiteEvalTensor* input = + tflite::micro::GetEvalInput(context, node, kFullyConnectedInputTensor); + const TfLiteEvalTensor* filter = + tflite::micro::GetEvalInput(context, node, kFullyConnectedWeightsTensor); + const TfLiteEvalTensor* bias = + tflite::micro::GetEvalInput(context, node, kFullyConnectedBiasTensor); + TfLiteEvalTensor* output = + tflite::micro::GetEvalOutput(context, node, kFullyConnectedOutputTensor); + + TFLITE_DCHECK(node->user_data != nullptr); + + // Checks in Prepare ensure input, output and filter types are all the same. 
+ switch (input->type) { + case kTfLiteFloat32: + return EvalFloat(context, node, params->activation, input, filter, bias, + output); + + case kTfLiteInt8: + return HexagonFullyConnectedEvalInt8(context, node); + + default: + MicroPrintf( "Type %s (%d) not supported.", + TfLiteTypeGetName(input->type), input->type); + return kTfLiteError; + } + return kTfLiteOk; +} + +TFLMRegistration Register_FULLY_CONNECTED() { + return tflite::micro::RegisterOp(HexagonFullyConnectedInit, + HexagonFullyConnectedPrepare, + HexagonFullyConnectedEval); +} + +} // namespace tflite diff --git a/third_party/hexagon/fully_connected_int8.cc b/third_party/hexagon/fully_connected_int8.cc new file mode 100644 index 0000000..fadb74a --- /dev/null +++ b/third_party/hexagon/fully_connected_int8.cc @@ -0,0 +1,207 @@ +/* Copyright 2021 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +/* Copyright 2020 The Qualcomm Innovation Center, Inc. All Rights Reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted (subject to the limitations in the disclaimer +below) provided that the following conditions are met: + +* Redistributions of source code must retain the above copyright notice, + this list of conditions and the following disclaimer. 
+* Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. +* Neither the name of Qualcomm Innovation Center, Inc. nor the names of its + contributors may be used to endorse or promote products derived from this + software without specific prior written permission. + +NO EXPRESS OR IMPLIED LICENSES TO ANY PARTY'S PATENT RIGHTS ARE GRANTED BY +THIS LICENSE. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND +CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT +NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A +PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER +OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, +EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, +PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; +OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, +WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR +OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF +ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+==============================================================================*/ + +#include "hexagon_tflm_translation_fully_connected.h" +#include "tensorflow/lite/c/builtin_op_data.h" +#include "tensorflow/lite/c/common.h" +#include "tensorflow/lite/kernels/internal/common.h" +#include "tensorflow/lite/kernels/internal/quantization_util.h" +#include "tensorflow/lite/kernels/internal/reference/fully_connected.h" +#include "tensorflow/lite/kernels/internal/reference/integer_ops/fully_connected.h" +#include "tensorflow/lite/kernels/internal/tensor_ctypes.h" +#include "tensorflow/lite/kernels/kernel_util.h" +#include "tensorflow/lite/micro/kernels/fully_connected.h" +#include "tensorflow/lite/micro/kernels/kernel_util.h" +#include "third_party/hexagon/hexagon_fully_connected.h" +#include "third_party/hexagon/hexagon_tflm_translation_fully_connected.h" + +namespace tflite { +namespace { + +TfLiteStatus EvalQuantizedInt8(TfLiteContext* context, TfLiteNode* node, + const HexagonOpDataFullyConnected& data, + const TfLiteEvalTensor* input, + const TfLiteEvalTensor* filter, + const TfLiteEvalTensor* bias, + TfLiteEvalTensor* output) { + tflite::FullyConnectedParams op_params; + op_params.input_offset = -data.reference_op_data.input_zero_point; + op_params.weights_offset = -data.reference_op_data.filter_zero_point; + op_params.output_offset = data.reference_op_data.output_zero_point; + op_params.output_multiplier = data.reference_op_data.output_multiplier; + // TODO(b/138810107): Figure out whether output shift should be inverted + op_params.output_shift = data.reference_op_data.output_shift; + op_params.quantized_activation_min = + data.reference_op_data.output_activation_min; + op_params.quantized_activation_max = + data.reference_op_data.output_activation_max; + + const int32_t* bias_data = + nullptr != bias ? 
tflite::micro::GetTensorData(bias) : nullptr; + + reference_integer_ops::FullyConnected( + op_params, tflite::micro::GetTensorShape(input), + tflite::micro::GetTensorData(input), + tflite::micro::GetTensorShape(filter), + tflite::micro::GetTensorData(filter), + tflite::micro::GetTensorShape(bias), bias_data, + tflite::micro::GetTensorShape(output), + tflite::micro::GetTensorData(output)); + + return kTfLiteOk; +} + +} // namespace + +void* HexagonFullyConnectedInit(TfLiteContext* context, const char* buffer, + size_t length) { + TFLITE_DCHECK(context->AllocatePersistentBuffer != nullptr); + void* data = nullptr; + data = context->AllocatePersistentBuffer(context, + sizeof(HexagonOpDataFullyConnected)); + + if (data == nullptr) { + return nullptr; + } + HexagonOpDataFullyConnected* opdata = + static_cast(data); + opdata->hexagon_data = + tflite::hexagon_fully_connected::HexagonInit(context, buffer, length); + + return data; +} + +TfLiteStatus HexagonFullyConnectedPrepare(TfLiteContext* context, + TfLiteNode* node) { + TFLITE_DCHECK(node->user_data != nullptr); + TFLITE_DCHECK(node->builtin_data != nullptr); + + HexagonOpDataFullyConnected* data = + static_cast(node->user_data); + const auto params = + static_cast(node->builtin_data); + + MicroContext* micro_context = GetMicroContext(context); + + TfLiteTensor* input = + micro_context->AllocateTempInputTensor(node, kFullyConnectedInputTensor); + TF_LITE_ENSURE(context, input != nullptr); + TfLiteTensor* filter = micro_context->AllocateTempInputTensor( + node, kFullyConnectedWeightsTensor); + TF_LITE_ENSURE(context, filter != nullptr); + TfLiteTensor* bias = + micro_context->AllocateTempInputTensor(node, kFullyConnectedBiasTensor); + TfLiteTensor* output = micro_context->AllocateTempOutputTensor( + node, kFullyConnectedOutputTensor); + TF_LITE_ENSURE(context, output != nullptr); + + TF_LITE_ENSURE_OK( + context, CalculateOpDataFullyConnected(context, params->activation, + input->type, input, filter, bias, + output, 
&data->reference_op_data)); + + TF_LITE_ENSURE_TYPES_EQ(context, input->type, output->type); + TF_LITE_ENSURE_MSG(context, input->type == filter->type, + "Hybrid models are not supported on TFLite Micro."); + + micro_context->DeallocateTempTfLiteTensor(input); + micro_context->DeallocateTempTfLiteTensor(filter); + if (bias != nullptr) { + micro_context->DeallocateTempTfLiteTensor(bias); + } + micro_context->DeallocateTempTfLiteTensor(output); + + TF_LITE_ENSURE_TYPES_EQ(context, input->type, output->type); + TF_LITE_ENSURE_MSG(context, input->type == filter->type, + "Hybrid models are not supported on TFLite Micro."); + + tflite::hexagon_fully_connected::HexagonOptimizationEvaluation(context, node); + + if (tflite::hexagon_fully_connected::HexagonOptimizable(context, node)) { + return tflite::hexagon_fully_connected::HexagonPrepare(context, node); + } + return kTfLiteOk; +} + +TfLiteStatus HexagonFullyConnectedEvalInt8(TfLiteContext* context, + TfLiteNode* node) { + TFLITE_DCHECK(node->builtin_data != nullptr); + + const TfLiteEvalTensor* input = + tflite::micro::GetEvalInput(context, node, kFullyConnectedInputTensor); + const TfLiteEvalTensor* filter = + tflite::micro::GetEvalInput(context, node, kFullyConnectedWeightsTensor); + const TfLiteEvalTensor* bias = + tflite::micro::GetEvalInput(context, node, kFullyConnectedBiasTensor); + TfLiteEvalTensor* output = + tflite::micro::GetEvalOutput(context, node, kFullyConnectedOutputTensor); + + TFLITE_DCHECK(node->user_data != nullptr); + const HexagonOpDataFullyConnected& data = + *(static_cast(node->user_data)); + + // This kernel only implements the int8 version of the fully_connected kernel. 
+ TFLITE_DCHECK(input->type == kTfLiteInt8); + TFLITE_DCHECK(filter->type == kTfLiteInt8); + if (bias != nullptr) { + TFLITE_DCHECK(bias->type == kTfLiteInt32); + } + TFLITE_DCHECK(output->type == kTfLiteInt8); + + if (tflite::hexagon_fully_connected::HexagonOptimizable(context, node)) { + return tflite::hexagon_fully_connected::HexagonEvalQuantizedInt8( + context, node, node->user_data, input, filter, bias, output); + } else { + return EvalQuantizedInt8(context, node, data, input, filter, bias, output); + } + return kTfLiteOk; +} + +TFLMRegistration Register_FULLY_CONNECTED_INT8() { + return tflite::micro::RegisterOp(HexagonFullyConnectedInit, + HexagonFullyConnectedPrepare, + HexagonFullyConnectedEvalInt8); +} + +} // namespace tflite diff --git a/third_party/hexagon/hexagon_fully_connected.h b/third_party/hexagon/hexagon_fully_connected.h new file mode 100644 index 0000000..b4eddb5 --- /dev/null +++ b/third_party/hexagon/hexagon_fully_connected.h @@ -0,0 +1,39 @@ +/* Copyright 2021 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/ +#ifndef TENSORFLOW_LITE_MICRO_KERNELS_HEXAGON_HEXAGON_FULLY_CONNECTED_H_ +#define TENSORFLOW_LITE_MICRO_KERNELS_HEXAGON_HEXAGON_FULLY_CONNECTED_H_ + +#include "tensorflow/lite/c/builtin_op_data.h" +#include "tensorflow/lite/c/common.h" +#include "tensorflow/lite/kernels/internal/types.h" +#include "tensorflow/lite/micro/kernels/fully_connected.h" + +namespace tflite { + +struct HexagonOpDataFullyConnected { + struct OpDataFullyConnected reference_op_data; + void* hexagon_data; +}; + +void* HexagonFullyConnectedInit(TfLiteContext* context, const char* buffer, + size_t length); +TfLiteStatus HexagonFullyConnectedPrepare(TfLiteContext* context, + TfLiteNode* node); +TfLiteStatus HexagonFullyConnectedEvalInt8(TfLiteContext* context, + TfLiteNode* node); + +} // namespace tflite + +#endif // TENSORFLOW_LITE_MICRO_KERNELS_HEXAGON_HEXAGON_FULLY_CONNECTED_H_ diff --git a/third_party/hexagon/hexagon_svdf.h b/third_party/hexagon/hexagon_svdf.h new file mode 100644 index 0000000..b037348 --- /dev/null +++ b/third_party/hexagon/hexagon_svdf.h @@ -0,0 +1,37 @@ +/* Copyright 2021 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/ +#ifndef TENSORFLOW_LITE_MICRO_KERNELS_HEXAGON_HEXAGON_SVDF_H_ +#define TENSORFLOW_LITE_MICRO_KERNELS_HEXAGON_HEXAGON_SVDF_H_ + +#include "tensorflow/lite/c/builtin_op_data.h" +#include "tensorflow/lite/c/common.h" +#include "tensorflow/lite/kernels/internal/types.h" +#include "tensorflow/lite/micro/kernels/svdf.h" + +namespace tflite { + +struct HexagonOpDataSvdf { + struct OpDataSvdf reference_op_data; + void* hexagon_data; +}; + +void* HexagonSvdfInit(TfLiteContext* context, const char* buffer, + size_t length); +TfLiteStatus HexagonSvdfPrepare(TfLiteContext* context, TfLiteNode* node); +TfLiteStatus HexagonSvdfEvalInt8(TfLiteContext* context, TfLiteNode* node); + +} // namespace tflite + +#endif // TENSORFLOW_LITE_MICRO_KERNELS_HEXAGON_HEXAGON_SVDF_H_ diff --git a/third_party/hexagon/hexagon_tflm_translation_fully_connected.h b/third_party/hexagon/hexagon_tflm_translation_fully_connected.h new file mode 100644 index 0000000..4d17d12 --- /dev/null +++ b/third_party/hexagon/hexagon_tflm_translation_fully_connected.h @@ -0,0 +1,79 @@ +/* Copyright 2021 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +/* Copyright 2020 The Qualcomm Innovation Center, Inc. All Rights Reserved. 
+ +Redistribution and use in source and binary forms, with or without +modification, are permitted (subject to the limitations in the disclaimer +below) provided that the following conditions are met: + +* Redistributions of source code must retain the above copyright notice, + this list of conditions and the following disclaimer. +* Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. +* Neither the name of Qualcomm Innovation Center, Inc. nor the names of its + contributors may be used to endorse or promote products derived from this + software without specific prior written permission. + +NO EXPRESS OR IMPLIED LICENSES TO ANY PARTY'S PATENT RIGHTS ARE GRANTED BY +THIS LICENSE. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND +CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT +NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A +PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER +OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, +EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, +PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; +OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, +WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR +OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF +ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+==============================================================================*/ + +#ifndef _HEXAGON_TFLM_TRANSLATION_FULLY_CONNECTED_H_ +#define _HEXAGON_TFLM_TRANSLATION_FULLY_CONNECTED_H_ + +#include "tensorflow/lite/c/builtin_op_data.h" +#include "tensorflow/lite/c/common.h" +#include "tensorflow/lite/kernels/internal/common.h" +#include "tensorflow/lite/kernels/internal/quantization_util.h" +#include "tensorflow/lite/kernels/internal/reference/fully_connected.h" +#include "tensorflow/lite/kernels/internal/reference/integer_ops/fully_connected.h" +#include "tensorflow/lite/kernels/internal/tensor_ctypes.h" +#include "tensorflow/lite/kernels/kernel_util.h" +#include "tensorflow/lite/micro/kernels/fully_connected.h" +#include "tensorflow/lite/micro/kernels/kernel_util.h" + +namespace tflite { +namespace hexagon_fully_connected { + +void* HexagonInit(TfLiteContext* context, const char* buffer, size_t length); + +TfLiteStatus HexagonPrepare(TfLiteContext* context, TfLiteNode* node); + +TfLiteStatus HexagonEvalQuantizedInt8(TfLiteContext* context, TfLiteNode* node, + void* op_data, + const TfLiteEvalTensor* input, + const TfLiteEvalTensor* filter, + const TfLiteEvalTensor* bias, + TfLiteEvalTensor* output); + +void HexagonOptimizationEvaluation(TfLiteContext* context, TfLiteNode* node); +bool HexagonOptimizable(TfLiteContext* context, TfLiteNode* node); + +} // namespace hexagon_fully_connected +} // namespace tflite + +#endif // _HEXAGON_TFLM_TRANSLATION_FULLY_CONNECTED_H_ diff --git a/third_party/hexagon/hexagon_tflm_translation_svdf.h b/third_party/hexagon/hexagon_tflm_translation_svdf.h new file mode 100644 index 0000000..007dc8a --- /dev/null +++ b/third_party/hexagon/hexagon_tflm_translation_svdf.h @@ -0,0 +1,81 @@ +/* Copyright 2021 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. 
+You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +/* Copyright 2020 The Qualcomm Innovation Center, Inc. All Rights Reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted (subject to the limitations in the disclaimer +below) provided that the following conditions are met: + +* Redistributions of source code must retain the above copyright notice, + this list of conditions and the following disclaimer. +* Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. +* Neither the name of Qualcomm Innovation Center, Inc. nor the names of its + contributors may be used to endorse or promote products derived from this + software without specific prior written permission. + +NO EXPRESS OR IMPLIED LICENSES TO ANY PARTY'S PATENT RIGHTS ARE GRANTED BY +THIS LICENSE. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND +CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT +NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A +PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER +OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, +EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, +PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; +OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, +WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR +OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF +ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +==============================================================================*/ + +#ifndef _HEXAGON_TFLM_TRANSLATION_SVDF_H_ +#define _HEXAGON_TFLM_TRANSLATION_SVDF_H_ + +#include "tensorflow/lite/c/builtin_op_data.h" +#include "tensorflow/lite/c/common.h" +#include "tensorflow/lite/kernels/internal/common.h" +#include "tensorflow/lite/kernels/internal/quantization_util.h" +#include "tensorflow/lite/kernels/internal/tensor_ctypes.h" +#include "tensorflow/lite/kernels/kernel_util.h" +#include "tensorflow/lite/kernels/op_macros.h" +#include "tensorflow/lite/micro/kernels/activation_utils.h" +#include "tensorflow/lite/micro/kernels/kernel_util.h" +#include "tensorflow/lite/micro/micro_utils.h" + +namespace tflite { +namespace hexagon_svdf { + +void* HexagonInit(TfLiteContext* context, const char* buffer, size_t length); + +TfLiteStatus HexagonPrepare(TfLiteContext* context, TfLiteNode* node); + +void HexagonEvalIntegerSVDF(TfLiteContext* context, TfLiteNode* node, + const TfLiteEvalTensor* input_tensor, + const TfLiteEvalTensor* weights_feature_tensor, + const TfLiteEvalTensor* weights_time_tensor, + const TfLiteEvalTensor* bias_tensor, + const TfLiteSVDFParams* params, + TfLiteEvalTensor* activation_state_tensor, + TfLiteEvalTensor* output_tensor, void* op_data); + +void HexagonOptimizationEvaluation(TfLiteContext* context, TfLiteNode* node); +bool HexagonOptimizable(TfLiteContext* context, TfLiteNode* node); + +} // namespace hexagon_svdf +} // 
namespace tflite + +#endif // _HEXAGON_TFLM_TRANSLATION_SVDF_H_ diff --git a/third_party/hexagon/svdf.cc b/third_party/hexagon/svdf.cc new file mode 100644 index 0000000..b9a9ae2 --- /dev/null +++ b/third_party/hexagon/svdf.cc @@ -0,0 +1,111 @@ +/* Copyright 2019 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +/* Copyright 2020 The Qualcomm Innovation Center, Inc. All Rights Reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted (subject to the limitations in the disclaimer +below) provided that the following conditions are met: + +* Redistributions of source code must retain the above copyright notice, + this list of conditions and the following disclaimer. +* Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. +* Neither the name of Qualcomm Innovation Center, Inc. nor the names of its + contributors may be used to endorse or promote products derived from this + software without specific prior written permission. + +NO EXPRESS OR IMPLIED LICENSES TO ANY PARTY'S PATENT RIGHTS ARE GRANTED BY +THIS LICENSE. 
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND +CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT +NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A +PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER +OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, +EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, +PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; +OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, +WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR +OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF +ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +==============================================================================*/ + +#include + +#include "tensorflow/lite/c/builtin_op_data.h" +#include "tensorflow/lite/c/common.h" +#include "tensorflow/lite/kernels/internal/common.h" +#include "tensorflow/lite/kernels/internal/quantization_util.h" +#include "tensorflow/lite/kernels/internal/tensor_ctypes.h" +#include "tensorflow/lite/kernels/kernel_util.h" +#include "tensorflow/lite/kernels/op_macros.h" +#include "tensorflow/lite/micro/kernels/activation_utils.h" +#include "tensorflow/lite/micro/kernels/kernel_util.h" +#include "tensorflow/lite/micro/micro_log.h" +#include "tensorflow/lite/micro/micro_utils.h" +#include "third_party/hexagon/hexagon_svdf.h" +#include "third_party/hexagon/hexagon_tflm_translation_svdf.h" + +namespace tflite { + +TfLiteStatus SvdfEval(TfLiteContext* context, TfLiteNode* node) { + auto* params = reinterpret_cast(node->builtin_data); + TFLITE_DCHECK(node->user_data != nullptr); + const HexagonOpDataSvdf& data = + *(static_cast(node->user_data)); + + const TfLiteEvalTensor* input = + tflite::micro::GetEvalInput(context, node, kSvdfInputTensor); + const TfLiteEvalTensor* weights_feature = + tflite::micro::GetEvalInput(context, node, 
kSvdfWeightsFeatureTensor); + const TfLiteEvalTensor* weights_time = + tflite::micro::GetEvalInput(context, node, kSvdfWeightsTimeTensor); + const TfLiteEvalTensor* bias = + (NumInputs(node) == 5) + ? tflite::micro::GetEvalInput(context, node, kSvdfBiasTensor) + : nullptr; + TfLiteEvalTensor* activation_state = tflite::micro::GetMutableEvalInput( + context, node, kSvdfInputActivationStateTensor); + TfLiteEvalTensor* output = + tflite::micro::GetEvalOutput(context, node, kSvdfOutputTensor); + + switch (weights_feature->type) { + case kTfLiteFloat32: { + EvalFloatSvdfReference(context, node, input, weights_feature, + weights_time, bias, params, + data.reference_op_data.scratch_tensor_index, + activation_state, output); + return kTfLiteOk; + break; + } + + case kTfLiteInt8: { + return HexagonSvdfEvalInt8(context, node); + } + + default: + MicroPrintf( "Type %s not currently supported.", + TfLiteTypeGetName(weights_feature->type)); + return kTfLiteError; + } + return kTfLiteOk; +} + +TFLMRegistration Register_SVDF() { + return tflite::micro::RegisterOp(HexagonSvdfInit, HexagonSvdfPrepare, + SvdfEval); +} + +} // namespace tflite diff --git a/third_party/hexagon/svdf_int8.cc b/third_party/hexagon/svdf_int8.cc new file mode 100644 index 0000000..8db0218 --- /dev/null +++ b/third_party/hexagon/svdf_int8.cc @@ -0,0 +1,132 @@ +/* Copyright 2021 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/ + +/* Copyright 2020 The Qualcomm Innovation Center, Inc. All Rights Reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted (subject to the limitations in the disclaimer +below) provided that the following conditions are met: + +* Redistributions of source code must retain the above copyright notice, + this list of conditions and the following disclaimer. +* Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. +* Neither the name of Qualcomm Innovation Center, Inc. nor the names of its + contributors may be used to endorse or promote products derived from this + software without specific prior written permission. + +NO EXPRESS OR IMPLIED LICENSES TO ANY PARTY'S PATENT RIGHTS ARE GRANTED BY +THIS LICENSE. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND +CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT +NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A +PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER +OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, +EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, +PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; +OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, +WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR +OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF +ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+==============================================================================*/ + +#include + +#include "tensorflow/lite/c/builtin_op_data.h" +#include "tensorflow/lite/c/common.h" +#include "tensorflow/lite/kernels/internal/common.h" +#include "tensorflow/lite/kernels/internal/quantization_util.h" +#include "tensorflow/lite/kernels/internal/tensor_ctypes.h" +#include "tensorflow/lite/kernels/kernel_util.h" +#include "tensorflow/lite/kernels/op_macros.h" +#include "tensorflow/lite/micro/kernels/activation_utils.h" +#include "tensorflow/lite/micro/kernels/kernel_util.h" +#include "tensorflow/lite/micro/micro_utils.h" +#include "third_party/hexagon/hexagon_svdf.h" +#include "third_party/hexagon/hexagon_tflm_translation_svdf.h" + +namespace tflite { + +TfLiteStatus HexagonSvdfEvalInt8(TfLiteContext* context, TfLiteNode* node) { + auto* params = reinterpret_cast(node->builtin_data); + TFLITE_DCHECK(node->user_data != nullptr); + const HexagonOpDataSvdf& data = + *(static_cast(node->user_data)); + + const TfLiteEvalTensor* input = + tflite::micro::GetEvalInput(context, node, kSvdfInputTensor); + const TfLiteEvalTensor* weights_feature = + tflite::micro::GetEvalInput(context, node, kSvdfWeightsFeatureTensor); + const TfLiteEvalTensor* weights_time = + tflite::micro::GetEvalInput(context, node, kSvdfWeightsTimeTensor); + const TfLiteEvalTensor* bias = + (NumInputs(node) == 5) + ? 
tflite::micro::GetEvalInput(context, node, kSvdfBiasTensor) + : nullptr; + TfLiteEvalTensor* activation_state = tflite::micro::GetMutableEvalInput( + context, node, kSvdfInputActivationStateTensor); + TfLiteEvalTensor* output = + tflite::micro::GetEvalOutput(context, node, kSvdfOutputTensor); + + if (tflite::hexagon_svdf::HexagonOptimizable(context, node)) { + tflite::hexagon_svdf::HexagonEvalIntegerSVDF( + context, node, input, weights_feature, weights_time, bias, params, + activation_state, output, node->user_data); + } else { + EvalInt16SvdfReference(context, node, input, weights_feature, weights_time, + bias, params, activation_state, output, + data.reference_op_data); + } + return kTfLiteOk; +} + +void* HexagonSvdfInit(TfLiteContext* context, const char* buffer, + size_t length) { + TFLITE_DCHECK(context->AllocatePersistentBuffer != nullptr); + void* data = context->AllocatePersistentBuffer(context, sizeof(OpDataSvdf)); + + if (data == nullptr) { + return nullptr; + } + + HexagonOpDataSvdf* opdata = static_cast(data); + opdata->hexagon_data = + tflite::hexagon_svdf::HexagonInit(context, buffer, length); + + return data; +} + +TfLiteStatus HexagonSvdfPrepare(TfLiteContext* context, TfLiteNode* node) { + TfLiteStatus prepare_status = PrepareSvdf(context, node); + if (prepare_status != kTfLiteOk) { + return prepare_status; + } + + tflite::hexagon_svdf::HexagonOptimizationEvaluation(context, node); + + if (tflite::hexagon_svdf::HexagonOptimizable(context, node)) { + TF_LITE_ENSURE_OK(context, + tflite::hexagon_svdf::HexagonPrepare(context, node)); + } + + return kTfLiteOk; +} + +TFLMRegistration Register_SVDF_INT8() { + return tflite::micro::RegisterOp(HexagonSvdfInit, HexagonSvdfPrepare, + HexagonSvdfEvalInt8); +} + +} // namespace tflite diff --git a/third_party/kissfft/BUILD b/third_party/kissfft/BUILD new file mode 100644 index 0000000..82bab3f --- /dev/null +++ b/third_party/kissfft/BUILD @@ -0,0 +1 @@ +# This empty BUILD file is required to make Bazel treat 
this directory as a package. diff --git a/third_party/kissfft/BUILD.bazel b/third_party/kissfft/BUILD.bazel new file mode 100644 index 0000000..25acaa6 --- /dev/null +++ b/third_party/kissfft/BUILD.bazel @@ -0,0 +1,20 @@ +package( + default_visibility = ["//visibility:public"], +) + +licenses(["notice"]) # Apache 2.0 + +exports_files(["COPYING"]) + +cc_library( + name = "kiss_fftr", + srcs = [ + ], + hdrs = [ + "kiss_fft.c", + "tools/kiss_fftr.c", + "_kiss_fft_guts.h", + "kiss_fft.h", + "tools/kiss_fftr.h", + ], +) diff --git a/third_party/kissfft/kissfft.patch b/third_party/kissfft/kissfft.patch new file mode 100644 index 0000000..404d91f --- /dev/null +++ b/third_party/kissfft/kissfft.patch @@ -0,0 +1,116 @@ +diff --git a/_kiss_fft_guts.h b/_kiss_fft_guts.h +index ba66144..1a0f4c2 100644 +--- a/_kiss_fft_guts.h ++++ b/_kiss_fft_guts.h +@@ -1,3 +1,6 @@ ++#ifndef _KISS_FFT_GUTS_H ++#define _KISS_FFT_GUTS_H ++ + /* + Copyright (c) 2003-2010, Mark Borgerding + +@@ -135,7 +138,7 @@ struct kiss_fft_state{ + #else + # define KISS_FFT_COS(phase) (kiss_fft_scalar) cos(phase) + # define KISS_FFT_SIN(phase) (kiss_fft_scalar) sin(phase) +-# define HALF_OF(x) ((x)*.5) ++# define HALF_OF(x) ((x)*(kiss_fft_scalar).5) + #endif + + #define kf_cexp(x,phase) \ +@@ -162,3 +165,4 @@ struct kiss_fft_state{ + #define KISS_FFT_TMP_ALLOC(nbytes) KISS_FFT_MALLOC(nbytes) + #define KISS_FFT_TMP_FREE(ptr) KISS_FFT_FREE(ptr) + #endif ++#endif // _KISS_FFT_GUTS_H +diff --git a/kiss_fft.c b/kiss_fft.c +index 465d6c9..9133a01 100644 +--- a/kiss_fft.c ++++ b/kiss_fft.c +@@ -375,7 +375,7 @@ void kiss_fft_stride(kiss_fft_cfg st,const kiss_fft_cpx *fin,kiss_fft_cpx *fout, + //It just performs an out-of-place FFT into a temp buffer + kiss_fft_cpx * tmpbuf = (kiss_fft_cpx*)KISS_FFT_TMP_ALLOC( sizeof(kiss_fft_cpx)*st->nfft); + kf_work(tmpbuf,fin,1,in_stride, st->factors,st); +- memcpy(fout,tmpbuf,sizeof(kiss_fft_cpx)*st->nfft); ++ /* memcpy(fout,tmpbuf,sizeof(kiss_fft_cpx)*st->nfft); */ + 
KISS_FFT_TMP_FREE(tmpbuf); + }else{ + kf_work( fout, fin, 1,in_stride, st->factors,st ); +diff --git a/kiss_fft.h b/kiss_fft.h +index 64c50f4..24e4d0c 100644 +--- a/kiss_fft.h ++++ b/kiss_fft.h +@@ -7,7 +7,7 @@ + #include + + #ifdef __cplusplus +-extern "C" { ++extern "C++" { + #endif + + /* +@@ -29,13 +29,13 @@ extern "C" { + #define KISS_FFT_MALLOC(nbytes) _mm_malloc(nbytes,16) + #define KISS_FFT_FREE _mm_free + #else +-#define KISS_FFT_MALLOC malloc +-#define KISS_FFT_FREE free ++#define KISS_FFT_MALLOC(X) (void*)(0x0) /* Patched. */ ++#define KISS_FFT_FREE(X) /* Patched. */ + #endif + + + #ifdef FIXED_POINT +-#include ++#include /* Patched. */ + # if (FIXED_POINT == 32) + # define kiss_fft_scalar int32_t + # else +diff --git a/tools/kiss_fftr.c b/tools/kiss_fftr.c +index b8e238b..0d22a04 100644 +--- a/tools/kiss_fftr.c ++++ b/tools/kiss_fftr.c +@@ -31,7 +31,7 @@ kiss_fftr_cfg kiss_fftr_alloc(int nfft,int inverse_fft,void * mem,size_t * lenme + size_t subsize, memneeded; + + if (nfft & 1) { +- fprintf(stderr,"Real FFT optimization must be even.\n"); ++ /* fprintf(stderr,"Real FFT optimization must be even.\n"); */ + return NULL; + } + nfft >>= 1; +@@ -71,8 +71,8 @@ void kiss_fftr(kiss_fftr_cfg st,const kiss_fft_scalar *timedata,kiss_fft_cpx *fr + kiss_fft_cpx fpnk,fpk,f1k,f2k,tw,tdc; + + if ( st->substate->inverse) { +- fprintf(stderr,"kiss fft usage error: improper alloc\n"); +- exit(1); ++ /* fprintf(stderr,"kiss fft usage error: improper alloc\n"); */ ++ return; /* exit(1); */ + } + + ncfft = st->substate->nfft; +@@ -126,8 +126,8 @@ void kiss_fftri(kiss_fftr_cfg st,const kiss_fft_cpx *freqdata,kiss_fft_scalar *t + int k, ncfft; + + if (st->substate->inverse == 0) { +- fprintf (stderr, "kiss fft usage error: improper alloc\n"); +- exit (1); ++ /* fprintf (stderr, "kiss fft usage error: improper alloc\n"); */ ++ return; /* exit (1); */ + } + + ncfft = st->substate->nfft; +diff --git a/tools/kiss_fftr.h b/tools/kiss_fftr.h +index 72e5a57..b888a28 100644 +--- 
a/tools/kiss_fftr.h ++++ b/tools/kiss_fftr.h +@@ -3,7 +3,7 @@ + + #include "kiss_fft.h" + #ifdef __cplusplus +-extern "C" { ++extern "C++" { + #endif + + diff --git a/third_party/kissfft/workspace.bzl b/third_party/kissfft/workspace.bzl new file mode 100644 index 0000000..2098148 --- /dev/null +++ b/third_party/kissfft/workspace.bzl @@ -0,0 +1,15 @@ +"""Loads the kissfft library, used by TF Lite.""" + +load("//third_party:repo.bzl", "tf_http_archive") + +def repo(): + tf_http_archive( + name = "kissfft", + patch_file = "//third_party/kissfft:kissfft.patch", + strip_prefix = "kissfft-130", + sha256 = "ac2259f84e372a582270ed7c7b709d02e6ca9c7206e40bb58de6ef77f6474872", + urls = [ + "https://github.com/mborgerding/kissfft/archive/refs/tags/v130.zip", + ], + build_file = "//third_party/kissfft:BUILD.bazel", + ) diff --git a/third_party/python_requirements.in b/third_party/python_requirements.in new file mode 100644 index 0000000..3614311 --- /dev/null +++ b/third_party/python_requirements.in @@ -0,0 +1,34 @@ +# Specify the Python packages available as dependencies to targets in Bazel. +# +# When modifying this list, always run +# the //third_party:python_requirements.update target: +# +# bazel run //third_party:python_requirements.update +# +# to compile (using pip-compile) this list of direct dependencies into a pinned +# requirements file---a complete list of direct and transitive dependencies, +# pinned by version and cryptographic hash. The pinned requirements file is +# used in @rules_python's pip_parse() in the WORKSPACE file to create the +# external repositories available as dependencies to py_binary() and +# py_library() targets. 
+# +# To upgrade dependencies to their latest version, run the update target with +# the option --upgrade: +# +# bazel run //third_party:python_requirements.update -- --upgrade +# +# Without the --upgrade option, the underlying pip-compile only adds or removes +# dependencies without upgrading them to the latest versions available in PyPI. +# +# Both this input file and the pinned requirements file should be committed to +# git. Avoid committing changes that break other developers by using an +# environment that meets the project's recommendations. Dependency resolution +# is sensitive to the Python environment (interpreter version, etc.) in which +# it is run. + +tensorflow-cpu +numpy +mako +pillow +yapf +protobuf diff --git a/third_party/python_requirements.txt b/third_party/python_requirements.txt new file mode 100644 index 0000000..d0021b0 --- /dev/null +++ b/third_party/python_requirements.txt @@ -0,0 +1,641 @@ +# +# This file is autogenerated by pip-compile with python 3.10 +# To update, run: +# +# bazel run //third_party:python_requirements.update +# +absl-py==1.4.0 \ + --hash=sha256:0d3fe606adfa4f7db64792dd4c7aee4ee0c38ab75dfd353b7a83ed3e957fcb47 \ + --hash=sha256:d2c244d01048ba476e7c080bd2c6df5e141d211de80223460d5b3b8a2a58433d + # via + # tensorboard + # tensorflow-cpu +astunparse==1.6.3 \ + --hash=sha256:5ad93a8456f0d084c3456d059fd9a92cce667963232cbf763eac3bc5b7940872 \ + --hash=sha256:c2652417f2c8b5bb325c885ae329bdf3f86424075c4fd1a128674bc6fba4b8e8 + # via tensorflow-cpu +cachetools==5.3.0 \ + --hash=sha256:13dfddc7b8df938c21a940dfa6557ce6e94a2f1cdfa58eb90c805721d58f2c14 \ + --hash=sha256:429e1a1e845c008ea6c85aa35d4b98b65d6a9763eeef3e37e92728a12d1de9d4 + # via google-auth +certifi==2023.5.7 \ + --hash=sha256:0f0d56dc5a6ad56fd4ba36484d6cc34451e1c6548c61daad8c320169f91eddc7 \ + --hash=sha256:c6c2e98f5c7869efca1f8916fed228dd91539f9f1b444c314c06eef02980c716 + # via requests +charset-normalizer==3.1.0 \ + 
--hash=sha256:04afa6387e2b282cf78ff3dbce20f0cc071c12dc8f685bd40960cc68644cfea6 \ + --hash=sha256:04eefcee095f58eaabe6dc3cc2262f3bcd776d2c67005880894f447b3f2cb9c1 \ + --hash=sha256:0be65ccf618c1e7ac9b849c315cc2e8a8751d9cfdaa43027d4f6624bd587ab7e \ + --hash=sha256:0c95f12b74681e9ae127728f7e5409cbbef9cd914d5896ef238cc779b8152373 \ + --hash=sha256:0ca564606d2caafb0abe6d1b5311c2649e8071eb241b2d64e75a0d0065107e62 \ + --hash=sha256:10c93628d7497c81686e8e5e557aafa78f230cd9e77dd0c40032ef90c18f2230 \ + --hash=sha256:11d117e6c63e8f495412d37e7dc2e2fff09c34b2d09dbe2bee3c6229577818be \ + --hash=sha256:11d3bcb7be35e7b1bba2c23beedac81ee893ac9871d0ba79effc7fc01167db6c \ + --hash=sha256:12a2b561af122e3d94cdb97fe6fb2bb2b82cef0cdca131646fdb940a1eda04f0 \ + --hash=sha256:12d1a39aa6b8c6f6248bb54550efcc1c38ce0d8096a146638fd4738e42284448 \ + --hash=sha256:1435ae15108b1cb6fffbcea2af3d468683b7afed0169ad718451f8db5d1aff6f \ + --hash=sha256:1c60b9c202d00052183c9be85e5eaf18a4ada0a47d188a83c8f5c5b23252f649 \ + --hash=sha256:1e8fcdd8f672a1c4fc8d0bd3a2b576b152d2a349782d1eb0f6b8e52e9954731d \ + --hash=sha256:20064ead0717cf9a73a6d1e779b23d149b53daf971169289ed2ed43a71e8d3b0 \ + --hash=sha256:21fa558996782fc226b529fdd2ed7866c2c6ec91cee82735c98a197fae39f706 \ + --hash=sha256:22908891a380d50738e1f978667536f6c6b526a2064156203d418f4856d6e86a \ + --hash=sha256:3160a0fd9754aab7d47f95a6b63ab355388d890163eb03b2d2b87ab0a30cfa59 \ + --hash=sha256:322102cdf1ab682ecc7d9b1c5eed4ec59657a65e1c146a0da342b78f4112db23 \ + --hash=sha256:34e0a2f9c370eb95597aae63bf85eb5e96826d81e3dcf88b8886012906f509b5 \ + --hash=sha256:3573d376454d956553c356df45bb824262c397c6e26ce43e8203c4c540ee0acb \ + --hash=sha256:3747443b6a904001473370d7810aa19c3a180ccd52a7157aacc264a5ac79265e \ + --hash=sha256:38e812a197bf8e71a59fe55b757a84c1f946d0ac114acafaafaf21667a7e169e \ + --hash=sha256:3a06f32c9634a8705f4ca9946d667609f52cf130d5548881401f1eb2c39b1e2c \ + --hash=sha256:3a5fc78f9e3f501a1614a98f7c54d3969f3ad9bba8ba3d9b438c3bc5d047dd28 \ + 
--hash=sha256:3d9098b479e78c85080c98e1e35ff40b4a31d8953102bb0fd7d1b6f8a2111a3d \ + --hash=sha256:3dc5b6a8ecfdc5748a7e429782598e4f17ef378e3e272eeb1340ea57c9109f41 \ + --hash=sha256:4155b51ae05ed47199dc5b2a4e62abccb274cee6b01da5b895099b61b1982974 \ + --hash=sha256:49919f8400b5e49e961f320c735388ee686a62327e773fa5b3ce6721f7e785ce \ + --hash=sha256:53d0a3fa5f8af98a1e261de6a3943ca631c526635eb5817a87a59d9a57ebf48f \ + --hash=sha256:5f008525e02908b20e04707a4f704cd286d94718f48bb33edddc7d7b584dddc1 \ + --hash=sha256:628c985afb2c7d27a4800bfb609e03985aaecb42f955049957814e0491d4006d \ + --hash=sha256:65ed923f84a6844de5fd29726b888e58c62820e0769b76565480e1fdc3d062f8 \ + --hash=sha256:6734e606355834f13445b6adc38b53c0fd45f1a56a9ba06c2058f86893ae8017 \ + --hash=sha256:6baf0baf0d5d265fa7944feb9f7451cc316bfe30e8df1a61b1bb08577c554f31 \ + --hash=sha256:6f4f4668e1831850ebcc2fd0b1cd11721947b6dc7c00bf1c6bd3c929ae14f2c7 \ + --hash=sha256:6f5c2e7bc8a4bf7c426599765b1bd33217ec84023033672c1e9a8b35eaeaaaf8 \ + --hash=sha256:6f6c7a8a57e9405cad7485f4c9d3172ae486cfef1344b5ddd8e5239582d7355e \ + --hash=sha256:7381c66e0561c5757ffe616af869b916c8b4e42b367ab29fedc98481d1e74e14 \ + --hash=sha256:73dc03a6a7e30b7edc5b01b601e53e7fc924b04e1835e8e407c12c037e81adbd \ + --hash=sha256:74db0052d985cf37fa111828d0dd230776ac99c740e1a758ad99094be4f1803d \ + --hash=sha256:75f2568b4189dda1c567339b48cba4ac7384accb9c2a7ed655cd86b04055c795 \ + --hash=sha256:78cacd03e79d009d95635e7d6ff12c21eb89b894c354bd2b2ed0b4763373693b \ + --hash=sha256:80d1543d58bd3d6c271b66abf454d437a438dff01c3e62fdbcd68f2a11310d4b \ + --hash=sha256:830d2948a5ec37c386d3170c483063798d7879037492540f10a475e3fd6f244b \ + --hash=sha256:891cf9b48776b5c61c700b55a598621fdb7b1e301a550365571e9624f270c203 \ + --hash=sha256:8f25e17ab3039b05f762b0a55ae0b3632b2e073d9c8fc88e89aca31a6198e88f \ + --hash=sha256:9a3267620866c9d17b959a84dd0bd2d45719b817245e49371ead79ed4f710d19 \ + --hash=sha256:a04f86f41a8916fe45ac5024ec477f41f886b3c435da2d4e3d2709b22ab02af1 \ + 
--hash=sha256:aaf53a6cebad0eae578f062c7d462155eada9c172bd8c4d250b8c1d8eb7f916a \ + --hash=sha256:abc1185d79f47c0a7aaf7e2412a0eb2c03b724581139193d2d82b3ad8cbb00ac \ + --hash=sha256:ac0aa6cd53ab9a31d397f8303f92c42f534693528fafbdb997c82bae6e477ad9 \ + --hash=sha256:ac3775e3311661d4adace3697a52ac0bab17edd166087d493b52d4f4f553f9f0 \ + --hash=sha256:b06f0d3bf045158d2fb8837c5785fe9ff9b8c93358be64461a1089f5da983137 \ + --hash=sha256:b116502087ce8a6b7a5f1814568ccbd0e9f6cfd99948aa59b0e241dc57cf739f \ + --hash=sha256:b82fab78e0b1329e183a65260581de4375f619167478dddab510c6c6fb04d9b6 \ + --hash=sha256:bd7163182133c0c7701b25e604cf1611c0d87712e56e88e7ee5d72deab3e76b5 \ + --hash=sha256:c36bcbc0d5174a80d6cccf43a0ecaca44e81d25be4b7f90f0ed7bcfbb5a00909 \ + --hash=sha256:c3af8e0f07399d3176b179f2e2634c3ce9c1301379a6b8c9c9aeecd481da494f \ + --hash=sha256:c84132a54c750fda57729d1e2599bb598f5fa0344085dbde5003ba429a4798c0 \ + --hash=sha256:cb7b2ab0188829593b9de646545175547a70d9a6e2b63bf2cd87a0a391599324 \ + --hash=sha256:cca4def576f47a09a943666b8f829606bcb17e2bc2d5911a46c8f8da45f56755 \ + --hash=sha256:cf6511efa4801b9b38dc5546d7547d5b5c6ef4b081c60b23e4d941d0eba9cbeb \ + --hash=sha256:d16fd5252f883eb074ca55cb622bc0bee49b979ae4e8639fff6ca3ff44f9f854 \ + --hash=sha256:d2686f91611f9e17f4548dbf050e75b079bbc2a82be565832bc8ea9047b61c8c \ + --hash=sha256:d7fc3fca01da18fbabe4625d64bb612b533533ed10045a2ac3dd194bfa656b60 \ + --hash=sha256:dd5653e67b149503c68c4018bf07e42eeed6b4e956b24c00ccdf93ac79cdff84 \ + --hash=sha256:de5695a6f1d8340b12a5d6d4484290ee74d61e467c39ff03b39e30df62cf83a0 \ + --hash=sha256:e0ac8959c929593fee38da1c2b64ee9778733cdf03c482c9ff1d508b6b593b2b \ + --hash=sha256:e1b25e3ad6c909f398df8921780d6a3d120d8c09466720226fc621605b6f92b1 \ + --hash=sha256:e633940f28c1e913615fd624fcdd72fdba807bf53ea6925d6a588e84e1151531 \ + --hash=sha256:e89df2958e5159b811af9ff0f92614dabf4ff617c03a4c1c6ff53bf1c399e0e1 \ + --hash=sha256:ea9f9c6034ea2d93d9147818f17c2a0860d41b71c38b9ce4d55f21b6f9165a11 \ + 
--hash=sha256:f645caaf0008bacf349875a974220f1f1da349c5dbe7c4ec93048cdc785a3326 \ + --hash=sha256:f8303414c7b03f794347ad062c0516cee0e15f7a612abd0ce1e25caf6ceb47df \ + --hash=sha256:fca62a8301b605b954ad2e9c3666f9d97f63872aa4efcae5492baca2056b74ab + # via requests +flatbuffers==23.5.9 \ + --hash=sha256:93a506b6ab771c79ce816e7b35a93ed08ec5b4c9edb811101a22c44a4152f018 \ + --hash=sha256:a02eb8c2d61cba153cd211937de8f8f7764b6a7510971b2c4684ed8b02e6e571 + # via tensorflow-cpu +gast==0.4.0 \ + --hash=sha256:40feb7b8b8434785585ab224d1568b857edb18297e5a3047f1ba012bc83b42c1 \ + --hash=sha256:b7adcdd5adbebf1adf17378da5ba3f543684dbec47b1cda1f3997e573cd542c4 + # via tensorflow-cpu +google-auth==2.18.0 \ + --hash=sha256:c66b488a8b005b23ccb97b1198b6cece516c91869091ac5b7c267422db2733c7 \ + --hash=sha256:ef3f3a67fa54d421a1c155864570f9a8de9179cedc937bda496b7a8ca338e936 + # via + # google-auth-oauthlib + # tensorboard +google-auth-oauthlib==1.0.0 \ + --hash=sha256:95880ca704928c300f48194d1770cf5b1462835b6e49db61445a520f793fd5fb \ + --hash=sha256:e375064964820b47221a7e1b7ee1fd77051b6323c3f9e3e19785f78ab67ecfc5 + # via tensorboard +google-pasta==0.2.0 \ + --hash=sha256:4612951da876b1a10fe3960d7226f0c7682cf901e16ac06e473b267a5afa8954 \ + --hash=sha256:b32482794a366b5366a32c92a9a9201b107821889935a02b3e51f6b432ea84ed \ + --hash=sha256:c9f2c8dfc8f96d0d5808299920721be30c9eec37f2389f28904f454565c8a16e + # via tensorflow-cpu +grpcio==1.54.0 \ + --hash=sha256:02000b005bc8b72ff50c477b6431e8886b29961159e8b8d03c00b3dd9139baed \ + --hash=sha256:031bbd26656e0739e4b2c81c172155fb26e274b8d0312d67aefc730bcba915b6 \ + --hash=sha256:1209d6b002b26e939e4c8ea37a3d5b4028eb9555394ea69fb1adbd4b61a10bb8 \ + --hash=sha256:125ed35aa3868efa82eabffece6264bf638cfdc9f0cd58ddb17936684aafd0f8 \ + --hash=sha256:1382bc499af92901c2240c4d540c74eae8a671e4fe9839bfeefdfcc3a106b5e2 \ + --hash=sha256:16bca8092dd994f2864fdab278ae052fad4913f36f35238b2dd11af2d55a87db \ + 
--hash=sha256:1c59d899ee7160638613a452f9a4931de22623e7ba17897d8e3e348c2e9d8d0b \ + --hash=sha256:1d109df30641d050e009105f9c9ca5a35d01e34d2ee2a4e9c0984d392fd6d704 \ + --hash=sha256:1fa7d6ddd33abbd3c8b3d7d07c56c40ea3d1891ce3cd2aa9fa73105ed5331866 \ + --hash=sha256:21c4a1aae861748d6393a3ff7867473996c139a77f90326d9f4104bebb22d8b8 \ + --hash=sha256:224166f06ccdaf884bf35690bf4272997c1405de3035d61384ccb5b25a4c1ca8 \ + --hash=sha256:2262bd3512ba9e9f0e91d287393df6f33c18999317de45629b7bd46c40f16ba9 \ + --hash=sha256:2585b3c294631a39b33f9f967a59b0fad23b1a71a212eba6bc1e3ca6e6eec9ee \ + --hash=sha256:27fb030a4589d2536daec5ff5ba2a128f4f155149efab578fe2de2cb21596d3d \ + --hash=sha256:30fbbce11ffeb4f9f91c13fe04899aaf3e9a81708bedf267bf447596b95df26b \ + --hash=sha256:3930669c9e6f08a2eed824738c3d5699d11cd47a0ecc13b68ed11595710b1133 \ + --hash=sha256:3b170e441e91e4f321e46d3cc95a01cb307a4596da54aca59eb78ab0fc03754d \ + --hash=sha256:3db71c6f1ab688d8dfc102271cedc9828beac335a3a4372ec54b8bf11b43fd29 \ + --hash=sha256:48cb7af77238ba16c77879009003f6b22c23425e5ee59cb2c4c103ec040638a5 \ + --hash=sha256:49eace8ea55fbc42c733defbda1e4feb6d3844ecd875b01bb8b923709e0f5ec8 \ + --hash=sha256:533eaf5b2a79a3c6f35cbd6a095ae99cac7f4f9c0e08bdcf86c130efd3c32adf \ + --hash=sha256:5942a3e05630e1ef5b7b5752e5da6582460a2e4431dae603de89fc45f9ec5aa9 \ + --hash=sha256:62117486460c83acd3b5d85c12edd5fe20a374630475388cfc89829831d3eb79 \ + --hash=sha256:650f5f2c9ab1275b4006707411bb6d6bc927886874a287661c3c6f332d4c068b \ + --hash=sha256:6dc1e2c9ac292c9a484ef900c568ccb2d6b4dfe26dfa0163d5bc815bb836c78d \ + --hash=sha256:73c238ef6e4b64272df7eec976bb016c73d3ab5a6c7e9cd906ab700523d312f3 \ + --hash=sha256:775a2f70501370e5ba54e1ee3464413bff9bd85bd9a0b25c989698c44a6fb52f \ + --hash=sha256:860fcd6db7dce80d0a673a1cc898ce6bc3d4783d195bbe0e911bf8a62c93ff3f \ + --hash=sha256:87f47bf9520bba4083d65ab911f8f4c0ac3efa8241993edd74c8dd08ae87552f \ + --hash=sha256:960b176e0bb2b4afeaa1cd2002db1e82ae54c9b6e27ea93570a42316524e77cf \ + 
--hash=sha256:a7caf553ccaf715ec05b28c9b2ab2ee3fdb4036626d779aa09cf7cbf54b71445 \ + --hash=sha256:a947d5298a0bbdd4d15671024bf33e2b7da79a70de600ed29ba7e0fef0539ebb \ + --hash=sha256:a97b0d01ae595c997c1d9d8249e2d2da829c2d8a4bdc29bb8f76c11a94915c9a \ + --hash=sha256:b7655f809e3420f80ce3bf89737169a9dce73238af594049754a1128132c0da4 \ + --hash=sha256:c33744d0d1a7322da445c0fe726ea6d4e3ef2dfb0539eadf23dce366f52f546c \ + --hash=sha256:c55a9cf5cba80fb88c850915c865b8ed78d5e46e1f2ec1b27692f3eaaf0dca7e \ + --hash=sha256:d2f62fb1c914a038921677cfa536d645cb80e3dd07dc4859a3c92d75407b90a5 \ + --hash=sha256:d8ae6e0df3a608e99ee1acafaafd7db0830106394d54571c1ece57f650124ce9 \ + --hash=sha256:e355ee9da9c1c03f174efea59292b17a95e0b7b4d7d2a389265f731a9887d5a9 \ + --hash=sha256:e3e526062c690517b42bba66ffe38aaf8bc99a180a78212e7b22baa86902f690 \ + --hash=sha256:eb0807323572642ab73fd86fe53d88d843ce617dd1ddf430351ad0759809a0ae \ + --hash=sha256:ebff0738be0499d7db74d20dca9f22a7b27deae31e1bf92ea44924fd69eb6251 \ + --hash=sha256:ed36e854449ff6c2f8ee145f94851fe171298e1e793f44d4f672c4a0d78064e7 \ + --hash=sha256:ed3d458ded32ff3a58f157b60cc140c88f7ac8c506a1c567b2a9ee8a2fd2ce54 \ + --hash=sha256:f4a7dca8ccd8023d916b900aa3c626f1bd181bd5b70159479b142f957ff420e4 + # via + # tensorboard + # tensorflow-cpu +h5py==3.8.0 \ + --hash=sha256:03890b1c123d024fb0239a3279737d5432498c1901c354f8b10d8221d1d16235 \ + --hash=sha256:0fef76e10b9216657fa37e7edff6d8be0709b25bd5066474c229b56cf0098df9 \ + --hash=sha256:26ffc344ec9984d2cd3ca0265007299a8bac8d85c1ad48f4639d8d3aed2af171 \ + --hash=sha256:290e00fa2de74a10688d1bac98d5a9cdd43f14f58e562c580b5b3dfbd358ecae \ + --hash=sha256:33b15aae79e9147aebe1d0e54099cbcde8d65e3e227cd5b59e49b1272aa0e09d \ + --hash=sha256:36761693efbe53df179627a775476dcbc37727d6e920958277a7efbc18f1fb73 \ + --hash=sha256:377865821fe80ad984d003723d6f8890bd54ceeb5981b43c0313b9df95411b30 \ + --hash=sha256:49bc857635f935fa30e92e61ac1e87496df8f260a6945a3235e43a9890426866 \ + 
--hash=sha256:4a506fc223def428f4329e7e1f9fe1c8c593eab226e7c0942c8d75308ad49950 \ + --hash=sha256:533d7dad466ddb7e3b30af274b630eb7c1a6e4ddf01d1c373a0334dc2152110a \ + --hash=sha256:5fd2252d1fc364ba0e93dd0b7089f4906b66805cb4e6aca7fa8874ac08649647 \ + --hash=sha256:6fead82f0c4000cf38d53f9c030780d81bfa0220218aee13b90b7701c937d95f \ + --hash=sha256:7f3350fc0a8407d668b13247861c2acd23f7f5fe7d060a3ad9b0820f5fcbcae0 \ + --hash=sha256:8f55d9c6c84d7d09c79fb85979e97b81ec6071cc776a97eb6b96f8f6ec767323 \ + --hash=sha256:98a240cd4c1bfd568aaa52ec42d263131a2582dab82d74d3d42a0d954cac12be \ + --hash=sha256:9f6f6ffadd6bfa9b2c5b334805eb4b19ca0a5620433659d8f7fb86692c40a359 \ + --hash=sha256:b685453e538b2b5934c58a644ac3f3b3d0cec1a01b6fb26d57388e9f9b674ad0 \ + --hash=sha256:b7865de06779b14d98068da387333ad9bf2756b5b579cc887fac169bc08f87c3 \ + --hash=sha256:bacaa1c16810dd2b3e4417f8e730971b7c4d53d234de61fe4a918db78e80e1e4 \ + --hash=sha256:bae730580ae928de409d63cbe4fdca4c82c3ad2bed30511d19d34e995d63c77e \ + --hash=sha256:c3389b63222b1c7a158bb7fe69d11ca00066740ec5574596d47a2fe5317f563a \ + --hash=sha256:c873ba9fd4fa875ad62ce0e4891725e257a8fe7f5abdbc17e51a5d54819be55c \ + --hash=sha256:db03e3f2c716205fbdabb34d0848459840585225eb97b4f08998c743821ca323 \ + --hash=sha256:f47f757d1b76f0ecb8aa0508ec8d1b390df67a8b67ee2515dc1b046f3a1596ea \ + --hash=sha256:f891b17e3a3e974e93f9e34e7cca9f530806543571ce078998676a555837d91d + # via tensorflow-cpu +idna==3.4 \ + --hash=sha256:814f528e8dead7d329833b91c5faa87d60bf71824cd12a7530b5526063d02cb4 \ + --hash=sha256:90b77e79eaa3eba6de819a0c442c0b4ceefc341a7a2ab77d7562bf49f425c5c2 + # via requests +jax==0.4.9 \ + --hash=sha256:1ed135cd08f48e4baf10f6eafdb4a4cdae781f9052b5838c09c91a9f4fa75f09 + # via tensorflow-cpu +keras==2.12.0 \ + --hash=sha256:35c39534011e909645fb93515452e98e1a0ce23727b55d4918b9c58b2308c15e + # via tensorflow-cpu +libclang==16.0.0 \ + --hash=sha256:2adce42ae652f312245b8f4eda6f30b4076fb61f7619f2dfd0a0c31dee4c32b9 \ + 
--hash=sha256:65258a6bb3e7dc31dc9b26f8d42f53c9d3b959643ade291fcd1aef4855303ca6 \ + --hash=sha256:7b6686b67a0daa84b4c614bcc119578329fc4fbb52b919565b7376b507c4793b \ + --hash=sha256:a043138caaf2cb076ebb060c6281ec95612926645d425c691991fc9df00e8a24 \ + --hash=sha256:af55a4aa86fdfe6b2ec68bc8cfe5fdac6c448d591ca7648be86ca17099b41ca8 \ + --hash=sha256:bf4628fc4da7a1dd06a244f9b8e121c5ec68076a763c59d6b13cbb103acc935b \ + --hash=sha256:eb59652cb0559c0e71784ff4c8ba24c14644becc907b1446563ecfaa622d523b \ + --hash=sha256:ee20bf93e3dd330f71fc50cdbf13b92ced0aec8e540be64251db53502a9b33f7 + # via tensorflow-cpu +mako==1.2.4 \ + --hash=sha256:c97c79c018b9165ac9922ae4f32da095ffd3c4e6872b45eded42926deea46818 \ + --hash=sha256:d60a3903dc3bb01a18ad6a89cdbe2e4eadc69c0bc8ef1e3773ba53d44c3f7a34 + # via -r third_party/python_requirements.in +markdown==3.4.3 \ + --hash=sha256:065fd4df22da73a625f14890dd77eb8040edcbd68794bcd35943be14490608b2 \ + --hash=sha256:8bf101198e004dc93e84a12a7395e31aac6a9c9942848ae1d99b9d72cf9b3520 + # via tensorboard +markupsafe==2.1.2 \ + --hash=sha256:0576fe974b40a400449768941d5d0858cc624e3249dfd1e0c33674e5c7ca7aed \ + --hash=sha256:085fd3201e7b12809f9e6e9bc1e5c96a368c8523fad5afb02afe3c051ae4afcc \ + --hash=sha256:090376d812fb6ac5f171e5938e82e7f2d7adc2b629101cec0db8b267815c85e2 \ + --hash=sha256:0b462104ba25f1ac006fdab8b6a01ebbfbce9ed37fd37fd4acd70c67c973e460 \ + --hash=sha256:137678c63c977754abe9086a3ec011e8fd985ab90631145dfb9294ad09c102a7 \ + --hash=sha256:1bea30e9bf331f3fef67e0a3877b2288593c98a21ccb2cf29b74c581a4eb3af0 \ + --hash=sha256:22152d00bf4a9c7c83960521fc558f55a1adbc0631fbb00a9471e097b19d72e1 \ + --hash=sha256:22731d79ed2eb25059ae3df1dfc9cb1546691cc41f4e3130fe6bfbc3ecbbecfa \ + --hash=sha256:2298c859cfc5463f1b64bd55cb3e602528db6fa0f3cfd568d3605c50678f8f03 \ + --hash=sha256:28057e985dace2f478e042eaa15606c7efccb700797660629da387eb289b9323 \ + --hash=sha256:2e7821bffe00aa6bd07a23913b7f4e01328c3d5cc0b40b36c0bd81d362faeb65 \ + 
--hash=sha256:2ec4f2d48ae59bbb9d1f9d7efb9236ab81429a764dedca114f5fdabbc3788013 \ + --hash=sha256:340bea174e9761308703ae988e982005aedf427de816d1afe98147668cc03036 \ + --hash=sha256:40627dcf047dadb22cd25ea7ecfe9cbf3bbbad0482ee5920b582f3809c97654f \ + --hash=sha256:40dfd3fefbef579ee058f139733ac336312663c6706d1163b82b3003fb1925c4 \ + --hash=sha256:4cf06cdc1dda95223e9d2d3c58d3b178aa5dacb35ee7e3bbac10e4e1faacb419 \ + --hash=sha256:50c42830a633fa0cf9e7d27664637532791bfc31c731a87b202d2d8ac40c3ea2 \ + --hash=sha256:55f44b440d491028addb3b88f72207d71eeebfb7b5dbf0643f7c023ae1fba619 \ + --hash=sha256:608e7073dfa9e38a85d38474c082d4281f4ce276ac0010224eaba11e929dd53a \ + --hash=sha256:63ba06c9941e46fa389d389644e2d8225e0e3e5ebcc4ff1ea8506dce646f8c8a \ + --hash=sha256:65608c35bfb8a76763f37036547f7adfd09270fbdbf96608be2bead319728fcd \ + --hash=sha256:665a36ae6f8f20a4676b53224e33d456a6f5a72657d9c83c2aa00765072f31f7 \ + --hash=sha256:6d6607f98fcf17e534162f0709aaad3ab7a96032723d8ac8750ffe17ae5a0666 \ + --hash=sha256:7313ce6a199651c4ed9d7e4cfb4aa56fe923b1adf9af3b420ee14e6d9a73df65 \ + --hash=sha256:7668b52e102d0ed87cb082380a7e2e1e78737ddecdde129acadb0eccc5423859 \ + --hash=sha256:7df70907e00c970c60b9ef2938d894a9381f38e6b9db73c5be35e59d92e06625 \ + --hash=sha256:7e007132af78ea9df29495dbf7b5824cb71648d7133cf7848a2a5dd00d36f9ff \ + --hash=sha256:835fb5e38fd89328e9c81067fd642b3593c33e1e17e2fdbf77f5676abb14a156 \ + --hash=sha256:8bca7e26c1dd751236cfb0c6c72d4ad61d986e9a41bbf76cb445f69488b2a2bd \ + --hash=sha256:8db032bf0ce9022a8e41a22598eefc802314e81b879ae093f36ce9ddf39ab1ba \ + --hash=sha256:99625a92da8229df6d44335e6fcc558a5037dd0a760e11d84be2260e6f37002f \ + --hash=sha256:9cad97ab29dfc3f0249b483412c85c8ef4766d96cdf9dcf5a1e3caa3f3661cf1 \ + --hash=sha256:a4abaec6ca3ad8660690236d11bfe28dfd707778e2442b45addd2f086d6ef094 \ + --hash=sha256:a6e40afa7f45939ca356f348c8e23048e02cb109ced1eb8420961b2f40fb373a \ + --hash=sha256:a6f2fcca746e8d5910e18782f976489939d54a91f9411c32051b4aab2bd7c513 \ + 
--hash=sha256:a806db027852538d2ad7555b203300173dd1b77ba116de92da9afbc3a3be3eed \ + --hash=sha256:abcabc8c2b26036d62d4c746381a6f7cf60aafcc653198ad678306986b09450d \ + --hash=sha256:b8526c6d437855442cdd3d87eede9c425c4445ea011ca38d937db299382e6fa3 \ + --hash=sha256:bb06feb762bade6bf3c8b844462274db0c76acc95c52abe8dbed28ae3d44a147 \ + --hash=sha256:c0a33bc9f02c2b17c3ea382f91b4db0e6cde90b63b296422a939886a7a80de1c \ + --hash=sha256:c4a549890a45f57f1ebf99c067a4ad0cb423a05544accaf2b065246827ed9603 \ + --hash=sha256:ca244fa73f50a800cf8c3ebf7fd93149ec37f5cb9596aa8873ae2c1d23498601 \ + --hash=sha256:cf877ab4ed6e302ec1d04952ca358b381a882fbd9d1b07cccbfd61783561f98a \ + --hash=sha256:d9d971ec1e79906046aa3ca266de79eac42f1dbf3612a05dc9368125952bd1a1 \ + --hash=sha256:da25303d91526aac3672ee6d49a2f3db2d9502a4a60b55519feb1a4c7714e07d \ + --hash=sha256:e55e40ff0cc8cc5c07996915ad367fa47da6b3fc091fdadca7f5403239c5fec3 \ + --hash=sha256:f03a532d7dee1bed20bc4884194a16160a2de9ffc6354b3878ec9682bb623c54 \ + --hash=sha256:f1cd098434e83e656abf198f103a8207a8187c0fc110306691a2e94a78d0abb2 \ + --hash=sha256:f2bfb563d0211ce16b63c7cb9395d2c682a23187f54c3d79bfec33e6705473c6 \ + --hash=sha256:f8ffb705ffcf5ddd0e80b65ddf7bed7ee4f5a441ea7d3419e861a12eaf41af58 + # via + # mako + # werkzeug +ml-dtypes==0.1.0 \ + --hash=sha256:273c306db846005b83a98c9c7ec3dc8fa20e8f11c3772c8e8c20cc12d8abfd4b \ + --hash=sha256:2de6c81b0da398d54aabdd7de599f2dfc43e30b65d9fad379a69f4cc4ae165d3 \ + --hash=sha256:36e8518c8fd2c38729f020125f39ef07b045f5c16d0846320c7252d7773285ee \ + --hash=sha256:377f2d5cfbf809b59188e0bfda4a0774e658541f575b637fee4850d99c2f9fdc \ + --hash=sha256:41b6beeaea47e2466b94068664c9a45b2a65dd023aa4e5deeb5a73303661344e \ + --hash=sha256:77970beeb3cf6ac559c4b6b393f24778a5abd34fafbaad82d5a0d17d0f148936 \ + --hash=sha256:87aa1cf83d41fed5a40fc27ee57ac4c1bf904e940f082531d3d58f1c318b5928 \ + --hash=sha256:8c5c9fe086756fbc1bf51296431d64429536093cf6e2ba592e042d7fc07c8514 \ + 
--hash=sha256:8de9bbf5bed587a1166699447ea14d1e8fe66d4e812811e37bf2f4d988475476 \ + --hash=sha256:99fab8262d175c49bf1655c229244f301274e8289449c350ba4d5b95ade07d9a \ + --hash=sha256:a29fbf128583673eca0f43def1dbe77e02c1e8b8a8331db2877bbb57d091ef11 \ + --hash=sha256:ad765159ac6c18d5ee7d325fcf34d3106a9d9d7a49713d998f5cfa330a1459b4 \ + --hash=sha256:b9c5578dffd85637a7dd437192de18bc1a14eb6ba7d53ef40de3f84c51c789e5 \ + --hash=sha256:c1fc0afe63ce99069f9d7e0693a61cfd0aea90241fc3821af9953d0c11f4048a \ + --hash=sha256:c9218175b06764b8ddc95cb18d11a6c4b48a4b103a31c9ea2b2c3cd0cfc369f8 \ + --hash=sha256:dee8ea629b8e3e20c6649852c1b9deacfa13384ab9337f2c9e717e401d102f23 \ + --hash=sha256:ffb7882dd46399217dc54f37affc899e0a29a4cfb63e5bf733ac0baf4a179c77 + # via jax +numpy==1.23.5 \ + --hash=sha256:01dd17cbb340bf0fc23981e52e1d18a9d4050792e8fb8363cecbf066a84b827d \ + --hash=sha256:06005a2ef6014e9956c09ba07654f9837d9e26696a0470e42beedadb78c11b07 \ + --hash=sha256:09b7847f7e83ca37c6e627682f145856de331049013853f344f37b0c9690e3df \ + --hash=sha256:0aaee12d8883552fadfc41e96b4c82ee7d794949e2a7c3b3a7201e968c7ecab9 \ + --hash=sha256:0cbe9848fad08baf71de1a39e12d1b6310f1d5b2d0ea4de051058e6e1076852d \ + --hash=sha256:1b1766d6f397c18153d40015ddfc79ddb715cabadc04d2d228d4e5a8bc4ded1a \ + --hash=sha256:33161613d2269025873025b33e879825ec7b1d831317e68f4f2f0f84ed14c719 \ + --hash=sha256:5039f55555e1eab31124a5768898c9e22c25a65c1e0037f4d7c495a45778c9f2 \ + --hash=sha256:522e26bbf6377e4d76403826ed689c295b0b238f46c28a7251ab94716da0b280 \ + --hash=sha256:56e454c7833e94ec9769fa0f86e6ff8e42ee38ce0ce1fa4cbb747ea7e06d56aa \ + --hash=sha256:58f545efd1108e647604a1b5aa809591ccd2540f468a880bedb97247e72db387 \ + --hash=sha256:5e05b1c973a9f858c74367553e236f287e749465f773328c8ef31abe18f691e1 \ + --hash=sha256:7903ba8ab592b82014713c491f6c5d3a1cde5b4a3bf116404e08f5b52f6daf43 \ + --hash=sha256:8969bfd28e85c81f3f94eb4a66bc2cf1dbdc5c18efc320af34bffc54d6b1e38f \ + 
--hash=sha256:92c8c1e89a1f5028a4c6d9e3ccbe311b6ba53694811269b992c0b224269e2398 \ + --hash=sha256:9c88793f78fca17da0145455f0d7826bcb9f37da4764af27ac945488116efe63 \ + --hash=sha256:a7ac231a08bb37f852849bbb387a20a57574a97cfc7b6cabb488a4fc8be176de \ + --hash=sha256:abdde9f795cf292fb9651ed48185503a2ff29be87770c3b8e2a14b0cd7aa16f8 \ + --hash=sha256:af1da88f6bc3d2338ebbf0e22fe487821ea4d8e89053e25fa59d1d79786e7481 \ + --hash=sha256:b2a9ab7c279c91974f756c84c365a669a887efa287365a8e2c418f8b3ba73fb0 \ + --hash=sha256:bf837dc63ba5c06dc8797c398db1e223a466c7ece27a1f7b5232ba3466aafe3d \ + --hash=sha256:ca51fcfcc5f9354c45f400059e88bc09215fb71a48d3768fb80e357f3b457e1e \ + --hash=sha256:ce571367b6dfe60af04e04a1834ca2dc5f46004ac1cc756fb95319f64c095a96 \ + --hash=sha256:d208a0f8729f3fb790ed18a003f3a57895b989b40ea4dce4717e9cf4af62c6bb \ + --hash=sha256:dbee87b469018961d1ad79b1a5d50c0ae850000b639bcb1b694e9981083243b6 \ + --hash=sha256:e9f4c4e51567b616be64e05d517c79a8a22f3606499941d97bb76f2ca59f982d \ + --hash=sha256:f063b69b090c9d918f9df0a12116029e274daf0181df392839661c4c7ec9018a \ + --hash=sha256:f9a909a8bae284d46bbfdefbdd4a262ba19d3bc9921b1e76126b1d21c3c34135 + # via + # -r third_party/python_requirements.in + # h5py + # jax + # ml-dtypes + # opt-einsum + # scipy + # tensorboard + # tensorflow-cpu +oauthlib==3.2.2 \ + --hash=sha256:8139f29aac13e25d502680e9e19963e83f16838d48a0d71c287fe40e7067fbca \ + --hash=sha256:9859c40929662bec5d64f34d01c99e093149682a3f38915dc0655d5a633dd918 + # via requests-oauthlib +opt-einsum==3.3.0 \ + --hash=sha256:2455e59e3947d3c275477df7f5205b30635e266fe6dc300e3d9f9646bfcea147 \ + --hash=sha256:59f6475f77bbc37dcf7cd748519c0ec60722e91e63ca114e68821c0c54a46549 + # via + # jax + # tensorflow-cpu +packaging==23.1 \ + --hash=sha256:994793af429502c4ea2ebf6bf664629d07c1a9fe974af92966e4b8d2df7edc61 \ + --hash=sha256:a392980d2b6cffa644431898be54b0045151319d1e7ec34f0cfed48767dd334f + # via tensorflow-cpu +pillow==9.5.0 \ + 
--hash=sha256:07999f5834bdc404c442146942a2ecadd1cb6292f5229f4ed3b31e0a108746b1 \ + --hash=sha256:0852ddb76d85f127c135b6dd1f0bb88dbb9ee990d2cd9aa9e28526c93e794fba \ + --hash=sha256:1781a624c229cb35a2ac31cc4a77e28cafc8900733a864870c49bfeedacd106a \ + --hash=sha256:1e7723bd90ef94eda669a3c2c19d549874dd5badaeefabefd26053304abe5799 \ + --hash=sha256:229e2c79c00e85989a34b5981a2b67aa079fd08c903f0aaead522a1d68d79e51 \ + --hash=sha256:22baf0c3cf0c7f26e82d6e1adf118027afb325e703922c8dfc1d5d0156bb2eeb \ + --hash=sha256:252a03f1bdddce077eff2354c3861bf437c892fb1832f75ce813ee94347aa9b5 \ + --hash=sha256:2dfaaf10b6172697b9bceb9a3bd7b951819d1ca339a5ef294d1f1ac6d7f63270 \ + --hash=sha256:322724c0032af6692456cd6ed554bb85f8149214d97398bb80613b04e33769f6 \ + --hash=sha256:35f6e77122a0c0762268216315bf239cf52b88865bba522999dc38f1c52b9b47 \ + --hash=sha256:375f6e5ee9620a271acb6820b3d1e94ffa8e741c0601db4c0c4d3cb0a9c224bf \ + --hash=sha256:3ded42b9ad70e5f1754fb7c2e2d6465a9c842e41d178f262e08b8c85ed8a1d8e \ + --hash=sha256:432b975c009cf649420615388561c0ce7cc31ce9b2e374db659ee4f7d57a1f8b \ + --hash=sha256:482877592e927fd263028c105b36272398e3e1be3269efda09f6ba21fd83ec66 \ + --hash=sha256:489f8389261e5ed43ac8ff7b453162af39c3e8abd730af8363587ba64bb2e865 \ + --hash=sha256:54f7102ad31a3de5666827526e248c3530b3a33539dbda27c6843d19d72644ec \ + --hash=sha256:560737e70cb9c6255d6dcba3de6578a9e2ec4b573659943a5e7e4af13f298f5c \ + --hash=sha256:5671583eab84af046a397d6d0ba25343c00cd50bce03787948e0fff01d4fd9b1 \ + --hash=sha256:5ba1b81ee69573fe7124881762bb4cd2e4b6ed9dd28c9c60a632902fe8db8b38 \ + --hash=sha256:5d4ebf8e1db4441a55c509c4baa7a0587a0210f7cd25fcfe74dbbce7a4bd1906 \ + --hash=sha256:60037a8db8750e474af7ffc9faa9b5859e6c6d0a50e55c45576bf28be7419705 \ + --hash=sha256:608488bdcbdb4ba7837461442b90ea6f3079397ddc968c31265c1e056964f1ef \ + --hash=sha256:6608ff3bf781eee0cd14d0901a2b9cc3d3834516532e3bd673a0a204dc8615fc \ + --hash=sha256:662da1f3f89a302cc22faa9f14a262c2e3951f9dbc9617609a47521c69dd9f8f \ + 
--hash=sha256:7002d0797a3e4193c7cdee3198d7c14f92c0836d6b4a3f3046a64bd1ce8df2bf \ + --hash=sha256:763782b2e03e45e2c77d7779875f4432e25121ef002a41829d8868700d119392 \ + --hash=sha256:77165c4a5e7d5a284f10a6efaa39a0ae8ba839da344f20b111d62cc932fa4e5d \ + --hash=sha256:7c9af5a3b406a50e313467e3565fc99929717f780164fe6fbb7704edba0cebbe \ + --hash=sha256:7ec6f6ce99dab90b52da21cf0dc519e21095e332ff3b399a357c187b1a5eee32 \ + --hash=sha256:833b86a98e0ede388fa29363159c9b1a294b0905b5128baf01db683672f230f5 \ + --hash=sha256:84a6f19ce086c1bf894644b43cd129702f781ba5751ca8572f08aa40ef0ab7b7 \ + --hash=sha256:8507eda3cd0608a1f94f58c64817e83ec12fa93a9436938b191b80d9e4c0fc44 \ + --hash=sha256:85ec677246533e27770b0de5cf0f9d6e4ec0c212a1f89dfc941b64b21226009d \ + --hash=sha256:8aca1152d93dcc27dc55395604dcfc55bed5f25ef4c98716a928bacba90d33a3 \ + --hash=sha256:8d935f924bbab8f0a9a28404422da8af4904e36d5c33fc6f677e4c4485515625 \ + --hash=sha256:8f36397bf3f7d7c6a3abdea815ecf6fd14e7fcd4418ab24bae01008d8d8ca15e \ + --hash=sha256:91ec6fe47b5eb5a9968c79ad9ed78c342b1f97a091677ba0e012701add857829 \ + --hash=sha256:965e4a05ef364e7b973dd17fc765f42233415974d773e82144c9bbaaaea5d089 \ + --hash=sha256:96e88745a55b88a7c64fa49bceff363a1a27d9a64e04019c2281049444a571e3 \ + --hash=sha256:99eb6cafb6ba90e436684e08dad8be1637efb71c4f2180ee6b8f940739406e78 \ + --hash=sha256:9adf58f5d64e474bed00d69bcd86ec4bcaa4123bfa70a65ce72e424bfb88ed96 \ + --hash=sha256:9b1af95c3a967bf1da94f253e56b6286b50af23392a886720f563c547e48e964 \ + --hash=sha256:a0aa9417994d91301056f3d0038af1199eb7adc86e646a36b9e050b06f526597 \ + --hash=sha256:a0f9bb6c80e6efcde93ffc51256d5cfb2155ff8f78292f074f60f9e70b942d99 \ + --hash=sha256:a127ae76092974abfbfa38ca2d12cbeddcdeac0fb71f9627cc1135bedaf9d51a \ + --hash=sha256:aaf305d6d40bd9632198c766fb64f0c1a83ca5b667f16c1e79e1661ab5060140 \ + --hash=sha256:aca1c196f407ec7cf04dcbb15d19a43c507a81f7ffc45b690899d6a76ac9fda7 \ + --hash=sha256:ace6ca218308447b9077c14ea4ef381ba0b67ee78d64046b3f19cf4e1139ad16 \ + 
--hash=sha256:b416f03d37d27290cb93597335a2f85ed446731200705b22bb927405320de903 \ + --hash=sha256:bf548479d336726d7a0eceb6e767e179fbde37833ae42794602631a070d630f1 \ + --hash=sha256:c1170d6b195555644f0616fd6ed929dfcf6333b8675fcca044ae5ab110ded296 \ + --hash=sha256:c380b27d041209b849ed246b111b7c166ba36d7933ec6e41175fd15ab9eb1572 \ + --hash=sha256:c446d2245ba29820d405315083d55299a796695d747efceb5717a8b450324115 \ + --hash=sha256:c830a02caeb789633863b466b9de10c015bded434deb3ec87c768e53752ad22a \ + --hash=sha256:cb841572862f629b99725ebaec3287fc6d275be9b14443ea746c1dd325053cbd \ + --hash=sha256:cfa4561277f677ecf651e2b22dc43e8f5368b74a25a8f7d1d4a3a243e573f2d4 \ + --hash=sha256:cfcc2c53c06f2ccb8976fb5c71d448bdd0a07d26d8e07e321c103416444c7ad1 \ + --hash=sha256:d3c6b54e304c60c4181da1c9dadf83e4a54fd266a99c70ba646a9baa626819eb \ + --hash=sha256:d3d403753c9d5adc04d4694d35cf0391f0f3d57c8e0030aac09d7678fa8030aa \ + --hash=sha256:d9c206c29b46cfd343ea7cdfe1232443072bbb270d6a46f59c259460db76779a \ + --hash=sha256:e49eb4e95ff6fd7c0c402508894b1ef0e01b99a44320ba7d8ecbabefddcc5569 \ + --hash=sha256:f8286396b351785801a976b1e85ea88e937712ee2c3ac653710a4a57a8da5d9c \ + --hash=sha256:f8fc330c3370a81bbf3f88557097d1ea26cd8b019d6433aa59f71195f5ddebbf \ + --hash=sha256:fbd359831c1657d69bb81f0db962905ee05e5e9451913b18b831febfe0519082 \ + --hash=sha256:fe7e1c262d3392afcf5071df9afa574544f28eac825284596ac6db56e6d11062 \ + --hash=sha256:fed1e1cf6a42577953abbe8e6cf2fe2f566daebde7c34724ec8803c4c0cda579 + # via -r third_party/python_requirements.in +protobuf==4.23.0 \ + --hash=sha256:03eee35b60317112a72d19c54d0bff7bc58ff12fea4cd7b018232bd99758ffdf \ + --hash=sha256:2b94bd6df92d71bd1234a2ffe7ce96ddf6d10cf637a18d6b55ad0a89fbb7fc21 \ + --hash=sha256:36f5370a930cb77c8ad2f4135590c672d0d2c72d4a707c7d0058dce4b4b4a598 \ + --hash=sha256:5f1eba1da2a2f3f7df469fccddef3cc060b8a16cfe3cc65961ad36b4dbcf59c5 \ + --hash=sha256:6c16657d6717a0c62d5d740cb354fbad1b0d8cb811669e06fc1caa0ff4799ddd \ + 
--hash=sha256:6fe180b56e1169d72ecc4acbd39186339aed20af5384531b8e8979b02bbee159 \ + --hash=sha256:7cb5b9a05ce52c6a782bb97de52679bd3438ff2b7460eff5da348db65650f227 \ + --hash=sha256:9744e934ea5855d12191040ea198eaf704ac78665d365a89d9572e3b627c2688 \ + --hash=sha256:9f5a0fbfcdcc364f3986f9ed9f8bb1328fb84114fd790423ff3d7fdb0f85c2d1 \ + --hash=sha256:baca40d067dddd62141a129f244703160d278648b569e90bb0e3753067644711 \ + --hash=sha256:d5a35ff54e3f62e8fc7be02bb0d2fbc212bba1a5a9cc2748090690093996f07b \ + --hash=sha256:e62fb869762b4ba18666370e2f8a18f17f8ab92dd4467295c6d38be6f8fef60b \ + --hash=sha256:ebde3a023b8e11bfa6c890ef34cd6a8b47d586f26135e86c21344fe433daf2e2 + # via + # -r third_party/python_requirements.in + # tensorboard + # tensorflow-cpu +pyasn1==0.5.0 \ + --hash=sha256:87a2121042a1ac9358cabcaf1d07680ff97ee6404333bacca15f76aa8ad01a57 \ + --hash=sha256:97b7290ca68e62a832558ec3976f15cbf911bf5d7c7039d8b861c2a0ece69fde + # via + # pyasn1-modules + # rsa +pyasn1-modules==0.3.0 \ + --hash=sha256:5bd01446b736eb9d31512a30d46c1ac3395d676c6f3cafa4c03eb54b9925631c \ + --hash=sha256:d3ccd6ed470d9ffbc716be08bd90efbd44d0734bc9303818f7336070984a162d + # via google-auth +requests==2.30.0 \ + --hash=sha256:10e94cc4f3121ee6da529d358cdaeaff2f1c409cd377dbc72b825852f2f7e294 \ + --hash=sha256:239d7d4458afcb28a692cdd298d87542235f4ca8d36d03a15bfc128a6559a2f4 + # via + # requests-oauthlib + # tensorboard +requests-oauthlib==1.3.1 \ + --hash=sha256:2577c501a2fb8d05a304c09d090d6e47c306fef15809d102b327cf8364bddab5 \ + --hash=sha256:75beac4a47881eeb94d5ea5d6ad31ef88856affe2332b9aafb52c6452ccf0d7a + # via google-auth-oauthlib +rsa==4.9 \ + --hash=sha256:90260d9058e514786967344d0ef75fa8727eed8a7d2e43ce9f4bcf1b536174f7 \ + --hash=sha256:e38464a49c6c85d7f1351b0126661487a7e0a14a50f1675ec50eb34d4f20ef21 + # via google-auth +scipy==1.10.1 \ + --hash=sha256:049a8bbf0ad95277ffba9b3b7d23e5369cc39e66406d60422c8cfef40ccc8415 \ + --hash=sha256:07c3457ce0b3ad5124f98a86533106b643dd811dd61b548e78cf4c8786652f6f \ 
+ --hash=sha256:0f1564ea217e82c1bbe75ddf7285ba0709ecd503f048cb1236ae9995f64217bd \ + --hash=sha256:1553b5dcddd64ba9a0d95355e63fe6c3fc303a8fd77c7bc91e77d61363f7433f \ + --hash=sha256:15a35c4242ec5f292c3dd364a7c71a61be87a3d4ddcc693372813c0b73c9af1d \ + --hash=sha256:1b4735d6c28aad3cdcf52117e0e91d6b39acd4272f3f5cd9907c24ee931ad601 \ + --hash=sha256:2cf9dfb80a7b4589ba4c40ce7588986d6d5cebc5457cad2c2880f6bc2d42f3a5 \ + --hash=sha256:39becb03541f9e58243f4197584286e339029e8908c46f7221abeea4b749fa88 \ + --hash=sha256:43b8e0bcb877faf0abfb613d51026cd5cc78918e9530e375727bf0625c82788f \ + --hash=sha256:4b3f429188c66603a1a5c549fb414e4d3bdc2a24792e061ffbd607d3d75fd84e \ + --hash=sha256:4c0ff64b06b10e35215abce517252b375e580a6125fd5fdf6421b98efbefb2d2 \ + --hash=sha256:51af417a000d2dbe1ec6c372dfe688e041a7084da4fdd350aeb139bd3fb55353 \ + --hash=sha256:5678f88c68ea866ed9ebe3a989091088553ba12c6090244fdae3e467b1139c35 \ + --hash=sha256:79c8e5a6c6ffaf3a2262ef1be1e108a035cf4f05c14df56057b64acc5bebffb6 \ + --hash=sha256:7ff7f37b1bf4417baca958d254e8e2875d0cc23aaadbe65b3d5b3077b0eb23ea \ + --hash=sha256:aaea0a6be54462ec027de54fca511540980d1e9eea68b2d5c1dbfe084797be35 \ + --hash=sha256:bce5869c8d68cf383ce240e44c1d9ae7c06078a9396df68ce88a1230f93a30c1 \ + --hash=sha256:cd9f1027ff30d90618914a64ca9b1a77a431159df0e2a195d8a9e8a04c78abf9 \ + --hash=sha256:d925fa1c81b772882aa55bcc10bf88324dadb66ff85d548c71515f6689c6dac5 \ + --hash=sha256:e7354fd7527a4b0377ce55f286805b34e8c54b91be865bac273f527e1b839019 \ + --hash=sha256:fae8a7b898c42dffe3f7361c40d5952b6bf32d10c4569098d276b4c547905ee1 + # via jax +six==1.16.0 \ + --hash=sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926 \ + --hash=sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254 + # via + # astunparse + # google-auth + # google-pasta + # tensorflow-cpu +tensorboard==2.12.3 \ + --hash=sha256:b4a69366784bc347e02fbe7d847e01896a649ca52f8948a11005e205dcf724fb + # via tensorflow-cpu +tensorboard-data-server==0.7.0 
\ + --hash=sha256:64aa1be7c23e80b1a42c13b686eb0875bb70f5e755f4d2b8de5c1d880cf2267f \ + --hash=sha256:753d4214799b31da7b6d93837959abebbc6afa86e69eacf1e9a317a48daa31eb \ + --hash=sha256:eb7fa518737944dbf4f0cf83c2e40a7ac346bf91be2e6a0215de98be74e85454 + # via tensorboard +tensorflow-cpu==2.12.0 \ + --hash=sha256:361b19b5a64bf611beccd22de1fc04f614a8c157ac99893d9702ed24932018d6 \ + --hash=sha256:374b15d1cec1a62006e388062e89dd4899a121272d41ea5d3fcbcc96e2d875c9 \ + --hash=sha256:55685b9a19c8ecb2587fb53914c045b188ed0289a2c6495e4e59d5fb082da9cc \ + --hash=sha256:5beeb99d2a1cc1383ca981513c35a4a18157e52d91a89e69c94cb7b7e411f0d8 \ + --hash=sha256:734ce850e2b3493041bdc071b594f0f78d35e4bfce5a7e0a98d449b20420e01d \ + --hash=sha256:8fdb636736f95094368bc7d26bb3b8ed93ba820cc5d95f847e00bf4a7645463d \ + --hash=sha256:a406f751180fe5282776e8bc84f39a2dc2b796c3ae35fbe20e4edc86ec580dd3 \ + --hash=sha256:b6ba926f9a56cdf0657defc6d046735e31ded383054f67c1a16ef2b0511f68d7 \ + --hash=sha256:b9c8f0d0658da8a5b25a4fe5ca315f86c449eb11e30d79cea49c7658be75a825 \ + --hash=sha256:d5ad746bf8c87d9a9fcea4698828ba1d101a7f7bfd323a2571130374a192578b \ + --hash=sha256:e8c7047552a2d759f3e65ac13e36dd24bb5fec2e6576e848287811ec44b3d62f \ + --hash=sha256:ef4f142b6fe75fcc71ada6331ed2a15ed61b7034187049d0ef1dac482d52db78 + # via -r third_party/python_requirements.in +tensorflow-estimator==2.12.0 \ + --hash=sha256:59b191bead4883822de3d63ac02ace11a83bfe6c10d64d0c4dfde75a50e60ca1 + # via tensorflow-cpu +tensorflow-io-gcs-filesystem==0.32.0 \ + --hash=sha256:045d51bba586390d0545fcd8a18727d62b175eb142f6f4c6d719d39de40774cd \ + --hash=sha256:05e65d3cb6c93a7929b384d86c6369c63cbbab8a770440a3d95e094878403f9f \ + --hash=sha256:122be149e5f6a030f5c2901be0cc3cb07619232f7b03889e2cdf3da1c0d4f92f \ + --hash=sha256:1ce80e1555d6ee88dda67feddf366cc8b30252b5837a7a17303df7b06a71fc2e \ + --hash=sha256:21de7dcc06eb1e7de3c022b0072d90ba35ef886578149663437aa7a6fb5bf6b3 \ + 
--hash=sha256:28202492d904a6e280cf27560791e87ac1c7566000db82065d63a70c27008af2 \ + --hash=sha256:336d9b3fe6b55aea149c4f6aa1fd6ffaf27d4e5c37e55a182340b47caba38846 \ + --hash=sha256:5635df0bbe40f971dc1b946e3372744b0bdfda45c38ffcd28ef53a32bb8da4da \ + --hash=sha256:74a7e25e83d4117a7ebb09a3f247553a5497393ab48c3ee0cf0d17b405026817 \ + --hash=sha256:79fdd02103b8ae9f8b89af41f744c013fa1caaea709de19833917795e3063857 \ + --hash=sha256:7f15fd22e592661b10de317be2f42a0f84be7bfc5e6a565fcfcb04b60d625b78 \ + --hash=sha256:8214cdf85bea694160f9035ff395221c1e25e119784ccb4c104919b1f5dec84e \ + --hash=sha256:842f5f09cd756bdb3b4d0b5571b3a6f72fd534d42da938b9acf0ef462995eada \ + --hash=sha256:db682e9a510c27dd35710ba5a2c62c371e25b727741b2fe3a920355fa501e947 + # via tensorflow-cpu +termcolor==2.3.0 \ + --hash=sha256:3afb05607b89aed0ffe25202399ee0867ad4d3cb4180d98aaf8eefa6a5f7d475 \ + --hash=sha256:b5b08f68937f138fe92f6c089b99f1e2da0ae56c52b78bf7075fd95420fd9a5a + # via tensorflow-cpu +tomli==2.0.1 \ + --hash=sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc \ + --hash=sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f + # via yapf +typing-extensions==4.5.0 \ + --hash=sha256:5cb5f4a79139d699607b3ef622a1dedafa84e115ab0024e0d9c044a9479ca7cb \ + --hash=sha256:fb33085c39dd998ac16d1431ebc293a8b3eedd00fd4a32de0ff79002c19511b4 + # via tensorflow-cpu +urllib3==1.26.15 \ + --hash=sha256:8a388717b9476f934a21484e8c8e61875ab60644d29b9b39e11e4b9dc1c6b305 \ + --hash=sha256:aa751d169e23c7479ce47a0cb0da579e3ede798f994f5816a74e4f4500dcea42 + # via + # google-auth + # requests +werkzeug==2.3.4 \ + --hash=sha256:1d5a58e0377d1fe39d061a5de4469e414e78ccb1e1e59c0f5ad6fa1c36c52b76 \ + --hash=sha256:48e5e61472fee0ddee27ebad085614ebedb7af41e88f687aaf881afb723a162f + # via tensorboard +wheel==0.40.0 \ + --hash=sha256:cd1196f3faee2b31968d626e1731c94f99cbdb67cf5a46e4f5656cbee7738873 \ + --hash=sha256:d236b20e7cb522daf2390fa84c55eea81c5c30190f90f29ae2ca1ad8355bf247 + # via + # 
astunparse + # tensorboard +wrapt==1.14.1 \ + --hash=sha256:00b6d4ea20a906c0ca56d84f93065b398ab74b927a7a3dbd470f6fc503f95dc3 \ + --hash=sha256:01c205616a89d09827986bc4e859bcabd64f5a0662a7fe95e0d359424e0e071b \ + --hash=sha256:02b41b633c6261feff8ddd8d11c711df6842aba629fdd3da10249a53211a72c4 \ + --hash=sha256:07f7a7d0f388028b2df1d916e94bbb40624c59b48ecc6cbc232546706fac74c2 \ + --hash=sha256:11871514607b15cfeb87c547a49bca19fde402f32e2b1c24a632506c0a756656 \ + --hash=sha256:1b376b3f4896e7930f1f772ac4b064ac12598d1c38d04907e696cc4d794b43d3 \ + --hash=sha256:21ac0156c4b089b330b7666db40feee30a5d52634cc4560e1905d6529a3897ff \ + --hash=sha256:257fd78c513e0fb5cdbe058c27a0624c9884e735bbd131935fd49e9fe719d310 \ + --hash=sha256:2b39d38039a1fdad98c87279b48bc5dce2c0ca0d73483b12cb72aa9609278e8a \ + --hash=sha256:2cf71233a0ed05ccdabe209c606fe0bac7379fdcf687f39b944420d2a09fdb57 \ + --hash=sha256:2fe803deacd09a233e4762a1adcea5db5d31e6be577a43352936179d14d90069 \ + --hash=sha256:3232822c7d98d23895ccc443bbdf57c7412c5a65996c30442ebe6ed3df335383 \ + --hash=sha256:34aa51c45f28ba7f12accd624225e2b1e5a3a45206aa191f6f9aac931d9d56fe \ + --hash=sha256:36f582d0c6bc99d5f39cd3ac2a9062e57f3cf606ade29a0a0d6b323462f4dd87 \ + --hash=sha256:380a85cf89e0e69b7cfbe2ea9f765f004ff419f34194018a6827ac0e3edfed4d \ + --hash=sha256:40e7bc81c9e2b2734ea4bc1aceb8a8f0ceaac7c5299bc5d69e37c44d9081d43b \ + --hash=sha256:43ca3bbbe97af00f49efb06e352eae40434ca9d915906f77def219b88e85d907 \ + --hash=sha256:4fcc4649dc762cddacd193e6b55bc02edca674067f5f98166d7713b193932b7f \ + --hash=sha256:5a0f54ce2c092aaf439813735584b9537cad479575a09892b8352fea5e988dc0 \ + --hash=sha256:5a9a0d155deafd9448baff28c08e150d9b24ff010e899311ddd63c45c2445e28 \ + --hash=sha256:5b02d65b9ccf0ef6c34cba6cf5bf2aab1bb2f49c6090bafeecc9cd81ad4ea1c1 \ + --hash=sha256:60db23fa423575eeb65ea430cee741acb7c26a1365d103f7b0f6ec412b893853 \ + --hash=sha256:642c2e7a804fcf18c222e1060df25fc210b9c58db7c91416fb055897fc27e8cc \ + 
--hash=sha256:6a9a25751acb379b466ff6be78a315e2b439d4c94c1e99cb7266d40a537995d3 \ + --hash=sha256:6b1a564e6cb69922c7fe3a678b9f9a3c54e72b469875aa8018f18b4d1dd1adf3 \ + --hash=sha256:6d323e1554b3d22cfc03cd3243b5bb815a51f5249fdcbb86fda4bf62bab9e164 \ + --hash=sha256:6e743de5e9c3d1b7185870f480587b75b1cb604832e380d64f9504a0535912d1 \ + --hash=sha256:709fe01086a55cf79d20f741f39325018f4df051ef39fe921b1ebe780a66184c \ + --hash=sha256:7b7c050ae976e286906dd3f26009e117eb000fb2cf3533398c5ad9ccc86867b1 \ + --hash=sha256:7d2872609603cb35ca513d7404a94d6d608fc13211563571117046c9d2bcc3d7 \ + --hash=sha256:7ef58fb89674095bfc57c4069e95d7a31cfdc0939e2a579882ac7d55aadfd2a1 \ + --hash=sha256:80bb5c256f1415f747011dc3604b59bc1f91c6e7150bd7db03b19170ee06b320 \ + --hash=sha256:81b19725065dcb43df02b37e03278c011a09e49757287dca60c5aecdd5a0b8ed \ + --hash=sha256:833b58d5d0b7e5b9832869f039203389ac7cbf01765639c7309fd50ef619e0b1 \ + --hash=sha256:88bd7b6bd70a5b6803c1abf6bca012f7ed963e58c68d76ee20b9d751c74a3248 \ + --hash=sha256:8ad85f7f4e20964db4daadcab70b47ab05c7c1cf2a7c1e51087bfaa83831854c \ + --hash=sha256:8c0ce1e99116d5ab21355d8ebe53d9460366704ea38ae4d9f6933188f327b456 \ + --hash=sha256:8d649d616e5c6a678b26d15ece345354f7c2286acd6db868e65fcc5ff7c24a77 \ + --hash=sha256:903500616422a40a98a5a3c4ff4ed9d0066f3b4c951fa286018ecdf0750194ef \ + --hash=sha256:9736af4641846491aedb3c3f56b9bc5568d92b0692303b5a305301a95dfd38b1 \ + --hash=sha256:988635d122aaf2bdcef9e795435662bcd65b02f4f4c1ae37fbee7401c440b3a7 \ + --hash=sha256:9cca3c2cdadb362116235fdbd411735de4328c61425b0aa9f872fd76d02c4e86 \ + --hash=sha256:9e0fd32e0148dd5dea6af5fee42beb949098564cc23211a88d799e434255a1f4 \ + --hash=sha256:9f3e6f9e05148ff90002b884fbc2a86bd303ae847e472f44ecc06c2cd2fcdb2d \ + --hash=sha256:a85d2b46be66a71bedde836d9e41859879cc54a2a04fad1191eb50c2066f6e9d \ + --hash=sha256:a9a52172be0b5aae932bef82a79ec0a0ce87288c7d132946d645eba03f0ad8a8 \ + --hash=sha256:aa31fdcc33fef9eb2552cbcbfee7773d5a6792c137b359e82879c101e98584c5 \ + 
--hash=sha256:b014c23646a467558be7da3d6b9fa409b2c567d2110599b7cf9a0c5992b3b471 \ + --hash=sha256:b21bb4c09ffabfa0e85e3a6b623e19b80e7acd709b9f91452b8297ace2a8ab00 \ + --hash=sha256:b5901a312f4d14c59918c221323068fad0540e34324925c8475263841dbdfe68 \ + --hash=sha256:b9b7a708dd92306328117d8c4b62e2194d00c365f18eff11a9b53c6f923b01e3 \ + --hash=sha256:d1967f46ea8f2db647c786e78d8cc7e4313dbd1b0aca360592d8027b8508e24d \ + --hash=sha256:d52a25136894c63de15a35bc0bdc5adb4b0e173b9c0d07a2be9d3ca64a332735 \ + --hash=sha256:d77c85fedff92cf788face9bfa3ebaa364448ebb1d765302e9af11bf449ca36d \ + --hash=sha256:d79d7d5dc8a32b7093e81e97dad755127ff77bcc899e845f41bf71747af0c569 \ + --hash=sha256:dbcda74c67263139358f4d188ae5faae95c30929281bc6866d00573783c422b7 \ + --hash=sha256:ddaea91abf8b0d13443f6dac52e89051a5063c7d014710dcb4d4abb2ff811a59 \ + --hash=sha256:dee0ce50c6a2dd9056c20db781e9c1cfd33e77d2d569f5d1d9321c641bb903d5 \ + --hash=sha256:dee60e1de1898bde3b238f18340eec6148986da0455d8ba7848d50470a7a32fb \ + --hash=sha256:e2f83e18fe2f4c9e7db597e988f72712c0c3676d337d8b101f6758107c42425b \ + --hash=sha256:e3fb1677c720409d5f671e39bac6c9e0e422584e5f518bfd50aa4cbbea02433f \ + --hash=sha256:ee2b1b1769f6707a8a445162ea16dddf74285c3964f605877a20e38545c3c462 \ + --hash=sha256:ee6acae74a2b91865910eef5e7de37dc6895ad96fa23603d1d27ea69df545015 \ + --hash=sha256:ef3f72c9666bba2bab70d2a8b79f2c6d2c1a42a7f7e2b0ec83bb2f9e383950af + # via tensorflow-cpu +yapf==0.33.0 \ + --hash=sha256:4c2b59bd5ffe46f3a7da48df87596877189148226ce267c16e8b44240e51578d \ + --hash=sha256:da62bdfea3df3673553351e6246abed26d9fe6780e548a5af9e70f6d2b4f5b9a + # via -r third_party/python_requirements.in + +# The following packages are considered to be unsafe in a requirements file: +setuptools==67.7.2 \ + --hash=sha256:23aaf86b85ca52ceb801d32703f12d77517b2556af839621c641fca11287952b \ + --hash=sha256:f104fa03692a2602fa0fec6c6a9e63b6c8a968de13e17c026957dd1f53d80990 + # via + # tensorboard + # tensorflow-cpu diff --git a/third_party/repo.bzl 
b/third_party/repo.bzl new file mode 100644 index 0000000..5e3fbcf --- /dev/null +++ b/third_party/repo.bzl @@ -0,0 +1,95 @@ +# Copyright 2017 The TensorFlow Authors. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Utilities for defining TensorFlow Bazel dependencies.""" + +def _get_env_var(ctx, name): + if name in ctx.os.environ: + return ctx.os.environ[name] + else: + return None + +# Checks if we should use the system lib instead of the bundled one +def _use_system_lib(ctx, name): + syslibenv = _get_env_var(ctx, "TF_SYSTEM_LIBS") + if not syslibenv: + return False + return name in [n.strip() for n in syslibenv.split(",")] + +def _get_link_dict(ctx, link_files, build_file): + if build_file: + # Use BUILD.bazel because it takes precedence over BUILD. + link_files = dict(link_files, **{build_file: "BUILD.bazel"}) + return {ctx.path(v): Label(k) for k, v in link_files.items()} + +def _tf_http_archive_impl(ctx): + # Construct all labels early on to prevent rule restart. We want the + # attributes to be strings instead of labels because they refer to files + # in the TensorFlow repository, not files in repos depending on TensorFlow. + # See also https://github.com/bazelbuild/bazel/issues/10515. 
+ link_dict = _get_link_dict(ctx, ctx.attr.link_files, ctx.attr.build_file) + + if _use_system_lib(ctx, ctx.attr.name): + link_dict.update(_get_link_dict( + ctx = ctx, + link_files = ctx.attr.system_link_files, + build_file = ctx.attr.system_build_file, + )) + else: + patch_file = ctx.attr.patch_file + patch_file = Label(patch_file) if patch_file else None + ctx.download_and_extract( + url = ctx.attr.urls, + sha256 = ctx.attr.sha256, + type = ctx.attr.type, + stripPrefix = ctx.attr.strip_prefix, + ) + if patch_file: + ctx.patch(patch_file, strip = 1) + + for path, label in link_dict.items(): + ctx.delete(path) + ctx.symlink(label, path) + +_tf_http_archive = repository_rule( + implementation = _tf_http_archive_impl, + attrs = { + "sha256": attr.string(mandatory = True), + "urls": attr.string_list(mandatory = True), + "strip_prefix": attr.string(), + "type": attr.string(), + "patch_file": attr.string(), + "build_file": attr.string(), + "system_build_file": attr.string(), + "link_files": attr.string_dict(), + "system_link_files": attr.string_dict(), + }, + environ = ["TF_SYSTEM_LIBS"], +) + +def tf_http_archive(name, sha256, urls, **kwargs): + """Downloads and creates Bazel repos for dependencies. 
+ """ + + if native.existing_rule(name): + print("\n\033[1;33mWarning:\033[0m skipping import of repository '" + + name + "' because it already exists.\n") + return + + _tf_http_archive( + name = name, + sha256 = sha256, + urls = urls, + **kwargs + ) diff --git a/third_party/ruy/BUILD b/third_party/ruy/BUILD new file mode 100644 index 0000000..4c36181 --- /dev/null +++ b/third_party/ruy/BUILD @@ -0,0 +1,8 @@ +# Ruy is not BLAS + +package( + default_visibility = ["//visibility:public"], + licenses = ["notice"], +) + +exports_files(["LICENSE"]) diff --git a/third_party/ruy/workspace.bzl b/third_party/ruy/workspace.bzl new file mode 100644 index 0000000..5076962 --- /dev/null +++ b/third_party/ruy/workspace.bzl @@ -0,0 +1,15 @@ +"""Loads the ruy library, used by TensorFlow Lite.""" + +load("//third_party:repo.bzl", "tf_http_archive") + +def repo(): + tf_http_archive( + name = "ruy", + sha256 = "da5ec0cc07472bdb21589b0b51c8f3d7f75d2ed6230b794912adf213838d289a", + strip_prefix = "ruy-54774a7a2cf85963777289193629d4bd42de4a59", + urls = [ + "https://storage.googleapis.com/mirror.tensorflow.org/github.com/google/ruy/archive/54774a7a2cf85963777289193629d4bd42de4a59.zip", + "https://github.com/google/ruy/archive/54774a7a2cf85963777289193629d4bd42de4a59.zip", + ], + build_file = "//third_party/ruy:BUILD", + ) diff --git a/third_party/six.BUILD b/third_party/six.BUILD new file mode 100644 index 0000000..b703fe0 --- /dev/null +++ b/third_party/six.BUILD @@ -0,0 +1,15 @@ +# Description: +# Six provides simple utilities for wrapping over differences between Python 2 +# and Python 3. 
+ +licenses(["notice"]) # MIT + +exports_files(["LICENSE"]) + +py_library( + name = "six", + srcs = ["six.py"], + srcs_version = "PY2AND3", + visibility = ["//visibility:public"], +) + diff --git a/third_party/xtensa/LICENSE b/third_party/xtensa/LICENSE new file mode 100644 index 0000000..d645695 --- /dev/null +++ b/third_party/xtensa/LICENSE @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. 
+ + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. 
+ + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. 
+ + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/third_party/xtensa/examples/micro_speech_lstm/Makefile.inc b/third_party/xtensa/examples/micro_speech_lstm/Makefile.inc new file mode 100644 index 0000000..4f5b085 --- /dev/null +++ b/third_party/xtensa/examples/micro_speech_lstm/Makefile.inc @@ -0,0 +1,29 @@ +# The lstm kernel is currently only implemented for OPTIMIZED_KERNEL_DIR=xtensa +# and as a result, this partcular example is also excluded in all other cases. + + +# TODO(#715): Adding support for nullptr bias in fully connected resulted in the +# micro_speech_lstm example crashing on invoke to LSTM. This is somewhat +# unexpected and the test is being disabled so that the root-cause can be +# investigated independent of landing support for nullptr bias in fully_connected +# for all the platforms. 
+# +ifeq ($(OPTIMIZED_KERNEL_DIR), xtensa) + EXAMPLE_NAME:=micro_speech_lstm + MICRO_SPEECH_LSTM_TEST_SRCS := \ + third_party/xtensa/examples/$(EXAMPLE_NAME)/$(EXAMPLE_NAME)_test.cc \ + third_party/xtensa/examples/micro_speech_lstm/no_micro_features_data.cc \ + third_party/xtensa/examples/micro_speech_lstm/yes_micro_features_data.cc + + MICRO_SPEECH_LSTM_GENERATOR_INPUTS := \ + third_party/xtensa/examples/micro_speech_lstm/micro_speech_lstm.tflite + + MICRO_SPEECH_LSTM_HDRS := \ + third_party/xtensa/examples/micro_speech_lstm/no_micro_features_data.h \ + third_party/xtensa/examples/micro_speech_lstm/yes_micro_features_data.h + + # TODO(#802): Disbaling test that takes a long time to run. + ## Tests loading and running a speech model. + #$(eval $(call microlite_test,micro_speech_lstm_test,\ + #$(MICRO_SPEECH_LSTM_TEST_SRCS),$(MICRO_SPEECH_LSTM_HDRS),$(MICRO_SPEECH_LSTM_GENERATOR_INPUTS))) +endif diff --git a/third_party/xtensa/examples/micro_speech_lstm/images/lstm_model.png b/third_party/xtensa/examples/micro_speech_lstm/images/lstm_model.png new file mode 100644 index 0000000..8f69ade Binary files /dev/null and b/third_party/xtensa/examples/micro_speech_lstm/images/lstm_model.png differ diff --git a/third_party/xtensa/examples/micro_speech_lstm/images/spectrogram.png b/third_party/xtensa/examples/micro_speech_lstm/images/spectrogram.png new file mode 100644 index 0000000..7da0123 Binary files /dev/null and b/third_party/xtensa/examples/micro_speech_lstm/images/spectrogram.png differ diff --git a/third_party/xtensa/examples/micro_speech_lstm/micro_speech_lstm.tflite b/third_party/xtensa/examples/micro_speech_lstm/micro_speech_lstm.tflite new file mode 100644 index 0000000..a1c2c7f Binary files /dev/null and b/third_party/xtensa/examples/micro_speech_lstm/micro_speech_lstm.tflite differ diff --git a/third_party/xtensa/examples/micro_speech_lstm/micro_speech_lstm_test.cc b/third_party/xtensa/examples/micro_speech_lstm/micro_speech_lstm_test.cc new file mode 100644 
index 0000000..c1239e0 --- /dev/null +++ b/third_party/xtensa/examples/micro_speech_lstm/micro_speech_lstm_test.cc @@ -0,0 +1,153 @@ +/* Copyright 2021 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ +/* + * * Copyright (c) 2021 Cadence Design Systems Inc. + * * + * * Permission is hereby granted, free of charge, to any person obtaining + * * a copy of this software and associated documentation files (the + * * "Software"), to deal in the Software without restriction, including + * * without limitation the rights to use, copy, modify, merge, publish, + * * distribute, sublicense, and/or sell copies of the Software, and to + * * permit persons to whom the Software is furnished to do so, subject to + * * the following conditions: + * * + * * The above copyright notice and this permission notice shall be included + * * in all copies or substantial portions of the Software. + * * + * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, + * * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF + * * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
+ * * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY + * * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, + * * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE + * * SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. + * */ + +#include "tensorflow/lite/micro/micro_interpreter.h" +#include "tensorflow/lite/micro/micro_log.h" +#include "tensorflow/lite/micro/micro_mutable_op_resolver.h" +#include "tensorflow/lite/micro/testing/micro_test.h" +#include "tensorflow/lite/schema/schema_generated.h" +#include "third_party/xtensa/examples/micro_speech_lstm/micro_speech_lstm_model_data.h" +#include "third_party/xtensa/examples/micro_speech_lstm/no_micro_features_data.h" +#include "third_party/xtensa/examples/micro_speech_lstm/yes_micro_features_data.h" + +TF_LITE_MICRO_TESTS_BEGIN + +TF_LITE_MICRO_TEST(TestInvoke) { + // Map the model into a usable data structure. This doesn't involve any + // copying or parsing, it's a very lightweight operation. + const tflite::Model* model = + ::tflite::GetModel(g_micro_speech_lstm_model_data); + if (model->version() != TFLITE_SCHEMA_VERSION) { + MicroPrintf( + "Model provided is schema version %d not equal " + "to supported version %d.\n", + model->version(), TFLITE_SCHEMA_VERSION); + } + + // Pull in only the operation implementations we need. + // This relies on a complete list of all the ops needed by this graph. + tflite::MicroMutableOpResolver<4> micro_op_resolver; + micro_op_resolver.AddUnidirectionalSequenceLSTM(); + micro_op_resolver.AddReshape(); + micro_op_resolver.AddFullyConnected(); + micro_op_resolver.AddSoftmax(); + + // Create an area of memory to use for input, output, and intermediate arrays. + constexpr int tensor_arena_size = 32 * 1024; + + uint8_t tensor_arena[tensor_arena_size]; + + // Build an interpreter to run the model with. 
+ tflite::MicroInterpreter interpreter(model, micro_op_resolver, tensor_arena, + tensor_arena_size); + interpreter.AllocateTensors(); + + // Get information about the memory area to use for the model's input. + TfLiteTensor* input = interpreter.input(0); + + // Make sure the input has the properties we expect. + TF_LITE_MICRO_EXPECT_NE(nullptr, input); + TF_LITE_MICRO_EXPECT_EQ(3, input->dims->size); + TF_LITE_MICRO_EXPECT_EQ(1, input->dims->data[0]); + TF_LITE_MICRO_EXPECT_EQ(49, input->dims->data[1]); + TF_LITE_MICRO_EXPECT_EQ(kTfLiteInt8, input->type); + + // Copy a spectrogram created from a .wav audio file of someone saying "Yes", + // into the memory area used for the input. + const int8_t* yes_features_data = g_yes_micro_f2e59fea_nohash_1_data; + for (size_t i = 0; i < input->bytes; ++i) { + input->data.int8[i] = yes_features_data[i]; + } + + // Run the model on this input and make sure it succeeds. + TfLiteStatus invoke_status = interpreter.Invoke(); + if (invoke_status != kTfLiteOk) { + MicroPrintf("Invoke failed\n"); + } + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, invoke_status); + + // Get the output from the model, and make sure it's the expected size and + // type. + TfLiteTensor* output = interpreter.output(0); + TF_LITE_MICRO_EXPECT_EQ(2, output->dims->size); + TF_LITE_MICRO_EXPECT_EQ(1, output->dims->data[0]); + TF_LITE_MICRO_EXPECT_EQ(3, output->dims->data[1]); + TF_LITE_MICRO_EXPECT_EQ(kTfLiteInt8, output->type); + + // There are three possible classes in the output, each with a score. + const int kYesIndex = 0; + const int kNoIndex = 1; + const int kUnknownIndex = 2; + + // Make sure that the expected "Yes" score is higher than the other classes. 
+ uint8_t yes_score = output->data.int8[kYesIndex] + 128; + uint8_t no_score = output->data.int8[kNoIndex] + 128; + uint8_t unknown_score = output->data.int8[kUnknownIndex] + 128; + TF_LITE_MICRO_EXPECT_GT(yes_score, unknown_score); + TF_LITE_MICRO_EXPECT_GT(yes_score, no_score); + + // Now test with a different input, from a recording of "No". + const int8_t* no_features_data = g_no_micro_f9643d42_nohash_4_data; + for (size_t i = 0; i < input->bytes; ++i) { + input->data.int8[i] = no_features_data[i]; + } + + // Run the model on this "No" input. + invoke_status = interpreter.Invoke(); + if (invoke_status != kTfLiteOk) { + MicroPrintf("Invoke failed\n"); + } + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, invoke_status); + + // Get the output from the model, and make sure it's the expected size and + // type. + output = interpreter.output(0); + TF_LITE_MICRO_EXPECT_EQ(2, output->dims->size); + TF_LITE_MICRO_EXPECT_EQ(1, output->dims->data[0]); + TF_LITE_MICRO_EXPECT_EQ(3, output->dims->data[1]); + TF_LITE_MICRO_EXPECT_EQ(kTfLiteInt8, output->type); + + // Make sure that the expected "Yes" score is higher than the other classes. + yes_score = output->data.int8[kYesIndex] + 128; + no_score = output->data.int8[kNoIndex] + 128; + unknown_score = output->data.int8[kUnknownIndex] + 128; + TF_LITE_MICRO_EXPECT_GT(no_score, unknown_score); + TF_LITE_MICRO_EXPECT_GT(no_score, yes_score); + MicroPrintf("Ran successfully\n"); +} + +TF_LITE_MICRO_TESTS_END diff --git a/third_party/xtensa/examples/micro_speech_lstm/no_micro_features_data.cc b/third_party/xtensa/examples/micro_speech_lstm/no_micro_features_data.cc new file mode 100644 index 0000000..189d864 --- /dev/null +++ b/third_party/xtensa/examples/micro_speech_lstm/no_micro_features_data.cc @@ -0,0 +1,1095 @@ +/* Copyright 2021 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. 
+You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ +/* + * * Copyright (c) 2021 Cadence Design Systems Inc. + * * + * * Permission is hereby granted, free of charge, to any person obtaining + * * a copy of this software and associated documentation files (the + * * "Software"), to deal in the Software without restriction, including + * * without limitation the rights to use, copy, modify, merge, publish, + * * distribute, sublicense, and/or sell copies of the Software, and to + * * permit persons to whom the Software is furnished to do so, subject to + * * the following conditions: + * * + * * The above copyright notice and this permission notice shall be included + * * in all copies or substantial portions of the Software. + * * + * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, + * * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF + * * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. + * * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY + * * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, + * * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE + * * SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. + * */ + +#include "third_party/xtensa/examples/micro_speech_lstm/no_micro_features_data.h" + +// Golden test values for the expected spectrogram from a "no" sample file +// speech_commands_test_set_v0.02/no/f9643d42_nohash_4.wav. 
+ +const int g_no_micro_f9643d42_nohash_4_width = 257; +const int g_no_micro_f9643d42_nohash_4_height = 49; +const signed char g_no_micro_f9643d42_nohash_4_data[] = { + 127, 85, 81, 77, 74, 65, 57, 45, 36, 24, 10, -5, + -20, -37, -53, -70, -84, -99, -111, -111, -102, -89, -77, -65, + -54, -44, -35, -28, -23, -18, -16, -15, -15, -16, -19, -23, + -28, -33, -40, -49, -57, -65, -74, -84, -94, -102, -112, -118, + -120, -115, -109, -101, -96, -90, -86, -83, -80, -78, -77, -77, + -78, -78, -79, -80, -81, -81, -81, -79, -76, -74, -70, -65, + -61, -55, -50, -45, -41, -37, -34, -30, -28, -28, -27, -28, + -30, -32, -35, -39, -44, -49, -56, -63, -70, -78, -87, -94, + -103, -111, -118, -118, -113, -106, -99, -92, -84, -77, -73, -67, + -63, -59, -57, -54, -53, -52, -52, -52, -54, -56, -58, -62, + -66, -70, -74, -78, -82, -87, -92, -96, -100, -105, -108, -112, + -115, -118, -119, -119, -118, -118, -116, -116, -115, -114, -114, -114, + -114, -115, -116, -116, -117, -119, -119, -120, -120, -120, -119, -118, + -117, -116, -114, -112, -111, -108, -107, -105, -104, -102, -101, -99, + -98, -97, -96, -95, -95, -94, -94, -94, -94, -94, -95, -96, + -96, -97, -98, -98, -99, -100, -100, -99, -101, -99, -99, -98, + -97, -96, -95, -95, -94, -92, -92, -91, -90, -90, -90, -90, + -90, -91, -91, -92, -92, -93, -92, -92, -92, -92, -92, -90, + -89, -88, -88, -86, -85, -85, -84, -84, -85, -85, -87, -87, + -89, -93, -96, -99, -104, -108, -113, -116, -117, -115, -110, -104, + -98, -91, -83, -77, -69, -63, -57, -51, -45, -40, -35, -31, + -28, -25, -23, -22, -21, 127, -103, -110, -111, -102, -117, -118, + -108, -116, -118, -110, -118, -124, -127, -119, -118, -118, -119, -127, + -116, -121, -119, -126, -121, -126, -124, -126, -126, -122, -124, -124, + -123, -124, -122, -125, -125, -126, -124, -126, -125, -124, -123, -126, + -127, -122, -125, -123, -122, -125, -126, -126, -125, -125, -127, -124, + -125, -124, -127, -126, -126, -125, -124, -126, -126, -127, -123, -126, + -126, -126, -125, -125, -123, -124, 
-127, -125, -126, -126, -125, -127, + -126, -127, -125, -125, -126, -126, -127, -124, -127, -127, -125, -127, + -127, -126, -127, -126, -127, -127, -127, -127, -126, -125, -125, -127, + -126, -126, -125, -125, -127, -127, -127, -127, -126, -126, -127, -126, + -126, -127, -125, -127, -126, -127, -124, -127, -126, -126, -126, -126, + -126, -126, -127, -126, -126, -126, -126, -125, -126, -126, -125, -127, + -126, -127, -127, -127, -127, -127, -126, -127, -126, -126, -127, -127, + -126, -126, -126, -127, -126, -125, -125, -127, -127, -126, -126, -127, + -126, -127, -127, -126, -126, -127, -126, -125, -125, -127, -126, -126, + -127, -125, -126, -126, -126, -126, -127, -127, -126, -126, -126, -125, + -126, -127, -126, -126, -125, -126, -126, -127, -127, -126, -126, -126, + -127, -126, -127, -124, -127, -125, -126, -127, -127, -126, -126, -126, + -126, -126, -126, -126, -127, -126, -126, -127, -125, -126, -126, -127, + -127, -127, -127, -126, -126, -126, -127, -127, -127, -127, -127, -127, + -127, -126, -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, + -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, 127, -81, + -109, -111, -102, -113, -124, -108, -120, -120, -125, -116, -122, -115, + -123, -123, -123, -124, -124, -117, -123, -118, -127, -121, -121, -121, + -125, -124, -119, -119, -115, -119, -116, -125, -122, -116, -120, -116, + -122, -119, -120, -125, -119, -126, -123, -125, -123, -122, -120, -121, + -122, -123, -126, -122, -124, -125, -126, -125, -124, -125, -126, -126, + -126, -123, -127, -124, -126, -124, -125, -126, -124, -125, -125, -123, + -127, -126, -127, -124, -124, -125, -125, -126, -127, -125, -126, -126, + -125, -127, -126, -127, -127, -125, -125, -125, -125, -125, -126, -126, + -125, -126, -127, -126, -125, -125, -127, -125, -127, -124, -126, -126, + -127, -126, -127, -126, -127, -125, -127, -127, -125, -127, -126, -124, + -125, -125, -124, -125, -127, -127, -127, -127, -126, -125, -126, -126, + -127, -124, -127, -126, -126, -126, 
-127, -126, -127, -126, -124, -126, + -127, -127, -126, -127, -125, -127, -127, -126, -127, -127, -127, -126, + -126, -126, -127, -126, -127, -126, -125, -126, -126, -126, -126, -126, + -126, -126, -126, -127, -127, -126, -125, -126, -127, -126, -127, -126, + -126, -126, -126, -127, -126, -125, -127, -127, -127, -126, -125, -127, + -127, -127, -127, -126, -126, -126, -127, -127, -127, -126, -126, -127, + -126, -126, -126, -127, -126, -127, -127, -125, -126, -126, -126, -126, + -127, -127, -126, -127, -127, -127, -126, -127, -127, -127, -127, -127, + -127, -127, -127, -127, -126, -127, -127, -127, -127, -127, -127, -127, + -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, + -127, -127, -127, 127, -110, -112, -105, -113, -113, -116, -103, -120, + -99, -117, -121, -118, -123, -125, -123, -124, -124, -119, -119, -125, + -125, -123, -116, -118, -122, -119, -125, -119, -125, -124, -119, -119, + -123, -122, -122, -126, -127, -124, -121, -126, -124, -124, -122, -124, + -122, -126, -124, -127, -126, -125, -125, -122, -123, -127, -123, -127, + -126, -124, -125, -125, -125, -125, -125, -122, -125, -126, -126, -126, + -125, -126, -125, -124, -125, -125, -126, -126, -125, -126, -127, -126, + -127, -125, -127, -126, -126, -127, -127, -125, -126, -125, -126, -126, + -126, -126, -127, -127, -125, -127, -126, -126, -125, -126, -125, -125, + -126, -126, -125, -127, -126, -127, -126, -127, -127, -126, -127, -125, + -125, -126, -126, -125, -125, -124, -126, -126, -126, -125, -127, -127, + -127, -126, -127, -127, -126, -126, -124, -126, -127, -126, -127, -125, + -126, -125, -126, -125, -125, -127, -124, -126, -126, -127, -126, -126, + -127, -126, -126, -127, -127, -127, -126, -126, -127, -126, -127, -127, + -126, -126, -127, -126, -126, -125, -127, -126, -127, -126, -127, -126, + -124, -126, -126, -127, -127, -125, -126, -125, -126, -125, -126, -127, + -127, -126, -126, -126, -127, -127, -127, -126, -126, -127, -126, -125, + -126, -127, -126, -127, -127, -126, 
-126, -126, -125, -126, -126, -125, + -127, -126, -125, -125, -127, -127, -127, -127, -126, -127, -127, -126, + -126, -126, -126, -127, -127, -127, -127, -127, -127, -127, -126, -127, + -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, + -127, -127, -127, -127, -127, -127, -127, -127, 127, -93, -105, -118, + -100, -114, -108, -111, -110, -114, -120, -120, -120, -122, -121, -124, + -124, -123, -125, -126, -127, -120, -121, -124, -120, -122, -123, -126, + -126, -126, -122, -126, -125, -126, -126, -127, -124, -126, -126, -123, + -124, -123, -125, -125, -126, -125, -124, -125, -127, -125, -125, -125, + -122, -124, -127, -127, -126, -125, -126, -126, -126, -126, -126, -125, + -126, -126, -126, -127, -127, -126, -126, -127, -125, -124, -126, -126, + -127, -126, -127, -127, -126, -127, -126, -127, -126, -125, -127, -126, + -125, -126, -126, -125, -126, -125, -127, -127, -126, -127, -125, -126, + -126, -126, -125, -127, -127, -127, -127, -126, -127, -127, -127, -127, + -127, -126, -127, -126, -127, -125, -126, -127, -126, -127, -126, -127, + -127, -126, -125, -126, -127, -127, -126, -127, -126, -125, -127, -126, + -127, -126, -127, -127, -126, -127, -126, -125, -126, -126, -127, -127, + -125, -127, -127, -126, -127, -125, -126, -127, -127, -127, -127, -127, + -126, -127, -126, -127, -127, -127, -126, -127, -127, -126, -127, -126, + -126, -127, -127, -127, -126, -127, -126, -127, -127, -126, -126, -127, + -126, -126, -126, -127, -127, -127, -127, -127, -127, -126, -126, -126, + -126, -126, -127, -126, -125, -127, -126, -127, -127, -127, -126, -126, + -127, -126, -126, -127, -126, -127, -127, -126, -126, -127, -127, -126, + -127, -127, -126, -126, -127, -127, -126, -126, -127, -127, -127, -126, + -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, + -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, + -127, 127, -84, -118, -108, -122, -120, -111, -121, -118, -114, -121, + -120, -119, -124, -125, -121, -125, -123, 
-119, -127, -122, -121, -124, + -125, -123, -126, -125, -125, -123, -124, -127, -124, -126, -126, -127, + -126, -124, -125, -125, -127, -125, -124, -126, -124, -126, -126, -123, + -127, -125, -126, -125, -124, -127, -125, -127, -124, -126, -126, -127, + -126, -126, -126, -126, -127, -126, -127, -126, -125, -125, -127, -126, + -127, -125, -127, -127, -125, -126, -127, -127, -126, -127, -127, -126, + -127, -127, -127, -127, -125, -126, -126, -127, -126, -127, -126, -125, + -127, -127, -127, -125, -126, -127, -126, -126, -127, -127, -127, -126, + -127, -127, -127, -127, -127, -127, -127, -127, -127, -126, -126, -127, + -127, -127, -127, -127, -126, -127, -127, -125, -126, -126, -127, -127, + -127, -127, -127, -126, -126, -126, -127, -127, -126, -127, -126, -126, + -126, -126, -127, -127, -127, -126, -126, -126, -126, -127, -127, -126, + -127, -127, -127, -126, -126, -127, -127, -127, -126, -127, -127, -126, + -127, -127, -127, -127, -126, -127, -127, -127, -126, -127, -126, -127, + -127, -126, -127, -127, -127, -126, -127, -127, -127, -126, -127, -127, + -127, -127, -127, -127, -127, -127, -127, -127, -126, -126, -127, -127, + -127, -126, -127, -127, -127, -127, -127, -127, -126, -126, -127, -127, + -127, -127, -127, -126, -127, -127, -127, -126, -127, -127, -127, -127, + -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, + -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, + -127, -127, -127, -127, -127, -127, 127, -4, -77, -118, -107, -77, + -111, -103, -114, -108, -119, -118, -119, -121, -125, -122, -117, -126, + -112, -125, -119, -120, -118, -116, -124, -119, -118, -121, -121, -120, + -120, -119, -125, -124, -126, -119, -122, -124, -122, -122, -119, -124, + -120, -125, -121, -123, -124, -122, -121, -122, -121, -119, -127, -122, + -126, -121, -121, -123, -123, -126, -123, -125, -126, -126, -124, -121, + -123, -119, -126, -124, -123, -124, -124, -125, -125, -124, -127, -126, + -122, -123, -124, -126, -126, -126, -126, -124, 
-125, -125, -124, -119, + -126, -125, -125, -127, -125, -124, -126, -125, -125, -125, -125, -125, + -126, -124, -122, -126, -125, -126, -124, -126, -124, -126, -126, -127, + -126, -127, -124, -125, -123, -125, -126, -124, -126, -126, -126, -124, + -121, -120, -123, -122, -125, -123, -125, -126, -125, -123, -124, -121, + -123, -125, -124, -123, -124, -125, -126, -125, -124, -127, -126, -125, + -125, -125, -125, -125, -125, -127, -125, -126, -126, -125, -127, -126, + -125, -125, -127, -125, -126, -126, -125, -125, -126, -125, -125, -125, + -124, -127, -126, -127, -126, -126, -122, -126, -126, -126, -127, -126, + -126, -126, -126, -126, -126, -126, -127, -126, -124, -126, -127, -126, + -126, -127, -126, -126, -126, -126, -126, -126, -127, -125, -124, -125, + -126, -126, -127, -127, -126, -125, -126, -125, -125, -126, -126, -126, + -127, -127, -127, -126, -126, -127, -127, -125, -126, -126, -127, -127, + -127, -127, -126, -127, -127, -127, -127, -127, -127, -127, -127, -127, + -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, 127, + -19, -89, -106, -96, -105, -115, -114, -122, -108, -125, -119, -119, + -124, -125, -120, -126, -124, -119, -124, -122, -124, -122, -126, -121, + -118, -125, -125, -122, -123, -121, -122, -120, -125, -122, -125, -124, + -124, -126, -121, -122, -125, -121, -124, -125, -126, -127, -126, -121, + -125, -122, -125, -126, -127, -123, -127, -126, -122, -125, -122, -126, + -124, -126, -126, -123, -126, -124, -125, -125, -124, -126, -126, -124, + -126, -124, -126, -127, -127, -126, -126, -126, -126, -127, -126, -125, + -126, -124, -123, -126, -126, -126, -126, -125, -126, -126, -126, -127, + -127, -125, -126, -125, -125, -124, -127, -124, -126, -127, -126, -126, + -127, -126, -127, -127, -126, -125, -127, -127, -126, -127, -127, -125, + -126, -127, -125, -127, -126, -127, -127, -125, -127, -127, -126, -127, + -124, -127, -125, -127, -126, -126, -126, -126, -127, -126, -127, -127, + -126, -127, -126, -127, -126, -126, -125, -126, -127, 
-125, -126, -125, + -127, -127, -125, -126, -126, -126, -127, -127, -127, -126, -126, -126, + -126, -127, -126, -125, -125, -126, -127, -127, -127, -126, -127, -126, + -127, -125, -126, -126, -127, -127, -127, -126, -125, -126, -127, -127, + -126, -127, -125, -127, -127, -127, -127, -127, -127, -126, -127, -127, + -126, -126, -127, -127, -126, -127, -127, -127, -126, -126, -127, -125, + -127, -127, -126, -125, -127, -127, -127, -127, -127, -127, -126, -127, + -126, -126, -127, -127, -126, -127, -127, -127, -127, -127, -127, -127, + -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, + -127, -127, -127, -127, 127, -100, -100, -111, -120, -116, -107, -119, + -117, -123, -124, -123, -122, -118, -124, -124, -123, -119, -126, -122, + -126, -122, -124, -124, -119, -125, -125, -126, -125, -125, -120, -125, + -124, -123, -125, -122, -123, -126, -126, -126, -124, -122, -123, -125, + -126, -125, -126, -125, -122, -124, -125, -122, -125, -126, -126, -125, + -127, -126, -124, -125, -125, -127, -126, -127, -124, -126, -124, -125, + -125, -125, -125, -126, -125, -127, -126, -125, -127, -126, -126, -126, + -125, -126, -126, -126, -126, -124, -127, -125, -126, -125, -124, -126, + -127, -126, -127, -127, -127, -126, -126, -127, -126, -126, -125, -125, + -126, -127, -126, -126, -126, -125, -127, -126, -126, -126, -127, -127, + -127, -127, -126, -126, -126, -126, -127, -126, -125, -127, -126, -127, + -127, -127, -126, -127, -126, -126, -126, -127, -127, -126, -127, -127, + -127, -127, -127, -126, -126, -127, -127, -127, -127, -127, -126, -126, + -127, -126, -126, -126, -127, -127, -127, -127, -127, -127, -127, -126, + -127, -126, -127, -127, -127, -127, -127, -127, -126, -126, -127, -127, + -127, -127, -127, -127, -126, -126, -127, -126, -127, -127, -127, -127, + -127, -127, -126, -125, -127, -127, -127, -126, -127, -127, -127, -127, + -127, -126, -127, -127, -127, -127, -127, -127, -127, -126, -127, -127, + -127, -126, -127, -126, -126, -127, -127, -127, -127, 
-127, -127, -127, + -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, + -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, + -127, -127, -127, -127, -127, -127, -127, -127, -127, 127, -112, -94, + -107, -104, -114, -106, -119, -112, -107, -117, -125, -114, -122, -123, + -119, -117, -112, -119, -114, -123, -116, -123, -126, -117, -124, -126, + -125, -123, -123, -127, -123, -122, -120, -117, -125, -122, -124, -123, + -122, -122, -124, -124, -123, -121, -125, -124, -124, -122, -123, -122, + -123, -121, -124, -125, -125, -124, -124, -126, -125, -126, -126, -124, + -125, -126, -126, -122, -124, -124, -126, -127, -127, -125, -125, -124, + -126, -126, -126, -126, -126, -124, -125, -124, -126, -127, -126, -124, + -125, -126, -126, -125, -126, -126, -125, -127, -127, -126, -127, -126, + -126, -126, -127, -127, -126, -125, -126, -126, -125, -127, -126, -124, + -126, -126, -127, -127, -126, -127, -126, -125, -127, -126, -125, -125, + -127, -127, -127, -126, -127, -126, -125, -126, -127, -126, -126, -126, + -127, -126, -126, -127, -127, -127, -127, -126, -127, -126, -125, -126, + -124, -126, -126, -125, -127, -127, -126, -126, -126, -126, -127, -127, + -127, -127, -127, -126, -127, -127, -127, -126, -125, -127, -126, -126, + -127, -127, -127, -126, -126, -127, -127, -127, -126, -126, -127, -127, + -126, -126, -127, -126, -127, -125, -127, -127, -126, -126, -126, -126, + -127, -127, -127, -127, -126, -127, -127, -126, -126, -126, -127, -127, + -126, -127, -126, -127, -127, -126, -126, -126, -127, -126, -126, -127, + -126, -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, + -126, -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, + -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, + -127, -128, 127, -90, -76, -94, -118, -108, -124, -90, -34, -76, + -124, -107, -110, -88, -84, -86, -100, -111, -106, -120, -105, -106, + -123, -127, -115, -98, -103, -108, -117, -107, -110, -102, -101, 
-112, + -108, -117, -118, -122, -124, -116, -121, -121, -120, -119, -122, -125, + -120, -117, -117, -121, -113, -121, -120, -118, -115, -121, -110, -121, + -121, -121, -127, -123, -119, -122, -126, -126, -121, -123, -124, -123, + -124, -126, -123, -118, -117, -126, -120, -123, -124, -125, -120, -122, + -124, -123, -125, -115, -121, -120, -120, -126, -125, -125, -126, -124, + -127, -127, -126, -122, -123, -124, -127, -125, -126, -124, -123, -124, + -126, -124, -125, -123, -124, -125, -127, -126, -126, -123, -126, -124, + -123, -126, -126, -127, -126, -125, -126, -126, -126, -126, -125, -123, + -126, -127, -126, -126, -126, -125, -125, -124, -123, -126, -126, -126, + -124, -127, -124, -126, -127, -125, -124, -125, -125, -126, -126, -126, + -126, -126, -126, -127, -126, -125, -124, -127, -126, -124, -126, -125, + -127, -125, -127, -125, -126, -126, -127, -125, -126, -126, -126, -126, + -127, -127, -127, -126, -127, -126, -125, -126, -126, -127, -126, -126, + -126, -126, -125, -127, -127, -126, -126, -127, -126, -127, -126, -126, + -127, -126, -126, -126, -127, -126, -126, -126, -127, -127, -126, -127, + -126, -126, -125, -126, -127, -126, -127, -127, -127, -125, -127, -127, + -127, -126, -127, -127, -127, -127, -127, -126, -127, -127, -126, -127, + -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, + -127, -127, -127, -127, -127, -127, -127, 127, -83, -104, -105, -89, + -113, -106, -93, -95, -98, -109, -118, -118, -117, -84, -87, -112, + -124, -120, -120, -112, -117, -118, -119, -107, -112, -99, -119, -115, + -121, -105, -118, -117, -122, -117, -122, -124, -125, -118, -121, -119, + -121, -126, -122, -127, -124, -121, -120, -121, -121, -122, -120, -123, + -120, -123, -123, -118, -125, -122, -127, -123, -121, -126, -125, -126, + -126, -125, -125, -122, -123, -124, -122, -124, -124, -121, -122, -120, + -124, -122, -126, -125, -122, -119, -123, -123, -124, -124, -123, -122, + -127, -123, -126, -126, -125, -126, -127, -125, -124, -125, -126, -125, + 
-124, -125, -126, -125, -125, -126, -127, -126, -124, -126, -126, -126, + -123, -124, -125, -125, -127, -123, -122, -126, -126, -127, -126, -126, + -125, -126, -126, -126, -126, -126, -127, -125, -125, -127, -124, -127, + -124, -125, -125, -125, -122, -124, -125, -127, -126, -126, -127, -126, + -124, -126, -126, -126, -127, -126, -126, -125, -125, -127, -125, -127, + -125, -124, -127, -127, -126, -126, -126, -125, -127, -126, -126, -125, + -126, -127, -127, -126, -127, -127, -127, -126, -127, -125, -127, -126, + -127, -126, -125, -126, -126, -126, -127, -126, -126, -127, -127, -127, + -126, -127, -126, -126, -127, -126, -127, -126, -126, -126, -127, -126, + -127, -127, -127, -126, -127, -126, -127, -125, -126, -127, -126, -127, + -127, -127, -127, -127, -127, -127, -127, -127, -127, -126, -126, -127, + -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, + -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, + 93, 127, -105, -110, -101, -114, -89, -108, -106, -101, -100, -110, + -118, -109, -85, -111, -115, -88, -122, -120, -116, -110, -108, -119, + -103, -123, -118, -121, -123, -124, -123, -125, -102, -119, -118, -118, + -122, -115, -124, -125, -118, -125, -125, -125, -123, -120, -124, -124, + -127, -123, -121, -123, -123, -125, -124, -124, -119, -124, -124, -123, + -123, -121, -123, -125, -127, -125, -123, -123, -118, -124, -127, -123, + -117, -122, -125, -126, -117, -116, -124, -122, -125, -126, -124, -124, + -124, -125, -121, -124, -121, -126, -124, -124, -126, -126, -125, -125, + -124, -125, -126, -126, -122, -124, -124, -127, -126, -125, -125, -127, + -125, -124, -127, -124, -123, -122, -123, -123, -122, -127, -124, -127, + -124, -125, -123, -125, -123, -125, -126, -125, -123, -125, -123, -124, + -125, -126, -125, -124, -124, -125, -124, -124, -122, -125, -126, -127, + -127, -124, -125, -127, -126, -123, -126, -126, -127, -125, -125, -126, + -127, -124, -126, -125, -127, -127, -125, -123, -126, -126, -125, -125, + -125, 
-125, -126, -126, -126, -126, -124, -126, -125, -126, -127, -126, + -126, -125, -126, -125, -124, -126, -126, -124, -126, -123, -124, -125, + -123, -125, -126, -127, -127, -126, -126, -125, -125, -126, -124, -127, + -126, -126, -124, -126, -126, -126, -125, -125, -127, -126, -124, -124, + -124, -125, -126, -125, -127, -127, -127, -125, -127, -126, -127, -127, + -127, -127, -127, -127, -126, -126, -127, -127, -127, -126, -127, -127, + -126, -127, -127, -127, -127, -126, -127, -127, -127, -127, -127, -127, + -127, -127, -127, -127, -127, 127, 89, -45, -79, -97, -83, -116, + -108, -119, -114, -118, -117, -118, -114, -110, -113, -118, -111, -104, + -116, -120, -121, -117, -120, -114, -127, -121, -117, -122, -115, -122, + -119, -122, -118, -117, -120, -120, -121, -125, -125, -118, -121, -127, + -124, -121, -124, -123, -121, -123, -122, -120, -123, -125, -121, -125, + -124, -123, -121, -123, -120, -124, -123, -125, -122, -127, -124, -124, + -125, -125, -124, -125, -126, -121, -124, -122, -125, -127, -126, -127, + -125, -122, -127, -126, -122, -122, -127, -125, -126, -125, -126, -125, + -126, -125, -126, -124, -124, -125, -124, -125, -127, -124, -125, -125, + -126, -126, -127, -123, -127, -124, -124, -125, -125, -125, -124, -126, + -126, -126, -126, -127, -126, -126, -126, -125, -125, -126, -126, -126, + -126, -125, -124, -123, -127, -126, -126, -125, -127, -126, -125, -126, + -122, -126, -126, -126, -125, -125, -126, -127, -125, -127, -126, -126, + -125, -125, -125, -125, -126, -124, -125, -127, -126, -127, -125, -127, + -125, -125, -124, -125, -127, -125, -127, -127, -127, -125, -126, -126, + -126, -125, -125, -125, -126, -127, -127, -126, -126, -125, -125, -126, + -125, -125, -126, -126, -126, -126, -125, -126, -127, -126, -126, -125, + -125, -127, -126, -127, -125, -127, -125, -126, -127, -126, -127, -126, + -126, -126, -126, -127, -125, -125, -126, -126, -125, -127, -125, -125, + -126, -126, -126, -127, -127, -126, -126, -127, -126, -126, -126, -127, + -126, -126, 
-127, -126, -126, -127, -126, -126, -126, -126, -126, -126, + -127, -127, -126, -127, -126, -126, -126, -126, -126, -126, 127, -49, + -100, -115, -102, -124, -116, -116, -124, -120, -125, -127, -124, -121, + -120, -123, -124, -121, -125, -122, -126, -122, -126, -125, -123, -126, + -124, -124, -125, -124, -126, -125, -123, -125, -126, -127, -125, -126, + -126, -126, -126, -124, -125, -126, -126, -126, -127, -127, -126, -127, + -127, -126, -125, -125, -126, -126, -126, -126, -125, -126, -127, -126, + -127, -125, -126, -126, -125, -126, -127, -127, -127, -127, -127, -127, + -127, -127, -127, -126, -126, -127, -127, -126, -127, -127, -127, -127, + -126, -127, -127, -127, -127, -127, -127, -126, -127, -127, -127, -127, + -127, -126, -126, -127, -126, -126, -127, -127, -127, -126, -127, -127, + -127, -126, -127, -127, -127, -126, -127, -127, -127, -127, -127, -127, + -126, -127, -127, -127, -127, -126, -127, -126, -127, -127, -126, -126, + -127, -127, -127, -127, -127, -126, -126, -127, -127, -126, -126, -127, + -126, -126, -127, -127, -127, -127, -127, -126, -127, -127, -126, -127, + -127, -127, -126, -127, -127, -127, -127, -127, -127, -127, -127, -127, + -127, -127, -127, -127, -127, -127, -127, -126, -126, -127, -127, -127, + -127, -127, -127, -127, -127, -126, -127, -127, -127, -127, -127, -127, + -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, -126, + -127, -127, -127, -127, -127, -127, -127, -126, -127, -127, -126, -127, + -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, + -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, + -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, + -127, -127, -127, 10, 127, -51, -66, -89, -103, -108, -89, -62, + -58, -36, 57, 118, -35, -67, -112, -93, -127, -99, -125, -108, + -96, -106, -101, -114, -121, -121, -112, -112, -123, -124, -112, -120, + -118, -117, -121, -115, -116, -125, -114, -124, -123, -119, -116, -119, + -115, -125, -122, -121, -123, 
-118, -122, -127, -114, -120, -124, -122, + -120, -124, -124, -123, -126, -124, -123, -125, -125, -121, -119, -122, + -122, -122, -124, -126, -124, -122, -125, -121, -126, -122, -124, -126, + -125, -126, -121, -123, -126, -123, -125, -125, -127, -126, -123, -125, + -124, -127, -124, -126, -123, -126, -121, -122, -123, -126, -124, -122, + -124, -122, -122, -123, -124, -125, -126, -124, -123, -125, -122, -127, + -126, -123, -121, -124, -123, -123, -125, -124, -126, -125, -122, -124, + -122, -126, -126, -123, -125, -126, -125, -127, -124, -122, -124, -123, + -121, -123, -125, -126, -127, -123, -122, -125, -125, -124, -125, -127, + -123, -125, -126, -125, -124, -124, -126, -124, -123, -124, -124, -125, + -126, -125, -125, -126, -124, -124, -127, -126, -124, -125, -127, -124, + -124, -124, -123, -126, -124, -123, -125, -123, -125, -122, -126, -125, + -124, -124, -124, -124, -126, -125, -126, -125, -123, -126, -125, -124, + -124, -124, -126, -125, -126, -124, -125, -126, -123, -126, -125, -125, + -126, -126, -125, -125, -124, -126, -127, -123, -126, -125, -124, -126, + -127, -126, -125, -124, -126, -125, -125, -124, -126, -127, -127, -126, + -127, -126, -126, -127, -127, -127, -127, -127, -126, -126, -126, -126, + -126, -126, -126, -126, -127, -127, -127, -127, 76, -3, -77, -88, + -79, -68, -21, 15, 92, 127, 53, -56, -111, -93, -96, -91, + -89, -90, -101, -115, -100, -107, -109, -113, -115, -116, -116, -114, + -114, -115, -114, -115, -115, -115, -118, -118, -118, -118, -120, -119, + -121, -118, -122, -121, -121, -118, -120, -122, -121, -120, -119, -120, + -121, -123, -119, -122, -123, -122, -122, -123, -123, -124, -123, -122, + -122, -120, -121, -122, -121, -121, -122, -124, -123, -122, -123, -125, + -121, -123, -123, -123, -123, -123, -124, -124, -122, -123, -123, -124, + -124, -124, -124, -124, -123, -123, -123, -124, -123, -124, -124, -124, + -123, -124, -125, -125, -124, -124, -124, -124, -125, -125, -124, -124, + -124, -124, -124, -124, -125, -123, -125, -125, -124, 
-125, -124, -125, + -125, -124, -125, -125, -124, -123, -124, -125, -124, -124, -125, -125, + -125, -124, -124, -125, -125, -125, -125, -124, -125, -124, -125, -124, + -125, -125, -126, -126, -125, -126, -125, -125, -125, -124, -124, -125, + -125, -125, -124, -126, -125, -125, -126, -126, -125, -125, -125, -126, + -125, -124, -126, -126, -125, -125, -125, -125, -125, -125, -126, -125, + -125, -126, -126, -124, -125, -126, -125, -126, -126, -125, -124, -125, + -125, -125, -126, -125, -125, -126, -125, -126, -125, -126, -126, -125, + -125, -126, -126, -125, -125, -126, -126, -126, -125, -126, -125, -126, + -126, -126, -124, -125, -125, -125, -126, -125, -125, -125, -125, -125, + -126, -126, -126, -126, -125, -125, -126, -125, -125, -125, -125, -125, + -125, -126, -126, -126, -126, -126, -126, -126, -126, -126, -126, -125, + -125, -75, -84, -95, -91, -65, 8, 127, 86, -34, -124, -81, + -74, -107, -118, -118, -116, -108, -116, -113, -118, -124, -124, -125, + -125, -127, -127, -126, -126, -126, -127, -127, -127, -127, -126, -127, + -127, -127, -127, -126, -127, -127, -127, -127, -127, -127, -127, -127, + -127, -127, -127, -126, -126, -125, -126, -121, -117, -121, -125, -126, + -126, -127, -127, -126, -127, -127, -127, -126, -124, -126, -126, -127, + -127, -127, -127, -126, -126, -125, -126, -123, -127, -125, -126, -127, + -127, -127, -126, -127, -127, -127, -127, -127, -127, -127, -127, -127, + -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, + -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, + -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, + -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, + -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, + -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, + -128, -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, + -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, 
-127, + -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, + -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, + -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, + -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, + -127, -128, -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, + -127, -127, -127, -127, -127, -127, -98, -101, -92, -91, -61, 127, + -66, -88, -96, 0, -10, -117, -116, -114, -96, -112, -111, -119, + -119, -93, -122, -122, -123, -125, -124, -125, -127, -125, -125, -123, + -124, -125, -124, -125, -124, -125, -126, -125, -126, -125, -125, -126, + -127, -126, -126, -126, -126, -127, -125, -125, -127, -125, -123, -125, + -124, -122, -113, -121, -123, -124, -125, -126, -124, -125, -126, -124, + -123, -127, -124, -125, -124, -125, -127, -126, -127, -123, -124, -124, + -124, -126, -121, -124, -126, -126, -127, -124, -126, -127, -127, -126, + -126, -127, -126, -126, -126, -126, -126, -126, -127, -126, -127, -127, + -126, -127, -127, -127, -127, -127, -126, -126, -127, -126, -127, -126, + -125, -126, -127, -126, -127, -126, -126, -126, -126, -127, -126, -127, + -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, + -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, + -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, + -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, + -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, + -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, + -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, + -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, + -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, + -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, + -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, -115, + -75, 
-80, -60, 39, 127, -46, -90, -87, 59, -23, -79, -97, + -101, -105, -119, -112, -116, -96, -102, -116, -123, -124, -123, -124, + -123, -122, -123, -123, -123, -122, -124, -124, -123, -124, -124, -124, + -123, -124, -125, -125, -125, -125, -125, -125, -126, -123, -126, -126, + -125, -124, -121, -122, -126, -121, -117, -113, -112, -124, -124, -124, + -125, -125, -126, -122, -124, -122, -125, -125, -124, -126, -124, -125, + -126, -123, -124, -125, -123, -126, -122, -124, -124, -122, -125, -124, + -124, -126, -125, -125, -125, -125, -127, -126, -126, -126, -126, -126, + -126, -126, -127, -126, -126, -126, -126, -126, -127, -126, -127, -126, + -126, -127, -126, -126, -125, -125, -127, -127, -126, -126, -125, -126, + -126, -126, -127, -127, -126, -127, -127, -127, -127, -127, -127, -127, + -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, + -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, + -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, + -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, + -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, + -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, + -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, + -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, + -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, + -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, + -127, -127, -127, -127, -120, -78, -77, -42, 127, 125, -45, -93, + -102, 122, -104, -116, -117, -97, -110, -119, -117, -117, -94, -113, + -112, -118, -118, -114, -118, -120, -120, -123, -123, -125, -127, -125, + -125, -125, -123, -124, -125, -122, -122, -122, -123, -124, -123, -124, + -125, -126, -125, -125, -126, -126, -124, -124, -124, -121, -113, -117, + -125, -123, -124, -122, -127, -127, -126, -125, -126, -125, -125, -125, + -125, -126, -126, -126, -126, 
-123, -125, -125, -126, -122, -125, -127, + -126, -125, -127, -126, -125, -125, -125, -125, -126, -126, -125, -127, + -126, -126, -127, -127, -127, -127, -127, -127, -126, -126, -126, -126, + -126, -126, -126, -126, -126, -126, -127, -127, -127, -126, -127, -127, + -126, -126, -126, -125, -126, -126, -126, -126, -127, -127, -127, -127, + -127, -127, -127, -127, -127, -126, -127, -126, -126, -126, -126, -126, + -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, -126, -126, + -126, -127, -126, -126, -127, -127, -127, -127, -127, -127, -127, -127, + -127, -127, -126, -126, -126, -127, -126, -127, -127, -127, -127, -127, + -127, -127, -127, -127, -127, -127, -127, -127, -126, -127, -127, -127, + -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, + -127, -126, -127, -126, -127, -127, -127, -127, -127, -127, -127, -127, + -127, -127, -127, -126, -126, -127, -127, -127, -127, -127, -127, -127, + -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, + -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, + -127, -127, -127, -127, -127, -127, -127, -127, -127, -106, -79, -80, + -43, 127, 69, -75, -105, -112, 58, -104, -112, -114, -107, -99, + -107, -109, -115, -76, -113, -114, -118, -116, -123, -124, -125, -123, + -121, -121, -121, -121, -122, -119, -118, -120, -121, -123, -124, -124, + -124, -125, -125, -125, -124, -124, -124, -123, -123, -123, -124, -124, + -125, -125, -122, -120, -124, -125, -121, -116, -124, -125, -125, -124, + -125, -124, -123, -123, -123, -123, -124, -124, -127, -125, -126, -126, + -124, -123, -123, -124, -124, -125, -123, -124, -125, -126, -123, -126, + -125, -125, -125, -126, -126, -126, -125, -124, -125, -125, -125, -125, + -125, -125, -125, -126, -126, -127, -126, -126, -126, -125, -125, -125, + -125, -125, -125, -125, -125, -125, -126, -127, -127, -127, -127, -127, + -126, -126, -126, -126, -126, -126, -126, -126, -126, -126, -126, -126, + -127, -127, -127, -127, -126, -126, -126, 
-126, -126, -126, -126, -126, + -126, -126, -126, -126, -127, -127, -126, -127, -127, -126, -126, -126, + -126, -126, -126, -126, -126, -126, -126, -126, -127, -127, -127, -127, + -126, -126, -126, -126, -126, -126, -126, -126, -126, -126, -126, -127, + -127, -127, -127, -127, -127, -126, -126, -126, -126, -126, -126, -126, + -126, -126, -126, -127, -127, -127, -127, -127, -127, -126, -126, -126, + -126, -126, -126, -126, -126, -126, -127, -127, -127, -127, -127, -127, + -127, -127, -126, -126, -126, -126, -126, -126, -126, -126, -127, -127, + -127, -127, -127, -127, -127, -126, -126, -126, -126, -126, -126, -126, + -126, -126, -127, -127, -127, -127, -127, -127, -127, -126, -126, -126, + -126, -126, -65, -125, -101, -70, 127, 48, -51, -71, -58, 2, + -104, -114, -118, -97, -115, -118, -118, -103, -114, -122, -120, -123, + -113, -122, -118, -122, -120, -125, -123, -123, -123, -126, -126, -124, + -125, -124, -123, -122, -124, -123, -123, -125, -125, -125, -126, -126, + -126, -126, -124, -125, -125, -125, -125, -122, -122, -122, -122, -112, + -123, -124, -125, -125, -125, -125, -126, -125, -126, -127, -126, -125, + -124, -124, -125, -125, -122, -127, -126, -127, -127, -125, -125, -126, + -127, -126, -126, -125, -126, -124, -125, -125, -127, -125, -126, -127, + -127, -126, -127, -127, -126, -126, -126, -127, -126, -126, -126, -126, + -126, -126, -126, -126, -127, -127, -127, -127, -126, -126, -126, -126, + -127, -126, -126, -126, -126, -126, -126, -127, -127, -127, -127, -127, + -127, -127, -126, -126, -126, -126, -126, -126, -126, -126, -126, -127, + -127, -127, -127, -127, -127, -127, -126, -126, -127, -126, -126, -126, + -126, -126, -127, -126, -127, -127, -127, -127, -127, -127, -127, -127, + -126, -127, -126, -126, -127, -127, -127, -127, -127, -127, -127, -127, + -127, -127, -127, -126, -127, -126, -126, -127, -126, -127, -127, -127, + -127, -127, -127, -127, -127, -127, -127, -127, -127, -126, -126, -126, + -127, -127, -127, -127, -127, -127, -127, -127, -127, 
-127, -127, -127, + -127, -127, -126, -126, -127, -127, -127, -127, -127, -127, -127, -127, + -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, + -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, -126, -127, + -127, -127, -127, -127, -127, -127, -127, -114, -109, -103, -78, 127, + 1, -60, -69, -28, -52, -113, -120, -119, -97, -123, -126, -125, + -109, -125, -127, -127, -124, -124, -126, -126, -126, -125, -126, -126, + -127, -125, -127, -127, -127, -127, -127, -126, -127, -126, -126, -127, + -126, -126, -126, -126, -127, -127, -125, -127, -127, -127, -124, -126, + -126, -125, -121, -114, -123, -126, -127, -126, -127, -127, -127, -125, + -126, -126, -126, -124, -126, -127, -127, -127, -125, -126, -127, -127, + -125, -127, -127, -127, -126, -127, -127, -127, -126, -126, -126, -127, + -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, + -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, + -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, + -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, + -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, + -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, + -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, + -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, + -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, -128, -127, + -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, + -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, + -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, + -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, + -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, + -126, -116, -107, -80, 127, -24, -83, -104, -76, -16, -83, -100, + -114, -87, -116, -122, -122, -111, -118, -120, -120, -121, -119, -119, + 
-118, -117, -117, -118, -120, -121, -122, -124, -125, -125, -123, -124, + -123, -122, -122, -122, -122, -122, -122, -122, -123, -123, -124, -123, + -125, -124, -124, -119, -122, -121, -122, -116, -126, -125, -125, -127, + -127, -127, -127, -127, -124, -127, -127, -126, -127, -123, -123, -123, + -124, -123, -124, -125, -127, -125, -126, -126, -126, -126, -126, -126, + -126, -126, -127, -126, -125, -125, -124, -124, -124, -124, -124, -125, + -125, -126, -125, -125, -126, -126, -125, -126, -126, -126, -126, -127, + -127, -127, -127, -127, -127, -127, -127, -126, -127, -126, -125, -125, + -125, -126, -126, -126, -126, -126, -127, -127, -127, -127, -127, -126, + -126, -126, -126, -126, -126, -126, -126, -126, -126, -127, -127, -127, + -127, -127, -127, -127, -126, -126, -126, -126, -126, -126, -126, -126, + -126, -127, -127, -127, -127, -127, -127, -127, -126, -126, -126, -126, + -126, -126, -126, -126, -126, -127, -127, -127, -127, -127, -127, -127, + -127, -126, -126, -126, -126, -126, -126, -127, -127, -127, -127, -127, + -127, -127, -127, -127, -126, -126, -127, -126, -126, -127, -126, -127, + -127, -127, -127, -127, -127, -127, -127, -127, -127, -126, -126, -126, + -126, -126, -126, -126, -126, -127, -127, -127, -127, -127, -127, -127, + -127, -126, -126, -126, -126, -126, -126, -126, -127, -127, -127, -127, + -127, -127, -127, -127, -127, -127, -126, -126, -126, -126, -126, -126, + -127, -127, -127, -127, -127, -79, -104, -97, -69, 117, 8, -74, + -103, -86, 127, -74, -97, -123, -89, -113, -124, -120, -111, -87, + -107, -113, -94, -111, -110, -111, -114, -115, -115, -117, -118, -112, + -122, -125, -124, -117, -125, -123, -122, -118, -118, -124, -121, -120, + -121, -123, -124, -126, -115, -120, -126, -125, -120, -111, -122, -126, + -119, -82, -95, -121, -119, -125, -125, -123, -124, -108, -112, -122, + -123, -122, -122, -125, -123, -122, -119, -122, -119, -127, -118, -118, + -123, -123, -110, -96, -109, -123, -121, -125, -124, -121, -126, -120, + -117, -123, -122, 
-120, -121, -127, -123, -121, -119, -123, -124, -125, + -121, -126, -125, -124, -125, -127, -125, -127, -126, -125, -127, -126, + -125, -123, -122, -125, -126, -125, -126, -125, -125, -126, -126, -126, + -127, -127, -127, -127, -126, -126, -126, -126, -126, -126, -126, -126, + -126, -126, -127, -127, -127, -127, -127, -127, -126, -126, -126, -126, + -126, -126, -126, -126, -126, -126, -127, -127, -127, -127, -127, -127, + -126, -126, -126, -126, -126, -126, -126, -125, -126, -126, -127, -127, + -127, -127, -127, -127, -127, -126, -126, -126, -126, -126, -126, -126, + -126, -126, -127, -127, -127, -127, -127, -127, -126, -126, -126, -126, + -126, -126, -126, -127, -126, -126, -126, -125, -127, -127, -127, -127, + -126, -127, -126, -126, -126, -126, -126, -126, -126, -127, -127, -127, + -127, -127, -127, -127, -127, -126, -126, -127, -126, -126, -126, -126, + -126, -127, -127, -127, -127, -127, -127, -127, -127, -126, -126, -126, + -126, -126, -126, -126, -126, -127, -127, -127, -127, -127, -112, -97, + -88, -59, 127, 28, -66, -75, -45, 117, -86, -113, -121, -76, + -105, -117, -117, -97, -51, -98, -111, -113, -61, -116, -119, -123, + -123, -120, -118, -119, -112, -104, -123, -123, -125, -112, -123, -123, + -122, -115, -117, -123, -123, -121, -117, -123, -123, -124, -107, -118, + -121, -118, -111, -88, -110, -113, -87, -44, -105, -124, -124, -116, + -115, -118, -125, -116, -93, -116, -124, -118, -116, -122, -126, -126, + -118, -116, -124, -124, -111, -91, -117, -116, -111, -93, -115, -117, + -122, -118, -121, -120, -124, -126, -114, -120, -123, -123, -118, -119, + -117, -127, -125, -109, -124, -125, -125, -122, -124, -127, -126, -125, + -122, -125, -127, -124, -124, -124, -126, -126, -126, -124, -125, -126, + -126, -127, -126, -126, -127, -127, -126, -127, -126, -126, -126, -126, + -126, -126, -126, -126, -126, -126, -127, -126, -126, -126, -127, -126, + -126, -127, -126, -126, -126, -126, -126, -127, -126, -126, -127, -127, + -126, -126, -127, -126, -127, -126, -126, 
-127, -126, -126, -124, -126, + -126, -127, -126, -125, -126, -125, -126, -126, -125, -126, -126, -127, + -126, -126, -126, -127, -127, -126, -127, -126, -127, -127, -127, -126, + -126, -127, -126, -127, -126, -126, -126, -126, -127, -127, -127, -126, + -126, -126, -126, -126, -126, -126, -126, -126, -127, -126, -126, -127, + -127, -127, -127, -127, -126, -127, -126, -126, -127, -127, -127, -126, + -126, -127, -126, -127, -126, -127, -126, -127, -127, -127, -127, -127, + -127, -126, -126, -126, -127, -127, -126, -126, -126, -127, -127, -127, + -127, -127, -127, -123, -108, -110, -78, 113, 105, -19, -41, -33, + 127, -125, -125, -123, -78, -85, -103, -99, -82, -23, -119, -126, + -115, -36, -95, -106, -108, -102, -120, -117, -116, -119, -95, -123, + -122, -119, -107, -111, -119, -120, -119, -110, -123, -122, -118, -102, + -119, -126, -122, -117, -91, -118, -117, -115, -37, -102, -117, -104, + -59, -67, -107, -105, -106, -100, -113, -116, -113, -97, -104, -119, + -117, -116, -99, -125, -126, -121, -111, -114, -115, -118, -105, -91, + -121, -124, -125, -104, -118, -121, -123, -115, -116, -124, -121, -123, + -107, -123, -123, -122, -109, -112, -122, -117, -125, -104, -124, -125, + -126, -123, -122, -125, -125, -125, -121, -126, -127, -126, -123, -125, + -126, -126, -127, -124, -125, -125, -126, -127, -126, -127, -127, -127, + -126, -127, -126, -126, -126, -126, -126, -126, -126, -126, -126, -126, + -127, -127, -127, -127, -127, -126, -127, -126, -126, -126, -126, -127, + -126, -126, -125, -127, -127, -127, -125, -125, -126, -126, -127, -125, + -127, -127, -125, -123, -126, -125, -125, -125, -123, -126, -127, -127, + -124, -124, -126, -126, -125, -127, -126, -127, -126, -125, -126, -126, + -126, -127, -126, -127, -127, -127, -127, -126, -127, -126, -126, -127, + -127, -127, -127, -126, -127, -127, -126, -126, -127, -127, -126, -127, + -127, -126, -126, -126, -126, -127, -127, -127, -127, -127, -127, -127, + -127, -126, -127, -127, -126, -126, -127, -126, -126, -127, -126, 
-127, + -127, -126, -127, -127, -127, -127, -127, -127, -127, -127, -126, -126, + -126, -126, -126, -127, -127, -127, -127, -127, -92, -91, -99, -61, + 127, 49, -51, -67, -41, 120, -93, -113, -122, -71, -84, -89, + -83, -45, -43, -121, -124, -113, -15, -109, -114, -116, -109, -106, + -114, -117, -117, -99, -119, -120, -122, -101, -113, -115, -113, -106, + -111, -121, -121, -121, -95, -121, -115, -107, -88, -119, -117, -106, + -75, -4, -109, -114, -109, -96, -103, -110, -119, -105, -98, -112, + -121, -119, -94, -119, -125, -119, -94, -113, -117, -119, -116, -108, + -112, -119, -121, -97, -122, -124, -125, -118, -105, -120, -125, -124, + -109, -124, -122, -120, -109, -119, -121, -123, -117, -105, -121, -125, + -121, -105, -113, -122, -125, -122, -120, -124, -126, -126, -122, -126, + -126, -127, -123, -125, -124, -126, -125, -126, -126, -127, -126, -124, + -126, -127, -127, -125, -127, -127, -127, -127, -126, -126, -126, -127, + -127, -127, -127, -127, -126, -127, -126, -127, -126, -127, -126, -126, + -127, -127, -127, -127, -127, -127, -125, -127, -127, -126, -125, -126, + -126, -126, -127, -126, -127, -126, -127, -125, -125, -126, -125, -126, + -124, -126, -126, -126, -125, -125, -126, -127, -125, -127, -126, -125, + -125, -126, -126, -126, -125, -125, -127, -127, -126, -126, -127, -127, + -127, -127, -126, -127, -127, -127, -126, -126, -126, -127, -127, -126, + -126, -127, -127, -126, -126, -127, -127, -126, -127, -127, -127, -127, + -127, -127, -127, -127, -127, -127, -126, -127, -126, -126, -127, -127, + -127, -127, -127, -127, -126, -126, -127, -127, -127, -127, -127, -127, + -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, + -127, -98, -90, -88, -60, 120, 16, -82, -117, -69, 127, -62, + -94, -104, -54, -120, -125, -117, -66, -29, -75, -89, -75, -37, + -119, -111, -109, -111, -112, -111, -116, -110, -108, -121, -127, -123, + -95, -118, -118, -117, -101, -113, -119, -118, -107, -105, -120, -126, + -124, -92, -119, -119, -117, -25, -80, -87, 
-102, -104, -90, -122, + -123, -121, -103, -123, -121, -120, -108, -112, -122, -125, -119, -101, + -119, -119, -118, -112, -112, -114, -117, -91, -87, -120, -121, -118, + -116, -121, -126, -126, -114, -119, -119, -124, -124, -112, -123, -124, + -124, -106, -121, -122, -119, -107, -113, -121, -122, -119, -123, -125, + -125, -123, -123, -125, -126, -126, -126, -125, -125, -125, -124, -124, + -124, -124, -123, -119, -125, -127, -126, -125, -125, -127, -127, -126, + -126, -126, -125, -124, -125, -125, -125, -125, -125, -125, -126, -126, + -127, -127, -127, -127, -126, -126, -125, -125, -124, -125, -124, -124, + -126, -126, -126, -127, -127, -126, -126, -127, -127, -126, -126, -127, + -126, -124, -124, -122, -124, -126, -125, -125, -125, -127, -127, -127, + -123, -126, -126, -126, -125, -125, -125, -125, -127, -122, -125, -127, + -127, -126, -126, -126, -126, -126, -125, -125, -125, -125, -125, -125, + -126, -126, -127, -126, -127, -127, -126, -127, -126, -126, -126, -126, + -125, -125, -125, -125, -125, -126, -126, -126, -127, -127, -127, -127, + -126, -126, -126, -126, -125, -125, -125, -126, -126, -125, -126, -127, + -127, -127, -127, -126, -126, -126, -125, -125, -125, -125, -125, -126, + -126, -126, -126, -127, -127, -128, -108, -114, -96, -70, 127, 8, + -73, -82, -26, 83, -78, -101, -111, -71, -127, -122, -113, -55, + -76, -96, -93, -64, -40, -113, -123, -123, -96, -126, -124, -122, + -103, -115, -121, -119, -112, -103, -122, -125, -126, -108, -124, -125, + -120, -102, -112, -121, -121, -108, -98, -112, -111, -111, -24, -124, + -119, -117, -93, -118, -124, -124, -118, -103, -119, -125, -123, -101, + -123, -127, -123, -106, -120, -122, -125, -121, -111, -121, -123, -111, + -66, -117, -120, -118, -105, -125, -126, -126, -121, -120, -121, -127, + -127, -121, -122, -125, -124, -109, -117, -120, -121, -118, -115, -119, + -124, -124, -119, -126, -125, -124, -122, -124, -125, -125, -125, -126, + -127, -127, -127, -126, -126, -125, -124, -119, -125, -127, -127, -124, + 
-124, -126, -126, -126, -124, -126, -127, -126, -127, -126, -126, -126, + -126, -126, -126, -127, -127, -126, -127, -127, -127, -127, -126, -127, + -126, -127, -126, -126, -126, -126, -127, -127, -127, -127, -125, -126, + -126, -127, -126, -126, -127, -127, -126, -122, -126, -127, -127, -125, + -126, -126, -127, -126, -124, -125, -126, -126, -125, -127, -126, -127, + -125, -126, -127, -126, -126, -127, -127, -126, -127, -127, -126, -126, + -127, -126, -126, -127, -127, -126, -127, -127, -127, -127, -126, -127, + -127, -127, -126, -126, -126, -127, -127, -126, -126, -127, -127, -127, + -127, -127, -127, -127, -126, -127, -127, -126, -127, -127, -127, -127, + -127, -126, -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, + -127, -126, -127, -127, -127, -127, -127, -127, -127, -127, -127, -87, + -120, -110, -83, 127, 37, -33, -33, 33, 48, -105, -122, -117, + -70, -124, -119, -110, -43, -107, -122, -110, -63, -27, -100, -109, + -112, -90, -123, -123, -124, -96, -121, -123, -115, -104, -106, -120, + -122, -120, -107, -122, -123, -126, -92, -117, -121, -116, -92, -105, + -110, -105, -77, -44, -104, -114, -122, -101, -125, -120, -122, -107, + -119, -122, -120, -115, -106, -125, -126, -121, -109, -121, -127, -124, + -112, -122, -120, -112, -106, -77, -118, -116, -116, -103, -124, -125, + -123, -115, -127, -125, -122, -123, -122, -124, -124, -123, -109, -121, + -123, -125, -116, -121, -122, -123, -118, -122, -123, -124, -125, -123, + -127, -127, -127, -127, -126, -126, -125, -123, -127, -125, -123, -125, + -118, -127, -127, -125, -121, -127, -126, -126, -125, -124, -126, -126, + -125, -126, -125, -126, -126, -127, -126, -126, -127, -127, -127, -127, + -127, -127, -127, -127, -127, -126, -126, -126, -126, -126, -127, -126, + -127, -127, -126, -125, -125, -127, -127, -125, -127, -126, -125, -122, + -125, -127, -125, -125, -125, -127, -126, -127, -125, -126, -126, -127, + -126, -126, -126, -126, -125, -126, -126, -127, -127, -126, -127, -127, + -127, -127, -127, -127, 
-127, -126, -127, -126, -126, -127, -127, -127, + -127, -127, -126, -127, -127, -127, -127, -127, -126, -126, -126, -127, + -126, -127, -127, -126, -127, -127, -127, -127, -127, -127, -127, -126, + -126, -127, -126, -126, -127, -126, -127, -127, -127, -127, -127, -127, + -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, + -127, -127, -127, -127, -104, -122, -108, -73, 127, -30, -87, -82, + -3, 28, -86, -108, -119, -68, -118, -124, -127, -66, -114, -114, + -109, -47, -66, -99, -108, -113, -88, -113, -120, -125, -99, -126, + -125, -122, -101, -114, -116, -119, -121, -109, -114, -119, -122, -91, + -114, -120, -123, -92, -124, -120, -115, -54, -100, -119, -117, -111, + -113, -120, -122, -123, -114, -127, -127, -126, -110, -123, -124, -122, + -116, -120, -124, -123, -119, -118, -127, -125, -123, -92, -122, -116, + -113, -104, -113, -122, -125, -125, -120, -124, -126, -126, -121, -125, + -125, -125, -118, -125, -124, -125, -123, -120, -122, -124, -123, -118, + -124, -125, -125, -124, -126, -126, -126, -126, -127, -127, -126, -127, + -123, -125, -126, -126, -118, -123, -125, -126, -122, -127, -127, -127, + -124, -126, -127, -126, -126, -127, -126, -125, -125, -127, -126, -126, + -126, -126, -127, -127, -127, -127, -127, -127, -126, -126, -126, -126, + -126, -126, -125, -126, -126, -126, -126, -127, -126, -126, -126, -125, + -125, -125, -126, -123, -126, -127, -127, -126, -126, -127, -127, -126, + -126, -127, -127, -126, -127, -127, -127, -126, -127, -126, -126, -125, + -127, -126, -126, -127, -127, -127, -127, -127, -127, -126, -126, -126, + -126, -126, -126, -126, -126, -126, -126, -127, -127, -127, -126, -127, + -127, -127, -126, -126, -126, -126, -126, -126, -126, -127, -127, -126, + -127, -127, -128, -126, -127, -126, -126, -125, -126, -126, -127, -126, + -127, -127, -127, -127, -127, -127, -127, -127, -126, -126, -126, -126, + -126, -126, -126, -126, -126, -127, -127, -127, -127, -124, -119, -109, + -82, 127, -39, -88, -85, 2, 5, -78, -93, -90, 
-88, -121, + -123, -122, -66, -126, -121, -114, -28, -101, -113, -114, -99, -112, + -120, -121, -115, -106, -122, -124, -122, -101, -125, -125, -126, -112, + -124, -122, -122, -101, -113, -120, -120, -108, -100, -111, -119, -105, + -70, -118, -125, -125, -111, -127, -126, -122, -114, -120, -124, -122, + -119, -116, -125, -124, -125, -116, -124, -126, -126, -117, -123, -126, + -121, -105, -115, -119, -113, -105, -108, -121, -122, -123, -125, -126, + -126, -125, -119, -126, -127, -124, -112, -120, -125, -125, -125, -126, + -126, -126, -123, -119, -126, -127, -126, -126, -127, -127, -127, -126, + -127, -127, -127, -126, -126, -126, -125, -122, -121, -124, -126, -126, + -122, -126, -127, -127, -125, -127, -126, -127, -126, -127, -127, -127, + -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, + -127, -127, -127, -127, -127, -128, -127, -127, -127, -127, -127, -127, + -127, -127, -126, -127, -127, -127, -125, -126, -126, -127, -125, -126, + -126, -127, -126, -126, -127, -127, -126, -126, -127, -127, -127, -127, + -127, -127, -127, -126, -126, -127, -127, -127, -126, -127, -127, -127, + -127, -127, -127, -127, -127, -127, -127, -127, -127, -126, -127, -127, + -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, + -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, + -127, -127, -127, -128, -127, -127, -127, -127, -127, -127, -127, -127, + -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, + -127, -128, -85, -120, -119, -100, 127, -19, -57, -52, 48, -43, + -105, -120, -105, -75, -107, -110, -104, -79, -123, -121, -119, -30, + -113, -116, -116, -103, -108, -113, -111, -96, -119, -122, -122, -112, + -106, -121, -123, -120, -114, -127, -125, -125, -92, -124, -121, -123, + -91, -118, -119, -116, -95, -104, -115, -118, -113, -117, -121, -122, + -121, -116, -127, -126, -127, -113, -127, -125, -125, -118, -124, -123, + -123, -116, -124, -122, -121, -115, -114, -126, -121, -116, -86, -123, + -125, 
-124, -125, -124, -123, -123, -114, -126, -125, -121, -116, -114, + -124, -124, -123, -123, -126, -126, -126, -123, -125, -126, -126, -125, + -126, -126, -126, -126, -127, -127, -127, -125, -127, -127, -126, -125, + -119, -126, -125, -127, -120, -127, -127, -126, -123, -127, -127, -127, + -125, -126, -126, -126, -125, -127, -126, -126, -126, -127, -127, -127, + -127, -126, -127, -127, -127, -127, -126, -126, -126, -126, -126, -126, + -126, -126, -127, -127, -127, -127, -127, -127, -127, -127, -127, -126, + -126, -126, -126, -126, -126, -127, -126, -126, -126, -127, -126, -125, + -126, -127, -126, -126, -126, -126, -126, -126, -126, -125, -126, -127, + -126, -127, -127, -127, -127, -127, -127, -126, -126, -126, -126, -126, + -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, + -126, -126, -126, -126, -126, -127, -127, -127, -127, -127, -127, -127, + -127, -127, -127, -126, -127, -126, -127, -126, -127, -126, -127, -127, + -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, -126, -126, + -127, -127, -127, -127, -127, -127, -127, -107, -110, -117, -106, 127, + -42, -88, -112, -12, -18, -68, -84, -77, -92, -115, -124, -115, + -79, -115, -122, -111, -34, -108, -115, -117, -88, -120, -121, -120, + -99, -121, -122, -122, -109, -112, -119, -119, -114, -115, -118, -119, + -110, -97, -120, -122, -115, -87, -117, -123, -124, -107, -121, -123, + -122, -119, -116, -119, -119, -119, -116, -121, -123, -121, -116, -125, + -127, -126, -118, -123, -122, -123, -114, -122, -123, -123, -110, -123, + -126, -118, -101, -109, -119, -123, -120, -124, -125, -125, -123, -124, + -126, -126, -124, -112, -123, -126, -125, -119, -127, -127, -126, -126, + -125, -124, -124, -124, -124, -125, -125, -125, -126, -126, -127, -127, + -126, -127, -126, -126, -119, -124, -124, -127, -120, -127, -126, -125, + -126, -126, -126, -126, -126, -127, -127, -126, -126, -125, -125, -125, + -126, -125, -126, -126, -126, -126, -127, -127, -127, -127, -126, -126, + -126, -126, -126, 
-126, -126, -126, -126, -126, -126, -127, -127, -127, + -127, -127, -127, -126, -126, -126, -126, -125, -125, -125, -124, -126, + -127, -127, -126, -126, -126, -127, -126, -126, -126, -126, -126, -126, + -127, -127, -126, -127, -126, -126, -126, -126, -127, -127, -127, -127, + -126, -126, -126, -126, -125, -126, -126, -127, -126, -126, -127, -127, + -127, -127, -127, -126, -126, -126, -126, -126, -126, -126, -126, -127, + -126, -127, -127, -127, -127, -127, -126, -126, -126, -126, -126, -126, + -126, -126, -126, -126, -127, -127, -127, -127, -127, -127, -127, -126, + -126, -126, -126, -126, -126, -126, -126, -126, -127, -127, -127, -128, + -95, -114, -110, -102, 127, -62, -85, -79, 37, -72, -109, -114, + -85, -99, -120, -123, -103, -99, -111, -106, -86, -59, -114, -120, + -122, -102, -120, -121, -123, -101, -123, -125, -125, -106, -126, -125, + -123, -114, -121, -123, -122, -96, -105, -117, -116, -97, -104, -115, + -116, -114, -115, -125, -125, -126, -119, -126, -127, -123, -118, -123, + -125, -126, -119, -126, -126, -124, -120, -124, -125, -124, -119, -124, + -126, -125, -121, -124, -120, -115, -120, -108, -123, -122, -119, -120, + -125, -127, -126, -123, -125, -126, -125, -119, -122, -126, -122, -121, + -124, -125, -125, -125, -127, -127, -126, -127, -126, -127, -127, -127, + -127, -127, -127, -127, -126, -127, -126, -127, -124, -124, -125, -124, + -120, -124, -126, -125, -125, -126, -127, -126, -126, -126, -127, -127, + -126, -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, + -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, + -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, + -127, -127, -126, -127, -127, -127, -126, -127, -127, -127, -126, -127, + -127, -127, -127, -126, -127, -127, -126, -127, -126, -127, -127, -127, + -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, + -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, + -127, -127, -127, -127, -127, 
-127, -127, -127, -127, -127, -127, -127, + -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, + -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, + -127, -127, -127, -127, -127, -112, -108, -114, -108, 127, -88, -106, + -120, 30, -76, -96, -105, -89, -89, -101, -104, -81, -119, -123, + -115, -58, -73, -111, -121, -116, -113, -125, -125, -111, -103, -123, + -122, -119, -113, -127, -123, -119, -110, -124, -120, -114, -88, -125, + -121, -121, -110, -114, -120, -117, -104, -117, -122, -123, -126, -124, + -126, -127, -118, -125, -126, -126, -121, -125, -125, -124, -117, -124, + -126, -125, -121, -124, -126, -126, -123, -124, -125, -124, -115, -102, + -121, -122, -125, -116, -122, -125, -125, -125, -126, -125, -125, -124, + -126, -125, -125, -116, -122, -122, -123, -125, -124, -125, -126, -127, + -127, -127, -127, -126, -126, -126, -125, -126, -125, -125, -126, -126, + -125, -126, -126, -123, -120, -125, -127, -125, -125, -125, -126, -125, + -126, -125, -125, -125, -126, -126, -126, -127, -127, -127, -127, -127, + -126, -126, -126, -126, -125, -126, -126, -126, -126, -126, -127, -127, + -127, -127, -127, -127, -127, -126, -126, -126, -126, -126, -126, -126, + -126, -126, -127, -127, -127, -127, -127, -127, -126, -127, -126, -126, + -126, -126, -126, -126, -126, -126, -127, -127, -127, -127, -127, -127, + -127, -126, -127, -126, -126, -126, -126, -126, -126, -126, -127, -127, + -127, -127, -127, -127, -127, -127, -126, -126, -126, -126, -126, -126, + -126, -127, -127, -127, -127, -127, -127, -127, -127, -127, -126, -126, + -126, -126, -126, -127, -126, -127, -127, -127, -127, -127, -127, -127, + -127, -127, -126, -126, -126, -126, -126, -126, -126, -126, -127, -127, + -127, -127, -127, -127, -127, -127, -126, -126, -126, -126, -55, -107, + -121, -119, 127, -105, -112, -108, 69, -107, -115, -121, -63, -117, + -117, -120, -94, -110, -112, -113, -35, -103, -115, -119, -100, -125, + -127, -127, -104, -117, -118, -121, -109, -119, 
-123, -124, -109, -123, + -121, -119, -88, -119, -120, -120, -106, -126, -126, -125, -113, -113, + -121, -124, -125, -125, -126, -125, -120, -122, -124, -124, -122, -126, + -126, -127, -124, -121, -123, -123, -122, -123, -126, -126, -125, -121, + -124, -123, -117, -112, -120, -120, -116, -120, -125, -127, -125, -127, + -126, -126, -126, -122, -124, -125, -125, -117, -121, -125, -123, -123, + -126, -127, -127, -126, -127, -127, -127, -126, -127, -127, -127, -126, + -127, -126, -126, -127, -127, -126, -126, -122, -124, -126, -127, -124, + -126, -126, -125, -126, -126, -126, -127, -125, -127, -127, -127, -127, + -127, -127, -127, -127, -127, -127, -127, -127, -126, -126, -127, -127, + -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, + -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, + -127, -127, -127, -127, -126, -126, -127, -126, -127, -127, -127, -127, + -127, -127, -127, -127, -126, -126, -127, -127, -127, -127, -127, -127, + -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, + -127, -126, -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, + -127, -127, -127, -127, -127, -127, -126, -127, -127, -127, -127, -127, + -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, + -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, + -127, -127, -127, -79, -71, -95, -95, 127, -93, -94, -88, 78, + -117, -107, -94, -60, -113, -110, -112, -73, -112, -108, -105, -60, + -114, -124, -119, -107, -118, -117, -110, -113, -122, -119, -113, -121, + -121, -116, -113, -118, -126, -114, -97, -89, -107, -123, -116, -108, + -119, -124, -123, -116, -121, -122, -124, -123, -125, -123, -123, -120, + -124, -125, -125, -122, -123, -126, -126, -122, -123, -125, -123, -122, + -123, -126, -121, -126, -127, -123, -121, -116, -121, -118, -121, -116, + -125, -126, -124, -126, -127, -126, -125, -125, -127, -126, -121, -125, + -126, -124, -122, -124, -124, -125, -125, -126, -126, -126, -127, 
-127, + -127, -127, -127, -126, -126, -126, -126, -126, -126, -125, -125, -126, + -127, -124, -124, -126, -125, -126, -126, -125, -125, -126, -127, -126, + -126, -126, -127, -127, -126, -127, -127, -127, -127, -127, -127, -127, + -126, -126, -126, -126, -126, -126, -126, -127, -127, -127, -127, -127, + -127, -127, -127, -126, -126, -126, -126, -126, -126, -126, -126, -127, + -127, -127, -127, -127, -127, -127, -127, -126, -127, -126, -126, -126, + -126, -126, -127, -127, -127, -127, -127, -127, -127, -127, -126, -127, + -126, -127, -126, -127, -127, -127, -126, -127, -127, -127, -127, -127, + -127, -127, -127, -127, -126, -126, -126, -126, -126, -127, -127, -127, + -127, -127, -127, -127, -127, -127, -127, -127, -126, -127, -127, -127, + -126, -126, -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, + -127, -127, -127, -126, -127, -126, -127, -127, -127, -127, -127, -127, + -127, -127, -127, -127, -127, -127, -126, -126, -106, -92, -103, -99, + 127, -102, -102, -84, 97, -95, -105, -88, -63, -122, -123, -112, + -109, -122, -119, -104, -88, -112, -115, -104, -120, -124, -124, -116, + -115, -122, -122, -117, -119, -122, -122, -107, -121, -124, -112, -73, + -110, -122, -119, -105, -126, -124, -126, -125, -126, -126, -127, -123, + -126, -127, -126, -121, -124, -124, -124, -124, -127, -126, -125, -125, + -126, -125, -124, -122, -123, -125, -123, -125, -126, -126, -122, -127, + -125, -123, -116, -123, -123, -125, -120, -127, -126, -126, -125, -126, + -126, -126, -125, -124, -124, -127, -121, -125, -127, -127, -127, -127, + -127, -127, -126, -126, -126, -126, -127, -126, -127, -127, -127, -127, + -127, -126, -127, -127, -127, -126, -124, -125, -125, -123, -126, -127, + -126, -127, -127, -127, -127, -127, -127, -126, -127, -126, -127, -126, + -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, + -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, + -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, + 
-127, -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, + -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, + -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, + -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, + -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, + -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, + -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, + -127, -69, -90, -95, -70, 127, -118, -89, -41, 92, -101, -111, + -76, -54, -93, -97, -88, -110, -113, -109, -81, -107, -116, -122, + -111, -119, -121, -125, -110, -116, -120, -119, -111, -118, -121, -123, + -101, -118, -117, -87, -91, -118, -124, -124, -117, -118, -122, -120, + -119, -121, -120, -121, -124, -123, -123, -122, -126, -127, -126, -124, + -123, -123, -123, -123, -123, -123, -122, -120, -122, -124, -124, -124, + -126, -126, -124, -126, -125, -122, -114, -119, -126, -124, -121, -126, + -125, -125, -124, -127, -126, -127, -126, -127, -127, -123, -117, -122, + -125, -125, -124, -125, -125, -126, -126, -126, -127, -127, -127, -126, + -126, -126, -125, -125, -125, -125, -125, -125, -125, -127, -126, -125, + -125, -122, -126, -126, -125, -126, -125, -125, -124, -126, -125, -125, + -125, -125, -126, -126, -126, -127, -127, -126, -126, -126, -126, -126, + -125, -125, -125, -125, -126, -126, -126, -127, -127, -127, -127, -127, + -126, -126, -126, -126, -126, -126, -125, -125, -126, -126, -126, -127, + -127, -127, -127, -126, -127, -126, -126, -125, -126, -125, -126, -125, + -126, -126, -127, -126, -127, -127, -127, -126, -126, -126, -126, -126, + -125, -126, -126, -126, -126, -126, -126, -127, -127, -127, -127, -127, + -127, -126, -126, -126, -126, -126, -126, -126, -126, -127, -126, -127, + -127, -127, -127, -127, -127, -126, -126, -126, -126, -126, -126, -126, + -126, -127, -126, -127, -127, -127, -127, -127, -127, -126, -126, -126, + -126, -126, -126, 
-126, -126, -126, -127, -127, -127, -127, -127, -127, + -127, -126, -126, -126, -126, -126, -75, -94, -93, -65, 127, -75, + -76, -17, 50, -85, -115, -72, -57, -98, -106, -108, -117, -121, + -115, -78, -121, -125, -123, -108, -119, -119, -113, -107, -123, -126, + -117, -122, -124, -120, -108, -111, -113, -112, -87, -117, -121, -121, + -115, -124, -124, -124, -120, -123, -124, -125, -127, -126, -125, -125, + -124, -125, -125, -125, -123, -125, -126, -123, -126, -126, -125, -123, + -124, -125, -124, -123, -125, -125, -124, -127, -125, -124, -122, -119, + -123, -125, -123, -126, -127, -126, -127, -126, -126, -127, -124, -126, + -127, -124, -120, -123, -126, -125, -126, -126, -126, -126, -126, -126, + -126, -126, -126, -126, -126, -126, -126, -126, -126, -127, -127, -126, + -126, -127, -126, -126, -124, -127, -125, -125, -125, -126, -126, -126, + -127, -125, -127, -126, -127, -126, -126, -126, -126, -126, -126, -126, + -126, -127, -126, -126, -127, -127, -127, -127, -127, -127, -127, -126, + -127, -126, -126, -126, -126, -127, -126, -127, -127, -127, -127, -127, + -127, -127, -127, -127, -126, -126, -126, -126, -126, -126, -127, -127, + -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, -126, + -126, -126, -127, -127, -127, -127, -127, -127, -127, -127, -126, -127, + -126, -126, -126, -126, -126, -127, -127, -127, -127, -127, -127, -127, + -127, -127, -127, -126, -127, -127, -126, -127, -126, -126, -126, -127, + -127, -127, -127, -127, -127, -127, -127, -126, -127, -126, -126, -126, + -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, + -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, -112, + -51, -48, 9, 127, -98, -68, 68, 66, -64, -81, 1, -85, + -108, -116, -105, -121, -122, -113, -55, -109, -112, -113, -118, -122, + -125, -88, -108, -117, -109, -121, -116, -118, -111, -71, -113, -121, + -107, -108, -118, -122, -120, -120, -122, -123, -121, -126, -122, -124, + -121, -124, -124, -123, -127, -127, -126, -122, -125, 
-125, -124, -121, + -125, -125, -126, -120, -125, -122, -123, -124, -127, -125, -125, -124, + -126, -125, -121, -123, -127, -122, -124, -123, -124, -125, -124, -124, + -126, -127, -125, -126, -126, -123, -126, -127, -124, -125, -124, -126, + -127, -124, -125, -126, -125, -127, -127, -127, -127, -127, -127, -127, + -126, -126, -126, -126, -126, -126, -126, -126, -123, -124, -127, -126, + -127, -127, -126, -127, -125, -126, -125, -125, -126, -126, -126, -126, + -127, -127, -127, -127, -126, -127, -127, -127, -126, -126, -125, -127, + -125, -126, -127, -126, -127, -127, -127, -127, -127, -127, -127, -127, + -126, -126, -126, -126, -126, -126, -126, -127, -127, -127, -127, -127, + -126, -127, -127, -126, -127, -126, -126, -126, -126, -126, -126, -127, + -126, -127, -127, -127, -127, -127, -126, -126, -126, -126, -127, -126, + -126, -127, -126, -127, -127, -127, -127, -127, -127, -126, -127, -126, + -127, -127, -126, -126, -126, -126, -127, -127, -127, -127, -127, -127, + -127, -127, -127, -127, -127, -126, -127, -126, -126, -127, -127, -127, + -127, -127, -127, -127, -127, -127, -127, -127, -127, -126, -126, -126, + -126, -126, -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, + -126, -127, -126, -126, -71, -78, -73, 4, 127, -80, -78, 112, + 39, -52, -83, 4, -95, -108, -109, -108, -124, -123, -98, -77, + -107, -109, -119, -113, -112, -110, -84, -120, -117, -106, -116, -121, + -112, -105, -114, -122, -111, -110, -117, -116, -116, -119, -123, -123, + -124, -126, -126, -127, -126, -126, -126, -126, -123, -124, -123, -125, + -122, -122, -122, -126, -123, -124, -124, -126, -126, -127, -124, -124, + -124, -124, -123, -124, -124, -124, -123, -124, -125, -122, -117, -126, + -125, -127, -125, -124, -125, -124, -124, -125, -125, -123, -124, -125, + -126, -123, -125, -127, -126, -127, -126, -126, -125, -125, -125, -125, + -124, -125, -125, -125, -126, -126, -126, -127, -127, -126, -126, -125, + -126, -125, -124, -126, -126, -126, -125, -125, -126, -127, -126, -126, + 
-127, -126, -127, -126, -126, -126, -126, -126, -125, -125, -125, -126, + -126, -126, -127, -127, -127, -126, -127, -126, -126, -126, -125, -126, + -126, -126, -126, -126, -126, -126, -127, -127, -127, -127, -127, -127, + -126, -126, -126, -126, -126, -125, -126, -126, -126, -127, -127, -127, + -127, -127, -126, -127, -126, -126, -126, -126, -126, -125, -126, -126, + -126, -126, -126, -127, -127, -127, -127, -127, -126, -126, -126, -126, + -126, -126, -126, -126, -126, -127, -127, -127, -127, -127, -127, -127, + -126, -126, -126, -126, -126, -126, -126, -126, -126, -127, -127, -127, + -127, -127, -127, -127, -127, -126, -126, -126, -126, -126, -126, -126, + -126, -127, -127, -127, -127, -127, -127, -127, -126, -126, -126, -126, + -126, -126, -126, -126, -126, -127, -127, -127, -127, -34, -51, -49, + 20, 123, -53, -57, 127, 27, -62, -67, -24, -108, -107, -112, + -103, -109, -112, -89, -118, -117, -123, -94, -118, -122, -108, -80, + -109, -122, -102, -121, -117, -119, -118, -113, -122, -122, -109, -122, + -122, -121, -126, -123, -124, -121, -122, -123, -123, -122, -123, -123, + -124, -124, -124, -124, -125, -126, -125, -123, -125, -125, -125, -125, + -124, -123, -124, -123, -126, -126, -125, -123, -125, -126, -127, -127, + -126, -125, -121, -124, -122, -125, -124, -124, -126, -127, -126, -126, + -126, -125, -125, -126, -125, -126, -124, -124, -125, -126, -126, -125, + -125, -126, -127, -126, -126, -126, -126, -126, -126, -126, -125, -125, + -126, -126, -125, -126, -126, -126, -126, -125, -126, -125, -127, -127, + -126, -126, -126, -126, -126, -126, -126, -126, -126, -126, -126, -127, + -126, -127, -126, -126, -126, -126, -126, -127, -126, -126, -126, -126, + -126, -126, -127, -127, -126, -126, -127, -126, -126, -126, -126, -126, + -126, -126, -126, -126, -126, -127, -127, -126, -127, -126, -126, -126, + -126, -126, -126, -126, -127, -126, -126, -126, -126, -126, -126, -126, + -126, -127, -126, -127, -127, -126, -126, -126, -126, -126, -126, -126, + -126, -126, -127, 
-127, -126, -127, -127, -127, -126, -126, -127, -126, + -126, -126, -126, -126, -126, -126, -127, -126, -127, -127, -127, -127, + -126, -126, -126, -126, -126, -126, -126, -127, -127, -126, -127, -127, + -126, -127, -127, -127, -127, -126, -127, -126, -126, -126, -126, -126, + -126, -127, -127, -127, -127, -127, -127, -127, -127, -126, -126, -126, + -126, -126, -68, -64, -54, 33, 127, -78, -87, 97, -15, -68, + -95, 15, -116, -121, -121, -110, -120, -115, -97, -111, -114, -120, + -81, -119, -107, -109, -94, -109, -113, -89, -102, -98, -107, -100, + -123, -119, -124, -118, -121, -124, -125, -122, -123, -123, -122, -124, + -125, -126, -125, -125, -125, -127, -125, -124, -124, -125, -123, -126, + -123, -124, -124, -125, -125, -125, -125, -126, -123, -127, -124, -124, + -126, -125, -125, -125, -126, -126, -124, -125, -126, -127, -126, -126, + -126, -126, -126, -125, -125, -126, -125, -126, -126, -127, -124, -125, + -126, -125, -126, -126, -126, -126, -126, -126, -126, -126, -126, -126, + -126, -126, -126, -126, -126, -126, -127, -126, -126, -127, -126, -126, + -125, -127, -125, -127, -127, -126, -126, -126, -126, -126, -126, -127, + -126, -126, -126, -126, -126, -127, -126, -126, -126, -126, -126, -127, + -126, -126, -127, -126, -126, -126, -126, -126, -126, -126, -127, -126, + -126, -127, -127, -127, -126, -127, -127, -127, -127, -127, -127, -127, + -126, -126, -126, -127, -126, -127, -126, -127, -127, -127, -127, -127, + -126, -127, -127, -127, -127, -126, -127, -127, -126, -126, -127, -126, + -127, -127, -127, -127, -127, -127, -127, -126, -127, -126, -127, -126, + -127, -127, -127, -127, -126, -127, -127, -127, -126, -127, -127, -127, + -127, -126, -126, -127, -126, -126, -127, -127, -127, -127, -127, -127, + -127, -127, -127, -127, -126, -126, -127, -127, -127, -127, -127, -127, + -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, + -127, -127, -127, -127, -127, -127, -127, -90, -51, -50, 44, 127, + -89, -89, 86, -40, -95, -107, -4, -110, -114, 
-108, -124, -114, + -112, -103, -122, -125, -119, -81, -110, -106, -119, -112, -111, -86, + -84, -95, -108, -108, -100, -120, -119, -117, -120, -120, -121, -124, + -125, -126, -126, -124, -123, -124, -124, -123, -123, -123, -122, -123, + -125, -126, -125, -126, -127, -126, -126, -125, -124, -127, -124, -124, + -123, -124, -124, -124, -124, -125, -125, -127, -127, -127, -126, -125, + -125, -125, -124, -126, -124, -126, -125, -126, -126, -126, -126, -127, + -127, -127, -127, -125, -125, -125, -124, -125, -126, -126, -126, -126, + -126, -127, -127, -127, -127, -127, -127, -126, -127, -126, -126, -126, + -125, -125, -126, -126, -127, -125, -127, -127, -127, -127, -127, -126, + -127, -126, -126, -125, -126, -126, -126, -126, -127, -127, -127, -127, + -127, -126, -127, -127, -126, -126, -127, -125, -126, -126, -126, -127, + -126, -127, -127, -127, -127, -127, -127, -127, -127, -126, -126, -126, + -126, -126, -127, -126, -127, -127, -127, -127, -127, -127, -127, -127, + -126, -126, -126, -126, -126, -127, -126, -127, -127, -127, -127, -127, + -127, -127, -127, -127, -126, -127, -127, -126, -126, -126, -126, -127, + -127, -127, -127, -127, -127, -127, -127, -126, -127, -127, -126, -126, + -126, -126, -127, -126, -127, -127, -127, -127, -127, -127, -127, -127, + -127, -127, -127, -126, -127, -126, -126, -127, -127, -127, -127, -127, + -127, -127, -127, -127, -127, -127, -126, -127, -126, -127, -127, -127, + -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, -126, -126, + -67, -44, -61, -8, 127, -68, -83, 19, -42, -93, -108, -61, + -99, -109, -106, -111, -105, -115, -123, -117, -118, -120, -92, -115, + -114, -99, -124, -99, -88, -100, -116, -110, -121, -124, -122, -125, + -121, -120, -122, -125, -123, -125, -126, -125, -126, -126, -125, -125, + -126, -125, -125, -125, -127, -127, -125, -127, -125, -126, -125, -125, + -125, -126, -127, -125, -126, -126, -126, -126, -127, -125, -125, -127, + -126, -126, -126, -126, -126, -126, -126, -126, -126, -126, -126, -126, + 
-126, -127, -126, -126, -126, -126, -127, -126, -125, -126, -126, -126, + -126, -127, -127, -127, -126, -127, -127, -126, -127, -127, -127, -127, + -127, -127, -126, -126, -126, -127, -126, -126, -126, -127, -126, -127, + -127, -127, -127, -127, -126, -127, -127, -127, -126, -127, -127, -126, + -126, -127, -127, -126, -127, -127, -127, -127, -126, -127, -127, -126, + -127, -126, -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, + -127, -127, -127, -126, -127, -127, -127, -127, -127, -127, -127, -127, + -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, + -126, -127, -127, -127, -127, -127, -127, -127, -126, -126, -127, -127, + -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, + -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, + -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, + -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, + -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, + -127, -127, -127, -127, -127}; diff --git a/third_party/xtensa/examples/micro_speech_lstm/no_micro_features_data.h b/third_party/xtensa/examples/micro_speech_lstm/no_micro_features_data.h new file mode 100644 index 0000000..5bb404e --- /dev/null +++ b/third_party/xtensa/examples/micro_speech_lstm/no_micro_features_data.h @@ -0,0 +1,45 @@ +/* Copyright 2021 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/ +/* + * * Copyright (c) 2021 Cadence Design Systems Inc. + * * + * * Permission is hereby granted, free of charge, to any person obtaining + * * a copy of this software and associated documentation files (the + * * "Software"), to deal in the Software without restriction, including + * * without limitation the rights to use, copy, modify, merge, publish, + * * distribute, sublicense, and/or sell copies of the Software, and to + * * permit persons to whom the Software is furnished to do so, subject to + * * the following conditions: + * * + * * The above copyright notice and this permission notice shall be included + * * in all copies or substantial portions of the Software. + * * + * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, + * * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF + * * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. + * * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY + * * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, + * * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE + * * SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
+ * */ + +#ifndef TENSORFLOW_LITE_MICRO_EXAMPLES_MICRO_SPEECH_MICRO_FEATURES_NO_MICRO_FEATURES_DATA_H_ +#define TENSORFLOW_LITE_MICRO_EXAMPLES_MICRO_SPEECH_MICRO_FEATURES_NO_MICRO_FEATURES_DATA_H_ + +extern const int g_no_micro_f9643d42_nohash_4_width; +extern const int g_no_micro_f9643d42_nohash_4_height; +extern const signed char g_no_micro_f9643d42_nohash_4_data[]; + +#endif // TENSORFLOW_LITE_MICRO_EXAMPLES_MICRO_SPEECH_MICRO_FEATURES_NO_MICRO_FEATURES_DATA_H_ diff --git a/third_party/xtensa/examples/micro_speech_lstm/train/README.md b/third_party/xtensa/examples/micro_speech_lstm/train/README.md new file mode 100644 index 0000000..874d9b7 --- /dev/null +++ b/third_party/xtensa/examples/micro_speech_lstm/train/README.md @@ -0,0 +1,107 @@ + +# Mini Speech Training with LSTM + +This example shows how to train a 125 kB model that can recognize any of 2 keywords from the below 8 keywords chosen by the user, +classify all other commands as an "unknown" keyword, and predict the chosen keywords from speech data. + +You can retrain it to recognize any combination of words (2 or more) from this +list (all other words would be passed to "unknown" keyword set): + +``` +"down", "go", "left", "no", "right", "stop", "up" and "yes". +``` + +The scripts used in training the model have been sourced from the +[Simple Audio Recognition](https://www.tensorflow.org/tutorials/audio/simple_audio) +tutorial. + +## Table of contents + +- [Overview](#overview) +- [Training](#training) +- [Trained Models](#trained-models) +- [Model Architecture](#model-architecture) +- [Dataset](#dataset) +- [Preprocessing Speech Input](#preprocessing-speech-input) + + +## Overview + +1. Dataset: [Mini Speech Commands](http://storage.googleapis.com/download.tensorflow.org/data/mini_speech_commands.zip) +2. Dataset Type: **Mini_Speech_Commands** +3. Deep Learning Framework: **TensorFlow 2.5.0** +4. Language: **Python 3.7** +5. Model Size: **<125 kB** +6. 
Model Category: **Multiclass Classification** + +## Training + +Train the model in the cloud using Google Colaboratory. + + + +
    + Google Colaboratory +
    + +*Estimated Training Time: ~2 Minutes.* + +## + + +## Trained Models + +The flatbuffer model generated as a result of the training can be found +[here](../micro_speech_lstm.tflite). This model is quantized to int8 precision, +i.e. all the activations and weights are int8. + +## Model Architecture + +This is a simple model comprising a Unidirectional Sequence LSTM layer, a +Reshape layer, a Fully Connected Layer or a MatMul Layer (output: logits) and a +Softmax layer (output: probabilities) as shown below. Refer to the below model +architecture. + +![micro_speech_lstm_model](../images/lstm_model.png) + +*This image was derived from visualizing the 'micro_speech_model.tflite' file in +[Netron](https://github.com/lutzroeder/netron)* + +This produces a model with an accuracy of ~93%, but it's designed to be used as +the first stage of a pipeline, running on a low-energy piece of hardware that +can always be on, and then wake higher-power chips when a possible utterance has +been found, so that more accurate analysis can be done. Additionally, the model +takes in preprocessed speech input as a result of which we can leverage a +simpler model for accurate results. + +## Dataset + +The [Mini Speech Commands Dataset](http://storage.googleapis.com/download.tensorflow.org/data/mini_speech_commands.zip) +consists of over 8,000 WAVE audio files of people saying 8 different words. This +data was collected by Google and released under a CC BY license. You can help +improve it by contributing five minutes of your own voice. The archive is over +2GB, so this part may take a while, but you should see progress logs, and once +it's been downloaded you won't need to do this again. + +## Preprocessing Speech Input + +In this section we discuss spectrograms, the preprocessed speech input to the
Here's an illustration of the process: + +![Spectrogram LSTM](../images/spectrogram.png) + +The model doesn't take in raw audio sample data, instead it works with +spectrograms which are two dimensional arrays that are made up of slices of +frequency information, each taken from a different time window. + +The recipe for creating the spectrogram data is that each frequency slice is +created by running an FFT across a 30ms section of the audio sample data. The +input samples are treated as being between -1 and +1 as real values (encoded as +-32,768 and 32,767 in 16-bit signed integer samples). + +This results in an FFT with 257 entries. + +In a complete application these spectrograms would be calculated at runtime from +microphone inputs, but the code for doing that is not yet included in this +sample code. The test uses spectrograms that have been pre-calculated from +one-second WAV files. diff --git a/third_party/xtensa/examples/micro_speech_lstm/train/micro_speech_with_lstm_op.ipynb b/third_party/xtensa/examples/micro_speech_lstm/train/micro_speech_with_lstm_op.ipynb new file mode 100644 index 0000000..1b6deec --- /dev/null +++ b/third_party/xtensa/examples/micro_speech_lstm/train/micro_speech_with_lstm_op.ipynb @@ -0,0 +1,2585 @@ +{ + "nbformat": 4, + "nbformat_minor": 0, + "metadata": { + "accelerator": "GPU", + "colab": { + "name": "micro_speech_with_lstm_op.ipynb", + "provenance": [], + "collapsed_sections": [] + }, + "kernelspec": { + "display_name": "Python 3", + "name": "python3" + } + }, + "cells": [ + { + "cell_type": "markdown", + "metadata": { + "id": "fluF3_oOgkWF" + }, + "source": [ + "##### Copyright 2021 The TensorFlow Authors." 
+ ] + }, + { + "cell_type": "code", + "metadata": { + "cellView": "form", + "id": "AJs7HHFmg1M9" + }, + "source": [ + "#@title Licensed under the Apache License, Version 2.0 (the \"License\");\n", + "# you may not use this file except in compliance with the License.\n", + "# You may obtain a copy of the License at\n", + "#\n", + "# https://www.apache.org/licenses/LICENSE-2.0\n", + "#\n", + "# Unless required by applicable law or agreed to in writing, software\n", + "# distributed under the License is distributed on an \"AS IS\" BASIS,\n", + "# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n", + "# See the License for the specific language governing permissions and\n", + "# limitations under the License." + ], + "execution_count": 112, + "outputs": [] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "jYysdyb-CaWM" + }, + "source": [ + "# Simple audio recognition: Recognizing keywords" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "SPfDNFlb66XF" + }, + "source": [ + "This tutorial will show you how to build a basic speech recognition network that recognizes ten different words. It's important to know that real speech and audio recognition systems are much more complex, but like MNIST for images, it should give you a basic understanding of the techniques involved. Once you've completed this tutorial, you'll have a model that tries to classify a one second audio clip as \"down\", \"go\", \"left\", \"no\", \"right\", \"stop\", \"up\" and \"yes\"." + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "Go9C3uLL8Izc" + }, + "source": [ + "## Setup\n", + "\n", + "Import necessary modules and dependencies." 
+ ] + }, + { + "cell_type": "code", + "metadata": { + "id": "dzLKpmZICaWN", + "colab": { + "base_uri": "https://localhost:8080/" + }, + "outputId": "9ba23229-9705-42df-8f7b-c4c321660a9b" + }, + "source": [ + "import os\n", + "import pathlib\n", + "\n", + "import matplotlib.pyplot as plt\n", + "import numpy as np\n", + "import seaborn as sns\n", + "import tensorflow as tf\n", + "print(tf.version.VERSION)\n", + "from tensorflow.keras.layers.experimental import preprocessing\n", + "from tensorflow.keras import layers\n", + "from tensorflow.keras import models\n", + "from IPython import display\n", + "\n", + "# Set seed for experiment reproducibility\n", + "seed = 42\n", + "tf.random.set_seed(seed)\n", + "np.random.seed(seed)" + ], + "execution_count": 113, + "outputs": [ + { + "output_type": "stream", + "text": [ + "2.5.0\n" + ], + "name": "stdout" + } + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "yR0EdgrLCaWR" + }, + "source": [ + "## Import the Speech Commands dataset\n", + "\n", + "You'll write a script to download a portion of the [Speech Commands dataset](https://www.tensorflow.org/datasets/catalog/speech_commands). The original dataset consists of over 105,000 WAV audio files of people saying thirty different words. This data was collected by Google and released under a CC BY license.\n", + "\n", + "You'll be using a portion of the dataset to save time with data loading. Extract the `mini_speech_commands.zip` and load it in using the `tf.data` API." 
+ ] + }, + { + "cell_type": "code", + "metadata": { + "id": "2-rayb7-3Y0I" + }, + "source": [ + "data_dir = pathlib.Path('data/mini_speech_commands')\n", + "if not data_dir.exists():\n", + " tf.keras.utils.get_file(\n", + " 'mini_speech_commands.zip',\n", + " origin=\"http://storage.googleapis.com/download.tensorflow.org/data/mini_speech_commands.zip\",\n", + " extract=True,\n", + " cache_dir='.', cache_subdir='data')" + ], + "execution_count": 114, + "outputs": [] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "h_72nZHA9UH9" + }, + "source": [ + "Moving wav files from command directories to unknown sub-directory (Factory Reset to reset data directory)\n" + ] + }, + { + "cell_type": "code", + "metadata": { + "id": "vB4Jq4Qg35iZ", + "colab": { + "base_uri": "https://localhost:8080/" + }, + "outputId": "85d90934-908c-48b2-85b7-8500ad302e04" + }, + "source": [ + "# comment out below line if \"unknown\" directory already exists\n", + "!mkdir /content/data/mini_speech_commands/unknown\n", + "# moves files from their specific commands directory to the \"unknown\" directory (replaces all files with an existing name therefore example set is smaller)\n", + "!mv /content/data/mini_speech_commands/down/* /content/data/mini_speech_commands/unknown\n", + "!rm -d /content/data/mini_speech_commands/down\n", + "!mv /content/data/mini_speech_commands/go/* /content/data/mini_speech_commands/unknown\n", + "!rm -d /content/data/mini_speech_commands/go\n", + "!mv /content/data/mini_speech_commands/left/* /content/data/mini_speech_commands/unknown\n", + "!rm -d /content/data/mini_speech_commands/left\n", + "!mv /content/data/mini_speech_commands/right/* /content/data/mini_speech_commands/unknown\n", + "!rm -d /content/data/mini_speech_commands/right\n", + "!mv /content/data/mini_speech_commands/stop/* /content/data/mini_speech_commands/unknown\n", + "!rm -d /content/data/mini_speech_commands/stop\n", + "!mv /content/data/mini_speech_commands/up/* 
/content/data/mini_speech_commands/unknown\n", + "!rm -d /content/data/mini_speech_commands/up\n", + "!rm /content/data/mini_speech_commands/README.md\n", + "!ls /content/data/mini_speech_commands/unknown | wc -l" + ], + "execution_count": 115, + "outputs": [ + { + "output_type": "stream", + "text": [ + "mkdir: cannot create directory ‘/content/data/mini_speech_commands/unknown’: File exists\n", + "mv: cannot stat '/content/data/mini_speech_commands/down/*': No such file or directory\n", + "rm: cannot remove '/content/data/mini_speech_commands/down': No such file or directory\n", + "mv: cannot stat '/content/data/mini_speech_commands/go/*': No such file or directory\n", + "rm: cannot remove '/content/data/mini_speech_commands/go': No such file or directory\n", + "mv: cannot stat '/content/data/mini_speech_commands/left/*': No such file or directory\n", + "rm: cannot remove '/content/data/mini_speech_commands/left': No such file or directory\n", + "mv: cannot stat '/content/data/mini_speech_commands/right/*': No such file or directory\n", + "rm: cannot remove '/content/data/mini_speech_commands/right': No such file or directory\n", + "mv: cannot stat '/content/data/mini_speech_commands/stop/*': No such file or directory\n", + "rm: cannot remove '/content/data/mini_speech_commands/stop': No such file or directory\n", + "mv: cannot stat '/content/data/mini_speech_commands/up/*': No such file or directory\n", + "rm: cannot remove '/content/data/mini_speech_commands/up': No such file or directory\n", + "rm: cannot remove '/content/data/mini_speech_commands/README.md': No such file or directory\n", + "3311\n" + ], + "name": "stdout" + } + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "BgvFq3uYiS5G" + }, + "source": [ + "Sets wanted commands for training (Available commands: Down, Go, Left, No, Right, Stop, Up, Yes, and Unknown for commands that are not to be tested" + ] + }, + { + "cell_type": "code", + "metadata": { + "id": "70IBxSKxA1N9", + "colab": { + 
"base_uri": "https://localhost:8080/" + }, + "outputId": "a22025ee-8e85-4377-b223-b1d804fe5a25" + }, + "source": [ + "commands = np.array(tf.io.gfile.listdir(str(data_dir)))\n", + "commands = [\"yes\",\"no\", \"unknown\"]\n", + "print('Commands:', commands)" + ], + "execution_count": 116, + "outputs": [ + { + "output_type": "stream", + "text": [ + "Commands: ['yes', 'no', 'unknown']\n" + ], + "name": "stdout" + } + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "aMvdU9SY8WXN" + }, + "source": [ + "Extract the audio files into a list and shuffle it." + ] + }, + { + "cell_type": "code", + "metadata": { + "id": "hlX685l1wD9k", + "colab": { + "base_uri": "https://localhost:8080/" + }, + "outputId": "c1847369-0ba1-4ddc-f817-e55e08fdced2" + }, + "source": [ + "filenames = tf.io.gfile.glob(str(data_dir) + '/*/*')\n", + "filenames = tf.random.shuffle(filenames)\n", + "num_samples = len(filenames)\n", + "print('Number of total examples:', num_samples)\n", + "print('Number of examples per label:',\n", + " len(tf.io.gfile.listdir(str(data_dir/commands[0]))))\n", + "print('Example file tensor:', filenames[0])" + ], + "execution_count": 117, + "outputs": [ + { + "output_type": "stream", + "text": [ + "Number of total examples: 5311\n", + "Number of examples per label: 1000\n", + "Example file tensor: tf.Tensor(b'data/mini_speech_commands/no/53458368_nohash_0.wav', shape=(), dtype=string)\n" + ], + "name": "stdout" + } + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "9vK3ymy23MCP" + }, + "source": [ + "Split the files into training, validation and test sets using a 80:10:10 ratio, respectively." 
+ ] + }, + { + "cell_type": "code", + "metadata": { + "id": "Cv_wts-l3KgD", + "colab": { + "base_uri": "https://localhost:8080/" + }, + "outputId": "8d1c6417-0157-4945-f0ef-864a87ce6679" + }, + "source": [ + "# Take 80% of total number examples for training set files\n", + "train_files = filenames[:4249]\n", + "# Take 10% of total number examples adding to 80% of total examples for validation set files\n", + "val_files = filenames[4249: 4249 + 531]\n", + "# Take -10% of total number examples for test set files\n", + "test_files = filenames[-531:]\n", + "\n", + "print('Training set size', len(train_files))\n", + "print('Validation set size', len(val_files))\n", + "print('Test set size', len(test_files))" + ], + "execution_count": 118, + "outputs": [ + { + "output_type": "stream", + "text": [ + "Training set size 4249\n", + "Validation set size 531\n", + "Test set size 531\n" + ], + "name": "stdout" + } + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "g2Cj9FyvfweD" + }, + "source": [ + "## Reading audio files and their labels" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "j1zjcWteOcBy" + }, + "source": [ + "The audio file will initially be read as a binary file, which you'll want to convert into a numerical tensor.\n", + "\n", + "To load an audio file, you will use [`tf.audio.decode_wav`](https://www.tensorflow.org/api_docs/python/tf/audio/decode_wav), which returns the WAV-encoded audio as a Tensor and the sample rate.\n", + "\n", + "A WAV file contains time series data with a set number of samples per second. \n", + "Each sample represents the amplitude of the audio signal at that specific time. In a 16-bit system, like the files in `mini_speech_commands`, the values range from -32768 to 32767. \n", + "The sample rate for this dataset is 16kHz.\n", + "Note that `tf.audio.decode_wav` will normalize the values to the range [-1.0, 1.0]." 
+ ] + }, + { + "cell_type": "code", + "metadata": { + "id": "9PjJ2iXYwftD" + }, + "source": [ + "def decode_audio(audio_binary):\n", + " audio, _ = tf.audio.decode_wav(audio_binary)\n", + " return tf.squeeze(audio, axis=-1)" + ], + "execution_count": 119, + "outputs": [] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "GPQseZElOjVN" + }, + "source": [ + "The label for each WAV file is its parent directory." + ] + }, + { + "cell_type": "code", + "metadata": { + "id": "8VTtX1nr3YT-" + }, + "source": [ + "def get_label(file_path):\n", + " parts = tf.strings.split(file_path, os.path.sep)\n", + "\n", + " # Note: You'll use indexing here instead of tuple unpacking to enable this \n", + " # to work in a TensorFlow graph.\n", + " return parts[-2] " + ], + "execution_count": 120, + "outputs": [] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "E8Y9w_5MOsr-" + }, + "source": [ + "Let's define a method that will take in the filename of the WAV file and output a tuple containing the audio and labels for supervised training." + ] + }, + { + "cell_type": "code", + "metadata": { + "id": "WdgUD5T93NyT" + }, + "source": [ + "def get_waveform_and_label(file_path):\n", + " label = get_label(file_path)\n", + " audio_binary = tf.io.read_file(file_path)\n", + " waveform = decode_audio(audio_binary)\n", + " return waveform, label" + ], + "execution_count": 121, + "outputs": [] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "nvN8W_dDjYjc" + }, + "source": [ + "You will now apply `process_path` to build your training set to extract the audio-label pairs and check the results. You'll build the validation and test sets using a similar procedure later on." 
+ ] + }, + { + "cell_type": "code", + "metadata": { + "id": "0SQl8yXl3kNP" + }, + "source": [ + "AUTOTUNE = tf.data.AUTOTUNE\n", + "files_ds = tf.data.Dataset.from_tensor_slices(train_files)\n", + "waveform_ds = files_ds.map(get_waveform_and_label, num_parallel_calls=AUTOTUNE)" + ], + "execution_count": 122, + "outputs": [] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "voxGEwvuh2L7" + }, + "source": [ + "Let's examine a few audio waveforms with their corresponding labels." + ] + }, + { + "cell_type": "code", + "metadata": { + "id": "8yuX6Nqzf6wT", + "colab": { + "base_uri": "https://localhost:8080/", + "height": 716 + }, + "outputId": "8575133a-16c0-4d08-9857-ee4ecbfbe2fd" + }, + "source": [ + "rows = 3\n", + "cols = 3\n", + "n = rows*cols\n", + "fig, axes = plt.subplots(rows, cols, figsize=(10, 12))\n", + "for i, (audio, label) in enumerate(waveform_ds.take(n)):\n", + " r = i // cols\n", + " c = i % cols\n", + " ax = axes[r][c]\n", + " ax.plot(audio.numpy())\n", + " ax.set_yticks(np.arange(-1.2, 1.2, 0.2))\n", + " label = label.numpy().decode('utf-8')\n", + " ax.set_title(label)\n", + "\n", + "plt.show()" + ], + "execution_count": 123, + "outputs": [ + { + "output_type": "display_data", + "data": { + "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAAlsAAAK7CAYAAADFgUrzAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4yLjIsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy+WH4yJAAAgAElEQVR4nOzdeZwcdZ3/8ddn7tzn5E4ggXBEQI4hgCyQ5ZAQlIDuSkBXLo2usqu/xXWDsKh4gLritagbFQOoIKJAkACGW+4MRyB3Qgi5k0lC7mSu/vz+6JpJ92R6ZpKu7urj/Xw88piu6m/X9zOTec98pqq6ytwdEREREcmMkqgLEBERESlkarZEREREMkjNloiIiEgGqdkSERERySA1WyIiIiIZpGZLREREJIPUbImIiBQYM5thZt+Oug6JU7MlIiIikkFqtkREREQySM1WgTCzFWb2FTN7y8y2mdkfzawqeO6zZrbMzLaY2UwzGxZ1vSKZpkxIvjMzN7PDE5ZbDw2a2QQzW21m15nZRjNbZ2ZXpdhOLzN72sx+anEzzOx2M3vEzHaY2StmdljC+A+Z2ZwgN3PM7EPB+n80s7cTxs02szkJy383s4uDxynzV4zUbBWWTwATgdHAccCVZnY2cEvw3FDgPeDeyCoUyS5lQgrZEKAPMBy4BrjdzPolDjCzAcCTwAvu/u++7x59U4BvAv2AZcB3gvH9gUeAnwIDgNuAR4LtvAyMNbOBZlZOPFPDgmauG1AD/D1h+v3yF+6nnz/UbBWWn7r7WnffAjwMHA98ErjD3V9393rgeuA0Mzs0ujJFskaZkELWCNzs7o3uPgvYCRyZ8Pww4FngT+5+Y5vXPuDur7p7E/B74tkAuBBY6u53u3uTu98DLAI+6u57gDnAmcBJwFzgBeB04NTgdZsT5mgvf0VJzVZhWZ/weDfQk3jY3mtZ6e47gc3E/xISKXTKhBSyzUGz1KLle7zFhUA34JftvLa9bECbfATeY18+ngUmEG+4ngWeAc4K/j3bxTmKjpqtwrcWOKRlwcx6EN81vCayikSipUxIvtgNdE9YHnKAr/8V8BgwK/g+74qkfARGsS8fbZutZ0ndbElAzVbhuwe4ysyON7NK4LvAK+6+ItqyRCKjTEi+eBO43MxKzWwi8YbmQF0LLAYeDs6r6sws4Agzu9zMyszsUmAc8Nfg+ReJH6ocD7zq7vOJN2enAM8dRH1FQc1WgXP3J4D/Bv4MrAMOI35ipEhRUiYkj3wJ+Ciwlfi5hg8e6AaCE+KnAquBhzp7R2BwztVHgOuIH17/KvARd98UPL8LeB2Y7+4NwcteAt5z940HWl+xsH1vTBARERGRsGnPloiIiEgGhdJsmdkdwUXV5qV43oKLqS0LLnB2YhjziuQqZUIkmTIhxSysPVsziF+4LJULgLHBv6nAL0KaVyRXzUCZEEk0A2VCilQozZa7Pwds6WDIZOAuj3sZ6GtmQ8OYWyQXKRMiyZQJKWZlWZpnOLAqYXl1sG5d24FmNpX4XzX06NHjpKOOOiorBYp05rXXXtvk7tUhbU6ZkLwXRSaUB8llqTKRrWary9x9OjAdoKamxmtrayOuSCTOzNpeVTkrlAnJVVFkQnmQXJYqE9l6N+IaYGTC8gh0tWYpbsqESDJlQgpWtpqtmcCng3ebnApsc/f9DpeIFBFlQiSZMiEFK5TDiGZ2D/F7JQ00s9XA14FyAHf/JfHL/08ClhG/19NVYcwrkquUCZFkyoQUs1CaLXe/rJPnHfhiGHOJ5ANlQiSZMiHFTFeQFxEREckgNVsiIiIiGaRmS0RERCSD1GyJiIiIZJCaLREREZEMUrMlIiIikkFqtkREREQySM2WiIiISAap2RIRERHJoFCaLTObaGaLzWyZmU1r5/lRZva0mb1hZm+Z2aQw5hXJVcqESDJlQopZ2s2WmZUCtwMXAOOAy8xsXJthNwL3ufsJwBTg5+nOK5KrlAm
RZMqEFLsw9myNB5a5+3J3bwDuBSa3GeNA7+BxH2BtCPOK5CplQiSZMiFFLYxmaziwKmF5dbAu0TeATwV3ep8F/FuqjZnZVDOrNbPaurq6EMoTyTplQiRZaJlQHiQfZesE+cuAGe4+ApgE3G1m7c7t7tPdvcbda6qrq7NUnkjWKRMiybqUCeVB8lEYzdYaYGTC8ohgXaJrgPsA3P0loAoYGMLcIrlImchhP3tyKY/NWx91GcVGmZCiFkazNQcYa2ajzayC+ImNM9uMWQmcA2BmRxMPkfb/SqFSJnLYD2cv4fO/ey3qMoqNMiFFLe1my92bgGuBx4GFxN9NMt/Mbjazi4Jh1wGfNbO5wD3Ale7u6c4tkouUicJzynefYMr0l6IuI28pE1LsysLYiLvPIn5CY+K6mxIeLwBOD2MukXygTOS+/31qKdeePbbDMc8uqeNzd9eytzHGhu31WaqsMCkTUsx0BXkRKUq3zV7S6Zgr7niVvY2xLFQjIoVMzZaIFKWYw3ceWUBDU9ebqacWbchgRSJSqNRsiUhR2NvYzL2vrkxa96u/v8sfa1eleMX+rp5RG3ZZIlIEQjlnS0Qk113y8xdZuG77fusbD2DPFsDsBRs4b9zgsMoSkSKgPVsiUhTaa7QOxmfv0t4tETkwarZEpOBt2dWQ8jmzLBYiIkVJzZaIFLxr7pwTdQkiUsTUbIlIwVu1ZXfGtu3u/OGVlWzb3ZixOUQkv4XSbJnZRDNbbGbLzGxaijGfMLMFZjbfzP4QxrwiuUqZyC2ZvA75vDXb+doDb/OV++dmbpICoExIMUv73YhmVgrcDpwHrAbmmNnM4GrALWPGAtcDp7v7+2Y2KN15RXKVMpHf3B0z473Nu1KO2dvYTFV5KQD1Tc1Ax+eFFTtlQopdGHu2xgPL3H25uzcA9wKT24z5LHC7u78P4O4bQ5hXJFcpEzmmox1bTc3Otj3xQ4Brt+5h9PWzuK92FR/+0XMpX3Pdn+YGr41x818XpBwnrZQJKWphNFvDgcSrAq4O1iU6AjjCzF4ws5fNbGKqjZnZVDOrNbPaujrd8F3ykjKRR74zayEf/ObfAHinbicAX73/Leo7uP7WS+9sBuC5pXW8tXpb5ovMf6FlQnmQfJStE+TLgLHABOAy4Fdm1re9ge4+3d1r3L2muro6S+WJZJ0ykUVNzeHe39CDk8Bium1imLqUCeVB8lEYzdYaYGTC8ohgXaLVwEx3b3T3d4ElxEMlUoiUiRyzfW9Tl8YZuuhWhigTUtTCaLbmAGPNbLSZVQBTgJltxjxI/K8VzGwg8d3Fy0OYWyQXKRN56kAucFq3o57PJFxN/rX33mf6c++wftveDFSW95QJKWppN1vu3gRcCzwOLATuc/f5ZnazmV0UDHsc2GxmC4Cngf90983pzi2Si5SJ4vD2mq37rfvurEX8y29eiaCa3KZMSLEL5UbU7j4LmNVm3U0Jjx34j+CfSMFTJorXxh31UZeQk5QJKWa6gryISCCMM7a27WnUNbdEJImaLRGRkHV0jS4RKT5qtkREWoT0ZsRNO3UoUUT2UbMlIgIcOu0Rale8H3UZIlKA1GyJiARum72kS+Pe391IfaOuaCoiXaNmS0TkIFz/wNtRlyAieULNlojIQdi6uzHqEkQkT6jZEhEREckgNVsiIiIiGRRKs2VmE81ssZktM7NpHYz7uJm5mdWEMa9IrlImRJIpE1LM0m62zKwUuB24ABgHXGZm49oZ1wv4EqAbh0lBUyZEkikTUuzC2LM1Hljm7svdvQG4F5jczrhvAd8D9oYwp0guUyZEkikTUtTCaLaGA6sSllcH61qZ2YnASHd/pLONmdlUM6s1s9q6uroQyhPJOmVCuPK3r0ZdQi4JLRPKg+SjjJ8gb2YlwG3AdV0Z7+7T3b3G3Wuqq6szW5xIBJSJ4vDMYjUCXXUgmVAeJB+F0WytAUYmLI8I1rXoBRwDPGNmK4BTgZk6+VEKmDIhkkyZiJC7s6e
hmW17Glm/bS9PLNjAik27oi6rqJSFsI05wFgzG008PFOAy1uedPdtwMCWZTN7BviKu9eGMLdILlImRJIpExH6/SsrufHBefutX3HrhRFUU5zS3rPl7k3AtcDjwELgPnefb2Y3m9lF6W5fJN8oEyLJlIloPTZvfdQlFL0w9mzh7rOAWW3W3ZRi7IQw5hTJZcqEAKzYtIuH3lzLv59zOGYWdTmRUiais2j99g6f//ZfF9C/ZwUPvL6Gs48axPWTjs5SZcUjlGZLRET2d8VvX+W9zbu59OSRDOlTFXU5UqQ27Wxod/2dL67glDH9+fXz77auW7pxp5qtDFCzJSKSIe9t3h11CSIpfX3m/KhLKBq6N6KIiIhIBqnZEhHJsPNue1ZvtRcpYmq2RKSg5UKTs6O+iV8/vzzqMkQkImq2RKRgNcecCf/zTNRliEiRU7MlIgXr4blroy6hlVHcl34QKWZqtkSkIG3csZcv//HNqMto5XjUJYhIREJptsxsopktNrNlZjatnef/w8wWmNlbZvakmR0SxrwiuUqZiN747zwZdQmSQJmQYpZ2s2VmpcDtwAXAOOAyMxvXZtgbQI27HwfcD3w/3XlFcpUyIZJMmcgvP3liKcs27oy6jIISxp6t8cAyd1/u7g3AvcDkxAHu/rS7t1zd72Xid3wXKVTKRIQu+fkLHDrtkajLkGTKRB750RNLmDL9pajLKChhNFvDgVUJy6uDdalcAzya6kkzm2pmtWZWW1dXF0J5IlmnTETojZVboy6hXV7cp2yFlgnlITsammJRl1BQsnqCvJl9CqgBfpBqjLtPd/cad6+prq7OXnEiEVAmwrUyh2+P8/tXVnLLrIVRl5HzOsuE8iD5KIxmaw0wMmF5RLAuiZmdC9wAXOTu9SHMK5KrlImInPmDp6MuoUP/91zRXthUmZCiFkazNQcYa2ajzawCmALMTBxgZicA/0c8QBtDmFMklykTEWhszo/DHovX74i6hCgoE3mmuI96hy/tZsvdm4BrgceBhcB97j7fzG42s4uCYT8AegJ/MrM3zWxmis2J5D1lIrvcnYamGBff/kLUpXTJ+T9+LuoSsk6ZyD879jZFXUJBKQtjI+4+C5jVZt1NCY/PDWMekXyhTGRHU3OMr97/Fn95Y78jUpJjlAkpZrqCvIjkpabmGJf96uW8bLSmTH+J11e+H3UZIpIlarayaPveRu56aQVe5O8BFwnDz55axpwV+dmwvLx8C/91/1tRlyHSoVVbcvfdvflGzdYBuOfVlVzy8xfY3dDE7oYmvv7QPJ5bUsfW3Q1dOjn3vx+cx00PzWf09bN46M017G5oYsmGHfxo9hI+8rO/Z+EzECkc+X6Fa/3JJdkw/bl3Dvq1Z3w/t9/dm09COWerWFz/l7cBGHfT463r7nzpPQC6lZfSo7KUTTsbqCov4aPHDeNbFx9DVXkp79Tt5CdPLGXm3LWtr/vSvfvfILflqtcvXX82Q/t0y+SnIpL3Skos6hJEctqu+ia+O2tRWts4dNoj/OaKGs45enBIVRUnNVtd9NSiDR0+v6exmT2NzQDsbYzxp9dW86fXVh/UXKfd8hQfGNabu685hf49Kg5qGyKFLt9brbVb9zBvzTaOGd4n6lKkQIW193TW2+vVbKVJhxG76PuPLc7qfPPXbufEb83m6hlzeH7pJu54/l2WbCjK6/OItGtnfX6/NX13QzMf+dnzUZchBSys84NXbtkVynaKmfZsdcDdeaduJ4N6V7EoogsRPrVoI08tSr6+339NPIrPnTlGh1GkaD3wxur9ciEiyd4L6fZVc1a8z56GZp5ctIEPjxtCRZn20xwoNVvA3sZm5q/dxuPzN/DEwg0sr8vtLv57jy3ie48t4uLjh/Hv54xl9MAemKnxkuLx9KLCuQHxb194l4s+OIwelWVUlZdGXY4UkHteXRnato6+6TEALq0Zyff+6bjQtlssQmm2zGwi8BOgFPi1u9/a5vlK4C7gJGAzcKm
7rwhj7q7atruR+19fzcvLN7N9TyM765uYv3Z7NksI3YNvruXBN/eddH/64QO44rRDOWFUP6p7VUZYmeRDJvJZaQHt1f3mwwv45sMLGFPdg6eumxB1ORmjTGTGll0NNMec6l6VbN/byNxVW+ldVc4HR/bl96+E12y1+GPtKv5Yu4qnrjuLQwb04Pllmzhz7MC0/+B/etFGyktLqDm0H0s27OC4EX1Dqjg3pN1smVkpcDtwHrAamGNmM919QcKwa4D33f1wM5sCfA+4NN25u2LB2u1M+mlxXFbhhWWbeWHZ5v3WD+tTxVlHDmJQr0ouPG4oRwzuFUF1xSPXMhGLOWYUzN5Pd+eBPLyQaWeW1+1iwdrtVJSVcPignlGXE6pcy0QhOfFbswG4+5rx/MtvXs3avGf/8Nn91v3yUyfx+d+9xuWnjOIPr6zkJ1OOZ/LxwwFYv20v9U3N7G2M8dMnl7KroYlvTT6GZXU7GT2gB1fNmJO0rbuuHs+ZR1Rn5XPJBkv3BDozOw34hrufHyxfD+DutySMeTwY85KZlQHrgWrvZPKamhqvra3db/3O+iZi7pSYUWpGSQmUmlFaYkm/UBau284FPymORutgjBvam7OOrOasI6opMWNQr0qqyktxPP617OT9Xl353d2VX+9daQK6tp0ujOlkS326l3ewfXvN3Ws6ryOaTDz05hrKS0uo7llJU8z57F37j0vlO5ccQ7/uFexpaGZw7yr6dCunoTnG3sZmulWUEos5g3pVUVICW3c3AlBRVkJpidHYHKM55qzYtJshfapYv20vu+qbKC8zdjc0c8TgXvHtNcXYsquBvt3LKTFr/f9yh/qmGIcP6smWXQ0A9Kwso8SgOeY0u9PU7DTHnJ31TTyxcAM/fmJplz+3fDXhyGo+8w9jOGxQD7btaaS8tIQde5uoKi9h2+5GKstLGdW/+76fgSUW/5lYYtTtqGfhuu10Ky9l4456+nQrp1tFKU3NzpbdDby/q4FeVWX07V7OwJ7xveA79zax6v3drfPMXbWVz5wxhnHDeqesMepMpMoDwPu7GigrNZz49xgOjuNOsM5bn/P4k0nLSeOCCtp7rjnm9OtRgTuUl8a//uWlJeyqb2L99r0M7dON93fHv96G7Xt9innaWx9zIGldfEwsRtHsTEjlyg8dyqad9SzdsJMR/brRvTK+D+kTNSPoUVnGrvomGppilJeWMHpgD/Y2NlNeWkLMnfLSEhqaY/SsLGPx+h307V5Oz8oyyktL6F5RSnPMaYw5Tc0xtu1pZMP2eo4b0YfBvatS1pMqE2EcRhwOrEpYXg2ckmqMuzeZ2TZgALDpYCb84u9f59kl7Z+zYUZrE9bQhQuNFrMF67azYN12fvHMwV/0rpCUlhjvfHdSGJvKaibcnWO+/njnAztwwwPz0nq9hO+ZxXU8szjac9P+8sYaVtx6YRibyvrviSnTX2ax3sFd8Ga8uKL1ceL/98MJ17UM2+JvT6Sy7MDOr8y5E+TNbCowFWDUqFHtjrn8lFGcMXYgMXeaYwQf4/9i7q3rZ729jpW63cBB+fK5YxnQs4PzvrqwR7Qr+0y7smO1K3tfw5grV4+ydZYJM2PqmWOY/tzybJcWqupeldTtqAfglNH9ufC4oZSYUVYS31tQ3xTjxgfVFCa64Jgh1Bzan1iwB7A5Ft8L+KMnlkRdWsZ05XcEwOcnjGHTjobWXJvF92ubEXy01sckPWcJY4K94YmvaVlvsGF7Pbc+uohJxw7h1DEDWvfANsZi/Prv77JlVwPnf2AwL72zmc+cMYZ+3csT5m0zT+u2k+coCT4Ba1NjSfD853/3eka+zsVqeN9unHP0IA6r7hnspTTKSkq46aF57Gpo5hsfHXfAjRaE02ytAUYmLI8I1rU3ZnWwe7gP8RMg9+Pu04HpEN9F3N6Y8z8wpEuFTbvgKL7/2CJ+XgR7bs49ejDLN+3kI8cN44Mj+jCsbzdG9OvG3FXbOO2wAZSWGO5eMOf
t5LisZ+Jrk47ma5OO3m/93+av57uzFnLv1NMY0if1ru988alTD8HdGX39rKhLyZjyUuOxL5/JYdUHf97Wl84dG2JFoQgtE13JA8AlJ4xIs+Su+fxZh7W7/gsTDs/K/Lnula+dw9xVW5m/djsz567l1DH9mb1gA5t2NnDx8cOS3uSVqLzUeP2/z+O9zbuTLvxb39RMqRkxJ6uXoPj4Sel9P4VxzlYZsAQ4h3hY5gCXu/v8hDFfBI51988HJz5+zN0/0dm2OzoefyBiMWfJxh088tY6Xlm+hbfWbGVvY34eYuxZWcbXJh3NlJNH6jpbWXYA56fkfCby3fNLN/Gp37wSdRmhevorExg9sEfUZRyQqDOhPMBDb66hd1U5Zx1Rzfu7GzjntmeZdOxQ/pCBdyImOmV0f7426Wgm3/4Cd149nrOOqCYWc37y5FI+c8ZoNu1soE+38i7fBaXldnU3Xng0Ty/eyN1Xn5KXv+NSZSLtZivY+CTgx8Tf0nuHu3/HzG4Gat19pplVAXcDJwBbgCnu3ukxj2wEyd3Z09jMkws38vTijTw8dy2NzdHeIvaCY4bQu6qcnlVlfPKUUYxJ4y9cCU9Xf7EEY/M2E/lg3bY9nHbLU1GXEYp3vjspby9lEXUmlIfUmppjnHbrU62H5sP04rSzGdY33Pv3rt+2lz2NzXn3B0dbmTxBHnefBcxqs+6mhMd7gX8OY66wmRndK8r46AeH8dEPDuO2Txzf+tz7uxp48Z3NPPL2Wma9vT7pdT//5ImcdEg/dtU3Maa6J7GYc8OD87jn1ZVcfsoorj59NG+v2cr5HxjC4vU72LyzgZv/uqDDc8i+//Hj+MTJI1M+L/kjnzORD/K1OWmre0VpwXwunVEmsqustIR+3ctDa7auOv1QNu1s4GeXnRDK9toqhNMcOpJzJ8jnkn49KrjwuKGcO24Qw/os5t/OGUvdjr0M6l1F76rkSwSUlBi3fOxYbvnYsa3rWq6Vc8KofgCcO24wLyzbRFmJccqYAbg7L72zmeNH9cUwulXo6tEiXRHSLd8it+DmiVGXIAXsa5OO5srfzul8YCd+dtkJfOS4oTrnNw1qtrqgsqyUGz8yDoA+3VJfh6krTj98YOtjM+NDCcsi0jWDelV2eHKtiMCRQ8K5gPVHPzgslO0UM91NUkTyjpnxb+fk3DvuDshXJx4ZdQlS4Ib26UbtjeemtY3rzjsipGqKm/ZsiUheyucDGjOuOpkJRw6KugwpAgM7ul5iF+T7HzW5Qnu2RCQvjR7Yg/+aeFTUZRyUrr4dXiRKFx+vw4dhUbMlInnJzPjXCYcx9+sfZtzQ1PfvyzW//8wpHDeib9RliHTqkAH5fRmGXKJmS0TyWp9u5cz60hkpr+Sda07Xm2JEio6aLREpCNd9WCfyioSpR6UuRxQWNVsiUhDKS0v4+SdPjLqMDj153VlRlyDSZVd+aHTUJRSMtJotM+tvZrPNbGnwsV87Y443s5fMbL6ZvWVml6Yzp0guUyaiNenYoTl9ODGdm0vnK2UiP038wJCs3ui50KX7lZwGPOnuY4Eng+W2dgOfdvcPABOBH5uZzg6VQqVMRGzaBUfx4rSzoy5jP4cM6B51CVFRJvKQLhYfrnSbrcnAncHjO4GL2w5w9yXuvjR4vBbYCFSnOa9IrlImcsCwvt1486bzoi4jSRH/7lImpOil22wNdvd1weP1wOCOBpvZeKACeKeDMVPNrNbMauvq6tIsTyTrlIkc0bd7BX27p3d7rTAV8X3lQs2E8pB5PSvL+FwOH47PR51eQd7MngCGtPPUDYkL7u5mlvL2sGY2FLgbuMLdY6nGuft0YDpATU1NgdxuVgqJMpE/fnfNKXzkZ89HXQYAw/pWRV1CxmQzE8pD5s375vlRl1BwOm223D3ljZXMbIOZDXX3dUFINqYY1xt4BLjB3V8+6GpFcoAykT+OGd4n6hJa3X55br9TMh3KhEjH0j2MOBO4Inh8BfBQ2wFmVgE
8ANzl7venOZ9IrlMmcszZR+XGPQj7di/aW/QoE1L00m22bgXOM7OlwLnBMmZWY2a/DsZ8AjgTuNLM3gz+HZ/mvCK5SpnIMb/6dE3UJfDti4+JuoQoKRNS9Do9jNgRd98MnNPO+lrgM8Hj3wG/S2cekXyhTOSe0pLoT0zvWZnWj9q8pkyI6AryIiIiResfj0y+wsbk44dFVElhU7MlIpJhxXvVB8ll73x3Ep89Y0zSuoE9KyOqprCp2RIRyZAvnzuW40b0yZmT9EUSlZZYUV9tN5vUbImIZMiXzz2Cmdf+A72qcufiqiKJjh7SG4BTx/SPuJLCpmZLRCQD9MtL8kG/HhWsuPVCzjmqwwv7S5rUbImIZMC9U0+LugQRyRFqtkRERIrE2EE9AThkQPeIKykuarZERESKwAvTzubnnyzc20blsrSbLTPrb2azzWxp8LFfB2N7m9lqM/vfdOcVyVXKhMg+ykPuGN63G2Wl8V/7ehNidoWxZ2sa8KS7jwWeDJZT+RbwXAhziuQyZUJkH+Uhh3SvKAXgiMG92n1eTVhmhNFsTQbuDB7fCVzc3iAzOwkYDPwthDlFcpkyIbKP8pBDBveu4t6pp/KjS5NvPfmJk0dy9lGD+NxZh0VUWWEL44Zdg919XfB4PfGwJDGzEuCHwKeI34hUpJApE0WivNRobPaoy8h1ykPEXrr+bOobY63Lp44ZsN+YPt3KuePKk7NZVlHpUrNlZk8AQ9p56obEBXd3M2vvJ88XgFnuvto6uW+FmU0FpgKMGjWqK+WJZJ0yId//+HGc/4EhfPBm7YhRHnLb0D7doi6h6HWp2XL3lH9pmNkGMxvq7uvMbCiwsZ1hpwFnmNkXgJ5AhZntdPf9jt27+3RgOkBNTY3+ZJScpEzIJ04eydbdDe0+9+2Lj8lyNdFSHkQ6FsZhxJnAFcCtwceH2g5w90+2PDazK4Ga9kIkUiCUiSLhKX7Vf+rUQ7JbSG5THqTohXGC/K3AeWa2lPix9lsBzKzGzH4dwvZF8o0ykWOuPn10Rrbbt3s5/6LGqjPKgxS9tPdsuftm4Jx21tcCn2ln/QxgRrrziuQqZSL39Kgszch2zYxvXXwMd7/8Xka2XwiUB7zQvSUAACAASURBVJFwDiOKiAjw1YlHctSQ9q9fJCLFS82WiEhIvjDh8KhLEJEcpHsjioiEoOXK3CIibWnPlohImr738WM5+dD+UZchIjlKzZaIFLxM3+/t0pN1cU0RSU2HEUVEREQySM2WiIiISAap2RIRERHJIDVbIiIHqHeVTncVka5Lq9kys/5mNtvMlgYf+6UYN8rM/mZmC81sgZkdms68IrlKmSgOT31lQtQl5A1lQiT9PVvTgCfdfSzwZLDcnruAH7j70cB42r/ru0ghUCZykYX7fsSBPStD3V6BUyak6KXbbE0G7gwe3wlc3HaAmY0Dytx9NoC773T33WnOK5KrlAmRZMqEFL10m63B7r4ueLweGNzOmCOArWb2FzN7w8x+YGYpL7VsZlPNrNbMauvq6tIsTyTrlIkcpPsVRirUTCgPko86PcvTzJ4AhrTz1A2JC+7uZuYp5jgDOAFYCfwRuBL4TXvzuft0YDpATU1Ne9sTiZQykX8mHTs06hIKWjYzoTxIPuq02XL3c1M9Z2YbzGyou68zs6G0f4x9NfCmuy8PXvMgcCopfrGI5DplorhNOLI66hJyjjIh0rF0DyPOBK4IHl8BPNTOmDlAXzNr+Ql1NrAgzXlFcpUyUeBmXDU+6hLyjTIhRS/dZutW4DwzWwqcGyxjZjVm9msAd28GvgI8aWZvE79N2a/SnFckVykTeeLyU7p+P8PaG8/lrCOqOW9ce6cbSSeUCSl6aV2Zz903A+e0s74W+EzC8mzguHTmEskHykTu+uu//QNNMefi21+gd1UZxw7v0+XXVpSVcOfV2qN1MJQJEV1BXkSKxDHD+zB6QI+Uz3/0g8MAOHFU32yVJCJFQvecEJGiUVYav7j
p4YN6ckj/7knPfeeSY/jZZScA8Itn3uF7jy3Ken0iUpjUbIlI0ehRWcbd14zn2OF96Nu9gktrRvLH2lUpxw/sWcGehma6l6e8DJqISKd0GFFEisoZY6vp270CgFs/fmyHYz9+4gjm3zyRslL9qBSRg6efICJStMyMez57KucePZieFe3s6A/3looiUqR0GFFEitpphw3gtMMGRF2GiBQw7dkSERERySA1WyIibVxywnBG9OvGJ8cfEnUpIlIA0m62zKy/mc02s6XBx34pxn3fzOab2UIz+6mZ6WwIKUjKRP4b0qeK5//rbEYN6N75YOmQ8iASzp6tacCT7j4WeDJYTmJmHwJOJ3514GOAk4GzQphbJBcpEyL7KA9S9MJotiYDdwaP7wQubmeMA1VABVAJlAMbQphbJBcpEyL7KA9S9MJotga7+7rg8Xpgvzu1uvtLwNPAuuDf4+6+sL2NmdlUM6s1s9q6uroQyhPJOmVCZB/lQYpely79YGZPAEPaeeqGxAV3dzPzdl5/OHA0MCJYNdvMznD3v7cd6+7TgekANTU1+21LJBcoEyL7KA8iHetSs+Xu56Z6zsw2mNlQd19nZkOBje0MuwR42d13Bq95FDgN2C9IIvlAmRDZR3kQ6VgYhxFnAlcEj68AHmpnzErgLDMrM7Ny4ic+truLWKQAKBMi+ygPUvTCaLZuBc4zs6XAucEyZlZjZr8OxtwPvAO8DcwF5rr7wyHMLZKLlAmRfZQHKXpp367H3TcD57Szvhb4TPC4GfhcunOJ5ANlQmQf5UFEV5AXERERySg1WyIiIiIZpGZLREREJIPUbImIiIhkkJotERERkQxSsyUiIiKSQWq2RERERDJIzZaIiIhIBqnZEhEREcmgtJotM/tnM5tvZjEzq+lg3EQzW2xmy8xsWjpziuQyZUIkmTIhkv6erXnAx4DnUg0ws1LgduACYBxwmZmNS3NekVylTIgkUyak6KV1b0R3XwhgZh0NGw8sc/flwdh7gcnAgnTmFslFyoRIMmVCJDvnbA0HViUsrw7WtcvMpppZrZnV1tXVZbw4kQgoEyLJupwJ5UHyUad7tszsCWBIO0/d4O4PhV2Qu08HpgPU1NR42NsXSZcyIZIsm5lQHiQfddpsufu5ac6xBhiZsDwiWCeSl5QJkWTKhEjHsnEYcQ4w1sxGm1kFMAWYmYV5RXKVMiGSTJmQgpbupR8uMbPVwGnAI2b2eLB+mJnNAnD3JuBa4HFgIXCfu89Pr2yR3KRMiCRTJkTSfzfiA8AD7axfC0xKWJ4FzEpnLpF8oEyIJFMmRHQFeREREZGMUrMlIiIikkFqtkREREQySM2WiIiISAap2RIRERHJIDVbIiIiIhmkZktEREQkg9RsiYiIiGRQuleQ/2czm29mMTOrSTFmpJk9bWYLgrFfSmdOkVymTIgkUyZE0t+zNQ/4GPBcB2OagOvcfRxwKvBFMxuX5rwiuUqZEEmmTEjRS/d2PQsBzKyjMeuAdcHjHWa2EBgOLEhnbpFcpEyIJFMmRNJstg6UmR0KnAC80sGYqcDUYHGnmS1OMXQgsCnM+rIoX2vP17ohnNoPCaOQRMpEq3ytPV/rhjzNxAHkAfT/E4V8rRsymIlOmy0zewIY0s5TN7j7Q12d3cx6An8Gvuzu21ONc/fpwPQubK/W3ds9/p/r8rX2fK0bwq1dmQhfvtaer3VD/maiq3kItqf/nyzL17ohs7V32my5+7npTmJm5cQD9Ht3/0u62xOJkjIhkkyZEOlYxi/9YPED9b8BFrr7bZmeTyTXKRMiyZQJKXTpXvrhEjNbDZwGPGJmjwfrh5nZrGDY6cC/AGeb2ZvBv0lpVR3Xpd3IOSpfa8/XuiFLtSsTBy1fa8/XukGZyHX5Wnu+1g0ZrN3cPVPbFhERESl6uoK8iIiISAap2RIRERHJoLxstsxsopktNrNlZjYt6noAzGyFmb0dnGtQG6zrb2azzWxp8LFfsN7M7KdB/W+
Z2YkJ27kiGL/UzK7IUK13mNlGM5uXsC60Ws3spOBrsSx4beqrGaZf9zfMbE1753mY2fVBDYvN7PyE9e1+/5jZaDN7JVj/RzOrCKPuTMvFPIAyoUxER5kIpVZlIsxMuHte/QNKgXeAMUAFMBcYlwN1rQAGtln3fWBa8Hga8L3g8STgUcCI35rilWB9f2B58LFf8LhfBmo9EzgRmJeJWoFXg7EWvPaCDNb9DeAr7YwdF3xvVAKjg++Z0o6+f4D7gCnB418C/xr191UXviY5mYegNmVCmVAmkmtTJoo0E/m4Z2s8sMzdl7t7A3AvMDnimlKZDNwZPL4TuDhh/V0e9zLQ18yGAucDs919i7u/D8wGJoZdlLs/B2zJRK3Bc73d/WWPfzfelbCtTNSdymTgXnevd/d3gWXEv3fa/f4J/qo6G7g/eH3i1yCX5VMeQJlQJjJPmQiBMhFuJvKx2RoOrEpYXh2si5oDfzOz1yx+OwmAwR6/5xfAemBw8DjV5xDl5xZWrcODx23XZ9K1wa7rO1p2a3dSX3vrBwBb3b2pzfpcl6t5AGVCmYiGMpE5ysRBZiIfm61c9Q/ufiJwAfE71p+Z+GTQvefFdTbyqVbgF8BhwPHEb2T7w2jLkQTKRDSUidylTEQj8kzkY7O1BhiZsDwiWBcpd18TfNwIPEB8N+SGYHcpwceNwfBUn0OUn1tYta4JHrddnxHuvsHdm909BvyK+Nf9YOreTHzXd1mb9bkuJ/MAygTKRFSUicxRJg4yE/nYbM0BxgbvCKgApgAzoyzIzHqYWa+Wx8CHgXlBXS3vvrgCaLkh60zg08E7OE4FtgW7Zh8HPmxm/YLdnB8O1mVDKLUGz203s1OD49ufTthW6FqCH7iE+Ne9pe4pZlZpZqOBscRPyGz3+yf4K+1p4J+C1yd+DXJZzuUBlAllIlLKROYoEwebCc+Bd2gc6D/i73xYQvzdAjfkQD1jiL9bYS4wv6Um4sd3nwSWAk8A/YP1Btwe1P82UJOwrauJn6S3DLgqQ/XeQ3xXaiPxY87XhFkrUBN8M78D/C/BnQoyVPfdQV1vBcEZmjD+hqCGxSS80yXV90/w//hq8Pn8CaiM+nsrH/OgTCgTUf9TJpSJXMuEbtcjIiIikkH5eBhRREREJG+o2RIRERHJIDVbIiIiIhmkZktEREQkg9RsiYiIiGSQmi0RERGRDFKzlYfMbIaZfTvqOkRyhTIhkkyZyC1qtkREREQySM2WiIiISAap2YqImbmZHZ6w3LrL18wmmNlqM7vOzDaa2TozuyrFdnqZ2dNm9tPgvlQzzOx2M3vEzHaY2StmdljC+A+Z2Rwz2xZ8/FCw/h/N7O2EcbPNbE7C8t/N7OLg8Qoz+4qZvRVs549mVhX+V0mKiTIhkkyZKBxqtnLXEKAPMJz4vZ1ut/iNPFuZWct9ql5w93/3ffdemgJ8E+hH/P5N3wnG9wceAX5K/B5XtwGPBNt5mfiNNweaWTlwHDAsCGk34vex+nvC9J8AJgKjg7FXhvvpi+xHmRBJpkzkCTVbuasRuNndG919FrATODLh+WHAs8Cf3P3GNq99wN1fdfcm4PfA8cH6C4Gl7n63uze5+z3AIuCj7r6H+J3OzwROIn6z1BeA04FTg9dtTpjjp+6+1t23AA8nzCGSKcqESDJlIk+URV2ApLQ5CEGL3UDPhOULiQfrl+28dn2K1w0D3msz9j3ifxVBPJQTiN8p/VngfeAsoD5Y7miOYak/FZFQKBMiyZSJPKE9W9HZDXRPWB5ygK//FfAYMMvMenTxNWuBQ9qsGwWsCR63hOjM4PGzxEN0FvuHSCRsyoRIMmWiQKjZis6bwOVmVmpmE4l/ox6oa4HFwMPB8fLOzAKOMLPLzazMzC4FxgF/DZ5/kfgu6PHAq+4+n3joTgGeO4j6RA6EMiGSTJkoEGq2ovMl4KPAVuCTwIMHuoHgRMepxHfnPtTZOz2CY+kfAa4DNgNfBT7i7puC53cBrwP
z3b0heNlLwHvuvvFA6xM5QMqESDJlokDYvjcmiIiIiEjYtGdLREREJINCabbM7I7gomrzUjxvwcXUlgUXODsxjHlFcpUyIZJMmZBiFtaerRnEL1yWygXA2ODfVOAXIc0rkqtmoEyIJJqBMiFFKpRmy92fA7Z0MGQycJfHvQz0NbOhYcwtkouUCZFkyoQUs2ydszUcWJWwvJp9F0gTKUbKhEgyZUIKVs5dQd7MphLfhUyPHj1OOuqooyKuSCTutdde2+Tu1dmeV5mQXBVFJpQHyWWpMpGtZmsNMDJheQT7rkabxN2nA9MBampqvLa2NvPViXSBmbW9hUU6lAnJe1FkQnmQXJYqE9k6jDgT+HTwbpNTgW3uvi5Lc4vkImVCJJkyIQUrlD1bZnYP8XslDTSz1cDXgXIAd/8l8cv/TwKWEb/X01VhzCuSq5QJkWTKhBSzUJotd7+sk+cd+GIYc4nkA2VCJJkyIcVMV5AXERERySA1WyIiIiIZpGZLREREJIPUbImIiIhkkJotERERkQxSsyUiIiKSQWq2RERERDJIzZaIiIhIBqnZEhEREcmgUJotM5toZovNbJmZTWvn+VFm9rSZvWFmb5nZpDDmFclVyoRIMmVCilnazZaZlQK3AxcA44DLzGxcm2E3Ave5+wnAFODn6c4rkquUCZFkyoQUuzD2bI0Hlrn7cndvAO4FJrcZ40Dv4HEfYG0I84rkKmVCJJkyIUUtjGZrOLAqYXl1sC7RN4BPBXd6nwX8W6qNmdlUM6s1s9q6uroQyhPJOmVCJFlomVAeJB9l6wT5y4AZ7j4CmATcbWbtzu3u0929xt1rqqurs1SeSNYpEyLJupQJ5UHyURjN1hpgZMLyiGBdomuA+wDc/SWgChgYwtwiuUiZEEmmTEhRC6PZmgOMNbPRZlZB/MTGmW3GrATOATCzo4mHSPt/pVApEyLJlAkpamk3W+7eBFwLPA4sJP5ukvlmdrOZXRQMuw74rJnNBe4BrnR3T3dukVykTIgkUyak2JWFsRF3n0X8hMbEdTclPF4AnB7GXCL5QJkQSaZMSDHTFeRFREREMkjNloiIiEgGqdkSERERySA1WyIiIiIZpGZLREREJIPUbImIiIhkkJotERERkQxSsyUiIiKSQaE0W2Y20cwWm9kyM5uWYswnzGyBmc03sz+EMa9IrlImRJIpE1LM0r6CvJmVArcD5wGrgTlmNjO4GnDLmLHA9cDp7v6+mQ1Kd16RXKVMiCRTJqTYhbFnazywzN2Xu3sDcC8wuc2YzwK3u/v7AO6+MYR5RXKVMiGSTJmQohZGszUcWJWwvDpYl+gI4Agze8HMXjaziSHMK5KrlAmRZMqEFLVQbkTdxXnGAhOAEcBzZnasu29tO9DMpgJTAUaNGpWl8kSyTpkQSdalTCgPko/C2LO1BhiZsDwiWJdoNTDT3Rvd/V1gCfFQ7cfdp7t7jbvXVFdXh1CeSNYpEyLJQsuE8iD5KIxmaw4w1sxGm1kFMAWY2WbMg8T/WsHMBhLfXbw8hLlFcpEyIZJMmZCilnaz5e5NwLXA48BC4D53n29mN5vZRcGwx4HNZrYAeBr4T3ffnO7cIrlImRBJpkxIsTN3j7qGlGpqary2tjbqMkQAMLPX3L0myhqUCcklUWdCeZBckyoTuoK8iIiISAap2RIRERHJIDVbIiIiIhmkZktEREQkg9RsiYiIiGSQmi0RERGRDFKzJSIiIpJBarZEREREMkjNloiIiEgGhdJsmdlEM1tsZsvMbFoH4z5uZm5mkV6FWyTTlAmRZMqEFLO0my0zKwVuBy4AxgGXmdm4dsb1Ar4EvJLunCK5TJkQSaZMSLELY8/WeGCZuy939wbgXmByO+O+BXwP2BvCnCK5TJkQSaZMSFELo9kaDqxKWF4drGtlZicCI939kc42ZmZTzazWzGrr6upCKE8k65QJkWShZUJ5kHyU8RPkzawEuA24rivj3X26u9e4e011dXVmixOJgDI
hkuxAMqE8SD4Ko9laA4xMWB4RrGvRCzgGeMbMVgCnAjN18qMUMGVCJJkyIUUtjGZrDjDWzEabWQUwBZjZ8qS7b3P3ge5+qLsfCrwMXOTutSHMLZKLlAmRZMqEFLW0my13bwKuBR4HFgL3uft8M7vZzC5Kd/si+UaZEEmmTEixKwtjI+4+C5jVZt1NKcZOCGNOkVymTIgkUyakmOkK8iIiIiIZpGZLREREJIPUbImIiIhkkJotERERkQxSsyUiIiKSQWq2RERERDJIzZaIiIhIBqnZEhEREcmgUJotM5toZovNbJmZTWvn+f8wswVm9paZPWlmh4Qxb7Zt2lnPjQ++TUNTLOpSJMcVSyZEukqZkGKWdrNlZqXA7cAFwDjgMjMb12bYG0CNux8H3A98P915s2l3QxMrNu3i239dwO9eXsmj89ZFXZLksGLIhMiBUCak2IVxu57xwDJ3Xw5gZvcCk4EFLQPc/emE8S8Dnwph3qx4ZvFGrvztnKjLkPxS0JkQOQjKhBS1MA4jDgdWJSyvDtalcg3waKonzWyqmdWaWW1dXV0I5R2855duarfRmrdmWwTVSB4p2EyIHKTQMqE8SD7K6gnyZvYpoAb4Qaox7j7d3Wvcvaa6ujp7xbXjU795pd31v/r7u1muRApVvmVCJNM6y4TyIPkojGZrDTAyYXlEsC6JmZ0L3ABc5O71IcybURt37O3w+Q//6Fm+/tA8djc0ZakiySMFmQmRNCgTUtTCaLbmAGPNbLSZVQBTgJmJA8zsBOD/iAdoYwhzZtT2vY2M/86THY5ZsmEnd770Hl+6980sVSV5pOAyIZImZSJP7G5o4oo7XuW9zbuiLqWgpN1suXsTcC3wOLAQuM/d55vZzWZ2UTDsB0BP4E9m9qaZzUyxuZzwsyeXdnns7AUbMliJ5KNCzIRIOpSJ/PH0ojqeXVLHWT94hh8/sSTqcgpGGO9GxN1nAbParLsp4fG5YcyTSS8v30z3ilI272xg1ZY9B/z6mx6ax6EDenD1P4zOQHWSbwohE8Xg5eWbGTOwB4N6V0VdSsFTJvJDie17/OMnlnLSIf04Y6zOjUtXKM1Wvtu0s54p018+6Nf//pX3uOul9wDo36OCi0/o6E02IpILfvLEUn4U/OW+/LuTKEn8LSNSpP78evKpdCu37I6oksKi2/UANd9+Iq3X3/DAvNbHX/6jzuESyXW/ef7d1kYL4NfPL4+wGpFoLdmwg5pvz2bjjr08sTD51JgbHpjHC8s2RVRZ4Sj6ZuupReGfc6VvTJHc9f3HFvGtvy5IWrdkw86IqhGJ3mfurGXTzgam/fntdp//5K/bvwySdF1RN1svvrOJq2fUhr7dTTv1jmWRqK3ftpdX393C5p31rZdo2dvYzM+feWe/sS+9s5nGZt3zVIpTy6HCpxbpTaCZUnTnbD08dy3NMWfzrob9/roVkcLg7px6S/LlW44c3IvFG3a0O37N1j2MveFRnvvPf2TUgO7ZKFEkJ+yq79q1Iu94/l29ASwNBd9sbdnVwLNLNvLQm2s5pH937gxOZM+kN1Zu5eXlW7jn1ZXMvenD9OlenvE5RWSf//rzW/utS9VoJbpt9mJ+POWETJQkklPcHYDLftW1N4fd/NcFrc3WnBVbOGpIL3pV6XdbVxVss1W3o547X1zB/z69LOtzz3hxRevj9dv3qtkSyaK9jc3cV7v6oF774Jtr+dbFx+iXiBS0uau2Mvn2FzjziGreWt31e/1+6JYnuXfqafzzL19qXfeHz57Chw4bSGNzjPLSoj4zqUMF95Vpao7xP48v5uTvPBFJo9XWR3/2/H7rHnhjtc7rEsmQh+euTev1x37jb62P73xxBcvrdPK8FI69jc1Mvv0FAJ5bcmA38l67bS/rtyffyu7K387hoTfXMPaGR5n44+dCq7PQFEyztbexmYfnruXwGx7NiSarRUObk27Xb9vL//vjXE75bse3AxKRA/eHV1byn/fvfwjxYDz05hq
+PnM+Z//w2VC2J5ILfvi3xWm9/po75yQtNzTFeGzeegAWrd/BF3//Ojv2NvLYvPWs39bxPYaLSSiHEc1sIvAToBT4tbvf2ub5SuAu4CRgM3Cpu68IY26A+19bzVf+NDeszYXu6hlzeGrRRt69ZVLrO56aYx5xVZJJUWei2DQ0xTjum4+ztzGcdxQeOu2RULbT1v89+w63PLqIF6adzfC+3TIyR65SJqLT0BTjS/e+wS0fO5aNO9I7qrJj7/4n1D8aNFsAj7y9jo079jJnxfut6/p1L+eGC8dx8fHDqG+KsWVXAyP7F9cbUazlJLmD3oBZKbAEOA9YTfyGo5e5+4KEMV8AjnP3z5vZFOASd7+0s23X1NR4bW3qSzOs3LybGx58m78vzd/rWj3zlQms2LyLP722mlVbdnPu0YMpMfifv8UvuHj5KaMA+McjB1G3o56F67Yzblhv1m/bS0VZCXU76vnGRR/A3TFLfQVsd+epRRs5/fCBPPLWOn7x7Dv8+NLjaWyOUd8U4/iRfakqL+2w1pbvlZZ5Opuz0JjZa+5e04VxkWWiGG3b08gHv/m3zgem6dEvncERg3tRepBXmt+4Y+9+N7i/8cKjuer00Qe9zahFnYmu5CHx51Ys5sTcKS2x1mUz2Lyrgd5V5VSUlez3cy5RfVMzFaUlxDx+W5u2Y5qCP6abYt56/lLLiLZ3KGj787OrP09bxiWOj8WckhJrrX3dtr3071HR+jM9U388pKP2xnP569y1jK7uSXmJ0b9nBYN6VbG7oYkBPSpZu20Ph1X3pG5HPdv2xJuzyrJS3J1NOxsoKzF6VpVRXlpCLOa0dDLNMaeibN9Bu8bmGGXB/3d7Wr5m7vH/t4qyktb/x5iDGZSaYcH/d2f/T6kyEUazdRrwDXc/P1i+PvgEbkkY83gw5iUzKwPWA9XeyeSpgvTV++ce9AmwklmDelVSXlrCmq17GNSrkobmGHsammn53uzfvYL3dzeyp7GZnpVlDO1TxdKNHZ8Tc/ignpSVGN0qSnl79Tb696igsryEitISylv/Ga+v3NqlGqvKSxjYs5KSoKiW2spKjCevm5DydQfwiyXrmfjSvW+wYO321h84rT9AWgYkbDXVmJaZPWFw67o2Ve23/XZev2+57Tba2X4nNXX0Oe3o4lvXM+WoIb1wj3/eMY/XGV+GWPA45s7q9w/snqu9q8oY1Luq9Qd9aYlRYkaJxX9xl5hRasayup1s2dXAkYN7tf5CT/VtlPT1bOf/Ib4+uYa/fOH0lDVGnYmOmq22DUZVeUloez7bGtGvG9v2NLa710cKz6JvTUy5cyJVJsI4jDgcWJWwvBo4JdUYd28ys23AAGC/XVJmNhWYCjBq1Kh2Jxw9sCcfGNab+Wu3p128hOu4EX0AY2d9E/VNMQb2rGDg4Ep6VMa/1QzYuqeRdzft4vDqnq2v61VZlvRL84yxA/n70k1UlJVwWHUPYg57GpoZ1KuS4f260buqnMryEpqanaaY09gcY1ifKta2c45AZVkJTnxXeveKUob0qWJEv+70T3iXqENr8xWCrGdiRL9urYeoreVv6eQPSX+N7VvX/pikr0TrGEvxmuTnk9a1rmr/te29PlVNpHjNHS+8u9/z2VJi8a99SyPU8tevQetySbC8+v01nW0uSe9u5RwxuCexGDS74+40x+INXczje2hiMehVVUZTc2y/64O19zWOr9///ynVmO4VHe/tPgChZaIreYi1c5rG0D7deHfTrg6LHNm/G6u2HFhTDHDyof1xdx58M703ZwD0rCxjZ8R/QBSzrnz9OzsK1J6cu/SDu08HpkP8r5b2xvzrhMP41wmHAfDqu1u46revsquhOXtFhmR4327MuOpkIP5Xa6+qctydFZt3M/255dQc2o/zPzCEQwd0p6E5xq76ZrbvaaRbRSk9Kstojjm9KssO+Aa6723eRXPMGZPQ7Eju6kom/vP8o7JaUy659OSRnJ/hd0GdOqY/9049La1
t3Hbp8Ul7W6477wiuOP1QeusyEwekK3koKTFW3HrhgWwzlFMicukabe7xYG4oigAAIABJREFUxry0xHLqMOLHThjOzvom/vfyE9nT0ExZqfFO3U7GDe3deni2LOESEpt21jOwZ2WEFYcjjGZrDTAyYXlEsK69MauD3cN9iJ8Ambbxo/sz/+aJ7G1s5j/ue5NZb6/v/EVZdvc14zljbDUQ/4trzNdm8bmzxnD9BUe3O35Q7yrGj+6ftK6yrJTKslL696hIu55DBvRIexvSoUgzUWyOHNKLMQN7sLyTvRYHamifKtYFe0rTbbRafPzEEfz59dUsvHki3cLba5QPcjoThXjuqZlRGnxaC24+n3E3Pc554wbTq7KMv7xxYHtZD9Ts/3cm5/1o3x9AP5lyPBOOHESfbsl/WLScW3XciL4pt1UIjRaE02zNAcaa2WjiYZkCXN5mzEzgCuAl4J/4/+3deZhcZZn///en0+mEkJ3sgRCQIARkbTYRUAgYQA2gIrgQHDGuM/JzGaNRhnEZM/rVYRi3CYsGVBBRIA6bJKKgEqARkIQACXsgSxPWECDb/fujTifVnaruTtc5tXR9XtfVV5869dQ5d3fX3c9d5zznPPDHrsambK/+ffvw4w8dzIvr1vP4c6+ybPVannp+HXc9/jwPr3qFBolj9xrF1feUf6xXW6EF2/+Jy2pSVeREPbnx3KOY9sO/8tDKru8S3x33nXc8i599OfUJeL9/+v58//T9U91mjXBOVNCApsYt/U5EbCm2puw9ivlLtm8+xPPeNZlv/N+DNCg3gBzgkW+dyN8efY6zf3Y3F53VzKTRg9zPdVBysZWcW/8scDO5S3ovjYjFkr4BtETEPOAS4HJJy4DnySVaJoYOaOLACU0cOGFYwec/d9wkjvrurVnt3qzqcqIe9GvswwcPm8B51y0ueVvfnLYPQwc0ccTuOwFw0Vldjv+2LjgnqofU/gP/9p5iPHjXXN/61ZP25lvXL+HUA8fT1NjA2988ygVWJ0q+GjFLWV3m/tr6Tex93k2pb7eQi85q5vjJo8uyL8tWd6+8ypJv/VBcRPDq+k2sfX3jNpNQbw93GN1X6ZxwPpTursef5/T/vaPo821Hsto8MftkWl95g5GD+vHQypeZuNOOPRow3ltleTVizdmhqQ+/POcwHnjmJSaPHcyNi1ZwxV1Pd/3C7fD4d05i0TMv85adh6S6XTMrTBID+zUysF8jD31zKn0axJzbHuN7N3f/jtlnHrpL143MepFDdxvOP84/gTsfe5516zfyuSvv2/LcHV85lrFDduCOx9Yg4Lvv2w+AkYNy46j2GjO4EiHXpLostgCO3GMER+4xAoCj9xyZerElyYWWWYW0fdL+zDv24OXXNvC/tz3W5WsG9W/kO6ftl3VoZlVncP++W87ATDtg/DbP+1R66XrN3Iiluu1L76h0CGaWgX+duhdT9xnDqQdu24nkO+9dk8sUkZnVm7o9stXRhJ0G8MTsk3n2xdd46+w/VjocM0tJnwbx048cDMA1nVzyftzeHltpZtnwka0Oxg3dgZPeMqZHr/339+zDhWceyEPfnJpyVGaWhnmfLTz1zL7jB6dyDzszs0J8ZKuACz5wIOe/ez2H/kfxK5pu/9d3bLmFRGfzJJlZ9Sh280TR+25qaWbVw0e2CmhqbGDU4P5c95niE7DuMnzrPGQutMxqx11fPY5dhu/Qbt12znhlZrZdXGx1Yv9dhjJmcP+izx+wy1BmnVR4yh0zq06jBvfnpx8+mHFD+jP/80cD8J4CV2CZmaWlpNOIkoYDvwYmAk8Ap0fECx3aHAD8BBgMbAK+HRG/LmW/5TTvn4/k8dZXuW1pKz+69VEAvnnKvgBc28mRL6tP9ZATvcE+44bwt68cB+SmGunbx4e2suKcMCv9yNZMYEFETAIWJI87WgecFRH7AFOBCyQVn3Wyyowa1J/Ddt+JL71zLy7/2KH8beaxfPiwCZUOy6pXr8+J3qa
psaFXTkRcRZwTVvdKLbamAXOT5bnAKR0bRMQjEbE0WX4WWA2M7NiuFhw1aSTjhu7gf8zWmbrKCbNucE5Y3Su12BodESuS5ZVApzeqkXQo0AQ82kmbGZJaJLW0traWGJ5Z2TknzNpLNSecD1aLuhyzJWk+UOjGU7PyH0RESCo6q7WkscDlwPSI2FysXUTMAeZAbpLRruIzKzfnhFl75cwJ54PVoi6LrYiYUuw5SaskjY2IFUmSrC7SbjBwPTArIhb2OFqzKuCcMGvPOWHWuVJPI84DpifL04HrOjaQ1ARcA1wWEVeXuD+zauecMGvPOWF1r9RiazZwvKSlwJTkMZKaJV2ctDkdOBo4W9J9ydcBJe7XrFo5J8zac05Y3VNE9Z7ybm5ujpaWlkqHYQaApHsiormSMTgnrJpUOiecD1ZtiuWE7yBvZmZmliEXW2ZmZmYZcrFlZmZmliEXW2ZmZmYZcrFlZmZmliEXW2ZmZmYZcrFlZmZmliEXW2ZmZmYZcrFlZmZmlqGSiy1JwyXdImlp8n1YJ20HS1ou6Yel7tesWjknzLZyPpilc2RrJrAgIiYBC5LHxXwTuC2FfZpVM+eE2VbOB6t7aRRb04C5yfJc4JRCjSQdDIwG/pDCPs2qmXPCbCvng9W9NIqt0RGxIlleSS5Z2pHUAHwf+GJXG5M0Q1KLpJbW1tYUwjMrO+eE2VbOB6t7jd1pJGk+MKbAU7PyH0RESIoC7T4N3BARyyV1uq+ImAPMgdyM7t2Jz6zcnBNmWzkfzDrXrWIrIqYUe07SKkljI2KFpLHA6gLNjgCOkvRpYCDQJGltRHR27t6sajknzLZyPph1rlvFVhfmAdOB2cn36zo2iIgPtS1LOhtodhJZL+acMNvK+WB1L40xW7OB4yUtBaYkj5HULOniFLZvVmucE2ZbOR+s7imiek95Nzc3R0tLS6XDMANA0j0R0VzJGJwTVk0qnRPOB6s2xXLCd5A3MzMzy5CLLTMzM7MMudgyMzMzy5CLLTMzM7MMudgyMzMzy5CLLTMzM7MMudgyMzMzy5CLLTMzM7MMlVRsSRou6RZJS5Pvw4q0myDpD5KWSHpQ0sRS9mtWrZwTZu05J8xKP7I1E1gQEZOABcnjQi4DvhcRewOHUngiUrPewDlh1p5zwupeqcXWNGBusjwXOKVjA0mTgcaIuAUgItZGxLoS92tWrZwTZu05J6zulVpsjY6IFcnySmB0gTZ7Ai9K+p2keyV9T1KfYhuUNENSi6SW1tbWEsMzKzvnhFl7qeaE88FqUWNXDSTNB8YUeGpW/oOICEmFZrVuBI4CDgSeAn4NnA1cUmh/ETEHmAO5SUa7is+s3JwTZu2VMyecD1aLuiy2ImJKseckrZI0NiJWSBpL4XPsy4H7IuKx5DXXAodTpGMxq3bOCbP2nBNmnSv1NOI8YHqyPB24rkCbu4GhkkYmj48FHixxv2bVyjlh1p5zwupeqcXWbOB4SUuBKcljJDVLuhggIjYBXwQWSHoAEHBRifs1q1bOCbP2nBNW97o8jdiZiFgDHFdgfQtwTt7jW4D9StmXWS1wTpi155ww8x3kzczMzDLlYsvMzMwsQy62zMzMzDLkYsvMzMwsQy62zMzMzDLkYsvMzMwsQy62zMzMzDLkYsvMzMwsQyUXW5KGS7pF0tLk+7Ai7b4rabGkJZIulKRS921WjZwTZls5H8zSObI1E1gQEZOABcnjdiS9FTiS3N2B9wUOAY5JYd9m1cg5YbaV88HqXhrF1jRgbrI8FzilQJsA+gNNQD+gL7AqhX2bVSPnhNlWzgere2kUW6MjYkWyvBIY3bFBRNwB3AqsSL5ujoglKezbrBo5J8y2cj5Y3evWRNSS5gNjCjw1K/9BRISkKPD6PYC9gZ2TVbdIOioibi/QdgYwA2DChAndCc+s7JwTZls5H8w6161iKyKmFHtO0ipJYyNihaSxwOoCzU4FFkbE2uQ1NwJHANskUkTMAeYANDc3b5O
UZtXAOWG2lfPBrHNpnEacB0xPlqcD1xVo8xRwjKRGSX3JDXz0IWLrrZwTZls5H6zupVFszQaOl7QUmJI8RlKzpIuTNlcDjwIPAPcD90fE71PYt1k1ck6YbeV8sLrXrdOInYmINcBxBda3AOcky5uAT5S6L7Na4Jww28r5YOY7yJuZmZllysWWmZmZWYZcbJmZmZllyMWWmZmZWYZcbJmZmZllyMWWmZmZWYZcbJmZmZllyMWWmZmZWYZcbJmZmZllqKRiS9L7JS2WtFlScyftpkp6WNIySTNL2adZNXNOmLXnnDAr/cjWIuA04LZiDST1AX4EnAhMBs6UNLnE/ZpVK+eEWXvOCat7Jc2NGBFLACR11uxQYFlEPJa0vRKYBjxYyr7NqpFzwqw954RZecZsjQeeznu8PFlXkKQZkloktbS2tmYenFkFOCfM2ut2TjgfrBZ1eWRL0nxgTIGnZkXEdWkHFBFzgDkAzc3Nkfb2zUrlnDBrr5w54XywWtRlsRURU0rcxzPALnmPd07WmdUk54RZe84Js86V4zTi3cAkSbtJagLOAOaVYb9m1co5Ydaec8J6tVJv/XCqpOXAEcD1km5O1o+TdANARGwEPgvcDCwBroqIxaWFbVadnBNm7TknzEq/GvEa4JoC658FTsp7fANwQyn7MqsFzgmz9pwTZr6DvJmZmVmmXGyZmZmZZcjFlpmZmVmGXGyZmZmZZcjFlpmZmVmGXGyZmZmZZcjFlpmZmVmGXGyZmZmZZajUO8i/X9JiSZslNRdps4ukWyU9mLT9XCn7NKtmzgmz9pwTZqUf2VoEnAbc1kmbjcAXImIycDjwGUmTS9yvWbVyTpi155ywulfqdD1LACR11mYFsCJZfkXSEmA88GAp+zarRs4Js/acE2ZlHrMlaSJwIHBnOfdrVq2cE2btOSesN+ryyJak+cCYAk/NiojrursjSQOB3wLnRsTLnbSbAcxIHq6V9HCRpiOA57q7/ypTq7HXatyQTuy7gnMiI7Uae63GDTWaE9uRD+C/TyXUatyQYk50pIgocbsg6U/AFyOipcjzfYH/A26OiB+UvMPcNlsiouBgy2pXq7HXatxQ/tidE9unVmOv1bjBOVHtajX2Wo0bso0989OIyp2ovwRYklYCmdUy54RZe84J6+1KvfXDqZKWA0cA10u6OVk/TtINSbMjgY8Ax0q6L/k6qaSozaqUc8KsPeeEWelXI14DXFNg/bPAScnyX4Dil6H03JwMtlkutRp7rcYNZYrdOdFjtRp7rcYNzolqV6ux12rckGHsqYzZMjMzM7PCPF2PmZmZWYZqstiSNFXSw5KWSZpZ6XgAJD0h6YFkrEFLsm64pFskLU2+D0vWS9KFSfz/kHRQ3namJ+2XSpqeUayXSlotaVHeutRilXRw8rtYlrw2ldMDReI+X9IzhcZ5SPpKEsPDkt6Zt77g+0fSbpLuTNb/WlJTGnFnrRrzAZwTzonKcU6kEqtzIs2ciIia+gL6AI8CuwNNwP3A5CqI6wlgRId13wVmJsszgf9Mlk8CbiQ3RuFw4M5k/XDgseT7sGR5WAaxHg0cBCzKIlbgrqStkteemGHc55O7nLxj28nJe6MfsFvynunT2fsHuAo4I1n+KfCpSr+vuvE7qcp8SGJzTjgnnBPtY3NO1GlO1OKRrUOBZRHxWESsB64EplU4pmKmAXOT5bnAKXnrL4uchcBQSWOBdwK3RMTzEfECcAswNe2gIuI24PksYk2eGxwRCyP3brwsb1tZxF3MNODKiHgjIh4HlpF77xR8/ySfqo4Frk5en/87qGa1lA/gnHBOZM85kQLnRLo5UYvF1njg6bzHy5N1lRbAHyTdo9wdjgFGR27OL4CVwOhkudjPUMmfLa1YxyfLHddn6bPJoetL2w5rdxFfofU7AS9GxMYO66tdteYDOCecE5XhnMiOc6KHOVGLxVa1eltEHAScSG7G+qPzn0yq95q49LOWYgV+ArwJOIDcRLb
fr2w4lsc5URnOierlnKiMiudELRZbzwC75D3eOVlXURHxTPJ9Nbl7yhwKrEoOl5J8X500L/YzVPJnSyvWZ5LljuszERGrImJTRGwGLiL3e+9J3GvIHfpu7LC+2lVlPoBzAudEpTgnsuOc6GFO1GKxdTcwKbkioAk4A5hXyYAk7ShpUNsycAKwKImr7eqL6UDbhKzzgLOSKzgOB15KDs3eDJwgaVhymPOEZF05pBJr8tzLkg5Pzm+flbet1LUlfuJUcr/3trjPkNRP0m7AJHIDMgu+f5JPabcC70ten/87qGZVlw/gnHBOVJRzIjvOiZ7mRFTBFRrb+0XuyodHyF0tMKsK4tmd3NUK9wOL22Iid353AbAUmA8MT9YL+FES/wNAc962/oncIL1lwEczivcKcodSN5A75/yxNGMFmpM386PAD0lunptR3Jcncf0jSZyxee1nJTE8TN6VLsXeP8nf8a7k5/kN0K/S761azAfnhHOi0l/OCedEteWE7yBvZmZmlqFaPI1oZmZmVjNcbJmZmZllyMWWmZmZWYZcbJmZmZllyMWWmZmZWYZcbJmZmZllyMWWmZmZWYZcbNUgST+X9K1Kx2FmZtXJ/UR1cbFlZmZmliEXW2ZmZmYZcrFVIZJC0h55j7cc8pX0dknLJX1B0mpJKyR9tMh2Bkm6VdKFySSgP5f0I0nXS3pF0p2S3pTX/q2S7pb0UvL9rcn6d0h6IK/dLZLuznt8u6RTkuUnJH1R0j+S7fxaUv/0f0tm3SfpS5J+22HdhZL+W9IQSZckufSMpG9J6pO02UPSn5P38nOSfl2Zn8CsPfcTvYeLreo1BhgCjCc3keaPklnTt5DUNinoXyPiX2LrRJdnAP8ODCM3Wea3k/bDgeuBC8lNKPoD4PpkOwvJzXI+QlJfYD9gXJKkO5CbNPT2vN2fDkwFdkvanp3uj2+23X4BTJU0FEBSI7lcuAz4ObAR2AM4EDgBOCd53TeBP5DLl52B/ylr1GY9536iRrjYql4bgG9ExIaIuAFYC7w57/lxwJ+B30TE1zq89pqIuCsiNgK/BA5I1p8MLI2IyyNiY0RcATwEvDsiXgPuBo4GDiY3M/1fgSOBw5PXrcnbx4UR8WxEPA/8Pm8fZhURESuA24D3J6umAs8By4GTgHMj4tWIWA38F7nOBnK5tiswLiJej4i/lDdysx5zP1EjGisdgBW1JkmCNuuAgXmPTyaXWD8t8NqVRV43DniyQ9snyX0qglxSvp1c5/Rn4AXgGOCN5HFn+xhX/EcxK5u5wKeAi4APA5eTK6T6AisktbVrAJ5Olv+V3NGtuyS9AHw/Ii4tZ9BmPeR+okb4yFblrAMG5D0es52vvwi4CbhB0o7dfM2z5DqefBOAZ5LltiQ6Oln+M7kkOoZtk8isGl0L7CdpX+Bd5D6xP02uIxgREUOTr8ERsQ9ARKyMiI9HxDjgE8CP88fJmFWQ+4lewsVW5dwHfFBSH0lTyb1Rt9dngYeB3yfny7tyA7CnpA9KapT0AWAy8H/J838jdwj6UOCuiFhMLukOI3d6xqyqRcTrwNXAr8i9h59KTi/+Afi+pMGSGiS9SdIxAJLeL2nnZBMvAAFsrkT8Zh24n+glXGxVzueAdwMvAh8i94l8uyQDHWeQO5x7XVdXeiTn0t8FfAFYQ+70ybsi4rnk+VeBvwOLI2J98rI7gCeTcS5mtWAu8BZypxDbnAU0AQ+SK6iuBsYmzx0C3ClpLTAP+FxEPFa+cM2Kcj/RS2jrhQlmZrVP0gRyA3rHRMTLlY7HzMxHtsys15DUAHweuNKFlplVi1SKLUmXJjdVW1TkeSU3U1uW3ODsoDT2a2bWJhkA/DJwPPBvFQ7HOnA/YfUsrSNbPyd3T5tiTgQmJV8zgJ+ktF8zMyA3liQiBkbEPhHxdNevsDL7Oe4nrE6lUmxFxG3A8500mQZcFjkLgaGSxnbS3szMehH3E1bPynVT0/FsvYEg5K6KGA+s6NhQ0gx
yn2rYcccdD95rr73KEqBZV+65557nImJkJWMYMWJETJw4sZIhmG2Rck50q59wH2HVrFhOVN0d5CNiDjAHoLm5OVpaWiockVmOpI53VS67iRMn4pywalGJnHAfYdWsWE6U62rEZ4Bd8h7vzNa70ZqZmbmfsF6rXMXWPOCs5GqTw4GXkrs6m5mZgfsJ68VSOY0o6QpycyWNkLSc3GXXfQEi4qfkbv9/ErCM3FxPH01jv2bVStKl5O7CvDoi9i3wvID/JpcX64CzI+Lv5Y3SrHzcT1g9S6XYiogzu3g+gM+ksS+zGvFz4IfAZUWez7/M/TByl7kfVpbIzCrA/YTVM99B3iwDvsy9PCKC9Rs3s/aNjZUOxcysqKq7GtGsTvTodigTJkwoS3C14hd3PsXXr83dkHzkoH7cPWtKhSMyM9uWj2yZVbmImBMRzRHRPHJkRW/zVXV+9/flW5ZbX3mjgpGYmRXnYsusMnyZu5lZnXCxZVYZvszdzKxOeMyWWQZ8mXt5qNIBmJl1g4stswz4MvfyiEoHYGbWDT6NaGZmZpahVIotSVMlPSxpmaSZBZ6fIOlWSfdK+oekk9LYr5nVN59GrB3uJ6yelVxsSeoD/IjcHbEnA2dKmtyh2deAqyLiQOAM4Mel7tfMzGqD+wmrd2kc2ToUWBYRj0XEeuBKcnfHzhfA4GR5CPBsCvs1Myvq4ZWvMHHm9Vx3n++oUQXcT1hdS6PYKnYn7HznAx9Orsq6AfjnYhuTNENSi6SW1tbWFMIzs3rz/KvreecFtwEw7z732VUgtX7CfYTVonINkD8T+HlE7EzucvfLJRXct++WbWalem6t7yZfg7rVT7iPsFqURrHVnTthfwy4CiAi7gD6AyNS2LeZmVU/9xNW19Iotu4GJknaTVITuYGN8zq0eQo4DkDS3uSSyMd/rdfylVdm7bifsLpWcrEVERuBzwI3A0vIXU2yWNI3JL0nafYF4OOS7geuAM5Obupo1uv4yqvKy//vIt8fouLcT1i9S+UO8hFxA7kBjfnrzstbfhA4Mo19mdWALVdeAUhqu/Lqwbw2vvIqA/c8+QIH7zqs3Tp319XB/YTVM99B3ix9qV6ha9333p/8rdIhmJltw8WWWWV0+wpdX+penIqcI/SpQzOrJi62zNKX6pVXvtR9+3nMlplVExdbZunzlVcZuu2RVh5e+UqlwzCrGS+9toFlq9dWOoy65mLLLGW+8ipbZ11615a7w5tZ1973k78x5Qd/rnQYdS2VqxHNrD1feWVm1WKpj2pVnI9smZmZmWXIxZaZ1aQXXl3PPU++UOkwzGrGxJnX89r6TZUOoy652DKzmjT3jieKPvdYq0+bmBXyyusbKh1CXUql2OpqHrikzemSHpS0WNKv0tivmVlHmzYHn/rl37c8nr9ktT/NVwH3E1bPSi62ujMPnKRJwFeAIyNiH+DcUvdrZvVNFL6B1uYCF3VeMP+RrMOxTrifqJw5tz1a6RCMdI5sbZkHLiLWA23zwOX7OPCjiHgBICJWp7BfM7NuWecjW5XmfqICHlr5Mv9xw0Pt1i1e8XKnr7nnyRfYtNl3oUlbGsVWd+aB2xPYU9JfJS2UNLXYxjw1iZmVwncrq0qp9RPuI7pv6gW3b7Puoz+7m/uffnGb9es3buamRSt570/+xk/+tKwc4dWVcg2QbwQmAW8nNyfcRZKGFmroqUnMrDv+9IgPfPQy3eon3EeUbsVLr2+z7l+uuJdP/uIeAB5e5QtM0pZGsdWdeeCWA/MiYkNEPA48Qi6pzHolDwbO3r1PbfvpHOAjl9xZ5kisG9xPVJFP/uIePnLJnTz9/Lot625avLKCEfV+aRRb3ZkH7lpyn1aQNILc4eLHUti3WdXxYODKuvPx5ysdgm3L/USVuX3pc5w/b3Glw6gbJRdb3ZwH7mZgjaQHgVuBL0XEmlL3bValPBjYLI/7Cat3qcyN2I154AL4fPJl1tsVGgx8WIc2ewJI+ivQBzg/Im4
qT3j15/KFT3L5wie56dyj2GvM4EqHU5fcT1g98x3kzSqj2xeN+Oqr9NzlU4xmW/zpkVY2bNpc6TDqgosts/SlOhjYV1+ZWRY2bQ4mzbqx0mHUBRdbZunzYGAzM9vCxZZZyjwYuHoVnuDHzPKF7wyculQGyJtZex4MbGZmbXxky8zMzCxDLrbMzMxsC8kn3NPmYsvMzMwsQ6kUW92ZBy5p915JIak5jf2amW0Xf2KvGPcT5bP42ZdYt35jj1+/0ffeSl3JA+Tz5oE7nty9g+6WNC8iHuzQbhDwOcCzxJpZZfgqq4pwP1E+r76xkZMv/AvH7TWq26+596kX2j2+cZEnpU5bGke2ujMPHMA3gf8EXk9hn2Zm2+3r1y3m6efXVTqMeuR+okzWb8wdlVrwUPenWz31x3/LKhxLpFFsFZoHbnx+A0kHAbtExPVdbcxTk5hZlu57+sVKh1CPUusn3EdYLcp8gLykBuAHwBe6095Tk5iZ1Zft6SfcR1gtSqPY6moeuEHAvsCfJD0BHA7M8+BH6808GNisHfcTVtfSKLY6nQcuIl6KiBERMTEiJgILgfdEREsK+zarOnmDgU8EJgNnSppcoJ0HA1u9cD9hda3kYqub88CZ1RMPBjbL437C6l0qcyN2NQ9ch/VvT2OfZlWs0GDgw/Ib5A8GlvSlzjYmaQYwA2DChAkph1p//vmKe3nHXqMY2M9Tw5aT+wmrZ76DvFmZ+aKRnvvjQ6tS2c7CR9eksh0zs+7wRzuz9G3PYGCAMeQGA3uMShHrN25m+qV3ccdjLpLMrPb4yJZZ+jwYOGVf+d0DLrTMrGa52DJLmQcDp++2pb55pZnVLp9GNMuABwObWSV4rvXq5CNbZlbVlq1eS+srb1Q6DLOa4LnWq5OLLTOrOv923SLO/tldAFzV8nQXrbffDYtW8M9X3MvrGzalvm0zs458GtHMqs7cO57cspzFWZHf/T13cei79xvLCfuMyWAPZpXh04j+GDg8AAAgAElEQVTVKZUjW13NAyfp85IelPQPSQsk7ZrGfs2s95N7j17B/UR5pHUacc1an7pPU8nFVjfngbsXaI6I/YCrge+Wul8zqw+utWqf+4na883/e7DSIfQqaRzZ6nIeuIi4NSLWJQ8XkrvJo5lZl1xr9QruJ8okrQ8n1973bDobMiCdYqvQPHDjO2n/MeDGFPZrZnWgwYe2egP3E2XiqxGrU1mvRpT0YaAZ+F4nbWZIapHU0trqGxmWy8LH1rB+4+YtjzdtDla89FoFIzLLefbF7N6HMy6/x+/zKtNVP+E+wmpRGsVWV/PAASBpCjCL3LQkRUfeedLd8luy4mXOmLOQPb92Izc+sAKA7978EEd854+sfvn1CkdXmzwYOD2/u3ebfyepuu+pFzPdvgEp9hPuIzrnA8HVKY1iq9N54AAkHQj8L7kEWp3CPi1FL7y6fsvyp375d973k79x8e2PA/D8uvXFXmZFeDBwbfnatYt4Y6Pvt5Ux9xNl4tOI1ankYqub88B9DxgI/EbSfZLmFdmclUlEsGz1Wr5zwxJuW/pcu+dannyBTZtzGSsPT+4JDwZOydPPr+u6UYnWvLqemxatzHw/9cz9RG362V8f5/lX/YE7Danc1LSreeAiYkoa+7H0fPZX93J9csqwMz4k3SOFBgMf1kn7TgcDS5oBzACYMGFCGvHVjE/98p5Kh2ApcT9RHmn+z/733z/InNse49/evQ8Dmvpw9J4+bdtTnq6nTnWn0AK2HOGybHTnopF6GaPywPKXWLLiZSLvPMiGjeV5/y187Pmy7Mes1qx46XU++Yt7OOvSuyodSk3zdD3WqRP/+3YAznnbbkx/60R2GT6gwhHVhO0dDHxMZxeN1It3//AvAOwyfIct61a/Up4LNK646ymm7D2K4/YeXZb9mVl9cbFl3XLxXx7n4r88zmX/dCgjBvbjyTWv0tingeMnu3MqYMtgYHJF1hnAB/Mb5A0GnurBwO09/fzWWzG
8sG5D2fab5S0mzKy+udiqQw+tfLnHr+14KHlgv0b++uVjGTKgb6lh9RoRsVFS22DgPsClbYOBgZaImEf7wcAAT0XEe4pu1MzMapaLrTo09YLbU9vW2jc2ct/yFzlwwlBue6SVd+03LrVt1zIPBq5BvhrEegHf+qE6udiykk3PO9o1eexgdh85sILRWC2qigsx3EuZdepzV97LBR84APmDyXbz1YiWqtuXPscjq15h4szr+b9/PEvLE77Ky7r2g1sernQIBPDru5/yDU7Nirjuvmd95W4P+ciWperf5i3esvzZX90LwBOzT65UOFYjbnlwVaVD4IYHVrDwsed5cs06/nXqXpUOx6xHsj4+e+ZFC/nB6fuz87ABvLZhE0dPGuEjXd3gYqvOXHz7Y2Xf58SZ1zPzxL345DFvKvu+rfpt3LSZR1atrXQYvPzaRgDWrPUds8068/mr7m/3+I9fOIYJwwfw4msbGDGwX8HXRAQrX36dsUN2KPh8b5fKacRuTLrbT9Kvk+fvlDSxlP29tG4D/+/mh9m4aXMpm6lL37p+SUX2O/vGh5g483oeba18p2rV442Nm9hjVtGb55dV29RAkfmxgfpU7n6iXn392kVl3+f8JavYY9aNNH9rPs8kt1B5bu0bPNq6dstNio+c/UeO+M4fue/pF1m2OjfUZOLM67l84ZPc8egaNvTy/rzkI1t5k+4eT25akrslzYuIB/OafQx4ISL2kHQG8J/AB3q6z/N/v5hr7n2G5S+s44IzDiwl/F7v9Q2bOOnC23ms9dVKhwLAcd//M4P6NXLs3qP4+rsmM6CpDwOafIC1Xv367qe7blQmr7yRO7J1VctyTjlwPKMH92eXYQNoavTQ1lJVop/oTV5bv4m+fcTGzUH/vn06bdvd2UHS9B83PLRl+cjZf9zm+dMOHM+zL+VuUHzKj/7a7rlixeGXp+7Fb+55mh9/6CBGD+rPjv0a6dtHNXvKUlHiFTiSjgDOj4h3Jo+/AhAR38lrc3PS5g5JjcBKYGR0sfPm5uZoaWnZZv3Emddvs+7st06kQaJPAzQ0KLcs0dCQfBcseGg16zdu5sAJQ3v885by2yrtV92zF19xV/V0Zj31ppE7csjE4dus7+z32dnRiWKv69MgZr93v6Kvk3RPRDQX32v2iuVErSqUy9XkvQftzPdP37/SYVSt7uZEVv1EZ/nw/T88zH1Pv8jQAU3s2NSHxj65fiG33/Zt8/9fbPtch8dR/Nn857q7j5Uvv87g/n3ZsV8fhFi7fiP9Ghv43d+3mXSinSl7j2LMkP5E5KL41Z1Pddq+tzv5LWNZ/OxLTNhpR159YyN7jh6UqweU+7tLbcskj7cuN4jksVi3YSP/++f2w22m7D2aR1a9Qr/GBk58y1g+f/yeReMolhNpHFLozqS7W9okN3x8CdgJeK5AoJ1Oulss73739+Vsjtwl5Jsj95Vb3rbtqpdfL/GWOj1/cSn7rc16vnSPtr7K2uSoQ0fq5LfS2e+60FMNDfX6G66MWrhj+2//vpwvn/hmzpyzkE+9fQ9OOWAcr67fxP/36/sYskNf/usDB2Sy35YnnmfWNYvo2yiu/fSRNPap+aNrqfUT3Z2Y/Z4nX+Afy1+iX2MDGzcHHdO74xEStXuu49ZU9LmOTfOf7/j/qf1zORs3B6tfeYMRA/vx4rr1jBzUb0tR2Jn5S1az045NReKtP21H9J558TU2bAqeXPMqmyNXM2wO2BxBJN83J+s6PlfM/CVbL+BpWLSy02KrmKo7fxMRc4A5kPvU0vH5jgmy7NsndvqPKP+Xuakbh2B7m6WrXuH4/7qt0mEUdNJbxvD2N49iUL9GJHj1jU0cuccIXnptA3uOHlizh4shNz4F+G9yd5C/OCJmd3i+H3AZcDCwBvhARDxR7jgr6aQL07u5bpYO/fYCAL74m/v54m/aDwyedfLePLlmHQfvOgzIne7Z+7ybuOLjh/Psi6+xy/AB7LfzECTo11j8f89
L6zaw/zf+wKETh3NXh9ul7DHrRi46q9lTYyW66iPa/Orjh5ctpqysWfsGjX0aeGTVK4wfugP9+/ZheFJgFbJs9Vqm/ODPZYywPA7edRhHTxrJnqMHcuzeo2iQaGwQL67bwJAd+iK1rw0iYrv7j8grxjZuDt7YsJmB/Ru3HPVq2+bmHt4TMI1iqzuT7ra1WZ4cHh5CroPpkX8+dg/+54/LWPCFY7r8xCeJPoI+iDqrswCYNHrQllsvbNi0mXPmtvDnR1orEsvFZzVzyG7DGbJD51P7jBnSv0wRZaMS41PaPlT0qZGjcxs3bebFMs57mJXmb80vuP7Mixb2aHsdC602H78sd6psrzGD+OIJb+Yde42qmb91ouz9RG+wU3JlX6FhFIXsMWogP/zggVtuu1MuE4YP4KnkAhOADx02gXOO2p0/Pbya5l2Hs+/4wfz3gqVcMH8pkBvD9bt7t/75j9trFJecfciWx90tloYVKTx78kFdbacaEY192ObATNs2e3oGJI1iq8tJd4F5wHTgDuB9wB+7Gq/Vmc8fvycfPXK3Tit821bfPg1bDjuX04ThA5hx9O5MqZ9P5ocCyyLiMQBJVwLTgPxiaxpwfrJ8NfBDSepJXmzeHBzy7fmseXXrLQv2GTd4y1gOaH/6Pbc+8pa3ttnSqsj6jq/N/95V2/z1z619Y3t/TAMeWvkK51xWfMzePuMGd2s73X2XDd6hkStnHNG9xp0rez9Rr7pzCjJtt/3rOwqu323EbluWz52yJ5885k1bipj/9/79WfNq7rRpR7V8VqOYkoutbk66ewlwuaRlwPPkEq3HJLnQ6qHzp+3T7hNFFp6YfTK/uvMpDt1tOHuMqsupe8o6jrGhQZywzxiuuGvrANmxW44Oast4DkHect565Y0t0daxJJLylguvp9D2OtlP2wvyY7V0NDU25P3du6PrDm1gv3ROB1Sin6hX5Si2bjr3KK6991nWrd+4XeOX8o8WNTSoYKHVW6UyZqsbk+6+Drw/jX1ZaQb378vuI3dM9VYQF3zgABoaxL9ccS+XTM9dhPHBw4oPXLXt050xKt857S1857S3lDWuUvz7e/Zhz69Vx/21asnvPv1W3jJ+CH1rcMC8+4nyKMfZ5b3GDGbmid07imo5VTdA3sogxQPzD5x/AoP658ZgvWf/celtuLZ5fEoXmhob+Ma0fTjvusVdN65i133myGQAfK6Ha33lDT5/1X2c967JPLjiZQbv0Jd3vHlUl9t5+vl1HPXdW5l10t58+4Ztbzx8x1eOrds7b9v2yfrI1slvGZvp9nsrF1t1aIemdE4NvHv/cVsKLWvH41O64awjJtZEsXXnV49jyA59efn1DTRIjBjYj78sfY5nX3qN/Xdpf8++kYP6cfnHcmeMJ40e1O197DJ8wJYLWT5+9O4sW/0K37nhIVqefIE7v3pc3V1FbT3XkPFBzx996KBsd9BLudiqQ//7kYN523/e2qPX7r/zEPYZP4TDd9/JR7KK8PiU7rvzq8dx2H8sqHQYnRo9ODcOKr/gedukEZnuc49Rg9pdnWXWXb1xcHlv4GKrDu08bAAH7zqMe558ocu2P/voIXz0Z3dveXzVJ4/o9H5BluPxKd0zenB/hg3oywtVchuIKz5+OB+8eCERcPzk0fz0wwdXOiSz7ZLlacSWr03JbNu9nYutOrXPuMGdFltfeueb+cw79gDg718/nojYcs8XszTd8ZXj2OvrN1U6DACOeNNO3PqFt+duDlk/tyqxXiSrAfIXfOAARrgP6DEXW3XqaydP5rSDdmZzBKf9+G/bPN9WaAG+zYZlqlrGI32gOXdNw8QROzJxxI4VjsasZ7I4stU2ntB6rvauH7ZUNDU2cMAuQ9l/56FMP2LXds9d/clUbmJo1m2fOHr3iu7/bXuM4Msn7lXRGMzSkHat5SFg6XCxVef6NIh/n7Zvu3XN3Zwawiwt0w4YX9H9//QjB/sIrvUKaR/Z+txxk1LdXr0qqdi
SNFzSLZKWJt+HFWhzgKQ7JC2W9A9JPZ7/zbL3s4/6Cigrv42bN1ds3w9/ayoD+3lERVbcT5RXJabrsa6VemRrJrAgIiYBC5LHHa0DzoqIfYCpwAWShhZoZxU077NHcs/XpnTrBoxmadt77GBO3HcMN597NDefe3TZ9vvhwyf46trsuZ8oo7QHyH/k8F27bmRdKrXYmgbMTZbnAqd0bBARj0TE0mT5WWA1MLLE/VrK9tt5qK82tIrp26eBn3z4YN48ZhBDB5TvRrlDd/CpwzJwP1FGad5n66AJ7hfSUmqxNToiViTLK4FOr5WWdCjQBDxa4n7NqpJPmZSunKdBdh/pqw7LwP1EGfVJ8dDWrz5+eGrbqnddDlSQNB8YU+CpWfkPIiIkFZ1uRNJY4HJgekQUHaAhaQYwA2DCBE9mbDWn7ZTJbEkzk8df7tCm7ZTJUknjgHsk3RwRL5Y72Go0clB5PkkfNWkEpx5Y2YH5vUU5+wn3EZ1L6wNE867Dqua2LL1Bl8VWRBS9ZaykVZLGRsSKJElWF2k3GLgemBURC7vY3xxgDkBzc3NdzRVnvcI04O3J8lzgT3QotiLikbzlZyW1nTJxsZU4cMJQ7n0q21/HoROHe2qTlJSzn3Af0bnBKc1Xe+6UPVPZjuWUehqxbTJdku/XdWwgqQm4BrgsIq4ucX9m1c6nTFJQjim5XWeVjfuJGpTVnejrVanF1mzgeElLgSnJYyQ1S7o4aXM6cDRwtqT7kq8DStyvWcVImi9pUYGvafntIiKA7pwy+WhXp9YltUhqaW1tTe3nqGZtv7Tffuqtme3DVyGWjfsJq3sl3VwmItYAxxVY3wKckyz/AvhFKfsxqyY+tZ69SA5tNTaIPg1i0+Z0f+ymPg1Mf+vEVLdphbmfqE118Y+mjHwHebN0+ZRJCjYnxVZWp/r+5bg9aGr0vz+zYobsUL5bsNQD/7cxS5dPmaSgbcyWyKba8sB4s87tO35IpUPoVTxHhVmKfMokHVP3GcPiZ19m9JB+W04pmpnVKhdbZlZ1PvOOPTjriIkMKePd5M3MsuLTiGZWdRoalGmhdfjuwzPbtplZRy62zKyuzDh6dw7e1cWWmZWPiy0zq2pvm5TufMSNvlujmZWZiy0zq2o//fBBfPhwz4Fn1l2PfOvESodgHXiAvJlVtQFNjZz/7n3YZ9wQjtlzJG+d/ceStue7Plhv53vIVZ+S/yKShku6RdLS5PuwTtoOlrRc0g9L3a+Z1Y/GPg2ceeiEVE4BZnXvLivMfYRZOqcRZwILImISsCB5XMw3gdtS2KeZ1SPXSbXIfYTVvTSKrWnA3GR5LnBKoUaSDgZGA39IYZ9mVcuf5LPjo1I1yX2E1b00iq3REbEiWV5JLlnakdQAfB/4YlcbkzRDUoukltbW1hTCMys7f5LPiMdb1ST3EVb3ujVAXtJ8YEyBp2blP4iIkFRobo1PAzdExPKu5iSLiDnAHIDm5mbP02G1aBrw9mR5LvAn4MsdG+V9kr8JaC5TbHXPBVv63EeYda5bxVZETCn2nKRVksZGxApJY4HVBZodARwl6dPAQKBJ0tqI6OwTv1mt2p5P8h8mN2G1dUMaddIHDtklha1YPvcR1ee3nzqC9/7kjm61fdPIHXm09dWMI6pvaZxGnAdMT5anA9d1bBARH4qICRExkdxh4sucRFbLJM2XtKjA17T8dpGbRbnTT/Ld2JdPm6TktIPGs/OwAZUOo964j6iA7Zkl4aZzj+aoSSO2PJ40amAWIdW1NO6zNRu4StLHgCeB0wEkNQOfjIhzUtiHWVUp5yd5nzbZqqtTTFaV3EdUub59GvjZ2YewYVPQ1Njgy1AyUHKxFRFrgOMKrG8BtkmiiPg58PNS92tWxdo+yc+mk0/ybcuSzgaa/Um+a+4Eao/7iNrQ2KeBxj6VjqL38m1mzdI3Gzhe0lJy47FmQ+6TvKSLKxpZjfOBLTOrRZ6uxyxl/iSfHd9ny8xqkYstM+v1Jo0
ayMG7DuNL73xzpUMxszrk04hmVjP6NvbsyFa/vg3Mfu9+7DSwX8oRmVWvn330kEqHYAkXW2ZWMwY0+WC8WXe9482jKh2CJVxsmZmZmWXIxZaZmZlZhlxsmVmv853T3tLu8QG7DK1QJGZmJRZbkoZLukXS0uT7sCLtJkj6g6Qlkh6UNLGU/ZpZ/Tpur67HoZx56IR2j8971z5ZhWNdcD9hVvqRrZnAgoiYBCxIHhdyGfC9iNgbOJTC05eYmXXpkrMP4YnZJxd9/uNH7dbu8fAdm2hq9EH8CnI/YXWv1P9A04C5yfJc4JSODSRNBhoj4haAiFgbEetK3K9ZVfKn+MqbdfLkSodg7bmfqGLjhvSvdAh1odRia3RErEiWVwKjC7TZE3hR0u8k3Svpe5KKzsAkaYakFkktra2tJYZnVnb+FF9lfM/5iku1n3AfsX1uPvfoTp+/9jNHlimS+tZlsSVpvqRFBb6m5beLiACiwCYagaOALwKHALsDZxfbX0TMiYjmiGgeOXLk9vwsZtXAn+KrTKF/SpaucvYT7iO2z5vHDGKHvoWPb+w2YkdGDfaRrXLo8g6BETGl2HOSVkkaGxErJI2l8Kfz5cB9EfFY8pprgcOBS3oYs1k1265P8cBuwHxgZkRsKrRBSTOAGQATJkwo1KQuPfKtE9nzazdWOgzD/USt8lHf8in1NOI8YHqyPB24rkCbu4Ghkto+ghwLPFjifs0qxkd7q4MHvdcM9xNW90r9bzUbOF7SUmBK8hhJzZIuBkg+rX8RWCDpAXLF9EUl7tesYiJiSkTsW+DrOmBV8umd7nyKj4iNwLXAQeX7CczKyv2E1b2SJhqLiDXAcQXWtwDn5D2+BdivlH2Z1Yi2T/Gz6can+IhoJfcpvqV8IfYeIwf1o/WVNyodhnXC/UT18tHh8vFv2ixd/hRfRjOO2r3SIZhVvcE7FD6uctFZzWWOpH6VdGTLzNrzp3gzqzZXfeIIjvnen9qt+9rJe7PL8AGVCagOudgyMzPrxXbdaUfm/tOhTL/0Lu4/7wRWvfI6k0YNrHRYdcXFlpn1Gvefd0KlQzCrSsfsOXLLNFdDBvStcDT1x2O2zKzX6N/kf2lmVn38n8nMalZ0uI1Z5D386kl7lTkaM7PCXGyZWa/03oN2rnQIZmaAiy0z66V2aMrNB3f83oVmTDIzKx8PkDezXkN5k70NaGrkrq8ex7AdmyoXkJkZKRzZkjRc0i2SlibfhxVp911JiyUtkXShJM+BaWapig4zUY4a3J++fXwAv5LcR5ilcxpxJrAgIiYBC5LH7Uh6K3AkuZs47ktu8t1jUti3WdVx51I+HYsrq0ruI6zupVFsTQPmJstzgVMKtAmgP9AE9AP6AqtS2LdZNXLnUiEuV6uS+wire2kUW6MjYkWyvBLYZjRqRNwB3AqsSL5ujoglhTYmaYakFkktra2tKYRnVnbuXMy2ch9hda9bA+QlzQfGFHhqVv6DiAhJ2xzYl7QHsDfQdi32LZKOiojbO7aNiDnAHIDm5mafJLBa1K3ORVJb5yLgh511LsAMgAkTJmQTcS8hfGirEtxHmHWuW8VWREwp9pykVZLGRsQKSWOB1QWanQosjIi1yWtuBI4Atkkks1rgzqW6nHrgeE7cdwxNjR4MXwnuI8w6l8Z/pnnA9GR5OnBdgTZPAcdIapTUl9zYlIKf4s1qQURMiYh9C3xdB6xKOhW607kkHUxb52LbYeq+uXr3E8fszgn7FKp9rQq4j7C6l0axNRs4XtJSYEryGEnNki5O2lwNPAo8ANwP3B8Rv09h32bVyJ1Lmey60448Mftk9hozuNKhWHHuI6zulXxT04hYAxxXYH0LcE6yvAn4RKn7MqsRs4GrJH0MeBI4HXKdC/DJiDiHXOdyLLnOJYCb3LlYb+Q+wsx3kDdLnTsXMzPL59GkZmZmZhlysWVmZmaWIRdbZmZmZhlysWVmZmaWIRdbZmZmZhlysWVmZmaWIRdbZmZmZhk
qqdiS9H5JiyVtTm7YWKzdVEkPS1omaWYp+zQzs9rhfsKs9CNbi4DTgNuKNZDUB/gRcCIwGThT0uQS92tWldyxmG3D/YTVvZKKrYhYEhEPd9HsUGBZRDwWEeuBK4FppezXrIq5YzHL437CrDxjtsYDT+c9Xp6sK0jSDEktklpaW1szD84sTe5YzHqk2/2E+wirRV0WW5LmS1pU4CuTziEi5kREc0Q0jxw5MotdmFWaP4BYr1LOfsJ9hNWiLieijogpJe7jGWCXvMc7J+vMapKk+cCYAk/Niojr0t5fRMwB5gA0NzdH2ts3K5X7CbPOdVlspeBuYJKk3cglzxnAB8uwX7NMuGMxS537CevVSr31w6mSlgNHANdLujlZP07SDQARsRH4LHAzsAS4KiIWlxa2WU3b0rFIaiLXscyrcExmmXA/YVbika2IuAa4psD6Z4GT8h7fANxQyr7MaoGkU4H/AUaS61jui4h3ShoHXBwRJ0XERkltHUsf4FJ3LNZbuZ8wK89pRLO64Y7FzMw68nQ9ZmZmZhlysWVmZmaWIRdbZmZmZhlysWVmZmaWIRdbZmZmZhlysWVmZmaWIRdbZmZmZhlysWVmZmaWoVKn63m/pMWSNktqLtJmF0m3Snowafu5UvZpVs2cE2btOSfMSj+ytQg4DbitkzYbgS9ExGTgcOAzkiaXuF+zauWcMGvPOWF1r9S5EZcASOqszQpgRbL8iqQlwHjgwVL2bVaNnBNm7TknzMo8N6KkicCBwJ2dtJkBzEgerpX0cJGmI4Dn0oyvzBx/5fQ09l3TDsQ5sUWtxl6rcUM6sZc9J7YjH8B/n0qo1bghw5zostiSNB8YU+CpWRFxXXf3Lmkg8Fvg3Ih4uVi7iJgDzOnG9loiouD5/1rg+Cun1NidE+mr1dhrNW5IN/Zy5kR38yHZnv8+ZVarcUO2sXdZbEXElFJ3IqkvuQT6ZUT8rtTtmVWSc8KsPeeEWecyv/WDcifqLwGWRMQPst6fWbVzTpi155yw3q7UWz+cKmk5cARwvaSbk/XjJN2QNDsS+AhwrKT7kq+TSoo6p1uHkauY46+czGJ3TvRYrcZeq3FDmWJ3TvRYrcZeq3FDln1DRGS1bTMzM7O65zvIm5mZmWXIxZaZmZlZhmqy2JI0VdLDkpZJmlnpeNpIekLSA8l4g5Zk3XBJt0hamnwflqyXpAuTn+Efkg7K2870pP1SSdMzjPdSSaslLcpbl1q8kg5Ofh/LktcWv6thevGfL+mZQuM+JH0lieVhSe/MW1/w/SRpN0l3Jut/LakpzfjT4nxIJdaazAXnQGHOiVRidU6kmRMRUVNfQB/gUWB3oAm4H5hc6biS2J4ARnRY911gZrI8E/jPZPkk4EZA5KanuDNZPxx4LPk+LFkellG8RwMHAYuyiBe4K2mr5LUnliH+84EvFmg7OXmv9AN2S95DfTp7PwFXAWckyz8FPlXp91iBn8v5UMe54BxwTjgnaiMnavHI1qHAsoh4LCLWA1cC0yocU2emAXOT5bnAKXnrL4uchcBQSWOBdwK3RMTzEfECcAswNYvAIuI24Pks4k2eGxwRCyP3rrwsb1tZxl/MNODKiHgjIh4HlpF7LxV8PyWfso4Frk5en/+7qCbOhxTUai44BwpyTqTAOZFuTtRisTUeeDrv8fJkXTUI4A+S7lFuSgmA0ZGb9wtgJTA6WS72c1T650sr3vHJcsf15fDZ5FD2pW2Hudn++HcCXoyIjR3WV5tKv186U+v5UMu5UE850JFzIjvOiR7mRC0WW9XsbRFxEHAiuVnrj85/Mqnga+ZeG7UWb+InwJuAA8hNbPv9yoZT13pNPtRSrDgHqplzojIqnhO1WGw9A+yS93jnZF3FRcQzyffVwDXkDkWuSg6ZknxfnTQv9nNU+udLK95nkuWO6zMVEasiYlNEbAek8gwAAAGCSURBVAYuIvc3oIs4C61fQ+5QeGOH9dWm0u+XonpBPtRkLtRhDnTknMi
Oc6KHOVGLxdbdwKTkioAm4AxgXoVjQtKOkga1LQMnAIvIxdZ2BcZ0oG1S1nnAWclVHIcDLyWHZ28GTpA0LDnUeUKyrlxSiTd57mVJhyfnuc/K21Zm2v4RJE4l9zdoi/8MSf0k7QZMIjdAs+D7KfnUdivwvuT1+b+LauJ8yE5N5kId5kBHzonsOCd6mhNRBVdobO8XuSsfHiF3tcCsSseTxLQ7uSsW7gcWt8VF7hzvAmApMB8YnqwX8KPkZ3gAaM7b1j+RG6i3DPhohjFfQe6Q6gZy554/lma8QHPypn4U+CHJjAUZx395Et8/kkQam9d+VhLLw+Rd+VLs/ZT8Te9Kfq7fAP0q/T5zPmSTD7WaC84B54RzojZywtP1mJmZmWWoFk8jmpmZmdUMF1tmZmZmGXKxZWZmZpYhF1tmZmZmGXKxZWZmZpYhF1tmZmZmGXKxZWZmZpah/x9RfaxCkytWfwAAAABJRU5ErkJggg==\n", + "text/plain": [ + "
    " + ] + }, + "metadata": { + "tags": [], + "needs_background": "light" + } + } + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "EWXPphxm0B4m" + }, + "source": [ + "## Spectrogram\n", + "\n", + "You'll convert the waveform into a spectrogram, which shows frequency changes over time and can be represented as a 2D image. This can be done by applying the short-time Fourier transform (STFT) to convert the audio into the time-frequency domain.\n", + "\n", + "A Fourier transform ([`tf.signal.fft`](https://www.tensorflow.org/api_docs/python/tf/signal/fft)) converts a signal to its component frequencies, but loses all time information. The STFT ([`tf.signal.stft`](https://www.tensorflow.org/api_docs/python/tf/signal/stft)) splits the signal into windows of time and runs a Fourier transform on each window, preserving some time information, and returning a 2D tensor that you can run standard convolutions on.\n", + "\n", + "STFT produces an array of complex numbers representing magnitude and phase. However, you'll only need the magnitude for this tutorial, which can be derived by applying `tf.abs` on the output of `tf.signal.stft`. \n", + "\n", + "Choose `frame_length` and `frame_step` parameters such that the generated spectrogram \"image\" is almost square. For more information on STFT parameters choice, you can refer to [this video](https://www.coursera.org/lecture/audio-signal-processing/stft-2-tjEQe) on audio signal processing. \n", + "\n", + "You also want the waveforms to have the same length, so that when you convert it to a spectrogram image, the results will have similar dimensions. 
This can be done by simply zero padding the audio clips that are shorter than one second.\n" + ] + }, + { + "cell_type": "code", + "metadata": { + "id": "_4CK75DHz_OR" + }, + "source": [ + "def get_spectrogram(waveform):\n", + " # Padding for files with less than 16000 samples\n", + " zero_padding = tf.zeros([16000] - tf.shape(waveform), dtype=tf.float32)\n", + "\n", + " # Concatenate audio with padding so that all audio clips will be of the \n", + " # same length\n", + " waveform = tf.cast(waveform, tf.float32)\n", + " equal_length = tf.concat([waveform, zero_padding], 0)\n", + " spectrogram = tf.signal.stft(\n", + " equal_length, frame_length=480, frame_step=320, fft_length=512)\n", + " \n", + " spectrogram = tf.abs(spectrogram)\n", + "\n", + " return spectrogram" + ], + "execution_count": 124, + "outputs": [] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "5rdPiPYJphs2" + }, + "source": [ + "Next, you will explore the data. Compare the waveform, the spectrogram and the actual audio of one example from the dataset." 
+ ] + }, + { + "cell_type": "code", + "metadata": { + "id": "4Mu6Y7Yz3C-V", + "colab": { + "base_uri": "https://localhost:8080/", + "height": 145 + }, + "outputId": "cdb225d1-54f2-4554-e518-a9f32865f728" + }, + "source": [ + "for waveform, label in waveform_ds.take(1):\n", + " label = label.numpy().decode('utf-8')\n", + " spectrogram = get_spectrogram(waveform)\n", + "\n", + "print('Label:', label)\n", + "print('Waveform shape:', waveform.shape)\n", + "print('Spectrogram shape:', spectrogram.shape)\n", + "print('Audio playback')\n", + "display.display(display.Audio(waveform, rate=16000))" + ], + "execution_count": 125, + "outputs": [ + { + "output_type": "stream", + "text": [ + "Label: no\n", + "Waveform shape: (16000,)\n", + "Spectrogram shape: (49, 257)\n", + "Audio playback\n" + ], + "name": "stdout" + }, + { + "output_type": "display_data", + "data": { + "text/html": [ + "\n", + " \n", + " " + ], + "text/plain": [ + "" + ] + }, + "metadata": { + "tags": [] + } + } + ] + }, + { + "cell_type": "code", + "metadata": { + "id": "e62jzb36-Jog", + "colab": { + "base_uri": "https://localhost:8080/", + "height": 499 + }, + "outputId": "bc3a0ea2-38d7-496a-d1d5-5048f99eeed6" + }, + "source": [ + "def plot_spectrogram(spectrogram, ax):\n", + " # Convert to frequencies to log scale and transpose so that the time is\n", + " # represented in the x-axis (columns).\n", + " log_spec = np.log(spectrogram.T)\n", + " height = log_spec.shape[0]\n", + " width = log_spec.shape[1]\n", + " X = np.linspace(0, np.size(spectrogram), num=width, dtype=int)\n", + " Y = range(height)\n", + " ax.pcolormesh(X, Y, log_spec)\n", + "\n", + "\n", + "fig, axes = plt.subplots(2, figsize=(12, 8))\n", + "timescale = np.arange(waveform.shape[0])\n", + "axes[0].plot(timescale, waveform.numpy())\n", + "axes[0].set_title('Waveform')\n", + "axes[0].set_xlim([0, 16000])\n", + "plot_spectrogram(spectrogram.numpy(), axes[1])\n", + "axes[1].set_title('Spectrogram')\n", + "plt.show()" + ], + "execution_count": 
126, + "outputs": [ + { + "output_type": "display_data", + "data": { + "image/png": "iVBORw0KGgoAAAANSUhEUgAAAucAAAHiCAYAAABLImLmAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4yLjIsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy+WH4yJAAAgAElEQVR4nOydeZxcVZn+n7eqestOFrYESCABQZCoEVAWUVBAweiMMiCjuAzoKC7jb8YJKqAsoo4LyrigiCDDqqIsCXuAEAghYctKyL4vna3T6bWq7vn9cbdzb51TdaurOl3pfr6fT5JbZ6/q6tRz33rOe0QpBUIIIYQQQkjfk+rrBRBCCCGEEEJcKM4JIYQQQgipESjOCSGEEEIIqREozgkhhBBCCKkRKM4JIYQQQgipESjOCSGEEEIIqREozgkhhASIyEEiMktEWkXkZ329HkIIGWhQnBNCSI0gIleKyKOxsuWWsot6aRmXA9gOYJhS6v/10hyEEEIsUJwTQkjtMAvA+0QkDQAicgiAOgDvjJVN9Nr2BkcAWKJ6cEKdiGR6YT2EEDKgoDgnhJDaYR5cMT7Ze3w6gGcALIuVrQRwjogs9ewnq0TkS/4gXvn52uOMiDSLyLu8x6eIyIsisltE3hCRM73y2wFcCuDbIrJXRM4WkQYRuUlENnl/bhKRBq/9mSKyQUT+W0S2APiTiHxfRP4iIv/nrW2hiBztfSuwTUTWi8iHe+8lJISQ/RuKc0IIqRGUUt0A5gI4wys6A8DzAGbHymYB2AbgfADDAHwewC988Q3gHgAXa0OfA2C7UupVERkLYDqA6wGMBPCfAP4mImOUUp8DcBeAnyilhiilngLwXQCnwL05OBHASQC+p419sDfOEXAtMQBwAYA7ARwA4DUAj8P9vBkL4FoAt/TsFSKEkP4PxTkhhNQWzyEU4qfDFefPx8qeU0pNV0qtVC7PAXjCqwOAuwF8TEQGeY8/DVewA8C/ApihlJqhlHKUUk8CmA/gI5b1XALgWqXUNqVUM4AfAPiMVu8AuEYp1aWU6vDKnldKPa6UygH4C4AxAH6klMoCuBfAeBEZUfYrQwghAwCKc0IIqS1mAThNREYCGKOUWg7gRbhe9JEAjgcwS0TOE5GXRGSniOyGK65HA4BSagWApQAu8AT6x+AKdsCNcH/Ks7Ts9vqeBuAQy3oOBbBWe7zWK/NpVkp1xvps1a474Ebt89pjABhS+qUghJCBBzfvEEJIbTEHwHAAlwF4AQCUUntEZJNXtsn7sxjAZwE8qJTKisg/AIg2jm9tScHd4LnCK18P4E6l1GUJ17MJrqBf7D0+3CvzKXvjKCGEEDuMnBNCSA3hWUPmA/gWXDuLz2yvbBaAegANAJoB5ETkPADxTZb3emX/jjBqDgD/Bzeifo6IpEWk0dvYOc6ypHsAfE9ExojIaABXe2MQQgjpBSjOCSGk9ngOwIFwBbnP817ZLKVUK4CvA7gfwC64nvKH9AGUUpvhRuHfB+A+rXw9gKkAvgNX3K8H8F+wfx5cD/dmYQGAhQBe9coIIYT0AtKDVLaEEEIIIYSQXoCRc0IIIYQQQmoEinNCCCGEEEJqBIpzQgghhBBCagSKc0IIIYQQQmoEinNCCCGEEEJqhH51CNHo0aPV+PHj+3oZhBBCCCGkn/PKK69sV0qNqfa4/Uqcjx8/HvPnz+/rZRBCCCGEkH6OiKztjXFpayGEEEIIIaRGoDgnhBBCCCGkRqA4J4QQQgghpEagOCeEEEIIIaRGoDgnhBBCCCGkRqA4J4QQQgghpEagOCeE4M45azDtbwv6ehmEEELIgIfinBCCqx5cjHvnre/rZRBCCCEDHopzQgghhBBCagSKc0IIIYQQQmoEinNCCCGEEEJqBIpzQgghhBB
CagSKc0IIIYQQQmqEqohzETlXRJaJyAoRmWaoP0NEXhWRnIh8MlZ3qYgs9/5cqpW/W0QWemP+SkSkGmslhPQdXbk8unNOXy+DEEIIqVkqFucikgbwawDnATgOwMUiclys2ToAnwNwd6zvSADXADgZwEkArhGRA7zq3wK4DMAk78+5la6VENK3HHvVYzj5h0/19TIIIYSQmqUakfOTAKxQSq1SSnUDuBfAVL2BUmqNUmoBgHjI7BwATyqldiqldgF4EsC5InIIgGFKqZeUUgrAnwF8vAprJQOQr93zGsZPm97XyyAAHAXsas/29TIIIYSQmqUa4nwsAP30kg1eWSV9x3rXJccUkctFZL6IzG9ubk68aDJwePiNTRWP8cTiLXhx5fYqrGb/QimF7Xu7+noZhBBCyIBhv98QqpT6vVJqilJqypgxY/p6OaSfcvmdr+DTf5jb18vYJ/z22ZXBNw1/nL0aU65/Cqu3t/XxqgghhJCBQTXE+UYAh2mPx3lllfTd6F33ZExCSAX8+LE3g+vn3nK/jVq7g+KcEEII2RdUQ5zPAzBJRCaISD2AiwA8lLDv4wA+LCIHeBtBPwzgcaXUZgB7ROQUL0vLZwE8WIW1EkIS4m73IIQQQsi+pGJxrpTKAbgCrtBeCuB+pdRiEblWRD4GACLyHhHZAOBTAG4RkcVe350AroMr8OcBuNYrA4CvALgVwAoAKwE8WulaCSGEEEIIqWUy1RhEKTUDwIxY2dXa9TxEbSp6u9sA3GYonw/g+GqsjxCfB1/fiFGDG3DapNEl2y7a2ILm1i584G0H7oOV1R5KAT05XsBxFLrzDhrr0r2wKkIIIaR/s99vCCWkHL5x7+v41z8m29h5/s2z8fnb5yVq6zgKm1s6KllazbOqeS/GT5uOp5ZsLdruhhlL8barHuNhQ4QQQkgPoDgnpArc9NRbeO+NM7Fxd/8R6Cp2/fr63QCARxYUpqac+usXMPnaJwAA9768DoB7GighhBBCyoPinJAq8NxyNwf6tj2dZffd1tqJ9u5ctZdUkhdXbMeKba1F2+imlmIOlzfW78Zu73ChnlhhCCGEEOJCcU5IH3PSDU/jn37z4j6f99O3zsXZP59lrY9na0mavMWX5grAns4sxk+bjjvnrClo9/AbmzDl+qeQyxfaX/KOQt5hthhCCCEDD4pzMmDYF9HpnsrJN7cUj2D3BT2Wxp46VwrY0uJ+k/DnOWsLmv3g4cXYvrcLO9u6C+pO/dFMTP7BEz1dASGEELLfUpVsLYTsD2RzvReJ7fdGDlVoa9nd3o1B9RnUZyz3+CVebt/+Ymq2pQf2IEIIIaQ/wMg5GTj0ooK26dC2rhx2t7uR4Wfe3IYr7n410XjdOQcn//ApPLZoc8Vr293ebbSOlMJmY/GLJ1/7JL505/yCev1lTmKF6fc3NoQQQkgZUJyTAUNf7FM886fPYvK1TwIAPn/7PDyyIJnY3tnWja17uvDl/0sm5m3kHYXJ1z6J//7bworGAQAxyOhnljVb2yvtlqXUa9/SkcXE78zArLfs4xFCCCEDAYpzQqqATXs2t3b1aLxUlW4kco4bMX/ojY1l91VQPbqhCewqZbiIlm7eg5yj8L8zV5Q/ISGEENKPoDgnA5bd7d1BRpIV21oLspP0KVUS5360u9KnpkfBS43lC/pSU+rj0NpCCCGEuFCckwGDLgBXNe/F5GufxJ0vrcWst5px9s9n4a+vbKh4jmrpe5OFpEfjJBTKJpRKnufcOkaSmanMCSGEkACKc9IvcRyF1dvbrPVrdrh1M9/chuXb9gIAFm/ag1zewbod7WXPV20/e7VsLT598a1ARNgXU+BKv6yhby8IIYSQPoDinPRLfv/8Knzgp89i8aaWoEw/uVK3e/jCVQS4YcZSnPE/z2Brman8qq19q3XKpr8uf3n//dcFeHLJ1srGrHI7gKeKEkIIIT4U56Rf8uraXQCA9Ts7grKI/DNoQYHgxRU7AMB4MM6+RF9
ed87BJ3/7Iuat2dnj8XyRft/89bjsz4XpD3u6tmL1SqlkNy0lBmzryuHOOWtqa08AIYQQ0ktQnJN+SeC1LiHo9FqRZPaUC383B7c+v8o4X7XQx1u3sw3z1+7CtL8tKNpnw672gnzmldhE9Jcuct3jEcub0+e6R5bgqgcXY9by7b04MyGEEFIbUJyTfklgW7HWh5gEYTFN//Kanbh++tIer60YX77zFdw5Z03Z/Zpbu3Daj5/BDTN6Z109IbGIN5w+quN/i9HRna94TYQQQkitQ3FO+iUp751dKuKrR9Yjgr2PNiY+tngLrnpwsbGu2IpaOlwB+1zsEJ9KnCC21yCpvSSSKtEovstdHG0thBBC+j9VEecicq6ILBORFSIyzVDfICL3efVzRWS8V36JiLyu/XFEZLJX96w3pl93YDXWSgYG/gZDR1OIESGuqUVfhLq2luT+lLyj8LV7XsOijS2lG8d4c8ueotlkyk2l2JODf0qhVGwTbcLXJlgLknnOFTSfunG86OMbH12KmW9WtqmVEEIIqVUqFucikgbwawDnATgOwMUiclys2RcB7FJKTQTwCwA/BgCl1F1KqclKqckAPgNgtVLqda3fJX69UmpbpWslA4eUSZxr9SZbS0SwJxCVG3a14+E3NuErd70aKd+4uwMX/m4OWtqz1r7n3vQ8PvDTZ6315UbuU4E4773ocm+OnXSPAADc8twq/OdfivvvCSGEkP2VakTOTwKwQim1SinVDeBeAFNjbaYCuMO7/iuAs6QwDHex15eQigkzhoRlNt1nE+095TfPrMDLa3bioTc2mucrW+RGV9WdcwrG8POi56sooPWRejRswj6lxjbV93U2HUIIIaS3qIY4HwtgvfZ4g1dmbKOUygFoATAq1uZfANwTK/uTZ2m5yiDmCbESnoxps7X4ZVrkHMCSzXsqnFlVdCpnMEqkc/hgT2cWR3/vUdw8c0WkffBNgeO2GT9tesX5zOOU+ytY6vkbN+KWNQMhhBDS/6iJDaEicjKAdqXUIq34EqXUCQBO9/58xtL3chGZLyLzm5ubTU3IACRl8GBHNigi9EVrhT1CQUUi9Sa/eN6pjuzc5UWM//LK+kh5KhXaeFY3u172m2cur2xDqKVzqSFN31oUnSfiOjeMV+Tn8n8vrcUZP3km2USEEELIfkA1xPlGAIdpj8d5ZcY2IpIBMBzADq3+IsSi5kqpjd6/rQDuhmufKUAp9Xul1BSl1JQxY8ZU8DRIf8IXdLomtmnFYEMoyvOc6+1fXbcbAHDHnLXGMaIbU0uPHZ+pFKng+Spk0u6D7pxTpEf51OJXV9/7xyKs29ne18sghBBCqkY1xPk8AJNEZIKI1MMV2g/F2jwE4FLv+pMAZiovLCciKQAXQvObi0hGREZ713UAzgewCIQkxBfOuih2DKo4YmvR1GdPUyku2LC7KgcSlZpdP/kU0J9v7CajkkOIetwznDvJ/En2BZSqI4QQQvoLmUoHUErlROQKAI8DSAO4TSm1WESuBTBfKfUQgD8CuFNEVgDYCVfA+5wBYL1SSj9ysQHA454wTwN4CsAfKl0rGTikAm9FWGbKu90becDD+SqJlvcMQe/kaI/ccFRpo6c+XLEbmnLTShJCCCH7MxWLcwBQSs0AMCNWdrV23QngU5a+zwI4JVbWBuDd1VgbGZiIZvPw0UWrSe5VSwKWGqcS6Ry13ijcNXcdpk4+tMiBQT2fq1hEu1jGmWpsiI3MxW2ihBBCBhA1sSGUkGoTbAjVyszZQZRRaJbrOQ/L9EN4ek6SdIuvrN2F7/1jEb7z90U1aflIfpKoORd9HOZrIoQQMhCgOCf9EvMJoXoD95/1OzssnvOe88TiLQCAXN4Wza6Oku7Ouxs+t+3prMp4BcSWGXl9ynwKpjSM5eaXr8UbEEIIIaTaUJyTfok5W4tua3EbbNzdEZT21Nsc9bILNrW4YrmlIzwhtCrWjNgQ9Wn31zfnmEfXn1vPplPWVyTJuIk
95yXaJfm59ObppYQQQsi+hOKc9EvCDaHJN2VGI8OlxV4pm0Ul6RMjUWXLPBlfnOfNKRNbO3PlTZqQ3tlwWoUduoQQQkg/gOKc9Ev01II+NsH799fctPy5Mg8KMulIfdx8Dw/xsc4Xe5zx7kCyebNvHqgsohzvGt+MaqfwAKhSGJLrFK6nWJ2hcm9XDttae8nyQwghhPQSFOek36CUwpUPLMTr63dHDuXR602s3u6eqNnWFUaabULQdLCPLshT2gOnSqeChl8CqMhcdV7kPJt39knAuaKc6cZNt8nGS7IR1DTSR375PE664elEcxBCCCG1AsU56Te0duVwz8vr8Jlb52o+8pCIN7yHc+zpLPSRlztuEk0a97GbSHt3IMUi/pV5zntWF7ZJmq3FfF2srLCNwu72boyfNh1/emE1APDkUEIIIfslFOek3+BnR0mnQzGrC9skgtfUtqdtbHOXSynLR1bznG9r7er5RCXXUd4tTTmvd6VpEhXcDbAAcN+89eX1VQp3zV0b+eaEEEII6Ssozkm/Iee4IjWTMr+tS0Vys5HUhwmOnTeU9UYubn/MtTvikeDCyH0xyvWf233sxec0HUJUbO5qbDDV12S6EdjS0onx06Zj+oLNBXUvrtyB7/59EX7w8OKK10EIIYRUCsU56Tf44jqTMitkJxLJLay/5+V1xn6RQ3KUubwUCze2BNe+x70Y5QpW21Kq6UWP5oFP6BcvN9puGDdJFp1Sp78u3bIHAHD//MKoent3HgCws627jJUSQgghvQPFOek3+Js10xZxrgu7UpLR5oPWRaAq0Vbnra2twfXq7XtLzB7FJnCt66qSUK9U1yulEol4pZKL+GLPwXSYVDn4/R9ZsAm/fGp5zwYhhBBCKoTinPQb9niH/gxpyJg3FvZwXKeIvaMY0c2oZZ6HaRHepiZFxWgvZHEpZWspd2qFZJH4UsLcHyOJOL/+kSW45NaX3Paxuivufg2/eOqt0oMQQgghvUCmrxdASLXozLr2hMa68J6z3IOFTCjLA388mxiM9OsNM7o+VxXFcjljmjCc/1S1OZPkOk8Shb919urkiyKEEEL2IYyck35DKS0YzR6SfCyrTaTEuHqe80qkeSmbyoZdHfa+leQmN1h4yiWJUFZKJRLyIiU2qWrtesI+SBVPCCGElITinPQbbPaTsD75WKV85vE2xcri9PRQncI2ydflllcmP2054+0U95yXlxunhK2lxIbQYvTylxqEEEJIWVCck36DL95sHuZyosi2DC3ljJc0lWDJtSRpo683webQcifuiX5NnOKxRJ9olhj7XJXefCTpv7mlA8u2tJZsRwghhPQUes5Jv8Eo7Cz15ei4crKf6Kd1VjONYdL5i5ZXMlfkunhEXM9zXm4O9nKsSfH1BVVlhsLLaf7eG2cCANb86KNlzUEIIYQkhZFz0m9wSkS7I/VljGuLRJvm0HOY66LvqDFDgutUAjVYbj71ci0iSYiP01v2D90vXrJtMZtMsCG0h+voYT9CCCGkmlRFnIvIuSKyTERWiMg0Q32DiNzn1c8VkfFe+XgR6RCR170/v9P6vFtEFnp9fiWlzv8mAx5ffEezFvbsbWOzWuji8I0NuwEAza1dJcfLpHv+q2ZdSwJveRIvetnrSZpKsVTmFaNnv5hHvZgwD2Pn5f7IS21afWLxliCHPiGEENLbVCzORSQN4NcAzgNwHICLReS4WLMvAtillJoI4BcAfqzVrVRKTfb+fFkr/y2AywBM8v6cW+laSf/GKN2sp3uWGCsiyM380UvH1503Czd7nvPS5UmwRvR7PGJsfFVZtFwhWRYWJG5XwtZSaeTcMPaLK7fj8jtfwU+fWNbDUQkhhJDyqEbk/CQAK5RSq5RS3QDuBTA11mYqgDu8678COKtYJFxEDgEwTCn1knLVy58BfLwKayX9mNC3HMpWe9Q5fPCuw0cUjhURvmYRXI2vcpJsNrVF7uM9fDLaCam2jaKVIJI8u0oib7o
CkoxYLMIdTaVYpJ15YCu7292DrdbtaC+5PkIIIaQaVEOcjwWwXnu8wSsztlFK5QC0ABjl1U0QkddE5DkROV1rv6HEmIREcAwB7CTCNp0qLrNtsrFUVHloY51xDLvtJIG3PEH0v6kuXXKcJFhPJq1CZhR/nGqsx6ssGjlPciNFzzkhhJBaoK83hG4GcLhS6p0AvgXgbhEZVs4AInK5iMwXkfnNzc29skjSdyilSqauO/N/nsEFN882iitb1LusDaGa6I/Y2UtIvuMODd/KNiFaKjd74ayWFpbnVqnnPNqnvO8KEnvTC+aJor/Oxb5pKHViq318QgghpHaohjjfCOAw7fE4r8zYRkQyAIYD2KGU6lJK7QAApdQrAFYCONprP67EmPD6/V4pNUUpNWXMmDFVeDqklvjLKxtwzk2z8Nxb9huvNTvasXBji3FDqK7lbNlcjCLbEnEvJ2KcpK1tTdbrxLNXTqK0hQb817N4wsXoPJVma4nYWqoot/2RqmUJIoQQQkpRDXE+D8AkEZkgIvUALgLwUKzNQwAu9a4/CWCmUkqJyBhvQylE5Ei4Gz9XKaU2A9gjIqd43vTPAniwCmsl+xmLN7YAAFY370VLexYvr94JAGjpyBb4gJUmzoMDiZII2+LavOf50SPXdlFZDkm+CahmfnU9Cl1uRLon9pdK1q5Cdd7D/oWTR33xUXa3d2P8tOm4/YXVPZuQEEIIMVCxOPc85FcAeBzAUgD3K6UWi8i1IvIxr9kfAYwSkRVw7St+usUzACwQkdfhbhT9slJqp1f3FQC3AlgBN6L+aKVrJbVNS3sWndk8snkH/3rrXLy8eif8M31EBF+79zVceMsctHZm8dFfPY8z/ueZSH9jaj5LfcnTPasdrdYj9WK2aVjtKEmGt0W5KxG7Rcaq5g1A8qwupaL5nq2lzPmLZ2m1123a3QkAuHfeemsbQgghpFyqckKoUmoGgBmxsqu1604AnzL0+xuAv1nGnA/g+Gqsj9Quiza2IO8onHjYCJx47RM4Yexw3HTRZMxesR3rd7XjtImjAQApAVY17wUA7NjbjQ27OgrG8oW8dfNlEiUcFJn76TYUm6Y7+qAheGvr3iKRbfN45R82ZInG90KUvuhCio1RjbmCNvbxVIKfSyWY5uXJC4QQQnqDvt4QSgYgP3tiGcZPmw6lFM6/eTam/vqFoG7hxhY4nspOi0Qi5/7JmjlTWhYkiIaXsUZdKzrKXJ5glCq0SBaxLleo94SkYlS3gpR7WFGS1IvGMYK/yqdo3LyIrSWYm3Z0QgghVYTinOxzbp65AgCwqaXTWN/lncZYl04FEdGUSBBltpz5Ewp5razUxsohDdEvj0YPqY/Uu/206LZ56pLYBFzeMY9dSVrFcgV8dEyzpz0+b6KxEr5aekS8lAi2H+ZU+YZQ09BJhLvPTx93bzodh2qdEEJIz6E4J/uMuLDSD8vRRaovzuszqUCQpyQUT3mL+AkPIdL/tllI3H8FpUWkTTSf8/aDAdjziieJCCezspg9OIkkYJ/rRPsCktwIAIgoZKutpYInWqk9xZ/7N8+6N53J0mMSQgghZijOyT7hxhlLMeHKGZizckdQpots3arii5u4yFGG8lICr1REWcEcaU2SFeXIMYMBAG87ZGjhwIiLemOTRJaZRFHxMtvYsPdVieaIj2Vrt7OtO2xX6oYjybxV0MPFBX5hnS1CrwA4jsJ3/74QSzbtqXxhhBBCBhQU56Tq/O65lViyaQ827e7A+GnT8fLqnbhl1ioAwN0vrwva6eLcdO2oqADym0RFvTky7hONOSvj9UkTRgIAPvHOsVq9pV+JG4Ak5TqRG40S8xSOXx1Pe/kUH1UStHrXdU8GWVKS2mRKrqrHnnN76FxfY9J5HaWwfW8X7pq7Dp+97eWeLYoQQsiAheKcVJXunIMfPfom/vm3L+K1dbsBALfNXh0I3/cfHR4UpQvTiMj2rpVS4YY8rX1eFQp5d7zC9ejCKZc321p
Snr3m4OGNCKSlNVpb/AbAhjXgW2Y0O4mtw745NMFclnldkVqepUapErHopPkqI5sILGNFrnum0ot5zk0j2uwwSoXvKVpcCCGElAvFOakKv312JR54dQO6cnkAQEc2jxGD6gC4Bwb5OkbXM3mDIAdC8R33iPtN9LZZbXeoKZKsC7WZb24z1GvXFo+47RTP4oXxSHiSNsYm1qmS2GZ6i6SHC1U1fSOKefd7LsqLec5TqdLritcopYl6r9/yra344+zVPVofIYSQgQXFOekxP37sTVz4uznB9bfufyMSnU570cO8UoHA1YWQLXIesbVIOIYvdPIRcW4Wt/51NCWiJfJryJFt81cbo/OFRYVjWNokody85fYsLglsMBabTbH5dP7x2katfxkiPlGrYv2TH2RkHcMYOfcj4KY6M6b32dRfv4DrHlmS+PUghBAycKnKIURkYPLbZ1cWlNk84P5VSvQMLVq/fGE/R7O15POO0dbiR+rj85koJVRtUWxbRN10A5CEEi6ZEn37RtzFLSq2VXzzvtcxdkST1yfp2OW1LVVeTf0rhS4nw9zRWkep8EAs79/2bvd9mncUMmmeXkQIIcQOI+ekJEoptHXlALhfz9/46FJrLmc964of4RZoHm9Nl+gR8O6c3s+fN4xO5hyl2Vpg7Gd2nJgtMEaU2WNcckNoEo+3Rd0lSYltF54JIuHl+sQt89rWk3z2ZK1Ktbd+S5Ggr3XMIh3DDaGlR/fbOipsH79hzFfzzoEQQki/hOKclOT++evx9msex/qd7fjCHfNwy3Or0Ly3y9hWj4D7UXSRUDjpYkW/7s4XRsD1yLkrzk2R88IUjIBZcHXnzUJeGcp0kkTRdXwrRDGLiKm8J6kKy2lfLvHXqCfzJW5X4TcCSrM9lWpnm7vcNdjeN5FvGWI3paYc/X+eswbjp00PboAJIYQMbCjOiZF1O9oxftp0PLNsGx5ZsBkAsGp7G5pbXVGuR6Hzlg2aeYNVRQ9e6/06s5rIDrK1hDaYXN4JhJ4ete/S+xmEs4orTAP+2qK5zW32HIO4K1NXjjugydg32QmelvIklpwyBX+8PElGmCRjGcdO0PZr97yGtTvaSo9nKJMiuz6LzZ0y7EFwH9s97o7S3lPBON7eCUch7yh89a5X8cranQCAW59fDQDB7xYhhJCBDcU5MfLGBjcN4l/mr4+U+1FhfSOmLjzAz50AACAASURBVMh1z3kuqpYBmLOyANGotp6tJRDneuQ8cppoGHE3qSWr7jKIVtsGTttBQeV4zvU2el7tcm0k5dpUKqHSzYuu1SSJ9cb2AFizvS2y8fLxxVuNY7R15Yt881F8DUEE3NAs3BAardQFeHxiRxPu/ty+yHccYMfeLkxfuBlfuetVdw5a0AkhhGhQnBMjjd6R9NHItApERpLIue8/F0gg0kz5zOPz5LXIuS+AcnmzraU7Z46c++iaamhjuP/ZJHIVzNladrWHp1ma0Kf91LvHGee2byo1i+2yLRZJIvBJbiKs4j8WKU56Q5LY1mIe9syfPmsdQn/fnXPTrMh7Ib4OaypLy+vvY3o/ADEBbpjPFjnvyucLNosWm3/WW80475fPl94vQQghpN9AcU7w0Bub8Nq6XcjlHXzkl8/jL/PXoyHjvjU6c3nUp73r7nwgMnQhZEqDqF+LhJs4bQcImbKuuFkvSoh6g+fc5vWeeOCQgueuIlHOSE1wtXFXh9YexuvCXsmi3NHxLAKyEl+6ZRxre63JI29sBgL/vHnMpOxuz1o3ESf1i7tzh+2+eMe8SF3wXigieuP2Fn3zpumJBVYqQ0YWe4rLwveU//vUlQ2zDvlLKRY4v/KBhVi6eQ+2tHQWaUUIIaQ/QXFO8PV7XsMnfvMiWjqyWLJ5D655aHGQo9xxQkGTdcINmnokL3odChZ9c6iPYxHypmwtjlLBdS7vBBYDWypFo1jWynRtuG1P6O/VRZYpr3Upr7U9KmsstmKzuNjtLpaoux71byse9S/
Giua9wfWzy5rteeIt6Bsjt+zpxI2PLo3WW/oUHzS8fHZZc6K+RQ8QUsVvWcLNovF+uq0pLty1OvhCPLTH+K3991qxjDB1ad9Gxsg5IYQMFCjO+yn+B/2KbXtx37x1ANwI+fWPLAEAXPnAAvznX96ICIJug3dcDzTm8k4gJHIWYZ2z2F1MtpasLQKu21qcwn55S7/HFm/Rnr8/b4guLqc9sBAA0NadDxrZItR66kaDjT5CIiFta1OB/rIJ0Av+d7Zx3meWFZ6WGm/z8Bub8NRS1+N93/z1+L+X1pa3pth4f/A2PhrbFRlHjywXy2Vv3yxb5JsLwPjzLxgzHo1X9vn0b3z8Nv7vkaO9p01e8+6cgxtnLEVrZxYAkPG+tcpZvnUghBDS/6iKOBeRc0VkmYisEJFphvoGEbnPq58rIuO98g+JyCsistD794Nan2e9MV/3/hxYjbX2NxZtbEFnNh9kV5n1VjPmrdmJCVfOwKrmvbjygQX4778tRGc2j6/f8xpu9Y4Qv+fl9fjrKxsikW6TyBZBJJ1hEDnX2nZZLC5ZJxwjzFGu1Wv9IpHzwNaiec4N+dOBqFf95dU7g+ugSRJRrNWHIipsHE3RWETAFSm3ikNLhN60vsRjJljb5/80T2tT/JsBnw2evadYphIg6tNObKVJqD2LzRv6vKONio1fbI1KF9mxNhEB7pWJVhfX9H5d3lHB70gqps4VgL+/tgG3zFqFnz3xltdGew6EEEIGBBWLcxFJA/g1gPMAHAfgYhE5LtbsiwB2KaUmAvgFgB975dsBXKCUOgHApQDujPW7RCk12ftjDvUNYDq68zj/5tn46l2vYr6Xlu2vr2zA7S+uAQAs3NiCBRtaALieXx9d3OqiVxfIvmjXM4vknTB7ih5l160lto2iQZmmMrJ5s8CP+Nb9TaDajUHecAMQx2RHsEVdzbmv9XpbualfuE7bGOffPDvSvtQ88fHN5aVJdINQZKDOrGYh6pHzPEprp5vX+7YXVgfjrdnRjkcWbLL2KTZrseBysTzz5j0HvnA31+kCPL42fUOoX6hbV+K/FxLp5153dOcjbUzrX7BhN344Y2nFWXUIIYTUFtWInJ8EYIVSapVSqhvAvQCmxtpMBXCHd/1XAGeJiCilXlNK+Z/EiwE0iUhDFdbUr9nS0ond7d1o7XIF9+wV24MP9ZSEETmlgMENboaS3R2h99jmEe82ZVqJ2Vr8SF63Jeodjb6HAt/XDxGfeV7vV7gh1N1IqgrWqYtsk68dQGhVSCA8w+wZ5gh5uSeElqOVkkXXbTcV5bW3ob+exbzNu7QbvCTowrcY0xdsxhdunx88/o5nOTKOmSByPm/NLry5ZU+idRSzp6jgr8KfTSSXuUm4q+iafPJaxD343Qqi4yrcOOr9Pkhsu+hObQ/Bp/8wF7+ftQod2aiQJ4QQsn9TDXE+FoCeDHuDV2Zso5TKAWgBMCrW5p8BvKqU0k/i+JNnablKip0iMsA45cancdbPngssHbrgTYkEm8i6c07w0Z7NWUR4voSw1l72nGMWHXrUu9MSAfflTSRqnzePERXihWPp+tEmJkNbgVnUR9oaBJhtI6neyhhVjdwMhKknbdK7VCTetla3b/GbhsK+pdcw+donSw9UYj7f/tLc2oXP3vZyovFs6ymss9fqlql/vXVu2KeYAFf2+dzNom5twXun1JixyoitJfjditUphYaMl8I0lhZSKWDemp1413VP4rFF7qFgGe/3vK2L4pwQQvoTNbEhVETeDtfq8iWt+BLP7nK69+czlr6Xi8h8EZnf3NxsatIv2dHWHYjs+GaxulThJrKoINcEsmFjp1Kh6NXviPKRg4BQ0A9w0y366J5zU+Q8a7kx0Of2bS05R2kCR+tnEeemqKbN9qCfJiqxMn2s+Hhh/+h1KVtLpK9W8R/3v2Est2h/6yp8K1NSynVFPLOsGTfMWFqy3V1z15U3sIct5SJQfK35Ij8nqwBHmMYx3kZp45ij434
//xuisC74Nsaw/iByHouKm2xg+nyLNro/1zkrd0TmyzvuWvx6Qggh+zfVEOcbARymPR7nlRnbiEgGwHAAO7zH4wD8HcBnlVIr/Q5KqY3ev60A7oZrnylAKfV7pdQUpdSUMWPGVOHp1CYz39yKXzz5VqSsy3TkPaLCMtjMaYmWRwRyqWwtTjiyLZ2h/hV7XrsJ0L3jprkjkfOctpHUz9aiZYrp1k8nzZkFjSkaXjpyrgnyJBlaSolapXmNLU30sWe9Zb651Jfd3p0rOf81Dy22LcfI2T9/zlJjZ/qCzcG1biHR8S0a5dIW81sn/dLMJup98WqieORcF+fRVibPeVgXvpfiFpa8oyL5/6Prtwzo1fmZW/LBmGFq0YcXbMb5N8/Gowvdn0tLRxYvrthuH5AQQkjNUg1xPg/AJBGZICL1AC4C8FCszUNwN3wCwCcBzFRKKREZAWA6gGlKqRf8xiKSEZHR3nUdgPMBLKrCWvdbvnD7fPzy6eWRMl8UpyQqrH0dkXOc4Fq3imQt1hJjthZtvlw+HM92uqcu1E3pDyOne+prstwwOCVEfUlbS5lRYT0fdTCWdm0SgKXyaBenvAV++g9zjeVJnqe/ATPOup3tZa0hzrk3PW8sr0tX54u5pOuz3US9/ZrHra/yttYuqwC3HzMUTYlYuJFUBUI7rAvfV8GNr19jeM8VzqeQ9sV47D3oOArrdrQBcDeBA8CX7pyPT986F53ZPHa3d+Pf7pgf8asTQgipXSr+9PQ85FcAeBzAUgD3K6UWi8i1IvIxr9kfAYwSkRUAvgXAT7d4BYCJAK6OpUxsAPC4iCwA8DrcyPsfKl3r/saa7W245sFFEQGaM0Sb0ymJlPuf3VmL+O3K6gK60FqSEi1bS4HnvFAsd9lSIjphW/8mwGpriVyHX/uHBxIhrDdYYOLELQeAPbIatLWIO72f6XCiqH1FFdTHx47ObSkv8sjnD8+vKtEiygd++myCVtWjWllE3li/O9GYxWwhtm4f+OmzwZivrtuNZ7X877oFpdCeUijc9Vzm8Vo9RWcYTfcOIfLXX+y5KS0LkNdMt7XE53nde826cg7+PGctnlq6Fbd5aVQJIYTUNplqDKKUmgFgRqzsau26E8CnDP2uB3C9Zdh3V2Nt+zPXPrIEM9/chgtOPDQo0zd/6eJcF+LBgT95x3iiZ7vuCzdka0mlJMzWAkQEeWBriYjzcLxuQ87zqFddt6TYThnV86P7wrm0qNdRBRfFPOcFTaPRcmUuL5ys0GcunuG+3BSIz7xZOnPoM8tqe49Fb6xvzQ57FF1/bxVGie3CV38PfeWuV6O9/G+KCqLqhfsa9Lbxt0noR9ezGEUx3TwGor7IjUdemy84ddT7V8+wVCwyTwghpHaoiQ2hxMwOT2Do9hDd0+1HwNMixnzfkQOB8pYxDIcJ6WJfJBwnm1eBxonYU2wHEuXDbDK6UA/XpPvMzeW+oHBUuCG0VORcz5YRiYBbxIlp82h0I6lNqBeO9eDrm/DmllbjPCZseun66eGGy0/+bk6Px+lvPGfx5QPFxee2VjcJ1ObdHZH3DxB9P+lj7OnIwn8HmTaE2vS+HnGPo9+o+m9oU/7+Aj+6iuZDj9RpN81+v1RwU64024xbduecNTjxB09AKYXX1u3C2656FM3e63PjjKW47M9hWktCCCH7HorzGsYkRtu0DYG+EE6lJLox0rvMWWwtbV3hGPoGQ79NWkTrK5o9xbFEzoufFprTNsFF85xbhHo+zBrjt9dFcZch5aNO9GAZ3Vpis7WEPX1sgUqT6LJJwngU3YRuo7CRRHhv2t1RulE/58ZH37TW+e+j5dv24oq7o9Fx283XST98uki2luLvkYLIuRYB99cSPyFUt8PE++uHcMW/hdFvlOM+9qz27Znf7+qHFqOlI4v27jxunb0anVkHL650N4/eMmsVnlyy1fzECCGE7BMozmuQO15cg/U724Poly5G27sKLSQpCW0oeUdp6QfNEWb
99EH91MduTeyHfVXQV/ecm7zvQNQiYPScJ7Cn+FF73Z9rOyjHZGtxlDkFXklbS+T+Rhds5uuwXynRbxfwxQRlOXzzvterMk5/RZfBT8TEp/4WKtwU6hK/KXOj1dEy3RMeby8Io9dxb7lom0X992g8cm56j+kRd5uNpivnhDcBXhs/i05nNl8g5uNs3N0RzL1xd0ciuxUhhJDKoDivMbbu6cQ1Dy3Gl+58JfjA1D3de7Wot36Kpy9Ss3knEK/RDaHhte457+guFPBpCW0tetQ7lw/Fgy1armdu0a0sQQQ8QdYVX/i7GTHcMl18RG0thaLF0Tzekei19mj0kPqwXBPy8U137vpR0FbHHjkPn/dvn11paUX2BcXSMRbLj27bZKq0yHmSNIuBkFb2tI55R9nTQRZZo+No7+3YyUZ57XyAeH71bD4U9SnDy7OqeS9O/dFM/PY597174e/m4PO3zyvqfyeEEFI5FOc1xo69rs98c0tHGP3SBG/UhuJ/2Ib2jmzeQXeuMLrdrQl8fQzTeHr2l4hfXFMFUXGuRd8NItsdo/DApKwhWq5fOyr8FkC/jkTZc4WRc1046ehll51+JADgyDGDEfiKI9lVbNHyQpFkWkOcpZvNucBJ33PVP8IsrfGotv++LdwQaj+8yCkiwB2lCvKZ68I9fjMQ5DJ3VMGhRfqa4xlcAmub4xTccIZ+dCd4XnGLDQBs2dMJAHjW29i70bNOtXXnkHcUjvrODNyqZQwihBBSHSjOawxd6KaCyLnuOTdbUnTrSdSG4rXVxtA3hJpsLX5fwBX9gT0lH0ak9bbRa9OanGCM6Kmgxa0qrpDRxLlTOLd5Q6iy2FqK20+S2FBMyWE2tXQWHZf0PbaDmeLEf2amPQ2A7w83Y/KjB9FrfUOoX+dv2NQsaQX9lAoexB3mehal+Om0eUcF/4+Elhn/MC9Hi6YX0pBJu+1iN5/ZvEJXLo+8oxKdFEsIIaQ8qpJKkVSPzmxo6fA/aHXB3q7ZWqLiXLeehELYv9bb6laWiFD35lEqFPh5FfWL+9pBX5PN4tJtiL5HbwDMAj+r9Qs8544WqU7gOTdHw+Nt/FSRKGijRzD1KKjt2wgT1Ob7P/774K2te3Htw0uCcj06Hhf0+s2haby4HcYXxjnN1hJEwoMTfot7zhGMFZXZJluORCLnfllsc6qjUJf2RLwhu41/IBJvQAkhpPowcl4j3PPyOkz724JAIANh5Fz/cNwbybTithURzWcees51j3inbo3JhmPoQr3TGy8XOzRI94uH4tzsMzdF1HWB32WJeluztXjz6baWUtla9GwZNq+6frhMeNR6KJh+oAkxfYx/1/Jgf19rY4TKZb9Ht2Hd9sLq4LqYAFcwbAjVotdWy4sm3MN+Xp3hZiAU9U5B5NwncpOJ6M1ANqcKbgJ8so5jTH8KeNa52E3x3XPXYfy06dZDwQghhCSHkfMa4coHFgIAzjxmDAD3AzyMnJsPEOqw2FqCvOSOE3ygRyPnWiQ+q0fiw4h11tHH02wy3gd8RJBbMrf41/m8OXIetbVo17nQytIzW4s5an37i2uC6ycWbwmug0im5aignkrsH5QS76TmsW0I1a0r8XeNyboS9isU9SkvRJJXJluLb10JLVWByC6SrSXIqhT5BsifL7S12KL/2bwK9lLEn0tXzkEmHVXzNz7q2lv2duZwwOB6EEII6TmMnNcYu9qzAKI5vnVh3WaxteS07CrBh6ruOc/qG0I1K0t3oa0lYmXR/OJ5pWdrKTwMCTAfSORmfHEK6m2R8y6DrcV6eJHFc246WVTnjQ0tWnv/IipmfHp6sqJpLLJ/YfsZqmIRcFUodvUIuC3NoilbS2BrcZwiwr3wtlL3nMctK/43ctm8+dwCwLVvxTeC63aYyLkK0KLxjJwTQkjFUJzXGLva3WwtIuEHXZdlM6cvsnPa18w53daiRdFtkfMOQ7kb6Q6tMX5UO69tDtWj5dHIeaFQd1S4ji7DhtGCMbK
+9z0UDbpIis5tEtPaJs+C2igC3daiIp5+QvKGk3eBaE7y0Gbi+7DthxDpN5x+k5RWV5CtRZ/Pib6pjSeLFqzf4Dn3/tX3pORizzObVxF7mbvOUNSb9noA4f8nO9u6rd86EEIIKQ7FeY3R4kXO06kw13g09aEesdZEeF4X05o9xbuOiHBbFF2LdGc1z7n/wR0V0Mk9544mspN4zoONpFqUUfeWl7K16HmmEwW9A197YSq9xGOQfoktcu7e7BazvBSLuEfL/Mh2rkgml7xTuBZ9I2lwYxCrM0fjQ5HtV5l85fq+E33M7pxjjZB3Zh105fJ413VP4uoHFxnbEEIIKQ7FeY3hR85dce5+AO7t0sW0tpkzG27g9MV01nHCa83r3WHJla6LZd/i4tpatEwreYNfvES0XG9jy9Zi8pkDodjXo5NZS2aXnFGch5aDWcubC+rjaK4WY350m32B9H9sqRQ/8ZsXsX6nm/c7fP9oeyWKWVcKBLj7r+5Vj2/UzDtOmMklthbd1hIIaU3wF4h6b8zunH7ib9zWEt6UB+kW/X75QnHuz9eZzaO10/3/5YFXN4IQQkj5UJzXGLu9yHlKJBCh7YYMLYAmzrUPSz1arotmm62l05BKURfTOct4kci5LZWi5n3vSeRcqfBE0ajn3HwdEtoK1u5oN9THWqtQVJmCoZTmA5efP/mWtW71jjYAQHNrV3BAD+BnAIqiHzTkKK0hQruIewiRea68U/itjn5Akd8vLvzdPSPRQfVDiILUobF++k1+3LbjRs7t3yj4+2L853z7C6tx7k2zzE+MEEJIAczWUgPo0V9fnDsqFKmm9IlAKKwdpWVr0WwtNhGu21p0kR14zmOpFE1Rb1uec1Mb/eRDmzg3ec6BMGKuR/ZMudR1HFWYQaMYSrsw2REYOCelOPVHM1GfTgFQxhNCfUuIUgrxd2zEghLrFxXuvliORrL1fvEouW6HCfOqh7YW054Ov67gdNRA1CurraUjmw/+v/Kfl59ytCuXDw42IoQQYoeR8z5m0cYWtGti1Le16PnKdUEesbVEouhhnnNfyNpOAm03ZGiJX/vCOuc4xhzlXTahbkirGD0VtPDgpII22nUusOjYxjB7zi37+Iz4usRkRwDCSCMhOrabtgdf22h9/+WdQpuUL7LnrNyBHXu7o3V+v7xTJJOLdopvzB+un87rd08FtpZwQ2g+9nuUy6tIStN4v2Ke872dfuQ8+ovT5tnzbn9hNVY27zX2J4QQwsh5n/Lssm343J/m4crz3haUtXS4kfNc3kHW+zRsK2FrAcJoc85RgbiNinNzrnTbyaFBjnIt53mHLWViiQ2hWVvU2+I/7zKU52y2llyhSFBFNuTF2b63O3h9dY+7zuihDYnGIgOL2SvM+xn+8fomDG+qi5SFhxApiBJj3aOLwtz78Vzm3394CUYPqffqoui2sbh3PG5R0+eL2loK05X6v/e+Dtcj7oWec/ffrlwebd3itY/S2plFXVrw/YeXYPyoQZjxjdNx+4tr8IVTJ6CxjhF1QgjxYeS8D1m93fWsPrFka1C2p9MV59m8CsRrmyVargtu3X8epC3U6m2e8+hJn4WivSsXfoDrY9ii6DrxY7+BaFRc/4CPnGSo6Qv95FPzGIVqWs/WUoqWjixmvrmtYF4d2lqICX9DqIk75qwNrh9ZsCm41r+dUQCeXroVqwxRZEe5YlZ/f2/3our+74pveenI5gtTImpRdScQ7m5duLFT4eU1O70xo4JbPyU4bqPpzjnG323A/T8i2MAeU+etnTl87x9uBpdNLZ342ysb8JPHluHPc9YEbRZtbME3733NOj4hhAwEqiLOReRcEVkmIitEZJqhvkFE7vPq54rIeK3uSq98mYick3TM/sC21i4AsU2Z2TBtYSDOtWwtehS9y2BV6dZ8pLb0iTa7S8Ti4q3D5EkH7LYWHVMaOl3oJjmkxxQR1IW8yXNe7ICYYtiWc81Di8seiww8bLm/r7j7Naz
xNo9e/eBivLhyBwB3I+kX75gfiG4dx1E44ftP4PX1uwvq4taVax5ajD97NwPx3ylHi5zHRf2OvV1Bu5yjIntbspp1xR8xSKWYdwrOF/DrOrq1DaGxde/pyOLB190ble6cgz2e/WX6wi14dd0uAMDNM5fjH69vwpLNewqeNyGEDBQqFucikgbwawDnATgOwMUiclys2RcB7FJKTQTwCwA/9voeB+AiAG8HcC6A34hIOuGY+y2d2Tx2tXVj/U43k8im3eYInP9h32ZIn2i77rREy3WRraNbXKLWklDsB+NZBHlPo1wmS0pBm+BApeRz2OwppVEYO6KpJx0JKYp+rzh9wWYA0Y3ecZ72vs0xkXcU5q3ZibU7CzMR5WOieXNLZ+RQMiCMgPvf0vn9fK84APxq5nIs29IazAeEor4751h/dztzDrbtcUV/KiVYvrU1qNuwK/r/3Bvejccb63fjn37zIra0dAZZb+6euxbfvPc1rNi2F125PNbvbMd1jywJ/t9pac/inpfXMc0pIaRfUg3P+UkAViilVgGAiNwLYCqAJVqbqQC+713/FcD/imt8nArgXqVUF4DVIrLCGw8JxjRS7LS82P6kgg1LPeWRBZvwnvEjsXp7G449ZBg6s3kMbshgcH0a21q7MGpwPXa0dWPN9jYcc/BQTL72SQDAO8YNBwDsas8WGz5m8wgfRE7NNHxYRjzituwK3eaod6dBzHdbrCw9PQjQtiYdf54kUXafnkbOn1pqF0SE1ApPLNkascLp5GJR9V8+vTyoyzsK//3XBUF60T0doRift3YXBjeEHwcvrdqJl+BaXhyl0NKRRat3M9GZy6MpH3rE27tzwf9hndk8bnrKnXN3exYf+kWYQnH5tlCoA9GbAwA45cang+v7528A4Pr3RwyqC7JY/XH2agxtzAS51O+euw5LN+9BzlEYM7QBF7/nMKzc3oZTjhyF1c1t2LCrPXitfvLP70DOUWiqT6E+nUZ7dw4tHVl84p1jMXf1TkxfsBn/fuZRWLChBZMPG4GNuzuQdxycfexBeGXtLjTWpTF6aAM27upAW3cO7zr8ADTVpdGVy6Mjm8ewxjrsaOtGXVqQFsEBg+rRmcujvTuPxjp3vhFN9ejozmNIYybYbN9Ul4ZINFd8W1cOg+ozaOvOYVhjXWCHyjkK+bxCJi2oS6ewtyuHpro0snkHQxoyVftMI4T0LdUQ52MBrNcebwBwsq2NUionIi0ARnnlL8X6jvWuS41ZwMKNLTjqOzPKWnwx9P/nCoV+mGO4pyzY0NLjvklIok9tolcX9iaq4Qm1ZXwwUY7W7nnknJD9m51tXfjDrFWBZUSnrTuH6Qs3B49bNXE8661mzHrLvME17yhc+cCC4PFPHlsWqT/u6seD62LnCvzh+dWRx0nOIADC9LI+rdpzW7gx/D+0ubULv5q5AkD47YTOt/+2oKAMAK6fvjS41l+ffY0I97cQQlz2+2wtInI5gMsBYOTYCfjWh44GUPifXHyLYGF9DK1BvE7v6yiF3zy7MlL/pTOORFN9Gns6chBxLSCD692X+pZZq0o+p1qgkpuOpPTWFD2NnBOyv/PMsmY8s8wssp9fvj3y2CTgTbR35zFj4ZbSDQH8/bXSp4IOb6pDS0cWm1s6E41ZCxw+chDWxWxEZxw9BieOG44ZCzdj3AGD0JXL46VV7rcNTXVpfOJdY7F5dwdyjsKaHW1Yv7MDRx80BIePHITDRg7Cq+t2Y/yoQThy9JAgN/72tm401aUxtDGDp5duQ1t3DmdMGoMxQxuQSQnSKYGjFBZv2oNjDh7qnuosgkH1aW6iJaQP+MaPe2fcaojzjQAO0x6P88pMbTaISAbAcAA7SvQtNSYAQCn1ewC/B4ApU6aor581qWfPogK+fe7bSjfyuPIjxwIAplz/FLbv7cLoIfXGDWG9RTolyDsK9emU0VZiKy+HurQYs6iUigylpDqCvZxsLYQMFOI33K+s3dVrc932uSn4wu3
zAbi/93f/2ym4+A/ul6R3X3YyPvqr2UHbS04+HHfNXQcAOHLMYKxqbgvq7vziSdi+twuTDhyK22avxtEHD8VhBwzCim1uhpt3jBuORk/MHjVmCBQUBtVHP9ZueW4ljhg1GO8/egxSKaC9+1fP1AAAIABJREFUK49BDWk0ZNJVORjp/334mIr62/jm2Uf3yriEkOrxjV4atxrifB6ASSIyAa6AvgjAp2NtHgJwKYA5AD4JYKZSSonIQwDuFpGfAzgUwCQAL8N1kZQac7/moyccjDvmrMUZR4/BA6+69x2ZlJTlrY5Tn0kF0ROb0B1Ul0ZrVw6NdaEI10WzXt7jdaRTyOYLbTH16ZQ17SIA1JWoT0rOUb1uGSKE2Dn6oKHBtVLAKC9HO4CCHPDXTT0eJ4wdjrmrd+LnF56ICVe61sR53z0bY7QzBn7+L5N7tJYvvf+oyGNdjPPEUkJILVJxthalVA7AFQAeB7AUwP1KqcUicq2IfMxr9kcAo7wNn98CMM3ruxjA/XA3ej4G4KtKqbxtzErXWktcdf5xeOpb78cpR44KykYMqi9oV58Jf0QNmcIfV1o7vrJRq9cP9cjoberd8qb6sH6Q1lYv1z33+tx1aTG2Ma1Zb2tav61fEsptTwipHiMHh/9fTRg9OFI3tCEqwEcNjorzey8/BQBw8UmHIZUSXHTS4fjFv0yGiGDsiCZc8YGJEWFOCCEDiap4zpVSMwDMiJVdrV13AviUpe8NAG5IMmZ/IpNOYeKBQyLHWA9pSGP7XmBQfTpIcTikIYOdOdf2Mqg+ja6cg8a6VJBNpakuHaRka6pPBz7SprpwjKb6dLCJqskT4k0RQZ5Bm9fW/UrYTYXWmEkHG0Mb69JBVLsxk0Y2n4u00aP2bjQqG1z7bRvq0oC3DtO3BPXpqKg3WWPi7emzJCQ5QxsyQdaVSjn3+INxt2dHSacEH598KP7h5TEf3BCNSOuBhyENGZxy5Cgsv+E81KULb7BfmPbBqqyPEEL2Vxh67GNGaF/x+l+xDm0M75kG6RFuz0upC2s9Qm4rH6x5MP02kfoG/Wve8C2hz91Yp0XlDeUN2oesHtGOXFvalGobaZMujMqnmD2MkIBLTj7cWvfrS95V9niXvvcIAMB/nXMMFnz/w0H5tPPehuPHDgMArNi2Fz+7MLSdZNIpfOSEgwEAZ73twMg3fH6mK5MwJ4QQ0g+ytezv6BElX+gObazDVu8gjyFa7mH/elB9JsgrHBHNFnuKL7LTKUFdRgrbWvo11acBb2+W7s0snDPrCmvvwMEktpb6TCqI7PubUOti4r3NkIO9Li3wi/329ZmUMS87IQORay54Ozqy+WAvi85JE0bi4pMOx+feNx7n3DQrUnfjP52AZVtacfuLawC4/9+cdeyB+MHU4/G5UyfgiJGDkNJE9pD6DG78xDtwwf+6mzvTsbvk31zybqzY1opDhjcFcx8+clA1nyohhPRLKM77GN236QtgXZDrB4M0GfzijZZouS6gB3mR8bq0IJ0KBa2fuUWPkOtC3RY519v4grvB4o0vFUXPpASpFIC8va1ucanLpOCr8/pg7jTFORlQ/OOrp+Ljv37BWFefSeHnF07GdVOPx9uvcXOQf/vcYzB7+XY01qVx4z+dYOx38UmHQ3kHDl045TC896hwP4zuKR/WmMGezhxSKcEJ44bjL19+L44/dHiwrl1tYfapiQeGG0Pv/9J7e/6ECSFkAEFx3seMHlKPE8YOx3uPGoWlm/cAMNtaRELRaxPTtmvfDlOXSqHOi27Vp3VxXmh7AVwvuk80ch6K/YwnsvUbA5s9xSTa0yn3ND13PJs4D7O/ZFLR8nhbQvoLj3ztNJx/82xj3fhRhRHo+D4O/cb+i6dNwFfOnFhyThHBL0pkRXnyW+/H9r1dweP3jB8ZXE8+bETJOQghhBSH4ryPERE8dMWpEBF84fZ5AIBhjaEP3Y+iC0Ix2qBFvW2WlEZDBLwuk0Im7QthQSYl6EZ
M7FuyuOjCOhTnqUBY273lxUV7JiVIlRjDfd6+BUaCfn6mmDqazsl+ysQDhwQ5u+McOWawsRyI/n77PPOfZ1rToJr2cIwYVFdw+mYSDhrWiIOGNZbdjxBCSDIozmsAf4OULzH1yLke/fKFdX0mhYwnzvXNnINKiPNMSoLIsy6sS/WLlzfp4twTxomsLIbrTDoVnOZZr/nTdVGvR9TrDBH3DDeWkf2ERT84B8d7VpP/OucYfPSEQ3DmT5+NtPnLl9+LRRtbIjfePjP/3/uxpzOHhkwKo4fU4xtnTcJVD7pZZg8a1ljwLZJ/sqUYcp5O//rp2LS7A21dOeNchBBC+gaK8xoi74nUAzQf+uDA1iKBSK1Lu+K8C1HxHvGq1+tRb/e6Xo+cZ1JIe9dNFltLY705cu5fu7aWUCD70fzIIR8RW0thFD2TkuCwJJutRRftfpu0d5S1PwYhtch3P3IsbpixNHis/45+9QMTsW1PeIT9ieOG440NLXj34QdErCI6R44ZElzP/96HACAQ5/rma58HvvI+bNXm0Bk7ogljRzSV8WwIIYTsCyjOawj/eO2RWgYXXXz7H7716VSQNUFPk6h7x/1+IghsI4116SBy3mCJnFttLd6G0Pq0bo0JI+d1aTeSnYeKHliUsWRr0UQ2vG/iIwcuGaLl/px+Pz8aGM8SQUit8Kkp4zB18qE46YdPG+v19/yDV5xmHee9R47CnFU7jHUPX3EaXli53RgdHz2kAaOH8DAfQgjZn6A4ryFyeUPkXPOcZ7RMK16QPRY5Txf0c60sofUkkwqFteMNMjiJrcWLemfSoUc8k45u5vS1QfRUU4vnPBN+C6A8dV5sQ2hQrnnO/WKKc9LXnHH0GMx6qzl4PLQxExz8VWzDcpLNzIePHIQ/ff49wWFjcU4YNxwnjBte5ooJIYTUKhTnNUTOcUXqyMHhhtDBWraWIDtJOhVE2QcbBDkQCuuUSGBfaYjYWiQYw2ZriWRr8SLnaZGIwE9r136eCFOEPH6tR8BThsi5TZyH/VLGLC+E9AUTRg2CnzV8xtdPx2OLNuNXM1egqT4Ngf3m0XbYls/z3/4AhjXVobEubdwESgghpP9BcV5DHDFqMOat2RUc2gG4BxIBvuc83BDq+9NNVhYgtLukI5HzdMS37nu9o6kZzSeE+hFwEQSWmrpUKPYzKQk2dkZtLVoUXRtb94vnfJGtRcDrDdFyf91uf9DWQmqGyYePwFNLt+HCKYfhuEOH4dhDhuJrZ02K3Dge5WVfufnidwZR9lKbmQ/joT2EEDLgoDivIa6bejzOPvYgHHvIsKDMz9zSoHm96zMpOJ6yjlpZCr3j+sbJhrow0q1H33UxPai+MPru9/XHCyLnmfBQozrNamNNq5gOx/P1dCYtkJz7QD99UBfyusAJbwZSxg2hR44ejFXb20BINbnoPYfhrGMPwmV/ng/AzZrywZ89BwB46lvvx8QDh+AT7xwXtNdvpgFg1n99ACO8b8QuOPFQXHDioUHd1z84EadOHL0vngYhhJD9AIrzGqKpPo1zjz84UhamHJSIrcUxRM71a/2QH91zXqdt5gzEeZ05ut1kiJxHs6Sk4OuPupQEtpYkqRRTQdQ7hbTna8lYIuf6mvSNpH5zPXLOKDrpDW74xAm4ZdbK4LGeNeUIw4FAcQ4v0uZbHz6mssURQgjpV9CsW+P4kfO8o8JDiOpCS4oeLdfTtGW0qLIf3W7IpAMrSF06FXjc9Yh2kzXPuTuGiESi7/7YGe2GIZJK0ZCCEYjaWkKRbUulaPGqGzznzHlOKuHikw43lgtg9Y7zdpAQQkg1oZKpcUYNdtOgZfNK2wSqb9Q0bwj1xW88cu5bT+oyYX5xXQg3Rfzn4Xh+tpa06If/RDeH+iSKnGvrC6/D5x0V+NqBRPohRNoYwRyGXM+EJOWYg4YYy0WA0yeZrScpQwpDQgghpKfQ1lKj3Hv5KXAchYOHN6KpLo1vn3sMNuzqAIBAHAPRaLT
uP/fTLqYl6jn3BX69RUzbIud+m5SEEe66dHiQUV1aQs+5JUNLg8HWUqenZrREznVby2DttFO/n+5kYeSc9AYigkOGu0fWjxhUF6vrixURQgjpr1DJ1CinHDkK75s4Go11abxxzYfx+VMnBIK1O+cE7YY1hkJB95wHUeV01CPe5fXV2zZYIue6OPfHSKUksjHVFDmPeMRtthbNL54ORLYY+5luJPSIu96PnnNSCUcdGEbOH/3G6ZE6/33m34T6mA7/IYQQQnoKxfl+gC9IJ3qb0E48bERQp0fx9NNCfdGsR84zKQnEue5V14VwY11hxhcgapPxRUq9dspoJq1bS2ziXMvWot0w+NpGH6PBEjlPaekT/ea6OCqVN5oQG6t++BGMGerayI45aGgkaxIQRsj9lKGXnGz2pxNCCCGVUJGSEZGRIvKkiCz3/j3A0u5Sr81yEbnUKxskItNF5E0RWSwiP9Laf05EmkXkde/Pv1Wyzv7CJ945Fo987TSccfQYXPSewwAABw5tDOp1cetHvfNamC+dliDqrotwmw0lEonXotthtDyMXttOAm2weM51UZ8yRM6jBxnpKRgLbxKGNWo3JfScEwCfOeWIRO2e+tYZwbVIuOnTiYfHEdb5NddNPR7LbzivsoUSQgghMSoNM04D8LRSahKAp73HEURkJIBrAJwM4CQA12gi/qdKqbcBeCeAU0VE/6S7Tyk12ftza4Xr7BekUoLjx7rHdF879Xi88r2zI4JXF6ZDPMHalXUinlg/cm7LohKJgBuyq6T0Q4jSYb71BkuE3BQtd8fRs8kU5iuPRvP1yDmCtsGNgVafoa2FABg5uL5km1GD6zHxwKHBY5Ewc1ChNEeYlsWrTKWEp9MSQgipOpV+skwFcId3fQeAjxvanAPgSaXUTqXULgBPAjhXKdWulHoGAJRS3QBeBTDO0J8YqM+kMGqI+xX8l95/JL542gTUeRsqpxxxQJC5JaN5uh1HoSubD/r7RKLeabPQNWV/yaQFOYM4r8+E/XRhrW9kjWRrkWhZ4XiFWV5SWtYY3WeubyolA5c9nVlr3ZGj3ZM686bouBSJnPO+jxBCyD6g0mwtBymlNnvXWwAcZGgzFsB67fEGryxAREYAuADAL7XifxaRMwC8BeA/lFL6GHrfywFcDgCHHz4wPaBXnndscP3kf5yBsQc0oa3LFeEfOu6gIEqdc1TgI7fZUEzpGPXrlOZhT4kEudJ1m4yerzxa7vYTCdMmZiw3A1avum6H8cfQ5qvLUJwPFMYMbUBza5exzpbecOrkQ3HV+cdhyvVPIZ8vFOBB5NwYOvfqyl4pIYQQkpySSkZEnhKRRYY/U/V2yt0lVfbnlohkANwD4FdKqVVe8cMAxiul3gE30n6Hrb9S6vdKqSlKqSljxowpd/p+x6SDhmJQfQZjhjbgmf88E1edf1ywye3og4biuqnH4wunTsCJ44YHffTNoRGLi8GGoovzdEqQzRdGzvV+kci5Lvb9VIopCSKY0Xzl5jXpBxalDJHzugFoa5l0oDk3d38g/uPUN0O3tNuj47asPb+86J3B+6ncyLn/vh53QFPxRRNCCCEVUDJyrpQ621YnIltF5BCl1GYROQTANkOzjQDO1B6PA/Cs9vj3AJYrpW7S5tyh1d8K4Cel1kkKmeB9fX/BiYdiSEMGZx17IEQEV19wHADg2+ceg588tgxDtXSMeuaTqF3ET6UY3Rzq501vMETIgWjU2w+SizZPJp1CziDwS2Vr0fOcmyL8A4n+HMkVkSCMPXZEU+TGryGTQnfeMfar0/ZO/MuUw7B0yx4s2NACIPymxbdk6cQj5wcMqsMu7yZgUH0Gv/vXd+FdRxj3vRNCCCFVoVJby0MALgXwI+/fBw1tHgfwQ20T6IcBXAkAInI9gOEAItlYfMHvPfwYgKUVrnNAk04Jzj6u0HH0lTMn4itnTgTgHlu+ZJMrXs4+9iAcOWaw0S6iVOi9dW0thcK6zpIS0RfTokX
f66y+9eIpGOMR/Pg6BxKqmAejH/HCtA/iwlvmBI+v/8Tx+Ma9rxvbvlsT0D/+5DuwpzOLTbu9Q7xS4R6MOKlY5HzGN07H8q17g/pzjz+kwmdBCCGEFKdScf4jAPeLyBcBrAVwIQCIyBQAX1ZK/ZtSaqeIXAdgntfnWq9sHIDvAngTwKteJPV/vcwsXxeRjwHIAdgJ4HMVrpOU4MZ/OiG4vvXSKQCihx35J3Nm804gXNIpIJcvzP5i85z7Yl8ALUNLKhij3irOC20tKYlG0X0Goq3FoDH7DfEbD/9nfde/nYz3HTXKKs79FKP+BudhjXUYdnBdZIzPvW98QT+JRc4PGd6EQ4bTxkIIIWTfUZE49+wnZxnK50OLhiulbgNwW6zNBoTJyeL9r4QXXSd9hy56/Y2bmVQKvpMgGjlPG/tF0iD6thZNWKdT4RhJxHk6iL7r/vPCdVaLg4Y1YOse86bDWsHkj+4vpEQiz8//WWfzjvVkzge/eiomej5802uTSglW/fAjgRB/28FD8eaWVgDFPeeEEELIvmDgeQBIYlIpwUHDGvDNsydheJMbdTx90ugwuogwcq5bWaInfWqec19YI0yfWJeWIEoZzY+ue84LbS3plAReaz1FYzl5zoc0lL43vfWz70k8XiWcOnFUj/v2NyE5dkQYqU7Ffp6BJaXIc5500JDIe9REKiWBEL/v8vdi+tdPAwAcMqwRnznlCNz2uX3zcyeEEELiVGprIf2cud8J9wM/+o3TMenAIbjthdUAXM+uvyHUlmu8wXCQEUSztaRTgdDSc6zbhLrud/f1WVq7GSjnUJhB9Wns7coBAM55+0F4fPHWgjb7wwZTx7wncr9FF97xV9+/EbPsA/X6iNau9I3L8EF1GD7IzV6USgmu+/jx5S2YEEIIqSKMnJPEHHvIMGTSKQxpcKPogxoygSVFF+SD6kNhrYtlX+jm8k5ga6lLhbYFm5UlMkbEDuN51fVc6enyxLQ/z1FjwnSEUycfGlxva+0sa7xyeIeWznJ/YlhjeE//zbMnVX38Yl8E+O+huOh+/eoPBdci+8dNFSGEEGKC4pyUzbnHH4xLTj4cXzx1QiCkdFHcpG8C1cpDS0K40S+VkmBDo81zbsq3nk5JsGE1ctBRmcc4fu2DE4M1+eg3F/vKMlLuNHp0d1/bWnSb0UdPOARXfGBiVcePRM4FeN9Ro/DvZx4FoFCcX33+cbjnslMwYlB90CetWVa+fe4xVV0bIYQQ0tvQ1kLKZuTgetzwCTe7y88uPBF/nL06chBOkyVyrp/a2JVzgnpliJzXGyLuQGhrSWs51iNty4icOwp4colrZbnjxTXaHOEYR46uzQN+Tho/MrjWb050Jh44BCu27TXWVcI9l52Cs3/+HIDoa/X5U8fjTy+sqXj8uBPl7stOCa59D7p/gNAXTptQ0N9/z6350UcrXgshhBCyr2HknFTEsYcMw08/dSIy6VSQms5qSfGE1RGjBqEzmwfgbsp0/j97bx4n11Xeef+e2rfeW2q1Wi1rt7zgVdiAcVjMZjaTDBAnhCWYOCEkhIQsQDIJE14mJEMg5CUvec0yCQQCBJiEMMkQSMJkNQQYwDY2tmxL1tKSeu+u7trrzB91u1Vqd7dUX6lvt0vn9/noo+qqOnXvPffcc57zPL/n9ywmhJ7+brOBv5z3PdrkcW/+PB459yE9li/pxEyDtlIIzkc6k+e8oz+rX3ruPr3ywLZz/t1zhXOnq002O79/+Nqhs7dtSnV87yuuWvY7a0Xs2LM5p44gmTYaMY3lG2o2o7MXStXm9LX1NnnEpdNRlOU0yj08PDw8PNoB3jj3uGD4rZdcrofefavMTMO9aW3qSC7r9U5EIyoHVUFj0dOc85UqhDZTVRa875EmrnqzfOJSdY+zYTlGyFJmzJtv2atLt3S29LvnAjMtSwnZN9Bx1rbOScO9DcN+Uy61+P6lTW1bZPi0hGalnK/e3ygM/KXvjazcYBV0pM4M4C3
Y3S++alCf+emnnvHZwlhYrrrnuWxqPDw8PDw8Njq8ce5xwWBmi57yr/3ys3T328+UwK9UAyM8Hln0gFZrbtELmohGz/itBSyX3HcmreXxUoqXnoOBK10YvvZHX3tAL7hiS8vtmq/KrSD69+ZbWku4bDbIbc185805A623vWX/5jP+/tZvPPeMvxc4/z9+43YN92bO+OwpuxuSk1s6U1qK33/F1Tr47ltbPyEPDw8PD48NBG+ce6wJohFbNKpf89RL9OHXHFikjiRj0UUqSqVWX/ReN3POm9GsBNMs3bjoOY88njoz0PV44205lKqP1+Rb1pu+QvvXPW2HbrlsQH/86uvPeP+Zl2464+9fOIuR3XwNzQb2FVs7deVQw2vfrJJC9hTXDHe33mgFLBw+AtzzXZn44uvLBzvPuO+ffMONZ+icL8WPXDukL/380/X0vf2P+ywSsQtehMrDw8PDwyNs+JXMY83x27ddqedePqCrtnXpyTt69F9eesXiZ80VIFcyzpuTPBe+29yumXN+KuA9/9ODo+d0bssZ58t56pczBiXp5def5qI/u8kjvKMve8b3XhZQLhY45gtYsG23dqcWvf7NRzedNsQv37o8tabZ69583LWktSz0fUMZ5fT7H7j9mrO2TaxiQN+0p3/xapbz/MeiEV059MSUoPTw8PDw8DgXeOPcIzSk4lH9xc88TVcOdS3SEvo7kstKKTZjsbLoElttwYvenHR6/8jMWc/j116wf/H1Ndsa3uTOVEw/emBYks6Q5VvAvoEO/clPnq4a+Z4feZIe/Z0XnmEoNleVfNut+x9H35CWVDBdckE/edOOZc93wTh/262XnfH+csbre1959bK/8bhjr4C+7OOvfTks3LOlnvMrtp7uj/etcC4vvuq0jvxyG4hFyoyXKvfw8PDwuAjhjXOPdcEdT9+p//fHrtVLrhpc9Div5FFdVGhpsuSc3KKB2OzpPhe6R7O3+2M/+WTt2ZzT/3zzzRrsXp0Ks8CDv3lvv26/YfsZvPgFXB0UFkrFo/poYKy/9bn7ms777Fj6swte6njUtH9LR/A7y/9SLhnTzYGXf0GP/L2vuFrv/9GrNbgKXaRlNPW9W6JLvoAfue50P2/uSJ4+x9TqCq5PDmQi+3LJVb/n4eHh4eHRjvDGuce6IBaN6CVXb5WZ6fdfebX++VeftQrnPFBosdPFZZyTvvHohKQzqSkvfNLZEzObjflcMqav/tIzHpd4uBwWWq22AfjkTz1F//tXnrn496H3vEg/f8veM3zcv/L8RmGcpRzys20sTHbGhmDhMpyT/uebn65PveFGSdIfveo6feKOG9QbcLt7s3H98LXbzin5dSUqTO8Sj/rC5iBiZ573QvOlG60Pv+bA4utENLJ4rssd763Pu1R/+ws3a8/mjakx7+Hh4eHhsZbwxrnHuiMVjy4axx+4/ZrFapD7t3To5ddvW5RHNNMZXtoFHJ8qLL7etalh0O3qzz7uewtotYroAhaareS1lhrG/iV9Kx9b0hlUmIXLMZNe89Qd2tGX0cuuGdIvPqfhbR/sSp/m2S95Wj/y2ifr9Tft1CV9GV2xtUtP29PwmHem4rp575kJqdJpbfAP/vi1q57fguHcjKXKOwvnvVS6cmHzsNSYv3q4W7s2ZRevY8F7vhw1JxoxXTZ44aUrPTw8PDw8ngjwFUI9NhRuu2ZItwWv/9dbfkiSNFusSGoYbVsCFZZrt3frT/6t8b1c8vQwXvCyr+YjtnPYkv7K8y9VeknlzQWPe4zoBwZYyXltMm3vy+hrv/IsSdLPPXuPnnP5Zl2xtUs/+uTteteXvq/BzjNpKXs25/SbL7n8nI/94zdu19/ee0LXX9Kj/Vs69MCJWT338gHd/fC4ZkvVxTNZMPKbsTSqsZgQanZGXy+Y2qvtfyJmj+sHX83Tw8PDw8OjAW+ce2x4LBjDV2/r1oueNKjhN2V09XC3rt7Wrd/84n265bLTiZcLyi3LedgXcC6
e8zctUxzoxp19+uln7NIdNz2+ZPzZsNIh3QqfRyO2mFx5x9N36vU37TiD0kKkFG/eu2nRCP7QT1yvj/3Lo3rnS6/QM/7bPzYZ5+eGZinF5r6ONNGOlsKW/C+traKMh4eHh4fHExHeOPfY8Egnovr8G5+mfQM5mZmuDvS6d/Rn9fHX33DGdxcM+aUFJK/a1qXvHZ2WtLxUoiS9/uk79dj4vN5w8/LGdzRievsSxZRW4eTOLD7URGtZDQuG+cuu2ar7R2YWIwgUO/uzetfLrjzjHM7lPBZwmtZyZpRitfbNm4vZYmMzsHBPPDw8PDw8PBo4L865mfWa2VfM7KHg/54Vvvfa4DsPmdlrm97/mpn9wMy+E/zbHLyfNLPPmNlBM/u6me04n/P0eOLj+kt61JGKn/V7C6Xgd29amfe9kgHZmYrrfT96jTrP4TitYoFb7dxpikguGVuVv74c7vyhXXrgXS9Q/zkomSx43jd3rG7IN2uvt+rIji6hqKzGy19MqJX08Gi+xSN5eHh4eHhcHDjfhNC3Sfp759xeSX8f/H0GzKxX0m9JulHSDZJ+a4kR/yrn3DXBv1PBe3dImnTO7ZH0fkm/e57n6XGRYKAzpU/ccYP+8MfOTHps9pbThNDzwQLdpisd1407e/WOF+7X7/zIk5q+cW7nZGZKLeHCr4S3PGev/vJNN521aE9Xmm9GohHTmdLtZ78O505LK66WuOvh4eHh4XEx4nyN89sk/Wnw+k8lvWyZ7zxf0leccxPOuUlJX5H0ghZ+93OSbrFzWfU9PNTgVi/1sv/EjZcsvl6J1rKW2Nqd1jtfcrk+8toDMjPd+UO71Z1JIO74uSIWjeiagAK0Gs5FYnEpXv2URn8204z+6w8/6Qx5x9WP2fh//2BHy8f28PDw8PBoZ5yvcT7gnBsJXp+QNLDMd4YkHWn6+2jw3gL+e0Bp+c9NBvhiG+dcVdK0pL7zPFePixjNRuB67fNed9NODXYtXwhoPbeeB4KiP62cx2/fdoUeevetZ7y3pSu5rDTi0t92cqrWG9r0SyuMenh4eHh4XOw4a0KomX1V0nKVXX69+Q9pFFWcAAAgAElEQVTnnDOzVl1wr3LOHTOzDkmfl/RqSR9v5QfM7E5Jd0rS9u3bWzy8x8WC1YzG9cQaOs7PGXfevEvXDHfr9rvuPuc2ZqZ49DSPfuG90wZ4Ay+6alBjs6XG5028+4XoRSZxbhQdDw8PDw+PiwVnNc6dc89Z6TMzO2lmg865ETMblHRqma8dk/TMpr+3Sfpa8NvHgv9nzexTanDSPx60GZZ01Mxikrokja9wfndJukuSDhw4sBFsHY8NiAXO90bDM4JiQa+4fttZvrl2iERMl/Q1ikAt3cT8zDN263lXNAJi127v1nDP4yupLtBiTI9nzv/Rj1+3+NqaKC8vuGKL3vjM3fqZZ+y+MBfh4eHh4eHRJjhfWssXJS2or7xW0l8t850vS3qemfUEiaDPk/RlM4uZWb8kmVlc0osl3bvM775c0j+41YSrPTyWweff+DQ957KGYTnUvTydZL2xvS+jQ+95ka7dvqzQ0brjbbfu13XBuf2Pn73pcYm2zWhFhz0WjejXXrD/vJJRPTw8PDw82hHnq3P+HkmfNbM7JB2W9EpJMrMDkn7GOfcG59yEmb1L0n8EbX47eC+rhpEelxSV9FVJHw6+81FJnzCzg5ImJN1+nufpcRHi+kt69JHXHljv03hC4Hy2vpdv7dQ/PzSmTbnkqqIzC8Z7q/KRHh4eHh4eFxPOyzh3zo1LumWZ978p6Q1Nf39M0seWfGdO0vUr/G5R0ivO59w8PM4Gz3d+PEh+5q8871LdeuWgLt/aqVMzxRW/t28gp/tHZpRN+NpnHh4eHh4eK8Gvkh4XDbZ0pnQiMB4/+YYbtcNrbC9iwSgn6inNko21wAUfXYYw954fuUqvPDDs+93Dw8PDw2MVeOPc46LB3/7CzYuEipv29K/ruWw0bOlM6Y3P3K3
/dN35JaYu0GPiy1jn6UTU97uHh4eHh8dZ4I1zj4sGPdnEep/ChoWZ6ddesP+8f2ewK6U7f2iXXnlg+AKclYeHh4eHx8UHb5x7eHg8Dh973QHt6GudfmJmescLL1uDM/Lw8PDw8Lg44I1zDw+Px+HZ+5cr9uvh4eHh4eGx1jhfnXMPDw8PDw8PDw8PjwsE7zn3aAvc/fZbVK7W1/s0PDw8PDw8PDzOC94492gLbOlKrfcpeHh4eHh4eHicNzytxcPDw8PDw8PDw2ODwBvnHh4eHh4eHh4eHhsE3jj38PDw8PDw8PDw2CDwxrmHh4eHh4eHh4fHBoE3zj08PDw8PDw8PDw2CLxx7uHh4eHh4eHh4bFB4I1zDw8PDw8PDw8Pjw0Cc86t9zlcMJjZrKQfrPd5tBH6JY2t90m0CXxfXlj4/ryw8P15YeH788LB9+WFhe/PC4tLnXMdF/pH260I0Q+ccwfW+yTaBWb2Td+fFwa+Ly8sfH9eWPj+vLDw/Xnh4PvywsL354WFmX1zLX7X01o8PDw8PDw8PDw8Ngi8ce7h4eHh4eHh4eGxQdBuxvld630CbQbfnxcOvi8vLHx/Xlj4/ryw8P154eD78sLC9+eFxZr0Z1slhHp4eHh4eHh4eHg8kdFunnMPDw8PDw8PDw+PJyzaxjg3sxeY2Q/M7KCZvW29z2cjwsyGzewfzez7Znafmf1C8H6vmX3FzB4K/u8J3jcz+8OgT79nZtc1/dZrg+8/ZGavXa9rWm+YWdTM/o+ZfSn4e6eZfT3os8+YWSJ4Pxn8fTD4fEfTb7w9eP8HZvb89bmS9YeZdZvZ58zsATO738ye6scmh5n9YvCc32tmf25mKT8+zx1m9jEzO2Vm9za9d8HGo5ldb2b3BG3+0Mws3CsMFyv0538Lnvfvmdn/MLPups+WHXcrrfUrje12xHJ92fTZW83MmVl/8Lcfm2fBSv1pZj8fjM/7zOz3mt5f+7HpnHvC/5MUlfSwpF2SEpK+K+ny9T6vjfZP0qCk64LXHZIelHS5pN+T9Lbg/bdJ+t3g9Qsl/a0kk/QUSV8P3u+V9Ejwf0/wume9r2+d+vSXJH1K0peCvz8r6fbg9R9LemPw+mcl/XHw+nZJnwleXx6M16SkncE4jq73da1TX/6ppDcErxOSuv3YxH05JOlRSeng789Kep0fny314Q9Juk7SvU3vXbDxKOkbwXctaHvrel/zOvTn8yTFgte/29Sfy447rbLWrzS22/Hfcn0ZvD8s6cuSDkvq92PzvMbmsyR9VVIy+HtzmGOzXTznN0g66Jx7xDlXlvRpSbet8zltODjnRpxz3w5ez0q6X41F/DY1DCMF/78seH2bpI+7Bu6W1G1mg5KeL+krzrkJ59ykpK9IekGIl7IhYGbbJL1I0keCv03SsyV9LvjK0r5c6OPPSbol+P5tkj7tnCs55x6VdFCN8XxRwcy61JggPypJzrmyc25KfmyeD2KS0mYWk5SRNCI/Ps8Zzrl/kjSx5O0LMh6Dzzqdc3e7xor98abfakss15/Oub9zzlWDP++WtC14vdK4W3atP8vc23ZYYWxK0vsl/aqk5mRCPzbPghX6842S3uOcKwXfORW8H8rYbBfjfEjSkaa/jwbveayAIGx9raSvSxpwzo0EH52QNBC8XqlffX838AdqTIT14O8+SVNNi01zvyz2WfD5dPB935cN7JQ0Kum/W4Mm9BEzy8qPTQTn3DFJ75X0mBpG+bSkb8mPz/PFhRqPQ8Hrpe9fzHi9Gl5aqfX+XG3uvShgZrdJOuac++6Sj/zYZNgn6eaAjvK/zezJwfuhjM12Mc49WoCZ5SR9XtJbnHMzzZ8FO2Uv4XMWmNmLJZ1yzn1rvc+lTRBTI6z4IefctZLm1KANLMKPzXNHwIW+TY1Nz1ZJWV28EYQ1gR+PFw5m9uuSqpI+ud7n8kSEmWUkvUPSb673ubQRYmpQfp4i6VckfTZM7n27GOfH1OBaLWBb8J7HEph
ZXA3D/JPOuS8Eb58MQlkK/l8I36zUr76/pZskvdTMDqkRvnq2pA+oETKMBd9p7pfFPgs+75I0Lt+XCzgq6ahz7uvB359Tw1j3Y5PhOZIedc6NOucqkr6gxpj14/P8cKHG4zGdpnA0v3/RwcxeJ+nFkl4VbHik1vtzXCuP7YsBu9XYiH83WJO2Sfq2mW2RH5sURyV9IaADfUONCHm/Qhqb7WKc/4ekvUFGbEKNhKYvrvM5bTgEu76PSrrfOfe+po++KGkhU/u1kv6q6f3XBNneT5E0HYR0vyzpeWbWE3jonhe8d9HAOfd259w259wONcbbPzjnXiXpHyW9PPja0r5c6OOXB993wfu3W0MtY6ekvWok41xUcM6dkHTEzC4N3rpF0vflxybFY5KeYmaZ4Llf6E8/Ps8PF2Q8Bp/NmNlTgvvzmqbfumhgZi9Qgxr4UufcfNNHK427Zdf6YKyuNLbbHs65e5xzm51zO4I16aga4g8n5McmxV+qkRQqM9unRpLnmMIam2fLGH2i/FMjI/lBNbJlf329z2cj/pP0dDXCsN+T9J3g3wvV4ET9vaSH1MhO7g2+b5L+KOjTeyQdaPqt16uRCHFQ0k+u97Wtc78+U6fVWnYFD+pBSX+h05neqeDvg8Hnu5ra/3rQxz9Qm2fFn6Ufr5H0zWB8/qUaCgJ+bPL+/C+SHpB0r6RPqKEu4Mfnufffn6vB16+oYezccSHHo6QDwb15WNIHFRQFbNd/K/TnQTV4ugvr0R+fbdxphbV+pbHdjv+W68slnx/SabUWPzbZ2ExI+rOgH74t6dlhjk1fIdTDw8PDw8PDw8Njg6BdaC0eHh4eHh4eHh4eT3h449zDw8PDw8PDw8Njg8Ab5x4eHh4eHh4eHh4bBN449/Dw8PDw8PDw8Ngg8Ma5h4eHh4eHh4eHxwaBN849PDw8PDw8PDw8Ngi8ce7h4eHh4eHh4eGxQeCNcw8PDw8PDw8PD48NAm+ce3h4eIQIM3u6mf2bmU2b2YSZ/auZPXkNj3fIzJ6zVr/v4eHh4XFhEVvvE/Dw8PC4WGBmnZK+JOmNkj6rRonomyWV1vGcYs656kb9PQ8PD4+LDd5z7uHh4REe9kmSc+7PnXM151zBOfd3zrnvmdnrAi/6BwOv+gNmdstCQzPrMrOPmtmImR0zs//HzKJNn/+Umd1vZrNm9n0zu87MPiFpu6S/NrO8mf2qme0wM2dmd5jZY5L+wcwiZvYbZnbYzE6Z2cfNrKvpt18TfDZuZv+52RtvZu80s8+Z2Z+Z2Yyk15nZDWb272Y2FZzvB80s0fR7zsx+1sweCs73XWa2O4gozJjZZ5u/7+Hh4XExwRvnHh4eHuHhQUk1M/tTM7vVzHqWfH6jpIcl9Uv6LUlfMLPe4LM/kVSVtEfStZKeJ+kNkmRmr5D0TkmvkdQp6aWSxp1zr5b0mKSXOOdyzrnfazrWMyRdJun5kl4X/HuWpF2ScpI+GPz25ZL+P0mvkjQoqUvS0JLzvk3S5yR1S/qkpJqkXwyu46mSbpH0s0vaPF/S9ZKeIulXJd0l6SckDUu6UtKPLd+FHh4eHu0Nb5x7eHh4hATn3Iykp0tykj4sadTMvmhmA8FXTkn6A+dcxTn3GUk/kPSi4PMXSnqLc27OOXdK0vsl3R60e4Ok33PO/Ydr4KBz7vBZTuedwW8V1DC83+ece8Q5l5f0dkm3m1lM0ssl/bVz7l+cc2VJvxmcfzP+3Tn3l865ehAN+JZz7m7nXNU5d0jS/6/GZqAZv+ecm3HO3SfpXkl/Fxx/WtLfqrEB8fDw8Ljo4DnnHh4eHiHCOXe/Gl5qmdl+SX8m6Q8kfVnSMedcs+F7WNJWSZdIiksaMbOFzyKSjgSvh9XwuLeCI02vtwbHaj5uTNJA8Nnid51z82Y2vspvycz2SXqfpAOSMsFvfWtJm5NNrwvL/L3lXC/Ew8PDo53gPeceHh4e6wTn3ANq0FWuDN4asibrWw2++HE1jN+SpH7
nXHfwr9M5d0XwvSOSdq90mHN4/7gaG4Dm41bVMJhHJG1b+MDM0pL6znKMD0l6QNJe51ynpHdIMnl4eHh4nBXeOPfw8PAICWa238zeambbgr+H1eBW3x18ZbOkN5tZPOCRXybpb5xzI5L+TtLvm1lnkMC528wWqCIfkfTLZna9NbDHzBaM7ZNq8MhXw59L+kUz22lmOUn/VdJnAtWVz0l6iZk9LUjSfKfObmh3SJqRlA+iA288h+7x8PDw8JA3zj08PDzCxKwaSZ9fN7M5NYzyeyW9Nfj865L2ShqT9G5JL3fOLVBIXqOG9OL3JU2qYTQPSpJz7i+C738qOMZfSlpIJP0dSb8RKKf88grn9TFJn5D0T5IelVSU9PPBb98XvP60Gl70vBrc+NXkH39Z0o8H5/JhSZ9ZvVs8PDw8PBZgZ9IbPTw8PDzWA2b2OklvcM49fb3PZTUEnvUpNSgrj673+Xh4eHi0G7zn3MPDw8NjVZjZS8wsY2ZZSe+VdI+kQ+t7Vh4eHh7tCW+ce3h4eHicDbepkTR6XA3aze3Oh109PDw81gSe1uLh4eHh4eHh4eGxQeA95x4eHh4eHh4eHh4bBN449/Dw8PDw8PDw8NggaKsKobFU1iU7es/+xSWwWuvHqtOegyyiCDhHSarHWTurwnasmRzcJmJSFj1R2I5eX6TC2vEbAZvB67M6a0cR9n0g8wTtk0iV3bx6nA0WF0XN8PXRdmETN2m/RMvsTF2E3b86PE+6FuE5Ah4vUmUDph5nJ0r7M1pq/b67GHxm4bpg1G4JeU4K/WGXNDd5dMw5t+lC/25oxnlQbOPjapSDdpLucs59wMzeKemnJI0GX32Hc+5vgjZvl3SHpJqkNzvnvrzaMZIdvdr/sl9q+dxyJ1q3ROcG2JMYK6JmisCJe3aYTTTdj7AZsdzBjpeaZMeb3BPu/jJaZu3owpSaZPe9kqazMGuGN5108oaLU6UDNVP2BFvoq8nWz7MK7x02YmrsHtTAtUl840+RnGX3rtDHHtrEDOtPavRGKux4xW52fbFiuNdndXi8EDfU0nk8t2B4xubhRg5uIOJz7BmKlOGzt4l5FfF5wjlQkv75i796GDdeBWFaNlVJb3XOfdvMOiR9y8y+Enz2fufce5u/bGaXS7pd0hWStkr6qpntc86t/Ig7KQomqmJP6yMWe7egkU2uS5Lic6gZnhCLPXRHz2YNapBkxtgNpMejxnnucAG1O3Ugi9rhzWPIHhJ6POo6ypxgu7LxK1KtN4KXRo0KK7I+ScNnqNAPjd5Z1jGlznCPR8emi8INZ5a1i0MDj3h6JSkxw3Zl+W3MUMuOsOPVkmy8UOOcbODphoxuWOIzrGE1yyYlul5mjs6jdlaBC/saIjTjPCg/PRK8njWz+yUNrdLkNkmfds6VJD1qZgcl3SDp31c8RkQqd7Q+0InBRb0H9QR8gOGDn4MTFNmwSFJqgvULuW+SlJpix5vfHC6PJj7HGs7szrADQmRPsvFS7mDjJT7PDLxyjo2X6Go1LVdBfiiJ2pFFJgo3SDFoNNGFsAbnMrqJqEGKXgU+QnG2zquSZR3a9SgbnPMDCdSOUhwSs8yQodSIOty0lHrYQKuC/bTEo6rkeUjCSPP8AOuTRAq63CFiBRgx7mbzdDVzHumX3+FNV8O6cM7NbIeka9UoVX2TpJ8zs9dI+qYa3vVJNQz3u5uaHdXqxrzMsZtKvIXzm6FxABdeykfMD7JbnJqg4Sg2yKNws0ONNDoBR+AE3Pkoazi9GxqFcC6duBRO3jPseNzrx45HczDmB9h5Zk+0Pq6p8UrnCOpZrrDgDN6o1lJwAw+pYQlIh6lk2MM3N8iMbDpeuNHLro+OM+rhr7KpM/QIDYlSFzaF65Eud7LjxefYJqKaoRFVNjap83MtEbpxHpR+/ryktzjnZszsQ5LepYZP8l2Sfl/S61v4vTsl3SlJiWwP8oITj0wMGgf04aB0g2I3a0eN7BpbX/BCQXmMmZP
Q4w6NtPw21jE0hEzpIvT+ZU6xGzE3CHcR0AuHedmUkgjqSFAqWqEXcoihl6oMxxg1zitwI043qrNDrCGdk+izlx4PN6u6nINGNrzvqSnWoRXoDc0eYVTC6j4WoiGGIaVOdT/Erm1+C/RmQVDaMKdJsuOtJUI1zs0sroZh/knn3BckyTl3sunzD0v6UvDnMUnDTc23Be+dAefcXZLukqRs/7AjN5V4nBJ59nBQviXmqpcgjzGHmuFBTr0/Kch7nd7B7gNd6EuQix+H4wyHZiEnMb+VdUzXYTZgSl2QRpOHm4gtbIAS6gcdY3TBxiFkGPWgXqo6pd/AxNUa9LwadKRQFHugughMqg5b0Sk5zdolptncMj+URu3CzEOjfVncxAY1z/FhzWgknVKn4rMbzzoPU63FJH1U0v3Oufc1vT8Y8NEl6Ycl3Ru8/qKkT5nZ+9RICN0r6RurHqPOuODJqdZvTLGXdR01silHGkvxQVA6DPVy5LfCUCLlP1I5MPjsU440vT4KmoORh0avgzNXrBCuig2iYsBoCQ3/03wPuqGmG3i6caRzYJSmpcCxQqNPs8OQigbpG1QRY34z3FDDZMTpPcxD4SxcigOhtdAoRDUN1dTG2D3AMp+Qfpg9zuijkUrImr7ngDA95zdJerWke8xsgUL/Dkk/ZmbXqLHHOiTppyXJOXefmX1W0vfVUHp506pKLWrsJqtgMawNtD4SqKYpBfWmpU9Aztc8e4hpZIBONvThpwto5yFohG4Ll1sdtuIHee6k86AAwMm71BVuIjBZZCjFi+Zf0Cgg3TiWOsPljlPucRSq2ODoWjdrmDlFlafClTFNTrPznNoHPdnw+mLQgUZUVyS2WaVRgcJmNnHWoYJNYpIZy2UYGS31suuj0TVJ0r/ypqshTLWWf9Hy/qe/WaXNuyW9+1yPYTW2q6+lWh94FbjAdB+kkmyMkFjsZYO83ImaYbUWGlqnXPy5Lez+FTbBRGBG9VP2OPNYYAUHaFjQoCANWcehlBjdlFGQUHdyKtzoE873CFkysNQVLsfdqFwd7Rc4NisweY56UWmEhm6oU5NQuxp6Q7mkJZs8a8BTT51SNOpRS0AaaAzOSZA7jgt5bbx80PaqECpjN5VMGjisBB/g5DRMpoETWzkHd65wAc2AQlCSNHE5m/FxgQpovNJNxOQ+mmHLmjm4gOJcA3i8JJTQpPJ/dPNINjuUk02pPtRoSuRZO8qXpdQiyo2nhV6oZ5kmf1OvH42S0QgNve9UfjgxC41s+vzRoBxoR/skbMWcSgcbZJS/T+d3+uytJdrOOCe7XhJuK/SHKyeVHoOJbFDXlHKW6Q6URC/OBzjhlRr1VKMZytXFoKc+BqkKqXGYIA0jEVRNKD1KN6sw5wOE1innlW4c0zDaVeiFFRFxNVnWDhuhMDpK5wi6AaTXR/NEuAGEmmE6TPY484gU+5lDhNKSkIIUnSOgRzpWYPYH3ahSKloXlCyOTYacxX0OaC/j3LEdEAlDUmOLcnNriXAT57DkIzQKaeEOKj+GpfiggVCltYRCTkqjCz01sino5J0fojr87Hjp460bFjRxjhZBwco+0NjCG2N4fXQuo0Y2poZBjy2u2AkTQumznoCUCuolnhtiA5smSOPoGqCocDsCNcNJuaU+KCEMnz1akbTU08EOKLVXEaK1gosyWkV6vPU21DindIM6fKhoEhUNldIJqgi9cNRooveP3gesqw6LXVVgzkD2cWKl53g8aHBRTW+DBiW979RrS3TcKWWOtsOeZWjE0E1E2FWZKSWQegspH5hKKdJ2mH4D70PXQebxyW9nkwS9f1i9iNRmgX1JUUuHayrGYOVoHLk/n4TQNUJ7GefGOrnc0fokRQeBg8Yd9VLNQeOOhp6pnndihlZlgx4EaNzRyABVa+E8TdauAh0IlANOuYW0mBDW2KYzJbh/NRh6Lg+ydukx6vFDzfBGh9JMaJSs1A2LOtGKljChl/Zn6PcPctXnB6EkIky
apJKWdM2cG2h9A0+TjqkdQcdmtAgrjfezCTdagQvDxqOct5dxLgl1Mgn1UKOCyl7NDbKHg6quUOOHJm052JDSdihov9B2VOWl40FYnGlXuNl6uF+g95WXnGftYsCbXeyDfQnnpFm4caRVkmlxn+6H2WChRjblOhfh8QrwvtNkc0proRvVvvvYiY5dzYxz6vCxWrjJlsT+oPNRCtR0kaQKVfaBikdR2K7cwThlVMd9LdF+xjkAkRKjPE1aTChso7AOF1CKKjSaqPenxqRzQ/dSUXpKvkaLTbDjYbk66LGgBmX2RLi0q3ql9Xa54zCKBJWSyjBagvMo4AZ+djskc8NNC5XGC1uWja5FVAGsBjWvi31QWStkGhSlOGAZP0CjofMmjcpRTz1NQKXPUHKSLdARuBlYS7SVcW51biS0CgdlmijnlXoKaXJSARrLWM2EJnZCYDk3WoyG0pmg57zcw9pR5Y75gXApB9T7Og+T2ehzRGhQVMqNJnvRa6MhcqxcBDcD2ZFw8z3oXE0LctE5l/KW6TgrQ88yjWBQ3nIlzR6IjiPsRlSAt5dQcSVuLJMq6tL51FBAzSRIZSp3bzxTeOOd0XnA6my3TKoGhm1sUS4wNUITU6xd2OoiMRjey++CJai7oN78cVrxDCoxzLAOnd4VMr2Ics6pUQ8jJvFZ1q4Kj0eAZTchPWV+ED4LY9CxAT3EhX5aOZUdr9TN2tExRiMfdHNFaTRUYYTr4rM5F+uqj7IHaXp36wMmPUaTlVEzLHVMixfRsUKpTFW4cVxLtJVxLsdK9rKQRrhGDA3tUXTAh39mGA5y6L0rboaHm4OTRi/b0pf6WX/GZ2BlNtif1ECYH4LcQloiHbXiXlvK5yYGc+YEOxZ1GNANJ90gUaoWnQPphixsKmESGha0omUVVhalXHVqcNE8rQqsTUDrBeR3s10SsT8o9SY1DjnnsCghdbrR+RZXFvVFiNYW9RgrDkS8W1S1o9TF2tHj0fD/9E7IWR5nx6P9Uo9BLlyStUs/xh6ZwnbIhZsIV/+9DukwFNR71/kIu3/jT2LHo1Uma6nWz3N+C5XiQ81wVC4GqX10bBrc9+eOwOQyyOGntBbq1aQb8QhdU6ABlIEOn0Ifu8BEHlp40PFGedkkEkGNXqq6Qo9HEzupE7PcycZK7jH4MKwh2so4l4RcakQGjhZioOL6lI9YD3d+0vQe9jAmJ2kYi7UrXsqswqJByyIJlR8G2I2PT4abPOeiVD6OHY8a2XRcYz19sDmmXqPCFtYwfZwt2NQDXk+wsRKBKhoFyB2nzwIFlYqMQ/oNrTpN1W+o55xU2ZX4eWIZ4S4255LaIPECpGXCyubpcTZWUqeY0Tu1nxXOSMzCdXZTyAoY54C2Ms4jdbZbJiENF4UZ61D1IWwdTurFoRMbLoTSDUPy8ESjA2yycbBfqrOsY2pp6rGgoXXIPx5lz9HcdjYJJyapIRqeFng1BwcL5eFDLjdV9pnfytrR68MbKzoHUqllygiEKiFVKI9HawV0HGUdkweFvCSuN9/zAAtBjV3FDEricU89xHZk+S0sIYJWCHWRcAtdUV31Ujd0Zq0h2so4r0ekMuGZAQ9QYoa6GCE/kOozQw4xLs0NvWlYlg2iVmBDP5tjE3elyh7+apyNs8g8PB70aqoOIxh97HDUUMNFnSANKlpovV8oTYFGBco9cIxVoWeySnmhqFnoeQb0PtDEXEq5wkWIYPSXFNuRuMOHthu9lhnZtHgR4Y8XtsOSzBC0QmikzDheiZlwiwnRsbKWaCvj3BzbhRJPANUrpwsv5WnW+lk7ikg53IWC0loKsPQ4NbLrlLSXgln5BXaekXLIWesRavTC86SGUx5y/4FR3/UQO8mZXaiZ6jDKwqUbITd3HhbpoQpZ0CFCOfzZk6xf8iFXhsX5F5A5gGWEaTIiTeiFGttzW1t/kEqQWx0HFBqJc9WraRaWo1KRmVPsHhBq0VqjrYxzCqJyUIPemBLVnz7F2tE
JkXqb6IiqdkKvA1QcSHtAjywAACAASURBVPWyFbs0zzrGoBEqyLOtQ6M+OkuLZNHzhLQkmGhUuYS5XzP3sEVmbrj185y+FB0K33OeZ8DaUYUeTH2jVZKh1nJygrWjRjbNY6JFemgFW7pJwtEumLiaO8Fu/OSlbLHtPNT6DcwPsXWIq8WFWx8Cq2NBVZnkhK8QuuYgN5U8/P33sJt54qnsoaJGNuWT0oeYPowO0jeSo6w/SznWoQY54PVOuNKXYISGcschrSU+AUteZ+CmharRlKAkWMg5GAg0CjHH+oTmNcShyguNelBHQ9+DzGEwtRtG1+AYo57lCnRshCkrKkkpWBiNFlmi6jB07Zsdbn2AYqlBquwDlWgM5kPQ5FoaLaHVa9cS7WWcG3tACJdxal+4N5NqGNNKWxW48GZGYEgehrFKm6CayQnWoZUedjyboZy9cGuB00m/4zCUtIRFKmp0EzEGIx/wOer6QevXN7UfcsDzMHFujlbVgxtquJGjRmgBUvvyWyF1AEpa4mglnCKy0ENscKdKK6723suSH0evZ/wiKhWJE7KBlHPnYbYOUbWW+Bw7Xj1GZWHZ3IKL00Hd+LVEexnnjhmjueOtD7z5zWyQ93+PDbpT18GCCjCRje9AWTuMNKTDUM5yyFKDcch1rmTD5dBN74VeOMrvnII5A9B7V9jKbrw71fr9w2Xc4UaOJlpmjkHFKjhH0POk7WgVxtltrF8SMHmfYmYH2w1QDz+VfDx5AzOyk9NUxYYalDCXgqjFweWLSiLSCqGxPDtesRcKNhxnO/hy98YzhTfeGZ0HnElVsAOKllpvQ2km+SE4cc+w49EFm3LjsdIEze+DHuk61B23TkZnqkN1mAqU1aPJc6RojiRVOll/9n+bnef0LqqLj5qpSPmrR0H1PxhNoBsduuGk0TyqSkI04yUsJKT8VnYf0uPsGaKeZVLERpLisNJnBFYkrUCBEUprobrqVJoyCRXcKiDZEioUKneQTYD5PSxxIwY9DakJFtUp9rMNZ+4xOCmtIdrKOKdqLaT8MVUq6LuXGXcTl4XLVafeJsrNrYENUqMhTC7LQEumHK4eKk20pEY2tWSszo43ux0WJoH5O9RzbrBfCKWCGtk9DzHjgFYDxpUpae4V3ETQ41FK4DwsUBeBtB0qTpCFxjmt1UGNbMqTpmtfFarRECefJJWJchhcLksDTLOYRtJLXcwgIDaZJBktWNUPk/PWEG1lnLuIVAdJIGUwudHBOrUHZlnDnXIKKgcUNrF2tOBHdA5KNeUgPxdKDcan4XnCxEfan+Ue6P2BfOAK5BbSQjalXtau9wHWL1N7wpMIpUZvqYvKnbHjUQ84dWxQ2gedO7Mn2NwycwmMjkLaRxE+C5UcG9OZUch3hlRQamTTzU4VbnYov5o879QInd/CLi4OijtKkotA5xJM5o3DwlNVSNtZS7SVcR6psEIAZCDU4+EuhJ2HYFnaPsgVg7JX1JikBkmlL9wa29TozR6mbkbWzMVggm1vuP05s4e1o4of0zvDTfaihiEBNdKokZ2aZO3KkN4Qtm711G5IL4KPEDVCqReV9svMdlgCHuqq16EXtfMwC5lM7YZRamhQdh5uPURD1UxovkB6lO10Kjl2wGqGtUuNspD/3DbvOV9T1OOs1C8Je1ZgRUsHd+W06BFVAMD66CGXvKa0FheHXG6YSIql+OBCX4VqO73fY9c3fgPjAEQnIBe/g11fAiaSUi8cGddUlYQqysQgha3UxdrR66Med6qegitTUsYcLF6EKQ5dIevNw8JvmVNsEpzbwk40Qbn4VC0XGNrxedYnNDpDjeVakq0nNLnW/uU7qF3spTegdmuJtjLOZcxTRQxRPMjhBo0uFNRzh41lKlsNQ9YFqAMegfeP9me5O1z1lFoHu4FjN7LjGdSpr2UhLSlkZRKs0gPGC332qJFdh2OaGr2Yq06NeuhowEWW6H2gnvOQE3rp5oqCUFUlvmaWuiFFbxJSF4F
aSzUNnXVwTMMcdRztohvx+f/0FNQu9whUClhDtJVxHqlK6fHWZ6rCptZHXgmGZjsPh8tjTE6jZjJqvGKvCj0eLFBBiyXBSSoxCWkmgyw0G8uxdjWY8Ooq7PpoElUtEy7NKzUGvYwgckUTGKs0+RseD28iIC90foDyUFEzGdx8YE899LzSQnMUdHziKCDlnMPxSc8zAqsWk0qmVIkmOc0ujlKL4lBK0eBCS+/d7G5YRliSvs2broa2Ms7rEakMitmkgPQVzVifHwhXGSFWgMYrFOUPW1s2BnXAKeIhb7DL8PKokY3VWiiVG15fBEZMqOpKmF5bGh6nUSushgE31BH4rNN8FkpBpMejtA+aj0Q3V/TZo4pHmVE2QPv+7SRqN/K8LagdpbWUc+FRHnFkGyIKNx6iqit0Iwc3LWHmBp0r2so4N8d2r0T6CsuBwTFOF2xqZNMFG/MY4fHK/bTKHTteBVYypR5iBzn1mUeYu6kwDPsTGsuCxZlwvBSCqsMQigPVYqdGE6lQKHEjNDEa7vGqcG5JzkCpwZ5wDRK6piSnaGn1ECUDxY1sCjq1UAO2CgxY6qXHTj5YIZQavfUUe9hpZVFKE1pLtJVx7iJSDeycuh9tfeDNDbBRnt+Gmqn3fjboSt1s0JGqqRLrf0mKQBcxfBalKp2BaZl6djiKwlYa04XNYH/W+9guN3qKZTpjPjCdKUG3FGEBMHrvUuOsHZVbpVGIwmbWjtJFCN1A4kWBKM+WoucBJsk1ei0LRdCoKjXq6WaHyv9Rg5k8t5kxNr9T734VFEqSuBMMR9JhIqkvQrTGsBoLSU3tbN3QLne33EQS58ERXrzEF8LZYbb56P8um/Dnr6LuO/YQ99zHrm/ySuhBgBVJI1AdhhYvqmdhBdQE1MGFm7I6LLJUhu0iRRj5ABETmidSBAWPJO5xT0yxdlVIM6FGNo1y0s1HDHLO6QaQrinjV7IbQamS1CNNlTtoNJbqv+ceYTeiApRQSp3hSilG4TyNaYtwo1PpoFncF7GUopkNS/q4pAE1AnF3Oec+YGa9kj4jaYekQ5Je6ZybNDOT9AFJL5Q0L+l1zrnVqfcRqQpoHORRpBM+TRaiigpdx9iKlt/KhsbY1czIphMwVSqY2QmNNGpM4pg1RAT2JzTqaUQhNsMm02onTDSiXHXYL4SONj+ADoXnJLqAhl19mM6dtGZDfoi1i8Dro15GTC/KswPOwcRcvmay80xPQIpDBs5JsJAN2exgFbYnBosQX1+0HG6+wFoiTM95VdJbnXPfNrMOSd8ys69Iep2kv3fOvcfM3ibpbZJ+TdKtkvYG/26U9KHg/xXhjNEH6ORNQI9Fk4WmdlGBbdaMhmZj8/CAUK2FopaDet5zUAWlj4U+6lA9RbByKr0PVBKRLjKRSnhGtsQSt+rwWY9DhSUKHD2EVYspt5omz9H+pJ7sBIyYlOB9wDUpoJ+BJq5SVDKQGgHVhGh/EopKtMgG2dwgW6CtTrXfYZQlwtYvmmO3ERGace6cG5E0EryeNbP7JQ1Juk3SM4Ov/amkr6lhnN8m6ePOOSfpbjPrNrPB4HdWOAgLfZaBig6VDIxDPW8aoqMLBekT6XwWNPoQs+PVNrOVwqDxSqX/VIQ3nnLHqU79KfZA1BOw2EQxXKmtag4uTmAzkIQc8AosCkSjTxQVqvJCJSYhbYd6wKm0IX3Uqe445fWG7bih953mDFDUY+x4hd7Wb3w9xgYLnd9phVCD6/rcEHuIkpPMi1LzCaENmNkOSddK+rqkgSaD+4QatBepYbgfaWp2NHjvDOPczO6UdKckxTt60MRIigdQzV26k4zQhDvKY4QTYthJTRTRBOSOT7BExHqGHS9SYDcQc6QhPSUJS7kXNrPzpJU+KW+53AslScF8RPsS0xsgdxwrMVDdamgsV2FNCmqE0v6k3H9cDIpSB2DCKwWltRR7oLIWpEZQEPm/OOT9x2Bl0UqWTS6pUUYVSEyziZpuyLL3MLnOtUT
oxrmZ5SR9XtJbnHMz1iSQ7JxzZq3t7Zxzd0m6S5IyA8OOhD6JNw0nOoQYHpekOky+ogsTVSWhuvFUJSQSpZMUvBGUT5pi52mQ1pKYgd4fqKQRNk+aUvGpUU/a0YRJyuWm0QRK0StC2g6VgaN82TTU5S4AaV6JV3ilDhHqcQ+74iqpXSLxNbOSpUUbaE2R1tuQYosSr7ZagxNucTN72KmRTZV2Zq85D7nOR3nT1RCqcW5mcTUM8086574QvH1yga5iZoOSTgXvH5M03NR8W/DeioiWnLoOtb4aTu5tvRvoBEWN0DIMWcegQhBOOKGeSXh98T7mxjFqpYXrVFGEVtCECZMV6H6tx8PNNKJGPVXSCPM86QaXcpapERM2tS/sypQU9D5Q+k0FUhCTVG0H0nayJ8OlLtKCNHSTSw3DjqOtP4Dzm9hOh2qxYznSHMy1gvfc4aJHIS/s54Aw1VpM0kcl3e+ce1/TR1+U9FpJ7wn+/6um93/OzD6tRiLo9Kp8czUejkq29buaBhVCqdwS9XKkT539O8thDioO0IeRLtg1KHEXhR5wCsvCIj2Q7Gdw4XVz7EbU0tBTj4trhathjLWPodev8+HWDzgzTLWIUTNsTEIVU1z0CC/YlE5Kc9SLUAaOemwhKH0jApWLSt3hVobNjLKHPTXGQgr5bcxjR2qRUFoLpjJRDXeIGvSc8wRU1GxNEabn/CZJr5Z0j5l9J3jvHWoY5Z81szskHZb0yuCzv1FDRvGgGlKKP3m2AzhjSRmkoludUY+x8UrVWmIwQZN6cTCgZ7JSZB3a08NcqPPQCnXw+rJp5sYZm2OhCAcTNB31vtJJkSpFws0xPc+5gdYb0iTuCpwjaAIjTf6mcwvlqmdOsMFCuc6FPsq5Ys1wEj50wFCOO13Dws5jmtrDHoj0BHtwS11gjoBGaPokCz9Rj3SkzPokv53dg+wI21jV4hvPOg9TreVftLIv4pZlvu8kvamVY0TLdeWOtL7NLva0Pmt0HG65iSSugoJLCkNeIeYjwok0NcbaVfbBE4WowxuRg0b29Cxb0WI5NglXZ6HUFuTGV6EXjiauRmAt9xSU/6uAZES68aeqK3RuoUoM2OUO189SJ0ymhwYJTeykcy6t5jxzCSXxs2Y0ukZpJrPbwuWq1yCfO3Oi9V1SsY+ZbtO72Q43c4rt5KCAjaIw+lRNh0ujWUu0VYXQeiyiYj8wLsCNoQlwWFFhBh4PLvTUm0a5+B3H2MPYkQu37O7kFFt5a0ko3QgntyrUZTOoYpOAlUVLlNYSssQkplSA4+F8D9ZMpV7WjtIbKLCDAnLV57bB40EaRgJ6wPND4RokWOcc9kvYOQPJGRg9hJs5gXbxOdYptQQsogc9y2VonWNddUi/KXXD9WQN0VbGuYtI1Uw4WyDKyRYNq8OxQ3moYRsxs8PsgDsSbMafKLKOcSEbJKm72Wagei1MlIUeaepNi6TYZiA2ynad1Uy4iaskIoSTo2GiZQEKFdAoGaVF5I4zg2Ryb7huMWqEYtoHdMDQBFtKu6IeaboWUbpP5iRb3Em+mySVOltvhzXc6bpOlz2YJBstsWedGtnec77GMNdQbGkVUaAJXdjUchNJUgSGLqlsWZgeP4lP+LTKHTWyM3F2IyKwEiZU2ZKeASUVZljog0pMxuNQHabEpqByP0zMhRVJ69DjRIoQUSOGcrmpMUmjZHSOGLsK0hSgI4X2CzVe6Zwbh8FDuhmgc1kMrM0SV11JTrO5rLAJ1paAXtsyMM5jMCGUy61CQYOQRVBwwmu47NhzQlsZ5y4CH2TQhEqyUa8RrRBKaTSYcw69OLRS5I5ORgb+/tjA2b+0DLZvZuUbK/AG9iTZrqzYyW58tc5mt8kCDdEwWJJasLSSKVvoi/2t9ycusATD/+Uu9uxljkFONqwQSiMDhPcvSXGoYoM95+E+Qvg8Y7TdPDQooapMeoztAqf2sF0njkQAo57aAzG4gaAVQmspdqKFfjZPJ/JsEky
Owl3LGqKtjHOJ7ZyIB6HrENyVw0x+zB2noVLYjlbHi89CjybcKu/oZmUYZ8ps4k7H2Mw9XWYe8I44m2xG55glE42w58HV2P2LpWDCK0xGjB+HibJkLaQShdCTnZyE0nj97HhRGAXEkQG47tLETuq4ocYyrtRK1xRIZ6IFcCpp2o6dKDV8sQ4/8DN0P8QeovwwW09ohdDMvcfZ8Q7AhA/oqS9sgUl2a4i2M87DAuUxUnoKlo6D+uE0xkq9KjT8NQWNVwoqiVipsRl/PM8shGQXi+XPF9mCtrWbuRknII2G3ofMQ+z6aOGVSlfr15ceCVdlotgHqVqQ4lWP0uq1qBne7IRenAl6lmlhO8w/htdHjWxOT6FrNNXKhvxqIIs4t5VNSMlJti5Us2z9mruGFVqhuYMZKKU4v4VKZK0dvHEuaPdCbwVVQaELRWKaTRiUj0gxD5PSytDoPTreg9rVquzGXzZ0ArWbjLIbP1VgN7AXqt/U4UofgQmodXgfCvvZ7tEV2FQZm2y9Ha34SOcInKgH7zml9iVZsAsn2NI5MAI99aWucKvldhyFRi+gakncqKeSiKS4oCRV4SaCgtx36gQrdTHnBN0gUXnJeJ5NStTIJrmKaw1vnIstFjXID6QLIU0kzZxig25yP3uo5gZRM8w5z8HEzs4cC2HEIX1jrsImxenDLFOWJqX17GUc/ijkqidT7P5Ricn8KItEGOSck+edzhGUppCEHHdKo6FRQHp9WDeeJsXDBFS6plClQWqE4irX0FgOvVokLgGPmiHalYMTIH1maYXQOuyTWAFGVGH0gh5vLdFWxnlDShFohubBwINh9STkZBf7WLupvXDnCpOvipupph5rl4dGbyoG1T4g/2Y0z7jcl155BLU7mWdZdxPj7Dy3bGa0lgRUeSlXWMQkOgultjbBcHCu9Uk/MRmu0DnlLNPCYdSTXQyZ4w6DVjixkybv041qqTtcI7TQR2ktbM6d38wGNhVDwApnQP6Ges7phiyeZ/MfTQitpmF0Bm6sogWqjb12aCvjXMaSOcqgghydgMshJo1IXB+deuqxGwfObMUqc4tNzbGYdQes9Lmjh3mkhzLQ6IWu8+4Us2RKNTaVxOF55mFibq2DErPZ9UWBLn41F67WPNVMxgmTUPqPFn6jmwhc1QmCUh7pXE03ZclJyI2HxnkFONwkfn3IWSfh8ZKcan1OKvbAInN0fYbtUseZ2PzsHsbtS0wzI7u4iYYB1w5tZZxHqlJ6ovUHq9gTjvyidB6Z9fBO0YmbqrXE87QcOzve5gx7+J/UO4LaReDsNldlD/8/ProXtXvu7gdQu8k423WW4QCl6jf5ImtXhRVQI+MsQkN0zh2MIlHQpHGDyjd1Sos4ydrRuZomAVMjO32KtaM1IiJwk1TJQpoJdfhgo5eNa3p98Tl2vEJf64sf9e7jOiS9zAlW6YCRSmgP1FJQDW8DWsIb8JTOEyBElAAUjvz21ttIUg0WVKByYFSea9N3WcORp7CViaq1jBWY+25XjrnTHphh+ugZKKX4tEseRe0oZqD6DU4IhTc+EWceEpeDERpoIMQOk00E3OBSWgSOdrFmdCGkeuXUU0+VtahxTiMR1OBKQQ845ZxTx1TuKLvAmZ3sRCkvGxcKBFSMWAEWfctAY5k6IyHNhB6P0mFonshaou2Mc7LWlzvASKCRZ7ojhBMipd+cuIGtMOlRdrx5mEhK9bXjkCc0Cz29/UkmUZGNsV0Z9dQnINF2sshCLZQOs72LJW8cz7NwaR16iasdrT+4MRh9ouXKDRp3dC6jKih0bqHGVgpEYaXwDRKa/E2lDWlyYC3Jjjezgy1+XQ8z9/LEZdCoh8okZFNNqWjUCUYrhNI+oeop1NGQGt94JULbyjivR6ViT+s7J1LVknpjckdp6Ctcb1qJKQ3i6n+Y/xhlW94ZGLMezLBM2aE0MybrcMXug3p1IxGWrXfPMba76u9i50kTganaTmWE7XLJ3aOeZfoMlWA
Sd+oUG5uUvlHYxNplIB1mbjDchEnK66VGvYEosyRVoZFNaUI0+ZEa9ZlRKBsIDWYCSr2hRjatEFruYnQYq0F1mASbBIv90Pu5hmgr49zqjPdFKCP5nWzwRMqwMAKcoOb7aew53IWJ8l5p4iPFKKTR9CWZWzMOO7QAQy0FslOVdP3wUdTu0ele1I7ShCqw/J/rZsdz82CKhVZ2FVppBpJWJV6xkwIXv4HnSSUYqY47rrwJz3PrV8dRuxPPZLI5lH6DaSbQgVbOwl0uBLk+WrAqMwrpMDn28CXHWOS3mmODOjnObvrsrpAns3NAWxnnLsrkochaGJ+BCxrkTdIJOArJsuVuSBeZg/JVJXae81Ct5WSR0Rsu6WCVUKqQA9AJd2U0MnB8jvXL0/oZN74KDVGaSJqFuvhRmEhaLbV+3x0szERpJnQuw1EyeH10LsPJ+7A/KTeeF4NimLyG6fOG7QGnRj29fzQCRduhRFnocyt1QQnaIowmJNlgodSpcg87Ho0+rSXayji3uhQDXguyyNTScIGZhwoH0HsAHYVKn4RZz3ATEfbCtAl6si9JM2/Tt6dZBvHDM2wBPTnDLKenbTuE2o1D92QCZuLkYAi5CDnu0RitkNf6wI6fYudY6YCFkqrhrkyRMi0Uwo6XOQml/6AOOI0odD0SbhXG+U3hVniliJYh/xiuKdQrXeiDUXHAr44V6JiG1Vah8wxvkGABqTpUa8kdgTvHNUR7Gec1KTnT+gRXAjz15ES42sBUrcVoPROavUyr8UF0JljHUPrGPIw9X93JaB9/PX0lavfinfehdl3QAvrKif2oHU3ovbKbSWF+Y/QS1A6rymRbd/vF5uBOnLruICiFjXpCKRefGqG0YmcCFpqrQiUvCsoIpMl6VKkMV8yNhVsBNUyZ5Aj0LNONDq22SjeOtWS4c1k9CXcRa4i2Ms5dhE1wxNtbhFzuaAF6zmmSGBxz9PqSk+F6mx6dZJzlpw89gtp9f5YlPr5o0/dYu2FmZJ8oMXrKYIIVPdrZySIKVRjaobSdk+OsX/p7WKSFFLvCHj+o1kLlVimthYbky13h6rFTY4tynamaCeVkUyWv7Al2H+agjClSUxOXiqRVNHlOROvHi8LcNU59Y966OtRHpxskKolY6g7Zq3gOaD/jHFQTI/ZIDNJTaJU76jmnxYTClkmjIfmhLIux9sAVtLODWTJlOHNTycctyRnU7lCR0WiScFb8x3svQ+0u230Mtcvl2P2DwhYqz7U+6RvcqNKk6vhsuFHAJPQsU845LX5DKJIS9zIWe8OlmVCaEDWW6fHomtLxGHvW57ew3U4lxwxmEoGCgV+8MRbeqLL1qx5jBkjuMHsYZnb7hNC1hbEdF/GQlJniXOgZ61hmC4asK12soUuwWSNfYh1zqMCM0HSU3cCpClthhlMsAbUG3X49cbZpoXSf2675P6jd/3qUGfXPuuQh1I4mEI9PtZ4dWBpm1mTyCLsH1FNPvVR07swcZ+2oQyTHmGia3xJuobkyjGBgZS1SUVtSZjRcakSph1mwRI5Z4pvA5DTgnBfDrX5ahH1JEzvp9eW3MyOb5hmsJdrLOIcgnociU5Piet7MRtPcMJuBSdlxSYrAxBEqDUyL0QylWDsqUUiLCRVhhm0UrrzbE4ye8s/Te1G7HKxD/YztD6N2VDWHcs4RYMiaGnc0SlaHG2oq01qEOuf0+ua3sHaU1kLpRYKOG8o5p55sXIWRFmeChmFyGsrXwoRQMsVXIuEWuqKoACaDJKXHII2mE0o+jsPJcw3RVsZ5pCqlx1p/sModrT9UaVjYgnqN6GaA8i2ptUzpPqUsO2AMGqH3TTPu+K2b70Xt/nlyH2q3L8cGWhKGaKhRf6rA3HfxDOO4b4MRhfvzzOIqVtlU6YCGeGKUHYtysmNz0NMLaSaVDupNC9cjTT3LVLGKbiLS4zD/CSZ2zg3CQi/Q404pHHNb4DMLK7zSzQeZqulYIcowEi9exHVM4eEgKlB
XfS3RVsa5i0iVTOsTB5ElovxAuoCmxqHRy/IllYCJnYWt4Xrqqc75dT1HULvPHrsetXv2wIOoHeWcHy+yEq+03TXdjAPwzUkoMTnLaEmb0yxr8sh0N2pH1iYq05o9wp6huSFYNRBWCKULduYEO1oRzoF1qtoBDSdKXZwdZv2ZZYJHGNTIhoJOWB2GOPgkXtWysKn1AZOYhbUQoMc9Pcp2uGWYaFnsgx7wSbZDKvVsPFN4453ReYKUJK6ALHLujQnXk01VXqiH38WhF2cqXLrBNyaYpN7TNrFiOw/PsZj8UJrRb2IwZk0lJul92N0xhtpNlpnUxJYUS5R9MMbun0Va7xeqA15i+yrFoeecereoR5oaJHNbYQIjVL+psPQEvBmgHlsq3Rh2YieV9e27B+bPbGFKUNnH2PFqqdbzUqgRSo1eWiEUP+t5SnRtH7SdcU52homZ1if9+QEaCkbNcGVRXNwHGBWSZNCwqCeh7FWdhViv7WGeXurJniqzlakjzgYMVU+hnPqpCjOWaX/ee4rRkjYPz6J2cwXm1qwXW59iYQFNLOVG/QVxOLdQDzGlReBCKCFHujMjMMkPqqdQUGM5zh49TBOauJzNSXS8UJklQhmZ3cY6JQ6LF1XTsHjRKMspym9jk0QCjjFK8VpLtJVx7ow9yGVQrIAuhBHocacVQildhOa/USPbxWBxBHiiJRhSqBvrz/4Uc8NloDpMDWb+DEAJRmqcV2CG9FUDTLrj4CzzgNdDzKSizxD1ZFNjpAyNZapYVaYeaThXY2OS9mcXrBHBHL34PqSmoIFHq1zDasA4QTrc+jdKTLW+yy1n2UlWoZY+zS8p9jNnD6Xf0OJFnd9k0rxribYyzq0uxedanziIbiv1SNMwDz0e3URQGk2kACeNLWyHPV1mK9MsXNG64yymG4WbiHFI36CSj7k4W9EicGCPFZjFdVknIyB/Tfl30wAAIABJREFU89gwaueoexmg/zus3fTucClzlU42pjMj4ep5pxhzCuu448qbNKoK6SLUeKUcd0qHoXrzlBpR6mRrWBkWWSp1te5RTE6xa6Ma9dEyO14V5ABKUnyeHY8W8po5MITaSZIO8aaroa2Mcxdh1b3ieZAQ2h9uQYxyN/Qsh+w5pzJiVUijoR7N0SLjCVHj/CRUMxnOMlUS2i8p6E6bLDNLZrLELIt7preidsk45GrS2Hqp9cXp1JPhs07zYICijCTsqadzIAzO8JoUIapvSOHrlWOOO7x/3KEFi2vNsY6hWuDU20sSSel0lJpkfRKfhcWE4sw4T8CKpDV4vHI35TKtHUIzzs3sY5JeLOmUc+7K4L13SvopSaPB197hnPub4LO3S7pDUk3Sm51zXz6X4xCbhCivRKERSo1eGmqrQ7oInWhcFiaEJtjDX62xh2pXjrnTItDNOJBmMfIyjMlTWstsjUUUpmAEIx1jlszxGeZxT8YgF7/ILJlIZ+uWTB1UFZUkldizkGLS9ip3szGWvwTqT0MFqThN7IR5PskJ1o7K5VKHCO0XXKAO9idVXSl3hmtw0VwKUvcNlodAstES53LTarmVLLt3mDu+8SjnoXrO/0TSByV9fMn773fOvbf5DTO7XNLtkq6QtFXSV81sn3NuVQuO0loIpw3zNCFvMjENQ2b9sGJnLNxsaQcNkp5BtjJVITF0oso8xFW4uyrXwg1uTUEazWbIqaeVRY9WmLRhR4q5l3dsYhbsw8db57hbMVzSKzUKU6Nn/85yyO+AdBgopUgrdlKHCPbUQ4MrwUoF4JLsT5Rq1dTjTnXjyznWoSWQa0DknyVut+AKoZAOQz3ulKOXOww5V2uI0FZ+59w/mdmOc/z6bZI+7ZwrSXrUzA5KukHSv696DGO0FmKn4SppMORJjWyMsHeSkNM7W2YrRTcsU/8gTCicKTHP8v4eVoSIFgUqws0ApdFkYswi2dLFEldHppjl1JNj46U+CzYfUI4U8+Khd4sWa0lMw4UXyORK3ChMsCE
mGHzCmwHqkaa0HVJuXuJc9dRkuN7Q+X5mwSZnoCMMRqnRsUKuEEo551hlDqLcAzlea4iNwDn/OTN7jaRvSnqrc25S0pCku5u+czR4b3UY2xmSSYpOwDSpieqj11OwKBAIx58P4km2UuztYu67AiRc7swxD+qXj1yO2m3vYDrnnQmWXTYUY57lR/Os0gtV2+lOsus7WmNi4DMF9sBHOlp/jqKH2c6fGluUvzo/xOaW5DhbsGd2hasOQw0Z2o7SUzofY/chDyt9Ek+vxO8Dr9QK10xop8XzzKJMgSRG4oSUuJRiBBZYqgElPInf83IHrJcCHRRrifU2zj8k6V1q7HHfJen3Jb2+lR8wszsl3SlJiXS3Midbf0DqidYHULmLTWxYAQBKGdW7YSJHBca/4EO1Y+sp1K4Ii+bkq2wGnoN6YP19jLS3Ocna1WAVRlqEKAbL+J2YYx7+kZOM1rJjKyx6VGAGc3d36xZXrZOFWMvfYBukLFOl1MST2IJW3MLmpNh0uJz6EiwK2/EY5NlCtY/5TWwtokpeKUj7iFTZ9XU/wChzJ29kcwvd5FKDkvCksXob3Dimxpizbn4LWy/js+wmxOahHGnmIk4IXQ7OucWYvZl9WNKXgj+PSWrWPNsWvLfcb9wl6S5Jym4adsXe1juZcNMod4smhFZ62GA1yB1PZ5kHtVJhQyoGZ7YT82wCrqbYDexKMMPp6k3MAjpVYtdHK4TS/hybY7vOZIydZ6aDec57Uuz+0XZHZ1qn0RQqbEErbobScVCvPDYHHRR9cC6Dk24VqrzU0myyDrtAXTTkYlDzm6kqCTve+FVUxgY2g+dZ6IdrX6H159Yl2LPX+69sHZq/YgC1i5ahxz0F6TBUZa648SqSrqtxbmaDzrmR4M8flnRv8PqLkj5lZu9TIyF0r6RvnO33GlKKrZ8H4RbS3XWGUYg1s4cZy9kMawdr7ahUYp7X3iSL6ZbrbOKmZdyPFpg7bW+ORQa+O8okA6/ffAS1m4jChNAc825NQY90T5YZy/NVNj73d7IH9/B06zSawjTlzLGVKXWSLYR0M0ClG2spqARVCDchlFIeaeVNXNGSJlqGXMGWRptjcLNDueMVSDUhjiKa7Dr5VFZZmaqg0Aqh8cOMrjr/JLZepg/TrOq1Q5hSin8u6ZmS+s3sqKTfkvRMM7tGDVrLIUk/LUnOufvM7LOSvi+pKulNZ1NqOX2g1s+NcNPoRDp7CWsXgTrgFHHoed2zhT1UlHtcgVKKlPaRgsV9ZmDRo84UW2GmK8zoTcD7noOJncdggmZ3mk2mE/Ns81GHEmRREBFKdrINdfk4s2IKw2wyS55ky0c1Qz2vrF2SpW2oAh22USj8QPORaEIozIkXZL7h4kx0k5Qah4V6OsONDBCOe+YELNIDKLwST1ot9jH6aCXLNhGREuuXud2QwyZJ9/CmqyFMtZYfW+btj67y/XdLendLxzCmvELU3Ki3opqFCZpVqPsJjfoyNHqpB/wEVBfZ3808muMltqI9MsV4vdu7oIUAcXCS6ePt6mYizY/NssmtK8M2H1szLPJxKs/u+1cOXYralY61bjBHB5h156KQCzzPnvXiVrZRtSKcy6DRO7uDtYvNUQ8/O16VJq7StYhWFoUeaVytGvqlqD56oY+1yx1nJ5oYa90mgErAKvSxnU7uONvA0wTN2Dzb6RT71juN8sKhfa5EDaoJkV1KTrf+cIwDbrt0HiHdMnuoaKJeHc74pwrM+KnV2fVV4XnSojl0s3NynvVLucoe0d4MdItB0M1cvsCIryeSkM7UwRJsD45sRu2IoV2vQr4lTLhzsFAZRoYtvJFZ5rKt0U0LNJapB5x6iKnx2ncfu8Cpvew+4AqhkGJJK33C4ChGsbv1Gx+tsJtOk1Zpxc5KFlLmoJFN9d/rUFVmLdFmxrlTZrT1GWBqV+vdQCfuSAka5xU2yOdKLKxULLIJeGaauWN2DjIVjRLU5d6aYbQIGhmgRY+oqszJPDN
ec3FGT5mdY5udjixzw43ACqGXdE+idhSIjgaTuGs97N7Vy9ANR7nH0EFBi/TQxEcqMUklEWExYMG6aJraDTc7sF9oVcv0GBtoJUhPSU1AiiWkaxH/ElS8xXz6eozZH/E5Sr9hx6OSj3Szs5ZoK+O8HjMV+lof6SS5E4vkUw3caXar5mpw5k6zC4wfZSvhoQijYXQOs1nq8o6Rs39pGRwq9KF2W9OM1nK0wHS5r9t8FLU7OM3uQ3eOcQ76MszN2Bln9/3wDOzP7SzB9t6TW1puU4bRBAc38NEUlC17hG3Ey0PMs1HuZQt9YhL2CzSA8ttZuzjLqcYVQun1VWCV665H2ZpSgZU34/PQqIc67jTyQewPqstNOeelbmZ/0POkCa9UgrEEK6CuJdrKOJcx7wPxkOCqc5B6XIOZ4JR350rMm0YnqGicJiKyFaYrxozJPJS525FmYstbU2zlHSuzTdlQjh2PSmHOVKDHPc487n1pthkYLbD+NJDo3NfPqDfTs8xYrsIoWXk7c4VGJtjxaNSx0kF1x1EzrORVA8VoJO4oCptzXgL0jfNBdgQaal3h0nZIFJ566TseYxtjbJxDT7ZBdRiagIopZWuI9jLOJTlCUAN2RR2GSinni24GItSbNgM3A0nI+YKh9ToMRYzBlXcoAz3gReaxHUiyxMdqnbm3+pLMfZeGRE2aCNwNPUDdSbYpK0L6VC4FKoTCPJF0it2D2XlmjMSz7HiVXjhHFOByBefcSJHNnclxNjbLUDCCFkuiaxEt6kSPR6thzm4P1xsK/T2IdkWPVc3CCprQyK5kYK4c0H6XODWMMhrWEm1nnCOQ8QMpShEYSozCnZ1N00oMrBlVKqjPsB3vQ5ObULtdQ4zjnocE1n1ZKHAP8YNx1i8vuYSdJ+2XB4+y4haFLWzhHcwwrzQtskT036eLzKUZjdIShXDhhcZyNAF5qEXohatRvgFrVmbqoHhNoeXmw5T+kySVqQ44OxzluNNNRAyOT5LxmpxmN6+aZoM6AY9X7mBzRPYYu3luE1uH6tGNZ523lXEeqThlT7Yeypq4tPVuoEk4aWYT0nlNosYyHBnRIg3Nskljdzfr0JMl5lm+vuMQajdWZR7inhijYdwy/CBqR43sOowL3nrZfajdZJnplQ9BffSdWTbORorUUmsdJ2fZGEukmQecbgYScUY3mB6HXPxOdjw3CzcfUF4kTM6yJGVOwAgGNGTM0aQ7drzEDDselaakiceEX13JhUs7FbznySmYZ9DBnC/JCajUsfFs8/Yyzutx09zAxr4kyG5Qgjn8uKoMbEdDs5RviYsXwVmqBt1p/TF2A4+Wma56P8wuO1WGNBNY0eSxArs+WiyJqt/0QX28x/KtPxB0THekWFhubp5ZFbVDzENR2c7GioNJ6nTdjUCPSB1WaqWGE6JySqrkoNE7DY16UOxP4puPUjc7XnwuXOUOkqRpkGaSmGUb6ipcn+N59syWepkdF5+FnPo+uLNaQ2xsS7ZVOMaNSuRbH3g0tEfluTCXCoIa2XFGkVaFFQRTFsYuO2FC4UMFRsMYhJIKFejG6Y+yzUAS8panq8yTPVZkBt4gLEL0rVPDqN2+Xlb59hTwZu/fdAod63gFTkowoSWzj43pWh0ak4LXV6KcQNaMFi/CdBF4nrC0BNaEJsX+JG4s00qfMZjwWu6A3mXg4ad9iZ1g0FlX6oHRJ5gQWtgMI78wh2kt0VbGuTOpBnbnhOGAeX7QC0CNc1KUSeL0m+nd4XLcs1CtZQuUzSnB3dVgnOlr98aYB/ybsCwi9RDTiAI1suegB/x5Qw+gdt+bHkLtnj38UMttHpxl+QIOGtm1SdaXtMxVdxcbYwVYydRBT3ZqjHqkUTOsj0497plR1i/5bSEbMjAyQJMm5zdRCgctgNN6G7xhgRuI9BzzgDtIh6ESjFQ4AwYr1xRtZZybg0kZQJkEShFzbwycaCjoBEWvD+/M4a6FeqQzsBLKdI15lnsh55yqp1A
d90thwuu2FNu0UPWbJIyRp6JQlg2MzxOUOx5lC2hqABbWgpVM54tQ7gwmrsam2LNegXlF1SyjDhjsT1qRtADneFoMiiZodhxhz97Ubui1hedJN0mpidbHC6W1lLpYn9AKoXOD7FnHyjdQOjo1Br2ma4i2Ms4pEqBqVqE/3MIWVAUle5Jq/IZcxQ9e32iRGTIZaLweK7AEvy0p5iHOZFgxoeMFxkuqhiz4GoESFd1xNnt3QJHm/R0nULsH863ToHozzFgemQov+VSSqkW4Mc6zBZt6t2B6AuaORyrhcquxygtN+qeKY5Aukt8KTxQ6iqhXmibKFntan3PpWKEeaVohlFKgklPMkKjk2FiJzXvjfM1BJpz5za0PPDpBJSAnm3LMZoehVi8M0blIuIU0RuahCkqCGUA3dR9E7b4+vQu1+2Z9B2o3nGEe6RqMC47CRNLLssdRu3nIK/va+D7UblOK0YuO5ls3mE9MMCUhB7nctXm2DKSPMIpXYRtbCBMTsBYCnDsTUIYWPgqhRx0ptzpWgJQKmoCah9TMcdZudhtbM7MnoDY3oOL2fI/RMqeuZE4bWoSo62HmRIkUYQ0FaJznt0PvoCT9K2+6GtrKOHfGBjrhZdMKXXQCpl6jKkxApbQWOiHODbLjXdLBjFBKb5ioMkLpga5DqN0IzMwdgJm5DxU2o3a0P8cqzJKJw93cQIolyv778R2oHUl+jERgtGuKce3SI8zoLe6AFULzcDPA8mRVZBR+nB9EQaOOlONO5+qwEzRpEiPdDNDgYTkHvctg8zhxDVsXaKIlxcxOVrPBRWCthwqthYCarSnayjiP1Bh/a34APFQ0JwY+G9RbQU+UemNoAQcaiaBqLYfmmITfcGoCteuIMDfVMejJjsAbMVthHoQopJlQjzuVwqQVZXNQppDgxCikp0CjvrgVWqFlyJGeYe0qLKAgmFMtuA/HjhQo6MQ3H3Cunt/E7h+Wigy5tDpdixy0pohfg0ob0mqrmVNQrxzSWjoPs5tQ7mQ3gdJv1hJtZZzXEpDGASbTsCURYd4c3gzgdhDUSzVaYivoZIntzCmdgrajCa8/mN+C2nVAickaNHpHIIf/1Dy775szzFKLw2TLcrX1++em2eRiNegpTDErLTYBE+5gshedO+swYl1Ls0mQFmLDlUWpQwRWtHRdcIN7jJ0o3QzQ8ULpPnRTlj3R+txCeOqSVIUJk7F5WNUXbiJmh1kUkFZpTY1Bb+Qaoq2Mc6uzLF/yEFMvjqMFFaAHlVZJS06xhzG/lYXI6YRIi9EM5xhn77vT21C7bWl2PGr0/mCa0VMoqFLIyTzznOeS4XmyJakrwQbowZn+1htlYbGdaTadp46wdlDaXlCASGVYwC3DhISw7FzYeuXUA56H3GrquKEiCtQj3XEUGpTQgKU0mtltra+ZnY/BqrdwjDnoWaY5aDRxNTPC5unCwHlwztcIbWWcR6pSeiwcWgsNfVFuE21HufHzm5mRnWa1WlSFsmXU0ztdZp7zhyeY1GBmM6PfHJ9ju8DuJOuX8SKzuLbnWGinM84epJkK86yM5Fl/GnSLlUvACw6PZVVa0RI1w+ob1LFBPe5leDxK7aPGJO1PWjaermG0P+kaRmktiK4q4c0H9ZwTg7ncwdZnyjmvx+CcVIfyp9BT76LhRlnWEhvwlM4DxibGKtg00R0oDbFS0EFHJ5q5raxd52F2QKoucniGueGetvVR1C4XY8Z5DLrFThSYR5rQMCSpWocRExj5oP0yV2JuzdkjzCKJbSIWJSy2AxfQ+CzcwA/ChbfAjtcB5whaNIcahREqlws97tSoj82Hq7oC65ThyADdXFF9+zjMbUiDKDURvpC43CPVHc8dZg3L3exhKGyi1SFZs7VEWxnn9ahU6AtHM5QWKqCZ9dS7RbncCTjRUBmxCMyyniixmbQDepZ3wdKpY/DGUxWUxybY5uPm4UdQu5ECM17L0KindJhskj24M0lmIVRLgHMOPeCpU8z
6KbJgEDbq6cY/PwwNCzqXQRoNNZaxWAA9HgRdG6iRnR1hc2B+CNK8YFVtXLwIGMw0ClHsgXPLFDvg1D4WiaXymbC2oDofZCpea4m2Ms6trv/b3pnHWHaeZf757nLO3W/t1VXV1VW92d3tJdh0HGcbmQRCCBFGI4gyAiUBRpaYQZpNGoyQZjTzVxjQaEAgGCuEISMgCQmZWJAEPBDGQBzjLI63tntxL9W1r3dfzr33mz/qOhSh2+3z676nbl+fVyp11a16+yzf9i7P+7xySv4HtTbif8K2GSpC2Sts0hUPw4MQGudugd0n7TpHU5CxCNvxcw4Lb1EWlOU6q/aagVj1A3lIqA8lA+EpZQhPqdZgYa5heqPTjEpja8O/ExGpQD5veDBR464xwtZCvMTWOjUKKWQuc4Xp0cJOGjmvgbIGScrC54tT3nHYhbE+yswUjK+GEX53i3bR9P98qXVmLLcS7F3GYYfQ2ijblCjmnIbAG+OwgKaHMlDGuSQUfSAeL6XZohhwGnWgUaoGrMhPbLLr0YMpATt9ZqFFsgJ5x1/a8t8pUpLccVj4EzAFIzWyaUfSdgvCaBz2PqPwvRjiPEKjgnbZpe3fqVDYBzWy6fuszDA9iuWm0IEm3DubcI+ncBGZYM0NXJiLYUJwr476T4s3c2z/G32eRYi9fLDsKZjycYWlL+ojEJrQQxko49y0JacECkJB8SNlKqBRFRoBpzg4gsOXJBjIxumoJDTOKQZ8s8kG/h2Tl5AeZWsZTrCTnlIbFppswsykWUT6yAFWeXzuCnOS1jdYqixS8z+xOxBCQ9PqFDIXaQQL1KT3SdlhMksQancKNnCDTZYSDGmHC1c9GmSkoWwoQ+fZoblzjL2YygwzYOMV/+s9vcwWe+kw83BxMS+tzYPsMF4W8pxDDH8vZaCMcxuVGvlgOhbEWPd3fIAmtmArYojTpIuxDWcUZRxYr7MURgQCPEcdNvBF2NynAlMKmRhlQWH3SakGOwF3GEnlII/7Zea0NMb9L6RoBWYT4BpKQqrBtgMboUDjjkZsXYYM086xYPdOByLRmjCLC7cIXFeUXmV7bgN2FnU34ANC45xyepMGg4UjbLFTakNKrUwz99krbOwaw8yDj0N2mF7KQBnnxkrRpv/ZQNgDaAqZFv1Qzl1chRwwXo8eaNUWW4y0U2QS9tj2IMVTwWMRW0oxmYM53XILYsfh+FU95rS0O2wcmsOUMsL/go94bG4GTcVHxUbZJhiBLC812kGTUurBLCd1WuhejWsUKE89ZHmh0IjtU+yFUlgSZkKp+99bmplggxqRNqwzWGSbEi0kxU2IFoPtm/FGZKCM806Ud87yKxSGQbHjQdN64RQyZUZgUG6NuAy+UWuzB3ypNIX0Tg9dQnp/vz6H9O4YYrCPqxUWIT6QYljGDchbtl1hTosTY15gPQkbA3X8H9jUOKARYrgUsPFKO2hixiq45zrwfabW2YupTkCcLYxI06xqHGaNKVSSYvFbsL4rsUULvJhaaca/MZFah5Fs+GiUd7xwHDpIuK6BqbVS/WcK998d3YREG1b5V/2P6vYJ/7t+asW3iqSbaOBAWVfgAUOLaSjPOcVbbsCmOYezW0hvKsFyzw1oWbxzkvGqP7U2j/TqkOe8DiPgcdhZtLLBjPrUKLMsDIz2Ghe05p6kkXPIj04DDTDbReEpMGmF97LkJmTWmmfjN/PXEDJ3mDmqFGIJk2QaPscmDOmgKXHnsT7CImF0fhLkIl2z9NmijWBhH5QggjotlBGol9J/d3QT0naNSnPASACLmKZKaUqwTVOeMJJNF4fLbF682SSikOIJhgsXKjDED2UuzTpv1jz2fDsF2CF0gg08Nc6HDjAnaWeNAWYNKOyUmAFrXdgcBhab06LxoPnD6WFF97LiXLA87uv3wU2eIh6hcwV7ZKkMIsQSj5zTbDOlKawcYM9HeNUjAL4r8XdC60tGvsXOheIJtklghAGN1PdQBso4Nx3Y9Qyknhu
0QQWcBE0YraiPMD2aWqd0ZzRlnYmzcMWlInsxiViwq/jZDcbnVq6xCXPXwWWkRykRO7CcP5tgE7SUZFFGC7nH22lSEAoj4PjgZXqtoWA7hFJsPC3epw3j6PXoOOC28QF37KTMHTTbTA2u6niwGahYw/8AthNs8DDrCiwk3bkHGtkQvx+vwAZSoDao1zJQxrmN3EThpE+hk5xu+OlFpkcPtDpsbEEPJhzlgJQD8znm0c8nGZH7q1X2QscS7IUux1mEOAJP+jGHpYTOF9h7qTRgM6EhFu6tppgTWLsILAtaiAj3JApviFUg3Rmlk6XPR1lJKAQRjh+F39D3QjMm1ImgARiKcadnilNm3kdjiF2Q9D5JrdEmRDB7AWEtHdjtmNYL1EeYSesWaRqpdxKYcW6M+aSkD0pas9be3f1sRNJnJM1LuiTpQ9babWOMkfTrkj4gqSrpY9bab93oGhFPyiz5f8kE04ajB9C7rrI6RBw9oBh33JWNtt2FzYQaHTb1v7ZxGOkdyjDwf8uyk95rsxd6aYdFOk6NMT6+IcjH3oDYeGrUZxLMgq2M+F+AzlV2j3BKY4e6Psr0krBeh2blPEj9h5muaCQb7oG0iVQE7vG0eB+zCVGnBZ7RmPM6wOArbUJEHXHaIbQyzQYvd5Z5jrVplhltwUxELyXIyPn/kvSbkj6157NHJf2ltfbjxphHuz//oqQfkXS8+/U2Sb/d/fd1xUaZ90oiHbhjJ9wQIbSaRyvghk+x+PSgoDzZJUgd8JMzN/QRrymLEAf1+KV7kN6hIYZVn4c84C9tsOY+Y2k20RyIVd+uwCYcFNcbBTRpU8yjHvoWW+zlg0gNG68NaNS7sPswNXqdgHGo1LmixittJkQj9VSPUj7CLQJ3x8a9QQCem1BGSzfhsDgQtggdncKdDGLglNggBNxu4w1JYMa5tfZJY8z893z8sKSHut//vqS/1q5x/rCkT1lrraSvG2OGjDFT1trXB8Qa9pJJbSA1esFZ3b0gU2szR5IX4VAOY0ivdqnCsOMzKRbJThh2YscjbNN42/RlpNeA3aBox9VjI8xyos2gNqCRTaNblSJz5mzT/4YUAQwvklQ6zMacdvpMQGOZRrIbsH6G9qSIw6ZAOHATMK+6hZDH7CXYTAgavUH33MCZD1qMCMYPG9nQ0fHSsCA+4N4+QV+vl7LfmPPJPQb3iqTXwm8zkhb2/N3V7meva5xbsUYAFNNGpD7B9OhBEWf002pBoz7ahBgzeIBm4gxQ+s7cOaS37LEIuAvxRUmoV4KdPmlzpulEAeltwfAdLeDJ5pnXWS7D99n2/z6txw7CCFx7NGtVmYFWDDWa4MEbL8PmRVPsgukF2OEVRsAjAfOH07qu3AKbaBt3MTMlvQIhgelgI+eRlv/7pHSWtElPJwab9q2x86s2waKfBrxLScqdhwUYPZT9Ns6/K9Zaa4z/2IEx5hFJj0iSkx5WHLC11Ef8TzwYCA08JVgfD5ZRoZWG1yvB68FQQKnDvI8SDMOlYFXaq2WGARiCzZk26iwiPZtkMJqVOqu6o7CWWoNt+sNDDH6zU/D/PmkRlZdla8HdgilrFzZCAQ6LJMUr7D5bqWCZGCiMhkZeaSEpxR9T+E1xlili5rBEsKxA6WW2Htqu//uk3UjjFQr7gA4uNLITm8yRaw5Bpp045aruney3cb76GlzFGDMl6bVWNIuSZvf83cHuZ/9ErLWPSXpMktJjs5bATUjEwoOV/JTn3IGOXfUgjB7AA4amyFOsnlAx6CXRpkDUyKbY+JUKwwBMJFnKhFIbjsEUzYbDsIUbcDNN5plFEouwgzfn+sfwX1ln+A3rUaMeGq8xSktC75ONQbQKaeegsUyzjhgOE3AzqBa0YzArECwkpbR6NDOQWmFSuVZJAAAgAElEQVQvdOukf28gsQOfDRaS0gg4ZbBp5mH2kM5pt/9A5/ttnD8u6aOSPt7994t7Pv8FY8yntVsIWrgh3ly7EYtmxv/Col0
0iVAMeBPiNPNn2aSjTAwUH0ifr9piYSMPhrfWIL/aiSTjD3/H5CWkd6XK4Dd3D7H7pO9zDHqdpSTLYCxW2Pg1IYa/3fG//lplmNIdYdaPKcICGihRmJWLQOeDsoS0oKPaTjLDyd2k7eaRmpwS8wYoDKo+DOE+MJJN4Sm5K+y9FOfZWUSciIjH5lhyje0RXpbtf06RvctWis2VeBlSTCbfxMa5MeaPtFv8OWaMuSrpP2vXKP+sMebnJF2W9KHun39JuzSK57VLpfgzvbw3EumgqUTcKpviLWH0gHYEo3Af2vr4hUXGMXl3bgnpbTUZ7COaYgN4DKYUajAzMOGw4oZNWOVXhZ1an7l4COndc4iN+7OvsOulRiCwF4j12AETo3ARGDmPwuJvCk+JF6GRPQKfD0MCkZo8yuOeha3cA4bDUGcgswTp/6bgjcIzGmHOk2yOFeeZp+OUYQYe8pVTeyfSZIoOdHZ6KUGytfyL6/zqvdf4WyvpX/u9hmmzSUQ8bDp5ovCspqlEkkm4GaEdSen7nB1jWOezZUb9l4qxk2kVdl6ZirO0Di0krcIw1WqdGedbDTaxD09tIL0WiGRLUm6c4dG8ln9vPJaBudkFlpZLM39FNciln1m48d9cS2gB485xpkcF83nDwk7MHAaNeqoH/XAMVWhDqIIHnSRKP5xa9x/RqhxgphuuhyCd18UcD4k7gFTihYAv+AZkv2Ett1QinlV6yf9KLs36D4NTdhEaWaaReqpHu8fVIBsN3YBnMowl5O4sa7n65eW7kF4ZltffAzGCc0lmvD61fRTp5eLMcip57L2UG0xvKsMyA5kEs0iWzwHi/xxbDBFo/BTuZJtSrMTm5vbdwWLHqbGcP8cCGxXYMC7BliyGSiY2oeEE9+rEFhv3wpFgK2xpUy4aOS/P+DfDopB1pQ0LSWN19nB1WKDpQHgKhd80YGdRSdJTXPX1ZKCM807cqDrp/4QiMOKRsyxltnmCvXJqLOMW1NCJsDDVbWAhKWVroQWhd+TX2fUgZnmlwQbwWIpZJE0YDW1G2fOtFVjEvb7OIu6taXZgD6WY82GG/EdkzDpzPNJX2Roqz1PmB6SmditYfCfNylGsM476BcznTaERlJCd1gxQoVHbDuw4RnnqRc5a+CojsHkR7RBaHWfnSW2EGvX9B0+hMlDGuQxL8eUu+18d628J1simzQNoQwXcMhnSwNHN5u8vzCG90VNsIChcpA2LyzYbjM0kA+E3pSazSFbKzMi+c2Ltxn90DTlrWCvahsfW7dImgyXZqv/rUROmOg0joTAijZuu0M6bcC+jkXN6zHO8LLwe3KvpHk+ZwzD+GAaKYhB+Q5nKqMRq/mdacoMZy/VRWNgOIULUUU2tQnaYPGzEBp2WXspAGedWjCy/OAdWY9AHBaWvghsUjQI4sOkR7XI3Nskw2TTiXoFVTRRbXW1ByscYM+qp0KZAETjRskk2sdc3mRPRqVNuUcLEwC5F+cPpnpRkSSQMfaN7GeYBh0Y29MM5BSPtBQX3XMoU0shDJxAuvWgD0g3COi2aaaGkDUSoQ9ZKsUGIV9gi8jLsetkLDLZYPwALDXooA2WcR9qSW/A/GTxA2xM0zITi4OiGQQ0EF9J64dbcsMDvYpldEPNdw06mYwk20dZqzAhtwXBotcmciFfWWQQ84TDLAvQ529WjzCSb/t8LjUi3E2xuZq6wC1Ij22FlIrjYnPJk0/sMupkQ7Z1BjXrK8kINQ0p1XJ5i95ncZOuoCun4yDhQRhk8N2GHUCzwcqVjzMAy7TBy3lPZ5Tn3P/uIveXBwCTd8HGOFQp2Plj2H0fOEzHmRQw57MQuwgLGV7aYEfqOqYtIbzbNWGzqbXafo2lmIWxWWMSiWmcnfSQGqbYSMI1c8x8BijTZyWSjbJOosSHHcAO8R8A9kO4tlRmmR+Ep6SU4fmMwYwLvkwZ8ME4aRupjMIVBbAjpJposVf3vSZ1osNgbit83HaZHecdpNqgTD9j
5eAMyUMa5LJsMUVKMSKkUYWo2sxxsAaoLG1Q0aKMJyH4zlWZprAmX4W+aHdadKZdgzkDJYy/GjcJdCgrNKFAMeH2LYQAi0MiGNWISMJhNEzbggHzllp4CcA/EenQMaKElRTLBrGMjT2+UqVHGDxswdtyDMBPqJFFDlDKh2AigcqaOKowQx6ps8CjG3QbMKhN4ZuANyEAZ56YtJXb8D872McBFDI1sisFaPc2GijZ+2IRtd6nz4eXYplGGkWxqnJ/MrSC91QaDmVB4ihNlmylllVk8w3jjnRkWcTcwAh5dYs5O26GYev86ToE26UFqeI9IslrewOlWKQ84hQBQGA3lAafPR50B+nxUz4PzmhbKxqvsvdB11Hb9Xy+xyYIMTUjN24EFoTgijR1Odi7EII97L2WgjHMZ1tkyARAAFNZSnIfpGngw4QMGYtzjsCCULmLKLkIlBcMxow57oReLLFIfi7CTMOsw7yozxzIY5RIzlqNJdji14OEULcPDyQWRc1jYSdc6rROhHS2p0JQ1bSrjMmQYvh4dP1rQC5NdgTc9ohkMmjGh2Hh6RrcS/m+0BZsQuUU26G2HQn1gUAPqOdtsMbSycNB7KANlnFsD0xNgHtCNlNJsUaHFQjQlSKN3Diz6KTaYcReHJ9N6k0WyZxPMAqJNlr527gjSO33kCtIjnTAlyUJDtA3ZdmIFpkejxJU50JobGncdGN03EJtLY0207oY226G9F6h1R8eP7rlUqBFKBz6zBBvZjEKYF6ybok4gdSII33wMQpIS68xDqk6xc5bg6SUpVmN6tSlmgFCGpV7KQBnnRgyLRQxKmsLCVIpw4QfdSCMKI+eNYaY3mmLex0KVXZAWhI5CcuA706tIb2OWWQgbdba55WDToxZsSBM5z57Py7FNv3QvPLFJQSilUoRGE6Xwo92OcZt6uOfS5jdBZwYyV9kAVqYgPIX11cKGTHEWspkEXN81dI69mJ072EIigT6K368eYIuPjgHFcse32eDVR/qPEpHKYBnnbSu34H/W1sb8vwYaOc8s0hbGwWLAm0NMjzZLoobFdIrBKYbiDGYSMSxy/mJxGukdTLHc+tEs6wV+ZodhxxNxZlHG4xAbn2HriDKaECNbEip+pBHUxDo00mB2rU5ZXqCfQ+E31NmpjzE9mgWMUuhiwLAPSvlIDTwaAadOZ/EIM2ADjb5SLn1YzJu9xIIv5Vn2LitzwRrZlBGolzJQxnk7blSaDuaRaHFLCUYPOEUQ04tAo74xBSc53Lhpc5+jKdZB5XyJWSQTSZZSWKoxL4kWvBbq7EQr11hGYTzHTt6VCTaxbZnpuStsXyFOJ3VUacCANkGhUTGP+bf4vXiwtwTdcyn8pgwj4EHPF1ysB51OOj+9NKRSzAVbxJjcBAxzsPCxPsI8uXaCRt2YGoVcUcINZ4e2Le6dDJRxLsOMURKpwkU/8I0HDaPBzA9xSM8F9Sifdxa+mGNZZtRvNdmEuVpmpNBlj+1u7Q5MQ8ZYBNxrs00/nYY89UVmnLfSsECp6P99OiwZpCYsUq+PBluAiutuAg5u0ToDuncG3aCOOh9UKBSDFExKklNiE6Y+Ap0kOK8JvprQL+4qMjXaIZQKYbCRpOQ6HAT6PnsoA2eckwODGPQ0Ik2F85oyPfp8yRXIagE95UqLRWwv1SFfOUyZJGHO+utX5pFebpLd5x0jDA5DnYjVl1kmghY/RvIsfEedTg9445QJKloPlg8aN12BWGcMtYN7WXWK6VHYDi5EhE4ShX3QgE/QzhU9+9wdyHMOGccaef8DiDHglK6TYsfL7EZboGu7JDXzzKTFxdE9lMEyzi2btKSwiS4OevAmmM3EnQjoSFKMOy0Su1phRmEyxyyLq3VmIWSgJZNwmVF/5jKzLO48xHjccy47sTenWdjPW2bhyXYNwlMo4weBtUAGG0r9R4vGaYQYt5uHxlYcOgNBFyJSY5m+F/p8FOOeu8C8jwqkDaQ87glonNPMlQHZyih
sJkTHHHcIpQ6ZhdfD7yWMnPdWYOScBF8pPAUXJ7FAL964aao0aA80FWMvdLPBdtLLRcbyMpdjllMmwU56CjNZKjBnZ36YVeu1vGCxjLFtGFlJ0uYWANYCCwoxRjroqBHNyEM9inFPLzI9GhBx4bjXR5gezWBQg6twhK09mBwVrPnH64GOuwGY+tQqO/fKM+wm4xV2nlQn2fWSa+z5aEfSWC0sCO2pmA7bcMhmQztKdUCTJEmqBcyMAGm5VWVkHzgaQ9vGjydYwWQH9xBnMpKAzYu22Ik9nGYn9lKZAWanx5lFsmxYBqO9yU56d5NNUBKpqsyyvSW1BAvgmD+GI/U0skyN18Ix9j6rB4KlKKSR88Qm08N7LuTYpgaQBxlGKPuNU2SGqFNiUIzyjH+96gFm9FLHqp0IlgaTYupTK3DQaXF0D2WgjHMLC0JJ2+QOrOim0S2K86OFpOVZptdKBptua8KCwhTcuVsdtopTMQZrKcCmR54X7NJuw/dSqjNjOQopGNuQStHL0U53/nXiJWiMQKMX85XT5rw0Ag5hA5kr7ILVA+x6lmL44R5P2WHgksUsKNEG00uvsMOBBsJkIAsRfi/+dWiRLLUHqONBs13VSXajyU22mbWg89FLGSjjnLK1kBo/GuWgxTu0SQ8VL88mebQGFzGciZSiMAmr4JqQyH0bYuOpGBgiicJMRGEbdmajsJYWPAiHmQVLseqdin+9CDRiGhDeEIeNw6hQzDmmr51nejTKSHs9UAr+oHtLUKOe1hXRs8EDnTclqZGD3YdhZoAYsLEGLFqFDgtmZqK0m5TZkELm+tAS7sNbugnpsOhDkAODOYUpXznsjkcFF5zU2H1uNRhF4VyS4XZi8AHnMgwD8LWLR5DezDi7XjrOnBYnxXbTThtSMEJjud2A8JQ4bHrkAJq0GHRwA+4QSikfs4tszLePsbGj0TunwPSo0MwAhRfhuqmAWWWoQYkb9UAnghYVkrPdhdkELw33P1ho6VbZfVYm2X3SPYnW5vVSBso4N5bh4UglsgdwYhLnUE0tIzWcejYQLkI3RGrUTyXZCVqH3g7lVb9YZhW9P3T8DNJbrDJM9qVtFn7Np1hYk0bql2Fzpsgmq/bicDT/C6KdgA48tEI5DpXplWahkU0j0hQugo20YPVokyVKbUgx9bTTZwNCSDFTCDzDKKNJB1Aw1sbYYnAL7B4TGyxoU5pniyi9yqzlZobZZdkLMJ3XQxko49waisUKLrpMowfUOKAbIj3QPIjeoBXyDQiia+Ge10wOJJlLv1xjL3Qmxarnzq2zymMnziboOCx4LQ8xr7MEo1tmgzlzXsb/YUgYXqTbp7kP5p+GkWzaMI5mFOj16F5NMwOUp57ilmmgiJ6ZMQjXovNz7GlWmbvxgP/ATRwSUtBi3nYSUtDCd1mDnUxTa+wcakzCxd5DGSjjnArxsCklIu6SBueOCx1CymFM8asd2GBkp8lu9GSa8XlvGXbyutC7asKTMALDRtkk88ocSN1Y8tiJTSkmS2LzhTb4IQYzTc3SuhSMAYcRVA/uLZRRge6d9PloQ7Wg4SKY+o82WYLOAHV2KAsRPds33sayo4SZhMaWeK0cBZ0ztagXXBZCktpuWBDaU4nVrYbP+jcudo77T73Q6IHghhijjS3oQQgXFaV8bGWZcXcqy/A+ZyHn47vzZ5HeN0qHkV4ahrf+bold79TYKtJ76vnjSE8Qyx1PsxM0O8Ii9dUEm5/miv/TsEahaDRyTo1eGiEO2Pmge5kL6WTpnkuNbGwAUQYwaBjiuinafAoGipoQJkQZVJyyf0OUMMxJnKKQOmS0SNYpsAsaWHjjQThML2WgjPNW0mjrlH9De+i8fwNo6V0sPEKLd2gjBkqzhVEfEZgjh+3RaYTYgyfMSJTlnmchcfwTayeQ3liahUMXIavMW05eRnrPXZ5Beh0ITykV4IIArCuSFAWdRQ0s4qYRPwxrgUYhjSx3aDMaCIehjd9oJoLCdqg
BVIE87lTomUIj7m4BFnFHmaFGnY8oYF6h2GoOq2XvkuL+S7PsZbpFCPeBhau9lIEyziU2+Qrz/k+L2yW1R6M/NJrWhMU7tMnBJnwxERhm3GmzsNjdyQWkNzLNnIEnNu9Cems19j4nYVOnOw6ySP3FDWY5tSps0482KHbAv0oL0pjGVoMttKQQPZx1hEIpJikmm+7xDQjtoyQDtNEczY6ml4Ol/6uPBAvFoOuBGNq5y7BvxmHmGdMOoRXYLIlG+GldQ/oyxLD1UAbKODeWRRHarv+JQFNttLFFitkwnBYKPh8uTqqyjfRCaQzpnR6+gvQeX78P6X3kwN8hvbn4BtKjMpJgm1QJpnaOZtnzbVSZE9GCVIoqQerGDLDO25DDHZ4vNJtHG5VheAMMiLTTlD4l2FQ3ZtaCj0eL8Kk0hikJNVOjsJ3sIptohTlYNNnx/4C0TX0EUiJGmmySUTtJTXaf8Qq7z8YYNJR6KANlnFtzE5yoPoVy4FIu29oE08MScKo7y1ARiryd3eizOweRXirGsANf3rkX6RU8BsN4ePzbSO+vdk4iPQoTOrPDsP8jSYYdP5Jn4cLVSbZwtyv+My2VCrPSKhFmbVVYeQKG37jrkIZ2EuL+obODi9uh89GG3ZVpR1kaAadnHw3cJHYgdBF27KRGNoVUuDv+53V5ChrnkO6xPsb2FuoAxgEOX+JORGM4WPa2NyJ9YZwbYy5JKklqS2pZa08bY0YkfUbSvKRLkj5krX3dGI+NSKTbuQOQA7CrulzGtoS5bCn8hmLFaBSOOh+bMII6l2M3ejSzjvQWaqyardlmS5TyuJ9IswLbp7dZs6TVIltIjTrb9e8/xOBFeYeF4eIgdHSxzIxzA7umWhcW5RaChdHgwkfYtfh26TaIsdyUMvB5BqnYOhVsjwHqDFCaQtqopw5oA2kEnMJFLDQIaB0Mb7oI7xPOsV5KXxjnXfkBa+3eHPejkv7SWvtxY8yj3Z9/8Ub/Cdlw0CKGC78JGQcwBSNkDqCTtTRPNzb2QkdTrPrqjgzDCRUhF1U+zjAHmx0WsfXgiR2FKZMmLbDNsAj4OuwhvlBiwF4nxhZEFGAOOk1qbTE1QaPeyzGj3tlhN0qdjw4sNqfsME6B1t2w61E9ShW5fYIZ2TRQ1MzA9wmjtql1iI0H8FhJigIIRycKjWxaXwKgN5KU3GCGS3UC9i+BHVBjtbAg1I88LOmh7ve/L+mvdQPj3FjmqRHMHsX50Ur+6gy0zmmUisI04QEahY1XLqwzzPnhDEth3J1eRHrZCDsJN5IsZXKxwXLWFyvsfb5n7GWkV4De47eTDPD83AJjh3FcFgKqV/1bCCMTDE+xtckcuegqs2KizWANBBrht5D72Drsek16rEZpKp89Xww2ParDAluaVaVR1OwiG7/6EHMeKTMagU91YtBRhQ4L7RBaOcg8R5q9oGw0NPjZS+kX49xK+gtjjJX0P621j0matNa+lmNfkXRjcKplRjMpxBk6x0Zz+wSMaNbZrHM3g43iUKw67Y6XvJcZvbTzZibKCOcfzJxHejl3DemdqUwhPRrhPw954+eSrCD0UIphx6/m2bjTJkuLq/5hV5UkTP9HmTHSHmeOR2SZ3aeXg9hxyJhDjXp8PRipp4ENKi1Y/4RhJtApa8D7TGzDmogimy+VNKU39H+f+YsQWnQSYsdhh1BR3nFYL0CzQUHVKvqRfjHO32WtXTTGTEh6whjzj8Jw1lrbNdz/iRhjHpH0iCQ5mWER24lQX7WSbCFiztYtyu/E1KjQjZt2cystMZhCtck2qe9cYZHXzD3MqL8vdQnpfX+W6ZU6zCv76vqdSK8Gw2Jp6CTdN84yHzXY9WMx5T/MaCFYNpNluPhyiY05b6vOrLROAkbTPEruzK5HmadaGea0RGsQ+w9fC2UFSm6w91mapeB/dr3qOHsx1DAk3TArU2w/oudzKwXnNGzMRCFCyTXYiXsoLAi9plhrF7v/rhljviDpAUmrxpg
pa+2yMWZK0jVDiN0o+2OSlJqYtYRDFzGoUB7OgCPSNFpBuYjphk/52N17GE5oLMNC9Ylhtvj/bpMVTH6nyFhl7s8zqsjTqVeR3tkU4whNQjLpHdwDPlg5Ob/kW+fq4/PoWpXTzDrotNgm0R5lEfdYAVLOwdSzW4QwhTvZ3tKmPdKhUEx9ci1YLukWxWTD7ti1sYCbCcHiR0L/3EoGG3WjGPfENstCVCegwwnhPmETomuIMSYtKWKtLXW/f5+k/yrpcUkflfTx7r9fvPF/xjYOshhpUyAaOacbIt3YKKa+mWcHRQ3yv4+57IXGIuwBOzCqWW6yASw2mDd3Tw4aXLCq8N35V5DeVY8BWCtwQSxWYYoGSgmMe+V+aGQXaOtNphahMAx4PQuNpsYYtOoLkDWHXQ1nFCzEqtPADT2LaHaUnkVYjzbXgu+TQDhwEGwH8pVDCsYOrPcgXVMlyYPQoqFng+0n8kZk341z7WLJv2CMkXbv5w+ttV8xxjwj6bPGmJ+TdFnSh97If0YWpLvjX4cuROqVe1nIogHvMwYLNNvJYD3QzSLzko4eYouRUhtSo74EjfOlOoP7vBRlsJ37k5eQHpW/KDI+dspTX/GY4Vvz/C/44WEWsd2CcywCjTvnJRYhbgzTNCBTUyLYPclCeIqFvPEUq44DTBRzTov1aJOlgJs6YWcHGLAJaGRHG0yPRpZr4+y8zF1mUcVmnhlY5ROs47QkiXEh3FD23Ti31r4q6S3X+HxT0nv9/F+xmtXY8/5xl6VZ/6uY0B9J0vYwTLGSToPijUIsTGNFYBFVBxZtpZJsEVNjeS7N8DcnsizC/3KJpRSWa4zlhTY9mnOYszMTZxQOPz79HaT3hcXvQ3rJODPqr1Mq87pCsyVxl1lN8TikOxtn1g+N9EYgdtzSCGoMegMwdN6GToS7SSEASE2C8A1qnFOsemOIcnojNRywiwPOckovmYCOXKRFI+5ITeUZtrdQu6wfZd+N81sprYTR9p3+D7YGsGMMXMEUZhIrUv5Odj0apaIRd9Nmz0cNhGeuHEJ6XyuxCOqP3vcc0nvbMMOAU57zsxXGuvLn2/cgvRjMIU+7rE3hQ5PnkN7lGuSPA/K1HdayM5Nim8vWBqTDyLGT1xThsUPhN1VovMIIuGIwqgkZuYZfhhA9CDmojUHDcIsdKpVJdr30KrseLWKkdVqk+JHCYzGWG9bYUScCliIp/cdfR3qVn3yQXbCHMlDGuayQUZm/5H9z81KwNfBRGsmGmC8YAaetpCl1I22gkkywVVwus+jk2DQzCv92kRWEdqbZ+zwN2VpSMML/fJkVrm43WW49AjvWxaEz0IQnbwRjMfxLqQKrzSvs2ZwJ5vk3mzACDrOAVHDEnV4QXm/7BHufDtvKlF1gN1qehllVWEoRg1zZjVywgTcyYZwy5ODPwLUHJ3UM17zBOop3scyoU+w/ovOBMs4jbckt+J+01Qn/E7YKCxjpWU1xcJQFpcqgx7gRA+3mlocFoW3YYTLnMrq6ZowttS8/yyLSR97OYCYnE4xq0IOd2Z7aPor0KLXht7ZY86IoLCBuAiaUQyMM6rNVYww2BQoB71CKO6aWvsLmWGUWpuQTEDvegh1QYZfkFgykOJAHnDbpCRrL3YIBNBoBd4swUg8i59TITsGOnRSyEzTTjiwLUKQX2bneSxko49walqprgfGkRm9lNtgucOXDEKsOsWmNg7ChSYKlyA9mWPhneGT5xn90DVmoskLLQp15LXccZff5zM480ttIM4jDu7OMreWeqQWkV+yw9zkEmyx9e5NlBhyIAydCHc4YhGEQPL0kOZOs4LWSgJmBGjvmohCLb5cZ1I5CEGl9EA3xU+YOD95nahWpqTYarPPowOZFzWn/li+hX5SkOqx5y19gxmttgq4FWpcCoVMzlOO6dzJYxnlMqo/4X5CZZf8DunWKVtYjNbWGoCJteQ33tajDDjQXwlOGnSrSy8XZZnN6mPGHX0kwzPLVCuM
fK3psU3xy5RjSG4MtXimv+tH4OtIrpFh0mXaGPV8Z962zUGYOYLsNG6PRrEATwmEcuJc1WPguOcGcgWaDZWfaKRip99jzuSzRgo3QxjCEfcDgJKWNh74jFornRo0T4RLCbHFpttZpp0/cZwU6gLRYuZcyWMZ5hEXBywTXC1N0w2eY3sZbaQk5DY+wna1dZxtUQwzXUoF4mE2IdT6RWYF6LAI+4jDDgkZ6KYvNNwtzSM+NsEzL/YlLSO+Uy2A7I1HmfBBZqWaR3vwwS+dt1ZnD4iXYWt8qwgZSLnP8Y1FIAwcj7gbuuRT20YKUiDG2tWCKQgpddIpMjxr11PBFzQzFMib0HimHext2RKdwmOQmWwzxInsxjeH+M4X7745uQkybLWTiNVFPqzTPNm5nC1ZZG9gqG3ado/jASIPt3EWYjlossYj0JsT1vnOcRYgn4Mk0A+E+Dtz1v7nCsNzfXGBOxE+fegbpvT3N2FqoRIHFtVFmp7xNM2ur3mLHQNph2a4ohNG0YcAACwy9RmuQrg5SFEYhHIY6A9QwjED8McUtU2cgBiP8TWick3HA5yycY5RYgrKuUKOeFpLGy2FBaE/FRlkXMrJp0MJHmkrMXmR6lWmmFy8H29rZy0GMGTxA3Rg7Ya6sMXjKdo1NmBakmHxg+jLSO5lmmYHDhzeR3oslNkFfLDK9i9UxpFdusYm9AzjL3zN7Fl1rqcbgMBQ7fvHqBLsepFJ0ppnzQWE7pszukxpO8RLT81hLAwka9dTAa8OECUQzYSciAaO2LRhdJrXtlPu9PMXu0YV4ekITKUnNLGSViTCPLAK45nstA2Wcmw7zshMQlf8AABkeSURBVAkUJuhuZ5QdJkJbEdNO4BDTRqkil0rsZKKp7ulx0E5W0vIGM5xoUdpajUEjcjBsdCq1hPSyQ+x6Begdb8Lw1laDWRYEJrRSZ1mdDQhPqTRh0VaCWWnRDAuF5pJwrlTYXLE59nyxy+x9NmiTQnimJKEz0BhmejQwRZsludCApSwvtJ6M0AjHquxcMLRovAKLo6GdRB0dWmdAnYheykAZ59awyUAKVSjOD6eVoNGbgKwy5UNwY4OFq7E0ezHHhhlloAO9lqevMmz17DgbiEqTeUkdSMXwxecZT+yB0wxGM+ewiPtYjMF9qHE+lmCY85mU//VwucKyMwmYDXKibC204EFPI/UTaTYG2yXmtDgptic1YMtyysgVK7JxgMkgLNh4hXZTk/KVQyhGEjZZIrzq5WloEEChHUI7UbYWaPMil0bA+y9wPljGuaxQFCG95n9kDNwxKjPQ6M2zA7QxzxaVk2Y71HiGsafkEyzHulxhkfMfOvAy0vvAvazTZxyeTOst9nxfWmf86D9wkr2XJzeOI72IGDvMVIoZ53elYYQfAntJ06PZBKPfWGyw7MwTF04gvQcPXUJ6T75wB9I7cpI5uGnYObXpwYJQSENL9ag02XTBASZBe5Ia9YltSGoA7zO5Dp25HExTA6FFsvVRdo/YQWpAmtYCzCiExnmPxQiliEoHATc6jJxTDy1aggWhMRZVacIbXWsyOEUzz56PRv1egFhn2mHymMuw3LMwsvzQKMMtl9qswJYWhI7CIsanFueR3vY4pFKEre4mXf9ORBkWblCmnXtnmMNCcfj3n2D1EFsNZlnUYLF5s8KsNGcHQgdYXANjqyk2nhZa0sJVB8JvCK2yxJ+vE2eKiW3/AbRGPuBGULRDKCyupVSKNMsSrYUFoT0V05Hi4Kyv+aciVisFK/kbwXaBi8CW1y3Y5U6QiWFrkeFs77yDUeNdLDCA5yLkHX/3OGxClGRG/ZzDeMDbxLuV9L5D7PleKbKiwqEx9nxFiK9eqUCnM+vfAnpomDV0qltmTE67DJJ0pswKYYpN5gBSRzyfYlbhehlCymDktTkMi9s3IDsMhG+0YGEnZV3xYLSXOh9Uz4PvpRP3v+fiJj00qwNZUKJ1phej2PEEhHil4KD3UAbKOJduwuPyex0
HpsxGWeqLtso2tHkcpC2LxqFrDjHnYwkWeV0uMrhIscaMu88XGJbbibOw2PFRhsV/7ygj4n9P7kWk99Y0o5hswhP0bwp3Ij0q203/J/bFBogWSCqSynZJBWj9nN9mzDfTWQZJohj3QpU9X2SHWdn0bMi9CrsyQ3gKzf46sOkRLSSlLDYU1mLhoekx/12JLf9nZtuB9QkwO5NcZ55cZYadl5EmtK+gcU6dj17KQBnnxrLiToJpi8AIeHsIerwlSOs1xIxeF2LOG0W2GJ0su14Ewm/mhmhbPSa00Qttyb5aZYWPT9hTSO+hURbtnY6zcchB4OuDuQtIjxbY1kEYlUbA8/DknXDY9aYSzMiuQlDvhRJzBrDAOEMnwRTLB5nDSWEtFGI5eoatvZUHoLMDMxEUUlFj9dgcMhLxv7cktlkWqTJFO33SImfaLwX2gymy99J2+69F6GAZ5y02aWug7W60DtvE0gYcKTbp4i7buSfzLFwxNcXwq7T5zaEkKxL74OgC0ltpMVjL+eok0jtbYlFUykZDo5NPbrEiv+OZNaQ3GWeGYT7KDFjqBBIe/g4cgw7kLSvAKrFh6Aw02uzYKQLOeEnKppiVVmwwB7edY3t8xKOE3kyNdotc/z5mLUP0FL7P2igsRoRGPRWn7H++1MZgc0FaZwCpDSlfeScWrHHubgU86G9ABso4l2FeKDnT2hADTqMVmTF2EB4cYrzc0ym2k4467D5dw6IxJ5LMGZiNs0LLIxDL/Y7UeaS3Ncwi7i83WMErdSK+scEKQr99iXUInRyDbC0jDMNPnUdSpEkjxNSxujPPHKRGh0HD1uss/59xGGh5DXZcbc3CA7sOI+CQBYViwGGNM74eLSpMsi0Xw2golWJ6hRmG9SH/67YTp9hxpIY7hFIjm0oHwn28TP+Zwv13Rzcjhg2OC+xX2qjAK7OoQx1GRwoJFhUr1CEzAqQfo7Rlp6dZDjIGI8t3ZZgz8NYka/F6HMI+3u4yTuhonrW3Pz/Gdv2zHisI/WrhJNKjjCYEOy5JTVAE854JBhGKQs9/C4KPv3yFjcGpsVWkF4OdPr1WsMVe8S22l9XnmXVuyuz5ok3YhRHaW7hBHbRShi5A6McB9j5ro7Srpf8X6gTcsTPqsb0lvcaCGtVxNgamBbnmR/rPFO6/O7oJ6USlZhZM9JL/Aa3SHQpWS3fWGZZ722GLo1Fm14tCGE0EFqBSI3sHGluffOXtSO+ZycNI74dHX0B6b01eQnrHYuwEPRFnm+lcjEVt74gzvb+qMIOSGrCEFrEMCzs9WCS72mAR8IkMcwC/sXAI6U2NsGxeo85YVywMpGC+cki2bGEBatBCgmASZ2upjbD1QKgNJakNo9npZf+h+tIsnJtwqsTL7Fyvj7D7pMw+jWHYmyAsCO29ELaWCPAKTRtuwBk2ySlbi9e4PYa47bGoQxHu3DGY33voEIOnUKjCr730g0jvQ8e+jfTel30e6Z2Ks6hf0rDNey7GrvfBLHN2qhCsudnx7wRuthkMo9Rma+GQyyBepQxzIhZyLNt1ucz03ATDKdQhBMCzkCi7GmyEv51gBklylTofTK0DXyds2aAaZPyIV9j7JIZ2O8FeZmqN4lqYGo3UE5tM4vUJNDPQS7k9LLc3KBFPSi/7H53iIf+bIm1UcHJ+GeldWGfGXRSmgpWAGPAJFtGkcINfPfQFpPd72w8ivUtVxo/eaLGlds8kw0j/2cJdSO/FPMOqH80wYOhynRXY0kLg+1KXkF4UhpwqHf+R82crLLJMpQH5Zydgd5iFCgMDZ+LMyKaMR9SYVIJZCM4SO1S8LNvj04uQupEtWYzlprzjGEbD1BSBhii5YHIdQrwgHBe3d4UvkxrnUcgOE6uETYh6KjYm1UEqi3jmHZdNgiGHNcT458e/g/SysPTchdVJ2Qi7XhRGsvMRtnP//MjTSG8ddjLd6bCo5hWPRQv/X5y1ZKeFj09vzLHrwcYyKzVWVPhcYQbpJaLsvZD
3+cFRttaJIyBJW20G2Xl86V6kN55kvQkuFthaaMOGap06pI8rMj0aybZRpkeZO6iRHaEFqJCPnSJPqdCAXQyYBA1QRCpJTomds/RdQgIpzPKS2oBFuaPQ+eihDJZxbhishRSc0NTeRp3tNA8OsWYt1FiOE/J3SQ7MK0WgcV7osOuNRNjUn4ZUmJOW4XPnYiw6ecJlEfcFj0U1aSObFrQQ6pCOr+zBro/wdCIsB7RLK12zlIJxZZth1dMwAl6G2PFWlbbshONQhD0woHEeq7D7bEA+b9rIJrHDnq8JIaSjLzFvYP1e5uRSIRF+aizT5kXxCjTqIcsL5e6PVWCNXQ56Vj2UwTLOY1IdoD9I2qx2hEWWaQTuK2sMpkDp1XZqzNiiHUmpfOzwU0jvlSprPb7RZHjgB/KMrYVSPs7EWPXVCYfBkv7T9J8hPUBUIEnagUDUKtQrdiCAFQgt7CxZtmYp9/sjd/0t0nu5MoX0NiossNHOQeO1wuYK7VIYK0PDifn9nBIRRtwJWYMkxcvMqN+4GxrZtGgSYs5JXGr4DMs+bd7Nzq/cJeZQp5aYUb9zgk3O6iRbs7FaiDnvrVhWEEA2qRhsN79QZL2WyxW20bR2mN5bTl1GemuwMyXFhVJj66lVxp6ShlzLj1cZBGDEZTCoVIxtpm/LswzNUYfR4w1F2PNRSUA+/Qit3QBSspSthW3nFbE9YjLG2FP+pn4c6ZXr7D4TDhtzCocxgKFH4nCRJjtSUJZZkhzWYoDDaCj/O4TDYIFNlghbXPEIM16pY0V5wKuTlD0FqWHMeWIDYq56KANnnEfABkcmQrvGXt3JOWbEbKTZThOZhJMVRvi3ymzTODXJ3kscwmiGkzA3C4UWhG7U2fuMGBZF3WmyDI0TYRj3uTQr7JyAHULHIEyI1kS0AWTk5RqLLJNupJLUgif2aJy9S8pXnk2yA7QOeyi06pCuLs2eL7XCnAEaAacwmugahO3AQHblAOzGDbNyFFJB+MolyYJ1SzHnNCtAs0GUXpI6EbBGXV42xJz3VGyEbVTojMmyMMf351lE+hnLIr3ZOMOc08LADxx5Cemt1Bl+lUZCc3F20Gfg+6y22MnUhOGm9Rpz5ipNdp/rOyxj8qLLOpJO5Zlxjp9vmxWgHprwD0v6yZlvoWuVIXccdVg+v3I/0qu22EGYddnaK1ZZaDl5jmXlGqOwEQpkQYkxhINakD+cGk60OJA6Hw7kVafUjU22RaiV9P9iaGSZNnSiHULjEC7i7rCgW7TBXkxjqP9M4f67o5sQIzZpPWBXjOcY0O8DGcaz/NM5xj8dhSBwV2wHdg2bUnGoR+UjuStIr2qZM1C1sIochn/WoaG20mIWwnqLOVfLHjOcLtdYNRvp2ClJ4ym23gkbzSLtOw6lBOcKLcqlsrDF3gvuEQHritSAEfAMxMaPwe6NV9j1mtCJyF+AdKRTMPoKpyflR6fUm27R/3tJrcPupxBmQnnAmzn2UmiHULdIu7QitZ7KQBnnFHNOIucliH/chI1ChiIMhkHbWnRg/qslyBcK0230em0bbAFIwkDmB/hi4lGG5R6CfGez8W2kV4KsMqUUDPtBaUMnqQ4a0tBrdSDLSwR2GDk4zSBJdcsi51+JMcjVqxusN4EgRWEEGuc0Ih1pwIY067BpDmyAU5lkepCECL9PiNbi8CLAoEKN1/QKe5mxGgwuDcGiasgZb2Gn8WgjLAjtqZg2LFYB86ACab2erDBs7pLLMNkJyFdOadkIxlbimN4dyNFMn4/CaKgepaakz0clSp0I6FxlYSEpvU8qTesfikGNV2qcU0lDR47ysWdgkXOrCYvSomxP6iThmt2E2PgMu8/aCAwYQHYYGnF3md8vt0A7dgbbATVW93+ftJlQaZbNseGXYW3Xi2yylA/DGjsY4c++wJro9VL62jg3xrxf0q9rNwD8CWvtx2+oA8aGwIHbS8xN/kyC4TQj1COExVdxqOfEgjUK4xG2adCitArkyW7
SgtAthuUeHWabYtqBlBFQ6PysQ9zyeoG9z1SSvZcGMAydeLBraCTFsnILGwxmQt9lCrKudNaZM9BJsrmZWGVr3csE6zjWx5kebcAXLzGDkhr1XgZmoCDmHMbB5KX936cDoDCSVBuFXWFH2H5bOMrWXqTFni9zle2dxbdMID1J0jmu+nrSt8a5MSYq6bck/ZCkq5KeMcY8bq29fsWhYXRNqIocMiO40Hjd2GLVJsk0ZDigHcEsW4xplx3YXpSl9zZL0DOH415fhTnPDJsva4vMcMpPspwundfVJmxZ3mLj3oQNaeJx5gTWVv3Ps8gUc6zcgI16KqUCgyRV4sE2h8lcgJ0+4W1GPLbpRiE/OmE2k6QWDBGTZjuSlGZJY7Uodhz6SJQKkxTmWlhLRu+RMt9QakMq8R1m79TG+88U7r87+gd5QNJ5a+2rkmSM+bSkhyVd1zg3LdaFjOCbYmWWEtwqMqMwCo2DahE2E4ItqKMTDG5QLbKd1M3AVHeDGXedMjthrMuicNQZSI8zCofiCqQciEP6uCE2XzzaWn2bOQM1+Hyxkv99opph1l0twp6tFIe86iV2PTfPDtDGDtwjCsFiwKOMVAazrlD6ONpunkaWaWdRuAVyYxnyo8dgywYSUKTc77C8RKYDu9fCMa8Pw74nB1kQjLLf9FL62TifkbSw5+erkt72uhqGeXhRcFbQqIOBRnb7AkvH6wC7UQNbJre22QEaH2EnWqPEDJnkBQhPybFNqp2F7Y9jkF/7WZYLtrNs16eNYRuwdiP3DJtn5YOwyA+Ogzni3+KyHnQcW5B/GjbbiSZZpL5ZZWM+9AKMZENjEmOroX/rBN3pE5YokPNSkhzYpAdHbeF9Uj1qE7hV/3tL/hXmkW3dy1i1YjW2/zXzbC/LwMJVL8UmtVOERBY9lH42zt+QGGMekfRI98fGM5/6D4yrMJQgZUzSxn7fRChvSMKxuj0kHKfbQ8Jxun1k8Mbqm/t9Az2R/R6nuV78p/1snC9Kmt3z88HuZ/9IrLWPSXpMkowx37DWng7m9kKhEo7T7SPhWN0eEo7T7SHhON0+Eo7V7SGDOk59SL3+XXlG0nFjzGFjjCPpw5Ie3+d7CiWUUEIJJZRQQgkllJ5J30bOrbUtY8wvSPpz7VIpftJa++I+31YooYQSSiihhBJKKKH0TPrWOJcka+2XJH3Jh8pjvbqXUG6phON0+0g4VreHhON0e0g4TrePhGN1e8hAjpOxAbcxDyWUUEIJJZRQQgkllFCuLf2MOQ8llFBCCSWUUEIJJZQ3lQyMcW6Meb8x5hVjzHljzKP7fT9vNjHGzBpjvmqMeckY86Ix5t90Px8xxjxhjDnX/Xe4+7kxxvxGd7yeM8bcv+f/+mj3788ZYz66X880yGKMiRpjvm2M+dPuz4eNMU93x+Mz3SJsGWPc7s/nu7+f3/N//FL381eMMT+8P08yuGKMGTLGfM4Y87Ix5owx5u3heupPMcb8u+6+94Ix5o+MMYlwTe2/GGM+aYxZM8a8sOezW7aGjDHfb4x5vqvzG8bA1p2hXG+sfrW7/z1njPmCMWZoz++uuVauZwtebz32rVhrb/sv7RaMXpB0RJIj6TuSTu33fb2ZviRNSbq/+31W0llJpyT9N0mPdj9/VNKvdL//gKQva7d/zYOSnu5+PiLp1e6/w93vh/f7+QbtS9K/l/SHkv60+/NnJX24+/3vSPr57vf/StLvdL//sKTPdL8/1V1nrqTD3fUX3e/nGqQvSb8v6V92v3ckDYXrqf++tNsw76KkZPfnz0r6WLim9v9L0j+TdL+kF/Z8dsvWkKS/7/6t6er+yH4/8+36dZ2xep+kWPf7X9kzVtdcK3odW/B667FfvwYlcv6ApPPW2lettU1Jn5b08D7f05tKrLXL1tpvdb8vSTqj3UPrYe0aGer+++Pd7x+W9Cm7K1+XNGSMmZL0w5KesNZuWWu3JT0h6f0BPsrAizHmoKQflfSJ7s9G0nskfa7
7J987Tq+N3+ckvbf79w9L+rS1tmGtvSjpvHbXYSi3QIwxee0eVr8rSdbaprV2R+F66leJSUoaY2KSUpKWFa6pfRdr7ZOStr7n41uyhrq/y1lrv253Lb5P7fm/QvEp1xora+1fWGtfaxf6de32u5Guv1auaQve4IzrSxkU43xG0sKen692PwtlH6Sbpr1P0tOSJq21y91frUia7H5/vTELx7L38j8k/UdJr/VkHpW0s2cT3PvOvzse3d8Xun8fjlNv5bCkdUm/14UffcIYk1a4nvpOrLWLkn5N0hXtGuUF7fZiDNdUf8qtWkMz3e+/9/NQeiM/q93shOR/rF7vjOtLGRTjPJQ+EWNMRtLnJf1ba21x7++60YWQHmgfxRjzQUlr1trBbOQ8OBLTbor3t62190mqaDcF/10J11N/SBez/LB2HappSWmF2YnbQsI1dHuIMeaXJbUk/cF+30tQMijG+aKk2T0/H+x+FkqAYoyJa9cw/wNr7Z90P17tpv/U/Xet+/n1xiwcy97KOyX9mDHmknZTfu+R9OvaTeG+1vdg7zv/7nh0f5+XtKlwnHotVyVdtdY+3f35c9o11sP11H/yg5IuWmvXrbWepD/R7joL11R/yq1aQ4v6B5jF3s9DuYVijPmYpA9K+qmuMyX5H6tNXX899qUMinH+jKTj3WpcR7tFNo/v8z29qaSL6fpdSWestf99z68el/RadftHJX1xz+cf6VbIPyip0E01/rmk9xljhrsRqfd1PwvlFoi19pestQettfPaXSd/Za39KUlflfQT3T/73nF6bfx+ovv3tvv5h7vME4clHdducVQot0CstSuSFowxd3Y/eq+klxSup36UK5IeNMakuvvga2MVrqn+lFuyhrq/KxpjHuyO+0f2/F+h3AIxxrxfuxDMH7PWVvf86npr5Zq2YHd9XW899qfsd0XqrfrSbqX1We1W6v7yft/Pm+1L0ru0mx58TtKz3a8PaBfr9ZeSzkn6v5JGun9vJP1Wd7yel3R6z//1s9ot8Dgv6Wf2+9kG9UvSQ/oHtpYj2t3czkv6Y0lu9/NE9+fz3d8f2aP/y93xe0UhS0Evxuf7JH2ju6b+j3aZIsL11Idfkv6LpJclvSDpf2uXRSJcU/s/Ln+k3ToAT7vZqJ+7lWtI0unumF+Q9JvqNnYMv27ZWJ3XLob8NZvid/b8/TXXiq5jC15vPfbrV9ghNJRQQgkllFBCCSWUUPpEBgXWEkoooYQSSiihhBJKKLe9hMZ5KKGEEkoooYQSSiih9ImExnkooYQSSiihhBJKKKH0iYTGeSihhBJKKKGEEkooofSJhMZ5KKGEEkoooYQSSiih9ImExnkooYQSSiihhBJKKKH0iYTGeSihhBJKKKGEEkooofSJhMZ5KKGEEkoooYQSSiih9In8f49CaZDAxKtPAAAAAElFTkSuQmCC\n", + "text/plain": [ + "
    " + ] + }, + "metadata": { + "tags": [], + "needs_background": "light" + } + } + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "GyYXjW07jCHA" + }, + "source": [ + "Now transform the waveform dataset to have spectrogram images and their corresponding labels as integer IDs." + ] + }, + { + "cell_type": "code", + "metadata": { + "id": "43IS2IouEV40" + }, + "source": [ + "def get_spectrogram_and_label_id(audio, label):\n", + " spectrogram = get_spectrogram(audio)\n", + " spectrogram = tf.expand_dims(spectrogram, -1)\n", + " label_id = tf.argmax(label == commands)\n", + " return spectrogram, label_id" + ], + "execution_count": 127, + "outputs": [] + }, + { + "cell_type": "code", + "metadata": { + "id": "yEVb_oK0oBLQ" + }, + "source": [ + "spectrogram_ds = waveform_ds.map(\n", + " get_spectrogram_and_label_id, num_parallel_calls=AUTOTUNE)\n" + ], + "execution_count": 128, + "outputs": [] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "6gQpAAgMnyDi" + }, + "source": [ + "Examine the spectrogram \"images\" for different samples of the dataset." 
+ ] + }, + { + "cell_type": "code", + "metadata": { + "id": "QUbHfTuon4iF", + "colab": { + "base_uri": "https://localhost:8080/", + "height": 646 + }, + "outputId": "f5057001-c343-43c3-ca64-cff3030d3380" + }, + "source": [ + "rows = 3\n", + "cols = 3\n", + "n = rows*cols\n", + "fig, axes = plt.subplots(rows, cols, figsize=(10, 10))\n", + "for i, (spectrogram, label_id) in enumerate(spectrogram_ds.take(n)):\n", + " r = i // cols\n", + " c = i % cols\n", + " ax = axes[r][c]\n", + " plot_spectrogram(np.squeeze(spectrogram.numpy()), ax)\n", + " ax.set_title(commands[label_id.numpy()])\n", + " ax.axis('off')\n", + " \n", + "plt.show()" + ], + "execution_count": 129, + "outputs": [ + { + "output_type": "stream", + "text": [ + "/usr/local/lib/python3.7/dist-packages/ipykernel_launcher.py:4: RuntimeWarning: divide by zero encountered in log\n", + " after removing the cwd from sys.path.\n" + ], + "name": "stderr" + }, + { + "output_type": "display_data", + "data": { + "image/png": "iVBORw0KGgoAAAANSUhEUgAAAjwAAAI+CAYAAAC4x9CRAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4yLjIsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy+WH4yJAAAgAElEQVR4nOy9Waxl2Xnf9+0zj3eu8dZc1RPJbpLiYEpRJNGTbMh28pIBzkucIH4KEiAKDARIHhIkzpsDCDAQIEBgJAgcwwiSWJGixAkkihYdmRIltZpsNrvZNd+6defhzGMeqrrW7/vvs1dXCa2uo+ZZQAHn1j5nD2uv9Y3/7/8l0+nUFmMxFmMxFmMxFmMxPssj97JvYDEWYzEWYzEWYzEW4097LAyexViMxViMxViMxfjMj4XBsxiLsRiLsRiLsRif+bEweBZjMRZjMRZjMRbjMz8WBs9iLMZiLMZiLMZifObHwuBZjMVYjMVYjMVYjM/8WBg8i7EYi7EYi7EYfwojSZJ/kCTJf/my72MxnoyFwbMYi7EYi7EYi7EYn/mxMHgWYzEWYzEWYzEW4zM/FgbPJziSJLmTJMl/nCTJ20mSHCdJ8o+SJKk8PfbvJUnyQZIkB0mS/JMkSS6+7PtdjMX40x6LPbEYf9ZHkiTTJElu4e9naaokSX4hSZIHSZL8cpIkO0mSPEqS5G9lnKeZJMlvJknyK8mT8Q+SJPn7SZL8WpIkp0mS/G6SJDfx/Z9JkuS7T/fNd5Mk+Zmn///NJEn+GN/7p0mSfBd/fztJkn/16efM/feTOBYGzyc//nUz+ytmdt3M3jKzfztJkj9vZv/102MXzOyumf3PL+0OF2MxPt2x2BOL8Vke581s2cw2zezfNbO/nyTJKr+QJMm6mf2/ZvY70+n0P5iGnk7/ppn952a2amYfmNl/9fT7a2b2a2b2K2a2bmZ/z8x+7el5/j8zeyVJko0kSYr2ZE9dfGpQVc3sq2b2bVw+
tf8+2cf/szMWBs8nP35lOp1uTafTAzP7VTP7kpn9W2b230+n0+9Np9O+mf0nZvbTSZJce3m3uRiL8amNxZ5YjM/yGJrZfzGdTofT6fTXzaxlZq/h+EUz+5aZ/ePpdPqfym//1+l0+i+m0+nIzP4ne7I3zMx+yczen06n/+N0Oh1Np9N/aGY/NLO/Pp1Ou2b2XTP7OTP7ipn9kZn9jpn9S2b2jae/28c1Zu2/n8ixMHg++bGNzx0za9iTBX/3o/+cTqctM9u3Jx7BYizGZ30s9sRifJbH/lOD5aPx0Rr/aPySmVXN7L+d8dtZe8NM9sfTcdfC/viWmf2CPTF6vmVmv2VmP//037ee8xo/cWNh8Hw6Y8vMrn70R5IkdXsSpnz40u5oMRbj5Y7FnliMPyujY2Y1/H3+BX//35nZb5jZrz9d588z3P54Oq5Y2B9q8HzLsg2exXg6FgbPpzP+oZn9rSRJvpQkSdnM/q6Z/e50Or3zcm9rMRbjpY3FnliMPyvjD83sbyZJkk+S5K/YE6PiRce/b2bvmdmvPsXZfNz4dTN7NUmSv5kkSSFJkn/DzD5nZv/H0+PfsSdps6+b2b+YTqfftycG0p8zs9/+E9zfT8RYGDyfwphOp/+Pmf1nZva/mNkjM7tpT8Bqi7EYP5FjsScW48/Q+A/N7K+b2ZE9wZ79by96gqcg5b9tZg/M7H//uEqppxicv2Zmv2xPUr1/x8z+2nQ63Xt6vG1m3zOz70+n08HTn/1zM7s7nU53XvT+flJGEsDii7EYi7EYi7EYi7EYn82xiPAsxmIsxmIsxmIsxmd+LAyexViMxViMxViMxfjMj4XBsxiLsRiLsRiLsRif+bEweBZjMRZjMRZjMRbjMz8WBs9iLMZiLMZiLMZifOZHIXbwK3/7v3lWwlU+nrhjvTXYSjhUPvHf65wN36sc+oqw3Cj8PaiH703FDJsUw+dp3h8bgdGg9tifn99NcFvDWuK+V+yE3039IRvhu42H42efO2f9jRTb4RzVvZE71jkXprm/4i9QaOMc3ez74Bg2/MH6o3C9cclPHt9T5SBMgp6D77eyN3DHOufL4Xwr4XzLt/33euvhOScFec5eeLby0dCf/2x4wXxPiV9K9p1/9MuRWfl0xl/4hb/77EGKR71n/z9a9lWm40pYH7n+2B3L4+9k6I9Zksw81rvgyVHLO2HhTEt+G09zPEeYxGTiJ3S4Gu453/X3MS5jfct+LG2f2qwx3PD3mMO1RzV/j8NGOP+w7i9QOQr3UtrvP/s8LfrvTbDWC6d+LY4r4XrFt2+Ha7113Z8Dz5nv+Tko7IU5Hpxv+mO43gTvmtc1Mysehfsf14vu2ATPU8T5polf5lPspZGc47f+z7/z0vfE1/6dv/dsT7g9KwXACf6m7DczK0D2JWN/jL/rbGBfydbJ96FPRL5RtvJ7PJ8OlW9Hr5TCtb0Is2TC+w//r/qqdBq+NxHtu/Kr7zz7fPqXP//s82DJr3uef1TJ1mW8pyf3MnupNO903d+HrwWOxXHZf7d8HM5Zanl50l0P98n5UZugeBL01fENf4HqQXi4/jJsArn1cTn8R6nln/N3/4f/KHNPRA0eLthix9/0oBnOWQhy370MM/9SS6d6jvBAje0wCYe3sm8r79egFds85h98WMOE4ZT5vvmBn+mioLE1xuLipjHz8zGseaGUwwbWjULjblAP5yifZi+m0ok8J5RHbihSBn+Oqrh/mUcuoO5Zvwh5j3mcv33RP2cBmy0n0i7fD88zkTmmgi50w/dyIvjmYtAg6WAhicFDYUPFb2Y2XAqCs7x14o5Ny2FOB2eC4FHhxfuY5r1A5PuaFsLCzw3E8IKRk+v5hUmDtfTetjtm1fCsvevr4Rwj/5y8j0Lbn59KbVyW/TKAkcZzihjjUydiVA7PBU9o+LOvzryumd/vE3EW8u1wX/m+alfMfyH8Ts+fPw3KZLRccseyFJAOGos6x3Mx8MiUwaNykvk91QV9
KHU6j2Zejhdo1IihXIahPKp6S8PJHIi3gshxGhDqtFG3FduiyxrhXppbYa3T2TXzcnYixtDpL34B1559XTNvLFL3Pjl/+Nx44B3vcSXcY385XPzoFc+DSFmj5+f8UPc++Zt7GkZ61c/jqBL2lQY3CrAz6Aip88t7VF0cG1GDh8qtt+rfDm8gh0WeF4XLSVGh3VuF0EjC+fUF1/Ymmcc4YY273lLd+Wpg8eaLU+9iUkwyj9G0rG0HK2H/88IbhZ+p5Z70wjmqe2LIbMCQgfXfF6uex/QeKTiHdb+4aPFzYZRO/GZoXQqLkBEjM7NxOdwLF68KNBpbatgVT8J/jOoakQifaw86zz6noh9zMOhtT+phDRSOvWQYrMNYEYMnYWTzrI8clL77XvjeV4Oi1gjGBEJDjRWD0p0WYQz35By1oIBHTXHlMAavXHB/F/fbM7+n9zhq4vx1Lz/6K9lGemk/7OPcaZjXwYUlf71jRH8kskIFx/WV80vbim0YfWrUYB7zhx13aLQRZEu+E+ZfIzw0YHUdGAyl/G4wfDuvnvHfwzZLBvNn8NQeh0ktH4Z3cvSqdFHAc6jsoAwbVb3s43cZ1VEHlzJmXBIlC6NpXKTSzo40DVbymccq+34hUYkPYExo9IHyWZW4y3ggGKBrls5O7bHf+/21sP5GNTH68Nzr3wlOzNHXzrnvFVvZTgbtgLFElzjnFURq1HCkDklE3Q6a4fz1R0GeqGzp0aERwzc2ogZP7OWMGfnAg3JRmHkle3JFwr1YDM7iT/wDnF4Kf1/4jhc8/fUgVPe+7DfYBA7VGKfXkDFdxXFOlXj4vPdmUHBqeHGs/dC/nNMrYbLUkGGozln1Mt+cR712b4kn8cdo8XNxtS94r5qjdTESocLmKx9LxAAC6HTTv+vcKLwMRntSv7sZ3mFsjl/WoAc/WgnrobjXku8holX1c0HDfyJpmukrV8I598NaH656L6zQCmtstOSNlWEjXK90FL6XE+Gef7AX7unMqjs2xjlzXZG4jHJxbbS80ZcrwYkZaXQ3HJuIcupdCGugWOe68Qsi1w7X651dc8fKB5gfGNi17/l+jFM89+CMT8kVjxk+9u+pvxr2SKEbnqV46MPHQ6wRjfQl6DfZvxoiZeUDmcceUtaNbMP0ZY3eBlL2UIiNB6KkNiBXNG2H6VXZx7TQEGulKLqmcBrmvn/LO6Rnfvvxs88nXzz77HN3zb/XymG4OA0jM5/C6ZzPlp+lk7BOuxsqB3G/Xf+gpWMabGHdM3tgJlGWlqR5YVw0Hkuq6o3ghI3OLYf7kOwN4RpqODYeMYqWHQGj0appN6a4ih2/p7kOjm6FtV469dHR5XePw/lveUcoNqIGD1MUqQFN7TwoVcbNbKNpsIzIBNMyIl+re+E+tn625o41tmg06f2G8w8gy1Y/8DfZPp8dXXIDp+fGMPMpouPr/uUQvzKQd8OIT+ccwrriGbj8qIY44SFX97M9QIZddY4re+GkDCGbmdXvh41z/GqYf8Ux0dhd+4E3TDvngReRlCI9DxeefOnohPQoHoS5SI4ClmWy7l8s00wqOAudMPmJGNiM3NAYGqz6NVXcDcp4vOGNoco2BB0jHVUvpIdnQg9Ejb7mW3hJopwmjFrgdxqBYcRk2PTXphIrH/oF7YwoGFuDda/EKp2w6dTQmMLYYvSg+yXfj5EpouKeVxCj8yvhc0PTt8An1Rmt8t8rPQqRm+41b1Tm8Wz0YDUSN1wLe46G7rwM57QhGnP0qjfO8pCDGllxaXOJ3EzhADu5K/Lh+I2gxDXCvPezIYqxdCeslWFd0vdYl2OJQiXIPqsSJz6SRo7iV3qrNAS8nB3AmKXOWP9Dn/be+3J4TsVsDmGEtK6KkwQ7un0x7CXNCnj5LNkQzA/1iZlZHZG+HqJjmv7jHOg6cPNKSE3Ly4jdr4e9ybn/uBE1eGig6CJkqI5pmXzPX7zQg9AWSzWB0iXGprzvr3V6mXlVf4+9NVq7Onm4
f2yA1gWvqJfvwtNa9sc40e3z4eJq+XKjpAB5WLy6uGglTxhpks3mIjWSV+XgYjKTcCKxShIFLAeD2UrHArreDBsnK5Wp5+yd8YLEzYnYpRSEjEIVTzWW+/LHcC3MRQEpiXHdGyQ04iZleSdDGveCeyHWAMZQTlIZBCorFiRBu5ikHRTkeMkLQBplxV0PRB4hJTcUZV99EATwcIUL033NRXWmEtZ2OCMFRSMqxbRmedun0vrnghdDcLCZmSnm6emgsWnmjStGlszMaj8+CKcreWOu9MMH4XdfhBEllx2vhHnUCJXDXnE+8l4GjavE8Mwfro2ykPen2ArKjqmm/VlUIikKrmeeXyMMzDqkcECQu61LiF6m0kXh40SCOLyepmF5zK1nad3E6ykQlzrWyfEbPu1dgYOQSufgnKUjv96IJyo5Q0yizFh+apQ13w+Kovd1H1Vlqt6/J38O6jadf9Xvz/5fIuHMZKRwq5ERj/DAY2eu28wLiu5aEPadc36zctGnFhfBbggInF6SlYCHK3lj1y0uzRUy6kKDQSNBQ7yQzhl/jpqhiqOfbbhkAbbMzDq12UaH3nMJOqdypKA+JvL9OaIpxcFsQ0NBgy6MWZYUTEbUSxdadR+h3HW/DpZuo1pFDADmgpmTHstmm4dBrzwZhM95iYLk+uFYriMeO/AfMewPjSYFw/YuBWVfaGcbhr3NoKiLJ/4+qEiG53w6x0VZRMny/qnEtRKLc1WUSFbnTPBGlt/z0cABjMoS0jn9894gqWyFDTPY8Me4xurv7YdzXFlx3yPOQZ21wUV40rJmh69dCve4F+5/tCTVejBWGB008+96iBRiIkqyci8omf6l5w/ff1rDpbwhK1RGVhB97q/4+WTkRh1jVgbRsGAVj5mXdVqFSmeYxpYaHTQSFKfTX4HBIDqE91LfCb9Tg4FyXKNQvOf6NvaVRJMYgaEzbeZ1z7ApEXjoLxdVF2eKTr/u/dNXw/5RwDdB/944dF+zMnSbGsUTzCOzFWp8cr08L/jf7GMMHt60gvEmGRGNobyc2mM+nJZMY1Lo5U8khM7oz5GfoBaMI24MvX8u7FROtEbF4g55rFI52+jg5k55rNhsmt4YIzBQwbM5A8d8JVOq1BHnV4OHiovGlQLOeP68bCJGx0YR6zwHbE5u5G9yuARhcegVb24YjjG1qYbjPIwECni4DqyJAHkniKaksCEnQWJNKtEt+Gwo7qkPRV2SyAHBwiy1zp964yrB4lOAOBW3ruesVFXx2L/X7sWg0EtCReBK5/v+WKENeoPa7PTZk3OEG9MKK0bVOq+ikkzmkeDmiuJvVhEJ0Oou+jCIyGglHOWmCmYaUYUusRF+7xCQncxhhIdywHnocquZURDzclDlCo85B1qx+nT8RIZRPhNkTZCvmQccpyAYMEjo3Jn5CET7LIxckcc0CFWJc06I9cxrAUh7dtbBzDuhWllNqIILWEhpfm03G96QFYHRe445a3w3imnlvPr9km0TTF7AL45XabHEsJKtfFw0QgDHjkNBoic8Vt/CIlzxK4GVXt11sf4Pw2flXuBGmSDCoxgYpohqUkWVFT2ZSirdRZrEqOF9KWqf32XVmm42h6fS+2dYXlJanAMXUlYlxpJL73C7vDCB4Ao2HR9jwStfEj2d1ezyXCf4BvMn3MesPIJiGonnXfvR7rPPk0seu5FrQ7Ee+1TS9FrAGgxWpBIQo3zIqjct6w4vs3gYogpqeJXf2wrHpBJrRODzgTcE+uthM9V//174/89tuu8xx5/v+gVBYd++6aMuXA/lffDTSJRocBbpIgFWD1h1hohJdccbpqN6dsRkACOdoXwzn9pkWXq+LZVwUIS9i37+3X3QyFFqjEq20TcPg8YfZYymlaik0hAJRH8EVsD14OlQxLmLYE6puFlEsvyhvC84heQVM/OGWPWxdx66iDDqOuWgA10WehEGAOiQ6js/wf0rRxYNwrZgLKsOZxSOKR6V3ESNLdlXS+FY
TzjlaJjVdkhG5O8xi6/HzKy3hnvG70bCnde8F344kPUSG1GDxwkeIR7khNFKq+55bdxG3lBL0LhRxtVsM20IOZETQ4OCcyhVkAVEkAuQ2RXBCHVhiJEo0cysuotQK0G/KY8PRoHMf/WA6b9soy/TUzKzQSQUSkNMf0ehQONNgcN8v/Utv5l7G0HJs5w4leriHChQFx5sqkwRuolkX4XDCFjpJQ1W2tDgq24L4JVVEMJB03ot5L7z3WV3jGmaCvZfTkjxSHaXaCoXUSgSFhZPhfDxiyEtYzk9B0gPz3rDi8Rh3TfDOdRb5jo9fNUbuVz3Awm9M2I8Afi4LKBi7rncwAtmVmZ16HGf+qIHRlzbV0WAYHSueBwF758RtnHNPyfntfbujj/na6H8vNgKkzUUvp7CBw+ffR7d9EblPIwsbEupk+3cKRcXDdvOGY81Y8RhVGYUwd+Hi4KLOmG0gyXfatS4algxNPi741f8WmFAwMlj0QWEWWgUn85e4wGqxSTYwP2hxyhbOW9mPu1GupLumr9JBjDaF4QwFJEyjTxVcT1eS9NWnCspfnT3TJJbTT2eXg4vX22O2IhHeBz+Rhdv+MwHGCvrK/OlSjIE+UULfCwyg5aeKtkR5ZecP6vkuyuRJnc+qfpsbRKQHf6/uiVWMbBLSurnwNNiaGQZc6kIDBZXmm2ahox4Tsg187lLgpyn8m5vegXnvCpGuURJ+nXg75EcGaQSMJOUGRRVf9UrmXkY9Ngp3PMHPnLQux6MmpJgAZjSG0n10mgVLKeVbM+FgFcNGY+RBmIqTFMqzN1PCn7B5QHu1WPEVlV2QKxXfz4lYJaOYHJw7VfeDVGozpte2U+hQKkwn1wP/DqjsKYGy9kUATo/1V28J0kzMR1Ffp1cV9Jiy8Gg7d3ccMcoU12USEp1aeRo6m4eBmWOw/FJqoTOl5Zkn9yAkSOOMYG5VOi6pvoI1qVoVDLS4/rOGWVRWc11qWz9DnSNiH4qPeeIAf1Nti+GC7AQKMUVhPsaSxCYc6Ls6VPipiIFLJT3Om/Uozr/QwQE9JwchJ6oLqOhxPWi58s7ffgJYXgY7SidZlujroWDVimQekEXIV7WGPIqL479EHovhepm2wmZlBHkSyVQjqRwQLGXn5V3bl0ScDaMt5F3Iq2Ka/clgs70EdNFati56iUJhbq8cGQBceMo/0QdWCtd5Cz7c1w+5/w5+G5UWNDISYFDMzaKVsLNw+gDoE+Db7wuoF8XpfDbjODe8q6PWhC8SsOimCpvJf7Grwem04YXkS5KdA9DacvepGFX/fDAHZtWw7VdakdeF9+fhqQZjS0f+d+VQGnU+VKIIKXalQDLQO4hM7PWJjxArF/FDDC1ommW7pnwnNXHYsighLgPAHbttuKkWLUkPEJIPQ5Q7aaRLKYsY0bwyxpsB+DYtRVQCwM1zbScnaqiDKNcjOFJVHYwIs+1rnjOWGEK4QhaIeYIC1m1JlEQOr/HN/xaZEHOJAs7amY97J3arhhDMCRPrnnHkgY2QeMp5xo6qeez8daAo6/ONaNNlOmqy7jP9NkIVD66DpyidrNBmlqhLLHx3GXpqR9iUsY0zjX6gJejD66CbtZvzLxCL3hZ4I0hiazwehSwhU4kxyoRHhovDh0vOJe+hyG40TmLzSwktbyeyxELlog9sVQgdM7AO9TWGwhdUkCo4dhfjnglpFsnFb+ElGm5K8kaf1eX8uLeWvCCGbKeR+JBGmsT5+WLMhuyZD17m2lbCJapT4bwNo/9wk82guRULJUNZuN7VLgUT8M9K5Mp0zuDVR+ZKLYA3AYHjZbIct2rXODffRGqrmx1kJv5/2YegEymcD2/wx/KVFHGpVoJOAoGJYmbDXytilFZ2kUF16p3x/t8h7xfwWS5KOAcYniccmOqT8qiu0gtds4IToeGkkQOBmRhrvB7guu4H9bw0S2v7F0l6E42cz+/VxDHjA7e8h0Bp2MP9lefb90PxfklTw6dB+1ZRedU
CV6JlVRd5oxAzrfoJOoC1Sd8TzGjTwMfWfeh0AdiUHNRzCmMphcgbItjeHCeoTwcFbxOijsHPXafmrURnOI8Fm86ysIT+mM0ciaSRikB0MxIioYI+XJOBYhVfwRAMPE3ch/Nh7Cel/05BjDKUuk6fJXGXPt8NvhbIys0ZFKpJDwrN4PmPVmWruh+LtAsgKKZDy0mKQ4OzKOUF/MYKxA0EjQPY9gMk82U0MFbXnpt/PMQZRmt+eedQqGpcuN6qDwKkqh7xWN9mBZzjT7NrP/aeRzLBsI7Qr6LsjkzrmVmroknq+p6UlBAR2Uk69IpRo8Hds6Jm+PPS0UNDIHmQ2mVguoVClEFijItGQvDn9yqZR6r7gQDcyJEdifAepTFAHDtVtgvS5wFpri0ZH0eBteYq8BZ0/Rh9jkop7QyiJmA5v3slNnxjbDIioIfYgqKSnYszjUBvG2hWGG0Ro175zDiexqFKjJ6eVExQoiQrEBebvhr1beyjRqHp4pkQ3z/SP89wlBI+qvnV2NRddaz70nVNSkC+qmU3OxrDyTa1geeSrG7sRE1ePjyiaw285VTjlhPwkv5SL7RLQymyDQ0XoE1J3K5eJJt3fXQkmaMkJi+ALIwa3Tp9AqsXVjgyphcwnwMRTY6ym05vzsnDKPmPQnrrnDD+nM4o0+OZbE8q2VNrE8qN46Q8uq7YUXuvylN58ZcB369NO6HhaA4Cgc2hJfdPSsPMwejthWe/+QmqoTE6O+Dw0XJBcnMy2iJmdmY5a3Xw4JgVZaZ2bjKyIoIZqw3loMrXsj1NhIl40L7y9LlG4YSWVRLx/4c7YtMacmlSWoqx7iGTy8h/Xfov8dzKO9THVUip0g/K1B04hp/2p9sEKwvbUTI5aK8Ug5jiP3YPSeRIKQflFx1HgZTVS46J6lWOk6pAhZMTZq3JXz59HJ+5v/rtXWeaFAxrak6qYcbcZVGJhxzSiiIaCwddHWu2xcYZVGncLZDreuyGzpjzOhZiN/JHNcfcw7CtbQS6wjNu2uPpWM82j2YiBOH74HeTxkkWPiMVj05x+xonhqwnXN81/bc4wWYlv0xCjCX5ogwaBY1D0cMFSMdEmIbwtqdFmWRHEDwNwWwC2OIC/v0mtwHq+oj8sQtLrGzGL5PAckQXRo2/AXyPSr72ecz8xGqoVS3OoNHmahXZgt0LV93pF0RV5cNG3Mjb/B4j0I8LJT4pvpHUdjhPqbZVbwvbZBB2nEoSWnnACRliidgnl0Bx7XbIc978jlUc/W8UCKLcSLhkwFpHYgzkvsooudPbqApBggU4deh4cQSYhVKS3fC5/233CHLd7JlBp2fLvoaVvb997gPCnv+GI10KkZ1CDhSDOl4T7pmWxdYhUIeHu2XFT4r4+ykiPL++0Ho9aWFBjdI7P5f1nB6guR2EjnwfaTUWAmfU7QeS7MNGcWFkbojRXro2pdkG7mu6kkij5RTrNgyy8a0ardxrtlkIM8JPVc6zs54UPco3tUbW3JtydJ8NE4vZ5sBrU0vW+jMa8QuK4ui5evEb6XA34Sh4FhJMIy1x/YnGlGDx/X9kMgKQ09MbynGxoXcZL4JJO4jYq+hPurO6gO/i7pXwi6qbEk+E/fsBE9NDCMsrqUP/bH9N3k+GjXZhosutPZlpGwE/E38AqNhlQN/fvYd0zl2OB1Rap4ACudXJmdu4Gk2/qZzNexYZRt1Te0kLUZsTl48PyLuWSbauCcPOgeDBhk3pJZWE9ynOXga91pufvp6MHKaHwR3bbTsleBwLWwsVRAVcIT0EC1IgcWXs7VnAWmUsUQtqMj4LH0BwrPiUZ0dCnfunSfXDp9dGa/P6rmIj0Zu/JrKvo+V91EO3hANjcEGxmZ+D7q2MoLl6gOErpVJBI2foGlujPNM8XvzMHwrBf5/NiBY0ygOjJydXfUVnRIZdOlbSeeUT2YbEGp4OSZnYYOmY6mpJOoXBmra52Rtw0DJicGjOvajod0FYsDtPDFpkkamfmH0
JNXmA3PXF2OFuk3fIQ1OrtOacu3ArtB7JFExI156LRIO6/zExsfw8OCz7EHnXWHzK3Hf8fVskqH2xfC5EVrT2HBJJxnh3st+Jeda4SYHSxG/yFsAACAASURBVOKh4XejRrag4Ms/ueGvXdvGfbkqaTUsIuh+GFR6j+VDeuDh/0+vibDARlGwdyxv64gO2UpArH0aQKkuuBn9szQ6wVJ39YAo7JR1d1wMD56bEBQ8fxUpBRgo0zxLEP33KIh071Tvhxhs55ovvXepK7wvxekwPZJihAUxIN8Dw+5mZstvh+qr08/5vjg8v5Z850bgMUF3bFUyVay9VoZ3aTajNHVttjGkRQnOW9aqRkZtMaX1beETA5BWmyXzXSRXlI8kfM5DcZGUUUftgfeE2lfgPCDa1r4ojUodr8vzAzQ/rcEqOAJ9j69lp67ZaNLMcyVphaeL/kRoTlxQWaaJaSw29EwRtTq/Ijv6oKz+hHJw7alR5krbJYJdDbA/Z6RrFMfh4mUOEuwRdYydYc79EsH6jNUniswx03WV/WxjJR+hX2E5Ph2fFKXGn5CiLWrwsOdFTUCotH4J5lVALW9sKLQq42o4JydLIzB2LpykWBRvGdGl0a43kUdMiRboXUgaYSu8kfYVOT/o63lf+hIdHqLsz5E7DOeoP/QvjtEsArfLEr7nRleuIC56jQw5Dxybu/nAawg2VFWv58wfBAW99xaEtEQuqMjbX/Vla/WHYfelGmFiLokJ6cwhhoeYjxjIlRu0IqSE3cthDpUro30R5fvrNKiyFZ2mDlSIfzQU19ADEHrp+37Bda6hU7hgf4YZJKGaEqpvB4PBGYdm1sE+G7f8A4zqIMFERFexAN0L4XtFwQ8RQ9AE30lPvHaXwhaxU7wXQtCF1y67Y7X7TAeiVFfWdncjPHfnstdwxLnF8CdM3TQezl+3dOI7WXUY46A5uuFVDw2IgTi8LpuASLdGWRh9SDkBOGdW1NtMjHbZcoxMK+M/IxWM4qgT4C/m/2SEh/J/pKl9LA9V/DQmumf8MZZ2O5iF6GXXg1JEcAmgf8UnHbzBYobw/0v3/Ms4Qjm+pvxobHFOB8qRBfJgjaLFRryXFkKrqS7cGfw3WmIaqxhLxjByIMxKAkTuQuCW1rzymFCjN2V1HYKSHdYPP5uZ9c7A6OgIvsSFq7HZBHC2/KNw7OSGv43+OsjfcqIscBqG6BUzxfkeyByvvgdP+oIsDKLs8fH4mvAIReQojZyVD4ImoXI2Mzv6QlCg2kPGkQ2KYnFgRmK+IlTxL2sQSM21MZB0C8OxJamAYvqL0UszszKwBoyyaOi6hJJyBVBaggolKPjaI9k7wKUcfnndHVv6MEhEZVqmcqbyKAqZZflRkLCdsz6CRNzccCW7fCcWep/CsVDGWToPlF0pAjMYRoqPOfpGIPxT5Xq6Gd7h+W8HsIc2MS0fBplU3vfSfXydIarwsSIA9U4x3Fhvdf6cAFdqTRqSFLdT+JzmWpstA8y88mSqWJ1rrhU9B99z5zz+X+7D4W+EeoT6UI25yiEU8Cb2vuCA+txmsm1peDhCWrkWOd90zTIypOmiLvE9MH60EstVtOk52JXgfHYmgPd/fE1sB5wz1azeYd74I/89ZlG0vD82ogYPlZbiBPqQX5x0DTtTSKm1y78JOFZPrrwaVuVoJJPHv8f+HnkvlR1EoSSKkz9BFKehijrcZO1OeFDNvx6/NtuwMLPM9hFmZtaj8gj/LdmHzPyume9RpgRNrkwU/BC6mZlOS/X7QnTs9ArZEU2+R5yRnAMWeipnjK+ydQXLfedluOga7juFUYEBxP4zZj59wec1MyvjnMTRjKQSi3iTYkuYeV2KE+/umo+vOxp9WbPs9zUp+MXHyqMOyr8Vs7H9896IcgMG4aTsLz5FFLd9Lfx/MtZNgYjoYbZ8ok/EtIGZWQ30DGyZYWZ2+Do6mCu4FeugdSu8bMVJueqri37+s1iee2tei/my7/mjauD8smJG
5TiNjlTjYfyt1BqU4zQ6SkIxMI1whLGogCXlsQhPSkbCoFJ6A8pgFwxQ4HaERoXGl+uBqHgkGFEjwbe7amflroE4pYOroGI2sdZUkuO3UvwQeYRwbZUtrsJb5phGE3VIR7FQJPp9ATURj/Cw1byE1c7+XrjK9k8jkiILyIWoRGm7NM0qwcd+tfZPwSp7KnnhJSKs/BserqJ/E0Cz5X3hUEAEJlWlhXl2/X/WpBoDwnhS95OQPwXOaMVfQEHMHw0tv994J5zz6KakAAjOVku4Nvv8WlbPVBiJDM2EdgCWdYroEddWLiJ6Zokw2rLSx1WSrc+fN0tBRyC5MmhzI+s74Tqq7khZOqI6LOvVCqvKAVo/SLsBvnMnQERZVnaChDoWnpmjt4LFUJHWGKQO8NUYki4io63MD7E5ShMxxH7Mr4ZFNlGD54Skiv6QGjbP7kMicex0btJs0fFnyflptI8fYx0o5QJuuSbcV06Z0F8Sqczu3kc3529POLwUI9YS3eAaWP++D60cXw8TrIYz55DVq+l+gNmKmtEg3pcC4Zli0bXiejCKnqChwXRUqgk0FHUiTlJWpEKNAs5jiuiXpdxSPOOMQNxXVcv7Xc8zuRkH+Jbzu+aw2ffoYC6CcfLpu2wHvXmf5KdRM8aN5wYt6+I6enX2xlOrlZb7sOontvYIaSCAK/tnhKBrG1T2q/5YcgLCrsFs5W7mF17zrkQfoDDGJTFI9pgWC/+//J4X7kevo5+OYBIInh5I+L58gA3M/Ksnt7XWRaZB/DG3kGUK6tvhphNIUi17X3snuGa7X/FJXW4AbnQ1ypbuhnfTEVIwEgoqoy1TIRSYWf1vXuZg1KXQyU45uSoeeQwyBCs3S/N374bfrQTJNlzSXHEYIzkHq+cY7VEwenszvMAUTT++qpirNlouuIotOQc9SjWOXUFB3e+5BJ3Vc010S59IujkSOWXlKI1o3Tv0IlNM1OTWUmVEJe9au8g5OtkRGTJdk6SvedtL9/alyHuag+EKErD2WlLZxjWgUazmg7BYjm56JVJDA+cWZLXy8FB2DEU2udY8SJVrFRKVsSr7GGaPcpHyWMHNvQ2se1lT3C8OsCvXdcpfZAsjQymKGOwRRysj8phZgnSvLpxPjL58RhWbFiuxi8Dha9ncVPxd/ZHfR4fgClL5GhvR7UMh0tjKZubdeDvczM5PScoJ4Wq9sV5WxLsqtN1dnDPFFgsQVUs8XRGkH43jVyLo+yPxNhGW7V4EFmdHNjPOoYYXjcXaQ/87h8bvz/5s5heJlsim+oxgnFwLu48bQMmaHn8dJHeySako6dEXBMjOjckmgmZesRckBdODsK9vhV1DLpt5GeOMbs2KBaBwz0k1G9NRWs3W/avXn312VO2pyB06gAt2pkhsHHovpZstZgNMmQpTAki3jxkBleiu73qeDfIcr3uJOEXEZwR+oFxBigEQ/UkeZHNCueiSyCCmeZXss30hW5JW0F6sukPWax8K8n2bBDdHzhos9e1v+FwQo6+aSpmHQdlH2ZSTNct7V2LS4xsgFBSCyZNrNHLC/yu4me985bZfjEc4PykMdN27yj+Zat+k2R9zqTycQ+kSaNyrIcN1f+aPwo3tvSW0EJGehbyPgbQ7KuAYMbPasXxKjJAEMDhdOne8Xhn7Qw0vNuSOFf/QgFKdV0Yp+jgGDJcR1Siuv4tUZjhWYHj2lT0p84PXFKOwdiWzB5LDRtoq0ZdD4R6x6okZGDWkL84BlFiEHyLXA5eMKLgS0mSr72eX5qeYouFFuty1yFo2ctWNwrCgzvHSXVSoRPpx8bk1HcX3RA9OUxjE/ij+ZgDiwbHgwUh62F9FmXB//oQ7BTXDxwr6pqLTeaLhoXxI9Q9DvP3gSyGqw4onM+W/8duYnEou515VBcGKC38fLBNW8Lgz7vEsOUn7+DJVf6x7lsAtv2g3rgSN1+qCnK/iJ3l/O1gyaWNr9ucUmBXC/fSyRGfgSExEsWhK46PRvOsF
A5vN1u54wpD2jSBEY+Xm5J5Zfifi3bykQVK/SSG8S42YUa4wamPmC1U0lZSFC1IMD9/t8XWJMGdETDSC4e5JUnI0LlTOunvGY+v5S8CaFSVNw6j+4asgahUtXcUS0FRxVqWXmU/XjZhWikQvdZ27fZxyHsJn7p3WVf+9EozWvKTFHPCZfTJl/3GuPjGmZXplA8lgUeAe3ULliihjekJaJkejId8mcFg8OYS4tfyUpeJqhAxW4RkhdTSUEJ4jQJRikrUfhnMc3YLxpoaXMxjkHrEIo2BhLBI9P/lDTq6qoRE+9+T+yQ9R20XKSfAKLgSprNrEYRWyDVjy8nTO+wVTRAm75uiJT6Fy1VTNPAxuSBqCxUON4iAPLo/B91U6FsPwbJDoDsQeIcXTrsVlSCyuxeUfeAl+eitIupxi7yL92Xg9Rq86su5phKTWCm5ZGWf3t6E9EMFVk6B6P0yQclNRAXHPKS8K09uuV555ZTVRojmsg9Mr4eDq236OK2DE7l3wkRvuJUY/FH9SBl6reyG7p9fLGmweOslIf5t5pyrVaZsg3QhAnI0/6QSa+UopBfry2mvvBu259XPyYh3nmBzSPogYTPXQMBpIyTcxKrqmsioGU4ECTLHqE7dvxdhiVKe+hWibOLiuCEnmke2aGMUx83uClCWFtn9Pjt9P5QILjaCXFZw9xnMqt1ZsRA0e9hJR0CRLWqnQl+74i/fA0aAgJ4fqZmnjevYD0IgxM6vfZd24/y7vsb+Wfc6TW+GzgoiPr2cACyO6WI0OLsqKhGvZx8s15pRNf3Qzm2guxmbN85xcQQWUlCLSw1y665UwgZJcB0t3/W4g+DjlleyGlTxsCCM20jOVXZS9X4q4Xy9pOOUDULUKBhcZlHOMXFm3pLQ2gOW4H94DI2RPrpcdGq/soZP3UpDSh2/6GDeVDhsXmpltvB0W7eFrwm9VpdGLKI601yBQVJ2d5ofhd67C0cxyJ/RuYZBo9eOtIEBW3vfao4VoDY0rbVvjSuwVLLwTfnj4OX/MGe2guT/8og9P1LfARSSOFknWCMJc/rHff/ufJ1uzzd1g5RQNi1SfJ8y9NuZkZZCuRcrI1kVE2ZXIFpU8sbL0x18N87n8Y8HYwOjNS3HFoJrtxDjjGAaDsgAzwq+tkFxfSBII6prFOWLEgGoM0SA8vol9KwB/Oj8p8kLcovLwNO4zlZ4d3SXhsN4j7QfSSVQ62e/p+MbzO8ZRg4dkdLp4XRoFk65Wt2NtVGOaJetU8FKNwf5Zha5YizyHGgJo/7D2dvjd/te9dsofhJOk+nEB00OLXw0SpjTUuyAJmlYFOKAXPAgFVzqPWK7tvAgNMy7PFugqVOg5tc/7ZcE0FhU7DRwzD9AsSZiRRg1xMGZ+4yT/7A+ffS78ja/bvA2mj/julKXX9YSRhrqx9gBFx1Yd5jflzR4wYubPMS6He+SeSO8/VEHIPdEoVUOGgoiM3YmA9OjEKC6DniJJQc3MDEDlPI4lAgoZnoQHIt+NmdmkFASPA1rGIlliX/O9peg28GwOaK/g9U64r5OrPjrjAL6b4YelE/9CqdjnsVs6187IdSXP/k2sLUQseu4wY5oOwfU0XeQ4eggwjgCT1Ylxx1TX4JwMAKgR7chrFXCM/elSPfLKs/p2PbkgPqsuAECbKSd9F9X74SRd6dR+4TtBme295QUKneje6myHw8xnfTRyQ6A720y0L/j74D2r0RQbcR4eKCLNx9IKrG7PXvBm/oXUH4nFDKXrcpEyQTkYOfl+Nqq+dV1WIUCN+2DGTI6l+zMMo6kQCtKDpdGnwjFWCcLwuqbduOF8CsN/j3PCEm8zz3SqOCbeJxekRuwa98JNd8/7k7B7bnU3nKOmJdVYI2oMFYpUrv7+++DFaP9r33j2Wfll5mFwI7sqJGEZpoGiBgMNjfb5bK1QRaQmL5QL+v7c73bD75hu0PsgLk/3twNWL4mB2uE6wm/q2fekgm0QaQicQxqrUAJdRUki
jzB4NOXHfRxLw7rfRKpaVn/o7/HkajgneUzU6yWGR8k4OXyPNsFLQhYUO9lz/LKG6zFIBzfCHRYjgIyVO2dhs8y8Ys0rRpHRNYiVVK8oOGok2dOhaV5f+IJ7lEABgb2qy/g8DtupXDtMOcn5He5WfkcHjfCJvoCbCZlQY+LeL4Y9lyrUAC8W95KwTnhmZ9kSDi+KtFhbQmrN+4gILj3/nogaPI7uXNvcX2T4EPgYIZtqPMw2mkgP7RhbT7KlUkEiB7SKp3V135iOwstoSGn7ETxiMbt5/hIxCVoqiE2qnnSsm7nrIBxZ5HkXJcrGCGmYl/dJOvTarn+f+YNw8dOv+sQzq0QYqjy+4ZW1a04qypW07Grxc2Hz/ErKNw9j7QfhRRy9ggaehexN15OmmmwdoISeNJxam9nGEPfZ0n0v+dsXsJ6JsVGqAChgxVQw0qSl8/WHQdI9/npwtbpn/T0S9FmRfH/XlXuIckJlVv8Qfda2pcpplbLFn588PwRr6n1wHkdCm8EeWYrvqW/hM3hyWhclXYu505YwjtgVLUt079O5VEU+D4NRZNcqQInvsirnTBw/cfZ4Tr5nnSc6ghoRdRE6vEqFHzh8jCh0yjSNODhOOdxXSSKDTFspJojyf+09pPYv+C8yHaV6iPeh1bs0Rpmh0XkskqdIKqldJbE8mwNuMyMRIZnUdBfn7vg6UtuydxoP0fT3+Pn1RLwsHUqqI9ausyzxcOn+KbPzu2aSbiFN/4Y/ieNXSDWlxMuXVFjtwzBLbDqa9NW9IDgnW3ER0KzlgMzVqtE3joQ4S7uzj41krghqZJTBTFJ5Ig/JVcH51pTWo7983rIGp4SGyyiva4LAS3+OAnh4Upu0MrvEWisD52GQaZqhfA3fxyqDuCfUU6QhynWkJeVMK6kh45l5KXn8tQgkTxlsSJ2ooj69GhY014amprg3VbGMzwQBkpP8QKUeJGm3FyZBG+9qesqdH1Fbep463z7t5ueAwlcNKpLQlY4g+Tf9Buc+UwZzUu7TUSkLAJ5RiHnkpqJBlkVuZ+YVrirBfIY+0cHUqBpNriWCio4sUjyRx1lYHDOzZBxOWlLqjoxUXopOApCJvID1qSe2vxYWHLFkZn4OVNekAIMYkwysZ6oDAv9Q4DmeLY2Hm33doujKKQs/Urjb2efTqN/xjexWRbERj/DA21ZeA4LOWpfC/6+9K+kKlOQqlw/PnxsgraH4Yva+ksafMeR89yKu516wALDBA5Lf8avcIec5WyJ3eqvZx2jFKniTngc95FSVgUsJ+WMqxDlWfxhWyu6Xg1mvXDuDSC8bAjRp1Ohzkkl2IDwbI4TpY/wWBUQSG/deIDn7KQ0Kd0dzf+onjbgqthAwM2s8DC9Xo2T9pdkSSzEqNHiqB7KvsFb47spjYQd39yXKHq0sCJ428xEflq8XpC8YFdJQjJVcIdtaGQyw0cooeRYFMalhvUmD02kJ4MezlLBKiBk+17b9PdK40IoUroNxlRE1/z0anBrp4zkc1kr6gjHNwk7fczOIX0HkQEuJCZrVPkyNh2EyTq6pAT/7s0apOyD1q+1lyzemcJZui+HCCLPgLalrhuIwMgDgDOVU6iuCVSLdHJ7z9IrAOBx20J+DnDQKaOY64nrLSSSIa1go8ax9CQ6ZpJLG5NxzPclUtuA32n0B91x/ED63fO9eVwKvab3YiBo8BNUdCa8Bc5HcuN0z2aHE08vCt/FHQRl33mIYxE/Q6vfD7w6/IPTsaCCYE0Czb/cQZmVS8kJjCmOLeB4zswE9RYTJp5J/JV15T1iSHXeElM9SQNDI0ZdIQ0kta5fjlgW6/4VwAUYJNJDFlKVG4gjebHwYLkDyOzOzPnpGaSQrjzlO93jBJiI5W37+qrQKGWmg0pGkleAQKMdKVksBM18ZwkieGkYUKBqdYVqM+JucRBiIG9GUDYkutReYa38RIf1yHaRF6PV7iOo1fGy88IOwYUZXSEIjkV/wYqWwDGBMp/Ez
KUlaDIaM8k/x2cqS0u9CuU4KaG7cyjZIdF+RPfb0cvb+IN+MpkDnYVBPuIi+ePwuSjlRo44M5oLHwlcJRh5IMQCjM0r4x6IMQgK09U5jCy1bil6IUY7HOsEzaqQQjCQS3WDkJlYF7Lh2JBrIxqXqNHMeqXe0BQgNPX2HHBPB3rkG3XCmRhKF4l7VgAUjs8TmaPrMFQ28gA8QNXhoFRcVEc+TsJGXgmYjJdPb3whfdrk8yemfXJ8tvMzM8u3w5NN1PyuTIVZNmyanVGLto0pr3a+SHH7HEL2mtGiBqjfI0ZMID79LC1yBzy4loBEkCm1l1+zNVqAaJaLyi1URMXKj2JF8LwgtzTszRaJ4EaauXAfyZmTXv6ThgOWITA2a/nkpsBT/1ltF9OSBn0NW61Coplq2QGgor1H7/OzqyuUfenfw8PMBLEGOJjOZe1kO1d1w0va5cGP0yMzMuufC5/pDcSTeDIu9PxTcy+fC4k+OggDJVfxcFdbCfh9VPPCjeTG4rSc74dioKfLjLpSfpgcwlJixjVYvLMbQPnQqJ9w5Af4mQSiLEMykknP+Wmk5ueJaM8i9MpK78mPNBLAJrf9dVspeU1quJYIoajri1FfKuzZCtK4ixgSjP1npGzNpXSSyunUFh8Q5Jei/v4bikC1xrsk2LUY6q7u08MWlA6EnphLhYdV1Ck8GguBUf6t73AfA9SoXEQ020ZWFVrbB7O4D+yrV/zIyogZPEgGZOdI1MpJqd1i8VD0HFTwJzFJltmeDgGWfHTOzcY2UtspihI+YlNKOUHXDG6TXaOY3Gzup56SdOVkthxIK1Vw2h2OTZFhUrWK8YF2EfGy1hB27Lj6mOhLT6o54kYwmdNfycgxNQFNKEm0AxuqVhAdi6mY8hxieQX22cZZiVEXKcCQpCkZgxtL4c/VH4UUfvoodr8A/vC9VkI6tlKBcMcqa94CVkfYRZIdOdWRGRCPmXdHI0YqXXic8W6UiLNJwRQsrYT7OrXrJvPUYUlsA09M+S1nCfShxqfPaJUpUOcqWpKwSYRpSsVYkb1Xjh1FAKj91ONj3rr79AtL9UxqusANYRiWrZaHLSPBSnLdUdRHeC3VIGlSM9SYRS+oUGjzNB34Bs+2Bduhe/jDbKHXRJRiAynfm0r7yKmmgsD2RrhsaDApMpkTW+XFzB4da93ANBvzhG/4YCXxjrWSWwaejJLdkSdaMxBhfXf9+OHi6mZ1hUtkYGx+D4Qmf1drlYqCxklN8jGsfIeePYE848iiPmkrbCebxc10hZ2MKCtgf5QTpQqCUpJeWM/pgdauC4zlTwGRY5CmQFhmOsbBHWpaO29K0GIW2RoZ4PS0j5HBdr1fVw5wtZFWRs+qkIE0T2dG2suvDhaVjhpEh3P/4sc3bYJPD4+thLVaFKp9zo8YfIzLKxLr8h2Eh5a+EEIkasvQGU14YQZkU9Kf+JMOVsPj0XcYMGZ6TOIoTwRpU9iEjFJM2QmRFEPrsn1Wvh03RHfq9v/5b4f5Pr/rzDw7CpOS7EcMZt6zClykArVRjdI/7Q98192ZZuJocABkftaDA3cdzysxPc/Ddci2qwlUnjiOWCiOm3TlmImeZkkytN/Zuw6tM0TEQYiB74OgWUqjKwsyUFiGnXUmtPQqftZckAwfEtihPDg0NXQ9L98JBba/BczpdI1kZ4qsmZT8JjCiprmGqkEar3j/Xuhq3NOh3vxTuv7btv0eDLRZF1RHn4UHOX8nT+BCMIqT4Y7inZQFxotl3JxFgMrk4+pLHZ2XWuKlw8zBhlcew3C+oBgofNfzG3ieF08iGgiwmJbaZnyttUHgCJmcCiXOS466j5JK9ucy8MtSUVlb/F83vkmAq1Xna9cCBcJf3SQGkaRbypAwbfpGwwqF8jOf82gWb58GUhHrl3PzqCTkSSVmKj7+JajkqQe13g6Fdo6kw+S67m17yMIqjERiuqYZQt/N33O/a3mEcaanSaAZteLLnNWHj/bCZ1v9iiHnf
3/X5h+IlODQalKwhUgg261w/uyhhILKL0QpGWczMlu7A+wSDefHUvwtWV2ovMyqrJkC7k6JXVBBjL9QZ+tMajjGfoknWNhU6iTPNfMpJ94sjDcyqCDO/ZlN9FXE5ykjl4eE6TadzZp/PTFpj4BhTWGaeeFCbTLsKLjjriVaLuXS5P7b/+WwcZVZLh4qkol1TZKlopt7QCjfOeQyC4eZY3pPDEk75/3ItiLLiC7SXi7eWQF7/UDtXE38WqV6Ktbkvsl09oxQCYmSI2ySVxL9yAnabVAC8hAJKgWYZPhSvhGk3V4Eg0Soe06Zz/Lu3LhsMC5Zeewo4jGOqWDh3ushd91xs4PqBl0ack1TkBvfItgjaEb1zBjctyoPGs64RYnqGSBlpddM8DLZ+4LLU9N7yh+j6Lm0hKFAUcDxCOpT8R+qJutYVaudPZh/Tpq3OMJL1zHWpoGtH8MZceoTFePm2v8l9TF51xW+61ivh8zoWSy4vhtfFcM7ytp/jBGksdqGuP/T3yGKAVJk7q49E+TFMz7Wt5IKxiISrAjsMX1SeLYJz5xG07FsRZBdGsNBD+4VxrSuBJQ0UpqMGkWiXOnQuukE+NW0nwojldX8fJLkuiJIlB40rrZYiGMInFBZR3SGGB+eQeaRMV6MsZnCWkIWIRdsY/Un12COGUa7t9hKuXdXoDN5hR3xax4MEGaRl6S7gEnkWHVGDh2y5sRB3zNqi5Zu6MYZqERJ0YGMzW10NT94Rzg4aQ/Wqj23tbYVVOFpmHNDfRl6xPxgUUnzZ2qHVtVxY9cfoKervWIrOTaklsq6ppniKDiAr5+fvSKqoVTkUEI1H2R49S6D70gW8jNLs6mPBZZCxeuBfQOtK2Dn1R8FQGBcjqYiXNLI62qfp8IFXkEoQxzmllUEuWpcNEOR+0SgRgdAESGuUlmH+2mOJEqFCYlDKXiuuv5xIExLBbf2MP3imGoycw5aPPK2eC0KkCyBIveb399FuWDf9M0ImehAeLoHj076sBkl4tqUPxQmIVIJQCZ/5Q4C4L0jTXOAa8OwZSAAAIABJREFUFN/D99s9E+5XebaohNURmofBaA0xMKnWCVCQzfv+mEsBi7Fy6f8OHt7ON8LEKzSBUVB9XzRm6YDqPVIupjBdxGlKZIIpFxrHnUv+e0W0DZmU/XroZlChlQ7VcsTHlPOO34ku7jB4jDlWo4PPohkb7WTP4bCwbK57w3+POKZUliAjNThQ4kFEpYYv0E83avA07geh1Fv1b7h5FzeDhaYLiGFGxSFQeTANNHxVvogxkQs0YOQcn/p7LDTCih2dzhaAZmYjciNIOi0HfAGroXQhxAi3fFNQ0U5MXWHxalklvWytJnHpB+E6OrnKOCbuQ8uhoUtOL0kY0+WMkd7aFm6Y9bCcjm/6Fco2FMpxR/bYURVVd/Nn7zgPm9w7RcEssR/ZRAyGKiKnfN4nf4fPjOSpMcEKudYFf7C3ikqvSDuUZVAMaEUcMV0a9m8+QjrnElpQSDo4B8dF11sHhsxwKKDGo3Cjr94IoIe7+z6uPeU+Vv56sq7j2orFYVWIglQpk9TD5N9kxNZom6u80Xm8l8EMLHNVfwhw+bn5K9NqgXWYqT42Kzbza3gkEUuuHQX6Pvq5YOTEohtRPhbKPjqnUjVLY0gNKtfDSvYjdWDvTHaFFR2VoYS6XT9GLF92NjfzWQJ1rinHO+ezj2Xdk5kHFWuz6yz59OR6dPKyU3eszFIKF5c5Ih5MHHkaOTFqDB1Rg6e3gTcsyoeRCUdnLd1hJ5ESN050ExUdzUY24dzhkdf24zIquETmjRC5SQB8LtX9zuiTPyBS6UUFnArX4pgGnRmezGl8kucg2E07AcNT0LQbG3W2BM3u7gv6Qc8Ri+ARh+BC0RKWdu0IpPqIXcEHYvEwdUVOnnlsLeFwNSjRV2VPgZWuOgnPuP59/yL23gxfJi5IhRJbFqSqJXCPxER1JWLGtIwSJ7Kfk7LKttFU2IW4BWdE
fo8Ur1QfDohaQzBeRqDMnSp3y164uDoqEzSoLR0Ac7QskSwUIpT3siNgqdL8/dmKXSOsHLrHHBFhMvOjmZm1LocbUe6ZeRhO/sAATpUVMyUk1BT9HJW4yofw2e0zLQCBuNDoBtepi9xF2LpTZeOIbqiSZfaC9CWKM+qvU6P7Y4w2sjS8dVkMI4gM5WSj/k1RxLAFEc6ha9bJK1n3fBdKoltyPEKAiYihRXtBy9KdjsXnikAwGIlT2o/YiBo8VG6qIPmwuUgUxzU6kwfn5O29Ga51RnjBa0WUNKuhgWetlL1UHfXBoQODpygsr8MKAINiydBaHyCFU9nx33MLT+6RoDNVjFxsNIwUpOUWqCxyLtCUtUtBivkuSsUIQ8p1AakyqsEI1eklv3wcKZ8G6fCiVL/xnD1gYVLnmINBIch3qULDbVapcDy+GuZt7T1pq4Dvlk+QPhTyP5bFVo6EWwbKhOWcGqWIYaTYlVsjEzRe/LNle+2KX7mxEfIDj068pbS2HlzHVj9ozUT4uYYrYbGnuFvgVfbXEYmTTuSDc+FhBiN/k0VEfzSlyEpGruecRHBpoJBA0MwzKnMP63siyeGLADQ/rcEqNUa0UlWh1PXiy6z8OKx1LWOmo6x8cByuMkgZfKlkT2b//5P/CB9VBq98EA6ywaaZr5ylYaHReJL1aeRjjF5u5X0agP4csTXgGIi1sAbPOsaWq0kxLO9f0AeOjqEtQH7+FWtwSkdIOdlo3DGFqBjGFUSnj258Qr20ymCP7a57hFh1hzcTPmvfHdcPQ4QGe2pMYFgslf1O6Y7C7FWqXgtWgXyul7xFVSnOjnF2BO1GQZoUtZSMHgvYOoUmvroN61x5hIiVEAAXSxM9mZz/HoVg44FgYDZne9xmXkFngVnNvNDSY45vhn2aBJNAUGxjR+ZeokEcPURyysfhhvO9SNjpJQ1H/4651tJLB3avqgUcPh5f92uRc0r8DTugPzkGdt8jafuCFJdbNw/9Ozm9Gs6hdvIUnoWWF1Mp9CNlqq7sNhKY6HT9HNAh2XsU3OpSUyxg8N4XSoLhgRUyRspsJFWMlSoY2Hc0TBc+ptLI8MbzESXM+deoBh0J1/FC5ipW1TIPg/velaUrRQaeUVmtx+VspdV4MDv1l2rSTPC0XJsQBPq0Csp1rYQ0e0jMaYQE3gH5U73bCMBR7z38TWLGljQqjUU0uKYGstapC3iPMQyMppyKwN9oBM91iYdAqe7679EY1cgNjcBY9G3vLThykaiqjqjB07oY3ri+YEYB6G3qImFYWFH1tIqLx3jZJS9hK/kwe3mJCxcAYu6P/eOcrYeZeHgaBGel5AV/+zi8gUJVokT4XNxCGF7WXKxSigqjc9Efo7BkXlh5HlzJ36oXDs5QUiInlktGFhAxQn1JTbheV0iz6PnW3g2SuXXZL5gSDJnaO1vu2Pirgux7OljKPi/D4dXwvhKt7gOmRytSmLI5+888h0H71eCisZ2L9ori+uuvSYsPRGSWgak4eEMAtVCkijOKVXuU0T6BwHUtSkiwLtUouFoP8e/3+h5s0C2F+7x0Obh5D+554pJzm0GADMZ+rfRQfj46ZHMjfx8llMdPpLqSESpVjK7/UEYU1cysvhckyNFNqSTDd70368/hmgpH2Gdf1qBuoNxSgCuPpViMIWPSbWmQKo+1Nckg3DTzctD1T5PGnKdozVAUB508PCr7mEqKRVmKx3SghcIAhgEjSGp0EPSrUTRXzBDpUh6jN3AGoVKURHpYsZ8k70vfBc/fPq+hWZwPETaNemY5nh834iktnFTDsSfo/TKOlGQTvMQoiJmkWGCl/WD3nPteH80El+v+yScsWxXJXISUapSDSVsvemtiD7XAYwFQJgV0726Ez4ngCQr4W19AdxnKT0vzQarAfKaC4hxvgqQwuDlSxhami0J7qJVe5NARF7PxaHa4eaJliWfDblOyQhovxz/tO8G5xqgwFOaxM7TzjKAQNRfNZ1IAOrEH
Oz/npRlbB7iuzmL7UaAoEJ6pGAdal9tw+B6JWFJBKB6C6U8qj66wHTOV21/za/b3d4OR+/mr3gBuD8PF6cSUV3wENw9np9fxizGPEvbSRnYIhrJldN7P49IH2YrFcZpg6pQ0cNAM51cwrlPyJMaLEEmq4J+HMc6IdFaFj4yOlFYuul5Ucv6TKwDhR+ATjFRoJIwGJSvsVMbUUa2o+5bnUGgCHdKio3Tw32ND68KppFdxX0xbKZWCa68hRiWjLCkMEqJc1FG6v4nFW3lfopLQE63NbB1CY0vZwal7VK7RDmjCKFZSU6fnXiDqGW8twXymAMlo5DiAply8jJI6DbHVIedYGne54WNUD/aD6bh/5Muj3tgMOaIHJ/7tn/TAtoq0VVeYnAogKZsoqJgRSFSF5A/91LnojKaE+tkvmAKMG0UXMhUQjU2zOJu1i8SBXFDfPMssq/t+gTp+nQg+jCDMqYCtqFxTRh8aB5KHR/O78zAc8Bv6VwkfCTYri7PA51duHJJPkgxQI35Mjyg3C4VZjCjMMfhGAKbKLUNcikuTphp44nQNL31fXws58YO+j6nTiSG7cj5CSbFU8xbJMQygQj47tDkGXjDR++e+krmjIuNzp6q0IK5UhrJ1Rf12eME7X/Yvg+fMYj1/mYM0Fkw5aaqE5fWa0mITTJ0nriNSHbQkOuBSLJGydH7W6INLP2u0DrJUjYQ20k5U2grsTdiLqiW6hvUgkWhYj5g6OUVKDmE0UBjEpqmpXlQs1JHzk64i1TkBe8Sl9WQe2WPP9dA0j8NitK0iDqXDzX1S3dJphasSHFdn34zSZbtQoiySk5uo/ilkb+SzK+GHWtFx5zDEvS6t+Dq5x61ghu/th913/qwPQzWbweoYSISndxvm7pnwFsfSc6t0iIiX9lU6AXBRw+YwbGj0aQrA5ZZTHWZxTPvXMP8b4W+g8FWOHudJsdJC2IWZuusK4G/5TliV44oQRGb0AqvszR9qWQ3Wj4Z6m5VDRB+kVYcr8xdDg5FDGsDFlhqh2H/CtMywM0PtVcmXe44mfx98J9pRehnK+eC1sBhTApDOj1yA0VhNR9EIOYO09HHee1OXm2G/P+p4l5vGURtMzknZGz9r6+H8+yv+WK8fxKPuW+4DBI+jRHDL9/35c0jVHLyO6KgUd/A9pUC28zCIdYKBp3PGiICWNNO4T1VYYXmcXEFKS9Yb95WudToFVRheqQITViFt+INHr8KokXvkvZCCoy+EvU5RK9v9eVwckf8TSTc7A1vWSgX7vesTJS4yS73TeCBy4VQmBSPWe47OFeXayTWJBOFPDZBwTlhWr86a23+Rnnc6otvHEaRJeI+hywEpt7UfDT0h1V+uC244f2/ktfZRO8xKU8gFr60Ga2uz5ndYCTmclUqwIBTrw9RXSxiOxk08UC/8Li+RIDYW1SgIN5t6FAQkMiWg7SlioM+sHLqZF7jOixLGZyq1dARmtoAoC1iW2KJUmTvxCls+gnd6KxiV7KvVOzN/gAUWEDIikPJiEO1SI9Q1iRUv2KUgh9nvi7QFyoTMd+QqMyQSREZfTVmr0ctxcjUYORRYafZxnFPIRH//UUhpXV712o8GDzF6PemqfjJEJ3UxqLoPggFUvxS000QqQFtdGBpCV1E+zBaPLuUHTNNQXGKW56rhO4VSW38neAsHn/NWE+Xw8geRhngvabgO8RH8B2VdqgUR9s/62x5xvPO1IDR5TjX+uAS0oKID/cV9oIYRW4ho+tAzpPtjxK91ziJNKvs73wGVxco085iLsAqWSHUIh2vCLVF2yijq5VT2hkVI4uywWKnlkQnOEXdFG+IE0FhM4YAQMKEDrU4+i38+sV5aXAw6KVn8NLoQqIA1r0qsAV/A2ZpXiG+uBfKxnGiWNlB8v3n7FXfsL9384bPPh8Ww8gZykycwclo9r2RHqP7I7dObVcGWbWWSXlyxP3yRVZYHynRz0agXyUWoIc0chQxKRlPGpzNkJOeK37FC
oLvuPXNGOXQh99dANNcUriDXdTwbjzQPwz0jycGEnboF8kYNC1NwqlAlvwuVpdLt0whVwjxWRTBVqeumsRW0x8lVAUVTOKZYpMNncrBoCoONEydr3jj+S1ffe/Z5q+sXLSM+jNIqnUQBN7nf8lpg9UYARbfQmX3Q9c85pSEme7O9mY0/pJBlmmks3rjHWpkfmEcaOdrTq4n9uPNT81emRSPEyXvxVyoxfYLX8ODP+3fpnQAcEJG7+n64kcNXswHivC/F6Th2X3kPjGDEGqNmcf6YmU2QyRgv+wVRehzuebA2O9pjZpaHTNdCATrXWjWZtaf7AormO1TyQpfSFx1CQ6a7ifTfoUS4cY62FvHQeKFIFRlEWaO0GbHxMRiebI4NVzmASVdkuAMyiSVWh6JmOuew7zf1a81gCXxn97o7dr0ZIjw0cMy8MdQDsu6w58/PiM+tDR9a2aqE6MNpLQil0UPZlK1sz4CRAN0ADC2S2VOFBfOv2ujRAebEG6DAJeFdqlQar7p531srB2/Ao0dYVMOMrExKUiWXvLZfvTkoDCr5+tYLmO6f0nDCDI9xesU/E4WNGgJ8X6keMZhfRgRW3/PSZfdL4Z0o3QOFFNMKBeFeIjO2GjWMBqrxvfKDcNN3/2p4OPKImJlN1sI95yWV1IcA+aNHXurVKuF3m83wALWCX5c97Fs1hg4Pw/6cHpEXQsrX+7OjE2aeJVfTLA5Lg4+VQ2E6B+A2L41Fu2C3pvFc1rQ3CC5ZdTcvg9Eu16xY6TnK2fNJx0+BvqyMZGWvOmaHt8Jcp1pL0Cjj/4vRRCWuJLqxZr6uqWkGvsvMLMEcJAKLGC3NxjkqYN5V5UawZdpJPQedwmdTXcNrD5cUyoKJlb1k7F8HR6Jb17QP5kAyJePBbONlWpIX9SfssRLvpbWUzSTLB2dH1Vj5mIYxsyJDq2V/khbeyDmJ/nTwVi9UvKQow/Vg2urtB17AXlgPv9PQ+BA8PONxNsDRUWmPxDgkDbZYxY5FlI0jBcxFIyfVpBELNifz30D7B7Yg0NwpuS5OL3svuLo3uwxZcSUUzGkeITRylW7QI7D65pxRNn9l6a5NCB5/HNlJWtJskaobpploHA+b/gL0qpWXg0y13uPWEHc29xJBpYoR2n8zLGgaVF1Nz1VYDOAXBA2e9aZ3RYm/WSsHa+5+y0eC2uC5KMokT8CYXj4XzlH4nhdCndfCBJXuew3BiNhUvGVWY03y2ZVwHArCp7Li9JRkXzEaMtG+SnMwXJ8qGHWpPnHY29rXzaWqIk1BuSeUPJVrVo0EFg6cgtwuxgI80UosQgcUKEtsTsQmddGqkd8TwyYNHspBf46SS2f7Y7EIDCNsxNXEOIUSbdZN7ihNatBHys92CMzMGUapNky57Mi4uw/K3hdAPkQNHoJSazv+WDcDYKuCs3OBDyfWHF4kDYaR5NnJfrwvUvX1lRD96apJy/tFhOerV3znutvHAWGr115Ck8OTI1y76d9UrptNiOXaOyg3QgaeYxBhrNZQJc+voPFz3w4u0fG1ELtUw6t9EUar5oxrsxWoluBSANW2s3d9467X8qNGeLjiSZBApzdeoA3upzSW7oXnOnwNJfq69FxPnuxy5OY9LznZl2nocAHZwGf1UisZ9AZlKTygEZqTe2xtIrwuYX+uD+e0rPpnGbcQhar5hX+1Gm7yRDoxVgBQogNya1k4i0bh/L//wAMKamuz67fJnG5mVnyUza219sMwP+RgMfP7eOOPgb953WsPnnMiLVVYpk4FpGlvRkMUUzFvg0Bf3RM0cqrC7sssgVbkkHuIzp32/ONaVIoeZ6BClrLxqZlPHTce+rPsfBkEuLJf6Ey4CI8YVFx/GrUotFHhBv2iZLWM4KpRw/WmETBizdZ+GO6XDV/NfNGD9hPrAq9VeiwOAiNIGczTZnHcn28nET5rlJwtOjRKFBvxCA9yYykgKyqPRhn1/WZm+R6Q4St+cRXxgtl/pCOg5ce9ED+8
2vRvYASTf0kSqycwybfa4Rw/s3FbzgFEvICWydnD9hRsW2HmGTRT3AKYq6F4Dfwd50o9Refta2454lEcfilYQI6+PhYmlfvP6m+SwgFhf/WXBd8D1uRxWThT4PkNVsOxmIX/sga9N8673mtWw1Uz8djkd3QyesTwyIb3IWk1SLgWsU8r/nvTPMjNtH8R3qWS3bkqC95X3wuJwlJYIJMd/86LmKCUg4PnPhiEC1yo+hzD0SBMpGJzqo2gabp74RxL9/1zsmxY/aUeopl6jCTurUsAT4sAJ32A4rBcxSNToGLns+LPUUvMychiidYoNbMEKawhIzyKe2GbAmLoxKqhwk21VeDywLwr8R1/15amvK59jBjHWQSvKUeI2NdYBAOqUruZU4+W97MpShTbwh54nMfGI4mUrcw2Dp/cmGUeKzrDPHxW1utRjLcPa58dBY5uiT6BvaBBhNiIR3i4CVMpLSjqDiZIWUJxn9XH8nLIFxJR2mfKYaVdrfpd9L3jK88+//jEhzcen4Sb/plLd5593heJUsIqb0glC3ECeZAQjoXKvrgTvjeU6E8yyl7ZpNJmuLP22C9Cci8oUG35Q96XvxbLlzUyxEFMgr4LRm7Iqq1U+a4twop/19UIF9F0hLJbgJYb9+evIoXsx+VIZ2UKd20Ey/nde8tLjaW7EGYwlPvCy7T+TniZ+2/5BUGBRcGpEYwpDGx95wz1xwweGl6Dq/4k9Vo4SX/TH/vO/o1nnx+1PCCCIOM/d/nus88PO34SSFBop16UDbfDsSrmUXlRaKCo8GXFS+XA7ysaoFl4RjMfJVCma/LX5Bypnb8WIxkvwjnyaQ2uMcqt9sVsQHCKgZjcNVJOvYp+c4coDTcJ4nFuNMrO/lncm5rOmWCZahNTRzwoUU/HMcfixKZsuoxinyffZT4KhrLgaKbox6VrlhF+TddlRU+0c71jDleYDgwlTbUxPcWoTgq4jfvX1Ca/u/dF9FVUqAaSNJrViI2owUMl2FVmXnr9ke6zrIzrSxkeSQkpKPbbXmq8gbTV7x1ddcd+6czbzz4/GHjyl+3lIEhfQ4e0t1ub7nus9lgp+XzOtz4IlV8bq2FXCo2EFU/CDisKg6aj2RbD0UVFMD8E75p547D+wJ+CuI8ULT3+rj4EN4mSdiHcruFanp8eS1+MQ0dCJ+y/5YMgjVixZWY2IHgTgrB7LpJcfkmDVUl8Jyl2XBoasicaj8N/HF/30qB9bnYOW0Geu18Oe0Tf+fLt2ecvawkuqrtU+BKLUexk7/0uhU1bWlzUw9/nlv0DsNy8XJB014VgPe70QjhTG/t2B+EFFFrC7YR0Qe8smofq3qRSiGASmvf9PR68EZ7NORlyjt4GgNUKLkd6INYZmg5Tio9kDobn0coudOHza8SMzhMj4mZmh6/MTrfrOWjUpEq3mVrE71Rp08lXHBCNXHUesgyBQmTvjOvS9Hc5nMT1mWx7eUn4RIyLSLmChoCuakk5Bx1vbWtBbFGh659t7V3wSr2RDZGo7iKbI1Vg3I8rON/xNaV04D3Zc4/nJh5US5IvkvwxalXGLD0uDFq3m3UfiljFjC01/SocQAIURbOcxw64AzOwLO7Fb77zxrPPb9z0PN6NRrgeQVoDWYQJNhjL0M08kZNuRBJwMaWVYtbFlORG/vzkTNEoDhcvPcdUdRDeRfOen+PO+aBRhwAmayXEOCNs/OSmcU8dAZgWwsJo3A0PcHJz/jA89ccAgZ9HVYiEd4nP6EhVXetifub3zMyWPgwvZvenIGG1SIEsxrLhCUZ2SkY8uVjvNq4pxQk44jMKXE3Pob2DFiIwjXWl6d0HYnjojLRGXsOxCXDKm+WzXQzXHktukEqzIcy3nK/TS8KszmgFUrKK0+kA06KRvkoGu7Dub3qNWio9D2Mlw8DWNUuuJyX1I05wKBVQxMHwPauy59ykHO+MkuxUNVcEDOtY8aVhpTLjz7qWmaSBVH4OYMgUaPEIpgtprFQhELnz
5J4Yicy7lLv/HnVNKvPC6I/eP+VLJEq0+qPwEnvrGj4OH/fems1ebebTfCloRWREDZ5xRKhyMmOhxGlkgobsMQWPrNX3J7kDN7IqUuMIgMfLFZ9XGOOmVyH5O/KG/5Uv/cGzz79x+w137JtX33/2mVgibXHRvxxmvSzVHnzhGsrlPNbQakP7EjUQ1emc9xuAQkCr6Rz/BIQqad7NvJfaX/Xam+XRXFxsg2Dm8SeKV2B3b2J29HetK8HI0SqMeRhsfdFGz9MUHQOUlObqHVWAtAp4/PWgkLmvtD1FlwaJ7CsqZ+bLmw/8F0tQ8CRcM/NA65ShlNFHyqp+Egj4L0hbiKViOLbX94YtCQW3TsKe6/b9vlquwTiUXl10yEolMN+aH2zgqOBHl6pSjiG8w9oegOzSIJSCX6MJjLBtvI0oVNc/y/FNVMsuzd+ecP3aWK0UqUhV55ciWfvSMdrICqtUuwHI2ZUP/VokZ5hyqHHwvY7F8OI+JrDXzKeuykh/Ks6F1c05wbxNgV2dlDFZVW8xDEBC2vxAStuxlfoNv1byGZ3IlfXadTqXYqVxZfY5zLJxUmPZOw9/LuxvlY00bAhsV9uBe0m7scfGcwdINXLAUJdn8/Xfa1/GC5aFlgOug89zZdm/gc1K+FsrseoFWIuyuthZ/UopJGC/fewJChtwYX/+yo/dMYKitWTdDVjnKW8Tm3uS4hMIc9ADvkAFAkN/GiJ0oVcxOAmydeh4BXpleKxm0mQSGB7dzMNctpJ331P2TnSUniyhWeT+/PHwDDIiXFqK6riRxDuhgedYas28V4xpUlwH8R86ny2W3WJtKNMvK1SUhdm1C1BlD0OMz9aX0vNHD0OK+ewrfhKuNdEFve1dUe6zMvh1jnf94u6zPjfv1+wIhsHoBC9jw2vh/E6QJ+qs+W7y/hhghVa7HcLfJ1c8oID9l7R/1AhOAVvArHyQDWh0hQ1zMrIiJqloP0uaNfIBTZRiYSYugpVYsi4p0zQix/XMCIZirlxnAIkgufWgPRFhYPN3vTOC52ST6ZIcWwo/zD8Oi3EihlHxGBw3gnfinBck5Jr1bDrfjGQpSzJVoOJqFNf07Hsd/R6OSdrQQSZwbV1L/Jv66eNG1OChBx+rMLCI5Uu6bLXS2D+LG0CrtBp4ulWxvA6RI9LmgozwHIzD9zqCwszjLa4U/Vv8ncfXn30uIg44EQ6FBAZPf00sa+Q661Il0roC4R7h2HCdbpVQ6mT298w8UdnxtbDi2+ez3+fRTT//5AWht5UCH+Pv5l2vWMigrMSDo1r4oVsHF+YPw1NGp/o2yjnV22FrEMX3UDCoks1lcGxMRLhPYSilBD+EgQMVy9pgOaqS3VE5ayEC8UQUXvUf+XXTvh400F7Xa5YPi8Fj6g79gj4Drq0hJqG87KUjsT9Lm76Ciz33RmBu7tz3IdDKXnYqxXG+KB4Cj/PwL2ajJlsXc5nHeE7u22HdbyxXmTR/7eXcYMHdVNYNo88aVYiVKnNv1R5l6yTX7FV0jdtLEZ6fmKOSFcEwE2qQDaQ4qxJVXQ6bU7E5yT6MHDjG0yXvnY47IIIVtuZ8C463kPhx7XC9aR/L3BGgD+aHa6kie4KygPIkZRNgG6fb0YTPDcGqcrDKz/WL/JgR76UVOcpIDq3MshLmnQ+TmZMXQGbJwlFYkSVxl38HFR1fXvWz8KgXvMPNqt9FJDfbhbR/teFJIFi19bDrvc1vnL3z7PNvP7r57LPLsZpfaKlQbiQvXGwjzbQTvnj4mp+rAin8RTAz2qaKt4VSRHooqaZtuOeNt71J3jkfNHbjDjA2r3oNWj4MG7O96aVKZT8c0+oujnw3PEB/df4QmuShYC+c6pZ/X23g4jVEn1Xarn+7YxFQ8aSULfgdrkGmnd6bYvQowDWi6FhxCQCt+QtQkLLruZnZ5WoQIO/seuTiu4+CQKlVw8K8sOyNmq3DsFcrZS85mf5aqQcnJrnrJ7LVFEOiAAAgAElEQVR3
FvNYlvsHCZriIWiscF9papMCXBUosVaMmjGFZaY4PJu7kQW47klZNJ8x1fMP86TevCthZ7RHjBpGeFJAVvzMtfuIBAdUVrPgRBW1i5igjUrxUEhW2ThZo1zL5AZBlLbiF9Xkcjh/4eHzO4V0vLhONWpI/aIZG0Z1xpGSdZ5fdU0dMFnNEhB7R6cihf/FMXX4YiOqUVgppM0FmX/mAhrFOGKk31QyoBcJj0x4OWjkKDCZXBzNoniA2AFMhR1JjJ7nfGfHkx6cvRwkWxuNBic9P3WOWFLpvrEQiqLguKkIXExFT3RxYTivp5kdJeIiVI+VC+/gc35+PM8GOk9P/Ps8vQQCQWnex6hOdddLO/KY0GtQZtp5GLwnVixMIykhTXcRlKppXhorBLXG+lmp4CHIONaZ3YEYBzrXBML7Y+zXQ0NponOAhd+Xxp90RqaS/xyhS3ltORjYEwnRk/m8N/CTUC4iunQYhNL4da+pcp0wkSR+M9P5cYd8tQ2Bz7vSUw2YL90vLAoZE6Mq8sOlYOYv6OmiJ477RcH0mEP1+t331JDhemZVq5yDBqVyv7g5payORKlTeFRyoUmUaFzDTRez5daUuk2oFKyJlFY5rN/xiVwMqbCCBhHqwN5VhByX3DUwHLWApQKiQ80Y8G+NCpMmgNxRJzdVNmbjgJZuI+OBVNXBq1KFiT/1/mMjavBQMBN38GTMVqwpy7cw26gx8x4gQ38q2O53w8p+pe5RVG8shbdzICVQNHjYduJImF3b+PtvXPtjd+wfv//lZ5/HQ3QDFx6eMcLQKTpugCEVfMp5jFJkY+2momioeFBjyzWShIeSEip4b6vvecPx6JUgZcmvo0qSjS+7Z/wCZQkjDRwzzwA6LvP8z5+b/bRGVmde9VipBJUngmkUDa/T8GTqSz0h1yNLSSQzvKsUKR4iE6kGfDTSBTx+AoyQZxL2pyiDm6q/5A+eQnPXyzJ58Oib5fBwXUl1E4zceyCS+WJ4uBKUR2+QTRg6uuQncoLvVu/5a/O9NX+EtPENJdwMn0+vyRxj7uqP+NnPN53N3vrzh+8/rXHm7bCwdr8Y5ildxRM+qwFfRRFFCgtCNm8c0ugDDRLtG+dA+TifRt1KcDJi/QZT9AN4L703kbba1J4t4HVbF2GCzTTG95Yv+MjmKTBpg1WvhwiELp6oxWmzh/w/K6BSEWg8t1YcU54cvhauvf7HfrKOb4XPCoo+hnFU2w4vVAMpzgF5Ab/4Y5qHhs/K2+I8WFw8lh/NCydBAgNidD6cpC3m82YtSPeWACJovIzFU6QX2Yj0kGeU6ELFCyxiAXIohxl3/T2e/b1w7WO1aDGPh74IzGoQdIQnxTADinpnykS9HtdlHQJHrWLfIdc/mycMg5EqHj0jgjkxhnJDGLTFbA4gr9jnT7j7clpG1vz3GLZVsju+SwWPTzMqI1MVjhAA9cf+IIHlXPZqNLkIhoIH8V321HtyM+Gjw2WIAiKWQVHsxOZsVL07Xs6HSWG0d73ic2uMDD3aEKZlGFFtEBkmkopOkGpLsWWjAWKq9J+CH0B2Tf9pk0kO4qtc0DnxN1J/FOaD7NjzMo6vhznM6pxuZlbdh9d/VUloZ4N+zczyMEp6oFrTrAP3phKwbrwTTsr2LcoDU0X7lfYFfw6SKir9ANOaCKSkdTGc4dKKtGJBxHKMBZZkGSpmlhv6eaSBMlj31krtHlrhYLvo3ieLuDpr1EsqT6h76HjrPMb6frrKcKYhRV+pk/e8I2rwsPojVauPRejCjALYdZiEvChIRHiYpxyM/G018XQ5Tdxi5AWkwP5ZQ3C93G75t0hCs//r/uvuGEtrj1ooGS57TfX4X0aK79i/0eoOo1x+DthrjKBlFRZUcGoVs4Q9VcHFRpUkQBSL2fG6yHsiiSC9HvX8Of0pJtU2F4J6TpwDNiqdw+ahbK1xBkzCTTGUMaEpECaW8PoP/IvurYW1fwKyLQV5EtegipqkYlwrqYaKiBLp/o74
B76KA4raGThmNm4CyyDp7LVSMHK+9Ud+z33t8x8++8xob0GsvjUYQLULfh7XwPvz7jRskGLTKwHCq5IdH3lkKnK4LIUIcN6y+jSZ+T2nGKr2JjBgkBFqJDFqolireRh0pBipKUhamwSm6suwMlSj4GSaJm8VuaLM/PvSaPnpFRi2GelIM6lCFe1I5ui1H/h19OAXw+dpCwagUDWwxHy55jfk9k4AirHb+FiqHxl5VFLQISM+itmjo4Xtog4ZHRfVJ1zPbTGGyKBcbGWv52UUQqeiRBnpKU2fESP5IiNq8DB3n/LmSXCEByodS5RlA1w7hWxjZQrE+uoFP8sjmIQHUkfIPliDSMvqI/TkOVvJ7rj+YOibvDQrQfJfOxNyST/e8qsk6WV7XgQVV4QzoHUtzDGbxCnXDjepAii5eFMCkZubIWX1ZskMLFgDej0sy9YyWwo4zY07Hh5JVfmITzimTUbnYZBIMw9A1mSU/b40LMz30DnnNSS9VL5X9bQoiE6u+7XHyAFbVWi7D26lGFFbzxOY+47PMOYKbfHkzgdhX6t6C4q4ua98/rY7RoJBdkvfKPl9ywiu0lXk8ABvbASr78Mj/zAJDDE1SFjlMpISYkYznUBPnSN8Tu/b2YZSqs3HYPbneRk+2sV5kSgIqj0VKMvvplJaOD/XZcrLZ3FIpJyazl7JZ4scHi5NJxE+H7zuBdzyZjCdS6jmHUo4g8ce73pLgP3fOE7v+++x6lczAblRuN5QeHjc97jGZLopF1Jpw4hcK/Zmr2dds45jLgKt4DtM0wDgniIRMB1xpmV483rBLGwADZzUiOXakJw9lQaeK4gX/ujUGxonKNZ/fdVXX+10Q7ydPbGUor5WCG/kvFSCPDoKUmq1Ee5jcqrdBKHsJ7qCaAj4YyVwKjDSoS/RNYuUzcxFqJEbRlpIM0ACLzNpJyHvqbOB0uATGLC57JUWW4SjWnYYlmOwGsmPvqRBg7KPlOx0lG3wpsrGuV+Wsg0lRmQ0pVIHD0/rgqYHwmfl3uFgRY3eh4vwqDeONcU+P6rEytXwoKtVb7z+wWFgbbxU9+GrBri1trGHr0kfPfL17PcbmceOBmHDaHDRGTzKE0ZH4sBPgpNzwEpoZ2gqzRSvC/Z0rCkv30WqZcIcDN6fA9DLXA+Ws/UJy8jVWeKob4fFd3xDcFURDh0X3YR805QW4QG65wgPUOOYrOL1UniBd7Z8b4YzG+Glq4HTQ7k5dYhifcZH4XuDSTZmrNjKXs98lpRhxxSiOFqcn+fFaGnUhi2UtD0FAwLUZam0G/ZOW5qrxka8eSg8drXSegCBMWw+Fo6V4VpYoPklje2H819YD5rkrbUt97XfeBCAL1884489SoL1qymtV5ck9/N0/P6+byTSAmZoLKu8DHKzVi+80aWL3jBilUjyAy98HRbqmoDM0Bfl8PPh/xt3/T3TwNQoDkuUNXdNY4LAVPVsqmCLbW0KjT5Bg4j0VY78fJMXQ72vYgtej6R/eM+sWuqvZCvrlzUcDxGJBxXEjvYitUf+nTTAeKwcEqQmYMheU4Q0clK8TNirbIXRESA5z6+CzTXVlHSaM2ZZiXXO3+R5lIOTw8rM7Hw1uKZ7Pb9fVkrhd0vFIOy/tXPLfe9qM7ifb297qbcG56SDvdnt+ckqvhus0d55LacD19HZbNlV/144p6ZZWIqt9ASM/PKdKXMs382LVKR8WoM6N9ZXkUNTFEwfqTHB4N3ul8K71OhDBakwTUexESjPR7C4mW/Sq/xTDh8jjNf0Qe88DC/27FkBHKPSt1jwEzRB6ooprdyOf5gc5njUEH2CCI8afWSmpqGnVAp9rks5R+ci6TD8HJT3sF8QXVJ8Izudj6r+HAwIuKicREd7iBIVRZfFxsektAA0FUXK8DVzgCnPHhTZY6mQYDirXgw7/nDgXeKvnA2tUdtCGrjTDsLyRsN7gD7kHTYKr2VmdtQL1sSjxz6lde1iYJA77Ibv5QWTQJDkQBYJ
BVhP8qWsTisA+5PiHcCUaP8lhoDVO+TvKkhNaZfayk6QpMfX/QqlsiWfzmBJcCsRAVdsh9+pwVPdDRPGBT/Nzx+Gx4GW8XEi4EHuLAWSs/O2YggICmcX41TPn0hI2oOKwxymMA9kTFaQfKTbu/MOgTceitF03AlfPGz70Mfr14IzctD3+/1aLezjR9BAd479RG61g7OjzUmHIBskW/NJ34u8PMp4re4lfx7MziPRwlRIp1dh6CsjPTJoMdLAKIcMRlVawszDaGyF+e2cy96zxJ0p1ikmO2jkubJxrXAEkFjb3hDgTCbyfmSuU9g70npI+nYMcH2pHn6oLYiYBi+JpcGiGBLbjpb85BROWI4mUVUQnvakPRHnP9aE1aUG5Zhj0u76yXMVwsxyiK4kMar2CcuiK1D55Di/YqhuGVGDp7UJMkABknH/N++Fzyff9Dt+uRaeTu/rtBUEInE1DGObmW0A4NgSg+ebF0Ovq9/c8i0jaChtd4NwVJ6fESzrzfPebSCb6y9sfvDs87dBQmhmdryHhb0puxe7VPv1TItY5Cjb11YC5FeggDXzIciSlkdDsHTRxFKF6vZPhznX6Az5EFoXwZ8iKS1if5TGoHsm7JzatuA57gaLsPNmaOnbfE9cuDkYxGGMgTsrP/ZbaXAFgGYxNNoIXatAYVfgGHCYQz1dYkpaAFoqeNDlwUVP8ZzqxDB03V9FSkhwLiN4m+dWvRtGjM1SyS+4O51gOX1wHNzN4443mlhEcGXDh0/ubYVz5LnHuhK9RNPicl3ZwbNTqg2wSjPCluov5Bok+2Nk43atFSRtRYLLfe+PzcVgnypWm2nbBq4p4nnMvD5Zuu3Xw85XwiS6LtmivQYko5OScsIAHARA3skYEQfy1jy55/BZq4b6g3AzYxgrJSHE5J6YiB4ime0E6ahE8IE0SKrb/hxdFMFo13nXV5FqVE5P40JTtCWC6yNrkRihgtxHFgmkjnGkFRKfLUaQrCOO4SETayRsdPQ6wvyP/Qz1NsMsj4Zeqn7j2p3wPWjmhkRgWtAKJ4Lv+e7+lWefry17yf/ecTBxL9XR72boQTD9UpiGqw05x2E4xw+mIQ5IQ8jMLFcGgLXnn7N7BWWlYu2Wt1GBgdRBz6d+PVmWGCsMDyvinkqNpH5aQdFDQ7ql237+9z8HY8V5mP4c9S2UAl9U1yB8VB6eSi2kIwZNgNC/EGEne0nDsbTCPaSBY2Y2ZefjsteCvXUYCWIAuwqJCCMs96YaMjwHIww1D3HzSlYEPxVLFBd2SG9ZetkBo/Bw27/LZRg5GxXvJL17EJiW2WaiLc1DWVDw4/d9SmvzerAmHj7Ibv3QvBg2xXAkVh8mIVf0L6CPeWXjT6XvcEr+gey5jI70ChJnCiCmIF7WoMEX0xMEq+bFmGDLlv0veB3CqJkqYI6VHyObIBkJttJhdCklS/cRrZMKJRoJCkAftnBwzGv5vT/shouPtv3DkBOKuLlUk1ESIMq+rT6enXkx80UxzK4oxw1ldeO+X7OtTcyPYH+os+pAnsTS5XmNDWRg2XS+aXB+gsSD4XOslxaVOEt1zczqlXDXuggJHmaH5OuSmjqE6X7S88YKG40e9P0CYtf1QTU86k5H33AY2iC0DyF4/yAI7f5D777kz4VZlyIny7XDOcZNbygNVki6x3SOPwdTiKr8yIWS4nVhvhdTpykt3nL3rIABM1ieNYrz/zP3Xj+SpemZ3xcZ3makq8zypqurq810T5NjmkMDcrkj7MoAAqWFdLG60Z2gP0iAIOhKEKS7XQhag6Wo5Qw5ZHMcx7Qr02Wz0puIDO9SF0Pm+3ufk+ebKqDVFd9VVJ3IE+d85rXP+7xsBaERqsKJ7exJSRVLOmB63oarSkJJ6CTjj1IB6cnxsoK0sc7Cyur2ANNF4i2nrUkI6TQF6gmxo/T+N/xFCvflL7xUOkHkd0bMg3R1HkAJZNr+/t/70ErPf7DvsTnfXDUSoyddO3NKUDh1jZv8
fjtMoZCY9vzE9cDRkxXlRAdk8oYoLjLrZiJnE17wRGUo74ezqRFWZj5iPGevazj2Z+wbfVZGvxK0GDk4D6LE+XeM6ijYlrg2NbwcP5Brq5D+WwlZhO2mOMoxolwZpEYTBSwUdcrITGfYRbI0JZSO7XNyQZzrU+xTcrlpBJcpp5lSyTjWff93XA8aYgmW+PRj685PbK/z+Ul8+dtG1OBZvmezeXzLf9Xl8tAgdNwVCnkYGoOBFzYft2+cfb550TwyxelUULVxe2nfXWPJekHMRQKQWb5+0Pbao161Xf/zHV/gf71pLtqnm+ZF5qV0foaNnakLoVSRqD53KZwC45Q7sLka19M1v7PUg1dkQmHkDjSVmILQK8D3aMVOFYRb5GZKGDUIU5eOveTP9dKr90Zg4c3je7E+N69rOBZj9oITVlMC+tgnLgTvkZSkmSy9ocYT+7GjN/3CohVVqL3w+23/fdY420dVJEe3mZ701/iex8IenNYzp7AvMuKuRU+uXPXhpRo2pkZ0x3gY5d7hcNWWWt4KYd9s2lk9mvmzT1tbmw8P38dCPRGL84LJpMN3TOMrCzrPn0biCOykd0/wZwgeCrDwkmnOr3VwC2M+NULi0lGaJo10xnbEpxGjiXsgEVXAFqNiVswIFbqC9V1/KK28hFM7g0MzThBcITjQ9Pt+NgTg+Bhtek4kUIDjrTijwRpk8J7/O/a8pAxSkDj7Z2sVG+dOU5Zpe1Ofsf7UfrB9K72SzPXt0sge1nr3w4j3JyNq8LAcWRVkWsOujITfegz1HQpXBngoDhZt9jYqHqE0gkYvCYLyGKmwey/W3bVvXrMeXHvo1nx52e/kEtJTA7EYWPb3zmWD9H/6wu+EPLzDBbGsJ9jkGWn2Nl2z9xkt2Spqo9XYQadQVSFDcsAxwc3K9TEAlkHqkMm344RWegYgjCWHnkPrDQJzQ3AR4JDBtcLx/JGO9LDFPP5K1gu8TC5UHfzaqsClIDp8y/ZigngQf5cRMAMFM39ZBRsNZQXbsoGgGtiMNpWg4Ft3paEusAyDSbpQOuh7YUJD5hhR253PfYx+tgSwe1EMbDg/TD/XF338u31EVL88GKn+l7wrnRqrUYcmRVmH4InaYjeno6Idqudh0EBhNaniXHhN7VhXpSXXGPFiFZXSJVz6kf3g7u94Del+G+tQEPwbjYJE2xfIO9WHQ3ZFZwRXojOlHbtJAgNDigRstxh7t87xCJg6NfqYRnQNX8VIH0r6iIMyvyIVbiQ85Z7Qfa/Myxwu3cUyeu3RSRqDrwrDE2ukRguL4b2sVGLR6Dsti0Ds2c/3UC66IFLjG3ULcf/1oQcLv9ewZOGVO14r7CMVdrVm19RrJKaHbSxCCGGrZ7uL/EBFAaOV8e/WiT9sM2KXpLx14RCsnNiQ2neMZZBqbJL/RDchU5E0jBSEzhJwUtmHEEJ/hUlv/K5Y9PQ86k/9ew6bttZKbMiS9UmFxFmRhP1rGjReaNxrhIR0DBpaznciHkmKDlTj0pXISqSNETn2EEqE71PC/CF4b00xJbVNeGg3cX/h1Bi37bx0Kn6zbOTsnPFshuDxfJeqlpvIv+sl5+NNM4AyMscV0EmQkVkblb795uOzz7/e9U4M+xkFAY4uHNq7ca5I0hmCnx8lHWUax0XMFafDNNH8FS66c0/DZSDGfBpwOITgjLzGEz+H5AwjTqd3wd/k8B2TF5oWdM4YIp1a7syqwwQBJP6tso/A4somyFllDghAn/bEUUHlH+W/crfRcRlcEPA3nqP2VCJDrBwF3kYSKk7WxAIdqodoOPG9tajCVYtJWpKD86/fc30hX4G9JG7wZM737EPw1q8jSNMGgvskZvDXmLaZ7Ntm/aLq6+lYpnq54oVjBStSkdX5y00zjv6L678++9wR6+0H9w1DcHXDV3tcRLTpedtOx+WmN4xaKG2vVf1pOKbgFIWW2bDJm7TB5TNKx30oap8buS+liNw05GE4lZI5eg2TslfIFMZF
cO9oJQS5fAYrimlBY9EL/lrjS3NTpiUYEa9Qbvh1DTb+7K7YxGSlwicPw2j8wksGemWnYvuk8fDEmJD12tEd+ztWXOjZPI3gLcjSWt2U0DgA7nkcA41K5iDQy1e9RdUFyUlPwv4kHizjTP/44XX3vQsX7McH0o2dRg6jtDlJe/PahboPnxwgxdXa8flAdx5xplkJGYIYkrKdWaXF7+kZJs+IesvzMOiVE3+uOLNT4smUeJB8YYuqbOxj5/L5RmIIIQwgw/ISseQavSzDeEJW41qMNTvWs3CIKrDSCz9BwzV7GO4vjeg7eXwoTgbebahYfUKEON/SEobdEtSocZFgWSYHksY1tlAJIYQK5EmsL2Rtyx7y4B2/YcifNNHIbGT8luahIE8T76QNZecWVSaB0fxY5VEGG+b9VU8uuDeyFU/w8GRMEL0lsfd31+zfeZyoe8deonz0xqOzz09PvEmew9/dWTYJpeDmHITj/W0fes+jT9ipHKIFTNAUnSMTIV+U/Oa6fvFdvzLla8Hv0TAqHfkvjtDnRjksmJ6ibiJPUwjBNQyditAiD89A0LP9dduxNKxLBy8PRvvaBiNcFaRU1CMga3bDK3uWO2uaiQrTdzP338tHqNW5B+ogORxJE9AYL4p7XvEACQhl9EcNquGqbUxWW4UQwvOR/SENnBBCeIGoagHn7+3rEr7EION6CB4XRGLRC3X/HJ8fmixoFH1YgKBo4hRD8GfOzZ2eb9jBU5ljRiFI/dCTSq96hKhyHoaL1mAvlg8lko6GoWpokMQzZgxRAdclEkQ+Mk31MLrE86HAZ5cOlvVawHrpM9Yen1+urc/Rv2IvM1yUsGoPeFQYHQPpXlA4AmZWcDQkPFXaEJ96BJZIHOjmPfu9o7siM8grpaXi8BeYgtLIL/eIknHS8epctPnQtBs7uituLjbiER5MWEfom8cpXYA1x0zdVnvmr42atIpRTieSoYDn+L3mQ3ctC/OfQjSEEN6rm+G0C8Tmt5aeuu897dvf3V70oGh6mCvo6vyj/ZvuezSA1pf8JGwfYrKeSbrrGsLtxIQIA+XCgc3JVNJdFDhq8fNAM43FnlgheC9tsKTGyvnVA64ZYPBVeIUT6eANHp7yrqS70DCz+tykynBF86jzNbLgzUhUY+CfiyteqnYOzKgu7fi/o4J0DV21WzeUgBKMMSTNcLiWqbrQtYToSfl+4ac+erX//vnlE8p4OobRW8v7H/hu1Titfnp8zV3b7Zi0ZGrqghhNPHP7UlJOR4UFCxdL/iEvX7KI8RcnHgM4xRl5WvaahVHWAjlBFPeRku4JwZPeFXGPxYf+fPfWEPUM8zfS+oC1bvlNS0V6+I6/RjmlRsgsxaHTCHOswofPSAiGYuM4wWOJPtA50b/rXgX2EMaKa9kTQnD440TzNkR3L7FEOl22JMrqIf8TPEh4Zup2navj26iYE0ZpAqjV4CGwmCkotRUY+NApKKO2gZEtfUbXgucVoJ7xsnR0yS5IXw4+QOxQcyhIiwJyDIOqKm/QyJsSvN/3Qukidu9YgA6rWTM8iiVgbCROtz+wnXGx4gXiT3cNiXVn2cJcuyc+xH13zZhjX4x1dZDfveNTYVMoynHA3ykFKKY/0aSxcP73QgiBlbtkItXiARo12ryPnijxItrVmPecKoiX/aOW/CbJDu0+/Qtm5Ci79zwMYjTyoFyoFb1Cb/fNpVRWbgpVdRBYScUoTkIwR0o7aTSRfyRWHqr08vSID96TYoOUBn86ZiX0F8r5Mz1COKgj5+X2srlsSwWbhB8+veW+953L5ri8s+KrwNaLdo4LkMx/te3v8daSndtSVri1uFDiBU9B6Z97gj59Iv8oahJ4Bcw/GbyrL7yzwDWMMRK/rsF3zmNv6L7sXOGm9ddocGs1oWM05/wqP1RaT6/gsxAjVjmN/IO4/pGRsno9j66tDs6trtcCojih6S+SuyvA+VVGYzq8C2IM0VjU52c0loaGAqsnoCzJSpqa
Rs2pBOC5NgvO6Xr5lBl1COd4Uk7/3quMqMEzRd+koVhpjl2UPBFyR1q4ZR88Ca03WJ5jH6sS4t5AfeBQYu8X84aIWhaJ8pOTG2efV5DUnQq8n0ZOd+Jn8j+5/PnZ51+2rGT9n1y9777Hpqaa3pge2T0FcB+ai/ZcfRwG5WggdkRzuo4xV5QaiQI7VyIGBPAymnfuIc1CDI+utTOaBEhb7trhVhAeq7Y4dWr9z8M4Bj5mEWnMo64/ud1jO6HFupASOs9ehA3K/rnVE0oAQzlHiC9wgEGZT+eARNJiOpjiIjNt40t/k8Ft0OiLOzjDGazkvOTsoaLrGDH0j648cd9bBgP7RHIMTIOvF82qXNvwMuKTlnlajw59hLjXNmuuccVPcufR+aUsagA2vgSAfMPPD88x9333kig4CHveb16Gi8iw+kfSFVRa2ipg9ZdobXMr3Yomf5jKHxpNml7lWSIORZ0qnk3i6ULwke+OD0o6BczfUkxXWLKHzEn/vQnK1Nm6ZCqRrMoL8LpJENxRj2iRAs+0wwCKrAZ8ZSS6hrggTTP1Gbkhd5LCXAbnfy+EEFY+MVmw822byEXd97ipRuJiI948FMRyBWEJZbiSG02VFPEgBJyF4D1FBmee9zyOhk1BN/te0OyXbEW+UXnurpGwcG9oGuODhv/eNZzMthT8r+I0f7Npf3ev4yNNI/TuOZa+QaV1s0gGHW9Q7W/Z+7DSpHDgBfhokah9dymcokpLNzlLAGkYaakjD6nmhdlVm1EiRgBD8FGdRGlpjzgEb7TWn9kGGtdAQbA/f6QjnF82jF2q+gkt5k3BHz71oc0cvKajt+T+LOdEUCTamVhDulgWGj8aXmfuPhYlUhXL/Vd7Yn94+IHSQdt53+z5ObhftvNzo+o14+dty9Ex6pIXxO6Lvt1zIoUdkqwAACAASURBVJY+DaxfHVu7EjWu9kBCym7VIYSQBUN2e1M8PsyB82zF640paI6FSBlyDmvdW5+/qKeLFDLKojwtEkXkYIWVciq5bELu/M8hhLDywH5g95v+Ig1/5zNrpSl+K9GegikbAUWn9aYaLAnOUaO9GJT/p33geQ69wGckK0HgmJK60++S3WXtF16A7HwHXdvFgV79hb3PYMlfZCaAcoxs7CGE0HyIc3Xdv9vRXfIP8dnTC3XyXxXTMsN07JIdgo/W0ApU8GP7WjqqvrINkGvJ7r/T96u4gbz7dxYfu2s/a5up/TTnYemuXw+iRlqldQ2op57EATdHZnyRLK0nvCLbLROI/WPvoeSqzAH4TZIlA+1CevTEW+FSihhjBz1N+Sxys3wAg6QgHe8RdSC5oAqEhTFKtlX3gYcnO/YP2duwOV/8tUmmk7fmr7UEt04VRs1+2+/Zqaxz2lDlxmgN51C5Mfg9rUhxFXfwfjQMz99WwUlB13iaDj5llCh/LD3qlu3MbXW8wcBqyE2p3b3b2A3njZ8felLQ95YMxKwG1QqiP7uQJ2rwDLWdBMYEvdJyS14zsss6hxqOdB4SmAemFTB1ClCnE6lKbB4GlScLXRIYG7x/RZqguu/Gmqfup9//5IoJTXX82EpniKhOgtE+ko5y0RN5rrRoXZB0UR6s38M9v4dySyQcYvNQf/7yJyhf147lz8+PEIcQQolGH17g5KoY+jAg9B7jCtispR9a7+L5DpRiifor6aBrt27MKEtKy3EzpeCJzxu/pUrLPjvgalAlaP+vijoabsKkML/4Tk0BCzZOJGb8LRhAWwISonF0H7WeivU5BKQ8L7v8EFGif//k7tnnqYBU2RQuI52zKRynN7w1zfxp4x6I2rQnFpTaUEDFrimchtRRycA11GjbaZZgH38PbjbmjHWtPQhWoj8wmBN05fjpFvpnzSNegYOEdpO8n8/2ie0bzZGPV01DZKQUhHPowsKiBFxaScLa9K6aDwAUfTs9wqrpB95z93f83zHkTTba1rteo/8Bqh+X896y+9uOUUbMNAUMwcDzSAMnhBCG
2IBqyHThZl9CypoVYCGEsFRGKuXIS99S06SqsjCPiM3RPDUGI2WVfb8P9j60z2Pws9QeieGI39KS9XkYjtQP5zzBY+Mq1iT9CZtX9yJ1CA2eYVPkCG0mibTROGK6Sw19prG0nRJTOKqoOaaV9HwwI7+1a56g5qRnwrvyhh2s3qfeIaDhrAZb6y377aVP/T5qvWnvxp5hCboEvGcCO5NS7RaCAI7r6d/jujHoEYLHsnFozzBG7HStYyNq8JRQVthbTwfROjrxvoYYkAKRB3NkWxDSClp+3DVr6KqchjpqBTdFcDLCcwJywazEwPYAkBiLZpnh+Wul9BTL9h5ccAlbOsS98Ovk0ZKAaHYFOLLMT9OG9PrUUOIcu2qPCIBVR1puXD3WCVJa6n1Vdk36jXP+WuOJ/QDbTMxy52/+1zmqxoEZ9m7aouRy6UJuQUCHs6rNRaIiBfYPt2mCeDBCVEdhPACh5IKkvrh+3Uv+GrEibCz7m4e0jzEByGKAujDBXS9aiLgnFttnHSMAbI/sB7TJaCPHiKu/B/+OoGjF1x320HMrsoYTiQSRJZnRb5VxoBtKtGxxQFowRSfAsmldrudkFABboDOWkCmsgFIcIlvgiNOWc4Bm0GcoSzKMwQVJc/BcOQZf+S3OtTrrLDDQa9wPZP1WwC6dpFxWIjc5kwukRNAoTufG+b+lY5b1C+B6mcGASHR+R9xAixk4XyS8DcHrBtcWQuaYZyQv9sIQDjVTiImmtA50HV56RL/augmcjhxkNykRIiTX2kAsZmepYm3+ft+Hruvgx/jhgW80yNy9lpv/4PDO2ecGtMeXHZ/6olHz+MADF+9esPB6s2T32Ol4k5Md0hUERjwOeRJC8ORjLBtOhL8hBJVFlGV+id4qS+cL4xiGR0uU05SaHoYKiAcZAQwhhNI+QSFeok+RQqORQwbmeRkuqvXIFPpIWMQLHfSD0rYNKHcuCZCfkU5HxiYKgsI+ibnC38E5lCBLyCMkzZRyCCF0wG6rQFmeYxrizV95cfKgYRpou+bPy6UbZqX/xc4dd20N9A88mwMpveHZ3+z6yA0Nm+7I9tt6zaNlnWJp+305ZqWdCHfyZzm+JGGlJg4hIfhxJKrPQI4Ya/MhHe/nYdABcyS0ysQLDBnZk0OQ5p4iw2ijUkaqE0ClqJEJV7KObTRcltRaSrVVCGKkxXwxklJK8UkPe3Gj4bV4rXC+Q32/6g94YQXUHUd+U/E9j4WZPI+eftMK9LL0bSRvjuJvmC6fifFNTh3noItNxnns5LxRRszvBMaPOnUs1ta1jo2owbP4KD3C45qPcV41hw3Fp4KZ3aapxJV4sDUCC/OBD2F844KFuf9qz5ecNmEoPWybVfBGw2sZeoc3VnwEidiDDE52q+1Pc4ZlhK30RqvaCI4byFUuyDzGBKcDzMkmpCAhgFWFasxKTkutaEO3cZVAdglVgodHKyMyUDokGyS2Z14GD/IEZaXFHS99aTRqHtw1INWUL6aGgr4qnHucX42mpTGeqtBgJZ16eRRmGmZ2xjiWWRsNThHN1NL8jw9vnH2+s+hZTY+QL2qAv2cklViM3NRFWex2tbPkP/zNyB8QRyAq3d5dUYucueJeSp9Bib42HgMbJ9VXPEuuAbA2pnT9ecLcDVZx1l7AiBZqis7ldJwOlVYp5oxhn0qW0V+TaCYLcLqX4ARqyxAaW5FKxaV7/uLW7+M+gDtkj7xgLW3YD6zKizK1O5ja3+Wafm9TD2nvyoBO7WHozwtT62xjlElkXtKLT9jyIrfv546ONx17dRaov3SduDYuih3ra6atWCIjTjzIQygH3jUqjnFxRH6BYXRiTaYSdn7SNjf1e5ceuWs18Htoj6ztvnmVIzQF1RJWkpTpPbpDm+mTZ2ad5NYUGo6ScgWqwcjpXfTXSPBG5lAtIadw1K60jP4kwHqIsI1q6QKHr52oNIGhyhRMWTqizyI4HRrItSd+7kZNewEaRlEv6jUNJ3wRxZlU
0tdcFVgsRcF/UxgoaDlzmi6UCAviVtfvlUAxwDYWIcQN29zh+ddUeIUTkGXK/b+7/Pjs8w/3fX88siuzse9K0VvpTzogcBQOnQJaSPCxDjt+MXpH9gKZgRgkTBfIXswgxeWiryIWBssoL1Y2a8IoCAuIRBYYMZ+XQeOY1TQ6F6esLhJMRp6lypGKRDrGmlZy1cISHKbj7c6ERKkpPxOpRcjFQ2EgPmV6isUnsuZX6hbZzEnVIRvl0hAfS2XvaQUPLSkzt0+VzZo0SPi79Y8lZQbDNKG/cX+NZvo0+8sJb5UtXA+XaZDbuT6fX1VrCaKpNcVCr2YcqdLiCxQEr0BuEVb4HA59KIgprVtC5rOPHy/KQz49NIH4h1e/PPu81ffhb3qOmqqqFu1F2xCAk6GfulN4CqVdfxjYtVYjPBR0TCMohgfFYkmLlsIyYu1y/lWosGFo57J4JQhFc90TTe3I/ivA6tKxXTy+4yUV0f7cyI1789caOu1Qq3CfRM6EE6paSYHt7cjBVH7QABZDg7hcKouhhpYRvlehwcqvBcWTEbCL73Vv+vN36ZqhH1fL3lipQJqxDUQIIZSQP6fgf9j2qejO0F5O264RZLxRtX20Hfz5rl1EBGnqHaH2I8vbFy7755/u2MLNuPaif2gMKfaO54UO5WDFf3Hxvr1c69b8eQGuspABBlGIPNsE04cQwtFbtqGrAmQlNxWNQXXMqE803cUKxVh/uf5GCrA3pMM4QgjhFDibLKqoput+bzMq2Syk11Nz3xfq/h7jA2yWij9z+bL9e6RkmRX06jqxF1fWa+qeoezFUxeplfsz+gNHPgFej/Te5F5ykSCxP1w7pVdoqBs1eAon9gJ98dBomdWf2vfYHTgEfwC0RxNJmZgyy0mssoFw9YLstK0BGnqWfXXXxqIinX4zlOa+M0Yjw763pnsZ+/fKJbPOD/e94FxAfyvts+JK6JbTSwxpkGgpXw3wJG1cR6Hal35ldfwdMRvlfb+D2Owz0RbKta6wv+te9Nunsgc6/5K/lm/b3/VXhFdicv4hGq69AqPU1zQoDOjZJiKgKdVWIfgomRpKXNtYlRorKdRoculP8vVIVC8XARbGwLGO1j1iYA8RVd3v+w1NoPK9fZ+mvrZkACVGbtQwYhTn0b52SsQzkiBSKrHeuGTptJ4wPreX7PcmYwWM2J4lZurkuj88jldJO9LjddhFO1HVAsNBCfvmYVDecf+qg+uBw/4lTxdQobqUPoeOAkX2KCt+FC/KYhqHLxQjlKkejUKl8caFEFxboOkaLVl/6K7VbG9rpmHzxCbvasN0WaXs9VWrCBC+3J9DG1A7DA8IQ/WsM6qTKG5B9mIs5fLs8UUdothBF7GTa3SmHAXIC79QjEiXz2exOHfE8c0Eo4kXyS6nzPHrBNW2oGSlh1J3/XyMhnpabzesCeizgXcbyvAGd4Z+hxay52uMn25edf/+4KJhhpo1r4F2d20TdiPUv34jSHoDbTmKB0oTbp/T+pOFEEKHjywGCau0VKiyTQQPbH/Fa8nFJ2xqpwYJnrecfsCGTfu78qGf++GS/V79id9MM4CWIffCYPkVTPevaTjmAyzzVFJalRc27yNlKSdoMlLBwPurN0vDdl16XR28a+eqCjicgvsoXLQKgkpWgdVp4M2VH3txcvi7dnYWL/kfeIEc3Z1Vj+F52rZJfm/Fzj6rvkLwVVu1i+lRIqaprwrlBZXOjrSLyYF4cLwv3FqOIwlltoK1Iq4pZqwwWliQ6iM6P4m04RwMGsuMDOq+Zyq+fV3ay0Tei6krcslIkN0xWWvhRRruTNsd0ZDRuSYoN9GaZ98O9WTN9l627AXy/sAOj0Z4SKzJCE9/ICktVzgi+mQTk9X0QmMMbA6rg3XuKSeyQzEc4diPFzVkyb+zz5oaTDNqQpAqYDy+Em6SDiNGEaDjtzQPtc8qLB27Jt5bKaCJxu9ckoXDrzvsghgWx2OCGL1L2YdeHUnC8ULJXIyd
AbqqX/AmIX9vseQ3Yadhzzz9FdyXNa/Q2U1Z+5twcZRHw+VVCVL1uG2Hd4pVJygVOA86U2sN6TTMzrRrP/Xh+52PwCkDi0TxBK56p+Qfsgi8T3/dG1uVLTtxNIwKnQhq8DUNck248lBZEwKHY6XnWlpLr7LBooELQlgJgbL1PX+uGG0iTEAVqSO7E45HV8Iqe4oR3dbtSIplnG4cTzBhBGiGEMLtpoWvnnftzF2SPnc8t5OZ/60C+E7uVG3RtgUM9bRnlt1aTRYK7/2w55nVF44AWo4w31JoT+Sab8mTniqgnFSSyXkYlD+MpCjXDp3kIJFBnokYI7NzMkRZuoilKFkCq7MpTkUIfi31HnxGPe8TpEbZE+tU7k++qKLgzm4vmWdBQzzzmd84mev2W9mqv8c0w4mUdFQJBtVhOv6Q+kuhFd03wCEmXHQjVFzlOjQQ/D1oSE5q/rd9dDP9OeiQ6VrERtTgyaKxWk5avLsQOC1m6aHkXjaSKqGAVRZjlqaWxeweg2ugNfZWWR3GUQMnrCOcHfw9DWuzbHW0dD4wLQS/STTMH00PADy2gDnWzta0ARPcCBQkMseMmhJvMRKqblYxHL3jU0nOWgduJzcQMrZauoJjG4rypp8gYnr4HKXN+Wst4XrIYc3VG3TrKg34XJWWAu6wzlUagk3/RZ6dnggN56V20w0SPocaZY6PRDwoKg8nvLSVAJiW1Uvdg6e7IFqH0Rmms8vKjYGx0/XRmfcXjTCJzYLvnXiP46CX7h7mESE+FTyEq1yEFNUUJb83lKyb893o+Ci4GWshAe65GIzATEBvoCmnaDUpMaGSyS7SoYO9mugUDqyhtiKgjAQ2OGm4QPlXnylGxT5rVWPo2SZwsjrSSkKLc/7mCwPvf/eOFedM7vi8Dx85n/eOd27HXm5wMfixwPnhQ/qvTUCcOLzgZXyGIGmhX3Gs0ixqktL8GSq4MmM9V+dnJGK90RYiqX8dUYNnCNIyxQkQBOb6m0RAmIlKEzwolfFG2cd+y3jztmiIGwAzDCXC80XLhNsUp2O97KX7MUgJtw99HLZeM6k0Qk5fW0S49J9WUZXTrxHE7BrcSW6Tsl6p51n9oflSftcdUt3kKG1Wz8ZREMBlOX7DSwuG7Isn/qAUD2zuDt/1SsYZOQemhXsXXwF+/zUNzieNmnxbQNoAPGq5NnPOeljz6MZ+9Ka9v56dlU9tngarQoOAe1JI99eFbwOGWF5SWjSG1IB3/YawlAQthhDCrGVfHAm4kmlqTUWvoEM6u57/u6dvu+/96dV7Z5//aOOBu0Y5sYv7axVmH/3QFL83PQAouuaf3xky2BMXfua/d/iWrY0axY78jVUtoiPpBedesvrl6xzcK9wbyqJLji2tWKPcUh3icDu4v6ZhSZCp1yYpRs7ogjeiF9DDSsGMNLay6sQi/VlqmJCvlPyit4EXrUu24oM3np177eqqD83SKd996kOz+bumSDN9mUiko1ylaE2MGrRCOh15GV9pmjLoHYll6qAPds8EEgSRocZDmWOoX0ZxtKWK45T7qnppNR7ZXdnUS3/QUYtL9CHHkLqGx5CmoYep9O/Fut20KSzJLVgTLFEPIYQRQuUDACifT732oCc3OfFCb1iw32YH24ywso6X0Bm6LRgYttKK5CxdOFVSiOw9MxLG1o2PTVq0b/o/pCFDQVJ97DXtyZUUXpEQhBuGprv/Go2c4aIA5jr244wchiAGc3Hh3P+fl5HWRVwBxkwR6ZlwRqh2NMZ6NZ7A+Nvwk9HFv/W3mULtXiUYVsCgkWoxxxGj3jiZt3FLJdVkyWyl4V+ULVsUvHkAHh4aP7+78cx9j1WZXYna0vkh63JXIrjkPqlWpJgB83W6nx6mdSWyRb/v07AjIaSD0pXGIIco3TyeCec8uchgOvhYg3UskOluqNdvn13aSpZkMZIC9qnF9PLHfMv+rntN0i2Ob07zNPZxPLJF6sj9//DiQ/stKc4h/xSjP9oYl1Qp
2UUvrCcwUBYk+jNDbIjszdmu4kqRmqr6hSKh50LJC7ZTyhc8Y0jInfRMBh1ARxYZKY9/FW6qeFn6GsiPJHfcBRkye+soRoUWuUY3+G+SFikh02dHlj8v5dLD2ieCAGXXcuJ2dANR6F294QGUL/ZMO7GUb1YRDA+8ZcVDULFMyn51cli5GHr96K3zSzNDCGH7u3ZQ1Jp2aUNsIBo4IUiIUKbYgZYxxbVtrbSIeKklYpw0BUMjClGTOeQcodDjwU32i7HPOp/c90ojwD1wchXNELX0HJASPVdsL1IEOZhiI1y/G1G+MUZtV1bqeH4k6nnXNqq2dGDkZk+AZzRkcniwmkwCjaZPjnwYjaXopJ14suXzSvVF06DDsReHM3q3Qu/PiBiVN9npQ/CCurbp70GwM8/fYC2d04n4qXkZ0xQZX2j7ZyWpXP1Z+nuofHNnC9uoeOzv0bpFGePvwXTXyd30HIgzrjp+zxK0rFitYs1evFQAzkXsIkYbLxR9JoPQCnYGIP1CCCFMkF04FWNiRrLBhKGBv0P6qfbYf6+FyMq05z2hMRTMtCeRZXQbCOSwGqjxmR6lpLFfgirWirkYsWtsRA2eBB4HIw3UyF5DIXgBPpCSaW5sKuZKzkvwGw3baTfKB+7alz27qRpKW3mbJQIcVwveenvQsnvQeg4hhJWmhZ56FRO4/UdSgsD0ZaRpXk4wFeOUMmRVYgz1qVHjcCWREmgKpkQpc8pahODBw0xzavsI9stS0HIOJDAzwUOQZ2OwbFuy2J6/1hJpqR7NMbMFgBrALClnOi8E7xUTy6BpRnqUWlJejvw2B50YxTLw77RKiwqeS6kAxPDMDJLpRR+Z/XfPLT31L67/PPUZe9jcf3XomdTvNs2JqUjJehuMysQBNZqChwCuobUrNP0F27PLP/MTxAIAnr/lL4RMDmnfjrRTYEUX94EyFHOtHVvxnAyHGcPeqOwK9cUy0odr/j0Ixk4Av1OaZSbmCdtPK+Kc0QSFm6n69eL9dT8zJadG2fDE9tu165aCag18xP1y2XIziuE5wZ5tFmyuqgVpOI3Hmgm0gjZOgkiTWBp8bL8p3lqPC+ovTRBJzXb9mSi0ILuAoZoVX95IzzN1FdnqdCS08CM2ogZP7YVtBo0IcANxk/QEKMUDoJ4i19sRzonby/D0j/ZvumvXUGY6EUj8GOXtj49Ngr+z6hvSsEcWuUNC8AZQDfnY7rJ3IQrP0TpBZpWHYyDAxbJBGRweQi3aWAiPc6dVHK61RCSV4gjSlE6c1RaRvTtqpKfFyMPTlWq9xj2zyvqXTEAwKjQvY7CO3DSietpzhntAo6NM6SX4QhgNfEkuHBXuPINcy5L3FRxRm64rUwcFuf84raxULPFZg0AXLz/+7Prfn33eFFAanZMaLP/rdS/ZGP3RKi1WwxAUfbwr/FnAXiRo+iPbj/O6dM/ucfCOKAFEgpTfig4glYWeYRoA2kl8HoaT8dwCsqe4V1TGFNsgdRXqizT8TSIdTGUv95847h0YPCc5+V5EwME4LrbkvNfgDCNSU8p7PfGsZ3poSRxvUiaQG27znk+blC+ZvMznvVIdsU+hgIULaIcyWiXQT4oqcA5m0vk9dwhMmkaFSTJ8DCf2QvqcquxiAKD2wP6uJ9aPK/x4hYa6UYOHtOiqxJ1VH8nN8gWKclh7YLXMI/LxZceHgi5XbCP8d1c+dteejsyCeD7w7uztRXNNWeFxMPKgWWJ/XkgYsN8BKeGJeYCnEpYbX7PTt7DrV4AEi6p0XOVGpOSSuIyEQcLIjQjpEdIbjcf2Awyn/+ameAy5pF2e7Tn0/1GmrfdAubkyNLfeAlt2C5Ux82fvuKoCei7afDPN6w0hhN4FSmZ/zUXesB/UoyykVK6E4MGxNFYUPO24LMTAdsBOCWamUb4vCGcH8VhBvLwSpKNiGTgYwV0U/N5fbVrER9NR5aJN+lLZJk9Za9mXSKsMs3tgU9awOV5n/wObrMq2/xpFkkYB
HUs18IwqI1ggMlJHaA6Gq1jDFji56ifNFz/4e3TX0w87nSdGM1U+UA5qYUcajYrK0oyTwf5iaRdUClItNzq0B+tXTBeMJ94AvrtoUcm+hG1/sW2b4ELdjJr6VY/AHgyBh5SO60UApkf7Pro0umoTeQojJ9uWrgFId5E1OgQv84o76YBjrhkpW3QMBMPDPXL8JiJq4gRwfV+loe5v4eGBJSzXGEZyXb61EVlK48kQ0umnlQn5Q9AMf973IaRnfZMomqr60fMbZ5+/f/2Ls88l7USG0et7S6NYsRcak2017zfaFB6sCn5ao9pLi/l5dq1lY9UQPClhtNO5zP+MwoIYob5/DpJHaqXXQorA8QzJvru5Ng8d1bP4nvTgQqiY+J58b/54eErAxMzy5xuCIXiDQd+C60UG7RA8wSS9eVV0bGegQttx7wBHEUtRK76HtqwaPDS2nHcvniL7TY0El0LG9C/a3oNldOaX23bev3nR58tzEPYDOXN9KIUmjFEtE6bBE8TbZwpAjXQaL6wgUUXootoy/Y57DPOYqOZiimcOnQAOsuknms5GMtS1TUZ4/N8RxMx+VkqlQPyHkt2lEXo6vpgQwmgRTaDlmiPW0+akYDHurtjCDgUDc69iynKl7AXtat30F9nB23vioK/a33W3ff6PIOZs0z/k9NAmJVNOl62FY4CWRRcMVuHkaa8xrC+DIAuSLo8xVnPv8+wkDFMyxMR4m2REDR4qRd2E/tfto4YZPQBPlTgUPJstStXGFKdc+2W933h+9vmv9m+7ax9CQH7eNpTnh0vP3fcedrAJG95oavVMwzcbtvrHLb8JMxC+SrldBMnTVDYaO4ezzYSGVomqL7T8QSRPiqbMKBSIuUkoSW4ubbwHZVvdAoW65NB7ALlX9vw60TgaiyfNUvThEgyj7vwZPBycQ+XTifEmUYhoZIUeDpWnRvxcE9c9v1eO3rZ1ad+IAARZUq4MAASHSsTBNbKF8Er00cM+Kja9YNhCfrUihQgEc/6zmxbe2B54y+utZfOWd8te8POe2530sAhbRuTFm2W0QolXXVlvBDRJB2Qgj+HSXSztlvuxfeCrADS/rkGlxf22+CgdpK14zhpsWYKbQ/Dr4JiWJYoTa9lCWofaE1tnpWqgrFY5SINKjVIaWIM9++KpOMYDpLt2egJW4v3w0nmJShK0rFVaRHXkpEprWqfnbbJa+zs6Q0YicUzdq8HJv+N50SAII5jKueQKZHC/RLT//48qLQKP1JBhjTzDuDGQpCrx0u75ILBFodzm4i9LbIueojI0f9ky7U/g85WCz611S+lJwFLOtEIZ+VhWgYQQwiePWWMvhHwsUU54wTD0qiRk8s/Bg0grOwQfddB1clTvOF9aHdR4CibkFb+7XIoE0ZmW8L/QGOqveqN18YH94KTs5zvXAxajgqhAhMjwdQ0aKI65WBQijQQFYToqfmUQTanuKviOCFEAOvmWHClepG9NojImwrTsSPLwWdlQT/F3jao/0+TJWSn6P3yB/niPW3Z4vrf+2H3vV0cW/VHCULaV6Qxsv+VzXgnQkx4J5oGClOX9IfhKk8JRuiPhGgfL/IxTMFRq3JJTUflI5mJApLFity19xSi6VU4xpaXpC47yPmTMmjS9RFRBDdR0LKMUUKCKVlOQ1FcaQZqVzielzUrpdg77cqnoz8ThwA4k92+h4O9RgCFz3NW+DSDKFdxc9sAEBfev0kmUt9Mrjp0xJHLBOQhwpppf+Pt3rp2f2QkhuEbFXE8lP6WMY8X4bxtRg4eeS4JBlF1xMSlq+dafwKqX6IMjMMOckIU1hBB6FZMAWwJYuFu2UocbVW/I0ADaKJk7dTLz7mwVM6tGb9UtTgAAIABJREFUE0vY2yh7nwpYxnH0lGQVSTUueeEsSvlibM1UTgkuGFYniLAogBuH3tFgSbgdNtI9m8oe+C1gyGjZKQXf4kOflhw3kHeWdFfnqq1v8ZgYnvkzeJwhEMNTRAKiLqQr
r+hC5fTWpGrv1N1DsTP2mUZZ6dDPOxv9RnvaaF+ilBTnWIlF2ThYwJVLsLDudXzbhgK0Ux0VVk97XvouFu2QZOTckndrBamCZ098aOHiFZugw+uS3kDlTW5X2N+hJJjeVzyVK2WO9MqLVVryvKthOg+D+6HYQppxNR0EHmtQnMCBQvadXEXpeYTmRDGnTAmn4eRCCFFMGrnRVM/l0WqEbUKm0sCz17SFXq94C5h7VqsOOaijdN8vAFQ8WUzPIdKoVFDxqMm0vf87yidNs9OgJylrIh0caT5cAvHqcCk9muQwp6/ATRXvlo7u2lpN4gRiigAMwR9QRWRTOPDAt4RP58fHN84+V4RccB+l5x/WnoS0wWqPT08uuWuHQztR2tdnsWAr8hSVXgpGy9VwSp9596IGivK+gPPqj3EPpBAJ2ArBW+EFqQhyHba1OgEVDzRatecWcTulI38TChkaVLVNr8RIhjdY9mtYOkRZuoI38d4L6KJc3Z+/1hIOrwHdqWkfjkSvF8ihvtf1XpBimTVf7hrwaTqNRhmUrAoenscEf1OkGoYGG53IsRhNFSln5/h/tt46+3y57sMWm+ifRVb045F/0b2ubWI9j+yLRQVBAyeEEHpgWh4JM20GZbeTdS93sof23T7A4BrFHiKlX5DGwVTsNJp0rSlr1Wmch8G9Mq6wn5L/Ho12xV3wHccRbMgoUgQTa+Fz5S9ssz/7vukMVl2G4OVn7YlGf/BbcuamqGZyTowwdFdQYq4prQYMeHLKFSQ1RXzaadfv2Skal2b6/kxwnVxlr6RyaQTScPmHu9on9XfxbzYdPbkmt8dvJxsTQ1dCvibSZ646NLz0iBo81Rc2eSdXX65BYSLUxyo5eWiSFFKIXq55Afhe3RK8/3brXXeNfbG+0fALfB3J7785sj4lDan2OIGkVpKnizWLDNVKdmK37gu8vIEyWLGKW2/ZBOVO/DMevQeQHNDsqmQW79uqdqX0n5tGGZppMVPA0gAJIYTWLZ4GCRXTq8SfdS5Lx2NyiYhyZd+tQdPPQQFg53Hd7jlcfgXT/WsaVEYkn1NPiMzYMWdBe/LQ4OH3hgKGjaU2GE52QEIFtJPhWkGeEWIvbg/Ox1RKWEcN++LusXcH/4d3f3j2+Uc4myGE8E7TcuQnEDQ3a17DkePko43H7hqpLNhU+N6hP7eNMnrsdbwWmyE1sSCVLNXn50fHNCrAKKCWlFP+ucaUIsCZJlLcyjwM7h0WsCRaktCIFl+GKYuE04ZrPHOaaqXTpriq7d+zhUjg1TAogxO4rUjVZP86CjbQIb1Y9AbPWtk8xtHUy8FjNPliqrUnfejGfZSGa2UkO20JQH8GzqExm5MKLxqlf2VbrkV653EPk2NKgfzEGWnlKO9ZBdS2e8V/j3x/X1nz0B66WmulxvI9W8iDuwBAiVHjCPMkwsbytwwWTkHLQ8zknUXPhDxERGZ76B/yNsoDublGWf/auy1Y/HseRTW5ZMvfrJh2yggCPrNnUo/CMIQQOgDyacppOjk/baOH3jWui7V+UKIogs1xYmNsl1p9NcukWPUaMWU4WNpHkIent+bXt7+MNFlHPYo5G5xfclEpbQN5cmTNnbOg1VEp2BwFPsc8HApjKh1Nt2RTKiJCEMM5EvZ3aQrxZscDe4FlwfAUYX2tlXzol+muvUgfrO9esIhuQ3LuJCzMYtPuV7yU3kfz0Ky0iyFxW5RDB9GZjCha0m0oDmEhrYJSDAXK0HmkauB+o4wXMe4quLRikHtd+2A5vjacM9Uno0glq5NNrPSSAhBG6IbL6rThObSAC0YDKwGLBe9lUA8RsxOC53y72AATuYBlTgGnSCh7wik0bYj0GnVUImoYwZMl8HwY/C7JTxNVxc5B8NdGS6jyW0/H+jguMDFMYyNq8MQqs/betz8l6lr7wMT69RDxTYH7vOMlM/k3xnKKjkf2ttqDizwHZLHck1Di2+u2OpsV/9t7ByZwW7vpq01+Ka1AKO7b
RlOODXp5aZT9IcgBEwXEKVEv3oFKkTrSXjPueQWb00dTPno9BBCGEELp0HZl96LfWuTh0ZQZR/WxKb/BRnon69c1eOCZB1egZedKev6ZXr/ytjiGY9j2enboCdWf+PXqoYzX7Rs5zjxzicgsFZBEkxgZojJaOJaUEGrblS32X29/cPb5UsX/QB+97kgo+JOtq+57//LNvzv7/OsTj1wcwMUnZb8SFO5v2W8lCgoAYi4cSdQT602Bq8LdKQiN3CAy23ojHfNAmfEqrLJf16g/xbnfAJZFdAGrOmMM432Rn47dnJxv6vhBfirmlHqp+sI+K+CVTviCUIO4SkyJnmQR4Wd1VKcrqf1lW8yJsCQzDUsCXHWKC2D8D1XpH9mmZ+zl7EIfeNTbaHa645UNq6cnitNBk+Txor9/HuXsjMgkCi7wiApvWDU+0tACsXrUIXuFETV48l178fKBX+DORZRaY00TncIpSOXAMxXD3kAjyce3xvZ2HUnwrpZM8uz1PJ/Hfta0AkOJL7reMHratjixkqCdouklORuULjsWMXFej0YC+GdslqZlyCmHPgSvQDWvTa+K+XUCkUPw66neF7lxKJimBf+9IVJVU+GoIdeOGnONB3bQT/N2sbQdKdd4TcN16cXjaXjXVSzEQugRmBL3QCy8HuOo4N5IYKegIAby/EwdKKaECpjEYdOGSCWkgWZy+O82zMk4ltpU9r5qomLzmxueh6cFLfbTTW8M1ZGqKqAy67DjfysL7p1TUWKNB2ijEiFm5LxqFMd5y7IPiKNjL76i4Bom+J6WUc/DOLpjk0EjbqJl3Xj/GIeVYkOINVOH2g38nRpNPBN0KkaLwsuUzqLiIk9KNDpCc9nsFRgh4qgcIg27WvXyjdXIOegh7ZdVRyPegx2vy5hO04nMAoQ/rtv9E9WbkPGzguhDFEhodMwZMqBRIU41BE+k2ZcWI61b9m+mxTSlRSjAV8bDM8GiDlb8g9FzcR5gJPytwE6G2Cn41RucRTrH/mLPAMjfu/go9e/ut00yF7J+hk5QttoV5GVuyTZXbg04lF1JfdUIypWycSA7NTXIPTlJiR6EIOXK4tk0wHehZF+upwkxIVKlRS4XGichSJQISrJ05CWaIxeUxp9Mk2Uk3dW+XcP37P/nsTM0jddphOjMGQUaMsb0DiXNRKHKc5Wg0SdfnpT/uoaepHuXPcXIYEEiB/Sg9N2ckcMw9rO8fM8mi7T5IXjHYib5IqauaPwkmJZ3zQWciVKoFU2gPHpuD6xRqCy8+NEFLxemhXQ8h4vuETsiRg3lmnqpzS/RkuJde0/FazlG7/L8GTzczzTikhF9+yzE2G4/J6LUxP5A9in/FPtzKXlqJgVnyojFb+7/cpWy6lgOAX4mgaDu7Rqqr/TaBQD08wDGjta9h7jTNut7Zd3n/9od26jE+oQQwvQqNmPHrnWvCTccuOKygjkdrtsmZmsdHaU9CiG/Tt2LiBKJI8FUGGWQYh1dxbGcl9iIR3iQAlGDh8KSwkAPNf+tndSJbehest+qSknenZrNwoOuv0ltyb67J8xeNGyYCsuKSzxApcb4hTdkCAobEWwqXnU2hTU6BO+pJxqLYg/G8viMICTIHVFGGOOfIE8O24b85t92j6UHUn21jvAq5qC/4g8DwevK5Fw8tHVq35QmjW4uYRjNX+9Qx4ZNDg/N6dMh0AiMK5+NMMLSaJxIo9aeA7z6uWYfG65/IsyPvaKeM8PaZQEuuk7qrDiT8laGynsT/6JFEE3dqHhPiOdzion93z/9lvvenYtGPHiY8y/w6KlNUAa8WDPpen6aoXD34tDzZ7lLToZzDTlvIYSwcIi5k32w+7s2P671geK6YmXUczAc6y0+F7eF4qPycudFG1AzasZ0u0YHKCPVYXTPBWWpHddPwBGjZyINqxRCCIuXzPDoQ59cqPp89ruLFrbYGXp9NcOkdHFepjJZbHh7sCcRHoCkizWvR8dDgJ3bqEBs+JfJsKRc
ZAsZpac1v6ETzNT/MGjghBDCqGn3LO1pNgHPcXr+5xCkeESqv2PjtzAtp3QIDSEU4X2WkR45viO9N2K/kAJ+VC6cDiyBm1Uf7+Wm2e37DcRoEK3pZ5/6svTCZTsdp3UvbTJ9m9lT9oqStAyNkFjYNVapwcXWSFCMp4IVCQnvCJ+Veyft/ke3/aI5b4mcPxLFIYfOSCrmxov2kLp50+6Rmc2fdCfOg4JecTqxfZ8WMQvBK7dBhIfCtSgRR4uOBMP3Rcmlu2iaeOM09IcrYlABe+f2mwLm0W26JJv2jZIZK+yHF4IHNPdw9sl0HoLnxZpIGrwAYT/esgO5oFtqdv56/uaafWR4PQRfjUSHJtcVx5BnU36bc/fSzk4//Qy/rkEDjS0iEv2xIseZxkTp2C/EuH5+ul3Th5TJGiVjpRtl6yilT2AIyTPMM6jyOZdym0bBLyb1UFYE4Y+eXT/7/NEVA+SXhYm8DW97edVr+04Paauhf4FZG0bUCiI1BbHegC06lZSWq74VzBsjkRk0V01S1SB1F+GVYqZbDcwYDig24kzLwHyoomY66uRaejk1y/g1FUPEfWXTJmHzro/zv9Uw4diV1qhvVw31OZI3/9F9C3l/65Y1Ldq/7MHHgw7MReUdwD+r4NPpXpeNUOIC+3tUtuA1yCFl2oIhPM0t0xLWSBnnVT2b0xR8weJj//xHMFQTJc9cQ2w8NaAIcl98IFUzF23X06gJwRvWZFrWyph5GIzOuFSGYq4gh4aS0nLYHzGO6X3y7xIluB2bHAUF0qBiiFhTa4zwaAM+losuCButY61lWrrvv8cy9YI0Uvp5x4T7e6w/DSH8Tw/+6OzzSsUm61LNh+8/2zLg34Kc2xKqY1zR4Szdo2RkLAQfbtdCBBpHTNVrKiWW4qGX6sC4EonjWmv0Zx4GDb6TK2yP47+XS3GcQvDGhBZUUKzzihrw3N8qw7iWfK7F+5IWQ2Nf7QHlSDYlFVNH1QpZ/fcFHPd2zfSVtkn63lWDZKwUbLIetPzmY1l3R3s/kpVZ5OfCmt1zAiD0TMDT7BSQEV4pNh2dnfiorSs3v2HzoZHT4kG6YOc5IyxAsY5plA6/bUQNHocvkc3LTRirLoqVU2epFOCJUsiFEMIzMKy2BWOzUrCT8lbVS+39q7bZ9lEC2Kh4q4yLv/DAb1D2xTp5n/k5SeekhOJCkL4iaqkSyMeDLSBoNZQ4as/tBzV8yDQGDYj2Vb/JKcB1czXv202O79jLJEoz8W69jfS0lWKE8kf2g4Pl+avM4nBVh2zbIIYmDQE13BznikZFcM4cv5KkvnheErwimGtXii7PwXVW1tRMBCTP56dXpwZDdpAejaCRM5XQ0B9e/PLs8/7I9sOzTjoJjQI72T191gQZW0sqyfCMqkBd92dx5CjXaIxqitI9o8hGpteIHVn7ez/hO9+2H4uCdl/TcNxRmfP/P4R4xaAjs9T2RCmRIcVuuL6Bsp9dM0v8lvbtorxMRNogx9Wg2ju2TZBHW4j3lnxocIxNUJJw/IueHdYGfvz2os9qfHZohv5YWqUUUN3S6/vNuIDqscmAnBda9gujT6I/+Qc2CdO7XuidPrfNOUV0VyOnDJ6oo+hswEz69zhexQmIGjzFlr1sX/omcfMyjNm6JflGClWRV7MUXNOlivfkmnCbFjJ+J3/StvTUlYpHXr5Rt43y2bFtklLeb7Q8NsJQ8pKOaZJGjjbfxOHWvKSj8Ra+Qke/j42gqSnH+yGCmdmCBC8DIQSkr5fnp7BQHFD7lmnUaNSFlUmCOak/BsHbVa+hu9fPN3JYHTYvY7h8fv5ZOW5o2CZwWxCq6gToff5xqKLjeqliGaX0s4sRdOkz0hvXikHuD9fYUaqLjt8FZkyY4H7eMfrVW2X/hx1Yd1Uwq7PNRAgh7IE3Z/TC76EFGBCZdfu705LgDkAZ0ZezufjQPmvVD41MpySVkA7Vp4mWLYj80qDd/shPOFOUsfYUr2twLxZOSH2R
bkwkKtZoXIqBTWePAX7l66GhpOvgmvJGIjXcw5oWc88sUbiNZXuY/RN74M2iV3ofrD07+/xw4EP1f7x27+zzLiZEwc1kJj8u+IlkDy49LwQ7s9l1oeyVDSM+Y+n2PrljLz4diE3QNGWWbQEvJOBmRmdUnzjMIfZ9oqs6sY5fFYandZ0PLT+IeWi9gfCYou8jRFTDVZTeARxVlcZBN5S8BuMQHuDf7V1315poznbYM40xEYbLetl+L3/TS6X2lm2SBVDNFwTdT0yTtm1wwlErZWhos5xYBBu5I1T5Hd9Oz4k6QGHEa+c6FQSbQ0Az71d/5ueKJYuDpjd8h8u0tvxvMy9few7G6sn8dUtn/tmxGIuXwUaweqjJXqo4BK4fHYS8eJSnxIaI8GVK1TVHlOkk9X8iMgujRnl+iHsgTUHC6APe5HDorejfXTSMwqbQBzPUTy4tFfwTVBEouDJPQwZ0CQsSdeJ7qxdJwy7RfRsNfLknEhGYiM3O16bDp1E/YqgUdD0Pg3uYVaHKGeRSqhptjGAg2UiZ3D4JQk9eE8wbub/Y42u85GXYeB9g3khKTq8dde0/LjftsDbyPkz0ed9o8msihH+w/+bZ5w+ahtxWSge2P9rveGWjzXE5mO7i95RhPIeoTkaqcybosq6tK3L9873hBOEw8aiavsUc01nIR8goY2SIOuKgZbYKEM4VrlX+yL7Xuu2/51IZWpECy+80co434bJelrhzF1KqP/bW6KWqzdK1Rfu7zRMvmTug7s7ImpGIynXEbfvFPoGtpdEZpwxF6VDgOoUh89FjO4mIHZBoH4DNVdlB1d2Sf9FZLPWIteZmHS4KGVs/fRGZxkqAQzHc/ZdeITn7NQ0+e6yZYECES1sukBhQo8kU/BQUiSoFKmotiUdX53GEfXYSqSQj9kerYbiPemBD1TlgeasS/uXxcmUBNJOXZxeNhLvjdMzAaV5eIIOqzA4+9yKpXH1+pu7EICQ42UXDtNkizk7Zk8SHLmonYq0luH/UYJuHweVz1btiFFA2qZKqbtlijiSd79L+kfQFo+UJ3qQcnHLOteybJWB6dj/06jGttD0E38uNeLX9ofdoPmhYhGcsXsa7i4bvOQb3XF1ya4/bFnrSHnJslaINrl3aDVmNWs3fv9uL5I8wEt0AsE+ZzlZjhXQeiXR5yv7RlHKIGE2x8dI8PInKGuxJCj0lGeLm0gPAkO5o0T5/0fKhvjcbJikORr75Rg1SShf/xw/NCnnrmm2mNSkVfNQz14MVHSGEENB/hJ/HNRGc03TPhp5jArEOL4WHSJVHDGNDTzQRYcPfcZ2W7knpORp/ToRUq4TSzT6iPTNhhKXiTYQggRzNaqqKPEsALauRPW8jJsBZKl449ntl3LRFadxPB2hyXVWAkzdnKluW6QGyNSs7qQNdR7h21EPj/ijv4oIs6/rf2uetK97J2F+1CcuKBf/Dh9Zbq1q1zZ7g64En2vhr/3Lt2/YwJEjT8xHrUs6zqmleJ4CZypWIXRZzrKBxh5OKUDAUD6FMXyF8/3UNYmkYjNeoGPdHspcWdEEkbUe5mOjxxqaUIoPTWnIwah9CCDvfSt8rMU6lS4jqsCfWcsl7GUcIh2k18t7QzsR60ayEf/Xgffe9Jtq0TAVwTKdfAc1snTLYRsNsqUw+BW+cVnAR05Pf8gswWkXPyO75TlcIHroR68buvhexVGL4Vh1Rg4ebRIqjfLd0tlXQQ01uFvEwVcj+46hIGd4BNsKTtgcoXG/YzmZzzxB82O5FywTujSVfcjYZE0zknyV3BA4aeKw5oR0nrkbTG2nvqb/nwnSi4MhNkSD0wm8rkI9Ch+erdcsvPddXK034/C4F1/UHtrJj69a57B8y37W1YI+2EEIo79rfDVbsuWIRo9c1KOi4XsufCZ/H1fRUEvFNiWIANuBD2wk1qFY/Qdflb2l7aXjLcCTUI+PzK2DXVbWIsndVgrjH4kNJfcEpWBAG8787NGfkO8tP3LXf
u2XVKvRmO0P/nhT2Jzf9b7NRYoZ4AqV0oFyTtJhrqihygcBLnh2NzhBPpUqe1V1U1orzI7dPvjt/TgD3FfezyntGsBN8YVwXpVmAf8rzQRb5EEIYIZ2m6UkGO1xlrLZCorzUqjqus2jOR0/sUBQbttCEVYTgCQV7ohjK0PY0hspFDR/bKBX9hqbTrwbVsG8vxAhMYctPVv8aStYF5H+KlGqiaAUZG7adiDEyq54jHocySYsqnAH0Cmrit/Dw2GeN8OSA85jBE1fgHxWElraznQQPfE6EI5sLaj6TQ63pR4cmLJdgFb/oeJPz0prN7FbG4wmmB7Y6xQNYvqI8uldtPiov/DPyoGv0h0KQC9y67SecaZAYgZzCndy8IkWpxsQYSlh5fgptO6SFE9u8nct+I/c20rl2piX7bkLxAvtT2caPz1/03s0n1+vw7QhAU4OGoGDQ7tfkrmEaUEPoB+8A26JeEnA7aenIELxyVq+a99T9xjNNoGj7hp8DBywUbMFHK4/PPv+f937HXbu6bIeEXdbXl3xsvI9UtGJbFqr24yPSJ0g1F5l2i1JlltbXLARfts9om87VFI81WhIqCzA5TytcdzG8WDexPn8GD88EjYlET0GcF8WuUSYk9inuz79jGwL97RgFAKM/C6P0CGtGtGN1M530cOVDu2m9aOE/5eEZ4MEqkkP9pGW4hYOceTh1ceQ3t21jZpRGhc9b9X83RJ+LzBXTlf1lMWpwRnLrfkO7KFHWW600eDLjdHlP+ERirTHn1JsxHJDiYmMjntKCoFbPaAh2X0Y01HIP+Hf3un9qkmjxRZsFb7isAJ32qONDSMzrJ0CNsHazMKIOD6T7LCM8E3+P7Kpt2GEVD9n1U8fmghpic96zyCuH08Dn7NB/kfOjESNXjimRAApLgoMzMlf156h4qfpdOLp0ftQlVnZKbyuEEPLddOtluGz3L5wgbVidP4uHHmDnavr3plxXifi5NVIKDChgKkhtXkllXH8qIHPgs2JVPewMzfRyCCHM2EldKxIZ5WKzRaWaxz9bT31K68GqeUb//Nan7tqnLbMmVhdtc++3/eYul+whJ6s+NTE7gRAH9q6w58+tawIq54r0QBp1oZe6fN8mhE2VQwieqFM8Xe4L0lBonyYOguHnZTCtGUuvpzm4Ifi5yOg17D9WZqlTxaoqrRhc/sJ+gNicBPyAz68pMzhm6iBs79r+Ll0yZak8PD9/YU2hPrj4wl1jNOhyyZQGQcohhNBetFzrSHp0ZDApqg9P22iiW0IkqCQtVQBMXhC2TPJbjStCbAh2aBKXai9BxzQvnHUkLHRnU+8BO1KpXmIjavA4fp3IN9lkNMHYGmuqyQoiILwPBeh1vWyhoZxI3+s127E/enTLXbu8ZtfYrqJQ0TI8KFntP8J8JgBuykBJUJwCsB1/gwC46pv220e300n3EmSAGFSgeoBd6pGKShStw8vIb3PDsgN2IrW2bXMyrnqplYGrW+z5ueuiV4wTaHPYWoLGrOOgOfKT5hrjetiZ93Q1VMuUIXBhev4Y+u1clt9GlM/xAUXwXTFsi+4VKjga5gNpLeH6FNW8UK2jElPBm8sQ/M1COgjzBBiFWcdvxkzFfu+0C89WPcUIMDmNBT0En67Z/rZN3uovpcrzmt20f9HPT/ML+8x+aAksURWp9BT6/nkZrtpM5pN8YcpwzHc+VWAyU2ZsuCpAVoJj1SA5vGNrRKdFHXQassmmvEzn+Oe/dskO3SIiPGqs/ME145jqTtKxDgT11/I+UuPSVpINYdsJ5eEJcE7YZHQqUa6FEvh6xn4RuwjP5ATfM+I6rUPfPk9/T23YWzdMt4cF6N+x2OcVqBpeukprYeIfbIx0BsuWlYLdbaCpmu74iMm6WPbanWHAq1Wv0Rnx+f6bn7lrmz3LFzw+MvN/UYgHGf3Z6vscw8IBrFaCDIV9lqXAallTCKilenIVRg6b5CnRmTM6/LVYOTs9TIbbtWxzCAEUA6hz
H2jH4z68bM2vl/btAJzc8JNQ3UFLCuA+6g/nr1s656YcYdCmt6nVOY60U1IgNFjJXK2pLxqGrPYJwRtNbDa55AMp4QQpKN03TOGoUuhexvPCuOoL7wr3XuG5xwnsvWGaqyY0FFs9NEdEmjorwv30F/bQ+Xf9Xhm1CEqDoyL0uYML6LNV9vcvPUcJboSniOdv/wMx9FlsIGW7xNvFmJZLB+lM9vMw6PD20XNRI8C9SDouVrnJDgY05hcf+R9ovcEUp78HDX/ew1W/hiTOioOwAj3T20e2F+sX0m/ypGOh2feaXllSb3zZs3zgp/vr7nuM4vS7/lyRU4fOegghhCJ61MHIWTjykzWr2GbPLvp3ySGllVFFwfOD+4/WtDkcUu5SNXkEaABBy4nea6zc+6qYln21TsTaYh8seTcqVj2sY2xChpOH4m5OIoxpG2WT/Ft9Hza/XDFX+v4euqXn/UOuQah2mn4DnbDXzj6ajNb8Yjt+Fj28kSg0BSm5fDTH7Sp25BrnWCNDTBNzWhXU53KiUgnCZ1z92KTF/nd8epG9tWgshxDCtJyuPPrLtr6VXVub4brmR1//cKR7LPkXQ59RnfKBnwvifRTYSdA/FYZGBt2e0qI31y3dfqv9hv8ePedE5AP7SPlOOIjT0O/xmRVUTCNHcXl7SF3ttIDhWfQbc3ALhIItv6EzOYaP8VEb+xKoXNSorX2uSGduVisSo5UggUSLGC10YHSPKZ2JRP0c1COFmPJ1DnYpd/MrssgxMqsuZvdrLXzBZ87ZwXteUfNsqmNJ2ceIgDabOXniAAAgAElEQVQqJT+mOgFco0QrGZR5M6qjzu97ze2QNvZB4DZBuHG54oXE8wMzmmZi1MwQdclJqmqGjgLE6WjHeGbhlur+t/f2bANmJcLjokbA8ISJt1by2zY/ShrIM0dITaJ5L/6dfwUn4KUjPMNFPylUsvRmNQtBrKKG1LnZ6KXWBMw1Rij7TVcH6zl6tBkbq7veXDOT/EhI0GgM5aQ+tLFiUrydsZ1QuR85bBpJjHTZ5QGj4NQDRTyBHmbOf6xbPS1h9SKZFotRu+/8kWm4hJJkmFEwPFlEg1TYpQHg5xHDk9YpXAcP8tFbAualcou0bOFI8FUUzv8cgt9/7IRc2ZSGrhBsiRQqzjc5pkLw3i2VgvZ/I3Hi6Yp/ATIvV3L+vLPs9qhjE0QG2xBCWIBRMxv4dys9tkmYRM4VU9PZY2GOxS11DVmVyTPWv6xGE9KeLf+MvP/iI/vesXKZzR9sxw2WyoNAO0iLNC+nxClk+lD3OtUBZVOC9R3/VvkW4eNzgzoqxuSs3DKdY1MAL2BMXFv2GYlDcEwt5v1DsnXKOzCMfv78svvedAQIQN2fnRl+eyosyQGVWUyXD6/7ezBNlpWU0+KypAYw+qiinMHRym0LKBpiXakyOIjhqT/3z9FBSxDlz4qNePNQKK3lL/xdX/yBvRytc2GQ9xtUFN1gzV5i0rQd2ZS66Kd9Oym3Fz/xz4ib/uLIx/ZpXY9hMa9INdfmjj3kNOeVbA+bi60l2GIghBAywCrpgXXYGQ1W0RAmU6gcKHo9KixYvqzVV90NlQr/8FvKF4Jnruz6Hxgsn0/aVTr032M6SkOQ+a59V9Ndfdw/gz/M9SJx7tc0uC7EE2jax61RhFxQBTG7oJfQZC/B8wNjQvEl3GMLQ1QCRfrWaPSVAFONWrASk6lcBS37fZ+utbXtxGlK/xJlSJ+1TAZpaJxz7kCvgoXLA/szWPXPyLXuiSGzAA+WSjIn4Hz33vJabNh6dBfYOJHKjn17ef7OBPcV51f3LJdVnTY6CIprm9BpjvhAvEeiyCaFKyjWI01LoV3VobauQCrpkmNaTk9vPet6z5JGzv22HbKp4GgaTftxBS33t02w51b8y027JqRIopuR5p61q7bxNY1Mo2asrSVYyIPFjq2ZDtdOArc7vJvuBEy/KoOHD9q64d1IV2GAH9R0C/NwWpFC4BrLlg/k
JguQWMdS4/te2VBOy5e89fkfDt49+7zbt3uul3wc7c4VkzyP9n08dQLAYxbKI8GtsohQ4k66J6dRgTSPXqMstGJVARHfQTxVCJ4KgIqKzKYheNAyDZwQgsda4flHQr7YeGIP1rrp9wt5eLobavGfz59RffIKFJpf02DUgntbw9+MsiiQ3LE1K6kpFKQCIzn4233P0+mMV1ZLTLUiAl6eRgZprChegREfF0Es+vsXj+hw+HepIQ79k61r7loZlSCDjk1QoSobvwpgsrCsT8vnz12i0SqGKslYo07XCgHiKsH6ijXU+w1Tym4X1DYkYV+k2vF1Ddf1HfOiBg8xhKLDQw3yqHUzvUKVc0F82m+uIU0jqRKuC7HvmnVwzx8pna9ue0HeAidUE6Dl45HfcCyyUQwkCXdLOdvbZSkvp5GjzkF22b5LvE0IIUyQsnXl7MrXg5Ytg7wYNTzHOv2IZmaAcdVIGc9gjIKAWaSMnk3srQTBZWS8tMGTl/5KBO12roMY60TAvBDoCi6iV1zZtB/7y2e33fd+/7IRkf3F8dvu2rs1K+27XvC1iN9benD2+V/1Pzj7vNnzifCbNTP5C5Lk36+j43rLZnkqecnQSUdOjevk2Ej9mu/J05D86F46BxDXSSNIxJJQ8Q6ltUS+A0WrHjb3OAm8OtIyAYSCCQxPjIeHHjKiOp0b89c5nR47e70oiH2CJrRakk8vWPtPkXmZHqYK9xFpIbSdCMnZaBgJQBOVr1FWWQV2bnxswrh9zf6QXFQheE9dhfbxyLT/RsNLxM1jm5RcCXw6O8L70cCLT/38j+splU3auRne/kC4VVidSPkUgjdyyC6vczVmhaaQO1Jh+Oo8+V4EIzkPw3FORVjlqVf1PZiiSPSlwz251zPTdF2jhgydRK7dWM4ODSVNlVAs7n7oBW32MRYaDsF223tCnZE95J2m9yQaaPa5XLRQx37Ry8EYF90BOOYKBT+RrIjK0Hmv+g03HuJML3iPjJiek5wQFp6eXyjQveUPXREprgRHD86By2pE9r1GBGMjavBwc2kZIUN6I2w8NWrcv8Ui5MamFbix5F1iGijKwvwlwhY/b3tP8T9b+eXZ5zcaJtnGUn9Ky1pJD6/UTDvlge9hs7gQQiBec6DVKthbGSnTL+6h2m0d7LDCB8S1SDQ5rKQbnHxVZg5UqCxEAHmOkbdN1lc/V52L4G+YaATp/LRYCCGUDpAzHqL/UjnS3vs1jSkMmYxL4UjIFZ6QerqO7kGjnmQ4ZuRADSr2mREPx9Hvcy3l/K393DbV7reEJj4Cjm2jqTAjkbpnxw2bq2bJaw/SS5SywgMCF3xyaFoztyohehpAkjJjuTxTQqrEppHye8cgL+kuNkrkXFVf+O8dv2WfJ1U/Pwzfe0Xun8ORh85f4aKTF2z7NJT5pMOlDZZdXzeVTePzr2mW1HEUiZx1ODfSMYgs4lomGjGH9Gun3zYF1pvYTbU0/A+vPDz7nJUDSRbmPoS16qty3iZEndN6Ux4Mw0V1oOcW8n7DzY7tmQejdBncP/QOSBZknzOku/L7fpJdOyXRNWmNthUmQuD5qzgBL008qLlO/mC0pJD9obTlAkLsLJEV2pLwzaah39oClphiG05kY5xg9t5Fu/H/eHDHfY8b6sWxl/QnAFAWEGZcrfn02ckhJkvC977LoVwis6dLMfh7EDNUaKUbVIrNofflGJQ1JYeKPG0jUt9EOWORilz4G1iyntM5sHtUt/wB610ARwawEYWT+cMrlLZtr4yaiIqpXIA7q56oA4/LuVIB8I9DMV29y+DUGKbfnzi54r7/3vZH6fw0PJtaXUQF59qaSHVNaJp1oViAjZK90Me7HhVNDzOATycrIfoAFtjT594BmQKjcJqxPaUEi4zSFYUJnnJtJF2FaRwV0WtO2aZz6RhPD9ikiNCsVUokaF4G5QoNBo1oOaLOCBBbGnQ7xeeAw2L8lbdfzvGORdOoozRywP2h6ckxcnS7XbPe
Liz53NoRIpt1oWPYQ6NcAvmXyt6I2WnbgxXEWGGp+EnL68r8NvA3l5D6euS/N7pm17LSXJWA5nzDPz/B1ARIT5a84p9Ab1Sfp6doOccasauBr6frMd3R8dIRnhj3Czdk77LsICoCvQeUehZC9eGe5yRnyuk9qSOsL5jQ2y/78OGjoUV/HnXtnv9k9XP3vRZm9udlT5/7y2c2mwX0LRn0vOW+fME2tjI5Z3cArhRgJxXlKXKsp5LfZTXJSJcNlrsCRylwyfuhuBIKARo4IYQwaAL4irOhefJYKwTy8HSv+Fg306UU9vNIPMg9PGXTPaGHbX6C1JTge4j3YZ8kvf/G39q1/W9IZ3qcl0SXchgkVAKKC2N6Lt+RCBXOpgKyeQQPvwFPcc0LQKYwWG0VQgjFdZu79arfSHQsXuwZaIqdoEPwof3+LS9UeT6LAJR2lv3ey+3axh8K/o1efMyLJGlgTSqTiANRtmbXZR1HeizVbiwbnscIDw2+jhEJh/KOyDD8syCGIHmftCUFDWwCvTWVSyxbUVrsEHvHOcypw8Hy9aZ6p/YC2iapD1qEHoyQU+l1RchELDU1gyDUKM4IGJuSpK2OjwBa1rL0G3AQAMEYbvhnzIBpuXLRn02SHk60CozPGQHrk4xYo9/8LtP46giS80v3QWxEDZ4YuLJ5H+mdu2jhMPCCuYhKkwRgEHNCgFL5fR+jJ79OTWJbH9UMp9Mo+pL1z7qWUGcJ4IOeJ3K6Xjbsz7WK38nPF+23Cyip2dzxoZRuGQSF4qJM12xFFra8JTAGVod51VPhBHHXYk0OZTBUHKsWQwV/KAlrcLGNfCxKxRVUSyKww7el0SN4eJSKmp2SHR5gDruluzQTqhsWJMrCqhDFx9BQTAhVrMvxGzbXChymIp0oOJRd0COElRQuFc9yH47esy+Xd6QjMxQLgYpTDX/38O8LXvg2kIdTJtkJhP1bl03DffbYV2EyFH/tgj+3m2PTcDmex55/RscEryl3CFL1MNOowVRZ04BVsk8CMakr1IBllDxWxvu6Bnl4GLFMcKecpl+LMXtz37LwQttHuL+L9I1zzphWaTFLKkSRvMbzHUIIoQhmZPS+msrL3Du2F/jGsifvYmq3gujPQdfrq7WmbaRWzyvVAtJKWt3FvpAZdgpQpmXok9FEzgsW8VQi1zkUEUxHNskZkY2c12SqCpFxigyFlWL/pEXFzxvx1hKs9pBD2LkMemt8TzEkFAyJKhFMLDfh3q5PK41Qjrp54q/9omwRmD9afeCu/WnTqGUH4JD/+67H+vzlvqW4VPj+yaX7Z587iZIaG4fwYKs1adoGi3xc9VNeAhiZPCYZEcyzFSzAsbes2Rl6QTZ5HukvhoBLR177tW7Zpmcj0RA8BxNTGHkpGx8tppMLjur2XGrIEOBc3rP37K/G2sy/njGtnN8FmDiqEHwFhmJsXFRPGkWymeE0wibqeqtJdVQBTTAp6LUE/jR7fnVcCCE0HtpzaGSIoOsFODjTouYikHYTJyALLXZdnIwLReAXMFmX3/E15b8+MIemO/J7hV2k2bhUnQXKJFVw3MNMDYYQQgnpQYd3EqMpRk/gsnz46dqTdOM50atwDgajJKUjpFSuRxwxSQm5IIb8GWWOOwcRfyjmZAxhlCoZsSNg1UawOI8a8aOhweznzUW/t9m4+uGJzwEPgP25vWjWHKsWQ/D6MCfnqorO6vsHkpODsZJtQx5X/T1mdbSWGPmJzDKFJpGnHKJNU+AvT0XejxEZnx1IdSUCAKVNOskhdcQKgXREv+rCrOJFkmzKdTWVjZDG+hqC0q7b5/qSz1mSaZLleiF4rp2ftbwhQ1bSb1XYw+RN97VbNdtcx2N/EneGtkH5W42idw3a/96QR71v+ZNCz/e04rXOGB6mTxOKd3EJmymI9hufr+BC8F6VV1zSyJB9UJRThviClN5cIfhITYwsrdiS6q4LNj95cDlkRxqSeP2DTTwLrsopXVnqcG0EuroO
55fWqoJw3DLS5ZtCmymz0p7/HpW4pkKHS+kYJMdHglTYrCgcNEzRyn6oIRTwecej9sjL87hlm/Z3Lzxz3yujP95+xwuoxYqdwedbZjGUtrzIG4LXJidpPRfFyaqhhHVC5EYNR9coUatUuab4ae29xmuK5ZqHQd2Q1nIjBG8oJ3BKeEfVIaOUKHVCTrH5coRfxxUNqNHESJtEH/KOjFMI+cr25S64agoiJG80zABS/ilGeIr4u7yQ4WaZWhN9eEByTnm3AjBvI9Ct5Ha8s1DYsBelARVCCO0+igiE9HD81BbqtIbMheCAqByUyoLNfQdr9ndaJUn98pWVpfOAKkDMVfywmmHFv4CrMBDw42gJPZQIRhv6jXBp3U75SJqxkedAoz8rBbOgi3jI/3b1b933Nsf24//rs99311gVxs7sJyMv2Qp/akZT51AwPKDgLjzwp5SKhRt0eEUSk11sypocgCNEVgQwnYZmV6+d11Qwu9QmHoPYnhBCKB2DXPDYPyPLzftrfg2X7pkEGi3OISoTg05A9wo8pkHM6Aip1xKDXhgM2YEaPNg2JSHTowfLyKmG4WlUa7qZZ1XxCqwNcI1xj727PKnYi/YK/gd2sTHfqPp83TadDEQXf7rr8XV3ly2FvSAW9vMjS2mxv9CkIsIdUboEYyvpHkR5ULGzoECNQxfN0yasSMmc3KLh5b/IEvvThfnj4Wk8BpXEJaRhPcLAGSQJTiJgbHS/JXAe/zAUJ0hsjuLmGIViGlmrZp2MlPSkw7SKAV+AUULHeLPrdVJ/ZPJto+bxMeyWfoLKEY1eNkrpvRQIWk70uuJAaGuy6o2mJRg5nYHXc0PwYinrcv8KmM+JcVWHCem/mUS4uddZ2r4gNDCEFsRK1nXEMTzYoNouwTUoZOhdLDYyoKp3kkPahmWfJQF6fbJvLs8Hax5s8O3mY7uHaOpftS3d9b88NUPmn9/y2v6PGwZi/u+v/rW79pPOzbPPf/7UakzvrvnT/MtNwxdQwIbgyZqGCYOQOUtESDpCXkgjR5jJ2A+ookDBFPsh0SSPXqSCkbl3WYI69M/RW4PykJrUyh7CmALI7m0A/4Spm8eKFIK9TxG7VtnijAu5Vn+K7wndA42jPgSu3p+HfCwl5EyvlXYRgVEG34hn1L0KJZuVXjiQc7NCekroFCDmNy/78zKFNfFfNn7mrh0DLPHlyABDOw3/on99cOvssxo8pPQ/6Jl0PFwTbBnY08eyFnWk9cpb6UzOfG9dJ5dSFKeR68v0aKLP1CEA8Ir5moPRunm+0aj7jVgzxV1wL06UhZltjLAFVJ/QWBlJs13+m7ALjchR5ig7uP87SefA4GnCSV6RCqta3c5EVaq0ujByrpSlqgRjt28TOZQAwAJ0Q6niLfgJ8DillfTD77oS1P2m7YNeol6Q5++xUziixyXv/M7YZ0v715FNHd9jVWoI3jYpCOY0NuJVWrhpgnXSHXh+lmoSGKMKuGPkgDnxwRWlmrfP/3H7LXet+qFN+i1Bdv5O88nZZzK7nsiJ+rdH3zj7fKno3eUlEA5959KTkDYIVK4K58jhPk56UxDx7ZfLU/ofE9wHvJT6c8nHwrjorwKL01JjBR79WI2y8yuCxsIrQiNKQZ6zPHh4RKBncP/a//E3Z5+7/+KjMG+DgpmcORoBoJBW/EeXJGuRthAxugenFCNVCi7ML/dzzSvlmqa43OB3acxf8ILtwopZztpE8XbJwMgD0YzHM1RNojnT+1ICdaFkk/z5oaebvtKwc/wlaPqVHIZgc025MwqhvCuMkDpeEXEMGRnvitBmFM21XSj477GdxEJ//iI8ruGjIxH133PGoBYnppBl6t/xvCgFhyO5VWwOIglMP2urkf7F8zF6IWibJP8CTPVcXrSb1sSooZFTlHTXEloePe5bdmK14I0Ocli1x16XVZHmbQ/TacWZ+irm00MkZeG92zowi3NQSvdIT4FFnLX992hXzBpCjgjjKEMMoBT7EEtZ
3v+KDJ4iUhQz7THFPDPXXlwcgmbVSuOBn2Lzri36UBk309VVb/n+4sCiOP/30/fdtf/x9/7fs89/tvLTs89fDD0d6t8cWRtp7dT+2aG9KLlEFL1Oj/Kw7+O1LA8UOo8whgCuAKSlrLUZpMVOJbzHsOzeh/7+lRfnY0JG9fQKBB2ENZUP7bkmxfSNptTrFEBF8cz6q/Y+nf/GjJzq5iu0wf2aBo26KXSsCneG4dUDGddQ6SAlyI7f5QBefwSbpZGwNMI8TSk7RS33IDZHCxaoaBzouuKF12EbjT+llOzBilWedMXdf7NoPYXyEO5/c+zbvbNFzH7Va79HxxZiu3vJ7vfpU3/2Z2hJUThJj+J0BLPByBnnOyvGJ42myqZENiEPmV5UXqXJEsgutWfYHIzaM3uP3jpkuhh/UYwNdHOCqBP7z5WX65kD0WXh2M8TDW7u50SPNKSmtciGxr2mJ99aMWebHFML4kl8r26FNXnJxWxP7OWGCM13Jn4im6g4Xi16Xfmib/foT/yh7gztnNHIUZxOD2k31WXkFapJhGcXOLo+DMLJc382yzdsfiZTMSq756fCFBNZfWx/p82NYyNq8IzqCFXKBk3DIahwpzWnpF+MGrHCeffIo8svrpjFrKyTzHsWbu64a5tIDLPtxJtF/71ewzbCL9pX3LVyisXcLPmQYA3f0412fGqbZiJlAdlF/B3y30HAzWzMlpEyQkZ8FENAvgtlN3XPgdcZNv09qAyXfmFrsfP7Pm5cQHsKEhn+5pod/LH0OcoiNZYd2efR0vxVaYHpwHmUCQIz9j6TazRChytqydjHWSTaQ0DlRFJaaVULmlKh4aK4iVizWjcY6d0XLywPYOSGPy+HI/vxugiXY3g/bCT85088YSidDG3SOEK1IvEVGfHMHXWI2tcuHSVGSEp6JtFLi9W/kmbxjiL+JCKVFSs2D2NSPl8xxbBqEkgPU8ofwZr5yiybNIUHcKiDQKxIjsaPGJcubSVOoG9s6f9uqWD7tAjwcS5SvTCVwpHH4DSgkXOr7OvvWbnYE6uSht3Nuj/Uo6r93XbPdCyJDEMIoQEANvVfCB4kPZj4jXoBZLxPsIi5K17wkEdo3BEZD12WRbRnUvNr7dbmFWpbXpppubwvEYdTWvKw8G/IAjNHJ5uQYN4i0kB/fOWR+14D6aiyuJvspP5cAGKft82L/Hj3xtnnP7v69+57f1D74uzzf97w1342MPPxCVBsD7qeRexZx6RZq+dPShGYpJFEwNjvpAcun3LTS84RgNzTQ23kagdHCQWdkYPzpcKdS6OCil7r3nftPRM4ILyaprsKSKFNpOydv8dwsxpN8zA4bzRkFCRZRfZFcQKOgE7BsJANzYcIXV/1i0KPmH27QvDpKDbw1Mga02JDMZq4Jv11v2erzxGNwG9p6m4Go/3GmmeCG+MH1Auuwpr7p/VPzj4v3/WC88/37p59ftL2uLw3V01J/PqFWanK1syu0UNpmltGFEfXiTLPVclpNdqm/ceRdHw+xT0a9pqOdT6EEKbAOs4jGadzUgAyT0QNaaSLMeHOiMyhazqKyHReGOdH10w3jKRiMCBdOQZOJIE5HabLHFYmK6v4rw5tj7Ed0fWqNzo2cnZtQTT1f9X8iV2Dnvh86Mv2Dqcm1OnU62gIq+IKvN/DoSn371//wn3vac90qvaWrEMXP+uKw4vvEvujUZxqwdZpK+sFJ7MovS2GydOtGgX5x0YcwwPbYiBePwFjmSoFoHRXpeciHBinWfBtDO0EbK34SfgJqjO0HPz316zc/G7NEzn9+sTSXZcqttH+t4ffdt/7n/sGaP6X7/zYXfu9qvHwLGdtw2RFSP961zb8StULZlZ0rQoIbKdl1nWmDM9AABf9AbgXpJTZYWc0YEBjggJIS0Zdustfy6fgVpQtdZaSaw8hhHEtXWgXEf0pHYA4S4XWHIzBhfPLvMs+aOiMCa3SYui9/qWEdOHdtq+nszXzLC1IpQnTTMVD
u9bxwUunxKseHpPa0ysETzw4Xj2ffySEELLAp21Ky5abNVMEu/JyPWi/i6j9/2XHc8gfonRtpeLP1ROUs19ctrPfHnhcwwmrGo+87GLKRDGM7KVFg1AZq9lqQp0Mci4dvod7iGFa2pu/c8DRB/7PtSOKOESJKirIGE1VUeey8qtzQ8hZWyAClTPB36asOxVZmtaCIgRfmaxje8/2N/Exkw1//09Ktoc7Uhp5iIPGfV9f8BvnCN971PGWF7sStAWr2pmyXN4mdV9C/ytoXPqk4x2JHz82TF2j4Z3yAZxyB9cSpUSevWzRK4MhzkS+hUoyceqIr/rKeHg4X9VdLR87H/iVYFPGUFJCspLyoScSYqCFqECsH+5ZXv/DZQE1FpFv5OkTnovtvp2+f/P8XXftaN1OcBmhjq2BF+Dvr1v12JctvwnzsFpp4ITgiZ0WUIY3lfLyTCed1ZcdnzVvTgHE7yUqKPA9NggNwTMqxzzMWYTILteHgpbGorQdh8t2aCrP0hvhva5Bj5DKLcGwi1fUNXEVDIIXpFLkHCq4kvgPTWNy8Lc1pcW11C7fjk9FnpF/l0GVk3LVsDtz98BjAQ4umNDWCA974v2yZQqiLVQQywB5Xq360CbD7ZQZR1te01ZXbVKm0hDYpVY06knunYjMS+vSHUKQykgYzzFWbSnnnofBhqk08BIRnojs8O0j/D46uXZ+pC3bU2MlnVeK54eyL1bSrE4bDS/XqDT49iUkutQ9ex/N27RrAFO7PaDf1TBqITx2p+6rHxkZGi0IHvXIfptn55c7nsH8m+vWO0YdldUl2/gkQAwhhCn0dLVi76YNVBeOsDE2/KZYOEC/rxUwTz/x70KHTCEDsREvS8fn9jW/gZhDo7xKCEfsi0RulkYOLLj9rpcMBFVpE8LNPbNAt4+9MHtzzSTH7bp9/qjxpfter2absiWJ2wNYv5+0TCt0xn4RL5RNcJYE2d7Fpp8K4JjslOwV1Or65zhtwJM+8ofIKSuJ3FQR9IqVSjsiu0S5JxV0ulHTvG/P2LrppV3x2N5zXPcHhXwtxPOc3FYNMQcD88bGk2oIUmklmmqif8TJjfTSVw4FV7JKhEzbIXgjpH+RkQOJ0sKo0fYUrJTRc8sKtELb1k45uDiy6z4VfQAw5AcN76hcL1o66r9etohre+YF/097N88+/1/P3nPX3l81B+ThqVmjJ03vLQ8fmsw4FZwAowRalcNrDsAcIV5tPPb3aL0B4wCyMGZ8zqPB07lq78FeV5q2imHSOAbS04w6ZIC01ak4haXn6A+1IqlLsP06gLhE3WiDaJ8tPrM6nUzhHAPSsFz0UZAb6IfxWLykz0/MEy/Bub5c9l1YWd318b5H7FLXXK57L2kVOmqAcnYFLf/t0xv2N1JARFJFzWSwz1bn1yY0ym/7559eTC+JnwECQzborPi+WajfKMZQRtTgOY1gPuitxLhTCIbsXPMHftJECgfU3LeXPEiL4bePn/sFvrpmP6AETWzO9q9/9c2zzxvf8hvhesFCH6s5r+1p8KyiDPayVKQ86YLNVdgvCZqczKTCCqf5QtXuf3TiJVuhYvMzFOI+ejO5tkTHUrphJDwbnN+R8JGwSoJVWsoh07mUHvNdmKDiLyvWeg04kyn2yPxRjrjqD61e4khj0Q1BCO4UagDPcf1j+7zzHXE4kBZLRHhA3b7QsjOR6IyC0H7FZ4N9ykHzpFiX3qV0aeMCtUfSMBYl7DRURbgAACAASURBVI/63iLkuWUaubngpV4FC/DPLn/mrtFDZmntRFqvVN80WXBy7DX07ATd5OU1GfVkKXoiDYLXHi6lR2bZ2Dcha13X6Pk7FEzB0XBTLCCNwam8hmNwF33IezI6MBMCVjab1GphrldaFFWv6UzH2H1ZSXx7xfTXWsmTmrG/Yy3nBcifrBgf3C7y4I963jDqAdD8RsNj4/5uy1JOx5IN6cNJZwf2seiketUmgQDsEELYQWuMkpSz90BKuHDd
JmgsFc3k4TmV80hc2ynsCjqXIXgnoCC8cbERz37hkGtKKwOhRH6JyaLk5JBnLVT9Aq/V7KkXUfW01fWRmu9v2Eb4T9//pbvG0r69if+7f7Nn/Dp/8rbd4wf7vrXEQrh99vlixRs871bNU6yjBCMvLv3VkoXUFUj2Hx4auPKja4/dtR/82ipPbr1txhtDgiGEMBqjSktbdESah7IyxIVolU0Ze5f9cELwuevyHgyvRnoVlXp3gxX7bsKgQlSn0EJFzfzJdid8ncKSKE4PBHEz4VVxRpN4ikyZHb4NyoJt97VwgpTyglC8z45trmmr6C5hI8ZEM0zanaJkBxsQSni3TNVHNrP4u1LZP2MDJa3HI2/cf47Q+88rht9bK3lv8y7Cl0o8yIKF7ROLf19Y9tKRDsegLPxfoNxXNtoxogs0OFWBsrP8/8fem8ZalmV3XvvceXzzi3mOzMihstJZWVWuctldVYaCLmObRlbjbrkFNEJCQmIQuEGAkKARfEBMLUstIbUELdE0ctONwcbGpnFDeSi7qlyuIefMyMyIjPnN787z5UNGvfVb//POqchWknkdddaXOC/Oueeeu8/ea6/hv/6r44mi3ZpjZCHWRoSqZvFw/N4g496pgHw4+gRzhxBC6zKcNjEm2Pl8tIm+gcJJRIOnsuUntCNsJMY8rcWFVkZiemsas4o0Vg2GTF+AP6RHGQgr7O8ePHt0TM6pccX/lu8eGBjvWy9fcee+8qKh399t+9AsK65yWOB96WzwzAlLk7136PeyiyftZSg25+SG7Z1cV9sHPnw872Mvk8BBbssW0AwBkdww2ZlWzGmapBo8BKseXhFEPPhDiGyft/3LiaBJh7Ixb6FE+z4iJE9du+Ou+43bZriUCs+4c39u8+2j42tVvyv8S6eNNZklgN/oPOGue6NlKMzdoU+j/FbHMD09WMgXmh4z8OVVQ7o3xEU79YxNhNekb9BzT9rEZiliSfqnHO7apFHQMkv5lCU5iX1UOw0z3Nw96d8Tva/hsj2jco5EqNbIK3Ed2CPLB1rxZ8fTanI39kUQjmc8VWVCI0cNUt9fzn+OCpjjHuMmQWVCfsdblyzh1BQLhU1HNUrkutZL6oD9uWjkaMn3E6dNcV4UjA0J2J6u+vBS7YSdq2CXmcnu9O3upaPjd7veC/6rZ75+dHz/hOEQ/uTwUkgS4hpCCOGVvuGH5koFsWlrfNax8dcWPExPCW+i60FIo0EBmjSQq/cWb004wwbHq2/630EDnlHdELzeiq0rTiuuJdlw5w1Uw07Uo7NDOi0akWNkMy1NrQ17L68Ir8NDuVj1//8AHBLLkqf58aZVJ58qmnX4loQ3ri3Zump80jsS39s2PM6ZpnfeDxHxYUprreGfg8SGl5b9un11257lwmoyGzRZmMnrE0IIA1RKKulhh5sBoj8KgGeGSfGNafLIEZ7YKSh0pqYiYTJl3yft7cEKIpavN6T2fwfU8FtbHjX+uxNjXv5+zVdx/MyJl4+Or5Ysufy5hu+qzpCeeor3B7YCvn7dJtOJZ7y3SWt9KKvI8SYIidQBqtNYgZYXNmVXZSAveIIqOfU8clDATMHEPHqIYngcgymp1+VVu2awku5hG4rhsnARsUUFDKMPgr7/qIQg4LT+Y9ykNEWRFCUKIYTeaURu2LtH1iJxI1OJCLhyXcc34K9jKjQGRse1GqFix/HlFVOWT697tO0yPKaqTIiNYnLF4xiWXmluD8aS3hBCuADXn01+Qwhhe2Jun36OwvTZiar3Fm6jV5B6s6TRZ+WQGqbcyPueycK9X86DnnSX5vCMpAfeIgjnTv0eImar0kyWgGPRD649kTKxEyMK43sq2LU0orrZSdsxB2WQTe74L3NpN9GlaZ3q18GGfLNrL2ljxc+pP2lfOjpmViCEEL7SsOgM8WoXhHr6BixCFuaE4HvPjWQgV5FF4T53q+WjOMQPKZPzC6csGPHSlgebNSs2xkslU3oNwQhxLdWkE3wO
gZRWywZ8eMHfo3rdFPEHaUGUDlpOASMzFD9fIROo5OQwsGVJaQ1bpjRKCMvXZDWcabSOPQ7Bl6Zu97y2+bu3rPx8E4CtL6973oGfWnrTnkNWym7N7vlPftFwAtq3iwbPVGK5VOhPNv2mcLZmlvzbbfNS9zp+F2PuundeGnO2kq0XRgLP/KFNrvs/7p+fFnRJ0pd9MCoTEV/Z89eV2naye1qmFpL2+YECcBnOZuj5z443qx4IDaM0UkLtjJ1P4CrRlBa94PGKRATQ0X2Mvlqxhof7yYYysfsx0j14V0+s2U69LqyvZEFXIH8pb1FVltKG4EGZO0Nbf2SYDSGEr6zYBvFjJ95z5+5PzZNmqvsXN7/prvv9tjlM6uyQSK0j1TZDpJjdRihjnNbk0PEgwZCJNSCF3aDVQYsgNNz6ABxrexni1dR4IEA4BszG9GblXyy137ABLl3xc6UPfFZpFxQZMpx1pNqUD4nG7LTpFzUJMp8/eevo+HzJGyv/4pI523szvybqaAz7qy0D5D9XueWuG2Bg8zLhClDQ2rqC8uvvGMj/qU2/J7FK+mrDpwKerfrsC6Uzxn6O714R4HYRz6hV1zxXr5syLK3437IzBh9cJ8V7F0k1eCr79uX9E+KVDzixUU4tDMHFsj3oyWVvjZ4+bfgYDtAFCQP+3Lq98PtCK3u9ZyG2N9vehSLYmWDh39vzjK1PNsxFOykkGAw7Mu+pynGG+2vonWWEq0IyweZvfPnNmg8LtKD4CVgNIYQcgV8adcHr2H7BjBxlOuV1fWH/jTHQPhSyJ4cQQn8DhF4yB5mqIoN3CNLbpoUFu7d4rSUm9ePp6zUaxemhm9QymJaV38VVROE16AbB96VYBk4/4hqUoIsbSwwvhb81CjUroBoDad6Doe9nRYK3sZSwXl0xRXpKQor//Xe+cHT81WfNqDkY+0H4X3c+fXT8ldVX3LkXKpY/+oOWrfdTS75i5IW6GUoPpAvrRsWcpLYoZq5/12pDezgxA6PdvQmQxavRdDN7nqUB5T8uoXHPlN5EdxdGDQXrxKxHGp4sQrVq6R3/TqrnTakNhQ/ONWPGO1HMFQlTC8IzRqegfN/fn8ZFLeUlDQOajErZeDmyv79UNyf8uwNPoMUimz3hOninY07zU00hB4OQ5+1eRzCzZwzvuqZcFpCmlLGxqWkI9ly3W35d0ZHY3/PP76I/Tbv/waG/roD0ZW7n0VMBj36lQjKYBx3YH8sn/Cy5uGJhu7Wy3+xpzdHgIelSCB6Y/IWaT0f9BYQBS4I9mWKlvA4Ay3f6l9x1RMT/3Ruf8feAJiKW4dkNP5l+fPnG0bHyipyAQv+/7nsMEtN3p+pmEJI4LYQQctfsXO6Of/mjDRvH4r4g4rFIafD3ZaMFNjsm3JQb9+y3DVak4ozNYAWnQ46eGBgQZe+kbO+fTCgx+xhlXkVPM/ReSitbVhI0RLVTN7ASIt7KktyDbaG4EdeIEf24hqe8l8T0c2VLAaB2rJiKCQzu6/fsQV684D3RL23aWj1T8uF7Vl+dLfj1/he/bMDOJmgo8qKEbqEE8W/c+6fcOfI5/dLaHx8d/077k+66T9VuHB1/f+xRxXfA3L5Z83ptG+1vIvRcUv6XfEoa2RmtKZWu3HNKhwsY9YRw3beW/bOyEjAnZVq9U7hWCQuZoSUdwzN+4rPJ9EhArpVz9v76bVOKrnlzCOHwWWQktgWPCr3lKsKC59B5qWcGytWK58lZxTyazqV0HvvVJ4og1cz56OXf2DGi3E/W/Jp7bilZkXO/5b78+bV33XXfR3ulizXv7dwLtiZ+75bvbbdaR48vOAurVR/hoZNfbfiXPYFj1GvZeyo3vKIkPQ2aIfxQSTV4+pv25Y3b/gUfXMMshIfZvuGtuTfP2lcMO34DyyP6w7b2/UuCXh+ZkfDGto/ifPKkxfr//PrL7txnqzeOjn+qYhPyixWf0uqj/u1fWP26O/ePuvbdtKY74hrsIN45Fs1GfEFB
eIRev28RKjLCHrZ8XHeK8uKieOrj0zYZxsKZUka1Aj0xJTcjyaSW+VV3EcEDvqBxx4dk2+eR1hNvtvbArh1E/v3ynsT3RLPFC98Xd8nmav8/VYA1KxyTo8AxXEdSqfJUKlcc5kEYzEmI6ADGArzNMzIkjw8qnNAVRlv2JbqwaYqTTRNDCOGllqW0vj274M79M5svHR3fHPlBYEqYeLhzkh54oWzK/t8989v+u4emtP/BgTkxn67fcNd9v2/PNZF1e7Fhv20mA8RWOMMcNlAx5pNSlCGExE722qKDRk5KQebHJgwy7D0DigmJDB4+Zb+rcVMrTe24JrqJmF0S321t+b2mDY6zWJPmN00H56rotH1PoqOYAloAQuN171M+k/Fa13LTbHh7Y+jndm9mTsBM0lHv4OWeR6XX/anfpv9c0/avB5LxIDbuhngqS5HtgZfQ8uL70j/yYGTzuZTz9x8hlP2p016xEZ/6jXcuHR1XBcpyoglC4Io/t4uKribaK3Xe81GoU9dsksykrD5NUg0ex38wFsAx88zInSoZ1Hj4aEGkKWrzWxK6Jmr8yxd8hIe4l//q1a+4c7/4xHeOjv/ppinYZ4t+o65iA74oWIOfa5oR1UOeZncmzLHob9IW8kKCK9sNH4a9tWS72s2OHZdlIhD2MtbYeC85h8mOv9UHyTXK5BJRPEe/ArrvLnLc58VwQe+r2pZofnpH0mWdc8ttAuPFM3g4niTyU6oaYp2GAjSlB68pLQo3y+ZtoXtYJ+26RNMQZeDGoj3MGF2KAf+oQ5TnByDm3Y79gG9NvFFDtmMlQbuHHPzpoo/w1NBL67f3LCIzrXvF9i42k59ECiAEbwxVIlvTX2s95a57CuCoWwP/omjk5GRzGpJGHwbnTJpKkqFCGWH5JyPmipniu1nEXlrOSIDNq8zSebTjGPl91BmDYykzpuExBKdLTqibC/h7cst/+QSsvaVt4K8U8J/C6eLSk7LHdgDuJZbtUlmsN0h77iOuDJiP8WW7U/9bSo6KxQ/WGJt2N0a8ZcL0sIKbu0hTt/J+MyDmtCskbwQ7P3PO1pWmtEYw4JStmfQVxMlFaz4SxNRXWYDPaZJqjdTvAVx0QWiksUE+c8lilW9v+/JQx4xc8Q/2NOr92eX1v7zwa+66/2H/80fHN3reah1CqTLaE0IIv3nLSspfWbYKq6sNPwnZJkLxQwx555G370oS+rtdr+zdM2ISnpDwyS0g+pnemqrVSh1Y8Qu9dBd03E1pxHiH0RP7f62SoNLSzY+qPkdjReyR6jY6otfUFWWS3p+hwcNGhIXu4mn3ItJYNBJjmxnJ6MQeZWpRPzdaBScRmlLe/wk/H9ZewVwUin0aK+0ruN9+8masvcC46bjmmCGEwj37QbVLNpGeXPHr6pNN8wBPF4XGoXbTzuW9Qh8i4voL6I83lhTATYwjI7EqX6hZP7yDut/hdrBhaFfql9CLj80WVRjxmktJOdsfxFjoWRQC1TgWA7YGzFdaB/KPSzifCYOKPStTTkv+N5LGQrG2xLUNUR1XetM7lqUXbY5pP0n3HvDV2rfMsTpLuxgiLRQo+9J9q1g6u2oXVoW743sVS0/lI7/9MjLUxv7y/7b93CYPD1NHIfh5WhHSwHsA77OKakuKfa6AzPBg5Mf45X37nTPx8tZB6/Dym5YePndRQGmQg12xihEw4b42WxWalqoZYv3DlN4uIqkGz2ANIFQJKpAgbaVkoadfePJ77jrmNsvSnIRN0VjJtJzzk+lfW/vG0fG2lDQfzOyFvCf5nK8VjfCvlELHTc6b+31vMbOXDydQSVblz63b71ZjaA8W+q/ffd6dY/XYu4f2/NqCgjipnOSdGXVQLAk3Wxo5uRReA41WuO9ibyZJswzhopTaUomV4h0RxFzbsXcxWP8A9YYfkTAqwkjYtOZ/b+UB22X4e/D3d05KL5lBAhBaXgoxPDGjiSzoqKaMUbAjjTw4oVE3O9aNunPJvvCz62aQ
fKLh8QMnUQ6+LgjQIiz4zswPUA7nCOSs5vy6+gQ6KOeiV925bwGnl5bS6mGtalsZssxqKnq6C1bZyaMZJLFS7AQHfOmd5CqttLX5cQl/B59VDRfHXSO/Y7Rs40un4v2b2uFk1xbd5KIf0DGMIUXhFw+OfzEaTWIETR0/Ns1VJ6D+lM3hu+g/dbnpHegeovMD2VSJVSV2dFVAenSM1ejYATiTrSRUWFHVGQpVCoycRtGvTfase+3A8wOxgOHZa1Y0cLelHdER0Rf6lUrTvq/Pn9bxe94IoPTKkoK+kiXV4GEZnoYnZ6umEW+0QBrY8K4iAbxavUTvite9LMyrOeRDVqRk6HzBIiZXJB5JL3KAiXF/zd+fk/Ctoa8Tfm9oEaVv75nVuiEkZaweO5Q46V2EVnSCNkDARtBdUcK1U3gUsZA3jJpS3xuEXKTceFffEwWOyM2kkrz5MfSuIEzy6UyqAlh8gMVc9tOOqZZxbQFdWAjH0G1Ys+QxU9AveW3YETiEEJo37LgLWqk0Lh8NyzvSwMrxxyH4bu/6jFFKFCq/ZgqXofzf3/UM5jQSzlV9hOebKONdlUoQdk9/gOOTUs31dNUMrE9L6e5fatrfe3UjJ/2NztPuOlZhnit50AnZ1B+II3TpKYsm33zVvF5taEmjUlO53FA5BJrmpKFUv6dW68cvSU07JwLsdTaI/gw4arE2HpjrxQNgEgWTNoVunYsDEuPseSilLW/VRCn9vvgceo4FJ90S2pqIk8/UaEsWJDMI63m73/2R58nZARXL1aZQOsBIv7EvgE7IcyfMUbm25iMwZ9C767eu+2baL54zQ2al7JXSNoytGqAhyrVz74H9ntyeN7bGJXtPBTRkrbzsr+ugH+N0mAzpUEnvlg6HR63dfMUGtj+2k3/n1c+66yYHtiv82LM33TmG0pjCKV70k/Pv3bTy03pJvEFMEm3URorvzy1bw1CSEIYQwgqat3y26hHr/Pvnl78TkqQ9t8nbLnhPcQOG2E8uveXO/f0H5n12BgjXCgPl+KQ941yaq5YRao3R0mNhciPsnvTKorYDYsDkdRJq91HFcOgVGqM/WnpOiVWhJHRSr2w/uuX+UQltavaRitalFQgaCGprBm4KSiLpGhRio1MguaucE1A0cVa9YXL0wRk54mnNaACLlji76Y2XH4iWwbKC5GrJV6ucQZuWV8fe2rpUOj4ErtxXN0eWc/iLr3zVnft3nv3do+NnyzZAX62/7q57a2wp5W90PQP7K21LgysNxa1tWySMSAjrRJgVjyeSDEEYt5n5F36r/obdo31u8RyCQg+G8xn7kfV3/Ptiewd1lpbeANWB9FycAWTsGMZ73oCMgPnIa8sWkmfCORkrxQeiddr2he9oKI1yiffyBH9CqonGwUqIyXtsIf10Vioch8v2O7eG3hDndz+94tcj4ROM8Ch/3bmafd9PXXrbnWOKbnfoP7eMNNnEVTf7Obu8CrqHgh+fGTIbM3Rf73zCR/Ny+Nys8+iZgFSDhwpXLdop+mE8c9EGdqfuN+PcSWxgklMkud6zJ+0e2qeKjc5UiOGh5RtCCLnINp2DkVmqpZz38i4CsX5CkrobiCAx7TaV3eP1vnl5qhxZ/bEuUSh6wc0q2EDHEgUZoJFh3b+M2n17FjV4mO7Kk51XwuldlIXGALh4bfOI1wmQk6g7welMAXyeFv0XUPlxYx83Fy+l5ajnqUOnfj6wPUDttj/XOwPF35HBhtAYGkg1FyM8eUktDtaPf8+6hmecG73k51AqjtM1e0m/sGkl5JvSeHczbx9ckTARU1qn8v5zbANTCsk4rvNF825zTwm+Z2jGEPl1nq/6El82JC1KDobRWI3Mbq7aGOwzFaZ6kilgGcc5VCXfZ38zuYJJjaZFEKrM4QZ0kXZLR0qrdsuvifYVApr852rvmYIYnIBzJNtCoZ+sw3hT1yA0L4YRnlFLzyvbwAEJkJ/zowWSyoOiV8h5TJDe3Cvht4aWImI6
WI0m/l0reEfr+7cv2XUn/fOTb2f3vh2vn/Lrr42S3Y4wLdPgeSCGUh2pNqaxlGm51UNVozhak67pfAZV5gd+rGZgZNbq0zRJ75bOKi1xtlc2LSf/b5z+v4+OD6R6iXiWJWmD2zxjM68O1F5NlCO5d+5Lg9C78ND2BM1+E12Y3+3YsVJps4fVd6a+PQX7Z1FWKv63/OyJl469LgRvwLGhaQh+odDI6fX9987heWjJJT31kbDukoeFxpAqTqYv116THjjPgssFhSzjhp9ojbv2ue5pwSBBsVT3xOsB2Vd5zx6sdyq5OenHJY6sj9iNsSw6vNdYuw+MvUZ4RngPhI1UfYAkdC34EPOWyUTv0mKyhuvv4fnlHjTsFItBhcvj14dn3HXfgCGwK2CJU0CA/trtF/x3oxLnCnoUKSfIaQyekr1dqcmAPZS/v+Mj0Fx/WrBA1tq3tzxeYcIO0I7o0X9fE0FtjWwSJsA5oulL8rs5JvIFkc5lcFOhRdBU09MbNgE70naCbYcKt/2C6V0GfQkqUocntH8EjoWqgVGdCLxY5R3/HOQrnDb9xO+jaW6QFNlezxQDIzUXGj46c3/i9x4KMa2M/rwrhJ40OtS5ZuCgIcYQnev6mlmLQ3Gu2zByOtIKKYeoTl5wbbtIabUObTzyq/4ZT6/Yb7txzyPDa6u2r/b2bTHFTJqePXNBMV8p8shl6QparpdMwazAGroi7It57M7l4LUqAYnFSDQFhFUbVwoCAkPZ30DM+u26vbj7K8dTzYfgS2Rv9n0+h7wDLfITSHPPe6PkicxJ2REDyhk8oBtVo4Z/a4qEURwVhw3Az9ZuxV0YJO3zoozwU100RkLvBdTOFwTLwBLz/noyKHGwsXhGDoUBugkqTUoPpNM23pEal/XbiBzIz53A4Knfxbw5K5VSsLdTOz6nFLoxdambrIvqyf3vdW0i/W7ROjzTqQjBNwx9/dAr7S+dMP1xpu49zB4aP770wDB1fzLwlZCMspyWljNkT78E6uJrdR/m34fV8Wvv+IKCk0t2f22weOd1M4Dyrr9cMqHeynV/boA2DHxPsTYfNLKVImABhM9EWhJt/TClkdgVC5vOgxReOLVO3LMQejIFpUUE/oZ2GOvXR0DtwD8jU3cz+W15GEP73OyFU4kUCUWpBjjAi2b052TRe0W3R1i4KXu9RiVZ0k8ZCSv19QNbx02BkGx1TUFVhd6FRuvmuq0dJe3k+s7Juy6i63zE6kfZ80gKmbb/qaQaPGUap7LOtg7MY/u11qeOjp8oe4XCbsf6gpkWYrroQCI1/BwnjP6tfbD4OVrMyuzKHP+4IS00NDfzUEbiErOX1ixlFrLMPQQfAfv1io3jn77nWV8jgLnmstC9YlG+JDum59+SPjF814rNaZ/HQicniOiU1nnwJsiwFVv2Lqp3vDfeuWzvOwID68r3kjksFkHY0kE7YXMz02o2Th01hpZuANSId6TjyfdABtsQfOUJCSYHgs1y1T8yZYkLU0AzPbsnEEn5iSWf7yeb8l/e8IrzpYHN7y+teYXOyqmvbNoE1h51JPjcF6TvNkJl39o1Q6lS8DqIafbPnvHAZ24Y77T8IMzR4mCMtik54cQqw7jtnJFNkkYmHRrdl3BOyfwWQYix4ZqIGR1t0w+MBL1/E2DGmsnGCtfLZFkKO1hsIdQdVItzwDFG0pTS9e3SSlmkrWeyAW+9ajnn6ZLNqbakhNqgvmdEJ4QQvnl4+ej4/Ia96B3h2rmHIhht78Dq4W/f84SCV9ftngQ0d9sCHAYH3A2hmSF2pl3yYzzqHO+sbuV8gIFs2XOBAnRAJpmDzpjLnJiAl2fa+5AwPK4fkGA+pnfNGv3Vyov2kOLhUDkWJQRWEuVj1wmhFD7XlQjJiBiePZ9TXAcrJyNSKo6meuIHb/vQ7lkjKZJYxQoypqzVzDu8tePJzXhPotln25KzhFKpPPDfrbwdFGI/mJ8utr3yJRHYuKGAPzumwmEqKoQQSi1G
ccS7ANX/4VX/2whUbty2cWz9mI8KLIKguMg3TRSMDdNHxbbkqVHBNvK6wFXIOSZn8fqJm9AmjWTN5t6pbOn7z+K6gX9fNIC0LP1gcHw0dm/i118bDz0VJ+AWqh93pSqTxKMkM1NOE4oSAy5hI3hhzRwabahIAravvedBy+w3tFT1G8vZ0+Yh3LlhmwIbt4YgmC/Zg2mMjlJwOozMKjfMIggdn3mFf/jr3JwSp81JTVqg4JhYwIJUWE1W7XP5ooJh4Twsmc7VSPp0RG9EgMkEPmsUatkWPDu6v7bjddipihn3T0lH4K+uGSyCgQIl5mRU8u2OnxCslFqWfowvvWx0LHNQOkQyVoRT5Ev+XA5rYiaVqWwAzkifXufS/cpfBiM54ruQWxRrNj75pZRonkh6SgvfF+OMYBd0GC47e94araLj6UB/N8J2dQCbxnnv4uy27SVqznLwAMqy4RfK1h0zLpZPWoitLIZWbwTiPgn7jWA9slS8/0DA2afNuCqnGD8q7UNTuN1iMjNm4217Vfou2KspLyBYQhsm4fhITQghMNI/UR4nrnPeT/Y9AppjxIZ4LJILqhQPwMOw+eit3j4qKXaANcBUaYoxMVwG7kl6CjGCNi8kGxplVM51TwudwT07N2pK5ABjzwbjy297BThYsxc4FsOL70sjebvbG2gp6AAAIABJREFUdvE/rBkp2lDKudgzp1aUKgus4+t3/KZwcsOsuS3ok5l43K5So+UdoaUzdo+zy7bJNAr+OVhNMpcUAEP03VFKqnWSPO+5VvMyjoxmFtiMWZU70qj5xcPxhxyoMNLYx6m6Y4URpHEQQr4c8HEE19eELHPURQRJGnMOzgAAi/tp5JR/5yRtRTqQSVMi6TCiGHFo7XgnoHHR9Ns7Az/vafgz9ap0LvcGtv7ud/1++2DHzl085cOBpU1TBqM9AIf7flItbdhetrfrn3/aAuxCDFOmndg5QUvPZ4juae9H3mNOPj59T0vk8PuwUloutSFe/x5aQfRt8PJ3/W7MHHbsh8MbGK/ZDxhLiKr6tn1OGTpDM9mjqN6xn3eYsxenyPA8wp+NP/a7+PQcNpYqiAc3fZ5iAKu41/ZjcJCHUaZeD0r2Gn8Ej1j0K1qkhJGQZXHhx4wQelX47rKHQjnREmh2ByHIttjzGnz5Dfvg3vN+By2CNXleyMs5hEmvmiFZPlg8pmWWGfO5tW1D4y56h635ZcZ3Eov+4D4MaFTkfQ2Xjsd/hJCMv9l53k+Oyj7pAJKNsoG6V5A33jRUdHndr4mNpgHIrr992p174qrl4Z446wHGN3cs3D4bY64Iu21uAwZc01saA7R+eBMNTqd74i3UMHiCKxkDhzYTnADBrnXakbK+2V5M8SKM9JFKQANZU6jD4criYXgIfmdRg+oYZ1SnUKc0bojTic8x3dfzOHKnI7XTeYTBdyzoSrcyPP44BA8yL+/499BrwBAA9ida8fPyTw8slattFchr8/Vda8y5I52eSQNz+I7Hjs4Apn73tpZ24plZZCFztgMCR90rc2175vINcXA2gH9CNExQKCGHaK/ib2hUOuJzecYR+pZqo9g0iebzR7eOMskkk0wyySSTTP4syuKxWGWSSSaZZJJJJpl8yJIZPJlkkkkmmWSSyWMvmcGTSSaZZJJJJpk89pIZPJlkkkkmmWSSyWMvmcGTSSaZZJJJJpk89pIZPJlkkkkmmWSSyWMvmcGTSSaZZJJJJpk89pIZPJlkkkkmmWSSyWMvmcGTSSaZZJJJJpk89pIZPJlkkkkmmWSSyWMvmcGTSSaZZJJJJpk89pIZPJlkkkkmmWSSyWMvmcGTSSaZZJJJJpk89pIZPJlkkkkmmWSSyWMvmcGTSSaZZJJJJpk89pIZPP+YEkXR346i6D/7uJ8jk0wWRbI1kUkmXrI1sViSGTyZZJJJJplkksljL5nBk0kmmWSSSSaZPPbyI23wRFE0j6LoCfx9FH6MoujLURTdjqLol6Mo2oqi
6F4URf9ywn2aURT9P1EU/Ur0vvztKIr+ZhRFvxlFUTuKom9EUXQV138hiqJvRVF0+PDfLzz8/5+OouglXPcPoyj6Fv7+/SiK/rmHxzeiKPprURR9/+F9fjWKosqHP0qZ/ChJtiYyycRLtiYeH/mRNngeQU6FEJZDCGdDCP9KCOFvRlG0yguiKFoPIfxuCOEP5/P5vzmfz+cPT/3lEMJfDyGshhCuhxD+84fXr4UQfjOE8CshhPUQwn8TQvjNh/f54xDCk1EUbURRVAwhPB9COPNwoVRDCJ8JIfw+vv4XQwhfDSFcfnjtX/1wf34mmcQkWxOZZOIlWxN/RiQzeNJlHEL4T+fz+Xg+n/9WCKETQngK58+EEL4WQvhf5vP5fySf/bX5fP7N+Xw+CSH8TyGEFx7+/8+GEN6az+f/43w+n8zn8/85hPB6COHn5/N5P4TwrRDCF0MInw4hfC+E8IchhJ8MIXz+4ed28R2/Mp/P787n870Qwm/gOzLJ5P8vydZEJpl4ydbEnxEpfNwPsOCy+3Ai/kB6IYQG/v7Z8P7k/u+O+ez9hM+dCSHclGtvhve9gxDeXxhfDiHcfni8H0L4Ughh+PDvtO84k/xTMsnkQ5FsTWSSiZdsTfwZkR/1CE8vhFDD36c+4Of/Vgjht0MIvxVFUf0RP3M3hHBR/u9CCOHOw+MfTOQvPjz+Wnh/In8pxCdyJpl82JKtiUwy8ZKticdEftQNnu+GEH4piqJ8FEVfDe9Plg8q/3oI4Y0Qwm88zJ/+MPmtEMK1KIp+KYqiQhRFfymE8GwI4f94eP7r4f1w6I+HEL45n89fCe9P/M+FEH7vH+P5Msnkg0i2JjLJxEu2Jh4T+VE3eP6tEMLPhxAOQgh/JYTwv33QGzwEn/2r4f3Q4v/+wxDwD3OrPxdC+OUQwm4I4d8LIfzcfD7feXi+G0L40xDCK/P5fPTwY38UQrg5n8+3PujzZZLJB5RsTWSSiZdsTTwmEhlYPJNMMskkk0wyyeTxlB/1CE8mmWSSSSaZZPIjIJnBk0kmmWSSSSaZPPaSGTyZZJJJJplkksljL5nBk0kmmWSSSSaZPPaSGTyZZJJJJplkksljL6lMy3/+M//JUQlX/4znS5rDVMqNrNJrWvE2VHVreHTcueAr8Qq92dFxfmj3GDXz7rrK/vjoeFz354rtqZ2Tz80KkR2X7Lh2f+Suy41muM4//zxvn5tW7P5T3C+EECrb9jvHzWLiPYL/WCi2jaBzuGKfi2a+eq50aNcVd7v+/kV7rs6VJXeudrd/dDxp2P1nef8ghR6IQiN/blq2MckPbaxK91vuOn6u/fSaO5Wb2O8p9KfuHJ+F9+9vltx1X/97vyyj99HLs//hf3v0QyZYEsW2v27ctOOld/y7bF+ynzGp+3OVB3ZujFc581Mq5PC6Ch1/boZhK+EVDVf9dXmbsiHyryRMQLOW9tvmeCO5sb9uDu0yLcm5vP3u/FDmWxVjYtMhlPf9dROwmej4TGv2wUKH89dfV8T49M76dzGt2N/Nt71eGG7YcaFnx+MUWrmiX7ZhuGL3r25BV8lv4bvgew8hhNf/43/7Y18Tn/8r//XRDyn07TfV7vbcdePl8tFxvu9/SGHXrp2u1ty50bINSO2lu0fHvec8YTB15mDdb23cX7gnDdfL7rrqPXuO4aany+mvm56t7PsFU+jY38PV5G014tQu+FeXG9tJ6svBmt/Xmu/Z83PvUumf9L+tdGBjPitCB9X8/asPBkfHww1/j2LL7sF9IYQQxs3jf3eh58cqwiMXul5pDDbt+zgepUN/3Qh7pY7jH/yDv5a4JlINnuGmTbxIqtdrN0wLDk4bi7Zu1AF/08AJwRsC/Q0b9MJANvst0xTTs013jgbKrOhfQOO6abPeJfvcpO5/dlSy5yrvDty5XMsMhsEl2zFmBT9JplXcM6XSPz/wYzBctRfHzYOKI4QQhmswhqZ+IdLAmpb9u56V7Dm5OIYn/A5UOjAj
MJr5Zxwt2/fNMcaFmr9H74KNsf7OYtcWyqTmx5+/e7Rs54r95MW8CDIr2TsaN/24c1F3zss56Pq5LM0JCOmjR/z5c1nF/NxoGf8vRk1EHSKx3hx8gonQpOVtSQROxblfEs4A0nNhTGfEn5oXYAx17cEGJ/yaoNGUG/mBzA/oqPBz8i5goMzKsnCLNpCjVXGE8OcMv03fmY65uwfe2yzhOIQQpuVk43ARhO+5+a1bR8fdT51z1xU7NvGjqR/rqGuGxnzDW42V+6b/58t2rtjxm+CkbnqwsucHnnpxWrEBpqMaQgjzPPRb29+/gqHXDXhWsM9NarhHL3kRFzvyjAgW0Liay9qkscjnff858DvFKachxv18UvX34B6ujnFunGJgnbBrix27rthONmrUmWq+vHN03PqkeRWRfC/Xme41aZJq8HCSlPf9Qx98wjTp0js2WTvnxTpfEW0Gqd2xz7UvcyL7iTBet3vS8n3/b/xY4RTqXrENmM+vkYNSC9b0Kf/848u2A5UQTaInEEII/dPJz1jes91jWvaany+Ln9Mo1zwH4/Ckj5Qt/ckd++O50/65YNi4cRXdTkNmsObvXzqwseudsvtNL3njs9C1+2ukjH/nB/798rdx550XF0+50yAp9Oz5NMrC69Rj52IttiXSRmMC0zQnBkNp347V4KGBwggPjZ8QvBGikQ9GVsS+dn8zQtI/qWsT+mPP32OE6JVGhnJjbh4wagY6HxAZ9EHbMF62HxBNqIgTbxEzVvIHNrDjhv9tfG802PQeVOglCYhSv3KO6LuYlRdvHVAat6D/P21Gjl/XfqMervnIQf7lQzv3go/ccN+O5va58ZJfWFPoi4I4SzQ8KrftuyYaTcJz5SUyQf2mThsjFaW2fdek4seAkZXhkteRq6+bYVfo2qTa/jG/X+0+Z8qlKobduI61I/Om1EXUEw61BikYOGDEPYQQool937Thx6B8iPFB9Ge45p+f70KDD7OCKQYaZeqE0wgcN9SbSpYfYvAgelLwL6fYw4DB6Cgf+FAlLcJxwX8dQ4b8AfoCCocWdRmcabhzpa+/an985il3bobnd4aGbPa0yHWRcjPhZCp0JcKDVakhtjle+Ew2cSpIH6L319Xv2Bh0z3qDpAcjp9Dx4z9mFAa/TT3u4h3bQaeVDXeOqTyG1NWwo4eiBht/jxrP/ROmuFa+Z01+NS22CEKPnZvUzOtvH92Q/YpjqO+hfGDHXej9acmP9WiF4yn3x5xlpEYjBxWkWPon/Lk8Ap0agclhQ2ZaTw2vGRZaz9vhobKLuSgeLJ+TYzWTMZiVbfHMe2JgIypSRJRIf0vFHMrQPynPge9jNCmEEOZMl/N5xXgrYBwH6/4cDRtGzTT1yHsWvJ+1EMLoNvXntJJsqFVvi2t/wSZITqJidMZyiBRHon9oGTlHWKR/wSz/8gNJu50zJVw89FZ057zpXdXxK2/Yi6ZhVJA0ZucsIvqy9neet72tedteumZXuD+q0VTqAF4ijhaFUcliV40JvkNJWzEtqcYQ/mRkqLLjx3G0aouwLIGD0bqNPw2j0arf8/iMaVFUlVSDhy+kfMtr1dxpmzSMbhRbEmaEJayhp9KerfIIier+hlrP0FJiDA2++Imj48qWH7wZjJwSzk0rPjLB8ORcQnhTRCaY9tH0GQ093eBGS/Z7CjIGNATywELpJJ8Vk1N+umFQHOZm1zRsNParYXjZtLGGcmnQzsmILr+TEZnKnje8XPpSImz8Pa1P2HPogloE4cbkjOGGXIhXpNGNAexJ9eZ7MDy4kItdiQTRWNmUZ8Q9h7AZFf/BDb4qZPRjOL55n+V1URJiVhjxCkGMtLxOaGCVlpPPlYDbGWyKI9S2NaERHob6nTMlypGGnqbF+Hv0GWlIcr3ru+a7iRmOWGZ0dhjlCyGExi377oNrixftoZdOfIx674NN0x3DZZ+2Wn7DJpUaMtTBxR0btNF5H3qsbtlL0dQLjRVu8IPT/jkaN21h
jVa8F0M9Vd73E25Wtt9a2ra9ZiIbNXWmztkKDCXugeOmXGc+YVh5w+95Oy/Y72nc9Qu+c4aGqf1/bcuPVQ4ObvM1P6H3XzTlVbsnY8C9DLpb98o+MEnl33nTnZv+zKfCcaK2A/+OZo++JlINHi7I7tPe669smxaMpnYbBVHloPTUKh5u2IRllEUnAqMzirFhlKh7we86eYCeRiegweeqvLChi8FDS3W8lBypSbpfCN7IYXorhBBGCMvSyNHNnimhkiy2/gkAvSqSMgNYj56ShgG50Kc1MYYAXJvA4i8fSjQPiko9A0osuoSo1ACYJs1BL4I4ACnWRyTGBH/jpKbnME4SdiawtWb4zDCQYJczqtXwTIKTacoGG+vIY92dQTVLATRruss9B6ZRrq8AR9xfsDP1m1hzXNLyOwlM1vQncUAlRJMm2sGI6ZKJGGzc78SL4fulgdK87Qd5d8V+t84RCqNEGsVpXbHvbt5IvsfHJdQJtT0YDMsr7jrq/6JiW4iJknVf/o1vHh3PP/e8/f++H9DSXUtVdZ7x4TR+H/VKUXQ18T2almekYrDhrdfyHiIys+T9hOt9JE5Sb9MmFfWA6kuOz8HTXrlMMb8Hq8mpHu41y9954M71r2Lsxn6MOeb5oVcM3bM2JtVtuy4n19Gxn3/mWXeOy8xFceRdcI+tXT8MjyqpBo/LWUp11HDVtMFoCedy3iqOUjZBosEJVuWAhOCNC7W6mafUqEgORsNwhRNZgVjUzOpJ24tjpKbxhrd8u9dsR8rLIuLYMZwXgn/B4xQjoeAsZm+QVHZtsWkol2CvIcBipZYA5mDkjCU3S+OW468TuXcGYLSOKBKMeelQqrQKxxtRml5cBCEA1qfp/HWa3qHQI9HKHW58nQv4jIZtMTS1e/4UozpMu8VD43aswGSmraZqsGGaegXlr8v3kt/fFNicaOyvG0Lfcrw1rVRsY97sulNhuAqsxCqrPcRwocEjYzxeQRWppMxcmgkRrwefU/D08d8Vgh/jtEqvpDmxKFLZBUYRBQ6DNb8Iqjuo8BFdl9+3AZid8ROu+89//uh46TVbaKOzftLmT5oVXRJnjFjGxnv2Xb0zcg/o2fKuD79GUxhNZb8POV0FhzoGzuYck4gfdQHxe1MF02MijWsyn3Hc30hef4wC7/7kKXeuumsP2XnO57q5pyoelV/u92z/Q6uI0qlRycBH/bYtHsWtzpp03iUHnCLpER55WZQpkN2V3WT0PUvFywc+VTKtHW/kqGJgZEJTZjynESQaGow+KPakgjxurusneedp077Me3Jhh+DxPbV3/S42LQE8LTljVo+xKmmqHqvLWcoiQpovli5ChKd2w1CTrKwLwZd+shwwhBC650wBUZGMl73hxWiSPmPjXdsV2k/4cEL9trnIvdM2sTWCtAhS6B+PPRmu6XU4FqCs29zEy6vet2OmnBQMy/RUGjYkLapA5VuQtBVL4hWHwOiMU9KCY6JnqhgkVoiVWhJVLfA6GMqS1uP3aaSJoGVWbI0kNUUDaCLPT0WUk6jzJKE6LSdVVCVgslSvMcVFz1xB7mn0AYsgXOtzUFPofCjv2KLoXBSrrmMTqX7Lv0yXCQDIuHndo8APnzGYRUkcrhGqKMdNG3h1rpkKo4ETgseRaPUVK6CmZVs8WklGY1YjNy79jI8pdm1STcavcE5VJL06RNEC9c7KdcF9Ys9W3F8OVb9DMWgbt22i9k7bg6gOGgeW9/vxKaPcv7htirN7zhs83B8VN5cmjwxaphUfgueMcZw8slky/8oS7BDEyCHquuatvtp1WwyK7iemR8vrVr5lobrRebMCSze8O9h60SzcQt97F7RoC1jYytHgRHhs6Fn3z/sdzoVXkdqZ5RUUjZSW8tjgnG6M1XdM4/auWog55u3j75jByXAzDTGt4k2pAhtt2LhqGidGZXD0vYsX4eHv4rioUcANTCpHfbWO/ETN1x99lyjHCAZKXhY8lSANAcULpWG/KIr9
ocHGTU3n3qSa7DCxgivGoYN7eh4hMZp4D1XMIyrE5Goxl26Mzfv58deFEPKj479bx8AZZZpOmycc/xmTfMcmFte5QhOisU1MTfWEZZv4xITqtSzLnzbUQjVRxzja5CaO+0klFvEmxY5fMGlpescpR/yQ7gWkjJA0Miv6XBRVAfM4V9lPNoa6UijgDGd8TPnravdMuTDFF4IvMClLpJ7UKWM8R/2+VyDcp9P44IZnzEKr3fXvgpVfupelSarB4wfFX8ooCXE17St+Q6cB1HzJIyPbz1u4rIpBLlUF7HYCgLO2HzwaXiqtF8xFZo61+5wvx3CkeF1/f1qW9btau2tSOkSqo+qficROiu9heJXEgFrJ1Dtli3taFfJF8luI8RBtmXFHNdJ+1ockGLnJD2SHo42DCVp/2ed+979gZUUMR75/D/sc87sh+CoPVmhouHMRhGuCG7/iLtwilJ/BiIwaHa4KDlNA+SpYHTXWdBQ+l8anEyUYbyF4YHIav44rp5YxYEnuvJgM+lXyxSIiPvRE1dONcM9JinFMD1Mjcc44rPm16Qx/GZ/Rin2wtA/wtKgI/k6NQiXhdlipF4IHVus8WASZVRExQen5pOKjzf3zZtQoPo8R83FDCkLg9TOapOXO5YNp4rmNf2T8QMMnTf8P1pUzwg4VbEs9q/sO05M9GgUH3igYp/BsEeDujMWCGDxYj4dX1UrHsWLemL4lm4us79ZlTFSNSuLamRAirr5qkzOHqgcNRNAx1ihRhFhE5YalL3tX/cJNC5CkSXqVFn5sVVgzO5fszXXPg0NHyQXxLNMNIQ2Epd1HKiMvHAoMXxVnyRatehTOc8RzOMxRCKH2wD5Y6Pib5IfgfQC2RV8ieR+iSfLzF3r++Ycb+N3wNgp7wqZ81p4jRlYHg0rZL0fPXbRzjina36N4AC9NomhUMjxuv+Bzv8QFFQ695p99z+gDJj/74+7cFCFORtR0sS2E4PFK2Hw0NeVAy7HoA24n78FtmClGk6s8UsA0lTbvr1CAlHswopGXqqGkShMlBqQ3rmDhNK+shrTe4VMplYsEDqewNfsIpdwEBjwjOu//R3TcYQjBR6j4zmKRIAeKlnMJ9ASxKN/8+OsWRbQa6wei0X7qe+XJITZQ+WMmp+3+9bskZ/Hf56vlpLDjadNVdNoUz8kKLi1Mof6PcTZBbxWB91I8Jz+nET/nqDjSTp2XCcfBrxFNw1IYecxL1e/SGxZW3f6MB57XdpJzqozMcX+s7HnnfYTgSU5SipXvv3d03Pn8Zfv/Le9AT6uEPjx6njc9pYXJ0HrCa3QaCQSdKnZjis2YrL8h+Bxs53IT1wnwj9VWYlm7aEHMGgVwkaDo4TzxutFaWqoKh4pVglcyvSRcQSAsJHg6BK/AGnugPD/t451LrwF/o20+oEgUjOyYi/Ec9TsSZQGeqnLPg056F48ng9I05wSRp/aTEq+9ZsBD5WpixRjTWPWbkidaAOHvZ+WURkicQpR56VJCis1J4LjRtA9LU5VQ0DH/MmImq52RhFglGY61LH2M76PirOzIRsV7imImkZ9WRx184njSQMXHRDTYVPHjc1xjOgbOmF8WfGCLuDbh+aFRiXvGCBxd9ZF+N44xD3Iy3uRV0udfBKGT5eANsg8xuh1JKoPGSvO21yvtcyA7rSZbfEOw9Y9W/HU0IMp7yaAPtkFQ0DWjNZW7PtTWuWqLYgRuHCW6pJHjGcBFiNkUI53rKsZhRUdLU6i8Pd4NgwYhhNC6BgySMP4zOtM+75XS5v/57tHx/pcvHR1rOyiOMVNkIYRw+MUrR8esCOufEtAy9rwPLcKz9Ipp1d4Vj4Tm5sYNd7isTMKsxvDnumdtYGl0rP3+LXfdwU8Ye2cMIwRLUqNQcxhisxJJo9xlbhGxV0gIyVww6qFEIHyKgb1JyCQtKQj4Zvm3ehCtp22sFGhHenQlJeRzjmBssQIvhBB6J23ire35caS1zhJ7ZTpluXwsBQplp7lw
pj4c2HDxMlpOlm7Yezh8QjZjp1CEPwbgW01zJHnwsY2a01SmGwHTLlog1xEfM/CsEz5tpWBkvBdu8DEcTYqXWtkCSaV3Il1FlEszCd8GU1zKQeeq5JiG7Mt7csSGkn7Io7KzIuudnD28pYJIXWm7P8f3xDGdSLSQRvCjthv5KIVtEBp37KUPxejgxjeR6qL6vTHO+c85Y5CVtyll15pSZ88spsr1OtdaQiIfNI53P+33w8o+mJzBftzfkNQXozhiwHPOMqoa5WeJ11EHhSB6KBaVtGMaRuWWv/9gJTkTsH/N/mPlujccWz9h2QRCGGq3veNK7iMtIHJ4OFS7Nd7yed72Uzb+3EN/mKT30gJoSMu1ZwlzLa2CKDaR8Zz1bdssJ2ek7ASimyWV3lSQ//XbwMdg01YQJjWRhjH5d1qlF3PXPbFGSzCiYohyTAx6Ofo7XShUQr5uMcvz5w+Oj38qzQCNqINP+sXs+q6kAPdmeVsM2jCODUPJnB1CCHMAucs7pt3zNz1GaBGEEY0WmoDGqqFSQOBURFpy6sjvGL2XzdIR5skmy4jMcM0+qH2YeshIxrAnmKdn/ouvu3N3/v0vHB0zCqUzw1WQyBgwKjVXjAKzmgAcR1N/ExoCSsxI73aW4knzfTZqfhAO+gi9H3hVqW0ifiDqcfPdx8gRmYIBrYGyXn8QUObHLZV3zUnef8qjZklVoZg3RlNUv7mqJIzFqOkH2zXclKaXJKpjBmFS9Ts6nVWt+qVzHWu2C6etfACnsKa/hfdLTlWxSjKSVCujvQe+uYBzLEr7uk9g/8L67m0kG5ix/ns4d3hZ6FHwuxmYUF43Zo40fUnjiwZy51PJzajZGuqHSarB008BX/VPIK96D6BZBUZigGKke0hPMeWU0814SIPE37+Cmn5Fm7OSiqmeGLkgjIS5ANU46R1gTlJTAc9fu+839MIBOpbXvTtL3oEBGhTqS2QoMcZdQES8sF8St8PJFeM6Yl5badP/FJG+J1YSr2M+VtOSHeCCipKWpFHMsvRic/FIR2gs0/vRDZAVGMpAzDlckI3adRJGOFznPSNBwqPpgLmu35dsMgyvq1HG3/beX/+CP8cqKnB+adVJLiUK5QDIGpaHEcjxqO76h+yfRqpbS7kRyWEPrnhKy47rJb92DuZmiGuEatzAuMKB1Q2CY66pQaYm2E6iKHOJWDG2G1kUWf+m6YfJCZsEscpCDGHtjgeGMWWhBk/58PjocIz3ibxGEoFntKn5tr2w9hPeSc7juxrv+RfRB7Ft466fK72Tx/eM1ApKromROjtM33L+aoRHMT3uJJ73tDidYCZ3RQSxtZ8cJeL9m7e9l9c+d7w5Uez4xTkAGWdRMK0s1iFmqvZAG8Uifbn06Hne1CtpaKQ1s2TPKiWV44MpbwtDoewd0t/01zHsmO8J/gMEiMosSQ4a3oObagg+TVMSsqnhEgk3kCKTSAoNqqlUmfVOmzZTUDeta5/T9xOBbS30+V0+U4xFh7lB7jptksSA56g643tXcLkaORTXCFM8deKw+O6rbyxehMcR+aUQ97m0jzKHl46/LgQfKfLN85KfSTdZGi+uk/V7/iZ7n0DUM8ZubsdaHcUyb9eHO/ATAAAgAElEQVRqY12MaEQvtW2Di9bEADg4nCQrX38Pf47gZBqViqOhcXWq7nEZLfCutASVXtw7nqZfDVNf4Sjn+H5das1fxojPIqa0Rqcsb0qnM432YCgRGEfAKlERz7lCFmN/z0el1vAn/J+MBBFGEILXi1pOTQ4nOsbaz2riaCLUESLQHr9ZIjxzzPu0KJFGRGfV49PDo4aMNwHNohfKqEbunBaME4zF4TJtAm+seM4i/92DdZv84zojcf67lm6A2TqFL1Dlkbulq8V84mtWYr7zk1bmp9USXMgk5wvBW7sErmrfDJYY9p/0O0t9C+XUmi7CODDNpJTkg3UbhuETHr1Jj5ALcfPrO+66wXlbHFop5fqKaFSE5G8AiGkU
qnXVnkuVHq3fePoEyh7RK91jyNas6bSDZ+23Lb1jO9xIDFgHXhRlxHy4VgXQkufzt19cPHc2sVRZxpPvVSMfsWogCt7XxPWp8pdRKcUqg+jpIvqw94ykSVmSnVIxotYEo1nEm8TSOcC9xDYgeK3aCyePZp+MbgzXkw0jjZQNT9oPyPdtnmqUhfN0qeijDqt1G/S2lE45Tx1pw0JbNzE7rt/x303dQlzGRJmW8YylR2fR/8iELQWWrtuYHV722wvBpap/WATTOynONYG4iGZv/qnHhhw+CR2pdgD06eFTNmkrkrkgzrHxnp8PpEfRcuq1V+1ZDq7ZC1QOmtYVmxCjpijyGXU15pToizFSwGzDEkII3YtIW4mTwagOo0S6/ogti/XfWwPW9g2/4TLFxc+x9VEIISxdt9C18vywZQeNytoD/55aF+27dIzTJD3CgxSF4gRG5yy1sfp3rNdJ75/9jLuu+SrKSSRq0XreiAdGqHKKs8ra55beUwJENq6TLtybx1dmKX6Fhp3mFFmGx5TTgy95lCfHavVVKeG/iK7wSrhFr4RVJyljMJEuxMRGPSppoE7k+iuGsO9+7rK/B+6ZVIIagu/HpUyqZE3unRKwcxu5fRh9WgW2CBLHf70viuEhGFlxW+U9G9DuBcGCwZhwRqPu9dPkcxQXLUgJT8fu4YC+elM7JHOsKtjqA/u7/aRXIEVw18S6oBOXh6ihtqpw3cbFkBmtEfjMOL+mYe3v1/c9P9d+BxuoeMvEXjFlpuuWTNRqmDqmZUb9xFmgl/2oZJEfpVA/dM8nV7k6p1kI+TowmtJ+I79r95Nex0RuXmorEGQrsNccXvL6bOP7Ntnbl/0LcxWwomj3nrVnKWBecmMOQTinUlJTjDzmcsl9x9pPKZiIwLBkB4HXqU6jka6gZc7N/Wv+t5WBFyWz9VT2qwMYnLqX1bbBtIx0V/ucFPvguz404sEkJuQQfM4y9xfMyNENvY0mbnqOBgRTOM1bfocgoE1TMXwBGtryBGn23ST7CyGE4ZK4VBBGmwoD0l9q/hXfJQ08V75lxCKHL3ruGlYrEI/UOZ9MqjXPKQvz8UDX95+LQGs8o4RaD376qj2HdDr3v9sOGZoMIYThio2jlv7TyCx2/TkqIEarumeSmVQ/LnG9l/AzYpVMTCuJviLGRtNR3LgJ7I31usImqG0hSAA4S0lNcRPX53AEiCml1ozAaCSLlV/aL4u/M8ZATCZZMCN3LqhmA/ZO0xv8PqYeheSwfhuh/algAHvJpKbckApM8SlOhwahGL58Zho1sV5a1HGLtyTC0ruI+qJv0jzymwY7qbcvVuUcqlWl+pOGYRnzVPEfLhUmBhUjzLkxnVj/zknw2nzX7xOk7og5D/itXEu5cbKBHXLJBTguNRWLjuIWWulFVS1GepJloLg21/9N8XW4trYtzhobVRNjKI9Bmpn6fWktcds8vv0XzXZgKi0Ez5eUVkijkmrwdE+wF5U/xx9UwWSayY5LJP3Km0Kml6vh2P7/4Krf7Eso+S5K9Q/L4yfSAXy0BGwLFsNwSfpIsTeQeLN7TyF0tgXPUyzT8pa9gP0nvQYvnzIjZyycB6NlWMIlW2wsbQzBeyxKtESDYSQspaw6YFM4cieF4Mdf+SeoLDrnbDxqD/ykoMGp3dhplMVYfaG4CJT/ILnZj0qYeqjdtuO+Dw44jpjxkv8d1bs0NERh8U/qb3XyyCsijUt7yASyqabybbjnVYMH99cUi+ejsmPteh5pihky3ABRW1sUP0Lv7StIfcnmQcMo1lG6bg9W3rU5pf2duuewPmRDOHXC8kf32j6iy/fG0vn8MDltqAzK7G002IABJVEo/rbRyuKtidYVM15YXq4EigM0L1acIKPxsXSU+4NRBJ03OJadjVW6sbQ/hLiR9iWtwIFhO0w2thh51Pk2BY5mVvOLzusCwEnyWipFT0vmSnl2/HV6LcZRGx3PCPWRc2tvmc7ffdqfLCIC6zBNoruI
9Tm8LI1Fm4Z3df3PRAfNYSySCuGHSarBw41IDR4NRf1A1GJ2hsy15EgKhQZOCL6cUUmGaEBMyv7n0DOI4Xt4f1ZZxCIkdtw9Yd+lXa6HMFyUv0GNHHd/Zm2YRpAFxXAqyQRDkMoejayUafQxkpLcG02bvLYvgOWZm4zgkVgJodE8Khm16on9Yc+wheylBWEERin/XSNA8cKUZ4XCiAyNFW2JQKNjLJGVJNp4nbPE9wyl4fAQxYSx/lN4fmKLlE+n2EH0JDlYkprCcF3KVX8Tk6CkfiNycCXfn2m3rijfU0vmbSo5Ij11RyAnyr0CY3QkBkAZeJwJNlrdkJkuVazSIgg3xdzQ9FRaU0dtLcG0dhoPDz8XYztGZELPcY417oAvrK5Osv1dEz11eMUWpzYPLWIdsFp1ItF+zpu5NtkjPQMxNvLKGfGrSOXiAPpjrikt2kKYzzGWdabjxdY6uAqM010ZA0Rd2udBeCsRfRqfjbv+y7lvcP5ozzBmbNoXUpSLSKrBw2jByts+Vntw1aIYTeBqRk0f3RgiytJb8i+HFjm9H9bzhxBC91RyOoQW9PJNqeAiSC7FaqVR09uUPi4oD++v47cInrZ2j8aQkJRBg61+37t5t37GdhoaSmrYzcDeqZ46DQPtEky7g/cnGj4E79nM8/4cx5ypx6G06GD6TMeY3x1rwodXysjWQkZ4mCnBMKk3y02Q3brfvwmv0y+wQ0aNShId8JEVf86xrQKGoOkczqNYJRabNKaQI9JQUj4dDlZROqKPm5gPUmlCJ4DgyvodSZdfwh9qBySsdwWMM+12vuFpcd996ax9ruk1/xTRpvo79gWDTcUjJRsonDPckLUsnYakVs0sglA/HDyZjOFhlFrpMw4vg0tMonWu/9QkeTxJ/qdGNKMKDo4x0jlrhwMhDVx+x0I3CjlwqW7SnGjk0VEuiB6vYI51kV1J6fQ8uCTs4IgS5Tv+y924FpPHiuOtdBvUJ1OhR1l+h9g7fK9UNJMjayjOe3UH0VK8m94Jf4/6few1j27vpBs83KR6p9SFssNDGD/cOEPweX1tV0/LkhOjeyp5Q2SYKwRvWXdO+Z/jSmtxSvOBjP7EexsdH2pWb5bjodEwGgJ3f1pcaQijRARvhSDl4CklxEoLwJLAwycAnpYhXnkDFSkC1qMw2hZLrSHEXN3xg9ADb9NI0l2rb1gekazX02KK6/8xSVKVTJx40N6XRkho5IxWvYHavAHDlopBm16mAI5duTxaOBQ6yZtFXtIonhzRn3JzBwbErCpzFh6s3sNFKtQRxaZAvdC+pBikcOx1KowEKaZiUrfBWi57p+7k09tHx/e3fP+Own17Of0zwPl1kglDCxJh48bisFuiW5iyVCzXIghLkJdugvpCqjgJHC7ueOxAAftLTMfDDuW8LwpDMMurtdyZ86N5yxZr64KmZVA0IbCCDlpcaHELoQM0rjRa5YpUlhUncvxxPhbKwrEYgPxrJvefM6rDirCUAIAKH0XJSjnm3JfVcS3QARQsLDNHfNeaoSEAfjpL1msqqQYPS/YUGERlnBa25QANtecPc64wNArSf8QBoOSJB3iO1BCqC9P5AerTC5a9nukWF7oVo4NNQdXo8wtAHizB22+d9z+0fp/4G3lGjGNaOFhxGhSWXMY5gI7/oBI9MtysTNRMU/bX/FxirpxG1CJWpMTwLA+luu3/HgKTMe0lK1+N/vRATkuOG1dpFDyeJQaGRaSF16kn5EDsKW0hYlEFBwKGglI/hX2V5BkZbVJG2AkwT66XltzDRdF6/ssZGucYqzIfr9t8e7IpLxGyVfSKbbRpNyq0kqsk+ac2mHWGKeeVGICMoqWt4Y9L+Ez9DbTj0N0Fg7H3Kb8ZFF1rHj+I1IvcZIcr/p3TOdUswRDOGA0vTdmU2sAISZsk7ofkkAvBb9TEd+oYOMI/wXvlGqAXaQPTlBK91NDmvAC244IYSgPwqeWPdypCkKizzEUGLYZimNPhZTRJ
05euj5dwLlX27Jn768ffLwRvZOs7TJN0pmW81LTmiFSktQd+hA6etuPqPf/jiClwYWd5wSNiBsQYoqGkxpDDjYADoyAN0YYrdmFVuO6YVnD4FXkBvmt7MnBblTbD2iUYjpUDfw+GxlXpcUINBc9Br9Kh+6UnGSdXrEwxoTmgpta40FsXfKiMwLKSel/8E95A/W3J4yyYcJwUAzMroYfZql88pQPmxZKNoTTDiBEBpof0Wq4rxRlx7sXIC5OhM6EAA6h2z44PpSMMjZq00tHhplfMBYCYXU+yJX8dozWaTqOhRMNoJnsAgaJnSx79vVs276RYlJQWUgLkt9L3lEY1Qd1FByoG2j0ey7owsvYaosMXwZgsOM/eRrKOWbphlkzrouZ5ISm/P42qIallgUb7faNZ/2Xdk+Brk82+eev4dhIFqSQjhkdbnky73EiBhUpZPJV7fiPqA4RfuOXHcXyGpYDHR1Lef2Y71qpDBi10r2dqs33B3nXzZX+T3efMwV2+IZx4wABzfSj+kJE4paNJk0dmWo4xmbJhKAwGjfAQlT5a8TeZwGvNp3BZUGnHgJEcCFHa7OpMtlK34QTZPMQLI3aC+f6ZKFimoxp3/T04duotT/BbXYmvRIkYhtVOwO6rZOx6WKRplrALT8q7rmzZALWu2CYQ6zqP99R8z09CRoOU24He3QAe4qSmSN2PX5he4O9gz6oQ/EaqzgLTEnPphFxEtMMTrklYGCkh3WRdqgdKVUHmScZVCLIBa4UYzmmqyj2HS+cIhgdRnNK+HyAaKK5yRYxD1xFdsDkOuI15qdGkUWQ/vCf57J2BKYO80PuHDsgyGVFLoxkQZ4SpTkfgKOvPjUcKhuXjkoNrNqHVSKA4h1QMnr1nGGH255JwS6p/HJ9aLfk5GEXQyAGnkUbquaHPhWCS1/L5B7LnzYjTqfjJUqzYoMzu25hGYpFQZwwuej3Lvlvjk4LvSZo7mhInPlAruLDeu2c19YiUFgITrcvSGQDLbO8pwfBsH7+W1MCMpkyXp1QliKQ3ocDvGQmR09INm4WHV5DbTEnZxIwm5Ood0ZIQmE1WbCKUHyi7L24vi4gLJZeyodOIGq9I/xGEq93nUiJeNbE42+ftmQfiBY9WuPgAEr/uZ2EP+CT1jgjW007t1XswVsDWzHYOIfhu6cqEvP+0fY6kWlqJRT6Yzln/nhj1Ulr2wgBkUzB+NKS8CNI9Cw+qSyUnRnQvWeFyo+v7/oqetyUFoMp1Fkv9sevyINnwqhk9lCvPDiGEPBS6Yu96CWDqaCwhelYyicHmel1p1BOGAfE3ur5dVCcFB1Q8RARGaCeml+2mhwKQqRXQ7uamMC07QHYyaWoMlA4ZISpYMeL6WKSaRpoCyBdBaGi4fnpq/BB/I5EPGi+d034e0VBk6ngoRTAOdybjVLtn92/ctEmw+7zPUfO3nPgTH1YguWBlN7mDefMd+9z9n5AIwFgXK767hZRTMznC4+aY3G+OyHKkUU/cZ47Paaqbzm8ss4Ptsbrlz9GQ4T63+oZSEKAISTI2q6+a19++kkxQOADjc1FY6NPkkSM8Nelhxfp5pkc0tM93pXwkbjCRU5xKxUj5Pui4T/jnqN1kREC+mwRQ+BiR4CFIWbR2Y0/otKxVJ/ydNHBC8M1V++v+HJsQcmPRdBEbtWl32ylAfpG4h8MlDApOMZISgsft6HfTuPW9ctTLSW4tQeXHdiAheBT/FPdovitxzAWQMjYfcqKokc6NWiugRsg/p5UZMzqgfXfSwvdkRqaRoJ7ywIjOQ2Vb5g0iVlpl4dKajIiqp4i/1ZMmhmAmnmcJTNRUxgRgv/8FdqgGJsecUTmtpgvAOQzFnR3hofOnvaU03kahBqpZi+L5MwUvwTznaA2xQUy1nBiixtAiSFLfuDTuKBWS0E5FjzduwQmCrlPOtC6cB03TEPTaRpQ6RhUAA37rs8LkzBT2st8naCuPq7YoYr3PYIRoGjbXNEtj3k4pPeIU
U8Au/owRDybkA9U4HCfQToQQQuMODNNzMteZCsO76ZzTxW+HxEyFEMLtf8Ks2xLWjtoVNVRpsd3FD5PU5cMNLYl3JwQfPtSXyHz8RHuHlBjzhoK6Ld1VTyd0VAwh9M6a5mc32BBCyONjLK/rnPWTtXMREZKxKnd49KjAGG5qNRSapEootHMGxGdKLw/Ssj6IHrUaaJ4DiE3C5nX0Gemc8WPAHPXSmzaD9j7pc3eOWVfu3wYex1VJSAl8qW0f7Nb81OKGlxv6z5FZlWX13fOPxtv0UQo9Eoe/ErjR4ERIFGcMpjSDJM+Pdkh2oFyplkjiB0qLDiixl1sHaXqTj6U9eYjRU/0Nw3wqToUr/ScwWSrJfC8q8WbxJ9PZsTYZkCWhrCbz8mQkSpuNGF2DUxksElDK2mdKv3RII8/fI41VexGEJHMN6IAQSS8trJeylDtT/yhhZfuCjcejVqwp9oSVQtzLdDxpUMZA8ohgKAUKK4RdhZiuHczLQs3fY7pvX1A+adaDprRy/WQSxRxK0edr3sOZT/A5rFXt8cYIrlJeMLJSvyt78SbsBeiT1Tf9deSz60hazFWx4ZSSqzom6hR8oEqqwUNPXzvYMp/Zwg+IkaxdMQ2Tu+3ju7lTpqknQJAPzwndNCzf0b6/R9TEtS2vlMbLiBrBg1UcUPWBTQRV/IyYjAGa1KoQt6F/gN4kfHGMCmiUhQCxqShE8iXpIl35voWN9l40TEx1z++0zsOSMCZB0fwtygg7ADhbCRAJ3tPGe4wukXOJUa1Fkf7m8f+vHgg34NJBMiNsWh8sestKVhhr98D7I0JAvhiNNDFVohiknMPHyHOR4I37mxhv9BRJ8BeCT1PESohhTPgqQ39dFcSMvXNifAMXNHMpPv+M86HNt4osnkoBqfSqYNIYoeomOyqcF3rORaQx/LFyX+KwRrqDfvzioifnjm8EHIJP7/U2/MmNl22y58beOu6dxP4ClaARfTojyh/jeilCt8aqbYdpetaO956UTuHga6PDG2vKu2RfOBUjOr9q5wYH9sHcsq82mAHiEQm2jFXe85g1hPQwzvV8tyO3btXQYKpKHfvld9Bt4Clbf92T/jrev7LrToXmHRDsYj/pnJGMB9ZS9wP0mE43eGgJS2UTQ4sObLuarMEnUlo7b4FqfAnhPHlR00ly2Wce1RLjTa+U6u8gFQYFq5sFae5nEnd2nm6Bi0ZaXABhT1KkEEIYrh6/YEPwhhO/K8bZAYNz9U2/SqdgrsyP/O7UvmbWnYvYC/+B64MiqSoaJCOmn2SjYimotrgodjGRhbDQhWHxsdL+ArKs8VkTeiGFEEL+UKwESFpJKDE3VOAjoXTIpaS0GHondkZbM8wc/XtySqh2R+eDHbNoQPFIvK53RtbViLrFf24EvcOojka5qAxUL7ByjWnDmEODMRnLy5jx/v5jLg1H3MRoyb93xxIsm58zEPH8jVv+Ola6akXeIghxlFy/sWgafiPb9IQQQusieXj8x3y/xOP/P4TgoqUKEKdeJIZQWfG1wpZC/ayRD0bS+4h0xN4557DgLae7dtONK+aolvN+06g0bAMb3FerD/eUvWBePb5qRdNW7plS8MC6lw1AN+IclRRGbOW927uGCjpEzWMEiNRdH1ZZOrlUYqXcqEqq30MaQgBn00P7RfXb/tzgBfvlNHKmO36WVE6bVp2LwpoOAObtSSO48zYSlbvHl7uFEMKskhCeDgKapHdVk7eI4Tl8QqJhMAjVW56umZE2Q+VHec9fSMt3sK4Ei0xv+PdExmZGauiJhRBC414yz4/LmyNSo8SDDOvWpc8WcVLaA4fhZk5eNvJbFHH4CmJZxJh3KULxyl0FV0rHZGf8SWWhw6JITJfXpvG7eDbUZINEU2HEVBw8mRwJYkQ01taHxylRzzQuJm46jZvJaWri7fQ5Rudswu1LzokpLXXC5nBwcoPkCCvHXwHTDvTu5r2/roh012ht8QweznUOoaacuGkdXJVo
HcDIaaSuFN2opykUJQM6nXhHinmkk58X6g5iSgYyZ/soueeGHotCgd5gtuX3ueIZ+0EThNmv3/b58XkHEbC618EReXjEOJ7SAML9Y7hb3FIjoow+Es8Tgjd86bjpe6JzmFa56KqiZbyX3j3+fj9MfgiGB18uXCJU9q4csKHodaQrnvGhlQg5xRwUf+2sD+EN+vZrC8KHMUJYcL4sEZ5X2IzT/r912V0Wcl11FXjSDskky1B4CH6BKXp97TV7I3e+5N9c7tAmLw0vtZ7ZyFUrxJJo9FX4PpXnh0Do+pYw5oJRroyeXkqOSEUyWPPjU79rJ5ff9T/u4Co6FN+xc53Ti4fQrG7ZXB+sY87e9Yqz6zp7C76E0QGNWGJTTKNMZyg+LwZVksemjScJsNUIjyPEFGVDI8dFmsR4q9+0OdA/IRt1CqfVbMnmQPVNGwQFBHfPhURhQ9LeebCD7yZbUApavtcxbTy96T2tGnoY9S6gokbSFLk0DzkhZamYpgrmXE7blCyAMCJDg1LxMZzPWvnngM8SgWeEgN91+o98ic+dLwMsLF4/ezFxv4o1peTwChCeTn9RWMs5ddKIIonHyZ/0FvBoD6X50C3PXfQ8J6/ehUUsCmQGZ2rSE+gAa+5xrK1MuB4V5M8U11BK7gdkEWHE8p7X97vPJONRWX6exFMVgg+4fGitJZynpVkIItaXqABFsSFlM5McfHnTNDPDx4Oe9HICP8F04hVWnucGApTFbXwUwT8HK2AU58Dy3NEyOzdrlALfKzqVEZlYWm8DqxvI/NpWsien3A4VIN3rt72W2X/a3KyV6zZrClp1h7YTvU3/slfetIU5XLNB1dJSUrFrSSqpC9SYIylYbgTPvLd43qzbE/ETtYEnFYqmCFnarx6mq8zCIh8IdsgpAxlPTX/ZZ/x4uuo42WQLPZ7z9+GGlIYl4nPomiNn1jxWUoOIMbA5ioVyhIJiWJD/Kw+QZ6wrfMte6OHYhyRK6FI9WdaIJbA/IH/TVIfToaJtqUMdBlAM2NQ00YKJM47llevGSknD5jDFx4jA/c97YBv3KOWHIt6ERo4S2jFCpcYQdZquRzq5hRSnhQZPTrijiiu2mOoVW/y9iVT91pDSkq68EXTN/ECc6zX73Az7lxbSMCORF4wN56mC8CmufY5E9In50giPS4Vxfcj6ZnWXgvzTJNXg4U27AlBykwsPU97xP274DN5+K9ndKRfZWMufy+dsFKp14bjp2MgWtgTsdsHuyXLq1Vf9F+x8mpgKCdOhFJZlfqqjXd8uWWwjV67tI2BLy2b0HR6Yebt/zY/32us0BOT+KG0ffsIrbeaWu6cRTSr4d0GcjqYvu+dsjJ1i1ioGpK00b+t6sEienGFkdtKdLF5Gy2HUHO2BjEURm7OWU3MDU0NjDOeBpJRjLcmGKMkaDRISFsaqGfCxGPcVPqcdmQk6ZMqGFZkh+PSfRmeIm4ul9RBtmpeBj9nw1zXfAmmgYISKrJpEdah6rEzlXantuHPX9y2mHnNwSC532iZCeUtauxBPJfO5yDmDzS9WYp+AkVkUOfUN25x3P2F6RSODjlxRCf9o8EiT2/ZFGw9GGDQS5MC2StJOUDib08p4soO9GjxcL2UxBEoALRMCoA7BaGxb7rjr96tCzSZEp2cv/fKKD4e10E+pfej1/dIK9pO6v/+M0UfAP9JY1mNtjGDIxD6HaznvdT9x3diVqoCQWRijmuZkI++0Pnoqj4zhifWgwQ9fAq329o8JUIqeXUl2QYgL9YlR0HtgM3m26Xf7UskebHjWRzfIbzeFl9e6Is+IHiZRW6NEpGxNxl6svmZ/x/gJhskGVZ/J4Ib9ltx9ry3YrE65C/jyY0yv+G7iebQxHim99RknwP4w6hLvv8RFL2PMFIZmT3BuBMO02E/e5D8u4bM3bwITJY0ty+CSUYZjeoB6jhspS93TmGM1z861SgUV4+Vg2kqZnLHpxrhrEsC25R0JcZ9EGkGjREjNsJ9V
CCHkEXWZNnBOnp/hb2Ug5ntiekvTfRN4mEXhY9iom8bdKnigA4HKhX1UJskGOkspnfZFBNgkNXwPI7vUSp4HH5fsfBI4TVSTMvUZgn8nigubwzCoyDzi5sn1QTqOEEKYlUAjIPfnmDJ6rl24uZYqe5L2x7VaVs9zvj2Fv44Ziqgo8A8M0FrT5t63r1901507Y9bWXIymOVI9ClqOUAZPLOysINXNm3B2hJ+LS1ANzoCxI69U74S/v2sZIePosYl2GFtXuEcauadKqsFDThf12FlB0j2ZAtob48f2/VOPyrB2cZ2CueoXLMbWPRRAcwNlfj3pzot0l+u7oxZh155Dw+YD9DRhWD6SydS6ApCW8uUxdSoN40Yd4JO27fljuU2A7rSbcFoTSEZ/2PBVWZJJBqiAZheRgPGjljtxXmqUNe6CvLDuX0Bvk64BjKbG4rmzjJJ0ztuxluj3Tyd30Cb2J074Z8fOMBTbr460vr5zpzSIzZEQDyMw2p7CNfdUwDSiTcQutZ8Ubqpde8+jNXUHsZZkDy+fN+XC9LZy7Yyh3MbGshUAACAASURBVBWD5PizugRoSrQNIM+7A+9GTpCbLq96q3Jyy3YMps8KUjjhGpxqny1c6rARAiKlkTNaXjwngP0GXZpUIlrEp8VwOpgemh6OqNbxGvaekmjaIw4NAcyaInQVZ6IjaQ/3N9wp9/4YjYhFwWGgFBr+JB39Bzs2CE9dvOeue+NlUzy5VT+QrX2bl1FdCF77wM6U7Mco6Nel4DXbgvFfes8bBdsvAL5CagxpHup6+snad1gojKMaPC7N+2ExLXMjPbwmv5whQij70QWdyfw2CdXi5dP6LJ31FkOfSk9AgX3w8lTu+Z8zBNfDFFVVsXLqLSDs5fGrt1DaDmXpKruCR5erscI8rirt0GdDI3su9dqZ19YGp75XVzJOg7lT5VAgilQnF40XgsVUIbjeUsvyrkvJ6UyGQqcY1sbtR28K91GJK3EGc7jmmB1mRYaaG5/mzzn/XGNcrYBiWknJLBNwRnHm2+RoAaN8ygnF6I8jWEwh/1Oa4Yibf0eMBJR2zwm8LAuYHmswSik8cNgliV6Sfv9MxedBtoc2yOO+OAH87nHyJk9w60Q5WaBfiWWI6Q+S4S0gD4/jF+JeKY86OIlNduAn9NrLdlwR7hemtGhAxHE6yc/oUuowcpRChER42vaA36eGEteqW8Mx7Cv0pxoTMHiGKIopyMLln5Wat1aGb9tEmp/2mwgjShGKZVS3cF72T/mH5Pw7eEJS2DD8yW2nQqeuftuf42/rIk2tDg0dCZJ2/jBJNXjowTdu+pvSC3HcEAMBvJ61mNXBrgeZFSo2eyd9ewGTkZRMN+zFTar+BfdapkWGazIxOLlQ+ZHb85EghycQQ8bhCThhZIx7IMyre4PcLXxtFkkDaIL+Kc1v+y9onyPXjihtZt20izuULMkilUgytUpievyxgoppAM0VSBszsEyYCnMkZhc+APz+IxLX/ZrjonQG7A0ndhs3MAW4s2G3Azyqh0MMXSVZKfG7Y33uILGOyZ1kJ8BtNPycGDyuTFgMDeI1p0rxACmt2dofSfg+cD3KzsKoTlp5vGu3IgjT1sh+6LyfrCp5z1i7ABhDUmDq3kdah2pGDD4Iq+xHJSSPI6+N9mcjHmsmGxjLupVzhWkP1+hZUq06bpQkkH/3jG6kdhwrpyZ/jExFVp3RUNJI0KN2u18/ZT+aBJgh+Dkwmfh5T2cq/5rP6/WBaY1QLDPu+ev4PnWMOXbKtDwBZIIpRB0rBgfUaHVVWo5l3V/negR+AB8g1eDZeRFfXvWDXt6wJ5gxlSSK4SwoEXtCXsCmaPV1myXKeVGA5avI9jlYmPvawBEgsHGbMz54AcNlJMj2+RLwPfA2dYOjl7P5Hf92tj9lEyrWeoP4ofdYKeUfkWWVTG+F4BtuailoFU3uGHUhPiuEEJavo8modLd1/b5Qzj5NsUfUGKKBVZXGeyRrI/K/
cS95I/y4hJtnGlEkiye0hxyjRLFuxAlVLqoYON/yErEk4LjouF4kRE9jSEHR+LMgIFrncPJjGjEis6t8Nw2qwhm/s6wDv3DQtTWRFwyg4xWRx2cbGxqAsWaI0C0z1TsolsgvCdknIs0zhChLAtz2Dof/bgcux7wfrovRxGjY4gV4XCuCKaKBSljJDVGByRwb3i8Ez5h/8huMqiaPterBPDZZzoGOGCR8J2Wh7mCkSSvuaCSQjTjGEYN9dDaWyCbSTEOAmw+GXh9zvQxbXjEUnrBzhbL3tHKsfqbR1JTI6Z491/JbYphiX+J4hBBCBZh/RixVN/LdKNbKFYJg3msHB+fYf1gGD/Pg+WXpywHjguyRc2l5z5K6qbzgiIBj7BClojeuWqjEKpeT2w2QdCkEz+1DICQBpSGEMEQZhEZgggupIxIkpcajFVtFd74k5a3wUGKskwUyEJNcUBcDLHdZRKxyUmuX1VdMb1W3hc9oJTnEyXs6EsKuH6s8ysvZSDQEXy2mqTAHSsRw99cXD8PDzZObeCz8DcUfZwjGoRrwueM3Z+0UntYfilVVVDwzaUrJ9hFpbKUxskzcp3nD7jEUwjx+n46BMxzFSyWzbKNiO9BOVyoLc9z8JOqJd+MYcnV94x41WVjdMSqOxJHLIb02c21E/O1ZmaQVO1xXTvErp6m7/+JZPPnR8ceDTXkneA9swhuCbw9AEsIQfFd56h91uIj90RRzOSFyqpxprgloSmWyGti+VQrup1FVOuWC9ypQ/4N7rrLqF//6klkQ92RN5AvJWQju2YxYqjPiup6vuVMu6jXTrgGkq4CtpZxLjOKUPOVemGD+0FnrS29C7qnqDKZJusGDhTbd93dtnjcXswH+gN4oGasR5fxK3lyyXWI0gUXb8QbDmXX7rqnkAOiVPRj4FVAt2+i1ijbzBkKCRgKoyUBd7gTvSlJfE7bakJ5enAiRgJbHQ+Y38BnxUBhZUTr0zim759J70tTuLCpeXPRAUmbn0RlaFDNbRnBD1i61BO5qSaprRCtWvfOkYMlXdz4AZ/hHJAQZU8HGWie4TVZu4kr0/RgyhOwYYbXvLha8UvFPE7A/Gqnh/XWDcC0dZEmwxJyswKo4i2ivEWtkiGsLJT9nh1P7QrId50viidJi0G7ydMgwdmUpsSfh5rs97+5T18yF/2sG7F15B9VBKUR2MS6lIg1C+38FwJfhOes8WwSJEjAfGgWnjLU3HElL1/05GnnE6ehGyrFXA56cUM7wlEdkP6jWRSkwWbFztTv+HH83udaKbQ0p4rghTiegHFwTmtI67CWXJZHepf+6B+HPT2ENIjBRuu8XOIHx2gOP8692z58jM7IrbRf9x3PKKs7oNO9fve+v617EM95Lnmcq6cSDuE9xXToJI1pz+46t5LOXPZdFC+G4asWH2Pa7ZoSs1Mzo0LL0w77dIy8ecRVkFvW6INYP7P4u8qHVHkh3xRhn4clFLby5Fa/ACwc2HrrZc5HOan6SRyAbzK/b80c3hOYe/Cx9abzHxb0vTe2Y7+V1B1KaX9tG5ZCUrLt74qtX3/LXHVy1MVDiRHpjkaD2Gb1yXBofAIz2UQmxay4gIPsQNzOtzmFEQNO3NDYZCtaN1OE/FPOxiugJwKFTiYIwTBzrWM4NuKvGhB07bISGnRmh0m7v2AwH0hB4XjddcIiqk5I28ARecCZecA4GSa5FPI+7zD1Ha+yfgxtLWTcFlNJzwx9LFRXbQpQlCjiGkzQmqakYCoxwaGRkEcRXJeE3rXr9wC7fanwzQqfA7FzCOtM1wbRS54JEl2Bgr962cwfCdzbFBq+O3/p37fm1iCeJC0YjfmP0myvV/R5yZsUc+9N1YHjyft5fXjNL737ZezvNkj30zZqf7Aw4lN+2ua0REkZ+Y5QUGC49x/XOSk51hPowmuK6BdFpGKkrb/nxHgHW8aFFeErwXMbCQjpCD488cCi3b3nz/LknDIZNqvYQhF0ZEZ7evv+uK7jH
ewe+ZrNRtheshlIBxorrGi3IP1Zq5A4kdL1s98+9ZyM7FRI08pio1+5TE2Lxo3Rwgh4pGgXh4tYOtg4gFqsWwjOykkCAtNzEDp5IMZpSGpjzu9Qoi5XqQxhBmlbYc2vxlDtTnlOsfgWT8s+Z4rbgwav3kxSdUeU7chgImW8MSvL9C8v3bJg8vjTSNLpEY6sGgP5gw89tlwqT8YmaNgGLUn1Vh9ImkVq3K4YROU2G+t1IA7FkWFioI7xPZbStwMs+WJJBgGJm0UOMDZrrWJb+RLiPjp4pLbC5eEvCg/DLx/+mELyzNwneQlj/vh3vPp+8XlgaPhId6Zn1k42mQ1CIVLx/7krWdd63L9txDDpAIxXG93BdLsQ81V5XBwP7QZ/ZMHrp37n5tLvu+ZPGSXG75Sf0BItu8wnPjri1bfvv+Gl0ORAamKR0cAhxri333Uzf4nOxXl0kJVTsI9rfEPrQPS0GMh3oD4t4kNwZeQlJE3DcRBXVXDwoXnfY8lGLU0hVrVXsBWxc9aNwY98SiYW81wYkCxsJRqhWNcV5WLaXqi+xcMdWyvCE7OhosjYnalx7ejl8j2CVXKdEf/vLZy1h/e6bZv5rk0B6ihpBchNIKgsYrWldYgm8v66HDr8l8URzqApjSb/2V6ORpnn4tHAzc+WuRUdhAbU7xEU6tPyU+fIYf8zxZJYh+PdMIHGsPQU8I43OMIzO5yrtCxEZ5k0sCgVDIFYKjT/JwaJzj5w3CtAk3o5U+SGE0EV1FIk5NfU13bdzpQN//+FJM6hGKyDVVFgYhm6l5ENl726b8zZbFQ8BEaQIx7oRpkX6uJFzeNQIZiRx6d3FA/J7wC7Wsjh35BNypewhhMOrdlwUXAfT+053yFjT01djhTg3GuyaIiTLNbmuQghurtQFkM3qruYNe4G908FfByb5jSWvaFl+Xo5svv0Hz/62u+673QtHx39w/5o7F522yFC1KKBlrLnZtj0wq+dCCCEHmggtuGCqW7GkM3wd07B6HSObWsHK9eMIVCWKw4igpqnTJL0sHaWdEyGBK5PwD0YBu7yGEMKDrsW9psKhs3Vg5xonTOmVhPH0zLKtjlrBjxDvT6BXCI6jyr3USACIbHgaSSUIgV4zVH7MBOvDPL5arcSoCB+fMwiZV80JDoh54bx45kyZxbySCwj785ZK7Y5hbd70Yzxaxj0AfGaaLQSvjEpd/yCMemnTOYKuXdnjgzRX9+MRLjQqx7KEZn0OW94XPKFYGylWMLDTtkR46FXHjZVjHjzE+7jRK9VqMebuNVzPv/nOY2B3GnqSip6BZ0QdlU7XtFsDaeqxgJvHSCuPpJ+Li9pC4SrDL2k0crJ4pvi+SMHlbPQLpuXY+8QGEePXYUseFFJUpG0Bm6TuPbN4QH7OWWLL1GFxOB2pjnJRcNFhnkgT/y84IGLN1PCkQcLeXFqWnoYvocET6/KN1BUrmQriPA7O2CTYrPqNgqmrBvLUY1mArxyaFaVszTRy7t31oZUcMHA5lKUX3vSLwlVsyoQm47tGS9fQbaB1Cca8rAnuj0pB0AOVQQ7pW2VTpiOhzNxpkmrwEDTUrgh3DcF4UF61de/mHX7T3v7Sp3wupogKpbfuGAy7ueTLTi4s22za6vlZXi2aNrt62ocV3rlv353vJEc3ZmgMWJCWDuMNm0AsTZ2NkxVPdUc8epR1F4RkbbuDDr8dNBkVQBsNKn35zRv2fVqy7rhi8FiKsaHR0brs3zUrrMgBpAyaXPR6zpHXyfx0BgDWb0ciZYsgrDSh56LtAKgM1BAgGZ2L9oQQJpgrjh08hVpdK8R4rasTGCZvQBpB6qMtRKwvGvmt8smGuPuufekbhNLagqSiT26YniAGcCBlOaycigQbFzrH4840Gsbq0xNlH1r44tW3jo6/efeiO9e/YTveZAUA0Ft+zrr0Ymwc7dgxKEtBAR2VueCwFkGI4UkivQwhhBl/oowFjZC2H2oXDXJNKVf9TdZeAsbmCUmL
wSGhIaMtVeg8xPhjsH0pfoV/k6BWcW117I+ajnp23Rhli1Dcv779/zH3Jj+SZdmZ37V5Hnx2j3nKyImZlTWBVU1KJAtskg1IG6pXDaG3+nME7bTURhAgCeiNwIXQQrOparKbJVYVi5Vzxhzu4bPbPA+9yC4/v/M9f68ihUSF3ZVFmPmzZ/fde+4ZvvN9H7nPnQ0tEtrY9h4DcWelhs9YEoQ/RcCuuDMkl1zjQQiecy8j4Pr+Dv4d7zO59aLEj/w+cu9oQJYDLi8iWJ4wEh0e2pckQr4UavDDno+0Kt+x1apCZ9W6PZCdzXgpXZa0el1/2jdQ44/w/MCQssQ6bwqRk6Oh9989QzaCnyNTZQjekCoQjhul+txfv1VDqNAgjaiAWUfxDs8A9U1tfXWgUtK3C/FgHlw+dH5C8IuNf6eLkIGICpyy3FV96dcSuyFcC3yM6vcbHXT0t8Es+krKe5V4h8E9P3nOBDmyW0WxWTQoU8ECEBDq5ESkJZsGK5L9cZpe4rwST4boSmv1hXOU1t73Xhmzwpq5aRbMLuyfm3WcSlbV0WHIPBb53diqEQkN/N11UZwczi0Im0oWaoG9moYt0NQ7y8+6DpgBI5WA2iDuq1XcE1qC+s3o3fZrioy4GRVAhhlU4tY+ykJlsMyrg02pl+oLOYzR9u4CFbkGy7IRJmc4NbpfsghiyF2jWaISuHHmEsUQ0zrGoiW+NYQQhuDoKQmFyxJl35R4Gr0h6Fd4fvtbdKVWdqaFEEL5JRwNQX8w8KLtimQ9cUYptQkD5QIEWRkIhuD3hGq2JY1Eh4eHVvmZ/+jgoa0aOhYLKVttVW0WXo79Ncix8erQrOXujjc8m7gGs0IhhHB2YBZAs0uzBctMAMOW/ENMtWxxDW+KMwSjSqHSoURaCxjOpfABTUGQpsbM6Q1Rt0tETOkVR5hIsdD0+sRY0BlSZ2UEx067fgjkKx/Ht20yY6CgZRrF0Xo8sJNevWYuVmKQtwWbXzNmFM+LdC+xS0uSWHHZLgWDeqV2yephCUd4pTAcf49kFQioTdKx4f0qhsdFjlLqJl/ISLhEThDB5oCVm4ltyQB7N93w+9aVBoHZmOs84nk25BQeY5OUij6FN+ni8MA1VLbGyQwoUTTa+/metq8753MFu7R4YnKuq8/9vQ72gOkSLi7u9Zk4GnSAfANIPLZsLPwxZO1lp1ukuwpzHRHMps6WBnt4fl2D2ARBZzi19HrJG1p2CeYSkOsN/F1v7CdrOLYbKQjmjfAJUjzMFWNIG5RELSBZLq5TBnWK11rAuT1/R3CFuMYQuFKdDmcbi/E2Tkeiw9N5CyCqm/70uQE21O7IJp3lpxBCWC+YESEKPYQQ3lkzd/1O3VrtNHVNA/jBpnf/X5bs+sdd/wSGMEoBpE6ZgXR0AJBdrfkUyRhOGj3m/KYvu41b9l25U7FsWDPde+IobYDlOcWD0H+O3QS6ERlFqlSBo/HGItRl7DIBsiroRFHQM8IiGqOiHYKPlooXUsYpX10ejMOivMmRIa4NmQ7N6jEFE8nwUHZCnCFyixALoOljOisREDvlPwAU1czgGB0RPHxDCCGLCFCBi/ws16JqGRHjNFFnBUESM706bq1ZevQg44ETnRswiH2/aEfoonS4K5nHChouRku/4BhxayTdP0VJAO3X2h2UxHs/2gKTM1q2FcjJoeWHVRh0LsmSPLjhTym1uxw1tIqffse/l4PjFAGdYzCrWjyWzCmcHDqhisVhZTdSRsERqLgRZjeZhVLCPDIo39sRsBZGGynx476HcbR65lVq1nMJqMXObc+qeDi2H5tFIDGvSZaoDZLeU+EbgtNK0tEQROwTt6Xkgj477d/iWuKZoarq9Wd2kfN3v6UMD/eqpnRJFEiv9XzkO7FGIBHLS4fV6cge5GbRVtP52F/jR1vPLl/vD31Ol05Uo+ydEKbKpwAjz0XpnHIVE9Hxqv8/ZsVbf2zXX54Kpfe2PanUsTecXnPJvRUyAJ0V
EEUORDdIAXocZZAyqTNEnSpH376dALLV6IVwCPwWrb92cQBJdSBs/Noc0+5tbYPEazgH2pq/CsMx9SIrUhAStAFS6JpVoNOkBtwztvK79E7iSyCMsrkedO3t/QzqzD+QLio8E+XKiANvaiTH55cRcCW1lLTRYQayQSqWl/PeE2iTM0scC2a2nHCkdJwxyNComozPWh6YoQssRVyeJh3YkJcgrsr2/oi+ELIVqygeymljECTqNY4gLlKWpz2SAKEH0Hbt2dWvQwjh4j0+c4UE4H5xX1oqzsEHmXvePnc4swPx6+tf7ZRpZmKOzrWW9NXX0SX4Rc88x3+2+8R97u+O7to10gITKQmmgd+NfeZA+IIr5W9RAD0Z5Lt3JRBCcEWBaz27OI9arXDEqwyoxU7SyamKAGnSSHR4Ks/tWwZC8z+GM0EpiOncT979dUtNvEj7Ij+dnJyGwRivRmZVJ3Ia09lqfeI5gOZ1glbiQbNUPs8XfbQ5/At7Ikuk3jeF4+DiVxaO6yJnND6RjcKU/RgYpLSoCXOTagRY27f5P39bvRX7sc1H8dIVLF9mRQeLxqgKwTgHUgv+cFVK9Vc/tv9QTSEaCNdaunpNWs4YMKLUjjUOtuOG4I19QXhAWN4j5mG04/dH/RFwAtL6mnblLnutAnzH34sHhbs1Jj8tToR0tBmffYjgBFDSapT9nqsVrs740AEJIXjZF0m9Z9Gan5QNo3Ol3TAnCMiUILJUs3scTGxCUjLH/OHqrDhNJxDl6Xqhg6kBzSoMluAcO7hKFiCGi/CFxSiuh+AVtWkfuvelE5SYtASNJqfoLlvAcfloRy33kmDBXDkNx5yCogvA8JxIA04zb4vnftUacBpSK6Y+ZXcogTe6FHKSYKA6exFn9kwSAK68Kt10PKY1gOK/uZ41E8SMj9oSXoNVjf51/zk6Qywh/raR6PDUn9uvG9z0Ezsq2p3dWLNwfjCVevwYbeNz/3U91Cw7KJBeK/tQMYOVNxPCk42KnZ6Zd/wGICnhSQfGS7x/Oh2VglDgY5EMW7aLtLNkBtruyid+DuZ4qMuG91aISSoigu0Ix810ZtcsHUoqsQ76fSkzsTrYvgu+ENnozBJo23gRoGsanMg16Nip3AGjfblHxy5M7EU8g/obGyTvIzBSNWcYzU8kUmQEOxRQI7MpNLB6WBKEqaU/GmpGtiqp4v5GSzEJpQNiKljCyMkhwFJYXoQMmTE+bftaWB54HPJsTZXsiFFq3VvfadpszaJgP0Z19PogOf1cEKblrC1Ulu1D8OWu3DlToAogt9cRRlsEYe5gVHAzJSiUCX4FBtcfHY2ckDDyoBvLo+S0MTsQgncg2ABSOvDXZzm4c18cjfOr501xgi77IBlLJ3mRkMlj40FaHHGqm+9UfK3nZd+qF+9ULW1/JnWfM1RR1BFvFM0IK4XLDAF7B6KjQtcT+zxD8DZeZSdYviN+S88JbrOCZJCov0aGcS39c/7LAnJPGokOT+86oqT4JqowAkOp6nw8aJibdiC87gcte8BsmVsIO2UdzKuqaEzdHQK2QvAaJDOUqioVH0EOwN/TlLLYaZdkKPYwjv/JF2eXYNcUShDXVpoWQDMxQuUSPIG+ZmrwUluU8bMrojDeuYU0ZgLGZqyHMkYeaPlFPn4h++jOv+eiAfnuzBgOLbrANBpfhUEQnwNhxifWIuykjNLVGeIzYiq4fBBfL9dBzIojdRTjS0dTcTqLhJZ4rkUHpo6oFdpLDRDo8MxG8WAtkhBGNIQoVDyUxcjbYnZXfD7ymDzrea+VB0Y577300dTuuVdOkIXAvGrGwDMD22vNgM7RNRkRP12B4YhPYT6jJIz2Wh0NJ9qpuA5ck9fXUgmzERFy2f7Vn0tqS9fsQ2KmHtlYQgAmsq+WwNysF4RgFyoCx4gYx2JcNkDS++IT76Sv7xlb4qen/ozKkTsPpbWZrNnKMzT7aDcdECWadSHudLCLgEAwPC4IEIeKZwrV0pNKXwry
TxqJDg+N6qwiD25gq/fVCwvz8gJuJtV1q+9/XRmYFTo8+y2P03mJnTOXev9kaPfBbqsQQjhas3/nkJHqdkScdNtccnWobq1bSDGs2/XOaj4qnbfNGGvEzUNhIZ1qzaY9yd7PLYSoCQbGEWe9EuzMzfhDjdkZgmDVqBYu4Fk3/DXInuqIAeU+RhATbTz1BxxZmdU5UFXi3wyVv1iFUXKARGxI7UQgD5H8PGdwy36eNCq+/BsRVKRToxgbKiS4tSjkeY4PKEEXRx0eZpfoTCgXEZ9zX+gqXGQqZ/hR226GbbzqNGULCGhk3yrvz2+GYkdoF0h/EYLvMNUWYgZJS5KVyufcwavZmcXVzkv/utZS7OUqqqXTmWA2Wx0SZn3V+XNrQN4bg3W+um//r3is3p14cU92TfLAXUq3WBzxZwghTEE8S86fEPxv4/6OUBHg675s+5N6t2aewQYM9C/aN93nyMic2/MeyauB3cj1hjdKnwMXlEWX1ljIdkk0S1HUEDyrsTrfLMGTKiMrmd8BnEXtOnTXAAGikrc6mSRtnkkYiQ6PS9NJXXmSNgPG2qm2jT9q2dMfnHtHY4l25xpa7TpDAXOBiLDdErcb/DRaLlqSzdUZWEnFIVpjGUwHWwDz0rUxQuZGU9ekIW/JwVVAFqqFRRLpmoFRSQt1JZ2c0pmAKxEdcjPXn/nn1IbshGaJ2ndtc9PTnkkLrpvWSP07Hu9CgHPjic3/+TvfQBXudzT4bBekNxDMFbN6SXgCLSVFDoL/MiKyDeyqUx+JdXbiP8Tw0DCrQrd7luIo8Z7jAOch+Gh5Ll2Hi5ZZrEzT7zkqPpM5diBdVK5lfRCfJfLtslJyImhZbNf5wGzNUDi+qHuXhj5SBNsBJ0dblNlRucAcR/SLUDqYJTQvvKnBqNxpHCl1G3ndJCHnwLEKwn/N30x6gLFK88Cecr9opsnhjORAZwZDsxvV53gP/gn5eULwagMUzw4hhFzNFsiCTQkykdR8WySUOPXvCFSeI8Ma0R0jIalkbZ1avWSu4ziiNFNWf4wsuXSxxQkmaxMMMY31p6/PPp7o8DS/tItOKyrOd/VE9575stUYjKqZnv+64dJOgtw2WjvFwNLJWUqpJ3dm/04LQdN4k3liGDZRXe7DmCkJGokO19bsaZyf+pC4+BSAY/E4ie9ICUNzf2x/R72e1EtvYB2pW8pfg04OU4kheO6dVHwg6g6ucUNq48gSMWrPDuMzBuOmvwYzFDo/KTBlnr0PRzre93xjwzEoY6o1de06QbTUg6HCos7AYB9oJBrHoB1CCMUT++wwoeyWQ2aIon0h+I6LhaSo6Dg78Kn8zMl1lIQEw8OSwFrNe3N5OB7sjmKDQgghjIdEVwrDMY07DG5OAjceGASDhuB5Sz7Z98jwLER/l31Ir6hFxTXSkoljdOv5jKR8icNbSzCrMIi1KILsXg9A5wypnh7MqdomZhVoB7XlO38OmRAlxSMPGGyRziczxWaVFAAAIABJREFUDFqSy0HuR20YyV8pVtt85K9/COmEggTNhHgcN21CHrd95vHDTStbPfr0rntv408N0PKs7ZuE6GyRO6/0wi9aOjU6jyQirAjZqutcw9xlpAeBGeIIbxzsFTPX3Tt+HqtP7LtHQniaNBIdHk8k59+LI9SKgIs+tZ2c0W9DGDyomeVUMUFHXnjhwQtzHKyTYx9Ks/031QOzq2yUrW0zdBWp1TPqq6N7pHzdf+7lwiYof+CjTX3gHGyJT2ERCtzJR6natooSlDItE3je37M56O9KKyLbSbUdHI+UshljydQ0H9mCad33c8DMkBo0ymE45ffE1flmBsHIjlckglfA55Q3CVkGjYK555jGVYwNQcwRB5KOLaLSrPCguK4tWW9cY3mJrhix8SCJ6FQh65mRICZNQk/JrGxCY+gUHFxKGVFFp1RHSt25F/YDlBaAgyXx7tRvrAwmslbz2D7qfw0KEDgVh4oHqHK+ECbg2LEr8ferArCr
MOYorzLoIelpCP45RDKK+MnKueKddmZqZE1R5Vv2XJwunTIyEw+iTM4VlNPaD/13VwDSJQPD6QdiGJiFEgeeDvZ/ODBH5sd7T93n/v7o9uVrln1CCOFJy5wjxbQSP0pmcgWJL0ADoI5pHiV37SEgASCzRppFo9Oq5Sj3fQwa5SzoPIScy8XrSxAlHinUgFIyOhrZCviNJjX/gBm5zHWCqDDeNaMxESVygqKngoEhY6sag/ENqtWB2Euu3wa2aL3so805NiZ1r9ISVnPD6iFGvIwkRfyixHtKXc5UefVEsDNwGBSbw5IWwccR4U+e3WpvqWBOXR95nnRyIh0pnPIEwWd2aWVHqxfNciNzg0ccfcyntoOTZVY6R53TQG4fpVbnaDyRjOg9GH52Akk5h/cccbxIMSBYBnbKuMyjPC7iYxQbp9EtxxboKi7GmBC5RvvE9mO+JkzIDeD30KKujiNb2xuilv7FCXQAhRV3ABABnUptt6ZzqDw8zhmFoxAR3cRPiyt5vslBzhsXzChIG/a+8FIycjGCtCH4dbp0GWbJ1rG7SLBaa5/bzXRvo6QiXZLsQCT/Twg+o6T7hVxJ65+hvCXipKlM/AMkjrXXtx9wNvY1PVYh5qIa4M4huf4CTnoG+LfuHbkPnD1lUYWnujyd+RD8+l6U0F1Z9Y4XxUlzz+JL6ey602437W593ZHo8HDjrX0mXRYAmtIx0q4TetNpOYynUI4tguFYl8TBZ7bSlP+mPTCDGDFmMPZOGbrnH0Bq0x7+q44Pw0h2xnNL2wFT2fhTnJHdUgjYHOwFYogaXez8zO5x3PDGgiDmsQCO2WLOQ0wZmenwVPcFxFa92qBp5ioDnI7iENJgIVMAmucYsn+o3tcqDK7nOELGr9+0l5OGHnT2WiNRZoYmcGR1rvkcute1ln41KDBSxsRtaaQ7blz9uRDiQZk6B5MLMLC/5ZkZmantDP3p9CxnXhS7odYbPlQ8wcaaDgXDA/bYOQKmyHPCj7tZ8eLGRaTmvjj3KW4yqzvOLDVBtXi7wCydOpUcLLskUf2/qVF/ZK+p7K7rhpE4AcYhhFAFDqN3VxTAUTphQBDtVo13PNnY4Zjj5byicGlROLIcV5Ic9sxAnP0eg0x/DZa+hiKpwoYWklm2dvz+IMwi3/JnwWDL1qWWikdoHJgD3yoICddxNhQqCwbeEZuEsy0Fbh91DklAqU4l1wyd0aInjQ6lV3b/1DT8bSPR4ZnAsdz8ue8tG+3ZDj3ZA45GUlRlHMYR455BlPQdO0lGAm4uAImu6e8MiJbm2gqNSSfAVI0Gs0ZKgkYMQQHfnUn7Se7MoXqeoPsRKX1QxA3gx0gK7xacIele8voy8h4yLcz65yTrwIyMOhrsOOL100KlSh0hTXfyh2vmJneKUl519VL2HG59I3pXvNEcZYmMApoTOqdYt3ZtywkCpBFJIWSh+LnI/sNtsa4eQgj1J/ZagYXuGnjOeQFWc10qPxf5ZMilFYJvB6/XbWJn4qUTW3RxIV2TaP9dwBDnu35hzjftvTVBhg9z9qAiLM+4ptt/bcEYuuySdJmhjEUnWDNB7MhTrqNVGP0bV/+/Hojs3FHpHOd4yE90mWT8mV7fgW8TMmFJTMgOgC02mE6OduLGBZNKrUHsTLbgv3yCzE1mxwx0pJoALE6SAz+cSGBPUsISAoKsv8kKQcWCCeX8UHrq63uBMwQ4iYoKJ2G53Jxj3Y+2/H38/810JmN4cDNnH/nMB8sjRNjrQuCEaZti6qFFbBtVGJuqNzw9kH61Bt4ZGoA4rHriJ8XVkDFDlacCnoZ2S0e4PojhcWKn+x5IVvnCfngkkqaT0BeDW74aLR8hxGJpUA6uQgwxYAghFFqsryOte0PaZxNYjfNdgCaxN1QokTgv7Sqik6Nt6DT8Ayzs/AqKhxJAyixchH2Yxld9ODo5iozEP0nKpe2hDtOlfEjjmINURUATzs4eulAifEtw+niIqWErrZvRvlb1oe46
uERUcubtmoEKXgxZ8PfXJ4hZD48FSuTsopqseSNdadg9jqUL7BfHdpLrPbpDB5dUigCHp5KuH2akcwlgWVdGXUFcG4VmWd4ab2m9FkGPOJ4s4Ss2xxFYYp4inVgE2gtua47vZklFtdXIfJ6VIIbPsvuW/21llOjYwLD+sb/GxduYKz21kdWZsiIhMkCEZ6iaOVdwr+PPsiVKWhNEv2lZU8xcK6dcDrj+8Z723NtLlhsjGR5ka3q3xblH2ZNs6doxx+cbIU1NGInbhwedjvIhqNV3zNorMp8HsCLi50gLXzC1vO5Ty/caZvn3e/4LmCHp35UfDn6BfMUitEXeGzZyEhSlm4QYm+MLtBIIJ8gQD18ZQHmIUbU9hBBCDyW5XfvuedffYw16WWoQ8z0wYosQ55x8MAiCVRGdh5o6JKxOzRO6cuj4quPFji7t7kqBj4QSF5VXq8ejT8ZWGmJtR6YzpKVW12El2Toafpc+VowQfSbl3ItxcnTdOBI3sQS0xZo1dGBbGDbN6pEz66l0jNRAJrpd8mXqPaCkn/TNi1Yc0I2mfa4z9sb9ABmeVB90EoLzK9xFqViY4Pdq5qR98pXvQ87AnlBXbyYgUkeAqDpYeI/7RbEpnG89yFdhUH3cderoQYSsjvJPsQSuB9isAuwPOrG0484BeGWaZlW7Br9rvCHleyzT0qFw7SDmV+mf9U9tUx/90O6xc8ffRzYXH1nmtswoZxFoZ6WaQKqDZcG/R1HeTMWXW4ZwgAqAkMxE7Jrs6epoDJkJVngG/j2+A/WCX4uiO4DhalsCCHxD2/5uIeslqWEkaSRjeACuVE2N3g1zchjNq2Kyq6uue+tewAOho6F6XBQT1VISsy6DrM/+5MowSnhuWvNLka8n47+bYMXzx5gEqc27g0UPDzxDTcNO1rFAL+wPVbSN8xqJzNOgzm/5BZqDLlYPXVqFtnjMjk5colTsUcfDI8+aWRzF6UzqqOnKIp9S7BJLpL+3guEshscJiFODCE3LvMRuRFLScGxYjtLI3rWGJzgrzCap0jYZfCNlhIRWevddXNvaagw212bR11BpxEsJDJMP0BFxMPbBDvlIRkJJwZIZbVBa2/sxWXOZBDpYWgZ3AFA8T5afdOg1mA2izZgnXCPC87MCg9lnj8eTOeM/NGhr0Qnx7zkwLFmMhe2YjOYRIk38HfdfREsL36UNINzvM4lbTz6yC1HMufHEG7tDiF5u1X0Km6z+DBa0lMtsfLYl634TmSy5ySXWH7sT80KDwKztzt/7OT76fWTpyn7f0mTMQb+iPEos/xelKtNdh7ByNn4fsLNMMV9JIznDg8zBcFt0UZjaouaFCopRFVk2wDpKV6WcnQrtkXdcKnivM/KRHBlQleOGwOLFsf2dGo1Z21Zyf+SnJIXN13zfvJDTfUG7geZ+LFT5TgRyW06FCsN9ezkWQBsPFgXCVSEeevauv/88uj/qz+1zvevS7QZnJdLmzFQlPqfnlMP+iEFLT+IXL9fMEKzOq2fafemKnTVRXpH4dgnHvSOGkyn2SPSDwTSxKrUT2JmCMdNOMmYOckqQxoyDlCe5p0dbwLLIQb1EB9TF0Nf8SLhZbvqF9POOIUdvgARKHaNHpxaAUKoihOBY12k4GemH4Dtj/vr5A/feaGB7ML/uI5VJFwBQ4AOzojzNzaOOKbNBXEtL6TBy+mrfgFX2dzWoVt27EV+SJlkcy4wh+IArL7pXUwRLDCwbX/jPOQCslIviHEUtQfI+VA/QiRwrgSU5Y1CmGW0IpguZG9VnI6s4aQ80w+OyY5IN46mRlUaaBXA7BCo7WxWCC5JOvqvPCQG6TOkUZ2cmF++EcI7Vuc2egNAT9k8zggy28zHs9FeN1w6htQ5XPrb/YPSeRCSk46xjd72NWuRI+DYITpxI9mdB77fsT4gFPFxScE+OvPHNglBqcTNePLSLVsHGjgdxd07NjVUGStdWKsyYLhhHujOJW0WxUCSFzCl2ZkhHxl7X
Xvq56t6wOY8QfyEbNATGpvYiPss1kwwBy6Ojdf8FpTO7l/4uNs0KGne2bGbgTEQAlJgaLUfx4NOMAJ0ch4cTA04guTokHEndP4x0I3IovC8psbhsI1PeyrSM/VgShUJieLSUdL98Eq4aGcmisfT91b5o26F8Untk+7v7wHumjKRvrnnCoTQwIo+OlYjsaqc1HSlRhis/p8OVEMV5dmXOGMLXNzm4dtY/sXs9+rGUreDkaGRP/jDl70nF8OuoSjafuWbZ50Uc1CiHzCv+HilJobxJ5Jxa3vYPeo4OqEUTjkXHbyyuN5VJWkMCgJUMLeXmAOSfS3IgTWCyNPiMwUaezds9Tir+c1mI4UbY02HLFvP4Bb3E3ynFCodmPRk8LFyWXIhs6Tx/g6xnosNDUUeldiZglQBbTSVyoWmkW4OIJ8tYjKxCCOGrqRX98nl/UA/xWRXmXAAHkynHn56zPVtAGSXFAwtzEeKenRd+N8TR7X/9H/ZS03SpDk8/8A0J7oPqszr4feqslE8QUdwkQ6T/XOnkaoLCEDzPT/GcdXLptKjB8er593oASUcNur239iUyZc3X99x/V4ORnGs5lt5Ot5Flv7tyVMTRwOfYAi8HHZ+zZg5YCqPBUhE/HsZ6DTpDGXGo+FlqaS2EE4SM6f2J39Mk8VRxxAY8uP+vfefydWvijTsdJbKghxDCxdK8su4HYG7u+O9ii2910zta5zhMqkKGen5q71FLaykl8SayEOff8Qu/tA+QKoIY3cN8FvoMV2HQMWCWha3DIUQDNQ7HP/WaRJcaZAx3WFNRLNXVIFrFW442kXWT7M9oB+8dSdch7TpBxQKSL8Czvdb0zN6kaqhAZ7IozKXpr2xtzxqS+eA5OvT3SGD/jKzLkpVksKOO6QR2iNcIIYTsK/u+WQPBewL/Xk7a6pkldskBTUIB70Qs8G8byW3pyNxcvC+gIUZ9CW3XoQzw1Zm37q2ireQ8SMoKIv3AVJ+yUy6Av9EW30zDFg29ZE2BTXDgzqV7gGN720paB7sCDv7KPD3lgODGjJCP4d9pzMHyyD/EAZjtdaMvcvHt7K275Pax/4+UYMC1k+vHdwTx+oMdWfA4GIkdCsFna6ZCosd76d60a7ITcGUG/T08u6SOJz2kSFKmmmkE9hOUGWkGcCl1iZZjVOsj+DoAYDXlv8TfTZS6HV/t/k6cvnneFkt1wzsMx307/eo5/97HaBHLArDQzAvbMVDShy0JQEgGSIJFxSqBk+tcomWWElQtfbhjDtbkie19dVackyPR8nAXWQfMY6TMguek/C+rMMabV2dPFN5AqgMVw3W8VZJZcW3kTCgmiDTrHBIXxa7WSOs8CFmVh2eKPaglreIJgj2cISz5huBLqKd9vyGL5HxDs0xbAPnuu9Pq2NnLSsXvKzr3Tler5g+D4itQOshZxmaXmThK0y27/+qXdv/awZpq29/VhTS1jzNlvBm/7kdpQFS0GSBh/BbiQXutrYLU9mFpfbztV3kBkvQzaR1Nv4TC+D0zIDNhU56Rn0C6o4KztwLKZE2RtNri05TX7KQePfcpqtJtO62mMLBrFW98D6sAVouuDyOWCH8AvN1q3VZTd+jnIHuINGMCz48aGccrgcW7/onfDO375mDp4Z1BFxUzexnl00H5zHWHBc/ZE9Eag5I6D2sFT6/CiONUUl4RZtBmlXgHT402Dz5mS7Wrjp084w35bpzqKTz/olSKSBmhpZh5gjhpmWy0BreJsk2DzVWJOs/PzeEZiYbVJyd3Ll9vQlZmXYDPxPqVi35RtQZ2mFDDL4Jj2rW/q+b8NWb44Xr/xAzxLc1y0clRPibXrcc2ZDlkHPfJ/dULAtixxLnQbD9H3j9yhw0U0XrfJu2kJfznMoMEe8G9itdaNmdwEuHhQTfaTLjKWOVgt2KuI8/8OhwvLTnhnGvifJnJD53VkbGUqgalitLp+IyrC1rEccmyYiPOZ/0xoAnCz5XCOujfAK5NnguDOq0U
154wMRGPZ6RjrTCapJGc4YFMhKYnaWQn68jidPzn5m0zPAvxJHP3bWbn7Oho+Bz6VgVaWi0BCzPtpWl5kIDNAGIM0to5vYALKvfYP0QketdOncNDfx9L1ogl1efkGHRPkhwxIU3AOijbQPX6ke4rGAh2Q52/J7pB8H9y0jbujBjxG1JmIa5ECbeI94n8HZ5bHqUw1epahTFtXJ2OVYeBBiWig4XPaqRYfYZsI0tTCdpAqWn8PPFQpU5QCD51vdAoCbZSDycyoPL68w2ZBOwDBWi+c9NaWXpCSvivHvzs8vUv2wbUeFj1oj+diV0zlfLXb7FhAXtTHcwl9t/bdU/n+m8+NYKuSsV7nDMyOxPvJI5vnDPw9U3jc8xcSODGTJ8Ga6swGGQRQK/inrQxynBceGavhQ7Jy27gPXWomJFRR4ZUG6RjILNyCH49RzA8cAQ0++N0n9jNJfiV5b5t3KOKz9xkusBR3rMoWYk5XSVAcDSkaem0JbXC5hMkHzItcUjwZw0RP23fB4bzkV+MvY9sjyxI/Kn3CNmJnFR9erdtr9Yeo9NLfgr11pK0KnUkZ3joI0hddUx6eRjcSN3zmhnB9ECcphOooAPVPSv51UpHQDV4+jFeawghzAHGcsA3ibS4gHIFf33+GgKmyw0fbQ7gNEUIFulDadkNyHkuViUoZOSvfCfMGqmTUESbOlvFNTqiIxNlH726/k3gegiezFCJDMmgPJGSlnNyII2RlbLYKoy0axtHNJXgdKTVmcAD04yfkyEh1UHCM9f2Wdd1wb0pXTNJjKfEVChHDw8MBj5LAeRn0bk4E6OXxma9VfW8W9dyZmwWKKuXJTVYB5dPhMk5gdqeg9w4VbGc2+tmVS963uKyxdcF0gt91nhLsj9uLTFTLUBOdmUm/ZY3Ndj2TyFgLeX2kd3QEmrrIe2z/zs6VE7aRogBE3WemOnmuk8yMRoD5K/+WAg+iCEnz8EfJwUjco8o+2a1ksHPITmQUi4cZngk++PYxxdQHtAgAHPM5/L1m/aS8x1CCMvF1feVk9Z5BlrafcVKEkWsNegisP2bNLckOjyMKpVIrgLRrywWb+ee4D+k/ZAjh1ZPp5YrqT6mrntDfyPk2lEQlQupmDJueqcm88o87VHDv1daAw/PmXkyD274aPBL1EdngvznClIMwe0dwwU9O7TC9qIs5b92/KMidqbQFiItOBqNxzZXx9/zq5y8P5qu5QbgYdcXHBPLLhrNjqDpVTkUoB1KV+6RrWJJi44zdn9EWgLLVMm72AIewWvQqCYYYwccVh4eSkvgeklgWM1CrYEhlnpsX3+BvRyBAVqd9BkOf1UbHwFwXBWL9X8efv/y9b/Y/rXdh6Rn9rsGqtDsaIrlc2Q5y1/6U4s4mpcjD1YqACw66nq7k4YztwQdRk6Um5m+Lx7698jI67uKBPB/k18cVm6wZEGQrpZy+TnNtNHh1gOMfi731WhX2K95yApxK0kJ3brfEEAwgvKqCFt23oFC94k8S3BcjddBjigHNdmnK9s+Uum37QxptczBXtuTrgHcY1o15O6ajS9L4qAPu0C1dHWi+e+kDqv6V35+2vCcKvvx15/W6LWKv4DAaHALAdOpAqvtteIbk0aiw8MoTw06Mwc8zFSEcF6AYNzdeJZJPhyVta8W4124cslurD3yIDBn9Nj62hWPcxs/Tnh4hi1bhMV9u6/Whgc45p/ZToxI3mNPTUQThErw6X37rqWUOlxZTIwejcAyo/Nvr4eb9tu2f+k3eg8kf0tZoI4jI0Fx3ZV1ZJ+wRq9dYPxttZdgjd5ZQetOxxyA45lfDmHOsqlGiiSZE2bebEyru5KsOd4WlYzIXB1JJ2UHloJtYZZycN1/NxsW2Gq8ENbX9BB6OkI1cYouqiAG659tmBrlr9D2c7PoM0FbZUtZ/erLm+69NNjTU+DZmjQV72Qvfw4piRBClPofY9myayZxIlEDaLwu0Ti1h9gEIl9LMG7haPWCADrVzPZH
lLyH8fU4PV84RlsA8nfj5ymX0E7tZQ/s9TTvP7iA88psVQhSOk4wTa5krQ1EDOgk65nBvWQQ9EdkTbjPpBmAd9w991nJNMtYyP5o5nd4jc6hf4+2S/X3mOWmPYzI7iRkM7kPlqwIyXwziFSql6SR6PAwc1A58gtjsHX1Ex9LbdbzSwhg8MhmpduENo14fWwNz0lbOjMm6VNRn3Utbvb/SsftHoB8d4YZpIpNV1s0vUgilhLKcwqcaApyCDVolxmKr4JEmStxDmi5iN1SpXNb8Odvq8hhiB3zGBZmNSq5Pjbs2M8jwc7qlDFlnR0h1aqS7qswcOtMsyqegJs8KvwZf5A6ziYnQSFZNzpNKjdAWBuCyAgDuOvg8tcYgbE1J+UB13WIZ6nYLAogata2hNK0MhyzdJXD5E2lrudK3Q1v+BfUTsKeWwipn7vfJI+wn1BThBHSjjYe+mqY6YxGMGAY1DNi58qqjBLA8MwGzrW0T5yLdKvSkVEl9TiKh8zQ2wfHA6bwFdiwDByelAS/PBvSwlRMQcyIXiLW23DP1npBMhMsoU6lOcd1TsEZUhbxbAV7R6oabOopNXxtkEB7YmYjdhznV8RhY8lM6UWAlaMzp7qQE2S5CsdKgGuvx+hwy3Y0PW0vI3iqhJHo8PChjhv+C+vPbNLb95hfj79G4VTaVt9nZxZSiVUBCHLyxHDmMOva8k0AaO8tnKridBTB0VOQclcPOjyzTftcOsGrJPNoCF51ftL0Uz6uIyqpA+906Fda7Rkyauty+PGAE8Bxvmu/lS3qivtgNk/bZ10rYgzHy9fXxHfd87+TrKvabj6DxAEJEDVrsgqD2RPyfmgk5Aj5xFmpPYVxF/E8GgeykGZF3d5hGWTT0aDTydH5ZOZOnSHHKqsRGrcxDzHl4cG+VUzCWsmiqZmA0k7hPbamdtOdmfdW2hNkX0XNfIggyXF7qL4TTuFNAYc+PsUD0AMOz5TPetL09omHspJMOsV7OJyq9+WyE/14u/OmBpn2aYvS4pDwMasDzxJF4VzOiRjWbw0y2BSzVG4ZnA3jDTrp/hoEU2nZzR32gkFy4P1y/PUL122NaccxMZzDQ8uA5qV0RycnLdpc1OBS+MQMCgA5UL903/Zn3r3/3e6rfc8nEc5+YNefyL6Na55Q/M0EmbhIez9Ie9kFps4V9wGBzr9tJDMt8/7lmp1b9qfNr2zCzt/xlyTfgjLCTnuYTKTYRlLSmicYzjG83/S2d5R6NZRpaOjEWaETpVpdbPNjylFb4JnOHze0jm+v1eAybe70fyRq7962Oajsu7cCJYZGkoYdoZ5M0kBVOp/BqChbMyMzluuUa2dSR6uuZhMW8ZHp2heQFbkLIsmEqPdNDYfhSbg/RoP6LPs38MxVARwZUu4X7XpL0t2goSaGJNLGi/TxTDI8rrVdSyy4vjuABJydgqOv6Xs2AByN/Ml1MQE/F3A0g5mfhAHIDLsd781xX7GFVbFljPyVDygPu7DY8LZl/tLukfMTOUD5ngRklefAK7A8kOTTrJ6/49cDfMbJpthSSv8o5oplf9F8U0Xzy+tJ52L2xIwOO4e//gLeb3yWgiR82pbOvZqRAMTRaYDXreCrsGGGxZKWjdXHGq7fMKS64tPSXJdjcewKKMkJvofOEfGu2ll98Id0SELs0GYMziUDwOorP8m9W2i//8xfg+zZTtJIGy5wXn0TrOdrl7SUop7P4OJBfO2UkammtuaH9kBI8T0784Zt8x3LWalD0hmSMESRsuDvQWeWAoI76LAq1yW7dAB+nT37MZO+ou5wG1qqZhS85k/JIkp0A1ykrIrr7MQSzAN5kLQWTq4ccuOoYa4exGeQOBxYVp71AHiF4plkcRIW5fH37XmvfWHPZrC1ej24jnPF8VT5zzlupASV7JQwwvI67Jyj4xKCZ0DVSJQATZZRlEuLQ+UvGHnpgcM97brKin5BZL80A57+nj8hjrvm5PzezS/cewtMQg+e3rp44kd5
86iGZVF8PkNaCp0gkSg0wwPI3z+FgzvHvj6TKiek2zE4j/kzv577t3ANOMi6h1meSSqHvqnBrCdJNYtHEvjhkJquSdYCZH2axYprxddOr/EGshv6nJl5wn6cS0MBtdayPeksxB4Zr/u/Iy8WD/soxAPXEFxbHpx1o4mt53LWnxkLnD2q8cas0ea6xykcH9jNUPZFS+JOrFXmPoPSrmYs05gDOkrnb/uLMGBve/m6MEOFJXcW73gNb9rnMp3XPyeSHR4cliU9wFCGuHiHQDXJfCTQrjsCNjION/2OH6EvVgnAmmUzpNOCkI89og69vZzk/ELmopkrbmTL7mX5xE6g5boPL4pE7YtN2vjEfuhLecDU58pDPX5e9E4fIwotb9BARlgtXb8rPiflDV7TyUcEz2GxdA6P/1zzkf3O9j0Nj/BSpridX88vAAAgAElEQVTxyNZI7xqyRKvn77i0tpOBGMU7DEn1fgXt8czla2XKJ6hYQfJ8Rku0P2rrfDYGixOC4KokUCHjLDluFP82f9vCfdUNIqGglqo+qr24fP1XR793+fpu9cx9boLsrpa6ib1jO66Ke8527HO9qS8j98b2gLNVOXROribqjIhUwuGc7HibkW1dvcCVEZvPbRWznm6tYw0o7xMzbakEdlzd9/76cBg2pZOVWl2iZzi4Zp/d+qWt2eMfSGkNkggZ0W+iDR4LYR7B6r1bV2NHQ/CaW6Wad1ZYxgrgqgmiLZZH5/C4LXxqa3aT3aHfV3Gg5XnRz+MCiQIN1oi1UuxM562r8WpKPDjC3CmHzhxzThuktBnVr2xP929+SyUtbi4VfOQmdwZdGZnZcy8AKIfipzaQKB8zXa1ES0WIEmq5ixuFWlTaxsa6pzo8i3O7af60wpGfOhLSZaQVn3TZy7l0LnC6EnBBeTjrhZa/BtvDNdqn8+JBg/HpyAgTNfS4+FsKF34Vth4QFOKv4ZxbZQ/AVJJ7Rw/hVRguzY3bKx36z/XRNFQ487+DKXstF7HU6IxlRqMw3JOUSWlgZiU6PP67XBeYlBFmrq1e1krMobtU5w3/nM78Qy9CLT0np0IbXvt7DZvYg6EPlzfK5lA9O/doYQZGLD9HyP8wd4pJcB+T0gHtlSv5anYGnEhK0RFbqpH5dWR7Qh+wEoNngTaEYLB9P1L6Y1ymbPHMOCBLrcECPzfc8fOUxmdbD0Bot+WjhTQIarWM7Ph75LudXeBelQWXAc/bVM65VM0efAZdhlrSmmMvpUW9gGdgv+8PXPL38PYjfEBwgDLS3k8ncykiWc4xRzZMeekaX9rrs+/Flx45NMMz2LDfqZivpJHo8DCLQ6XtEELo3kHUjwesnnuK4mayV7fesoitj2iqVpSOiwQGYjo8Jx0/s/NNe68DXZ9QFQHSl9DCkTZF1x0FSu90U1TVT21xRbSTYIuVfLGEe+ShoC2subY91Ol1/4Arh9joG/EbsXxKIkbBVICvRwGaxCRRZ6tzy28GvqddWixpaflkivfIAfRNGDR/V4P3zvS36oPRwVbdIKa/HUlg8Ayi3OTZrjji1IGVA4LPnJw/6siy/KCSC3Hkgl9fKHXVy5CSrpYKBDe7PW+xVEeI48XAHBt2qKi0BBXMWXoOwRvfuMAqBE/B8OTUP6gSM8YyBTxAuQ6WoieWRUQ82VHjfnWnjIJxXbZtBUHLjngWZZr5mmS0sBYjrcSYX3bxhOC5jRx3kTpXuGZBeHJ8YwfuXSlKQFabHYjEUZmOl/CdwXY3Pgc2a9d9LIzRmZXOxh/2ZCdWrA+d70zRzzHPys01n5o9PrJI32FHtfoHx3y6I9433tNKwwzrliVLiuSGEEKPOB1xtnjOTZo2x8Vjv68I+Yi0vSeMZKZleLjDTX9jjBwJytSFXKiZ0ZsI8+PxM7O4JCE8bHvjuL1tp8AsoVVZ9XQG58AQkPPi2B/U1OwYPJTTAw+YLYazhPZTgtZCkPZiecAVGNXDti3IonithC+ouKcClTkGVBAO
2CiyjukMaXsxM0jFC4Kz5Xfi8Ubec1GwbGBkNdjBlciC+oaGw1Dw/uQRENsSqZET5JkgHur+Rp4J2XfHwi1Dh4o4PMUTuHuWr80hXa34K94jcQ7TXb+o2hfxlmiBL88LcvRW2ZCepxMoQ0sGhgrm7W0Bb8IhyT81e6JlJepslXZ8pMLAqy2iQsRDMdqPRMvUVTqTw5UJUYqdqrRCn85bWLnBfcq1npLy4QSs+5lzOXri/d8wvWY2MnMK3jIJHpeAH4ylTEPHhuBpbW1np1QUyG//rn8l+CQ8swFKUGNx3lLIwGQlO8NW8TkcIw34GxvmyCjEowi6h7N2/KRmkUEai25jGmfsQnzKFJ1MiaCIXWIFQSs7BJRr2/54F9gc4Lo0+CV1Qeno28rw4EYrr+Q9cHOQgG4oLXTjHn+tX0B7d82qtvq2k/eu+z428mO8PPOeBlWMByOfg6SnyghWhR6dhyh1W5bkxneQedJFgo0TaVHGM1V2yu4dzA+Mrwqi8bAiJUAIIRTwLCJAX9QVeHBlxekYg8JfOxeY2qUjM63oIRzvodDpq+77gzHbRZbue2jH7K6ex0PnOKIiHjM0o7UkQ4LgY8jhxAxMxKFCCUpr5DQOJG3TOjjZUHs3/Vy7bjQNRHEAk38qSEmZmJ6Npv+hGTgMqgZ9nrMNuV0wJ+Rw5GvRZKNdCB8JiQcnALMGlW2ALp0SvDGyTpUkEIKtIc5BW3DJmaXcMxkH8rzaSf36PXtdfRFWbvDMdc0Wsn0HOKSUlNDh2gRrlkI7taMDUGJDSvN0hF/HwS7wUjW3wLycEe4lVi+UK6j0ijVOvCGOF+9x0vfnVX3NjH4bwGTN8DQApj/tilNDvqGvRA7lrqRkfvM5yXIRu6HvEcOjjrnLEqMJKUoCSRsq6wDnL+keRpJw4ZRMvkGZ9/Xb0uWaDphFFveuXyQ33zFP6dmRTxmz4yoPzzSt1Opjcwre2fUCglSSfXHhkWTEELjOEs3O3ISFaQkIrAEgMSIDKqyHEML43CLACJU2WWv3/G/LI3XJVsRO8BFlrm3zevj7fqXx+yr7Eu3DceICVYLC4rldZLgZ3yFGfIFiDSg2mxUldWYoOrf9Rp+V8niNv1lBDA+dV3JxTIUx2QkgavakG+PUBMkG4oxVHR9mFZQCgJ9lJwVb5UOIpts5urfjHSpGvixTzOp+739wzwipstJdeQ65mDs1T088QU3us46Fy7sln4Fx2l3anou30pP4CHB0z/Z3Xgje2Pa+7EiLL+U7cPCOBevosm3q+BKHBeOeFzFHUhW03359gObvahCoPbjNRSvYL2RkNOhMxP6QgXgAhmvFjAF/oyVg1/2IvRqRW8E9srwSgoCkS96R6Rf5BQge8/5z5Hyrlzx0IwuH+w8/enz5+kXfH1gldG1NRejuHCXFmz/2hHBPvrINX71h59d53qcNuV+01J2EIWMDA6U3Ig5PDeKnkqULMU022pgxv4FzeZDsxnC89icjBpdODgzi+IHwVWBF7Wx03HtUUF4rmUeiQoDf3bI+tv2Bj/Ie1kzT6mDbOwntfWjtwF5t/YP7WDj+M7IjCmKdkS5KcrO5f1A0UsrfkHNst34BjZABy5G0rOIjyilbmQUAyhq/tpTzu13Xj6QZk9rGNcK/vIYAyXLIyFAENIQQSgBPp6d+0yyy2GDsAotXInljw8l4ZK92+kMIIQVDkYSx0VRtKsa5TEkmyONv5CZjAhXlgckgU6P4HpapswK6dsBq2gHpZFov2OI7HPq9edi2f3+07omlWLri63rWT1apaj98eCEs67mr77F06H9LD5Idih0kzmEo0tnk3WJ2QjmX5iyRbPhFUpA29ctriK317Nurxz4+2QZhKuQj0ltyFpzCYIjDw3LftCGbiWSnLJ+pGCuyP8rDw0CcQUCES4YizUo8yM6jsl/rsyUfGpi9Bdc2gUOSqfiMSyNv87WGNN9CtA1/eWoSKHlRHmDFIzKQWTk/QoBe979lib2U
kpIZS7nVJ/63dd+y1wTrV0Ruiqsi3fD3y0xt4andx0hKg0sEON8E+pDs8HBxySYsoO2PdeWlYlRyYCcWzoPvblpkdwLysWzFO0YluHcfNb3X+nhgWZ2qtKV3sJgJgDr8Y6mr0oiIx5kHJxCV3/Oiqj68Zf/e/WtZ5CgX9VWXCBFAl5iHkb8PZngi5UU6HuK3sCzEElRUER1/05MVhH+Wj2DcJCIeN+ONMQ/5WUn4LTiV+K5VLGn5Vjd7qe3ITp05QdIhmrmx10zVRpwml0HSzAEdMWDXJFpjOUrlI2ZMO0tXI/FDI0gdKHNsATe9XpA6DZK9GbFY10tW6qaAZ2fqPewhulCUW8s5evhpke4dsK7fqnmWuEctsy2akWB2Id8C0FJS9HRoZwJank6uLv+o8xmHkVmVkUG5b8mAUckFcbgp9iR1jgOsIRQAgA/kW/b/k2tyCMIeKZaK3XOUjJjWpCzGcpd2iyGrM7uQaA+3kgfdwET1uDBX60W/J+5Vbd3/qn398vV/s/0r97kvO9bvv1PxWc+PDyyLo0zLXMNFyE4sJRM3DWaUptckmsKziLTmA4/Ds3LS8J5j9ghyStJyz3LaaI996fIxlBsjVBAJI9HhKZ8iyhPJApY9OCUEGIfgy1Pa9vnXz42U5uaarWQlWmLtfi3vwS0EPL469ZY5u2se9OKFORMzYRnOVKCXpXpfFXRSILWqvCIpEBuefujfK2CTzkWNPXVOtmkcHkJ6RTVzjTzKJ/Cm973TN167Wk5iUktI+UpEsf6JbczWQ/NuSWQYgj/Yq69Ee41CoJLiXMJh5t91b6xeNOtS4MuY1yGEOUm5ZD8S2K/kXRmsP9eRo0SRz+z1cNu/x/bwGQy6ZvUyg6uDlhCSQbQ87Oeg88+JwzNbXp2pCcEzLQ/m/gvYlv5l14z7YOY/V2/YuuwEwSugnZapdnLBfP1Be6lMzqSMSFKa53t60Naf2kWOr8W3pdNJ1dZ2h9dawS6t5Rk4iVCumIvD4IDDGlPF7asQnP4gO9gygo8JADTPJUvEDHYRGUvtiGNpbSkNMuWndn3FqrJrb1qP70wmMPnLE09UxLJvEY7+0dSfa2/VTbysPfUb94e3nl++/tWRV/csNMHfQ5UDNVBV+23FR95oOOdbgjU6+ymsZ7U7rov0QHC3mLoJOryzUo6vHNjr9sPXLwUkOjwsj/hun6j3+5vRGXjPtwdwH52aEEK4v2GnOAGD9ysn7nM9zJgi1g8GlppLq8FF2mvJFH1bQijgWVTc0xkzghgTWGsViEoDFpGWIFgvBrAVgrTByuHEElHnruJj7O9KZ2S189fgs6YDFUIInXtXOzmaSowjzQvBk3alhUWPDpZzolbPtosgbXxJiEZVqiEuSh3cEIdnwEPQ/l+7t0Yb8d/t7nEW/zl3sOohS9iAPAfW05P0NikRkVUW47x9YUGMya87ZqirkHtQcDMjWN2P7l/4nO4rvncx9huLnCZK2hjn8CgItv2ATqsCw+01Hc6JHMLZBGqBVRh0ctxv1AwDM4/CSTQHc7XipVhy4eGp9n5KRu2RBHSYN1eWFuxQCrAFhR+4s0CyIvTn3dooq2dnL3cbPjnQwvq+X7ezcSETOYbHcD72jj4zqbt1f/39CwODEZ4RjoWg0FVG4s85LZHn0Vk8zttrPQ+HN8AxlPfPycEnKKZa9s+pjfJZRB8vYSQ6POwMmkg/PlNKXAjvbno2VLKjMhUXQgg7ACEeDW2XKyqd9cxNmT0ay+dCPjYnsRO4d1TFl5HpUgw4eRnKxAwIqRMPFiVacj9Hu8AQSZNAiRxIX3/QXirug6Uq5eEh7IF8N3O5Pg8/vQbfc9ISsheIOWHXVwi+E67oMaqhgO4uOl66UVZhcK0z2tYSIZ1SxSQsgVnSOXQkjMjIKUkdtWQi2Yfx1QekZg4SuXzgGFRe+C8gUyqd9HHX74kXZVDZC2iZvFsbhfis7enIulAKWX+Tg6FdIyOUF3NkY3MnyPYI
7mPRs/cyO3KPCGEVuM0M3oyko5KZ5ecKr0QjUA/D33xXAq5r9QpawcEA6EAoL5MDMWuJAmtWn1H6SCW7/8v/f+Y7lPLvmsEYd8Q+OwcIOCNxjBhQp8fxTlOErw37gBlFfWAs5el6vla2Fr/7ZcOmftbbc59jpUQTAH/3xT37hwD5G3vmADF4H6f9XNEuaLnIZaQFND4FbKTyCLpmSofBbjrFULELD0GSfq54bP/u3/mWMjyNxyhHrfvJ4wFP46iEYtfxENWR+fjc6o03qva5lqTpvlu1NN1PW2+593aK5jSpn1erg2YbbM1ZQc6XwN8zSnmjtETGh0BljShJ8BbBbHBdJ2SQMijp6jVcp5Rm13gramPw2GovzCO5eFtBnriERKm1l/aFLeimpcUws+wZYVLF05lJyYxYHd6HypmswuDBRwdPyz4OGDmNTMblUNKvOJmQkk96hilTv2t+ntKBDg/2sMAOXNIlIUjSlLTD8CAjl6v6hXO9Znt6v+vT8nRW5jIJt8sWJM2WFiRpliiOTTkEKbuV4zObDogqTlkJpKZn12VTDJkywDXEMWVHEHGEIQimCi81g0si01XkpmK5YY65KB36dT/cBZhXsgMucyURe/UZAjpgPnQ+F3ByUrLnqKXIPTavSGkKzpuSC04RxNW+8Aa6D1oHR54nmmEMmnWcj+3s7BVss94q+QiRGVCFf2xt2eZs9Xz2p0DRUZTrJgm6Y2XBi46glziOSKXYnAx3bT6aX/jrn+8yGJFSN56plrHcwFuqUZc0Eh0eRvoLbRsEFoVkSozcdPxg7bn7dwO94t0EWdb23B7cd+qeiOLfnz68fH1/04t7fPqLO5ev6ZErfwxTo8qNwN9GWQul0efeuPZTvwgpuaARBf/ttBEj6WB7XdnXVC5xH/7vuLk7d+y3FS+8wWGbOqUkQghhAFK33Z+a9R3c9BuqvwsBQMF88UwTf9aV3ShiGnWa3vxgeWGEOr4SaLENX6ny0/G4TgdAp9Ky8uTQwdbWUWbGJuiWUxoBpqT1GhnHBh3iBysYchqfjWx9nHf8Wrm5aT9uIQ+6ikVLpuWiRMS1CjAJ0p47Ab6DjpHqV8327Lu2Sz6l+J+e3cYFZd8i4nRq7KpAgcMkUgKGDSHDtsp8sLyoz2kVxuKGPYc8BGMHN+Wwf5GJfS+NOUxLS3n79+y5Nz4Gk7NiMaGZlhNCTzqRBOFHOGgw9Xrg0vFUdl86ZZ0HkGYQez/DGXI6iCcGZBYnoiGHwFuddHZF39jwIPwzJCOYGZqLLmT1U5xXkjwZb8dnU2a7tpfS6PRSpvkAzrq8qMlXXtp7rffsYZRe+efZvwuc0atvyeFhrZ703iGEMKVaLEiGFKfzpGv9sxcFv0p6aD/fLpqxaeZ8u15rCsM59ddgWewfTwSkdQvZH1ib/pm/xvS51aCWdf/w8y/tBCL/yGLdnx6UgmjfjVdSj2ir0CF5YNcnIVoI/vAfbMsmwmbO+7KtK68xM6S8BtzAozUB6x3b4uo8tAsW2trtZq8rr/w8zko2J8r46w72PYALz1cvnGVNm06OOmfMrETKIcDjRDBdMUDZvHRRMetS9PbQGXTeB52kEDxeSsuHJCyM1FGIZUgQsxxC8fnaRjv2cyxbheDBw18eW4bn928+i71GX9jZ02gGYKSuWICAA2gy9+aQ0hhtCYToeNCRjEiFkHNJmHvTmEe2r0foGJj9qcr9r8BgwMiyhmKWBsBu6Hu1x/Yj2+9IJgwt5UOgIrTrkBmHRTZe58nxxVyoI2uv2YGo19DnTOeLZLULVf3F2Cz76JTZmpMhSrmS0u/j3NRSMc85Xc90/Nl0k+5qlsVeD25IeRHOykLwcCkosC9YGSnGOyRareghxqDtUqZ54nAj9AEJI9HhoWpv6di/x+6l4gszNsNd/+3kFqiLI3M2tFP2rYp9QUdaRsZ4cGReDSGEr9DFMZY2aYqzscav6c7FGi2WHPYQhSSx3iAvYognWEByCMwT
DggHoEsQnXOyDeIw8LCK4IdigMSKEXJGW0pO7bvgjoBBUJFU/hbV2eKIYEnw2BhRKM5oFQY3KOVVSsf+XgfWVRrNVPFwS+hWYakq24t3gIeiSu2eORwqvQ8aLOVhcvp4CgqkDg/IIeeC2RhNbQ1UC/6hnw9s77+999S9x+6uu1vmzWUl3CQDu5a0XDnCdT+KJABeF0VvxYkRR9jZAaiE3IhmYNycSxkni9KHoypIKGd/E4Dm72rMwIScoYCkpLv4TBTwyixiWTBjA6hhO0yJ7B1m75Ziflw2E4e2ahYSR6kBAj+r2BauBxe0yLqZQjJiqF2HAOi/eGZAuT/d/dx9jnunnPdrdoQgo9uPr5osY/TwQpDAXmgQcpyfjv/DERybfOf1HBKlvCBNAOdOM2qsZHxrWloc2o1Rf4TU0wc26QqiYivpl21vmTfAQ/Dvjg2b827TsykP0bbaGXlnaARnqC8PmFLzjh9CFiE5b6pP4kXnCBTNdfznHB185GCx1+XDeINVQFYnkhqfAWTWiL+GOlvEx7BEORe2aQKhc1IWI4ZnsBmf1SCbdb4nHXOIgCJtt5AmiWMaXplB3Ah+ojqa+fP4UhL9+UjLd4xUjXZLOGMsy9kB0Hfp/YiTi5JKQVLLBFZHHCUc8MQqLY/83qy+bZ54J6HDSrF9O6jv5OFhPup64g86Ud2y7H3s98Vzs4hTSd+zpKwO1ekBxBZH8RmDQkKbsxtiQ3mwsDsvqse1vPL1qgy2Frv1IE4oQcApKRcxuFbb57KlsDFubQdxNrWhAiVIchlF+JWwBNQZcrgFZfYm0BrZDRWTbYJKISfr7aBv7Zw/+fDTy9c/O7/tPtcs2ySoKkGlbnvi5YWPjCcDlKroAG55g5xFWXIupVznzIn3kD+J8WxkHZQOgYVV+ixk39gYpf4HETAU9f5t47VLWmr0GM2zxXA49T+axIN5Oen+8YkxRn7/vuF72M4aQgjXS5YO/48v77j3SgW0LI51p+B+4Vkr5Tk1UmZSVuXhxNKRPoDyMTqNhGyq0CIoN762PNyKdwoKqAho+YHPQrEzroU4wVbSuZCqYejvMMODyDavByh+p8pC4J+5gWAUkDmj47WSLbgwFOzS0s3PspXiCdii78RIgzfMPPjmRTHuOHAV5Epj4Esvsu5hJ0Qb0w09uBzQd8S9Ew8q7g898nmjJl41xhH6+BnssOwdQgj/8MrsR0Vo+k/30YLLNn11JtBSPZHUCgn1FvJ3BLdyvpX91+33SKYP95GN35yppHLXCgxG5cx86Fy7jtcnfq5pm0Yb6oRcvdbTkmUpH9i/e3fj9wvLUYqxGa/xPbHVqEFGyjm4fgZwh9mZd8SJ4dHkAJUHOD5oHrh//2PL0sfk6wkhhFc92zubssf2zyxgSDVsv8za/nvnYARXqSh2HUYcHuoMNrjn/OdGW/ZsGl9INi/L68efm3Fktb9tJGd4mBWWA5hppDzwPe1Nb30vPjPE0tvf9zX4P3j46PJ1A+UujeRY0y8IlfZO1Upcw7F3tkb7wOZggWYEFMfOCs0q0GEghf9C1iZJmArt+AN9Kg6VV0y21xHDhktqGtDdh5SBsof2h7UX9kCHG1K7ZseR4M5pZKiCq6nEEvh7+JtD8NpaihFSPNHl9eMzsm9sOD4PzJNmxehoRFKu7LjTDgm8RyHKRS4etxVhhMW+5fPS7jue79oizfvSziOuzRHZg+XQPuvYYt9u+lI0WWa7wqA8gpNTztpNl2ShfH/PWNf/9uld9x5PUB5cM+0OmsYfQAtIuKRFNmOOII9Zo+K+t0ERxe2Y4fiSNKNGJ2oFS1p87vMtLMZefMQy3PMOCdmqU7c8AzGZ8GcwCpqd4aGYlAlj63xWVNuJA0p1NM2Ow1gIQ/lcZpDQUL6e9bL9thsVj2vjv3/StAzPy6kX3CPXTk04DD4+tM7nm+s+bVvbsYChAlWCo6lkL1+B904c+GwHHXkSrI2u29yVn6F0J3QPdHwnQl+S
It0Npk55yBY8H7/Blkh0eJxxl01YQTdN/xqwLUK5Xbxtk6zZn19emKdKVtO3t3wP7gXKWGx1DSGE4z5AtOIMDZkKbcenx4g8X0rbuBJYXf6/GB7WfqMA1hD7nit34HXjqb8Psg5rzZVMzpVjPwete8jO4BB2JIQhhEILkW5doq+FzWP3OsT15JAnLqjQVl6U+CiVjsMCy2cVeXjiuHe0s8YJ34nDwGeuByLX1YgATaEAoDM0EgwP14djLtX2deKx5B5de7xkkKb8rVzPCQKQ1byPmEjyp3IxD4vWC/v/tt++fH2t4Pf+5+Clr1b8BC1K9uO6I6SvZO+wpFXJeoeq0rAgbCqHwgQBFLM94z2//6gRNdcSD5xKOpyKAyKhmz7DlRjE5sBxS8lhGYCJSg29IXRrVp4R9agoaK+NI/0cOvqkc4cZJFYkxrckCsCBq/gVdpkNr0k2E3NQOIb8gmhAkXvn8wtPkf6Ta19cvn41NcxBTyK/tysG+Xg+8s7Q969bF3NNshQnPTsrO8i4LqQyUj4B0F442Sb4PdVH/u/6BZtXNj3oel4gACz4Hif3fcyaKSM97Vrp4Fvq0mKpJFJGgTfNLEXhWL4cxrgmRo/AZ9bx6zlvvH59YMRLP7jpW9ufnNsDrxZl8WKOZqDLjgCCGb11pQ6Jv0u1bbo0guAhpkypcbgM/Tsa457Q0PtWY38NPqf+tn+k7HSqHKL8V/TX7wNsrrgS1s1dh45kFpjFUaXz7BhRoLaTYg44P6q4vgojjntHy1YubS7ZUefg6Q5kbZ1kW3IIjIHBUgeYazND7IJ8leOq0Ww64RBJ9sRx+fjndX3drFle8App2IKaeHPbWcPw7BTsdVHAUJt52xSMnEMI4VULm8QpskqGB2U45QOievVoKHQVMe3mSlexTHAI6XD6oEiArvDXNEu3CiMNXcFFB/OkVW1E9gsp0S6B6ZlNJOOAhhMGbTMlNqTTIV3FA3D3McOTFsLKDCQXKr6SFPpoAlZg9QQ6guxUU7oK7gMN3jlI0zIWBPZe1vbVo4WPdkjx8EnLC1V5okPojglOp3/fPqc0KkwcjAUHyg7nzCF4tqTUTULKSMPF7Op9pQ7PAhmfmeAnk0aiw8OASg9xlj2ISeg99EbpXtMu8sm+Z4ysICq72bCHOBJtHTo5SqVdRFZHuT4CIwxEYdrOWPjYFtfgLVGOZaoZGzYlhq13C/ckJHF0eEbycHjQOHI5jXIw3xFgMsQ+tcOKJa7xGphj4yEUkTHYtmvUn+EQkNIUWZKTiAcbT/3h19tD6YDimVIWW4VBQU+C37W9lc62trC6bIr8RLawM2WcUoE8QkOUJRm3wk4yAqlD8A6bZj0d26qWs0Gm57+aX38AACAASURBVHhmJDvK7sqFMKWuFcxojMTD/ru+NTDkkEbTz70cmsVV4HMOJGuTNQgYvxRMxS5LZiJaiYmcSyCUJo6pe7WRDsHPv4KWNYPwm5GRuXIdXN+gBfd3NeYQlHTEqrp9ByAtFZFm5/BJp2zxkdktpwcoLdP8u96t+MwpH9K1f+MndP9PwMkmeoPE7QyEF4tcTPycBjTE3LSECZRr/U9qn1y+fjzxmaAJwDMzMbSEhtzc8iWt/3hi4OeTE3jR4oi7kris0UU2Ht+TPYYo6Db2kmTzmPFRuzPbsr8rPcb1BArCkmLu9LV7r35LSQv3qVEegWV0ILIVbzR6YzvRiyUfnsxhULxwmr9GBak5JSlji1614i1z6xwLinXm4AfbHoNgKvKo8boOD1kkWnJw14B0Qv+Gf2+Cw2OO1mDlXWEXWNZ393thvAt/X3RKWJqg4xJCCJ1bcDrEoeKBSh0sZUxOArnTAaCDE0IImbjsz+r5O+7wcdkocSDJZbGUc60AfkzF93DumQnSbNqsfrXTEYJkfPAclERsimsoQJPrWY2So39PIIfsAYRJzEAIIew0DNOjmZsC/l0Ej8NIIt0q7MLRwEcSxOMsUI5aCFYpV7SJVYHT
eskmoVv0db0UjD0jenV83S0nObd41hFFdPxd+eUKbgp2LiIISHf88UKQcSRDzvXd939H7aX8xwQ6+ttg5qAq89S/juAB3/3qLyVlNqEdFDAj/m7tn/xzbr3DzGz8puhMbU/crnqHpIzIoruw9aadWK9gXA4HPhPxk20ri12IlxCnFLAQe0/nkJidrz8MPOqGCGHTLrCEqNg4PMKxlmiTss78LmSe4iRarhqJDs/6x3ayHv7Yb3giqCdQTF6r+pOfqeY7Df+AH1atFsm03U8P77nP7VTMOF4vS9EPB8u5iP+1a3aaLLEZUrIRnTOnYnIPLG2eGjFiFcZWfHWEZybPA0Iig5guGsW5sKSVxDugmiOMCFneEtvu7lnxPSyv8fDTBZlDpilJqFK72AjsqxzaDydz8yoOhztQsDuyONq9NEXmSgHBjpMowbHgdxdEm4zp+8KpfVBxB3V0SPSvu7cctkgVukugVmB5dSKGfmPLvMCDC1+HPamag/LP137t3tvO2H7/ad+Y1HdyvgTwsm/XVKblHshFne5f1W+sXM4enLalK4jZvUeNrFNgF4SJlq3/ytbMvckgZpbSPQxbK2WEVRguu0HSPS1bocytGfI0Me2SKSy+QtAJvzbV9g5wdscmsXPfnwWLMrLzsMEZlfqB8ruS//M5nP3Qb/gMsT8orWkWpA086nfXPHbtDJwGnwfbxHs5f25+WLaKx88zN917n0B3i9x2IYQwRYLhwzv7l6+/PJUmIcgwzSKAUbBIX/j5Z9DkcNbKPwUnaqbNEpjH8Z55YlEuMHu5+AZUDYkOT/s+WIblkHULCKnE6Sz+Ac8WfsP/h6E5Nmxf36t6uuD7VQuJ/6nly2JsVT3u+ShviUiBQDXeewgh5Gr23TNhVCUQegJxxELdezUTaOvkhGNigIwkW/dCEE4W1illodG5mCvGBreijJQ8KFnuSsmzYCv6tCzki6dI5YKHR5mQ2VYfUdEmh5tEFHQWuzfAiXSwej24LDdkE+QXWI6KaMKQYVfaW+mwOpJDSenm4IQMdwSXguuPwRar2YekshWRo1o/p54RDU+26A+Bw7alzbVLi85EV4hGWboiVf5E+mCvlc1OqKbQ6cC6Q5lCnwu4nFnmF33vTZy2YU+SnB86qXKQO2dAHQBXerTXeYF2DK7hgD6Lv483NaiL5aL8psADUNKK8KogS81DLwTvRBITk5Vu2zmyIoUz6WqEdA4P2elQa04oaXk8cCgBCK28Wwxkl2DDnklZ7KJv9/iLC5/u/8vdX1y+vpc3Z6W78J5Xa27GgB1bIYTwR2tGUngy80bp4sL+jgSFUwkWlgy8SzN5L349OzAysLzq1BCeobaR13ROTlHrwfYy1f+WSlrjZvzmckq47CwRcN92zQxds+CzPyQVY+s5HZwQQjgDL4/yDrB23x/4lANbSRdgRs4d+XucwSPPlP0mHfCaLmUnEQoR5cryyUNBylFsr2PWTEsMLn0rb83IiaRETg4EbK8VH1M9tDno3PSLMN9B1mUnfsmQvI6cPCH48lekPZryF2x5Xl/xDA/LRRpkEPQrzyuPs5+HWQievdT5AQIOIc9RJKtH4VKUTVjCCsE7x4o1SGRaRr6a35VTErcE4qd1KKQPhAfhl70Hl6/vlrwt4ODebyseYtcOgskRM70yV2g//Uoi3XrVNuuF4EpSoMBwh7fMFR3JqbQoUz+LbdppAePyUEjiS3pTI47NO30aXxJSJ5onWOQ9OhOYQi2p0H6OEniN6IRG6BgQuKotJayAXDJff5bBXnxGn2O35IMAh1dDxSMnNzKFUd8s+G4i/ay7Rzybaw3zqp+drcd+LukalWfSpQV9q61f2OujH0rHHM4hJTydNBDgJNhQzmvz84QblpFMPIjNOllLYJ0EOHi94R8AuQVOxz5MpdG+ByenN/MbpQO1SS1p/fzEUnpbTf/dh59DaXmHnoa/fqVuT6B36lNZ2QoQ6yQbE8Zn7dLhYKSuBwuxEhPo
k2lJiJIUWe/Uu9SrcgAxG0SeCv1cf8cWTVEWYfcmutPwM3N99dwRRQkpIUtoxb4YC1yUpIS5nqaJ3vwg8RbLkerk0qnTaJYaPeQfCcGvh0RGZsoSqZ5VzLSpLXQcUNoyDWujpHjMYhCjspQDvQotKmL5QvAA4c8HPmv7BRjZdyAO18j4hU/A5qtzXzLLUBaijtS43OP0wu7rzkPflvPowO5jIZnrUGYkGo8rIWBdyzjM2iXRWjjw+gqyjzuHDCSECykfco2lxpptvDpLFEKU1d4+GH9PSihIQkja6sWeZKGowSZ7jozwdJr0XhaUZBLF+Omu/ftP1z527y2Q+qVsygd5L5Y3AAmc6mz92/P3Ll+ryvr2OhnMkTXL+WtMMgCJS4ZnCiLF/i3/fCnt8fLPIO556D4WRvfMLizTQnpYvnq/JJWt2t9NEPSTkQxaxrsR4CIyIXNQVuezfhLIhtoo+wzPg4Y5OadjC/PvV6TNCWNDyF/Ou+agUHcnhBCONjCxOFRnm36CyhRgS9hEBBlOb/kawLJnC6H2xP8dW++0NDFZw3whbTe4Lm3p2KRV35nvNmbrgeJj7DUP4ZzgMlgyq730D3u4iYgI36Ukh7x+WpC6S3y2v+t/WxXq79mRve7tvX6q8k0MRidKKOkOLQ1AMG2a7eKec+BVDXqxhDOSNVyiHMXP6TN3ul2SOXD6UJP4v3Oq6kOldAix4x/PDDT053ufuvf+u+uW2n86sqzLVEpaFEfcqHu7cHyGdH4H91URjwF7TstidfDwtM78A3ZEgXQII5EoXkv5kmRqGYoFa7BD87R6TA1hSmp//kZl9i7F3zyBvgvB1TDwduSs4tTw2zRD7qQIENMuBYtZQAAy3vRn2XiXNWa1faBYAeGiOq8lUKdk5GESY7OVseB9KtOWg9dLGZYQQmggGj6WTd0e2CTkQLmgMBQObbhglk7JF2dgLc+cQC1dcG0sfU/XhfMNtiYPrbmF0Kg4XFDh9aEPiScKMRqK6xhk7YTMI3vSLHjr+8G6RU0zSVs8LBtomWntwdxnYBgNDiX9/f6euY9nI5+dIccCcYCpvN9QrGfmqv4Eoujo+DoWq2xK17K45zcDo/ZZTTxHTAmp7POP/eFBwrHuHQGRwj+MtCjT1oN1OdLyTTtVic/ObP7Knu/Jd6VzBd+tshNu48smYnYpg9LBKtLo06r20Zqak646JwshCt25Lo2qZlbsNR1sTek60kPpdHAMpQkAd2pAKXg6i2TpWLAMjt+KgGxZ2hNkPu6tCbIaYyp24Zddy9o+6RgWp7HjbQszwfyuEHxGZlnCZEm5KE35CJnk9oVNnoqTsuRH2Q9GuV9/jjfl3nJgcK4DBYk7hyoBS7QKY+dv7Acf/ZnY0mOofKtsA7pVQ80bsRkyPOyGVAeSzuw0iIj1Z3YN17IumSYyLWt5Mn9ix+XkmneOc2dXY1aUtZ5cOP/H0Q/cez9eexyuGv/z2R+6f3/aMTblf7HtAf/nE4m8MDJYw9SxfP7ck+GkcnCGOj4Dw+xbaV/Eut+1a87JXydzPKPMk/JWoWloyk5XedZZwFUiklIJI1ktHbiL0bYYZnBbTPq2uPaLPrXMqEk5Az5d2oNrgj9A65BvlcwxUuPIUcz4737+EjV5iP9l13ymqQodnnbXH+ITRq1YMNmcv8dUL/5w8iUtwTkQZ3SCDgHpgKo+h3GUTg3ij9c+81Z1sA0vOUuvL8SOtIQU4H4LZx/Y/CQR0mk6mFGa+LPOKLCkxRTyqgxuvGzP5javjKFwEiKq8vz9isXDezxUVZmYmRsFwrtWT15fgiQCLxW0PNyNz/CQm4OlmLGUvankfCEl4D/YMuP+eOCxM6TL/9HWU/suqd3tt22/K34vAIvBrp+IOj1KXBNtXcRQNto0MRt4vrOqn4PiEcrg695mkHiVDrPS6OcQ6b6uVMXvcrDEefQTdL0d+Gcyu2aLrPCpf4+QiYxgLAtI3DsMU8nPJykGpiKw3H7X
ngOZeYuP/XM9/4GtMS2Lze7C4e76e5w2UMoDAd8y549YVkCUfuXpyJz7fxfevXy9Kay/f7r1Kf7G750X4KbKRjpHbHx6ZJ0HSh8wR3JUubUYuY43JBP3DFg5ULgUpGw/wrovtP17C4gWz3ZtvehJsDyybFWh8/rnxG/B8CDlKhHgtIU6H9gXW2IYPkGk9Uc3v3LvMa39L29YGltBjPs44dekpEV21H/Y9y16xRoE0kqoM/f89S+g8ly741skOof29AmCVrxCbd/mqnNXohekUxUAyizUctPuN7cvbZWY1rQcTrziaF14QGK0nyIlEjhlETIo3AqvoVgicgC1hZSw8di+YLjp1whV3InrWsVglp0hTlPKc4P5lnWZazoX0oDhBRHZPis8TyyNRlrW2e5MKQzJ/vH+tQW3iOyPSkvQcaKjN5LsSQENBsqyTiN+MPaByj0Alav44corQrLST9oeB+TS3LwvaQ3PNe2+hjPvpdebtsA7LZ8q4/7PQUpgojIcyFyozXDixpj/yPNkf4h2q6zASINYjoe9dpul7tl7/fsCzL5AB5dmgG8B0E3SOj2oEYSm0n6elngujhVZSrlkHSbdQAghDPNkf9WsKu6LzNCC9WFXIEWxQ/Bn2SvwPXRFWuKdikmvPOr77MyXEAjdqXlQNBtw8hDdXqx5x6sEosfhLf9e8TC+0845rSjrTZr+g+RBmmx6o1T9yq7fq+H5inPL1v9l+lvK8BDkuvkrzRzAyfkAgmuyWAc9aHbICXanbmnuOY7tqlh3auv8bfct9977ZeMTKLzlJ+//fvmO3ccIYDQBYqVRxtJI0bUbMho89pZtsANAmxxOdNAHUvdkpmjxhTlXeb9Ww5COvNjNDPeNzD8zRSQGzHUFTwCMjcpabP/CftDZexDGk5ITsT55X1oOgy0Cwvx7caW20tnqRbPTOowvW9QH/kfxkNKuE861ziGNCHFbIy19IbWsmmZ85nSU8/I5lwmSZzIm6aE66fjuzjv2A3JCOprBoaP4mH97YnvzX1/7O/deM2M3+jdd+5yyKW8W7XM393zJjCDmKTLQ+WNv8jLX7P4fNjx28Odjwx8uVUwQc0IcXgTvRDZrSd8XoVlEh0cPYWK5kkjt3tRgZyWzBYMbsrjxHDLSej5Hqb/0UjjOwL47z+K9ir9+sRQPXh0NUE5j8K6MEXh+403JYACcrXIJLDumLuy7pte8o98oWpZIpUye9i3Dc6ts6/l+2fPpPFY9BgySZT479bXoKroOiWkdZfxvoSBwSoIYOi/aAMCMN7Nhy7ZPMGQQjCzbfj+6zHLTDiwnWRJCqEHHq3/jW+LhIRAzJwa90EK6HQJ5s5pfhNWaPYBfX/go7LhtB3xvx6z0h4199zlmfH52esu9t3ctXo9kBiM1Qi0yLYRJy2tYhMLLwHT1ksZG29JZrpbDwzkdPb+AxmXIPRAXIBgYJ36nhyT+rV43o3reY14cHlaxlkJ8dva+WeMK2td7e/63rH9mX9C57RfoMmGl0cnZ/V+tc+H0L9+76uNvdjBzIwcYRw4O69BL2rhylJYF+W929BUu/HdNmvbAZsX4Q5YLZyJpfqeLJmuKGIsIloqXoVi6NCyQPf122TcUPOmbBz+XU+cMOfWPKs8uX/9q4Pc+S+QjESYmgJr7fbwtEWVC23AfQVKQA5pYA/KQaUTvMU6SJUdJkZQRuq5Y2iwer17aE4iDMAOpnGK/SGKnpf08MDDDtzz2h9kaF81/A+V4liS5/0bbqm+Ge2wqmyiCX3E8id0iZi8tJSGWmV4OPDaBGpKnE1scisuh8sCNkm+pJeP4ZOD3BDnl5mydF8oFx60lTMtsZoiIIuOx8RxVJvg5504EZhmQLFn9kECC+Mm4rtSrxmu3pY/FWDLNTZG10YY3DN20Ga/Zjp+82xvmxfJhfyVpOkZ27zS9t/vvTy3jU835jVIr2g/opC0lrYRJhS+Qri5rNA68AsistBW4fAJnJSdzhcs3vvDv
DVvmDZXw00pn/vAg67CSXnEUL/zf5VHSGGwh9aygYvxTO46g0eiyP5VXfh5bD4DM96Vl5wDoIU9w/OF///7l6/LRCqbvqcOWdl6i+5yCFTniOudCkAMSB3omiRtDpwn7Jc3DWDKPzNwoRw//ndLAmV1awDkMT3zZ5ytw1Tys+ezJAlZVCdI4KC2h2L4WmNWLOWkvZtcPDi520ITggyKl8Ce2b1TwZYVsC6zryODps8ig3DOr+4e9gLo0o/2cZHcpU0K+nlUZ7XdZG8VBKqz1pRdgTBYCyAkwMKWaX6hzPKMJOu6WMhXjF2YYVQMqf27PndnnaU2dS/xDyxX4bcrXNue2pYxRX8CMGMrs3UI9lA04P9p65j5H8t27VR9IvDq1xXL3ht9zRx3bZ3Ws7cGhP1DooMwlQ5U+QuIgoRucJcqI/Ew7fk5m9yz54PiMVAGBoGUlj0wYiZ8kPYaWtEhOxwO4/sQvkosiPPeMN4hfotz15cIAzNev+fT0wZE9xGs7Hh06mtlPmAt78MWRPeAMUq1asyTALSPMj4ysCcRVUG7vmn1u7UtJEeKEUMZqOjmFDrIn10TX57kZ7fY9ScsDa8VSYwh+z9afxeNoGGFqpxeFY1lqU9XzYsvuvy4CoRdvI+KW649AcLn9S7uRzu0EMZU3NFymitkz+U3kX4mSrNnQUliOYFiSUorRaH5mr6niHIJv7aQD5UDrQey5KoXjOWfFUWJ5zZVwJIPxo7vGz/DXLx+49/7Vg59dvtYU/Z83/+ny9S8Gdy5f/9X+++5zN2tmC86lQ7PQtJueoJyRFuHgMTK/+wOPJWqi/NBteoeHxwBtCw/uEEIoH8BObvrvdlT8dQZW8pyQ8clLB9cqjFScwLKA9UfXAeZVbidwnA2PhQIAhxsdyGXTO5CFZ2ZjJuLAE0y+ANB+Xpf0JZoS0i2pBLiWbE2z20se9goxYFCuQtiEfPzRtuFdyxn/O99rWGfyJ+1d997Da5Zu+/RTj2lt3rBqyGBydTNOCCFk0RU3yeoc4HNSIifb/ByVnozIa4QY4d0QQhhXgBHCGlH27TSwd8tvqy2do3tDCNLgM1CeXY3qDO1pGYlwlofm0W6/Y95od+QPuhr4ME7a3htlmq7fkxY6p5CLN4YyedhE6JT/+j38TuoNqX4Rh3YhJYmwkiqB3VDqUI2b8QygdDwi2RPoYvXBa6MNKUnOnHOG6CuK7c137btGmyIc6FTQQ+x7x98FRmj1IDwO00VGXFVEd4KKQmbB36UdORk6QMQByTNpPbTPqbPFkYQz4n0owJSdgOqk154C74X6earmAwkyqX+440n9ngzMybkhTJd9EKt1AW5hcBNCCCPw8CjzOQep8jXyX87iU2dnAzt4I+RsOWACkTGOpO/hJ6nRJqCZYHW1H2RnX0mqBk4pqUCkk5UYyJRE5WwGmF7350TuiU0i98Fcnh3nWm0Hn8vYoDKhcChr6jrAvJJVIEZlOpLAkiz8bKGWslsZ5ai8bNx/OrUMw4c3Xtg9iWjuXx2Y4z8RDp0SOiM3bvl91Rvavrq1bu8pbnVMxn8FhrMD7ViMEn62W+uyNZ18RFvnEc+UGTAhHiRdjMpBJY1Eh6f2zL5EZSZoe9g11PaY4pCGlz8fySG4ZSdpDbITGyXvOhILcND1iD7yb6igJ+uq5EYgJ0MIIXTvxLdCO8VqzL8eQA6YLKU74mhGUtd2eknQk1FtJhIJRRweMi1f+IXBtvQSNLH621KCIY+TMC2zPFN9ZTd88ZbwMNA7l0OYrMwLKflRTZ5zENF3WoHBqLwO2ozebdkfFN8UzR9HnhYpR9lLRlDaKeW674pi3XENtqxHun9wDQX+OXkK4UzpQQKIBws7DkPwDOmqZr4Gvq4HAsokVudjdF9tVXx7LrM6JFULwZdBOB8RyRa0zy7EMq+VrEvr6Stfo83A2BNwq/gb4vdUIZzaZswcakaNGXTldFqF4UogcEIWiifEQapz4TCKesgSVoCM
+7wiXGUbUFU/k4gOtmSGxoOJkOLx0I50WCE4qX3uv5sBwhT0A7l1zx2Vw4bfKvr1fOumRdFfjQ34d7fgS1Pktjsb+331s0e3L19/564XJ23nbY9MhbeKg5m5SGcn1reSlWZi2OUjJS1mlGQ75i5wnt+1hzYXzJSji/kGQP5Eh4eq2QXhGZlWAU5DSWjhn6HzplOZ+M361Uvr6/1IHlRrbA9qLFFeDkDJctMvriEBYxAYGwjTLx9OlLXWXpMcq3gaf2iPRQ2c3DIDyYAR9EnCvwi9PJyOoi/bOnBg85Gwgzbtd3duxTsTzpkTh5k4E2aGVBcsi9+Z1kgUWQ4F2ebAYcQ1N6m+/kL+nQ12N3Btq2wD2iaVGp9aMjpPA2A0yDQ635C0M+ZsoZU/rGfNznDQSY/gwmiTxJ+is1+Ecnq/JrwrYAt/r+lTp3/z/P7l64+afr8XkUYkMFm7tKpodZ+W/ByfoIzFFPp0TSYcmQbtJOtP7RoF6QCagHhVpRA4XOlGljPtSeEcGYhmvAMbOTxWYJAjapki6DTewRvflPnkc6n6U3ZeAOD4CNeXLi2Sxs77QiOAOSyxzPgDz60xBSnevCTnBLI1gxsCdkaVgJQISli5gzT+utSE0jiI/sf/9M8vX/9P/9X/5j7XnipPBAYEWlV38hTGm3tnceyDhTLkMBQzRq0xxfCQi4h7QklNJ+D3m0oJmHCTLPa7rqXQMyOkZJ9JI9Hh6QMPoqUSUgMwOtHaaWrHrHup5FOVDzaMb4PpaVVVH8LwkHsjhBAG4M6gGmwIIWSQUl27aaCE9qmAoidcyJLyRiRCVLqS/1GIUSM0er5zSak7/g2nUO2v4Q4nyf7wADqXrAvPiBz3diTle/VrHaM1gKdl37GcqRkJBvFaguGhTL4kVWNfheEECrGMtJTB9mGttpB7ZyLtrdTWGlyPx3Xwmauz5XA1nEK5D+5h1YJjmVTLQOT2AV1IWL/l9+aP1i0F9uvudffe//DuTy9fP5d2nj+pfXL5mqzrytfzsm/YPqXATxNvQSyJsKzXdmxjbUjf/icgZ9tueCTxy23bZ/lHIONUckcadAU0o8Q42op/1klO6yoMaiPlAeYdC0vvFFI/GcnwzAljGMUfYOTkCQkBtOLmisfo6AMGbdbyh30K2Jy06n2hFJORIIYl4SVwL6m6jyz3QFy1m/X7hdIp//Kjn1++/l9e/YH73H+9/uXl6593fOdifsOiUCX6JUbofIAmHnEch4AFqVPJ7Ntkxxty4rA4SOWhYyEMylQwmJ0C3iC2i+Xz3P635PDQkWE0G0IIza9wY4jYGa2HEEKrYAt5KJ07B3nLiWWxeMvSbZWF7oemnZ98Zilvsj+HEMJsZAuvlbIHPNnxJzq9VgUtU8zMrR9N1JAhV41eglI4nYvGI/uu4ZZ8AdZF9cAvoPY9+0J1tuio0tEYSzqykFA+yg2uzrpodkL4sfx9EE6l2RCsMydoKVnFVRguE4ZUe14yfowAJ5LxIxYgJ0yjXDsE9GnnHMHuor3pDlkaer0Gx9pn0mxARgB1lCjwx+7QBBwNda9C8MRq/23zF+697sIcCLasl4SZlhmf/thHCEyBp8jnIdgRdnc1cz7a/4ObBrr+9EK4BTDITULm6RA85iuUZeEjMnUZO7HKTjx09SpaIQMnkq3Q2bJIRPTopfv1xizobEckKYo2b/MJSaYkCGBXj8hTDJF5YkCQFjZlkkhGSsDbWH8b3tAOwctGwdDpLX8Rdh0SkB+Cd/Spq/UXm14+og29mHVx0v/8nnUzPO75Q5t7pFKwOVZs2RKOXf5AJY5YkpD5Z/czhWJljjnnKlo87+L7eFtaecTnBrdeX1E30eGJw6+E4FucaRDLx/G8Bhqvnzy2B3L/PePeeXnRdJ/7cNeIB0diOK+/ZfXNnhi99pnl6VNs1RXQ8pKsjZv+5HcGHSnNUdkvBHYPMD0dQgjrn9jkTZr+u4mX6V2PPzBYPlKOHjpNm78UEcUf
2inHklOE64NZOskgFRC9xBEZhiCMzFJmGa2j9i7ZH5bo5gng6ZUYMKoZx53iP+YyPBrNIquTVtAs1xsyKzoXIzrEGv28ZkmLZc3RhqwHppPFKFEqY7oBkGTZr70zpCI3Cr7WTR6eVtXf5N/3rdzVn7GLytsFjmHfLziSmy0SgMmMHFV5muPw3EcIWVx/gudLIHIIwZex+t52Mfjhc0rJ7RJsPiurFX3zYw6gqdMvfCHZEzoyPT8XMzgT+X1Jb79ta4e4oKVkFBbkbVGyTJ7FxE2KFAgbcLLCmcZrzuQcyuCcG+7gPoQLDYH1fAAAIABJREFUhwH7WDxbyiZtox32/zr+0H3udAgwvZStKMidFbbpIoD3xL5GdM2YgRbeKoK8R6InxgzPkrjPp34eu/dt75QO/TMkV9hiBxnBfaWkR5au8S11adHJUeZcHmj8nCphR2pvHHWbsN2yPWDl09kt2pd3JI1AB6g/0faoq792KWltHkiFomSJgIKfgNwss+4do/m5Tcis6B9i6/7V4OkQPKiUG1EdBiqYawRIh+Ho96WlE18XyRphEJujmI3WQ2QkhCPEfRfXi3yO2YWIJAVwO507yFYN4+/3TQ12540Q9E+kxOm4cMSguC4tARw7ZWy2qCtBHqEhGmmx0Qs2ScHoBNBHBEIxSq+ELHODjrO9d6vqv+DVyJyE96qv3HsEW/YFhESR4b89unP5erfiF9Vh1xyqXMEb5nHbrpnB4ZoSDi7y3xxJ2nMM2/LOnscgHfft/o8BBp3LIeOcUbE7ixwAvkjtV4788+QBwcBqVUYK7dtzlEAyd/xGz2MNT4WLZUm+onseHLggrQCxZRK0LcDRk5JSjMv4IPOvSUnev2Z4stD4mokkAp8fMyQajJzDo/qLxq/cey+mlgD4wX9m772DLMuv+75zXw6du2emJ8/ObE5YCIEIFMAsSAQVTIlKLmfZcpUtuaxgl8tVKlly2f8o0SWXVLJsUjQtUqZKsiBAIiWSIgFIRF5wA3Z3dnYnh87dr18O13/MoM/nfF+/hwULhWku76lC4c7e2/fd+7u/3/md8D3fU/N08AdrV8J1r3Y9Pfx/XflwONeo+VzUZtpM+1bQxHRTU06kQdiOe1nnJLoqKEs1KB8GGMdRUaqicX+NOtPIYQsKBeuHCuwpBLAq01tLkE5AN1ngK5jSap6J143moXGlzxap6K/uucY9Xove4NWmn1uXag9ycVxdjyG8knQ+94eXvCRyxgNBrw/6/u/yNVei/PBmFpoJKiCYY6cpP5KKcUNSo2aHaSsFHPNR5NsHwDEn6Bhy3g9nryqhIKMVeN6deB0rwtRADs8lz9idP3pKfJJw7EO6UHBPnGG5aQtS7RiWhCKCpL2uphnHxIbQoNK1SdZeYkjMIhC1J4GVsBEQUiGWMpuAah+sjyy6Eq9Lc7inqx7tXV/29d4aRIeGaXAtS6dCDKy+otv5d1UJWbYRblRvOXRnnxI9Jh+JpqOIy0vRQ26svxN1i1aZHQEpAAbQw0bXkwabBBWPhGGXINeRtCIIWJqExrZEcUgFUY7fcoh7poff7v45wt/EWWcULleRaCCJKLFRJxKFIjB5IRcNu081HY/zp5evHhzfGsaQ+1f3UIl1LNI9sAHu1n40eLogAp2fxW9L6pGkncrOXrnj79k9E5VeGc77AA6g4n/53QYLcS8m9o6R2YIymC9O7mo/TaYaPHPX/WGU0I6VWQTRaogwj75VtSmg5TcAJFYswLQw3U7Pd4JHT8Tyvct3/Z4DlrFpCqDuz9jdkdA4nj80QROPu7w+ebPn5tScTCobjBzd4HhPTS+GEmVNA7HMEsM/e0tYPi8CjFyPAzR74/BQWXdOqhgYdRB9UIByV4Mt4MNwj7GmlUdA+KzscD3e0oOVK3ITzJ0xQDCo3FnaroR50/hYJrE86z3CHFPFjzmlBjyxSwZvVnsDfWD2+sGxhuXPHvOw5GohkgC91fOKTRoh1xoaRnPpC+VF
AiXOfP+0CIlGjwmYfmM9FjqQ/ytE4mQeFMAzMlqNA9mfIf2+/3ed96HsWykIjoD0T8MrH0y2JgatyRieAhiwFcNDRuryTZALSlNKgm+V54dzn0ZocSNex/WoBg+jOKXX4kdi3608+kMN56MirMEDXR9G5/375hx/8yKiODmZVI/XPdrYkVw35/DphbiuWij+YVl6bjMqjIDFke80fAJpa9krW2wJwl5zUnWYnPHIXyqNvI0VY6jYGssU0ditf4dSWo1ziG7syEMjbh5wHZKGoAe1txG9vLto/XBmwSM1zy3E8PcrYJOcK0alwVLSG3vRFR0yOoPO6Vo9UEAn1n4rDkmhdHiuPi8WfvcY8pcFUarcxOsyjiQ2RH8QVcyjaRVW+N71e/FkB4BZYrJ2L2iY0Y819cXqyYW3wfh8Po4VI1Sq+NkIVKNXNbwbS/ibJ49e5CcAVNFNWSOg9CLzsh5HAI7khQywj/uf/lUfi3vvn0xap9G64FHxO4jSSLA2U6lq4b7FaM/9eyINi6qNGYmQ3Os7MPlkNXoBX969cHCsEZ5duNIjDKR2XF9r+IaRk4qddBMU+FCO9RuCV3jM77/fjwYPHa/5WgSp7jSx4eGWQ3H4mMbSsnruVTQqtXKFot/pSEiDgGBQKQhwmJH0RM9xA5PpVr6BVBX34uU4H3KYl8rJRlucWBMtYMnjXUZirBBb1D4bz5FNmAZDeW5yNcjX2+fDv8+UPF/+M7c/dHD8x05+KVw3xJrYnFKJ8MhMZMd9Zdv3UYL1R0vSuwzpRq3gCiIOTspviHMl7QOYAlcqgQM6BbkNX8O6rnKI+Iy0GGCKvGOmZSXkI+aDHqWCKzkIC8djDv72dXSHfcZD3Lfasfx0puAfRPuPrDXcyAnN/iwC6MjymWoTv21wagiJ0ZBMllyk6rXjuxWEi4hEmamktI5/1d9n7b1oQKeeHBvvyRhTCXTlO7Fqi+R/zdV4HTuWK1cQh7wVDLv4HDM3mbeN9+gskW5anh/3H9SSidcdBaGhz3YAauiz6kbZlMnEWtyRKBnWy90P+bHSuNNYUSN6Ui8tBR/TGNL0AP+t0boQjUD+XwGU2zBcnp+5Ec6Ra+daN5ZvUokT2KkGQ+gHdE+IhJhKhwJvRrZ9qyDPe7sZc0lnZtxDLhXiu/V7DMdOmbMwxPLSSJJ2njoBlOA8TOH8OQrClNtQo9koJVaSShp8fUlRdC/4ycJd19UjYRlOFD8FYYSgiCjqKG41NgQHUF6YoofsmaZpajjDs5f9uVpLcX2zEfY3mrEtBIVGjvaa+8a+/52uCaZ9a4VoyJSwPve7Po5zixFrtdednIYYgKfIqnF88uDEY3fzzonJ+EMt6EixHgNjtabLr/n47woH1zSZTjwIZl72ijKL+WduwDPX43V7qP4oiBe2sOpeHym31VPsw6J9cSMCEZ5d9mjQnUpUWK/f9dB4/64rXy1fT7HBr5yNwMsd5EGH8BpG29Ku/m2mIsKp8K20JcX6C/A2wBRa3hAsEasHZPNjBdcYMRl+uwV2Zb2OhpLmtUOIHRutOhfsz6VEdlTaClqevemKcOuJo1ia5ULjjBWIQwGjMyqiKWamrSoyH5ge5jdiyNwsRpeK8gOcf5WQ5Y3XtVURQYj96i5J+gE8PMOT/u0WZWK+ifYR9/JxZ/mJBVfoBHKamc1jgrDPlqa6OxVXX1srAnRlY1TS1Qumorntk1vbR/D3uEGYmQ1QcVVkW4Rl8fzZj6kaz7GAgc6DUlewSkudqaMgjDqlF30OlN+MCiL0s9JoI9EC0sOKkaLYliBuXynaGI2laEv87ckVGjNvuF5vPiObMfAmee3tBEOvjWKGUil+85cavn99z/xb4dwuylePFXxvfBHYHjOzp2a8l5ZGeNp5f/7tXsTwvPW2P9jjl3zfvLElqWLqE3GSisxsXIm/3TvmuoAwkZxkVJg21L2meMOfv3cM95DS9r33uyevjsQ0md48FKkN9eYJ4Fq87BPh
9sekSgsRk83rMrAo7bw545EaZYgkueBjCxGnw34kJckdMKU1d8FTZnu7cSLk19AXR9I5o+HhaZW8hAF7Cz5ZxyraYBspwJT/Jh26lrcyFM+KLTOzNigCivuySPH4jLooroTfV5+RBhXXl3bfJtW4gnj5HFr2vvOYf19u1kextQSFkZtCU40OP6dOAN9fsTiBKBI8PN2SGlR+3J5MERPICzW0TG98IJUa0/A9gXMKES8FJjNF9GInOio/MusNQr/eiOdWSr6rEzi8JyknEpSqpztECJq4krGWBqjg6ixEJbeO8t9E7p9D4cPMdfB9Ca6ttAlj6ISkAAh3gcorSBSY36I/xUh9WJJAj9NDr0U8rTXRj3E0G+dbD93TNY2STEjxJRKpJ5VCIuS1ORTIjEr4zjIfmhfx2/vRqMnDUa68LT24APpnD66cPAfncEvAdsT3/OLOcwfHbKZrZna1505ARRTtra7vo91CfMaFE55hoTG/PBsdlbtvuQMyFKeWrUPSM7oBYLwG3JPiZe2T2DsF5F++Bjbuk5OxjjkYYvnCd8jgYXWGeuX899aTSHNIWJGTtboUB5b8OmSF3GxHg+STp/yDa2dlcme8dCsysKUwVsoIt2mJL3vhbL0Z63Nzx/FR8XGGyvMAp0S5T6iwtGknPRsSxpUk1cGy530BbSxe9jFonFZ+CHhfTJNrFgQKd4z4jOsSf6el58TfKHiaPDxjrN34d9Xx6dbTdgdHTTCPlFuIOIy9SzrYftiWqHZoUIiGpEXpx9Xx4OV481CkPQa4h85Lbv7ssG4W07Da0Zg6dgTA4yvSufnHTnjZ7d+9/L3h3Kfm3ntw/Fg99tJ6ff9wC04Z2NdQlt6RxsGFO5i0AKlqKjqkOgpxoyVnSkN6daWIGu08iTYiW1LccQxpdW2Yye/GFKU4O4E5+whmtEpouzHAprf3UYm6sfqqHiftEJvz0ukItqUju7aFiJ84nWRozkkj2xFbEWA8R6LHy2sgg5RIerOK9kSPy28j4lNc8T2jtxXnTQ+QgH2pTGGE9I8ufcEmCY2VGfE632i4Yvj+lTfCubcbvrcR+0ows5nZELw28yej997YA6u48FuVZn1M6jX3VrfT6OGSQiKV6EzzvN8zD4NzzEFnpqH9jpE534J4EDrk5OfjB95+wgeJm3ZlPQ7C8JT/Xa8bd0FtKPhN+YkzXwn//se3XTmeqcfFsFrzD/JDl14P5/7lK88eHOdhiBUkN5s75tZbpSw9XhAlamHCF6W3Tn8BH0qo0dmfq3dS+CHguRdmcM/tuIPSGWgfD6dsUGE83EQOj+pU1+QyQmzUIIGVT2CyTkLigsa6vTMtIp4ZN2xGdQZH0OAJGIUpURCCzsdo7tf9HqS5NxMyR6bMBNPFb6SRG26KfQLtpUqLEb/a3XDK9s/53zHVahaNstGKP7D2orqF/it/49l/FM691j11cPzVRgRvPlr3KO4Xt/ycFiwEMlFZ06HsmU0rZX0k8A6pzM3M5qu+mejGyMgQ9V9J2METOF3dikQd+H3hwSp1Bde+ci4dBelt+LgVwUDcF9K9aeXDbBo5FALWdYDT2aMur3xq2DxH/TjWAVTMaIHoanZLT+QeoUml4FdYmt9HU1NGlsyi8TYvjRuvdhzL9h/O+V62M4rG4RcQju+Id/qxFW878UYrOg6nsXfeAV7t7npMNxPrNxIno1Lz9d4Unp8+Kq72MHZa4MPu5kWhjkng2HFdKQ1MAet2GrGoylSDp+50GHbr48KpAO8+MLYKnweR80vzMba13faF8j2rXsK6Ic2iSER4TBhbGRZU5sranE8oGjxapTV/DCF0Mcq49PKlyVGiAEQVz6DL4jHFV+E+Kdev4j4QUVNPPUQ15e84lCGapIRPE6I4en+S1WnzUG4m41iiyXlhcpAQL5SXqOJRkGB4YDzLYnR0wLaq7SOCkSPjxKgOGbsVFxaoCMZ6mvkxv7mmKjln+zOyobPUWjuAYwy4Js7VIiDp
HnKcbBdhFtdtXXKXReT5pqWtyBy7Lw0QQ4UQsY8amZ3S6JFEpkyPm0knbe6fknIidYHOe/YY4nONReKm0BgcCcEjBSNHelEZvXnZpPqngOEUDp3dLXg+xIdr4BTjVHstTnY6XEVi0MSoGZyAITNUgAm+l+BRAx0LnoNVvmYxOtOQCM8zNd9wvwDg8GwuRnF+V/XqwfH1fsxI3MS/l2QjagLQfHbGLfO1evQs+zfcAOrMCVM0DHhtGZGgUIARNXL3mJl1gEfs5yUjgfm95J02rHt8srFckuDDNJlq8BAboGHzSc0sFZPAiEm1FB/sFDrHPorukiuFmCupLPnfPVW5Fc59fv/xg+O39yLx4Ik5v8/xqn/8wemovNiry2Tz2NjzyVAAAK0rIfRQKSVQpQrSNDkhNuw8AkAeiLrG8CtMJQljLonKlAmZYdlpTUH5e5qqijggHI8Zb37YFcVfAXtsXaIJ5LYhoeXOY0fPm2WqipEOjYppFCsIxklZmJli4ViP8fDQ7tIGfLgnG5WOtQLBM3elGzvnm/aHCi1nTvnEPy7gtQ/MvH1w/E833xvPzV09OL5AqnCLDREfmXUjarMbrb5GG9WVq9H6TkhyimPF8LCcvSxszb0++irtR8U/WEUvIpDOqaE/xsDN357Q5HUaz5Y+/5EQVDYx6lZUnCOMoVQMjQRUDdrGI0VUi6tgcUE8S8jGJXFcaVDC+EnEKKMBP1SGgUlEtmbWqxyOXxmJHlgo+Tz9p9cjN9V/9/gvHnrvD5XjNr058vfeFJDN+ZKvpc9tPBrPoUz9DiqhH1vZCNd9/ZRvgnUBXbdugN1cWm8w+JgCx9Q5ESd06ZiPgfJnjU76RtS548pLU19FRHQ76++8u+5Ug6eI7JGyfwYkOsK4AwGjMS/eG0gDQdx0d84H+WQx7uik2V7ORbf/Lfz4c4uRv+d6y7XNbNGt5LWtGEE6say0wIc/fw5Ror4AwoY5VGnJGPSxqLQCg2HZ4ch/a/Z6nEy7F7GJjRGT+bF2GGdlFjFZajTxHj1tLIrvG6qtRCH0QiWZbOTQBy0Bb9LY6iHSUIrZyyMnfKexqBsMFEZtzAQTIxskyYQZ8R4uKnJ4MgB9kmE7jEHaEJVSsjsCsnXOElA/BEahK3nM0wWfZM/MRATr53dcGX904c1w7nbLJ+AppKy1j94Q3qamMIgvoHGYSGSBofG5avSk99HBO5mJg8q+SsQ/5SSCRCNzKOsqGEc4rgjYt7vC1ObRcwIKMBRZyVqWzZL4nnQvzhVWUQ1T5d04/HhrM0YmFoERLdQ1jcIOxbjJTLyOUaPSzbhg+o8AjyUGcIp5VNwC8aBEeEgi+AOnL4dzdwduhFwo0giJv7WOWyq7OQ3nFxZuhlN7WOSvrTku4lExeFho1Lwbx7hwHMbKfFyPrODqo5mq9iTr1Ql4FRwQeIs6gOuO0WbwvaVzwjSZHuE5hQqPnfiDVOJdpCQ0t7nb8Bf/PY99I5z7/nn/9xxotk8VogHSQc7yK52Y7//hut+DnWjNYrfYPsIR7zkXo0Q7XVfaGja/u+aT8PSqK3ANu97d9OuGQoFP73kkvXwqUAoMr977SLxu9g1/fq3KYWqpcT5+J0Z4QpsB5SNBt29Nn3Azr8Gm1LRYMIbE+SLPDwHMZmaVbdAfnMRG8s7B9981IZicJISaKmGljYJQLUes02RAMzdBbSkwzUANzKZTOpgT79gVgyo0rBRjiBt17Zh/aBKnmZk10UHwA9W3wzmSCxK7YGb29Lwzye7DSutJdDSP6ExXiOaIzUmaIK7TdgRwYm7fiOmBxVUPdSpAM/RVmpkMik7AtJzIOUbzWHGkfc043uSQOSrCihlWbPWEJye0/1iI+pMkc9VHov4nXcDWNffaclLaznPKvhsIBRfABySpKQJqC89FjyvgsaR6lwScoc9Wc/IWq1WHdxJ//hqwP399+/Fw3ZstN1Z+cOHVcC6PPPv19uQGeY8fW594jj2scjVpEMpz
mta764pi7oKP3d62eIP4FqqfemCKzj/iwY2pTDvfhg8wPcIDLhH18tj7ZMbhN9Z9MoaWLy17iO313Yi2ZYUVw9+as1yHa/QhUZwvAfw4FHd5E2WlH1i6dnD8K/fiBGKuPicAlhxCaTdvuQE1K2RNxN/kJExKoJ2mJvhrx1Z8oW/sxPRcR4DK4RzwIskYqJEcHpNBxUwnD7UEGn8XiuRE905rQNo4h7kkEQgaOSyPHx093R6MHAKTdX0Qa6GkhORf0bEYYzU+5G/G7jllwdNRUcOleQbpOXnGLoDKec3V0wtGaFnXDktr/8DiV8O5D89cPvQ6M7MhXmit7dHYhXLUC1d2YflriTINFDoZ8i7chM+ej57u2i5JkcKp0KGZHCH6/Vh5md+erG51jlA4tzRaeBSEaZvQC0mMglFn8rbFirW+4KX6MJxCXzGpqssjVaKppFEOaUfMjcrl6Jw2L/qCnDsWvTZSMFRn41zs7CISgvmgTaY5nx+djUbHrbYbPD+TelPQH53/eriO5esNAfB9APier1QuhHPcb19reEVlSWhgmJtKlF8MY65VjS2MzwDfvqCgZQD+S6txH12a8X9vNdxQGohD0264gVyqT041qkw1eJhS0HA40xBsSlgWYB4jJvTczGLO/4Wyh99uDCJqfDXvD/JKL5a+9pBXUGKy07P+d9t9H7zrt6JHWZ/3hbK/Ga3R4yfB31NEJGhKV2SlVK/ccuuCrKFmZr1bbpQNL/mHGwNXYmOkEjWzoOzZEFL/jozJir8Z8rVlHOeuIgIDAkpNc5YBxVCDKjTek/nJ56ITPw1z9LCEncM7x1k2rsBeHEun32AQjyb/HSNDen8aL4XWZIOKQOWhNGwkhkQNKkYcclKyHjpDE7ArqYirTTfaX62eDudOIW1dlLVU1A6fD4SRWDOzBM4IPWyzGBXJo4JGe/1p01FKD9UkphU7jGpg/qpBQsC69n6i81OFapSajRCxG+spdARkABxGio0pJ0SLgU+nId/rlHC6QGbAqL1Z8TkwEMMopDU1zcvGovgMrbPC+YPxbXcEt4V/K2i5dhtpoMdR6aX9xDAxRwL4qub97/7Rb77v4PgTvzt2VScpoZJ25mGZX6rEUtw7fTeo2LFgrR3vcfycr817t2K7piaYlrlvmsWUVhddD8qVqPBXH/fnuv1K3M/rz/tvt8pgSBfjuYJ7KiXFNJlq8HCTmr0WP9weMCWGed2QnF/lmK/kR2vR4Hmh4qGhdYCvzkozwb9574cOjt83ezWcu9kDKl3yKF+5AvDj874bs3pLRdtfbG77+ywsuPW53xbeD3i6M1+SknJG8ARRXsakWaz6/XdPSnO6qv8eFbhZxMtI540IaCbQfAob9Fg3c7AwE7yuOCCmwjSlRX4dxRmF+2N4tArsKEjrHDx7RHgUtEwMTHkzniQpoRpDAQiNedObU4OEx/GDlVAZRIbmMaK2KWBYzrExckT8HUk870hb9Q8uXj04vix52Bf3fW2eELDzL9168uCYhQ7re1G3MGKQLIiT8Rao7U8BVCutY+gk3XwrcnyVUGI9KMrmStb1Ijx66YHHNLKOcQHX7j6FiJp0hmaUTh2JoyBFGDYjkhDeiTosXcbGNyc8OeDo6YlN11xFRBSbqqZURjSAtEs5frqACEPvVHwOpkIL34hKMnnalZoaMs2nDm+cmZNvvtXy9bK6Eve5t9vuiP/lD/2zg+NXO9FZYHr4lXYk7fwne14c8P0zMd1Fg4fUD7PSzfmlnvPZ5YQYMB35+DS3FUx6+CbStRgtGcCAHc3FdXt1zZ2kMgqeBs048Utv+zim55WIbLJMNXi4+eyfEWXJiHo64YTFdvWfufdsOJdHKublpn/Un1j+Yrjuo/Me/r7dj0r1Xtd3dGWdfOysG1h95EfyEoUqIUesDQrZDoNVGyqBYCpmowLnjZa+9geHh3mHymHBqhxJOfUmVA6ZRU+Xiz5RYDvTFAIWpiHDCMS0jt1qAPRhH3YX
4hwha3BgfJ7Q9fuhCh+d+WzBhijmhsJSZUaJzOIGWUJ6pLcohJ49brLx/gEzxrkh5IWB80jmQxXRduV94rsVYMC/2ojEn9+z4IpZsXGsuKrKuj0371HVN7d8Mc1U4trsVHyCaNUPK0M4VmNVcaTNOBMnPnvzpcohg/fhGgt4HovzorQdF0XAgE1pnsmshUbbjoKwFJ3phaHqBwK9JcKTP+GTeCC6j1E44kv60mmb5/KiB8kknifXi/A3BYqB4/otEZ2R6qJ89XAsZkHY/4lDG0qEh2Xkn9112MULxIyI6D0+WneywbuSKSHR4XrXnYeylGBv7kHha2X+gq9BbZvRbqFjARyCWi2u2z0EC+ZWIs0MubC6wAAmEtnsPg3KGXvnMp2iEL+hZdIEOZI8Lz8fw1drIBf87y/9i4k/1UD97N+98/3h3EcWvYpjW0ITu30foK/2Ys+RHfD8vGoeOtNQJY0OxSHUSyBawocaCohx9g2A4rQNB4yEbiNGhvLwiK686kafbqBa9UThfpGXNFAZFR+wKcfaQpBpWTMK7Iu18IaPz94FSXWw5YBmAPqTz7F0l8+lmJOjIExD0JjQZyWWQ/tg1VGBp8Ds2h0YQ+S0GupYAwQ+Bd9FojptHsrUiabMaORoCrK7AiXOklBhjX4LHitpJ8wi1861VvQQlkuu+K8kfm67Ea2ylIDQsgwkFCRJ5zSiNkLJeisfN9DODlLYZd29D/f4chpBAn6vPzc5pThGa8DrsHbGmgofAWGDaDZbHh6TiQPDpXo2RtIH1MEy1gt139z2Ev8mc4txsyTX2mZdeyL4dxmm0MFSLWZMu8kmywiSbtSdnt9niHsWJI3JYpdfXXssnPvxU1/zx8KmQd4dM7NXuq7I86JAtoaurK/1InSDTbnraCzKPdQsFgMsPBYpI8iL1ZJm3UMwHi+jT2ajGVPRy3O+vneFwTxsv2wVItE8Oiqj7xTTcuv05FYHBGUyT91fjopzBfw3P7f+PeHcUhFNAlFR9cx8LC8nEaEqR5ab32vH5Dep4fmh1IMYQOl1huLqAo+TX4cFK9U1TfQsGmNDxaU5KVMMxFcM827EyUS225KkSEKTSeH9INlmSINI9AT0EGPe/vybhxs5yvnTZHWQhHLZ7FI5nWgspNBT0yJID00YriaAUhyCDloKKB4rYJ/Um8f6Z7BUsRtcj9pQsgtiQ7LFqnE1gkKZhtEbA0wj3TWs+00rYm1f3kPz0E4EfL1/wYujGjQWAAAgAElEQVQIHpXWElfbvsbPz3vedLMcJ+ZNsGCmAmosoDqK7OADKShgA9KOODEBIyTzebiH3+O3GSi4HGMlxgrnBb+v9l6jbtE+W0dBSnPoMYU0RFKP77t32+dAryoYHkbghe+CURFiJ7sSHZ9HtPHUYozWXb3uFjyjPzNPxrx8s+XGUF8IaheWfKFpxR1TXKUl35PaYhQs130/ZHrLLKZ5f9+iA5Wv9KJHs4Hu6RrhIaZ1V7wwrs+Foiv8tvQBIt3DfCWGj7fQ9klxrDRy+Hes1DaLEZ6BfEOmRwNZ8N24txdO+zj2vw0fYKrBQ3r20rU4sGkeYDx4fDnxIre7/rIkXTIzu9p0hUXK7RvtuGs/MeOpqb1+nITLYF6eK8WwxY2Rp7/C5FLyMSil2m1RNucO9+hTMXjY60gVG3Pw7d5kV45hefW4e+joXpSUE3E7WtJKsk3m//uijJhm0c2PKSjeT0nVQsm9GjzbCAdPATSTXfko4hVYzdQH31I9Mh1YmqCEVSI8w9Lk0ntG02p3/br983E8GREYCWaPEUYaLv35aEEyxaLcUXlEKpSSgqy13NBLYsmy1YQCjr+668r9TDVuOvE+UI5Sls7qj4GW/0JdtZE2VEZsZtrqi1E/Ne+59T3SssMaxpI4h2mEk3IPGjmM4LajYx5SiEexSotGDqMDu1qOjMhNYazhI1oFCCyCG98QSkFbjdxDVd1iXfK8NO5X/HnL2j+tAtzIZpyzZHye
XYjVRSRVDHw0kvpq9nw+M3VrZvbGrjsIP4Y11pfO3Y+U3UH4V9sRJnK+7GDJk9LnhFXMLF/XrAbB4JutGCmrIeMxECAxo01zgIasLkWMXrs/WbH3CRthbYdEcJdmUc1l3yHiQRoCvZgODKXLpQ2AB49HpfcDJzyn+IWtC+HcatUHYq7gxsr1Vty17wKnc7wS3VmWuutC2dn0j1W+5hOtopsMQaQSYVv+mt9z91E/LkfKkeC112/KBgHY0exL0ZrYP4+JN5ocPaGBOc6TA4Uum1Ptjp9rIDqjKbNQGSI6m+mNkHISo4nVR9V78SZ0NkYaXYIBN3PL79maQif+sIQ9j6r3/H33I3YwGDmKq+J+rv3CmKvuzQMrI0B1ppkU0ByNTXzzrjotfsyu3mYRSzXW1oJs0FBEaxJh/d4VT0WvCVkQK7gYvjcze3nDsUDE0OXFo2SkNtWUFgHlHNMIAQxAYsVbVFZ80+yItx8wKJimWl5Og1Z77HH9DEj0KBHi8IxTUtsPS/b3/PvVZn2j0wo4MhWr2cYUxVC8/nNzvnF/bc0N5YakOeqo3NFUSR7kdCRHbHajMiLWjE1AzcxOLPp+pZs2IxPkDRoqXw/ur8SANXgnLwOM/MnZWJb+YtdJ1M5XY8rpH9z40MHxnz7/6+Eciwqer904OL4na5OpqWOn4n7L51cHhGO+V/b1srYT9UK1QqMp3mMRhszaXTc6CkvxW/C3Rm+LgpoiUw2emauIWghGgV45N//5Oel1hXCBWos0UPZR8q39sij7g6h4KuAQeO12BBFwEnZByFe+Gid5nt1zxfhsogybuAluRvfv4cdjnV0x57XkNLQIAGlj+6SkraZU1JB7R8vNGxcmKEjRONyEe2IwM0rA55jKfqkOHKP+YlQWaESBSVZxRkdBuBkRTqYVZay00fRniMhIRJSTJ2Bn5LLg6U/ZAwMtQV+NUBj6YgjwU+al7D2kYuA93GpEr2h3wSdSQz76hbor6jf2Ysj+eN3X/519V8YFMXgCUHRPrGhcyvWi0zKZ0qqBPCNdiUiMEOEhe7NicTiOBekNN0LUlmtAddC0oNFRkOqMK4gmjB8l3SvO+Us216QCqoZKr24cxODIwmjsdidvX0Mh8coDnDzcRNqqHn+LxkpBoj/cnBWXwlYHLILRtA+N6k3Bo9LgmUeo+41+XB8vNt3oO12O0dFPnnz54HhH4Bk0VojvmS1ERVsA6WFFOHqIyVUiUK6X3Y4bDIp3ZQ+8vc04BpustCPhpoDLu8Dh5i+886aLUw0eRnWmpRdayDG/fz6SKX19xwFWz69EznSG0m60XOPeE16AMgb9ybmY7//yhlu7T5+O2J+r2x4polW5r2FMDHLxjrAkH0OZJUPQLekBwp55klbiZqib+KiAr7o3Oe3DMm822zQTQLAoXP5eSPfqPktIgsyfhSt+8cbz/tuVyNNmgzoML0mzsCR+DEuC32Z7CuX5OQrCtBAjJmlOjIn5w+eNmVlx0+dOUVKcpM5gylAbf86/4b+3/ew7o6ReuCzp5ieBcRN+mgTg7KFgeBi9ayGSdWkxeptf2nTF/HtXXwnnzpd88tQEFc1qL3JpvXztVLhuFpQOexvSPJTgZI6/4KkIuC1JhGdtzb2T/BROmRRmVCIeB9PbvTMR41S454uculbT2SMydU/hDXpY0r4JLw4NJE3aO8yiyq5VE8cVXbjLQjHw0nX/7sUtZBOk/9vxWQ+LX7sXsZ4l9knDxl+R/o502soSQWI0SDdgGkqMSF3Zis+xWPY5u96J3i+rpX7Pkhsu2i/rP1n+3MHxF9qPhHPcU1eLMaW1q1GLB/LyTqyu5Lvda8Zn5Pgs1yL3CA1TprOHYrG3AfDW6schDNoCWJ6VjLJSRTXgt8FQOx3DwyaBCjRd9P9QWfBdlWAoM7Pb5iv5X38l5hufe9qBi7QW97txMfzJS16m/ht7F8O5Z5a8E+V6JxpKHOg5DFBtOU7yFj5ATtD3zEMHfNIz
8T2Z98zLQiENufI3EKyXMFctG+GIYW1ZbD1ygmhvkuvwZsj6Klwf3XMgPdyP02L9af++fLf9k9EqyxFoLXOwfQl5fmHe5ALj1tdrHz0QDwG7I5aQr2saYvIipAEsfTNtHy0/QgpeDFRGWDXdReO4C/D01lOTOWJywkAc8GpaEroIDBJaAlx6Ljo7M+iLc0vQ9NcAVFkuxTVHQPOr+66MX3gkpgDYD0hTWkzHMzKr5kJl0ee2AlFnlt3ybzeVIwH3hLHbX4pzOwc+mLQ3efyJAVRi0VAUounLIyBFsOUOr/nmvPR09IgY3Ti2HHP21P9qeC6jGovL5ZwAk8+hA/hqLd7/63fdaGJX71mhOtja94U1W43nuhj64/W42d/cdoed4PqC7AXEtP746RfDuRY23L9y48cOjv/Xc/8kXPeb4MkpSmUHQcyXuzHjcbfrezGNq/cuxnV1bdM9dk3zFuCtLghmdgvvtgLoyUCA1cTy/rvtS+EcFyhTokPBQs3PoqfXBGqXw+Qd13P1jsUXZ+kgP+qOlLi9veEW7iOPxQgMLcJLsz6Vc3OT+3w8Ja22r6LV9ss3owd4bNEn/ckZz7/u9aQ0HIZGUZj7GgV/H+UToJQQNVKjhh9E6bg7XRhbMKi0KiQ8o9yji79Llbm3Bi4DpiI0LcaQ/WL0uEkAxVYCynTKlIly+bSX/f68n5nZHBRLUHzynkdCWI085X1bZ4DNEmOCKcKORANDdRTsdwXC0+DRKjA2e+2gSk9xZ62zSGkpgy/fU0vimaLD4bzk9Uh89lInNm8jYehaL3qR76nfwL9cuQ/Ek6Pe0WeksCO2Gkb0zKkjzMz2Sq4nbg9juq6z7RGlJHRyFkOXKa0dif4AKM4xzQl5IY0cpRY4CsLoeaPoE3OsggjNoyvFqAP2Uc2kOniu4hvrbtn1cVXuUQeB3r5UBdZRDt667QtrsxqjJwRg70uF1XzNn6PVl5JsOJq7bZ8bS7XJ6ZbzpbjP/WbbI6LvmXcj5PPt6OSz+pipLzOz43nf8/6Pex8L58jkfAGe1kY/BgpOL7oCOVWPa4IGz+vbMdX27LLv7+TWurUb1w5L9dX5pWHD49INIS9c9vGuS1/LaTLV4AlVHdKRlEh0/uDbezGE93seee3g+GI1fmB2jiVqXOmy7/V9wN5ux/vTAPrxp74Wzn1ly5XsySqamUmZ3511NP6Umn6W9bZuQsmJYcHUkVZSVNHuoXFqcvohRF1Ef/cQ5i28Jrlx/LN1UgwZYjEAqBzjVdr2CUUwrpnZsOzv3UB6IxUCRGYmtLQ9f8vv0anGyduquNIpraEJnzbdPAJC3BI/kTZ0DZ6KGBP9WZ6Mf8YKq2g/iCFLBl8xsHu4fwnVcc1zk1sb5MZYmBHBkCkbytSBZXlbyot+Lygd/vjs2+Hcv2i5or5diNGft7terTKPiPHdVjSMgrMwF5UeieFyu2SznIwnWCpHr534IXUkEvDrpMCc8L+bxcrFnHCUFW765k1DpiD6e4CIjxpDR0GCQ3emOfE68igpaR2jazNlIX+d4OyVxDD6MgDNSnS59Q3MzaXJPWv2QHynTM732j4fTixHQ4BR6r2rHu2ZfTK+y4maRz6+0ozpqBqAoD804yngzVE0yo7l/R6vdqOTv4l99PevxP3w5+45LczlnO+BShnByIpWcN1u+bVaREBaGBIOK3h9t+d7gUZumOJiUEUDLhXcc68toNApMtXgYXVJ7Uz8wMdmfND3OpN/kBUY9wSUwXPHS26Zak7/vSBeerQS21O80gJZn0xyhjUZ+lupRKv4GqIbIwk7K1X8wW+JUiJGRataiL9Ipc9WbhuEVeA0SbXbL/KZvUUxynBLbQJJYCqjn4oJKW/lJp6jkH9JhaH4VCjDE/Q60ugSMS10kGu3jh5egWmsIb5R4YYayjCMNHiCoVGsFo2jYflwSgSzCOXQdiLETzE6oCDzwGotZG+hIaZiSjAHRriuIOH1GwOP4pwvxPTGD9Xc
G2ylMaT+1a5bj7+y+9TB8VI5RpCGS+6J3tuLxtAAa7rP91ZHBUp7X7pXE6Og1Tx9AmbZmV02gRTguERUyQBODIlLW6syYYhPf2dwre+qtABUXj7mOndOmr12ev6OjKyZmW1twMmdnZzuYu82pofM4rfUzXjmcQf3srxcq6hGO+BaEz117lLEj1IY1a+e8efXzZgGj4KWWUa+ByLeC4UITP6V5hMHxy0BS9ZgLK4P4n57ouJ7+B2QENbEwmbkrKAZD9BLVIvx71i0wOyNRmAYAVNCwTwchIDvmYlGKo2o9Nvwi6djeE77hN3fjx+O3tUjxzw81pRQH1/8+2djb4/n8IHnc36/ThoH+TPoTnqhGBXnc4tOgHK5F3vhfPbejxwcnz7uv9UTYOGxBUxCZaOlp4jNKCceCknLhsLknJ7096nV4sfv1Q73NhTDM0SvmdyT0YsieaEq1eJlUHWDvLDQiPdnab6mBwgA5aaZrkiYCMqifF1CkLh/YWdyiofGlhIUHgUJhi7mkdIZMAKoVU5dUNYrBT6Zlgl+L+2pIevHCjJnJSCNq2ntLvoFMYZgYGll4QjrIJk9nMzMzOz1jqejXpaeP39ozr3PS4W4cf3rnWcOjlfLHpnd6EmjRGxqCmpkxKAw7xNMyeS6SLOo4icmQTdQgmzbDf/4qaZ54UARaGlm1r4DTxfzRykjEqzVo9g81FDA0ca+oK14mBIqCjZkE6htNV5XZl3fdXH/vDBXs4J3ZyeGmEc0UPlNhE9nOA8DXkhiT1R9n9DCmiaaTpOIsbUW59u9qo/BsXI07Nhe6Ut7Hv35S6c+E677w7OeNfk72x8I52gAva9yNZy72vEo1x887uvvX289Ha6bwTooioX9/LIXHv3SN54K59g2afWYr9u1rWh4FbF3avRnsI8GrVCNCm5uJv6e0xoAq0w1ePLITT97KuJvnpj1SAsjMuuSj+8iRn+rHwELs7nD645P5aNi+MGa9xJ5TbTvjQHCh3K/Dx+/enDch6d1V5DnNViqmyblkgFX4+PRERBjGT1kRloFRmNIcAgDLETeo9+QPjFQgoMp5IUjKT0ewsjJwTDSTuSsMBwpIyz+STp8JdUqIEWizUOn1U735uEdTdjwj4qEtiHwALVKi9GfnHyvudf935oKI7aXRuJIKvNYzj6sirFCox2HvcU4L2u3/Dm0Ik5B7fG3/ZCp7UfKMWXN5sC/0ozK8TP7XsCgwMs/vPSlg+Of3/rgwXFvKJWRUHQj9dQ5N4Hh0QjJ1rrrgrv1yZwjqlRrwK2075AePN6fr7ZQE2LDFbdyegNf7yUhRwzFNVOM1ocl+QVs8DD+FpaiEqBxeedqhCbkYTh3WoKPURLBB8LSZ7PYfFmjS7d33KBKL7ux0j8munQdRuhq/N2Njhs1zZ7oZ1KPgLDw+IUInKuieulZYSvV5tfflH+w88Hw798PZ+FrOxEb9+HT3nfyH++8P5x7adfTXyMsYgUVc95/bT02Lj0z54ZMSRphM61Hg7YkmM1lGLA3tmKQooZCgc4t/061M9E4pOGbameDKTLV4OFmv96W8FvVP+odNClbKEaLmU3KtO/HLzWeOzim0vvcZkRu/9dnf/ng+OliLGv5es93jLrkmbpQkCxt32nGhbKfR48sUZwBKc7uv1q5ArbVnHTqnbvm99h9RjxRGhNvuPJNHo3vkidTsVAhMyJTlWoh2odcTwqyJa5EqB2sg3Yhc1f9gTefj+9CjJDg4GLpvMw6pkyY4pmw/h+qkGgvj3ROSYgiB2CnVjI6GnJDSRGSWC5grkTn95anpKqArSLmIy/g6Q5aUBT3ZN6T5bQ4JapAipQ0RjZnsR770mBuHyH7rvzdNnLCDL0XRH/sIbyu3C1MM5VvQjnKZdVzvhDag/gc1BnauJQKfRMNQ5Pd+J5MU9eKk8GVQ6SeU0khUq1ptO0oCIs59mHwMNpjJkZpVZxCttiRtOPdTRgrmAJKNEvRTbzHbtsL+F7K6oxo
bF+4nYYrcBgF20LIwezxyZEsBd5T2Bj7x2ZeOjj+RjvidP4ZOqKTz8ps3HmgkCxxGcy2+4P4nrdAEUPAuFkc88GNaBOcet6jP5tNPxccAjPLgasvX4/GEDmMOiie6bTjM87MAEDe1grKyTIdtAwAb7sWJ+8mDJnzNR/0C0LO8meP/ZuD45rUfe4i5NaBJvr9c7Fcr4FyvX+Ij21m9r01Z3L+eic2DyVGaCnnu+fSTDTKOBG0gqgNLg6G8k02gRnm4M9Gxdy46NfW345DTpp+0gDkdqS/CXlSJO1WvYp+QMfiuQoMIIJgVXFSqbaPyTmsoX0QMVYjnCqE5durkwGypd04D2jkMFVTirCxIyEh3YBXHANpI1WlzVLDdeKxEwdFwHFvOY4nO5+P8RpN8Hi0cSYZ0hWbRWDyWCd4RH8IIv1GM/J51HO+Ef7Q7MvhXH6Ma9flsy3vFH1pxvXJPeEtYflyeS4aJMTYdC+A/Ve6qp+q+qIoF2IOlQaWYnjYEylHUruqeMswOBvSXoMbOw1O9mEzM6vdnPytj4KQaTmPNJACvUnkl1Tj9y8jzdHYiQ5pGdG07hUPRXYlwnAHYcrmRtxkq0tgzca+1pNIup3y3yIztJnZ3R2/v1bisv/XDvBIw0WpogI+6blKxK5VEn+fK8C//ZnlL4brcvAy/v7Oc+EcqyE/Mf+b4RxL0d9f9SKCVxvRoKKxqOX9t5s+BotPRmNrBxG39mtuNJUfjfdgC5BE9rIW+mw9edYLkt64HUPhNfAnfed4eBCKr4gyYNNOostnc9EV3cFu9towxs03UY3F3OPFUgSHrRZ8wJ4ox9TaW2ChVNAyo00sl9fJSpDW9rawQVcPNzQKO3HoOujXox4K/6WMvEN4FNM6lvMmbIw4JrLXMRwejP8pxINjShXXVhChbcdit4i/6cUHIVmdtpagBMbnI0g8SKGRUBCcTgoDaCBYA3L5KCajsgbOlSnePMcpJ1CqPlKENC5HJVEMeOTyluTSaWCPpWn8P5C47WwlhgY/vf78wXFvJa6X58peer4qIKTvqV45OP7loeN5usV4j2m5+yKI5rrwYJVZmbrgyblowb+250r25NwU65ugYgF/J00Ylfq8iAQTVqhGMCkONJp3FKSCVDwNF8U9MeKjLMYNYm5E/5B+oIdz7b1oQBZgAOWEKJKMxyyamFuKc69xz/ek7m7E31TBN9feqUw8d+mcz6MrN6KSfP6ER0GO5WMIuwLFuzNyxf1aLxqA78ccawh4kCXmZyUbQlD0zgj8crIfXgUPz+mzMRXQQRRUu6WzEejgBOaEAJNHU6jD2UmdDsfqcnyOfZJAfhv0Je84pXXzRsy57rT8Izx5zAfv5HJkd6S3RgvWzOxKxyfDIrpSvrrzQrjurX0HW/2BEzH6Q/zQ9W58Rvbrud3w3XNzO+ZbCGjLiSFTxYZBrIkKN/HqXdn8oMwaj4iXPcGL1zRC5fZkgCyrwjSiGTpuI5JSvxXv31lCJEhAsPy9xgX8d4kKMHxf2ItGmZbLh2ckyR0Nu9bRU+6hzLg1+ZvQ4NMID/+t7R567DjPjVQ2uhCBKU7BR6FKSxvwsZx6/zFhnGWKVioVGSmqo5ng89Xr4TrSTrzYiNHXv3v5ew+O/+TFL4dzf2jOewc9U3Uv+F4h8nlcrzvgSdlcd5G25ntqdJTKfk4o9tm3b6s7uUHhaEqLgxFI7k7UxdMt+vNzDSjInY1ddb4cBakiKk6jprUdN2q2LNCy9AraU6hD2gEx7AA6RiMwvX1XwpXZ6AW09914Kcwi1SqkdbVjvg8NfzPON/qq9bdiBL73vN+TVcsF4ZkhzQKrrczMfqD++sHxx9Bt/G9uR/zb56CfT0gnaXZcvzuI/WIYcSVfT1Uws4+ueFRVKy9J9Pt5KasnH9MCsii7rTgPzi+713x9K1JS8Nu/BZZqNZ4boA+Ym/8OtZYgGd17LkSAlXY+/6ZoHp9GDvk1zCJwahPu7KLggD6y7B7f
F4VpmR/rbkcIjjBIa7d8YHUzNnTPHQm3TPusT1jSyZMW3sxseMyfo1mWYWW+WqvA2EmbpwRL1FtiikRKlIEh0BK9YQVWN5QlU0xmZvvPTyZVNDIvTyobNLME4eyRcI4Ur/gEFTiHJcCt9ACynkQJ8FCFhgdJ9y7Hy7afPdxwMYub21AiDqSCmBaRG5DrSTxilpSTrbmbm9yANJHGoozyiR1gKQwsRi3mpGjg6Yp7sycKMULySM0BziQPNTP754kDmpcKbnTMTmmupiR3ISqF9aJsykwzvbEfw+Zv7binq52ticUoIZ0WsCJmVrztG23nglAXwCguogqvq+lL6KuyMHMfBSE2kxhIMlWbmTV3faxn52N0gylDxVGySKOw78e181HHDPuTo2mM7HFT1WhDBxGeyrPRQC2i+KT60WhodPfcIOYGr4bdrhDzUj7bevTg+N9gb/wD0jz0lZ4zKL/dlTA7RMegiSzK1b4HEVbLcW0SrkI8j5lZ6y95+qv2FyLIn8YjOZe0MnJ/1v/dvRsdiR04xi1UvmlWo3IRGCTpazZNpho8p1c9RE0adzOzRSDn1zbdWMk9Fhfr9X16YXEib7f945Pie14oq49X/eVOSCkfCQvXBFjN/OPyKRAPzkknXU4MKUVkCR3Dor2KgK2gVLXUkaH3xs2Ypyic8HHsrYHV+aQ0YcX+VhbPhh6WKotWC4Bs9CnZl2aRzL2Pta4gkzPuX6zH5+DELquHdR7P1ZCWFEu4FhiLQfMIGjwwVkYYp80X5FnRR2jMMISS1SxHH/cvr/n36i1KWgw4GiWRZLSR3cE1akjcUf1qVChM0Wo39tHM4SHkK0LoebrontwlSVPz3zuVqPQug4cnh/VdEm+TDs35+ZhOexl4ooATkM+kDRApZLTdF7oNGjzRyJFvjUfWDYi92IhdU9uNHF8SxD4SQmOzCx3Ql+KN5Us+HxS8Sw++I7QeZejabgUGpICizx7z+3NvMTPbn2Bo9KQijOtWdekcgLIDOcdWB2wsWpQU5xuoSvrowpvhHA168st9ZhRbMv0u8NK9adFIf7vhRvoLMzHiygjPz93xcvYZoWOogJH5xV9/PJyb/fNucXd7cfzZhqKAYEkqHDrsdF46EQMnTIsxEjcsSOYFe54WFEyTqQbPI7M+gf7U+c+Fc+TDYTk4c49mZrcWYsiKwuoMot6L0riL173UjHwebzZ8Al2YjSWAPYQSHlv0532pF3uMsLXBcjV6HiRTIhHZmPWMdhVajUHFOTgZF8oZ0HiXwCa53oppt7O4Tssx+1Ae2tCthckQvChROPR0qgLcJn8GG79p6JnFSAtSStp7yw29mSfi5tRGLrhcR8XHlE7WD01Gh0d4FLM0RNf3RICy3OiGQkTJEupA5ChRvWng1T5TIHiu+Tfic+w+AdK9xyYTD47hRmDAnYRTcU+Izn51+8mD4wu1GJp4b+3qwfGyYBk+jtD+Fjo+1xIBkYJe4rml2Jj4jZLrhT56shXEUWEK5rg4U4UlH5PPvx4rR2cXML/Jzi7fqYd7aOfp0QreZws8WzKXCGIe1d85XuG7JVsb/h2KcMaeeixiom6DuVpTSdy0tOqGHEvDKe9fh9698WIE4rKvF/VxQ78XWoaU5qPj3QWvkBIn0vhmv0HtC/bsimNQfxjz3MysA8PxNMgGSUJoZvYvdx0b90wtZl4+suJg5I/X3wjn/m3L5/D/cO7TB8c3BtGK/vSm3/+Hf+Qr4RyjP9cb0Wu+i44FbI2kZJzcX1qCVxuQ22wbXDvFeA/ih0rFd07YNtXgIZHTNKGRo+yOu1BYygpJgBUR5Bq6Zlh7vD2FD2xjIACuDloWoNSotR4jQQVUFCVJ9DapEIcVn5B3d+N7kpejI+Wt9ZIv5q54L/R09mE0KZCMnqgyXN55yw242UeitUvji5Em9WzpsWg5KYGIzaY/4+xsNGpoAKkHZGf82pEYW+wh1sb4KIL/KEhouQAvprAVx3NUP9wwMjMb
YfEmwuzNVg2h2kqMJmIZKhoJwzOOgPXp1+MzpmiDoCmtEZ4j34hqorQJfA++uVZekaX1s2vRYPhi4fzB8cdWoqf7/tpbB8evdtzBeU/1WrjumUXHE7SFqiFsqDDQtLmu9qyj7Pehr4Y6Z/Ht0YBU+bOMNAu5yVZqTGnFTZI0A0cRtDy36AYrGfjVwI+RoD0AACAASURBVCPe69GFaAB/4WWfHxcuRkOJnDfrLRDU3ou6ugMGeq1IZJqJurp/J96jchrVd6KD50AaqA5vBfw6jGBoCvVRpHK3hNKeJeWPIayXsxjtV9gI5c2mG/p3Z2I2YQtd11/qejVXKZlcndgeSjR+SuXiypI7DIz67e7FMSYhpUZneM9kxfcM4oPM4rienhWOlSkyvbUEXu4X7r4vnLtQ92gK+Sq0pp/5QW0ZcRx1x/zYu1Ljy2oPZUPtDA+PEplFI+d0zSMkr87G8lmGRjVMGoDbX0XHXVE8O+exoW/HiVw5OZlQ5g4IsegZtCXUWgWzq4ZaF0/6OJJgyyxSC5AkThd68ZSn4QbXo0HYOQZAITaIjoQ00zfdgF0/KVxBMA4au0IJz1JWNjjdemcG93dT6qAAaJ/wdwpVTRarrxQzFigGdA9EZGX2Tf+75tk4t2l4tc+I18swOqbK/uMCmAcOTZ+fbLQj6Q/Vn/NvtFpxZXOyGCN3/Zo//5lKxMBcbblX+Yt3Iyhzd8Xnx3M1r+ZSZ6oPpXpFevgFkCP7n21KOvuSK+m+crfA2yyLt0/cAFnRE/FErYnecFo+uwe9c8o3zESNMmDZ0mmcSA9J3nPco2ts4MlmlWZmj9Q9yrLTFxA45ti8kAYuAsC7ueM6ZuG4kF9BzjwWU6h09tic9KkPROPqDsqur9+LzKfPHvfozGIpwhbYx4ugX+08sAhysdd6cR9ixRWb6/7xhS+E654v+XO82I3EgP/eMY/IKKZuhPn9QsWdh1uDmIXhN2xIu5UW9netXDxb9zV+DVCWPenSQGdE2048d8KdmG9seIq8KlGcDxzzdN2WzqUpMtXg2YOH8/7FmA9coQUKw0hLX8lwrCV0NYAP+DGK0lPgw4vu8ZWFIngf91TCwrMwcsj4/PgpKT+97hGSOcHf7KFcMg3NVEWxwchRorneTTcgFLPRKfgEYrsHYQGw5sLh15mZ7dKbmeIAJgjXmmBCBrfwntq3CYrZ5sCIKqWZeTyHtpYICl1KsRm6JDdMb+Xohe9bj2OBouR4LFKD1NdwTptd4SNpiB6nGhfxD01pYRMfq6KCMcRzuWXhqlkg87ZUFsLw1DLpIagauG4fL8V19b7KDZsknXkYc8JTwJLZt4AL0irPq8ArTOXiQONjDZAUsCDXu9EjvoPKTk1hkGNmqCh8CiJluvkRL5Lf9TWmBmY6ATN1VIROJ6vZGv3osLA1w5eunA/nON/e3o6GBiPAJCgkS7aZWf445qUowi30zzp9wg1zGjhmMZo0Oxcj2Fd2HOjLqL1ZNMTOzk1uY9TCXP9oNUY290jNUnaD7We3PxSue7zqRoE22qbTcaU/GdD8U+u/++D4Q3NXwjkGHy7VY0bllT2wNctm9sqW76MkXKzX4lgRr6u9xmhLBHC5/NYa1qqSTE6TqQbPdtsn75ctlpUGWncca86SQoI/s/hCxKVoiKqGcOFyOYb3GEbfbMXIBA2era509IQUwdmgueXA24FNbYwDhGkECUyQOEycHtt7HMr4ONJPAnTNoUx4MB/HODSq1I0LxgVLmZmyMBN2aK0qQjlzDliDRMrSi7sAci4I8SDur/Ozsk42YP/vw9rRAy0TA5IiMsUeMGaRQVS/ZflNsAxrOfjm4SRFSgEwhbdvrBfawT2kFQgJBUezk/ljSDegstHzdfWKeJurRV9/C7noSHDdKgM7PdPVguuCG9KahnqBGDqzyM3R5lhNKUtXNnni2jRF20NqhXw6OUkNst/cjmDvSrf8W/dOw5AeS+XC+Cy/c7zCd0s4btx8bt6J
32t00t9jdj4aEyyuaLbit5yp+3xgmfdA6AD6MC6UI6YMjp47G1KxAVlGX8W23KPA55D0JNsndNBCQzu6b6AaeV4ce0JDPgC6+/p8rNJicUBfqMNZtfV2O0Y9n59xvM+jNTeomOoyi/CSm50Y/eG3XmtEY4t74gWUnl+7Gquz91CkkJcCmesNVFMj46EG5i0SIJYPrxg/TKYaPJ884+yo58uRQZlWINHfCjjOj8XsXTopFjw+nGJ91gc+SdTLo6zNR4t/qeThw7f3ffHpJHxkxfPJJF0yi7lCAgZzwq8wRAM9TbG2XwDT60Z8txzKt1mNoKRapVP+Lr3bMkGRd+734idl998Bnr+2GCdJDq0KSCZnFify5h2wjYonGipIphiEapRx824/Pzk1eBRkwB5qucMjKWZmI04Cif6w7HjUiQqrsnu4kSeOYmhtkm/G8RwEvh1OYGUQ9MNUlymDSwKiJeP461tQsLtRwVJJ6QZBfMeJSkxNbMKImkF4Xb3lm3uevtWKEfa6S5Ei0rYv7aLPsTWZsz0YiGNcOzCUGG0r6LdAk8MdMYoNWJ3SbX/e7qoYwcN3GE16SMKoCMv3u0I2SYB7oyNrG+O5NBchADPoAJ7HmpuRiCX7VAUMnZntoGprfdcNnqcuRdDvlTWP4pSlpHwW81nTkwTizqHK+M3tlXDd53pOq3KiGFNC5NS5i3ZNF0sxysLqx+u9uOaInfnhxdis+82OV3RtAzby8k5Mrb130bmvNiVQQOyqQisUZ/NNyUml2qOn3di6uh6ff6nijtGVTT83KwYPjZzWYDKmSWXq6mEjUAKHzaJn1AGbsnprFLVGG6ABHkLD6nUtgruUFQ+yWJqMlTk/4xbzeidapo0uSrclfz6CMqu/7u/ZfFo8RUYwpPImGE3zygCaHnqsmxND6IksdHqiiogvLAFQCWIujVB12pMnDcsN2Qm4P8UgCaXmFp8/lWaaHYxXCVGT3lGs0qIHj42oKIZKjx3F5TX4b43GsPw84MQ0MgGsj9qWCkA++Bsxyhj9078J0R/t0I0f3Nl1xXnmeMTw3N1z5bgq+f4WDMLtXszBczN5c9c3DG0IOQDwfoy9Fc9IEsWxbuMY/54YNaH3k85F0izhniNdRuFjyzku9+Lkb8F5cBS7pTeQlhgB37VxJW5mK8+7fp4VsOouHSnh6GGmITclzcF5o208GC2oosJuoxk39FlEcbTb++1tN0JOL0VMGjMDxL4qPxQpVzrSVJC4nRsdpGvn4j1ouFxrRwedtC3szWUW02l3O2jRIU15f+FVb9/0gQsRyrIYCAUFolI6PBhRrsX/3se61cIUOjU0bvUZ+3AUi7l3nvKdavB86jf8xT+9+Ew4R64WPrQyInKiKciV1zIHr4BaQxOxsYoXAgalkqWwczjVtbUlbUWgrG4sUDZko03a6nK7KH6FUpfGnC2Ae9tIW1R2dCNkyimemwf5NCoK7z8LOi+n4FPpSTiygIaF5Vsx9E7HOkVmU6KRod3BsCH3QAd2nZ+FfTxjHiXqR7BvEDcfRqqG0hsoR9CvvsiU6ivivxgt6M+KQYJ7SPs66y7i79DWIlGjAN9rnA0aa1owaQxejXb9e93OR8A8uyerwrp50zfDxy7cDeeuIcrav+26oPdorOxpI4qj1VfBMFWLE5LbxsusysRE1JY4oPsPc3jZ/kDwNiTKG83KhoBvGAwlMa74bZL9yXrnYUlnw9f61zYuHBwrZuwb1zySoJ22qdLu3Y4bdaHmE5W8aPvt6HBxrqTz8f4Ly25EtdmrS7JbhFl034z4nuJFd/q1Srd9zw2nz+05lvTxsxHXRmzRbUkXHUMH4pttf7D+bPzmv7r22MHxj6x+I5z76dcc7/PcamzDREwuK4J1z/7wRS9t/9KNCGWZtteTj2+jgDTnetwLroKrqSDG0OuYIyE5FO0620OvtMUTk8HrKkmq1LyZZJJJJplkkkkm7zI5eqjQTDLJJJNMMskkk++wZAZP
JplkkkkmmWTyrpfM4Mkkk0wyySSTTN71khk8mWSSSSaZZJLJu14ygyeTTDLJJJNMMnnXS2bwZJJJJplkkkkm73rJDJ5MMskkk0wyyeRdL5nBk0kmmWSSSSaZvOslM3gyySSTTDLJJJN3vWQGTyaZZJJJJplk8q6XzODJJJNMMskkk0ze9ZIZPJlkkkkmmWSSybteMoMnk0wyySSTTDJ510tm8GSSSSaZZJJJJu96yQyeTDLJJJNMMsnkXS+ZwfNblCRJfipJkr/6sJ8jk0wyySSToynZPnG0JDN4Mskkk0wyySSTd71kBk8mmWSSSSaZZPKul9/RBk+SJGmSJI/i3wfhxyRJvi9JkptJkvy5JEnWkiS5kyTJfzzhPrNJkvxqkiQ/mdyXn0qS5G8nSfLpJEkaSZJ8IUmSS7j+I0mSfClJkt0H//+RB//9+5MkeQnX/askSb6Ef382SZI/+OD4apIkfz5Jkt98cJ+fT5Kk8p0fpUwyeeeSJMlfSJLkH8t/+8kkSf5WkiTzSZL8/Qdr6VaSJH81SZL8g2seTZLk1x7M5Y0kSX7+4bxBJplEyfaJd4/8jjZ43oGsmtm8mZ02s//UzP52kiSLvCBJkmUz+2Uz+3yapn8mTdP0wak/ZmZ/2cwWzexNM/ufH1y/ZGafNrOfNLNlM/vrZvbpB/f5DTN7LEmSlSRJimb2vJmderBQqmb2fjP7LH7+J8zsE2b2yINr/6Pv7Otnksm3Lf+3mX0iSZIFM7MkSQp2fy38AzP7KTMbmNmjZvZeM/sRM/vPHvzdXzGzX7L76+WMmf1v39WnziST37pk+8RvE8kMnunSN7P/KU3TfpqmnzGzfTN7AudPmdmvmdn/m6bp/yh/+0/SNP1imqYDM/tZM3vhwX//UTO7nKbpz6RpOkjT9B+a2Wtm9mNpmrbN7Etm9jEze5+Zfd3MPm9mHzWzDz34u038xk+maXo7TdMtM/sUfiOTTB6KpGl6x8x+3cz+yIP/9Akz2zCzm2b2+8zsv0nTtJmm6ZqZ/Q27r/DN7q+182Z2Kk3TTpqmn/vuPnkmmfyWJdsnfptI4WE/wBGXzQcT8ZvSMrMZ/PtH7f7k/juH/O3dCX93ysyuybXX7L53YHZ/YXyf3d8gfs3Mts3s42bWffDvab9xavKrZJLJd01+2sz+SzP7e2b275vZz9h9Y6ZoZneSJPnmdTkzu/Hg+C/a/SjPF5Mk2Tazv5am6f/53XzoTDL5LUq2T/w2kd/pEZ6WmdXw79Vv8+//npn9SzP7TJIk9Xf4N7ftvvKnnDOzWw+OvzmRP/bg+Nfs/kT+uI1P5EwyOYryT83s+SRJnjWzT9p9z/WG3VfGK2maLjz431yaps+YmaVpejdN0z+VpukpM/svzOx/J24ik0weomT7xLtEfqcbPC+a2Z9IkiSfJMkn7P5k+XblvzKz183sUw/yp99KPmNmjydJ8ieSJCkkSfJHzexpM/vnD87/W7sfDv2gmX0xTdNX7P7E/x67nyrIJJMjLWmadszsF8zs/7H7c/j6g1TXL5nZX0uSZC5JklySJJeSJPm4mVmSJH8kSZIzD26xbWapmY0exvNnkolItk+8S+R3usHzZ83sx8xsx8z+pN33TL8teQA++8/tfmjx//tWCPgHudVPmtmfM7NNux/K/2SaphsPzjfN7Ktm9kqapr0Hf/bvzOzaA9xDJpn8dpCfNrPn7H4665vyH5hZycxetftGzS+Y2ckH5z5gZl9IkmTfzP6Zmf3ZNE3f+u49biaZTJRsn3iXSOJg8UwyySST74wkSXLO7oMsV9M03XvYz5NJJplk8js9wpNJJpl8hyVJkpyZ/bdm9nOZsZNJJpkcFcmqtDLJJJPvmDwAZd6z+xUln3jIj5NJJplkciBZSiuTTDLJJJNMMnnXS5bSyiSTTDLJJJNM3vWSGTyZZJJJJplkksm7XqZieD78x//aQb4rGcZzychTYYOa203l3Xhhdz5/cFxqRFqNXM//
ne/4cWu1GK4bFQ+YWS0VE62yjXtKeq697L9d2fHrBtV4k0IL52qTbcDUH8Pm32yGc/vnnJdqWE7iuVP+74U34/g0V/0Ze3P+30sC9azs4N3kPWdudv1UPv5260Tp4Lg75++28rVGuK57onrodWZm1XUnEe2s+JQZFuNv8fuO5By/m84lSgHzIBnE9/y1z/zFRK//bstHfsLXRHHfX6TYHITrhmV+13w4x/k2Ksex7texlrb8nr35eI/yjv+2rokES4JzvbQbn7Gz4uss34tjPaj4UNfu9cK5ZMC16tW1HI/7z+H31HmZ6/s5HZ9hxZ955kbHf+tEOVxXbOL3JDOf7/ozdhd9zhabUQdxrfK7mJkNMXY613OYm9Rd/C0zs0LHr+N4mJlVf/nl8O9f3P/pxMzsB3/gfwkXcrz3z8Zq5t/42T/30NfED3/0rx48b77h32s4G5+1cOW2nzsXuftybddhfF8zs+6peb+uj3lfiBM/GWK+yajwe3G9jMpx7hW3/fkbF2fjOczvylornBuV/Lv350s2SXqz/ntlWY/tY9j3oOPzcflZru/jU1nrhnMcu7So44N5uuTfprUa52xt3d+ztBnvP6xPNhlKa74npkV/z6QX3zPp+r8HyzPhXOHaPf/HjHM0jmQuDes+xv25aC/82qf+wsQ1MR20jD8r78SH7s37n+axqHPdOFlHeby4LHhOws6yP/Ti1zbDdbvPLvv94rsFRaTKjM8/LPk/hnIPDsLY5oFFVNn1+zceqYXr8l2MQT++5wgff/90XGCdZfwDz9sXo+D4l/cPju98NC7EfM83An1+Gh7FFhbRfidcl666wVPZEqN1YbKRM0k43mbRENZnpFGc7/vfJUcQXpby8+EVB9X4XQc1/3f9elSOg1lfrLmhzJUC5mllsvFdhnLprEbyVm7i3IzTgnyTbV/T/RlR/DTKRHEOZ2BAtCffv7Thmrq9GhVW5Z7P59bqXDhHY25Q89+qbkTN313whVxoxTnbx8aS5jCnRjqpJjtT5XVX9r1F2cTof+D+1ANmZqVtf2ZVzElRFNEDGVTEAKz6daX9o8fFGAyPnB/rvLFZbG5iAKdv3fDjpy6GczlVhg9Ex4n7SRnzy8ysv+z6urDbwXEcz/Zp161pvH1Y72kuvttgxr9RZ9nnrBrRXC80fszM+lX/gcU3YHidj2unOPLrcmpM4N+jXJyzSdfHsdD246HMwwH0Tr4qJgJep3xjO56a9zEelWEftPvxHrvw5leiwZPuu17rP3by4Li4FXWoweBh4ORbyXSDB2tXPdFig96n30Y9OW647aX4gWsbvA7HVY3wHH4/M7MCJlD7WHyd8q6/QO1W2593IU6E0q4rpUE1kmDSuu4s+PMXZSIzKlIVg6E3Ty9PDAb8M9+2iXL7Y5MXIpU9F5uZWWUTC6Dkz9g5Ox+u681ikotH0Z/xh5x7yxdi51j0uBlByHfjfGGEonVM5lLTx6e064sjGR49iydsaFPWByMM7VNxTtGoqWxFZUBjhRGz/J7Mt+Nu5KihQYO7P6ta26V+0yecGjyMEqlipqdLo6y0J04RjITKRvQUW2dd0XGOmsV5ymBmfjuOVQ6GV382znvORY7HqDDZYE9Ebw5mYWhsx0XReMS/KR2J+tsxcto5CS9VdFcyG5W9P0ec94xWaNTzKEgwKO+6Ui/aSriuc9G9u9JG3MCSS+cOjkdiTNCg4uZW3I/zobgejZxwf0Q+ejB+VMd0l3wedefFgIfTNqzJ1olLO4swolfi2hkxuiT2bm3Nn2XvETdyejPxOYp4TUY6zCwYAsW7MU2QVvwHgzEqdml72f9Dvxbvz2h/92xoCG/ly96yK5lndCbuE0ntxMFxfi9uer33P+7Pv+PnBosxwMB3G56PzzFNpho8NCZUUdDToKc4qMcPzFC5hs1pJFC55+5FyzH3qHuAwzh2wUMeiW6vTPAwhxVRPJh5XLz3n9knOSeJWu5cOAPxzNMSDJ6hGGxY
90PMrUrUm9aHbiyIYdQ448+vEYM8FAQ3tVxPQ/R+rBtXoenX0shhaNXMbP8srO54ixhiVpsPt+HGNSxNjnA8LOH4Mv2p0cVRafLGWuhyrsRJy82f36suIfTcHgzPszFCQmOLxlBf0rXdFf+Wsy9FctbG88f9fhKxzHewJmDodZaiBuca6azEhctoSkHunwwPvz8jY2ZmQ5zLS2SZY8colwodNI2o8RkHtehlM/rIuU0Dx8ysfdznc2VbIhXlw1MfefFY0xa9ooeewZoqg0dPHxwnA0lx0liTT5KWfe7w+9+/FtsUUz3teN1wFo6FRurx2zQ886IvqYMLLTE8+XetqOBK19zQS586e3Bc3on3aB2jQxOfsbPEEBKe3aaIVlknuLodo/iMwORgAI5lPPBn6gTQoVadl3vEdUbx9ZsHx/kNCYKcPua/PSsBhjZsiXlfcxpIGeJdpg9QlKkGD3+k0JTJBUVNnIAaRvOXXVHvn5/cQoRpjd73XQjncjCUCm2ZhDBQqlvxAzRPu5KlUaYTmR6lplEmbbo5yTMzxFmQxTAq+Lv1YzbKeis+rgmMEI0YcAF0l+JDVtaY1ovjv3fOVzDfLd9TnA48WBmD3gq0ABaYGoccV1XaNBYVnxR+awbe/RFU7pyLjHIOBQswKPM94j2oKFIxjPiNaLB3V+KGOzjta4mOiVn8LiHdIg7HCOPbfPpYOJensyNzMc3DCIGRMBYhgYE19xvXw7ntj3tfROLCzA5JTR/cT3FM7uF35wU70z3cMFWhYVpZj1EoGmmKf0oThOynRNTKu5OdxrQ6Aesh6y88o6T1joLkO4ND/7umfQoNf/a0Er9Xio06J/OZUR2mt3LN+L36x9wrHIneLn/D8UODWTfKVI9TugvxezECo4bn4LRHr2go08Axi+u7uxB/b4glzii7Yh419UopICpi9bjfDuuI8CBqtvxKHMf199A50QCAv4BiVYPxhejl4Hh0yHIIIhR2JN0vkZyD6ySaR+enfKehl0+UqQYPF6hublzYVHQ5Cbn25/zBZq5Hi7OHnDaVkirm/ZP+mPW1+PXpXfVn4kyg4iSYcCRvXUK6oNQQ/AoAlTO3fGK0VqOy4m/pEmIaa1gWb5aRFg7xZKiB5TtiVL7tz7x/Kirc2VsIw0IJlLei4ty74KutLOkTej15rA1V4ASRtk7EQebmOpA53YdTXNn068qNycroYUkA0CdIcQpoOQAjRfnS21STjgpxgJSNgjDpyQ3FIAlAZcznzlL8JkzPqbFSQBRnIL9dvuMW6/5ZT1tUN+PaIZ5v//1n47kBjwXbh/XPzW4Mg3TZAY6dD54J56ggB2VGQMNllkekursco1CMDBVaUeESc8jrdBwZddb3JLCTMoZ17NF4O3pcsUMYL9zMCGA2ExCzgsxbro+SnbiBpZUlP9fx64YzEnVDxLn00tvh3OiSz48+shC6dnL4zEOxR1vHGWmK57heeEqNE0bq1REqIbHBdJc+B/e52m3ZT3Y93zU4E1OKxW03htpn3fO+9z7VC/htsa+p88eKkDD+o4XJTeHpyCq0orjtP17admNosKCYWexrM1PbkgV5xxgeTVUxLz6octOOX7gEL7i7FL9cGQjwUcmVjea6GZ4Mv2VxguokpIKh4hnzRAkW+/JGPPeCe76MUtTuSuUKUx0CYC3sIyc6G3+8gHB1fwm4qFx8meIeI2rxXPPk4canWcRN1db8/lrxwsiZbq71O0gNHptcgcA0glr/ZVTJtVYnKxkqgYF6EEdB8EilHXismvIFFknnPdNiuW7cgUclH4BC0++hlR9MO6ozwogADQY1UGnwlLail9fD72lKqHXJc+bUA8S4mZkVOsBeyGZPp0OBvpx/YUMSr7rzuFf6VATfQ6EzwpC5WcTpJBKVpOHROhm9ZRqS/PaaWqPxmfRVQU1w1VM1jPz+3ZnDjaSHKYzw5N/ySIotxRAGDSMFvKbrXqiSnj8Vz+XpPeBQALVMo6Qnj4dz
oVBgcLhBbWZWv+vftXlS8BMT1r4KIzWatqJ+6y0qfghYKKgF1en9OjIv27FaeLTiBoQC7ctMMWJu52TpEDKhEJLFy/6tNRvSOeYvXr3jxkr+5dgDuPdBx+nons3vlN9wxyoZRKMmhZ6ksfyt5FuktPy4M6+GDErjgDsYD2sTNyJl4yeAq2FliVxHpcqKMLMY1t67EL8OS3wJLFSgGn9by+T4QWjBlu7Gmbz9Xg9pFjqq3Cd70qPT7gXloOnzm1LRwer7okTRaliJokOLWA+Ns6z2iPcg3kJTSW2ktFh9VdmJmzUxIjI/rX8SJb6yiApcs/hpHcejICENdBaboO5lmGN5CdFzTQyWFGyLyAoMJTUKwrqSTZaVdPkS14B4ZFD8vSXB2OA79CT9wOhgC6k2/V68h6bEzfD8GrKHARGwMjK3WSyhwE7qrmDM5yTyiH+qAlfnKv7h4ZQXOg8I8O6KQVhJDr+/lkpX7vkO1DxzeMj/YQodwf4LFw6OCxL1LL8OY6gbN6n+e7wyKy9OQOHuzsFxuumGUr5yOlw3uOiGUl7SXaXrWwfH+x/x6p+lfxuxa/d+0O+hEZi5q35PhWcsfMXvk+94dKMZbTcrYtvQ+VwiGBnLsShG0wDn2hcEOHzXlekYwB3zjdHefAzEhcrh8lY8t/24/938W/E7leF0cA8Z/K7HwnVDzJe84ECLqKAbLXsUKteNVlly2wMT3SdkkKfIVIOHVuZ4qfXheJOepJXKAOrpByZ4mIaMgooZ/u6K4VW9B4CjeLCcGAVooiQ3WZGpVcxNgdGNQTV6L/kARBWA1XGEa/ekAq2LT1AAH9CcpAeY+lKcET0K5e/B+DdPTE6R8D0HEmGjAUrvQkvUqSDq9wTQDIMnH3XRxHy1Vu8cBaHXz42UVW73L8TcHgNps2pI/84PuZEoTqK4DydDlIYBX0LDSI0O3qN9XCopMMc0VZwbstoD7ympUK6D/TOCjJxiy3IekRNEq8VYRaPYvv2zfq52GxESwU1M+l2z6J3vn5PUAfJ8vS0fnxNfjrsHcYRjVYeTAjxqCE2BDBwF6aMku7SHKksBLacrPvjqVIUoTipRuONIe5zw48K2cOEAEzWS6qIRAbD4qf3nmuoezwAAIABJREFUToTraGhoccjmM36P41+IVkgPXEEF0W/hOsw/nW80vvtz/M4SPYaOH4tYLjBtKBGkRdCX4Lf7EWIzXo4PyUFXdxfiBK7d9m8/rIEyoiHG7RzK18d4YLCuFuFMCYancNvvqfefJlMNHkY+1PvhJjiNXLC84QqgfTp6JzF0DQNKFkMIvQuxYeukD0ogIbRIYsdwu1aFhCIAMZpoRFG5KxEco1cLL8Vw7c6Pw1C6FxfiYBYg2AoqdBS4SCMqH0/2BljoYzw5/nKlBjdoMQ4ZbpaNq73E1IT/985SnKwrX/Cw9PqHY/6Y8yen8xOPUt2cTDNwFISppM6yj/vc5RhabsETV6JLglw1SqZg72+KkoixsqIoHjHTaX2kbLR0m1EoknSaxSrMjoA3+yAfY6p4+zFNafFvwqlYpSUbC+85qE7GTbAAYPfxeC6HTWf7uclYsPI6ohPzskFgCqcF+XFg8QYoANh+PIY2qSdLGrGc8FhqDPQXfKPSSN9RkNprHt3onQd4V4H8IDetXY2eWWnTjZf+gmBz6GTgurQk0TqskeK+AJrnUCZNw6Ie19UAgZvucjhlVXDibT8TMwHzV3wS02gaSTR+5gacgHPhlHVWfR2X1xEFrsd7lPb8HruPxrGauYlUupBghmIJVqVLSquIT6NFNqVdP67fEQ4gRrWBeWufjPs+U7tqsHWX/X2YHtYIT/PDlw6OmT77VvItDB4/JqeNmVl3DpVBUzzdInJyOrkCEysVoCgGDqTyhdAa1VLoFjgQGOloyUYaIhhS0REUDA00qa5JMVZb710K50pFD8l2BX+TNLB5lLFx7UsVQxnAyJYSbuF+og8r2/4fyKGwfzp63Nz8NPUx
RL6U6bP6vTjgTOup4V7d4OY6GYcVlNGU6pqHJQUwseYBKiZTtZmFuaKVNcT0VAQL0Fs4nIxOhQb2ULAMjOpw7Wh0YPcRn7QKjGwswsgVhdiBR1i/zbUpGzX03EAMHhokAx06prF4S7HlyUw+qki6rgviwRIwPPtC/HkJ0dddYZy95WPQejZuoKMOOICI6pboMR25bj7O55mrh1s8iish6H0aGeXDkt4FX/csjCjtxDEjXpEEf2aS2hir+kBEFJwuQ+FrI2DapLS9tIVUyUWf7AWlCsgR7CnQCnznkMYUYYq+0I7zoYutId+VyM2O/3bnOOZsR51YF3KYmZlVbtCQjKGb6iueUtz/0QsHx1oF1kWWTKPxLDjRvbhyC8bWKaSjZD5XkHbrnBTiQabBQb2RSIS7/rJbn60nI15rmrzjKq1hKZ14LqSOxAMp7vmI6f5FZbx/xm+i+A+Gq8f4TRC205AxowXVW54g3T8V49rE92jqjpVHLH0dYzQOFW3x3GA4WUmlACczAplbiBvhaL84+VzXB0yzdUw77V4kdXm8jhNbMSH0sueu+6QmJshMWGAlZ0avXTdXGtM03tSgOgrSPucLNLRwEIA1ozhaWcNz6s2HyqAmgcnxOVh+2l8Wa4Isz296tHHvmeiycq5XN+I3334cBoPMKa5Pkqyp4tRQefhtdoWQ+5OAkwZwQbAG5LAaq4Tjs2CjGksV54DL24n3aK/iO8kaTgr0lifjFCdRBJiZ2QRiTfV6c2Aft/ToRT3JJVVZ94/XOR4tWepnBf2SqTjfF1oPRIry2AQ1csDow0jWRFiDmOpKhkvjXrnKGO1Qxz7f8edvI0s2e01SrWeQApbIJqEJ5GtTgkLq6tKetCp6zPe2yqe+HM6lz3oYlBjOgAE1sxyeY+m1qBc2n/b3bpzR4hO2AJnsaJE0VfdszbB8UwYr8Xumeerhdx71fOcYHtl7uLktfsEZFje/92S4rnERDyYvN3PF86C085gqMIsl62qQcMMoNaIrSg82gYup+X5iDTQlRPzCtDLeGC6M91jf9IWZF+LBpUuOCtvaBI9ENy7EBB7sSJRvOo+NsSAe7MrhxpYC1SiNcwqY9vemkaMTmeOoHhAXVVlwRiu/fufgeOcDXnmj1W5HQZIJPZQG5clLSblZQv58EheLxfLZMYZxsIWPscWC0LN40q0OBeHSOdl6cvIzasoptEDBwi3GrN5EUk2zGD2u34znCJrkPFU6CXrBqUR4RmD6zqNabFAWRYZ36S4LWJbRoJLwkLVBwNYAkF3wk9yEtVBAw/QH/11aKTTASk0+saMirCLrz4H7bF3K0sED0xUiyvpl14M9aTXC+d097efGwPowepU7ioaoGhCUuas+9htzk/VP7W78doHoEutl84X4zWu3EETQwo45RPHp/EokqAdIk0b8QkTxk++PP8BHgXfdjlt2MLZ2L0jGpsHr4t9xDNgDrz+l/5YCHEM1NaPTPUn931j3+1+MOKxpMtXgWX7ZoyI7T0QLK7wsU1NjAE1cJtGZnWf9y5VhqapiprLUQWZ1UW9ucqqKDdLG0y1+07ZQgfP3QqNBMZr4W6qY81W2d5DQ+wRKTW1BkWJ9JbpgcQv1UumVBI9Y7jFNCfB70ANSHFDwjkQvc8NWo3X7Q77iqNCbJ4+gN1vlRorogHha05qshqokwWPVb7v7Rq+0cnk3XNe6uIDrpPUDjO8hzimjKj1FbScS2INX5Bx+boR9S7dvKvT6rXiueebw68zMBmy5RINHjCaG+oeLMuE6uBhLLhFMRQ6RmuGMYBJABVEoaikZDrEgNf3HgIzqDBNQ78F/npnct0sbrR4FoRNAACkNHLNYVaf8N8N5jwbp5sZq3tpNKDFh3maUryhs8aVbDivYvehOlaaEqP81mJZDFdU4xQOrXCfThFDP9gWbU9wlnuydpbTGeO8AGxmrmIZxxL1MQcoprK2BpKlr7puOOTEMCJCIdVidnOHQCtYcNkFiuUpv
3g3XDS64kVO8EXtvTpOpO0oTbK5jhgZwNu0nPIdW24gX0ttUbM4QgxkYduX7EhylC6WH+8/ciFo734PCQvRn91LUsB2ENVXxc9KwAaaCPMkz052PC71a95s2d2OYd7eJpp01NBpUBQvJKWi5CWDqpeiO5275uzJ/XNoRgwpRKU0pBpwUxkeVOw02LROeveXfcO+sgg39uHnc/7H0+mRulYclk1oRTNuIygK0J7hSje/GOVQmsNHgUxEXxmq5sjRADAUA3Ixk/bWOwyNWpYd/6zNyDpR3bKJ0+F1jBXFYV+0pKfhgiGvxEqYRAf9mZj1UcDGoWrgTIws5YHiKsuY6J2DcSlSV64pjoOM4dw1Vkqtysnk42FIjmywO0A36KAgbURJgXdqOEZ7CDhpiPhajOCy3H2tZQAwnAdyC66BTpQB9O40KMeopcdpYxTct5aSbOPXnECq+0NDUlx/Pbcbf7sHQR3ZoDLRcBbP+vnQ6Z7cBNcpm3/CJuvZh1yca7SfQuj8rBiEXk9rv2FMCv5xgWQLjv7Z9QfCE3Rz6l1bDZVwjueUpuXORqQbPNsLcFTGi2Pdj4U2f8LqZRVK5OEL0eDgISurXRY8eVb6cvM3T0eScvcrmiH4PJcWro7S9diMqoZ2nfBYG9lwZuWkU/s09gh7iC5w67Z57A13PNxsCj2eRVlEb0QBHIWM8QjXJylf8ur2L4bKQolSyLG6uoe+VRi4YhdKKGkx69ehzSIUUSXp1BAGa/O5kXlUuGe21Rgll6VMoAKpr6NYtDW+rG+y5JQB3cvUFTJ0QCNLgEW+Npa8Doabi96NxPBJ+JY5JTpQqU2GlWNRorTMMC3Ozj9eNlhFNGMPJcVwRGr8YG0z2muA6knRXbg8RCVlX+bPAkqRo8yHPyLD8GFv2BOJBbU1TAEBaCwqOgkxKJbVqEWMz86KDZkt78VwujJOkXlvcxDEfbkq37kc8FJlKNJPFANRhYxEetIIYq47Chq6Vl4x29BZYjCMRGOzNuh+Wtg93mkaCn+0CN1cSh+P/Z+7NmiTLruy84/PsMWdkRuRcM4CuAtAzRZHWalJm4ose9aRfIDP9Kv0F6oWSSDOKbBlaYje6C6gJNWVmZWZMHuHz7K4HAHm+ta7fi6y2YpWfJ890j+vXzz1nn73XXnttqay2VPfFn0Unh+TjmaVyCxDKXTatETY8+tqZBc10JBEAeveCTBQK50Tr19HpGD1SvSGmz2Y7dlZmjNfm8CTK5fHe+JAb0qqQyGN1Mi9SJe2v4P3f1xOxAQfIGd8zoClOJCMUKj2gbF2x4srLf4VvkVE1xAdFByGEEHI4/ddVffjX4839xXYPrMx5jMqCopXmzwAH2wYLw3hfF38KmHRkRgXOp5PY2l/G+b/6CU41M+AVRBdTexaMUr0sXQhucJqynIYfalQ7bOMBh8fI+lmSDvy9rltVYroPhn+6k44gecUgDQoRUE+piHy9OStc98uqBSro10bnx1MAtBFTU5WleOY0ocmymSuX1AdBkFGwtQLnZQk+XLViHdexX9xpKl3gd96zKi18Vpw52xO9B+ll9aG0OY/sJExp3eN6FVswhEd5s1l8LoQQ5vejQ+Ik8zxQIufflHopSO/YyWVxeBqZg/PpDV1HqI5NNpmOrx0FF106UBgqZ7opJnfYU8X2Y0rfRheapVPt+5apdfb6C0H5qERA10XdO9RESp776WRq2jzpqr6nG7fxDQM5vYj08GPLCHucTOV9m56L2SQJzEMiX0exODyPhRnf3d9Ea+BKyGV41xQ+c8M2RN+qBHIAp8m7OuskpVeTkDfSf2iVBavNr52jItwOP9CByCx7OpGHjbh6r8fR7b7pagRUQ7prcKF8qt3bMRyfzHQBTfexATrMq3qJMgiytiqGaENRAgDm5cREDKpXRmjGc/JIl8aCDoX349qGMQSELI0h3ekgomURGo1s44UacxoYbuSECCP5XjahNAbLAp0mh0fjy9qFvkXyrXMI
aOhqEKr19aCOrTs1WbwE8L0oueDnHu5//tIqdk7iviLBeNA3jRc4V+szfW/1k3igrqfpa3EJ/R4PJHggtb0MvbjZiV1b+bo4yC5wuQWDZcbTo/gcZjuO9mOepp7KwEtLxZDTwzTH+lirDhckDttZwNYpw9vRfvbv6j0SeXQNnRmkWOpKKVHHAPtx/qY6ZYULtFCyvopsLZSD7lP5mbemwWtDTkm18CCJZzgDy8oL41odQQ/IHDaegY2XhjxBr4td1T2lP4Fyf+OJZlSo+M70paNVDOq+jRhnNsLDw96cBFHEBbwupc8hueg5pPFng46Lfo4HcPOrDCVTz/SQb8EGmJZyGh2z5YKVZOO1CCuZU8PUgSNB1Wr8QUNzeIr5zSjG6lo/N4MXnqsYJwSwf7WskzdFFMHDY2XPkxGLO5yCvCO6SJCzEQRPDtyriS+TEv7xNRGJrRz4WTQ8RRMQJMLl1Tl0jpsfaa64+0Hs3ZZFeC0gIk6UduY3O99OWub9uwIxYfTStT5LwuG8ZnL/4bt8T4f093h60GFINlDF7zaphjVI/2tEpZ7yrQDxmVhn62I5bpiyoaol/LuDPV29sEMG9z/Z07W9s9q8953Dk0X434YxQhWZKIybnc0qH2avRiccD+7FBdf+BJpmx5prpWBt3uA0aXCK9Ta3dC33yzyx3vDSforYQrxXtOq+GasJK/r8c2PCqiAO123NIpPn9Azel4MUacG7p+4CzpeZZpKkpyNL7EPQNDj5ghMv/cfjHd0zMWJQQ4pSBWvfhXPaEe6skenwMHe//6kuwiH4Cyy7bT3TzzG/23qiRmlwD/lzLELP+RH2dwiMzkXZyG40kNwMCQl5KAtnpa3qz6ExcazRICMvT8XUy+g9VFeU65MnkYzV2onXb91VIg31QqYG3/M9R3iYQ57Cc8+PTWEU686jfVHlZK7dIrEi8skz45GV4Sj5/DdesGFfIfVz2zDKvc2HlPMucoBBxqZITWPQ+XNl7PJAI3kz0ZiTaVhbsuR5cF36AURj2Xqq7/WoK2X2hCmuBQMVFykDEJl0VuLLhDOEJSzXdHi9C27fjjn6N1BsHWFv7lnA1IkLP2+CntNitE9TO5w8HfHq+sbhkd/tpmVgH/79NXwtLYHMfgvj/n0NKXggB+MLjd6nR9H21T/SHla9n0c76IgiHaUZ9Fh83UvXens8ozs8axCcpvQzC0GJtyG4QrOjqrxhfO65qQzvxbNgNdP1xlspH8R1uuwron/zNvizn6ZnNRw4oG2RbgYO/DJAsMrFJVTFXTiRqD6de1cHZ9rN+8sxsKs9jWfg6L7ydBgENJ6lpzZ9ZDo8ZG879Cdlmbhn8hpCCKF+AePraA+uUemgdNtyuPTmEkJRrNV3iWyIBs7A2k80J2WEnNGWo3gZH8DCUl+i0WNrcCl5Cn3vnfsRG33Sie705JmGHvmjaPlzhgrNpulNLItQjxXSdc3acFylV6pxo1Mk0D38GtJRjTND0Y743MyQ0IgzOnp9xfDvbZBIvYIhnjfUAaYDWe04ArP5dQjan0bSgP457BHKyYegJfFZ6tqM8iggGILKy7vEPjk2POwSUgfU8rFnSWdofMeaEAIl4UFSMXLz5DjeR72tjsx8joh+P6O1xA5kAKpqWySNtdD5Ke/Fv1udxb06PPXCDESs5hCux5vFsJZ2H9S3ctuyDYOOM/VXvC8hg8nRe+roc18tLEVR6YKg36LzZ8RhzJPLRIggJM8dI/2S21KxQh3pg5VRPENOjPPaCtg7+aGdZbTxv4L9b9qawl5yx0uaLxsqTGeZ1dRXhgTR8V+ZpmmOWQILSKs4w13LTe4DtstT9XxvfLq5YCgE7Qs4NvAha2R3S5eUk77H/B0/R3XjEHQzzI3ZTuEzLvjG14puXP0s1uh5vyzm8rxp5xRM8SZQBP+cVB4ZYXf/w3gvV38Za/+9KkecKyOiTmF819Y8tDuJzsoKE7m2iDIPVMd6woUSCJrTnpXd4lYo
kDbPGTFQNCb0+lxs4wN47uZYM3+8sKbOTH3sGJeBbUqY3tpG4z4jogHHMFEuvOZvSufYJLqswzCXkAojf8D/bmGoJA0d++n4M2Faumacq+t3050t+Tduyzk8rPZz+8HvLgz1CwjhlwbxCxLl60BZakZGnoyiFed+KRhBc44KqPXCChZYgmvTX6/Fh3/TiNcs9bxcGZcwvytX31yw4MKl3AdOLdiGIY2TWS7sIrF09A0dpoPiHB4q3PPMyJuGS04mW6/BHnW8R59PCuuNXc8OP4cte0JQG5YDZzNnSCBTXOvbakBZjDJGtXPO2qFUO9gTppFFhLGi0l3SE492PNEYHPyhnPWQK0B92u0z+Y1SsGC9+Ihwe7UbaSjC5bPntOQ6S1Es3zSyER5EJB6lMqfGyiwSV0NQlMgNpzDkES333tF8SP2c7R0sv00RNGPms9Ny/+7mqDGEEHJX6RHg9Y8ilMZGjzlbJNP99KmU1hINU1GdRAdlNoZXYMKD8xHKKnv6XQvwLbzUsYLNQYLpyBSfyeFxCX+uAx7sozuG1MBYlK33GvUuxqb+zA3H797/KEMO+gcajNjY1Nk3XfUGzrxFUCRm+4bXPDsOdHNIaLC8fF+4EjjtXVqC3cZ7Dwyevoyvvapu8HAzqlM2A8u9P901w490QSIKHm++fiLifhQfQKejiCgrIxddFES05qmfW8/MQLFazPbLdLZ5v7sqNTk8vg7W+5tbt7MZZwghDE8igzyRGtyCkaZ/UxyprWsjxdV9Sz1gkSgxrg8PO1ZfOTFZAgubKKaHGfC6REYFjkz/DT0XCqABuM7YOgXAdn0ojrxRE+7vRwjzE0iZuIp45wMgm08MlZRehOmBPfsZriwICC0o99uemO3Fh1G+0u8mOsb0lqe6eRa0v9T9KDp7uH2SoENQXyKNZrBpZHN4EBHWP7auprvkWoAL8Cy95n5hJXST3c18jdYTnaHeA+R+rb8SYXrnfEx26cXG/3fHiyjUzhd6yBaGiOTei46Y62Fwk3pJJP9VqusDPmhGl5yO0XSUnv7zaDPUwM2xdBfVekfQbkoQkzOgygYcQkKonkLkNTx/zIPAiYI1RCV0fvr3bMFswSD6lce698a4/JxHrKI+Xkw3nM2nINQeGjcLESzRxRAUumZDTyc40tiXrd3H6AQHkKVz6JCQz+O/ZZah+E5HwHsWCQ+BTpNqLwoHwqkYee4zoqUWlpYhEzG1IEMOTavSmsHhaf8mPntPadFRKvccmt1MUpvd1x9KvoXLfmzb0NSOIZtjGmH9O1Zt5Sylxb5b/UcRFfNKPxbPDE/8aNvMZfM9QR0eX/es2koo4Qu4FD93e0831rN/jKryxXvK4ZqBqxXQGiXM0x3xpVdG4uvcxjM7Qj2j4GcBHLGVzTHnxAMVKQxiRZ4JnpLfUxzpQTGD4jbtiYtR8r1vkwnIdHgksnNeCoyqHG6WbxndQt7wPP3Oyijx7d/Xg47lv2PrdE6Ic53Bv5GSPIfH8N68bSmnx1BU7RIWtcMDG8dJWhOIm7nHfFaICNKcRtXFBT365MATL1hVADfi9ADigteWRqA6qHEpe+inIqqc9jinUAd1tI3OrovcMY978yYq5vyA2IaRckvNp6bTAj2n67etNxwQGd/I5DpdvxMnzZ1LoqNOZOVa5JodnFg0yLVh1RhSWntoUSrKVypXmw/tEEJYgidWHJhDCFRyaZdY7gBJhVBn0VJfwr+x3nNMXS2gdls1cvME+lZN4wExACnv6aZYY356H8R96yXE813M/z29/71fbTa/031vmRBfjy1q34ZBxMSrXDkmKAevXehzkNL2Y2v82UJjUSq9m2NEpLP5ja5ZrnWK4vlhyZTN8KEhTUB4nK/G66xRNXveVbLtez//6tVrr9Bd4O9IWs7/g1WjsXLRjoX6xTr1PVIthLZgk7BEmjfn5xCWn5ft0wkkAl0/d3Agvp5be5DKNSqaUYFdsHVVFIfqO0J4lkB4RiZhTc9v
Js/U+vqQG2KGjdEKH4YbcFaazJsehcWXvnh3P40Gcd7A5M3SD1Ln37CkmBwV32xShrdrK22SvsEOW3Gjd3JxwqcX6vRJUzWrl2Tn5qLBk1McOoRkk3Li8bWTlktIz4wQATHtEYLpMRlUzDt22P/67fjl3LDD29tn3NMi7NFt3bhsqeLcDZZbjm5Zfh5RGKHf3c900iaoePF+NOT+MGXmHJ5iRrp50dzsdISgz3l6K36u9Zk3IM1v/FwIQVAX5yjQoaLo2qJpEzlON1/1alzEtduokhzrvlpeQfejrIdkq4ZKGZugg3o0bL2X0QAuTIqfKebWEz9dN69vDxbKfdiWve3bE0xHlVFS7grgY7SN8dRc7UsaHXV4yOFhBa+ncon4eKsX2mvyS5yvKFyzhDAgixTMBnN5M51qvLDRIs5Bb6pG8s3dmLNlu6DwhgZTpW/SNev694iK6Hs8i8t4naubcwiHp9LQw2B+DefTqumkdx44vn1z9EmPSQAYOEep35Pg/8KGsqH1HxqvTVp20mTrKdpJ3Aer2x4ANUL8sGcaSMTT7HPcUO4MsdrDo+DRbTwcoC6ebvEDiYPwmywuC3p5X05gZWTgUt2EP6fk8CSS9UgdOKEZX7cyqLX2Aqz3OyBXPjdiNegEjsCwdHPvMzz3h96ACS/t9hmVeAn04AQpkoxGsdsw0iqnXA6fHl4i0qJ4nKNkcJb5d71Hil3zPo7/zxfyXv+PIruXa9sdTUZeLko4P0iPFPNAWsh9Hx/Z5wT5tfdQPbgyjgJJzDxkXHF2TVTHFlwB0fMSkXOrYR28H8TDpGicihUe4nhqRhXOY6HH+9CP5ZFKGe/rPe6klEQ7iZS80dzO9jk8TG1M0AbI0/5SyWo2t/vTuHhKJksigRQOPk9HcZ3Wz9XIj46jYaGNcVvdAz+tfGlpzN14X7UL/TvRHcNbtareR7MUHYiHrY6891U/pjLLjfh306466XMgoOWOBxnxte93SoUw2+Jk/cZh9JRmxlXjXl0Yr63Ui/9uPov7anBHHTvaBXfuq/g7+g6TPSf4b+b6/KGRndLCoe6EOxpIeo6Jkmymepzoi2s0nsdZ2LnUJ0XuTEJzZECPMAu5wUFl5zT1gVjV4p89+kWsYbz58U5IGwn0B3B+44n+gG8qMZfAqpC8TaTwHKyKYwlCs/fZopNDPZKeEfL2/wGG2bgXVLgdnMYJmRinosL96xk4/Bw3/ESUSKTNep4/1CABTxAT23SVXrozMT7CenhhEgOCXm92rvzfnb/Uxnrcq0QHHIYnedzRn+o30Nna0edAXkoRVVSLhkH0tElmHBkd1l6mp9omcKLKNzoJk5P4fdV9tRm9QTSQcwQStZY6PGwtQQFPH9NJehS5PonXXN2YoUQAMvdKu5ThdpIo4FameTHoiOdsPommsL1ACCGUruPz67+ltpX2lAG1Uwf43UNDXNOEYceH+kxow5y0zKoh5w/p3orvjUbqrCwQ0NDBCUHTpNMbCuraukHhy2Kqc1x/jmuYFpqIKrI/omUFFsgxe0PdBe6las1PCSRcvxWdnN0v7Exipa9lWzrvoZk20O5EEADOV+Hq9T2eTIeH9f5uLAmtSvlpgtuCf2jfPi2thdLmumDqi1RsdVVZREkzV83EA+Cmcf4Nh3uclIRl9Rj1TUJQI+0tEVi+nSgNhsFlyWLumW6U5pP4uvNzEyYrpBvByiUqC8DhcaLoHGnJRKkmoNGs9hpyT3aNNeyPOzLkADAKcQmCbRjuePx+eE+bFQ50jyKJrDBNGoIagBnWCh2XEII4WK6gLPuKGi72TGqXuA+rrpzt45AppyMwbFGS6BkGYTLP99M5WptEAjWicjMeMnr9Uita8ErJ+gbl0GkZlZClPZ2E3TrEROc6kdf9+AAqFqmPF0Rj8YaTp/F1WUiyDKtSIsyf1SPqhxqNZ9HhG57Gg65oZPoVSNrOlWSKtnJjIpIpZenBzoICUqGJLuIgNEslsa9Z2jejDjA9
6eehoJnsN2XUhOfNaOBuzpXf8+hBrM7jlnAZEioyU+cpBE0lkT8bggZrJBLnLUh4jN6JAAAgAElEQVSmWv/EkM014EYvPqEzyipdrxbb+QJtklwbDEuGtqt+bvsbRSLfWS8tyW/aZqUOAR0I97qpucLS8N++h0V4CEEp4zU0XsZZcHn2EvgLPrHUNCnCErnTcfNWPI0TERTTeuD3OPOcolfObJ8iPdD+3KoCIEu/hKedb1kp4h/Do+1baT5zyzOFDyt0WiHU5XAnScueGqShniH48gNUEEFzAJrPcZC37DmxNwyuMby9fVLLbuh+P3wu6i/jJHYfabTJ1g9ZWiU0UFNruUHnIqGgjPRqUch+Ou+Dk3TDL1ochs7QyE7bqBC0dcnSc99zTIMkHKUJHSo4P14Zg4h4MNB1vwY3p3YaI63ZXE0eSaXegHQJiKpkDtXLTjy4iLCWuqaei0uWLODLpbSWKA71uxZ3segyahd+qMG2DYLOHTrvM76uXhp6MqdTl4G0QZbEyarVTtxzhbmpZqOBJQMpV9YnKrkyCZEdkMyd+0O7xXRt4VQVN6mE3zzQ9756HkV1Ks/i54q2bigEWjVVfDo5jZdG9MWZdf0mzlurQBwjCJ9fqyGnLaiqeoJWI1MLzPq/dR+jFYsFtewMwEos52SJ+n9G4O0jO6XFg8+cKHpfrOZywisjEo9whnBs+OMaL/QXCKvekYMM744EZzpinvtlWsGb86XxSPww5vVZGROCHpKuTkyiZ//N+P95k+0udZBi8N4nGPmRbWCS8HhIZuiF+GamM1RD7tf7ZWU5TXRy/Poku4mq80367/yhxirloHYHj4hl86VJOmA9z0yUiw53GQbd02KFESMtd/TjawYB7qxJ5cqe8y3Aq6rbxiWnBNWDSyMVL3bTnRWpvvKGm1wPMLBrI5GyX1a1poZndRLnnDwEh+hnMPbliiGn+L6qEZqP2xHW/nIYc8CuYTWFyvNk6bZl8/r2Ki06OfmMgosfagzvxIW1/8uY9l/sqBM6uBs3yXRXjUBpAF6KdUdfleN8FNHo2UnLTKPUOnaQMnDFXipagO5ng9z/Pdj4jq1nNrklSnGm3JPicTS8o5FuyHINvB3oziwrFkgQcbflUL1Mz2TQ4aGTsPby+69jdLLe03VfgAozpStCCOHk/96sl5fQ2eJZbJxcAUzwdx7UiZZSxjPzkd2OmgbdOEPSPBRe2tQqlKTs1h/ODVAL/CDXYeCPS8DaSBc4c75xBticRiNDP8arcMr9eI3hMdsv6Of2fxXd8MufKVRJCCOhTnkfqM6EMJ1+bu/j+Lmzv7BDsoMqtl0/nMCJQX56brdYP8N7Fo3zoKTnPrD5ruEa7iDzmn4o5HBWzaRb8fbB9yKihdtLOHEUY7PKvywyL40SnZXahR3ULaJEeg32lCMq6QEH04eO4hC69iaHc6BNdMyXbRP1G2Y0DsY1XXxx3l5t/FzBpPiLcF5G1gWdh8cS5Ob7R0oUnS3jPd6M1MhVod5cK+lvI8euthsPsaV1nuZ+d4QnpCA8mU02t7AsnQjt7CBu9Hlbnz8FN0s966t4H/o6rlvFpciu56eblapD0L6HIYQwPaCzBVK82VkGbYkeckAzV31z2GAXeMC3Hqga53zxeqg1AwQX/pTAxfc0Aku3CxUgYMt3wdNp6hcIGTlD6bJy44jxZgTMswkuRMjR/ijuz5v3I8fJ06N1pFFHd03GPWNkOjxzpC/qL/U9lh3T0XCi1DrPfKNuZDoQREXcY+Mi9JQTjXjdYFJC9kSe3PDTy3QFTUa6dK6cmDy+Ezd61ZWbwWxfDzxSB3GUvAbjPHTeA3FRbXaYnEIZ0yuC5CAD2mY2tfcovvZnzYi7dz89DSK8EkM89j+BKu47xnHCxuRzclL0NgwaG4psOhdngoahjqyQj5NslZLb+DmPZkmK9tJ2kVZg0OJSAYxFMnhgibbR+Ch5DmtrysP3fM/N91G+vGswKow907dM3YYQ
wvwFvOhdb7keRwUGvV1Wa/ubq+j5D7vWHw+IT6GtP6BajN836sS/y+3rbylCA8gj3dx8M3ycaEFBHtD2+Tth98OI6nR+ulk9OoQQiggCZq10Z6X2VD3Dyd0YBfUeA30wdF+CsQd6fWYQGEwuLKiaMwhwBWKgG4lDO2X7eAqVcgm5qjoa1zdojIr96AEinRwX+iWq4+do/8FmIde8733uWy/ERSCUQJBgX/Y+i3vHbSM7FngX9P47sYhHEHRH81I6IPyh8focHpsTqcwiB8N5HXimrsQqZDw4Ha2nuhBu3oTugB3orEjxhUHniGhBVufmonFzpEU9mvoldEuqGZYIsH9CKOp5fD2AvH+iYzAPGX+++EF5+7tyd/N9OSIhfaEG6e+JomiGgKOLTFKZO3H/Kdc4/NW3SM5+XwPTWb5J39Scp6Khhsp1sucMY6Yl+umop5Oi6bSLY+uOPh0gWyYUppycmpglnHG21/Dy8nWGGmp+zL1kpO5v4ncPH/C012uUkB6oWMqJ1TFr3NblWKNBEjQrJrDIUnSPzCcgLUs0blUzkpp2euD09dY3A0Unom7DGDyGl0A7ZUEh+1mVu/rbKTJ39XOt0mo+j5/d+w9fvXo9+uCeXgMIQ+XG1iIey7KMINzODCotJw4KaWOkb2lDYPAVJ2poy+CCDQyVrACVHEPHY2ZoPFu4dB/ruqyf0VGyAIFcWyrfW7o5v5eu4Fv8AkrIFoDwDKc99LOGZPOE5ls/zs+iDpJ7Mx0Z+zbtVjIdnr2P0xENoic19LhxQbus5nlp5cjUTAhBDXqiIgUjUV1ERAmv+6cmYoSHv7KcIvkW7S/jBzs/0rwP5ydxiGGjF4dGWkZDxHwGb0Jab3xh6Qe44eO3dAWtrzGXTI9a92rCnxNrSCfzimvMrDK//RX4PSaQJuvAe5mlOHPXb2ZnXH+IwR4xZ38SjZJ3hxeyqiGbdPwddibCs8Se2P1MP9h/kC7iRmRI5r2d7lw5wjM5id+Xr5jDg8h3jiDABTHX0JzKdwzVw1pfGCLNKDuPYIE6HyGEUL0LcUEjFdfgyJyfxwu+uNRFe7AbvfurrvV3QtXkbkVxeTb6zUMzJX+TrouSMPwda9X9u+Fk3DkO8m9D0Py+BpEWltT7PicX08n6/M3zmiGW6JA++tcRivZAog3xQmqwhaBOI/ecd1UPuQybA66L2880riedmBBC6HXiGitYn61xHwgMkBTvgccz1tcDqRYehFNEl32vnGdUvxf5adOpw5J46To8CJRpkxwNo+3y6lOm6nkuNJ+oYzE8ic6iE5qzRuaJ0n2DHqH+OHZiLaESZGn9YXKoqfNy5MbTyL4a3I8LwXlAfIh8aCEoz8Grf1S1Np3LoA9R3yJsxzb0Xo0m6JVFcrsfI2I91fcEXr0DEbTnCj+yNNx1crigcl19pJXYj05Sg74IpfTf3qNoIKMLVguEYCXQjibgGq4AOifZGf6aP+ttGCSds/+Urz2ii17Rt4CxcdKeOM54z5vTUjrfU1oUXeO6XFT1EKBjkSinJnxvzsocpd1MyTrRfkVo3IyjfJcXOlTgsMERm+2Z/UAk3aqoQbwaRXtSRl+tqnVVH6BkPdGPCxa3M9AA53gnHgorkJGtwj4U2F/O1c37/bBpzHbcOYT9c3mCLRgM8IhEUislhCDdzJ23xUDHSf5Me1ALJ6F2vEYQYFpl9WfR6MzfiSky74FH+1+4Ktp7vA95K7VX4+pD5XgU3jJCC69Bxfwrohv6OfIvS7aE2NLGgymeo/w77xm2xHpemjO04N63Vi9V+O8iOOwp2l66U5YmQEwHJ4RvR1TmyFZapsiTGSUhGeMHLSw1SwlrX6DzNpqCvozWYLqjxqWGGnzvmspDIcETaGxOJTkhmL/NoyseZNI4M+FZw+mzQ6z7Jg2C/t3iLpQlYWCXJuKWRwO5RJTHSN1gRlYWVC8zkAWmozp6DbZ4kNSE2V7CvIkeNRltDIQX8038Md3H21eDy3mj
6OVk3zdufO3pLvYb8ioRlnDSKU3o/+Drape6qEbHENqDXpQjKVxHjs5wLIy/QuXwYi+daF/AAcEmoyF4+b0ZXDg8JYgNzk510e5U4+FRL+p7Xw1RsQOuxNLE8EpFNt61IAZQv8/OHOJsPKg8Fc1CgQTCU9gcmTppmcFacfJPM/T/NQdR9wqQFHcmeJa54zaGLIlXufJwIzKU0JU6B6H5RAPG0b14pnBvJnt/wSnbV0NbeRl/qGcaJP2M609szebQ8DZvFYOUWZA9YJpplHhwJ7qWUaVF5Wu+7rxvRQMo6V9b77mAQoRVxZ4hBAWpB+RODYn3jvQRFWRKzsU4mYnxpuRZI9Ph4YJyY8YUFDdhoqka1pOjIuyjMWvGFePighTyq1257Hh6rpDRsiA85jAU8fBnpnfShgpz7368x0ICwcAcOFcJ8KcTR9dwZKp7cfVOTEdige8rX1l0hN9Tfa5/59/36vqGzhCxc/VR8nH4Ox3WVelyfU9k5RPwL1GIdKdsGwY3skQxhmBoc75g7+E32lpnlSDfcx2eVZF93ewmmXYE3OvGMYfIsdTX689Avi1f6Jqa78BwIqAZneqmaH6Vx3t2i9Tts/sXjR5e0vgxc5TYeCPGO/txQVNQ8E7Tulf3Y4praQ16x924eQ6O9e9G0FOhQvrMSMs0nIk0cnNzdUnlSqPL8X6MIt3+bcMQx5nBr0XhbHg72XfuSXq5OdNRReiuUVsnhBAG9+DkWnNS8kHonLitZi+tgvV4o92iYxFCCL3Hm3mUxWPrRQVOT62u7w2fxw1JnpxTBxi4Vq2ARRSU7YxaVOK8tr6MG7fUUxRqDq27vBG36Yt7UEubR708P5fp7LrDSQ07/hbKFoQQwhrn+bcRqH3tsvTEW1gnXNjeh0k+Z54eYUx6c+408RDwjZJ1jywpZ9rNI26m0Nwr7j7c3KjNH6J0bbdb5Jwkcr+A7Nt1ODxXCpUxXTDbdb4IFknbCKAv8HfSzVzvIzBFaSknIlZiLOxZM31W7ukipI6Mb2Apl8/YsNsw2BajIQRB/Rw3rksdCORtBpd7hErI+boRBNmyZZZ+fRoeJz8SKpcy9KCIjz9nKiFPocXE5rQhhDC+jQjNtHao+1Mwj7DURyoP6uBrU4RtoeLqwsjIg2nct0eNaNy71rBxiGailbJF4zD23kurCdSIvYjWpkod8ptRnBBCCMXN5ndR1/8nas7qv20ZtIVcz7WOHlITFi5YxE4np3amOfXxrfiMaP/9sKSIYLGvz5LXJ6JPwdsQdB+sdtQTXw3jRvCgMC2DMB/quinU4NiZLlMBZOHC5/GhOzrKFLBnVIQ7Y1mZ0TFtAYyBFxR4h3QOVg9bACIcnsLm1yHoXHlqU9BCIH2e2eHv/DbnRLbwIPOZZtDFq89Ic/CwrNghSIlv8nuc65N1CDLKdnE2lsQzMvLUGn9LVkWN6K444gUBuaySxcmRXv/u3chIo+praVcvMq/ER1X93Do+458uiMWGjkRM/B55+LkoIRcsm+aN7hh0IchCOg/Ly95FxwMVRru/2T6GJkvo6ZxM9vRzWcrSnKfGmUUuIIHQqamakNoIkg5+fXKGpPGuBRyNX8f3uo/1vekhSKQHengU0F15AYfdlZbZFmJ6YtdA9Mz2FL+9T/yDis/mlO2W0X9ppnuCiM8CaayVOVdMW91uKSGCnJ69mkYqTKF16sgxfKmO1wK/zUnXaaN6YQTpd9O5KdswaFtJYK5cqJEZ3Ilzc/Ch/sbrd1D9U1CnlCm+ClAiR0fLaEkx3zEvHYO0hVXBzgws4VzPDlmuRVf9xmV2PovvXf+Ffvf6LP62iaWEyFdjDUH1woIiLLGyyvyEKsRaE2lDOMtcR0RsQwihBp7b6ELXM2UiKpd6X+QWMSvgFaZ0xFwGhmuJFVslk3NZAa2ivM0fGtkID+Yhy4viTXtJM/ONFAIMIYQSKpYYJXhFGKXGdz9Xz+vyJyASO/kRztYQaTGH4o7+NrKtRvc1
DKYTxcOj/kLv4/qd+EPr1mdLyiXNIRnN4sZ8dBidn08+uiufK6H6gz2xQtASYndkWDrI1KNzQmpIQTl6Jcqh9XTHbu8THALved1mCg8oeEl8fLP3YPuqtCigKE00bS4qPRpmnWxWCTI1FYKuMYoNOnzPCDlL7oENQouG3N28hVSiqXdT8K/cNhVjGPvyN/E5T2+bKiv4EAUTalvh+rVvDBm6A2IkDN2yptd/OkjXfCkVNqd+iPyEEMLRTjRYnZFyB5sVdHy2v7tVi5O5PIcOjx1iHC72ubrpbvwcNUZCCKGy9Q1DqTkFNPiewg9NKOj3HpvmkVTs6nrY/3Wca/bcKl/ouszN4aTf0ufFMngGFY4SkSuZUCCWLuvyVphhKU4R/BTO1Q4uD+J95IzDE0h+Jy/TbLWW2Ot75PMlqpfwd1Og7OuWnVdX4DtZmneJysuRkal3/2Gz8ODaCzPmPJPSi3+ozN14qc+awq4uy5E1Mk8UHmi+6Vh2LMRqvyL+zKMTaiUsAEeOjvRBtZ7Cc2+m33KytxMTjnhpztvNj3Y2fi4EPYC4mfsPdcM2QKzOW3VCVhPWKYSpirixgwfX8jmW6pf+X825MsXF7tUhBGkSWr3Efdg0cgMktBFIUMd7Ts7moewwZlppewiKqkm6J0OC4IcaRKOIrDnfiPC9D8ogJDY85rR/j1oveg2qlM+NCiJOOuZWNEaCzq+TbcMtiO45YQ2Wk5yVnKVzVlPeh16BbSgmx/bdmDrKM+SMVEwnxBGYywGsMbaqc31IYi4X1YBwb95tq3OyV44nHtV0B19qvpak6wTnYby5Ysf3Dm1tlkrtDzWkuGWSHhnzkPKDmmlfFxSkiGDzK6haNzz1B/Vg43yQL0qb7tw47gOnB5D7ufeplUmjaSptWOXaAtxWukNFRXPaS+9ywL1UMZ95iKaprSdqyBcV2BN+t+lakGaR2zGZky75GZaJwZbjfbkIraqP63oZI2VJovLwti4Yon7fXWsJjKEZJUlBwfjevKm7lSmQhGgg8qpMDzTO1ONk/s6Jz3TEPC9M/oK0oHA1ZRzo3oCUOUX2Z/HcbwG/pW7dfsd30o3Az44itHI1jp51wazjDvg9L9/UHcBGc66hc/Kf4lxefID+LEZmrr+Ir90YjcHTEBK6OUbjozgHTOmEEEIPoortr42UDjE1SRVuH3ovFQEkLh78Ukmtg4dx93vftaxWJpxrNv8b3DEUhFC8HaQ0PM1v4pt9S+WyrH5039KHaGo76dmCqG8uS1+a4fRu0xy5CfgcXs6O1xJMjdRc3bkff8Aio/rqqh89wsO2QtAXvThZVePwdK/j37nD818uouhdD58Lia7wQOzO/GBJq9LSf9eg/TQ+2L5NUe5uRhWaTw0Ffzfarf0P9Tn0H8c5dBIqg7MBAk3nhDJ48MIU7jmiCN6Fe3QLFY7n5qzcxn2c6inO7yMStLB0bRFIfeOFvBX6j5Fq47ljDUKnQJO8ZJ3OEKs1Q1DAgan5YNy7gKapa+MglW82o1AhhNB6RgeF9t4qn5vpzpb0KgSSLC2qggIkLjKZNTIdHnpmjp4Q7ZggsveKEZKAy/30H8coYVnWB0CFSOcr1MGBcMIxD2TnL8jnpvT4/XPx38yBZjVTHR3bZoDhdwIayZanjWhUP70+ks9RFM2h0PwsHQphuSc3Ys2qqJh6zFJh5vN1TaoSWyHYGcnIdHBihorBOX6KX2MbBtcpc+SXpg4rpeeG4kjzzcS+iq9ZCZklOOfChlzrbDPhKJSIEhoBkZ2i82W9yVwn5cFYxEehNufpiDCj2QXGLSx9XZk4Yh4T2RlbAziM3QYEQ01Phxo6dHBCCKGI1hKDuf7m01bcq2Psv95MT6DyDfe+3td6uTnt5kKSg1Pa0I1/8sMOrmc8164FZnxvdKLPQVSAbZ0SkSEfxKvA8sITVCgsP45GjIj+upiOxHbfNgoGKhmn1vSXyO+CxFujMNAGU3Q2hHQibqKqVVSd7S3Y
iay+a4KeZ0kVm+joDOm68rUV/+xsvk710irV6ul15HWk5OhXeKERQZFEIVPGyHR4WHniHvMiRSzO4SuSl2YJZ2LzyMpZOiudnl6yDxbQHwk2LBXT3owEhaBoECMDRze4QIkshWBl6aag/JO96Ob/p+eP4ndZxFpHN+heX40qSWwL0+8hh2Pn8/jbyN8IQSusvNSR1T1S2WPPieqdvockGkikZ3ioxf+f7r3eevk+B/cEES2Pdqin42R9DpfYLxxGi0jxQl/3XJeUVQghhEJts5PrqS/+lrX1osqh8qjZ1kh9DiE/qSa0fH8ZcL7zVwipT07SvbniNUM+fe9mGr/7R/tn8t7HnXia1Eqzja9DCOGsF2+sbO0piPic9e0HgOC8MI4Wx2wnPsOdz83Rv3PbPx5C2NCeB7fV/nr7ytLnUEJO4zyGYIGl8XR40I3tAGMwyVSup2xm6MBOrk8IIUz2Npf2s6t3CKbCfKP3yGDV0SXaLYLzrn1FkvHEfHSipXOsGw/WeY9eHi8CiBl+gNBLWrr3SxC99fOk/Vk6X5Rnf+NF/Lvrd6yFBuxhQocMZ3HrawqRecDE4Pr1hXiyER6KKdknqxCnm2cQWSvdzR5bCHooNgEtemfUyS5y9Z+pUbp5I97Yyf+u5T/jN6LYDPklS2sYV0Fawb3ina/i93FRj450QlimePBr3Yi9x3EivWz1bBL5OONJ/JyXLJLdn7PDaVkBd6apc7cqxuvwHptP5GNyGHrkQSJ6FTo03s2c6T832kVcY2YNZnv3cXiD1rCNZencE6xcTJThQ+qAGlAhaAq198B1HOLLEvbBrJVeluncOFav7HwSb3Jw1yJuptaeqdEovBMP9P6N/l0L3cGnQIZWNdu3Jyirt07nIkhqmlBSEk/VZevGXofzkrcU8H49RhkXw+ilD0b6O0/24gl0boFEH+KF9w6UUzdGGL/fjN81PtMTjkKg3TflrXC8Y07U70bjhdqP/r14z4OT149mv6/BqldBM81BZVqibMUttK2OZpb7m88Qp25MsOea31h3eyCdOXA7s9KHUxMTZeq1rBnssMDSoY2oWPBIqYZlzb58tTnAcyVypopd8JRz55XVaeKI/qDmd7CvTLl/CCFb79NYwpyQV7Mwx27v4+gkLGvWGQCNQMdH4K2OLc1JiZjy6wfG2WXpVJU10mH9PN4Yex75wymO4ueKJiiVpt/gfVAECjXuDI324MeaBqLSp4pN6T0SSXDYnxEACXnN5/rBm7egwlmx34nIwKXn/+6rWI11citWiz19YmQcODLFSz0kSVRmOXEIIUxuxe9uPMVG0anKRDU5X0JUziDHu+Q5vf+icTWpsUjUwRGJbRjkxDDK83SqpINdWA9rwEtr2b15hdSuOzW8RskChOFJXItjpFcdlRTktKnrZn6JyqO6/gDyY9ZIfSWWEG/L0lHUYvJDh0FBCaKHjvhNl3Fhftnfl/coNlhGxZZ0WA8hzFGl1a5ptHYzjHNQLegcDFEGT8XnYE7fmt2yHbkubkaG1tbjIqvoYRuGpFHY1Nb4aQyQHCGpXaKKykjLDC5JZK1c6vNalqODOr5l1VH4Z/2L6Lyu39Z107sfv8srF0Xd3NXo725OHXuPvTFBPX/8QDHKnXTZBtpdl0ehk+N8V9qQ1rM43zd/ateXwhHjIKFqcmp6cCWcQzzP/R5HUMH2tjuCjsGmpu2VEJLFM1kjm7SM6/jhI0x0gd71y/v30okY2mUdQmQmpU2Slje8Yb+l0sCM0u04sUJotvnZ+QIojqEWvfvxae19grYHbxiZC3PgCFUJeg7znD79v/zRZ69ePx9G6MMrUvZ+Eedx9K/Um5iegexsfIs6nByWTmbBka7tQIMmFRnej4v8G0vBpEmGhxCkfwod09aT7YN4SLDe/Rzd0s0RFyTI5qmAktaZEYk5N0yNumPEOfNOwkJcRNqtaFE1n9HKos1iK3pHnuph48yAderR4AqcnpKlByZ346ngPayI
8EiljEXAswWkJrwECuMajoun7l524p472tUJIsH5yoQN39qNTNLfdBGcGBeqekFRU72vdQppedFIJ7k7ZWAbBgVeWeXkqZjyTfwh8xM9F8ZH0XgULZqXAAFzMW+rDab2l6fbj/7t569eX/ybN+L9WoArVA0LVOhs9u/qF9C+1UEyvvxAr8Gq2UpHnzNL3Yd/Fp3o1aU+dJ5RjjQp79EyGTij6NgFQ4ICAg5XkJ+jzQxTcD54TjhYIoGiwXQ7X8SbIf/N02JSaZfCHdo0ssvSMRF+SJGItKzEmylb+bp4ab6/cZ8kJbW/NKXNg3j9qql3zgFVenqAi1CIt15CDJg00e8G99h9DAGwjH5cw1NHYPjdukguJ6hOKOKGx/oF138GFU5rOxEa6RyI/ttxvipn0PKxtBLnxM8O8i+KKR1xQzC1UV+Dae0C7LsXWNevy/nahuFROX+TR+WMNpP93+J1GBC4iFhad+YQjIz8mj5jLkNdtWTl2gWWdoO3U+5YsUGLQYB9HwxpoavrmWkswubTQ70POjKLmW7cKjhvUyjk5o1wvazCuO9pIMGy992qiQGmlBCWz/U+pgdw+lx4MOW88L2zFPmAzX/zQw5JL+BencPDg5oNbkMIYXAa/3DnY3U8F7VorJgWc/Vdanh5qfLNXz1+9ZoI0to033IpwV0Itpfc9pF7CNAoUWzAM++OoYGwz0tWMVr5eoZvr7IZRpJfYglLw2k7P4oImue+RtFUONG6At/Hc9QDaFJg3K7RiaVydlblXlZjYh+ZDg8FlBZWOkplTBIoD/9RdyTzz74IqRHC0rL8VBcCSU5j0+ihAu3gRJ8O87HUsvBUDLktLihFkmD3AXQeLC1Dz9rhWiIrDk++BBmyVY2rcPe+rhKKvXkDxEEf3J+hPtLy1eYoMoHw4P59o9AIcLM5irPzm81liSGoLoML5ZHTQtj1n9oR97/moCPTfRzn+uBDzRcN7saN60aPTo03Ssyz4SagWm8fwS6SqGgAACAASURBVKjaic/aly7+v6e0+nAoiy/NEbgLCYaPVEa69BaT9XhGGYfA8L5VvFwC9bTmpKXe5kh9bet+NsIcm8M2gwO0fxAP0JuiprRYdvviKyUTFcAZKpuQ4d8+u//q9Tu3Ysnjy11DylDZk2grs9x8crmGDPsFZhFRf6iRpv6cEDf9dSzQuPlv7sl73CPD+8qlqnTim+RiehBOwqvvK/LoeNY4N2R4B9IdJiKpYoDptml0Dylf60WVRyBbe6EPc3wbn2VayXifuVW608GiG8+20OFkunE1MaSpgYbWnymyOT2mo2qBCjlCGX04pd+anZUDzD9Tcq2nemDNdugYhdcer53SSvAQ4CUf/02MjHpv62KVDtpOZE2B76e7ZpR4H96bDwvDGetECFpP4x/27xoPCA+gdmUVL9gQ+x/H1zNj99Nr9QNOuFBDcwTehXIlVJcn1rtnsUiHD+nh5iwakI7b7G/i5Yxcx96+A//e+TL+uEsTgRzcTSevU2W1+9A63rMhKaMQL+fessEDrPvIhbHia0cNq1eMUo3vldKDxtuhSI68lK63Qe67p0PKkTImSGAI6rt4a4kFScz4nUR0fBStlxbXn7/HNSBzZ5c/vR25GP2J/rh1Cimt2dSALN+OXzYz5JTI1tikH3bqKaKBDZ3HFQJFP4DCajOqtrT+QnJgbF8MIPfEvexd36//RXQS05y9EJJd1vlv2tnKta5LgHWJSlkGD9ybLPgIQYVzvUqLjrlzVdOqUJ2zycaik1sOdYPrifRwxZBTIim1BD8wXp+9+EIIoQrH8epHqAY1JfVRF/vb2k6wapIq/iGEkAOHh8GVOyS3/i7uwXnbkNnz+N7l+9GXGJnwINFD78yQNTIdHhollg77e1c/jSiFfzkPUm9eyEEuji9W8m92P1ODNTmAtP2uGQqR0k5XPKV+TGGa7vELV8k8U+ar3TkUwq5FeRdXcVL292MkOpvoo2HX2sWV5TP76R6/
kFuB2M9MlZ9zkuWRU8nTnSamLbxKa3gL82hrqfUkekeX76e3Ctm2cfCP8Qef/6lpuGAu6hc6Gez9klW+z73j6QGSPMeHBu2jTJ2wsKdyJa1ZtYMaUZ+3hWDPH3Ji/KDOoV9W3vP9+DmuRktkWZwcS/NSmTzNwQkhhCGqHxfm1JC3szKO0HyBNLsRmulgUaOncO1zFV9XbsJrjdwi3db6QbsNg44cHQtPVywzAq40ns5v31xvfG9mauZEBLK0r9hJnQT/EAzpdu2oDBspA+sof2Nq0Lsg+XsqBsUBpadxfTnpl53U88Z3JRLsQrzdh9gHlDKZuoguugbUrdrt4/RKEs4dQSnv8jLdi3NeNeJ573H0jspZLSOwJpwcnzVeu0rLFT6r15u/xBEYLpKaNUCk0yAHpF2aZWed9/Sw5491VjoXXlbLghUWTfeRTgkPXXrWrq2yLDPnagYL62lmBKv2TvSArj+NkPrKRNyKjDbuG3zSg8PjzhbumdHL3ie2GR7BO/fO33DYqOTsbUQEFfB9gTedmzM+jDfJlOI2wvdcb5c/jT9yYegJsy9OQuXZnLaPfDiEPsc1PUXIQ4c6F65TJdc340slZF9TuTtxgSyhPkkHJwQVL1yZw0Oejnt9rI4RFWZz5p+cR7JE0cQ4FzPMD5yc/SPl6Qyn0UrvNTQaefbhnfh3P1bJC2r0PL+JJWfLlt5H+SLOT1bAx5FQsH0a/8ODum0YtPl7H0OyYF+jcqZbHP2hzScvMwRtGDpCT0QPAig3QqcmBBWzFbV4OzNY8u1teshl9L6Q1IPjae9Cs4VherCzhuPIcvbBA/2u+jMG75bWw/niTbhprwtY6p4OXiJVvLaGwPNbaO5pwoMeUP1+OP+GfN1ZS3kRtK/5BZSnzcbRHrqmU9bIdngYTdjDoVcvfT+Md9H8BkrI9XQEJgdv1NGBZWHzd4WgC5YVKSGYJgEQ6KU5PLf/LhrBF/9CrRJTM2xg1r+rm5nKul7pxU2URSItnMRVuDavW7RcBukRd95TB1jz/C3OseF7C5Mr5+bgpslqMuopLVFhtjVCR5JVEq5mug1DolmSSW2ziyaROZDS/M/WCvPgEpVaqrXxFJ7hfcuz72w27isrmHQxQA7uwZV3dabENhWTu8axYQNSkylYIPXqlSbr4us999UgTvLMUhMFqCTnsTA7l/qjW7txcbOUPYQQ8nfie72xBloHjTj/nU7cMMWO7U2RD9D7zy02G4PC2DhZ4IMVv0U0+30NQdLh5DS+1oc+O4zR+6ylz4uBsaeyeW7QxvhBR0FBt890/LOkNYjMjk7MGRKNm3SHTRAeyzmtQE1gm4kQQljk4RwDfV0n2pXE12MrluAaW9TSHQGptjWeUa4Rz7m55cGpyO4iunnsf65Tz4ZQJdnRzMp1PDhu3ok/tPWVGtGbt+NB4Y5v1sh0eKiVUb3S95rPEb0V6ZDol5Mv44cCPVymhKpm3LkbvNkbD08/FLghpHTSZKo7fxQNVpZ+TPdR/Ae70vp3OSS7/xHKAR8aSevTiN/vvxe7e3bz6vnOkeJaW2k7Wfwz00YogjOUS/HAE8OmnyJhjBo676VHQHlzeFrP4pezIiOEECYgjdMx3Ub4noNzkeBn0IC7NhXWR0KHB3uJwcLM1n3nj+Jke+WiOlHx+s1nlrbKx3U0t87KRBiLFuXlIUw27aIowaLZ5m+ofOsHBLgtrkaL1A8rXko9u8dH0SFZTtPhwPVVvMfqiXodLLG//ES1r9YHWMTewBGVagWgS0tTpmXlWgJZ/vrZxvsde2saRC3DO9sXBFQhFsdWJt33tN2Kpq0yHAb7iSQtk7fTe6DzxO92bRY6Q3MUvrhA4XQH6Ukr1CHy6DIUUnFHHuWNRRkgwi+b+t1roKqzx3GPuWzDYojqJWtd0XqK0n+TqyDCRiRodmG9GW8h8G6p8Vrk4yL2LAR/K1F8d/TZHNx9guk+dJBIE7GvIind+19mjWyEB4bZ87H0Hpm7
m9czvEq7L+YbeXiMjchJmKv50pSWUSlTVTFUIW1RsNDzx9xgCY2YFSed9+gRClJrlns8/xNocZhBPH07VnicXcdDbDFR68iINddLhxK9LxF5A8PT+Nq5RPJddkAz2iAp2lNfVehPuHE//ykqh2yR0zniszn41et6aN/jwKPlpk6U2lN3yCIQ0Z7w8nJ8lHIJngIoYs6cvEmxTBoNb2LKvjjBdJ9KiD4T3B+msVhFZWeAOivmHENfx/sNMW0q5f1GkmQ6zfWnOHJwXJynUwacv9rXhd9CSw0vIrgqRC/t5CCWIH7d1wNujSavbv/y90/DpuHOAG0QhS+3ZVTPSODG4Wm3yvXsXbLzs/hv59UU2SoFoq7eloB7sNLX67OpKYMvR5rofPt6Y3o10QGc6A8+t7plkV8fv83LzSEuSx22lRes4LZ8bzJ15ylsIvKNF8iMPLS0GNvFlNNTEvkr06JDJS5T9VRPDkHPTkfKWDWXh+TFbNc0l9BTMy2Vtmm8tsPjvAtV342v3fDz3+408RpszJnQb8Dn+qfppED/O3qx3BxzM3orpMw8TbP/6+gZdH4UIVknhHGumk/Vm7j8ICJICY0bMPka9bg5upcKoa8QUZbsAdO5cKNKx4ZwsBPhWBnT/EYf4vU7QK/YGNo8dxGyy7i+zwEP7ymCws672ycr23wBPsEt6Ga4viYdI2t4K40zM4oB2l8CWrZqBqY4O+9Z64en6HWFiMnTmBJxTww5BVrjQqDrHhccrpGo5ExPY3KduvQ8JerncLadoLlmU14TtSk3cKidxfnJ39a9yUqsQoYWEUvgQwih3IrXaaDFhVfl0D6t7JBcl1IkIyZ6H0WgFd8mmv2+Rg4yIt4Hi4MBrvN0FtX0v6PTTj7Pwr6L68+7a9dfxOc1uBcXXOVGF21uSefKHR5+V/pekv8f23mF9FTOeliRxlCpxfdGHd3f6wyzyN6Jbmd5j5RiKVjLFgodFnbUYZv3QKa2isTZzmbwwfuV7XweA4nhXT3nJlgHbEvFjg0hhFCG3MN3hvBIN3PXVGCzN5KQTNCOPbfc8LMySzjL9l3k6fg1iDIkSh1TFHwTCyGlFDiEEC5+GjcHiWp+j6wsu35PMXrqOawNBlziHofo85OzRUi9EHcqBa3JePalXvp7NMYJ+BAoEZ+vRzk1IDxZ4li+Rnh9Pk/XA9qGwRQtIwsnCBI+9qpDKofnLHrb/SIamOt3YFzMyPH6TgrsPgLnAxGUX4NzvbCScjpAiQiK9b+Iwry8nE7vomnkzUl6kDEH8lRDT1DXfVqjnNYrTRbP4h5kqbj3qCsxkDBFaaI6ZatWobLz1x3c8FznQAQXfW+uN28Sl7yYoSzbU6DbMCYnca4lwDUURwJLW4sMVh1lZ1UjNaZqnsqlvIUdkL034j1S5yg/N8cIrSBmxhHifpnbWqSaMGVtCobG5+7FD84HxgM9R0sYICv5kV5D0FKv08FeOvqlSidc/BS8F9rZmd3jLkSFXeSWw1QJ+ez5PAd3/HMQEjb0p3wV7V8fFVsJrqOIWKYL7/rIdHj0B9gfAnqq3vBmXm8SQlBonyVojlLMMrqZ8/pOYuOk8PB0Ay4oUQYpkKk2d3hYsl42OHUJfkdhoH9IdeUb/PC8OUZrwKT1F/JW6L6F77aGbox6xIG1J0/nxcnfacMjIELvCSVnZAt8LbEMmc6bpyi3YRCl5NxOjGDNdZlIR8ExcOf14o829w5IIIPQ5XF1cxKOicz6uudXJ5BBHNSuF5InIRgp2knNTjGUsxfGVkkGwmPlyvYtS4Mrm1+HEMIM6aOc9eoKh9FwVlBmO3upcNJNARU15gzl8d5srL+tWY7XJ6HZm6SS11bXQq9ULRp/FkTAHFXdhjGC5IRoRxm9gYGrIzDN/+fLV69n796V92bNOL/kypV7etCRqFwx4kuqyOO1GqPJz1AxagEi0cbapb7nbUN+P5ZtS7WCL5Nr6v3P9+GIYe2Vrg29ROpobgUm1KK7
eUsjRjpsQlo+t317N35w5dWbcOhrZ651FF+TT+V+PhueVk2qYYou93RMnYDNhrKzdoZTZuO1q7T8gfbvA3HgM/Vfl4E4aIt33JQz4FfpBwSHE0flvjL+TsuE9YPsTpyVOqq/BCn3ri0g3LTD2he9uGLvgAvw4lKFcvh9g/vyVijD43duDtEEdj336eC8UqQxhBC6j5BzhX2YqQCvrBfn93D9FL3snY5Yyt9sy2Dbk6H0A9IFQSTSKwuP/j4+JFYbhBBCAQYlq6GrNviz91iZlWELsnLfgnSa0aPmTZ7cH/fKqNfjlYvjdOg9rTrUf2fjC1Rpva+egBM9Xw0LJCYX0ZtYW0orT16QXY9kZ7baWO5YiuR6MwkzhJDaENHtH3WrErZxCwY5lkypsBonBCWrFq0I5vqv33j12kvFqTk1OInzOTp2vhS+61iPtsYLpIiOodL7YyVW0w56FSPtlCM8HOT6rOyZV+/EdVop6wbsXkcUiiktdg0PIYR1EWkfm8c6OK69R+m5L+misKfPqVEFcro0tBGVkTtf6HO6/CMQlaEFtjAhSYIW/RN9TlWoYBNUSeydfLr9yBqv3y3d+RpwHsn58PYOrPDx3PQIUCXJSzPL75LE51yA/GxzxB2CCr6RFOebgVG755alQy68Z1e+ZYd35ztRYMpLd8NJfEnnx2HGKqo9vJKMi9ejYJJDi4iyvWyczotXIJCAxlLxoqlGU3sn0eSQPF3ju7S+Bvx5D4fpFlZpjW/Fm1cYXj+Xpf558TMcshkEeqaD/XNERz1aZtUk0Z6RSxHAeS0O9QvYt8oF0kiolCa3JgxYfxb/PTp1ga54TVebZgQrAmlG+F8+oJaCGVWgOtL760AjelY/enPSNa7Z2NO/ux7HZzgcRmPoSBMR173P9P7Tmoe65hKN/TZ2S+dgwOhrVhSOy/omf3MicKVuDnv+eUNdrNNyLz3C3ftlhI67P9bAkvpZCa4hlkDLeI43b4FTsh/XXm1f1834Oq6ViZHX2IaCrVFcPV/u105wOjmO/HLtTNFqKWepKQY0XsFVQLrr7C+smhDPZnDKVLfeI89OVqKGoBIE7U/iQTfd0efEPVG0syxrZDo8CuFZioUN2KgrYgcdjXH/1Ehm+K2MEgZWetmIhUxharwOOjl5iwzInVjBK/acHx2xWctaOuD7qiseQFZZwl5dXrGDqN2j6ilIbWsiWXm9CKsApqaSTEVQciNCUCOQJb5IRGayr++VKMCFKfYyZEoLJNKGGcKSw5N/GuP+hxiiw4NUqxNv+cwdNRSDnkhV8XU6SsThFYNpIp6JtixMPzj0Tl6b3WO1Gh9SrRJfT8u6+VmK7kZ7RbVYQ13YfoUOT4K0zLVo5NAVVJLzICYXrZpLCNgtQ2dwAE2nen06UTttiId+obAnUbrhbT1YWp9uPpR9b5ILk5A/2LLBNVb7RqPk4XFEU/b+UXsFdj6I77W+tlYHd2KQoYilzQX+6YErbdPgJD4jPwtyx6g+trOMtqncs1YsSIfmKiDCF/QZF1GKnrfKyCUcjzqa3w6CHnqie2dIUxUilV4BRWqIoOfWPJT8tFA3hwQIT84quEq9uEfoLzjvVppFW6pq/z9GqYarfxlTm267SFdZfVetJbi4yOoOIYQGyF3cyM7PoJPjgoI0BjTurj7Liig3es1ncWFcGf+h8Tw+kNpFdAO9Lwe9ShLrQtDogk4OCdch6EP1hcbyxpzlKdZd3AuIyvmX+luog+R52xU4ISw39PtiO4mCc7IoLmh2hNCudKm1Q4wpM7/HrANbULQLwuPbZ9xFLRZGwxW0OZ+sLAkhhDkOLT/A6vj90kPO9jR1RYaGWJYQ3bJVCiu2QrCD1cTNsspRmc4ZoW1DwdpT0IA7esIf5FIKLEvnoeO6KHPo64SaBQjQs6fS8iyrJ51NMits8lWD/SvR7nQGQOza+rn1dbocQ66rXcF/P4oj/S1sb5NWDfRDDrWR8fWyrp4bHf/uu0ryO/yb
WPFw/cdH+gXwbMkT9GCBe7No2YRpm9QEHJaGNGUhaLRhfCYh2HPBPQ6faV7s1hvRkPfHauPnF9GxGWCPOfpK1H7nU8s0gH/pv4XIOlNhs3t6jfFT3POOkfWJtpmyOoNy2jWXOWHzVrd/Z/99bCrL9bK0lFb7q3jR3kMvkU0fr53SSrSax2RS78aRD0aRXsrN1NUaEHfRPtf8OkYK0/d1AV2/GxfN7mf6cG7eiDdZgIqqT3IJzosf1JyDrIibTk5iDuBBT2wv04Nmx+rMDrMm1MaF1jdhwxICKTokC3ueTHHNDOEhSVpy3G2LULAoE8q6bHHhvy0FkcjSCvqhxhTpSqZXmX7y0TdO196n0dvsvOulR/Elye5O+Kcz5CR5rtMFNEwS+koAI9auw3OGvjvH+of7zc0KxHNzGMjTWe2a4eyCR2F7jrL6jESdFzbbQyBkirZMRxUu43ct9y3vTfvkFZTU6DGHLY/fKn28XJaD0h6ubL3c7FRWLw3hOGJPgI1/8oMOppZIVJ4e6IHO/eI8nfP/9tar1y6kSUmRwgSOvgeuPGQtzcHGmXRyvKS5gkIJR22Fh2j7UUj5qFxs3VPm89lzRJ1mMnJAGNdAUhxJF8fCW9rg3yXzp7l/SIPIX5vu0SnO255+QR5n2dJS3XlUSso92jxSZuHgb5WEdPEXMdqhOXHfYXJYSn0va2Q7PFh3jtwsuFDIzzCOTeNlvIiX55KgROTAuTiTY+ho2I+79X88f/X64q9O5D2myVjW5n05mI7y30ljLDoPVs1FR6B6pcb9Ug5x05+gVgm1T8YG9UEPaHSsK0jOGe+lhU1KToxXaRWwL50jJPo9LO83hCdTzBAGKAlPxtfUTtrG5qHSpTxjzYaMzTq6HY2IC2lyTEEYTHB4cLA45y2tB5k7udLaZaJ/tGLpudkTOjlDdFZ2w8nnnOvpe2uKrFkEy7+jjINXeoWMewyolhJSZgYfae0K48+gHPtAv4DNQ1nJ4j3JsiQvQjWlIs8cIdEQ20IdHqZKuNa96znTQMWhabi04GBbdVelO8PnwG1xjg0QnmXF7CxMJnmfa0P0lXtnc421kuDNESlCS5Xp3AwtLn98qiVK41ncI8Mi1pdpFJWeIzC2y/P8StAnGOBQH2pPDS2d+4rr8IDzljcJBqqMk5Dv5wKf280HB/Je/SLuVTaqbj0xmRZ2d3DebcbIdHgSYmoYrSebS8Y8UqST47k8euTUMSmZhkJxGCeh+1Bvuf/T269ez01ZUoX2yCDXe+TfJfQV4CWzdYUfQCRpLSp6j8VGXIWrnr0H2X6SNb2b+bO/jju2pOlvQd8q3peIaUmgS+7YceNkNe0kx8Q/J8rWtzKgYuco4J6XGUjQNgyW23MOs0jFXllDBdSl9YAickrEzCvWmLp0R4ZzzX1Vs4qOGXlnxl9ZoaS88YkaguEDoEbU7DCyvli2oXvYmx39ELTqcBz7dyZ6vFWexR861UrmkMd9KZ/H+ApUin6pEzk7TM/D9q7jftzZjxHxjUXEIu7ovLby5vzJ8K4GNKNbJOOm3tIPNoh2MF3UeK6H5TXKpGtXtlaIbNq5U/k4QszDv3rw6rVTBwooTPEggHuwMGaqxOwUUyfu74BOMbDqIsmAoN9UzdS79+/G0//iWiG/JZChAnV4vDFuCyXrA3cqcb/epwqK5swSTJxmgX/PPRAiiuMNrpHiYnrRMypM6ZcMKWPBDBt0e6GRtikJrz1eW2nZ+zBRpp6ROFVeQ1DEZ9Y0DgH+Wb2Mf9e/pyu+gIolL0tfZizQtH5fo+PX9wg56Lz5ISaMeLsPdnIuXOqbsx1CAenk3SJLys0gED1J5KDxdbympwfKGX2hiHKliRCGoE6fw6l0Pj0lN4d9ZypsamXv2zB4hhO99EiRc+iVFERHS9b/jfOUpaHjZZoycI9Mr2bJ4QdTWm48if8e3bVNB2dlMULEbU0ImVbKWSAUmP+3t8YxuyElvo4MTveZXzWjjYN3DSRy
laXIvO8km3SonAdSgVVr9ifcq55mCSll6b5euKe3UapBAlkW7R1at3QEmo7+iKq/oVjDn0VvlrxSj4fYiDLZIgj3MUCJ+m2dbJlfTzkRUMxIy1NhvFLURdsdRc/INW4CiM/1eoym+jdWKTVL39O09w4wcB3RnhRrlm5mxmZg6S6gscuqoVzQZaOenfc1k0bb3igYc16GKGF9bO0pUKrvWaWskS08yF5adoCRUMSGnpc/0QkiW9ube/JQoJPTfqJece9BfM9L7Qjx+iHO6hV6iwmPkAvZ5o4boH4eP+iGjaKETrouY9EPbuscVC43k8zIY/hDg6TiynX6e7zn8W29x+lB+qLhOuD8eMopjbwYgm02M/xpSE7uW3ju39egPAP1iXx/cJ5qHX3mYxy6nuZlZESn0VVr6WDlrIyKHB6Sm6kpFYI6rwtrTso+WAnSJAxd6Wqz6FwIQUTKEgU1TAGbiBuNOA2zN8Zlj6y1NWlco2eRfLU5drKEnZOANNzays3zx0BtqUs0To+4q95GpLzZ/LIFQwiK/H4bzZHvazDIZW+qlR10TFff/vf60Cf3YvS0qBo3BygMD7dVya8Ph+FCjcd0F8T1VrrXKOkXm2uS6V2gNge5jhzWWG+kqOFhO27q0o7Cdef9aKwlFWZ7hxWOLhkxxj160MmzofcY1zMi/93j+MErUx8ffxWfU8HQq2UnPrfhnfSAhjpvnr4M+PfkAFVfF3oRonkudpw1XhvhcU9yBIGp41/EsPzyA4Xp6NS4dg3hdv6A67ctMsAh4GkrOkAO7XMaKIiVaIRKMp3zRsi/oVZQSye5doX7MI+zXAIZzapfmFZofQ40TPWwhKDpTlkV8LA7c6KYy81glTF0UDKjFzguRdNm4iHmKblCRvsLwsHScX0LHR46x9TicPSBDvBkNz2aXdnfieYKdufMmjSRqFyxg5SHLJvmOsLAAKT/lh3GeJjOJytdxhub78UfUH1hKsNAL70Ro/QUMnEzUYFF2rB8bca9hgqomd0j0mKLOu7D1MFX1LtyFIoOvBNH4XmwBQUDmBB0r3YfWaXJbzYHGb6/s1Tit2EwIKWNnFqKc++zaFy772tlBPfErX/3RN5b3I2Ga1mJzoqjnHT8vVM4u2tTALH5wsQRsV+m+69vI+mUjt+DAzjRI/ZsGR2GUkm/ewkRIPZ4mxmkT+6Mc3i4ZrNI15zv1q6SbF5ex3tc22JcgZ+0HumX187YoDW9v9oE/3byNx16EXpsuo1I9yuyRqbDw9SDE1nv/E20qrP9uCPZCyiEEAan8WFNrXSXC5Y9Ytwh4UiozzKINOSdqEJBGr+ZwwPv3NvVc5F4ybp8Dk6OtxIYsSfPZfqUT8HfSkCV/G2WtiKvJqEHM9v82g9oPl86RiGEsKIjCRStlJEWcxiTskKeo6fjxN/dyKh8+qGGiGDyN679c/H1witBME+u0UPEMguqdR0QjjGqenh9Ov0hhNC7z7Jx0xWRajG9R2o9kbjulZx8jyTlELQ/UNnk5bn+uJ6d/JifEh21PU2SKnWwjGg/Bw+hdKN7k8/Q9b/YaoJNR9f7eh+tL+P3Ta36MU39PUGkhU0iT25bBg8tOgXuYFPh2Pk3jafRCIzfuyPvjQ/RogQE7uYz/YLeQxB9zcyWr6PxG96KRt3tlBTgJNZzfO1pfwbbFfRdW7nDAERmMjJDiI9Ogbo4EZ6Cr46CM2D39+gcSRBg97gE583Tbuyx5+/xu2kn3Tlk9/TeQz3M2k9AWkZzZkfztHjkO3J46DC0v1KDdfYXgLaw7vygpvNSvzBtBKAkZL1751xWWLlTQ6fJJ5awIx+GC4DVzxGtWU8k3j8XjAgNBiWwutM0eo5u6W39AeRKjI+BEu3YAYS87bqi18gt03U6Cvn0zcEhB0tGF3Qp07cyW6rKJnR3yCvx9ckgm47ptyg3/L5G6Lg0ygAAIABJREFUGtHeDaBo6DiKgz1SMcPPlBmrGF2l/PotlGXa9XkAUcU4
IS2B+6g/Te8B5XtuejsugmIXrROa1poB/JuiORMsdR/lzdGgND8QmYXJPbDUfe36OlB9nh3jfq/1uwRpsuU2PUIlmXGE2HZiiSi+fpHOf/NUbtrwSiwe3pP97RPioRPCfVB1/akm0xVeRRUnyve9p/h+P3qPrGSaqXdfDrfiZxkEOPrKUTZ6AAEAv76QaBHdlU14sF2LUeJgqvd//TJuujzECylkGEII0+M4x+VzXc9cw65ATCFh7unR2DIqSGNNriz9x9SubRiu9eu34n0ltY2AiFrTZaJv7E/p6qdsOxFy31EvrSVg6NEtc0LwrLT5pl6DrQiSKsDxNXO/7nXzMHbHi+Rp586kKUD79qGOhIubcRMxKkn09GKw73sIRrVwbV2dSeDlWvK+PvX0iH4JY1//Wq8/PcQ9g43mXB8aVa/gUrQivi7Ys2YVGDunh5Be3h+C7psKnqFHcFsxBFEEOdE3LqrUSJ7/7XvxGY2P9Hlxjc0DnXkr3eZasaXBAIEluBWbTiJI41vpJE8vBy/U4wPko6x+o5atAf2mwT15K6wpSmgesNgJvOcVoFQmL94Y5E3JAJSoewqAhwk1i357I9g7ltYT3gPSaU4qZqDlYp9pwx0v/tuRkW0YbOLZ/lVE/vvvadWByDiYEyMFMhk/sXYRJ7TcM24Z0h6OjoqwLQIJ9mT67X1sDnBDcKVlvUkpzgH/plFRI0kn56aj1RuuXPzqPqzNUCByam8RPa86V5LVsDhj91t6jyRWezPc1lfx73pvGK/wNSunlBjuacn4msUsux9pFD4+SU9tZo1Mh6cseXB9j4QoLhLvI8WF4YJ8ciPw2NbGV9AKK30AAqfa5YkazWHMEocTqrYSPIfn6Gb+ZjTo7hTQoXIiWYACbX6mU84FujyMO6pUV5hoTgjd5PFzkCsfvqmLsIxOuER46kYCY7pxblVU9NDp0CYgX3J4jIPUeAGV1ccmsf8k3gvbTCzq21eSwudMwrETUsk7I5kyhCCOLT8XgpYgc495+aY46bN04yvVYhZpkbQ8eNPKteFMLF0wb7HZWZkeWFECDhk/PKrP00mZ3BNcU967bYHz1Lk5tZcQl6PsxG2HXxGJ7tocDJl2swMUC2F9gu7SJjPANhleXZlbbg5iCPmHEMIY5E1H87ZhkH/58r+L+cgENYFOulfbYm2WMnovrdFmwrmSvKZfn81D+/dRBGOinUSXllfWOBP7oGjCiXTSl8/jYdk9tc8B8Wm0lRNAEIO9tJbXxuEB2u89LmkzvMpV+HDgHHlKa9yFYTcnrP84/QzX1Cts+q1c6uecQ8WydGkGe8+kGg6Z7kq9pcTI5vDgvPFKCg56iyf/Tj/4AhvAFyG9QCp0OjFZEAY7Azmxpb5O3vBOnInGOfKG9y2qhpPTeqbXGJwAfsOh5oaHkVyid8glesGYYaamAmH5akUN8539uELzhlE9v4neRe4L67vCSjt4+Nfv6D3WznC/7p0T1SDh1hYaS+ddL0RysLZnelCH5rw6CX0bBn8zkRVPQ2g1m4fs+JynYWEDRRvJ5oxrrGYVfeSaiTbSkTpeA2jX0DEOQdNAeYPU194m4ncjt6uM/9wzNNW0NTVvxTkpGoJEsUHyXBJEeCBNq6n+ttFdNnXC60k6/F20OSDR2h02iqwF9NnKewNS6tEZqpqbbA6DXTSv8RIpmL3tS2mRO5NZjkznwp1c2IepVQyy8zaRof3PtAyp+070SCodnVvyh1qoAna+VOedrN4S8aWnwmgXmo9jJNE9V/GoOpqJVq1beh9NaIsfxahzcWIq4iD5u0p5In3Bt0jZgw913Tc0g8bGMgsroJnFrrWWEHQmt/H/E/+2dcBzlKR37+XJ6tPJ3neV0qJyokHqhAX58C//XJUTtW1DunfICUpUW1EzwIiy/O7qhRnmQlyF9Ah9ktlZd15Phzh5yHhk3v4y3tjl+6abQE6CiTUR/am04tMumX4DnZzRXD3+2Qh8jpZen5UtRGSc
1MeSNicVc3PQ4XQiu2woh4OxZ9tf6j0O0SyWz5qGflsGbYG8tnOIVSFOqiNv1iPM6Q6qqrDWvRKBRiPhGOKjPCy8QzW5Wu5MNA9imD2fm2FDamlRQiQ6Tjcny7rB30jzFi2S5vorok/cvGFIFtJia6sCY3pgDf6NK50vW9RSMpQOTXPnTd9X4Agxkva0G5ssu11O8V0SMvow6NuI8NAWsnfb0DRuGPQ4v4cSIgf/qIaFQozCAzpSO9v6Iq7ZwUN9j3Z8ugfO1QuD9POQInAx0RQuYwj6XMoQt6zu6YG1ACF4MjPnCvt29lZ0jNZW6VXEunQQgUFYVtcAOQs8gEFBQeFA52c1AGevruuUKBdtUvO53iRtmRcCVa+QLq/CtlgQME/p9PCHRqbDU+xDJdlSFHSAiJBkkTdL1nCT5LEBmoy6R8jF5JEiDfrFz3SR0zkiZOr5V1avOGqx/xG6QWOjVLq6YXuPkfc0o8TN4emuJSboZDeiOCt7iiU0Q6yrekhoo6xwYPL+swPwEGY0/Hp9dkh3PSNuKqZ0HKIvAdXxvkHkciVKOiW3HF87uXwbBiMQQu/+e6k+7lIH0jTXdShym1+7OriQ9S1VzAOTGiZ5M45TBHbzQ7XgTTjc3i19ApLjilLz1mBzAQHB4Do2nbhOJ7esCgxOifDwDvT6xepmLlEIIZSREp4NIFJmFWd5CA96b6A1EAqvApui+CAHZJZl+iGEUMD8+LpfVzajCULIDCGsiv80+P77GrT50534e3c/t4rdk/R0FIPOwX3NqZf7EG4FwuOigYPTODn1S30ORMZ4yC4ea7TQehLvo/fAeHN4LI44kBJA292s6RwUcLiVTfW7O4rn1wyK3YWqp5tBU/CG3JSysDNbmjvDXnmgsgYi6u1WlNun88Nzo4YKWyfaV6FLVrSuCqzWJvXEEXSug8J31UsrQczF4ANn1Yl7lWMI2iW6pWMesr5LhO+cV8gyPIfO+H2ieGrRICbTpThYHcPu3bmVHVQZxEJGBu7w1PcgNT6ILvj9PcW/68VomDsTdeyoCeJ6J6wuIdzu0QvRGp/HBZ1F8D7c4eEG89zyOoPERjSIz3PbER6ORKUUnW13DKkr5RoSmHv+nZdlinKsoY1slDhjl2jb7TSAy7rB5lzPtimqNfQ2Aichb07NrJ9+OjuaIu/lN78OZmAraARaq6rDtoBqbQGNRZfW4iIPdeh1yxRnX8CwGb+nAIew0YyH2qCnB+gaFWhTO4BE0pZ/42W2GUjiNgzdv/G5Up8nhGxpECkztlQYkSIJfs08UCMsIYsh/MX4j4F1PR/cjX/nDZAnVLw2omwZwfwYpOWmMYepa+MaN1ItxdRRy4yLIMT6FlGchE6a9ETE/NuaykMFvdAycUG89qrM2tnmQMXlF2jLiqagPN1HMIVH4wADkfDvjLRMyMobhTkRKW3wpn0RLvc2E4nnjhJlCD4xpeWHkWhCkGNjfVZ4YGRVCBCp8TwzD6eyIVnciO4IVFECeK8dvYmFOVQfvow9wxazjMfmiws5WJLdPHctJea+AVhpgjlwh6fxnKxEfY+Or/dE4iFPnsPgZPusuzw//Ma8oThcD81vrBcOUJ2s1J/0lxvoNfp3Acuf60F980b8cjr3iVJdqG271scsw2hf3cQHWKtD36SbTroqdKwsHQ09vWQ9TUk2bxyhIST3T0+UyDRZAHpHGmFhkg5r7DOpHLN7LJlDxb979/D81esPl7flc1M0xSxf2wNYbXbol64gjHSj84C2YZCcTJ6YE/mJsjvvwh16DqKb9RdxDRBxDyGEPJG2errDQ1TV+XU5os2GQlUz5r5/D44SoNTDmhr8pz3LHWM0GzFKWmC9TaxsfHkrXal457N4HxNll4QZBSxBs1iYaGf5TgzCZ339bvHRPcBhB3bk7Z2GQtvYeVeDd6rSMxhclo13S3L5d9VaYgkPrvjcqg+A5LCnUO+Bfi5vP5aDi40okacHmDpwSLf9VVwYrKLy
z3KSXZk2q8T37E/ijbGqJUE2ZSNJm/8V+AS+QE9bMY316UWs6/7R8Zl87qd3Ylf4J33dNBc4gJIaPVg04GSVjXDGnG7tXN7S6gQ8C4+whqebuTg+SkZo5twxQtlG+J6/i+k9r+6jA5kw5gyuDI7lfiFnbNrWrcrvIzk/hHTELCE2iSrM9T1FMG5D9v6ba11v75zGtdmDIl/eooXRCLD8gRqCJSB75/cIzJoHQmXluT9+/M2r11djTYOQH7HbjIvWNA5DC4dMZ6KGh2TtSkXn5/F+7MQ6gQ7WuG9CZHDefD3nFpsdHud8MVWwjQiPruf42p30An7XzAJGOua1Cyccx/ntPUSncEvlkje3dvQM/5T2FF7Wzaa/VhnJA7jiQS3upQYF5d9cqooraQtLC2rP+9H4jdCcttw2lIj95UxIU85OmwIGZdMDBMKGnAZs93JTgwz6RuWn6gzx3JBu7LbuKcRatoppAhiN8ziPI5PvoOeVptO0aWSntIDqJJR/8R39u+l9M6iNkyB2shsqflDJ9Srw4HyB9u+BsGuHLJGWIcWarFySDyfhLQpKlF4KzO92US1Vo9W/2y3HHzu6ioucfURCCKEMDs/MpPgXQ9bx6u1TaG0BaNSfJzfD8X9WnPTFP4+7iCXrCeFBODKJJodMkVh6i6kw6veMjsPWDaJaRAa9Mo8Odpb0ebI8lyTd+P/uRLOVSf3MOp0XKEQJw2BOmbQMsfvarcRTtms8hGYxLpYn1zGsu91WT/YJFMYX5qyQG5Br2IIYb7Ynrk21W4l7pzPRSpMxVGwp9la339Ltx4VatYNlfB2duaUFKmfDuPjf2I2VqfdPtSX9k6fxwFt30422/LcptfO5uaTGNowiiMqlMc8Ms4PgZ+wbMbn/OB72031DA3FAkv8xt+Ki+sv4nIenijYS4RmDU+Ll5QF0hET6msWDdogz1TaHI0MHJ4QQLodxU+/V9KBjiqsADpoHEvK9tnWo6+ZczFWN7wFlNhqEnCHOp2XLi13jw8Ebqr/Ed1lQN0WmJ4vKwkyMZ4dYaecd77PGazs8/oDTiKauMtz+3/7m1evz//WfyXuEIKVxpnt9UDWmKnIIShZzrSBG1s0XOJza7hWnc1v4O7MOLqZs3JmovYgXHT3SFfp0EN3p/+lPf/Hq9X948ZZ8juTm+UINZ/swkjF6T5QoIJ17rRqGg/P/1f+ongydEEEPMqoAfLNVSWIzlWqZ44xrbMNQjZh02FY62Hv6EEaJjksIIRTnDBAYEev1SfL0QYJ4E/pHA4uqpTLPKrFugNwUje08WMRJaCHdRZ5ZCMoDypviLJ2cpZWKF8AhYBWV2/2v+9HZuuxqnpSVZC9exs+5hhUvOhmYci+qVcZBD9Dp8/gQj96P+28yN0OJfevKvavaZgiThPcQlACaYJBvwRicRKNAJ5oOTggh5LCeh0ZMzkJWUtPIdtgP78ZnlEB/zqHsfQTk0VNa2Kt7f6/O6/k/j85rogcUuZLwEp52VAynAvRnutTjd7cRDeqgEO+xd2nCaGjTUr5KL49fWCuT5lO8B9rI2goWyAkt/Bc9C+Zofu3+ukg34D0/bwkOJJpMM6AET7g4McFhoNoH//F5eN2R3VoCE7se6EdpfBpnKFE3RvbF/xKdHCdRsY0DGxk6P4bog1eBZYlNpaWZ3FuUhmXmE0j/KRibBE9HVCb1+kJatm7NLDHv4CR80Fbr+HwYT7zOb2wlo2utz0EFZel0VvzwkAaFxkch0sIUSd3W2QRKy8mu8+nkdXGYU3p/beOQ32i/V5xe9zOWm1+HoPw15rNXVlI+BfQ7b1gOngR0EhVtt1dAe5nbghhjXfoZ25/FH9coTzf+fwhqEBPIClRmvS0EBQaXGUGGdJS2USjSMEcLu7ASX1ZzBdPhIW9xbr3AiuiW3gU5oj+2aAdRrwt1pgqxpmdAw7z1+tHs9zZSfO/mU40C5s04v8MTfQ7k6VQvrBS6FOe09Zt4iNy8p04uBT69
Inhw1wiHvxuNZ3qP3Tejse783CRWhunOZqLp9O9GsaDrhiioBwhf3UTniE5HrmgTjADBKxwpuFlVfy2M0KJMzomBHnrTaZyD/LFVUCL9tTAiP2UveJ571sQDXrk+ELbBSbwv0mZCUFt2/Zfaey1rZDo8ATnsdc5gRmxeZmmmxsna/RxVTuYMCVkY81qx6JXlyYnqIlSaOEeB/AhqAHm6hR6nowoKESLiNuKtsPSNNS5tLXZ0kZOofD2LEFXddtBlP96IE8jDy/gF5GX4/S8heuhl6RqZ6eVv/X/xP65+HBdh/6F+rv1lfE1RuxCU/+RQNHu+UCiv8972GXdBuIjilNI/55FoCyRm7yTMKGlwSuNlBFD247IKBrZpIXrZfqL76uYt5Mvresj0J/ELhlZ5dP8kWtKrUVxgjjzSUepeW5Q6RSWIcQgIh89gHPNn6kx0kOJajvQB5Aab0UyPSqcgKudu6d5cUlbf+wZNNpvO2VTvo3IOuQ2vKipuXt/Fkdu/dMmObRhMWdBsjW7rumFqyh0EOvrzHZ3DMptNvo2+hDYX0vfQppZd3OlcERUKQblEY6MmtJ+CLGw2nvu9UY4/7p/d/lI+9zdnj169fmmwrSsex5s0/h5kG0p9cybgoExuyVuh/Xn8u/6jdOS/+H5Mww0u1VivdqIxK1zqcyK/k611XAaGNj5RVs9sNhFuc/TlvQx9Px/ZDg9KOx2yZ/qIjHhX2C3DGCeqo0ZchBRIs5QHFrIvNBp+n1gae2mEaoafegvu8Eh0xe7xhlaxOiGhKE3hRNNU+Punp69e/+s3P371+mqqHtXbRzGv1N3R/PfTFxHxmRi5tfKSnYaBHignOoxO4uuaqWpffhBX4dHfx+d5/se6UdhLyztgD+7H177Iy3BaiRD6NbZhMAXF55oojST07iJlOCAo4BmCpmgFAbDLc+84V2J8hIMlhQQdgq5TR0tqUPq+c6wPgv2ASoCo2k3dPE+uQdicmgOCUvFcT9/rvrMZMmDVVAhqvFh6HkIIS6KsIO6vTfizfBENw3zfPFN8Nt+zQ+dORHiuh9EYLq3ihbSjstI5UtNT1Y4uGEa6aWjKDznSuqV7KoNnCNv+hBDCEC2DFsbJmGE9M3ig2m4IIfRP4zPylJNwwZhqM4I4+ZcJh6obn8voSB0lZhNaQD3p4IQQwo8PYoO5yVIdBtIb7jXjnvvF+KF8LocWLrkPNZBYN7Gv+rpm6eQU4ShNTi2lhQCn3FbPdH4Z3/P9mMPvyaOPXu1SP0feLbX4QtD0P9ON/izIvXJNp6yR7fAg7+4S1vTm6PC45siiHheyL8JyLy5YToJ/jovc+TEieuiliNwP3IgZHqFLyO99HB/45fuA+T31RQKoa8kxBW/M/D958OTV619eRefndsMEFjCI9iRGIlUFY4R7TqQGMcdeJcfRv4doM0Ng0QnNmVVs7JbO1hIZP3Pbhj/z1tM4oWxqGIJqfzhpkkjk/kdx7TmJnfC9N2Lks8xqyzLdQ5RnPJ0ZOD2MWENQAj2jUu/+LPo3xtMpQuNmYTojJZB7l5DGyJmzUkXp7sykGhbg8ORQuZgb632wQszL4ynGmEjRwoFjn7u8XUNERzNE3OTaDQtaEMj5ntuGsfNvP3z1uvc//OTVaxKMQ9BgZllKr9LyFAgdf4oyJg46+jGmhVa5wX4EAtr+TO3srBk5kL6np3sskLEUMDTaqqjKuNNUBGCVUWZHR4lE5ZztTTauHVt1JdPDrilHlf/ZPvSzRronqqdQebaFv2jTyBv1BLdS7cSFOrD0JbNAhZlenz3Puo/hTNhvcS291x2ZDg9FubyUmJoE/PKZYcZUuEwsoJ2UumObZOa+vbGc/l36v1ewxU4+zqou4kFz8Kv4MKjqGYI6ae5Q8ec0m8r0ZUnrv7rzyavX//lSI4O7jRgelm+p1fuHeYRnFtZojqkrKdM3PgE1l2oX6YZkjKDdvW46VImcNgmsGVyrEWRMXFRrGwYRGCKK
CbFGVPUszElnw7zuQ0MOMBfdR/FZ+uEoHAV/XIikhZRpe1N0mWzP5WEtmd4KIYSjZoTkmNLq3ij8XWtEAz5qqFGdA/ooGHoiJa1sXmjOygRKxQXnOVDgDb+zemZ8hUfxHh2dkS7rfk71EKAdQj/FULRwDvvnndqXmz0e5zCy9crO59sH8Qz/+sevXrc+jca0956lbFBEsve5ooGTAyhvW5qX54u0iLCmvHufxms6P+rmzc1E5fHPtchD7LhXXsLByrJ9HKzCDSGEjzqREDmcqq3+0VGE3YvY8J6GXbHAwNpCVC6Rzj4yRBTOOPk3a3Oix0hjVQ/0/oVbdO2px/iaaVivDqWWlAcSPFcbsJMu58IxtYxQ1sjW4WGPJtdVwQMn+Xj3C53k67fiV7S/1s3Klg5chK5bQlg+ZzCwNCkzW0BeDRerH8aE1YZGDmXukNoRS69aY/54R++ffYqq9oRZ3roCBHy7rqd9Be7zpUEfEmEe6uqa5ONGL3cB17oyMMpJnWPDkaVZVD9Lr8Qi4uOcFiFMM/1nacNtGHx8ex9F7/vsz/WZUP/I0TR2BPa5YKqKBtej5bS9E4IabXILnJAvlRSGkOy34+R3rALqoBHfI2+nvaPRSL8H8qM5JCsgPmtr20AbngfJnynZEELIHcZrzl7qoqVhDvjd02NnzCNdYk1S2VOo1Fe7wBYS1OhxBHdZhmPk4uxpZekZwWv/3vbx2khVKO8i5WFpXnI2ifyHoDY/Ud1FVXG0mfAGnnSaXAOoPIgXIfrQv6uGXDqzu0YPDtZElRYMA9GZvz8/kc+9fxRzPbumv3I5izakjfzfrGvFAAwCLJU7b6cf/mKvueeqOlekXRSsunJFIUIjLc8HCEBwZrtA7f6vKR+gv00KWNDlwFFsQc2/xZbIRnjwwyeHHp7El81vWKWlq4Tl4IO7Jg0PjR5C+e5ckdvVeqY/nJvKPUmOrKZ7vIZr9DCiICTrLTQYlbnjNTuMX/7GjibyP9iN4mnPJpGl/5PWN/K5D/sx3bVf0YNl9yBG3MORLSDA+XPMox/CrIzxPjE3byMy+4TzodcYnKZvtgYquvyAHt3a/Ox9o2zbuPjjaKASneMBqU/N+ePBndDhwW8mv8ejWWn+ZwEC075ZlW7SI804NrcacU3NzVNimTo7Ps+MB8Su6iUzzLMSHB4j4edoXPByflt/TAHwvQtu0mvIMZ1Wts8RNWrre0TAlrX0MmTOwbCjjhcpIq8rGlg1zsO0jR5Urm+1BYNB5/gWlKVdOgEOnjsaTO0e/V/P5L3eH0enoQsVca/Eoh6LE7+prkz0x20MnQI/T6iRleBp4rM7cGT+5zd+IZ/7Zhpt/POxVvg8G0S0aVBFJeShRn4TaEytTE5ivUP+hwWdB3TM4/8XrSM6nfaRSTWUD8lNCDbiM6V9sl7XYYieaq6IzYCSIpONM2s2i4AvDV3bNLJ1eC4IA+p7DTgyhBzd+DK36caXh+7ep9BJOPby0Pi6f6pWg/LliZTZbspBaodTBSVvw2PPN24mgDqniZPu5MTqS+h5mBp0B179v9n/h1ev/333XflcAw/AG4uSU9G1g6t5FDfL8Dpay8KZSYZj8fYfemQWX/Mgnxzo56jX4x28yQuqJMib8aWssy2sSGnBue8+Jvqgn2OpuBPcRUgzo7UEKx28dRvTfQmjzQAQxM7JoRlHeXyWAkBp6tTQn4sQF3+zEn/4TdcIlDDGM/8BRHjMWSEvqPVl/Luepxiuon3KWdk4uTTFAb7b7mN6O054/konkoJsjrqwZQt/t5ObORy+z883R2HO12KByBZuCamqpU2vnevvG8DJce4aHY3FqZaDi+wCZUicVAxO6GxX13p5uDl4cNIszwnuvxBCOPy7uJGv31PHNs2Z/WXvnvz7fi1qQTxqXPrHX43b1c0K/CGEsOqCS2oOPEnG847zFuKzqVwgc3HfrlGLk1AyUcIpsgkrazpaTbHjjZeG4MJH8HkjwMA1wRTZ
b6+Bf3yLTZGtw3MY73o9VE+P0X0Bh2yCtIw/27E28aNjaIkAoppmwHIuSiiwaUYqr4Jo2csNmbbyPLscTvhtXrXGMvWsxninTT3t98txE/3/zL3Jj21Zdt63b9/f6JvXt/nyZZ/VJKvoZJESSIsyRdGmCdmWYMEQDMuADY888swTj/wXeGDBBkxbkE0IpkxblKgSKZJVpSKrKjMrKzNf38aLeNHfvr/Xg6qM9VvfiXPqJUDku3t0Is65p9nN2qv51rc+6trieLPiPTyPByYEfu/Tr7hzr58zN2lv6BUqbh4EtGm2ShjGT0IKXCqcOcHYMMxHuoAQPAHeQEJ+fJ7jCpo/733oYM46T6R0J93mPVE0+I3VZ5JpAlC46xdZV9xkFPNB5XKEcIMaHC6cJrvHzoFZm1pNObtic/YpMgSzUn4hiwyoaUHWRB3ZJAPBzpA7CHMqpXw9SD0fVyU2CO/SqAYsYkOehZR4LXGRAYYn3Y+3RMeMwVUklELchISKU8PTK+pGNk+CceeQm4phIOIuVA5SBiuOknK3cc27sQhHqD3DvBn58cp22Z9+wZTvGcDk8D0pMoXGMJl6+5vXbADVAUBqjXQCffCNknEu7I19Zse9mSk2AwiJYc/L9Pp5w0m1Gr6vRnsgXxRDIo25OSjZ3C5V/Mf0D2AYS5ZWAcpQX9nTmVKO4e2u+rGm0ahhdl/jcXbqYQjJpYuSWrKH55gxUX+OSOvVj8GTcNF/AeVob82vZM6L7jpATjKZHMhMvArLt22SEy8UQggVbLLU/pUckc9TzwTfkXFbJaGaYXdWsBsF2G7Pu4Z+efH2yfHhxCzFSUJg8u+/+W/d3/986/WT42LOzwTiL7ix6AZEUZER4U4sTRJEb2paAAAgAElEQVQwmSFKzZg7vAlSKjFs4rxv8xjS4vcTs6Rj3jkTj9PhfO5uxFsurv6beMU6Z6w/a0/8JhuXGanZL1S8ckKXsLpgCz4jKPPDtgl+YmUWaj7Oe4QQdlbwMYNjWEKifKeBlyEVBDltQghhtIAYfzbeUh+jhpzgksOsBCZnCQG4bDcRsGNgIIrL9t1D4QNimQ8tqZJqng5Si2Yp8dypP3mpjUYoKRKovIfgwfqTonhgIJ+nksGVb4EtHBmOGsrIt6zvtTzHeNUmErO7NLWd59R4J5meUqyw5bBp/I3lj925//fg7ZPjopQluFkzZWiEjyuWvaBl6Hg28e+RW4vH/sxguKSwHnuHXmnKVO29JkPVSKjpSwYXEl9c2FAiKklZ0owWcQ9RDM/ad20T2flr3gOW1JIxPLD6dfOhFUZlJQK4w9+lPUlThKeFi1qVDldMT/qK/A1qGdGKGCyi19W5kVBawnGVwPOkgEMKIvX+8J4jqYP1YGCDdaNoE/6znmeP5AJ40PUF6Wqoe6RlAIi/oENm0pCQFkDLCkYmRprfIk6oMCnG34N/jzQ9GsTRJJ3rz2FaOseZXCIjyUJyAE0BbzrcWYLXhR4Z3eg4Dlx/IfjsMQpt3SB4j3LRv0izZ4uQhTJD8BldGWRzKfEgrcFBX8LUJDUVDxJrz1FYEoOgrSpW6oCFg5ARpkznAUaAFmIkri2ifOO7xw9gVJzxi5+GhabghtzpWapaioTjltNw8Dw0JJIwXKG4i/SAlr3MlSObK61LQlgIbA7Bx5qWXjwA5lSwpK2Lsqn8rA0W4g30sWT/cK9XhUqzpT5vt/tejq/Co7+S916EBgBaLWyCuja5/lRZGSvfFVoKOLriLbt/76KkthN7J4bQqHf6nA3B950jZZW1Qz1v4Y5X+o9eI8FuPFRm933bN79IQd1kHh56arf8APvCnPFYg8X7yMc/Iy6wmLpMy/f8AB++bj0WYZzFpKxKyCwuV78rIQZqnLopFA5Ot+J1wucgHCOua1yq9VPGeOD/8ey9k+OvLHng3kdHBtw7W/YcAXlon4rvITGcexFRSOjVodIRgmAIML599QxzvkjZiSrmj2J/iHHh4sgkURC8
pMYNuHEZXknxAKTwTYr3ojCIzCPIQM43JcQkN4umvbu6RFgDWelPKrJZycYoZE3QVXMeoLRRMtX5btqU78OuFPAk4FHTeGk5ZvxJKgkMvTLEFEII2fPWyVPZcMZnIEPoGa95AZ45jBfg/bMIn4iykjuwdTw6j/5RECmADeVt2RQzp0tqzURlqZpIyH0OGsucUMkhEWAIIWQRPlTPyhAGqXrxmUThSgSJIUG+l+VP/Eba20Ah2LyNkWLoci6by49Df4lRCAnXxRR1vV70DK/LSAb4Xvu6O/dn21dPjv/m+U9PjrcrPr1/rwsFW5R0lkqpLPs+6PZNuPYIGpfQVw6h6WHHaytp1KJLSSkWljEau0Kl7jKnqFLBCcEr+zl6p/O63zLq8+IaT3JIC52pJSOI4aFSc/SKX/DNS4gV7sXjb2jBarXcyg4r5PoBLsDdmRIDsIFnl/HsiXgYuInXngrOCMA1YlQi7m8SD6qyz7CeFBc8hruDSs6e1K5YLdpu1ZYdbowPeNbwvBKs3bL7DIMoE8iB9TbdKSdkuWlGynzAhaTKUIlst/I7ZrxRoe2uaUe+/KZKw+dNQ5zMOlTBSR1U3eZU4Omp0aw6unjT4hFwFlWM1RWCJyI7PPDz7fwZA1ceDrwiUwGQhAr8ctl3TuO5YRQKi17qjeF1mQo2h3JnAjBySuJRtGZTReljcIRMi1jTkoI7RWhqVozH8OSPfR/313AfALDVg1S/a3+rDI1rCqSlBy+u+sDLbEuf2QI+eAOs04o7g1KgnhVfKsWP5cItZKFeRDg1vg5sOH5VAPT0TANMPZJCrSzc2rzoO7sMxUs9Dgy/EH/TEsKzHwOneaflQzHvrJmV+P3DS3bdlq8RUV+w/q6f88Zvt2edfiTlXOi5mdHrKWGZMZT2lKalMzIgmLf8I5BxQmQoFspBSMRLlwYlQeHQNvSuJBMRlqKQgaSWqPDM4LYlT0sIfvG2zgE8KGEIbmZKNc5wET0rE5lMtKq186ih9wRVz98xW0yFBkNQqi262D35ILqna/QhhJDXshOgYuhLrZ1Pj4yI6p0ViRHxHmP73dHAa2xLBevknoYOFrAxIgV3KsLdkS+K95dYFaaQK+6ZnpqhKshLp18XgqcdYMq2EhTOQ3PMxegnTdFnZoKCtN28V0wJ/nZZhrKoqVAlgVz5u4hrGV6LrISECgCc1MXDczCwRV7O2e+afaHbr7HyrrwiPTIaHeDcBJtrRmQQw2JTwTLMVsBae4isFk2dxyQuPRGGYxRXPX5NMEIMuwEMWngqbM30oomyPMudHn4oHPixmOZsvX+RFNwvqx3fOD1XXgssU5kv7fkJ3QE2RzG/B++eTv2uiiGbrgl6PfvM4NJ5id9l1UBIYKrnt9LL/ruPf8Fd99vnPzw5XlrygvCP9145Ob5StcmXPe+/80nDhGtSAV1tXAcTguubgjtjmrp4kNhfWalXRyOX+4ni32oPbb86fENqdZG64FJ89YUs9t9IWZ+ElthbrJXRW/edQvwGNyz1slDhUY2fuFxyKkSsWcTy8g3/gP23kKsv6W+se0TCLgVgcxAX7/rRObph382iZ8O6WqV2zM39p+9sx5rie33BwFeP2gZm0Uq6TD0vCPrx7r6FFfJ5f+7+rp2bITOmctsPhpusghNoXjkdQ6VgNJILFvf8OSqmAyn2zgWxeNekzM5784fQpJJDZaIs3kt6IgtSL4uYNDUCnKBmt4s1S9dvJBsG2CIFU8e10cDPyw4qn2tRw0Vgxuqgw79737sGq6u2w3eee0soXbNBT0tIK91BJ4D/Zlzz/ViEZTqSWDrBnCQ2zHTkOvRj75pXNMZlrBGxgh2TLwwJdd9PIM8j7Lz508WvFrTkPKs8m7/EdGYkMoVcjUd66rVaenmPhar9/ZnlSMVF5z2zFWuP/EC0geFxiQG5+PVR2faLjgkGkaKXMbf5+5d8gsmdHpiWxVPfxpoj0WxaJk6lYPNUvTMtGMOaNp6CIpPC
mkstCkYIBIIzSQZIwTM0XhOj+Y79jjCIqVRp2HvXThIIHoKvzFAG5lfHmp64vzoenlH8TVkokkqNEvJxoxuJ94fhI6LeFezGiTwT9z3DA7p5cGNJqinEc81Lfhfnd7NquypN9FwmpXVPJRTGcEE9b4t0XxC73FgmItxrJTu3u+fjvZU6UPtgyeye931c2IufNc5LgPGMeMow9pE6WAlymuOx9za4VeawbhC9JJx7mmbL79VKvxS4ukEOEZF0/alzliFUMXQdyD+h34dL/AD/jscde+Bx8A/Pr9jvnrdNy1054yXb4VP7mOyiN5fHVLAkpAXd3pe/kLXfB74gJYWD0sTm0Grv+ns45mVJjx8t2Ln0QMcQShR4fnpnRfk8iPcsxxUPVRZ3NmUwn4dGjwytbVJRhJCc9NEDyFjDRYUGsrSgwDt+nuBxNaO6VvI+3VhNT+INlQg5K7wMWfHwx2GrRrJx3kUYSw2J9bJtKh8Cs7nX9OHmEUJOWhJm2rcXSStAH4pNBrXsctf9pj15YJvZUIxTNoV1kNiWZSai/WjH6unLwSDh+I4l9Ojk7RewAV44S0sFsy8UFp82TkHN+lt6Dwql1vn4jBdl6ORiUwAaY4UDbCQFeQ9uYlpWof4YpIQgP9Iq1wRua70vbmKzrn9Jemf6WzaxL7/uUb+fPDfLIJ/zk6TZoEbl+78DnoZMK364nQdGwlEcJyp6qvQ5JUcmIedPUhYbPX0Kbp6H5oD2BB9rxYIYxeinN7FDLbLKfiKuIS9WkqtfJ4r+4l2TKO0L4OWQ16DAUhxNEdXStXgolRxNWWdbPm+uwnYv3luXbvn1Pq5BmUAYKy3A4TExPeIlInW+k2NKeYEUeKVqcDxIwkZLBXFGPJxsQMufWv+QmiGEENItWQif/1+85CT0VO/xPLS4UimZgZ8bs7QtHvUO09LXrEPKJuJ7NPQ+SyPjTqYlFZlpNl6ucH4obIGh6Wi5GHgKobH+/vbb7rrfOvNRiGsftAzfs1ywcJcW5R0jG3IsRXkDPDKzuqwJeHzIHF6VqEADdA8RXqknYP2uxK+X9R9ZRx687qMJRSifzUv+HPVDx3otIHHiG3urL75PJAcAXT0g+SGewcGPuG1xrIt1iuIy/Dh+TAgeN6KeJsbFFZRZOqRrH+BjWShUqBQUTWQ+WYbbwvjMRalC1S0+AV4uVmxzyr1mGkRP+LiZRdPu+A8gM+bgwA8UQZS5JjPJBAuFDSIrcVtfzBH/l3nGGHdOZDn7REObcVXcE/i7Xlpj5hiVY+0LrgONP9MoiJRDwX2o/Aw8Ft1h6nSDPAKmggqaYniI0YsoqAmKzKvLtgOXAXR43PYLvDWApbgvAgQMsbNlWTAogDuuAlQsrLKuerpkcFHJoWzpbcg9qPSV5By9OmLNEsDm1n7LX7f71XiiztA5HQGvmXs9yD8NN89D49wvM7wlBR+rOygGWfTnKJ+1EjnDh5zrhWPh0ME9MyPdQ04HudYeeehAb80GU7N5649tnvYlsWbpDrjohjbvv7ny0F33uw8N07NQ9J6VZWAxf/Dk/MmxenEcN46ErVj6YShMy5kmQ3LAyrT82iQmTVWJ4bp1Xn7fP5t7yN7bKDEihrHjtpOq9i6j2Rly/k04l4b1F8cxJV/JOJ90Oqunl4F6P3hDBQ9up4oAs6QZEhINnNZtpGo0NhN197oCiyxmJl/t4sJiyDF2SCT+0i2/UFoXkBWiqY54//66H2DynaxUTavPZ0Xr7tp1LOwYQgh7CGOlRfDnIIDZBxpy4gZR8pmUAWFnt+Grleaa7pcYGs2SK4KI0CkRcwjQpJeyD3fvF7EGHddHQniVWGE1JPg8VdLzrPiMEEME3OywLcJbAqW6tuR32TxecojJzrBrCCHcu2McJOmaAHGBsUmJd4Y4G1f/R5VKKmxSgJS0ALoe2WZ4NlmXQ/D9FanVxWexAKnUBas9tuOep88KoXo60ZRW+madpuHp+N2X2ujtJjB5Ijw2
ylXFxlCYQhp4jse6JujpLAl9iduHoAxp9g/lVE7qcdEbESkTwpIwSDD5qHHOXXemimryQ++54e+WaiZo93bU5R5vqAxANpht+v5mQsgYHp5pywvyTMX2tkitLij6w0U/ThmsH+7ZOTH4uN8qCSHB3065lbpspDyIYIMT2s/B8CR4VqCcklW2LJslOUh0EnLylmL4bkLw2J+8WEk+LizPxmbiafT9dfRMaCYFY4VVgNiGmlbJbD0hWDx6lZ4s/3CmjT89som9UPYukgk2iEbX75JkmZ1JtfTx1BbAcAkZKTsS/8Z4anZFbwNCLAa0G4K3PlUYOfez7B0MoZG6QAX/PDRHAIjX65yNx/Cokp6N4R3S+7uFrHUYE/BNjiCNZRq05ha8fyz1EIJXJpQ7qgKvzoO2od1Xi14R37xkmSaqwB91TLPVzEIyI1MhUZwOwcKqzLkSJeyDJf8e5BVR4Z5CXaz8jn/HwaZtCvSUjaQAafOKvdjCXXnH6ulhPnXfpxCmVozkPDRiLVzRTlkSxOIoU/HyZ6igfdb3Cz0+NJpV3hfh0U8PBVDbxiYLbxLfKQRPJqqYsQzuqcYe12pnGB++XS+Zu0P5rfoIhdGjf+WS31QfPoPmXBXBAOVlsiyEgsd0RcYr+gHG9UxKtqhhxJZDH3PfV0dHdQsK1UVlJrdj9fSxOQb5/IvvEz8nLd2OI+y72HNrWyAXPBsff9W6GbwHM6AGdbkQ7zH0mFyHB1GB7opeooN6kkLMTUYVO1+4NH6wOaianTACJmEmlmgRGVeXV2yDaA6EbRRuzGzeD8bUuTi1VAGe7cKQ/v2p6Gn/uM2DfSWzx4FxNaUTv8uI0Gb/E7D4orwlX2aj4sbMvJQaBJh7xaN4RV/HgV44zj31PBLsN81psUl69eIVyMK+zdN+2t+juGqCOZfxDx/EuEyGohidq5kGvNPxrolq0SbIUBiax7AiiW3NSMX1SQqpzO14S901JTkkADQhbKWKjAun4VQSXquvANC0utxOb447dA6B/M7LgAFLCSBYwftsVFC0lmLtKcDj49MVlxD8xpcTcDrDa/TaV59INhcwb9XHXlAdv2pKevWp9/BPAKrtQUHdLHsL/bND49SpiUeU6+z5T3DdKx50ygzEjJRsmQEyMRX8Db0zNCTyh7L+MNfTXdlPnEIiCiGdBTilCR3H1xHuasUrNfSwqRFAOIxiZpNassKDDiL+I4TgPqi7hrRM2eiY7qyamOccsT+o7YcQwvF1YAHEpesUKpV5MWSAkdpG9G7IAHQQP6/scvPw92BmhX4nwZDpBa/u3n5uE/vMcnyAvlIFr4jgK3I5AKv7HtFPErcZiKImAtB0rmjBOGXjwli6PySAeKko6DjRKxjn0pyX5rKjCLYWUDGVOFUgeQ8l4ySfDxVKLfzJsK9uELw/jdSSUAV04G1nTakQQhijBIpmk9xpWKbJlRqU9JGfOI8ahukpiofn8C9s3o+u+PVOo4BkgzPl22CihlZc54eX8ewExchVVQ8e2DmV/iEWjxbyVAg9s8AvqKWrHoSTeyRIZZ0H89AcIzg2Jk0NP3zdFAb1zjSvmOZP4zeEEAbgzWH4trQvpSswzBoqceSywO0cvOmzVBg6oYKjrb/qB2kA5YuIjB/v+tISa1WbENsNsd7RWKKk1/eWfA5sysOmlAgizk2SVFyNRHSIGq5aRNfdH7i20nPfx+Re2/y+jY2m/ruK6IJXIyGlC32JcuvmnOIgE1pylhZifsqdEsfDo64+ZkSRFTmEEIpIXeNC2XtHwFb4IOU7SaqD5QqRAUehG+mA7y9CKAdliMy/GvoiLkUHwIG0cn6R/uLFhyfHOz3T5rTq+QRWw0RcZdyclCZ8WmE+Pq0cf48B8KZKFEXvBfE9ulDKz4m+94NBwOZE5AhJpbubmMhzWFqCzRV1PL1UTwghyhzrF7KA8VAehZ7CpdtS6wp4AsUX06PkyNLkHRk6KtT9pGWpBiU3qxdNGN9vmkXz
xpKvSdKHx4eZXSGEUHjbFv9QKj47TA+syKmkzjNuGqnhB0bl7HOAoFfFzY/fpS9JGLmD4sl7EnZDynpYsrGpfuQ7mVl4kXTz1OkKT1JR3gj9wTw0vBLn81iqnpOPSvcJej013ZyhjaXbNkbtcz50RCNDyegogycANys2JKkoKKMESUkKLOD81bUn7rq9gU2Ie80Nd+7VS9snx4OxrZ2HWx78lWpgXkqh5zzKOyhelGzhjIb0L/kJp3hdNhoWmkVLygdWndcSIAwjKrCdsnHhR7YBtK74uURjIYkEWFuiwlPaxSTU9NkYQGWS54M8NiH4dHai6JWKn4u8rVgJWmiaOj/hOYCbxeImLkgFJweHXABZqdM1AGBaq1cfvoN3EhzC8dAGcr1o2n9b4sAd9DeJp0IIYQiFZyRkZjOyaFbjAaBUZFSpPHoNQgzKim7yxUO7Tqt7F5F2evianwdE8fP+8+jhieNI0XnDcJda8gsPTCA2rohiC08b62VNJKulDOt2KBwxDmybAPymJ2XU9/OmvGTCRuts3d8yD88S6vUcDD0Il6SXlbJXqAYfmzmYve6xP6OhSeMMvm2quyTqYqWF9XVKPhUSFgo3CfEx44J0FhQsVZQCvToocTFQ0lEYdYrlUi6ak+uUGwabcFx9wJfZ+E4uLV3CEHHZmJH7ye98dqzJReXQIceZhha5HinHu2timMHDrCFg4nRSQv/hIgiI8fUEUXv30AyETCFewFXAYD4TyogZsgnzR37ddi8g/Cdzvb+GMNZRglJDGFbOT9osnp06lAwxyHwmENW2hFwQZSGWf+Kt2qObtgEcOY9gfH9/kXIriQoPtcCspJYRlFSBZT9Sym0qHYLWphfAhUDkA6jB6USje0zT2eOqsWtLoignpoSuZgVUEbClRVIz2DxqJa+VlVGX6EnbNgFS9ocQQnEBQC8ZYfI0MEX9p79DDa726RTwIfixPn4lHoCrii8b3c0Rtmlkiyl/T9syMD3ZXrxH+aW1OIKxlHjFKPi1eOjxdWzcsn+lY8KwvdV4zUW9RPTQMUSrJT1cCQoRbNWCKSjq4fmFa49Ojre7sFiPfAG1Gxu7J8eNoVApfNXia7u7Puc+g7CTq6UlrK9peEunUrttFhNy0rAVFZLuTEGAHBwZKAwO8TwkKwwhhDwyZZRLKRZnJM05dAtfQLp/SY3GL0G/vRXJ8GG92EG8UavGAz1elDFq4LI/FYzMkBPDIxo+SzLeKf/VaCZmr5QTYYD225eNh6chAL7HXQujfLRlxIOFBW8sVEE0e1z2QnJGrh2hceC6cgqheE5T8OKosuWwpIKHoyeHMojZ0nru8DUpLYElSKN5KnWMak/gaVpOsOqkJZeWwHiIrHHWCumg1cIpb0PgSppfGh3tS1DEb7iR0AFceq0LAnZDp7TPg8l5x79HHhVyG5dfLKef3xyCXygaEmIYKyMenktl1ExBp+4PvLW83bK4W07Ml3Y/PiugcWi7LS0KFSrclJWATTPLPm/qgaGCUjj255qXTnenhuD7y5GMCUHkPDRaim6cRThSMVr/gQc/HrwJnhzpWrLNkyBtIkMcR/wZgmR34f66dojvKhb9pCW7a12U9ObIXob1s5QCnzV/Fsu+D5rMBInwPsFjACtV6Sqo+OeeS7YHBWQCfm+MeT8TAe6yVGWAXS0tADszXfFiH9ixei7iWqYv8qmVYK3NQaNc56anxhGN2jiZEkKI6JZUctiHSV5VLUrp2KBJfSHAcWfURsq52LHCFujxp4fnWtkD57oQfiwcHYIH9tcqtuYOd31H9hDDTsvGzFdOK6AZCS2c21oUMcUYeUbmHg0jydSIIwGO0MDQkSKwBUeAC2yiwgJ6wA1/ESD/izP2SONL8yWVXIuhnlHNP46p6OQtiaQ7I846EvCSiwdK7HewAGGJ/3dF4yRWIonsjpNcN20qbGoZrNXNpeHwNsGzcl4um3Qcy03SyPQaCoaHmAotOzGoWp8fNE2JGl/0
G9DswDax4q64UInhMaM9UhHd8dLIPHAlGdQAwuBQyVbleR5aEQUlXXaZpnxjEW5/U0oz0MMlcp8bBu+v85J9qMoQDYYUQCp6HRWeTkO9f8DUNSW8umgfWy2ieGjXa1SXkHV465EHb25umEY8kvpyAemzxT14DVcl1IFyKArm7Z2xDittA4Ct6xYCN7UrHFbkz/KFrR04mdkq1ad+QEkYmRW8RXp4uqRWxY4yNKnsxMtqzUvWbwThq2FJeazEgNVnJvTJaRZCcPJh+QMz+ztXvCKQ64D8Txh8uU+koWwp8R2fpZ4JRih0j6LHh7L7L44uuetYI/H+sQfGlvJ2rt21Nbew6l3ifeA7x3t+3RYPEWrNxU8WGmT5moSbj20d5w4EInHZFkxKsrRovHGvVOWQY5Fv+DXQYpo6jTUtfNw/3aD5eS0ZtEwcgiY3wOgr7yNmmVAOQD/c0e+zsLLMQcZ7k6rgqmVAi5Y4nZWP/QQ6eNPMPA2jMExGt6sOAAdR48JkylQ+EhaJ+5Pd6yfHl6revVFFiGu3570/y2DsLAviURWnz9v2M69NpIllkD7gLTsIP6nmzs1DyQXLhseLKDJUFp3yrCGYOWhU/nis84Hhi4x4XB0vjPJ54G9iqRSsunjXFmDzsldIyBzuiNoihX1hQX3kP6D1DkF6/nfvnTUg5rMuSC9l4ZJaoVD1i7+HMFlaMEIjhLG6eRgtqhwym0RC3cXn1ul9sCvnjv166J7DRjjw96DcUf4bF1rHi7XP+z4o72BdqYdtdLrCo6UPXEh0/iA8TkZy/S7c8gt48pp9yEiyqEYVm/iqvNKDdPwW6/T46xiqYmgtBA9N4LhqkgoJWVV0prBtaOYR9zbO+zeWPZC/hjjcVsdnadFjeWnVjIU7n3ryQmb6piTrdwxv/6Tm5xczHj3QLx6BPT7j121q174tVfQDUHzIfrX/q2HsvGoazGHma/d0h0gIXjdRKEtSSyYedEX85MVI1gfNdyR4AheWkPdahuJx9Lothom4I51HIEJaB3eneA64mbCDnr/nQRX8FvU+vCgAlKh05RtqYRIq4JjF5RiqYlHREHy19NWi9wOuFU2ba0lqcBGdQrbmvDDfjo/AFC2ptRmwMHurNz77SC2gOI9gCD6M4zhH5rC0BC12934yt6f5mOvkWl1X3PvjmMhDCKED8GYkOxFu/8ouPKBVPzFHNXpO/T2WV2xtKoaHSjoV80bea7kPkZb++oYnT3sA61YB0+57EAKeybx0jLOi8PRZxBMp5SOx3GYAPk9ysnCpmBYE3AqajhFqFul4xhEghhCflq6pupRJ8xjmZfaVAwSf9zKM3pSBsOIX4e1XZYgeSyouWkG7jHIDo5pksmJqOsCrrB2On2b/+Pfwv+PYkj1/q+vxaeWs9cmzXU80NkVB3XPnzdu/cMFngHgspsb1+MLxoeICPNXtjv+Y/K79PVryk5bezFxDlVY7XvvQFKWD1+Kz6VS5d+OBT9PQF2XcX1219AQPT/0RtO6rYBO9J9Ya0s6GUg/o+TdNyjotTcaQSk1WvETsZN1YmOLLMFZJMA/0SunGwgXG+ytGhQtK04QHPzTBP3zLx3r+sHvz5PiVVauxQDBzCCFcLdu57x1c9s/GLBlKyIznRti4Jk+9MJqAaTcjXCVUclgRl5WgQ/AemQhpIEPGGrfNnX7ui6Dvv6zGMCwzEdQrSRCjYtd68ACq13P1A5sf279k66O0L5sgFnyp7e8/gVekuIcHXPVjnmvG88eMQW42+Mwv3Ma6TXaCljdK3nOazyJjRDqIoOh2QYojUhSkEG4W0k4nKISTI3QAACAASURBVDqiyJDLh/dTZR44h6l4mkqfmTDQGlxOWUlAulJmqGCeqYL1+XvkJFUXMql7JsxdYzZTad/kSH9Fai3FFN4NIYTiAYg0s/o7KJewVVX5YzJA9ZmUPcDzJoxCiIyhwqPyh0Nb2ZZQzAVk6kFx+ezxpr8Hblpf9IKwHWygD9u2
VjVJhfNtOpaNGVQTGcF+0XB1NRG16jmA9zNJZpigtt1EjIDCHfubIUV1RAT8XX3s8YGp81hzGKeIsYA5Nzq9Qsup7YUxPNrnjMcSLc+UsxBkYifgEOhaVO2ZIRV1C/OcVnRvIw2eMolkgiF4TEVf7k8j0r2vvCOtAU25dB4N6cgllJDgxF7K+8VQRSf/e5s/ced+0LA48UrBu5GJEXpets3peFmqzoMyfCbAagrZ8jYtW3dZyKImlobFHChaZh29OsMEPpt5aFSAk2LHxM4oaZYrZSILef9duFpiQIAheNC8lmzhuf6yDYQqZSTWixDBHZoUyV31c+pwYAoPiQefdrxitAxP5GHfe3+GENQzwZ1l9kxYEkA/ywmHDjJIivvCK7WOjsUGlJNNYJSydZCS9P7uOShsUvR36uqQxWMAmZH4ooJZw6OcLxG+oTlozIYtHpiQ1ExWzkv1ABO3pLwt/H7eMcLvAqO5syk1HSG7uSdFaici1V0zschvRaLcEPzewwzqtVVv4A7Af6YJLGTML0EZovERQnB16CYyIejFYcg6hHjSypRmaTG9X9jN6YVKKZs+i0djP1QqmcIxFDahgmDoykWYZL6wPtckgeJAW6LCM1ixm3KjC8Fb8LTs1YvjtHDVpkmdT/ZFLT0AV6KGnNIJeIgiOpbaonLEsOnmwQyBpArYdNdOc/EDsCDZKuer9jJ/feWzk+Oi5Dkz9PVg4BGUFXiDnvd8bOI+eB96XRS/k8yYHIR2lMDx9O9RBXP5Y5sI2+9rdVI7nIlSU97COIHccR7d967cCksKyLzkvCGmJgTvHegJGJbYH+LONPWcrV30QsPhTWIA4SGEUHtg92xd8e+Yr9qcmooFuJgHfmgUr6Fy7n39jCdg+3DP0m4zQsY5WDzd2zgT5lhyhIwWhH38yPpkuGaDM5JUXaasK8MsQZmaFkuFhxuE0mZ0ztnfKx9rNfYXw/CQ32QeuakYCRjAkKp/6hdwf8nmg3rSmR2roSQqjVRClBWfRkHpwPc1DfSN75p7vnVNcDTYQ3QcPKZLMpsYXSVppxC8NrbxPJlTxQWwKw/sRfpPhT0f86265d9xCHykJimwYKjLklPcNslrD/1N+J0R5ZugZeyHSkPSWwUMZeA3bdZDc8W/hcaA8lAhJEkt2cNzOi4vhOA704UoEgpDVp/GxxtJBa6uX05sdW05pmV9RygedKvp/R2hlAj3lU/BeXDVPlrddNzE9B0ZLqArPwSv5NzqmvvznYrfIJ4ODfPwoOOZN+kZUpAymWRLZdvEOqLVJ4WPWIbCKXMynDu/aErOUIDJOQhtVZQYsqQV8kVclV9WYzV3Bz6WlUTFRT1+2ZiaMyH4748L9YUgLmn1eMNK5bM1jNB4FWDepmi5jNLIA560bFG/u/r05HhRSC/7qP683/eD2Ti2jiwLoHkIpWRKBUXSxolRUDDyaAG4GqSNT0teQE2LIFITDw/du4UDyX5cAz8QjIeMZGJxrh++7u+x8MnpIDXNYKJsmUf2cc5Fztnd91eiF/+saYiWc5bGQgje68KwjBoBVLyKR1IqBetg9xs2fyNcO0wS0qK/CX1P3FUGoVHNyr123bI38lKjrjNCyjoKRI82vJE8Qf23tkRUWPA23VfhgmNsDAXxjo4QtpquyEaHtZQ9lgLa0AlYUFyVw/Qwfq8fQ8l0GFytJQj+pNaFF6dtSFR4sp14QHAO4QvKQ037pJBVMBrjoKyhoV4WtrForVSG9P4seaHswe6exXhl5fCmPZAsnMqGyhZhUJ3GaxOVtHXQOQCDbvd97JdKzlLehxiedEy7uL/vhcwILsg8eHjSh36gyCsSKY0Rg+WKxLjJYZFQPDQv+CdadC69M54n8aW1OBD7me94acgaPUm8QypwyQlF9nEVtrT0tZ8IHuc7lp/7Odu6AcUi61+kXLCXVB6eC1UbwDFB97JDbFbMRfW8663UzTXTgBuSzl5bMAHfQh2SdNOLqyzCU1RwQvCu
fafUiIcngKFZLdZZGWzWS6pV2uF0yfoq/0hqGyUoxbPs6YI6wivCR89hSMt5ESFLldTPhbREqSM1iHqxXEiPBXsF3O3Y9BdlJ8WlrNdXv++Vic45m4utcxrGtGMlq22fxzyCorxZ92yTA5RbUWxODdXThzAe1OBoQaYHqV7OeZ8RBX5IADJuOV4VfBrT9oUNegIunySAvj8WYDLORY1BjM0j6Ad1YY3eiM/eTGrJIa1NbJCSqUGeFYejSSjkpSEnxll5j74IF95TN4gyqkaPqv4dqRUyrZeKSwjJChsHlTFjuuVCSHY1u1CYIJr/8fZ7J8e/tHLP3kNG8WzJNpmdvrhhMXvJwhlCCEdYHHS1qmchjlckBI9jIhZHwzh0vQfpR05y7eNB9nRlax5ByzWE31oIV7Qveq2D8yEd4aay46Io98QGOK4dCQEs3bKJeXRDSYDssLINUs1rgjvAZq+g5XbbHqgKDzP/8vhQzRAkbudcxQt+4oAysiae3rc4H1Np9R0HG/ZshrBCCGG0DA8MlRwxPhiCSi1LCi7lghhTTKebAXuR5DGI0CzEVEvXOlDO6zCH1dKLyNJiLSrWHgzBGzZKs+DoHkQnpQfehWnEsKRRrhupY3mGYqRMv1VkemWlNAMzg5TzjQpVBdxUD7a9N/7V85atWJXEFK4rFtv9we5Fd13hoXXC6Jp4f8C0PBVQPKMXZByfSC2tKbBxk6G/RyYB+uCMAPysty6XUe+SfdOFBuHZVA8P9xOtPZfUkkNa6KAo/40dk+lVGS450apP/Vt3NyAo8DPNq6fhqAuF9OW6AXNi0+pVZYWYh6EAcel69UyS6t6wQ42d1u/hj2/6c+fKpiX8yx3L2Pr6qg9p7aLoHOtvhRDCGKDPg/uezKp2wTaaTpupzL6P6bLXfuT3uEKuO3IdBHOk9hr3TBFoVIC4Zxb3w9w1zlPOh6HMS/IQaYyZi1zxXhQGLLaryQDNyygeqsKAUUc8W8eEocp8Q0JC+zboB0UfjrpUM2zGTs8WzLOmKOKYlxer3q2XT9v773T979I1lFFpY1JJYdwUNqTxou+Ewo79bngOvCXCTOtwOm1vkTm81kjXO4DKFaS9C5ao+DzB3T473V2jbn6Xlt48PQz2MhvLNjD8lBdFn/3ZXxQZDLlS2/K7IEH/3BAHcg8ayiqD6d2kQpkXfF1nHaVAxDB2imgCepxG7RsXtt25dxctBFwW78Afbr9+crxRNuDL+po3Fo5LJlx0G6LSoclRuQNQj0QyHq3NAHdgODgEH75VbI4rRQV5rxne9FarE6RPzCyMESWxZCknLWOU1F4Yw6OeG1dEEspEXjqBL8PUvRDikfOq1Dj3vZLikf1YyRGRuuY6Vsaa5yKU4Ugpbl7OxF7H91KN8/BtsL6O/QivosOWV80E3Or7vG7W2Spl/VS+c9sAoOvXDtw5st/m7tlCGS76CeT6VV37ANdVwCQ7krFw1qzUDeqhMHDJU7I4Qbh4157VvDx/Lh4K1UwC3ijHEJSWheAGJjV/aCEzfV0tIY6XslrzHN38ip3KkNW5mCAAP/AKycOqKdWrJZuzv3P5Q3fdBoBb/9O9b7lzxLL1BbjI0OsAQm/WlQVOBUgU+Nyb9l7DXQyOpNmyQHLnvFdO8sA1jbR4KBpxEylJBnChA1kvqeHp94xisuy4ffbF8QpfVnPKBRIcht65EcrghMqIAkkvTvOihC6BAeEx+X9CCGGIsEeE6ysGcNyTTCyHc2nHr4mIhwq/I7nsGwte4Xk+sLWkxXbfWrJr2+gQBT4TppArxs/Lte/6Tjh4BySbVPSPvYc4Q4VHvScx3v4QfBmVAezuJA4/3UfjuPOU9ZryMFJTLaEl8/AAXKQCt4QSIYzHaqogNXfFGsS5opTu201WcQtz4g28cyPUHtt9GE9WMFr7HLR6Qf5TSYsjRQpBlDfJMnPgbOFN+FdbN8Jp7WzNawysz9KQFN8zl22mbW/5TigA+U/rUwUzw1Hq
Nnf9j42crMsh+LIT6ilz99fUXdyT9dDmEaDpCDex0KaymVGAa5YW+5PYhRDiq9Fr4cmF++A7WZIsrS6UUrje1XNAK28mGSNpxP8HEuPPZ+zZRwhN7cigF7Eofu3cLXfuxw1T0ssShu31kU3YMAmYF/AmCQurNS/1en2kmyO1lpXNQwih85o9WwuojulBEJ4fUu4PN1it1S+eIXg0Ks/cqZAanL5ZRXhLMDTzmKXFeco5puvcKfAiY+gtUKufIGYmoqi8pwxWZmzvgQfOSLK5mBGsYTEaNVq/bvMPzCPf+U2bGyrfN6tmnZTFcKUXP43OW696L8Jy2QTjTsOvOWLqIpGMTfvdZBteolW/dsYN02CnQjxY+wBMziLz4sibk4oWR7K6kVkdlyEdQghZRzz44oZxosLjXLoR4h87dvV/BE7A8IVqetTQSVAYUSZc2Ql/cpJAne9cqNgUcoIa5wJbvOV32ee/YLPcxR4TSA4V7OZQ6fJtr6+Yu4OZLGPhJtnGxL6x5gvS3dq1IGld6q50UJOFs1A3uBedNEmslrT09Drn/pS+cy7Pacz/56Q59mMK5gTXck+wDEkuXadQJWQ/Nq5Y5yzd9RPfVanGhFMldwYwr3oVpmXwqeTjgSMVlDy51/Im/cHATMC360/dOWLUegMB+sJbk14iE5x/9gxKSDfr7zGBYkOMjYa0XNVz9azg2knR/44YoWwZbM3HiqeKD/eH9OlrTgG9Kb7jHGJ4HAYSa7a67b+jswHDSfYCGrljGQhnBHBPUm4WeDp1XRVap49DdyPeY6aeU65pNYyf/ZbhbAojk89ZIbMswlhQnOa9Y0s42ajqw3EPeJBKBa80EeDcueo7KI+5OEH5lmlfOiumbEoIIXTBYF5+Jl46KL40fpWEloZieVeA54iosAq6ZvUNUVg0UpsxoSXX0tK4NRtdtcQraFgJCzQXqYxKlzruJ4KAwMu+WMSOTXndn6PrkoqYpjNWn9o9Dt/wbkZqnC7jZc9LHmYF5LTujrq50ajkrJdskk+1eCjKONzd9xtLDux1/Q99QHNGUBvmTK6hSpkdq9btNPeY4xD82KcivDR2vP5DL+12vgk8Cn5X9HrdXDRXzZx0BpoxjXPK+8RzlR3FEJwO1FM8AQF97TN+0S3cM69Fb92EmSqhmRZCtEJSNjgDAGXJj9d+2xSZi4uG59ksezcUs7Ye9PycvVAxTM9+16+5wwO7fwpZJynFna3a3FYm55A3YV/AcbfvFZICyAzTAp5ubNlCyK/4RZ1G6m6fxVVLXi5MYeyMpB7QtKzW4U+bergdFf8cFg+lXOcc04xabm6Kv6GnfphQZd0V8pZwCPFD07z0NTODWC8rHpoVkYN8j84ZofU43caIMN//ZNdi+72276DLZw20uN+xl2z3pSPRBo99Z00R5i1t+Wd3s6BVKdLrqaFifqi/B/n4ppoxHVNsN2nOaoLSGNEWYjh1r1n/vrmNnv7qi/OXJCo85KzoSny7+pgxUfu/YhnoCckKoVQc1bjGBpm5ogpVD2BO7Vj+jiEtjb9SW9RUx/K+DX7rLGL1YoWN4X5T7T9q/lt7cGh+2eOKaY43FnfddW1wNIxGfhISVZ96xWuVtA6nTNXVCtv0lGmWBMCt9FaosKDiq/fghN17xwt6Tmwi+r9IjZQvrcXg2jTzjE3ZwTlPh5L9w3AGQ2FarDAOSB5CCI1rcEkzJq5FGVEPqr8hNXMAVsxIlfKlis2xAxSyVV6RN+sWw2lKJ3xn54rdX5QVhpYmCFvNxNNEQkT1iGap5PSombrLQrtl76UEiKkKCAt1U8ArM41XQZ7kB4qkvceUllDKC471F7Fmv6xGmUyvpNKEjCETNMxL41e9WFT8HW4uElIhoFb7EGNEp6HIQVcKRJ0/CYkpfGd6dbRI89qGWT9LFa9E1/O2kGlU1CTke9wxQTtVmoUKQt0bqs3ZYQZrbCo1yVLMQJSw3hCGvRqkzD51Gaaqj/CVIyS3eF3H
sO3fY/frdlMt85TUkoMGLNwnL9ZhuQHJ1mHjpkjFIgTfKSUoFvS4hBDC/tvxlXQ5QRWUyXOMx2qKL91l6kFqnaPbwg47Z+O7ToFw4017aXVx/rWLd0+OM5iRnzQ23HUsGFo/6xfA845p+fsN8VAVrZPbZXvn4p6UlnDeBHcqZIGIJ0dSRyjDyb2jCg95kNS6Y+hq8c7pytW8NL5rHkSyQjPjFm4ErJrAzcLKwm69SFcQFK6ZOyTi4rPWf+R3y2ffsjmQbfnNdwx24o5YojdWTdJREb936Dmg6LL/jbUfu3P187ZYb7f9XD/AHGatq4ko+pMWvFfi/Znt4p3XMBg7fvG7sF5PWGU3IVB242vOlA5RyuOM7NYcN1V4MqeHU4q7fiPM9G19z6OHh2u9dADuIimpQvzR4sfe7fnkNyzuoaEqlnSgEpWUjqxYUle8GH2oHtZBPV7xYpZjBIMERY80C3/77Y/cdccjU1aUpZzhLvL3HIgHtIQC1GPR+rIlW7dDoWDI7qMoKPB76awaEhi3jBr29vdQ5DixqxFlES2Jh4yKJEOKGhaj15y6yM9riQpPfpkLXiqMx4Q2dKNLx1isIXgtnBibinhPqJAkAckqEjOmJ6dxzY5VM+2g5lYEN0JLDo8uKB4J2TZdQZSzjIO63v/48fWT43c2zSLeLMfHcMl0G0IITQjqsoQfOrcgSEAtrhvtpIbsB1ko3LBb6NOqz5wPnXOxrxxaGDe1Uh0tAMKNETzYHDRWGabrPWJ5U1dRh1/69OtC8OvHYRLUwcB0c/FYVp/ZA9tn7Nzha363ZLX0mQg21lbLPPe/O9iwCVHK2YfnRHAu5m1gf//5O+7cr6zeOTlOi9BYrMODtG+dXP3IKyTtK+Daqfh7UCZlgcUZLclAkaVcUnVT8Jym1701xeyY/iJA1sJgPskjIUKA5xHQ0M9a64rf4HyJkVN/8lJboYGwYwLAmuUBDr7m40WUAYrrYH0lKitJPF1KDNgBVmftR6YAk94hBD9vNFrB71Fy3C5C0QUoLv/m2VV3HQHHysnGgtFVYOPuCzauUrb3Z625EEIYPwHmVHCak1Wb+zPWhjv2wqUAuAMZxUMIYVLH3w3/O9ZWpBNEown0hmv5oDjFVPUKciIlQm+kJSo8Q5SNT9X9E7Od00NJ6tovIytPtTkCxhjuinoHrJM7G/6V2UERpuUYkrzaU78Sm5fjuyGp2jEb8UORel+wUlOy+722bqGrHzy9cHL8jQuP3HWfHVqsZ7XiYe9FoP07Qz8A7XMmqNOwUvUdGddWD9gkBqvSOeuvYwadTnKGKZU1mPOVW6uGNuehUflLAlWTe6ckfELENuiacNkq9CaJ140JUUpURwyIq/elFjF+tnDLT+7mK0hhld/ttmxg+qjPVqr4C5fBCP647ZX07x7ZRnC54qkUfthD+h9S0duvK9EW15KC8OEVoBwT0HIaniAFJpPkZCoEbDOsaSo52YqXLWkoixoGiWvKzTTvxIMMt0YI+dDagEVElBpMj0hafkyYKWk+RxrOHd8AlmWg1og1XXMuAUfCQGSqp2fz4oKPt7xaN3mflkyEf3b/zZPj9y88ODlernt5T6LZCMXKqnVKWub6DEB+ZmCrDBqsoGhuXUgJW0iXX1IHBuYB5WQiD5k/xz4nBjctOFhGCb5IckuyhweZGqOGfzN2Et1Lil+hxa5ZCj51145TIz9QxzftQrX6+buBgMy4eUwgKFrn4z97oi5jPK/+2N6LXqEQ/GY/kfT7PNyME8EanC1a5928YWrx457PuWQ6Y2voJSfpymlBhBDC3r7tjLMqskmEZG3hNkCwl/z7UwsfimvRXYe13RcODsebIIKECpYW05y3Rre2ox/QLCdMMfKPhBDC0Q0Q5okwIN0DYS/q4WHITDE8ZLh12ZTKBwTOlJ6PKoXyU/th+7rfxJeAddnYNGXlsOvd65817abvrTx25z48Nnfg/sBrtjdhBDzMGzBAs7lokIWedBDm
ekDR0VmQ0B0s1kxXSgkQjyPW8pibB/EQUotvCi+a1uMK09M3Wx3rhQf2jsfX5g/YxnlF4/Tohv+QlY9NDh686eUPPTydTd+HDGkxg0hTz2lsq3zmPsEQcE8IPbl5ajVwGhmRsiCMBEDhOVPybr2tnm1SY2Ek/epZy2T8ztPLJ8fFvF9/h/B6arZtCgaCUlRSyaFXJHXe7xn0iPaOvfWbpFMqZcznTTnZKs/pnfHv37wE2YioQJQqxY4VypLUEhWeEiiyhwUx+/sUqkDYi5eFSoiGnPqoYO5rKHnBVkEmVuOKn2gEI5O0LYQQG1agCzYEv8BmovCQC4b3V+vCuT/lNdbqtsNrSOuzpnlurtXMFbAs9bJ2UAWdYYQQQtg6NC2kV/YfwJpZqU2TECnR7BxYWGY1FTieU8WOyop6+hxDqtQUjEv3j1tAL7M5JR2K/izB8u4v+gnhapPJXCFRJ3EBA2ETJcnXOCFRwLGUJ4RDGJv/2S/tSDbxetEkDIGWdMOH4LMJb9Q8CH+9aGtiKmK0MTBZw6hPVkDRAfrVQDwwAS77FGpi6eRm5pdm9jgyQ2F5zgJAPcUGkRYs0QgWUwTLFce0LHPi4I340hXz0JZRYLl1CfJGhqt9jpgrf46GsoYoWJKClr3KGHoV1MPMuky9VZRUUVoIPDrf8WPepRIlU8XxjEHY/cvPbrrrVlfMcJ1IJ3x93TACXzmzdXL8RLyj/bpt2yMp+TQ6sg8vPfXnaKD1NjHvJ6LowztKyoUQQhhDocoJ7o8KKPfY5iXfWU0oMowAhSAZeY6Oxl/ngOdfYE0kKjz9IWsF+LsWDqDkQPPVopFx+IwQ/Ibm3FLycawBpAUQj1+J5+9xWjcu0xovPKfaaFxVau1khyiXc+2BTcJzNQ/We2/ZQlcjaPx/eeTrp5xFyu+P9z1Ka7FqO6NWYx+BHXo4tE7un/NKUxbx2PyxYHio2PBUQokIdWPWHtpgdDfj70/BsfKT+UtJYRyfFeGVbJLeRfV2sd80vOcUQ9xfNw+H5VBAM57tgPuKp02oUuCeJ5t4s28KSQ8yoiMemG+ef3hy/OGhB3hxU+D6CMED+8kjNeoJ0D4T/wGk0R+DcyTTE+GO+lyzqpqRAMgKo23qU/NKTa/b4E9ks2ZorbIVfIspQKzjQo/PPJZbOb5uY8S9QAkUuc41bO68OkrxUIDVT04ehU8gTKZ7DUNtjjLiWIxwpEk3L8qeBw+2ekv53ZzP37z+wF23XjCFJycYnh8cWCh3+9g8BRXJ0mIF9rSsgRSztF4VEBWM39IOgfb+smEngW8Df6qRNMTcZ9ayyqckBYU4NxrhivUhrk0jO0ktGcPThYCp+9mbBhDBTTz5mDHra8im4EikZrzOd2QGrnetwUK+l0jMFY0a50RCaxmcU+S/Wzh437JUAiZDp4JIz9dtpTxre2bMB0VYwRUjIbxR88jqAaTeV9c9idujtu2MTw69K2CCTqYmr1WjXTadVJ4u7kG5xeSKbMKLAD4f+j44sjIxEaWS/UqB1rw8fykpfPdpQmo4LcrWeckuIjBPVmARC5ueIBWwfN7qx96ne3TDdpMkfN0YuLy0hHPq920s91b8WNLDM4LwVdDyR/sG8vrlM/fcudvwbPZG/sXK8BTVKvashliiTmiIQjLeRBmDvRebR9l9PxjOm5n35ybrwPDA6lVvWBYFWhWTlhqd7hZU+UdshCoK89DGMXiNsmTbMlxRlvIyxcN4PjVXXw5JA6oYUlYruy8jCJw2mgnK9VJ9Gk89omnvTFppgk3/3rG3dh6mDaqwe+j3gptnDdLAQs+VvN+URoiDNw/VvWuHM8G9ZHBPp3x2/frIAMoykVAxw7czKeYb8ZB+/huZ5lRW1PhleIrhffXEOexVDPj/tPbCtbSCsrTSsUIeBsmJ5wapk6QLngC677Uwp3NfKT0+N4/EcwhbRSj28R7PhV4+JrtomsBMrPwQQ7zY26ueX34XoarmyMAz16relPvx
kanh1xf8ufvPbVHNNPywYFom+UjGUpeotAOFTcpCMGTi60cJcA+KqU7QbAfWssRcnWeIBGESMpuH5sJH+HyJMobDG8hyEiValRy2OGUownSOObz/tt8FcwgPUBFXi5gxfa1EfvgmNgjJ2iMObRl0CbmM18qOuzaAz3reLU/Onu2nXknfOGdCpIc6WzpnmYUS8fZgHUyYwaVOId5DykcQbN6qSh1AlN5w4ZME13uElHV6uodKgbSO50ZLJsxB2wAJ3MFbtkCUxZgygQpOCJ6dXrO73JrDVKyJQtVJYE2mJ5V7knK4UCnti6Lv1qMYtV0UHQ1YHxq1vFizB15d8GB9R0K7aFrBaBKP29KsQIfTkb2Ac5+e9MFZkeMEO0tIiySk/SMvUMbYDzJQQpS2hnI94uEmQJ1GuOxJ1BG+CF9bosJDIq6xdGxv3T6ujM2yuyn3wILvCz8NKabz8LJo6rkr7pnAw6MYheoWPA7QwDubvof6wAe3hUFz8b71Qfus/U7DYk5hU1crsqiulPwkf3/BeHhqaeuQZyO/CdRRR+hBz4Ng3r1gvvLjgd/8jnqmfWVhgWtyQve89U/+2PcBvQn9dVjOHbmuf/pxCCGMIGS0undcaGUeiQcjdPM/a2p5UznWxcpNS4U7GZWprERIHqmEal9DGJdQ86fto6RunAeb/kUcxEsUnsO2zan+OF6EnFswqfrhtk/pGyPle/2s33WePzPlaAPnCgLebBzZeyhp4JhJFrR6RbHL3BIxZQAAIABJREFUNhy7mWutK7Bmpc7WlIzKTPGVkNYYjMwkegwhhGnhdM/TSLCITAqZRyOA7PSuMKfKSHjgyRUVgrBJa4kgdBNDG6rgUFnR5Jkc1shgKX5dJWEIyZEVMWJiZNWbqx6kspK3jUKztJaxiZCvh/hNbf0lqYO1ZWsite7dzjNwULWvIswr3tHJc0yyZf+hNHaUysKFXp/DgJbX536ihT+ZWc0KC5OiH2vWMmtI1CepJZeW4AcJgygFhwsRybPLACpr2jgbC50xGyoEvziG4hbmRqNu0tZFu2dnEL970npOTDXGOd2Mk/gxCFTOyO7+HJrAAqzl1ayg+wEYOV/0GwQDXHs97+JcKPZwbP9/1PVaWeoI6H6Rw04YZfkt/jrGX3Ui02sUEdoMZ0JxnEf3PZUVZ/FFsGtYH5o2TqtG+rAHq5IWrHoHcigQqgYCc/sp0DXc4jw+Q7GOCfSVshO9FnifkNjAkhAhhLBesR3u0sqhO7fdtAnB8g4heKAkcUFjCWmlDuw9Zht+wlE+ORC3CGl6qpXTJNTsPVID/+wZFJvyExuc3qZXvEoQ/Ophm+VPl0maKcrNNIl75mU1yr4sLG9VGPj9EaOHYFVReFhUnGOpcsqx+0p4pbxrN52lAfqViBBxR3p/Z3gLpoT3ycF7OZSFS5xOf+wfcLZqApTFovPSWQdNe9hi1bv8muchdxSMHMNwrJQLjBApjcpwCzw/EvXh2qKBq0YiefXU2KWxRn1Bw8GLd2w8CYb/eS05SwvsehNhIJ7GcLoov8sQpFHqvmJWDzV31f6dwFLeEiyqvlTPzUMv4IarVbgzbqH4c4MFe6Aj2JIFS9BdJH4MK/jjlrd0/87aX5wcf79z7eS4LGlgFwrmGfp/dt5y55jdlRYz9RnAbwSDVqp+h24uU5vz709Ac+VRvOJIkO1EXO8Rzgw0jj0FuobW5qExVZygZZ3b1GQ0DMEKzZpZSEW6fZa8JV4y5JBB0luTMcH4lfZschciLmj8pO+F13QRGBUB+i4s2wS/umCKTL3glY7bzywt/fKGD8Nu1EwKMiQbQggV4HY6Hevwqgj3AXlzRGHTFPPP20wUU5bPUeUzDa/RTEjWsugvl30l5IV91MDTZIBp7sXSSyjs55GHh94Ol7qd4KGdJQDmK5KYwqwtGrzn/pU3/HbeN8+gKk2Hr56+1WnF7yJIOztnxFBhCSXZgOm9ur5ssnpBCiu+um5W+UCUoWdA
3zbh4XnSEC4QvNaBMOs7zI1ox2mGbxH6CkP/Hkx1nyrdAwyhrBAWMszLckQZkS0jGGgkEAzB7xNk8s9LPcLeOgz0L0BQm6jwlPMmLBuPfKcXQafOBa9sonQDarFgZwHgnG4QSTWa6BVU4KjzTGDxpaSDCHbWRcowXOEowV2Lb8lJzbC3Fg230xPzbTNrI/mfL3/n5PjhyEPPf9S7fHL8m5uepv9uV0hU0MhpQiwRi9OFEMJ4CZkmwhU0gnevB0R/vhHvuVC+Hpe2KUJ7AEWVSuoX4Vf4sloPIVtX9VyZq8mhI3OKikwEyOrIzU7/TQh+zqpHgPdoXAULcEJtIMXwuLo74tam4rzft8V/vuKl0vJFm1N3pexECV6dG2d8ynpvZB9UxHXPt/ykyi7AIOv7TqbCw+8eCyNztskUZd8HkyxScFWBYuYa+6ohnnDMi8GKeJeyMe4a+berLzSHaenunYgllXVOy14TR5JqhFGWVKCQPP0bfj64qu0KciWWCvuCbqT0MESKk+IeSgVBhedaxdC25yS96BYwH1ot/XrZfvfDhsWfv7bpKe2//dFrJ8eafLK4Zi8ylFIsYxTDnTCzUI0F3HNyINkS+XgDgcztjohRxoL6Qn81fh4w81VxsQv3TC40rv0VeXhIkR11PZ3OgpjzmFyv8Aj/zcz5znjC34Ph80jBuARAH12t5C1R7Z8tAuzEomWdmP6y7zoqOc0rfhCLGEXV6j/oG1D5jYJhcYpSbnwLVKSvl30nH6JTSEIYQgh1BK/72BmbWR8v6qGIYq7kn02v5pRW9bFkH8Xw9YTglVYV2vTYUgFQnp95aDNXXy7ee8m5qJ5HNs36pJLH71fPZlJBSSpbDgSo5SmYeKAKD9zh6YJ/AAHHW0c2L4cy9whovrzoBf8uio89b/t4cCFrC7QINPjihveNN45Ioy9Zk7AwC0cIK8mc4qY2Kfk+yB2CsFBCYRlYyMNl4CHaCRqJDHZc4oMaTHTtr34w0MtffsNnxFVOD8GHmdQIoDKhtQiZrciQr2YuJoWKOc5cLxrS4rrVNV08RomSrn8AE18K2GDUGN0G8eD7y3fdOcr8jaJ5DlTe//misZQvV/1LVsCLtdP0LvLC22ZN9pEMUFvxG+cxQtOZFT/fCghhdzJ+M07FYNk0tOkKGovsigtZ6p59/IqdjMNVntYSFZ7thnVYJB2figa8Oi0FRkKD7gnqnaEkCl+17OMmawghEMCuyopjrYWgUwIwfpp2bP0hmEOvI7QjblfWLFIuomd92xTutbyl+9XKQzs3tFTd77euuOv+weqfnRw3p15ZuVdE2QnJzT+GMnQ8tN8ReBpCCEWEuNTymNHCh9XQF8dSum+DyFT2EEIYoOK2xn6zMaHHvGRQzEOjYjAdxVuDSRlWnGNj8fC4bybYVgQ457paqXFZQzrvSfo1VIZruKdTT/yucPFXDTV2vmoPv33oQ1M7Bybcr4sXZ7NiUmqn4xUezs08gPYjpaVGp2iWFvu4v2bnNONsuGjnys+8stIBkD/XVJDt6fO5uO/vz5BWEAzVLCakNZHSEpRdO994wfoUX2JzbN6UwQnKfCRcza6IUATjlpgCtcdqQNtx/bH3Su5+xfqN3piol8KONYOru2YvqXUhyQVTw4f+9sIP3HXYQsJUOujHYND/j+u3To7/eeeCu+5//MrvnRx/0vcQiY+ahhHKC1En5ToLlR4NvBWwdtE66OmR96JdWLJOkVKKYYz1OTy0vaa/7tdtDpEB9f44RmzA/tRQYf//lVVLp7DpV3znpXugVqcQ0Uw4KkYJIQpuCpFq5sQ8SAXepCwth/0hm68oNdwgVFkhlTutl86GaPgJcmira6thUXAOI2iO53I2wq+W/e70eGxugvWMV2kvg43sfn/dnRvGpA8MB36mLS4gtfSBzxDLrto7T4ADmUkIwLUEQ1eVyrhaOeo2nodGYCuxYJGyDUlexIS5wt8NMAUi6bM4F6k5RiuJPFhisToL
aiDzuQgA4qLfgcYYpCKslqWStxTpwL277eflm+fNaqUnKIQQjqDwtI7teHnVz/t+K74jSYrmmHt1yiaU3nBGnjpjYqb+UEnQcF31iSg8cQBkDf0nzLN5aHHVr5llE4In/1McpQuHy3zmPcnhoiUoKJ+768ItA7Fbf2Qv3F/xW2AD3nlNrlj+xJSoo5veuma4jsVwMzJR2qBkPxDowAW4Qlrouos5D/jvzOzZivUkwJmh4RA8AJmZw4r7ZBp8/6kfjE7N3jErRsZ0enoYOaL44pVVTroACIZXHR30gmp5kKSWTDwIja2wKGyPHRNEdCklFUNULxGFNieyTjQyXuqHJ1nScUUvFVTMsgAqsFx6McZeS1AwAqUgqgY8K1Vh3fufH71/crxYsA3jtzd/5K67ljN31R+233Tn/p2yuUZV4dmEa/RCycIKzxr+Q1fL1il9Qb2TUO6oTOp4ib/Czd9f9YNdfhbviiaGh+5P9UjMRUMMu7xt62MsSjrfXRUN9kwkvo25T1ZdVebpYVU+vvIO6RggGITQjUkDE2UZhhGTP+M/4Pa+/XAAwOMr654s8+aiATT3lvwHPEdIi6HzEDzp2sVz1glHHT8h8lWTnCMBV85QCmIKzUuZllNQ9JRws3rfru1ckHR2hDSoBCsTcg9VtPtSUmXxfji1KfFg+1y8RTwPjYB6YmDa52RiEuskSQyciyrHXZ1FnFu8Kzw8wLmpYcx11bhinVh/LBmC4HrKN4Wb6jXUdJT1Ti9fA4v/v/z0P3XX/cMrf3pyrBm7z0bmTXmzaF7UuwPP9fLHBzdOjj/Z9udWF2zDPe74jZSFfqn8rK+IIYEkm1nd98+zXQDDn/j7j1bs2gwIN7MS/qMs09IS3M+5T+ie0UcSVcHrg4ktUeGZgCtj1PRPzEFQxDFthuA/TjN10jFKgmp95MIhoVsIXvBrWIHvxU7RFHLHRquKDBaYAw8muF3VQhtiAj1qeu/J3z5vAOQl7Pb/bPcdd90E7lQuhhBC+KBvccQfH3ue8CtVgJfQdes1P8mH0OoXy95S5wIogfdBC8vRgzcW9ksqkpF0Uo435FSSR/CltRQVCPt30tzW0hK0NiM0CGSBRchQlSYC+iJgfWAlDl6HolkUSw4CSslKU+Dm0LTxq8hCWUNNrI8P/Nzbatmgv7rslaH3Vh+fHLdEW2R9Ls7Ladm/5JPH6FgJVVHgEoxMAswQQhhcwEAJDqhViGdQHmwwW8We1b4sVP/4nYbT4oqHako15VgugU3+ZTUqOVpLkY17gyo19GBqAgAVD7IdN65qmRA7zqhiiEsJgj6oCBYTypXuQ0nvz3NNzOf/7pX/z123ljEDdCSL/2zWDNLjqQ36946vuuueNO0Dzq94bNyDJ6jNeNHztGzN7HcMb+088iDD/LIJqEzBf+ikZR2bve4nYxpGB4vozoTSpoTotmKoOA8AW43iqQ5OnxM/ryVnaYFVsdnxbgvH78Fy7wkCXAUzeUZYE0sXdXeVM82fIxhZFRl2LImoWlJnhc/Tju0iRFpNSFlkKExDZpME8owHPRPao6J95zeXvfl3p2+a/Cgh3/Pvnf2++/s20oq+s2+4IC1cd6FqM+073/cF75auCenEz1pKKvXSQo6ESIB9UXwPcQ4Espf8HjkXLY2ikXEZVSEIY3RVXL8Z63vNaowLBapFSayPek6dQslojlZm4BoW4r4MvjMtNX+o5Cxjh/jFjQfuuvttm9uf7HnAV/mMLZi6aLajHLIJJ/FxzfqqPbu5L9eBlye9Z8YaFZUQ/HdnRbgPSbbaF88Q+mdSIkuwhgaJA4qnD2DLt31/Fw7gSdTw5Rw0V7Yhhik3BKlDJxiexXsoLbHm+5rz1ie+KHCYP5KXhF6LqkgRA92VXNC0Yj5Zib2ZEYwXviWFqj6eGcbmSGL2v17/6OT49/e/cnKcFR4e0j9oMWrSM2gm7gjeWIKPw4pffzRwFCLv6popt90xi5SBtFPkTlzYKgRPEDys
p2OvI/dYUlKItkSFp16yjmi1fQiE9a0YklClxqGu5WnH1wkCs/9rQbcu3MLqCuWi0lAYtUAtJsfGhaIhLccPBCLDksdgug1pJpvf0bFNvDOrHmGahalAJeSRsCm/icqD25LzfaVomsG/bXlr4BW86O+ctTDZnZ7fgG63zDK4+qavcrjXNilbw5zQjbB/BOS8hA1p3SYVXqXl1FMg7Ry0KbyeBLxGqvlifRSfC+A1JosqBO8pmiSkz/IeWXHp0qtKZV4F+JReENEX0kiNHAu1/adHNnfOVkxju1H1FuW3Vu6cHL+iteGQ0bUvoA1an1nMsadPvKuMSkeu6gVDvWrz1IkTif/lSrZYh+LFdgq9KqK4z6yIyuljUYyAH4rgvGKKh46FqNLRNsg8mIfmYIJcywlcOyrHCVvQcyTupHzWjY7Gqj6bf7PI6FQA4i5LVOvXQS8g0+9P38XuM4KAq0oGziLAS1cKfk18CE/96zXDuN3ueFm9BDJZJZrN1JARPPRurikU/3AhnhhtAR7+RvCbagpZqoO2dBA8qYUdW99J/Hu6T/RWTrf4Kjuy1yySH+/Un5zaEhWesXMnC3U7XFbqJnYPSELEY3JR6x6L94STNQIsTGBJpoCnRaEYm6TaRrRK0gl8GKOEsF6lYj9UINkKRvwMzPY92QSooGzKCJOteVNcBmRsZnr806GXFgwdHPf9JGeJAJaqmIkJN12w+w+FHIZzZCLWHftr7YfgeHlnDklHujGTJRKugIKXVCIjoeK806K0r9G9tSce2Hb4up3060OyWiCg1FuXRxmHlaqXSkuF0wXuft9nFn5txUKv5/LeS1jGYvpQslCOhjbHxgBCXrvsi/I8ObDwcFFS59s9ExSsDTQTXpFpwb6zvOQlc+cQNP0lL/9cYUaMjZagSPdtvkRTcE9XeGaSrs4NI4kJ/qW1mCyqjNCE0OuiPF0Ou6fUAQS5JvD1uGfLfk6x6AxouR/T0hVDyL2Hxm8IQl+Cm76S93N2BI2wL7H9r5Yenhz/r7u/dHK8nPfrj2nvzaGfzxngLZckZX1wzTqFRsVSxQ8UMXVLFX+PHpSog4EQD+6Y8KLjQ50gbBrNoU7Aud7ZjC8jkjQntCUuH2I3is/84AyWbWFnQT2vFWZZcyoCFsY31B/FExqxsJxuHvxbNUlHWMiKuxKhiU2rDD7GmFTRnVaIJkZVwFidEa/IP73/9snxKhDwX1vxSX83S4bu+vbha+7cesHwOKoMNUAluo/xfNLzWKJX6mZt/KDvNyDypBzux9fQKD5EGQApLEpwWvOabK6orN64CkV6DgslEpxIBULBsC5zRTlHcI5h0hC8UCXPjy5q4hrISaGNelLhwC+sydl4STHo2T3Tdf+Or9VNiC9g0d3resD8X+7bPPru9LI793VgeN6p+Ln+IGsm4J2p3bMvPD+srdU88rtTZQFYM5SnSK956TsBzcJAsmYct49inHBtCuGu6gN/jy6o/lX+pYenu0A0FETBX5TyOfPQqExw/iYpLhXvRHZwhEgojEz4LO8gGymLvQ7EU0857tiVE2SM7id+bfpzdOTksNn8D/f/lrvunWX78D+484Y79w/f/POT428t3j45fjTwns1vP33l5FixdzmEaHePvKwetwBaxpJ+5Q0Pn2AIrTn0sfT9lnXk6pLXVnbbNjhT1L4qbYlXlYlMgoVilC/O2x2C7//qVoIrUVqiwtNs2ozN6gBDyaGS0LysmTt2nOSdybXtj8ZVL8DLSPmLpCICe6ud5wqMXcP7Sv9QAYrU/QDZYOMyiodKWjoXvTI5U5suSUbKf3TdwkzryODaFaKKf3NsyPz3Fj1WYgs1HTQuTMJCNuXr+aRhWJ9K3ptHLWdFQICX/LeMqjZuBaHRp1s+PVKJhkOmZSfUJ3tpDYKisG/zISlMlxbwNTFejevxqcqcU3r/1iW7UPlIGN/m8A+XhPgOIMOUeCaW1mwhrJS85B/j4w4QP/vWwm133X8G7qii
gJzug/jnT5qvunON0emMk+p5XK3aHF4QoP3Tu1C+4J0u1fxgsC7YRKtL80/dhYGVIEdPX8KwuRZD/0JsuHc6AlmLLDOM1Tkf5q4xzOT41NSyj3dYhtUfm8zZe9cLUN6fXhf1ILHKfPHQP4DKF/ekooSD6VhXqhFm4Gk4jQoEcTX/7ZV/4a4j/vJXvvaZO5fB7x4OTcl52vcy/GzdBMOjQ2+4DvrWd1kptpuq24DQg9uVml7pFMDHounXS3aPlZLX4NMX7f13tu2dOxkpy9KJd2BwL+ZYV7b92uEaOb7xV5SWznjdaMELxDxKS1BjS6rmqwoJqABCdxOuX9X6EJqKsvQiHpvRSU5fazj9OPhNlgpOCCEcX0d6HReYLFgutqlkWdANSCbMEELoUlvEvFuX9PU/6Vy39w3eA/NWzayGP9l/xZ2r503Ak6OhLyGnh3uGGSqX/Du60BVCNaOev8fSA4QQRVlxuBXBOJVgtXL+5BTQOw8NXcE1wfUQgg856ffSnR8pn0GyUta60rAY3qN9wc83ZiQqtsg9CplY+bL39iyJksN2q2nKxKNjE7jVK/5jamlbMG9kvGvicg7cUUWvJWzmTRZ0EeNTwcy2f+xDwIV1e396VQfCP0UZl971O9xkieyFQr+P+kDMflPsyJAM48LWHGYxg6O6FdaSzqW5aPxmvDu5aUIQb794yFsXbVw0lMRN0FGNSF/TCFXvML3u9LRFvGkJEQO2yDjDPiV0QBNMbg0MxKwcOu+VzNOyWTItdyChry7wH3lJNuiitAm51UIIoQgG861nprGNl/zcbg8K+I2XC8/37EMXLvhOzoLokAaU7vsu2zkBNM59Wh0dLrT74g6eZIUnA2KhmWQf8KU5gRQfQ2VCzzE5o7RvvdKVYohMO9NK6gS7aXNVfAloU8oR9ELron82gc9UxNpnfX9QyVH+Bm4DzYF3Ef7rZ6agrFYsJWy96K2/91YsBKD4nt+9//WT49+69LE798GxmYS0PDQ8wLFubHl/cGndBphu/tIn/lsa12PI3oJXipPoAyYJGRTz0JQl+vPGbMEQQmhDJ1UBnorZIELw3zxGqQPFbrCOmYZouSlQgGgRv8qCCSzWzQshhBbmaV2U9LcXTMH+9zc+sN8IHer/ffDVk+M/yvp7EIR/veCVobsDw6t9emxATsW/PXxqitLCil8vy2AOp2s/JxtE3xVbDL4xTCkYJ2YdkqskfeTfcYKK65mOn9CDs8pS+NOmSk1SgeR5aC7cindvXYiXAT2poUT5rBQPlM/0eg68c8N555UXy9WNI9dLQh06leNdbLqRNY3vPpMzLGZGLI5fr5p8LspG9JOhedk3M2Awl/IU9PCrEfDNzYcnx9/dvuzOLQKrs80sTHnHHDa6jmCEqnXbzW4/8e/FLMfCXRsAkoCG4OEO5S1xUjBAgZ+pgllCpQOdS0ktGQKH+yQR/rkKueraZ0qhCGZOGucSVE8QWBVb5/0DHPeLepdi7qk4HXofFFNBjoniNm5yTviAaPkLLwCVifWKF8w3apZFdRm7JsHGIYTwpw1z+z9sealHJactcUPSi68A/PanT3w2l3OAiSXKYqIlbCT9NeFmgvs+ic9INxbnbk4gcJyHRsHGMgIRzyOEtqae+/CePMD1DeaRlluBcaVCu4gitzQWlJ7d1awSS24Bqa9rBT9nWRBxA8Vv3yp4fqhfr/7k5Pizocf3/FHD8Av1rM9IXAaC9UwpPgXjtSuWyfKsqbUKrE0AHJ5p2AqptUqQNgFOMQiXCKusO1C0KEYpFjWdqpX6YoKa8moeQcsM/SivChsLf3bFYk8nsO86HiIa19IX/J1ukJQxDI90xHClt0CzGqmUKbEqjQl6eL5R8Om8S2nULEx5rWwxbUDHIbx/71Yfu+voGSKeLoQQ1iFs0mf9Ox4MzFB+7Zz9Ttn4GcZaKHovTg1y4bGUehm0baKOrtg+kd2W7EcIOWWJ5/zmnqoGJcdN9/OklpylBWEw
XfRaSOEAQiSGHTEEL9CzfeFtQar48RXEDRMq3apSk5SWvrh1uhaYF5xO9Zl92/EVP4i+YjUwG+J94Lcp+RZrjNRz3tJlSm4Vro83ix7I+V+vf/vkeLLu7/+Pj75xcnxZqF7HmMwEo23UfSeQh+fPetfcuSpCXBMI7a6w8w7yUIwkFZvzIGLBYk31yCU3h8Kdgs0RbirGDcOs85IhJ81SoFeSWDNd8C7rUNYLlXQCObXOHTPzJuJ5fH3JBGJBdqCHYFL80yPzUP699e+569YyBoT/lvA43Mzb39/r+ewuCtztjHlBlgveRb/ft7Vzvu4Vo1s7ZIVEyGlHTH/w5IwEnM3faS2t0Tmb0HkU252IB2m8Z4Ov3FQEpSc1hmfmUeHhXOdc1KKRwwV8r3w6yTnVA0x57appy3Vcgwpd5PRz8AOBQVC3b3vkgPPqqdFGBesYqOhn4kn/0765pZ4OvaL/mzUzXPdgnXyj7EHFP+6b115DZkfQOHXdstEQPpZaWgwBH3a9K4vUKcOuhIebSHTox48114HipJjVTcyhKqbOKfLiDp5khWeKDIa0WDhOI2dRONHYuFiTaNHpZdGJTKuh/tDPtMEiQYH+d63zyHLB2KtGmO3CvSfaLl1s3KiyslENFuJ7Pe1qmPiP+682/vXJ8QVodi1ZUPdBO54TV+h/A3Dot7t+87gK9j6mAmfEQmH79ZufuL+f9213/WzXNpLympdo/ce2W+s8GC6Co0EKLA6w7h3oMZ4q4qW1CrLNWpfsWIW7421R6Ab57KTcAD03BKuOxIXuwJvi/aE7n0R1qav+JVcr9veq1LPqACewXvbK8S9XDWx5btnOdWQ3/gsI5o97Hm17qWCa2IIUVjrESzMk0B55ZYU1vXY7fsKl4aUkPmkoeIVUOyFuinqBo3PxubX9I3sv59EJnq9sKhXp44jt1OtHGpb6/QRQ1ktqrmZhTB23EE7Bq8XcQ+W/rpHPm2L8ONe19hzB5H0QG0aKUWOP0jIhfRjN+mzuX08g0H5//BV3HcNdNUFd/1/Nd0+OF3DuVteXj2BBaPXOPGjYs7VAdK1sA0DaBs0cZgmKpSUvMw52UFBcEh3IwxNQZigvwHAXDpRbuMoJUJrK3pHlwOWDmPlxWku2F2DNR5hzCcrExhQhHsTevPSx1xIO3rENMlJ0D43KlVZcX3xgDxgs+cGvI3uFReHU3dm4bAOsYFt2tJPnGsLAd+s9lkDW91rFj1wR7g66O9dTfmhaU+s7lpL46T1s1b5b8J6hfzowHEUOz0rLTGMoTLNknjZtcBjeymYUDBWfRq00NWwLdxGyBL/FPNLokzSLob+0VsLG2td0ZApw3QRoufCcUh0khf7gmPCbUdpvlqrksL1RtXDRRs57T1bS9rtrOZNeE6m38kdgWl6V9Mcftk1bvNWQwqKLplXuIlXvJ7seM7BeswnS6XlTsQwm2W6foCbhT3HYDuEpwt+a/jsjyyxvIuHgFIgItdiu8u3Yc0/9dwghhOaVL2DOfkmNSkIqgROFWbMT4W6jZzeihLBkBOSsQh+oPyiTszNICD+QEBwjFJFyBlCA1MPP5U9m/YJ8DI1VzYDaQS79QdrmvWJsbh+bECrnvFXY6NoeoopMD4B9FgYfjPyEI3nmRKgaHL/YSIH8rG9FRd/fwnkE5RzlVe0R9m/JZuXYfBH6kuRq6TUD6/moAAAQ6klEQVTUtBkIdgYAvJnD8/jBIc7h6b/rNYGFB2BzBT271lAq76HC7Ot+ph3eZNV2/zumyFPj18XASR+xrs7ZsQPeyju6Oi7imeCkvJT3ZsOTsXlu/vsnv3xy/FtrH7jrqMi8UfAkFqyeuyesfu9XLFU4j8WmGQI/wgbUHftZyHDBMayG7p64cRAeCALQdOSUIrOZTUcX50QE2jw0n3Ybj1mixRcpO0HXuwAvHZsrwlgjMQjoYdRyA3HYuGHHv2R7ZON8qerNMArmjvidKYD/HELugri5fg0YHq0afbNg
CtUPi5fduf/tzi+cHL+7aXNdw7B0t59b8iY9wZbu/YdejrH0Q0pkHK3UWVo8Q0PO5wT3PZM2ZI5oOOXksWIwjWrge+rPn8JDBg0nP8UZRWNGQ8DElyl9Qnp4ev+q54cbn1I18L2ObsKoEmOEY6TlXBzZneKMkGBAGMH/8tk33XXXV03+f/LhJXfuxtsm4/9DFI/+s2OfebtZsXWgxIOxir40lmJ587rH3o1XbXAeHviwCffY4paf0Cwt1F8nGErChkcwjBWgjluSSkbnEkOUrcvhhVuiwsOMhmlDQlqYQC5cJNVVhwv2BVF2YiwAgpXkOpZ/j5BBYUyTiAeTlBoqwsKiH8noinvHuEKlIfgNgvwjIYTwd+s2sX9n/Qcnx6ycG0II/6T33snxzdIzd+5izh6om9PB1FZpUrpkGjNZ3aTk4RkdwoLoCk4Hgkld7x1UfFb+jFjv3vx5750FSEtFQ1pcyJoxwg2sIHwhFLKO30T2uSRG2DJ06ja4MWZDMRb69kMW+gwhhKULtphWxdXWx2K9BzDyP9r9lrvuZtU8NWSR/ek9bOHWhKjo/XOWhVJBCPjTA+/hIRO81g2iDuJq/uTVh26HSh45WkZxVRHaOcjDEfGNBX//YUKoO66p94DFT93mPyeNoVfCCr5ICr3bT8T745j2sT4U4zdA2NxRkoT4FHNNpOmgdqKSpzIpQxv3rwFe7DeuenjAAhbuN/76Q3fuAcoJsZL65bIH8P3hlieeZSvlgCcT7wz3oVEtHi9AbJ8WDmYAJ5IkxCQnGEIK5Kf8yzV9n9KL1nwFxojMewdaLrz4RpGo8NxYNfzHjxa9GZlFrJpWZE46MgOuFs0SIXkQC4lq8bik2Cmt26IvTRIyMZa0kgs6a1xCcjlWtqYg0kKMmBddXy8ubCODZO2Mf/gzdMpKxs5pwdFXC7Z5fK993Z3bH9v9J2Ji0rLuTu0lH0vg81nPFKytht/8moe2mRTXTHL0p7LTYoG1z8t7oB87F/0EZWoir1MLax4awxLM+NGK6En8N+Rt0Q3R4XYwt9VrSEyCKt/MYmXm0XjDb8YDMG//g2vfdecYJn1LAPTEmoWiWYfdurcoWfPtf9/7RXfucGBzpypA/odN2zVzCJtWC/66VscmSKfrhYtztxO0LApJaFkfjFYk/LAP5lgRqsM1W0uVh+ANui4WEieCgPxTMbErHU8HnhbZNQ+NYd6Fe3asijjnZRKeJxLmQLe5MhuaEYxwS6RGHYaW+wQVnBBUARJjBO+1eFtS1lEOydfS8nN2pLFptDWQwf7JkWXlcq2EEEIe2ZWK05lMkQ4+9POrWjVli5nDB1KP6/m+7SdpCdGmXdjKvz+NAo6herG5tUXIUEHvkgFBoXriaESqQpXUkqulZ02wpYV4y/Gl4AOW616V3r9gA6LaHH/XB5gwMidohYl3hkpIlOXUjil3IkXtEnhRuJFR+9R3dKEO8WB0e/HW7B80DKh2o2T4nucSw/jzprk1tbo0U9jz4pIiWJQ4HUXwV6HZrVW9RT9EjLffAEBTK0NjUxjIBsHFoG75ziWWWLbDlQ/mz5pNuzAHT/jr3DCo8E1I0XdsvJyzogA7oX3O34SlXviO33jlgbuuiMVUEU2fIc/W1Gued6BMcL69U/Tps29BGRrW/IK5A84RZRXfLNp8JgN4Z+R3wgnfo+I1whFS0XOP7P1H9QSWMglpjVbiPTxpsIwPFvFtPf+dM4Z5hZsqoticPMz/SU6r3rU5RPLjfZnZpIkd9FQpKIqKjOL9nF7IW0TKEgCovi2yiZnEBPLLPeihmgjIvPoECTKL/v79Nbt2t2cxyeOhV0hI8bCYkzpV0KiuVWyBN0c+XYxEsNmMn885YHMKZb9PFOH9ISpPlSb28aTpw1Z5ZCtqktC4QhIjOy6u+A2xd4gKDhI5Gi6B7gGQgb68Is+l1xO0Z2mJCs9uz2bGbEXYd5FbTz4JBUrROplIFWDGWVkhWC17KiiqkVPxmBVl
k8XK4YQX+eoUGXXTUWniPaSCg8R+JRsDoYS9sQ/Q/weLPzw5rsCq7hSkvEbaeHNqYh79oyfvnxz/rTOeeJBgUeIyihLXY1ZOUcpfjEanWyUzEQjUKjOS/TKuIxNOis5xfLkJHPlSM3PROB/66ww1+OvSbXhWqvFWUgTcTcWZbL5i9VK5T6viSRApq0RLhuD7C3dPjh+ItfB22bw6zalfFBPgWf6ybfMyJ0o0lXuds20scsWTjdKnz7fhWMCVqFg+kqQKB0y9hGcf+Y5kGIt4nhB8jazJajwad1KG611B0eN4iz6uRcLoBIoqj9AcNM5hYlkmwjhPpVFDtJyzm3/h59Hhq6AswRRIi2E5SdpDsAQJhh0LZxr3F11X7jsTNmAyGv/ysi+3wjWhMpjt28cWtmqLor/XsH1ZiTSZSKJE3mloMlSUNGw1BJVCthk/f3W9sL9mC3bPkXiaUkz2ECNgUoPC9sx+p54gGnyzV1+8emiiwvMYdTpmcVWig2dSVNQ4tfr6Hf87hj2o/KibfyAfy+YsL0XtU4Hmpiqf4mu1aCYFnoXBUdAaZbYSmI0bNmEfDvzGMoFrgGEE3SDO5szXqgvlv7hoaemf9rxGSFbmzYJZzlpjqwkf4Xbba4R0FRdQlJFEUyGEEMBaSwUnBL/xjjcTrFSX7RB/2ctq3Qv2XZk2NsSSvCyVbQGrOtZvJWGEB4ICXL1i0E8jOKA45VvTuikAf6HquT4YUr2W80D7ISo+cy7+pO3nHtPGtQjhvQMLqTLbKoQQKpAhtx6bh+fcpgdcFODVSUqtTWENTySkRfExkyy2zAbW4PAFFRfVR5Yw10XZiniaf9YiSvAiq2fOn8LDOVbcgzfqjGBgmvHvTkXp8d9UvJQdR8pzoNH7qgpJ7ZEdty7HvwdJQjW5pQkmefXa5oA7ouf0h00PTGZdxaykmS2CkOyNiuE087LZkPlcmZb3u2aJ7+36KMEuFJJM2e4ZUaKJxRFR7SIlgquhh39Ksk9JX+fzaCyEEEIaHtIBqFNyDf8sGpuzXjw4W1uiwsMNLb8vpeDREeRYyaqHB02zD9hYZ0XBaBT2kbR3zIWhuBkXHtq79IA8VwI2AkB1Y2GdJ8YldTFwk1n+VNIBUWfpwq96ANr3js1CXsrbTvig7TE2pNW/ueip+C+V7J6qKOVA970MH3NVrnvcNf9kSnbhjRWTAts7EpBFy8LdqXH4ElzMnYtKYgkAKOpTaZrwPDQqvQwdjcTbRVk21c/A36pgpwmcZXRL9ltfy8mfG9XgFsb77otf+C/bl0+O/+6yJw1ksc+mAOEZxuKceqvmsz2msTGbEL61Yt6lrkyWT1sGgvu114zzpySawHfGl0NcY82s6R5JvuIzgHTOMj1XU/ppBLDsxEw1WAh33WZTL6rRp2KO56XRSGHNPDFAR/B0lnb9h3APSSrjkVQT0XmKZZyHdaxVvIfiP1jnLiWxtTwKIrv3DT4yQKVmd+Cxr5cqSDAZ+3X1/T1TjjLY0FtiqBBqQl60EEKYwOu5uOoNidanS7gO3xaZmFBclAQ4oQ4WZdKsxrCMJMjg2cUdwetuksgpASSOU7lbCQU8pSXX0kIcjkI0hBDyxxAGOHXUi48HRmLTJC8k/0gCqZMS2hFkNhWei6NXEeNH1mrZ6wuhC0tEY/Wtq1ikO/Hgab7j3ldU8wUgW6Tqf7L+fbsHRjEjC4q8OZphdTwxrV65HZ4OTZH5J1tfOzm+UvOIMyL488Kvc4QYb/oYU0YmpIvhiiDpkLxZ+pjhGmr8mdb8FdPiBknLNpK1V+E5EY7IUnDWe5CNFR6BbMM/YLiKMdJ9E30/Qerz/Yc+y6l9xgRuWcBxdQDRdE4ReMm6OwdS460EcPOCkGWQbFC5St5dMCwQQfcKyL+8AN4SkTutjC3ICeRYquKt5RlSaXU+F8CgPJVzI4RlpyQ5rEt4DsrWTDJYM6PTFULlFUk1ycY2f0YA5x/ZqpVglDgX9cC4JrKD1n31
KTyWF/39iZ1UaAJx9jQC0uKNHy6jAKZyz2EYKk/k28gjBFn6dzZ/4K5bUWATn71q66oPTeN22mfB0Gt/c90zmN/aswhCVzwfUxa5BV5tVvPzsnzHftfb9Ps+GfR1DIl5SyMZQFnKhyvI/k5wzpDSRjN5CafoLWoMOL4lKjyTA1RlbQkPBZ5BRaBx2Wuj+SMbxAgVOOZMXJHOEDxjsjIukoshL24vvmNSlhbdsBGmaCwIDk5WQlpF6g9CGc+B+z+vfdWdO9z/lZNjAi+Vtntx2WDqG1X/AcsF2zzoJQrBc0Kcq5hvWLl2jlFaYCBYiSHck3Rjaoy7fg+eGl0M8O7pPKCzKc1nz6FsLz2Hxe7ozX1fVLbhXZSsQ8e0vOytPFpUZHWOpKV349eVqyGHedrbFEW5ZoP0ex+/686xPtS7Fzzv08c7FmY6s2Ca//Omd+H2WvZtpZp/yQGyN/msEHxmCFPKs3kJk0I5HLSFox5WJXE6uWci8tiv4pwu79qaaF7x50qk/cF4DusC8mQ0StLe08PTsQesHRhCCPU7AIp+AVbZL6sV4a1JImelh0SzDrOguBgsi+IJo5bp5nkps8Zssf+/vbPpTSKKwjDY0QFmpKLWhenClSv//y9w60pN3BjBRNOQIJZCcWgDLs9z3inqsk7eZ9sUZi733vN9TiVDKalTj3CuVGgP4G1WIctoghatjFGd9uFNnI+3H3MPnaKKB9FQ0h6pD+PzOFfrdZap/L+DlJ7X7+MzDpJUzLFbfO/NeT4TNN5H36RvFUeHbPPzH1ZUcvCMslY15MROAgaDOaIE+K5WCgkcLtX0uCdZ+aPCU6AsrJ7lvz3axKFcvkY29TL/OM9Q0Xqyk2RetFYvMN/kVpKbq4sjLs1eW+iSVM6IWVcaO6cAUjfzTUVPFl3X4t3ggDste4ducTnPQmHwBV2eJxAC4hVYNhFDu55kZWXWj52tlugOQoeNcNVd20dMtzUK4Sqeq1pA4RFBy4t6uMgf0pxiI8uu4/rUiIpw5s19gcoFO8fqO7GL7niaT+sWlpxqMqM5cnhwPh6u81pcv7j7Yuj1cuPOYhv/dyMz3jZXsfAnw/yM/a9hIv84y+Zyg0q94fMIp55KVUgzjb2+lZELrOLYa2LvkbDFTpLde0gq1p5QNH7YwFGiCKkHVyMCjgmsKlzJ5FNcNKtX+V24twfi8Xiwuzv8r1ExDoNVIXMfePI53v/XUwgzMR77WAsVYDQQ9oUohrjHOcFcBSnPknrJ6u/xheuXsY/Kn3mx2SpFnTGUJ/orpD487+I+Lls9leKw3p5lhZeybLVAVYx4AssLGKBqeGOvaJFQeYm74DEVR2kqDJu5lH3PO16nHnBVaMRqSTnlrcrK8Sz2ASu3Wy0IjoRR/0b/oKncxhhjjDEd4999QcYYY4wx/ylWeIwxxhjTeazwGGOMMabzWOExxhhjTOexwmOMMcaYzmOFxxhjjDGd5zf/g5drwf0kYAAAAABJRU5ErkJggg==\n", + "text/plain": [ + "
    " + ] + }, + "metadata": { + "tags": [], + "needs_background": "light" + } + } + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "z5KdY8IF8rkt" + }, + "source": [ + "## Build and train the model\n", + "\n", + "Now you can build and train your model. But before you do that, you'll need to repeat the training set preprocessing on the validation and test sets." + ] + }, + { + "cell_type": "code", + "metadata": { + "id": "10UI32QH_45b" + }, + "source": [ + "def preprocess_dataset(files):\n", + " files_ds = tf.data.Dataset.from_tensor_slices(files)\n", + " output_ds = files_ds.map(get_waveform_and_label, num_parallel_calls=AUTOTUNE)\n", + " output_ds = output_ds.map(\n", + " get_spectrogram_and_label_id, num_parallel_calls=AUTOTUNE)\n", + " return output_ds" + ], + "execution_count": 130, + "outputs": [] + }, + { + "cell_type": "code", + "metadata": { + "id": "HNv4xwYkB2P6", + "colab": { + "base_uri": "https://localhost:8080/" + }, + "outputId": "19e46c86-5f05-4bf2-ac06-1ea1e5f03dfd" + }, + "source": [ + "train_ds = spectrogram_ds\n", + "val_ds = preprocess_dataset(val_files)\n", + "test_ds = preprocess_dataset(test_files)\n", + "print(val_ds)\n", + "print(test_ds)\n" + ], + "execution_count": 131, + "outputs": [ + { + "output_type": "stream", + "text": [ + "\n", + "\n" + ], + "name": "stdout" + } + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "assnWo6SB3lR" + }, + "source": [ + "Batch the training and validation sets for model training." 
+ ] + }, + { + "cell_type": "code", + "metadata": { + "id": "UgY9WYzn61EX" + }, + "source": [ + "batch_size = 64\n", + "train_ds = train_ds.batch(batch_size)\n", + "val_ds = val_ds.batch(batch_size)" + ], + "execution_count": 132, + "outputs": [] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "GS1uIh6F_TN9" + }, + "source": [ + "Add dataset [`cache()`](https://www.tensorflow.org/api_docs/python/tf/data/Dataset#cache) and [`prefetch()`](https://www.tensorflow.org/api_docs/python/tf/data/Dataset#prefetch) operations to reduce read latency while training the model." + ] + }, + { + "cell_type": "code", + "metadata": { + "id": "fdZ6M-F5_QzY" + }, + "source": [ + "train_ds = train_ds.cache().prefetch(AUTOTUNE)\n", + "val_ds = val_ds.cache().prefetch(AUTOTUNE)" + ], + "execution_count": 133, + "outputs": [] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "rwHkKCQQb5oW" + }, + "source": [ + "For the model, you'll use a simple convolutional neural network (CNN), since you have transformed the audio files into spectrogram images.\n", + "The model also has the following additional preprocessing layers:\n", + "- A [`Resizing`](https://www.tensorflow.org/api_docs/python/tf/keras/layers/experimental/preprocessing/Resizing) layer to downsample the input to enable the model to train faster.\n", + "- A [`Normalization`](https://www.tensorflow.org/api_docs/python/tf/keras/layers/experimental/preprocessing/Normalization) layer to normalize each pixel in the image based on its mean and standard deviation.\n", + "\n", + "For the `Normalization` layer, its `adapt` method would first need to be called on the training data in order to compute aggregate statistics (i.e. mean and standard deviation)." 
+ ] + }, + { + "cell_type": "code", + "metadata": { + "id": "ALYz7PFCHblP", + "colab": { + "base_uri": "https://localhost:8080/" + }, + "outputId": "41f2133f-823d-471f-9172-ece3c0b6ea03" + }, + "source": [ + "for spectrogram, _ in spectrogram_ds.take(1):\n", + " input_shape = spectrogram.shape\n", + "print('Input shape:', input_shape)\n", + "num_labels = len(commands)\n", + "print('num_labels:', num_labels)\n", + "\n", + "model = models.Sequential([\n", + " layers.Input(shape=(49, 257), name='input'),\n", + " layers.Reshape(target_shape=(49, 257)),\n", + " layers.LSTM(80, time_major=False, return_sequences=True),\n", + " layers.Flatten(),\n", + " layers.Dense(3, activation=tf.nn.softmax, name='output')\n", + "])\n", + "model.summary()" + ], + "execution_count": 134, + "outputs": [ + { + "output_type": "stream", + "text": [ + "Input shape: (49, 257, 1)\n", + "num_labels: 3\n", + "WARNING:tensorflow:Please add `keras.layers.InputLayer` instead of `keras.Input` to Sequential model. `keras.Input` is intended to be used by Functional model.\n" + ], + "name": "stdout" + }, + { + "output_type": "stream", + "text": [ + "WARNING:tensorflow:Please add `keras.layers.InputLayer` instead of `keras.Input` to Sequential model. 
`keras.Input` is intended to be used by Functional model.\n" + ], + "name": "stderr" + }, + { + "output_type": "stream", + "text": [ + "Model: \"sequential_3\"\n", + "_________________________________________________________________\n", + "Layer (type) Output Shape Param # \n", + "=================================================================\n", + "reshape_3 (Reshape) (None, 49, 257) 0 \n", + "_________________________________________________________________\n", + "lstm_3 (LSTM) (None, 49, 80) 108160 \n", + "_________________________________________________________________\n", + "flatten_3 (Flatten) (None, 3920) 0 \n", + "_________________________________________________________________\n", + "output (Dense) (None, 3) 11763 \n", + "=================================================================\n", + "Total params: 119,923\n", + "Trainable params: 119,923\n", + "Non-trainable params: 0\n", + "_________________________________________________________________\n" + ], + "name": "stdout" + } + ] + }, + { + "cell_type": "code", + "metadata": { + "id": "wFjj7-EmsTD-" + }, + "source": [ + "model.compile(optimizer='adam',\n", + " loss='sparse_categorical_crossentropy',\n", + " metrics=['accuracy'])" + ], + "execution_count": 135, + "outputs": [] + }, + { + "cell_type": "code", + "metadata": { + "id": "ttioPJVMcGtq", + "colab": { + "base_uri": "https://localhost:8080/" + }, + "outputId": "2fcf46d7-5704-4547-e8f4-b28e93cf474b" + }, + "source": [ + "EPOCHS = 100\n", + "history = model.fit(\n", + " train_ds, \n", + " validation_data=val_ds, \n", + " epochs=EPOCHS,\n", + " # callbacks=tf.keras.callbacks.EarlyStopping(verbose=1, patience=2),\n", + ")" + ], + "execution_count": 136, + "outputs": [ + { + "output_type": "stream", + "text": [ + "Epoch 1/100\n", + "67/67 [==============================] - 10s 132ms/step - loss: 0.5685 - accuracy: 0.7677 - val_loss: 0.4034 - val_accuracy: 0.8249\n", + "Epoch 2/100\n", + "67/67 [==============================] - 0s 7ms/step - 
loss: 0.3422 - accuracy: 0.8647 - val_loss: 0.3415 - val_accuracy: 0.8663\n", + "Epoch 3/100\n", + "67/67 [==============================] - 0s 6ms/step - loss: 0.2520 - accuracy: 0.9028 - val_loss: 0.3160 - val_accuracy: 0.8908\n", + "Epoch 4/100\n", + "67/67 [==============================] - 0s 6ms/step - loss: 0.1916 - accuracy: 0.9303 - val_loss: 0.3530 - val_accuracy: 0.8814\n", + "Epoch 5/100\n", + "67/67 [==============================] - 0s 6ms/step - loss: 0.1372 - accuracy: 0.9525 - val_loss: 0.3112 - val_accuracy: 0.8964\n", + "Epoch 6/100\n", + "67/67 [==============================] - 0s 6ms/step - loss: 0.1110 - accuracy: 0.9598 - val_loss: 0.3256 - val_accuracy: 0.9021\n", + "Epoch 7/100\n", + "67/67 [==============================] - 0s 6ms/step - loss: 0.0946 - accuracy: 0.9635 - val_loss: 0.2767 - val_accuracy: 0.9153\n", + "Epoch 8/100\n", + "67/67 [==============================] - 0s 6ms/step - loss: 0.0748 - accuracy: 0.9718 - val_loss: 0.2612 - val_accuracy: 0.9115\n", + "Epoch 9/100\n", + "67/67 [==============================] - 0s 6ms/step - loss: 0.0688 - accuracy: 0.9741 - val_loss: 0.2760 - val_accuracy: 0.9209\n", + "Epoch 10/100\n", + "67/67 [==============================] - 0s 6ms/step - loss: 0.0646 - accuracy: 0.9769 - val_loss: 0.2664 - val_accuracy: 0.9228\n", + "Epoch 11/100\n", + "67/67 [==============================] - 0s 6ms/step - loss: 0.0605 - accuracy: 0.9769 - val_loss: 0.2673 - val_accuracy: 0.9303\n", + "Epoch 12/100\n", + "67/67 [==============================] - 0s 6ms/step - loss: 0.0572 - accuracy: 0.9798 - val_loss: 0.3040 - val_accuracy: 0.9058\n", + "Epoch 13/100\n", + "67/67 [==============================] - 0s 6ms/step - loss: 0.0606 - accuracy: 0.9788 - val_loss: 0.2745 - val_accuracy: 0.9153\n", + "Epoch 14/100\n", + "67/67 [==============================] - 0s 6ms/step - loss: 0.0579 - accuracy: 0.9805 - val_loss: 0.2417 - val_accuracy: 0.9266\n", + "Epoch 15/100\n", + "67/67 
[==============================] - 0s 6ms/step - loss: 0.0419 - accuracy: 0.9873 - val_loss: 0.2764 - val_accuracy: 0.9190\n", + "Epoch 16/100\n", + "67/67 [==============================] - 0s 6ms/step - loss: 0.0395 - accuracy: 0.9840 - val_loss: 0.2875 - val_accuracy: 0.9266\n", + "Epoch 17/100\n", + "67/67 [==============================] - 0s 6ms/step - loss: 0.0335 - accuracy: 0.9868 - val_loss: 0.2856 - val_accuracy: 0.9303\n", + "Epoch 18/100\n", + "67/67 [==============================] - 0s 6ms/step - loss: 0.0417 - accuracy: 0.9842 - val_loss: 0.3096 - val_accuracy: 0.9171\n", + "Epoch 19/100\n", + "67/67 [==============================] - 0s 6ms/step - loss: 0.0319 - accuracy: 0.9885 - val_loss: 0.3236 - val_accuracy: 0.9209\n", + "Epoch 20/100\n", + "67/67 [==============================] - 0s 6ms/step - loss: 0.0337 - accuracy: 0.9887 - val_loss: 0.3286 - val_accuracy: 0.9303\n", + "Epoch 21/100\n", + "67/67 [==============================] - 0s 6ms/step - loss: 0.0357 - accuracy: 0.9845 - val_loss: 0.3146 - val_accuracy: 0.9077\n", + "Epoch 22/100\n", + "67/67 [==============================] - 0s 6ms/step - loss: 0.0333 - accuracy: 0.9871 - val_loss: 0.3545 - val_accuracy: 0.9228\n", + "Epoch 23/100\n", + "67/67 [==============================] - 0s 6ms/step - loss: 0.0261 - accuracy: 0.9899 - val_loss: 0.3522 - val_accuracy: 0.9247\n", + "Epoch 24/100\n", + "67/67 [==============================] - 0s 6ms/step - loss: 0.0232 - accuracy: 0.9913 - val_loss: 0.3834 - val_accuracy: 0.9171\n", + "Epoch 25/100\n", + "67/67 [==============================] - 0s 6ms/step - loss: 0.0221 - accuracy: 0.9911 - val_loss: 0.4048 - val_accuracy: 0.9228\n", + "Epoch 26/100\n", + "67/67 [==============================] - 0s 6ms/step - loss: 0.0199 - accuracy: 0.9922 - val_loss: 0.3371 - val_accuracy: 0.9209\n", + "Epoch 27/100\n", + "67/67 [==============================] - 0s 6ms/step - loss: 0.0185 - accuracy: 0.9927 - val_loss: 0.4103 - val_accuracy: 0.9115\n", 
+ "Epoch 28/100\n", + "67/67 [==============================] - 0s 6ms/step - loss: 0.0176 - accuracy: 0.9939 - val_loss: 0.3697 - val_accuracy: 0.9284\n", + "Epoch 29/100\n", + "67/67 [==============================] - 0s 6ms/step - loss: 0.0161 - accuracy: 0.9939 - val_loss: 0.3608 - val_accuracy: 0.9341\n", + "Epoch 30/100\n", + "67/67 [==============================] - 0s 6ms/step - loss: 0.0159 - accuracy: 0.9941 - val_loss: 0.3661 - val_accuracy: 0.9397\n", + "Epoch 31/100\n", + "67/67 [==============================] - 0s 6ms/step - loss: 0.0298 - accuracy: 0.9906 - val_loss: 0.3248 - val_accuracy: 0.9266\n", + "Epoch 32/100\n", + "67/67 [==============================] - 0s 6ms/step - loss: 0.0360 - accuracy: 0.9868 - val_loss: 0.3489 - val_accuracy: 0.9303\n", + "Epoch 33/100\n", + "67/67 [==============================] - 0s 6ms/step - loss: 0.0273 - accuracy: 0.9913 - val_loss: 0.3359 - val_accuracy: 0.9171\n", + "Epoch 34/100\n", + "67/67 [==============================] - 0s 6ms/step - loss: 0.0229 - accuracy: 0.9918 - val_loss: 0.2956 - val_accuracy: 0.9360\n", + "Epoch 35/100\n", + "67/67 [==============================] - 0s 6ms/step - loss: 0.0228 - accuracy: 0.9934 - val_loss: 0.2775 - val_accuracy: 0.9341\n", + "Epoch 36/100\n", + "67/67 [==============================] - 0s 6ms/step - loss: 0.0313 - accuracy: 0.9894 - val_loss: 0.4389 - val_accuracy: 0.9096\n", + "Epoch 37/100\n", + "67/67 [==============================] - 0s 6ms/step - loss: 0.0386 - accuracy: 0.9871 - val_loss: 0.3137 - val_accuracy: 0.9247\n", + "Epoch 38/100\n", + "67/67 [==============================] - 0s 6ms/step - loss: 0.0254 - accuracy: 0.9901 - val_loss: 0.3331 - val_accuracy: 0.9228\n", + "Epoch 39/100\n", + "67/67 [==============================] - 0s 6ms/step - loss: 0.0157 - accuracy: 0.9944 - val_loss: 0.3147 - val_accuracy: 0.9303\n", + "Epoch 40/100\n", + "67/67 [==============================] - 0s 6ms/step - loss: 0.0134 - accuracy: 0.9955 - val_loss: 
0.3165 - val_accuracy: 0.9228\n", + "Epoch 41/100\n", + "67/67 [==============================] - 0s 6ms/step - loss: 0.0107 - accuracy: 0.9960 - val_loss: 0.3198 - val_accuracy: 0.9247\n", + "Epoch 42/100\n", + "67/67 [==============================] - 0s 6ms/step - loss: 0.0097 - accuracy: 0.9967 - val_loss: 0.3278 - val_accuracy: 0.9303\n", + "Epoch 43/100\n", + "67/67 [==============================] - 0s 6ms/step - loss: 0.0091 - accuracy: 0.9967 - val_loss: 0.3395 - val_accuracy: 0.9266\n", + "Epoch 44/100\n", + "67/67 [==============================] - 0s 6ms/step - loss: 0.0086 - accuracy: 0.9967 - val_loss: 0.3511 - val_accuracy: 0.9266\n", + "Epoch 45/100\n", + "67/67 [==============================] - 0s 6ms/step - loss: 0.0081 - accuracy: 0.9972 - val_loss: 0.3581 - val_accuracy: 0.9266\n", + "Epoch 46/100\n", + "67/67 [==============================] - 0s 6ms/step - loss: 0.0077 - accuracy: 0.9974 - val_loss: 0.3642 - val_accuracy: 0.9284\n", + "Epoch 47/100\n", + "67/67 [==============================] - 0s 6ms/step - loss: 0.0075 - accuracy: 0.9974 - val_loss: 0.3716 - val_accuracy: 0.9303\n", + "Epoch 48/100\n", + "67/67 [==============================] - 0s 6ms/step - loss: 0.0073 - accuracy: 0.9976 - val_loss: 0.3778 - val_accuracy: 0.9303\n", + "Epoch 49/100\n", + "67/67 [==============================] - 0s 6ms/step - loss: 0.0072 - accuracy: 0.9974 - val_loss: 0.3856 - val_accuracy: 0.9341\n", + "Epoch 50/100\n", + "67/67 [==============================] - 0s 6ms/step - loss: 0.0070 - accuracy: 0.9974 - val_loss: 0.3949 - val_accuracy: 0.9303\n", + "Epoch 51/100\n", + "67/67 [==============================] - 0s 6ms/step - loss: 0.0068 - accuracy: 0.9974 - val_loss: 0.4079 - val_accuracy: 0.9322\n", + "Epoch 52/100\n", + "67/67 [==============================] - 0s 6ms/step - loss: 0.0067 - accuracy: 0.9974 - val_loss: 0.4251 - val_accuracy: 0.9303\n", + "Epoch 53/100\n", + "67/67 [==============================] - 0s 6ms/step - loss: 0.0064 - 
accuracy: 0.9976 - val_loss: 0.4374 - val_accuracy: 0.9303\n", + "Epoch 54/100\n", + "67/67 [==============================] - 0s 6ms/step - loss: 0.0063 - accuracy: 0.9976 - val_loss: 0.4655 - val_accuracy: 0.9228\n", + "Epoch 55/100\n", + "67/67 [==============================] - 0s 6ms/step - loss: 0.0061 - accuracy: 0.9976 - val_loss: 0.4864 - val_accuracy: 0.9266\n", + "Epoch 56/100\n", + "67/67 [==============================] - 0s 6ms/step - loss: 0.0059 - accuracy: 0.9976 - val_loss: 0.4829 - val_accuracy: 0.9266\n", + "Epoch 57/100\n", + "67/67 [==============================] - 0s 6ms/step - loss: 0.0221 - accuracy: 0.9932 - val_loss: 0.4664 - val_accuracy: 0.9134\n", + "Epoch 58/100\n", + "67/67 [==============================] - 0s 6ms/step - loss: 0.1001 - accuracy: 0.9682 - val_loss: 0.5172 - val_accuracy: 0.9040\n", + "Epoch 59/100\n", + "67/67 [==============================] - 0s 6ms/step - loss: 0.0591 - accuracy: 0.9788 - val_loss: 0.3654 - val_accuracy: 0.9190\n", + "Epoch 60/100\n", + "67/67 [==============================] - 0s 6ms/step - loss: 0.0334 - accuracy: 0.9889 - val_loss: 0.3255 - val_accuracy: 0.9284\n", + "Epoch 61/100\n", + "67/67 [==============================] - 0s 6ms/step - loss: 0.0174 - accuracy: 0.9941 - val_loss: 0.3431 - val_accuracy: 0.9228\n", + "Epoch 62/100\n", + "67/67 [==============================] - 0s 6ms/step - loss: 0.0102 - accuracy: 0.9974 - val_loss: 0.3411 - val_accuracy: 0.9303\n", + "Epoch 63/100\n", + "67/67 [==============================] - 0s 6ms/step - loss: 0.0114 - accuracy: 0.9951 - val_loss: 0.3733 - val_accuracy: 0.9397\n", + "Epoch 64/100\n", + "67/67 [==============================] - 0s 6ms/step - loss: 0.0101 - accuracy: 0.9969 - val_loss: 0.3541 - val_accuracy: 0.9360\n", + "Epoch 65/100\n", + "67/67 [==============================] - 0s 6ms/step - loss: 0.0128 - accuracy: 0.9969 - val_loss: 0.4403 - val_accuracy: 0.9228\n", + "Epoch 66/100\n", + "67/67 [==============================] - 
0s 6ms/step - loss: 0.0150 - accuracy: 0.9951 - val_loss: 0.3882 - val_accuracy: 0.9190\n", + "Epoch 67/100\n", + "67/67 [==============================] - 0s 6ms/step - loss: 0.0092 - accuracy: 0.9974 - val_loss: 0.3697 - val_accuracy: 0.9266\n", + "Epoch 68/100\n", + "67/67 [==============================] - 0s 6ms/step - loss: 0.0083 - accuracy: 0.9969 - val_loss: 0.4283 - val_accuracy: 0.9209\n", + "Epoch 69/100\n", + "67/67 [==============================] - 0s 6ms/step - loss: 0.0149 - accuracy: 0.9955 - val_loss: 0.3324 - val_accuracy: 0.9303\n", + "Epoch 70/100\n", + "67/67 [==============================] - 0s 6ms/step - loss: 0.0097 - accuracy: 0.9967 - val_loss: 0.3717 - val_accuracy: 0.9247\n", + "Epoch 71/100\n", + "67/67 [==============================] - 0s 6ms/step - loss: 0.0163 - accuracy: 0.9946 - val_loss: 0.3632 - val_accuracy: 0.9341\n", + "Epoch 72/100\n", + "67/67 [==============================] - 0s 6ms/step - loss: 0.0103 - accuracy: 0.9969 - val_loss: 0.4045 - val_accuracy: 0.9228\n", + "Epoch 73/100\n", + "67/67 [==============================] - 0s 6ms/step - loss: 0.0128 - accuracy: 0.9962 - val_loss: 0.3544 - val_accuracy: 0.9284\n", + "Epoch 74/100\n", + "67/67 [==============================] - 0s 6ms/step - loss: 0.0136 - accuracy: 0.9953 - val_loss: 0.3371 - val_accuracy: 0.9341\n", + "Epoch 75/100\n", + "67/67 [==============================] - 0s 6ms/step - loss: 0.0126 - accuracy: 0.9955 - val_loss: 0.3302 - val_accuracy: 0.9303\n", + "Epoch 76/100\n", + "67/67 [==============================] - 0s 6ms/step - loss: 0.0132 - accuracy: 0.9958 - val_loss: 0.3230 - val_accuracy: 0.9435\n", + "Epoch 77/100\n", + "67/67 [==============================] - 0s 6ms/step - loss: 0.0122 - accuracy: 0.9962 - val_loss: 0.3205 - val_accuracy: 0.9397\n", + "Epoch 78/100\n", + "67/67 [==============================] - 0s 6ms/step - loss: 0.0062 - accuracy: 0.9979 - val_loss: 0.3369 - val_accuracy: 0.9379\n", + "Epoch 79/100\n", + "67/67 
[==============================] - 0s 6ms/step - loss: 0.0053 - accuracy: 0.9976 - val_loss: 0.3293 - val_accuracy: 0.9416\n", + "Epoch 80/100\n", + "67/67 [==============================] - 0s 6ms/step - loss: 0.0051 - accuracy: 0.9976 - val_loss: 0.3313 - val_accuracy: 0.9416\n", + "Epoch 81/100\n", + "67/67 [==============================] - 0s 6ms/step - loss: 0.0050 - accuracy: 0.9979 - val_loss: 0.3335 - val_accuracy: 0.9416\n", + "Epoch 82/100\n", + "67/67 [==============================] - 0s 6ms/step - loss: 0.0048 - accuracy: 0.9979 - val_loss: 0.3360 - val_accuracy: 0.9416\n", + "Epoch 83/100\n", + "67/67 [==============================] - 0s 6ms/step - loss: 0.0046 - accuracy: 0.9981 - val_loss: 0.3389 - val_accuracy: 0.9435\n", + "Epoch 84/100\n", + "67/67 [==============================] - 0s 6ms/step - loss: 0.0044 - accuracy: 0.9981 - val_loss: 0.3422 - val_accuracy: 0.9454\n", + "Epoch 85/100\n", + "67/67 [==============================] - 0s 6ms/step - loss: 0.0043 - accuracy: 0.9979 - val_loss: 0.3463 - val_accuracy: 0.9454\n", + "Epoch 86/100\n", + "67/67 [==============================] - 0s 6ms/step - loss: 0.0040 - accuracy: 0.9979 - val_loss: 0.3507 - val_accuracy: 0.9435\n", + "Epoch 87/100\n", + "67/67 [==============================] - 0s 6ms/step - loss: 0.0038 - accuracy: 0.9981 - val_loss: 0.3550 - val_accuracy: 0.9454\n", + "Epoch 88/100\n", + "67/67 [==============================] - 0s 6ms/step - loss: 0.0035 - accuracy: 0.9986 - val_loss: 0.3626 - val_accuracy: 0.9435\n", + "Epoch 89/100\n", + "67/67 [==============================] - 0s 6ms/step - loss: 0.0031 - accuracy: 0.9986 - val_loss: 0.3761 - val_accuracy: 0.9435\n", + "Epoch 90/100\n", + "67/67 [==============================] - 0s 6ms/step - loss: 0.0028 - accuracy: 0.9986 - val_loss: 0.3837 - val_accuracy: 0.9416\n", + "Epoch 91/100\n", + "67/67 [==============================] - 0s 6ms/step - loss: 0.0059 - accuracy: 0.9981 - val_loss: 0.3513 - val_accuracy: 0.9416\n", 
+ "Epoch 92/100\n", + "67/67 [==============================] - 0s 6ms/step - loss: 0.0088 - accuracy: 0.9965 - val_loss: 0.3317 - val_accuracy: 0.9360\n", + "Epoch 93/100\n", + "67/67 [==============================] - 0s 6ms/step - loss: 0.0187 - accuracy: 0.9936 - val_loss: 0.3797 - val_accuracy: 0.9209\n", + "Epoch 94/100\n", + "67/67 [==============================] - 0s 6ms/step - loss: 0.0458 - accuracy: 0.9863 - val_loss: 0.4049 - val_accuracy: 0.9284\n", + "Epoch 95/100\n", + "67/67 [==============================] - 0s 6ms/step - loss: 0.0402 - accuracy: 0.9859 - val_loss: 0.4151 - val_accuracy: 0.9322\n", + "Epoch 96/100\n", + "67/67 [==============================] - 0s 6ms/step - loss: 0.0212 - accuracy: 0.9932 - val_loss: 0.3317 - val_accuracy: 0.9322\n", + "Epoch 97/100\n", + "67/67 [==============================] - 0s 6ms/step - loss: 0.0124 - accuracy: 0.9960 - val_loss: 0.3416 - val_accuracy: 0.9416\n", + "Epoch 98/100\n", + "67/67 [==============================] - 0s 6ms/step - loss: 0.0066 - accuracy: 0.9976 - val_loss: 0.3828 - val_accuracy: 0.9379\n", + "Epoch 99/100\n", + "67/67 [==============================] - 0s 6ms/step - loss: 0.0056 - accuracy: 0.9976 - val_loss: 0.3558 - val_accuracy: 0.9379\n", + "Epoch 100/100\n", + "67/67 [==============================] - 0s 6ms/step - loss: 0.0049 - accuracy: 0.9979 - val_loss: 0.3675 - val_accuracy: 0.9360\n" + ], + "name": "stdout" + } + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "gjpCDeQ4mUfS" + }, + "source": [ + "Let's check the training and validation loss curves to see how your model has improved during training." 
+ ] + }, + { + "cell_type": "code", + "metadata": { + "id": "nzhipg3Gu2AY", + "colab": { + "base_uri": "https://localhost:8080/", + "height": 265 + }, + "outputId": "a006e1e9-da10-4617-e699-01c5b04763dc" + }, + "source": [ + "metrics = history.history\n", + "plt.plot(history.epoch, metrics['loss'], metrics['val_loss'])\n", + "plt.legend(['loss', 'val_loss'])\n", + "plt.show()" + ], + "execution_count": 137, + "outputs": [ + { + "output_type": "display_data", + "data": { + "image/png": "iVBORw0KGgoAAAANSUhEUgAAAXQAAAD4CAYAAAD8Zh1EAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4yLjIsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy+WH4yJAAAgAElEQVR4nO2dd3hb5fXHP6+GLc8sj3gksbPJHk6YCSTslUAZYbRsaNml/GhDaSmlUFrSQge0QFllE0bbAIGwAiEQsvd2th3HK7HjOJ7S+/vjlWJ5y7ZkWfL5PI8f6V5d3XuuJX3vuec957xKa40gCIIQ+liCbYAgCILgH0TQBUEQwgQRdEEQhDBBBF0QBCFMEEEXBEEIE2zBOnBCQoLOyMgI1uEFQRBCkpUrVxZprRObei1ogp6RkcGKFSuCdXhBEISQRCm1p7nXJOQiCIIQJoigC4IghAki6IIgCGFC0GLogiB0T2pqasjJyaGysjLYpnRpHA4H6enp2O12n98jgi4IQqeSk5NDXFwcGRkZKKWCbU6XRGtNcXExOTk5ZGZm+vw+CbkIgtCpVFZW0qdPHxHzFlBK0adPnzbfxYigC4LQ6YiYt057/kchJ+jLdx/kTwu24nRJ219BEARvQk7Q1+wt4amF2VTUOINtiiAIIUpsbGywTQgIISfojggrABXVIuiCIAjehJygR9mNoFeKhy4IQgfRWnPfffcxatQoRo8ezdtvvw1AXl4eU6dOZdy4cYwaNYpvvvkGp9PJddddd2zbJ598MsjWNybk0hY9gi4hF0EIfX77wUY27T/s132OSI3nNxeO9Gnb999/nzVr1rB27VqKioqYNGkSU6dO5Y033uDss8/mgQcewOl0cvToUdasWUNubi4bNmwAoKSkxK92+4PQ89AjjMkSchEEoaMsXryYK6+8EqvVSnJyMqeeeirLly9n0qRJvPTSSzz00EOsX7+euLg4Bg4cyM6dO7nzzjv55JNPiI+PD7b5jQg5D90hHroghA2+etKdzdSpU1m0aBEfffQR1113HT/72c+45pprWLt2LQsWLOCZZ55h7ty5vPjii8E2tR6h56GLoAuC4CemTJnC22+/jdPppLCwkEWLFjF58mT27NlDcnIyN998MzfddBOrVq2iqKgIl8vFJZdcwiOPPMKqVauCbX4jQs5Dj3JnuVRKyEUQhA5y8cUXs2TJEsaOHYtSiscff5y+ffvy73//mzlz5mC324mNjeWVV14hNzeX66+/HpfLBcBjjz0WZOsbE3qCLh66IAgd5MiRI4CpxpwzZw5z5syp9/q1117Ltdde2+h9XdEr9yZkQy6VNa4gWyIIgtC1CDlBP1ZYJB66IAhCPUJO0KWwSBAEoWlCTtDtVgs2i5I8dEEQhAaEnKCDyUWXkIsgCEJ9RNAFQRDChJAU9KgIi+ShC4IgNCA0BV08dEEQOomWeqfv3r2bUaNGdaI1LSOCLgidhbMGvvs71FQE2xIhT
Am5SlFwx9Al5CKEGjsWwqe/gj5DYNg5wbama/DxbDiw3r/77Dsazv1Dsy/Pnj2bfv36cfvttwPw0EMPYbPZWLhwIYcOHaKmpoZHHnmEmTNntumwlZWV3HrrraxYsQKbzcYTTzzBtGnT2LhxI9dffz3V1dW4XC7ee+89UlNTufzyy8nJycHpdPLrX/+aWbNmdei0IUQFPSrCyqHy6mCbIQhto2CTeawsDa4d3ZxZs2bx05/+9Jigz507lwULFnDXXXcRHx9PUVERJ5xwAjNmzGjTRM1PP/00SinWr1/Pli1bOOuss9i2bRvPPPMMd999N1dffTXV1dU4nU7mz59PamoqH330EQClpf75ToSmoNut7JeQixBqFGw2j1X+ndAhpGnBkw4U48ePp6CggP3791NYWEivXr3o27cv99xzD4sWLcJisZCbm0t+fj59+/b1eb+LFy/mzjvvBGD48OEMGDCAbdu2ceKJJ/Loo4+Sk5PDD37wA4YMGcLo0aO59957+cUvfsEFF1zAlClT/HJuPsXQlVLnKKW2KqWylVKzm3j9OqVUoVJqjfvvJr9Y1wwSQxdCkoKN5lE89KBz2WWX8e677/L2228za9YsXn/9dQoLC1m5ciVr1qwhOTmZyspKvxzrqquuYt68eURFRXHeeefx5ZdfMnToUFatWsXo0aP51a9+xcMPP+yXY7XqoSulrMDTwJlADrBcKTVPa72pwaZva63v8ItVreCIsFJRLc25hBDCWQuF28xz8dCDzqxZs7j55pspKiri66+/Zu7cuSQlJWG321m4cCF79uxp8z6nTJnC66+/zvTp09m2bRt79+5l2LBh7Ny5k4EDB3LXXXexd+9e1q1bx/Dhw+nduzc//OEP6dmzJ88//7xfzsuXkMtkIFtrvRNAKfUWMBNoKOidRpTdKr1chNDi0C5wVpnnVWXBtUVg5MiRlJWVkZaWRkpKCldffTUXXngho0ePJisri+HDh7d5n7fddhu33noro0ePxmaz8fLLLxMZGcncuXN59dVXsdvt9O3bl1/+8pcsX76c++67D4vFgt1u55///KdfzssXQU8D9nkt5wDHN7HdJUqpqcA24B6t9b6GGyilbgFuAejfv3/brXXjCblords0aCEIQSPfHW5BQaV46F2B9evrsmsSEhJYsmRJk9t5eqc3RUZGxrFJox0OBy+99FKjbWbPns3s2fUj1WeffTZnn312e8xuEX/loX8AZGitxwCfAf9uaiOt9XNa6yytdVZiYmK7DxYVYcXp0tQ4dbv3IQidSsFmQEHiMAm5CAHDFw89F+jntZzuXncMrXWx1+LzwOMdN615vCeKjrCFZG2U0N0o2AS9B0JsknjoIcj69ev50Y9+VG9dZGQkS5cuDZJFTeOLoC8HhiilMjFCfgVwlfcGSqkUrXWee3EGsNmvVjbAuyd6jyh7IA8lCP6hYBMkjwCtoXxnsK0JOqEWLh09ejRr1qzp1GNq3fYIRKvurda6FrgDWIAR6rla641KqYeVUjPcm92llNqolFoL3AVc12ZL2kBUhDFbqkWFLsPuxfDvC2H5843TEmsq4OBOSBoBkfHd3kN3OBwUFxe3S7C6C1priouLcTgcbXqfT4VFWuv5wPwG6x70en4/cH+bjtwBZKJoocux8DHYuwR2LYJPfw1jr4Rz/gC2CCjaBtoFSccZse/mMfT09HRycnIoLCwMtildGofDQXp6epveE5KVopEi6EJXIn8j7FkMZ/wWMqfAsudhxQuQOByOvwXy3Rm+SSPN86oycLnA0j3Hf+x2O5mZmcE2IywJSUE/FkOXkIvQFVj+PNgcMOEaiO4NF02Akr2w6HEYd5WJn1sjzKCoIx7QUH3E/VwQ/EdIuggSchG6DBUlsPYtGHWpEXMApeCMh6C8EL7/h0lZTBgGVpuJoUO3D7sIgSE0PfQIEXShi7D2Tag5CpNvrr++3yQYfgF8+zcTRx98hlkfGWceKw9Dj841VQh/QttDl
5CLEExcLhNuSZ8EqeMav376g1BTDkeLzYAo1IVZxEMXAkBICrqnsKiyVhp0CUFk50IozobJtzT9euIwE0MHk7IIEOl2y7t56qIQGEI65CKDokJQWfc2RPWCES3MbHP6b4yIZ7j7XYuHLgSQkBR0h7vcX2LoQtBw1sC2T2DY+WCLbH672CQ45/d1yzIoKgSQkAy52KwWIqwWEXQheOz51hQJDT+/be/zeOgSchECQEgKOoDDbpFBUSF4bPkIbFEwaHrb3mePBmUVD10ICCEr6FERMsmFECS0NoI+aDpERLftvUqZ1EXx0IUAELqCLvOKCsEibw0czm17uMWDI148dCEghKygO+xWCbkIwWHLR6AsMPSc9r0/sodMQycEhJAV9KgI8dCFILHlI+h/EsT0ad/7HdJCVwgMoSvoMlF0aHD0IFSXB9sK/3Fwp2m21d5wC5jUxarS1rfzlU3zYOXL/tufELKEtKCLhx4CvHYJfPZg69uFCpvmmcfh57V/H/720Jc+C1/90X/7E0KWkBV0R4TE0EOC0n1waHewrfAPh3bDN382VZ+9Mtq/n8g4/w6KluVB2X7T+VHo1oSsoJuQi/Ry6fJUlUHFoWBb0XGctfCeu6PizKc7ti/PNHT+mIJNayg7YJ4Xbun4/oSQJmQF3WGXStEuT2011FaGh6AvmgM5y+CCJ6HXgI7tyxEP2mnmGu0oVWWmoyOYvuvNUZYPG//T8eMJXZqQFfQoSVvs+lQfMY+hLuh7vzezD425AkZf2vH9+bOfi8c7h5Y99GXPwjvXSbpkmBPagl7jlJnDuzIewaooMb3DQxGXCz78GfRIh/Pm+GefDj+20C3LM4/KarJvmqNou3k8UtDxYwpdlpAVdIe7hW6V9ETvulS5PXS0f9P0OpNN/4GCjTD9Qf/NAepXD90t6OmToKAFD7042zyKoIc1ISvoMmtRCOB9ex+KYReXE776AyQOh1E/8N9+j3Vc9MNFziPog6ZBeYHJ+2+IywXFO8zzI/kdP6bQZQl9QZeB0a5LqAv6+negaBucdj9YrP7br2deUX/F0CPiIC3LLDc1MFq6D5xV5nl5YceP2d1Z/Rq8fIG54HcxQlfQZaLo9lFbZTIeOgNvwQo1QXfWGO+872g4boZ/930s5OKHAcqyPIhPgaThZrmpOLon3ALioXeU7Z/BvDth9zf1B6S7CCEr6A4JubSP7/4OT08yKYWBpp6HHmJFL2vfhEO7YNoDYPHzz8Sfk1yUHYC4vhCfZi4UTWW6eATdGimC3hEOrDeZQvYYs+wJd3UhfPqmKqXOUUptVUplK6Vmt7DdJUoprZTK8p+JTeMJuUg/l2YoL4bSnMbrCzab2G3++sDbEKohl6MH4YvfmTBGezsqtkREHKD8Nygal2L6rCcOb3pgtDjbHDNhKBxpQ8ilvKjj9oULh/PgjVnmonnJ8+51+4NrUxO0KuhKKSvwNHAuMAK4Uik1oont4oC7gaX+NrIeq1+DpyYRZTPpihJyaYZ3roU3r2i8vnSfecxZGXgbqsoAZZ6HkqB/dK+x98K/GqH0NxaLfya58FSJxvU1y0nDobCJGHrRdkgYbOY39dVDL9oOfxoK2V90zMZwYeGj5kJ/1duQNtGsC1EPfTKQrbXeqbWuBt4Cmprm/HfAH4FKP9rXGO2Com3EV5kvpoRcmiB/k4nxFW1vXF7u8dpzlgfejqoyI1wRcaEj6Bv/Axvfh9NmQ99RgTtOpB8muag4BM5q46EDJB4HR4sbe+HFO6CPW9B9HRTd862pZt23rGM2hgO1VaYp28iLIGUMRPcBiz00PXQgDdjntZzjXncMpdQEoJ/W+qOWdqSUukUptUIptaKwsJ2j7b0HAhBbvhcQD71JVrxoHmsr63tkzpo6ryJ3ReDt8Ah6VK/QEPQjBaaIKG0inPzTwB4rMq7jaYseQfH20KH+wGhNhbkr6+PloftSjJfj/n4UbOyYjeFA9uemjmKUu0rYYjEX0RD10FtEK
WUBngDubW1brfVzWussrXVWYmJi+w7YexAAMeV7AImhN6LqCKx9C+JSzbJ3p8PDueYOp89g09e7qZxlv9py2C3oPbu+oDtr4L+3md7tF/0TrLbAHs8R3/EsF0+WhbeHDvUHRg/uBLT5zGOSjEfvy4Ukd5V5zG+h+rS7sP4diE6AgafWrYtPCVkPPRfo57Wc7l7nIQ4YBXyllNoNnADMC9jAaFxfsEfjOOwRdKkUrcf6d6C6DKb90iwf2lP3Won7RmvkxeYxJ8Beeqh46C6XEfPsz+DcP0DisMAf0x8hF4+H6PHQ4/qatgLeueieDJc+gyE22TxvrVq06oiJxdtjzAWh+mjH7Axlqo7A1k9MuMVqr1sfwh76cmCIUipTKRUBXAHM87yotS7VWidorTO01hnA98AMrXVg1EIp6D0Qe+kuQEIu9dAaVrwASSNh9GVmXYmXoHvi58fNMHNiBjrsEgqCrjXMvxfWz4Xpv4asGzrnuP6Y5MLjoce6BV0p46V7e+ieHi59BkOs+664vBVBz1tj7uRGXQxoKNraMTtDma3zobai7vfkIT7VZL50sV5SrQq61roWuANYAGwG5mqtNyqlHlZK+bniwkd6Z2IpcQu6DIrWkbPC5MpOugHsDuNFeIdcPBkuCUMhaYR46FrDp78yYw4n/xSmtBo19B/+8tCjepvP2kN6lvlcS9030cU7zPcgMtbLQ28l08XzvRh/jXnszmGX9e9Aj36QPrn++rgU07a4i3Wv9CmGrrWer7UeqrUepLV+1L3uQa31vCa2PS1g3rmH3gNRh3YTbZcYej1WvgQRsTBmllnuOaBByGWv+VHbHWbgL3dFYLsgNhT0ruTNeGLmS56CSTfDGQ8FJkWxOfzloXvi5x4m32y86yXuSTiKtxvvHEwMHVrPRc9daWZkSs8CW1TLXRxDkSMF8NqlsPgvLW9XXgw7vjR9fBoWl8W7x6iaC7tUl8MXD3f6ZOChWSnaexA4q8mwHZKQiweX09weDr+grldIrwENQi77jLcBpjtfZSkc3BE4m6rKjCca1QtctXX90YNNVZkpEln7hunTct6czhVzMP8XZ5VJiWsvZXl18XMPvTJMz/aVL5lB7+LsOkGP6gUWW+seeu5Kc8G3WM14Qn4YZboUboXnTzfjJctfaNnJ2PRf871tGG6BugtpcwOjWz820xWufavjNreBEBV0k7o42FYgIRcPuauMFzzkzLp1vTJM3NxT5l+aY/p6g/G+IHBhF5fLDM56PHToGmGXwq3w4rmw8yuY8ZTJN+9sMYe6fi4d8eA8VaINOeUeqDkKC39v/uceQbdYICax5Rj64TyTDeVp9pU8Mnw89F3fwAtnQk0lTLwOSve6s4CaYceX5jeU3EQ9Qrz7/96ch+7JEtrcKIgRUEJa0Ada8sVD95D9OaBg0PS6dT0HANp45lobQe/p9tAThpqCn0AVGHm88a4i6C4XLH0Wnp1qJlS+ai5M+FHw7HG0oyf6od0mDADmjuxIfmMPHSDpOBh2Pix3l6gnDKl7LTap5SyXXHcFsacaMmmEOY7nuKHKoT3mriwuBW76HE66y6zf8WXz78nfACnjmr7gt+ahexIO9nzbqS0UQlPQ41LAFkV/DkgM3UP25+ZHGN27bp1n7suSPaZCsLYSevQ36yxWSJsQuEwXz2BRVxD0owfh9Uvg459D5qlw6xIYckZwbPHQ1kkutIaXL4S57oHK8kITK29K0AGm/AxwhxM8HjqYOLq3oJflm2IqjzDlrjBhmZQxZjnZ3eUjlAuMtIYP7jbCfPW75nfRe6BxeHYsbPo9lYfNBbS5amF7lPleN+WhO2sgby1kTjWf0ZYP/XYqrRGagm6xQO9M+uk88dDBeE+5K+uHW8DcLoLxTjw56D29SgrSJ8GBDbB/jf9t6iqCXpQNz58BuxfD+U+YXhxxyZ1vR0Pa2nGxcKsJEexZbPrwHMtBbyLkAiaklnkqWCPcd2puYpPrC/r6uSbV9aVzz
fckd6UJMdijzOtJI81jKGe6rHkddi40A9+e779SZlKQXYuMADfEE2ZKHt38fuPcqYsNyd9onKcJ10KvTNj0v/qvb/04YN1OQ1PQAXoPJNWVJzF0MF9WNAxu4HXGpZieE4d2GzGAuhg6mJzr+DR4ZabxKMB4M5s/hPdu7lh895igxwdP0Hd+bQbAKkvh2g9g0o3BiZc3RVs9dE9owB4D3/21cZVoU8x8Gq58s37Va2yi8e492U37lpkqyIoSeOk8E/v1hFvAhGiieoeuh152ABb8EvqfBFk31n9t4DQzzpPbRKO6A+5upMkjm993fIoJ3zXEc9ebngUjZpiLhue7v/UT0zRvyVNtPxcfCGlBT6rNo6q6iatrdyP7c/OjSx1ff73FajySkj11RUU9vDz0Hmlw3QfGi35lJmz5CF6/DN6+2nhuO5u5HfUFj1B5Sv+h8wTdWQtf/RFevdiEJG7+Avqf0DnH9hVPqKRkr2/b7/jShE6OvwU2fwB7vqu/n6bo2a/xRT42GVw1UFliLt77lplxl+s+NAU01UfqBszBXACTR4amh+5ywgc/NZlEM/7eOPUwc6opsGsqjp6/wVTdejtADYlLadpDz11lLpI9B8BxM02mzNaPzV3yf39iJk054baOnVszhLSg26khrrqbT3rrcpkWp4OmNz1NmicXvWSf21vuWf/1XhnGe7VHw1tXwd7v4axHzK16RzJgvEMu9iiTz9wZgl6UDS+eBV/93qTv3fhpXeipKxGbZC6uvvyPa6vM4Nqg6TD5x6CsZoAXVVcs5Csx7mrRIwVmsPzIAeg32YjMdfNh/A8b94BPGmHaCQSyZsHfVB0x3+dtH8PpvzHtgxsS7XaCmoqjH9hgwi0t3dHFp5qMIWdt/fU5K8xdjlJmnCo+HTa8B+/eYLa97N/1i8H8SOgKeh/TpCuhOreVDcOcA+vMl6qhJ+ahV4Y75LKvvnfuTe9MuO4jOPUXcOcKOOlO8wP3pF61B29Bh86pFl33Djw7xVRHXvoS/OA542V1VTxVna2xb6lJQxw03dzmj5llcthjk9reRMy7WtTTGrefuwoyabgJ03gPrIMZGK0pr1/T0JU5vN+MCWz/FM7/M5zYgjc8aLoJkXjPqOVymhh6a+2T41LMoKd3Xn9lqZmH1nOXo5QJu2R/DjnLYMbfjmlXIAhdQXenLvatbUbQu+B8f35j0RwzCcOmeaZ/N9RPV/Sm1wCoOGi+oC3dPvbONA29PLfwaVmwf3X7J8JtUtADNA2dsxYWPADv32TSzG5bYqr7ujrpk8zYRmvf1R1fmsyTjFPM8kl3mseWwi3NEeuuFi0vNIJuj6kb+GyOYwOjXTyOXltlioWeO83kl181Fybd1PJ7Bk4zorz7m7p1B3eZC2hT+efeNFUtun81oOuPQ4y4yDxOuing38vQFfS4VGpUJCmuJgYlclbCn4fBniWdb1egqT4KCx8zOcZzfwTf/gX6jmk+c8OT4XBod/0Ml9ZIzzJeWVNzVPqCR9AjAizolYfhtR+YQabJP4Zr59X90Lo6nv4grdUC7PgS+h1fd3FMGm4G+Iac3fZjegT9SL7xGNMmtO7l9x1lPr9Vr7T9eJ1BbZX5PfxtAnz0M/Odv2FB46yvpkifZNpleM/M5Jme0RcPHernonvuuNIm1K3rfzzc8Cmc84fW7ekgoSvoFgulUWn00wdwuhqU73oGOfYs7ny7As3+1WYmmVmvwfWfwLRfwTmPNb+9d/y4uZBLU3g8DO+QQNURePdG3wbIqstMXN4jFoHoia61ewb2xTDzH3De4/VbnHZ1Usa4xypaEPTyIpOBNGha/fUXPAHTH2j7MR09zTEP7TGZHP0mt/4ee5S5K9i+IPAN3dpCTYUZS/jrOHPHGp8KP/qPGTfxdbYpW4QJV278j+m/AiZ+rqx1/eWbI66JatHcVWbw2pPZ5aH/8Z3y3QxdQQfKovuToZooLtrrzgDoSAy4q+L58fc/CQacCKfeV3cr3
hTegt4WD733QPPj9y482vg+bHgX3rux9R4knsZcHgIRQ1/2nOm3ccZvYPzV/t13Z2CLNHdX+1oQ9J1fmcfmQmptRSlTXLTtE5N90e943943+cdm6rWFv/ePHR2h8rBprPXXsaZYrFdGnZAPmt721NQTbjVZP2veMMv5G0x1bWsDlw2notPa/F7Sslp+XwAJaUE/GtufAaqACu/URWdt3WBPzoqu1eHPH+QsN2Ib08e37aN61YU92uKhK+XuyOh1UVz7tnsChU3wVSu3j40E3c8ees5KEzcfei6ceKf/9tvZ9Jts7ro8xS0uF8y/z3ica982FyxHTzM24C9iE+sGONMn+faeyFjTYnjHFyYTKhiU7DMdDP8yCj7/jWlxcN1HcMPH7RNyD/2ONyK85GkzZnRgQ+vxc2g8Fd3hXBPK8o6fdzIhLeiV8QOJVDVUHvBqwJ+/3uTS9jvBZH8cDqMsGK2NoDfszdwSStW1AGiLoIOJoxdsMqGWQ3tMCOukO2G8O3bfkmfZlIdeW2FukzvK0YPwznXmx3TRPxrnF4cS6Vnm/+IZcNzyobnzWPUq/Medcz7wtKZTUtuLJ9Olz5DGGS0tMekm490vfNR/trRG1RGTvfTKTPjLaPjmCVMBe/OXcM3/Wr479RWl4KQ74NAu46UfzvE9ZOM9Fd2Sf5hHX8JYASLAEycGlprM6bDuIfSmeTDE7cF4vIcTb4d935sqsJayO0KJkr3GA0hv4y1drwxTOt7WnOW0LJMBkLe2Low1+nIjzju/MkUSP/4GIqIbv7cpQQczMOopK28PWps+5mV5cOOCtglSV8TjIecsN+GXr/9oYrC3LjHpb3lr/CNa3nhy0dsqPBHRppPjgvtNocywc317344vzfdhxEzfti8vNvH6zR+aOwJPD6JTfwHjrgxMXcHwC6FnfzPhCbRc8u9NXIoJ0Xz/DHz/tOmtnzLW//b5SAi7NtCjbybLXMPoscOrReWe78wHM/RsE99qqqw3VPHEz9v6Qxx5MUy8tu2erGekPneF6es84GTj7TviTb5ycTasfq3p93p6oXtoWP5fUdK+LnRLnjbFImc9EtRbW7/Ro5+ZQi5nufHO8zcY4bJFGC9x/A/9L2CeTJf2eJJZ10PicHjravju762HNI8ehLnXmaZinz3YdHGSs8aER7+eA8+fCXMGwX9vNRezCdfCtR/C3Wth2v2BKxKz2kz1ZqU7E8tnDz3VpDl+MtvMRXDuH4PaXiKkPfSkuEjecJ7I5MMvm8yLpONg7xITT7NFdrw4pquRs9xkjrSWN9yQ0Zeav7YSk2B+QCtfNnm9J99d99rAU00F3L7vTTl6Q6oON+OhuwX9zStMteLty3wvjtm33MROh18Ax/+47efTFVHK3HHtW2rCLn0Gw6hLAntMT3aGrwOi3tij4MbP4H+3GW82Z7m5uHt/1t4sfsJ8F0ZeDN/+1YTuLnjSXLj2LTWpxfuWutstuysrT5sNQ84yVZydKY7jf2hSgq123+9m41JM1ln6ZLjkef+GxtpBSAt6r+gIPtXH81tewbLxfbBeYQom+p9oNkibCGvfNAMdQf5Ht5mDu2DpM+ZL1td9+5ezHFJ9yBv2J2A1of4AACAASURBVGkTTdmyzdH4ljk9q/mUu2ZDLodg71Jz4QWTLjamiRlhGnL0ILx7vfGIZj7ddZps+YP0SXUtVn/wr8B/V8dcbkJVSa2k5TWHIx4ufxW++xt8/hAUbjNNwHpn1t+uNAeWPgdjrzRjHWkTzUVg03/rtkkcDmOvMGGlAafUTWQdDCLjjIddWeL792vo2ebidPZjHQsl+omQFnSLRWGJS2a7bQLD1r9rQi0AA04yj2kTYfm/TCwy6Thza/fNn80XzDNQ2NWoLIVFfzJi7qw2seoff+OOZa8zYwOdSVqWEfRh5zUuo0+fZH6cZfn1C5u0blnQ175plmMSjQc36pKWw0GH8+C1S8z4wQ2fNO5HE+p4Qh+d4Z2D+Rw7ehylz
B1byliYey38azpc/gpkTqnb5qs/ANqESpQyA+pJx5k7rbSJxiHoamMg465s2/aJw0yLiS5CSMfQwYRdFkVMNSPUS581uaEJQ82LnhirJ46+9Fn46rFOn+fPZ0pz4alJJjY5+jIzRVrhFnOrmrfWdMnr7BH0zCmmI92Eaxq/5hnQazhJRm2lyXGOiK1b5xH03BWmq+Okm2DK/5ksmm2fNH/8omx44SyTZnfV3PCImzckdbz5O+uR0LuTHHiayTiJSYBXL4KPZ5tmV3nrTB/ySTfVOVpginim3Q9Dz+p6Yh4GhLSHDpAY52BB8SRuttjNrc/wC+pul/oMNgNzuatMz4av3BWV+RuCZ3BLfPxzUzRx0xeQ7hauHV+Y3i2e9Etf84b9Rd/RcN+Opn98KWPMwHPOchh+ft36hn1cwIi7xWYGUa12mHyLafm78BH45k8mY8L7Nre63GQ5LLgfUKYjpHc5dThhj4Jbvgq2Fe2nzyAzrduH95jJMpb+06yPiDMXbaHTCHlBT4qPZNVeu7nyb/u4ft9riwVSxxkPfcH9xmtMm9g1mwxt/tDEUc/4bZ2YA5zzR8j+0szi3nNAXYZCZ9KcJ2WPMoLfsBzce3ILD0oZL7280Hj7nvM4+aem/8bOr8wFOHel6ZK36X9moCxhKFz5VkA71Al+wNEDLn3RXIh3fWNSFfuf4HsBnOAXQl/Q4yI5WF5NzehZ2Ld9bIoOvEmbaMqE89bA9F+ZtKmvHjNfvIiY4BjdkKoy450njWwcI49LhjMfMt5PZ3vnvpCeBatfrz/w7D25hTceQT/xjrp1466Grx83MXLtbuEQEWeyIsZeYVochHLhUHcjIgaGnWP+hE4nDATd9FsoTD+b1LtWH2ure4y0iYA2VXEn3QXbPzPLBVvqe8LBZOHvTbXZZS833cBnwnUm62XYeZ1tWeukTzKVjQWb63J3mwq5gBlA6zvGDCR5sDtMGtuWj8zdVNpEU3Zti+gc+wUhjAgDQY8EoOBINan9BjbeYMDJpg/GOY+Z3HTPHIH5G7qGoBdlm4yWrBuaH/C0WOCs33WuXb7iqVrNWd66oF/yfNOFKMPPM3+CIHQIn+5llVLnKKW2KqWylVKzm3j9J0qp9UqpNUqpxUqpEf43tWmS4t2Cfriy6Q2ie8OPv65LZew5wNzSdzSOrrV/puRa8zqgTDFFKNIr02QW1Wuz24ygQ3jljwtCF6NVQVdKWYGngXOBEcCVTQj2G1rr0VrrccDjwBN+t7QZPCGXgrJW2rl6sFjMlFodFfRPfwUvNDPtm6+4XLDubTOgG4zBTn+glAm7eBcYNTUoKghCwPHFQ58MZGutd2qtq4G3gHolg1rrw16LMUCn9axNiI1AqTYIOrhnMd/Qsda6Wz4yGRnFO9q/j93fmHTEsVe0fx9dgfQsKNpaNyNRc4OigiAEFF8EPQ3Y57Wc415XD6XU7UqpHRgP/a6mdqSUukUptUIptaKwsLA99jbCZrXQJyaCwrJmQi5NkTzSlPe2t7Xu4TxTyASwbUH79gGmYjKyh+9d67oqxwqM3AVcVWUmP90WGTybBKEb4rd8MK3101rrQcAvgF81s81zWussrXVWYqL/ejYkxjkoONwWD909eNfesIunlWxEbMtVji1RdcRM8jzyoi7RA6JDpE4w1aSeqf88Zf8SLxeETsUXQc8FvGdGSHeva463gIs6YlRbSYqLbFvIxdOUqL0Vo3uWmNnSJ14He741/VfaypYPzSTMY9vYO6Ir4og3eeMrXzZNtBr2cREEoVPwRdCXA0OUUplKqQjgCmCe9wZKqSFei+cD2/1nYusYQW9DyMXRw/SXaLeHvsSkGA6/wFSfejzTlnDWmGrIsgNmee2bJuPGu7I1lJlyr6nsXPpM417ogiB0Cq3moWuta5VSdwALACvwotZ6o1LqYWCF1noecIdS6gygBjgEXBtIoxuSFB9J0ZFqnC6N1eLjbX7yqPYJesUh875pvzSx46heJo4+8
uKW3/fRvbDq3+Z5fLqJ35/6i/AJSySPNBe475+B3hnioQtCEPCpsEhrPR+Y32Ddg17P7270pk4kKc6B06U5WF5NYpyPA3HJI40Q11S2Pru3N3uXAtrktVttphH/9k9b7rm+4iUj5lk3mp4kOSvMdHITfuT7cUOBqf9nQkl5a2HI2cG2RhC6HSFfKQpe1aJllW0TdO006XZtmQNw73cmg8PTxnXo2SaXPHdl05We+5aZWdwHnQ7nzQm99qhtIXV83QVOPHRB6HTCouvRsWrRNuWiuzNdDjQxMFqa2/xA554lpo2rJzNl0OmgrE1nuxwpMHMpxqd2iempOoWp95nHyNiWtxMEwe+Eh6B7GnS1JXWx90CISYJVr9QvMCovhn+eBM+dBkca5MpXH4X9q+raCICZPWfASbC1CUFf+bIZBL3i9e7TzL/fZDjjIRh7VbAtEYRuR1gIeqJXyMVnLFaY/oCZ5Nh7jsMvfmuyNA7nwWs/MBNOeMhdYbJa+p9Uf19DzoSCjeY93uxaZBpWeeYE7S6ccg/0b8cExIIgdIiwEHSH3Uq8w9a2kAvA+B+ZHuSf/cYMjuauNB77CbfCrFfN9GhvXWU88yMFsPVjQDWOlQ+cZh53flW3rqbSxM8zpnbk1ARBEHwmLAZFAZLi21gtCsZLP/tRMxfi9/8wGRoxiSad0BEPF/0T3r8Zfp9S957U8Y0nKU4eBdEJRtA9k8zmLAdnVf1JcwVBEAJI2Ah6Ymwbi4s8DJoGQ8+BL38H2gUXP2vEHGDM5WYGltyVENsX4vrW9f/2xmKBgacaQdfa5Jbv/saUww84qfH2giAIASBsBD0pPpKVew61781nPQLZn0P6ZBgzq/5rw8+vPwFycwycBhveMzP3JI8w8yqmjDVVqYIgCJ1A+Ai6u5+L1hrV1urLhCFm1vKeA9pfuTnwNPO4cyH0yjAhlxNubd++BEEQ2kEYCbqD6loXhytq6RHdxLycrZE6vmMG9OxnZq3f+RUkjQBXDWTKgKggCJ1HWGS5QF1xUX574uj+YuA02P0t7PjCFBuFS+MtQRBCgrAR9PRe0QDsO3g0eEYMmmZa4q542Xj8Uv4uCEInEjaCnpkQA8CuovLgGZFxivHMq8skXVEQhE4nbAS9V7SdeIctuILu6FHXtCtDBF0QhM4lbARdKUVmQgy7i4Mo6ADDzzPzhEr8XBCETiZsBB1M2GV3URBj6AAn3gl3rzEFSYIgCJ1IWAl6RkIM+0srqKxxBs8Iq637dFYUBKFLEVaCnpkQg9awpzjIXrogCEIQCCtBz+jTBTJdBEEQgkR4Cbo7dTHoA6OCIAhBIKwEvUeUnT4xEewWD10QhG5IWAk6GC99pwi6IAjdkLATdJO6KIIuCEL3IywFvaCsivKq2mCbIgiC0KmEnaB7Ml1kYFQQhO5G+Al6gum6KKmLgiB0N3wSdKXUOUqprUqpbKXU7CZe/5lSapNSap1S6gul1AD/m+obxzx0EXRBELoZrQq6UsoKPA2cC4wArlRKjWiw2WogS2s9BngXeNzfhvpKTKSN5PhIdgW7p4sgCEIn44uHPhnI1lrv1FpXA28BM7030Fov1Fp7FPR7IN2/ZraNjD5doOuiIAhCJ+OLoKcB+7yWc9zrmuNG4OOmXlBK3aKUWqGUWlFYWOi7lW0kMyFGYuiCIHQ7/DooqpT6IZAFzGnqda31c1rrLK11VmJioj8PXY/MhBgOlldTWlETsGMIgiB0NXwR9Fygn9dyuntdPZRSZwAPADO01lX+Ma99ZHSF6egEQRA6GV8EfTkwRCmVqZSKAK4A5nlvoJQaDzyLEfMC/5vZNgYlGkHfWXgkyJYIgiB0Hq0Kuta6FrgDWABsBuZqrTcqpR5WSs1wbzYHiAXeUUqtUUrNa2Z3ncKAPjHYLIrsAhF0QRC6DzZfNtJazwfmN1j3oNfzM/xsV4ewWy1kJMSwXQRdEIRuRNhVinoYnBjLDhF0QRC6EWEr6EOSY9ldXE5Vb
RDnFxUEQehEwlbQByfF4tKwWypGBUHoJoS1oAMyMCoIQrchbAV9UGIsSsH2grJgmyIIgtAphK2gO+xW0ntFiYcuCEK3IWwFHWBIUpwIuiAI3YawFvTBSbHsLCrH6dLBNkUQBCHghL2gV9e62HdQMl0EQQh/wl7QAakYFQShW9AtBF3i6IIgdAfCWtDjHXaS4yNF0AVB6BaEtaCD8dKzJRddEIRuQNgL+pCkOHYUlqO1ZLoIghDehL2gD0qK5UhVLQcOVwbbFEEQhIAS9oI+xD0wui1f4uiCIIQ3YS/oI1PjibBaWLStMNimCIIgBJSwF/Q4h52pQxOZvz4Pl1SMCoIQxoS9oANcMCaFvNJKVu87FGxTBEEQAka3EPTTj0siwmbhw3V5wTZFEAQhYHQLQY9z2DlNwi6CIIQ53ULQAc4fk0L+4SpW7pWwiyAI4Um3EfTTj0sm0mbhIwm7CIIQpnQbQY+NtDFtWBLz1+dJf3RBEMKSbiPoYMIuBWVVLNt1MNimCIIg+J1uJeinH5dEjyg7z3y9I9imCIIg+B2fBF0pdY5SaqtSKlspNbuJ16cqpVYppWqVUpf630z/EB1h4/Zpg/h6WyHfZRcF2xxBEAS/0qqgK6WswNPAucAI4Eql1IgGm+0FrgPe8LeB/uaaEzNI7eHgj59skQ6MgiCEFb546JOBbK31Tq11NfAWMNN7A631bq31OsAVABv9isNu5Z4zh7I2p5SPNxwItjlCN6LkaDU3vLyc/SUVwTZFCFN8EfQ0YJ/Xco57XZtRSt2ilFqhlFpRWBi8Zlk/mJDO0ORY5izYSo2zy1+DhDBh9d4SvtxSwGeb8oNtihCmdOqgqNb6Oa11ltY6KzExsTMPXQ+rRfHzs4ezq6icd1bkBM0OoXuxv9R45mv2lQTZEiFc8UXQc4F+Xsvp7nUhzenHJTEmvQfPf7NT2gEInUJeiZlkRQRdCBS+CPpyYIhSKlMpFQFcAcwLrFmBRynFjadksrOonK+2FQTbHKEbkFdqBH1XUTklR6uDbI0QjrQq6FrrWuAOYAGwGZirtd6olHpYKTUDQCk1SSmVA1wGPKuU2hhIo/3FeaNTSOnh4PlvdgXbFKEbkFdaQYTV/OTESxcCgU8xdK31fK31UK31IK31o+51D2qt57mfL9dap2utY7TWfbTWIwNptL+wWy1ce1IG3+0oZtP+w8E2Rwhz8korOWlwH5QSQRcCQ7eqFG2KKyf1J8pu5YXF4qULgUNrzf6SCoYkxTI4MVYEXQgI3V7Qe0TbuTwrnXlrcyk4XBlsc4Qw5dDRGqpqXaT0iGJcv56s3VcihW2C3+n2gg5w/cmZ1Lo0s99fz8FyGawS/I+nmCi1p4Nx/Xty6GgNew8eDbJVQrghgg5kJMTw6/NH8M32Qs7+yyK+3CKFH4J/8WS4eDx0kDi64H9E0N3ccEom/7v9FPrERHDDyyu4/fVVbMgtDbZZQpiQ5y4qSunpYFhyHFF2K6v3iqAL/kUE3YsRqfH8746Tuev0IXy9rZAL/r6Ya15cxpIdxRLvFDrE/pJK7FZFQkwkNquF0Wk9xEMX/I4IegMibVZ+duZQvp09nZ+fM4xN+w9z5b++5+J/fMcnGw5IVanQLg6UVtC3hwOLRQEwrn9PNu0/TFWtM8iWCeGELdgGdFV6RNm57bTB3HByJu+tyuHZr3fyk9dWMqBPNJdOSOeSienERNqYtyaXt1fsI/dQBVOHJnLWiL6cOiyR2Ej51wp17C+tJKVH1LHlcf16Uu10sTmv7FhMXRA6iqhOKzjsVq4+fgCzsvrx8YYDvLF0L3/+bBtPfL4Nu9VCda2L41LiOXVoIl9vK+R/a/aTGBfJgp9OpXdMRLDNF7oIeaUVTOjf69hy1gDz/JtthSLogt8QQfcRm9XChWNTuXBsKvsOHuXdlTmUVdZy8fg0RqXFo5TC6dIs2l7IjS8v5+9fbuc3F4ZEwawQYFwuzYEGHnpSvIOsAb34cF0ed54+J
IjWCeGExNDbQb/e0dxz5lAevHAEo9N7oJSJi1otimnDkpg1qT+vLtnD7qLyIFsqdAWKyquocWpSezrqrb9wbCpb88vYll8WJMuEcEMEPQDcc+YQImwWHl+wJdimCF0AT9tcbw8d4NzRfbEo+HDt/mCYJYQhIugBICnOwS1TBzJ//QFW7jkUbHOEIHMsB71HfQ89Kc7BiYP68MG6PEmL7eIs2HiAG15e3uWb+ImgB4hbpg4kKS6S336wkfU5pc2mO9Y4XXyyIY+KaklfC1f2uz301J5RjV67YEwqu4rK2djFhaK788zXO/hySwEznlrM459sobKma/5eRdADRHSEjQfOP471uaVc+NRiJj36OffOXUuu1wTBlTVObn1tJT95bRW3vb4y7Oc33Vt8lAOl3a8BWl5pBZE2C72i7Y1eO2dkX2wWxQfrJOzSVTlQWsnqvSXcPCWTi8en8Y+vdjDjqcVdUtRF0APIzHFpLPvlGTw5ayxThyby8YY8znlyEe+s2EdZZQ3XvriML7YUcMGYFBZuLeSB/6xv8tb7SFUtS3cWs7c4uM2capwu5q/Po6yyps3vnbd2P2c++TWX/PM7jlTVBsC6rkteaSUpPRzHBs+96RUTwZQhCXy4VsIuXZUFGw8AMGtSf+ZcNpanr5rAtvwjvLeq681HLGmLASYxLpKLx6dz8fh09hYf5f/eXct9767j0fmbKaus5S+zxjFzXBoDE7fxty+2kxgXybmjUtiQW8q63FJW7y1h64HDeCI2/XtHc8qQBH54/ABGpMZ32nnsKDzCPW+vYV1OKeP69eSVGycT72jscTbE5dI88dk2nlqYzcjUeDblHeYPH2/mkYtGd4LVXYO8BimLDblwbCo/m7uWVXsPMXFA7060TPCFjzfkMTgplsFJsQCcN7ovY9N78K9FO7liUn+slsYX6mAhHnon0r9PNG/dfAK/vmAE8Q47z/5wIjPHpQFwzxlDuGJSP55euIML/r6Y2e+v54O1+0mIjeCO6UN44dosfjtjJEOT4/jf6lwu/se3zOuE7AitNW8s3csFf1vM3oNHuWPaYDbklnLNC8s43IqnXlXr5PY3VvHUwmxmZfXjP7edzI0nZ/La93v5Lrso4LZ3FfJKKkhpkLLozZkjkomNtPHs1zs70SrBF4qPVLFs10HOHdX32DqlFD85dRC7i48e8967CuKhdzIWi5mc+sZTMuutV0rxyEWjmNC/F44IK2PSetC/d/Sx3h8erj0pg6IjVfzk1ZXc9eZqth44zL1nDmu0nb/4z+pcfvmf9UwZksCfLhtLcryDMek9uP2NVfzohWU8f00WiXGRjd5XWePkJ6+t5KuthTxw3nHcNCUTpRT/d/YwvtxSwH3vrmPBPVPDvkWC06XJL6sitQUPPc5h5yenDuRPn25j+e6DTMoQL72r8OmmfFwazvESdICzRvYlo080z369g3NH9W0ynBYMxEPvQtisFi6f1I8ZY1PJSIhpVqQTYiN54+YTmJVlPPrz/vYNTy/MZmfhEb/ac7C8mt99uIkJ/Xvy7+snkxxvvMyzRvblH1dPZNP+UqY8/iWPfLiJgrK6wc6Kaic3/XsFX28r5PcXj+bmqQOPfeEddiuPXzqG/aUV/O6DTWEfNy4oq8Tp0i166AA3njKQ5PhIfj9/c9j/T0KJjzccoH/vaEak1A9vWi2Km6cOZG1OKUt2FgfJusaEt3sUxkTYLPzhktFkZfTi9aV7mbNgK3MWbCWlh4N+vaJJ7xXF4ORYJmf0ZnR6DyJt1jYf49GPTJz/sR+MaXRxOXNEMgt+OpWnFmbz4re7ePX7PaT3isJmsXC4sob8w5XMuXQsl05Mb7TfrIze3HrqIP7x1Q7694nm9mmD2/1/6OocS1lswUMHiIowXT5/8d56Pt5wgPNGp3SGeUILlB6t4bvsIm48JbNJD/ySCek8+dk2nv16JycNSgiChY0RQQ9hlFJcltWPy7L6sb+kgk82HGDD/lJyDlXw/
c5i3l+dC0CkzfTfHpIcx9DkWAYmxtI33kFSXCQ9o+1Nflm/zS7ivVU53DFtMMP6xjV5/IGJsTxx+Tjumj6El7/bTeGRKpxOjUtrHpoxkrNH9m3yfQD/d9Yw8kormbNgKwmxEcya1N8//5Quxgdr92O1KIY28z/05tKJ/Xhh8S4e/2QLZxyXTIRNbqCDyeeb86l16UbhFg8Ou5XrT85kzoKtvPr9Hn50woBOtrAxIuhhQmrPKG5oEJc/WF7N8t0HWbbrIOtySpi/Po83l9UfyHTYLQxOimVIUhwZfWKIibQSabPw/OJdZPSJ5o7prXvPGQkxPDSjbY3ILBbF45eO4WB5Nfe/vx6rxcIFY1Jw2Nt+J9FV2VNczutL93B5Vj/SmigqaojVorj/3OO4/uXl/OHjLfzq/OPaPDbidGkUtPq+WqcLm7X+BaOwrIq5K/Zx6cT0Y+G17krB4Uqe/Hwb/XpHMTa9+W6YN08ZyOq9h/j1fzcABF3UVbDidVlZWXrFihVBOXZ3RWtN4ZEqdhcdpaCskvzDVeQeqmB7QRnZBUeOzXsJJqTz8nWTOGlwYG8lj1bXctW/lrJmXwmRNgtZGb04ZXAiZ41MZlBibECPHWjufHM1n2/K5+v7TiPJR4HUWvPg/zby6vd7OOO4JJ6cNY64FtJDtdZszS9j8fYivt9ZzNJdB4m0WfndzJGc20TYpqLayUPzNvLfNbn8eOpAbj1tMFERVr7LLuLut9dQWFZFjyg7D88cyYyxqSilKKusYcXuQ+SWVFB0pIpD5dXER9lJ7RlFSg8HLq05VF5DSUUNUXYrKT0dpPaIYmBiDHZr6N1lHK6sYdaz37OnuJy3bzmR0ek9Wty+qtbJba+t4ostBfzuolEBF3Wl1EqtdVaTr4mgCx6qap1U1bqornVht1roEdV6nrk/qKxx8m12Ed9mF/PdjiK2HDDdBwclxnDq0CT69TbCkRzvoGd0BD2i7MQ7bI08zK7EupwSZjz1LXdOH8y9Zw1r03u11rz6/R5++8EmBibEcP6YFHYXlbO7+CgRNguDEmMYmBDL/tIKPt+cz76Dpvo4MyGG4zN7s2F/KRtyD3Pe6L48NGMkSXHmYrKj8Ai3v76KLQfKmJzRm2W7D5LWM4rThiXyxrK9DEyI4f5zj+Ppr7JZvbeEacMSOVrtZOWeQ9R6ta6Ic9gor6qltcm7MvpE8+fLxzFxQK+WN+xCVNU6uf6l5SzbdZAXrpvEqUMTfX6fR9RnZfXjl+cfF7Dfjwi6EFLsLzFC9enGfJbvPkhVbdMtERx2C7GRNmIibcQ5bMRG2ohz2ImJsBIVYSM6woSPIjx/Vgs2i8Lm9Wi3KmwWCzarqvf82DqLBavFvBZpsxJhM++xWhQWi8JmUdjd+/OMRWitufr5pWw5UMbX953WoofdEt9lF3H7G6soqaghtUcUA/pEU13rYmdROQfLq4mwWZgyOIEzRiQzbVgSfd3Nv2qdLp77Zid/+Ww71U4XPaLs9I13kHPIXBCenDWO04YlsXRnMb+Zt5EtB8q4eHwaj1w0iphI27H3P/VlNhl9Yjh1WCJThiQwMCGW3jERRNgs1Dpd5JdVsb+kAptF0Ss6gp7Rdo5WO8krrWBX0VGe/GwbeaUV/PjUQfz0jCHtGpjvLPYdPMrHG/L47+r9bMo7zJ8vG8slTQzot0RVrZMnPtvGvxbtJCE2kodnjmTq0ESiI/wb2e6woCulzgH+CliB57XWf2jweiTwCjARKAZmaa13t7RPEXTBF7TWHCyvJq+0kvzDlRyurKH0aA2lFbWUV9dypKqWI5W1lFXWUFZZS1llLUdraqmodnK02kl1rauedxkolAK71UKk1VwIDh2t4TcXjuD6kzNbf3MLVNU60ZpGYwslR6uJtFmJimheJHcUHuGTDQc44P7fRdqt3H/u8HpNwmqdLnYUljM0ObbR4LjWukP51WWVNTzy4WbeXrGPCJuFpLhI90B8BBHWugutzaLcF03z3
G6zYPdceBtcVM1F2FxU7e7nnotspM1cwB12y7HnkXYrDru5mDc8l4LDlXy4Lo//rd3PWveE3aPS4rnh5Ex+MKFtYu7NupwSfv7uumN3mj2j7aT1jKJPbCS9o+30jI7gwrEp7a4K7pCgK6WswDbgTCAHWA5cqbXe5LXNbcAYrfVPlFJXABdrrWe1tF8RdKGzcLk01U4XNU4XtU5tHl3aPHc1XOeixqmpdZllp1NT6zKvV9e6qKo1+3G6TDZPrUtTU+ui2v16jXtfMZE2fnbmUMlUAb7ZXsji7UUUlFWRf7iS0oqaY/9Pz//a6dLHLr61TvN5+ROLgii7FYe97q5t78GjuDSMTI3nwrGpnDcqhf59ov1yvBqni8825bOrqJzckgr2l1RwqLyag0erKSmv4dcXjODySf3ate+OCvqJwENa67Pdy/cDaK0f89pmgXubJUopG3AASNQt7FwEXRCE5tBa43TVXUydLn3sYum5ENc4XdTUaqqddWM/1bUuKmtdVNWYdZXux4pqJxU1TjNOVGO2yUyIYcbY1GM99XPzYQAABOxJREFUWjr7/Np799OSoPsS3EkD9nkt5wDHN7eN1rpWKVUK9AHqNexQSt0C3ALQv3945h0LgtBxlFJmLMPaONwUDgSqVUCn3g9qrZ/TWmdprbMSE30bPRYEQRB8wxdBzwW8gz3p7nVNbuMOufTADI4KgiAInYQvgr4cGKKUylRKRQBXAPMabDMPuNb9/FLgy5bi54IgCIL/aTWG7o6J3wEswKQtvqi13qiUehhYobWeB7wAvKqUygYOYkRfEARB6ER8ynjXWs8H5jdY96DX80rgMv+aJgiCILQFSZIVBEEIE0TQBUEQwgQRdEEQhDAhaM25lFKFwJ52vj2BBkVL3YTueN7d8Zyhe553dzxnaPt5D9BaN1nIEzRB7whKqRXNlb6GM93xvLvjOUP3PO/ueM7g3/OWkIsgCEKYIIIuCIIQJoSqoD8XbAOCRHc87+54ztA9z7s7njP48bxDMoYuCIIgNCZUPXRBEAShASLogiAIYULICbpS6hyl1FalVLZSanaw7QkESql+SqmFSqlNSqmNSqm73et7K6U+U0ptdz+GznTqPqKUsiqlViulPnQvZyqllro/77fdHT/DCqVUT6XUu0qpLUqpzUqpE7vJZ32P+/u9QSn1plLKEW6ft1LqRaVUgVJqg9e6Jj9bZfib+9zXKaUmtPV4ISXo7vlNnwbOBUYAVyqlRgTXqoBQC9yrtR4BnADc7j7P2cAXWushwBfu5XDjbmCz1/IfgSe11oOBQ8CNQbEqsPwV+ERrPRwYizn/sP6slVJpwF1AltZ6FKaT6xWE3+f9MnBOg3XNfbbnAkPcf7cA/2zrwUJK0IHJQLbWeqfWuhp4C5gZZJv8jtY6T2u9yv28DPMDT8Oc67/dm/0buCg4FgYGpVQ6cD7wvHtZAdOBd92bhOM59wCmYlpQo7Wu1lqXEOaftRsbEOWeFCcayCPMPm+t9SJMS3FvmvtsZwKvaMP3QE+lVEpbjhdqgt7U/KZpQbKlU1BKZQDjgaVAstY6z/3SASA5SGYFir8APwc8U773AUq01rXu5XD8vDOBQuAld6jpeaVUDGH+WWutc4E/AXsxQl4KrCT8P29o/rPtsL6FmqB3K5RSscB7wE+11oe9X3PPCBU2OadKqQuAAq31ymDb0snYgAnAP7XW44FyGoRXwu2zBnDHjWdiLmipQAyNQxNhj78/21ATdF/mNw0LlFJ2jJi/rrV+370633ML5n4sCJZ9AeBkYIZSajcmlDYdE1vu6b4lh/D8vHOAHK31UvfyuxiBD+fPGuAMYJfWulBrXQO8j/kOhPvnDc1/th3Wt1ATdF/mNw153LHjF4DNWusnvF7ynrv1WuB/nW1boNBa36+1TtdaZ2A+1y+11lcDCzHz1EKYnTOA1voAsE8pNcy96nRgE2H8WbvZC5yglIp2f9895x3Wn7eb5j7becA17myXE
4BSr9CMb2itQ+oPOA/YBuwAHgi2PQE6x1Mwt2HrgDXuv/MwMeUvgO3A50DvYNsaoPM/DfjQ/XwgsAzIBt4BIoNtXwDOdxywwv15/xfo1R0+a+C3wBZgA/AqEBlunzfwJmaMoAZzN3Zjc58toDBZfDuA9ZgMoDYdT0r/BUEQwoRQC7kIgiAIzSCCLgiCECaIoAuCIIQJIuiCIAhhggi6IAhCmCCCLgiCECaIoAuCIIQJ/w/jdJK64cfK/gAAAABJRU5ErkJggg==\n", + "text/plain": [ + "
    " + ] + }, + "metadata": { + "tags": [], + "needs_background": "light" + } + } + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "5ZTt3kO3mfm4" + }, + "source": [ + "## Evaluate test set performance\n", + "\n", + "Let's run the model on the test set and check performance." + ] + }, + { + "cell_type": "code", + "metadata": { + "id": "biU2MwzyAo8o" + }, + "source": [ + "test_audio = []\n", + "test_labels = []\n", + "\n", + "for audio, label in test_ds:\n", + " test_audio.append(audio.numpy())\n", + " test_labels.append(label.numpy())\n", + "\n", + "test_audio = np.array(test_audio)\n", + "test_labels = np.array(test_labels)" + ], + "execution_count": 138, + "outputs": [] + }, + { + "cell_type": "code", + "metadata": { + "id": "ktUanr9mRZky", + "colab": { + "base_uri": "https://localhost:8080/" + }, + "outputId": "475fcebb-64af-4c8e-e4af-784289106a5b" + }, + "source": [ + "y_pred = np.argmax(model.predict(test_audio), axis=1)\n", + "y_true = test_labels\n", + "\n", + "test_acc = sum(y_pred == y_true) / len(y_true)\n", + "print(f'Test set accuracy: {test_acc:.0%}')" + ], + "execution_count": 139, + "outputs": [ + { + "output_type": "stream", + "text": [ + "Test set accuracy: 95%\n" + ], + "name": "stdout" + } + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "en9Znt1NOabH" + }, + "source": [ + "### Display a confusion matrix\n", + "\n", + "A confusion matrix is helpful to see how well the model did on each of the commands in the test set." 
+ ] + }, + { + "cell_type": "code", + "metadata": { + "id": "LvoSAOiXU3lL", + "colab": { + "base_uri": "https://localhost:8080/", + "height": 497 + }, + "outputId": "d3a37e34-55f8-4ca6-d826-ac8255e8f36c" + }, + "source": [ + "confusion_mtx = tf.math.confusion_matrix(y_true, y_pred) \n", + "plt.figure(figsize=(10, 8))\n", + "sns.heatmap(confusion_mtx, xticklabels=commands, yticklabels=commands, \n", + " annot=True, fmt='g')\n", + "plt.xlabel('Prediction')\n", + "plt.ylabel('Label')\n", + "plt.show()" + ], + "execution_count": 140, + "outputs": [ + { + "output_type": "display_data", + "data": { + "image/png": "iVBORw0KGgoAAAANSUhEUgAAAjcAAAHgCAYAAABZ+0ykAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4yLjIsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy+WH4yJAAAgAElEQVR4nO3dd7hdVZn48e+bQhJ6CcQIDE1EihCqIIFBkEhTQBThQboTC6IOqGD5DUVwUEEdRkCDVGVEpAgIgyWCRHpA6S3Sk5BCCXUCyX1/f5wd5pBJcm8ud99zzt7fD89+7j7r7H3We/Ncct+8a629IjORJEmqigGtDkCSJKkvmdxIkqRKMbmRJEmVYnIjSZIqxeRGkiRVismNJEmqlEGtDmBhvrPGAa5RV586YeoNrQ5BFTJ00BKtDkEV9Mprj0d/9vfmzMf6/Hft4OFr9+v3sCBWbiRJUqW0beVGkiSVrGtuqyMohZUbSZJUKVZuJEmqq+xqdQSlsHIjSZIqxcqNJEl11VXNyo3JjSRJNZUOS0mSJLU/KzeSJNVVRYelrNxIkqRKsXIjSVJdVXTOjcmNJEl15ROKJUmS2p+VG0mS6qqiw1JWbiRJUqVYuZEkqa4quhTc5EaSpJryCcWSJEkdwMqNJEl1VdFhKSs3kiSpUqzcSJJUV865kSRJan8mN5Ik1VXX3L4/uhERQyPi9oi4OyLuj4gTiva1IuK2iJgUEb+OiCWK9iHF60nF+2t214fJjSRJdZVdfX90bzawY2ZuAowCdomIrYHvAT/KzPcALwCHF9cfDrxQtP+ouG6RTG4kSVK/yYZXipeDiyOBHYFLi/YLgL2K8z2L1xTv7xQRsag+nFAsSVJdtWgpeEQMBO4E3gOcAfwDeDEz5xSXPAOsWpyvCjwNkJlzImIWsBIwc2Gfb+VGkiT1mYgYGxETm46x81+TmXMzcxSwGrAV8L6+jMHKjSRJdVXCUvDMHAeM6+G1L0bE9cA2wPIRMaio3qwGTC4umwysDjwTEYOA5YDnFvW5Vm4kSaqrrq6+P7oREStHxPLF+TBgZ+BB4HrgE8VlBwNXFudXFa8p3v9zZuai+rByI0mS+tNI4IJi3s0A4JLM/F1EPABcHBEnAX8DzimuPwf4RURMAp4H9uuuA5MbSZJqKrP759L0fZ95D7DpAtofozH/Zv72/wE+uTh9OCwlSZIqxcqNJEl1VdG9pUxuJEmqqxY956ZsDktJkqRKsXIjSVJdVXRYysqNJEmqFCs3kiTVVVf/LwXvDyY3kiTVlcNSkiRJ7c/KjSRJdeVScEmSpPZn5UaSpLpyzo0kSVL7s3IjSVJdVXTOjcmNJEl1VdHkxmEpSZJUKVZuJEmq
qcxqPqHYyo0kSaoUKzeSJNVVRefcmNxIklRXPudGkiSp/Vm5kSSprio6LGXlRpIkVYqVG0mS6qqic25MbiRJqiuHpSRJktqflRtJkuqqosNSVm4kSVKlWLmRJKmunHMjSZLU/qzcSJJUVxWt3JjcSJJUV04oliRJan9WbiRJqquKDktZuZEkSZVi5UaSpLqq6Jwbk5s299Ef/Avr7rgprz73Ej8bcywAQ5dbin3OOJLlVluZWc/M4LIvnM7/vPQaAGtsvT5j/u1ABg4eyGvPv8yFnzqpleGrg5w97jR23+3DTJ8xk1Gb7tTqcNShzvzp99h1lx2ZMeM5ttpyFwBWWGE5LrjwJ/zTGqvy1JOTOejAI3jxxZdaHKkAh6XUGnf/ZgL/dfD339a27Rc+xuM33c+ZOxzN4zfdz7Zf+BgAQ5Zdkl1POpRff+Y0frrzMVz6hdNbEbI61IUXXsLuexzQ6jDU4S76xWXstdchb2s76ujPc8MNNzFq4x254YabOOroz7cmONWGyU2be+r2h3j9xVfe1rbezptxz2UTALjnsgmsN2ZzADba84M8dN0dvDTlOQBee85/GannJvz1Np5/4cVWh6EOd9NNt/PC82//Odp9j5256KLLALjoosvY46NjWhGaFiS7+v5oA6UlNxHxyYhYpjj/dkRcHhGbldVfnSw1fDlemd74y+OV6S+y1PDlAFhprXcxdLmlOPDib/GZ353Exh8f3cowJQmAVVYZzrRnZwAw7dkZrLLK8BZHpKors3Lz/zLz5YgYDXwYOAc4a1E3RMTYiJgYERMnvjKpxNCqJYuvAwYNZORGa3Hxoady0YGnMPpLe7PiWu9qaWySNL/M7P4i9Y+urr4/2kCZyc3c4uvuwLjMvAZYYlE3ZOa4zNwiM7fYYun3lBhaZ3t15iyWXmV5AJZeZXlemzkLgJemPs9jN97Dm6/P5vUXXuGp2x9ixPr/1MpQJYnp02cy4l0rAzDiXSszY8ZzLY5IbzG5WWyTI+JnwKeAayNiSMn91cbDf7qLjffZDoCN99mOh/94FwCP/PFOVt/yvcTAAQwaugSrjlqHmZOmtDJUSeLaa/7EAQfsA8ABB+zDNb/7Y4sjUtWVuRR8X2AX4NTMfDEiRgJfK7G/Str79CNYY5v1WXKFZfjyrf/JX350KTefeTX7nHkkoz61A7Mmz+SyYlXUzElT+Mdf7uGzvz+F7OribxffwIxHnmnxd6BO8ctfnME/b78Nw4evyBOPTeSEE0/lvPMvbnVY6jDnnf8fbLf91qy00go8/OjNnHzSj/nhaWdx4S9+wkEH78vTT03moAO/2OowNU9FhwijzLHPYr7Nupl5XkSsDCydmY/35N7vrHFANf/E1TInTL2h1SGoQoYOWuQou9Qrr7z2ePRnf6//+oQ+/1077FPH9ev3sCClVW4i4jhgC2A94DxgMPBLYNuy+pQkSYuhTebI9LUy58DsDXwMeBUgM6cAy5TYnyRJUqlzbt7IzIyIBIiIpUrsS5IkLa6KVm7KTG4uKVZLLR8R/wIcBpxdYn+SJGlxtMkThftaqZUb4E/ASzTm3fxbZrr+T5IklarM5GYV4EvAXcC5NBIdSZLULio6LFXahOLM/DawLo1tFw4BHo2I70bEOmX1KUmSVOoTg7PxEJ1ni2MOsAJwaUR8v8x+JUlSD2T2/dEGynzOzZeBg4CZwM+Br2XmmxExAHgU+HpZfUuSpB6o6LBUmXNuVgQ+nplPNjdmZldE7FFiv5IkqcZKS24y87hFvPdgWf1KkqQeqmjlxl26JUlSpZQ5LCVJktqZD/GTJElVkl3tsbqprzksJUmSKsXkRpKkuurq6vujGxGxekRcHxEPRMT9xaNjiIjjI2JyRPy9OHZruucbETEpIh6OiI9014fDUpIkqT/NAY7OzLsiYhngzoiYt/fkjzLz1OaLI2IDYD9gQ+DdwJ8i4r2ZOXdhHZjcSJJUVy2YUJyZU4GpxfnLEfEgsOoibtkT
uDgzZwOPR8QkYCvgloXd4LCUJEnqMxExNiImNh1jF3HtmsCmwG1F0xcj4p6IODciVijaVgWebrrtGRadDJncSJJUW13Z50dmjsvMLZqOcQvqOiKWBi4DvpKZLwFnAesAo2hUdk7r7bflsJQkSXXVoicUR8RgGonNRZl5OUBmTmt6/2zgd8XLycDqTbevVrQtlJUbSZLUbyIigHOABzPzh03tI5su2xu4rzi/CtgvIoZExFrAusDti+rDyo0kSXXVmsrNtsCBwL0R8fei7ZvA/hExCkjgCeCzAJl5f0RcAjxAY6XVEYtaKQUmN5IkqR9l5l+BWMBb1y7inpOBk3vah8mNJEl1ldXcfsHkRpKkumrRhOKyOaFYkiRVipUbSZLqyl3BJUmS2p+VG0mS6qoFe0v1B5MbSZLqymEpSZKk9mflRpKkmkqXgkuSJLU/KzeSJNWVc24kSZLan5UbSZLqyqXgkiSpUhyWkiRJan9WbiRJqiuXgkuSJLU/KzeSJNVVRefcmNxIklRXFV0t5bCUJEmqFCs3kiTVVUWHpazcSJKkSrFyI0lSTVV1V3CTG0mS6sphKUmSpPZn5UaSpLqyciNJktT+rNxIklRXPsRPkiSp/Vm5kSSprio658bkRpKkmsqKJjcOS0mSpEqxciNJUl1ZuZEkSWp/Vm4kSaor95aSJEmV4rCUJElS+7NyI0lSXVm5kSRJan9WbiRJqqnMalZuTG4kSaorh6UkSZLan5UbSZLqqqKVm7ZNbk6YekOrQ1DF7D/yA60OQRVy8dTbWh2CpIVo2+RGkiSVy13BJUmSOoCVG0mS6qqilRuTG0mS6qqa+2Y6LCVJkqrFyo0kSTXlhGJJkqQOYOVGkqS6qmjlxuRGkqS6ckKxJElS+7NyI0lSTTmhWJIkqQNYuZEkqa4qOufG5EaSpJpyWEqSJKkDmNxIklRXXSUc3YiI1SPi+oh4ICLuj4gvF+0rRsQfI+LR4usKRXtExOkRMSki7omIzbrrw+RGkiT1pznA0Zm5AbA1cEREbAAcC4zPzHWB8cVrgF2BdYtjLHBWdx2Y3EiSVFPZ1fdHt31mTs3Mu4rzl4EHgVWBPYELissuAPYqzvcELsyGW4HlI2LkovpwQrEkSXXV4tVSEbEmsClwGzAiM6cWbz0LjCjOVwWebrrtmaJtKgth5UaSJPWZiBgbERObjrELuW5p4DLgK5n5UvN7mZlAr5dyWbmRJKmmejKMtNifmTkOGLeoayJiMI3E5qLMvLxonhYRIzNzajHsNL1onwys3nT7akXbQlm5kSRJ/SYiAjgHeDAzf9j01lXAwcX5wcCVTe0HFaumtgZmNQ1fLZCVG0mS6qo1c262BQ4E7o2Ivxdt3wROAS6JiMOBJ4F9i/euBXYDJgGvAYd214HJjSRJ6jeZ+VcgFvL2Tgu4PoEjFqcPkxtJkmqqjDk37cDkRpKkmqpqcuOEYkmSVClWbiRJqikrN5IkSR3Ayo0kSXWVC1u01NlMbiRJqimHpSRJkjqAlRtJkmoqu6o5LGXlRpIkVYqVG0mSaqqqc25MbiRJqqms6Goph6UkSVKlWLmRJKmmqjosZeVGkiRVipUbSZJqyqXgkiRJHcDKjSRJNZXZ6gjKYXIjSVJNOSwlSZLUAazcSJJUU1ZuJEmSOoCVG0mSasoJxZIkqVIclpIkSeoAVm4kSaopdwWXJEnqAFZuJEmqqaruCm5yI0lSTXU5LCVJktT+rNxIklRTTiiWJEnqAFZuJEmqKR/iJ0mS1AGs3EiSVFPuLSVJkirFYSlJkqQOYOVGkqSaqupD/BaZ3ETEy8C8Ebl5fwJZnGdmLltibJIkSYttkclNZi7TX4FIkqT+VfuH+EXE6Ig4tDgfHhFrlReWJEkqW2bfH+2gR8lNRBwHHAN8o2haAvhlWUFJkiT1Vk8nFO8NbArcBZCZUyLCIStJkjpYVScU93RY6o3MTIrJxRGxVHkhSZIk9V5PKzeXRMTPgOUj4l+A
w4CzywtLPXH2uNPYfbcPM33GTEZtulOrw1EHetfa7+aLPzn6rder/NMILvvhxTx4630cevLnGDxkMHPnzuWCb4/jsbsntTBSdaIhQ4Zw/Z8vY8iQIQwcNJDLL7+GE088rdVhqUlVJxRH9nD2T0TsDIwpXv4hM/9YWlTAoCVWbZNpSe1ru9Ef4JVXXuW88/7D5KYH9h/5gVaH0NZiwABOv+1sjt/rWA4/5fNcd87V3HPD39jkQ5ux+2f34rv7/VurQ2wrF0+9rdUhdISlllqSV199jUGDBvGXG67gqKOO47bb72p1WG3rzTcm92u2cdfqe/b579rNnr6y5RnT4jzE715gGI2hqXvLCUeLY8Jfb2ONNVZrdRiqiA23fT/Tn5rGc5NnkJkMW3pJAIYtsyQvTH++xdGpU7366msADB48iMGDB9PTf1BL70RPV0t9Brgd+DjwCeDWiDisB/ctFxE/ioiJxXFaRCz3zkKWVIatPzaaW66aAMBFJ57Lft88iB/fMo79v3Uwl3zvohZHp041YMAAJt7xB6ZMvoc/jb+R2+/4W6tDUpOujD4/2kFPJxR/Ddg0Mw/JzIOBzWksDe/OucBLwL7F8RJw3sIujoix8xKhrq5XexiapHdq4OBBbPbhLbn9mpsB2OnTu3DRd87jK9uM5aITz+Mz3/9CiyNUp+rq6mKLLcew5lpbsOUWm7Lhhuu1OiTVQE+Tm+eAl5tev1y0dWedzDwuMx8rjhOAtRd2cWaOy8wtMnOLAQNckCX1l0122JQn7nuMl2bOAmD0Pjsw8b9vBeD2a25mnU3WbWV4qoBZs17ihr/cxJgxO7Q6FDXJjD4/2sEik5uIOCoijgImAbdFxPHFA/1uBR7pwee/HhGjmz5vW+D1dxKwpL63zce245ar/vrW6xemv8D7tt4QgA22fT/PPjG1VaGpgw0fviLLLdfYgnDo0KF8eKftefjhf7Q4KtVBdxOK5z2o7x/FMc+VPfz8zwMXNM2zeQE4uOfhaVF++Ysz+Oftt2H48BV54rGJnHDiqZx3/sWtDksdZsiwIWy43Sac+82fvtV27jFn8unjD2fgwIG8OfsNzj32rBZGqE41cuQIzj3nxwwcOIAYMIBLL72aa6/9U6vDUpN2mSPT13q8FLxXHx4xhMYE5HWA5YFZNHYTP7G7e10Krr7mUnD1JZeCqwz9vRT81nd/vM9/12495fKWZ0w9WgoeESsDXwc2BIbOa8/MHbu59UrgRRrbNkzuZYySJEk91tPn3FwE/BrYA/gcjaGlGT24b7XM3KWXsUmSpBJVdViqp6ulVsrMc4A3M/MvmXkY0F3VBuDmiHh/78OTJElaPD2t3LxZfJ0aEbsDU4AVe3DfaOCQiHgcmA0EjTk3Gy92pJIkqU+1y9LtvtbT5OakYsXT0cB/AssCX+nBfbv2NjBJklSurlYHUJIeJTeZ+bvidBbwIYCI6Da5ycwnex+aJEnS4uvpnJsFOarPopAkSf0uiT4/uhMR50bE9Ii4r6nt+IiYHBF/L47dmt77RkRMioiHI+IjPfm+3klyU82BOkmSVKbzgQWtpP5RZo4qjmsBImIDYD8aj6LZBTgzIgZ218E7SW58yJ4kSR2sK/v+6E5m3gg838MQ9wQuzszZmfk4je2gturupkXOuYmIl1lwEhPAsB4GJkmS2lBXew3CfDEiDgImAkdn5gvAqjT2s5znmaJtkRZZucnMZTJz2QUcy2RmT1daSZKkmoiIsRExsekY24PbzqKxVdMoYCpw2juJwQRFkqSa6skE4MX+zMxxwLjFvGfavPOIOBuYt0p7MrB606Wr0YPtnN7JnBtJkqR3LCJGNr3cG5i3kuoqYL+IGBIRawHrArd393lWbiRJqqlWPMQvIn4F7AAMj4hngOOAHSJiFI15vk8AnwXIzPsj4hLgAWAOcERmzu2uD5MbSZLUbzJz/wU0n7OI608GTl6cPkxuJEmqqTLm3LQDkxtJkmqqqntLOaFYkiRV
ipUbSZJqysqNJElSB7ByI0lSTTmhWJIkVUpXNXMbh6UkSVK1WLmRJKmm2mxX8D5j5UaSJFWKlRtJkmoqWx1ASUxuJEmqKZ9zI0mS1AGs3EiSVFNd4YRiSZKktmflRpKkmqrqhGIrN5IkqVKs3EiSVFNVXS1lciNJUk25t5QkSVIHsHIjSVJNubeUJElSB7ByI0lSTVV1KbjJjSRJNeWEYkmSpA5g5UaSpJqq6nNurNxIkqRKsXIjSVJNOaFYkiRVihOKJUmSOoCVG0mSasoJxZIkSR3Ayo0kSTVl5UaSJKkDWLmRJKmmsqKrpUxuJEmqKYelJEmSOoCVG0mSasrKjSRJUgewciNJUk25t5QkSaoU95aSJEnqAFZuJEmqKScUS5IkdQArN5Ik1VRVKzcmN5Ik1VRVV0s5LCVJkirFyo0kSTXlUnBJkqQOYOVGkqSaquqEYis3kiSpUqzcSJJUU1VdLdW2yc2AqOgsJ7XMpdPvbHUIqpDXpkxodQjSO9ZV0fTGYSlJklQpbVu5kSRJ5XJCsSRJUgewciNJUk1Vc8aNyY0kSbXlsJQkSVIHMLmRJKmmuqLvj+5ExLkRMT0i7mtqWzEi/hgRjxZfVyjaIyJOj4hJEXFPRGzWk+/L5EaSJPWn84Fd5ms7FhifmesC44vXALsC6xbHWOCsnnRgciNJUk11kX1+dCczbwSen695T+CC4vwCYK+m9guz4VZg+YgY2V0fJjeSJNVUlnD00ojMnFqcPwuMKM5XBZ5uuu6Zom2RTG4kSVKfiYixETGx6Ri7OPdn5jvMk1wKLklSbZWxFDwzxwHjFvO2aRExMjOnFsNO04v2ycDqTdetVrQtkpUbSZLUalcBBxfnBwNXNrUfVKya2hqY1TR8tVBWbiRJqqlW7AoeEb8CdgCGR8QzwHHAKcAlEXE48CSwb3H5tcBuwCTgNeDQnvRhciNJUk21YvuFzNx/IW/ttIBrEzhicftwWEqSJFWKlRtJkmrKvaUkSZI6gJUbSZJqqhUTivuDlRtJklQpVm4kSaqpatZtTG4kSaotJxRLkiR1ACs3kiTVVFZ0YMrKjSRJqhQrN5Ik1VRV59yY3EiSVFM+50aSJKkDWLmRJKmmqlm3sXIjSZIqxsqNJEk1VdU5NyY3kiTVVFVXSzksJUmSKsXKjSRJNeUTiiVJkjqAlRtJkmrKOTeSJEkdwMqNJEk1VdU5NyY3kiTVlMNSkiRJHcDKjSRJNdWV1RyWsnIjSZIqxcqNJEk1Vc26jcmNJEm1VdWNMx2WkiRJlWLlRpKkmqrqc26s3EiSpEqxciNJUk1V9SF+JjeSJNWUE4olSZI6gJUbSZJqygnFkiRJHcDKjSRJNVXVCcVWbiRJUqVYuZEkqaayoruCm9xIklRTLgWXJEnqAFZuJEmqKScUS5IkdQArN5Ik1VRVH+JnciNJUk05oViSJKkDWLmRJKmmqvqcGys3kiSpUqzcSJJUU1VdCm5yI0lSTVV1tZTDUpIkqVKs3EiSVFNVXQpuctPhBgwYwK23XMvkKc+y996HtDocdbgjjzycQw7Zj8zk/vsfYuzYrzF79uxWh6U2Nnv2Gxx8xNd44803mTtnLjt/aDRf/MyBHHP897j/oUcZNGgQG23wXo77+pcYPGgQmcm///inTLjlDoYOHcLJ3zqaDdZ7T6u/DVWMw1Id7sgjD+ehhya1OgxVwLvfPYIvfOFQtt12D7bYYgwDBw7kk5/8aKvDUptbYonBnHv6KVx+wZlcesEZ3HTbndx934PsPuZDXP2rs7niF2cxe/YbXHb1dQBMuOUOnnpmCtf++hyO//qX+M6pP2nxd1BvmdnnRzswuelgq646kl133Ylzz/uvVoeiihg0aCDDhg1l4MCBDBs2jKlTp7U6JLW5iGDJJYcBMGfOHObMmUNEsP0HtyIiiAjev/56TJs+E4Dr/3orH9tlJyKCTTZan5dffoUZM59v5begCip1WCoihgD7AGs295WZJ5bZ
b12cdurxfOMbJ7PMMku3OhRVwJQp0/jxj8fxyCO38Prr/8P48RMYP35Cq8NSB5g7dy77HvYlnpo8hf0/vgcbb/i+t957c84crv79eI798ucAmDbjOd61yvC33h+xynCmzZjJysNX7Pe4Vd05N2VXbq4E9gTmAK82HQsUEWMjYmJETOyau9DLBOy2205MnzGTv/3t3laHoopYfvll2WOPMay//mjWXnsrllpqGPvtt3erw1IHGDhwIJddcAbjr/gF9z7wCI8+9sRb75106hlsvslGbD5qo9YFqIXKEv5rB2VPKF4tM3fp6cWZOQ4YB7DEkNXa40+oTX1wmy3ZY/cx7PKRHRk6dAjLLrsM5593Oocc+qVWh6YOteOOo3niiaeZWQwR/Pa317H11ptz8cVXtDgydYpll1marTbbmL/eOpF1116TM8+9iBdenMVx3/32W9eMWHklni2GqACmTZ/JiJWHL+jjpF4ru3Jzc0S8v+Q+aunb/+8U1l5nS9673jZ8+sAjuP6Gm0xs9I48/fQUttpqU4YNGwrAhz60LQ8/7GR1LdrzL7zISy+/AsD/zJ7NLXf8jbXWWJ1Lr7qOm267k++fcAwDBvzvr5odRm/NVdeNJzO5+74HWXrppRySaqGuzD4/eiIinoiIeyPi7xExsWhbMSL+GBGPFl9X6O33VXblZjRwSEQ8DswGAsjM3LjkfiUtpjvu+DtXXHEtt9xyDXPmzOXuu+/nnHOcrK5Fm/HcC3zrpFOZ29VFdiUf2XE7dtj2A2yy/e6MHLEKB4w9CoAP//MH+fxhB7D9Nlsy4ZY72HXfwxg2dCjf+ea/tvg7UAt9KDNnNr0+FhifmadExLHF62N688FR5rKtiFhjQe2Z+WR39zospb42cMDAVoegCnnp6etbHYIqaPDwtaM/+9tu1Z36/HfthMnju/0eIuIJYIvm5CYiHgZ2yMypETESuCEz1+tNDGVXbj4D3AjcnJnOEJYkqY20cLVUAn+IiAR+Vsy5HZGZU4v3nwVG9PbDy05uHgP2B06PiJeBCcCNmXllyf1KkqQWiIixwNimpnFF8tJsdGZOjohVgD9GxEPNb2ZmFolPr5Sa3GTmecB5EfEuYF/gqzS+4WXK7FeSJHWvjMpN88rnRVwzufg6PSKuALYCpkXEyKZhqem9jaHU1VIR8fOIuBk4i0Yi9Qmg17OfJUlSZ4uIpSJimXnnwBjgPuAq4ODisoNpPCuvV8oelloJGAi8CDwPzMzMOSX3KUmSeqBFe0GNAK6ICGjkIf+VmddFxB3AJRFxOPAkjRGfXil7WGpvgIhYH/gIcH1EDMzM1crsV5Ikda8VE4oz8zFgkwW0Pwfs1Bd9lL231B7AdsD2wPLAn2lMKpYkSSpF2cNSu9BIZv4jM6eU3JckSVoM7bIXVF8re1jqixExAtgyIjYDbs/MXs9+liRJ6k7Zq6U+CdwOfJLGxKDbIuITZfYpSZJ6JjP7/GgHZQ9LfRvYcl61JiJWBv4EXFpyv5IkqabKTm4GzDcM9Rzl70QuSZJ6oIXbL5Sq7OTmuoj4PfCr4vWngGtL7lOSJPVAuwwj9bWyJxR/LSL2AbYtmsZl5hVl9ilJkuqt7MoNmXkZcFnZ/UiSpMVT1WGpsldLfTwiHo2IWRHxUkS8HBEvldmnJEmqt7IrN98HPpqZD5bcjyRJWkw+xK93ppnYSJLUnrqcUNwrEyPi18BvgdnzGjPz8pL7lSRJNdLEPwMAAAg/SURBVFV2crMs8BowpqktAZMbSZJazGGp3jk6M59vboiItUruU5Ik1VjZTwu+OiKWnfciItYHri65T0mS1ANdmX1+tIOyk5vv0khwlo6IzWnsKfXpkvuUJEk9kCX81w7KfkLxNRExGPgDsAywd2Y+UmafkiSp3kpJbiLiP+Ft6dtywD+AL0YEmfmlMvqVJEk91y7DSH2trMrNxPle31lSP5IkSW9TSnKTmReU8bmSJKnvtMscmb5W6pybiNgWOB5Y
o+grgMzMtcvsV5Ik1VfZz7k5B/hXGsNSc0vuS5IkLQbn3PTOrMz875L7kCRJveCwVO9cHxE/oLHdQvPeUneV3K8kSaqpspObDxRfNy++Bo0l4juW3K8kSepGZlerQyhF2cnNDQtoq2YNTJIktYWyk5tXms6HAnsAD5bcpyRJ6oGuitYbyt5+4bTm1xFxKvD7MvuUJEk9kxVdLVX2xpnzWxJYrZ/7lCRJNVL2Q/zu5X/n2AwEVgZOLLNPSZLUMw5L9c4eTedzgGmZOafkPiVJUo2VPefmyTI/X5Ik9V5V59yUXbmRJEltqqrbL/T3hGJJkqRSWbmRJKmmqrq3lJUbSZJUKVZuJEmqqapOKLZyI0mSKsXKjSRJNeVD/CRJUqU4LCVJktQBrNxIklRTPsRPkiSpA1i5kSSppqo658bkRpKkmqrqaimHpSRJUqVYuZEkqaaqOixl5UaSJFWKlRtJkmqqqkvBTW4kSaqpdEKxJElS+7NyI0lSTVV1WMrKjSRJqhQrN5Ik1ZRLwSVJkjqAlRtJkmqqqqulTG4kSaoph6UkSZI6gJUbSZJqysqNJElSB7ByI0lSTVWzbgNR1ZJUnUTE2Mwc1+o4VA3+PKmv+TOl/uawVDWMbXUAqhR/ntTX/JlSvzK5kSRJlWJyI0mSKsXkphocy1Zf8udJfc2fKfUrJxRLkqRKsXIjSZIqxeRGktQrEXFIRPyk1XFI8zO5kSRJlWJy0+Yi4sSI+ErT65Mj4ssR8bWIuCMi7omIE4r3loqIayLi7oi4LyI+1brI1QkiYs2IeDAizo6I+yPiDxExLCJGRcStxc/XFRGxQqtjVfmKn4f7ml5/NSKOj4gbIuJ7EXF7RDwSEdst4N7dI+KWiBgeEedHxOkRcXNEPBYRnyiuiYj4QfH3073z/o6KiDMi4mPF+RURcW5xfljxd94Cf077509Fncjkpv2dCxwEEBEDgP2AZ4F1ga2AUcDmEbE9sAswJTM3ycyNgOtaE7I6zLrAGZm5IfAisA9wIXBMZm4M3Asc18L41B4GZeZWwFeY7+chIvYGjgV2y8yZRfNIYDSwB3BK0fZxGn9nbQJ8GPhBRIwEJgDzEqZVgQ2K8+2AG4vzBf2cSgtkctPmMvMJ4LmI2BQYA/wN2LLp/C7gfTT+x78X2Ln4F9Z2mTmrNVGrwzyemX8vzu8E1gGWz8y/FG0XANu3JDK1k8uLr3cCaza17wgcA+yemS80tf82M7sy8wFgRNE2GvhVZs7NzGnAX2j8fTYB2C4iNgAeAKYVSc82wM3FvfP/nDbHIL2NG2d2hp8DhwDvolHJ2Qn498z82fwXRsRmwG7ASRExPjNP7M9A1ZFmN53PBZZvVSBquTm8/R+9Q5vO5/2czOXtvzv+AawNvBeYuIDrAWJRnWbm5IhYnkb1+UZgRWBf4JXMfDkiVuL//pw6LKWFsnLTGa6g8T/9lsDvi+OwiFgaICJWjYhVIuLdwGuZ+UvgB8BmrQpYHW0W8ELTvIoDafwLW9U3DVglIlaKiCE0hpS68yTFUGZEbNjNtROAT0XEwIhYmUZF8PbivVtpDHndWFz31eKrtNis3HSAzHwjIq4HXszMucAfImJ94JaIAHgF+DTwHhpj2F3Am8DnWxWzOt7BwE8jYkngMeDQFsejfpCZb0bEiTQSjsnAQz2876GIOAD4TUR8dBGXXkFjqOluIIGvZ+azxXsTgDGZOSkinqRRvTG5Ua/4hOIOUEwkvgv4ZGY+2up4JElqZw5Ltbligt0kYLyJjSRJ3bNyI0mSKsXKjSRJqhSTG0mSVCkmN5IkqVJMbqQOFBFzI+LvxR49vymWbPf2s85v2vvn58Uk9oVdu0NEfLDp9eci4qDe9i1JZTC5kTrT65k5qthD7A3gc81vRkSvnmGVmZ8pHpe/MDsAbyU3mfnTzLywN31JUllMbqTONwF4T1FVmRARVwEPFE+B/UHT7vGfhbd2Zv5JRDwcEX8CVpn3QcXuz1sU57tE
xF3FLvPjI2JNGknUvxZVo+2KHaO/Wly/wJ3Ee7KjtCT1JZ9QLHWwokKzK/+7A/xmwEaZ+XhEjAVmZeaWxaP0b4qIPwCbAuvR2Hl5BI2NCs+d73NXBs4Gti8+a8XMfD4ifkpjv59Ti+t2arrtQuDIzPxL8ZTb42g8Th+KHaUjYrei/cN9/WchSfOY3EidaVhEzNsheQJwDo3hotsz8/GifQyw8bz5NMByNHaP355iZ2ZgSkT8eQGfvzVw47zPysznFxVMRCzH/91J/DdNlyxsR2lJ6nMmN1Jnej0zRzU3FPuMvdrcRKOS8vv5rtut/PD+j4XtKC1Jfc45N1J1/R74fEQMBoiI90bEUjR2XZ63M/NI4EMLuPdWYPuIWKu4d8Wi/WVgmfkvzkx3EpfUNvwXlFRdP6cxBHRXNMo6M4C9aOzMvCONuTZPAbfMf2Nmzijm7FxebNw6HdgZuBq4NCL2BI6c7zZ3EpfUFtxbSpIkVYrDUpIkqVJMbiRJUqWY3EiSpEoxuZEkSZViciNJkirF5EaSJFWKyY0kSaoUkxtJklQp/x8iVb+UZ6psWgAAAABJRU5ErkJggg==\n", + "text/plain": [ + "
    " + ] + }, + "metadata": { + "tags": [], + "needs_background": "light" + } + } + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "mQGi_mzPcLvl" + }, + "source": [ + "## Run inference on an audio file\n", + "\n", + "Finally, verify the model's prediction output using an input audio file of someone saying \"yes.\" How well does your model perform?" + ] + }, + { + "cell_type": "code", + "metadata": { + "id": "zRxauKMdhofU", + "colab": { + "base_uri": "https://localhost:8080/", + "height": 333 + }, + "outputId": "de931674-31e5-42af-c7f2-30420aad5dbe" + }, + "source": [ + "sample_file = '/content/data/mini_speech_commands/yes/0132a06d_nohash_1.wav'\n", + "\n", + "sample_ds = preprocess_dataset([str(sample_file)])\n", + "for spectrogram, label in sample_ds.batch(1):\n", + " prediction = model(spectrogram)\n", + " print(len(commands))\n", + " print(tf.nn.softmax(prediction[0]))\n", + " print(tf.nn.softmax(prediction))\n", + " plt.bar(commands, tf.nn.softmax(prediction[0]))\n", + " plt.title(f'Predictions for \"{commands[label[0]]}\"')\n", + " plt.show()" + ], + "execution_count": 141, + "outputs": [ + { + "output_type": "stream", + "text": [ + "3\n", + "tf.Tensor([0.57611686 0.21194157 0.21194157], shape=(3,), dtype=float32)\n", + "tf.Tensor([[0.57611686 0.21194157 0.21194157]], shape=(1, 3), dtype=float32)\n" + ], + "name": "stdout" + }, + { + "output_type": "display_data", + "data": { + "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAAXQAAAEICAYAAABPgw/pAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4yLjIsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy+WH4yJAAAT+klEQVR4nO3dfbRddX3n8feHRNABZxjl2oEkEEZia3Qs2Gu0q6AMxU6QAq0PFZa2plqy6DSrdGzV2AfGQW2xzGpnzZgZxQ5LqqNIHXUyQ5zU+gSoQC4IYsDINYJJFAyPFR+AwHf+ODvO4XJvzklykpv88n6tdVb2w2/v/b3n4XN/97fP3klVIUna/x002wVIkkbDQJekRhjoktQIA12SGmGgS1IjDHRJaoSBrlmV5INJ3tVNn5Rkwy7u531J/my01Q085q8n2ZTkoSQn7M1jS9Mx0DVQkjuS/LgLrru7ED5s1Mepqqur6meHqGdZkmumbHteVb1z1DUN8B+BFVV1WFV9dXd3luQLSU5O8o4k79j98mY8zrLuNVyY5I49dRztfQa6hnVGVR0GvBAYB/50aoMkc/d6VbPrGGD9rmyYZM6Ia5EMdO2cqtoCfBp4PkCSSvJ7SW4Hbu+W/WqSm5I8kOTLSV6wffskJyS5MckPknwMeGrfupOTbO6bX5DkE0m2Jrk3yXuTPBd4H/CL3V8MD3Rtfzp0082fm2QyyX1JVic5qm9dJTkvye1djauSpFt3XJIvJnkwyT1djU+Q5JAkDwFzgJuTfKtb/tyul/1AkvVJzuzb5oNJ/luSNUl+CPzrYZ7vJF9Pckbf/FO6uk7o5l/SPccPJLk5ycl9bZcl2dg9199O8rphjqn9WFX58LHDB3AHcGo3vYBer/Sd3XwBnwGeATwNOAH4PvBieoH3hm77Q4CDgTuBfwc8BXg18Cjwrm5fJwObu+k5wM3AXwOH0gv+E7t1y4BrptT4wb79nALcQ++viUOA/wJc1de2gP8DHA4cDWwFlnbrPgr8Cb3Ozk+POcPzUsBx3fRTgEngj7uf8xTgB8DP9tX3IPBL2/c95HP/VuBjffNnAbd00/OAe4FXdPt8eTc/1j1n/9h3/COB5832e8nHnn3YQ9ewPtX1hq8Bvgj8ed+6v6iq+6rqx8By4P1VdV1VPVZVlwEPAy/pHk8B/lNVPVpVHwfWzXC8JcBRwFuq6odV9ZOqumaGtlO9Dri0qm6sqoeBt9Pr0S/sa3NRVT1QVd8BPg8c3y1/lN5QylE7ecyXAId1+32kqj5H75fGOX1t/ldVfamqHq+qnwy53w8Dr0jyT7v53wQ+1E2/HlhTVWu6fX4GmKAX8ACPA89P8rSq+l5V7dLwkPYfBrqG9WtVdXhVHVNV/7YL7+029U0fA/xhNwTwQPdLYAG9cD4K2FJV/XeEu3OG4y0A7qyqbbtQ61H9+62qh+j1XOf1tbmrb/pH9MIYej3iANd3wyZv3Iljbqqqx/uW3TnlmJvYSVX1XeBLwKuSHA6cBvyPbvUxwGumPNcnAkdW1Q+B1wLnAd9LcmWSn9vZ42v/cqCdxNKe0R/Qm4B3V9W7pzZK8jJgXpL0hfrRwLem2ecm4Ogkc6cJ9UG3CP0uvbDbftxDgWcCWwZsR1XdBZzbbXci8A9JrqqqySGOuSDJQX2hfjTwzZ2oeyaXAb9D7/P6leqdx4Dec/Shqjp3uo2qai2wNsnTgHcBHwBO2sUatB+wh65R+wBwXpIXp+fQJKcneTrwFWAb8Pvdyb1X0htamc71wPeAi7p9PDXJL3Xr7gbmJzl4hm0/Cvx2kuOTHEJveOi6qrpjUPFJXpNkfjd7P70QfnwHm2x3Hb2e/lu7n+1k4Azg8iG2HeRT9M4HnA/8bd/yDwNnJPk3SeZ0z9HJSeYn+ZkkZ3W/zB4GHhry59B+zEDXSFXVBL0e7nvpBeIkvZOYVNUjwCu7+fvoDQl8Yob9PEYvEI8DvgNs7to
DfI7eidm7ktwzzbb/APwZ8D/p/VJ4NnD2kD/Ci4Drum+xrAbOr6qNgzbqfrYz6A2J3AP8V+C3quobQx53R/v+Mb2f5Vj6nq+q2kTvJOkf0zuxuwl4C73P9UHAm+n95XAf8DLgd3e3Fu3b8sThTEn7oiQXAM+pqtfPdi3adzmGLu3jkjwDeBO9b7hIM3LIRdqHJTmX3lDKp6vqqtmuR/s2h1wkqRFD9dCTLE2yobuUeuUMbX4jya3dd3c/MtoyJUmDDOyhp3cToW/Su6x4M70r+86pqlv72iwCrgBOqar7kzyrqr6/o/0eccQRtXDhwt0sX5IOLDfccMM9VTU23bphToouASa3f3UryeX0vip1a1+bc4FVVXU/wKAwB1i4cCETExNDHF6StF2Sma6uHmrIZR5PvGR5M0+8nBngOcBzknwpybVJls5QyPIkE0kmtm7dOsShJUnDGtW3XOYCi+jdLe8c4APdfSeeoKouqarxqhofG5v2LwZJ0i4aJtC30LtR0nbzefI9MTYDq7s76H2b3pj7otGUKEkaxjCBvg5YlOTY7t4ZZ9O7JLrfp+j1zklyBL0hmIGXS0uSRmdgoHd3ulsBrAVuA66oqvVJLuz7H1nWAvcmuZXevaXfUlX37qmiJUlPNmsXFo2Pj5ffcpGknZPkhqoan26dl/5LUiMMdElqhIEuSY3YL2+fu3DllbNdQrPuuOj02S5B0i6yhy5JjTDQJakRBrokNcJAl6RGGOiS1AgDXZIaYaBLUiMMdElqhIEuSY0w0CWpEQa6JDXCQJekRhjoktQIA12SGmGgS1IjDHRJaoSBLkmNMNAlqREGuiQ1wkCXpEYY6JLUCANdkhphoEtSI4YK9CRLk2xIMplk5TTrlyXZmuSm7vE7oy9VkrQjcwc1SDIHWAW8HNgMrEuyuqpundL0Y1W1Yg/UKEkawjA99CXAZFVtrKpHgMuBs/ZsWZKknTVMoM8DNvXNb+6WTfWqJF9L8vEkC6bbUZLlSSaSTGzdunUXypUkzWRUJ0X/N7Cwql4AfAa4bLpGVXVJVY1X1fjY2NiIDi1JguECfQvQ3+Oe3y37qaq6t6oe7mb/BviF0ZQnSRrWMIG+DliU5NgkBwNnA6v7GyQ5sm/2TOC20ZUoSRrGwG+5VNW2JCuAtcAc4NKqWp/kQmCiqlYDv5/kTGAbcB+wbA/WLEmaxsBAB6iqNcCaKcsu6Jt+O/D20ZYmSdoZXikqSY0w0CWpEQa6JDXCQJekRhjoktQIA12SGmGgS1IjDHRJaoSBLkmNMNAlqREGuiQ1wkCXpEYY6JLUCANdkhphoEtSIwx0SWqEgS5JjTDQJakRBrokNcJAl6RGGOiS1AgDXZIaYaBLUiMMdElqhIEuSY0w0CWpEQa6JDViqEBPsjTJhiSTSVbuoN2rklSS8dGVKEkaxsBATzIHWAWcBiwGzkmyeJp2TwfOB64bdZGSpMGG6aEvASaramNVPQJcDpw1Tbt3Au8BfjLC+iRJQxom0OcBm/rmN3fLfirJC4EFVXXljnaUZHmSiSQTW7du3eliJUkz2+2TokkOAv4K+MNBbavqkqoar6rxsbGx3T20JKnPMIG+BVjQNz+/W7bd04HnA19IcgfwEmC1J0Ylae8aJtDXAYuSHJvkYOBsYPX2lVX1YFUdUVULq2ohcC1wZlVN7JGKJUnTGhjoVbUNWAGsBW4Drqiq9UkuTHLmni5QkjScucM0qqo1wJopyy6Yoe3Ju1+WJGlneaWoJDXCQJekRhjoktQIA12SGmGgS1IjDHRJaoSBLkmNMNAlqREGuiQ1wkCXpEYY6JLUCANdkhphoEtSIwx0SWqEgS5JjTDQJakRBrokNcJAl6RGGOiS1AgDXZIaYaBLUiMMdElqhIEuSY0w0CWpEQa6JDXCQJekRhjoktSIoQI9ydIkG5JMJlk5zfrzktyS5KYk1yRZPPpSJUk7MjDQk8wBVgGnAYuBc6YJ7I9U1b+qquOBvwT+auS
VSpJ2aJge+hJgsqo2VtUjwOXAWf0Nquof+2YPBWp0JUqShjF3iDbzgE1985uBF09tlOT3gDcDBwOnTLejJMuB5QBHH330ztYqSdqBkZ0UrapVVfVs4G3An87Q5pKqGq+q8bGxsVEdWpLEcIG+BVjQNz+/WzaTy4Ff252iJEk7b5hAXwcsSnJskoOBs4HV/Q2SLOqbPR24fXQlSpKGMXAMvaq2JVkBrAXmAJdW1fokFwITVbUaWJHkVOBR4H7gDXuyaEnSkw1zUpSqWgOsmbLsgr7p80dclyRpJ3mlqCQ1wkCXpEYY6JLUCANdkhphoEtSIwx0SWqEgS5JjTDQJakRBrokNcJAl6RGGOiS1AgDXZIaYaBLUiMMdElqhIEuSY0w0CWpEQa6JDXCQJekRhjoktQIA12SGmGgS1IjDHRJaoSBLkmNMNAlqREGuiQ1wkCXpEbMne0CdGBYuPLK2S6hWXdcdPoe2a+v2Z6zp16zoXroSZYm2ZBkMsnKada/OcmtSb6W5LNJjhl9qZKkHRkY6EnmAKuA04DFwDlJFk9p9lVgvKpeAHwc+MtRFypJ2rFheuhLgMmq2lhVjwCXA2f1N6iqz1fVj7rZa4H5oy1TkjTIMIE+D9jUN7+5WzaTNwGfnm5FkuVJJpJMbN26dfgqJUkDjfRbLkleD4wDF0+3vqouqarxqhofGxsb5aEl6YA3zLdctgAL+ubnd8ueIMmpwJ8AL6uqh0dTniRpWMP00NcBi5Icm+Rg4GxgdX+DJCcA7wfOrKrvj75MSdIgAwO9qrYBK4C1wG3AFVW1PsmFSc7sml0MHAb8XZKbkqyeYXeSpD1kqAuLqmoNsGbKsgv6pk8dcV2SpJ3kpf+S1AgDXZIaYaBLUiMMdElqhIEuSY0w0CWpEQa6JDXCQJekRhjoktQIA12SGmGgS1IjDHRJaoSBLkmNMNAlqREGuiQ1wkCXpEYY6JLUCANdkhphoEtSIwx0SWqEgS5JjTDQJakRBrokNcJAl6RGGOiS1AgDXZIaYaBLUiOGCvQkS5NsSDKZZOU061+a5MYk25K8evRlSpIGGRjoSeYAq4DTgMXAOUkWT2n2HWAZ8JFRFyhJGs7cIdosASaraiNAksuBs4Bbtzeoqju6dY/vgRolSUMYZshlHrCpb35zt2ynJVmeZCLJxNatW3dlF5KkGezVk6JVdUlVjVfV+NjY2N48tCQ1b5hA3wIs6Juf3y2TJO1Dhgn0dcCiJMcmORg4G1i9Z8uSJO2sgYFeVduAFcBa4Dbgiqpan+TCJGcCJHlRks3Aa4D3J1m/J4uWJD3ZMN9yoarWAGumLLugb3odvaEYSdIs8UpRSWqEgS5JjTDQJakRBrokNcJAl6RGGOiS1AgDXZIaYaBLUiMMdElqhIEuSY0w0CWpEQa6JDXCQJekRhjoktQIA12SGmGgS1IjDHRJaoSBLkmNMNAlqREGuiQ1wkCXpEYY6JLUCANdkhphoEtSIwx0SWqEgS5JjTDQJakRQwV6kqVJNiSZTLJymvWHJPlYt/66JAtHXagkaccGBnqSOcAq4DRgMXBOksVTmr0JuL+qjgP+GnjPqAuVJO3YMD30JcBkVW2sqkeAy4GzprQ5C7ism/448MtJMroyJUmDzB2izTxgU9/8ZuDFM7Wpqm1JHgSeCdzT3yjJcmB5N/tQkg27UvR+6AimPBf7qvi3FexHrxf4mnUOpNfsmJlWDBPoI1NVlwCX7M1j7guSTFTV+GzXoeH4eu1/fM16hhly2QIs6Juf3y2btk2SucA/A+4dRYGSpOEME+jrgEVJjk1yMHA2sHpKm9XAG7rpVwOfq6oaXZmSpEEGDrl0Y+IrgLXAHODSqlqf5EJgoqpWA/8d+FCSSeA+eqGv/++AG2baz/l67X98zYDYkZakNnilqCQ1wkCXpEYY6JL2WUmWJXnvbNexvzDQJakRBvpuSnJhkj/om393kvOTvCXJuiRfS/IfunWHJrkyyc1Jvp7ktbN
XuQCSLExyW5IPJFmf5O+TPC3J8Umu7V6/Tyb557Ndawu65/vrffN/lOQdSb6Q5D1Jrk/yzSQnTbPt6Um+kuSIJB9M8p+TfDnJxiSv7tokycXd5+uW7Z+xJKuSnNlNfzLJpd30G7vP7LTvg73zrIyOgb77LgV+CyDJQfS+snkXsIjefXCOB34hyUuBpcB3q+rnq+r5wP+dnZI1xSJgVVU9D3gAeBXwt8DbquoFwC3Av5/F+g4Uc6tqCfAHTHm+k/w6sBJ4RVVtv8T/SOBE4FeBi7plr6T3mft54FTg4iRHAlcD239JzKN3o0G6ZVd109O9D/YrBvpuqqo7gHuTnAD8CvBV4EV90zcCP0fvzXIL8PKuJ3JSVT04O1Vrim9X1U3d9A3As4HDq+qL3bLLgJfOSmUHlk90/94ALOxbfgrwNuD0qrq/b/mnqurxqroV+Jlu2YnAR6vqsaq6G/givc/j1cBJ3Z1ibwXu7oL+F4Evd9tOfR/017Bf2Kv3cmnY3wDLgH9Br8f+y8BfVNX7pzZM8kLgFcC7kny2qi7cm4VqWg/3TT8GHD5bhRwAtvHEjuRT+6a3vw6P8cRs+hbwL4HnABPTtAfY4d1dq2pLksPp/ZV8FfAM4DeAh6rqB0meyZPfBw65HKA+Se+N8iJ6V9SuBd6Y5DCAJPOSPCvJUcCPqurDwMXAC2erYO3Qg8D9feO4v0mvp6fddzfwrCTPTHIIveGSQe6kGwZL8rwBba8GXptkTpIxen9ZXd+tu5becM5VXbs/6v5thj30EaiqR5J8Hnigqh4D/j7Jc4GvdLeFfwh4PXAcvTG9x4FHgd+drZo10BuA9yX5J8BG4LdnuZ4mVNWj3W1Drqd3U79vDLndN5K8Dvi7JGfsoOkn6Q2j3AwU8NaquqtbdzXwK1U1meROer30pgLdS/9HoDsZeiPwmqq6fbbrkXRgcshlN3UnWSaBzxrmkmaTPXRJaoQ9dElqhIEuSY0w0CWpEQa6JDXCQJekRvw/W6G8dwpGonMAAAAASUVORK5CYII=\n", + "text/plain": [ + "
    " + ] + }, + "metadata": { + "tags": [], + "needs_background": "light" + } + } + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "VgWICqdqQNaQ" + }, + "source": [ + "You can see that your model very clearly recognized the audio command as \"yes.\"" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "NNPrzNrcDqq8" + }, + "source": [ + "##Run TF inference on multiple audio files" + ] + }, + { + "cell_type": "code", + "metadata": { + "id": "GmyOg4kwDEVH", + "colab": { + "base_uri": "https://localhost:8080/", + "height": 1000 + }, + "outputId": "b770a9ce-6797-42bc-8b2f-06523463c84a" + }, + "source": [ + "import glob\n", + "import pandas as pd\n", + "pd.set_option(\"display.precision\", 2)\n", + "\n", + "txtfiles = []\n", + "for file in glob.glob(\"/content/data/mini_speech_commands/unknown/*.wav\"):\n", + " txtfiles.append(file)\n", + "\n", + "for i in range(25):\n", + " print(txtfiles[i])\n", + " sample_ds = preprocess_dataset([str(txtfiles[i])])\n", + " for spectrogram, label in sample_ds.batch(1):\n", + " prediction = model(spectrogram)\n", + " plt.bar(commands, tf.nn.softmax(prediction[0]))\n", + " plt.title(f'Predictions for \"{commands[label[0]]}\"')\n", + " plt.show()" + ], + "execution_count": 150, + "outputs": [ + { + "output_type": "stream", + "text": [ + "/content/data/mini_speech_commands/unknown/4c77947d_nohash_0.wav\n" + ], + "name": "stdout" + }, + { + "output_type": "display_data", + "data": { + "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAAXQAAAEICAYAAABPgw/pAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4yLjIsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy+WH4yJAAAUd0lEQVR4nO3df7RdZX3n8feHxGAFV7FytZIEQyE6RseCvUa7BixF7ApSwapUGDsVrWbsNFNsrRqnlnFSO0WZZWdNTSvYYUGdkUAd0XRIh7Yqv6pALhTUgMg1gkkoGH6O6AgEvvPH2ZeeXO7NPUlOuOTJ+7XWWdk/nr339+xz7ufs8+yzd1JVSJL2fvvNdgGSpOEw0CWpEQa6JDXCQJekRhjoktQIA12SGmGga7ckOT/JR7vhY5Lcuovr+VSSPxhudTNu81eSbEryUJKjnsptDypJJTlituvQ3mHubBegPS/J7cDzgceAHwJ/A6yoqoeGuZ2qugp48QD1nA68q6qO7lv2PcOsZUD/hd5++OIwVpbkcuAjwLEAVfWRYaz36SrJ+cDl3eixVXX6rBUjwCP0fckbqupA4BXAKPDhyQ2S7Gsf8C8ENuzKgknmDLkWabcZ6PuYqtpC7wj9ZfDEV/rfSnIbcFs37ZeT3JjkgSRfTfLyieWTHJXkhiQ/SHIR8My+eccm2dw3vjDJ55NsTXJvkk8meQnwKeDnu66OB7q2T3TddOPvTjKe5L4ka5Mc0jevkrwnyW1djauTpJt3RJIrkjyY5J6uxu0k2T/JQ8Ac4KYk3+mmvyTJ5d06NyQ5qW+Z85P8eZJ1SX4I/OIg+zvJ6UmunjTtiW6Ubr2rk1za7dNrkxw+zbqO7rqIjh1gP+yX5MNJ7kjy/SR/meQnu3kXJHlfNzx/4j3QjR/e7fP9Jl7PJO/r1vFPSd4xyPPWLKkqH40/gNuB47vhhfSOSv+wGy/g74CfAn4COAr4PvAqeoH39m75/YF5wB3A7wDPAN4CPAp8tFvXscDmbngOcBPwJ8AB9IL/6G7e6cDVk2o8v289xwH30Ps2sT/wp8CVfW0L+N/AQcChwFZgWTfvQuD36R2sPLHNafZLAUd0w88AxoH/0D3P44AfAC/uq+9B4F9NrHvAfT/Vc+3f7vnAvcBSel2g/xNYM7ktsAzYBCwdcD+8s3s+PwMcCHwe+EzfvL/uhv818B3gor55X+x7PbcBq7r983rgR8BzZvs97WPqh0fo+44vdEfDVwNXAP+5b94fV9V9VfX/gOXAOVV1bVU9VlUXAA8Dr+4ezwD+a1U9WlWfA9ZPs72lwCHA+6vqh1X146q6epq2k70NOK+qbqiqh4EP0TuiX9TX5qyqeqCqvgd8BTiym/4ova6UQ3Zym6+mF3xnVdUjVfVlemF5Wl+bL1bVP1TV41X14wHXO4hLquq6qtpGL9CPnDT/FOAc4ISqum7SvOn2w9uAT1TVxuqdK/kQcGrXrXYFcHSS/YDXAB+n90EF8Avd/AmPAqu613sd8BADnCfR7DDQ9x1vrKqDquqFVfXvuvCesKlv+IXA+7qv8A90HwIL6YXzIcCWquq/o9sd02xvIXBHF1I765D+9XaBdC8wv6/NXX3DP6IXxgAfAAJc13WbvHMntrmpqh7vm3bHpG1uYs+Y7rlMeC9wcVV9cyeW3W4fdsNzgedX1XfonRw/EjiG3gfXnUlezJMD/d5Jr+FU9elpwkAX9L66T9gE/FEX/hOPZ1XVhcA/AfMn+mk7h06zzk3AodOcaJ3pFp930vtgASDJAcBzgS0zPpGqu6rq3VV1CPBvgT8b8Gd/dwILu6PWCYdO2uau3Jr0h8CzJkaS/PQurOMU4I1JztiJZbbbh/Seyzbg7m78CnpdZvOqd17lCnrda88BbtyFGvU0YKBrsk8D70nyqvQckOTEJM8GvkYvFH47yTOSvIle18pUrqP3AXBWt45nJpn4Wn83sCDJvGmWvRB4R5I
jk+xPr3vo2qq6fabik5ySZEE3ej+9EH58B4tMuJbe0ecHuud2LPAGYM0Ay+7ITcBLu+fyTHo/a9xZdwKvBc5I8psDLnMh8DtJDktyIL19eFHf0fYVwArgym788m786qp6bBdq1NOAga7tVNUY8G7gk/QCcZzeiT2q6hHgTd34fcBb6Z1sm2o9j9ELxCOA7wGbu/YAX6Z3YvauJPdMsezfA38A/C96HwqHA6cO+BReCVzb/YplLXBGVW2caaHuub0BOIHeCdk/A369qr414HanW++36Z1U/Ht6vyIatE9/8nq+Ry/UVyZ51wCLnAd8hl5gfxf4MfDv++ZfATybfw70q+l9k7gS7bWyfXeoJGlv5RG6JDXCQJekRhjoktSIgQI9ybIkt3aXYq+cps2vJrm5++3vZ4dbpiRpJjOeFE3vJkTfBl5H75cK64HTqurmvjaLgYuB46rq/iTPq6rv72i9Bx98cC1atGg3y5ekfcv1119/T1WNTDVvkLvrLQXGJ376lWQNcDJwc1+bdwOrq+p+gJnCHGDRokWMjY0NsHlJ0oQk012dPVCXy3y2v+R5M9tfDg3wIuBFSf4hyTVJlk1TyPIkY0nGtm7dOsCmJUmDGtZJ0bnAYnp3ZzsN+HSSgyY3qqpzq2q0qkZHRqb8xiBJ2kWDBPoWejdamrCAJ99TYzOwtrsj23fp9bkvHk6JkqRBDBLo64HF3T0h5tG7BHvtpDZfoPtvt5IcTK8LZsbLrSVJwzNjoHc381kBXAbcQu82nhuSrOr7H10uA+5NcjO9ezK/v6ru3VNFS5KebNbu5TI6Olr+ykWSdk6S66tqdKp5XikqSY0w0CWpEQa6JDVikCtFJe2DFq28dLZLaNbtZ524R9brEbokNcJAl6RGGOiS1AgDXZIaYaBLUiMMdElqhIEuSY0w0CWpEQa6JDXCQJekRhjoktQIA12SGmGgS1IjDHRJaoSBLkmNMNAlqREGuiQ1wkCXpEYY6JLUCANdkhphoEtSIwx0SWqEgS5JjRgo0JMsS3JrkvEkK6eYf3qSrUlu7B7vGn6pkqQdmTtTgyRzgNXA64DNwPoka6vq5klNL6qqFXugRknSAAY5Ql8KjFfVxqp6BFgDnLxny5Ik7axBAn0+sKlvfHM3bbI3J/l6ks8lWTjVipIsTzKWZGzr1q27UK4kaTrDOin618Ciqno58HfABVM1qqpzq2q0qkZHRkaGtGlJEgwW6FuA/iPuBd20J1TVvVX1cDf6F8DPDac8SdKgBgn09cDiJIclmQecCqztb5DkBX2jJwG3DK9ESdIgZvyVS1VtS7ICuAyYA5xXVRuSrALGqmot8NtJTgK2AfcBp+/BmiVJU5gx0AGqah2wbtK0M/uGPwR8aLilSZJ2hleKSlIjDHRJaoSBLkmNMNAlqREGuiQ1wkCXpEYY6JLUCANdkhphoEtSIwx0SWqEgS5JjTDQJakRBrokNcJAl6RGGOiS1AgDXZIaYaBLUiMMdElqhIEuSY0w0CWpEQa6JDXCQJekRhjoktQIA12SGmGgS1IjDHRJaoSBLkmNGCjQkyxLcmuS8SQrd9DuzUkqyejwSpQkDWLGQE8yB1gNnAAsAU5LsmSKds8GzgCuHXaRkqSZDXKEvhQYr6qNVfUIsAY4eYp2fwh8DPjxEOuTJA1okECfD2zqG9/cTXtCklcAC6vq0h2tKMnyJGNJxrZu3brTxUqSprfbJ0WT7Ad8AnjfTG2r6tyqGq2q0ZGRkd3dtCSpzyCBvgVY2De+oJs24dnAy4DLk9wOvBpY64lRSXpqDRLo64HFSQ5LMg84FVg7MbOqHqyqg6tqUVUtAq4BTqqqsT1SsSRpSjMGelVtA1YAlwG3ABdX1YYkq5KctKcLlCQNZu4gjapqHbBu0rQzp2l77O6XJUnaWV4pKkmNMNAlqREGuiQ1wkCXpEYY6JLUCANdkhphoEtSIwx0SWqEgS5JjTDQJakRBrokNcJAl6RGGOiS1AgDXZI
aYaBLUiMMdElqhIEuSY0w0CWpEQa6JDXCQJekRhjoktQIA12SGmGgS1IjDHRJaoSBLkmNMNAlqREGuiQ1YqBAT7Isya1JxpOsnGL+e5J8I8mNSa5OsmT4pUqSdmTGQE8yB1gNnAAsAU6bIrA/W1X/sqqOBD4OfGLolUqSdmiQI/SlwHhVbayqR4A1wMn9Darq//aNHgDU8EqUJA1i7gBt5gOb+sY3A6+a3CjJbwG/C8wDjptqRUmWA8sBDj300J2tVZK0A0M7KVpVq6vqcOCDwIenaXNuVY1W1ejIyMiwNi1JYrBA3wIs7Btf0E2bzhrgjbtTlCRp5w0S6OuBxUkOSzIPOBVY298gyeK+0ROB24ZXoiRpEDP2oVfVtiQrgMuAOcB5VbUhySpgrKrWAiuSHA88CtwPvH1PFi1JerJBTopSVeuAdZOmndk3fMaQ65Ik7SSvFJWkRhjoktQIA12SGmGgS1IjDHRJaoSBLkmNMNAlqREGuiQ1wkCXpEYY6JLUCANdkhphoEtSIwx0SWqEgS5JjTDQJakRBrokNcJAl6RGGOiS1AgDXZIaYaBLUiMMdElqhIEuSY0w0CWpEQa6JDXCQJekRhjoktSIubNdwK5YtPLS2S6hWbefdeIeWa+v2Z6zp14z7X0GOkJPsizJrUnGk6ycYv7vJrk5ydeTfCnJC4dfqiRpR2YM9CRzgNXACcAS4LQkSyY1+0dgtKpeDnwO+PiwC5Uk7dggR+hLgfGq2lhVjwBrgJP7G1TVV6rqR93oNcCC4ZYpSZrJIIE+H9jUN765mzad3wD+ZqoZSZYnGUsytnXr1sGrlCTNaKi/cknya8AocPZU86vq3KoararRkZGRYW5akvZ5g/zKZQuwsG98QTdtO0mOB34f+IWqeng45UmSBjXIEfp6YHGSw5LMA04F1vY3SHIUcA5wUlV9f/hlSpJmMmOgV9U2YAVwGXALcHFVbUiyKslJXbOzgQOBv0pyY5K106xOkrSHDHRhUVWtA9ZNmnZm3/DxQ65LkrSTvPRfkhphoEtSIwx0SWqEgS5JjTDQJakRBrokNcJAl6RGGOiS1AgDXZIaYaBLUiMMdElqhIEuSY0w0CWpEQa6JDXCQJekRhjoktQIA12SGmGgS1IjDHRJaoSBLkmNMNAlqREGuiQ1wkCXpEYY6JLUCANdkhphoEtSIwx0SWrEQIGeZFmSW5OMJ1k5xfzXJLkhybYkbxl+mZKkmcwY6EnmAKuBE4AlwGlJlkxq9j3gdOCzwy5QkjSYuQO0WQqMV9VGgCRrgJOBmycaVNXt3bzH90CNkqQBDNLlMh/Y1De+uZu205IsTzKWZGzr1q27sgpJ0jSe0pOiVXVuVY1W1ejIyMhTuWlJat4ggb4FWNg3vqCbJkl6Ghkk0NcDi5MclmQecCqwds+WJUnaWTMGelVtA1YAlwG3ABdX1YYkq5KcBJDklUk2A6cA5yTZsCeLliQ92SC/cqGq1gHrJk07s294Pb2uGEnSLPFKUUlqhIEuSY0w0CWpEQa6JDXCQJekRhjoktQIA12SGmGgS1IjDHRJaoSBLkmNMNAlqREGuiQ1wkCXpEYY6JLUCANdkhphoEtSIwx0SWqEgS5JjTDQJakRBrokNcJAl6RGGOiS1AgDXZIaYaBLUiMMdElqhIEuSY0w0CWpEQMFepJlSW5NMp5k5RTz909yUTf/2iSLhl2oJGnHZgz0JHOA1cAJwBLgtCRLJjX7DeD+qjoC+BPgY8MuVJK0Y4McoS8FxqtqY1U9AqwBTp7U5mTggm74c8Brk2R4ZUqSZjJ3gDbzgU1945uBV03Xpqq2JXkQeC5wT3+jJMuB5d3oQ0lu3ZWi90IHM2lfPF3F71awF71e4GvW2ZdesxdON2OQQB+aqjoXOPep3ObTQZKxqhqd7To0GF+vvY+vWc8gXS5bgIV94wu6aVO2STIX+Eng3mEUKEkazCCBvh5YnOSwJPOAU4G1k9qsBd7eDb8F+HJV1fD
KlCTNZMYul65PfAVwGTAHOK+qNiRZBYxV1VrgvwOfSTIO3Ecv9PXP9rlupr2cr9fex9cMiAfSktQGrxSVpEYY6JLUCANd0tNWktOTfHK269hbGOiS1AgDfTclWZXkvX3jf5TkjCTvT7I+ydeT/Kdu3gFJLk1yU5JvJnnr7FUugCSLktyS5NNJNiT52yQ/keTIJNd0r98lSZ4z27W2oNvf3+wb/70kH0lyeZKPJbkuybeTHDPFsicm+VqSg5Ocn+S/Jflqko1J3tK1SZKzu7+vb0z8jSVZneSkbviSJOd1w+/s/manfB88NXtleAz03Xce8OsASfaj95PNu4DF9O6DcyTwc0leAywD7qyqn62qlwH/Z3ZK1iSLgdVV9VLgAeDNwF8CH6yqlwPfAP7jLNa3r5hbVUuB9zJpfyf5FWAl8PqqmrjE/wXA0cAvA2d1095E72/uZ4HjgbOTvAC4Cpj4kJhP70aDdNOu7Ianeh/sVQz03VRVtwP3JjkK+CXgH4FX9g3fAPwLem+WbwCv645EjqmqB2enak3y3aq6sRu+HjgcOKiqruimXQC8ZlYq27d8vvv3emBR3/TjgA8CJ1bV/X3Tv1BVj1fVzcDzu2lHAxdW1WNVdTdwBb2/x6uAY7o7xd4M3N0F/c8DX+2Wnfw+6K9hr/CU3sulYX8BnA78NL0j9tcCf1xV50xumOQVwOuBjyb5UlWteioL1ZQe7ht+DDhotgrZB2xj+wPJZ/YNT7wOj7F9Nn0H+BngRcDYFO0Bdnh316rakuQget+SrwR+CvhV4KGq+kGS5/Lk94FdLvuoS+i9UV5J74ray4B3JjkQIMn8JM9Lcgjwo6r6H8DZwCtmq2Dt0IPA/X39uP+G3pGedt/dwPOSPDfJ/vS6S2ZyB103WJKXztD2KuCtSeYkGaH3zeq6bt419Lpzruza/V73bzM8Qh+CqnokyVeAB6rqMeBvk7wE+Fp3W/iHgF8DjqDXp/c48Cjwm7NVs2b0duBTSZ4FbATeMcv1NKGqHu1uG3IdvZv6fWvA5b6V5G3AXyV5ww6aXkKvG+UmoIAPVNVd3byrgF+qqvEkd9A7Sm8q0L30fwi6k6E3AKdU1W2zXY+kfZNdLrupO8kyDnzJMJc0mzxCl6RGeIQuSY0w0CWpEQa6JDXCQJekRhjoktSI/w/TIK6YopbnBgAAAABJRU5ErkJggg==\n", + "text/plain": [ + "
    " + ] + }, + "metadata": { + "tags": [], + "needs_background": "light" + } + }, + { + "output_type": "stream", + "text": [ + "/content/data/mini_speech_commands/unknown/b737ee80_nohash_1.wav\n" + ], + "name": "stdout" + }, + { + "output_type": "display_data", + "data": { + "image/png": "iVBORw0KGgoAAAANSUhEUgAAAXQAAAEICAYAAABPgw/pAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4yLjIsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy+WH4yJAAAUd0lEQVR4nO3df7RdZX3n8feHxGAFV7FytZIEQyE6RseCvUa7BixF7ApSwapUGDsVrWbsNFNsrRqnlnFSO0WZZWdNTSvYYUGdkUAd0XRIh7Yqv6pALhTUgMg1gkkoGH6O6AgEvvPH2ZeeXO7NPUlOuOTJ+7XWWdk/nr339+xz7ufs8+yzd1JVSJL2fvvNdgGSpOEw0CWpEQa6JDXCQJekRhjoktQIA12SGmGga7ckOT/JR7vhY5Lcuovr+VSSPxhudTNu81eSbEryUJKjnsptDypJJTlituvQ3mHubBegPS/J7cDzgceAHwJ/A6yoqoeGuZ2qugp48QD1nA68q6qO7lv2PcOsZUD/hd5++OIwVpbkcuAjwLEAVfWRYaz36SrJ+cDl3eixVXX6rBUjwCP0fckbqupA4BXAKPDhyQ2S7Gsf8C8ENuzKgknmDLkWabcZ6PuYqtpC7wj9ZfDEV/rfSnIbcFs37ZeT3JjkgSRfTfLyieWTHJXkhiQ/SHIR8My+eccm2dw3vjDJ55NsTXJvkk8meQnwKeDnu66OB7q2T3TddOPvTjKe5L4ka5Mc0jevkrwnyW1djauTpJt3RJIrkjyY5J6uxu0k2T/JQ8Ac4KYk3+mmvyTJ5d06NyQ5qW+Z85P8eZJ1SX4I/OIg+zvJ6UmunjTtiW6Ubr2rk1za7dNrkxw+zbqO7rqIjh1gP+yX5MNJ7kjy/SR/meQnu3kXJHlfNzx/4j3QjR/e7fP9Jl7PJO/r1vFPSd4xyPPWLKkqH40/gNuB47vhhfSOSv+wGy/g74CfAn4COAr4PvAqeoH39m75/YF5wB3A7wDPAN4CPAp8tFvXscDmbngOcBPwJ8AB9IL/6G7e6cDVk2o8v289xwH30Ps2sT/wp8CVfW0L+N/AQcChwFZgWTfvQuD36R2sPLHNafZLAUd0w88AxoH/0D3P44AfAC/uq+9B4F9NrHvAfT/Vc+3f7vnAvcBSel2g/xNYM7ktsAzYBCwdcD+8s3s+PwMcCHwe+EzfvL/uhv818B3gor55X+x7PbcBq7r983rgR8BzZvs97WPqh0fo+44vdEfDVwNXAP+5b94fV9V9VfX/gOXAOVV1bVU9VlUXAA8Dr+4ezwD+a1U9WlWfA9ZPs72lwCHA+6vqh1X146q6epq2k70NOK+qbqiqh4EP0TuiX9TX5qyqeqCqvgd8BTiym/4ova6UQ3Zym6+mF3xnVdUjVfVlemF5Wl+bL1bVP1TV41X14wHXO4hLquq6qtpGL9CPnDT/FOAc4ISqum7SvOn2w9uAT1TVxuqdK/kQcGrXrXYFcHSS/YDXAB+n90EF8Avd/AmPAqu613sd8BADnCfR7DDQ9x1vrKqDquqFVfXvuvCesKlv+IXA+7qv8A90HwIL6YXzIcCWquq/o9sd02xvIXBHF1I765D+9XaBdC8wv6/NXX3DP6IXxgAfAAJc13WbvHMntrmpqh7vm3bHpG1uYs+Y7rlMeC9wcVV9cyeW3W4fdsNzgedX1XfonRw/
EjiG3gfXnUlezJMD/d5Jr+FU9elpwkAX9L66T9gE/FEX/hOPZ1XVhcA/AfMn+mk7h06zzk3AodOcaJ3pFp930vtgASDJAcBzgS0zPpGqu6rq3VV1CPBvgT8b8Gd/dwILu6PWCYdO2uau3Jr0h8CzJkaS/PQurOMU4I1JztiJZbbbh/Seyzbg7m78CnpdZvOqd17lCnrda88BbtyFGvU0YKBrsk8D70nyqvQckOTEJM8GvkYvFH47yTOSvIle18pUrqP3AXBWt45nJpn4Wn83sCDJvGmWvRB4R5Ijk+xPr3vo2qq6fabik5ySZEE3ej+9EH58B4tMuJbe0ecHuud2LPAGYM0Ay+7ITcBLu+fyTHo/a9xZdwKvBc5I8psDLnMh8DtJDktyIL19eFHf0fYVwArgym788m786qp6bBdq1NOAga7tVNUY8G7gk/QCcZzeiT2q6hHgTd34fcBb6Z1sm2o9j9ELxCOA7wGbu/YAX6Z3YvauJPdMsezfA38A/C96HwqHA6cO+BReCVzb/YplLXBGVW2caaHuub0BOIHeCdk/A369qr414HanW++36Z1U/Ht6vyIatE9/8nq+Ry/UVyZ51wCLnAd8hl5gfxf4MfDv++ZfATybfw70q+l9k7gS7bWyfXeoJGlv5RG6JDXCQJekRhjoktSIgQI9ybIkt3aXYq+cps2vJrm5++3vZ4dbpiRpJjOeFE3vJkTfBl5H75cK64HTqurmvjaLgYuB46rq/iTPq6rv72i9Bx98cC1atGg3y5ekfcv1119/T1WNTDVvkLvrLQXGJ376lWQNcDJwc1+bdwOrq+p+gJnCHGDRokWMjY0NsHlJ0oQk012dPVCXy3y2v+R5M9tfDg3wIuBFSf4hyTVJlk1TyPIkY0nGtm7dOsCmJUmDGtZJ0bnAYnp3ZzsN+HSSgyY3qqpzq2q0qkZHRqb8xiBJ2kWDBPoWejdamrCAJ99TYzOwtrsj23fp9bkvHk6JkqRBDBLo64HF3T0h5tG7BHvtpDZfoPtvt5IcTK8LZsbLrSVJwzNjoHc381kBXAbcQu82nhuSrOr7H10uA+5NcjO9ezK/v6ru3VNFS5KebNbu5TI6Olr+ykWSdk6S66tqdKp5XikqSY0w0CWpEQa6JDVikCtFJe2DFq28dLZLaNbtZ524R9brEbokNcJAl6RGGOiS1AgDXZIaYaBLUiMMdElqhIEuSY0w0CWpEQa6JDXCQJekRhjoktQIA12SGmGgS1IjDHRJaoSBLkmNMNAlqREGuiQ1wkCXpEYY6JLUCANdkhphoEtSIwx0SWqEgS5JjRgo0JMsS3JrkvEkK6eYf3qSrUlu7B7vGn6pkqQdmTtTgyRzgNXA64DNwPoka6vq5klNL6qqFXugRknSAAY5Ql8KjFfVxqp6BFgDnLxny5Ik7axBAn0+sKlvfHM3bbI3J/l6ks8lWTjVipIsTzKWZGzr1q27UK4kaTrDOin618Ciqno58HfABVM1qqpzq2q0qkZHRkaGtGlJEgwW6FuA/iPuBd20J1TVvVX1cDf6F8DPDac8SdKgBgn09cDiJIclmQecCqztb5DkBX2jJwG3DK9ESdIgZvyVS1VtS7ICuAyYA5xXVRuSrALGqmot8NtJTgK2AfcBp+/BmiVJU5gx0AGqah2wbtK0M/uGPwR8aLilSZJ2hleKSlIjDHRJaoSBLkmNMNAlqREGuiQ1wkCXpEYY6JLUCANdkhphoEtSIwx0SWqEgS5JjTDQJakRBrokNcJAl6RGGOiS1AgDXZIaYaBLUiMMdElqhIEuSY0w0CWpEQa6JDXCQJekRhjoktQIA12SGmGgS1IjDHRJaoSBLkmNGCjQkyxLcmuS8SQrd9DuzUkqyejwSpQkDWLGQE8yB1gNnAAsAU5LsmSKds8GzgCuHXaRkqSZDXKEvhQYr6qNVfUIsAY4eYp2fwh8DPjxEOuTJA1okECfD2zqG9/cTXtCklcAC6vq0h2tKMnyJGNJxrZu
3brTxUqSprfbJ0WT7Ad8AnjfTG2r6tyqGq2q0ZGRkd3dtCSpzyCBvgVY2De+oJs24dnAy4DLk9wOvBpY64lRSXpqDRLo64HFSQ5LMg84FVg7MbOqHqyqg6tqUVUtAq4BTqqqsT1SsSRpSjMGelVtA1YAlwG3ABdX1YYkq5KctKcLlCQNZu4gjapqHbBu0rQzp2l77O6XJUnaWV4pKkmNMNAlqREGuiQ1wkCXpEYY6JLUCANdkhphoEtSIwx0SWqEgS5JjTDQJakRBrokNcJAl6RGGOiS1AgDXZIaYaBLUiMMdElqhIEuSY0w0CWpEQa6JDXCQJekRhjoktQIA12SGmGgS1IjDHRJaoSBLkmNMNAlqREGuiQ1YqBAT7Isya1JxpOsnGL+e5J8I8mNSa5OsmT4pUqSdmTGQE8yB1gNnAAsAU6bIrA/W1X/sqqOBD4OfGLolUqSdmiQI/SlwHhVbayqR4A1wMn9Darq//aNHgDU8EqUJA1i7gBt5gOb+sY3A6+a3CjJbwG/C8wDjptqRUmWA8sBDj300J2tVZK0A0M7KVpVq6vqcOCDwIenaXNuVY1W1ejIyMiwNi1JYrBA3wIs7Btf0E2bzhrgjbtTlCRp5w0S6OuBxUkOSzIPOBVY298gyeK+0ROB24ZXoiRpEDP2oVfVtiQrgMuAOcB5VbUhySpgrKrWAiuSHA88CtwPvH1PFi1JerJBTopSVeuAdZOmndk3fMaQ65Ik7SSvFJWkRhjoktQIA12SGmGgS1IjDHRJaoSBLkmNMNAlqREGuiQ1wkCXpEYY6JLUCANdkhphoEtSIwx0SWqEgS5JjTDQJakRBrokNcJAl6RGGOiS1AgDXZIaYaBLUiMMdElqhIEuSY0w0CWpEQa6JDXCQJekRhjoktSIubNdwK5YtPLS2S6hWbefdeIeWa+v2Z6zp14z7X0GOkJPsizJrUnGk6ycYv7vJrk5ydeTfCnJC4dfqiRpR2YM9CRzgNXACcAS4LQkSyY1+0dgtKpeDnwO+PiwC5Uk7dggR+hLgfGq2lhVjwBrgJP7G1TVV6rqR93oNcCC4ZYpSZrJIIE+H9jUN765mzad3wD+ZqoZSZYnGUsytnXr1sGrlCTNaKi/cknya8AocPZU86vq3KoararRkZGRYW5akvZ5g/zKZQuwsG98QTdtO0mOB34f+IWqeng45UmSBjXIEfp6YHGSw5LMA04F1vY3SHIUcA5wUlV9f/hlSpJmMmOgV9U2YAVwGXALcHFVbUiyKslJXbOzgQOBv0pyY5K106xOkrSHDHRhUVWtA9ZNmnZm3/DxQ65LkrSTvPRfkhphoEtSIwx0SWqEgS5JjTDQJakRBrokNcJAl6RGGOiS1AgDXZIaYaBLUiMMdElqhIEuSY0w0CWpEQa6JDXCQJekRhjoktQIA12SGmGgS1IjDHRJaoSBLkmNMNAlqREGuiQ1wkCXpEYY6JLUCANdkhphoEtSIwx0SWrEQIGeZFmSW5OMJ1k5xfzXJLkhybYkbxl+mZKkmcwY6EnmAKuBE4AlwGlJlkxq9j3gdOCzwy5QkjSYuQO0WQqMV9VGgCRrgJOBmycaVNXt3bzH90CNkqQBDNLlMh/Y1De+uZu205IsTzKWZGzr1q27sgpJ0jSe0pOiVXVuVY1W1ejIyMhTuWlJat4ggb4FWNg3vqCbJkl6Ghkk0NcDi5MclmQecCqwds+WJUnaWTMGelVtA1YAlwG3ABdX1YYkq5KcBJDklUk2A6cA5yTZsCeLliQ92SC/cqGq1gHrJk07s294Pb2uGEnSLPFKUUlqhIEuSY0w0CWpEQa6JDXCQJekRhjoktQIA12SGmGgS1IjDHRJaoSBLkmNMNAlqREGuiQ1wkCXpEYY6JLUCANdkhphoEtSIwx0SWqEgS5JjTDQJakRBrokNcJAl6RGGOiS1AgDXZIaYaBLUiMMdElqhIEuSY0w0CWpEQMFepJlSW5NMp5k5RTz909yUTf/2iSLhl2o
JGnHZgz0JHOA1cAJwBLgtCRLJjX7DeD+qjoC+BPgY8MuVJK0Y4McoS8FxqtqY1U9AqwBTp7U5mTggm74c8Brk2R4ZUqSZjJ3gDbzgU1945uBV03Xpqq2JXkQeC5wT3+jJMuB5d3oQ0lu3ZWi90IHM2lfPF3F71awF71e4GvW2ZdesxdON2OQQB+aqjoXOPep3ObTQZKxqhqd7To0GF+vvY+vWc8gXS5bgIV94wu6aVO2STIX+Eng3mEUKEkazCCBvh5YnOSwJPOAU4G1k9qsBd7eDb8F+HJV1fDKlCTNZMYul65PfAVwGTAHOK+qNiRZBYxV1VrgvwOfSTIO3Ecv9PXP9rlupr2cr9fex9cMiAfSktQGrxSVpEYY6JLUCANd0tNWktOTfHK269hbGOiS1AgDfTclWZXkvX3jf5TkjCTvT7I+ydeT/Kdu3gFJLk1yU5JvJnnr7FUugCSLktyS5NNJNiT52yQ/keTIJNd0r98lSZ4z27W2oNvf3+wb/70kH0lyeZKPJbkuybeTHDPFsicm+VqSg5Ocn+S/Jflqko1J3tK1SZKzu7+vb0z8jSVZneSkbviSJOd1w+/s/manfB88NXtleAz03Xce8OsASfaj95PNu4DF9O6DcyTwc0leAywD7qyqn62qlwH/Z3ZK1iSLgdVV9VLgAeDNwF8CH6yqlwPfAP7jLNa3r5hbVUuB9zJpfyf5FWAl8PqqmrjE/wXA0cAvA2d1095E72/uZ4HjgbOTvAC4Cpj4kJhP70aDdNOu7Ianeh/sVQz03VRVtwP3JjkK+CXgH4FX9g3fAPwLem+WbwCv645EjqmqB2enak3y3aq6sRu+HjgcOKiqruimXQC8ZlYq27d8vvv3emBR3/TjgA8CJ1bV/X3Tv1BVj1fVzcDzu2lHAxdW1WNVdTdwBb2/x6uAY7o7xd4M3N0F/c8DX+2Wnfw+6K9hr/CU3sulYX8BnA78NL0j9tcCf1xV50xumOQVwOuBjyb5UlWteioL1ZQe7ht+DDhotgrZB2xj+wPJZ/YNT7wOj7F9Nn0H+BngRcDYFO0Bdnh316rakuQget+SrwR+CvhV4KGq+kGS5/Lk94FdLvuoS+i9UV5J74ray4B3JjkQIMn8JM9Lcgjwo6r6H8DZwCtmq2Dt0IPA/X39uP+G3pGedt/dwPOSPDfJ/vS6S2ZyB103WJKXztD2KuCtSeYkGaH3zeq6bt419Lpzruza/V73bzM8Qh+CqnokyVeAB6rqMeBvk7wE+Fp3W/iHgF8DjqDXp/c48Cjwm7NVs2b0duBTSZ4FbATeMcv1NKGqHu1uG3IdvZv6fWvA5b6V5G3AXyV5ww6aXkKvG+UmoIAPVNVd3byrgF+qqvEkd9A7Sm8q0L30fwi6k6E3AKdU1W2zXY+kfZNdLrupO8kyDnzJMJc0mzxCl6RGeIQuSY0w0CWpEQa6JDXCQJekRhjoktSI/w/TIK6YopbnBgAAAABJRU5ErkJggg==\n", + "text/plain": [ + "
    " + ] + }, + "metadata": { + "tags": [], + "needs_background": "light" + } + }, + { + "output_type": "stream", + "text": [ + "/content/data/mini_speech_commands/unknown/feb1d305_nohash_2.wav\n" + ], + "name": "stdout" + }, + { + "output_type": "display_data", + "data": { + "image/png": "iVBORw0KGgoAAAANSUhEUgAAAXQAAAEICAYAAABPgw/pAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4yLjIsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy+WH4yJAAAUd0lEQVR4nO3df7RdZX3n8feHxGAFV7FytZIEQyE6RseCvUa7BixF7ApSwapUGDsVrWbsNFNsrRqnlnFSO0WZZWdNTSvYYUGdkUAd0XRIh7Yqv6pALhTUgMg1gkkoGH6O6AgEvvPH2ZeeXO7NPUlOuOTJ+7XWWdk/nr339+xz7ufs8+yzd1JVSJL2fvvNdgGSpOEw0CWpEQa6JDXCQJekRhjoktQIA12SGmGga7ckOT/JR7vhY5Lcuovr+VSSPxhudTNu81eSbEryUJKjnsptDypJJTlituvQ3mHubBegPS/J7cDzgceAHwJ/A6yoqoeGuZ2qugp48QD1nA68q6qO7lv2PcOsZUD/hd5++OIwVpbkcuAjwLEAVfWRYaz36SrJ+cDl3eixVXX6rBUjwCP0fckbqupA4BXAKPDhyQ2S7Gsf8C8ENuzKgknmDLkWabcZ6PuYqtpC7wj9ZfDEV/rfSnIbcFs37ZeT3JjkgSRfTfLyieWTHJXkhiQ/SHIR8My+eccm2dw3vjDJ55NsTXJvkk8meQnwKeDnu66OB7q2T3TddOPvTjKe5L4ka5Mc0jevkrwnyW1djauTpJt3RJIrkjyY5J6uxu0k2T/JQ8Ac4KYk3+mmvyTJ5d06NyQ5qW+Z85P8eZJ1SX4I/OIg+zvJ6UmunjTtiW6Ubr2rk1za7dNrkxw+zbqO7rqIjh1gP+yX5MNJ7kjy/SR/meQnu3kXJHlfNzx/4j3QjR/e7fP9Jl7PJO/r1vFPSd4xyPPWLKkqH40/gNuB47vhhfSOSv+wGy/g74CfAn4COAr4PvAqeoH39m75/YF5wB3A7wDPAN4CPAp8tFvXscDmbngOcBPwJ8AB9IL/6G7e6cDVk2o8v289xwH30Ps2sT/wp8CVfW0L+N/AQcChwFZgWTfvQuD36R2sPLHNafZLAUd0w88AxoH/0D3P44AfAC/uq+9B4F9NrHvAfT/Vc+3f7vnAvcBSel2g/xNYM7ktsAzYBCwdcD+8s3s+PwMcCHwe+EzfvL/uhv818B3gor55X+x7PbcBq7r983rgR8BzZvs97WPqh0fo+44vdEfDVwNXAP+5b94fV9V9VfX/gOXAOVV1bVU9VlUXAA8Dr+4ezwD+a1U9WlWfA9ZPs72lwCHA+6vqh1X146q6epq2k70NOK+qbqiqh4EP0TuiX9TX5qyqeqCqvgd8BTiym/4ova6UQ3Zym6+mF3xnVdUjVfVlemF5Wl+bL1bVP1TV41X14wHXO4hLquq6qtpGL9CPnDT/FOAc4ISqum7SvOn2w9uAT1TVxuqdK/kQcGrXrXYFcHSS/YDXAB+n90EF8Avd/AmPAqu613sd8BADnCfR7DDQ9x1vrKqDquqFVfXvuvCesKlv+IXA+7qv8A90HwIL6YXzIcCWquq/o9sd02xvIXBHF1I765D+9XaBdC8wv6/NXX3DP6IXxgAfAAJc13WbvHMntrmpqh7vm3bHpG1uYs+Y7rlMeC9wcVV9cyeW3W4fdsNzgedX1XfonRw/
EjiG3gfXnUlezJMD/d5Jr+FU9elpwkAX9L66T9gE/FEX/hOPZ1XVhcA/AfMn+mk7h06zzk3AodOcaJ3pFp930vtgASDJAcBzgS0zPpGqu6rq3VV1CPBvgT8b8Gd/dwILu6PWCYdO2uau3Jr0h8CzJkaS/PQurOMU4I1JztiJZbbbh/Seyzbg7m78CnpdZvOqd17lCnrda88BbtyFGvU0YKBrsk8D70nyqvQckOTEJM8GvkYvFH47yTOSvIle18pUrqP3AXBWt45nJpn4Wn83sCDJvGmWvRB4R5Ijk+xPr3vo2qq6fabik5ySZEE3ej+9EH58B4tMuJbe0ecHuud2LPAGYM0Ay+7ITcBLu+fyTHo/a9xZdwKvBc5I8psDLnMh8DtJDktyIL19eFHf0fYVwArgym788m786qp6bBdq1NOAga7tVNUY8G7gk/QCcZzeiT2q6hHgTd34fcBb6Z1sm2o9j9ELxCOA7wGbu/YAX6Z3YvauJPdMsezfA38A/C96HwqHA6cO+BReCVzb/YplLXBGVW2caaHuub0BOIHeCdk/A369qr414HanW++36Z1U/Ht6vyIatE9/8nq+Ry/UVyZ51wCLnAd8hl5gfxf4MfDv++ZfATybfw70q+l9k7gS7bWyfXeoJGlv5RG6JDXCQJekRhjoktSIgQI9ybIkt3aXYq+cps2vJrm5++3vZ4dbpiRpJjOeFE3vJkTfBl5H75cK64HTqurmvjaLgYuB46rq/iTPq6rv72i9Bx98cC1atGg3y5ekfcv1119/T1WNTDVvkLvrLQXGJ376lWQNcDJwc1+bdwOrq+p+gJnCHGDRokWMjY0NsHlJ0oQk012dPVCXy3y2v+R5M9tfDg3wIuBFSf4hyTVJlk1TyPIkY0nGtm7dOsCmJUmDGtZJ0bnAYnp3ZzsN+HSSgyY3qqpzq2q0qkZHRqb8xiBJ2kWDBPoWejdamrCAJ99TYzOwtrsj23fp9bkvHk6JkqRBDBLo64HF3T0h5tG7BHvtpDZfoPtvt5IcTK8LZsbLrSVJwzNjoHc381kBXAbcQu82nhuSrOr7H10uA+5NcjO9ezK/v6ru3VNFS5KebNbu5TI6Olr+ykWSdk6S66tqdKp5XikqSY0w0CWpEQa6JDVikCtFJe2DFq28dLZLaNbtZ524R9brEbokNcJAl6RGGOiS1AgDXZIaYaBLUiMMdElqhIEuSY0w0CWpEQa6JDXCQJekRhjoktQIA12SGmGgS1IjDHRJaoSBLkmNMNAlqREGuiQ1wkCXpEYY6JLUCANdkhphoEtSIwx0SWqEgS5JjRgo0JMsS3JrkvEkK6eYf3qSrUlu7B7vGn6pkqQdmTtTgyRzgNXA64DNwPoka6vq5klNL6qqFXugRknSAAY5Ql8KjFfVxqp6BFgDnLxny5Ik7axBAn0+sKlvfHM3bbI3J/l6ks8lWTjVipIsTzKWZGzr1q27UK4kaTrDOin618Ciqno58HfABVM1qqpzq2q0qkZHRkaGtGlJEgwW6FuA/iPuBd20J1TVvVX1cDf6F8DPDac8SdKgBgn09cDiJIclmQecCqztb5DkBX2jJwG3DK9ESdIgZvyVS1VtS7ICuAyYA5xXVRuSrALGqmot8NtJTgK2AfcBp+/BmiVJU5gx0AGqah2wbtK0M/uGPwR8aLilSZJ2hleKSlIjDHRJaoSBLkmNMNAlqREGuiQ1wkCXpEYY6JLUCANdkhphoEtSIwx0SWqEgS5JjTDQJakRBrokNcJAl6RGGOiS1AgDXZIaYaBLUiMMdElqhIEuSY0w0CWpEQa6JDXCQJekRhjoktQIA12SGmGgS1IjDHRJaoSBLkmNGCjQkyxLcmuS8SQrd9DuzUkqyejwSpQkDWLGQE8yB1gNnAAsAU5LsmSKds8GzgCuHXaRkqSZDXKEvhQYr6qNVfUIsAY4eYp2fwh8DPjxEOuTJA1okECfD2zqG9/cTXtCklcAC6vq0h2tKMnyJGNJxrZu
3brTxUqSprfbJ0WT7Ad8AnjfTG2r6tyqGq2q0ZGRkd3dtCSpzyCBvgVY2De+oJs24dnAy4DLk9wOvBpY64lRSXpqDRLo64HFSQ5LMg84FVg7MbOqHqyqg6tqUVUtAq4BTqqqsT1SsSRpSjMGelVtA1YAlwG3ABdX1YYkq5KctKcLlCQNZu4gjapqHbBu0rQzp2l77O6XJUnaWV4pKkmNMNAlqREGuiQ1wkCXpEYY6JLUCANdkhphoEtSIwx0SWqEgS5JjTDQJakRBrokNcJAl6RGGOiS1AgDXZIaYaBLUiMMdElqhIEuSY0w0CWpEQa6JDXCQJekRhjoktQIA12SGmGgS1IjDHRJaoSBLkmNMNAlqREGuiQ1YqBAT7Isya1JxpOsnGL+e5J8I8mNSa5OsmT4pUqSdmTGQE8yB1gNnAAsAU6bIrA/W1X/sqqOBD4OfGLolUqSdmiQI/SlwHhVbayqR4A1wMn9Darq//aNHgDU8EqUJA1i7gBt5gOb+sY3A6+a3CjJbwG/C8wDjptqRUmWA8sBDj300J2tVZK0A0M7KVpVq6vqcOCDwIenaXNuVY1W1ejIyMiwNi1JYrBA3wIs7Btf0E2bzhrgjbtTlCRp5w0S6OuBxUkOSzIPOBVY298gyeK+0ROB24ZXoiRpEDP2oVfVtiQrgMuAOcB5VbUhySpgrKrWAiuSHA88CtwPvH1PFi1JerJBTopSVeuAdZOmndk3fMaQ65Ik7SSvFJWkRhjoktQIA12SGmGgS1IjDHRJaoSBLkmNMNAlqREGuiQ1wkCXpEYY6JLUCANdkhphoEtSIwx0SWqEgS5JjTDQJakRBrokNcJAl6RGGOiS1AgDXZIaYaBLUiMMdElqhIEuSY0w0CWpEQa6JDXCQJekRhjoktSIubNdwK5YtPLS2S6hWbefdeIeWa+v2Z6zp14z7X0GOkJPsizJrUnGk6ycYv7vJrk5ydeTfCnJC4dfqiRpR2YM9CRzgNXACcAS4LQkSyY1+0dgtKpeDnwO+PiwC5Uk7dggR+hLgfGq2lhVjwBrgJP7G1TVV6rqR93oNcCC4ZYpSZrJIIE+H9jUN765mzad3wD+ZqoZSZYnGUsytnXr1sGrlCTNaKi/cknya8AocPZU86vq3KoararRkZGRYW5akvZ5g/zKZQuwsG98QTdtO0mOB34f+IWqeng45UmSBjXIEfp6YHGSw5LMA04F1vY3SHIUcA5wUlV9f/hlSpJmMmOgV9U2YAVwGXALcHFVbUiyKslJXbOzgQOBv0pyY5K106xOkrSHDHRhUVWtA9ZNmnZm3/DxQ65LkrSTvPRfkhphoEtSIwx0SWqEgS5JjTDQJakRBrokNcJAl6RGGOiS1AgDXZIaYaBLUiMMdElqhIEuSY0w0CWpEQa6JDXCQJekRhjoktQIA12SGmGgS1IjDHRJaoSBLkmNMNAlqREGuiQ1wkCXpEYY6JLUCANdkhphoEtSIwx0SWrEQIGeZFmSW5OMJ1k5xfzXJLkhybYkbxl+mZKkmcwY6EnmAKuBE4AlwGlJlkxq9j3gdOCzwy5QkjSYuQO0WQqMV9VGgCRrgJOBmycaVNXt3bzH90CNkqQBDNLlMh/Y1De+uZu205IsTzKWZGzr1q27sgpJ0jSe0pOiVXVuVY1W1ejIyMhTuWlJat4ggb4FWNg3vqCbJkl6Ghkk0NcDi5MclmQecCqwds+WJUnaWTMGelVtA1YAlwG3ABdX1YYkq5KcBJDklUk2A6cA5yTZsCeLliQ92SC/cqGq1gHrJk07s294Pb2uGEnSLPFKUUlqhIEuSY0w0CWpEQa6JDXCQJekRhjoktQIA12SGmGgS1IjDHRJaoSBLkmNMNAlqREGuiQ1wkCXpEYY6JLUCANdkhphoEtSIwx0SWqEgS5JjTDQJakRBrokNcJAl6RGGOiS1AgDXZIaYaBLUiMMdElqhIEuSY0w0CWpEQMFepJlSW5NMp5k5RTz909yUTf/2iSLhl2o
JGnHZgz0JHOA1cAJwBLgtCRLJjX7DeD+qjoC+BPgY8MuVJK0Y4McoS8FxqtqY1U9AqwBTp7U5mTggm74c8Brk2R4ZUqSZjJ3gDbzgU1945uBV03Xpqq2JXkQeC5wT3+jJMuB5d3oQ0lu3ZWi90IHM2lfPF3F71awF71e4GvW2ZdesxdON2OQQB+aqjoXOPep3ObTQZKxqhqd7To0GF+vvY+vWc8gXS5bgIV94wu6aVO2STIX+Eng3mEUKEkazCCBvh5YnOSwJPOAU4G1k9qsBd7eDb8F+HJV1fDKlCTNZMYul65PfAVwGTAHOK+qNiRZBYxV1VrgvwOfSTIO3Ecv9PXP9rlupr2cr9fex9cMiAfSktQGrxSVpEYY6JLUCANd0tNWktOTfHK269hbGOiS1AgDfTclWZXkvX3jf5TkjCTvT7I+ydeT/Kdu3gFJLk1yU5JvJnnr7FUugCSLktyS5NNJNiT52yQ/keTIJNd0r98lSZ4z27W2oNvf3+wb/70kH0lyeZKPJbkuybeTHDPFsicm+VqSg5Ocn+S/Jflqko1J3tK1SZKzu7+vb0z8jSVZneSkbviSJOd1w+/s/manfB88NXtleAz03Xce8OsASfaj95PNu4DF9O6DcyTwc0leAywD7qyqn62qlwH/Z3ZK1iSLgdVV9VLgAeDNwF8CH6yqlwPfAP7jLNa3r5hbVUuB9zJpfyf5FWAl8PqqmrjE/wXA0cAvA2d1095E72/uZ4HjgbOTvAC4Cpj4kJhP70aDdNOu7Ianeh/sVQz03VRVtwP3JjkK+CXgH4FX9g3fAPwLem+WbwCv645EjqmqB2enak3y3aq6sRu+HjgcOKiqruimXQC8ZlYq27d8vvv3emBR3/TjgA8CJ1bV/X3Tv1BVj1fVzcDzu2lHAxdW1WNVdTdwBb2/x6uAY7o7xd4M3N0F/c8DX+2Wnfw+6K9hr/CU3sulYX8BnA78NL0j9tcCf1xV50xumOQVwOuBjyb5UlWteioL1ZQe7ht+DDhotgrZB2xj+wPJZ/YNT7wOj7F9Nn0H+BngRcDYFO0Bdnh316rakuQget+SrwR+CvhV4KGq+kGS5/Lk94FdLvuoS+i9UV5J74ray4B3JjkQIMn8JM9Lcgjwo6r6H8DZwCtmq2Dt0IPA/X39uP+G3pGedt/dwPOSPDfJ/vS6S2ZyB103WJKXztD2KuCtSeYkGaH3zeq6bt419Lpzruza/V73bzM8Qh+CqnokyVeAB6rqMeBvk7wE+Fp3W/iHgF8DjqDXp/c48Cjwm7NVs2b0duBTSZ4FbATeMcv1NKGqHu1uG3IdvZv6fWvA5b6V5G3AXyV5ww6aXkKvG+UmoIAPVNVd3byrgF+qqvEkd9A7Sm8q0L30fwi6k6E3AKdU1W2zXY+kfZNdLrupO8kyDnzJMJc0mzxCl6RGeIQuSY0w0CWpEQa6JDXCQJekRhjoktSI/w/TIK6YopbnBgAAAABJRU5ErkJggg==\n", + "text/plain": [ + "
    " + ] + }, + "metadata": { + "tags": [], + "needs_background": "light" + } + }, + { + "output_type": "stream", + "text": [ + "/content/data/mini_speech_commands/unknown/07ad9b59_nohash_2.wav\n" + ], + "name": "stdout" + }, + { + "output_type": "display_data", + "data": { + "image/png": "iVBORw0KGgoAAAANSUhEUgAAAXQAAAEICAYAAABPgw/pAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4yLjIsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy+WH4yJAAAUd0lEQVR4nO3df7RdZX3n8feHxGAFV7FytZIEQyE6RseCvUa7BixF7ApSwapUGDsVrWbsNFNsrRqnlnFSO0WZZWdNTSvYYUGdkUAd0XRIh7Yqv6pALhTUgMg1gkkoGH6O6AgEvvPH2ZeeXO7NPUlOuOTJ+7XWWdk/nr339+xz7ufs8+yzd1JVSJL2fvvNdgGSpOEw0CWpEQa6JDXCQJekRhjoktQIA12SGmGga7ckOT/JR7vhY5Lcuovr+VSSPxhudTNu81eSbEryUJKjnsptDypJJTlituvQ3mHubBegPS/J7cDzgceAHwJ/A6yoqoeGuZ2qugp48QD1nA68q6qO7lv2PcOsZUD/hd5++OIwVpbkcuAjwLEAVfWRYaz36SrJ+cDl3eixVXX6rBUjwCP0fckbqupA4BXAKPDhyQ2S7Gsf8C8ENuzKgknmDLkWabcZ6PuYqtpC7wj9ZfDEV/rfSnIbcFs37ZeT3JjkgSRfTfLyieWTHJXkhiQ/SHIR8My+eccm2dw3vjDJ55NsTXJvkk8meQnwKeDnu66OB7q2T3TddOPvTjKe5L4ka5Mc0jevkrwnyW1djauTpJt3RJIrkjyY5J6uxu0k2T/JQ8Ac4KYk3+mmvyTJ5d06NyQ5qW+Z85P8eZJ1SX4I/OIg+zvJ6UmunjTtiW6Ubr2rk1za7dNrkxw+zbqO7rqIjh1gP+yX5MNJ7kjy/SR/meQnu3kXJHlfNzx/4j3QjR/e7fP9Jl7PJO/r1vFPSd4xyPPWLKkqH40/gNuB47vhhfSOSv+wGy/g74CfAn4COAr4PvAqeoH39m75/YF5wB3A7wDPAN4CPAp8tFvXscDmbngOcBPwJ8AB9IL/6G7e6cDVk2o8v289xwH30Ps2sT/wp8CVfW0L+N/AQcChwFZgWTfvQuD36R2sPLHNafZLAUd0w88AxoH/0D3P44AfAC/uq+9B4F9NrHvAfT/Vc+3f7vnAvcBSel2g/xNYM7ktsAzYBCwdcD+8s3s+PwMcCHwe+EzfvL/uhv818B3gor55X+x7PbcBq7r983rgR8BzZvs97WPqh0fo+44vdEfDVwNXAP+5b94fV9V9VfX/gOXAOVV1bVU9VlUXAA8Dr+4ezwD+a1U9WlWfA9ZPs72lwCHA+6vqh1X146q6epq2k70NOK+qbqiqh4EP0TuiX9TX5qyqeqCqvgd8BTiym/4ova6UQ3Zym6+mF3xnVdUjVfVlemF5Wl+bL1bVP1TV41X14wHXO4hLquq6qtpGL9CPnDT/FOAc4ISqum7SvOn2w9uAT1TVxuqdK/kQcGrXrXYFcHSS/YDXAB+n90EF8Avd/AmPAqu613sd8BADnCfR7DDQ9x1vrKqDquqFVfXvuvCesKlv+IXA+7qv8A90HwIL6YXzIcCWquq/o9sd02xvIXBHF1I765D+9XaBdC8wv6/NXX3DP6IXxgAfAAJc13WbvHMntrmpqh7vm3bHpG1uYs+Y7rlMeC9wcVV9cyeW3W4fdsNzgedX1XfonRw/
EjiG3gfXnUlezJMD/d5Jr+FU9elpwkAX9L66T9gE/FEX/hOPZ1XVhcA/AfMn+mk7h06zzk3AodOcaJ3pFp930vtgASDJAcBzgS0zPpGqu6rq3VV1CPBvgT8b8Gd/dwILu6PWCYdO2uau3Jr0h8CzJkaS/PQurOMU4I1JztiJZbbbh/Seyzbg7m78CnpdZvOqd17lCnrda88BbtyFGvU0YKBrsk8D70nyqvQckOTEJM8GvkYvFH47yTOSvIle18pUrqP3AXBWt45nJpn4Wn83sCDJvGmWvRB4R5Ijk+xPr3vo2qq6fabik5ySZEE3ej+9EH58B4tMuJbe0ecHuud2LPAGYM0Ay+7ITcBLu+fyTHo/a9xZdwKvBc5I8psDLnMh8DtJDktyIL19eFHf0fYVwArgym788m786qp6bBdq1NOAga7tVNUY8G7gk/QCcZzeiT2q6hHgTd34fcBb6Z1sm2o9j9ELxCOA7wGbu/YAX6Z3YvauJPdMsezfA38A/C96HwqHA6cO+BReCVzb/YplLXBGVW2caaHuub0BOIHeCdk/A369qr414HanW++36Z1U/Ht6vyIatE9/8nq+Ry/UVyZ51wCLnAd8hl5gfxf4MfDv++ZfATybfw70q+l9k7gS7bWyfXeoJGlv5RG6JDXCQJekRhjoktSIgQI9ybIkt3aXYq+cps2vJrm5++3vZ4dbpiRpJjOeFE3vJkTfBl5H75cK64HTqurmvjaLgYuB46rq/iTPq6rv72i9Bx98cC1atGg3y5ekfcv1119/T1WNTDVvkLvrLQXGJ376lWQNcDJwc1+bdwOrq+p+gJnCHGDRokWMjY0NsHlJ0oQk012dPVCXy3y2v+R5M9tfDg3wIuBFSf4hyTVJlk1TyPIkY0nGtm7dOsCmJUmDGtZJ0bnAYnp3ZzsN+HSSgyY3qqpzq2q0qkZHRqb8xiBJ2kWDBPoWejdamrCAJ99TYzOwtrsj23fp9bkvHk6JkqRBDBLo64HF3T0h5tG7BHvtpDZfoPtvt5IcTK8LZsbLrSVJwzNjoHc381kBXAbcQu82nhuSrOr7H10uA+5NcjO9ezK/v6ru3VNFS5KebNbu5TI6Olr+ykWSdk6S66tqdKp5XikqSY0w0CWpEQa6JDVikCtFJe2DFq28dLZLaNbtZ524R9brEbokNcJAl6RGGOiS1AgDXZIaYaBLUiMMdElqhIEuSY0w0CWpEQa6JDXCQJekRhjoktQIA12SGmGgS1IjDHRJaoSBLkmNMNAlqREGuiQ1wkCXpEYY6JLUCANdkhphoEtSIwx0SWqEgS5JjRgo0JMsS3JrkvEkK6eYf3qSrUlu7B7vGn6pkqQdmTtTgyRzgNXA64DNwPoka6vq5klNL6qqFXugRknSAAY5Ql8KjFfVxqp6BFgDnLxny5Ik7axBAn0+sKlvfHM3bbI3J/l6ks8lWTjVipIsTzKWZGzr1q27UK4kaTrDOin618Ciqno58HfABVM1qqpzq2q0qkZHRkaGtGlJEgwW6FuA/iPuBd20J1TVvVX1cDf6F8DPDac8SdKgBgn09cDiJIclmQecCqztb5DkBX2jJwG3DK9ESdIgZvyVS1VtS7ICuAyYA5xXVRuSrALGqmot8NtJTgK2AfcBp+/BmiVJU5gx0AGqah2wbtK0M/uGPwR8aLilSZJ2hleKSlIjDHRJaoSBLkmNMNAlqREGuiQ1wkCXpEYY6JLUCANdkhphoEtSIwx0SWqEgS5JjTDQJakRBrokNcJAl6RGGOiS1AgDXZIaYaBLUiMMdElqhIEuSY0w0CWpEQa6JDXCQJekRhjoktQIA12SGmGgS1IjDHRJaoSBLkmNGCjQkyxLcmuS8SQrd9DuzUkqyejwSpQkDWLGQE8yB1gNnAAsAU5LsmSKds8GzgCuHXaRkqSZDXKEvhQYr6qNVfUIsAY4eYp2fwh8DPjxEOuTJA1okECfD2zqG9/cTXtCklcAC6vq0h2tKMnyJGNJxrZu
3brTxUqSprfbJ0WT7Ad8AnjfTG2r6tyqGq2q0ZGRkd3dtCSpzyCBvgVY2De+oJs24dnAy4DLk9wOvBpY64lRSXpqDRLo64HFSQ5LMg84FVg7MbOqHqyqg6tqUVUtAq4BTqqqsT1SsSRpSjMGelVtA1YAlwG3ABdX1YYkq5KctKcLlCQNZu4gjapqHbBu0rQzp2l77O6XJUnaWV4pKkmNMNAlqREGuiQ1wkCXpEYY6JLUCANdkhphoEtSIwx0SWqEgS5JjTDQJakRBrokNcJAl6RGGOiS1AgDXZIaYaBLUiMMdElqhIEuSY0w0CWpEQa6JDXCQJekRhjoktQIA12SGmGgS1IjDHRJaoSBLkmNMNAlqREGuiQ1YqBAT7Isya1JxpOsnGL+e5J8I8mNSa5OsmT4pUqSdmTGQE8yB1gNnAAsAU6bIrA/W1X/sqqOBD4OfGLolUqSdmiQI/SlwHhVbayqR4A1wMn9Darq//aNHgDU8EqUJA1i7gBt5gOb+sY3A6+a3CjJbwG/C8wDjptqRUmWA8sBDj300J2tVZK0A0M7KVpVq6vqcOCDwIenaXNuVY1W1ejIyMiwNi1JYrBA3wIs7Btf0E2bzhrgjbtTlCRp5w0S6OuBxUkOSzIPOBVY298gyeK+0ROB24ZXoiRpEDP2oVfVtiQrgMuAOcB5VbUhySpgrKrWAiuSHA88CtwPvH1PFi1JerJBTopSVeuAdZOmndk3fMaQ65Ik7SSvFJWkRhjoktQIA12SGmGgS1IjDHRJaoSBLkmNMNAlqREGuiQ1wkCXpEYY6JLUCANdkhphoEtSIwx0SWqEgS5JjTDQJakRBrokNcJAl6RGGOiS1AgDXZIaYaBLUiMMdElqhIEuSY0w0CWpEQa6JDXCQJekRhjoktSIubNdwK5YtPLS2S6hWbefdeIeWa+v2Z6zp14z7X0GOkJPsizJrUnGk6ycYv7vJrk5ydeTfCnJC4dfqiRpR2YM9CRzgNXACcAS4LQkSyY1+0dgtKpeDnwO+PiwC5Uk7dggR+hLgfGq2lhVjwBrgJP7G1TVV6rqR93oNcCC4ZYpSZrJIIE+H9jUN765mzad3wD+ZqoZSZYnGUsytnXr1sGrlCTNaKi/cknya8AocPZU86vq3KoararRkZGRYW5akvZ5g/zKZQuwsG98QTdtO0mOB34f+IWqeng45UmSBjXIEfp6YHGSw5LMA04F1vY3SHIUcA5wUlV9f/hlSpJmMmOgV9U2YAVwGXALcHFVbUiyKslJXbOzgQOBv0pyY5K106xOkrSHDHRhUVWtA9ZNmnZm3/DxQ65LkrSTvPRfkhphoEtSIwx0SWqEgS5JjTDQJakRBrokNcJAl6RGGOiS1AgDXZIaYaBLUiMMdElqhIEuSY0w0CWpEQa6JDXCQJekRhjoktQIA12SGmGgS1IjDHRJaoSBLkmNMNAlqREGuiQ1wkCXpEYY6JLUCANdkhphoEtSIwx0SWrEQIGeZFmSW5OMJ1k5xfzXJLkhybYkbxl+mZKkmcwY6EnmAKuBE4AlwGlJlkxq9j3gdOCzwy5QkjSYuQO0WQqMV9VGgCRrgJOBmycaVNXt3bzH90CNkqQBDNLlMh/Y1De+uZu205IsTzKWZGzr1q27sgpJ0jSe0pOiVXVuVY1W1ejIyMhTuWlJat4ggb4FWNg3vqCbJkl6Ghkk0NcDi5MclmQecCqwds+WJUnaWTMGelVtA1YAlwG3ABdX1YYkq5KcBJDklUk2A6cA5yTZsCeLliQ92SC/cqGq1gHrJk07s294Pb2uGEnSLPFKUUlqhIEuSY0w0CWpEQa6JDXCQJekRhjoktQIA12SGmGgS1IjDHRJaoSBLkmNMNAlqREGuiQ1wkCXpEYY6JLUCANdkhphoEtSIwx0SWqEgS5JjTDQJakRBrokNcJAl6RGGOiS1AgDXZIaYaBLUiMMdElqhIEuSY0w0CWpEQMFepJlSW5NMp5k5RTz909yUTf/2iSLhl2o
JGnHZgz0JHOA1cAJwBLgtCRLJjX7DeD+qjoC+BPgY8MuVJK0Y4McoS8FxqtqY1U9AqwBTp7U5mTggm74c8Brk2R4ZUqSZjJ3gDbzgU1945uBV03Xpqq2JXkQeC5wT3+jJMuB5d3oQ0lu3ZWi90IHM2lfPF3F71awF71e4GvW2ZdesxdON2OQQB+aqjoXOPep3ObTQZKxqhqd7To0GF+vvY+vWc8gXS5bgIV94wu6aVO2STIX+Eng3mEUKEkazCCBvh5YnOSwJPOAU4G1k9qsBd7eDb8F+HJV1fDKlCTNZMYul65PfAVwGTAHOK+qNiRZBYxV1VrgvwOfSTIO3Ecv9PXP9rlupr2cr9fex9cMiAfSktQGrxSVpEYY6JLUCANd0tNWktOTfHK269hbGOiS1AgDfTclWZXkvX3jf5TkjCTvT7I+ydeT/Kdu3gFJLk1yU5JvJnnr7FUugCSLktyS5NNJNiT52yQ/keTIJNd0r98lSZ4z27W2oNvf3+wb/70kH0lyeZKPJbkuybeTHDPFsicm+VqSg5Ocn+S/Jflqko1J3tK1SZKzu7+vb0z8jSVZneSkbviSJOd1w+/s/manfB88NXtleAz03Xce8OsASfaj95PNu4DF9O6DcyTwc0leAywD7qyqn62qlwH/Z3ZK1iSLgdVV9VLgAeDNwF8CH6yqlwPfAP7jLNa3r5hbVUuB9zJpfyf5FWAl8PqqmrjE/wXA0cAvA2d1095E72/uZ4HjgbOTvAC4Cpj4kJhP70aDdNOu7Ianeh/sVQz03VRVtwP3JjkK+CXgH4FX9g3fAPwLem+WbwCv645EjqmqB2enak3y3aq6sRu+HjgcOKiqruimXQC8ZlYq27d8vvv3emBR3/TjgA8CJ1bV/X3Tv1BVj1fVzcDzu2lHAxdW1WNVdTdwBb2/x6uAY7o7xd4M3N0F/c8DX+2Wnfw+6K9hr/CU3sulYX8BnA78NL0j9tcCf1xV50xumOQVwOuBjyb5UlWteioL1ZQe7ht+DDhotgrZB2xj+wPJZ/YNT7wOj7F9Nn0H+BngRcDYFO0Bdnh316rakuQget+SrwR+CvhV4KGq+kGS5/Lk94FdLvuoS+i9UV5J74ray4B3JjkQIMn8JM9Lcgjwo6r6H8DZwCtmq2Dt0IPA/X39uP+G3pGedt/dwPOSPDfJ/vS6S2ZyB103WJKXztD2KuCtSeYkGaH3zeq6bt419Lpzruza/V73bzM8Qh+CqnokyVeAB6rqMeBvk7wE+Fp3W/iHgF8DjqDXp/c48Cjwm7NVs2b0duBTSZ4FbATeMcv1NKGqHu1uG3IdvZv6fWvA5b6V5G3AXyV5ww6aXkKvG+UmoIAPVNVd3byrgF+qqvEkd9A7Sm8q0L30fwi6k6E3AKdU1W2zXY+kfZNdLrupO8kyDnzJMJc0mzxCl6RGeIQuSY0w0CWpEQa6JDXCQJekRhjoktSI/w/TIK6YopbnBgAAAABJRU5ErkJggg==\n", + "text/plain": [ + "
    " + ] + }, + "metadata": { + "tags": [], + "needs_background": "light" + } + }, + { + "output_type": "stream", + "text": [ + "/content/data/mini_speech_commands/unknown/5c39594f_nohash_4.wav\n" + ], + "name": "stdout" + }, + { + "output_type": "display_data", + "data": { + "image/png": "iVBORw0KGgoAAAANSUhEUgAAAXQAAAEICAYAAABPgw/pAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4yLjIsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy+WH4yJAAAUd0lEQVR4nO3df7RdZX3n8feHxGAFV7FytZIEQyE6RseCvUa7BixF7ApSwapUGDsVrWbsNFNsrRqnlnFSO0WZZWdNTSvYYUGdkUAd0XRIh7Yqv6pALhTUgMg1gkkoGH6O6AgEvvPH2ZeeXO7NPUlOuOTJ+7XWWdk/nr339+xz7ufs8+yzd1JVSJL2fvvNdgGSpOEw0CWpEQa6JDXCQJekRhjoktQIA12SGmGga7ckOT/JR7vhY5Lcuovr+VSSPxhudTNu81eSbEryUJKjnsptDypJJTlituvQ3mHubBegPS/J7cDzgceAHwJ/A6yoqoeGuZ2qugp48QD1nA68q6qO7lv2PcOsZUD/hd5++OIwVpbkcuAjwLEAVfWRYaz36SrJ+cDl3eixVXX6rBUjwCP0fckbqupA4BXAKPDhyQ2S7Gsf8C8ENuzKgknmDLkWabcZ6PuYqtpC7wj9ZfDEV/rfSnIbcFs37ZeT3JjkgSRfTfLyieWTHJXkhiQ/SHIR8My+eccm2dw3vjDJ55NsTXJvkk8meQnwKeDnu66OB7q2T3TddOPvTjKe5L4ka5Mc0jevkrwnyW1djauTpJt3RJIrkjyY5J6uxu0k2T/JQ8Ac4KYk3+mmvyTJ5d06NyQ5qW+Z85P8eZJ1SX4I/OIg+zvJ6UmunjTtiW6Ubr2rk1za7dNrkxw+zbqO7rqIjh1gP+yX5MNJ7kjy/SR/meQnu3kXJHlfNzx/4j3QjR/e7fP9Jl7PJO/r1vFPSd4xyPPWLKkqH40/gNuB47vhhfSOSv+wGy/g74CfAn4COAr4PvAqeoH39m75/YF5wB3A7wDPAN4CPAp8tFvXscDmbngOcBPwJ8AB9IL/6G7e6cDVk2o8v289xwH30Ps2sT/wp8CVfW0L+N/AQcChwFZgWTfvQuD36R2sPLHNafZLAUd0w88AxoH/0D3P44AfAC/uq+9B4F9NrHvAfT/Vc+3f7vnAvcBSel2g/xNYM7ktsAzYBCwdcD+8s3s+PwMcCHwe+EzfvL/uhv818B3gor55X+x7PbcBq7r983rgR8BzZvs97WPqh0fo+44vdEfDVwNXAP+5b94fV9V9VfX/gOXAOVV1bVU9VlUXAA8Dr+4ezwD+a1U9WlWfA9ZPs72lwCHA+6vqh1X146q6epq2k70NOK+qbqiqh4EP0TuiX9TX5qyqeqCqvgd8BTiym/4ova6UQ3Zym6+mF3xnVdUjVfVlemF5Wl+bL1bVP1TV41X14wHXO4hLquq6qtpGL9CPnDT/FOAc4ISqum7SvOn2w9uAT1TVxuqdK/kQcGrXrXYFcHSS/YDXAB+n90EF8Avd/AmPAqu613sd8BADnCfR7DDQ9x1vrKqDquqFVfXvuvCesKlv+IXA+7qv8A90HwIL6YXzIcCWquq/o9sd02xvIXBHF1I765D+9XaBdC8wv6/NXX3DP6IXxgAfAAJc13WbvHMntrmpqh7vm3bHpG1uYs+Y7rlMeC9wcVV9cyeW3W4fdsNzgedX1XfonRw/
EjiG3gfXnUlezJMD/d5Jr+FU9elpwkAX9L66T9gE/FEX/hOPZ1XVhcA/AfMn+mk7h06zzk3AodOcaJ3pFp930vtgASDJAcBzgS0zPpGqu6rq3VV1CPBvgT8b8Gd/dwILu6PWCYdO2uau3Jr0h8CzJkaS/PQurOMU4I1JztiJZbbbh/Seyzbg7m78CnpdZvOqd17lCnrda88BbtyFGvU0YKBrsk8D70nyqvQckOTEJM8GvkYvFH47yTOSvIle18pUrqP3AXBWt45nJpn4Wn83sCDJvGmWvRB4R5Ijk+xPr3vo2qq6fabik5ySZEE3ej+9EH58B4tMuJbe0ecHuud2LPAGYM0Ay+7ITcBLu+fyTHo/a9xZdwKvBc5I8psDLnMh8DtJDktyIL19eFHf0fYVwArgym788m786qp6bBdq1NOAga7tVNUY8G7gk/QCcZzeiT2q6hHgTd34fcBb6Z1sm2o9j9ELxCOA7wGbu/YAX6Z3YvauJPdMsezfA38A/C96HwqHA6cO+BReCVzb/YplLXBGVW2caaHuub0BOIHeCdk/A369qr414HanW++36Z1U/Ht6vyIatE9/8nq+Ry/UVyZ51wCLnAd8hl5gfxf4MfDv++ZfATybfw70q+l9k7gS7bWyfXeoJGlv5RG6JDXCQJekRhjoktSIgQI9ybIkt3aXYq+cps2vJrm5++3vZ4dbpiRpJjOeFE3vJkTfBl5H75cK64HTqurmvjaLgYuB46rq/iTPq6rv72i9Bx98cC1atGg3y5ekfcv1119/T1WNTDVvkLvrLQXGJ376lWQNcDJwc1+bdwOrq+p+gJnCHGDRokWMjY0NsHlJ0oQk012dPVCXy3y2v+R5M9tfDg3wIuBFSf4hyTVJlk1TyPIkY0nGtm7dOsCmJUmDGtZJ0bnAYnp3ZzsN+HSSgyY3qqpzq2q0qkZHRqb8xiBJ2kWDBPoWejdamrCAJ99TYzOwtrsj23fp9bkvHk6JkqRBDBLo64HF3T0h5tG7BHvtpDZfoPtvt5IcTK8LZsbLrSVJwzNjoHc381kBXAbcQu82nhuSrOr7H10uA+5NcjO9ezK/v6ru3VNFS5KebNbu5TI6Olr+ykWSdk6S66tqdKp5XikqSY0w0CWpEQa6JDVikCtFJe2DFq28dLZLaNbtZ524R9brEbokNcJAl6RGGOiS1AgDXZIaYaBLUiMMdElqhIEuSY0w0CWpEQa6JDXCQJekRhjoktQIA12SGmGgS1IjDHRJaoSBLkmNMNAlqREGuiQ1wkCXpEYY6JLUCANdkhphoEtSIwx0SWqEgS5JjRgo0JMsS3JrkvEkK6eYf3qSrUlu7B7vGn6pkqQdmTtTgyRzgNXA64DNwPoka6vq5klNL6qqFXugRknSAAY5Ql8KjFfVxqp6BFgDnLxny5Ik7axBAn0+sKlvfHM3bbI3J/l6ks8lWTjVipIsTzKWZGzr1q27UK4kaTrDOin618Ciqno58HfABVM1qqpzq2q0qkZHRkaGtGlJEgwW6FuA/iPuBd20J1TVvVX1cDf6F8DPDac8SdKgBgn09cDiJIclmQecCqztb5DkBX2jJwG3DK9ESdIgZvyVS1VtS7ICuAyYA5xXVRuSrALGqmot8NtJTgK2AfcBp+/BmiVJU5gx0AGqah2wbtK0M/uGPwR8aLilSZJ2hleKSlIjDHRJaoSBLkmNMNAlqREGuiQ1wkCXpEYY6JLUCANdkhphoEtSIwx0SWqEgS5JjTDQJakRBrokNcJAl6RGGOiS1AgDXZIaYaBLUiMMdElqhIEuSY0w0CWpEQa6JDXCQJekRhjoktQIA12SGmGgS1IjDHRJaoSBLkmNGCjQkyxLcmuS8SQrd9DuzUkqyejwSpQkDWLGQE8yB1gNnAAsAU5LsmSKds8GzgCuHXaRkqSZDXKEvhQYr6qNVfUIsAY4eYp2fwh8DPjxEOuTJA1okECfD2zqG9/cTXtCklcAC6vq0h2tKMnyJGNJxrZu
3brTxUqSprfbJ0WT7Ad8AnjfTG2r6tyqGq2q0ZGRkd3dtCSpzyCBvgVY2De+oJs24dnAy4DLk9wOvBpY64lRSXpqDRLo64HFSQ5LMg84FVg7MbOqHqyqg6tqUVUtAq4BTqqqsT1SsSRpSjMGelVtA1YAlwG3ABdX1YYkq5KctKcLlCQNZu4gjapqHbBu0rQzp2l77O6XJUnaWV4pKkmNMNAlqREGuiQ1wkCXpEYY6JLUCANdkhphoEtSIwx0SWqEgS5JjTDQJakRBrokNcJAl6RGGOiS1AgDXZIaYaBLUiMMdElqhIEuSY0w0CWpEQa6JDXCQJekRhjoktQIA12SGmGgS1IjDHRJaoSBLkmNMNAlqREGuiQ1YqBAT7Isya1JxpOsnGL+e5J8I8mNSa5OsmT4pUqSdmTGQE8yB1gNnAAsAU6bIrA/W1X/sqqOBD4OfGLolUqSdmiQI/SlwHhVbayqR4A1wMn9Darq//aNHgDU8EqUJA1i7gBt5gOb+sY3A6+a3CjJbwG/C8wDjptqRUmWA8sBDj300J2tVZK0A0M7KVpVq6vqcOCDwIenaXNuVY1W1ejIyMiwNi1JYrBA3wIs7Btf0E2bzhrgjbtTlCRp5w0S6OuBxUkOSzIPOBVY298gyeK+0ROB24ZXoiRpEDP2oVfVtiQrgMuAOcB5VbUhySpgrKrWAiuSHA88CtwPvH1PFi1JerJBTopSVeuAdZOmndk3fMaQ65Ik7SSvFJWkRhjoktQIA12SGmGgS1IjDHRJaoSBLkmNMNAlqREGuiQ1wkCXpEYY6JLUCANdkhphoEtSIwx0SWqEgS5JjTDQJakRBrokNcJAl6RGGOiS1AgDXZIaYaBLUiMMdElqhIEuSY0w0CWpEQa6JDXCQJekRhjoktSIubNdwK5YtPLS2S6hWbefdeIeWa+v2Z6zp14z7X0GOkJPsizJrUnGk6ycYv7vJrk5ydeTfCnJC4dfqiRpR2YM9CRzgNXACcAS4LQkSyY1+0dgtKpeDnwO+PiwC5Uk7dggR+hLgfGq2lhVjwBrgJP7G1TVV6rqR93oNcCC4ZYpSZrJIIE+H9jUN765mzad3wD+ZqoZSZYnGUsytnXr1sGrlCTNaKi/cknya8AocPZU86vq3KoararRkZGRYW5akvZ5g/zKZQuwsG98QTdtO0mOB34f+IWqeng45UmSBjXIEfp6YHGSw5LMA04F1vY3SHIUcA5wUlV9f/hlSpJmMmOgV9U2YAVwGXALcHFVbUiyKslJXbOzgQOBv0pyY5K106xOkrSHDHRhUVWtA9ZNmnZm3/DxQ65LkrSTvPRfkhphoEtSIwx0SWqEgS5JjTDQJakRBrokNcJAl6RGGOiS1AgDXZIaYaBLUiMMdElqhIEuSY0w0CWpEQa6JDXCQJekRhjoktQIA12SGmGgS1IjDHRJaoSBLkmNMNAlqREGuiQ1wkCXpEYY6JLUCANdkhphoEtSIwx0SWrEQIGeZFmSW5OMJ1k5xfzXJLkhybYkbxl+mZKkmcwY6EnmAKuBE4AlwGlJlkxq9j3gdOCzwy5QkjSYuQO0WQqMV9VGgCRrgJOBmycaVNXt3bzH90CNkqQBDNLlMh/Y1De+uZu205IsTzKWZGzr1q27sgpJ0jSe0pOiVXVuVY1W1ejIyMhTuWlJat4ggb4FWNg3vqCbJkl6Ghkk0NcDi5MclmQecCqwds+WJUnaWTMGelVtA1YAlwG3ABdX1YYkq5KcBJDklUk2A6cA5yTZsCeLliQ92SC/cqGq1gHrJk07s294Pb2uGEnSLPFKUUlqhIEuSY0w0CWpEQa6JDXCQJekRhjoktQIA12SGmGgS1IjDHRJaoSBLkmNMNAlqREGuiQ1wkCXpEYY6JLUCANdkhphoEtSIwx0SWqEgS5JjTDQJakRBrokNcJAl6RGGOiS1AgDXZIaYaBLUiMMdElqhIEuSY0w0CWpEQMFepJlSW5NMp5k5RTz909yUTf/2iSLhl2o
JGnHZgz0JHOA1cAJwBLgtCRLJjX7DeD+qjoC+BPgY8MuVJK0Y4McoS8FxqtqY1U9AqwBTp7U5mTggm74c8Brk2R4ZUqSZjJ3gDbzgU1945uBV03Xpqq2JXkQeC5wT3+jJMuB5d3oQ0lu3ZWi90IHM2lfPF3F71awF71e4GvW2ZdesxdON2OQQB+aqjoXOPep3ObTQZKxqhqd7To0GF+vvY+vWc8gXS5bgIV94wu6aVO2STIX+Eng3mEUKEkazCCBvh5YnOSwJPOAU4G1k9qsBd7eDb8F+HJV1fDKlCTNZMYul65PfAVwGTAHOK+qNiRZBYxV1VrgvwOfSTIO3Ecv9PXP9rlupr2cr9fex9cMiAfSktQGrxSVpEYY6JLUCANd0tNWktOTfHK269hbGOiS1AgDfTclWZXkvX3jf5TkjCTvT7I+ydeT/Kdu3gFJLk1yU5JvJnnr7FUugCSLktyS5NNJNiT52yQ/keTIJNd0r98lSZ4z27W2oNvf3+wb/70kH0lyeZKPJbkuybeTHDPFsicm+VqSg5Ocn+S/Jflqko1J3tK1SZKzu7+vb0z8jSVZneSkbviSJOd1w+/s/manfB88NXtleAz03Xce8OsASfaj95PNu4DF9O6DcyTwc0leAywD7qyqn62qlwH/Z3ZK1iSLgdVV9VLgAeDNwF8CH6yqlwPfAP7jLNa3r5hbVUuB9zJpfyf5FWAl8PqqmrjE/wXA0cAvA2d1095E72/uZ4HjgbOTvAC4Cpj4kJhP70aDdNOu7Ianeh/sVQz03VRVtwP3JjkK+CXgH4FX9g3fAPwLem+WbwCv645EjqmqB2enak3y3aq6sRu+HjgcOKiqruimXQC8ZlYq27d8vvv3emBR3/TjgA8CJ1bV/X3Tv1BVj1fVzcDzu2lHAxdW1WNVdTdwBb2/x6uAY7o7xd4M3N0F/c8DX+2Wnfw+6K9hr/CU3sulYX8BnA78NL0j9tcCf1xV50xumOQVwOuBjyb5UlWteioL1ZQe7ht+DDhotgrZB2xj+wPJZ/YNT7wOj7F9Nn0H+BngRcDYFO0Bdnh316rakuQget+SrwR+CvhV4KGq+kGS5/Lk94FdLvuoS+i9UV5J74ray4B3JjkQIMn8JM9Lcgjwo6r6H8DZwCtmq2Dt0IPA/X39uP+G3pGedt/dwPOSPDfJ/vS6S2ZyB103WJKXztD2KuCtSeYkGaH3zeq6bt419Lpzruza/V73bzM8Qh+CqnokyVeAB6rqMeBvk7wE+Fp3W/iHgF8DjqDXp/c48Cjwm7NVs2b0duBTSZ4FbATeMcv1NKGqHu1uG3IdvZv6fWvA5b6V5G3AXyV5ww6aXkKvG+UmoIAPVNVd3byrgF+qqvEkd9A7Sm8q0L30fwi6k6E3AKdU1W2zXY+kfZNdLrupO8kyDnzJMJc0mzxCl6RGeIQuSY0w0CWpEQa6JDXCQJekRhjoktSI/w/TIK6YopbnBgAAAABJRU5ErkJggg==\n", + "text/plain": [ + "
    " + ] + }, + "metadata": { + "tags": [], + "needs_background": "light" + } + }, + { + "output_type": "stream", + "text": [ + "/content/data/mini_speech_commands/unknown/2d92f18b_nohash_0.wav\n" + ], + "name": "stdout" + }, + { + "output_type": "display_data", + "data": { + "image/png": "iVBORw0KGgoAAAANSUhEUgAAAXQAAAEICAYAAABPgw/pAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4yLjIsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy+WH4yJAAAUd0lEQVR4nO3df7RdZX3n8feHxGAFV7FytZIEQyE6RseCvUa7BixF7ApSwapUGDsVrWbsNFNsrRqnlnFSO0WZZWdNTSvYYUGdkUAd0XRIh7Yqv6pALhTUgMg1gkkoGH6O6AgEvvPH2ZeeXO7NPUlOuOTJ+7XWWdk/nr339+xz7ufs8+yzd1JVSJL2fvvNdgGSpOEw0CWpEQa6JDXCQJekRhjoktQIA12SGmGga7ckOT/JR7vhY5Lcuovr+VSSPxhudTNu81eSbEryUJKjnsptDypJJTlituvQ3mHubBegPS/J7cDzgceAHwJ/A6yoqoeGuZ2qugp48QD1nA68q6qO7lv2PcOsZUD/hd5++OIwVpbkcuAjwLEAVfWRYaz36SrJ+cDl3eixVXX6rBUjwCP0fckbqupA4BXAKPDhyQ2S7Gsf8C8ENuzKgknmDLkWabcZ6PuYqtpC7wj9ZfDEV/rfSnIbcFs37ZeT3JjkgSRfTfLyieWTHJXkhiQ/SHIR8My+eccm2dw3vjDJ55NsTXJvkk8meQnwKeDnu66OB7q2T3TddOPvTjKe5L4ka5Mc0jevkrwnyW1djauTpJt3RJIrkjyY5J6uxu0k2T/JQ8Ac4KYk3+mmvyTJ5d06NyQ5qW+Z85P8eZJ1SX4I/OIg+zvJ6UmunjTtiW6Ubr2rk1za7dNrkxw+zbqO7rqIjh1gP+yX5MNJ7kjy/SR/meQnu3kXJHlfNzx/4j3QjR/e7fP9Jl7PJO/r1vFPSd4xyPPWLKkqH40/gNuB47vhhfSOSv+wGy/g74CfAn4COAr4PvAqeoH39m75/YF5wB3A7wDPAN4CPAp8tFvXscDmbngOcBPwJ8AB9IL/6G7e6cDVk2o8v289xwH30Ps2sT/wp8CVfW0L+N/AQcChwFZgWTfvQuD36R2sPLHNafZLAUd0w88AxoH/0D3P44AfAC/uq+9B4F9NrHvAfT/Vc+3f7vnAvcBSel2g/xNYM7ktsAzYBCwdcD+8s3s+PwMcCHwe+EzfvL/uhv818B3gor55X+x7PbcBq7r983rgR8BzZvs97WPqh0fo+44vdEfDVwNXAP+5b94fV9V9VfX/gOXAOVV1bVU9VlUXAA8Dr+4ezwD+a1U9WlWfA9ZPs72lwCHA+6vqh1X146q6epq2k70NOK+qbqiqh4EP0TuiX9TX5qyqeqCqvgd8BTiym/4ova6UQ3Zym6+mF3xnVdUjVfVlemF5Wl+bL1bVP1TV41X14wHXO4hLquq6qtpGL9CPnDT/FOAc4ISqum7SvOn2w9uAT1TVxuqdK/kQcGrXrXYFcHSS/YDXAB+n90EF8Avd/AmPAqu613sd8BADnCfR7DDQ9x1vrKqDquqFVfXvuvCesKlv+IXA+7qv8A90HwIL6YXzIcCWquq/o9sd02xvIXBHF1I765D+9XaBdC8wv6/NXX3DP6IXxgAfAAJc13WbvHMntrmpqh7vm3bHpG1uYs+Y7rlMeC9wcVV9cyeW3W4fdsNzgedX1XfonRw/
EjiG3gfXnUlezJMD/d5Jr+FU9elpwkAX9L66T9gE/FEX/hOPZ1XVhcA/AfMn+mk7h06zzk3AodOcaJ3pFp930vtgASDJAcBzgS0zPpGqu6rq3VV1CPBvgT8b8Gd/dwILu6PWCYdO2uau3Jr0h8CzJkaS/PQurOMU4I1JztiJZbbbh/Seyzbg7m78CnpdZvOqd17lCnrda88BbtyFGvU0YKBrsk8D70nyqvQckOTEJM8GvkYvFH47yTOSvIle18pUrqP3AXBWt45nJpn4Wn83sCDJvGmWvRB4R5Ijk+xPr3vo2qq6fabik5ySZEE3ej+9EH58B4tMuJbe0ecHuud2LPAGYM0Ay+7ITcBLu+fyTHo/a9xZdwKvBc5I8psDLnMh8DtJDktyIL19eFHf0fYVwArgym788m786qp6bBdq1NOAga7tVNUY8G7gk/QCcZzeiT2q6hHgTd34fcBb6Z1sm2o9j9ELxCOA7wGbu/YAX6Z3YvauJPdMsezfA38A/C96HwqHA6cO+BReCVzb/YplLXBGVW2caaHuub0BOIHeCdk/A369qr414HanW++36Z1U/Ht6vyIatE9/8nq+Ry/UVyZ51wCLnAd8hl5gfxf4MfDv++ZfATybfw70q+l9k7gS7bWyfXeoJGlv5RG6JDXCQJekRhjoktSIgQI9ybIkt3aXYq+cps2vJrm5++3vZ4dbpiRpJjOeFE3vJkTfBl5H75cK64HTqurmvjaLgYuB46rq/iTPq6rv72i9Bx98cC1atGg3y5ekfcv1119/T1WNTDVvkLvrLQXGJ376lWQNcDJwc1+bdwOrq+p+gJnCHGDRokWMjY0NsHlJ0oQk012dPVCXy3y2v+R5M9tfDg3wIuBFSf4hyTVJlk1TyPIkY0nGtm7dOsCmJUmDGtZJ0bnAYnp3ZzsN+HSSgyY3qqpzq2q0qkZHRqb8xiBJ2kWDBPoWejdamrCAJ99TYzOwtrsj23fp9bkvHk6JkqRBDBLo64HF3T0h5tG7BHvtpDZfoPtvt5IcTK8LZsbLrSVJwzNjoHc381kBXAbcQu82nhuSrOr7H10uA+5NcjO9ezK/v6ru3VNFS5KebNbu5TI6Olr+ykWSdk6S66tqdKp5XikqSY0w0CWpEQa6JDVikCtFJe2DFq28dLZLaNbtZ524R9brEbokNcJAl6RGGOiS1AgDXZIaYaBLUiMMdElqhIEuSY0w0CWpEQa6JDXCQJekRhjoktQIA12SGmGgS1IjDHRJaoSBLkmNMNAlqREGuiQ1wkCXpEYY6JLUCANdkhphoEtSIwx0SWqEgS5JjRgo0JMsS3JrkvEkK6eYf3qSrUlu7B7vGn6pkqQdmTtTgyRzgNXA64DNwPoka6vq5klNL6qqFXugRknSAAY5Ql8KjFfVxqp6BFgDnLxny5Ik7axBAn0+sKlvfHM3bbI3J/l6ks8lWTjVipIsTzKWZGzr1q27UK4kaTrDOin618Ciqno58HfABVM1qqpzq2q0qkZHRkaGtGlJEgwW6FuA/iPuBd20J1TVvVX1cDf6F8DPDac8SdKgBgn09cDiJIclmQecCqztb5DkBX2jJwG3DK9ESdIgZvyVS1VtS7ICuAyYA5xXVRuSrALGqmot8NtJTgK2AfcBp+/BmiVJU5gx0AGqah2wbtK0M/uGPwR8aLilSZJ2hleKSlIjDHRJaoSBLkmNMNAlqREGuiQ1wkCXpEYY6JLUCANdkhphoEtSIwx0SWqEgS5JjTDQJakRBrokNcJAl6RGGOiS1AgDXZIaYaBLUiMMdElqhIEuSY0w0CWpEQa6JDXCQJekRhjoktQIA12SGmGgS1IjDHRJaoSBLkmNGCjQkyxLcmuS8SQrd9DuzUkqyejwSpQkDWLGQE8yB1gNnAAsAU5LsmSKds8GzgCuHXaRkqSZDXKEvhQYr6qNVfUIsAY4eYp2fwh8DPjxEOuTJA1okECfD2zqG9/cTXtCklcAC6vq0h2tKMnyJGNJxrZu
3brTxUqSprfbJ0WT7Ad8AnjfTG2r6tyqGq2q0ZGRkd3dtCSpzyCBvgVY2De+oJs24dnAy4DLk9wOvBpY64lRSXpqDRLo64HFSQ5LMg84FVg7MbOqHqyqg6tqUVUtAq4BTqqqsT1SsSRpSjMGelVtA1YAlwG3ABdX1YYkq5KctKcLlCQNZu4gjapqHbBu0rQzp2l77O6XJUnaWV4pKkmNMNAlqREGuiQ1wkCXpEYY6JLUCANdkhphoEtSIwx0SWqEgS5JjTDQJakRBrokNcJAl6RGGOiS1AgDXZIaYaBLUiMMdElqhIEuSY0w0CWpEQa6JDXCQJekRhjoktQIA12SGmGgS1IjDHRJaoSBLkmNMNAlqREGuiQ1YqBAT7Isya1JxpOsnGL+e5J8I8mNSa5OsmT4pUqSdmTGQE8yB1gNnAAsAU6bIrA/W1X/sqqOBD4OfGLolUqSdmiQI/SlwHhVbayqR4A1wMn9Darq//aNHgDU8EqUJA1i7gBt5gOb+sY3A6+a3CjJbwG/C8wDjptqRUmWA8sBDj300J2tVZK0A0M7KVpVq6vqcOCDwIenaXNuVY1W1ejIyMiwNi1JYrBA3wIs7Btf0E2bzhrgjbtTlCRp5w0S6OuBxUkOSzIPOBVY298gyeK+0ROB24ZXoiRpEDP2oVfVtiQrgMuAOcB5VbUhySpgrKrWAiuSHA88CtwPvH1PFi1JerJBTopSVeuAdZOmndk3fMaQ65Ik7SSvFJWkRhjoktQIA12SGmGgS1IjDHRJaoSBLkmNMNAlqREGuiQ1wkCXpEYY6JLUCANdkhphoEtSIwx0SWqEgS5JjTDQJakRBrokNcJAl6RGGOiS1AgDXZIaYaBLUiMMdElqhIEuSY0w0CWpEQa6JDXCQJekRhjoktSIubNdwK5YtPLS2S6hWbefdeIeWa+v2Z6zp14z7X0GOkJPsizJrUnGk6ycYv7vJrk5ydeTfCnJC4dfqiRpR2YM9CRzgNXACcAS4LQkSyY1+0dgtKpeDnwO+PiwC5Uk7dggR+hLgfGq2lhVjwBrgJP7G1TVV6rqR93oNcCC4ZYpSZrJIIE+H9jUN765mzad3wD+ZqoZSZYnGUsytnXr1sGrlCTNaKi/cknya8AocPZU86vq3KoararRkZGRYW5akvZ5g/zKZQuwsG98QTdtO0mOB34f+IWqeng45UmSBjXIEfp6YHGSw5LMA04F1vY3SHIUcA5wUlV9f/hlSpJmMmOgV9U2YAVwGXALcHFVbUiyKslJXbOzgQOBv0pyY5K106xOkrSHDHRhUVWtA9ZNmnZm3/DxQ65LkrSTvPRfkhphoEtSIwx0SWqEgS5JjTDQJakRBrokNcJAl6RGGOiS1AgDXZIaYaBLUiMMdElqhIEuSY0w0CWpEQa6JDXCQJekRhjoktQIA12SGmGgS1IjDHRJaoSBLkmNMNAlqREGuiQ1wkCXpEYY6JLUCANdkhphoEtSIwx0SWrEQIGeZFmSW5OMJ1k5xfzXJLkhybYkbxl+mZKkmcwY6EnmAKuBE4AlwGlJlkxq9j3gdOCzwy5QkjSYuQO0WQqMV9VGgCRrgJOBmycaVNXt3bzH90CNkqQBDNLlMh/Y1De+uZu205IsTzKWZGzr1q27sgpJ0jSe0pOiVXVuVY1W1ejIyMhTuWlJat4ggb4FWNg3vqCbJkl6Ghkk0NcDi5MclmQecCqwds+WJUnaWTMGelVtA1YAlwG3ABdX1YYkq5KcBJDklUk2A6cA5yTZsCeLliQ92SC/cqGq1gHrJk07s294Pb2uGEnSLPFKUUlqhIEuSY0w0CWpEQa6JDXCQJekRhjoktQIA12SGmGgS1IjDHRJaoSBLkmNMNAlqREGuiQ1wkCXpEYY6JLUCANdkhphoEtSIwx0SWqEgS5JjTDQJakRBrokNcJAl6RGGOiS1AgDXZIaYaBLUiMMdElqhIEuSY0w0CWpEQMFepJlSW5NMp5k5RTz909yUTf/2iSLhl2o
JGnHZgz0JHOA1cAJwBLgtCRLJjX7DeD+qjoC+BPgY8MuVJK0Y4McoS8FxqtqY1U9AqwBTp7U5mTggm74c8Brk2R4ZUqSZjJ3gDbzgU1945uBV03Xpqq2JXkQeC5wT3+jJMuB5d3oQ0lu3ZWi90IHM2lfPF3F71awF71e4GvW2ZdesxdON2OQQB+aqjoXOPep3ObTQZKxqhqd7To0GF+vvY+vWc8gXS5bgIV94wu6aVO2STIX+Eng3mEUKEkazCCBvh5YnOSwJPOAU4G1k9qsBd7eDb8F+HJV1fDKlCTNZMYul65PfAVwGTAHOK+qNiRZBYxV1VrgvwOfSTIO3Ecv9PXP9rlupr2cr9fex9cMiAfSktQGrxSVpEYY6JLUCANd0tNWktOTfHK269hbGOiS1AgDfTclWZXkvX3jf5TkjCTvT7I+ydeT/Kdu3gFJLk1yU5JvJnnr7FUugCSLktyS5NNJNiT52yQ/keTIJNd0r98lSZ4z27W2oNvf3+wb/70kH0lyeZKPJbkuybeTHDPFsicm+VqSg5Ocn+S/Jflqko1J3tK1SZKzu7+vb0z8jSVZneSkbviSJOd1w+/s/manfB88NXtleAz03Xce8OsASfaj95PNu4DF9O6DcyTwc0leAywD7qyqn62qlwH/Z3ZK1iSLgdVV9VLgAeDNwF8CH6yqlwPfAP7jLNa3r5hbVUuB9zJpfyf5FWAl8PqqmrjE/wXA0cAvA2d1095E72/uZ4HjgbOTvAC4Cpj4kJhP70aDdNOu7Ianeh/sVQz03VRVtwP3JjkK+CXgH4FX9g3fAPwLem+WbwCv645EjqmqB2enak3y3aq6sRu+HjgcOKiqruimXQC8ZlYq27d8vvv3emBR3/TjgA8CJ1bV/X3Tv1BVj1fVzcDzu2lHAxdW1WNVdTdwBb2/x6uAY7o7xd4M3N0F/c8DX+2Wnfw+6K9hr/CU3sulYX8BnA78NL0j9tcCf1xV50xumOQVwOuBjyb5UlWteioL1ZQe7ht+DDhotgrZB2xj+wPJZ/YNT7wOj7F9Nn0H+BngRcDYFO0Bdnh316rakuQget+SrwR+CvhV4KGq+kGS5/Lk94FdLvuoS+i9UV5J74ray4B3JjkQIMn8JM9Lcgjwo6r6H8DZwCtmq2Dt0IPA/X39uP+G3pGedt/dwPOSPDfJ/vS6S2ZyB103WJKXztD2KuCtSeYkGaH3zeq6bt419Lpzruza/V73bzM8Qh+CqnokyVeAB6rqMeBvk7wE+Fp3W/iHgF8DjqDXp/c48Cjwm7NVs2b0duBTSZ4FbATeMcv1NKGqHu1uG3IdvZv6fWvA5b6V5G3AXyV5ww6aXkKvG+UmoIAPVNVd3byrgF+qqvEkd9A7Sm8q0L30fwi6k6E3AKdU1W2zXY+kfZNdLrupO8kyDnzJMJc0mzxCl6RGeIQuSY0w0CWpEQa6JDXCQJekRhjoktSI/w/TIK6YopbnBgAAAABJRU5ErkJggg==\n", + "text/plain": [ + "
    " + ] + }, + "metadata": { + "tags": [], + "needs_background": "light" + } + }, + { + "output_type": "stream", + "text": [ + "/content/data/mini_speech_commands/unknown/b83c1acf_nohash_2.wav\n" + ], + "name": "stdout" + }, + { + "output_type": "display_data", + "data": { + "image/png": "iVBORw0KGgoAAAANSUhEUgAAAXQAAAEICAYAAABPgw/pAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4yLjIsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy+WH4yJAAAUd0lEQVR4nO3df7RdZX3n8feHxGAFV7FytZIEQyE6RseCvUa7BixF7ApSwapUGDsVrWbsNFNsrRqnlnFSO0WZZWdNTSvYYUGdkUAd0XRIh7Yqv6pALhTUgMg1gkkoGH6O6AgEvvPH2ZeeXO7NPUlOuOTJ+7XWWdk/nr339+xz7ufs8+yzd1JVSJL2fvvNdgGSpOEw0CWpEQa6JDXCQJekRhjoktQIA12SGmGga7ckOT/JR7vhY5Lcuovr+VSSPxhudTNu81eSbEryUJKjnsptDypJJTlituvQ3mHubBegPS/J7cDzgceAHwJ/A6yoqoeGuZ2qugp48QD1nA68q6qO7lv2PcOsZUD/hd5++OIwVpbkcuAjwLEAVfWRYaz36SrJ+cDl3eixVXX6rBUjwCP0fckbqupA4BXAKPDhyQ2S7Gsf8C8ENuzKgknmDLkWabcZ6PuYqtpC7wj9ZfDEV/rfSnIbcFs37ZeT3JjkgSRfTfLyieWTHJXkhiQ/SHIR8My+eccm2dw3vjDJ55NsTXJvkk8meQnwKeDnu66OB7q2T3TddOPvTjKe5L4ka5Mc0jevkrwnyW1djauTpJt3RJIrkjyY5J6uxu0k2T/JQ8Ac4KYk3+mmvyTJ5d06NyQ5qW+Z85P8eZJ1SX4I/OIg+zvJ6UmunjTtiW6Ubr2rk1za7dNrkxw+zbqO7rqIjh1gP+yX5MNJ7kjy/SR/meQnu3kXJHlfNzx/4j3QjR/e7fP9Jl7PJO/r1vFPSd4xyPPWLKkqH40/gNuB47vhhfSOSv+wGy/g74CfAn4COAr4PvAqeoH39m75/YF5wB3A7wDPAN4CPAp8tFvXscDmbngOcBPwJ8AB9IL/6G7e6cDVk2o8v289xwH30Ps2sT/wp8CVfW0L+N/AQcChwFZgWTfvQuD36R2sPLHNafZLAUd0w88AxoH/0D3P44AfAC/uq+9B4F9NrHvAfT/Vc+3f7vnAvcBSel2g/xNYM7ktsAzYBCwdcD+8s3s+PwMcCHwe+EzfvL/uhv818B3gor55X+x7PbcBq7r983rgR8BzZvs97WPqh0fo+44vdEfDVwNXAP+5b94fV9V9VfX/gOXAOVV1bVU9VlUXAA8Dr+4ezwD+a1U9WlWfA9ZPs72lwCHA+6vqh1X146q6epq2k70NOK+qbqiqh4EP0TuiX9TX5qyqeqCqvgd8BTiym/4ova6UQ3Zym6+mF3xnVdUjVfVlemF5Wl+bL1bVP1TV41X14wHXO4hLquq6qtpGL9CPnDT/FOAc4ISqum7SvOn2w9uAT1TVxuqdK/kQcGrXrXYFcHSS/YDXAB+n90EF8Avd/AmPAqu613sd8BADnCfR7DDQ9x1vrKqDquqFVfXvuvCesKlv+IXA+7qv8A90HwIL6YXzIcCWquq/o9sd02xvIXBHF1I765D+9XaBdC8wv6/NXX3DP6IXxgAfAAJc13WbvHMntrmpqh7vm3bHpG1uYs+Y7rlMeC9wcVV9cyeW3W4fdsNzgedX1XfonRw/
EjiG3gfXnUlezJMD/d5Jr+FU9elpwkAX9L66T9gE/FEX/hOPZ1XVhcA/AfMn+mk7h06zzk3AodOcaJ3pFp930vtgASDJAcBzgS0zPpGqu6rq3VV1CPBvgT8b8Gd/dwILu6PWCYdO2uau3Jr0h8CzJkaS/PQurOMU4I1JztiJZbbbh/Seyzbg7m78CnpdZvOqd17lCnrda88BbtyFGvU0YKBrsk8D70nyqvQckOTEJM8GvkYvFH47yTOSvIle18pUrqP3AXBWt45nJpn4Wn83sCDJvGmWvRB4R5Ijk+xPr3vo2qq6fabik5ySZEE3ej+9EH58B4tMuJbe0ecHuud2LPAGYM0Ay+7ITcBLu+fyTHo/a9xZdwKvBc5I8psDLnMh8DtJDktyIL19eFHf0fYVwArgym788m786qp6bBdq1NOAga7tVNUY8G7gk/QCcZzeiT2q6hHgTd34fcBb6Z1sm2o9j9ELxCOA7wGbu/YAX6Z3YvauJPdMsezfA38A/C96HwqHA6cO+BReCVzb/YplLXBGVW2caaHuub0BOIHeCdk/A369qr414HanW++36Z1U/Ht6vyIatE9/8nq+Ry/UVyZ51wCLnAd8hl5gfxf4MfDv++ZfATybfw70q+l9k7gS7bWyfXeoJGlv5RG6JDXCQJekRhjoktSIgQI9ybIkt3aXYq+cps2vJrm5++3vZ4dbpiRpJjOeFE3vJkTfBl5H75cK64HTqurmvjaLgYuB46rq/iTPq6rv72i9Bx98cC1atGg3y5ekfcv1119/T1WNTDVvkLvrLQXGJ376lWQNcDJwc1+bdwOrq+p+gJnCHGDRokWMjY0NsHlJ0oQk012dPVCXy3y2v+R5M9tfDg3wIuBFSf4hyTVJlk1TyPIkY0nGtm7dOsCmJUmDGtZJ0bnAYnp3ZzsN+HSSgyY3qqpzq2q0qkZHRqb8xiBJ2kWDBPoWejdamrCAJ99TYzOwtrsj23fp9bkvHk6JkqRBDBLo64HF3T0h5tG7BHvtpDZfoPtvt5IcTK8LZsbLrSVJwzNjoHc381kBXAbcQu82nhuSrOr7H10uA+5NcjO9ezK/v6ru3VNFS5KebNbu5TI6Olr+ykWSdk6S66tqdKp5XikqSY0w0CWpEQa6JDVikCtFJe2DFq28dLZLaNbtZ524R9brEbokNcJAl6RGGOiS1AgDXZIaYaBLUiMMdElqhIEuSY0w0CWpEQa6JDXCQJekRhjoktQIA12SGmGgS1IjDHRJaoSBLkmNMNAlqREGuiQ1wkCXpEYY6JLUCANdkhphoEtSIwx0SWqEgS5JjRgo0JMsS3JrkvEkK6eYf3qSrUlu7B7vGn6pkqQdmTtTgyRzgNXA64DNwPoka6vq5klNL6qqFXugRknSAAY5Ql8KjFfVxqp6BFgDnLxny5Ik7axBAn0+sKlvfHM3bbI3J/l6ks8lWTjVipIsTzKWZGzr1q27UK4kaTrDOin618Ciqno58HfABVM1qqpzq2q0qkZHRkaGtGlJEgwW6FuA/iPuBd20J1TVvVX1cDf6F8DPDac8SdKgBgn09cDiJIclmQecCqztb5DkBX2jJwG3DK9ESdIgZvyVS1VtS7ICuAyYA5xXVRuSrALGqmot8NtJTgK2AfcBp+/BmiVJU5gx0AGqah2wbtK0M/uGPwR8aLilSZJ2hleKSlIjDHRJaoSBLkmNMNAlqREGuiQ1wkCXpEYY6JLUCANdkhphoEtSIwx0SWqEgS5JjTDQJakRBrokNcJAl6RGGOiS1AgDXZIaYaBLUiMMdElqhIEuSY0w0CWpEQa6JDXCQJekRhjoktQIA12SGmGgS1IjDHRJaoSBLkmNGCjQkyxLcmuS8SQrd9DuzUkqyejwSpQkDWLGQE8yB1gNnAAsAU5LsmSKds8GzgCuHXaRkqSZDXKEvhQYr6qNVfUIsAY4eYp2fwh8DPjxEOuTJA1okECfD2zqG9/cTXtCklcAC6vq0h2tKMnyJGNJxrZu
3brTxUqSprfbJ0WT7Ad8AnjfTG2r6tyqGq2q0ZGRkd3dtCSpzyCBvgVY2De+oJs24dnAy4DLk9wOvBpY64lRSXpqDRLo64HFSQ5LMg84FVg7MbOqHqyqg6tqUVUtAq4BTqqqsT1SsSRpSjMGelVtA1YAlwG3ABdX1YYkq5KctKcLlCQNZu4gjapqHbBu0rQzp2l77O6XJUnaWV4pKkmNMNAlqREGuiQ1wkCXpEYY6JLUCANdkhphoEtSIwx0SWqEgS5JjTDQJakRBrokNcJAl6RGGOiS1AgDXZIaYaBLUiMMdElqhIEuSY0w0CWpEQa6JDXCQJekRhjoktQIA12SGmGgS1IjDHRJaoSBLkmNMNAlqREGuiQ1YqBAT7Isya1JxpOsnGL+e5J8I8mNSa5OsmT4pUqSdmTGQE8yB1gNnAAsAU6bIrA/W1X/sqqOBD4OfGLolUqSdmiQI/SlwHhVbayqR4A1wMn9Darq//aNHgDU8EqUJA1i7gBt5gOb+sY3A6+a3CjJbwG/C8wDjptqRUmWA8sBDj300J2tVZK0A0M7KVpVq6vqcOCDwIenaXNuVY1W1ejIyMiwNi1JYrBA3wIs7Btf0E2bzhrgjbtTlCRp5w0S6OuBxUkOSzIPOBVY298gyeK+0ROB24ZXoiRpEDP2oVfVtiQrgMuAOcB5VbUhySpgrKrWAiuSHA88CtwPvH1PFi1JerJBTopSVeuAdZOmndk3fMaQ65Ik7SSvFJWkRhjoktQIA12SGmGgS1IjDHRJaoSBLkmNMNAlqREGuiQ1wkCXpEYY6JLUCANdkhphoEtSIwx0SWqEgS5JjTDQJakRBrokNcJAl6RGGOiS1AgDXZIaYaBLUiMMdElqhIEuSY0w0CWpEQa6JDXCQJekRhjoktSIubNdwK5YtPLS2S6hWbefdeIeWa+v2Z6zp14z7X0GOkJPsizJrUnGk6ycYv7vJrk5ydeTfCnJC4dfqiRpR2YM9CRzgNXACcAS4LQkSyY1+0dgtKpeDnwO+PiwC5Uk7dggR+hLgfGq2lhVjwBrgJP7G1TVV6rqR93oNcCC4ZYpSZrJIIE+H9jUN765mzad3wD+ZqoZSZYnGUsytnXr1sGrlCTNaKi/cknya8AocPZU86vq3KoararRkZGRYW5akvZ5g/zKZQuwsG98QTdtO0mOB34f+IWqeng45UmSBjXIEfp6YHGSw5LMA04F1vY3SHIUcA5wUlV9f/hlSpJmMmOgV9U2YAVwGXALcHFVbUiyKslJXbOzgQOBv0pyY5K106xOkrSHDHRhUVWtA9ZNmnZm3/DxQ65LkrSTvPRfkhphoEtSIwx0SWqEgS5JjTDQJakRBrokNcJAl6RGGOiS1AgDXZIaYaBLUiMMdElqhIEuSY0w0CWpEQa6JDXCQJekRhjoktQIA12SGmGgS1IjDHRJaoSBLkmNMNAlqREGuiQ1wkCXpEYY6JLUCANdkhphoEtSIwx0SWrEQIGeZFmSW5OMJ1k5xfzXJLkhybYkbxl+mZKkmcwY6EnmAKuBE4AlwGlJlkxq9j3gdOCzwy5QkjSYuQO0WQqMV9VGgCRrgJOBmycaVNXt3bzH90CNkqQBDNLlMh/Y1De+uZu205IsTzKWZGzr1q27sgpJ0jSe0pOiVXVuVY1W1ejIyMhTuWlJat4ggb4FWNg3vqCbJkl6Ghkk0NcDi5MclmQecCqwds+WJUnaWTMGelVtA1YAlwG3ABdX1YYkq5KcBJDklUk2A6cA5yTZsCeLliQ92SC/cqGq1gHrJk07s294Pb2uGEnSLPFKUUlqhIEuSY0w0CWpEQa6JDXCQJekRhjoktQIA12SGmGgS1IjDHRJaoSBLkmNMNAlqREGuiQ1wkCXpEYY6JLUCANdkhphoEtSIwx0SWqEgS5JjTDQJakRBrokNcJAl6RGGOiS1AgDXZIaYaBLUiMMdElqhIEuSY0w0CWpEQMFepJlSW5NMp5k5RTz909yUTf/2iSLhl2o
JGnHZgz0JHOA1cAJwBLgtCRLJjX7DeD+qjoC+BPgY8MuVJK0Y4McoS8FxqtqY1U9AqwBTp7U5mTggm74c8Brk2R4ZUqSZjJ3gDbzgU1945uBV03Xpqq2JXkQeC5wT3+jJMuB5d3oQ0lu3ZWi90IHM2lfPF3F71awF71e4GvW2ZdesxdON2OQQB+aqjoXOPep3ObTQZKxqhqd7To0GF+vvY+vWc8gXS5bgIV94wu6aVO2STIX+Eng3mEUKEkazCCBvh5YnOSwJPOAU4G1k9qsBd7eDb8F+HJV1fDKlCTNZMYul65PfAVwGTAHOK+qNiRZBYxV1VrgvwOfSTIO3Ecv9PXP9rlupr2cr9fex9cMiAfSktQGrxSVpEYY6JLUCANd0tNWktOTfHK269hbGOiS1AgDfTclWZXkvX3jf5TkjCTvT7I+ydeT/Kdu3gFJLk1yU5JvJnnr7FUugCSLktyS5NNJNiT52yQ/keTIJNd0r98lSZ4z27W2oNvf3+wb/70kH0lyeZKPJbkuybeTHDPFsicm+VqSg5Ocn+S/Jflqko1J3tK1SZKzu7+vb0z8jSVZneSkbviSJOd1w+/s/manfB88NXtleAz03Xce8OsASfaj95PNu4DF9O6DcyTwc0leAywD7qyqn62qlwH/Z3ZK1iSLgdVV9VLgAeDNwF8CH6yqlwPfAP7jLNa3r5hbVUuB9zJpfyf5FWAl8PqqmrjE/wXA0cAvA2d1095E72/uZ4HjgbOTvAC4Cpj4kJhP70aDdNOu7Ianeh/sVQz03VRVtwP3JjkK+CXgH4FX9g3fAPwLem+WbwCv645EjqmqB2enak3y3aq6sRu+HjgcOKiqruimXQC8ZlYq27d8vvv3emBR3/TjgA8CJ1bV/X3Tv1BVj1fVzcDzu2lHAxdW1WNVdTdwBb2/x6uAY7o7xd4M3N0F/c8DX+2Wnfw+6K9hr/CU3sulYX8BnA78NL0j9tcCf1xV50xumOQVwOuBjyb5UlWteioL1ZQe7ht+DDhotgrZB2xj+wPJZ/YNT7wOj7F9Nn0H+BngRcDYFO0Bdnh316rakuQget+SrwR+CvhV4KGq+kGS5/Lk94FdLvuoS+i9UV5J74ray4B3JjkQIMn8JM9Lcgjwo6r6H8DZwCtmq2Dt0IPA/X39uP+G3pGedt/dwPOSPDfJ/vS6S2ZyB103WJKXztD2KuCtSeYkGaH3zeq6bt419Lpzruza/V73bzM8Qh+CqnokyVeAB6rqMeBvk7wE+Fp3W/iHgF8DjqDXp/c48Cjwm7NVs2b0duBTSZ4FbATeMcv1NKGqHu1uG3IdvZv6fWvA5b6V5G3AXyV5ww6aXkKvG+UmoIAPVNVd3byrgF+qqvEkd9A7Sm8q0L30fwi6k6E3AKdU1W2zXY+kfZNdLrupO8kyDnzJMJc0mzxCl6RGeIQuSY0w0CWpEQa6JDXCQJekRhjoktSI/w/TIK6YopbnBgAAAABJRU5ErkJggg==\n", + "text/plain": [ + "
    " + ] + }, + "metadata": { + "tags": [], + "needs_background": "light" + } + }, + { + "output_type": "stream", + "text": [ + "/content/data/mini_speech_commands/unknown/4a1e736b_nohash_4.wav\n" + ], + "name": "stdout" + }, + { + "output_type": "display_data", + "data": { + "image/png": "iVBORw0KGgoAAAANSUhEUgAAAXQAAAEICAYAAABPgw/pAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4yLjIsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy+WH4yJAAAUd0lEQVR4nO3df7RdZX3n8feHxGAFV7FytZIEQyE6RseCvUa7BixF7ApSwapUGDsVrWbsNFNsrRqnlnFSO0WZZWdNTSvYYUGdkUAd0XRIh7Yqv6pALhTUgMg1gkkoGH6O6AgEvvPH2ZeeXO7NPUlOuOTJ+7XWWdk/nr339+xz7ufs8+yzd1JVSJL2fvvNdgGSpOEw0CWpEQa6JDXCQJekRhjoktQIA12SGmGga7ckOT/JR7vhY5Lcuovr+VSSPxhudTNu81eSbEryUJKjnsptDypJJTlituvQ3mHubBegPS/J7cDzgceAHwJ/A6yoqoeGuZ2qugp48QD1nA68q6qO7lv2PcOsZUD/hd5++OIwVpbkcuAjwLEAVfWRYaz36SrJ+cDl3eixVXX6rBUjwCP0fckbqupA4BXAKPDhyQ2S7Gsf8C8ENuzKgknmDLkWabcZ6PuYqtpC7wj9ZfDEV/rfSnIbcFs37ZeT3JjkgSRfTfLyieWTHJXkhiQ/SHIR8My+eccm2dw3vjDJ55NsTXJvkk8meQnwKeDnu66OB7q2T3TddOPvTjKe5L4ka5Mc0jevkrwnyW1djauTpJt3RJIrkjyY5J6uxu0k2T/JQ8Ac4KYk3+mmvyTJ5d06NyQ5qW+Z85P8eZJ1SX4I/OIg+zvJ6UmunjTtiW6Ubr2rk1za7dNrkxw+zbqO7rqIjh1gP+yX5MNJ7kjy/SR/meQnu3kXJHlfNzx/4j3QjR/e7fP9Jl7PJO/r1vFPSd4xyPPWLKkqH40/gNuB47vhhfSOSv+wGy/g74CfAn4COAr4PvAqeoH39m75/YF5wB3A7wDPAN4CPAp8tFvXscDmbngOcBPwJ8AB9IL/6G7e6cDVk2o8v289xwH30Ps2sT/wp8CVfW0L+N/AQcChwFZgWTfvQuD36R2sPLHNafZLAUd0w88AxoH/0D3P44AfAC/uq+9B4F9NrHvAfT/Vc+3f7vnAvcBSel2g/xNYM7ktsAzYBCwdcD+8s3s+PwMcCHwe+EzfvL/uhv818B3gor55X+x7PbcBq7r983rgR8BzZvs97WPqh0fo+44vdEfDVwNXAP+5b94fV9V9VfX/gOXAOVV1bVU9VlUXAA8Dr+4ezwD+a1U9WlWfA9ZPs72lwCHA+6vqh1X146q6epq2k70NOK+qbqiqh4EP0TuiX9TX5qyqeqCqvgd8BTiym/4ova6UQ3Zym6+mF3xnVdUjVfVlemF5Wl+bL1bVP1TV41X14wHXO4hLquq6qtpGL9CPnDT/FOAc4ISqum7SvOn2w9uAT1TVxuqdK/kQcGrXrXYFcHSS/YDXAB+n90EF8Avd/AmPAqu613sd8BADnCfR7DDQ9x1vrKqDquqFVfXvuvCesKlv+IXA+7qv8A90HwIL6YXzIcCWquq/o9sd02xvIXBHF1I765D+9XaBdC8wv6/NXX3DP6IXxgAfAAJc13WbvHMntrmpqh7vm3bHpG1uYs+Y7rlMeC9wcVV9cyeW3W4fdsNzgedX1XfonRw/
EjiG3gfXnUlezJMD/d5Jr+FU9elpwkAX9L66T9gE/FEX/hOPZ1XVhcA/AfMn+mk7h06zzk3AodOcaJ3pFp930vtgASDJAcBzgS0zPpGqu6rq3VV1CPBvgT8b8Gd/dwILu6PWCYdO2uau3Jr0h8CzJkaS/PQurOMU4I1JztiJZbbbh/Seyzbg7m78CnpdZvOqd17lCnrda88BbtyFGvU0YKBrsk8D70nyqvQckOTEJM8GvkYvFH47yTOSvIle18pUrqP3AXBWt45nJpn4Wn83sCDJvGmWvRB4R5Ijk+xPr3vo2qq6fabik5ySZEE3ej+9EH58B4tMuJbe0ecHuud2LPAGYM0Ay+7ITcBLu+fyTHo/a9xZdwKvBc5I8psDLnMh8DtJDktyIL19eFHf0fYVwArgym788m786qp6bBdq1NOAga7tVNUY8G7gk/QCcZzeiT2q6hHgTd34fcBb6Z1sm2o9j9ELxCOA7wGbu/YAX6Z3YvauJPdMsezfA38A/C96HwqHA6cO+BReCVzb/YplLXBGVW2caaHuub0BOIHeCdk/A369qr414HanW++36Z1U/Ht6vyIatE9/8nq+Ry/UVyZ51wCLnAd8hl5gfxf4MfDv++ZfATybfw70q+l9k7gS7bWyfXeoJGlv5RG6JDXCQJekRhjoktSIgQI9ybIkt3aXYq+cps2vJrm5++3vZ4dbpiRpJjOeFE3vJkTfBl5H75cK64HTqurmvjaLgYuB46rq/iTPq6rv72i9Bx98cC1atGg3y5ekfcv1119/T1WNTDVvkLvrLQXGJ376lWQNcDJwc1+bdwOrq+p+gJnCHGDRokWMjY0NsHlJ0oQk012dPVCXy3y2v+R5M9tfDg3wIuBFSf4hyTVJlk1TyPIkY0nGtm7dOsCmJUmDGtZJ0bnAYnp3ZzsN+HSSgyY3qqpzq2q0qkZHRqb8xiBJ2kWDBPoWejdamrCAJ99TYzOwtrsj23fp9bkvHk6JkqRBDBLo64HF3T0h5tG7BHvtpDZfoPtvt5IcTK8LZsbLrSVJwzNjoHc381kBXAbcQu82nhuSrOr7H10uA+5NcjO9ezK/v6ru3VNFS5KebNbu5TI6Olr+ykWSdk6S66tqdKp5XikqSY0w0CWpEQa6JDVikCtFJe2DFq28dLZLaNbtZ524R9brEbokNcJAl6RGGOiS1AgDXZIaYaBLUiMMdElqhIEuSY0w0CWpEQa6JDXCQJekRhjoktQIA12SGmGgS1IjDHRJaoSBLkmNMNAlqREGuiQ1wkCXpEYY6JLUCANdkhphoEtSIwx0SWqEgS5JjRgo0JMsS3JrkvEkK6eYf3qSrUlu7B7vGn6pkqQdmTtTgyRzgNXA64DNwPoka6vq5klNL6qqFXugRknSAAY5Ql8KjFfVxqp6BFgDnLxny5Ik7axBAn0+sKlvfHM3bbI3J/l6ks8lWTjVipIsTzKWZGzr1q27UK4kaTrDOin618Ciqno58HfABVM1qqpzq2q0qkZHRkaGtGlJEgwW6FuA/iPuBd20J1TVvVX1cDf6F8DPDac8SdKgBgn09cDiJIclmQecCqztb5DkBX2jJwG3DK9ESdIgZvyVS1VtS7ICuAyYA5xXVRuSrALGqmot8NtJTgK2AfcBp+/BmiVJU5gx0AGqah2wbtK0M/uGPwR8aLilSZJ2hleKSlIjDHRJaoSBLkmNMNAlqREGuiQ1wkCXpEYY6JLUCANdkhphoEtSIwx0SWqEgS5JjTDQJakRBrokNcJAl6RGGOiS1AgDXZIaYaBLUiMMdElqhIEuSY0w0CWpEQa6JDXCQJekRhjoktQIA12SGmGgS1IjDHRJaoSBLkmNGCjQkyxLcmuS8SQrd9DuzUkqyejwSpQkDWLGQE8yB1gNnAAsAU5LsmSKds8GzgCuHXaRkqSZDXKEvhQYr6qNVfUIsAY4eYp2fwh8DPjxEOuTJA1okECfD2zqG9/cTXtCklcAC6vq0h2tKMnyJGNJxrZu
3brTxUqSprfbJ0WT7Ad8AnjfTG2r6tyqGq2q0ZGRkd3dtCSpzyCBvgVY2De+oJs24dnAy4DLk9wOvBpY64lRSXpqDRLo64HFSQ5LMg84FVg7MbOqHqyqg6tqUVUtAq4BTqqqsT1SsSRpSjMGelVtA1YAlwG3ABdX1YYkq5KctKcLlCQNZu4gjapqHbBu0rQzp2l77O6XJUnaWV4pKkmNMNAlqREGuiQ1wkCXpEYY6JLUCANdkhphoEtSIwx0SWqEgS5JjTDQJakRBrokNcJAl6RGGOiS1AgDXZIaYaBLUiMMdElqhIEuSY0w0CWpEQa6JDXCQJekRhjoktQIA12SGmGgS1IjDHRJaoSBLkmNMNAlqREGuiQ1YqBAT7Isya1JxpOsnGL+e5J8I8mNSa5OsmT4pUqSdmTGQE8yB1gNnAAsAU6bIrA/W1X/sqqOBD4OfGLolUqSdmiQI/SlwHhVbayqR4A1wMn9Darq//aNHgDU8EqUJA1i7gBt5gOb+sY3A6+a3CjJbwG/C8wDjptqRUmWA8sBDj300J2tVZK0A0M7KVpVq6vqcOCDwIenaXNuVY1W1ejIyMiwNi1JYrBA3wIs7Btf0E2bzhrgjbtTlCRp5w0S6OuBxUkOSzIPOBVY298gyeK+0ROB24ZXoiRpEDP2oVfVtiQrgMuAOcB5VbUhySpgrKrWAiuSHA88CtwPvH1PFi1JerJBTopSVeuAdZOmndk3fMaQ65Ik7SSvFJWkRhjoktQIA12SGmGgS1IjDHRJaoSBLkmNMNAlqREGuiQ1wkCXpEYY6JLUCANdkhphoEtSIwx0SWqEgS5JjTDQJakRBrokNcJAl6RGGOiS1AgDXZIaYaBLUiMMdElqhIEuSY0w0CWpEQa6JDXCQJekRhjoktSIubNdwK5YtPLS2S6hWbefdeIeWa+v2Z6zp14z7X0GOkJPsizJrUnGk6ycYv7vJrk5ydeTfCnJC4dfqiRpR2YM9CRzgNXACcAS4LQkSyY1+0dgtKpeDnwO+PiwC5Uk7dggR+hLgfGq2lhVjwBrgJP7G1TVV6rqR93oNcCC4ZYpSZrJIIE+H9jUN765mzad3wD+ZqoZSZYnGUsytnXr1sGrlCTNaKi/cknya8AocPZU86vq3KoararRkZGRYW5akvZ5g/zKZQuwsG98QTdtO0mOB34f+IWqeng45UmSBjXIEfp6YHGSw5LMA04F1vY3SHIUcA5wUlV9f/hlSpJmMmOgV9U2YAVwGXALcHFVbUiyKslJXbOzgQOBv0pyY5K106xOkrSHDHRhUVWtA9ZNmnZm3/DxQ65LkrSTvPRfkhphoEtSIwx0SWqEgS5JjTDQJakRBrokNcJAl6RGGOiS1AgDXZIaYaBLUiMMdElqhIEuSY0w0CWpEQa6JDXCQJekRhjoktQIA12SGmGgS1IjDHRJaoSBLkmNMNAlqREGuiQ1wkCXpEYY6JLUCANdkhphoEtSIwx0SWrEQIGeZFmSW5OMJ1k5xfzXJLkhybYkbxl+mZKkmcwY6EnmAKuBE4AlwGlJlkxq9j3gdOCzwy5QkjSYuQO0WQqMV9VGgCRrgJOBmycaVNXt3bzH90CNkqQBDNLlMh/Y1De+uZu205IsTzKWZGzr1q27sgpJ0jSe0pOiVXVuVY1W1ejIyMhTuWlJat4ggb4FWNg3vqCbJkl6Ghkk0NcDi5MclmQecCqwds+WJUnaWTMGelVtA1YAlwG3ABdX1YYkq5KcBJDklUk2A6cA5yTZsCeLliQ92SC/cqGq1gHrJk07s294Pb2uGEnSLPFKUUlqhIEuSY0w0CWpEQa6JDXCQJekRhjoktQIA12SGmGgS1IjDHRJaoSBLkmNMNAlqREGuiQ1wkCXpEYY6JLUCANdkhphoEtSIwx0SWqEgS5JjTDQJakRBrokNcJAl6RGGOiS1AgDXZIaYaBLUiMMdElqhIEuSY0w0CWpEQMFepJlSW5NMp5k5RTz909yUTf/2iSLhl2o
JGnHZgz0JHOA1cAJwBLgtCRLJjX7DeD+qjoC+BPgY8MuVJK0Y4McoS8FxqtqY1U9AqwBTp7U5mTggm74c8Brk2R4ZUqSZjJ3gDbzgU1945uBV03Xpqq2JXkQeC5wT3+jJMuB5d3oQ0lu3ZWi90IHM2lfPF3F71awF71e4GvW2ZdesxdON2OQQB+aqjoXOPep3ObTQZKxqhqd7To0GF+vvY+vWc8gXS5bgIV94wu6aVO2STIX+Eng3mEUKEkazCCBvh5YnOSwJPOAU4G1k9qsBd7eDb8F+HJV1fDKlCTNZMYul65PfAVwGTAHOK+qNiRZBYxV1VrgvwOfSTIO3Ecv9PXP9rlupr2cr9fex9cMiAfSktQGrxSVpEYY6JLUCANd0tNWktOTfHK269hbGOiS1AgDfTclWZXkvX3jf5TkjCTvT7I+ydeT/Kdu3gFJLk1yU5JvJnnr7FUugCSLktyS5NNJNiT52yQ/keTIJNd0r98lSZ4z27W2oNvf3+wb/70kH0lyeZKPJbkuybeTHDPFsicm+VqSg5Ocn+S/Jflqko1J3tK1SZKzu7+vb0z8jSVZneSkbviSJOd1w+/s/manfB88NXtleAz03Xce8OsASfaj95PNu4DF9O6DcyTwc0leAywD7qyqn62qlwH/Z3ZK1iSLgdVV9VLgAeDNwF8CH6yqlwPfAP7jLNa3r5hbVUuB9zJpfyf5FWAl8PqqmrjE/wXA0cAvA2d1095E72/uZ4HjgbOTvAC4Cpj4kJhP70aDdNOu7Ianeh/sVQz03VRVtwP3JjkK+CXgH4FX9g3fAPwLem+WbwCv645EjqmqB2enak3y3aq6sRu+HjgcOKiqruimXQC8ZlYq27d8vvv3emBR3/TjgA8CJ1bV/X3Tv1BVj1fVzcDzu2lHAxdW1WNVdTdwBb2/x6uAY7o7xd4M3N0F/c8DX+2Wnfw+6K9hr/CU3sulYX8BnA78NL0j9tcCf1xV50xumOQVwOuBjyb5UlWteioL1ZQe7ht+DDhotgrZB2xj+wPJZ/YNT7wOj7F9Nn0H+BngRcDYFO0Bdnh316rakuQget+SrwR+CvhV4KGq+kGS5/Lk94FdLvuoS+i9UV5J74ray4B3JjkQIMn8JM9Lcgjwo6r6H8DZwCtmq2Dt0IPA/X39uP+G3pGedt/dwPOSPDfJ/vS6S2ZyB103WJKXztD2KuCtSeYkGaH3zeq6bt419Lpzruza/V73bzM8Qh+CqnokyVeAB6rqMeBvk7wE+Fp3W/iHgF8DjqDXp/c48Cjwm7NVs2b0duBTSZ4FbATeMcv1NKGqHu1uG3IdvZv6fWvA5b6V5G3AXyV5ww6aXkKvG+UmoIAPVNVd3byrgF+qqvEkd9A7Sm8q0L30fwi6k6E3AKdU1W2zXY+kfZNdLrupO8kyDnzJMJc0mzxCl6RGeIQuSY0w0CWpEQa6JDXCQJekRhjoktSI/w/TIK6YopbnBgAAAABJRU5ErkJggg==\n", + "text/plain": [ + "
    " + ] + }, + "metadata": { + "tags": [], + "needs_background": "light" + } + }, + { + "output_type": "stream", + "text": [ + "/content/data/mini_speech_commands/unknown/8012c69d_nohash_2.wav\n" + ], + "name": "stdout" + }, + { + "output_type": "display_data", + "data": { + "image/png": "iVBORw0KGgoAAAANSUhEUgAAAXQAAAEICAYAAABPgw/pAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4yLjIsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy+WH4yJAAAUd0lEQVR4nO3df7RdZX3n8feHxGAFV7FytZIEQyE6RseCvUa7BixF7ApSwapUGDsVrWbsNFNsrRqnlnFSO0WZZWdNTSvYYUGdkUAd0XRIh7Yqv6pALhTUgMg1gkkoGH6O6AgEvvPH2ZeeXO7NPUlOuOTJ+7XWWdk/nr339+xz7ufs8+yzd1JVSJL2fvvNdgGSpOEw0CWpEQa6JDXCQJekRhjoktQIA12SGmGga7ckOT/JR7vhY5Lcuovr+VSSPxhudTNu81eSbEryUJKjnsptDypJJTlituvQ3mHubBegPS/J7cDzgceAHwJ/A6yoqoeGuZ2qugp48QD1nA68q6qO7lv2PcOsZUD/hd5++OIwVpbkcuAjwLEAVfWRYaz36SrJ+cDl3eixVXX6rBUjwCP0fckbqupA4BXAKPDhyQ2S7Gsf8C8ENuzKgknmDLkWabcZ6PuYqtpC7wj9ZfDEV/rfSnIbcFs37ZeT3JjkgSRfTfLyieWTHJXkhiQ/SHIR8My+eccm2dw3vjDJ55NsTXJvkk8meQnwKeDnu66OB7q2T3TddOPvTjKe5L4ka5Mc0jevkrwnyW1djauTpJt3RJIrkjyY5J6uxu0k2T/JQ8Ac4KYk3+mmvyTJ5d06NyQ5qW+Z85P8eZJ1SX4I/OIg+zvJ6UmunjTtiW6Ubr2rk1za7dNrkxw+zbqO7rqIjh1gP+yX5MNJ7kjy/SR/meQnu3kXJHlfNzx/4j3QjR/e7fP9Jl7PJO/r1vFPSd4xyPPWLKkqH40/gNuB47vhhfSOSv+wGy/g74CfAn4COAr4PvAqeoH39m75/YF5wB3A7wDPAN4CPAp8tFvXscDmbngOcBPwJ8AB9IL/6G7e6cDVk2o8v289xwH30Ps2sT/wp8CVfW0L+N/AQcChwFZgWTfvQuD36R2sPLHNafZLAUd0w88AxoH/0D3P44AfAC/uq+9B4F9NrHvAfT/Vc+3f7vnAvcBSel2g/xNYM7ktsAzYBCwdcD+8s3s+PwMcCHwe+EzfvL/uhv818B3gor55X+x7PbcBq7r983rgR8BzZvs97WPqh0fo+44vdEfDVwNXAP+5b94fV9V9VfX/gOXAOVV1bVU9VlUXAA8Dr+4ezwD+a1U9WlWfA9ZPs72lwCHA+6vqh1X146q6epq2k70NOK+qbqiqh4EP0TuiX9TX5qyqeqCqvgd8BTiym/4ova6UQ3Zym6+mF3xnVdUjVfVlemF5Wl+bL1bVP1TV41X14wHXO4hLquq6qtpGL9CPnDT/FOAc4ISqum7SvOn2w9uAT1TVxuqdK/kQcGrXrXYFcHSS/YDXAB+n90EF8Avd/AmPAqu613sd8BADnCfR7DDQ9x1vrKqDquqFVfXvuvCesKlv+IXA+7qv8A90HwIL6YXzIcCWquq/o9sd02xvIXBHF1I765D+9XaBdC8wv6/NXX3DP6IXxgAfAAJc13WbvHMntrmpqh7vm3bHpG1uYs+Y7rlMeC9wcVV9cyeW3W4fdsNzgedX1XfonRw/
EjiG3gfXnUlezJMD/d5Jr+FU9elpwkAX9L66T9gE/FEX/hOPZ1XVhcA/AfMn+mk7h06zzk3AodOcaJ3pFp930vtgASDJAcBzgS0zPpGqu6rq3VV1CPBvgT8b8Gd/dwILu6PWCYdO2uau3Jr0h8CzJkaS/PQurOMU4I1JztiJZbbbh/Seyzbg7m78CnpdZvOqd17lCnrda88BbtyFGvU0YKBrsk8D70nyqvQckOTEJM8GvkYvFH47yTOSvIle18pUrqP3AXBWt45nJpn4Wn83sCDJvGmWvRB4R5Ijk+xPr3vo2qq6fabik5ySZEE3ej+9EH58B4tMuJbe0ecHuud2LPAGYM0Ay+7ITcBLu+fyTHo/a9xZdwKvBc5I8psDLnMh8DtJDktyIL19eFHf0fYVwArgym788m786qp6bBdq1NOAga7tVNUY8G7gk/QCcZzeiT2q6hHgTd34fcBb6Z1sm2o9j9ELxCOA7wGbu/YAX6Z3YvauJPdMsezfA38A/C96HwqHA6cO+BReCVzb/YplLXBGVW2caaHuub0BOIHeCdk/A369qr414HanW++36Z1U/Ht6vyIatE9/8nq+Ry/UVyZ51wCLnAd8hl5gfxf4MfDv++ZfATybfw70q+l9k7gS7bWyfXeoJGlv5RG6JDXCQJekRhjoktSIgQI9ybIkt3aXYq+cps2vJrm5++3vZ4dbpiRpJjOeFE3vJkTfBl5H75cK64HTqurmvjaLgYuB46rq/iTPq6rv72i9Bx98cC1atGg3y5ekfcv1119/T1WNTDVvkLvrLQXGJ376lWQNcDJwc1+bdwOrq+p+gJnCHGDRokWMjY0NsHlJ0oQk012dPVCXy3y2v+R5M9tfDg3wIuBFSf4hyTVJlk1TyPIkY0nGtm7dOsCmJUmDGtZJ0bnAYnp3ZzsN+HSSgyY3qqpzq2q0qkZHRqb8xiBJ2kWDBPoWejdamrCAJ99TYzOwtrsj23fp9bkvHk6JkqRBDBLo64HF3T0h5tG7BHvtpDZfoPtvt5IcTK8LZsbLrSVJwzNjoHc381kBXAbcQu82nhuSrOr7H10uA+5NcjO9ezK/v6ru3VNFS5KebNbu5TI6Olr+ykWSdk6S66tqdKp5XikqSY0w0CWpEQa6JDVikCtFJe2DFq28dLZLaNbtZ524R9brEbokNcJAl6RGGOiS1AgDXZIaYaBLUiMMdElqhIEuSY0w0CWpEQa6JDXCQJekRhjoktQIA12SGmGgS1IjDHRJaoSBLkmNMNAlqREGuiQ1wkCXpEYY6JLUCANdkhphoEtSIwx0SWqEgS5JjRgo0JMsS3JrkvEkK6eYf3qSrUlu7B7vGn6pkqQdmTtTgyRzgNXA64DNwPoka6vq5klNL6qqFXugRknSAAY5Ql8KjFfVxqp6BFgDnLxny5Ik7axBAn0+sKlvfHM3bbI3J/l6ks8lWTjVipIsTzKWZGzr1q27UK4kaTrDOin618Ciqno58HfABVM1qqpzq2q0qkZHRkaGtGlJEgwW6FuA/iPuBd20J1TVvVX1cDf6F8DPDac8SdKgBgn09cDiJIclmQecCqztb5DkBX2jJwG3DK9ESdIgZvyVS1VtS7ICuAyYA5xXVRuSrALGqmot8NtJTgK2AfcBp+/BmiVJU5gx0AGqah2wbtK0M/uGPwR8aLilSZJ2hleKSlIjDHRJaoSBLkmNMNAlqREGuiQ1wkCXpEYY6JLUCANdkhphoEtSIwx0SWqEgS5JjTDQJakRBrokNcJAl6RGGOiS1AgDXZIaYaBLUiMMdElqhIEuSY0w0CWpEQa6JDXCQJekRhjoktQIA12SGmGgS1IjDHRJaoSBLkmNGCjQkyxLcmuS8SQrd9DuzUkqyejwSpQkDWLGQE8yB1gNnAAsAU5LsmSKds8GzgCuHXaRkqSZDXKEvhQYr6qNVfUIsAY4eYp2fwh8DPjxEOuTJA1okECfD2zqG9/cTXtCklcAC6vq0h2tKMnyJGNJxrZu
3brTxUqSprfbJ0WT7Ad8AnjfTG2r6tyqGq2q0ZGRkd3dtCSpzyCBvgVY2De+oJs24dnAy4DLk9wOvBpY64lRSXpqDRLo64HFSQ5LMg84FVg7MbOqHqyqg6tqUVUtAq4BTqqqsT1SsSRpSjMGelVtA1YAlwG3ABdX1YYkq5KctKcLlCQNZu4gjapqHbBu0rQzp2l77O6XJUnaWV4pKkmNMNAlqREGuiQ1wkCXpEYY6JLUCANdkhphoEtSIwx0SWqEgS5JjTDQJakRBrokNcJAl6RGGOiS1AgDXZIaYaBLUiMMdElqhIEuSY0w0CWpEQa6JDXCQJekRhjoktQIA12SGmGgS1IjDHRJaoSBLkmNMNAlqREGuiQ1YqBAT7Isya1JxpOsnGL+e5J8I8mNSa5OsmT4pUqSdmTGQE8yB1gNnAAsAU6bIrA/W1X/sqqOBD4OfGLolUqSdmiQI/SlwHhVbayqR4A1wMn9Darq//aNHgDU8EqUJA1i7gBt5gOb+sY3A6+a3CjJbwG/C8wDjptqRUmWA8sBDj300J2tVZK0A0M7KVpVq6vqcOCDwIenaXNuVY1W1ejIyMiwNi1JYrBA3wIs7Btf0E2bzhrgjbtTlCRp5w0S6OuBxUkOSzIPOBVY298gyeK+0ROB24ZXoiRpEDP2oVfVtiQrgMuAOcB5VbUhySpgrKrWAiuSHA88CtwPvH1PFi1JerJBTopSVeuAdZOmndk3fMaQ65Ik7SSvFJWkRhjoktQIA12SGmGgS1IjDHRJaoSBLkmNMNAlqREGuiQ1wkCXpEYY6JLUCANdkhphoEtSIwx0SWqEgS5JjTDQJakRBrokNcJAl6RGGOiS1AgDXZIaYaBLUiMMdElqhIEuSY0w0CWpEQa6JDXCQJekRhjoktSIubNdwK5YtPLS2S6hWbefdeIeWa+v2Z6zp14z7X0GOkJPsizJrUnGk6ycYv7vJrk5ydeTfCnJC4dfqiRpR2YM9CRzgNXACcAS4LQkSyY1+0dgtKpeDnwO+PiwC5Uk7dggR+hLgfGq2lhVjwBrgJP7G1TVV6rqR93oNcCC4ZYpSZrJIIE+H9jUN765mzad3wD+ZqoZSZYnGUsytnXr1sGrlCTNaKi/cknya8AocPZU86vq3KoararRkZGRYW5akvZ5g/zKZQuwsG98QTdtO0mOB34f+IWqeng45UmSBjXIEfp6YHGSw5LMA04F1vY3SHIUcA5wUlV9f/hlSpJmMmOgV9U2YAVwGXALcHFVbUiyKslJXbOzgQOBv0pyY5K106xOkrSHDHRhUVWtA9ZNmnZm3/DxQ65LkrSTvPRfkhphoEtSIwx0SWqEgS5JjTDQJakRBrokNcJAl6RGGOiS1AgDXZIaYaBLUiMMdElqhIEuSY0w0CWpEQa6JDXCQJekRhjoktQIA12SGmGgS1IjDHRJaoSBLkmNMNAlqREGuiQ1wkCXpEYY6JLUCANdkhphoEtSIwx0SWrEQIGeZFmSW5OMJ1k5xfzXJLkhybYkbxl+mZKkmcwY6EnmAKuBE4AlwGlJlkxq9j3gdOCzwy5QkjSYuQO0WQqMV9VGgCRrgJOBmycaVNXt3bzH90CNkqQBDNLlMh/Y1De+uZu205IsTzKWZGzr1q27sgpJ0jSe0pOiVXVuVY1W1ejIyMhTuWlJat4ggb4FWNg3vqCbJkl6Ghkk0NcDi5MclmQecCqwds+WJUnaWTMGelVtA1YAlwG3ABdX1YYkq5KcBJDklUk2A6cA5yTZsCeLliQ92SC/cqGq1gHrJk07s294Pb2uGEnSLPFKUUlqhIEuSY0w0CWpEQa6JDXCQJekRhjoktQIA12SGmGgS1IjDHRJaoSBLkmNMNAlqREGuiQ1wkCXpEYY6JLUCANdkhphoEtSIwx0SWqEgS5JjTDQJakRBrokNcJAl6RGGOiS1AgDXZIaYaBLUiMMdElqhIEuSY0w0CWpEQMFepJlSW5NMp5k5RTz909yUTf/2iSLhl2o
JGnHZgz0JHOA1cAJwBLgtCRLJjX7DeD+qjoC+BPgY8MuVJK0Y4McoS8FxqtqY1U9AqwBTp7U5mTggm74c8Brk2R4ZUqSZjJ3gDbzgU1945uBV03Xpqq2JXkQeC5wT3+jJMuB5d3oQ0lu3ZWi90IHM2lfPF3F71awF71e4GvW2ZdesxdON2OQQB+aqjoXOPep3ObTQZKxqhqd7To0GF+vvY+vWc8gXS5bgIV94wu6aVO2STIX+Eng3mEUKEkazCCBvh5YnOSwJPOAU4G1k9qsBd7eDb8F+HJV1fDKlCTNZMYul65PfAVwGTAHOK+qNiRZBYxV1VrgvwOfSTIO3Ecv9PXP9rlupr2cr9fex9cMiAfSktQGrxSVpEYY6JLUCANd0tNWktOTfHK269hbGOiS1AgDfTclWZXkvX3jf5TkjCTvT7I+ydeT/Kdu3gFJLk1yU5JvJnnr7FUugCSLktyS5NNJNiT52yQ/keTIJNd0r98lSZ4z27W2oNvf3+wb/70kH0lyeZKPJbkuybeTHDPFsicm+VqSg5Ocn+S/Jflqko1J3tK1SZKzu7+vb0z8jSVZneSkbviSJOd1w+/s/manfB88NXtleAz03Xce8OsASfaj95PNu4DF9O6DcyTwc0leAywD7qyqn62qlwH/Z3ZK1iSLgdVV9VLgAeDNwF8CH6yqlwPfAP7jLNa3r5hbVUuB9zJpfyf5FWAl8PqqmrjE/wXA0cAvA2d1095E72/uZ4HjgbOTvAC4Cpj4kJhP70aDdNOu7Ianeh/sVQz03VRVtwP3JjkK+CXgH4FX9g3fAPwLem+WbwCv645EjqmqB2enak3y3aq6sRu+HjgcOKiqruimXQC8ZlYq27d8vvv3emBR3/TjgA8CJ1bV/X3Tv1BVj1fVzcDzu2lHAxdW1WNVdTdwBb2/x6uAY7o7xd4M3N0F/c8DX+2Wnfw+6K9hr/CU3sulYX8BnA78NL0j9tcCf1xV50xumOQVwOuBjyb5UlWteioL1ZQe7ht+DDhotgrZB2xj+wPJZ/YNT7wOj7F9Nn0H+BngRcDYFO0Bdnh316rakuQget+SrwR+CvhV4KGq+kGS5/Lk94FdLvuoS+i9UV5J74ray4B3JjkQIMn8JM9Lcgjwo6r6H8DZwCtmq2Dt0IPA/X39uP+G3pGedt/dwPOSPDfJ/vS6S2ZyB103WJKXztD2KuCtSeYkGaH3zeq6bt419Lpzruza/V73bzM8Qh+CqnokyVeAB6rqMeBvk7wE+Fp3W/iHgF8DjqDXp/c48Cjwm7NVs2b0duBTSZ4FbATeMcv1NKGqHu1uG3IdvZv6fWvA5b6V5G3AXyV5ww6aXkKvG+UmoIAPVNVd3byrgF+qqvEkd9A7Sm8q0L30fwi6k6E3AKdU1W2zXY+kfZNdLrupO8kyDnzJMJc0mzxCl6RGeIQuSY0w0CWpEQa6JDXCQJekRhjoktSI/w/TIK6YopbnBgAAAABJRU5ErkJggg==\n", + "text/plain": [ + "
    " + ] + }, + "metadata": { + "tags": [], + "needs_background": "light" + } + }, + { + "output_type": "stream", + "text": [ + "/content/data/mini_speech_commands/unknown/71f6fed7_nohash_2.wav\n" + ], + "name": "stdout" + }, + { + "output_type": "display_data", + "data": { + "image/png": "iVBORw0KGgoAAAANSUhEUgAAAXQAAAEICAYAAABPgw/pAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4yLjIsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy+WH4yJAAAUd0lEQVR4nO3df7RdZX3n8feHxGAFV7FytZIEQyE6RseCvUa7BixF7ApSwapUGDsVrWbsNFNsrRqnlnFSO0WZZWdNTSvYYUGdkUAd0XRIh7Yqv6pALhTUgMg1gkkoGH6O6AgEvvPH2ZeeXO7NPUlOuOTJ+7XWWdk/nr339+xz7ufs8+yzd1JVSJL2fvvNdgGSpOEw0CWpEQa6JDXCQJekRhjoktQIA12SGmGga7ckOT/JR7vhY5Lcuovr+VSSPxhudTNu81eSbEryUJKjnsptDypJJTlituvQ3mHubBegPS/J7cDzgceAHwJ/A6yoqoeGuZ2qugp48QD1nA68q6qO7lv2PcOsZUD/hd5++OIwVpbkcuAjwLEAVfWRYaz36SrJ+cDl3eixVXX6rBUjwCP0fckbqupA4BXAKPDhyQ2S7Gsf8C8ENuzKgknmDLkWabcZ6PuYqtpC7wj9ZfDEV/rfSnIbcFs37ZeT3JjkgSRfTfLyieWTHJXkhiQ/SHIR8My+eccm2dw3vjDJ55NsTXJvkk8meQnwKeDnu66OB7q2T3TddOPvTjKe5L4ka5Mc0jevkrwnyW1djauTpJt3RJIrkjyY5J6uxu0k2T/JQ8Ac4KYk3+mmvyTJ5d06NyQ5qW+Z85P8eZJ1SX4I/OIg+zvJ6UmunjTtiW6Ubr2rk1za7dNrkxw+zbqO7rqIjh1gP+yX5MNJ7kjy/SR/meQnu3kXJHlfNzx/4j3QjR/e7fP9Jl7PJO/r1vFPSd4xyPPWLKkqH40/gNuB47vhhfSOSv+wGy/g74CfAn4COAr4PvAqeoH39m75/YF5wB3A7wDPAN4CPAp8tFvXscDmbngOcBPwJ8AB9IL/6G7e6cDVk2o8v289xwH30Ps2sT/wp8CVfW0L+N/AQcChwFZgWTfvQuD36R2sPLHNafZLAUd0w88AxoH/0D3P44AfAC/uq+9B4F9NrHvAfT/Vc+3f7vnAvcBSel2g/xNYM7ktsAzYBCwdcD+8s3s+PwMcCHwe+EzfvL/uhv818B3gor55X+x7PbcBq7r983rgR8BzZvs97WPqh0fo+44vdEfDVwNXAP+5b94fV9V9VfX/gOXAOVV1bVU9VlUXAA8Dr+4ezwD+a1U9WlWfA9ZPs72lwCHA+6vqh1X146q6epq2k70NOK+qbqiqh4EP0TuiX9TX5qyqeqCqvgd8BTiym/4ova6UQ3Zym6+mF3xnVdUjVfVlemF5Wl+bL1bVP1TV41X14wHXO4hLquq6qtpGL9CPnDT/FOAc4ISqum7SvOn2w9uAT1TVxuqdK/kQcGrXrXYFcHSS/YDXAB+n90EF8Avd/AmPAqu613sd8BADnCfR7DDQ9x1vrKqDquqFVfXvuvCesKlv+IXA+7qv8A90HwIL6YXzIcCWquq/o9sd02xvIXBHF1I765D+9XaBdC8wv6/NXX3DP6IXxgAfAAJc13WbvHMntrmpqh7vm3bHpG1uYs+Y7rlMeC9wcVV9cyeW3W4fdsNzgedX1XfonRw/
EjiG3gfXnUlezJMD/d5Jr+FU9elpwkAX9L66T9gE/FEX/hOPZ1XVhcA/AfMn+mk7h06zzk3AodOcaJ3pFp930vtgASDJAcBzgS0zPpGqu6rq3VV1CPBvgT8b8Gd/dwILu6PWCYdO2uau3Jr0h8CzJkaS/PQurOMU4I1JztiJZbbbh/Seyzbg7m78CnpdZvOqd17lCnrda88BbtyFGvU0YKBrsk8D70nyqvQckOTEJM8GvkYvFH47yTOSvIle18pUrqP3AXBWt45nJpn4Wn83sCDJvGmWvRB4R5Ijk+xPr3vo2qq6fabik5ySZEE3ej+9EH58B4tMuJbe0ecHuud2LPAGYM0Ay+7ITcBLu+fyTHo/a9xZdwKvBc5I8psDLnMh8DtJDktyIL19eFHf0fYVwArgym788m786qp6bBdq1NOAga7tVNUY8G7gk/QCcZzeiT2q6hHgTd34fcBb6Z1sm2o9j9ELxCOA7wGbu/YAX6Z3YvauJPdMsezfA38A/C96HwqHA6cO+BReCVzb/YplLXBGVW2caaHuub0BOIHeCdk/A369qr414HanW++36Z1U/Ht6vyIatE9/8nq+Ry/UVyZ51wCLnAd8hl5gfxf4MfDv++ZfATybfw70q+l9k7gS7bWyfXeoJGlv5RG6JDXCQJekRhjoktSIgQI9ybIkt3aXYq+cps2vJrm5++3vZ4dbpiRpJjOeFE3vJkTfBl5H75cK64HTqurmvjaLgYuB46rq/iTPq6rv72i9Bx98cC1atGg3y5ekfcv1119/T1WNTDVvkLvrLQXGJ376lWQNcDJwc1+bdwOrq+p+gJnCHGDRokWMjY0NsHlJ0oQk012dPVCXy3y2v+R5M9tfDg3wIuBFSf4hyTVJlk1TyPIkY0nGtm7dOsCmJUmDGtZJ0bnAYnp3ZzsN+HSSgyY3qqpzq2q0qkZHRqb8xiBJ2kWDBPoWejdamrCAJ99TYzOwtrsj23fp9bkvHk6JkqRBDBLo64HF3T0h5tG7BHvtpDZfoPtvt5IcTK8LZsbLrSVJwzNjoHc381kBXAbcQu82nhuSrOr7H10uA+5NcjO9ezK/v6ru3VNFS5KebNbu5TI6Olr+ykWSdk6S66tqdKp5XikqSY0w0CWpEQa6JDVikCtFJe2DFq28dLZLaNbtZ524R9brEbokNcJAl6RGGOiS1AgDXZIaYaBLUiMMdElqhIEuSY0w0CWpEQa6JDXCQJekRhjoktQIA12SGmGgS1IjDHRJaoSBLkmNMNAlqREGuiQ1wkCXpEYY6JLUCANdkhphoEtSIwx0SWqEgS5JjRgo0JMsS3JrkvEkK6eYf3qSrUlu7B7vGn6pkqQdmTtTgyRzgNXA64DNwPoka6vq5klNL6qqFXugRknSAAY5Ql8KjFfVxqp6BFgDnLxny5Ik7axBAn0+sKlvfHM3bbI3J/l6ks8lWTjVipIsTzKWZGzr1q27UK4kaTrDOin618Ciqno58HfABVM1qqpzq2q0qkZHRkaGtGlJEgwW6FuA/iPuBd20J1TVvVX1cDf6F8DPDac8SdKgBgn09cDiJIclmQecCqztb5DkBX2jJwG3DK9ESdIgZvyVS1VtS7ICuAyYA5xXVRuSrALGqmot8NtJTgK2AfcBp+/BmiVJU5gx0AGqah2wbtK0M/uGPwR8aLilSZJ2hleKSlIjDHRJaoSBLkmNMNAlqREGuiQ1wkCXpEYY6JLUCANdkhphoEtSIwx0SWqEgS5JjTDQJakRBrokNcJAl6RGGOiS1AgDXZIaYaBLUiMMdElqhIEuSY0w0CWpEQa6JDXCQJekRhjoktQIA12SGmGgS1IjDHRJaoSBLkmNGCjQkyxLcmuS8SQrd9DuzUkqyejwSpQkDWLGQE8yB1gNnAAsAU5LsmSKds8GzgCuHXaRkqSZDXKEvhQYr6qNVfUIsAY4eYp2fwh8DPjxEOuTJA1okECfD2zqG9/cTXtCklcAC6vq0h2tKMnyJGNJxrZu
3brTxUqSprfbJ0WT7Ad8AnjfTG2r6tyqGq2q0ZGRkd3dtCSpzyCBvgVY2De+oJs24dnAy4DLk9wOvBpY64lRSXpqDRLo64HFSQ5LMg84FVg7MbOqHqyqg6tqUVUtAq4BTqqqsT1SsSRpSjMGelVtA1YAlwG3ABdX1YYkq5KctKcLlCQNZu4gjapqHbBu0rQzp2l77O6XJUnaWV4pKkmNMNAlqREGuiQ1wkCXpEYY6JLUCANdkhphoEtSIwx0SWqEgS5JjTDQJakRBrokNcJAl6RGGOiS1AgDXZIaYaBLUiMMdElqhIEuSY0w0CWpEQa6JDXCQJekRhjoktQIA12SGmGgS1IjDHRJaoSBLkmNMNAlqREGuiQ1YqBAT7Isya1JxpOsnGL+e5J8I8mNSa5OsmT4pUqSdmTGQE8yB1gNnAAsAU6bIrA/W1X/sqqOBD4OfGLolUqSdmiQI/SlwHhVbayqR4A1wMn9Darq//aNHgDU8EqUJA1i7gBt5gOb+sY3A6+a3CjJbwG/C8wDjptqRUmWA8sBDj300J2tVZK0A0M7KVpVq6vqcOCDwIenaXNuVY1W1ejIyMiwNi1JYrBA3wIs7Btf0E2bzhrgjbtTlCRp5w0S6OuBxUkOSzIPOBVY298gyeK+0ROB24ZXoiRpEDP2oVfVtiQrgMuAOcB5VbUhySpgrKrWAiuSHA88CtwPvH1PFi1JerJBTopSVeuAdZOmndk3fMaQ65Ik7SSvFJWkRhjoktQIA12SGmGgS1IjDHRJaoSBLkmNMNAlqREGuiQ1wkCXpEYY6JLUCANdkhphoEtSIwx0SWqEgS5JjTDQJakRBrokNcJAl6RGGOiS1AgDXZIaYaBLUiMMdElqhIEuSY0w0CWpEQa6JDXCQJekRhjoktSIubNdwK5YtPLS2S6hWbefdeIeWa+v2Z6zp14z7X0GOkJPsizJrUnGk6ycYv7vJrk5ydeTfCnJC4dfqiRpR2YM9CRzgNXACcAS4LQkSyY1+0dgtKpeDnwO+PiwC5Uk7dggR+hLgfGq2lhVjwBrgJP7G1TVV6rqR93oNcCC4ZYpSZrJIIE+H9jUN765mzad3wD+ZqoZSZYnGUsytnXr1sGrlCTNaKi/cknya8AocPZU86vq3KoararRkZGRYW5akvZ5g/zKZQuwsG98QTdtO0mOB34f+IWqeng45UmSBjXIEfp6YHGSw5LMA04F1vY3SHIUcA5wUlV9f/hlSpJmMmOgV9U2YAVwGXALcHFVbUiyKslJXbOzgQOBv0pyY5K106xOkrSHDHRhUVWtA9ZNmnZm3/DxQ65LkrSTvPRfkhphoEtSIwx0SWqEgS5JjTDQJakRBrokNcJAl6RGGOiS1AgDXZIaYaBLUiMMdElqhIEuSY0w0CWpEQa6JDXCQJekRhjoktQIA12SGmGgS1IjDHRJaoSBLkmNMNAlqREGuiQ1wkCXpEYY6JLUCANdkhphoEtSIwx0SWrEQIGeZFmSW5OMJ1k5xfzXJLkhybYkbxl+mZKkmcwY6EnmAKuBE4AlwGlJlkxq9j3gdOCzwy5QkjSYuQO0WQqMV9VGgCRrgJOBmycaVNXt3bzH90CNkqQBDNLlMh/Y1De+uZu205IsTzKWZGzr1q27sgpJ0jSe0pOiVXVuVY1W1ejIyMhTuWlJat4ggb4FWNg3vqCbJkl6Ghkk0NcDi5MclmQecCqwds+WJUnaWTMGelVtA1YAlwG3ABdX1YYkq5KcBJDklUk2A6cA5yTZsCeLliQ92SC/cqGq1gHrJk07s294Pb2uGEnSLPFKUUlqhIEuSY0w0CWpEQa6JDXCQJekRhjoktQIA12SGmGgS1IjDHRJaoSBLkmNMNAlqREGuiQ1wkCXpEYY6JLUCANdkhphoEtSIwx0SWqEgS5JjTDQJakRBrokNcJAl6RGGOiS1AgDXZIaYaBLUiMMdElqhIEuSY0w0CWpEQMFepJlSW5NMp5k5RTz909yUTf/2iSLhl2o
JGnHZgz0JHOA1cAJwBLgtCRLJjX7DeD+qjoC+BPgY8MuVJK0Y4McoS8FxqtqY1U9AqwBTp7U5mTggm74c8Brk2R4ZUqSZjJ3gDbzgU1945uBV03Xpqq2JXkQeC5wT3+jJMuB5d3oQ0lu3ZWi90IHM2lfPF3F71awF71e4GvW2ZdesxdON2OQQB+aqjoXOPep3ObTQZKxqhqd7To0GF+vvY+vWc8gXS5bgIV94wu6aVO2STIX+Eng3mEUKEkazCCBvh5YnOSwJPOAU4G1k9qsBd7eDb8F+HJV1fDKlCTNZMYul65PfAVwGTAHOK+qNiRZBYxV1VrgvwOfSTIO3Ecv9PXP9rlupr2cr9fex9cMiAfSktQGrxSVpEYY6JLUCANd0tNWktOTfHK269hbGOiS1AgDfTclWZXkvX3jf5TkjCTvT7I+ydeT/Kdu3gFJLk1yU5JvJnnr7FUugCSLktyS5NNJNiT52yQ/keTIJNd0r98lSZ4z27W2oNvf3+wb/70kH0lyeZKPJbkuybeTHDPFsicm+VqSg5Ocn+S/Jflqko1J3tK1SZKzu7+vb0z8jSVZneSkbviSJOd1w+/s/manfB88NXtleAz03Xce8OsASfaj95PNu4DF9O6DcyTwc0leAywD7qyqn62qlwH/Z3ZK1iSLgdVV9VLgAeDNwF8CH6yqlwPfAP7jLNa3r5hbVUuB9zJpfyf5FWAl8PqqmrjE/wXA0cAvA2d1095E72/uZ4HjgbOTvAC4Cpj4kJhP70aDdNOu7Ianeh/sVQz03VRVtwP3JjkK+CXgH4FX9g3fAPwLem+WbwCv645EjqmqB2enak3y3aq6sRu+HjgcOKiqruimXQC8ZlYq27d8vvv3emBR3/TjgA8CJ1bV/X3Tv1BVj1fVzcDzu2lHAxdW1WNVdTdwBb2/x6uAY7o7xd4M3N0F/c8DX+2Wnfw+6K9hr/CU3sulYX8BnA78NL0j9tcCf1xV50xumOQVwOuBjyb5UlWteioL1ZQe7ht+DDhotgrZB2xj+wPJZ/YNT7wOj7F9Nn0H+BngRcDYFO0Bdnh316rakuQget+SrwR+CvhV4KGq+kGS5/Lk94FdLvuoS+i9UV5J74ray4B3JjkQIMn8JM9Lcgjwo6r6H8DZwCtmq2Dt0IPA/X39uP+G3pGedt/dwPOSPDfJ/vS6S2ZyB103WJKXztD2KuCtSeYkGaH3zeq6bt419Lpzruza/V73bzM8Qh+CqnokyVeAB6rqMeBvk7wE+Fp3W/iHgF8DjqDXp/c48Cjwm7NVs2b0duBTSZ4FbATeMcv1NKGqHu1uG3IdvZv6fWvA5b6V5G3AXyV5ww6aXkKvG+UmoIAPVNVd3byrgF+qqvEkd9A7Sm8q0L30fwi6k6E3AKdU1W2zXY+kfZNdLrupO8kyDnzJMJc0mzxCl6RGeIQuSY0w0CWpEQa6JDXCQJekRhjoktSI/w/TIK6YopbnBgAAAABJRU5ErkJggg==\n", + "text/plain": [ + "
    " + ] + }, + "metadata": { + "tags": [], + "needs_background": "light" + } + }, + { + "output_type": "stream", + "text": [ + "/content/data/mini_speech_commands/unknown/ec21c46b_nohash_2.wav\n" + ], + "name": "stdout" + }, + { + "output_type": "display_data", + "data": { + "image/png": "iVBORw0KGgoAAAANSUhEUgAAAXQAAAEICAYAAABPgw/pAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4yLjIsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy+WH4yJAAAUd0lEQVR4nO3df7RdZX3n8feHxGAFV7FytZIEQyE6RseCvUa7BixF7ApSwapUGDsVrWbsNFNsrRqnlnFSO0WZZWdNTSvYYUGdkUAd0XRIh7Yqv6pALhTUgMg1gkkoGH6O6AgEvvPH2ZeeXO7NPUlOuOTJ+7XWWdk/nr339+xz7ufs8+yzd1JVSJL2fvvNdgGSpOEw0CWpEQa6JDXCQJekRhjoktQIA12SGmGga7ckOT/JR7vhY5Lcuovr+VSSPxhudTNu81eSbEryUJKjnsptDypJJTlituvQ3mHubBegPS/J7cDzgceAHwJ/A6yoqoeGuZ2qugp48QD1nA68q6qO7lv2PcOsZUD/hd5++OIwVpbkcuAjwLEAVfWRYaz36SrJ+cDl3eixVXX6rBUjwCP0fckbqupA4BXAKPDhyQ2S7Gsf8C8ENuzKgknmDLkWabcZ6PuYqtpC7wj9ZfDEV/rfSnIbcFs37ZeT3JjkgSRfTfLyieWTHJXkhiQ/SHIR8My+eccm2dw3vjDJ55NsTXJvkk8meQnwKeDnu66OB7q2T3TddOPvTjKe5L4ka5Mc0jevkrwnyW1djauTpJt3RJIrkjyY5J6uxu0k2T/JQ8Ac4KYk3+mmvyTJ5d06NyQ5qW+Z85P8eZJ1SX4I/OIg+zvJ6UmunjTtiW6Ubr2rk1za7dNrkxw+zbqO7rqIjh1gP+yX5MNJ7kjy/SR/meQnu3kXJHlfNzx/4j3QjR/e7fP9Jl7PJO/r1vFPSd4xyPPWLKkqH40/gNuB47vhhfSOSv+wGy/g74CfAn4COAr4PvAqeoH39m75/YF5wB3A7wDPAN4CPAp8tFvXscDmbngOcBPwJ8AB9IL/6G7e6cDVk2o8v289xwH30Ps2sT/wp8CVfW0L+N/AQcChwFZgWTfvQuD36R2sPLHNafZLAUd0w88AxoH/0D3P44AfAC/uq+9B4F9NrHvAfT/Vc+3f7vnAvcBSel2g/xNYM7ktsAzYBCwdcD+8s3s+PwMcCHwe+EzfvL/uhv818B3gor55X+x7PbcBq7r983rgR8BzZvs97WPqh0fo+44vdEfDVwNXAP+5b94fV9V9VfX/gOXAOVV1bVU9VlUXAA8Dr+4ezwD+a1U9WlWfA9ZPs72lwCHA+6vqh1X146q6epq2k70NOK+qbqiqh4EP0TuiX9TX5qyqeqCqvgd8BTiym/4ova6UQ3Zym6+mF3xnVdUjVfVlemF5Wl+bL1bVP1TV41X14wHXO4hLquq6qtpGL9CPnDT/FOAc4ISqum7SvOn2w9uAT1TVxuqdK/kQcGrXrXYFcHSS/YDXAB+n90EF8Avd/AmPAqu613sd8BADnCfR7DDQ9x1vrKqDquqFVfXvuvCesKlv+IXA+7qv8A90HwIL6YXzIcCWquq/o9sd02xvIXBHF1I765D+9XaBdC8wv6/NXX3DP6IXxgAfAAJc13WbvHMntrmpqh7vm3bHpG1uYs+Y7rlMeC9wcVV9cyeW3W4fdsNzgedX1XfonRw/
EjiG3gfXnUlezJMD/d5Jr+FU9elpwkAX9L66T9gE/FEX/hOPZ1XVhcA/AfMn+mk7h06zzk3AodOcaJ3pFp930vtgASDJAcBzgS0zPpGqu6rq3VV1CPBvgT8b8Gd/dwILu6PWCYdO2uau3Jr0h8CzJkaS/PQurOMU4I1JztiJZbbbh/Seyzbg7m78CnpdZvOqd17lCnrda88BbtyFGvU0YKBrsk8D70nyqvQckOTEJM8GvkYvFH47yTOSvIle18pUrqP3AXBWt45nJpn4Wn83sCDJvGmWvRB4R5Ijk+xPr3vo2qq6fabik5ySZEE3ej+9EH58B4tMuJbe0ecHuud2LPAGYM0Ay+7ITcBLu+fyTHo/a9xZdwKvBc5I8psDLnMh8DtJDktyIL19eFHf0fYVwArgym788m786qp6bBdq1NOAga7tVNUY8G7gk/QCcZzeiT2q6hHgTd34fcBb6Z1sm2o9j9ELxCOA7wGbu/YAX6Z3YvauJPdMsezfA38A/C96HwqHA6cO+BReCVzb/YplLXBGVW2caaHuub0BOIHeCdk/A369qr414HanW++36Z1U/Ht6vyIatE9/8nq+Ry/UVyZ51wCLnAd8hl5gfxf4MfDv++ZfATybfw70q+l9k7gS7bWyfXeoJGlv5RG6JDXCQJekRhjoktSIgQI9ybIkt3aXYq+cps2vJrm5++3vZ4dbpiRpJjOeFE3vJkTfBl5H75cK64HTqurmvjaLgYuB46rq/iTPq6rv72i9Bx98cC1atGg3y5ekfcv1119/T1WNTDVvkLvrLQXGJ376lWQNcDJwc1+bdwOrq+p+gJnCHGDRokWMjY0NsHlJ0oQk012dPVCXy3y2v+R5M9tfDg3wIuBFSf4hyTVJlk1TyPIkY0nGtm7dOsCmJUmDGtZJ0bnAYnp3ZzsN+HSSgyY3qqpzq2q0qkZHRqb8xiBJ2kWDBPoWejdamrCAJ99TYzOwtrsj23fp9bkvHk6JkqRBDBLo64HF3T0h5tG7BHvtpDZfoPtvt5IcTK8LZsbLrSVJwzNjoHc381kBXAbcQu82nhuSrOr7H10uA+5NcjO9ezK/v6ru3VNFS5KebNbu5TI6Olr+ykWSdk6S66tqdKp5XikqSY0w0CWpEQa6JDVikCtFJe2DFq28dLZLaNbtZ524R9brEbokNcJAl6RGGOiS1AgDXZIaYaBLUiMMdElqhIEuSY0w0CWpEQa6JDXCQJekRhjoktQIA12SGmGgS1IjDHRJaoSBLkmNMNAlqREGuiQ1wkCXpEYY6JLUCANdkhphoEtSIwx0SWqEgS5JjRgo0JMsS3JrkvEkK6eYf3qSrUlu7B7vGn6pkqQdmTtTgyRzgNXA64DNwPoka6vq5klNL6qqFXugRknSAAY5Ql8KjFfVxqp6BFgDnLxny5Ik7axBAn0+sKlvfHM3bbI3J/l6ks8lWTjVipIsTzKWZGzr1q27UK4kaTrDOin618Ciqno58HfABVM1qqpzq2q0qkZHRkaGtGlJEgwW6FuA/iPuBd20J1TVvVX1cDf6F8DPDac8SdKgBgn09cDiJIclmQecCqztb5DkBX2jJwG3DK9ESdIgZvyVS1VtS7ICuAyYA5xXVRuSrALGqmot8NtJTgK2AfcBp+/BmiVJU5gx0AGqah2wbtK0M/uGPwR8aLilSZJ2hleKSlIjDHRJaoSBLkmNMNAlqREGuiQ1wkCXpEYY6JLUCANdkhphoEtSIwx0SWqEgS5JjTDQJakRBrokNcJAl6RGGOiS1AgDXZIaYaBLUiMMdElqhIEuSY0w0CWpEQa6JDXCQJekRhjoktQIA12SGmGgS1IjDHRJaoSBLkmNGCjQkyxLcmuS8SQrd9DuzUkqyejwSpQkDWLGQE8yB1gNnAAsAU5LsmSKds8GzgCuHXaRkqSZDXKEvhQYr6qNVfUIsAY4eYp2fwh8DPjxEOuTJA1okECfD2zqG9/cTXtCklcAC6vq0h2tKMnyJGNJxrZu
3brTxUqSprfbJ0WT7Ad8AnjfTG2r6tyqGq2q0ZGRkd3dtCSpzyCBvgVY2De+oJs24dnAy4DLk9wOvBpY64lRSXpqDRLo64HFSQ5LMg84FVg7MbOqHqyqg6tqUVUtAq4BTqqqsT1SsSRpSjMGelVtA1YAlwG3ABdX1YYkq5KctKcLlCQNZu4gjapqHbBu0rQzp2l77O6XJUnaWV4pKkmNMNAlqREGuiQ1wkCXpEYY6JLUCANdkhphoEtSIwx0SWqEgS5JjTDQJakRBrokNcJAl6RGGOiS1AgDXZIaYaBLUiMMdElqhIEuSY0w0CWpEQa6JDXCQJekRhjoktQIA12SGmGgS1IjDHRJaoSBLkmNMNAlqREGuiQ1YqBAT7Isya1JxpOsnGL+e5J8I8mNSa5OsmT4pUqSdmTGQE8yB1gNnAAsAU6bIrA/W1X/sqqOBD4OfGLolUqSdmiQI/SlwHhVbayqR4A1wMn9Darq//aNHgDU8EqUJA1i7gBt5gOb+sY3A6+a3CjJbwG/C8wDjptqRUmWA8sBDj300J2tVZK0A0M7KVpVq6vqcOCDwIenaXNuVY1W1ejIyMiwNi1JYrBA3wIs7Btf0E2bzhrgjbtTlCRp5w0S6OuBxUkOSzIPOBVY298gyeK+0ROB24ZXoiRpEDP2oVfVtiQrgMuAOcB5VbUhySpgrKrWAiuSHA88CtwPvH1PFi1JerJBTopSVeuAdZOmndk3fMaQ65Ik7SSvFJWkRhjoktQIA12SGmGgS1IjDHRJaoSBLkmNMNAlqREGuiQ1wkCXpEYY6JLUCANdkhphoEtSIwx0SWqEgS5JjTDQJakRBrokNcJAl6RGGOiS1AgDXZIaYaBLUiMMdElqhIEuSY0w0CWpEQa6JDXCQJekRhjoktSIubNdwK5YtPLS2S6hWbefdeIeWa+v2Z6zp14z7X0GOkJPsizJrUnGk6ycYv7vJrk5ydeTfCnJC4dfqiRpR2YM9CRzgNXACcAS4LQkSyY1+0dgtKpeDnwO+PiwC5Uk7dggR+hLgfGq2lhVjwBrgJP7G1TVV6rqR93oNcCC4ZYpSZrJIIE+H9jUN765mzad3wD+ZqoZSZYnGUsytnXr1sGrlCTNaKi/cknya8AocPZU86vq3KoararRkZGRYW5akvZ5g/zKZQuwsG98QTdtO0mOB34f+IWqeng45UmSBjXIEfp6YHGSw5LMA04F1vY3SHIUcA5wUlV9f/hlSpJmMmOgV9U2YAVwGXALcHFVbUiyKslJXbOzgQOBv0pyY5K106xOkrSHDHRhUVWtA9ZNmnZm3/DxQ65LkrSTvPRfkhphoEtSIwx0SWqEgS5JjTDQJakRBrokNcJAl6RGGOiS1AgDXZIaYaBLUiMMdElqhIEuSY0w0CWpEQa6JDXCQJekRhjoktQIA12SGmGgS1IjDHRJaoSBLkmNMNAlqREGuiQ1wkCXpEYY6JLUCANdkhphoEtSIwx0SWrEQIGeZFmSW5OMJ1k5xfzXJLkhybYkbxl+mZKkmcwY6EnmAKuBE4AlwGlJlkxq9j3gdOCzwy5QkjSYuQO0WQqMV9VGgCRrgJOBmycaVNXt3bzH90CNkqQBDNLlMh/Y1De+uZu205IsTzKWZGzr1q27sgpJ0jSe0pOiVXVuVY1W1ejIyMhTuWlJat4ggb4FWNg3vqCbJkl6Ghkk0NcDi5MclmQecCqwds+WJUnaWTMGelVtA1YAlwG3ABdX1YYkq5KcBJDklUk2A6cA5yTZsCeLliQ92SC/cqGq1gHrJk07s294Pb2uGEnSLPFKUUlqhIEuSY0w0CWpEQa6JDXCQJekRhjoktQIA12SGmGgS1IjDHRJaoSBLkmNMNAlqREGuiQ1wkCXpEYY6JLUCANdkhphoEtSIwx0SWqEgS5JjTDQJakRBrokNcJAl6RGGOiS1AgDXZIaYaBLUiMMdElqhIEuSY0w0CWpEQMFepJlSW5NMp5k5RTz909yUTf/2iSLhl2o
JGnHZgz0JHOA1cAJwBLgtCRLJjX7DeD+qjoC+BPgY8MuVJK0Y4McoS8FxqtqY1U9AqwBTp7U5mTggm74c8Brk2R4ZUqSZjJ3gDbzgU1945uBV03Xpqq2JXkQeC5wT3+jJMuB5d3oQ0lu3ZWi90IHM2lfPF3F71awF71e4GvW2ZdesxdON2OQQB+aqjoXOPep3ObTQZKxqhqd7To0GF+vvY+vWc8gXS5bgIV94wu6aVO2STIX+Eng3mEUKEkazCCBvh5YnOSwJPOAU4G1k9qsBd7eDb8F+HJV1fDKlCTNZMYul65PfAVwGTAHOK+qNiRZBYxV1VrgvwOfSTIO3Ecv9PXP9rlupr2cr9fex9cMiAfSktQGrxSVpEYY6JLUCANd0tNWktOTfHK269hbGOiS1AgDfTclWZXkvX3jf5TkjCTvT7I+ydeT/Kdu3gFJLk1yU5JvJnnr7FUugCSLktyS5NNJNiT52yQ/keTIJNd0r98lSZ4z27W2oNvf3+wb/70kH0lyeZKPJbkuybeTHDPFsicm+VqSg5Ocn+S/Jflqko1J3tK1SZKzu7+vb0z8jSVZneSkbviSJOd1w+/s/manfB88NXtleAz03Xce8OsASfaj95PNu4DF9O6DcyTwc0leAywD7qyqn62qlwH/Z3ZK1iSLgdVV9VLgAeDNwF8CH6yqlwPfAP7jLNa3r5hbVUuB9zJpfyf5FWAl8PqqmrjE/wXA0cAvA2d1095E72/uZ4HjgbOTvAC4Cpj4kJhP70aDdNOu7Ianeh/sVQz03VRVtwP3JjkK+CXgH4FX9g3fAPwLem+WbwCv645EjqmqB2enak3y3aq6sRu+HjgcOKiqruimXQC8ZlYq27d8vvv3emBR3/TjgA8CJ1bV/X3Tv1BVj1fVzcDzu2lHAxdW1WNVdTdwBb2/x6uAY7o7xd4M3N0F/c8DX+2Wnfw+6K9hr/CU3sulYX8BnA78NL0j9tcCf1xV50xumOQVwOuBjyb5UlWteioL1ZQe7ht+DDhotgrZB2xj+wPJZ/YNT7wOj7F9Nn0H+BngRcDYFO0Bdnh316rakuQget+SrwR+CvhV4KGq+kGS5/Lk94FdLvuoS+i9UV5J74ray4B3JjkQIMn8JM9Lcgjwo6r6H8DZwCtmq2Dt0IPA/X39uP+G3pGedt/dwPOSPDfJ/vS6S2ZyB103WJKXztD2KuCtSeYkGaH3zeq6bt419Lpzruza/V73bzM8Qh+CqnokyVeAB6rqMeBvk7wE+Fp3W/iHgF8DjqDXp/c48Cjwm7NVs2b0duBTSZ4FbATeMcv1NKGqHu1uG3IdvZv6fWvA5b6V5G3AXyV5ww6aXkKvG+UmoIAPVNVd3byrgF+qqvEkd9A7Sm8q0L30fwi6k6E3AKdU1W2zXY+kfZNdLrupO8kyDnzJMJc0mzxCl6RGeIQuSY0w0CWpEQa6JDXCQJekRhjoktSI/w/TIK6YopbnBgAAAABJRU5ErkJggg==\n", + "text/plain": [ + "
    " + ] + }, + "metadata": { + "tags": [], + "needs_background": "light" + } + }, + { + "output_type": "stream", + "text": [ + "/content/data/mini_speech_commands/unknown/2f0a410b_nohash_1.wav\n" + ], + "name": "stdout" + }, + { + "output_type": "display_data", + "data": { + "image/png": "iVBORw0KGgoAAAANSUhEUgAAAXQAAAEICAYAAABPgw/pAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4yLjIsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy+WH4yJAAAUd0lEQVR4nO3df7RdZX3n8feHxGAFV7FytZIEQyE6RseCvUa7BixF7ApSwapUGDsVrWbsNFNsrRqnlnFSO0WZZWdNTSvYYUGdkUAd0XRIh7Yqv6pALhTUgMg1gkkoGH6O6AgEvvPH2ZeeXO7NPUlOuOTJ+7XWWdk/nr339+xz7ufs8+yzd1JVSJL2fvvNdgGSpOEw0CWpEQa6JDXCQJekRhjoktQIA12SGmGga7ckOT/JR7vhY5Lcuovr+VSSPxhudTNu81eSbEryUJKjnsptDypJJTlituvQ3mHubBegPS/J7cDzgceAHwJ/A6yoqoeGuZ2qugp48QD1nA68q6qO7lv2PcOsZUD/hd5++OIwVpbkcuAjwLEAVfWRYaz36SrJ+cDl3eixVXX6rBUjwCP0fckbqupA4BXAKPDhyQ2S7Gsf8C8ENuzKgknmDLkWabcZ6PuYqtpC7wj9ZfDEV/rfSnIbcFs37ZeT3JjkgSRfTfLyieWTHJXkhiQ/SHIR8My+eccm2dw3vjDJ55NsTXJvkk8meQnwKeDnu66OB7q2T3TddOPvTjKe5L4ka5Mc0jevkrwnyW1djauTpJt3RJIrkjyY5J6uxu0k2T/JQ8Ac4KYk3+mmvyTJ5d06NyQ5qW+Z85P8eZJ1SX4I/OIg+zvJ6UmunjTtiW6Ubr2rk1za7dNrkxw+zbqO7rqIjh1gP+yX5MNJ7kjy/SR/meQnu3kXJHlfNzx/4j3QjR/e7fP9Jl7PJO/r1vFPSd4xyPPWLKkqH40/gNuB47vhhfSOSv+wGy/g74CfAn4COAr4PvAqeoH39m75/YF5wB3A7wDPAN4CPAp8tFvXscDmbngOcBPwJ8AB9IL/6G7e6cDVk2o8v289xwH30Ps2sT/wp8CVfW0L+N/AQcChwFZgWTfvQuD36R2sPLHNafZLAUd0w88AxoH/0D3P44AfAC/uq+9B4F9NrHvAfT/Vc+3f7vnAvcBSel2g/xNYM7ktsAzYBCwdcD+8s3s+PwMcCHwe+EzfvL/uhv818B3gor55X+x7PbcBq7r983rgR8BzZvs97WPqh0fo+44vdEfDVwNXAP+5b94fV9V9VfX/gOXAOVV1bVU9VlUXAA8Dr+4ezwD+a1U9WlWfA9ZPs72lwCHA+6vqh1X146q6epq2k70NOK+qbqiqh4EP0TuiX9TX5qyqeqCqvgd8BTiym/4ova6UQ3Zym6+mF3xnVdUjVfVlemF5Wl+bL1bVP1TV41X14wHXO4hLquq6qtpGL9CPnDT/FOAc4ISqum7SvOn2w9uAT1TVxuqdK/kQcGrXrXYFcHSS/YDXAB+n90EF8Avd/AmPAqu613sd8BADnCfR7DDQ9x1vrKqDquqFVfXvuvCesKlv+IXA+7qv8A90HwIL6YXzIcCWquq/o9sd02xvIXBHF1I765D+9XaBdC8wv6/NXX3DP6IXxgAfAAJc13WbvHMntrmpqh7vm3bHpG1uYs+Y7rlMeC9wcVV9cyeW3W4fdsNzgedX1XfonRw/
EjiG3gfXnUlezJMD/d5Jr+FU9elpwkAX9L66T9gE/FEX/hOPZ1XVhcA/AfMn+mk7h06zzk3AodOcaJ3pFp930vtgASDJAcBzgS0zPpGqu6rq3VV1CPBvgT8b8Gd/dwILu6PWCYdO2uau3Jr0h8CzJkaS/PQurOMU4I1JztiJZbbbh/Seyzbg7m78CnpdZvOqd17lCnrda88BbtyFGvU0YKBrsk8D70nyqvQckOTEJM8GvkYvFH47yTOSvIle18pUrqP3AXBWt45nJpn4Wn83sCDJvGmWvRB4R5Ijk+xPr3vo2qq6fabik5ySZEE3ej+9EH58B4tMuJbe0ecHuud2LPAGYM0Ay+7ITcBLu+fyTHo/a9xZdwKvBc5I8psDLnMh8DtJDktyIL19eFHf0fYVwArgym788m786qp6bBdq1NOAga7tVNUY8G7gk/QCcZzeiT2q6hHgTd34fcBb6Z1sm2o9j9ELxCOA7wGbu/YAX6Z3YvauJPdMsezfA38A/C96HwqHA6cO+BReCVzb/YplLXBGVW2caaHuub0BOIHeCdk/A369qr414HanW++36Z1U/Ht6vyIatE9/8nq+Ry/UVyZ51wCLnAd8hl5gfxf4MfDv++ZfATybfw70q+l9k7gS7bWyfXeoJGlv5RG6JDXCQJekRhjoktSIgQI9ybIkt3aXYq+cps2vJrm5++3vZ4dbpiRpJjOeFE3vJkTfBl5H75cK64HTqurmvjaLgYuB46rq/iTPq6rv72i9Bx98cC1atGg3y5ekfcv1119/T1WNTDVvkLvrLQXGJ376lWQNcDJwc1+bdwOrq+p+gJnCHGDRokWMjY0NsHlJ0oQk012dPVCXy3y2v+R5M9tfDg3wIuBFSf4hyTVJlk1TyPIkY0nGtm7dOsCmJUmDGtZJ0bnAYnp3ZzsN+HSSgyY3qqpzq2q0qkZHRqb8xiBJ2kWDBPoWejdamrCAJ99TYzOwtrsj23fp9bkvHk6JkqRBDBLo64HF3T0h5tG7BHvtpDZfoPtvt5IcTK8LZsbLrSVJwzNjoHc381kBXAbcQu82nhuSrOr7H10uA+5NcjO9ezK/v6ru3VNFS5KebNbu5TI6Olr+ykWSdk6S66tqdKp5XikqSY0w0CWpEQa6JDVikCtFJe2DFq28dLZLaNbtZ524R9brEbokNcJAl6RGGOiS1AgDXZIaYaBLUiMMdElqhIEuSY0w0CWpEQa6JDXCQJekRhjoktQIA12SGmGgS1IjDHRJaoSBLkmNMNAlqREGuiQ1wkCXpEYY6JLUCANdkhphoEtSIwx0SWqEgS5JjRgo0JMsS3JrkvEkK6eYf3qSrUlu7B7vGn6pkqQdmTtTgyRzgNXA64DNwPoka6vq5klNL6qqFXugRknSAAY5Ql8KjFfVxqp6BFgDnLxny5Ik7axBAn0+sKlvfHM3bbI3J/l6ks8lWTjVipIsTzKWZGzr1q27UK4kaTrDOin618Ciqno58HfABVM1qqpzq2q0qkZHRkaGtGlJEgwW6FuA/iPuBd20J1TVvVX1cDf6F8DPDac8SdKgBgn09cDiJIclmQecCqztb5DkBX2jJwG3DK9ESdIgZvyVS1VtS7ICuAyYA5xXVRuSrALGqmot8NtJTgK2AfcBp+/BmiVJU5gx0AGqah2wbtK0M/uGPwR8aLilSZJ2hleKSlIjDHRJaoSBLkmNMNAlqREGuiQ1wkCXpEYY6JLUCANdkhphoEtSIwx0SWqEgS5JjTDQJakRBrokNcJAl6RGGOiS1AgDXZIaYaBLUiMMdElqhIEuSY0w0CWpEQa6JDXCQJekRhjoktQIA12SGmGgS1IjDHRJaoSBLkmNGCjQkyxLcmuS8SQrd9DuzUkqyejwSpQkDWLGQE8yB1gNnAAsAU5LsmSKds8GzgCuHXaRkqSZDXKEvhQYr6qNVfUIsAY4eYp2fwh8DPjxEOuTJA1okECfD2zqG9/cTXtCklcAC6vq0h2tKMnyJGNJxrZu
3brTxUqSprfbJ0WT7Ad8AnjfTG2r6tyqGq2q0ZGRkd3dtCSpzyCBvgVY2De+oJs24dnAy4DLk9wOvBpY64lRSXpqDRLo64HFSQ5LMg84FVg7MbOqHqyqg6tqUVUtAq4BTqqqsT1SsSRpSjMGelVtA1YAlwG3ABdX1YYkq5KctKcLlCQNZu4gjapqHbBu0rQzp2l77O6XJUnaWV4pKkmNMNAlqREGuiQ1wkCXpEYY6JLUCANdkhphoEtSIwx0SWqEgS5JjTDQJakRBrokNcJAl6RGGOiS1AgDXZIaYaBLUiMMdElqhIEuSY0w0CWpEQa6JDXCQJekRhjoktQIA12SGmGgS1IjDHRJaoSBLkmNMNAlqREGuiQ1YqBAT7Isya1JxpOsnGL+e5J8I8mNSa5OsmT4pUqSdmTGQE8yB1gNnAAsAU6bIrA/W1X/sqqOBD4OfGLolUqSdmiQI/SlwHhVbayqR4A1wMn9Darq//aNHgDU8EqUJA1i7gBt5gOb+sY3A6+a3CjJbwG/C8wDjptqRUmWA8sBDj300J2tVZK0A0M7KVpVq6vqcOCDwIenaXNuVY1W1ejIyMiwNi1JYrBA3wIs7Btf0E2bzhrgjbtTlCRp5w0S6OuBxUkOSzIPOBVY298gyeK+0ROB24ZXoiRpEDP2oVfVtiQrgMuAOcB5VbUhySpgrKrWAiuSHA88CtwPvH1PFi1JerJBTopSVeuAdZOmndk3fMaQ65Ik7SSvFJWkRhjoktQIA12SGmGgS1IjDHRJaoSBLkmNMNAlqREGuiQ1wkCXpEYY6JLUCANdkhphoEtSIwx0SWqEgS5JjTDQJakRBrokNcJAl6RGGOiS1AgDXZIaYaBLUiMMdElqhIEuSY0w0CWpEQa6JDXCQJekRhjoktSIubNdwK5YtPLS2S6hWbefdeIeWa+v2Z6zp14z7X0GOkJPsizJrUnGk6ycYv7vJrk5ydeTfCnJC4dfqiRpR2YM9CRzgNXACcAS4LQkSyY1+0dgtKpeDnwO+PiwC5Uk7dggR+hLgfGq2lhVjwBrgJP7G1TVV6rqR93oNcCC4ZYpSZrJIIE+H9jUN765mzad3wD+ZqoZSZYnGUsytnXr1sGrlCTNaKi/cknya8AocPZU86vq3KoararRkZGRYW5akvZ5g/zKZQuwsG98QTdtO0mOB34f+IWqeng45UmSBjXIEfp6YHGSw5LMA04F1vY3SHIUcA5wUlV9f/hlSpJmMmOgV9U2YAVwGXALcHFVbUiyKslJXbOzgQOBv0pyY5K106xOkrSHDHRhUVWtA9ZNmnZm3/DxQ65LkrSTvPRfkhphoEtSIwx0SWqEgS5JjTDQJakRBrokNcJAl6RGGOiS1AgDXZIaYaBLUiMMdElqhIEuSY0w0CWpEQa6JDXCQJekRhjoktQIA12SGmGgS1IjDHRJaoSBLkmNMNAlqREGuiQ1wkCXpEYY6JLUCANdkhphoEtSIwx0SWrEQIGeZFmSW5OMJ1k5xfzXJLkhybYkbxl+mZKkmcwY6EnmAKuBE4AlwGlJlkxq9j3gdOCzwy5QkjSYuQO0WQqMV9VGgCRrgJOBmycaVNXt3bzH90CNkqQBDNLlMh/Y1De+uZu205IsTzKWZGzr1q27sgpJ0jSe0pOiVXVuVY1W1ejIyMhTuWlJat4ggb4FWNg3vqCbJkl6Ghkk0NcDi5MclmQecCqwds+WJUnaWTMGelVtA1YAlwG3ABdX1YYkq5KcBJDklUk2A6cA5yTZsCeLliQ92SC/cqGq1gHrJk07s294Pb2uGEnSLPFKUUlqhIEuSY0w0CWpEQa6JDXCQJekRhjoktQIA12SGmGgS1IjDHRJaoSBLkmNMNAlqREGuiQ1wkCXpEYY6JLUCANdkhphoEtSIwx0SWqEgS5JjTDQJakRBrokNcJAl6RGGOiS1AgDXZIaYaBLUiMMdElqhIEuSY0w0CWpEQMFepJlSW5NMp5k5RTz909yUTf/2iSLhl2o
JGnHZgz0JHOA1cAJwBLgtCRLJjX7DeD+qjoC+BPgY8MuVJK0Y4McoS8FxqtqY1U9AqwBTp7U5mTggm74c8Brk2R4ZUqSZjJ3gDbzgU1945uBV03Xpqq2JXkQeC5wT3+jJMuB5d3oQ0lu3ZWi90IHM2lfPF3F71awF71e4GvW2ZdesxdON2OQQB+aqjoXOPep3ObTQZKxqhqd7To0GF+vvY+vWc8gXS5bgIV94wu6aVO2STIX+Eng3mEUKEkazCCBvh5YnOSwJPOAU4G1k9qsBd7eDb8F+HJV1fDKlCTNZMYul65PfAVwGTAHOK+qNiRZBYxV1VrgvwOfSTIO3Ecv9PXP9rlupr2cr9fex9cMiAfSktQGrxSVpEYY6JLUCANd0tNWktOTfHK269hbGOiS1AgDfTclWZXkvX3jf5TkjCTvT7I+ydeT/Kdu3gFJLk1yU5JvJnnr7FUugCSLktyS5NNJNiT52yQ/keTIJNd0r98lSZ4z27W2oNvf3+wb/70kH0lyeZKPJbkuybeTHDPFsicm+VqSg5Ocn+S/Jflqko1J3tK1SZKzu7+vb0z8jSVZneSkbviSJOd1w+/s/manfB88NXtleAz03Xce8OsASfaj95PNu4DF9O6DcyTwc0leAywD7qyqn62qlwH/Z3ZK1iSLgdVV9VLgAeDNwF8CH6yqlwPfAP7jLNa3r5hbVUuB9zJpfyf5FWAl8PqqmrjE/wXA0cAvA2d1095E72/uZ4HjgbOTvAC4Cpj4kJhP70aDdNOu7Ianeh/sVQz03VRVtwP3JjkK+CXgH4FX9g3fAPwLem+WbwCv645EjqmqB2enak3y3aq6sRu+HjgcOKiqruimXQC8ZlYq27d8vvv3emBR3/TjgA8CJ1bV/X3Tv1BVj1fVzcDzu2lHAxdW1WNVdTdwBb2/x6uAY7o7xd4M3N0F/c8DX+2Wnfw+6K9hr/CU3sulYX8BnA78NL0j9tcCf1xV50xumOQVwOuBjyb5UlWteioL1ZQe7ht+DDhotgrZB2xj+wPJZ/YNT7wOj7F9Nn0H+BngRcDYFO0Bdnh316rakuQget+SrwR+CvhV4KGq+kGS5/Lk94FdLvuoS+i9UV5J74ray4B3JjkQIMn8JM9Lcgjwo6r6H8DZwCtmq2Dt0IPA/X39uP+G3pGedt/dwPOSPDfJ/vS6S2ZyB103WJKXztD2KuCtSeYkGaH3zeq6bt419Lpzruza/V73bzM8Qh+CqnokyVeAB6rqMeBvk7wE+Fp3W/iHgF8DjqDXp/c48Cjwm7NVs2b0duBTSZ4FbATeMcv1NKGqHu1uG3IdvZv6fWvA5b6V5G3AXyV5ww6aXkKvG+UmoIAPVNVd3byrgF+qqvEkd9A7Sm8q0L30fwi6k6E3AKdU1W2zXY+kfZNdLrupO8kyDnzJMJc0mzxCl6RGeIQuSY0w0CWpEQa6JDXCQJekRhjoktSI/w/TIK6YopbnBgAAAABJRU5ErkJggg==\n", + "text/plain": [ + "
    " + ] + }, + "metadata": { + "tags": [], + "needs_background": "light" + } + }, + { + "output_type": "stream", + "text": [ + "/content/data/mini_speech_commands/unknown/f2a90886_nohash_0.wav\n" + ], + "name": "stdout" + }, + { + "output_type": "display_data", + "data": { + "image/png": "iVBORw0KGgoAAAANSUhEUgAAAXQAAAEICAYAAABPgw/pAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4yLjIsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy+WH4yJAAAUd0lEQVR4nO3df7RdZX3n8feHxGAFV7FytZIEQyE6RseCvUa7BixF7ApSwapUGDsVrWbsNFNsrRqnlnFSO0WZZWdNTSvYYUGdkUAd0XRIh7Yqv6pALhTUgMg1gkkoGH6O6AgEvvPH2ZeeXO7NPUlOuOTJ+7XWWdk/nr339+xz7ufs8+yzd1JVSJL2fvvNdgGSpOEw0CWpEQa6JDXCQJekRhjoktQIA12SGmGga7ckOT/JR7vhY5Lcuovr+VSSPxhudTNu81eSbEryUJKjnsptDypJJTlituvQ3mHubBegPS/J7cDzgceAHwJ/A6yoqoeGuZ2qugp48QD1nA68q6qO7lv2PcOsZUD/hd5++OIwVpbkcuAjwLEAVfWRYaz36SrJ+cDl3eixVXX6rBUjwCP0fckbqupA4BXAKPDhyQ2S7Gsf8C8ENuzKgknmDLkWabcZ6PuYqtpC7wj9ZfDEV/rfSnIbcFs37ZeT3JjkgSRfTfLyieWTHJXkhiQ/SHIR8My+eccm2dw3vjDJ55NsTXJvkk8meQnwKeDnu66OB7q2T3TddOPvTjKe5L4ka5Mc0jevkrwnyW1djauTpJt3RJIrkjyY5J6uxu0k2T/JQ8Ac4KYk3+mmvyTJ5d06NyQ5qW+Z85P8eZJ1SX4I/OIg+zvJ6UmunjTtiW6Ubr2rk1za7dNrkxw+zbqO7rqIjh1gP+yX5MNJ7kjy/SR/meQnu3kXJHlfNzx/4j3QjR/e7fP9Jl7PJO/r1vFPSd4xyPPWLKkqH40/gNuB47vhhfSOSv+wGy/g74CfAn4COAr4PvAqeoH39m75/YF5wB3A7wDPAN4CPAp8tFvXscDmbngOcBPwJ8AB9IL/6G7e6cDVk2o8v289xwH30Ps2sT/wp8CVfW0L+N/AQcChwFZgWTfvQuD36R2sPLHNafZLAUd0w88AxoH/0D3P44AfAC/uq+9B4F9NrHvAfT/Vc+3f7vnAvcBSel2g/xNYM7ktsAzYBCwdcD+8s3s+PwMcCHwe+EzfvL/uhv818B3gor55X+x7PbcBq7r983rgR8BzZvs97WPqh0fo+44vdEfDVwNXAP+5b94fV9V9VfX/gOXAOVV1bVU9VlUXAA8Dr+4ezwD+a1U9WlWfA9ZPs72lwCHA+6vqh1X146q6epq2k70NOK+qbqiqh4EP0TuiX9TX5qyqeqCqvgd8BTiym/4ova6UQ3Zym6+mF3xnVdUjVfVlemF5Wl+bL1bVP1TV41X14wHXO4hLquq6qtpGL9CPnDT/FOAc4ISqum7SvOn2w9uAT1TVxuqdK/kQcGrXrXYFcHSS/YDXAB+n90EF8Avd/AmPAqu613sd8BADnCfR7DDQ9x1vrKqDquqFVfXvuvCesKlv+IXA+7qv8A90HwIL6YXzIcCWquq/o9sd02xvIXBHF1I765D+9XaBdC8wv6/NXX3DP6IXxgAfAAJc13WbvHMntrmpqh7vm3bHpG1uYs+Y7rlMeC9wcVV9cyeW3W4fdsNzgedX1XfonRw/
EjiG3gfXnUlezJMD/d5Jr+FU9elpwkAX9L66T9gE/FEX/hOPZ1XVhcA/AfMn+mk7h06zzk3AodOcaJ3pFp930vtgASDJAcBzgS0zPpGqu6rq3VV1CPBvgT8b8Gd/dwILu6PWCYdO2uau3Jr0h8CzJkaS/PQurOMU4I1JztiJZbbbh/Seyzbg7m78CnpdZvOqd17lCnrda88BbtyFGvU0YKBrsk8D70nyqvQckOTEJM8GvkYvFH47yTOSvIle18pUrqP3AXBWt45nJpn4Wn83sCDJvGmWvRB4R5Ijk+xPr3vo2qq6fabik5ySZEE3ej+9EH58B4tMuJbe0ecHuud2LPAGYM0Ay+7ITcBLu+fyTHo/a9xZdwKvBc5I8psDLnMh8DtJDktyIL19eFHf0fYVwArgym788m786qp6bBdq1NOAga7tVNUY8G7gk/QCcZzeiT2q6hHgTd34fcBb6Z1sm2o9j9ELxCOA7wGbu/YAX6Z3YvauJPdMsezfA38A/C96HwqHA6cO+BReCVzb/YplLXBGVW2caaHuub0BOIHeCdk/A369qr414HanW++36Z1U/Ht6vyIatE9/8nq+Ry/UVyZ51wCLnAd8hl5gfxf4MfDv++ZfATybfw70q+l9k7gS7bWyfXeoJGlv5RG6JDXCQJekRhjoktSIgQI9ybIkt3aXYq+cps2vJrm5++3vZ4dbpiRpJjOeFE3vJkTfBl5H75cK64HTqurmvjaLgYuB46rq/iTPq6rv72i9Bx98cC1atGg3y5ekfcv1119/T1WNTDVvkLvrLQXGJ376lWQNcDJwc1+bdwOrq+p+gJnCHGDRokWMjY0NsHlJ0oQk012dPVCXy3y2v+R5M9tfDg3wIuBFSf4hyTVJlk1TyPIkY0nGtm7dOsCmJUmDGtZJ0bnAYnp3ZzsN+HSSgyY3qqpzq2q0qkZHRqb8xiBJ2kWDBPoWejdamrCAJ99TYzOwtrsj23fp9bkvHk6JkqRBDBLo64HF3T0h5tG7BHvtpDZfoPtvt5IcTK8LZsbLrSVJwzNjoHc381kBXAbcQu82nhuSrOr7H10uA+5NcjO9ezK/v6ru3VNFS5KebNbu5TI6Olr+ykWSdk6S66tqdKp5XikqSY0w0CWpEQa6JDVikCtFJe2DFq28dLZLaNbtZ524R9brEbokNcJAl6RGGOiS1AgDXZIaYaBLUiMMdElqhIEuSY0w0CWpEQa6JDXCQJekRhjoktQIA12SGmGgS1IjDHRJaoSBLkmNMNAlqREGuiQ1wkCXpEYY6JLUCANdkhphoEtSIwx0SWqEgS5JjRgo0JMsS3JrkvEkK6eYf3qSrUlu7B7vGn6pkqQdmTtTgyRzgNXA64DNwPoka6vq5klNL6qqFXugRknSAAY5Ql8KjFfVxqp6BFgDnLxny5Ik7axBAn0+sKlvfHM3bbI3J/l6ks8lWTjVipIsTzKWZGzr1q27UK4kaTrDOin618Ciqno58HfABVM1qqpzq2q0qkZHRkaGtGlJEgwW6FuA/iPuBd20J1TVvVX1cDf6F8DPDac8SdKgBgn09cDiJIclmQecCqztb5DkBX2jJwG3DK9ESdIgZvyVS1VtS7ICuAyYA5xXVRuSrALGqmot8NtJTgK2AfcBp+/BmiVJU5gx0AGqah2wbtK0M/uGPwR8aLilSZJ2hleKSlIjDHRJaoSBLkmNMNAlqREGuiQ1wkCXpEYY6JLUCANdkhphoEtSIwx0SWqEgS5JjTDQJakRBrokNcJAl6RGGOiS1AgDXZIaYaBLUiMMdElqhIEuSY0w0CWpEQa6JDXCQJekRhjoktQIA12SGmGgS1IjDHRJaoSBLkmNGCjQkyxLcmuS8SQrd9DuzUkqyejwSpQkDWLGQE8yB1gNnAAsAU5LsmSKds8GzgCuHXaRkqSZDXKEvhQYr6qNVfUIsAY4eYp2fwh8DPjxEOuTJA1okECfD2zqG9/cTXtCklcAC6vq0h2tKMnyJGNJxrZu
3brTxUqSprfbJ0WT7Ad8AnjfTG2r6tyqGq2q0ZGRkd3dtCSpzyCBvgVY2De+oJs24dnAy4DLk9wOvBpY64lRSXpqDRLo64HFSQ5LMg84FVg7MbOqHqyqg6tqUVUtAq4BTqqqsT1SsSRpSjMGelVtA1YAlwG3ABdX1YYkq5KctKcLlCQNZu4gjapqHbBu0rQzp2l77O6XJUnaWV4pKkmNMNAlqREGuiQ1wkCXpEYY6JLUCANdkhphoEtSIwx0SWqEgS5JjTDQJakRBrokNcJAl6RGGOiS1AgDXZIaYaBLUiMMdElqhIEuSY0w0CWpEQa6JDXCQJekRhjoktQIA12SGmGgS1IjDHRJaoSBLkmNMNAlqREGuiQ1YqBAT7Isya1JxpOsnGL+e5J8I8mNSa5OsmT4pUqSdmTGQE8yB1gNnAAsAU6bIrA/W1X/sqqOBD4OfGLolUqSdmiQI/SlwHhVbayqR4A1wMn9Darq//aNHgDU8EqUJA1i7gBt5gOb+sY3A6+a3CjJbwG/C8wDjptqRUmWA8sBDj300J2tVZK0A0M7KVpVq6vqcOCDwIenaXNuVY1W1ejIyMiwNi1JYrBA3wIs7Btf0E2bzhrgjbtTlCRp5w0S6OuBxUkOSzIPOBVY298gyeK+0ROB24ZXoiRpEDP2oVfVtiQrgMuAOcB5VbUhySpgrKrWAiuSHA88CtwPvH1PFi1JerJBTopSVeuAdZOmndk3fMaQ65Ik7SSvFJWkRhjoktQIA12SGmGgS1IjDHRJaoSBLkmNMNAlqREGuiQ1wkCXpEYY6JLUCANdkhphoEtSIwx0SWqEgS5JjTDQJakRBrokNcJAl6RGGOiS1AgDXZIaYaBLUiMMdElqhIEuSY0w0CWpEQa6JDXCQJekRhjoktSIubNdwK5YtPLS2S6hWbefdeIeWa+v2Z6zp14z7X0GOkJPsizJrUnGk6ycYv7vJrk5ydeTfCnJC4dfqiRpR2YM9CRzgNXACcAS4LQkSyY1+0dgtKpeDnwO+PiwC5Uk7dggR+hLgfGq2lhVjwBrgJP7G1TVV6rqR93oNcCC4ZYpSZrJIIE+H9jUN765mzad3wD+ZqoZSZYnGUsytnXr1sGrlCTNaKi/cknya8AocPZU86vq3KoararRkZGRYW5akvZ5g/zKZQuwsG98QTdtO0mOB34f+IWqeng45UmSBjXIEfp6YHGSw5LMA04F1vY3SHIUcA5wUlV9f/hlSpJmMmOgV9U2YAVwGXALcHFVbUiyKslJXbOzgQOBv0pyY5K106xOkrSHDHRhUVWtA9ZNmnZm3/DxQ65LkrSTvPRfkhphoEtSIwx0SWqEgS5JjTDQJakRBrokNcJAl6RGGOiS1AgDXZIaYaBLUiMMdElqhIEuSY0w0CWpEQa6JDXCQJekRhjoktQIA12SGmGgS1IjDHRJaoSBLkmNMNAlqREGuiQ1wkCXpEYY6JLUCANdkhphoEtSIwx0SWrEQIGeZFmSW5OMJ1k5xfzXJLkhybYkbxl+mZKkmcwY6EnmAKuBE4AlwGlJlkxq9j3gdOCzwy5QkjSYuQO0WQqMV9VGgCRrgJOBmycaVNXt3bzH90CNkqQBDNLlMh/Y1De+uZu205IsTzKWZGzr1q27sgpJ0jSe0pOiVXVuVY1W1ejIyMhTuWlJat4ggb4FWNg3vqCbJkl6Ghkk0NcDi5MclmQecCqwds+WJUnaWTMGelVtA1YAlwG3ABdX1YYkq5KcBJDklUk2A6cA5yTZsCeLliQ92SC/cqGq1gHrJk07s294Pb2uGEnSLPFKUUlqhIEuSY0w0CWpEQa6JDXCQJekRhjoktQIA12SGmGgS1IjDHRJaoSBLkmNMNAlqREGuiQ1wkCXpEYY6JLUCANdkhphoEtSIwx0SWqEgS5JjTDQJakRBrokNcJAl6RGGOiS1AgDXZIaYaBLUiMMdElqhIEuSY0w0CWpEQMFepJlSW5NMp5k5RTz909yUTf/2iSLhl2o
JGnHZgz0JHOA1cAJwBLgtCRLJjX7DeD+qjoC+BPgY8MuVJK0Y4McoS8FxqtqY1U9AqwBTp7U5mTggm74c8Brk2R4ZUqSZjJ3gDbzgU1945uBV03Xpqq2JXkQeC5wT3+jJMuB5d3oQ0lu3ZWi90IHM2lfPF3F71awF71e4GvW2ZdesxdON2OQQB+aqjoXOPep3ObTQZKxqhqd7To0GF+vvY+vWc8gXS5bgIV94wu6aVO2STIX+Eng3mEUKEkazCCBvh5YnOSwJPOAU4G1k9qsBd7eDb8F+HJV1fDKlCTNZMYul65PfAVwGTAHOK+qNiRZBYxV1VrgvwOfSTIO3Ecv9PXP9rlupr2cr9fex9cMiAfSktQGrxSVpEYY6JLUCANd0tNWktOTfHK269hbGOiS1AgDfTclWZXkvX3jf5TkjCTvT7I+ydeT/Kdu3gFJLk1yU5JvJnnr7FUugCSLktyS5NNJNiT52yQ/keTIJNd0r98lSZ4z27W2oNvf3+wb/70kH0lyeZKPJbkuybeTHDPFsicm+VqSg5Ocn+S/Jflqko1J3tK1SZKzu7+vb0z8jSVZneSkbviSJOd1w+/s/manfB88NXtleAz03Xce8OsASfaj95PNu4DF9O6DcyTwc0leAywD7qyqn62qlwH/Z3ZK1iSLgdVV9VLgAeDNwF8CH6yqlwPfAP7jLNa3r5hbVUuB9zJpfyf5FWAl8PqqmrjE/wXA0cAvA2d1095E72/uZ4HjgbOTvAC4Cpj4kJhP70aDdNOu7Ianeh/sVQz03VRVtwP3JjkK+CXgH4FX9g3fAPwLem+WbwCv645EjqmqB2enak3y3aq6sRu+HjgcOKiqruimXQC8ZlYq27d8vvv3emBR3/TjgA8CJ1bV/X3Tv1BVj1fVzcDzu2lHAxdW1WNVdTdwBb2/x6uAY7o7xd4M3N0F/c8DX+2Wnfw+6K9hr/CU3sulYX8BnA78NL0j9tcCf1xV50xumOQVwOuBjyb5UlWteioL1ZQe7ht+DDhotgrZB2xj+wPJZ/YNT7wOj7F9Nn0H+BngRcDYFO0Bdnh316rakuQget+SrwR+CvhV4KGq+kGS5/Lk94FdLvuoS+i9UV5J74ray4B3JjkQIMn8JM9Lcgjwo6r6H8DZwCtmq2Dt0IPA/X39uP+G3pGedt/dwPOSPDfJ/vS6S2ZyB103WJKXztD2KuCtSeYkGaH3zeq6bt419Lpzruza/V73bzM8Qh+CqnokyVeAB6rqMeBvk7wE+Fp3W/iHgF8DjqDXp/c48Cjwm7NVs2b0duBTSZ4FbATeMcv1NKGqHu1uG3IdvZv6fWvA5b6V5G3AXyV5ww6aXkKvG+UmoIAPVNVd3byrgF+qqvEkd9A7Sm8q0L30fwi6k6E3AKdU1W2zXY+kfZNdLrupO8kyDnzJMJc0mzxCl6RGeIQuSY0w0CWpEQa6JDXCQJekRhjoktSI/w/TIK6YopbnBgAAAABJRU5ErkJggg==\n", + "text/plain": [ + "
    " + ] + }, + "metadata": { + "tags": [], + "needs_background": "light" + } + }, + { + "output_type": "stream", + "text": [ + "/content/data/mini_speech_commands/unknown/dbb40d24_nohash_0.wav\n" + ], + "name": "stdout" + }, + { + "output_type": "display_data", + "data": { + "image/png": "iVBORw0KGgoAAAANSUhEUgAAAXQAAAEICAYAAABPgw/pAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4yLjIsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy+WH4yJAAAUbElEQVR4nO3df7RdZX3n8feHxGAFV7ESrYSEUEgdo2PBXqNdA5ZB7ApSwapUGDsVrWbsNFNsrRqnlnFSO0WdZWfNmI5ghwV1RgJ1RNMhLW1VflWFBApqQCRGMAkFw88RHYHAd/7Y+9LD5d7ck+SES568X2udlf3j2Xt/zz7nfs4+zz57J1WFJGnvt99MFyBJGg0DXZIaYaBLUiMMdElqhIEuSY0w0CWpEQa6dkuS85N8uB8+Nsktu7ieTyb5g9FWN+02fyXJ5iQPJjn6qdz2sJJUkiNnug7tHWbPdAHa85LcBjwfeBT4IfBXwPKqenCU26mqq4AXDlHPGcA7quqYgWXfNcpahvSf6fbDF0axsiSXAx8CjgOoqg+NYr1PV0nOBy7vR4+rqjNmrBgBHqHvS15XVQcCLwPGgA9ObJBkX/uAPwzYsCsLJpk14lqk3Wag72OqaivdEfpL4PGv9L+V5Fbg1n7aLye5Icn9Sb6S5KXjyyc5Osn1SX6Q5CLgmQPzjkuyZWB8fpLPJdmW5J4kn0jyIuCTwC/0XR33920f77rpx9+ZZGOSe5OsSXLIwLxK8q4kt/Y1rkqSft6RSa5I8kCSu/sanyDJ/kkeBGYBNyb5Tj/9RUku79e5IcnJA8ucn+S/J1mb5IfAvxxmfyc5I8nVE6Y93o3Sr3dVkkv7fXpNkiOmWNcxfRfRcUPsh/2SfDDJ7Um+n+TPk/xkP++CJO/ph+eNvwf68SP6fb7f+OuZ5D39Ov4xyduGed6aIVXlo/EHcBtwQj88n+6o9A/78QL+Fvgp4CeAo4HvA6+gC7y39svvD8wBbgd+B3gG8CbgEeDD/bqOA7b0w7OAG4E/AQ6gC/5j+nlnAFdPqPH8gfUcD9xN921if+C/AVcOtC3g/wAHAQuAbcDSft6FwO/THaw8vs0p9ksBR/bDzwA2Av++f57HAz8AXjhQ3wPAvxhf95D7frLnOrjd84F7gCV0XaD/C1g9sS2wFNgMLBlyP7y9fz4/AxwIfA749MC8v+yH/xXwHeCigXlfGHg9twMr+/3zWuBHwHNm+j3tY/KHR+j7js/3R8NXA1cA/2lg3h9X1b1V9f+AZcA5VXVNVT1aVRcADwGv7B/PAP5LVT1SVZ8F1k2xvSXAIcB7q+qHVfXjqrp6irYTvQU4r6qur6qHgA/QHdEvHGhzdlXdX1XfA74MHNVPf4SuK+WQndzmK+mC7+yqeriqvkQXlqcPtPlCVf19VT1WVT8ecr3DuKSqrq2q7XSBftSE+acC5wAnVtW1E+ZNtR/eAny8qjZVd67kA8BpfbfaFcAxSfYDXgV8lO6DCuAX+/njHgFW9q/3WuBBhjhPoplhoO87Xl9VB1XVYVX1b/vwHrd5YPgw4D39V/j7+w+B+XThfAiwtaoG7+h2+xTbmw/c3ofUzjpkcL19IN0DzBtoc+fA8I/owhjgfUCAa/tuk7fvxDY3V9VjA9Nun7DNzewZUz2Xce8GLq6qb+7Esk/Yh/3wbOD5VfUdupPjRwHH
0n1w3ZHkhTw50O+Z8BpOVp+eJgx0QffVfdxm4I/68B9/PKuqLgT+EZg33k/bWzDFOjcDC6Y40TrdLT7voPtgASDJAcBzga3TPpGqO6vqnVV1CPBvgD8d8md/dwDz+6PWcQsmbHNXbk36Q+BZ4yNJfnoX1nEq8PokZ+7EMk/Yh3TPZTtwVz9+BV2X2ZzqzqtcQde99hzghl2oUU8DBrom+hTwriSvSOeAJCcleTbwVbpQ+O0kz0jyBrqulclcS/cBcHa/jmcmGf9afxdwaJI5Uyx7IfC2JEcl2Z+ue+iaqrptuuKTnJrk0H70ProQfmwHi4y7hu7o8339czsOeB2weohld+RG4MX9c3km3c8ad9YdwKuBM5P85pDLXAj8TpLDkxxItw8vGjjavgJYDlzZj1/ej19dVY/uQo16GjDQ9QRVtR54J/AJukDcSHdij6p6GHhDP34v8Ga6k22TredRukA8EvgesKVvD/AluhOzdya5e5Jl/w74A+B/030oHAGcNuRTeDlwTf8rljXAmVW1abqF+uf2OuBEuhOyfwr8elV9a8jtTrXeb9OdVPw7ul8RDdunP3E936ML9RVJ3jHEIucBn6YL7O8CPwb+3cD8K4Bn80+BfjXdN4kr0V4rT+wOlSTtrTxCl6RGGOiS1IihAj3J0iS39FfurZiiza8muan/qdhnRlumJGk60/ahp7tnxbeB19Cd2FoHnF5VNw20WQRcDBxfVfcleV5VfX/PlS1JmmiYmzEtATaO/1IgyWrgFOCmgTbvBFZV1X0Aw4T5wQcfXAsXLtzpgiVpX3bdddfdXVVzJ5s3TKDP44lXyG2hu8/HoJ8FSPL3dPfw+FBV/fXEFSVZRndpOQsWLGD9+vVDbF6SNC7JVFdnj+yk6GxgEd3NfE4HPpXkoImNqurcqhqrqrG5cyf9gJEk7aJhAn0r3X05xh3Kky/B3gKs6W/g8126PvdFoylRkjSMYQJ9HbCov4R4Dt0Ve2smtPk8/f/SkuRgui6Yaa/OkySNzrSB3t/7YTlwGXAz3V3fNiRZOfAfAFwG3JPkJrpbeL63qu7ZU0VLkp5sxi79HxsbK0+KStLOSXJdVY1NNs8rRSWpEQa6JDXCQJekRhjoktSIYa4UlbQPWrji0pkuoVm3nX3SHlmvR+iS1AgDXZIaYaBLUiMMdElqhIEuSY0w0CWpEQa6JDXCQJekRhjoktQIA12SGmGgS1IjDHRJaoSBLkmNMNAlqREGuiQ1wkCXpEYY6JLUCANdkhphoEtSIwx0SWqEgS5JjTDQJakRQwV6kqVJbkmyMcmKSeafkWRbkhv6xztGX6okaUdmT9cgySxgFfAaYAuwLsmaqrppQtOLqmr5HqhRkjSEYY7QlwAbq2pTVT0MrAZO2bNlSZJ21jCBPg/YPDC+pZ820RuTfD3JZ5PMn2xFSZYlWZ9k/bZt23ahXEnSVEZ1UvQvgYVV9VLgb4ELJmtUVedW1VhVjc2dO3dEm5YkwXCBvhUYPOI+tJ/2uKq6p6oe6kf/DPj50ZQnSRrWMIG+DliU5PAkc4DTgDWDDZK8YGD0ZODm0ZUoSRrGtL9yqartSZYDlwGzgPOqakOSlcD6qloD/HaSk4HtwL3AGXuwZknSJKYNdICqWgusnTDtrIHhDwAfGG1pkqSd4ZWiktQIA12SGmGgS1IjDHRJaoSBLkmNMNAlqREGuiQ1wkCXpEYY6JLUCANdkhphoEtSIwx0SWqEgS5JjTDQJakRBrokNcJAl6RGGOiS1AgDXZIaYaBLUiMMdElqhIEuSY0w0CWpEQa6JDXCQJekRhjoktQIA12SGmGgS1Ijhgr0JEuT3JJkY5IVO2j3xiSVZGx0JUqShjFtoCeZBawCTgQWA6cnWTxJu2cDZwLXjLpISdL0hjlCXwJsrKpNVfUwsBo4ZZJ2fwh8BPjxCOuTJA1pmECfB2weGN/ST3tckpcB86vq0h2tKMmyJOuTrN+2bdtOFytJmtpunxRNsh/w
ceA907WtqnOraqyqxubOnbu7m5YkDRgm0LcC8wfGD+2njXs28BLg8iS3Aa8E1nhiVJKeWsME+jpgUZLDk8wBTgPWjM+sqgeq6uCqWlhVC4GvASdX1fo9UrEkaVLTBnpVbQeWA5cBNwMXV9WGJCuTnLynC5QkDWf2MI2qai2wdsK0s6Zoe9zulyVJ2lleKSpJjTDQJakRBrokNcJAl6RGGOiS1AgDXZIaYaBLUiMMdElqhIEuSY0w0CWpEQa6JDXCQJekRhjoktQIA12SGmGgS1IjDHRJaoSBLkmNMNAlqREGuiQ1wkCXpEYY6JLUCANdkhphoEtSIwx0SWqEgS5JjTDQJakRBrokNWKoQE+yNMktSTYmWTHJ/Hcl+UaSG5JcnWTx6EuVJO3ItIGeZBawCjgRWAycPklgf6aq/nlVHQV8FPj4yCuVJO3QMEfoS4CNVbWpqh4GVgOnDDaoqv87MHoAUKMrUZI0jNlDtJkHbB4Y3wK8YmKjJL8F/C4wBzh+shUlWQYsA1iwYMHO1ipJ2oGRnRStqlVVdQTwfuCDU7Q5t6rGqmps7ty5o9q0JInhAn0rMH9g/NB+2lRWA6/fnaIkSTtvmEBfByxKcniSOcBpwJrBBkkWDYyeBNw6uhIlScOYtg+9qrYnWQ5cBswCzquqDUlWAuurag2wPMkJwCPAfcBb92TRkqQnG+akKFW1Flg7YdpZA8NnjrguSdJO8kpRSWqEgS5JjTDQJakRBrokNcJAl6RGGOiS1AgDXZIaYaBLUiMMdElqhIEuSY0w0CWpEQa6JDXCQJekRhjoktQIA12SGmGgS1IjDHRJaoSBLkmNMNAlqREGuiQ1wkCXpEYY6JLUCANdkhphoEtSIwx0SWqEgS5JjZg90wXsioUrLp3pEpp129kn7ZH1+prtOXvqNdPeZ6gj9CRLk9ySZGOSFZPM/90kNyX5epIvJjls9KVKknZk2kBPMgtYBZwILAZOT7J4QrN/AMaq6qXAZ4GPjrpQSdKODXOEvgTYWFWbquphYDVwymCDqvpyVf2oH/0acOhoy5QkTWeYQJ8HbB4Y39JPm8pvAH+1O0VJknbeSE+KJvk1YAz4xSnmLwOWASxYsGCUm5akfd4wR+hbgfkD44f2054gyQnA7wMnV9VDk62oqs6tqrGqGps7d+6u1CtJmsIwgb4OWJTk8CRzgNOANYMNkhwNnEMX5t8ffZmSpOlMG+hVtR1YDlwG3AxcXFUbkqxMcnLf7GPAgcBfJLkhyZopVidJ2kOG6kOvqrXA2gnTzhoYPmHEdUmSdpKX/ktSIwx0SWqEgS5JjTDQJakRBrokNcJAl6RGGOiS1AgDXZIaYaBLUiMMdElqhIEuSY0w0CWpEQa6JDXCQJekRhjoktQIA12SGmGgS1IjDHRJaoSBLkmNMNAlqREGuiQ1wkCXpEYY6JLUCANdkhphoEtSIwx0SWqEgS5JjTDQJakRQwV6kqVJbkmyMcmKSea/Ksn1SbYnedPoy5QkTWfaQE8yC1gFnAgsBk5PsnhCs+8BZwCfGXWBkqThzB6izRJgY1VtAkiyGjgFuGm8QVXd1s97bA/UKEkawjBdLvOAzQPjW/ppOy3JsiTrk6zftm3brqxCkjSFp/SkaFWdW1VjVTU2d+7cp3LTktS8YQJ9KzB/YPzQfpok6WlkmEBfByxKcniSOcBpwJo9W5YkaWdNG+hVtR1YDlwG3AxcXFUbkqxMcjJAkpcn2QKcCpyTZMOeLFqS9GTD/MqFqloLrJ0w7ayB4XV0XTGSpBnilaKS1AgDXZIaYaBLUiMMdElqhIEuSY0w0CWpEQa6JDXCQJekRhjoktQIA12SGmGgS1IjDHRJaoSBLkmNMNAlqREGuiQ1wkCXpEYY6JLUCANdkhphoEtSIwx0SWqEgS5JjTDQJakRBrokNcJAl6RGGOiS1AgDXZIaYaBLUiOGCvQkS5PckmRjkhWTzN8/yUX9/GuSLBx1oZKkHZs20JPMAlYB
JwKLgdOTLJ7Q7DeA+6rqSOBPgI+MulBJ0o4Nc4S+BNhYVZuq6mFgNXDKhDanABf0w58FXp0koytTkjSd2UO0mQdsHhjfArxiqjZVtT3JA8BzgbsHGyVZBizrRx9McsuuFL0XOpgJ++LpKn63gr3o9QJfs96+9JodNtWMYQJ9ZKrqXODcp3KbTwdJ1lfV2EzXoeH4eu19fM06w3S5bAXmD4wf2k+btE2S2cBPAveMokBJ0nCGCfR1wKIkhyeZA5wGrJnQZg3w1n74TcCXqqpGV6YkaTrTdrn0feLLgcuAWcB5VbUhyUpgfVWtAf4H8OkkG4F76UJf/2Sf62bay/l67X18zYB4IC1JbfBKUUlqhIEuSY0w0CU9bSU5I8knZrqOvYWBLkmNMNB3U5KVSd49MP5HSc5M8t4k65J8Pcl/7OcdkOTSJDcm+WaSN89c5QJIsjDJzUk+lWRDkr9J8hNJjkrytf71uyTJc2a61hb0+/ubA+O/l+RDSS5P8pEk1yb5dpJjJ1n2pCRfTXJwkvOT/NckX0myKcmb+jZJ8rH+7+sb439jSVYlObkfviTJef3w2/u/2UnfB0/NXhkdA333nQf8OkCS/eh+snknsIjuPjhHAT+f5FXAUuCOqvq5qnoJ8NczU7ImWASsqqoXA/cDbwT+HHh/Vb0U+AbwH2awvn3F7KpaArybCfs7ya8AK4DXVtX4Jf4vAI4Bfhk4u5/2Brq/uZ8DTgA+luQFwFXA+IfEPLobDdJPu7Ifnux9sFcx0HdTVd0G3JPkaOCXgH8AXj4wfD3wz+jeLN8AXtMfiRxbVQ/MTNWa4LtVdUM/fB1wBHBQVV3RT7sAeNWMVLZv+Vz/73XAwoHpxwPvB06qqvsGpn++qh6rqpuA5/fTjgEurKpHq+ou4Aq6v8ergGP7O8XeBNzVB/0vAF/pl534PhisYa/wlN7LpWF/BpwB/DTdEfurgT+uqnMmNkzyMuC1wIeTfLGqVj6VhWpSDw0MPwocNFOF7AO288QDyWcODI+/Do/yxGz6DvAzwM8C6ydpD7DDu7tW1dYkB9F9S74S+CngV4EHq+oHSZ7Lk98Hdrnsoy6he6O8nO6K2suAtyc5ECDJvCTPS3II8KOq+p/Ax4CXzVTB2qEHgPsG+nH/Nd2RnnbfXcDzkjw3yf503SXTuZ2+GyzJi6dpexXw5iSzksyl+2Z1bT/va3TdOVf27X6v/7cZHqGPQFU9nOTLwP1V9SjwN0leBHy1vy38g8CvAUfS9ek9BjwC/OZM1axpvRX4ZJJnAZuAt81wPU2oqkf624ZcS3dTv28Nudy3krwF+Iskr9tB00voulFuBAp4X1Xd2c+7CvilqtqY5Ha6o/SmAt1L/0egPxl6PXBqVd060/VI2jfZ5bKb+pMsG4EvGuaSZpJH6JLUCI/QJakRBrokNcJAl6RGGOiS1AgDXZIa8f8BDNWp19GTwoIAAAAASUVORK5CYII=\n", + "text/plain": [ + "
    " + ] + }, + "metadata": { + "tags": [], + "needs_background": "light" + } + }, + { + "output_type": "stream", + "text": [ + "/content/data/mini_speech_commands/unknown/b9515bf3_nohash_1.wav\n" + ], + "name": "stdout" + }, + { + "output_type": "display_data", + "data": { + "image/png": "iVBORw0KGgoAAAANSUhEUgAAAXQAAAEICAYAAABPgw/pAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4yLjIsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy+WH4yJAAAUd0lEQVR4nO3df7RdZX3n8feHxGAFV7FytZIEQyE6RseCvUa7BixF7ApSwapUGDsVrWbsNFNsrRqnlnFSO0WZZWdNTSvYYUGdkUAd0XRIh7Yqv6pALhTUgMg1gkkoGH6O6AgEvvPH2ZeeXO7NPUlOuOTJ+7XWWdk/nr339+xz7ufs8+yzd1JVSJL2fvvNdgGSpOEw0CWpEQa6JDXCQJekRhjoktQIA12SGmGga7ckOT/JR7vhY5Lcuovr+VSSPxhudTNu81eSbEryUJKjnsptDypJJTlituvQ3mHubBegPS/J7cDzgceAHwJ/A6yoqoeGuZ2qugp48QD1nA68q6qO7lv2PcOsZUD/hd5++OIwVpbkcuAjwLEAVfWRYaz36SrJ+cDl3eixVXX6rBUjwCP0fckbqupA4BXAKPDhyQ2S7Gsf8C8ENuzKgknmDLkWabcZ6PuYqtpC7wj9ZfDEV/rfSnIbcFs37ZeT3JjkgSRfTfLyieWTHJXkhiQ/SHIR8My+eccm2dw3vjDJ55NsTXJvkk8meQnwKeDnu66OB7q2T3TddOPvTjKe5L4ka5Mc0jevkrwnyW1djauTpJt3RJIrkjyY5J6uxu0k2T/JQ8Ac4KYk3+mmvyTJ5d06NyQ5qW+Z85P8eZJ1SX4I/OIg+zvJ6UmunjTtiW6Ubr2rk1za7dNrkxw+zbqO7rqIjh1gP+yX5MNJ7kjy/SR/meQnu3kXJHlfNzx/4j3QjR/e7fP9Jl7PJO/r1vFPSd4xyPPWLKkqH40/gNuB47vhhfSOSv+wGy/g74CfAn4COAr4PvAqeoH39m75/YF5wB3A7wDPAN4CPAp8tFvXscDmbngOcBPwJ8AB9IL/6G7e6cDVk2o8v289xwH30Ps2sT/wp8CVfW0L+N/AQcChwFZgWTfvQuD36R2sPLHNafZLAUd0w88AxoH/0D3P44AfAC/uq+9B4F9NrHvAfT/Vc+3f7vnAvcBSel2g/xNYM7ktsAzYBCwdcD+8s3s+PwMcCHwe+EzfvL/uhv818B3gor55X+x7PbcBq7r983rgR8BzZvs97WPqh0fo+44vdEfDVwNXAP+5b94fV9V9VfX/gOXAOVV1bVU9VlUXAA8Dr+4ezwD+a1U9WlWfA9ZPs72lwCHA+6vqh1X146q6epq2k70NOK+qbqiqh4EP0TuiX9TX5qyqeqCqvgd8BTiym/4ova6UQ3Zym6+mF3xnVdUjVfVlemF5Wl+bL1bVP1TV41X14wHXO4hLquq6qtpGL9CPnDT/FOAc4ISqum7SvOn2w9uAT1TVxuqdK/kQcGrXrXYFcHSS/YDXAB+n90EF8Avd/AmPAqu613sd8BADnCfR7DDQ9x1vrKqDquqFVfXvuvCesKlv+IXA+7qv8A90HwIL6YXzIcCWquq/o9sd02xvIXBHF1I765D+9XaBdC8wv6/NXX3DP6IXxgAfAAJc13WbvHMntrmpqh7vm3bHpG1uYs+Y7rlMeC9wcVV9cyeW3W4fdsNzgedX1XfonRw/
EjiG3gfXnUlezJMD/d5Jr+FU9elpwkAX9L66T9gE/FEX/hOPZ1XVhcA/AfMn+mk7h06zzk3AodOcaJ3pFp930vtgASDJAcBzgS0zPpGqu6rq3VV1CPBvgT8b8Gd/dwILu6PWCYdO2uau3Jr0h8CzJkaS/PQurOMU4I1JztiJZbbbh/Seyzbg7m78CnpdZvOqd17lCnrda88BbtyFGvU0YKBrsk8D70nyqvQckOTEJM8GvkYvFH47yTOSvIle18pUrqP3AXBWt45nJpn4Wn83sCDJvGmWvRB4R5Ijk+xPr3vo2qq6fabik5ySZEE3ej+9EH58B4tMuJbe0ecHuud2LPAGYM0Ay+7ITcBLu+fyTHo/a9xZdwKvBc5I8psDLnMh8DtJDktyIL19eFHf0fYVwArgym788m786qp6bBdq1NOAga7tVNUY8G7gk/QCcZzeiT2q6hHgTd34fcBb6Z1sm2o9j9ELxCOA7wGbu/YAX6Z3YvauJPdMsezfA38A/C96HwqHA6cO+BReCVzb/YplLXBGVW2caaHuub0BOIHeCdk/A369qr414HanW++36Z1U/Ht6vyIatE9/8nq+Ry/UVyZ51wCLnAd8hl5gfxf4MfDv++ZfATybfw70q+l9k7gS7bWyfXeoJGlv5RG6JDXCQJekRhjoktSIgQI9ybIkt3aXYq+cps2vJrm5++3vZ4dbpiRpJjOeFE3vJkTfBl5H75cK64HTqurmvjaLgYuB46rq/iTPq6rv72i9Bx98cC1atGg3y5ekfcv1119/T1WNTDVvkLvrLQXGJ376lWQNcDJwc1+bdwOrq+p+gJnCHGDRokWMjY0NsHlJ0oQk012dPVCXy3y2v+R5M9tfDg3wIuBFSf4hyTVJlk1TyPIkY0nGtm7dOsCmJUmDGtZJ0bnAYnp3ZzsN+HSSgyY3qqpzq2q0qkZHRqb8xiBJ2kWDBPoWejdamrCAJ99TYzOwtrsj23fp9bkvHk6JkqRBDBLo64HF3T0h5tG7BHvtpDZfoPtvt5IcTK8LZsbLrSVJwzNjoHc381kBXAbcQu82nhuSrOr7H10uA+5NcjO9ezK/v6ru3VNFS5KebNbu5TI6Olr+ykWSdk6S66tqdKp5XikqSY0w0CWpEQa6JDVikCtFJe2DFq28dLZLaNbtZ524R9brEbokNcJAl6RGGOiS1AgDXZIaYaBLUiMMdElqhIEuSY0w0CWpEQa6JDXCQJekRhjoktQIA12SGmGgS1IjDHRJaoSBLkmNMNAlqREGuiQ1wkCXpEYY6JLUCANdkhphoEtSIwx0SWqEgS5JjRgo0JMsS3JrkvEkK6eYf3qSrUlu7B7vGn6pkqQdmTtTgyRzgNXA64DNwPoka6vq5klNL6qqFXugRknSAAY5Ql8KjFfVxqp6BFgDnLxny5Ik7axBAn0+sKlvfHM3bbI3J/l6ks8lWTjVipIsTzKWZGzr1q27UK4kaTrDOin618Ciqno58HfABVM1qqpzq2q0qkZHRkaGtGlJEgwW6FuA/iPuBd20J1TVvVX1cDf6F8DPDac8SdKgBgn09cDiJIclmQecCqztb5DkBX2jJwG3DK9ESdIgZvyVS1VtS7ICuAyYA5xXVRuSrALGqmot8NtJTgK2AfcBp+/BmiVJU5gx0AGqah2wbtK0M/uGPwR8aLilSZJ2hleKSlIjDHRJaoSBLkmNMNAlqREGuiQ1wkCXpEYY6JLUCANdkhphoEtSIwx0SWqEgS5JjTDQJakRBrokNcJAl6RGGOiS1AgDXZIaYaBLUiMMdElqhIEuSY0w0CWpEQa6JDXCQJekRhjoktQIA12SGmGgS1IjDHRJaoSBLkmNGCjQkyxLcmuS8SQrd9DuzUkqyejwSpQkDWLGQE8yB1gNnAAsAU5LsmSKds8GzgCuHXaRkqSZDXKEvhQYr6qNVfUIsAY4eYp2fwh8DPjxEOuTJA1okECfD2zqG9/cTXtCklcAC6vq0h2tKMnyJGNJxrZu
3brTxUqSprfbJ0WT7Ad8AnjfTG2r6tyqGq2q0ZGRkd3dtCSpzyCBvgVY2De+oJs24dnAy4DLk9wOvBpY64lRSXpqDRLo64HFSQ5LMg84FVg7MbOqHqyqg6tqUVUtAq4BTqqqsT1SsSRpSjMGelVtA1YAlwG3ABdX1YYkq5KctKcLlCQNZu4gjapqHbBu0rQzp2l77O6XJUnaWV4pKkmNMNAlqREGuiQ1wkCXpEYY6JLUCANdkhphoEtSIwx0SWqEgS5JjTDQJakRBrokNcJAl6RGGOiS1AgDXZIaYaBLUiMMdElqhIEuSY0w0CWpEQa6JDXCQJekRhjoktQIA12SGmGgS1IjDHRJaoSBLkmNMNAlqREGuiQ1YqBAT7Isya1JxpOsnGL+e5J8I8mNSa5OsmT4pUqSdmTGQE8yB1gNnAAsAU6bIrA/W1X/sqqOBD4OfGLolUqSdmiQI/SlwHhVbayqR4A1wMn9Darq//aNHgDU8EqUJA1i7gBt5gOb+sY3A6+a3CjJbwG/C8wDjptqRUmWA8sBDj300J2tVZK0A0M7KVpVq6vqcOCDwIenaXNuVY1W1ejIyMiwNi1JYrBA3wIs7Btf0E2bzhrgjbtTlCRp5w0S6OuBxUkOSzIPOBVY298gyeK+0ROB24ZXoiRpEDP2oVfVtiQrgMuAOcB5VbUhySpgrKrWAiuSHA88CtwPvH1PFi1JerJBTopSVeuAdZOmndk3fMaQ65Ik7SSvFJWkRhjoktQIA12SGmGgS1IjDHRJaoSBLkmNMNAlqREGuiQ1wkCXpEYY6JLUCANdkhphoEtSIwx0SWqEgS5JjTDQJakRBrokNcJAl6RGGOiS1AgDXZIaYaBLUiMMdElqhIEuSY0w0CWpEQa6JDXCQJekRhjoktSIubNdwK5YtPLS2S6hWbefdeIeWa+v2Z6zp14z7X0GOkJPsizJrUnGk6ycYv7vJrk5ydeTfCnJC4dfqiRpR2YM9CRzgNXACcAS4LQkSyY1+0dgtKpeDnwO+PiwC5Uk7dggR+hLgfGq2lhVjwBrgJP7G1TVV6rqR93oNcCC4ZYpSZrJIIE+H9jUN765mzad3wD+ZqoZSZYnGUsytnXr1sGrlCTNaKi/cknya8AocPZU86vq3KoararRkZGRYW5akvZ5g/zKZQuwsG98QTdtO0mOB34f+IWqeng45UmSBjXIEfp6YHGSw5LMA04F1vY3SHIUcA5wUlV9f/hlSpJmMmOgV9U2YAVwGXALcHFVbUiyKslJXbOzgQOBv0pyY5K106xOkrSHDHRhUVWtA9ZNmnZm3/DxQ65LkrSTvPRfkhphoEtSIwx0SWqEgS5JjTDQJakRBrokNcJAl6RGGOiS1AgDXZIaYaBLUiMMdElqhIEuSY0w0CWpEQa6JDXCQJekRhjoktQIA12SGmGgS1IjDHRJaoSBLkmNMNAlqREGuiQ1wkCXpEYY6JLUCANdkhphoEtSIwx0SWrEQIGeZFmSW5OMJ1k5xfzXJLkhybYkbxl+mZKkmcwY6EnmAKuBE4AlwGlJlkxq9j3gdOCzwy5QkjSYuQO0WQqMV9VGgCRrgJOBmycaVNXt3bzH90CNkqQBDNLlMh/Y1De+uZu205IsTzKWZGzr1q27sgpJ0jSe0pOiVXVuVY1W1ejIyMhTuWlJat4ggb4FWNg3vqCbJkl6Ghkk0NcDi5MclmQecCqwds+WJUnaWTMGelVtA1YAlwG3ABdX1YYkq5KcBJDklUk2A6cA5yTZsCeLliQ92SC/cqGq1gHrJk07s294Pb2uGEnSLPFKUUlqhIEuSY0w0CWpEQa6JDXCQJekRhjoktQIA12SGmGgS1IjDHRJaoSBLkmNMNAlqREGuiQ1wkCXpEYY6JLUCANdkhphoEtSIwx0SWqEgS5JjTDQJakRBrokNcJAl6RGGOiS1AgDXZIaYaBLUiMMdElqhIEuSY0w0CWpEQMFepJlSW5NMp5k5RTz909yUTf/2iSLhl2o
JGnHZgz0JHOA1cAJwBLgtCRLJjX7DeD+qjoC+BPgY8MuVJK0Y4McoS8FxqtqY1U9AqwBTp7U5mTggm74c8Brk2R4ZUqSZjJ3gDbzgU1945uBV03Xpqq2JXkQeC5wT3+jJMuB5d3oQ0lu3ZWi90IHM2lfPF3F71awF71e4GvW2ZdesxdON2OQQB+aqjoXOPep3ObTQZKxqhqd7To0GF+vvY+vWc8gXS5bgIV94wu6aVO2STIX+Eng3mEUKEkazCCBvh5YnOSwJPOAU4G1k9qsBd7eDb8F+HJV1fDKlCTNZMYul65PfAVwGTAHOK+qNiRZBYxV1VrgvwOfSTIO3Ecv9PXP9rlupr2cr9fex9cMiAfSktQGrxSVpEYY6JLUCANd0tNWktOTfHK269hbGOiS1AgDfTclWZXkvX3jf5TkjCTvT7I+ydeT/Kdu3gFJLk1yU5JvJnnr7FUugCSLktyS5NNJNiT52yQ/keTIJNd0r98lSZ4z27W2oNvf3+wb/70kH0lyeZKPJbkuybeTHDPFsicm+VqSg5Ocn+S/Jflqko1J3tK1SZKzu7+vb0z8jSVZneSkbviSJOd1w+/s/manfB88NXtleAz03Xce8OsASfaj95PNu4DF9O6DcyTwc0leAywD7qyqn62qlwH/Z3ZK1iSLgdVV9VLgAeDNwF8CH6yqlwPfAP7jLNa3r5hbVUuB9zJpfyf5FWAl8PqqmrjE/wXA0cAvA2d1095E72/uZ4HjgbOTvAC4Cpj4kJhP70aDdNOu7Ianeh/sVQz03VRVtwP3JjkK+CXgH4FX9g3fAPwLem+WbwCv645EjqmqB2enak3y3aq6sRu+HjgcOKiqruimXQC8ZlYq27d8vvv3emBR3/TjgA8CJ1bV/X3Tv1BVj1fVzcDzu2lHAxdW1WNVdTdwBb2/x6uAY7o7xd4M3N0F/c8DX+2Wnfw+6K9hr/CU3sulYX8BnA78NL0j9tcCf1xV50xumOQVwOuBjyb5UlWteioL1ZQe7ht+DDhotgrZB2xj+wPJZ/YNT7wOj7F9Nn0H+BngRcDYFO0Bdnh316rakuQget+SrwR+CvhV4KGq+kGS5/Lk94FdLvuoS+i9UV5J74ray4B3JjkQIMn8JM9Lcgjwo6r6H8DZwCtmq2Dt0IPA/X39uP+G3pGedt/dwPOSPDfJ/vS6S2ZyB103WJKXztD2KuCtSeYkGaH3zeq6bt419Lpzruza/V73bzM8Qh+CqnokyVeAB6rqMeBvk7wE+Fp3W/iHgF8DjqDXp/c48Cjwm7NVs2b0duBTSZ4FbATeMcv1NKGqHu1uG3IdvZv6fWvA5b6V5G3AXyV5ww6aXkKvG+UmoIAPVNVd3byrgF+qqvEkd9A7Sm8q0L30fwi6k6E3AKdU1W2zXY+kfZNdLrupO8kyDnzJMJc0mzxCl6RGeIQuSY0w0CWpEQa6JDXCQJekRhjoktSI/w/TIK6YopbnBgAAAABJRU5ErkJggg==\n", + "text/plain": [ + "
    " + ] + }, + "metadata": { + "tags": [], + "needs_background": "light" + } + }, + { + "output_type": "stream", + "text": [ + "/content/data/mini_speech_commands/unknown/15c563d7_nohash_3.wav\n" + ], + "name": "stdout" + }, + { + "output_type": "display_data", + "data": { + "image/png": "iVBORw0KGgoAAAANSUhEUgAAAXoAAAEICAYAAABRSj9aAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4yLjIsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy+WH4yJAAAXuklEQVR4nO3df7RddX3m8fdDMFChFYRbW/KDBIgOodrgXEK7BGQUMEgl1IEhVKeg1gwOmWLRahwtOlGnCLNs14xxgE6zoM5AQJF6Z4hDqUCAQSCXX2LQlEsEkiAYCFARBBKe+eN8Q3dOb3J3cu/NTb55Xmudlb2/P/b5nHNunrPz3eeeyDYREVGv3ca6gIiIGF0J+oiIyiXoIyIql6CPiKhcgj4ionIJ+oiIyiXoY1RIukzSl8r20ZJWbONxLpb0ZyNb3ZD3+fuSVkl6XtLh2/O+25JkSYeMdR2xc9h9rAuIsSPpEeBNwAbgF8B3gXm2nx/J+7F9K/CWFvWcBfyR7aMac88eyVpa+i90nofvjMTBJN0MfAE4FsD2F0biuDsqSZcBN5fdY22fNWbFBJAz+oD32d4beDvQC3yue4CkXe2E4EBg+bZMlDRuhGuJGLYEfQBgew2dM/rfgteWBs6R9BDwUGn7PUn3SXpW0u2S3rZxvqTDJd0j6eeSrgL2bPQdK2l1Y3+SpG9LWivpaUlfk3QocDHwu2XJ5Nky9rUloLL/UUkDktZJ6pN0QKPPks6W9FCpcaEklb5DJC2V9Jykp0qNm5C0h6TngXHA/ZIeLu2HSrq5HHO5pJMbcy6T9N8lLZH0C+BftXm+JZ0l6bautteWY8pxF0q6rjynd0o6eDPHOqosNR3b4nnYTdLnJD0q6WeS/kbSG0rf5ZI+UbYnbPwZKPsHl+d8t42vp6RPlGP8VNKH2jzuGCO2c9tFb8AjwHFlexKds9gvln0DNwBvBH4FOBz4GXAknSA8s8zfAxgPPAr8CfA64FTgFeBL5VjHAqvL9jjgfuAvgL3ovCEcVfrOAm7rqvGyxnHeBTxF518fewD/DbilMdbA/wH2ASYDa4FZpe9K4LN0Tm5eu8/NPC8GDinbrwMGgP9YHue7gJ8Db2nU9xzwjo3HbvncD/ZYm/d7GfA0MJPOEuv/AhZ3jwVmAauAmS2fhw+Xx3MQsDfwbeAbjb7/Xbb/AHgYuKrR953G67keWFCen/cCLwD7jvXPdG6D33JGH39bzp5vA5YC/7nR9+e219l+EZgLXGL7TtsbbF8OvAT8Trm9DvhL26/Y/hawbDP3NxM4APhT27+w/Uvbt21mbLcPAIts32P7JeAzdP4FMKUx5gLbz9p+DLgJmFHaX6GzJHPAVt7n79AJxAtsv2z7RjohekZjzHds/z/br9r+ZcvjtnGt7btsr6cT9DO6+k8DLgFOtH1XV9/mnocPAF+1vdKdazGfAeaU5bmlwFGSdgOOAS6k8wYG8M7Sv9ErwILyei8BnqfFdZgYGwn6OMX2PrYPtP3vS6hvtKqxfSDwibIU8Gx5c5hEJ7QPANbYbn5D3qObub9JwKMlvLbWAc3jlqB6GpjQGPNEY/sFOiEN8ClAwF1l+eXDW3Gfq2y/2mh7tOs+VzE6NvdYNvo4cLXtH27F3E2ew7K9O/Am2w/TuSg/Aziazhva45Lewj8P+qe7XsPB6osdRII+tqQZ
3KuAL5c3hY2319u+EvgpMGHjOnAxeTPHXAVM3swF3qG+SvVxOm84AEjaC9gPWDPkA7GfsP1R2wcA/w74esuPJz4OTCpnuRtN7rrPbfkK2F8Ar9+4I+k3tuEYpwGnSDp3K+Zs8hzSeSzrgSfL/lI6S2/j3blus5TOMt2+wH3bUGPsABL00dZfAWdLOlIde0k6SdKvAt+nExZ/LOl1kt5PZ4lmMHfReWO4oBxjT0kblweeBCZKGr+ZuVcCH5I0Q9IedJaZ7rT9yFDFSzpN0sSy+wydcH51C1M2upPO2eqnymM7FngfsLjF3C25HzisPJY96Xz8cms9DrwbOFfSx1rOuRL4E0lTJe1N5zm8qnF2vhSYB9xS9m8u+7fZ3rANNcYOIEEfrdjuBz4KfI1OUA7QuaCI7ZeB95f9dcDpdC7yDXacDXSC8hDgMWB1GQ9wI50Lwk9IemqQuX8P/BlwDZ03i4OBOS0fwhHAneVTNX3AubZXDjWpPLb3ASfSuRD8deAPbf+45f1u7rj/QOdi5t/T+VRT22sG3cd5jE7Yz5f0Ry2mLAK+QSfIfwL8EvgPjf6lwK/yT0F/G51/edxC7LS06bJqRETUJmf0ERGVS9BHRFQuQR8RUbkEfURE5Xa4L6vaf//9PWXKlLEuIyJip3L33Xc/ZbtnsL4dLuinTJlCf3//WJcREbFTkbS530bP0k1ERO0S9BERlUvQR0RULkEfEVG5BH1EROUS9BERlUvQR0RULkEfEVG5BH1EROV2uN+MHa4p868b6xKq9cgFJ411CRGxDXJGHxFRuQR9RETlEvQREZVL0EdEVC5BHxFRuQR9RETlEvQREZVL0EdEVC5BHxFRuQR9RETlWgW9pFmSVkgakDR/kP6zJT0g6T5Jt0maXtqnSHqxtN8n6eKRfgAREbFlQ37XjaRxwELgeGA1sExSn+0HG8OusH1xGX8y8FVgVul72PaMkS07IiLaanNGPxMYsL3S9svAYmB2c4Dtf2zs7gV45EqMiIjhaBP0E4BVjf3VpW0Tks6R9DBwIfDHja6pku6VtFTS0YPdgaS5kvol9a9du3Yryo+IiKGM2MVY2wttHwx8Gvhcaf4pMNn24cB5wBWSfm2QuZfa7rXd29PTM1IlRUQE7YJ+DTCpsT+xtG3OYuAUANsv2X66bN8NPAy8edtKjYiIbdEm6JcB0yRNlTQemAP0NQdImtbYPQl4qLT3lIu5SDoImAasHInCIyKinSE/dWN7vaR5wPXAOGCR7eWSFgD9tvuAeZKOA14BngHOLNOPARZIegV4FTjb9rrReCARETG4Vv+VoO0lwJKutvMb2+duZt41wDXDKTAiIoYnvxkbEVG5BH1EROUS9BERlWu1Rh8xWqbMv26sS6jWIxecNNYlxA4iZ/QREZVL0EdEVC5BHxFRuQR9RETlEvQREZVL0EdEVC5BHxFRuQR9RETlEvQREZVL0EdEVC5BHxFRuQR9RETlEvQREZVL0EdEVK5V0EuaJWmFpAFJ8wfpP1vSA5Luk3SbpOmNvs+UeSskvWcki4+IiKENGfSSxgELgROB6cAZzSAvrrD9VtszgAuBr5a504E5wGHALODr5XgREbGdtDmjnwkM2F5p+2VgMTC7OcD2PzZ29wJctmcDi22/ZPsnwEA5XkREbCdt/oepCcCqxv5q4MjuQZLOAc4DxgPvasy9o2vuhEHmzgXmAkyePLlN3RER0dKIXYy1vdD2wcCngc9t5dxLbffa7u3p6RmpkiIignZBvwaY1NifWNo2ZzFwyjbOjYiIEdYm6JcB0yRNlTSezsXVvuYASdMauycBD5XtPmCOpD0kTQWmAXcNv+yIiGhryDV62+slzQOuB8YBi2wvl7QA6LfdB8yTdBzwCvAMcGaZu1zS1cCDwHrgHNsbRumxRETEINpcjMX2EmBJV9v5je1ztzD3y8CXt7XAiIgYnlZBHxGx0ZT51411CdV65IKTRuW4+QqEiIjKJegjIiqXoI+I
qFyCPiKicgn6iIjKJegjIiqXoI+IqFyCPiKicgn6iIjKJegjIiqXoI+IqFyCPiKicgn6iIjKJegjIiqXoI+IqFyCPiKicgn6iIjKJegjIirXKuglzZK0QtKApPmD9J8n6UFJP5D0PUkHNvo2SLqv3PpGsviIiBjakP9nrKRxwELgeGA1sExSn+0HG8PuBXptvyDpY8CFwOml70XbM0a47oiIaKnNGf1MYMD2StsvA4uB2c0Btm+y/ULZvQOYOLJlRkTEtmoT9BOAVY391aVtcz4CfLexv6ekfkl3SDplsAmS5pYx/WvXrm1RUkREtDXk0s3WkPRBoBd4Z6P5QNtrJB0E3CjpAdsPN+fZvhS4FKC3t9cjWVNExK6uzRn9GmBSY39iaduEpOOAzwIn235pY7vtNeXPlcDNwOHDqDciIrZSm6BfBkyTNFXSeGAOsMmnZyQdDlxCJ+R/1mjfV9IeZXt/4B1A8yJuRESMsiGXbmyvlzQPuB4YByyyvVzSAqDfdh9wEbA38E1JAI/ZPhk4FLhE0qt03lQu6Pq0TkREjLJWa/S2lwBLutrOb2wft5l5twNvHU6BERExPPnN2IiIyiXoIyIql6CPiKhcgj4ionIJ+oiIyiXoIyIql6CPiKhcgj4ionIJ+oiIyiXoIyIql6CPiKhcgj4ionIJ+oiIyiXoIyIql6CPiKhcgj4ionIJ+oiIyiXoIyIq1yroJc2StELSgKT5g/SfJ+lBST+Q9D1JBzb6zpT0ULmdOZLFR0TE0IYMeknjgIXAicB04AxJ07uG3Qv02n4b8C3gwjL3jcDngSOBmcDnJe07cuVHRMRQ2pzRzwQGbK+0/TKwGJjdHGD7JtsvlN07gIll+z3ADbbX2X4GuAGYNTKlR0REG22CfgKwqrG/urRtzkeA727j3IiIGGG7j+TBJH0Q6AXeuZXz5gJzASZPnjySJUVE7PLanNGvASY19ieWtk1IOg74LHCy7Ze2Zq7tS2332u7t6elpW3tERLTQJuiXAdMkTZU0HpgD9DUHSDocuIROyP+s0XU9cIKkfctF2BNKW0REbCdDLt3YXi9pHp2AHgcssr1c0gKg33YfcBGwN/BNSQCP2T7Z9jpJX6TzZgGwwPa6UXkkERExqFZr9LaXAEu62s5vbB+3hbmLgEXbWmBERAxPfjM2IqJyCfqIiMol6CMiKpegj4ioXII+IqJyCfqIiMol6CMiKpegj4ioXII+IqJyCfqIiMol6CMiKpegj4ioXII+IqJyCfqIiMol6CMiKpegj4ioXII+IqJyCfqIiMol6CMiKtcq6CXNkrRC0oCk+YP0HyPpHknrJZ3a1bdB0n3l1jdShUdERDtD/ufgksYBC4HjgdXAMkl9th9sDHsMOAv45CCHeNH2jBGoNSIitsGQQQ/MBAZsrwSQtBiYDbwW9LYfKX2vjkKNERExDG2WbiYAqxr7q0tbW3tK6pd0h6RTBhsgaW4Z07927dqtOHRERAxle1yMPdB2L/AHwF9KOrh7gO1Lbffa7u3p6dkOJUVE7DraBP0aYFJjf2Jpa8X2mvLnSuBm4PCtqC8iIoapTdAvA6ZJmippPDAHaPXpGUn7StqjbO8PvIPG2n5ERIy+IYPe9npgHnA98CPgatvLJS2QdDKApCMkrQZOAy6RtLxMPxTol3Q/cBNwQdendSIiYpS1+dQNtpcAS7razm9sL6OzpNM973bgrcOsMSIihiG/GRsRUbkEfURE5RL0ERGVS9BHRFQuQR8RUbkEfURE5RL0ERGVS9BHRFQuQR8RUbkEfURE5RL0ERGVS9BHRFQuQR8RUbkEfURE5RL0ERGVS9BHRFQuQR8RUbkEfURE5VoFvaRZklZIGpA0f5D+YyTdI2m9pFO7+s6U9FC5nTlShUdERDtDBr2kccBC4ERgOnCGpOldwx4DzgKu6Jr7RuDzwJHATODzkvYdftkREdFWmzP6mcCA7ZW2XwYWA7ObA2w/YvsHwKtdc98D3GB7
ne1ngBuAWSNQd0REtNQm6CcAqxr7q0tbG8OZGxERI2CHuBgraa6kfkn9a9euHetyIiKq0ibo1wCTGvsTS1sbrebavtR2r+3enp6eloeOiIg22gT9MmCapKmSxgNzgL6Wx78eOEHSvuUi7AmlLSIitpMhg972emAenYD+EXC17eWSFkg6GUDSEZJWA6cBl0haXuauA75I581iGbCgtEVExHaye5tBtpcAS7razm9sL6OzLDPY3EXAomHUGBERw7BDXIyNiIjRk6CPiKhcgj4ionIJ+oiIyiXoIyIql6CPiKhcgj4ionIJ+oiIyiXoIyIql6CPiKhcgj4ionIJ+oiIyiXoIyIql6CPiKhcgj4ionIJ+oiIyiXoIyIql6CPiKhcgj4ionKtgl7SLEkrJA1Imj9I/x6Srir9d0qaUtqnSHpR0n3ldvHIlh8REUMZ8j8HlzQOWAgcD6wGlknqs/1gY9hHgGdsHyJpDvAV4PTS97DtGSNcd0REtNTmjH4mMGB7pe2XgcXA7K4xs4HLy/a3gHdL0siVGRER26pN0E8AVjX2V5e2QcfYXg88B+xX+qZKulfSUklHD3YHkuZK6pfUv3bt2q16ABERsWWjfTH2p8Bk24cD5wFXSPq17kG2L7Xda7u3p6dnlEuKiNi1tAn6NcCkxv7E0jboGEm7A28Anrb9ku2nAWzfDTwMvHm4RUdERHttgn4ZME3SVEnjgTlAX9eYPuDMsn0qcKNtS+opF3ORdBAwDVg5MqVHREQbQ37qxvZ6SfOA64FxwCLbyyUtAPpt9wF/DXxD0gCwjs6bAcAxwAJJrwCvAmfbXjcaDyQiIgY3ZNAD2F4CLOlqO7+x/UvgtEHmXQNcM8waIyJiGPKbsRERlUvQR0RULkEfEVG5BH1EROUS9BERlUvQR0RULkEfEVG5BH1EROUS9BERlUvQR0RULkEfEVG5BH1EROUS9BERlUvQR0RULkEfEVG5BH1EROUS9BERlUvQR0RULkEfEVG5VkEvaZakFZIGJM0fpH8PSVeV/jslTWn0faa0r5D0npErPSIi2hgy6CWNAxYCJwLTgTMkTe8a9hHgGduHAH8BfKXMnQ7MAQ4DZgFfL8eLiIjtpM0Z/UxgwPZK2y8Di4HZXWNmA5eX7W8B75ak0r7Y9ku2fwIMlONFRMR2snuLMROAVY391cCRmxtje72k54D9SvsdXXMndN+BpLnA3LL7vKQVrarf+e0PPDXWRbSlr4x1BTuEneY1y+v1ml3lNTtwcx1tgn7U2b4UuHSs69jeJPXb7h3rOqK9vGY7n7xm7ZZu1gCTGvsTS9ugYyTtDrwBeLrl3IiIGEVtgn4ZME3SVEnj6Vxc7esa0wecWbZPBW607dI+p3wqZyowDbhrZEqPiIg2hly6KWvu84DrgXHAItvLJS0A+m33AX8NfEPSALCOzpsBZdzVwIPAeuAc2xtG6bHsjHa55aoK5DXb+ezyr5k6J94REVGr/GZsRETlEvQREZVL0EfETknSWZK+NtZ17AwS9BERlUvQjyJJCyR9vLH/ZUnnSvpTScsk/UDSfyp9e0m6TtL9kn4o6fSxqzwAJE2R9CNJfyVpuaS/k/QrkmZIuqO8ftdK2nesa61Beb5/2Nj/pKQvSLpZ0lck3SXpHyQdPcjckyR9X9L+ki6T9F8l3S5ppaRTyxhJuqj8/Xpg498xSQslnVy2r5W0qGx/uPydHfTnYPs8KyMjQT+6FgF/CCBpNzofO32Czu8TzARmAP9S0jF0vvTtcdu/bfu3gP87NiVHl2nAQtuHAc8C/xr4G+DTtt8GPAB8fgzr21Xsbnsm8HG6nm9Jvw/MB95re+NXHfwmcBTwe8AFpe39dP7O/TZwHHCRpN8EbgU2vnlMoPPljZS2W8r2YD8HO40E/Siy/QjwtKTDgROAe4EjGtv3AP+Czg/RA8Dx5czlaNvPjU3V0eUntu8r23cDBwP72F5a2i4HjhmTynYt3y5/3g1MabS/C/g0
cJLtZxrtf2v7VdsPAm8qbUcBV9reYPtJYCmdv4+3AkeXb9t9EHiyvAH8LnB7mdv9c9CsYYe3Q3zXTeX+B3AW8Bt0zvDfDfy57Uu6B0p6O/Be4EuSvmd7wfYsNAb1UmN7A7DPWBWyC1jPpiefeza2N74OG9g0tx4GDgLeDPQPMh5AW7pT22sk7UPnX9W3AG8E/g3wvO2fS9qPf/5zkKWb2MS1dH6AjqDz28XXAx+WtDeApAmSfl3SAcALtv8ncBHw9rEqOLboOeCZxjrxv6VzZhjD9yTw65L2k7QHnWWXoTxKWU6TdNgQY28FTpc0TlIPnX+JbfxKljvoLAvdUsZ9svxZhZzRjzLbL0u6CXi2fP3D30k6FPh+5yv7eR74IHAInTXDV4FXgI+NVc0xpDOBiyW9HlgJfGiM66mC7VfKV6vcRefLD3/cct6PJX0A+Kak921h6LV0lmPuBwx8yvYTpe9W4ATbA5IepXNWX03Q5ysQRlm5CHsPcJrth8a6nojY9WTpZhSVizsDwPcS8hExVnJGHxFRuZzRR0RULkEfEVG5BH1EROUS9BERlUvQR0RU7v8DtUouV5vOw9wAAAAASUVORK5CYII=\n", + "text/plain": [ + "
    " + ] + }, + "metadata": { + "tags": [], + "needs_background": "light" + } + }, + { + "output_type": "stream", + "text": [ + "/content/data/mini_speech_commands/unknown/f4504600_nohash_1.wav\n" + ], + "name": "stdout" + }, + { + "output_type": "display_data", + "data": { + "image/png": "iVBORw0KGgoAAAANSUhEUgAAAXQAAAEICAYAAABPgw/pAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4yLjIsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy+WH4yJAAAUd0lEQVR4nO3df7RdZX3n8feHxGAFV7FytZIEQyE6RseCvUa7BixF7ApSwapUGDsVrWbsNFNsrRqnlnFSO0WZZWdNTSvYYUGdkUAd0XRIh7Yqv6pALhTUgMg1gkkoGH6O6AgEvvPH2ZeeXO7NPUlOuOTJ+7XWWdk/nr339+xz7ufs8+yzd1JVSJL2fvvNdgGSpOEw0CWpEQa6JDXCQJekRhjoktQIA12SGmGga7ckOT/JR7vhY5Lcuovr+VSSPxhudTNu81eSbEryUJKjnsptDypJJTlituvQ3mHubBegPS/J7cDzgceAHwJ/A6yoqoeGuZ2qugp48QD1nA68q6qO7lv2PcOsZUD/hd5++OIwVpbkcuAjwLEAVfWRYaz36SrJ+cDl3eixVXX6rBUjwCP0fckbqupA4BXAKPDhyQ2S7Gsf8C8ENuzKgknmDLkWabcZ6PuYqtpC7wj9ZfDEV/rfSnIbcFs37ZeT3JjkgSRfTfLyieWTHJXkhiQ/SHIR8My+eccm2dw3vjDJ55NsTXJvkk8meQnwKeDnu66OB7q2T3TddOPvTjKe5L4ka5Mc0jevkrwnyW1djauTpJt3RJIrkjyY5J6uxu0k2T/JQ8Ac4KYk3+mmvyTJ5d06NyQ5qW+Z85P8eZJ1SX4I/OIg+zvJ6UmunjTtiW6Ubr2rk1za7dNrkxw+zbqO7rqIjh1gP+yX5MNJ7kjy/SR/meQnu3kXJHlfNzx/4j3QjR/e7fP9Jl7PJO/r1vFPSd4xyPPWLKkqH40/gNuB47vhhfSOSv+wGy/g74CfAn4COAr4PvAqeoH39m75/YF5wB3A7wDPAN4CPAp8tFvXscDmbngOcBPwJ8AB9IL/6G7e6cDVk2o8v289xwH30Ps2sT/wp8CVfW0L+N/AQcChwFZgWTfvQuD36R2sPLHNafZLAUd0w88AxoH/0D3P44AfAC/uq+9B4F9NrHvAfT/Vc+3f7vnAvcBSel2g/xNYM7ktsAzYBCwdcD+8s3s+PwMcCHwe+EzfvL/uhv818B3gor55X+x7PbcBq7r983rgR8BzZvs97WPqh0fo+44vdEfDVwNXAP+5b94fV9V9VfX/gOXAOVV1bVU9VlUXAA8Dr+4ezwD+a1U9WlWfA9ZPs72lwCHA+6vqh1X146q6epq2k70NOK+qbqiqh4EP0TuiX9TX5qyqeqCqvgd8BTiym/4ova6UQ3Zym6+mF3xnVdUjVfVlemF5Wl+bL1bVP1TV41X14wHXO4hLquq6qtpGL9CPnDT/FOAc4ISqum7SvOn2w9uAT1TVxuqdK/kQcGrXrXYFcHSS/YDXAB+n90EF8Avd/AmPAqu613sd8BADnCfR7DDQ9x1vrKqDquqFVfXvuvCesKlv+IXA+7qv8A90HwIL6YXzIcCWquq/o9sd02xvIXBHF1I765D+9XaBdC8wv6/NXX3DP6IXxgAfAAJc13WbvHMntrmpqh7vm3bHpG1uYs+Y7rlMeC9wcVV9cyeW3W4fdsNzgedX1XfonRw/
EjiG3gfXnUlezJMD/d5Jr+FU9elpwkAX9L66T9gE/FEX/hOPZ1XVhcA/AfMn+mk7h06zzk3AodOcaJ3pFp930vtgASDJAcBzgS0zPpGqu6rq3VV1CPBvgT8b8Gd/dwILu6PWCYdO2uau3Jr0h8CzJkaS/PQurOMU4I1JztiJZbbbh/Seyzbg7m78CnpdZvOqd17lCnrda88BbtyFGvU0YKBrsk8D70nyqvQckOTEJM8GvkYvFH47yTOSvIle18pUrqP3AXBWt45nJpn4Wn83sCDJvGmWvRB4R5Ijk+xPr3vo2qq6fabik5ySZEE3ej+9EH58B4tMuJbe0ecHuud2LPAGYM0Ay+7ITcBLu+fyTHo/a9xZdwKvBc5I8psDLnMh8DtJDktyIL19eFHf0fYVwArgym788m786qp6bBdq1NOAga7tVNUY8G7gk/QCcZzeiT2q6hHgTd34fcBb6Z1sm2o9j9ELxCOA7wGbu/YAX6Z3YvauJPdMsezfA38A/C96HwqHA6cO+BReCVzb/YplLXBGVW2caaHuub0BOIHeCdk/A369qr414HanW++36Z1U/Ht6vyIatE9/8nq+Ry/UVyZ51wCLnAd8hl5gfxf4MfDv++ZfATybfw70q+l9k7gS7bWyfXeoJGlv5RG6JDXCQJekRhjoktSIgQI9ybIkt3aXYq+cps2vJrm5++3vZ4dbpiRpJjOeFE3vJkTfBl5H75cK64HTqurmvjaLgYuB46rq/iTPq6rv72i9Bx98cC1atGg3y5ekfcv1119/T1WNTDVvkLvrLQXGJ376lWQNcDJwc1+bdwOrq+p+gJnCHGDRokWMjY0NsHlJ0oQk012dPVCXy3y2v+R5M9tfDg3wIuBFSf4hyTVJlk1TyPIkY0nGtm7dOsCmJUmDGtZJ0bnAYnp3ZzsN+HSSgyY3qqpzq2q0qkZHRqb8xiBJ2kWDBPoWejdamrCAJ99TYzOwtrsj23fp9bkvHk6JkqRBDBLo64HF3T0h5tG7BHvtpDZfoPtvt5IcTK8LZsbLrSVJwzNjoHc381kBXAbcQu82nhuSrOr7H10uA+5NcjO9ezK/v6ru3VNFS5KebNbu5TI6Olr+ykWSdk6S66tqdKp5XikqSY0w0CWpEQa6JDVikCtFJe2DFq28dLZLaNbtZ524R9brEbokNcJAl6RGGOiS1AgDXZIaYaBLUiMMdElqhIEuSY0w0CWpEQa6JDXCQJekRhjoktQIA12SGmGgS1IjDHRJaoSBLkmNMNAlqREGuiQ1wkCXpEYY6JLUCANdkhphoEtSIwx0SWqEgS5JjRgo0JMsS3JrkvEkK6eYf3qSrUlu7B7vGn6pkqQdmTtTgyRzgNXA64DNwPoka6vq5klNL6qqFXugRknSAAY5Ql8KjFfVxqp6BFgDnLxny5Ik7axBAn0+sKlvfHM3bbI3J/l6ks8lWTjVipIsTzKWZGzr1q27UK4kaTrDOin618Ciqno58HfABVM1qqpzq2q0qkZHRkaGtGlJEgwW6FuA/iPuBd20J1TVvVX1cDf6F8DPDac8SdKgBgn09cDiJIclmQecCqztb5DkBX2jJwG3DK9ESdIgZvyVS1VtS7ICuAyYA5xXVRuSrALGqmot8NtJTgK2AfcBp+/BmiVJU5gx0AGqah2wbtK0M/uGPwR8aLilSZJ2hleKSlIjDHRJaoSBLkmNMNAlqREGuiQ1wkCXpEYY6JLUCANdkhphoEtSIwx0SWqEgS5JjTDQJakRBrokNcJAl6RGGOiS1AgDXZIaYaBLUiMMdElqhIEuSY0w0CWpEQa6JDXCQJekRhjoktQIA12SGmGgS1IjDHRJaoSBLkmNGCjQkyxLcmuS8SQrd9DuzUkqyejwSpQkDWLGQE8yB1gNnAAsAU5LsmSKds8GzgCuHXaRkqSZDXKEvhQYr6qNVfUIsAY4eYp2fwh8DPjxEOuTJA1okECfD2zqG9/cTXtCklcAC6vq0h2tKMnyJGNJxrZu
3brTxUqSprfbJ0WT7Ad8AnjfTG2r6tyqGq2q0ZGRkd3dtCSpzyCBvgVY2De+oJs24dnAy4DLk9wOvBpY64lRSXpqDRLo64HFSQ5LMg84FVg7MbOqHqyqg6tqUVUtAq4BTqqqsT1SsSRpSjMGelVtA1YAlwG3ABdX1YYkq5KctKcLlCQNZu4gjapqHbBu0rQzp2l77O6XJUnaWV4pKkmNMNAlqREGuiQ1wkCXpEYY6JLUCANdkhphoEtSIwx0SWqEgS5JjTDQJakRBrokNcJAl6RGGOiS1AgDXZIaYaBLUiMMdElqhIEuSY0w0CWpEQa6JDXCQJekRhjoktQIA12SGmGgS1IjDHRJaoSBLkmNMNAlqREGuiQ1YqBAT7Isya1JxpOsnGL+e5J8I8mNSa5OsmT4pUqSdmTGQE8yB1gNnAAsAU6bIrA/W1X/sqqOBD4OfGLolUqSdmiQI/SlwHhVbayqR4A1wMn9Darq//aNHgDU8EqUJA1i7gBt5gOb+sY3A6+a3CjJbwG/C8wDjptqRUmWA8sBDj300J2tVZK0A0M7KVpVq6vqcOCDwIenaXNuVY1W1ejIyMiwNi1JYrBA3wIs7Btf0E2bzhrgjbtTlCRp5w0S6OuBxUkOSzIPOBVY298gyeK+0ROB24ZXoiRpEDP2oVfVtiQrgMuAOcB5VbUhySpgrKrWAiuSHA88CtwPvH1PFi1JerJBTopSVeuAdZOmndk3fMaQ65Ik7SSvFJWkRhjoktQIA12SGmGgS1IjDHRJaoSBLkmNMNAlqREGuiQ1wkCXpEYY6JLUCANdkhphoEtSIwx0SWqEgS5JjTDQJakRBrokNcJAl6RGGOiS1AgDXZIaYaBLUiMMdElqhIEuSY0w0CWpEQa6JDXCQJekRhjoktSIubNdwK5YtPLS2S6hWbefdeIeWa+v2Z6zp14z7X0GOkJPsizJrUnGk6ycYv7vJrk5ydeTfCnJC4dfqiRpR2YM9CRzgNXACcAS4LQkSyY1+0dgtKpeDnwO+PiwC5Uk7dggR+hLgfGq2lhVjwBrgJP7G1TVV6rqR93oNcCC4ZYpSZrJIIE+H9jUN765mzad3wD+ZqoZSZYnGUsytnXr1sGrlCTNaKi/cknya8AocPZU86vq3KoararRkZGRYW5akvZ5g/zKZQuwsG98QTdtO0mOB34f+IWqeng45UmSBjXIEfp6YHGSw5LMA04F1vY3SHIUcA5wUlV9f/hlSpJmMmOgV9U2YAVwGXALcHFVbUiyKslJXbOzgQOBv0pyY5K106xOkrSHDHRhUVWtA9ZNmnZm3/DxQ65LkrSTvPRfkhphoEtSIwx0SWqEgS5JjTDQJakRBrokNcJAl6RGGOiS1AgDXZIaYaBLUiMMdElqhIEuSY0w0CWpEQa6JDXCQJekRhjoktQIA12SGmGgS1IjDHRJaoSBLkmNMNAlqREGuiQ1wkCXpEYY6JLUCANdkhphoEtSIwx0SWrEQIGeZFmSW5OMJ1k5xfzXJLkhybYkbxl+mZKkmcwY6EnmAKuBE4AlwGlJlkxq9j3gdOCzwy5QkjSYuQO0WQqMV9VGgCRrgJOBmycaVNXt3bzH90CNkqQBDNLlMh/Y1De+uZu205IsTzKWZGzr1q27sgpJ0jSe0pOiVXVuVY1W1ejIyMhTuWlJat4ggb4FWNg3vqCbJkl6Ghkk0NcDi5MclmQecCqwds+WJUnaWTMGelVtA1YAlwG3ABdX1YYkq5KcBJDklUk2A6cA5yTZsCeLliQ92SC/cqGq1gHrJk07s294Pb2uGEnSLPFKUUlqhIEuSY0w0CWpEQa6JDXCQJekRhjoktQIA12SGmGgS1IjDHRJaoSBLkmNMNAlqREGuiQ1wkCXpEYY6JLUCANdkhphoEtSIwx0SWqEgS5JjTDQJakRBrokNcJAl6RGGOiS1AgDXZIaYaBLUiMMdElqhIEuSY0w0CWpEQMFepJlSW5NMp5k5RTz909yUTf/2iSLhl2o
JGnHZgz0JHOA1cAJwBLgtCRLJjX7DeD+qjoC+BPgY8MuVJK0Y4McoS8FxqtqY1U9AqwBTp7U5mTggm74c8Brk2R4ZUqSZjJ3gDbzgU1945uBV03Xpqq2JXkQeC5wT3+jJMuB5d3oQ0lu3ZWi90IHM2lfPF3F71awF71e4GvW2ZdesxdON2OQQB+aqjoXOPep3ObTQZKxqhqd7To0GF+vvY+vWc8gXS5bgIV94wu6aVO2STIX+Eng3mEUKEkazCCBvh5YnOSwJPOAU4G1k9qsBd7eDb8F+HJV1fDKlCTNZMYul65PfAVwGTAHOK+qNiRZBYxV1VrgvwOfSTIO3Ecv9PXP9rlupr2cr9fex9cMiAfSktQGrxSVpEYY6JLUCANd0tNWktOTfHK269hbGOiS1AgDfTclWZXkvX3jf5TkjCTvT7I+ydeT/Kdu3gFJLk1yU5JvJnnr7FUugCSLktyS5NNJNiT52yQ/keTIJNd0r98lSZ4z27W2oNvf3+wb/70kH0lyeZKPJbkuybeTHDPFsicm+VqSg5Ocn+S/Jflqko1J3tK1SZKzu7+vb0z8jSVZneSkbviSJOd1w+/s/manfB88NXtleAz03Xce8OsASfaj95PNu4DF9O6DcyTwc0leAywD7qyqn62qlwH/Z3ZK1iSLgdVV9VLgAeDNwF8CH6yqlwPfAP7jLNa3r5hbVUuB9zJpfyf5FWAl8PqqmrjE/wXA0cAvA2d1095E72/uZ4HjgbOTvAC4Cpj4kJhP70aDdNOu7Ianeh/sVQz03VRVtwP3JjkK+CXgH4FX9g3fAPwLem+WbwCv645EjqmqB2enak3y3aq6sRu+HjgcOKiqruimXQC8ZlYq27d8vvv3emBR3/TjgA8CJ1bV/X3Tv1BVj1fVzcDzu2lHAxdW1WNVdTdwBb2/x6uAY7o7xd4M3N0F/c8DX+2Wnfw+6K9hr/CU3sulYX8BnA78NL0j9tcCf1xV50xumOQVwOuBjyb5UlWteioL1ZQe7ht+DDhotgrZB2xj+wPJZ/YNT7wOj7F9Nn0H+BngRcDYFO0Bdnh316rakuQget+SrwR+CvhV4KGq+kGS5/Lk94FdLvuoS+i9UV5J74ray4B3JjkQIMn8JM9Lcgjwo6r6H8DZwCtmq2Dt0IPA/X39uP+G3pGedt/dwPOSPDfJ/vS6S2ZyB103WJKXztD2KuCtSeYkGaH3zeq6bt419Lpzruza/V73bzM8Qh+CqnokyVeAB6rqMeBvk7wE+Fp3W/iHgF8DjqDXp/c48Cjwm7NVs2b0duBTSZ4FbATeMcv1NKGqHu1uG3IdvZv6fWvA5b6V5G3AXyV5ww6aXkKvG+UmoIAPVNVd3byrgF+qqvEkd9A7Sm8q0L30fwi6k6E3AKdU1W2zXY+kfZNdLrupO8kyDnzJMJc0mzxCl6RGeIQuSY0w0CWpEQa6JDXCQJekRhjoktSI/w/TIK6YopbnBgAAAABJRU5ErkJggg==\n", + "text/plain": [ + "
    " + ] + }, + "metadata": { + "tags": [], + "needs_background": "light" + } + }, + { + "output_type": "stream", + "text": [ + "/content/data/mini_speech_commands/unknown/da76aa58_nohash_1.wav\n" + ], + "name": "stdout" + }, + { + "output_type": "display_data", + "data": { + "image/png": "iVBORw0KGgoAAAANSUhEUgAAAXQAAAEICAYAAABPgw/pAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4yLjIsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy+WH4yJAAAUd0lEQVR4nO3df7RdZX3n8feHxGAFV7FytZIEQyE6RseCvUa7BixF7ApSwapUGDsVrWbsNFNsrRqnlnFSO0WZZWdNTSvYYUGdkUAd0XRIh7Yqv6pALhTUgMg1gkkoGH6O6AgEvvPH2ZeeXO7NPUlOuOTJ+7XWWdk/nr339+xz7ufs8+yzd1JVSJL2fvvNdgGSpOEw0CWpEQa6JDXCQJekRhjoktQIA12SGmGga7ckOT/JR7vhY5Lcuovr+VSSPxhudTNu81eSbEryUJKjnsptDypJJTlituvQ3mHubBegPS/J7cDzgceAHwJ/A6yoqoeGuZ2qugp48QD1nA68q6qO7lv2PcOsZUD/hd5++OIwVpbkcuAjwLEAVfWRYaz36SrJ+cDl3eixVXX6rBUjwCP0fckbqupA4BXAKPDhyQ2S7Gsf8C8ENuzKgknmDLkWabcZ6PuYqtpC7wj9ZfDEV/rfSnIbcFs37ZeT3JjkgSRfTfLyieWTHJXkhiQ/SHIR8My+eccm2dw3vjDJ55NsTXJvkk8meQnwKeDnu66OB7q2T3TddOPvTjKe5L4ka5Mc0jevkrwnyW1djauTpJt3RJIrkjyY5J6uxu0k2T/JQ8Ac4KYk3+mmvyTJ5d06NyQ5qW+Z85P8eZJ1SX4I/OIg+zvJ6UmunjTtiW6Ubr2rk1za7dNrkxw+zbqO7rqIjh1gP+yX5MNJ7kjy/SR/meQnu3kXJHlfNzx/4j3QjR/e7fP9Jl7PJO/r1vFPSd4xyPPWLKkqH40/gNuB47vhhfSOSv+wGy/g74CfAn4COAr4PvAqeoH39m75/YF5wB3A7wDPAN4CPAp8tFvXscDmbngOcBPwJ8AB9IL/6G7e6cDVk2o8v289xwH30Ps2sT/wp8CVfW0L+N/AQcChwFZgWTfvQuD36R2sPLHNafZLAUd0w88AxoH/0D3P44AfAC/uq+9B4F9NrHvAfT/Vc+3f7vnAvcBSel2g/xNYM7ktsAzYBCwdcD+8s3s+PwMcCHwe+EzfvL/uhv818B3gor55X+x7PbcBq7r983rgR8BzZvs97WPqh0fo+44vdEfDVwNXAP+5b94fV9V9VfX/gOXAOVV1bVU9VlUXAA8Dr+4ezwD+a1U9WlWfA9ZPs72lwCHA+6vqh1X146q6epq2k70NOK+qbqiqh4EP0TuiX9TX5qyqeqCqvgd8BTiym/4ova6UQ3Zym6+mF3xnVdUjVfVlemF5Wl+bL1bVP1TV41X14wHXO4hLquq6qtpGL9CPnDT/FOAc4ISqum7SvOn2w9uAT1TVxuqdK/kQcGrXrXYFcHSS/YDXAB+n90EF8Avd/AmPAqu613sd8BADnCfR7DDQ9x1vrKqDquqFVfXvuvCesKlv+IXA+7qv8A90HwIL6YXzIcCWquq/o9sd02xvIXBHF1I765D+9XaBdC8wv6/NXX3DP6IXxgAfAAJc13WbvHMntrmpqh7vm3bHpG1uYs+Y7rlMeC9wcVV9cyeW3W4fdsNzgedX1XfonRw/
EjiG3gfXnUlezJMD/d5Jr+FU9elpwkAX9L66T9gE/FEX/hOPZ1XVhcA/AfMn+mk7h06zzk3AodOcaJ3pFp930vtgASDJAcBzgS0zPpGqu6rq3VV1CPBvgT8b8Gd/dwILu6PWCYdO2uau3Jr0h8CzJkaS/PQurOMU4I1JztiJZbbbh/Seyzbg7m78CnpdZvOqd17lCnrda88BbtyFGvU0YKBrsk8D70nyqvQckOTEJM8GvkYvFH47yTOSvIle18pUrqP3AXBWt45nJpn4Wn83sCDJvGmWvRB4R5Ijk+xPr3vo2qq6fabik5ySZEE3ej+9EH58B4tMuJbe0ecHuud2LPAGYM0Ay+7ITcBLu+fyTHo/a9xZdwKvBc5I8psDLnMh8DtJDktyIL19eFHf0fYVwArgym788m786qp6bBdq1NOAga7tVNUY8G7gk/QCcZzeiT2q6hHgTd34fcBb6Z1sm2o9j9ELxCOA7wGbu/YAX6Z3YvauJPdMsezfA38A/C96HwqHA6cO+BReCVzb/YplLXBGVW2caaHuub0BOIHeCdk/A369qr414HanW++36Z1U/Ht6vyIatE9/8nq+Ry/UVyZ51wCLnAd8hl5gfxf4MfDv++ZfATybfw70q+l9k7gS7bWyfXeoJGlv5RG6JDXCQJekRhjoktSIgQI9ybIkt3aXYq+cps2vJrm5++3vZ4dbpiRpJjOeFE3vJkTfBl5H75cK64HTqurmvjaLgYuB46rq/iTPq6rv72i9Bx98cC1atGg3y5ekfcv1119/T1WNTDVvkLvrLQXGJ376lWQNcDJwc1+bdwOrq+p+gJnCHGDRokWMjY0NsHlJ0oQk012dPVCXy3y2v+R5M9tfDg3wIuBFSf4hyTVJlk1TyPIkY0nGtm7dOsCmJUmDGtZJ0bnAYnp3ZzsN+HSSgyY3qqpzq2q0qkZHRqb8xiBJ2kWDBPoWejdamrCAJ99TYzOwtrsj23fp9bkvHk6JkqRBDBLo64HF3T0h5tG7BHvtpDZfoPtvt5IcTK8LZsbLrSVJwzNjoHc381kBXAbcQu82nhuSrOr7H10uA+5NcjO9ezK/v6ru3VNFS5KebNbu5TI6Olr+ykWSdk6S66tqdKp5XikqSY0w0CWpEQa6JDVikCtFJe2DFq28dLZLaNbtZ524R9brEbokNcJAl6RGGOiS1AgDXZIaYaBLUiMMdElqhIEuSY0w0CWpEQa6JDXCQJekRhjoktQIA12SGmGgS1IjDHRJaoSBLkmNMNAlqREGuiQ1wkCXpEYY6JLUCANdkhphoEtSIwx0SWqEgS5JjRgo0JMsS3JrkvEkK6eYf3qSrUlu7B7vGn6pkqQdmTtTgyRzgNXA64DNwPoka6vq5klNL6qqFXugRknSAAY5Ql8KjFfVxqp6BFgDnLxny5Ik7axBAn0+sKlvfHM3bbI3J/l6ks8lWTjVipIsTzKWZGzr1q27UK4kaTrDOin618Ciqno58HfABVM1qqpzq2q0qkZHRkaGtGlJEgwW6FuA/iPuBd20J1TVvVX1cDf6F8DPDac8SdKgBgn09cDiJIclmQecCqztb5DkBX2jJwG3DK9ESdIgZvyVS1VtS7ICuAyYA5xXVRuSrALGqmot8NtJTgK2AfcBp+/BmiVJU5gx0AGqah2wbtK0M/uGPwR8aLilSZJ2hleKSlIjDHRJaoSBLkmNMNAlqREGuiQ1wkCXpEYY6JLUCANdkhphoEtSIwx0SWqEgS5JjTDQJakRBrokNcJAl6RGGOiS1AgDXZIaYaBLUiMMdElqhIEuSY0w0CWpEQa6JDXCQJekRhjoktQIA12SGmGgS1IjDHRJaoSBLkmNGCjQkyxLcmuS8SQrd9DuzUkqyejwSpQkDWLGQE8yB1gNnAAsAU5LsmSKds8GzgCuHXaRkqSZDXKEvhQYr6qNVfUIsAY4eYp2fwh8DPjxEOuTJA1okECfD2zqG9/cTXtCklcAC6vq0h2tKMnyJGNJxrZu
3brTxUqSprfbJ0WT7Ad8AnjfTG2r6tyqGq2q0ZGRkd3dtCSpzyCBvgVY2De+oJs24dnAy4DLk9wOvBpY64lRSXpqDRLo64HFSQ5LMg84FVg7MbOqHqyqg6tqUVUtAq4BTqqqsT1SsSRpSjMGelVtA1YAlwG3ABdX1YYkq5KctKcLlCQNZu4gjapqHbBu0rQzp2l77O6XJUnaWV4pKkmNMNAlqREGuiQ1wkCXpEYY6JLUCANdkhphoEtSIwx0SWqEgS5JjTDQJakRBrokNcJAl6RGGOiS1AgDXZIaYaBLUiMMdElqhIEuSY0w0CWpEQa6JDXCQJekRhjoktQIA12SGmGgS1IjDHRJaoSBLkmNMNAlqREGuiQ1YqBAT7Isya1JxpOsnGL+e5J8I8mNSa5OsmT4pUqSdmTGQE8yB1gNnAAsAU6bIrA/W1X/sqqOBD4OfGLolUqSdmiQI/SlwHhVbayqR4A1wMn9Darq//aNHgDU8EqUJA1i7gBt5gOb+sY3A6+a3CjJbwG/C8wDjptqRUmWA8sBDj300J2tVZK0A0M7KVpVq6vqcOCDwIenaXNuVY1W1ejIyMiwNi1JYrBA3wIs7Btf0E2bzhrgjbtTlCRp5w0S6OuBxUkOSzIPOBVY298gyeK+0ROB24ZXoiRpEDP2oVfVtiQrgMuAOcB5VbUhySpgrKrWAiuSHA88CtwPvH1PFi1JerJBTopSVeuAdZOmndk3fMaQ65Ik7SSvFJWkRhjoktQIA12SGmGgS1IjDHRJaoSBLkmNMNAlqREGuiQ1wkCXpEYY6JLUCANdkhphoEtSIwx0SWqEgS5JjTDQJakRBrokNcJAl6RGGOiS1AgDXZIaYaBLUiMMdElqhIEuSY0w0CWpEQa6JDXCQJekRhjoktSIubNdwK5YtPLS2S6hWbefdeIeWa+v2Z6zp14z7X0GOkJPsizJrUnGk6ycYv7vJrk5ydeTfCnJC4dfqiRpR2YM9CRzgNXACcAS4LQkSyY1+0dgtKpeDnwO+PiwC5Uk7dggR+hLgfGq2lhVjwBrgJP7G1TVV6rqR93oNcCC4ZYpSZrJIIE+H9jUN765mzad3wD+ZqoZSZYnGUsytnXr1sGrlCTNaKi/cknya8AocPZU86vq3KoararRkZGRYW5akvZ5g/zKZQuwsG98QTdtO0mOB34f+IWqeng45UmSBjXIEfp6YHGSw5LMA04F1vY3SHIUcA5wUlV9f/hlSpJmMmOgV9U2YAVwGXALcHFVbUiyKslJXbOzgQOBv0pyY5K106xOkrSHDHRhUVWtA9ZNmnZm3/DxQ65LkrSTvPRfkhphoEtSIwx0SWqEgS5JjTDQJakRBrokNcJAl6RGGOiS1AgDXZIaYaBLUiMMdElqhIEuSY0w0CWpEQa6JDXCQJekRhjoktQIA12SGmGgS1IjDHRJaoSBLkmNMNAlqREGuiQ1wkCXpEYY6JLUCANdkhphoEtSIwx0SWrEQIGeZFmSW5OMJ1k5xfzXJLkhybYkbxl+mZKkmcwY6EnmAKuBE4AlwGlJlkxq9j3gdOCzwy5QkjSYuQO0WQqMV9VGgCRrgJOBmycaVNXt3bzH90CNkqQBDNLlMh/Y1De+uZu205IsTzKWZGzr1q27sgpJ0jSe0pOiVXVuVY1W1ejIyMhTuWlJat4ggb4FWNg3vqCbJkl6Ghkk0NcDi5MclmQecCqwds+WJUnaWTMGelVtA1YAlwG3ABdX1YYkq5KcBJDklUk2A6cA5yTZsCeLliQ92SC/cqGq1gHrJk07s294Pb2uGEnSLPFKUUlqhIEuSY0w0CWpEQa6JDXCQJekRhjoktQIA12SGmGgS1IjDHRJaoSBLkmNMNAlqREGuiQ1wkCXpEYY6JLUCANdkhphoEtSIwx0SWqEgS5JjTDQJakRBrokNcJAl6RGGOiS1AgDXZIaYaBLUiMMdElqhIEuSY0w0CWpEQMFepJlSW5NMp5k5RTz909yUTf/2iSLhl2o
JGnHZgz0JHOA1cAJwBLgtCRLJjX7DeD+qjoC+BPgY8MuVJK0Y4McoS8FxqtqY1U9AqwBTp7U5mTggm74c8Brk2R4ZUqSZjJ3gDbzgU1945uBV03Xpqq2JXkQeC5wT3+jJMuB5d3oQ0lu3ZWi90IHM2lfPF3F71awF71e4GvW2ZdesxdON2OQQB+aqjoXOPep3ObTQZKxqhqd7To0GF+vvY+vWc8gXS5bgIV94wu6aVO2STIX+Eng3mEUKEkazCCBvh5YnOSwJPOAU4G1k9qsBd7eDb8F+HJV1fDKlCTNZMYul65PfAVwGTAHOK+qNiRZBYxV1VrgvwOfSTIO3Ecv9PXP9rlupr2cr9fex9cMiAfSktQGrxSVpEYY6JLUCANd0tNWktOTfHK269hbGOiS1AgDfTclWZXkvX3jf5TkjCTvT7I+ydeT/Kdu3gFJLk1yU5JvJnnr7FUugCSLktyS5NNJNiT52yQ/keTIJNd0r98lSZ4z27W2oNvf3+wb/70kH0lyeZKPJbkuybeTHDPFsicm+VqSg5Ocn+S/Jflqko1J3tK1SZKzu7+vb0z8jSVZneSkbviSJOd1w+/s/manfB88NXtleAz03Xce8OsASfaj95PNu4DF9O6DcyTwc0leAywD7qyqn62qlwH/Z3ZK1iSLgdVV9VLgAeDNwF8CH6yqlwPfAP7jLNa3r5hbVUuB9zJpfyf5FWAl8PqqmrjE/wXA0cAvA2d1095E72/uZ4HjgbOTvAC4Cpj4kJhP70aDdNOu7Ianeh/sVQz03VRVtwP3JjkK+CXgH4FX9g3fAPwLem+WbwCv645EjqmqB2enak3y3aq6sRu+HjgcOKiqruimXQC8ZlYq27d8vvv3emBR3/TjgA8CJ1bV/X3Tv1BVj1fVzcDzu2lHAxdW1WNVdTdwBb2/x6uAY7o7xd4M3N0F/c8DX+2Wnfw+6K9hr/CU3sulYX8BnA78NL0j9tcCf1xV50xumOQVwOuBjyb5UlWteioL1ZQe7ht+DDhotgrZB2xj+wPJZ/YNT7wOj7F9Nn0H+BngRcDYFO0Bdnh316rakuQget+SrwR+CvhV4KGq+kGS5/Lk94FdLvuoS+i9UV5J74ray4B3JjkQIMn8JM9Lcgjwo6r6H8DZwCtmq2Dt0IPA/X39uP+G3pGedt/dwPOSPDfJ/vS6S2ZyB103WJKXztD2KuCtSeYkGaH3zeq6bt419Lpzruza/V73bzM8Qh+CqnokyVeAB6rqMeBvk7wE+Fp3W/iHgF8DjqDXp/c48Cjwm7NVs2b0duBTSZ4FbATeMcv1NKGqHu1uG3IdvZv6fWvA5b6V5G3AXyV5ww6aXkKvG+UmoIAPVNVd3byrgF+qqvEkd9A7Sm8q0L30fwi6k6E3AKdU1W2zXY+kfZNdLrupO8kyDnzJMJc0mzxCl6RGeIQuSY0w0CWpEQa6JDXCQJekRhjoktSI/w/TIK6YopbnBgAAAABJRU5ErkJggg==\n", + "text/plain": [ + "
    " + ] + }, + "metadata": { + "tags": [], + "needs_background": "light" + } + }, + { + "output_type": "stream", + "text": [ + "/content/data/mini_speech_commands/unknown/dedc7fab_nohash_1.wav\n" + ], + "name": "stdout" + }, + { + "output_type": "display_data", + "data": { + "image/png": "iVBORw0KGgoAAAANSUhEUgAAAXQAAAEICAYAAABPgw/pAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4yLjIsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy+WH4yJAAAUd0lEQVR4nO3df7RdZX3n8feHxGAFV7FytZIEQyE6RseCvUa7BixF7ApSwapUGDsVrWbsNFNsrRqnlnFSO0WZZWdNTSvYYUGdkUAd0XRIh7Yqv6pALhTUgMg1gkkoGH6O6AgEvvPH2ZeeXO7NPUlOuOTJ+7XWWdk/nr339+xz7ufs8+yzd1JVSJL2fvvNdgGSpOEw0CWpEQa6JDXCQJekRhjoktQIA12SGmGga7ckOT/JR7vhY5Lcuovr+VSSPxhudTNu81eSbEryUJKjnsptDypJJTlituvQ3mHubBegPS/J7cDzgceAHwJ/A6yoqoeGuZ2qugp48QD1nA68q6qO7lv2PcOsZUD/hd5++OIwVpbkcuAjwLEAVfWRYaz36SrJ+cDl3eixVXX6rBUjwCP0fckbqupA4BXAKPDhyQ2S7Gsf8C8ENuzKgknmDLkWabcZ6PuYqtpC7wj9ZfDEV/rfSnIbcFs37ZeT3JjkgSRfTfLyieWTHJXkhiQ/SHIR8My+eccm2dw3vjDJ55NsTXJvkk8meQnwKeDnu66OB7q2T3TddOPvTjKe5L4ka5Mc0jevkrwnyW1djauTpJt3RJIrkjyY5J6uxu0k2T/JQ8Ac4KYk3+mmvyTJ5d06NyQ5qW+Z85P8eZJ1SX4I/OIg+zvJ6UmunjTtiW6Ubr2rk1za7dNrkxw+zbqO7rqIjh1gP+yX5MNJ7kjy/SR/meQnu3kXJHlfNzx/4j3QjR/e7fP9Jl7PJO/r1vFPSd4xyPPWLKkqH40/gNuB47vhhfSOSv+wGy/g74CfAn4COAr4PvAqeoH39m75/YF5wB3A7wDPAN4CPAp8tFvXscDmbngOcBPwJ8AB9IL/6G7e6cDVk2o8v289xwH30Ps2sT/wp8CVfW0L+N/AQcChwFZgWTfvQuD36R2sPLHNafZLAUd0w88AxoH/0D3P44AfAC/uq+9B4F9NrHvAfT/Vc+3f7vnAvcBSel2g/xNYM7ktsAzYBCwdcD+8s3s+PwMcCHwe+EzfvL/uhv818B3gor55X+x7PbcBq7r983rgR8BzZvs97WPqh0fo+44vdEfDVwNXAP+5b94fV9V9VfX/gOXAOVV1bVU9VlUXAA8Dr+4ezwD+a1U9WlWfA9ZPs72lwCHA+6vqh1X146q6epq2k70NOK+qbqiqh4EP0TuiX9TX5qyqeqCqvgd8BTiym/4ova6UQ3Zym6+mF3xnVdUjVfVlemF5Wl+bL1bVP1TV41X14wHXO4hLquq6qtpGL9CPnDT/FOAc4ISqum7SvOn2w9uAT1TVxuqdK/kQcGrXrXYFcHSS/YDXAB+n90EF8Avd/AmPAqu613sd8BADnCfR7DDQ9x1vrKqDquqFVfXvuvCesKlv+IXA+7qv8A90HwIL6YXzIcCWquq/o9sd02xvIXBHF1I765D+9XaBdC8wv6/NXX3DP6IXxgAfAAJc13WbvHMntrmpqh7vm3bHpG1uYs+Y7rlMeC9wcVV9cyeW3W4fdsNzgedX1XfonRw/
EjiG3gfXnUlezJMD/d5Jr+FU9elpwkAX9L66T9gE/FEX/hOPZ1XVhcA/AfMn+mk7h06zzk3AodOcaJ3pFp930vtgASDJAcBzgS0zPpGqu6rq3VV1CPBvgT8b8Gd/dwILu6PWCYdO2uau3Jr0h8CzJkaS/PQurOMU4I1JztiJZbbbh/Seyzbg7m78CnpdZvOqd17lCnrda88BbtyFGvU0YKBrsk8D70nyqvQckOTEJM8GvkYvFH47yTOSvIle18pUrqP3AXBWt45nJpn4Wn83sCDJvGmWvRB4R5Ijk+xPr3vo2qq6fabik5ySZEE3ej+9EH58B4tMuJbe0ecHuud2LPAGYM0Ay+7ITcBLu+fyTHo/a9xZdwKvBc5I8psDLnMh8DtJDktyIL19eFHf0fYVwArgym788m786qp6bBdq1NOAga7tVNUY8G7gk/QCcZzeiT2q6hHgTd34fcBb6Z1sm2o9j9ELxCOA7wGbu/YAX6Z3YvauJPdMsezfA38A/C96HwqHA6cO+BReCVzb/YplLXBGVW2caaHuub0BOIHeCdk/A369qr414HanW++36Z1U/Ht6vyIatE9/8nq+Ry/UVyZ51wCLnAd8hl5gfxf4MfDv++ZfATybfw70q+l9k7gS7bWyfXeoJGlv5RG6JDXCQJekRhjoktSIgQI9ybIkt3aXYq+cps2vJrm5++3vZ4dbpiRpJjOeFE3vJkTfBl5H75cK64HTqurmvjaLgYuB46rq/iTPq6rv72i9Bx98cC1atGg3y5ekfcv1119/T1WNTDVvkLvrLQXGJ376lWQNcDJwc1+bdwOrq+p+gJnCHGDRokWMjY0NsHlJ0oQk012dPVCXy3y2v+R5M9tfDg3wIuBFSf4hyTVJlk1TyPIkY0nGtm7dOsCmJUmDGtZJ0bnAYnp3ZzsN+HSSgyY3qqpzq2q0qkZHRqb8xiBJ2kWDBPoWejdamrCAJ99TYzOwtrsj23fp9bkvHk6JkqRBDBLo64HF3T0h5tG7BHvtpDZfoPtvt5IcTK8LZsbLrSVJwzNjoHc381kBXAbcQu82nhuSrOr7H10uA+5NcjO9ezK/v6ru3VNFS5KebNbu5TI6Olr+ykWSdk6S66tqdKp5XikqSY0w0CWpEQa6JDVikCtFJe2DFq28dLZLaNbtZ524R9brEbokNcJAl6RGGOiS1AgDXZIaYaBLUiMMdElqhIEuSY0w0CWpEQa6JDXCQJekRhjoktQIA12SGmGgS1IjDHRJaoSBLkmNMNAlqREGuiQ1wkCXpEYY6JLUCANdkhphoEtSIwx0SWqEgS5JjRgo0JMsS3JrkvEkK6eYf3qSrUlu7B7vGn6pkqQdmTtTgyRzgNXA64DNwPoka6vq5klNL6qqFXugRknSAAY5Ql8KjFfVxqp6BFgDnLxny5Ik7axBAn0+sKlvfHM3bbI3J/l6ks8lWTjVipIsTzKWZGzr1q27UK4kaTrDOin618Ciqno58HfABVM1qqpzq2q0qkZHRkaGtGlJEgwW6FuA/iPuBd20J1TVvVX1cDf6F8DPDac8SdKgBgn09cDiJIclmQecCqztb5DkBX2jJwG3DK9ESdIgZvyVS1VtS7ICuAyYA5xXVRuSrALGqmot8NtJTgK2AfcBp+/BmiVJU5gx0AGqah2wbtK0M/uGPwR8aLilSZJ2hleKSlIjDHRJaoSBLkmNMNAlqREGuiQ1wkCXpEYY6JLUCANdkhphoEtSIwx0SWqEgS5JjTDQJakRBrokNcJAl6RGGOiS1AgDXZIaYaBLUiMMdElqhIEuSY0w0CWpEQa6JDXCQJekRhjoktQIA12SGmGgS1IjDHRJaoSBLkmNGCjQkyxLcmuS8SQrd9DuzUkqyejwSpQkDWLGQE8yB1gNnAAsAU5LsmSKds8GzgCuHXaRkqSZDXKEvhQYr6qNVfUIsAY4eYp2fwh8DPjxEOuTJA1okECfD2zqG9/cTXtCklcAC6vq0h2tKMnyJGNJxrZu
3brTxUqSprfbJ0WT7Ad8AnjfTG2r6tyqGq2q0ZGRkd3dtCSpzyCBvgVY2De+oJs24dnAy4DLk9wOvBpY64lRSXpqDRLo64HFSQ5LMg84FVg7MbOqHqyqg6tqUVUtAq4BTqqqsT1SsSRpSjMGelVtA1YAlwG3ABdX1YYkq5KctKcLlCQNZu4gjapqHbBu0rQzp2l77O6XJUnaWV4pKkmNMNAlqREGuiQ1wkCXpEYY6JLUCANdkhphoEtSIwx0SWqEgS5JjTDQJakRBrokNcJAl6RGGOiS1AgDXZIaYaBLUiMMdElqhIEuSY0w0CWpEQa6JDXCQJekRhjoktQIA12SGmGgS1IjDHRJaoSBLkmNMNAlqREGuiQ1YqBAT7Isya1JxpOsnGL+e5J8I8mNSa5OsmT4pUqSdmTGQE8yB1gNnAAsAU6bIrA/W1X/sqqOBD4OfGLolUqSdmiQI/SlwHhVbayqR4A1wMn9Darq//aNHgDU8EqUJA1i7gBt5gOb+sY3A6+a3CjJbwG/C8wDjptqRUmWA8sBDj300J2tVZK0A0M7KVpVq6vqcOCDwIenaXNuVY1W1ejIyMiwNi1JYrBA3wIs7Btf0E2bzhrgjbtTlCRp5w0S6OuBxUkOSzIPOBVY298gyeK+0ROB24ZXoiRpEDP2oVfVtiQrgMuAOcB5VbUhySpgrKrWAiuSHA88CtwPvH1PFi1JerJBTopSVeuAdZOmndk3fMaQ65Ik7SSvFJWkRhjoktQIA12SGmGgS1IjDHRJaoSBLkmNMNAlqREGuiQ1wkCXpEYY6JLUCANdkhphoEtSIwx0SWqEgS5JjTDQJakRBrokNcJAl6RGGOiS1AgDXZIaYaBLUiMMdElqhIEuSY0w0CWpEQa6JDXCQJekRhjoktSIubNdwK5YtPLS2S6hWbefdeIeWa+v2Z6zp14z7X0GOkJPsizJrUnGk6ycYv7vJrk5ydeTfCnJC4dfqiRpR2YM9CRzgNXACcAS4LQkSyY1+0dgtKpeDnwO+PiwC5Uk7dggR+hLgfGq2lhVjwBrgJP7G1TVV6rqR93oNcCC4ZYpSZrJIIE+H9jUN765mzad3wD+ZqoZSZYnGUsytnXr1sGrlCTNaKi/cknya8AocPZU86vq3KoararRkZGRYW5akvZ5g/zKZQuwsG98QTdtO0mOB34f+IWqeng45UmSBjXIEfp6YHGSw5LMA04F1vY3SHIUcA5wUlV9f/hlSpJmMmOgV9U2YAVwGXALcHFVbUiyKslJXbOzgQOBv0pyY5K106xOkrSHDHRhUVWtA9ZNmnZm3/DxQ65LkrSTvPRfkhphoEtSIwx0SWqEgS5JjTDQJakRBrokNcJAl6RGGOiS1AgDXZIaYaBLUiMMdElqhIEuSY0w0CWpEQa6JDXCQJekRhjoktQIA12SGmGgS1IjDHRJaoSBLkmNMNAlqREGuiQ1wkCXpEYY6JLUCANdkhphoEtSIwx0SWrEQIGeZFmSW5OMJ1k5xfzXJLkhybYkbxl+mZKkmcwY6EnmAKuBE4AlwGlJlkxq9j3gdOCzwy5QkjSYuQO0WQqMV9VGgCRrgJOBmycaVNXt3bzH90CNkqQBDNLlMh/Y1De+uZu205IsTzKWZGzr1q27sgpJ0jSe0pOiVXVuVY1W1ejIyMhTuWlJat4ggb4FWNg3vqCbJkl6Ghkk0NcDi5MclmQecCqwds+WJUnaWTMGelVtA1YAlwG3ABdX1YYkq5KcBJDklUk2A6cA5yTZsCeLliQ92SC/cqGq1gHrJk07s294Pb2uGEnSLPFKUUlqhIEuSY0w0CWpEQa6JDXCQJekRhjoktQIA12SGmGgS1IjDHRJaoSBLkmNMNAlqREGuiQ1wkCXpEYY6JLUCANdkhphoEtSIwx0SWqEgS5JjTDQJakRBrokNcJAl6RGGOiS1AgDXZIaYaBLUiMMdElqhIEuSY0w0CWpEQMFepJlSW5NMp5k5RTz909yUTf/2iSLhl2o
JGnHZgz0JHOA1cAJwBLgtCRLJjX7DeD+qjoC+BPgY8MuVJK0Y4McoS8FxqtqY1U9AqwBTp7U5mTggm74c8Brk2R4ZUqSZjJ3gDbzgU1945uBV03Xpqq2JXkQeC5wT3+jJMuB5d3oQ0lu3ZWi90IHM2lfPF3F71awF71e4GvW2ZdesxdON2OQQB+aqjoXOPep3ObTQZKxqhqd7To0GF+vvY+vWc8gXS5bgIV94wu6aVO2STIX+Eng3mEUKEkazCCBvh5YnOSwJPOAU4G1k9qsBd7eDb8F+HJV1fDKlCTNZMYul65PfAVwGTAHOK+qNiRZBYxV1VrgvwOfSTIO3Ecv9PXP9rlupr2cr9fex9cMiAfSktQGrxSVpEYY6JLUCANd0tNWktOTfHK269hbGOiS1AgDfTclWZXkvX3jf5TkjCTvT7I+ydeT/Kdu3gFJLk1yU5JvJnnr7FUugCSLktyS5NNJNiT52yQ/keTIJNd0r98lSZ4z27W2oNvf3+wb/70kH0lyeZKPJbkuybeTHDPFsicm+VqSg5Ocn+S/Jflqko1J3tK1SZKzu7+vb0z8jSVZneSkbviSJOd1w+/s/manfB88NXtleAz03Xce8OsASfaj95PNu4DF9O6DcyTwc0leAywD7qyqn62qlwH/Z3ZK1iSLgdVV9VLgAeDNwF8CH6yqlwPfAP7jLNa3r5hbVUuB9zJpfyf5FWAl8PqqmrjE/wXA0cAvA2d1095E72/uZ4HjgbOTvAC4Cpj4kJhP70aDdNOu7Ianeh/sVQz03VRVtwP3JjkK+CXgH4FX9g3fAPwLem+WbwCv645EjqmqB2enak3y3aq6sRu+HjgcOKiqruimXQC8ZlYq27d8vvv3emBR3/TjgA8CJ1bV/X3Tv1BVj1fVzcDzu2lHAxdW1WNVdTdwBb2/x6uAY7o7xd4M3N0F/c8DX+2Wnfw+6K9hr/CU3sulYX8BnA78NL0j9tcCf1xV50xumOQVwOuBjyb5UlWteioL1ZQe7ht+DDhotgrZB2xj+wPJZ/YNT7wOj7F9Nn0H+BngRcDYFO0Bdnh316rakuQget+SrwR+CvhV4KGq+kGS5/Lk94FdLvuoS+i9UV5J74ray4B3JjkQIMn8JM9Lcgjwo6r6H8DZwCtmq2Dt0IPA/X39uP+G3pGedt/dwPOSPDfJ/vS6S2ZyB103WJKXztD2KuCtSeYkGaH3zeq6bt419Lpzruza/V73bzM8Qh+CqnokyVeAB6rqMeBvk7wE+Fp3W/iHgF8DjqDXp/c48Cjwm7NVs2b0duBTSZ4FbATeMcv1NKGqHu1uG3IdvZv6fWvA5b6V5G3AXyV5ww6aXkKvG+UmoIAPVNVd3byrgF+qqvEkd9A7Sm8q0L30fwi6k6E3AKdU1W2zXY+kfZNdLrupO8kyDnzJMJc0mzxCl6RGeIQuSY0w0CWpEQa6JDXCQJekRhjoktSI/w/TIK6YopbnBgAAAABJRU5ErkJggg==\n", + "text/plain": [ + "
    " + ] + }, + "metadata": { + "tags": [], + "needs_background": "light" + } + }, + { + "output_type": "stream", + "text": [ + "/content/data/mini_speech_commands/unknown/a527cb3c_nohash_0.wav\n" + ], + "name": "stdout" + }, + { + "output_type": "display_data", + "data": { + "image/png": "iVBORw0KGgoAAAANSUhEUgAAAXQAAAEICAYAAABPgw/pAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4yLjIsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy+WH4yJAAAUd0lEQVR4nO3df7RdZX3n8feHxGAFV7FytZIEQyE6RseCvUa7BixF7ApSwapUGDsVrWbsNFNsrRqnlnFSO0WZZWdNTSvYYUGdkUAd0XRIh7Yqv6pALhTUgMg1gkkoGH6O6AgEvvPH2ZeeXO7NPUlOuOTJ+7XWWdk/nr339+xz7ufs8+yzd1JVSJL2fvvNdgGSpOEw0CWpEQa6JDXCQJekRhjoktQIA12SGmGga7ckOT/JR7vhY5Lcuovr+VSSPxhudTNu81eSbEryUJKjnsptDypJJTlituvQ3mHubBegPS/J7cDzgceAHwJ/A6yoqoeGuZ2qugp48QD1nA68q6qO7lv2PcOsZUD/hd5++OIwVpbkcuAjwLEAVfWRYaz36SrJ+cDl3eixVXX6rBUjwCP0fckbqupA4BXAKPDhyQ2S7Gsf8C8ENuzKgknmDLkWabcZ6PuYqtpC7wj9ZfDEV/rfSnIbcFs37ZeT3JjkgSRfTfLyieWTHJXkhiQ/SHIR8My+eccm2dw3vjDJ55NsTXJvkk8meQnwKeDnu66OB7q2T3TddOPvTjKe5L4ka5Mc0jevkrwnyW1djauTpJt3RJIrkjyY5J6uxu0k2T/JQ8Ac4KYk3+mmvyTJ5d06NyQ5qW+Z85P8eZJ1SX4I/OIg+zvJ6UmunjTtiW6Ubr2rk1za7dNrkxw+zbqO7rqIjh1gP+yX5MNJ7kjy/SR/meQnu3kXJHlfNzx/4j3QjR/e7fP9Jl7PJO/r1vFPSd4xyPPWLKkqH40/gNuB47vhhfSOSv+wGy/g74CfAn4COAr4PvAqeoH39m75/YF5wB3A7wDPAN4CPAp8tFvXscDmbngOcBPwJ8AB9IL/6G7e6cDVk2o8v289xwH30Ps2sT/wp8CVfW0L+N/AQcChwFZgWTfvQuD36R2sPLHNafZLAUd0w88AxoH/0D3P44AfAC/uq+9B4F9NrHvAfT/Vc+3f7vnAvcBSel2g/xNYM7ktsAzYBCwdcD+8s3s+PwMcCHwe+EzfvL/uhv818B3gor55X+x7PbcBq7r983rgR8BzZvs97WPqh0fo+44vdEfDVwNXAP+5b94fV9V9VfX/gOXAOVV1bVU9VlUXAA8Dr+4ezwD+a1U9WlWfA9ZPs72lwCHA+6vqh1X146q6epq2k70NOK+qbqiqh4EP0TuiX9TX5qyqeqCqvgd8BTiym/4ova6UQ3Zym6+mF3xnVdUjVfVlemF5Wl+bL1bVP1TV41X14wHXO4hLquq6qtpGL9CPnDT/FOAc4ISqum7SvOn2w9uAT1TVxuqdK/kQcGrXrXYFcHSS/YDXAB+n90EF8Avd/AmPAqu613sd8BADnCfR7DDQ9x1vrKqDquqFVfXvuvCesKlv+IXA+7qv8A90HwIL6YXzIcCWquq/o9sd02xvIXBHF1I765D+9XaBdC8wv6/NXX3DP6IXxgAfAAJc13WbvHMntrmpqh7vm3bHpG1uYs+Y7rlMeC9wcVV9cyeW3W4fdsNzgedX1XfonRw/
EjiG3gfXnUlezJMD/d5Jr+FU9elpwkAX9L66T9gE/FEX/hOPZ1XVhcA/AfMn+mk7h06zzk3AodOcaJ3pFp930vtgASDJAcBzgS0zPpGqu6rq3VV1CPBvgT8b8Gd/dwILu6PWCYdO2uau3Jr0h8CzJkaS/PQurOMU4I1JztiJZbbbh/Seyzbg7m78CnpdZvOqd17lCnrda88BbtyFGvU0YKBrsk8D70nyqvQckOTEJM8GvkYvFH47yTOSvIle18pUrqP3AXBWt45nJpn4Wn83sCDJvGmWvRB4R5Ijk+xPr3vo2qq6fabik5ySZEE3ej+9EH58B4tMuJbe0ecHuud2LPAGYM0Ay+7ITcBLu+fyTHo/a9xZdwKvBc5I8psDLnMh8DtJDktyIL19eFHf0fYVwArgym788m786qp6bBdq1NOAga7tVNUY8G7gk/QCcZzeiT2q6hHgTd34fcBb6Z1sm2o9j9ELxCOA7wGbu/YAX6Z3YvauJPdMsezfA38A/C96HwqHA6cO+BReCVzb/YplLXBGVW2caaHuub0BOIHeCdk/A369qr414HanW++36Z1U/Ht6vyIatE9/8nq+Ry/UVyZ51wCLnAd8hl5gfxf4MfDv++ZfATybfw70q+l9k7gS7bWyfXeoJGlv5RG6JDXCQJekRhjoktSIgQI9ybIkt3aXYq+cps2vJrm5++3vZ4dbpiRpJjOeFE3vJkTfBl5H75cK64HTqurmvjaLgYuB46rq/iTPq6rv72i9Bx98cC1atGg3y5ekfcv1119/T1WNTDVvkLvrLQXGJ376lWQNcDJwc1+bdwOrq+p+gJnCHGDRokWMjY0NsHlJ0oQk012dPVCXy3y2v+R5M9tfDg3wIuBFSf4hyTVJlk1TyPIkY0nGtm7dOsCmJUmDGtZJ0bnAYnp3ZzsN+HSSgyY3qqpzq2q0qkZHRqb8xiBJ2kWDBPoWejdamrCAJ99TYzOwtrsj23fp9bkvHk6JkqRBDBLo64HF3T0h5tG7BHvtpDZfoPtvt5IcTK8LZsbLrSVJwzNjoHc381kBXAbcQu82nhuSrOr7H10uA+5NcjO9ezK/v6ru3VNFS5KebNbu5TI6Olr+ykWSdk6S66tqdKp5XikqSY0w0CWpEQa6JDVikCtFJe2DFq28dLZLaNbtZ524R9brEbokNcJAl6RGGOiS1AgDXZIaYaBLUiMMdElqhIEuSY0w0CWpEQa6JDXCQJekRhjoktQIA12SGmGgS1IjDHRJaoSBLkmNMNAlqREGuiQ1wkCXpEYY6JLUCANdkhphoEtSIwx0SWqEgS5JjRgo0JMsS3JrkvEkK6eYf3qSrUlu7B7vGn6pkqQdmTtTgyRzgNXA64DNwPoka6vq5klNL6qqFXugRknSAAY5Ql8KjFfVxqp6BFgDnLxny5Ik7axBAn0+sKlvfHM3bbI3J/l6ks8lWTjVipIsTzKWZGzr1q27UK4kaTrDOin618Ciqno58HfABVM1qqpzq2q0qkZHRkaGtGlJEgwW6FuA/iPuBd20J1TVvVX1cDf6F8DPDac8SdKgBgn09cDiJIclmQecCqztb5DkBX2jJwG3DK9ESdIgZvyVS1VtS7ICuAyYA5xXVRuSrALGqmot8NtJTgK2AfcBp+/BmiVJU5gx0AGqah2wbtK0M/uGPwR8aLilSZJ2hleKSlIjDHRJaoSBLkmNMNAlqREGuiQ1wkCXpEYY6JLUCANdkhphoEtSIwx0SWqEgS5JjTDQJakRBrokNcJAl6RGGOiS1AgDXZIaYaBLUiMMdElqhIEuSY0w0CWpEQa6JDXCQJekRhjoktQIA12SGmGgS1IjDHRJaoSBLkmNGCjQkyxLcmuS8SQrd9DuzUkqyejwSpQkDWLGQE8yB1gNnAAsAU5LsmSKds8GzgCuHXaRkqSZDXKEvhQYr6qNVfUIsAY4eYp2fwh8DPjxEOuTJA1okECfD2zqG9/cTXtCklcAC6vq0h2tKMnyJGNJxrZu
3brTxUqSprfbJ0WT7Ad8AnjfTG2r6tyqGq2q0ZGRkd3dtCSpzyCBvgVY2De+oJs24dnAy4DLk9wOvBpY64lRSXpqDRLo64HFSQ5LMg84FVg7MbOqHqyqg6tqUVUtAq4BTqqqsT1SsSRpSjMGelVtA1YAlwG3ABdX1YYkq5KctKcLlCQNZu4gjapqHbBu0rQzp2l77O6XJUnaWV4pKkmNMNAlqREGuiQ1wkCXpEYY6JLUCANdkhphoEtSIwx0SWqEgS5JjTDQJakRBrokNcJAl6RGGOiS1AgDXZIaYaBLUiMMdElqhIEuSY0w0CWpEQa6JDXCQJekRhjoktQIA12SGmGgS1IjDHRJaoSBLkmNMNAlqREGuiQ1YqBAT7Isya1JxpOsnGL+e5J8I8mNSa5OsmT4pUqSdmTGQE8yB1gNnAAsAU6bIrA/W1X/sqqOBD4OfGLolUqSdmiQI/SlwHhVbayqR4A1wMn9Darq//aNHgDU8EqUJA1i7gBt5gOb+sY3A6+a3CjJbwG/C8wDjptqRUmWA8sBDj300J2tVZK0A0M7KVpVq6vqcOCDwIenaXNuVY1W1ejIyMiwNi1JYrBA3wIs7Btf0E2bzhrgjbtTlCRp5w0S6OuBxUkOSzIPOBVY298gyeK+0ROB24ZXoiRpEDP2oVfVtiQrgMuAOcB5VbUhySpgrKrWAiuSHA88CtwPvH1PFi1JerJBTopSVeuAdZOmndk3fMaQ65Ik7SSvFJWkRhjoktQIA12SGmGgS1IjDHRJaoSBLkmNMNAlqREGuiQ1wkCXpEYY6JLUCANdkhphoEtSIwx0SWqEgS5JjTDQJakRBrokNcJAl6RGGOiS1AgDXZIaYaBLUiMMdElqhIEuSY0w0CWpEQa6JDXCQJekRhjoktSIubNdwK5YtPLS2S6hWbefdeIeWa+v2Z6zp14z7X0GOkJPsizJrUnGk6ycYv7vJrk5ydeTfCnJC4dfqiRpR2YM9CRzgNXACcAS4LQkSyY1+0dgtKpeDnwO+PiwC5Uk7dggR+hLgfGq2lhVjwBrgJP7G1TVV6rqR93oNcCC4ZYpSZrJIIE+H9jUN765mzad3wD+ZqoZSZYnGUsytnXr1sGrlCTNaKi/cknya8AocPZU86vq3KoararRkZGRYW5akvZ5g/zKZQuwsG98QTdtO0mOB34f+IWqeng45UmSBjXIEfp6YHGSw5LMA04F1vY3SHIUcA5wUlV9f/hlSpJmMmOgV9U2YAVwGXALcHFVbUiyKslJXbOzgQOBv0pyY5K106xOkrSHDHRhUVWtA9ZNmnZm3/DxQ65LkrSTvPRfkhphoEtSIwx0SWqEgS5JjTDQJakRBrokNcJAl6RGGOiS1AgDXZIaYaBLUiMMdElqhIEuSY0w0CWpEQa6JDXCQJekRhjoktQIA12SGmGgS1IjDHRJaoSBLkmNMNAlqREGuiQ1wkCXpEYY6JLUCANdkhphoEtSIwx0SWrEQIGeZFmSW5OMJ1k5xfzXJLkhybYkbxl+mZKkmcwY6EnmAKuBE4AlwGlJlkxq9j3gdOCzwy5QkjSYuQO0WQqMV9VGgCRrgJOBmycaVNXt3bzH90CNkqQBDNLlMh/Y1De+uZu205IsTzKWZGzr1q27sgpJ0jSe0pOiVXVuVY1W1ejIyMhTuWlJat4ggb4FWNg3vqCbJkl6Ghkk0NcDi5MclmQecCqwds+WJUnaWTMGelVtA1YAlwG3ABdX1YYkq5KcBJDklUk2A6cA5yTZsCeLliQ92SC/cqGq1gHrJk07s294Pb2uGEnSLPFKUUlqhIEuSY0w0CWpEQa6JDXCQJekRhjoktQIA12SGmGgS1IjDHRJaoSBLkmNMNAlqREGuiQ1wkCXpEYY6JLUCANdkhphoEtSIwx0SWqEgS5JjTDQJakRBrokNcJAl6RGGOiS1AgDXZIaYaBLUiMMdElqhIEuSY0w0CWpEQMFepJlSW5NMp5k5RTz909yUTf/2iSLhl2o
JGnHZgz0JHOA1cAJwBLgtCRLJjX7DeD+qjoC+BPgY8MuVJK0Y4McoS8FxqtqY1U9AqwBTp7U5mTggm74c8Brk2R4ZUqSZjJ3gDbzgU1945uBV03Xpqq2JXkQeC5wT3+jJMuB5d3oQ0lu3ZWi90IHM2lfPF3F71awF71e4GvW2ZdesxdON2OQQB+aqjoXOPep3ObTQZKxqhqd7To0GF+vvY+vWc8gXS5bgIV94wu6aVO2STIX+Eng3mEUKEkazCCBvh5YnOSwJPOAU4G1k9qsBd7eDb8F+HJV1fDKlCTNZMYul65PfAVwGTAHOK+qNiRZBYxV1VrgvwOfSTIO3Ecv9PXP9rlupr2cr9fex9cMiAfSktQGrxSVpEYY6JLUCANd0tNWktOTfHK269hbGOiS1AgDfTclWZXkvX3jf5TkjCTvT7I+ydeT/Kdu3gFJLk1yU5JvJnnr7FUugCSLktyS5NNJNiT52yQ/keTIJNd0r98lSZ4z27W2oNvf3+wb/70kH0lyeZKPJbkuybeTHDPFsicm+VqSg5Ocn+S/Jflqko1J3tK1SZKzu7+vb0z8jSVZneSkbviSJOd1w+/s/manfB88NXtleAz03Xce8OsASfaj95PNu4DF9O6DcyTwc0leAywD7qyqn62qlwH/Z3ZK1iSLgdVV9VLgAeDNwF8CH6yqlwPfAP7jLNa3r5hbVUuB9zJpfyf5FWAl8PqqmrjE/wXA0cAvA2d1095E72/uZ4HjgbOTvAC4Cpj4kJhP70aDdNOu7Ianeh/sVQz03VRVtwP3JjkK+CXgH4FX9g3fAPwLem+WbwCv645EjqmqB2enak3y3aq6sRu+HjgcOKiqruimXQC8ZlYq27d8vvv3emBR3/TjgA8CJ1bV/X3Tv1BVj1fVzcDzu2lHAxdW1WNVdTdwBb2/x6uAY7o7xd4M3N0F/c8DX+2Wnfw+6K9hr/CU3sulYX8BnA78NL0j9tcCf1xV50xumOQVwOuBjyb5UlWteioL1ZQe7ht+DDhotgrZB2xj+wPJZ/YNT7wOj7F9Nn0H+BngRcDYFO0Bdnh316rakuQget+SrwR+CvhV4KGq+kGS5/Lk94FdLvuoS+i9UV5J74ray4B3JjkQIMn8JM9Lcgjwo6r6H8DZwCtmq2Dt0IPA/X39uP+G3pGedt/dwPOSPDfJ/vS6S2ZyB103WJKXztD2KuCtSeYkGaH3zeq6bt419Lpzruza/V73bzM8Qh+CqnokyVeAB6rqMeBvk7wE+Fp3W/iHgF8DjqDXp/c48Cjwm7NVs2b0duBTSZ4FbATeMcv1NKGqHu1uG3IdvZv6fWvA5b6V5G3AXyV5ww6aXkKvG+UmoIAPVNVd3byrgF+qqvEkd9A7Sm8q0L30fwi6k6E3AKdU1W2zXY+kfZNdLrupO8kyDnzJMJc0mzxCl6RGeIQuSY0w0CWpEQa6JDXCQJekRhjoktSI/w/TIK6YopbnBgAAAABJRU5ErkJggg==\n", + "text/plain": [ + "
    " + ] + }, + "metadata": { + "tags": [], + "needs_background": "light" + } + }, + { + "output_type": "stream", + "text": [ + "/content/data/mini_speech_commands/unknown/26b28ea7_nohash_0.wav\n" + ], + "name": "stdout" + }, + { + "output_type": "display_data", + "data": { + "image/png": "iVBORw0KGgoAAAANSUhEUgAAAXQAAAEICAYAAABPgw/pAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4yLjIsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy+WH4yJAAAUd0lEQVR4nO3df7RdZX3n8feHxGAFV7FytZIEQyE6RseCvUa7BixF7ApSwapUGDsVrWbsNFNsrRqnlnFSO0WZZWdNTSvYYUGdkUAd0XRIh7Yqv6pALhTUgMg1gkkoGH6O6AgEvvPH2ZeeXO7NPUlOuOTJ+7XWWdk/nr339+xz7ufs8+yzd1JVSJL2fvvNdgGSpOEw0CWpEQa6JDXCQJekRhjoktQIA12SGmGga7ckOT/JR7vhY5Lcuovr+VSSPxhudTNu81eSbEryUJKjnsptDypJJTlituvQ3mHubBegPS/J7cDzgceAHwJ/A6yoqoeGuZ2qugp48QD1nA68q6qO7lv2PcOsZUD/hd5++OIwVpbkcuAjwLEAVfWRYaz36SrJ+cDl3eixVXX6rBUjwCP0fckbqupA4BXAKPDhyQ2S7Gsf8C8ENuzKgknmDLkWabcZ6PuYqtpC7wj9ZfDEV/rfSnIbcFs37ZeT3JjkgSRfTfLyieWTHJXkhiQ/SHIR8My+eccm2dw3vjDJ55NsTXJvkk8meQnwKeDnu66OB7q2T3TddOPvTjKe5L4ka5Mc0jevkrwnyW1djauTpJt3RJIrkjyY5J6uxu0k2T/JQ8Ac4KYk3+mmvyTJ5d06NyQ5qW+Z85P8eZJ1SX4I/OIg+zvJ6UmunjTtiW6Ubr2rk1za7dNrkxw+zbqO7rqIjh1gP+yX5MNJ7kjy/SR/meQnu3kXJHlfNzx/4j3QjR/e7fP9Jl7PJO/r1vFPSd4xyPPWLKkqH40/gNuB47vhhfSOSv+wGy/g74CfAn4COAr4PvAqeoH39m75/YF5wB3A7wDPAN4CPAp8tFvXscDmbngOcBPwJ8AB9IL/6G7e6cDVk2o8v289xwH30Ps2sT/wp8CVfW0L+N/AQcChwFZgWTfvQuD36R2sPLHNafZLAUd0w88AxoH/0D3P44AfAC/uq+9B4F9NrHvAfT/Vc+3f7vnAvcBSel2g/xNYM7ktsAzYBCwdcD+8s3s+PwMcCHwe+EzfvL/uhv818B3gor55X+x7PbcBq7r983rgR8BzZvs97WPqh0fo+44vdEfDVwNXAP+5b94fV9V9VfX/gOXAOVV1bVU9VlUXAA8Dr+4ezwD+a1U9WlWfA9ZPs72lwCHA+6vqh1X146q6epq2k70NOK+qbqiqh4EP0TuiX9TX5qyqeqCqvgd8BTiym/4ova6UQ3Zym6+mF3xnVdUjVfVlemF5Wl+bL1bVP1TV41X14wHXO4hLquq6qtpGL9CPnDT/FOAc4ISqum7SvOn2w9uAT1TVxuqdK/kQcGrXrXYFcHSS/YDXAB+n90EF8Avd/AmPAqu613sd8BADnCfR7DDQ9x1vrKqDquqFVfXvuvCesKlv+IXA+7qv8A90HwIL6YXzIcCWquq/o9sd02xvIXBHF1I765D+9XaBdC8wv6/NXX3DP6IXxgAfAAJc13WbvHMntrmpqh7vm3bHpG1uYs+Y7rlMeC9wcVV9cyeW3W4fdsNzgedX1XfonRw/
EjiG3gfXnUlezJMD/d5Jr+FU9elpwkAX9L66T9gE/FEX/hOPZ1XVhcA/AfMn+mk7h06zzk3AodOcaJ3pFp930vtgASDJAcBzgS0zPpGqu6rq3VV1CPBvgT8b8Gd/dwILu6PWCYdO2uau3Jr0h8CzJkaS/PQurOMU4I1JztiJZbbbh/Seyzbg7m78CnpdZvOqd17lCnrda88BbtyFGvU0YKBrsk8D70nyqvQckOTEJM8GvkYvFH47yTOSvIle18pUrqP3AXBWt45nJpn4Wn83sCDJvGmWvRB4R5Ijk+xPr3vo2qq6fabik5ySZEE3ej+9EH58B4tMuJbe0ecHuud2LPAGYM0Ay+7ITcBLu+fyTHo/a9xZdwKvBc5I8psDLnMh8DtJDktyIL19eFHf0fYVwArgym788m786qp6bBdq1NOAga7tVNUY8G7gk/QCcZzeiT2q6hHgTd34fcBb6Z1sm2o9j9ELxCOA7wGbu/YAX6Z3YvauJPdMsezfA38A/C96HwqHA6cO+BReCVzb/YplLXBGVW2caaHuub0BOIHeCdk/A369qr414HanW++36Z1U/Ht6vyIatE9/8nq+Ry/UVyZ51wCLnAd8hl5gfxf4MfDv++ZfATybfw70q+l9k7gS7bWyfXeoJGlv5RG6JDXCQJekRhjoktSIgQI9ybIkt3aXYq+cps2vJrm5++3vZ4dbpiRpJjOeFE3vJkTfBl5H75cK64HTqurmvjaLgYuB46rq/iTPq6rv72i9Bx98cC1atGg3y5ekfcv1119/T1WNTDVvkLvrLQXGJ376lWQNcDJwc1+bdwOrq+p+gJnCHGDRokWMjY0NsHlJ0oQk012dPVCXy3y2v+R5M9tfDg3wIuBFSf4hyTVJlk1TyPIkY0nGtm7dOsCmJUmDGtZJ0bnAYnp3ZzsN+HSSgyY3qqpzq2q0qkZHRqb8xiBJ2kWDBPoWejdamrCAJ99TYzOwtrsj23fp9bkvHk6JkqRBDBLo64HF3T0h5tG7BHvtpDZfoPtvt5IcTK8LZsbLrSVJwzNjoHc381kBXAbcQu82nhuSrOr7H10uA+5NcjO9ezK/v6ru3VNFS5KebNbu5TI6Olr+ykWSdk6S66tqdKp5XikqSY0w0CWpEQa6JDVikCtFJe2DFq28dLZLaNbtZ524R9brEbokNcJAl6RGGOiS1AgDXZIaYaBLUiMMdElqhIEuSY0w0CWpEQa6JDXCQJekRhjoktQIA12SGmGgS1IjDHRJaoSBLkmNMNAlqREGuiQ1wkCXpEYY6JLUCANdkhphoEtSIwx0SWqEgS5JjRgo0JMsS3JrkvEkK6eYf3qSrUlu7B7vGn6pkqQdmTtTgyRzgNXA64DNwPoka6vq5klNL6qqFXugRknSAAY5Ql8KjFfVxqp6BFgDnLxny5Ik7axBAn0+sKlvfHM3bbI3J/l6ks8lWTjVipIsTzKWZGzr1q27UK4kaTrDOin618Ciqno58HfABVM1qqpzq2q0qkZHRkaGtGlJEgwW6FuA/iPuBd20J1TVvVX1cDf6F8DPDac8SdKgBgn09cDiJIclmQecCqztb5DkBX2jJwG3DK9ESdIgZvyVS1VtS7ICuAyYA5xXVRuSrALGqmot8NtJTgK2AfcBp+/BmiVJU5gx0AGqah2wbtK0M/uGPwR8aLilSZJ2hleKSlIjDHRJaoSBLkmNMNAlqREGuiQ1wkCXpEYY6JLUCANdkhphoEtSIwx0SWqEgS5JjTDQJakRBrokNcJAl6RGGOiS1AgDXZIaYaBLUiMMdElqhIEuSY0w0CWpEQa6JDXCQJekRhjoktQIA12SGmGgS1IjDHRJaoSBLkmNGCjQkyxLcmuS8SQrd9DuzUkqyejwSpQkDWLGQE8yB1gNnAAsAU5LsmSKds8GzgCuHXaRkqSZDXKEvhQYr6qNVfUIsAY4eYp2fwh8DPjxEOuTJA1okECfD2zqG9/cTXtCklcAC6vq0h2tKMnyJGNJxrZu
3brTxUqSprfbJ0WT7Ad8AnjfTG2r6tyqGq2q0ZGRkd3dtCSpzyCBvgVY2De+oJs24dnAy4DLk9wOvBpY64lRSXpqDRLo64HFSQ5LMg84FVg7MbOqHqyqg6tqUVUtAq4BTqqqsT1SsSRpSjMGelVtA1YAlwG3ABdX1YYkq5KctKcLlCQNZu4gjapqHbBu0rQzp2l77O6XJUnaWV4pKkmNMNAlqREGuiQ1wkCXpEYY6JLUCANdkhphoEtSIwx0SWqEgS5JjTDQJakRBrokNcJAl6RGGOiS1AgDXZIaYaBLUiMMdElqhIEuSY0w0CWpEQa6JDXCQJekRhjoktQIA12SGmGgS1IjDHRJaoSBLkmNMNAlqREGuiQ1YqBAT7Isya1JxpOsnGL+e5J8I8mNSa5OsmT4pUqSdmTGQE8yB1gNnAAsAU6bIrA/W1X/sqqOBD4OfGLolUqSdmiQI/SlwHhVbayqR4A1wMn9Darq//aNHgDU8EqUJA1i7gBt5gOb+sY3A6+a3CjJbwG/C8wDjptqRUmWA8sBDj300J2tVZK0A0M7KVpVq6vqcOCDwIenaXNuVY1W1ejIyMiwNi1JYrBA3wIs7Btf0E2bzhrgjbtTlCRp5w0S6OuBxUkOSzIPOBVY298gyeK+0ROB24ZXoiRpEDP2oVfVtiQrgMuAOcB5VbUhySpgrKrWAiuSHA88CtwPvH1PFi1JerJBTopSVeuAdZOmndk3fMaQ65Ik7SSvFJWkRhjoktQIA12SGmGgS1IjDHRJaoSBLkmNMNAlqREGuiQ1wkCXpEYY6JLUCANdkhphoEtSIwx0SWqEgS5JjTDQJakRBrokNcJAl6RGGOiS1AgDXZIaYaBLUiMMdElqhIEuSY0w0CWpEQa6JDXCQJekRhjoktSIubNdwK5YtPLS2S6hWbefdeIeWa+v2Z6zp14z7X0GOkJPsizJrUnGk6ycYv7vJrk5ydeTfCnJC4dfqiRpR2YM9CRzgNXACcAS4LQkSyY1+0dgtKpeDnwO+PiwC5Uk7dggR+hLgfGq2lhVjwBrgJP7G1TVV6rqR93oNcCC4ZYpSZrJIIE+H9jUN765mzad3wD+ZqoZSZYnGUsytnXr1sGrlCTNaKi/cknya8AocPZU86vq3KoararRkZGRYW5akvZ5g/zKZQuwsG98QTdtO0mOB34f+IWqeng45UmSBjXIEfp6YHGSw5LMA04F1vY3SHIUcA5wUlV9f/hlSpJmMmOgV9U2YAVwGXALcHFVbUiyKslJXbOzgQOBv0pyY5K106xOkrSHDHRhUVWtA9ZNmnZm3/DxQ65LkrSTvPRfkhphoEtSIwx0SWqEgS5JjTDQJakRBrokNcJAl6RGGOiS1AgDXZIaYaBLUiMMdElqhIEuSY0w0CWpEQa6JDXCQJekRhjoktQIA12SGmGgS1IjDHRJaoSBLkmNMNAlqREGuiQ1wkCXpEYY6JLUCANdkhphoEtSIwx0SWrEQIGeZFmSW5OMJ1k5xfzXJLkhybYkbxl+mZKkmcwY6EnmAKuBE4AlwGlJlkxq9j3gdOCzwy5QkjSYuQO0WQqMV9VGgCRrgJOBmycaVNXt3bzH90CNkqQBDNLlMh/Y1De+uZu205IsTzKWZGzr1q27sgpJ0jSe0pOiVXVuVY1W1ejIyMhTuWlJat4ggb4FWNg3vqCbJkl6Ghkk0NcDi5MclmQecCqwds+WJUnaWTMGelVtA1YAlwG3ABdX1YYkq5KcBJDklUk2A6cA5yTZsCeLliQ92SC/cqGq1gHrJk07s294Pb2uGEnSLPFKUUlqhIEuSY0w0CWpEQa6JDXCQJekRhjoktQIA12SGmGgS1IjDHRJaoSBLkmNMNAlqREGuiQ1wkCXpEYY6JLUCANdkhphoEtSIwx0SWqEgS5JjTDQJakRBrokNcJAl6RGGOiS1AgDXZIaYaBLUiMMdElqhIEuSY0w0CWpEQMFepJlSW5NMp5k5RTz909yUTf/2iSLhl2o
JGnHZgz0JHOA1cAJwBLgtCRLJjX7DeD+qjoC+BPgY8MuVJK0Y4McoS8FxqtqY1U9AqwBTp7U5mTggm74c8Brk2R4ZUqSZjJ3gDbzgU1945uBV03Xpqq2JXkQeC5wT3+jJMuB5d3oQ0lu3ZWi90IHM2lfPF3F71awF71e4GvW2ZdesxdON2OQQB+aqjoXOPep3ObTQZKxqhqd7To0GF+vvY+vWc8gXS5bgIV94wu6aVO2STIX+Eng3mEUKEkazCCBvh5YnOSwJPOAU4G1k9qsBd7eDb8F+HJV1fDKlCTNZMYul65PfAVwGTAHOK+qNiRZBYxV1VrgvwOfSTIO3Ecv9PXP9rlupr2cr9fex9cMiAfSktQGrxSVpEYY6JLUCANd0tNWktOTfHK269hbGOiS1AgDfTclWZXkvX3jf5TkjCTvT7I+ydeT/Kdu3gFJLk1yU5JvJnnr7FUugCSLktyS5NNJNiT52yQ/keTIJNd0r98lSZ4z27W2oNvf3+wb/70kH0lyeZKPJbkuybeTHDPFsicm+VqSg5Ocn+S/Jflqko1J3tK1SZKzu7+vb0z8jSVZneSkbviSJOd1w+/s/manfB88NXtleAz03Xce8OsASfaj95PNu4DF9O6DcyTwc0leAywD7qyqn62qlwH/Z3ZK1iSLgdVV9VLgAeDNwF8CH6yqlwPfAP7jLNa3r5hbVUuB9zJpfyf5FWAl8PqqmrjE/wXA0cAvA2d1095E72/uZ4HjgbOTvAC4Cpj4kJhP70aDdNOu7Ianeh/sVQz03VRVtwP3JjkK+CXgH4FX9g3fAPwLem+WbwCv645EjqmqB2enak3y3aq6sRu+HjgcOKiqruimXQC8ZlYq27d8vvv3emBR3/TjgA8CJ1bV/X3Tv1BVj1fVzcDzu2lHAxdW1WNVdTdwBb2/x6uAY7o7xd4M3N0F/c8DX+2Wnfw+6K9hr/CU3sulYX8BnA78NL0j9tcCf1xV50xumOQVwOuBjyb5UlWteioL1ZQe7ht+DDhotgrZB2xj+wPJZ/YNT7wOj7F9Nn0H+BngRcDYFO0Bdnh316rakuQget+SrwR+CvhV4KGq+kGS5/Lk94FdLvuoS+i9UV5J74ray4B3JjkQIMn8JM9Lcgjwo6r6H8DZwCtmq2Dt0IPA/X39uP+G3pGedt/dwPOSPDfJ/vS6S2ZyB103WJKXztD2KuCtSeYkGaH3zeq6bt419Lpzruza/V73bzM8Qh+CqnokyVeAB6rqMeBvk7wE+Fp3W/iHgF8DjqDXp/c48Cjwm7NVs2b0duBTSZ4FbATeMcv1NKGqHu1uG3IdvZv6fWvA5b6V5G3AXyV5ww6aXkKvG+UmoIAPVNVd3byrgF+qqvEkd9A7Sm8q0L30fwi6k6E3AKdU1W2zXY+kfZNdLrupO8kyDnzJMJc0mzxCl6RGeIQuSY0w0CWpEQa6JDXCQJekRhjoktSI/w/TIK6YopbnBgAAAABJRU5ErkJggg==\n", + "text/plain": [ + "
    " + ] + }, + "metadata": { + "tags": [], + "needs_background": "light" + } + }, + { + "output_type": "stream", + "text": [ + "/content/data/mini_speech_commands/unknown/d84829e0_nohash_0.wav\n" + ], + "name": "stdout" + }, + { + "output_type": "display_data", + "data": { + "image/png": "iVBORw0KGgoAAAANSUhEUgAAAXQAAAEICAYAAABPgw/pAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4yLjIsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy+WH4yJAAAUd0lEQVR4nO3df7RdZX3n8feHxGAFV7FytZIEQyE6RseCvUa7BixF7ApSwapUGDsVrWbsNFNsrRqnlnFSO0WZZWdNTSvYYUGdkUAd0XRIh7Yqv6pALhTUgMg1gkkoGH6O6AgEvvPH2ZeeXO7NPUlOuOTJ+7XWWdk/nr339+xz7ufs8+yzd1JVSJL2fvvNdgGSpOEw0CWpEQa6JDXCQJekRhjoktQIA12SGmGga7ckOT/JR7vhY5Lcuovr+VSSPxhudTNu81eSbEryUJKjnsptDypJJTlituvQ3mHubBegPS/J7cDzgceAHwJ/A6yoqoeGuZ2qugp48QD1nA68q6qO7lv2PcOsZUD/hd5++OIwVpbkcuAjwLEAVfWRYaz36SrJ+cDl3eixVXX6rBUjwCP0fckbqupA4BXAKPDhyQ2S7Gsf8C8ENuzKgknmDLkWabcZ6PuYqtpC7wj9ZfDEV/rfSnIbcFs37ZeT3JjkgSRfTfLyieWTHJXkhiQ/SHIR8My+eccm2dw3vjDJ55NsTXJvkk8meQnwKeDnu66OB7q2T3TddOPvTjKe5L4ka5Mc0jevkrwnyW1djauTpJt3RJIrkjyY5J6uxu0k2T/JQ8Ac4KYk3+mmvyTJ5d06NyQ5qW+Z85P8eZJ1SX4I/OIg+zvJ6UmunjTtiW6Ubr2rk1za7dNrkxw+zbqO7rqIjh1gP+yX5MNJ7kjy/SR/meQnu3kXJHlfNzx/4j3QjR/e7fP9Jl7PJO/r1vFPSd4xyPPWLKkqH40/gNuB47vhhfSOSv+wGy/g74CfAn4COAr4PvAqeoH39m75/YF5wB3A7wDPAN4CPAp8tFvXscDmbngOcBPwJ8AB9IL/6G7e6cDVk2o8v289xwH30Ps2sT/wp8CVfW0L+N/AQcChwFZgWTfvQuD36R2sPLHNafZLAUd0w88AxoH/0D3P44AfAC/uq+9B4F9NrHvAfT/Vc+3f7vnAvcBSel2g/xNYM7ktsAzYBCwdcD+8s3s+PwMcCHwe+EzfvL/uhv818B3gor55X+x7PbcBq7r983rgR8BzZvs97WPqh0fo+44vdEfDVwNXAP+5b94fV9V9VfX/gOXAOVV1bVU9VlUXAA8Dr+4ezwD+a1U9WlWfA9ZPs72lwCHA+6vqh1X146q6epq2k70NOK+qbqiqh4EP0TuiX9TX5qyqeqCqvgd8BTiym/4ova6UQ3Zym6+mF3xnVdUjVfVlemF5Wl+bL1bVP1TV41X14wHXO4hLquq6qtpGL9CPnDT/FOAc4ISqum7SvOn2w9uAT1TVxuqdK/kQcGrXrXYFcHSS/YDXAB+n90EF8Avd/AmPAqu613sd8BADnCfR7DDQ9x1vrKqDquqFVfXvuvCesKlv+IXA+7qv8A90HwIL6YXzIcCWquq/o9sd02xvIXBHF1I765D+9XaBdC8wv6/NXX3DP6IXxgAfAAJc13WbvHMntrmpqh7vm3bHpG1uYs+Y7rlMeC9wcVV9cyeW3W4fdsNzgedX1XfonRw/
EjiG3gfXnUlezJMD/d5Jr+FU9elpwkAX9L66T9gE/FEX/hOPZ1XVhcA/AfMn+mk7h06zzk3AodOcaJ3pFp930vtgASDJAcBzgS0zPpGqu6rq3VV1CPBvgT8b8Gd/dwILu6PWCYdO2uau3Jr0h8CzJkaS/PQurOMU4I1JztiJZbbbh/Seyzbg7m78CnpdZvOqd17lCnrda88BbtyFGvU0YKBrsk8D70nyqvQckOTEJM8GvkYvFH47yTOSvIle18pUrqP3AXBWt45nJpn4Wn83sCDJvGmWvRB4R5Ijk+xPr3vo2qq6fabik5ySZEE3ej+9EH58B4tMuJbe0ecHuud2LPAGYM0Ay+7ITcBLu+fyTHo/a9xZdwKvBc5I8psDLnMh8DtJDktyIL19eFHf0fYVwArgym788m786qp6bBdq1NOAga7tVNUY8G7gk/QCcZzeiT2q6hHgTd34fcBb6Z1sm2o9j9ELxCOA7wGbu/YAX6Z3YvauJPdMsezfA38A/C96HwqHA6cO+BReCVzb/YplLXBGVW2caaHuub0BOIHeCdk/A369qr414HanW++36Z1U/Ht6vyIatE9/8nq+Ry/UVyZ51wCLnAd8hl5gfxf4MfDv++ZfATybfw70q+l9k7gS7bWyfXeoJGlv5RG6JDXCQJekRhjoktSIgQI9ybIkt3aXYq+cps2vJrm5++3vZ4dbpiRpJjOeFE3vJkTfBl5H75cK64HTqurmvjaLgYuB46rq/iTPq6rv72i9Bx98cC1atGg3y5ekfcv1119/T1WNTDVvkLvrLQXGJ376lWQNcDJwc1+bdwOrq+p+gJnCHGDRokWMjY0NsHlJ0oQk012dPVCXy3y2v+R5M9tfDg3wIuBFSf4hyTVJlk1TyPIkY0nGtm7dOsCmJUmDGtZJ0bnAYnp3ZzsN+HSSgyY3qqpzq2q0qkZHRqb8xiBJ2kWDBPoWejdamrCAJ99TYzOwtrsj23fp9bkvHk6JkqRBDBLo64HF3T0h5tG7BHvtpDZfoPtvt5IcTK8LZsbLrSVJwzNjoHc381kBXAbcQu82nhuSrOr7H10uA+5NcjO9ezK/v6ru3VNFS5KebNbu5TI6Olr+ykWSdk6S66tqdKp5XikqSY0w0CWpEQa6JDVikCtFJe2DFq28dLZLaNbtZ524R9brEbokNcJAl6RGGOiS1AgDXZIaYaBLUiMMdElqhIEuSY0w0CWpEQa6JDXCQJekRhjoktQIA12SGmGgS1IjDHRJaoSBLkmNMNAlqREGuiQ1wkCXpEYY6JLUCANdkhphoEtSIwx0SWqEgS5JjRgo0JMsS3JrkvEkK6eYf3qSrUlu7B7vGn6pkqQdmTtTgyRzgNXA64DNwPoka6vq5klNL6qqFXugRknSAAY5Ql8KjFfVxqp6BFgDnLxny5Ik7axBAn0+sKlvfHM3bbI3J/l6ks8lWTjVipIsTzKWZGzr1q27UK4kaTrDOin618Ciqno58HfABVM1qqpzq2q0qkZHRkaGtGlJEgwW6FuA/iPuBd20J1TVvVX1cDf6F8DPDac8SdKgBgn09cDiJIclmQecCqztb5DkBX2jJwG3DK9ESdIgZvyVS1VtS7ICuAyYA5xXVRuSrALGqmot8NtJTgK2AfcBp+/BmiVJU5gx0AGqah2wbtK0M/uGPwR8aLilSZJ2hleKSlIjDHRJaoSBLkmNMNAlqREGuiQ1wkCXpEYY6JLUCANdkhphoEtSIwx0SWqEgS5JjTDQJakRBrokNcJAl6RGGOiS1AgDXZIaYaBLUiMMdElqhIEuSY0w0CWpEQa6JDXCQJekRhjoktQIA12SGmGgS1IjDHRJaoSBLkmNGCjQkyxLcmuS8SQrd9DuzUkqyejwSpQkDWLGQE8yB1gNnAAsAU5LsmSKds8GzgCuHXaRkqSZDXKEvhQYr6qNVfUIsAY4eYp2fwh8DPjxEOuTJA1okECfD2zqG9/cTXtCklcAC6vq0h2tKMnyJGNJxrZu
3brTxUqSprfbJ0WT7Ad8AnjfTG2r6tyqGq2q0ZGRkd3dtCSpzyCBvgVY2De+oJs24dnAy4DLk9wOvBpY64lRSXpqDRLo64HFSQ5LMg84FVg7MbOqHqyqg6tqUVUtAq4BTqqqsT1SsSRpSjMGelVtA1YAlwG3ABdX1YYkq5KctKcLlCQNZu4gjapqHbBu0rQzp2l77O6XJUnaWV4pKkmNMNAlqREGuiQ1wkCXpEYY6JLUCANdkhphoEtSIwx0SWqEgS5JjTDQJakRBrokNcJAl6RGGOiS1AgDXZIaYaBLUiMMdElqhIEuSY0w0CWpEQa6JDXCQJekRhjoktQIA12SGmGgS1IjDHRJaoSBLkmNMNAlqREGuiQ1YqBAT7Isya1JxpOsnGL+e5J8I8mNSa5OsmT4pUqSdmTGQE8yB1gNnAAsAU6bIrA/W1X/sqqOBD4OfGLolUqSdmiQI/SlwHhVbayqR4A1wMn9Darq//aNHgDU8EqUJA1i7gBt5gOb+sY3A6+a3CjJbwG/C8wDjptqRUmWA8sBDj300J2tVZK0A0M7KVpVq6vqcOCDwIenaXNuVY1W1ejIyMiwNi1JYrBA3wIs7Btf0E2bzhrgjbtTlCRp5w0S6OuBxUkOSzIPOBVY298gyeK+0ROB24ZXoiRpEDP2oVfVtiQrgMuAOcB5VbUhySpgrKrWAiuSHA88CtwPvH1PFi1JerJBTopSVeuAdZOmndk3fMaQ65Ik7SSvFJWkRhjoktQIA12SGmGgS1IjDHRJaoSBLkmNMNAlqREGuiQ1wkCXpEYY6JLUCANdkhphoEtSIwx0SWqEgS5JjTDQJakRBrokNcJAl6RGGOiS1AgDXZIaYaBLUiMMdElqhIEuSY0w0CWpEQa6JDXCQJekRhjoktSIubNdwK5YtPLS2S6hWbefdeIeWa+v2Z6zp14z7X0GOkJPsizJrUnGk6ycYv7vJrk5ydeTfCnJC4dfqiRpR2YM9CRzgNXACcAS4LQkSyY1+0dgtKpeDnwO+PiwC5Uk7dggR+hLgfGq2lhVjwBrgJP7G1TVV6rqR93oNcCC4ZYpSZrJIIE+H9jUN765mzad3wD+ZqoZSZYnGUsytnXr1sGrlCTNaKi/cknya8AocPZU86vq3KoararRkZGRYW5akvZ5g/zKZQuwsG98QTdtO0mOB34f+IWqeng45UmSBjXIEfp6YHGSw5LMA04F1vY3SHIUcA5wUlV9f/hlSpJmMmOgV9U2YAVwGXALcHFVbUiyKslJXbOzgQOBv0pyY5K106xOkrSHDHRhUVWtA9ZNmnZm3/DxQ65LkrSTvPRfkhphoEtSIwx0SWqEgS5JjTDQJakRBrokNcJAl6RGGOiS1AgDXZIaYaBLUiMMdElqhIEuSY0w0CWpEQa6JDXCQJekRhjoktQIA12SGmGgS1IjDHRJaoSBLkmNMNAlqREGuiQ1wkCXpEYY6JLUCANdkhphoEtSIwx0SWrEQIGeZFmSW5OMJ1k5xfzXJLkhybYkbxl+mZKkmcwY6EnmAKuBE4AlwGlJlkxq9j3gdOCzwy5QkjSYuQO0WQqMV9VGgCRrgJOBmycaVNXt3bzH90CNkqQBDNLlMh/Y1De+uZu205IsTzKWZGzr1q27sgpJ0jSe0pOiVXVuVY1W1ejIyMhTuWlJat4ggb4FWNg3vqCbJkl6Ghkk0NcDi5MclmQecCqwds+WJUnaWTMGelVtA1YAlwG3ABdX1YYkq5KcBJDklUk2A6cA5yTZsCeLliQ92SC/cqGq1gHrJk07s294Pb2uGEnSLPFKUUlqhIEuSY0w0CWpEQa6JDXCQJekRhjoktQIA12SGmGgS1IjDHRJaoSBLkmNMNAlqREGuiQ1wkCXpEYY6JLUCANdkhphoEtSIwx0SWqEgS5JjTDQJakRBrokNcJAl6RGGOiS1AgDXZIaYaBLUiMMdElqhIEuSY0w0CWpEQMFepJlSW5NMp5k5RTz909yUTf/2iSLhl2o
JGnHZgz0JHOA1cAJwBLgtCRLJjX7DeD+qjoC+BPgY8MuVJK0Y4McoS8FxqtqY1U9AqwBTp7U5mTggm74c8Brk2R4ZUqSZjJ3gDbzgU1945uBV03Xpqq2JXkQeC5wT3+jJMuB5d3oQ0lu3ZWi90IHM2lfPF3F71awF71e4GvW2ZdesxdON2OQQB+aqjoXOPep3ObTQZKxqhqd7To0GF+vvY+vWc8gXS5bgIV94wu6aVO2STIX+Eng3mEUKEkazCCBvh5YnOSwJPOAU4G1k9qsBd7eDb8F+HJV1fDKlCTNZMYul65PfAVwGTAHOK+qNiRZBYxV1VrgvwOfSTIO3Ecv9PXP9rlupr2cr9fex9cMiAfSktQGrxSVpEYY6JLUCANd0tNWktOTfHK269hbGOiS1AgDfTclWZXkvX3jf5TkjCTvT7I+ydeT/Kdu3gFJLk1yU5JvJnnr7FUugCSLktyS5NNJNiT52yQ/keTIJNd0r98lSZ4z27W2oNvf3+wb/70kH0lyeZKPJbkuybeTHDPFsicm+VqSg5Ocn+S/Jflqko1J3tK1SZKzu7+vb0z8jSVZneSkbviSJOd1w+/s/manfB88NXtleAz03Xce8OsASfaj95PNu4DF9O6DcyTwc0leAywD7qyqn62qlwH/Z3ZK1iSLgdVV9VLgAeDNwF8CH6yqlwPfAP7jLNa3r5hbVUuB9zJpfyf5FWAl8PqqmrjE/wXA0cAvA2d1095E72/uZ4HjgbOTvAC4Cpj4kJhP70aDdNOu7Ianeh/sVQz03VRVtwP3JjkK+CXgH4FX9g3fAPwLem+WbwCv645EjqmqB2enak3y3aq6sRu+HjgcOKiqruimXQC8ZlYq27d8vvv3emBR3/TjgA8CJ1bV/X3Tv1BVj1fVzcDzu2lHAxdW1WNVdTdwBb2/x6uAY7o7xd4M3N0F/c8DX+2Wnfw+6K9hr/CU3sulYX8BnA78NL0j9tcCf1xV50xumOQVwOuBjyb5UlWteioL1ZQe7ht+DDhotgrZB2xj+wPJZ/YNT7wOj7F9Nn0H+BngRcDYFO0Bdnh316rakuQget+SrwR+CvhV4KGq+kGS5/Lk94FdLvuoS+i9UV5J74ray4B3JjkQIMn8JM9Lcgjwo6r6H8DZwCtmq2Dt0IPA/X39uP+G3pGedt/dwPOSPDfJ/vS6S2ZyB103WJKXztD2KuCtSeYkGaH3zeq6bt419Lpzruza/V73bzM8Qh+CqnokyVeAB6rqMeBvk7wE+Fp3W/iHgF8DjqDXp/c48Cjwm7NVs2b0duBTSZ4FbATeMcv1NKGqHu1uG3IdvZv6fWvA5b6V5G3AXyV5ww6aXkKvG+UmoIAPVNVd3byrgF+qqvEkd9A7Sm8q0L30fwi6k6E3AKdU1W2zXY+kfZNdLrupO8kyDnzJMJc0mzxCl6RGeIQuSY0w0CWpEQa6JDXCQJekRhjoktSI/w/TIK6YopbnBgAAAABJRU5ErkJggg==\n", + "text/plain": [ + "
    " + ] + }, + "metadata": { + "tags": [], + "needs_background": "light" + } + }, + { + "output_type": "stream", + "text": [ + "/content/data/mini_speech_commands/unknown/2313e093_nohash_0.wav\n" + ], + "name": "stdout" + }, + { + "output_type": "display_data", + "data": { + "image/png": "iVBORw0KGgoAAAANSUhEUgAAAXQAAAEICAYAAABPgw/pAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4yLjIsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy+WH4yJAAAUd0lEQVR4nO3df7RdZX3n8feHxGAFV7FytZIEQyE6RseCvUa7BixF7ApSwapUGDsVrWbsNFNsrRqnlnFSO0WZZWdNTSvYYUGdkUAd0XRIh7Yqv6pALhTUgMg1gkkoGH6O6AgEvvPH2ZeeXO7NPUlOuOTJ+7XWWdk/nr339+xz7ufs8+yzd1JVSJL2fvvNdgGSpOEw0CWpEQa6JDXCQJekRhjoktQIA12SGmGga7ckOT/JR7vhY5Lcuovr+VSSPxhudTNu81eSbEryUJKjnsptDypJJTlituvQ3mHubBegPS/J7cDzgceAHwJ/A6yoqoeGuZ2qugp48QD1nA68q6qO7lv2PcOsZUD/hd5++OIwVpbkcuAjwLEAVfWRYaz36SrJ+cDl3eixVXX6rBUjwCP0fckbqupA4BXAKPDhyQ2S7Gsf8C8ENuzKgknmDLkWabcZ6PuYqtpC7wj9ZfDEV/rfSnIbcFs37ZeT3JjkgSRfTfLyieWTHJXkhiQ/SHIR8My+eccm2dw3vjDJ55NsTXJvkk8meQnwKeDnu66OB7q2T3TddOPvTjKe5L4ka5Mc0jevkrwnyW1djauTpJt3RJIrkjyY5J6uxu0k2T/JQ8Ac4KYk3+mmvyTJ5d06NyQ5qW+Z85P8eZJ1SX4I/OIg+zvJ6UmunjTtiW6Ubr2rk1za7dNrkxw+zbqO7rqIjh1gP+yX5MNJ7kjy/SR/meQnu3kXJHlfNzx/4j3QjR/e7fP9Jl7PJO/r1vFPSd4xyPPWLKkqH40/gNuB47vhhfSOSv+wGy/g74CfAn4COAr4PvAqeoH39m75/YF5wB3A7wDPAN4CPAp8tFvXscDmbngOcBPwJ8AB9IL/6G7e6cDVk2o8v289xwH30Ps2sT/wp8CVfW0L+N/AQcChwFZgWTfvQuD36R2sPLHNafZLAUd0w88AxoH/0D3P44AfAC/uq+9B4F9NrHvAfT/Vc+3f7vnAvcBSel2g/xNYM7ktsAzYBCwdcD+8s3s+PwMcCHwe+EzfvL/uhv818B3gor55X+x7PbcBq7r983rgR8BzZvs97WPqh0fo+44vdEfDVwNXAP+5b94fV9V9VfX/gOXAOVV1bVU9VlUXAA8Dr+4ezwD+a1U9WlWfA9ZPs72lwCHA+6vqh1X146q6epq2k70NOK+qbqiqh4EP0TuiX9TX5qyqeqCqvgd8BTiym/4ova6UQ3Zym6+mF3xnVdUjVfVlemF5Wl+bL1bVP1TV41X14wHXO4hLquq6qtpGL9CPnDT/FOAc4ISqum7SvOn2w9uAT1TVxuqdK/kQcGrXrXYFcHSS/YDXAB+n90EF8Avd/AmPAqu613sd8BADnCfR7DDQ9x1vrKqDquqFVfXvuvCesKlv+IXA+7qv8A90HwIL6YXzIcCWquq/o9sd02xvIXBHF1I765D+9XaBdC8wv6/NXX3DP6IXxgAfAAJc13WbvHMntrmpqh7vm3bHpG1uYs+Y7rlMeC9wcVV9cyeW3W4fdsNzgedX1XfonRw/
EjiG3gfXnUlezJMD/d5Jr+FU9elpwkAX9L66T9gE/FEX/hOPZ1XVhcA/AfMn+mk7h06zzk3AodOcaJ3pFp930vtgASDJAcBzgS0zPpGqu6rq3VV1CPBvgT8b8Gd/dwILu6PWCYdO2uau3Jr0h8CzJkaS/PQurOMU4I1JztiJZbbbh/Seyzbg7m78CnpdZvOqd17lCnrda88BbtyFGvU0YKBrsk8D70nyqvQckOTEJM8GvkYvFH47yTOSvIle18pUrqP3AXBWt45nJpn4Wn83sCDJvGmWvRB4R5Ijk+xPr3vo2qq6fabik5ySZEE3ej+9EH58B4tMuJbe0ecHuud2LPAGYM0Ay+7ITcBLu+fyTHo/a9xZdwKvBc5I8psDLnMh8DtJDktyIL19eFHf0fYVwArgym788m786qp6bBdq1NOAga7tVNUY8G7gk/QCcZzeiT2q6hHgTd34fcBb6Z1sm2o9j9ELxCOA7wGbu/YAX6Z3YvauJPdMsezfA38A/C96HwqHA6cO+BReCVzb/YplLXBGVW2caaHuub0BOIHeCdk/A369qr414HanW++36Z1U/Ht6vyIatE9/8nq+Ry/UVyZ51wCLnAd8hl5gfxf4MfDv++ZfATybfw70q+l9k7gS7bWyfXeoJGlv5RG6JDXCQJekRhjoktSIgQI9ybIkt3aXYq+cps2vJrm5++3vZ4dbpiRpJjOeFE3vJkTfBl5H75cK64HTqurmvjaLgYuB46rq/iTPq6rv72i9Bx98cC1atGg3y5ekfcv1119/T1WNTDVvkLvrLQXGJ376lWQNcDJwc1+bdwOrq+p+gJnCHGDRokWMjY0NsHlJ0oQk012dPVCXy3y2v+R5M9tfDg3wIuBFSf4hyTVJlk1TyPIkY0nGtm7dOsCmJUmDGtZJ0bnAYnp3ZzsN+HSSgyY3qqpzq2q0qkZHRqb8xiBJ2kWDBPoWejdamrCAJ99TYzOwtrsj23fp9bkvHk6JkqRBDBLo64HF3T0h5tG7BHvtpDZfoPtvt5IcTK8LZsbLrSVJwzNjoHc381kBXAbcQu82nhuSrOr7H10uA+5NcjO9ezK/v6ru3VNFS5KebNbu5TI6Olr+ykWSdk6S66tqdKp5XikqSY0w0CWpEQa6JDVikCtFJe2DFq28dLZLaNbtZ524R9brEbokNcJAl6RGGOiS1AgDXZIaYaBLUiMMdElqhIEuSY0w0CWpEQa6JDXCQJekRhjoktQIA12SGmGgS1IjDHRJaoSBLkmNMNAlqREGuiQ1wkCXpEYY6JLUCANdkhphoEtSIwx0SWqEgS5JjRgo0JMsS3JrkvEkK6eYf3qSrUlu7B7vGn6pkqQdmTtTgyRzgNXA64DNwPoka6vq5klNL6qqFXugRknSAAY5Ql8KjFfVxqp6BFgDnLxny5Ik7axBAn0+sKlvfHM3bbI3J/l6ks8lWTjVipIsTzKWZGzr1q27UK4kaTrDOin618Ciqno58HfABVM1qqpzq2q0qkZHRkaGtGlJEgwW6FuA/iPuBd20J1TVvVX1cDf6F8DPDac8SdKgBgn09cDiJIclmQecCqztb5DkBX2jJwG3DK9ESdIgZvyVS1VtS7ICuAyYA5xXVRuSrALGqmot8NtJTgK2AfcBp+/BmiVJU5gx0AGqah2wbtK0M/uGPwR8aLilSZJ2hleKSlIjDHRJaoSBLkmNMNAlqREGuiQ1wkCXpEYY6JLUCANdkhphoEtSIwx0SWqEgS5JjTDQJakRBrokNcJAl6RGGOiS1AgDXZIaYaBLUiMMdElqhIEuSY0w0CWpEQa6JDXCQJekRhjoktQIA12SGmGgS1IjDHRJaoSBLkmNGCjQkyxLcmuS8SQrd9DuzUkqyejwSpQkDWLGQE8yB1gNnAAsAU5LsmSKds8GzgCuHXaRkqSZDXKEvhQYr6qNVfUIsAY4eYp2fwh8DPjxEOuTJA1okECfD2zqG9/cTXtCklcAC6vq0h2tKMnyJGNJxrZu
3brTxUqSprfbJ0WT7Ad8AnjfTG2r6tyqGq2q0ZGRkd3dtCSpzyCBvgVY2De+oJs24dnAy4DLk9wOvBpY64lRSXpqDRLo64HFSQ5LMg84FVg7MbOqHqyqg6tqUVUtAq4BTqqqsT1SsSRpSjMGelVtA1YAlwG3ABdX1YYkq5KctKcLlCQNZu4gjapqHbBu0rQzp2l77O6XJUnaWV4pKkmNMNAlqREGuiQ1wkCXpEYY6JLUCANdkhphoEtSIwx0SWqEgS5JjTDQJakRBrokNcJAl6RGGOiS1AgDXZIaYaBLUiMMdElqhIEuSY0w0CWpEQa6JDXCQJekRhjoktQIA12SGmGgS1IjDHRJaoSBLkmNMNAlqREGuiQ1YqBAT7Isya1JxpOsnGL+e5J8I8mNSa5OsmT4pUqSdmTGQE8yB1gNnAAsAU6bIrA/W1X/sqqOBD4OfGLolUqSdmiQI/SlwHhVbayqR4A1wMn9Darq//aNHgDU8EqUJA1i7gBt5gOb+sY3A6+a3CjJbwG/C8wDjptqRUmWA8sBDj300J2tVZK0A0M7KVpVq6vqcOCDwIenaXNuVY1W1ejIyMiwNi1JYrBA3wIs7Btf0E2bzhrgjbtTlCRp5w0S6OuBxUkOSzIPOBVY298gyeK+0ROB24ZXoiRpEDP2oVfVtiQrgMuAOcB5VbUhySpgrKrWAiuSHA88CtwPvH1PFi1JerJBTopSVeuAdZOmndk3fMaQ65Ik7SSvFJWkRhjoktQIA12SGmGgS1IjDHRJaoSBLkmNMNAlqREGuiQ1wkCXpEYY6JLUCANdkhphoEtSIwx0SWqEgS5JjTDQJakRBrokNcJAl6RGGOiS1AgDXZIaYaBLUiMMdElqhIEuSY0w0CWpEQa6JDXCQJekRhjoktSIubNdwK5YtPLS2S6hWbefdeIeWa+v2Z6zp14z7X0GOkJPsizJrUnGk6ycYv7vJrk5ydeTfCnJC4dfqiRpR2YM9CRzgNXACcAS4LQkSyY1+0dgtKpeDnwO+PiwC5Uk7dggR+hLgfGq2lhVjwBrgJP7G1TVV6rqR93oNcCC4ZYpSZrJIIE+H9jUN765mzad3wD+ZqoZSZYnGUsytnXr1sGrlCTNaKi/cknya8AocPZU86vq3KoararRkZGRYW5akvZ5g/zKZQuwsG98QTdtO0mOB34f+IWqeng45UmSBjXIEfp6YHGSw5LMA04F1vY3SHIUcA5wUlV9f/hlSpJmMmOgV9U2YAVwGXALcHFVbUiyKslJXbOzgQOBv0pyY5K106xOkrSHDHRhUVWtA9ZNmnZm3/DxQ65LkrSTvPRfkhphoEtSIwx0SWqEgS5JjTDQJakRBrokNcJAl6RGGOiS1AgDXZIaYaBLUiMMdElqhIEuSY0w0CWpEQa6JDXCQJekRhjoktQIA12SGmGgS1IjDHRJaoSBLkmNMNAlqREGuiQ1wkCXpEYY6JLUCANdkhphoEtSIwx0SWrEQIGeZFmSW5OMJ1k5xfzXJLkhybYkbxl+mZKkmcwY6EnmAKuBE4AlwGlJlkxq9j3gdOCzwy5QkjSYuQO0WQqMV9VGgCRrgJOBmycaVNXt3bzH90CNkqQBDNLlMh/Y1De+uZu205IsTzKWZGzr1q27sgpJ0jSe0pOiVXVuVY1W1ejIyMhTuWlJat4ggb4FWNg3vqCbJkl6Ghkk0NcDi5MclmQecCqwds+WJUnaWTMGelVtA1YAlwG3ABdX1YYkq5KcBJDklUk2A6cA5yTZsCeLliQ92SC/cqGq1gHrJk07s294Pb2uGEnSLPFKUUlqhIEuSY0w0CWpEQa6JDXCQJekRhjoktQIA12SGmGgS1IjDHRJaoSBLkmNMNAlqREGuiQ1wkCXpEYY6JLUCANdkhphoEtSIwx0SWqEgS5JjTDQJakRBrokNcJAl6RGGOiS1AgDXZIaYaBLUiMMdElqhIEuSY0w0CWpEQMFepJlSW5NMp5k5RTz909yUTf/2iSLhl2o
JGnHZgz0JHOA1cAJwBLgtCRLJjX7DeD+qjoC+BPgY8MuVJK0Y4McoS8FxqtqY1U9AqwBTp7U5mTggm74c8Brk2R4ZUqSZjJ3gDbzgU1945uBV03Xpqq2JXkQeC5wT3+jJMuB5d3oQ0lu3ZWi90IHM2lfPF3F71awF71e4GvW2ZdesxdON2OQQB+aqjoXOPep3ObTQZKxqhqd7To0GF+vvY+vWc8gXS5bgIV94wu6aVO2STIX+Eng3mEUKEkazCCBvh5YnOSwJPOAU4G1k9qsBd7eDb8F+HJV1fDKlCTNZMYul65PfAVwGTAHOK+qNiRZBYxV1VrgvwOfSTIO3Ecv9PXP9rlupr2cr9fex9cMiAfSktQGrxSVpEYY6JLUCANd0tNWktOTfHK269hbGOiS1AgDfTclWZXkvX3jf5TkjCTvT7I+ydeT/Kdu3gFJLk1yU5JvJnnr7FUugCSLktyS5NNJNiT52yQ/keTIJNd0r98lSZ4z27W2oNvf3+wb/70kH0lyeZKPJbkuybeTHDPFsicm+VqSg5Ocn+S/Jflqko1J3tK1SZKzu7+vb0z8jSVZneSkbviSJOd1w+/s/manfB88NXtleAz03Xce8OsASfaj95PNu4DF9O6DcyTwc0leAywD7qyqn62qlwH/Z3ZK1iSLgdVV9VLgAeDNwF8CH6yqlwPfAP7jLNa3r5hbVUuB9zJpfyf5FWAl8PqqmrjE/wXA0cAvA2d1095E72/uZ4HjgbOTvAC4Cpj4kJhP70aDdNOu7Ianeh/sVQz03VRVtwP3JjkK+CXgH4FX9g3fAPwLem+WbwCv645EjqmqB2enak3y3aq6sRu+HjgcOKiqruimXQC8ZlYq27d8vvv3emBR3/TjgA8CJ1bV/X3Tv1BVj1fVzcDzu2lHAxdW1WNVdTdwBb2/x6uAY7o7xd4M3N0F/c8DX+2Wnfw+6K9hr/CU3sulYX8BnA78NL0j9tcCf1xV50xumOQVwOuBjyb5UlWteioL1ZQe7ht+DDhotgrZB2xj+wPJZ/YNT7wOj7F9Nn0H+BngRcDYFO0Bdnh316rakuQget+SrwR+CvhV4KGq+kGS5/Lk94FdLvuoS+i9UV5J74ray4B3JjkQIMn8JM9Lcgjwo6r6H8DZwCtmq2Dt0IPA/X39uP+G3pGedt/dwPOSPDfJ/vS6S2ZyB103WJKXztD2KuCtSeYkGaH3zeq6bt419Lpzruza/V73bzM8Qh+CqnokyVeAB6rqMeBvk7wE+Fp3W/iHgF8DjqDXp/c48Cjwm7NVs2b0duBTSZ4FbATeMcv1NKGqHu1uG3IdvZv6fWvA5b6V5G3AXyV5ww6aXkKvG+UmoIAPVNVd3byrgF+qqvEkd9A7Sm8q0L30fwi6k6E3AKdU1W2zXY+kfZNdLrupO8kyDnzJMJc0mzxCl6RGeIQuSY0w0CWpEQa6JDXCQJekRhjoktSI/w/TIK6YopbnBgAAAABJRU5ErkJggg==\n", + "text/plain": [ + "
    " + ] + }, + "metadata": { + "tags": [], + "needs_background": "light" + } + }, + { + "output_type": "stream", + "text": [ + "/content/data/mini_speech_commands/unknown/763188c4_nohash_1.wav\n" + ], + "name": "stdout" + }, + { + "output_type": "display_data", + "data": { + "image/png": "iVBORw0KGgoAAAANSUhEUgAAAXQAAAEICAYAAABPgw/pAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4yLjIsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy+WH4yJAAAUd0lEQVR4nO3df7RdZX3n8feHxGAFV7FytZIEQyE6RseCvUa7BixF7ApSwapUGDsVrWbsNFNsrRqnlnFSO0WZZWdNTSvYYUGdkUAd0XRIh7Yqv6pALhTUgMg1gkkoGH6O6AgEvvPH2ZeeXO7NPUlOuOTJ+7XWWdk/nr339+xz7ufs8+yzd1JVSJL2fvvNdgGSpOEw0CWpEQa6JDXCQJekRhjoktQIA12SGmGga7ckOT/JR7vhY5Lcuovr+VSSPxhudTNu81eSbEryUJKjnsptDypJJTlituvQ3mHubBegPS/J7cDzgceAHwJ/A6yoqoeGuZ2qugp48QD1nA68q6qO7lv2PcOsZUD/hd5++OIwVpbkcuAjwLEAVfWRYaz36SrJ+cDl3eixVXX6rBUjwCP0fckbqupA4BXAKPDhyQ2S7Gsf8C8ENuzKgknmDLkWabcZ6PuYqtpC7wj9ZfDEV/rfSnIbcFs37ZeT3JjkgSRfTfLyieWTHJXkhiQ/SHIR8My+eccm2dw3vjDJ55NsTXJvkk8meQnwKeDnu66OB7q2T3TddOPvTjKe5L4ka5Mc0jevkrwnyW1djauTpJt3RJIrkjyY5J6uxu0k2T/JQ8Ac4KYk3+mmvyTJ5d06NyQ5qW+Z85P8eZJ1SX4I/OIg+zvJ6UmunjTtiW6Ubr2rk1za7dNrkxw+zbqO7rqIjh1gP+yX5MNJ7kjy/SR/meQnu3kXJHlfNzx/4j3QjR/e7fP9Jl7PJO/r1vFPSd4xyPPWLKkqH40/gNuB47vhhfSOSv+wGy/g74CfAn4COAr4PvAqeoH39m75/YF5wB3A7wDPAN4CPAp8tFvXscDmbngOcBPwJ8AB9IL/6G7e6cDVk2o8v289xwH30Ps2sT/wp8CVfW0L+N/AQcChwFZgWTfvQuD36R2sPLHNafZLAUd0w88AxoH/0D3P44AfAC/uq+9B4F9NrHvAfT/Vc+3f7vnAvcBSel2g/xNYM7ktsAzYBCwdcD+8s3s+PwMcCHwe+EzfvL/uhv818B3gor55X+x7PbcBq7r983rgR8BzZvs97WPqh0fo+44vdEfDVwNXAP+5b94fV9V9VfX/gOXAOVV1bVU9VlUXAA8Dr+4ezwD+a1U9WlWfA9ZPs72lwCHA+6vqh1X146q6epq2k70NOK+qbqiqh4EP0TuiX9TX5qyqeqCqvgd8BTiym/4ova6UQ3Zym6+mF3xnVdUjVfVlemF5Wl+bL1bVP1TV41X14wHXO4hLquq6qtpGL9CPnDT/FOAc4ISqum7SvOn2w9uAT1TVxuqdK/kQcGrXrXYFcHSS/YDXAB+n90EF8Avd/AmPAqu613sd8BADnCfR7DDQ9x1vrKqDquqFVfXvuvCesKlv+IXA+7qv8A90HwIL6YXzIcCWquq/o9sd02xvIXBHF1I765D+9XaBdC8wv6/NXX3DP6IXxgAfAAJc13WbvHMntrmpqh7vm3bHpG1uYs+Y7rlMeC9wcVV9cyeW3W4fdsNzgedX1XfonRw/
EjiG3gfXnUlezJMD/d5Jr+FU9elpwkAX9L66T9gE/FEX/hOPZ1XVhcA/AfMn+mk7h06zzk3AodOcaJ3pFp930vtgASDJAcBzgS0zPpGqu6rq3VV1CPBvgT8b8Gd/dwILu6PWCYdO2uau3Jr0h8CzJkaS/PQurOMU4I1JztiJZbbbh/Seyzbg7m78CnpdZvOqd17lCnrda88BbtyFGvU0YKBrsk8D70nyqvQckOTEJM8GvkYvFH47yTOSvIle18pUrqP3AXBWt45nJpn4Wn83sCDJvGmWvRB4R5Ijk+xPr3vo2qq6fabik5ySZEE3ej+9EH58B4tMuJbe0ecHuud2LPAGYM0Ay+7ITcBLu+fyTHo/a9xZdwKvBc5I8psDLnMh8DtJDktyIL19eFHf0fYVwArgym788m786qp6bBdq1NOAga7tVNUY8G7gk/QCcZzeiT2q6hHgTd34fcBb6Z1sm2o9j9ELxCOA7wGbu/YAX6Z3YvauJPdMsezfA38A/C96HwqHA6cO+BReCVzb/YplLXBGVW2caaHuub0BOIHeCdk/A369qr414HanW++36Z1U/Ht6vyIatE9/8nq+Ry/UVyZ51wCLnAd8hl5gfxf4MfDv++ZfATybfw70q+l9k7gS7bWyfXeoJGlv5RG6JDXCQJekRhjoktSIgQI9ybIkt3aXYq+cps2vJrm5++3vZ4dbpiRpJjOeFE3vJkTfBl5H75cK64HTqurmvjaLgYuB46rq/iTPq6rv72i9Bx98cC1atGg3y5ekfcv1119/T1WNTDVvkLvrLQXGJ376lWQNcDJwc1+bdwOrq+p+gJnCHGDRokWMjY0NsHlJ0oQk012dPVCXy3y2v+R5M9tfDg3wIuBFSf4hyTVJlk1TyPIkY0nGtm7dOsCmJUmDGtZJ0bnAYnp3ZzsN+HSSgyY3qqpzq2q0qkZHRqb8xiBJ2kWDBPoWejdamrCAJ99TYzOwtrsj23fp9bkvHk6JkqRBDBLo64HF3T0h5tG7BHvtpDZfoPtvt5IcTK8LZsbLrSVJwzNjoHc381kBXAbcQu82nhuSrOr7H10uA+5NcjO9ezK/v6ru3VNFS5KebNbu5TI6Olr+ykWSdk6S66tqdKp5XikqSY0w0CWpEQa6JDVikCtFJe2DFq28dLZLaNbtZ524R9brEbokNcJAl6RGGOiS1AgDXZIaYaBLUiMMdElqhIEuSY0w0CWpEQa6JDXCQJekRhjoktQIA12SGmGgS1IjDHRJaoSBLkmNMNAlqREGuiQ1wkCXpEYY6JLUCANdkhphoEtSIwx0SWqEgS5JjRgo0JMsS3JrkvEkK6eYf3qSrUlu7B7vGn6pkqQdmTtTgyRzgNXA64DNwPoka6vq5klNL6qqFXugRknSAAY5Ql8KjFfVxqp6BFgDnLxny5Ik7axBAn0+sKlvfHM3bbI3J/l6ks8lWTjVipIsTzKWZGzr1q27UK4kaTrDOin618Ciqno58HfABVM1qqpzq2q0qkZHRkaGtGlJEgwW6FuA/iPuBd20J1TVvVX1cDf6F8DPDac8SdKgBgn09cDiJIclmQecCqztb5DkBX2jJwG3DK9ESdIgZvyVS1VtS7ICuAyYA5xXVRuSrALGqmot8NtJTgK2AfcBp+/BmiVJU5gx0AGqah2wbtK0M/uGPwR8aLilSZJ2hleKSlIjDHRJaoSBLkmNMNAlqREGuiQ1wkCXpEYY6JLUCANdkhphoEtSIwx0SWqEgS5JjTDQJakRBrokNcJAl6RGGOiS1AgDXZIaYaBLUiMMdElqhIEuSY0w0CWpEQa6JDXCQJekRhjoktQIA12SGmGgS1IjDHRJaoSBLkmNGCjQkyxLcmuS8SQrd9DuzUkqyejwSpQkDWLGQE8yB1gNnAAsAU5LsmSKds8GzgCuHXaRkqSZDXKEvhQYr6qNVfUIsAY4eYp2fwh8DPjxEOuTJA1okECfD2zqG9/cTXtCklcAC6vq0h2tKMnyJGNJxrZu
3brTxUqSprfbJ0WT7Ad8AnjfTG2r6tyqGq2q0ZGRkd3dtCSpzyCBvgVY2De+oJs24dnAy4DLk9wOvBpY64lRSXpqDRLo64HFSQ5LMg84FVg7MbOqHqyqg6tqUVUtAq4BTqqqsT1SsSRpSjMGelVtA1YAlwG3ABdX1YYkq5KctKcLlCQNZu4gjapqHbBu0rQzp2l77O6XJUnaWV4pKkmNMNAlqREGuiQ1wkCXpEYY6JLUCANdkhphoEtSIwx0SWqEgS5JjTDQJakRBrokNcJAl6RGGOiS1AgDXZIaYaBLUiMMdElqhIEuSY0w0CWpEQa6JDXCQJekRhjoktQIA12SGmGgS1IjDHRJaoSBLkmNMNAlqREGuiQ1YqBAT7Isya1JxpOsnGL+e5J8I8mNSa5OsmT4pUqSdmTGQE8yB1gNnAAsAU6bIrA/W1X/sqqOBD4OfGLolUqSdmiQI/SlwHhVbayqR4A1wMn9Darq//aNHgDU8EqUJA1i7gBt5gOb+sY3A6+a3CjJbwG/C8wDjptqRUmWA8sBDj300J2tVZK0A0M7KVpVq6vqcOCDwIenaXNuVY1W1ejIyMiwNi1JYrBA3wIs7Btf0E2bzhrgjbtTlCRp5w0S6OuBxUkOSzIPOBVY298gyeK+0ROB24ZXoiRpEDP2oVfVtiQrgMuAOcB5VbUhySpgrKrWAiuSHA88CtwPvH1PFi1JerJBTopSVeuAdZOmndk3fMaQ65Ik7SSvFJWkRhjoktQIA12SGmGgS1IjDHRJaoSBLkmNMNAlqREGuiQ1wkCXpEYY6JLUCANdkhphoEtSIwx0SWqEgS5JjTDQJakRBrokNcJAl6RGGOiS1AgDXZIaYaBLUiMMdElqhIEuSY0w0CWpEQa6JDXCQJekRhjoktSIubNdwK5YtPLS2S6hWbefdeIeWa+v2Z6zp14z7X0GOkJPsizJrUnGk6ycYv7vJrk5ydeTfCnJC4dfqiRpR2YM9CRzgNXACcAS4LQkSyY1+0dgtKpeDnwO+PiwC5Uk7dggR+hLgfGq2lhVjwBrgJP7G1TVV6rqR93oNcCC4ZYpSZrJIIE+H9jUN765mzad3wD+ZqoZSZYnGUsytnXr1sGrlCTNaKi/cknya8AocPZU86vq3KoararRkZGRYW5akvZ5g/zKZQuwsG98QTdtO0mOB34f+IWqeng45UmSBjXIEfp6YHGSw5LMA04F1vY3SHIUcA5wUlV9f/hlSpJmMmOgV9U2YAVwGXALcHFVbUiyKslJXbOzgQOBv0pyY5K106xOkrSHDHRhUVWtA9ZNmnZm3/DxQ65LkrSTvPRfkhphoEtSIwx0SWqEgS5JjTDQJakRBrokNcJAl6RGGOiS1AgDXZIaYaBLUiMMdElqhIEuSY0w0CWpEQa6JDXCQJekRhjoktQIA12SGmGgS1IjDHRJaoSBLkmNMNAlqREGuiQ1wkCXpEYY6JLUCANdkhphoEtSIwx0SWrEQIGeZFmSW5OMJ1k5xfzXJLkhybYkbxl+mZKkmcwY6EnmAKuBE4AlwGlJlkxq9j3gdOCzwy5QkjSYuQO0WQqMV9VGgCRrgJOBmycaVNXt3bzH90CNkqQBDNLlMh/Y1De+uZu205IsTzKWZGzr1q27sgpJ0jSe0pOiVXVuVY1W1ejIyMhTuWlJat4ggb4FWNg3vqCbJkl6Ghkk0NcDi5MclmQecCqwds+WJUnaWTMGelVtA1YAlwG3ABdX1YYkq5KcBJDklUk2A6cA5yTZsCeLliQ92SC/cqGq1gHrJk07s294Pb2uGEnSLPFKUUlqhIEuSY0w0CWpEQa6JDXCQJekRhjoktQIA12SGmGgS1IjDHRJaoSBLkmNMNAlqREGuiQ1wkCXpEYY6JLUCANdkhphoEtSIwx0SWqEgS5JjTDQJakRBrokNcJAl6RGGOiS1AgDXZIaYaBLUiMMdElqhIEuSY0w0CWpEQMFepJlSW5NMp5k5RTz909yUTf/2iSLhl2o
JGnHZgz0JHOA1cAJwBLgtCRLJjX7DeD+qjoC+BPgY8MuVJK0Y4McoS8FxqtqY1U9AqwBTp7U5mTggm74c8Brk2R4ZUqSZjJ3gDbzgU1945uBV03Xpqq2JXkQeC5wT3+jJMuB5d3oQ0lu3ZWi90IHM2lfPF3F71awF71e4GvW2ZdesxdON2OQQB+aqjoXOPep3ObTQZKxqhqd7To0GF+vvY+vWc8gXS5bgIV94wu6aVO2STIX+Eng3mEUKEkazCCBvh5YnOSwJPOAU4G1k9qsBd7eDb8F+HJV1fDKlCTNZMYul65PfAVwGTAHOK+qNiRZBYxV1VrgvwOfSTIO3Ecv9PXP9rlupr2cr9fex9cMiAfSktQGrxSVpEYY6JLUCANd0tNWktOTfHK269hbGOiS1AgDfTclWZXkvX3jf5TkjCTvT7I+ydeT/Kdu3gFJLk1yU5JvJnnr7FUugCSLktyS5NNJNiT52yQ/keTIJNd0r98lSZ4z27W2oNvf3+wb/70kH0lyeZKPJbkuybeTHDPFsicm+VqSg5Ocn+S/Jflqko1J3tK1SZKzu7+vb0z8jSVZneSkbviSJOd1w+/s/manfB88NXtleAz03Xce8OsASfaj95PNu4DF9O6DcyTwc0leAywD7qyqn62qlwH/Z3ZK1iSLgdVV9VLgAeDNwF8CH6yqlwPfAP7jLNa3r5hbVUuB9zJpfyf5FWAl8PqqmrjE/wXA0cAvA2d1095E72/uZ4HjgbOTvAC4Cpj4kJhP70aDdNOu7Ianeh/sVQz03VRVtwP3JjkK+CXgH4FX9g3fAPwLem+WbwCv645EjqmqB2enak3y3aq6sRu+HjgcOKiqruimXQC8ZlYq27d8vvv3emBR3/TjgA8CJ1bV/X3Tv1BVj1fVzcDzu2lHAxdW1WNVdTdwBb2/x6uAY7o7xd4M3N0F/c8DX+2Wnfw+6K9hr/CU3sulYX8BnA78NL0j9tcCf1xV50xumOQVwOuBjyb5UlWteioL1ZQe7ht+DDhotgrZB2xj+wPJZ/YNT7wOj7F9Nn0H+BngRcDYFO0Bdnh316rakuQget+SrwR+CvhV4KGq+kGS5/Lk94FdLvuoS+i9UV5J74ray4B3JjkQIMn8JM9Lcgjwo6r6H8DZwCtmq2Dt0IPA/X39uP+G3pGedt/dwPOSPDfJ/vS6S2ZyB103WJKXztD2KuCtSeYkGaH3zeq6bt419Lpzruza/V73bzM8Qh+CqnokyVeAB6rqMeBvk7wE+Fp3W/iHgF8DjqDXp/c48Cjwm7NVs2b0duBTSZ4FbATeMcv1NKGqHu1uG3IdvZv6fWvA5b6V5G3AXyV5ww6aXkKvG+UmoIAPVNVd3byrgF+qqvEkd9A7Sm8q0L30fwi6k6E3AKdU1W2zXY+kfZNdLrupO8kyDnzJMJc0mzxCl6RGeIQuSY0w0CWpEQa6JDXCQJekRhjoktSI/w/TIK6YopbnBgAAAABJRU5ErkJggg==\n", + "text/plain": [ + "
    " + ] + }, + "metadata": { + "tags": [], + "needs_background": "light" + } + }, + { + "output_type": "stream", + "text": [ + "/content/data/mini_speech_commands/unknown/7ea032f3_nohash_3.wav\n" + ], + "name": "stdout" + }, + { + "output_type": "display_data", + "data": { + "image/png": "iVBORw0KGgoAAAANSUhEUgAAAXQAAAEICAYAAABPgw/pAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4yLjIsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy+WH4yJAAAUd0lEQVR4nO3df7RdZX3n8feHxGAFV7FytZIEQyE6RseCvUa7BixF7ApSwapUGDsVrWbsNFNsrRqnlnFSO0WZZWdNTSvYYUGdkUAd0XRIh7Yqv6pALhTUgMg1gkkoGH6O6AgEvvPH2ZeeXO7NPUlOuOTJ+7XWWdk/nr339+xz7ufs8+yzd1JVSJL2fvvNdgGSpOEw0CWpEQa6JDXCQJekRhjoktQIA12SGmGga7ckOT/JR7vhY5Lcuovr+VSSPxhudTNu81eSbEryUJKjnsptDypJJTlituvQ3mHubBegPS/J7cDzgceAHwJ/A6yoqoeGuZ2qugp48QD1nA68q6qO7lv2PcOsZUD/hd5++OIwVpbkcuAjwLEAVfWRYaz36SrJ+cDl3eixVXX6rBUjwCP0fckbqupA4BXAKPDhyQ2S7Gsf8C8ENuzKgknmDLkWabcZ6PuYqtpC7wj9ZfDEV/rfSnIbcFs37ZeT3JjkgSRfTfLyieWTHJXkhiQ/SHIR8My+eccm2dw3vjDJ55NsTXJvkk8meQnwKeDnu66OB7q2T3TddOPvTjKe5L4ka5Mc0jevkrwnyW1djauTpJt3RJIrkjyY5J6uxu0k2T/JQ8Ac4KYk3+mmvyTJ5d06NyQ5qW+Z85P8eZJ1SX4I/OIg+zvJ6UmunjTtiW6Ubr2rk1za7dNrkxw+zbqO7rqIjh1gP+yX5MNJ7kjy/SR/meQnu3kXJHlfNzx/4j3QjR/e7fP9Jl7PJO/r1vFPSd4xyPPWLKkqH40/gNuB47vhhfSOSv+wGy/g74CfAn4COAr4PvAqeoH39m75/YF5wB3A7wDPAN4CPAp8tFvXscDmbngOcBPwJ8AB9IL/6G7e6cDVk2o8v289xwH30Ps2sT/wp8CVfW0L+N/AQcChwFZgWTfvQuD36R2sPLHNafZLAUd0w88AxoH/0D3P44AfAC/uq+9B4F9NrHvAfT/Vc+3f7vnAvcBSel2g/xNYM7ktsAzYBCwdcD+8s3s+PwMcCHwe+EzfvL/uhv818B3gor55X+x7PbcBq7r983rgR8BzZvs97WPqh0fo+44vdEfDVwNXAP+5b94fV9V9VfX/gOXAOVV1bVU9VlUXAA8Dr+4ezwD+a1U9WlWfA9ZPs72lwCHA+6vqh1X146q6epq2k70NOK+qbqiqh4EP0TuiX9TX5qyqeqCqvgd8BTiym/4ova6UQ3Zym6+mF3xnVdUjVfVlemF5Wl+bL1bVP1TV41X14wHXO4hLquq6qtpGL9CPnDT/FOAc4ISqum7SvOn2w9uAT1TVxuqdK/kQcGrXrXYFcHSS/YDXAB+n90EF8Avd/AmPAqu613sd8BADnCfR7DDQ9x1vrKqDquqFVfXvuvCesKlv+IXA+7qv8A90HwIL6YXzIcCWquq/o9sd02xvIXBHF1I765D+9XaBdC8wv6/NXX3DP6IXxgAfAAJc13WbvHMntrmpqh7vm3bHpG1uYs+Y7rlMeC9wcVV9cyeW3W4fdsNzgedX1XfonRw/
EjiG3gfXnUlezJMD/d5Jr+FU9elpwkAX9L66T9gE/FEX/hOPZ1XVhcA/AfMn+mk7h06zzk3AodOcaJ3pFp930vtgASDJAcBzgS0zPpGqu6rq3VV1CPBvgT8b8Gd/dwILu6PWCYdO2uau3Jr0h8CzJkaS/PQurOMU4I1JztiJZbbbh/Seyzbg7m78CnpdZvOqd17lCnrda88BbtyFGvU0YKBrsk8D70nyqvQckOTEJM8GvkYvFH47yTOSvIle18pUrqP3AXBWt45nJpn4Wn83sCDJvGmWvRB4R5Ijk+xPr3vo2qq6fabik5ySZEE3ej+9EH58B4tMuJbe0ecHuud2LPAGYM0Ay+7ITcBLu+fyTHo/a9xZdwKvBc5I8psDLnMh8DtJDktyIL19eFHf0fYVwArgym788m786qp6bBdq1NOAga7tVNUY8G7gk/QCcZzeiT2q6hHgTd34fcBb6Z1sm2o9j9ELxCOA7wGbu/YAX6Z3YvauJPdMsezfA38A/C96HwqHA6cO+BReCVzb/YplLXBGVW2caaHuub0BOIHeCdk/A369qr414HanW++36Z1U/Ht6vyIatE9/8nq+Ry/UVyZ51wCLnAd8hl5gfxf4MfDv++ZfATybfw70q+l9k7gS7bWyfXeoJGlv5RG6JDXCQJekRhjoktSIgQI9ybIkt3aXYq+cps2vJrm5++3vZ4dbpiRpJjOeFE3vJkTfBl5H75cK64HTqurmvjaLgYuB46rq/iTPq6rv72i9Bx98cC1atGg3y5ekfcv1119/T1WNTDVvkLvrLQXGJ376lWQNcDJwc1+bdwOrq+p+gJnCHGDRokWMjY0NsHlJ0oQk012dPVCXy3y2v+R5M9tfDg3wIuBFSf4hyTVJlk1TyPIkY0nGtm7dOsCmJUmDGtZJ0bnAYnp3ZzsN+HSSgyY3qqpzq2q0qkZHRqb8xiBJ2kWDBPoWejdamrCAJ99TYzOwtrsj23fp9bkvHk6JkqRBDBLo64HF3T0h5tG7BHvtpDZfoPtvt5IcTK8LZsbLrSVJwzNjoHc381kBXAbcQu82nhuSrOr7H10uA+5NcjO9ezK/v6ru3VNFS5KebNbu5TI6Olr+ykWSdk6S66tqdKp5XikqSY0w0CWpEQa6JDVikCtFJe2DFq28dLZLaNbtZ524R9brEbokNcJAl6RGGOiS1AgDXZIaYaBLUiMMdElqhIEuSY0w0CWpEQa6JDXCQJekRhjoktQIA12SGmGgS1IjDHRJaoSBLkmNMNAlqREGuiQ1wkCXpEYY6JLUCANdkhphoEtSIwx0SWqEgS5JjRgo0JMsS3JrkvEkK6eYf3qSrUlu7B7vGn6pkqQdmTtTgyRzgNXA64DNwPoka6vq5klNL6qqFXugRknSAAY5Ql8KjFfVxqp6BFgDnLxny5Ik7axBAn0+sKlvfHM3bbI3J/l6ks8lWTjVipIsTzKWZGzr1q27UK4kaTrDOin618Ciqno58HfABVM1qqpzq2q0qkZHRkaGtGlJEgwW6FuA/iPuBd20J1TVvVX1cDf6F8DPDac8SdKgBgn09cDiJIclmQecCqztb5DkBX2jJwG3DK9ESdIgZvyVS1VtS7ICuAyYA5xXVRuSrALGqmot8NtJTgK2AfcBp+/BmiVJU5gx0AGqah2wbtK0M/uGPwR8aLilSZJ2hleKSlIjDHRJaoSBLkmNMNAlqREGuiQ1wkCXpEYY6JLUCANdkhphoEtSIwx0SWqEgS5JjTDQJakRBrokNcJAl6RGGOiS1AgDXZIaYaBLUiMMdElqhIEuSY0w0CWpEQa6JDXCQJekRhjoktQIA12SGmGgS1IjDHRJaoSBLkmNGCjQkyxLcmuS8SQrd9DuzUkqyejwSpQkDWLGQE8yB1gNnAAsAU5LsmSKds8GzgCuHXaRkqSZDXKEvhQYr6qNVfUIsAY4eYp2fwh8DPjxEOuTJA1okECfD2zqG9/cTXtCklcAC6vq0h2tKMnyJGNJxrZu
3brTxUqSprfbJ0WT7Ad8AnjfTG2r6tyqGq2q0ZGRkd3dtCSpzyCBvgVY2De+oJs24dnAy4DLk9wOvBpY64lRSXpqDRLo64HFSQ5LMg84FVg7MbOqHqyqg6tqUVUtAq4BTqqqsT1SsSRpSjMGelVtA1YAlwG3ABdX1YYkq5KctKcLlCQNZu4gjapqHbBu0rQzp2l77O6XJUnaWV4pKkmNMNAlqREGuiQ1wkCXpEYY6JLUCANdkhphoEtSIwx0SWqEgS5JjTDQJakRBrokNcJAl6RGGOiS1AgDXZIaYaBLUiMMdElqhIEuSY0w0CWpEQa6JDXCQJekRhjoktQIA12SGmGgS1IjDHRJaoSBLkmNMNAlqREGuiQ1YqBAT7Isya1JxpOsnGL+e5J8I8mNSa5OsmT4pUqSdmTGQE8yB1gNnAAsAU6bIrA/W1X/sqqOBD4OfGLolUqSdmiQI/SlwHhVbayqR4A1wMn9Darq//aNHgDU8EqUJA1i7gBt5gOb+sY3A6+a3CjJbwG/C8wDjptqRUmWA8sBDj300J2tVZK0A0M7KVpVq6vqcOCDwIenaXNuVY1W1ejIyMiwNi1JYrBA3wIs7Btf0E2bzhrgjbtTlCRp5w0S6OuBxUkOSzIPOBVY298gyeK+0ROB24ZXoiRpEDP2oVfVtiQrgMuAOcB5VbUhySpgrKrWAiuSHA88CtwPvH1PFi1JerJBTopSVeuAdZOmndk3fMaQ65Ik7SSvFJWkRhjoktQIA12SGmGgS1IjDHRJaoSBLkmNMNAlqREGuiQ1wkCXpEYY6JLUCANdkhphoEtSIwx0SWqEgS5JjTDQJakRBrokNcJAl6RGGOiS1AgDXZIaYaBLUiMMdElqhIEuSY0w0CWpEQa6JDXCQJekRhjoktSIubNdwK5YtPLS2S6hWbefdeIeWa+v2Z6zp14z7X0GOkJPsizJrUnGk6ycYv7vJrk5ydeTfCnJC4dfqiRpR2YM9CRzgNXACcAS4LQkSyY1+0dgtKpeDnwO+PiwC5Uk7dggR+hLgfGq2lhVjwBrgJP7G1TVV6rqR93oNcCC4ZYpSZrJIIE+H9jUN765mzad3wD+ZqoZSZYnGUsytnXr1sGrlCTNaKi/cknya8AocPZU86vq3KoararRkZGRYW5akvZ5g/zKZQuwsG98QTdtO0mOB34f+IWqeng45UmSBjXIEfp6YHGSw5LMA04F1vY3SHIUcA5wUlV9f/hlSpJmMmOgV9U2YAVwGXALcHFVbUiyKslJXbOzgQOBv0pyY5K106xOkrSHDHRhUVWtA9ZNmnZm3/DxQ65LkrSTvPRfkhphoEtSIwx0SWqEgS5JjTDQJakRBrokNcJAl6RGGOiS1AgDXZIaYaBLUiMMdElqhIEuSY0w0CWpEQa6JDXCQJekRhjoktQIA12SGmGgS1IjDHRJaoSBLkmNMNAlqREGuiQ1wkCXpEYY6JLUCANdkhphoEtSIwx0SWrEQIGeZFmSW5OMJ1k5xfzXJLkhybYkbxl+mZKkmcwY6EnmAKuBE4AlwGlJlkxq9j3gdOCzwy5QkjSYuQO0WQqMV9VGgCRrgJOBmycaVNXt3bzH90CNkqQBDNLlMh/Y1De+uZu205IsTzKWZGzr1q27sgpJ0jSe0pOiVXVuVY1W1ejIyMhTuWlJat4ggb4FWNg3vqCbJkl6Ghkk0NcDi5MclmQecCqwds+WJUnaWTMGelVtA1YAlwG3ABdX1YYkq5KcBJDklUk2A6cA5yTZsCeLliQ92SC/cqGq1gHrJk07s294Pb2uGEnSLPFKUUlqhIEuSY0w0CWpEQa6JDXCQJekRhjoktQIA12SGmGgS1IjDHRJaoSBLkmNMNAlqREGuiQ1wkCXpEYY6JLUCANdkhphoEtSIwx0SWqEgS5JjTDQJakRBrokNcJAl6RGGOiS1AgDXZIaYaBLUiMMdElqhIEuSY0w0CWpEQMFepJlSW5NMp5k5RTz909yUTf/2iSLhl2o
JGnHZgz0JHOA1cAJwBLgtCRLJjX7DeD+qjoC+BPgY8MuVJK0Y4McoS8FxqtqY1U9AqwBTp7U5mTggm74c8Brk2R4ZUqSZjJ3gDbzgU1945uBV03Xpqq2JXkQeC5wT3+jJMuB5d3oQ0lu3ZWi90IHM2lfPF3F71awF71e4GvW2ZdesxdON2OQQB+aqjoXOPep3ObTQZKxqhqd7To0GF+vvY+vWc8gXS5bgIV94wu6aVO2STIX+Eng3mEUKEkazCCBvh5YnOSwJPOAU4G1k9qsBd7eDb8F+HJV1fDKlCTNZMYul65PfAVwGTAHOK+qNiRZBYxV1VrgvwOfSTIO3Ecv9PXP9rlupr2cr9fex9cMiAfSktQGrxSVpEYY6JLUCANd0tNWktOTfHK269hbGOiS1AgDfTclWZXkvX3jf5TkjCTvT7I+ydeT/Kdu3gFJLk1yU5JvJnnr7FUugCSLktyS5NNJNiT52yQ/keTIJNd0r98lSZ4z27W2oNvf3+wb/70kH0lyeZKPJbkuybeTHDPFsicm+VqSg5Ocn+S/Jflqko1J3tK1SZKzu7+vb0z8jSVZneSkbviSJOd1w+/s/manfB88NXtleAz03Xce8OsASfaj95PNu4DF9O6DcyTwc0leAywD7qyqn62qlwH/Z3ZK1iSLgdVV9VLgAeDNwF8CH6yqlwPfAP7jLNa3r5hbVUuB9zJpfyf5FWAl8PqqmrjE/wXA0cAvA2d1095E72/uZ4HjgbOTvAC4Cpj4kJhP70aDdNOu7Ianeh/sVQz03VRVtwP3JjkK+CXgH4FX9g3fAPwLem+WbwCv645EjqmqB2enak3y3aq6sRu+HjgcOKiqruimXQC8ZlYq27d8vvv3emBR3/TjgA8CJ1bV/X3Tv1BVj1fVzcDzu2lHAxdW1WNVdTdwBb2/x6uAY7o7xd4M3N0F/c8DX+2Wnfw+6K9hr/CU3sulYX8BnA78NL0j9tcCf1xV50xumOQVwOuBjyb5UlWteioL1ZQe7ht+DDhotgrZB2xj+wPJZ/YNT7wOj7F9Nn0H+BngRcDYFO0Bdnh316rakuQget+SrwR+CvhV4KGq+kGS5/Lk94FdLvuoS+i9UV5J74ray4B3JjkQIMn8JM9Lcgjwo6r6H8DZwCtmq2Dt0IPA/X39uP+G3pGedt/dwPOSPDfJ/vS6S2ZyB103WJKXztD2KuCtSeYkGaH3zeq6bt419Lpzruza/V73bzM8Qh+CqnokyVeAB6rqMeBvk7wE+Fp3W/iHgF8DjqDXp/c48Cjwm7NVs2b0duBTSZ4FbATeMcv1NKGqHu1uG3IdvZv6fWvA5b6V5G3AXyV5ww6aXkKvG+UmoIAPVNVd3byrgF+qqvEkd9A7Sm8q0L30fwi6k6E3AKdU1W2zXY+kfZNdLrupO8kyDnzJMJc0mzxCl6RGeIQuSY0w0CWpEQa6JDXCQJekRhjoktSI/w/TIK6YopbnBgAAAABJRU5ErkJggg==\n", + "text/plain": [ + "
    " + ] + }, + "metadata": { + "tags": [], + "needs_background": "light" + } + } + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "E8IcErTDyudr" + }, + "source": [ + "##Generate TF Lite float32 model" + ] + }, + { + "cell_type": "code", + "metadata": { + "id": "nlP5R7Y7ytYU", + "colab": { + "base_uri": "https://localhost:8080/" + }, + "outputId": "7b8fee49-7b77-4bf5-fe17-aa593fafaa52" + }, + "source": [ + "import tensorflow\n", + "print(tensorflow.__version__)\n", + "\n", + "run_model = tf.function(lambda x: model(x))\n", + "# This is important, let's fix the input size.\n", + "BATCH_SIZE = 1\n", + "STEPS = 49\n", + "INPUT_SIZE = 257\n", + "\n", + "concrete_func = run_model.get_concrete_function(\n", + " tf.TensorSpec([BATCH_SIZE, STEPS, INPUT_SIZE], model.inputs[0].dtype))\n", + "\n", + "# model directory.\n", + "MODEL_DIR = \"keras_lstm\"\n", + "model.save(MODEL_DIR, save_format=\"tf\", signatures=concrete_func)\n", + "\n", + "#Float LSTM model\n", + "converter = tf.lite.TFLiteConverter.from_saved_model(MODEL_DIR)\n", + "tflite_float_model = converter.convert()\n", + "open('/content/keras_lstm/model_float.tflite', \"wb\").write(tflite_float_model)\n" + ], + "execution_count": 143, + "outputs": [ + { + "output_type": "stream", + "text": [ + "2.5.0\n" + ], + "name": "stdout" + }, + { + "output_type": "stream", + "text": [ + "WARNING:absl:Found untraced functions such as lstm_cell_3_layer_call_and_return_conditional_losses, lstm_cell_3_layer_call_fn, lstm_cell_3_layer_call_fn, lstm_cell_3_layer_call_and_return_conditional_losses, lstm_cell_3_layer_call_and_return_conditional_losses while saving (showing 5 of 5). 
These functions will not be directly callable after loading.\n" + ], + "name": "stderr" + }, + { + "output_type": "stream", + "text": [ + "INFO:tensorflow:Assets written to: keras_lstm/assets\n" + ], + "name": "stdout" + }, + { + "output_type": "stream", + "text": [ + "INFO:tensorflow:Assets written to: keras_lstm/assets\n" + ], + "name": "stderr" + }, + { + "output_type": "execute_result", + "data": { + "text/plain": [ + "483000" + ] + }, + "metadata": { + "tags": [] + }, + "execution_count": 143 + } + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "6lo_G30v0raq" + }, + "source": [ + "##Validate audio files using TFLite float32 model" + ] + }, + { + "cell_type": "code", + "metadata": { + "id": "t807WqXQ0qEJ", + "colab": { + "base_uri": "https://localhost:8080/" + }, + "outputId": "16c5640e-e982-47ad-b25a-c414bc2e2780" + }, + "source": [ + "# Load quantized TFLite model\n", + "tflite_interpreter_float = tf.lite.Interpreter(model_path='/content/keras_lstm/model_float.tflite')\n", + "# Learn about its input and output details\n", + "input_details = tflite_interpreter_float.get_input_details()\n", + "output_details = tflite_interpreter_float.get_output_details()\n", + "tflite_interpreter_float.allocate_tensors()\n", + "\n", + "import glob\n", + "import pandas as pd\n", + "pd.set_option(\"display.precision\", 2)\n", + "\n", + "txtfiles = []\n", + "for file in glob.glob(\"/content/data/mini_speech_commands/no/*.wav\"):\n", + " txtfiles.append(file)\n", + "\n", + "for i in range(25):\n", + " sample_ds = preprocess_dataset([str(txtfiles[i])])\n", + " print(txtfiles[i])\n", + " # Run inference\n", + " for spectrogram, label in sample_ds.batch(1):\n", + " tflite_interpreter_float.set_tensor(input_details[0]['index'],np.array(spectrogram, dtype=np.float32).reshape(1,49, 257) )\n", + " tflite_interpreter_float.invoke()\n", + " tflite_float_model_predictions = tflite_interpreter_float.get_tensor(output_details[0]['index'])\n", + " # Convert prediction results to 
Pandas dataframe, for better visualization\n", + " # Increase precision of presented data for better side-by-side comparison\n", + " tflite_pred_dataframe = pd.DataFrame(tflite_float_model_predictions)\n", + " tflite_pred_dataframe.columns = commands\n", + " pd.set_option(\"precision\",1)\n", + " print(tflite_pred_dataframe)\n" + ], + "execution_count": 144, + "outputs": [ + { + "output_type": "stream", + "text": [ + "/content/data/mini_speech_commands/no/5c39594f_nohash_4.wav\n", + " yes no unknown\n", + "0 4.0e-13 1.0 5.8e-09\n", + "/content/data/mini_speech_commands/no/b83c1acf_nohash_2.wav\n", + " yes no unknown\n", + "0 3.1e-08 1.0 7.1e-04\n", + "/content/data/mini_speech_commands/no/8012c69d_nohash_2.wav\n", + " yes no unknown\n", + "0 2.1e-10 1.0 2.4e-10\n", + "/content/data/mini_speech_commands/no/dbb40d24_nohash_0.wav\n", + " yes no unknown\n", + "0 1.1e-06 1.3e-06 1.0\n", + "/content/data/mini_speech_commands/no/f4504600_nohash_1.wav\n", + " yes no unknown\n", + "0 1.9e-16 1.0 2.4e-13\n", + "/content/data/mini_speech_commands/no/c79159aa_nohash_4.wav\n", + " yes no unknown\n", + "0 1.6e-11 1.0 6.5e-11\n", + "/content/data/mini_speech_commands/no/c7aa72e6_nohash_1.wav\n", + " yes no unknown\n", + "0 4.3e-04 0.4 0.6\n", + "/content/data/mini_speech_commands/no/a527cb3c_nohash_0.wav\n", + " yes no unknown\n", + "0 5.2e-12 1.0 1.6e-07\n", + "/content/data/mini_speech_commands/no/26b28ea7_nohash_0.wav\n", + " yes no unknown\n", + "0 5.3e-08 1.0 2.7e-03\n", + "/content/data/mini_speech_commands/no/765ffccb_nohash_3.wav\n", + " yes no unknown\n", + "0 7.8e-10 1.0 2.0e-09\n", + "/content/data/mini_speech_commands/no/735845ab_nohash_2.wav\n", + " yes no unknown\n", + "0 0.2 0.8 1.2e-02\n", + "/content/data/mini_speech_commands/no/38d78313_nohash_3.wav\n", + " yes no unknown\n", + "0 4.6e-13 1.0 7.4e-16\n", + "/content/data/mini_speech_commands/no/89f680f3_nohash_1.wav\n", + " yes no unknown\n", + "0 1.4e-10 0.3 0.7\n", + 
"/content/data/mini_speech_commands/no/c661be6e_nohash_1.wav\n", + " yes no unknown\n", + "0 2.2e-12 1.0 1.9e-10\n", + "/content/data/mini_speech_commands/no/c33682f0_nohash_0.wav\n", + " yes no unknown\n", + "0 3.7e-08 1.0 5.8e-03\n", + "/content/data/mini_speech_commands/no/2e73212b_nohash_0.wav\n", + " yes no unknown\n", + "0 4.7e-03 1.0 1.6e-05\n", + "/content/data/mini_speech_commands/no/cb802c63_nohash_0.wav\n", + " yes no unknown\n", + "0 1.9e-07 1.0 3.3e-05\n", + "/content/data/mini_speech_commands/no/2903efb3_nohash_0.wav\n", + " yes no unknown\n", + "0 1.3e-04 1.0 9.5e-07\n", + "/content/data/mini_speech_commands/no/1657c9fa_nohash_1.wav\n", + " yes no unknown\n", + "0 3.0e-11 1.0 7.3e-10\n", + "/content/data/mini_speech_commands/no/24befdb3_nohash_4.wav\n", + " yes no unknown\n", + "0 1.2e-06 1.0 3.8e-04\n", + "/content/data/mini_speech_commands/no/bfdb9801_nohash_0.wav\n", + " yes no unknown\n", + "0 1.7e-04 1.0 2.5e-08\n", + "/content/data/mini_speech_commands/no/dc75148d_nohash_0.wav\n", + " yes no unknown\n", + "0 1.6e-07 1.0 1.2e-10\n", + "/content/data/mini_speech_commands/no/07ad9b59_nohash_0.wav\n", + " yes no unknown\n", + "0 2.3e-03 1.0 2.8e-04\n", + "/content/data/mini_speech_commands/no/ac7840d8_nohash_1.wav\n", + " yes no unknown\n", + "0 1.8e-06 1.0 1.4e-05\n", + "/content/data/mini_speech_commands/no/e14a99a5_nohash_0.wav\n", + " yes no unknown\n", + "0 1.3e-06 1.0 2.1e-06\n" + ], + "name": "stdout" + } + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "VJANb9Uk-Xw-" + }, + "source": [ + "##Generate Quantized int8 model\n" + ] + }, + { + "cell_type": "code", + "metadata": { + "id": "nfMYKQgz-W_X", + "colab": { + "base_uri": "https://localhost:8080/" + }, + "outputId": "28dc6ce9-24ab-44b6-bc79-4e65708b1d2c" + }, + "source": [ + "def representative_dataset_3():\n", + " for spectrogram, _ in spectrogram_ds.take(800):\n", + " # print('test')\n", + " flattened_data = np.array(spectrogram, dtype=np.float32).reshape(1,49, 257)\n", + 
" yield [flattened_data]\n", + "\n", + "converter.representative_dataset = representative_dataset_3\n", + "converter.optimizations = [tf.lite.Optimize.OPTIMIZE_FOR_SIZE]\n", + "converter.target_spec.supported_ops = [tf.lite.OpsSet.TFLITE_BUILTINS_INT8]\n", + "converter.inference_input_type = tf.int8\n", + "converter.inference_output_type = tf.int8\n", + "quantized_tflite_model = converter.convert()\n", + "open('/content/keras_lstm/model_quantized_minispeech.tflite', \"wb\").write(quantized_tflite_model)\n" + ], + "execution_count": 145, + "outputs": [ + { + "output_type": "execute_result", + "data": { + "text/plain": [ + "124768" + ] + }, + "metadata": { + "tags": [] + }, + "execution_count": 145 + } + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "HX4Ib7o4-kcH" + }, + "source": [ + "##Validate Quantized int8 model" + ] + }, + { + "cell_type": "code", + "metadata": { + "id": "K0fLf2sx-jHx", + "colab": { + "base_uri": "https://localhost:8080/" + }, + "outputId": "710765e4-05e8-4eec-98b2-36a693465370" + }, + "source": [ + "\n", + "# Load quantized TFLite model\n", + "tflite_interpreter_quant = tf.lite.Interpreter(model_path='/content/keras_lstm/model_quantized_minispeech.tflite')\n", + "# Learn about its input and output details\n", + "input_details = tflite_interpreter_quant.get_input_details()\n", + "\n", + "output_details = tflite_interpreter_quant.get_output_details()\n", + "\n", + "tflite_interpreter_quant.allocate_tensors()\n", + "\n", + "import glob\n", + "import pandas as pd\n", + "pd.set_option(\"display.precision\", 2)\n", + "\n", + "txtfiles = []\n", + "for file in glob.glob(\"/content/data/mini_speech_commands/unknown/*.wav\"):\n", + " txtfiles.append(file)\n", + "\n", + "for i in range(25):\n", + " print(txtfiles[i])\n", + " sample_ds = preprocess_dataset([str(txtfiles[i])])\n", + " # Run inference\n", + " for spectrogram, label in sample_ds.batch(1):\n", + " spectrogram_t = np.array(spectrogram, dtype=np.uint8).reshape(1,49, 257)\n", + " 
spectrogram_t = np.array(spectrogram_t-128, dtype=np.int8)\n", + " tflite_interpreter_quant.set_tensor(input_details[0]['index'],spectrogram_t )\n", + " tflite_interpreter_quant.invoke()\n", + " tflite_q_model_predictions = tflite_interpreter_quant.get_tensor(output_details[0]['index'])\n", + " tflite_pred_dataframe = pd.DataFrame(tflite_q_model_predictions)\n", + " tflite_pred_dataframe.columns = commands\n", + " print(tflite_pred_dataframe)\n", + "\n" + ], + "execution_count": 152, + "outputs": [ + { + "output_type": "stream", + "text": [ + "/content/data/mini_speech_commands/unknown/4c77947d_nohash_0.wav\n", + " yes no unknown\n", + "0 -128 -128 127\n", + "/content/data/mini_speech_commands/unknown/b737ee80_nohash_1.wav\n", + " yes no unknown\n", + "0 -128 -128 127\n", + "/content/data/mini_speech_commands/unknown/feb1d305_nohash_2.wav\n", + " yes no unknown\n", + "0 -128 -128 127\n", + "/content/data/mini_speech_commands/unknown/07ad9b59_nohash_2.wav\n", + " yes no unknown\n", + "0 -128 -128 127\n", + "/content/data/mini_speech_commands/unknown/5c39594f_nohash_4.wav\n", + " yes no unknown\n", + "0 -128 -127 127\n", + "/content/data/mini_speech_commands/unknown/2d92f18b_nohash_0.wav\n", + " yes no unknown\n", + "0 -124 -124 120\n", + "/content/data/mini_speech_commands/unknown/b83c1acf_nohash_2.wav\n", + " yes no unknown\n", + "0 -128 -128 127\n", + "/content/data/mini_speech_commands/unknown/4a1e736b_nohash_4.wav\n", + " yes no unknown\n", + "0 -128 -128 127\n", + "/content/data/mini_speech_commands/unknown/8012c69d_nohash_2.wav\n", + " yes no unknown\n", + "0 -128 127 -128\n", + "/content/data/mini_speech_commands/unknown/71f6fed7_nohash_2.wav\n", + " yes no unknown\n", + "0 -128 -128 127\n", + "/content/data/mini_speech_commands/unknown/ec21c46b_nohash_2.wav\n", + " yes no unknown\n", + "0 -128 -128 127\n", + "/content/data/mini_speech_commands/unknown/2f0a410b_nohash_1.wav\n", + " yes no unknown\n", + "0 -128 -128 127\n", + 
"/content/data/mini_speech_commands/unknown/f2a90886_nohash_0.wav\n", + " yes no unknown\n", + "0 -128 -128 127\n", + "/content/data/mini_speech_commands/unknown/dbb40d24_nohash_0.wav\n", + " yes no unknown\n", + "0 -126 -1 -1\n", + "/content/data/mini_speech_commands/unknown/b9515bf3_nohash_1.wav\n", + " yes no unknown\n", + "0 -128 -128 127\n", + "/content/data/mini_speech_commands/unknown/15c563d7_nohash_3.wav\n", + " yes no unknown\n", + "0 -128 127 -128\n", + "/content/data/mini_speech_commands/unknown/f4504600_nohash_1.wav\n", + " yes no unknown\n", + "0 -128 -128 127\n", + "/content/data/mini_speech_commands/unknown/da76aa58_nohash_1.wav\n", + " yes no unknown\n", + "0 -128 -128 127\n", + "/content/data/mini_speech_commands/unknown/dedc7fab_nohash_1.wav\n", + " yes no unknown\n", + "0 -128 -128 127\n", + "/content/data/mini_speech_commands/unknown/a527cb3c_nohash_0.wav\n", + " yes no unknown\n", + "0 -128 -128 127\n", + "/content/data/mini_speech_commands/unknown/26b28ea7_nohash_0.wav\n", + " yes no unknown\n", + "0 -128 -128 127\n", + "/content/data/mini_speech_commands/unknown/d84829e0_nohash_0.wav\n", + " yes no unknown\n", + "0 -125 -122 119\n", + "/content/data/mini_speech_commands/unknown/2313e093_nohash_0.wav\n", + " yes no unknown\n", + "0 -128 -128 127\n", + "/content/data/mini_speech_commands/unknown/763188c4_nohash_1.wav\n", + " yes no unknown\n", + "0 -128 -128 127\n", + "/content/data/mini_speech_commands/unknown/7ea032f3_nohash_3.wav\n", + " yes no unknown\n", + "0 -31 -128 30\n" + ], + "name": "stdout" + } + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "qg8U2AZkSfCH" + }, + "source": [ + "##Evaluate int8 model using floor and inputscale and zeropoint" + ] + }, + { + "cell_type": "code", + "metadata": { + "id": "cEwmoArnIcK9", + "colab": { + "base_uri": "https://localhost:8080/" + }, + "outputId": "887b437e-0a79-4ff6-e422-0bf6d8e0d8aa" + }, + "source": [ + "#test\n", + "# Load quantized TFLite model\n", + 
"tflite_interpreter_quant_int8 = tf.lite.Interpreter(model_path='/content/keras_lstm/model_quantized_minispeech.tflite')\n", + "# Learn about its input and output details\n", + "input_details = tflite_interpreter_quant_int8.get_input_details()\n", + "\n", + "output_details = tflite_interpreter_quant_int8.get_output_details()\n", + "\n", + "tflite_interpreter_quant_int8.allocate_tensors()\n", + "\n", + "import glob\n", + "import pandas as pd\n", + "pd.set_option(\"display.precision\", 2)\n", + "\n", + "txtfiles = []\n", + "for file in glob.glob(\"/content/data/mini_speech_commands/no/*.wav\"):\n", + " txtfiles.append(file)\n", + "\n", + "\n", + "for i in range(25):\n", + " print(txtfiles[i])\n", + " sample_ds = preprocess_dataset([str(txtfiles[i])])\n", + " # Run inference\n", + " for spectrogram, label in sample_ds.batch(1):\n", + " input_scale, input_zero_point = input_details[0][\"quantization\"]\n", + " print(input_scale)\n", + " print(input_zero_point)\n", + " spectrogram = np.array(spectrogram)\n", + " spectrogram = np.clip(np.floor(spectrogram / input_scale + input_zero_point), -128, 127) # for int8 validation\n", + " q_spectrogram = np.array(spectrogram, dtype=np.int8).reshape(1,49, 257)\n", + " tflite_interpreter_quant_int8.set_tensor(input_details[0]['index'], q_spectrogram )\n", + " tflite_interpreter_quant_int8.invoke()\n", + " tflite_q_model_predictions = tflite_interpreter_quant_int8.get_tensor(output_details[0]['index'])\n", + " print(tflite_q_model_predictions)\n", + " output_scale, output_zero_point = output_details[0][\"quantization\"]\n", + " tflite_model_predictions = (np.array(tflite_q_model_predictions, dtype=np.float32) - output_zero_point) * output_scale\n", + " tflite_pred_dataframe = pd.DataFrame(tflite_model_predictions)\n", + " tflite_pred_dataframe.columns = commands\n", + " print(tflite_pred_dataframe)" + ], + "execution_count": 147, + "outputs": [ + { + "output_type": "stream", + "text": [ + 
"/content/data/mini_speech_commands/no/5c39594f_nohash_4.wav\n", + "0.7175403237342834\n", + "-128\n", + "[[-128 127 -128]]\n", + " yes no unknown\n", + "0 0.0 1.0 0.0\n", + "/content/data/mini_speech_commands/no/b83c1acf_nohash_2.wav\n", + "0.7175403237342834\n", + "-128\n", + "[[-128 125 -125]]\n", + " yes no unknown\n", + "0 0.0 0.99 0.01\n", + "/content/data/mini_speech_commands/no/8012c69d_nohash_2.wav\n", + "0.7175403237342834\n", + "-128\n", + "[[-128 127 -128]]\n", + " yes no unknown\n", + "0 0.0 1.0 0.0\n", + "/content/data/mini_speech_commands/no/dbb40d24_nohash_0.wav\n", + "0.7175403237342834\n", + "-128\n", + "[[-120 -128 120]]\n", + " yes no unknown\n", + "0 0.03 0.0 0.97\n", + "/content/data/mini_speech_commands/no/f4504600_nohash_1.wav\n", + "0.7175403237342834\n", + "-128\n", + "[[-128 127 -128]]\n", + " yes no unknown\n", + "0 0.0 1.0 0.0\n", + "/content/data/mini_speech_commands/no/c79159aa_nohash_4.wav\n", + "0.7175403237342834\n", + "-128\n", + "[[-128 127 -128]]\n", + " yes no unknown\n", + "0 0.0 1.0 0.0\n", + "/content/data/mini_speech_commands/no/c7aa72e6_nohash_1.wav\n", + "0.7175403237342834\n", + "-128\n", + "[[-128 127 -127]]\n", + " yes no unknown\n", + "0 0.0 1.0 3.91e-03\n", + "/content/data/mini_speech_commands/no/a527cb3c_nohash_0.wav\n", + "0.7175403237342834\n", + "-128\n", + "[[-128 127 -128]]\n", + " yes no unknown\n", + "0 0.0 1.0 0.0\n", + "/content/data/mini_speech_commands/no/26b28ea7_nohash_0.wav\n", + "0.7175403237342834\n", + "-128\n", + "[[-128 118 -118]]\n", + " yes no unknown\n", + "0 0.0 0.96 0.04\n", + "/content/data/mini_speech_commands/no/765ffccb_nohash_3.wav\n", + "0.7175403237342834\n", + "-128\n", + "[[-128 127 -128]]\n", + " yes no unknown\n", + "0 0.0 1.0 0.0\n", + "/content/data/mini_speech_commands/no/735845ab_nohash_2.wav\n", + "0.7175403237342834\n", + "-128\n", + "[[ 126 -128 -126]]\n", + " yes no unknown\n", + "0 0.99 0.0 7.81e-03\n", + "/content/data/mini_speech_commands/no/38d78313_nohash_3.wav\n", + 
"0.7175403237342834\n", + "-128\n", + "[[-128 127 -128]]\n", + " yes no unknown\n", + "0 0.0 1.0 0.0\n", + "/content/data/mini_speech_commands/no/89f680f3_nohash_1.wav\n", + "0.7175403237342834\n", + "-128\n", + "[[-128 31 -31]]\n", + " yes no unknown\n", + "0 0.0 0.62 0.38\n", + "/content/data/mini_speech_commands/no/c661be6e_nohash_1.wav\n", + "0.7175403237342834\n", + "-128\n", + "[[-128 127 -128]]\n", + " yes no unknown\n", + "0 0.0 1.0 0.0\n", + "/content/data/mini_speech_commands/no/c33682f0_nohash_0.wav\n", + "0.7175403237342834\n", + "-128\n", + "[[-128 -107 107]]\n", + " yes no unknown\n", + "0 0.0 0.08 0.92\n", + "/content/data/mini_speech_commands/no/2e73212b_nohash_0.wav\n", + "0.7175403237342834\n", + "-128\n", + "[[-127 -128 127]]\n", + " yes no unknown\n", + "0 3.91e-03 0.0 1.0\n", + "/content/data/mini_speech_commands/no/cb802c63_nohash_0.wav\n", + "0.7175403237342834\n", + "-128\n", + "[[-128 127 -127]]\n", + " yes no unknown\n", + "0 0.0 1.0 3.91e-03\n", + "/content/data/mini_speech_commands/no/2903efb3_nohash_0.wav\n", + "0.7175403237342834\n", + "-128\n", + "[[-128 127 -128]]\n", + " yes no unknown\n", + "0 0.0 1.0 0.0\n", + "/content/data/mini_speech_commands/no/1657c9fa_nohash_1.wav\n", + "0.7175403237342834\n", + "-128\n", + "[[-128 127 -128]]\n", + " yes no unknown\n", + "0 0.0 1.0 0.0\n", + "/content/data/mini_speech_commands/no/24befdb3_nohash_4.wav\n", + "0.7175403237342834\n", + "-128\n", + "[[-128 127 -128]]\n", + " yes no unknown\n", + "0 0.0 1.0 0.0\n", + "/content/data/mini_speech_commands/no/bfdb9801_nohash_0.wav\n", + "0.7175403237342834\n", + "-128\n", + "[[-128 127 -128]]\n", + " yes no unknown\n", + "0 0.0 1.0 0.0\n", + "/content/data/mini_speech_commands/no/dc75148d_nohash_0.wav\n", + "0.7175403237342834\n", + "-128\n", + "[[-115 115 -128]]\n", + " yes no unknown\n", + "0 0.05 0.95 0.0\n", + "/content/data/mini_speech_commands/no/07ad9b59_nohash_0.wav\n", + "0.7175403237342834\n", + "-128\n", + "[[-128 127 -128]]\n", + " yes no 
unknown\n", + "0 0.0 1.0 0.0\n", + "/content/data/mini_speech_commands/no/ac7840d8_nohash_1.wav\n", + "0.7175403237342834\n", + "-128\n", + "[[-128 127 -128]]\n", + " yes no unknown\n", + "0 0.0 1.0 0.0\n", + "/content/data/mini_speech_commands/no/e14a99a5_nohash_0.wav\n", + "0.7175403237342834\n", + "-128\n", + "[[ 118 -122 -124]]\n", + " yes no unknown\n", + "0 0.96 0.02 0.02\n" + ], + "name": "stdout" + } + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "tUToqbr-OqKp" + }, + "source": [ + "##Generate a TensorFlow Lite for MicroControllers Model\n", + "Convert the TensorFlow Lite model into a C source file that can be loaded by TensorFlow Lite for Microcontrollers." + ] + }, + { + "cell_type": "code", + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/" + }, + "id": "_y90gcAtOs0-", + "outputId": "a422f82c-7a13-4d1e-8642-e8f7d63a114d" + }, + "source": [ + "# Install xxd if it is not available\n", + "!apt-get update && apt-get -qq install xxd\n", + "# Convert to a C source file\n", + "!xxd -i /content/keras_lstm/model_quantized_minispeech.tflite > /content/keras_lstm/model.cc\n" + ], + "execution_count": 148, + "outputs": [ + { + "output_type": "stream", + "text": [ + "\r0% [Working]\r \rHit:1 https://cloud.r-project.org/bin/linux/ubuntu bionic-cran40/ InRelease\n", + "\r0% [Connecting to archive.ubuntu.com (91.189.88.142)] [Waiting for headers] [Co\r \rIgn:2 https://developer.download.nvidia.com/compute/cuda/repos/ubuntu1804/x86_64 InRelease\n", + "\r0% [Connecting to archive.ubuntu.com (91.189.88.142)] [Waiting for headers] [Co\r0% [1 InRelease gpgv 3,626 B] [Connecting to archive.ubuntu.com (91.189.88.142)\r \rIgn:3 https://developer.download.nvidia.com/compute/machine-learning/repos/ubuntu1804/x86_64 InRelease\n", + "Hit:4 https://developer.download.nvidia.com/compute/cuda/repos/ubuntu1804/x86_64 Release\n", + "Get:5 http://security.ubuntu.com/ubuntu bionic-security InRelease [88.7 kB]\n", + "Hit:6 
https://developer.download.nvidia.com/compute/machine-learning/repos/ubuntu1804/x86_64 Release\n", + "Hit:7 http://ppa.launchpad.net/c2d4u.team/c2d4u4.0+/ubuntu bionic InRelease\n", + "Hit:8 http://archive.ubuntu.com/ubuntu bionic InRelease\n", + "Get:10 http://archive.ubuntu.com/ubuntu bionic-updates InRelease [88.7 kB]\n", + "Hit:12 http://ppa.launchpad.net/cran/libgit2/ubuntu bionic InRelease\n", + "Hit:13 http://ppa.launchpad.net/deadsnakes/ppa/ubuntu bionic InRelease\n", + "Get:14 http://archive.ubuntu.com/ubuntu bionic-backports InRelease [74.6 kB]\n", + "Hit:15 http://ppa.launchpad.net/graphics-drivers/ppa/ubuntu bionic InRelease\n", + "Fetched 252 kB in 2s (142 kB/s)\n", + "Reading package lists... Done\n" + ], + "name": "stdout" + } + ] + } + ] +} diff --git a/third_party/xtensa/examples/micro_speech_lstm/yes_micro_features_data.cc b/third_party/xtensa/examples/micro_speech_lstm/yes_micro_features_data.cc new file mode 100644 index 0000000..2259b65 --- /dev/null +++ b/third_party/xtensa/examples/micro_speech_lstm/yes_micro_features_data.cc @@ -0,0 +1,1097 @@ +/* Copyright 2021 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ +/* + * * Copyright (c) 2021 Cadence Design Systems Inc. 
+ * * + * * Permission is hereby granted, free of charge, to any person obtaining + * * a copy of this software and associated documentation files (the + * * "Software"), to deal in the Software without restriction, including + * * without limitation the rights to use, copy, modify, merge, publish, + * * distribute, sublicense, and/or sell copies of the Software, and to + * * permit persons to whom the Software is furnished to do so, subject to + * * the following conditions: + * * + * * The above copyright notice and this permission notice shall be included + * * in all copies or substantial portions of the Software. + * * + * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, + * * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF + * * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. + * * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY + * * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, + * * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE + * * SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. + * */ + +#include "third_party/xtensa/examples/micro_speech_lstm/yes_micro_features_data.h" + +// Golden test values for the expected spectrogram from a "yes" sample file +// speech_commands_test_set_v0.02/yes/f2e59fea_nohash_1.wav. 
+ +const int g_yes_micro_f2e59fea_nohash_1_width = 257; +const int g_yes_micro_f2e59fea_nohash_1_height = 49; +// yes_10_00f0204f_nohash_0_wav.txt +const signed char g_yes_micro_f2e59fea_nohash_1_data[] = { + 127, 125, 125, 112, 119, 100, 124, 59, 66, 44, 33, 11, + 6, -19, -37, -56, -76, -91, -106, -107, -96, -82, -67, -53, + -42, -30, -20, -11, -4, 0, 4, 5, 5, 4, 0, -3, + -10, -17, -25, -34, -43, -54, -64, -76, -86, -98, -108, -117, + -119, -112, -105, -97, -90, -84, -79, -75, -72, -70, -68, -68, + -68, -69, -69, -71, -72, -73, -72, -70, -68, -63, -59, -53, + -48, -42, -36, -30, -24, -21, -16, -13, -11, -9, -9, -10, + -11, -13, -17, -23, -30, -38, -43, -52, -60, -69, -79, -89, + -99, -108, -116, -117, -110, -102, -93, -85, -77, -69, -63, -56, + -51, -47, -43, -42, -39, -39, -39, -40, -41, -43, -46, -50, + -54, -59, -64, -69, -74, -80, -85, -91, -96, -101, -106, -109, + -113, -115, -117, -117, -117, -116, -115, -113, -112, -112, -111, -112, + -112, -113, -113, -114, -116, -117, -118, -119, -119, -118, -118, -116, + -115, -113, -111, -109, -107, -105, -103, -101, -100, -98, -96, -94, + -92, -91, -90, -89, -89, -89, -88, -88, -88, -88, -88, -89, + -89, -91, -92, -93, -93, -95, -95, -95, -94, -94, -93, -93, + -92, -91, -89, -88, -87, -86, -85, -85, -83, -84, -84, -84, + -84, -84, -85, -85, -85, -86, -86, -86, -86, -85, -85, -84, + -83, -82, -80, -78, -77, -76, -76, -76, -76, -77, -78, -80, + -83, -86, -90, -94, -99, -105, -109, -114, -115, -112, -107, -101, + -92, -84, -75, -67, -59, -51, -44, -37, -30, -24, -18, -14, + -10, -7, -4, -2, -2, -116, -119, -98, -92, -43, -68, 127, + -111, -114, -113, -122, -108, -32, -108, -112, -116, -112, -108, -104, + -125, -127, -125, -125, -121, -126, -124, -124, -126, -125, -125, -125, + -125, -123, -125, -125, -127, -125, -126, -127, -125, -127, -127, -127, + -126, -126, -125, -126, -126, -126, -127, -126, -126, -126, -126, -126, + -127, -127, -127, -127, -126, -127, -126, -125, -126, -126, -127, -126, + -126, -126, -127, -126, 
-127, -126, -126, -127, -127, -126, -125, -126, + -126, -126, -125, -123, -125, -126, -125, -125, -124, -118, -124, -125, + -126, -126, -126, -126, -126, -126, -127, -127, -125, -126, -127, -126, + -127, -127, -127, -127, -127, -127, -127, -126, -127, -127, -127, -126, + -126, -127, -126, -127, -127, -127, -126, -127, -126, -126, -127, -127, + -127, -127, -126, -127, -127, -127, -126, -127, -127, -126, -127, -127, + -126, -127, -125, -126, -127, -126, -127, -127, -127, -126, -127, -126, + -127, -127, -127, -127, -126, -126, -127, -127, -127, -127, -127, -127, + -127, -127, -127, -127, -127, -127, -127, -127, -127, -126, -127, -127, + -127, -127, -127, -127, -127, -126, -127, -127, -127, -127, -127, -127, + -127, -127, -127, -127, -127, -127, -127, -125, -127, -127, -127, -126, + -127, -125, -126, -125, -127, -125, -127, -125, -127, -127, -127, -126, + -126, -126, -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, + -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, + -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, + -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, -93, -93, + -115, -97, -4, -51, 127, -79, -36, 11, -67, -112, -16, -99, + -116, -110, -121, -119, -89, -112, -113, -113, -121, -119, -119, -125, + -122, -126, -125, -125, -126, -126, -124, -126, -124, -124, -125, -124, + -123, -122, -124, -125, -125, -125, -126, -127, -127, -126, -125, -126, + -124, -126, -125, -125, -126, -125, -126, -125, -126, -126, -125, -127, + -126, -127, -126, -127, -126, -126, -125, -126, -125, -126, -125, -124, + -125, -125, -127, -125, -124, -127, -125, -125, -125, -125, -125, -125, + -123, -115, -120, -118, -125, -125, -123, -124, -126, -125, -126, -127, + -125, -125, -126, -127, -126, -126, -126, -127, -127, -125, -127, -127, + -127, -127, -127, -127, -126, -127, -127, -126, -125, -126, -126, -126, + -126, -127, -127, -126, -127, -126, -125, -126, -127, -127, -127, -127, + -126, -127, -126, -127, -126, -127, 
-127, -126, -126, -127, -127, -127, + -127, -127, -126, -127, -127, -126, -126, -126, -127, -127, -127, -127, + -127, -127, -127, -127, -126, -126, -127, -127, -127, -126, -127, -127, + -126, -126, -127, -127, -127, -127, -127, -127, -126, -127, -127, -126, + -126, -127, -127, -126, -127, -127, -126, -126, -126, -127, -126, -127, + -126, -126, -126, -125, -126, -126, -123, -123, -123, -124, -125, -126, + -124, -126, -124, -126, -126, -125, -127, -127, -126, -127, -127, -127, + -127, -126, -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, + -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, + -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, + -127, -127, -127, -126, -122, -114, -105, -100, -110, 127, -84, -117, + -47, -55, -80, 0, -98, -112, -109, -110, -107, -115, -121, -119, + -125, -126, -126, -120, -124, -126, -123, -123, -123, -126, -124, -124, + -124, -123, -124, -125, -125, -124, -126, -126, -127, -126, -126, -126, + -126, -125, -125, -125, -124, -125, -126, -125, -125, -127, -126, -127, + -126, -127, -126, -125, -126, -126, -125, -126, -125, -125, -125, -127, + -126, -126, -126, -126, -126, -126, -127, -126, -125, -122, -125, -126, + -125, -124, -123, -119, -123, -120, -118, -123, -120, -117, -124, -123, + -124, -125, -126, -125, -125, -125, -125, -127, -127, -127, -126, -127, + -126, -127, -127, -126, -127, -126, -126, -126, -127, -127, -126, -126, + -126, -126, -127, -126, -126, -127, -127, -127, -126, -126, -127, -127, + -126, -126, -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, + -126, -126, -127, -125, -126, -126, -126, -126, -127, -127, -127, -127, + -127, -127, -127, -127, -127, -126, -126, -127, -126, -127, -127, -126, + -127, -127, -127, -127, -127, -127, -127, -127, -126, -127, -126, -127, + -126, -127, -127, -127, -127, -127, -127, -127, -127, -126, -126, -127, + -126, -127, -127, -127, -127, -125, -126, -127, -126, -126, -126, -125, + -126, -125, -126, -127, -124, -125, -127, 
-127, -126, -127, -127, -126, + -126, -127, -127, -127, -127, -126, -127, -127, -127, -127, -126, -126, + -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, + -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, + -127, -127, -127, -127, -127, -127, -127, -127, -105, -109, -114, -107, + -103, -120, 127, -61, -81, -94, -82, -78, 50, -1, -73, -98, + -102, -110, -104, -119, -121, -123, -126, -125, -116, -116, -121, -122, + -124, -125, -123, -121, -124, -125, -125, -125, -123, -127, -126, -125, + -125, -124, -124, -126, -126, -125, -125, -125, -125, -126, -125, -126, + -125, -126, -126, -126, -125, -125, -125, -126, -126, -126, -126, -127, + -127, -127, -127, -124, -127, -127, -126, -125, -127, -121, -125, -126, + -127, -127, -125, -116, -120, -119, -124, -119, -121, -87, -109, -115, + -126, -124, -124, -112, -122, -122, -123, -127, -123, -120, -123, -125, + -127, -127, -126, -124, -124, -124, -127, -126, -126, -124, -124, -124, + -125, -125, -126, -122, -123, -125, -126, -126, -125, -126, -126, -126, + -127, -126, -127, -127, -125, -126, -127, -126, -127, -127, -126, -127, + -127, -127, -126, -127, -125, -126, -126, -126, -126, -126, -124, -126, + -126, -127, -127, -127, -126, -127, -126, -127, -127, -127, -126, -126, + -126, -127, -127, -127, -126, -127, -126, -126, -127, -127, -127, -126, + -127, -127, -127, -127, -127, -126, -126, -127, -127, -127, -127, -127, + -127, -127, -127, -127, -127, -126, -127, -126, -127, -125, -127, -126, + -127, -126, -126, -125, -127, -124, -124, -126, -126, -127, -126, -126, + -127, -125, -126, -127, -126, -125, -127, -126, -127, -127, -127, -127, + -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, + -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, + -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, + -127, -119, -113, -118, -118, -112, -99, 127, -37, -74, -83, -80, + -72, 4, 106, -114, -103, -113, -112, -99, -65, -103, -113, 
-118, + -115, -111, -94, -114, -120, -123, -123, -124, -118, -126, -127, -126, + -126, -126, -121, -123, -127, -127, -127, -127, -125, -124, -126, -126, + -127, -127, -126, -126, -127, -127, -127, -127, -127, -125, -127, -126, + -127, -127, -126, -125, -126, -127, -126, -127, -126, -121, -123, -126, + -127, -125, -126, -115, -116, -120, -119, -121, -111, -89, -86, -98, + -105, -113, -94, -93, -98, -109, -119, -119, -123, -120, -101, -105, + -120, -122, -124, -125, -111, -117, -124, -126, -127, -125, -118, -118, + -126, -126, -127, -124, -119, -124, -126, -126, -123, -124, -124, -126, + -126, -127, -127, -126, -127, -127, -126, -127, -126, -126, -126, -126, + -126, -126, -127, -127, -127, -125, -126, -126, -126, -126, -124, -126, + -125, -126, -125, -126, -126, -126, -127, -127, -127, -127, -127, -127, + -127, -126, -127, -127, -127, -127, -127, -125, -127, -126, -127, -127, + -126, -126, -126, -127, -127, -127, -127, -125, -126, -126, -127, -127, + -127, -127, -127, -127, -127, -127, -127, -126, -127, -127, -127, -127, + -127, -126, -125, -126, -127, -127, -125, -127, -123, -125, -127, -125, + -124, -126, -123, -126, -127, -125, -126, -126, -126, -126, -127, -127, + -127, -126, -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, + -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, + -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, + -127, -127, -127, -127, -127, -127, -126, -124, -119, -113, -73, -81, + 122, -30, -83, -85, -101, -76, -21, 24, -85, -95, -114, -93, + -74, 127, -1, -80, -90, -95, -86, 0, -88, -111, -116, -119, + -120, -109, -117, -120, -120, -120, -119, -114, -119, -125, -126, -126, + -126, -125, -119, -125, -126, -125, -125, -126, -123, -125, -127, -124, + -125, -125, -124, -124, -125, -125, -126, -124, -124, -122, -126, -124, + -124, -122, -126, -105, -119, -114, -118, -117, -109, -63, -106, -108, + -115, -107, -103, -22, -113, -113, -108, -122, -114, -102, -95, -112, + -119, -113, -113, -123, -77, 
-113, -124, -121, -123, -118, -105, -121, + -125, -122, -125, -125, -109, -120, -125, -124, -125, -123, -124, -125, + -126, -127, -126, -126, -127, -124, -126, -126, -127, -126, -126, -126, + -126, -127, -127, -127, -126, -126, -126, -126, -126, -126, -125, -126, + -124, -126, -126, -127, -125, -125, -123, -124, -125, -126, -126, -125, + -126, -126, -127, -127, -126, -127, -126, -125, -125, -127, -126, -125, + -127, -125, -127, -125, -127, -125, -126, -123, -125, -125, -127, -127, + -126, -124, -127, -126, -125, -127, -127, -126, -126, -127, -127, -126, + -127, -126, -126, -126, -127, -127, -127, -126, -124, -126, -126, -126, + -127, -125, -124, -126, -123, -125, -124, -125, -127, -122, -124, -127, + -127, -126, -127, -126, -126, -126, -127, -127, -127, -126, -126, -127, + -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, + -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, + -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, -121, + -118, -124, -114, -113, -88, 59, -21, -79, -94, -94, -96, -70, + -25, -108, -121, -112, -99, -74, 126, 1, -54, -85, -59, -52, + 59, 127, -38, -76, -96, -97, -100, -86, -121, -123, -121, -123, + -124, -101, -115, -121, -121, -120, -120, -115, -123, -126, -124, -125, + -125, -125, -114, -124, -125, -124, -125, -125, -120, -125, -124, -126, + -123, -123, -119, -118, -114, -114, -115, -111, -96, -91, -104, -102, + -108, -87, -71, -48, -115, -112, -116, -120, -101, -70, -111, -120, + -117, -117, -126, -104, -86, -108, -118, -119, -121, -116, -75, -110, + -118, -118, -127, -118, -106, -118, -120, -121, -123, -123, -120, -117, + -124, -124, -126, -127, -125, -124, -123, -126, -126, -126, -126, -126, + -126, -127, -127, -127, -127, -127, -126, -127, -127, -126, -127, -127, + -127, -127, -126, -127, -125, -126, -126, -126, -125, -127, -125, -125, + -126, -127, -125, -127, -126, -126, -127, -126, -126, -126, -126, -126, + -127, -125, -125, -125, -126, -127, -125, -125, -124, -126, -125, 
-125, + -127, -125, -125, -127, -127, -126, -126, -127, -127, -126, -126, -126, + -126, -125, -126, -126, -126, -125, -125, -125, -126, -127, -125, -126, + -125, -124, -127, -127, -127, -123, -124, -125, -125, -127, -125, -125, + -124, -125, -124, -126, -123, -123, -124, -123, -126, -125, -127, -125, + -125, -126, -126, -127, -126, -126, -127, -127, -127, -127, -127, -126, + -128, -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, + -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, + -127, -127, -127, -127, -127, -125, -123, -120, -116, -106, -47, -61, + -103, -111, -119, -118, -116, -69, -123, -124, -125, -121, -112, -55, + -56, -99, -104, -112, -113, -105, 127, -62, -112, -105, -110, -112, + -77, -101, -116, -121, -124, -122, -116, -109, -122, -123, -125, -126, + -126, -116, -125, -125, -124, -124, -126, -121, -121, -123, -123, -126, + -126, -127, -121, -124, -122, -124, -126, -121, -116, -108, -116, -121, + -121, -123, -109, -89, -116, -114, -126, -126, -117, -111, -111, -121, + -121, -122, -122, -122, -112, -126, -124, -126, -123, -121, -106, -118, + -123, -117, -119, -117, -113, -104, -122, -120, -125, -126, -125, -117, + -125, -126, -125, -125, -126, -125, -124, -127, -127, -127, -127, -126, + -125, -127, -127, -127, -127, -127, -127, -126, -127, -127, -127, -127, + -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, + -127, -127, -127, -127, -127, -127, -127, -127, -126, -127, -127, -127, + -127, -127, -127, -127, -127, -127, -127, -127, -127, -126, -126, -128, + -126, -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, + -127, -127, -127, -127, -127, -127, -127, -127, -127, -128, -127, -127, + -127, -127, -127, -127, -127, -127, -127, -127, -126, -127, -127, -127, + -126, -127, -126, -127, -126, -126, -126, -126, -127, -124, -126, -127, + -124, -125, -126, -126, -127, -126, -126, -126, -126, -127, -127, -127, + -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, + 
-127, -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, + -127, -127, -127, -127, -127, -127, -128, -128, -128, -127, -122, -120, + -119, -99, -98, -32, -34, -94, -110, -115, -117, -114, -59, -121, + -117, -115, -112, -108, -47, -44, -85, -99, -111, -114, -96, 127, + -105, -90, -95, -103, -89, -29, -30, -100, -118, -127, -122, -123, + -82, -124, -117, -119, -117, -119, -109, -118, -121, -125, -125, -122, + -123, -111, -127, -119, -123, -123, -123, -112, -117, -116, -121, -123, + -114, -107, -101, -118, -115, -118, -121, -110, -101, -120, -118, -118, + -124, -119, -120, -112, -124, -122, -121, -123, -123, -111, -118, -121, + -121, -123, -123, -119, -93, -123, -121, -117, -118, -116, -96, -112, + -118, -123, -124, -126, -124, -117, -126, -125, -125, -126, -126, -123, + -126, -126, -127, -127, -128, -126, -126, -127, -126, -126, -127, -127, + -127, -126, -126, -127, -126, -127, -127, -126, -126, -127, -127, -127, + -127, -126, -127, -127, -127, -127, -127, -127, -125, -126, -127, -126, + -127, -126, -126, -126, -127, -127, -127, -127, -127, -125, -127, -127, + -127, -127, -126, -126, -126, -127, -127, -127, -127, -127, -125, -127, + -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, + -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, -126, -127, + -126, -127, -126, -127, -126, -127, -126, -127, -125, -127, -126, -127, + -126, -126, -126, -125, -126, -125, -127, -124, -127, -124, -127, -127, + -125, -126, -126, -127, -127, -127, -127, -127, -127, -127, -126, -127, + -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, + -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, + -127, -127, -125, -121, -118, -109, -118, -85, -9, 14, -88, -111, + -118, -124, -127, -37, -102, -106, -107, -104, -94, -66, 13, -96, + -120, -122, -115, -86, 127, -8, -62, -75, -82, -80, -62, 87, + -114, -117, -120, -119, -109, -76, -85, -105, -119, -117, -117, -122, + -98, -122, -124, -124, -124, -122, -110, -107, 
-124, -123, -117, -126, + -125, -100, -120, -118, -123, -122, -119, -94, -95, -122, -117, -125, + -123, -118, -91, -118, -119, -123, -125, -124, -119, -104, -123, -122, + -126, -124, -123, -107, -115, -116, -127, -120, -114, -114, -81, -118, + -114, -123, -113, -108, -102, -100, -117, -124, -124, -126, -121, -115, + -121, -125, -126, -126, -125, -125, -124, -126, -127, -127, -126, -126, + -124, -127, -126, -127, -126, -126, -127, -127, -127, -127, -127, -127, + -127, -126, -127, -127, -127, -127, -127, -127, -126, -126, -127, -127, + -127, -125, -124, -126, -125, -127, -127, -126, -127, -125, -127, -126, + -127, -126, -125, -125, -126, -126, -127, -127, -126, -126, -126, -126, + -126, -127, -126, -126, -126, -127, -127, -127, -127, -127, -127, -127, + -127, -127, -127, -127, -127, -127, -126, -127, -127, -127, -126, -126, + -126, -126, -127, -127, -126, -126, -127, -127, -125, -126, -127, -127, + -125, -126, -126, -124, -126, -126, -127, -126, -124, -124, -126, -127, + -126, -125, -125, -125, -126, -126, -127, -125, -126, -126, -127, -126, + -127, -127, -126, -127, -127, -127, -127, -127, -127, -126, -127, -127, + -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, + -127, -127, -127, -127, -127, -127, -128, -123, -122, -118, -111, -84, + -88, -6, 17, -85, -102, -114, -120, -127, -35, -108, -115, -111, + -109, -89, -49, 0, -94, -104, -118, -118, -96, 127, -41, -74, + -68, -71, -54, -18, 65, -88, -121, -119, -122, -116, -64, -101, + -111, -120, -119, -117, -116, -93, -121, -122, -125, -127, -121, -106, + -115, -125, -121, -122, -121, -109, -97, -107, -109, -118, -119, -118, + -102, -109, -112, -124, -120, -120, -119, -91, -116, -125, -123, -120, + -119, -113, -112, -116, -120, -118, -123, -121, -100, -120, -121, -122, + -117, -111, -103, -88, -110, -119, -117, -118, -110, -87, -113, -118, + -122, -123, -121, -117, -114, -122, -127, -127, -125, -126, -124, -126, + -125, -126, -126, -126, -127, -125, -126, -127, -127, -127, -127, -127, + -127, 
-126, -126, -127, -126, -127, -127, -127, -127, -127, -127, -127, + -127, -127, -127, -127, -127, -127, -127, -126, -126, -127, -126, -126, + -125, -126, -126, -127, -126, -127, -127, -127, -125, -126, -126, -127, + -126, -126, -125, -125, -126, -125, -127, -126, -126, -126, -126, -127, + -127, -127, -126, -127, -126, -127, -127, -127, -127, -127, -127, -127, + -127, -127, -127, -127, -126, -126, -127, -127, -126, -127, -126, -127, + -126, -127, -127, -127, -127, -126, -125, -125, -127, -127, -127, -127, + -126, -124, -126, -125, -126, -127, -126, -126, -126, -127, -127, -127, + -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, + -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, + -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, + -122, -122, -122, -113, -107, -84, 46, -10, -81, -98, -102, -101, + -87, -37, -110, -121, -124, -122, -108, 35, -94, -100, -99, -85, + -66, 51, 68, -32, -77, -86, -71, -60, 127, -81, -111, -118, + -109, -106, -58, -106, -106, -112, -117, -120, -116, -98, -117, -120, + -126, -125, -120, -99, -120, -120, -126, -124, -118, -99, -101, -125, + -124, -110, -117, -112, -104, -110, -124, -126, -121, -113, -94, -107, + -112, -117, -118, -116, -115, -111, -113, -120, -126, -123, -114, -101, + -113, -122, -122, -122, -114, -99, -106, -106, -117, -120, -112, -97, + -85, -97, -109, -119, -120, -101, -102, -111, -123, -120, -122, -119, + -121, -124, -123, -125, -127, -126, -126, -126, -126, -128, -127, -127, + -127, -126, -127, -127, -128, -127, -127, -127, -127, -127, -127, -127, + -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, + -127, -127, -127, -126, -125, -126, -127, -127, -126, -125, -126, -126, + -127, -126, -126, -125, -126, -125, -126, -126, -126, -123, -127, -124, + -126, -127, -125, -124, -126, -126, -127, -127, -126, -126, -126, -127, + -127, -126, -127, -127, -127, -127, -127, -126, -126, -127, -126, -127, + -127, -126, -127, -127, -127, -126, -127, 
-126, -126, -127, -127, -125, + -126, -127, -127, -125, -126, -125, -126, -127, -127, -124, -126, -126, + -126, -125, -126, -127, -127, -127, -126, -127, -126, -127, -127, -126, + -126, -127, -127, -127, -127, -127, -126, -127, -127, -127, -127, -127, + -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, + -127, -127, -127, -127, -127, -124, -120, -118, -118, -127, -106, 121, + -56, -95, -99, -106, -102, -26, -76, -100, -102, -99, -74, 38, + 10, -51, -76, -81, -47, 69, 107, 2, -51, -66, -35, 34, + 127, 0, -72, -106, -110, -90, -46, -81, -116, -116, -121, -96, + -87, -104, -121, -126, -127, -104, -89, -117, -117, -124, -116, -91, + -88, -108, -113, -123, -109, -91, -115, -106, -105, -119, -119, -122, + -116, -108, -107, -119, -115, -123, -108, -111, -110, -119, -122, -119, + -112, -115, -110, -113, -119, -121, -115, -119, -109, -112, -118, -114, + -102, -120, -107, -99, -113, -102, -101, -114, -111, -103, -107, -105, + -112, -116, -111, -118, -122, -121, -124, -123, -124, -124, -126, -126, + -126, -127, -126, -125, -127, -127, -126, -127, -126, -127, -127, -127, + -126, -127, -127, -126, -127, -127, -126, -126, -127, -126, -127, -127, + -126, -127, -127, -126, -127, -126, -126, -127, -127, -125, -127, -124, + -125, -126, -127, -127, -126, -126, -126, -126, -125, -125, -126, -126, + -125, -122, -125, -125, -125, -127, -124, -124, -124, -126, -126, -127, + -127, -124, -125, -127, -127, -127, -126, -126, -126, -127, -127, -126, + -127, -127, -127, -127, -126, -127, -125, -126, -126, -127, -125, -126, + -127, -125, -126, -126, -126, -126, -127, -126, -126, -126, -124, -126, + -126, -125, -126, -125, -126, -125, -126, -126, -126, -126, -127, -126, + -127, -127, -127, -126, -127, -127, -127, -127, -127, -127, -126, -127, + -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, + -127, -127, -127, -127, -127, -127, -127, -127, -127, -128, -123, -109, + -126, -121, -90, -104, 127, -90, -123, -81, -88, -84, -9, -83, + -90, -109, -121, 
-62, 124, -41, -55, -76, -98, 6, 126, 33, + 31, -50, -54, 88, 121, -26, -33, -83, -86, -84, -48, -84, + -71, -109, -112, -97, -105, -116, -100, -114, -109, -100, -87, -103, + -100, -113, -97, -115, -64, -87, -68, -111, -79, -97, -94, -109, + -101, -119, -115, -113, -113, -125, -109, -110, -124, -110, -108, -111, + -118, -124, -119, -121, -117, -108, -112, -121, -125, -115, -117, -119, + -119, -120, -123, -113, -122, -117, -117, -121, -118, -107, -117, -111, + -106, -123, -124, -111, -121, -106, -111, -117, -118, -121, -124, -125, + -121, -122, -125, -124, -123, -124, -125, -125, -125, -124, -124, -124, + -124, -124, -124, -125, -124, -126, -124, -125, -125, -124, -125, -125, + -125, -125, -124, -125, -124, -125, -125, -123, -124, -125, -122, -126, + -127, -123, -124, -125, -125, -122, -124, -125, -125, -127, -126, -123, + -123, -125, -125, -127, -123, -126, -122, -124, -123, -124, -121, -125, + -123, -124, -123, -123, -124, -123, -125, -126, -126, -122, -122, -124, + -126, -126, -126, -126, -126, -126, -127, -126, -126, -126, -126, -126, + -124, -124, -127, -126, -126, -126, -126, -125, -124, -126, -125, -124, + -124, -124, -125, -126, -125, -126, -126, -125, -125, -125, -124, -124, + -125, -126, -125, -125, -127, -126, -126, -126, -126, -126, -125, -126, + -126, -125, -126, -126, -125, -125, -126, -126, -126, -126, -126, -126, + -126, -126, -125, -126, -126, -126, -126, -126, -126, -126, -126, -126, + -126, -126, -126, -126, -112, -114, -64, -87, -49, 23, 14, -24, + -44, -2, -88, -101, -46, -116, -80, -84, -20, -40, -49, -1, + -9, -99, 29, -101, 42, 127, -90, -5, -114, -29, -27, -83, + -79, -85, -109, -109, -120, -84, -107, -116, -103, -115, -109, -110, + -113, -106, -107, -102, -121, -109, -85, -111, -100, -53, -120, -99, + -80, -91, -109, -120, -107, -122, -117, -110, -124, -125, -117, -125, + -123, -117, -108, -119, -123, -125, -119, -119, -117, -122, -119, -120, + -121, -124, -119, -120, -123, -122, -127, -124, -122, -123, -123, -119, + -123, -120, -126, -120, 
-113, -117, -122, -122, -125, -113, -117, -126, + -117, -126, -125, -127, -124, -125, -126, -125, -126, -126, -126, -127, + -126, -127, -127, -126, -127, -127, -127, -127, -126, -126, -127, -125, + -126, -126, -126, -127, -127, -127, -127, -127, -126, -126, -125, -125, + -126, -126, -125, -127, -126, -127, -125, -126, -124, -125, -126, -127, + -126, -126, -125, -123, -125, -126, -126, -125, -126, -125, -122, -125, + -127, -124, -127, -124, -124, -126, -124, -126, -123, -125, -125, -125, + -126, -126, -122, -124, -123, -123, -126, -126, -125, -127, -127, -126, + -127, -127, -127, -127, -126, -126, -126, -126, -126, -127, -127, -127, + -127, -127, -125, -126, -126, -126, -126, -125, -125, -127, -127, -126, + -127, -126, -126, -127, -126, -126, -127, -127, -126, -126, -127, -125, + -127, -126, -126, -127, -127, -127, -127, -128, -127, -127, -127, -127, + -127, -127, -127, -127, -126, -126, -127, -127, -127, -127, -127, -127, + -127, -127, -127, -127, -127, -127, -127, -127, -106, -104, -84, -42, + -86, -6, 59, -9, 127, -58, 37, -42, -87, -72, -101, -51, + -23, -103, -30, -48, -79, 24, -64, 49, 88, -72, 60, -9, + -39, -52, -85, -50, -77, -110, -108, -101, -100, -103, -110, -101, + -111, -119, -116, -120, -107, -113, -115, -116, -106, -117, -115, -111, + -100, -80, -104, -64, -98, -81, -87, -116, -110, -111, -103, -101, + -114, -120, -115, -105, -116, -122, -117, -111, -115, -121, -124, -113, + -115, -119, -118, -116, -117, -123, -123, -116, -119, -125, -121, -123, + -125, -119, -123, -117, -125, -125, -126, -120, -123, -126, -111, -117, + -122, -119, -113, -122, -125, -121, -115, -125, -122, -120, -118, -121, + -120, -119, -120, -121, -124, -124, -124, -126, -124, -124, -124, -123, + -124, -124, -122, -121, -121, -122, -123, -123, -123, -123, -124, -125, + -126, -124, -126, -125, -121, -122, -124, -121, -123, -123, -125, -123, + -121, -123, -123, -121, -124, -125, -125, -124, -123, -121, -125, -121, + -120, -126, -121, -121, -123, -123, -122, -123, -125, -126, -127, 
-124, + -121, -124, -121, -121, -123, -123, -122, -122, -123, -124, -121, -124, + -125, -125, -126, -124, -121, -124, -121, -125, -121, -122, -125, -124, + -124, -126, -126, -125, -125, -126, -125, -124, -124, -125, -123, -121, + -124, -124, -123, -124, -125, -126, -124, -126, -126, -125, -126, -125, + -125, -124, -124, -123, -124, -124, -123, -124, -126, -125, -126, -126, + -126, -126, -126, -125, -125, -124, -123, -124, -123, -124, -123, -124, + -125, -125, -125, -126, -126, -126, -125, -125, -125, -124, -124, -123, + -123, -117, -106, -92, -75, -54, -44, 22, 115, 127, 72, -27, + -90, -78, -77, -81, -76, -59, -40, -41, -54, -42, 1, 0, + 7, 13, 21, -47, -27, -67, -79, -73, -110, -107, -123, -111, + -118, -99, -115, -116, -117, -118, -122, -124, -120, -117, -119, -112, + -127, -111, -119, -111, -121, -123, -88, -74, -88, -74, -101, -75, + -107, -94, -124, -121, -117, -123, -113, -118, -127, -118, -121, -113, + -119, -121, -118, -118, -126, -115, -122, -122, -125, -122, -125, -119, + -122, -126, -124, -124, -123, -120, -124, -122, -120, -126, -121, -119, + -125, -121, -118, -120, -120, -121, -122, -127, -123, -122, -124, -123, + -127, -126, -126, -125, -125, -127, -125, -124, -126, -126, -127, -127, + -125, -126, -126, -125, -127, -126, -127, -126, -127, -127, -126, -126, + -126, -125, -126, -127, -126, -126, -126, -127, -125, -125, -126, -125, + -124, -126, -125, -126, -123, -124, -122, -127, -126, -127, -126, -127, + -125, -126, -127, -127, -127, -126, -126, -126, -126, -127, -124, -125, + -125, -125, -125, -125, -127, -125, -124, -127, -127, -127, -126, -123, + -124, -126, -125, -122, -126, -123, -121, -122, -125, -125, -119, -124, + -123, -119, -122, -125, -124, -126, -125, -126, -126, -127, -126, -125, + -127, -126, -126, -126, -125, -127, -127, -127, -123, -126, -126, -125, + -126, -126, -123, -126, -127, -126, -126, -127, -127, -126, -127, -126, + -126, -126, -126, -127, -126, -127, -127, -127, -126, -127, -126, -127, + -127, -127, -127, -127, -127, -127, 
-127, -127, -127, -128, -127, -127, + -127, -127, -128, -128, -127, -128, -96, -41, -114, -17, 127, -96, + -76, -124, -84, -98, -107, -88, -112, -102, -119, -112, -107, -98, + -105, -104, -86, -113, -102, -85, -83, -99, -45, -62, -83, -110, + -106, -99, -80, -97, -105, -86, -116, -122, -127, -106, -110, -114, + -125, -99, -115, -104, -88, -105, -114, -84, -109, -87, -82, -94, + -39, -115, -30, -19, -103, -95, -90, -75, -116, -96, -106, -124, + -111, -111, -101, -118, -91, -91, -92, -104, -79, -117, -110, -112, + -123, -119, -116, -103, -118, -121, -123, -118, -122, -127, -122, -115, + -116, -121, -123, -126, -110, -110, -114, -119, -121, -119, -111, -120, + -114, -109, -107, -106, -114, -125, -117, -119, -124, -126, -124, -123, + -119, -120, -124, -126, -123, -123, -126, -127, -127, -123, -126, -124, + -127, -124, -126, -126, -126, -126, -127, -125, -126, -126, -124, -125, + -126, -124, -125, -124, -125, -121, -125, -125, -122, -122, -119, -120, + -125, -122, -124, -126, -124, -123, -125, -122, -126, -126, -122, -123, + -123, -126, -127, -125, -124, -126, -123, -126, -125, -124, -124, -124, + -126, -125, -125, -124, -124, -127, -124, -121, -125, -122, -123, -121, + -116, -122, -114, -118, -116, -111, -126, -99, -111, -125, -120, -125, + -121, -120, -124, -127, -125, -126, -126, -125, -127, -121, -123, -125, + -121, -125, -123, -123, -122, -125, -122, -126, -125, -119, -124, -124, + -126, -126, -125, -124, -127, -122, -123, -126, -126, -125, -126, -124, + -127, -124, -125, -127, -126, -127, -126, -126, -127, -127, -127, -127, + -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, -119, + -72, -108, -100, 127, -112, -110, -103, -95, -122, -72, -120, -104, + -112, -110, -103, -121, -102, -114, -111, -110, -106, -110, -109, -106, + -120, -118, -93, -102, -120, -117, -125, -124, -121, -116, -118, -115, + -120, -117, -125, -114, -120, -115, -110, -124, -120, -110, -116, -108, + -119, -109, -103, -90, -74, -54, -120, -70, -103, -86, -102, -110, + -106, -98, 
-110, -118, -119, -114, -117, -111, -112, -108, -110, -119, + -118, -125, -110, -114, -125, -117, -124, -119, -105, -112, -118, -120, + -123, -126, -116, -122, -117, -116, -115, -108, -105, -107, -111, -109, + -126, -119, -123, -113, -114, -116, -107, -114, -124, -123, -122, -125, + -121, -119, -120, -125, -122, -126, -123, -120, -120, -122, -126, -126, + -127, -126, -127, -126, -123, -120, -122, -124, -122, -125, -125, -122, + -127, -123, -124, -123, -120, -119, -120, -120, -120, -118, -120, -123, + -119, -116, -118, -121, -119, -122, -126, -123, -122, -124, -122, -115, + -119, -123, -122, -127, -125, -125, -124, -120, -119, -121, -127, -122, + -123, -122, -125, -124, -125, -123, -120, -118, -122, -122, -121, -122, + -121, -121, -122, -122, -121, -121, -124, -122, -118, -107, -110, -117, + -125, -121, -118, -114, -114, -124, -123, -121, -123, -125, -125, -127, + -122, -123, -122, -122, -124, -124, -119, -120, -123, -125, -126, -122, + -120, -122, -123, -122, -121, -125, -125, -125, -126, -126, -126, -126, + -127, -125, -126, -125, -126, -125, -126, -127, -126, -127, -126, -126, + -126, -126, -126, -126, -126, -127, -127, -127, -127, -127, -127, -127, + -126, -126, -126, -126, -98, -81, -97, -101, 127, -94, -108, -114, + -118, -116, -119, -119, -109, -122, -126, -110, -108, -109, -119, -110, + -116, -124, -116, -111, -100, -112, -122, -122, -120, -118, -123, -125, + -123, -112, -120, -125, -120, -124, -112, -121, -126, -127, -124, -114, + -112, -123, -124, -107, -118, -123, -115, -115, -115, -100, -111, -109, + -106, -108, -117, -112, -107, -116, -107, -123, -108, -118, -121, -120, + -110, -125, -117, -114, -111, -100, -109, -111, -118, -111, -109, -112, + -112, -108, -95, -106, -124, -113, -122, -114, -110, -110, -107, -114, + -120, -75, -117, -105, -115, -118, -121, -119, -107, -112, -105, -107, + -109, -116, -114, -116, -114, -116, -120, -118, -117, -119, -119, -115, + -121, -115, -119, -121, -126, -119, -124, -123, -122, -123, -124, -123, + -120, -123, -124, 
-125, -122, -122, -127, -124, -118, -108, -121, -120, + -125, -126, -121, -116, -126, -124, -112, -115, -116, -124, -122, -127, + -117, -124, -125, -115, -115, -122, -123, -125, -118, -118, -118, -112, + -113, -121, -109, -107, -115, -113, -120, -120, -120, -105, -121, -106, + -93, -83, -109, -96, -76, -93, -91, -55, -71, -94, -109, -90, + -95, -95, -50, -3, -10, -33, -13, -45, -87, -68, -50, -82, + -103, -88, -118, -84, -94, -100, -94, -85, -89, -72, -60, -95, + -102, -73, -67, -84, -117, -78, -95, -100, -120, -105, -111, -107, + -114, -124, -121, -120, -124, -116, -113, -124, -125, -125, -120, -122, + -123, -125, -125, -124, -126, -125, -124, -125, -126, -126, -125, -126, + -127, -127, -127, -126, -126, -126, -127, -126, -127, -48, -1, -19, + -63, 48, -93, -95, -105, -93, -119, -105, -96, -111, -125, -109, + -125, -121, -118, -109, -125, -121, -115, -124, -119, -124, -114, -124, + -116, -119, -116, -118, -125, -121, -121, -126, -122, -115, -111, -123, + -119, -124, -118, -118, -121, -126, -118, -120, -117, -113, -123, -119, + -123, -118, -116, -119, -111, -125, -119, -117, -125, -118, -117, -122, + -117, -123, -116, -121, -117, -111, -102, -105, -121, -79, -118, -88, + -98, -125, -120, -106, -124, -123, -119, -103, -74, -106, -110, -93, + -101, -107, -103, -105, -87, -86, -99, -118, -117, -88, -111, -102, + -93, -123, -103, -109, -123, -121, -122, -118, -122, -116, -117, -120, + -116, -123, -123, -125, -118, -117, -124, -125, -120, -123, -122, -126, + -118, -125, -119, -125, -122, -117, -124, -115, -119, -123, -121, -120, + -125, -118, -122, -115, -105, -123, -120, -114, -121, -113, -119, -120, + -114, -117, -123, -120, -115, -113, -115, -121, -111, -88, -110, -115, + -123, -109, -116, -59, -35, -58, -116, -85, -92, -43, -84, -72, + -118, -105, -121, -113, -63, -89, -85, -80, -73, -117, -74, -92, + -37, -28, -64, -37, -32, -56, 0, -112, -29, 14, 55, 127, + 70, 48, 43, 44, 108, 3, -69, -47, -45, -27, -26, -71, + -12, -67, 75, 74, -83, -82, -22, -67, -50, -104, 
-106, -30, + -50, -70, -73, -68, -120, -100, -116, -99, -103, -108, -110, -116, + -100, -124, -119, -100, -104, -122, -99, -100, -96, -89, -95, -111, + -121, -120, -122, -127, -120, -127, -126, -125, -126, -126, -126, -126, + -126, -127, -73, 4, 4, -33, 77, -33, -80, -124, -103, -101, + -107, -118, -88, -107, -111, -106, -89, -103, -126, -115, -109, -115, + -120, -124, -113, -117, -114, -110, -116, -109, -114, -123, -122, -117, + -121, -106, -111, -116, -113, -120, -111, -122, -118, -107, -118, -107, + -126, -101, -106, -123, -108, -114, -122, -123, -120, -103, -117, -112, + -114, -86, -96, -121, -110, -111, -94, -105, -118, -95, -106, -83, + -82, -84, -69, -75, -112, -81, -82, -111, -68, -97, -91, -45, + -68, -75, -82, -75, -65, -58, -77, -52, -89, -47, -3, -71, + -79, -76, -53, -56, -72, -108, -53, -70, -114, -98, -114, -104, + -120, -100, -106, -95, -121, -111, -115, -126, -112, -99, -123, -113, + -115, -117, -125, -125, -121, -115, -116, -125, -120, -114, -125, -122, + -119, -112, -118, -109, -112, -102, -109, -120, -112, -121, -113, -112, + -121, -117, -112, -105, -86, -104, -117, -90, -107, -119, -104, -94, + -100, -110, -22, -106, -95, -109, -69, -58, 3, -88, -8, -46, + -62, -71, -70, -97, -73, -95, -99, -79, -8, -113, -63, -54, + -34, 2, -13, -90, -82, -21, -56, -81, -9, 74, -53, -48, + -31, -19, -123, -115, -42, 24, 53, 85, 24, 38, 127, 3, + 71, -86, -36, -115, -110, -12, -123, 21, 68, -66, -15, -105, + -8, -86, -84, -78, -3, -59, -67, -41, -102, -102, -55, -83, + -67, -83, -49, -93, -101, -108, -95, -94, -117, -99, -83, -99, + -114, -61, -70, -69, -100, -113, -120, -126, -125, -125, -121, -125, + -124, -119, -118, -118, -115, -117, -116, -67, -18, -106, -78, 127, + -122, -100, -119, -99, -98, -110, -104, -119, -107, -111, -110, -119, + -92, -126, -119, -121, -111, -117, -111, -120, -117, -113, -123, -113, + -123, -124, -119, -117, -119, -106, -107, -121, -116, -109, -121, -119, + -113, -112, -95, -108, -113, -98, -123, -109, -104, -117, -101, -103, + 
-123, -120, -114, -84, -80, -96, -95, -104, -103, -86, -95, -89, + -110, -65, -77, -116, -111, -122, -99, -60, -58, -109, -98, -94, + -103, -88, -106, -38, -70, -93, -109, -50, -106, -82, -83, -77, + -68, -121, -36, -8, -80, -79, -69, -32, -46, -94, -114, -106, + -110, -80, -101, -106, -80, -102, -114, -94, -116, -113, -102, -115, + -116, -105, -110, -115, -110, -113, -114, -122, -121, -125, -118, -115, + -118, -123, -119, -116, -118, -110, -117, -126, -116, -119, -114, -95, + -123, -104, -114, -115, -113, -101, -119, -121, -115, -98, -111, -120, + -100, -89, -88, -106, -96, -102, -90, -94, -83, -106, -116, -124, + -48, -42, -70, -55, -74, -73, -99, -25, -59, -8, -52, -53, + -102, -12, -91, -70, -91, -15, 41, -50, -101, -35, 86, 39, + -8, -84, -71, -9, -86, -103, -42, 0, -42, -10, 21, -25, + -47, -31, 31, -42, 13, -38, -53, -13, -65, -97, 47, -71, + -16, 61, -35, -63, -11, 9, -69, -41, -66, 8, -71, -119, + -78, -70, -55, -80, -55, -78, -73, -90, -108, -80, -85, -118, + -109, -118, -115, -124, -114, -114, -123, -117, -120, -118, -122, -125, + -124, -124, -125, -116, -125, -123, -124, -126, -124, -124, -126, -127, + -119, -102, -70, -64, 116, -98, -110, -87, -114, -101, -120, -110, + -102, -108, -117, -117, -113, -111, -113, -122, -113, -114, -122, -112, + -117, -109, -119, -122, -112, -120, -110, -114, -113, -117, -115, -114, + -116, -107, -120, -110, -96, -114, -116, -100, -97, -115, -113, -105, + -119, -106, -117, -106, -120, -93, -102, -109, -111, -103, -104, -89, + -104, -114, -95, -109, -109, -98, -105, -79, -120, -109, -108, -74, + -93, -96, -108, -106, -88, -58, -37, -120, -93, -46, -69, -115, + -83, -95, -90, -97, -74, -100, -99, -40, -87, -115, -65, -90, + -92, -84, -98, -99, -107, -113, -90, -75, -121, -108, -98, -118, + -98, -105, -88, -116, -123, -106, -110, -115, -119, -115, -117, -117, + -114, -119, -122, -117, -109, -106, -112, -113, -118, -113, -118, -122, + -118, -120, -107, -123, -89, -124, -96, -118, -107, -113, -103, -117, + -102, -127, -116, 
-117, -115, -104, -89, -92, -111, -92, -119, -114, + -102, -67, -76, -71, -51, -54, -72, -54, -24, -115, -90, -115, + -84, -65, -86, -71, -81, -54, -109, -38, -31, -81, -83, 74, + -56, -32, -74, -8, -83, -1, -46, 13, -57, -99, -108, -65, + -81, -35, -43, 60, -27, 65, 127, 26, 37, 52, -71, 50, + -61, -39, 38, -7, -35, 19, -69, -82, -25, -41, -68, -4, + -77, -90, -42, -57, -37, 30, -50, -61, -83, -92, -52, -49, + -80, -66, -66, -75, -109, -104, -50, -99, -110, -124, -107, -117, + -112, -117, -106, -112, -114, -112, -118, -121, -118, -117, -119, -119, + -121, -121, -121, -122, -122, -103, -118, -88, -124, 41, -82, -93, + -92, -115, -118, -121, -103, -114, -120, -122, -125, -115, -122, -113, + -113, -112, -123, -110, -120, -120, -124, -116, -111, -105, -118, -119, + -104, -118, -110, -119, -125, -111, -110, -117, -118, -120, -109, -111, + -112, -121, -103, -109, -113, -110, -116, -117, -110, -113, -105, -118, + -117, -118, -109, -108, -118, -103, -104, -106, -123, -113, -111, -118, + -86, -103, -120, -109, -104, -106, -95, -101, -113, -78, -83, -68, + -88, -94, -105, -67, -63, -75, -101, -92, -104, -84, -102, -55, + -64, -111, -103, -95, -1, -19, -110, -74, -104, -105, -96, -81, + -109, -113, -56, -110, -105, -104, -82, -111, -119, -100, -110, -102, + -113, -115, -112, -117, -118, -120, -115, -120, -109, -119, -110, -118, + -121, -96, -100, -104, -95, -123, -109, -113, -119, -112, -100, -95, + -121, -99, -103, -115, -121, -118, -105, -121, -103, -109, -113, -113, + -118, -111, -85, -110, -107, -102, -109, -16, -98, -33, -99, -99, + -58, -77, -53, -88, -79, -112, -67, -96, -78, -37, -84, -89, + -73, -16, -28, -102, -26, -50, -90, -65, 17, -36, -48, -41, + -103, -58, -49, -78, -27, -16, -1, 54, 94, -21, 102, -43, + 28, 52, 28, 18, -93, -31, -54, -85, -38, -33, 127, -46, + -17, -33, -59, 58, -3, 48, 4, 39, 10, -56, -35, 28, + -26, -56, -52, -26, -49, -123, -77, -68, -106, -88, -66, -85, + -82, -118, -104, -88, -105, -90, -107, -93, -114, -115, -118, -119, + -126, -119, 
-122, -117, -123, -119, -116, -118, -117, -117, -113, -88, + -100, -77, -51, -106, -112, -102, -110, -113, -113, -111, -118, -125, + -120, -118, -121, -114, -123, -122, -119, -121, -120, -121, -119, -121, + -124, -122, -119, -114, -112, -120, -125, -122, -125, -121, -119, -125, + -112, -121, -116, -124, -116, -123, -116, -109, -113, -111, -119, -112, + -119, -112, -110, -111, -117, -125, -118, -114, -112, -111, -117, -123, + -116, -104, -125, -119, -109, -123, -98, -105, -118, -122, -105, -105, + -124, -113, -56, -68, -75, -81, -99, -59, -91, -89, -97, -63, + -84, -115, -84, -100, -63, -74, -46, -116, -102, -58, -65, -83, + -87, -126, -113, -115, -99, -112, -103, -93, -102, -120, -101, -104, + -95, -124, -124, -108, -115, -112, -123, -123, -124, -125, -124, -121, + -118, -115, -119, -122, -115, -120, -109, -113, -123, -121, -114, -120, + -118, -118, -103, -109, -103, -104, -109, -110, -116, -120, -117, -117, + -114, -113, -94, -86, -112, -96, -110, -94, -93, -86, -112, -114, + -60, -89, -101, -92, -94, -88, -38, -64, -93, -115, -108, -91, + -65, -101, -97, -88, -111, -48, -122, -59, -88, -75, 10, 56, + 92, 3, 127, 24, -105, -83, -120, -78, -26, -21, -55, -58, + -23, -14, -28, -26, -68, 16, -87, -25, -80, -33, -52, -49, + -85, 40, -56, -58, 7, -38, 103, 82, 29, -51, -34, -37, + -25, 34, -85, -72, 8, -69, 1, -82, -86, -117, -75, -104, + -111, -94, -103, -116, -123, -110, -114, -116, -102, -118, -118, -107, + -120, -124, -119, -110, -118, -117, -119, -119, -120, -123, -123, -121, + -120, -124, -122, -114, -68, -88, -84, -45, -81, -109, -119, -122, + -105, -103, -117, -115, -120, -124, -125, -126, -115, -113, -121, -118, + -126, -122, -123, -117, -124, -117, -120, -123, -120, -122, -121, -117, + -121, -121, -125, -116, -119, -108, -117, -117, -115, -121, -122, -117, + -120, -111, -122, -117, -116, -114, -116, -109, -119, -118, -111, -118, + -117, -111, -117, -119, -112, -117, -117, -123, -111, -117, -108, -110, + -90, -101, -113, -96, -113, -110, -110, -104, -99, -69, 
-79, -99, + -71, -118, -83, -96, -85, -107, -87, -78, -109, -75, -108, -83, + -108, -99, -99, -73, -106, -111, -115, -93, -112, -109, -103, -104, + -100, -95, -109, -111, -104, -94, -121, -114, -114, -121, -115, -99, + -117, -118, -121, -117, -120, -114, -120, -126, -124, -126, -123, -123, + -115, -123, -117, -126, -127, -123, -114, -115, -103, -110, -103, -107, + -109, -123, -103, -113, -115, -94, -109, -78, -102, -104, -98, -110, + -116, -104, -113, -99, -104, -92, -108, -95, -99, -78, -95, -98, + -77, -115, -83, -106, -93, -62, -91, -104, -109, -57, -59, -97, + -80, 22, -61, 8, -116, -63, 17, -112, -68, -98, -40, -84, + -85, -60, -91, -90, -88, -66, -64, -104, -95, -70, -12, -70, + -77, -51, -14, -72, -44, -62, -55, 127, 18, 9, -17, 44, + -55, -24, 22, -39, -94, -81, -46, -12, -26, -77, -66, -49, + -79, -104, -91, -111, -90, -88, -114, -109, -111, -116, -113, -103, + -109, -118, -111, -113, -114, -109, -119, -120, -114, -122, -122, -121, + -119, -119, -119, -119, -122, -125, -125, -124, -120, -105, -104, -83, + 53, -101, -96, -116, -90, -105, -112, -119, -106, -121, -123, -123, + -125, -125, -122, -120, -125, -126, -124, -117, -119, -114, -123, -121, + -117, -126, -126, -120, -120, -124, -123, -119, -121, -126, -118, -118, + -125, -125, -122, -121, -124, -123, -117, -124, -116, -116, -125, -122, + -122, -126, -120, -123, -116, -119, -120, -115, -117, -124, -120, -119, + -123, -123, -118, -111, -110, -107, -117, -106, -125, -112, -106, -118, + -110, -112, -126, -99, -109, -107, -107, -109, -107, -98, -99, -117, + -114, -109, -118, -95, -107, -116, -108, -96, -93, -107, -120, -123, + -114, -117, -110, -116, -123, -115, -116, -116, -120, -119, -119, -105, + -116, -123, -116, -108, -121, -118, -119, -124, -124, -123, -124, -123, + -126, -124, -122, -125, -121, -120, -123, -124, -123, -124, -126, -117, + -107, -124, -110, -124, -104, -117, -124, -117, -119, -125, -114, -116, + -115, -118, -120, -119, -111, -115, -121, -121, -101, -120, -93, -113, + -117, -105, -122, 
-105, -116, -122, -124, -99, -120, -112, -117, -114, + -126, -105, -107, -107, -95, -88, -111, -70, -86, -85, -88, -112, + -72, -87, -84, -109, -108, -100, -82, -90, -119, -106, -52, -77, + 23, 127, 84, -13, 19, 12, -7, -116, -104, -63, -84, -91, + -116, -46, -79, -43, -47, -96, -49, -31, -69, -53, -60, -54, + -24, 41, 6, -65, -110, -107, -72, -101, -116, -105, -99, -100, + -106, -111, -120, -123, -113, -103, -110, -108, -123, -119, -121, -121, + -121, -125, -124, -126, -124, -126, -126, -126, -126, -125, -125, -124, + -124, -106, -79, -117, -79, 32, -93, -98, -118, -104, -116, -117, + -126, -109, -110, -114, -121, -117, -115, -125, -122, -120, -122, -118, + -119, -119, -124, -127, -121, -123, -124, -122, -120, -121, -124, -121, + -117, -117, -121, -121, -119, -126, -123, -122, -125, -121, -125, -116, + -126, -126, -117, -122, -116, -126, -122, -123, -124, -117, -124, -125, + -122, -122, -115, -125, -120, -123, -124, -117, -123, -116, -104, -125, + -116, -123, -108, -121, -121, -120, -106, -116, -111, -116, -123, -126, + -112, -96, -102, -102, -94, -109, -114, -102, -87, -112, -90, -76, + -104, -95, -113, -122, -119, -123, -123, -123, -117, -117, -113, -119, + -126, -127, -119, -121, -125, -126, -125, -121, -115, -111, -126, -123, + -123, -123, -125, -124, -124, -126, -123, -124, -125, -119, -123, -122, + -119, -122, -121, -124, -121, -117, -121, -121, -124, -117, -114, -119, + -120, -114, -115, -121, -115, -119, -116, -121, -126, -124, -116, -117, + -100, -111, -111, -119, -118, -116, -122, -120, -124, -120, -115, -112, + -114, -120, -110, -114, -113, -119, -106, -108, -104, -85, -107, -90, + -93, -77, -81, -125, -125, -115, -67, -106, -105, -98, -100, -116, + -91, -109, -84, -114, -102, -12, 127, 112, 85, 42, -44, -32, + -81, -124, -85, -86, -82, -53, -103, -64, -76, -60, -77, -67, + -89, -123, -73, -121, -77, -105, -72, -50, -69, -71, -114, -49, + -55, -87, -66, -87, -110, -92, -107, -105, -105, -110, -123, -114, + -110, -115, -122, -121, -118, -119, -118, -119, 
-117, -118, -118, -118, + -118, -118, -118, -119, -119, -120, -102, -75, -65, -23, 42, -88, + -115, -81, -87, -114, -107, -107, -99, -119, -109, -113, -104, -112, + -114, -109, -118, -116, -114, -118, -119, -110, -118, -123, -123, -121, + -120, -115, -115, -113, -114, -116, -110, -118, -112, -126, -116, -121, + -122, -116, -123, -121, -111, -121, -118, -124, -109, -109, -113, -121, + -110, -118, -110, -125, -117, -88, -122, -99, -96, -102, -113, -117, + -106, -96, -98, -79, -111, -94, -98, -118, -118, -116, -106, -102, + -79, -91, -62, -86, -92, -61, -85, -57, -112, -95, -63, -66, + -66, -110, -46, -80, -34, -63, -59, -108, -102, -106, -106, -108, + -106, -104, -112, -102, -125, -121, -119, -109, -112, -121, -123, -111, + -120, -104, -108, -124, -109, -118, -118, -123, -121, -121, -124, -121, + -119, -126, -110, -122, -121, -122, -117, -118, -119, -126, -121, -87, + -95, -95, -102, -122, -100, -83, -83, -68, -110, -105, -120, -112, + -98, -108, -90, -107, -103, -114, -105, -126, -108, -111, -99, -95, + -94, -100, -60, -95, -88, -81, -72, -60, -112, 8, -59, -58, + -45, -94, -55, 60, -93, -62, -33, -92, -15, -28, -63, -97, + -47, -90, -111, -98, -78, -75, -102, -97, -64, -2, -109, -78, + -52, 75, 31, 25, -26, -34, 47, -8, 34, 12, 127, 43, + 8, 11, 31, 25, -36, 14, -102, -67, -48, -79, -43, -108, + -94, -78, -110, -91, -116, -113, -122, -111, -97, -97, -109, -82, + -117, -95, -118, -110, -97, -115, -114, -126, -111, -125, -125, -120, + -123, -122, -123, -124, -124, -124, -124, -125, -125, -124, -124, -109, + -85, -96, -54, 18, -93, -102, -114, -101, -114, -89, -107, -104, + -121, -114, -127, -114, -123, -122, -124, -119, -120, -112, -121, -121, + -124, -119, -123, -118, -118, -118, -120, -115, -111, -119, -118, -118, + -125, -106, -114, -124, -123, -123, -122, -121, -118, -122, -120, -122, + -118, -126, -121, -114, -127, -117, -113, -116, -115, -124, -114, -97, + -109, -110, -107, -100, -101, -88, -95, -105, -93, -117, -115, -92, + -107, -77, -62, -121, -79, -74, -79, 
-105, -35, -100, -6, -123, + -87, -88, -65, -46, -98, -86, -16, -111, -90, -91, -62, -98, + -104, -115, -119, -104, -103, -90, -106, -106, -118, -78, -92, -99, + -88, -104, -113, -113, -112, -119, -104, -119, -108, -120, -118, -119, + -125, -115, -122, -120, -123, -113, -125, -112, -113, -120, -110, -112, + -112, -120, -116, -89, -95, -43, -82, -69, -70, -81, -117, -81, + -103, -114, -101, -100, -89, -107, -92, -112, -122, -98, -95, -106, + -113, -110, -97, -88, -94, -92, -88, -48, -83, -84, -25, -3, + -12, -85, -81, -64, -109, -77, -56, -55, 3, 8, -54, -41, + -62, -106, -95, -62, -76, -88, -103, -119, -122, -76, -106, -54, + -73, -106, -78, -26, -96, -46, 11, 35, -50, -29, -4, 56, + 27, -58, 30, 127, 19, 62, -96, -59, -74, -67, -98, -62, + -91, -84, -101, -88, -98, -114, -102, -101, -105, -99, -98, -97, + -109, -103, -116, -114, -106, -105, -105, -117, -116, -121, -119, -118, + -108, -122, -116, -118, -116, -123, -125, -124, -125, -123, -122, -122, + -121, -120, -118, -118, -104, -63, -39, -87, 127, -68, -127, -61, + -113, -96, -113, -82, -122, -114, -112, -124, -97, -108, -98, -114, + -111, -95, -113, -118, -116, -118, -121, -122, -103, -125, -117, -118, + -120, -105, -111, -121, -120, -98, -110, -122, -112, -105, -115, -115, + -116, -125, -115, -108, -110, -96, -117, -107, -98, -99, -122, -96, + -90, -83, -100, -83, -101, -113, -107, -109, -105, -98, -93, -98, + -87, -83, -111, -87, -92, 13, -96, -80, 11, -124, 102, -22, + -94, -41, -67, 63, -20, -38, -11, -49, -62, -36, -48, 14, + -66, -109, -84, 4, -38, -92, -81, -100, -110, -91, -71, -103, + -39, -73, -36, -116, -79, -104, -77, -65, -115, -74, -119, -97, + -94, -102, -101, -103, -105, -116, -115, -103, -117, -113, -106, -122, + -104, -122, -103, -102, -115, -101, -95, -113, -67, -50, -17, -89, + -115, -88, -58, -75, -93, -98, -67, -62, -40, -99, -81, -105, + -52, -77, -105, -80, -97, -83, -61, -52, -12, -46, -108, -28, + 94, 16, -36, 34, -111, -114, -117, -3, 62, -11, 41, -9, + 8, -73, -101, -15, -18, 14, 
-60, -66, 32, -31, -65, -98, + -115, -97, -79, -117, -80, -34, -96, -30, -42, -92, -113, -18, + 74, 114, 117, -49, -47, -84, 21, 8, 9, 108, 79, -48, + -38, 30, -63, -94, -94, -31, -78, -105, -96, -98, -50, -91, + -106, -108, -98, -86, -85, -72, -105, -117, -118, -115, -93, -114, + -119, -114, -122, -117, -121, -110, -110, -123, -119, -118, -125, -126, + -127, -127, -123, -123, -124, -121, -121, -120, -121, -106, -58, -112, + -20, 127, -124, -116, -125, -89, -117, -109, -118, -125, -112, -124, + -124, -126, -116, -125, -112, -117, -118, -117, -115, -125, -123, -123, + -122, -119, -124, -122, -121, -125, -120, -114, -119, -118, -119, -120, + -114, -121, -117, -114, -119, -126, -117, -116, -122, -117, -125, -106, + -120, -117, -116, -107, -117, -92, -104, -109, -119, -109, -109, -120, + -117, -110, -104, -119, -106, -111, -106, -94, -99, -79, -94, -102, + -92, -66, -60, -61, -70, -83, -67, -95, -99, -109, -74, -84, + -111, -89, -89, -106, -92, -89, -80, -106, -116, -52, -91, -102, + -102, -123, -119, -92, -104, -107, -85, -81, -118, -106, -109, -87, + -89, -122, -108, -122, -125, -106, -119, -106, -118, -127, -123, -117, + -125, -120, -113, -117, -115, -123, -109, -112, -123, -108, -111, -123, + -116, -113, -87, -110, -104, -89, -97, -117, -106, -77, -116, -116, + -119, -91, -108, -107, -85, -111, -117, -120, -121, -88, -90, -76, + -115, -80, -85, -56, -116, -51, -102, -71, -56, -44, -43, -78, + -34, -58, -41, 1, -82, 4, -99, -61, -71, -117, -77, -91, + -57, -90, -114, -97, -61, -114, -79, -99, -84, -59, -104, -76, + -92, -37, -73, -38, -55, -13, -110, 11, -89, -20, -83, 47, + -110, -81, 83, -94, -95, -23, -96, -79, -102, -68, -78, -24, + -94, -87, -68, -87, -90, -86, -106, -105, -107, -96, -115, -111, + -112, -126, -114, -124, -112, -123, -124, -123, -117, -118, -122, -115, + -124, -124, -125, -127, -125, -126, -124, -125, -125, -124, -125, -124, + -125, -125, -119, -116, -28, -48, 109, -69, -112, -96, -76, -115, + -94, -113, -86, -115, -112, -107, -118, -117, -111, 
-113, -121, -115, + -121, -111, -116, -112, -119, -118, -124, -123, -119, -111, -122, -121, + -119, -118, -124, -117, -117, -124, -110, -115, -120, -108, -114, -121, + -116, -121, -119, -124, -124, -108, -109, -114, -101, -102, -120, -98, + -117, -111, -109, -113, -121, -112, -119, -123, -106, -114, -123, -117, + -94, -99, -105, -99, -92, -106, -118, -93, -94, -95, -29, -53, + -77, -107, -70, -55, -58, -54, -54, -70, -32, -82, -67, -42, + -52, -116, -32, -79, -94, -91, -102, -95, -76, -84, -112, -101, + -62, -76, -107, -83, -100, -109, -100, -119, -94, -123, -104, -102, + -101, -124, -119, -108, -116, -113, -119, -125, -119, -100, -104, -116, + -119, -81, -104, -101, -105, -109, -107, -119, -85, -111, -114, -89, + -101, -92, -90, -116, -109, -85, -90, -101, -70, -108, -111, -92, + -108, -101, -105, -74, -47, -116, -58, -78, -76, -37, -52, -92, + -67, -86, 74, -81, -50, -9, -43, -102, -78, 15, -115, -86, + -82, -65, -45, -68, -4, -8, -36, -63, -54, -39, -105, -72, + -47, -43, -28, -29, -2, -4, -74, -90, -22, 108, 14, 93, + -35, 26, -50, -51, -38, 29, -93, 49, 127, -82, 26, 22, + -49, -69, -31, -32, -69, -75, -88, -75, -96, -84, -52, -83, + -104, -96, -108, -114, -104, -113, -122, -116, -120, -111, -113, -122, + -115, -118, -110, -127, -116, -121, -120, -118, -124, -124, -125, -124, + -124, -124, -122, -124, -123, -122, -122, -106, -101, -23, 5, 122, + -47, -81, -66, -56, 0, -65, -64, -101, -107, -106, -89, -121, + -81, -53, -86, -83, -99, -106, -110, -98, -120, -102, -111, -116, + -120, -121, -111, -110, -103, -102, -96, -73, -58, -59, -78, -76, + -90, -104, -119, -112, -111, -120, -126, -103, -96, -106, -120, -94, + -117, -110, -101, -112, -95, -103, -116, -103, -110, -111, -115, -113, + -106, -116, -106, -109, -108, -66, -64, -71, -75, -84, -50, -63, + -75, -74, -34, -80, -29, -70, -72, -82, -55, -97, -46, -56, + -52, -83, 41, -21, -46, -118, -84, 26, -99, -80, -82, -70, + -110, -71, -95, -31, -99, -101, -85, -105, -112, -58, -110, -88, + -107, -107, -99, -115, 
-118, -94, -107, -122, -119, -124, -124, -118, + -116, -118, -95, -97, -105, -108, -70, -106, -93, -93, -93, -61, + -99, -110, -58, -120, -104, -102, -62, -91, -77, -108, -85, -85, + -82, -81, -68, -99, -56, -96, -78, -79, -95, 23, -27, 27, + -71, 14, -62, -65, -62, -62, -75, 1, 33, -80, -5, -79, + -93, -63, -6, -65, -37, -68, -89, -86, -57, -101, 11, -62, + 34, -90, -68, -100, 10, 7, 42, 63, -64, -102, 91, -27, + -38, -13, 34, 33, 96, 26, 7, -21, -46, -20, -72, 94, + 127, 84, 39, -71, 64, -95, -36, -112, 16, -104, -37, -86, + -92, -60, -100, -54, -74, -108, -120, -97, -112, -121, -91, -99, + -126, -98, -97, -110, -114, -100, -114, -106, -122, -116, -116, -118, + -120, -118, -123, -120, -122, -120, -120, -119, -120, -120, -120, -120, + -105, -46, -45, -35, 127, -50, -85, -98, -84, -59, -66, -94, + -101, -123, -109, -105, -113, -110, -112, -111, -105, -119, -117, -114, + -114, -112, -116, -126, -119, -114, -108, -121, -117, -116, -118, -122, + -103, -119, -114, -111, -117, -114, -120, -125, -126, -120, -118, -114, + -119, -116, -121, -115, -116, -116, -111, -121, -116, -109, -103, -105, + -110, -100, -119, -123, -126, -117, -113, -126, -123, -114, -107, -106, + -111, -87, -98, -108, -90, -104, -76, -89, -62, -115, -103, -95, + -93, -45, -70, -109, -81, -103, -73, -107, -73, -91, -48, -47, + -83, -111, -99, -101, -95, -122, -73, -92, -81, -76, -100, -121, + -86, -96, -109, -102, -111, -112, -108, -100, -108, -113, -111, -115, + -125, -126, -114, -123, -114, -121, -127, -102, -105, -113, -99, -112, + -113, -124, -117, -102, -103, -93, -77, -88, -115, -105, -115, -117, + -103, -96, -103, -94, -110, -79, -75, -88, -45, -69, -43, -40, + -27, 57, -8, 5, -27, -82, -89, -107, -73, -93, -70, -103, + -113, -46, -73, -85, -92, -83, -63, -93, -59, -50, -37, -107, + -59, -92, -28, 60, -38, -81, 5, -86, -75, -54, -99, -41, + -116, -42, -38, -106, -109, -82, -59, -20, -19, -33, -95, -108, + -51, -17, -88, -27, 87, 76, 65, 21, -72, 3, -2, -80, + -36, -81, -66, -52, -50, -113, -95, 
-61, -95, -72, -90, -106, + -105, -108, -121, -108, -114, -108, -109, -112, -117, -122, -114, -105, + -113, -110, -116, -115, -121, -118, -125, -122, -122, -123, -122, -120, + -120, -118, -118, -117, -117, -75, -62, -76, -108, 127, -94, -75, + -96, -112, -118, -99, -115, -119, -119, -121, -123, -119, -122, -123, + -111, -125, -119, -117, -122, -118, -120, -126, -124, -121, -121, -121, + -125, -122, -113, -120, -121, -122, -119, -124, -122, -117, -118, -120, + -123, -122, -123, -126, -113, -119, -118, -118, -117, -123, -119, -122, + -115, -112, -111, -124, -117, -119, -123, -120, -116, -119, -116, -117, + -123, -120, -118, -123, -112, -122, -114, -106, -90, -117, -121, -104, + -94, -112, -84, -117, -100, -97, -113, -103, -95, -103, -93, -115, + -99, -107, -120, -86, -102, -79, -120, -123, -124, -107, -96, -119, + -99, -118, -100, -112, -97, -121, -116, -116, -119, -117, -122, -114, + -120, -116, -118, -127, -125, -120, -123, -122, -122, -126, -123, -113, + -117, -111, -111, -93, -117, -117, -120, -119, -117, -124, -124, -117, + -109, -105, -115, -113, -106, -112, -111, -93, -92, -95, -91, -124, + -104, -61, -24, -98, -109, -109, -104, -98, -93, -94, -122, -87, + -65, -87, -84, -110, -102, -89, -85, -67, -40, -82, -92, -50, + -114, -106, -71, -58, -94, -104, -65, -65, -23, -59, -94, -113, + -90, -121, -72, -52, -100, -96, -41, -70, -73, -96, -17, -25, + -47, -1, -92, -86, -82, -59, -55, -90, -109, -46, -5, -39, + -97, -104, -101, -53, -89, -79, -119, -112, -120, -102, -99, -109, + -88, -99, -121, -109, -119, -118, -112, -127, -113, -116, -127, -121, + -121, -119, -122, -126, -117, -121, -124, -123, -125, -123, -125, -126, + -126, -126, -126, -126, -124, -124, -124, -123, -123, -123, -93, -85, + -98, 7, 127, -103, -93, -99, -111, -103, -103, -106, -121, -115, + -123, -124, -114, -124, -116, -119, -121, -120, -118, -124, -126, -127, + -124, -122, -121, -119, -123, -118, -124, -123, -126, -117, -117, -124, + -120, -123, -125, -116, -120, -117, -121, -126, -121, -119, 
-124, -117, + -118, -118, -122, -118, -122, -126, -122, -120, -115, -123, -122, -123, + -117, -119, -111, -120, -120, -120, -121, -127, -121, -120, -123, -111, + -93, -102, -122, -103, -120, -106, -120, -110, -98, -114, -119, -120, + -97, -111, -112, -123, -98, -109, -122, -106, -118, -115, -103, -113, + -113, -113, -112, -110, -114, -108, -115, -108, -118, -109, -121, -115, + -124, -117, -121, -121, -118, -119, -124, -126, -121, -117, -120, -123, + -121, -125, -112, -126, -124, -126, -119, -118, -118, -113, -123, -121, + -117, -122, -120, -114, -112, -111, -122, -108, -99, -113, -115, -84, + -112, -90, -89, -117, -123, -108, -113, -113, -101, -119, -115, -117, + -111, -111, -92, -111, -104, -116, -81, -83, -111, -99, -94, -83, + -101, -69, -74, -116, -116, -112, -119, -108, -123, -106, -88, -102, + -89, -103, -107, -100, -100, -102, -122, -101, -120, -100, -82, -105, + -82, -71, -53, -117, -77, -71, -80, -96, -73, -68, -115, -84, + -78, -78, -112, -49, -37, -56, -113, -105, -82, -101, -114, -115, + -108, -119, -114, -118, -109, -97, -110, -120, -119, -106, -117, -119, + -121, -122, -121, -125, -123, -121, -124, -122, -125, -126, -126, -126, + -125, -125, -125, -126, -126, -127, -125, -125, -125, -126, -126, -126, + -126, -127, -127, -121, -79, -74, -57, 127, 1, -17, -67, -7, + -37, -95, -92, -29, -103, -95, -106, -95, -73, -89, -100, -106, + -95, -110, -92, -119, -117, -119, -112, -101, -94, -125, -114, -106, + -94, -114, -115, -87, -103, -103, -92, -110, -115, -99, -123, -101, + -107, -106, -113, -91, -86, -101, -105, -120, -123, -108, -101, -123, + -111, -116, -119, -117, -110, -121, -110, -122, -121, -89, -120, -94, + -71, -119, -86, -94, -87, -84, -99, -68, -113, -121, -94, -86, + -112, -40, -122, -96, -56, -75, -111, -97, -81, -93, -105, -102, + -85, -97, -91, -112, -87, -94, -102, -108, -114, -115, -119, -102, + -106, -105, -115, -107, -96, -119, -112, -101, -125, -103, -109, -119, + -110, -124, -123, -120, -113, -120, -117, -104, -123, -123, -121, -111, + 
-109, -123, -112, -116, -117, -109, -118, -125, -105, -83, -88, -113, + -110, -115, -126, -91, -94, -111, -78, -119, -124, -102, -116, -88, + -124, -97, -118, -95, -95, -106, -83, -91, -76, -44, -81, -97, + -59, -55, -99, -86, -78, -51, -78, -67, -109, -108, -98, -88, + -125, -51, -13, 2, -7, -55, -98, -100, -103, -118, -106, -94, + -101, -79, -116, -108, -114, -95, -48, -39, -24, 1, -49, -36, + -83, -77, -37, -47, -26, -67, 13, -28, -106, -36, -49, -25, + -43, -90, -97, -121, -101, -77, -70, -98, -92, -118, -94, -90, + -113, -109, -109, -115, -107, -114, -126, -123, -123, -116, -121, -118, + -122, -121, -124, -125, -125, -123, -123, -125, -125, -124, -126, -125, + -125, -126, -126, -126, -125, -126, -126, -127, -123, -75, -42, -114, + 127, -32, -58, -65, -80, -77, -115, -76, -74, -79, -96, -107, + -109, -83, -119, -115, -111, -89, -113, -122, -110, -102, -118, -125, + -107, -119, -119, -125, -125, -109, -103, -117, -122, -119, -93, -109, + -111, -112, -102, -126, -110, -113, -122, -105, -108, -103, -100, -111, + -119, -120, -110, -107, -118, -99, -117, -122, -115, -115, -119, -125, + -122, -114, -111, -113, -120, -116, -99, -104, -116, -107, -124, -109, + -101, -114, -112, -120, -119, -102, -98, -87, -101, -110, -118, -106, + -112, -110, -104, -98, -87, -91, -111, -105, -114, -91, -95, -110, + -119, -123, -117, -109, -116, -121, -120, -107, -116, -121, -124, -119, + -123, -124, -122, -122, -112, -118, -120, -114, -124, -125, -125, -124, + -119, -117, -125, -124, -125, -119, -122, -121, -118, -117, -123, -124, + -120, -123, -121, -125, -113, -125, -120, -114, -114, -112, -100, -107, + -117, -124, -113, -113, -119, -112, -124, -105, -106, -109, -117, -115, + -122, -110, -112, -108, -124, -111, -127, -115, -115, -102, -106, -111, + -114, -112, -110, -115, -113, -102, -116, -106, -70, -66, -80, -96, + -100, -114, -109, -114, -120, -113, -111, -103, -115, -108, -111, -116, + -98, -93, -94, -127, -126, -91, -111, -115, -94, -104, -113, -97, + -124, -91, -80, -96, -77, 
-88, -99, -112, -117, -100, -107, -126, + -117, -121, -115, -124, -112, -113, -119, -124, -123, -119, -123, -123, + -122, -120, -122, -124, -123, -124, -126, -125, -126, -125, -126, -127, + -126, -127, -125, -125, -124, -125, -124, -125, -125, -125, -126, -127, + -127, -101, -56, -100, -16, 127, -109, -118, -113, -98, -116, -91, + -108, -110, -124, -112, -116, -122, -110, -118, -122, -109, -119, -119, + -114, -122, -118, -117, -120, -120, -118, -124, -123, -122, -122, -124, + -120, -110, -115, -124, -122, -120, -123, -111, -122, -114, -116, -116, + -124, -118, -120, -117, -117, -120, -127, -123, -121, -112, -126, -119, + -123, -121, -121, -126, -124, -122, -126, -119, -125, -120, -120, -125, + -117, -118, -121, -121, -121, -121, -119, -120, -124, -123, -118, -114, + -122, -119, -121, -118, -124, -117, -121, -115, -121, -118, -121, -106, + -124, -121, -118, -121, -120, -122, -122, -123, -126, -125, -123, -124, + -124, -124, -122, -124, -125, -122, -127, -124, -126, -124, -120, -123, + -123, -126, -124, -124, -125, -120, -124, -125, -126, -127, -125, -123, + -122, -124, -124, -125, -123, -124, -126, -126, -123, -126, -124, -124, + -124, -125, -121, -125, -125, -126, -124, -126, -124, -123, -123, -125, + -127, -127, -123, -125, -121, -120, -127, -124, -122, -125, -121, -125, + -127, -121, -126, -123, -124, -124, -124, -125, -125, -124, -121, -123, + -124, -120, -124, -121, -121, -125, -125, -122, -120, -124, -121, -126, + -121, -122, -119, -120, -123, -121, -126, -127, -117, -115, -127, -116, + -118, -119, -117, -124, -125, -122, -120, -125, -124, -124, -123, -126, + -120, -123, -119, -122, -123, -125, -125, -121, -123, -125, -125, -123, + -124, -125, -123, -126, -126, -125, -127, -127, -127, -126, -127, -127, + -127, -127, -127, -126, -126, -126, -126, -127, -127, -127, -127, -127, + -127, -127, -127, -127, -127, -126, -115, -82, -94, -88, 127, -65, + -101, -80, -124, -93, -95, -102, -120, -112, -113, -119, -115, -98, + -123, -103, -116, -123, -122, -119, -120, -116, 
-117, -112, -121, -123, + -125, -120, -123, -125, -125, -117, -122, -120, -124, -124, -122, -116, + -122, -116, -120, -121, -122, -122, -121, -118, -121, -122, -120, -119, + -124, -123, -123, -123, -119, -120, -123, -124, -122, -125, -126, -123, + -125, -123, -126, -124, -121, -121, -125, -123, -126, -126, -125, -126, + -121, -123, -127, -120, -123, -126, -119, -123, -124, -125, -117, -123, + -124, -126, -124, -124, -123, -122, -125, -122, -126, -126, -124, -124, + -125, -127, -125, -126, -125, -125, -125, -125, -127, -125, -127, -126, + -127, -123, -125, -127, -124, -123, -124, -127, -126, -122, -127, -126, + -127, -126, -126, -126, -122, -126, -126, -125, -124, -125, -123, -126, + -127, -126, -124, -126, -125, -125, -125, -127, -124, -126, -123, -125, + -125, -126, -127, -126, -126, -125, -124, -126, -124, -127, -126, -126, + -127, -125, -127, -125, -124, -125, -126, -125, -124, -127, -126, -125, + -127, -122, -125, -126, -123, -125, -127, -126, -125, -127, -124, -126, + -126, -126, -123, -125, -123, -125, -121, -126, -126, -126, -126, -122, + -127, -125, -127, -125, -127, -125, -127, -124, -126, -125, -124, -125, + -127, -126, -125, -125, -126, -126, -125, -125, -125, -124, -127, -125, + -126, -126, -126, -126, -125, -126, -127, -126, -127, -126, -127, -127, + -126, -127, -127, -126, -127, -127, -127, -127, -127, -127, -127, -127, + -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, -112, + -63, -91, -76, 127, -91, -98, -111, -99, -86, -116, -117, -119, + -119, -111, -126, -121, -120, -116, -107, -116, -118, -123, -121, -118, + -122, -118, -120, -120, -123, -122, -120, -118, -127, -122, -118, -125, + -126, -124, -125, -126, -123, -121, -123, -125, -126, -122, -124, -119, + -126, -127, -126, -122, -125, -126, -124, -125, -125, -123, -124, -126, + -123, -124, -125, -125, -126, -124, -122, -126, -124, -122, -124, -125, + -127, -126, -121, -124, -127, -126, -125, -124, -125, -126, -121, -123, + -123, -124, -124, -127, -125, -124, -124, -126, -127, 
-123, -127, -122, + -124, -125, -125, -126, -125, -123, -125, -125, -127, -127, -124, -125, + -126, -125, -127, -126, -126, -126, -127, -124, -124, -125, -123, -122, + -125, -127, -123, -125, -127, -126, -127, -126, -125, -126, -126, -126, + -126, -127, -126, -127, -126, -124, -125, -123, -125, -127, -124, -124, + -127, -125, -123, -126, -125, -124, -126, -126, -125, -127, -124, -126, + -124, -127, -124, -125, -125, -127, -126, -126, -127, -126, -126, -125, + -127, -126, -126, -126, -125, -125, -124, -125, -126, -126, -125, -125, + -126, -127, -126, -125, -127, -126, -125, -125, -127, -125, -126, -125, + -126, -125, -127, -123, -125, -126, -126, -125, -126, -125, -125, -126, + -126, -126, -127, -124, -125, -126, -124, -126, -126, -124, -127, -126, + -125, -126, -126, -125, -126, -127, -126, -126, -126, -126, -127, -126, + -126, -127, -127, -127, -126, -127, -126, -127, -127, -127, -127, -127, + -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, + -127, -127, -127, -127, -39, -23, -64, -69, 127, -83, -97, -114, + -80, -110, -91, -109, -82, -97, -103, -122, -111, -124, -113, -123, + -123, -117, -117, -125, -127, -113, -122, -124, -121, -118, -124, -119, + -127, -122, -119, -122, -127, -122, -121, -123, -120, -123, -122, -126, + -123, -113, -117, -125, -121, -123, -118, -119, -122, -124, -124, -124, + -123, -126, -123, -125, -124, -125, -124, -122, -117, -124, -125, -125, + -123, -126, -121, -118, -124, -125, -120, -117, -122, -119, -123, -120, + -122, -123, -125, -122, -125, -121, -121, -123, -125, -126, -124, -124, + -120, -125, -126, -123, -122, -124, -126, -126, -122, -122, -127, -127, + -126, -127, -122, -125, -124, -122, -127, -123, -122, -126, -127, -125, + -124, -122, -124, -127, -125, -125, -120, -125, -126, -127, -125, -124, + -125, -123, -127, -124, -124, -126, -125, -124, -121, -127, -126, -123, + -123, -125, -123, -126, -125, -124, -125, -125, -124, -124, -124, -125, + -123, -127, -123, -124, -121, -126, -123, -126, -125, -126, -127, 
-126, + -125, -126, -124, -126, -125, -126, -124, -126, -125, -123, -123, -126, + -127, -124, -125, -123, -123, -127, -125, -127, -123, -125, -124, -124, + -125, -123, -123, -125, -121, -121, -121, -121, -123, -124, -124, -125, + -124, -123, -126, -121, -121, -123, -123, -122, -123, -121, -126, -122, + -126, -126, -121, -125, -125, -126, -125, -126, -125, -125, -123, -122, + -126, -126, -127, -126, -126, -125, -127, -127, -125, -126, -127, -127, + -126, -126, -126, -126, -126, -127, -127, -126, -126, -127, -127, -127, + -127, -127, -127, -127, -127, -127, -127, -127, -127, -125, -72, -14, + -52, 127, -76, -78, -118, -100, -78, -101, -121, -79, -113, -119, + -114, -122, -104, -98, -109, -119, -111, -117, -113, -125, -113, -119, + -121, -111, -118, -120, -114, -116, -115, -121, -122, -124, -120, -124, + -113, -118, -123, -126, -123, -120, -123, -122, -116, -119, -121, -117, + -120, -119, -122, -121, -122, -126, -124, -124, -122, -123, -121, -123, + -122, -121, -121, -125, -123, -121, -125, -125, -124, -123, -123, -119, + -125, -125, -125, -123, -123, -124, -125, -118, -120, -120, -120, -122, + -123, -120, -122, -122, -122, -123, -121, -119, -119, -122, -122, -123, + -122, -125, -125, -123, -125, -124, -127, -125, -122, -121, -127, -119, + -126, -126, -124, -122, -125, -126, -125, -123, -124, -121, -125, -126, + -120, -124, -124, -125, -126, -126, -124, -123, -122, -127, -124, -120, + -123, -122, -122, -123, -121, -124, -123, -124, -126, -124, -122, -126, + -121, -124, -124, -125, -125, -125, -126, -124, -124, -122, -124, -120, + -126, -126, -127, -124, -125, -125, -126, -123, -123, -123, -124, -124, + -125, -123, -127, -126, -124, -125, -125, -124, -124, -120, -126, -126, + -125, -123, -124, -126, -125, -127, -125, -124, -125, -125, -122, -124, + -124, -124, -126, -123, -124, -123, -118, -124, -125, -125, -121, -122, + -125, -125, -123, -122, -127, -123, -124, -126, -122, -124, -127, -126, + -125, -126, -120, -124, -124, -124, -124, -126, -125, -125, -126, -127, + 
-127, -126, -125, -127, -125, -127, -127, -126, -126, -127, -126, -126, + -127, -126, -127, -127, -126, -127, -127, -126, -126, -126, -126, -127, + -126, -127, -121, -102, -88, -88, 127, -66, -99, -85, -105, -125, + -93, -92, -120, -114, -113, -118, -112, -108, -112, -121, -124, -126, + -124, -117, -118, -118, -126, -119, -125, -120, -121, -119, -125, -124, + -122, -119, -122, -125, -122, -124, -123, -124, -124, -124, -125, -126, + -125, -123, -123, -124, -121, -126, -123, -125, -125, -124, -125, -122, + -124, -122, -124, -125, -123, -126, -125, -126, -122, -125, -125, -123, + -124, -125, -125, -125, -122, -125, -125, -127, -122, -127, -125, -122, + -125, -124, -123, -123, -123, -125, -125, -126, -127, -123, -123, -124, + -122, -124, -123, -125, -124, -125, -125, -125, -127, -125, -124, -127, + -126, -125, -122, -122, -125, -125, -127, -123, -127, -126, -126, -124, + -125, -126, -124, -122, -127, -126, -126, -126, -124, -125, -124, -121, + -124, -124, -124, -127, -123, -126, -124, -123, -126, -127, -124, -124, + -127, -127, -126, -126, -125, -126, -126, -126, -125, -126, -124, -126, + -126, -125, -126, -127, -126, -124, -126, -125, -126, -125, -125, -126, + -126, -127, -125, -123, -126, -126, -126, -126, -127, -125, -126, -124, + -126, -126, -125, -125, -126, -125, -124, -124, -126, -123, -125, -124, + -127, -124, -126, -124, -126, -127, -126, -124, -126, -126, -124, -125, + -123, -127, -123, -126, -125, -126, -126, -125, -124, -126, -124, -125, + -125, -127, -126, -125, -125, -127, -127, -124, -126, -127, -125, -126, + -127, -126, -126, -127, -127, -127, -126, -127, -127, -127, -126, -126, + -126, -127, -126, -126, -127, -127, -127, -127, -127, -127, -127, -127, + -127, -127, -127, -127, -127, -126, -127, -83, -78, -71, -64, 127, + -119, -103, -98, -124, -122, -117, -123, -124, -121, -123, -121, -121, + -125, -114, -119, -120, -123, -126, -126, -123, -127, -120, -119, -123, + -124, -124, -119, -125, -125, -125, -125, -126, -125, -124, -123, -123, + -126, -127, 
-127, -123, -125, -124, -126, -126, -125, -124, -125, -126, + -122, -125, -125, -127, -125, -124, -126, -126, -126, -127, -125, -127, + -126, -125, -125, -125, -126, -124, -124, -126, -126, -126, -125, -127, + -126, -127, -127, -125, -123, -126, -126, -126, -125, -126, -125, -125, + -125, -126, -124, -125, -126, -126, -126, -126, -126, -126, -126, -127, + -126, -126, -127, -127, -125, -126, -125, -126, -126, -124, -125, -124, + -127, -127, -123, -126, -126, -126, -127, -127, -127, -127, -126, -126, + -125, -126, -125, -125, -125, -124, -124, -127, -127, -126, -125, -127, + -127, -126, -125, -125, -124, -126, -126, -126, -127, -127, -127, -125, + -126, -126, -125, -126, -127, -126, -125, -126, -126, -125, -127, -127, + -127, -126, -124, -126, -127, -127, -126, -124, -125, -126, -126, -124, + -126, -125, -127, -127, -126, -126, -126, -127, -124, -124, -124, -125, + -125, -126, -127, -125, -125, -127, -126, -127, -125, -127, -126, -125, + -126, -126, -127, -123, -126, -127, -125, -125, -127, -126, -125, -127, + -126, -126, -126, -125, -126, -126, -126, -127, -124, -126, -126, -126, + -127, -127, -127, -125, -126, -127, -127, -127, -126, -126, -127, -126, + -127, -127, -127, -126, -127, -127, -127, -127, -127, -127, -127, -127, + -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, + -59, -46, -49, -48, 127, -103, -91, -110, -102, -123, -103, -113, + -112, -117, -115, -119, -119, -120, -117, -115, -117, -119, -122, -121, + -125, -123, -121, -122, -118, -122, -125, -122, -124, -126, -126, -125, + -124, -123, -123, -125, -121, -126, -123, -120, -126, -124, -125, -126, + -125, -121, -125, -122, -125, -122, -123, -123, -126, -126, -127, -121, + -125, -127, -126, -126, -125, -124, -126, -123, -124, -124, -123, -125, + -126, -124, -125, -125, -127, -126, -126, -125, -126, -125, -126, -125, + -126, -123, -125, -125, -125, -123, -126, -125, -124, -123, -125, -124, + -125, -126, -124, -124, -126, -126, -126, -125, -125, -126, -124, -125, + -124, -125, -125, 
-126, -127, -125, -124, -126, -124, -123, -126, -126, + -127, -125, -123, -124, -125, -126, -126, -125, -126, -125, -126, -125, + -127, -126, -126, -124, -126, -127, -126, -126, -126, -126, -124, -125, + -127, -125, -127, -126, -125, -126, -126, -127, -126, -126, -125, -125, + -127, -126, -125, -126, -126, -126, -126, -125, -125, -126, -127, -124, + -123, -126, -126, -124, -125, -124, -122, -126, -126, -124, -121, -127, + -125, -126, -125, -127, -126, -125, -126, -127, -125, -126, -125, -126, + -125, -126, -126, -123, -125, -127, -126, -125, -125, -125, -123, -127, + -126, -124, -127, -125, -126, -126, -127, -126, -125, -125, -126, -126, + -126, -126, -123, -127, -126, -125, -127, -125, -127, -126, -126, -126, + -127, -126, -125, -126, -126, -127, -127, -127, -126, -127, -127, -127, + -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, -127, + -127, -127, -127, -127, -127, +}; diff --git a/third_party/xtensa/examples/micro_speech_lstm/yes_micro_features_data.h b/third_party/xtensa/examples/micro_speech_lstm/yes_micro_features_data.h new file mode 100644 index 0000000..f73347b --- /dev/null +++ b/third_party/xtensa/examples/micro_speech_lstm/yes_micro_features_data.h @@ -0,0 +1,45 @@ +/* Copyright 2021 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ +/* + * * Copyright (c) 2021 Cadence Design Systems Inc. 
+ * * + * * Permission is hereby granted, free of charge, to any person obtaining + * * a copy of this software and associated documentation files (the + * * "Software"), to deal in the Software without restriction, including + * * without limitation the rights to use, copy, modify, merge, publish, + * * distribute, sublicense, and/or sell copies of the Software, and to + * * permit persons to whom the Software is furnished to do so, subject to + * * the following conditions: + * * + * * The above copyright notice and this permission notice shall be included + * * in all copies or substantial portions of the Software. + * * + * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, + * * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF + * * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. + * * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY + * * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, + * * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE + * * SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
+ * */ + +#ifndef TENSORFLOW_LITE_MICRO_EXAMPLES_MICRO_SPEECH_MICRO_FEATURES_YES_MICRO_FEATURES_DATA_H_ +#define TENSORFLOW_LITE_MICRO_EXAMPLES_MICRO_SPEECH_MICRO_FEATURES_YES_MICRO_FEATURES_DATA_H_ + +extern const int g_yes_micro_f2e59fea_nohash_1_width; +extern const int g_yes_micro_f2e59fea_nohash_1_height; +extern const signed char g_yes_micro_f2e59fea_nohash_1_data[]; + +#endif // TENSORFLOW_LITE_MICRO_EXAMPLES_MICRO_SPEECH_MICRO_FEATURES_YES_MICRO_FEATURES_DATA_H_ diff --git a/third_party/xtensa/examples/pytorch_to_tflite/Makefile.inc b/third_party/xtensa/examples/pytorch_to_tflite/Makefile.inc new file mode 100644 index 0000000..341201d --- /dev/null +++ b/third_party/xtensa/examples/pytorch_to_tflite/Makefile.inc @@ -0,0 +1,22 @@ +ifeq ($(OPTIMIZED_KERNEL_DIR), xtensa) + EXAMPLE_NAME:=pytorch_to_tflite + PYTORCH_TO_TFLITE_TEST_SRCS := \ + $(TENSORFLOW_ROOT)third_party/xtensa/examples/$(EXAMPLE_NAME)/$(EXAMPLE_NAME)_test.cc \ + $(TENSORFLOW_ROOT)third_party/xtensa/examples/$(EXAMPLE_NAME)/pytorch_images_dog_jpg.cc \ + $(TENSORFLOW_ROOT)third_party/xtensa/examples/pytorch_to_tflite/pytorch_op_resolver.cc \ + + PYTORCH_TO_TFLITE_GENERATOR_INPUTS := \ + $(TENSORFLOW_ROOT)third_party/xtensa/examples/pytorch_to_tflite/mobilenet_v2_quantized_1x3x224x224.tflite + + PYTORCH_TO_TFLITE_HDRS := \ + $(TENSORFLOW_ROOT)third_party/xtensa/examples/$(EXAMPLE_NAME)/pytorch_images_dog_jpg.h \ + $(TENSORFLOW_ROOT)third_party/xtensa/examples/pytorch_to_tflite/pytorch_op_resolver.h \ + + + ## Tests loading and running a mobilenet v2 model. 
+ ifneq ($(TARGET_ARCH), hifi5) + $(eval $(call microlite_test,pytorch_to_tflite_test,\ + $(PYTORCH_TO_TFLITE_TEST_SRCS),$(PYTORCH_TO_TFLITE_HDRS),$(PYTORCH_TO_TFLITE_GENERATOR_INPUTS))) + endif +endif + diff --git a/third_party/xtensa/examples/pytorch_to_tflite/README.md b/third_party/xtensa/examples/pytorch_to_tflite/README.md new file mode 100755 index 0000000..0110e79 --- /dev/null +++ b/third_party/xtensa/examples/pytorch_to_tflite/README.md @@ -0,0 +1,18 @@ +# Setup Xtensa Tools +$ set path = ( ~/xtensa/XtDevTools/install/tools/RI-2020.5-linux/XtensaTools/bin $path ) + +$ set path = ( ~/xtensa/XtDevTools/install/tools/RI-2020.5-linux/XtensaTools/Tools/bin $path ) + +$ setenv XTENSA_SYSTEM ~xtensa/XtDevTools/install/tools/RI-2020.5-linux/XtensaTools/config + +$ setenv XTENSA_CORE AE_HiFi5_LE5_AO_FP_XC + +$ setenv XTENSA_TOOLS_VERSION RI-2020.5-linux + +$ setenv XTENSA_BASE ~/xtensa/XtDevTools/install/ + + +# Clean and build mobilenet_v2 model on TFLM +$ make -f tensorflow/lite/micro/tools/make/Makefile clean + +$ make -f tensorflow/lite/micro/tools/make/Makefile TARGET=xtensa OPTIMIZED_KERNEL_DIR=xtensa TARGET=xtensa TARGET_ARCH=hifi5 test_pytorch_to_tflite_test -j diff --git a/third_party/xtensa/examples/pytorch_to_tflite/images/qat_model.png b/third_party/xtensa/examples/pytorch_to_tflite/images/qat_model.png new file mode 100755 index 0000000..5a3cb29 Binary files /dev/null and b/third_party/xtensa/examples/pytorch_to_tflite/images/qat_model.png differ diff --git a/third_party/xtensa/examples/pytorch_to_tflite/mobilenet_v2_quantized_1x3x224x224.tflite b/third_party/xtensa/examples/pytorch_to_tflite/mobilenet_v2_quantized_1x3x224x224.tflite new file mode 100755 index 0000000..d5c59d1 Binary files /dev/null and b/third_party/xtensa/examples/pytorch_to_tflite/mobilenet_v2_quantized_1x3x224x224.tflite differ diff --git a/third_party/xtensa/examples/pytorch_to_tflite/pytorch_images_dog_jpg.cc 
b/third_party/xtensa/examples/pytorch_to_tflite/pytorch_images_dog_jpg.cc new file mode 100755 index 0000000..a28686d --- /dev/null +++ b/third_party/xtensa/examples/pytorch_to_tflite/pytorch_images_dog_jpg.cc @@ -0,0 +1,12587 @@ +/* Copyright 2022 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ +/* + * * Copyright (c) 2022 Cadence Design Systems Inc. + * * + * * Permission is hereby granted, free of charge, to any person obtaining + * * a copy of this software and associated documentation files (the + * * "Software"), to deal in the Software without restriction, including + * * without limitation the rights to use, copy, modify, merge, publish, + * * distribute, sublicense, and/or sell copies of the Software, and to + * * permit persons to whom the Software is furnished to do so, subject to + * * the following conditions: + * * + * * The above copyright notice and this permission notice shall be included + * * in all copies or substantial portions of the Software. + * * + * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, + * * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF + * * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
+ * * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY + * * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, + * * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE + * * SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. + * */ +#if !defined(HIFI4) +#include "third_party/xtensa/examples/pytorch_to_tflite/pytorch_images_dog_jpg.h" + +unsigned char g_pytorch_images_dog_jpg_data[] = { + 0x99, 0x99, 0x9a, 0x9b, 0x97, 0x96, 0x97, 0x96, 0x97, 0x97, 0x95, 0x94, + 0x95, 0x97, 0x97, 0x96, 0x99, 0x99, 0x9c, 0x9f, 0x9a, 0x97, 0x98, 0x98, + 0xa2, 0xcc, 0xec, 0x1e, 0x3d, 0x39, 0x2d, 0x29, 0x2d, 0x2b, 0x2e, 0x37, + 0x4b, 0x5c, 0x14, 0xac, 0xa2, 0x9c, 0x9c, 0x9c, 0x9b, 0x9d, 0x9f, 0x9f, + 0x99, 0x96, 0x95, 0x93, 0x97, 0x97, 0x97, 0x9d, 0xa6, 0xa7, 0xab, 0xa7, + 0xa1, 0xa1, 0x9e, 0x9d, 0x9c, 0xa0, 0xa1, 0xa1, 0xa7, 0xa7, 0xa7, 0xa4, + 0xa7, 0xa7, 0xa3, 0xa5, 0xa9, 0xa7, 0xa1, 0x9f, 0x9a, 0x97, 0x97, 0xa3, + 0xaa, 0xa7, 0xa8, 0xab, 0xa0, 0x97, 0x95, 0x9c, 0xab, 0xaf, 0xac, 0xac, + 0xa7, 0xa5, 0xae, 0xbc, 0xaf, 0xa8, 0xa6, 0xa7, 0xab, 0xaa, 0xaa, 0xa7, + 0xad, 0xb1, 0xb5, 0xb9, 0xb7, 0xc0, 0xc1, 0xbb, 0xb2, 0xb0, 0xb5, 0xbc, + 0xc0, 0xb1, 0x9c, 0xa7, 0xa4, 0x9f, 0xa0, 0x9e, 0x9f, 0xa6, 0xac, 0xac, + 0xa6, 0x99, 0x98, 0x98, 0x97, 0x93, 0x97, 0x9a, 0x95, 0x96, 0x95, 0x9a, + 0x9b, 0x96, 0x96, 0x94, 0x93, 0x99, 0x9c, 0x9c, 0x9b, 0x9d, 0xa0, 0xa4, + 0x9c, 0x93, 0x94, 0x93, 0x96, 0x99, 0x97, 0x97, 0x96, 0x93, 0x93, 0x92, + 0x91, 0x93, 0x94, 0x92, 0x93, 0x94, 0x95, 0x94, 0x93, 0x94, 0x97, 0x9a, + 0x9b, 0x9e, 0xa1, 0xa7, 0xa1, 0x97, 0x94, 0x96, 0x9b, 0xa3, 0xad, 0xb0, + 0xa8, 0xa1, 0x99, 0x99, 0x96, 0x95, 0x93, 0x92, 0x98, 0x9c, 0x94, 0x95, + 0x96, 0x9a, 0x9d, 0x9b, 0x9b, 0x9c, 0x99, 0x98, 0x95, 0x95, 0x95, 0x93, + 0x94, 0x94, 0x93, 0x92, 0x91, 0x94, 0x98, 0x99, 0x96, 0x9b, 0x9a, 0x98, + 0x96, 0x94, 0x96, 0x97, 0x99, 0x9c, 0x97, 0x97, 0x97, 0x97, 0x97, 0x99, + 0x9a, 0x9a, 0x9d, 0x9d, 0x98, 0x96, 0x97, 0x99, 0xa7, 0xf0, 0x24, 
0x3a, + 0x3c, 0x32, 0x28, 0x25, 0x2d, 0x2d, 0x2d, 0x32, 0x46, 0x5a, 0x29, 0xb7, + 0xa7, 0xa1, 0x9f, 0x9d, 0x9d, 0xa0, 0xa0, 0xa0, 0x9e, 0x98, 0x95, 0x96, + 0x98, 0x98, 0x96, 0x97, 0x9c, 0xa5, 0xad, 0xa9, 0x9f, 0x9d, 0x9e, 0xa0, + 0xa0, 0xa4, 0xa2, 0xa1, 0xa4, 0xa6, 0xa5, 0xa0, 0xa3, 0xa7, 0xa2, 0xa3, + 0xa1, 0xa6, 0xa4, 0x9e, 0x9f, 0xa2, 0xa5, 0xa8, 0xaa, 0xa6, 0xa1, 0xa1, + 0x9a, 0x97, 0x96, 0xa2, 0xb1, 0xb2, 0xb1, 0xab, 0xa7, 0xa2, 0xa7, 0xbf, + 0xb1, 0xa4, 0xa7, 0xa8, 0xac, 0xab, 0xab, 0xa7, 0xad, 0xb1, 0xb1, 0xb5, + 0xb9, 0xbf, 0xbb, 0xb6, 0xb2, 0xae, 0xb2, 0xbe, 0xbe, 0xac, 0x97, 0x97, + 0x9b, 0x9e, 0xa0, 0xa1, 0x9f, 0xa5, 0xa9, 0xad, 0xa6, 0x97, 0x98, 0x9c, + 0x9a, 0x98, 0x9c, 0x9d, 0x97, 0x95, 0x97, 0x9c, 0x9d, 0x9a, 0x98, 0x96, + 0x96, 0x95, 0x97, 0x96, 0x97, 0x9d, 0xa6, 0xa7, 0x9e, 0x95, 0x93, 0x92, + 0x94, 0x98, 0x98, 0x96, 0x97, 0x95, 0x93, 0x91, 0x91, 0x92, 0x94, 0x93, + 0x95, 0x95, 0x96, 0x94, 0x94, 0x96, 0x9c, 0x9c, 0x9e, 0xa2, 0xa2, 0xa6, + 0xa1, 0x99, 0x97, 0x98, 0x97, 0x99, 0xa9, 0xaf, 0xa1, 0x9e, 0x9c, 0x9a, + 0x99, 0x97, 0x97, 0x93, 0x97, 0x9c, 0x95, 0x94, 0x97, 0x9c, 0x9b, 0x98, + 0x99, 0x98, 0x98, 0x97, 0x93, 0x96, 0x97, 0x95, 0x93, 0x92, 0x92, 0x95, + 0x93, 0x97, 0x99, 0xa1, 0x96, 0x98, 0x9a, 0x97, 0x92, 0x93, 0x97, 0x98, + 0x9a, 0x9b, 0x96, 0x9b, 0x9b, 0x99, 0x97, 0x9b, 0x9b, 0x9b, 0x9b, 0x99, + 0x99, 0x99, 0x9b, 0x99, 0xab, 0x08, 0x33, 0x3b, 0x34, 0x2d, 0x28, 0x25, + 0x2b, 0x2d, 0x2d, 0x32, 0x43, 0x56, 0x43, 0xd2, 0xad, 0xa1, 0xa0, 0x9c, + 0x9e, 0xa0, 0x9e, 0x9e, 0x9c, 0x98, 0x97, 0x98, 0x98, 0x99, 0x94, 0x97, + 0x99, 0x99, 0x9c, 0xa2, 0x99, 0x95, 0x9c, 0xa0, 0xa1, 0xa4, 0xa1, 0xa1, + 0xa4, 0xa6, 0xa0, 0x9c, 0xa1, 0xa2, 0xa2, 0xa2, 0xa2, 0x9f, 0x9b, 0x9c, + 0xa1, 0xae, 0xb7, 0xb3, 0xb0, 0xb3, 0xaf, 0xab, 0xa5, 0xa3, 0xa0, 0xac, + 0xb6, 0xb5, 0xb6, 0xa6, 0x9d, 0xa3, 0xa4, 0xbb, 0xb7, 0xa6, 0xa7, 0xa8, + 0xa5, 0xa7, 0xa8, 0xa9, 0xae, 0xb2, 0xb3, 0xb7, 0xb9, 0xc0, 0xb7, 0xb1, + 0xb6, 0xb5, 0xb2, 0xb5, 0xb8, 0xa8, 0x96, 0x9e, 0xae, 0xa3, 0xa1, 
0x9f, + 0x9c, 0xa7, 0xa7, 0xa7, 0xa7, 0x99, 0x97, 0x97, 0x99, 0x97, 0x97, 0x97, + 0x96, 0x98, 0xa0, 0x9d, 0x9e, 0x9c, 0x9d, 0x9c, 0x95, 0x94, 0x96, 0x91, + 0x93, 0x97, 0xa1, 0xa8, 0xa1, 0x97, 0x96, 0x92, 0x93, 0x95, 0x96, 0x96, + 0x95, 0x92, 0x91, 0x91, 0x93, 0x94, 0x95, 0x93, 0x94, 0x94, 0x94, 0x94, + 0x93, 0x95, 0x9d, 0xa0, 0xa3, 0xa2, 0xa1, 0xa2, 0x9f, 0x9a, 0x95, 0x96, + 0x97, 0x96, 0x9c, 0xa7, 0x9f, 0x9a, 0x9b, 0x9c, 0x9f, 0x9c, 0x99, 0x97, + 0x97, 0x9a, 0x9a, 0x9a, 0x98, 0x98, 0x95, 0x94, 0x93, 0x94, 0x96, 0x97, + 0x97, 0x99, 0x99, 0x97, 0x92, 0x92, 0x92, 0x95, 0x93, 0x93, 0x97, 0x9b, + 0x97, 0x9a, 0x9a, 0x94, 0x93, 0x96, 0x94, 0x97, 0x9c, 0x97, 0x95, 0x9a, + 0x98, 0x97, 0x98, 0x99, 0x97, 0x97, 0x99, 0x97, 0x97, 0x99, 0x98, 0x99, + 0xab, 0x0b, 0x33, 0x35, 0x2d, 0x27, 0x27, 0x28, 0x2a, 0x30, 0x32, 0x32, + 0x41, 0x55, 0x51, 0xfb, 0xc6, 0xb0, 0xa2, 0x9c, 0x9c, 0x9d, 0xa1, 0xa1, + 0x9b, 0x9a, 0x97, 0x98, 0x9b, 0x9c, 0x98, 0x97, 0x9a, 0x96, 0x94, 0x98, + 0x96, 0x93, 0x98, 0xa0, 0xa1, 0xa2, 0xa1, 0x9d, 0xa1, 0x9f, 0x9a, 0x9c, + 0x9e, 0x9e, 0xa1, 0x9e, 0xa1, 0x9e, 0x99, 0x9a, 0x9a, 0x98, 0x9b, 0x9e, + 0xa1, 0xac, 0xb1, 0xb1, 0xb1, 0xb1, 0xb2, 0xb7, 0xbe, 0xbc, 0xb1, 0xa8, + 0xa1, 0xa4, 0xa7, 0xb7, 0xbd, 0xa8, 0xa5, 0xa6, 0xa1, 0xa7, 0xa7, 0xa6, + 0xad, 0xb7, 0xb2, 0xb7, 0xb9, 0xb6, 0xb0, 0xad, 0xb2, 0xb0, 0xa7, 0xa2, + 0xa4, 0x9d, 0x99, 0x9d, 0xb2, 0xa7, 0x9f, 0xa0, 0x9d, 0xa8, 0xa9, 0xa9, + 0xa8, 0x9c, 0x98, 0x97, 0x98, 0x98, 0x96, 0x97, 0x99, 0x9f, 0xa6, 0x9c, + 0x9d, 0x9b, 0x9a, 0x99, 0x94, 0x91, 0x92, 0x92, 0x93, 0x92, 0x98, 0xa8, + 0xa7, 0x9c, 0x99, 0x94, 0x92, 0x93, 0x94, 0x97, 0x97, 0x92, 0x92, 0x92, + 0x93, 0x92, 0x96, 0x94, 0x92, 0x96, 0x95, 0x94, 0x95, 0x97, 0x9a, 0x9c, + 0x9f, 0xa0, 0xa3, 0xa1, 0x9c, 0x98, 0x96, 0x95, 0x97, 0x96, 0x95, 0x9a, + 0x99, 0x97, 0x9b, 0x9d, 0xa0, 0x9e, 0x9f, 0x9c, 0xa0, 0xa1, 0xa5, 0xa8, + 0xa1, 0x97, 0x97, 0x97, 0x93, 0x92, 0x95, 0x95, 0x97, 0x97, 0x97, 0x97, + 0x93, 0x91, 0x92, 0x93, 0x92, 0x94, 0x97, 0x97, 0x94, 0x99, 0x9d, 
0x97, + 0x95, 0x97, 0x95, 0x97, 0x98, 0x97, 0x97, 0x9a, 0x99, 0x98, 0x97, 0x97, + 0x97, 0x98, 0x9b, 0x9a, 0x9a, 0x9b, 0x9a, 0x9a, 0xaf, 0x10, 0x33, 0x33, + 0x2c, 0x28, 0x27, 0x2b, 0x2c, 0x2d, 0x33, 0x35, 0x45, 0x55, 0x57, 0x24, + 0xeb, 0xcd, 0xac, 0xa3, 0x9f, 0xa0, 0xa1, 0xa1, 0x9a, 0x98, 0x99, 0x9c, + 0xa1, 0xa3, 0xa1, 0x9c, 0x9d, 0x98, 0x97, 0x96, 0x94, 0x95, 0x97, 0x9b, + 0x9f, 0xa0, 0x9b, 0x9c, 0x9c, 0x9b, 0x99, 0x9c, 0xa1, 0xa0, 0x9f, 0x9e, + 0x9c, 0x9a, 0x95, 0x98, 0x9c, 0x99, 0x9c, 0x9b, 0x9b, 0x9e, 0x9e, 0x9e, + 0x9c, 0x99, 0x9b, 0xa6, 0xac, 0xad, 0xac, 0xa9, 0xaa, 0xa9, 0xa5, 0xa8, + 0xb8, 0xb0, 0xa9, 0xa7, 0xa1, 0xa3, 0xa5, 0xa8, 0xab, 0xb5, 0xb8, 0xb8, + 0xb8, 0xb7, 0xb5, 0xb0, 0xa7, 0xa2, 0x9f, 0xa1, 0xa3, 0x9d, 0x95, 0x97, + 0x97, 0x97, 0x9d, 0xa1, 0x9e, 0xa7, 0xad, 0xab, 0xa7, 0x9d, 0x97, 0x99, + 0x99, 0x97, 0x95, 0x9c, 0xa0, 0x9e, 0xa4, 0x9f, 0x9c, 0x9c, 0x9c, 0x9a, + 0x9a, 0x97, 0x92, 0x92, 0x94, 0x93, 0x99, 0x9f, 0xa1, 0xa1, 0xa1, 0x9c, + 0x94, 0x96, 0x97, 0x97, 0x96, 0x94, 0x96, 0x95, 0x94, 0x94, 0x94, 0x94, + 0x93, 0x95, 0x95, 0x93, 0x94, 0x96, 0x97, 0x9c, 0x9f, 0x9c, 0x9f, 0x9f, + 0x9c, 0x99, 0x97, 0x97, 0x97, 0x96, 0x96, 0x96, 0x94, 0x95, 0x97, 0x97, + 0x9f, 0xa1, 0xa3, 0xa2, 0xa6, 0xaa, 0xaa, 0xa9, 0xa7, 0x9c, 0x9a, 0x9a, + 0x98, 0x94, 0x95, 0x96, 0x96, 0x95, 0x99, 0x99, 0x94, 0x92, 0x93, 0x94, + 0x93, 0x94, 0x94, 0x94, 0x96, 0x96, 0x98, 0x97, 0x96, 0x97, 0x95, 0x97, + 0x98, 0x9b, 0x99, 0x99, 0x99, 0x99, 0x95, 0x96, 0x98, 0x99, 0x9d, 0x9c, + 0x99, 0x98, 0x9c, 0x9a, 0xbe, 0x20, 0x34, 0x31, 0x2c, 0x28, 0x2b, 0x2a, + 0x2d, 0x2f, 0x33, 0x37, 0x46, 0x55, 0x5b, 0x42, 0x1e, 0xf6, 0xc4, 0xb8, + 0xac, 0x9f, 0x9b, 0x9c, 0x9b, 0x9a, 0x98, 0xa1, 0xa7, 0xa8, 0xa4, 0x9c, + 0x9d, 0x9c, 0x97, 0x97, 0x95, 0x99, 0x9f, 0xa1, 0x9e, 0x9f, 0x9b, 0x9b, + 0x9b, 0x99, 0x9a, 0x9c, 0xa1, 0x9e, 0x9c, 0x9d, 0xa1, 0x9e, 0x9d, 0xa4, + 0xa2, 0x9e, 0x9f, 0x9d, 0xa0, 0xa0, 0x99, 0x99, 0x99, 0x98, 0x9c, 0x9e, + 0xa0, 0xa2, 0xa2, 0xa4, 0xaa, 0xa6, 0xa5, 0xab, 0xb6, 0xb6, 0xaf, 
0xaa, + 0xa4, 0xa3, 0xa7, 0xa8, 0xaa, 0xb1, 0xb4, 0xb9, 0xba, 0xbd, 0xb8, 0xb3, + 0xae, 0xa7, 0xa0, 0xa2, 0xa3, 0x9d, 0x97, 0x97, 0x96, 0x93, 0x9b, 0x9d, + 0x9d, 0xa1, 0xa8, 0xad, 0xa9, 0xa1, 0x9a, 0xa2, 0xa4, 0x9f, 0x97, 0x97, + 0x9c, 0x9c, 0xa1, 0xa1, 0x9e, 0xa0, 0x9f, 0xa1, 0xa6, 0xa0, 0x96, 0x92, + 0x92, 0x92, 0x96, 0x99, 0x9b, 0xa2, 0xa2, 0xa1, 0x97, 0x99, 0x9c, 0x96, + 0x94, 0x95, 0x95, 0x97, 0x93, 0x94, 0x95, 0x95, 0x94, 0x96, 0x97, 0x92, + 0x96, 0x97, 0x97, 0x9b, 0x9c, 0x9b, 0x9f, 0xa0, 0x9f, 0x9a, 0x98, 0x97, + 0x96, 0x97, 0x97, 0x97, 0x95, 0x95, 0x97, 0x98, 0x97, 0x9f, 0xa4, 0xa2, + 0xa8, 0xa7, 0xa9, 0xab, 0xab, 0xa2, 0x9c, 0x9b, 0x99, 0x94, 0x97, 0x97, + 0x96, 0x98, 0x9c, 0x9c, 0x97, 0x96, 0x96, 0x95, 0x95, 0x97, 0x94, 0x94, + 0x97, 0x97, 0x98, 0x97, 0x94, 0x97, 0x96, 0x97, 0x99, 0x9a, 0x98, 0x99, + 0x97, 0x97, 0x97, 0x99, 0x9a, 0x99, 0x9c, 0x9f, 0xa0, 0xa1, 0xa7, 0xac, + 0xe2, 0x2f, 0x32, 0x2d, 0x2b, 0x2d, 0x2d, 0x2d, 0x2d, 0x34, 0x36, 0x3d, + 0x48, 0x57, 0x5d, 0x54, 0x44, 0x1d, 0xf3, 0xdc, 0xd0, 0xa6, 0x9c, 0x9d, + 0x9c, 0x9b, 0x9b, 0xa1, 0xa6, 0xa5, 0xa5, 0xa2, 0xa5, 0xa6, 0x9d, 0x9d, + 0xa1, 0xa1, 0xa7, 0xa5, 0xa0, 0x9d, 0x9b, 0x9b, 0x9c, 0x9b, 0x9d, 0x9f, + 0xa0, 0x9a, 0x97, 0x9a, 0x9e, 0xa7, 0xac, 0xac, 0xa9, 0xa4, 0xa0, 0xa1, + 0xa4, 0xa3, 0x9c, 0x9a, 0x9c, 0x9b, 0x9d, 0x9c, 0x97, 0x9a, 0x9e, 0xa1, + 0xa7, 0xa7, 0xa4, 0xac, 0xb3, 0xb7, 0xae, 0xa6, 0xa1, 0xa7, 0xa8, 0xab, + 0xa9, 0xb2, 0xb3, 0xb1, 0xb7, 0xb7, 0xb5, 0xb1, 0xb8, 0xb1, 0xa7, 0xa6, + 0xa9, 0xa2, 0x97, 0x97, 0x93, 0x94, 0x99, 0x9b, 0x9d, 0x9e, 0xa4, 0xaa, + 0xac, 0xa1, 0xa3, 0xb5, 0xb5, 0xb1, 0xa7, 0x9c, 0x9f, 0x9b, 0x9d, 0xa0, + 0xa6, 0xa5, 0xa2, 0xa7, 0xa6, 0x9d, 0x9c, 0x96, 0x94, 0x92, 0x92, 0x97, + 0x98, 0x9a, 0xa1, 0xa1, 0x98, 0x98, 0x9d, 0x99, 0x96, 0x95, 0x93, 0x94, + 0x91, 0x91, 0x92, 0x92, 0x93, 0x97, 0x96, 0x92, 0x92, 0x95, 0x98, 0x94, + 0x96, 0x99, 0x9d, 0x9e, 0x9d, 0x99, 0x97, 0x95, 0x98, 0x9a, 0x97, 0x99, + 0x99, 0x97, 0x98, 0x98, 0x97, 0x98, 0xa2, 0xa3, 0xa4, 0xa3, 0xa7, 
0xaa, + 0xa7, 0xa0, 0x9a, 0x99, 0x9b, 0x96, 0x97, 0x97, 0x97, 0x97, 0x9b, 0x9b, + 0x97, 0x94, 0x92, 0x91, 0x94, 0x99, 0x96, 0x98, 0x97, 0x98, 0x9a, 0x9a, + 0x97, 0x96, 0x96, 0x98, 0x9a, 0x9a, 0x9a, 0x99, 0x99, 0x97, 0x99, 0x9b, + 0x9c, 0x99, 0x9c, 0xa1, 0xa8, 0xac, 0xbc, 0xce, 0x0e, 0x38, 0x32, 0x2a, + 0x23, 0x2d, 0x2a, 0x32, 0x32, 0x35, 0x39, 0x44, 0x4d, 0x5a, 0x5f, 0x5d, + 0x54, 0x3d, 0x1f, 0x12, 0xef, 0xac, 0x9e, 0x9c, 0x9c, 0x9e, 0x9d, 0xa0, + 0xa6, 0xa1, 0xa3, 0xa7, 0xa4, 0xa7, 0xa7, 0xa9, 0xa7, 0xa4, 0xa7, 0xa7, + 0xa3, 0x9c, 0x9b, 0x9c, 0x9b, 0x9b, 0x9c, 0xa4, 0xa0, 0x9a, 0x9a, 0x9a, + 0x99, 0xa0, 0xae, 0xb1, 0xae, 0xad, 0xaa, 0xa8, 0xa6, 0xa1, 0xa1, 0x9d, + 0x9f, 0xa5, 0xa4, 0x9b, 0x9a, 0x9c, 0x9b, 0x9a, 0xa1, 0xa8, 0xa5, 0xb0, + 0xb8, 0xb7, 0xac, 0xa5, 0xa5, 0xab, 0xa9, 0xa6, 0xa2, 0xad, 0xb4, 0xae, + 0xb7, 0xb8, 0xb2, 0xb0, 0xbc, 0xb2, 0xac, 0xa8, 0xa7, 0xa1, 0x99, 0x97, + 0x95, 0x95, 0x99, 0x9c, 0x9e, 0x9d, 0xa4, 0xa7, 0xaa, 0xa3, 0xaa, 0xb6, + 0xb8, 0xb6, 0xb2, 0xae, 0xa8, 0xa1, 0xa1, 0xa0, 0xa4, 0xa7, 0xa7, 0xa4, + 0xa0, 0x9c, 0xa4, 0x9e, 0x96, 0x96, 0x97, 0x99, 0x9a, 0x95, 0x9b, 0x9e, + 0x9c, 0x9b, 0x9e, 0x9e, 0x97, 0x95, 0x96, 0x93, 0x92, 0x96, 0x94, 0x92, + 0x96, 0x9b, 0x99, 0x95, 0x96, 0x97, 0x97, 0x95, 0x93, 0x97, 0x9c, 0x9f, + 0x9c, 0x97, 0x97, 0x98, 0x9b, 0x9a, 0x9b, 0x99, 0x97, 0x97, 0x99, 0x9b, + 0x99, 0x97, 0x9d, 0xa3, 0xa7, 0xa7, 0xa6, 0xa2, 0x9c, 0x9c, 0x9c, 0x9d, + 0xa1, 0x9b, 0x98, 0x99, 0x99, 0x98, 0x99, 0x95, 0x95, 0x96, 0x94, 0x93, + 0x93, 0x98, 0x97, 0x9c, 0x93, 0x96, 0x98, 0x9c, 0x99, 0x94, 0x95, 0x97, + 0x98, 0x98, 0x9c, 0x9b, 0x9c, 0x98, 0x97, 0x9a, 0x9c, 0x9e, 0xa0, 0xa4, + 0xaf, 0xb7, 0xd7, 0xf8, 0x30, 0x3c, 0x31, 0x27, 0x24, 0x2c, 0x2f, 0x36, + 0x37, 0x37, 0x40, 0x49, 0x54, 0x5d, 0x60, 0x60, 0x5b, 0x4d, 0x3d, 0x33, + 0x02, 0xb6, 0xa6, 0x9f, 0x9d, 0x9c, 0x99, 0x97, 0x9c, 0xa1, 0xa4, 0xa5, + 0xa5, 0xa6, 0xa8, 0xa7, 0xa7, 0xa7, 0xa8, 0xa7, 0xa6, 0xa2, 0xa1, 0x9d, + 0x9d, 0x9a, 0x99, 0x9c, 0x99, 0x99, 0x99, 0x9c, 0x9a, 0x9c, 0xa1, 
0xab, + 0xac, 0xa8, 0xa7, 0xa2, 0xa1, 0xa0, 0xa1, 0x9f, 0xa1, 0xa4, 0xa3, 0xa1, + 0x9c, 0x99, 0x97, 0x97, 0x9a, 0x9b, 0x9e, 0xa7, 0xa7, 0xa7, 0xa4, 0xa1, + 0xa7, 0xac, 0xaa, 0xa5, 0xa2, 0xaa, 0xb3, 0xb7, 0xb9, 0xb9, 0xb3, 0xac, + 0xb5, 0xb1, 0xab, 0xa8, 0xad, 0xb0, 0xa2, 0x98, 0x97, 0x98, 0x99, 0x9b, + 0x9b, 0x9d, 0xa1, 0xa7, 0xa7, 0xa6, 0xac, 0xb1, 0xb5, 0xb5, 0xb7, 0xb7, + 0xaf, 0xa0, 0xa1, 0x9f, 0xa2, 0xa7, 0xa7, 0x9d, 0x9b, 0x9b, 0xa1, 0x9f, + 0x97, 0x96, 0x94, 0x98, 0x9d, 0x99, 0x9c, 0xa7, 0xa3, 0x9f, 0x9e, 0xa2, + 0x9b, 0x96, 0x97, 0x93, 0x91, 0x95, 0x97, 0x96, 0x98, 0x9b, 0x9a, 0x96, + 0x97, 0x97, 0x97, 0x9a, 0x99, 0x94, 0x94, 0x97, 0x97, 0x99, 0x97, 0x97, + 0x9b, 0x9c, 0x9d, 0x99, 0x97, 0x97, 0x97, 0x99, 0x9a, 0x98, 0x98, 0xa2, + 0xa8, 0xa7, 0xa5, 0x9f, 0x9f, 0xa1, 0xa4, 0xa5, 0xa7, 0xa7, 0xa1, 0x9e, + 0x9c, 0x99, 0x99, 0x95, 0x95, 0x96, 0x96, 0x95, 0x95, 0x99, 0x99, 0x9f, + 0x96, 0x98, 0x98, 0x9d, 0x9e, 0x97, 0x96, 0x97, 0x9a, 0xa0, 0xa0, 0x9a, + 0x98, 0x97, 0x98, 0x9a, 0x9a, 0x9f, 0xa5, 0xa7, 0xb7, 0xc4, 0xec, 0x1c, + 0x3d, 0x3b, 0x2d, 0x23, 0x24, 0x2a, 0x34, 0x37, 0x3d, 0x3f, 0x48, 0x51, + 0x57, 0x5e, 0x62, 0x62, 0x61, 0x59, 0x54, 0x47, 0x12, 0xd0, 0xbb, 0xa6, + 0x9c, 0x9b, 0x99, 0x97, 0x97, 0x9c, 0xa3, 0xa3, 0xa3, 0xa3, 0xa5, 0xa5, + 0xa7, 0xa5, 0xa7, 0xa7, 0xa7, 0xa7, 0xa9, 0xa2, 0x9d, 0x9d, 0x9c, 0x9b, + 0x9a, 0x9a, 0x99, 0x9c, 0x98, 0x98, 0x9c, 0x9f, 0xa7, 0xa7, 0xa4, 0xa4, + 0xa5, 0xa6, 0xa6, 0xa6, 0xa7, 0xa7, 0xa7, 0xa6, 0xa1, 0x9e, 0x9a, 0x9a, + 0x98, 0x9a, 0x9a, 0x9e, 0xa1, 0xa0, 0x9d, 0x9c, 0x9e, 0xa5, 0xa9, 0xa5, + 0xa1, 0xa6, 0xaf, 0xb7, 0xb2, 0xba, 0xb7, 0xae, 0xb1, 0xb2, 0xaa, 0xa8, + 0xb7, 0xc6, 0xbb, 0x9f, 0x98, 0x98, 0x9d, 0xa1, 0x97, 0x9d, 0xa1, 0xa9, + 0xac, 0xa7, 0xac, 0xb0, 0xb6, 0xb7, 0xb8, 0xb8, 0xb4, 0x9d, 0x9f, 0xa7, + 0xa5, 0xa8, 0xac, 0xa2, 0x9a, 0x99, 0xa1, 0xa1, 0x9d, 0x97, 0x97, 0x98, + 0x9f, 0x9b, 0x9b, 0x9f, 0x9f, 0xa0, 0xa0, 0xa3, 0x98, 0x97, 0x99, 0x98, + 0x95, 0x96, 0x96, 0x95, 0x97, 0x9d, 0x9b, 0x98, 0x98, 0x9a, 0x9d, 
0x99, + 0x99, 0x97, 0x92, 0x99, 0x9c, 0x99, 0x97, 0x98, 0x9a, 0x9a, 0x9b, 0x97, + 0x97, 0x96, 0x97, 0x99, 0x9a, 0x99, 0x96, 0x99, 0xa4, 0xac, 0xa7, 0xa4, + 0xa3, 0xa3, 0xa5, 0xa7, 0xa8, 0xa7, 0xa8, 0xaa, 0xa3, 0x9c, 0x9b, 0x99, + 0x97, 0x96, 0x98, 0x9a, 0xa0, 0xa1, 0xa1, 0xa0, 0x97, 0x99, 0x97, 0x98, + 0xa3, 0x99, 0x97, 0x96, 0x98, 0x9c, 0xa0, 0x99, 0x95, 0x97, 0x97, 0x97, + 0x98, 0x9d, 0xa2, 0xab, 0xbc, 0xd7, 0xf8, 0x2f, 0x42, 0x39, 0x2b, 0x25, + 0x26, 0x2e, 0x37, 0x3b, 0x44, 0x4a, 0x52, 0x57, 0x5a, 0x60, 0x64, 0x65, + 0x63, 0x60, 0x5e, 0x52, 0x24, 0xff, 0xda, 0xae, 0xa3, 0x9e, 0x9b, 0x98, + 0x98, 0x99, 0x9d, 0x9f, 0x9d, 0x9c, 0xa4, 0xa5, 0xa1, 0xa1, 0xa1, 0xa5, + 0xaa, 0xa8, 0xa7, 0xa8, 0xa5, 0xa1, 0x9d, 0x9a, 0x9c, 0x99, 0x9a, 0x9f, + 0xa1, 0xa0, 0x9d, 0x99, 0x9d, 0xa5, 0xa9, 0xa9, 0xa8, 0xa3, 0xa4, 0xa4, + 0xa2, 0xa1, 0xa2, 0xa2, 0xa2, 0xa2, 0x9b, 0x99, 0x9a, 0x9c, 0x9c, 0x9e, + 0xa0, 0x9c, 0x9a, 0x9c, 0x9b, 0xa1, 0xa8, 0xa5, 0xa1, 0xa6, 0xaf, 0xb3, + 0xa7, 0xb1, 0xb3, 0xac, 0xaf, 0xb3, 0xa9, 0xa7, 0xb7, 0xc5, 0xc6, 0xb1, + 0x9f, 0xa7, 0xb7, 0xac, 0x9d, 0xa5, 0xaf, 0xae, 0xac, 0xa3, 0xa9, 0xac, + 0xb6, 0xb7, 0xb9, 0xba, 0xb7, 0xa3, 0xa4, 0xac, 0xaa, 0xa9, 0xab, 0xa6, + 0x99, 0x98, 0xa1, 0xa6, 0xa0, 0x9e, 0x9b, 0x97, 0x9c, 0x9c, 0x9a, 0x9e, + 0xa0, 0xa4, 0xa6, 0xa1, 0x9c, 0x97, 0x97, 0x98, 0x97, 0x97, 0x97, 0x94, + 0x9b, 0xa1, 0x9f, 0x9d, 0x9d, 0x9d, 0x9e, 0x9b, 0x9c, 0x9a, 0x97, 0x98, + 0x9b, 0x98, 0x98, 0x99, 0x9c, 0x9f, 0x9f, 0x9a, 0x9a, 0x99, 0x98, 0x9a, + 0x99, 0x9a, 0x98, 0x9b, 0x9c, 0xa4, 0xa6, 0xa5, 0xa2, 0xa1, 0xa0, 0xa1, + 0xa2, 0x9d, 0x9e, 0xa6, 0xab, 0xa7, 0xa1, 0x9e, 0x9c, 0x9a, 0x9c, 0x9c, + 0x9f, 0xa1, 0xa4, 0xa3, 0x97, 0x97, 0x95, 0x96, 0xa0, 0x99, 0x97, 0x95, + 0x95, 0x97, 0x9b, 0x9c, 0x99, 0x9b, 0x99, 0x99, 0x9e, 0xa8, 0xaa, 0xc1, + 0xd2, 0xf3, 0x0b, 0x36, 0x42, 0x37, 0x28, 0x2a, 0x2c, 0x36, 0x3f, 0x42, + 0x4a, 0x52, 0x57, 0x5c, 0x5d, 0x62, 0x65, 0x66, 0x65, 0x63, 0x62, 0x57, + 0x44, 0x2f, 0xf3, 0xb1, 0xa5, 0x9c, 0x9d, 0x9b, 0x9c, 0x9b, 0x9a, 
0x9e, + 0x9f, 0xa0, 0xa1, 0x9f, 0xa1, 0x9e, 0xa1, 0xa5, 0xa7, 0xa1, 0xa1, 0xa9, + 0xac, 0xa4, 0x9e, 0x9c, 0x9e, 0x99, 0x9c, 0x9f, 0xa0, 0x9c, 0x9a, 0x9a, + 0x9b, 0xa6, 0xab, 0xa8, 0xa6, 0xa3, 0xa3, 0xa3, 0xa1, 0xa1, 0xa0, 0xa3, + 0xa5, 0xa7, 0xa2, 0x9b, 0xa1, 0xa7, 0xa3, 0xa2, 0xa0, 0x9d, 0x9c, 0x9b, + 0x9e, 0xa2, 0xa7, 0xa6, 0xa5, 0xa5, 0xac, 0xb1, 0xa4, 0xa7, 0xac, 0xa6, + 0xa8, 0xaf, 0xae, 0xaa, 0xb1, 0xbf, 0xc6, 0xc2, 0xb3, 0xb3, 0xc2, 0xaf, + 0xa3, 0xaf, 0xb8, 0xb5, 0xaa, 0xa1, 0xaf, 0xb0, 0xb1, 0xb1, 0xb4, 0xb7, + 0xb7, 0xb1, 0xac, 0xad, 0xaa, 0xa8, 0xa7, 0xa3, 0x9c, 0x9b, 0xa2, 0xa7, + 0xa1, 0xa2, 0xa1, 0x97, 0x97, 0x98, 0x98, 0xa2, 0xa4, 0xa4, 0xa5, 0x9f, + 0x9d, 0x97, 0x99, 0x9c, 0x9a, 0x97, 0x96, 0x96, 0x9d, 0xa9, 0xa9, 0xa3, + 0x9c, 0x9c, 0x9f, 0x9d, 0x9c, 0x9a, 0x97, 0x98, 0x9b, 0x9c, 0x9d, 0x9e, + 0xa3, 0xa2, 0x9d, 0x9e, 0xa1, 0xa1, 0x9e, 0xa1, 0x9c, 0x9a, 0x95, 0x97, + 0x98, 0x97, 0x9e, 0xa8, 0xa4, 0xa1, 0xa0, 0x9e, 0x9e, 0xa2, 0xa3, 0x9d, + 0xa1, 0xa5, 0xa4, 0x9b, 0x9a, 0x9c, 0x9f, 0x9a, 0x9b, 0xa1, 0xa4, 0xa6, + 0x99, 0x94, 0x93, 0x96, 0x9b, 0x99, 0x97, 0x97, 0x9a, 0x9d, 0x9e, 0x9f, + 0xa7, 0xa0, 0x9f, 0xa3, 0xac, 0xbe, 0xcd, 0xe2, 0x10, 0x12, 0x29, 0x3c, + 0x42, 0x37, 0x2a, 0x2c, 0x35, 0x3d, 0x4a, 0x4e, 0x52, 0x59, 0x5d, 0x60, + 0x62, 0x65, 0x65, 0x65, 0x65, 0x65, 0x63, 0x5e, 0x5a, 0x44, 0xfa, 0xb7, + 0xa5, 0xa0, 0x9d, 0x9c, 0x9d, 0x9b, 0x9a, 0x9d, 0xa1, 0xa3, 0xa3, 0xa2, + 0xa7, 0xa7, 0xa4, 0xa3, 0xa1, 0xa1, 0xa3, 0xa3, 0xa3, 0xa1, 0xa0, 0x9f, + 0xa0, 0x9e, 0xa1, 0xa5, 0x9d, 0x97, 0x96, 0x97, 0x9a, 0xaa, 0xae, 0xac, + 0xa6, 0xa7, 0xa6, 0xa2, 0xa2, 0xa2, 0x9e, 0x9f, 0xa1, 0xa6, 0xa7, 0x9f, + 0xa7, 0xb8, 0xb2, 0xa5, 0x9d, 0x9c, 0x9d, 0x9d, 0xa0, 0xa0, 0xa2, 0xa4, + 0xa7, 0xa1, 0xa7, 0xb5, 0xa7, 0xa5, 0xa7, 0xa5, 0xaa, 0xab, 0xa8, 0xaa, + 0xb0, 0xbf, 0xc2, 0xbb, 0xb0, 0xb7, 0xba, 0xaa, 0xa7, 0xb0, 0xb7, 0xb7, + 0xac, 0xa5, 0xaf, 0xb3, 0xb4, 0xb2, 0xb5, 0xba, 0xbf, 0xb8, 0xb7, 0xba, + 0xb6, 0xae, 0xaf, 0xac, 0xa4, 0xa2, 0xa3, 0xac, 0xab, 0xa7, 0xab, 
0xa4, + 0x9c, 0x9c, 0x97, 0x9a, 0xa2, 0xa2, 0xa0, 0x9f, 0x9d, 0x9d, 0x9c, 0x9d, + 0x98, 0x94, 0x94, 0x95, 0x9a, 0xa7, 0xad, 0xa6, 0xa1, 0xa2, 0xa2, 0xa0, + 0x9c, 0x9c, 0x99, 0x9d, 0xa0, 0xa1, 0xa2, 0xa1, 0xa5, 0xa1, 0x9c, 0x9c, + 0xa3, 0xa5, 0xa3, 0xa6, 0xa6, 0xa1, 0x9c, 0x9a, 0x9c, 0x9b, 0x97, 0xa0, + 0xa6, 0xa1, 0xa2, 0x9e, 0x9c, 0x9d, 0x9c, 0x9d, 0x9d, 0x9c, 0x9d, 0x9a, + 0x98, 0x99, 0x9a, 0x9a, 0x9d, 0xa1, 0xa1, 0x9f, 0x9c, 0x95, 0x96, 0x96, + 0x97, 0x96, 0x97, 0x9a, 0x9e, 0xa0, 0xa5, 0xac, 0xb6, 0xbc, 0xb7, 0xb2, + 0xc1, 0xdf, 0x04, 0x0f, 0x27, 0x37, 0x3e, 0x3f, 0x3f, 0x3c, 0x37, 0x37, + 0x3f, 0x4b, 0x53, 0x58, 0x5c, 0x5f, 0x62, 0x64, 0x65, 0x66, 0x65, 0x66, + 0x66, 0x66, 0x65, 0x64, 0x5f, 0x4a, 0xfd, 0xbd, 0xa7, 0xa3, 0xa1, 0xa1, + 0xa1, 0x9d, 0x9e, 0xa3, 0xa7, 0xa3, 0xa6, 0xa5, 0xa7, 0xa7, 0xa3, 0xa2, + 0xa3, 0xa4, 0xa4, 0xa3, 0xa1, 0xa4, 0xa6, 0xa7, 0xa5, 0xa4, 0xaa, 0xa8, + 0x9c, 0x96, 0x97, 0x98, 0x97, 0xa7, 0xb2, 0xb0, 0xab, 0xab, 0xa7, 0xa3, + 0xa5, 0xa2, 0x9b, 0x97, 0x9b, 0xa1, 0xa6, 0xa1, 0xa1, 0xb3, 0xb4, 0xb2, + 0xaa, 0xa3, 0xa7, 0xa7, 0xa3, 0xa3, 0x9d, 0x9e, 0xa6, 0xa1, 0xa2, 0xb0, + 0xab, 0xa5, 0xa7, 0xa6, 0xaa, 0xab, 0xa6, 0xa7, 0xb0, 0xbd, 0xc2, 0xbc, + 0xb2, 0xb8, 0xae, 0xa4, 0xac, 0xb7, 0xbc, 0xb7, 0xac, 0xa9, 0xac, 0xaf, + 0xac, 0xb0, 0xb6, 0xb7, 0xb9, 0xb2, 0xa9, 0xb1, 0xb1, 0xb1, 0xb5, 0xb3, + 0xb1, 0xaf, 0xae, 0xb3, 0xb3, 0xaf, 0xac, 0xa7, 0xa4, 0xa4, 0x9f, 0x97, + 0x9e, 0xa1, 0x9e, 0x9f, 0x9d, 0xa1, 0x9c, 0x9c, 0x98, 0x96, 0x94, 0x93, + 0x9b, 0xa9, 0xac, 0xaa, 0xa5, 0xa7, 0xa5, 0xa1, 0x9e, 0x9e, 0xa0, 0xa3, + 0xa1, 0xa0, 0xa3, 0xa5, 0xa6, 0xa0, 0x9b, 0x9c, 0xa0, 0xa2, 0xa5, 0xa5, + 0xa1, 0xa1, 0xa3, 0xa0, 0x9f, 0xa0, 0x9c, 0x9e, 0xa4, 0x9c, 0x9b, 0x99, + 0x97, 0x97, 0x9a, 0xa0, 0x9e, 0x9d, 0x9c, 0x99, 0x97, 0x97, 0x99, 0x9d, + 0x9e, 0x9d, 0x9b, 0x98, 0x97, 0x94, 0x95, 0x94, 0x94, 0x97, 0x97, 0x9d, + 0xa6, 0xad, 0xb2, 0xbb, 0xd7, 0xee, 0xef, 0xec, 0xeb, 0xfe, 0x1e, 0x33, + 0x38, 0x41, 0x47, 0x43, 0x3f, 0x41, 0x41, 0x45, 0x4a, 0x54, 0x59, 
0x5d, + 0x62, 0x62, 0x63, 0x65, 0x65, 0x65, 0x66, 0x67, 0x66, 0x66, 0x66, 0x66, + 0x63, 0x55, 0x1d, 0xd4, 0xb1, 0xaa, 0xa7, 0xa6, 0xa4, 0x9f, 0x9f, 0xa1, + 0xa5, 0xa7, 0xa7, 0xa4, 0xa4, 0xa5, 0xa5, 0xa7, 0xa1, 0xa2, 0xa2, 0xa3, + 0xa7, 0xb5, 0xbe, 0xbc, 0xb0, 0xb0, 0xa9, 0x9e, 0x9b, 0x98, 0x9a, 0x9c, + 0x9a, 0xa2, 0xb0, 0xaf, 0xae, 0xb1, 0xac, 0xa4, 0xa6, 0x9f, 0x99, 0x96, + 0x99, 0x9b, 0xa0, 0xa0, 0xa2, 0xae, 0xae, 0xac, 0xae, 0xad, 0xa7, 0xa3, + 0x9c, 0x9e, 0x9c, 0x9d, 0x9e, 0x9e, 0xa4, 0xae, 0xa8, 0x9e, 0xa5, 0xa9, + 0xae, 0xac, 0xa5, 0xa8, 0xb0, 0xb6, 0xb3, 0xb9, 0xbb, 0xbe, 0xb4, 0xab, + 0xb1, 0xb9, 0xc2, 0xb7, 0xa9, 0xa4, 0xa5, 0xaa, 0xb1, 0xb2, 0xb3, 0xb4, + 0xb1, 0xa9, 0x9a, 0x98, 0x9b, 0xa1, 0xaa, 0xb2, 0xb8, 0xb8, 0xb7, 0xbb, + 0xc1, 0xc0, 0xb6, 0xa6, 0x9f, 0xa1, 0xa2, 0x9f, 0xa2, 0x9f, 0x9d, 0x9e, + 0x9d, 0x9c, 0x9f, 0x9d, 0x99, 0x97, 0x96, 0x94, 0x9c, 0xa4, 0xa4, 0xa4, + 0xa7, 0xa7, 0xaf, 0xb4, 0xb0, 0xa9, 0xa5, 0x9f, 0x9c, 0x9c, 0xa0, 0xa4, + 0xa5, 0xa1, 0x9c, 0x9d, 0xa1, 0x9e, 0xa4, 0xa8, 0xa9, 0xa2, 0xa1, 0xa6, + 0x9f, 0x9e, 0x9f, 0xa1, 0x9d, 0x9a, 0x98, 0x97, 0x96, 0x95, 0x99, 0x9c, + 0x9b, 0x9d, 0x9b, 0x97, 0x96, 0x97, 0x98, 0x9b, 0x9c, 0x9a, 0x98, 0x99, + 0x97, 0x9a, 0x9a, 0x95, 0x97, 0x9c, 0x9c, 0x9f, 0xa6, 0xbc, 0xd6, 0xe2, + 0xed, 0x12, 0x29, 0x22, 0x1d, 0x1b, 0x29, 0x3d, 0x47, 0x48, 0x47, 0x47, + 0x42, 0x42, 0x48, 0x52, 0x57, 0x59, 0x5c, 0x5f, 0x62, 0x63, 0x64, 0x64, + 0x65, 0x66, 0x67, 0x67, 0x67, 0x67, 0x67, 0x67, 0x65, 0x61, 0x4b, 0x17, + 0xd7, 0xb8, 0xac, 0xaa, 0xa5, 0xa4, 0xa8, 0xa7, 0xa8, 0xa5, 0xa4, 0xa7, + 0xa7, 0xa6, 0xa7, 0xa4, 0xa6, 0xac, 0xb7, 0xe2, 0x08, 0x27, 0x2f, 0x1e, + 0xf6, 0xc5, 0xa7, 0xa0, 0x9e, 0xa1, 0x9f, 0x9a, 0x9a, 0x9c, 0xa7, 0xa9, + 0xa5, 0xa8, 0xa9, 0xa7, 0x9f, 0x9c, 0x9c, 0x97, 0x99, 0x9e, 0x9c, 0x9e, + 0xa2, 0xa8, 0xac, 0xab, 0xa8, 0xa9, 0xa6, 0xaa, 0xa3, 0x9d, 0x9c, 0xa0, + 0xa0, 0xa0, 0x9e, 0xa8, 0xa7, 0xa0, 0xa5, 0xa9, 0xa9, 0xaa, 0xa4, 0xa4, + 0xac, 0xac, 0xaa, 0xb1, 0xbc, 0xc2, 0xbb, 0xb3, 0xb2, 0xba, 0xc0, 
0xb8, + 0xae, 0xa2, 0xa1, 0xa8, 0xb2, 0xb0, 0xab, 0xaa, 0xac, 0xa8, 0x9b, 0x97, + 0x9e, 0xa0, 0xa2, 0xac, 0xb7, 0xb0, 0xb5, 0xb9, 0xb8, 0xc0, 0xc1, 0xb4, + 0xaa, 0xa2, 0xac, 0xa5, 0xa4, 0xa4, 0xa4, 0xa3, 0x9b, 0x9c, 0x9e, 0x9d, + 0x9a, 0x98, 0x99, 0x9c, 0xa1, 0xa6, 0xa7, 0xa6, 0xa6, 0xaa, 0xb6, 0xbc, + 0xb7, 0xac, 0xa2, 0x9f, 0x9a, 0x9c, 0x9f, 0x9f, 0x9f, 0x9e, 0xa0, 0xa1, + 0xa5, 0xa2, 0xa0, 0xa0, 0xa9, 0xac, 0xa7, 0xa9, 0xa7, 0xa3, 0xa1, 0xa0, + 0xa6, 0xa2, 0x9f, 0x9c, 0x9c, 0x97, 0x97, 0x98, 0x97, 0x9c, 0x9d, 0x98, + 0x97, 0x9a, 0x99, 0x96, 0x9a, 0x99, 0x9c, 0x9b, 0x98, 0x97, 0x99, 0x97, + 0x99, 0x9c, 0x9d, 0x9e, 0xa1, 0xb0, 0xdc, 0x0f, 0x22, 0x28, 0x30, 0x3d, + 0x3f, 0x39, 0x37, 0x43, 0x49, 0x4e, 0x4c, 0x48, 0x49, 0x49, 0x4e, 0x55, + 0x59, 0x5d, 0x5f, 0x61, 0x62, 0x63, 0x64, 0x65, 0x66, 0x67, 0x67, 0x67, + 0x67, 0x67, 0x67, 0x67, 0x67, 0x66, 0x62, 0x55, 0x2d, 0xf8, 0xc7, 0xb4, + 0xa7, 0xa4, 0xaf, 0xae, 0xab, 0xa7, 0xa6, 0xae, 0xb5, 0xae, 0xad, 0xaf, + 0xb4, 0xd7, 0x12, 0x3f, 0x47, 0x4d, 0x4f, 0x52, 0x4a, 0x00, 0xad, 0xa4, + 0xa1, 0xa9, 0xa5, 0x98, 0x98, 0x98, 0x9c, 0x9c, 0x9d, 0xa7, 0xa7, 0xa1, + 0x9c, 0x9a, 0x9a, 0x9d, 0x9c, 0x9f, 0x9e, 0x9e, 0x9f, 0xa6, 0xa7, 0xa7, + 0xa5, 0xa7, 0xa7, 0xab, 0xad, 0xab, 0xa2, 0x9e, 0x9f, 0x9f, 0x9e, 0xa6, + 0xa8, 0xa2, 0xa1, 0xa7, 0xa9, 0xa8, 0xa2, 0xa1, 0xa7, 0xa3, 0xa1, 0xa7, + 0xb0, 0xbe, 0xc2, 0xb8, 0xb4, 0xbe, 0xbf, 0xb7, 0xb5, 0xac, 0xac, 0xaa, + 0xb5, 0xac, 0xa7, 0xa9, 0xaf, 0xb1, 0xa1, 0x97, 0x99, 0x9f, 0xa1, 0xaa, + 0xb1, 0xad, 0xb5, 0xbb, 0xb8, 0xb9, 0xb6, 0xb4, 0xb0, 0xa7, 0xa8, 0xa6, + 0xa5, 0xa3, 0xaa, 0xaa, 0x9c, 0x9d, 0x9d, 0x9c, 0x98, 0x99, 0x9b, 0x9c, + 0xa0, 0xa8, 0xac, 0xb1, 0xae, 0xa6, 0xaf, 0xb7, 0xb6, 0xac, 0xa0, 0x9c, + 0x98, 0x97, 0x9a, 0x9c, 0x9f, 0x9e, 0x9e, 0xa2, 0xa2, 0xa1, 0xa0, 0x9d, + 0xa5, 0xa7, 0xa9, 0xac, 0xac, 0xa5, 0xa6, 0xaa, 0xad, 0xa7, 0xa4, 0xa1, + 0x9e, 0x9a, 0x98, 0x97, 0x98, 0x9b, 0x9c, 0x96, 0x96, 0x99, 0x97, 0x97, + 0x99, 0x9c, 0x9d, 0x9e, 0x9b, 0x97, 0x99, 0x96, 0x99, 0x9a, 0x9c, 
0x9e, + 0xa6, 0xac, 0xbf, 0xe5, 0x11, 0x32, 0x3d, 0x3f, 0x47, 0x4b, 0x4a, 0x4c, + 0x4d, 0x4e, 0x4d, 0x4a, 0x4a, 0x51, 0x56, 0x5a, 0x5d, 0x60, 0x60, 0x62, + 0x63, 0x64, 0x65, 0x66, 0x67, 0x67, 0x67, 0x67, 0x67, 0x67, 0x67, 0x67, + 0x67, 0x67, 0x66, 0x62, 0x5c, 0x4c, 0x29, 0xfa, 0xcc, 0xb8, 0xb3, 0xb1, + 0xac, 0xa9, 0xb1, 0xdf, 0xe0, 0xc2, 0xd7, 0xe0, 0xff, 0x31, 0x40, 0x42, + 0x44, 0x47, 0x4b, 0x53, 0x55, 0x1d, 0xb3, 0xa5, 0xa3, 0xa9, 0xa5, 0x9c, + 0x9b, 0x9c, 0x99, 0x9a, 0x9f, 0xa4, 0xa2, 0x9c, 0x9a, 0x9a, 0x9b, 0x9e, + 0xa5, 0xa4, 0xa1, 0xa1, 0x9f, 0xa5, 0xa7, 0xa7, 0xa2, 0xa6, 0xac, 0xa8, + 0xa7, 0xac, 0xa2, 0x9c, 0x9f, 0x9e, 0x9c, 0xa4, 0xa7, 0x9f, 0x9e, 0xa6, + 0xa8, 0xa8, 0xa3, 0xa1, 0xa1, 0xa6, 0xa6, 0xa6, 0xa8, 0xae, 0xb9, 0xba, + 0xb8, 0xb8, 0xbd, 0xb7, 0xb1, 0xa9, 0xb0, 0xb7, 0xb7, 0xad, 0xa7, 0xaa, + 0xb3, 0xb5, 0xa7, 0x98, 0x99, 0x9f, 0xa2, 0xa8, 0xae, 0xb1, 0xbb, 0xb6, + 0xb7, 0xb9, 0xb5, 0xb5, 0xb1, 0xa8, 0xa6, 0xa9, 0xab, 0xa4, 0xa6, 0xac, + 0xa1, 0x9b, 0x9b, 0x9b, 0xa0, 0xa6, 0xa4, 0xa4, 0xa7, 0xa9, 0xab, 0xab, + 0xa7, 0xa2, 0xaa, 0xb0, 0xb3, 0xac, 0xa3, 0x9d, 0x9a, 0x97, 0x9a, 0x9c, + 0x9d, 0x9f, 0x9e, 0xa0, 0xa1, 0xa2, 0xa0, 0x9c, 0x9c, 0xa0, 0xa6, 0xa7, + 0xa2, 0x9e, 0xa5, 0xab, 0xab, 0xa5, 0xa6, 0xa1, 0xa4, 0xa1, 0x9b, 0x9b, + 0xa1, 0x9f, 0x9a, 0x99, 0x97, 0x97, 0x97, 0x99, 0x9e, 0xa1, 0x9e, 0xa1, + 0x9e, 0x9d, 0x9b, 0x9b, 0x9b, 0x9b, 0xa0, 0xab, 0xc0, 0xe2, 0x02, 0x12, + 0x17, 0x27, 0x39, 0x47, 0x4b, 0x4e, 0x4f, 0x4d, 0x4f, 0x4f, 0x4b, 0x48, + 0x4c, 0x54, 0x59, 0x5d, 0x61, 0x62, 0x62, 0x62, 0x62, 0x64, 0x65, 0x66, + 0x67, 0x67, 0x67, 0x67, 0x67, 0x67, 0x67, 0x66, 0x66, 0x66, 0x66, 0x65, + 0x64, 0x61, 0x5c, 0x4b, 0x29, 0xfb, 0xd5, 0xc1, 0xbd, 0xb8, 0xf9, 0x24, + 0x04, 0xff, 0x12, 0x21, 0x3c, 0x3e, 0x3a, 0x39, 0x39, 0x3d, 0x44, 0x4d, + 0x53, 0x20, 0xb8, 0xa8, 0xa7, 0xa5, 0xa2, 0x9f, 0xa2, 0xa4, 0x9f, 0xa1, + 0xa5, 0xa3, 0xa1, 0xa0, 0x9c, 0x9d, 0xa1, 0xac, 0xb1, 0xac, 0xa7, 0xa2, + 0x9b, 0x9f, 0xa2, 0xa7, 0xa6, 0xa2, 0xac, 0xa5, 0xa2, 0xb4, 0xb1, 
0xa6, + 0x9d, 0x9c, 0x9c, 0xa1, 0xa2, 0x9f, 0xa0, 0xa0, 0xa8, 0xa8, 0xa3, 0xa2, + 0xa7, 0xa7, 0xa7, 0xa4, 0xa0, 0xa7, 0xb1, 0xb1, 0xb7, 0xb3, 0xbb, 0xbc, + 0xbc, 0xb1, 0xac, 0xb7, 0xb3, 0xb4, 0xaf, 0xab, 0xb2, 0xb0, 0xa1, 0x99, + 0x9c, 0x9e, 0xa1, 0xac, 0xb1, 0xac, 0xa8, 0xab, 0xb1, 0xb6, 0xb6, 0xb4, + 0xb1, 0xaa, 0xa5, 0xa2, 0xa0, 0xa0, 0xa1, 0xa6, 0xa6, 0x9c, 0x9d, 0xa0, + 0xa5, 0xad, 0xb1, 0xb1, 0xae, 0xae, 0xab, 0xa5, 0x9d, 0x9c, 0x9b, 0xa1, + 0xad, 0xb0, 0xaa, 0x9b, 0x99, 0x94, 0x95, 0x97, 0x99, 0x9c, 0x98, 0x97, + 0x9b, 0xa1, 0x9d, 0x9d, 0x97, 0x9c, 0x9c, 0x9d, 0x9a, 0xa1, 0xa7, 0xa4, + 0xa6, 0xa6, 0xa6, 0xa6, 0xa8, 0xa4, 0xa1, 0xa5, 0xac, 0xa6, 0xa1, 0xa0, + 0x9c, 0x9c, 0x9f, 0xa1, 0x9e, 0x9c, 0x9b, 0x9c, 0x9c, 0x9f, 0xa0, 0x9f, + 0x9c, 0xa3, 0xb7, 0xda, 0xfb, 0x19, 0x26, 0x2f, 0x35, 0x38, 0x3d, 0x47, + 0x4f, 0x53, 0x54, 0x51, 0x4c, 0x49, 0x46, 0x4b, 0x52, 0x57, 0x5a, 0x5d, + 0x61, 0x61, 0x62, 0x63, 0x63, 0x64, 0x65, 0x66, 0x66, 0x67, 0x66, 0x66, + 0x66, 0x67, 0x66, 0x66, 0x67, 0x67, 0x67, 0x66, 0x65, 0x64, 0x65, 0x63, + 0x5d, 0x4d, 0x32, 0x1a, 0x08, 0x17, 0x48, 0x41, 0x30, 0x31, 0x36, 0x3d, + 0x3a, 0x32, 0x30, 0x2d, 0x2c, 0x34, 0x3e, 0x4a, 0x52, 0x17, 0xb7, 0xad, + 0xad, 0xa7, 0xa0, 0xa0, 0xa1, 0xa3, 0xa1, 0xa1, 0xa4, 0xa3, 0x9f, 0x9d, + 0x9c, 0x9b, 0xa2, 0xb4, 0xb6, 0xb1, 0xb5, 0xaa, 0x9d, 0x99, 0xa1, 0xaa, + 0xa8, 0xa5, 0xa6, 0xa1, 0xa2, 0xb2, 0xb4, 0xab, 0xa6, 0xa2, 0x9e, 0x9e, + 0xa0, 0x9f, 0x9c, 0x9e, 0xa5, 0xa7, 0xa5, 0xaa, 0xae, 0xac, 0xa7, 0xa2, + 0x9c, 0xa1, 0xa9, 0xab, 0xad, 0xb3, 0xbb, 0xbd, 0xcb, 0xbc, 0xab, 0xb6, + 0xad, 0xaa, 0xac, 0xac, 0xb0, 0xac, 0x9f, 0xa4, 0xa4, 0xa4, 0xa7, 0xb2, + 0xaa, 0x9a, 0x97, 0x9a, 0xa5, 0xac, 0xb3, 0xb7, 0xb5, 0xaa, 0x9c, 0x97, + 0x99, 0x99, 0x9d, 0xa3, 0xb0, 0xac, 0xa7, 0xa5, 0xa4, 0xa9, 0xaf, 0xb1, + 0xb1, 0xab, 0xa7, 0xa1, 0xa2, 0xa2, 0x9b, 0x97, 0xa6, 0xaf, 0xaa, 0x98, + 0x97, 0x94, 0x92, 0x96, 0x99, 0x9a, 0x98, 0x96, 0x97, 0x9b, 0xa1, 0xa1, + 0x9d, 0xa1, 0x9c, 0x97, 0x9a, 0xa2, 0x9e, 0x9c, 0x9e, 0xa0, 0xa3, 
0xa3, + 0xa0, 0x9d, 0xa1, 0xa6, 0xab, 0xaa, 0xa2, 0xa0, 0x9c, 0x98, 0x9d, 0x9f, + 0x9f, 0x9d, 0x9a, 0x9c, 0x9f, 0xa2, 0xa1, 0xa1, 0x9d, 0xa4, 0xc2, 0xee, + 0x0f, 0x24, 0x32, 0x37, 0x3d, 0x42, 0x46, 0x4a, 0x50, 0x55, 0x55, 0x51, + 0x48, 0x43, 0x47, 0x50, 0x55, 0x59, 0x5a, 0x5d, 0x60, 0x62, 0x62, 0x63, + 0x63, 0x63, 0x64, 0x64, 0x65, 0x65, 0x65, 0x65, 0x65, 0x66, 0x66, 0x66, + 0x67, 0x67, 0x67, 0x67, 0x66, 0x66, 0x67, 0x66, 0x64, 0x60, 0x5d, 0x57, + 0x52, 0x56, 0x55, 0x4d, 0x46, 0x41, 0x3e, 0x38, 0x30, 0x25, 0x22, 0x22, + 0x24, 0x2d, 0x39, 0x46, 0x52, 0x18, 0xb2, 0xa9, 0xac, 0xa9, 0xa1, 0x9c, + 0x9c, 0xa0, 0xa1, 0xa0, 0x9f, 0x9e, 0x99, 0x9b, 0x9a, 0x9a, 0xa4, 0xb5, + 0xb5, 0xb3, 0xb4, 0xb0, 0xa5, 0x9e, 0x9f, 0xac, 0xab, 0xa3, 0xa1, 0x9d, + 0xa0, 0xac, 0xb2, 0xaf, 0xa7, 0xa5, 0xa0, 0xa5, 0xa5, 0x9f, 0x9e, 0x9c, + 0xa2, 0xa5, 0xa4, 0xab, 0xac, 0xa6, 0xa3, 0xa1, 0xa1, 0xa5, 0xa7, 0xa7, + 0xab, 0xab, 0xb2, 0xb7, 0xb1, 0xab, 0xa7, 0xa1, 0x9d, 0x9b, 0xa0, 0xa2, + 0xa0, 0xa4, 0xa3, 0xa5, 0xa0, 0xa0, 0xab, 0xb1, 0xa4, 0x9a, 0x9b, 0x9a, + 0xa1, 0xa8, 0xac, 0xae, 0xb7, 0xa9, 0x9f, 0x9c, 0x9e, 0x9c, 0x9c, 0xa0, + 0xb5, 0xb8, 0xb3, 0xac, 0xa9, 0xa8, 0xb1, 0xb1, 0xb3, 0xad, 0xa7, 0xa6, + 0xad, 0xb2, 0xa6, 0x97, 0x9f, 0xb1, 0xab, 0x98, 0x9b, 0x98, 0x95, 0x9e, + 0xa1, 0xa7, 0xa3, 0x9c, 0x9b, 0x97, 0x9f, 0x9c, 0xa6, 0xa5, 0x9c, 0x98, + 0xa0, 0xa7, 0xa3, 0xa0, 0x9c, 0x97, 0x9c, 0xa1, 0xa3, 0xa1, 0xa1, 0xa1, + 0xa0, 0xa0, 0xa1, 0xa1, 0x9d, 0x9c, 0x99, 0x99, 0x98, 0x9b, 0xa1, 0xa0, + 0x9e, 0xa2, 0x9d, 0x9e, 0x9f, 0xa1, 0xb7, 0xda, 0x07, 0x22, 0x31, 0x3b, + 0x42, 0x46, 0x4a, 0x4d, 0x50, 0x52, 0x52, 0x49, 0x43, 0x44, 0x4b, 0x52, + 0x55, 0x5a, 0x5c, 0x5d, 0x60, 0x60, 0x61, 0x62, 0x62, 0x62, 0x62, 0x63, + 0x63, 0x64, 0x64, 0x65, 0x65, 0x65, 0x66, 0x66, 0x67, 0x66, 0x66, 0x67, + 0x67, 0x66, 0x67, 0x66, 0x65, 0x63, 0x60, 0x5d, 0x5b, 0x59, 0x54, 0x4d, + 0x47, 0x40, 0x38, 0x2f, 0x27, 0x1d, 0x1b, 0x20, 0x22, 0x2c, 0x37, 0x45, + 0x4f, 0x1a, 0xb7, 0xa8, 0xa8, 0xa2, 0xa1, 0x9e, 0xa0, 0xad, 0xac, 
0xa7, + 0xa0, 0x9f, 0x9a, 0x9b, 0x98, 0x9b, 0xa9, 0xb5, 0xb9, 0xb7, 0xac, 0xac, + 0xa6, 0xa1, 0xa2, 0xa6, 0xad, 0xac, 0x9e, 0x9d, 0xa6, 0xac, 0xb1, 0xb1, + 0xa6, 0xa3, 0x9f, 0xa6, 0xa4, 0xa1, 0xa2, 0x9d, 0xa4, 0xa5, 0xa2, 0xaa, + 0xab, 0xa3, 0x9f, 0xa1, 0xab, 0xa9, 0xa4, 0xa6, 0xb1, 0xaf, 0xae, 0xb4, + 0xab, 0xa8, 0xa8, 0xa1, 0xa2, 0xa1, 0xa0, 0xa4, 0xa3, 0x9f, 0xa2, 0xa5, + 0xa2, 0xa9, 0xae, 0xb3, 0xa9, 0xa1, 0xa1, 0x9b, 0x9d, 0xa4, 0xa7, 0xa7, + 0xaf, 0xac, 0xa2, 0xa0, 0x9c, 0x97, 0x97, 0x98, 0xa1, 0xac, 0xb1, 0xb1, + 0xb1, 0xac, 0xb6, 0xb7, 0xb9, 0xb7, 0xad, 0xaa, 0xb1, 0xba, 0xb3, 0x9e, + 0x98, 0xa8, 0xa7, 0x9f, 0xa4, 0xa0, 0x97, 0x9e, 0xa9, 0xae, 0xa6, 0x9c, + 0x9b, 0x9c, 0x9e, 0x9c, 0x9f, 0x9d, 0x99, 0x9b, 0x9e, 0xa1, 0xa3, 0xa8, + 0xa5, 0x9a, 0x98, 0xa2, 0xaa, 0xa9, 0x9f, 0x9c, 0x99, 0xa0, 0xa9, 0xab, + 0xa9, 0xa7, 0xa0, 0x9d, 0xa6, 0xa2, 0xa3, 0xa4, 0x98, 0x9b, 0x98, 0x9b, + 0x9c, 0xa4, 0xaf, 0xcf, 0xfc, 0x1c, 0x2c, 0x37, 0x40, 0x46, 0x4b, 0x4e, + 0x51, 0x52, 0x4c, 0x41, 0x43, 0x4c, 0x52, 0x52, 0x54, 0x5b, 0x5b, 0x5e, + 0x60, 0x5e, 0x60, 0x61, 0x61, 0x62, 0x62, 0x62, 0x62, 0x63, 0x64, 0x65, + 0x64, 0x65, 0x65, 0x65, 0x66, 0x65, 0x66, 0x67, 0x67, 0x66, 0x67, 0x66, + 0x64, 0x62, 0x60, 0x5d, 0x5a, 0x57, 0x4f, 0x48, 0x40, 0x37, 0x32, 0x29, + 0x22, 0x18, 0x1d, 0x22, 0x22, 0x29, 0x36, 0x45, 0x4e, 0x15, 0xbb, 0xa7, + 0xa3, 0x9f, 0xa1, 0x9f, 0xa1, 0xb3, 0xb0, 0xaa, 0xa3, 0xa1, 0x9c, 0x9c, + 0x9a, 0x9b, 0xab, 0xb4, 0xb8, 0xb1, 0xa6, 0xaa, 0xa5, 0xa2, 0xa4, 0xa6, + 0xa8, 0xa5, 0x9f, 0xa3, 0xa3, 0xa8, 0xae, 0xad, 0xa9, 0xa1, 0x9f, 0xa7, + 0xa4, 0xa2, 0xa5, 0xa1, 0xa6, 0xa8, 0xa8, 0xac, 0xac, 0xa0, 0xa0, 0xb9, + 0xb3, 0xab, 0xa6, 0xa2, 0xac, 0xac, 0xaa, 0xb3, 0xab, 0xaa, 0xa6, 0xa1, + 0xa5, 0xa6, 0xa3, 0xa6, 0xa8, 0xa3, 0xa7, 0xaa, 0xa9, 0xb1, 0xaf, 0xad, + 0xa7, 0xa1, 0x9e, 0x9d, 0x9e, 0xa1, 0xa5, 0xa5, 0xa5, 0xb1, 0xb1, 0xb1, + 0xa9, 0xa1, 0x9b, 0x98, 0x99, 0x9c, 0xa7, 0xb0, 0xb9, 0xb6, 0xb6, 0xb8, + 0xc2, 0xbb, 0xb1, 0xac, 0xab, 0xb5, 0xb5, 0xa2, 0x99, 0x9d, 0xa5, 
0xa8, + 0xab, 0xa7, 0x9f, 0x9e, 0xa7, 0xac, 0xa3, 0x9c, 0x99, 0x99, 0x9b, 0x9e, + 0xa1, 0xa2, 0xa2, 0xa9, 0xa7, 0x9e, 0xa3, 0xa7, 0xa7, 0x9d, 0x98, 0xa1, + 0xa1, 0xa3, 0xa4, 0xa1, 0x9d, 0xa6, 0xaa, 0xac, 0xaa, 0xab, 0xa3, 0xa6, + 0xaf, 0xad, 0xa7, 0xa6, 0x9a, 0x9a, 0x99, 0x9b, 0x9c, 0xa7, 0xbb, 0xd6, + 0xf5, 0x14, 0x22, 0x2f, 0x3a, 0x41, 0x48, 0x4f, 0x50, 0x51, 0x46, 0x41, + 0x49, 0x54, 0x54, 0x4d, 0x52, 0x58, 0x5b, 0x5e, 0x5d, 0x5e, 0x60, 0x60, + 0x62, 0x62, 0x62, 0x62, 0x62, 0x62, 0x63, 0x63, 0x64, 0x65, 0x64, 0x65, + 0x65, 0x65, 0x65, 0x66, 0x66, 0x65, 0x65, 0x64, 0x63, 0x62, 0x60, 0x5c, + 0x57, 0x52, 0x4b, 0x42, 0x3b, 0x35, 0x2f, 0x27, 0x20, 0x1c, 0x22, 0x23, + 0x24, 0x2d, 0x3b, 0x47, 0x49, 0x08, 0xb6, 0xa8, 0x9e, 0xa3, 0xa6, 0x9e, + 0xa1, 0xac, 0xb0, 0xa8, 0xa2, 0x9f, 0x9c, 0x9e, 0x9c, 0x9a, 0xad, 0xb7, + 0xb3, 0xad, 0xa1, 0xa4, 0xa1, 0x9d, 0xa1, 0xa6, 0xa5, 0xa2, 0x9f, 0xa2, + 0xa5, 0xa9, 0xa9, 0xa8, 0xa7, 0xa3, 0xa1, 0xa7, 0xa6, 0xa1, 0xa4, 0xa4, + 0xa6, 0xa7, 0xa7, 0xaa, 0xa7, 0xa3, 0xa5, 0xaf, 0xac, 0xab, 0xa9, 0xa8, + 0xa9, 0xb1, 0xaf, 0xb4, 0xaa, 0xa6, 0xa2, 0x9d, 0xa1, 0xa3, 0xa8, 0xaa, + 0xaa, 0xa5, 0xab, 0xa7, 0xa7, 0xa9, 0xa8, 0xaf, 0xb0, 0xac, 0xa2, 0xa0, + 0xa0, 0xa7, 0xa5, 0xa1, 0x9d, 0xa2, 0xb5, 0xbd, 0xba, 0xb7, 0xae, 0x9f, + 0xa1, 0xa8, 0xac, 0xaf, 0xb6, 0xb5, 0xb3, 0xb0, 0xb2, 0xb4, 0xb3, 0xb1, + 0xa5, 0xaa, 0xa7, 0xa1, 0xa0, 0xa3, 0xaa, 0xab, 0xa9, 0xa4, 0xa1, 0x9f, + 0xa1, 0xa7, 0x9f, 0x9b, 0x9a, 0x9b, 0x9a, 0x9a, 0xa1, 0xb0, 0xaf, 0xb0, + 0xb1, 0xa7, 0xb0, 0xaa, 0xa0, 0x9b, 0x9c, 0x9e, 0x98, 0x9c, 0xa4, 0xa1, + 0xa7, 0xb1, 0xb0, 0xaa, 0xa8, 0xa5, 0x9f, 0xa7, 0xb6, 0xb4, 0xaa, 0xa8, + 0x99, 0x99, 0x9b, 0x9b, 0x9d, 0xa7, 0xb6, 0xd0, 0xeb, 0x07, 0x1b, 0x28, + 0x34, 0x3d, 0x45, 0x4d, 0x4e, 0x47, 0x3c, 0x36, 0x2b, 0x36, 0x46, 0x42, + 0x4d, 0x53, 0x58, 0x5c, 0x5b, 0x5d, 0x5d, 0x5e, 0x60, 0x5f, 0x60, 0x60, + 0x60, 0x62, 0x62, 0x62, 0x62, 0x63, 0x63, 0x64, 0x65, 0x65, 0x65, 0x65, + 0x64, 0x63, 0x64, 0x63, 0x61, 0x60, 0x5e, 0x5b, 0x57, 0x50, 0x47, 
0x3d, + 0x36, 0x32, 0x2f, 0x29, 0x23, 0x22, 0x21, 0x23, 0x27, 0x31, 0x3e, 0x4c, + 0x41, 0xfe, 0xb6, 0xa3, 0xa1, 0xa4, 0xa5, 0xa0, 0xa5, 0xaa, 0xb1, 0xab, + 0xa7, 0xa3, 0xa1, 0x9b, 0x9b, 0x9b, 0xa0, 0xac, 0xaf, 0xa9, 0xa3, 0xa4, + 0xa1, 0x9e, 0x9c, 0xa0, 0xa7, 0xa3, 0xa7, 0xa5, 0xa6, 0xa7, 0xa8, 0xa8, + 0xa2, 0xa7, 0xa7, 0xa5, 0xa5, 0xa2, 0xa6, 0xa9, 0xa7, 0xa9, 0xa7, 0xa9, + 0xa7, 0xab, 0xa7, 0xa1, 0xab, 0xad, 0xa6, 0xaa, 0xaa, 0xac, 0xb2, 0xb6, + 0xa8, 0x9c, 0xa4, 0xa1, 0xa4, 0xa7, 0xaf, 0xb1, 0xb3, 0xb1, 0xad, 0xaa, + 0xa4, 0xa4, 0xa7, 0xb1, 0xb3, 0xb1, 0xac, 0xa5, 0xa1, 0xa5, 0xa2, 0x9c, + 0x9a, 0x9b, 0xa8, 0xb1, 0xb7, 0xb6, 0xaf, 0xab, 0xaa, 0xac, 0xac, 0xad, + 0xb4, 0xb5, 0xb7, 0xb1, 0xa2, 0xaa, 0xb5, 0xb4, 0xac, 0xac, 0xa7, 0xa3, + 0xab, 0xb1, 0xb1, 0xad, 0xa5, 0xa1, 0xa4, 0x9f, 0x9d, 0x9e, 0x9f, 0x9c, + 0x9f, 0x9d, 0x9a, 0x9c, 0xa5, 0xb7, 0xb6, 0xb1, 0xae, 0xa1, 0xa9, 0xaf, + 0xa9, 0xa2, 0xa8, 0xa7, 0xa1, 0xa0, 0xa5, 0xa5, 0xad, 0xb3, 0xb1, 0xac, + 0xa7, 0xa5, 0xa4, 0xa6, 0xb6, 0xb1, 0xa7, 0xa6, 0x9b, 0xa0, 0xa0, 0x9d, + 0x9b, 0xa0, 0xaa, 0xc2, 0xe6, 0x07, 0x1b, 0x27, 0x32, 0x3d, 0x46, 0x4c, + 0x49, 0x3d, 0x30, 0xe9, 0xc5, 0xe3, 0x1d, 0x3a, 0x4a, 0x52, 0x57, 0x59, + 0x59, 0x5b, 0x5c, 0x5d, 0x5b, 0x5b, 0x5b, 0x5d, 0x60, 0x61, 0x61, 0x61, + 0x62, 0x62, 0x62, 0x62, 0x64, 0x63, 0x63, 0x63, 0x63, 0x62, 0x62, 0x62, + 0x61, 0x60, 0x5d, 0x59, 0x56, 0x4d, 0x43, 0x37, 0x30, 0x2f, 0x29, 0x23, + 0x24, 0x25, 0x25, 0x25, 0x29, 0x33, 0x43, 0x4d, 0x3f, 0xfe, 0xbb, 0xad, + 0xa5, 0xa1, 0xa6, 0xa4, 0xa8, 0xac, 0xae, 0xac, 0xac, 0xab, 0xa3, 0xa0, + 0x9c, 0x9c, 0x9c, 0x9a, 0xa2, 0xa4, 0xa1, 0x9e, 0x9d, 0x9b, 0x9b, 0x9c, + 0xa2, 0xa2, 0xa7, 0xa6, 0xa6, 0xa7, 0xa7, 0xa5, 0xa3, 0xb1, 0xaf, 0xa5, + 0xa3, 0xa1, 0xa1, 0xa6, 0xaa, 0xa9, 0xa6, 0xa2, 0xa3, 0xb0, 0xb5, 0xac, + 0xac, 0xab, 0xa5, 0xab, 0xae, 0xaf, 0xaf, 0xb5, 0xad, 0xa9, 0xa9, 0xa2, + 0xa3, 0xb0, 0xb7, 0xb3, 0xb8, 0xb8, 0xb1, 0xac, 0xa8, 0xaa, 0xaf, 0xb2, + 0xb7, 0xb7, 0xb8, 0xae, 0xa3, 0xac, 0xad, 0xa7, 0xae, 0xac, 0xa5, 
0xa2, + 0xaa, 0xa8, 0xa7, 0xab, 0xa7, 0xa3, 0xa9, 0xad, 0xb9, 0xb7, 0xb3, 0xac, + 0x9c, 0xa2, 0xac, 0xb2, 0xb4, 0xae, 0xa8, 0xa5, 0xaf, 0xb1, 0xaf, 0xac, + 0xa3, 0xa7, 0xaa, 0xa3, 0x9c, 0x9c, 0xa1, 0xa1, 0x9e, 0x99, 0x9b, 0xa3, + 0xb1, 0xb8, 0xb7, 0xb3, 0xb1, 0xaa, 0xa6, 0xaf, 0xb4, 0xb2, 0xb1, 0xb1, + 0xac, 0xab, 0xaa, 0xab, 0xa8, 0xad, 0xae, 0xa8, 0xa0, 0xa5, 0xa1, 0x99, + 0x9e, 0xa1, 0x9f, 0xa1, 0x98, 0x9f, 0xa1, 0x9c, 0x9b, 0x9c, 0xa7, 0xc3, + 0xe5, 0x07, 0x1c, 0x2a, 0x37, 0x40, 0x47, 0x4c, 0x42, 0x33, 0x05, 0xc9, + 0xc7, 0xd2, 0x07, 0x3d, 0x49, 0x52, 0x57, 0x58, 0x5a, 0x5c, 0x5c, 0x5a, + 0x58, 0x58, 0x5a, 0x5e, 0x5f, 0x5f, 0x60, 0x61, 0x62, 0x61, 0x60, 0x60, + 0x61, 0x62, 0x62, 0x62, 0x62, 0x62, 0x62, 0x62, 0x61, 0x5d, 0x59, 0x57, + 0x52, 0x4a, 0x3d, 0x33, 0x2f, 0x2c, 0x27, 0x29, 0x28, 0x25, 0x23, 0x26, + 0x2c, 0x32, 0x42, 0x4d, 0x42, 0x11, 0xd2, 0xb4, 0xa7, 0xa7, 0xa4, 0xa5, + 0xac, 0xad, 0xac, 0xac, 0xab, 0xaa, 0xa7, 0xa5, 0xa0, 0x9a, 0x9b, 0x99, + 0xa0, 0xa3, 0xa4, 0x9f, 0xa0, 0x98, 0x9b, 0xa0, 0xa0, 0x9b, 0xa5, 0xa8, + 0xa4, 0xa6, 0xa3, 0xa3, 0xa8, 0xbc, 0xbe, 0xa7, 0xa1, 0x9f, 0xa3, 0xa4, + 0xab, 0xb0, 0xaa, 0xa3, 0x9e, 0xae, 0xc0, 0xbc, 0xb1, 0xa7, 0xa2, 0xac, + 0xb3, 0xb6, 0xad, 0xab, 0xab, 0xb1, 0xaa, 0xa5, 0xaa, 0xb5, 0xb4, 0xb3, + 0xb7, 0xb7, 0xb2, 0xac, 0xaf, 0xb6, 0xb3, 0xaa, 0xbb, 0xc4, 0xb7, 0xab, + 0xa5, 0xb7, 0xb8, 0xb6, 0xb8, 0xb5, 0xaa, 0xa6, 0xa8, 0xa7, 0xac, 0xab, + 0xa6, 0x9e, 0xa6, 0xb0, 0xb7, 0xb4, 0xad, 0xaa, 0x9f, 0x9c, 0xa7, 0xae, + 0xb1, 0xb1, 0xb6, 0xb9, 0xad, 0xac, 0xb1, 0xab, 0xa8, 0xa2, 0x9f, 0xa0, + 0xa1, 0xa0, 0x9c, 0x9a, 0x99, 0x99, 0xa4, 0xab, 0xb6, 0xb7, 0xb7, 0xaf, + 0xad, 0xab, 0xa2, 0xaf, 0xb7, 0xb1, 0xad, 0xb3, 0xb3, 0xb6, 0xb1, 0xa8, + 0xa2, 0xa8, 0xad, 0xab, 0xa3, 0x9f, 0x9b, 0x95, 0x9c, 0x9e, 0x9d, 0xa0, + 0x97, 0x9a, 0xa1, 0x9f, 0xa1, 0xa3, 0xad, 0xc2, 0xe2, 0x05, 0x1f, 0x2f, + 0x3c, 0x43, 0x48, 0x4d, 0x43, 0x27, 0xd2, 0xc2, 0xc0, 0xcf, 0x15, 0x3d, + 0x4e, 0x55, 0x5a, 0x5b, 0x5c, 0x5d, 0x5b, 0x58, 0x57, 0x57, 0x5a, 
0x5d, + 0x5d, 0x5f, 0x5f, 0x5f, 0x5f, 0x5d, 0x5e, 0x60, 0x61, 0x61, 0x60, 0x61, + 0x61, 0x5f, 0x5f, 0x60, 0x5d, 0x5a, 0x57, 0x54, 0x4e, 0x46, 0x3d, 0x33, + 0x2c, 0x27, 0x24, 0x26, 0x23, 0x20, 0x22, 0x24, 0x27, 0x30, 0x44, 0x4f, + 0x47, 0x22, 0xe2, 0xba, 0xac, 0xa9, 0xa4, 0xa5, 0xae, 0xb0, 0xac, 0xa9, + 0xa7, 0xa7, 0xa5, 0xa1, 0x9e, 0x9c, 0x9a, 0x9b, 0xa1, 0xa6, 0xa2, 0x9d, + 0x9d, 0x9a, 0x9c, 0xa1, 0xa0, 0x9c, 0xa4, 0xaa, 0xac, 0xa7, 0x9e, 0x9c, + 0x9d, 0xa1, 0xb3, 0xb7, 0xac, 0xa2, 0xa4, 0xa2, 0xaa, 0xb4, 0xb1, 0xb1, + 0xad, 0xb7, 0xbb, 0xca, 0xb9, 0xa8, 0xa3, 0xb0, 0xb7, 0xae, 0xa7, 0xa6, + 0xa5, 0xab, 0xa9, 0xa7, 0xb1, 0xb2, 0xb3, 0xb7, 0xb5, 0xb5, 0xb2, 0xb5, + 0xb6, 0xb7, 0xb4, 0xb5, 0xbe, 0xbe, 0xbc, 0xae, 0xa4, 0xb0, 0xb8, 0xb8, + 0xbb, 0xb4, 0xa5, 0xa9, 0xac, 0xa5, 0xa8, 0xb0, 0xb1, 0xa7, 0xa3, 0xaa, + 0xae, 0xb1, 0xaf, 0xa9, 0xa8, 0xa5, 0xaa, 0xac, 0xb0, 0xb5, 0xc1, 0xc8, + 0xc2, 0xbe, 0xb9, 0xae, 0xab, 0xa6, 0xa3, 0x9c, 0x9c, 0xa4, 0x9f, 0x97, + 0x97, 0x9f, 0xa7, 0xae, 0xb0, 0xab, 0xa8, 0xa1, 0xa2, 0xa7, 0xa2, 0xab, + 0xb9, 0xb1, 0xa6, 0xac, 0xb3, 0xb8, 0xac, 0xa6, 0xa2, 0xa1, 0xa8, 0xb1, + 0xb7, 0xb2, 0xaa, 0xa7, 0xac, 0xa7, 0xa5, 0xa9, 0x9a, 0x97, 0x9a, 0x9c, + 0xa4, 0xa8, 0xb1, 0xc7, 0xef, 0x18, 0x2f, 0x37, 0x3f, 0x4a, 0x4e, 0x4b, + 0x40, 0x22, 0xdd, 0xc9, 0xcc, 0xf2, 0x2d, 0x4c, 0x57, 0x5d, 0x5f, 0x5e, + 0x5e, 0x5e, 0x5c, 0x58, 0x57, 0x57, 0x5a, 0x5c, 0x5d, 0x5d, 0x5d, 0x5b, + 0x5b, 0x5d, 0x5d, 0x5f, 0x5f, 0x60, 0x60, 0x60, 0x60, 0x5f, 0x5e, 0x5e, + 0x5b, 0x58, 0x57, 0x55, 0x4c, 0x40, 0x37, 0x34, 0x2f, 0x2d, 0x27, 0x24, + 0x22, 0x23, 0x22, 0x22, 0x27, 0x34, 0x47, 0x52, 0x4a, 0x2d, 0xf4, 0xc9, + 0xba, 0xae, 0xa8, 0xa4, 0xa6, 0xa9, 0xaa, 0xa8, 0xa2, 0xa1, 0x9f, 0x9c, + 0x9e, 0x9b, 0x9c, 0x9e, 0x9e, 0xa3, 0xa3, 0xa1, 0xa0, 0x9c, 0x9e, 0xa1, + 0xa4, 0xa1, 0xa1, 0xa6, 0xa6, 0xa1, 0x9f, 0x9f, 0x9d, 0xa0, 0xa4, 0xb1, + 0xb9, 0xb0, 0xa6, 0xa3, 0xa5, 0xac, 0xac, 0xaf, 0xb4, 0xb8, 0xbc, 0xcd, + 0xbc, 0xa7, 0xa3, 0xad, 0xac, 0xa5, 0xa9, 0xa9, 0xa7, 0xae, 0xb6, 
0xb6, + 0xbd, 0xb7, 0xb8, 0xb8, 0xb7, 0xb7, 0xb8, 0xb8, 0xb5, 0xb0, 0xb0, 0xb5, + 0xbc, 0xb9, 0xba, 0xb4, 0xac, 0xb1, 0xc2, 0xc0, 0xbc, 0xb5, 0xaa, 0xb3, + 0xab, 0xa5, 0xa8, 0xb0, 0xb1, 0xa8, 0xa7, 0xa7, 0x9d, 0xa7, 0xb2, 0xb0, + 0xb1, 0xaf, 0xb1, 0xae, 0xb0, 0xb4, 0xb9, 0xc6, 0xcb, 0xc9, 0xc7, 0xc0, + 0xb3, 0xa7, 0xa7, 0xa3, 0x9f, 0xa7, 0xa1, 0x9a, 0x97, 0xa2, 0xa7, 0xad, + 0xae, 0xaa, 0xa7, 0xa4, 0xa2, 0xa3, 0xa0, 0xa0, 0xac, 0xaa, 0x9f, 0x9e, + 0xa4, 0xb4, 0xaa, 0xa1, 0xac, 0xaa, 0xa1, 0xa7, 0xb7, 0xbc, 0xb0, 0xac, + 0xb2, 0xac, 0xaa, 0xb1, 0x9a, 0x98, 0x9a, 0x9a, 0xa0, 0xa5, 0xb2, 0xd7, + 0x0f, 0x2b, 0x35, 0x3d, 0x45, 0x4d, 0x4d, 0x48, 0x3c, 0x34, 0x1d, 0x02, + 0x1c, 0x3b, 0x53, 0x5e, 0x61, 0x60, 0x61, 0x62, 0x61, 0x5f, 0x5c, 0x58, + 0x57, 0x57, 0x57, 0x59, 0x5b, 0x5a, 0x59, 0x5a, 0x5b, 0x5a, 0x5a, 0x5b, + 0x5c, 0x5d, 0x5e, 0x5f, 0x60, 0x5f, 0x5e, 0x5d, 0x5b, 0x57, 0x55, 0x52, + 0x4d, 0x44, 0x38, 0x32, 0x2d, 0x2b, 0x26, 0x23, 0x1f, 0x21, 0x21, 0x21, + 0x29, 0x37, 0x48, 0x52, 0x52, 0x42, 0x24, 0xf7, 0xd1, 0xba, 0xac, 0xa5, + 0xa1, 0xa2, 0xa5, 0xa2, 0x9f, 0x9d, 0x9c, 0x9c, 0x9e, 0x9c, 0x9c, 0x9c, + 0xa1, 0xa8, 0xa6, 0xa1, 0x9e, 0x9d, 0x9e, 0x9f, 0xa2, 0xa1, 0x9f, 0xa1, + 0x9e, 0x9a, 0x9b, 0x9e, 0x9d, 0xa1, 0xa7, 0xac, 0xad, 0xb4, 0xa7, 0xa7, + 0xa7, 0xa6, 0xa6, 0xa8, 0xb2, 0xc3, 0xc5, 0xc3, 0xc0, 0xa9, 0xa7, 0xa8, + 0xa4, 0xa9, 0xad, 0xaa, 0xac, 0xba, 0xbc, 0xbc, 0xbb, 0xb1, 0xb0, 0xb4, + 0xb8, 0xba, 0xb6, 0xb7, 0xb5, 0xb1, 0xb3, 0xb2, 0xb3, 0xbc, 0xbc, 0xb8, + 0xb2, 0xb6, 0xbf, 0xc2, 0xbd, 0xb3, 0xb0, 0xb7, 0xaf, 0xaa, 0xac, 0xaa, + 0xa7, 0xa6, 0xa7, 0xac, 0xa5, 0xa7, 0xb5, 0xb1, 0xb1, 0xb3, 0xb3, 0xb6, + 0xad, 0xb2, 0xb6, 0xc7, 0xcb, 0xc9, 0xc7, 0xc5, 0xbe, 0xac, 0xa6, 0xa5, + 0xa7, 0xa8, 0xa1, 0xa1, 0x9f, 0xa4, 0xae, 0xac, 0xac, 0xac, 0xa7, 0xa1, + 0xa5, 0xa7, 0xa0, 0xa0, 0xa8, 0xa7, 0xa3, 0xa1, 0x9e, 0xa3, 0xa7, 0xa3, + 0xa1, 0xa1, 0xa2, 0xa0, 0xaa, 0xb6, 0xb0, 0xa5, 0xaa, 0xb5, 0xb5, 0xb6, + 0x9c, 0x99, 0x97, 0x99, 0xa1, 0xab, 0xc9, 0xf7, 0x1c, 0x2d, 0x38, 
0x45, + 0x4a, 0x4d, 0x49, 0x44, 0x38, 0x3c, 0x3c, 0x42, 0x53, 0x5b, 0x60, 0x63, + 0x64, 0x62, 0x62, 0x62, 0x61, 0x60, 0x5d, 0x58, 0x57, 0x56, 0x54, 0x56, + 0x57, 0x54, 0x53, 0x53, 0x56, 0x58, 0x5b, 0x5c, 0x5d, 0x5f, 0x5f, 0x60, + 0x5d, 0x5d, 0x5d, 0x5b, 0x5b, 0x5a, 0x56, 0x51, 0x4a, 0x42, 0x36, 0x2b, + 0x21, 0x1e, 0x1b, 0x19, 0x19, 0x1c, 0x1c, 0x21, 0x2d, 0x38, 0x47, 0x50, + 0x51, 0x47, 0x30, 0x13, 0xee, 0xcc, 0xba, 0xab, 0xa2, 0xa8, 0xa4, 0xa1, + 0xa2, 0xa4, 0x9c, 0x9a, 0x9d, 0x9c, 0x9b, 0xa1, 0xa7, 0xa8, 0xa5, 0xa1, + 0x9e, 0x9f, 0x9f, 0x9f, 0xa3, 0xa2, 0x9e, 0x9f, 0x9c, 0x9c, 0x9d, 0xa9, + 0xa7, 0xa5, 0xa3, 0xa6, 0xa9, 0xad, 0xa9, 0xae, 0xbc, 0xbf, 0xab, 0xa0, + 0xac, 0xc2, 0xc5, 0xb9, 0xbb, 0xb4, 0xb0, 0xac, 0xad, 0xb4, 0xb0, 0xaf, + 0xb2, 0xbd, 0xbc, 0xba, 0xb4, 0xac, 0xac, 0xb5, 0xb7, 0xb1, 0xb7, 0xb8, + 0xb7, 0xb5, 0xb5, 0xb6, 0xb1, 0xb5, 0xb8, 0xb9, 0xb5, 0xb7, 0xb7, 0xba, + 0xbb, 0xb7, 0xb0, 0xb5, 0xb3, 0xb4, 0xb0, 0xaa, 0xa7, 0xa3, 0xa4, 0xac, + 0xab, 0xa9, 0xa9, 0xad, 0xb1, 0xaf, 0xb0, 0xc8, 0xb1, 0xa8, 0xb6, 0xc1, + 0xc8, 0xc7, 0xc2, 0xc4, 0xc0, 0xad, 0xa6, 0xa7, 0xa9, 0xab, 0xb3, 0xb3, + 0xb1, 0xaa, 0xa5, 0xad, 0xbf, 0xb2, 0xac, 0xa4, 0xa8, 0xaa, 0xa2, 0xa0, + 0xa4, 0xa2, 0xa1, 0x9f, 0x9f, 0xa3, 0xa7, 0xa5, 0xa4, 0xa6, 0xb0, 0xb8, + 0xaf, 0xb2, 0xbc, 0xb5, 0xb0, 0xb1, 0xb5, 0xbc, 0x9f, 0x9e, 0x9b, 0x9f, + 0xb1, 0xc7, 0xdf, 0xf7, 0x1a, 0x35, 0x40, 0x49, 0x4b, 0x4a, 0x48, 0x4b, + 0x4d, 0x52, 0x56, 0x5c, 0x5d, 0x60, 0x62, 0x63, 0x62, 0x62, 0x62, 0x62, + 0x62, 0x61, 0x5f, 0x57, 0x53, 0x51, 0x4e, 0x4f, 0x4d, 0x48, 0x4d, 0x50, + 0x53, 0x56, 0x58, 0x59, 0x59, 0x5c, 0x5e, 0x5f, 0x5c, 0x5d, 0x5b, 0x59, + 0x5a, 0x58, 0x55, 0x50, 0x4a, 0x43, 0x3c, 0x30, 0x25, 0x1a, 0x17, 0x11, + 0x0c, 0x13, 0x1c, 0x25, 0x2d, 0x38, 0x45, 0x4d, 0x4f, 0x4b, 0x40, 0x2c, + 0x08, 0xe1, 0xc0, 0xac, 0xa1, 0xad, 0xa7, 0xa4, 0xa5, 0xa7, 0xa4, 0xa1, + 0x9f, 0xa0, 0x9a, 0x9d, 0xa2, 0xa4, 0xa2, 0x9e, 0x9e, 0xa3, 0xa5, 0xa2, + 0xa0, 0x9c, 0x9c, 0x9d, 0xa0, 0xa1, 0xa7, 0xb9, 0xb5, 0xad, 0xa6, 
0xa3, + 0xa5, 0xa7, 0xb2, 0xc6, 0xd4, 0xc9, 0xb2, 0xa6, 0xa7, 0xb7, 0xc7, 0xb9, + 0xb7, 0xba, 0xb5, 0xb0, 0xb1, 0xb1, 0xb2, 0xb2, 0xb4, 0xbc, 0xba, 0xb5, + 0xb2, 0xb0, 0xb2, 0xb7, 0xba, 0xb1, 0xac, 0xb4, 0xb8, 0xb9, 0xb8, 0xb7, + 0xb6, 0xb3, 0xb7, 0xb7, 0xb4, 0xb7, 0xb7, 0xb3, 0xb4, 0xaf, 0xa3, 0xa8, + 0xaf, 0xba, 0xb5, 0xa9, 0xa9, 0xa9, 0xa9, 0xa9, 0xaa, 0xa7, 0xa1, 0xa8, + 0xb0, 0xb3, 0xc7, 0xc5, 0xa7, 0xa7, 0xab, 0xb9, 0xc6, 0xc2, 0xc5, 0xc4, + 0xc6, 0xc4, 0xb3, 0xac, 0xa6, 0xb2, 0xc0, 0xb8, 0xb4, 0xab, 0xaa, 0xbb, + 0xc1, 0xac, 0xa8, 0xaa, 0xa7, 0xa4, 0xa0, 0xa0, 0xa1, 0xa1, 0x9c, 0xa1, + 0xa1, 0xa5, 0xac, 0xab, 0xad, 0xae, 0xb1, 0xc3, 0xc2, 0xb7, 0xbb, 0xb7, + 0xaf, 0xad, 0xb3, 0xbc, 0xa4, 0xa8, 0xa4, 0xaa, 0xb9, 0xc1, 0xdb, 0x0a, + 0x1e, 0x28, 0x31, 0x3d, 0x47, 0x49, 0x4d, 0x52, 0x55, 0x58, 0x5b, 0x5d, + 0x5f, 0x62, 0x62, 0x62, 0x62, 0x62, 0x62, 0x62, 0x63, 0x62, 0x5d, 0x56, + 0x4d, 0x47, 0x43, 0x3d, 0x39, 0x3d, 0x4a, 0x51, 0x54, 0x56, 0x57, 0x5b, + 0x5a, 0x59, 0x5a, 0x5c, 0x5b, 0x5d, 0x59, 0x58, 0x58, 0x58, 0x54, 0x52, + 0x4d, 0x47, 0x3d, 0x32, 0x28, 0x1c, 0x07, 0xf9, 0xfa, 0x0d, 0x1b, 0x29, + 0x2f, 0x3d, 0x43, 0x4b, 0x4d, 0x4a, 0x44, 0x32, 0x19, 0xf7, 0xd0, 0xbc, + 0xb0, 0xb1, 0xa4, 0xa1, 0xa6, 0xa9, 0xa7, 0xa3, 0xa7, 0xa9, 0xa7, 0xa6, + 0xa1, 0xa1, 0xa3, 0x9f, 0xa2, 0xa7, 0xa7, 0xa4, 0xa2, 0xa0, 0xa4, 0xa5, + 0x9f, 0xa0, 0xa7, 0xba, 0xba, 0xb4, 0xae, 0xa8, 0xa4, 0xa4, 0xb2, 0xca, + 0xc2, 0xb1, 0xab, 0xa6, 0xa1, 0xa4, 0xb5, 0xb1, 0xa9, 0xa8, 0xaa, 0xa4, + 0xa2, 0xab, 0xac, 0xaa, 0xab, 0xb1, 0xb2, 0xb2, 0xb5, 0xb5, 0xb2, 0xb9, + 0xba, 0xb1, 0xaf, 0xb1, 0xb7, 0xb9, 0xb6, 0xb6, 0xb4, 0xb4, 0xb5, 0xb4, + 0xb5, 0xb6, 0xb7, 0xb7, 0xb6, 0xaa, 0xa9, 0xb1, 0xb2, 0xbc, 0xb8, 0xa4, + 0xa3, 0xa6, 0xa9, 0xac, 0xb6, 0xb1, 0xad, 0xac, 0xa9, 0xb5, 0xcb, 0xb9, + 0xac, 0xb0, 0xab, 0xb2, 0xc6, 0xc8, 0xc2, 0xbe, 0xc3, 0xcb, 0xbf, 0xa1, + 0x9d, 0xad, 0xb8, 0xb1, 0xb0, 0xbc, 0xc7, 0xb4, 0xac, 0xab, 0xa9, 0xa6, + 0xa5, 0xa4, 0xa2, 0xa0, 0x9c, 0x9f, 0x9d, 0x9e, 0xa5, 0xa4, 0xaa, 
0xaf, + 0xb2, 0xb3, 0xb5, 0xb8, 0xb8, 0xb5, 0xb4, 0xb6, 0xb6, 0xb6, 0xb3, 0xba, + 0xa7, 0xac, 0xac, 0xb1, 0xc0, 0xd1, 0xeb, 0xee, 0xf7, 0x04, 0x05, 0x08, + 0x1f, 0x30, 0x3a, 0x42, 0x49, 0x4d, 0x53, 0x57, 0x5a, 0x5e, 0x61, 0x62, + 0x62, 0x63, 0x62, 0x62, 0x62, 0x5d, 0x56, 0x4c, 0x3e, 0x38, 0x32, 0x2d, + 0x2f, 0x40, 0x4c, 0x53, 0x56, 0x57, 0x57, 0x58, 0x5a, 0x59, 0x58, 0x59, + 0x59, 0x5a, 0x59, 0x57, 0x57, 0x57, 0x53, 0x4e, 0x4c, 0x47, 0x40, 0x36, + 0x25, 0x17, 0x01, 0xec, 0xed, 0x05, 0x1c, 0x2d, 0x35, 0x41, 0x42, 0x47, + 0x4a, 0x4b, 0x46, 0x3a, 0x2c, 0x15, 0xf7, 0xdc, 0xcc, 0xc2, 0xb6, 0xad, + 0xa8, 0xa9, 0xa2, 0xa5, 0xaa, 0xa6, 0xa3, 0xa1, 0xa1, 0xa2, 0x9e, 0xa1, + 0xa7, 0xa9, 0xa7, 0xa3, 0xa2, 0x9f, 0xa1, 0xa0, 0x9d, 0xa1, 0xa6, 0xbb, + 0xba, 0xb4, 0xac, 0xa5, 0xa1, 0xa1, 0xa7, 0xb5, 0xb2, 0xb1, 0xb1, 0xad, + 0xa7, 0xa7, 0xb4, 0xb3, 0xaa, 0xa8, 0xaa, 0xac, 0xa5, 0xa7, 0xac, 0xb1, + 0xab, 0xa7, 0xa8, 0xaa, 0xb0, 0xc2, 0xb5, 0xb7, 0xb1, 0xbc, 0xbc, 0xb1, + 0xb0, 0xb4, 0xb8, 0xb7, 0xb8, 0xb6, 0xb7, 0xb1, 0xad, 0xb2, 0xbc, 0xb4, + 0xb7, 0xb3, 0xaf, 0xb3, 0xb5, 0xbd, 0xc2, 0xb7, 0xad, 0xa8, 0xa4, 0xa7, + 0xb0, 0xbb, 0xbf, 0xbe, 0xab, 0xaf, 0xb6, 0xac, 0xac, 0xb7, 0xb9, 0xbe, + 0xc7, 0xc3, 0xbd, 0xbe, 0xc2, 0xca, 0xca, 0xb0, 0xa6, 0xa3, 0xb2, 0xb3, + 0xab, 0xb7, 0xc9, 0xbd, 0xb3, 0xb2, 0xb1, 0xad, 0xb1, 0xb2, 0xb5, 0xb6, + 0xb2, 0xb5, 0xac, 0xa2, 0xa7, 0xa7, 0xac, 0xb2, 0xbb, 0xc2, 0xbc, 0xb8, + 0xb4, 0xb1, 0xaf, 0xb3, 0xb6, 0xb8, 0xb7, 0xb7, 0xab, 0xab, 0xac, 0xad, + 0xbc, 0xdf, 0xd1, 0xc2, 0xe0, 0xee, 0xf7, 0xfc, 0xfc, 0xfa, 0x0f, 0x22, + 0x2f, 0x3d, 0x4c, 0x54, 0x58, 0x5c, 0x60, 0x61, 0x62, 0x63, 0x62, 0x61, + 0x5f, 0x57, 0x47, 0x38, 0x29, 0x1d, 0x17, 0x0b, 0xfb, 0x12, 0x39, 0x51, + 0x55, 0x56, 0x57, 0x55, 0x55, 0x57, 0x57, 0x57, 0x58, 0x59, 0x59, 0x58, + 0x57, 0x54, 0x52, 0x4f, 0x4d, 0x49, 0x40, 0x31, 0x1f, 0x0a, 0xf9, 0xee, + 0xea, 0x06, 0x25, 0x31, 0x38, 0x3d, 0x40, 0x46, 0x48, 0x48, 0x42, 0x32, + 0x19, 0xfc, 0xda, 0xbd, 0xb8, 0xbd, 0xb6, 0xae, 0xa7, 0xa7, 0xa7, 
0xa7, + 0xa2, 0xa0, 0x9c, 0x9d, 0xa1, 0xa4, 0xa5, 0xa0, 0x9d, 0xa1, 0x9d, 0xa1, + 0xa3, 0x9f, 0x9e, 0xa1, 0x9f, 0x9d, 0xa9, 0xbd, 0xb7, 0xb4, 0xad, 0xa1, + 0x9c, 0xa0, 0xad, 0xb2, 0xb1, 0xad, 0xac, 0xab, 0xa6, 0xb0, 0xbb, 0xbb, + 0xb5, 0xad, 0xae, 0xb0, 0xab, 0xa7, 0xac, 0xae, 0xab, 0xa6, 0xa7, 0xa6, + 0xa4, 0xad, 0xaf, 0xa7, 0x9c, 0xa5, 0xac, 0xa2, 0x9c, 0x9d, 0xa1, 0xa6, + 0xb1, 0xb7, 0xba, 0xb9, 0xad, 0xb0, 0xbf, 0xb7, 0xb8, 0xb0, 0xb0, 0xb1, + 0xb6, 0xbd, 0xc3, 0xc3, 0xb8, 0xa7, 0x9d, 0xa2, 0xab, 0xb6, 0xba, 0xc4, + 0xb9, 0xb7, 0xb6, 0xad, 0xb0, 0xbe, 0xc1, 0xbc, 0xbf, 0xb8, 0xb7, 0xc2, + 0xbc, 0xc2, 0xc6, 0xba, 0xb7, 0xb8, 0xc8, 0xc9, 0xbd, 0xc5, 0xcb, 0xc0, + 0xb5, 0xb6, 0xb9, 0xbc, 0xc7, 0xcc, 0xd1, 0xc9, 0xbe, 0xb7, 0xb3, 0xb4, + 0xba, 0xba, 0xc4, 0xc7, 0xc2, 0xc6, 0xc3, 0xbf, 0xb7, 0xb1, 0xb1, 0xb4, + 0xb3, 0xb6, 0xb7, 0xb7, 0xad, 0xb1, 0xb4, 0xb8, 0xd4, 0xf2, 0xb8, 0xb6, + 0xcb, 0xd7, 0xe2, 0xe7, 0xea, 0xea, 0xe8, 0xfd, 0x18, 0x32, 0x46, 0x52, + 0x58, 0x5d, 0x5f, 0x60, 0x62, 0x64, 0x63, 0x62, 0x5e, 0x52, 0x30, 0x18, + 0x09, 0xec, 0xdc, 0xc9, 0xb6, 0xc0, 0xfb, 0x31, 0x4e, 0x56, 0x57, 0x57, + 0x54, 0x56, 0x57, 0x57, 0x57, 0x58, 0x59, 0x58, 0x57, 0x55, 0x53, 0x53, + 0x50, 0x4c, 0x47, 0x3c, 0x27, 0x0d, 0xfc, 0xf0, 0xf0, 0x0c, 0x27, 0x34, + 0x3d, 0x3d, 0x3e, 0x44, 0x47, 0x48, 0x46, 0x40, 0x37, 0x29, 0x0f, 0xea, + 0xcb, 0xbb, 0xa7, 0xa2, 0xab, 0xaf, 0xac, 0xa7, 0xa2, 0x9e, 0x9a, 0x99, + 0xa1, 0xa8, 0xa8, 0xa5, 0xa1, 0xa2, 0x9e, 0xa0, 0x9f, 0x9c, 0xa2, 0xa2, + 0x9e, 0xa0, 0xa9, 0xb7, 0xb1, 0xad, 0xac, 0xa1, 0x9b, 0xa0, 0xac, 0xa9, + 0xaa, 0xa8, 0xa4, 0xa5, 0xb1, 0xba, 0xbe, 0xbd, 0xbc, 0xbb, 0xb5, 0xb4, + 0xb0, 0xad, 0xab, 0xac, 0xa9, 0xa5, 0xaa, 0xad, 0xa9, 0xae, 0xb2, 0xa2, + 0x9c, 0x99, 0x9e, 0x9e, 0x9e, 0x9c, 0x9c, 0x9c, 0xa5, 0xad, 0xb3, 0xb8, + 0xb2, 0xac, 0xbc, 0xbb, 0xba, 0xb1, 0xb0, 0xb1, 0xb1, 0xaf, 0xb1, 0xbf, + 0xb6, 0xa5, 0xa1, 0xa3, 0xb1, 0xaf, 0xb4, 0xb7, 0xc0, 0xc5, 0xb3, 0xae, + 0xb6, 0xbb, 0xbf, 0xbd, 0xbc, 0xb8, 0xb8, 0xb9, 0xbb, 0xbc, 0xc5, 
0xc2, + 0xc5, 0xc7, 0xcc, 0xc7, 0xbf, 0xc5, 0xca, 0xc4, 0xc3, 0xc9, 0xc9, 0xca, + 0xce, 0xcf, 0xcc, 0xc8, 0xbf, 0xbc, 0xbd, 0xc0, 0xbf, 0xbd, 0xd0, 0xd2, + 0xbd, 0xbf, 0xc0, 0xc2, 0xc3, 0xb9, 0xb0, 0xb3, 0xb7, 0xba, 0xb8, 0xb1, + 0xad, 0xb7, 0xba, 0xcb, 0xf3, 0xde, 0xad, 0xb2, 0xc0, 0xc2, 0xb6, 0xc2, + 0xd8, 0xe0, 0xdc, 0xe6, 0x0c, 0x32, 0x47, 0x52, 0x58, 0x5d, 0x5e, 0x5f, + 0x60, 0x5f, 0x61, 0x61, 0x5d, 0x57, 0x33, 0x0f, 0xf1, 0xcf, 0xba, 0xb7, + 0xc5, 0xc7, 0xc7, 0x02, 0x37, 0x4e, 0x53, 0x56, 0x56, 0x55, 0x53, 0x54, + 0x57, 0x57, 0x58, 0x57, 0x57, 0x55, 0x52, 0x50, 0x51, 0x50, 0x4d, 0x43, + 0x32, 0x1d, 0x06, 0xf1, 0xf9, 0x18, 0x32, 0x41, 0x3f, 0x3e, 0x42, 0x47, + 0x47, 0x48, 0x46, 0x3d, 0x28, 0x12, 0x0f, 0x08, 0xf8, 0xd9, 0xb4, 0xa9, + 0xad, 0xae, 0xa1, 0xa0, 0x9f, 0x9c, 0x9a, 0x96, 0xa0, 0xa7, 0xb0, 0xac, + 0xac, 0xab, 0xa3, 0xa3, 0xa1, 0x98, 0x9c, 0x9e, 0xa2, 0xa1, 0xa9, 0xb0, + 0xad, 0xa8, 0xa6, 0xa0, 0x9a, 0x9c, 0xa6, 0xad, 0xac, 0xa7, 0xa7, 0xac, + 0xb7, 0xb7, 0xbb, 0xbe, 0xb8, 0xbc, 0xc0, 0xbc, 0xb5, 0xb2, 0xa6, 0xa0, + 0xa3, 0xa1, 0x9f, 0xa7, 0xac, 0xb1, 0xb4, 0xb3, 0xb1, 0xa9, 0xa1, 0x9d, + 0x9e, 0xa2, 0xa4, 0x9f, 0xa2, 0xad, 0xb2, 0xaa, 0x9d, 0xa1, 0xb3, 0xb8, + 0xb4, 0xaf, 0xae, 0xb0, 0xa7, 0xa5, 0xb3, 0xb4, 0xaa, 0xa1, 0xa1, 0xa5, + 0xac, 0xb0, 0xb1, 0xb3, 0xbf, 0xc8, 0xbd, 0xb5, 0xbe, 0xbd, 0xbe, 0xbf, + 0xba, 0xb6, 0xb1, 0xb1, 0xb7, 0xbb, 0xc7, 0xd4, 0xd3, 0xc8, 0xc5, 0xc1, + 0xbc, 0xc2, 0xc9, 0xcb, 0xce, 0xcc, 0xc3, 0xbe, 0xbf, 0xc3, 0xc7, 0xca, + 0xc5, 0xc6, 0xcc, 0xc9, 0xc6, 0xcd, 0xd0, 0xd0, 0xc3, 0xc2, 0xc1, 0xc7, + 0xc9, 0xc1, 0xb6, 0xb7, 0xb6, 0xb9, 0xb6, 0xb2, 0xb4, 0xbb, 0xc2, 0xdb, + 0xfd, 0xc6, 0xb2, 0xad, 0xb4, 0xae, 0xa3, 0xac, 0xcb, 0xd7, 0xd6, 0xd7, + 0x00, 0x26, 0x41, 0x4e, 0x57, 0x5b, 0x5d, 0x5d, 0x5d, 0x5a, 0x5d, 0x5f, + 0x60, 0x5d, 0x45, 0x21, 0xee, 0xcf, 0xbe, 0xc6, 0xbf, 0xce, 0xcd, 0xda, + 0x12, 0x42, 0x4e, 0x52, 0x56, 0x56, 0x53, 0x52, 0x56, 0x57, 0x57, 0x57, + 0x55, 0x52, 0x53, 0x4f, 0x4f, 0x4f, 0x4f, 0x4c, 0x3e, 0x2d, 0x13, 
0xf8, + 0x0c, 0x2f, 0x43, 0x47, 0x41, 0x45, 0x48, 0x4b, 0x4b, 0x4b, 0x49, 0x40, + 0x27, 0xfc, 0xdd, 0xde, 0xec, 0xf5, 0xdb, 0xc2, 0xb7, 0xaf, 0xa1, 0x9e, + 0x9b, 0x9a, 0x98, 0x98, 0x9f, 0xa9, 0xb2, 0xaf, 0xae, 0xaf, 0xa7, 0xa3, + 0xa4, 0xa3, 0xa4, 0xa1, 0x9e, 0x9c, 0xa9, 0xac, 0xa7, 0xa1, 0xa0, 0xa1, + 0x9a, 0x9e, 0xa1, 0xa7, 0xad, 0xad, 0xae, 0xb4, 0xb7, 0xb7, 0xbb, 0xbd, + 0xba, 0xbc, 0xbd, 0xbd, 0xbc, 0xb3, 0xa5, 0x9b, 0x9d, 0xa4, 0x9b, 0xa1, + 0xac, 0xb2, 0xba, 0xb7, 0xb7, 0xb1, 0xa7, 0x9b, 0x99, 0xa0, 0xa9, 0xaa, + 0xaa, 0xac, 0xb4, 0xac, 0x9c, 0x9b, 0xa7, 0xb7, 0xb4, 0xaf, 0xae, 0xac, + 0xa9, 0xaf, 0xb3, 0xa8, 0xa6, 0x9e, 0x9f, 0xa8, 0xac, 0xb0, 0xad, 0xc1, + 0xbd, 0xb6, 0xb2, 0xb0, 0xba, 0xc1, 0xbe, 0xbc, 0xb7, 0xb3, 0xb3, 0xb8, + 0xba, 0xb6, 0xbc, 0xc8, 0xc6, 0xc7, 0xc8, 0xc9, 0xcc, 0xcf, 0xcb, 0xc9, + 0xca, 0xc7, 0xc5, 0xc2, 0xc3, 0xc3, 0xcb, 0xca, 0xcc, 0xd2, 0xcf, 0xce, + 0xd1, 0xdd, 0xe0, 0xd8, 0xc7, 0xc6, 0xc0, 0xc9, 0xd0, 0xd1, 0xc2, 0xc2, + 0xbe, 0xb6, 0xb4, 0xbf, 0xae, 0xb5, 0xbc, 0xe4, 0x01, 0xbd, 0xaf, 0xa7, + 0xae, 0xb3, 0xa7, 0xae, 0xc3, 0xcc, 0xcd, 0xcf, 0xf1, 0x27, 0x43, 0x4d, + 0x52, 0x59, 0x5e, 0x5c, 0x5c, 0x59, 0x59, 0x5c, 0x5d, 0x5c, 0x50, 0x3a, + 0x1c, 0xf4, 0xda, 0xd3, 0xca, 0xc3, 0xcc, 0xcc, 0xdd, 0x28, 0x4c, 0x51, + 0x55, 0x57, 0x58, 0x56, 0x55, 0x57, 0x57, 0x57, 0x55, 0x52, 0x51, 0x50, + 0x4f, 0x50, 0x50, 0x4d, 0x48, 0x3e, 0x2f, 0x27, 0x33, 0x3f, 0x48, 0x45, + 0x45, 0x48, 0x4c, 0x4e, 0x4e, 0x4d, 0x4d, 0x47, 0x30, 0x0d, 0xe7, 0xc8, + 0xb8, 0xca, 0xdf, 0xc9, 0xbe, 0xbc, 0xab, 0xa5, 0xa0, 0x9c, 0x9b, 0x9c, + 0xa2, 0xad, 0xb5, 0xb1, 0xad, 0xaa, 0xac, 0xa2, 0x9e, 0xa8, 0xaa, 0xa1, + 0x9b, 0xa4, 0xac, 0xab, 0xa8, 0xa1, 0x9c, 0x9d, 0x9a, 0x98, 0x9c, 0xa7, + 0xad, 0xab, 0xb1, 0xb7, 0xb7, 0xb6, 0xb8, 0xb8, 0xb7, 0xbb, 0xbb, 0xb7, + 0xb6, 0xae, 0xa3, 0x9c, 0xa0, 0xac, 0xa6, 0xa1, 0xab, 0xac, 0xb7, 0xb6, + 0xad, 0xa8, 0xa1, 0x9c, 0x9a, 0xa1, 0xa7, 0xa7, 0xa1, 0xae, 0xb4, 0xb5, + 0xab, 0x9c, 0xa1, 0xb4, 0xb5, 0xb3, 0xb4, 0xb2, 0xad, 0xb6, 0xb1, 
0xa7, + 0xa7, 0xa3, 0xa1, 0xa8, 0xac, 0xae, 0xab, 0xb6, 0xb1, 0xaf, 0xad, 0xad, + 0xb2, 0xba, 0xc2, 0xc4, 0xb7, 0xb3, 0xbe, 0xc4, 0xc0, 0xb6, 0xb4, 0xc1, + 0xc3, 0xc1, 0xc4, 0xc8, 0xc3, 0xc0, 0xbd, 0xbc, 0xbc, 0xc1, 0xb7, 0xb2, + 0xba, 0xc2, 0xc6, 0xc2, 0xc6, 0xd0, 0xcc, 0xd3, 0xd9, 0xdf, 0xf2, 0xef, + 0xcc, 0xc5, 0xc2, 0xd2, 0xe2, 0xed, 0xdd, 0xc2, 0xbc, 0xc1, 0xc7, 0xd7, + 0xaf, 0xb6, 0xc0, 0xf3, 0xfb, 0xbb, 0xa6, 0xa3, 0xab, 0xba, 0xb5, 0xb1, + 0xbe, 0xc2, 0xc9, 0xcc, 0xed, 0x25, 0x43, 0x51, 0x53, 0x56, 0x5c, 0x5c, + 0x5b, 0x5b, 0x5b, 0x5b, 0x5c, 0x5d, 0x56, 0x49, 0x3f, 0x32, 0x12, 0xec, + 0xd6, 0xd0, 0xd2, 0xce, 0xce, 0x14, 0x45, 0x50, 0x54, 0x56, 0x57, 0x57, + 0x57, 0x56, 0x57, 0x56, 0x57, 0x57, 0x55, 0x51, 0x51, 0x50, 0x51, 0x4e, + 0x48, 0x44, 0x3e, 0x37, 0x3b, 0x40, 0x3f, 0x45, 0x49, 0x4d, 0x4d, 0x4f, + 0x51, 0x52, 0x51, 0x4d, 0x40, 0x1d, 0xf6, 0xd9, 0xbd, 0xb3, 0xb6, 0xd7, + 0xcc, 0xb8, 0xb8, 0xb1, 0xa5, 0xa1, 0xa0, 0xa0, 0xa9, 0xb3, 0xbb, 0xb9, + 0xb1, 0xa8, 0xa7, 0xa2, 0xa4, 0xa7, 0xaa, 0xa5, 0x9f, 0xaa, 0xab, 0xa7, + 0xa5, 0xa3, 0xa1, 0x9c, 0x9b, 0x9b, 0x9c, 0xa5, 0xab, 0xab, 0xac, 0xac, + 0xb5, 0xb4, 0xb4, 0xb8, 0xb3, 0xb9, 0xba, 0xb6, 0xb1, 0xa9, 0xa4, 0x9f, + 0x9f, 0xac, 0xae, 0xad, 0xac, 0xa9, 0xac, 0xac, 0xa9, 0xa5, 0x9e, 0x9c, + 0xa3, 0xae, 0xb7, 0xac, 0x99, 0x9d, 0xa8, 0xad, 0xb0, 0xa7, 0xa6, 0xa9, + 0xaa, 0xaa, 0xb1, 0xb7, 0xb3, 0xb5, 0xac, 0xa5, 0xa1, 0xa5, 0xa5, 0xa7, + 0xa7, 0xa7, 0xa8, 0xaa, 0xa7, 0xab, 0xa8, 0xa8, 0xae, 0xb7, 0xc1, 0xc7, + 0xc2, 0xbc, 0xbc, 0xbf, 0xc2, 0xca, 0xca, 0xcb, 0xc2, 0xc2, 0xc6, 0xcb, + 0xce, 0xd5, 0xd2, 0xcc, 0xd6, 0xd3, 0xc1, 0xb4, 0xbe, 0xcd, 0xdb, 0xc7, + 0xc2, 0xba, 0xb6, 0xc2, 0xc4, 0xc8, 0xd0, 0xc9, 0xcc, 0xd8, 0xc9, 0xc0, + 0xd2, 0xe0, 0xd8, 0xc5, 0xbc, 0xc9, 0xd7, 0xd6, 0xb1, 0xb9, 0xc9, 0xf9, + 0xf4, 0xc8, 0xaa, 0xa5, 0xa4, 0xb2, 0xc1, 0xc5, 0xc1, 0xc2, 0xce, 0xe1, + 0x00, 0x25, 0x3c, 0x4d, 0x56, 0x59, 0x59, 0x5a, 0x5c, 0x5c, 0x5d, 0x5f, + 0x5c, 0x5a, 0x57, 0x50, 0x4a, 0x45, 0x3d, 0x2d, 0x17, 0x01, 0xf4, 
0xf0, + 0xe7, 0x0a, 0x3d, 0x4c, 0x51, 0x52, 0x54, 0x56, 0x58, 0x57, 0x58, 0x57, + 0x57, 0x57, 0x57, 0x54, 0x51, 0x50, 0x50, 0x4e, 0x48, 0x43, 0x3e, 0x34, + 0x2b, 0x27, 0x3f, 0x4d, 0x4f, 0x50, 0x4f, 0x51, 0x53, 0x54, 0x52, 0x50, + 0x47, 0x33, 0x0f, 0xe2, 0xc4, 0xbd, 0xab, 0xb0, 0xd3, 0xcc, 0xb7, 0xb0, + 0xa7, 0xa3, 0xa7, 0xa6, 0xa5, 0xae, 0xba, 0xb8, 0xad, 0xa7, 0xa5, 0xa6, + 0xa5, 0xa7, 0xab, 0xab, 0xa4, 0xa6, 0xa9, 0xa3, 0xa0, 0xa1, 0x9f, 0x9a, + 0x9c, 0x9a, 0x9d, 0xa7, 0xad, 0xb1, 0xaf, 0xac, 0xb1, 0xad, 0xb1, 0xb9, + 0xab, 0xac, 0xb1, 0xb3, 0xb1, 0xac, 0xa8, 0xa5, 0xa1, 0xa5, 0xb4, 0xb1, + 0xa5, 0xaa, 0xac, 0xa7, 0xa7, 0xa7, 0xa0, 0xa1, 0xba, 0xbf, 0xbd, 0xb7, + 0xa6, 0x9e, 0xa2, 0xa8, 0xa7, 0xa7, 0xa5, 0xa6, 0xa9, 0xa9, 0xb6, 0xc6, + 0xbc, 0xb5, 0xa7, 0xa7, 0xa5, 0xa9, 0xac, 0xa7, 0xa6, 0xa6, 0xad, 0xb7, + 0xb2, 0xb1, 0xb0, 0xae, 0xb3, 0xb8, 0xbc, 0xc0, 0xc2, 0xbc, 0xc0, 0xc2, + 0xc5, 0xc8, 0xcd, 0xc7, 0xc8, 0xd5, 0xdc, 0xdd, 0xd7, 0xd1, 0xd4, 0xd8, + 0xd0, 0xcf, 0xcf, 0xc7, 0xc9, 0xc7, 0xde, 0xdd, 0xd0, 0xc7, 0xc4, 0xcb, + 0xca, 0xcc, 0xcc, 0xc6, 0xd1, 0xd2, 0xce, 0xcd, 0xd9, 0xd6, 0xd5, 0xcb, + 0xbe, 0xc3, 0xd2, 0xd4, 0xae, 0xac, 0xc2, 0xf5, 0xe9, 0xcd, 0xb3, 0xa7, + 0xa8, 0xac, 0xb3, 0xc2, 0xcc, 0xd2, 0xdc, 0xf3, 0x0a, 0x20, 0x37, 0x46, + 0x50, 0x57, 0x59, 0x5a, 0x5b, 0x5d, 0x5e, 0x5f, 0x5d, 0x59, 0x59, 0x57, + 0x50, 0x49, 0x47, 0x46, 0x42, 0x3f, 0x3d, 0x40, 0x38, 0x2e, 0x3a, 0x47, + 0x4d, 0x51, 0x52, 0x54, 0x57, 0x56, 0x57, 0x57, 0x57, 0x57, 0x58, 0x56, + 0x54, 0x51, 0x4c, 0x45, 0x42, 0x3f, 0x3c, 0x38, 0x2d, 0x32, 0x48, 0x52, + 0x53, 0x52, 0x52, 0x53, 0x55, 0x55, 0x54, 0x52, 0x47, 0x36, 0x1d, 0xfd, + 0xcc, 0xbc, 0xae, 0xa7, 0xac, 0xd0, 0xca, 0xae, 0xa8, 0xaa, 0xa7, 0xa7, + 0xa6, 0xa9, 0xb4, 0xb7, 0xb2, 0xad, 0xa7, 0xab, 0xa9, 0xa7, 0xa8, 0xac, + 0xac, 0xa2, 0xa4, 0xa3, 0x9f, 0x9c, 0x9d, 0x9c, 0x9a, 0x97, 0x9c, 0xa4, + 0xa7, 0xb0, 0xb5, 0xb1, 0xac, 0xae, 0xb9, 0xbc, 0xb6, 0xad, 0xa7, 0xab, + 0xac, 0xa7, 0xa4, 0xa4, 0xa3, 0xa6, 0xb3, 0xb1, 0xa6, 0xa7, 0xab, 
0xab, + 0xa7, 0xa6, 0xa7, 0xc0, 0xd8, 0xd1, 0xbb, 0xaf, 0xaa, 0xa6, 0xa8, 0xaf, + 0xad, 0xb3, 0xb5, 0xad, 0xaa, 0xaf, 0xbc, 0xca, 0xc9, 0xc0, 0xab, 0xa9, + 0xaa, 0xac, 0xaa, 0xa5, 0xa4, 0xa8, 0xb1, 0xb7, 0xb9, 0xb3, 0xb0, 0xb1, + 0xb6, 0xb7, 0xba, 0xbb, 0xc7, 0xc4, 0xd0, 0xd5, 0xd7, 0xd7, 0xcc, 0xc8, + 0xc6, 0xce, 0xd6, 0xd4, 0xd5, 0xd1, 0xd1, 0xdb, 0xd4, 0xd3, 0xd6, 0xd7, + 0xda, 0xd7, 0xd4, 0xdc, 0xdc, 0xd9, 0xd7, 0xd2, 0xcf, 0xce, 0xc2, 0xc6, + 0xc6, 0xc1, 0xcf, 0xd2, 0xd6, 0xd1, 0xd1, 0xcc, 0xc7, 0xcc, 0xd2, 0xd4, + 0xa1, 0xa6, 0xc4, 0xee, 0xea, 0xd4, 0xb8, 0xac, 0xb0, 0xb1, 0xbc, 0xcd, + 0xd5, 0xdd, 0xed, 0x00, 0x15, 0x23, 0x37, 0x42, 0x4c, 0x52, 0x56, 0x58, + 0x5a, 0x5c, 0x5d, 0x5d, 0x5c, 0x5a, 0x5a, 0x5d, 0x55, 0x4d, 0x48, 0x47, + 0x47, 0x43, 0x41, 0x45, 0x47, 0x43, 0x40, 0x42, 0x48, 0x4d, 0x52, 0x53, + 0x56, 0x57, 0x57, 0x57, 0x57, 0x57, 0x58, 0x57, 0x55, 0x52, 0x4d, 0x49, + 0x47, 0x42, 0x41, 0x3e, 0x35, 0x40, 0x50, 0x54, 0x52, 0x53, 0x54, 0x54, + 0x54, 0x55, 0x55, 0x52, 0x4b, 0x35, 0x14, 0xfb, 0xe3, 0xca, 0xb4, 0xa7, + 0xa6, 0xb2, 0xc9, 0xc2, 0xb2, 0xae, 0xa9, 0xa8, 0xa7, 0xa4, 0xb0, 0xb7, + 0xb4, 0xb0, 0xb1, 0xb3, 0xb0, 0xac, 0xae, 0xb1, 0xb0, 0xa1, 0x9c, 0xa1, + 0xa1, 0x9f, 0x9d, 0x9c, 0x9c, 0x97, 0x97, 0x9c, 0xa6, 0xaf, 0xb1, 0xb3, + 0xb2, 0xb5, 0xb5, 0xb8, 0xb8, 0xb9, 0xb7, 0xac, 0xa7, 0xa7, 0xa5, 0xa4, + 0xa6, 0xad, 0xac, 0xa6, 0xa5, 0xa2, 0xa7, 0xac, 0xa7, 0xa2, 0xa8, 0xbc, + 0xc7, 0xbd, 0xad, 0xa8, 0xa7, 0xa8, 0xae, 0xbb, 0xbb, 0xb8, 0xb7, 0xb5, + 0xae, 0xa6, 0xac, 0xb6, 0xb9, 0xb7, 0xac, 0xab, 0xaf, 0xba, 0xb8, 0xb0, + 0xa7, 0xa8, 0xb6, 0xba, 0xb8, 0xb6, 0xb3, 0xb1, 0xb1, 0xb6, 0xb7, 0xb4, + 0xb7, 0xbb, 0xc5, 0xcd, 0xce, 0xcc, 0xc7, 0xcb, 0xcc, 0xd5, 0xd3, 0xcb, + 0xc8, 0xcc, 0xd1, 0xcf, 0xcd, 0xc9, 0xc9, 0xcc, 0xcc, 0xc8, 0xc6, 0xc7, + 0xc8, 0xcc, 0xce, 0xd2, 0xd5, 0xcc, 0xc7, 0xc9, 0xc8, 0xc7, 0xca, 0xcd, + 0xd7, 0xdb, 0xd7, 0xd8, 0xd7, 0xd8, 0xd3, 0xd3, 0xa5, 0xa7, 0xc2, 0xe4, + 0xe5, 0xd3, 0xbc, 0xba, 0xbb, 0xc1, 0xcd, 0xd7, 0xde, 0xe6, 0xf4, 
0x02, + 0x15, 0x29, 0x31, 0x41, 0x48, 0x4e, 0x53, 0x58, 0x5a, 0x5a, 0x5a, 0x5c, + 0x5c, 0x59, 0x58, 0x5d, 0x5c, 0x55, 0x4d, 0x4a, 0x47, 0x45, 0x41, 0x42, + 0x43, 0x47, 0x46, 0x46, 0x46, 0x4a, 0x50, 0x51, 0x53, 0x57, 0x58, 0x5a, + 0x59, 0x57, 0x58, 0x58, 0x58, 0x57, 0x55, 0x50, 0x4a, 0x4b, 0x4a, 0x47, + 0x42, 0x4b, 0x55, 0x54, 0x55, 0x57, 0x57, 0x54, 0x52, 0x53, 0x55, 0x56, + 0x51, 0x43, 0x27, 0x02, 0xdf, 0xd1, 0xc5, 0xb4, 0xad, 0xab, 0xad, 0xc3, + 0xbc, 0xae, 0xad, 0xa9, 0xa7, 0xa0, 0xa0, 0xac, 0xb3, 0xb4, 0xb5, 0xb7, + 0xb2, 0xb0, 0xb4, 0xb6, 0xb1, 0xa1, 0x9f, 0xaf, 0xa9, 0xa1, 0x9e, 0x99, + 0x9f, 0x99, 0x93, 0x97, 0xa2, 0xa8, 0xaf, 0xb3, 0xb6, 0xb1, 0xb1, 0xb7, + 0xb1, 0xb6, 0xb5, 0xb1, 0xac, 0xac, 0xaa, 0xa5, 0xac, 0xb3, 0xac, 0xa2, + 0x9f, 0xa0, 0xa5, 0xac, 0xab, 0xa9, 0xa4, 0xa8, 0xb1, 0xaa, 0xa8, 0xa7, + 0xa7, 0xaa, 0xad, 0xb9, 0xba, 0xb7, 0xb7, 0xb6, 0xae, 0xa1, 0x9c, 0xac, + 0xbc, 0xb9, 0xab, 0xb1, 0xb0, 0xb8, 0xc7, 0xc5, 0xbc, 0xb6, 0xb7, 0xbc, + 0xba, 0xba, 0xb5, 0xb3, 0xb3, 0xb7, 0xba, 0xbd, 0xbd, 0xc1, 0xc0, 0xba, + 0xc0, 0xc9, 0xce, 0xc8, 0xc8, 0xcc, 0xd5, 0xcd, 0xcb, 0xca, 0xc8, 0xc6, + 0xcb, 0xd2, 0xd2, 0xcf, 0xcf, 0xd2, 0xd0, 0xce, 0xce, 0xd4, 0xd7, 0xd3, + 0xcd, 0xc7, 0xd2, 0xcd, 0xcf, 0xd2, 0xd0, 0xd0, 0xd2, 0xd9, 0xdb, 0xd9, + 0xd6, 0xd7, 0xd5, 0xd2, 0xa6, 0xac, 0xc0, 0xd2, 0xe2, 0xd6, 0xc6, 0xc7, + 0xc3, 0xc9, 0xcf, 0xdc, 0xe2, 0xea, 0xf7, 0x08, 0x16, 0x27, 0x33, 0x3c, + 0x43, 0x46, 0x54, 0x58, 0x59, 0x58, 0x5a, 0x5a, 0x59, 0x59, 0x57, 0x59, + 0x5c, 0x5b, 0x56, 0x51, 0x50, 0x4c, 0x4c, 0x48, 0x45, 0x47, 0x47, 0x4a, + 0x4d, 0x51, 0x51, 0x4f, 0x51, 0x54, 0x57, 0x59, 0x5b, 0x5b, 0x5a, 0x5a, + 0x5a, 0x57, 0x56, 0x53, 0x4d, 0x4c, 0x4c, 0x4a, 0x4a, 0x4d, 0x51, 0x52, + 0x52, 0x53, 0x54, 0x54, 0x55, 0x57, 0x57, 0x57, 0x54, 0x4e, 0x39, 0x11, + 0xdf, 0xc5, 0xb3, 0xb0, 0xab, 0xa7, 0xa7, 0xae, 0xbf, 0xb0, 0xaf, 0xad, + 0xa8, 0xa7, 0xa1, 0x9e, 0xa0, 0xa7, 0xb1, 0xb7, 0xac, 0xa7, 0xb1, 0xbb, + 0xb1, 0xa2, 0xae, 0xb7, 0xaf, 0xa8, 0x9e, 0x9a, 0x9d, 0xa1, 0x97, 
0x97, + 0x9e, 0xa3, 0xa5, 0xaf, 0xb1, 0xac, 0xad, 0xb1, 0xac, 0xac, 0xad, 0xa9, + 0xa1, 0xa3, 0xa7, 0xa7, 0xaf, 0xad, 0xb0, 0xac, 0xa2, 0xa2, 0xa3, 0xa9, + 0xad, 0xac, 0xa1, 0x9f, 0xa7, 0xa3, 0xa5, 0xa7, 0xa5, 0xa7, 0xac, 0xb2, + 0xb3, 0xb7, 0xb8, 0xb7, 0xb5, 0xad, 0x9e, 0xa7, 0xb9, 0xb5, 0xaf, 0xb6, + 0xb2, 0xb1, 0xb8, 0xbd, 0xca, 0xc4, 0xb9, 0xb9, 0xba, 0xbc, 0xba, 0xb7, + 0xb7, 0xb9, 0xba, 0xb9, 0xbb, 0xc1, 0xc2, 0xbc, 0xc2, 0xc3, 0xc5, 0xbc, + 0xc7, 0xd9, 0xe5, 0xdc, 0xd2, 0xcc, 0xca, 0xc8, 0xcd, 0xcc, 0xce, 0xd4, + 0xdc, 0xdc, 0xdc, 0xe2, 0xdd, 0xdc, 0xdc, 0xde, 0xdd, 0xd6, 0xdc, 0xd8, + 0xd6, 0xd7, 0xd4, 0xd3, 0xd2, 0xd4, 0xd8, 0xd8, 0xd4, 0xd2, 0xd1, 0xcf, + 0xa7, 0xb1, 0xba, 0xc7, 0xee, 0xff, 0xfa, 0xe4, 0xc9, 0xcb, 0xcf, 0xda, + 0xe6, 0xef, 0xf8, 0x04, 0x12, 0x22, 0x29, 0x35, 0x40, 0x45, 0x50, 0x53, + 0x58, 0x58, 0x5b, 0x59, 0x5b, 0x58, 0x57, 0x58, 0x5a, 0x5b, 0x59, 0x57, + 0x53, 0x50, 0x50, 0x4d, 0x4a, 0x4a, 0x4c, 0x4e, 0x52, 0x53, 0x57, 0x58, + 0x56, 0x54, 0x55, 0x54, 0x57, 0x5a, 0x5c, 0x5b, 0x5b, 0x5a, 0x58, 0x52, + 0x4d, 0x48, 0x47, 0x4a, 0x4b, 0x4d, 0x4e, 0x4f, 0x4f, 0x51, 0x53, 0x55, + 0x56, 0x57, 0x58, 0x58, 0x57, 0x55, 0x4b, 0x32, 0x04, 0xce, 0xb5, 0xaf, + 0xac, 0xab, 0xac, 0xab, 0xb9, 0xb1, 0xb1, 0xaf, 0xaa, 0xa2, 0x9d, 0x9c, + 0x9d, 0x9e, 0xa9, 0xba, 0xbc, 0xae, 0xa0, 0xa7, 0xa1, 0x99, 0xa2, 0xb3, + 0xb5, 0xb0, 0xa0, 0x98, 0x9a, 0x9e, 0x9d, 0x9c, 0x9e, 0xa1, 0xa7, 0xac, + 0xad, 0xaa, 0xaf, 0xb5, 0xac, 0xa4, 0xa7, 0xa1, 0x9c, 0xa1, 0xa7, 0xa9, + 0xb0, 0xa6, 0xb4, 0xb8, 0xae, 0xa6, 0xa0, 0xa4, 0xa8, 0xac, 0xa7, 0xa4, + 0xa3, 0x9f, 0xa1, 0xa5, 0xa2, 0xa4, 0xa8, 0xaf, 0xae, 0xb9, 0xb7, 0xb1, + 0xac, 0xb2, 0xac, 0xa3, 0xac, 0xae, 0xb6, 0xb7, 0xb4, 0xb2, 0xb6, 0xbd, + 0xce, 0xc8, 0xce, 0xc4, 0xb9, 0xb6, 0xb9, 0xbc, 0xbc, 0xbc, 0xba, 0xbf, + 0xc6, 0xc5, 0xc5, 0xbf, 0xc2, 0xbf, 0xba, 0xb8, 0xc0, 0xd0, 0xd6, 0xd3, + 0xca, 0xbe, 0xc8, 0xd7, 0xdb, 0xd7, 0xce, 0xcc, 0xce, 0xd4, 0xd7, 0xd7, + 0xd2, 0xcc, 0xc7, 0xc9, 0xd2, 0xd6, 0xd6, 0xd7, 0xdd, 0xe0, 0xd9, 
0xd5, + 0xd8, 0xdc, 0xd9, 0xd8, 0xda, 0xda, 0xd5, 0xd5, 0xaa, 0xb7, 0xbc, 0xcb, + 0xf7, 0x14, 0x18, 0x10, 0xf2, 0xd2, 0xcc, 0xd7, 0xe2, 0xf2, 0xf5, 0x0b, + 0x1a, 0x22, 0x2b, 0x35, 0x3d, 0x41, 0x4b, 0x50, 0x55, 0x58, 0x59, 0x58, + 0x59, 0x58, 0x58, 0x57, 0x57, 0x57, 0x56, 0x55, 0x54, 0x52, 0x53, 0x50, + 0x4f, 0x4f, 0x4f, 0x4f, 0x51, 0x52, 0x55, 0x5a, 0x5d, 0x5c, 0x57, 0x55, + 0x55, 0x57, 0x59, 0x5a, 0x59, 0x5b, 0x59, 0x54, 0x4e, 0x4a, 0x45, 0x49, + 0x4b, 0x4c, 0x4b, 0x4d, 0x4e, 0x52, 0x52, 0x53, 0x54, 0x56, 0x57, 0x5a, + 0x5a, 0x58, 0x52, 0x46, 0x2b, 0x02, 0xd3, 0xb6, 0xac, 0xab, 0xa9, 0xa8, + 0xae, 0xaf, 0xa9, 0xa2, 0xa1, 0xa2, 0x9f, 0x9d, 0x9d, 0xa0, 0xa7, 0xb7, + 0xbc, 0xb5, 0xa0, 0x9b, 0x9a, 0x97, 0x98, 0xa4, 0xb1, 0xb3, 0xa7, 0x9c, + 0x9a, 0x9d, 0xa1, 0x9c, 0x9c, 0x9d, 0xa1, 0xa7, 0xac, 0xad, 0xaf, 0xb4, + 0xb0, 0xa1, 0x9e, 0x9e, 0x9d, 0xa6, 0xae, 0xa8, 0xa6, 0xa6, 0xbc, 0xc4, + 0xb9, 0xae, 0xa7, 0xa7, 0xa7, 0xa9, 0xb0, 0xad, 0xa3, 0xa1, 0xa2, 0xa3, + 0xa1, 0xa3, 0xa7, 0xae, 0xac, 0xb3, 0xb2, 0xa8, 0xa6, 0xa7, 0xa9, 0xab, + 0xae, 0xb2, 0xbb, 0xbc, 0xbb, 0xb7, 0xb6, 0xba, 0xbf, 0xce, 0xde, 0xca, + 0xb9, 0xb6, 0xbb, 0xbc, 0xb9, 0xba, 0xbc, 0xbb, 0xc4, 0xca, 0xcf, 0xd2, + 0xcd, 0xc2, 0xb7, 0xbc, 0xb9, 0xbc, 0xbf, 0xc1, 0xcc, 0xc7, 0xcc, 0xd6, + 0xdc, 0xd7, 0xd5, 0xd7, 0xcf, 0xcc, 0xca, 0xc2, 0xc3, 0xcb, 0xcb, 0xce, + 0xcf, 0xcf, 0xc5, 0xc4, 0xce, 0xce, 0xce, 0xcf, 0xd0, 0xd2, 0xd5, 0xd7, + 0xda, 0xd8, 0xd2, 0xcf, 0xae, 0xaf, 0xbc, 0xd0, 0xf9, 0x11, 0x18, 0x18, + 0x12, 0xfe, 0xda, 0xd6, 0xdf, 0xeb, 0xf6, 0x03, 0x0e, 0x1b, 0x25, 0x2f, + 0x38, 0x41, 0x47, 0x4e, 0x52, 0x55, 0x57, 0x57, 0x57, 0x57, 0x56, 0x55, + 0x53, 0x52, 0x51, 0x52, 0x52, 0x52, 0x52, 0x52, 0x51, 0x52, 0x52, 0x52, + 0x51, 0x52, 0x53, 0x56, 0x58, 0x5b, 0x5a, 0x58, 0x57, 0x55, 0x57, 0x57, + 0x57, 0x57, 0x58, 0x57, 0x55, 0x51, 0x4d, 0x4b, 0x4c, 0x4c, 0x4b, 0x4d, + 0x4f, 0x52, 0x54, 0x56, 0x57, 0x57, 0x58, 0x5a, 0x5b, 0x5a, 0x57, 0x51, + 0x42, 0x2c, 0x08, 0xdf, 0xc3, 0xb0, 0xa9, 0xa9, 0xa7, 0xaf, 0xab, 
0xa3, + 0x9f, 0x9f, 0x9f, 0x9e, 0x9d, 0x9e, 0xa1, 0xb4, 0xb8, 0xa8, 0x9c, 0x9b, + 0x98, 0x96, 0x98, 0x9b, 0xa9, 0xab, 0xa6, 0x9d, 0x9a, 0xa0, 0x9d, 0x99, + 0x9d, 0x9f, 0xa1, 0xa2, 0xaa, 0xb1, 0xb3, 0xb0, 0xaa, 0xa3, 0x9a, 0x98, + 0x9b, 0xb3, 0xb7, 0xa2, 0x9d, 0xa8, 0xbc, 0xbf, 0xb3, 0xb1, 0xb2, 0xb3, + 0xaa, 0xac, 0xaf, 0xac, 0xa0, 0xa3, 0xa4, 0xa1, 0xa1, 0xa4, 0xa5, 0xaf, + 0xb3, 0xb6, 0xb5, 0xac, 0xa5, 0xa2, 0xa1, 0xa6, 0xa8, 0xa8, 0xb6, 0xbc, + 0xc0, 0xbe, 0xba, 0xb5, 0xba, 0xce, 0xca, 0xc6, 0xb9, 0xb9, 0xbb, 0xbc, + 0xb6, 0xb6, 0xc0, 0xc5, 0xca, 0xcb, 0xcc, 0xcd, 0xd3, 0xd2, 0xbd, 0xb4, + 0xbb, 0xbc, 0xba, 0xb8, 0xcb, 0xd4, 0xcf, 0xcc, 0xd1, 0xcd, 0xcb, 0xcf, + 0xd2, 0xcf, 0xc7, 0xbe, 0xb7, 0xc0, 0xc5, 0xcd, 0xcd, 0xcc, 0xcb, 0xcb, + 0xcc, 0xca, 0xc6, 0xc6, 0xc1, 0xc3, 0xc8, 0xcb, 0xc8, 0xc1, 0xbc, 0xb6, + 0xb1, 0xaa, 0xb6, 0xe8, 0x0a, 0x11, 0x1a, 0x0f, 0xf7, 0xdc, 0xca, 0xd2, + 0xd9, 0xe5, 0xf1, 0xfb, 0x08, 0x13, 0x1e, 0x24, 0x31, 0x38, 0x42, 0x46, + 0x4e, 0x4f, 0x51, 0x53, 0x56, 0x57, 0x56, 0x53, 0x52, 0x4e, 0x4f, 0x52, + 0x52, 0x52, 0x52, 0x50, 0x4e, 0x51, 0x52, 0x52, 0x52, 0x52, 0x52, 0x54, + 0x59, 0x58, 0x57, 0x57, 0x53, 0x52, 0x52, 0x55, 0x55, 0x56, 0x56, 0x57, + 0x56, 0x51, 0x4e, 0x4f, 0x4d, 0x4c, 0x4f, 0x50, 0x52, 0x54, 0x57, 0x59, + 0x59, 0x59, 0x5b, 0x5d, 0x5d, 0x5b, 0x59, 0x57, 0x51, 0x42, 0x2b, 0x0a, + 0xe3, 0xc4, 0xb7, 0xac, 0xa9, 0xaf, 0xad, 0xa6, 0xa0, 0xa0, 0xa1, 0xa3, + 0xa5, 0xa3, 0xa1, 0xb1, 0xb7, 0xa4, 0x9b, 0x9e, 0x99, 0x95, 0x9a, 0x9b, + 0x9c, 0xa4, 0xa6, 0x9c, 0x9a, 0x9b, 0x9a, 0x98, 0x9c, 0x9e, 0xa1, 0xa9, + 0xb2, 0xb5, 0xaf, 0xaf, 0xa6, 0x9e, 0x9b, 0x9a, 0xa8, 0xb7, 0xb8, 0xb0, + 0xa8, 0xac, 0xae, 0xac, 0xae, 0xb1, 0xb3, 0xb6, 0xb0, 0xb0, 0xb2, 0xad, + 0xa6, 0xa1, 0xa3, 0xa3, 0xa1, 0xa0, 0xa1, 0xa7, 0xaf, 0xb4, 0xb3, 0xb2, + 0xab, 0xa6, 0xa4, 0xa7, 0xa7, 0xa5, 0xa6, 0xac, 0xb4, 0xb9, 0xbc, 0xb7, + 0xb3, 0xb8, 0xc9, 0xc3, 0xb5, 0xb7, 0xb9, 0xbb, 0xb1, 0xb1, 0xbb, 0xc2, + 0xc5, 0xc0, 0xc4, 0xc6, 0xc7, 0xcb, 0xc9, 0xc1, 0xbe, 0xc7, 0xc7, 
0xc7, + 0xcc, 0xca, 0xc9, 0xc7, 0xc6, 0xc5, 0xcb, 0xcd, 0xd1, 0xcd, 0xd2, 0xd3, + 0xcb, 0xc7, 0xc6, 0xc8, 0xc6, 0xc4, 0xc5, 0xc7, 0xc3, 0xbd, 0xb8, 0xbf, + 0xc3, 0xc4, 0xc7, 0xc2, 0xc6, 0xc2, 0xc4, 0xb9, 0xb5, 0xa8, 0xb7, 0x08, + 0x1a, 0x13, 0x07, 0xe2, 0xcc, 0xbf, 0xb8, 0xbf, 0xc9, 0xdb, 0xeb, 0xf7, + 0x08, 0x10, 0x19, 0x22, 0x2e, 0x37, 0x3e, 0x42, 0x48, 0x4d, 0x4d, 0x4f, + 0x52, 0x56, 0x57, 0x56, 0x53, 0x4e, 0x49, 0x4d, 0x4e, 0x4f, 0x50, 0x4f, + 0x4e, 0x4e, 0x4f, 0x51, 0x51, 0x52, 0x52, 0x55, 0x56, 0x56, 0x55, 0x56, + 0x52, 0x52, 0x51, 0x4e, 0x51, 0x53, 0x52, 0x55, 0x54, 0x50, 0x4c, 0x4e, + 0x4f, 0x4e, 0x51, 0x51, 0x52, 0x54, 0x57, 0x5a, 0x5a, 0x5a, 0x5b, 0x5d, + 0x5f, 0x5d, 0x5b, 0x59, 0x57, 0x4d, 0x3a, 0x20, 0x09, 0xe2, 0xcb, 0xbb, + 0xac, 0xa8, 0xaa, 0xaa, 0xa4, 0xa1, 0xa0, 0xa5, 0xab, 0xaa, 0xa3, 0xac, + 0xaf, 0x9f, 0x9f, 0xa2, 0x9c, 0x97, 0x9a, 0x9c, 0x9c, 0xa2, 0xa1, 0x99, + 0x97, 0x98, 0x9a, 0x99, 0x9c, 0x9d, 0xa7, 0xaf, 0xb1, 0xb0, 0xac, 0xa9, + 0xa1, 0x99, 0x9c, 0xa5, 0xb6, 0xb5, 0xb7, 0xb5, 0xb1, 0xb3, 0xb0, 0xa4, + 0xa7, 0xab, 0xaf, 0xb5, 0xb6, 0xb1, 0xb1, 0xb1, 0xa6, 0xa1, 0x9c, 0xa2, + 0xa0, 0x9c, 0xa0, 0xa0, 0xa6, 0xaf, 0xb4, 0xb4, 0xae, 0xa7, 0xa7, 0xa8, + 0xa7, 0xa7, 0xa6, 0xa2, 0x9d, 0xa3, 0xb1, 0xb7, 0xb1, 0xb6, 0xcc, 0xcc, + 0xc0, 0xb4, 0xb6, 0xbc, 0xb9, 0xbb, 0xbd, 0xbd, 0xbf, 0xbb, 0xba, 0xc2, + 0xcb, 0xce, 0xcc, 0xc9, 0xbc, 0xcc, 0xd1, 0xcc, 0xc2, 0xc7, 0xc7, 0xc4, + 0xcd, 0xc8, 0xc7, 0xcd, 0xcd, 0xcc, 0xcf, 0xc9, 0xcd, 0xd2, 0xce, 0xd0, + 0xd2, 0xce, 0xc7, 0xca, 0xc7, 0xbf, 0xbf, 0xc5, 0xc6, 0xc7, 0xce, 0xcc, + 0xca, 0xc6, 0xc4, 0xc1, 0xb4, 0xb1, 0xc4, 0x12, 0x1a, 0x08, 0xf2, 0xe2, + 0xd7, 0xcc, 0xbe, 0xba, 0xb9, 0xc6, 0xd3, 0xe5, 0xfc, 0x08, 0x12, 0x18, + 0x25, 0x2f, 0x34, 0x3b, 0x42, 0x47, 0x4a, 0x4e, 0x4d, 0x4e, 0x53, 0x53, + 0x52, 0x50, 0x4a, 0x46, 0x4c, 0x4d, 0x4d, 0x4c, 0x4f, 0x4d, 0x4c, 0x4c, + 0x4e, 0x4e, 0x51, 0x53, 0x53, 0x55, 0x55, 0x55, 0x53, 0x52, 0x51, 0x4f, + 0x4d, 0x50, 0x52, 0x52, 0x52, 0x4f, 0x4a, 0x4c, 0x50, 0x50, 0x50, 
0x52, + 0x54, 0x57, 0x57, 0x5a, 0x5b, 0x5a, 0x5d, 0x5c, 0x5d, 0x5d, 0x5c, 0x5b, + 0x58, 0x54, 0x4b, 0x30, 0x0a, 0xe9, 0xcd, 0xbb, 0xb1, 0xac, 0xaa, 0xa7, + 0xa2, 0xa1, 0xa1, 0xa7, 0xac, 0xac, 0xa1, 0xa5, 0xa9, 0xa5, 0xa7, 0xa9, + 0xa2, 0x99, 0x9a, 0x9c, 0x97, 0x9d, 0xa4, 0x9b, 0x94, 0x97, 0x97, 0x9a, + 0x9c, 0x9c, 0xa5, 0xb0, 0xb1, 0xad, 0xa9, 0xa1, 0x9c, 0x9a, 0xa6, 0xa4, + 0xae, 0xb7, 0xb7, 0xb5, 0xb1, 0xb1, 0xb0, 0xa7, 0xa7, 0xab, 0xb1, 0xb6, + 0xb7, 0xb3, 0xad, 0xb0, 0xaa, 0xa6, 0xa0, 0xa3, 0x9f, 0x9c, 0x9d, 0x9d, + 0xa3, 0xad, 0xb3, 0xb1, 0xac, 0xa8, 0xa3, 0xa3, 0xa6, 0xa5, 0xa5, 0xa4, + 0x9b, 0x97, 0x9c, 0xa3, 0xa6, 0xb0, 0xbb, 0xc2, 0xc9, 0xbd, 0xbc, 0xc0, + 0xbe, 0xbf, 0xbc, 0xbb, 0xbe, 0xc2, 0xc5, 0xc7, 0xc3, 0xc2, 0xc5, 0xc4, + 0xbc, 0xc0, 0xc7, 0xc8, 0xcc, 0xd3, 0xd0, 0xc5, 0xc6, 0xce, 0xcc, 0xce, + 0xc5, 0xc6, 0xc5, 0xc2, 0xc8, 0xca, 0xc4, 0xc7, 0xd1, 0xcc, 0xc9, 0xcc, + 0xca, 0xc4, 0xc0, 0xbc, 0xc8, 0xce, 0xe3, 0xe2, 0xce, 0xc9, 0xc2, 0xbf, + 0xab, 0xb5, 0xc1, 0x04, 0x11, 0x0a, 0xfc, 0xf0, 0xe2, 0xd5, 0xcc, 0xc3, + 0xc1, 0xc0, 0xbf, 0xcb, 0xe2, 0xf2, 0x00, 0x12, 0x1f, 0x27, 0x2c, 0x34, + 0x3c, 0x42, 0x42, 0x47, 0x47, 0x45, 0x48, 0x4d, 0x4d, 0x4d, 0x4b, 0x46, + 0x42, 0x47, 0x49, 0x47, 0x4b, 0x4d, 0x4d, 0x4b, 0x4d, 0x4d, 0x4f, 0x52, + 0x52, 0x53, 0x55, 0x52, 0x52, 0x52, 0x51, 0x4f, 0x4d, 0x4d, 0x51, 0x55, + 0x51, 0x50, 0x4d, 0x4c, 0x4e, 0x51, 0x50, 0x54, 0x56, 0x58, 0x57, 0x59, + 0x5b, 0x5b, 0x5d, 0x5e, 0x5d, 0x5c, 0x5c, 0x5c, 0x5a, 0x57, 0x52, 0x48, + 0x33, 0x0b, 0xdf, 0xc2, 0xb1, 0xab, 0xab, 0xa3, 0xa1, 0xa5, 0xa7, 0xab, + 0xaf, 0xa9, 0x9f, 0xaa, 0xb3, 0xad, 0xab, 0xac, 0xa6, 0x9c, 0x9d, 0x9c, + 0x98, 0x9c, 0xa2, 0x9d, 0x97, 0x9a, 0x9b, 0x9c, 0x9d, 0xa2, 0xa2, 0xab, + 0xb5, 0xae, 0xa4, 0x9c, 0x99, 0x98, 0xa2, 0xa9, 0xb1, 0xb7, 0xb7, 0xb2, + 0xb0, 0xb2, 0xaa, 0xa6, 0xac, 0xa9, 0xaa, 0xb1, 0xb9, 0xb7, 0xb1, 0xaa, + 0xa7, 0xa7, 0xa3, 0x9c, 0x9b, 0x99, 0x9f, 0x9d, 0x9e, 0xa9, 0xb0, 0xaf, + 0xa8, 0xa3, 0xa1, 0xa2, 0xa7, 0xa5, 0xa0, 0xa3, 0x9b, 0x98, 0x9f, 
0xa7, + 0xa2, 0xa9, 0xae, 0xb8, 0xc7, 0xc7, 0xc1, 0xc5, 0xcf, 0xd2, 0xcf, 0xcb, + 0xcc, 0xcc, 0xc6, 0xc6, 0xbd, 0xbc, 0xc2, 0xca, 0xc5, 0xc2, 0xc7, 0xcf, + 0xcc, 0xc7, 0xc7, 0xc7, 0xc8, 0xd0, 0xcb, 0xce, 0xcf, 0xce, 0xc6, 0xc2, + 0xc1, 0xc2, 0xbf, 0xc2, 0xcd, 0xcb, 0xc9, 0xc7, 0xc9, 0xcf, 0xc7, 0xc3, + 0xc7, 0xcd, 0xdb, 0xd8, 0xd0, 0xca, 0xcc, 0xd2, 0xa4, 0xad, 0xd9, 0x0c, + 0x17, 0x11, 0x05, 0xfe, 0xef, 0xe6, 0xe2, 0xde, 0xdc, 0xd3, 0xcb, 0xca, + 0xcf, 0xd6, 0xea, 0xfc, 0x0c, 0x1b, 0x23, 0x30, 0x34, 0x39, 0x39, 0x39, + 0x3e, 0x3d, 0x39, 0x38, 0x3d, 0x42, 0x46, 0x43, 0x40, 0x3f, 0x44, 0x45, + 0x45, 0x48, 0x48, 0x49, 0x49, 0x4a, 0x4d, 0x4e, 0x4f, 0x53, 0x55, 0x51, + 0x4e, 0x51, 0x52, 0x51, 0x4d, 0x4c, 0x4f, 0x52, 0x53, 0x4e, 0x4f, 0x52, + 0x51, 0x52, 0x51, 0x51, 0x57, 0x58, 0x59, 0x5a, 0x5b, 0x5c, 0x5e, 0x60, + 0x5d, 0x5d, 0x5d, 0x5d, 0x5d, 0x5a, 0x54, 0x4e, 0x44, 0x2d, 0x0f, 0xe8, + 0xca, 0xb7, 0xac, 0xa1, 0xa2, 0xa7, 0xad, 0xad, 0xb1, 0xa9, 0xa1, 0xaf, + 0xb7, 0xaf, 0xa6, 0xa1, 0xa7, 0xa0, 0x9f, 0x9c, 0x9c, 0x9b, 0xa7, 0xa5, + 0x9d, 0xa7, 0xa6, 0x9e, 0xa3, 0xa7, 0xa3, 0xa0, 0xad, 0xac, 0x9f, 0x99, + 0x97, 0x96, 0xa2, 0xac, 0xb0, 0xb6, 0xb7, 0xb2, 0xb1, 0xb2, 0xab, 0xab, + 0xb5, 0xb1, 0xac, 0xad, 0xb4, 0xbb, 0xbb, 0xac, 0xa7, 0xa6, 0xa3, 0x9d, + 0xa1, 0xa6, 0xa3, 0xa0, 0x9c, 0xa1, 0xa2, 0xa0, 0x9f, 0xa4, 0xa8, 0xab, + 0xaa, 0xa7, 0xa6, 0xa7, 0xa4, 0x9a, 0xa2, 0xb5, 0xb0, 0xa8, 0xae, 0xb1, + 0xc4, 0xcd, 0xc3, 0xc9, 0xd7, 0xd7, 0xd2, 0xc9, 0xce, 0xd2, 0xc0, 0xbc, + 0xbc, 0xb8, 0xb7, 0xc4, 0xcd, 0xd0, 0xc2, 0xc7, 0xc7, 0xc2, 0xc0, 0xc1, + 0xca, 0xd7, 0xd9, 0xd6, 0xd4, 0xd7, 0xd2, 0xce, 0xcc, 0xc7, 0xb7, 0xc8, + 0xd2, 0xd3, 0xd0, 0xc2, 0xc9, 0xcc, 0xcc, 0xcf, 0xc5, 0xc7, 0xc9, 0xc7, + 0xc8, 0xc3, 0xc2, 0xcc, 0xa4, 0xb1, 0xf4, 0x1f, 0x1e, 0x18, 0x11, 0x03, + 0x00, 0xf7, 0xf2, 0xff, 0x0b, 0x01, 0xe5, 0xdc, 0xda, 0xd7, 0xd9, 0xd9, + 0xe7, 0xfa, 0x01, 0x0f, 0x1b, 0x1b, 0x26, 0x26, 0x25, 0x2d, 0x2a, 0x28, + 0x2a, 0x31, 0x39, 0x3d, 0x3f, 0x3f, 0x41, 0x42, 0x44, 0x44, 0x42, 
0x44, + 0x47, 0x4c, 0x4c, 0x4c, 0x50, 0x52, 0x52, 0x51, 0x52, 0x52, 0x52, 0x52, + 0x50, 0x4d, 0x4e, 0x52, 0x55, 0x52, 0x50, 0x52, 0x54, 0x55, 0x52, 0x55, + 0x58, 0x58, 0x58, 0x5a, 0x5b, 0x5d, 0x5f, 0x61, 0x5f, 0x5d, 0x5d, 0x5e, + 0x5d, 0x5d, 0x5a, 0x54, 0x48, 0x37, 0x20, 0x05, 0xe5, 0xc6, 0xb0, 0xa9, + 0xa9, 0xa7, 0xb0, 0xb1, 0xb1, 0xac, 0xa3, 0xa6, 0xa5, 0x9e, 0x9c, 0x9e, + 0xa1, 0x9c, 0x9c, 0x9b, 0x9f, 0x9f, 0xaa, 0xac, 0xa7, 0xb1, 0xb6, 0xaa, + 0xa7, 0xa6, 0xa1, 0xa2, 0xa7, 0xa4, 0x9b, 0x9b, 0x9a, 0x9a, 0xaa, 0xa9, + 0xac, 0xb1, 0xb3, 0xb5, 0xb5, 0xb1, 0xad, 0xad, 0xb0, 0xb3, 0xac, 0xaa, + 0xac, 0xb4, 0xbb, 0xb7, 0xad, 0xa5, 0xa7, 0xa2, 0xb1, 0xb7, 0xa3, 0xa1, + 0x9e, 0xa1, 0x9f, 0xa0, 0xa1, 0xae, 0xae, 0xaf, 0xab, 0xa8, 0xab, 0xac, + 0xa7, 0xa1, 0xa9, 0xb8, 0xbe, 0xbc, 0xbc, 0xb7, 0xc2, 0xd2, 0xcb, 0xd3, + 0xd0, 0xcb, 0xc6, 0xcc, 0xd7, 0xd3, 0xc2, 0xbc, 0xb7, 0xb8, 0xbd, 0xc2, + 0xcc, 0xcc, 0xbc, 0xb8, 0xc5, 0xc5, 0xc1, 0xbc, 0xc2, 0xcc, 0xce, 0xd4, + 0xd6, 0xd4, 0xd6, 0xd0, 0xcf, 0xc5, 0xc0, 0xd3, 0xda, 0xe1, 0xe3, 0xdb, + 0xdc, 0xd7, 0xd4, 0xd2, 0xcb, 0xc9, 0xc2, 0xc6, 0xc8, 0xca, 0xc3, 0xc4, + 0xb2, 0xd4, 0x06, 0x23, 0x27, 0x21, 0x18, 0x12, 0x0f, 0x07, 0x0c, 0x17, + 0x17, 0x07, 0xfd, 0xf3, 0xee, 0xe9, 0xe6, 0xe3, 0xdc, 0xdc, 0xd7, 0xdc, + 0xf2, 0xf7, 0xfa, 0xfa, 0xf1, 0xfa, 0xff, 0x06, 0x0c, 0x19, 0x27, 0x32, + 0x37, 0x39, 0x3b, 0x3d, 0x41, 0x42, 0x43, 0x46, 0x45, 0x47, 0x4c, 0x50, + 0x55, 0x56, 0x55, 0x52, 0x52, 0x55, 0x54, 0x54, 0x54, 0x52, 0x4e, 0x52, + 0x54, 0x55, 0x52, 0x52, 0x52, 0x57, 0x56, 0x56, 0x58, 0x5a, 0x58, 0x5b, + 0x5b, 0x5b, 0x5d, 0x60, 0x5f, 0x5e, 0x5f, 0x5f, 0x5d, 0x5d, 0x5b, 0x5a, + 0x54, 0x40, 0x26, 0x11, 0xf0, 0xd9, 0xc3, 0xb0, 0xa1, 0x9c, 0xa5, 0xaf, + 0xaf, 0xb0, 0xa5, 0xa1, 0x9e, 0x99, 0x97, 0x9c, 0x9e, 0x99, 0x99, 0x9c, + 0xa5, 0xa8, 0xae, 0xad, 0xaa, 0xaf, 0xb4, 0xae, 0xaa, 0xac, 0xa7, 0xa7, + 0xac, 0x9b, 0x98, 0x9c, 0x9c, 0x9f, 0xac, 0xa2, 0xaa, 0xb7, 0xb7, 0xb7, + 0xb7, 0xae, 0xad, 0xac, 0xab, 0xb4, 0xb4, 0xae, 0xaa, 0xac, 0xaf, 
0xb7, + 0xb5, 0xae, 0xac, 0xb1, 0xbc, 0xb7, 0xa2, 0x9f, 0x9e, 0xa2, 0xa1, 0xa3, + 0xa3, 0xb5, 0xb3, 0xae, 0xab, 0xac, 0xae, 0xae, 0xa9, 0xa8, 0xab, 0xb2, + 0xc4, 0xd1, 0xc8, 0xbd, 0xc5, 0xd8, 0xd6, 0xd4, 0xd1, 0xcc, 0xc7, 0xc5, + 0xc9, 0xd0, 0xc2, 0xb7, 0xb8, 0xc2, 0xb9, 0xc5, 0xcd, 0xc9, 0xbc, 0xbf, + 0xcd, 0xc7, 0xc8, 0xc7, 0xc7, 0xc7, 0xc7, 0xcb, 0xce, 0xce, 0xcc, 0xca, + 0xc8, 0xc1, 0xc9, 0xd2, 0xd3, 0xdc, 0xda, 0xd6, 0xd1, 0xc7, 0xc2, 0xc5, + 0xc7, 0xca, 0xc9, 0xca, 0xd0, 0xd4, 0xcd, 0xcb, 0xdd, 0xfc, 0xe8, 0x12, + 0x26, 0x22, 0x1d, 0x1c, 0x1a, 0x19, 0x1e, 0x19, 0x16, 0xe7, 0xf2, 0xf5, + 0xf3, 0xea, 0xe2, 0xee, 0xda, 0xd4, 0xe9, 0xfb, 0xf1, 0xf9, 0xf4, 0xe3, + 0xd7, 0xca, 0xd4, 0xe1, 0xe1, 0xf2, 0x10, 0x27, 0x2f, 0x32, 0x33, 0x36, + 0x37, 0x39, 0x40, 0x42, 0x46, 0x47, 0x48, 0x4d, 0x52, 0x52, 0x53, 0x52, + 0x52, 0x54, 0x57, 0x57, 0x55, 0x53, 0x52, 0x52, 0x54, 0x56, 0x54, 0x52, + 0x54, 0x57, 0x58, 0x58, 0x58, 0x59, 0x5a, 0x5c, 0x5a, 0x5c, 0x5e, 0x60, + 0x60, 0x60, 0x5f, 0x5f, 0x5d, 0x5c, 0x5b, 0x5b, 0x58, 0x52, 0x3d, 0x1e, + 0xf6, 0xdc, 0xc4, 0xbb, 0xa7, 0x9c, 0x9b, 0xa6, 0xb1, 0xb2, 0xa8, 0xa0, + 0x9e, 0x99, 0x96, 0x99, 0xa1, 0x9e, 0x9f, 0x9e, 0xa6, 0xa8, 0xa4, 0xa0, + 0xa1, 0xab, 0xac, 0xab, 0xab, 0xab, 0xa9, 0xac, 0xa4, 0x9c, 0x9a, 0x9c, + 0xa0, 0xa5, 0xab, 0x9c, 0x9f, 0xaa, 0xb5, 0xb8, 0xb6, 0xb1, 0xb0, 0xac, + 0xae, 0xb3, 0xb7, 0xb4, 0xb1, 0xb1, 0xb1, 0xb4, 0xb4, 0xb0, 0xb3, 0xb9, + 0xc2, 0xb5, 0xa4, 0xa4, 0xa3, 0xa1, 0xa4, 0xa4, 0xa3, 0xac, 0xb3, 0xb2, + 0xaf, 0xb0, 0xb4, 0xb1, 0xae, 0xb6, 0xb3, 0xb0, 0xb7, 0xbc, 0xc2, 0xc2, + 0xbc, 0xcc, 0xcd, 0xcd, 0xc7, 0xc1, 0xc2, 0xbb, 0xb2, 0xba, 0xb9, 0xb0, + 0xbc, 0xc2, 0xb7, 0xba, 0xbe, 0xc4, 0xc1, 0xbd, 0xc2, 0xc3, 0xc0, 0xc0, + 0xc5, 0xc8, 0xc9, 0xca, 0xce, 0xcd, 0xcc, 0xc8, 0xc2, 0xc6, 0xce, 0xd7, + 0xd2, 0xd6, 0xcd, 0xcb, 0xcd, 0xcd, 0xc7, 0xc8, 0xc8, 0xca, 0xd7, 0xd8, + 0xdc, 0xd6, 0xd1, 0xd0, 0x08, 0x0d, 0xca, 0xdc, 0x02, 0x16, 0x1f, 0x23, + 0x25, 0x20, 0x12, 0x12, 0xfc, 0xd0, 0xd5, 0xd9, 0xdf, 0xe0, 0xf1, 
0x0c, + 0xfe, 0xf8, 0x07, 0x0a, 0x04, 0xfe, 0xf8, 0xdd, 0xd0, 0xcd, 0xe8, 0xf0, + 0xeb, 0xd2, 0xef, 0x1d, 0x2a, 0x30, 0x32, 0x32, 0x32, 0x32, 0x37, 0x3e, + 0x42, 0x43, 0x44, 0x47, 0x4d, 0x52, 0x52, 0x52, 0x53, 0x55, 0x54, 0x55, + 0x54, 0x53, 0x56, 0x54, 0x54, 0x55, 0x53, 0x53, 0x56, 0x58, 0x58, 0x59, + 0x59, 0x5a, 0x5b, 0x5b, 0x5b, 0x5d, 0x5f, 0x60, 0x60, 0x60, 0x60, 0x5f, + 0x5e, 0x5d, 0x5d, 0x5c, 0x5a, 0x57, 0x51, 0x3a, 0x16, 0xe8, 0xc4, 0xb3, + 0xb0, 0xa1, 0x9a, 0x9e, 0xac, 0xb2, 0xaf, 0xa8, 0xa4, 0x9b, 0x98, 0x9b, + 0xa3, 0xa5, 0xa3, 0xa9, 0x9e, 0x9e, 0x9d, 0x9e, 0xa2, 0xb3, 0xb0, 0xad, + 0xab, 0xa9, 0xac, 0xb2, 0xa3, 0x9c, 0x9c, 0xa1, 0xa1, 0xa7, 0xa2, 0x9c, + 0x9e, 0x9e, 0xa9, 0xb1, 0xb3, 0xb1, 0xad, 0xa8, 0xad, 0xb4, 0xb7, 0xb1, + 0xac, 0xb1, 0xb0, 0xb1, 0xb1, 0xad, 0xaf, 0xb1, 0xb8, 0xb9, 0xb0, 0xa7, + 0xa4, 0xa1, 0xa2, 0xa2, 0xa1, 0xa1, 0xab, 0xb3, 0xb4, 0xb4, 0xb3, 0xb2, + 0xb4, 0xba, 0xb4, 0xaf, 0xaa, 0xa5, 0xad, 0xaf, 0xb5, 0xc7, 0xb8, 0xc7, + 0xc5, 0xba, 0xb7, 0xb4, 0xb4, 0xb9, 0xb2, 0xad, 0xb4, 0xbf, 0xbc, 0xb5, + 0xb5, 0xb7, 0xc3, 0xd2, 0xc7, 0xbe, 0xc1, 0xc3, 0xdb, 0xe2, 0xd7, 0xd4, + 0xd2, 0xd2, 0xd7, 0xd5, 0xd1, 0xd3, 0xd1, 0xd2, 0xd3, 0xd5, 0xc7, 0xc2, + 0xc7, 0xc5, 0xbf, 0xbf, 0xc1, 0xc1, 0xcc, 0xdc, 0xd2, 0xc7, 0xd6, 0xd8, + 0x1d, 0x1d, 0xd5, 0xb0, 0xba, 0xcc, 0xe8, 0xfc, 0x07, 0x02, 0x07, 0x15, + 0xf6, 0xe6, 0xdf, 0xd2, 0xd7, 0xe5, 0xfb, 0x09, 0x0d, 0x0b, 0x10, 0x0f, + 0x06, 0xfb, 0xee, 0xd5, 0xd9, 0xf2, 0x09, 0x04, 0xf1, 0xd5, 0xe4, 0x1a, + 0x2a, 0x30, 0x33, 0x34, 0x33, 0x37, 0x39, 0x3d, 0x41, 0x44, 0x47, 0x4a, + 0x4c, 0x52, 0x55, 0x53, 0x53, 0x57, 0x57, 0x57, 0x53, 0x51, 0x52, 0x53, + 0x54, 0x55, 0x55, 0x53, 0x55, 0x57, 0x58, 0x5b, 0x5a, 0x5d, 0x5c, 0x5b, + 0x5c, 0x5e, 0x5f, 0x5f, 0x5d, 0x5f, 0x60, 0x5f, 0x5f, 0x5e, 0x5e, 0x5d, + 0x5a, 0x58, 0x54, 0x4a, 0x32, 0xfc, 0xcc, 0xb3, 0xb2, 0xaa, 0x9c, 0x9d, + 0xa3, 0xab, 0xa7, 0xa5, 0xa3, 0x9a, 0x9d, 0x9d, 0x9f, 0xa1, 0xaa, 0xb2, + 0xad, 0xa7, 0xa7, 0xa6, 0xa1, 0xa7, 0xb7, 0xb2, 0xaf, 0xac, 0xa7, 
0xa5, + 0xa0, 0x9f, 0x9c, 0x9e, 0xa5, 0xac, 0xa0, 0x9a, 0x99, 0x9b, 0xa4, 0xae, + 0xb6, 0xb1, 0xac, 0xac, 0xb1, 0xb2, 0xb2, 0xb3, 0xac, 0xae, 0xac, 0xab, + 0xac, 0xaa, 0xaf, 0xb3, 0xb7, 0xbf, 0xc9, 0xb6, 0xa8, 0xa2, 0xa1, 0xa1, + 0xa2, 0xa0, 0x9c, 0xa5, 0xb3, 0xb8, 0xb6, 0xbb, 0xb7, 0xb5, 0xb4, 0xba, + 0xb4, 0xa9, 0xa9, 0xae, 0xc0, 0xc4, 0xc0, 0xc4, 0xbc, 0xc2, 0xbf, 0xab, + 0xac, 0xaf, 0xaf, 0xbf, 0xb9, 0xbc, 0xc0, 0xb1, 0xba, 0xb8, 0xb8, 0xc9, + 0xd5, 0xc4, 0xbd, 0xcc, 0xdd, 0xdc, 0xd9, 0xda, 0xd2, 0xd7, 0xe7, 0xe2, + 0xe1, 0xda, 0xd6, 0xd3, 0xd7, 0xdc, 0xd3, 0xc4, 0xc2, 0xc1, 0xc7, 0xc7, + 0xc7, 0xc4, 0xd2, 0xd7, 0xd3, 0xc6, 0xd0, 0xd6, 0x27, 0x25, 0x01, 0xb7, + 0xab, 0xaf, 0xb9, 0xc6, 0xe0, 0xed, 0xf9, 0x02, 0xf2, 0xe5, 0xd7, 0xc9, + 0xd4, 0xdc, 0xe5, 0xf2, 0xfb, 0x01, 0x06, 0x06, 0xfe, 0xfc, 0xf9, 0xf7, + 0x05, 0x12, 0x0c, 0xf4, 0xd6, 0xd7, 0xf7, 0x1e, 0x2b, 0x32, 0x30, 0x32, + 0x32, 0x36, 0x39, 0x3c, 0x3d, 0x43, 0x48, 0x4d, 0x4c, 0x51, 0x56, 0x56, + 0x55, 0x57, 0x58, 0x58, 0x56, 0x52, 0x51, 0x52, 0x53, 0x55, 0x57, 0x54, + 0x55, 0x57, 0x57, 0x5a, 0x5b, 0x5d, 0x5d, 0x5c, 0x5e, 0x5d, 0x5d, 0x5f, + 0x5f, 0x5e, 0x5f, 0x60, 0x60, 0x5e, 0x5f, 0x5d, 0x5a, 0x57, 0x53, 0x4d, + 0x47, 0x2c, 0xef, 0xc3, 0xba, 0xac, 0xa1, 0x9c, 0x9d, 0xa4, 0xa1, 0xa1, + 0x9f, 0x9a, 0xa1, 0xa3, 0xa1, 0xa7, 0xb6, 0xb9, 0xb7, 0xb4, 0xa7, 0xa2, + 0xa2, 0xa2, 0xa9, 0xa5, 0x9f, 0x9c, 0x98, 0x99, 0x9b, 0x9e, 0x9f, 0xa1, + 0xa8, 0xab, 0x9c, 0x99, 0x96, 0x95, 0x9c, 0xa9, 0xb1, 0xaf, 0xac, 0xae, + 0xb2, 0xad, 0xb0, 0xb2, 0xad, 0xab, 0xaa, 0xa8, 0xa9, 0xa7, 0xac, 0xaf, + 0xaf, 0xb1, 0xc2, 0xc4, 0xaf, 0xa4, 0xa6, 0xa2, 0xa1, 0xa0, 0x9a, 0x9a, + 0xa1, 0xb1, 0xb9, 0xae, 0xad, 0xb0, 0xb5, 0xc0, 0xbc, 0xaa, 0xaa, 0xbc, + 0xc2, 0xb7, 0xc2, 0xb9, 0xac, 0xb1, 0xb6, 0xa7, 0xab, 0xb0, 0xb0, 0xc2, + 0xc2, 0xbd, 0xb9, 0xb3, 0xaf, 0xb1, 0xbb, 0xbe, 0xc0, 0xc0, 0xbb, 0xc4, + 0xbc, 0xbe, 0xc2, 0xc5, 0xc5, 0xca, 0xd6, 0xd7, 0xe0, 0xd7, 0xd4, 0xd7, + 0xd1, 0xd5, 0xd6, 0xd8, 0xd1, 0xc7, 0xd8, 0xd9, 0xd2, 0xcd, 0xcc, 
0xcf, + 0xd8, 0xc7, 0xc4, 0xc6, 0x2d, 0x2e, 0x24, 0xf3, 0xc2, 0xb1, 0xb0, 0xb8, + 0xc2, 0xc4, 0xcb, 0xd3, 0xd4, 0xcf, 0xcc, 0xc8, 0xcf, 0xd0, 0xce, 0xd3, + 0xdc, 0xec, 0xf5, 0x01, 0x04, 0x05, 0x05, 0x03, 0x01, 0xf2, 0xe5, 0xd9, + 0xe7, 0xfd, 0x11, 0x1e, 0x29, 0x31, 0x30, 0x32, 0x34, 0x37, 0x3a, 0x3c, + 0x3c, 0x40, 0x46, 0x4d, 0x50, 0x52, 0x55, 0x56, 0x57, 0x57, 0x57, 0x57, + 0x57, 0x54, 0x52, 0x52, 0x52, 0x55, 0x57, 0x56, 0x55, 0x57, 0x57, 0x58, + 0x5a, 0x5e, 0x5d, 0x5c, 0x5a, 0x5c, 0x5d, 0x60, 0x60, 0x61, 0x60, 0x60, + 0x5f, 0x5f, 0x5e, 0x5d, 0x5c, 0x59, 0x54, 0x4d, 0x47, 0x3f, 0x20, 0xe4, + 0xbb, 0xa9, 0xa0, 0x9f, 0x9f, 0x9e, 0xa1, 0xa1, 0xa1, 0xa3, 0xa6, 0xa8, + 0xa9, 0xb4, 0xb9, 0xb7, 0xb5, 0xb7, 0xb0, 0xa5, 0xa1, 0xa3, 0xac, 0xa7, + 0xa1, 0x9d, 0x9a, 0x9b, 0x99, 0x9a, 0xa3, 0xa8, 0xae, 0xa9, 0x9c, 0x98, + 0x96, 0x99, 0x9e, 0xa8, 0xb1, 0xb3, 0xae, 0xb0, 0xae, 0xac, 0xb3, 0xb3, + 0xb1, 0xab, 0xa9, 0xab, 0xad, 0xa9, 0xaa, 0xa8, 0xac, 0xb1, 0xb5, 0xbc, + 0xb1, 0xa8, 0xa6, 0xa6, 0xa1, 0x9e, 0x9b, 0x98, 0x97, 0x9c, 0xa8, 0x9c, + 0xa7, 0xa0, 0xab, 0xc0, 0xb8, 0xac, 0xad, 0xb7, 0xb1, 0xb1, 0xb3, 0xa9, + 0xa7, 0xa8, 0xb5, 0xb1, 0xae, 0xb0, 0xae, 0xb5, 0xbb, 0xbc, 0xb0, 0xb0, + 0xaf, 0xb7, 0xba, 0xb9, 0xb7, 0xbb, 0xc6, 0xbe, 0xbc, 0xbc, 0xc1, 0xbd, + 0xbd, 0xc3, 0xc1, 0xc4, 0xd2, 0xd1, 0xd7, 0xd2, 0xd2, 0xd8, 0xd2, 0xd9, + 0xd4, 0xc2, 0xcf, 0xda, 0xce, 0xc6, 0xbf, 0xc2, 0xcd, 0xbc, 0xbc, 0xbd, + 0x31, 0x34, 0x34, 0x21, 0xfa, 0xda, 0xc9, 0xc2, 0xbe, 0xba, 0xbe, 0xc6, + 0xc6, 0xc5, 0xc2, 0xcb, 0xe0, 0xe9, 0xe3, 0xd6, 0xd2, 0xd2, 0xd1, 0xd6, + 0xd8, 0xd8, 0xd6, 0xdb, 0xd9, 0xdb, 0xf2, 0x0a, 0x1a, 0x1a, 0x1d, 0x22, + 0x29, 0x2f, 0x32, 0x34, 0x35, 0x37, 0x37, 0x3a, 0x3c, 0x3f, 0x43, 0x4b, + 0x4e, 0x51, 0x55, 0x57, 0x57, 0x58, 0x56, 0x56, 0x56, 0x54, 0x55, 0x52, + 0x52, 0x54, 0x57, 0x55, 0x54, 0x57, 0x57, 0x57, 0x59, 0x5d, 0x5e, 0x5d, + 0x5b, 0x5b, 0x5d, 0x60, 0x60, 0x60, 0x60, 0x5f, 0x5f, 0x5e, 0x5e, 0x5d, + 0x5c, 0x5a, 0x57, 0x52, 0x4a, 0x40, 0x2d, 0x12, 0xe2, 0xb7, 0xa3, 
0xa1, + 0x9d, 0x9e, 0xa1, 0x9f, 0xa6, 0xa7, 0xa5, 0xa4, 0xb1, 0xb8, 0xb9, 0xb6, + 0xb4, 0xb2, 0xb1, 0xae, 0xa7, 0xa3, 0xa7, 0xaa, 0xa8, 0xa2, 0x9d, 0xa0, + 0x9f, 0x9c, 0xa1, 0xa7, 0xb1, 0xa7, 0x9c, 0x97, 0x96, 0x9b, 0xa1, 0xa8, + 0xb3, 0xb7, 0xaf, 0xb1, 0xab, 0xae, 0xb2, 0xb2, 0xb1, 0xad, 0xab, 0xab, + 0xae, 0xad, 0xa9, 0xa7, 0xad, 0xb1, 0xad, 0xb6, 0xb0, 0xa7, 0xaa, 0xaa, + 0xa3, 0x9e, 0x9a, 0x9b, 0x9c, 0xa1, 0xac, 0xa1, 0xb0, 0xa1, 0xa5, 0xbf, + 0xb6, 0xb1, 0xbd, 0xb7, 0xaa, 0xb1, 0xb2, 0xa9, 0xa7, 0xa9, 0xb5, 0xbc, + 0xae, 0xb0, 0xb3, 0xb4, 0xbd, 0xbd, 0xb3, 0xb1, 0xb1, 0xb8, 0xbd, 0xbd, + 0xbd, 0xba, 0xbe, 0xc4, 0xc9, 0xc5, 0xc6, 0xc9, 0xd4, 0xd3, 0xbe, 0xbb, + 0xcd, 0xcf, 0xcc, 0xc7, 0xd0, 0xd0, 0xc3, 0xcd, 0xd2, 0xc0, 0xc2, 0xcc, + 0xbf, 0xb8, 0xb6, 0xb3, 0xb4, 0xb6, 0xc0, 0xbc, 0x32, 0x37, 0x36, 0x30, + 0x1c, 0x01, 0xed, 0xe7, 0xe6, 0xde, 0xdc, 0xd8, 0xd4, 0xd1, 0xd5, 0xef, + 0x0c, 0x1f, 0x22, 0x22, 0x19, 0x11, 0x0a, 0x0b, 0x09, 0x0a, 0x0c, 0x12, + 0x19, 0x22, 0x27, 0x26, 0x29, 0x27, 0x24, 0x27, 0x29, 0x2a, 0x31, 0x34, + 0x32, 0x33, 0x34, 0x36, 0x3b, 0x3d, 0x44, 0x49, 0x4d, 0x52, 0x55, 0x55, + 0x53, 0x57, 0x57, 0x57, 0x55, 0x53, 0x53, 0x53, 0x53, 0x56, 0x57, 0x56, + 0x54, 0x56, 0x57, 0x57, 0x57, 0x5b, 0x5d, 0x5f, 0x5d, 0x5b, 0x5d, 0x61, + 0x60, 0x60, 0x61, 0x60, 0x60, 0x5f, 0x5e, 0x5e, 0x5c, 0x5a, 0x57, 0x53, + 0x4b, 0x44, 0x39, 0x1d, 0x05, 0xe3, 0xb7, 0xa4, 0xa1, 0xa3, 0xa6, 0xa8, + 0xac, 0xab, 0xae, 0xad, 0xb1, 0xb9, 0xb7, 0xb2, 0xb3, 0xb1, 0xaf, 0xad, + 0xac, 0xa9, 0xa6, 0xa8, 0xa8, 0xa3, 0xa1, 0xa2, 0xa7, 0xa3, 0xa2, 0xaa, + 0xb0, 0xa6, 0x9f, 0x97, 0x97, 0x9d, 0x9d, 0xa1, 0xb2, 0xb6, 0xb0, 0xb1, + 0xaa, 0xb0, 0xb3, 0xb6, 0xb6, 0xb1, 0xac, 0xa8, 0xae, 0xb3, 0xad, 0xad, + 0xb1, 0xb1, 0xad, 0xb1, 0xb5, 0xa6, 0xa7, 0xa7, 0xa7, 0xa3, 0xa6, 0xae, + 0xb0, 0xaa, 0xae, 0xad, 0xb2, 0xac, 0xb9, 0xd9, 0xc1, 0xb7, 0xc2, 0xb2, + 0xb1, 0xb7, 0xb0, 0xaa, 0xb0, 0xb1, 0xb4, 0xc4, 0xb8, 0xb0, 0xb1, 0xb6, + 0xc2, 0xc1, 0xbc, 0xb9, 0xb6, 0xb7, 0xbc, 0xbb, 0xb8, 0xba, 0xb8, 
0xb7, + 0xbe, 0xc2, 0xc8, 0xc9, 0xca, 0xd5, 0xd9, 0xd2, 0xd5, 0xc9, 0xc4, 0xcb, + 0xc7, 0xc4, 0xc4, 0xce, 0xcf, 0xc2, 0xbc, 0xc1, 0xba, 0xbe, 0xbc, 0xb6, + 0xb5, 0xba, 0xc5, 0xba, 0x34, 0x37, 0x36, 0x36, 0x31, 0x23, 0x0e, 0xfd, + 0xfc, 0x00, 0x04, 0x07, 0x05, 0x06, 0x0d, 0x1d, 0x24, 0x24, 0x24, 0x23, + 0x23, 0x24, 0x25, 0x25, 0x27, 0x27, 0x28, 0x26, 0x27, 0x29, 0x2b, 0x2c, + 0x2d, 0x2c, 0x29, 0x2a, 0x2a, 0x2b, 0x2d, 0x2b, 0x24, 0x22, 0x2d, 0x32, + 0x3c, 0x40, 0x47, 0x4c, 0x50, 0x54, 0x52, 0x51, 0x51, 0x53, 0x56, 0x54, + 0x53, 0x55, 0x54, 0x55, 0x55, 0x57, 0x58, 0x56, 0x57, 0x53, 0x57, 0x57, + 0x58, 0x58, 0x5c, 0x5e, 0x5f, 0x5d, 0x5f, 0x60, 0x61, 0x61, 0x61, 0x61, + 0x60, 0x5f, 0x5e, 0x5f, 0x5d, 0x5c, 0x58, 0x53, 0x45, 0x3d, 0x3d, 0x27, + 0x04, 0xf9, 0xe2, 0xc2, 0xac, 0xa7, 0xa7, 0xa9, 0xac, 0xb0, 0xb7, 0xb6, + 0xaf, 0xb3, 0xb0, 0xad, 0xac, 0xab, 0xaa, 0xa6, 0xa8, 0xab, 0xa9, 0xa7, + 0xa4, 0xa4, 0xa7, 0xa8, 0xa7, 0xa2, 0xa0, 0xad, 0xaf, 0x9f, 0x99, 0x9a, + 0x9c, 0xa0, 0x9c, 0x9a, 0xa6, 0xb3, 0xb3, 0xb1, 0xaf, 0xb2, 0xb4, 0xb7, + 0xb6, 0xb1, 0xaa, 0xa9, 0xb0, 0xbb, 0xaa, 0xae, 0xb1, 0xb6, 0xb1, 0xac, + 0xb6, 0xae, 0xaa, 0xa8, 0xa9, 0xac, 0xb1, 0xbc, 0xc1, 0xc0, 0xb9, 0xb7, + 0xc1, 0xcc, 0xd9, 0xd0, 0xbf, 0xba, 0xbc, 0xb4, 0xb7, 0xbb, 0xb5, 0xad, + 0xac, 0xb1, 0xb4, 0xb7, 0xba, 0xb7, 0xab, 0xab, 0xbc, 0xb9, 0xb6, 0xb7, + 0xb3, 0xb5, 0xbe, 0xbb, 0xc2, 0xc1, 0xc2, 0xc7, 0xbe, 0xbc, 0xc7, 0xc7, + 0xc6, 0xc8, 0xcf, 0xcc, 0xcb, 0xc9, 0xc9, 0xc8, 0xbb, 0xbc, 0xbf, 0xc4, + 0xc9, 0xbe, 0xb9, 0xbf, 0xbd, 0xbc, 0xb6, 0xb5, 0xb7, 0xb8, 0xba, 0xb5, + 0x37, 0x36, 0x34, 0x33, 0x35, 0x32, 0x2c, 0x23, 0x1a, 0x0f, 0x0f, 0x11, + 0x15, 0x17, 0x1b, 0x20, 0x24, 0x22, 0x1d, 0x1d, 0x1a, 0x1d, 0x1f, 0x20, + 0x22, 0x27, 0x25, 0x24, 0x25, 0x25, 0x24, 0x28, 0x2b, 0x2a, 0x2d, 0x2b, + 0x2a, 0x28, 0x29, 0x28, 0x22, 0x22, 0x27, 0x32, 0x3a, 0x43, 0x4c, 0x4d, + 0x50, 0x50, 0x4d, 0x50, 0x4f, 0x51, 0x54, 0x52, 0x50, 0x53, 0x56, 0x56, + 0x56, 0x57, 0x57, 0x57, 0x57, 0x56, 0x57, 0x58, 0x59, 0x59, 0x5b, 
0x5c, + 0x60, 0x5d, 0x5d, 0x5e, 0x60, 0x61, 0x62, 0x61, 0x61, 0x60, 0x5f, 0x5f, + 0x5d, 0x5c, 0x59, 0x55, 0x4f, 0x37, 0x32, 0x2d, 0x02, 0xf1, 0xec, 0xd6, + 0xc7, 0xac, 0xa7, 0xa9, 0xac, 0xaf, 0xae, 0xab, 0xaa, 0xab, 0xaa, 0xaf, + 0xb1, 0xa8, 0xa8, 0xa7, 0xa5, 0xa6, 0xa9, 0xa7, 0xa0, 0xa6, 0xa8, 0xa7, + 0xa2, 0xa3, 0xa1, 0xad, 0xaa, 0xa1, 0x9d, 0xa2, 0xa7, 0xa1, 0x9f, 0x9e, + 0x9e, 0xa9, 0xb5, 0xb2, 0xb6, 0xb7, 0xb3, 0xb5, 0xb1, 0xb1, 0xac, 0xa5, + 0xa3, 0xb6, 0xb1, 0xab, 0xaf, 0xb7, 0xb4, 0xb1, 0xb1, 0xb7, 0xb1, 0xac, + 0xb1, 0xbc, 0xc1, 0xbd, 0xbf, 0xc9, 0xcc, 0xc0, 0xbf, 0xc8, 0xc1, 0xb2, + 0xb5, 0xb5, 0xb8, 0xb6, 0xbb, 0xc2, 0xb9, 0xb2, 0xab, 0xac, 0xbb, 0xb4, + 0xb2, 0xb4, 0xb4, 0xb0, 0xb4, 0xb4, 0xad, 0xae, 0xb3, 0xb1, 0xbe, 0xc1, + 0xc9, 0xc2, 0xc0, 0xbc, 0xbf, 0xc2, 0xc5, 0xc6, 0xc7, 0xc4, 0xc8, 0xc6, + 0xc6, 0xc8, 0xc6, 0xbb, 0xba, 0xbb, 0xba, 0xbb, 0xbb, 0xba, 0xbc, 0xbc, + 0xba, 0xb5, 0xb2, 0xb7, 0xbe, 0xbb, 0xb7, 0xaf, 0x34, 0x36, 0x36, 0x33, + 0x33, 0x32, 0x32, 0x32, 0x2f, 0x2b, 0x26, 0x22, 0x20, 0x1f, 0x1d, 0x1b, + 0x18, 0x19, 0x19, 0x1a, 0x19, 0x18, 0x1a, 0x1b, 0x1c, 0x21, 0x20, 0x1e, + 0x21, 0x23, 0x23, 0x24, 0x27, 0x29, 0x2b, 0x2b, 0x2b, 0x2e, 0x30, 0x33, + 0x2f, 0x31, 0x32, 0x38, 0x3f, 0x46, 0x4b, 0x4d, 0x51, 0x4f, 0x4e, 0x4d, + 0x4e, 0x50, 0x52, 0x52, 0x4f, 0x51, 0x56, 0x57, 0x57, 0x56, 0x57, 0x57, + 0x57, 0x59, 0x57, 0x58, 0x5a, 0x58, 0x5a, 0x5a, 0x5e, 0x5f, 0x5a, 0x5d, + 0x60, 0x62, 0x62, 0x61, 0x62, 0x61, 0x5f, 0x5f, 0x5d, 0x5d, 0x5a, 0x57, + 0x52, 0x4b, 0x37, 0x22, 0x0b, 0xf2, 0xe0, 0xd1, 0xbc, 0xb0, 0xa9, 0xa6, + 0xab, 0xa9, 0xa6, 0xa9, 0xa8, 0xa8, 0xac, 0xb4, 0xb5, 0xb0, 0xaa, 0xa5, + 0xa3, 0xa5, 0xa3, 0xa2, 0xa3, 0xab, 0xa6, 0xa5, 0xa3, 0xa8, 0xa8, 0xac, + 0xa6, 0xa1, 0x9f, 0xa7, 0xa5, 0xa1, 0xa1, 0x9e, 0x9f, 0xa1, 0xae, 0xb7, + 0xb7, 0xb4, 0xb2, 0xb4, 0xb1, 0xb1, 0xaf, 0xa6, 0xa2, 0xaf, 0xb7, 0xa9, + 0xaf, 0xb9, 0xb9, 0xb7, 0xb3, 0xb6, 0xb6, 0xaf, 0xbe, 0xc3, 0xc2, 0xc5, + 0xc1, 0xc7, 0xcc, 0xc0, 0xbe, 0xb6, 0xb1, 0xab, 0xb3, 0xb7, 0xb7, 
0xbf, + 0xbc, 0xbd, 0xb3, 0xb4, 0xaf, 0xb3, 0xc2, 0xb2, 0xad, 0xb1, 0xb4, 0xb1, + 0xac, 0xac, 0xab, 0xaa, 0xac, 0xad, 0xb8, 0xbb, 0xc1, 0xc1, 0xbb, 0xb7, + 0xc2, 0xcc, 0xd2, 0xce, 0xcc, 0xc7, 0xc7, 0xc8, 0xc3, 0xbd, 0xbc, 0xb7, + 0xc7, 0xc7, 0xba, 0xbd, 0xc0, 0xb9, 0xc2, 0xc3, 0xbb, 0xb6, 0xb7, 0xb2, + 0xb8, 0xb5, 0xb7, 0xb1, 0x35, 0x35, 0x34, 0x32, 0x32, 0x33, 0x32, 0x31, + 0x30, 0x2d, 0x2b, 0x2b, 0x2a, 0x29, 0x26, 0x22, 0x1a, 0x17, 0x18, 0x1b, + 0x19, 0x1a, 0x1c, 0x1e, 0x1d, 0x21, 0x20, 0x22, 0x25, 0x27, 0x25, 0x25, + 0x28, 0x29, 0x28, 0x29, 0x2a, 0x31, 0x36, 0x3b, 0x39, 0x3d, 0x3d, 0x3f, + 0x46, 0x48, 0x48, 0x4c, 0x51, 0x52, 0x52, 0x4f, 0x4f, 0x4f, 0x52, 0x54, + 0x52, 0x4e, 0x56, 0x59, 0x59, 0x57, 0x56, 0x57, 0x57, 0x59, 0x59, 0x58, + 0x5a, 0x59, 0x59, 0x5a, 0x59, 0x5c, 0x5a, 0x5d, 0x5f, 0x60, 0x62, 0x61, + 0x62, 0x61, 0x60, 0x5d, 0x5d, 0x5c, 0x5b, 0x5a, 0x54, 0x4b, 0x42, 0x36, + 0x12, 0xf5, 0xe0, 0xcb, 0xbe, 0xb1, 0xa9, 0xa9, 0xa5, 0xa2, 0xa5, 0xa8, + 0xa4, 0xa7, 0xac, 0xb1, 0xb2, 0xb0, 0xac, 0xa3, 0xa0, 0x9d, 0x9e, 0x9e, + 0xa4, 0xa7, 0xa7, 0xa5, 0xa9, 0xab, 0xaa, 0xac, 0xa1, 0xa4, 0xa4, 0xa7, + 0xa2, 0xa1, 0xa5, 0xa6, 0xa2, 0xa5, 0xa8, 0xaf, 0xb1, 0xac, 0xb0, 0xb2, + 0xb0, 0xb0, 0xaf, 0xa8, 0xa5, 0xa7, 0xb7, 0xaf, 0xb1, 0xb5, 0xba, 0xba, + 0xb7, 0xb7, 0xb6, 0xb1, 0xb5, 0xc2, 0xc5, 0xba, 0xbb, 0xc7, 0xc7, 0xc0, + 0xb9, 0xb7, 0xb3, 0xb1, 0xbd, 0xbd, 0xbc, 0xc1, 0xc2, 0xbd, 0xb9, 0xb7, + 0xb7, 0xbc, 0xbb, 0xaf, 0xac, 0xad, 0xab, 0xa7, 0xa4, 0xa6, 0xa2, 0xa2, + 0xa8, 0xac, 0xab, 0xb4, 0xbb, 0xb6, 0xb2, 0xb7, 0xbb, 0xcb, 0xdb, 0xd8, + 0xd1, 0xc6, 0xc4, 0xc6, 0xbc, 0xb7, 0xb1, 0xb3, 0xc7, 0xbe, 0xb3, 0xb7, + 0xbb, 0xb0, 0xb8, 0xc3, 0xc2, 0xbf, 0xc1, 0xb9, 0xb7, 0xb6, 0xb4, 0xbc, + 0x33, 0x33, 0x32, 0x31, 0x31, 0x30, 0x2f, 0x30, 0x30, 0x2e, 0x2c, 0x2d, + 0x2e, 0x2b, 0x27, 0x24, 0x1a, 0x15, 0x15, 0x19, 0x1b, 0x1d, 0x20, 0x23, + 0x26, 0x29, 0x29, 0x2a, 0x2d, 0x2d, 0x2c, 0x2d, 0x2c, 0x2a, 0x24, 0x27, + 0x28, 0x2e, 0x36, 0x3e, 0x3d, 0x41, 0x45, 0x46, 0x48, 0x48, 0x44, 
0x48, + 0x4f, 0x52, 0x52, 0x52, 0x4f, 0x52, 0x53, 0x57, 0x52, 0x4f, 0x54, 0x58, + 0x59, 0x58, 0x57, 0x57, 0x57, 0x58, 0x57, 0x58, 0x58, 0x5a, 0x59, 0x5a, + 0x58, 0x59, 0x5c, 0x5d, 0x5e, 0x5e, 0x60, 0x61, 0x62, 0x62, 0x61, 0x5d, + 0x5d, 0x5d, 0x5c, 0x5b, 0x58, 0x52, 0x43, 0x3a, 0x27, 0xf7, 0xe4, 0xd2, + 0xb7, 0xb0, 0xac, 0xaa, 0xac, 0xaf, 0xa7, 0xa8, 0xa7, 0xa5, 0xa7, 0xab, + 0xad, 0xa8, 0xa8, 0xa6, 0xa0, 0x9a, 0xa1, 0xaa, 0xab, 0xa7, 0xac, 0xaa, + 0xae, 0xb1, 0xae, 0xad, 0xa6, 0xa2, 0xa5, 0xa3, 0xa8, 0xa4, 0xa2, 0xa3, + 0xa7, 0xab, 0xb1, 0xb5, 0xae, 0xab, 0xac, 0xac, 0xa8, 0xa9, 0xad, 0xaa, + 0xa4, 0xa9, 0xb5, 0xb7, 0xb7, 0xb7, 0xbb, 0xc2, 0xbb, 0xb9, 0xbc, 0xc2, + 0xc1, 0xc3, 0xc9, 0xc7, 0xc7, 0xc8, 0xc7, 0xc7, 0xc2, 0xbf, 0xc6, 0xbf, + 0xc0, 0xcc, 0xcc, 0xc2, 0xc2, 0xc0, 0xbd, 0xba, 0xba, 0xbb, 0xb4, 0xb4, + 0xb7, 0xa8, 0xac, 0xab, 0xa8, 0xa6, 0xa2, 0xa7, 0xac, 0xb0, 0xb1, 0xb1, + 0xb1, 0xae, 0xb1, 0xb7, 0xbf, 0xcc, 0xd2, 0xd2, 0xd3, 0xc8, 0xbb, 0xbf, + 0xba, 0xb6, 0xb1, 0xb4, 0xc0, 0xc3, 0xbc, 0xb6, 0xb4, 0xb1, 0xb2, 0xbc, + 0xc5, 0xc0, 0xbf, 0xc2, 0xbc, 0xb6, 0xb0, 0xbf, 0x32, 0x33, 0x31, 0x31, + 0x2d, 0x2c, 0x2f, 0x31, 0x2e, 0x2d, 0x2d, 0x2d, 0x2b, 0x25, 0x24, 0x25, + 0x1f, 0x19, 0x17, 0x19, 0x1b, 0x1d, 0x22, 0x28, 0x29, 0x2d, 0x2d, 0x2d, + 0x2b, 0x2c, 0x2e, 0x31, 0x31, 0x2f, 0x27, 0x24, 0x26, 0x2b, 0x35, 0x3d, + 0x40, 0x43, 0x49, 0x4a, 0x47, 0x47, 0x43, 0x48, 0x50, 0x52, 0x53, 0x52, + 0x52, 0x53, 0x56, 0x57, 0x52, 0x4f, 0x54, 0x57, 0x57, 0x5a, 0x58, 0x59, + 0x58, 0x59, 0x57, 0x58, 0x58, 0x5b, 0x59, 0x59, 0x5a, 0x59, 0x5d, 0x5e, + 0x5f, 0x5e, 0x5e, 0x5f, 0x61, 0x62, 0x61, 0x5e, 0x5e, 0x5e, 0x5d, 0x5d, + 0x5c, 0x58, 0x52, 0x44, 0x31, 0x1c, 0xe9, 0xd3, 0xbc, 0xb3, 0xb2, 0xaa, + 0xab, 0xa7, 0xa1, 0xa3, 0xa4, 0xa7, 0xa8, 0xab, 0xaa, 0xa7, 0xa5, 0xa9, + 0xa3, 0x9a, 0x9b, 0xac, 0xb3, 0xae, 0xb0, 0xb0, 0xb1, 0xb4, 0xb0, 0xb1, + 0xaa, 0xaa, 0xad, 0xa4, 0xa8, 0xa7, 0xa3, 0xa7, 0xb3, 0xb7, 0xb5, 0xb1, + 0xab, 0xad, 0xb0, 0xaa, 0xa1, 0x9f, 0xa8, 0xa7, 0xa4, 0xac, 0xb3, 
0xbe, + 0xc0, 0xc0, 0xc4, 0xc3, 0xbc, 0xb8, 0xb8, 0xc2, 0xc5, 0xbe, 0xca, 0xd2, + 0xc9, 0xc9, 0xc6, 0xc9, 0xce, 0xcc, 0xd6, 0xe2, 0xdc, 0xdd, 0xce, 0xc4, + 0xc0, 0xc1, 0xc2, 0xbd, 0xb7, 0xb6, 0xbc, 0xba, 0xb3, 0xab, 0xad, 0xb1, + 0xad, 0xa9, 0xa7, 0xa5, 0xac, 0xbb, 0xb6, 0xae, 0xa9, 0xac, 0xb2, 0xc0, + 0xca, 0xc8, 0xc5, 0xc8, 0xcf, 0xca, 0xbe, 0xbe, 0xb9, 0xb1, 0xb4, 0xbf, + 0xc2, 0xc5, 0xbf, 0xb8, 0xb9, 0xbb, 0xb6, 0xb6, 0xc0, 0xc1, 0xbc, 0xb8, + 0xb9, 0xb9, 0xaa, 0xaf, 0x32, 0x31, 0x2f, 0x31, 0x31, 0x30, 0x30, 0x30, + 0x2f, 0x2d, 0x2e, 0x2d, 0x2d, 0x2b, 0x2e, 0x2b, 0x22, 0x1c, 0x1a, 0x1d, + 0x1f, 0x22, 0x27, 0x2b, 0x2a, 0x2b, 0x29, 0x2a, 0x2b, 0x29, 0x2b, 0x2d, + 0x2d, 0x2d, 0x28, 0x26, 0x23, 0x27, 0x2f, 0x39, 0x41, 0x48, 0x4d, 0x4e, + 0x4a, 0x49, 0x48, 0x4d, 0x51, 0x52, 0x55, 0x52, 0x55, 0x53, 0x55, 0x56, + 0x52, 0x51, 0x56, 0x57, 0x56, 0x59, 0x57, 0x59, 0x58, 0x5a, 0x59, 0x59, + 0x59, 0x5b, 0x59, 0x59, 0x5c, 0x5b, 0x5d, 0x5e, 0x5d, 0x5d, 0x5d, 0x5d, + 0x5f, 0x61, 0x61, 0x60, 0x5f, 0x5e, 0x5d, 0x5d, 0x5d, 0x5c, 0x59, 0x4f, + 0x3a, 0x26, 0x0d, 0xe6, 0xd6, 0xc5, 0xb1, 0xa7, 0xa1, 0xa0, 0xa1, 0xa7, + 0xa5, 0xa3, 0xa7, 0xa7, 0xab, 0xa7, 0xa4, 0xac, 0xa5, 0x9c, 0x9f, 0xb6, + 0xbd, 0xb4, 0xb1, 0xb2, 0xb8, 0xb8, 0xb6, 0xb3, 0xac, 0xb6, 0xc4, 0xb1, + 0xb4, 0xb1, 0xac, 0xb0, 0xb7, 0xbb, 0xb7, 0xb7, 0xaf, 0xac, 0xb9, 0xb5, + 0xad, 0xab, 0xb0, 0xaa, 0xaa, 0xba, 0xc0, 0xca, 0xc1, 0xc5, 0xcc, 0xc0, + 0xb7, 0xb7, 0xc3, 0xc7, 0xbc, 0xbc, 0xcc, 0xd6, 0xd0, 0xd2, 0xc8, 0xd3, + 0xd4, 0xcd, 0xc7, 0xca, 0xce, 0xd1, 0xc6, 0xc2, 0xc2, 0xc4, 0xc7, 0xbe, + 0xbe, 0xb9, 0xc0, 0xb3, 0xb5, 0xba, 0xbb, 0xbd, 0xbb, 0xba, 0xb4, 0xb1, + 0xa7, 0xba, 0xbf, 0xbb, 0xb5, 0xb0, 0xb7, 0xc0, 0xc6, 0xc4, 0xc5, 0xc7, + 0xcb, 0xc6, 0xc0, 0xc0, 0xb1, 0xae, 0xb7, 0xbd, 0xbe, 0xbf, 0xbd, 0xbc, + 0xbc, 0xbc, 0xc2, 0xbf, 0xbd, 0xb9, 0xb2, 0xb6, 0xbc, 0xc3, 0xbe, 0xb5, + 0x31, 0x30, 0x30, 0x31, 0x31, 0x32, 0x32, 0x32, 0x32, 0x32, 0x31, 0x2f, + 0x31, 0x30, 0x31, 0x2d, 0x25, 0x1e, 0x1c, 0x1f, 0x22, 0x25, 0x27, 
0x26, + 0x27, 0x29, 0x27, 0x24, 0x27, 0x27, 0x29, 0x2f, 0x2d, 0x2b, 0x2b, 0x2d, + 0x2c, 0x2c, 0x31, 0x3c, 0x42, 0x4b, 0x50, 0x51, 0x4d, 0x4d, 0x4d, 0x4e, + 0x51, 0x54, 0x57, 0x53, 0x56, 0x55, 0x57, 0x55, 0x52, 0x53, 0x57, 0x59, + 0x56, 0x58, 0x58, 0x58, 0x58, 0x59, 0x59, 0x58, 0x56, 0x58, 0x58, 0x58, + 0x5c, 0x5c, 0x5c, 0x5c, 0x5c, 0x5d, 0x5e, 0x5e, 0x60, 0x61, 0x61, 0x60, + 0x61, 0x5e, 0x5e, 0x5e, 0x5e, 0x5e, 0x5d, 0x58, 0x48, 0x28, 0x0a, 0xf8, + 0xe3, 0xc6, 0xb2, 0xa5, 0x9d, 0xa1, 0xab, 0xad, 0xab, 0xa6, 0xa2, 0xa0, + 0xae, 0xae, 0xa8, 0xac, 0xa9, 0xaf, 0xb6, 0xbf, 0xc2, 0xbc, 0xba, 0xbe, + 0xbb, 0xbe, 0xba, 0xb6, 0xae, 0xac, 0xc9, 0xcd, 0xbe, 0xbd, 0xba, 0xb7, + 0xba, 0xc2, 0xc2, 0xbe, 0xbd, 0xbd, 0xc3, 0xc1, 0xbd, 0xc3, 0xc4, 0xb7, + 0xb3, 0xbb, 0xc2, 0xbe, 0xba, 0xc0, 0xbf, 0xb7, 0xb6, 0xc3, 0xce, 0xc7, + 0xba, 0xbf, 0xc7, 0xd1, 0xcc, 0xd5, 0xcc, 0xcb, 0xcd, 0xd1, 0xd0, 0xc5, + 0xc7, 0xc2, 0xc6, 0xc7, 0xcd, 0xcb, 0xc7, 0xce, 0xcf, 0xc9, 0xc2, 0xc5, + 0xc6, 0xc6, 0xc8, 0xcb, 0xcc, 0xcf, 0xcf, 0xca, 0xb9, 0xb5, 0xbc, 0xbb, + 0xbd, 0xbe, 0xc1, 0xbe, 0xbe, 0xc7, 0xca, 0xbe, 0xc8, 0xc3, 0xbd, 0xcb, + 0xcc, 0xc0, 0xb7, 0xb8, 0xbd, 0xc5, 0xc0, 0xc7, 0xcc, 0xc5, 0xbd, 0xc1, + 0xc0, 0xb7, 0xb1, 0xb8, 0xc0, 0xc6, 0xcc, 0xc1, 0x31, 0x32, 0x31, 0x31, + 0x30, 0x32, 0x32, 0x33, 0x32, 0x32, 0x32, 0x32, 0x32, 0x31, 0x32, 0x2e, + 0x27, 0x1f, 0x19, 0x1c, 0x20, 0x22, 0x23, 0x22, 0x25, 0x27, 0x24, 0x22, + 0x27, 0x2d, 0x30, 0x31, 0x34, 0x32, 0x32, 0x34, 0x35, 0x36, 0x3a, 0x3e, + 0x43, 0x49, 0x4e, 0x50, 0x50, 0x50, 0x4e, 0x4f, 0x52, 0x57, 0x57, 0x53, + 0x54, 0x55, 0x57, 0x55, 0x53, 0x55, 0x57, 0x5a, 0x57, 0x59, 0x5b, 0x59, + 0x58, 0x59, 0x58, 0x57, 0x56, 0x57, 0x59, 0x59, 0x5d, 0x5d, 0x5b, 0x5c, + 0x5b, 0x5c, 0x5d, 0x5e, 0x5e, 0x5e, 0x5f, 0x60, 0x60, 0x5d, 0x5e, 0x60, + 0x60, 0x60, 0x5f, 0x5d, 0x57, 0x3c, 0x10, 0xec, 0xe4, 0xda, 0xbb, 0xa4, + 0xa0, 0xa7, 0xb9, 0xbe, 0xb6, 0xac, 0xab, 0xa6, 0xb1, 0xbf, 0xc0, 0xc3, + 0xbf, 0xc3, 0xc7, 0xc2, 0xbc, 0xbe, 0xbc, 0xbf, 0xb9, 0xc2, 0xbf, 
0xca, + 0xc6, 0xc2, 0xc4, 0xc6, 0xbd, 0xc2, 0xc2, 0xbf, 0xc7, 0xda, 0xde, 0xc2, + 0xc1, 0xc2, 0xc2, 0xbf, 0xbb, 0xc1, 0xc7, 0xbf, 0xbf, 0xc0, 0xc2, 0xbc, + 0xbb, 0xb9, 0xb7, 0xb1, 0xb3, 0xc5, 0xd9, 0xc3, 0xc0, 0xc7, 0xc8, 0xc9, + 0xcc, 0xc2, 0xc7, 0xc7, 0xc2, 0xc4, 0xce, 0xd6, 0xd4, 0xc9, 0xc2, 0xc6, + 0xd0, 0xd2, 0xd5, 0xd6, 0xd5, 0xca, 0xc2, 0xc7, 0xc0, 0xc7, 0xca, 0xce, + 0xd4, 0xcb, 0xc8, 0xc7, 0xd4, 0xc9, 0xc2, 0xc7, 0xcc, 0xcc, 0xc5, 0xbf, + 0xc4, 0xcb, 0xbd, 0xbb, 0xc5, 0xc0, 0xb9, 0xbf, 0xcc, 0xc4, 0xb7, 0xb3, + 0xc1, 0xcc, 0xcb, 0xcb, 0xc7, 0xcc, 0xc2, 0xbc, 0xbe, 0xb7, 0xb5, 0xb7, + 0xbc, 0xc6, 0xbe, 0xc4, 0x31, 0x31, 0x31, 0x32, 0x32, 0x32, 0x31, 0x32, + 0x32, 0x32, 0x32, 0x32, 0x33, 0x32, 0x31, 0x2d, 0x26, 0x20, 0x1d, 0x1b, + 0x1d, 0x20, 0x22, 0x22, 0x25, 0x27, 0x24, 0x28, 0x2a, 0x30, 0x34, 0x36, + 0x37, 0x38, 0x3b, 0x3d, 0x3d, 0x42, 0x46, 0x43, 0x45, 0x4a, 0x4d, 0x52, + 0x50, 0x52, 0x52, 0x52, 0x55, 0x58, 0x56, 0x54, 0x55, 0x57, 0x56, 0x57, + 0x55, 0x57, 0x58, 0x5b, 0x5a, 0x5a, 0x5b, 0x59, 0x59, 0x5a, 0x57, 0x57, + 0x57, 0x56, 0x57, 0x5b, 0x5b, 0x5b, 0x5b, 0x5d, 0x5d, 0x5c, 0x5c, 0x5d, + 0x5d, 0x5d, 0x5d, 0x5f, 0x5e, 0x5d, 0x5e, 0x5f, 0x5f, 0x61, 0x61, 0x5f, + 0x5a, 0x4f, 0x2d, 0xf1, 0xc7, 0xc2, 0xc5, 0xb6, 0xb1, 0xbd, 0xd2, 0xdc, + 0xd3, 0xcc, 0xd2, 0xc5, 0xca, 0xdd, 0xd5, 0xd6, 0xcd, 0xce, 0xcc, 0xc5, + 0xba, 0xca, 0xc2, 0xba, 0xb4, 0xbd, 0xc3, 0xcd, 0xd6, 0xe2, 0xd8, 0xc2, + 0xbf, 0xca, 0xcb, 0xcb, 0xce, 0xcc, 0xc5, 0xbd, 0xc6, 0xcb, 0xce, 0xcd, + 0xc8, 0xca, 0xc5, 0xc2, 0xc0, 0xbf, 0xc2, 0xc4, 0xc2, 0xc2, 0xc2, 0xbe, + 0xbf, 0xc9, 0xc8, 0xbc, 0xbd, 0xc4, 0xc8, 0xc0, 0xc4, 0xbd, 0xc2, 0xc1, + 0xc6, 0xc2, 0xc2, 0xc8, 0xc7, 0xcc, 0xc9, 0xcd, 0xcf, 0xcb, 0xcd, 0xcc, + 0xc7, 0xc8, 0xc7, 0xc8, 0xc6, 0xcb, 0xcc, 0xcc, 0xd2, 0xce, 0xc7, 0xc5, + 0xd4, 0xce, 0xce, 0xd3, 0xd3, 0xd2, 0xd2, 0xcc, 0xca, 0xc6, 0xc8, 0xc9, + 0xca, 0xc2, 0xb0, 0xb6, 0xc3, 0xc3, 0xc7, 0xba, 0xc1, 0xc7, 0xcf, 0xc2, + 0xca, 0xd7, 0xcb, 0xc3, 0xbd, 0xb3, 0xb8, 0xb4, 0xbc, 0xc5, 0xba, 
0xbd, + 0x30, 0x32, 0x32, 0x30, 0x32, 0x35, 0x32, 0x32, 0x32, 0x32, 0x35, 0x33, + 0x34, 0x33, 0x31, 0x30, 0x2b, 0x24, 0x1f, 0x1d, 0x1d, 0x21, 0x22, 0x20, + 0x25, 0x2c, 0x2d, 0x2c, 0x2d, 0x33, 0x38, 0x3d, 0x3d, 0x3c, 0x3f, 0x44, + 0x42, 0x47, 0x4a, 0x47, 0x49, 0x4e, 0x4f, 0x51, 0x51, 0x53, 0x55, 0x54, + 0x56, 0x58, 0x56, 0x54, 0x57, 0x57, 0x57, 0x58, 0x55, 0x58, 0x59, 0x5c, + 0x5b, 0x58, 0x5a, 0x58, 0x57, 0x58, 0x58, 0x56, 0x58, 0x57, 0x56, 0x59, + 0x59, 0x5a, 0x5a, 0x5b, 0x5c, 0x5b, 0x5b, 0x5b, 0x5d, 0x5d, 0x5d, 0x5f, + 0x5c, 0x5a, 0x5f, 0x5f, 0x5f, 0x61, 0x62, 0x60, 0x5b, 0x52, 0x3d, 0x0e, + 0xdf, 0xcb, 0xcc, 0xdf, 0xe4, 0xe3, 0xe3, 0xe6, 0xef, 0xfd, 0xee, 0xdb, + 0xda, 0xd2, 0xcb, 0xce, 0xcb, 0xcc, 0xd2, 0xc3, 0xbe, 0xcc, 0xcc, 0xc2, + 0xc8, 0xc5, 0xcc, 0xc7, 0xcb, 0xda, 0xd1, 0xbd, 0xc3, 0xca, 0xc7, 0xd0, + 0xd4, 0xc6, 0xbc, 0xb7, 0xc0, 0xd2, 0xe2, 0xe2, 0xd0, 0xca, 0xba, 0xc0, + 0xbe, 0xb7, 0xc2, 0xc7, 0xc6, 0xd2, 0xe1, 0xed, 0xd8, 0xc7, 0xc9, 0xd4, + 0xd1, 0xc3, 0xc0, 0xb7, 0xbc, 0xc0, 0xbd, 0xbc, 0xc5, 0xbc, 0xbc, 0xbd, + 0xc5, 0xcc, 0xcd, 0xd2, 0xd8, 0xcf, 0xcc, 0xc9, 0xc7, 0xc9, 0xc2, 0xc2, + 0xc7, 0xca, 0xca, 0xbc, 0xbe, 0xc4, 0xbe, 0xc3, 0xcc, 0xc8, 0xcd, 0xcc, + 0xcd, 0xd6, 0xd6, 0xd2, 0xcc, 0xcb, 0xcc, 0xc7, 0xcb, 0xcc, 0xb7, 0xb1, + 0xb9, 0xc4, 0xbf, 0xb9, 0xb8, 0xc6, 0xc7, 0xbe, 0xc7, 0xd8, 0xce, 0xcc, + 0xc1, 0xb7, 0xb9, 0xb7, 0xbc, 0xbc, 0xb6, 0xb8, 0x31, 0x32, 0x2f, 0x32, + 0x33, 0x35, 0x34, 0x34, 0x36, 0x38, 0x35, 0x35, 0x34, 0x33, 0x32, 0x30, + 0x2c, 0x26, 0x22, 0x21, 0x1e, 0x22, 0x27, 0x26, 0x2c, 0x33, 0x33, 0x32, + 0x34, 0x39, 0x3d, 0x3e, 0x3f, 0x3e, 0x43, 0x45, 0x47, 0x48, 0x4d, 0x4d, + 0x4d, 0x4f, 0x51, 0x53, 0x52, 0x55, 0x56, 0x56, 0x57, 0x57, 0x56, 0x56, + 0x57, 0x57, 0x58, 0x58, 0x57, 0x59, 0x59, 0x5b, 0x5d, 0x5a, 0x59, 0x57, + 0x55, 0x56, 0x57, 0x57, 0x58, 0x57, 0x57, 0x57, 0x5a, 0x59, 0x5a, 0x5a, + 0x5c, 0x5b, 0x5b, 0x5b, 0x5c, 0x5d, 0x5c, 0x5d, 0x5e, 0x5d, 0x5f, 0x5f, + 0x60, 0x62, 0x62, 0x62, 0x5e, 0x58, 0x4b, 0x31, 0x07, 0xe7, 0xd7, 
0xe2, + 0xde, 0xd4, 0xce, 0xd7, 0xe2, 0xe0, 0xd8, 0xd0, 0xc9, 0xc3, 0xc2, 0xc4, + 0xc2, 0xc1, 0xc5, 0xbe, 0xbc, 0xcc, 0xcc, 0xd0, 0xd7, 0xc3, 0xc2, 0xc7, + 0xc9, 0xcc, 0xc8, 0xc6, 0xc1, 0xc6, 0xce, 0xd9, 0xd3, 0xc3, 0xc1, 0xc3, + 0xc7, 0xcd, 0xcf, 0xc9, 0xbe, 0xb9, 0xb5, 0xba, 0xbc, 0xba, 0xc7, 0xc5, + 0xc7, 0xd2, 0xda, 0xeb, 0xdb, 0xce, 0xd6, 0xe2, 0xe3, 0xd5, 0xc2, 0xaf, + 0xbc, 0xc6, 0xb6, 0xbc, 0xbc, 0xb7, 0xbb, 0xc2, 0xcd, 0xcd, 0xc2, 0xcc, + 0xd2, 0xcb, 0xbf, 0xbc, 0xbf, 0xc0, 0xb9, 0xbd, 0xcf, 0xcf, 0xcb, 0xbe, + 0xbc, 0xbe, 0xbe, 0xc9, 0xd2, 0xc6, 0xc7, 0xcf, 0xcb, 0xc4, 0xc7, 0xcb, + 0xcb, 0xdb, 0xd2, 0xc6, 0xc1, 0xbf, 0xb7, 0xad, 0xb3, 0xc8, 0xbf, 0xb7, + 0xbb, 0xc6, 0xc6, 0xbe, 0xbd, 0xc2, 0xc9, 0xd0, 0xcc, 0xc0, 0xbc, 0xbb, + 0xba, 0xb7, 0xbd, 0xbc, 0x31, 0x31, 0x31, 0x32, 0x33, 0x34, 0x35, 0x35, + 0x38, 0x38, 0x39, 0x37, 0x37, 0x36, 0x34, 0x30, 0x2b, 0x25, 0x24, 0x27, + 0x27, 0x27, 0x2a, 0x29, 0x2e, 0x32, 0x36, 0x37, 0x38, 0x3d, 0x3e, 0x3e, + 0x3f, 0x42, 0x44, 0x42, 0x47, 0x4a, 0x4e, 0x51, 0x4f, 0x4f, 0x51, 0x54, + 0x52, 0x54, 0x56, 0x59, 0x58, 0x57, 0x55, 0x57, 0x58, 0x57, 0x5a, 0x57, + 0x55, 0x59, 0x59, 0x5a, 0x5d, 0x5b, 0x57, 0x57, 0x54, 0x56, 0x57, 0x56, + 0x59, 0x59, 0x56, 0x57, 0x5b, 0x57, 0x59, 0x5a, 0x5a, 0x5b, 0x5b, 0x5b, + 0x5c, 0x5d, 0x5c, 0x5d, 0x60, 0x5f, 0x5f, 0x60, 0x60, 0x62, 0x62, 0x62, + 0x61, 0x5c, 0x52, 0x44, 0x32, 0x08, 0xde, 0xd6, 0xd6, 0xd7, 0xd7, 0xd9, + 0xd7, 0xd6, 0xcd, 0xc7, 0xc3, 0xc4, 0xc2, 0xc8, 0xc7, 0xc3, 0xcc, 0xcc, + 0xcc, 0xc7, 0xcd, 0xd4, 0xd6, 0xd8, 0xc7, 0xc6, 0xcc, 0xca, 0xc2, 0xc4, + 0xc2, 0xc5, 0xd2, 0xd8, 0xcc, 0xc2, 0xb9, 0xb7, 0xc1, 0xc7, 0xc4, 0xc2, + 0xbc, 0xb8, 0xb5, 0xb7, 0xbb, 0xbc, 0xc6, 0xc7, 0xc2, 0xc6, 0xc7, 0xc7, + 0xc4, 0xc5, 0xc5, 0xd2, 0xdd, 0xd2, 0xc2, 0xc5, 0xcd, 0xc7, 0xbe, 0xbf, + 0xbf, 0xb8, 0xc1, 0xc6, 0xcc, 0xc8, 0xc2, 0xca, 0xc8, 0xcc, 0xc3, 0xb8, + 0xb8, 0xbc, 0xc0, 0xc1, 0xcf, 0xd1, 0xc7, 0xc6, 0xbb, 0xbd, 0xb9, 0xbb, + 0xc0, 0xbb, 0xc2, 0xc7, 0xbe, 0xb6, 0xbc, 0xcc, 0xc4, 0xd7, 0xd7, 
0xc7, + 0xc2, 0xbe, 0xb4, 0xa7, 0xb1, 0xd0, 0xcb, 0xbb, 0xbf, 0xc6, 0xc3, 0xc3, + 0xc6, 0xc0, 0xbf, 0xc8, 0xd1, 0xc2, 0xc1, 0xbc, 0xbe, 0xba, 0xbf, 0xc9, + 0x32, 0x32, 0x32, 0x34, 0x35, 0x36, 0x37, 0x37, 0x3a, 0x37, 0x3b, 0x3a, + 0x37, 0x36, 0x32, 0x30, 0x2c, 0x2a, 0x28, 0x2b, 0x2c, 0x2a, 0x2b, 0x2a, + 0x30, 0x32, 0x35, 0x3b, 0x3d, 0x42, 0x41, 0x3c, 0x3c, 0x41, 0x42, 0x46, + 0x4a, 0x4b, 0x4d, 0x50, 0x4f, 0x4e, 0x50, 0x52, 0x50, 0x54, 0x55, 0x5a, + 0x57, 0x56, 0x54, 0x56, 0x57, 0x58, 0x5a, 0x55, 0x53, 0x58, 0x58, 0x59, + 0x5c, 0x5a, 0x59, 0x59, 0x53, 0x54, 0x56, 0x56, 0x59, 0x5a, 0x57, 0x57, + 0x5c, 0x58, 0x57, 0x59, 0x59, 0x59, 0x5a, 0x5b, 0x5b, 0x5c, 0x5d, 0x5f, + 0x60, 0x5f, 0x5f, 0x5f, 0x60, 0x60, 0x62, 0x62, 0x62, 0x5f, 0x5b, 0x54, + 0x48, 0x35, 0x0c, 0xed, 0xdc, 0xd4, 0xcd, 0xcd, 0xcc, 0xd2, 0xcc, 0xca, + 0xce, 0xcc, 0xd2, 0xd0, 0xcc, 0xc9, 0xd2, 0xd0, 0xd2, 0xcc, 0xcf, 0xd2, + 0xc7, 0xc5, 0xc7, 0xcc, 0xcf, 0xc7, 0xbe, 0xbd, 0xbd, 0xc6, 0xcc, 0xd1, + 0xc8, 0xc2, 0xbc, 0xbc, 0xc4, 0xc2, 0xbf, 0xc1, 0xc2, 0xbb, 0xb7, 0xb7, + 0xba, 0xbc, 0xbe, 0xbf, 0xbd, 0xc2, 0xc7, 0xc1, 0xbd, 0xc1, 0xbf, 0xc1, + 0xc2, 0xc1, 0xc3, 0xcf, 0xcc, 0xc9, 0xc8, 0xc0, 0xbc, 0xba, 0xc4, 0xc9, + 0xc2, 0xc2, 0xc2, 0xc5, 0xc1, 0xbf, 0xc2, 0xbe, 0xbf, 0xbb, 0xbf, 0xbf, + 0xbf, 0xcb, 0xcb, 0xcd, 0xc2, 0xb9, 0xb3, 0xb7, 0xbe, 0xbb, 0xb8, 0xbc, + 0xb7, 0xb5, 0xb5, 0xb3, 0xc0, 0xc9, 0xc9, 0xbd, 0xc5, 0xc2, 0xb7, 0xad, + 0xb3, 0xbd, 0xc7, 0xbc, 0xbc, 0xc4, 0xc5, 0xcc, 0xd3, 0xcc, 0xbb, 0xbc, + 0xcd, 0xd2, 0xc4, 0xbc, 0xbf, 0xc2, 0xc2, 0xd2, 0x33, 0x35, 0x34, 0x36, + 0x36, 0x35, 0x36, 0x37, 0x37, 0x36, 0x37, 0x38, 0x34, 0x33, 0x30, 0x2e, + 0x2c, 0x28, 0x2b, 0x2d, 0x30, 0x2e, 0x2d, 0x2f, 0x33, 0x36, 0x3a, 0x3f, + 0x43, 0x43, 0x3d, 0x3a, 0x3e, 0x42, 0x43, 0x47, 0x47, 0x4a, 0x4a, 0x4f, + 0x50, 0x4c, 0x4d, 0x4d, 0x4e, 0x53, 0x54, 0x59, 0x57, 0x54, 0x54, 0x54, + 0x54, 0x58, 0x59, 0x54, 0x54, 0x57, 0x57, 0x58, 0x59, 0x59, 0x58, 0x59, + 0x53, 0x53, 0x54, 0x55, 0x57, 0x57, 0x57, 0x58, 0x5c, 0x59, 0x58, 
0x59, + 0x59, 0x5a, 0x5a, 0x5b, 0x5c, 0x5d, 0x5d, 0x60, 0x61, 0x5f, 0x5f, 0x5f, + 0x60, 0x60, 0x62, 0x62, 0x62, 0x60, 0x5f, 0x5d, 0x55, 0x4c, 0x3a, 0x17, + 0xf2, 0xd9, 0xdc, 0xd7, 0xd0, 0xd1, 0xcd, 0xc9, 0xce, 0xd5, 0xcf, 0xcc, + 0xd2, 0xce, 0xcc, 0xd1, 0xcd, 0xd1, 0xd7, 0xd2, 0xc9, 0xca, 0xce, 0xce, + 0xc6, 0xc3, 0xbf, 0xbc, 0xbd, 0xc5, 0xca, 0xc1, 0xc2, 0xcc, 0xc7, 0xc2, + 0xbe, 0xbc, 0xbc, 0xc1, 0xbd, 0xbb, 0xbd, 0xbc, 0xbc, 0xbc, 0xbb, 0xbb, + 0xbe, 0xc5, 0xcc, 0xc7, 0xc1, 0xcb, 0xcd, 0xc4, 0xc3, 0xc4, 0xc5, 0xc7, + 0xc7, 0xc2, 0xc2, 0xbf, 0xbc, 0xbc, 0xbf, 0xcc, 0xc3, 0xc2, 0xd0, 0xd2, + 0xcd, 0xcb, 0xc8, 0xc1, 0xbd, 0xbe, 0xbc, 0xbc, 0xc2, 0xc3, 0xbb, 0xc2, + 0xc7, 0xc0, 0xb6, 0xad, 0xae, 0xb3, 0xb7, 0xc4, 0xc2, 0xb7, 0xb0, 0xb3, + 0xc0, 0xbd, 0xbe, 0xc0, 0xc4, 0xc3, 0xc0, 0xc0, 0xbd, 0xbb, 0xba, 0xba, + 0xba, 0xc6, 0xc5, 0xc4, 0xc7, 0xd4, 0xd8, 0xdd, 0xcb, 0xd4, 0xc9, 0xbc, + 0xb7, 0xbf, 0xc2, 0xcb, 0x32, 0x33, 0x32, 0x35, 0x35, 0x35, 0x34, 0x35, + 0x32, 0x32, 0x35, 0x36, 0x34, 0x32, 0x32, 0x30, 0x2e, 0x2e, 0x31, 0x32, + 0x32, 0x32, 0x30, 0x33, 0x39, 0x39, 0x3c, 0x40, 0x3f, 0x3d, 0x3c, 0x40, + 0x43, 0x44, 0x42, 0x44, 0x47, 0x49, 0x4c, 0x4e, 0x50, 0x4d, 0x4a, 0x4c, + 0x4d, 0x52, 0x52, 0x57, 0x56, 0x52, 0x54, 0x53, 0x54, 0x57, 0x58, 0x54, + 0x53, 0x56, 0x56, 0x58, 0x58, 0x59, 0x58, 0x58, 0x55, 0x55, 0x56, 0x55, + 0x53, 0x54, 0x56, 0x58, 0x59, 0x59, 0x58, 0x5a, 0x5a, 0x5a, 0x5b, 0x5c, + 0x5e, 0x60, 0x5f, 0x60, 0x61, 0x5f, 0x5f, 0x60, 0x61, 0x62, 0x62, 0x62, + 0x62, 0x62, 0x61, 0x60, 0x5d, 0x59, 0x52, 0x45, 0x1f, 0xf8, 0xe8, 0xe6, + 0xdb, 0xd0, 0xcf, 0xd3, 0xdc, 0xdc, 0xd2, 0xd4, 0xd7, 0xd2, 0xc9, 0xcb, + 0xc7, 0xd2, 0xd2, 0xc7, 0xce, 0xd8, 0xcc, 0xcc, 0xca, 0xbe, 0xbc, 0xc1, + 0xbe, 0xbb, 0xc2, 0xbe, 0xc3, 0xcf, 0xc3, 0xbc, 0xbf, 0xc3, 0xc2, 0xc2, + 0xbb, 0xbb, 0xc4, 0xc2, 0xb9, 0xb9, 0xbc, 0xbd, 0xc0, 0xbf, 0xc7, 0xd2, + 0xc0, 0xc0, 0xbe, 0xbc, 0xc4, 0xca, 0xd4, 0xcb, 0xc1, 0xb9, 0xc5, 0xc2, + 0xbe, 0xc5, 0xc7, 0xc2, 0xbb, 0xca, 0xcc, 0xc6, 0xcf, 0xd4, 0xca, 
0xc9, + 0xc1, 0xba, 0xbe, 0xc3, 0xca, 0xb9, 0xb3, 0xc2, 0xc7, 0xcc, 0xbb, 0xab, + 0xa8, 0xac, 0xac, 0xc1, 0xbe, 0xbc, 0xb8, 0xc2, 0xc7, 0xc5, 0xba, 0xbd, + 0xc5, 0xc8, 0xd3, 0xcb, 0xbd, 0xbc, 0xb5, 0xb1, 0xb9, 0xd0, 0xc5, 0xc1, + 0xbf, 0xc5, 0xd6, 0xf0, 0xe9, 0xd3, 0xc5, 0xb9, 0xba, 0xba, 0xba, 0xbb, + 0x32, 0x32, 0x32, 0x34, 0x35, 0x34, 0x36, 0x38, 0x38, 0x3a, 0x3a, 0x38, + 0x37, 0x34, 0x34, 0x32, 0x31, 0x33, 0x32, 0x35, 0x32, 0x32, 0x33, 0x35, + 0x37, 0x38, 0x3c, 0x3d, 0x3e, 0x3f, 0x3f, 0x43, 0x46, 0x42, 0x42, 0x44, + 0x48, 0x4b, 0x4d, 0x4f, 0x51, 0x4b, 0x47, 0x49, 0x4d, 0x50, 0x51, 0x52, + 0x53, 0x50, 0x55, 0x54, 0x55, 0x58, 0x58, 0x56, 0x54, 0x55, 0x57, 0x58, + 0x58, 0x57, 0x56, 0x59, 0x58, 0x55, 0x58, 0x57, 0x53, 0x56, 0x55, 0x57, + 0x5a, 0x5a, 0x59, 0x59, 0x59, 0x59, 0x5b, 0x5b, 0x5d, 0x60, 0x60, 0x60, + 0x62, 0x61, 0x60, 0x60, 0x61, 0x62, 0x62, 0x62, 0x63, 0x63, 0x62, 0x62, + 0x5e, 0x58, 0x53, 0x4e, 0x42, 0x24, 0x04, 0xf7, 0xde, 0xcc, 0xca, 0xd3, + 0xd3, 0xda, 0xd1, 0xd5, 0xd0, 0xce, 0xc9, 0xca, 0xc2, 0xd0, 0xc7, 0xbc, + 0xc5, 0xc9, 0xc2, 0xc7, 0xd2, 0xd4, 0xc2, 0xbc, 0xbe, 0xb7, 0xc0, 0xbf, + 0xbe, 0xc3, 0xba, 0xb6, 0xc2, 0xc4, 0xc2, 0xbc, 0xb8, 0xb5, 0xbd, 0xc8, + 0xbc, 0xc1, 0xc6, 0xcf, 0xc5, 0xc2, 0xc2, 0xc7, 0xc6, 0xb7, 0xb7, 0xbd, + 0xc1, 0xc7, 0xd2, 0xcb, 0xbc, 0xbd, 0xbf, 0xc1, 0xbb, 0xbb, 0xc2, 0xbb, + 0xc0, 0xcf, 0xd6, 0xd0, 0xd0, 0xd2, 0xcb, 0xc8, 0xc2, 0xc1, 0xc2, 0xc0, + 0xba, 0xb3, 0xb5, 0xc9, 0xce, 0xc7, 0xbc, 0xb6, 0xb7, 0xbd, 0xb9, 0xbc, + 0xbc, 0xbb, 0xc4, 0xc2, 0xbf, 0xc1, 0xb9, 0xb7, 0xc4, 0xc5, 0xd2, 0xce, + 0xc2, 0xbd, 0xb1, 0xb1, 0xc3, 0xd4, 0xbf, 0xb6, 0xb7, 0xbf, 0xc1, 0xc5, + 0xdb, 0xf0, 0xde, 0xbd, 0xb8, 0xb8, 0xb4, 0xb6, 0x2d, 0x2e, 0x2e, 0x30, + 0x32, 0x34, 0x3a, 0x3c, 0x39, 0x3a, 0x38, 0x36, 0x34, 0x36, 0x36, 0x35, + 0x34, 0x36, 0x33, 0x34, 0x34, 0x34, 0x32, 0x34, 0x37, 0x38, 0x39, 0x3d, + 0x3d, 0x3d, 0x40, 0x45, 0x42, 0x3f, 0x42, 0x45, 0x4b, 0x4d, 0x4f, 0x51, + 0x4c, 0x47, 0x46, 0x47, 0x4c, 0x50, 0x4e, 0x4e, 0x51, 0x51, 0x54, 
0x53, + 0x55, 0x58, 0x56, 0x55, 0x55, 0x54, 0x57, 0x57, 0x55, 0x56, 0x56, 0x59, + 0x5a, 0x57, 0x59, 0x57, 0x56, 0x57, 0x56, 0x57, 0x5b, 0x5b, 0x59, 0x5a, + 0x5b, 0x5b, 0x5b, 0x5b, 0x5c, 0x5e, 0x5f, 0x60, 0x62, 0x60, 0x61, 0x62, + 0x61, 0x62, 0x63, 0x63, 0x63, 0x64, 0x64, 0x63, 0x62, 0x5c, 0x52, 0x4b, + 0x40, 0x33, 0x17, 0x01, 0xe5, 0xd3, 0xd2, 0xdf, 0xde, 0xe1, 0xde, 0xd4, + 0xcd, 0xc6, 0xc0, 0xc2, 0xc0, 0xc2, 0xbe, 0xbc, 0xbd, 0xc1, 0xbf, 0xc2, + 0xc9, 0xe4, 0xce, 0xba, 0xba, 0xba, 0xbf, 0xbe, 0xbe, 0xbe, 0xbd, 0xb1, + 0xbf, 0xbd, 0xc2, 0xbc, 0xbc, 0xc1, 0xba, 0xcd, 0xc7, 0xbc, 0xc7, 0xcd, + 0xd0, 0xc7, 0xc0, 0xb9, 0xc5, 0xbc, 0xb9, 0xbf, 0xc4, 0xc7, 0xc1, 0xc7, + 0xc4, 0xb5, 0xbc, 0xc4, 0xbc, 0xb5, 0xb9, 0xb9, 0xc1, 0xc4, 0xcf, 0xd1, + 0xce, 0xd5, 0xcc, 0xc4, 0xc9, 0xc8, 0xc7, 0xc7, 0xc5, 0xc2, 0xbe, 0xcc, + 0xd0, 0xc0, 0xb7, 0xc2, 0xb5, 0xb5, 0xbd, 0xbb, 0xbd, 0xc3, 0xbc, 0xc6, + 0xc1, 0xba, 0xbc, 0xb3, 0xb7, 0xb6, 0xcb, 0xc9, 0xb4, 0xb5, 0xb3, 0xb4, + 0xc5, 0xce, 0xb8, 0xaf, 0xb3, 0xb6, 0xbc, 0xc2, 0xc2, 0xd6, 0xd5, 0xcf, + 0xce, 0xbe, 0xb4, 0xb3, 0x2d, 0x2d, 0x2d, 0x32, 0x36, 0x37, 0x3a, 0x38, + 0x38, 0x39, 0x39, 0x38, 0x36, 0x36, 0x34, 0x36, 0x35, 0x32, 0x32, 0x35, + 0x38, 0x37, 0x37, 0x35, 0x37, 0x37, 0x3b, 0x3d, 0x3d, 0x41, 0x44, 0x42, + 0x3f, 0x41, 0x47, 0x47, 0x4e, 0x4e, 0x4e, 0x4d, 0x47, 0x45, 0x46, 0x45, + 0x4d, 0x4f, 0x4d, 0x4a, 0x52, 0x54, 0x52, 0x54, 0x57, 0x57, 0x53, 0x57, + 0x56, 0x55, 0x57, 0x57, 0x54, 0x54, 0x55, 0x58, 0x59, 0x58, 0x58, 0x57, + 0x56, 0x56, 0x57, 0x58, 0x5b, 0x5b, 0x59, 0x59, 0x5b, 0x5b, 0x5b, 0x5a, + 0x5a, 0x5d, 0x5e, 0x5f, 0x61, 0x61, 0x61, 0x62, 0x62, 0x62, 0x62, 0x62, + 0x63, 0x64, 0x65, 0x64, 0x62, 0x62, 0x60, 0x5a, 0x4c, 0x3f, 0x31, 0x15, + 0xfc, 0xe9, 0xe2, 0xdc, 0xe2, 0xe9, 0xe6, 0xd3, 0xce, 0xc6, 0xc6, 0xc4, + 0xbd, 0xc0, 0xc0, 0xc2, 0xbe, 0xc2, 0xc3, 0xc6, 0xc3, 0xd9, 0xda, 0xba, + 0xbc, 0xbd, 0xbe, 0xc0, 0xbb, 0xbb, 0xbd, 0xb5, 0xbc, 0xbf, 0xc0, 0xc3, + 0xc8, 0xca, 0xc9, 0xcb, 0xc4, 0xbf, 0xc8, 0xc5, 0xc4, 0xc5, 0xbe, 
0xb7, + 0xc5, 0xc4, 0xc3, 0xbc, 0xc5, 0xba, 0xc0, 0xc2, 0xc9, 0xb8, 0xbd, 0xd0, + 0xbc, 0xb3, 0xb9, 0xbc, 0xbf, 0xbe, 0xc0, 0xcb, 0xcb, 0xce, 0xcf, 0xc8, + 0xcb, 0xc6, 0xcf, 0xd3, 0xd9, 0xd5, 0xc5, 0xc0, 0xbd, 0xbb, 0xb5, 0xbf, + 0xb7, 0xbc, 0xbc, 0xb7, 0xb2, 0xbc, 0xb9, 0xbc, 0xc6, 0xc2, 0xbd, 0xba, + 0xb6, 0xb0, 0xc0, 0xc6, 0xb1, 0xb3, 0xb7, 0xb7, 0xc6, 0xcb, 0xb7, 0xad, + 0xaf, 0xb2, 0xb9, 0xc7, 0xc9, 0xbb, 0xc4, 0xcf, 0xdf, 0xcc, 0xb8, 0xb7, + 0x2e, 0x31, 0x32, 0x32, 0x34, 0x37, 0x37, 0x37, 0x38, 0x3a, 0x38, 0x38, + 0x3a, 0x37, 0x33, 0x34, 0x35, 0x36, 0x39, 0x37, 0x38, 0x37, 0x37, 0x37, + 0x38, 0x3a, 0x38, 0x3d, 0x44, 0x46, 0x40, 0x3e, 0x42, 0x44, 0x45, 0x4a, + 0x4d, 0x4d, 0x4d, 0x4b, 0x46, 0x44, 0x44, 0x46, 0x4e, 0x4d, 0x4c, 0x47, + 0x52, 0x56, 0x52, 0x54, 0x57, 0x57, 0x54, 0x57, 0x57, 0x57, 0x58, 0x59, + 0x57, 0x56, 0x54, 0x57, 0x59, 0x58, 0x58, 0x57, 0x57, 0x56, 0x57, 0x57, + 0x5a, 0x5b, 0x5a, 0x5b, 0x5c, 0x5c, 0x5d, 0x5c, 0x59, 0x5b, 0x5d, 0x60, + 0x61, 0x62, 0x62, 0x62, 0x62, 0x63, 0x62, 0x62, 0x63, 0x64, 0x65, 0x65, + 0x64, 0x64, 0x64, 0x62, 0x5d, 0x54, 0x48, 0x36, 0x1d, 0x04, 0xf7, 0xdf, + 0xee, 0xff, 0xf7, 0xd5, 0xd6, 0xd1, 0xca, 0xc5, 0xbc, 0xbe, 0xc0, 0xbc, + 0xba, 0xbc, 0xbc, 0xc1, 0xc7, 0xc7, 0xd6, 0xc2, 0xc3, 0xc0, 0xc0, 0xbc, + 0xbc, 0xbc, 0xb7, 0xc2, 0xc3, 0xc4, 0xbf, 0xc0, 0xc5, 0xc7, 0xc9, 0xc3, + 0xbc, 0xb2, 0xc2, 0xd1, 0xca, 0xc7, 0xc3, 0xc6, 0xc8, 0xbd, 0xc8, 0xbc, + 0xbc, 0xb8, 0xbd, 0xba, 0xbf, 0xba, 0xbc, 0xd1, 0xc6, 0xc8, 0xc1, 0xbd, + 0xc1, 0xc3, 0xbf, 0xcf, 0xc8, 0xca, 0xc9, 0xd1, 0xc7, 0xc8, 0xc7, 0xbd, + 0xd0, 0xd9, 0xcc, 0xc4, 0xbe, 0xbf, 0xbe, 0xbe, 0xc2, 0xc4, 0xc0, 0xb6, + 0xb7, 0xb4, 0xb6, 0xb7, 0xba, 0xb8, 0xbc, 0xc1, 0xb7, 0xbc, 0xbf, 0xbf, + 0xb4, 0xb1, 0xb7, 0xb9, 0xcb, 0xcc, 0xba, 0xb1, 0xb1, 0xb8, 0xbf, 0xc7, + 0xcb, 0xc2, 0xc6, 0xc6, 0xd0, 0xd2, 0xc0, 0xb1, 0x2e, 0x32, 0x35, 0x37, + 0x37, 0x3a, 0x3c, 0x3b, 0x3a, 0x3b, 0x38, 0x39, 0x39, 0x36, 0x34, 0x35, + 0x37, 0x37, 0x37, 0x34, 0x37, 0x3a, 0x3a, 0x37, 0x38, 0x37, 0x39, 
0x44, + 0x45, 0x41, 0x3d, 0x3e, 0x44, 0x43, 0x47, 0x4a, 0x4c, 0x4b, 0x4a, 0x48, + 0x46, 0x47, 0x45, 0x46, 0x4e, 0x4c, 0x4b, 0x45, 0x52, 0x53, 0x52, 0x55, + 0x57, 0x57, 0x54, 0x56, 0x57, 0x54, 0x56, 0x57, 0x57, 0x57, 0x54, 0x55, + 0x58, 0x59, 0x58, 0x57, 0x57, 0x57, 0x57, 0x57, 0x5a, 0x5b, 0x5a, 0x5b, + 0x5c, 0x5d, 0x5e, 0x5d, 0x5b, 0x5b, 0x5d, 0x60, 0x60, 0x61, 0x62, 0x62, + 0x62, 0x64, 0x64, 0x64, 0x64, 0x64, 0x64, 0x65, 0x64, 0x64, 0x65, 0x64, + 0x62, 0x60, 0x59, 0x4f, 0x3e, 0x32, 0x29, 0x21, 0x21, 0x24, 0x12, 0xf5, + 0xe6, 0xdf, 0xd2, 0xc2, 0xbb, 0xb7, 0xbe, 0xba, 0xb8, 0xbc, 0xc2, 0xbe, + 0xc2, 0xbf, 0xc5, 0xc2, 0xbc, 0xc6, 0xc0, 0xc0, 0xbe, 0xc4, 0xbf, 0xc6, + 0xc7, 0xc8, 0xc1, 0xc2, 0xbe, 0xc5, 0xc8, 0xc6, 0xbe, 0xb7, 0xc5, 0xd7, + 0xd0, 0xc7, 0xc7, 0xc6, 0xc7, 0xbb, 0xbd, 0xb5, 0xb9, 0xb8, 0xbc, 0xc0, + 0xbc, 0xc4, 0xc1, 0xc4, 0xc2, 0xcc, 0xc2, 0xc1, 0xc7, 0xc7, 0xc7, 0xd5, + 0xcb, 0xc7, 0xc5, 0xcd, 0xcc, 0xcf, 0xc2, 0xb6, 0xbc, 0xc5, 0xd2, 0xe6, + 0xdb, 0xcf, 0xc8, 0xc6, 0xc7, 0xcf, 0xc5, 0xb1, 0xb1, 0xac, 0xb3, 0xb4, + 0xb5, 0xb3, 0xb5, 0xbd, 0xb9, 0xbf, 0xb7, 0xb7, 0xb3, 0xad, 0xb1, 0xb6, + 0xc5, 0xc8, 0xbe, 0xba, 0xb6, 0xb8, 0xb8, 0xc2, 0xca, 0xc8, 0xc1, 0xb4, + 0xb5, 0xb8, 0xb9, 0xb7, 0x38, 0x3a, 0x3b, 0x3b, 0x3b, 0x3c, 0x3b, 0x39, + 0x39, 0x39, 0x38, 0x3a, 0x39, 0x36, 0x39, 0x39, 0x37, 0x35, 0x35, 0x39, + 0x3c, 0x3d, 0x39, 0x36, 0x37, 0x39, 0x41, 0x44, 0x41, 0x3a, 0x3d, 0x43, + 0x42, 0x45, 0x48, 0x4a, 0x4b, 0x48, 0x47, 0x47, 0x48, 0x48, 0x45, 0x47, + 0x4d, 0x4d, 0x49, 0x46, 0x54, 0x53, 0x53, 0x55, 0x54, 0x57, 0x54, 0x56, + 0x57, 0x54, 0x57, 0x56, 0x55, 0x57, 0x54, 0x52, 0x56, 0x59, 0x57, 0x59, + 0x57, 0x57, 0x57, 0x57, 0x59, 0x5b, 0x5b, 0x5b, 0x5c, 0x5d, 0x60, 0x5f, + 0x5d, 0x5c, 0x5e, 0x5f, 0x60, 0x62, 0x62, 0x62, 0x62, 0x64, 0x65, 0x64, + 0x64, 0x63, 0x64, 0x64, 0x64, 0x65, 0x65, 0x65, 0x63, 0x62, 0x5f, 0x56, + 0x47, 0x38, 0x2e, 0x31, 0x3c, 0x37, 0x16, 0xee, 0xdb, 0xd5, 0xd5, 0xc8, + 0xba, 0xb7, 0xbe, 0xbc, 0xbb, 0xc7, 0xbf, 0xbf, 0xbe, 0xbc, 0xb6, 
0xbb, + 0xbc, 0xbd, 0xc2, 0xc2, 0xbe, 0xc4, 0xc8, 0xbc, 0xbf, 0xbf, 0xbd, 0xce, + 0xc8, 0xc3, 0xd2, 0xdb, 0xc7, 0xc1, 0xbc, 0xc9, 0xc4, 0xc3, 0xc3, 0xcb, + 0xc7, 0xc2, 0xbf, 0xb9, 0xbd, 0xbd, 0xbd, 0xbe, 0xc4, 0xc9, 0xc4, 0xbb, + 0xc2, 0xc2, 0xc5, 0xca, 0xce, 0xcc, 0xcb, 0xc7, 0xc3, 0xcf, 0xc2, 0xd3, + 0xd6, 0xca, 0xbe, 0xbb, 0xc2, 0xc2, 0xcb, 0xe7, 0xf7, 0xeb, 0xcd, 0xc2, + 0xc2, 0xd1, 0xd2, 0xb2, 0xac, 0xa9, 0xb2, 0xb6, 0xb7, 0xb3, 0xb2, 0xbe, + 0xba, 0xbc, 0xbc, 0xb9, 0xb4, 0xaf, 0xb2, 0xb9, 0xcc, 0xd1, 0xbc, 0xba, + 0xb1, 0xaf, 0xb6, 0xbd, 0xbd, 0xbc, 0xbc, 0xc0, 0xb4, 0xb6, 0xb8, 0xc2, + 0x37, 0x37, 0x39, 0x3b, 0x38, 0x3a, 0x3b, 0x3b, 0x37, 0x38, 0x37, 0x38, + 0x37, 0x38, 0x3c, 0x3a, 0x39, 0x37, 0x38, 0x37, 0x39, 0x37, 0x36, 0x39, + 0x3c, 0x3e, 0x42, 0x40, 0x3c, 0x39, 0x3f, 0x45, 0x46, 0x48, 0x4b, 0x47, + 0x47, 0x45, 0x47, 0x46, 0x4a, 0x48, 0x44, 0x4a, 0x4e, 0x4f, 0x47, 0x47, + 0x52, 0x52, 0x53, 0x51, 0x52, 0x57, 0x54, 0x57, 0x5b, 0x56, 0x56, 0x55, + 0x53, 0x56, 0x52, 0x52, 0x53, 0x56, 0x57, 0x58, 0x57, 0x57, 0x58, 0x58, + 0x59, 0x5a, 0x5a, 0x5a, 0x5d, 0x5e, 0x5e, 0x62, 0x5f, 0x5f, 0x5d, 0x5f, + 0x60, 0x61, 0x63, 0x63, 0x63, 0x64, 0x65, 0x64, 0x64, 0x64, 0x65, 0x64, + 0x64, 0x65, 0x65, 0x65, 0x65, 0x64, 0x62, 0x5d, 0x53, 0x47, 0x42, 0x47, + 0x4d, 0x3d, 0xfc, 0xd9, 0xcd, 0xc2, 0xc7, 0xc9, 0xc7, 0xcd, 0xc6, 0xbc, + 0xbc, 0xc0, 0xbb, 0xb8, 0xb8, 0xb7, 0xb7, 0xb9, 0xbc, 0xb8, 0xb8, 0xc2, + 0xc2, 0xc5, 0xc7, 0xb8, 0xb9, 0xba, 0xc8, 0xd4, 0xd0, 0xbc, 0xd1, 0xd8, + 0xc7, 0xc6, 0xc4, 0xc7, 0xc7, 0xc2, 0xc8, 0xce, 0xd0, 0xc3, 0xc6, 0xc7, + 0xc2, 0xbe, 0xb7, 0xba, 0xc0, 0xc4, 0xc1, 0xb4, 0xb8, 0xc2, 0xc4, 0xc4, + 0xc6, 0xbc, 0xc3, 0xc5, 0xb8, 0xc6, 0xc7, 0xd5, 0xc6, 0xc0, 0xbf, 0xba, + 0xbd, 0xc2, 0xc3, 0xc2, 0xcf, 0xd6, 0xd8, 0xd1, 0xc3, 0xc5, 0xc6, 0xbd, + 0xb7, 0xb7, 0xb9, 0xbc, 0xc6, 0xc2, 0xb7, 0xb5, 0xbd, 0xc2, 0xbb, 0xb7, + 0xb7, 0xb3, 0xbe, 0xc8, 0xd0, 0xd4, 0xbc, 0xbc, 0xb6, 0xb4, 0xb7, 0xb8, + 0xb8, 0xb9, 0xc1, 0xbd, 0xbd, 0xba, 0xbd, 0xc1, 0x34, 0x34, 0x35, 
0x37, + 0x3a, 0x3b, 0x3b, 0x38, 0x39, 0x3a, 0x39, 0x3a, 0x3a, 0x3b, 0x3a, 0x37, + 0x38, 0x37, 0x37, 0x35, 0x33, 0x37, 0x3a, 0x3e, 0x3f, 0x41, 0x42, 0x40, + 0x3c, 0x3e, 0x45, 0x47, 0x48, 0x4a, 0x4a, 0x46, 0x48, 0x44, 0x45, 0x48, + 0x48, 0x47, 0x46, 0x4b, 0x4f, 0x4e, 0x47, 0x49, 0x52, 0x53, 0x54, 0x4e, + 0x50, 0x55, 0x54, 0x56, 0x58, 0x56, 0x54, 0x52, 0x53, 0x56, 0x53, 0x52, + 0x54, 0x52, 0x54, 0x57, 0x59, 0x58, 0x57, 0x58, 0x59, 0x5a, 0x5a, 0x59, + 0x5b, 0x5f, 0x5e, 0x61, 0x61, 0x61, 0x60, 0x60, 0x60, 0x62, 0x63, 0x63, + 0x63, 0x64, 0x64, 0x64, 0x64, 0x65, 0x65, 0x64, 0x65, 0x64, 0x65, 0x65, + 0x64, 0x65, 0x63, 0x61, 0x5a, 0x52, 0x55, 0x57, 0x57, 0x3d, 0x03, 0xdc, + 0xca, 0xc5, 0xcc, 0xc9, 0xc5, 0xd1, 0xcc, 0xbe, 0xc0, 0xc5, 0xc5, 0xb7, + 0xb7, 0xbb, 0xbc, 0xba, 0xbd, 0xb8, 0xbe, 0xcd, 0xc2, 0xc0, 0xc2, 0xc2, + 0xbc, 0xc1, 0xd8, 0xdc, 0xcc, 0xc2, 0xc8, 0xd4, 0xd2, 0xcc, 0xcd, 0xc2, + 0xcc, 0xbf, 0xc4, 0xc8, 0xc6, 0xc8, 0xc6, 0xc8, 0xc2, 0xc6, 0xbb, 0xc1, + 0xb7, 0xb8, 0xb9, 0xb6, 0xb9, 0xbf, 0xc0, 0xc0, 0xc4, 0xb7, 0xb4, 0xbe, + 0xc2, 0xc5, 0xbf, 0xc0, 0xbe, 0xc0, 0xc0, 0xc2, 0xc4, 0xc2, 0xb9, 0xba, + 0xc1, 0xbc, 0xcc, 0xdb, 0xc4, 0xc1, 0xba, 0xc1, 0xc5, 0xc4, 0xc1, 0xc4, + 0xc0, 0xc2, 0xbd, 0xb7, 0xb9, 0xc2, 0xb8, 0xb6, 0xb6, 0xb1, 0xb8, 0xba, + 0xcc, 0xd6, 0xc5, 0xbf, 0xbd, 0xbe, 0xc7, 0xba, 0xbc, 0xb9, 0xb7, 0xb7, + 0xb9, 0xb8, 0xbc, 0xbf, 0x32, 0x33, 0x35, 0x37, 0x38, 0x37, 0x38, 0x39, + 0x3a, 0x3b, 0x3b, 0x38, 0x3d, 0x3d, 0x3a, 0x38, 0x37, 0x37, 0x36, 0x36, + 0x39, 0x3d, 0x3f, 0x3f, 0x40, 0x42, 0x42, 0x3f, 0x40, 0x45, 0x47, 0x48, + 0x4b, 0x4b, 0x47, 0x47, 0x47, 0x42, 0x44, 0x48, 0x42, 0x43, 0x47, 0x4b, + 0x51, 0x4d, 0x4b, 0x4d, 0x52, 0x53, 0x53, 0x4d, 0x4d, 0x51, 0x54, 0x57, + 0x58, 0x55, 0x54, 0x52, 0x4f, 0x56, 0x57, 0x55, 0x52, 0x53, 0x54, 0x57, + 0x59, 0x59, 0x57, 0x57, 0x58, 0x5b, 0x5b, 0x5a, 0x5a, 0x5e, 0x60, 0x60, + 0x62, 0x61, 0x60, 0x60, 0x60, 0x62, 0x63, 0x63, 0x63, 0x64, 0x63, 0x62, + 0x63, 0x64, 0x65, 0x66, 0x66, 0x65, 0x65, 0x65, 0x65, 0x65, 0x63, 
0x62, + 0x5f, 0x5d, 0x5e, 0x5e, 0x5b, 0x3e, 0x1b, 0xf9, 0xe8, 0xe4, 0xd6, 0xd7, + 0xd5, 0xd9, 0xce, 0xc6, 0xc7, 0xc8, 0xc2, 0xb9, 0xba, 0xc1, 0xbc, 0xb7, + 0xbc, 0xbf, 0xc2, 0xc4, 0xc7, 0xba, 0xbd, 0xbe, 0xba, 0xc9, 0xe1, 0xd3, + 0xbf, 0xc2, 0xce, 0xc7, 0xc2, 0xbf, 0xcc, 0xc8, 0xca, 0xbb, 0xb8, 0xc7, + 0xd1, 0xdc, 0xc4, 0xc3, 0xbd, 0xc8, 0xbd, 0xb8, 0xb1, 0xb6, 0xc0, 0xb7, + 0xbc, 0xbe, 0xbc, 0xbc, 0xc2, 0xbe, 0xb8, 0xbe, 0xc0, 0xbf, 0xbb, 0xbe, + 0xc9, 0xca, 0xc4, 0xc7, 0xc7, 0xc4, 0xbc, 0xbc, 0xba, 0xc2, 0xcc, 0xc8, + 0xbe, 0xc6, 0xc1, 0xb8, 0xc3, 0xc2, 0xb9, 0xbc, 0xc7, 0xc2, 0xbc, 0xb7, + 0xb7, 0xb9, 0xc0, 0xb8, 0xb7, 0xb2, 0xb2, 0xb1, 0xc4, 0xdc, 0xc6, 0xbf, + 0xbb, 0xbf, 0xcb, 0xc2, 0xb9, 0xbc, 0xba, 0xc2, 0xb6, 0xbb, 0xbe, 0xc5, + 0x34, 0x34, 0x37, 0x36, 0x36, 0x37, 0x39, 0x38, 0x3a, 0x3c, 0x3d, 0x3e, + 0x3d, 0x3b, 0x3b, 0x3a, 0x36, 0x37, 0x37, 0x3b, 0x3d, 0x3e, 0x3d, 0x3c, + 0x3e, 0x43, 0x42, 0x41, 0x46, 0x47, 0x47, 0x48, 0x47, 0x46, 0x47, 0x44, + 0x41, 0x3c, 0x42, 0x47, 0x41, 0x43, 0x46, 0x4b, 0x4e, 0x4d, 0x4c, 0x4f, + 0x52, 0x55, 0x52, 0x4d, 0x4d, 0x4f, 0x54, 0x57, 0x57, 0x54, 0x52, 0x50, + 0x4f, 0x57, 0x58, 0x57, 0x53, 0x52, 0x55, 0x57, 0x57, 0x5a, 0x58, 0x57, + 0x59, 0x5c, 0x5a, 0x5b, 0x5b, 0x5e, 0x5f, 0x60, 0x61, 0x60, 0x5f, 0x5e, + 0x60, 0x60, 0x62, 0x64, 0x64, 0x64, 0x63, 0x63, 0x64, 0x64, 0x64, 0x65, + 0x65, 0x65, 0x65, 0x65, 0x64, 0x64, 0x63, 0x62, 0x62, 0x61, 0x62, 0x62, + 0x57, 0x3f, 0x1d, 0xf9, 0xeb, 0xdc, 0xd5, 0xe7, 0xe5, 0xe2, 0xcc, 0xc4, + 0xc5, 0xce, 0xd0, 0xc7, 0xc2, 0xc1, 0xc0, 0xbd, 0xc4, 0xc7, 0xc7, 0xc5, + 0xc7, 0xc3, 0xc0, 0xba, 0xc1, 0xcc, 0xcd, 0xbc, 0xba, 0xc7, 0xc4, 0xc3, + 0xb9, 0xb8, 0xc1, 0xc4, 0xbf, 0xb7, 0xb3, 0xc8, 0xc7, 0xd1, 0xc2, 0xb8, + 0xbd, 0xc4, 0xc6, 0xb7, 0xb1, 0xb2, 0xbc, 0xb1, 0xb9, 0xbc, 0xb7, 0xb9, + 0xbf, 0xc3, 0xbe, 0xc7, 0xc9, 0xc4, 0xbd, 0xb8, 0xbc, 0xd2, 0xc2, 0xb7, + 0xc5, 0xc0, 0xba, 0xb5, 0xb4, 0xbf, 0xc6, 0xc1, 0xbf, 0xc0, 0xbd, 0xb4, + 0xbc, 0xbb, 0xbb, 0xbb, 0xce, 0xca, 0xc0, 0xc2, 0xbb, 0xb3, 0xb5, 
0xae, + 0xb0, 0xb3, 0xb1, 0xb2, 0xb7, 0xc1, 0xb8, 0xb7, 0xc7, 0xcf, 0xc1, 0xc6, + 0xc0, 0xc2, 0xbe, 0xc2, 0xbb, 0xbe, 0xc0, 0xbc, 0x34, 0x34, 0x36, 0x35, + 0x35, 0x36, 0x37, 0x37, 0x37, 0x39, 0x3d, 0x41, 0x3e, 0x3d, 0x3d, 0x3c, + 0x37, 0x37, 0x3f, 0x40, 0x3f, 0x40, 0x3d, 0x39, 0x3f, 0x43, 0x42, 0x43, + 0x45, 0x46, 0x45, 0x45, 0x42, 0x42, 0x42, 0x40, 0x3e, 0x3f, 0x46, 0x44, + 0x3d, 0x3d, 0x47, 0x4d, 0x4d, 0x4d, 0x4d, 0x52, 0x52, 0x55, 0x4e, 0x4e, + 0x4d, 0x50, 0x55, 0x57, 0x56, 0x57, 0x52, 0x4f, 0x52, 0x57, 0x5a, 0x57, + 0x56, 0x52, 0x57, 0x57, 0x58, 0x59, 0x59, 0x59, 0x59, 0x5c, 0x5a, 0x5c, + 0x5d, 0x5d, 0x5f, 0x60, 0x60, 0x61, 0x61, 0x5f, 0x60, 0x61, 0x62, 0x63, + 0x63, 0x63, 0x63, 0x64, 0x65, 0x65, 0x65, 0x64, 0x64, 0x65, 0x65, 0x65, + 0x64, 0x63, 0x63, 0x63, 0x62, 0x62, 0x63, 0x63, 0x5a, 0x4c, 0x36, 0x1e, + 0x05, 0xec, 0xee, 0xfe, 0xec, 0xe6, 0xd2, 0xcc, 0xc8, 0xcd, 0xd0, 0xc7, + 0xbd, 0xbe, 0xcc, 0xc1, 0xc4, 0xc9, 0xcd, 0xcd, 0xc3, 0xc8, 0xc7, 0xbd, + 0xc3, 0xc1, 0xb7, 0xae, 0xb3, 0xc0, 0xbe, 0xbc, 0xb6, 0xb5, 0xb4, 0xb0, + 0xb5, 0xb5, 0xb1, 0xb4, 0xb7, 0xc3, 0xc0, 0xbb, 0xc2, 0xb7, 0xbe, 0xb9, + 0xb7, 0xc5, 0xbe, 0xb8, 0xb9, 0xb4, 0xb7, 0xbe, 0xc2, 0xc0, 0xc2, 0xca, + 0xcc, 0xc7, 0xbd, 0xc2, 0xc7, 0xc7, 0xc1, 0xbf, 0xce, 0xbd, 0xb5, 0xbd, + 0xc2, 0xc1, 0xc3, 0xc0, 0xbf, 0xb7, 0xbc, 0xbe, 0xbc, 0xb3, 0xb8, 0xb7, + 0xbb, 0xc0, 0xbc, 0xc8, 0xbb, 0xb0, 0xb1, 0xac, 0xb1, 0xb7, 0xba, 0xb4, + 0xb5, 0xb4, 0xb4, 0xbc, 0xc9, 0xd5, 0xc7, 0xbe, 0xcc, 0xc6, 0xbb, 0xba, + 0xbf, 0xc2, 0xbd, 0xbc, 0x32, 0x31, 0x32, 0x32, 0x34, 0x32, 0x35, 0x37, + 0x39, 0x3b, 0x41, 0x40, 0x3e, 0x3d, 0x3a, 0x37, 0x38, 0x3f, 0x42, 0x41, + 0x40, 0x3e, 0x3a, 0x39, 0x42, 0x43, 0x44, 0x46, 0x47, 0x46, 0x42, 0x3f, + 0x3e, 0x3d, 0x40, 0x3f, 0x42, 0x44, 0x46, 0x41, 0x3b, 0x3d, 0x48, 0x4d, + 0x4d, 0x4e, 0x51, 0x52, 0x52, 0x51, 0x4e, 0x51, 0x4e, 0x50, 0x55, 0x57, + 0x53, 0x56, 0x55, 0x52, 0x54, 0x55, 0x5b, 0x58, 0x57, 0x55, 0x57, 0x57, + 0x58, 0x5a, 0x5a, 0x5b, 0x5a, 0x5c, 0x5a, 0x5b, 0x5d, 0x5d, 0x5e, 
0x60, + 0x5f, 0x61, 0x62, 0x60, 0x5f, 0x61, 0x60, 0x62, 0x62, 0x62, 0x62, 0x64, + 0x65, 0x65, 0x64, 0x65, 0x64, 0x64, 0x64, 0x64, 0x63, 0x63, 0x63, 0x63, + 0x63, 0x64, 0x64, 0x63, 0x5f, 0x58, 0x50, 0x47, 0x3d, 0x29, 0x16, 0x12, + 0xf3, 0xe2, 0xdb, 0xd3, 0xce, 0xcb, 0xc7, 0xc1, 0xb8, 0xbb, 0xbc, 0xbe, + 0xc1, 0xca, 0xcc, 0xd3, 0xc8, 0xd2, 0xce, 0xc8, 0xc9, 0xce, 0xba, 0xb7, + 0xc2, 0xc0, 0xbb, 0xbc, 0xbb, 0xb9, 0xb7, 0xb8, 0xb1, 0xaa, 0xac, 0xae, + 0xb1, 0xbc, 0xb2, 0xb7, 0xbf, 0xbc, 0xc1, 0xbe, 0xc1, 0xbe, 0xc1, 0xc6, + 0xbe, 0xb8, 0xb8, 0xbe, 0xbc, 0xc0, 0xc3, 0xca, 0xc7, 0xc2, 0xbd, 0xbf, + 0xc4, 0xc4, 0xc2, 0xc3, 0xcb, 0xbb, 0xb7, 0xb5, 0xbb, 0xbe, 0xbc, 0xbd, + 0xbf, 0xb7, 0xb7, 0xc0, 0xba, 0xb6, 0xc1, 0xb7, 0xc0, 0xba, 0xb1, 0xbc, + 0xb5, 0xae, 0xad, 0xaf, 0xb3, 0xba, 0xba, 0xb8, 0xb7, 0xb5, 0xbc, 0xc2, + 0xc7, 0xc7, 0xc3, 0xb6, 0xbe, 0xc8, 0xcc, 0xc8, 0xbe, 0xbc, 0xb7, 0xb7, + 0x2d, 0x2e, 0x2f, 0x32, 0x36, 0x36, 0x39, 0x3b, 0x3c, 0x3d, 0x3f, 0x3d, + 0x3d, 0x3b, 0x39, 0x3b, 0x3f, 0x45, 0x42, 0x41, 0x40, 0x3d, 0x3d, 0x41, + 0x44, 0x43, 0x45, 0x45, 0x45, 0x43, 0x40, 0x3d, 0x3f, 0x3e, 0x41, 0x43, + 0x42, 0x45, 0x42, 0x3f, 0x3b, 0x40, 0x48, 0x4b, 0x4d, 0x52, 0x52, 0x52, + 0x51, 0x4e, 0x4f, 0x51, 0x4d, 0x51, 0x55, 0x57, 0x54, 0x54, 0x54, 0x52, + 0x55, 0x55, 0x59, 0x58, 0x57, 0x57, 0x57, 0x59, 0x59, 0x5b, 0x5b, 0x5b, + 0x5a, 0x5c, 0x59, 0x5b, 0x5e, 0x5d, 0x5f, 0x60, 0x60, 0x62, 0x62, 0x60, + 0x60, 0x62, 0x62, 0x62, 0x62, 0x62, 0x62, 0x63, 0x64, 0x64, 0x63, 0x64, + 0x64, 0x64, 0x64, 0x64, 0x63, 0x63, 0x63, 0x63, 0x63, 0x64, 0x65, 0x64, + 0x62, 0x5d, 0x58, 0x56, 0x53, 0x4e, 0x48, 0x2b, 0xff, 0xe5, 0xd0, 0xcb, + 0xc7, 0xc2, 0xbc, 0xc0, 0xc0, 0xc1, 0xb4, 0xbc, 0xbe, 0xc2, 0xc5, 0xc9, + 0xcc, 0xd1, 0xc7, 0xcb, 0xd0, 0xdc, 0xc2, 0xbb, 0xc7, 0xc2, 0xb9, 0xb7, + 0xb9, 0xbd, 0xb9, 0xb7, 0xba, 0xb1, 0xb2, 0xb5, 0xb1, 0xba, 0xbc, 0xb1, + 0xc2, 0xd7, 0xc9, 0xc4, 0xbd, 0xb7, 0xbe, 0xc0, 0xbd, 0xbb, 0xc3, 0xbe, + 0xb7, 0xb9, 0xc1, 0xc0, 0xbc, 0xb4, 0xb8, 0xb9, 0xbd, 0xbc, 0xc4, 
0xbc, + 0xc3, 0xbd, 0xc1, 0xbc, 0xb7, 0xb8, 0xb4, 0xb6, 0xbb, 0xb6, 0xbc, 0xc0, + 0xb7, 0xbc, 0xc2, 0xbb, 0xcb, 0xc0, 0xba, 0xba, 0xb3, 0xb2, 0xad, 0xb2, + 0xb8, 0xbf, 0xb3, 0xb4, 0xb9, 0xbb, 0xc5, 0xc2, 0xc3, 0xbe, 0xb8, 0xb7, + 0xbf, 0xcb, 0xc2, 0xb9, 0xb8, 0xba, 0xc2, 0xbd, 0x2e, 0x2e, 0x32, 0x37, + 0x3b, 0x3b, 0x3d, 0x3d, 0x3d, 0x3d, 0x3d, 0x3b, 0x3b, 0x39, 0x3c, 0x42, + 0x44, 0x44, 0x43, 0x41, 0x3e, 0x3e, 0x3f, 0x42, 0x43, 0x42, 0x43, 0x44, + 0x42, 0x42, 0x3f, 0x3d, 0x3d, 0x3f, 0x42, 0x42, 0x41, 0x43, 0x3e, 0x3f, + 0x3e, 0x43, 0x47, 0x4c, 0x50, 0x51, 0x51, 0x51, 0x50, 0x4f, 0x51, 0x51, + 0x4f, 0x52, 0x54, 0x57, 0x56, 0x52, 0x52, 0x52, 0x53, 0x57, 0x57, 0x57, + 0x5a, 0x58, 0x58, 0x5b, 0x5a, 0x5a, 0x5c, 0x59, 0x5a, 0x59, 0x57, 0x59, + 0x5d, 0x5d, 0x5f, 0x61, 0x62, 0x61, 0x60, 0x5e, 0x60, 0x61, 0x62, 0x62, + 0x62, 0x62, 0x62, 0x62, 0x63, 0x63, 0x64, 0x64, 0x64, 0x64, 0x64, 0x64, + 0x63, 0x64, 0x63, 0x63, 0x64, 0x65, 0x65, 0x64, 0x63, 0x62, 0x62, 0x61, + 0x5e, 0x5c, 0x55, 0x43, 0x1d, 0xf2, 0xcc, 0xc6, 0xc2, 0xc5, 0xc4, 0xc4, + 0xbf, 0xbd, 0xb5, 0xb7, 0xbd, 0xbf, 0xc2, 0xc2, 0xc4, 0xc4, 0xbe, 0xcc, + 0xd5, 0xde, 0xcc, 0xbc, 0xbc, 0xc1, 0xba, 0xb9, 0xba, 0xc9, 0xb6, 0xac, + 0xb8, 0xb7, 0xb8, 0xba, 0xb7, 0xb7, 0xbb, 0xb7, 0xc7, 0xd7, 0xcc, 0xb7, + 0xb3, 0xc2, 0xce, 0xbf, 0xb9, 0xc1, 0xcc, 0xc4, 0xc7, 0xc2, 0xc0, 0xbc, + 0xb7, 0xb9, 0xcd, 0xbc, 0xb8, 0xbb, 0xd4, 0xc8, 0xc1, 0xc4, 0xcd, 0xc7, + 0xbb, 0xb3, 0xb4, 0xb1, 0xb4, 0xb2, 0xbc, 0xc1, 0xc3, 0xc6, 0xbe, 0xb6, + 0xc2, 0xc5, 0xbd, 0xb7, 0xba, 0xb7, 0xb2, 0xb1, 0xbb, 0xc4, 0xc5, 0xd0, + 0xc7, 0xc2, 0xbe, 0xc2, 0xc2, 0xc0, 0xc1, 0xc5, 0xc0, 0xbd, 0xc6, 0xc0, + 0xb7, 0xb7, 0xc6, 0xc6, 0x32, 0x33, 0x37, 0x36, 0x37, 0x37, 0x37, 0x35, + 0x35, 0x37, 0x37, 0x39, 0x3b, 0x3e, 0x41, 0x44, 0x46, 0x45, 0x41, 0x3e, + 0x3d, 0x3f, 0x3e, 0x3e, 0x42, 0x41, 0x41, 0x42, 0x42, 0x41, 0x3d, 0x3d, + 0x40, 0x3e, 0x40, 0x40, 0x42, 0x42, 0x41, 0x40, 0x40, 0x44, 0x46, 0x4d, + 0x52, 0x4f, 0x51, 0x4e, 0x50, 0x50, 0x52, 0x51, 0x52, 0x54, 0x54, 
0x57, + 0x57, 0x54, 0x53, 0x52, 0x52, 0x57, 0x58, 0x58, 0x5b, 0x58, 0x59, 0x5b, + 0x5c, 0x59, 0x5b, 0x58, 0x58, 0x56, 0x57, 0x59, 0x5d, 0x5d, 0x5e, 0x60, + 0x60, 0x5f, 0x61, 0x60, 0x60, 0x61, 0x60, 0x62, 0x63, 0x62, 0x63, 0x63, + 0x63, 0x63, 0x63, 0x63, 0x64, 0x65, 0x63, 0x64, 0x63, 0x63, 0x63, 0x64, + 0x66, 0x65, 0x65, 0x66, 0x65, 0x63, 0x62, 0x62, 0x62, 0x5f, 0x5b, 0x51, + 0x3c, 0x14, 0xee, 0xd8, 0xc7, 0xca, 0xd0, 0xc9, 0xc0, 0xbf, 0xc1, 0xbf, + 0xc2, 0xc5, 0xc2, 0xbd, 0xc1, 0xc0, 0xba, 0xc4, 0xd7, 0xe5, 0xd2, 0xc7, + 0xbc, 0xc0, 0xba, 0xb8, 0xb6, 0xc4, 0xba, 0xbc, 0xb9, 0xbb, 0xc2, 0xbe, + 0xb7, 0xb6, 0xae, 0xad, 0xbe, 0xc4, 0xc2, 0xc1, 0xd3, 0xcf, 0xc5, 0xc0, + 0xb7, 0xc4, 0xcd, 0xc8, 0xc6, 0xbb, 0xbc, 0xbb, 0xbf, 0xbc, 0xc4, 0xbd, + 0xc0, 0xc5, 0xbd, 0xc2, 0xcd, 0xcd, 0xbf, 0xb8, 0xb9, 0xb1, 0xb7, 0xb7, + 0xb2, 0xb4, 0xbe, 0xbc, 0xc4, 0xc7, 0xb7, 0xb1, 0xb3, 0xc0, 0xbd, 0xb8, + 0xb7, 0xc0, 0xb8, 0xb3, 0xbe, 0xc1, 0xc9, 0xc2, 0xc7, 0xca, 0xbd, 0xcc, + 0xc3, 0xc2, 0xca, 0xc9, 0xc3, 0xbe, 0xc6, 0xc2, 0xb9, 0xc7, 0xc6, 0xbc, + 0x30, 0x31, 0x32, 0x32, 0x2f, 0x2f, 0x30, 0x30, 0x32, 0x37, 0x36, 0x37, + 0x3d, 0x41, 0x42, 0x41, 0x43, 0x42, 0x3b, 0x3a, 0x3e, 0x3d, 0x3d, 0x40, + 0x42, 0x41, 0x41, 0x40, 0x40, 0x40, 0x3d, 0x41, 0x41, 0x40, 0x3f, 0x42, + 0x42, 0x42, 0x3f, 0x3f, 0x44, 0x44, 0x47, 0x4f, 0x4e, 0x51, 0x50, 0x4e, + 0x4f, 0x50, 0x52, 0x53, 0x54, 0x54, 0x53, 0x56, 0x57, 0x57, 0x55, 0x52, + 0x54, 0x57, 0x57, 0x5a, 0x5b, 0x58, 0x5b, 0x5c, 0x5d, 0x59, 0x5a, 0x57, + 0x56, 0x56, 0x58, 0x5b, 0x5d, 0x5d, 0x5e, 0x60, 0x60, 0x60, 0x60, 0x60, + 0x60, 0x61, 0x61, 0x61, 0x62, 0x62, 0x62, 0x62, 0x62, 0x63, 0x63, 0x63, + 0x65, 0x64, 0x62, 0x62, 0x62, 0x63, 0x64, 0x65, 0x65, 0x64, 0x65, 0x65, + 0x65, 0x63, 0x62, 0x62, 0x62, 0x61, 0x5d, 0x57, 0x4f, 0x39, 0x0c, 0xe1, + 0xd0, 0xca, 0xc3, 0xc2, 0xc0, 0xc3, 0xda, 0xc2, 0xc3, 0xc1, 0xc7, 0xc3, + 0xba, 0xb9, 0xba, 0xb8, 0xd0, 0xf1, 0xd4, 0xc7, 0xb6, 0xb7, 0xb6, 0xb5, + 0xb9, 0xc2, 0xba, 0xc2, 0xc2, 0xbe, 0xc6, 0xc3, 0xb8, 0xae, 0xa8, 
0xaa, + 0xb7, 0xba, 0xc3, 0xcc, 0xd3, 0xc7, 0xbf, 0xbf, 0xc7, 0xcb, 0xca, 0xc6, + 0xc2, 0xb2, 0xba, 0xbc, 0xb9, 0xb1, 0xb1, 0xc0, 0xc9, 0xc4, 0xb8, 0xba, + 0xbc, 0xbf, 0xc1, 0xbd, 0xbc, 0xb3, 0xc2, 0xc6, 0xbe, 0xba, 0xbf, 0xbe, + 0xc0, 0xc3, 0xb9, 0xb5, 0xb0, 0xb7, 0xb8, 0xb7, 0xba, 0xc1, 0xb8, 0xbb, + 0xc1, 0xba, 0xbf, 0xbe, 0xc7, 0xc4, 0xbf, 0xca, 0xb7, 0xc2, 0xd0, 0xc4, + 0xc1, 0xbc, 0xc8, 0xce, 0xc2, 0xc7, 0xcb, 0xc0, 0x2c, 0x2d, 0x2c, 0x2e, + 0x2a, 0x2d, 0x31, 0x34, 0x36, 0x37, 0x37, 0x3d, 0x3c, 0x3e, 0x3c, 0x3b, + 0x3b, 0x3b, 0x3b, 0x3d, 0x3d, 0x3f, 0x40, 0x41, 0x40, 0x40, 0x42, 0x42, + 0x3e, 0x3e, 0x40, 0x43, 0x40, 0x3d, 0x42, 0x45, 0x46, 0x45, 0x43, 0x42, + 0x47, 0x49, 0x4d, 0x4f, 0x4d, 0x4d, 0x4c, 0x52, 0x50, 0x51, 0x53, 0x55, + 0x54, 0x52, 0x53, 0x56, 0x57, 0x57, 0x57, 0x56, 0x56, 0x57, 0x57, 0x5b, + 0x5a, 0x5b, 0x5c, 0x5a, 0x5c, 0x5b, 0x5b, 0x57, 0x57, 0x55, 0x57, 0x5b, + 0x5c, 0x5d, 0x5e, 0x5e, 0x61, 0x61, 0x60, 0x60, 0x60, 0x60, 0x62, 0x62, + 0x62, 0x62, 0x62, 0x62, 0x62, 0x62, 0x63, 0x64, 0x63, 0x62, 0x62, 0x62, + 0x62, 0x64, 0x64, 0x64, 0x65, 0x65, 0x64, 0x64, 0x64, 0x63, 0x63, 0x62, + 0x61, 0x5f, 0x5d, 0x59, 0x53, 0x4a, 0x33, 0x00, 0xdd, 0xc3, 0xb2, 0xba, + 0xc3, 0xc1, 0xcc, 0xc4, 0xc1, 0xbc, 0xc0, 0xc3, 0xc6, 0xc5, 0xc2, 0xb9, + 0xc1, 0xd1, 0xcf, 0xca, 0xbb, 0xc0, 0xc3, 0xbc, 0xce, 0xc4, 0xbe, 0xbc, + 0xc1, 0xc6, 0xba, 0xbb, 0xbc, 0xb5, 0xb2, 0xaf, 0xb8, 0xb7, 0xc9, 0xdc, + 0xce, 0xc6, 0xc2, 0xbc, 0xc0, 0xc2, 0xc2, 0xc8, 0xc7, 0xc2, 0xc2, 0xba, + 0xb6, 0xb8, 0xc7, 0xc7, 0xc6, 0xc3, 0xb9, 0xb9, 0xb5, 0xba, 0xc0, 0xc0, + 0xc7, 0xbd, 0xbc, 0xc2, 0xbc, 0xbc, 0xc6, 0xc8, 0xca, 0xc5, 0xbc, 0xb7, + 0xb4, 0xb7, 0xb6, 0xb9, 0xb8, 0xb7, 0xba, 0xbf, 0xc0, 0xbb, 0xb7, 0xbf, + 0xd2, 0xc2, 0xc3, 0xcc, 0xba, 0xc2, 0xbe, 0xc2, 0xbf, 0xbb, 0xc0, 0xc2, + 0xbe, 0xbd, 0xcb, 0xc8, 0x27, 0x27, 0x2a, 0x2d, 0x2f, 0x32, 0x31, 0x34, + 0x34, 0x33, 0x34, 0x3d, 0x3a, 0x39, 0x37, 0x38, 0x39, 0x3b, 0x40, 0x3d, + 0x3d, 0x41, 0x42, 0x40, 0x40, 0x42, 0x44, 0x42, 0x3f, 0x42, 0x42, 
0x40, + 0x3e, 0x41, 0x47, 0x47, 0x47, 0x47, 0x47, 0x44, 0x47, 0x4a, 0x4b, 0x4c, + 0x4a, 0x4a, 0x4c, 0x51, 0x4f, 0x51, 0x52, 0x53, 0x54, 0x54, 0x55, 0x56, + 0x57, 0x57, 0x57, 0x57, 0x57, 0x57, 0x56, 0x59, 0x59, 0x5d, 0x5d, 0x5a, + 0x5c, 0x5c, 0x5b, 0x57, 0x57, 0x56, 0x57, 0x5a, 0x5b, 0x5b, 0x5d, 0x5d, + 0x60, 0x62, 0x61, 0x61, 0x60, 0x5f, 0x62, 0x64, 0x63, 0x62, 0x62, 0x62, + 0x62, 0x63, 0x63, 0x63, 0x62, 0x62, 0x62, 0x62, 0x62, 0x63, 0x63, 0x64, + 0x65, 0x64, 0x63, 0x63, 0x64, 0x63, 0x62, 0x62, 0x62, 0x61, 0x5e, 0x5c, + 0x58, 0x51, 0x3e, 0x23, 0xed, 0xc7, 0xb7, 0xbc, 0xc8, 0xc1, 0xcb, 0xd1, + 0xc2, 0xc2, 0xbc, 0xba, 0xbd, 0xbc, 0xbb, 0xbc, 0xb7, 0xcb, 0xd1, 0xd0, + 0xbd, 0xc3, 0xc8, 0xc4, 0xc5, 0xbd, 0xbe, 0xb7, 0xb4, 0xc4, 0xbe, 0xc4, + 0xd3, 0xc2, 0xbc, 0xb3, 0xb8, 0xb4, 0xb7, 0xc6, 0xc8, 0xc4, 0xc1, 0xbb, + 0xb8, 0xb6, 0xb7, 0xca, 0xc7, 0xb8, 0xb4, 0xb4, 0xba, 0xba, 0xbc, 0xbd, + 0xb9, 0xc8, 0xba, 0xba, 0xb9, 0xba, 0xbc, 0xc0, 0xc7, 0xc3, 0xc6, 0xc4, + 0xb5, 0xba, 0xcb, 0xc7, 0xc4, 0xc6, 0xbc, 0xbd, 0xb4, 0xb5, 0xb4, 0xb9, + 0xba, 0xba, 0xb9, 0xc3, 0xb9, 0xb6, 0xb7, 0xc0, 0xbf, 0xb9, 0xc3, 0xca, + 0xc4, 0xc9, 0xbc, 0xbd, 0xbc, 0xba, 0xbc, 0xc2, 0xba, 0xbc, 0xc2, 0xc2, + 0x25, 0x27, 0x2c, 0x2f, 0x32, 0x34, 0x35, 0x35, 0x35, 0x34, 0x39, 0x39, + 0x35, 0x34, 0x37, 0x37, 0x3d, 0x41, 0x40, 0x3d, 0x41, 0x42, 0x42, 0x42, + 0x41, 0x41, 0x42, 0x42, 0x42, 0x43, 0x3f, 0x3e, 0x40, 0x44, 0x47, 0x46, + 0x47, 0x4b, 0x49, 0x44, 0x4a, 0x4b, 0x4a, 0x4b, 0x49, 0x4a, 0x4e, 0x51, + 0x50, 0x51, 0x51, 0x51, 0x52, 0x56, 0x56, 0x56, 0x59, 0x58, 0x58, 0x57, + 0x57, 0x56, 0x57, 0x59, 0x58, 0x5c, 0x5d, 0x5b, 0x5c, 0x5b, 0x57, 0x57, + 0x56, 0x56, 0x59, 0x5c, 0x5a, 0x5b, 0x5d, 0x5d, 0x5f, 0x62, 0x5f, 0x60, + 0x60, 0x5f, 0x62, 0x64, 0x64, 0x63, 0x62, 0x62, 0x62, 0x62, 0x62, 0x62, + 0x62, 0x62, 0x62, 0x62, 0x62, 0x63, 0x63, 0x63, 0x63, 0x63, 0x63, 0x63, + 0x64, 0x63, 0x63, 0x63, 0x63, 0x62, 0x62, 0x60, 0x5c, 0x5b, 0x54, 0x47, + 0x22, 0xe4, 0xc7, 0xc7, 0xc9, 0xc1, 0xc8, 0xd2, 0xc2, 0xc0, 0xb7, 
0xb8, + 0xb9, 0xb7, 0xb2, 0xb8, 0xbc, 0xcc, 0xcd, 0xc9, 0xbf, 0xb7, 0xb8, 0xc0, + 0xbd, 0xba, 0xbc, 0xbe, 0xbd, 0xc5, 0xc6, 0xc9, 0xc2, 0xc8, 0xba, 0xb9, + 0xb8, 0xb3, 0xb3, 0xbe, 0xc2, 0xc2, 0xbc, 0xb9, 0xb2, 0xb4, 0xb4, 0xc9, + 0xc0, 0xb1, 0xae, 0xb4, 0xb9, 0xbe, 0xbf, 0xc4, 0xbb, 0xc2, 0xb8, 0xaf, + 0xb1, 0xb7, 0xbc, 0xbe, 0xbf, 0xbc, 0xc2, 0xc9, 0xbc, 0xba, 0xc3, 0xc4, + 0xc2, 0xbf, 0xbe, 0xc7, 0xb7, 0xb5, 0xc0, 0xbb, 0xb4, 0xaf, 0xba, 0xc8, + 0xb7, 0xb1, 0xbd, 0xc6, 0xbb, 0xc5, 0xcb, 0xc9, 0xc2, 0xbc, 0xba, 0xc4, + 0xc7, 0xbc, 0xb7, 0xbe, 0xbc, 0xbf, 0xc2, 0xca, 0x27, 0x2c, 0x31, 0x33, + 0x33, 0x35, 0x37, 0x37, 0x37, 0x37, 0x37, 0x35, 0x32, 0x34, 0x36, 0x38, + 0x3d, 0x40, 0x3f, 0x42, 0x42, 0x43, 0x42, 0x40, 0x42, 0x42, 0x44, 0x43, + 0x45, 0x43, 0x40, 0x41, 0x45, 0x47, 0x47, 0x46, 0x4b, 0x4b, 0x43, 0x44, + 0x4a, 0x4c, 0x4d, 0x4c, 0x4a, 0x4c, 0x52, 0x50, 0x4f, 0x51, 0x53, 0x52, + 0x52, 0x57, 0x57, 0x57, 0x59, 0x58, 0x59, 0x57, 0x56, 0x56, 0x59, 0x59, + 0x57, 0x5c, 0x5c, 0x5a, 0x5a, 0x59, 0x57, 0x57, 0x57, 0x57, 0x59, 0x5a, + 0x59, 0x5a, 0x5b, 0x5c, 0x5e, 0x61, 0x60, 0x61, 0x61, 0x62, 0x63, 0x63, + 0x64, 0x64, 0x62, 0x62, 0x63, 0x62, 0x62, 0x62, 0x62, 0x62, 0x62, 0x63, + 0x63, 0x64, 0x64, 0x64, 0x63, 0x63, 0x63, 0x64, 0x64, 0x64, 0x63, 0x63, + 0x63, 0x63, 0x63, 0x62, 0x61, 0x60, 0x5d, 0x57, 0x47, 0x1a, 0xdd, 0xcc, + 0xc7, 0xbf, 0xc4, 0xcc, 0xc4, 0xc2, 0xb8, 0xb4, 0xbc, 0xc5, 0xb7, 0xbc, + 0xcf, 0xd7, 0xd2, 0xc4, 0xb7, 0xb5, 0xc1, 0xc4, 0xbd, 0xba, 0xba, 0xc0, + 0xc1, 0xbc, 0xbb, 0xbd, 0xc6, 0xc7, 0xc8, 0xbf, 0xbc, 0xb4, 0xb2, 0xb9, + 0xc2, 0xc1, 0xb9, 0xb3, 0xb0, 0xb3, 0xbc, 0xc8, 0xc4, 0xba, 0xb6, 0xb4, + 0xb7, 0xbe, 0xbf, 0xc1, 0xbc, 0xb7, 0xbc, 0xbd, 0xb7, 0xb5, 0xb9, 0xb9, + 0xb7, 0xbc, 0xba, 0xb7, 0xbc, 0xb7, 0xbf, 0xbd, 0xc7, 0xc3, 0xc4, 0xcb, + 0xc4, 0xb7, 0xc1, 0xbf, 0xc4, 0xc5, 0xc3, 0xbf, 0xb7, 0xb1, 0xb6, 0xc2, + 0xb8, 0xc2, 0xd0, 0xc3, 0xb2, 0xb3, 0xb7, 0xc6, 0xca, 0xbc, 0xb4, 0xbb, + 0xbd, 0xba, 0xbb, 0xc2, 0x2e, 0x30, 0x34, 0x36, 0x35, 0x37, 0x37, 
0x37, + 0x37, 0x37, 0x37, 0x37, 0x34, 0x35, 0x37, 0x3b, 0x3e, 0x3e, 0x40, 0x43, + 0x42, 0x43, 0x42, 0x3e, 0x3f, 0x42, 0x45, 0x43, 0x43, 0x3f, 0x41, 0x42, + 0x47, 0x48, 0x48, 0x49, 0x4e, 0x4a, 0x44, 0x46, 0x47, 0x4b, 0x4c, 0x4a, + 0x4b, 0x4e, 0x53, 0x50, 0x51, 0x52, 0x54, 0x52, 0x54, 0x57, 0x57, 0x57, + 0x56, 0x57, 0x57, 0x57, 0x56, 0x57, 0x59, 0x57, 0x58, 0x5e, 0x5d, 0x5a, + 0x5a, 0x58, 0x56, 0x56, 0x57, 0x58, 0x58, 0x58, 0x58, 0x59, 0x59, 0x5b, + 0x5c, 0x5e, 0x60, 0x60, 0x61, 0x63, 0x64, 0x62, 0x62, 0x65, 0x62, 0x62, + 0x63, 0x63, 0x63, 0x63, 0x62, 0x62, 0x62, 0x62, 0x64, 0x64, 0x64, 0x63, + 0x63, 0x63, 0x64, 0x65, 0x64, 0x64, 0x64, 0x64, 0x64, 0x64, 0x64, 0x64, + 0x62, 0x62, 0x61, 0x60, 0x5b, 0x47, 0x15, 0xe7, 0xcb, 0xc3, 0xbb, 0xc4, + 0xc2, 0xc1, 0xb7, 0xb6, 0xc2, 0xbe, 0xb7, 0xb8, 0xc9, 0xd4, 0xd2, 0xc7, + 0xb8, 0xbc, 0xd1, 0xc9, 0xc3, 0xbb, 0xb9, 0xb7, 0xbd, 0xbd, 0xba, 0xb5, + 0xbf, 0xc5, 0xcf, 0xc7, 0xc0, 0xbc, 0xb1, 0xb6, 0xbc, 0xbe, 0xb7, 0xb4, + 0xb1, 0xb6, 0xc2, 0xc1, 0xb4, 0xbc, 0xbb, 0xb3, 0xb4, 0xb6, 0xc0, 0xbd, + 0xbb, 0xbd, 0xce, 0xbc, 0xbc, 0xb8, 0xb8, 0xb6, 0xb2, 0xb9, 0xbc, 0xc4, + 0xc7, 0xc4, 0xc3, 0xbd, 0xc2, 0xc8, 0xcf, 0xc1, 0xb9, 0xb7, 0xbe, 0xbf, + 0xd2, 0xce, 0xc1, 0xba, 0xb9, 0xb9, 0xb6, 0xb4, 0xb1, 0xc0, 0xc9, 0xba, + 0xb4, 0xb2, 0xba, 0xbf, 0xc1, 0xc1, 0xba, 0xbc, 0xc8, 0xc2, 0xbe, 0xbd, + 0x33, 0x35, 0x36, 0x35, 0x36, 0x38, 0x39, 0x39, 0x38, 0x39, 0x39, 0x37, + 0x35, 0x39, 0x38, 0x3c, 0x40, 0x41, 0x42, 0x43, 0x43, 0x42, 0x40, 0x3f, + 0x3d, 0x42, 0x45, 0x42, 0x42, 0x40, 0x40, 0x44, 0x47, 0x47, 0x47, 0x4c, + 0x4d, 0x47, 0x47, 0x49, 0x47, 0x4a, 0x49, 0x49, 0x4d, 0x52, 0x53, 0x50, + 0x52, 0x52, 0x52, 0x52, 0x55, 0x56, 0x57, 0x57, 0x56, 0x55, 0x57, 0x57, + 0x57, 0x57, 0x59, 0x57, 0x59, 0x5d, 0x5c, 0x5a, 0x59, 0x57, 0x56, 0x56, + 0x57, 0x57, 0x57, 0x57, 0x59, 0x58, 0x59, 0x5a, 0x5a, 0x5d, 0x5e, 0x60, + 0x61, 0x62, 0x63, 0x61, 0x62, 0x63, 0x62, 0x60, 0x61, 0x62, 0x62, 0x62, + 0x62, 0x62, 0x62, 0x62, 0x63, 0x64, 0x64, 0x63, 0x63, 0x64, 0x64, 
0x64, + 0x64, 0x65, 0x66, 0x66, 0x65, 0x65, 0x65, 0x64, 0x63, 0x62, 0x61, 0x5f, + 0x5f, 0x5c, 0x4a, 0x22, 0xe7, 0xca, 0xc3, 0xc5, 0xc0, 0xb8, 0xb4, 0xb7, + 0xc0, 0xba, 0xb5, 0xb6, 0xbc, 0xd6, 0xcf, 0xc4, 0xbd, 0xba, 0xc7, 0xd2, + 0xcb, 0xcb, 0xbc, 0xb6, 0xbb, 0xc4, 0xb7, 0xb0, 0xba, 0xc1, 0xc4, 0xc8, + 0xc7, 0xba, 0xb7, 0xc3, 0xc2, 0xbf, 0xb2, 0xb7, 0xb7, 0xbb, 0xc0, 0xb6, + 0xac, 0xb3, 0xba, 0xb3, 0xaa, 0xb1, 0xc5, 0xc6, 0xc2, 0xb3, 0xbc, 0xba, + 0xb9, 0xba, 0xbb, 0xbc, 0xb6, 0xb3, 0xc0, 0xc5, 0xc7, 0xc7, 0xd4, 0xcd, + 0xbc, 0xc2, 0xd8, 0xd0, 0xbb, 0xb7, 0xbc, 0xb6, 0xcc, 0xc9, 0xbd, 0xb9, + 0xbd, 0xbe, 0xaf, 0xb1, 0xb7, 0xb7, 0xca, 0xc7, 0xc7, 0xc1, 0xc0, 0xbc, + 0xba, 0xbc, 0xba, 0xb7, 0xc8, 0xca, 0xc2, 0xbc, 0x35, 0x36, 0x37, 0x37, + 0x38, 0x39, 0x39, 0x3b, 0x3c, 0x3a, 0x38, 0x37, 0x37, 0x3e, 0x3c, 0x40, + 0x43, 0x42, 0x42, 0x42, 0x40, 0x40, 0x40, 0x40, 0x40, 0x44, 0x44, 0x40, + 0x43, 0x41, 0x42, 0x46, 0x47, 0x46, 0x48, 0x4d, 0x49, 0x48, 0x4b, 0x48, + 0x46, 0x4b, 0x4b, 0x4b, 0x4e, 0x54, 0x52, 0x50, 0x52, 0x53, 0x52, 0x52, + 0x55, 0x57, 0x57, 0x54, 0x54, 0x55, 0x57, 0x57, 0x56, 0x58, 0x58, 0x57, + 0x5b, 0x5e, 0x5a, 0x59, 0x58, 0x57, 0x56, 0x56, 0x56, 0x57, 0x57, 0x57, + 0x57, 0x58, 0x58, 0x59, 0x5a, 0x5d, 0x5c, 0x5e, 0x60, 0x62, 0x62, 0x5f, + 0x61, 0x62, 0x62, 0x62, 0x62, 0x62, 0x62, 0x62, 0x62, 0x62, 0x62, 0x63, + 0x63, 0x64, 0x64, 0x63, 0x63, 0x64, 0x64, 0x65, 0x65, 0x66, 0x66, 0x66, + 0x65, 0x66, 0x65, 0x65, 0x65, 0x64, 0x63, 0x62, 0x62, 0x60, 0x5b, 0x43, + 0x10, 0xd7, 0xc7, 0xc6, 0xc3, 0xbe, 0xba, 0xc5, 0xbf, 0xb9, 0xb5, 0xb5, + 0xb8, 0xe4, 0xd2, 0xc4, 0xc1, 0xbe, 0xc5, 0xcc, 0xd7, 0xda, 0xc6, 0xc1, + 0xbe, 0xc4, 0xb6, 0xb0, 0xb1, 0xb7, 0xbd, 0xc2, 0xc2, 0xbf, 0xc1, 0xb8, + 0xba, 0xbb, 0xb8, 0xba, 0xbf, 0xc2, 0xbf, 0xb1, 0xb2, 0xb3, 0xb2, 0xb3, + 0xb2, 0xb2, 0xc1, 0xc2, 0xc6, 0xb7, 0xc4, 0xc0, 0xb7, 0xaa, 0xb0, 0xb2, + 0xb2, 0xb1, 0xc5, 0xcc, 0xe3, 0xce, 0xca, 0xbe, 0xb4, 0xbe, 0xc8, 0xc6, + 0xbb, 0xbd, 0xc2, 0xb7, 0xc2, 0xc3, 0xc1, 0xba, 0xbc, 0xc0, 0xb8, 
0xb4, + 0xb8, 0xb8, 0xcb, 0xc9, 0xc6, 0xc3, 0xc3, 0xb9, 0xb6, 0xba, 0xb6, 0xb5, + 0xb7, 0xc2, 0xc2, 0xbe, 0x37, 0x36, 0x37, 0x37, 0x38, 0x39, 0x38, 0x3b, + 0x3c, 0x3b, 0x37, 0x38, 0x3a, 0x3f, 0x3e, 0x41, 0x41, 0x42, 0x40, 0x40, + 0x40, 0x41, 0x3e, 0x41, 0x45, 0x43, 0x40, 0x42, 0x44, 0x40, 0x43, 0x45, + 0x46, 0x47, 0x4c, 0x49, 0x46, 0x48, 0x49, 0x47, 0x47, 0x4d, 0x4b, 0x4d, + 0x50, 0x54, 0x4f, 0x4f, 0x52, 0x52, 0x52, 0x51, 0x56, 0x57, 0x57, 0x51, + 0x53, 0x56, 0x57, 0x57, 0x56, 0x57, 0x57, 0x58, 0x5c, 0x5d, 0x59, 0x58, + 0x56, 0x57, 0x56, 0x56, 0x56, 0x57, 0x57, 0x56, 0x57, 0x57, 0x58, 0x5c, + 0x5d, 0x5b, 0x5c, 0x5f, 0x60, 0x60, 0x5d, 0x5d, 0x61, 0x63, 0x60, 0x5f, + 0x62, 0x5f, 0x61, 0x62, 0x61, 0x61, 0x61, 0x62, 0x62, 0x63, 0x63, 0x63, + 0x63, 0x64, 0x65, 0x65, 0x66, 0x66, 0x66, 0x66, 0x65, 0x66, 0x66, 0x66, + 0x66, 0x65, 0x66, 0x65, 0x64, 0x63, 0x61, 0x56, 0x34, 0xf2, 0xcd, 0xc9, + 0xc6, 0xc4, 0xc3, 0xc2, 0xbe, 0xbd, 0xbc, 0xb7, 0xbb, 0xcd, 0xc4, 0xc0, + 0xc4, 0xc8, 0xc9, 0xc1, 0xdd, 0xd7, 0xc8, 0xc0, 0xbb, 0xc0, 0xb6, 0xb3, + 0xb1, 0xb5, 0xc0, 0xc3, 0xb7, 0xb0, 0xbc, 0xc0, 0xc1, 0xc2, 0xc8, 0xc7, + 0xbe, 0xc2, 0xbd, 0xb2, 0xb0, 0xae, 0xb8, 0xb6, 0xaf, 0xae, 0xbb, 0xbc, + 0xc7, 0xbb, 0xb6, 0xb9, 0xae, 0xa1, 0xab, 0xb5, 0xb8, 0xb2, 0xc4, 0xc3, + 0xd7, 0xcc, 0xbe, 0xb3, 0xb1, 0xb5, 0xc2, 0xc3, 0xbb, 0xc2, 0xc1, 0xbf, + 0xc5, 0xbd, 0xc2, 0xbd, 0xbb, 0xba, 0xc3, 0xb5, 0xb9, 0xbc, 0xbe, 0xbd, + 0xc2, 0xc3, 0xc0, 0xb5, 0xb2, 0xc1, 0xba, 0xc1, 0xb6, 0xbe, 0xb4, 0xbc, + 0x34, 0x35, 0x35, 0x37, 0x37, 0x38, 0x3d, 0x3d, 0x38, 0x38, 0x3b, 0x40, + 0x3f, 0x3e, 0x3f, 0x41, 0x42, 0x42, 0x40, 0x3f, 0x3e, 0x3f, 0x42, 0x42, + 0x43, 0x42, 0x3e, 0x40, 0x40, 0x3d, 0x43, 0x47, 0x44, 0x47, 0x47, 0x45, + 0x48, 0x4a, 0x4a, 0x47, 0x47, 0x4c, 0x49, 0x4c, 0x50, 0x52, 0x4e, 0x4f, + 0x52, 0x51, 0x52, 0x52, 0x57, 0x58, 0x57, 0x51, 0x53, 0x56, 0x56, 0x56, + 0x57, 0x57, 0x57, 0x58, 0x5a, 0x5b, 0x58, 0x56, 0x54, 0x57, 0x55, 0x55, + 0x55, 0x58, 0x58, 0x57, 0x57, 0x59, 0x5a, 0x5c, 0x5d, 0x5b, 0x5d, 
0x5f, + 0x5e, 0x5d, 0x5b, 0x5c, 0x61, 0x63, 0x60, 0x60, 0x61, 0x60, 0x60, 0x5f, + 0x61, 0x62, 0x62, 0x62, 0x62, 0x62, 0x62, 0x63, 0x64, 0x64, 0x65, 0x65, + 0x66, 0x66, 0x66, 0x66, 0x66, 0x67, 0x67, 0x67, 0x67, 0x66, 0x65, 0x65, + 0x64, 0x66, 0x65, 0x60, 0x4a, 0x15, 0xdf, 0xd4, 0xcb, 0xbe, 0xc2, 0xba, + 0xba, 0xbd, 0xc2, 0xbe, 0xc5, 0xc5, 0xbf, 0xc2, 0xc5, 0xc1, 0xc1, 0xbc, + 0xd7, 0xcc, 0xcb, 0xc7, 0xc2, 0xbb, 0xb6, 0xb4, 0xb4, 0xb7, 0xc5, 0xc6, + 0xc1, 0xbc, 0xbc, 0xc7, 0xc7, 0xb8, 0xc1, 0xc1, 0xb6, 0xba, 0xc0, 0xbb, + 0xbb, 0xb3, 0xba, 0xb9, 0xb2, 0xad, 0xb1, 0xb0, 0xbc, 0xb8, 0xb8, 0xaf, + 0xaa, 0xa6, 0xaa, 0xc1, 0xc3, 0xb9, 0xc7, 0xc1, 0xc3, 0xd6, 0xc3, 0xb8, + 0xb5, 0xb7, 0xbb, 0xba, 0xba, 0xbd, 0xc2, 0xbc, 0xc7, 0xc0, 0xc2, 0xbc, + 0xbb, 0xbf, 0xc0, 0xb3, 0xb9, 0xbc, 0xbc, 0xbf, 0xbe, 0xc2, 0xbd, 0xb3, + 0xb7, 0xbe, 0xbb, 0xc8, 0xc0, 0xbb, 0xb3, 0xb7, 0x33, 0x35, 0x36, 0x37, + 0x39, 0x3d, 0x40, 0x3d, 0x38, 0x39, 0x3d, 0x40, 0x41, 0x3e, 0x40, 0x42, + 0x42, 0x42, 0x42, 0x40, 0x3c, 0x3e, 0x41, 0x42, 0x42, 0x41, 0x41, 0x42, + 0x42, 0x3f, 0x45, 0x44, 0x46, 0x47, 0x44, 0x45, 0x47, 0x47, 0x48, 0x45, + 0x48, 0x49, 0x4a, 0x4c, 0x51, 0x51, 0x4e, 0x50, 0x52, 0x52, 0x52, 0x52, + 0x57, 0x57, 0x55, 0x52, 0x52, 0x52, 0x52, 0x55, 0x59, 0x56, 0x57, 0x58, + 0x5a, 0x5a, 0x57, 0x52, 0x54, 0x52, 0x52, 0x53, 0x55, 0x57, 0x56, 0x55, + 0x59, 0x5b, 0x5b, 0x5c, 0x5d, 0x5d, 0x5e, 0x5f, 0x5d, 0x5c, 0x5c, 0x5d, + 0x62, 0x63, 0x62, 0x60, 0x61, 0x62, 0x62, 0x61, 0x62, 0x62, 0x62, 0x62, + 0x62, 0x63, 0x63, 0x64, 0x63, 0x64, 0x65, 0x65, 0x65, 0x65, 0x65, 0x66, + 0x66, 0x67, 0x67, 0x67, 0x67, 0x66, 0x66, 0x66, 0x65, 0x65, 0x66, 0x64, + 0x5e, 0x39, 0xfa, 0xdc, 0xd2, 0xc7, 0xc2, 0xbd, 0xc6, 0xc3, 0xc6, 0xbc, + 0xbe, 0xc0, 0xbd, 0xb8, 0xb4, 0xbb, 0xbc, 0xc2, 0xcc, 0xc7, 0xc1, 0xd0, + 0xc7, 0xb6, 0xb5, 0xbb, 0xb6, 0xb6, 0xbd, 0xc0, 0xc0, 0xbc, 0xba, 0xc2, + 0xcc, 0xb7, 0xb5, 0xbb, 0xb7, 0xb6, 0xbb, 0xb8, 0xb8, 0xc2, 0xc1, 0xb7, + 0xae, 0xaf, 0xae, 0xb0, 0xc2, 0xbf, 0xc6, 0xb4, 0xac, 0xb1, 0xaa, 
0xac, + 0xba, 0xc6, 0xd0, 0xc2, 0xbc, 0xe2, 0xce, 0xbb, 0xb7, 0xc7, 0xb9, 0xb1, + 0xb9, 0xb7, 0xc0, 0xc2, 0xc4, 0xc2, 0xc2, 0xba, 0xb2, 0xb9, 0xb7, 0xb6, + 0xbb, 0xbf, 0xbc, 0xb7, 0xbc, 0xc0, 0xbb, 0xb5, 0xbc, 0xc5, 0xbd, 0xc0, + 0xbf, 0xb6, 0xba, 0xb7, 0x36, 0x37, 0x38, 0x38, 0x3c, 0x40, 0x3f, 0x3c, + 0x3a, 0x39, 0x3d, 0x42, 0x41, 0x3f, 0x41, 0x42, 0x41, 0x43, 0x40, 0x3e, + 0x42, 0x43, 0x42, 0x42, 0x3f, 0x40, 0x42, 0x40, 0x3f, 0x41, 0x44, 0x43, + 0x45, 0x44, 0x42, 0x48, 0x46, 0x45, 0x44, 0x47, 0x4a, 0x49, 0x47, 0x4c, + 0x51, 0x51, 0x4d, 0x50, 0x52, 0x51, 0x52, 0x52, 0x56, 0x57, 0x53, 0x53, + 0x53, 0x52, 0x52, 0x56, 0x58, 0x55, 0x57, 0x57, 0x59, 0x58, 0x56, 0x51, + 0x53, 0x50, 0x52, 0x53, 0x56, 0x58, 0x56, 0x57, 0x59, 0x5a, 0x5c, 0x5e, + 0x5e, 0x5c, 0x5d, 0x5e, 0x5b, 0x5b, 0x5c, 0x5e, 0x62, 0x62, 0x62, 0x60, + 0x61, 0x60, 0x60, 0x60, 0x60, 0x62, 0x62, 0x62, 0x62, 0x63, 0x64, 0x65, + 0x64, 0x65, 0x64, 0x63, 0x64, 0x64, 0x65, 0x65, 0x66, 0x66, 0x66, 0x65, + 0x65, 0x66, 0x66, 0x65, 0x65, 0x65, 0x66, 0x66, 0x64, 0x57, 0x23, 0xee, + 0xd6, 0xd1, 0xbb, 0xba, 0xbb, 0xba, 0xc2, 0xb2, 0xb5, 0xc1, 0xc7, 0xb6, + 0xb4, 0xbc, 0xbc, 0xd5, 0xca, 0xbf, 0xb4, 0xbc, 0xbd, 0xb7, 0xb6, 0xbd, + 0xb8, 0xb7, 0xb2, 0xb7, 0xb7, 0xbb, 0xb9, 0xb7, 0xc7, 0xc1, 0xb7, 0xb7, + 0xbc, 0xb6, 0xbb, 0xbb, 0xbd, 0xc9, 0xbf, 0xb6, 0xbf, 0xb3, 0xb3, 0xb9, + 0xc0, 0xc2, 0xbf, 0xb9, 0xac, 0xae, 0xae, 0xac, 0xb5, 0xc0, 0xc7, 0xc2, + 0xba, 0xd0, 0xcf, 0xbd, 0xba, 0xcc, 0xcd, 0xca, 0xc5, 0xc0, 0xc4, 0xcb, + 0xc2, 0xba, 0xc1, 0xb7, 0xaf, 0xb7, 0xb8, 0xb8, 0xbd, 0xc0, 0xba, 0xb8, + 0xbe, 0xc2, 0xbc, 0xb1, 0xb5, 0xc4, 0xbe, 0xb1, 0xb8, 0xb1, 0xbd, 0xb9, + 0x34, 0x37, 0x39, 0x3c, 0x3d, 0x3d, 0x3b, 0x3a, 0x39, 0x38, 0x3e, 0x3e, + 0x3d, 0x3f, 0x3f, 0x42, 0x41, 0x43, 0x3e, 0x40, 0x44, 0x44, 0x42, 0x41, + 0x3e, 0x42, 0x42, 0x3e, 0x3f, 0x44, 0x47, 0x46, 0x42, 0x40, 0x47, 0x4a, + 0x43, 0x41, 0x41, 0x46, 0x48, 0x48, 0x47, 0x4c, 0x51, 0x50, 0x4d, 0x50, + 0x51, 0x50, 0x51, 0x52, 0x57, 0x57, 0x52, 0x52, 0x53, 0x50, 0x51, 
0x56, + 0x57, 0x56, 0x58, 0x57, 0x59, 0x57, 0x55, 0x53, 0x52, 0x4f, 0x52, 0x53, + 0x55, 0x57, 0x57, 0x59, 0x58, 0x5a, 0x5d, 0x5d, 0x5c, 0x5c, 0x5b, 0x5d, + 0x5b, 0x5a, 0x5d, 0x5f, 0x62, 0x61, 0x62, 0x60, 0x61, 0x60, 0x5e, 0x5e, + 0x60, 0x62, 0x62, 0x62, 0x63, 0x64, 0x64, 0x65, 0x64, 0x65, 0x64, 0x63, + 0x63, 0x63, 0x64, 0x64, 0x65, 0x65, 0x65, 0x65, 0x65, 0x66, 0x66, 0x65, + 0x65, 0x65, 0x65, 0x65, 0x65, 0x61, 0x47, 0x0c, 0xd8, 0xc8, 0xbe, 0xbc, + 0xbc, 0xbc, 0xbd, 0xaf, 0xb8, 0xc0, 0xb9, 0xb3, 0xbb, 0xbc, 0xb7, 0xc7, + 0xc2, 0xb6, 0xb3, 0xb8, 0xc3, 0xbd, 0xb8, 0xbd, 0xb7, 0xbb, 0xb1, 0xb4, + 0xb3, 0xb8, 0xb9, 0xd2, 0xd4, 0xbf, 0xbc, 0xb5, 0xb9, 0xc2, 0xbc, 0xb4, + 0xbf, 0xc9, 0xc5, 0xbc, 0xcf, 0xba, 0xc1, 0xc0, 0xb4, 0xb5, 0xb8, 0xb7, + 0xb2, 0xb5, 0xb1, 0xb4, 0xb4, 0xb6, 0xc5, 0xc2, 0xb6, 0xbe, 0xc7, 0xc5, + 0xbc, 0xd0, 0xce, 0xcf, 0xcc, 0xcd, 0xcb, 0xd2, 0xc5, 0xbe, 0xca, 0xc3, + 0xc0, 0xb2, 0xb6, 0xba, 0xb7, 0xb9, 0xb9, 0xc6, 0xc8, 0xbe, 0xb3, 0xb3, + 0xbd, 0xbf, 0xb7, 0xb3, 0xb6, 0xb5, 0xc0, 0xb9, 0x36, 0x37, 0x38, 0x3c, + 0x3e, 0x3c, 0x38, 0x36, 0x37, 0x37, 0x3b, 0x3c, 0x3d, 0x3e, 0x40, 0x40, + 0x42, 0x42, 0x40, 0x43, 0x46, 0x42, 0x41, 0x3f, 0x40, 0x44, 0x41, 0x3e, + 0x43, 0x48, 0x47, 0x43, 0x42, 0x45, 0x47, 0x44, 0x41, 0x42, 0x43, 0x47, + 0x47, 0x49, 0x47, 0x4c, 0x4e, 0x4f, 0x4c, 0x50, 0x4f, 0x51, 0x51, 0x52, + 0x57, 0x56, 0x52, 0x52, 0x52, 0x4c, 0x50, 0x56, 0x57, 0x55, 0x57, 0x57, + 0x59, 0x57, 0x53, 0x52, 0x51, 0x50, 0x52, 0x53, 0x56, 0x57, 0x5a, 0x5b, + 0x59, 0x5e, 0x5d, 0x5d, 0x5c, 0x59, 0x5a, 0x5b, 0x5a, 0x5c, 0x5c, 0x5e, + 0x62, 0x60, 0x5f, 0x5f, 0x5f, 0x5f, 0x5e, 0x5f, 0x60, 0x61, 0x62, 0x62, + 0x63, 0x64, 0x63, 0x64, 0x64, 0x65, 0x64, 0x63, 0x63, 0x63, 0x64, 0x64, + 0x65, 0x64, 0x65, 0x65, 0x65, 0x66, 0x66, 0x66, 0x66, 0x65, 0x64, 0x63, + 0x63, 0x63, 0x5b, 0x24, 0xee, 0xdd, 0xd8, 0xd2, 0xc6, 0xbc, 0xbf, 0xbd, + 0xbf, 0xc1, 0xb2, 0xb0, 0xc6, 0xbb, 0xba, 0xbc, 0xba, 0xb4, 0xb6, 0xb7, + 0xbc, 0xbc, 0xbb, 0xbc, 0xb4, 0xb1, 0xb0, 0xb7, 0xb7, 0xba, 0xbc, 
0xc3, + 0xc2, 0xb7, 0xb8, 0xbf, 0xb9, 0xbd, 0xc2, 0xbc, 0xc7, 0xbf, 0xc3, 0xbd, + 0xc5, 0xbf, 0xc2, 0xbf, 0xb3, 0xb4, 0xb9, 0xc0, 0xc0, 0xb7, 0xaf, 0xb5, + 0xba, 0xaf, 0xbf, 0xbe, 0xb7, 0xb8, 0xbf, 0xc8, 0xd1, 0xc1, 0xbe, 0xc4, + 0xc8, 0xc9, 0xd2, 0xce, 0xbf, 0xbc, 0xb9, 0xc1, 0xd2, 0xbd, 0xb7, 0xbb, + 0xb5, 0xae, 0xbd, 0xcc, 0xcb, 0xbf, 0xb7, 0xbe, 0xc4, 0xc4, 0xc4, 0xb9, + 0xbf, 0xb2, 0xb9, 0xb6, 0x35, 0x34, 0x38, 0x3c, 0x3a, 0x36, 0x37, 0x34, + 0x34, 0x37, 0x38, 0x3d, 0x3d, 0x3d, 0x40, 0x42, 0x42, 0x42, 0x42, 0x45, + 0x45, 0x41, 0x3f, 0x3f, 0x43, 0x44, 0x40, 0x3f, 0x48, 0x49, 0x42, 0x42, + 0x43, 0x42, 0x46, 0x42, 0x40, 0x43, 0x44, 0x46, 0x48, 0x48, 0x46, 0x49, + 0x4a, 0x4d, 0x4d, 0x4f, 0x4e, 0x50, 0x50, 0x54, 0x57, 0x55, 0x52, 0x50, + 0x50, 0x4c, 0x52, 0x56, 0x57, 0x54, 0x55, 0x57, 0x58, 0x56, 0x53, 0x51, + 0x4f, 0x4f, 0x52, 0x53, 0x57, 0x57, 0x5b, 0x5c, 0x5b, 0x5d, 0x5d, 0x5d, + 0x59, 0x57, 0x58, 0x57, 0x59, 0x5d, 0x5c, 0x5f, 0x62, 0x62, 0x5d, 0x5e, + 0x5d, 0x5e, 0x5f, 0x5f, 0x61, 0x62, 0x62, 0x62, 0x62, 0x63, 0x63, 0x63, + 0x64, 0x64, 0x64, 0x64, 0x64, 0x64, 0x64, 0x65, 0x64, 0x65, 0x66, 0x65, + 0x66, 0x67, 0x67, 0x67, 0x66, 0x66, 0x64, 0x63, 0x62, 0x62, 0x5f, 0x46, + 0x15, 0x13, 0x15, 0xe8, 0xcc, 0xd2, 0xc8, 0xc9, 0xd4, 0xd3, 0xc5, 0xb8, + 0xbe, 0xb2, 0xb7, 0xbd, 0xbd, 0xba, 0xb9, 0xb3, 0xb8, 0xbb, 0xb9, 0xb8, + 0xbc, 0xb7, 0xb1, 0xb3, 0xb7, 0xb7, 0xbd, 0xbe, 0xc1, 0xd4, 0xd5, 0xc6, + 0xb7, 0xbd, 0xca, 0xc5, 0xca, 0xb7, 0xc6, 0xc0, 0xb8, 0xb9, 0xc6, 0xc6, + 0xb7, 0xba, 0xb8, 0xc2, 0xc7, 0xb9, 0xb1, 0xb5, 0xbd, 0xb6, 0xbf, 0xc3, + 0xb7, 0xb7, 0xb8, 0xbb, 0xcc, 0xc1, 0xb5, 0xba, 0xb9, 0xb4, 0xc7, 0xca, + 0xc5, 0xcf, 0xb8, 0xb1, 0xc1, 0xbc, 0xb7, 0xbc, 0xbc, 0xbc, 0xbf, 0xbc, + 0xbf, 0xbf, 0xc1, 0xc2, 0xbf, 0xbf, 0xd6, 0xd5, 0xc2, 0xb0, 0xb3, 0xb3, + 0x34, 0x36, 0x37, 0x36, 0x36, 0x35, 0x37, 0x37, 0x37, 0x37, 0x37, 0x3d, + 0x3b, 0x3d, 0x3f, 0x42, 0x42, 0x41, 0x43, 0x45, 0x41, 0x3d, 0x3f, 0x43, + 0x44, 0x40, 0x3d, 0x41, 0x4a, 0x45, 0x3e, 0x42, 0x43, 0x42, 0x45, 
0x43, + 0x40, 0x42, 0x43, 0x47, 0x4a, 0x4a, 0x47, 0x4a, 0x49, 0x4e, 0x4f, 0x4d, + 0x4f, 0x4e, 0x50, 0x53, 0x54, 0x51, 0x51, 0x4e, 0x4d, 0x4d, 0x51, 0x55, + 0x56, 0x54, 0x54, 0x56, 0x57, 0x54, 0x52, 0x50, 0x4d, 0x4f, 0x53, 0x54, + 0x56, 0x58, 0x5b, 0x5c, 0x5b, 0x5c, 0x5c, 0x59, 0x58, 0x58, 0x57, 0x58, + 0x5a, 0x5e, 0x5e, 0x61, 0x63, 0x62, 0x5e, 0x5d, 0x5d, 0x5c, 0x5e, 0x5f, + 0x60, 0x62, 0x62, 0x63, 0x63, 0x63, 0x63, 0x63, 0x63, 0x64, 0x63, 0x64, + 0x64, 0x64, 0x65, 0x65, 0x65, 0x65, 0x65, 0x65, 0x66, 0x67, 0x67, 0x67, + 0x66, 0x66, 0x64, 0x62, 0x61, 0x60, 0x5f, 0x55, 0x37, 0x23, 0x14, 0xee, + 0xd2, 0xd2, 0xc5, 0xb6, 0xc2, 0xdb, 0xf0, 0xe8, 0xd5, 0xb7, 0xc0, 0xcf, + 0xbf, 0xb5, 0xb9, 0xc4, 0xbc, 0xba, 0xb9, 0xba, 0xb4, 0xbb, 0xba, 0xb8, + 0xb2, 0xb6, 0xbd, 0xc2, 0xc2, 0xcf, 0xde, 0xc7, 0xb8, 0xc1, 0xc4, 0xbc, + 0xc1, 0xbc, 0xc3, 0xbc, 0xbc, 0xbb, 0xbd, 0xbb, 0xbc, 0xc0, 0xbe, 0xc5, + 0xc9, 0xc6, 0xbd, 0xbb, 0xc9, 0xbd, 0xb2, 0xc2, 0xb9, 0xba, 0xbc, 0xb8, + 0xbc, 0xba, 0xb7, 0xbc, 0xc0, 0xb8, 0xca, 0xcc, 0xc6, 0xd0, 0xcf, 0xb4, + 0xbf, 0xbf, 0xbb, 0xb8, 0xbe, 0xc7, 0xd2, 0xc3, 0xc4, 0xcc, 0xc6, 0xc5, + 0xc6, 0xc4, 0xc6, 0xba, 0xab, 0xac, 0xaf, 0xaf, 0x36, 0x36, 0x36, 0x34, + 0x32, 0x35, 0x36, 0x36, 0x37, 0x39, 0x39, 0x3a, 0x39, 0x3c, 0x3e, 0x42, + 0x42, 0x42, 0x47, 0x44, 0x3d, 0x3d, 0x44, 0x45, 0x41, 0x3d, 0x3f, 0x44, + 0x49, 0x41, 0x40, 0x44, 0x42, 0x42, 0x43, 0x42, 0x42, 0x46, 0x44, 0x4a, + 0x4a, 0x48, 0x47, 0x4a, 0x4a, 0x4d, 0x4d, 0x4d, 0x4d, 0x4d, 0x50, 0x52, + 0x53, 0x4e, 0x4d, 0x4b, 0x4c, 0x4f, 0x51, 0x55, 0x56, 0x55, 0x54, 0x55, + 0x52, 0x51, 0x4d, 0x4f, 0x4d, 0x50, 0x52, 0x54, 0x57, 0x5a, 0x5a, 0x5a, + 0x5a, 0x5b, 0x57, 0x57, 0x57, 0x56, 0x57, 0x57, 0x5b, 0x60, 0x61, 0x62, + 0x63, 0x5f, 0x5d, 0x5c, 0x5a, 0x5c, 0x61, 0x60, 0x60, 0x62, 0x61, 0x62, + 0x63, 0x63, 0x64, 0x64, 0x63, 0x65, 0x63, 0x63, 0x64, 0x63, 0x64, 0x66, + 0x65, 0x66, 0x66, 0x65, 0x66, 0x67, 0x67, 0x67, 0x66, 0x66, 0x66, 0x64, + 0x62, 0x60, 0x5e, 0x5b, 0x4e, 0x2d, 0x00, 0xe3, 0xdc, 0xca, 0xc1, 
0xc2, + 0xac, 0xb7, 0xca, 0xd7, 0xd2, 0xb8, 0xb5, 0xc2, 0xbd, 0xb9, 0xbc, 0xc1, + 0xbc, 0xba, 0xbe, 0xbb, 0xb6, 0xc2, 0xbe, 0xcc, 0xb7, 0xb9, 0xb9, 0xbb, + 0xbf, 0xbf, 0xcc, 0xb9, 0xb2, 0xbc, 0xbe, 0xb8, 0xbc, 0xbf, 0xc1, 0xbc, + 0xc0, 0xbe, 0xba, 0xc5, 0xcd, 0xbc, 0xbd, 0xc2, 0xc3, 0xbc, 0xbc, 0xbd, + 0xc6, 0xc6, 0xb7, 0xbe, 0xbd, 0xba, 0xb9, 0xba, 0xc0, 0xb7, 0xbc, 0xcd, + 0xc8, 0xb7, 0xc6, 0xd7, 0xcc, 0xc7, 0xc4, 0xb7, 0xbf, 0xc6, 0xc2, 0xc2, + 0xbc, 0xb7, 0xc6, 0xcd, 0xbf, 0xc9, 0xc7, 0xc1, 0xd5, 0xcc, 0xc2, 0xbb, + 0xac, 0xb4, 0xb9, 0xb7, 0x33, 0x33, 0x34, 0x34, 0x34, 0x34, 0x39, 0x36, + 0x36, 0x37, 0x37, 0x36, 0x38, 0x3b, 0x3d, 0x3f, 0x42, 0x43, 0x44, 0x40, + 0x3d, 0x40, 0x45, 0x44, 0x3f, 0x3e, 0x42, 0x47, 0x47, 0x41, 0x42, 0x43, + 0x42, 0x41, 0x3f, 0x3f, 0x42, 0x45, 0x44, 0x47, 0x47, 0x47, 0x47, 0x47, + 0x49, 0x4b, 0x4c, 0x4d, 0x4c, 0x4d, 0x4f, 0x50, 0x53, 0x4d, 0x4a, 0x48, + 0x4b, 0x4f, 0x51, 0x55, 0x56, 0x53, 0x54, 0x52, 0x51, 0x50, 0x4d, 0x4e, + 0x50, 0x4f, 0x52, 0x56, 0x57, 0x5a, 0x5a, 0x58, 0x57, 0x55, 0x55, 0x57, + 0x56, 0x55, 0x53, 0x57, 0x5b, 0x5e, 0x62, 0x62, 0x62, 0x5d, 0x5b, 0x5a, + 0x5a, 0x5c, 0x61, 0x62, 0x62, 0x62, 0x61, 0x62, 0x64, 0x64, 0x65, 0x64, + 0x64, 0x65, 0x64, 0x62, 0x62, 0x62, 0x63, 0x64, 0x65, 0x66, 0x66, 0x65, + 0x66, 0x65, 0x65, 0x66, 0x66, 0x66, 0x66, 0x65, 0x62, 0x60, 0x60, 0x5d, + 0x57, 0x42, 0x0f, 0xe6, 0xd7, 0xca, 0xbd, 0xc1, 0xb3, 0xc5, 0xc6, 0xb5, + 0xca, 0xb7, 0xb7, 0xb6, 0xc1, 0xc2, 0xc2, 0xbe, 0xbe, 0xb6, 0xbc, 0xb5, + 0xb7, 0xb9, 0xbf, 0xca, 0xb9, 0xb2, 0xb7, 0xb9, 0xbc, 0xc1, 0xc4, 0xb7, + 0xaf, 0xbf, 0xb8, 0xb7, 0xbc, 0xc3, 0xc6, 0xbe, 0xc0, 0xc0, 0xb9, 0xbe, + 0xba, 0xb2, 0xb7, 0xc7, 0xc4, 0xc1, 0xc4, 0xc3, 0xc2, 0xcf, 0xc4, 0xc3, + 0xb5, 0xb1, 0xb1, 0xbd, 0xc4, 0xb9, 0xc1, 0xd9, 0xcc, 0xba, 0xc1, 0xca, + 0xc1, 0xc0, 0xbb, 0xb7, 0xb7, 0xbf, 0xbb, 0xbc, 0xb7, 0xb4, 0xb7, 0xc4, + 0xbf, 0xc9, 0xc3, 0xbd, 0xc5, 0xca, 0xbf, 0xbe, 0xba, 0xba, 0xbf, 0xba, + 0x32, 0x32, 0x32, 0x34, 0x36, 0x34, 0x36, 0x34, 0x35, 0x37, 0x34, 
0x35, + 0x37, 0x3b, 0x3e, 0x3f, 0x3e, 0x41, 0x41, 0x3b, 0x3f, 0x44, 0x45, 0x43, + 0x3e, 0x40, 0x44, 0x48, 0x44, 0x40, 0x43, 0x43, 0x42, 0x41, 0x40, 0x3f, + 0x44, 0x45, 0x43, 0x44, 0x47, 0x47, 0x47, 0x46, 0x4a, 0x4d, 0x4d, 0x49, + 0x49, 0x4c, 0x4b, 0x4d, 0x51, 0x4e, 0x48, 0x48, 0x4b, 0x50, 0x52, 0x54, + 0x53, 0x51, 0x52, 0x4d, 0x51, 0x4d, 0x49, 0x4b, 0x4c, 0x4e, 0x53, 0x56, + 0x57, 0x5a, 0x58, 0x56, 0x54, 0x52, 0x55, 0x57, 0x57, 0x53, 0x52, 0x57, + 0x5c, 0x5d, 0x62, 0x62, 0x62, 0x5d, 0x5b, 0x5b, 0x5a, 0x5a, 0x61, 0x62, + 0x62, 0x62, 0x62, 0x62, 0x65, 0x65, 0x65, 0x64, 0x64, 0x65, 0x64, 0x62, + 0x62, 0x62, 0x63, 0x63, 0x64, 0x65, 0x65, 0x66, 0x66, 0x65, 0x65, 0x66, + 0x67, 0x67, 0x65, 0x66, 0x64, 0x61, 0x61, 0x60, 0x5e, 0x5a, 0x3d, 0xf7, + 0xd2, 0xd2, 0xcf, 0xd2, 0xd2, 0xd5, 0xd4, 0xca, 0xd0, 0xb6, 0xb7, 0xbc, + 0xc0, 0xc6, 0xcc, 0xd7, 0xc0, 0xaf, 0xbc, 0xb8, 0xa7, 0xac, 0xb4, 0xb9, + 0xba, 0xb0, 0xb6, 0xc9, 0xcb, 0xc4, 0xc3, 0xbb, 0xb1, 0xc7, 0xb7, 0xb9, + 0xb7, 0xbc, 0xcf, 0xc8, 0xb7, 0xb8, 0xba, 0xb7, 0xb1, 0xb3, 0xbd, 0xce, + 0xc6, 0xca, 0xc5, 0xbc, 0xc3, 0xc2, 0xce, 0xc8, 0xb7, 0xb7, 0xbe, 0xc8, + 0xc7, 0xc1, 0xd9, 0xdb, 0xca, 0xba, 0xc3, 0xcd, 0xc1, 0xbc, 0xb8, 0xaf, + 0xb8, 0xdf, 0xd0, 0xb6, 0xae, 0xb1, 0xb5, 0xc2, 0xbf, 0xc1, 0xbb, 0xba, + 0xc3, 0xc9, 0xbe, 0xbc, 0xc6, 0xc2, 0xc9, 0xc4, 0x32, 0x34, 0x34, 0x35, + 0x35, 0x34, 0x33, 0x33, 0x33, 0x33, 0x32, 0x37, 0x3a, 0x3d, 0x3d, 0x3f, + 0x3e, 0x3d, 0x3e, 0x3d, 0x42, 0x44, 0x42, 0x3f, 0x3e, 0x40, 0x47, 0x48, + 0x43, 0x40, 0x43, 0x42, 0x42, 0x43, 0x42, 0x3f, 0x45, 0x45, 0x43, 0x43, + 0x47, 0x48, 0x47, 0x47, 0x48, 0x4b, 0x49, 0x47, 0x49, 0x49, 0x4a, 0x4e, + 0x4d, 0x4b, 0x47, 0x49, 0x4c, 0x4f, 0x51, 0x52, 0x4d, 0x4f, 0x4d, 0x4a, + 0x4f, 0x47, 0x45, 0x47, 0x48, 0x52, 0x54, 0x54, 0x57, 0x58, 0x56, 0x56, + 0x52, 0x51, 0x56, 0x54, 0x54, 0x54, 0x52, 0x56, 0x5b, 0x5e, 0x61, 0x62, + 0x61, 0x5d, 0x5a, 0x5a, 0x58, 0x5c, 0x60, 0x5f, 0x61, 0x61, 0x62, 0x62, + 0x65, 0x65, 0x64, 0x64, 0x65, 0x64, 0x63, 0x63, 0x63, 0x63, 0x64, 
0x64, + 0x63, 0x64, 0x65, 0x65, 0x65, 0x65, 0x65, 0x65, 0x66, 0x67, 0x66, 0x66, + 0x65, 0x61, 0x60, 0x5f, 0x5e, 0x5d, 0x5a, 0x32, 0xe1, 0xcd, 0xc7, 0xce, + 0xd0, 0xce, 0xcf, 0xd3, 0xd2, 0xc4, 0xba, 0xba, 0xbd, 0xc0, 0xc5, 0xc8, + 0xcc, 0xd1, 0xd5, 0xce, 0xb7, 0xc7, 0xbc, 0xb7, 0xb9, 0xba, 0xbf, 0xc9, + 0xcc, 0xc9, 0xbd, 0xbc, 0xc2, 0xcc, 0xbd, 0xb7, 0xb1, 0xc0, 0xd9, 0xc5, + 0xb5, 0xc4, 0xc2, 0xb7, 0xb6, 0xb7, 0xc4, 0xc3, 0xbc, 0xb9, 0xc6, 0xb7, + 0xbc, 0xbc, 0xc2, 0xbc, 0xb5, 0xb6, 0xbf, 0xc1, 0xc5, 0xbb, 0xc1, 0xc6, + 0xc9, 0xc7, 0xcf, 0xd2, 0xd1, 0xc0, 0xb9, 0xb2, 0xb9, 0xcc, 0xc6, 0xbf, + 0xbd, 0xb6, 0xb5, 0xb5, 0xb8, 0xb7, 0xb8, 0xc6, 0xc8, 0xc1, 0xb9, 0xb1, + 0xcc, 0xd7, 0xc4, 0xc0, 0x34, 0x33, 0x32, 0x34, 0x33, 0x33, 0x32, 0x32, + 0x32, 0x32, 0x33, 0x39, 0x3c, 0x3c, 0x3b, 0x3f, 0x3d, 0x3c, 0x3d, 0x42, + 0x43, 0x42, 0x3f, 0x3d, 0x3e, 0x42, 0x45, 0x46, 0x40, 0x42, 0x43, 0x43, + 0x43, 0x43, 0x42, 0x42, 0x45, 0x44, 0x42, 0x43, 0x47, 0x47, 0x47, 0x46, + 0x49, 0x48, 0x47, 0x46, 0x47, 0x48, 0x4a, 0x4c, 0x4c, 0x4a, 0x48, 0x4b, + 0x4b, 0x4c, 0x4e, 0x4f, 0x4d, 0x4f, 0x49, 0x49, 0x4b, 0x43, 0x47, 0x48, + 0x4e, 0x52, 0x53, 0x54, 0x57, 0x56, 0x53, 0x51, 0x4d, 0x51, 0x56, 0x52, + 0x53, 0x56, 0x54, 0x57, 0x5a, 0x5c, 0x60, 0x61, 0x5f, 0x5c, 0x59, 0x58, + 0x59, 0x5d, 0x5e, 0x5d, 0x60, 0x61, 0x61, 0x63, 0x64, 0x64, 0x64, 0x64, + 0x65, 0x65, 0x64, 0x63, 0x64, 0x64, 0x65, 0x65, 0x64, 0x63, 0x64, 0x63, + 0x64, 0x65, 0x64, 0x64, 0x64, 0x66, 0x67, 0x66, 0x65, 0x62, 0x60, 0x5e, + 0x5d, 0x59, 0x59, 0x57, 0x36, 0xec, 0xc9, 0xc8, 0xcc, 0xd0, 0xd0, 0xcb, + 0xc5, 0xbc, 0xc4, 0xc0, 0xc7, 0xbd, 0xbd, 0xc4, 0xcd, 0xd8, 0xf0, 0xd5, + 0xc2, 0xd7, 0xc3, 0xca, 0xc7, 0xc2, 0xc0, 0xbc, 0xc3, 0xc7, 0xc1, 0xc2, + 0xc8, 0xcc, 0xcc, 0xbc, 0xb7, 0xba, 0xc4, 0xbc, 0xba, 0xc0, 0xb9, 0xb7, + 0xbc, 0xcb, 0xc5, 0xb9, 0xb7, 0xbb, 0xcf, 0xc3, 0xc1, 0xbb, 0xba, 0xbc, + 0xb7, 0xbd, 0xbc, 0xb3, 0xc0, 0xc9, 0xba, 0xb8, 0xc0, 0xc3, 0xcf, 0xc8, + 0xc9, 0xc2, 0xc4, 0xbb, 0xbf, 0xc1, 0xbf, 0xc7, 0xd3, 0xbc, 0xb0, 
0xb0, + 0xbb, 0xc3, 0xc5, 0xca, 0xbd, 0xb3, 0xb7, 0xb4, 0xbf, 0xc7, 0xc0, 0xba, + 0x31, 0x32, 0x31, 0x32, 0x32, 0x30, 0x2f, 0x31, 0x31, 0x32, 0x35, 0x39, + 0x3c, 0x3a, 0x3c, 0x3f, 0x3d, 0x3b, 0x3d, 0x41, 0x3e, 0x42, 0x40, 0x3e, + 0x3e, 0x3f, 0x42, 0x44, 0x3f, 0x43, 0x45, 0x43, 0x42, 0x42, 0x40, 0x43, + 0x45, 0x46, 0x41, 0x41, 0x45, 0x47, 0x46, 0x47, 0x48, 0x44, 0x45, 0x44, + 0x44, 0x47, 0x49, 0x47, 0x49, 0x4d, 0x47, 0x49, 0x48, 0x49, 0x4f, 0x51, + 0x4f, 0x4b, 0x47, 0x48, 0x43, 0x45, 0x47, 0x4d, 0x51, 0x4f, 0x52, 0x53, + 0x53, 0x50, 0x4e, 0x4a, 0x49, 0x4a, 0x51, 0x52, 0x55, 0x58, 0x55, 0x56, + 0x59, 0x5b, 0x60, 0x61, 0x5e, 0x5a, 0x5a, 0x58, 0x5c, 0x5e, 0x5d, 0x5d, + 0x61, 0x62, 0x62, 0x63, 0x62, 0x63, 0x64, 0x65, 0x64, 0x63, 0x63, 0x64, + 0x63, 0x63, 0x64, 0x63, 0x64, 0x64, 0x64, 0x63, 0x63, 0x63, 0x64, 0x63, + 0x63, 0x64, 0x65, 0x65, 0x65, 0x62, 0x61, 0x60, 0x60, 0x5d, 0x57, 0x52, + 0x50, 0x19, 0xdc, 0xd2, 0xd8, 0xd7, 0xcb, 0xcc, 0xcc, 0xc1, 0xc2, 0xbc, + 0xbe, 0xb3, 0xbb, 0xc2, 0xbc, 0xc9, 0xd1, 0xc6, 0xb7, 0xc6, 0xc9, 0xc5, + 0xc7, 0xbc, 0xba, 0xb7, 0xbc, 0xb7, 0xbc, 0xbf, 0xc2, 0xc1, 0xc2, 0xbc, + 0xc3, 0xc1, 0xb8, 0xb9, 0xbb, 0xb9, 0xbb, 0xc3, 0xc2, 0xcd, 0xd6, 0xcd, + 0xbc, 0xbc, 0xcc, 0xce, 0xc5, 0xbc, 0xbc, 0xc2, 0xcc, 0xcb, 0xbe, 0xb9, + 0xa9, 0xb6, 0xb9, 0xb6, 0xbc, 0xd1, 0xca, 0xd2, 0xd1, 0xc4, 0xbf, 0xc2, + 0xcc, 0xc2, 0xbf, 0xbf, 0xcb, 0xbf, 0xab, 0xb7, 0xc2, 0xc3, 0xd8, 0xe1, + 0xc6, 0xb6, 0xb7, 0xc1, 0xc4, 0xbe, 0xbb, 0xb7, 0x2f, 0x32, 0x31, 0x30, + 0x32, 0x2f, 0x2d, 0x32, 0x30, 0x33, 0x37, 0x36, 0x37, 0x36, 0x38, 0x3c, + 0x3d, 0x3d, 0x3e, 0x3d, 0x3d, 0x41, 0x40, 0x3f, 0x3d, 0x3c, 0x41, 0x3e, + 0x3d, 0x43, 0x45, 0x43, 0x43, 0x42, 0x3f, 0x42, 0x46, 0x43, 0x3f, 0x42, + 0x47, 0x47, 0x45, 0x47, 0x46, 0x43, 0x45, 0x45, 0x44, 0x47, 0x47, 0x46, + 0x47, 0x4d, 0x49, 0x46, 0x45, 0x47, 0x50, 0x50, 0x4d, 0x49, 0x4c, 0x46, + 0x42, 0x43, 0x47, 0x4d, 0x4b, 0x4d, 0x4d, 0x4c, 0x4a, 0x4c, 0x4c, 0x46, + 0x43, 0x44, 0x4d, 0x52, 0x57, 0x58, 0x55, 0x55, 0x58, 0x5b, 0x5f, 
0x5f, + 0x5c, 0x58, 0x58, 0x58, 0x5c, 0x5c, 0x5b, 0x5d, 0x62, 0x62, 0x62, 0x62, + 0x62, 0x62, 0x63, 0x64, 0x63, 0x63, 0x62, 0x62, 0x62, 0x62, 0x62, 0x62, + 0x62, 0x63, 0x64, 0x63, 0x63, 0x64, 0x64, 0x64, 0x64, 0x64, 0x63, 0x64, + 0x64, 0x63, 0x62, 0x61, 0x62, 0x61, 0x61, 0x5d, 0x57, 0x40, 0xfe, 0xd2, + 0xd0, 0xce, 0xc2, 0xbe, 0xc8, 0xc2, 0xb4, 0xb8, 0xb9, 0xb5, 0xbb, 0xc0, + 0xc4, 0xd3, 0xc9, 0xc9, 0xd1, 0xcb, 0xd2, 0xc7, 0xc0, 0xc2, 0xbd, 0xb8, + 0xba, 0xb7, 0xbb, 0xc5, 0xbb, 0xbd, 0xcc, 0xd5, 0xd4, 0xbf, 0xb9, 0xbc, + 0xc0, 0xbe, 0xc2, 0xc5, 0xbf, 0xc7, 0xdf, 0xdc, 0xc2, 0xbc, 0xc8, 0xc5, + 0xc2, 0xbf, 0xc7, 0xd1, 0xd1, 0xc1, 0xbb, 0xbd, 0xae, 0xbb, 0xba, 0xbc, + 0xc0, 0xd2, 0xc7, 0xcf, 0xcc, 0xc4, 0xb9, 0xc1, 0xc2, 0xba, 0xb9, 0xbe, + 0xbf, 0xad, 0xaa, 0xbc, 0xc2, 0xb8, 0xc4, 0xce, 0xc4, 0xbf, 0xbb, 0xbd, + 0xbb, 0xba, 0xba, 0xb7, 0x30, 0x2f, 0x2f, 0x2e, 0x2f, 0x2d, 0x2d, 0x32, + 0x32, 0x34, 0x35, 0x35, 0x35, 0x35, 0x38, 0x3d, 0x3d, 0x3c, 0x3b, 0x3a, + 0x40, 0x3f, 0x3d, 0x3d, 0x38, 0x38, 0x42, 0x3e, 0x3f, 0x46, 0x43, 0x43, + 0x43, 0x40, 0x3d, 0x43, 0x42, 0x3c, 0x3d, 0x44, 0x47, 0x45, 0x42, 0x42, + 0x41, 0x3f, 0x42, 0x43, 0x44, 0x47, 0x46, 0x43, 0x46, 0x4a, 0x48, 0x41, + 0x44, 0x4a, 0x4e, 0x4d, 0x4a, 0x4c, 0x4a, 0x44, 0x41, 0x45, 0x48, 0x4b, + 0x4d, 0x4c, 0x48, 0x47, 0x4b, 0x47, 0x44, 0x42, 0x41, 0x42, 0x4a, 0x52, + 0x57, 0x57, 0x55, 0x53, 0x58, 0x5b, 0x5d, 0x5f, 0x59, 0x58, 0x58, 0x5a, + 0x5b, 0x59, 0x5a, 0x5f, 0x62, 0x62, 0x62, 0x61, 0x60, 0x63, 0x63, 0x64, + 0x63, 0x62, 0x61, 0x62, 0x61, 0x60, 0x62, 0x62, 0x62, 0x63, 0x63, 0x63, + 0x64, 0x63, 0x64, 0x64, 0x64, 0x63, 0x62, 0x63, 0x64, 0x62, 0x62, 0x62, + 0x62, 0x60, 0x61, 0x62, 0x5d, 0x51, 0x2c, 0xec, 0xcf, 0xcd, 0xc7, 0xcc, + 0xca, 0xc0, 0xb2, 0xb9, 0xc5, 0xc3, 0xc6, 0xc5, 0xcb, 0xc6, 0xbb, 0xbc, + 0xd9, 0xda, 0xda, 0xca, 0xc5, 0xc2, 0xb9, 0xb6, 0xb7, 0xb5, 0xb8, 0xc6, + 0xbe, 0xc2, 0xc7, 0xc6, 0xc7, 0xb7, 0xb5, 0xb9, 0xbc, 0xbc, 0xc0, 0xb7, + 0xc2, 0xd5, 0xdf, 0xd9, 0xcb, 0xbc, 0xbb, 0xbb, 0xb7, 0xbe, 0xc2, 
0xcc, + 0xca, 0xc2, 0xbc, 0xbe, 0xcc, 0xcc, 0xbf, 0xc2, 0xc7, 0xdc, 0xd1, 0xd7, + 0xda, 0xd4, 0xc7, 0xb7, 0xbd, 0xb7, 0xbb, 0xbf, 0xb7, 0xad, 0xb6, 0xc4, + 0xdd, 0xb7, 0xb7, 0xbc, 0xb8, 0xbc, 0xbc, 0xb4, 0xb7, 0xba, 0xc2, 0xc6, + 0x2d, 0x2c, 0x2d, 0x2c, 0x2d, 0x2e, 0x2f, 0x30, 0x32, 0x34, 0x33, 0x35, + 0x35, 0x35, 0x36, 0x3a, 0x3a, 0x3b, 0x3c, 0x3b, 0x3d, 0x3d, 0x3d, 0x39, + 0x37, 0x38, 0x3f, 0x3f, 0x3e, 0x44, 0x42, 0x43, 0x40, 0x3d, 0x40, 0x42, + 0x41, 0x3c, 0x3b, 0x46, 0x45, 0x43, 0x42, 0x40, 0x3f, 0x3f, 0x41, 0x42, + 0x43, 0x44, 0x42, 0x43, 0x46, 0x48, 0x42, 0x40, 0x45, 0x4c, 0x4c, 0x4b, + 0x48, 0x49, 0x46, 0x44, 0x41, 0x43, 0x45, 0x49, 0x4b, 0x4a, 0x48, 0x4a, + 0x47, 0x42, 0x40, 0x45, 0x42, 0x46, 0x4a, 0x52, 0x57, 0x59, 0x55, 0x53, + 0x57, 0x5c, 0x5d, 0x5d, 0x57, 0x58, 0x58, 0x59, 0x59, 0x5a, 0x5a, 0x5f, + 0x62, 0x62, 0x61, 0x60, 0x60, 0x62, 0x63, 0x65, 0x64, 0x62, 0x61, 0x62, + 0x62, 0x60, 0x5f, 0x62, 0x62, 0x62, 0x62, 0x63, 0x63, 0x63, 0x64, 0x64, + 0x64, 0x64, 0x64, 0x63, 0x63, 0x63, 0x62, 0x63, 0x63, 0x62, 0x61, 0x60, + 0x60, 0x5b, 0x4b, 0x1f, 0xdd, 0xd2, 0xc2, 0xc5, 0xc2, 0xc0, 0xba, 0xba, + 0xbb, 0xc1, 0xc6, 0xc1, 0xc3, 0xb7, 0xb7, 0xb6, 0xc6, 0xc4, 0xc6, 0xbe, + 0xc2, 0xb1, 0xb3, 0xbb, 0xc0, 0xbc, 0xb6, 0xbe, 0xbd, 0xbd, 0xc1, 0xc1, + 0xc4, 0xbd, 0xbb, 0xb3, 0xb7, 0xbd, 0xc1, 0xb7, 0xc2, 0xcf, 0xcf, 0xce, + 0xbe, 0xc0, 0xc2, 0xba, 0xb6, 0xba, 0xb4, 0xc1, 0xbe, 0xbc, 0xb7, 0xb7, + 0xc4, 0xc5, 0xba, 0xba, 0xbf, 0xbf, 0xbc, 0xc5, 0xcc, 0xc6, 0xc1, 0xbb, + 0xbc, 0xbc, 0xc2, 0xc2, 0xbb, 0xb9, 0xc0, 0xc2, 0xcd, 0xb7, 0xba, 0xb7, + 0xb9, 0xb3, 0xbc, 0xbd, 0xb7, 0xba, 0xc1, 0xbd, 0x28, 0x2a, 0x2a, 0x2b, + 0x29, 0x2c, 0x2d, 0x30, 0x32, 0x32, 0x32, 0x34, 0x34, 0x33, 0x35, 0x39, + 0x37, 0x39, 0x3c, 0x39, 0x38, 0x3c, 0x3a, 0x35, 0x32, 0x39, 0x3d, 0x3d, + 0x3f, 0x43, 0x42, 0x42, 0x3f, 0x3d, 0x3e, 0x42, 0x3e, 0x3a, 0x3e, 0x47, + 0x42, 0x40, 0x3e, 0x3d, 0x3e, 0x3e, 0x43, 0x43, 0x42, 0x42, 0x41, 0x44, + 0x45, 0x47, 0x42, 0x42, 0x48, 0x48, 0x47, 0x48, 0x47, 0x44, 0x43, 
0x44, + 0x42, 0x42, 0x43, 0x48, 0x4a, 0x47, 0x42, 0x47, 0x45, 0x3d, 0x3d, 0x47, + 0x47, 0x46, 0x48, 0x55, 0x58, 0x58, 0x54, 0x55, 0x57, 0x5b, 0x5d, 0x5c, + 0x57, 0x57, 0x58, 0x57, 0x59, 0x5a, 0x5c, 0x60, 0x62, 0x61, 0x60, 0x60, + 0x60, 0x62, 0x63, 0x64, 0x64, 0x62, 0x61, 0x62, 0x62, 0x60, 0x5f, 0x61, + 0x62, 0x62, 0x61, 0x62, 0x63, 0x63, 0x63, 0x63, 0x64, 0x64, 0x64, 0x63, + 0x63, 0x63, 0x63, 0x63, 0x63, 0x62, 0x62, 0x5d, 0x5a, 0x57, 0x54, 0x43, + 0x0a, 0xce, 0xc1, 0xbc, 0xc3, 0xc2, 0xb7, 0xba, 0xb7, 0xbf, 0xcb, 0xc2, + 0xc1, 0xba, 0xb7, 0xb5, 0xbb, 0xbe, 0xbe, 0xbc, 0xc1, 0xae, 0xb2, 0xbe, + 0xc0, 0xbd, 0xb7, 0xb8, 0xc3, 0xc3, 0xc2, 0xc7, 0xc4, 0xc8, 0xc3, 0xb7, + 0xb7, 0xc5, 0xbc, 0xb9, 0xbf, 0xbc, 0xb9, 0xc7, 0xb7, 0xc3, 0xc2, 0xbf, + 0xc4, 0xbf, 0xb8, 0xc1, 0xbc, 0xbe, 0xbc, 0xc4, 0xc7, 0xc5, 0xc4, 0xc9, + 0xba, 0xba, 0xbe, 0xc0, 0xc1, 0xca, 0xc5, 0xc7, 0xcb, 0xcb, 0xcc, 0xc7, + 0xbc, 0xbc, 0xc9, 0xbc, 0xcc, 0xbc, 0xb8, 0xaf, 0xad, 0xb1, 0xb7, 0xcc, + 0xc0, 0xb5, 0xb3, 0xb4, 0x29, 0x29, 0x28, 0x2a, 0x29, 0x2d, 0x2f, 0x32, + 0x31, 0x31, 0x32, 0x32, 0x33, 0x33, 0x37, 0x3a, 0x37, 0x34, 0x35, 0x39, + 0x3a, 0x39, 0x38, 0x33, 0x2d, 0x37, 0x3b, 0x3d, 0x43, 0x42, 0x41, 0x42, + 0x3d, 0x3a, 0x3e, 0x40, 0x3d, 0x3c, 0x43, 0x46, 0x42, 0x3e, 0x3b, 0x3d, + 0x3e, 0x3d, 0x42, 0x3e, 0x3f, 0x42, 0x41, 0x45, 0x46, 0x43, 0x42, 0x45, + 0x4c, 0x4c, 0x47, 0x47, 0x44, 0x43, 0x44, 0x42, 0x43, 0x43, 0x43, 0x48, + 0x47, 0x42, 0x42, 0x43, 0x42, 0x37, 0x42, 0x49, 0x47, 0x47, 0x4d, 0x56, + 0x59, 0x58, 0x54, 0x56, 0x57, 0x5a, 0x5c, 0x5b, 0x59, 0x59, 0x57, 0x57, + 0x59, 0x5b, 0x5d, 0x62, 0x62, 0x60, 0x61, 0x60, 0x60, 0x62, 0x62, 0x64, + 0x63, 0x62, 0x60, 0x61, 0x60, 0x61, 0x62, 0x62, 0x61, 0x62, 0x62, 0x63, + 0x63, 0x62, 0x63, 0x64, 0x64, 0x64, 0x64, 0x64, 0x63, 0x63, 0x63, 0x64, + 0x62, 0x61, 0x5d, 0x5a, 0x57, 0x53, 0x4d, 0x48, 0x27, 0xed, 0xce, 0xc2, + 0xbe, 0xb9, 0xba, 0xbc, 0xbf, 0xc2, 0xc7, 0xc0, 0xc6, 0xbb, 0xba, 0xb4, + 0xba, 0xc7, 0xcb, 0xbc, 0xbd, 0xbc, 0xc2, 0xbb, 0xba, 0xbe, 0xc0, 
0xbc, + 0xc1, 0xcb, 0xd2, 0xc8, 0xc2, 0xd4, 0xc4, 0xb3, 0xb7, 0xc1, 0xc1, 0xc9, + 0xc5, 0xb5, 0xb5, 0xc6, 0xc2, 0xc3, 0xc0, 0xc4, 0xcc, 0xbc, 0xb5, 0xbd, + 0xc2, 0xc4, 0xc4, 0xe0, 0xd2, 0xc0, 0xca, 0xc2, 0xb2, 0xbd, 0xbd, 0xbf, + 0xbc, 0xcf, 0xcf, 0xc4, 0xc6, 0xcc, 0xc7, 0xb9, 0xba, 0xbd, 0xc0, 0xbf, + 0xc7, 0xbc, 0xc5, 0xb7, 0xbb, 0xad, 0xad, 0xbc, 0xc2, 0xbb, 0xb6, 0xb7, + 0x27, 0x27, 0x27, 0x2b, 0x2a, 0x2d, 0x2e, 0x31, 0x31, 0x2f, 0x32, 0x32, + 0x2f, 0x34, 0x38, 0x39, 0x37, 0x32, 0x32, 0x38, 0x38, 0x37, 0x36, 0x31, + 0x30, 0x38, 0x3b, 0x3d, 0x42, 0x3e, 0x3f, 0x41, 0x3d, 0x3d, 0x3e, 0x41, + 0x3c, 0x3d, 0x43, 0x42, 0x3f, 0x3d, 0x39, 0x3a, 0x3d, 0x40, 0x3e, 0x3c, + 0x3d, 0x3f, 0x40, 0x44, 0x44, 0x42, 0x41, 0x46, 0x4b, 0x49, 0x47, 0x47, + 0x43, 0x42, 0x42, 0x42, 0x43, 0x42, 0x47, 0x45, 0x3e, 0x3e, 0x3d, 0x42, + 0x3f, 0x3d, 0x47, 0x4a, 0x48, 0x47, 0x51, 0x56, 0x5a, 0x58, 0x55, 0x55, + 0x57, 0x5c, 0x5c, 0x5a, 0x5a, 0x5b, 0x59, 0x58, 0x5a, 0x5b, 0x5d, 0x62, + 0x62, 0x61, 0x61, 0x61, 0x61, 0x62, 0x62, 0x64, 0x63, 0x62, 0x60, 0x61, + 0x60, 0x60, 0x62, 0x63, 0x62, 0x61, 0x61, 0x62, 0x63, 0x63, 0x63, 0x64, + 0x64, 0x64, 0x64, 0x64, 0x64, 0x64, 0x63, 0x62, 0x62, 0x5f, 0x5d, 0x58, + 0x53, 0x52, 0x4d, 0x49, 0x3a, 0x11, 0xd9, 0xc1, 0xbd, 0xbc, 0xbf, 0xc5, + 0xc2, 0xcc, 0xd0, 0xc0, 0xc0, 0xb9, 0xb8, 0xb7, 0xb7, 0xbd, 0xcc, 0xc6, + 0xbd, 0xc7, 0xc6, 0xbc, 0xbf, 0xbd, 0xbc, 0xba, 0xb8, 0xc4, 0xc8, 0xb6, + 0xb6, 0xd3, 0xc4, 0xb6, 0xbc, 0xb7, 0xbe, 0xcc, 0xc5, 0xb6, 0xc2, 0xc7, + 0xb7, 0xbc, 0xc0, 0xc7, 0xc9, 0xc0, 0xb4, 0xbb, 0xc1, 0xc2, 0xc2, 0xcc, + 0xbf, 0xc2, 0xd2, 0xc1, 0xb7, 0xc1, 0xbd, 0xbd, 0xbc, 0xc1, 0xbb, 0xbd, + 0xc9, 0xca, 0xc5, 0xbc, 0xbf, 0xc4, 0xc6, 0xbc, 0xc9, 0xc3, 0xc5, 0xbd, + 0xbc, 0xb7, 0xb2, 0xb6, 0xc2, 0xbc, 0xb9, 0xbc, 0x24, 0x27, 0x27, 0x28, + 0x2a, 0x2c, 0x2d, 0x2e, 0x2f, 0x30, 0x32, 0x31, 0x31, 0x37, 0x37, 0x34, + 0x32, 0x32, 0x33, 0x37, 0x37, 0x34, 0x32, 0x2d, 0x33, 0x3a, 0x3a, 0x3d, + 0x3f, 0x3e, 0x40, 0x41, 0x3b, 0x3b, 0x3f, 0x3f, 0x3d, 0x3f, 0x41, 
0x3e, + 0x3d, 0x3a, 0x39, 0x3c, 0x3d, 0x3e, 0x3d, 0x3b, 0x3b, 0x3a, 0x41, 0x43, + 0x43, 0x42, 0x44, 0x46, 0x47, 0x48, 0x47, 0x42, 0x41, 0x45, 0x45, 0x43, + 0x41, 0x42, 0x43, 0x41, 0x3d, 0x37, 0x3b, 0x3f, 0x42, 0x42, 0x47, 0x4a, + 0x47, 0x4d, 0x52, 0x57, 0x5b, 0x53, 0x53, 0x57, 0x57, 0x5b, 0x5c, 0x5b, + 0x5a, 0x5d, 0x5a, 0x58, 0x5b, 0x5d, 0x5d, 0x62, 0x61, 0x61, 0x60, 0x60, + 0x5f, 0x60, 0x62, 0x62, 0x63, 0x62, 0x5f, 0x60, 0x62, 0x60, 0x60, 0x62, + 0x62, 0x62, 0x61, 0x61, 0x61, 0x62, 0x63, 0x64, 0x64, 0x64, 0x65, 0x65, + 0x64, 0x64, 0x64, 0x63, 0x62, 0x62, 0x5d, 0x57, 0x4c, 0x48, 0x44, 0x3c, + 0x2d, 0x1a, 0xe9, 0xc0, 0xc4, 0xc1, 0xbc, 0xc1, 0xd0, 0xc7, 0xc4, 0xc1, + 0xb7, 0xb1, 0xb8, 0xbc, 0xcb, 0xc2, 0xc9, 0xd7, 0xd6, 0xcc, 0xc6, 0xc2, + 0xcc, 0xd2, 0xd2, 0xbc, 0xbc, 0xc3, 0xbe, 0xc2, 0xb9, 0xb4, 0xb6, 0xb9, + 0xc1, 0xb7, 0xbd, 0xc1, 0xb9, 0xbc, 0xd2, 0xcd, 0xb6, 0xbd, 0xc2, 0xc7, + 0xc7, 0xc0, 0xbb, 0xbb, 0xb9, 0xba, 0xb9, 0xbd, 0xc1, 0xc7, 0xd2, 0xc2, + 0xb8, 0xb7, 0xbc, 0xc0, 0xc1, 0xca, 0xc2, 0xc5, 0xd4, 0xd4, 0xd1, 0xc6, + 0xcb, 0xcf, 0xc7, 0xbf, 0xc7, 0xc2, 0xb7, 0xac, 0xb3, 0xb9, 0xb3, 0xb7, + 0xc4, 0xc7, 0xb6, 0xb7, 0x22, 0x28, 0x28, 0x2b, 0x2c, 0x2b, 0x2b, 0x2c, + 0x2c, 0x2f, 0x2d, 0x29, 0x2e, 0x37, 0x37, 0x35, 0x32, 0x32, 0x36, 0x37, + 0x35, 0x33, 0x2f, 0x28, 0x32, 0x37, 0x38, 0x3b, 0x3f, 0x3c, 0x3e, 0x3c, + 0x37, 0x3c, 0x3f, 0x3b, 0x3b, 0x3d, 0x3e, 0x3d, 0x3a, 0x39, 0x38, 0x3b, + 0x3b, 0x3d, 0x3e, 0x39, 0x37, 0x36, 0x3e, 0x45, 0x44, 0x42, 0x44, 0x46, + 0x46, 0x47, 0x41, 0x3d, 0x44, 0x46, 0x47, 0x42, 0x3f, 0x41, 0x3e, 0x3f, + 0x3a, 0x37, 0x3e, 0x42, 0x45, 0x44, 0x48, 0x49, 0x4c, 0x52, 0x53, 0x57, + 0x59, 0x51, 0x54, 0x57, 0x57, 0x5a, 0x5c, 0x5b, 0x5b, 0x5d, 0x5a, 0x59, + 0x59, 0x5d, 0x5d, 0x62, 0x62, 0x60, 0x60, 0x60, 0x5f, 0x5f, 0x62, 0x61, + 0x63, 0x62, 0x60, 0x61, 0x61, 0x61, 0x60, 0x61, 0x61, 0x62, 0x62, 0x62, + 0x62, 0x62, 0x62, 0x63, 0x64, 0x64, 0x65, 0x65, 0x65, 0x65, 0x64, 0x64, + 0x64, 0x63, 0x62, 0x5e, 0x56, 0x50, 0x42, 0x36, 0x24, 0x13, 0xf7, 
0xcb, + 0xc3, 0xc4, 0xba, 0xbe, 0xc5, 0xc4, 0xc1, 0xc1, 0xb2, 0xb1, 0xbc, 0xbd, + 0xc8, 0xd1, 0xd1, 0xc8, 0xd9, 0xce, 0xc2, 0xc2, 0xcb, 0xd2, 0xd8, 0xbe, + 0xbf, 0xbc, 0xbb, 0xbf, 0xc0, 0xb5, 0xb4, 0xb7, 0xbd, 0xbf, 0xbf, 0xc5, + 0xbb, 0xc1, 0xc3, 0xbc, 0xc2, 0xc2, 0xba, 0xc2, 0xca, 0xc2, 0xc2, 0xc8, + 0xb7, 0xb7, 0xbf, 0xc7, 0xcc, 0xd2, 0xc9, 0xb7, 0xb4, 0xb5, 0xc3, 0xc3, + 0xc2, 0xca, 0xca, 0xc5, 0xc7, 0xd6, 0xcf, 0xd0, 0xdf, 0xd2, 0xbe, 0xc2, + 0xc8, 0xc7, 0xb8, 0xb2, 0xb8, 0xb8, 0xb4, 0xce, 0xd0, 0xc2, 0xb9, 0xb8, + 0x27, 0x27, 0x2a, 0x2b, 0x2c, 0x2b, 0x2b, 0x28, 0x2a, 0x2d, 0x2a, 0x2a, + 0x2f, 0x35, 0x34, 0x33, 0x32, 0x34, 0x37, 0x36, 0x34, 0x32, 0x2d, 0x29, + 0x32, 0x37, 0x37, 0x3c, 0x3c, 0x3b, 0x3c, 0x38, 0x39, 0x39, 0x3e, 0x3d, + 0x3e, 0x36, 0x3a, 0x38, 0x39, 0x37, 0x37, 0x3a, 0x3b, 0x3b, 0x3b, 0x37, + 0x34, 0x35, 0x3d, 0x42, 0x43, 0x43, 0x45, 0x43, 0x42, 0x41, 0x3e, 0x42, + 0x42, 0x46, 0x46, 0x3f, 0x3f, 0x41, 0x42, 0x3d, 0x34, 0x3c, 0x42, 0x43, + 0x46, 0x47, 0x4b, 0x4c, 0x50, 0x4f, 0x52, 0x59, 0x55, 0x52, 0x55, 0x57, + 0x59, 0x5a, 0x5c, 0x5a, 0x5d, 0x5e, 0x5a, 0x59, 0x59, 0x5d, 0x5e, 0x62, + 0x62, 0x5f, 0x5f, 0x60, 0x60, 0x5f, 0x62, 0x61, 0x62, 0x62, 0x60, 0x60, + 0x60, 0x62, 0x61, 0x60, 0x60, 0x61, 0x62, 0x62, 0x62, 0x62, 0x63, 0x63, + 0x64, 0x64, 0x64, 0x65, 0x65, 0x66, 0x65, 0x64, 0x64, 0x63, 0x62, 0x62, + 0x62, 0x5d, 0x52, 0x49, 0x2e, 0x10, 0xec, 0xe1, 0xd2, 0xc3, 0xc7, 0xbf, + 0xb7, 0xc4, 0xc1, 0xbd, 0xb1, 0xb2, 0xbe, 0xc5, 0xb7, 0xbd, 0xc6, 0xc4, + 0xc2, 0xc2, 0xc4, 0xd7, 0xc7, 0xbe, 0xc8, 0xc2, 0xbc, 0xbc, 0xba, 0xb4, + 0xb7, 0xbb, 0xb8, 0xbc, 0xc1, 0xd2, 0xce, 0xc1, 0xb9, 0xba, 0xb9, 0xb7, + 0xc6, 0xc3, 0xc2, 0xcf, 0xcc, 0xcc, 0xc2, 0xc9, 0xb9, 0xb4, 0xb9, 0xc5, + 0xc0, 0xba, 0xbb, 0xc4, 0xc7, 0xc2, 0xd5, 0xca, 0xc5, 0xd5, 0xd2, 0xca, + 0xce, 0xd1, 0xc8, 0xcc, 0xd4, 0xc7, 0xbd, 0xbd, 0xc8, 0xc2, 0xb6, 0xb4, + 0xb7, 0xb5, 0xb3, 0xc7, 0xc6, 0xb7, 0xc1, 0xbc, 0x26, 0x28, 0x2a, 0x29, + 0x29, 0x29, 0x29, 0x25, 0x2b, 0x2d, 0x2a, 0x2c, 0x31, 0x34, 0x34, 
0x32, + 0x33, 0x36, 0x36, 0x36, 0x35, 0x30, 0x29, 0x29, 0x32, 0x34, 0x37, 0x39, + 0x39, 0x3a, 0x3d, 0x38, 0x3a, 0x38, 0x3b, 0x3b, 0x37, 0x33, 0x38, 0x38, + 0x37, 0x35, 0x38, 0x39, 0x38, 0x3a, 0x37, 0x37, 0x32, 0x36, 0x3f, 0x44, + 0x44, 0x42, 0x44, 0x41, 0x3f, 0x3e, 0x3d, 0x3f, 0x44, 0x46, 0x42, 0x3f, + 0x3d, 0x40, 0x42, 0x3a, 0x36, 0x3e, 0x44, 0x45, 0x47, 0x4a, 0x4e, 0x4f, + 0x50, 0x4c, 0x52, 0x58, 0x52, 0x52, 0x57, 0x5a, 0x5a, 0x5b, 0x5d, 0x5a, + 0x5c, 0x5d, 0x5b, 0x5b, 0x5a, 0x5d, 0x5d, 0x61, 0x61, 0x5f, 0x5e, 0x60, + 0x5f, 0x5e, 0x61, 0x61, 0x60, 0x60, 0x5f, 0x5e, 0x60, 0x61, 0x61, 0x61, + 0x61, 0x62, 0x62, 0x62, 0x62, 0x62, 0x62, 0x63, 0x64, 0x64, 0x65, 0x65, + 0x65, 0x66, 0x65, 0x64, 0x64, 0x64, 0x64, 0x62, 0x62, 0x60, 0x5b, 0x53, + 0x40, 0x22, 0xe9, 0xf0, 0xec, 0xbf, 0xc4, 0xbc, 0xbb, 0xc0, 0xba, 0xbc, + 0xb6, 0xb9, 0xb7, 0xba, 0xb7, 0xb7, 0xbe, 0xbf, 0xbd, 0xbd, 0xcc, 0xcd, + 0xbc, 0xae, 0xbd, 0xc3, 0xbd, 0xc2, 0xcc, 0xc5, 0xc4, 0xbe, 0xb7, 0xc5, + 0xc1, 0xc2, 0xbd, 0xb7, 0xb6, 0xc1, 0xba, 0xba, 0xc4, 0xbe, 0xcc, 0xc9, + 0xc6, 0xc7, 0xca, 0xc8, 0xbe, 0xae, 0xb8, 0xbe, 0xbd, 0xb9, 0xc8, 0xd7, + 0xd4, 0xd6, 0xcf, 0xca, 0xc7, 0xcb, 0xce, 0xce, 0xd7, 0xcc, 0xc5, 0xc4, + 0xbe, 0xca, 0xcc, 0xbd, 0xc5, 0xc2, 0xbd, 0xbe, 0xb9, 0xb2, 0xaf, 0xb4, + 0xb8, 0xb4, 0xc1, 0xbb, 0x28, 0x29, 0x29, 0x27, 0x25, 0x26, 0x27, 0x27, + 0x2b, 0x29, 0x2a, 0x28, 0x30, 0x32, 0x2e, 0x2f, 0x32, 0x34, 0x37, 0x36, + 0x32, 0x2b, 0x25, 0x2a, 0x34, 0x35, 0x37, 0x37, 0x38, 0x3b, 0x39, 0x36, + 0x37, 0x35, 0x38, 0x37, 0x34, 0x33, 0x36, 0x37, 0x37, 0x36, 0x38, 0x36, + 0x36, 0x37, 0x36, 0x36, 0x37, 0x3c, 0x42, 0x44, 0x3f, 0x41, 0x3f, 0x3d, + 0x3c, 0x3c, 0x3f, 0x46, 0x47, 0x45, 0x41, 0x3d, 0x3d, 0x42, 0x3a, 0x34, + 0x38, 0x40, 0x44, 0x46, 0x48, 0x4d, 0x50, 0x50, 0x4d, 0x4f, 0x56, 0x56, + 0x51, 0x52, 0x56, 0x5a, 0x5c, 0x5b, 0x5b, 0x5b, 0x5c, 0x5e, 0x5d, 0x5d, + 0x5c, 0x5e, 0x5d, 0x61, 0x61, 0x5f, 0x60, 0x61, 0x5f, 0x5e, 0x60, 0x61, + 0x5e, 0x5f, 0x5d, 0x5f, 0x5f, 0x62, 0x62, 0x60, 0x62, 0x61, 0x62, 
0x63, + 0x63, 0x63, 0x63, 0x63, 0x65, 0x65, 0x66, 0x65, 0x66, 0x66, 0x64, 0x64, + 0x64, 0x65, 0x65, 0x65, 0x65, 0x63, 0x5e, 0x58, 0x55, 0x42, 0x20, 0xf3, + 0xd5, 0xce, 0xc7, 0xbc, 0xbf, 0xbc, 0xb6, 0xb7, 0xb7, 0xb7, 0xb4, 0xb9, + 0xb7, 0xb6, 0xbb, 0xbe, 0xbe, 0xba, 0xba, 0xbe, 0xc0, 0xad, 0xbb, 0xc1, + 0xc2, 0xc2, 0xbd, 0xc2, 0xcd, 0xca, 0xb7, 0xb6, 0xb8, 0xb8, 0xb6, 0xb6, + 0xb8, 0xc3, 0xb7, 0xbb, 0xb7, 0xbe, 0xdb, 0xd0, 0xbd, 0xbd, 0xc0, 0xbe, + 0xc5, 0xbc, 0xbc, 0xba, 0xbd, 0xc1, 0xcc, 0xd4, 0xd7, 0xcb, 0xc6, 0xc4, + 0xca, 0xc4, 0xd1, 0xc7, 0xcc, 0xc1, 0xb7, 0xbc, 0xcd, 0xbe, 0xca, 0xbe, + 0xc7, 0xc2, 0xc7, 0xc4, 0xb1, 0xb2, 0xb6, 0xb5, 0xba, 0xb2, 0xbf, 0xc4, + 0x22, 0x25, 0x22, 0x24, 0x21, 0x24, 0x25, 0x25, 0x27, 0x2a, 0x27, 0x27, + 0x30, 0x2f, 0x2e, 0x2f, 0x31, 0x34, 0x35, 0x32, 0x2d, 0x29, 0x24, 0x2c, + 0x34, 0x35, 0x34, 0x32, 0x37, 0x39, 0x36, 0x34, 0x35, 0x33, 0x37, 0x35, + 0x31, 0x32, 0x34, 0x34, 0x36, 0x37, 0x37, 0x36, 0x35, 0x39, 0x3b, 0x38, + 0x3d, 0x40, 0x42, 0x43, 0x3e, 0x40, 0x3d, 0x3b, 0x39, 0x3c, 0x45, 0x46, + 0x46, 0x46, 0x40, 0x3d, 0x3f, 0x3a, 0x33, 0x36, 0x39, 0x42, 0x47, 0x4c, + 0x4c, 0x4f, 0x4f, 0x4b, 0x4f, 0x53, 0x57, 0x52, 0x4f, 0x54, 0x56, 0x59, + 0x5a, 0x58, 0x58, 0x5a, 0x5d, 0x5d, 0x5c, 0x5d, 0x5d, 0x5e, 0x5e, 0x61, + 0x60, 0x5c, 0x5d, 0x5e, 0x5f, 0x5f, 0x60, 0x5f, 0x5e, 0x5d, 0x5e, 0x5f, + 0x60, 0x62, 0x62, 0x61, 0x62, 0x62, 0x62, 0x62, 0x63, 0x63, 0x63, 0x64, + 0x65, 0x66, 0x66, 0x65, 0x65, 0x65, 0x65, 0x64, 0x63, 0x65, 0x64, 0x65, + 0x65, 0x64, 0x62, 0x5b, 0x55, 0x4e, 0x37, 0x03, 0xd8, 0xc9, 0xc2, 0xc3, + 0xc1, 0xb4, 0xb5, 0xb7, 0xb5, 0xb2, 0xb7, 0xb8, 0xb6, 0xb5, 0xba, 0xbb, + 0xbc, 0xba, 0xc1, 0xbf, 0xba, 0xb2, 0xc1, 0xbc, 0xbe, 0xbc, 0xbb, 0xc2, + 0xc2, 0xcc, 0xbc, 0xb2, 0xaf, 0xb7, 0xc4, 0xc7, 0xb8, 0xba, 0xb4, 0xb9, + 0xbc, 0xba, 0xc2, 0xbb, 0xb7, 0xc2, 0xbe, 0xbc, 0xc2, 0xcb, 0xc2, 0xc6, + 0xbd, 0xb9, 0xc8, 0xd3, 0xda, 0xc9, 0xbe, 0xba, 0xb9, 0xbc, 0xc5, 0xc4, + 0xd0, 0xd6, 0xe9, 0xdf, 0xcf, 0xc7, 0xc1, 0xc5, 0xc7, 0xb8, 0xbe, 
0xc7, + 0xc4, 0xae, 0xb5, 0xb7, 0xc0, 0xc0, 0xcc, 0xce, 0x1d, 0x23, 0x22, 0x25, + 0x21, 0x21, 0x23, 0x23, 0x25, 0x27, 0x2a, 0x2a, 0x30, 0x2b, 0x29, 0x2c, + 0x2f, 0x30, 0x32, 0x2e, 0x2d, 0x29, 0x25, 0x2c, 0x31, 0x32, 0x34, 0x33, + 0x36, 0x36, 0x32, 0x30, 0x30, 0x32, 0x35, 0x31, 0x30, 0x32, 0x33, 0x36, + 0x37, 0x36, 0x34, 0x33, 0x35, 0x39, 0x3d, 0x3c, 0x3e, 0x3f, 0x42, 0x41, + 0x39, 0x3c, 0x3a, 0x3a, 0x3d, 0x42, 0x45, 0x43, 0x45, 0x47, 0x3f, 0x3c, + 0x39, 0x39, 0x37, 0x37, 0x41, 0x45, 0x49, 0x4c, 0x4c, 0x4d, 0x4c, 0x4e, + 0x50, 0x53, 0x55, 0x50, 0x4d, 0x56, 0x56, 0x5a, 0x59, 0x57, 0x57, 0x5a, + 0x5d, 0x5c, 0x5b, 0x5c, 0x5f, 0x5d, 0x5d, 0x5f, 0x5e, 0x5b, 0x5c, 0x5c, + 0x5d, 0x5e, 0x60, 0x5e, 0x60, 0x5d, 0x5f, 0x5f, 0x60, 0x62, 0x62, 0x61, + 0x62, 0x61, 0x62, 0x62, 0x62, 0x62, 0x64, 0x64, 0x64, 0x64, 0x64, 0x65, + 0x65, 0x64, 0x64, 0x65, 0x64, 0x63, 0x64, 0x64, 0x64, 0x65, 0x65, 0x5f, + 0x56, 0x52, 0x46, 0x1d, 0xda, 0xc7, 0xc6, 0xbc, 0xc2, 0xbb, 0xbe, 0xc5, + 0xb4, 0xb5, 0xb9, 0xba, 0xb4, 0xb4, 0xad, 0xb4, 0xb7, 0xbc, 0xbd, 0xd2, + 0xc9, 0xb1, 0xb8, 0xb8, 0xba, 0xb3, 0xb5, 0xbc, 0xbe, 0xbe, 0xb8, 0xb1, + 0xab, 0xba, 0xc8, 0xc3, 0xbc, 0xbf, 0xb4, 0xb8, 0xb9, 0xbc, 0xc6, 0xb7, + 0xbc, 0xc5, 0xbf, 0xbe, 0xb9, 0xc5, 0xc9, 0xcf, 0xc0, 0xbe, 0xc9, 0xd1, + 0xcb, 0xbc, 0xbd, 0xc6, 0xb5, 0xb7, 0xbd, 0xc1, 0xc4, 0xd1, 0xfb, 0xf6, + 0xce, 0xbe, 0xc0, 0xcf, 0xcb, 0xc2, 0xb9, 0xbd, 0xae, 0xaa, 0xb9, 0xbc, + 0xc8, 0xc9, 0xbc, 0xbb, 0x1e, 0x1f, 0x21, 0x24, 0x22, 0x20, 0x23, 0x26, + 0x27, 0x27, 0x29, 0x2b, 0x2e, 0x28, 0x27, 0x2a, 0x2c, 0x2f, 0x2c, 0x2a, + 0x2a, 0x26, 0x25, 0x2d, 0x30, 0x31, 0x31, 0x35, 0x35, 0x33, 0x31, 0x2d, + 0x2d, 0x2f, 0x32, 0x2d, 0x30, 0x32, 0x33, 0x36, 0x37, 0x37, 0x34, 0x34, + 0x39, 0x3d, 0x3d, 0x3b, 0x3e, 0x40, 0x41, 0x39, 0x37, 0x37, 0x36, 0x38, + 0x40, 0x42, 0x40, 0x46, 0x47, 0x46, 0x3b, 0x37, 0x35, 0x37, 0x39, 0x3d, + 0x41, 0x47, 0x47, 0x48, 0x47, 0x4a, 0x4e, 0x52, 0x52, 0x55, 0x54, 0x4d, + 0x4f, 0x57, 0x57, 0x5a, 0x58, 0x57, 0x56, 0x5a, 0x5d, 0x5b, 0x5a, 
0x5b, + 0x60, 0x5d, 0x5b, 0x5d, 0x5d, 0x5a, 0x5b, 0x5b, 0x5c, 0x5e, 0x5f, 0x5d, + 0x5f, 0x5e, 0x5f, 0x5e, 0x5f, 0x62, 0x62, 0x61, 0x62, 0x62, 0x62, 0x61, + 0x61, 0x62, 0x62, 0x63, 0x63, 0x63, 0x64, 0x64, 0x64, 0x64, 0x65, 0x65, + 0x64, 0x64, 0x65, 0x65, 0x64, 0x64, 0x64, 0x63, 0x5f, 0x53, 0x38, 0x22, + 0xf4, 0xe2, 0xd2, 0xc9, 0xc3, 0xd5, 0xc2, 0xcb, 0xb8, 0xb6, 0xbb, 0xb9, + 0xb0, 0xae, 0xae, 0xb9, 0xb1, 0xb7, 0xba, 0xc2, 0xcc, 0xb7, 0xb7, 0xb6, + 0xb7, 0xb7, 0xbc, 0xb9, 0xbc, 0xb8, 0xb5, 0xac, 0xad, 0xbe, 0xc7, 0xc2, + 0xc3, 0xca, 0xb8, 0xb8, 0xb4, 0xbd, 0xc8, 0xb9, 0xc3, 0xc4, 0xbc, 0xb9, + 0xbc, 0xc0, 0xc2, 0xca, 0xc1, 0xc3, 0xc2, 0xcc, 0xc8, 0xc2, 0xc1, 0xbe, + 0xb6, 0xb3, 0xba, 0xc0, 0xbf, 0xb7, 0xc8, 0xcc, 0xca, 0xc4, 0xd1, 0xd0, + 0xc9, 0xc4, 0xb2, 0xb9, 0xab, 0xae, 0xbb, 0xba, 0xc4, 0xc6, 0xb0, 0xb8, + 0x1b, 0x1d, 0x1d, 0x1f, 0x1d, 0x1f, 0x21, 0x22, 0x27, 0x28, 0x28, 0x27, + 0x2c, 0x26, 0x28, 0x29, 0x2c, 0x31, 0x29, 0x28, 0x24, 0x25, 0x26, 0x2a, + 0x2b, 0x30, 0x31, 0x35, 0x31, 0x30, 0x2e, 0x2f, 0x2e, 0x31, 0x2f, 0x31, + 0x32, 0x30, 0x32, 0x34, 0x37, 0x39, 0x37, 0x36, 0x38, 0x3d, 0x3c, 0x3a, + 0x3a, 0x3e, 0x3c, 0x3d, 0x39, 0x37, 0x39, 0x3d, 0x3d, 0x3f, 0x40, 0x45, + 0x46, 0x42, 0x3b, 0x34, 0x2f, 0x33, 0x3a, 0x3c, 0x41, 0x47, 0x43, 0x43, + 0x46, 0x4c, 0x51, 0x52, 0x55, 0x57, 0x54, 0x4e, 0x52, 0x57, 0x59, 0x5a, + 0x58, 0x57, 0x54, 0x5a, 0x5c, 0x59, 0x59, 0x5a, 0x5e, 0x5d, 0x5a, 0x5c, + 0x5b, 0x59, 0x5c, 0x5b, 0x5b, 0x5d, 0x5d, 0x5c, 0x5d, 0x5e, 0x5f, 0x5f, + 0x5f, 0x61, 0x62, 0x61, 0x62, 0x63, 0x62, 0x62, 0x62, 0x62, 0x62, 0x62, + 0x63, 0x63, 0x63, 0x64, 0x64, 0x64, 0x64, 0x64, 0x63, 0x63, 0x65, 0x66, + 0x66, 0x65, 0x64, 0x63, 0x62, 0x58, 0x44, 0x2a, 0x1f, 0x07, 0xf2, 0xd2, + 0xc5, 0xdc, 0xd2, 0xbe, 0xbe, 0xbb, 0xbd, 0xb1, 0xac, 0xb1, 0xb2, 0xb8, + 0xac, 0xbc, 0xbf, 0xbf, 0xb8, 0xbc, 0xbc, 0xb7, 0xb6, 0xb3, 0xbc, 0xb9, + 0xbd, 0xb2, 0xb6, 0xb3, 0xb1, 0xbc, 0xbc, 0xb9, 0xbb, 0xc5, 0xc6, 0xbf, + 0xb8, 0xbc, 0xbc, 0xb8, 0xbc, 0xbd, 0xbc, 0xb7, 0xbf, 0xbc, 0xbe, 
0xca, + 0xd2, 0xbf, 0xba, 0xc7, 0xc3, 0xc4, 0xc6, 0xb3, 0xb3, 0xb6, 0xbb, 0xbd, + 0xbf, 0xac, 0xab, 0xb4, 0xc1, 0xc8, 0xc1, 0xc6, 0xc7, 0xbc, 0xaf, 0xc0, + 0xae, 0xb1, 0xbb, 0xb6, 0xc7, 0xc7, 0xb1, 0xb9, 0x18, 0x1c, 0x19, 0x18, + 0x1a, 0x1c, 0x20, 0x21, 0x26, 0x25, 0x25, 0x27, 0x29, 0x24, 0x28, 0x27, + 0x28, 0x2d, 0x29, 0x27, 0x20, 0x21, 0x25, 0x27, 0x28, 0x2c, 0x2f, 0x32, + 0x30, 0x2e, 0x30, 0x30, 0x2f, 0x2d, 0x2f, 0x32, 0x30, 0x30, 0x32, 0x34, + 0x3a, 0x38, 0x37, 0x37, 0x3c, 0x3b, 0x3c, 0x3e, 0x3c, 0x3b, 0x3d, 0x3b, + 0x3a, 0x3a, 0x39, 0x3d, 0x3d, 0x40, 0x42, 0x46, 0x46, 0x3f, 0x37, 0x31, + 0x2e, 0x35, 0x3a, 0x3a, 0x40, 0x44, 0x42, 0x3e, 0x47, 0x4d, 0x52, 0x54, + 0x56, 0x57, 0x52, 0x4e, 0x56, 0x58, 0x5a, 0x58, 0x56, 0x55, 0x53, 0x58, + 0x59, 0x59, 0x59, 0x5a, 0x5d, 0x5d, 0x5a, 0x5b, 0x59, 0x58, 0x5c, 0x5b, + 0x5a, 0x5b, 0x5c, 0x5c, 0x5d, 0x5d, 0x5e, 0x5d, 0x61, 0x62, 0x62, 0x61, + 0x62, 0x62, 0x62, 0x63, 0x64, 0x65, 0x63, 0x62, 0x63, 0x63, 0x63, 0x64, + 0x64, 0x65, 0x65, 0x64, 0x64, 0x64, 0x65, 0x66, 0x66, 0x66, 0x65, 0x64, + 0x63, 0x62, 0x59, 0x50, 0x37, 0x16, 0xe6, 0xc2, 0xbe, 0xc8, 0xd7, 0xc1, + 0xbe, 0xc7, 0xc4, 0xb0, 0xae, 0xb7, 0xb2, 0xac, 0xb2, 0xca, 0xb6, 0xb7, + 0xbc, 0xbc, 0xba, 0xb8, 0xb6, 0xb1, 0xb7, 0xb2, 0xbb, 0xb1, 0xbc, 0xc0, + 0xb4, 0xb7, 0xb6, 0xb3, 0xb8, 0xc4, 0xc2, 0xb7, 0xc6, 0xc4, 0xb7, 0xb5, + 0xb6, 0xb8, 0xb8, 0xb6, 0xbf, 0xc3, 0xc9, 0xce, 0xcc, 0xb7, 0xb3, 0xd0, + 0xcc, 0xb9, 0xc1, 0xbe, 0xb4, 0xb2, 0xbb, 0xbc, 0xbd, 0xad, 0xaa, 0xad, + 0xae, 0xb0, 0xbb, 0xd2, 0xc8, 0xc2, 0xb4, 0xb3, 0xb3, 0xbc, 0xbd, 0xba, + 0xba, 0xba, 0xc2, 0xc0, 0x17, 0x16, 0x15, 0x18, 0x1b, 0x1e, 0x21, 0x22, + 0x24, 0x23, 0x21, 0x21, 0x24, 0x25, 0x27, 0x26, 0x23, 0x27, 0x2a, 0x26, + 0x1c, 0x1e, 0x22, 0x23, 0x28, 0x2d, 0x2f, 0x30, 0x30, 0x2d, 0x2b, 0x2f, + 0x2b, 0x2b, 0x2f, 0x32, 0x2f, 0x31, 0x32, 0x37, 0x38, 0x38, 0x37, 0x39, + 0x3b, 0x39, 0x3d, 0x3c, 0x38, 0x3b, 0x3c, 0x3b, 0x3b, 0x39, 0x3c, 0x3c, + 0x3e, 0x3f, 0x44, 0x46, 0x45, 0x3b, 0x31, 0x2d, 0x2d, 0x37, 0x3a, 
0x3d, + 0x42, 0x43, 0x40, 0x42, 0x4a, 0x4f, 0x54, 0x55, 0x56, 0x55, 0x4e, 0x4d, + 0x56, 0x58, 0x58, 0x54, 0x54, 0x53, 0x51, 0x56, 0x57, 0x57, 0x59, 0x5a, + 0x5c, 0x5b, 0x5a, 0x5a, 0x57, 0x57, 0x5b, 0x5c, 0x5a, 0x5a, 0x5b, 0x5c, + 0x5c, 0x5c, 0x5d, 0x5b, 0x61, 0x62, 0x61, 0x61, 0x63, 0x62, 0x62, 0x63, + 0x63, 0x64, 0x64, 0x64, 0x63, 0x63, 0x64, 0x64, 0x64, 0x65, 0x65, 0x64, + 0x64, 0x64, 0x65, 0x66, 0x67, 0x67, 0x66, 0x65, 0x64, 0x64, 0x5d, 0x52, + 0x34, 0x0c, 0xe0, 0xd2, 0xcb, 0xc7, 0xd7, 0xc9, 0xc5, 0xc0, 0xc1, 0xb7, + 0xbb, 0xc0, 0xb3, 0xad, 0xbe, 0xcc, 0xc1, 0xc6, 0xbc, 0xb9, 0xb8, 0xbb, + 0xc5, 0xc5, 0xb7, 0xb1, 0xd0, 0xbe, 0xb3, 0xb5, 0xbe, 0xbc, 0xb4, 0xb0, + 0xb4, 0xb9, 0xbd, 0xb6, 0xc4, 0xc1, 0xb1, 0xb1, 0xb4, 0xc3, 0xc1, 0xbc, + 0xc1, 0xdb, 0xc8, 0xc8, 0xc9, 0xb3, 0xb1, 0xbc, 0xc7, 0xbb, 0xbc, 0xc7, + 0xbb, 0xb2, 0xb8, 0xb8, 0xb7, 0xb7, 0xbc, 0xb6, 0xb1, 0xab, 0xb7, 0xc6, + 0xc1, 0xc2, 0xb2, 0xa6, 0xb1, 0xc2, 0xc0, 0xbb, 0xb5, 0xb5, 0xb8, 0xb5, + 0x15, 0x14, 0x16, 0x1b, 0x1c, 0x1d, 0x1b, 0x1e, 0x1f, 0x1d, 0x1d, 0x20, + 0x21, 0x21, 0x23, 0x22, 0x21, 0x27, 0x2c, 0x1f, 0x1b, 0x1f, 0x20, 0x20, + 0x29, 0x2d, 0x29, 0x2e, 0x2e, 0x29, 0x29, 0x2d, 0x2a, 0x2a, 0x2f, 0x30, + 0x31, 0x31, 0x34, 0x36, 0x37, 0x39, 0x38, 0x38, 0x36, 0x39, 0x3b, 0x38, + 0x39, 0x3c, 0x3c, 0x3b, 0x37, 0x3c, 0x3c, 0x39, 0x3d, 0x40, 0x44, 0x42, + 0x42, 0x34, 0x2b, 0x2d, 0x2f, 0x38, 0x3d, 0x3f, 0x43, 0x42, 0x42, 0x47, + 0x4d, 0x52, 0x53, 0x57, 0x57, 0x52, 0x4b, 0x51, 0x56, 0x56, 0x54, 0x52, + 0x53, 0x52, 0x50, 0x54, 0x56, 0x56, 0x59, 0x59, 0x5b, 0x59, 0x59, 0x58, + 0x57, 0x57, 0x59, 0x5c, 0x5a, 0x5a, 0x5b, 0x5c, 0x5c, 0x5c, 0x5d, 0x5d, + 0x5e, 0x62, 0x62, 0x62, 0x62, 0x62, 0x62, 0x62, 0x64, 0x62, 0x63, 0x64, + 0x65, 0x64, 0x64, 0x64, 0x64, 0x64, 0x64, 0x64, 0x64, 0x65, 0x66, 0x66, + 0x66, 0x67, 0x67, 0x66, 0x66, 0x63, 0x5f, 0x54, 0x3f, 0x22, 0x06, 0xf9, + 0xe4, 0xc5, 0xbf, 0xc1, 0xcc, 0xc1, 0xbd, 0xbc, 0xbd, 0xb7, 0xb2, 0xae, + 0xb7, 0xbe, 0xc2, 0xca, 0xc2, 0xb6, 0xb5, 0xb4, 0xc4, 0xc7, 0xbc, 
0xbc, + 0xc4, 0xbc, 0xb2, 0xba, 0xba, 0xb5, 0xb7, 0xb1, 0xba, 0xc1, 0xc1, 0xb8, + 0xbc, 0xb8, 0xb8, 0xb6, 0xba, 0xc1, 0xbc, 0xb7, 0xc3, 0xd6, 0xc8, 0xc4, + 0xc7, 0xbc, 0xb6, 0xb5, 0xbb, 0xbb, 0xbb, 0xca, 0xc2, 0xbb, 0xb8, 0xba, + 0xb7, 0xb0, 0xb5, 0xc6, 0xb2, 0xaf, 0xb7, 0xbb, 0xc6, 0xbb, 0xb7, 0xb8, + 0xb9, 0xbd, 0xba, 0xb9, 0xb0, 0xb7, 0xba, 0xc1, 0x14, 0x16, 0x17, 0x19, + 0x17, 0x17, 0x19, 0x18, 0x19, 0x1d, 0x1e, 0x1a, 0x19, 0x1c, 0x1f, 0x1d, + 0x20, 0x29, 0x22, 0x1d, 0x1f, 0x1d, 0x1e, 0x1e, 0x29, 0x2b, 0x27, 0x2d, + 0x2d, 0x29, 0x2a, 0x2b, 0x2a, 0x2c, 0x30, 0x30, 0x31, 0x32, 0x34, 0x33, + 0x37, 0x39, 0x3b, 0x38, 0x36, 0x38, 0x36, 0x38, 0x3c, 0x3b, 0x3c, 0x3d, + 0x3c, 0x3c, 0x3b, 0x38, 0x3d, 0x41, 0x42, 0x3d, 0x3d, 0x35, 0x2d, 0x2e, + 0x32, 0x3d, 0x40, 0x42, 0x43, 0x42, 0x45, 0x49, 0x4e, 0x52, 0x52, 0x56, + 0x53, 0x4d, 0x4c, 0x54, 0x56, 0x53, 0x52, 0x52, 0x54, 0x51, 0x52, 0x52, + 0x52, 0x55, 0x59, 0x5a, 0x5a, 0x58, 0x59, 0x59, 0x57, 0x56, 0x57, 0x5b, + 0x58, 0x5a, 0x5a, 0x5b, 0x5d, 0x5c, 0x5c, 0x5e, 0x5d, 0x61, 0x61, 0x62, + 0x62, 0x62, 0x62, 0x63, 0x64, 0x63, 0x62, 0x63, 0x64, 0x64, 0x64, 0x64, + 0x63, 0x64, 0x64, 0x64, 0x65, 0x64, 0x65, 0x66, 0x66, 0x66, 0x66, 0x67, + 0x66, 0x63, 0x62, 0x5d, 0x59, 0x4c, 0x3e, 0x23, 0xff, 0xda, 0xcb, 0xcc, + 0xc5, 0xc5, 0xbf, 0xbf, 0xbc, 0xba, 0xb0, 0xae, 0xb3, 0xb5, 0xb8, 0xc4, + 0xc5, 0xb9, 0xb8, 0xb0, 0xb8, 0xbc, 0xb6, 0xbc, 0xc2, 0xba, 0xb3, 0xc0, + 0xbf, 0xbc, 0xb9, 0xbc, 0xc5, 0xc7, 0xbc, 0xb7, 0xb3, 0xb4, 0xb8, 0xb8, + 0xc2, 0xc6, 0xc0, 0xbc, 0xbc, 0xc4, 0xd1, 0xc6, 0xe4, 0xe0, 0xc2, 0xbb, + 0xbd, 0xc2, 0xc9, 0xc2, 0xbe, 0xba, 0xb3, 0xb8, 0xb5, 0xa9, 0xad, 0xb5, + 0xb3, 0xb4, 0xb5, 0xb5, 0xc7, 0xc1, 0xb9, 0xbc, 0xbf, 0xbe, 0xc7, 0xd3, + 0xbe, 0xb7, 0xd2, 0xd6, 0x11, 0x17, 0x16, 0x13, 0x12, 0x16, 0x17, 0x17, + 0x18, 0x1c, 0x1c, 0x1a, 0x19, 0x1d, 0x1d, 0x1d, 0x23, 0x27, 0x1d, 0x1c, + 0x1c, 0x1a, 0x19, 0x20, 0x29, 0x28, 0x26, 0x2d, 0x2e, 0x27, 0x29, 0x2b, + 0x2d, 0x2d, 0x2f, 0x31, 0x31, 0x32, 0x30, 0x32, 0x35, 0x3a, 0x38, 
0x36, + 0x36, 0x34, 0x37, 0x3a, 0x3a, 0x3c, 0x3d, 0x3d, 0x3c, 0x37, 0x38, 0x39, + 0x3d, 0x42, 0x3f, 0x37, 0x39, 0x37, 0x31, 0x30, 0x34, 0x3f, 0x41, 0x42, + 0x44, 0x42, 0x46, 0x4d, 0x4f, 0x52, 0x54, 0x53, 0x4d, 0x4c, 0x4d, 0x54, + 0x54, 0x52, 0x52, 0x51, 0x54, 0x4f, 0x4e, 0x50, 0x52, 0x54, 0x57, 0x59, + 0x5a, 0x57, 0x56, 0x57, 0x58, 0x57, 0x57, 0x5a, 0x59, 0x5a, 0x5a, 0x5a, + 0x5e, 0x5c, 0x5c, 0x5d, 0x5e, 0x5e, 0x5f, 0x60, 0x61, 0x61, 0x62, 0x62, + 0x64, 0x63, 0x62, 0x62, 0x63, 0x65, 0x64, 0x63, 0x63, 0x64, 0x64, 0x65, + 0x66, 0x65, 0x65, 0x64, 0x65, 0x66, 0x67, 0x66, 0x66, 0x66, 0x64, 0x60, + 0x55, 0x47, 0x3d, 0x23, 0xfb, 0xda, 0xc1, 0xc5, 0xc1, 0xbd, 0xc1, 0xc3, + 0xc2, 0xc2, 0xb5, 0xbc, 0xbe, 0xbf, 0xbe, 0xbc, 0xbf, 0xc1, 0xba, 0xb0, + 0xb4, 0xcc, 0xbe, 0xb5, 0xc1, 0xba, 0xb9, 0xbc, 0xce, 0xcc, 0xcb, 0xc7, + 0xcc, 0xb9, 0xb6, 0xb9, 0xbb, 0xb3, 0xaf, 0xb7, 0xbc, 0xba, 0xb4, 0xb7, + 0xb3, 0xb7, 0xd1, 0xd1, 0xcc, 0xcf, 0xbc, 0xbc, 0xd7, 0xd6, 0xc5, 0xb8, + 0xbe, 0xc0, 0xb3, 0xb7, 0xb7, 0xbc, 0xc7, 0xbc, 0xb7, 0xb7, 0xb7, 0xb1, + 0xba, 0xc8, 0xc0, 0xbf, 0xd2, 0xc2, 0xc2, 0xcb, 0xc1, 0xbd, 0xd2, 0xdc, + 0x0c, 0x12, 0x0f, 0x0d, 0x0d, 0x15, 0x15, 0x15, 0x14, 0x17, 0x1e, 0x1d, + 0x1d, 0x1e, 0x1d, 0x22, 0x26, 0x25, 0x1c, 0x1b, 0x17, 0x14, 0x18, 0x1e, + 0x27, 0x27, 0x25, 0x28, 0x2d, 0x28, 0x27, 0x29, 0x2b, 0x2b, 0x2d, 0x31, + 0x33, 0x33, 0x2e, 0x32, 0x35, 0x37, 0x37, 0x36, 0x33, 0x35, 0x38, 0x38, + 0x38, 0x3d, 0x3e, 0x3d, 0x37, 0x36, 0x35, 0x39, 0x3d, 0x3f, 0x3b, 0x34, + 0x32, 0x33, 0x30, 0x30, 0x36, 0x3f, 0x41, 0x42, 0x43, 0x44, 0x4b, 0x4d, + 0x51, 0x52, 0x52, 0x51, 0x4c, 0x4c, 0x4f, 0x53, 0x51, 0x52, 0x52, 0x52, + 0x52, 0x4e, 0x4b, 0x50, 0x53, 0x55, 0x57, 0x57, 0x59, 0x57, 0x53, 0x54, + 0x57, 0x57, 0x57, 0x5a, 0x59, 0x5a, 0x5b, 0x5a, 0x5d, 0x5d, 0x5d, 0x5d, + 0x5f, 0x5e, 0x5f, 0x5f, 0x61, 0x60, 0x61, 0x62, 0x63, 0x63, 0x63, 0x62, + 0x63, 0x63, 0x64, 0x62, 0x62, 0x63, 0x64, 0x66, 0x66, 0x66, 0x66, 0x65, + 0x64, 0x65, 0x66, 0x66, 0x66, 0x66, 0x65, 0x62, 0x5b, 0x47, 0x34, 
0x1f, + 0xf6, 0xd2, 0xc4, 0xc2, 0xcb, 0xc5, 0xc3, 0xc5, 0xc2, 0xc2, 0xbf, 0xe1, + 0xd7, 0xcc, 0xc2, 0xb7, 0xb8, 0xb7, 0xb8, 0xbd, 0xc9, 0xdd, 0xcf, 0xbd, + 0xc0, 0xba, 0xb7, 0xb6, 0xc1, 0xcb, 0xcd, 0xbd, 0xbd, 0xb7, 0xb5, 0xb9, + 0xbc, 0xad, 0xad, 0xb8, 0xbf, 0xbc, 0xb6, 0xb0, 0xb0, 0xbf, 0xce, 0xd7, + 0xda, 0xce, 0xc2, 0xc2, 0xd7, 0xcb, 0xbd, 0xc7, 0xc4, 0xbf, 0xb5, 0xb7, + 0xb6, 0xcd, 0xd3, 0xbf, 0xba, 0xbb, 0xb7, 0xb2, 0xb1, 0xcc, 0xbc, 0xba, + 0xcf, 0xbd, 0xb1, 0xb3, 0xbd, 0xc5, 0xc2, 0xdb, 0x06, 0x07, 0x08, 0x0a, + 0x10, 0x15, 0x14, 0x13, 0x12, 0x19, 0x1a, 0x1c, 0x1d, 0x1e, 0x1f, 0x1e, + 0x1d, 0x1d, 0x17, 0x13, 0x10, 0x14, 0x19, 0x1d, 0x23, 0x26, 0x24, 0x28, + 0x2d, 0x2a, 0x2a, 0x28, 0x27, 0x2d, 0x28, 0x31, 0x32, 0x30, 0x2e, 0x32, + 0x35, 0x35, 0x35, 0x32, 0x32, 0x33, 0x36, 0x37, 0x3c, 0x3e, 0x3b, 0x35, + 0x36, 0x36, 0x35, 0x37, 0x3d, 0x3e, 0x35, 0x32, 0x30, 0x32, 0x33, 0x33, + 0x38, 0x3d, 0x42, 0x43, 0x43, 0x44, 0x4b, 0x50, 0x52, 0x51, 0x4d, 0x4d, + 0x49, 0x4c, 0x51, 0x51, 0x51, 0x52, 0x51, 0x51, 0x52, 0x4d, 0x4c, 0x4f, + 0x52, 0x55, 0x56, 0x56, 0x58, 0x54, 0x53, 0x53, 0x56, 0x57, 0x57, 0x59, + 0x58, 0x59, 0x5b, 0x59, 0x5d, 0x5d, 0x5b, 0x5d, 0x5f, 0x60, 0x5d, 0x60, + 0x60, 0x5f, 0x5f, 0x5f, 0x62, 0x63, 0x64, 0x64, 0x63, 0x62, 0x63, 0x63, + 0x63, 0x62, 0x63, 0x65, 0x66, 0x66, 0x66, 0x65, 0x64, 0x64, 0x65, 0x65, + 0x65, 0x66, 0x66, 0x64, 0x61, 0x5a, 0x4c, 0x38, 0x16, 0xe9, 0xd9, 0xbf, + 0xcb, 0xc3, 0xc2, 0xc2, 0xc6, 0xc4, 0xbc, 0xd8, 0xe0, 0xd3, 0xc8, 0xc2, + 0xbf, 0xc7, 0xbe, 0xc6, 0xd5, 0xcc, 0xca, 0xbe, 0xbd, 0xbe, 0xb7, 0xb4, + 0xb2, 0xb6, 0xc2, 0xb6, 0xb8, 0xb7, 0xb1, 0xb5, 0xb7, 0xac, 0xac, 0xc1, + 0xc7, 0xcb, 0xc9, 0xba, 0xbf, 0xc0, 0xc7, 0xe0, 0xe1, 0xcc, 0xbd, 0xc9, + 0xcf, 0xc0, 0xbf, 0xc5, 0xc0, 0xbf, 0xbc, 0xbe, 0xb7, 0xc6, 0xbd, 0xb7, + 0xbc, 0xbb, 0xb8, 0xb9, 0xbb, 0xbf, 0xb5, 0xb3, 0xc8, 0xc1, 0xb5, 0xb7, + 0xbe, 0xc7, 0xc1, 0xc7, 0xfc, 0xfe, 0x04, 0x0a, 0x10, 0x12, 0x12, 0x12, + 0x17, 0x1e, 0x1d, 0x1d, 0x1e, 0x1b, 0x17, 0x19, 0x1d, 0x1b, 0x11, 
0x0c, + 0x0e, 0x12, 0x17, 0x1d, 0x22, 0x24, 0x24, 0x2a, 0x2b, 0x27, 0x27, 0x27, + 0x2b, 0x2a, 0x2a, 0x2f, 0x32, 0x2f, 0x30, 0x32, 0x32, 0x36, 0x33, 0x30, + 0x31, 0x32, 0x37, 0x3c, 0x3d, 0x39, 0x34, 0x35, 0x37, 0x36, 0x37, 0x35, + 0x39, 0x3d, 0x2e, 0x2d, 0x2f, 0x31, 0x33, 0x38, 0x38, 0x3a, 0x3f, 0x43, + 0x41, 0x44, 0x4a, 0x4e, 0x50, 0x4d, 0x4b, 0x4d, 0x4c, 0x4f, 0x51, 0x50, + 0x50, 0x51, 0x51, 0x50, 0x51, 0x4d, 0x4e, 0x51, 0x4e, 0x53, 0x55, 0x52, + 0x57, 0x54, 0x55, 0x53, 0x55, 0x57, 0x57, 0x58, 0x5a, 0x5a, 0x5a, 0x59, + 0x5c, 0x60, 0x5b, 0x5b, 0x5e, 0x5f, 0x5e, 0x5e, 0x60, 0x60, 0x5f, 0x5e, + 0x61, 0x62, 0x62, 0x64, 0x64, 0x63, 0x62, 0x63, 0x64, 0x63, 0x62, 0x64, + 0x65, 0x66, 0x66, 0x66, 0x65, 0x64, 0x64, 0x64, 0x65, 0x66, 0x66, 0x66, + 0x64, 0x5d, 0x4d, 0x3e, 0x17, 0xe5, 0xd4, 0xcc, 0xc7, 0xc0, 0xc3, 0xc8, + 0xd2, 0xd9, 0xcc, 0xd3, 0xdf, 0xdc, 0xd2, 0xc8, 0xc7, 0xd9, 0xc9, 0xb8, + 0xc2, 0xca, 0xc7, 0xd0, 0xcf, 0xd0, 0xc0, 0xbe, 0xb7, 0xb6, 0xba, 0xb3, + 0xb7, 0xb9, 0xb1, 0xb1, 0xb5, 0xaf, 0xac, 0xc9, 0xd1, 0xbc, 0xbd, 0xc2, + 0xc1, 0xb3, 0xcc, 0xde, 0xc6, 0xbe, 0xc0, 0xc2, 0xc8, 0xce, 0xc2, 0xc1, + 0xbc, 0xc5, 0xc0, 0xbc, 0xc5, 0xc2, 0xbe, 0xbd, 0xb2, 0xb4, 0xb3, 0xbc, + 0xb6, 0xb7, 0xb7, 0xbc, 0xc9, 0xc1, 0xc4, 0xbc, 0xb3, 0xc2, 0xca, 0xc5, + 0xf0, 0xf9, 0xfc, 0x03, 0x0c, 0x0d, 0x12, 0x16, 0x18, 0x1d, 0x1b, 0x1a, + 0x1a, 0x17, 0x17, 0x1d, 0x1d, 0x1b, 0x12, 0x14, 0x14, 0x15, 0x19, 0x1b, + 0x22, 0x24, 0x23, 0x2a, 0x2b, 0x26, 0x24, 0x29, 0x2c, 0x2a, 0x2d, 0x30, + 0x32, 0x2f, 0x31, 0x31, 0x32, 0x34, 0x32, 0x2e, 0x2f, 0x32, 0x38, 0x3a, + 0x37, 0x37, 0x38, 0x38, 0x35, 0x36, 0x35, 0x32, 0x37, 0x39, 0x26, 0x29, + 0x30, 0x2c, 0x35, 0x3c, 0x33, 0x3c, 0x3f, 0x42, 0x42, 0x45, 0x49, 0x4d, + 0x4e, 0x4a, 0x48, 0x4d, 0x4d, 0x50, 0x50, 0x51, 0x51, 0x51, 0x52, 0x51, + 0x52, 0x4d, 0x4e, 0x4d, 0x4b, 0x53, 0x54, 0x52, 0x57, 0x55, 0x56, 0x55, + 0x56, 0x56, 0x58, 0x58, 0x5a, 0x59, 0x58, 0x5a, 0x5a, 0x5f, 0x5d, 0x5a, + 0x5d, 0x5d, 0x5e, 0x60, 0x60, 0x61, 0x61, 0x5e, 0x5f, 0x61, 0x62, 
0x63, + 0x63, 0x64, 0x63, 0x62, 0x63, 0x64, 0x64, 0x63, 0x64, 0x65, 0x65, 0x66, + 0x66, 0x65, 0x64, 0x65, 0x66, 0x66, 0x65, 0x66, 0x66, 0x63, 0x5b, 0x4a, + 0x27, 0x07, 0xdf, 0xc5, 0xc7, 0xce, 0xc9, 0xc7, 0xc8, 0xd8, 0xd2, 0xe0, + 0xe2, 0xe2, 0xe0, 0xd2, 0xc6, 0xc0, 0xbc, 0xbe, 0xc2, 0xc8, 0xc5, 0xcc, + 0xe0, 0xcf, 0xcb, 0xca, 0xc7, 0xc7, 0xbf, 0xc3, 0xc6, 0xbd, 0xb4, 0xae, + 0xb0, 0xb3, 0xb7, 0xba, 0xd2, 0xc7, 0xbf, 0xc1, 0xc1, 0xba, 0xc0, 0xc7, + 0xbc, 0xc3, 0xc4, 0xc7, 0xc6, 0xcf, 0xc6, 0xc0, 0xbb, 0xc0, 0xc2, 0xca, + 0xc7, 0xbc, 0xbe, 0xbf, 0xb1, 0xb9, 0xbf, 0xbd, 0xa9, 0xac, 0xb6, 0xc1, + 0xc3, 0xc4, 0xbc, 0xc3, 0xb5, 0xb8, 0xc1, 0xc4, 0xe2, 0xf0, 0xf6, 0xff, + 0x07, 0x08, 0x0e, 0x13, 0x17, 0x18, 0x17, 0x18, 0x19, 0x17, 0x18, 0x1d, + 0x17, 0x13, 0x17, 0x1b, 0x17, 0x15, 0x1b, 0x1c, 0x20, 0x23, 0x22, 0x27, + 0x29, 0x23, 0x25, 0x27, 0x28, 0x2c, 0x2e, 0x2e, 0x2d, 0x2d, 0x30, 0x32, + 0x32, 0x32, 0x2b, 0x28, 0x2d, 0x33, 0x3a, 0x35, 0x33, 0x38, 0x39, 0x37, + 0x35, 0x36, 0x32, 0x32, 0x32, 0x39, 0x23, 0x27, 0x2f, 0x2e, 0x37, 0x3b, + 0x35, 0x3d, 0x3d, 0x41, 0x44, 0x47, 0x4b, 0x4c, 0x4c, 0x47, 0x46, 0x4a, + 0x4e, 0x4e, 0x51, 0x52, 0x51, 0x52, 0x51, 0x52, 0x50, 0x4c, 0x4d, 0x48, + 0x48, 0x51, 0x52, 0x52, 0x55, 0x53, 0x54, 0x52, 0x56, 0x55, 0x58, 0x59, + 0x59, 0x58, 0x58, 0x59, 0x59, 0x5d, 0x5d, 0x59, 0x5d, 0x5c, 0x5d, 0x61, + 0x61, 0x5f, 0x60, 0x5f, 0x5e, 0x61, 0x61, 0x62, 0x62, 0x64, 0x64, 0x64, + 0x62, 0x63, 0x64, 0x65, 0x63, 0x64, 0x65, 0x65, 0x65, 0x65, 0x63, 0x64, + 0x65, 0x65, 0x65, 0x67, 0x66, 0x66, 0x60, 0x52, 0x3d, 0x24, 0xf2, 0xd3, + 0xc7, 0xc8, 0xc6, 0xc7, 0xc5, 0xc7, 0xbc, 0xc2, 0xce, 0xdc, 0xe3, 0xd5, + 0xdc, 0xcc, 0xbd, 0xc8, 0xc1, 0xc7, 0xcd, 0xc4, 0xc7, 0xb7, 0xba, 0xbb, + 0xc0, 0xc2, 0xcc, 0xd3, 0xcf, 0xb7, 0xb7, 0xb8, 0xb1, 0xbd, 0xc7, 0xb2, + 0xc1, 0xcc, 0xc5, 0xbc, 0xc7, 0xcb, 0xd9, 0xc7, 0xc7, 0xc7, 0xc2, 0xc0, + 0xbc, 0xba, 0xc4, 0xbc, 0xb8, 0xc6, 0xc1, 0xc4, 0xc5, 0xb9, 0xb4, 0xb7, + 0xb8, 0xc5, 0xbd, 0xb8, 0xbd, 0xc9, 0xbc, 0xc7, 0xba, 0xc4, 0xbb, 
0xb7, + 0xbf, 0xb1, 0xac, 0xbb, 0xd7, 0xe5, 0xed, 0xf8, 0xff, 0x05, 0x07, 0x0c, + 0x14, 0x13, 0x13, 0x12, 0x15, 0x19, 0x17, 0x16, 0x11, 0x17, 0x1a, 0x1d, + 0x19, 0x1a, 0x1f, 0x1e, 0x1f, 0x22, 0x23, 0x25, 0x23, 0x22, 0x22, 0x24, + 0x27, 0x2a, 0x2c, 0x2b, 0x2b, 0x2d, 0x30, 0x31, 0x2c, 0x27, 0x22, 0x29, + 0x31, 0x34, 0x37, 0x32, 0x35, 0x39, 0x37, 0x34, 0x35, 0x33, 0x2c, 0x32, + 0x30, 0x32, 0x2a, 0x2b, 0x30, 0x35, 0x36, 0x38, 0x37, 0x3c, 0x3d, 0x3e, + 0x46, 0x48, 0x4a, 0x48, 0x49, 0x47, 0x48, 0x48, 0x4e, 0x4c, 0x50, 0x51, + 0x50, 0x51, 0x4f, 0x52, 0x4d, 0x49, 0x4a, 0x47, 0x47, 0x4f, 0x51, 0x53, + 0x54, 0x55, 0x54, 0x52, 0x55, 0x56, 0x57, 0x5a, 0x59, 0x59, 0x58, 0x59, + 0x5a, 0x5b, 0x5d, 0x5b, 0x5d, 0x5c, 0x5c, 0x61, 0x61, 0x60, 0x5f, 0x61, + 0x5f, 0x60, 0x62, 0x61, 0x62, 0x63, 0x62, 0x64, 0x63, 0x63, 0x63, 0x65, + 0x65, 0x64, 0x64, 0x65, 0x65, 0x65, 0x65, 0x65, 0x66, 0x64, 0x65, 0x67, + 0x66, 0x66, 0x64, 0x5f, 0x51, 0x30, 0xfc, 0xd5, 0xca, 0xc1, 0xbe, 0xbb, + 0xbb, 0xb2, 0xb1, 0xc2, 0xd0, 0xc6, 0xd9, 0xe5, 0xf0, 0xde, 0xd2, 0xda, + 0xcd, 0xd7, 0xe2, 0xc4, 0xb9, 0xb4, 0xbf, 0xb8, 0xb6, 0xbe, 0xca, 0xcd, + 0xc8, 0xc1, 0xbb, 0xbc, 0xb4, 0xc7, 0xbc, 0xc7, 0xc8, 0xc9, 0xca, 0xcc, + 0xcc, 0xcc, 0xd3, 0xc2, 0xd9, 0xc5, 0xc7, 0xbd, 0xba, 0xb8, 0xc1, 0xc1, + 0xbc, 0xc4, 0xc4, 0xbc, 0xb4, 0xbc, 0xb7, 0xb3, 0xbc, 0xdc, 0xca, 0xbf, + 0xc2, 0xc6, 0xbb, 0xc2, 0xb7, 0xb9, 0xb7, 0xb2, 0xb8, 0xc8, 0xb2, 0xb7, + 0xcd, 0xda, 0xdf, 0xec, 0xf7, 0xfc, 0xfd, 0x06, 0x0a, 0x0b, 0x0b, 0x0d, + 0x10, 0x12, 0x12, 0x11, 0x13, 0x17, 0x1b, 0x1d, 0x1c, 0x1d, 0x1e, 0x1f, + 0x20, 0x22, 0x24, 0x26, 0x22, 0x22, 0x22, 0x23, 0x24, 0x28, 0x28, 0x28, + 0x2a, 0x2d, 0x2e, 0x2a, 0x24, 0x22, 0x27, 0x2f, 0x32, 0x32, 0x35, 0x31, + 0x36, 0x37, 0x36, 0x35, 0x36, 0x30, 0x2d, 0x32, 0x2a, 0x2d, 0x30, 0x2b, + 0x2e, 0x35, 0x37, 0x39, 0x37, 0x39, 0x3e, 0x3d, 0x42, 0x47, 0x47, 0x45, + 0x47, 0x47, 0x46, 0x47, 0x4d, 0x4c, 0x50, 0x4f, 0x50, 0x4f, 0x4f, 0x52, + 0x4d, 0x49, 0x47, 0x47, 0x49, 0x4d, 0x51, 0x52, 0x52, 0x55, 0x57, 
0x53, + 0x52, 0x56, 0x57, 0x5a, 0x58, 0x57, 0x57, 0x57, 0x5a, 0x59, 0x5a, 0x5c, + 0x5c, 0x5b, 0x5b, 0x5f, 0x62, 0x5f, 0x5f, 0x60, 0x5f, 0x60, 0x61, 0x62, + 0x62, 0x62, 0x62, 0x62, 0x64, 0x63, 0x63, 0x63, 0x65, 0x65, 0x65, 0x65, + 0x65, 0x65, 0x64, 0x65, 0x66, 0x66, 0x66, 0x66, 0x67, 0x67, 0x66, 0x60, + 0x4d, 0x25, 0xf7, 0xd5, 0xd7, 0xce, 0xc6, 0xbf, 0xc2, 0xc3, 0xc6, 0xd4, + 0xc4, 0xba, 0xc7, 0xce, 0xd8, 0xe0, 0xda, 0xd3, 0xd5, 0xcb, 0xc5, 0xc2, + 0xc1, 0xbc, 0xc0, 0xbd, 0xbe, 0xb7, 0xba, 0xc7, 0xca, 0xcc, 0xbd, 0xbc, + 0xbb, 0xc6, 0xd4, 0xcb, 0xc6, 0xc7, 0xcc, 0xd6, 0xd6, 0xd2, 0xc3, 0xbd, + 0xda, 0xcf, 0xbf, 0xc2, 0xbd, 0xb5, 0xbb, 0xb0, 0xc2, 0xbf, 0xc7, 0xc3, + 0xbc, 0xbe, 0xb7, 0xb2, 0xb8, 0xd6, 0xc7, 0xc2, 0xc0, 0xbc, 0xbd, 0xc2, + 0xbd, 0xb7, 0xb7, 0xb4, 0xb3, 0xc3, 0xb7, 0xb1, 0xc1, 0xcf, 0xd7, 0xe2, + 0xe7, 0xf0, 0xf5, 0xfc, 0x01, 0x06, 0x04, 0x05, 0x08, 0x08, 0x05, 0x10, + 0x14, 0x16, 0x19, 0x17, 0x1a, 0x1c, 0x1f, 0x1b, 0x1d, 0x1e, 0x20, 0x21, + 0x22, 0x22, 0x23, 0x22, 0x27, 0x27, 0x25, 0x28, 0x29, 0x27, 0x25, 0x26, + 0x27, 0x2a, 0x2d, 0x32, 0x32, 0x34, 0x33, 0x32, 0x38, 0x34, 0x36, 0x37, + 0x33, 0x2d, 0x2f, 0x32, 0x29, 0x27, 0x32, 0x31, 0x32, 0x33, 0x37, 0x3a, + 0x37, 0x38, 0x3c, 0x3d, 0x42, 0x46, 0x44, 0x41, 0x45, 0x46, 0x47, 0x48, + 0x4b, 0x4b, 0x51, 0x4e, 0x4e, 0x4d, 0x50, 0x50, 0x4e, 0x48, 0x49, 0x48, + 0x49, 0x4d, 0x51, 0x52, 0x50, 0x52, 0x56, 0x55, 0x52, 0x57, 0x57, 0x58, + 0x56, 0x56, 0x57, 0x56, 0x59, 0x5a, 0x57, 0x5c, 0x5b, 0x5a, 0x5b, 0x5e, + 0x62, 0x60, 0x60, 0x5f, 0x60, 0x60, 0x5f, 0x61, 0x62, 0x62, 0x61, 0x61, + 0x63, 0x64, 0x63, 0x62, 0x63, 0x65, 0x65, 0x65, 0x65, 0x66, 0x63, 0x64, + 0x65, 0x65, 0x66, 0x66, 0x67, 0x67, 0x67, 0x63, 0x59, 0x3f, 0x27, 0x0a, + 0xe1, 0xe1, 0xce, 0xc3, 0xc1, 0xcf, 0xd7, 0xd2, 0xd3, 0xca, 0xc6, 0xc6, + 0xc9, 0xd7, 0xdf, 0xdd, 0xd7, 0xce, 0xc0, 0xbc, 0xc6, 0xbc, 0xb1, 0xb6, + 0xcc, 0xd4, 0xc2, 0xbf, 0xc4, 0xd5, 0xc2, 0xb8, 0xbe, 0xc4, 0xd0, 0xcc, + 0xc1, 0xba, 0xc2, 0xca, 0xd2, 0xcc, 0xbf, 0xbc, 0xd2, 0xd7, 0xbc, 
0xbb, + 0xc6, 0xbd, 0xbf, 0xac, 0xc0, 0xbb, 0xcc, 0xc7, 0xb7, 0xc4, 0xbc, 0xb1, + 0xb2, 0xc8, 0xc7, 0xc7, 0xc1, 0xbd, 0xc0, 0xbc, 0xc1, 0xbb, 0xb6, 0xb2, + 0xb7, 0xb9, 0xb1, 0xac, 0xc1, 0xc9, 0xd0, 0xd5, 0xde, 0xdf, 0xe6, 0xe6, + 0xea, 0xf3, 0xf7, 0xf9, 0xf6, 0xf9, 0xff, 0x10, 0x12, 0x18, 0x17, 0x12, + 0x19, 0x1a, 0x1e, 0x1d, 0x1c, 0x1e, 0x1f, 0x1e, 0x1e, 0x1d, 0x21, 0x20, + 0x22, 0x22, 0x24, 0x28, 0x25, 0x23, 0x21, 0x23, 0x26, 0x29, 0x2e, 0x33, + 0x34, 0x34, 0x32, 0x32, 0x39, 0x37, 0x34, 0x33, 0x32, 0x2e, 0x33, 0x31, + 0x29, 0x20, 0x32, 0x38, 0x32, 0x33, 0x39, 0x37, 0x39, 0x3b, 0x3b, 0x3a, + 0x3f, 0x47, 0x44, 0x41, 0x43, 0x46, 0x47, 0x49, 0x4a, 0x4b, 0x50, 0x4d, + 0x4d, 0x4e, 0x51, 0x4d, 0x4c, 0x48, 0x47, 0x49, 0x49, 0x4c, 0x50, 0x52, + 0x50, 0x4f, 0x54, 0x57, 0x55, 0x58, 0x56, 0x58, 0x54, 0x55, 0x57, 0x56, + 0x57, 0x5b, 0x57, 0x58, 0x5a, 0x5a, 0x59, 0x5d, 0x61, 0x60, 0x60, 0x5f, + 0x60, 0x60, 0x5f, 0x60, 0x62, 0x62, 0x61, 0x61, 0x61, 0x62, 0x62, 0x62, + 0x62, 0x64, 0x64, 0x65, 0x66, 0x66, 0x65, 0x64, 0x65, 0x65, 0x66, 0x65, + 0x67, 0x67, 0x67, 0x66, 0x61, 0x54, 0x3e, 0x16, 0xfc, 0xf1, 0xdb, 0xc8, + 0xc7, 0xc2, 0xce, 0xc5, 0xd3, 0xdc, 0xc9, 0xc8, 0xc8, 0xca, 0xd7, 0xe3, + 0xe7, 0xd7, 0xc2, 0xbc, 0xc5, 0xc9, 0xbc, 0xc3, 0xc8, 0xd9, 0xd2, 0xc5, + 0xbd, 0xcd, 0xc4, 0xb9, 0xc0, 0xbf, 0xc1, 0xc0, 0xbe, 0xbb, 0xc4, 0xd2, + 0xd3, 0xc5, 0xc7, 0xbf, 0xd1, 0xee, 0xd7, 0xbf, 0xc0, 0xc7, 0xc8, 0xaa, + 0xc3, 0xc4, 0xd2, 0xc0, 0xb6, 0xc0, 0xc1, 0xb3, 0xb7, 0xc4, 0xc7, 0xbc, + 0xbb, 0xb9, 0xbf, 0xc4, 0xc8, 0xc2, 0xb9, 0xb3, 0xb7, 0xc7, 0xbd, 0xb2, + 0xbc, 0xc2, 0xcd, 0xca, 0xd4, 0xcf, 0xce, 0xd5, 0xda, 0xe6, 0xe7, 0xe5, + 0xe9, 0xf2, 0xfc, 0x0e, 0x14, 0x12, 0x12, 0x0e, 0x12, 0x17, 0x1a, 0x18, + 0x19, 0x1c, 0x20, 0x1d, 0x1c, 0x1d, 0x1f, 0x1d, 0x1d, 0x21, 0x23, 0x23, + 0x22, 0x22, 0x22, 0x21, 0x27, 0x2d, 0x32, 0x33, 0x34, 0x30, 0x30, 0x32, + 0x36, 0x33, 0x2f, 0x33, 0x32, 0x2e, 0x37, 0x31, 0x2d, 0x2b, 0x36, 0x38, + 0x33, 0x36, 0x38, 0x37, 0x3a, 0x3b, 0x3d, 0x3b, 0x3d, 0x43, 0x44, 
0x42, + 0x41, 0x47, 0x47, 0x47, 0x47, 0x4a, 0x4f, 0x4b, 0x4b, 0x4d, 0x51, 0x4b, + 0x49, 0x47, 0x47, 0x49, 0x47, 0x4c, 0x4d, 0x52, 0x52, 0x50, 0x52, 0x57, + 0x57, 0x57, 0x55, 0x57, 0x54, 0x54, 0x56, 0x56, 0x56, 0x59, 0x57, 0x57, + 0x5a, 0x5a, 0x5a, 0x5a, 0x5f, 0x60, 0x60, 0x5f, 0x5f, 0x5f, 0x5f, 0x5f, + 0x61, 0x62, 0x61, 0x62, 0x62, 0x62, 0x62, 0x63, 0x63, 0x63, 0x64, 0x63, + 0x65, 0x66, 0x65, 0x64, 0x65, 0x66, 0x65, 0x65, 0x67, 0x67, 0x67, 0x66, + 0x63, 0x58, 0x43, 0x23, 0xfd, 0x06, 0x09, 0xf5, 0xdd, 0xc7, 0xd2, 0xc7, + 0xc8, 0xd2, 0xcf, 0xc2, 0xc3, 0xd3, 0xda, 0xda, 0xf3, 0xe9, 0xdc, 0xd3, + 0xbd, 0xc6, 0xc9, 0xba, 0xc0, 0xd0, 0xcc, 0xd1, 0xc7, 0xc7, 0xc2, 0xbf, + 0xbc, 0xbd, 0xbf, 0xbc, 0xc4, 0xc3, 0xcc, 0xd6, 0xdc, 0xd0, 0xc5, 0xb7, + 0xc7, 0xd5, 0xd6, 0xcc, 0xc7, 0xcf, 0xcf, 0xb7, 0xc7, 0xd0, 0xce, 0xca, + 0xc6, 0xbf, 0xc2, 0xc7, 0xd4, 0xbf, 0xbe, 0xb5, 0xbf, 0xc0, 0xb8, 0xc2, + 0xc7, 0xc2, 0xc0, 0xc2, 0xc4, 0xc6, 0xc1, 0xb3, 0xbc, 0xc3, 0xc9, 0xc7, + 0xcf, 0xcc, 0xcc, 0xcc, 0xd2, 0xd5, 0xd6, 0xdf, 0xe7, 0xef, 0x05, 0x12, + 0x0c, 0x07, 0x07, 0x0b, 0x10, 0x17, 0x14, 0x14, 0x17, 0x1b, 0x1d, 0x16, + 0x19, 0x1d, 0x1b, 0x1a, 0x1d, 0x20, 0x21, 0x21, 0x22, 0x20, 0x1e, 0x22, + 0x29, 0x2c, 0x31, 0x35, 0x2f, 0x2d, 0x2d, 0x31, 0x36, 0x31, 0x2c, 0x33, + 0x2f, 0x2d, 0x31, 0x2f, 0x2c, 0x32, 0x37, 0x35, 0x37, 0x38, 0x38, 0x37, + 0x39, 0x39, 0x3c, 0x39, 0x3c, 0x40, 0x47, 0x45, 0x40, 0x44, 0x47, 0x47, + 0x47, 0x4a, 0x4d, 0x4a, 0x49, 0x4d, 0x50, 0x4d, 0x47, 0x46, 0x4a, 0x48, + 0x4a, 0x4d, 0x4d, 0x52, 0x52, 0x52, 0x52, 0x55, 0x56, 0x57, 0x56, 0x57, + 0x55, 0x54, 0x55, 0x57, 0x57, 0x57, 0x56, 0x56, 0x59, 0x58, 0x5a, 0x5b, + 0x5d, 0x60, 0x5e, 0x5e, 0x60, 0x5f, 0x5e, 0x5e, 0x5f, 0x62, 0x62, 0x62, + 0x62, 0x62, 0x62, 0x62, 0x63, 0x63, 0x64, 0x63, 0x64, 0x67, 0x66, 0x65, + 0x65, 0x66, 0x65, 0x65, 0x67, 0x67, 0x67, 0x67, 0x65, 0x62, 0x55, 0x2e, + 0xec, 0xe6, 0xfb, 0x07, 0xfc, 0xe4, 0xdc, 0xd4, 0xc7, 0xc2, 0xd2, 0xcf, + 0xc0, 0xcb, 0xcb, 0xd2, 0xf5, 0xec, 0xee, 0xf2, 0xc6, 0xbc, 0xc7, 
0xd3, + 0xcc, 0xd3, 0xd3, 0xd2, 0xc5, 0xc6, 0xcc, 0xc5, 0xbc, 0xbb, 0xb4, 0xb2, + 0xc2, 0xc9, 0xcf, 0xdc, 0xd7, 0xd6, 0xc8, 0xb6, 0xb6, 0xc1, 0xc1, 0xc7, + 0xc7, 0xc8, 0xca, 0xc0, 0xcc, 0xdf, 0xd5, 0xca, 0xc5, 0xc6, 0xcf, 0xd4, + 0xcd, 0xbe, 0xb8, 0xb8, 0xbf, 0xbe, 0xbe, 0xd7, 0xc6, 0xc3, 0xc7, 0xd7, + 0xd0, 0xb3, 0xb7, 0xba, 0xb4, 0xb7, 0xbc, 0xc1, 0xc7, 0xc7, 0xc7, 0xc4, + 0xcb, 0xd7, 0xd8, 0xdc, 0xdd, 0xf4, 0x08, 0x07, 0x07, 0x06, 0x0a, 0x0c, + 0x15, 0x14, 0x11, 0x12, 0x14, 0x17, 0x15, 0x17, 0x1b, 0x1b, 0x1a, 0x1a, + 0x1b, 0x1d, 0x1e, 0x22, 0x22, 0x1c, 0x1e, 0x23, 0x29, 0x2d, 0x30, 0x31, + 0x2d, 0x2d, 0x2f, 0x32, 0x33, 0x2f, 0x2f, 0x33, 0x32, 0x2d, 0x32, 0x30, + 0x2f, 0x37, 0x34, 0x32, 0x32, 0x37, 0x3b, 0x37, 0x39, 0x38, 0x37, 0x37, + 0x38, 0x3f, 0x44, 0x45, 0x42, 0x44, 0x47, 0x47, 0x47, 0x49, 0x49, 0x48, + 0x47, 0x4b, 0x4b, 0x4b, 0x47, 0x46, 0x4b, 0x4a, 0x4e, 0x50, 0x4d, 0x51, + 0x52, 0x52, 0x51, 0x54, 0x55, 0x57, 0x57, 0x57, 0x55, 0x55, 0x55, 0x57, + 0x57, 0x57, 0x56, 0x56, 0x58, 0x57, 0x57, 0x5d, 0x5d, 0x5e, 0x5c, 0x5b, + 0x60, 0x5f, 0x5e, 0x5e, 0x5e, 0x60, 0x61, 0x63, 0x62, 0x62, 0x61, 0x62, + 0x63, 0x64, 0x65, 0x64, 0x64, 0x65, 0x65, 0x64, 0x65, 0x66, 0x66, 0x67, + 0x66, 0x67, 0x66, 0x66, 0x65, 0x63, 0x5d, 0x3b, 0x0a, 0xf2, 0xf3, 0xf7, + 0x08, 0x0a, 0xfc, 0xde, 0xd7, 0xc7, 0xcf, 0xe6, 0xcb, 0xc7, 0xc7, 0xcb, + 0xec, 0xf7, 0xf7, 0xfd, 0xdd, 0xc2, 0xc0, 0xd7, 0xd9, 0xd9, 0xeb, 0xd7, + 0xbd, 0xb6, 0xc7, 0xc2, 0xbc, 0xbe, 0xb7, 0xb1, 0xca, 0xd0, 0xd3, 0xd9, + 0xda, 0xdc, 0xd7, 0xbf, 0xb7, 0xc3, 0xdc, 0xcd, 0xca, 0xc4, 0xc5, 0xc7, + 0xca, 0xce, 0xd0, 0xd2, 0xc7, 0xc7, 0xcc, 0xce, 0xbd, 0xbb, 0xcd, 0xc6, + 0xc7, 0xcd, 0xc8, 0xdc, 0xc5, 0xc7, 0xcc, 0xcb, 0xcd, 0xce, 0xb4, 0xc6, + 0xb3, 0xb6, 0xae, 0xb8, 0xcd, 0xc1, 0xc0, 0xc9, 0xd2, 0xd1, 0xcc, 0xd7, + 0xea, 0xf9, 0xfc, 0x03, 0x09, 0x0c, 0x0d, 0x12, 0x10, 0x10, 0x12, 0x16, + 0x14, 0x11, 0x12, 0x17, 0x17, 0x17, 0x19, 0x1a, 0x19, 0x19, 0x1a, 0x22, + 0x1d, 0x19, 0x1e, 0x24, 0x29, 0x2b, 0x2d, 0x2c, 0x2b, 0x2c, 0x30, 
0x31, + 0x32, 0x2f, 0x31, 0x31, 0x30, 0x2d, 0x32, 0x31, 0x35, 0x37, 0x32, 0x2f, + 0x30, 0x37, 0x3b, 0x37, 0x38, 0x3a, 0x37, 0x37, 0x36, 0x3d, 0x41, 0x42, + 0x42, 0x43, 0x45, 0x46, 0x46, 0x48, 0x4b, 0x49, 0x47, 0x4a, 0x4a, 0x48, + 0x46, 0x44, 0x49, 0x4c, 0x4f, 0x52, 0x4e, 0x4f, 0x51, 0x51, 0x50, 0x53, + 0x56, 0x57, 0x57, 0x57, 0x56, 0x55, 0x55, 0x57, 0x57, 0x57, 0x57, 0x57, + 0x58, 0x58, 0x57, 0x5b, 0x5d, 0x5d, 0x5c, 0x5b, 0x5d, 0x5f, 0x5e, 0x5f, + 0x5d, 0x5d, 0x5d, 0x61, 0x62, 0x62, 0x62, 0x62, 0x63, 0x64, 0x64, 0x64, + 0x65, 0x65, 0x65, 0x64, 0x65, 0x66, 0x66, 0x67, 0x66, 0x67, 0x66, 0x66, + 0x66, 0x65, 0x63, 0x55, 0x24, 0x09, 0xfc, 0xf2, 0xf0, 0xfc, 0x16, 0xfc, + 0xe4, 0xc7, 0xd0, 0xd5, 0xcc, 0xcc, 0xca, 0xcc, 0xdb, 0xf7, 0x03, 0xfa, + 0xe0, 0xc8, 0xbe, 0xc6, 0xd0, 0xdb, 0xe2, 0xd3, 0xc3, 0xba, 0xbc, 0xc2, + 0xca, 0xd4, 0xde, 0xc9, 0xd8, 0xdb, 0xe2, 0xe2, 0xe5, 0xde, 0xd8, 0xd2, + 0xc6, 0xd0, 0xf2, 0xe1, 0xc7, 0xc1, 0xcc, 0xd2, 0xd1, 0xd3, 0xd2, 0xcd, + 0xcc, 0xd0, 0xcb, 0xc8, 0xc3, 0xc3, 0xdd, 0xe0, 0xd1, 0xda, 0xc7, 0xd1, + 0xc3, 0xcf, 0xbd, 0xb4, 0xc2, 0xcc, 0xb7, 0xad, 0xad, 0xb1, 0xa7, 0xb1, + 0xbe, 0xb8, 0xbc, 0xc1, 0xc7, 0xc7, 0xd6, 0xe6, 0xf0, 0xf2, 0xfc, 0x02, + 0x08, 0x0c, 0x10, 0x11, 0x0d, 0x0c, 0x15, 0x14, 0x0e, 0x0c, 0x13, 0x13, + 0x12, 0x17, 0x17, 0x17, 0x16, 0x17, 0x19, 0x1d, 0x1a, 0x19, 0x1d, 0x22, + 0x25, 0x27, 0x28, 0x28, 0x2a, 0x2b, 0x2d, 0x2d, 0x30, 0x2d, 0x31, 0x2f, + 0x2f, 0x2d, 0x31, 0x30, 0x33, 0x34, 0x2f, 0x2e, 0x2e, 0x37, 0x37, 0x35, + 0x3b, 0x3b, 0x37, 0x39, 0x37, 0x3b, 0x3c, 0x40, 0x3f, 0x42, 0x42, 0x45, + 0x47, 0x47, 0x49, 0x4b, 0x48, 0x48, 0x49, 0x47, 0x47, 0x45, 0x49, 0x4d, + 0x4f, 0x52, 0x51, 0x50, 0x4f, 0x50, 0x50, 0x4f, 0x55, 0x58, 0x57, 0x56, + 0x56, 0x55, 0x54, 0x56, 0x57, 0x57, 0x57, 0x57, 0x58, 0x5a, 0x58, 0x59, + 0x5c, 0x5d, 0x5c, 0x5a, 0x5b, 0x5d, 0x5e, 0x60, 0x5d, 0x5c, 0x5d, 0x5e, + 0x61, 0x62, 0x62, 0x62, 0x63, 0x64, 0x63, 0x63, 0x65, 0x66, 0x66, 0x65, + 0x66, 0x66, 0x67, 0x67, 0x66, 0x67, 0x66, 0x66, 0x66, 0x67, 0x64, 
0x60, + 0x47, 0x1f, 0x09, 0x0f, 0xfa, 0xea, 0xfa, 0x1b, 0xfc, 0xf2, 0xe1, 0xd2, + 0xc2, 0xc8, 0xc0, 0xc2, 0xd2, 0xf6, 0x09, 0x07, 0xf4, 0xd1, 0xb4, 0xb1, + 0xc4, 0xd3, 0xd8, 0xd3, 0xcf, 0xca, 0xc4, 0xc2, 0xc6, 0xe2, 0xde, 0xdc, + 0xf5, 0xd7, 0xd5, 0xe0, 0xf7, 0xe8, 0xd0, 0xc8, 0xc7, 0xd7, 0xf9, 0xf6, + 0xd3, 0xba, 0xdf, 0xe9, 0xdc, 0xdb, 0xcd, 0xcf, 0xcc, 0xc8, 0xd2, 0xcd, + 0xc1, 0xc8, 0xd7, 0xde, 0xdc, 0xe8, 0xd7, 0xe2, 0xcd, 0xd2, 0xc2, 0xbe, + 0xc2, 0xbf, 0xbd, 0xbc, 0xb4, 0xb2, 0xac, 0xac, 0xaf, 0xbc, 0xb7, 0xb4, + 0xbc, 0xcc, 0xd4, 0xdc, 0xe7, 0xf2, 0xf4, 0xfe, 0x07, 0x0c, 0x0c, 0x09, + 0x0a, 0x0c, 0x12, 0x0e, 0x09, 0x0e, 0x10, 0x0f, 0x12, 0x15, 0x11, 0x14, + 0x15, 0x17, 0x19, 0x1a, 0x19, 0x18, 0x1c, 0x21, 0x22, 0x22, 0x26, 0x27, + 0x27, 0x28, 0x29, 0x2b, 0x2e, 0x2c, 0x30, 0x2f, 0x2e, 0x2f, 0x2d, 0x30, + 0x32, 0x30, 0x2f, 0x2e, 0x30, 0x37, 0x3b, 0x38, 0x39, 0x3b, 0x36, 0x38, + 0x3a, 0x3e, 0x3a, 0x40, 0x3f, 0x3e, 0x42, 0x41, 0x43, 0x45, 0x47, 0x49, + 0x48, 0x47, 0x47, 0x49, 0x47, 0x46, 0x49, 0x4d, 0x50, 0x51, 0x50, 0x50, + 0x4d, 0x4f, 0x52, 0x4f, 0x53, 0x57, 0x58, 0x57, 0x57, 0x57, 0x56, 0x57, + 0x56, 0x57, 0x57, 0x58, 0x57, 0x59, 0x59, 0x58, 0x5a, 0x5b, 0x5d, 0x5a, + 0x59, 0x5c, 0x5d, 0x60, 0x5f, 0x5d, 0x5d, 0x5d, 0x61, 0x60, 0x62, 0x62, + 0x62, 0x63, 0x63, 0x64, 0x64, 0x66, 0x67, 0x66, 0x66, 0x66, 0x67, 0x67, + 0x66, 0x67, 0x67, 0x66, 0x66, 0x66, 0x65, 0x63, 0x57, 0x38, 0x28, 0x2f, + 0x1e, 0x12, 0xfa, 0x04, 0x17, 0x0b, 0x02, 0xee, 0xcf, 0xcd, 0xc2, 0xb5, + 0xcb, 0xf5, 0x13, 0x19, 0x05, 0xd2, 0xc5, 0xbe, 0xb3, 0xbe, 0xc4, 0xc3, + 0xca, 0xce, 0xcf, 0xc5, 0xc5, 0xe1, 0xdf, 0xe3, 0xfa, 0xec, 0xe5, 0xf9, + 0x06, 0xe2, 0xd5, 0xc8, 0xd5, 0xe8, 0xfc, 0x1d, 0x07, 0xec, 0x03, 0xfc, + 0xe2, 0xd4, 0xc4, 0xd4, 0xe6, 0xe7, 0xe6, 0xd2, 0xc5, 0xc2, 0xdc, 0xde, + 0xe0, 0xdf, 0xd0, 0xcc, 0xc1, 0xb7, 0xbe, 0xc7, 0xbe, 0xba, 0xc2, 0xcc, + 0xc2, 0xb4, 0xae, 0xb3, 0xb0, 0xbc, 0xb7, 0xb2, 0xc3, 0xcc, 0xcf, 0xda, + 0xe4, 0xea, 0xf4, 0x02, 0x07, 0x07, 0x06, 0x08, 0x09, 0x0c, 0x0c, 
0x09, + 0x0e, 0x12, 0x0e, 0x0d, 0x0f, 0x12, 0x15, 0x15, 0x16, 0x18, 0x18, 0x13, + 0x15, 0x18, 0x1d, 0x1f, 0x1d, 0x21, 0x22, 0x25, 0x27, 0x27, 0x28, 0x2b, + 0x2e, 0x2b, 0x2d, 0x2e, 0x2e, 0x2f, 0x29, 0x32, 0x2f, 0x2c, 0x2f, 0x30, + 0x31, 0x35, 0x38, 0x37, 0x37, 0x37, 0x35, 0x37, 0x3c, 0x3d, 0x3a, 0x3d, + 0x42, 0x3f, 0x41, 0x40, 0x42, 0x43, 0x45, 0x48, 0x4a, 0x47, 0x47, 0x48, + 0x4b, 0x47, 0x4b, 0x4f, 0x52, 0x50, 0x4f, 0x52, 0x4e, 0x4f, 0x51, 0x51, + 0x51, 0x56, 0x57, 0x57, 0x57, 0x57, 0x57, 0x56, 0x56, 0x57, 0x57, 0x59, + 0x59, 0x59, 0x5a, 0x59, 0x59, 0x5a, 0x5d, 0x5c, 0x5b, 0x5c, 0x5d, 0x60, + 0x61, 0x60, 0x5d, 0x5e, 0x61, 0x60, 0x62, 0x62, 0x62, 0x63, 0x62, 0x64, + 0x64, 0x66, 0x67, 0x66, 0x66, 0x66, 0x67, 0x67, 0x66, 0x67, 0x67, 0x66, + 0x65, 0x66, 0x66, 0x65, 0x62, 0x58, 0x52, 0x4d, 0x47, 0x3e, 0x1d, 0xfc, + 0x0f, 0x23, 0x1d, 0x0f, 0xed, 0xd6, 0xcf, 0xc4, 0xc7, 0xec, 0x17, 0x20, + 0x0c, 0xd9, 0xce, 0xcc, 0xba, 0xc5, 0xd7, 0xcc, 0xca, 0xcc, 0xd0, 0xcd, + 0xcc, 0xd6, 0xe0, 0xe7, 0xf9, 0xf5, 0xf2, 0x0c, 0xfb, 0xdc, 0xe2, 0xdf, + 0xec, 0xf9, 0x11, 0x27, 0x1c, 0x0a, 0x11, 0xf9, 0xe2, 0xdd, 0xdb, 0xe2, + 0xec, 0xe4, 0xdb, 0xc6, 0xb6, 0xc0, 0xd9, 0xeb, 0xf2, 0xdc, 0xbd, 0xbc, + 0xbf, 0xbb, 0xbb, 0xc2, 0xb9, 0xcf, 0xd1, 0xc2, 0xd3, 0xc2, 0xb0, 0xae, + 0xac, 0xb3, 0xa8, 0xac, 0xbb, 0xc2, 0xcd, 0xd0, 0xd9, 0xe3, 0xf7, 0xfe, + 0xf7, 0xfb, 0x02, 0x02, 0x06, 0x0b, 0x09, 0x06, 0x0e, 0x07, 0x0c, 0x0d, + 0x0d, 0x11, 0x13, 0x12, 0x17, 0x17, 0x14, 0x10, 0x13, 0x15, 0x18, 0x17, + 0x1c, 0x1d, 0x1f, 0x26, 0x27, 0x24, 0x24, 0x29, 0x2e, 0x2b, 0x2a, 0x2f, + 0x2f, 0x2f, 0x27, 0x30, 0x30, 0x29, 0x2d, 0x2e, 0x31, 0x33, 0x37, 0x37, + 0x39, 0x36, 0x35, 0x35, 0x3a, 0x3d, 0x3d, 0x3d, 0x42, 0x42, 0x40, 0x40, + 0x40, 0x42, 0x46, 0x45, 0x47, 0x45, 0x46, 0x46, 0x49, 0x47, 0x4c, 0x50, + 0x52, 0x52, 0x4e, 0x51, 0x52, 0x51, 0x4f, 0x51, 0x50, 0x52, 0x57, 0x57, + 0x56, 0x57, 0x57, 0x57, 0x57, 0x57, 0x57, 0x57, 0x5a, 0x59, 0x5b, 0x5b, + 0x5b, 0x5a, 0x5a, 0x5c, 0x5b, 0x5b, 0x5d, 0x60, 0x60, 0x61, 0x60, 
0x5f, + 0x62, 0x62, 0x62, 0x62, 0x64, 0x63, 0x63, 0x63, 0x64, 0x66, 0x67, 0x66, + 0x66, 0x66, 0x67, 0x66, 0x66, 0x67, 0x67, 0x66, 0x64, 0x66, 0x66, 0x64, + 0x63, 0x62, 0x5f, 0x5d, 0x5a, 0x4d, 0x38, 0x26, 0x11, 0x0c, 0x22, 0x27, + 0x14, 0xf4, 0xe8, 0xd3, 0xca, 0xe0, 0x16, 0x2e, 0x1c, 0xe7, 0xc5, 0xb5, + 0xbe, 0xc2, 0xe2, 0xd2, 0xc8, 0xc4, 0xc4, 0xc3, 0xd1, 0xd3, 0xda, 0xee, + 0xfe, 0x08, 0x07, 0x10, 0xed, 0xe2, 0xe2, 0xed, 0x05, 0x1a, 0x22, 0x2a, + 0x31, 0x26, 0x13, 0xf9, 0xfa, 0xf5, 0xe6, 0xdc, 0xd6, 0xd1, 0xd0, 0xcc, + 0xc2, 0xe0, 0xec, 0xf8, 0xeb, 0xcf, 0xc1, 0xbc, 0xbc, 0xb7, 0xb7, 0xbf, + 0xbf, 0xcf, 0xcc, 0xcb, 0xc9, 0xb8, 0xb5, 0xac, 0xab, 0xb2, 0xaa, 0xa8, + 0xaf, 0xb9, 0xc3, 0xcc, 0xe2, 0xe3, 0xe7, 0xe6, 0xe4, 0xfa, 0xfc, 0xfc, + 0x04, 0x06, 0x04, 0x09, 0x09, 0x08, 0x0e, 0x0c, 0x0c, 0x0f, 0x0f, 0x15, + 0x17, 0x15, 0x10, 0x12, 0x14, 0x11, 0x13, 0x15, 0x19, 0x17, 0x1c, 0x22, + 0x22, 0x1d, 0x1d, 0x27, 0x2c, 0x27, 0x27, 0x2d, 0x2e, 0x2d, 0x24, 0x2d, + 0x2e, 0x29, 0x2e, 0x31, 0x32, 0x34, 0x36, 0x34, 0x38, 0x36, 0x35, 0x32, + 0x39, 0x3d, 0x3e, 0x3b, 0x3e, 0x42, 0x42, 0x3e, 0x40, 0x41, 0x44, 0x45, + 0x46, 0x44, 0x45, 0x45, 0x48, 0x47, 0x4d, 0x4f, 0x51, 0x50, 0x50, 0x51, + 0x4e, 0x4f, 0x50, 0x51, 0x52, 0x50, 0x55, 0x55, 0x54, 0x57, 0x57, 0x57, + 0x57, 0x57, 0x54, 0x56, 0x5a, 0x5a, 0x5b, 0x5d, 0x5c, 0x5b, 0x5a, 0x5d, + 0x5d, 0x5a, 0x5d, 0x61, 0x5f, 0x5f, 0x61, 0x60, 0x62, 0x62, 0x62, 0x63, + 0x64, 0x63, 0x64, 0x63, 0x63, 0x65, 0x66, 0x67, 0x67, 0x66, 0x66, 0x66, + 0x66, 0x67, 0x67, 0x66, 0x64, 0x66, 0x66, 0x66, 0x65, 0x65, 0x64, 0x62, + 0x5d, 0x55, 0x4b, 0x3a, 0x1a, 0x05, 0x12, 0x1f, 0x28, 0x1c, 0x07, 0xeb, + 0xd1, 0xda, 0x0c, 0x2e, 0x2c, 0x01, 0xcb, 0xab, 0xb7, 0xc4, 0xd5, 0xcc, + 0xc4, 0xbe, 0xb8, 0xc9, 0xd2, 0xe0, 0xf5, 0x0c, 0x09, 0x18, 0x1b, 0x09, + 0xe9, 0xf0, 0xfd, 0x12, 0x2c, 0x34, 0x34, 0x32, 0x2c, 0x1d, 0x14, 0x0a, + 0xf7, 0xe5, 0xd4, 0xdc, 0xde, 0xe8, 0xd6, 0xdf, 0xf5, 0x07, 0x07, 0xf6, + 0xe2, 0xc6, 0xce, 0xcb, 0xc2, 0xb9, 0xb9, 0xc0, 0xc5, 0xc9, 0xc5, 
0xd5, + 0xbe, 0xc0, 0xad, 0xad, 0xac, 0xb7, 0xb1, 0xac, 0xad, 0xb6, 0xbe, 0xcc, + 0xda, 0xd8, 0xdb, 0xd9, 0xe2, 0xf4, 0xf6, 0xfe, 0x04, 0x02, 0x06, 0x07, + 0x08, 0x0e, 0x11, 0x0d, 0x0e, 0x0e, 0x0e, 0x15, 0x17, 0x14, 0x10, 0x14, + 0x12, 0x0b, 0x11, 0x14, 0x15, 0x15, 0x1b, 0x1e, 0x1f, 0x19, 0x1b, 0x27, + 0x28, 0x26, 0x24, 0x29, 0x30, 0x2d, 0x27, 0x2d, 0x2d, 0x2a, 0x2d, 0x32, + 0x32, 0x32, 0x32, 0x32, 0x35, 0x36, 0x35, 0x32, 0x39, 0x3e, 0x40, 0x3a, + 0x3d, 0x40, 0x40, 0x3d, 0x41, 0x42, 0x43, 0x46, 0x44, 0x42, 0x47, 0x46, + 0x47, 0x48, 0x4b, 0x50, 0x51, 0x50, 0x50, 0x50, 0x4f, 0x4e, 0x51, 0x51, + 0x52, 0x50, 0x52, 0x53, 0x54, 0x56, 0x57, 0x56, 0x55, 0x57, 0x54, 0x56, + 0x59, 0x5b, 0x5b, 0x5d, 0x5d, 0x5a, 0x5b, 0x5d, 0x5d, 0x5a, 0x5c, 0x61, + 0x60, 0x60, 0x61, 0x61, 0x61, 0x62, 0x62, 0x63, 0x64, 0x63, 0x64, 0x64, + 0x63, 0x65, 0x65, 0x66, 0x67, 0x66, 0x66, 0x66, 0x66, 0x67, 0x67, 0x66, + 0x64, 0x65, 0x66, 0x66, 0x65, 0x64, 0x64, 0x64, 0x62, 0x5f, 0x5a, 0x4f, + 0x3d, 0x22, 0x0e, 0x17, 0x1f, 0x2a, 0x20, 0x07, 0xe3, 0xe8, 0x0d, 0x27, + 0x33, 0x17, 0xef, 0xc3, 0xb3, 0xc9, 0xca, 0xc4, 0xc6, 0xc0, 0xb9, 0xc5, + 0xca, 0xea, 0x0b, 0x13, 0x12, 0x22, 0x20, 0x05, 0xf5, 0x0f, 0x25, 0x31, + 0x39, 0x37, 0x33, 0x2c, 0x22, 0x1e, 0x14, 0x0c, 0x01, 0xf2, 0xd8, 0xe1, + 0xea, 0x0d, 0xf3, 0xfc, 0x13, 0x11, 0xf7, 0xe9, 0xdc, 0xd2, 0xe7, 0xd1, + 0xc7, 0xc5, 0xc4, 0xcf, 0xdb, 0xcc, 0xcd, 0xe2, 0xb5, 0xc4, 0xb6, 0xb1, + 0xae, 0xc3, 0xb6, 0xb3, 0xae, 0xb9, 0xd0, 0xc6, 0xc5, 0xda, 0xd5, 0xd2, + 0xe0, 0xeb, 0xf2, 0xf9, 0x00, 0x06, 0x0a, 0x09, 0x0f, 0x13, 0x0f, 0x0c, + 0x0c, 0x0e, 0x0b, 0x0f, 0x13, 0x12, 0x13, 0x14, 0x0f, 0x09, 0x11, 0x14, + 0x14, 0x12, 0x17, 0x1c, 0x19, 0x19, 0x17, 0x24, 0x22, 0x21, 0x21, 0x22, + 0x2b, 0x2d, 0x28, 0x28, 0x2d, 0x2c, 0x2d, 0x2f, 0x31, 0x30, 0x31, 0x2f, + 0x2f, 0x34, 0x35, 0x33, 0x35, 0x3a, 0x3d, 0x3c, 0x3d, 0x3e, 0x3d, 0x3d, + 0x3f, 0x41, 0x41, 0x43, 0x42, 0x3f, 0x47, 0x47, 0x49, 0x49, 0x4a, 0x4f, + 0x51, 0x4f, 0x4f, 0x4e, 0x4f, 0x4d, 0x4f, 0x4f, 0x51, 0x52, 0x52, 
0x52, + 0x54, 0x56, 0x57, 0x56, 0x55, 0x57, 0x55, 0x57, 0x58, 0x5a, 0x59, 0x5c, + 0x5c, 0x59, 0x5d, 0x5d, 0x5c, 0x5c, 0x5b, 0x5f, 0x60, 0x60, 0x61, 0x61, + 0x60, 0x62, 0x62, 0x62, 0x64, 0x64, 0x62, 0x64, 0x64, 0x64, 0x65, 0x66, + 0x67, 0x66, 0x66, 0x66, 0x66, 0x67, 0x67, 0x66, 0x64, 0x65, 0x67, 0x66, + 0x65, 0x63, 0x64, 0x65, 0x66, 0x64, 0x62, 0x5e, 0x55, 0x46, 0x2a, 0x22, + 0x22, 0x24, 0x32, 0x27, 0xfb, 0xec, 0x12, 0x2c, 0x38, 0x1f, 0x09, 0xd9, + 0xc7, 0xd0, 0xd2, 0xc9, 0xc7, 0xc2, 0xba, 0xd0, 0xd8, 0xfe, 0x17, 0x18, + 0x21, 0x25, 0x17, 0x02, 0x11, 0x2d, 0x3d, 0x3d, 0x3b, 0x37, 0x35, 0x2e, + 0x2b, 0x23, 0x17, 0x14, 0x07, 0xf2, 0xdb, 0xe3, 0xf9, 0x14, 0x22, 0x22, + 0x14, 0xfb, 0xec, 0xef, 0xf4, 0xdc, 0xe3, 0xce, 0xd6, 0xce, 0xcf, 0xdc, + 0xd7, 0xd7, 0xd7, 0xd0, 0xc1, 0xb7, 0xbe, 0xb8, 0xac, 0xb4, 0xb2, 0xbf, + 0xb5, 0xb6, 0xc6, 0xbc, 0xbb, 0xd1, 0xd4, 0xcf, 0xdc, 0xe2, 0xec, 0xf3, + 0x02, 0x05, 0x07, 0x0b, 0x12, 0x13, 0x12, 0x0e, 0x0d, 0x10, 0x0c, 0x11, + 0x10, 0x0e, 0x0f, 0x11, 0x0d, 0x0a, 0x12, 0x12, 0x11, 0x10, 0x13, 0x18, + 0x17, 0x17, 0x15, 0x20, 0x20, 0x1e, 0x22, 0x22, 0x26, 0x2a, 0x28, 0x27, + 0x2a, 0x2c, 0x2d, 0x2d, 0x30, 0x30, 0x32, 0x32, 0x2e, 0x32, 0x32, 0x33, + 0x37, 0x39, 0x3c, 0x3d, 0x3e, 0x3d, 0x3b, 0x3d, 0x3c, 0x3f, 0x3e, 0x41, + 0x42, 0x3e, 0x45, 0x47, 0x49, 0x4b, 0x4c, 0x50, 0x4f, 0x4f, 0x4e, 0x4e, + 0x4d, 0x4d, 0x52, 0x4f, 0x51, 0x52, 0x51, 0x52, 0x54, 0x55, 0x56, 0x56, + 0x57, 0x55, 0x55, 0x57, 0x59, 0x5a, 0x5a, 0x5a, 0x5b, 0x59, 0x5d, 0x5d, + 0x5c, 0x5d, 0x5b, 0x5c, 0x5f, 0x61, 0x61, 0x60, 0x60, 0x61, 0x62, 0x62, + 0x64, 0x64, 0x62, 0x64, 0x65, 0x64, 0x66, 0x67, 0x67, 0x67, 0x66, 0x65, + 0x66, 0x67, 0x67, 0x66, 0x65, 0x65, 0x66, 0x67, 0x66, 0x65, 0x64, 0x66, + 0x67, 0x66, 0x65, 0x62, 0x5d, 0x56, 0x45, 0x34, 0x2e, 0x24, 0x29, 0x32, + 0x28, 0x04, 0x16, 0x32, 0x3a, 0x2d, 0x19, 0xf0, 0xd9, 0xdc, 0xdc, 0xcc, + 0xc7, 0xce, 0xcb, 0xda, 0xf9, 0x1a, 0x21, 0x20, 0x2c, 0x22, 0x12, 0x1d, + 0x33, 0x41, 0x45, 0x43, 0x3d, 0x37, 0x2f, 0x26, 0x21, 0x1e, 0x1c, 
0x14, + 0x06, 0xfb, 0x03, 0x17, 0x27, 0x2f, 0x27, 0x11, 0xf4, 0xe2, 0xeb, 0xe7, + 0xe7, 0xd8, 0xe9, 0xe0, 0xdd, 0xd7, 0xdb, 0xe3, 0xeb, 0xe4, 0xd9, 0xd5, + 0xb3, 0xb0, 0xc1, 0xbc, 0xab, 0xaf, 0xb7, 0xbc, 0xb2, 0xb1, 0xb4, 0xb7, + 0xb5, 0xbc, 0xc7, 0xcc, 0xd8, 0xdf, 0xe7, 0xf3, 0xfd, 0x02, 0x07, 0x0e, + 0x0f, 0x14, 0x16, 0x12, 0x12, 0x0d, 0x0f, 0x14, 0x0e, 0x0f, 0x0c, 0x0d, + 0x0c, 0x09, 0x11, 0x10, 0x0c, 0x0d, 0x0f, 0x13, 0x17, 0x12, 0x12, 0x17, + 0x1b, 0x1d, 0x1d, 0x21, 0x1f, 0x28, 0x28, 0x28, 0x2a, 0x28, 0x2b, 0x30, + 0x32, 0x2e, 0x31, 0x32, 0x2f, 0x31, 0x32, 0x34, 0x38, 0x38, 0x38, 0x39, + 0x3d, 0x3b, 0x3c, 0x3c, 0x3a, 0x3e, 0x3e, 0x3f, 0x42, 0x40, 0x44, 0x4a, + 0x4a, 0x4c, 0x4c, 0x4f, 0x4f, 0x52, 0x4e, 0x4e, 0x4e, 0x4f, 0x53, 0x52, + 0x52, 0x52, 0x51, 0x51, 0x53, 0x56, 0x56, 0x55, 0x56, 0x56, 0x55, 0x57, + 0x5a, 0x5a, 0x5b, 0x5a, 0x5a, 0x5a, 0x5d, 0x5c, 0x5d, 0x5e, 0x5c, 0x5c, + 0x5f, 0x61, 0x61, 0x5f, 0x61, 0x62, 0x62, 0x62, 0x63, 0x64, 0x63, 0x64, + 0x65, 0x64, 0x65, 0x67, 0x67, 0x66, 0x65, 0x65, 0x65, 0x67, 0x66, 0x65, + 0x66, 0x66, 0x66, 0x67, 0x67, 0x66, 0x65, 0x65, 0x65, 0x65, 0x64, 0x62, + 0x60, 0x5d, 0x57, 0x4a, 0x42, 0x3b, 0x26, 0x25, 0x2e, 0x2d, 0x25, 0x33, + 0x3a, 0x37, 0x2c, 0x0c, 0xd5, 0xc9, 0xd5, 0xce, 0xc9, 0xce, 0xd2, 0xee, + 0x16, 0x28, 0x22, 0x2b, 0x31, 0x2c, 0x2d, 0x39, 0x44, 0x47, 0x49, 0x47, + 0x3f, 0x33, 0x2c, 0x28, 0x28, 0x27, 0x1e, 0x16, 0x1f, 0x28, 0x2f, 0x38, + 0x37, 0x28, 0x11, 0xfe, 0xec, 0xed, 0xf5, 0xe2, 0xd7, 0xdd, 0xec, 0xf4, + 0xf5, 0xee, 0xf5, 0xfb, 0xee, 0xdc, 0xd7, 0xe0, 0xaf, 0xa8, 0xc4, 0xc2, + 0xb6, 0xb2, 0xae, 0xab, 0xac, 0xac, 0xb1, 0xb3, 0xb5, 0xb4, 0xba, 0xc6, + 0xce, 0xd7, 0xe4, 0xec, 0xf7, 0x01, 0x09, 0x0e, 0x11, 0x15, 0x15, 0x14, + 0x0f, 0x0d, 0x10, 0x10, 0x0d, 0x10, 0x0c, 0x0c, 0x0a, 0x09, 0x0c, 0x0d, + 0x0c, 0x0c, 0x0e, 0x0f, 0x0d, 0x12, 0x0f, 0x12, 0x16, 0x18, 0x1b, 0x21, + 0x1f, 0x22, 0x26, 0x27, 0x29, 0x28, 0x2c, 0x30, 0x31, 0x31, 0x2d, 0x2e, + 0x2d, 0x31, 0x35, 0x36, 0x34, 0x37, 0x36, 0x37, 0x3b, 0x39, 0x3d, 
0x3a, + 0x3b, 0x3d, 0x3c, 0x41, 0x43, 0x40, 0x43, 0x47, 0x48, 0x4c, 0x4d, 0x4e, + 0x4b, 0x4d, 0x4d, 0x4e, 0x50, 0x51, 0x53, 0x52, 0x53, 0x54, 0x51, 0x50, + 0x53, 0x55, 0x56, 0x55, 0x57, 0x58, 0x57, 0x58, 0x59, 0x5a, 0x5d, 0x5c, + 0x5a, 0x59, 0x5d, 0x5c, 0x5d, 0x5e, 0x5c, 0x5b, 0x5f, 0x5f, 0x61, 0x5e, + 0x61, 0x62, 0x62, 0x62, 0x63, 0x65, 0x64, 0x65, 0x66, 0x64, 0x64, 0x67, + 0x66, 0x66, 0x66, 0x65, 0x65, 0x66, 0x66, 0x66, 0x66, 0x66, 0x67, 0x67, + 0x67, 0x67, 0x66, 0x65, 0x64, 0x64, 0x65, 0x63, 0x62, 0x5f, 0x5e, 0x5a, + 0x53, 0x4d, 0x37, 0x2a, 0x20, 0x2b, 0x37, 0x3a, 0x37, 0x3c, 0x33, 0x1d, + 0xe2, 0xcc, 0xd6, 0xc7, 0xc4, 0xc3, 0xd6, 0x00, 0x24, 0x2c, 0x2c, 0x37, + 0x39, 0x3d, 0x42, 0x45, 0x49, 0x4d, 0x48, 0x44, 0x3e, 0x39, 0x37, 0x37, + 0x34, 0x32, 0x32, 0x37, 0x3a, 0x3a, 0x36, 0x37, 0x2e, 0x25, 0x18, 0x04, + 0x02, 0x00, 0x00, 0x01, 0xff, 0x05, 0x12, 0x10, 0x08, 0x0e, 0x12, 0x03, + 0xf7, 0xf7, 0xf2, 0xec, 0xab, 0xa4, 0xa8, 0xad, 0xb9, 0xb4, 0xac, 0xaa, + 0xad, 0xa8, 0xac, 0xb0, 0xb7, 0xb4, 0xc2, 0xcc, 0xcc, 0xd3, 0xd9, 0xe1, + 0xf3, 0x04, 0x0b, 0x10, 0x12, 0x16, 0x15, 0x14, 0x11, 0x12, 0x12, 0x12, + 0x10, 0x0c, 0x0b, 0x0a, 0x0a, 0x0b, 0x0c, 0x0c, 0x0c, 0x0a, 0x09, 0x08, + 0x08, 0x0f, 0x0a, 0x09, 0x10, 0x17, 0x17, 0x20, 0x1f, 0x1e, 0x22, 0x26, + 0x28, 0x27, 0x2a, 0x2d, 0x2f, 0x31, 0x2d, 0x2e, 0x2d, 0x2d, 0x33, 0x34, + 0x34, 0x36, 0x37, 0x39, 0x3c, 0x38, 0x3d, 0x37, 0x3b, 0x3e, 0x3d, 0x40, + 0x42, 0x3d, 0x42, 0x47, 0x47, 0x48, 0x4c, 0x4d, 0x4b, 0x4c, 0x4d, 0x51, + 0x50, 0x4f, 0x52, 0x52, 0x52, 0x54, 0x51, 0x52, 0x56, 0x54, 0x56, 0x56, + 0x56, 0x57, 0x58, 0x58, 0x59, 0x5b, 0x5e, 0x5b, 0x59, 0x59, 0x5c, 0x5b, + 0x5a, 0x5d, 0x5d, 0x5c, 0x60, 0x5e, 0x60, 0x60, 0x60, 0x63, 0x64, 0x63, + 0x63, 0x65, 0x64, 0x65, 0x66, 0x66, 0x65, 0x66, 0x66, 0x66, 0x65, 0x66, + 0x65, 0x66, 0x67, 0x67, 0x66, 0x66, 0x67, 0x67, 0x67, 0x66, 0x65, 0x65, + 0x65, 0x65, 0x65, 0x66, 0x65, 0x63, 0x62, 0x61, 0x5d, 0x57, 0x48, 0x38, + 0x2a, 0x21, 0x32, 0x3e, 0x3d, 0x42, 0x3c, 0x2b, 0xfc, 0xda, 0xd2, 
0xc4, + 0xc0, 0xca, 0xf1, 0x0f, 0x32, 0x36, 0x39, 0x3d, 0x41, 0x43, 0x49, 0x4c, + 0x4e, 0x4e, 0x46, 0x44, 0x45, 0x43, 0x41, 0x3e, 0x3f, 0x40, 0x3f, 0x3f, + 0x3e, 0x3d, 0x3b, 0x32, 0x2a, 0x22, 0x15, 0x0c, 0x14, 0x16, 0x1b, 0x24, + 0x1e, 0x1d, 0x21, 0x22, 0x22, 0x1b, 0x0f, 0x0a, 0x06, 0xf2, 0xee, 0xe3, + 0xa7, 0xa9, 0xaf, 0xa9, 0xaf, 0xaa, 0xa8, 0xab, 0xb0, 0xac, 0xab, 0xb9, + 0xb8, 0xb1, 0xc9, 0xc8, 0xc2, 0xc0, 0xcb, 0xda, 0xf1, 0x03, 0x07, 0x0c, + 0x13, 0x17, 0x19, 0x18, 0x13, 0x11, 0x10, 0x0f, 0x0d, 0x0a, 0x0c, 0x0c, + 0x08, 0x08, 0x0c, 0x0c, 0x0e, 0x0d, 0x07, 0x07, 0x05, 0x07, 0x07, 0x04, + 0x0b, 0x13, 0x12, 0x19, 0x1d, 0x1a, 0x1e, 0x24, 0x28, 0x2a, 0x2b, 0x2d, + 0x2d, 0x2e, 0x2d, 0x2d, 0x2e, 0x2d, 0x32, 0x31, 0x33, 0x33, 0x37, 0x3a, + 0x3a, 0x36, 0x3b, 0x37, 0x3b, 0x3d, 0x3f, 0x40, 0x42, 0x3d, 0x42, 0x46, + 0x46, 0x47, 0x4c, 0x4c, 0x4d, 0x4e, 0x4d, 0x4f, 0x50, 0x4f, 0x52, 0x52, + 0x52, 0x53, 0x52, 0x52, 0x55, 0x54, 0x54, 0x57, 0x57, 0x57, 0x59, 0x5a, + 0x5a, 0x5d, 0x5d, 0x5a, 0x5a, 0x5b, 0x5d, 0x5c, 0x5c, 0x5d, 0x5f, 0x5e, + 0x5f, 0x5d, 0x60, 0x62, 0x60, 0x62, 0x64, 0x64, 0x62, 0x64, 0x64, 0x65, + 0x65, 0x67, 0x65, 0x65, 0x66, 0x66, 0x65, 0x66, 0x66, 0x66, 0x67, 0x67, + 0x66, 0x66, 0x67, 0x67, 0x67, 0x65, 0x65, 0x65, 0x65, 0x64, 0x65, 0x66, + 0x65, 0x63, 0x62, 0x62, 0x61, 0x5d, 0x57, 0x4d, 0x40, 0x31, 0x32, 0x40, + 0x45, 0x46, 0x47, 0x34, 0x14, 0xee, 0xcc, 0xd4, 0xdd, 0xd5, 0x04, 0x22, + 0x38, 0x3c, 0x41, 0x44, 0x43, 0x46, 0x4c, 0x4e, 0x50, 0x4c, 0x48, 0x48, + 0x48, 0x47, 0x47, 0x46, 0x47, 0x45, 0x41, 0x41, 0x3f, 0x3d, 0x37, 0x2d, + 0x22, 0x12, 0x10, 0x1d, 0x26, 0x2d, 0x30, 0x37, 0x34, 0x32, 0x31, 0x2d, + 0x28, 0x1b, 0x0b, 0xfb, 0xe7, 0xd7, 0xcc, 0xc5, 0xa7, 0xa8, 0xa9, 0xa5, + 0xa2, 0x9f, 0xa3, 0xaa, 0xad, 0xae, 0xac, 0xb4, 0xb0, 0xac, 0xb3, 0xb7, + 0xbc, 0xc1, 0xc8, 0xd9, 0xf0, 0xfb, 0x05, 0x0c, 0x13, 0x16, 0x17, 0x18, + 0x15, 0x12, 0x0c, 0x0e, 0x0c, 0x08, 0x0a, 0x0a, 0x0b, 0x0a, 0x09, 0x09, + 0x0b, 0x07, 0x05, 0x06, 0x04, 0x03, 0x04, 0x01, 0x02, 0x0d, 0x11, 
0x15, + 0x18, 0x17, 0x1c, 0x22, 0x27, 0x2c, 0x2f, 0x2d, 0x2d, 0x2d, 0x2d, 0x2d, + 0x2c, 0x2d, 0x32, 0x32, 0x35, 0x33, 0x37, 0x38, 0x38, 0x37, 0x3a, 0x38, + 0x3a, 0x3d, 0x3e, 0x40, 0x42, 0x41, 0x42, 0x45, 0x46, 0x48, 0x4a, 0x48, + 0x4b, 0x4e, 0x4d, 0x4f, 0x50, 0x50, 0x52, 0x52, 0x52, 0x53, 0x54, 0x52, + 0x54, 0x54, 0x55, 0x55, 0x58, 0x57, 0x58, 0x5c, 0x5c, 0x5d, 0x5d, 0x5c, + 0x5c, 0x5d, 0x5d, 0x5d, 0x5f, 0x60, 0x60, 0x60, 0x60, 0x5d, 0x60, 0x63, + 0x61, 0x61, 0x63, 0x63, 0x62, 0x62, 0x64, 0x64, 0x65, 0x66, 0x65, 0x65, + 0x65, 0x65, 0x64, 0x65, 0x66, 0x66, 0x67, 0x67, 0x67, 0x67, 0x67, 0x67, + 0x67, 0x66, 0x65, 0x65, 0x64, 0x64, 0x64, 0x65, 0x64, 0x62, 0x60, 0x60, + 0x61, 0x60, 0x5c, 0x57, 0x4f, 0x42, 0x30, 0x38, 0x47, 0x4b, 0x4d, 0x42, + 0x27, 0x04, 0xde, 0xf2, 0xec, 0xd7, 0x09, 0x2f, 0x3e, 0x44, 0x4a, 0x49, + 0x47, 0x4a, 0x4d, 0x51, 0x50, 0x4e, 0x4d, 0x4d, 0x4d, 0x4d, 0x4b, 0x48, + 0x48, 0x47, 0x45, 0x47, 0x45, 0x42, 0x37, 0x2d, 0x2a, 0x2d, 0x33, 0x3a, + 0x3d, 0x3c, 0x3a, 0x3a, 0x38, 0x30, 0x28, 0x22, 0x1a, 0x0c, 0xf7, 0xde, + 0xd2, 0xc4, 0xb6, 0xaf, 0xa7, 0xac, 0xa9, 0xa5, 0xa7, 0xa1, 0xa2, 0xac, + 0xac, 0xb4, 0xac, 0xb1, 0xaf, 0xaf, 0xaf, 0xb1, 0xb6, 0xc9, 0xd6, 0xd8, + 0xe6, 0xf5, 0x07, 0x0c, 0x12, 0x14, 0x17, 0x17, 0x17, 0x14, 0x11, 0x0c, + 0x0c, 0x07, 0x07, 0x0a, 0x07, 0x06, 0x05, 0x02, 0x05, 0x02, 0x02, 0x01, + 0x01, 0xff, 0xfe, 0xfc, 0xfa, 0x06, 0x0c, 0x10, 0x17, 0x18, 0x1b, 0x22, + 0x27, 0x28, 0x2d, 0x2d, 0x2b, 0x2d, 0x2d, 0x2b, 0x2b, 0x2c, 0x31, 0x32, + 0x33, 0x32, 0x36, 0x38, 0x39, 0x38, 0x39, 0x3b, 0x3b, 0x3e, 0x3d, 0x3f, + 0x41, 0x42, 0x42, 0x47, 0x47, 0x48, 0x49, 0x48, 0x4a, 0x4d, 0x4e, 0x51, + 0x51, 0x4f, 0x51, 0x52, 0x55, 0x54, 0x57, 0x56, 0x56, 0x54, 0x56, 0x57, + 0x5a, 0x5a, 0x59, 0x5a, 0x5c, 0x5d, 0x5e, 0x5d, 0x5d, 0x5e, 0x5d, 0x5f, + 0x61, 0x62, 0x62, 0x62, 0x5f, 0x5d, 0x5e, 0x62, 0x61, 0x5f, 0x62, 0x63, + 0x61, 0x60, 0x63, 0x63, 0x64, 0x65, 0x65, 0x66, 0x67, 0x66, 0x65, 0x63, + 0x66, 0x66, 0x66, 0x66, 0x66, 0x67, 0x67, 0x67, 0x67, 0x66, 0x64, 
0x63, + 0x64, 0x63, 0x62, 0x63, 0x62, 0x62, 0x61, 0x60, 0x61, 0x61, 0x5d, 0x5a, + 0x55, 0x50, 0x44, 0x36, 0x41, 0x4c, 0x4f, 0x4b, 0x36, 0x22, 0x03, 0xf5, + 0xe4, 0xf0, 0x17, 0x32, 0x45, 0x48, 0x4b, 0x4c, 0x4f, 0x4f, 0x52, 0x52, + 0x51, 0x51, 0x50, 0x4d, 0x4d, 0x4f, 0x4c, 0x4d, 0x4d, 0x4a, 0x49, 0x4a, + 0x48, 0x44, 0x41, 0x40, 0x42, 0x45, 0x44, 0x40, 0x3d, 0x3e, 0x3b, 0x37, + 0x30, 0x26, 0x1d, 0x17, 0x08, 0xf0, 0xeb, 0xe8, 0xe1, 0xd2, 0xc1, 0xbe, + 0xa4, 0xac, 0xb3, 0xa4, 0xa7, 0xa6, 0xb2, 0xc1, 0xaf, 0xbc, 0xb6, 0xb5, + 0xb1, 0xab, 0xb1, 0xb5, 0xb7, 0xc5, 0xcb, 0xce, 0xdb, 0xf8, 0x09, 0x0c, + 0x12, 0x15, 0x17, 0x18, 0x19, 0x17, 0x13, 0x0e, 0x0c, 0x08, 0x06, 0x07, + 0x01, 0x00, 0xfd, 0xfb, 0x00, 0xfd, 0xfa, 0xf6, 0xf8, 0xf8, 0xf5, 0xf9, + 0xf7, 0xff, 0x07, 0x0c, 0x13, 0x12, 0x16, 0x1e, 0x27, 0x24, 0x2a, 0x28, + 0x29, 0x2b, 0x2e, 0x2b, 0x2a, 0x2c, 0x30, 0x31, 0x32, 0x33, 0x36, 0x37, + 0x38, 0x39, 0x3a, 0x3a, 0x3c, 0x3d, 0x3e, 0x3e, 0x3e, 0x42, 0x44, 0x47, + 0x47, 0x48, 0x48, 0x47, 0x4a, 0x4d, 0x50, 0x52, 0x51, 0x4f, 0x4e, 0x52, + 0x57, 0x54, 0x55, 0x57, 0x57, 0x57, 0x56, 0x57, 0x5a, 0x5a, 0x5c, 0x59, + 0x5c, 0x5d, 0x5f, 0x5f, 0x5f, 0x5e, 0x5d, 0x5e, 0x60, 0x62, 0x62, 0x62, + 0x5f, 0x5f, 0x60, 0x61, 0x61, 0x60, 0x60, 0x62, 0x60, 0x5f, 0x62, 0x63, + 0x63, 0x64, 0x65, 0x66, 0x67, 0x66, 0x65, 0x63, 0x65, 0x66, 0x66, 0x66, + 0x66, 0x67, 0x67, 0x67, 0x67, 0x67, 0x65, 0x63, 0x62, 0x62, 0x61, 0x5f, + 0x5f, 0x60, 0x61, 0x61, 0x61, 0x61, 0x5e, 0x5d, 0x58, 0x54, 0x51, 0x44, + 0x41, 0x49, 0x50, 0x4f, 0x45, 0x2e, 0x12, 0xf2, 0xec, 0xf8, 0x27, 0x40, + 0x4a, 0x4d, 0x4e, 0x4e, 0x52, 0x52, 0x53, 0x53, 0x52, 0x52, 0x4f, 0x4f, + 0x4f, 0x52, 0x52, 0x52, 0x50, 0x4f, 0x4d, 0x4c, 0x4b, 0x4c, 0x4d, 0x4c, + 0x4a, 0x4a, 0x48, 0x45, 0x44, 0x41, 0x3a, 0x32, 0x2c, 0x27, 0x22, 0x19, + 0x02, 0xf3, 0xf9, 0xf7, 0xf4, 0xee, 0xde, 0xde, 0xa5, 0xab, 0xb4, 0xad, + 0xa8, 0xa7, 0xab, 0xb4, 0xb2, 0xb4, 0xb2, 0xbe, 0xb0, 0xa6, 0xaf, 0xb1, + 0xb2, 0xb5, 0xb9, 0xc7, 0xd5, 0xf8, 0x07, 0x0a, 0x14, 0x14, 0x13, 
0x17, + 0x17, 0x18, 0x15, 0x10, 0x0e, 0x0c, 0x08, 0x04, 0x01, 0xf9, 0xf4, 0xf7, + 0xf7, 0xf7, 0xf5, 0xf2, 0xf3, 0xf2, 0xf1, 0xf2, 0xf6, 0xfe, 0x04, 0x06, + 0x0c, 0x0c, 0x12, 0x1b, 0x22, 0x25, 0x2a, 0x26, 0x27, 0x2a, 0x2b, 0x29, + 0x29, 0x2b, 0x2d, 0x34, 0x31, 0x33, 0x37, 0x37, 0x35, 0x38, 0x39, 0x39, + 0x3b, 0x3d, 0x3e, 0x3d, 0x3d, 0x42, 0x45, 0x45, 0x46, 0x49, 0x48, 0x48, + 0x49, 0x4b, 0x50, 0x52, 0x51, 0x52, 0x4e, 0x54, 0x58, 0x56, 0x54, 0x57, + 0x57, 0x59, 0x59, 0x5a, 0x5b, 0x5b, 0x5b, 0x5b, 0x5b, 0x5b, 0x5e, 0x5e, + 0x5f, 0x5f, 0x5e, 0x5e, 0x60, 0x62, 0x60, 0x60, 0x60, 0x60, 0x60, 0x61, + 0x62, 0x62, 0x5f, 0x62, 0x5d, 0x5d, 0x61, 0x62, 0x63, 0x63, 0x63, 0x64, + 0x65, 0x65, 0x65, 0x65, 0x65, 0x65, 0x66, 0x66, 0x67, 0x67, 0x67, 0x67, + 0x67, 0x66, 0x65, 0x64, 0x62, 0x62, 0x61, 0x5d, 0x5d, 0x5e, 0x5e, 0x5e, + 0x5f, 0x60, 0x5e, 0x5c, 0x5b, 0x5a, 0x56, 0x52, 0x4b, 0x4b, 0x4e, 0x51, + 0x4e, 0x44, 0x2e, 0x1e, 0x04, 0x00, 0x31, 0x4a, 0x4d, 0x50, 0x52, 0x52, + 0x54, 0x53, 0x54, 0x53, 0x53, 0x52, 0x51, 0x52, 0x53, 0x55, 0x54, 0x52, + 0x53, 0x51, 0x50, 0x50, 0x51, 0x52, 0x52, 0x52, 0x4f, 0x4b, 0x47, 0x42, + 0x41, 0x41, 0x3f, 0x3a, 0x32, 0x2a, 0x24, 0x20, 0x1f, 0x17, 0x12, 0x06, + 0x01, 0x0a, 0x02, 0xf7, 0xaa, 0xb7, 0xb6, 0xac, 0xa4, 0xa7, 0xa7, 0xae, + 0xae, 0xaa, 0xac, 0xb4, 0xb5, 0xb3, 0xac, 0xbc, 0xb1, 0xb1, 0xb7, 0xc3, + 0xd3, 0xf9, 0x08, 0x0c, 0x14, 0x14, 0x15, 0x17, 0x19, 0x19, 0x17, 0x15, + 0x13, 0x0c, 0x09, 0x02, 0xfc, 0xf3, 0xf2, 0xf6, 0xf6, 0xf5, 0xf7, 0xf3, + 0xf3, 0xef, 0xf0, 0xec, 0xf0, 0xf8, 0xff, 0xfc, 0x07, 0x0a, 0x0d, 0x18, + 0x1d, 0x24, 0x28, 0x27, 0x26, 0x29, 0x29, 0x27, 0x27, 0x29, 0x2d, 0x32, + 0x30, 0x32, 0x35, 0x37, 0x32, 0x37, 0x37, 0x38, 0x3c, 0x3a, 0x3b, 0x3d, + 0x3d, 0x42, 0x42, 0x45, 0x46, 0x4b, 0x48, 0x48, 0x49, 0x4d, 0x51, 0x52, + 0x52, 0x52, 0x4f, 0x54, 0x57, 0x57, 0x53, 0x57, 0x58, 0x5a, 0x5d, 0x5c, + 0x5c, 0x5b, 0x57, 0x5a, 0x5a, 0x5b, 0x5d, 0x5d, 0x5f, 0x60, 0x5d, 0x5f, + 0x61, 0x62, 0x5d, 0x5f, 0x61, 0x61, 0x60, 0x61, 0x62, 0x63, 0x61, 
0x61, + 0x5d, 0x5d, 0x5f, 0x62, 0x62, 0x63, 0x63, 0x64, 0x64, 0x63, 0x64, 0x63, + 0x64, 0x65, 0x65, 0x66, 0x67, 0x67, 0x67, 0x67, 0x67, 0x66, 0x64, 0x62, + 0x63, 0x62, 0x60, 0x5e, 0x5f, 0x5d, 0x5c, 0x5d, 0x5d, 0x5d, 0x5d, 0x5b, + 0x59, 0x59, 0x58, 0x57, 0x53, 0x52, 0x51, 0x50, 0x4f, 0x4c, 0x3d, 0x22, + 0x12, 0x15, 0x32, 0x49, 0x4f, 0x53, 0x55, 0x56, 0x56, 0x54, 0x53, 0x52, + 0x52, 0x52, 0x52, 0x54, 0x56, 0x56, 0x54, 0x52, 0x54, 0x52, 0x54, 0x54, + 0x53, 0x54, 0x50, 0x4e, 0x49, 0x47, 0x48, 0x48, 0x45, 0x42, 0x3f, 0x3c, + 0x37, 0x37, 0x36, 0x32, 0x2f, 0x2d, 0x22, 0x1d, 0x1c, 0x19, 0x17, 0x14, + 0xa9, 0xb7, 0xb3, 0xad, 0xa1, 0xa1, 0xa7, 0xab, 0xaa, 0xaa, 0xaa, 0xac, + 0xb1, 0xb3, 0xbe, 0xd3, 0xaf, 0xb1, 0xaf, 0xc3, 0xd6, 0xf1, 0x07, 0x0a, + 0x11, 0x15, 0x17, 0x19, 0x1a, 0x16, 0x15, 0x13, 0x12, 0x0c, 0x07, 0x02, + 0xf9, 0xfa, 0xfb, 0xfa, 0xf9, 0xf8, 0xf7, 0xf4, 0xf4, 0xf2, 0xf1, 0xed, + 0xed, 0xf2, 0xf7, 0xf6, 0xfc, 0x0a, 0x0c, 0x17, 0x1b, 0x20, 0x26, 0x23, + 0x22, 0x27, 0x2a, 0x26, 0x27, 0x29, 0x2d, 0x32, 0x2f, 0x2f, 0x34, 0x36, + 0x33, 0x36, 0x35, 0x37, 0x3a, 0x37, 0x38, 0x3c, 0x3d, 0x40, 0x42, 0x42, + 0x45, 0x49, 0x46, 0x49, 0x4c, 0x50, 0x52, 0x54, 0x52, 0x51, 0x52, 0x57, + 0x57, 0x58, 0x55, 0x59, 0x5a, 0x5a, 0x5d, 0x5b, 0x5b, 0x5a, 0x57, 0x5c, + 0x5a, 0x5c, 0x5d, 0x5d, 0x5e, 0x5f, 0x5d, 0x60, 0x61, 0x60, 0x5e, 0x60, + 0x62, 0x60, 0x5e, 0x61, 0x62, 0x61, 0x60, 0x62, 0x5f, 0x5f, 0x5f, 0x60, + 0x62, 0x62, 0x63, 0x64, 0x65, 0x64, 0x63, 0x62, 0x63, 0x66, 0x65, 0x65, + 0x67, 0x67, 0x66, 0x67, 0x67, 0x66, 0x64, 0x62, 0x63, 0x64, 0x61, 0x5f, + 0x5d, 0x5c, 0x5c, 0x5a, 0x5b, 0x5c, 0x5b, 0x5a, 0x5a, 0x5a, 0x5a, 0x58, + 0x58, 0x58, 0x56, 0x52, 0x51, 0x4c, 0x4c, 0x34, 0x25, 0x20, 0x2d, 0x48, + 0x50, 0x54, 0x56, 0x57, 0x57, 0x56, 0x53, 0x53, 0x53, 0x57, 0x56, 0x56, + 0x57, 0x55, 0x54, 0x53, 0x55, 0x55, 0x55, 0x56, 0x54, 0x53, 0x51, 0x4f, + 0x4d, 0x4d, 0x4c, 0x4a, 0x47, 0x45, 0x43, 0x42, 0x42, 0x42, 0x3f, 0x3b, + 0x37, 0x33, 0x33, 0x2f, 0x29, 0x26, 0x23, 0x1e, 0xa5, 0xa9, 0xb1, 
0xa1, + 0xa1, 0xa1, 0xa7, 0xa8, 0xb1, 0xa8, 0xa7, 0xab, 0xab, 0xaf, 0xba, 0xbc, + 0xac, 0xb1, 0xb5, 0xc8, 0xd2, 0xeb, 0x03, 0x0a, 0x0e, 0x14, 0x19, 0x1d, + 0x19, 0x16, 0x14, 0x12, 0x0e, 0x0b, 0x08, 0xfd, 0xf7, 0x01, 0x02, 0x01, + 0x01, 0xfc, 0xf7, 0xf5, 0xf4, 0xf1, 0xef, 0xee, 0xef, 0xef, 0xf3, 0xf2, + 0xf3, 0x03, 0x0b, 0x14, 0x1a, 0x1c, 0x21, 0x22, 0x22, 0x26, 0x27, 0x23, + 0x24, 0x28, 0x2b, 0x2f, 0x2d, 0x2f, 0x32, 0x32, 0x32, 0x32, 0x35, 0x35, + 0x37, 0x37, 0x37, 0x3a, 0x3d, 0x3d, 0x42, 0x42, 0x46, 0x48, 0x48, 0x4c, + 0x4e, 0x51, 0x52, 0x55, 0x52, 0x53, 0x53, 0x57, 0x56, 0x57, 0x59, 0x5b, + 0x5c, 0x5b, 0x5a, 0x5b, 0x5c, 0x59, 0x5b, 0x5d, 0x5c, 0x5d, 0x5d, 0x5d, + 0x5f, 0x5f, 0x5d, 0x5f, 0x5e, 0x5d, 0x5e, 0x62, 0x61, 0x5e, 0x5d, 0x62, + 0x60, 0x5e, 0x60, 0x60, 0x5d, 0x60, 0x60, 0x5f, 0x5f, 0x60, 0x62, 0x63, + 0x65, 0x66, 0x65, 0x62, 0x62, 0x65, 0x64, 0x66, 0x67, 0x67, 0x67, 0x66, + 0x67, 0x65, 0x64, 0x62, 0x63, 0x64, 0x63, 0x5e, 0x5a, 0x5a, 0x5a, 0x5b, + 0x5b, 0x5a, 0x5a, 0x5d, 0x5e, 0x5f, 0x5e, 0x5c, 0x5b, 0x5a, 0x59, 0x57, + 0x54, 0x49, 0x4b, 0x4d, 0x3b, 0x2f, 0x2e, 0x48, 0x54, 0x56, 0x56, 0x58, + 0x57, 0x56, 0x55, 0x55, 0x55, 0x59, 0x58, 0x57, 0x57, 0x57, 0x57, 0x57, + 0x57, 0x57, 0x56, 0x56, 0x54, 0x54, 0x54, 0x52, 0x51, 0x52, 0x50, 0x4f, + 0x4d, 0x4d, 0x4b, 0x4b, 0x49, 0x47, 0x44, 0x41, 0x3d, 0x3a, 0x38, 0x37, + 0x31, 0x27, 0x22, 0x1b, 0xa0, 0xa0, 0xa3, 0xa3, 0xa7, 0xa1, 0xa3, 0xa1, + 0xae, 0xa8, 0xa7, 0xac, 0xad, 0xaa, 0xb3, 0xb4, 0xa9, 0xb1, 0xb9, 0xc1, + 0xc7, 0xde, 0x00, 0x09, 0x0e, 0x14, 0x19, 0x19, 0x17, 0x16, 0x15, 0x14, + 0x10, 0x0b, 0x07, 0xfe, 0xfb, 0x02, 0x02, 0x04, 0x03, 0x00, 0xfa, 0xf8, + 0xf7, 0xf2, 0xf0, 0xee, 0xed, 0xed, 0xee, 0xef, 0xea, 0xf7, 0x05, 0x0f, + 0x16, 0x15, 0x1c, 0x20, 0x1f, 0x22, 0x22, 0x21, 0x22, 0x27, 0x29, 0x2c, + 0x29, 0x2d, 0x31, 0x30, 0x33, 0x32, 0x33, 0x35, 0x39, 0x38, 0x36, 0x38, + 0x3b, 0x3f, 0x42, 0x41, 0x45, 0x48, 0x49, 0x49, 0x4d, 0x4f, 0x50, 0x51, + 0x52, 0x53, 0x52, 0x55, 0x55, 0x56, 0x58, 0x59, 0x5c, 0x5a, 0x5a, 
0x5b, + 0x5a, 0x5a, 0x5d, 0x5d, 0x5e, 0x5d, 0x5d, 0x5b, 0x5e, 0x5e, 0x5d, 0x5d, + 0x5d, 0x5d, 0x60, 0x62, 0x5d, 0x5c, 0x5e, 0x61, 0x5e, 0x5d, 0x5f, 0x5f, + 0x5c, 0x5d, 0x61, 0x61, 0x5f, 0x5f, 0x61, 0x62, 0x62, 0x63, 0x64, 0x63, + 0x62, 0x63, 0x64, 0x66, 0x67, 0x67, 0x67, 0x66, 0x67, 0x65, 0x63, 0x63, + 0x64, 0x65, 0x65, 0x63, 0x5e, 0x5c, 0x5c, 0x5c, 0x5b, 0x5a, 0x5d, 0x60, + 0x60, 0x60, 0x5f, 0x5d, 0x5c, 0x5a, 0x5a, 0x5c, 0x59, 0x50, 0x47, 0x4c, + 0x4a, 0x43, 0x3d, 0x4c, 0x55, 0x57, 0x58, 0x58, 0x57, 0x57, 0x57, 0x58, + 0x59, 0x59, 0x57, 0x58, 0x57, 0x58, 0x58, 0x59, 0x57, 0x57, 0x57, 0x57, + 0x55, 0x56, 0x55, 0x54, 0x54, 0x53, 0x53, 0x53, 0x53, 0x52, 0x51, 0x50, + 0x4a, 0x48, 0x46, 0x44, 0x3f, 0x38, 0x33, 0x30, 0x29, 0x23, 0x1e, 0x13, + 0x9d, 0xae, 0xbc, 0xa5, 0xa6, 0xa5, 0xa3, 0x9d, 0xa1, 0xa6, 0xa5, 0xaf, + 0xad, 0xaa, 0xba, 0xb6, 0xab, 0xab, 0xaf, 0xaf, 0xc4, 0xd8, 0xfc, 0x09, + 0x0f, 0x14, 0x16, 0x17, 0x17, 0x16, 0x13, 0x13, 0x0f, 0x0c, 0x09, 0x01, + 0xfc, 0x00, 0x04, 0x06, 0x03, 0x02, 0x02, 0x02, 0xfb, 0xf3, 0xf2, 0xf2, + 0xf2, 0xf0, 0xee, 0xed, 0xeb, 0xf3, 0xff, 0x0d, 0x12, 0x16, 0x1b, 0x1d, + 0x1d, 0x1d, 0x1e, 0x1e, 0x21, 0x26, 0x28, 0x27, 0x27, 0x2a, 0x2d, 0x2d, + 0x31, 0x32, 0x32, 0x35, 0x39, 0x33, 0x34, 0x39, 0x3c, 0x3d, 0x3e, 0x42, + 0x46, 0x47, 0x46, 0x4b, 0x4c, 0x4b, 0x4b, 0x4c, 0x4d, 0x4d, 0x4f, 0x4f, + 0x50, 0x52, 0x53, 0x55, 0x57, 0x56, 0x57, 0x58, 0x5a, 0x5d, 0x5d, 0x5d, + 0x5d, 0x5b, 0x5b, 0x5d, 0x5c, 0x59, 0x5a, 0x5d, 0x5d, 0x5c, 0x60, 0x5f, + 0x5c, 0x5b, 0x5e, 0x60, 0x5e, 0x5d, 0x5d, 0x5e, 0x60, 0x5e, 0x60, 0x62, + 0x60, 0x61, 0x61, 0x61, 0x61, 0x62, 0x63, 0x63, 0x62, 0x62, 0x64, 0x66, + 0x67, 0x67, 0x67, 0x67, 0x67, 0x65, 0x63, 0x62, 0x63, 0x65, 0x65, 0x65, + 0x62, 0x5e, 0x5d, 0x5c, 0x5d, 0x5d, 0x5e, 0x5f, 0x5f, 0x5f, 0x5f, 0x5d, + 0x5b, 0x5c, 0x59, 0x5a, 0x5a, 0x57, 0x4d, 0x4a, 0x4f, 0x4d, 0x4c, 0x52, + 0x57, 0x57, 0x58, 0x58, 0x58, 0x58, 0x58, 0x59, 0x59, 0x59, 0x58, 0x59, + 0x5b, 0x5a, 0x58, 0x59, 0x58, 0x58, 0x58, 0x57, 0x57, 0x57, 0x57, 
0x56, + 0x55, 0x55, 0x53, 0x53, 0x53, 0x52, 0x50, 0x4f, 0x4b, 0x48, 0x47, 0x45, + 0x41, 0x39, 0x34, 0x31, 0x2b, 0x22, 0x1e, 0x18, 0xa5, 0xa9, 0xb3, 0xb3, + 0xac, 0xac, 0xb0, 0x9e, 0xa0, 0xa4, 0xa3, 0xac, 0xac, 0xac, 0xb7, 0xb7, + 0xad, 0xad, 0xac, 0xaf, 0xb9, 0xd7, 0xfc, 0x07, 0x11, 0x12, 0x13, 0x15, + 0x16, 0x17, 0x16, 0x11, 0x0e, 0x10, 0x0a, 0x07, 0x00, 0x00, 0x06, 0x06, + 0x05, 0x04, 0x02, 0x00, 0xfc, 0xf7, 0xf7, 0xf5, 0xf7, 0xf2, 0xee, 0xec, + 0xeb, 0xf1, 0xff, 0x0b, 0x12, 0x15, 0x17, 0x19, 0x1b, 0x1b, 0x1c, 0x1c, + 0x20, 0x24, 0x27, 0x27, 0x25, 0x2b, 0x2a, 0x2a, 0x2e, 0x2d, 0x32, 0x37, + 0x34, 0x31, 0x36, 0x37, 0x3a, 0x3b, 0x3d, 0x41, 0x44, 0x45, 0x45, 0x45, + 0x44, 0x43, 0x43, 0x44, 0x46, 0x47, 0x4a, 0x4d, 0x4c, 0x4a, 0x4c, 0x4d, + 0x49, 0x47, 0x4e, 0x50, 0x52, 0x55, 0x55, 0x56, 0x59, 0x5b, 0x5d, 0x5d, + 0x5a, 0x5a, 0x5b, 0x5e, 0x5d, 0x5d, 0x60, 0x5c, 0x5c, 0x5d, 0x5e, 0x5e, + 0x5e, 0x5d, 0x5b, 0x5b, 0x5e, 0x5d, 0x60, 0x62, 0x62, 0x62, 0x62, 0x62, + 0x60, 0x61, 0x62, 0x62, 0x62, 0x62, 0x62, 0x65, 0x65, 0x66, 0x67, 0x67, + 0x67, 0x65, 0x62, 0x62, 0x63, 0x65, 0x65, 0x65, 0x64, 0x62, 0x5f, 0x5e, + 0x5e, 0x5f, 0x60, 0x5f, 0x5e, 0x5d, 0x5d, 0x5d, 0x59, 0x5a, 0x5a, 0x59, + 0x5b, 0x59, 0x55, 0x50, 0x51, 0x53, 0x52, 0x54, 0x57, 0x58, 0x58, 0x58, + 0x58, 0x59, 0x59, 0x5a, 0x5a, 0x5a, 0x59, 0x5b, 0x5a, 0x58, 0x58, 0x59, + 0x59, 0x58, 0x58, 0x58, 0x57, 0x57, 0x57, 0x57, 0x56, 0x57, 0x56, 0x53, + 0x52, 0x52, 0x51, 0x4f, 0x4a, 0x4a, 0x4a, 0x46, 0x45, 0x42, 0x3d, 0x37, + 0x33, 0x2e, 0x2c, 0x27, 0xa9, 0xa7, 0xa5, 0xa7, 0xb0, 0xac, 0xac, 0xa0, + 0x9f, 0xa5, 0xa7, 0xae, 0xab, 0xac, 0xab, 0xae, 0xab, 0xa9, 0xa9, 0xaa, + 0xb1, 0xcc, 0xf7, 0x05, 0x10, 0x0e, 0x10, 0x12, 0x15, 0x17, 0x14, 0x0d, + 0x0c, 0x11, 0x0b, 0x07, 0x00, 0xfe, 0x06, 0x07, 0x06, 0x06, 0x06, 0x02, + 0xfe, 0xf6, 0xf7, 0xf8, 0xf6, 0xf3, 0xed, 0xee, 0xec, 0xf2, 0xfc, 0x07, + 0x0e, 0x12, 0x15, 0x19, 0x1a, 0x19, 0x1a, 0x1c, 0x1e, 0x21, 0x22, 0x22, + 0x22, 0x27, 0x27, 0x2c, 0x2e, 0x2d, 0x33, 0x34, 0x32, 0x35, 0x36, 
0x35, + 0x37, 0x3a, 0x3c, 0x41, 0x41, 0x3c, 0x3c, 0x3d, 0x3d, 0x3d, 0x3d, 0x3d, + 0x40, 0x43, 0x42, 0x43, 0x45, 0x46, 0x49, 0x47, 0x47, 0x47, 0x4b, 0x50, + 0x4d, 0x4d, 0x50, 0x52, 0x54, 0x57, 0x58, 0x57, 0x57, 0x5a, 0x5c, 0x5d, + 0x5e, 0x5f, 0x5d, 0x5d, 0x5d, 0x5d, 0x5d, 0x5d, 0x5d, 0x5d, 0x5c, 0x5c, + 0x5d, 0x5b, 0x5d, 0x60, 0x62, 0x61, 0x62, 0x62, 0x61, 0x60, 0x61, 0x62, + 0x62, 0x60, 0x62, 0x63, 0x62, 0x65, 0x66, 0x67, 0x66, 0x65, 0x62, 0x62, + 0x62, 0x64, 0x65, 0x64, 0x64, 0x62, 0x60, 0x5d, 0x5c, 0x5e, 0x5e, 0x5d, + 0x5d, 0x5a, 0x5b, 0x5c, 0x5b, 0x5a, 0x5a, 0x59, 0x5b, 0x59, 0x58, 0x56, + 0x55, 0x55, 0x57, 0x57, 0x57, 0x58, 0x58, 0x58, 0x58, 0x59, 0x5a, 0x5b, + 0x59, 0x59, 0x5a, 0x5b, 0x59, 0x58, 0x5a, 0x5b, 0x5a, 0x59, 0x58, 0x58, + 0x58, 0x58, 0x57, 0x57, 0x57, 0x57, 0x56, 0x56, 0x54, 0x51, 0x52, 0x51, + 0x4e, 0x4d, 0x4b, 0x47, 0x42, 0x3e, 0x39, 0x31, 0x2c, 0x2b, 0x26, 0x1d, + 0xa7, 0xa3, 0xa7, 0xab, 0xb1, 0xae, 0xa3, 0xa1, 0xa1, 0xa4, 0xa9, 0xa9, + 0xa7, 0xb1, 0xad, 0xac, 0xa7, 0xa6, 0xa6, 0xa7, 0xaa, 0xc0, 0xfb, 0x07, + 0x0f, 0x0b, 0x0d, 0x12, 0x15, 0x16, 0x12, 0x0e, 0x11, 0x0f, 0x0b, 0x09, + 0x03, 0x01, 0x06, 0x08, 0x08, 0x06, 0x05, 0x02, 0x01, 0xfc, 0xfa, 0xf9, + 0xf7, 0xf3, 0xee, 0xed, 0xee, 0xf4, 0xfb, 0x07, 0x0f, 0x12, 0x13, 0x18, + 0x16, 0x17, 0x19, 0x1b, 0x1a, 0x1e, 0x21, 0x21, 0x23, 0x26, 0x27, 0x2d, + 0x2f, 0x31, 0x2f, 0x30, 0x32, 0x33, 0x32, 0x32, 0x37, 0x3a, 0x3c, 0x3b, + 0x35, 0x33, 0x34, 0x37, 0x38, 0x3a, 0x3b, 0x3c, 0x3f, 0x40, 0x42, 0x47, + 0x48, 0x48, 0x48, 0x47, 0x4b, 0x49, 0x48, 0x4c, 0x4e, 0x51, 0x4f, 0x50, + 0x52, 0x55, 0x56, 0x55, 0x56, 0x57, 0x57, 0x59, 0x5d, 0x5f, 0x5d, 0x5d, + 0x5d, 0x5b, 0x5c, 0x5d, 0x5d, 0x5c, 0x5c, 0x5d, 0x5f, 0x5c, 0x5a, 0x5d, + 0x5f, 0x60, 0x5e, 0x5f, 0x60, 0x60, 0x62, 0x62, 0x62, 0x61, 0x62, 0x63, + 0x63, 0x64, 0x66, 0x67, 0x67, 0x65, 0x61, 0x62, 0x62, 0x62, 0x65, 0x64, + 0x63, 0x62, 0x5f, 0x5d, 0x5c, 0x5e, 0x5d, 0x5d, 0x5b, 0x5a, 0x59, 0x5a, + 0x5c, 0x5c, 0x5b, 0x59, 0x59, 0x5a, 0x59, 0x58, 0x57, 0x57, 0x57, 
0x57, + 0x58, 0x59, 0x59, 0x5a, 0x59, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5b, + 0x5b, 0x5a, 0x5a, 0x5a, 0x5a, 0x59, 0x58, 0x57, 0x58, 0x58, 0x56, 0x56, + 0x58, 0x58, 0x57, 0x55, 0x56, 0x54, 0x53, 0x52, 0x51, 0x51, 0x4e, 0x4c, + 0x47, 0x42, 0x3e, 0x38, 0x34, 0x2d, 0x28, 0x20, 0xad, 0xa7, 0xa6, 0xac, + 0xb3, 0xb6, 0xaf, 0xa3, 0x9f, 0xa0, 0xa7, 0xa6, 0xa7, 0xbc, 0xcc, 0xce, + 0xbc, 0xb0, 0xac, 0xa9, 0xa7, 0xc3, 0xfe, 0x07, 0x09, 0x09, 0x0d, 0x14, + 0x16, 0x17, 0x13, 0x11, 0x11, 0x0c, 0x0c, 0x0e, 0x07, 0x01, 0x04, 0x08, + 0x09, 0x06, 0x04, 0x04, 0x03, 0x02, 0xfe, 0xf8, 0xf7, 0xf3, 0xf1, 0xf0, + 0xef, 0xf2, 0xfa, 0x06, 0x0f, 0x10, 0x0f, 0x14, 0x12, 0x16, 0x1a, 0x17, + 0x18, 0x1e, 0x1d, 0x21, 0x21, 0x23, 0x27, 0x2d, 0x2b, 0x2d, 0x2c, 0x2f, + 0x30, 0x32, 0x33, 0x32, 0x33, 0x33, 0x33, 0x31, 0x2d, 0x30, 0x31, 0x33, + 0x34, 0x3a, 0x3d, 0x3a, 0x3d, 0x40, 0x46, 0x48, 0x4a, 0x45, 0x46, 0x44, + 0x47, 0x46, 0x43, 0x4c, 0x51, 0x52, 0x4f, 0x52, 0x53, 0x53, 0x56, 0x57, + 0x57, 0x54, 0x54, 0x57, 0x57, 0x5d, 0x5d, 0x5d, 0x5d, 0x5e, 0x5d, 0x5b, + 0x5d, 0x5d, 0x5d, 0x5d, 0x5d, 0x5d, 0x5b, 0x5c, 0x5d, 0x5f, 0x5e, 0x5d, + 0x61, 0x62, 0x62, 0x62, 0x62, 0x62, 0x62, 0x62, 0x63, 0x64, 0x66, 0x67, + 0x67, 0x65, 0x61, 0x61, 0x62, 0x62, 0x64, 0x65, 0x63, 0x62, 0x5e, 0x5d, + 0x5d, 0x5d, 0x5d, 0x5c, 0x59, 0x59, 0x57, 0x55, 0x57, 0x59, 0x5a, 0x5a, + 0x59, 0x59, 0x58, 0x57, 0x57, 0x57, 0x59, 0x59, 0x59, 0x5a, 0x5a, 0x59, + 0x59, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5b, 0x5b, 0x5b, 0x5a, 0x59, 0x58, + 0x5a, 0x59, 0x59, 0x58, 0x58, 0x58, 0x57, 0x57, 0x58, 0x58, 0x57, 0x54, + 0x56, 0x57, 0x55, 0x53, 0x52, 0x52, 0x4f, 0x4b, 0x48, 0x43, 0x41, 0x3f, + 0x37, 0x34, 0x2d, 0x21, 0xb1, 0xa7, 0xa6, 0xa4, 0x9f, 0xa7, 0xad, 0xa9, + 0xab, 0xac, 0xaa, 0xa7, 0xa3, 0x9e, 0xaa, 0xb1, 0xb1, 0xb4, 0xbd, 0xb8, + 0xa7, 0xc3, 0xf6, 0x05, 0x06, 0x07, 0x0d, 0x15, 0x17, 0x15, 0x12, 0x0f, + 0x11, 0x0e, 0x0f, 0x0c, 0x08, 0x04, 0x07, 0x08, 0x08, 0x09, 0x06, 0x07, + 0x06, 0x04, 0x00, 0xf9, 0xf7, 0xf7, 0xf2, 0xf1, 0xf0, 0xf3, 0xfd, 
0x07, + 0x0c, 0x0d, 0x0b, 0x0f, 0x11, 0x14, 0x16, 0x16, 0x1a, 0x1d, 0x1c, 0x1f, + 0x20, 0x25, 0x29, 0x29, 0x27, 0x2a, 0x2a, 0x2b, 0x30, 0x32, 0x30, 0x2f, + 0x2d, 0x2f, 0x2c, 0x2a, 0x2d, 0x30, 0x32, 0x36, 0x37, 0x37, 0x35, 0x34, + 0x37, 0x3c, 0x44, 0x44, 0x45, 0x42, 0x42, 0x3e, 0x44, 0x44, 0x47, 0x4a, + 0x4b, 0x4c, 0x4f, 0x4d, 0x4e, 0x50, 0x50, 0x52, 0x52, 0x4f, 0x51, 0x50, + 0x54, 0x57, 0x57, 0x59, 0x5b, 0x5d, 0x5c, 0x5d, 0x5d, 0x5e, 0x5f, 0x60, + 0x5d, 0x5e, 0x5c, 0x5b, 0x5c, 0x5d, 0x5e, 0x5f, 0x60, 0x61, 0x61, 0x60, + 0x61, 0x61, 0x62, 0x62, 0x63, 0x64, 0x65, 0x66, 0x67, 0x66, 0x62, 0x62, + 0x62, 0x62, 0x63, 0x65, 0x63, 0x62, 0x5f, 0x5f, 0x5f, 0x5d, 0x5d, 0x5a, + 0x59, 0x5a, 0x56, 0x54, 0x55, 0x57, 0x57, 0x59, 0x5a, 0x5a, 0x59, 0x58, + 0x57, 0x56, 0x57, 0x58, 0x59, 0x5a, 0x5a, 0x58, 0x59, 0x57, 0x58, 0x59, + 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x59, 0x59, 0x59, 0x59, 0x59, 0x58, + 0x58, 0x58, 0x57, 0x57, 0x58, 0x57, 0x57, 0x56, 0x57, 0x57, 0x57, 0x56, + 0x53, 0x52, 0x51, 0x4e, 0x4d, 0x4a, 0x47, 0x43, 0x3d, 0x37, 0x32, 0x2e, + 0xb4, 0xa8, 0xa9, 0xa8, 0xa1, 0x9e, 0x9d, 0x9f, 0xa1, 0xb1, 0xb0, 0xac, + 0xaa, 0xa9, 0xa7, 0xa1, 0xa0, 0xab, 0xac, 0xaf, 0xa7, 0xbc, 0xe9, 0x03, + 0x06, 0x07, 0x0b, 0x0e, 0x12, 0x12, 0x11, 0x0f, 0x10, 0x0c, 0x0c, 0x0b, + 0x09, 0x06, 0x07, 0x09, 0x0b, 0x0b, 0x08, 0x07, 0x06, 0x05, 0x02, 0xfc, + 0xf7, 0xfb, 0xf4, 0xf1, 0xf0, 0xf5, 0x00, 0x06, 0x08, 0x0a, 0x07, 0x0f, + 0x0f, 0x13, 0x12, 0x12, 0x1b, 0x1d, 0x1d, 0x21, 0x21, 0x21, 0x23, 0x24, + 0x23, 0x28, 0x27, 0x2c, 0x30, 0x30, 0x2d, 0x2c, 0x28, 0x28, 0x2b, 0x2a, + 0x2e, 0x2e, 0x36, 0x36, 0x32, 0x30, 0x2e, 0x30, 0x36, 0x3c, 0x42, 0x40, + 0x40, 0x40, 0x40, 0x43, 0x44, 0x45, 0x47, 0x49, 0x4c, 0x4b, 0x4d, 0x4a, + 0x48, 0x4b, 0x4f, 0x4d, 0x49, 0x48, 0x4c, 0x50, 0x52, 0x56, 0x57, 0x57, + 0x58, 0x5a, 0x59, 0x5c, 0x5c, 0x5d, 0x5e, 0x5f, 0x5d, 0x5b, 0x5b, 0x5c, + 0x5d, 0x5d, 0x5d, 0x5e, 0x5d, 0x5e, 0x5e, 0x5e, 0x5f, 0x60, 0x60, 0x62, + 0x62, 0x63, 0x64, 0x65, 0x66, 0x67, 0x64, 0x62, 0x62, 0x64, 0x63, 
0x63, + 0x63, 0x62, 0x5e, 0x5e, 0x5e, 0x5b, 0x59, 0x59, 0x59, 0x57, 0x53, 0x54, + 0x57, 0x57, 0x57, 0x58, 0x59, 0x5a, 0x58, 0x58, 0x59, 0x57, 0x57, 0x58, + 0x59, 0x5a, 0x59, 0x57, 0x57, 0x57, 0x57, 0x59, 0x5a, 0x5a, 0x5a, 0x5b, + 0x5b, 0x5a, 0x5a, 0x59, 0x59, 0x59, 0x59, 0x59, 0x57, 0x57, 0x57, 0x59, + 0x59, 0x57, 0x57, 0x57, 0x57, 0x57, 0x56, 0x54, 0x52, 0x4f, 0x4f, 0x4d, + 0x4a, 0x47, 0x46, 0x46, 0x45, 0x41, 0x3c, 0x37, 0xa7, 0xa8, 0xac, 0xa7, + 0xa4, 0xa2, 0x9f, 0xa1, 0xa1, 0xa7, 0xab, 0xac, 0xb0, 0xaf, 0xab, 0xa7, + 0xaa, 0xb0, 0xab, 0xa6, 0xa7, 0xba, 0xe7, 0x04, 0x08, 0x0a, 0x0c, 0x0c, + 0x0f, 0x12, 0x11, 0x13, 0x0f, 0x08, 0x0d, 0x0b, 0x0a, 0x06, 0x08, 0x0d, + 0x11, 0x0d, 0x0b, 0x0a, 0x07, 0x07, 0x03, 0x00, 0xf9, 0xfc, 0xf8, 0xf2, + 0xf0, 0xf8, 0xfd, 0x05, 0x07, 0x06, 0x07, 0x0c, 0x0f, 0x12, 0x11, 0x15, + 0x1a, 0x1b, 0x1f, 0x1e, 0x1c, 0x1c, 0x1d, 0x1f, 0x22, 0x23, 0x2a, 0x2c, + 0x27, 0x27, 0x22, 0x20, 0x21, 0x23, 0x2b, 0x2d, 0x2d, 0x30, 0x31, 0x2e, + 0x2d, 0x32, 0x32, 0x32, 0x37, 0x3e, 0x3d, 0x3d, 0x3d, 0x40, 0x43, 0x48, + 0x45, 0x44, 0x46, 0x48, 0x4d, 0x4d, 0x4c, 0x4d, 0x4d, 0x4c, 0x4d, 0x4c, + 0x49, 0x47, 0x47, 0x4e, 0x51, 0x53, 0x56, 0x55, 0x57, 0x58, 0x57, 0x58, + 0x58, 0x59, 0x57, 0x58, 0x57, 0x58, 0x58, 0x59, 0x5b, 0x5d, 0x5d, 0x5d, + 0x5d, 0x5d, 0x5d, 0x5d, 0x5e, 0x5f, 0x5f, 0x61, 0x62, 0x62, 0x64, 0x65, + 0x67, 0x67, 0x66, 0x65, 0x64, 0x65, 0x64, 0x64, 0x64, 0x61, 0x5e, 0x5d, + 0x5b, 0x5a, 0x59, 0x5b, 0x5b, 0x56, 0x54, 0x54, 0x57, 0x58, 0x59, 0x58, + 0x58, 0x59, 0x59, 0x5a, 0x5a, 0x59, 0x58, 0x58, 0x59, 0x59, 0x58, 0x58, + 0x57, 0x57, 0x58, 0x5a, 0x5a, 0x5a, 0x5a, 0x5b, 0x5b, 0x5b, 0x5a, 0x5a, + 0x59, 0x5a, 0x59, 0x59, 0x59, 0x5a, 0x5b, 0x5b, 0x5a, 0x58, 0x58, 0x58, + 0x58, 0x57, 0x56, 0x53, 0x4e, 0x4d, 0x4a, 0x41, 0x3e, 0x3d, 0x39, 0x39, + 0x38, 0x37, 0x37, 0x36, 0xa7, 0xa7, 0xac, 0xa1, 0x9e, 0x9b, 0x99, 0x9d, + 0xa3, 0xa5, 0xa5, 0xaa, 0xaf, 0xb6, 0xba, 0xb0, 0xae, 0xb8, 0xbc, 0xb1, + 0xa5, 0xae, 0xeb, 0x07, 0x0c, 0x0c, 0x0c, 0x0d, 0x0d, 0x0d, 0x0e, 
0x10, + 0x0c, 0x08, 0x09, 0x0c, 0x0c, 0x07, 0x09, 0x0f, 0x12, 0x10, 0x0f, 0x0c, + 0x09, 0x0b, 0x08, 0x02, 0xfc, 0xfc, 0xfb, 0xf4, 0xf4, 0xfd, 0x00, 0x04, + 0x02, 0x02, 0x07, 0x0b, 0x0c, 0x0c, 0x10, 0x12, 0x15, 0x1d, 0x1e, 0x17, + 0x13, 0x17, 0x1b, 0x20, 0x21, 0x22, 0x20, 0x22, 0x22, 0x20, 0x1b, 0x1d, + 0x22, 0x27, 0x27, 0x28, 0x27, 0x2c, 0x2d, 0x2c, 0x2d, 0x32, 0x33, 0x37, + 0x39, 0x3d, 0x3d, 0x3d, 0x3e, 0x41, 0x44, 0x47, 0x45, 0x42, 0x46, 0x49, + 0x4a, 0x4b, 0x4d, 0x4e, 0x4e, 0x4f, 0x4f, 0x4c, 0x49, 0x4c, 0x47, 0x4a, + 0x4f, 0x51, 0x54, 0x55, 0x55, 0x54, 0x55, 0x56, 0x57, 0x56, 0x52, 0x53, + 0x55, 0x56, 0x57, 0x59, 0x5b, 0x5a, 0x5c, 0x5c, 0x5b, 0x5c, 0x5d, 0x5d, + 0x5d, 0x5e, 0x5e, 0x5f, 0x60, 0x61, 0x62, 0x63, 0x64, 0x66, 0x67, 0x67, + 0x66, 0x66, 0x64, 0x63, 0x63, 0x62, 0x5e, 0x5d, 0x5b, 0x5b, 0x5d, 0x5c, + 0x57, 0x55, 0x55, 0x55, 0x57, 0x58, 0x5b, 0x59, 0x57, 0x57, 0x58, 0x5a, + 0x5a, 0x59, 0x58, 0x57, 0x58, 0x58, 0x58, 0x57, 0x58, 0x58, 0x59, 0x59, + 0x5a, 0x5a, 0x59, 0x5b, 0x5b, 0x5a, 0x59, 0x5b, 0x5c, 0x5b, 0x5a, 0x5b, + 0x5a, 0x5b, 0x5c, 0x5b, 0x5a, 0x59, 0x58, 0x58, 0x57, 0x57, 0x57, 0x55, + 0x52, 0x50, 0x4d, 0x4a, 0x47, 0x46, 0x40, 0x3c, 0x38, 0x37, 0x34, 0x30, + 0xad, 0xa9, 0xac, 0xa7, 0xa7, 0xaf, 0xa1, 0x9c, 0x9f, 0xa0, 0xa9, 0xad, + 0xad, 0xb0, 0xb7, 0xad, 0xa7, 0xac, 0xa7, 0xa2, 0xa1, 0xb1, 0xf2, 0x07, + 0x0a, 0x0f, 0x0d, 0x10, 0x0f, 0x0d, 0x0c, 0x0b, 0x07, 0x06, 0x07, 0x08, + 0x08, 0x07, 0x0a, 0x0c, 0x0f, 0x12, 0x13, 0x10, 0x0c, 0x0b, 0x08, 0x04, + 0xfe, 0xfd, 0xfc, 0xf5, 0xf5, 0xfa, 0xfa, 0x02, 0xff, 0x02, 0x07, 0x07, + 0x07, 0x0c, 0x0e, 0x12, 0x16, 0x18, 0x12, 0x0d, 0x10, 0x17, 0x18, 0x19, + 0x1a, 0x1a, 0x1d, 0x1d, 0x18, 0x17, 0x1b, 0x23, 0x28, 0x26, 0x24, 0x21, + 0x23, 0x27, 0x2a, 0x2c, 0x2f, 0x31, 0x34, 0x38, 0x3a, 0x3a, 0x3c, 0x3a, + 0x3a, 0x3f, 0x44, 0x47, 0x45, 0x44, 0x47, 0x49, 0x4a, 0x4d, 0x4d, 0x4f, + 0x4d, 0x4f, 0x4f, 0x4e, 0x4d, 0x4d, 0x4d, 0x49, 0x4e, 0x51, 0x52, 0x54, + 0x54, 0x52, 0x53, 0x56, 0x57, 0x57, 0x57, 0x55, 0x56, 0x57, 0x55, 
0x56, + 0x58, 0x5b, 0x5a, 0x59, 0x58, 0x59, 0x59, 0x5c, 0x5d, 0x5d, 0x5c, 0x5d, + 0x5f, 0x60, 0x60, 0x61, 0x62, 0x65, 0x66, 0x67, 0x67, 0x67, 0x65, 0x64, + 0x63, 0x62, 0x60, 0x5e, 0x5d, 0x5d, 0x5d, 0x58, 0x57, 0x56, 0x54, 0x55, + 0x59, 0x5a, 0x5a, 0x57, 0x57, 0x59, 0x5a, 0x59, 0x59, 0x58, 0x58, 0x57, + 0x58, 0x57, 0x57, 0x57, 0x58, 0x57, 0x58, 0x59, 0x59, 0x5a, 0x5a, 0x5a, + 0x5b, 0x5b, 0x5b, 0x5c, 0x5c, 0x5c, 0x5b, 0x5b, 0x5b, 0x5c, 0x5c, 0x5a, + 0x59, 0x58, 0x57, 0x58, 0x58, 0x58, 0x57, 0x56, 0x55, 0x54, 0x54, 0x51, + 0x50, 0x4d, 0x4a, 0x47, 0x43, 0x41, 0x3d, 0x3b, 0xa7, 0xa9, 0xad, 0xac, + 0xb0, 0xc6, 0xb1, 0x9e, 0xa1, 0xa0, 0xa5, 0xac, 0xac, 0xa7, 0xa3, 0xab, + 0xac, 0xbb, 0xb1, 0x9b, 0xa2, 0xba, 0xef, 0x06, 0x07, 0x10, 0x0d, 0x0b, + 0x0c, 0x0e, 0x0c, 0x09, 0x08, 0x08, 0x07, 0x0c, 0x07, 0x07, 0x0b, 0x0a, + 0x0e, 0x14, 0x13, 0x11, 0x0c, 0x0b, 0x09, 0x06, 0xfc, 0xfa, 0xfa, 0xf4, + 0xf5, 0xfb, 0xfd, 0x00, 0xfc, 0x02, 0x06, 0x05, 0x06, 0x0c, 0x0c, 0x12, + 0x13, 0x0c, 0x08, 0x10, 0x14, 0x12, 0x13, 0x14, 0x15, 0x13, 0x14, 0x17, + 0x18, 0x1a, 0x1c, 0x22, 0x24, 0x1e, 0x1b, 0x21, 0x22, 0x27, 0x2c, 0x2f, + 0x30, 0x33, 0x38, 0x3b, 0x39, 0x3b, 0x38, 0x37, 0x3e, 0x43, 0x46, 0x47, + 0x47, 0x48, 0x47, 0x48, 0x4c, 0x4d, 0x4d, 0x4f, 0x4d, 0x4c, 0x4f, 0x4f, + 0x52, 0x4e, 0x4b, 0x49, 0x4c, 0x4f, 0x50, 0x52, 0x55, 0x55, 0x54, 0x55, + 0x57, 0x58, 0x58, 0x57, 0x54, 0x55, 0x56, 0x52, 0x53, 0x58, 0x5b, 0x57, + 0x54, 0x55, 0x57, 0x58, 0x59, 0x5b, 0x5a, 0x5c, 0x5d, 0x5d, 0x5d, 0x5e, + 0x5f, 0x62, 0x64, 0x65, 0x65, 0x64, 0x63, 0x62, 0x62, 0x62, 0x61, 0x60, + 0x60, 0x5e, 0x5c, 0x57, 0x56, 0x53, 0x54, 0x57, 0x5a, 0x5b, 0x59, 0x58, + 0x58, 0x5a, 0x5a, 0x59, 0x58, 0x57, 0x57, 0x58, 0x59, 0x59, 0x58, 0x58, + 0x58, 0x57, 0x58, 0x58, 0x59, 0x5b, 0x5b, 0x5b, 0x5b, 0x5b, 0x5a, 0x5b, + 0x5b, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5b, 0x59, 0x58, 0x59, 0x58, 0x57, + 0x57, 0x57, 0x57, 0x57, 0x55, 0x55, 0x53, 0x52, 0x52, 0x4f, 0x4e, 0x4b, + 0x48, 0x46, 0x43, 0x41, 0xa3, 0xa9, 0xaa, 0xa0, 0xa9, 0xb5, 0xac, 
0xa4, + 0xa1, 0xa5, 0xa3, 0xab, 0xae, 0xaa, 0x9a, 0x9c, 0xa3, 0xa9, 0xa8, 0xa1, + 0xa1, 0xb8, 0xee, 0x07, 0x0a, 0x12, 0x11, 0x0c, 0x0c, 0x0c, 0x09, 0x07, + 0x04, 0x07, 0x07, 0x09, 0x06, 0x07, 0x0d, 0x0f, 0x0c, 0x12, 0x14, 0x12, + 0x0e, 0x09, 0x08, 0x05, 0xfb, 0xf8, 0xf3, 0xf2, 0xf2, 0xf8, 0xfb, 0xfd, + 0xff, 0x02, 0x04, 0x07, 0x06, 0x09, 0x0b, 0x0e, 0x0d, 0x07, 0x07, 0x0e, + 0x0e, 0x11, 0x11, 0x11, 0x12, 0x13, 0x15, 0x1a, 0x1d, 0x17, 0x19, 0x1c, + 0x1d, 0x1c, 0x1c, 0x22, 0x23, 0x2b, 0x2f, 0x31, 0x32, 0x35, 0x36, 0x36, + 0x37, 0x37, 0x35, 0x39, 0x42, 0x43, 0x44, 0x44, 0x47, 0x49, 0x47, 0x48, + 0x4d, 0x4f, 0x4d, 0x4f, 0x50, 0x4d, 0x4c, 0x4c, 0x4f, 0x50, 0x4c, 0x48, + 0x4a, 0x4e, 0x4e, 0x50, 0x54, 0x57, 0x53, 0x54, 0x56, 0x56, 0x57, 0x58, + 0x57, 0x54, 0x56, 0x56, 0x52, 0x53, 0x57, 0x57, 0x57, 0x56, 0x55, 0x57, + 0x58, 0x59, 0x5a, 0x59, 0x59, 0x5b, 0x5b, 0x5b, 0x5d, 0x5f, 0x62, 0x62, + 0x60, 0x5f, 0x5f, 0x61, 0x62, 0x62, 0x61, 0x61, 0x62, 0x5e, 0x5c, 0x57, + 0x54, 0x53, 0x56, 0x59, 0x5b, 0x5c, 0x59, 0x58, 0x58, 0x5a, 0x59, 0x58, + 0x58, 0x56, 0x57, 0x57, 0x57, 0x58, 0x58, 0x58, 0x58, 0x57, 0x57, 0x58, + 0x59, 0x59, 0x59, 0x5a, 0x5a, 0x58, 0x58, 0x58, 0x58, 0x57, 0x58, 0x58, + 0x59, 0x5a, 0x5a, 0x59, 0x58, 0x59, 0x59, 0x58, 0x58, 0x57, 0x57, 0x57, + 0x57, 0x56, 0x54, 0x52, 0x52, 0x51, 0x4f, 0x4b, 0x47, 0x46, 0x40, 0x3d, + 0xa4, 0xab, 0xb3, 0xa3, 0x9d, 0xa1, 0xa1, 0xac, 0xa5, 0xb3, 0xb0, 0xb0, + 0xae, 0xad, 0x9e, 0xa9, 0xa2, 0xa3, 0xa2, 0xa6, 0xa9, 0xbf, 0xf3, 0x07, + 0x09, 0x11, 0x12, 0x0d, 0x09, 0x09, 0x05, 0x04, 0x05, 0x04, 0x05, 0x07, + 0x07, 0x07, 0x0b, 0x10, 0x11, 0x13, 0x14, 0x12, 0x0c, 0x0b, 0x09, 0x07, + 0xfd, 0xfc, 0xf6, 0xef, 0xf1, 0xf7, 0xf8, 0xfb, 0x00, 0x02, 0x05, 0x06, + 0x07, 0x07, 0x07, 0x08, 0x06, 0x03, 0x03, 0x0a, 0x0c, 0x0c, 0x0c, 0x0e, + 0x12, 0x15, 0x16, 0x15, 0x13, 0x12, 0x18, 0x1b, 0x1a, 0x1a, 0x1f, 0x22, + 0x27, 0x2d, 0x30, 0x2f, 0x32, 0x35, 0x33, 0x35, 0x36, 0x35, 0x35, 0x39, + 0x3d, 0x43, 0x42, 0x44, 0x47, 0x4a, 0x49, 0x4c, 0x4d, 0x4d, 0x4f, 
0x4f, + 0x50, 0x51, 0x4c, 0x47, 0x48, 0x4c, 0x4c, 0x4b, 0x47, 0x4f, 0x50, 0x4f, + 0x52, 0x54, 0x55, 0x54, 0x55, 0x57, 0x57, 0x58, 0x57, 0x56, 0x55, 0x57, + 0x57, 0x56, 0x56, 0x56, 0x58, 0x58, 0x56, 0x57, 0x57, 0x57, 0x5a, 0x5b, + 0x5a, 0x5b, 0x59, 0x59, 0x5d, 0x5e, 0x62, 0x62, 0x5d, 0x5d, 0x5d, 0x5c, + 0x5d, 0x5e, 0x5e, 0x5f, 0x60, 0x5f, 0x5e, 0x5b, 0x57, 0x56, 0x57, 0x59, + 0x59, 0x59, 0x59, 0x58, 0x58, 0x58, 0x58, 0x57, 0x57, 0x57, 0x57, 0x58, + 0x58, 0x58, 0x57, 0x57, 0x56, 0x58, 0x57, 0x58, 0x59, 0x59, 0x59, 0x5a, + 0x5a, 0x58, 0x58, 0x58, 0x58, 0x58, 0x58, 0x58, 0x58, 0x58, 0x57, 0x57, + 0x57, 0x57, 0x59, 0x59, 0x58, 0x58, 0x58, 0x59, 0x58, 0x57, 0x56, 0x53, + 0x53, 0x52, 0x51, 0x4d, 0x4a, 0x47, 0x42, 0x3f, 0xac, 0xaf, 0xb8, 0xb1, + 0x9c, 0x9c, 0xa8, 0xb7, 0xad, 0xb6, 0xaf, 0xad, 0xaa, 0xac, 0xa9, 0xb1, + 0xb2, 0xa1, 0x9e, 0xa8, 0xa8, 0xbe, 0xf5, 0x08, 0x09, 0x11, 0x12, 0x0b, + 0x0a, 0x09, 0x05, 0x04, 0x04, 0x04, 0x02, 0x06, 0x07, 0x09, 0x0c, 0x0e, + 0x0d, 0x0f, 0x12, 0x10, 0x0c, 0x08, 0x07, 0x05, 0xfc, 0xf7, 0xf2, 0xef, + 0xf1, 0xf6, 0xf6, 0xf8, 0xfb, 0x02, 0x04, 0x02, 0x03, 0x05, 0x06, 0x04, + 0xfe, 0xff, 0x04, 0x09, 0x09, 0x0c, 0x0f, 0x11, 0x11, 0x12, 0x0f, 0x0f, + 0x11, 0x16, 0x18, 0x17, 0x18, 0x1d, 0x21, 0x22, 0x27, 0x2b, 0x2e, 0x2d, + 0x32, 0x32, 0x32, 0x35, 0x35, 0x37, 0x37, 0x3d, 0x40, 0x42, 0x42, 0x44, + 0x47, 0x4a, 0x49, 0x4b, 0x4d, 0x4d, 0x50, 0x4f, 0x4f, 0x52, 0x4f, 0x49, + 0x47, 0x4b, 0x4d, 0x4c, 0x4b, 0x4e, 0x4e, 0x50, 0x52, 0x52, 0x52, 0x54, + 0x54, 0x55, 0x57, 0x57, 0x57, 0x57, 0x55, 0x56, 0x57, 0x58, 0x57, 0x57, + 0x56, 0x57, 0x57, 0x56, 0x57, 0x57, 0x57, 0x5a, 0x5a, 0x5b, 0x5c, 0x5b, + 0x5c, 0x5f, 0x61, 0x61, 0x5d, 0x5c, 0x5c, 0x5a, 0x59, 0x5b, 0x5c, 0x5d, + 0x5d, 0x5e, 0x60, 0x5e, 0x5b, 0x56, 0x57, 0x5a, 0x59, 0x58, 0x59, 0x58, + 0x57, 0x58, 0x56, 0x57, 0x57, 0x56, 0x57, 0x58, 0x58, 0x58, 0x58, 0x57, + 0x56, 0x56, 0x57, 0x57, 0x57, 0x58, 0x59, 0x59, 0x59, 0x58, 0x57, 0x57, + 0x57, 0x58, 0x59, 0x59, 0x58, 0x58, 0x58, 0x57, 0x58, 0x58, 0x58, 
0x59, + 0x58, 0x58, 0x59, 0x58, 0x57, 0x57, 0x56, 0x54, 0x53, 0x52, 0x50, 0x4d, + 0x4a, 0x47, 0x46, 0x44, 0xac, 0xa6, 0xaa, 0xb1, 0xa7, 0xa1, 0xa5, 0xc2, + 0xc8, 0xb8, 0xaf, 0xaa, 0xb1, 0xb1, 0xb3, 0xb9, 0xb0, 0xa2, 0xad, 0xaa, + 0xa7, 0xc2, 0xfc, 0x0b, 0x0a, 0x0e, 0x0d, 0x0c, 0x0d, 0x0a, 0x07, 0x04, + 0x04, 0x04, 0x04, 0x04, 0x07, 0x07, 0x0c, 0x0c, 0x0c, 0x0e, 0x0f, 0x0f, + 0x0c, 0x0b, 0x05, 0x04, 0xff, 0xf3, 0xf3, 0xf1, 0xf0, 0xf1, 0xef, 0xf2, + 0xfc, 0x01, 0x01, 0x00, 0x02, 0xff, 0xff, 0xfc, 0xfe, 0xff, 0xfd, 0x03, + 0x08, 0x0a, 0x0a, 0x0c, 0x0c, 0x0d, 0x0b, 0x0d, 0x12, 0x15, 0x14, 0x15, + 0x17, 0x1f, 0x22, 0x24, 0x28, 0x2c, 0x2a, 0x2d, 0x32, 0x31, 0x31, 0x33, + 0x33, 0x37, 0x39, 0x41, 0x43, 0x42, 0x44, 0x46, 0x47, 0x49, 0x49, 0x48, + 0x4b, 0x4c, 0x4e, 0x4d, 0x4e, 0x4e, 0x4f, 0x4c, 0x47, 0x48, 0x4d, 0x4d, + 0x4d, 0x4d, 0x4f, 0x51, 0x52, 0x51, 0x50, 0x54, 0x55, 0x54, 0x55, 0x57, + 0x57, 0x56, 0x55, 0x55, 0x57, 0x58, 0x58, 0x58, 0x57, 0x57, 0x57, 0x55, + 0x56, 0x57, 0x57, 0x58, 0x5a, 0x5a, 0x5c, 0x5c, 0x5c, 0x5f, 0x61, 0x5f, + 0x5c, 0x5b, 0x5a, 0x5a, 0x5a, 0x5b, 0x5b, 0x5b, 0x5b, 0x5d, 0x5f, 0x5e, + 0x5d, 0x59, 0x57, 0x5b, 0x59, 0x57, 0x57, 0x58, 0x57, 0x56, 0x54, 0x56, + 0x57, 0x55, 0x57, 0x57, 0x58, 0x58, 0x57, 0x55, 0x56, 0x54, 0x55, 0x55, + 0x56, 0x57, 0x58, 0x58, 0x58, 0x57, 0x57, 0x57, 0x57, 0x57, 0x58, 0x59, + 0x58, 0x59, 0x59, 0x59, 0x58, 0x58, 0x58, 0x59, 0x59, 0x59, 0x59, 0x59, + 0x58, 0x58, 0x56, 0x55, 0x53, 0x52, 0x52, 0x4f, 0x4d, 0x4b, 0x49, 0x42, + 0xb4, 0xad, 0xa8, 0xac, 0xb3, 0xb0, 0xa7, 0xbb, 0xc0, 0xb9, 0xc2, 0xb7, + 0xbb, 0xb4, 0xb1, 0xb2, 0xb1, 0xab, 0xaf, 0xa8, 0xa6, 0xcd, 0x06, 0x12, + 0x12, 0x10, 0x0d, 0x0d, 0x0c, 0x08, 0x07, 0x04, 0x04, 0x04, 0x03, 0x02, + 0x06, 0x09, 0x0a, 0x09, 0x0e, 0x0d, 0x0c, 0x0b, 0x0a, 0x0b, 0x04, 0x00, + 0xfe, 0xf1, 0xf2, 0xeb, 0xe9, 0xec, 0xeb, 0xef, 0xfc, 0xfe, 0xfd, 0xfc, + 0xfc, 0xfe, 0xf8, 0xf7, 0xf8, 0xf9, 0xfc, 0x07, 0x08, 0x07, 0x06, 0x08, + 0x07, 0x0b, 0x0a, 0x0c, 0x15, 0x14, 0x12, 0x14, 0x19, 0x22, 0x25, 
0x27, + 0x28, 0x27, 0x27, 0x2e, 0x30, 0x2e, 0x32, 0x33, 0x32, 0x39, 0x3d, 0x42, + 0x42, 0x42, 0x43, 0x43, 0x46, 0x47, 0x4a, 0x49, 0x49, 0x4c, 0x4d, 0x4d, + 0x4d, 0x4d, 0x4c, 0x4d, 0x48, 0x46, 0x48, 0x4d, 0x4f, 0x50, 0x50, 0x4f, + 0x51, 0x51, 0x50, 0x51, 0x54, 0x54, 0x56, 0x56, 0x55, 0x56, 0x56, 0x56, + 0x57, 0x58, 0x59, 0x59, 0x59, 0x58, 0x57, 0x56, 0x54, 0x57, 0x58, 0x57, + 0x59, 0x59, 0x5a, 0x5d, 0x5c, 0x5f, 0x60, 0x5f, 0x58, 0x59, 0x5a, 0x5a, + 0x5b, 0x5b, 0x5c, 0x5a, 0x59, 0x58, 0x5c, 0x5d, 0x5c, 0x5a, 0x59, 0x5a, + 0x58, 0x57, 0x57, 0x57, 0x57, 0x56, 0x56, 0x57, 0x58, 0x55, 0x55, 0x56, + 0x58, 0x58, 0x57, 0x54, 0x55, 0x56, 0x56, 0x55, 0x55, 0x54, 0x54, 0x55, + 0x58, 0x58, 0x57, 0x57, 0x58, 0x58, 0x57, 0x58, 0x59, 0x59, 0x57, 0x57, + 0x58, 0x58, 0x58, 0x58, 0x59, 0x59, 0x58, 0x58, 0x56, 0x57, 0x57, 0x56, + 0x55, 0x55, 0x55, 0x52, 0x50, 0x4d, 0x4b, 0x47, 0xaa, 0xae, 0xc4, 0xc0, + 0xb9, 0xb5, 0xb0, 0xb2, 0xb1, 0xbe, 0xc2, 0xbf, 0xba, 0xb5, 0xac, 0xb1, + 0xb1, 0xb1, 0xb9, 0xb9, 0xa1, 0xdf, 0x0f, 0x12, 0x15, 0x10, 0x0e, 0x0a, + 0x07, 0x08, 0x07, 0x07, 0x07, 0x07, 0x05, 0x01, 0x07, 0x09, 0x0a, 0x0a, + 0x0d, 0x0f, 0x0c, 0x09, 0x09, 0x0a, 0x05, 0xfc, 0xf9, 0xf0, 0xec, 0xe6, + 0xe2, 0xe7, 0xea, 0xed, 0xfa, 0xfb, 0xf7, 0xf8, 0xf8, 0xf7, 0xf4, 0xf5, + 0xf5, 0xf7, 0x01, 0x06, 0x03, 0xff, 0x01, 0x03, 0x06, 0x07, 0x07, 0x0e, + 0x0e, 0x0f, 0x11, 0x16, 0x1d, 0x24, 0x23, 0x23, 0x27, 0x25, 0x2a, 0x2f, + 0x2d, 0x2e, 0x32, 0x36, 0x33, 0x3c, 0x3e, 0x41, 0x41, 0x44, 0x43, 0x42, + 0x45, 0x47, 0x48, 0x48, 0x46, 0x4a, 0x4a, 0x4b, 0x4a, 0x4d, 0x4b, 0x4a, + 0x48, 0x47, 0x48, 0x4a, 0x4d, 0x4e, 0x4e, 0x4f, 0x50, 0x51, 0x51, 0x50, + 0x52, 0x53, 0x55, 0x56, 0x54, 0x55, 0x57, 0x57, 0x57, 0x57, 0x57, 0x59, + 0x59, 0x59, 0x58, 0x56, 0x54, 0x55, 0x57, 0x57, 0x57, 0x57, 0x58, 0x5c, + 0x5d, 0x5f, 0x5e, 0x5d, 0x57, 0x58, 0x5b, 0x5a, 0x5c, 0x5b, 0x5c, 0x5a, + 0x58, 0x57, 0x57, 0x5c, 0x5c, 0x5a, 0x5a, 0x5a, 0x57, 0x57, 0x57, 0x56, + 0x55, 0x55, 0x54, 0x54, 0x57, 0x55, 0x55, 0x56, 0x57, 0x57, 0x57, 
0x54, + 0x54, 0x55, 0x57, 0x55, 0x55, 0x54, 0x54, 0x55, 0x57, 0x57, 0x57, 0x57, + 0x57, 0x58, 0x57, 0x57, 0x57, 0x57, 0x57, 0x56, 0x56, 0x56, 0x56, 0x57, + 0x58, 0x57, 0x57, 0x57, 0x55, 0x57, 0x57, 0x57, 0x56, 0x55, 0x53, 0x53, + 0x51, 0x4f, 0x4c, 0x49, 0xa5, 0xac, 0xbb, 0xc3, 0xb7, 0xb0, 0xae, 0xad, + 0xa4, 0xab, 0xb6, 0xbc, 0xb7, 0xb1, 0xaa, 0xaf, 0xac, 0xaa, 0xb0, 0xb3, + 0xac, 0xf8, 0x15, 0x15, 0x17, 0x12, 0x0f, 0x07, 0x05, 0x09, 0x06, 0x07, + 0x06, 0x08, 0x06, 0x06, 0x07, 0x08, 0x08, 0x09, 0x0b, 0x0e, 0x0d, 0x08, + 0x07, 0x07, 0x04, 0xfb, 0xf7, 0xee, 0xed, 0xe3, 0xdc, 0xe0, 0xe8, 0xf1, + 0xfb, 0xf7, 0xf7, 0xf6, 0xf2, 0xef, 0xf1, 0xf2, 0xf2, 0xfa, 0xff, 0xfe, + 0xfd, 0x01, 0x01, 0xff, 0x03, 0x02, 0x08, 0x0e, 0x0e, 0x0c, 0x0f, 0x17, + 0x1f, 0x23, 0x1e, 0x20, 0x22, 0x21, 0x27, 0x2f, 0x2d, 0x30, 0x33, 0x37, + 0x36, 0x3c, 0x3c, 0x3d, 0x42, 0x43, 0x42, 0x41, 0x42, 0x45, 0x45, 0x47, + 0x45, 0x47, 0x47, 0x48, 0x46, 0x47, 0x4a, 0x47, 0x46, 0x49, 0x49, 0x48, + 0x4c, 0x4d, 0x4d, 0x4d, 0x4e, 0x50, 0x4f, 0x50, 0x52, 0x52, 0x54, 0x55, + 0x55, 0x56, 0x56, 0x57, 0x57, 0x57, 0x57, 0x59, 0x59, 0x59, 0x59, 0x58, + 0x55, 0x55, 0x57, 0x56, 0x57, 0x58, 0x58, 0x59, 0x5c, 0x5e, 0x5c, 0x5a, + 0x54, 0x56, 0x5b, 0x5a, 0x5c, 0x5b, 0x5c, 0x5b, 0x58, 0x57, 0x57, 0x5b, + 0x5b, 0x59, 0x58, 0x58, 0x57, 0x57, 0x57, 0x57, 0x53, 0x52, 0x51, 0x52, + 0x55, 0x55, 0x54, 0x54, 0x55, 0x57, 0x57, 0x55, 0x53, 0x55, 0x56, 0x56, + 0x57, 0x55, 0x56, 0x55, 0x53, 0x54, 0x56, 0x57, 0x57, 0x57, 0x58, 0x58, + 0x57, 0x57, 0x56, 0x57, 0x56, 0x55, 0x53, 0x54, 0x56, 0x57, 0x56, 0x57, + 0x57, 0x57, 0x57, 0x57, 0x56, 0x53, 0x53, 0x52, 0x51, 0x50, 0x4e, 0x4c, + 0xb2, 0xb6, 0xb1, 0xaf, 0xb1, 0xac, 0xb4, 0xbe, 0xb1, 0xa7, 0xac, 0xb5, + 0xb7, 0xac, 0xac, 0xac, 0xab, 0xb0, 0xbb, 0xaa, 0xb9, 0x05, 0x15, 0x11, + 0x12, 0x11, 0x0c, 0x0b, 0x07, 0x08, 0x06, 0x07, 0x07, 0x09, 0x07, 0x07, + 0x07, 0x07, 0x06, 0x09, 0x0a, 0x08, 0x0f, 0x0a, 0x07, 0x02, 0x02, 0xf9, + 0xf4, 0xf2, 0xe9, 0xd9, 0xd5, 0xde, 0xe6, 0xec, 0xf2, 0xf2, 0xf2, 
0xf1, + 0xec, 0xe9, 0xed, 0xed, 0xf5, 0xff, 0xfa, 0xfb, 0xff, 0x00, 0xfa, 0xf8, + 0xfc, 0x00, 0x09, 0x0a, 0x0c, 0x0a, 0x0f, 0x17, 0x20, 0x1e, 0x20, 0x22, + 0x1f, 0x1f, 0x28, 0x31, 0x2e, 0x32, 0x35, 0x37, 0x34, 0x3a, 0x3a, 0x3d, + 0x42, 0x41, 0x3e, 0x3f, 0x3f, 0x42, 0x43, 0x44, 0x43, 0x44, 0x44, 0x46, + 0x43, 0x45, 0x48, 0x47, 0x45, 0x44, 0x47, 0x47, 0x4a, 0x4c, 0x4d, 0x4c, + 0x4d, 0x4e, 0x4d, 0x50, 0x51, 0x52, 0x52, 0x54, 0x54, 0x53, 0x55, 0x55, + 0x57, 0x57, 0x57, 0x58, 0x58, 0x59, 0x59, 0x58, 0x55, 0x54, 0x57, 0x54, + 0x54, 0x56, 0x57, 0x58, 0x5a, 0x5d, 0x59, 0x57, 0x53, 0x57, 0x5b, 0x5b, + 0x5b, 0x5c, 0x5c, 0x5a, 0x58, 0x57, 0x57, 0x57, 0x58, 0x57, 0x56, 0x57, + 0x56, 0x56, 0x56, 0x55, 0x53, 0x52, 0x4d, 0x51, 0x53, 0x53, 0x51, 0x51, + 0x53, 0x55, 0x57, 0x56, 0x52, 0x52, 0x54, 0x55, 0x56, 0x55, 0x56, 0x56, + 0x53, 0x52, 0x53, 0x56, 0x56, 0x57, 0x57, 0x57, 0x57, 0x56, 0x55, 0x56, + 0x54, 0x52, 0x55, 0x54, 0x55, 0x57, 0x57, 0x57, 0x57, 0x56, 0x55, 0x54, + 0x55, 0x54, 0x53, 0x52, 0x52, 0x51, 0x4e, 0x4d, 0xb2, 0xae, 0xb1, 0xb5, + 0xb2, 0xab, 0xbe, 0xd6, 0xe3, 0xd1, 0xb3, 0xac, 0xb0, 0xaa, 0xa8, 0xae, + 0xaf, 0xac, 0xc0, 0xab, 0xc1, 0x0c, 0x17, 0x0f, 0x0d, 0x0d, 0x0c, 0x0a, + 0x07, 0x09, 0x08, 0x07, 0x08, 0x07, 0x07, 0x08, 0x07, 0x09, 0x06, 0x09, + 0x09, 0x06, 0x0c, 0x07, 0x02, 0x01, 0x00, 0xfd, 0xf5, 0xf2, 0xe5, 0xd5, + 0xd5, 0xdd, 0xe2, 0xe7, 0xef, 0xf0, 0xeb, 0xe2, 0xe2, 0xe5, 0xe9, 0xe9, + 0xf2, 0xf7, 0xf7, 0xfa, 0xff, 0xf8, 0xf1, 0xf6, 0xf8, 0x02, 0x09, 0x0b, + 0x09, 0x0c, 0x10, 0x19, 0x1d, 0x1a, 0x1f, 0x21, 0x1d, 0x22, 0x2a, 0x2d, + 0x2c, 0x30, 0x34, 0x36, 0x36, 0x39, 0x3a, 0x3b, 0x40, 0x3f, 0x39, 0x3d, + 0x3d, 0x3f, 0x42, 0x42, 0x40, 0x3f, 0x40, 0x42, 0x41, 0x42, 0x47, 0x49, + 0x45, 0x41, 0x45, 0x46, 0x47, 0x4a, 0x4d, 0x4b, 0x4a, 0x4c, 0x4d, 0x4f, + 0x4f, 0x52, 0x52, 0x53, 0x52, 0x53, 0x54, 0x54, 0x54, 0x57, 0x57, 0x58, + 0x58, 0x58, 0x59, 0x58, 0x54, 0x54, 0x56, 0x56, 0x54, 0x54, 0x56, 0x56, + 0x58, 0x5b, 0x57, 0x56, 0x54, 0x56, 0x5a, 0x5a, 0x5a, 0x5b, 0x5b, 
0x58, + 0x57, 0x58, 0x57, 0x55, 0x56, 0x55, 0x55, 0x55, 0x54, 0x53, 0x54, 0x54, + 0x52, 0x50, 0x4b, 0x50, 0x53, 0x53, 0x50, 0x4f, 0x52, 0x54, 0x56, 0x56, + 0x52, 0x52, 0x53, 0x53, 0x54, 0x55, 0x57, 0x56, 0x55, 0x53, 0x53, 0x54, + 0x53, 0x55, 0x55, 0x57, 0x56, 0x57, 0x56, 0x53, 0x53, 0x54, 0x53, 0x54, + 0x54, 0x57, 0x57, 0x57, 0x56, 0x55, 0x54, 0x54, 0x55, 0x54, 0x52, 0x52, + 0x52, 0x51, 0x4d, 0x4c, 0xb2, 0xa4, 0xad, 0xb1, 0xb6, 0xae, 0xc9, 0xcf, + 0xd0, 0xb9, 0xaa, 0xac, 0xac, 0xa7, 0xac, 0xb1, 0xab, 0xa6, 0xb1, 0xb3, + 0xce, 0x11, 0x1b, 0x13, 0x0f, 0x0e, 0x0d, 0x07, 0x07, 0x09, 0x09, 0x08, + 0x08, 0x08, 0x06, 0x07, 0x06, 0x0a, 0x07, 0x06, 0x07, 0x04, 0x05, 0x04, + 0xff, 0x00, 0xfc, 0xfc, 0xf3, 0xee, 0xdd, 0xd0, 0xd4, 0xd7, 0xd8, 0xe2, + 0xea, 0xe9, 0xe2, 0xdf, 0xdc, 0xe2, 0xe7, 0xea, 0xf1, 0xf4, 0xf6, 0xff, + 0xfb, 0xf1, 0xf2, 0xf2, 0xf5, 0xfe, 0x04, 0x07, 0x07, 0x0b, 0x10, 0x17, + 0x17, 0x1b, 0x1d, 0x1d, 0x20, 0x28, 0x2b, 0x2d, 0x2d, 0x2f, 0x34, 0x36, + 0x32, 0x35, 0x32, 0x36, 0x3c, 0x3d, 0x37, 0x3b, 0x3b, 0x3b, 0x3f, 0x3f, + 0x3a, 0x3d, 0x3e, 0x3f, 0x3f, 0x42, 0x44, 0x47, 0x44, 0x40, 0x43, 0x45, + 0x47, 0x48, 0x48, 0x4a, 0x49, 0x4a, 0x4d, 0x4f, 0x4c, 0x51, 0x50, 0x52, + 0x53, 0x54, 0x53, 0x53, 0x53, 0x57, 0x57, 0x57, 0x57, 0x57, 0x58, 0x58, + 0x54, 0x55, 0x57, 0x55, 0x53, 0x53, 0x54, 0x54, 0x57, 0x59, 0x55, 0x53, + 0x52, 0x55, 0x59, 0x59, 0x58, 0x58, 0x59, 0x57, 0x57, 0x58, 0x56, 0x52, + 0x54, 0x54, 0x54, 0x53, 0x52, 0x52, 0x51, 0x52, 0x51, 0x4d, 0x48, 0x4e, + 0x51, 0x52, 0x50, 0x4d, 0x50, 0x51, 0x52, 0x55, 0x51, 0x50, 0x52, 0x53, + 0x54, 0x54, 0x56, 0x55, 0x54, 0x54, 0x53, 0x54, 0x54, 0x55, 0x56, 0x57, + 0x56, 0x54, 0x52, 0x50, 0x50, 0x53, 0x52, 0x53, 0x53, 0x54, 0x56, 0x57, + 0x55, 0x55, 0x53, 0x54, 0x56, 0x53, 0x52, 0x52, 0x53, 0x52, 0x50, 0x4b, + 0xb1, 0xae, 0xa7, 0xa5, 0xa4, 0xa8, 0xb1, 0xb2, 0xb7, 0xac, 0xaa, 0xaa, + 0xae, 0xad, 0xac, 0xb3, 0xb4, 0xaa, 0xae, 0xb6, 0xc4, 0x17, 0x1e, 0x15, + 0x12, 0x10, 0x0f, 0x09, 0x07, 0x08, 0x09, 0x08, 0x08, 0x08, 0x07, 
0x07, + 0x06, 0x08, 0x05, 0x04, 0x06, 0x01, 0x02, 0x01, 0x00, 0xf7, 0xf8, 0xf9, + 0xf2, 0xea, 0xdc, 0xcd, 0xcc, 0xcd, 0xd9, 0xe1, 0xe0, 0xdd, 0xde, 0xd6, + 0xd7, 0xdf, 0xe3, 0xe7, 0xf0, 0xf2, 0xf7, 0xfe, 0xf6, 0xf2, 0xf3, 0xf2, + 0xf7, 0xfd, 0x01, 0x04, 0x06, 0x08, 0x12, 0x16, 0x12, 0x1b, 0x1d, 0x1e, + 0x22, 0x28, 0x2d, 0x2e, 0x2d, 0x2f, 0x33, 0x33, 0x30, 0x32, 0x2f, 0x32, + 0x36, 0x37, 0x33, 0x34, 0x38, 0x39, 0x3b, 0x3d, 0x37, 0x37, 0x3d, 0x3d, + 0x3a, 0x3d, 0x42, 0x46, 0x44, 0x40, 0x3f, 0x43, 0x45, 0x45, 0x46, 0x47, + 0x4b, 0x4b, 0x4c, 0x4e, 0x4d, 0x4f, 0x4f, 0x50, 0x51, 0x52, 0x52, 0x53, + 0x53, 0x55, 0x56, 0x56, 0x57, 0x57, 0x57, 0x57, 0x55, 0x54, 0x56, 0x54, + 0x52, 0x53, 0x54, 0x53, 0x55, 0x57, 0x55, 0x54, 0x52, 0x53, 0x57, 0x58, + 0x57, 0x57, 0x59, 0x56, 0x55, 0x57, 0x54, 0x51, 0x53, 0x52, 0x53, 0x52, + 0x51, 0x50, 0x52, 0x52, 0x50, 0x4c, 0x47, 0x4c, 0x4f, 0x50, 0x4d, 0x4d, + 0x4e, 0x51, 0x52, 0x53, 0x52, 0x50, 0x51, 0x51, 0x51, 0x52, 0x52, 0x54, + 0x54, 0x55, 0x55, 0x54, 0x53, 0x54, 0x56, 0x56, 0x56, 0x53, 0x52, 0x52, + 0x52, 0x50, 0x51, 0x53, 0x53, 0x53, 0x55, 0x57, 0x56, 0x56, 0x54, 0x52, + 0x55, 0x53, 0x51, 0x51, 0x50, 0x4e, 0x4d, 0x4a, 0xb2, 0xb9, 0xaa, 0xaa, + 0xae, 0xaf, 0xac, 0xaf, 0xba, 0xab, 0xa7, 0xab, 0xb1, 0xad, 0xa6, 0xb9, + 0xb7, 0xac, 0xbc, 0xb8, 0xb5, 0x1b, 0x21, 0x11, 0x12, 0x0f, 0x09, 0x07, + 0x06, 0x06, 0x07, 0x07, 0x0c, 0x08, 0x07, 0x08, 0x0a, 0x07, 0x05, 0x01, + 0x00, 0x00, 0xff, 0xfc, 0xfc, 0xf7, 0xf4, 0xf7, 0xf2, 0xe4, 0xdc, 0xcc, + 0xc9, 0xd1, 0xdc, 0xd5, 0xd6, 0xd6, 0xd1, 0xd1, 0xd7, 0xdc, 0xdf, 0xeb, + 0xf0, 0xf1, 0xf6, 0xf7, 0xef, 0xf1, 0xec, 0xf0, 0xf4, 0xf9, 0xff, 0x02, + 0x05, 0x06, 0x0e, 0x12, 0x12, 0x1a, 0x1c, 0x1f, 0x26, 0x29, 0x2d, 0x2d, + 0x2d, 0x2e, 0x30, 0x30, 0x2e, 0x30, 0x2d, 0x2c, 0x2f, 0x32, 0x32, 0x31, + 0x34, 0x37, 0x39, 0x37, 0x35, 0x36, 0x3a, 0x3d, 0x39, 0x39, 0x3d, 0x42, + 0x44, 0x3f, 0x3e, 0x41, 0x44, 0x46, 0x47, 0x45, 0x48, 0x4a, 0x47, 0x49, + 0x4b, 0x50, 0x4e, 0x4f, 0x51, 0x52, 0x52, 0x52, 0x52, 0x53, 0x54, 
0x54, + 0x56, 0x56, 0x57, 0x57, 0x55, 0x52, 0x56, 0x53, 0x52, 0x54, 0x53, 0x53, + 0x53, 0x52, 0x53, 0x54, 0x52, 0x52, 0x57, 0x59, 0x57, 0x56, 0x57, 0x54, + 0x54, 0x56, 0x53, 0x51, 0x52, 0x50, 0x51, 0x4f, 0x4d, 0x4d, 0x4f, 0x4e, + 0x4c, 0x47, 0x43, 0x48, 0x4e, 0x4e, 0x4c, 0x4d, 0x4d, 0x4f, 0x51, 0x52, + 0x51, 0x4f, 0x4f, 0x4e, 0x4f, 0x50, 0x50, 0x52, 0x53, 0x53, 0x55, 0x54, + 0x53, 0x52, 0x55, 0x55, 0x55, 0x55, 0x53, 0x53, 0x52, 0x52, 0x51, 0x52, + 0x54, 0x52, 0x53, 0x54, 0x56, 0x55, 0x53, 0x52, 0x51, 0x50, 0x4e, 0x4d, + 0x4d, 0x4d, 0x4a, 0x47, 0xb3, 0xb7, 0xa7, 0xab, 0xaf, 0xaf, 0xb1, 0xbc, + 0xb5, 0xab, 0xac, 0xb0, 0xb5, 0xb1, 0xaa, 0xb1, 0xae, 0xb6, 0xb7, 0xab, + 0xac, 0x11, 0x22, 0x0f, 0x0f, 0x10, 0x09, 0x07, 0x07, 0x05, 0x07, 0x05, + 0x0b, 0x0b, 0x09, 0x0c, 0x09, 0x06, 0x05, 0x00, 0xfb, 0x00, 0x00, 0xfb, + 0xf9, 0xf7, 0xf0, 0xf0, 0xed, 0xe1, 0xdc, 0xcc, 0xce, 0xd3, 0xd2, 0xcc, + 0xd4, 0xca, 0xc8, 0xd3, 0xd6, 0xd9, 0xe0, 0xec, 0xee, 0xf0, 0xf7, 0xf1, + 0xec, 0xec, 0xeb, 0xed, 0xee, 0xf6, 0xfc, 0xfe, 0x00, 0x05, 0x0f, 0x0d, + 0x12, 0x1c, 0x1e, 0x1f, 0x25, 0x2b, 0x2d, 0x2b, 0x2b, 0x2c, 0x2e, 0x2e, + 0x29, 0x29, 0x28, 0x24, 0x26, 0x2d, 0x2d, 0x2e, 0x2e, 0x32, 0x35, 0x34, + 0x36, 0x37, 0x37, 0x38, 0x37, 0x37, 0x39, 0x3b, 0x3e, 0x3e, 0x3b, 0x3f, + 0x44, 0x45, 0x46, 0x44, 0x46, 0x47, 0x47, 0x45, 0x4a, 0x4f, 0x4d, 0x50, + 0x51, 0x50, 0x51, 0x51, 0x51, 0x51, 0x52, 0x54, 0x57, 0x57, 0x58, 0x57, + 0x55, 0x51, 0x54, 0x53, 0x52, 0x54, 0x51, 0x52, 0x54, 0x51, 0x52, 0x55, + 0x53, 0x52, 0x57, 0x57, 0x55, 0x57, 0x53, 0x53, 0x53, 0x54, 0x52, 0x51, + 0x51, 0x4f, 0x4f, 0x4d, 0x4b, 0x4b, 0x4d, 0x4a, 0x48, 0x43, 0x42, 0x47, + 0x4d, 0x4d, 0x4a, 0x4a, 0x4b, 0x4d, 0x4e, 0x4f, 0x4f, 0x4d, 0x4e, 0x4d, + 0x4d, 0x4f, 0x4d, 0x4f, 0x52, 0x52, 0x55, 0x55, 0x54, 0x52, 0x53, 0x53, + 0x52, 0x53, 0x53, 0x54, 0x52, 0x50, 0x50, 0x52, 0x52, 0x52, 0x52, 0x53, + 0x56, 0x54, 0x53, 0x52, 0x51, 0x50, 0x4e, 0x4e, 0x4d, 0x4a, 0x48, 0x47, + 0xb7, 0xb6, 0xa8, 0xac, 0xa9, 0xa8, 0xac, 0xac, 0xa9, 0xaa, 0xac, 
0xa8, + 0xaa, 0xae, 0xac, 0xb2, 0xba, 0xbb, 0xa3, 0xa6, 0xb0, 0x00, 0x1e, 0x0e, + 0x0c, 0x0b, 0x07, 0x07, 0x08, 0x08, 0x07, 0x05, 0x0b, 0x0c, 0x07, 0x08, + 0x04, 0x02, 0xff, 0xfc, 0xf3, 0xf4, 0xfb, 0xf6, 0xf4, 0xf2, 0xee, 0xea, + 0xeb, 0xe5, 0xda, 0xd1, 0xd2, 0xce, 0xc6, 0xcb, 0xd1, 0xc9, 0xc4, 0xcf, + 0xd0, 0xd6, 0xe4, 0xec, 0xea, 0xf0, 0xf1, 0xeb, 0xeb, 0xea, 0xeb, 0xeb, + 0xe9, 0xf2, 0xf6, 0xf9, 0xfb, 0x02, 0x0d, 0x0a, 0x0f, 0x19, 0x1e, 0x1f, + 0x24, 0x29, 0x2b, 0x29, 0x26, 0x27, 0x29, 0x27, 0x21, 0x1d, 0x21, 0x20, + 0x22, 0x26, 0x28, 0x29, 0x2a, 0x2c, 0x2f, 0x31, 0x33, 0x37, 0x36, 0x36, + 0x37, 0x3a, 0x37, 0x39, 0x3b, 0x3c, 0x38, 0x3e, 0x42, 0x43, 0x42, 0x43, + 0x43, 0x45, 0x49, 0x47, 0x48, 0x4b, 0x4b, 0x4d, 0x4f, 0x4f, 0x50, 0x51, + 0x51, 0x4f, 0x51, 0x53, 0x57, 0x57, 0x57, 0x56, 0x54, 0x50, 0x52, 0x52, + 0x52, 0x53, 0x51, 0x53, 0x54, 0x52, 0x53, 0x55, 0x52, 0x50, 0x56, 0x56, + 0x53, 0x55, 0x51, 0x52, 0x52, 0x52, 0x50, 0x50, 0x4e, 0x4d, 0x4d, 0x4c, + 0x4a, 0x49, 0x48, 0x46, 0x45, 0x42, 0x3e, 0x43, 0x49, 0x4c, 0x47, 0x47, + 0x4a, 0x4b, 0x4c, 0x4e, 0x4d, 0x4d, 0x4c, 0x4d, 0x4b, 0x4e, 0x4d, 0x4c, + 0x50, 0x52, 0x52, 0x52, 0x52, 0x52, 0x51, 0x51, 0x50, 0x52, 0x52, 0x51, + 0x4f, 0x51, 0x52, 0x50, 0x50, 0x51, 0x52, 0x52, 0x53, 0x53, 0x52, 0x51, + 0x52, 0x51, 0x50, 0x4f, 0x4c, 0x4b, 0x4c, 0x46, 0xb1, 0xac, 0xa3, 0xae, + 0xa8, 0xa1, 0xa1, 0xa1, 0xa4, 0xa7, 0xa4, 0xa1, 0xa0, 0xa3, 0xae, 0xbf, + 0xc7, 0xb1, 0xa5, 0xa3, 0xa7, 0xf1, 0x1d, 0x0e, 0x09, 0x06, 0x09, 0x0c, + 0x07, 0x07, 0x05, 0x05, 0x0a, 0x0a, 0x05, 0x04, 0x04, 0x03, 0xfd, 0xfb, + 0xf5, 0xee, 0xf0, 0xf2, 0xed, 0xef, 0xec, 0xe7, 0xe6, 0xe4, 0xdc, 0xd3, + 0xc6, 0xc6, 0xbc, 0xbf, 0xc4, 0xc9, 0xc5, 0xcd, 0xce, 0xd6, 0xe3, 0xe9, + 0xe8, 0xec, 0xea, 0xe9, 0xeb, 0xe7, 0xe6, 0xe7, 0xe9, 0xf2, 0xf7, 0xfc, + 0xfa, 0x03, 0x09, 0x07, 0x0e, 0x17, 0x1d, 0x1f, 0x20, 0x24, 0x27, 0x22, + 0x21, 0x22, 0x22, 0x1e, 0x17, 0x15, 0x16, 0x14, 0x1b, 0x25, 0x23, 0x22, + 0x27, 0x2b, 0x2d, 0x2e, 0x2d, 0x35, 0x36, 0x32, 0x34, 0x3a, 0x37, 
0x37, + 0x3b, 0x39, 0x37, 0x3d, 0x42, 0x42, 0x40, 0x41, 0x42, 0x43, 0x47, 0x47, + 0x47, 0x47, 0x48, 0x4b, 0x4f, 0x4d, 0x4d, 0x4f, 0x50, 0x4e, 0x50, 0x52, + 0x55, 0x56, 0x56, 0x55, 0x54, 0x4e, 0x51, 0x52, 0x52, 0x52, 0x51, 0x53, + 0x54, 0x52, 0x53, 0x56, 0x51, 0x4e, 0x55, 0x55, 0x52, 0x53, 0x4f, 0x52, + 0x50, 0x50, 0x4d, 0x4a, 0x49, 0x45, 0x43, 0x47, 0x46, 0x47, 0x48, 0x45, + 0x43, 0x40, 0x3d, 0x42, 0x47, 0x49, 0x43, 0x44, 0x48, 0x4a, 0x4b, 0x4c, + 0x4b, 0x47, 0x49, 0x4d, 0x4b, 0x4b, 0x4c, 0x49, 0x4d, 0x4e, 0x4e, 0x4e, + 0x4f, 0x4e, 0x4d, 0x4d, 0x50, 0x51, 0x4e, 0x4b, 0x4d, 0x50, 0x50, 0x4e, + 0x4f, 0x50, 0x52, 0x52, 0x54, 0x53, 0x51, 0x51, 0x52, 0x52, 0x52, 0x52, + 0x4f, 0x4d, 0x4d, 0x4b, 0xb6, 0xb0, 0xa1, 0xa5, 0xa7, 0x9f, 0x9c, 0x9d, + 0xa7, 0xa7, 0xa6, 0xa4, 0xa9, 0xa1, 0xab, 0xba, 0xb6, 0xab, 0xa6, 0xa6, + 0xa2, 0xeb, 0x23, 0x11, 0x0f, 0x0e, 0x0c, 0x0c, 0x07, 0x07, 0x05, 0x05, + 0x07, 0x08, 0x04, 0x05, 0x00, 0x03, 0xfd, 0xf5, 0xf4, 0xef, 0xe9, 0xe6, + 0xeb, 0xed, 0xec, 0xe5, 0xe4, 0xe2, 0xd6, 0xc8, 0xb7, 0xb8, 0xb1, 0xbc, + 0xc0, 0xc6, 0xc6, 0xcb, 0xd3, 0xd8, 0xe3, 0xe7, 0xe2, 0xe5, 0xe3, 0xe3, + 0xe3, 0xe2, 0xe2, 0xe1, 0xe5, 0xec, 0xf5, 0xf7, 0xf7, 0x02, 0x07, 0x04, + 0x0d, 0x16, 0x1d, 0x1f, 0x20, 0x22, 0x24, 0x20, 0x1b, 0x1f, 0x19, 0x13, + 0x0d, 0x0c, 0x0c, 0x07, 0x0d, 0x1d, 0x20, 0x1d, 0x22, 0x29, 0x29, 0x2c, + 0x2b, 0x31, 0x35, 0x2f, 0x30, 0x38, 0x36, 0x36, 0x3a, 0x37, 0x37, 0x3a, + 0x41, 0x43, 0x40, 0x3f, 0x40, 0x42, 0x3f, 0x44, 0x45, 0x46, 0x47, 0x48, + 0x4c, 0x4b, 0x49, 0x4c, 0x4f, 0x4f, 0x4e, 0x4e, 0x52, 0x53, 0x55, 0x55, + 0x52, 0x52, 0x52, 0x55, 0x53, 0x53, 0x51, 0x54, 0x52, 0x52, 0x55, 0x54, + 0x4f, 0x4e, 0x53, 0x54, 0x52, 0x52, 0x4f, 0x4f, 0x4e, 0x4f, 0x47, 0x42, + 0x45, 0x3d, 0x3e, 0x40, 0x42, 0x45, 0x47, 0x46, 0x41, 0x3b, 0x3c, 0x41, + 0x47, 0x47, 0x42, 0x41, 0x46, 0x47, 0x49, 0x48, 0x48, 0x44, 0x45, 0x48, + 0x49, 0x4a, 0x4b, 0x49, 0x49, 0x4c, 0x4e, 0x4c, 0x4d, 0x4c, 0x4b, 0x4a, + 0x4d, 0x4f, 0x4d, 0x4c, 0x4c, 0x4e, 0x4f, 0x4d, 0x4e, 0x4e, 0x52, 
0x51, + 0x51, 0x52, 0x50, 0x51, 0x52, 0x50, 0x52, 0x51, 0x50, 0x4e, 0x4b, 0x48, + 0xab, 0xb1, 0xae, 0xa7, 0xa3, 0xa3, 0x9d, 0x99, 0xa1, 0xa8, 0xa8, 0xac, + 0xb2, 0xab, 0xae, 0xb1, 0xab, 0xa6, 0xa0, 0xa6, 0xa9, 0xed, 0x11, 0x0d, + 0x19, 0x17, 0x12, 0x0d, 0x0b, 0x07, 0x07, 0x05, 0x04, 0x03, 0x00, 0x05, + 0xfe, 0x02, 0xfc, 0xf0, 0xf1, 0xed, 0xe4, 0xe0, 0xe4, 0xe7, 0xe9, 0xe6, + 0xde, 0xda, 0xcf, 0xc5, 0xb9, 0xb9, 0xb4, 0xb7, 0xbd, 0xc1, 0xc3, 0xcc, + 0xd1, 0xd5, 0xdc, 0xe1, 0xdc, 0xe2, 0xdf, 0xde, 0xe1, 0xde, 0xdd, 0xdf, + 0xe2, 0xe7, 0xf1, 0xf2, 0xf1, 0xfb, 0x02, 0x03, 0x0b, 0x13, 0x1b, 0x1c, + 0x1d, 0x1d, 0x1e, 0x1d, 0x12, 0x19, 0x16, 0x0b, 0x04, 0xfc, 0x01, 0xff, + 0x02, 0x10, 0x1b, 0x1a, 0x1d, 0x23, 0x23, 0x2a, 0x2a, 0x2d, 0x31, 0x30, + 0x2d, 0x34, 0x36, 0x32, 0x34, 0x37, 0x37, 0x38, 0x3c, 0x42, 0x42, 0x3f, + 0x40, 0x3f, 0x3b, 0x40, 0x42, 0x42, 0x44, 0x48, 0x48, 0x4a, 0x49, 0x4a, + 0x4d, 0x4d, 0x4d, 0x4e, 0x52, 0x52, 0x53, 0x55, 0x51, 0x52, 0x52, 0x54, + 0x53, 0x53, 0x52, 0x52, 0x52, 0x52, 0x54, 0x54, 0x4f, 0x4f, 0x54, 0x54, + 0x50, 0x50, 0x50, 0x4c, 0x4c, 0x47, 0x3d, 0x37, 0x3c, 0x36, 0x39, 0x3e, + 0x40, 0x43, 0x45, 0x45, 0x3e, 0x38, 0x39, 0x40, 0x45, 0x44, 0x42, 0x41, + 0x43, 0x45, 0x44, 0x47, 0x46, 0x42, 0x41, 0x42, 0x44, 0x48, 0x48, 0x47, + 0x47, 0x48, 0x4a, 0x4c, 0x4d, 0x4a, 0x4a, 0x4b, 0x4d, 0x4d, 0x4a, 0x4c, + 0x4c, 0x4c, 0x4b, 0x4a, 0x4c, 0x4d, 0x4f, 0x4f, 0x4d, 0x4e, 0x4f, 0x51, + 0x4e, 0x4d, 0x50, 0x4f, 0x50, 0x4e, 0x4b, 0x49, 0xa2, 0xa6, 0xaf, 0xb6, + 0xb2, 0xa9, 0xb0, 0x9a, 0x9c, 0xa4, 0xaa, 0xb6, 0xb8, 0xc0, 0xba, 0xa7, + 0xa5, 0xa7, 0xb1, 0xbc, 0xb6, 0xd2, 0x10, 0x21, 0x1f, 0x1d, 0x17, 0x11, + 0x0c, 0x06, 0x06, 0x06, 0x02, 0xff, 0xfb, 0xfe, 0xfb, 0xfa, 0xf7, 0xee, + 0xea, 0xe2, 0xde, 0xda, 0xdd, 0xdd, 0xe2, 0xe2, 0xdc, 0xd5, 0xcc, 0xc7, + 0xc7, 0xbd, 0xbf, 0xb7, 0xb5, 0xbd, 0xbf, 0xca, 0xd1, 0xd1, 0xd3, 0xd6, + 0xda, 0xdc, 0xd9, 0xda, 0xdc, 0xd9, 0xd4, 0xda, 0xdd, 0xe6, 0xef, 0xf1, + 0xee, 0xfc, 0x01, 0x03, 0x0d, 0x15, 0x19, 0x19, 0x1a, 0x16, 0x16, 
0x13, + 0x0f, 0x0f, 0x15, 0x0b, 0x02, 0xf8, 0xf8, 0x00, 0x02, 0x08, 0x14, 0x1a, + 0x1a, 0x1d, 0x22, 0x27, 0x24, 0x26, 0x25, 0x2a, 0x27, 0x2d, 0x32, 0x30, + 0x2e, 0x34, 0x37, 0x36, 0x3a, 0x40, 0x42, 0x3d, 0x3d, 0x3e, 0x3d, 0x3e, + 0x3f, 0x40, 0x3d, 0x43, 0x46, 0x47, 0x48, 0x47, 0x4a, 0x4c, 0x4d, 0x4d, + 0x4e, 0x50, 0x51, 0x55, 0x51, 0x51, 0x53, 0x53, 0x52, 0x52, 0x53, 0x53, + 0x52, 0x53, 0x53, 0x54, 0x4e, 0x51, 0x54, 0x52, 0x4e, 0x4d, 0x4d, 0x48, + 0x45, 0x3f, 0x37, 0x37, 0x36, 0x31, 0x33, 0x38, 0x3d, 0x42, 0x43, 0x42, + 0x3b, 0x36, 0x38, 0x3d, 0x41, 0x43, 0x42, 0x3e, 0x42, 0x42, 0x41, 0x45, + 0x45, 0x40, 0x3c, 0x3d, 0x42, 0x47, 0x46, 0x44, 0x45, 0x45, 0x47, 0x48, + 0x48, 0x47, 0x47, 0x47, 0x47, 0x47, 0x49, 0x4d, 0x4d, 0x4d, 0x4c, 0x49, + 0x4c, 0x4c, 0x4d, 0x4c, 0x4d, 0x4d, 0x4e, 0x4f, 0x4d, 0x4c, 0x4d, 0x4d, + 0x4e, 0x4a, 0x48, 0x4a, 0xa8, 0x9c, 0xad, 0xd0, 0xd4, 0xb8, 0xab, 0x9e, + 0xa0, 0xa9, 0xad, 0xbc, 0xb8, 0xb5, 0xac, 0xa1, 0xa4, 0xae, 0xb1, 0xc2, + 0xc0, 0x07, 0x2d, 0x23, 0x1d, 0x1d, 0x19, 0x12, 0x0d, 0x09, 0x06, 0x06, + 0x02, 0xfd, 0xf8, 0xf0, 0xf4, 0xf7, 0xf3, 0xec, 0xe2, 0xdd, 0xd4, 0xd2, + 0xd4, 0xd7, 0xdc, 0xdc, 0xdb, 0xd4, 0xcc, 0xc5, 0xc2, 0xbb, 0xb1, 0xab, + 0xb2, 0xbe, 0xc2, 0xca, 0xcc, 0xd0, 0xcc, 0xce, 0xd5, 0xd7, 0xd4, 0xd2, + 0xd7, 0xd4, 0xd2, 0xd8, 0xdd, 0xe4, 0xee, 0xec, 0xe9, 0xf4, 0xfc, 0x04, + 0x0e, 0x13, 0x17, 0x17, 0x16, 0x13, 0x12, 0x11, 0x0c, 0x09, 0x0e, 0x07, + 0x02, 0xfe, 0xfb, 0x00, 0x06, 0x0a, 0x11, 0x1a, 0x1d, 0x19, 0x1d, 0x21, + 0x1f, 0x22, 0x21, 0x26, 0x25, 0x26, 0x2e, 0x32, 0x2d, 0x31, 0x34, 0x33, + 0x37, 0x3d, 0x41, 0x3c, 0x3c, 0x3d, 0x3d, 0x3c, 0x3d, 0x3d, 0x3b, 0x3f, + 0x44, 0x45, 0x47, 0x46, 0x47, 0x49, 0x4c, 0x4b, 0x4d, 0x50, 0x51, 0x53, + 0x52, 0x4f, 0x53, 0x53, 0x52, 0x54, 0x55, 0x53, 0x54, 0x54, 0x52, 0x52, + 0x50, 0x51, 0x52, 0x4f, 0x4d, 0x4b, 0x4c, 0x46, 0x3f, 0x3a, 0x38, 0x37, + 0x33, 0x32, 0x30, 0x35, 0x3d, 0x42, 0x41, 0x41, 0x38, 0x32, 0x37, 0x39, + 0x38, 0x3e, 0x3e, 0x3d, 0x3d, 0x3d, 0x3d, 0x3f, 0x41, 0x38, 0x39, 
0x3d, + 0x3f, 0x44, 0x46, 0x42, 0x41, 0x42, 0x43, 0x44, 0x46, 0x46, 0x44, 0x43, + 0x44, 0x46, 0x47, 0x49, 0x49, 0x49, 0x48, 0x47, 0x49, 0x48, 0x4a, 0x4a, + 0x4a, 0x4a, 0x4d, 0x4d, 0x4d, 0x4d, 0x4e, 0x4c, 0x4c, 0x4d, 0x49, 0x47, + 0xbf, 0xa7, 0xa4, 0xaf, 0xb7, 0xbb, 0xac, 0xa8, 0xa7, 0xaa, 0xb1, 0xba, + 0xb3, 0xac, 0xa3, 0xa3, 0xa7, 0xab, 0xb1, 0xbb, 0xc8, 0x21, 0x2b, 0x22, + 0x18, 0x14, 0x11, 0x0f, 0x09, 0x06, 0x02, 0x07, 0x03, 0x01, 0xf3, 0xe3, + 0xe9, 0xee, 0xec, 0xe4, 0xd6, 0xd5, 0xd4, 0xcc, 0xc9, 0xd6, 0xd6, 0xd8, + 0xd6, 0xd2, 0xc3, 0xc0, 0xc1, 0xbc, 0xb4, 0xb2, 0xb6, 0xc0, 0xc2, 0xc5, + 0xc7, 0xc7, 0xc4, 0xc7, 0xcf, 0xd0, 0xcb, 0xd2, 0xd3, 0xd1, 0xd0, 0xd2, + 0xdb, 0xe8, 0xec, 0xe9, 0xec, 0xf5, 0xf9, 0x01, 0x0f, 0x13, 0x12, 0x12, + 0x0c, 0x0c, 0x0d, 0x0c, 0x07, 0x07, 0x07, 0x02, 0x03, 0x01, 0x01, 0x02, + 0x06, 0x0a, 0x11, 0x1b, 0x1b, 0x17, 0x16, 0x19, 0x1e, 0x20, 0x1f, 0x22, + 0x25, 0x28, 0x2b, 0x31, 0x2d, 0x30, 0x34, 0x33, 0x34, 0x36, 0x3a, 0x3a, + 0x39, 0x3c, 0x41, 0x3c, 0x3c, 0x3d, 0x39, 0x37, 0x3d, 0x41, 0x45, 0x44, + 0x43, 0x48, 0x48, 0x49, 0x48, 0x4c, 0x4f, 0x52, 0x52, 0x50, 0x52, 0x52, + 0x52, 0x54, 0x54, 0x52, 0x51, 0x52, 0x51, 0x51, 0x50, 0x50, 0x51, 0x4f, + 0x4c, 0x47, 0x48, 0x42, 0x3c, 0x37, 0x37, 0x34, 0x32, 0x31, 0x32, 0x37, + 0x3d, 0x3f, 0x3d, 0x3e, 0x33, 0x2d, 0x33, 0x36, 0x33, 0x37, 0x38, 0x37, + 0x3a, 0x39, 0x3b, 0x3a, 0x3e, 0x3c, 0x37, 0x39, 0x3c, 0x3f, 0x40, 0x3e, + 0x3d, 0x3e, 0x3f, 0x40, 0x40, 0x41, 0x42, 0x40, 0x41, 0x42, 0x44, 0x45, + 0x46, 0x43, 0x43, 0x45, 0x46, 0x46, 0x47, 0x48, 0x49, 0x48, 0x4b, 0x4b, + 0x49, 0x4a, 0x4b, 0x4a, 0x4a, 0x4b, 0x47, 0x47, 0xaa, 0xbe, 0xa7, 0xa3, + 0xad, 0xb4, 0xb2, 0xa1, 0xa0, 0xa2, 0xae, 0xb7, 0xaf, 0xac, 0xa4, 0xa7, + 0xa3, 0xa4, 0xaa, 0xba, 0xc6, 0x16, 0x24, 0x19, 0x10, 0x02, 0xf1, 0x04, + 0x05, 0x01, 0xfc, 0x05, 0x04, 0xfe, 0xf0, 0xde, 0xd5, 0xdb, 0xe7, 0xe2, + 0xce, 0xc6, 0xd2, 0xcf, 0xcc, 0xcc, 0xca, 0xcf, 0xcd, 0xca, 0xbf, 0xbc, + 0xb0, 0xae, 0xac, 0xb7, 0xba, 0xb8, 0xbe, 0xbc, 0xc5, 0xbd, 0xb9, 
0xc4, + 0xcc, 0xc5, 0xc6, 0xcd, 0xca, 0xcd, 0xcc, 0xce, 0xdc, 0xe2, 0xe2, 0xe4, + 0xe9, 0xf0, 0xf6, 0x03, 0x11, 0x10, 0x12, 0x0f, 0x07, 0x07, 0x09, 0x04, + 0xf8, 0x00, 0x02, 0xfd, 0x02, 0xfc, 0xff, 0x02, 0x02, 0x06, 0x0c, 0x16, + 0x17, 0x13, 0x13, 0x17, 0x1a, 0x1f, 0x1d, 0x20, 0x26, 0x28, 0x2b, 0x29, + 0x2b, 0x2d, 0x32, 0x36, 0x36, 0x32, 0x35, 0x37, 0x3a, 0x38, 0x3e, 0x3d, + 0x3a, 0x3d, 0x39, 0x34, 0x37, 0x3e, 0x43, 0x42, 0x42, 0x45, 0x47, 0x46, + 0x45, 0x4a, 0x4e, 0x4e, 0x51, 0x51, 0x51, 0x52, 0x51, 0x53, 0x54, 0x52, + 0x4f, 0x51, 0x50, 0x4d, 0x4e, 0x50, 0x4f, 0x4d, 0x49, 0x47, 0x44, 0x3d, + 0x37, 0x33, 0x34, 0x37, 0x36, 0x33, 0x36, 0x38, 0x3b, 0x3a, 0x39, 0x37, + 0x29, 0x27, 0x2d, 0x32, 0x2f, 0x32, 0x30, 0x32, 0x35, 0x34, 0x37, 0x37, + 0x38, 0x38, 0x37, 0x36, 0x37, 0x38, 0x38, 0x3a, 0x3a, 0x3a, 0x3a, 0x3c, + 0x3d, 0x3c, 0x3c, 0x3d, 0x3d, 0x3d, 0x3e, 0x42, 0x42, 0x43, 0x43, 0x45, + 0x45, 0x45, 0x47, 0x47, 0x47, 0x46, 0x47, 0x47, 0x47, 0x49, 0x4b, 0x48, + 0x47, 0x47, 0x47, 0x47, 0xa1, 0xb1, 0xac, 0xa6, 0xab, 0xab, 0xbe, 0xb0, + 0xae, 0xb1, 0xb4, 0xac, 0xad, 0xac, 0xa9, 0xac, 0xaa, 0xa6, 0xad, 0xbc, + 0xc2, 0x0a, 0x17, 0x10, 0x0d, 0xeb, 0xdc, 0x04, 0x00, 0xf9, 0xf7, 0xff, + 0xf7, 0xe7, 0xe9, 0xd7, 0xc6, 0xc8, 0xd4, 0xdb, 0xd5, 0xc6, 0xc7, 0xc4, + 0xc9, 0xbf, 0xc0, 0xc9, 0xcc, 0xc9, 0xbc, 0xb6, 0xa9, 0xa7, 0xa9, 0xbc, + 0xbc, 0xb7, 0xbb, 0xc1, 0xc1, 0xba, 0xb2, 0xc6, 0xc0, 0xbc, 0xc8, 0xcb, + 0xc6, 0xcc, 0xcc, 0xcc, 0xd8, 0xdc, 0xdc, 0xe6, 0xe7, 0xec, 0xf2, 0xfd, + 0x0c, 0x0c, 0x0d, 0x0d, 0x04, 0x03, 0x05, 0x03, 0xf8, 0xfb, 0xf9, 0xf7, + 0xfc, 0xf8, 0xf8, 0x01, 0x01, 0x04, 0x09, 0x0e, 0x16, 0x12, 0x12, 0x17, + 0x18, 0x1d, 0x1d, 0x1e, 0x23, 0x26, 0x25, 0x24, 0x26, 0x28, 0x2f, 0x34, + 0x37, 0x32, 0x32, 0x32, 0x35, 0x36, 0x39, 0x3d, 0x36, 0x37, 0x3b, 0x37, + 0x38, 0x3a, 0x41, 0x41, 0x41, 0x42, 0x45, 0x44, 0x42, 0x48, 0x4d, 0x4e, + 0x4f, 0x4f, 0x50, 0x50, 0x50, 0x54, 0x54, 0x4f, 0x4d, 0x50, 0x4f, 0x4d, + 0x4e, 0x4f, 0x4c, 0x47, 0x45, 0x43, 0x3f, 0x3c, 0x3b, 0x39, 0x3a, 
0x3d, + 0x3c, 0x3a, 0x39, 0x38, 0x37, 0x36, 0x37, 0x30, 0x23, 0x20, 0x27, 0x2e, + 0x29, 0x2b, 0x2a, 0x2d, 0x31, 0x31, 0x32, 0x36, 0x36, 0x36, 0x36, 0x33, + 0x33, 0x33, 0x34, 0x36, 0x37, 0x38, 0x38, 0x37, 0x3a, 0x39, 0x38, 0x38, + 0x39, 0x3c, 0x3d, 0x3d, 0x3f, 0x41, 0x41, 0x42, 0x42, 0x42, 0x43, 0x43, + 0x43, 0x43, 0x44, 0x47, 0x45, 0x47, 0x48, 0x47, 0x45, 0x46, 0x47, 0x44, + 0xa1, 0xa9, 0xac, 0xab, 0xa5, 0xbb, 0xc9, 0xb1, 0xad, 0xb3, 0xb4, 0xb0, + 0xb6, 0xb4, 0xac, 0xb1, 0xb0, 0xa9, 0xb1, 0xb9, 0xc1, 0x08, 0xe5, 0x03, + 0x08, 0xd2, 0xe3, 0x07, 0xff, 0xf7, 0xef, 0xf1, 0xe4, 0xcd, 0xe5, 0xd2, + 0xbc, 0xc2, 0xca, 0xca, 0xbd, 0xcc, 0xc2, 0xba, 0xbe, 0xb9, 0xba, 0xc4, + 0xc8, 0xc8, 0xb9, 0xb7, 0xad, 0xa4, 0xa5, 0xbf, 0xb9, 0xb7, 0xc0, 0xc4, + 0xb9, 0xbc, 0xb7, 0xc7, 0xb7, 0xbc, 0xc2, 0xc5, 0xc4, 0xc8, 0xc6, 0xcd, + 0xd4, 0xd9, 0xda, 0xe1, 0xe0, 0xe7, 0xf0, 0x00, 0x0c, 0x0a, 0x0a, 0x08, + 0x01, 0xff, 0x02, 0x01, 0xfe, 0xf8, 0xf7, 0xf2, 0xf6, 0xf7, 0xf2, 0xf9, + 0xf9, 0x03, 0x06, 0x07, 0x0d, 0x10, 0x12, 0x17, 0x18, 0x1b, 0x19, 0x1d, + 0x1b, 0x20, 0x1f, 0x25, 0x26, 0x26, 0x2d, 0x31, 0x36, 0x32, 0x2e, 0x32, + 0x35, 0x36, 0x36, 0x3c, 0x37, 0x36, 0x38, 0x39, 0x37, 0x37, 0x3f, 0x41, + 0x41, 0x41, 0x42, 0x42, 0x42, 0x45, 0x4a, 0x4c, 0x4e, 0x50, 0x50, 0x4f, + 0x50, 0x52, 0x53, 0x50, 0x4f, 0x50, 0x4d, 0x49, 0x4e, 0x4d, 0x48, 0x42, + 0x42, 0x42, 0x40, 0x41, 0x43, 0x43, 0x43, 0x42, 0x42, 0x3e, 0x3d, 0x3c, + 0x38, 0x35, 0x32, 0x2d, 0x27, 0x1f, 0x21, 0x2a, 0x27, 0x24, 0x27, 0x27, + 0x27, 0x2f, 0x2e, 0x32, 0x32, 0x31, 0x35, 0x36, 0x32, 0x2e, 0x2e, 0x32, + 0x32, 0x33, 0x34, 0x37, 0x37, 0x37, 0x36, 0x34, 0x34, 0x37, 0x3a, 0x39, + 0x3b, 0x3d, 0x3c, 0x3f, 0x3f, 0x3d, 0x3d, 0x40, 0x40, 0x3e, 0x3e, 0x3f, + 0x3b, 0x42, 0x44, 0x43, 0x42, 0x44, 0x44, 0x43, 0xa2, 0xa7, 0xb1, 0xab, + 0xb1, 0xbc, 0xb3, 0xac, 0xac, 0xb4, 0xb1, 0xb2, 0xb7, 0xb1, 0xad, 0xb0, + 0xbd, 0xac, 0xad, 0xb7, 0xc7, 0xfc, 0xd1, 0x07, 0xf8, 0xc3, 0xe9, 0x05, + 0xfb, 0xf0, 0xec, 0xe8, 0xcd, 0xbb, 0xd3, 0xca, 0xbb, 0xbb, 0xc2, 
0xbf, + 0xac, 0xae, 0xc1, 0xc3, 0xbb, 0xae, 0xb5, 0xbf, 0xc3, 0xc3, 0xb6, 0xb6, + 0xb0, 0xa7, 0xaa, 0xb7, 0xb3, 0xaf, 0xbf, 0xc3, 0xb0, 0xb3, 0xc1, 0xba, + 0xb0, 0xb6, 0xba, 0xbc, 0xc0, 0xc5, 0xc9, 0xcd, 0xd0, 0xd5, 0xd7, 0xd7, + 0xdc, 0xe3, 0xee, 0x05, 0x0a, 0x06, 0x04, 0x06, 0x02, 0xff, 0x00, 0xfd, + 0xfe, 0xf8, 0xf7, 0xf4, 0xf4, 0xf5, 0xf0, 0xf4, 0xf8, 0xfd, 0x03, 0x04, + 0x07, 0x07, 0x09, 0x11, 0x17, 0x17, 0x14, 0x1b, 0x1b, 0x1d, 0x1d, 0x20, + 0x28, 0x28, 0x2d, 0x2f, 0x33, 0x35, 0x30, 0x32, 0x34, 0x39, 0x37, 0x3c, + 0x3b, 0x37, 0x39, 0x3e, 0x3a, 0x38, 0x39, 0x3d, 0x42, 0x40, 0x42, 0x43, + 0x3e, 0x42, 0x47, 0x4a, 0x4d, 0x51, 0x4f, 0x4e, 0x50, 0x50, 0x51, 0x50, + 0x4f, 0x4f, 0x4d, 0x4b, 0x4d, 0x4d, 0x47, 0x41, 0x43, 0x45, 0x43, 0x44, + 0x46, 0x44, 0x45, 0x42, 0x41, 0x40, 0x3e, 0x3b, 0x3b, 0x3a, 0x34, 0x2d, + 0x28, 0x25, 0x22, 0x23, 0x21, 0x1b, 0x21, 0x22, 0x1f, 0x27, 0x2b, 0x2c, + 0x2b, 0x30, 0x2f, 0x30, 0x2d, 0x2c, 0x2b, 0x2d, 0x2c, 0x2b, 0x2c, 0x30, + 0x33, 0x34, 0x32, 0x34, 0x32, 0x33, 0x36, 0x37, 0x39, 0x38, 0x38, 0x3a, + 0x3c, 0x3c, 0x3b, 0x3b, 0x3b, 0x3a, 0x37, 0x29, 0x23, 0x37, 0x3d, 0x40, + 0x42, 0x43, 0x42, 0x43, 0xa7, 0xb1, 0xb2, 0xac, 0xb4, 0xb8, 0xb9, 0xb7, + 0xaa, 0xb7, 0xb7, 0xb1, 0xb2, 0xab, 0xa9, 0xb1, 0xb4, 0xac, 0xb0, 0xb8, + 0xd2, 0xe3, 0xca, 0x0c, 0xe8, 0xc0, 0xec, 0xf4, 0xeb, 0xd9, 0xe5, 0xdd, + 0xbf, 0xbc, 0xbf, 0xc0, 0xb3, 0xac, 0xb6, 0xb1, 0xac, 0xa2, 0xb1, 0xc2, + 0xbb, 0xaf, 0xba, 0xbc, 0xbc, 0xb0, 0xaa, 0xb1, 0xb1, 0xa7, 0xa7, 0xaa, + 0xac, 0xb0, 0xc3, 0xc1, 0xae, 0xb3, 0xb8, 0xb5, 0xaf, 0xaf, 0xb5, 0xb7, + 0xbb, 0xc4, 0xc6, 0xc5, 0xca, 0xd4, 0xd8, 0xd5, 0xd5, 0xdc, 0xf1, 0x08, + 0x09, 0x03, 0xfe, 0x03, 0x02, 0x01, 0xfe, 0xfd, 0xfa, 0xf8, 0xf4, 0xf4, + 0xf6, 0xf5, 0xf1, 0xf0, 0xf4, 0xff, 0xfe, 0x00, 0x03, 0x07, 0x09, 0x0d, + 0x16, 0x16, 0x16, 0x18, 0x18, 0x1d, 0x1c, 0x1f, 0x27, 0x27, 0x2c, 0x2f, + 0x32, 0x36, 0x37, 0x34, 0x34, 0x37, 0x37, 0x38, 0x3d, 0x3b, 0x3c, 0x41, + 0x3e, 0x3d, 0x3b, 0x3b, 0x40, 0x41, 0x41, 0x42, 0x3d, 0x40, 0x45, 
0x49, + 0x4c, 0x51, 0x51, 0x4d, 0x50, 0x4f, 0x4f, 0x4f, 0x4e, 0x4f, 0x50, 0x4e, + 0x4c, 0x4b, 0x47, 0x45, 0x47, 0x47, 0x42, 0x40, 0x42, 0x3f, 0x3b, 0x3a, + 0x3a, 0x39, 0x37, 0x36, 0x35, 0x35, 0x35, 0x31, 0x2b, 0x26, 0x21, 0x1c, + 0x17, 0x13, 0x18, 0x18, 0x18, 0x1c, 0x27, 0x29, 0x2a, 0x2d, 0x2b, 0x2d, + 0x2c, 0x2a, 0x28, 0x24, 0x27, 0x25, 0x27, 0x2d, 0x32, 0x2e, 0x2d, 0x2e, + 0x2d, 0x2d, 0x2f, 0x32, 0x35, 0x37, 0x35, 0x35, 0x37, 0x39, 0x39, 0x38, + 0x37, 0x37, 0x33, 0x27, 0x27, 0x36, 0x3a, 0x3b, 0x3d, 0x3e, 0x3d, 0x41, + 0xab, 0xb6, 0xab, 0xb1, 0xb1, 0xb5, 0xc1, 0xd0, 0xbc, 0xbb, 0xb3, 0xb1, + 0xb8, 0xa7, 0xbb, 0xb6, 0xb1, 0xb2, 0xba, 0xbc, 0xd8, 0xcc, 0xd8, 0x0c, + 0xd7, 0xc5, 0xe7, 0xe0, 0xd2, 0xcd, 0xdd, 0xd4, 0xb7, 0xc7, 0xb4, 0xb7, + 0xac, 0xa6, 0xae, 0xab, 0xa1, 0xa1, 0xb5, 0xb5, 0xa8, 0xab, 0xaf, 0xb6, + 0xb7, 0xac, 0xa7, 0xae, 0xac, 0xa7, 0xa5, 0xa6, 0xaa, 0xb3, 0xbd, 0xbe, + 0xb1, 0xad, 0xb3, 0xb4, 0xac, 0xac, 0xaf, 0xb7, 0xbf, 0xc0, 0xbb, 0xc2, + 0xca, 0xd0, 0xd2, 0xcf, 0xd5, 0xe1, 0xf9, 0x07, 0x06, 0x02, 0xf8, 0xfd, + 0xff, 0xfc, 0xfc, 0xfb, 0xf4, 0xf7, 0xf6, 0xf6, 0xf7, 0xf4, 0xf4, 0xf4, + 0xf7, 0xff, 0xfb, 0xfc, 0x00, 0x02, 0x07, 0x09, 0x10, 0x0b, 0xfc, 0x12, + 0x15, 0x17, 0x1f, 0x21, 0x27, 0x27, 0x2b, 0x2d, 0x31, 0x34, 0x38, 0x35, + 0x37, 0x38, 0x39, 0x3b, 0x41, 0x3b, 0x3d, 0x42, 0x40, 0x3d, 0x3d, 0x3d, + 0x3e, 0x40, 0x40, 0x41, 0x3a, 0x3d, 0x44, 0x45, 0x48, 0x4e, 0x4f, 0x4d, + 0x4e, 0x4e, 0x4e, 0x4e, 0x4d, 0x4d, 0x51, 0x50, 0x4d, 0x4d, 0x49, 0x46, + 0x42, 0x42, 0x40, 0x3d, 0x37, 0x36, 0x35, 0x37, 0x37, 0x37, 0x35, 0x33, + 0x35, 0x33, 0x30, 0x2f, 0x2d, 0x29, 0x25, 0x21, 0x1c, 0x12, 0x13, 0x16, + 0x13, 0x17, 0x1d, 0x22, 0x25, 0x28, 0x2a, 0x2a, 0x28, 0x25, 0x27, 0x22, + 0x23, 0x25, 0x24, 0x2e, 0x32, 0x2b, 0x25, 0x27, 0x2a, 0x26, 0x29, 0x2d, + 0x2d, 0x2d, 0x30, 0x2e, 0x30, 0x34, 0x34, 0x34, 0x33, 0x34, 0x30, 0x24, + 0x28, 0x33, 0x37, 0x37, 0x3a, 0x3c, 0x3b, 0x3d, 0xa3, 0xa8, 0xaf, 0xb7, + 0xc0, 0xc2, 0xd1, 0xe4, 0xcd, 0xb7, 0xb7, 0xcb, 0xd9, 0xd4, 0xe8, 
0xe1, + 0xde, 0xdc, 0xc1, 0xbf, 0xd3, 0xbc, 0xea, 0x02, 0xc7, 0xcf, 0xda, 0xd7, + 0xca, 0xce, 0xd0, 0xcc, 0xb4, 0xc1, 0xb4, 0xb3, 0xac, 0xac, 0xa7, 0x9c, + 0x9e, 0xae, 0xb4, 0xa8, 0xa2, 0xb0, 0xb4, 0xb2, 0xb4, 0xa8, 0xa7, 0xb0, + 0xac, 0xa7, 0xa7, 0xad, 0xac, 0xae, 0xb4, 0xbe, 0xc7, 0xbb, 0xb7, 0xa9, + 0xa8, 0xa8, 0xb2, 0xb8, 0xc2, 0xb8, 0xb5, 0xbf, 0xcb, 0xc8, 0xc7, 0xd2, + 0xde, 0xf0, 0x01, 0x03, 0xfc, 0xf9, 0xf7, 0xfc, 0xfb, 0xf9, 0xfc, 0xf6, + 0xf2, 0xf5, 0xed, 0xee, 0xf2, 0xf2, 0xf5, 0xf8, 0xfc, 0x02, 0x00, 0xfd, + 0x02, 0x02, 0x02, 0x07, 0x0c, 0xe7, 0xdd, 0x0a, 0x13, 0x17, 0x1c, 0x1f, + 0x26, 0x27, 0x2b, 0x2f, 0x32, 0x34, 0x35, 0x35, 0x37, 0x39, 0x39, 0x3d, + 0x3e, 0x3d, 0x3d, 0x42, 0x45, 0x41, 0x3f, 0x40, 0x40, 0x3f, 0x42, 0x43, + 0x3c, 0x3d, 0x42, 0x44, 0x44, 0x4b, 0x4d, 0x4d, 0x4d, 0x4d, 0x4f, 0x4e, + 0x4c, 0x4a, 0x4e, 0x4e, 0x4f, 0x4e, 0x48, 0x41, 0x3e, 0x3f, 0x3d, 0x39, + 0x36, 0x38, 0x39, 0x37, 0x37, 0x37, 0x37, 0x37, 0x35, 0x33, 0x31, 0x2f, + 0x2d, 0x29, 0x24, 0x1e, 0x1d, 0x17, 0x10, 0x13, 0x0d, 0x07, 0x0e, 0x1d, + 0x22, 0x21, 0x22, 0x25, 0x21, 0x21, 0x22, 0x1f, 0x21, 0x26, 0x2d, 0x2c, + 0x26, 0x1f, 0x1b, 0x20, 0x1f, 0x1e, 0x24, 0x28, 0x2b, 0x2c, 0x2d, 0x2a, + 0x2a, 0x2c, 0x2d, 0x2d, 0x2d, 0x2d, 0x2a, 0x1c, 0x1d, 0x2d, 0x34, 0x35, + 0x37, 0x37, 0x37, 0x38, 0xa5, 0xb3, 0xb5, 0xb6, 0xbf, 0xbf, 0xc9, 0xc8, + 0xe8, 0xe6, 0xc8, 0xec, 0xf7, 0xf6, 0xe8, 0x12, 0xf9, 0xd3, 0xbf, 0xc4, + 0xcc, 0xb7, 0xe8, 0xf1, 0xc2, 0xce, 0xc8, 0xcb, 0xc4, 0xc2, 0xc4, 0xc7, + 0xaf, 0xb1, 0xb1, 0xae, 0xac, 0xac, 0xb2, 0xa7, 0xa9, 0xac, 0xa2, 0xa7, + 0xa4, 0xad, 0xb3, 0xb7, 0xb6, 0xa6, 0xa7, 0xb1, 0xb1, 0xa4, 0xa8, 0xb8, + 0xb1, 0xac, 0xab, 0xcb, 0xf2, 0xda, 0xc1, 0xad, 0xac, 0xb2, 0xc4, 0xb2, + 0xb1, 0xb6, 0xb8, 0xbf, 0xcb, 0xc2, 0xce, 0xe2, 0xea, 0xf7, 0x01, 0xfc, + 0xf0, 0xf2, 0xf4, 0xf6, 0xf2, 0xee, 0xf2, 0xed, 0xe6, 0xe8, 0xe9, 0xee, + 0xf0, 0xf2, 0xf3, 0xfb, 0xfd, 0x02, 0x03, 0x01, 0x00, 0x01, 0x03, 0x06, + 0xfc, 0xcc, 0xe7, 0x0a, 0x14, 0x17, 0x1c, 0x1e, 0x27, 0x2b, 0x2b, 
0x2f, + 0x33, 0x34, 0x31, 0x33, 0x37, 0x3a, 0x38, 0x3d, 0x40, 0x3e, 0x3e, 0x42, + 0x45, 0x47, 0x43, 0x40, 0x42, 0x40, 0x42, 0x43, 0x3d, 0x3d, 0x3f, 0x41, + 0x41, 0x46, 0x4b, 0x4d, 0x4a, 0x4a, 0x4b, 0x49, 0x4a, 0x4c, 0x4d, 0x4c, + 0x4b, 0x4c, 0x4c, 0x47, 0x45, 0x42, 0x3d, 0x3c, 0x3d, 0x3d, 0x3b, 0x3c, + 0x3c, 0x38, 0x39, 0x39, 0x36, 0x32, 0x2f, 0x2d, 0x2d, 0x29, 0x25, 0x21, + 0x1e, 0x1c, 0x15, 0x10, 0x04, 0xf9, 0xfe, 0x0d, 0x19, 0x1b, 0x18, 0x1f, + 0x1d, 0x1d, 0x18, 0x16, 0x17, 0x1d, 0x22, 0x22, 0x18, 0x13, 0x13, 0x10, + 0x10, 0x15, 0x1d, 0x22, 0x26, 0x28, 0x29, 0x2c, 0x28, 0x27, 0x27, 0x27, + 0x1f, 0x1f, 0x21, 0x0f, 0x15, 0x27, 0x2a, 0x2e, 0x30, 0x32, 0x34, 0x32, + 0xb0, 0xb7, 0xb7, 0xb1, 0xb1, 0xc4, 0xc2, 0xc2, 0xe5, 0xe0, 0xcc, 0x03, + 0xe2, 0xcc, 0xe7, 0xf5, 0xc9, 0xc3, 0xc3, 0xce, 0xc6, 0xc5, 0xfa, 0xe2, + 0xc6, 0xcc, 0xb2, 0xb0, 0xb9, 0xbb, 0xc3, 0xb6, 0xac, 0xa8, 0xb0, 0xaf, + 0xaa, 0xb4, 0xba, 0xc2, 0xb9, 0xa4, 0x9b, 0xa5, 0xad, 0xb5, 0xb7, 0xb9, + 0xb1, 0xa7, 0xad, 0xb1, 0xa7, 0xa4, 0xa4, 0xa7, 0xac, 0xad, 0xa6, 0xa6, + 0xb8, 0xbc, 0xc1, 0xc2, 0xbd, 0xbc, 0xd1, 0xb0, 0xa7, 0xb3, 0xbc, 0xc1, + 0xc3, 0xce, 0xd9, 0xe0, 0xeb, 0xf5, 0xf5, 0xee, 0xec, 0xec, 0xec, 0xed, + 0xe1, 0xdf, 0xe6, 0xe6, 0xdc, 0xdc, 0xe5, 0xee, 0xed, 0xf0, 0xf2, 0xf9, + 0xfb, 0xfc, 0xff, 0xfd, 0xfa, 0xfb, 0x03, 0x07, 0xec, 0xc7, 0xf6, 0x0a, + 0x12, 0x17, 0x1a, 0x1b, 0x21, 0x27, 0x2c, 0x2c, 0x32, 0x32, 0x32, 0x33, + 0x37, 0x35, 0x37, 0x3d, 0x3f, 0x40, 0x3f, 0x42, 0x45, 0x46, 0x44, 0x44, + 0x42, 0x42, 0x41, 0x44, 0x40, 0x3f, 0x3e, 0x3a, 0x3b, 0x3d, 0x43, 0x4b, + 0x47, 0x46, 0x46, 0x46, 0x47, 0x47, 0x46, 0x47, 0x48, 0x47, 0x4b, 0x4a, + 0x4a, 0x47, 0x45, 0x41, 0x3e, 0x3e, 0x39, 0x39, 0x37, 0x33, 0x32, 0x30, + 0x2f, 0x30, 0x2c, 0x2c, 0x29, 0x27, 0x27, 0x24, 0x1f, 0x1c, 0x11, 0x0f, + 0x0a, 0x01, 0xfa, 0xfc, 0x08, 0x10, 0x11, 0x18, 0x1c, 0x19, 0x17, 0x0b, + 0x0b, 0x0e, 0x10, 0x17, 0x11, 0x08, 0x06, 0x03, 0x0c, 0x16, 0x17, 0x1a, + 0x1c, 0x1e, 0x1e, 0x22, 0x24, 0x27, 0x28, 0x27, 0x1d, 0x13, 0x11, 
0x0a, + 0x12, 0x22, 0x24, 0x27, 0x27, 0x2c, 0x2d, 0x2e, 0xbf, 0xb3, 0xb7, 0xac, + 0xa1, 0xbc, 0xc2, 0xbc, 0xb8, 0xb6, 0xd5, 0xdb, 0xd2, 0xc6, 0xd3, 0xce, + 0xc7, 0xc6, 0xbb, 0xbd, 0xb9, 0xd7, 0xf7, 0xd9, 0xb7, 0xb7, 0xb4, 0xac, + 0xb8, 0xcc, 0xc0, 0xb6, 0xb4, 0xb2, 0xb2, 0xac, 0xab, 0xac, 0xb3, 0xbd, + 0xb5, 0xaf, 0xa7, 0xa7, 0xa9, 0xb2, 0xbc, 0xb6, 0xa7, 0x9c, 0xab, 0xae, + 0xa1, 0xb1, 0xb4, 0xa5, 0xa0, 0xa8, 0xaa, 0xa8, 0xb5, 0xae, 0xb7, 0xb9, + 0xbb, 0xc5, 0xce, 0xb2, 0xa6, 0xb2, 0xbb, 0xc6, 0xc9, 0xd0, 0xd6, 0xdd, + 0xec, 0xef, 0xeb, 0xe7, 0xe2, 0xe0, 0xdd, 0xd6, 0xce, 0xcd, 0xd4, 0xdc, + 0xd2, 0xcf, 0xdd, 0xe7, 0xe7, 0xea, 0xec, 0xf0, 0xf4, 0xf3, 0xf5, 0xf7, + 0xf9, 0xfc, 0x02, 0xff, 0xd9, 0xce, 0xfe, 0x08, 0x0d, 0x15, 0x16, 0x17, + 0x1d, 0x22, 0x2b, 0x2d, 0x2e, 0x2d, 0x31, 0x34, 0x35, 0x34, 0x34, 0x3a, + 0x3d, 0x42, 0x41, 0x41, 0x45, 0x45, 0x46, 0x47, 0x45, 0x44, 0x42, 0x44, + 0x42, 0x3f, 0x3f, 0x37, 0x34, 0x34, 0x39, 0x43, 0x44, 0x42, 0x43, 0x45, + 0x43, 0x45, 0x46, 0x46, 0x47, 0x45, 0x44, 0x44, 0x46, 0x42, 0x3f, 0x3d, + 0x3b, 0x36, 0x25, 0x24, 0x2f, 0x30, 0x2c, 0x28, 0x23, 0x25, 0x26, 0x20, + 0x1d, 0x20, 0x20, 0x1e, 0x1b, 0x1c, 0x17, 0x10, 0x0c, 0x05, 0xfc, 0xf2, + 0xf7, 0x02, 0x08, 0x0e, 0x12, 0x15, 0x0d, 0xff, 0xf5, 0xfe, 0x02, 0x0a, + 0x0c, 0x03, 0xfc, 0xfd, 0x06, 0x12, 0x13, 0x16, 0x14, 0x17, 0x17, 0x18, + 0x1b, 0x1c, 0x22, 0x23, 0x1f, 0x18, 0x17, 0x12, 0x15, 0x1f, 0x23, 0x21, + 0x1f, 0x22, 0x23, 0x25, 0xc2, 0xb7, 0xb3, 0xab, 0xa2, 0xb4, 0xc2, 0xb7, + 0xae, 0xb9, 0xb8, 0xd0, 0xd4, 0xbc, 0xb8, 0xbe, 0xc0, 0xb5, 0xb2, 0xb9, + 0xb6, 0xd3, 0xc7, 0xb7, 0xb0, 0xbc, 0xbf, 0xac, 0xaf, 0xbf, 0xbf, 0xba, + 0xb7, 0xac, 0x9c, 0xa4, 0xb0, 0xa9, 0xb4, 0xa6, 0x9c, 0xa7, 0xac, 0xa7, + 0xa5, 0xb7, 0xc1, 0xb1, 0xa1, 0x99, 0xad, 0xac, 0xa8, 0xcb, 0xc3, 0x9e, + 0xa2, 0xac, 0xad, 0xb1, 0xb8, 0xac, 0xb0, 0xb3, 0xb1, 0xbc, 0xc0, 0xb4, + 0xb1, 0xb7, 0xbd, 0xc5, 0xcc, 0xcd, 0xd2, 0xde, 0xe8, 0xe2, 0xe0, 0xdf, + 0xd5, 0xcd, 0xc7, 0xc4, 0xbf, 0xc1, 0xc7, 0xcc, 0xc5, 0xcd, 0xd9, 
0xda, + 0xd8, 0xde, 0xe6, 0xea, 0xee, 0xf0, 0xf2, 0xf4, 0xf8, 0xfc, 0x02, 0xf7, + 0xc8, 0xdb, 0x02, 0x0b, 0x0e, 0x12, 0x11, 0x12, 0x1b, 0x21, 0x2b, 0x2c, + 0x2c, 0x2c, 0x32, 0x35, 0x35, 0x34, 0x35, 0x39, 0x3c, 0x40, 0x44, 0x42, + 0x45, 0x44, 0x46, 0x48, 0x47, 0x44, 0x42, 0x46, 0x45, 0x41, 0x42, 0x39, + 0x32, 0x31, 0x33, 0x3a, 0x3f, 0x40, 0x42, 0x43, 0x41, 0x42, 0x43, 0x44, + 0x42, 0x44, 0x43, 0x42, 0x40, 0x3e, 0x3e, 0x3a, 0x37, 0x31, 0x22, 0x15, + 0x1e, 0x28, 0x26, 0x22, 0x22, 0x1d, 0x1d, 0x17, 0x10, 0x0e, 0x0a, 0x08, + 0x0e, 0x0c, 0x07, 0x07, 0x05, 0xfa, 0xf2, 0xf2, 0xef, 0xf0, 0xfd, 0x04, + 0x03, 0x0b, 0x09, 0xf9, 0xf4, 0xf3, 0xf0, 0xf2, 0xf6, 0xf3, 0xf3, 0x02, + 0x02, 0x08, 0x12, 0x12, 0x11, 0x0c, 0x12, 0x13, 0x12, 0x0f, 0x15, 0x16, + 0x14, 0x12, 0x11, 0x12, 0x12, 0x18, 0x18, 0x16, 0x13, 0x18, 0x19, 0x18, + 0xc1, 0xbc, 0xb2, 0xa5, 0xab, 0xc2, 0xca, 0xbb, 0xc1, 0xd9, 0xbb, 0xc4, + 0xc4, 0xb7, 0xb1, 0xbc, 0xc7, 0xbd, 0xbc, 0xbc, 0xb1, 0xb6, 0xb2, 0xb1, + 0xb1, 0xab, 0xbc, 0xa9, 0xab, 0xb1, 0xb1, 0xb8, 0xa9, 0x9a, 0x97, 0xbb, + 0xb5, 0xbc, 0xcc, 0xac, 0x99, 0x9b, 0xab, 0xaa, 0xa3, 0xb3, 0xb4, 0xad, + 0x9f, 0x9d, 0xc4, 0xbe, 0xb7, 0xc4, 0xbf, 0xa3, 0xa6, 0xa8, 0xad, 0xad, + 0xac, 0xac, 0xad, 0xb1, 0xae, 0xb1, 0xb1, 0xb0, 0xb4, 0xba, 0xbf, 0xc6, + 0xc7, 0xc8, 0xcc, 0xdc, 0xd7, 0xce, 0xc7, 0xc7, 0xbc, 0xbb, 0xbb, 0xb5, + 0xb3, 0xbe, 0xbd, 0xc1, 0xbd, 0xc7, 0xcf, 0xcc, 0xcc, 0xd7, 0xdf, 0xe1, + 0xe4, 0xe9, 0xee, 0xf2, 0xf7, 0xfe, 0x03, 0xec, 0xc6, 0xef, 0x02, 0x0d, + 0x12, 0x12, 0x10, 0x15, 0x1d, 0x20, 0x27, 0x2b, 0x29, 0x2d, 0x31, 0x34, + 0x32, 0x32, 0x36, 0x39, 0x3b, 0x3c, 0x43, 0x42, 0x42, 0x3f, 0x40, 0x43, + 0x46, 0x43, 0x42, 0x42, 0x43, 0x43, 0x43, 0x3d, 0x36, 0x34, 0x39, 0x39, + 0x39, 0x3d, 0x3c, 0x3c, 0x3c, 0x3d, 0x3d, 0x3f, 0x41, 0x40, 0x3d, 0x3d, + 0x3b, 0x3a, 0x3a, 0x37, 0x34, 0x32, 0x27, 0x09, 0xff, 0x16, 0x1d, 0x19, + 0x1b, 0x18, 0x18, 0x17, 0x10, 0x07, 0x07, 0x01, 0xfc, 0xf7, 0xef, 0xf0, + 0xfa, 0xf2, 0xea, 0xe2, 0xe2, 0xe6, 0xf0, 0xf6, 0xf6, 0xf6, 0x00, 
0xfe, + 0xf5, 0xf2, 0xef, 0xe4, 0xde, 0xd5, 0xdb, 0xe8, 0xf7, 0xfb, 0xff, 0x02, + 0x04, 0x10, 0x0e, 0x0b, 0x08, 0x07, 0x02, 0x07, 0x08, 0x07, 0x00, 0xff, + 0x01, 0x12, 0x18, 0x16, 0x14, 0x0e, 0x0f, 0x14, 0xb7, 0xc0, 0xb0, 0xac, + 0xb2, 0xc0, 0xc7, 0xbc, 0xc7, 0xd2, 0xbd, 0xc2, 0xc2, 0xb7, 0xb9, 0xb2, + 0xbd, 0xc6, 0xc8, 0xb8, 0xab, 0xb0, 0xb6, 0xb3, 0xb6, 0xb4, 0xc0, 0xad, + 0xaa, 0xb2, 0xb6, 0xa9, 0x9f, 0x99, 0x9a, 0xb1, 0xbc, 0xdb, 0xeb, 0xbe, + 0x9c, 0xa5, 0xb8, 0xca, 0xae, 0xac, 0xc0, 0xb2, 0xa1, 0xa9, 0xbb, 0xcb, + 0xb4, 0xb4, 0xb6, 0xa1, 0x9f, 0xa1, 0xad, 0xac, 0xa7, 0xae, 0xae, 0xb1, + 0xa7, 0xb4, 0xb3, 0xb3, 0xb7, 0xbe, 0xb9, 0xb6, 0xb5, 0xba, 0xc2, 0xc6, + 0xba, 0xaf, 0xac, 0xb5, 0xb4, 0xb8, 0xb7, 0xb2, 0xb0, 0xb3, 0xb7, 0xb8, + 0xbc, 0xc4, 0xbc, 0xbd, 0xc5, 0xd2, 0xd2, 0xd2, 0xda, 0xe2, 0xea, 0xf1, + 0xf5, 0xfd, 0x01, 0xda, 0xca, 0xfb, 0x02, 0x08, 0x0e, 0x0e, 0x08, 0x14, + 0x1d, 0x21, 0x27, 0x2a, 0x27, 0x2a, 0x2d, 0x32, 0x31, 0x32, 0x34, 0x38, + 0x38, 0x39, 0x41, 0x3f, 0x37, 0x39, 0x38, 0x38, 0x3f, 0x41, 0x41, 0x40, + 0x3f, 0x3d, 0x40, 0x3f, 0x3c, 0x39, 0x3b, 0x40, 0x3f, 0x3f, 0x3e, 0x3d, + 0x3e, 0x3f, 0x3d, 0x3d, 0x3d, 0x3a, 0x38, 0x34, 0x2a, 0x32, 0x34, 0x32, + 0x2b, 0x25, 0x1d, 0x02, 0xee, 0xfc, 0x0d, 0x09, 0x0c, 0x09, 0x02, 0x06, + 0x09, 0x03, 0xfc, 0xf6, 0xf4, 0xf0, 0xe6, 0xdc, 0xde, 0xd5, 0xd4, 0xdf, + 0xd2, 0xd4, 0xe4, 0xd7, 0xe7, 0xe4, 0xe7, 0xe7, 0xeb, 0xe7, 0xef, 0xe2, + 0xd3, 0xc5, 0xc5, 0xd0, 0xea, 0xeb, 0xe6, 0xdd, 0xe8, 0x02, 0x0e, 0x09, + 0x07, 0x01, 0xf7, 0xf2, 0xf7, 0xf3, 0xee, 0xed, 0xef, 0xf5, 0xfc, 0xf8, + 0xf7, 0xf6, 0x03, 0xff, 0xb1, 0xbd, 0xb4, 0xb6, 0xbb, 0xbf, 0xbf, 0xba, + 0xb4, 0xac, 0xb7, 0xbe, 0xbe, 0xb7, 0xb5, 0xb1, 0xbc, 0xc2, 0xbd, 0xb0, + 0xac, 0xb4, 0xb4, 0xb1, 0xb2, 0xbb, 0xc5, 0xb7, 0xac, 0xae, 0xb2, 0xaa, + 0xa1, 0x9e, 0x9e, 0xa8, 0xb6, 0xc9, 0xcc, 0xb7, 0xa8, 0xab, 0xb4, 0xc4, + 0xa8, 0xac, 0xbb, 0xbb, 0xbc, 0xb4, 0xb7, 0xc9, 0xb9, 0xaf, 0xac, 0x9c, + 0xa1, 0xa2, 0xb9, 0xb0, 0xb1, 0xb8, 0xac, 0xb1, 0xa9, 0xc2, 0xc5, 
0xc2, + 0xba, 0xbe, 0xbd, 0xae, 0xac, 0xbf, 0xb9, 0xb7, 0xab, 0xac, 0xa9, 0xad, + 0xb1, 0xb4, 0xb1, 0xb1, 0xb6, 0xb5, 0xb7, 0xb4, 0xb2, 0xb1, 0xaf, 0xb7, + 0xc1, 0xc6, 0xc3, 0xc8, 0xd0, 0xd7, 0xe3, 0xed, 0xed, 0xf8, 0xf9, 0xcb, + 0xd8, 0x00, 0x03, 0x04, 0x0d, 0x0b, 0x07, 0x12, 0x1d, 0x1e, 0x25, 0x25, + 0x23, 0x23, 0x2b, 0x2f, 0x2f, 0x30, 0x31, 0x36, 0x33, 0x34, 0x38, 0x39, + 0x32, 0x33, 0x31, 0x32, 0x38, 0x3c, 0x3f, 0x39, 0x37, 0x34, 0x36, 0x38, + 0x3c, 0x3d, 0x37, 0x39, 0x3b, 0x41, 0x3d, 0x3c, 0x3d, 0x3b, 0x37, 0x35, + 0x35, 0x33, 0x31, 0x2a, 0x1c, 0x29, 0x28, 0x21, 0x1a, 0x12, 0x0c, 0xf8, + 0xdb, 0xdc, 0xf4, 0xfe, 0xfc, 0xf4, 0xf7, 0xed, 0xec, 0xeb, 0xe1, 0xe2, + 0xd9, 0xe2, 0xd7, 0xe2, 0xe1, 0xda, 0xcd, 0xd4, 0xdc, 0xe3, 0xdd, 0xdb, + 0xe5, 0xd1, 0xd2, 0xd1, 0xda, 0xd9, 0xe2, 0xe2, 0xda, 0xc8, 0xc2, 0xcb, + 0xd2, 0xd2, 0xd7, 0xd0, 0xeb, 0xf1, 0xf8, 0xef, 0x01, 0xfc, 0xfb, 0xf7, + 0xec, 0xea, 0xf1, 0xf7, 0xf5, 0xf1, 0xf9, 0xeb, 0xda, 0xe4, 0xe7, 0xec, + 0xb0, 0xbf, 0xbd, 0xbb, 0xbe, 0xc0, 0xb7, 0xa7, 0xa5, 0xa8, 0xb6, 0xbc, + 0xbf, 0xb3, 0xb1, 0xb4, 0xbc, 0xbc, 0xb3, 0xad, 0xac, 0xb0, 0xb6, 0xb5, + 0xb8, 0xbc, 0xbc, 0xb0, 0xb1, 0xc6, 0xbf, 0xb1, 0xa8, 0xa0, 0x9e, 0xb0, + 0xba, 0xb1, 0xcf, 0xc7, 0xb5, 0xa8, 0xac, 0xb9, 0xa4, 0xa0, 0xab, 0xaa, + 0xb0, 0xd5, 0xdd, 0xb7, 0xbe, 0xb3, 0xaa, 0xa3, 0xa3, 0xa8, 0xb9, 0xbc, + 0xb9, 0xc1, 0xab, 0xac, 0xb1, 0xb5, 0xbf, 0xc7, 0xb7, 0xb7, 0xc6, 0xb4, + 0xb1, 0xc3, 0xda, 0xc3, 0xa7, 0xa8, 0xaa, 0xa5, 0xac, 0xad, 0xb1, 0xb4, + 0xac, 0xb3, 0xb6, 0xbb, 0xc9, 0xd4, 0xdb, 0xe6, 0xea, 0xeb, 0xe8, 0xe5, + 0xe2, 0xe2, 0xe4, 0xeb, 0xe9, 0xf2, 0xee, 0xc0, 0xe1, 0xf9, 0x01, 0x00, + 0x06, 0x07, 0x06, 0x14, 0x1d, 0x1d, 0x20, 0x24, 0x22, 0x20, 0x27, 0x2a, + 0x2e, 0x2f, 0x31, 0x35, 0x30, 0x2d, 0x2f, 0x2e, 0x2d, 0x31, 0x2c, 0x2c, + 0x33, 0x37, 0x3b, 0x37, 0x32, 0x32, 0x31, 0x30, 0x32, 0x36, 0x3a, 0x3a, + 0x35, 0x37, 0x38, 0x35, 0x32, 0x32, 0x30, 0x30, 0x2d, 0x28, 0x27, 0x1c, + 0x08, 0x17, 0x13, 0x11, 0x0d, 0x07, 0x00, 0xfb, 0xe1, 0xcb, 0xd4, 
0xee, + 0xeb, 0xec, 0xec, 0xe6, 0xe8, 0xdf, 0xc7, 0xce, 0xd5, 0xde, 0xd7, 0xe3, + 0xe2, 0xdc, 0xd2, 0xc7, 0xcf, 0xec, 0xe0, 0xd7, 0xdc, 0xcc, 0xcb, 0xbf, + 0xbe, 0xca, 0xd8, 0xd3, 0xd2, 0xcb, 0xd1, 0xd2, 0xc2, 0xc7, 0xcd, 0xc7, + 0xd4, 0xdc, 0xdc, 0xc8, 0xe7, 0xfc, 0xed, 0xe9, 0xe7, 0xdc, 0xcc, 0xde, + 0xdf, 0xe5, 0xe7, 0xd4, 0xdc, 0xdb, 0xd6, 0xd7, 0xb5, 0xbd, 0xbc, 0xba, + 0xb8, 0xbd, 0xb1, 0xab, 0xa5, 0xad, 0xb8, 0xbc, 0xbe, 0xa9, 0xb1, 0xbb, + 0xb8, 0xb4, 0xb6, 0xb3, 0xb2, 0xb5, 0xbc, 0xb3, 0xb6, 0xb6, 0xb6, 0xb3, + 0xd0, 0xe4, 0xd5, 0xc9, 0xba, 0xac, 0xa5, 0xbf, 0xc7, 0xac, 0xc4, 0xce, + 0xcc, 0xb7, 0xa7, 0xb1, 0xac, 0x9f, 0xac, 0xa1, 0xbf, 0xd2, 0xcf, 0xb3, + 0xb3, 0xb2, 0xa9, 0xa7, 0xae, 0xb9, 0xbc, 0xcc, 0xbc, 0xc6, 0xb7, 0xad, + 0xb4, 0xb9, 0xc0, 0xbc, 0xbb, 0xaf, 0xb1, 0xc0, 0xb5, 0xbb, 0xc9, 0xc2, + 0xa7, 0xa3, 0xa6, 0xaa, 0xaa, 0xae, 0xad, 0xb6, 0xbc, 0xdc, 0xf4, 0x02, + 0x09, 0x0d, 0x08, 0x09, 0x07, 0x02, 0xfd, 0xf9, 0xf3, 0xef, 0xeb, 0xea, + 0xe3, 0xe7, 0xdf, 0xc2, 0xed, 0xfa, 0xfc, 0xfb, 0xfe, 0x01, 0x05, 0x13, + 0x1c, 0x1b, 0x1e, 0x26, 0x20, 0x1e, 0x1e, 0x22, 0x28, 0x27, 0x2b, 0x2f, + 0x2d, 0x2d, 0x2e, 0x29, 0x28, 0x2e, 0x2d, 0x2a, 0x2f, 0x34, 0x37, 0x36, + 0x30, 0x2d, 0x2d, 0x2e, 0x2d, 0x2f, 0x32, 0x32, 0x32, 0x32, 0x30, 0x30, + 0x2d, 0x26, 0x22, 0x25, 0x26, 0x22, 0x1c, 0x0a, 0xf7, 0xfa, 0xef, 0xe6, + 0xe9, 0xf0, 0xec, 0xe7, 0xc4, 0xc2, 0xc0, 0xdc, 0xec, 0xe7, 0xdc, 0xdb, + 0xd1, 0xbe, 0xca, 0xd9, 0xd0, 0xd6, 0xd6, 0xd2, 0xce, 0xc9, 0xc2, 0xbe, + 0xc6, 0xef, 0xdc, 0xcc, 0xd1, 0xbf, 0xbd, 0xb7, 0xbc, 0xc8, 0xe0, 0xc8, + 0xc5, 0xca, 0xc7, 0xc7, 0xbc, 0xc7, 0xc7, 0xc1, 0xcc, 0xd7, 0xdb, 0xcb, + 0xd2, 0xef, 0xdd, 0xd7, 0xd2, 0xd1, 0xd1, 0xd4, 0xc9, 0xcc, 0xd2, 0xd1, + 0xd2, 0xd2, 0xce, 0xce, 0xbb, 0xc1, 0xbb, 0xb8, 0xbc, 0xbd, 0xab, 0xad, + 0xac, 0xb2, 0xba, 0xc1, 0xb9, 0xaa, 0xb7, 0xbc, 0xad, 0xaa, 0xd2, 0xcb, + 0xb8, 0xb1, 0xb4, 0xc6, 0xba, 0xb1, 0xb1, 0xb5, 0xc2, 0xc4, 0xc5, 0xca, + 0xc7, 0xc2, 0xbb, 0xc0, 0xc7, 0xa9, 0xb3, 0xb7, 0xc9, 0xd4, 0xba, 
0xa7, + 0xaa, 0xa9, 0xb0, 0xad, 0xb9, 0xcc, 0xca, 0xbd, 0xb5, 0xb7, 0xae, 0xac, + 0xab, 0xae, 0xac, 0xba, 0xbd, 0xb7, 0xb2, 0xad, 0xb1, 0xc4, 0xc1, 0xac, + 0xb7, 0xba, 0xc6, 0xd8, 0xcb, 0xb5, 0xb8, 0xb8, 0xb4, 0xa5, 0xaa, 0xb1, + 0xb2, 0xc7, 0xdb, 0xdd, 0xfa, 0x12, 0x08, 0x10, 0x18, 0x17, 0x15, 0x16, + 0x11, 0x0c, 0x0a, 0x04, 0xfd, 0xeb, 0xe7, 0xea, 0xe0, 0xdf, 0xcc, 0xcc, + 0xf5, 0xf9, 0xfa, 0xfe, 0xfd, 0xfe, 0x03, 0x0c, 0x1c, 0x18, 0x18, 0x20, + 0x1e, 0x17, 0x17, 0x19, 0x27, 0x21, 0x23, 0x28, 0x27, 0x2a, 0x29, 0x27, + 0x26, 0x2a, 0x2d, 0x2d, 0x2d, 0x30, 0x33, 0x33, 0x2f, 0x2b, 0x26, 0x27, + 0x25, 0x27, 0x27, 0x29, 0x27, 0x25, 0x26, 0x27, 0x2b, 0x22, 0x1d, 0x1a, + 0x12, 0x04, 0x07, 0xf4, 0xe6, 0xdf, 0xc7, 0xc2, 0xc4, 0xc7, 0xcc, 0xc4, + 0xaf, 0xb8, 0xbf, 0xc1, 0xd3, 0xd7, 0xd3, 0xca, 0xb9, 0xb9, 0xcc, 0xd7, + 0xc8, 0xca, 0xd7, 0xce, 0xc0, 0xca, 0xbe, 0xc0, 0xe6, 0xf7, 0xd1, 0xcc, + 0xcd, 0xbe, 0xb8, 0xac, 0xb7, 0xc2, 0xc2, 0xbc, 0xb4, 0xc4, 0xc4, 0xbe, + 0xc3, 0xc6, 0xc6, 0xbe, 0xc4, 0xc5, 0xcb, 0xcf, 0xcc, 0xe0, 0xc5, 0xc3, + 0xc8, 0xc2, 0xc4, 0xc7, 0xc0, 0xc3, 0xc8, 0xcb, 0xcb, 0xcc, 0xc1, 0xc2, + 0xb9, 0xbc, 0xbc, 0xc5, 0xbc, 0xb7, 0xad, 0xb1, 0xa9, 0xb7, 0xbc, 0xbc, + 0xac, 0xae, 0xb8, 0xcc, 0xb8, 0xad, 0xd8, 0xd0, 0xcd, 0xc1, 0xbb, 0xb1, + 0xab, 0xab, 0xa7, 0xb5, 0xb7, 0xac, 0xb4, 0xc8, 0xc2, 0xc7, 0xcb, 0xc7, + 0xc9, 0xba, 0xac, 0xac, 0xb0, 0xc1, 0xd4, 0xc6, 0xa4, 0xa7, 0xb3, 0xba, + 0xcf, 0xc2, 0xc6, 0xc4, 0xb7, 0xb8, 0xb1, 0xb2, 0xab, 0xab, 0xac, 0xaf, + 0xbf, 0xb6, 0xad, 0xac, 0xaf, 0xcd, 0xbb, 0xb0, 0xb1, 0xb7, 0xf1, 0xd5, + 0xcc, 0xc2, 0xb7, 0xba, 0xb7, 0xbe, 0xc2, 0xc7, 0xd8, 0x05, 0x12, 0xe2, + 0xf7, 0x11, 0xea, 0x07, 0x1e, 0x22, 0x1e, 0x1b, 0x1a, 0x13, 0x08, 0xfc, + 0xf7, 0xd8, 0xde, 0xec, 0xe2, 0xdb, 0xc1, 0xd7, 0xf2, 0xef, 0xf2, 0xf7, + 0xf8, 0xf9, 0xfc, 0x0a, 0x1b, 0x14, 0x16, 0x1a, 0x16, 0x0e, 0x12, 0x14, + 0x1e, 0x1b, 0x1b, 0x21, 0x26, 0x26, 0x1e, 0x22, 0x20, 0x26, 0x2a, 0x2d, + 0x2d, 0x2d, 0x2e, 0x31, 0x2f, 0x2b, 0x27, 0x25, 0x1b, 0x12, 0x17, 
0x1c, + 0x1b, 0x12, 0x11, 0x11, 0x19, 0x19, 0x13, 0x0c, 0xf4, 0xe6, 0xf2, 0xdb, + 0xc8, 0xcb, 0xb6, 0xad, 0xaa, 0xb7, 0xbd, 0xb0, 0xa2, 0xa9, 0xc2, 0xbf, + 0xc9, 0xc3, 0xc0, 0xbc, 0xb7, 0xc0, 0xc4, 0xc7, 0xc9, 0xc8, 0xc1, 0xc1, + 0xce, 0xde, 0xb8, 0xbf, 0xd7, 0xe2, 0xce, 0xcf, 0xc9, 0xc4, 0xbc, 0xaf, + 0xc9, 0xd4, 0xc1, 0xba, 0xa9, 0xbc, 0xc7, 0xba, 0xc3, 0xc5, 0xbd, 0xb7, + 0xbc, 0xbf, 0xbc, 0xcc, 0xce, 0xcf, 0xbc, 0xaf, 0xb7, 0xba, 0xb7, 0xba, + 0xbc, 0xc4, 0xc1, 0xbd, 0xc1, 0xc1, 0xb6, 0xbb, 0xaa, 0xb5, 0xbf, 0xce, + 0xc4, 0xb2, 0xba, 0xba, 0xa4, 0xb8, 0xbc, 0xbb, 0xb8, 0xb7, 0xb9, 0xcd, + 0xc8, 0xb6, 0xbf, 0xb9, 0xb7, 0xb3, 0xbb, 0xaa, 0xa6, 0xb8, 0xba, 0xb9, + 0xb3, 0xaf, 0xab, 0xb4, 0xab, 0xbb, 0xce, 0xc7, 0xc8, 0xc7, 0xc0, 0xb6, + 0xb1, 0xb9, 0xc7, 0xe0, 0xd1, 0xac, 0xb5, 0xbc, 0xc9, 0xbc, 0xc7, 0xc0, + 0xb6, 0xb9, 0xb7, 0xbe, 0xb6, 0xa4, 0xa7, 0xb6, 0xb7, 0xa7, 0xa8, 0xa7, + 0xaa, 0xbf, 0xb1, 0xb3, 0xae, 0xb5, 0xc9, 0xb8, 0xb2, 0xc8, 0xb9, 0xba, + 0xb1, 0xc5, 0xe7, 0xdc, 0x07, 0x1a, 0x17, 0xe3, 0xe7, 0x02, 0xee, 0x16, + 0x22, 0x27, 0x24, 0x21, 0x1d, 0x12, 0xe5, 0xd6, 0xdf, 0xcc, 0xda, 0xef, + 0xe9, 0xd7, 0xc2, 0xe4, 0xe6, 0xe3, 0xe8, 0xee, 0xf1, 0xf5, 0xf2, 0x02, + 0x14, 0x0e, 0x0c, 0x12, 0x0c, 0x00, 0x0c, 0x11, 0x18, 0x16, 0x11, 0x0f, + 0x20, 0x22, 0x17, 0x18, 0x15, 0x18, 0x24, 0x28, 0x29, 0x27, 0x28, 0x29, + 0x27, 0x28, 0x29, 0x24, 0x1f, 0x17, 0x0f, 0x0c, 0x08, 0x05, 0x06, 0x02, + 0x04, 0x0b, 0x07, 0xf2, 0xd8, 0xd8, 0xd5, 0xc7, 0xb7, 0xbe, 0xac, 0xa5, + 0xa7, 0xb2, 0xae, 0xa5, 0x9f, 0xa6, 0xc7, 0xc3, 0xc5, 0xb7, 0xb1, 0xb9, + 0xd1, 0xc8, 0xb3, 0xad, 0xbd, 0xc2, 0xad, 0xad, 0xcd, 0xd8, 0xcd, 0xad, + 0xc7, 0xd2, 0xc0, 0xbf, 0xc2, 0xc8, 0xb6, 0xb6, 0xbc, 0xc2, 0xc2, 0xbb, + 0xab, 0xad, 0xc2, 0xbe, 0xbd, 0xbf, 0xbc, 0xb1, 0xb6, 0xbf, 0xbb, 0xc5, + 0xc9, 0xc7, 0xb8, 0xaa, 0xb1, 0xbe, 0xbe, 0xba, 0xb4, 0xb9, 0xbe, 0xb9, + 0xb8, 0xbb, 0xbd, 0xbc, 0xa8, 0xb1, 0xb7, 0xc9, 0xc8, 0xb1, 0xb9, 0xbc, + 0xaf, 0xb9, 0xba, 0xb9, 0xb8, 0xb7, 0xbd, 0xca, 0xc1, 0xc7, 0xb5, 
0xb1, + 0xbc, 0xb4, 0xb1, 0xac, 0x9e, 0xac, 0xb9, 0xbc, 0xae, 0xad, 0xa7, 0xaa, + 0xad, 0xb1, 0xc7, 0xc0, 0xbc, 0xbd, 0xc2, 0xc3, 0xc2, 0xc0, 0xc2, 0xcd, + 0xc7, 0xbc, 0xb6, 0xb6, 0xbd, 0xc2, 0xc1, 0xbc, 0xb1, 0xb7, 0xbc, 0xd3, + 0xc7, 0xb2, 0xa9, 0xb8, 0xab, 0xaa, 0xaa, 0xa8, 0xb1, 0xc1, 0xb3, 0xb1, + 0xb0, 0xe2, 0xc2, 0xa7, 0xa9, 0xc2, 0xc5, 0xd4, 0xb1, 0xbe, 0x01, 0xec, + 0xf7, 0x1d, 0x1b, 0xe8, 0xdc, 0xea, 0xef, 0x17, 0x1b, 0x22, 0x22, 0x1f, + 0x1b, 0x11, 0xe4, 0xc2, 0xd2, 0xcc, 0xcc, 0xe9, 0xe7, 0xc9, 0xc7, 0xe2, + 0xd3, 0xd9, 0xdc, 0xe5, 0xe8, 0xf2, 0xe7, 0xf7, 0x0f, 0x08, 0x01, 0x09, + 0x07, 0xf5, 0xfb, 0x07, 0x0b, 0x09, 0x03, 0x08, 0x0b, 0x11, 0x12, 0x00, + 0xe3, 0xf6, 0x17, 0x22, 0x1f, 0x1a, 0x1a, 0x21, 0x1d, 0x15, 0x18, 0x11, + 0x0c, 0x15, 0x15, 0x0f, 0x04, 0x00, 0x00, 0xf8, 0xf8, 0xf5, 0xee, 0xd4, + 0xcc, 0xd1, 0xca, 0xc0, 0xab, 0xbb, 0xb2, 0xa3, 0xac, 0xae, 0xa3, 0xa4, + 0xa2, 0xab, 0xc9, 0xc3, 0xc1, 0xb5, 0xb3, 0xb7, 0xd5, 0xcc, 0xb7, 0xb0, + 0xbb, 0xc0, 0xaf, 0xab, 0xb8, 0xce, 0xe9, 0xc2, 0xcd, 0xc9, 0xb4, 0xb1, + 0xcd, 0xdd, 0xca, 0xb6, 0xbd, 0xc4, 0xbd, 0xba, 0xac, 0xb5, 0xc2, 0xc6, + 0xbc, 0xb8, 0xb0, 0xb8, 0xb4, 0xb7, 0xb7, 0xc0, 0xcc, 0xcb, 0xbd, 0xa9, + 0xaa, 0xad, 0xb4, 0xb7, 0xb5, 0xbc, 0xc2, 0xbb, 0xca, 0xc3, 0xc0, 0xc7, + 0xb7, 0xaf, 0xb7, 0xb5, 0xad, 0xb3, 0xcc, 0xc5, 0xbf, 0xc1, 0xb4, 0xb3, + 0xb7, 0xb3, 0xc1, 0xd4, 0xc2, 0xd2, 0xb7, 0xab, 0xca, 0xbc, 0xb7, 0xb4, + 0xa3, 0xab, 0xbb, 0xbc, 0xaf, 0xaa, 0xad, 0xac, 0xb1, 0xa7, 0xd0, 0xc2, + 0xb5, 0xad, 0xb4, 0xbb, 0xbf, 0xbd, 0xbd, 0xb9, 0xbb, 0xc2, 0xb8, 0xb1, + 0xc7, 0xd5, 0xbf, 0xbd, 0xb5, 0xba, 0xbd, 0xbf, 0xcb, 0xec, 0xc5, 0xc7, + 0xd0, 0xb4, 0xa4, 0xa6, 0xb2, 0xc0, 0xb1, 0xb8, 0xb0, 0xc9, 0xce, 0xb2, + 0xcc, 0xdf, 0xc8, 0xba, 0xb6, 0xec, 0x02, 0xe7, 0xdc, 0x0a, 0x15, 0xec, + 0xd1, 0xd2, 0xdd, 0x02, 0x03, 0x0c, 0x17, 0x17, 0x17, 0x0c, 0xf3, 0xc4, + 0xc1, 0xc2, 0xc2, 0xde, 0xe1, 0xc2, 0xce, 0xe3, 0xd2, 0xd2, 0xd2, 0xdb, + 0xe1, 0xee, 0xe3, 0xf2, 0x05, 0x01, 0xf8, 0xfc, 0x02, 0xf3, 0xee, 
0xf8, + 0xfd, 0xf6, 0xf7, 0xf8, 0x01, 0xfd, 0xf8, 0xf0, 0xde, 0xf2, 0x0f, 0x15, + 0x18, 0x0e, 0x09, 0x14, 0x0f, 0x02, 0xfc, 0xf2, 0xe0, 0xed, 0xf1, 0xfc, + 0xf2, 0xec, 0xeb, 0xe2, 0xde, 0xe1, 0xd5, 0xc9, 0xc7, 0xc1, 0xc2, 0xbd, + 0xac, 0xb2, 0xb5, 0xac, 0xaf, 0xaa, 0xa5, 0xbc, 0xb3, 0xad, 0xc7, 0xbf, + 0xbc, 0xb5, 0xb2, 0xb6, 0xc4, 0xc8, 0xc4, 0xc0, 0xc1, 0xb9, 0xbc, 0xab, + 0xb5, 0xcc, 0xcb, 0xb9, 0xc5, 0xb7, 0xba, 0xae, 0xc1, 0xc7, 0xc6, 0xbe, + 0xbd, 0xb7, 0xbb, 0xb7, 0xae, 0xae, 0xb8, 0xc3, 0xc7, 0xbc, 0xab, 0xc7, + 0xc2, 0xb0, 0xb6, 0xc0, 0xc7, 0xd3, 0xc9, 0xc6, 0xd8, 0xc5, 0xae, 0xae, + 0xb2, 0xbb, 0xc1, 0xc1, 0xfa, 0xd6, 0xce, 0xd2, 0xbb, 0xad, 0xb1, 0xac, + 0xa7, 0xba, 0xcb, 0xc4, 0xc6, 0xc5, 0xc2, 0xb9, 0xb8, 0xba, 0xd6, 0xee, + 0xcd, 0xc6, 0xbb, 0xae, 0xca, 0xc2, 0xbe, 0xbf, 0xb9, 0xc2, 0xc2, 0xbb, + 0xb1, 0xb2, 0xb6, 0xac, 0xac, 0xaf, 0xd2, 0xda, 0xd0, 0xac, 0xb4, 0xb6, + 0xb7, 0xb7, 0xb8, 0xb5, 0xb1, 0xb4, 0xb5, 0xb1, 0xc2, 0xd6, 0xc7, 0xbc, + 0xb7, 0xb7, 0xb1, 0xa9, 0xaa, 0xd2, 0xd4, 0xd4, 0xc6, 0xa7, 0xa0, 0xa7, + 0xb7, 0xc2, 0xa9, 0xd2, 0xd9, 0xbb, 0xc5, 0xd4, 0xf4, 0xea, 0xc3, 0xb2, + 0xbf, 0xd1, 0xdd, 0xd4, 0xd1, 0xf3, 0x0a, 0xeb, 0xca, 0xc8, 0xd5, 0xe5, + 0x06, 0x07, 0xfc, 0xf6, 0x05, 0x06, 0xf3, 0xd1, 0xc1, 0xc4, 0xbd, 0xd2, + 0xd6, 0xc2, 0xc7, 0xd6, 0xd1, 0xc8, 0xcd, 0xd3, 0xdc, 0xe3, 0xe2, 0xed, + 0xf9, 0xf8, 0xec, 0xec, 0xf9, 0xef, 0xe4, 0xe7, 0xe7, 0xe6, 0xea, 0xec, + 0xf0, 0xfb, 0xe8, 0xda, 0xd4, 0xe2, 0x00, 0x07, 0x0d, 0x06, 0xf9, 0x02, + 0xfb, 0xef, 0xf2, 0xeb, 0xdf, 0xe0, 0xcc, 0xd3, 0xd7, 0xd1, 0xd2, 0xd8, + 0xd7, 0xd2, 0xc2, 0xcc, 0xd7, 0xc5, 0xc0, 0xb7, 0xb2, 0xb6, 0xb5, 0xac, + 0xaf, 0xc4, 0xbe, 0xbb, 0xae, 0xa4, 0xbf, 0xaa, 0xb9, 0xb6, 0xb2, 0xb7, + 0xbc, 0xbb, 0xae, 0xb2, 0xc9, 0xba, 0xcb, 0xb8, 0xbe, 0xc6, 0xb9, 0xbb, + 0xb4, 0xaf, 0xb9, 0xb0, 0xb3, 0xb9, 0xcd, 0xcd, 0xc6, 0xc2, 0xbd, 0xb4, + 0xa7, 0xb4, 0xc3, 0xbf, 0xbf, 0xcc, 0xbd, 0xbd, 0xcc, 0xbb, 0xb7, 0xbb, + 0xbc, 0xce, 0xd2, 0xcf, 0xe1, 0xcd, 0xb0, 0xac, 0xae, 0xbc, 0xc2, 
0xad, + 0xbe, 0xc3, 0xe2, 0xd7, 0xaf, 0xa9, 0xa8, 0x9f, 0xac, 0xc3, 0xc3, 0xc0, + 0xc3, 0xc5, 0xcc, 0xc3, 0xbf, 0xbb, 0xcd, 0xfc, 0xcf, 0xc5, 0xcb, 0xc0, + 0xc1, 0xb2, 0xb8, 0xc7, 0xb6, 0xd0, 0xd7, 0xbd, 0xb2, 0xb5, 0xac, 0xb1, + 0xcb, 0xe7, 0xd9, 0xc6, 0xc3, 0xb6, 0xb6, 0xba, 0xb9, 0xb6, 0xb1, 0xb1, + 0xad, 0xac, 0xb0, 0xb2, 0xb2, 0xd3, 0xc3, 0xbb, 0xb5, 0xb4, 0xae, 0xac, + 0xae, 0xbc, 0xc2, 0xc7, 0xc7, 0xb2, 0xa1, 0xca, 0xcf, 0xcc, 0xc3, 0xc7, + 0xc9, 0xc5, 0xb9, 0xd3, 0xe7, 0xd4, 0xd2, 0xd7, 0xd0, 0xcc, 0xd3, 0xcc, + 0xc7, 0xdd, 0xfb, 0xe9, 0xd4, 0xc7, 0xcf, 0xd7, 0xf1, 0x09, 0x08, 0xee, + 0xd1, 0xe5, 0xe5, 0xd7, 0xd3, 0xc7, 0xc2, 0xbc, 0xbb, 0xba, 0xc0, 0xd9, + 0xe3, 0xcb, 0xc8, 0xc6, 0xd2, 0xd7, 0xd8, 0xe6, 0xf0, 0xea, 0xdb, 0xdf, + 0xee, 0xec, 0xde, 0xd2, 0xce, 0xd1, 0xd8, 0xdc, 0xd9, 0xf3, 0xe7, 0xd6, + 0xcc, 0xdd, 0xf1, 0xfd, 0x07, 0x07, 0xf7, 0xf0, 0xeb, 0xe1, 0xd6, 0xea, + 0xda, 0xcb, 0xb5, 0xb7, 0xb6, 0xb8, 0xaf, 0xae, 0xb1, 0xbb, 0xbf, 0xc3, + 0xcc, 0xbc, 0xba, 0xb8, 0xbd, 0xb9, 0xb2, 0xb2, 0xb3, 0xd2, 0xeb, 0xb9, + 0xb1, 0xad, 0xc2, 0xa9, 0xb3, 0xb1, 0xac, 0xad, 0xbe, 0xbc, 0xb0, 0xbc, + 0xcb, 0xb7, 0xb9, 0xc4, 0xc5, 0xc6, 0xbf, 0xb9, 0xbf, 0xac, 0xae, 0xb3, + 0xbc, 0xbc, 0xd2, 0xc8, 0xd0, 0xcc, 0xbe, 0xb4, 0xb4, 0xc4, 0xca, 0xbc, + 0xc1, 0xd3, 0xd5, 0xc3, 0xe7, 0xc7, 0xbe, 0xbc, 0xbc, 0xc5, 0xd9, 0xd2, + 0xc8, 0xbd, 0xb3, 0xb1, 0xb1, 0xbd, 0xbe, 0xaf, 0xca, 0xcd, 0xc2, 0xc3, + 0xac, 0xa9, 0xa8, 0xa5, 0xb3, 0xc3, 0xbd, 0xbc, 0xbd, 0xbe, 0xc7, 0xcd, + 0xc2, 0xbd, 0xc9, 0xe2, 0xc3, 0xc6, 0xd5, 0xcf, 0xc0, 0xb2, 0xd6, 0xd9, + 0xc2, 0xcd, 0xf4, 0xf1, 0xb4, 0xac, 0xad, 0xc7, 0xee, 0xe7, 0xd5, 0xbc, + 0xbc, 0xbb, 0xd2, 0xbd, 0xb7, 0xb7, 0xbc, 0xb2, 0xa7, 0xac, 0xa8, 0xad, + 0xb4, 0xcf, 0xc3, 0xbb, 0xb7, 0xb8, 0xc1, 0xc7, 0xc6, 0xc4, 0xc6, 0xc9, + 0xd2, 0xcd, 0xc5, 0xd5, 0xb9, 0xc9, 0xd2, 0xe2, 0xc7, 0xc4, 0xb7, 0xcb, + 0xd4, 0xc7, 0xcd, 0xe7, 0xbb, 0xce, 0xca, 0xc7, 0xc4, 0xd2, 0xef, 0xe2, + 0xd0, 0xc7, 0xcf, 0xe2, 0xd6, 0xf2, 0xfc, 0xfa, 0xdf, 0xc2, 0xc9, 
0xcf, + 0xce, 0xc7, 0xbd, 0xb1, 0xb2, 0xb9, 0xb8, 0xd4, 0xe0, 0xc7, 0xc7, 0xc0, + 0xc8, 0xc3, 0xcf, 0xdd, 0xe3, 0xd7, 0xd2, 0xd7, 0xde, 0xe0, 0xd7, 0xc5, + 0xc9, 0xc2, 0xc9, 0xd5, 0xd6, 0xda, 0xe3, 0xd6, 0xce, 0xc9, 0xd8, 0xe6, + 0xf8, 0x01, 0x02, 0xe8, 0xd4, 0xdf, 0xd7, 0xdb, 0xd7, 0xb9, 0xab, 0xab, + 0xac, 0xaa, 0x9b, 0x9c, 0xa8, 0xb2, 0xb1, 0xb1, 0xb6, 0xb1, 0xb6, 0xad, + 0xe1, 0xd8, 0xbe, 0xcc, 0xbd, 0xc7, 0xf4, 0xb7, 0xa8, 0xc7, 0xcc, 0xc7, + 0xbe, 0xb2, 0xb1, 0xb1, 0xbb, 0xb6, 0xaf, 0xb9, 0xbd, 0xb8, 0xc5, 0xc4, + 0xc9, 0xc5, 0xbb, 0xac, 0xad, 0xa1, 0xb1, 0xba, 0xc7, 0xcc, 0xe2, 0xc3, + 0xd2, 0xc9, 0xb7, 0xbd, 0xc1, 0xcc, 0xcc, 0xc6, 0xd0, 0xd1, 0xdf, 0xc4, + 0xc7, 0xc0, 0xb7, 0xb8, 0xbd, 0xc2, 0xce, 0xda, 0xd2, 0xc5, 0xb3, 0xac, + 0xb4, 0xbc, 0xb8, 0xa9, 0xc5, 0xc5, 0xa8, 0xb1, 0xac, 0xaf, 0xbb, 0xaf, + 0xb7, 0xbd, 0xb5, 0xb4, 0xb8, 0xc2, 0xe2, 0xd4, 0xc7, 0xbd, 0xc8, 0xdc, + 0xc5, 0xd1, 0xdc, 0xe5, 0xdf, 0xea, 0xf9, 0xde, 0xc2, 0xd7, 0x11, 0x12, + 0xca, 0xca, 0xf4, 0xdd, 0xe4, 0xdd, 0xd7, 0xb9, 0xdb, 0x01, 0xec, 0xc0, + 0xba, 0xbc, 0xbe, 0xba, 0xb9, 0xaf, 0xac, 0xa9, 0xb3, 0xc3, 0xc2, 0xb1, + 0xb5, 0xbb, 0xbd, 0xbc, 0xc0, 0xb7, 0xcc, 0xd3, 0x0d, 0xd6, 0xcb, 0xd2, + 0xc2, 0xca, 0x02, 0x09, 0xe8, 0xe5, 0xd3, 0xe1, 0xd0, 0xc8, 0xcc, 0xd5, + 0xbc, 0xc7, 0xca, 0xc3, 0xc2, 0xc7, 0xd7, 0xc9, 0xcc, 0xc9, 0xcc, 0xd7, + 0xc6, 0xce, 0xdf, 0xe6, 0xd3, 0xc8, 0xbc, 0xb7, 0xbe, 0xcf, 0xbc, 0xb2, + 0xb4, 0xba, 0xbe, 0xbc, 0xbf, 0xbe, 0xc2, 0xc0, 0xc1, 0xbd, 0xcc, 0xd1, + 0xd3, 0xcb, 0xc2, 0xcf, 0xd0, 0xdc, 0xd9, 0xc1, 0xc4, 0xbe, 0xbf, 0xc3, + 0xce, 0xc7, 0xc9, 0xcf, 0xc3, 0xbd, 0xc9, 0xd5, 0xe5, 0xeb, 0xf6, 0xf0, + 0xd8, 0xd7, 0xcd, 0xc8, 0xca, 0xb6, 0xa7, 0xa6, 0xab, 0xaa, 0x9b, 0x9c, + 0xa8, 0xb1, 0xb3, 0xbf, 0xb4, 0xa9, 0xb3, 0xc1, 0xf1, 0xbc, 0xbc, 0xde, + 0xe1, 0xd0, 0xf1, 0xd7, 0xbb, 0xc7, 0xc9, 0xd7, 0xcc, 0xbe, 0xb3, 0xb7, + 0xc2, 0xbb, 0xc8, 0xb8, 0xb1, 0xbc, 0xc3, 0xc3, 0xc7, 0xbb, 0xb3, 0xb1, + 0xa4, 0xa4, 0xb7, 0xc9, 0xc8, 0xcc, 0xd1, 0xc7, 0xe2, 0xd3, 0xbf, 
0xbc, + 0xbf, 0xd2, 0xc5, 0xcb, 0xc7, 0xd4, 0xe1, 0xcc, 0xc0, 0xbf, 0xb7, 0xb7, + 0xba, 0xbe, 0xc4, 0xd2, 0xcc, 0xc8, 0xbb, 0xb2, 0xb1, 0xc5, 0xc1, 0xb6, + 0xbd, 0xcb, 0xb9, 0xba, 0x9e, 0x9e, 0x9f, 0x9f, 0x9a, 0x96, 0x96, 0x97, + 0x96, 0x96, 0x96, 0x98, 0x98, 0x97, 0x96, 0x97, 0x97, 0x97, 0x99, 0x99, + 0x99, 0x98, 0x98, 0x9a, 0xa4, 0xce, 0xed, 0x20, 0x41, 0x39, 0x25, 0x1f, + 0x1e, 0x1f, 0x26, 0x32, 0x4c, 0x62, 0x20, 0xaf, 0xa7, 0xa6, 0xa1, 0xa2, + 0xa2, 0xa1, 0xa3, 0xa2, 0x9a, 0x99, 0x98, 0x98, 0x97, 0x96, 0x99, 0xa4, + 0xb0, 0xb4, 0xb5, 0xb5, 0xaf, 0xa9, 0xa7, 0xa7, 0xa7, 0xa9, 0xb0, 0xb3, + 0xb3, 0xb4, 0xb5, 0xb3, 0xb6, 0xb3, 0xaf, 0xb0, 0xb2, 0xb2, 0xaf, 0xae, + 0xa2, 0x9a, 0x98, 0xae, 0xc7, 0xc4, 0xc2, 0xc0, 0xac, 0x9d, 0x98, 0xa2, + 0xbb, 0xc5, 0xc2, 0xbf, 0xba, 0xbb, 0xc7, 0xd4, 0xc5, 0xbc, 0xbc, 0xbc, + 0xba, 0xb1, 0xb0, 0xba, 0xb5, 0xb3, 0xbd, 0xb8, 0xb4, 0xbd, 0xbe, 0xb7, + 0xb0, 0xb2, 0xb5, 0xb6, 0xbc, 0xad, 0x99, 0xa2, 0xa9, 0xab, 0xae, 0xac, + 0xaf, 0xb5, 0xb6, 0xbb, 0xb8, 0xa6, 0xa2, 0xa3, 0xa0, 0x9d, 0x9c, 0x9a, + 0x96, 0x96, 0x9c, 0xa2, 0x9b, 0x9a, 0x96, 0x96, 0x96, 0x97, 0xa4, 0xb4, + 0xb6, 0xb8, 0xba, 0xbb, 0xa9, 0x97, 0x96, 0x97, 0x97, 0x9b, 0x9d, 0x98, + 0x97, 0x96, 0x96, 0x96, 0x95, 0x96, 0x96, 0x98, 0x9a, 0x99, 0x98, 0x98, + 0x98, 0x99, 0x9f, 0xa2, 0xa5, 0xa9, 0xad, 0xad, 0xa9, 0x9f, 0x99, 0x9b, + 0xa2, 0xb1, 0xbe, 0xc2, 0xb3, 0x9d, 0x97, 0x97, 0x97, 0x97, 0x98, 0x98, + 0xa1, 0x9f, 0x97, 0x96, 0x99, 0x9c, 0x9c, 0x9b, 0x9a, 0x9c, 0x9d, 0x97, + 0x96, 0x96, 0x96, 0x96, 0x96, 0x96, 0x96, 0x96, 0x96, 0x96, 0x9a, 0x9a, + 0x9e, 0x9d, 0x9d, 0x9c, 0x97, 0x96, 0x98, 0x98, 0x99, 0x99, 0x98, 0x9a, + 0x99, 0x99, 0x99, 0x97, 0x96, 0x97, 0x9a, 0x9b, 0x9a, 0x9a, 0x9b, 0x9c, + 0xaa, 0xf3, 0x28, 0x3e, 0x3d, 0x2f, 0x1f, 0x18, 0x1b, 0x1f, 0x22, 0x2a, + 0x45, 0x5f, 0x33, 0xb8, 0xac, 0xa7, 0xa1, 0xa2, 0xa4, 0xa5, 0xa7, 0xa4, + 0x9c, 0x98, 0x98, 0x98, 0x97, 0x96, 0x96, 0x99, 0xa4, 0xaf, 0xb5, 0xb2, + 0xa8, 0xa4, 0xa3, 0xa7, 0xa9, 0xaf, 0xb5, 0xb3, 0xb2, 0xb3, 0xb0, 
0xb0, + 0xb4, 0xb2, 0xae, 0xb0, 0xb0, 0xb1, 0xaf, 0xaa, 0xa5, 0xa9, 0xaa, 0xb9, + 0xc8, 0xba, 0xaf, 0xa9, 0x9c, 0x98, 0x97, 0xad, 0xc4, 0xc5, 0xc6, 0xc5, + 0xc1, 0xbe, 0xc2, 0xd5, 0xc9, 0xba, 0xbc, 0xbf, 0xbe, 0xb3, 0xb4, 0xbb, + 0xb2, 0xb7, 0xc2, 0xbf, 0xb5, 0xba, 0xb8, 0xb3, 0xb2, 0xb1, 0xb2, 0xba, + 0xbb, 0xaa, 0x98, 0x98, 0x9e, 0xa9, 0xae, 0xac, 0xac, 0xb0, 0xb3, 0xbc, + 0xbb, 0xa7, 0xa2, 0xa6, 0xa3, 0xa1, 0xa1, 0x9c, 0x97, 0x96, 0x9e, 0xa3, + 0x9d, 0x9f, 0x9c, 0x97, 0x97, 0x97, 0x99, 0xa0, 0xaa, 0xb5, 0xbb, 0xbe, + 0xb0, 0x99, 0x96, 0x97, 0x96, 0x98, 0x9a, 0x99, 0x98, 0x96, 0x96, 0x96, + 0x96, 0x96, 0x96, 0x98, 0x9a, 0x98, 0x99, 0x98, 0x99, 0x9b, 0xa4, 0xa9, + 0xab, 0xae, 0xaf, 0xad, 0xaa, 0xa3, 0x9c, 0x9b, 0x9c, 0xa6, 0xb8, 0xbe, + 0xa9, 0x9c, 0x9a, 0x99, 0x9c, 0x9d, 0x9c, 0x97, 0x9d, 0xa1, 0x99, 0x97, + 0x9c, 0x9c, 0x98, 0x99, 0x98, 0x9a, 0x9c, 0x97, 0x97, 0x97, 0x98, 0x97, + 0x96, 0x96, 0x96, 0x96, 0x96, 0x97, 0x99, 0x9c, 0x9e, 0x9d, 0x9c, 0x98, + 0x97, 0x97, 0x99, 0x9c, 0x9c, 0x9c, 0x9a, 0x9a, 0x9b, 0x9c, 0x99, 0x99, + 0x97, 0x97, 0x9a, 0x9c, 0x9c, 0x9a, 0x9c, 0xa0, 0xae, 0x09, 0x36, 0x3e, + 0x32, 0x24, 0x1c, 0x12, 0x18, 0x1e, 0x21, 0x29, 0x3f, 0x5b, 0x4c, 0xd5, + 0xb0, 0xa8, 0xa3, 0xa3, 0xa5, 0xa7, 0xa9, 0xa5, 0x9d, 0x98, 0x98, 0x98, + 0x97, 0x97, 0x98, 0x97, 0x99, 0xa1, 0xa6, 0xaa, 0x9e, 0x9a, 0xa0, 0xa9, + 0xaa, 0xaf, 0xb1, 0xb1, 0xb3, 0xb5, 0xb2, 0xb0, 0xaf, 0xb0, 0xb0, 0xaf, + 0xb0, 0xaf, 0xac, 0xa6, 0xac, 0xc3, 0xca, 0xc9, 0xca, 0xc2, 0xbb, 0xb5, + 0xaf, 0xa9, 0xa5, 0xba, 0xc8, 0xc5, 0xc8, 0xc3, 0xbd, 0xc1, 0xc1, 0xd5, + 0xce, 0xbf, 0xbf, 0xbf, 0xb5, 0xae, 0xb5, 0xbc, 0xb2, 0xb5, 0xbb, 0xba, + 0xb8, 0xb9, 0xb8, 0xb2, 0xb4, 0xb5, 0xb4, 0xb9, 0xba, 0xa9, 0x97, 0x9d, + 0xad, 0xad, 0xaf, 0xac, 0xa9, 0xb2, 0xb2, 0xbb, 0xbb, 0xa8, 0xa0, 0x9f, + 0xa3, 0xa2, 0x9f, 0x9c, 0x98, 0x9e, 0xac, 0xad, 0xa9, 0xa9, 0xa9, 0xa1, + 0x9a, 0x96, 0x96, 0x97, 0x9a, 0xa4, 0xb5, 0xbf, 0xb5, 0x9d, 0x96, 0x96, + 0x96, 0x99, 0x9b, 0x99, 0x98, 0x96, 0x96, 0x96, 0x96, 0x96, 0x96, 
0x98, + 0x9a, 0x99, 0x98, 0x97, 0x99, 0x9d, 0xa7, 0xac, 0xaf, 0xae, 0xad, 0xad, + 0xac, 0xa5, 0x9d, 0x9d, 0x9e, 0xa0, 0xad, 0xb5, 0xa2, 0x9a, 0x9c, 0x9c, + 0xa6, 0xb0, 0xad, 0x9e, 0x9f, 0xa8, 0xa6, 0xa1, 0x9e, 0x9d, 0x99, 0x98, + 0x97, 0x97, 0x98, 0x98, 0x9b, 0x9c, 0x9f, 0x9d, 0x97, 0x96, 0x96, 0x96, + 0x96, 0x97, 0x98, 0x9a, 0x9a, 0x9d, 0x9e, 0x98, 0x97, 0x99, 0x9c, 0x9b, + 0x9c, 0x9c, 0x9c, 0x9a, 0x9c, 0x9c, 0x9b, 0x98, 0x97, 0x97, 0x9c, 0x9c, + 0x9c, 0x99, 0x9b, 0x9c, 0xae, 0x0d, 0x37, 0x37, 0x26, 0x17, 0x13, 0x15, + 0x16, 0x1c, 0x21, 0x29, 0x3e, 0x57, 0x58, 0x00, 0xc9, 0xb1, 0xa2, 0xa5, + 0xa7, 0xa7, 0xa9, 0xa7, 0x9d, 0x9a, 0x98, 0x9a, 0x9e, 0xa1, 0xa1, 0x9c, + 0x9c, 0x9b, 0x9c, 0x9e, 0x9c, 0x98, 0x9e, 0xa9, 0xab, 0xaa, 0xae, 0xae, + 0xaf, 0xaf, 0xad, 0xaf, 0xae, 0xaf, 0xaf, 0xad, 0xae, 0xaa, 0xa2, 0xa0, + 0xa6, 0xad, 0xaf, 0xae, 0xb1, 0xba, 0xc1, 0xc4, 0xc5, 0xc7, 0xc6, 0xce, + 0xd3, 0xd2, 0xcd, 0xc5, 0xbf, 0xc1, 0xc2, 0xcb, 0xcf, 0xc1, 0xc0, 0xbb, + 0xaa, 0xaa, 0xb4, 0xbb, 0xaf, 0xb1, 0xb2, 0xb2, 0xb5, 0xb3, 0xb2, 0xb1, + 0xb4, 0xb5, 0xb3, 0xb4, 0xb5, 0xa6, 0x98, 0x9d, 0xaf, 0xa9, 0xac, 0xac, + 0xa9, 0xb5, 0xb6, 0xbb, 0xbb, 0xa9, 0x9f, 0x9b, 0x9c, 0x9d, 0x9c, 0x9c, + 0x99, 0xa7, 0xb3, 0xb1, 0xad, 0xad, 0xae, 0xaa, 0xa2, 0x98, 0x96, 0x96, + 0x96, 0x96, 0xa2, 0xb9, 0xb7, 0xa6, 0x9d, 0x98, 0x96, 0x98, 0x9a, 0x98, + 0x97, 0x96, 0x96, 0x96, 0x95, 0x97, 0x99, 0x9b, 0x9b, 0x9b, 0x99, 0x99, + 0x99, 0x9c, 0xa8, 0xac, 0xaf, 0xad, 0xae, 0xaf, 0xad, 0xa9, 0x9f, 0x9c, + 0x9e, 0xa0, 0xa0, 0xa4, 0x9c, 0x99, 0x9a, 0xa0, 0xb3, 0xb9, 0xb8, 0xb2, + 0xb4, 0xb8, 0xbb, 0xb6, 0xa9, 0xa0, 0x9f, 0x9c, 0x99, 0x96, 0x97, 0x98, + 0x9c, 0x9d, 0x9f, 0x9d, 0x97, 0x96, 0x97, 0x96, 0x96, 0x96, 0x96, 0x97, + 0x96, 0x9a, 0x9d, 0x98, 0x98, 0x9b, 0x9c, 0x9c, 0x9d, 0x9d, 0x9c, 0x9b, + 0x9d, 0x9d, 0x9a, 0x97, 0x97, 0x98, 0x9a, 0x99, 0x98, 0x99, 0x9a, 0x9c, + 0xaf, 0x12, 0x34, 0x30, 0x1e, 0x0c, 0x0b, 0x13, 0x17, 0x1c, 0x24, 0x28, + 0x3e, 0x55, 0x5d, 0x2a, 0xec, 0xce, 0xa9, 0xa7, 0xa2, 0xa4, 0xa9, 
0xa3, + 0x9b, 0x9b, 0x9a, 0xa4, 0xac, 0xad, 0xac, 0xa9, 0xa6, 0xa2, 0x9b, 0x9a, + 0x98, 0x97, 0x9c, 0xa7, 0xac, 0xaa, 0xa6, 0xa9, 0xab, 0xa9, 0xab, 0xae, + 0xae, 0xaf, 0xad, 0xab, 0xa9, 0xa2, 0x9e, 0xa1, 0xaa, 0xae, 0xad, 0xac, + 0xaa, 0xa7, 0xa3, 0xa1, 0xa0, 0xa1, 0xa4, 0xb6, 0xc7, 0xca, 0xcb, 0xc8, + 0xc2, 0xbe, 0xbd, 0xbe, 0xc9, 0xc6, 0xc3, 0xba, 0xa9, 0xaa, 0xb5, 0xba, + 0xac, 0xb5, 0xb9, 0xb5, 0xb5, 0xb0, 0xb3, 0xb6, 0xb5, 0xb3, 0xb2, 0xb3, + 0xb4, 0xa8, 0x98, 0x96, 0x96, 0x99, 0xa6, 0xac, 0xab, 0xb4, 0xb7, 0xbf, + 0xbc, 0xa9, 0x9d, 0x9d, 0x9c, 0x9c, 0x9b, 0x9a, 0x9a, 0xab, 0xb4, 0xb0, + 0xae, 0xb0, 0xaf, 0xad, 0xab, 0xa0, 0x97, 0x96, 0x96, 0x96, 0x9c, 0xa8, + 0xb2, 0xb5, 0xb4, 0xa7, 0x97, 0x98, 0x9b, 0x98, 0x96, 0x96, 0x98, 0x97, + 0x96, 0x99, 0x9e, 0x9d, 0x9c, 0xa0, 0x9e, 0x99, 0x9b, 0x9f, 0xa9, 0xaa, + 0xac, 0xaf, 0xaf, 0xae, 0xac, 0xa8, 0xa2, 0x9e, 0x9e, 0xa0, 0x9e, 0x9a, + 0x97, 0x99, 0x9b, 0x9d, 0xaf, 0xb7, 0xba, 0xbd, 0xbe, 0xbc, 0xbe, 0xbf, + 0xb7, 0xa9, 0xa7, 0xa5, 0x9c, 0x97, 0x96, 0x96, 0x98, 0x9c, 0x9f, 0x9c, + 0x96, 0x96, 0x96, 0x96, 0x96, 0x97, 0x96, 0x96, 0x96, 0x97, 0x9a, 0x98, + 0x98, 0x9b, 0x9c, 0x9b, 0x9b, 0x9c, 0x9c, 0x9c, 0x9c, 0x9c, 0x99, 0x97, + 0x96, 0x98, 0x99, 0x99, 0x9a, 0x9c, 0x9b, 0x9d, 0xbb, 0x21, 0x31, 0x25, + 0x18, 0x0d, 0x0f, 0x10, 0x18, 0x1e, 0x26, 0x2b, 0x3f, 0x56, 0x5f, 0x49, + 0x23, 0xfe, 0xc6, 0xb8, 0xac, 0xa1, 0x9f, 0x9e, 0x9c, 0x9a, 0x9e, 0xad, + 0xaf, 0xaf, 0xae, 0xad, 0xae, 0xaa, 0x9c, 0x98, 0x9b, 0x9e, 0xa5, 0xaa, + 0xaa, 0xaa, 0xa4, 0xa4, 0xa9, 0xa9, 0xa9, 0xac, 0xae, 0xac, 0xa8, 0xa9, + 0xa9, 0xa8, 0xa7, 0xad, 0xb3, 0xb3, 0xb2, 0xb1, 0xb2, 0xad, 0xa2, 0x9f, + 0x9c, 0x9c, 0x9d, 0xa4, 0xb4, 0xbc, 0xbf, 0xc1, 0xc1, 0xbd, 0xbb, 0xbd, + 0xc9, 0xca, 0xc6, 0xb7, 0xa7, 0xaa, 0xb8, 0xb9, 0xab, 0xb3, 0xc3, 0xc1, + 0xb7, 0xb5, 0xb8, 0xbc, 0xc0, 0xb8, 0xb2, 0xb1, 0xb1, 0xa6, 0x99, 0x96, + 0x96, 0x98, 0x9e, 0xa5, 0xac, 0xb1, 0xb6, 0xc1, 0xc1, 0xac, 0xa3, 0xa9, + 0xa7, 0xa2, 0x9b, 0x98, 0x9a, 0xa4, 0xb2, 0xb3, 0xb2, 0xb2, 0xb2, 
0xb2, + 0xb3, 0xa9, 0x9a, 0x96, 0x96, 0x96, 0x98, 0x9c, 0xa4, 0xb3, 0xb6, 0xb1, + 0x9c, 0x9a, 0x9c, 0x97, 0x96, 0x96, 0x97, 0x97, 0x97, 0x9a, 0x9f, 0x9d, + 0x9c, 0x9e, 0x9e, 0x9b, 0x9c, 0x9d, 0xa2, 0xa7, 0xab, 0xae, 0xaf, 0xaf, + 0xae, 0xa9, 0xa2, 0xa0, 0xa0, 0x9f, 0x9c, 0x99, 0x99, 0x9a, 0x9b, 0x98, + 0x9e, 0xaf, 0xb9, 0xbd, 0xbd, 0xbb, 0xbc, 0xbf, 0xbd, 0xb2, 0xad, 0xad, + 0xa1, 0x97, 0x96, 0x97, 0x97, 0x99, 0x9e, 0x9f, 0x98, 0x96, 0x96, 0x96, + 0x97, 0x97, 0x97, 0x97, 0x97, 0x97, 0x98, 0x99, 0x97, 0x9a, 0x9c, 0x9c, + 0x9b, 0x9d, 0x9c, 0x9c, 0x9c, 0x9b, 0x98, 0x96, 0x97, 0x96, 0x99, 0x9d, + 0xa1, 0xa2, 0xa7, 0xa8, 0xdd, 0x2f, 0x2e, 0x1c, 0x14, 0x15, 0x14, 0x16, + 0x18, 0x20, 0x26, 0x31, 0x43, 0x57, 0x61, 0x5a, 0x4a, 0x22, 0xf8, 0xde, + 0xd2, 0xa7, 0x9b, 0x9d, 0x9d, 0x99, 0xa1, 0xaf, 0xb1, 0xb0, 0xaf, 0xaf, + 0xb0, 0xaf, 0xa2, 0x9f, 0xa9, 0xaf, 0xb1, 0xaf, 0xad, 0xaa, 0xa4, 0xa2, + 0xa4, 0xa5, 0xa8, 0xad, 0xaf, 0xa3, 0x9f, 0xa2, 0xa6, 0xae, 0xb5, 0xbb, + 0xbb, 0xbc, 0xb8, 0xb4, 0xb6, 0xb3, 0xa8, 0xa0, 0xa1, 0xa2, 0xa0, 0x9c, + 0x9d, 0xa2, 0xa9, 0xb6, 0xc1, 0xc1, 0xbf, 0xc0, 0xc9, 0xcf, 0xc6, 0xb7, + 0xa8, 0xa9, 0xb5, 0xb5, 0xac, 0xb5, 0xc0, 0xc2, 0xb9, 0xb5, 0xb5, 0xbe, + 0xcc, 0xc8, 0xbc, 0xb6, 0xb5, 0xa9, 0x9c, 0x96, 0x96, 0x97, 0x98, 0x9c, + 0xa7, 0xad, 0xb2, 0xbf, 0xc1, 0xb0, 0xb0, 0xbc, 0xbe, 0xbd, 0xb0, 0xa1, + 0x9f, 0x9e, 0xa5, 0xb0, 0xb5, 0xb6, 0xb5, 0xb5, 0xb2, 0xad, 0xa5, 0x9a, + 0x96, 0x96, 0x96, 0x9b, 0x9c, 0xa3, 0xab, 0xab, 0x9c, 0x9c, 0x9c, 0x99, + 0x96, 0x96, 0x96, 0x96, 0x97, 0x98, 0x9c, 0x9d, 0x9d, 0xa2, 0xa1, 0x9b, + 0x9c, 0x9c, 0x9c, 0x9f, 0xa5, 0xab, 0xae, 0xaf, 0xae, 0xa7, 0xa2, 0xa0, + 0xa2, 0xa2, 0x9e, 0x9f, 0x9f, 0x9e, 0x9e, 0x9c, 0x9a, 0xa5, 0xb8, 0xba, + 0xbb, 0xba, 0xbb, 0xbc, 0xb5, 0xaf, 0xac, 0xac, 0xa2, 0x97, 0x96, 0x97, + 0x98, 0x97, 0x98, 0x9c, 0x97, 0x96, 0x97, 0x96, 0x96, 0x97, 0x97, 0x97, + 0x96, 0x97, 0x96, 0x98, 0x96, 0x98, 0x9c, 0x9c, 0x9e, 0x9f, 0xa0, 0xa0, + 0xa0, 0x9c, 0x98, 0x97, 0x97, 0x97, 0x9f, 0xa7, 0xad, 0xaf, 0xbe, 
0xcd, + 0x10, 0x37, 0x2b, 0x18, 0x08, 0x19, 0x13, 0x20, 0x1d, 0x24, 0x2c, 0x39, + 0x4a, 0x5d, 0x64, 0x63, 0x5c, 0x44, 0x25, 0x18, 0xf5, 0xae, 0x9c, 0x9f, + 0x9a, 0x99, 0xa1, 0xa6, 0xaa, 0xb0, 0xb1, 0xb0, 0xb1, 0xaf, 0xaf, 0xaf, + 0xb1, 0xb3, 0xb2, 0xb2, 0xb1, 0xad, 0xa8, 0xa6, 0xa2, 0xa3, 0xa8, 0xb0, + 0xab, 0x9f, 0x9e, 0x9e, 0x9e, 0xa9, 0xbb, 0xc1, 0xc2, 0xc0, 0xbe, 0xbd, + 0xba, 0xb5, 0xb0, 0xab, 0xad, 0xaf, 0xaf, 0xa5, 0x9d, 0x9c, 0x9e, 0xa2, + 0xb3, 0xbd, 0xbe, 0xc6, 0xca, 0xcd, 0xc3, 0xb5, 0xab, 0xac, 0xb6, 0xb5, + 0xad, 0xb2, 0xb1, 0xb5, 0xbb, 0xb8, 0xb5, 0xbd, 0xcd, 0xc7, 0xc4, 0xbd, + 0xb7, 0xa7, 0x9d, 0x97, 0x96, 0x97, 0x99, 0x98, 0xa2, 0xaa, 0xb2, 0xbd, + 0xc1, 0xb3, 0xb9, 0xc1, 0xc3, 0xc3, 0xc2, 0xba, 0xaa, 0xa0, 0xa2, 0xaf, + 0xb8, 0xbb, 0xb6, 0xb2, 0xad, 0xab, 0xaf, 0xa3, 0x98, 0x96, 0x96, 0x9b, + 0x9a, 0x98, 0x9c, 0xa2, 0xa2, 0xa1, 0xa0, 0x9c, 0x99, 0x96, 0x96, 0x96, + 0x97, 0x98, 0x99, 0x9d, 0xa1, 0xa3, 0xa0, 0x9c, 0x9b, 0x9c, 0x9b, 0x9c, + 0x9f, 0xa3, 0xad, 0xb0, 0xad, 0xa6, 0xa4, 0xa2, 0xa4, 0xa2, 0xa2, 0xa2, + 0xa0, 0xa0, 0xa0, 0xa0, 0x9b, 0x9d, 0xb2, 0xbb, 0xbc, 0xbb, 0xb8, 0xb6, + 0xaf, 0xaa, 0xac, 0xac, 0xa2, 0x9a, 0x97, 0x97, 0x97, 0x97, 0x96, 0x96, + 0x96, 0x96, 0x96, 0x96, 0x96, 0x99, 0x99, 0x99, 0x96, 0x97, 0x98, 0x9b, + 0x98, 0x97, 0x9b, 0x9d, 0xa2, 0xa2, 0xa1, 0xa1, 0x9d, 0x99, 0x99, 0x9a, + 0x98, 0x99, 0xa2, 0xaa, 0xb5, 0xbb, 0xdb, 0xfb, 0x33, 0x3c, 0x2a, 0x13, + 0x03, 0x12, 0x1d, 0x27, 0x2b, 0x2c, 0x38, 0x43, 0x53, 0x62, 0x67, 0x68, + 0x63, 0x54, 0x45, 0x3e, 0x0c, 0xb8, 0xa6, 0xa2, 0x9e, 0x9c, 0x9d, 0x9c, + 0xa0, 0xab, 0xb1, 0xaf, 0xaf, 0xb0, 0xb3, 0xb3, 0xb5, 0xb5, 0xb3, 0xb4, + 0xb4, 0xb0, 0xad, 0xa9, 0xa2, 0xa1, 0xa6, 0xaa, 0xa1, 0x9d, 0x9f, 0xa0, + 0x9f, 0xa2, 0xaf, 0xbe, 0xc3, 0xbf, 0xbc, 0xba, 0xb7, 0xb2, 0xb2, 0xb2, + 0xb3, 0xb4, 0xb3, 0xac, 0xa5, 0xa0, 0x9d, 0x9c, 0x9e, 0xa4, 0xaf, 0xc1, + 0xbe, 0xb9, 0xb6, 0xb5, 0xb3, 0xaf, 0xb8, 0xb3, 0xae, 0xaf, 0xb0, 0xbb, + 0xc5, 0xc4, 0xba, 0xbc, 0xcc, 0xc5, 0xc3, 0xbe, 0xc4, 0xc4, 0xac, 
0x9b, + 0x96, 0x97, 0x99, 0x99, 0x9d, 0xa7, 0xaf, 0xc0, 0xc2, 0xb4, 0xbb, 0xc1, + 0xc2, 0xc3, 0xc5, 0xc3, 0xb6, 0xa3, 0xa3, 0xa6, 0xb0, 0xbb, 0xba, 0xb1, + 0xac, 0xad, 0xb0, 0xab, 0x9d, 0x97, 0x96, 0x9a, 0x9c, 0x99, 0xa2, 0xb5, + 0xb4, 0xad, 0xa4, 0xa1, 0x9a, 0x98, 0x96, 0x96, 0x96, 0x99, 0x99, 0x9c, + 0xa3, 0xa4, 0xa0, 0x9c, 0x9c, 0x9c, 0x9e, 0x9e, 0x9d, 0x9b, 0xa4, 0xaa, + 0xa9, 0xa5, 0xa3, 0xa4, 0xa4, 0xa4, 0xa4, 0xa1, 0xa0, 0x9d, 0x9f, 0xa4, + 0xa0, 0x9a, 0xa4, 0xb7, 0xbd, 0xbd, 0xb9, 0xb5, 0xb2, 0xb1, 0xb5, 0xb5, + 0xaf, 0xa6, 0x9d, 0x9a, 0x97, 0x96, 0x97, 0x96, 0x97, 0x96, 0x97, 0x96, + 0x98, 0x9c, 0x9b, 0x9b, 0x96, 0x97, 0x99, 0x9e, 0x9c, 0x98, 0x98, 0x9b, + 0xa2, 0xa7, 0xa3, 0x9e, 0x99, 0x96, 0x96, 0x9a, 0x9b, 0x98, 0xa2, 0xab, + 0xb8, 0xc6, 0xf1, 0x1f, 0x3f, 0x3a, 0x23, 0x0d, 0x00, 0x0e, 0x22, 0x29, + 0x36, 0x38, 0x44, 0x4f, 0x5a, 0x65, 0x69, 0x69, 0x67, 0x5f, 0x5b, 0x50, + 0x18, 0xd4, 0xbe, 0xac, 0xa3, 0xa1, 0x9e, 0x9b, 0x9a, 0xa2, 0xae, 0xaf, + 0xaf, 0xb0, 0xb2, 0xb3, 0xb2, 0xb5, 0xb4, 0xb5, 0xb4, 0xb5, 0xb5, 0xaf, + 0xa8, 0xa5, 0xa6, 0xa3, 0x9e, 0x9e, 0xa1, 0xa2, 0xa4, 0xa4, 0xa5, 0xac, + 0xba, 0xbe, 0xbf, 0xba, 0xbb, 0xba, 0xb9, 0xb5, 0xb5, 0xb6, 0xb6, 0xb1, + 0xac, 0xa7, 0x9e, 0x9a, 0x9b, 0x9b, 0xa4, 0xb4, 0xb3, 0xb1, 0xb2, 0xb2, + 0xaf, 0xaf, 0xb8, 0xb1, 0xac, 0xae, 0xb0, 0xba, 0xc3, 0xcd, 0xca, 0xc1, + 0xcb, 0xc8, 0xc1, 0xc1, 0xd4, 0xe7, 0xd2, 0xa6, 0x9b, 0x9d, 0xa2, 0xa2, + 0x9c, 0xa6, 0xb0, 0xc0, 0xc1, 0xb5, 0xb9, 0xbe, 0xc1, 0xc6, 0xc8, 0xc8, + 0xbe, 0xa5, 0xa8, 0xad, 0xb0, 0xb9, 0xbb, 0xb2, 0xaf, 0xb2, 0xb5, 0xb5, + 0xa9, 0x9b, 0x96, 0x98, 0x9e, 0x9f, 0xaf, 0xb7, 0xb4, 0xb2, 0xab, 0xa3, + 0x9b, 0x96, 0x97, 0x97, 0x98, 0x9b, 0x9b, 0x9c, 0xa4, 0xa6, 0xa2, 0xa0, + 0xa0, 0xa0, 0xa2, 0xa2, 0x9e, 0x9c, 0x9e, 0xa1, 0xa6, 0xa6, 0xa3, 0xa3, + 0xa8, 0xaa, 0xa7, 0xa2, 0xa1, 0x9c, 0x9d, 0xa2, 0xa3, 0x9b, 0x9a, 0xa7, + 0xbb, 0xc0, 0xba, 0xb8, 0xb7, 0xbb, 0xba, 0xbb, 0xb9, 0xaf, 0xa6, 0xa6, + 0xa0, 0x9c, 0x98, 0x97, 0x98, 0x98, 0x99, 0x9a, 0x9d, 0x9d, 0x9c, 
0x9b, + 0x97, 0x99, 0x98, 0x9d, 0xa0, 0x9b, 0x9a, 0x99, 0x9c, 0xa3, 0xa7, 0x9f, + 0x98, 0x97, 0x98, 0x98, 0x9c, 0x9c, 0xa2, 0xad, 0xc0, 0xda, 0xfd, 0x33, + 0x44, 0x34, 0x1d, 0x0e, 0x05, 0x16, 0x27, 0x32, 0x41, 0x47, 0x51, 0x5a, + 0x60, 0x68, 0x6a, 0x6b, 0x6a, 0x68, 0x65, 0x59, 0x2b, 0x08, 0xe0, 0xb4, + 0xa4, 0xa3, 0xa2, 0x9d, 0x9b, 0x9a, 0xa3, 0xa9, 0xad, 0xad, 0xad, 0xac, + 0xad, 0xaf, 0xb0, 0xb3, 0xb5, 0xb7, 0xb9, 0xb6, 0xaf, 0xab, 0xa9, 0xa3, + 0xa0, 0xa2, 0xa6, 0xa8, 0xa7, 0xa6, 0xa3, 0xa0, 0xac, 0xbc, 0xc0, 0xbf, + 0xbc, 0xb9, 0xbb, 0xb5, 0xb2, 0xb3, 0xb4, 0xb2, 0xb1, 0xb0, 0xa9, 0x9c, + 0x9c, 0x9e, 0xaa, 0xb1, 0xb4, 0xb0, 0xaf, 0xb3, 0xb0, 0xaf, 0xb7, 0xac, + 0xa7, 0xab, 0xb0, 0xb9, 0xb9, 0xc7, 0xca, 0xc1, 0xc6, 0xca, 0xc1, 0xbf, + 0xd6, 0xec, 0xe8, 0xc6, 0xad, 0xbf, 0xc9, 0xb7, 0xae, 0xb8, 0xc5, 0xc8, + 0xc0, 0xb1, 0xb8, 0xbe, 0xc3, 0xc4, 0xc8, 0xca, 0xc2, 0xb4, 0xb5, 0xbb, + 0xbb, 0xbb, 0xbb, 0xb6, 0xb1, 0xb2, 0xb7, 0xb6, 0xb1, 0xa9, 0x9d, 0x99, + 0x9c, 0x9d, 0xad, 0xb5, 0xb3, 0xb4, 0xb2, 0xa3, 0x9b, 0x96, 0x97, 0x9c, + 0x9d, 0x9c, 0x99, 0x9b, 0xab, 0xb1, 0xaa, 0xa4, 0xa2, 0xa3, 0xa3, 0xa3, + 0xa0, 0x9c, 0x9c, 0x9c, 0xa2, 0xa4, 0xa5, 0xa5, 0xa9, 0xac, 0xa9, 0xa5, + 0xa3, 0xa2, 0xa2, 0xa3, 0xa3, 0x9d, 0x99, 0x9b, 0xa6, 0xb0, 0xb6, 0xb7, + 0xb8, 0xb5, 0xb7, 0xb8, 0xb9, 0xb2, 0xa5, 0xa3, 0xa9, 0xa7, 0x9f, 0x9c, + 0x9e, 0x9c, 0x9c, 0x9c, 0x9d, 0x9c, 0x9f, 0xa1, 0x99, 0x99, 0x97, 0x99, + 0xa0, 0x9d, 0x9b, 0x99, 0x99, 0x9a, 0xa0, 0xa3, 0x9b, 0x99, 0x9b, 0x98, + 0x9c, 0xa3, 0xa4, 0xc1, 0xd4, 0xf9, 0x0e, 0x37, 0x42, 0x30, 0x17, 0x15, + 0x16, 0x27, 0x37, 0x3e, 0x4a, 0x56, 0x5b, 0x62, 0x65, 0x6b, 0x6b, 0x6c, + 0x6d, 0x6c, 0x6b, 0x60, 0x4d, 0x38, 0x00, 0xb8, 0xa6, 0xa2, 0x9f, 0x9f, + 0x9d, 0x9a, 0x9e, 0xa1, 0xa8, 0xae, 0xb0, 0xb0, 0xaf, 0xaf, 0xb0, 0xb0, + 0xaf, 0xaf, 0xb1, 0xb7, 0xb5, 0xaf, 0xa9, 0xa8, 0xa7, 0xa8, 0xa8, 0xa8, + 0xa6, 0x9f, 0x9c, 0x9b, 0xa4, 0xbc, 0xc1, 0xc0, 0xbb, 0xbb, 0xba, 0xb6, + 0xb3, 0xb3, 0xb3, 0xb3, 0xb5, 0xb5, 0xb0, 0xa0, 0xa6, 0xb3, 0xb4, 
0xb5, + 0xb7, 0xb3, 0xaf, 0xb0, 0xb1, 0xb4, 0xb6, 0xac, 0xa6, 0xa9, 0xaf, 0xb7, + 0xb5, 0xbf, 0xc7, 0xbe, 0xc1, 0xc5, 0xc2, 0xbe, 0xd1, 0xe7, 0xe9, 0xe2, + 0xcd, 0xd4, 0xda, 0xbf, 0xb9, 0xc5, 0xd0, 0xce, 0xc0, 0xb3, 0xbc, 0xbf, + 0xc0, 0xc1, 0xc4, 0xc5, 0xc7, 0xc1, 0xbc, 0xbb, 0xbb, 0xbe, 0xb9, 0xaf, + 0xab, 0xae, 0xb5, 0xb8, 0xb1, 0xb2, 0xab, 0x9d, 0x9a, 0x9a, 0xa0, 0xad, + 0xad, 0xab, 0xad, 0xa3, 0x9e, 0x9a, 0x9a, 0x9e, 0x9f, 0x9c, 0x9b, 0x9a, + 0xae, 0xbf, 0xbc, 0xb3, 0xa9, 0xa6, 0xa4, 0xa5, 0xa4, 0xa0, 0x9e, 0x9e, + 0xa4, 0xa9, 0xab, 0xab, 0xad, 0xaf, 0xab, 0xa8, 0xac, 0xb0, 0xb1, 0xb3, + 0xab, 0xa2, 0x99, 0x97, 0x98, 0x9c, 0xa9, 0xb8, 0xb9, 0xb6, 0xb5, 0xb5, + 0xb4, 0xb6, 0xaf, 0x9e, 0x9d, 0xa3, 0xa5, 0xa5, 0xa4, 0xa3, 0xa0, 0x9b, + 0x9a, 0x9c, 0xa0, 0xa3, 0x99, 0x97, 0x96, 0x98, 0x9c, 0x9c, 0x9b, 0x9c, + 0x9d, 0xa1, 0xa1, 0xa4, 0xa9, 0xa2, 0xa1, 0xa3, 0xac, 0xc0, 0xcb, 0xe0, + 0x12, 0x15, 0x2d, 0x3f, 0x41, 0x2f, 0x1c, 0x1c, 0x26, 0x36, 0x48, 0x4f, + 0x55, 0x5f, 0x63, 0x67, 0x69, 0x6d, 0x6d, 0x6e, 0x6e, 0x6e, 0x6c, 0x67, + 0x61, 0x50, 0x06, 0xc0, 0xa8, 0xa4, 0xa2, 0xa2, 0xa0, 0x9d, 0x9e, 0xa1, + 0xa9, 0xaf, 0xb1, 0xb1, 0xb2, 0xb5, 0xb5, 0xb1, 0xb1, 0xb1, 0xb2, 0xb3, + 0xb2, 0xb0, 0xad, 0xae, 0xaf, 0xab, 0xab, 0xad, 0xa3, 0x9c, 0x9b, 0x99, + 0xa2, 0xbc, 0xc3, 0xc1, 0xbe, 0xc0, 0xb8, 0xb6, 0xb7, 0xb5, 0xb2, 0xb2, + 0xb5, 0xb7, 0xb7, 0xa7, 0xaf, 0xca, 0xc6, 0xba, 0xb4, 0xb2, 0xb1, 0xb2, + 0xb2, 0xb3, 0xb4, 0xad, 0xa8, 0xaa, 0xad, 0xb4, 0xb5, 0xbc, 0xc0, 0xbb, + 0xc1, 0xc2, 0xbe, 0xbf, 0xce, 0xe4, 0xe7, 0xe2, 0xd0, 0xd4, 0xd3, 0xba, + 0xba, 0xc5, 0xce, 0xcf, 0xc1, 0xb5, 0xbc, 0xbf, 0xbe, 0xc1, 0xc3, 0xc8, + 0xd0, 0xca, 0xc3, 0xc6, 0xc2, 0xc2, 0xbe, 0xb2, 0xb0, 0xb0, 0xb5, 0xbb, + 0xb7, 0xb5, 0xb6, 0xa9, 0xa0, 0x9c, 0x9a, 0x9a, 0x9f, 0xa0, 0xa0, 0xa0, + 0x9f, 0x9f, 0x9d, 0x9e, 0x9c, 0x99, 0x9a, 0x9c, 0xa7, 0xbe, 0xc0, 0xbf, + 0xbb, 0xb3, 0xab, 0xa7, 0xa6, 0xa4, 0xa2, 0xa2, 0xa7, 0xaa, 0xae, 0xb0, + 0xb1, 0xaf, 0xa9, 0xa9, 0xb4, 0xb6, 0xb5, 0xb6, 0xb3, 0xad, 0xa1, 
0x99, + 0x9a, 0x9c, 0xa0, 0xaf, 0xb6, 0xb2, 0xaf, 0xa9, 0xa4, 0xa2, 0xa3, 0x9d, + 0x9c, 0x9e, 0xa2, 0xa5, 0xa6, 0xa6, 0xa5, 0xa3, 0xa0, 0x9d, 0x9e, 0xa3, + 0x98, 0x96, 0x96, 0x99, 0x9b, 0x9c, 0x9d, 0xa1, 0xa6, 0xa8, 0xaa, 0xb1, + 0xb7, 0xc0, 0xba, 0xb5, 0xc1, 0xe3, 0x09, 0x12, 0x2b, 0x3f, 0x44, 0x43, + 0x41, 0x37, 0x2d, 0x2a, 0x37, 0x49, 0x56, 0x5f, 0x62, 0x65, 0x69, 0x6b, + 0x6d, 0x6f, 0x6e, 0x6f, 0x6f, 0x6f, 0x6e, 0x6d, 0x69, 0x56, 0x07, 0xc0, + 0xa9, 0xa7, 0xa5, 0xa4, 0xa4, 0x9f, 0xa0, 0xa6, 0xb0, 0xb3, 0xb0, 0xb2, + 0xb4, 0xb0, 0xb0, 0xb1, 0xb5, 0xb3, 0xae, 0xad, 0xaf, 0xb1, 0xb3, 0xb4, + 0xb2, 0xaf, 0xb2, 0xb0, 0xa0, 0x9a, 0x9b, 0x9a, 0x9f, 0xb9, 0xc5, 0xc3, + 0xc1, 0xc4, 0xbf, 0xbb, 0xba, 0xb5, 0xaa, 0xa3, 0xa7, 0xae, 0xb5, 0xa9, + 0xab, 0xc6, 0xcd, 0xc9, 0xc1, 0xb9, 0xb9, 0xbb, 0xba, 0xb6, 0xb2, 0xab, + 0xa8, 0xa9, 0xad, 0xb5, 0xb4, 0xba, 0xbd, 0xbd, 0xc1, 0xc1, 0xb8, 0xbc, + 0xcb, 0xdd, 0xe1, 0xdd, 0xd2, 0xd4, 0xc5, 0xb5, 0xbd, 0xc9, 0xd1, 0xd1, + 0xc1, 0xb5, 0xba, 0xbe, 0xbe, 0xc1, 0xc5, 0xcd, 0xce, 0xc2, 0xb8, 0xbf, + 0xbd, 0xc0, 0xc4, 0xc2, 0xc1, 0xc0, 0xbf, 0xc3, 0xc3, 0xbb, 0xb7, 0xb4, + 0xaf, 0xa9, 0xa7, 0x9d, 0x9e, 0xa2, 0x9d, 0x9e, 0x9c, 0xa1, 0xa4, 0xa2, + 0x9c, 0x98, 0x98, 0x9b, 0xa7, 0xbd, 0xbe, 0xbf, 0xbf, 0xbd, 0xb1, 0xaf, + 0xae, 0xaa, 0xa9, 0xaa, 0xa9, 0xac, 0xaf, 0xb1, 0xb0, 0xaa, 0xa6, 0xa6, + 0xaf, 0xb5, 0xb5, 0xb5, 0xb1, 0xaf, 0xac, 0xa3, 0x9e, 0x9d, 0x9c, 0xa3, + 0xa6, 0x9f, 0xa1, 0x9d, 0x9c, 0x9c, 0x9d, 0x9d, 0x9d, 0x9d, 0x9e, 0xa1, + 0xa2, 0xa3, 0xa5, 0xa8, 0xa9, 0xa3, 0xa2, 0xa3, 0x98, 0x96, 0x99, 0x9b, + 0x9c, 0x9f, 0xa3, 0xa4, 0xaa, 0xb1, 0xb3, 0xbe, 0xda, 0xf3, 0xf6, 0xf1, + 0xee, 0x03, 0x21, 0x37, 0x3e, 0x44, 0x4b, 0x45, 0x3e, 0x3e, 0x3e, 0x44, + 0x4a, 0x58, 0x62, 0x65, 0x68, 0x69, 0x6b, 0x6c, 0x6d, 0x6e, 0x6f, 0x6f, + 0x6f, 0x6f, 0x6f, 0x6f, 0x6c, 0x5e, 0x25, 0xd3, 0xb0, 0xac, 0xaa, 0xa9, + 0xa8, 0xa5, 0xa9, 0xae, 0xb2, 0xb1, 0xb0, 0xb0, 0xaf, 0xaf, 0xb1, 0xb3, + 0xb0, 0xaf, 0xaf, 0xae, 0xaf, 0xba, 0xc6, 0xc1, 0xb4, 0xb6, 0xb0, 
0xa3, + 0x9c, 0x9c, 0x9c, 0x9d, 0xa2, 0xb3, 0xc1, 0xc5, 0xc2, 0xc5, 0xc2, 0xc3, + 0xbb, 0xab, 0x9c, 0x97, 0x98, 0x9c, 0xa2, 0xa2, 0xa9, 0xc2, 0xc8, 0xc8, + 0xc5, 0xc4, 0xbc, 0xb5, 0xb1, 0xb1, 0xaf, 0xac, 0xa9, 0xae, 0xaf, 0xb3, + 0xaf, 0xb1, 0xbb, 0xbd, 0xc2, 0xc0, 0xb7, 0xba, 0xc9, 0xd5, 0xd6, 0xd9, + 0xd7, 0xda, 0xcd, 0xbd, 0xc6, 0xcf, 0xd8, 0xd3, 0xbe, 0xac, 0xb4, 0xbb, + 0xc2, 0xc2, 0xc5, 0xc8, 0xc1, 0xb5, 0xa4, 0x9f, 0xa0, 0xac, 0xb5, 0xc5, + 0xcc, 0xc9, 0xca, 0xcd, 0xd2, 0xcf, 0xc3, 0xb7, 0xaf, 0xab, 0xb1, 0xac, + 0xa4, 0xa5, 0xa0, 0xa1, 0x9d, 0xa5, 0xa7, 0xa5, 0xa2, 0x9b, 0x98, 0x99, + 0xa6, 0xb4, 0xb8, 0xbb, 0xbe, 0xc1, 0xc7, 0xce, 0xca, 0xbc, 0xb3, 0xae, + 0xaa, 0xab, 0xac, 0xaf, 0xaf, 0xad, 0xa9, 0xab, 0xaf, 0xb2, 0xb5, 0xb9, + 0xb5, 0xb1, 0xb1, 0xaf, 0xa4, 0x9e, 0x9a, 0x9d, 0x9d, 0x9b, 0x9e, 0xa1, + 0xa0, 0xa1, 0xa2, 0xa2, 0x9d, 0x9f, 0x9b, 0x9a, 0x9f, 0xa2, 0xa2, 0xa3, + 0xa7, 0xa4, 0xa7, 0xa7, 0x97, 0x97, 0x99, 0x9c, 0x9d, 0xa0, 0xa2, 0xa3, + 0xaa, 0xc0, 0xda, 0xe7, 0xf2, 0x17, 0x31, 0x29, 0x21, 0x1e, 0x30, 0x44, + 0x4c, 0x4a, 0x4a, 0x49, 0x40, 0x44, 0x4b, 0x53, 0x5c, 0x61, 0x67, 0x69, + 0x69, 0x6b, 0x6c, 0x6d, 0x6e, 0x6f, 0x6f, 0x6f, 0x6f, 0x6f, 0x6f, 0x6f, + 0x6e, 0x69, 0x53, 0x1c, 0xd5, 0xb6, 0xae, 0xab, 0xaa, 0xaa, 0xb3, 0xb5, + 0xb0, 0xaf, 0xb1, 0xaf, 0xaf, 0xb1, 0xb4, 0xaf, 0xaf, 0xb1, 0xbb, 0xe2, + 0x09, 0x2b, 0x34, 0x20, 0xf7, 0xc8, 0xa9, 0x9e, 0xa0, 0xa8, 0xa2, 0x9d, + 0xa2, 0xa7, 0xb9, 0xbc, 0xba, 0xbe, 0xbe, 0xba, 0xaa, 0x9e, 0x9a, 0x99, + 0x9a, 0x9d, 0xa0, 0x9f, 0xa8, 0xbc, 0xc4, 0xc3, 0xc0, 0xc0, 0xbb, 0xbb, + 0xb6, 0xb0, 0xaf, 0xaf, 0xaf, 0xaf, 0xa8, 0xac, 0xab, 0xa9, 0xb6, 0xbf, + 0xc2, 0xc0, 0xb5, 0xb6, 0xc5, 0xcc, 0xc9, 0xce, 0xd8, 0xe1, 0xd7, 0xc8, + 0xcb, 0xd2, 0xd5, 0xd3, 0xc1, 0xaf, 0xb0, 0xb9, 0xc6, 0xc2, 0xbe, 0xbf, + 0xbf, 0xb9, 0xa7, 0x9e, 0xa0, 0xa7, 0xab, 0xbe, 0xcb, 0xc3, 0xc9, 0xcf, + 0xcf, 0xd2, 0xd1, 0xc5, 0xb3, 0xb3, 0xbb, 0xbb, 0xb9, 0xb9, 0xae, 0xa3, + 0xa0, 0xa8, 0xaa, 0xa5, 0xa2, 0x9c, 0x9a, 0xa2, 0xaf, 0xb6, 0xb9, 
0xba, + 0xb9, 0xc1, 0xd7, 0xde, 0xda, 0xc8, 0xb5, 0xae, 0xad, 0xa9, 0xa7, 0xab, + 0xac, 0xad, 0xa8, 0xaa, 0xad, 0xaf, 0xaf, 0xb2, 0xb9, 0xb9, 0xb1, 0xb3, + 0xaf, 0xa2, 0x9c, 0x9f, 0xa2, 0xa1, 0xa9, 0xaa, 0xa8, 0xa4, 0xa4, 0xa4, + 0xa1, 0xa7, 0xa2, 0x9d, 0xa2, 0xa3, 0xa2, 0x9f, 0xa5, 0xa5, 0xaa, 0xa8, + 0x97, 0x98, 0x9a, 0x9d, 0x9e, 0x9f, 0xa1, 0xa2, 0xa6, 0xb6, 0xe7, 0x18, + 0x2d, 0x31, 0x39, 0x45, 0x46, 0x3e, 0x3e, 0x49, 0x4e, 0x51, 0x50, 0x4c, + 0x4c, 0x4d, 0x55, 0x5b, 0x60, 0x66, 0x69, 0x69, 0x6a, 0x6b, 0x6d, 0x6e, + 0x6f, 0x6f, 0x6f, 0x6f, 0x6f, 0x6f, 0x6f, 0x6f, 0x6f, 0x6f, 0x6c, 0x5d, + 0x34, 0xfc, 0xc9, 0xb7, 0xae, 0xa9, 0xb5, 0xb8, 0xb3, 0xb0, 0xaf, 0xb5, + 0xbe, 0xbb, 0xb7, 0xb7, 0xbb, 0xd8, 0x13, 0x43, 0x4b, 0x4f, 0x51, 0x57, + 0x50, 0x06, 0xaf, 0xa7, 0xab, 0xb2, 0xaa, 0xa1, 0xa1, 0xa6, 0xaf, 0xaf, + 0xb0, 0xba, 0xba, 0xaf, 0xa2, 0x9d, 0x9c, 0xa4, 0xa4, 0xa3, 0xa5, 0xa2, + 0xaa, 0xbb, 0xc0, 0xbd, 0xbb, 0xbe, 0xbc, 0xbe, 0xbd, 0xbb, 0xb6, 0xb1, + 0xaf, 0xae, 0xa7, 0xa9, 0xa8, 0xa2, 0xae, 0xbc, 0xbf, 0xc1, 0xba, 0xb5, + 0xbd, 0xc4, 0xc3, 0xc2, 0xca, 0xda, 0xe1, 0xcf, 0xc9, 0xd5, 0xd8, 0xd5, + 0xcb, 0xc0, 0xc1, 0xc1, 0xc6, 0xc0, 0xbe, 0xbf, 0xc1, 0xc4, 0xb0, 0x9f, + 0xa2, 0xa6, 0xab, 0xb9, 0xc3, 0xc4, 0xcc, 0xd0, 0xd0, 0xce, 0xce, 0xca, + 0xc1, 0xbb, 0xb7, 0xbc, 0xbc, 0xc1, 0xc1, 0xb1, 0xa7, 0xa9, 0xaa, 0xa5, + 0xa2, 0xa2, 0xa2, 0xab, 0xb5, 0xbb, 0xbf, 0xc1, 0xbb, 0xba, 0xcf, 0xda, + 0xd7, 0xcc, 0xb7, 0xaf, 0xa9, 0xa3, 0xa7, 0xa9, 0xa7, 0xa8, 0xa7, 0xaa, + 0xaa, 0xaa, 0xaf, 0xaf, 0xb2, 0xb6, 0xb2, 0xb3, 0xaf, 0xa7, 0xa9, 0xac, + 0xac, 0xa9, 0xaf, 0xb1, 0xad, 0xa8, 0xa5, 0xa6, 0xa8, 0xa9, 0xa5, 0x9d, + 0xa0, 0xa4, 0xa2, 0x9c, 0xa0, 0xa3, 0xa9, 0xab, 0x96, 0x97, 0x98, 0x9a, + 0x9d, 0xa0, 0xa2, 0xa3, 0xa7, 0xac, 0xc2, 0xe7, 0x18, 0x38, 0x45, 0x46, + 0x4e, 0x51, 0x50, 0x50, 0x53, 0x55, 0x53, 0x50, 0x4f, 0x56, 0x5d, 0x62, + 0x64, 0x69, 0x6b, 0x6a, 0x6b, 0x6d, 0x6e, 0x6f, 0x6f, 0x6f, 0x6f, 0x6f, + 0x6f, 0x6f, 0x6f, 0x6f, 0x6f, 0x6f, 0x6f, 0x6b, 0x65, 0x54, 0x30, 
0xfc, + 0xce, 0xb6, 0xb5, 0xb6, 0xb3, 0xaf, 0xb5, 0xe4, 0xe9, 0xc8, 0xdb, 0xe4, + 0x02, 0x33, 0x44, 0x44, 0x44, 0x46, 0x4c, 0x53, 0x5a, 0x25, 0xbb, 0xac, + 0xa9, 0xaf, 0xab, 0x9f, 0xa2, 0xae, 0xaf, 0xb0, 0xb5, 0xb9, 0xb7, 0xb2, + 0xac, 0xa4, 0x9d, 0xa7, 0xaf, 0xae, 0xaa, 0xa7, 0xaa, 0xb7, 0xbd, 0xbb, + 0xb6, 0xbb, 0xbf, 0xbb, 0xbb, 0xc1, 0xbb, 0xb5, 0xb1, 0xaf, 0xa9, 0xa9, + 0xa4, 0x9e, 0xa9, 0xb6, 0xc0, 0xbf, 0xb9, 0xb5, 0xb7, 0xbd, 0xbf, 0xbc, + 0xbc, 0xc6, 0xcf, 0xd3, 0xcc, 0xd3, 0xda, 0xd4, 0xca, 0xbd, 0xc5, 0xce, + 0xc8, 0xbf, 0xbf, 0xbf, 0xc5, 0xc7, 0xb6, 0xa4, 0xa3, 0xa4, 0xa9, 0xb6, + 0xbe, 0xc5, 0xd0, 0xcd, 0xcc, 0xce, 0xcc, 0xc9, 0xc5, 0xbb, 0xb5, 0xbb, + 0xbb, 0xbc, 0xc1, 0xbc, 0xb1, 0xac, 0xaa, 0xa9, 0xb0, 0xb9, 0xbc, 0xbe, + 0xbe, 0xc0, 0xc1, 0xc0, 0xb8, 0xb2, 0xc3, 0xd1, 0xd5, 0xcf, 0xc1, 0xaf, + 0xa1, 0x9b, 0xa0, 0xa4, 0xa8, 0xa6, 0xa4, 0xa9, 0xa7, 0xa3, 0xa7, 0xab, + 0xac, 0xaf, 0xaf, 0xaa, 0xa7, 0xa8, 0xad, 0xb4, 0xb7, 0xb6, 0xb5, 0xb4, + 0xb4, 0xaf, 0xaa, 0xad, 0xb3, 0xac, 0xa7, 0xa3, 0xa2, 0xa6, 0xa9, 0xa2, + 0x9d, 0x9f, 0xa1, 0xa6, 0x9d, 0x9d, 0x9c, 0x9b, 0x9c, 0xa2, 0xa5, 0xaf, + 0xc3, 0xe5, 0x06, 0x12, 0x1a, 0x2b, 0x3f, 0x4d, 0x50, 0x53, 0x55, 0x55, + 0x57, 0x57, 0x51, 0x4e, 0x51, 0x5b, 0x60, 0x64, 0x66, 0x69, 0x6b, 0x6a, + 0x6a, 0x6c, 0x6e, 0x6f, 0x6f, 0x6f, 0x6f, 0x6f, 0x6f, 0x6f, 0x6f, 0x6f, + 0x6f, 0x6f, 0x6f, 0x6e, 0x6d, 0x6c, 0x66, 0x51, 0x31, 0xfd, 0xd6, 0xbe, + 0xbb, 0xb9, 0xff, 0x2b, 0x0c, 0x05, 0x1b, 0x26, 0x40, 0x43, 0x3e, 0x39, + 0x38, 0x3e, 0x48, 0x52, 0x58, 0x25, 0xbf, 0xb3, 0xaf, 0xac, 0xac, 0xa2, + 0xaa, 0xb5, 0xb3, 0xb6, 0xba, 0xba, 0xb8, 0xb5, 0xb2, 0xad, 0xa9, 0xb6, + 0xbf, 0xbb, 0xb5, 0xb4, 0xad, 0xb0, 0xb9, 0xbb, 0xb8, 0xbb, 0xbd, 0xb8, + 0xbb, 0xc8, 0xc6, 0xba, 0xb2, 0xb1, 0xaf, 0xac, 0xa2, 0x9e, 0xa4, 0xaf, + 0xbb, 0xbc, 0xb6, 0xb6, 0xbc, 0xbb, 0xbd, 0xb7, 0xb0, 0xbb, 0xc3, 0xcb, + 0xce, 0xcd, 0xd4, 0xd7, 0xd0, 0xc1, 0xc0, 0xce, 0xca, 0xc8, 0xc2, 0xc1, + 0xc4, 0xc1, 0xb1, 0xa3, 0xa2, 0xa6, 0xaf, 0xbd, 0xbf, 0xbb, 0xbe, 
0xc1, + 0xc8, 0xcc, 0xcd, 0xc8, 0xc3, 0xb5, 0xb0, 0xb3, 0xb2, 0xb4, 0xb8, 0xbb, + 0xb6, 0xb1, 0xb4, 0xb5, 0xbb, 0xc6, 0xca, 0xca, 0xc7, 0xc1, 0xbd, 0xb8, + 0xaf, 0xad, 0xad, 0xbb, 0xd1, 0xd2, 0xc1, 0xa6, 0x9c, 0x98, 0x97, 0x9a, + 0x9e, 0xa0, 0x9f, 0x9d, 0x9d, 0xa3, 0xa2, 0xa3, 0xa4, 0xa9, 0xa9, 0xa7, + 0xa5, 0xa8, 0xaf, 0xb3, 0xb5, 0xb5, 0xb6, 0xb6, 0xb9, 0xb2, 0xb2, 0xb9, + 0xc5, 0xbb, 0xb4, 0xb5, 0xaf, 0xb2, 0xb8, 0xb5, 0xa9, 0xa3, 0xa6, 0xa9, + 0xa1, 0xa7, 0xa9, 0xa6, 0xa2, 0xa6, 0xbb, 0xdb, 0x00, 0x20, 0x2c, 0x35, + 0x3d, 0x3e, 0x44, 0x4d, 0x55, 0x59, 0x5c, 0x58, 0x51, 0x4f, 0x4b, 0x50, + 0x58, 0x5f, 0x63, 0x67, 0x68, 0x68, 0x69, 0x6b, 0x6c, 0x6d, 0x6e, 0x6f, + 0x6f, 0x6f, 0x6f, 0x6f, 0x6f, 0x6f, 0x6f, 0x6f, 0x6f, 0x6f, 0x6f, 0x6f, + 0x6e, 0x6e, 0x6e, 0x6c, 0x67, 0x57, 0x3a, 0x1d, 0x0b, 0x1c, 0x51, 0x47, + 0x36, 0x37, 0x3c, 0x40, 0x3c, 0x34, 0x2b, 0x27, 0x28, 0x30, 0x41, 0x4d, + 0x57, 0x21, 0xc5, 0xbb, 0xbb, 0xb4, 0xad, 0xa9, 0xaf, 0xb5, 0xb5, 0xbb, + 0xb8, 0xb5, 0xb3, 0xb3, 0xae, 0xab, 0xb5, 0xc5, 0xc4, 0xc2, 0xc9, 0xbf, + 0xb0, 0xa9, 0xb2, 0xbb, 0xbb, 0xbb, 0xba, 0xb5, 0xb9, 0xc7, 0xca, 0xc1, + 0xbb, 0xb5, 0xb0, 0xac, 0xa4, 0xa1, 0xa2, 0xa9, 0xb6, 0xbb, 0xb8, 0xc1, + 0xc5, 0xbc, 0xb5, 0xaf, 0xaa, 0xb4, 0xbf, 0xc1, 0xc1, 0xc7, 0xd0, 0xd6, + 0xdd, 0xc8, 0xbb, 0xc5, 0xbc, 0xbf, 0xbf, 0xc1, 0xc3, 0xbe, 0xb0, 0xaf, + 0xae, 0xb2, 0xb8, 0xc2, 0xbd, 0xaf, 0xa9, 0xaf, 0xbb, 0xc5, 0xc9, 0xca, + 0xc3, 0xb3, 0xa9, 0xa7, 0xa6, 0xa9, 0xaf, 0xbb, 0xc7, 0xc0, 0xbc, 0xbb, + 0xbb, 0xc3, 0xc6, 0xc9, 0xc9, 0xc1, 0xbc, 0xb7, 0xba, 0xb8, 0xa7, 0xa6, + 0xc5, 0xd4, 0xc1, 0x9e, 0x98, 0x97, 0x97, 0x9c, 0xa1, 0xa2, 0x9b, 0x97, + 0x9c, 0xa4, 0xa6, 0xa6, 0xa5, 0xa6, 0xa5, 0xa3, 0xaa, 0xad, 0xad, 0xaf, + 0xaf, 0xad, 0xb3, 0xb6, 0xb7, 0xb8, 0xb9, 0xb7, 0xbe, 0xbc, 0xb4, 0xb3, + 0xac, 0xad, 0xb7, 0xbb, 0xba, 0xb3, 0xb4, 0xb3, 0xa2, 0xaa, 0xad, 0xab, + 0xa5, 0xa9, 0xc8, 0xf1, 0x15, 0x2b, 0x37, 0x3f, 0x45, 0x49, 0x4c, 0x51, + 0x57, 0x5b, 0x5b, 0x57, 0x4c, 0x48, 0x4b, 0x56, 0x5d, 0x61, 0x63, 
0x66, + 0x68, 0x68, 0x69, 0x6a, 0x6b, 0x6c, 0x6d, 0x6d, 0x6e, 0x6e, 0x6e, 0x6e, + 0x6e, 0x6f, 0x6f, 0x6f, 0x6f, 0x6f, 0x6f, 0x6f, 0x6f, 0x6f, 0x6f, 0x6f, + 0x6d, 0x6a, 0x68, 0x63, 0x5c, 0x5e, 0x5c, 0x54, 0x4b, 0x45, 0x3f, 0x35, + 0x2b, 0x1e, 0x17, 0x16, 0x17, 0x25, 0x37, 0x49, 0x57, 0x24, 0xc2, 0xbc, + 0xbc, 0xb8, 0xb5, 0xaf, 0xb0, 0xb4, 0xb5, 0xb5, 0xb4, 0xaf, 0xb1, 0xb0, + 0xab, 0xa9, 0xb7, 0xc7, 0xca, 0xcb, 0xcf, 0xc8, 0xbb, 0xaa, 0xa9, 0xb9, + 0xb8, 0xb0, 0xb0, 0xb1, 0xb8, 0xc3, 0xc6, 0xc1, 0xbc, 0xb5, 0xac, 0xa6, + 0xa3, 0xa2, 0xa3, 0xa8, 0xb5, 0xbb, 0xbb, 0xc4, 0xc2, 0xb5, 0xaf, 0xaf, + 0xb2, 0xb5, 0xbb, 0xc0, 0xbf, 0xb6, 0xc0, 0xcf, 0xc9, 0xbb, 0xb4, 0xb1, + 0xaf, 0xaf, 0xb1, 0xb4, 0xb9, 0xbb, 0xbb, 0xb5, 0xb1, 0xb4, 0xbd, 0xc3, + 0xbb, 0xae, 0xa8, 0xa7, 0xb3, 0xbe, 0xc6, 0xc9, 0xc3, 0xaf, 0xa8, 0xa8, + 0xab, 0xa9, 0xa8, 0xb5, 0xcd, 0xce, 0xc9, 0xc1, 0xbd, 0xbc, 0xc2, 0xc5, + 0xc9, 0xc1, 0xbe, 0xbd, 0xc8, 0xcb, 0xb7, 0xa3, 0xb5, 0xd0, 0xc3, 0xa1, + 0x9b, 0x9a, 0x9b, 0xad, 0xb6, 0xb5, 0xa7, 0x9a, 0x9b, 0xa0, 0xa2, 0x9f, + 0xa7, 0xa9, 0xa3, 0xa5, 0xba, 0xc1, 0xb6, 0xb2, 0xa9, 0xa3, 0xa9, 0xb4, + 0xbb, 0xbb, 0xb6, 0xb1, 0xb2, 0xb2, 0xb1, 0xaf, 0xad, 0xa9, 0xaa, 0xae, + 0xaf, 0xaf, 0xb5, 0xb2, 0xa6, 0xad, 0xac, 0xaa, 0xa5, 0xa5, 0xb8, 0xdd, + 0x0d, 0x29, 0x38, 0x43, 0x49, 0x4c, 0x50, 0x54, 0x57, 0x5a, 0x58, 0x4f, + 0x47, 0x4a, 0x50, 0x58, 0x5d, 0x63, 0x65, 0x67, 0x68, 0x68, 0x69, 0x6a, + 0x6b, 0x6b, 0x6b, 0x6c, 0x6c, 0x6d, 0x6d, 0x6e, 0x6e, 0x6e, 0x6f, 0x6f, + 0x6f, 0x6f, 0x6f, 0x6f, 0x6f, 0x6f, 0x6f, 0x6f, 0x6e, 0x6b, 0x69, 0x67, + 0x64, 0x60, 0x5b, 0x54, 0x47, 0x3e, 0x33, 0x25, 0x1c, 0x0c, 0x08, 0x0d, + 0x10, 0x20, 0x34, 0x48, 0x57, 0x26, 0xc5, 0xba, 0xb8, 0xb5, 0xb3, 0xae, + 0xb0, 0xc2, 0xc1, 0xba, 0xb5, 0xb3, 0xb0, 0xab, 0xa9, 0xa8, 0xb8, 0xc8, + 0xce, 0xc9, 0xc3, 0xc5, 0xbb, 0xb0, 0xab, 0xaa, 0xae, 0xaf, 0xaf, 0xb4, + 0xbb, 0xc1, 0xc4, 0xc1, 0xba, 0xb3, 0xa9, 0xa6, 0xa2, 0xa2, 0xa2, 0xa6, + 0xb3, 0xb6, 0xad, 0xbb, 0xba, 0xad, 0xa9, 0xb0, 0xbe, 0xbc, 0xb8, 
0xbb, + 0xc5, 0xbb, 0xbb, 0xcd, 0xc1, 0xb7, 0xb5, 0xb3, 0xb1, 0xb3, 0xb3, 0xb3, + 0xb5, 0xb5, 0xb9, 0xb5, 0xb1, 0xb6, 0xc1, 0xc8, 0xc0, 0xb4, 0xaf, 0xab, + 0xb0, 0xba, 0xbb, 0xb9, 0xbd, 0xb6, 0xac, 0xa9, 0xa8, 0xa6, 0xa2, 0xac, + 0xb6, 0xc1, 0xcb, 0xca, 0xc6, 0xbf, 0xc7, 0xc9, 0xca, 0xc3, 0xbf, 0xc1, + 0xcd, 0xd7, 0xcb, 0xac, 0xa9, 0xc2, 0xc3, 0xb1, 0xa7, 0x9d, 0x9c, 0xb1, + 0xc0, 0xc2, 0xb2, 0xa4, 0x9f, 0x9f, 0xa1, 0xa1, 0xa4, 0xa4, 0xa6, 0xaa, + 0xb4, 0xbe, 0xc1, 0xbe, 0xb5, 0xa3, 0xa4, 0xb5, 0xbd, 0xbb, 0xb3, 0xaa, + 0xa6, 0xaf, 0xb7, 0xba, 0xbb, 0xb2, 0xa8, 0xaf, 0xbb, 0xb9, 0xb8, 0xb6, + 0xa4, 0xa9, 0xa7, 0xa5, 0xa3, 0xa9, 0xb1, 0xd2, 0x04, 0x24, 0x35, 0x3e, + 0x45, 0x4a, 0x50, 0x54, 0x57, 0x59, 0x52, 0x47, 0x4a, 0x52, 0x59, 0x59, + 0x5c, 0x63, 0x65, 0x67, 0x69, 0x66, 0x67, 0x69, 0x69, 0x69, 0x6a, 0x6b, + 0x6b, 0x6c, 0x6d, 0x6e, 0x6d, 0x6e, 0x6e, 0x6e, 0x6f, 0x6e, 0x6f, 0x6f, + 0x6f, 0x6f, 0x6f, 0x6f, 0x6d, 0x6b, 0x68, 0x65, 0x62, 0x5c, 0x54, 0x49, + 0x3d, 0x31, 0x25, 0x1a, 0x12, 0x00, 0x06, 0x0d, 0x12, 0x1f, 0x33, 0x4b, + 0x56, 0x20, 0xc7, 0xb5, 0xb4, 0xb5, 0xb4, 0xaf, 0xb5, 0xcb, 0xc8, 0xbe, + 0xb9, 0xb5, 0xb1, 0xaf, 0xab, 0xa9, 0xbd, 0xc8, 0xc9, 0xc1, 0xbc, 0xc1, + 0xb5, 0xaf, 0xae, 0xad, 0xad, 0xaf, 0xb0, 0xb5, 0xb8, 0xbf, 0xc1, 0xbf, + 0xbb, 0xb4, 0xa9, 0xa4, 0xa2, 0xa1, 0xa2, 0xa5, 0xb2, 0xb1, 0xa8, 0xb1, + 0xb1, 0xa8, 0xa5, 0xb6, 0xc1, 0xc0, 0xbb, 0xbb, 0xc7, 0xc3, 0xbe, 0xd1, + 0xc1, 0xb8, 0xb6, 0xb6, 0xb7, 0xb8, 0xb8, 0xb9, 0xb8, 0xb8, 0xbb, 0xbb, + 0xbb, 0xc0, 0xc1, 0xc0, 0xba, 0xb3, 0xaf, 0xae, 0xb2, 0xb7, 0xb6, 0xaa, + 0xad, 0xc2, 0xc8, 0xc3, 0xbb, 0xb1, 0xa9, 0xa9, 0xaa, 0xb0, 0xbb, 0xc3, + 0xca, 0xc8, 0xc6, 0xc8, 0xce, 0xc3, 0xc0, 0xc1, 0xc1, 0xce, 0xcc, 0xb7, + 0xaa, 0xb3, 0xbe, 0xbc, 0xb5, 0xa4, 0x9e, 0xa9, 0xbb, 0xc0, 0xb5, 0xab, + 0xa4, 0xa1, 0x9e, 0xa3, 0xaf, 0xb3, 0xb5, 0xb5, 0xaf, 0xaa, 0xb2, 0xb7, + 0xb6, 0xab, 0xa8, 0xaf, 0xb7, 0xb8, 0xba, 0xb4, 0xad, 0xb7, 0xbb, 0xbc, + 0xbd, 0xb9, 0xab, 0xba, 0xcc, 0xc8, 0xc1, 0xbd, 0xa1, 0xa2, 0xa2, 
0xa2, + 0xa2, 0xac, 0xbf, 0xdb, 0xfd, 0x18, 0x28, 0x33, 0x3e, 0x46, 0x50, 0x57, + 0x57, 0x57, 0x4a, 0x48, 0x52, 0x5d, 0x5d, 0x54, 0x59, 0x60, 0x63, 0x65, + 0x66, 0x64, 0x66, 0x68, 0x68, 0x68, 0x68, 0x69, 0x6a, 0x6b, 0x6c, 0x6c, + 0x6b, 0x6d, 0x6d, 0x6e, 0x6e, 0x6e, 0x6e, 0x6f, 0x6f, 0x6e, 0x6f, 0x6d, + 0x6c, 0x69, 0x67, 0x63, 0x5f, 0x59, 0x4d, 0x40, 0x34, 0x25, 0x19, 0x13, + 0x0c, 0x04, 0x0b, 0x09, 0x0f, 0x20, 0x37, 0x4d, 0x51, 0x15, 0xbf, 0xb2, + 0xb1, 0xb5, 0xb5, 0xaf, 0xb2, 0xc3, 0xc5, 0xbb, 0xb4, 0xb3, 0xaf, 0xae, + 0xad, 0xac, 0xbd, 0xc8, 0xc5, 0xbe, 0xb7, 0xbb, 0xb5, 0xaf, 0xaa, 0xa9, + 0xab, 0xb0, 0xb5, 0xb8, 0xb9, 0xbd, 0xbd, 0xbb, 0xb9, 0xb3, 0xa9, 0xa5, + 0xa4, 0xa2, 0xa2, 0xa5, 0xaf, 0xb0, 0xa9, 0xac, 0xad, 0xab, 0xa9, 0xaf, + 0xbb, 0xc1, 0xbf, 0xc0, 0xc3, 0xc8, 0xc4, 0xd0, 0xc1, 0xb3, 0xb4, 0xb5, + 0xb5, 0xb7, 0xb8, 0xbb, 0xbb, 0xbb, 0xc0, 0xbe, 0xbe, 0xbe, 0xbc, 0xc1, + 0xc1, 0xbb, 0xb1, 0xaf, 0xb3, 0xb4, 0xb5, 0xa9, 0xa2, 0xb1, 0xce, 0xd6, + 0xd3, 0xcc, 0xbf, 0xaf, 0xaf, 0xb1, 0xb6, 0xbb, 0xc4, 0xca, 0xc8, 0xc1, + 0xc1, 0xc1, 0xc3, 0xc8, 0xbc, 0xbc, 0xba, 0xb1, 0xb0, 0xb6, 0xbb, 0xbb, + 0xb8, 0xaa, 0xa0, 0xa1, 0xab, 0xb3, 0xac, 0xa5, 0xa5, 0xa5, 0xa0, 0x9c, + 0xae, 0xc1, 0xc1, 0xc1, 0xc0, 0xb1, 0xbb, 0xb3, 0xa7, 0xa4, 0xa6, 0xa6, + 0xa7, 0xad, 0xb5, 0xb8, 0xbd, 0xc7, 0xc2, 0xbb, 0xb8, 0xb1, 0xa6, 0xbb, + 0xd1, 0xce, 0xc6, 0xbd, 0xa2, 0xa2, 0xa2, 0xa2, 0x9f, 0xa9, 0xbe, 0xd6, + 0xf1, 0x0c, 0x20, 0x2c, 0x37, 0x42, 0x4a, 0x52, 0x55, 0x4d, 0x44, 0x3d, + 0x31, 0x3c, 0x4d, 0x49, 0x53, 0x5a, 0x60, 0x63, 0x63, 0x63, 0x63, 0x65, + 0x67, 0x66, 0x67, 0x69, 0x69, 0x6a, 0x6a, 0x6a, 0x6a, 0x6c, 0x6c, 0x6d, + 0x6d, 0x6e, 0x6e, 0x6e, 0x6d, 0x6c, 0x6c, 0x6b, 0x69, 0x68, 0x65, 0x62, + 0x5e, 0x57, 0x46, 0x37, 0x2a, 0x18, 0x15, 0x0e, 0x07, 0x06, 0x07, 0x08, + 0x0e, 0x24, 0x3d, 0x50, 0x49, 0x04, 0xbd, 0xb0, 0xaf, 0xb3, 0xb6, 0xb5, + 0xb9, 0xc1, 0xc2, 0xbd, 0xb7, 0xb5, 0xb5, 0xb0, 0xad, 0xa9, 0xaf, 0xb7, + 0xc0, 0xbe, 0xb7, 0xb9, 0xb4, 0xac, 0xa6, 0xa2, 0xaf, 0xb4, 0xb7, 
0xb9, + 0xb9, 0xbc, 0xbb, 0xb6, 0xb5, 0xb1, 0xac, 0xa9, 0xa7, 0xa1, 0xa2, 0xa8, + 0xaa, 0xad, 0xa9, 0xa8, 0xad, 0xb3, 0xb0, 0xa9, 0xb9, 0xc4, 0xbf, 0xbb, + 0xbb, 0xc1, 0xc8, 0xd2, 0xc0, 0xaf, 0xb4, 0xb4, 0xb4, 0xbd, 0xc2, 0xc4, + 0xc4, 0xc3, 0xc2, 0xc2, 0xbd, 0xbb, 0xbd, 0xc5, 0xc5, 0xc4, 0xbf, 0xb8, + 0xb3, 0xb5, 0xb4, 0xa5, 0x9f, 0xa7, 0xb8, 0xc5, 0xcb, 0xc5, 0xbf, 0xb6, + 0xaf, 0xaf, 0xb7, 0xbc, 0xc6, 0xc8, 0xcc, 0xc1, 0xb5, 0xbe, 0xc4, 0xc8, + 0xc9, 0xc3, 0xb7, 0xb2, 0xb9, 0xbd, 0xbd, 0xbb, 0xb1, 0xa9, 0xa6, 0x9f, + 0x9c, 0x9f, 0xa0, 0x9e, 0x9e, 0x9d, 0x9c, 0x9d, 0xb2, 0xc7, 0xc8, 0xc4, + 0xc1, 0xb1, 0xbe, 0xc5, 0xb6, 0xab, 0xaa, 0xae, 0xaf, 0xb2, 0xb0, 0xb5, + 0xc3, 0xcb, 0xc7, 0xbd, 0xb7, 0xb1, 0xa7, 0xb1, 0xc7, 0xc4, 0xbf, 0xb8, + 0xa5, 0xa9, 0xa6, 0xa5, 0xa1, 0x9e, 0xad, 0xc7, 0xe8, 0x0c, 0x1f, 0x2b, + 0x37, 0x41, 0x49, 0x4f, 0x50, 0x43, 0x37, 0xeb, 0xbf, 0xdd, 0x1c, 0x3e, + 0x50, 0x57, 0x5d, 0x61, 0x62, 0x63, 0x62, 0x63, 0x63, 0x63, 0x63, 0x66, + 0x68, 0x69, 0x69, 0x69, 0x69, 0x6a, 0x6a, 0x6b, 0x6b, 0x6b, 0x6c, 0x6c, + 0x6b, 0x6a, 0x6a, 0x6a, 0x69, 0x69, 0x63, 0x60, 0x5d, 0x53, 0x40, 0x2c, + 0x19, 0x0e, 0x07, 0x00, 0x03, 0x08, 0x06, 0x04, 0x10, 0x27, 0x44, 0x52, + 0x45, 0x06, 0xc3, 0xb8, 0xb0, 0xb5, 0xb7, 0xb8, 0xbc, 0xc0, 0xc2, 0xbe, + 0xbb, 0xb9, 0xb8, 0xb5, 0xae, 0xa8, 0xa4, 0xa3, 0xaf, 0xb9, 0xb6, 0xb4, + 0xaf, 0xa8, 0xa7, 0xa7, 0xb0, 0xb2, 0xba, 0xba, 0xba, 0xbb, 0xb8, 0xb6, + 0xb2, 0xb6, 0xb0, 0xaa, 0xa6, 0xa3, 0xa2, 0xa8, 0xa9, 0xae, 0xa9, 0xa3, + 0xab, 0xb5, 0xb7, 0xb0, 0xbb, 0xc1, 0xbc, 0xba, 0xbe, 0xc1, 0xc8, 0xd0, + 0xc8, 0xc4, 0xc4, 0xb6, 0xb5, 0xc3, 0xc9, 0xc5, 0xc7, 0xc8, 0xc2, 0xbe, + 0xbc, 0xbc, 0xc2, 0xc7, 0xc7, 0xcd, 0xd0, 0xc1, 0xb9, 0xc3, 0xc3, 0xbc, + 0xbe, 0xbb, 0xb5, 0xb0, 0xb3, 0xaf, 0xb0, 0xb6, 0xaf, 0xad, 0xba, 0xc0, + 0xcb, 0xc8, 0xc2, 0xbc, 0xaf, 0xb3, 0xbd, 0xc1, 0xc6, 0xc3, 0xb9, 0xb5, + 0xbb, 0xc0, 0xbf, 0xb8, 0xb2, 0xb7, 0xb9, 0xa9, 0x9d, 0x9c, 0xa1, 0xa2, + 0x9e, 0x9c, 0x9e, 0xae, 0xc4, 0xc8, 0xc5, 0xc3, 0xc1, 0xba, 0xbd, 
0xcd, + 0xcd, 0xc1, 0xbf, 0xbf, 0xbd, 0xbc, 0xb3, 0xb3, 0xbb, 0xc2, 0xc5, 0xb9, + 0xab, 0xaf, 0xa6, 0x9f, 0xa2, 0xa3, 0xa7, 0xaf, 0xa1, 0xa2, 0xa8, 0xa9, + 0xa4, 0x9c, 0xa9, 0xc7, 0xe7, 0x07, 0x1d, 0x2b, 0x3c, 0x44, 0x4a, 0x52, + 0x4b, 0x39, 0x05, 0xbb, 0xc1, 0xd2, 0x04, 0x3f, 0x4f, 0x58, 0x5d, 0x60, + 0x62, 0x63, 0x63, 0x63, 0x61, 0x5f, 0x60, 0x63, 0x65, 0x68, 0x69, 0x67, + 0x68, 0x68, 0x67, 0x69, 0x68, 0x69, 0x6a, 0x6a, 0x6b, 0x6a, 0x6a, 0x69, + 0x67, 0x64, 0x61, 0x5d, 0x55, 0x48, 0x32, 0x1f, 0x11, 0x0a, 0x04, 0x06, + 0x07, 0x00, 0xfe, 0x02, 0x10, 0x29, 0x46, 0x52, 0x48, 0x18, 0xde, 0xc1, + 0xb6, 0xba, 0xb6, 0xb8, 0xbe, 0xc0, 0xbf, 0xbd, 0xba, 0xbb, 0xb9, 0xb5, + 0xaf, 0xa8, 0xa3, 0xa4, 0xae, 0xb5, 0xb4, 0xb3, 0xae, 0xa2, 0xa3, 0xab, + 0xac, 0xa6, 0xb5, 0xb9, 0xb9, 0xb9, 0xb7, 0xb5, 0xae, 0xb7, 0xbb, 0xaf, + 0xa7, 0xa7, 0xa8, 0xa7, 0xb2, 0xbe, 0xbb, 0xaf, 0xab, 0xb6, 0xbf, 0xbb, + 0xbc, 0xbe, 0xb9, 0xbc, 0xc4, 0xc8, 0xc9, 0xcc, 0xcc, 0xce, 0xc7, 0xba, + 0xbd, 0xc8, 0xca, 0xc6, 0xc8, 0xc7, 0xc3, 0xc0, 0xc1, 0xc5, 0xc1, 0xbf, + 0xc8, 0xd2, 0xcf, 0xc1, 0xc0, 0xd1, 0xd1, 0xcd, 0xce, 0xcc, 0xc0, 0xaf, + 0xaf, 0xaf, 0xb8, 0xbd, 0xb6, 0xa9, 0xb3, 0xc1, 0xc8, 0xc6, 0xbf, 0xbb, + 0xb3, 0xb1, 0xb8, 0xbd, 0xc1, 0xc3, 0xd1, 0xd4, 0xc4, 0xc1, 0xc1, 0xbb, + 0xb9, 0xb4, 0xb1, 0xaf, 0xaa, 0xa4, 0xa5, 0xa3, 0x9f, 0xa2, 0xae, 0xbb, + 0xc6, 0xc8, 0xc4, 0xc0, 0xbe, 0xb8, 0xb4, 0xcb, 0xd6, 0xc5, 0xbb, 0xbf, + 0xc2, 0xc2, 0xb9, 0xb5, 0xb2, 0xbd, 0xc3, 0xb8, 0xa9, 0xa4, 0xa0, 0x9c, + 0x9e, 0x9d, 0x9e, 0xa1, 0x9e, 0x9f, 0xa2, 0xa9, 0xaa, 0xa8, 0xb3, 0xc7, + 0xe7, 0x06, 0x1f, 0x31, 0x41, 0x4a, 0x50, 0x55, 0x4a, 0x2b, 0xc8, 0xb1, + 0xb7, 0xcb, 0x11, 0x3e, 0x52, 0x5c, 0x61, 0x63, 0x63, 0x64, 0x63, 0x61, + 0x5e, 0x5d, 0x5f, 0x62, 0x65, 0x67, 0x67, 0x66, 0x65, 0x64, 0x65, 0x66, + 0x67, 0x68, 0x69, 0x69, 0x69, 0x68, 0x67, 0x66, 0x63, 0x61, 0x5d, 0x58, + 0x50, 0x45, 0x33, 0x24, 0x17, 0x08, 0x02, 0x04, 0xfc, 0xf8, 0xf7, 0xfd, + 0x0c, 0x27, 0x47, 0x54, 0x4b, 0x29, 0xed, 0xc4, 0xb7, 0xbb, 0xb5, 
0xb5, + 0xbe, 0xc0, 0xbd, 0xbb, 0xb9, 0xb9, 0xb7, 0xb3, 0xab, 0xa2, 0x9e, 0xa1, + 0xad, 0xb5, 0xb3, 0xb3, 0xaf, 0xa9, 0xa7, 0xab, 0xa7, 0x9f, 0xaa, 0xb7, + 0xbb, 0xb5, 0xb2, 0xb2, 0xa7, 0xa7, 0xb0, 0xb5, 0xac, 0xa5, 0xa5, 0xa5, + 0xb5, 0xc8, 0xc8, 0xc2, 0xb9, 0xb8, 0xbb, 0xc5, 0xc1, 0xba, 0xb9, 0xbf, + 0xc4, 0xc5, 0xc8, 0xca, 0xc8, 0xca, 0xc2, 0xbb, 0xc7, 0xc9, 0xca, 0xc9, + 0xc7, 0xc8, 0xc8, 0xc3, 0xc3, 0xc7, 0xc1, 0xc2, 0xcc, 0xd2, 0xd4, 0xc8, + 0xbf, 0xcb, 0xd3, 0xd2, 0xcf, 0xc8, 0xbb, 0xb2, 0xaf, 0xaf, 0xbb, 0xc5, + 0xc5, 0xb5, 0xad, 0xbb, 0xc1, 0xc5, 0xc1, 0xbc, 0xbb, 0xb8, 0xbc, 0xbd, + 0xc2, 0xcb, 0xe3, 0xee, 0xe2, 0xd5, 0xcb, 0xc0, 0xbb, 0xb5, 0xb2, 0xb0, + 0xaf, 0xb1, 0xaa, 0x9f, 0x9f, 0xab, 0xb6, 0xbe, 0xc1, 0xbd, 0xbb, 0xb5, + 0xb6, 0xb4, 0xae, 0xc0, 0xd5, 0xc3, 0xad, 0xb3, 0xc1, 0xc7, 0xbb, 0xb6, + 0xb2, 0xb2, 0xb8, 0xc5, 0xce, 0xc6, 0xbb, 0xb5, 0xb8, 0xae, 0xa6, 0xad, + 0x9f, 0x9f, 0x9f, 0xa6, 0xab, 0xae, 0xb7, 0xcb, 0xf3, 0x1c, 0x31, 0x3c, + 0x47, 0x50, 0x57, 0x53, 0x48, 0x26, 0xd4, 0xbc, 0xc0, 0xe8, 0x2b, 0x4f, + 0x5d, 0x64, 0x66, 0x66, 0x67, 0x65, 0x63, 0x5f, 0x5d, 0x5c, 0x5f, 0x62, + 0x64, 0x65, 0x64, 0x63, 0x63, 0x65, 0x66, 0x66, 0x66, 0x68, 0x69, 0x69, + 0x69, 0x67, 0x64, 0x64, 0x63, 0x5f, 0x5c, 0x57, 0x4d, 0x3d, 0x30, 0x28, + 0x21, 0x17, 0x10, 0x06, 0xfd, 0xf8, 0xf6, 0xf9, 0x0e, 0x2f, 0x4a, 0x54, + 0x4e, 0x32, 0xfc, 0xd9, 0xc8, 0xbc, 0xb7, 0xb4, 0xb9, 0xbb, 0xbb, 0xba, + 0xb5, 0xb0, 0xa9, 0xa7, 0xa5, 0x9d, 0x9c, 0xa2, 0xad, 0xb3, 0xb4, 0xb3, + 0xaf, 0xa9, 0xad, 0xad, 0xa4, 0xa1, 0xa6, 0xb4, 0xb7, 0xb3, 0xaf, 0xae, + 0xa9, 0xa6, 0xaa, 0xaf, 0xb5, 0xab, 0xa5, 0xa5, 0xb5, 0xc3, 0xc4, 0xc3, + 0xbe, 0xba, 0xbb, 0xc6, 0xc1, 0xb6, 0xb5, 0xc1, 0xc3, 0xc5, 0xc8, 0xca, + 0xcb, 0xce, 0xc8, 0xc7, 0xd0, 0xcb, 0xca, 0xc8, 0xca, 0xcd, 0xc8, 0xc5, + 0xc3, 0xc1, 0xc0, 0xc6, 0xd1, 0xd5, 0xd4, 0xcf, 0xc4, 0xce, 0xdb, 0xd9, + 0xd5, 0xc9, 0xbf, 0xbb, 0xaf, 0xaa, 0xb5, 0xc1, 0xc6, 0xc0, 0xb6, 0xb3, + 0xae, 0xb9, 0xc1, 0xbd, 0xbe, 0xbf, 0xc2, 0xc1, 0xc3, 0xc8, 0xda, 
0xec, + 0xf1, 0xf0, 0xea, 0xdd, 0xc8, 0xb6, 0xb7, 0xb2, 0xaf, 0xb4, 0xab, 0x9d, + 0x9f, 0xae, 0xb5, 0xbb, 0xba, 0xb9, 0xb8, 0xb5, 0xb6, 0xb1, 0xaa, 0xb2, + 0xbe, 0xb3, 0xa6, 0xa3, 0xaf, 0xc1, 0xb8, 0xb4, 0xba, 0xb5, 0xac, 0xbb, + 0xd5, 0xd3, 0xc1, 0xb9, 0xb9, 0xae, 0xa9, 0xb4, 0xa2, 0xa3, 0xa1, 0xa2, + 0xa7, 0xae, 0xb7, 0xd9, 0x11, 0x2d, 0x39, 0x42, 0x4c, 0x55, 0x56, 0x50, + 0x41, 0x39, 0x1d, 0xfd, 0x16, 0x3c, 0x58, 0x66, 0x69, 0x69, 0x69, 0x6a, + 0x69, 0x66, 0x63, 0x5f, 0x5d, 0x5d, 0x5f, 0x60, 0x62, 0x61, 0x62, 0x63, + 0x63, 0x63, 0x63, 0x63, 0x63, 0x65, 0x67, 0x67, 0x67, 0x66, 0x64, 0x63, + 0x62, 0x5d, 0x5a, 0x56, 0x50, 0x43, 0x34, 0x26, 0x1c, 0x17, 0x11, 0x06, + 0xfc, 0xfa, 0xf5, 0xf9, 0x15, 0x36, 0x4c, 0x54, 0x52, 0x48, 0x2b, 0xfe, + 0xd8, 0xc6, 0xba, 0xb1, 0xb0, 0xb4, 0xb6, 0xb5, 0xae, 0xa4, 0x9c, 0x9d, + 0xa3, 0x9f, 0xa2, 0xaa, 0xb2, 0xb4, 0xb5, 0xb3, 0xad, 0xab, 0xaf, 0xaa, + 0xa4, 0xa6, 0xab, 0xb3, 0xb4, 0xaf, 0xae, 0xac, 0xa3, 0xa3, 0xa7, 0xaf, + 0xaf, 0xaf, 0xa9, 0xa9, 0xb3, 0xba, 0xbd, 0xbd, 0xb9, 0xc1, 0xc1, 0xbf, + 0xbd, 0xb5, 0xb9, 0xc1, 0xc4, 0xc9, 0xcb, 0xc8, 0xce, 0xd7, 0xcf, 0xcd, + 0xce, 0xc5, 0xc5, 0xc7, 0xce, 0xd1, 0xc9, 0xc6, 0xc4, 0xc3, 0xc1, 0xc2, + 0xca, 0xd8, 0xd8, 0xd3, 0xcb, 0xcb, 0xd5, 0xd8, 0xd4, 0xcb, 0xc0, 0xc4, + 0xbf, 0xaf, 0xb2, 0xb4, 0xb8, 0xb8, 0xb6, 0xb8, 0xb1, 0xb6, 0xc1, 0xc0, + 0xc1, 0xc3, 0xc1, 0xc1, 0xbb, 0xc2, 0xd2, 0xe7, 0xf1, 0xf3, 0xed, 0xea, + 0xda, 0xbe, 0xb7, 0xb4, 0xb2, 0xb1, 0xa9, 0xa5, 0xa7, 0xaf, 0xb5, 0xb4, + 0xb4, 0xb8, 0xbe, 0xb9, 0xb5, 0xaf, 0xa8, 0xac, 0xb2, 0xaf, 0xa7, 0x9d, + 0x9f, 0xa9, 0xb1, 0xb2, 0xaf, 0xa9, 0xa3, 0xa2, 0xb5, 0xc5, 0xbb, 0xa6, + 0xa8, 0xb2, 0xaf, 0xb2, 0xa5, 0xa7, 0xa2, 0xa1, 0xa6, 0xb3, 0xcf, 0xfd, + 0x1f, 0x30, 0x3f, 0x4a, 0x50, 0x54, 0x50, 0x4a, 0x3c, 0x3f, 0x3f, 0x46, + 0x58, 0x61, 0x68, 0x6a, 0x6b, 0x6a, 0x6b, 0x6b, 0x69, 0x67, 0x65, 0x5f, + 0x5d, 0x5d, 0x5b, 0x5c, 0x5d, 0x5c, 0x5b, 0x5c, 0x5d, 0x5f, 0x62, 0x63, + 0x63, 0x65, 0x67, 0x66, 0x64, 0x63, 0x63, 0x63, 0x61, 0x5d, 0x5a, 
0x55, + 0x4d, 0x42, 0x2e, 0x1b, 0x0b, 0x00, 0xf9, 0xf5, 0xef, 0xea, 0xeb, 0xfe, + 0x1f, 0x37, 0x4b, 0x52, 0x56, 0x4e, 0x37, 0x19, 0xf3, 0xd6, 0xc1, 0xb3, + 0xb0, 0xbe, 0xb4, 0xaf, 0xae, 0xa9, 0x9d, 0x9d, 0xa3, 0xa2, 0xa4, 0xb0, + 0xb5, 0xb0, 0xb1, 0xb1, 0xac, 0xac, 0xae, 0xa8, 0xa9, 0xaa, 0xad, 0xae, + 0xaf, 0xaf, 0xb4, 0xb8, 0xb2, 0xaf, 0xaf, 0xb1, 0xae, 0xa8, 0xab, 0xb5, + 0xc1, 0xc0, 0xb9, 0xb5, 0xb2, 0xc1, 0xc6, 0xbe, 0xba, 0xb5, 0xbb, 0xc5, + 0xce, 0xd1, 0xce, 0xcf, 0xd2, 0xdc, 0xd5, 0xce, 0xc9, 0xbf, 0xc1, 0xcb, + 0xd4, 0xcd, 0xcb, 0xc9, 0xc6, 0xc3, 0xc4, 0xc5, 0xc2, 0xc8, 0xcf, 0xd4, + 0xd0, 0xcc, 0xcc, 0xd1, 0xd2, 0xca, 0xc4, 0xc8, 0xcb, 0xca, 0xbb, 0xb1, + 0xb0, 0xac, 0xaf, 0xb4, 0xb8, 0xb8, 0xb5, 0xbc, 0xc3, 0xc0, 0xba, 0xc8, + 0xb7, 0xb9, 0xcc, 0xe2, 0xef, 0xee, 0xea, 0xeb, 0xdc, 0xc0, 0xb8, 0xb5, + 0xb5, 0xbb, 0xc1, 0xc1, 0xb6, 0xad, 0xad, 0xb5, 0xc3, 0xbb, 0xbc, 0xba, + 0xb4, 0xae, 0xa9, 0xaa, 0xac, 0xa6, 0xa2, 0xa0, 0xa2, 0xa9, 0xb1, 0xab, + 0xa4, 0xa6, 0xac, 0xae, 0xad, 0xb7, 0xc0, 0xb1, 0xaf, 0xb5, 0xb6, 0xb6, + 0xa9, 0xa9, 0xa6, 0xaa, 0xb9, 0xce, 0xe4, 0xfc, 0x20, 0x39, 0x47, 0x4f, + 0x51, 0x50, 0x4f, 0x50, 0x53, 0x58, 0x5b, 0x62, 0x65, 0x67, 0x69, 0x6a, + 0x69, 0x6a, 0x6b, 0x6b, 0x6a, 0x68, 0x65, 0x5f, 0x5b, 0x57, 0x53, 0x53, + 0x53, 0x4f, 0x51, 0x56, 0x5a, 0x5d, 0x5f, 0x60, 0x61, 0x63, 0x64, 0x67, + 0x65, 0x62, 0x61, 0x60, 0x60, 0x5d, 0x5a, 0x55, 0x4b, 0x43, 0x38, 0x25, + 0x10, 0xfe, 0xf6, 0xeb, 0xdd, 0xe1, 0xe9, 0x0b, 0x25, 0x37, 0x48, 0x4e, + 0x56, 0x52, 0x49, 0x33, 0x11, 0xed, 0xcd, 0xb4, 0xad, 0xc2, 0xb6, 0xaf, + 0xad, 0xae, 0xa2, 0xa1, 0xa5, 0xaa, 0xa7, 0xae, 0xb2, 0xaf, 0xaf, 0xad, + 0xad, 0xae, 0xac, 0xa6, 0xab, 0xaa, 0xac, 0xb0, 0xb5, 0xb7, 0xc0, 0xcf, + 0xcb, 0xc1, 0xbc, 0xbb, 0xb7, 0xaf, 0xb2, 0xc8, 0xd4, 0xcc, 0xbd, 0xb8, + 0xb4, 0xbe, 0xc7, 0xc1, 0xbf, 0xbb, 0xbc, 0xc4, 0xd0, 0xcf, 0xce, 0xd0, + 0xd4, 0xda, 0xd7, 0xcd, 0xc5, 0xbd, 0xc3, 0xcd, 0xd4, 0xc6, 0xc1, 0xc6, + 0xca, 0xc9, 0xc6, 0xc4, 0xc4, 0xc2, 0xc8, 0xce, 0xce, 0xce, 0xd0, 
0xce, + 0xce, 0xca, 0xc0, 0xc8, 0xce, 0xd1, 0xc9, 0xbb, 0xb9, 0xb8, 0xb4, 0xb1, + 0xb6, 0xb4, 0xaf, 0xb7, 0xc2, 0xc0, 0xc4, 0xc1, 0xaf, 0xb1, 0xc2, 0xda, + 0xec, 0xed, 0xed, 0xea, 0xe6, 0xe0, 0xc8, 0xb5, 0xb4, 0xc4, 0xce, 0xca, + 0xc0, 0xad, 0xaf, 0xbe, 0xc1, 0xb3, 0xb7, 0xb5, 0xb1, 0xab, 0xa5, 0xa4, + 0xa2, 0xa0, 0x9d, 0xa1, 0xa2, 0xab, 0xb1, 0xaa, 0xa9, 0xa9, 0xae, 0xbb, + 0xb9, 0xb5, 0xbb, 0xbb, 0xbf, 0xc2, 0xc0, 0xbb, 0xa8, 0xae, 0xb0, 0xb8, + 0xc2, 0xc8, 0xe1, 0x0c, 0x23, 0x2c, 0x34, 0x44, 0x4f, 0x50, 0x54, 0x5a, + 0x5e, 0x61, 0x63, 0x64, 0x68, 0x69, 0x69, 0x69, 0x69, 0x6a, 0x6b, 0x6b, + 0x6b, 0x69, 0x64, 0x5d, 0x54, 0x4b, 0x47, 0x41, 0x3e, 0x40, 0x50, 0x57, + 0x5b, 0x5c, 0x5d, 0x60, 0x61, 0x61, 0x61, 0x63, 0x64, 0x63, 0x60, 0x5f, + 0x5e, 0x5d, 0x59, 0x57, 0x4f, 0x47, 0x3b, 0x2b, 0x19, 0x06, 0xe1, 0xc8, + 0xc8, 0xd9, 0xec, 0x15, 0x27, 0x3b, 0x44, 0x4a, 0x52, 0x51, 0x4a, 0x39, + 0x1f, 0xff, 0xd6, 0xc2, 0xb7, 0xc6, 0xb5, 0xad, 0xb0, 0xb1, 0xa8, 0xaa, + 0xaf, 0xb2, 0xb1, 0xb1, 0xaf, 0xaf, 0xaf, 0xaf, 0xaf, 0xaf, 0xaf, 0xab, + 0xaf, 0xb1, 0xb3, 0xb5, 0xb4, 0xb6, 0xc0, 0xd3, 0xd2, 0xc8, 0xc1, 0xbf, + 0xb9, 0xb3, 0xb4, 0xc6, 0xc9, 0xc1, 0xba, 0xb8, 0xb5, 0xb9, 0xc0, 0xbf, + 0xbb, 0xb9, 0xb6, 0xb8, 0xbc, 0xc1, 0xc5, 0xcc, 0xcc, 0xd1, 0xd2, 0xcf, + 0xc3, 0xc3, 0xc6, 0xd1, 0xce, 0xc6, 0xc0, 0xc0, 0xc3, 0xc8, 0xc6, 0xc8, + 0xc5, 0xc3, 0xc6, 0xc8, 0xcb, 0xcd, 0xd8, 0xd4, 0xce, 0xc8, 0xc9, 0xd3, + 0xd6, 0xdb, 0xd0, 0xbe, 0xbc, 0xbf, 0xbd, 0xbb, 0xc1, 0xbc, 0xb7, 0xb6, + 0xba, 0xc2, 0xc7, 0xb6, 0xb1, 0xb6, 0xbc, 0xd4, 0xef, 0xf1, 0xec, 0xe7, + 0xe7, 0xf0, 0xd9, 0xa9, 0xa3, 0xba, 0xbd, 0xbc, 0xc6, 0xc5, 0xc3, 0xb1, + 0xac, 0xab, 0xb1, 0xb0, 0xaf, 0xa9, 0xa2, 0x9e, 0x9c, 0x9e, 0x9c, 0x9e, + 0xa2, 0xa4, 0xa9, 0xac, 0xaf, 0xae, 0xac, 0xb1, 0xb4, 0xb6, 0xb2, 0xba, + 0xc4, 0xc9, 0xc2, 0xba, 0xaf, 0xb6, 0xb9, 0xbb, 0xc5, 0xd9, 0xf3, 0xec, + 0xe9, 0xee, 0xef, 0xf9, 0x1b, 0x36, 0x41, 0x4a, 0x4f, 0x56, 0x5b, 0x5e, + 0x61, 0x64, 0x67, 0x69, 0x69, 0x69, 0x68, 0x67, 0x68, 0x64, 0x5d, 
0x51, + 0x44, 0x3b, 0x33, 0x2e, 0x31, 0x44, 0x53, 0x5a, 0x5d, 0x5f, 0x5d, 0x5c, + 0x5f, 0x60, 0x60, 0x62, 0x62, 0x63, 0x61, 0x5e, 0x5d, 0x5a, 0x57, 0x55, + 0x50, 0x48, 0x3d, 0x2d, 0x16, 0xfc, 0xda, 0xc1, 0xbd, 0xcf, 0xf8, 0x1e, + 0x2f, 0x3e, 0x3f, 0x47, 0x4d, 0x50, 0x4e, 0x46, 0x35, 0x1f, 0x08, 0xef, + 0xde, 0xda, 0xc4, 0xb3, 0xb1, 0xb0, 0xa8, 0xaa, 0xaf, 0xa9, 0xa9, 0xaa, + 0xaf, 0xaf, 0xae, 0xac, 0xac, 0xb0, 0xb0, 0xae, 0xaf, 0xaf, 0xb4, 0xb3, + 0xb2, 0xb4, 0xbc, 0xd3, 0xd1, 0xca, 0xc0, 0xbb, 0xb8, 0xb3, 0xb5, 0xc1, + 0xc1, 0xc1, 0xc3, 0xc2, 0xbe, 0xc1, 0xcc, 0xc9, 0xc0, 0xbe, 0xc0, 0xbf, + 0xbd, 0xbc, 0xc1, 0xcd, 0xc8, 0xc8, 0xca, 0xc8, 0xbe, 0xcd, 0xc9, 0xce, + 0xc4, 0xc8, 0xc8, 0xbf, 0xbd, 0xc2, 0xc5, 0xc5, 0xc7, 0xc6, 0xc6, 0xc1, + 0xc3, 0xca, 0xdd, 0xd5, 0xd5, 0xd2, 0xd1, 0xd6, 0xda, 0xe1, 0xe1, 0xd4, + 0xc8, 0xc3, 0xc1, 0xc1, 0xc9, 0xcd, 0xc8, 0xc2, 0xb9, 0xbb, 0xb7, 0xb0, + 0xb8, 0xc8, 0xce, 0xd8, 0xe6, 0xe7, 0xe6, 0xe7, 0xe6, 0xef, 0xe4, 0xb5, + 0xa6, 0xa6, 0xb3, 0xc0, 0xc9, 0xd1, 0xd6, 0xc3, 0xb4, 0xb3, 0xb5, 0xb2, + 0xb2, 0xae, 0xae, 0xb0, 0xaf, 0xad, 0xa8, 0xa1, 0xa3, 0xa3, 0xa8, 0xaf, + 0xb5, 0xb5, 0xb2, 0xb3, 0xb5, 0xba, 0xb9, 0xc0, 0xc7, 0xc7, 0xbe, 0xb1, + 0xb7, 0xbc, 0xbc, 0xbb, 0xc5, 0xe7, 0xd3, 0xbb, 0xd5, 0xdd, 0xe7, 0xe7, + 0xe9, 0xf2, 0x0b, 0x23, 0x31, 0x41, 0x50, 0x5a, 0x5e, 0x63, 0x66, 0x68, + 0x69, 0x69, 0x66, 0x65, 0x63, 0x5c, 0x4c, 0x3c, 0x2a, 0x1f, 0x17, 0x0c, + 0xfd, 0x0e, 0x3e, 0x58, 0x5d, 0x5e, 0x5d, 0x5c, 0x5b, 0x5d, 0x5d, 0x5f, + 0x60, 0x60, 0x60, 0x5d, 0x5c, 0x5a, 0x57, 0x54, 0x4f, 0x49, 0x3e, 0x2a, + 0x0d, 0xef, 0xd3, 0xc0, 0xbb, 0xd3, 0x0c, 0x28, 0x38, 0x3e, 0x3d, 0x47, + 0x4a, 0x4c, 0x49, 0x38, 0x23, 0x04, 0xe0, 0xcb, 0xc6, 0xce, 0xbf, 0xb3, + 0xb0, 0xb1, 0xb0, 0xac, 0xa5, 0xa0, 0xa2, 0xa4, 0xa9, 0xaf, 0xaf, 0xad, + 0xa8, 0xab, 0xa8, 0xad, 0xaf, 0xaf, 0xb3, 0xb5, 0xb3, 0xaf, 0xbb, 0xd1, + 0xcb, 0xc8, 0xbe, 0xb5, 0xb3, 0xaf, 0xb5, 0xbf, 0xc2, 0xc4, 0xc1, 0xbd, + 0xbc, 0xc7, 0xd3, 0xd1, 0xcb, 0xc4, 0xc1, 0xc1, 0xc0, 0xc1, 0xc2, 
0xc8, + 0xc4, 0xc1, 0xbf, 0xb5, 0xad, 0xb7, 0xc3, 0xb5, 0xab, 0xb1, 0xb7, 0xad, + 0xa7, 0xa8, 0xac, 0xb4, 0xbf, 0xc8, 0xcb, 0xc8, 0xc1, 0xc7, 0xdd, 0xd9, + 0xd6, 0xcf, 0xd3, 0xd7, 0xde, 0xe2, 0xe2, 0xe2, 0xd4, 0xc4, 0xbe, 0xbe, + 0xc5, 0xca, 0xcd, 0xcb, 0xbd, 0xbd, 0xb5, 0xb5, 0xc6, 0xd3, 0xd5, 0xd2, + 0xd8, 0xd3, 0xd2, 0xe0, 0xdf, 0xe6, 0xe2, 0xc1, 0xb4, 0xb5, 0xc3, 0xd4, + 0xda, 0xe4, 0xe1, 0xc6, 0xb1, 0xb5, 0xb8, 0xbb, 0xbd, 0xc2, 0xc8, 0xc1, + 0xb8, 0xb3, 0xb1, 0xaf, 0xb0, 0xb2, 0xb9, 0xbf, 0xbb, 0xbd, 0xbb, 0xb8, + 0xb5, 0xbb, 0xc2, 0xc7, 0xc8, 0xc5, 0xc4, 0xba, 0xba, 0xc0, 0xc1, 0xc2, + 0xdd, 0xfb, 0xb7, 0xaf, 0xc1, 0xc9, 0xd4, 0xdb, 0xdf, 0xdf, 0xdf, 0xf9, + 0x1a, 0x35, 0x4a, 0x58, 0x5f, 0x62, 0x64, 0x67, 0x69, 0x69, 0x69, 0x67, + 0x63, 0x58, 0x33, 0x19, 0x07, 0xe9, 0xd7, 0xc3, 0xb3, 0xba, 0xf9, 0x34, + 0x55, 0x5c, 0x5e, 0x5d, 0x5b, 0x5a, 0x5c, 0x5d, 0x5e, 0x5f, 0x5e, 0x5e, + 0x5d, 0x5a, 0x58, 0x56, 0x52, 0x4f, 0x49, 0x39, 0x1b, 0xf4, 0xd7, 0xc6, + 0xc1, 0xe7, 0x14, 0x2f, 0x3e, 0x3a, 0x3c, 0x43, 0x47, 0x4a, 0x4c, 0x47, + 0x3d, 0x2f, 0x16, 0xf9, 0xda, 0xce, 0xb4, 0xa9, 0xb0, 0xb5, 0xaf, 0xa4, + 0x9f, 0xa0, 0x9e, 0xa1, 0xa9, 0xaf, 0xb0, 0xaf, 0xa7, 0xa9, 0xa5, 0xa9, + 0xb1, 0xaf, 0xb1, 0xb5, 0xb3, 0xb2, 0xc1, 0xcc, 0xc6, 0xc3, 0xbe, 0xb4, + 0xb0, 0xad, 0xb5, 0xbc, 0xbb, 0xb9, 0xb9, 0xbb, 0xc8, 0xce, 0xd3, 0xd4, + 0xd2, 0xd4, 0xcb, 0xc3, 0xc1, 0xc1, 0xbc, 0xbb, 0xba, 0xbb, 0xb9, 0xb2, + 0xb0, 0xbb, 0xc1, 0xb2, 0xaa, 0xa3, 0xa3, 0xa2, 0xa7, 0xa9, 0xa6, 0xa9, + 0xb5, 0xbe, 0xc4, 0xc8, 0xc2, 0xc6, 0xde, 0xda, 0xd1, 0xc9, 0xd2, 0xd3, + 0xcd, 0xca, 0xcc, 0xde, 0xd4, 0xc1, 0xc0, 0xbf, 0xc7, 0xca, 0xca, 0xcb, + 0xc8, 0xc5, 0xb1, 0xbb, 0xd3, 0xd0, 0xd0, 0xd7, 0xd5, 0xce, 0xc8, 0xc5, + 0xce, 0xdb, 0xe1, 0xcb, 0xc1, 0xc2, 0xc3, 0xc8, 0xd1, 0xdc, 0xd3, 0xc1, + 0xbb, 0xc2, 0xc2, 0xc2, 0xc6, 0xc8, 0xc4, 0xbe, 0xb8, 0xb6, 0xb7, 0xb7, + 0xb6, 0xba, 0xcb, 0xc9, 0xb6, 0xb8, 0xb7, 0xba, 0xbe, 0xbd, 0xc2, 0xc7, + 0xc3, 0xc6, 0xc8, 0xc2, 0xc0, 0xc6, 0xc8, 0xd4, 0xfe, 0xe4, 0xa9, 
0xab, + 0xb7, 0xba, 0xaf, 0xbb, 0xcf, 0xd4, 0xd4, 0xe0, 0x0c, 0x35, 0x4a, 0x57, + 0x5e, 0x62, 0x63, 0x65, 0x66, 0x66, 0x69, 0x68, 0x63, 0x5e, 0x33, 0x0b, + 0xe7, 0xc5, 0xb2, 0xad, 0xc3, 0xcf, 0xc9, 0x03, 0x3b, 0x57, 0x5b, 0x5e, + 0x5d, 0x5a, 0x59, 0x5c, 0x5d, 0x5d, 0x5e, 0x5d, 0x5d, 0x5b, 0x58, 0x57, + 0x56, 0x55, 0x51, 0x44, 0x2c, 0x10, 0xeb, 0xca, 0xcf, 0x00, 0x27, 0x3d, + 0x3e, 0x38, 0x3e, 0x43, 0x47, 0x4b, 0x4c, 0x45, 0x30, 0x1f, 0x1c, 0x16, + 0x00, 0xe7, 0xc0, 0xaf, 0xb4, 0xb5, 0xa5, 0xa0, 0x9f, 0xa2, 0x9e, 0xa0, + 0xa9, 0xaf, 0xb4, 0xb1, 0xb5, 0xb5, 0xaa, 0xa9, 0xad, 0xae, 0xaf, 0xb3, + 0xb4, 0xb4, 0xc0, 0xc2, 0xc1, 0xbb, 0xb7, 0xb2, 0xad, 0xa9, 0xb1, 0xbb, + 0xbc, 0xbb, 0xbc, 0xc6, 0xce, 0xcb, 0xd0, 0xd5, 0xd1, 0xd4, 0xd5, 0xce, + 0xc8, 0xc1, 0xb2, 0xa8, 0xab, 0xae, 0xa9, 0xad, 0xb7, 0xc5, 0xc6, 0xc3, + 0xbe, 0xb1, 0xa6, 0xa2, 0xa8, 0xae, 0xb5, 0xb3, 0xb5, 0xbd, 0xc1, 0xb9, + 0xaf, 0xb3, 0xd4, 0xda, 0xd0, 0xc8, 0xc8, 0xc7, 0xbe, 0xbd, 0xce, 0xd3, + 0xc8, 0xbe, 0xbf, 0xc2, 0xc8, 0xc8, 0xc6, 0xc6, 0xca, 0xcd, 0xc1, 0xc2, + 0xd4, 0xc8, 0xce, 0xda, 0xd4, 0xcd, 0xc5, 0xbe, 0xc1, 0xce, 0xdb, 0xd6, + 0xce, 0xc2, 0xbf, 0xbe, 0xbe, 0xc9, 0xc7, 0xc4, 0xc4, 0xc4, 0xbf, 0xb7, + 0xbb, 0xbc, 0xbf, 0xbe, 0xbb, 0xbd, 0xc1, 0xbf, 0xbf, 0xc8, 0xce, 0xc7, + 0xbd, 0xb9, 0xb6, 0xc0, 0xc1, 0xbd, 0xc2, 0xca, 0xc5, 0xc3, 0xc5, 0xc5, + 0xc1, 0xc8, 0xce, 0xe1, 0x06, 0xcc, 0xb2, 0xaa, 0xae, 0xa9, 0x9d, 0xa2, + 0xc1, 0xce, 0xd2, 0xd5, 0x01, 0x2b, 0x45, 0x54, 0x5e, 0x62, 0x63, 0x63, + 0x64, 0x62, 0x63, 0x66, 0x65, 0x62, 0x44, 0x15, 0xdd, 0xc2, 0xb4, 0xbb, + 0xbc, 0xd6, 0xce, 0xd6, 0x0f, 0x4a, 0x57, 0x59, 0x5c, 0x5d, 0x59, 0x58, + 0x5b, 0x5c, 0x5c, 0x5d, 0x5a, 0x59, 0x58, 0x54, 0x53, 0x53, 0x53, 0x4d, + 0x3b, 0x25, 0xff, 0xd6, 0xee, 0x24, 0x42, 0x47, 0x3e, 0x3d, 0x44, 0x49, + 0x4e, 0x51, 0x50, 0x48, 0x30, 0x07, 0xe9, 0xe8, 0xf9, 0x03, 0xec, 0xcf, + 0xc6, 0xbb, 0xa6, 0xa2, 0xa4, 0xa3, 0xa1, 0x9f, 0xaa, 0xb3, 0xbc, 0xbb, + 0xc0, 0xc1, 0xb3, 0xa5, 0xa7, 0xb1, 0xb5, 0xb4, 0xaf, 0xb5, 0xbf, 
0xbb, + 0xbb, 0xb1, 0xb1, 0xb1, 0xa9, 0xa3, 0xac, 0xba, 0xbe, 0xc1, 0xc1, 0xca, + 0xcc, 0xc8, 0xcd, 0xd4, 0xcf, 0xcf, 0xd4, 0xd4, 0xcf, 0xc6, 0xb2, 0x9e, + 0xa3, 0xab, 0xa2, 0xa5, 0xb8, 0xc7, 0xce, 0xc9, 0xc5, 0xbf, 0xb0, 0xa0, + 0x9e, 0xa4, 0xb6, 0xbc, 0xbc, 0xc0, 0xc6, 0xbd, 0xa9, 0xa7, 0xc0, 0xd4, + 0xce, 0xcb, 0xc9, 0xc8, 0xc5, 0xc9, 0xd4, 0xc9, 0xc3, 0xbf, 0xbc, 0xbf, + 0xc2, 0xc3, 0xc3, 0xc5, 0xc0, 0xc1, 0xc3, 0xc1, 0xc4, 0xc3, 0xc5, 0xca, + 0xc2, 0xb9, 0xb6, 0xb9, 0xbd, 0xc2, 0xd2, 0xda, 0xc9, 0xc1, 0xc0, 0xc3, + 0xc6, 0xc8, 0xc1, 0xc0, 0xc1, 0xc1, 0xbc, 0xba, 0xbd, 0xbc, 0xc5, 0xc1, + 0xc4, 0xc5, 0xc7, 0xc4, 0xca, 0xd3, 0xd8, 0xce, 0xc1, 0xbe, 0xbb, 0xc3, + 0xc9, 0xce, 0xca, 0xce, 0xd1, 0xca, 0xc8, 0xca, 0xbe, 0xc1, 0xc3, 0xec, + 0x04, 0xbd, 0xac, 0xa7, 0xae, 0xb1, 0xa4, 0xa7, 0xbb, 0xc4, 0xc7, 0xc8, + 0xed, 0x2b, 0x4d, 0x55, 0x57, 0x5d, 0x62, 0x63, 0x63, 0x61, 0x60, 0x62, + 0x63, 0x60, 0x50, 0x31, 0x13, 0xed, 0xce, 0xc7, 0xbf, 0xb8, 0xc0, 0xbf, + 0xd2, 0x29, 0x50, 0x57, 0x5a, 0x5d, 0x5d, 0x5b, 0x5a, 0x5c, 0x5b, 0x5d, + 0x5d, 0x58, 0x56, 0x56, 0x55, 0x53, 0x52, 0x50, 0x48, 0x3b, 0x29, 0x1b, + 0x25, 0x3a, 0x47, 0x41, 0x3e, 0x41, 0x48, 0x4d, 0x50, 0x53, 0x52, 0x4d, + 0x37, 0x16, 0xf0, 0xd1, 0xc4, 0xdd, 0xef, 0xd4, 0xd0, 0xce, 0xbb, 0xad, + 0xa8, 0xa7, 0xa3, 0xab, 0xb4, 0xbe, 0xc7, 0xc2, 0xc0, 0xbe, 0xb9, 0xa3, + 0xa1, 0xb3, 0xb6, 0xb1, 0xae, 0xb8, 0xbc, 0xb5, 0xb6, 0xb2, 0xb0, 0xaf, + 0xa9, 0xa3, 0xab, 0xb8, 0xbc, 0xbb, 0xc1, 0xc5, 0xc8, 0xc7, 0xcc, 0xd1, + 0xce, 0xce, 0xce, 0xd0, 0xcc, 0xc0, 0xaf, 0x9d, 0xa2, 0xb2, 0xaa, 0xac, + 0xb8, 0xbe, 0xc8, 0xc8, 0xc2, 0xbb, 0xb4, 0xa5, 0x9c, 0xa3, 0xb2, 0xb2, + 0xb1, 0xbf, 0xc3, 0xc7, 0xb5, 0xa3, 0xb3, 0xcb, 0xc8, 0xcd, 0xd2, 0xcf, + 0xc9, 0xce, 0xce, 0xc2, 0xc0, 0xbf, 0xbb, 0xb8, 0xba, 0xbb, 0xbb, 0xbb, + 0xb4, 0xb5, 0xb5, 0xb7, 0xbb, 0xc8, 0xd5, 0xdb, 0xc5, 0xb4, 0xb8, 0xc4, + 0xc8, 0xca, 0xce, 0xdc, 0xca, 0xb9, 0xbb, 0xc0, 0xbc, 0xb8, 0xb4, 0xb6, + 0xba, 0xba, 0xb4, 0xaf, 0xb8, 0xbd, 0xc1, 0xbd, 0xc0, 0xc7, 0xc7, 
0xcb, + 0xd3, 0xd8, 0xe7, 0xe0, 0xc3, 0xbf, 0xc0, 0xd3, 0xe5, 0xee, 0xe0, 0xd0, + 0xce, 0xcf, 0xd0, 0xd3, 0xb8, 0xbb, 0xc4, 0xfc, 0x00, 0xbd, 0xa9, 0xa7, + 0xab, 0xb5, 0xb2, 0xab, 0xb4, 0xba, 0xc2, 0xc1, 0xe4, 0x23, 0x4a, 0x59, + 0x5b, 0x5c, 0x61, 0x63, 0x63, 0x62, 0x61, 0x60, 0x5f, 0x5f, 0x59, 0x4a, + 0x41, 0x35, 0x10, 0xe6, 0xcf, 0xc5, 0xc5, 0xc0, 0xc4, 0x0f, 0x4b, 0x57, + 0x58, 0x5b, 0x5d, 0x5d, 0x5d, 0x5d, 0x5d, 0x5d, 0x5d, 0x5d, 0x5a, 0x56, + 0x56, 0x57, 0x56, 0x51, 0x4b, 0x44, 0x3b, 0x31, 0x33, 0x3c, 0x38, 0x40, + 0x46, 0x4a, 0x4e, 0x51, 0x52, 0x55, 0x57, 0x54, 0x47, 0x28, 0x01, 0xe4, + 0xca, 0xc8, 0xc5, 0xe6, 0xdd, 0xc9, 0xce, 0xbb, 0xaf, 0xad, 0xac, 0xb1, + 0xb8, 0xca, 0xd5, 0xce, 0xc5, 0xc0, 0xba, 0xae, 0xb0, 0xb9, 0xba, 0xb3, + 0xb1, 0xba, 0xbd, 0xb5, 0xb5, 0xb3, 0xaf, 0xa9, 0xa7, 0xa6, 0xad, 0xb9, + 0xb9, 0xbc, 0xc0, 0xbf, 0xc5, 0xc4, 0xc8, 0xd0, 0xc8, 0xcc, 0xcb, 0xc8, + 0xc8, 0xbe, 0xaf, 0x9e, 0xa0, 0xb2, 0xb3, 0xb3, 0xb9, 0xbc, 0xc2, 0xc0, + 0xbc, 0xb7, 0xb2, 0xa9, 0xab, 0xb9, 0xc0, 0xb2, 0xa0, 0xaa, 0xb8, 0xbe, + 0xba, 0xa7, 0xa8, 0xb2, 0xb5, 0xb9, 0xc8, 0xd2, 0xce, 0xce, 0xc7, 0xc3, + 0xc1, 0xbf, 0xb9, 0xb4, 0xb3, 0xb6, 0xb5, 0xb8, 0xb8, 0xbb, 0xbb, 0xbf, + 0xc4, 0xd0, 0xdd, 0xdd, 0xcc, 0xbb, 0xbb, 0xc2, 0xcf, 0xe0, 0xe7, 0xe1, + 0xc8, 0xbd, 0xc1, 0xc4, 0xc6, 0xca, 0xc6, 0xc8, 0xd1, 0xcc, 0xba, 0xb2, + 0xb9, 0xc6, 0xcf, 0xbc, 0xbb, 0xb6, 0xb4, 0xba, 0xbb, 0xc3, 0xc6, 0xc3, + 0xc2, 0xcc, 0xc2, 0xbf, 0xd3, 0xde, 0xd4, 0xcc, 0xcf, 0xd2, 0xd6, 0xd0, + 0xb9, 0xc1, 0xd0, 0xfd, 0xf7, 0xcb, 0xab, 0xa5, 0xa6, 0xad, 0xbd, 0xc1, + 0xbe, 0xbc, 0xc4, 0xd8, 0xf8, 0x26, 0x41, 0x54, 0x5d, 0x5f, 0x61, 0x63, + 0x64, 0x63, 0x63, 0x64, 0x61, 0x60, 0x5d, 0x54, 0x4e, 0x4d, 0x44, 0x30, + 0x15, 0xfb, 0xed, 0xe7, 0xde, 0x04, 0x41, 0x53, 0x58, 0x58, 0x5a, 0x5b, + 0x5d, 0x5e, 0x5d, 0x5c, 0x5d, 0x5e, 0x5d, 0x5a, 0x54, 0x52, 0x53, 0x51, + 0x4c, 0x43, 0x39, 0x2f, 0x1c, 0x12, 0x31, 0x47, 0x4c, 0x4f, 0x50, 0x53, + 0x57, 0x59, 0x58, 0x57, 0x50, 0x3e, 0x19, 0xe9, 0xd0, 0xce, 0xba, 
0xbd, + 0xe1, 0xde, 0xcb, 0xc4, 0xb9, 0xb5, 0xb5, 0xb2, 0xb6, 0xc3, 0xd4, 0xd1, + 0xc5, 0xbf, 0xb8, 0xb5, 0xb7, 0xbc, 0xbe, 0xbc, 0xb6, 0xb5, 0xb9, 0xb4, + 0xb3, 0xb1, 0xac, 0xa4, 0xa5, 0xa7, 0xae, 0xb5, 0xbc, 0xc2, 0xc1, 0xbb, + 0xbf, 0xbf, 0xc8, 0xd1, 0xc4, 0xbf, 0xc1, 0xc2, 0xc1, 0xbf, 0xb0, 0xa4, + 0xa3, 0xad, 0xb8, 0xb4, 0xb1, 0xbb, 0xc1, 0xbc, 0xb8, 0xb5, 0xae, 0xad, + 0xc4, 0xca, 0xc7, 0xbf, 0xaa, 0xa1, 0xa8, 0xaf, 0xae, 0xaf, 0xaf, 0xb1, + 0xb8, 0xb6, 0xc0, 0xd1, 0xcb, 0xc8, 0xc2, 0xc7, 0xc4, 0xbe, 0xb8, 0xb4, + 0xb6, 0xb8, 0xbb, 0xc4, 0xc6, 0xc5, 0xc8, 0xc7, 0xc8, 0xcf, 0xd4, 0xca, + 0xc1, 0xc0, 0xc1, 0xc0, 0xc1, 0xce, 0xdf, 0xd4, 0xc6, 0xce, 0xd3, 0xd4, + 0xd1, 0xc9, 0xcd, 0xcf, 0xc5, 0xc5, 0xc2, 0xbd, 0xbd, 0xc0, 0xd4, 0xd1, + 0xca, 0xc0, 0xbd, 0xc2, 0xc1, 0xc2, 0xc3, 0xbd, 0xc6, 0xc8, 0xc5, 0xc4, + 0xd1, 0xcd, 0xc9, 0xca, 0xce, 0xce, 0xd1, 0xce, 0xbb, 0xb8, 0xc7, 0xf8, + 0xec, 0xce, 0xb1, 0xa5, 0xa5, 0xa7, 0xad, 0xbe, 0xca, 0xcc, 0xd4, 0xeb, + 0x07, 0x24, 0x3e, 0x4b, 0x57, 0x5d, 0x61, 0x62, 0x63, 0x63, 0x66, 0x66, + 0x63, 0x60, 0x60, 0x5d, 0x55, 0x50, 0x4f, 0x4c, 0x47, 0x40, 0x41, 0x44, + 0x39, 0x2d, 0x3a, 0x4b, 0x54, 0x57, 0x59, 0x5b, 0x5d, 0x5d, 0x5d, 0x5e, + 0x5e, 0x5d, 0x5d, 0x5d, 0x5a, 0x56, 0x50, 0x4a, 0x44, 0x3d, 0x36, 0x32, + 0x1f, 0x24, 0x41, 0x4d, 0x50, 0x50, 0x51, 0x57, 0x59, 0x5a, 0x59, 0x59, + 0x50, 0x3e, 0x27, 0x07, 0xd6, 0xcc, 0xbc, 0xb6, 0xbc, 0xe2, 0xda, 0xc1, + 0xbd, 0xbb, 0xb6, 0xb0, 0xb1, 0xba, 0xce, 0xd3, 0xcb, 0xc4, 0xbe, 0xbd, + 0xbd, 0xbe, 0xbf, 0xc1, 0xbb, 0xb1, 0xb2, 0xb5, 0xb1, 0xac, 0xa6, 0xa4, + 0xa5, 0xa3, 0xaa, 0xb5, 0xbb, 0xc2, 0xc1, 0xbd, 0xbc, 0xc1, 0xcb, 0xcf, + 0xce, 0xc4, 0xbc, 0xbb, 0xbd, 0xbc, 0xaf, 0xa3, 0xa7, 0xaa, 0xb5, 0xb1, + 0xad, 0xb9, 0xc0, 0xbc, 0xb6, 0xb6, 0xb2, 0xc3, 0xdb, 0xd7, 0xc5, 0xbb, + 0xb5, 0xac, 0xb1, 0xba, 0xbc, 0xc2, 0xc2, 0xc1, 0xbe, 0xbc, 0xc3, 0xcd, + 0xc9, 0xc2, 0xc1, 0xc4, 0xc3, 0xb9, 0xae, 0xb0, 0xbb, 0xbf, 0xc5, 0xc7, + 0xca, 0xc6, 0xc7, 0xc9, 0xce, 0xd1, 0xd4, 0xcb, 0xc4, 0xbe, 0xc8, 
0xcd, + 0xcc, 0xcd, 0xc6, 0xc1, 0xbf, 0xc8, 0xcc, 0xca, 0xcd, 0xc8, 0xc9, 0xcf, + 0xca, 0xca, 0xce, 0xce, 0xcb, 0xcd, 0xcc, 0xce, 0xd3, 0xd3, 0xd0, 0xcb, + 0xc8, 0xc5, 0xbc, 0xbf, 0xc2, 0xc1, 0xc5, 0xc7, 0xcd, 0xc7, 0xc8, 0xc7, + 0xcf, 0xcf, 0xcd, 0xce, 0xb5, 0xb5, 0xcb, 0xf3, 0xe8, 0xd4, 0xb6, 0xac, + 0xaf, 0xb0, 0xb8, 0xc7, 0xce, 0xd6, 0xe5, 0xf9, 0x16, 0x28, 0x3f, 0x48, + 0x51, 0x5a, 0x5d, 0x60, 0x62, 0x63, 0x66, 0x64, 0x63, 0x62, 0x62, 0x63, + 0x5c, 0x53, 0x4d, 0x4a, 0x49, 0x47, 0x45, 0x49, 0x4b, 0x49, 0x42, 0x44, + 0x4d, 0x53, 0x57, 0x5b, 0x5d, 0x5e, 0x5d, 0x5d, 0x5e, 0x5e, 0x5e, 0x5d, + 0x5d, 0x59, 0x55, 0x4c, 0x46, 0x3e, 0x3b, 0x35, 0x2b, 0x3c, 0x50, 0x51, + 0x50, 0x55, 0x57, 0x58, 0x59, 0x5b, 0x5b, 0x5a, 0x51, 0x3b, 0x1e, 0x06, + 0xef, 0xd6, 0xbe, 0xb3, 0xba, 0xc2, 0xdb, 0xd3, 0xc3, 0xbd, 0xba, 0xb6, + 0xae, 0xaf, 0xc7, 0xd6, 0xd2, 0xcd, 0xc8, 0xc4, 0xc2, 0xc1, 0xc1, 0xc3, + 0xc1, 0xad, 0xa8, 0xaf, 0xb1, 0xac, 0xa6, 0xa6, 0xa3, 0x9c, 0x9f, 0xad, + 0xb6, 0xc0, 0xc2, 0xc5, 0xc6, 0xc8, 0xc8, 0xc9, 0xc8, 0xcd, 0xc7, 0xbe, + 0xbb, 0xbc, 0xb2, 0xa9, 0xa8, 0xad, 0xaf, 0xa9, 0xa9, 0xb2, 0xba, 0xbc, + 0xb7, 0xb6, 0xb8, 0xc9, 0xd2, 0xc8, 0xbb, 0xb8, 0xb5, 0xb7, 0xbc, 0xc7, + 0xcd, 0xcc, 0xc8, 0xc6, 0xc1, 0xb5, 0xbb, 0xc1, 0xc1, 0xbe, 0xbd, 0xc1, + 0xc3, 0xc0, 0xb5, 0xaf, 0xb5, 0xbf, 0xc9, 0xcb, 0xcf, 0xcf, 0xcb, 0xc9, + 0xcc, 0xcf, 0xd3, 0xcb, 0xb7, 0xb5, 0xbc, 0xc4, 0xc5, 0xc1, 0xbd, 0xc0, + 0xc0, 0xca, 0xca, 0xc1, 0xc1, 0xc5, 0xc9, 0xc4, 0xc6, 0xc4, 0xc3, 0xc1, + 0xc1, 0xc0, 0xbf, 0xc0, 0xc2, 0xc7, 0xc8, 0xcb, 0xc9, 0xc1, 0xbe, 0xc2, + 0xc3, 0xc4, 0xc3, 0xc8, 0xd1, 0xd1, 0xd1, 0xd1, 0xd1, 0xd1, 0xc9, 0xcb, + 0xb6, 0xb5, 0xcb, 0xe6, 0xe5, 0xd1, 0xb7, 0xb8, 0xb7, 0xbb, 0xc2, 0xce, + 0xd7, 0xde, 0xec, 0xfa, 0x17, 0x31, 0x35, 0x45, 0x4f, 0x57, 0x5b, 0x61, + 0x62, 0x62, 0x63, 0x64, 0x63, 0x61, 0x60, 0x64, 0x63, 0x5d, 0x55, 0x50, + 0x4a, 0x49, 0x45, 0x44, 0x46, 0x4b, 0x4b, 0x48, 0x48, 0x4d, 0x53, 0x56, + 0x5a, 0x5d, 0x5e, 0x5e, 0x5f, 0x5e, 0x5f, 0x60, 0x5e, 0x5d, 0x59, 
0x51, + 0x4b, 0x4a, 0x48, 0x44, 0x3f, 0x47, 0x53, 0x53, 0x55, 0x59, 0x5b, 0x5a, + 0x58, 0x5a, 0x5b, 0x5c, 0x58, 0x4a, 0x31, 0x0f, 0xed, 0xd8, 0xc7, 0xc0, + 0xc1, 0xb7, 0xbb, 0xd5, 0xce, 0xbc, 0xb8, 0xb8, 0xad, 0xa5, 0xaf, 0xc2, + 0xcc, 0xd0, 0xd2, 0xd0, 0xc5, 0xbe, 0xc5, 0xc8, 0xc3, 0xa9, 0xac, 0xbe, + 0xbd, 0xb4, 0xac, 0xa6, 0xa5, 0x9f, 0x9b, 0xa3, 0xb0, 0xb8, 0xbd, 0xc6, + 0xc7, 0xc1, 0xc3, 0xc7, 0xc1, 0xc2, 0xc5, 0xc3, 0xbd, 0xbf, 0xba, 0xb3, + 0xae, 0xae, 0xaa, 0xa2, 0xa2, 0xad, 0xb6, 0xbc, 0xb9, 0xba, 0xaf, 0xb4, + 0xbd, 0xb8, 0xb5, 0xb7, 0xbb, 0xbc, 0xbd, 0xc4, 0xcb, 0xcb, 0xc9, 0xc4, + 0xc0, 0xaf, 0xab, 0xb6, 0xc1, 0xc0, 0xbb, 0xc1, 0xc1, 0xc1, 0xc2, 0xc1, + 0xbb, 0xc1, 0xc9, 0xcd, 0xce, 0xd4, 0xce, 0xcc, 0xce, 0xd2, 0xd2, 0xc8, + 0xbb, 0xba, 0xb9, 0xb7, 0xbb, 0xc1, 0xc5, 0xbf, 0xbe, 0xc2, 0xcc, 0xc5, + 0xc5, 0xc5, 0xc1, 0xbd, 0xc4, 0xca, 0xcb, 0xc6, 0xc8, 0xc7, 0xc7, 0xc4, + 0xc3, 0xcb, 0xce, 0xcb, 0xc0, 0xbd, 0xc7, 0xc7, 0xc8, 0xc6, 0xc2, 0xc7, + 0xcc, 0xd1, 0xd3, 0xd1, 0xcc, 0xce, 0xcd, 0xca, 0xbb, 0xbe, 0xc9, 0xd2, + 0xe0, 0xd4, 0xc1, 0xc3, 0xbe, 0xc1, 0xc5, 0xd2, 0xd8, 0xe2, 0xf3, 0x06, + 0x17, 0x2b, 0x37, 0x3e, 0x48, 0x4e, 0x5a, 0x60, 0x62, 0x61, 0x61, 0x63, + 0x62, 0x61, 0x5d, 0x60, 0x63, 0x63, 0x5d, 0x57, 0x55, 0x52, 0x50, 0x4c, + 0x4c, 0x4d, 0x4d, 0x4c, 0x4d, 0x52, 0x54, 0x52, 0x56, 0x59, 0x5d, 0x5e, + 0x62, 0x63, 0x62, 0x63, 0x62, 0x5f, 0x5d, 0x59, 0x51, 0x4c, 0x4a, 0x4a, + 0x49, 0x4c, 0x50, 0x53, 0x55, 0x55, 0x57, 0x59, 0x5a, 0x5d, 0x5d, 0x5d, + 0x5d, 0x57, 0x42, 0x18, 0xe9, 0xce, 0xbc, 0xbd, 0xbb, 0xb5, 0xb5, 0xbf, + 0xce, 0xba, 0xb6, 0xb7, 0xaf, 0xa9, 0xa8, 0xa7, 0xab, 0xb8, 0xca, 0xce, + 0xbc, 0xb5, 0xc0, 0xcb, 0xc1, 0xa9, 0xbc, 0xc9, 0xc5, 0xbb, 0xae, 0xa5, + 0xa5, 0xa3, 0x9c, 0xa0, 0xae, 0xb2, 0xb9, 0xc1, 0xbf, 0xbc, 0xc3, 0xc8, + 0xbd, 0xbc, 0xbc, 0xba, 0xb5, 0xb7, 0xb8, 0xb6, 0xb2, 0xaa, 0xa9, 0xa3, + 0x9e, 0xa3, 0xad, 0xb9, 0xb9, 0xbb, 0xad, 0xa7, 0xb1, 0xb4, 0xb5, 0xb7, + 0xba, 0xbb, 0xbb, 0xc1, 0xc8, 0xc8, 0xc5, 0xc4, 0xc3, 0xb8, 0xab, 
0xb1, + 0xb9, 0xbb, 0xc0, 0xc4, 0xc2, 0xc0, 0xbe, 0xbb, 0xc4, 0xc3, 0xc8, 0xcd, + 0xcf, 0xd8, 0xd4, 0xd4, 0xd2, 0xd4, 0xcd, 0xb9, 0xb5, 0xba, 0xbb, 0xb9, + 0xbe, 0xc1, 0xbf, 0xb5, 0xc0, 0xd2, 0xda, 0xcf, 0xc8, 0xc5, 0xc1, 0xc0, + 0xc6, 0xc5, 0xc7, 0xce, 0xd2, 0xd4, 0xd3, 0xd5, 0xd3, 0xd2, 0xd4, 0xd8, + 0xd3, 0xce, 0xd0, 0xcd, 0xcd, 0xcb, 0xc6, 0xc8, 0xc8, 0xcd, 0xce, 0xcf, + 0xcf, 0xce, 0xca, 0xc5, 0xbf, 0xc1, 0xc3, 0xcc, 0xf2, 0x00, 0xfa, 0xe6, + 0xc5, 0xc1, 0xc8, 0xce, 0xde, 0xe8, 0xf4, 0x01, 0x11, 0x21, 0x2b, 0x37, + 0x42, 0x4d, 0x55, 0x5b, 0x60, 0x60, 0x60, 0x62, 0x61, 0x5d, 0x5d, 0x5d, + 0x5e, 0x5f, 0x5e, 0x5d, 0x59, 0x57, 0x55, 0x52, 0x50, 0x4f, 0x50, 0x54, + 0x57, 0x59, 0x5d, 0x5e, 0x5b, 0x59, 0x59, 0x5b, 0x5e, 0x62, 0x63, 0x63, + 0x63, 0x62, 0x5d, 0x56, 0x4e, 0x44, 0x46, 0x4a, 0x4b, 0x4c, 0x4b, 0x4d, + 0x4d, 0x50, 0x55, 0x58, 0x5b, 0x5e, 0x60, 0x60, 0x5e, 0x5c, 0x52, 0x39, + 0x0c, 0xd7, 0xc1, 0xbb, 0xbb, 0xb5, 0xb3, 0xb7, 0xc8, 0xb8, 0xb5, 0xb3, + 0xac, 0xa9, 0xa8, 0xa4, 0xa2, 0xa9, 0xb9, 0xce, 0xce, 0xbb, 0xae, 0xb3, + 0xac, 0x9f, 0xaf, 0xc4, 0xc8, 0xc1, 0xb0, 0xa7, 0xa2, 0xa4, 0xa6, 0xa8, + 0xac, 0xb1, 0xb6, 0xbb, 0xbc, 0xbd, 0xc3, 0xc9, 0xbe, 0xb7, 0xb4, 0xb3, + 0xaf, 0xb5, 0xb5, 0xb1, 0xaf, 0xa6, 0xad, 0xac, 0xa4, 0x9f, 0xa0, 0xae, + 0xb5, 0xbb, 0xb7, 0xb5, 0xb5, 0xb2, 0xb0, 0xb2, 0xb5, 0xb5, 0xb6, 0xc0, + 0xc5, 0xc6, 0xc0, 0xbb, 0xbb, 0xbf, 0xb6, 0xb3, 0xb2, 0xb7, 0xc4, 0xc7, + 0xc3, 0xc1, 0xc0, 0xc0, 0xc8, 0xbf, 0xc9, 0xc6, 0xc8, 0xd1, 0xd4, 0xd6, + 0xd6, 0xd6, 0xc9, 0xb9, 0xbb, 0xbc, 0xbe, 0xb9, 0xbb, 0xb5, 0xb3, 0xb2, + 0xbc, 0xce, 0xce, 0xc9, 0xc2, 0xb9, 0xc1, 0xce, 0xce, 0xcc, 0xc4, 0xc1, + 0xc2, 0xc9, 0xce, 0xca, 0xc7, 0xc1, 0xbe, 0xc3, 0xca, 0xcf, 0xce, 0xcd, + 0xd3, 0xd4, 0xce, 0xcb, 0xce, 0xd2, 0xce, 0xce, 0xd0, 0xd0, 0xcd, 0xc8, + 0xc1, 0xc2, 0xcb, 0xd7, 0xff, 0x19, 0x1b, 0x10, 0xf0, 0xcd, 0xc7, 0xcf, + 0xdb, 0xe9, 0xf0, 0x0c, 0x1c, 0x25, 0x2d, 0x37, 0x40, 0x44, 0x4e, 0x57, + 0x5d, 0x61, 0x62, 0x60, 0x61, 0x5d, 0x5d, 0x5a, 0x5b, 0x5c, 0x5c, 
0x5c, + 0x5a, 0x58, 0x57, 0x57, 0x56, 0x54, 0x54, 0x55, 0x57, 0x59, 0x5c, 0x61, + 0x63, 0x62, 0x5d, 0x5c, 0x5d, 0x5d, 0x5f, 0x61, 0x61, 0x63, 0x61, 0x5c, + 0x51, 0x48, 0x44, 0x47, 0x4a, 0x4b, 0x4a, 0x4a, 0x4c, 0x50, 0x55, 0x57, + 0x5a, 0x5d, 0x60, 0x62, 0x62, 0x60, 0x5d, 0x4f, 0x32, 0x0a, 0xdd, 0xbf, + 0xb9, 0xb9, 0xb5, 0xb7, 0xbe, 0xb9, 0xb1, 0xab, 0xaa, 0xaa, 0xa6, 0xa2, + 0xa2, 0xa9, 0xb3, 0xca, 0xd1, 0xc4, 0xa9, 0x9e, 0x9e, 0x9c, 0x9f, 0xb2, + 0xc1, 0xc3, 0xb5, 0xa8, 0xa2, 0xa9, 0xac, 0xaa, 0xac, 0xaf, 0xb3, 0xb6, + 0xb8, 0xbc, 0xc4, 0xc9, 0xc1, 0xb7, 0xaf, 0xaf, 0xab, 0xb4, 0xb6, 0xab, + 0xa5, 0xa6, 0xb1, 0xb4, 0xaf, 0xb1, 0xb1, 0xb2, 0xb3, 0xb9, 0xc1, 0xc1, + 0xb8, 0xb5, 0xaf, 0xb2, 0xb5, 0xb6, 0xb5, 0xc1, 0xbf, 0xc2, 0xbd, 0xb2, + 0xb1, 0xb6, 0xb5, 0xb8, 0xba, 0xc0, 0xcb, 0xcc, 0xc9, 0xc7, 0xc4, 0xc4, + 0xc1, 0xc7, 0xd4, 0xc5, 0xc9, 0xd2, 0xd4, 0xd4, 0xd4, 0xd3, 0xc7, 0xb8, + 0xbb, 0xc3, 0xcc, 0xc8, 0xc1, 0xb5, 0xaf, 0xb2, 0xb5, 0xb6, 0xb7, 0xb9, + 0xc1, 0xbc, 0xc2, 0xca, 0xd0, 0xce, 0xcd, 0xcb, 0xc4, 0xc1, 0xc1, 0xb9, + 0xbb, 0xc1, 0xc3, 0xc7, 0xc8, 0xc2, 0xba, 0xbb, 0xc4, 0xc4, 0xc6, 0xc7, + 0xc7, 0xc8, 0xca, 0xcb, 0xd0, 0xd0, 0xca, 0xc5, 0xc5, 0xc0, 0xc8, 0xd9, + 0x00, 0x15, 0x1c, 0x1c, 0x16, 0x01, 0xda, 0xd0, 0xd9, 0xe5, 0xf1, 0x01, + 0x10, 0x1c, 0x26, 0x35, 0x3e, 0x45, 0x4b, 0x55, 0x59, 0x5c, 0x5d, 0x5e, + 0x5e, 0x5d, 0x5c, 0x58, 0x57, 0x56, 0x55, 0x57, 0x58, 0x57, 0x56, 0x58, + 0x58, 0x58, 0x56, 0x56, 0x56, 0x57, 0x59, 0x5d, 0x61, 0x63, 0x63, 0x5f, + 0x5d, 0x5d, 0x5d, 0x5d, 0x5e, 0x5f, 0x60, 0x5f, 0x58, 0x50, 0x4c, 0x4a, + 0x4b, 0x4c, 0x4a, 0x4a, 0x4d, 0x53, 0x57, 0x5a, 0x5d, 0x60, 0x61, 0x62, + 0x63, 0x63, 0x61, 0x5a, 0x4b, 0x33, 0x12, 0xe5, 0xca, 0xbb, 0xb6, 0xb9, + 0xb7, 0xb6, 0xaf, 0xa8, 0xa9, 0xab, 0xa9, 0xa3, 0xa6, 0xa9, 0xaf, 0xc5, + 0xca, 0xbb, 0xa5, 0x9c, 0x9c, 0x9c, 0x9c, 0xa1, 0xb4, 0xbe, 0xb7, 0xa8, + 0xa3, 0xac, 0xab, 0xa9, 0xaa, 0xaf, 0xb4, 0xb5, 0xba, 0xc1, 0xc4, 0xc1, + 0xbb, 0xb7, 0xaf, 0xac, 0xa8, 0xbb, 0xbb, 0xa9, 0xa2, 0xa9, 0xb5, 
0xb9, + 0xba, 0xbf, 0xc1, 0xc3, 0xbc, 0xbd, 0xc2, 0xc1, 0xb4, 0xb7, 0xb5, 0xb0, + 0xb0, 0xb3, 0xb5, 0xc1, 0xc2, 0xc0, 0xbd, 0xb5, 0xaf, 0xb0, 0xaf, 0xb1, + 0xb3, 0xb6, 0xc5, 0xcd, 0xcf, 0xcf, 0xc9, 0xc3, 0xc4, 0xcc, 0xc4, 0xc6, + 0xca, 0xd4, 0xd6, 0xd4, 0xce, 0xcd, 0xc9, 0xbf, 0xc1, 0xc5, 0xcb, 0xc7, + 0xc7, 0xc7, 0xb8, 0xb3, 0xb9, 0xb8, 0xb3, 0xb5, 0xc1, 0xc8, 0xc7, 0xc4, + 0xc7, 0xc6, 0xc5, 0xc8, 0xc8, 0xc8, 0xc3, 0xba, 0xb3, 0xba, 0xbe, 0xc4, + 0xc6, 0xc1, 0xbe, 0xc0, 0xc1, 0xbf, 0xbd, 0xbd, 0xbb, 0xbb, 0xc0, 0xc0, + 0xbe, 0xbc, 0xb7, 0xb2, 0xc3, 0xbb, 0xbf, 0xeb, 0x0b, 0x15, 0x1a, 0x13, + 0xf7, 0xd4, 0xc2, 0xc9, 0xd2, 0xde, 0xed, 0xf9, 0x0c, 0x16, 0x20, 0x28, + 0x32, 0x3a, 0x44, 0x4d, 0x56, 0x57, 0x57, 0x5a, 0x5d, 0x5e, 0x5d, 0x58, + 0x55, 0x52, 0x54, 0x55, 0x56, 0x56, 0x57, 0x54, 0x55, 0x57, 0x57, 0x56, + 0x55, 0x56, 0x56, 0x59, 0x5d, 0x5f, 0x5f, 0x5e, 0x59, 0x57, 0x58, 0x5b, + 0x5b, 0x5c, 0x5d, 0x5d, 0x59, 0x51, 0x4e, 0x50, 0x4e, 0x4f, 0x51, 0x4d, + 0x50, 0x57, 0x5b, 0x5d, 0x5f, 0x61, 0x62, 0x63, 0x63, 0x63, 0x63, 0x60, + 0x59, 0x4a, 0x36, 0x15, 0xec, 0xce, 0xbe, 0xb9, 0xb3, 0xb1, 0xb0, 0xac, + 0xa9, 0xa9, 0xa8, 0xa5, 0xa9, 0xac, 0xaf, 0xc1, 0xc8, 0xaf, 0x9e, 0x9c, + 0x9c, 0x9c, 0x9c, 0x9e, 0xa3, 0xb4, 0xb4, 0xa4, 0x9c, 0xa6, 0xac, 0xab, + 0xaa, 0xad, 0xb4, 0xbb, 0xc1, 0xc4, 0xbf, 0xb9, 0xb3, 0xaf, 0xa9, 0xa8, + 0xb2, 0xc1, 0xc2, 0xbb, 0xba, 0xb5, 0xb0, 0xb6, 0xbe, 0xc1, 0xc5, 0xc8, + 0xc4, 0xbf, 0xc3, 0xc0, 0xb5, 0xb2, 0xb2, 0xb1, 0xaf, 0xaf, 0xb1, 0xb8, + 0xbd, 0xbf, 0xc0, 0xbb, 0xb5, 0xb2, 0xb2, 0xb2, 0xb3, 0xaf, 0xb2, 0xba, + 0xc1, 0xc8, 0xcf, 0xc9, 0xc0, 0xbf, 0xc1, 0xbb, 0xbb, 0xd0, 0xd6, 0xd5, + 0xc9, 0xc0, 0xbb, 0xba, 0xbc, 0xbd, 0xbe, 0xbc, 0xbe, 0xc1, 0xc1, 0xbe, + 0xbb, 0xc0, 0xbe, 0xbe, 0xc2, 0xc3, 0xc8, 0xca, 0xc0, 0xbd, 0xc1, 0xc2, + 0xc6, 0xc5, 0xc9, 0xc5, 0xc1, 0xbe, 0xbd, 0xbe, 0xbd, 0xbd, 0xbb, 0xbc, + 0xba, 0xb6, 0xb6, 0xb9, 0xb9, 0xbc, 0xbd, 0xbb, 0xbd, 0xbb, 0xbb, 0xb4, + 0xc3, 0xbb, 0xbc, 0x06, 0x13, 0x17, 0x03, 0xd0, 0xb3, 0xab, 0xa9, 
0xb0, + 0xbd, 0xd2, 0xe2, 0xef, 0x04, 0x10, 0x1c, 0x27, 0x32, 0x3d, 0x44, 0x4a, + 0x4e, 0x51, 0x55, 0x56, 0x57, 0x5b, 0x5d, 0x5a, 0x58, 0x54, 0x4f, 0x53, + 0x56, 0x54, 0x56, 0x56, 0x55, 0x55, 0x56, 0x57, 0x57, 0x57, 0x57, 0x5a, + 0x5c, 0x5d, 0x5c, 0x5b, 0x58, 0x57, 0x53, 0x51, 0x57, 0x59, 0x5a, 0x59, + 0x57, 0x52, 0x4c, 0x4f, 0x51, 0x50, 0x52, 0x50, 0x54, 0x59, 0x5c, 0x5e, + 0x5f, 0x61, 0x62, 0x63, 0x64, 0x64, 0x63, 0x62, 0x5e, 0x57, 0x44, 0x2a, + 0x12, 0xec, 0xd0, 0xc0, 0xb5, 0xb2, 0xb3, 0xb1, 0xad, 0xa9, 0xa5, 0xa7, + 0xae, 0xb0, 0xaf, 0xba, 0xba, 0xa7, 0xa1, 0xa2, 0xa1, 0x9d, 0x9d, 0xa0, + 0xa0, 0xad, 0xb3, 0xa4, 0x98, 0x9c, 0xa7, 0xab, 0xae, 0xab, 0xb5, 0xbe, + 0xc1, 0xbc, 0xb7, 0xb5, 0xac, 0xa9, 0xa6, 0xa8, 0xb8, 0xc1, 0xc7, 0xc7, + 0xc8, 0xc8, 0xc1, 0xbb, 0xbe, 0xc1, 0xc1, 0xc4, 0xc4, 0xc1, 0xc0, 0xbe, + 0xb6, 0xb3, 0xb0, 0xb1, 0xaf, 0xac, 0xae, 0xb0, 0xb7, 0xbe, 0xc1, 0xbd, + 0xb9, 0xb6, 0xb5, 0xb5, 0xb4, 0xb5, 0xb4, 0xaf, 0xaf, 0xb1, 0xc0, 0xc4, + 0xc3, 0xbe, 0xc7, 0xc1, 0xba, 0xc0, 0xd1, 0xd6, 0xcb, 0xbb, 0xb6, 0xb5, + 0xb5, 0xbd, 0xbc, 0xbb, 0xc2, 0xc2, 0xc2, 0xc4, 0xc1, 0xc7, 0xc9, 0xc1, + 0xbb, 0xc4, 0xca, 0xcf, 0xc8, 0xc1, 0xc0, 0xc1, 0xc3, 0xc3, 0xc6, 0xbd, + 0xc1, 0xc4, 0xc3, 0xc8, 0xc9, 0xc6, 0xc0, 0xc1, 0xbe, 0xb5, 0xb8, 0xbc, + 0xbe, 0xbd, 0xc5, 0xc2, 0xbf, 0xbd, 0xbc, 0xba, 0xbf, 0xc1, 0xcd, 0x0d, + 0x09, 0xf3, 0xd1, 0xbd, 0xb5, 0xad, 0xa9, 0xa8, 0xa9, 0xb5, 0xc4, 0xda, + 0xf7, 0x08, 0x15, 0x1d, 0x26, 0x31, 0x39, 0x41, 0x48, 0x4e, 0x52, 0x53, + 0x50, 0x53, 0x59, 0x59, 0x58, 0x55, 0x4f, 0x4b, 0x53, 0x53, 0x51, 0x52, + 0x56, 0x52, 0x52, 0x54, 0x56, 0x55, 0x56, 0x57, 0x59, 0x5c, 0x5c, 0x57, + 0x57, 0x57, 0x54, 0x50, 0x50, 0x55, 0x58, 0x59, 0x57, 0x52, 0x4a, 0x4c, + 0x51, 0x51, 0x50, 0x53, 0x57, 0x5c, 0x5d, 0x5e, 0x61, 0x62, 0x63, 0x63, + 0x64, 0x63, 0x63, 0x63, 0x60, 0x5d, 0x54, 0x38, 0x11, 0xf3, 0xd9, 0xc2, + 0xb8, 0xb4, 0xb3, 0xb0, 0xae, 0xa8, 0xa6, 0xab, 0xb4, 0xb5, 0xac, 0xad, + 0xb1, 0xad, 0xaf, 0xb0, 0xa9, 0x9f, 0x9e, 0xa2, 0xa2, 0xac, 0xb0, 
0xa3, + 0x97, 0x99, 0xa4, 0xaa, 0xac, 0xa9, 0xb4, 0xc1, 0xbf, 0xbb, 0xb5, 0xb3, + 0xa9, 0xa8, 0xaf, 0xad, 0xb8, 0xc1, 0xc6, 0xc8, 0xc7, 0xc7, 0xc6, 0xc0, + 0xbc, 0xbf, 0xc4, 0xc7, 0xc4, 0xc2, 0xbe, 0xbc, 0xb7, 0xb5, 0xb1, 0xaf, + 0xaf, 0xac, 0xab, 0xa9, 0xb1, 0xbf, 0xc2, 0xbd, 0xb7, 0xb5, 0xb4, 0xb4, + 0xb0, 0xb0, 0xb2, 0xae, 0xa6, 0xa0, 0xa8, 0xaf, 0xb8, 0xbc, 0xc0, 0xbe, + 0xbf, 0xc1, 0xd2, 0xd9, 0xcd, 0xbb, 0xb6, 0xb5, 0xb9, 0xc3, 0xc9, 0xbe, + 0xc1, 0xc2, 0xbc, 0xc1, 0xc1, 0xbf, 0xc0, 0xc4, 0xc7, 0xcc, 0xce, 0xd2, + 0xc6, 0xc6, 0xc2, 0xc1, 0xbd, 0xbc, 0xbc, 0xbb, 0xbf, 0xbe, 0xbb, 0xc2, + 0xc9, 0xc6, 0xc1, 0xc1, 0xc0, 0xbb, 0xb6, 0xb7, 0xc1, 0xc3, 0xd5, 0xd2, + 0xc1, 0xbf, 0xba, 0xb8, 0xb4, 0xba, 0xc1, 0xed, 0xe5, 0xd4, 0xcd, 0xc4, + 0xbb, 0xb3, 0xaf, 0xab, 0xa9, 0xab, 0xad, 0xbc, 0xd4, 0xe8, 0xff, 0x12, + 0x20, 0x27, 0x30, 0x39, 0x40, 0x45, 0x47, 0x4d, 0x4c, 0x49, 0x4b, 0x52, + 0x53, 0x53, 0x4e, 0x47, 0x49, 0x4d, 0x4e, 0x4c, 0x51, 0x52, 0x53, 0x51, + 0x54, 0x55, 0x53, 0x55, 0x57, 0x59, 0x59, 0x55, 0x56, 0x57, 0x55, 0x51, + 0x50, 0x50, 0x56, 0x5a, 0x57, 0x54, 0x4e, 0x4b, 0x50, 0x51, 0x50, 0x53, + 0x59, 0x5e, 0x5d, 0x5e, 0x61, 0x63, 0x63, 0x64, 0x65, 0x63, 0x64, 0x64, + 0x63, 0x5f, 0x5b, 0x52, 0x3c, 0x13, 0xea, 0xce, 0xbb, 0xb5, 0xb5, 0xaf, + 0xa9, 0xad, 0xb1, 0xb7, 0xbe, 0xb7, 0xa6, 0xa9, 0xb4, 0xb2, 0xb3, 0xb2, + 0xac, 0xa3, 0xa1, 0xa1, 0xa2, 0xa9, 0xaf, 0xa4, 0x99, 0x9b, 0xa3, 0xa9, + 0xad, 0xad, 0xae, 0xb5, 0xbe, 0xbd, 0xb5, 0xac, 0xa4, 0xa6, 0xb0, 0xb3, + 0xbf, 0xc7, 0xc7, 0xc4, 0xc1, 0xc5, 0xc1, 0xbf, 0xc2, 0xbd, 0xbf, 0xc6, + 0xca, 0xc8, 0xc1, 0xbb, 0xb9, 0xb7, 0xb3, 0xad, 0xaa, 0xaa, 0xaa, 0xa6, + 0xa9, 0xb8, 0xbe, 0xb8, 0xaf, 0xaf, 0xb3, 0xb4, 0xb2, 0xaf, 0xae, 0xaf, + 0xa8, 0xa0, 0xa5, 0xb3, 0xb2, 0xb8, 0xbf, 0xbe, 0xc0, 0xc4, 0xd3, 0xda, + 0xd0, 0xc6, 0xc4, 0xc1, 0xc4, 0xc8, 0xcd, 0xc3, 0xc8, 0xc5, 0xbf, 0xc4, + 0xc4, 0xc3, 0xcb, 0xce, 0xc9, 0xc1, 0xc5, 0xd1, 0xca, 0xc7, 0xc1, 0xc4, + 0xc7, 0xc5, 0xbf, 0xbb, 0xbc, 0xbc, 0xc1, 0xc2, 0xc8, 0xc3, 0xbf, 
0xbd, + 0xc1, 0xc5, 0xbe, 0xbd, 0xc0, 0xc4, 0xcf, 0xcb, 0xc5, 0xbd, 0xc3, 0xc7, + 0xa9, 0xb1, 0xc6, 0xda, 0xda, 0xd5, 0xcf, 0xc9, 0xc3, 0xbd, 0xbe, 0xb9, + 0xb6, 0xb3, 0xad, 0xb0, 0xb5, 0xc0, 0xdb, 0xf2, 0x0a, 0x18, 0x24, 0x34, + 0x37, 0x3c, 0x3e, 0x3e, 0x41, 0x41, 0x39, 0x39, 0x41, 0x49, 0x4a, 0x44, + 0x46, 0x46, 0x4a, 0x49, 0x4a, 0x4d, 0x4f, 0x4f, 0x4e, 0x4c, 0x50, 0x53, + 0x56, 0x59, 0x59, 0x51, 0x50, 0x56, 0x57, 0x54, 0x4e, 0x4f, 0x53, 0x57, + 0x58, 0x52, 0x51, 0x52, 0x53, 0x56, 0x51, 0x53, 0x58, 0x5d, 0x5e, 0x5e, + 0x61, 0x64, 0x66, 0x65, 0x64, 0x65, 0x65, 0x64, 0x64, 0x62, 0x5d, 0x56, + 0x4b, 0x36, 0x1c, 0xf7, 0xd8, 0xc0, 0xb5, 0xad, 0xaa, 0xaf, 0xbe, 0xc1, + 0xc0, 0xb9, 0xaa, 0xaf, 0xb3, 0xaf, 0xa9, 0xa7, 0xaa, 0xa6, 0xa3, 0xa2, + 0xa4, 0xa8, 0xb0, 0xa8, 0xa2, 0xb0, 0xb2, 0xaf, 0xb1, 0xb5, 0xaf, 0xad, + 0xb5, 0xb8, 0xaf, 0xa5, 0xa2, 0xa2, 0xae, 0xb5, 0xc1, 0xc7, 0xc5, 0xc2, + 0xc4, 0xc2, 0xbb, 0xba, 0xc4, 0xc4, 0xbf, 0xc1, 0xca, 0xcf, 0xc9, 0xbc, + 0xb3, 0xaf, 0xad, 0xac, 0xb1, 0xb6, 0xaf, 0xad, 0xa8, 0xad, 0xb0, 0xae, + 0xa7, 0xaf, 0xb6, 0xb5, 0xb5, 0xb5, 0xb6, 0xb5, 0xaf, 0xa6, 0xad, 0xc0, + 0xba, 0xb4, 0xbb, 0xb9, 0xc1, 0xcb, 0xce, 0xd3, 0xd3, 0xce, 0xc6, 0xc1, + 0xc7, 0xcc, 0xca, 0xc8, 0xcf, 0xc7, 0xc1, 0xcc, 0xd3, 0xd3, 0xce, 0xce, + 0xc5, 0xbe, 0xc1, 0xce, 0xd3, 0xd0, 0xcb, 0xc8, 0xcc, 0xcf, 0xcc, 0xc4, + 0xc2, 0xc4, 0xc4, 0xc8, 0xc9, 0xca, 0xc6, 0xbb, 0xc1, 0xc2, 0xc7, 0xc8, + 0xbd, 0xbd, 0xbc, 0xbb, 0xbf, 0xb8, 0xbc, 0xc4, 0xaa, 0xb2, 0xd4, 0xe5, + 0xe1, 0xde, 0xd6, 0xd1, 0xcf, 0xcc, 0xc5, 0xc5, 0xcc, 0xcd, 0xbe, 0xb9, + 0xb4, 0xb5, 0xbb, 0xbf, 0xd5, 0xef, 0xf4, 0x0c, 0x1a, 0x18, 0x26, 0x27, + 0x28, 0x30, 0x2b, 0x26, 0x29, 0x36, 0x3e, 0x40, 0x42, 0x42, 0x44, 0x45, + 0x46, 0x47, 0x48, 0x4a, 0x4c, 0x4d, 0x51, 0x52, 0x54, 0x58, 0x58, 0x55, + 0x55, 0x57, 0x58, 0x58, 0x53, 0x4f, 0x51, 0x55, 0x59, 0x55, 0x50, 0x55, + 0x5a, 0x5b, 0x54, 0x54, 0x59, 0x5e, 0x5f, 0x5e, 0x61, 0x64, 0x65, 0x66, + 0x65, 0x65, 0x65, 0x65, 0x65, 0x64, 0x62, 0x5d, 0x51, 0x40, 0x29, 
0x13, + 0xf7, 0xd3, 0xb9, 0xb2, 0xae, 0xaa, 0xbb, 0xc6, 0xc1, 0xbf, 0xb5, 0xaa, + 0xa5, 0xa1, 0x9b, 0x9e, 0xa3, 0xa2, 0xa1, 0xa3, 0xa8, 0xa9, 0xb4, 0xb5, + 0xb3, 0xbf, 0xc1, 0xb8, 0xb1, 0xb1, 0xaf, 0xab, 0xad, 0xb0, 0xaa, 0xa2, + 0xa2, 0xa3, 0xb5, 0xb3, 0xbb, 0xc5, 0xc6, 0xc2, 0xc6, 0xc0, 0xbd, 0xbe, + 0xc1, 0xc6, 0xc0, 0xbe, 0xc1, 0xc7, 0xca, 0xc5, 0xbb, 0xaf, 0xaa, 0xae, + 0xc2, 0xc8, 0xb0, 0xac, 0xae, 0xac, 0xab, 0xab, 0xac, 0xb6, 0xbd, 0xbb, + 0xb6, 0xb7, 0xb9, 0xbb, 0xb4, 0xad, 0xb1, 0xbd, 0xc0, 0xbb, 0xbc, 0xbb, + 0xc1, 0xc9, 0xc5, 0xce, 0xce, 0xcb, 0xc0, 0xc9, 0xd1, 0xcf, 0xce, 0xcc, + 0xc8, 0xc8, 0xca, 0xd1, 0xda, 0xd5, 0xce, 0xc8, 0xc5, 0xc1, 0xc3, 0xcf, + 0xd1, 0xcb, 0xc8, 0xc9, 0xcc, 0xcb, 0xcc, 0xc7, 0xc8, 0xce, 0xcb, 0xcc, + 0xd0, 0xda, 0xdb, 0xce, 0xd0, 0xcf, 0xce, 0xca, 0xbf, 0xbe, 0xb9, 0xbb, + 0xbb, 0xbd, 0xbe, 0xbd, 0xb0, 0xd5, 0xe3, 0xe8, 0xee, 0xe2, 0xda, 0xd8, + 0xd7, 0xd0, 0xcc, 0xd6, 0xe2, 0xd8, 0xce, 0xc6, 0xc1, 0xc1, 0xc0, 0xbc, + 0xba, 0xc2, 0xbd, 0xc5, 0xde, 0xe5, 0xeb, 0xf0, 0xeb, 0xf8, 0xfb, 0x01, + 0x06, 0x18, 0x29, 0x32, 0x38, 0x3e, 0x3e, 0x3e, 0x41, 0x46, 0x49, 0x49, + 0x48, 0x48, 0x50, 0x56, 0x5a, 0x5c, 0x5c, 0x57, 0x57, 0x5b, 0x5a, 0x59, + 0x57, 0x53, 0x50, 0x54, 0x58, 0x57, 0x54, 0x56, 0x5a, 0x5e, 0x5b, 0x58, + 0x5d, 0x62, 0x60, 0x5e, 0x61, 0x63, 0x63, 0x66, 0x66, 0x67, 0x68, 0x67, + 0x66, 0x66, 0x66, 0x63, 0x5d, 0x48, 0x2e, 0x1b, 0x02, 0xea, 0xcb, 0xb6, + 0xa5, 0x9e, 0xaa, 0xbf, 0xc5, 0xc3, 0xb5, 0xa4, 0x9f, 0x9c, 0x99, 0x9e, + 0xa4, 0xa2, 0xa1, 0xa2, 0xad, 0xaf, 0xb5, 0xb6, 0xb9, 0xc0, 0xc4, 0xbf, + 0xb9, 0xb6, 0xb0, 0xae, 0xad, 0xa6, 0xa6, 0xa2, 0xa3, 0xab, 0xb9, 0xab, + 0xb3, 0xc3, 0xc8, 0xc8, 0xc8, 0xc0, 0xbe, 0xbf, 0xbd, 0xc8, 0xc5, 0xbd, + 0xbd, 0xbe, 0xc1, 0xc8, 0xc6, 0xbe, 0xb8, 0xb8, 0xd0, 0xc9, 0xaf, 0xac, + 0xaf, 0xaf, 0xaf, 0xae, 0xb0, 0xbe, 0xbf, 0xbb, 0xb8, 0xbb, 0xbd, 0xbb, + 0xb8, 0xb5, 0xb3, 0xb4, 0xc1, 0xcb, 0xc3, 0xbf, 0xc3, 0xd1, 0xce, 0xcc, + 0xcc, 0xcf, 0xc6, 0xcb, 0xce, 0xd2, 0xcc, 0xbf, 0xc0, 0xcb, 0xcd, 
0xd1, + 0xd5, 0xdd, 0xce, 0xc8, 0xc8, 0xc3, 0xc7, 0xd3, 0xd2, 0xc8, 0xc2, 0xc1, + 0xc3, 0xc3, 0xc3, 0xc2, 0xc6, 0xcf, 0xcd, 0xc9, 0xc9, 0xd4, 0xd1, 0xcd, + 0xc8, 0xc0, 0xbe, 0xc0, 0xc0, 0xc1, 0xbf, 0xc1, 0xc4, 0xc7, 0xc5, 0xc4, + 0xe0, 0x04, 0xd1, 0xda, 0xe9, 0xe2, 0xe0, 0xe0, 0xe0, 0xda, 0xdc, 0xeb, + 0xfd, 0xc6, 0xc9, 0xca, 0xcb, 0xc1, 0xbb, 0xcd, 0xbb, 0xb4, 0xd0, 0xe2, + 0xd7, 0xe1, 0xdc, 0xd0, 0xc9, 0xbc, 0xc8, 0xd2, 0xd2, 0xea, 0x0d, 0x26, + 0x30, 0x35, 0x36, 0x37, 0x39, 0x3e, 0x44, 0x44, 0x49, 0x4a, 0x4c, 0x50, + 0x57, 0x58, 0x59, 0x56, 0x57, 0x5a, 0x5c, 0x5d, 0x59, 0x56, 0x55, 0x54, + 0x57, 0x59, 0x57, 0x57, 0x59, 0x5d, 0x5e, 0x5d, 0x5e, 0x61, 0x62, 0x5d, + 0x61, 0x63, 0x63, 0x65, 0x67, 0x69, 0x68, 0x67, 0x65, 0x64, 0x64, 0x63, + 0x62, 0x5c, 0x49, 0x2a, 0x06, 0xef, 0xce, 0xc3, 0xab, 0x9b, 0x9c, 0xb3, + 0xc6, 0xc7, 0xb3, 0xa2, 0xa0, 0x9d, 0x9a, 0x9c, 0xa6, 0xa6, 0xa3, 0xa4, + 0xac, 0xab, 0xa8, 0xa5, 0xab, 0xb8, 0xbb, 0xbd, 0xbb, 0xb9, 0xb4, 0xaf, + 0xa8, 0x9d, 0xa2, 0xa6, 0xa6, 0xb0, 0xb8, 0xa7, 0xa2, 0xb4, 0xc5, 0xc8, + 0xc8, 0xc0, 0xbf, 0xbb, 0xbb, 0xc6, 0xcd, 0xc4, 0xc1, 0xc1, 0xc1, 0xc0, + 0xc3, 0xc4, 0xc0, 0xbd, 0xd1, 0xc2, 0xb0, 0xb1, 0xae, 0xaf, 0xb0, 0xae, + 0xae, 0xb9, 0xbf, 0xbf, 0xbe, 0xbc, 0xc0, 0xbd, 0xbd, 0xc0, 0xba, 0xb5, + 0xba, 0xbf, 0xbf, 0xbd, 0xbe, 0xc9, 0xce, 0xc9, 0xc2, 0xcb, 0xc6, 0xca, + 0xc4, 0xc9, 0xc1, 0xb3, 0xbb, 0xce, 0xd0, 0xc8, 0xc3, 0xd3, 0xce, 0xc7, + 0xc2, 0xbb, 0xc1, 0xc8, 0xd2, 0xcc, 0xc1, 0xc2, 0xc1, 0xc2, 0xc2, 0xc4, + 0xcc, 0xcd, 0xc8, 0xce, 0xc7, 0xcb, 0xc5, 0xc5, 0xc5, 0xc2, 0xbf, 0xc1, + 0xc0, 0xc3, 0xcb, 0xcc, 0xd2, 0xd3, 0xc9, 0xc6, 0x0d, 0x12, 0xc6, 0xc1, + 0xca, 0xd5, 0xdb, 0xe4, 0xe7, 0xe2, 0xe1, 0xf9, 0xe7, 0xb1, 0xb4, 0xb9, + 0xc0, 0xbb, 0xcd, 0xee, 0xe1, 0xd9, 0xe7, 0xea, 0xe4, 0xdb, 0xd3, 0xc1, + 0xb8, 0xb9, 0xce, 0xd9, 0xd7, 0xc3, 0xea, 0x1b, 0x2d, 0x31, 0x34, 0x35, + 0x35, 0x36, 0x39, 0x41, 0x47, 0x4a, 0x47, 0x4a, 0x50, 0x57, 0x57, 0x56, + 0x58, 0x5b, 0x5a, 0x5c, 0x5a, 0x57, 0x57, 0x57, 0x59, 0x5a, 0x57, 
0x57, + 0x5a, 0x5d, 0x5f, 0x60, 0x60, 0x61, 0x62, 0x5e, 0x61, 0x63, 0x64, 0x65, + 0x67, 0x68, 0x68, 0x67, 0x66, 0x66, 0x65, 0x63, 0x63, 0x60, 0x58, 0x42, + 0x1f, 0xf2, 0xce, 0xbf, 0xba, 0xa5, 0x9c, 0xa5, 0xbe, 0xc2, 0xb2, 0xa6, + 0xa3, 0x9c, 0x99, 0x9b, 0xa6, 0xab, 0xad, 0xb2, 0xaf, 0xa9, 0xa9, 0xaa, + 0xa9, 0xb6, 0xbb, 0xb6, 0xb5, 0xb3, 0xb2, 0xb0, 0xa5, 0xa0, 0xa4, 0xac, + 0xae, 0xb7, 0xb0, 0xa4, 0x9c, 0xa2, 0xbb, 0xc3, 0xc4, 0xc0, 0xbb, 0xb8, + 0xbe, 0xc6, 0xc8, 0xc4, 0xc2, 0xc3, 0xc5, 0xc2, 0xc1, 0xc1, 0xbe, 0xba, + 0xc2, 0xc1, 0xb9, 0xb5, 0xb1, 0xae, 0xaf, 0xad, 0xaa, 0xa9, 0xb3, 0xbc, + 0xbf, 0xbf, 0xc1, 0xc1, 0xbf, 0xbf, 0xb5, 0xb4, 0xaf, 0xaf, 0xb2, 0xb5, + 0xbb, 0xc2, 0xc2, 0xc9, 0xc4, 0xca, 0xc8, 0xcc, 0xcd, 0xcc, 0xbc, 0xb0, + 0xb5, 0xcc, 0xd2, 0xca, 0xc0, 0xc6, 0xce, 0xce, 0xc2, 0xbd, 0xc6, 0xcd, + 0xdf, 0xd8, 0xcb, 0xc8, 0xc5, 0xca, 0xcd, 0xce, 0xcf, 0xcf, 0xcb, 0xcd, + 0xcc, 0xcc, 0xc0, 0xbe, 0xc1, 0xbc, 0xb7, 0xb9, 0xb9, 0xb7, 0xc1, 0xd4, + 0xcf, 0xd3, 0xd0, 0xcf, 0x23, 0x20, 0xd8, 0xad, 0xa9, 0xab, 0xbb, 0xc9, + 0xd2, 0xd4, 0xe1, 0xf7, 0xd4, 0xc4, 0xbf, 0xb5, 0xbc, 0xc3, 0xd8, 0xe4, + 0xe6, 0xdf, 0xe4, 0xe1, 0xd6, 0xcf, 0xc7, 0xb4, 0xbb, 0xd0, 0xe3, 0xe5, + 0xda, 0xc7, 0xdc, 0x17, 0x2b, 0x33, 0x36, 0x37, 0x39, 0x39, 0x3a, 0x3e, + 0x44, 0x46, 0x46, 0x4a, 0x4f, 0x56, 0x5a, 0x57, 0x58, 0x5d, 0x5d, 0x5d, + 0x5b, 0x57, 0x54, 0x56, 0x58, 0x5b, 0x58, 0x57, 0x5a, 0x5c, 0x5f, 0x61, + 0x61, 0x62, 0x63, 0x62, 0x63, 0x64, 0x64, 0x64, 0x64, 0x67, 0x66, 0x68, + 0x68, 0x67, 0x67, 0x65, 0x63, 0x61, 0x5d, 0x56, 0x3c, 0x05, 0xd5, 0xc1, + 0xbf, 0xb0, 0x9d, 0x9d, 0xae, 0xba, 0xab, 0xa6, 0xa0, 0x9b, 0x9c, 0xa3, + 0xaa, 0xb0, 0xb8, 0xc3, 0xbf, 0xb3, 0xb1, 0xb1, 0xaa, 0xad, 0xb8, 0xb7, + 0xb5, 0xb2, 0xaf, 0xa9, 0xa4, 0xa3, 0xa8, 0xae, 0xb1, 0xb9, 0xad, 0xa4, + 0x9b, 0x9d, 0xaf, 0xbf, 0xc7, 0xc0, 0xbb, 0xbc, 0xc1, 0xc0, 0xc1, 0xc3, + 0xbe, 0xbb, 0xbe, 0xc1, 0xc1, 0xc1, 0xc2, 0xc1, 0xc2, 0xca, 0xd5, 0xc3, + 0xb5, 0xaf, 0xaf, 0xab, 0xab, 0xa7, 0xa2, 0xac, 0xb9, 0xc0, 0xc3, 
0xc1, + 0xbd, 0xbb, 0xb5, 0xb8, 0xb4, 0xae, 0xaf, 0xb5, 0xbe, 0xc2, 0xc7, 0xc9, + 0xc8, 0xce, 0xce, 0xc5, 0xca, 0xc9, 0xbc, 0xbf, 0xbd, 0xc9, 0xd4, 0xc8, + 0xcd, 0xce, 0xd0, 0xd1, 0xce, 0xc2, 0xc4, 0xd7, 0xdf, 0xd3, 0xce, 0xcd, + 0xc3, 0xce, 0xd9, 0xd4, 0xd4, 0xd2, 0xce, 0xce, 0xd0, 0xd2, 0xcd, 0xbe, + 0xc4, 0xc1, 0xbc, 0xbe, 0xbd, 0xbd, 0xc9, 0xd0, 0xd1, 0xd6, 0xd4, 0xd4, + 0x2d, 0x2d, 0x08, 0xb9, 0xa8, 0xa8, 0xae, 0xb0, 0xc0, 0xcc, 0xd6, 0xdf, + 0xce, 0xc6, 0xbf, 0xb6, 0xc0, 0xc0, 0xc4, 0xcb, 0xcf, 0xd1, 0xd4, 0xd4, + 0xd1, 0xd4, 0xd4, 0xd1, 0xdb, 0xe9, 0xea, 0xda, 0xc1, 0xcb, 0xf2, 0x1b, + 0x2b, 0x34, 0x34, 0x33, 0x36, 0x3a, 0x3d, 0x41, 0x42, 0x45, 0x4a, 0x50, + 0x51, 0x57, 0x5b, 0x5b, 0x5a, 0x5c, 0x5e, 0x5e, 0x5c, 0x57, 0x54, 0x55, + 0x58, 0x5b, 0x59, 0x57, 0x59, 0x5c, 0x5d, 0x61, 0x63, 0x63, 0x63, 0x63, + 0x64, 0x63, 0x63, 0x65, 0x65, 0x67, 0x68, 0x68, 0x68, 0x67, 0x68, 0x65, + 0x63, 0x60, 0x5d, 0x57, 0x50, 0x36, 0xf8, 0xcd, 0xc3, 0xaf, 0xa2, 0xa3, + 0xa4, 0xac, 0xa6, 0xa3, 0xa0, 0xa6, 0xab, 0xae, 0xb0, 0xb8, 0xc3, 0xc8, + 0xc5, 0xbf, 0xb5, 0xb3, 0xaf, 0xac, 0xae, 0xaa, 0xa9, 0xa9, 0xa7, 0xa7, + 0xa6, 0xa9, 0xaa, 0xac, 0xb5, 0xb9, 0xa6, 0x9f, 0x9a, 0x9b, 0xa6, 0xbb, + 0xc5, 0xc1, 0xbb, 0xbc, 0xc0, 0xbc, 0xbe, 0xc3, 0xc1, 0xbd, 0xbb, 0xbc, + 0xbd, 0xc0, 0xc1, 0xc2, 0xc1, 0xc4, 0xd2, 0xd1, 0xb9, 0xb2, 0xb0, 0xac, + 0xab, 0xa8, 0xa0, 0xa0, 0xa5, 0xb8, 0xc1, 0xb6, 0xb5, 0xb8, 0xbb, 0xbe, + 0xb9, 0xab, 0xb4, 0xc3, 0xc3, 0xc2, 0xce, 0xc8, 0xc5, 0xc8, 0xcb, 0xc2, + 0xc7, 0xc8, 0xbd, 0xc6, 0xcb, 0xcc, 0xd3, 0xce, 0xcc, 0xce, 0xd3, 0xd2, + 0xcc, 0xc9, 0xc9, 0xd0, 0xc5, 0xbc, 0xbc, 0xbd, 0xbc, 0xc3, 0xcc, 0xca, + 0xd4, 0xd1, 0xcf, 0xcf, 0xc8, 0xce, 0xd3, 0xd1, 0xd4, 0xd4, 0xcf, 0xcc, + 0xc8, 0xce, 0xd4, 0xcf, 0xd4, 0xd7, 0xd4, 0xd2, 0x32, 0x35, 0x2b, 0xfa, + 0xc1, 0xad, 0xab, 0xad, 0xb4, 0xb7, 0xb9, 0xbf, 0xc3, 0xc0, 0xbe, 0xba, + 0xbd, 0xb9, 0xb7, 0xb9, 0xbe, 0xc8, 0xcd, 0xd5, 0xd9, 0xdc, 0xe1, 0xdf, + 0xdc, 0xd4, 0xcc, 0xc7, 0xda, 0xf9, 0x11, 0x20, 0x2c, 0x31, 0x32, 
0x36, + 0x38, 0x3c, 0x3e, 0x3f, 0x3e, 0x3e, 0x45, 0x4f, 0x51, 0x56, 0x5b, 0x5c, + 0x5d, 0x5d, 0x5c, 0x5d, 0x5d, 0x59, 0x56, 0x53, 0x57, 0x59, 0x5a, 0x57, + 0x58, 0x5b, 0x5d, 0x5f, 0x63, 0x64, 0x63, 0x62, 0x60, 0x62, 0x63, 0x65, + 0x68, 0x68, 0x68, 0x67, 0x67, 0x67, 0x66, 0x66, 0x64, 0x62, 0x5d, 0x57, + 0x50, 0x48, 0x2a, 0xef, 0xc2, 0xa8, 0xa2, 0xa6, 0xa5, 0xa5, 0xa2, 0xa2, + 0xa5, 0xad, 0xb3, 0xb4, 0xb6, 0xc1, 0xc6, 0xc7, 0xc4, 0xc3, 0xc0, 0xb7, + 0xb1, 0xb0, 0xb4, 0xb1, 0xad, 0xa9, 0xa7, 0xa9, 0xaa, 0xaa, 0xac, 0xb0, + 0xbc, 0xb5, 0xa4, 0x9e, 0x9c, 0xa0, 0xa7, 0xb7, 0xc2, 0xc3, 0xc0, 0xbf, + 0xc0, 0xbc, 0xc0, 0xc5, 0xc6, 0xbe, 0xb8, 0xb9, 0xbd, 0xbc, 0xbe, 0xc0, + 0xc1, 0xbf, 0xc4, 0xcb, 0xbd, 0xb5, 0xb2, 0xad, 0xaa, 0xa6, 0xa0, 0x9e, + 0x9b, 0xa4, 0xb4, 0xaa, 0xaf, 0xa8, 0xb6, 0xc5, 0xbd, 0xb3, 0xba, 0xc9, + 0xc3, 0xc5, 0xcb, 0xc5, 0xc1, 0xc1, 0xcb, 0xc8, 0xc6, 0xc6, 0xc0, 0xc8, + 0xd0, 0xc9, 0xc8, 0xcd, 0xcb, 0xce, 0xcf, 0xcf, 0xca, 0xce, 0xd4, 0xcd, + 0xc1, 0xc2, 0xbd, 0xba, 0xb6, 0xbb, 0xbc, 0xbe, 0xca, 0xd0, 0xd0, 0xcd, + 0xcb, 0xd1, 0xd1, 0xd3, 0xd1, 0xd6, 0xd8, 0xd2, 0xcd, 0xd5, 0xda, 0xd3, + 0xd3, 0xd4, 0xd4, 0xd2, 0x37, 0x39, 0x39, 0x26, 0xfb, 0xdb, 0xc9, 0xbd, + 0xb8, 0xb5, 0xb8, 0xbb, 0xbd, 0xbb, 0xb5, 0xbd, 0xd3, 0xda, 0xd6, 0xc8, + 0xc1, 0xc0, 0xbb, 0xbe, 0xc1, 0xc3, 0xc2, 0xc8, 0xc4, 0xcb, 0xea, 0x04, + 0x17, 0x18, 0x1c, 0x23, 0x2b, 0x30, 0x33, 0x37, 0x3a, 0x3d, 0x3e, 0x3e, + 0x3d, 0x3f, 0x45, 0x4e, 0x51, 0x55, 0x5b, 0x5d, 0x5d, 0x5d, 0x5b, 0x5c, + 0x5a, 0x59, 0x57, 0x55, 0x57, 0x58, 0x58, 0x58, 0x58, 0x5b, 0x5d, 0x5e, + 0x60, 0x63, 0x63, 0x63, 0x60, 0x61, 0x63, 0x66, 0x69, 0x69, 0x67, 0x66, + 0x66, 0x66, 0x67, 0x64, 0x63, 0x63, 0x5f, 0x59, 0x52, 0x4a, 0x38, 0x1c, + 0xe6, 0xb8, 0xa9, 0xa8, 0xa9, 0xa7, 0xa3, 0xa4, 0xae, 0xaf, 0xb0, 0xb5, + 0xc0, 0xc5, 0xc5, 0xc5, 0xc2, 0xbe, 0xc0, 0xc0, 0xb6, 0xb3, 0xb5, 0xb5, + 0xb2, 0xad, 0xa9, 0xac, 0xaf, 0xa8, 0xab, 0xb1, 0xc0, 0xb4, 0xa5, 0x9f, + 0x9e, 0x9e, 0xa1, 0xb2, 0xc3, 0xc4, 0xc1, 0xc1, 0xc0, 0xbf, 0xc1, 
0xc2, + 0xc6, 0xbf, 0xb6, 0xb5, 0xbb, 0xbd, 0xbf, 0xbe, 0xc1, 0xc1, 0xbf, 0xc6, + 0xbb, 0xb2, 0xb4, 0xae, 0xa3, 0x9d, 0x9b, 0x9a, 0x9c, 0xa2, 0xb5, 0xb0, + 0xb9, 0xa7, 0xaf, 0xca, 0xc5, 0xbe, 0xc9, 0xcd, 0xc4, 0xca, 0xcd, 0xc5, + 0xbb, 0xbd, 0xca, 0xd4, 0xc6, 0xc5, 0xc5, 0xcd, 0xd3, 0xc9, 0xc3, 0xca, + 0xd1, 0xd4, 0xd5, 0xd2, 0xd4, 0xd1, 0xd0, 0xcd, 0xcb, 0xd0, 0xc8, 0xc9, + 0xd1, 0xce, 0xbd, 0xb8, 0xc6, 0xce, 0xcc, 0xc6, 0xcc, 0xd4, 0xce, 0xce, + 0xd2, 0xd4, 0xdc, 0xd4, 0xd1, 0xd5, 0xd4, 0xcf, 0xd3, 0xda, 0xde, 0xda, + 0x3a, 0x3b, 0x3d, 0x37, 0x1f, 0x01, 0xee, 0xe5, 0xe0, 0xdd, 0xdb, 0xd5, + 0xd1, 0xcc, 0xca, 0xe6, 0x05, 0x17, 0x19, 0x1b, 0x13, 0x0c, 0x05, 0x05, + 0x05, 0x05, 0x0a, 0x13, 0x18, 0x1f, 0x25, 0x23, 0x26, 0x25, 0x24, 0x25, + 0x28, 0x2f, 0x36, 0x36, 0x32, 0x33, 0x35, 0x37, 0x3a, 0x3e, 0x44, 0x4b, + 0x53, 0x58, 0x5b, 0x5b, 0x59, 0x5d, 0x5d, 0x5c, 0x58, 0x57, 0x57, 0x57, + 0x57, 0x59, 0x59, 0x59, 0x59, 0x59, 0x5d, 0x5f, 0x5d, 0x60, 0x63, 0x64, + 0x62, 0x61, 0x63, 0x66, 0x68, 0x69, 0x68, 0x68, 0x67, 0x67, 0x66, 0x65, + 0x63, 0x62, 0x5e, 0x5a, 0x54, 0x4e, 0x42, 0x25, 0x0f, 0xe9, 0xbc, 0xa9, + 0xa9, 0xa9, 0xa8, 0xae, 0xb5, 0xb9, 0xb9, 0xbc, 0xc1, 0xc3, 0xc1, 0xc1, + 0xbf, 0xbb, 0xba, 0xbc, 0xbb, 0xb7, 0xb6, 0xb6, 0xb5, 0xaf, 0xad, 0xb1, + 0xb9, 0xae, 0xad, 0xb7, 0xbf, 0xb1, 0xa4, 0x9f, 0x9f, 0xa2, 0x9f, 0xa7, + 0xbe, 0xc3, 0xc1, 0xc2, 0xbf, 0xc1, 0xc1, 0xc3, 0xc7, 0xc2, 0xbb, 0xb9, + 0xbf, 0xc3, 0xbf, 0xc0, 0xc3, 0xc6, 0xbf, 0xc0, 0xbb, 0xac, 0xa8, 0xa8, + 0xa6, 0xa2, 0xa3, 0xa9, 0xaa, 0xa6, 0xaf, 0xb6, 0xb6, 0xac, 0xbc, 0xda, + 0xcb, 0xc4, 0xce, 0xca, 0xcb, 0xcd, 0xc8, 0xc6, 0xc0, 0xc1, 0xce, 0xdb, + 0xce, 0xc5, 0xc8, 0xd3, 0xda, 0xd0, 0xce, 0xcf, 0xd2, 0xd1, 0xd7, 0xd4, + 0xd4, 0xd5, 0xd2, 0xcf, 0xce, 0xd2, 0xce, 0xc8, 0xc9, 0xd7, 0xda, 0xd4, + 0xd4, 0xcb, 0xce, 0xd1, 0xd1, 0xd5, 0xd4, 0xd8, 0xdb, 0xd6, 0xda, 0xda, + 0xd8, 0xdd, 0xd7, 0xd0, 0xd4, 0xda, 0xe3, 0xd4, 0x3c, 0x3c, 0x3d, 0x3a, + 0x37, 0x28, 0x12, 0x01, 0xfa, 0xfd, 0x04, 0x07, 0x06, 0x09, 0x0c, 
0x1a, + 0x20, 0x21, 0x20, 0x20, 0x22, 0x23, 0x24, 0x25, 0x26, 0x27, 0x26, 0x26, + 0x27, 0x27, 0x28, 0x2a, 0x2b, 0x29, 0x27, 0x28, 0x2a, 0x2c, 0x2f, 0x2a, + 0x22, 0x1f, 0x28, 0x34, 0x3f, 0x43, 0x4c, 0x4f, 0x54, 0x58, 0x58, 0x58, + 0x57, 0x59, 0x5c, 0x59, 0x57, 0x57, 0x58, 0x59, 0x59, 0x5a, 0x5b, 0x5a, + 0x5b, 0x58, 0x5c, 0x5f, 0x5e, 0x5e, 0x61, 0x63, 0x63, 0x63, 0x64, 0x65, + 0x67, 0x68, 0x69, 0x69, 0x69, 0x67, 0x65, 0x66, 0x64, 0x63, 0x5f, 0x5b, + 0x4d, 0x44, 0x46, 0x32, 0x0d, 0x02, 0xec, 0xc8, 0xad, 0xaa, 0xac, 0xb0, + 0xb7, 0xbc, 0xc2, 0xbf, 0xba, 0xbc, 0xbc, 0xbe, 0xba, 0xb5, 0xb5, 0xb7, + 0xb6, 0xb5, 0xb6, 0xb4, 0xb3, 0xb2, 0xb7, 0xbb, 0xbb, 0xb4, 0xae, 0xba, + 0xbb, 0xab, 0xa3, 0xa2, 0xaa, 0xaa, 0x9d, 0x9c, 0xaf, 0xc1, 0xc3, 0xc1, + 0xc1, 0xc3, 0xc1, 0xc4, 0xc3, 0xc1, 0xbb, 0xb7, 0xc2, 0xcd, 0xbc, 0xc0, + 0xc5, 0xc8, 0xc1, 0xbd, 0xc0, 0xaf, 0xa7, 0xa9, 0xac, 0xaa, 0xaf, 0xb7, + 0xba, 0xb7, 0xb5, 0xbe, 0xc2, 0xc6, 0xd4, 0xd5, 0xd2, 0xcc, 0xc7, 0xc9, + 0xd1, 0xd1, 0xcf, 0xcc, 0xc7, 0xcc, 0xce, 0xd3, 0xd2, 0xce, 0xc7, 0xcd, + 0xd9, 0xce, 0xd2, 0xd2, 0xd1, 0xd2, 0xd7, 0xd4, 0xdd, 0xda, 0xd7, 0xd8, + 0xd4, 0xd5, 0xdc, 0xd8, 0xd2, 0xd4, 0xda, 0xdf, 0xdb, 0xd1, 0xd2, 0xd5, + 0xd3, 0xd4, 0xd8, 0xdd, 0xdf, 0xda, 0xd7, 0xdc, 0xdb, 0xda, 0xd4, 0xd5, + 0xd6, 0xd7, 0xda, 0xd3, 0x3d, 0x3a, 0x3a, 0x39, 0x39, 0x37, 0x31, 0x25, + 0x1a, 0x13, 0x10, 0x12, 0x18, 0x1c, 0x1f, 0x22, 0x22, 0x21, 0x1d, 0x1b, + 0x1c, 0x1d, 0x1d, 0x1f, 0x22, 0x25, 0x24, 0x22, 0x22, 0x23, 0x24, 0x28, + 0x2a, 0x2b, 0x2b, 0x2b, 0x2d, 0x28, 0x25, 0x23, 0x1f, 0x1e, 0x22, 0x31, + 0x3f, 0x4a, 0x52, 0x52, 0x56, 0x57, 0x54, 0x54, 0x54, 0x55, 0x59, 0x57, + 0x52, 0x56, 0x5a, 0x5c, 0x5a, 0x58, 0x5c, 0x5c, 0x5c, 0x59, 0x59, 0x5d, + 0x60, 0x5d, 0x5e, 0x62, 0x63, 0x63, 0x64, 0x64, 0x66, 0x69, 0x69, 0x69, + 0x69, 0x69, 0x67, 0x66, 0x66, 0x64, 0x61, 0x5d, 0x57, 0x3e, 0x3b, 0x36, + 0x09, 0xf8, 0xf3, 0xe0, 0xcc, 0xb6, 0xb2, 0xaf, 0xb0, 0xb8, 0xbb, 0xba, + 0xb5, 0xb5, 0xb6, 0xbf, 0xbd, 0xb3, 0xb2, 0xb4, 0xb4, 0xb4, 0xb5, 
0xb3, + 0xb4, 0xba, 0xc1, 0xb6, 0xb8, 0xb7, 0xb0, 0xbc, 0xb5, 0xa9, 0xa8, 0xae, + 0xb5, 0xa9, 0x9c, 0x9b, 0x9e, 0xb1, 0xc1, 0xc3, 0xc5, 0xc3, 0xc2, 0xc3, + 0xc1, 0xc1, 0xbc, 0xb4, 0xb6, 0xcd, 0xc2, 0xbd, 0xc3, 0xca, 0xc8, 0xc0, + 0xc1, 0xbc, 0xaf, 0xab, 0xb4, 0xbd, 0xbd, 0xb8, 0xb7, 0xc1, 0xc3, 0xc0, + 0xc1, 0xc4, 0xc7, 0xc8, 0xcd, 0xc8, 0xc7, 0xcc, 0xd3, 0xda, 0xd4, 0xcd, + 0xca, 0xcd, 0xd4, 0xd0, 0xd1, 0xd4, 0xcf, 0xce, 0xd0, 0xcf, 0xce, 0xd0, + 0xce, 0xc9, 0xd4, 0xda, 0xe8, 0xde, 0xd5, 0xd8, 0xd8, 0xd8, 0xda, 0xdb, + 0xde, 0xdb, 0xdd, 0xdc, 0xdd, 0xd9, 0xd9, 0xd4, 0xd4, 0xd4, 0xd4, 0xd8, + 0xd7, 0xd7, 0xda, 0xda, 0xd8, 0xd3, 0xd6, 0xdc, 0xdf, 0xda, 0xd7, 0xd0, + 0x37, 0x37, 0x37, 0x37, 0x37, 0x37, 0x37, 0x32, 0x31, 0x2c, 0x28, 0x25, + 0x23, 0x21, 0x1e, 0x1c, 0x1a, 0x18, 0x18, 0x19, 0x19, 0x1a, 0x1a, 0x18, + 0x19, 0x1c, 0x1e, 0x1d, 0x1f, 0x20, 0x23, 0x26, 0x28, 0x2b, 0x2b, 0x2a, + 0x2c, 0x2d, 0x2f, 0x34, 0x31, 0x31, 0x31, 0x37, 0x42, 0x4b, 0x50, 0x50, + 0x56, 0x53, 0x53, 0x51, 0x52, 0x53, 0x57, 0x57, 0x51, 0x52, 0x5a, 0x5e, + 0x5b, 0x58, 0x5c, 0x5d, 0x5c, 0x5b, 0x58, 0x5c, 0x62, 0x5f, 0x5e, 0x5f, + 0x63, 0x65, 0x62, 0x63, 0x66, 0x69, 0x69, 0x69, 0x6a, 0x69, 0x68, 0x66, + 0x66, 0x65, 0x63, 0x5f, 0x5a, 0x53, 0x40, 0x2d, 0x16, 0xf9, 0xe7, 0xda, + 0xc4, 0xb9, 0xb5, 0xae, 0xab, 0xb0, 0xb0, 0xb3, 0xb1, 0xb0, 0xbb, 0xc3, + 0xc3, 0xbc, 0xb4, 0xb2, 0xb2, 0xb0, 0xaf, 0xaf, 0xb4, 0xbf, 0xb9, 0xaf, + 0xb7, 0xb7, 0xb6, 0xbe, 0xb2, 0xac, 0xab, 0xb7, 0xb5, 0xa7, 0x9e, 0x9c, + 0x9c, 0xa4, 0xb9, 0xc1, 0xc2, 0xc1, 0xc1, 0xc1, 0xbe, 0xc0, 0xbe, 0xb4, + 0xaf, 0xc3, 0xcc, 0xc0, 0xc8, 0xcd, 0xcd, 0xc7, 0xc8, 0xca, 0xbd, 0xab, + 0xb8, 0xc1, 0xc1, 0xc1, 0xbf, 0xc3, 0xc4, 0xc5, 0xc1, 0xc1, 0xc1, 0xc3, + 0xcd, 0xcc, 0xcb, 0xd6, 0xd1, 0xd4, 0xcf, 0xd0, 0xc8, 0xcf, 0xda, 0xc7, + 0xc8, 0xd3, 0xd2, 0xce, 0xc9, 0xc8, 0xc6, 0xc7, 0xc8, 0xc8, 0xcf, 0xd6, + 0xdd, 0xd9, 0xd2, 0xd6, 0xdc, 0xd9, 0xd8, 0xda, 0xde, 0xd7, 0xd7, 0xda, + 0xda, 0xd7, 0xda, 0xd4, 0xdb, 0xd5, 0xce, 0xd8, 0xda, 0xd4, 0xde, 
0xe1, + 0xda, 0xd4, 0xda, 0xd9, 0xdd, 0xd6, 0xd5, 0xd2, 0x37, 0x37, 0x36, 0x35, + 0x36, 0x35, 0x33, 0x30, 0x2f, 0x2c, 0x2b, 0x2b, 0x29, 0x28, 0x23, 0x1d, + 0x18, 0x18, 0x19, 0x1a, 0x17, 0x18, 0x1a, 0x19, 0x1d, 0x1f, 0x1f, 0x1f, + 0x21, 0x25, 0x24, 0x24, 0x26, 0x27, 0x2b, 0x2a, 0x2b, 0x34, 0x38, 0x3e, + 0x3e, 0x43, 0x44, 0x43, 0x49, 0x4d, 0x4d, 0x50, 0x56, 0x53, 0x54, 0x50, + 0x51, 0x53, 0x58, 0x59, 0x56, 0x52, 0x5a, 0x5d, 0x5d, 0x5b, 0x5b, 0x5d, + 0x5d, 0x5b, 0x59, 0x5d, 0x60, 0x61, 0x5f, 0x5d, 0x5f, 0x63, 0x62, 0x63, + 0x65, 0x69, 0x69, 0x69, 0x6a, 0x69, 0x68, 0x66, 0x66, 0x64, 0x63, 0x62, + 0x5c, 0x53, 0x4a, 0x3e, 0x1c, 0xff, 0xeb, 0xd2, 0xc1, 0xb5, 0xb2, 0xaf, + 0xa9, 0xa7, 0xa9, 0xb0, 0xb1, 0xb4, 0xbb, 0xbf, 0xbe, 0xbb, 0xb9, 0xb4, + 0xaf, 0xaa, 0xa8, 0xae, 0xb9, 0xbe, 0xbf, 0xb8, 0xba, 0xba, 0xbb, 0xbb, + 0xaf, 0xb1, 0xb2, 0xbb, 0xb0, 0xa1, 0x9e, 0xa0, 0xa2, 0xa9, 0xaf, 0xb5, + 0xba, 0xbb, 0xbe, 0xc1, 0xbc, 0xbb, 0xbe, 0xb5, 0xb2, 0xbe, 0xd0, 0xc7, + 0xca, 0xc8, 0xcb, 0xca, 0xca, 0xd0, 0xc8, 0xb9, 0xb8, 0xc3, 0xb9, 0xb6, + 0xc1, 0xcb, 0xc9, 0xc4, 0xbe, 0xc1, 0xc1, 0xbb, 0xcb, 0xcb, 0xc8, 0xd0, + 0xcd, 0xcd, 0xcb, 0xd3, 0xcf, 0xd3, 0xd0, 0xc1, 0xbe, 0xc3, 0xc1, 0xbe, + 0xc1, 0xbe, 0xba, 0xbb, 0xc3, 0xc8, 0xc8, 0xcc, 0xd5, 0xd4, 0xd2, 0xd4, + 0xd4, 0xd5, 0xd5, 0xd4, 0xd1, 0xcb, 0xd1, 0xd4, 0xce, 0xd2, 0xd1, 0xca, + 0xd7, 0xcf, 0xcd, 0xd7, 0xda, 0xcd, 0xd0, 0xe0, 0xe1, 0xda, 0xda, 0xda, + 0xde, 0xda, 0xd3, 0xd6, 0x37, 0x37, 0x35, 0x35, 0x34, 0x33, 0x31, 0x30, + 0x30, 0x2e, 0x2b, 0x2b, 0x2a, 0x2a, 0x25, 0x1f, 0x19, 0x12, 0x14, 0x16, + 0x17, 0x1c, 0x1f, 0x1f, 0x22, 0x23, 0x25, 0x25, 0x29, 0x2b, 0x2b, 0x2c, + 0x2c, 0x2a, 0x27, 0x27, 0x28, 0x31, 0x38, 0x42, 0x44, 0x47, 0x4a, 0x4b, + 0x4e, 0x4b, 0x46, 0x4d, 0x57, 0x56, 0x55, 0x53, 0x54, 0x57, 0x59, 0x5b, + 0x56, 0x50, 0x59, 0x5d, 0x5d, 0x5c, 0x5a, 0x5d, 0x5d, 0x5d, 0x5b, 0x5d, + 0x5d, 0x5f, 0x60, 0x5f, 0x5e, 0x62, 0x63, 0x64, 0x64, 0x65, 0x68, 0x69, + 0x6a, 0x6b, 0x69, 0x66, 0x66, 0x66, 0x64, 0x63, 0x60, 0x5a, 0x4a, 
0x3f, + 0x31, 0x04, 0xef, 0xd9, 0xbe, 0xb7, 0xb6, 0xb2, 0xb2, 0xae, 0xa7, 0xab, + 0xaf, 0xaf, 0xb1, 0xb8, 0xbb, 0xb5, 0xb6, 0xb5, 0xab, 0xa7, 0xaf, 0xbd, + 0xba, 0xb9, 0xc4, 0xc2, 0xc1, 0xc0, 0xc0, 0xbc, 0xb3, 0xb5, 0xba, 0xb7, + 0xaf, 0xa3, 0x9e, 0xa0, 0xaa, 0xaf, 0xba, 0xc0, 0xb5, 0xb7, 0xbc, 0xc3, + 0xba, 0xb6, 0xbd, 0xb8, 0xb3, 0xbd, 0xcf, 0xd1, 0xcc, 0xc8, 0xcb, 0xce, + 0xcc, 0xd0, 0xd2, 0xcf, 0xd1, 0xd1, 0xc3, 0xbd, 0xbf, 0xca, 0xcf, 0xc3, + 0xbd, 0xc1, 0xcb, 0xc1, 0xc1, 0xca, 0xc8, 0xc8, 0xd3, 0xd7, 0xd1, 0xd4, + 0xd5, 0xd6, 0xcf, 0xc8, 0xc8, 0xbd, 0xc2, 0xbe, 0xbb, 0xb4, 0xb3, 0xb9, + 0xc3, 0xc8, 0xc8, 0xca, 0xcd, 0xcc, 0xcd, 0xcf, 0xce, 0xce, 0xd0, 0xce, + 0xce, 0xcb, 0xce, 0xd5, 0xd0, 0xd2, 0xcc, 0xca, 0xda, 0xe0, 0xda, 0xd8, + 0xd7, 0xd0, 0xc9, 0xdc, 0xe7, 0xda, 0xdd, 0xe3, 0xe0, 0xd7, 0xcd, 0xda, + 0x37, 0x35, 0x34, 0x34, 0x33, 0x32, 0x31, 0x2f, 0x2e, 0x2e, 0x2b, 0x29, + 0x25, 0x24, 0x21, 0x22, 0x1e, 0x14, 0x15, 0x16, 0x18, 0x1d, 0x20, 0x25, + 0x28, 0x2a, 0x29, 0x2a, 0x2a, 0x2b, 0x2e, 0x2f, 0x30, 0x30, 0x29, 0x23, + 0x25, 0x2b, 0x36, 0x3e, 0x46, 0x4a, 0x50, 0x4f, 0x4d, 0x4a, 0x45, 0x4c, + 0x57, 0x58, 0x57, 0x56, 0x58, 0x5a, 0x5b, 0x5d, 0x56, 0x4f, 0x59, 0x5d, + 0x5c, 0x5d, 0x5c, 0x5d, 0x60, 0x5e, 0x5c, 0x5c, 0x5d, 0x5f, 0x60, 0x60, + 0x61, 0x61, 0x64, 0x66, 0x64, 0x63, 0x66, 0x68, 0x69, 0x6a, 0x69, 0x67, + 0x66, 0x66, 0x64, 0x64, 0x64, 0x61, 0x59, 0x4a, 0x3a, 0x25, 0xf2, 0xe1, + 0xc8, 0xb9, 0xb8, 0xb7, 0xb0, 0xa7, 0xa2, 0xa7, 0xaa, 0xae, 0xaf, 0xb2, + 0xb5, 0xb0, 0xb3, 0xb6, 0xa9, 0x9c, 0xa2, 0xbc, 0xc1, 0xc2, 0xcb, 0xc3, + 0xc7, 0xc9, 0xc3, 0xc1, 0xba, 0xc1, 0xc1, 0xab, 0xad, 0xaa, 0xa4, 0xa9, + 0xb3, 0xc2, 0xcc, 0xc1, 0xb5, 0xb8, 0xc2, 0xba, 0xad, 0xaf, 0xc1, 0xba, + 0xb3, 0xbc, 0xcb, 0xd9, 0xd4, 0xd3, 0xd4, 0xd4, 0xd1, 0xcf, 0xd0, 0xd4, + 0xdd, 0xd9, 0xd8, 0xd4, 0xc6, 0xce, 0xd6, 0xc5, 0xc7, 0xc9, 0xd9, 0xe2, + 0xda, 0xdb, 0xcf, 0xd3, 0xd7, 0xd3, 0xca, 0xcb, 0xce, 0xcd, 0xd4, 0xd0, + 0xc8, 0xc1, 0xc6, 0xbe, 0xb5, 0xb2, 0xaf, 0xb2, 0xc2, 0xd1, 0xc3, 
0xbb, + 0xbc, 0xc0, 0xc5, 0xcf, 0xce, 0xce, 0xcc, 0xd1, 0xce, 0xce, 0xd2, 0xd4, + 0xd1, 0xcc, 0xc8, 0xd3, 0xe1, 0xea, 0xe0, 0xda, 0xdb, 0xdd, 0xd4, 0xda, + 0xe1, 0xde, 0xda, 0xda, 0xe1, 0xda, 0xc7, 0xd0, 0x36, 0x35, 0x34, 0x33, + 0x32, 0x32, 0x31, 0x32, 0x30, 0x2e, 0x2b, 0x2c, 0x2d, 0x2c, 0x2c, 0x2b, + 0x22, 0x19, 0x18, 0x1b, 0x1f, 0x23, 0x22, 0x25, 0x29, 0x28, 0x29, 0x29, + 0x29, 0x27, 0x2b, 0x2c, 0x2c, 0x2d, 0x2c, 0x28, 0x23, 0x25, 0x30, 0x3c, + 0x45, 0x4d, 0x50, 0x50, 0x50, 0x4e, 0x49, 0x50, 0x57, 0x59, 0x5b, 0x57, + 0x5a, 0x5a, 0x5d, 0x5d, 0x55, 0x51, 0x59, 0x5e, 0x5b, 0x5e, 0x5d, 0x5d, + 0x60, 0x5e, 0x5d, 0x5c, 0x5b, 0x5e, 0x60, 0x61, 0x63, 0x62, 0x64, 0x66, + 0x63, 0x63, 0x64, 0x65, 0x67, 0x69, 0x69, 0x68, 0x65, 0x66, 0x65, 0x64, + 0x65, 0x64, 0x62, 0x57, 0x42, 0x2d, 0x17, 0xf0, 0xe2, 0xcf, 0xba, 0xb3, + 0xa8, 0xa0, 0xa2, 0xa6, 0xa8, 0xa7, 0xa7, 0xa9, 0xaf, 0xaa, 0xab, 0xb8, + 0xab, 0x9d, 0xa3, 0xc0, 0xc9, 0xc9, 0xd1, 0xc5, 0xcc, 0xd0, 0xcd, 0xc0, + 0xbb, 0xca, 0xd8, 0xb5, 0xb1, 0xb8, 0xb5, 0xb6, 0xbc, 0xce, 0xd4, 0xd0, + 0xbd, 0xb3, 0xc8, 0xc1, 0xb3, 0xb5, 0xc9, 0xc1, 0xbe, 0xc6, 0xd1, 0xe1, + 0xd8, 0xd8, 0xd7, 0xd6, 0xd0, 0xd1, 0xd3, 0xd3, 0xda, 0xda, 0xdc, 0xd8, + 0xcf, 0xd2, 0xd9, 0xd5, 0xd4, 0xd0, 0xd3, 0xd7, 0xda, 0xda, 0xd4, 0xd1, + 0xd2, 0xce, 0xcc, 0xc5, 0xcd, 0xc9, 0xd3, 0xc3, 0xbc, 0xc8, 0xcc, 0xbf, + 0xba, 0xb9, 0xb5, 0xb2, 0xb6, 0xd2, 0xce, 0xbd, 0xb7, 0xb5, 0xbf, 0xce, + 0xd5, 0xd5, 0xda, 0xd6, 0xcc, 0xd0, 0xd5, 0xd4, 0xc8, 0xc6, 0xcf, 0xd6, + 0xe1, 0xe5, 0xdf, 0xdb, 0xd8, 0xdd, 0xdc, 0xdd, 0xda, 0xd9, 0xd3, 0xd6, + 0xe0, 0xe4, 0xdb, 0xd7, 0x36, 0x35, 0x34, 0x34, 0x34, 0x34, 0x33, 0x33, + 0x31, 0x2f, 0x2e, 0x2e, 0x2f, 0x31, 0x31, 0x2b, 0x22, 0x1b, 0x17, 0x1b, + 0x1f, 0x22, 0x23, 0x24, 0x27, 0x26, 0x25, 0x23, 0x24, 0x27, 0x2d, 0x32, + 0x2f, 0x2b, 0x2b, 0x2d, 0x2b, 0x29, 0x30, 0x3c, 0x45, 0x4e, 0x53, 0x53, + 0x54, 0x53, 0x4d, 0x52, 0x58, 0x5b, 0x5d, 0x59, 0x5b, 0x5a, 0x5d, 0x5a, + 0x54, 0x57, 0x5b, 0x60, 0x5d, 0x5e, 0x5e, 0x5d, 0x5e, 0x5f, 0x5e, 
0x5d, + 0x5a, 0x5d, 0x60, 0x60, 0x63, 0x63, 0x63, 0x63, 0x63, 0x64, 0x66, 0x65, + 0x67, 0x69, 0x69, 0x68, 0x66, 0x65, 0x65, 0x66, 0x67, 0x68, 0x66, 0x61, + 0x50, 0x31, 0x14, 0x06, 0xed, 0xc9, 0xbb, 0xaf, 0x9f, 0xa7, 0xb6, 0xb3, + 0xaf, 0xaa, 0xa5, 0xa2, 0xaa, 0xaa, 0xa3, 0xb5, 0xb3, 0xae, 0xb9, 0xc5, + 0xc9, 0xd0, 0xcf, 0xce, 0xd1, 0xd3, 0xc8, 0xc1, 0xb8, 0xb9, 0xd4, 0xce, + 0xbf, 0xcb, 0xce, 0xce, 0xcf, 0xd5, 0xd8, 0xd7, 0xcc, 0xc9, 0xd2, 0xd3, + 0xcb, 0xcc, 0xd1, 0xcb, 0xcb, 0xd3, 0xd4, 0xd1, 0xd0, 0xd3, 0xcd, 0xcb, + 0xcd, 0xd8, 0xd5, 0xce, 0xd4, 0xda, 0xda, 0xd8, 0xd3, 0xd6, 0xdb, 0xdc, + 0xdb, 0xd8, 0xd2, 0xd3, 0xd8, 0xca, 0xcc, 0xc9, 0xd0, 0xcf, 0xcc, 0xd0, + 0xd4, 0xd9, 0xda, 0xcd, 0xc5, 0xd1, 0xd4, 0xc8, 0xc7, 0xcc, 0xcf, 0xce, + 0xbb, 0xc0, 0xd4, 0xc8, 0xbb, 0xbb, 0xc1, 0xc3, 0xc8, 0xcf, 0xd3, 0xc8, + 0xcb, 0xd1, 0xda, 0xe1, 0xdc, 0xd0, 0xcd, 0xd2, 0xdf, 0xe5, 0xdd, 0xe1, + 0xe3, 0xda, 0xda, 0xdb, 0xd6, 0xd5, 0xd3, 0xd7, 0xdb, 0xe3, 0xe7, 0xe1, + 0x37, 0x35, 0x33, 0x35, 0x36, 0x35, 0x34, 0x33, 0x33, 0x33, 0x34, 0x31, + 0x31, 0x31, 0x31, 0x2d, 0x24, 0x1b, 0x16, 0x19, 0x1e, 0x1f, 0x21, 0x21, + 0x21, 0x23, 0x23, 0x25, 0x27, 0x2c, 0x31, 0x36, 0x36, 0x31, 0x2f, 0x36, + 0x36, 0x37, 0x3e, 0x43, 0x48, 0x4f, 0x54, 0x56, 0x57, 0x57, 0x50, 0x54, + 0x58, 0x5d, 0x5d, 0x5a, 0x5a, 0x5b, 0x5d, 0x5a, 0x57, 0x5a, 0x5d, 0x62, + 0x60, 0x5e, 0x60, 0x5d, 0x5e, 0x5e, 0x5d, 0x5d, 0x5a, 0x5b, 0x5f, 0x61, + 0x62, 0x63, 0x63, 0x63, 0x63, 0x63, 0x64, 0x65, 0x65, 0x65, 0x66, 0x67, + 0x67, 0x64, 0x65, 0x67, 0x69, 0x69, 0x69, 0x65, 0x60, 0x47, 0x1a, 0xf1, + 0xeb, 0xdf, 0xc3, 0xab, 0x9e, 0xa9, 0xc3, 0xc3, 0xbf, 0xb2, 0xac, 0xa6, + 0xaa, 0xb8, 0xb7, 0xc3, 0xc9, 0xc3, 0xc8, 0xc9, 0xc6, 0xd3, 0xce, 0xd0, + 0xd2, 0xd3, 0xc9, 0xcf, 0xc8, 0xc7, 0xcd, 0xd4, 0xc9, 0xca, 0xc9, 0xd2, + 0xd8, 0xdb, 0xe1, 0xd1, 0xca, 0xce, 0xd3, 0xd6, 0xd5, 0xd5, 0xd3, 0xd4, + 0xda, 0xda, 0xd7, 0xc7, 0xc8, 0xc9, 0xc6, 0xc3, 0xc5, 0xce, 0xdc, 0xcf, + 0xd3, 0xd8, 0xda, 0xe0, 0xdb, 0xd5, 0xe0, 0xde, 0xd7, 0xda, 0xd7, 
0xe3, + 0xdf, 0xc9, 0xc1, 0xc1, 0xce, 0xcf, 0xd2, 0xd8, 0xe1, 0xdd, 0xdf, 0xd4, + 0xcd, 0xdb, 0xda, 0xdb, 0xdb, 0xd1, 0xcb, 0xd1, 0xd1, 0xcd, 0xd3, 0xda, + 0xcb, 0xc8, 0xd0, 0xce, 0xc7, 0xc6, 0xbd, 0xbb, 0xc4, 0xcc, 0xd5, 0xda, + 0xe1, 0xd9, 0xcc, 0xcc, 0xe0, 0xea, 0xe1, 0xe0, 0xe0, 0xe1, 0xdc, 0xd8, + 0xd8, 0xd5, 0xd4, 0xd2, 0xd6, 0xde, 0xdb, 0xe5, 0x36, 0x33, 0x34, 0x33, + 0x32, 0x32, 0x32, 0x32, 0x33, 0x33, 0x35, 0x36, 0x35, 0x33, 0x31, 0x2b, + 0x23, 0x1c, 0x18, 0x19, 0x1b, 0x1e, 0x1f, 0x1f, 0x21, 0x23, 0x25, 0x27, + 0x2a, 0x31, 0x37, 0x39, 0x3a, 0x3a, 0x3e, 0x3e, 0x40, 0x45, 0x4a, 0x4a, + 0x4b, 0x50, 0x54, 0x57, 0x57, 0x57, 0x56, 0x56, 0x5a, 0x5e, 0x5d, 0x5b, + 0x5b, 0x5d, 0x5c, 0x5d, 0x59, 0x5c, 0x5f, 0x63, 0x62, 0x5f, 0x60, 0x5e, + 0x5d, 0x5f, 0x5d, 0x5d, 0x5c, 0x5b, 0x5d, 0x62, 0x60, 0x63, 0x63, 0x62, + 0x64, 0x63, 0x63, 0x63, 0x63, 0x64, 0x63, 0x65, 0x65, 0x63, 0x65, 0x67, + 0x68, 0x69, 0x69, 0x68, 0x64, 0x59, 0x35, 0xf9, 0xcb, 0xc6, 0xcc, 0xb8, + 0xae, 0xb9, 0xcc, 0xd5, 0xcf, 0xc2, 0xc6, 0xc0, 0xc7, 0xd4, 0xcf, 0xd5, + 0xd9, 0xd3, 0xd7, 0xd2, 0xce, 0xe3, 0xd2, 0xcc, 0xcd, 0xd4, 0xd5, 0xd3, + 0xd7, 0xe1, 0xdb, 0xcf, 0xcc, 0xd0, 0xcf, 0xd4, 0xd5, 0xd4, 0xce, 0xc6, + 0xcc, 0xd0, 0xd6, 0xe5, 0xe1, 0xe0, 0xda, 0xdd, 0xde, 0xdc, 0xd8, 0xd2, + 0xce, 0xd2, 0xd2, 0xd2, 0xd2, 0xd5, 0xd4, 0xca, 0xd2, 0xda, 0xdb, 0xda, + 0xda, 0xd6, 0xdb, 0xd9, 0xda, 0xd8, 0xd4, 0xdd, 0xda, 0xce, 0xc7, 0xca, + 0xd2, 0xd3, 0xd2, 0xd4, 0xdb, 0xe1, 0xe3, 0xdf, 0xda, 0xe3, 0xea, 0xe2, + 0xdc, 0xd5, 0xd2, 0xd4, 0xd9, 0xda, 0xde, 0xe7, 0xe3, 0xd6, 0xd4, 0xd4, + 0xd3, 0xce, 0xce, 0xd9, 0xd5, 0xd1, 0xce, 0xd1, 0xd6, 0xd6, 0xd6, 0xce, + 0xdc, 0xe7, 0xe2, 0xda, 0xe0, 0xe4, 0xde, 0xda, 0xd2, 0xce, 0xd4, 0xcb, + 0xd7, 0xe3, 0xd8, 0xde, 0x35, 0x33, 0x36, 0x34, 0x33, 0x33, 0x35, 0x35, + 0x35, 0x35, 0x35, 0x36, 0x35, 0x33, 0x31, 0x2d, 0x27, 0x21, 0x1d, 0x1c, + 0x1b, 0x1d, 0x1f, 0x21, 0x24, 0x2a, 0x2e, 0x30, 0x31, 0x36, 0x3e, 0x44, + 0x44, 0x43, 0x46, 0x47, 0x48, 0x4c, 0x4c, 0x4e, 0x4f, 0x55, 0x55, 
0x57, + 0x57, 0x58, 0x59, 0x58, 0x5c, 0x5e, 0x5d, 0x5b, 0x5d, 0x5d, 0x5d, 0x5e, + 0x5b, 0x5f, 0x62, 0x64, 0x63, 0x5f, 0x60, 0x5f, 0x5c, 0x5d, 0x5d, 0x5d, + 0x5d, 0x5c, 0x5b, 0x60, 0x60, 0x60, 0x62, 0x62, 0x63, 0x63, 0x63, 0x63, + 0x63, 0x64, 0x64, 0x64, 0x64, 0x63, 0x65, 0x67, 0x66, 0x69, 0x6a, 0x69, + 0x65, 0x5d, 0x46, 0x15, 0xe8, 0xd3, 0xce, 0xd9, 0xd8, 0xd7, 0xd5, 0xda, + 0xe0, 0xea, 0xe3, 0xda, 0xd9, 0xd1, 0xcd, 0xd0, 0xcf, 0xd4, 0xdd, 0xca, + 0xc8, 0xde, 0xdc, 0xce, 0xd1, 0xd0, 0xdd, 0xd4, 0xd2, 0xd8, 0xcf, 0xc3, + 0xc8, 0xce, 0xd0, 0xd1, 0xcd, 0xc8, 0xc6, 0xc2, 0xcb, 0xdc, 0xe6, 0xeb, + 0xe1, 0xd6, 0xd2, 0xdd, 0xde, 0xdc, 0xda, 0xd8, 0xd6, 0xdb, 0xde, 0xe7, + 0xdb, 0xd6, 0xd8, 0xda, 0xd8, 0xd3, 0xce, 0xca, 0xd2, 0xd5, 0xcc, 0xcb, + 0xd3, 0xcc, 0xcd, 0xcc, 0xd1, 0xcc, 0xca, 0xd1, 0xda, 0xda, 0xdd, 0xdd, + 0xde, 0xda, 0xda, 0xda, 0xda, 0xd9, 0xe6, 0xdb, 0xd9, 0xda, 0xd2, 0xd6, + 0xe0, 0xe4, 0xe3, 0xe7, 0xec, 0xe1, 0xda, 0xda, 0xdb, 0xd7, 0xd6, 0xdf, + 0xdf, 0xe2, 0xd6, 0xd4, 0xd7, 0xdd, 0xd7, 0xd1, 0xd5, 0xe7, 0xe2, 0xd9, + 0xe1, 0xe6, 0xe4, 0xdf, 0xd4, 0xcd, 0xd4, 0xce, 0xda, 0xdb, 0xd5, 0xda, + 0x34, 0x33, 0x34, 0x34, 0x34, 0x35, 0x37, 0x38, 0x37, 0x37, 0x37, 0x37, + 0x36, 0x35, 0x31, 0x2d, 0x29, 0x25, 0x20, 0x20, 0x20, 0x23, 0x25, 0x27, + 0x2d, 0x34, 0x37, 0x37, 0x38, 0x3c, 0x44, 0x46, 0x44, 0x45, 0x4a, 0x4c, + 0x4d, 0x4d, 0x50, 0x52, 0x53, 0x54, 0x57, 0x5a, 0x58, 0x5b, 0x5c, 0x5c, + 0x5e, 0x5e, 0x5b, 0x5c, 0x5e, 0x5d, 0x5f, 0x5f, 0x5c, 0x61, 0x61, 0x63, + 0x63, 0x60, 0x5f, 0x5e, 0x5b, 0x5b, 0x5c, 0x5d, 0x5e, 0x5d, 0x5b, 0x5e, + 0x60, 0x5f, 0x62, 0x63, 0x63, 0x63, 0x63, 0x63, 0x63, 0x64, 0x64, 0x64, + 0x65, 0x65, 0x65, 0x67, 0x68, 0x6a, 0x6a, 0x69, 0x67, 0x61, 0x53, 0x38, + 0x12, 0xf3, 0xe0, 0xdc, 0xd4, 0xcd, 0xcc, 0xd3, 0xda, 0xda, 0xdb, 0xd8, + 0xd5, 0xd1, 0xd2, 0xcc, 0xc2, 0xc8, 0xcb, 0xc1, 0xbc, 0xcd, 0xd5, 0xd6, + 0xd4, 0xc3, 0xcf, 0xd9, 0xcf, 0xd2, 0xcf, 0xcc, 0xd1, 0xd5, 0xd8, 0xd7, + 0xd3, 0xcc, 0xce, 0xd0, 0xd8, 0xe2, 0xda, 0xd6, 0xd4, 0xc7, 0xc9, 
0xd2, + 0xd9, 0xdc, 0xda, 0xda, 0xda, 0xd9, 0xd3, 0xdf, 0xd7, 0xd4, 0xde, 0xe1, + 0xe0, 0xd8, 0xc9, 0xbe, 0xc9, 0xd3, 0xc5, 0xca, 0xce, 0xcb, 0xc1, 0xc7, + 0xd1, 0xc9, 0xc1, 0xca, 0xd4, 0xd0, 0xcf, 0xd5, 0xd7, 0xce, 0xcb, 0xcd, + 0xd6, 0xd7, 0xdf, 0xd7, 0xd5, 0xd0, 0xd1, 0xde, 0xe0, 0xdc, 0xe2, 0xee, + 0xea, 0xdf, 0xdd, 0xd8, 0xe1, 0xe7, 0xdb, 0xd4, 0xda, 0xda, 0xcf, 0xcd, + 0xcf, 0xda, 0xd1, 0xcd, 0xd4, 0xe7, 0xe7, 0xe0, 0xe0, 0xd8, 0xdf, 0xe9, + 0xdc, 0xd7, 0xda, 0xda, 0xdb, 0xd6, 0xdb, 0xdb, 0x33, 0x33, 0x34, 0x34, + 0x35, 0x36, 0x38, 0x38, 0x38, 0x38, 0x38, 0x38, 0x39, 0x37, 0x34, 0x30, + 0x2c, 0x29, 0x25, 0x27, 0x25, 0x27, 0x29, 0x2b, 0x31, 0x34, 0x37, 0x39, + 0x40, 0x42, 0x44, 0x45, 0x42, 0x45, 0x4a, 0x4b, 0x4e, 0x4f, 0x52, 0x55, + 0x55, 0x55, 0x57, 0x5b, 0x58, 0x5c, 0x5d, 0x61, 0x60, 0x5e, 0x59, 0x5c, + 0x5e, 0x5e, 0x61, 0x5d, 0x5b, 0x61, 0x60, 0x63, 0x65, 0x62, 0x5f, 0x5d, + 0x58, 0x5b, 0x5a, 0x59, 0x5e, 0x5d, 0x5b, 0x5d, 0x61, 0x5d, 0x60, 0x62, + 0x62, 0x62, 0x63, 0x63, 0x63, 0x64, 0x64, 0x64, 0x66, 0x67, 0x66, 0x67, + 0x69, 0x6a, 0x6a, 0x69, 0x69, 0x63, 0x59, 0x4c, 0x3d, 0x16, 0xec, 0xdc, + 0xd9, 0xd6, 0xd3, 0xd4, 0xd4, 0xd4, 0xd6, 0xcd, 0xcb, 0xd6, 0xd3, 0xd5, + 0xcc, 0xc5, 0xc9, 0xcc, 0xc7, 0xc5, 0xd1, 0xd6, 0xcf, 0xd0, 0xc8, 0xd8, + 0xd9, 0xdb, 0xe1, 0xda, 0xd9, 0xe0, 0xdd, 0xd8, 0xd3, 0xd0, 0xd2, 0xd1, + 0xe1, 0xe5, 0xd4, 0xd3, 0xd4, 0xcc, 0xcb, 0xce, 0xd7, 0xda, 0xda, 0xdf, + 0xdc, 0xd3, 0xc8, 0xcd, 0xc2, 0xc3, 0xcc, 0xd4, 0xd9, 0xd4, 0xca, 0xcb, + 0xd3, 0xd6, 0xce, 0xcd, 0xd1, 0xd2, 0xc8, 0xcc, 0xcf, 0xc8, 0xc3, 0xc6, + 0xc5, 0xc7, 0xc4, 0xca, 0xcc, 0xca, 0xd1, 0xd1, 0xd7, 0xdd, 0xd8, 0xe0, + 0xd8, 0xd2, 0xd1, 0xd8, 0xda, 0xd3, 0xdb, 0xe8, 0xe1, 0xd6, 0xd9, 0xdd, + 0xdf, 0xe7, 0xde, 0xd9, 0xdf, 0xdc, 0xce, 0xc2, 0xcb, 0xdb, 0xd5, 0xcf, + 0xd4, 0xe6, 0xe9, 0xe4, 0xe1, 0xd5, 0xd6, 0xea, 0xe4, 0xd6, 0xda, 0xdc, + 0xe0, 0xd5, 0xdd, 0xe3, 0x33, 0x33, 0x36, 0x37, 0x39, 0x3b, 0x3b, 0x3b, + 0x3b, 0x3a, 0x3b, 0x3a, 0x39, 0x37, 0x36, 0x31, 0x2b, 0x2b, 0x29, 
0x2c, + 0x2b, 0x2a, 0x29, 0x2b, 0x30, 0x33, 0x37, 0x3e, 0x44, 0x46, 0x44, 0x40, + 0x3f, 0x44, 0x47, 0x4d, 0x4e, 0x4f, 0x54, 0x57, 0x56, 0x53, 0x54, 0x58, + 0x57, 0x5c, 0x5d, 0x62, 0x5f, 0x5d, 0x58, 0x5c, 0x5d, 0x5f, 0x61, 0x5c, + 0x59, 0x5f, 0x60, 0x62, 0x64, 0x63, 0x60, 0x5d, 0x57, 0x5a, 0x5a, 0x58, + 0x5c, 0x5d, 0x5b, 0x5c, 0x61, 0x5d, 0x5d, 0x60, 0x61, 0x61, 0x63, 0x63, + 0x63, 0x63, 0x64, 0x64, 0x65, 0x67, 0x67, 0x68, 0x69, 0x69, 0x6b, 0x6b, + 0x69, 0x67, 0x63, 0x5b, 0x52, 0x41, 0x19, 0xf9, 0xe7, 0xdd, 0xd6, 0xce, + 0xce, 0xd4, 0xcc, 0xcb, 0xd2, 0xdb, 0xd8, 0xd2, 0xcf, 0xc6, 0xce, 0xd5, + 0xd9, 0xce, 0xd0, 0xd5, 0xc9, 0xc9, 0xcd, 0xd6, 0xe2, 0xe1, 0xe3, 0xe0, + 0xda, 0xe5, 0xe7, 0xe5, 0xd9, 0xd6, 0xd5, 0xd5, 0xe3, 0xe2, 0xd6, 0xd4, + 0xd9, 0xd3, 0xcf, 0xcb, 0xd8, 0xda, 0xdc, 0xdb, 0xdd, 0xd8, 0xd1, 0xcb, + 0xc5, 0xc6, 0xc4, 0xc4, 0xc9, 0xca, 0xc9, 0xd3, 0xda, 0xe1, 0xd9, 0xcd, + 0xce, 0xcc, 0xcf, 0xd8, 0xd6, 0xca, 0xc1, 0xc2, 0xbc, 0xbd, 0xc3, 0xc1, + 0xc8, 0xc7, 0xd5, 0xd2, 0xcd, 0xe0, 0xe1, 0xe8, 0xdf, 0xd4, 0xcf, 0xd2, + 0xde, 0xd4, 0xd2, 0xdd, 0xd8, 0xd2, 0xd3, 0xd1, 0xde, 0xe3, 0xd8, 0xd4, + 0xe2, 0xe1, 0xd4, 0xc8, 0xcd, 0xce, 0xd6, 0xd8, 0xdc, 0xe8, 0xe7, 0xe1, + 0xe1, 0xdc, 0xce, 0xda, 0xe7, 0xe0, 0xdc, 0xdd, 0xde, 0xdf, 0xde, 0xe0, + 0x33, 0x35, 0x37, 0x37, 0x39, 0x39, 0x3a, 0x3a, 0x39, 0x3a, 0x3b, 0x38, + 0x36, 0x36, 0x33, 0x2f, 0x2b, 0x2b, 0x2b, 0x2c, 0x2f, 0x2f, 0x30, 0x31, + 0x34, 0x39, 0x3f, 0x44, 0x49, 0x48, 0x3f, 0x3d, 0x44, 0x48, 0x49, 0x4c, + 0x4b, 0x4e, 0x50, 0x57, 0x56, 0x52, 0x53, 0x54, 0x54, 0x5b, 0x5b, 0x60, + 0x5d, 0x5b, 0x5a, 0x5c, 0x5a, 0x5e, 0x62, 0x5a, 0x58, 0x5d, 0x5e, 0x60, + 0x61, 0x61, 0x5e, 0x5f, 0x59, 0x5b, 0x5a, 0x57, 0x58, 0x5b, 0x5c, 0x5d, + 0x60, 0x5e, 0x5d, 0x5e, 0x60, 0x62, 0x62, 0x63, 0x63, 0x63, 0x64, 0x65, + 0x66, 0x67, 0x68, 0x68, 0x68, 0x69, 0x6a, 0x6b, 0x6a, 0x69, 0x68, 0x64, + 0x5d, 0x57, 0x44, 0x1f, 0xf9, 0xdc, 0xda, 0xd2, 0xca, 0xce, 0xca, 0xc1, + 0xcb, 0xd5, 0xd4, 0xc8, 0xd4, 0xd5, 0xd4, 0xda, 0xdb, 0xd7, 0xd6, 
0xd4, + 0xce, 0xd4, 0xde, 0xde, 0xda, 0xe1, 0xe1, 0xdb, 0xdc, 0xe5, 0xe8, 0xe1, + 0xdd, 0xde, 0xdb, 0xd7, 0xdf, 0xdc, 0xdc, 0xdb, 0xda, 0xd8, 0xd7, 0xd5, + 0xdc, 0xdc, 0xdb, 0xd8, 0xde, 0xdd, 0xda, 0xdb, 0xda, 0xdf, 0xd7, 0xce, + 0xd4, 0xda, 0xd2, 0xd6, 0xdf, 0xe3, 0xdd, 0xda, 0xd2, 0xc3, 0xcb, 0xe1, + 0xdf, 0xd7, 0xd0, 0xcd, 0xcc, 0xcd, 0xd0, 0xcc, 0xc0, 0xc0, 0xcc, 0xcc, + 0xcb, 0xd4, 0xd5, 0xda, 0xdb, 0xda, 0xcf, 0xca, 0xcf, 0xd3, 0xd4, 0xe1, + 0xe1, 0xd4, 0xca, 0xce, 0xe2, 0xe1, 0xd9, 0xda, 0xe3, 0xe4, 0xe2, 0xdd, + 0xd7, 0xd3, 0xd4, 0xd7, 0xdd, 0xed, 0xe5, 0xde, 0xe1, 0xe1, 0xda, 0xe7, + 0xe0, 0xe2, 0xdf, 0xd8, 0xd7, 0xe0, 0xe1, 0xe0, 0x34, 0x35, 0x35, 0x35, + 0x35, 0x35, 0x36, 0x35, 0x37, 0x35, 0x35, 0x35, 0x35, 0x35, 0x33, 0x2f, + 0x2e, 0x2f, 0x31, 0x32, 0x35, 0x35, 0x35, 0x36, 0x39, 0x3e, 0x42, 0x48, + 0x46, 0x44, 0x41, 0x45, 0x49, 0x4a, 0x48, 0x48, 0x4b, 0x4e, 0x51, 0x55, + 0x56, 0x50, 0x4e, 0x50, 0x53, 0x59, 0x5a, 0x5c, 0x59, 0x57, 0x5b, 0x5a, + 0x5a, 0x5d, 0x5f, 0x59, 0x58, 0x5b, 0x5d, 0x5e, 0x5e, 0x5f, 0x5d, 0x5f, + 0x5b, 0x5b, 0x5d, 0x59, 0x58, 0x5a, 0x5a, 0x5d, 0x5e, 0x5d, 0x5d, 0x5e, + 0x5f, 0x61, 0x62, 0x62, 0x62, 0x63, 0x64, 0x65, 0x67, 0x67, 0x67, 0x68, + 0x69, 0x69, 0x6a, 0x6b, 0x6b, 0x6a, 0x69, 0x69, 0x67, 0x63, 0x5d, 0x4c, + 0x26, 0xfc, 0xec, 0xe2, 0xd7, 0xd8, 0xd8, 0xd0, 0xd4, 0xda, 0xd4, 0xd3, + 0xd5, 0xdb, 0xd5, 0xdf, 0xdb, 0xdc, 0xd2, 0xd1, 0xd0, 0xe0, 0xe3, 0xe1, + 0xd4, 0xd4, 0xdb, 0xdb, 0xda, 0xd9, 0xe0, 0xda, 0xda, 0xe2, 0xda, 0xd6, + 0xdf, 0xe1, 0xe0, 0xe0, 0xdb, 0xd9, 0xdd, 0xe0, 0xd1, 0xd1, 0xd8, 0xd4, + 0xdd, 0xe3, 0xdd, 0xed, 0xe2, 0xe1, 0xdd, 0xdb, 0xde, 0xe3, 0xe6, 0xdf, + 0xda, 0xdb, 0xe3, 0xe1, 0xd9, 0xce, 0xd8, 0xe1, 0xe1, 0xe5, 0xd2, 0xc7, + 0xd4, 0xdc, 0xdd, 0xd9, 0xd2, 0xcc, 0xd4, 0xd8, 0xd9, 0xcc, 0xcf, 0xd4, + 0xd4, 0xe1, 0xd4, 0xc6, 0xc4, 0xc7, 0xcc, 0xdd, 0xdf, 0xd9, 0xcf, 0xd5, + 0xe0, 0xe1, 0xd9, 0xdd, 0xe2, 0xe8, 0xf2, 0xe1, 0xd5, 0xd4, 0xce, 0xcc, + 0xda, 0xf2, 0xe3, 0xe1, 0xe1, 0xe1, 0xdf, 0xea, 0xe3, 0xdb, 0xda, 
0xd7, + 0xdb, 0xdb, 0xdc, 0xd6, 0x35, 0x34, 0x37, 0x36, 0x36, 0x37, 0x36, 0x37, + 0x3b, 0x3a, 0x3a, 0x39, 0x39, 0x37, 0x33, 0x33, 0x34, 0x33, 0x34, 0x35, + 0x36, 0x35, 0x37, 0x38, 0x39, 0x3e, 0x43, 0x42, 0x43, 0x44, 0x44, 0x49, + 0x4b, 0x46, 0x44, 0x48, 0x4e, 0x50, 0x53, 0x53, 0x52, 0x4a, 0x48, 0x4e, + 0x53, 0x57, 0x57, 0x58, 0x58, 0x55, 0x5a, 0x5a, 0x5b, 0x5d, 0x5d, 0x5c, + 0x5b, 0x5b, 0x5d, 0x5e, 0x5d, 0x5c, 0x5b, 0x5f, 0x5d, 0x59, 0x5d, 0x5b, + 0x59, 0x59, 0x59, 0x5c, 0x5e, 0x5e, 0x5d, 0x5e, 0x5f, 0x5f, 0x60, 0x60, + 0x62, 0x63, 0x64, 0x65, 0x68, 0x69, 0x68, 0x68, 0x69, 0x69, 0x6b, 0x6b, + 0x6c, 0x6c, 0x6b, 0x6a, 0x67, 0x62, 0x5e, 0x59, 0x4a, 0x2e, 0x11, 0xf6, + 0xdc, 0xd0, 0xd4, 0xda, 0xd6, 0xe0, 0xd6, 0xd6, 0xd7, 0xdd, 0xd9, 0xe3, + 0xdb, 0xe1, 0xd4, 0xd8, 0xda, 0xe1, 0xe0, 0xe0, 0xdb, 0xe2, 0xde, 0xd9, + 0xdb, 0xd6, 0xdd, 0xd9, 0xda, 0xd6, 0xd2, 0xd3, 0xdb, 0xe2, 0xdd, 0xdd, + 0xdd, 0xd7, 0xda, 0xe0, 0xd4, 0xd6, 0xe2, 0xe4, 0xdf, 0xe7, 0xe2, 0xe7, + 0xe5, 0xda, 0xdb, 0xe1, 0xe1, 0xe4, 0xe7, 0xdf, 0xd5, 0xd8, 0xde, 0xe3, + 0xda, 0xd5, 0xdd, 0xe0, 0xe3, 0xe8, 0xe1, 0xda, 0xdd, 0xe5, 0xe2, 0xdc, + 0xde, 0xda, 0xd3, 0xd1, 0xc9, 0xc6, 0xcf, 0xda, 0xd8, 0xd2, 0xd2, 0xcf, + 0xcc, 0xd4, 0xd5, 0xdb, 0xde, 0xdc, 0xe2, 0xdc, 0xd6, 0xdd, 0xdb, 0xd9, + 0xde, 0xe4, 0xf3, 0xe5, 0xd7, 0xd5, 0xce, 0xd0, 0xe2, 0xf3, 0xdf, 0xd9, + 0xda, 0xdf, 0xdd, 0xd1, 0xd4, 0xe7, 0xe7, 0xdd, 0xdc, 0xd7, 0xd6, 0xd5, + 0x30, 0x31, 0x30, 0x2f, 0x31, 0x35, 0x37, 0x3c, 0x3a, 0x39, 0x37, 0x38, + 0x38, 0x37, 0x37, 0x37, 0x37, 0x38, 0x37, 0x37, 0x37, 0x39, 0x38, 0x3a, + 0x3c, 0x3e, 0x3c, 0x3f, 0x44, 0x43, 0x47, 0x4c, 0x4a, 0x46, 0x48, 0x4a, + 0x51, 0x54, 0x55, 0x54, 0x50, 0x46, 0x48, 0x4c, 0x52, 0x58, 0x54, 0x53, + 0x57, 0x57, 0x5a, 0x5a, 0x5c, 0x5f, 0x5c, 0x5c, 0x5c, 0x5a, 0x5d, 0x5e, + 0x5c, 0x5a, 0x59, 0x5e, 0x5f, 0x5b, 0x5c, 0x5d, 0x5a, 0x5b, 0x59, 0x5a, + 0x5e, 0x5f, 0x5d, 0x5e, 0x60, 0x5e, 0x5e, 0x5f, 0x61, 0x63, 0x63, 0x64, + 0x68, 0x68, 0x69, 0x69, 0x69, 0x6a, 0x6b, 0x6b, 0x6c, 0x6d, 0x6d, 
0x6c, + 0x69, 0x63, 0x5c, 0x54, 0x4a, 0x3e, 0x1f, 0x06, 0xed, 0xd6, 0xd9, 0xe2, + 0xde, 0xe6, 0xe6, 0xde, 0xe5, 0xe1, 0xdf, 0xe2, 0xe1, 0xe5, 0xdc, 0xda, + 0xdc, 0xde, 0xdd, 0xdc, 0xd6, 0xed, 0xe7, 0xd3, 0xd8, 0xd9, 0xda, 0xdb, + 0xda, 0xd5, 0xd5, 0xce, 0xd8, 0xda, 0xda, 0xda, 0xe0, 0xdf, 0xd6, 0xe5, + 0xe3, 0xdb, 0xe5, 0xe9, 0xeb, 0xe7, 0xdc, 0xd9, 0xe1, 0xd8, 0xd9, 0xe2, + 0xe3, 0xe1, 0xe1, 0xde, 0xda, 0xcf, 0xd7, 0xe6, 0xdd, 0xd6, 0xda, 0xde, + 0xdf, 0xdf, 0xe1, 0xe2, 0xe0, 0xe7, 0xe1, 0xdf, 0xe0, 0xd4, 0xda, 0xdb, + 0xce, 0xce, 0xd6, 0xde, 0xde, 0xcd, 0xcf, 0xdd, 0xca, 0xce, 0xda, 0xd9, + 0xdd, 0xe1, 0xe0, 0xe3, 0xd8, 0xd9, 0xe2, 0xd8, 0xd4, 0xd9, 0xef, 0xe1, + 0xd0, 0xd5, 0xd6, 0xdb, 0xe7, 0xed, 0xd9, 0xce, 0xd4, 0xdb, 0xdf, 0xdf, + 0xcd, 0xd4, 0xe1, 0xe3, 0xe0, 0xd7, 0xd5, 0xd4, 0x2e, 0x30, 0x32, 0x33, + 0x37, 0x38, 0x38, 0x39, 0x38, 0x38, 0x38, 0x3a, 0x39, 0x38, 0x39, 0x38, + 0x36, 0x36, 0x36, 0x37, 0x39, 0x39, 0x3c, 0x3c, 0x3b, 0x39, 0x3f, 0x42, + 0x41, 0x46, 0x4a, 0x48, 0x44, 0x42, 0x49, 0x4f, 0x56, 0x55, 0x55, 0x52, + 0x4c, 0x47, 0x49, 0x4a, 0x53, 0x56, 0x50, 0x4c, 0x57, 0x5a, 0x59, 0x5b, + 0x5d, 0x5d, 0x5a, 0x5d, 0x5c, 0x5a, 0x5e, 0x5f, 0x5b, 0x5a, 0x59, 0x5e, + 0x61, 0x5e, 0x5d, 0x5d, 0x5c, 0x59, 0x59, 0x5a, 0x5e, 0x60, 0x5e, 0x60, + 0x61, 0x5f, 0x5d, 0x5d, 0x60, 0x62, 0x63, 0x64, 0x67, 0x69, 0x69, 0x69, + 0x69, 0x6a, 0x6b, 0x6b, 0x6c, 0x6d, 0x6e, 0x6d, 0x6b, 0x6b, 0x68, 0x62, + 0x56, 0x49, 0x37, 0x1e, 0x0b, 0xf8, 0xeb, 0xe4, 0xec, 0xf6, 0xf3, 0xe7, + 0xe9, 0xe6, 0xe5, 0xe1, 0xde, 0xe3, 0xdf, 0xde, 0xd9, 0xd9, 0xd8, 0xdc, + 0xd8, 0xdf, 0xe5, 0xd1, 0xda, 0xda, 0xdc, 0xe0, 0xda, 0xd9, 0xd9, 0xd1, + 0xd7, 0xdb, 0xda, 0xdf, 0xe3, 0xeb, 0xe7, 0xe2, 0xdd, 0xdd, 0xe6, 0xe1, + 0xdc, 0xe4, 0xdb, 0xd4, 0xe3, 0xdb, 0xda, 0xde, 0xde, 0xd4, 0xdb, 0xdc, + 0xde, 0xcf, 0xd9, 0xed, 0xde, 0xd6, 0xd5, 0xda, 0xde, 0xe0, 0xe0, 0xe2, + 0xe1, 0xe4, 0xe0, 0xe4, 0xe4, 0xd9, 0xda, 0xe4, 0xee, 0xe9, 0xe1, 0xd1, + 0xd2, 0xd4, 0xcf, 0xdf, 0xce, 0xce, 0xd4, 0xd5, 0xd4, 0xdd, 0xdc, 
0xdd, + 0xde, 0xdf, 0xe2, 0xdc, 0xd6, 0xd4, 0xe2, 0xe1, 0xce, 0xd4, 0xdc, 0xdd, + 0xea, 0xec, 0xda, 0xcd, 0xd3, 0xdb, 0xe1, 0xed, 0xde, 0xcb, 0xdf, 0xdc, + 0xdc, 0xdc, 0xd5, 0xd5, 0x30, 0x31, 0x31, 0x33, 0x38, 0x37, 0x35, 0x37, + 0x39, 0x3a, 0x3b, 0x3c, 0x3c, 0x3a, 0x39, 0x37, 0x35, 0x36, 0x3b, 0x3b, + 0x3a, 0x39, 0x3b, 0x3b, 0x3a, 0x3e, 0x3e, 0x41, 0x48, 0x4b, 0x47, 0x40, + 0x45, 0x47, 0x4a, 0x51, 0x55, 0x54, 0x55, 0x50, 0x4b, 0x49, 0x4a, 0x4a, + 0x55, 0x54, 0x4f, 0x48, 0x57, 0x5c, 0x58, 0x5b, 0x5d, 0x5d, 0x5b, 0x5e, + 0x5d, 0x5b, 0x5d, 0x5d, 0x5b, 0x5a, 0x59, 0x5d, 0x61, 0x5e, 0x5d, 0x5d, + 0x5c, 0x59, 0x59, 0x5a, 0x5d, 0x60, 0x5f, 0x60, 0x62, 0x62, 0x60, 0x60, + 0x60, 0x61, 0x63, 0x65, 0x67, 0x69, 0x69, 0x69, 0x6a, 0x6c, 0x6b, 0x6b, + 0x6c, 0x6d, 0x6e, 0x6e, 0x6d, 0x6d, 0x6d, 0x6b, 0x68, 0x5f, 0x54, 0x43, + 0x2a, 0x11, 0x04, 0xee, 0xfa, 0x0c, 0x07, 0xe9, 0xed, 0xea, 0xe7, 0xe2, + 0xdf, 0xe0, 0xdc, 0xd8, 0xd4, 0xd5, 0xd4, 0xdb, 0xdd, 0xce, 0xe0, 0xd7, + 0xd6, 0xd7, 0xdb, 0xdc, 0xda, 0xd7, 0xd3, 0xdd, 0xde, 0xe1, 0xdf, 0xdf, + 0xe1, 0xea, 0xe8, 0xdb, 0xd4, 0xc9, 0xd9, 0xe5, 0xdc, 0xe2, 0xdc, 0xda, + 0xe5, 0xd4, 0xda, 0xd7, 0xd6, 0xd4, 0xda, 0xd5, 0xd4, 0xd2, 0xdf, 0xf2, + 0xea, 0xe7, 0xdb, 0xd8, 0xda, 0xe1, 0xe5, 0xed, 0xe6, 0xe2, 0xdf, 0xe4, + 0xdf, 0xe2, 0xdb, 0xd7, 0xec, 0xea, 0xe4, 0xe1, 0xdb, 0xd5, 0xd3, 0xde, + 0xdf, 0xd5, 0xd4, 0xd0, 0xd2, 0xd4, 0xda, 0xd8, 0xd2, 0xd3, 0xdf, 0xe1, + 0xda, 0xde, 0xdd, 0xdd, 0xd1, 0xd4, 0xda, 0xda, 0xeb, 0xee, 0xdd, 0xd4, + 0xd5, 0xda, 0xe2, 0xed, 0xec, 0xde, 0xe4, 0xd4, 0xd5, 0xdd, 0xda, 0xd2, + 0x31, 0x35, 0x37, 0x38, 0x3a, 0x3a, 0x3a, 0x3a, 0x3b, 0x3b, 0x39, 0x3b, + 0x3a, 0x38, 0x37, 0x37, 0x3b, 0x3c, 0x3a, 0x38, 0x39, 0x3e, 0x3f, 0x3b, + 0x39, 0x37, 0x3c, 0x48, 0x4b, 0x44, 0x3d, 0x3f, 0x47, 0x47, 0x4f, 0x51, + 0x51, 0x51, 0x51, 0x4e, 0x4a, 0x4c, 0x49, 0x4a, 0x55, 0x52, 0x50, 0x46, + 0x57, 0x59, 0x58, 0x5c, 0x5d, 0x5e, 0x5c, 0x5d, 0x5e, 0x5b, 0x5b, 0x5c, + 0x5a, 0x59, 0x58, 0x59, 0x5d, 0x5d, 0x5c, 0x5c, 0x5b, 0x59, 0x5a, 
0x59, + 0x5d, 0x60, 0x5f, 0x5f, 0x61, 0x64, 0x63, 0x63, 0x62, 0x61, 0x63, 0x65, + 0x67, 0x69, 0x6a, 0x69, 0x6a, 0x6c, 0x6c, 0x6c, 0x6c, 0x6d, 0x6d, 0x6e, + 0x6d, 0x6e, 0x6e, 0x6d, 0x6c, 0x68, 0x61, 0x58, 0x4b, 0x3d, 0x36, 0x2e, + 0x2f, 0x33, 0x22, 0x06, 0xfd, 0xf6, 0xeb, 0xdd, 0xdb, 0xd6, 0xd6, 0xd4, + 0xd1, 0xd4, 0xda, 0xda, 0xdd, 0xd1, 0xd0, 0xd4, 0xce, 0xd9, 0xd9, 0xda, + 0xda, 0xe0, 0xd9, 0xe1, 0xde, 0xe0, 0xde, 0xe4, 0xdd, 0xe5, 0xe7, 0xe5, + 0xdc, 0xcb, 0xdc, 0xe9, 0xde, 0xe2, 0xe1, 0xda, 0xe3, 0xd5, 0xd2, 0xd5, + 0xd5, 0xd3, 0xd7, 0xd8, 0xd5, 0xdd, 0xe3, 0xe9, 0xe7, 0xed, 0xe5, 0xdf, + 0xe0, 0xe2, 0xe9, 0xf2, 0xeb, 0xe4, 0xd8, 0xe1, 0xe3, 0xec, 0xe1, 0xd6, + 0xda, 0xd6, 0xd8, 0xf2, 0xea, 0xdb, 0xd2, 0xdc, 0xe7, 0xe4, 0xd2, 0xc2, + 0xc8, 0xcb, 0xd8, 0xd5, 0xcf, 0xcc, 0xd4, 0xe4, 0xdb, 0xdd, 0xd6, 0xd8, + 0xd4, 0xd3, 0xd7, 0xd6, 0xeb, 0xef, 0xe1, 0xda, 0xd8, 0xd8, 0xde, 0xe8, + 0xea, 0xe7, 0xe1, 0xd2, 0xd1, 0xd4, 0xd5, 0xd7, 0x3a, 0x3a, 0x3d, 0x3e, + 0x3e, 0x3b, 0x39, 0x38, 0x38, 0x38, 0x38, 0x39, 0x38, 0x37, 0x3a, 0x3c, + 0x3c, 0x3a, 0x38, 0x3c, 0x41, 0x40, 0x3d, 0x37, 0x39, 0x3b, 0x45, 0x4a, + 0x45, 0x3c, 0x3b, 0x47, 0x46, 0x4a, 0x51, 0x51, 0x50, 0x4d, 0x4e, 0x4c, + 0x4d, 0x4f, 0x49, 0x4c, 0x55, 0x54, 0x4c, 0x47, 0x58, 0x5a, 0x5a, 0x5c, + 0x5c, 0x5d, 0x58, 0x5d, 0x5f, 0x5b, 0x5c, 0x5b, 0x5a, 0x5b, 0x58, 0x55, + 0x5a, 0x5c, 0x5a, 0x5c, 0x5a, 0x5b, 0x5a, 0x5b, 0x5d, 0x5f, 0x60, 0x60, + 0x61, 0x63, 0x65, 0x65, 0x63, 0x61, 0x63, 0x65, 0x67, 0x69, 0x6a, 0x6a, + 0x6b, 0x6c, 0x6d, 0x6c, 0x6c, 0x6c, 0x6d, 0x6d, 0x6d, 0x6e, 0x6e, 0x6e, + 0x6d, 0x6c, 0x68, 0x5f, 0x54, 0x44, 0x39, 0x3e, 0x48, 0x45, 0x23, 0xfb, + 0xee, 0xed, 0xef, 0xdf, 0xd4, 0xd4, 0xda, 0xdc, 0xd1, 0xd4, 0xd1, 0xd5, + 0xd9, 0xd3, 0xc8, 0xd3, 0xd2, 0xd2, 0xd4, 0xda, 0xe1, 0xe2, 0xe1, 0xd8, + 0xdb, 0xdd, 0xdc, 0xe7, 0xe5, 0xe2, 0xe7, 0xf0, 0xe7, 0xdb, 0xdb, 0xe7, + 0xde, 0xe4, 0xe2, 0xe7, 0xe5, 0xdc, 0xd4, 0xd6, 0xda, 0xd6, 0xd4, 0xd5, + 0xe0, 0xe3, 0xe3, 0xe2, 0xe1, 0xe3, 0xe7, 0xe4, 0xe7, 0xe4, 0xe7, 
0xe6, + 0xe5, 0xe9, 0xd7, 0xe7, 0xe9, 0xe7, 0xe2, 0xd8, 0xdb, 0xd9, 0xda, 0xec, + 0xf6, 0xe5, 0xd8, 0xdf, 0xe7, 0xeb, 0xe8, 0xc8, 0xc2, 0xc5, 0xd6, 0xd9, + 0xd3, 0xcb, 0xd3, 0xe4, 0xdc, 0xd8, 0xd8, 0xdc, 0xd7, 0xd3, 0xd7, 0xda, + 0xed, 0xf0, 0xde, 0xda, 0xce, 0xcf, 0xda, 0xe0, 0xe0, 0xe0, 0xe1, 0xde, + 0xd2, 0xd5, 0xd5, 0xe3, 0x3a, 0x3a, 0x3b, 0x3c, 0x3a, 0x3a, 0x38, 0x3b, + 0x3a, 0x38, 0x38, 0x37, 0x37, 0x3b, 0x3c, 0x3a, 0x3a, 0x3b, 0x3d, 0x3e, + 0x3d, 0x3c, 0x3b, 0x3b, 0x3d, 0x44, 0x48, 0x46, 0x41, 0x3e, 0x44, 0x4a, + 0x49, 0x4e, 0x51, 0x4e, 0x4a, 0x4b, 0x4b, 0x4c, 0x51, 0x4c, 0x49, 0x4e, + 0x55, 0x56, 0x4a, 0x4a, 0x58, 0x5a, 0x5b, 0x57, 0x57, 0x5d, 0x56, 0x5c, + 0x61, 0x5b, 0x5b, 0x58, 0x59, 0x5b, 0x57, 0x53, 0x56, 0x57, 0x59, 0x5c, + 0x5a, 0x5c, 0x5c, 0x5b, 0x5d, 0x5f, 0x61, 0x62, 0x63, 0x63, 0x64, 0x67, + 0x64, 0x63, 0x61, 0x64, 0x67, 0x69, 0x6b, 0x6b, 0x6b, 0x6c, 0x6d, 0x6d, + 0x6d, 0x6d, 0x6e, 0x6d, 0x6d, 0x6d, 0x6c, 0x6d, 0x6e, 0x6d, 0x6a, 0x66, + 0x5f, 0x52, 0x4d, 0x53, 0x56, 0x4a, 0x0c, 0xe9, 0xe1, 0xdb, 0xe4, 0xe0, + 0xd9, 0xe0, 0xde, 0xdc, 0xd8, 0xd6, 0xd2, 0xce, 0xd3, 0xd2, 0xce, 0xd6, + 0xdd, 0xd6, 0xd6, 0xdd, 0xe1, 0xe0, 0xe1, 0xd6, 0xda, 0xda, 0xe1, 0xe6, + 0xeb, 0xe4, 0xea, 0xed, 0xe5, 0xe0, 0xdc, 0xe7, 0xe6, 0xe7, 0xeb, 0xeb, + 0xe5, 0xe0, 0xd8, 0xdc, 0xdf, 0xda, 0xd3, 0xd4, 0xde, 0xe4, 0xe1, 0xdd, + 0xdb, 0xe1, 0xe7, 0xe1, 0xdf, 0xd8, 0xe1, 0xe0, 0xd4, 0xe0, 0xdf, 0xe8, + 0xe0, 0xe1, 0xe3, 0xd4, 0xd3, 0xdd, 0xdc, 0xd8, 0xd9, 0xcc, 0xd2, 0xdd, + 0xe2, 0xe1, 0xe4, 0xdf, 0xd4, 0xd4, 0xd6, 0xda, 0xe1, 0xd4, 0xd5, 0xd9, + 0xe3, 0xe1, 0xd7, 0xde, 0xd8, 0xd6, 0xdc, 0xe2, 0xee, 0xf2, 0xe0, 0xde, + 0xd4, 0xcf, 0xd9, 0xdf, 0xde, 0xde, 0xe0, 0xdb, 0xe0, 0xda, 0xdc, 0xe1, + 0x37, 0x39, 0x39, 0x39, 0x3a, 0x3a, 0x39, 0x3a, 0x3d, 0x3c, 0x3a, 0x38, + 0x3c, 0x3e, 0x3c, 0x3c, 0x3d, 0x3d, 0x3b, 0x38, 0x37, 0x3c, 0x3d, 0x3f, + 0x41, 0x47, 0x47, 0x44, 0x41, 0x43, 0x4c, 0x4e, 0x4d, 0x4f, 0x50, 0x49, + 0x49, 0x4b, 0x48, 0x4d, 0x4e, 0x48, 0x4a, 0x4d, 0x55, 0x54, 0x4a, 
0x4f, + 0x58, 0x5a, 0x5c, 0x54, 0x51, 0x57, 0x58, 0x5d, 0x5f, 0x5b, 0x5b, 0x56, + 0x58, 0x5b, 0x59, 0x55, 0x56, 0x53, 0x57, 0x59, 0x5a, 0x5d, 0x5d, 0x5a, + 0x5d, 0x60, 0x62, 0x61, 0x62, 0x63, 0x65, 0x68, 0x67, 0x65, 0x63, 0x63, + 0x66, 0x69, 0x6b, 0x6b, 0x6b, 0x6c, 0x6c, 0x6c, 0x6c, 0x6d, 0x6e, 0x6d, + 0x6e, 0x6d, 0x6e, 0x6e, 0x6d, 0x6e, 0x6c, 0x6a, 0x65, 0x5d, 0x5e, 0x60, + 0x5e, 0x47, 0x0e, 0xef, 0xdd, 0xdc, 0xe9, 0xe1, 0xd5, 0xdf, 0xe4, 0xdf, + 0xdc, 0xe2, 0xe3, 0xd3, 0xd3, 0xd4, 0xd2, 0xd3, 0xdb, 0xda, 0xdc, 0xe5, + 0xdc, 0xdf, 0xe0, 0xdf, 0xd8, 0xda, 0xe9, 0xe7, 0xe3, 0xe2, 0xe3, 0xeb, + 0xe7, 0xdd, 0xe0, 0xe0, 0xe8, 0xe4, 0xe7, 0xe5, 0xdc, 0xe3, 0xe0, 0xe2, + 0xde, 0xdc, 0xd8, 0xdc, 0xd9, 0xdb, 0xda, 0xda, 0xdf, 0xe3, 0xe4, 0xe6, + 0xe5, 0xd6, 0xd5, 0xd9, 0xd8, 0xde, 0xd8, 0xda, 0xda, 0xe2, 0xe7, 0xe1, + 0xe1, 0xe4, 0xdd, 0xda, 0xd7, 0xc0, 0xca, 0xe0, 0xe1, 0xe0, 0xda, 0xe3, + 0xe1, 0xde, 0xdc, 0xdf, 0xe0, 0xdf, 0xda, 0xd4, 0xde, 0xe2, 0xd7, 0xd7, + 0xd6, 0xd8, 0xdd, 0xe0, 0xf2, 0xf6, 0xe3, 0xe0, 0xd7, 0xd3, 0xdd, 0xdd, + 0xdd, 0xdf, 0xdc, 0xd9, 0xda, 0xd8, 0xdc, 0xde, 0x37, 0x37, 0x38, 0x3a, + 0x3b, 0x38, 0x39, 0x3b, 0x3c, 0x39, 0x39, 0x39, 0x3d, 0x40, 0x3a, 0x3c, + 0x3d, 0x37, 0x35, 0x34, 0x38, 0x3e, 0x41, 0x41, 0x43, 0x46, 0x46, 0x44, + 0x46, 0x4b, 0x4e, 0x4f, 0x4f, 0x4f, 0x4c, 0x4a, 0x4a, 0x45, 0x44, 0x4c, + 0x45, 0x45, 0x49, 0x4c, 0x56, 0x51, 0x4c, 0x50, 0x57, 0x5a, 0x5a, 0x51, + 0x50, 0x53, 0x59, 0x5d, 0x5d, 0x5b, 0x5a, 0x54, 0x52, 0x5b, 0x5c, 0x57, + 0x53, 0x55, 0x57, 0x59, 0x5b, 0x5e, 0x5e, 0x5c, 0x5d, 0x61, 0x63, 0x62, + 0x61, 0x63, 0x67, 0x68, 0x69, 0x66, 0x65, 0x64, 0x66, 0x69, 0x6b, 0x6b, + 0x6b, 0x6c, 0x6b, 0x6a, 0x6b, 0x6d, 0x6e, 0x6f, 0x6f, 0x6e, 0x6e, 0x6e, + 0x6e, 0x6e, 0x6d, 0x6b, 0x69, 0x67, 0x68, 0x67, 0x63, 0x45, 0x22, 0x0b, + 0xfd, 0xf8, 0xf0, 0xf6, 0xeb, 0xe7, 0xe6, 0xe3, 0xde, 0xe7, 0xe2, 0xd2, + 0xd7, 0xda, 0xce, 0xce, 0xd9, 0xe1, 0xe1, 0xdc, 0xe2, 0xd9, 0xdb, 0xd8, + 0xcf, 0xdd, 0xe8, 0xde, 0xd5, 0xde, 0xea, 0xe0, 0xdb, 0xda, 0xe1, 
0xdc, + 0xe4, 0xdc, 0xda, 0xe2, 0xe5, 0xec, 0xe1, 0xdb, 0xd9, 0xde, 0xd9, 0xd5, + 0xd6, 0xd8, 0xd6, 0xd7, 0xdf, 0xe2, 0xe0, 0xe1, 0xe7, 0xde, 0xdd, 0xd8, + 0xd7, 0xda, 0xd8, 0xda, 0xe4, 0xea, 0xea, 0xe2, 0xe1, 0xe1, 0xe1, 0xdc, + 0xda, 0xd9, 0xdf, 0xe1, 0xdd, 0xe6, 0xda, 0xd6, 0xe2, 0xdb, 0xd7, 0xdb, + 0xe5, 0xe1, 0xdf, 0xd8, 0xda, 0xdd, 0xe1, 0xd8, 0xd8, 0xda, 0xda, 0xd9, + 0xe8, 0xfb, 0xe6, 0xe0, 0xda, 0xd3, 0xde, 0xe1, 0xde, 0xe4, 0xe3, 0xdb, + 0xd0, 0xd7, 0xda, 0xe1, 0x37, 0x37, 0x37, 0x37, 0x37, 0x38, 0x39, 0x3a, + 0x3a, 0x3b, 0x3c, 0x3e, 0x41, 0x40, 0x3c, 0x3b, 0x39, 0x37, 0x36, 0x3c, + 0x3e, 0x40, 0x42, 0x3f, 0x40, 0x46, 0x48, 0x47, 0x4b, 0x4d, 0x4c, 0x4e, + 0x4d, 0x4a, 0x49, 0x48, 0x45, 0x3e, 0x46, 0x4a, 0x40, 0x43, 0x49, 0x4f, + 0x56, 0x51, 0x4d, 0x52, 0x58, 0x5b, 0x57, 0x4e, 0x4d, 0x50, 0x58, 0x5a, + 0x5d, 0x5a, 0x57, 0x54, 0x52, 0x5a, 0x5e, 0x5c, 0x54, 0x54, 0x58, 0x59, + 0x5b, 0x5e, 0x5f, 0x5d, 0x5d, 0x61, 0x62, 0x63, 0x62, 0x63, 0x66, 0x69, + 0x69, 0x67, 0x66, 0x65, 0x68, 0x67, 0x6a, 0x6c, 0x6c, 0x6c, 0x6b, 0x6b, + 0x6c, 0x6d, 0x6c, 0x6e, 0x6e, 0x6e, 0x6e, 0x6e, 0x6d, 0x6d, 0x6c, 0x6b, + 0x6a, 0x6a, 0x6a, 0x69, 0x5f, 0x48, 0x2a, 0x0b, 0xf9, 0xeb, 0xed, 0x06, + 0x01, 0xf6, 0xe5, 0xe2, 0xe1, 0xef, 0xed, 0xdc, 0xde, 0xdf, 0xda, 0xdb, + 0xde, 0xe0, 0xe4, 0xda, 0xe2, 0xe0, 0xde, 0xd3, 0xd6, 0xe2, 0xe1, 0xd7, + 0xd5, 0xe0, 0xdf, 0xda, 0xd4, 0xd0, 0xd4, 0xd1, 0xd5, 0xd2, 0xd4, 0xdb, + 0xdc, 0xe3, 0xe0, 0xd5, 0xdc, 0xdf, 0xe4, 0xd9, 0xd7, 0xd5, 0xd9, 0xd4, + 0xda, 0xdc, 0xda, 0xdd, 0xe7, 0xea, 0xe7, 0xea, 0xe7, 0xe1, 0xe0, 0xd7, + 0xd7, 0xec, 0xe5, 0xd5, 0xde, 0xda, 0xdf, 0xd7, 0xd4, 0xe1, 0xe6, 0xe6, + 0xe1, 0xdf, 0xdb, 0xd2, 0xdb, 0xd8, 0xd5, 0xd4, 0xe9, 0xe5, 0xe1, 0xe5, + 0xdf, 0xd8, 0xd6, 0xd5, 0xd7, 0xda, 0xd7, 0xd5, 0xd9, 0xe2, 0xda, 0xd9, + 0xe2, 0xdf, 0xd4, 0xe1, 0xe3, 0xe5, 0xe2, 0xdd, 0xd6, 0xde, 0xe5, 0xe1, + 0x35, 0x35, 0x37, 0x36, 0x37, 0x39, 0x37, 0x38, 0x39, 0x3a, 0x3e, 0x40, + 0x40, 0x3e, 0x3c, 0x3d, 0x38, 0x3a, 0x3e, 0x42, 0x43, 0x43, 0x3f, 
0x3c, + 0x43, 0x49, 0x47, 0x48, 0x4a, 0x4b, 0x4c, 0x4d, 0x4a, 0x46, 0x46, 0x42, + 0x3e, 0x41, 0x4b, 0x47, 0x3d, 0x3a, 0x48, 0x52, 0x55, 0x52, 0x50, 0x57, + 0x58, 0x58, 0x52, 0x50, 0x4e, 0x52, 0x58, 0x5a, 0x5a, 0x59, 0x56, 0x52, + 0x54, 0x5a, 0x5f, 0x5d, 0x5b, 0x56, 0x58, 0x5a, 0x5d, 0x5f, 0x61, 0x5f, + 0x5d, 0x62, 0x62, 0x63, 0x63, 0x63, 0x67, 0x68, 0x68, 0x69, 0x68, 0x65, + 0x66, 0x67, 0x69, 0x6b, 0x6b, 0x6b, 0x6b, 0x6c, 0x6d, 0x6c, 0x6c, 0x6d, + 0x6e, 0x6e, 0x6e, 0x6e, 0x6d, 0x6c, 0x6c, 0x6c, 0x6b, 0x6b, 0x6b, 0x6b, + 0x64, 0x58, 0x44, 0x30, 0x15, 0xfa, 0xff, 0x15, 0x08, 0xf9, 0xe7, 0xea, + 0xe7, 0xea, 0xec, 0xe1, 0xdc, 0xde, 0xe9, 0xe2, 0xe2, 0xe2, 0xe2, 0xde, + 0xda, 0xe4, 0xe1, 0xd5, 0xda, 0xdd, 0xd1, 0xc8, 0xcb, 0xda, 0xd8, 0xd5, + 0xd4, 0xcc, 0xca, 0xc6, 0xc9, 0xc9, 0xcc, 0xcf, 0xd6, 0xde, 0xde, 0xdc, + 0xdd, 0xd1, 0xe0, 0xdc, 0xd8, 0xe1, 0xdd, 0xd9, 0xda, 0xd5, 0xda, 0xe1, + 0xe7, 0xea, 0xe3, 0xe9, 0xee, 0xe8, 0xe1, 0xdd, 0xde, 0xe0, 0xe0, 0xdc, + 0xe5, 0xd5, 0xd5, 0xd9, 0xd9, 0xe1, 0xe5, 0xe6, 0xe4, 0xdb, 0xde, 0xe0, + 0xdb, 0xd1, 0xd4, 0xd2, 0xda, 0xdc, 0xd8, 0xe7, 0xdb, 0xd7, 0xd4, 0xd3, + 0xd9, 0xdc, 0xdb, 0xd8, 0xd8, 0xd4, 0xd5, 0xdf, 0xe4, 0xe7, 0xdb, 0xdb, + 0xeb, 0xe6, 0xd9, 0xdc, 0xdb, 0xdf, 0xe1, 0xe2, 0x34, 0x33, 0x33, 0x34, + 0x35, 0x35, 0x36, 0x38, 0x3a, 0x3e, 0x3e, 0x3f, 0x3e, 0x3e, 0x3a, 0x39, + 0x39, 0x40, 0x44, 0x44, 0x45, 0x42, 0x3d, 0x3e, 0x48, 0x48, 0x46, 0x49, + 0x4a, 0x4a, 0x48, 0x44, 0x44, 0x40, 0x41, 0x42, 0x42, 0x48, 0x4a, 0x43, + 0x3d, 0x3c, 0x4a, 0x54, 0x55, 0x54, 0x56, 0x57, 0x58, 0x55, 0x51, 0x54, + 0x50, 0x51, 0x58, 0x5b, 0x57, 0x58, 0x57, 0x54, 0x57, 0x58, 0x60, 0x5f, + 0x5e, 0x5b, 0x5a, 0x5d, 0x5f, 0x62, 0x62, 0x60, 0x5d, 0x62, 0x60, 0x61, + 0x63, 0x64, 0x66, 0x69, 0x68, 0x69, 0x67, 0x65, 0x67, 0x68, 0x68, 0x6a, + 0x6a, 0x69, 0x6a, 0x6c, 0x6c, 0x6c, 0x6c, 0x6e, 0x6d, 0x6d, 0x6d, 0x6d, + 0x6c, 0x6c, 0x6b, 0x6c, 0x6c, 0x6d, 0x6d, 0x6c, 0x68, 0x62, 0x5a, 0x53, + 0x4a, 0x35, 0x25, 0x24, 0x08, 0xf3, 0xed, 0xf0, 0xe7, 0xe3, 0xe4, 
0xe0, + 0xda, 0xdb, 0xda, 0xdd, 0xe1, 0xe5, 0xe7, 0xec, 0xe1, 0xec, 0xe8, 0xe1, + 0xe0, 0xe7, 0xd6, 0xd2, 0xd6, 0xda, 0xd7, 0xd6, 0xda, 0xd9, 0xd4, 0xd4, + 0xc9, 0xbd, 0xc5, 0xca, 0xd1, 0xd7, 0xcb, 0xd5, 0xda, 0xd2, 0xde, 0xdf, + 0xda, 0xda, 0xe1, 0xde, 0xdc, 0xda, 0xdb, 0xe2, 0xdd, 0xe7, 0xe5, 0xe7, + 0xe6, 0xe6, 0xe2, 0xe0, 0xe1, 0xda, 0xe1, 0xe2, 0xe2, 0xd8, 0xd7, 0xd4, + 0xd7, 0xdb, 0xde, 0xdd, 0xe2, 0xdc, 0xd7, 0xe1, 0xd9, 0xcf, 0xdd, 0xd8, + 0xda, 0xd8, 0xce, 0xdb, 0xd7, 0xd7, 0xd0, 0xd3, 0xda, 0xe1, 0xd9, 0xd7, + 0xd4, 0xd8, 0xdb, 0xe3, 0xe6, 0xe1, 0xda, 0xd5, 0xe1, 0xe7, 0xe2, 0xe1, + 0xdb, 0xda, 0xdd, 0xe2, 0x32, 0x33, 0x33, 0x34, 0x36, 0x38, 0x3c, 0x3e, + 0x3e, 0x3f, 0x3f, 0x3e, 0x3c, 0x3a, 0x37, 0x3a, 0x41, 0x45, 0x46, 0x46, + 0x43, 0x3d, 0x3d, 0x44, 0x46, 0x46, 0x49, 0x4a, 0x4a, 0x46, 0x44, 0x42, + 0x43, 0x42, 0x44, 0x46, 0x46, 0x4a, 0x45, 0x40, 0x3e, 0x41, 0x4d, 0x51, + 0x54, 0x57, 0x57, 0x57, 0x57, 0x51, 0x54, 0x56, 0x51, 0x54, 0x5b, 0x5c, + 0x58, 0x57, 0x57, 0x57, 0x59, 0x5a, 0x5f, 0x5f, 0x60, 0x5d, 0x5d, 0x5f, + 0x5f, 0x63, 0x63, 0x62, 0x5e, 0x61, 0x5d, 0x5f, 0x64, 0x64, 0x65, 0x69, + 0x69, 0x69, 0x67, 0x65, 0x67, 0x69, 0x69, 0x6a, 0x6a, 0x6a, 0x6b, 0x6b, + 0x6d, 0x6d, 0x6c, 0x6c, 0x6c, 0x6c, 0x6c, 0x6c, 0x6c, 0x6c, 0x6c, 0x6c, + 0x6c, 0x6d, 0x6e, 0x6d, 0x69, 0x64, 0x63, 0x61, 0x5e, 0x59, 0x54, 0x38, + 0x11, 0xf6, 0xe3, 0xe6, 0xe1, 0xdd, 0xe0, 0xe0, 0xe0, 0xde, 0xd3, 0xda, + 0xe0, 0xe0, 0xe4, 0xe7, 0xe7, 0xeb, 0xe6, 0xe3, 0xe1, 0xec, 0xde, 0xda, + 0xdf, 0xda, 0xd4, 0xd4, 0xd8, 0xe3, 0xdc, 0xd3, 0xd4, 0xc5, 0xc6, 0xce, + 0xd1, 0xd8, 0xcd, 0xca, 0xdd, 0xe9, 0xe0, 0xde, 0xda, 0xd7, 0xdc, 0xdd, + 0xe0, 0xdf, 0xe2, 0xe2, 0xd6, 0xdd, 0xe3, 0xe0, 0xdb, 0xd8, 0xdc, 0xdb, + 0xd9, 0xd4, 0xe1, 0xdd, 0xe4, 0xdf, 0xdf, 0xda, 0xd3, 0xd5, 0xd5, 0xd4, + 0xd8, 0xd9, 0xda, 0xe0, 0xd7, 0xd6, 0xe3, 0xe0, 0xe0, 0xd8, 0xd8, 0xd6, + 0xd5, 0xd9, 0xd1, 0xd5, 0xdd, 0xe5, 0xd5, 0xd5, 0xd8, 0xda, 0xe3, 0xe4, + 0xe8, 0xdf, 0xd4, 0xd6, 0xe1, 0xe3, 0xdf, 0xd5, 0xd5, 0xd6, 0xe1, 
0xdd, + 0x31, 0x31, 0x36, 0x39, 0x3b, 0x3d, 0x3e, 0x3d, 0x3e, 0x3e, 0x3d, 0x39, + 0x39, 0x3a, 0x3d, 0x42, 0x45, 0x47, 0x47, 0x45, 0x41, 0x3e, 0x41, 0x46, + 0x46, 0x47, 0x49, 0x49, 0x45, 0x43, 0x42, 0x40, 0x3f, 0x41, 0x43, 0x44, + 0x44, 0x46, 0x3f, 0x40, 0x41, 0x44, 0x49, 0x50, 0x57, 0x57, 0x55, 0x56, + 0x55, 0x52, 0x57, 0x57, 0x55, 0x55, 0x5b, 0x5c, 0x5a, 0x57, 0x58, 0x58, + 0x5b, 0x5d, 0x5e, 0x5e, 0x61, 0x5e, 0x5e, 0x60, 0x60, 0x62, 0x64, 0x62, + 0x60, 0x5f, 0x5c, 0x5d, 0x63, 0x64, 0x65, 0x68, 0x69, 0x68, 0x66, 0x65, + 0x68, 0x69, 0x69, 0x6b, 0x6c, 0x6c, 0x6b, 0x6b, 0x6c, 0x6c, 0x6c, 0x6c, + 0x6c, 0x6c, 0x6c, 0x6c, 0x6c, 0x6d, 0x6c, 0x6c, 0x6d, 0x6e, 0x6d, 0x6d, + 0x6c, 0x6b, 0x6a, 0x69, 0x66, 0x65, 0x5f, 0x4f, 0x30, 0x06, 0xe1, 0xdd, + 0xdf, 0xda, 0xdf, 0xe2, 0xdf, 0xda, 0xd2, 0xdc, 0xe1, 0xde, 0xe2, 0xe1, + 0xe1, 0xe1, 0xdf, 0xe1, 0xe1, 0xed, 0xe3, 0xd9, 0xda, 0xdb, 0xd8, 0xd7, + 0xd9, 0xe7, 0xd2, 0xca, 0xd4, 0xce, 0xd2, 0xd4, 0xd5, 0xd3, 0xc9, 0xc8, + 0xde, 0xe5, 0xe1, 0xd2, 0xd2, 0xdd, 0xe8, 0xe4, 0xda, 0xdf, 0xe9, 0xe0, + 0xe1, 0xe3, 0xe1, 0xdc, 0xda, 0xda, 0xe8, 0xde, 0xd4, 0xd4, 0xe8, 0xda, + 0xda, 0xe0, 0xe3, 0xe1, 0xd5, 0xd4, 0xd4, 0xcf, 0xcc, 0xd1, 0xda, 0xe1, + 0xe1, 0xe3, 0xe2, 0xda, 0xd3, 0xd6, 0xda, 0xd5, 0xd6, 0xdb, 0xd5, 0xd5, + 0xda, 0xe3, 0xdf, 0xe8, 0xe3, 0xe0, 0xde, 0xe4, 0xe8, 0xe1, 0xdd, 0xe0, + 0xe4, 0xe3, 0xe1, 0xda, 0xd4, 0xd5, 0xe1, 0xdc, 0x34, 0x37, 0x38, 0x38, + 0x38, 0x39, 0x37, 0x35, 0x34, 0x36, 0x37, 0x37, 0x39, 0x3e, 0x44, 0x46, + 0x46, 0x46, 0x44, 0x41, 0x3e, 0x3f, 0x41, 0x43, 0x45, 0x45, 0x45, 0x45, + 0x41, 0x3f, 0x3e, 0x3e, 0x41, 0x44, 0x43, 0x43, 0x47, 0x44, 0x42, 0x40, + 0x43, 0x49, 0x46, 0x53, 0x58, 0x55, 0x56, 0x54, 0x53, 0x54, 0x57, 0x56, + 0x58, 0x58, 0x59, 0x5c, 0x5a, 0x5a, 0x5a, 0x57, 0x5a, 0x5d, 0x5d, 0x5e, + 0x61, 0x5f, 0x60, 0x62, 0x63, 0x62, 0x63, 0x60, 0x5e, 0x5c, 0x5c, 0x5d, + 0x63, 0x64, 0x66, 0x67, 0x69, 0x67, 0x66, 0x64, 0x68, 0x67, 0x67, 0x6a, + 0x6c, 0x6b, 0x6c, 0x6c, 0x6c, 0x6c, 0x6b, 0x6b, 0x6a, 0x6b, 0x6d, 
0x6d, + 0x6c, 0x6c, 0x6c, 0x6c, 0x6d, 0x6e, 0x6c, 0x6e, 0x6d, 0x6c, 0x6b, 0x6b, + 0x6a, 0x69, 0x64, 0x5c, 0x4a, 0x24, 0x01, 0xf0, 0xe5, 0xe1, 0xe5, 0xe5, + 0xe1, 0xdd, 0xd9, 0xde, 0xe3, 0xe4, 0xe2, 0xdc, 0xd9, 0xd5, 0xda, 0xe1, + 0xeb, 0xf2, 0xe1, 0xdd, 0xd9, 0xdd, 0xdd, 0xda, 0xd1, 0xe0, 0xd5, 0xd4, + 0xd7, 0xd2, 0xd6, 0xdb, 0xda, 0xcd, 0xbe, 0xc3, 0xd9, 0xd5, 0xda, 0xdc, + 0xe6, 0xe3, 0xe6, 0xe4, 0xd6, 0xdc, 0xe7, 0xe5, 0xe1, 0xdc, 0xde, 0xda, + 0xe1, 0xda, 0xdc, 0xdb, 0xd7, 0xd0, 0xcd, 0xc9, 0xd9, 0xe1, 0xd8, 0xda, + 0xda, 0xd0, 0xd6, 0xd0, 0xcf, 0xd9, 0xde, 0xdf, 0xdf, 0xe0, 0xda, 0xd1, + 0xcc, 0xd4, 0xd8, 0xd6, 0xd7, 0xdd, 0xd4, 0xd6, 0xda, 0xe0, 0xe0, 0xde, + 0xeb, 0xee, 0xe0, 0xeb, 0xe2, 0xe3, 0xe5, 0xe3, 0xe3, 0xe1, 0xe5, 0xdf, + 0xd7, 0xde, 0xdd, 0xd3, 0x34, 0x33, 0x33, 0x34, 0x31, 0x31, 0x2f, 0x2f, + 0x31, 0x36, 0x35, 0x38, 0x3e, 0x43, 0x44, 0x45, 0x44, 0x43, 0x3c, 0x3c, + 0x3e, 0x3e, 0x41, 0x44, 0x45, 0x46, 0x43, 0x42, 0x42, 0x40, 0x40, 0x44, + 0x44, 0x40, 0x42, 0x44, 0x48, 0x47, 0x44, 0x3f, 0x45, 0x49, 0x4b, 0x57, + 0x54, 0x54, 0x54, 0x54, 0x52, 0x54, 0x58, 0x57, 0x59, 0x5a, 0x58, 0x5b, + 0x5c, 0x5d, 0x5c, 0x57, 0x5a, 0x5d, 0x5d, 0x5e, 0x5f, 0x60, 0x63, 0x62, + 0x62, 0x61, 0x63, 0x5f, 0x5c, 0x5b, 0x5c, 0x5f, 0x63, 0x64, 0x66, 0x67, + 0x67, 0x67, 0x67, 0x67, 0x68, 0x68, 0x68, 0x69, 0x6a, 0x69, 0x69, 0x6a, + 0x6a, 0x6a, 0x69, 0x6a, 0x6b, 0x6b, 0x6c, 0x6c, 0x6b, 0x6c, 0x6d, 0x6e, + 0x6d, 0x6d, 0x6d, 0x6e, 0x6d, 0x6b, 0x6b, 0x6b, 0x6a, 0x69, 0x67, 0x63, + 0x59, 0x46, 0x1c, 0xf4, 0xe7, 0xe3, 0xdd, 0xe0, 0xdf, 0xe0, 0xef, 0xdd, + 0xe4, 0xe3, 0xe7, 0xe7, 0xd6, 0xd2, 0xd2, 0xd9, 0xec, 0xfd, 0xe3, 0xda, + 0xd7, 0xdc, 0xd7, 0xd0, 0xd7, 0xe3, 0xda, 0xdd, 0xde, 0xd5, 0xde, 0xe3, + 0xd8, 0xc1, 0xb5, 0xbc, 0xd7, 0xd6, 0xdf, 0xe9, 0xed, 0xe7, 0xe5, 0xe2, + 0xeb, 0xea, 0xe7, 0xe8, 0xe3, 0xd4, 0xda, 0xda, 0xd7, 0xd1, 0xce, 0xda, + 0xda, 0xce, 0xc6, 0xc8, 0xd0, 0xda, 0xda, 0xda, 0xe1, 0xd3, 0xda, 0xdc, + 0xd8, 0xdd, 0xdd, 0xd8, 0xda, 0xe1, 0xda, 0xd5, 0xd0, 0xd5, 0xd9, 
0xd9, + 0xd6, 0xdc, 0xd3, 0xde, 0xe1, 0xde, 0xda, 0xda, 0xe8, 0xe7, 0xe3, 0xeb, + 0xd4, 0xe1, 0xeb, 0xe7, 0xe4, 0xe1, 0xed, 0xeb, 0xdf, 0xe0, 0xde, 0xd8, + 0x32, 0x30, 0x2b, 0x2d, 0x2d, 0x2d, 0x31, 0x33, 0x35, 0x36, 0x36, 0x3e, + 0x40, 0x40, 0x3d, 0x3e, 0x3d, 0x3b, 0x3b, 0x3f, 0x3e, 0x42, 0x42, 0x45, + 0x43, 0x45, 0x44, 0x44, 0x43, 0x43, 0x44, 0x46, 0x42, 0x3e, 0x44, 0x46, + 0x49, 0x4a, 0x46, 0x44, 0x48, 0x4b, 0x50, 0x57, 0x52, 0x50, 0x4f, 0x57, + 0x54, 0x55, 0x59, 0x57, 0x57, 0x59, 0x5a, 0x5b, 0x5c, 0x5e, 0x5d, 0x5a, + 0x5c, 0x5c, 0x5a, 0x60, 0x60, 0x61, 0x63, 0x61, 0x62, 0x61, 0x63, 0x5e, + 0x5b, 0x58, 0x5d, 0x62, 0x63, 0x64, 0x64, 0x65, 0x69, 0x69, 0x68, 0x68, + 0x67, 0x69, 0x68, 0x69, 0x69, 0x69, 0x69, 0x69, 0x68, 0x69, 0x69, 0x6a, + 0x6b, 0x6b, 0x6b, 0x6c, 0x6b, 0x6d, 0x6d, 0x6d, 0x6e, 0x6d, 0x6c, 0x6d, + 0x6c, 0x6b, 0x6c, 0x6b, 0x69, 0x68, 0x66, 0x64, 0x5d, 0x54, 0x43, 0x0f, + 0xef, 0xde, 0xd2, 0xd7, 0xe1, 0xe1, 0xeb, 0xe0, 0xe7, 0xe0, 0xe2, 0xea, + 0xe8, 0xe1, 0xdd, 0xdd, 0xe1, 0xe7, 0xe4, 0xe1, 0xdf, 0xe1, 0xd9, 0xd4, + 0xed, 0xe7, 0xdb, 0xd9, 0xe0, 0xde, 0xd7, 0xdd, 0xdc, 0xc9, 0xc4, 0xc8, + 0xd9, 0xd9, 0xe4, 0xf6, 0xed, 0xe6, 0xe5, 0xdf, 0xdf, 0xdd, 0xda, 0xe2, + 0xe0, 0xda, 0xdc, 0xd8, 0xd6, 0xd8, 0xe3, 0xe0, 0xd7, 0xd6, 0xd0, 0xcc, + 0xcc, 0xce, 0xd6, 0xdc, 0xe5, 0xdc, 0xd0, 0xd4, 0xda, 0xd9, 0xde, 0xdd, + 0xe3, 0xe2, 0xda, 0xd4, 0xd3, 0xda, 0xdb, 0xdc, 0xd3, 0xd3, 0xd4, 0xdc, + 0xe3, 0xdb, 0xd7, 0xe0, 0xec, 0xe1, 0xe7, 0xf0, 0xdc, 0xe5, 0xe2, 0xe4, + 0xe2, 0xe5, 0xe5, 0xdf, 0xde, 0xde, 0xe3, 0xe1, 0x2d, 0x2b, 0x29, 0x2b, + 0x2e, 0x2e, 0x30, 0x33, 0x33, 0x31, 0x33, 0x3b, 0x39, 0x37, 0x36, 0x38, + 0x39, 0x3d, 0x41, 0x3f, 0x40, 0x44, 0x45, 0x45, 0x44, 0x44, 0x45, 0x44, + 0x44, 0x45, 0x46, 0x43, 0x3e, 0x43, 0x49, 0x4c, 0x4a, 0x4b, 0x4a, 0x48, + 0x4c, 0x50, 0x51, 0x52, 0x4f, 0x4f, 0x4f, 0x58, 0x53, 0x57, 0x59, 0x57, + 0x57, 0x59, 0x5b, 0x5c, 0x5d, 0x5e, 0x5e, 0x5d, 0x5c, 0x5b, 0x5b, 0x60, + 0x60, 0x62, 0x62, 0x62, 0x62, 0x62, 0x62, 0x5d, 0x5a, 0x5a, 0x5e, 
0x62, + 0x63, 0x63, 0x63, 0x65, 0x68, 0x69, 0x68, 0x68, 0x67, 0x68, 0x68, 0x6a, + 0x69, 0x69, 0x69, 0x69, 0x68, 0x69, 0x69, 0x69, 0x6a, 0x6b, 0x6b, 0x6b, + 0x6b, 0x6c, 0x6c, 0x6d, 0x6e, 0x6d, 0x6c, 0x6c, 0x6c, 0x6c, 0x6b, 0x6b, + 0x6b, 0x69, 0x67, 0x65, 0x63, 0x5d, 0x4d, 0x31, 0xfe, 0xdf, 0xd4, 0xda, + 0xea, 0xe3, 0xe9, 0xeb, 0xe5, 0xe2, 0xdb, 0xda, 0xde, 0xda, 0xd8, 0xdb, + 0xda, 0xe7, 0xec, 0xeb, 0xdd, 0xde, 0xde, 0xdf, 0xe6, 0xe1, 0xda, 0xd8, + 0xd4, 0xe0, 0xdb, 0xe3, 0xec, 0xd8, 0xd4, 0xd5, 0xda, 0xd4, 0xd9, 0xe9, + 0xe7, 0xe5, 0xe5, 0xde, 0xd5, 0xcd, 0xd0, 0xe4, 0xe0, 0xd4, 0xd1, 0xd5, + 0xda, 0xdd, 0xdf, 0xe1, 0xd7, 0xdd, 0xcd, 0xce, 0xce, 0xca, 0xcb, 0xd6, + 0xde, 0xe1, 0xdd, 0xd9, 0xd7, 0xdb, 0xe5, 0xe2, 0xe6, 0xe6, 0xd8, 0xd4, + 0xd4, 0xda, 0xd9, 0xd9, 0xd4, 0xce, 0xd1, 0xde, 0xda, 0xd9, 0xd6, 0xdb, + 0xe0, 0xdd, 0xe6, 0xec, 0xe2, 0xea, 0xe2, 0xdd, 0xde, 0xe0, 0xda, 0xd6, + 0xd9, 0xe3, 0xe1, 0xdf, 0x26, 0x27, 0x2b, 0x31, 0x34, 0x34, 0x33, 0x32, + 0x34, 0x36, 0x3b, 0x39, 0x33, 0x31, 0x37, 0x39, 0x3c, 0x40, 0x41, 0x40, + 0x43, 0x44, 0x42, 0x44, 0x44, 0x44, 0x44, 0x44, 0x45, 0x46, 0x44, 0x43, + 0x44, 0x4a, 0x4e, 0x4c, 0x4a, 0x4f, 0x4c, 0x48, 0x4e, 0x50, 0x50, 0x4d, + 0x4b, 0x50, 0x53, 0x59, 0x53, 0x57, 0x58, 0x56, 0x57, 0x5a, 0x5d, 0x5d, + 0x5f, 0x5f, 0x5f, 0x5e, 0x5c, 0x59, 0x5d, 0x60, 0x5f, 0x62, 0x63, 0x62, + 0x62, 0x63, 0x5e, 0x5c, 0x59, 0x57, 0x5e, 0x63, 0x60, 0x61, 0x63, 0x64, + 0x67, 0x69, 0x67, 0x68, 0x68, 0x67, 0x69, 0x6a, 0x6a, 0x69, 0x69, 0x69, + 0x68, 0x68, 0x69, 0x69, 0x69, 0x6a, 0x6a, 0x6b, 0x6b, 0x6c, 0x6c, 0x6c, + 0x6c, 0x6c, 0x6c, 0x6c, 0x6d, 0x6c, 0x6c, 0x6c, 0x6c, 0x6b, 0x6c, 0x6a, + 0x68, 0x64, 0x60, 0x52, 0x33, 0xf6, 0xe1, 0xe1, 0xe6, 0xe1, 0xe2, 0xea, + 0xe4, 0xe1, 0xd6, 0xd3, 0xd9, 0xd8, 0xd4, 0xda, 0xdf, 0xe9, 0xed, 0xe3, + 0xd4, 0xd0, 0xd3, 0xdd, 0xdf, 0xda, 0xd9, 0xdb, 0xda, 0xe0, 0xe1, 0xe3, + 0xe3, 0xe5, 0xd5, 0xdb, 0xda, 0xce, 0xd6, 0xe1, 0xe4, 0xe3, 0xe2, 0xda, + 0xce, 0xce, 0xd3, 0xe7, 0xdc, 0xd3, 0xd1, 0xd4, 0xdd, 0xe3, 0xe1, 
0xe3, + 0xd4, 0xdf, 0xd4, 0xc9, 0xce, 0xd0, 0xd4, 0xdb, 0xd8, 0xdb, 0xe1, 0xe1, + 0xde, 0xe0, 0xe5, 0xe4, 0xe0, 0xe0, 0xde, 0xdb, 0xd9, 0xda, 0xe1, 0xe0, + 0xd4, 0xc9, 0xd8, 0xe2, 0xd6, 0xd7, 0xdb, 0xde, 0xdd, 0xe1, 0xe7, 0xe8, + 0xe0, 0xe1, 0xe2, 0xe2, 0xe1, 0xdb, 0xd7, 0xd9, 0xdb, 0xe0, 0xe3, 0xe6, + 0x27, 0x2e, 0x32, 0x35, 0x37, 0x37, 0x36, 0x37, 0x39, 0x3b, 0x38, 0x35, + 0x34, 0x33, 0x35, 0x3b, 0x3f, 0x42, 0x41, 0x43, 0x45, 0x44, 0x43, 0x44, + 0x44, 0x44, 0x46, 0x46, 0x46, 0x46, 0x43, 0x44, 0x4a, 0x4d, 0x4c, 0x4d, + 0x51, 0x52, 0x4a, 0x48, 0x4f, 0x50, 0x50, 0x4d, 0x4c, 0x50, 0x57, 0x57, + 0x54, 0x57, 0x57, 0x56, 0x57, 0x5c, 0x5f, 0x5d, 0x5f, 0x5f, 0x60, 0x5d, + 0x5d, 0x5b, 0x5d, 0x5f, 0x5e, 0x63, 0x63, 0x62, 0x63, 0x62, 0x5d, 0x5c, + 0x5a, 0x59, 0x5f, 0x60, 0x5d, 0x62, 0x63, 0x63, 0x66, 0x69, 0x67, 0x68, + 0x68, 0x66, 0x69, 0x6a, 0x6a, 0x6a, 0x69, 0x68, 0x68, 0x69, 0x69, 0x69, + 0x6a, 0x6a, 0x6a, 0x6b, 0x6c, 0x6c, 0x6c, 0x6c, 0x6c, 0x6c, 0x6c, 0x6d, + 0x6d, 0x6d, 0x6c, 0x6c, 0x6c, 0x6c, 0x6c, 0x6b, 0x6a, 0x69, 0x68, 0x63, + 0x57, 0x2b, 0xf9, 0xea, 0xe3, 0xd8, 0xdd, 0xe8, 0xe8, 0xe4, 0xd6, 0xd4, + 0xda, 0xe0, 0xd6, 0xdb, 0xe7, 0xee, 0xf1, 0xde, 0xc8, 0xce, 0xd9, 0xdf, + 0xdb, 0xdf, 0xdc, 0xe0, 0xdf, 0xd6, 0xd8, 0xde, 0xe2, 0xe4, 0xe1, 0xe1, + 0xdf, 0xd2, 0xd4, 0xd9, 0xe4, 0xe3, 0xdf, 0xd6, 0xcd, 0xce, 0xde, 0xe9, + 0xe0, 0xd6, 0xd2, 0xd1, 0xdc, 0xe1, 0xe1, 0xe1, 0xda, 0xda, 0xdd, 0xd5, + 0xd4, 0xd1, 0xd2, 0xda, 0xd7, 0xda, 0xda, 0xd4, 0xdf, 0xdf, 0xe2, 0xdf, + 0xe5, 0xe4, 0xe7, 0xe8, 0xe2, 0xd9, 0xe2, 0xe5, 0xe1, 0xe0, 0xe4, 0xdf, + 0xdc, 0xd5, 0xd7, 0xdc, 0xda, 0xe1, 0xed, 0xde, 0xce, 0xd8, 0xe1, 0xe6, + 0xe4, 0xdd, 0xd4, 0xda, 0xdd, 0xd9, 0xdd, 0xdd, 0x2f, 0x33, 0x37, 0x38, + 0x38, 0x39, 0x39, 0x3a, 0x3b, 0x38, 0x35, 0x35, 0x33, 0x33, 0x37, 0x3c, + 0x40, 0x42, 0x43, 0x46, 0x46, 0x44, 0x44, 0x40, 0x41, 0x44, 0x46, 0x45, + 0x44, 0x43, 0x45, 0x47, 0x4b, 0x4a, 0x4b, 0x50, 0x56, 0x50, 0x4a, 0x4b, + 0x4e, 0x50, 0x50, 0x4f, 0x4d, 0x55, 0x59, 0x57, 0x57, 0x58, 0x58, 
0x57, + 0x58, 0x5d, 0x5e, 0x5d, 0x5d, 0x5e, 0x5e, 0x5d, 0x5d, 0x5b, 0x60, 0x5f, + 0x5f, 0x63, 0x63, 0x63, 0x63, 0x60, 0x5d, 0x5b, 0x5a, 0x5a, 0x5f, 0x5d, + 0x5c, 0x5f, 0x62, 0x63, 0x63, 0x66, 0x68, 0x67, 0x68, 0x67, 0x69, 0x69, + 0x69, 0x6b, 0x69, 0x67, 0x69, 0x6a, 0x6c, 0x6b, 0x6a, 0x69, 0x69, 0x6a, + 0x6c, 0x6c, 0x6c, 0x6c, 0x6c, 0x6c, 0x6c, 0x6d, 0x6d, 0x6d, 0x6d, 0x6d, + 0x6d, 0x6d, 0x6e, 0x6d, 0x6c, 0x6b, 0x69, 0x6a, 0x68, 0x57, 0x2b, 0x02, + 0xe7, 0xde, 0xd5, 0xe0, 0xe3, 0xdc, 0xd4, 0xd4, 0xd7, 0xd2, 0xd2, 0xd6, + 0xe2, 0xeb, 0xf1, 0xe5, 0xd2, 0xd2, 0xe7, 0xe5, 0xe1, 0xe1, 0xda, 0xd8, + 0xe1, 0xda, 0xd7, 0xd4, 0xde, 0xe1, 0xec, 0xe7, 0xe1, 0xdd, 0xd3, 0xd3, + 0xda, 0xe2, 0xd9, 0xd8, 0xd2, 0xd4, 0xe1, 0xdf, 0xd4, 0xdc, 0xdb, 0xd5, + 0xd6, 0xd6, 0xde, 0xdc, 0xdb, 0xdc, 0xea, 0xda, 0xde, 0xd7, 0xd2, 0xd8, + 0xd3, 0xda, 0xdc, 0xdc, 0xe3, 0xe7, 0xe2, 0xe1, 0xe4, 0xe1, 0xe3, 0xdf, + 0xda, 0xd8, 0xe2, 0xe3, 0xf3, 0xed, 0xde, 0xde, 0xe1, 0xd8, 0xd0, 0xd4, + 0xd4, 0xe6, 0xe9, 0xd5, 0xcf, 0xd9, 0xe1, 0xe1, 0xe3, 0xe2, 0xd7, 0xda, + 0xe7, 0xdf, 0xe0, 0xda, 0x37, 0x3b, 0x39, 0x38, 0x3a, 0x3c, 0x3b, 0x3a, + 0x39, 0x38, 0x37, 0x35, 0x34, 0x37, 0x3c, 0x3e, 0x41, 0x43, 0x44, 0x47, + 0x44, 0x43, 0x41, 0x3e, 0x41, 0x44, 0x47, 0x44, 0x45, 0x43, 0x42, 0x48, + 0x4b, 0x4c, 0x4e, 0x52, 0x54, 0x4d, 0x4d, 0x4f, 0x4d, 0x50, 0x51, 0x50, + 0x50, 0x57, 0x59, 0x57, 0x58, 0x58, 0x59, 0x57, 0x5b, 0x5d, 0x5d, 0x5c, + 0x5d, 0x5d, 0x5e, 0x5e, 0x5d, 0x5d, 0x61, 0x5e, 0x60, 0x64, 0x63, 0x62, + 0x61, 0x5e, 0x5c, 0x5b, 0x59, 0x5a, 0x5d, 0x5b, 0x5c, 0x5d, 0x60, 0x62, + 0x62, 0x65, 0x65, 0x66, 0x67, 0x66, 0x69, 0x68, 0x68, 0x6b, 0x69, 0x68, + 0x69, 0x69, 0x69, 0x6a, 0x6a, 0x6a, 0x6a, 0x6b, 0x6b, 0x6c, 0x6c, 0x6c, + 0x6d, 0x6d, 0x6d, 0x6d, 0x6d, 0x6e, 0x6f, 0x6f, 0x6e, 0x6f, 0x6f, 0x6f, + 0x6e, 0x6d, 0x6a, 0x6a, 0x6a, 0x68, 0x58, 0x33, 0x00, 0xe4, 0xe1, 0xe6, + 0xe3, 0xd7, 0xcf, 0xd1, 0xd5, 0xcd, 0xd0, 0xd4, 0xd7, 0xee, 0xef, 0xe7, + 0xda, 0xd4, 0xe2, 0xef, 0xea, 0xe7, 0xdc, 0xd4, 0xe0, 0xe3, 0xd9, 
0xd4, + 0xd9, 0xe0, 0xe4, 0xe9, 0xe3, 0xd6, 0xd2, 0xdd, 0xdb, 0xde, 0xd8, 0xdb, + 0xd3, 0xd6, 0xda, 0xd3, 0xce, 0xd5, 0xdf, 0xd5, 0xce, 0xd6, 0xe3, 0xe0, + 0xdd, 0xd3, 0xd8, 0xd7, 0xd7, 0xd7, 0xd7, 0xde, 0xd5, 0xd6, 0xe1, 0xe3, + 0xe4, 0xe5, 0xe9, 0xe6, 0xdc, 0xe0, 0xe4, 0xe3, 0xdd, 0xda, 0xdc, 0xd9, + 0xf1, 0xe8, 0xda, 0xdc, 0xe3, 0xe1, 0xce, 0xd1, 0xd9, 0xde, 0xeb, 0xe5, + 0xe7, 0xe2, 0xe1, 0xdc, 0xe0, 0xe1, 0xd9, 0xd7, 0xed, 0xeb, 0xe2, 0xdc, + 0x3b, 0x3b, 0x3a, 0x3b, 0x3a, 0x3c, 0x3c, 0x3c, 0x3c, 0x3b, 0x38, 0x36, + 0x37, 0x3c, 0x3e, 0x3e, 0x43, 0x43, 0x44, 0x45, 0x44, 0x43, 0x43, 0x3f, + 0x42, 0x48, 0x48, 0x44, 0x45, 0x43, 0x42, 0x4a, 0x4b, 0x4c, 0x50, 0x55, + 0x4f, 0x4c, 0x50, 0x4d, 0x4d, 0x50, 0x52, 0x51, 0x51, 0x5a, 0x57, 0x57, + 0x57, 0x59, 0x59, 0x57, 0x5b, 0x5d, 0x5f, 0x5b, 0x5c, 0x5c, 0x5d, 0x5e, + 0x5c, 0x5d, 0x5e, 0x5e, 0x61, 0x65, 0x63, 0x61, 0x5f, 0x5e, 0x5d, 0x5a, + 0x58, 0x5b, 0x5c, 0x5a, 0x5a, 0x5c, 0x5f, 0x60, 0x61, 0x63, 0x62, 0x64, + 0x65, 0x65, 0x66, 0x64, 0x66, 0x69, 0x6a, 0x69, 0x69, 0x69, 0x69, 0x69, + 0x69, 0x6a, 0x6a, 0x6c, 0x6c, 0x6c, 0x6c, 0x6d, 0x6d, 0x6e, 0x6d, 0x6e, + 0x6e, 0x6f, 0x6f, 0x6f, 0x6f, 0x6f, 0x6f, 0x6f, 0x6e, 0x6e, 0x6d, 0x6c, + 0x6c, 0x6b, 0x66, 0x51, 0x25, 0xf1, 0xe5, 0xe7, 0xe7, 0xdd, 0xd7, 0xe1, + 0xdc, 0xd5, 0xd4, 0xd4, 0xd2, 0xf1, 0xed, 0xe6, 0xd7, 0xd6, 0xe2, 0xed, + 0xf9, 0xf9, 0xe5, 0xde, 0xdd, 0xdb, 0xd6, 0xd4, 0xd4, 0xda, 0xe3, 0xe7, + 0xde, 0xd5, 0xd3, 0xd0, 0xd9, 0xdd, 0xe0, 0xe2, 0xdc, 0xdf, 0xdb, 0xd0, + 0xd1, 0xd4, 0xd8, 0xd7, 0xd2, 0xd4, 0xe0, 0xe1, 0xe2, 0xd5, 0xd8, 0xd7, + 0xd4, 0xcb, 0xd2, 0xd6, 0xd2, 0xd6, 0xe8, 0xe4, 0xf5, 0xe9, 0xe6, 0xdb, + 0xd7, 0xdd, 0xda, 0xda, 0xdd, 0xe0, 0xdd, 0xd7, 0xe6, 0xe3, 0xe1, 0xe0, + 0xdc, 0xe3, 0xd9, 0xd6, 0xdd, 0xe0, 0xed, 0xe8, 0xe7, 0xe5, 0xe4, 0xda, + 0xdb, 0xe0, 0xd9, 0xd8, 0xda, 0xe7, 0xe0, 0xda, 0x3b, 0x3a, 0x39, 0x3a, + 0x3a, 0x3c, 0x3c, 0x3d, 0x3c, 0x3a, 0x38, 0x3a, 0x40, 0x41, 0x41, 0x42, + 0x44, 0x46, 0x46, 0x46, 0x44, 0x43, 0x40, 0x41, 0x45, 0x48, 0x44, 
0x44, + 0x46, 0x41, 0x44, 0x4a, 0x4a, 0x4a, 0x52, 0x4f, 0x4d, 0x50, 0x50, 0x4e, + 0x4e, 0x50, 0x50, 0x52, 0x54, 0x5a, 0x55, 0x55, 0x57, 0x58, 0x59, 0x56, + 0x5b, 0x5e, 0x5e, 0x58, 0x5b, 0x5c, 0x5d, 0x5d, 0x5d, 0x5d, 0x5d, 0x5d, + 0x61, 0x63, 0x62, 0x5f, 0x5d, 0x5d, 0x5b, 0x58, 0x57, 0x5b, 0x5c, 0x5a, + 0x59, 0x5b, 0x60, 0x62, 0x62, 0x60, 0x61, 0x63, 0x65, 0x66, 0x63, 0x63, + 0x67, 0x6a, 0x68, 0x65, 0x67, 0x66, 0x67, 0x68, 0x69, 0x69, 0x69, 0x6b, + 0x6b, 0x6c, 0x6c, 0x6d, 0x6d, 0x6e, 0x6d, 0x6e, 0x6f, 0x6f, 0x6f, 0x6f, + 0x6f, 0x6f, 0x6f, 0x6f, 0x6f, 0x6e, 0x6f, 0x6e, 0x6e, 0x6e, 0x6b, 0x62, + 0x42, 0x06, 0xe7, 0xe9, 0xe9, 0xe3, 0xe2, 0xe1, 0xd8, 0xd9, 0xdc, 0xd4, + 0xd5, 0xe5, 0xe5, 0xe1, 0xda, 0xde, 0xe5, 0xe2, 0x00, 0xf5, 0xe6, 0xe1, + 0xdb, 0xdb, 0xd8, 0xd6, 0xd2, 0xd6, 0xe4, 0xe7, 0xd5, 0xca, 0xcf, 0xd4, + 0xd7, 0xe1, 0xeb, 0xe7, 0xe1, 0xe5, 0xdd, 0xd3, 0xcb, 0xcd, 0xde, 0xde, + 0xd2, 0xd2, 0xde, 0xdc, 0xe8, 0xd4, 0xcd, 0xcf, 0xc9, 0xc1, 0xcd, 0xd5, + 0xd8, 0xd8, 0xe7, 0xd6, 0xe7, 0xe9, 0xe4, 0xd5, 0xd4, 0xd7, 0xdc, 0xde, + 0xde, 0xe3, 0xdc, 0xde, 0xe7, 0xe3, 0xe5, 0xdf, 0xdb, 0xdb, 0xe5, 0xd7, + 0xdf, 0xe2, 0xe2, 0xdc, 0xe1, 0xe6, 0xe4, 0xd8, 0xd5, 0xdd, 0xda, 0xda, + 0xd0, 0xda, 0xd3, 0xdf, 0x38, 0x37, 0x37, 0x37, 0x3b, 0x3c, 0x3d, 0x3f, + 0x3d, 0x3d, 0x3d, 0x3e, 0x42, 0x43, 0x3f, 0x42, 0x44, 0x45, 0x44, 0x45, + 0x44, 0x3f, 0x41, 0x44, 0x45, 0x45, 0x44, 0x46, 0x44, 0x40, 0x47, 0x49, + 0x46, 0x4c, 0x4f, 0x4a, 0x4d, 0x4e, 0x4f, 0x4c, 0x4d, 0x50, 0x4f, 0x50, + 0x56, 0x59, 0x54, 0x55, 0x57, 0x57, 0x59, 0x58, 0x5d, 0x5f, 0x5e, 0x58, + 0x5a, 0x5b, 0x5c, 0x5c, 0x5d, 0x5d, 0x5e, 0x5e, 0x60, 0x62, 0x60, 0x5d, + 0x5a, 0x5a, 0x57, 0x58, 0x58, 0x5b, 0x5a, 0x57, 0x59, 0x5e, 0x62, 0x62, + 0x5f, 0x5f, 0x62, 0x64, 0x65, 0x64, 0x63, 0x63, 0x68, 0x69, 0x68, 0x65, + 0x65, 0x63, 0x64, 0x63, 0x66, 0x69, 0x69, 0x6a, 0x6a, 0x6b, 0x6b, 0x6c, + 0x6d, 0x6d, 0x6c, 0x6d, 0x6e, 0x6e, 0x6f, 0x6f, 0x6f, 0x6f, 0x6f, 0x6f, + 0x6f, 0x6f, 0x6e, 0x6f, 0x6f, 0x6f, 0x6e, 0x6a, 0x57, 0x25, 0xf3, 
0xef, + 0xe6, 0xdc, 0xe0, 0xd9, 0xd3, 0xdc, 0xe2, 0xd9, 0xde, 0xe1, 0xe2, 0xe4, + 0xe1, 0xdb, 0xe1, 0xdf, 0xf9, 0xec, 0xe6, 0xe5, 0xdf, 0xdb, 0xd9, 0xd6, + 0xd3, 0xd9, 0xe7, 0xea, 0xdd, 0xd3, 0xd7, 0xdf, 0xd9, 0xd4, 0xe4, 0xe4, + 0xda, 0xdd, 0xde, 0xd3, 0xd1, 0xd2, 0xe5, 0xdc, 0xd0, 0xcf, 0xd5, 0xd5, + 0xe1, 0xd9, 0xd2, 0xc6, 0xc4, 0xc2, 0xc8, 0xda, 0xe1, 0xe0, 0xe7, 0xd8, + 0xde, 0xea, 0xe7, 0xda, 0xd0, 0xd6, 0xda, 0xdb, 0xdd, 0xe0, 0xdf, 0xdc, + 0xea, 0xe5, 0xe5, 0xe2, 0xdc, 0xdf, 0xe5, 0xd7, 0xdd, 0xe1, 0xdf, 0xde, + 0xdb, 0xe2, 0xe1, 0xd1, 0xd4, 0xd7, 0xda, 0xe2, 0xda, 0xd8, 0xd3, 0xdc, + 0x37, 0x37, 0x37, 0x39, 0x3c, 0x3f, 0x42, 0x3e, 0x3c, 0x3d, 0x3e, 0x3e, + 0x41, 0x3e, 0x3f, 0x44, 0x44, 0x45, 0x45, 0x42, 0x3f, 0x41, 0x44, 0x44, + 0x47, 0x44, 0x44, 0x46, 0x44, 0x44, 0x49, 0x48, 0x48, 0x4c, 0x4a, 0x4a, + 0x4d, 0x4b, 0x4c, 0x49, 0x4d, 0x50, 0x4f, 0x51, 0x57, 0x58, 0x55, 0x57, + 0x57, 0x57, 0x57, 0x5a, 0x5d, 0x5f, 0x5b, 0x58, 0x59, 0x59, 0x59, 0x5c, + 0x5f, 0x5d, 0x5e, 0x5e, 0x60, 0x61, 0x5e, 0x57, 0x58, 0x56, 0x54, 0x58, + 0x5a, 0x5b, 0x59, 0x58, 0x5d, 0x62, 0x62, 0x62, 0x61, 0x61, 0x63, 0x65, + 0x65, 0x63, 0x63, 0x65, 0x69, 0x68, 0x69, 0x66, 0x65, 0x63, 0x66, 0x65, + 0x66, 0x69, 0x69, 0x6a, 0x6a, 0x6c, 0x6c, 0x6d, 0x6c, 0x6d, 0x6d, 0x6d, + 0x6d, 0x6d, 0x6e, 0x6e, 0x6f, 0x6f, 0x6f, 0x6f, 0x6f, 0x6f, 0x6f, 0x6f, + 0x6e, 0x6e, 0x6f, 0x6e, 0x67, 0x45, 0x0f, 0xf4, 0xe8, 0xdd, 0xda, 0xdc, + 0xe2, 0xe1, 0xe5, 0xd9, 0xdf, 0xe2, 0xdd, 0xd9, 0xd1, 0xd6, 0xda, 0xe0, + 0xf0, 0xea, 0xdf, 0xe7, 0xde, 0xd8, 0xda, 0xda, 0xd5, 0xd5, 0xe2, 0xe6, + 0xe3, 0xde, 0xdd, 0xe3, 0xe7, 0xd2, 0xda, 0xe1, 0xda, 0xd9, 0xda, 0xd0, + 0xd4, 0xde, 0xe4, 0xd5, 0xce, 0xd0, 0xd5, 0xd4, 0xe4, 0xdc, 0xe1, 0xcd, + 0xc4, 0xd0, 0xca, 0xc9, 0xda, 0xe7, 0xe8, 0xdc, 0xdb, 0xf8, 0xf3, 0xe0, + 0xd5, 0xe5, 0xd4, 0xd4, 0xdc, 0xda, 0xe5, 0xe6, 0xe5, 0xe6, 0xe2, 0xdb, + 0xd2, 0xda, 0xdc, 0xda, 0xe0, 0xe3, 0xe0, 0xda, 0xdb, 0xe1, 0xdf, 0xd4, + 0xd5, 0xde, 0xdd, 0xe0, 0xd7, 0xd4, 0xda, 0xdd, 0x37, 0x37, 0x37, 
0x39, + 0x3e, 0x41, 0x40, 0x3d, 0x3c, 0x39, 0x3c, 0x3f, 0x40, 0x41, 0x44, 0x45, + 0x44, 0x44, 0x44, 0x3f, 0x42, 0x46, 0x46, 0x46, 0x44, 0x44, 0x46, 0x46, + 0x44, 0x45, 0x4a, 0x4b, 0x4c, 0x4a, 0x45, 0x4c, 0x4b, 0x49, 0x47, 0x4b, + 0x50, 0x50, 0x4f, 0x53, 0x58, 0x57, 0x54, 0x57, 0x58, 0x58, 0x58, 0x5a, + 0x5d, 0x5d, 0x58, 0x57, 0x59, 0x57, 0x58, 0x5c, 0x5f, 0x5d, 0x5f, 0x5d, + 0x60, 0x61, 0x5d, 0x57, 0x57, 0x51, 0x54, 0x57, 0x5a, 0x5c, 0x5a, 0x5d, + 0x61, 0x62, 0x62, 0x64, 0x63, 0x62, 0x63, 0x64, 0x62, 0x63, 0x64, 0x65, + 0x69, 0x69, 0x68, 0x65, 0x66, 0x64, 0x65, 0x65, 0x65, 0x68, 0x69, 0x6a, + 0x6b, 0x6c, 0x6c, 0x6d, 0x6c, 0x6d, 0x6d, 0x6c, 0x6d, 0x6d, 0x6d, 0x6c, + 0x6d, 0x6f, 0x6f, 0x6f, 0x6f, 0x6f, 0x6e, 0x6f, 0x6e, 0x6e, 0x6f, 0x6f, + 0x6e, 0x61, 0x33, 0x02, 0xef, 0xea, 0xd5, 0xd8, 0xda, 0xd5, 0xdf, 0xcf, + 0xd6, 0xe1, 0xe0, 0xd3, 0xcf, 0xd5, 0xd8, 0xea, 0xe7, 0xe1, 0xd7, 0xda, + 0xd6, 0xd8, 0xdb, 0xdd, 0xd7, 0xd6, 0xd3, 0xdb, 0xdf, 0xe0, 0xdb, 0xd9, + 0xe7, 0xde, 0xda, 0xdd, 0xe3, 0xdd, 0xdc, 0xd7, 0xdd, 0xe7, 0xe4, 0xd4, + 0xd7, 0xd2, 0xd7, 0xda, 0xdc, 0xe0, 0xdc, 0xd4, 0xc7, 0xd0, 0xd0, 0xc8, + 0xd4, 0xe3, 0xe2, 0xdf, 0xd7, 0xec, 0xf4, 0xe7, 0xe1, 0xef, 0xe9, 0xe5, + 0xe2, 0xda, 0xe5, 0xee, 0xe0, 0xde, 0xe4, 0xd9, 0xcf, 0xd6, 0xda, 0xdd, + 0xe3, 0xe2, 0xdb, 0xda, 0xe0, 0xe2, 0xdc, 0xce, 0xd3, 0xe6, 0xdd, 0xd7, + 0xd5, 0xce, 0xdb, 0xda, 0x38, 0x39, 0x3b, 0x3e, 0x41, 0x40, 0x40, 0x3d, + 0x39, 0x3c, 0x40, 0x3e, 0x3e, 0x44, 0x45, 0x44, 0x43, 0x44, 0x41, 0x42, + 0x46, 0x47, 0x44, 0x44, 0x44, 0x45, 0x46, 0x44, 0x43, 0x48, 0x4f, 0x4d, + 0x47, 0x44, 0x4a, 0x4d, 0x48, 0x46, 0x47, 0x4d, 0x50, 0x4f, 0x4e, 0x50, + 0x57, 0x57, 0x55, 0x58, 0x56, 0x57, 0x58, 0x59, 0x5d, 0x5d, 0x57, 0x57, + 0x59, 0x56, 0x56, 0x5c, 0x5f, 0x5d, 0x5e, 0x5f, 0x61, 0x5e, 0x5d, 0x59, + 0x56, 0x50, 0x54, 0x58, 0x5c, 0x5d, 0x5d, 0x62, 0x61, 0x61, 0x63, 0x65, + 0x63, 0x63, 0x62, 0x63, 0x60, 0x61, 0x65, 0x68, 0x6a, 0x69, 0x68, 0x67, + 0x67, 0x65, 0x63, 0x64, 0x66, 0x69, 0x6a, 0x6a, 0x6c, 0x6c, 0x6c, 
0x6d, + 0x6c, 0x6d, 0x6d, 0x6c, 0x6c, 0x6c, 0x6c, 0x6c, 0x6d, 0x6e, 0x6f, 0x6f, + 0x6f, 0x6f, 0x6f, 0x6f, 0x6e, 0x6e, 0x6e, 0x6e, 0x6d, 0x6a, 0x54, 0x19, + 0xee, 0xe4, 0xd9, 0xdb, 0xd6, 0xd0, 0xd7, 0xc8, 0xd5, 0xdc, 0xd1, 0xce, + 0xd2, 0xd4, 0xd4, 0xe3, 0xdf, 0xd6, 0xd5, 0xd8, 0xdd, 0xda, 0xda, 0xda, + 0xd4, 0xda, 0xd2, 0xd8, 0xd8, 0xdf, 0xdd, 0xed, 0xeb, 0xd8, 0xda, 0xd9, + 0xe0, 0xe7, 0xde, 0xd5, 0xe5, 0xe8, 0xe9, 0xdd, 0xe7, 0xd9, 0xe1, 0xe2, + 0xd5, 0xd8, 0xd8, 0xd4, 0xd2, 0xd6, 0xd3, 0xd0, 0xd0, 0xd7, 0xe4, 0xe1, + 0xd7, 0xe2, 0xea, 0xe9, 0xe3, 0xeb, 0xec, 0xea, 0xe9, 0xe7, 0xe6, 0xf0, + 0xdf, 0xdd, 0xe2, 0xdf, 0xda, 0xd1, 0xda, 0xe1, 0xdc, 0xdb, 0xda, 0xe7, + 0xea, 0xdf, 0xd6, 0xd3, 0xd9, 0xe1, 0xd8, 0xd6, 0xd4, 0xd3, 0xe0, 0xda, + 0x38, 0x3a, 0x3c, 0x3c, 0x3e, 0x3e, 0x3c, 0x38, 0x38, 0x3c, 0x3f, 0x3d, + 0x40, 0x43, 0x43, 0x45, 0x46, 0x44, 0x44, 0x47, 0x48, 0x46, 0x43, 0x43, + 0x46, 0x49, 0x44, 0x44, 0x46, 0x4d, 0x4c, 0x46, 0x44, 0x49, 0x4b, 0x49, + 0x44, 0x46, 0x4a, 0x4d, 0x50, 0x4f, 0x4d, 0x50, 0x56, 0x57, 0x54, 0x57, + 0x53, 0x57, 0x59, 0x58, 0x5d, 0x5d, 0x57, 0x57, 0x57, 0x51, 0x57, 0x5c, + 0x5d, 0x5c, 0x5e, 0x60, 0x60, 0x5d, 0x59, 0x57, 0x55, 0x52, 0x57, 0x58, + 0x5d, 0x5f, 0x62, 0x63, 0x61, 0x65, 0x65, 0x64, 0x62, 0x5e, 0x5f, 0x61, + 0x60, 0x63, 0x64, 0x69, 0x6a, 0x69, 0x67, 0x67, 0x66, 0x65, 0x63, 0x64, + 0x67, 0x69, 0x69, 0x6a, 0x6b, 0x6c, 0x6b, 0x6c, 0x6c, 0x6d, 0x6d, 0x6c, + 0x6c, 0x6c, 0x6d, 0x6d, 0x6f, 0x6f, 0x6f, 0x6f, 0x6f, 0x6f, 0x6f, 0x6f, + 0x6f, 0x6e, 0x6d, 0x6c, 0x6c, 0x6c, 0x63, 0x31, 0xfc, 0xed, 0xe9, 0xe6, + 0xdd, 0xd6, 0xdd, 0xd6, 0xd9, 0xd6, 0xc8, 0xc7, 0xda, 0xd7, 0xd4, 0xdb, + 0xd8, 0xd1, 0xd3, 0xd2, 0xdb, 0xdf, 0xdd, 0xda, 0xd8, 0xd7, 0xcf, 0xd7, + 0xd7, 0xdd, 0xdb, 0xe5, 0xe1, 0xd6, 0xda, 0xe0, 0xd9, 0xe3, 0xe5, 0xdb, + 0xe7, 0xdd, 0xe2, 0xdf, 0xe4, 0xdc, 0xe2, 0xe1, 0xd5, 0xd3, 0xd7, 0xdc, + 0xde, 0xd6, 0xcf, 0xd4, 0xd4, 0xcb, 0xde, 0xe0, 0xd9, 0xdd, 0xdf, 0xe9, + 0xf1, 0xdf, 0xe2, 0xe7, 0xe7, 0xe7, 0xea, 0xed, 0xda, 0xd5, 0xd4, 
0xe1, + 0xed, 0xda, 0xdf, 0xe0, 0xd6, 0xcf, 0xde, 0xea, 0xea, 0xdb, 0xd6, 0xda, + 0xdf, 0xe4, 0xe1, 0xd8, 0xda, 0xd4, 0xe6, 0xde, 0x38, 0x3a, 0x3d, 0x3e, + 0x3c, 0x3a, 0x39, 0x37, 0x39, 0x3a, 0x3e, 0x3e, 0x41, 0x43, 0x42, 0x45, + 0x45, 0x44, 0x44, 0x48, 0x47, 0x44, 0x3f, 0x44, 0x4a, 0x48, 0x41, 0x43, + 0x4a, 0x50, 0x48, 0x44, 0x49, 0x48, 0x47, 0x46, 0x45, 0x48, 0x4a, 0x4a, + 0x4f, 0x50, 0x4d, 0x51, 0x53, 0x56, 0x56, 0x57, 0x52, 0x57, 0x57, 0x5a, + 0x5d, 0x5b, 0x57, 0x55, 0x56, 0x50, 0x57, 0x5c, 0x5d, 0x5b, 0x5d, 0x5f, + 0x5e, 0x5a, 0x57, 0x56, 0x54, 0x52, 0x57, 0x59, 0x5b, 0x5e, 0x62, 0x63, + 0x63, 0x66, 0x65, 0x64, 0x60, 0x5d, 0x5f, 0x5f, 0x61, 0x63, 0x64, 0x69, + 0x69, 0x69, 0x64, 0x66, 0x63, 0x65, 0x66, 0x67, 0x69, 0x6a, 0x6a, 0x6b, + 0x6b, 0x6c, 0x6b, 0x6b, 0x6c, 0x6c, 0x6c, 0x6c, 0x6d, 0x6d, 0x6d, 0x6d, + 0x6e, 0x6f, 0x6f, 0x6f, 0x6f, 0x6f, 0x6f, 0x6f, 0x6f, 0x6f, 0x6d, 0x6c, + 0x6a, 0x6a, 0x68, 0x52, 0x1f, 0x0f, 0x09, 0xed, 0xe4, 0xe7, 0xe2, 0xdf, + 0xe1, 0xde, 0xd0, 0xc6, 0xd2, 0xd5, 0xd3, 0xd3, 0xd8, 0xd6, 0xd4, 0xcc, + 0xd4, 0xde, 0xd9, 0xd5, 0xd8, 0xd7, 0xcf, 0xd4, 0xd6, 0xda, 0xda, 0xdf, + 0xe2, 0xed, 0xee, 0xe4, 0xdb, 0xe0, 0xe7, 0xe4, 0xe8, 0xd4, 0xe1, 0xde, + 0xda, 0xdb, 0xe7, 0xe3, 0xd6, 0xd7, 0xd5, 0xde, 0xe5, 0xd6, 0xd0, 0xd4, + 0xda, 0xd2, 0xdd, 0xe1, 0xd6, 0xda, 0xdb, 0xe0, 0xec, 0xe0, 0xdd, 0xe1, + 0xda, 0xd5, 0xe6, 0xeb, 0xe2, 0xe7, 0xcf, 0xd2, 0xe2, 0xe1, 0xdb, 0xdb, + 0xdc, 0xda, 0xe1, 0xde, 0xdf, 0xda, 0xda, 0xdd, 0xda, 0xe1, 0xf1, 0xed, + 0xd8, 0xc9, 0xd8, 0xdb, 0x38, 0x3a, 0x3b, 0x39, 0x38, 0x37, 0x37, 0x38, + 0x3b, 0x3d, 0x3e, 0x3e, 0x3d, 0x3d, 0x3f, 0x44, 0x44, 0x43, 0x46, 0x4a, + 0x47, 0x43, 0x43, 0x48, 0x4a, 0x42, 0x3e, 0x44, 0x4e, 0x4d, 0x45, 0x48, + 0x4a, 0x48, 0x48, 0x47, 0x45, 0x48, 0x49, 0x4a, 0x4e, 0x50, 0x4e, 0x50, + 0x50, 0x56, 0x57, 0x57, 0x53, 0x55, 0x56, 0x5a, 0x5b, 0x57, 0x57, 0x54, + 0x53, 0x55, 0x57, 0x5c, 0x5d, 0x5b, 0x5b, 0x5e, 0x5d, 0x58, 0x55, 0x54, + 0x50, 0x51, 0x57, 0x5b, 0x5a, 0x5e, 0x63, 0x63, 0x63, 0x63, 0x63, 
0x5e, + 0x5d, 0x5d, 0x5d, 0x5f, 0x61, 0x65, 0x67, 0x6a, 0x6a, 0x68, 0x64, 0x63, + 0x63, 0x63, 0x66, 0x68, 0x69, 0x6a, 0x6a, 0x6c, 0x6c, 0x6c, 0x6b, 0x6b, + 0x6c, 0x6c, 0x6b, 0x6c, 0x6d, 0x6d, 0x6d, 0x6d, 0x6e, 0x6e, 0x6e, 0x6f, + 0x6f, 0x6f, 0x6f, 0x6f, 0x6f, 0x6f, 0x6d, 0x6b, 0x69, 0x69, 0x68, 0x62, + 0x3f, 0x1c, 0x07, 0xf5, 0xe8, 0xe4, 0xe1, 0xd2, 0xce, 0xda, 0xe9, 0xe4, + 0xe0, 0xd4, 0xda, 0xe2, 0xd7, 0xd4, 0xd4, 0xd8, 0xd5, 0xdc, 0xd4, 0xd1, + 0xcc, 0xd2, 0xd4, 0xda, 0xd0, 0xd5, 0xdc, 0xe0, 0xe1, 0xe7, 0xef, 0xe0, + 0xda, 0xe1, 0xe5, 0xe0, 0xe1, 0xd7, 0xde, 0xda, 0xd9, 0xdf, 0xe1, 0xd8, + 0xd4, 0xdd, 0xd9, 0xda, 0xe4, 0xe0, 0xda, 0xd9, 0xe1, 0xd5, 0xd2, 0xe1, + 0xd7, 0xd9, 0xda, 0xd9, 0xe0, 0xdc, 0xda, 0xe0, 0xe1, 0xd8, 0xe5, 0xe9, + 0xe6, 0xed, 0xe6, 0xd3, 0xe3, 0xe2, 0xd8, 0xd4, 0xe1, 0xe5, 0xed, 0xe2, + 0xe6, 0xe8, 0xdf, 0xdf, 0xe1, 0xe2, 0xe3, 0xd9, 0xcc, 0xc8, 0xce, 0xd4, + 0x3b, 0x3a, 0x38, 0x37, 0x37, 0x37, 0x3a, 0x3b, 0x38, 0x3b, 0x3c, 0x3b, + 0x39, 0x3d, 0x41, 0x45, 0x43, 0x44, 0x46, 0x48, 0x44, 0x41, 0x49, 0x4a, + 0x44, 0x41, 0x43, 0x4a, 0x50, 0x48, 0x48, 0x4b, 0x49, 0x48, 0x48, 0x45, + 0x46, 0x48, 0x48, 0x4d, 0x4d, 0x4f, 0x4f, 0x51, 0x4f, 0x54, 0x56, 0x55, + 0x53, 0x54, 0x57, 0x59, 0x5a, 0x55, 0x53, 0x50, 0x52, 0x57, 0x57, 0x5c, + 0x5d, 0x5b, 0x5b, 0x5d, 0x59, 0x57, 0x53, 0x53, 0x51, 0x55, 0x57, 0x59, + 0x5b, 0x61, 0x63, 0x63, 0x62, 0x61, 0x5d, 0x5b, 0x5b, 0x5a, 0x5b, 0x5d, + 0x63, 0x67, 0x69, 0x6b, 0x6a, 0x65, 0x63, 0x62, 0x61, 0x63, 0x68, 0x69, + 0x69, 0x6b, 0x69, 0x6b, 0x6d, 0x6d, 0x6d, 0x6d, 0x6b, 0x6d, 0x6b, 0x6b, + 0x6c, 0x6c, 0x6d, 0x6e, 0x6e, 0x6f, 0x6f, 0x6f, 0x6f, 0x6f, 0x6f, 0x6f, + 0x6f, 0x6f, 0x6f, 0x6d, 0x6a, 0x69, 0x68, 0x67, 0x5a, 0x2f, 0x00, 0xf6, + 0xf3, 0xe1, 0xdb, 0xda, 0xbe, 0xc3, 0xcb, 0xd3, 0xe1, 0xd1, 0xd1, 0xe1, + 0xde, 0xd9, 0xd9, 0xd8, 0xd8, 0xdb, 0xd8, 0xce, 0xc8, 0xd3, 0xd4, 0xe3, + 0xd3, 0xd4, 0xda, 0xda, 0xdf, 0xda, 0xe6, 0xda, 0xda, 0xe0, 0xdf, 0xdd, + 0xe1, 0xde, 0xd7, 0xd9, 0xdd, 0xe1, 0xdf, 0xdc, 0xdc, 0xd8, 0xd8, 
0xd9, + 0xdf, 0xdb, 0xdb, 0xdb, 0xe2, 0xe1, 0xd3, 0xdc, 0xd6, 0xd4, 0xd6, 0xd9, + 0xe4, 0xde, 0xdf, 0xea, 0xe8, 0xdd, 0xe5, 0xf3, 0xeb, 0xe5, 0xe3, 0xd8, + 0xe3, 0xde, 0xd5, 0xd8, 0xdc, 0xd9, 0xe5, 0xec, 0xe1, 0xee, 0xe6, 0xe1, + 0xed, 0xe8, 0xe0, 0xda, 0xce, 0xd4, 0xd5, 0xd9, 0x38, 0x37, 0x36, 0x37, + 0x37, 0x37, 0x37, 0x37, 0x38, 0x3c, 0x3b, 0x37, 0x3b, 0x3e, 0x43, 0x44, + 0x43, 0x48, 0x47, 0x44, 0x41, 0x47, 0x4a, 0x45, 0x43, 0x40, 0x44, 0x4e, + 0x4f, 0x48, 0x49, 0x4a, 0x49, 0x47, 0x45, 0x45, 0x49, 0x4a, 0x4a, 0x4d, + 0x4c, 0x4f, 0x50, 0x4f, 0x4e, 0x53, 0x56, 0x53, 0x50, 0x50, 0x56, 0x57, + 0x59, 0x53, 0x4f, 0x4d, 0x51, 0x56, 0x58, 0x5d, 0x5d, 0x59, 0x59, 0x56, + 0x55, 0x57, 0x51, 0x4f, 0x50, 0x54, 0x57, 0x59, 0x5c, 0x61, 0x63, 0x60, + 0x5d, 0x5c, 0x5a, 0x5c, 0x5b, 0x5a, 0x56, 0x5d, 0x63, 0x66, 0x6a, 0x6b, + 0x6a, 0x64, 0x60, 0x5f, 0x61, 0x63, 0x69, 0x6a, 0x6b, 0x6b, 0x69, 0x6b, + 0x6d, 0x6d, 0x6d, 0x6d, 0x6c, 0x6e, 0x6c, 0x6a, 0x6a, 0x6a, 0x6b, 0x6e, + 0x6e, 0x6f, 0x6f, 0x6f, 0x6f, 0x6f, 0x6f, 0x6f, 0x6f, 0x6f, 0x6f, 0x6e, + 0x6b, 0x69, 0x69, 0x68, 0x64, 0x50, 0x1e, 0x00, 0xf3, 0xe1, 0xd4, 0xda, + 0xc9, 0xd7, 0xd5, 0xc5, 0xda, 0xd1, 0xd0, 0xd0, 0xe2, 0xe6, 0xde, 0xd7, + 0xda, 0xd3, 0xda, 0xce, 0xc7, 0xce, 0xd8, 0xe5, 0xd6, 0xcb, 0xd4, 0xd9, + 0xe0, 0xe3, 0xe3, 0xd9, 0xd6, 0xe4, 0xdc, 0xda, 0xda, 0xe2, 0xe2, 0xe1, + 0xe1, 0xe1, 0xdc, 0xdc, 0xd7, 0xd0, 0xd2, 0xda, 0xdf, 0xdb, 0xe1, 0xe5, + 0xe0, 0xe9, 0xdd, 0xe1, 0xcc, 0xcc, 0xcf, 0xda, 0xe4, 0xda, 0xe1, 0xf5, + 0xe8, 0xe1, 0xe5, 0xe8, 0xe1, 0xe1, 0xdb, 0xd5, 0xdc, 0xda, 0xcc, 0xd5, + 0xd3, 0xd4, 0xdc, 0xe5, 0xdf, 0xeb, 0xe2, 0xe0, 0xe3, 0xe9, 0xe1, 0xdf, + 0xd7, 0xd9, 0xdd, 0xdf, 0x36, 0x36, 0x36, 0x38, 0x37, 0x37, 0x37, 0x37, + 0x38, 0x39, 0x36, 0x37, 0x3a, 0x40, 0x44, 0x44, 0x43, 0x47, 0x44, 0x42, + 0x45, 0x4a, 0x49, 0x45, 0x41, 0x41, 0x47, 0x4f, 0x4c, 0x46, 0x48, 0x48, + 0x49, 0x49, 0x48, 0x46, 0x4a, 0x4a, 0x4a, 0x4a, 0x4d, 0x50, 0x4f, 0x4c, + 0x4e, 0x52, 0x55, 0x50, 0x4d, 0x50, 0x53, 0x55, 0x57, 0x50, 0x4a, 
0x4c, + 0x50, 0x55, 0x58, 0x5b, 0x59, 0x56, 0x57, 0x50, 0x55, 0x50, 0x4d, 0x4d, + 0x4e, 0x52, 0x58, 0x5b, 0x5d, 0x62, 0x61, 0x5c, 0x5a, 0x57, 0x5a, 0x5b, + 0x5c, 0x58, 0x56, 0x5c, 0x63, 0x67, 0x6a, 0x6b, 0x6a, 0x63, 0x5f, 0x60, + 0x60, 0x63, 0x69, 0x69, 0x6b, 0x6a, 0x6a, 0x6b, 0x6e, 0x6d, 0x6c, 0x6d, + 0x6d, 0x6e, 0x6d, 0x6a, 0x69, 0x6a, 0x6b, 0x6c, 0x6d, 0x6e, 0x6f, 0x6f, + 0x6f, 0x6f, 0x6f, 0x6f, 0x6f, 0x6f, 0x6f, 0x6f, 0x6d, 0x69, 0x69, 0x69, + 0x69, 0x67, 0x4d, 0x10, 0xed, 0xea, 0xeb, 0xe9, 0xe2, 0xe0, 0xe3, 0xd9, + 0xdb, 0xd3, 0xd4, 0xd7, 0xe1, 0xed, 0xee, 0xee, 0xd7, 0xc8, 0xdb, 0xce, + 0xb7, 0xc8, 0xd3, 0xd7, 0xd7, 0xca, 0xd2, 0xe7, 0xed, 0xe6, 0xe1, 0xda, + 0xd6, 0xea, 0xdb, 0xdc, 0xda, 0xdc, 0xeb, 0xe6, 0xdc, 0xdd, 0xdd, 0xd7, + 0xd4, 0xcf, 0xda, 0xe5, 0xe3, 0xdf, 0xda, 0xdc, 0xe5, 0xe4, 0xeb, 0xe7, + 0xd1, 0xd6, 0xdb, 0xe2, 0xe3, 0xde, 0xf4, 0xf9, 0xe1, 0xdb, 0xe5, 0xe7, + 0xe1, 0xde, 0xd8, 0xcf, 0xdb, 0xf9, 0xe9, 0xcf, 0xc8, 0xd3, 0xda, 0xe2, + 0xdc, 0xe1, 0xe0, 0xe1, 0xe5, 0xe6, 0xe0, 0xde, 0xe2, 0xd8, 0xe3, 0xe5, + 0x36, 0x37, 0x38, 0x37, 0x37, 0x37, 0x38, 0x37, 0x37, 0x35, 0x34, 0x38, + 0x3c, 0x41, 0x43, 0x40, 0x44, 0x42, 0x40, 0x42, 0x47, 0x4a, 0x48, 0x43, + 0x41, 0x44, 0x4a, 0x4e, 0x48, 0x44, 0x49, 0x48, 0x48, 0x49, 0x48, 0x46, + 0x4c, 0x4b, 0x49, 0x4a, 0x4e, 0x50, 0x4d, 0x4c, 0x50, 0x55, 0x50, 0x4b, + 0x4d, 0x4e, 0x51, 0x55, 0x56, 0x51, 0x4c, 0x4f, 0x50, 0x54, 0x57, 0x59, + 0x52, 0x53, 0x50, 0x4f, 0x52, 0x49, 0x47, 0x4c, 0x4e, 0x58, 0x5b, 0x5c, + 0x5f, 0x60, 0x5c, 0x5b, 0x58, 0x57, 0x5a, 0x5b, 0x5b, 0x5a, 0x58, 0x5b, + 0x63, 0x67, 0x69, 0x6a, 0x69, 0x63, 0x5d, 0x60, 0x5f, 0x64, 0x69, 0x68, + 0x6a, 0x69, 0x6a, 0x6b, 0x6e, 0x6e, 0x6c, 0x6c, 0x6d, 0x6d, 0x6c, 0x6b, + 0x6b, 0x6c, 0x6d, 0x6e, 0x6d, 0x6d, 0x6e, 0x6e, 0x6e, 0x6f, 0x6e, 0x6e, + 0x6f, 0x6f, 0x6f, 0x6f, 0x6e, 0x69, 0x69, 0x68, 0x67, 0x69, 0x68, 0x41, + 0xf6, 0xeb, 0xeb, 0xec, 0xe1, 0xd6, 0xdb, 0xe2, 0xe0, 0xd7, 0xd5, 0xda, + 0xdb, 0xe5, 0xed, 0xe7, 0xe1, 0xe3, 0xf0, 0xe1, 0xcc, 0xe6, 0xdb, 
0xd5, + 0xd8, 0xdb, 0xdc, 0xe2, 0xeb, 0xed, 0xe3, 0xda, 0xde, 0xed, 0xdf, 0xde, + 0xd8, 0xda, 0xf1, 0xe5, 0xda, 0xe7, 0xe3, 0xd9, 0xd4, 0xd0, 0xdc, 0xe1, + 0xdc, 0xd4, 0xdf, 0xd3, 0xdd, 0xde, 0xe2, 0xda, 0xd2, 0xd9, 0xe0, 0xda, + 0xe0, 0xda, 0xe6, 0xea, 0xe2, 0xdc, 0xe4, 0xe8, 0xe9, 0xe1, 0xdb, 0xd2, + 0xd9, 0xed, 0xe7, 0xd9, 0xd1, 0xd3, 0xd8, 0xda, 0xda, 0xd9, 0xdf, 0xe7, + 0xe3, 0xdf, 0xdd, 0xd6, 0xe7, 0xeb, 0xe2, 0xe1, 0x37, 0x37, 0x37, 0x35, + 0x37, 0x38, 0x37, 0x36, 0x34, 0x34, 0x35, 0x3a, 0x3e, 0x3e, 0x3f, 0x3f, + 0x41, 0x41, 0x40, 0x46, 0x47, 0x46, 0x44, 0x42, 0x44, 0x45, 0x4a, 0x4b, + 0x45, 0x46, 0x49, 0x4a, 0x47, 0x48, 0x47, 0x45, 0x4b, 0x4a, 0x47, 0x48, + 0x4c, 0x4f, 0x4d, 0x4a, 0x50, 0x50, 0x4c, 0x4a, 0x4a, 0x4c, 0x50, 0x52, + 0x52, 0x50, 0x4f, 0x50, 0x4f, 0x50, 0x55, 0x57, 0x52, 0x50, 0x4a, 0x4d, + 0x4d, 0x44, 0x47, 0x4b, 0x55, 0x5a, 0x5a, 0x5b, 0x5d, 0x5c, 0x5a, 0x56, + 0x52, 0x57, 0x5b, 0x5a, 0x5b, 0x5d, 0x59, 0x5b, 0x61, 0x64, 0x69, 0x69, + 0x66, 0x63, 0x5f, 0x5e, 0x5f, 0x63, 0x67, 0x65, 0x69, 0x69, 0x69, 0x6b, + 0x6d, 0x6d, 0x6d, 0x6c, 0x6d, 0x6d, 0x6d, 0x6d, 0x6d, 0x6d, 0x6e, 0x6e, + 0x6e, 0x6c, 0x6d, 0x6c, 0x6d, 0x6e, 0x6d, 0x6d, 0x6d, 0x6f, 0x6f, 0x6f, + 0x6e, 0x6a, 0x69, 0x67, 0x65, 0x64, 0x65, 0x64, 0x47, 0x09, 0xec, 0xeb, + 0xe1, 0xda, 0xe2, 0xe3, 0xdb, 0xd4, 0xdd, 0xe0, 0xe1, 0xd8, 0xdf, 0xe7, + 0xe7, 0xf2, 0x00, 0xe9, 0xde, 0xf9, 0xe8, 0xe6, 0xde, 0xdf, 0xdb, 0xd9, + 0xe3, 0xe8, 0xe5, 0xda, 0xe0, 0xee, 0xe7, 0xdb, 0xd4, 0xc9, 0xdf, 0xe0, + 0xdc, 0xde, 0xde, 0xda, 0xdd, 0xdd, 0xd8, 0xd9, 0xd8, 0xd6, 0xea, 0xde, + 0xe3, 0xdf, 0xde, 0xdb, 0xd3, 0xde, 0xe1, 0xd4, 0xde, 0xe3, 0xdc, 0xde, + 0xe5, 0xe3, 0xe7, 0xe1, 0xe4, 0xe4, 0xe3, 0xd9, 0xda, 0xe1, 0xe2, 0xe2, + 0xe3, 0xd8, 0xd0, 0xd2, 0xde, 0xe5, 0xea, 0xeb, 0xd6, 0xd4, 0xdb, 0xd7, + 0xde, 0xe1, 0xe1, 0xdd, 0x35, 0x36, 0x35, 0x33, 0x35, 0x35, 0x35, 0x33, + 0x31, 0x34, 0x38, 0x3d, 0x3e, 0x3b, 0x3e, 0x3e, 0x3f, 0x41, 0x43, 0x47, + 0x43, 0x44, 0x42, 0x43, 0x44, 0x44, 0x49, 0x49, 0x44, 0x48, 0x47, 
0x49, + 0x47, 0x48, 0x45, 0x45, 0x4a, 0x4a, 0x46, 0x48, 0x4c, 0x4e, 0x4b, 0x4a, + 0x50, 0x48, 0x4a, 0x4a, 0x48, 0x4b, 0x50, 0x50, 0x4e, 0x54, 0x4e, 0x4d, + 0x49, 0x4b, 0x54, 0x56, 0x55, 0x4c, 0x4a, 0x4d, 0x48, 0x46, 0x48, 0x50, + 0x57, 0x57, 0x58, 0x5a, 0x57, 0x55, 0x55, 0x4f, 0x4d, 0x51, 0x58, 0x57, + 0x5d, 0x60, 0x5c, 0x5a, 0x60, 0x63, 0x68, 0x67, 0x65, 0x61, 0x5f, 0x5d, + 0x61, 0x63, 0x64, 0x67, 0x69, 0x69, 0x69, 0x6a, 0x6b, 0x6c, 0x6d, 0x6d, + 0x6d, 0x6c, 0x6c, 0x6d, 0x6c, 0x6b, 0x6c, 0x6d, 0x6d, 0x6d, 0x6d, 0x6c, + 0x6c, 0x6c, 0x6d, 0x6c, 0x6c, 0x6d, 0x6e, 0x6e, 0x6e, 0x6b, 0x69, 0x69, + 0x68, 0x67, 0x62, 0x5e, 0x5d, 0x2b, 0xf7, 0xf1, 0xed, 0xe6, 0xe4, 0xe9, + 0xeb, 0xe0, 0xdb, 0xde, 0xdc, 0xd4, 0xdc, 0xe7, 0xdc, 0xe3, 0xe7, 0xe0, + 0xd3, 0xe8, 0xf2, 0xe4, 0xdd, 0xd8, 0xd3, 0xd4, 0xe1, 0xe0, 0xe3, 0xdc, + 0xda, 0xe7, 0xe7, 0xdd, 0xe1, 0xda, 0xda, 0xde, 0xde, 0xd7, 0xdf, 0xe4, + 0xe3, 0xe3, 0xe6, 0xe7, 0xd5, 0xd3, 0xe7, 0xe3, 0xe7, 0xdf, 0xda, 0xdf, + 0xe7, 0xe7, 0xdd, 0xda, 0xca, 0xd4, 0xdd, 0xdb, 0xe1, 0xea, 0xe5, 0xec, + 0xea, 0xe4, 0xdc, 0xd8, 0xde, 0xdd, 0xde, 0xdf, 0xe0, 0xd7, 0xcc, 0xd7, + 0xe0, 0xe0, 0xf4, 0xf9, 0xdb, 0xd5, 0xde, 0xe2, 0xe2, 0xdf, 0xdf, 0xd9, + 0x36, 0x34, 0x31, 0x32, 0x35, 0x32, 0x31, 0x32, 0x33, 0x38, 0x3b, 0x39, + 0x3c, 0x3a, 0x3e, 0x3d, 0x3f, 0x42, 0x42, 0x41, 0x41, 0x44, 0x44, 0x44, + 0x43, 0x42, 0x47, 0x45, 0x42, 0x49, 0x47, 0x49, 0x48, 0x45, 0x44, 0x46, + 0x4a, 0x46, 0x44, 0x49, 0x4c, 0x4e, 0x4a, 0x4a, 0x49, 0x44, 0x46, 0x47, + 0x46, 0x4c, 0x4e, 0x49, 0x4d, 0x53, 0x4e, 0x4a, 0x44, 0x49, 0x56, 0x56, + 0x52, 0x4d, 0x4d, 0x49, 0x43, 0x45, 0x4b, 0x53, 0x52, 0x55, 0x55, 0x51, + 0x4c, 0x51, 0x51, 0x4a, 0x47, 0x4b, 0x53, 0x59, 0x5f, 0x61, 0x5c, 0x5b, + 0x5f, 0x63, 0x67, 0x66, 0x63, 0x5e, 0x5e, 0x5f, 0x62, 0x62, 0x63, 0x67, + 0x6a, 0x6a, 0x68, 0x69, 0x69, 0x6b, 0x6c, 0x6d, 0x6c, 0x6c, 0x6a, 0x6c, + 0x6b, 0x6b, 0x6b, 0x6a, 0x6b, 0x6c, 0x6d, 0x6c, 0x6c, 0x6d, 0x6d, 0x6d, + 0x6d, 0x6d, 0x6d, 0x6d, 0x6d, 0x6c, 0x6a, 0x69, 0x6a, 0x6b, 0x6a, 
0x67, + 0x63, 0x4d, 0x18, 0xf2, 0xed, 0xe7, 0xdc, 0xd8, 0xe4, 0xe3, 0xd5, 0xdb, + 0xd9, 0xd9, 0xdd, 0xe6, 0xe6, 0xea, 0xe3, 0xe4, 0xe4, 0xe7, 0xf7, 0xef, + 0xda, 0xd7, 0xd4, 0xd6, 0xe1, 0xdc, 0xde, 0xe5, 0xd8, 0xe0, 0xf1, 0xf3, + 0xed, 0xde, 0xdc, 0xe0, 0xe4, 0xdf, 0xe2, 0xe7, 0xe1, 0xdd, 0xea, 0xee, + 0xda, 0xd8, 0xe1, 0xe0, 0xe3, 0xe0, 0xe6, 0xf1, 0xeb, 0xdb, 0xdd, 0xe1, + 0xc9, 0xce, 0xda, 0xe0, 0xe0, 0xe7, 0xe6, 0xf2, 0xec, 0xe5, 0xd5, 0xd6, + 0xd7, 0xd6, 0xdb, 0xe1, 0xdc, 0xcc, 0xca, 0xda, 0xde, 0xd4, 0xe5, 0xed, + 0xdf, 0xdc, 0xde, 0xe1, 0xe2, 0xe0, 0xdc, 0xd9, 0x34, 0x32, 0x31, 0x32, + 0x31, 0x2f, 0x2d, 0x34, 0x36, 0x38, 0x38, 0x38, 0x3a, 0x3a, 0x3c, 0x3d, + 0x3f, 0x42, 0x40, 0x3e, 0x42, 0x44, 0x43, 0x40, 0x3e, 0x3e, 0x44, 0x41, + 0x44, 0x48, 0x47, 0x4a, 0x49, 0x43, 0x43, 0x47, 0x49, 0x43, 0x44, 0x49, + 0x4c, 0x4c, 0x49, 0x47, 0x48, 0x44, 0x45, 0x46, 0x48, 0x4b, 0x4b, 0x49, + 0x4c, 0x50, 0x4c, 0x43, 0x44, 0x4d, 0x53, 0x53, 0x4f, 0x50, 0x4b, 0x46, + 0x42, 0x48, 0x4c, 0x4e, 0x50, 0x4f, 0x4a, 0x4a, 0x50, 0x4e, 0x48, 0x45, + 0x47, 0x49, 0x50, 0x58, 0x61, 0x60, 0x5c, 0x5a, 0x5f, 0x64, 0x65, 0x65, + 0x5f, 0x5f, 0x5f, 0x61, 0x62, 0x61, 0x61, 0x68, 0x6a, 0x69, 0x68, 0x69, + 0x69, 0x6b, 0x6c, 0x6d, 0x6c, 0x6b, 0x69, 0x6a, 0x68, 0x68, 0x6a, 0x6b, + 0x6b, 0x6c, 0x6c, 0x6b, 0x6c, 0x6c, 0x6d, 0x6d, 0x6d, 0x6d, 0x6d, 0x6d, + 0x6d, 0x6b, 0x6a, 0x6a, 0x6b, 0x6a, 0x6a, 0x6b, 0x68, 0x5d, 0x3e, 0x0b, + 0xee, 0xe9, 0xe4, 0xe5, 0xe5, 0xe2, 0xd5, 0xdd, 0xe3, 0xe4, 0xe2, 0xe7, + 0xec, 0xe1, 0xd9, 0xdb, 0xef, 0xf5, 0xfa, 0xef, 0xdf, 0xd6, 0xd2, 0xd6, + 0xe1, 0xda, 0xdc, 0xea, 0xd9, 0xd8, 0xeb, 0xe9, 0xe7, 0xdc, 0xd9, 0xdb, + 0xe0, 0xe2, 0xe1, 0xdd, 0xe5, 0xeb, 0xea, 0xed, 0xe1, 0xda, 0xda, 0xda, + 0xdb, 0xe1, 0xe4, 0xee, 0xe9, 0xde, 0xe0, 0xe4, 0xe7, 0xdd, 0xd7, 0xe4, + 0xe7, 0xed, 0xec, 0xf3, 0xf3, 0xf4, 0xe1, 0xd4, 0xd4, 0xd5, 0xdb, 0xe1, + 0xd7, 0xd0, 0xd6, 0xdf, 0xee, 0xd8, 0xda, 0xe1, 0xdb, 0xda, 0xdf, 0xdb, + 0xdf, 0xdf, 0xe0, 0xe5, 0x31, 0x30, 0x31, 0x2f, 0x2f, 0x2f, 0x30, 
0x32, + 0x34, 0x38, 0x37, 0x38, 0x38, 0x3a, 0x3b, 0x3c, 0x3e, 0x3e, 0x3d, 0x41, + 0x43, 0x3f, 0x40, 0x3e, 0x39, 0x3b, 0x43, 0x40, 0x44, 0x49, 0x46, 0x4a, + 0x47, 0x43, 0x44, 0x47, 0x47, 0x3f, 0x41, 0x4b, 0x4b, 0x49, 0x47, 0x44, + 0x46, 0x41, 0x44, 0x47, 0x48, 0x4a, 0x4a, 0x47, 0x4b, 0x4f, 0x49, 0x41, + 0x45, 0x4f, 0x51, 0x50, 0x50, 0x4c, 0x45, 0x45, 0x44, 0x46, 0x45, 0x4a, + 0x4e, 0x4e, 0x4a, 0x4c, 0x4c, 0x45, 0x43, 0x48, 0x49, 0x4b, 0x50, 0x59, + 0x5f, 0x60, 0x5c, 0x5a, 0x5f, 0x64, 0x65, 0x63, 0x5d, 0x5f, 0x60, 0x61, + 0x62, 0x62, 0x62, 0x67, 0x6a, 0x69, 0x68, 0x67, 0x67, 0x6b, 0x6d, 0x6e, + 0x6d, 0x6a, 0x68, 0x69, 0x68, 0x66, 0x68, 0x69, 0x6a, 0x6a, 0x6b, 0x6b, + 0x6b, 0x6c, 0x6d, 0x6d, 0x6d, 0x6d, 0x6e, 0x6c, 0x6c, 0x6a, 0x6a, 0x6c, + 0x6c, 0x6b, 0x6a, 0x6b, 0x69, 0x65, 0x5b, 0x34, 0xf7, 0xf0, 0xe5, 0xe3, + 0xe2, 0xe4, 0xdd, 0xe0, 0xe0, 0xe6, 0xe8, 0xe6, 0xe7, 0xdb, 0xd9, 0xd8, + 0xe4, 0xe5, 0xe8, 0xe7, 0xe4, 0xd2, 0xd4, 0xdd, 0xe4, 0xdc, 0xdd, 0xe7, + 0xe0, 0xdd, 0xe6, 0xe5, 0xe5, 0xe3, 0xe1, 0xd7, 0xdc, 0xe6, 0xe5, 0xda, + 0xe7, 0xea, 0xe6, 0xe9, 0xde, 0xdf, 0xe0, 0xdc, 0xd6, 0xdf, 0xdd, 0xe7, + 0xe2, 0xdd, 0xdc, 0xd9, 0xe4, 0xdd, 0xd5, 0xdb, 0xe1, 0xdc, 0xe1, 0xe9, + 0xef, 0xed, 0xe7, 0xda, 0xd5, 0xdd, 0xe3, 0xe1, 0xd9, 0xdc, 0xe1, 0xdb, + 0xe3, 0xd8, 0xda, 0xd9, 0xdb, 0xd9, 0xe0, 0xe0, 0xde, 0xdb, 0xe0, 0xe3, + 0x2f, 0x2f, 0x2f, 0x2e, 0x2b, 0x2e, 0x32, 0x32, 0x36, 0x37, 0x35, 0x37, + 0x37, 0x39, 0x3a, 0x3e, 0x3f, 0x3d, 0x3e, 0x3d, 0x3f, 0x40, 0x3d, 0x3b, + 0x35, 0x3e, 0x41, 0x41, 0x48, 0x4a, 0x46, 0x4a, 0x45, 0x41, 0x40, 0x45, + 0x45, 0x3e, 0x42, 0x4d, 0x48, 0x44, 0x43, 0x42, 0x43, 0x3f, 0x47, 0x47, + 0x46, 0x46, 0x47, 0x49, 0x4a, 0x4c, 0x47, 0x44, 0x4d, 0x50, 0x4e, 0x4d, + 0x4d, 0x48, 0x44, 0x47, 0x45, 0x46, 0x45, 0x4c, 0x4f, 0x4b, 0x43, 0x4a, + 0x48, 0x42, 0x41, 0x4d, 0x4e, 0x4a, 0x4c, 0x5d, 0x60, 0x5f, 0x5b, 0x5b, + 0x5f, 0x63, 0x65, 0x63, 0x5e, 0x5f, 0x60, 0x5f, 0x61, 0x62, 0x63, 0x67, + 0x69, 0x68, 0x68, 0x67, 0x67, 0x6b, 0x6d, 0x6d, 0x6d, 0x6a, 0x69, 
0x69, + 0x69, 0x68, 0x68, 0x69, 0x6a, 0x69, 0x69, 0x6b, 0x6b, 0x6c, 0x6c, 0x6c, + 0x6d, 0x6d, 0x6d, 0x6c, 0x6c, 0x6b, 0x6b, 0x6c, 0x6c, 0x6b, 0x6a, 0x69, + 0x65, 0x64, 0x62, 0x51, 0x1f, 0xee, 0xe4, 0xe2, 0xe4, 0xdf, 0xda, 0xe0, + 0xdb, 0xe4, 0xec, 0xe7, 0xe6, 0xdb, 0xdd, 0xd9, 0xde, 0xdc, 0xe1, 0xe7, + 0xe7, 0xd2, 0xd4, 0xde, 0xe1, 0xe1, 0xdf, 0xe1, 0xe7, 0xe5, 0xe4, 0xe5, + 0xe5, 0xed, 0xe3, 0xda, 0xdc, 0xe9, 0xde, 0xda, 0xe0, 0xd6, 0xd7, 0xe8, + 0xdb, 0xe7, 0xe4, 0xe4, 0xde, 0xe1, 0xe0, 0xe9, 0xe1, 0xe0, 0xdf, 0xe1, + 0xe5, 0xe1, 0xe1, 0xd8, 0xd4, 0xda, 0xe1, 0xe2, 0xe4, 0xed, 0xea, 0xe7, + 0xe2, 0xe7, 0xec, 0xe4, 0xd9, 0xe0, 0xe7, 0xd7, 0xe3, 0xdb, 0xdb, 0xd1, + 0xd0, 0xd1, 0xd6, 0xe7, 0xe2, 0xdb, 0xdb, 0xe1, 0x2f, 0x2c, 0x2d, 0x2b, + 0x2b, 0x31, 0x31, 0x32, 0x35, 0x34, 0x34, 0x36, 0x37, 0x37, 0x3b, 0x3e, + 0x3c, 0x3b, 0x38, 0x3d, 0x3f, 0x3f, 0x3a, 0x37, 0x31, 0x3e, 0x3e, 0x3f, + 0x49, 0x48, 0x44, 0x4a, 0x44, 0x3e, 0x42, 0x47, 0x43, 0x3e, 0x48, 0x4b, + 0x45, 0x44, 0x41, 0x3e, 0x3e, 0x3f, 0x46, 0x44, 0x44, 0x44, 0x44, 0x4a, + 0x4a, 0x48, 0x47, 0x48, 0x4f, 0x4c, 0x49, 0x4b, 0x4a, 0x48, 0x47, 0x46, + 0x47, 0x48, 0x4a, 0x4f, 0x4b, 0x45, 0x44, 0x48, 0x45, 0x3a, 0x44, 0x50, + 0x4e, 0x4a, 0x51, 0x5d, 0x61, 0x5f, 0x5b, 0x5d, 0x5e, 0x62, 0x64, 0x63, + 0x60, 0x60, 0x5e, 0x5d, 0x60, 0x62, 0x64, 0x69, 0x69, 0x68, 0x68, 0x67, + 0x67, 0x6a, 0x6c, 0x6d, 0x6c, 0x69, 0x68, 0x69, 0x69, 0x69, 0x6a, 0x69, + 0x69, 0x6a, 0x6b, 0x6b, 0x6b, 0x6b, 0x6b, 0x6c, 0x6d, 0x6d, 0x6d, 0x6d, + 0x6d, 0x6d, 0x6c, 0x6d, 0x6b, 0x6a, 0x67, 0x64, 0x63, 0x60, 0x5b, 0x57, + 0x35, 0x08, 0xef, 0xe4, 0xde, 0xd4, 0xd7, 0xe2, 0xe1, 0xe4, 0xe5, 0xe6, + 0xe7, 0xdc, 0xde, 0xda, 0xdb, 0xe3, 0xe7, 0xe4, 0xe6, 0xe0, 0xe0, 0xdd, + 0xdf, 0xe6, 0xe5, 0xde, 0xe6, 0xeb, 0xeb, 0xe7, 0xe5, 0xf3, 0xe2, 0xd9, + 0xdd, 0xe3, 0xe5, 0xe7, 0xe2, 0xd4, 0xdb, 0xec, 0xe3, 0xe5, 0xe3, 0xe5, + 0xe1, 0xdf, 0xdf, 0xe8, 0xe6, 0xe1, 0xe6, 0xf7, 0xe6, 0xdf, 0xe6, 0xd4, + 0xd0, 0xdd, 0xe5, 0xe3, 0xe1, 0xf0, 0xec, 0xe6, 0xe5, 0xe7, 0xe5, 
0xd9, + 0xda, 0xe2, 0xe1, 0xda, 0xe0, 0xdb, 0xe6, 0xd6, 0xd8, 0xca, 0xcd, 0xde, + 0xe7, 0xe1, 0xdc, 0xe0, 0x29, 0x25, 0x29, 0x2b, 0x2b, 0x2f, 0x31, 0x33, + 0x32, 0x32, 0x35, 0x37, 0x36, 0x39, 0x3e, 0x3d, 0x3a, 0x38, 0x39, 0x3e, + 0x3e, 0x3d, 0x3a, 0x32, 0x32, 0x3d, 0x3e, 0x44, 0x49, 0x46, 0x44, 0x47, + 0x42, 0x40, 0x42, 0x46, 0x40, 0x3f, 0x49, 0x48, 0x44, 0x42, 0x3e, 0x3e, + 0x3f, 0x40, 0x43, 0x3e, 0x40, 0x3f, 0x42, 0x4a, 0x49, 0x45, 0x44, 0x4b, + 0x4e, 0x49, 0x49, 0x4c, 0x4a, 0x47, 0x46, 0x48, 0x49, 0x47, 0x4d, 0x4a, + 0x44, 0x42, 0x41, 0x47, 0x43, 0x40, 0x4a, 0x50, 0x4d, 0x4e, 0x57, 0x5c, + 0x61, 0x5f, 0x5c, 0x5c, 0x5d, 0x62, 0x63, 0x63, 0x62, 0x62, 0x5f, 0x5d, + 0x60, 0x62, 0x65, 0x69, 0x6a, 0x69, 0x68, 0x68, 0x68, 0x69, 0x6b, 0x6d, + 0x6c, 0x69, 0x68, 0x69, 0x69, 0x68, 0x69, 0x6b, 0x6a, 0x69, 0x69, 0x6b, + 0x6c, 0x6b, 0x6b, 0x6c, 0x6d, 0x6d, 0x6d, 0x6d, 0x6e, 0x6d, 0x6c, 0x6b, + 0x6a, 0x68, 0x65, 0x63, 0x60, 0x5e, 0x58, 0x55, 0x4b, 0x26, 0xf7, 0xe1, + 0xe1, 0xda, 0xd8, 0xe3, 0xe5, 0xf1, 0xee, 0xe5, 0xe0, 0xd9, 0xdc, 0xdd, + 0xde, 0xe1, 0xed, 0xea, 0xe5, 0xee, 0xe8, 0xe1, 0xe2, 0xe5, 0xe3, 0xe1, + 0xe4, 0xe7, 0xe8, 0xda, 0xdc, 0xf1, 0xe4, 0xdc, 0xdf, 0xda, 0xe3, 0xe8, + 0xe4, 0xd7, 0xe3, 0xe9, 0xd9, 0xe2, 0xe6, 0xe2, 0xdf, 0xe1, 0xda, 0xe3, + 0xe5, 0xdf, 0xe5, 0xeb, 0xd9, 0xdf, 0xf1, 0xda, 0xd4, 0xdd, 0xe5, 0xdf, + 0xdd, 0xe6, 0xdd, 0xe1, 0xeb, 0xe7, 0xe1, 0xde, 0xe5, 0xe7, 0xe5, 0xd6, + 0xe2, 0xde, 0xe3, 0xdd, 0xda, 0xd2, 0xd4, 0xdb, 0xe2, 0xe4, 0xe4, 0xe5, + 0x22, 0x26, 0x2c, 0x2d, 0x2d, 0x30, 0x30, 0x30, 0x2e, 0x31, 0x34, 0x31, + 0x32, 0x37, 0x3b, 0x39, 0x38, 0x37, 0x3a, 0x3e, 0x3e, 0x3b, 0x36, 0x30, + 0x34, 0x3c, 0x3e, 0x44, 0x47, 0x44, 0x42, 0x43, 0x3e, 0x3f, 0x44, 0x44, + 0x40, 0x42, 0x46, 0x45, 0x41, 0x3d, 0x3b, 0x3e, 0x3e, 0x3e, 0x3e, 0x3b, + 0x3c, 0x3c, 0x44, 0x48, 0x47, 0x46, 0x46, 0x4b, 0x4b, 0x4a, 0x4a, 0x46, + 0x46, 0x4a, 0x49, 0x49, 0x47, 0x4a, 0x48, 0x42, 0x42, 0x3c, 0x3e, 0x44, + 0x45, 0x47, 0x4d, 0x50, 0x4e, 0x56, 0x5a, 0x5c, 0x62, 0x5b, 0x5a, 
0x5d, + 0x5e, 0x63, 0x63, 0x63, 0x63, 0x64, 0x60, 0x5d, 0x5f, 0x63, 0x66, 0x6a, + 0x6a, 0x68, 0x68, 0x69, 0x66, 0x67, 0x6a, 0x6c, 0x6b, 0x69, 0x68, 0x69, + 0x6a, 0x68, 0x68, 0x69, 0x6b, 0x6a, 0x69, 0x69, 0x69, 0x6b, 0x6c, 0x6d, + 0x6d, 0x6d, 0x6e, 0x6e, 0x6d, 0x6b, 0x6c, 0x6c, 0x6b, 0x6a, 0x67, 0x64, + 0x5a, 0x57, 0x51, 0x49, 0x3c, 0x2b, 0x00, 0xde, 0xe7, 0xe4, 0xdc, 0xe0, + 0xeb, 0xe9, 0xe9, 0xe5, 0xd9, 0xd6, 0xe1, 0xe5, 0xeb, 0xe5, 0xeb, 0xf5, + 0xf4, 0xf2, 0xe9, 0xe2, 0xea, 0xf2, 0xf0, 0xe5, 0xe6, 0xe3, 0xe0, 0xe5, + 0xde, 0xdc, 0xdc, 0xde, 0xe2, 0xdd, 0xe0, 0xe2, 0xdb, 0xdd, 0xef, 0xea, + 0xd9, 0xe3, 0xe6, 0xe5, 0xe6, 0xde, 0xd8, 0xe3, 0xe5, 0xe1, 0xe1, 0xe4, + 0xda, 0xe1, 0xea, 0xde, 0xd7, 0xd8, 0xe1, 0xe3, 0xdf, 0xe9, 0xe1, 0xe6, + 0xf4, 0xee, 0xe5, 0xe0, 0xea, 0xe6, 0xe6, 0xdc, 0xe1, 0xe1, 0xd8, 0xd6, + 0xda, 0xdd, 0xd8, 0xdb, 0xe2, 0xeb, 0xe2, 0xe2, 0x27, 0x29, 0x2b, 0x2b, + 0x2b, 0x2e, 0x2e, 0x2e, 0x2c, 0x31, 0x31, 0x2c, 0x31, 0x37, 0x39, 0x38, + 0x38, 0x36, 0x39, 0x3e, 0x3b, 0x38, 0x31, 0x2c, 0x31, 0x3a, 0x3e, 0x44, + 0x44, 0x41, 0x44, 0x42, 0x3e, 0x3f, 0x43, 0x43, 0x42, 0x43, 0x41, 0x40, + 0x3e, 0x3c, 0x3a, 0x3d, 0x3c, 0x3c, 0x3d, 0x39, 0x35, 0x36, 0x40, 0x47, + 0x47, 0x48, 0x48, 0x48, 0x49, 0x48, 0x43, 0x43, 0x48, 0x4a, 0x4b, 0x47, + 0x44, 0x45, 0x43, 0x44, 0x3e, 0x38, 0x42, 0x48, 0x4a, 0x4a, 0x4f, 0x50, + 0x50, 0x57, 0x5a, 0x5e, 0x61, 0x57, 0x5c, 0x5e, 0x5f, 0x63, 0x63, 0x63, + 0x63, 0x65, 0x60, 0x5f, 0x5f, 0x63, 0x65, 0x6a, 0x6a, 0x67, 0x68, 0x69, + 0x66, 0x66, 0x69, 0x6a, 0x6b, 0x6a, 0x69, 0x69, 0x69, 0x69, 0x68, 0x68, + 0x69, 0x6a, 0x6b, 0x6a, 0x6a, 0x6b, 0x6a, 0x6c, 0x6d, 0x6d, 0x6e, 0x6e, + 0x6e, 0x6d, 0x6c, 0x6e, 0x6d, 0x6c, 0x6c, 0x69, 0x62, 0x5c, 0x4e, 0x44, + 0x33, 0x26, 0x0b, 0xe4, 0xe0, 0xe5, 0xe0, 0xe1, 0xe4, 0xe4, 0xe5, 0xe5, + 0xd6, 0xd4, 0xe2, 0xea, 0xec, 0xef, 0xf0, 0xed, 0xf9, 0xf4, 0xea, 0xe3, + 0xe9, 0xf1, 0xf5, 0xe5, 0xe7, 0xe1, 0xdc, 0xe3, 0xe3, 0xde, 0xd9, 0xd9, + 0xd9, 0xdc, 0xe5, 0xe7, 0xde, 0xdf, 0xe5, 0xde, 0xe1, 0xe6, 0xe1, 
0xdf, + 0xe8, 0xe0, 0xdc, 0xec, 0xdf, 0xdf, 0xe5, 0xe4, 0xdf, 0xe7, 0xe6, 0xd9, + 0xd6, 0xd8, 0xdf, 0xe2, 0xde, 0xe0, 0xe4, 0xe7, 0xec, 0xee, 0xe7, 0xec, + 0xf4, 0xe4, 0xdf, 0xde, 0xe1, 0xe7, 0xd6, 0xd4, 0xda, 0xdc, 0xda, 0xed, + 0xe8, 0xe2, 0xe1, 0xe1, 0x29, 0x2b, 0x2b, 0x2b, 0x2b, 0x2c, 0x2d, 0x29, + 0x2b, 0x32, 0x30, 0x2e, 0x32, 0x38, 0x37, 0x37, 0x37, 0x37, 0x3c, 0x3c, + 0x3b, 0x35, 0x2d, 0x2b, 0x35, 0x3b, 0x3e, 0x41, 0x42, 0x40, 0x42, 0x3e, + 0x3f, 0x3e, 0x44, 0x42, 0x42, 0x39, 0x3d, 0x3d, 0x3f, 0x39, 0x3a, 0x3e, + 0x3d, 0x3b, 0x3d, 0x37, 0x31, 0x34, 0x40, 0x49, 0x48, 0x46, 0x48, 0x46, + 0x44, 0x43, 0x41, 0x44, 0x45, 0x4a, 0x4b, 0x43, 0x42, 0x42, 0x46, 0x43, + 0x37, 0x3e, 0x49, 0x4a, 0x4b, 0x4e, 0x50, 0x51, 0x57, 0x55, 0x59, 0x61, + 0x5d, 0x57, 0x5c, 0x60, 0x62, 0x63, 0x65, 0x61, 0x63, 0x65, 0x62, 0x62, + 0x60, 0x64, 0x65, 0x6a, 0x6b, 0x67, 0x68, 0x69, 0x67, 0x66, 0x69, 0x69, + 0x69, 0x69, 0x68, 0x68, 0x69, 0x69, 0x69, 0x68, 0x69, 0x69, 0x6a, 0x6a, + 0x6a, 0x6b, 0x6c, 0x6c, 0x6d, 0x6d, 0x6d, 0x6e, 0x6f, 0x6f, 0x6e, 0x6f, + 0x6e, 0x6d, 0x6c, 0x6b, 0x6b, 0x66, 0x5e, 0x55, 0x3e, 0x24, 0x05, 0xf5, + 0xee, 0xe3, 0xe7, 0xe1, 0xda, 0xea, 0xe7, 0xe1, 0xd4, 0xd5, 0xe2, 0xe9, + 0xe3, 0xe5, 0xe7, 0xe9, 0xe8, 0xe8, 0xe9, 0xf5, 0xe7, 0xe1, 0xe7, 0xe4, + 0xe6, 0xe3, 0xde, 0xda, 0xd9, 0xdb, 0xdb, 0xe0, 0xdc, 0xe3, 0xec, 0xe5, + 0xda, 0xd8, 0xdb, 0xd9, 0xe4, 0xe8, 0xe5, 0xe9, 0xed, 0xeb, 0xdf, 0xee, + 0xe0, 0xda, 0xe0, 0xe6, 0xdb, 0xda, 0xda, 0xe1, 0xe3, 0xdb, 0xeb, 0xe6, + 0xe6, 0xe9, 0xec, 0xed, 0xed, 0xe7, 0xe3, 0xed, 0xed, 0xdc, 0xda, 0xd8, + 0xe4, 0xe2, 0xd9, 0xd7, 0xd9, 0xd8, 0xdb, 0xe6, 0xe1, 0xd5, 0xe4, 0xe2, + 0x27, 0x2a, 0x2c, 0x2b, 0x2b, 0x2a, 0x2a, 0x25, 0x2b, 0x2e, 0x2c, 0x2e, + 0x34, 0x38, 0x36, 0x34, 0x37, 0x37, 0x3b, 0x3b, 0x39, 0x33, 0x2a, 0x2a, + 0x38, 0x3b, 0x3e, 0x3f, 0x3f, 0x3e, 0x40, 0x3d, 0x3f, 0x3e, 0x41, 0x3e, + 0x37, 0x37, 0x3c, 0x3c, 0x3e, 0x37, 0x3a, 0x3c, 0x39, 0x3c, 0x3d, 0x37, + 0x32, 0x38, 0x44, 0x4a, 0x48, 0x46, 0x4a, 0x44, 0x3f, 0x3f, 0x40, 
0x44, + 0x4b, 0x4d, 0x4a, 0x44, 0x41, 0x46, 0x47, 0x3a, 0x38, 0x44, 0x4a, 0x4a, + 0x4b, 0x50, 0x54, 0x56, 0x57, 0x54, 0x5c, 0x60, 0x5a, 0x58, 0x5d, 0x62, + 0x63, 0x63, 0x65, 0x62, 0x63, 0x64, 0x63, 0x63, 0x61, 0x64, 0x64, 0x69, + 0x69, 0x66, 0x66, 0x68, 0x67, 0x66, 0x68, 0x68, 0x69, 0x69, 0x67, 0x67, + 0x69, 0x69, 0x69, 0x69, 0x69, 0x69, 0x6a, 0x6a, 0x6b, 0x6b, 0x6c, 0x6d, + 0x6d, 0x6c, 0x6d, 0x6e, 0x6e, 0x6f, 0x6e, 0x6e, 0x6e, 0x6e, 0x6e, 0x6c, + 0x6b, 0x69, 0x66, 0x62, 0x50, 0x34, 0xff, 0x05, 0x06, 0xe1, 0xe6, 0xde, + 0xdf, 0xeb, 0xe1, 0xdf, 0xd6, 0xda, 0xdb, 0xe1, 0xe1, 0xdf, 0xe2, 0xe7, + 0xe7, 0xe7, 0xed, 0xe8, 0xdc, 0xd4, 0xe0, 0xe2, 0xe6, 0xe7, 0xed, 0xe4, + 0xdf, 0xe0, 0xdc, 0xe6, 0xe0, 0xde, 0xe0, 0xdb, 0xd6, 0xdd, 0xdd, 0xdb, + 0xe3, 0xe2, 0xe5, 0xe3, 0xe7, 0xe9, 0xe6, 0xeb, 0xe6, 0xd4, 0xde, 0xe0, + 0xe1, 0xe1, 0xe6, 0xf0, 0xed, 0xea, 0xf1, 0xee, 0xe3, 0xe5, 0xed, 0xed, + 0xf2, 0xe7, 0xdf, 0xe8, 0xdc, 0xe0, 0xe4, 0xd9, 0xe1, 0xe2, 0xe6, 0xe1, + 0xd8, 0xd8, 0xda, 0xdd, 0xdd, 0xd1, 0xe8, 0xe4, 0x27, 0x29, 0x2b, 0x29, + 0x29, 0x27, 0x27, 0x27, 0x2b, 0x2b, 0x2d, 0x2c, 0x31, 0x36, 0x31, 0x32, + 0x36, 0x37, 0x3a, 0x39, 0x36, 0x30, 0x2a, 0x2b, 0x37, 0x3b, 0x3e, 0x3b, + 0x3d, 0x40, 0x3f, 0x3b, 0x3e, 0x3b, 0x3c, 0x36, 0x32, 0x37, 0x3b, 0x3a, + 0x3a, 0x37, 0x39, 0x39, 0x39, 0x3c, 0x3c, 0x37, 0x38, 0x40, 0x47, 0x4a, + 0x44, 0x49, 0x45, 0x3e, 0x3d, 0x3e, 0x42, 0x4a, 0x4f, 0x4d, 0x48, 0x43, + 0x43, 0x49, 0x3e, 0x38, 0x3b, 0x44, 0x4a, 0x4d, 0x4f, 0x54, 0x57, 0x56, + 0x54, 0x57, 0x5f, 0x5d, 0x56, 0x58, 0x5f, 0x63, 0x63, 0x63, 0x63, 0x62, + 0x63, 0x65, 0x63, 0x63, 0x62, 0x65, 0x64, 0x68, 0x68, 0x65, 0x65, 0x68, + 0x67, 0x66, 0x68, 0x68, 0x67, 0x67, 0x65, 0x68, 0x68, 0x69, 0x69, 0x68, + 0x69, 0x69, 0x69, 0x6b, 0x6c, 0x6c, 0x6c, 0x6c, 0x6d, 0x6c, 0x6d, 0x6e, + 0x6f, 0x6f, 0x6d, 0x6e, 0x6f, 0x6f, 0x6e, 0x6e, 0x6e, 0x6c, 0x69, 0x64, + 0x61, 0x51, 0x31, 0x09, 0xf3, 0xef, 0xeb, 0xe2, 0xe4, 0xe1, 0xd9, 0xde, + 0xda, 0xda, 0xdb, 0xe0, 0xde, 0xd9, 0xdd, 0xe1, 0xe7, 0xe1, 0xe0, 
0xe1, + 0xe2, 0xd5, 0xe0, 0xe1, 0xe7, 0xe4, 0xe5, 0xe3, 0xe5, 0xe0, 0xda, 0xda, + 0xdc, 0xdf, 0xd9, 0xd8, 0xdd, 0xe6, 0xde, 0xdf, 0xdd, 0xdf, 0xf3, 0xe9, + 0xe1, 0xe2, 0xe2, 0xe6, 0xed, 0xdd, 0xda, 0xda, 0xe3, 0xe5, 0xe5, 0xea, + 0xef, 0xe7, 0xea, 0xe4, 0xe1, 0xe0, 0xec, 0xdd, 0xeb, 0xe1, 0xd8, 0xe3, + 0xed, 0xd9, 0xe0, 0xd7, 0xe1, 0xe7, 0xec, 0xe3, 0xd0, 0xd3, 0xd9, 0xdf, + 0xe5, 0xd2, 0xe7, 0xe7, 0x24, 0x27, 0x27, 0x25, 0x27, 0x26, 0x25, 0x26, + 0x28, 0x2b, 0x2b, 0x2b, 0x32, 0x33, 0x30, 0x31, 0x33, 0x37, 0x38, 0x37, + 0x32, 0x2c, 0x26, 0x30, 0x37, 0x38, 0x3c, 0x38, 0x3e, 0x3e, 0x3b, 0x37, + 0x38, 0x36, 0x37, 0x35, 0x2f, 0x35, 0x38, 0x39, 0x39, 0x37, 0x37, 0x38, + 0x38, 0x3b, 0x3d, 0x3b, 0x3e, 0x45, 0x49, 0x48, 0x44, 0x45, 0x3f, 0x3c, + 0x3d, 0x3f, 0x4a, 0x4c, 0x4d, 0x4d, 0x48, 0x44, 0x44, 0x3f, 0x38, 0x38, + 0x3e, 0x4a, 0x4e, 0x55, 0x51, 0x56, 0x57, 0x51, 0x54, 0x59, 0x5d, 0x59, + 0x56, 0x5c, 0x5f, 0x62, 0x63, 0x61, 0x61, 0x62, 0x64, 0x64, 0x63, 0x64, + 0x64, 0x65, 0x65, 0x68, 0x68, 0x64, 0x66, 0x66, 0x66, 0x67, 0x69, 0x66, + 0x67, 0x66, 0x66, 0x68, 0x69, 0x69, 0x69, 0x69, 0x69, 0x69, 0x69, 0x6a, + 0x6b, 0x6c, 0x6c, 0x6d, 0x6d, 0x6e, 0x6e, 0x6e, 0x6e, 0x6e, 0x6e, 0x6f, + 0x6e, 0x6f, 0x6d, 0x6e, 0x6e, 0x6e, 0x6c, 0x69, 0x63, 0x5a, 0x42, 0x15, + 0xf3, 0xe7, 0xe7, 0xe7, 0xe4, 0xd9, 0xd8, 0xdb, 0xd8, 0xd6, 0xdf, 0xe1, + 0xda, 0xd9, 0xda, 0xdb, 0xe1, 0xe1, 0xe4, 0xe2, 0xdd, 0xd6, 0xe5, 0xe4, + 0xe4, 0xdf, 0xdf, 0xe1, 0xe0, 0xe1, 0xda, 0xd4, 0xcf, 0xde, 0xe7, 0xe7, + 0xde, 0xe0, 0xdd, 0xe0, 0xde, 0xdd, 0xe7, 0xdc, 0xdb, 0xe7, 0xe2, 0xe0, + 0xe9, 0xec, 0xe1, 0xe4, 0xe4, 0xe2, 0xe7, 0xed, 0xef, 0xe5, 0xe3, 0xdd, + 0xd8, 0xe2, 0xe7, 0xde, 0xe8, 0xeb, 0xf2, 0xed, 0xea, 0xdf, 0xd7, 0xe1, + 0xdf, 0xd6, 0xde, 0xe4, 0xdc, 0xcd, 0xd8, 0xe1, 0xea, 0xda, 0xea, 0xe9, + 0x22, 0x24, 0x25, 0x27, 0x26, 0x24, 0x22, 0x24, 0x28, 0x29, 0x2b, 0x2b, + 0x34, 0x2f, 0x2a, 0x2e, 0x32, 0x34, 0x38, 0x33, 0x2e, 0x2a, 0x25, 0x31, + 0x35, 0x37, 0x38, 0x37, 0x3c, 0x3a, 0x37, 0x33, 0x33, 0x34, 0x37, 
0x31, + 0x30, 0x35, 0x39, 0x3c, 0x3c, 0x37, 0x34, 0x35, 0x37, 0x3d, 0x41, 0x42, + 0x44, 0x46, 0x48, 0x43, 0x3e, 0x3f, 0x3a, 0x3a, 0x3e, 0x48, 0x4a, 0x49, + 0x4b, 0x4d, 0x46, 0x41, 0x3e, 0x3b, 0x3c, 0x3d, 0x45, 0x4c, 0x50, 0x53, + 0x51, 0x52, 0x53, 0x55, 0x57, 0x5a, 0x5c, 0x57, 0x56, 0x5d, 0x5e, 0x63, + 0x62, 0x5e, 0x5e, 0x62, 0x65, 0x63, 0x63, 0x64, 0x66, 0x66, 0x64, 0x66, + 0x66, 0x63, 0x63, 0x63, 0x65, 0x66, 0x68, 0x65, 0x67, 0x64, 0x67, 0x67, + 0x68, 0x69, 0x6a, 0x69, 0x6a, 0x69, 0x69, 0x69, 0x6a, 0x6c, 0x6d, 0x6d, + 0x6c, 0x6d, 0x6d, 0x6f, 0x6f, 0x6e, 0x6e, 0x6f, 0x6e, 0x6d, 0x6d, 0x6d, + 0x6e, 0x6f, 0x6f, 0x69, 0x62, 0x5e, 0x54, 0x33, 0xf3, 0xe5, 0xe8, 0xe1, + 0xe4, 0xde, 0xdf, 0xe6, 0xd9, 0xdb, 0xe1, 0xdc, 0xd9, 0xd5, 0xd0, 0xd7, + 0xdb, 0xe1, 0xda, 0xee, 0xe7, 0xd4, 0xde, 0xe1, 0xe1, 0xda, 0xda, 0xdf, + 0xdc, 0xda, 0xd5, 0xd0, 0xcc, 0xe1, 0xeb, 0xe2, 0xdc, 0xde, 0xd9, 0xda, + 0xd7, 0xe2, 0xeb, 0xd8, 0xe0, 0xe8, 0xe4, 0xe1, 0xdf, 0xec, 0xee, 0xf3, + 0xe7, 0xe4, 0xea, 0xef, 0xdf, 0xdb, 0xe2, 0xe9, 0xd9, 0xdd, 0xe7, 0xe7, + 0xdf, 0xda, 0xec, 0xed, 0xe5, 0xde, 0xdb, 0xe5, 0xe0, 0xd6, 0xd7, 0xdd, + 0xca, 0xc7, 0xdb, 0xe2, 0xeb, 0xe3, 0xd7, 0xd9, 0x22, 0x23, 0x25, 0x25, + 0x22, 0x22, 0x22, 0x25, 0x2a, 0x2b, 0x2b, 0x2d, 0x33, 0x2a, 0x28, 0x2d, + 0x31, 0x36, 0x33, 0x2e, 0x2b, 0x27, 0x28, 0x2d, 0x32, 0x33, 0x35, 0x37, + 0x39, 0x36, 0x33, 0x30, 0x30, 0x33, 0x36, 0x2f, 0x32, 0x37, 0x38, 0x39, + 0x3c, 0x38, 0x33, 0x34, 0x39, 0x3f, 0x44, 0x41, 0x43, 0x44, 0x47, 0x3e, + 0x3c, 0x3d, 0x3c, 0x3f, 0x46, 0x48, 0x45, 0x4a, 0x4d, 0x4d, 0x46, 0x3f, + 0x39, 0x3a, 0x3e, 0x44, 0x46, 0x4c, 0x4c, 0x4f, 0x4d, 0x50, 0x55, 0x57, + 0x58, 0x5c, 0x5c, 0x53, 0x57, 0x5e, 0x60, 0x63, 0x61, 0x5d, 0x5d, 0x62, + 0x65, 0x62, 0x63, 0x64, 0x65, 0x66, 0x63, 0x64, 0x65, 0x63, 0x63, 0x63, + 0x64, 0x67, 0x67, 0x64, 0x65, 0x64, 0x68, 0x66, 0x66, 0x69, 0x6b, 0x69, + 0x6a, 0x6a, 0x69, 0x69, 0x69, 0x6a, 0x6b, 0x6c, 0x6c, 0x6c, 0x6d, 0x6d, + 0x6d, 0x6d, 0x6e, 0x6e, 0x6c, 0x6c, 0x6e, 0x6e, 0x6f, 0x6f, 0x6f, 
0x6d, + 0x69, 0x5f, 0x45, 0x34, 0x0b, 0xfc, 0xf2, 0xec, 0xe6, 0xf0, 0xde, 0xe7, + 0xde, 0xdf, 0xe0, 0xd7, 0xd2, 0xce, 0xd2, 0xdd, 0xd4, 0xdc, 0xda, 0xdd, + 0xe4, 0xd5, 0xda, 0xde, 0xde, 0xda, 0xdb, 0xe2, 0xe0, 0xd8, 0xd6, 0xcc, + 0xcf, 0xe3, 0xeb, 0xe1, 0xe6, 0xea, 0xda, 0xd9, 0xd4, 0xe7, 0xeb, 0xda, + 0xe4, 0xe6, 0xe1, 0xdf, 0xe0, 0xe8, 0xe3, 0xe9, 0xe7, 0xe7, 0xe1, 0xe6, + 0xe4, 0xe2, 0xe0, 0xe1, 0xda, 0xda, 0xe3, 0xe7, 0xe1, 0xc1, 0xbf, 0xcb, + 0xe0, 0xe4, 0xed, 0xe6, 0xe2, 0xde, 0xd0, 0xda, 0xc4, 0xce, 0xe1, 0xe1, + 0xe8, 0xe1, 0xd2, 0xdf, 0x1f, 0x20, 0x24, 0x20, 0x1e, 0x1f, 0x25, 0x24, + 0x29, 0x2a, 0x2a, 0x2b, 0x2f, 0x27, 0x2a, 0x2b, 0x30, 0x34, 0x2b, 0x2b, + 0x27, 0x24, 0x26, 0x2b, 0x2c, 0x31, 0x34, 0x38, 0x35, 0x34, 0x32, 0x30, + 0x2f, 0x31, 0x31, 0x31, 0x34, 0x36, 0x37, 0x37, 0x3d, 0x3d, 0x37, 0x38, + 0x3e, 0x42, 0x41, 0x42, 0x42, 0x43, 0x40, 0x3e, 0x3c, 0x3b, 0x3e, 0x44, + 0x44, 0x46, 0x48, 0x4c, 0x4f, 0x4a, 0x42, 0x38, 0x34, 0x39, 0x3f, 0x40, + 0x44, 0x4b, 0x4a, 0x4a, 0x4a, 0x52, 0x57, 0x58, 0x5b, 0x5d, 0x5a, 0x50, + 0x57, 0x5f, 0x61, 0x62, 0x60, 0x5d, 0x5b, 0x61, 0x64, 0x61, 0x63, 0x63, + 0x65, 0x65, 0x62, 0x63, 0x63, 0x60, 0x63, 0x63, 0x63, 0x64, 0x64, 0x63, + 0x63, 0x65, 0x67, 0x67, 0x66, 0x69, 0x6a, 0x69, 0x6a, 0x6b, 0x6b, 0x6a, + 0x6a, 0x6a, 0x6a, 0x6a, 0x6c, 0x6d, 0x6d, 0x6d, 0x6d, 0x6c, 0x6d, 0x6d, + 0x6e, 0x6d, 0x6f, 0x6f, 0x6f, 0x6f, 0x6f, 0x6e, 0x6c, 0x64, 0x50, 0x37, + 0x30, 0x19, 0x0e, 0xf3, 0xe5, 0xf5, 0xed, 0xdd, 0xe3, 0xe1, 0xdf, 0xd2, + 0xce, 0xd0, 0xd1, 0xd7, 0xca, 0xd8, 0xdc, 0xdb, 0xd3, 0xda, 0xde, 0xdf, + 0xdc, 0xda, 0xde, 0xe1, 0xde, 0xd7, 0xdd, 0xd6, 0xd4, 0xe4, 0xe3, 0xdd, + 0xe0, 0xe9, 0xe8, 0xdc, 0xd8, 0xe4, 0xe3, 0xdc, 0xdf, 0xde, 0xe1, 0xde, + 0xe1, 0xe1, 0xdc, 0xe7, 0xef, 0xe1, 0xdb, 0xe4, 0xe6, 0xe2, 0xe1, 0xd2, + 0xd8, 0xda, 0xe3, 0xe7, 0xe1, 0xc4, 0xb8, 0xbf, 0xda, 0xe7, 0xe7, 0xe1, + 0xde, 0xdd, 0xcb, 0xda, 0xc8, 0xda, 0xe4, 0xda, 0xe7, 0xe2, 0xd4, 0xdf, + 0x1d, 0x20, 0x1f, 0x1e, 0x1c, 0x1f, 0x25, 0x25, 0x2b, 0x28, 0x25, 
0x29, + 0x2a, 0x25, 0x2b, 0x2b, 0x2d, 0x30, 0x2b, 0x29, 0x24, 0x24, 0x26, 0x2a, + 0x2b, 0x30, 0x32, 0x35, 0x32, 0x32, 0x33, 0x31, 0x2e, 0x2c, 0x31, 0x34, + 0x32, 0x35, 0x36, 0x39, 0x3f, 0x3d, 0x3a, 0x3b, 0x40, 0x3f, 0x40, 0x43, + 0x41, 0x3a, 0x3f, 0x3f, 0x3c, 0x3e, 0x40, 0x43, 0x43, 0x47, 0x49, 0x4c, + 0x4e, 0x45, 0x3a, 0x31, 0x30, 0x39, 0x3f, 0x42, 0x48, 0x4a, 0x47, 0x44, + 0x4d, 0x54, 0x58, 0x5a, 0x5d, 0x5d, 0x57, 0x50, 0x5c, 0x60, 0x61, 0x5f, + 0x5d, 0x5c, 0x5a, 0x5f, 0x62, 0x60, 0x63, 0x63, 0x65, 0x63, 0x62, 0x63, + 0x61, 0x5f, 0x63, 0x63, 0x63, 0x63, 0x63, 0x63, 0x63, 0x63, 0x66, 0x64, + 0x67, 0x69, 0x6a, 0x69, 0x6a, 0x6b, 0x6b, 0x6c, 0x6c, 0x6d, 0x6b, 0x6b, + 0x6c, 0x6c, 0x6d, 0x6d, 0x6d, 0x6c, 0x6c, 0x6d, 0x6d, 0x6c, 0x6e, 0x6f, + 0x6f, 0x6f, 0x6f, 0x6f, 0x6e, 0x6b, 0x67, 0x5d, 0x49, 0x2a, 0xff, 0xde, + 0xe1, 0xe7, 0xed, 0xde, 0xdf, 0xe7, 0xe7, 0xd3, 0xd0, 0xd2, 0xce, 0xc9, + 0xd0, 0xe7, 0xd8, 0xd9, 0xd0, 0xd4, 0xde, 0xe1, 0xda, 0xd8, 0xdc, 0xda, + 0xe1, 0xd7, 0xe0, 0xe1, 0xdd, 0xe3, 0xdd, 0xd7, 0xda, 0xe5, 0xe9, 0xe0, + 0xe7, 0xea, 0xde, 0xd8, 0xd4, 0xd9, 0xe0, 0xdd, 0xe1, 0xe1, 0xe7, 0xe4, + 0xe7, 0xd9, 0xd5, 0xed, 0xe4, 0xd4, 0xdd, 0xd9, 0xd8, 0xdb, 0xe4, 0xe6, + 0xe1, 0xc8, 0xc4, 0xc1, 0xcb, 0xd8, 0xe2, 0xf1, 0xe1, 0xe3, 0xce, 0xcf, + 0xd6, 0xe4, 0xe4, 0xd7, 0xd4, 0xd8, 0xdb, 0xdb, 0x19, 0x1c, 0x1b, 0x1e, + 0x1f, 0x22, 0x24, 0x26, 0x29, 0x26, 0x24, 0x25, 0x25, 0x24, 0x28, 0x26, + 0x25, 0x29, 0x2d, 0x27, 0x21, 0x22, 0x24, 0x27, 0x2a, 0x2f, 0x31, 0x33, + 0x34, 0x30, 0x2e, 0x32, 0x2b, 0x2b, 0x31, 0x33, 0x32, 0x37, 0x35, 0x3a, + 0x3c, 0x3c, 0x3c, 0x3d, 0x3d, 0x3c, 0x41, 0x3f, 0x3a, 0x3c, 0x42, 0x43, + 0x3e, 0x3d, 0x44, 0x42, 0x45, 0x44, 0x48, 0x4b, 0x4d, 0x40, 0x34, 0x31, + 0x2e, 0x3c, 0x3e, 0x44, 0x4a, 0x4a, 0x47, 0x49, 0x51, 0x57, 0x5c, 0x5c, + 0x5d, 0x5c, 0x54, 0x54, 0x5d, 0x61, 0x5f, 0x5c, 0x5b, 0x5b, 0x57, 0x5d, + 0x5e, 0x5e, 0x60, 0x63, 0x64, 0x63, 0x61, 0x63, 0x5f, 0x5d, 0x61, 0x64, + 0x63, 0x62, 0x63, 0x64, 0x64, 0x63, 0x64, 0x62, 0x66, 0x69, 0x6a, 
0x6a, + 0x6b, 0x6b, 0x6a, 0x6c, 0x6c, 0x6d, 0x6d, 0x6d, 0x6c, 0x6c, 0x6d, 0x6d, + 0x6d, 0x6e, 0x6e, 0x6e, 0x6e, 0x6e, 0x6e, 0x6f, 0x6f, 0x6f, 0x6f, 0x6f, + 0x6f, 0x6d, 0x67, 0x5c, 0x41, 0x1d, 0xf6, 0xe9, 0xe7, 0xe1, 0xec, 0xe5, + 0xe4, 0xe3, 0xe6, 0xd8, 0xd9, 0xd5, 0xcf, 0xc8, 0xdf, 0xef, 0xe3, 0xe3, + 0xd3, 0xd4, 0xdb, 0xe1, 0xe8, 0xe1, 0xdc, 0xd5, 0xf2, 0xe4, 0xda, 0xdb, + 0xe3, 0xe1, 0xd9, 0xd3, 0xda, 0xda, 0xdf, 0xe0, 0xe9, 0xea, 0xda, 0xd7, + 0xd7, 0xe5, 0xe7, 0xe6, 0xe3, 0xf1, 0xe4, 0xdc, 0xd9, 0xd2, 0xd4, 0xdb, + 0xe1, 0xd4, 0xd7, 0xdf, 0xdb, 0xda, 0xe1, 0xe2, 0xe1, 0xdb, 0xd9, 0xce, + 0xcb, 0xce, 0xdd, 0xec, 0xe4, 0xe2, 0xd3, 0xc6, 0xda, 0xe8, 0xe2, 0xdc, + 0xce, 0xd3, 0xd8, 0xd5, 0x14, 0x19, 0x1c, 0x1f, 0x20, 0x22, 0x20, 0x21, + 0x23, 0x21, 0x22, 0x23, 0x20, 0x21, 0x25, 0x22, 0x22, 0x26, 0x2c, 0x21, + 0x1f, 0x21, 0x21, 0x25, 0x2b, 0x31, 0x2b, 0x32, 0x31, 0x2a, 0x2d, 0x31, + 0x2c, 0x2e, 0x32, 0x33, 0x35, 0x37, 0x37, 0x39, 0x3d, 0x3d, 0x3d, 0x3e, + 0x3c, 0x3e, 0x3e, 0x3a, 0x3c, 0x3e, 0x41, 0x41, 0x3c, 0x42, 0x43, 0x3e, + 0x44, 0x45, 0x48, 0x48, 0x4b, 0x3a, 0x30, 0x35, 0x33, 0x3f, 0x43, 0x46, + 0x4c, 0x49, 0x49, 0x4e, 0x54, 0x5a, 0x5d, 0x5e, 0x5d, 0x58, 0x51, 0x57, + 0x5e, 0x5f, 0x5b, 0x57, 0x5b, 0x59, 0x57, 0x5b, 0x5d, 0x5d, 0x60, 0x62, + 0x63, 0x62, 0x5f, 0x62, 0x5e, 0x5c, 0x5d, 0x63, 0x62, 0x62, 0x63, 0x64, + 0x64, 0x63, 0x63, 0x63, 0x64, 0x68, 0x69, 0x69, 0x69, 0x69, 0x6b, 0x6c, + 0x6d, 0x6a, 0x6d, 0x6d, 0x6d, 0x6d, 0x6d, 0x6d, 0x6d, 0x6d, 0x6d, 0x6d, + 0x6d, 0x6e, 0x6f, 0x6f, 0x6f, 0x6f, 0x6f, 0x6f, 0x6f, 0x6e, 0x69, 0x5f, + 0x4b, 0x32, 0x1f, 0x11, 0xfb, 0xe0, 0xdc, 0xda, 0xe7, 0xe5, 0xe4, 0xdf, + 0xdb, 0xd4, 0xce, 0xcd, 0xd9, 0xe5, 0xeb, 0xec, 0xdf, 0xda, 0xdc, 0xda, + 0xe7, 0xe7, 0xdf, 0xdc, 0xe7, 0xe4, 0xd9, 0xe0, 0xdd, 0xdb, 0xd6, 0xd3, + 0xdd, 0xe3, 0xe1, 0xdb, 0xde, 0xdf, 0xe0, 0xde, 0xe4, 0xe6, 0xe0, 0xdf, + 0xe6, 0xf2, 0xe5, 0xe1, 0xdf, 0xd5, 0xd3, 0xd6, 0xda, 0xd5, 0xd9, 0xe6, + 0xdc, 0xe0, 0xe1, 0xe1, 0xde, 0xd0, 0xd3, 0xdb, 0xc8, 0xd1, 0xdb, 
0xe2, + 0xec, 0xd9, 0xd8, 0xdd, 0xe5, 0xe5, 0xd9, 0xdd, 0xcf, 0xd3, 0xda, 0xe1, + 0x18, 0x19, 0x1d, 0x1e, 0x1e, 0x1c, 0x1e, 0x1d, 0x1c, 0x1f, 0x24, 0x1f, + 0x1d, 0x20, 0x23, 0x21, 0x23, 0x2b, 0x27, 0x1e, 0x1f, 0x22, 0x20, 0x22, + 0x2c, 0x2d, 0x28, 0x2f, 0x30, 0x29, 0x2d, 0x2f, 0x2f, 0x2f, 0x30, 0x32, + 0x34, 0x38, 0x36, 0x37, 0x3c, 0x3b, 0x3d, 0x3d, 0x3b, 0x3d, 0x37, 0x3a, + 0x3f, 0x40, 0x43, 0x42, 0x41, 0x44, 0x3e, 0x3e, 0x43, 0x49, 0x48, 0x43, + 0x48, 0x39, 0x2f, 0x33, 0x35, 0x43, 0x44, 0x48, 0x4c, 0x49, 0x4c, 0x51, + 0x56, 0x5b, 0x5c, 0x5e, 0x5b, 0x53, 0x50, 0x5b, 0x5e, 0x5c, 0x58, 0x57, + 0x5b, 0x57, 0x56, 0x58, 0x59, 0x5b, 0x60, 0x62, 0x63, 0x60, 0x5f, 0x5f, + 0x5f, 0x5c, 0x5d, 0x63, 0x60, 0x61, 0x62, 0x63, 0x66, 0x63, 0x63, 0x64, + 0x63, 0x67, 0x67, 0x68, 0x68, 0x69, 0x6b, 0x6c, 0x6d, 0x6b, 0x6a, 0x6b, + 0x6d, 0x6d, 0x6d, 0x6d, 0x6c, 0x6d, 0x6d, 0x6d, 0x6e, 0x6d, 0x6e, 0x6f, + 0x6f, 0x6f, 0x6f, 0x6f, 0x6f, 0x6f, 0x6d, 0x69, 0x64, 0x59, 0x4d, 0x33, + 0x12, 0xf2, 0xe6, 0xe1, 0xdc, 0xe8, 0xe7, 0xe2, 0xd9, 0xd4, 0xc8, 0xce, + 0xd7, 0xd9, 0xdd, 0xe9, 0xe9, 0xe0, 0xdd, 0xd0, 0xda, 0xe2, 0xdd, 0xdf, + 0xe7, 0xe4, 0xdc, 0xe3, 0xe0, 0xd9, 0xd4, 0xda, 0xe2, 0xe7, 0xda, 0xd5, + 0xd9, 0xdd, 0xe3, 0xe0, 0xe7, 0xe7, 0xdd, 0xda, 0xda, 0xe6, 0xf0, 0xe1, + 0xf7, 0xf3, 0xd4, 0xd3, 0xd9, 0xdf, 0xe7, 0xdf, 0xd5, 0xde, 0xdd, 0xdc, + 0xd9, 0xc8, 0xcd, 0xd5, 0xd6, 0xdb, 0xdc, 0xd8, 0xec, 0xe2, 0xd7, 0xdf, + 0xe4, 0xe1, 0xe1, 0xf1, 0xe1, 0xd8, 0xf0, 0xf3, 0x16, 0x18, 0x1a, 0x18, + 0x16, 0x19, 0x1b, 0x1a, 0x19, 0x1b, 0x1f, 0x1e, 0x1e, 0x23, 0x22, 0x1f, + 0x25, 0x29, 0x25, 0x1f, 0x1f, 0x1e, 0x1d, 0x24, 0x2b, 0x28, 0x27, 0x2d, + 0x2f, 0x2b, 0x2c, 0x2e, 0x2f, 0x2d, 0x2f, 0x34, 0x36, 0x38, 0x33, 0x36, + 0x3b, 0x3e, 0x3e, 0x3d, 0x3c, 0x38, 0x3a, 0x3e, 0x3e, 0x40, 0x43, 0x45, + 0x43, 0x3e, 0x3d, 0x3e, 0x44, 0x49, 0x45, 0x3d, 0x3f, 0x39, 0x31, 0x33, + 0x37, 0x44, 0x47, 0x4a, 0x4c, 0x49, 0x4e, 0x54, 0x57, 0x59, 0x5c, 0x5b, + 0x54, 0x50, 0x52, 0x5c, 0x5c, 0x59, 0x57, 0x57, 0x5b, 0x56, 0x55, 
0x57, + 0x57, 0x5b, 0x5f, 0x62, 0x63, 0x5e, 0x5d, 0x5d, 0x5f, 0x5d, 0x5d, 0x62, + 0x60, 0x62, 0x62, 0x63, 0x67, 0x64, 0x63, 0x63, 0x65, 0x66, 0x68, 0x67, + 0x69, 0x68, 0x69, 0x6b, 0x6d, 0x6c, 0x6b, 0x6b, 0x6b, 0x6c, 0x6d, 0x6c, + 0x6c, 0x6d, 0x6d, 0x6e, 0x6f, 0x6e, 0x6e, 0x6d, 0x6e, 0x6f, 0x6f, 0x6f, + 0x6f, 0x6f, 0x6d, 0x6a, 0x61, 0x56, 0x48, 0x31, 0x0e, 0xef, 0xda, 0xdf, + 0xda, 0xde, 0xe6, 0xe7, 0xde, 0xdc, 0xce, 0xdb, 0xe0, 0xde, 0xe0, 0xe1, + 0xe6, 0xe6, 0xe1, 0xcf, 0xd6, 0xea, 0xe1, 0xda, 0xe4, 0xe5, 0xdd, 0xdf, + 0xec, 0xe5, 0xe4, 0xe1, 0xe6, 0xda, 0xd4, 0xd7, 0xdf, 0xd8, 0xd9, 0xe1, + 0xe4, 0xe0, 0xd4, 0xd7, 0xd5, 0xdd, 0xf7, 0xef, 0xe7, 0xeb, 0xd9, 0xd6, + 0xed, 0xea, 0xe4, 0xda, 0xdd, 0xe3, 0xd9, 0xdc, 0xda, 0xda, 0xe3, 0xe2, + 0xda, 0xd7, 0xdc, 0xd0, 0xdf, 0xed, 0xe2, 0xe0, 0xf0, 0xdf, 0xe1, 0xea, + 0xe4, 0xe1, 0xf3, 0xfa, 0x12, 0x15, 0x12, 0x12, 0x13, 0x19, 0x17, 0x14, + 0x17, 0x1d, 0x1f, 0x1f, 0x20, 0x24, 0x23, 0x23, 0x25, 0x28, 0x21, 0x1e, + 0x1a, 0x17, 0x1c, 0x23, 0x2a, 0x26, 0x29, 0x2c, 0x2f, 0x2c, 0x2b, 0x2c, + 0x2b, 0x2c, 0x30, 0x33, 0x36, 0x37, 0x32, 0x36, 0x3c, 0x3e, 0x3e, 0x3b, + 0x37, 0x38, 0x3b, 0x3d, 0x3e, 0x43, 0x44, 0x44, 0x3d, 0x39, 0x3b, 0x3e, + 0x44, 0x48, 0x40, 0x3a, 0x36, 0x38, 0x33, 0x31, 0x3c, 0x45, 0x47, 0x49, + 0x4a, 0x4a, 0x50, 0x53, 0x57, 0x5a, 0x5a, 0x57, 0x50, 0x51, 0x56, 0x5a, + 0x57, 0x58, 0x58, 0x58, 0x59, 0x55, 0x52, 0x57, 0x58, 0x5b, 0x5e, 0x60, + 0x62, 0x5d, 0x5a, 0x5b, 0x5e, 0x5d, 0x5e, 0x61, 0x60, 0x63, 0x63, 0x63, + 0x66, 0x65, 0x64, 0x64, 0x66, 0x66, 0x67, 0x67, 0x68, 0x67, 0x67, 0x69, + 0x6c, 0x6c, 0x6c, 0x6b, 0x6a, 0x6b, 0x6c, 0x6b, 0x6b, 0x6c, 0x6d, 0x6d, + 0x6f, 0x6f, 0x6f, 0x6e, 0x6d, 0x6e, 0x6f, 0x6f, 0x6f, 0x6f, 0x6e, 0x6d, + 0x66, 0x56, 0x42, 0x2c, 0x09, 0xed, 0xdc, 0xdb, 0xe4, 0xe4, 0xe7, 0xe7, + 0xe1, 0xe1, 0xdd, 0xfe, 0xf9, 0xea, 0xdc, 0xd9, 0xda, 0xdd, 0xe0, 0xe1, + 0xe8, 0xf9, 0xe9, 0xe0, 0xe6, 0xe7, 0xdd, 0xda, 0xe1, 0xe2, 0xe7, 0xdb, + 0xda, 0xd7, 0xd2, 0xd6, 0xda, 0xd0, 0xd1, 0xe1, 0xe8, 0xe0, 0xda, 
0xd7, + 0xd7, 0xe2, 0xf7, 0xfe, 0xf8, 0xeb, 0xe5, 0xe4, 0xf3, 0xe4, 0xe2, 0xe5, + 0xe7, 0xe1, 0xd4, 0xdc, 0xda, 0xeb, 0xf2, 0xe0, 0xd6, 0xdb, 0xda, 0xd4, + 0xd8, 0xef, 0xe1, 0xde, 0xf3, 0xde, 0xd6, 0xd8, 0xe1, 0xe8, 0xe7, 0xf9, + 0x0b, 0x0c, 0x0a, 0x0c, 0x14, 0x18, 0x16, 0x14, 0x18, 0x1f, 0x21, 0x1f, + 0x23, 0x24, 0x23, 0x21, 0x22, 0x21, 0x1a, 0x17, 0x12, 0x16, 0x1c, 0x1f, + 0x27, 0x27, 0x2a, 0x2c, 0x2e, 0x2b, 0x2b, 0x2c, 0x2b, 0x2e, 0x2f, 0x33, + 0x36, 0x34, 0x30, 0x36, 0x3a, 0x3a, 0x3a, 0x33, 0x34, 0x37, 0x3a, 0x3d, + 0x43, 0x45, 0x41, 0x3c, 0x3b, 0x39, 0x3e, 0x3e, 0x43, 0x47, 0x3b, 0x37, + 0x32, 0x37, 0x39, 0x37, 0x3e, 0x44, 0x48, 0x49, 0x49, 0x4b, 0x51, 0x55, + 0x58, 0x59, 0x56, 0x54, 0x4f, 0x52, 0x57, 0x58, 0x57, 0x58, 0x57, 0x58, + 0x59, 0x53, 0x53, 0x57, 0x57, 0x5a, 0x5d, 0x5e, 0x61, 0x5b, 0x5a, 0x5a, + 0x5d, 0x5d, 0x5e, 0x60, 0x5f, 0x62, 0x63, 0x61, 0x65, 0x64, 0x63, 0x66, + 0x66, 0x67, 0x64, 0x67, 0x69, 0x68, 0x66, 0x67, 0x6a, 0x6d, 0x6d, 0x6d, + 0x6c, 0x6b, 0x6c, 0x6c, 0x6c, 0x6a, 0x6c, 0x6e, 0x6f, 0x6f, 0x6f, 0x6e, + 0x6d, 0x6e, 0x6e, 0x6f, 0x6f, 0x6f, 0x6e, 0x6f, 0x6c, 0x65, 0x58, 0x46, + 0x25, 0x01, 0xed, 0xd4, 0xe1, 0xe4, 0xe5, 0xe4, 0xe5, 0xe6, 0xda, 0xf4, + 0x01, 0xf8, 0xe6, 0xe1, 0xe0, 0xe7, 0xde, 0xe1, 0xf5, 0xf3, 0xee, 0xe1, + 0xe4, 0xe7, 0xdd, 0xda, 0xd7, 0xd6, 0xdb, 0xd0, 0xd7, 0xda, 0xcf, 0xd6, + 0xd6, 0xca, 0xcd, 0xe4, 0xe8, 0xe7, 0xe5, 0xda, 0xda, 0xd9, 0xec, 0x07, + 0xff, 0xe9, 0xe3, 0xec, 0xef, 0xe2, 0xe6, 0xea, 0xe5, 0xe0, 0xda, 0xe1, + 0xdd, 0xec, 0xe1, 0xd7, 0xda, 0xdb, 0xd9, 0xdc, 0xe0, 0xe6, 0xda, 0xd8, + 0xee, 0xe0, 0xd8, 0xd8, 0xdd, 0xed, 0xe7, 0xe6, 0x01, 0x05, 0x08, 0x0f, + 0x17, 0x18, 0x16, 0x18, 0x1e, 0x24, 0x23, 0x23, 0x24, 0x1f, 0x1d, 0x1e, + 0x1f, 0x21, 0x15, 0x12, 0x12, 0x18, 0x1d, 0x20, 0x25, 0x28, 0x27, 0x2e, + 0x31, 0x2b, 0x29, 0x2b, 0x2c, 0x2c, 0x2e, 0x33, 0x37, 0x30, 0x30, 0x35, + 0x37, 0x37, 0x34, 0x31, 0x34, 0x37, 0x3c, 0x42, 0x44, 0x41, 0x38, 0x3a, + 0x3c, 0x3d, 0x3e, 0x3b, 0x42, 0x44, 0x31, 0x31, 0x32, 0x34, 0x39, 
0x3e, + 0x3e, 0x41, 0x48, 0x4a, 0x48, 0x4c, 0x53, 0x56, 0x57, 0x55, 0x51, 0x52, + 0x51, 0x56, 0x58, 0x57, 0x57, 0x57, 0x57, 0x57, 0x58, 0x54, 0x56, 0x55, + 0x54, 0x5a, 0x5b, 0x5b, 0x5f, 0x5b, 0x5c, 0x59, 0x5c, 0x5d, 0x5e, 0x5f, + 0x61, 0x61, 0x62, 0x61, 0x63, 0x67, 0x63, 0x63, 0x66, 0x67, 0x66, 0x66, + 0x68, 0x69, 0x67, 0x66, 0x69, 0x69, 0x6a, 0x6e, 0x6d, 0x6c, 0x6b, 0x6c, + 0x6d, 0x6c, 0x6b, 0x6d, 0x6e, 0x6f, 0x6f, 0x6f, 0x6e, 0x6d, 0x6d, 0x6e, + 0x6e, 0x6f, 0x6f, 0x6f, 0x6f, 0x69, 0x5c, 0x4a, 0x27, 0xff, 0xed, 0xe1, + 0xdb, 0xd9, 0xe1, 0xe7, 0xec, 0xf3, 0xeb, 0xef, 0xfb, 0x01, 0xf7, 0xed, + 0xe7, 0xf3, 0xe7, 0xd7, 0xe5, 0xf0, 0xed, 0xec, 0xeb, 0xf1, 0xe5, 0xe4, + 0xda, 0xd4, 0xcf, 0xc7, 0xd4, 0xdc, 0xd1, 0xd5, 0xd1, 0xc7, 0xc8, 0xe7, + 0xf3, 0xdd, 0xe2, 0xe1, 0xda, 0xd2, 0xed, 0x00, 0xe7, 0xe2, 0xe4, 0xe4, + 0xe9, 0xee, 0xeb, 0xe8, 0xe1, 0xe5, 0xde, 0xe1, 0xe7, 0xe7, 0xe0, 0xdd, + 0xd4, 0xd7, 0xd0, 0xda, 0xd8, 0xd8, 0xda, 0xe1, 0xee, 0xe1, 0xe6, 0xda, + 0xd0, 0xe5, 0xf1, 0xe8, 0xfb, 0x03, 0x06, 0x0a, 0x11, 0x13, 0x18, 0x1a, + 0x1d, 0x23, 0x22, 0x1f, 0x1e, 0x1c, 0x1d, 0x21, 0x20, 0x1c, 0x13, 0x19, + 0x19, 0x1a, 0x1d, 0x20, 0x28, 0x2a, 0x25, 0x2e, 0x2c, 0x29, 0x28, 0x29, + 0x2b, 0x2b, 0x2e, 0x31, 0x32, 0x2e, 0x33, 0x36, 0x37, 0x36, 0x32, 0x30, + 0x31, 0x35, 0x3d, 0x40, 0x3b, 0x3b, 0x3b, 0x3d, 0x3a, 0x3d, 0x3d, 0x39, + 0x3d, 0x42, 0x28, 0x2b, 0x36, 0x30, 0x38, 0x41, 0x39, 0x42, 0x47, 0x4a, + 0x4a, 0x4e, 0x51, 0x53, 0x56, 0x51, 0x4f, 0x53, 0x55, 0x57, 0x58, 0x58, + 0x58, 0x58, 0x56, 0x56, 0x57, 0x52, 0x54, 0x52, 0x50, 0x58, 0x59, 0x59, + 0x5e, 0x5b, 0x5d, 0x5a, 0x5b, 0x5d, 0x60, 0x61, 0x61, 0x60, 0x61, 0x62, + 0x61, 0x66, 0x64, 0x63, 0x65, 0x65, 0x67, 0x69, 0x69, 0x68, 0x69, 0x66, + 0x68, 0x69, 0x69, 0x6d, 0x6d, 0x6d, 0x6c, 0x6b, 0x6c, 0x6d, 0x6d, 0x6c, + 0x6d, 0x6e, 0x6e, 0x6f, 0x6f, 0x6e, 0x6d, 0x6c, 0x6d, 0x6f, 0x6f, 0x6f, + 0x6f, 0x6d, 0x64, 0x55, 0x38, 0x1b, 0xfa, 0xdd, 0xd8, 0xe0, 0xe4, 0xe7, + 0xe7, 0xee, 0xea, 0xf6, 0xfe, 0x05, 0x02, 0xf9, 0xed, 0xdc, 0xd6, 
0xda, + 0xe1, 0xef, 0xed, 0xed, 0xf5, 0xee, 0xee, 0xec, 0xe5, 0xe2, 0xce, 0xce, + 0xdb, 0xdd, 0xd8, 0xd6, 0xcf, 0xcc, 0xd2, 0xd8, 0xf7, 0xee, 0xe7, 0xe4, + 0xe5, 0xde, 0xe4, 0xeb, 0xe3, 0xe5, 0xe1, 0xe7, 0xe6, 0xee, 0xed, 0xe6, + 0xe2, 0xe5, 0xe7, 0xea, 0xe5, 0xe0, 0xe1, 0xe1, 0xd4, 0xdb, 0xdf, 0xd4, + 0xc7, 0xce, 0xdc, 0xe6, 0xe6, 0xe7, 0xde, 0xe6, 0xd3, 0xd4, 0xdf, 0xed, + 0xf3, 0xf9, 0xfe, 0x06, 0x0c, 0x0f, 0x14, 0x16, 0x1d, 0x1e, 0x1c, 0x1e, + 0x1d, 0x1b, 0x1f, 0x1f, 0x1b, 0x19, 0x1c, 0x1f, 0x1d, 0x1c, 0x20, 0x23, + 0x27, 0x26, 0x25, 0x2c, 0x2b, 0x29, 0x28, 0x29, 0x2a, 0x2c, 0x30, 0x31, + 0x30, 0x31, 0x33, 0x35, 0x36, 0x35, 0x2b, 0x27, 0x2c, 0x36, 0x3c, 0x3b, + 0x3a, 0x3f, 0x3f, 0x3c, 0x38, 0x3b, 0x35, 0x37, 0x37, 0x3f, 0x26, 0x2c, + 0x34, 0x31, 0x3d, 0x41, 0x3a, 0x44, 0x45, 0x47, 0x4c, 0x4f, 0x51, 0x52, + 0x53, 0x50, 0x4d, 0x51, 0x56, 0x57, 0x58, 0x58, 0x58, 0x59, 0x56, 0x57, + 0x56, 0x50, 0x51, 0x4f, 0x50, 0x57, 0x58, 0x58, 0x5d, 0x5b, 0x5c, 0x5a, + 0x5c, 0x5c, 0x61, 0x61, 0x61, 0x61, 0x60, 0x62, 0x61, 0x64, 0x65, 0x62, + 0x65, 0x64, 0x65, 0x69, 0x69, 0x67, 0x68, 0x67, 0x67, 0x69, 0x6a, 0x6b, + 0x6b, 0x6d, 0x6d, 0x6d, 0x6b, 0x6c, 0x6d, 0x6e, 0x6c, 0x6d, 0x6e, 0x6e, + 0x6e, 0x6e, 0x6c, 0x6d, 0x6e, 0x6f, 0x6f, 0x6f, 0x6f, 0x6f, 0x69, 0x5f, + 0x4e, 0x33, 0x06, 0xe6, 0xd7, 0xd7, 0xe1, 0xe6, 0xe5, 0xe2, 0xd6, 0xe0, + 0xea, 0xf8, 0x03, 0xf7, 0x00, 0xee, 0xda, 0xe7, 0xe2, 0xe8, 0xf3, 0xe8, + 0xe5, 0xd8, 0xde, 0xdc, 0xe1, 0xea, 0xe1, 0xe1, 0xe4, 0xd9, 0xda, 0xdb, + 0xd2, 0xd2, 0xda, 0xcd, 0xdf, 0xf6, 0xed, 0xe0, 0xec, 0xed, 0xf9, 0xeb, + 0xf1, 0xeb, 0xe1, 0xe3, 0xe1, 0xe1, 0xea, 0xde, 0xde, 0xe9, 0xe3, 0xe4, + 0xe5, 0xdd, 0xd9, 0xdb, 0xda, 0xe5, 0xe0, 0xd8, 0xde, 0xe8, 0xe0, 0xe7, + 0xd8, 0xe7, 0xdf, 0xd5, 0xda, 0xc7, 0xc8, 0xe4, 0xe8, 0xf3, 0xf6, 0x00, + 0x05, 0x0b, 0x0d, 0x11, 0x19, 0x1a, 0x17, 0x18, 0x1b, 0x1c, 0x1b, 0x1c, + 0x18, 0x19, 0x1f, 0x22, 0x1f, 0x1e, 0x21, 0x24, 0x24, 0x26, 0x27, 0x2b, + 0x2a, 0x26, 0x28, 0x27, 0x2a, 0x2c, 0x2f, 0x2f, 0x30, 0x32, 0x34, 
0x33, + 0x2f, 0x2b, 0x25, 0x2b, 0x34, 0x38, 0x3b, 0x3b, 0x3c, 0x41, 0x3d, 0x39, + 0x3a, 0x38, 0x30, 0x35, 0x34, 0x38, 0x2f, 0x2f, 0x34, 0x39, 0x3e, 0x40, + 0x3c, 0x43, 0x45, 0x44, 0x4c, 0x50, 0x50, 0x4f, 0x50, 0x4e, 0x4c, 0x50, + 0x56, 0x54, 0x58, 0x57, 0x57, 0x58, 0x55, 0x58, 0x54, 0x50, 0x50, 0x4c, + 0x4d, 0x56, 0x57, 0x59, 0x5a, 0x5c, 0x5d, 0x5a, 0x5d, 0x5d, 0x60, 0x61, + 0x61, 0x60, 0x5e, 0x60, 0x63, 0x63, 0x65, 0x63, 0x65, 0x64, 0x64, 0x69, + 0x69, 0x69, 0x68, 0x68, 0x66, 0x68, 0x69, 0x6a, 0x6a, 0x6c, 0x6b, 0x6d, + 0x6c, 0x6c, 0x6c, 0x6e, 0x6e, 0x6d, 0x6d, 0x6e, 0x6e, 0x6e, 0x6e, 0x6e, + 0x6f, 0x6f, 0x6f, 0x6f, 0x6f, 0x6f, 0x6d, 0x69, 0x5f, 0x3e, 0x0a, 0xe5, + 0xe1, 0xd3, 0xd9, 0xda, 0xda, 0xce, 0xcd, 0xde, 0xe6, 0xe3, 0xf8, 0x03, + 0x0a, 0xfd, 0xf2, 0xf6, 0xed, 0xf1, 0xfc, 0xeb, 0xe1, 0xd0, 0xd9, 0xdc, + 0xd7, 0xe4, 0xed, 0xe6, 0xe2, 0xde, 0xe0, 0xe1, 0xd9, 0xdd, 0xd6, 0xe7, + 0xe9, 0xf2, 0xf3, 0xee, 0xee, 0xec, 0xf1, 0xe7, 0x00, 0xe7, 0xe9, 0xe6, + 0xdd, 0xda, 0xe3, 0xe2, 0xdf, 0xe9, 0xe3, 0xdb, 0xd4, 0xe0, 0xda, 0xd8, + 0xdc, 0xfc, 0xed, 0xe2, 0xe6, 0xe8, 0xe1, 0xe3, 0xd8, 0xe0, 0xe1, 0xd9, + 0xd6, 0xde, 0xce, 0xda, 0xdf, 0xed, 0xed, 0xf8, 0x01, 0x07, 0x05, 0x0c, + 0x0f, 0x11, 0x11, 0x13, 0x16, 0x17, 0x14, 0x17, 0x1b, 0x1e, 0x1e, 0x20, + 0x21, 0x21, 0x22, 0x23, 0x22, 0x26, 0x25, 0x27, 0x25, 0x26, 0x26, 0x27, + 0x2b, 0x2c, 0x2b, 0x2b, 0x30, 0x32, 0x2f, 0x2b, 0x27, 0x29, 0x2d, 0x33, + 0x37, 0x3a, 0x3b, 0x39, 0x40, 0x3e, 0x3b, 0x39, 0x39, 0x36, 0x30, 0x35, + 0x2d, 0x30, 0x36, 0x31, 0x35, 0x3b, 0x3e, 0x40, 0x3d, 0x3e, 0x42, 0x44, + 0x4c, 0x50, 0x4f, 0x4c, 0x4f, 0x4e, 0x4c, 0x50, 0x56, 0x53, 0x58, 0x57, + 0x56, 0x57, 0x57, 0x59, 0x54, 0x50, 0x4f, 0x4c, 0x4d, 0x53, 0x56, 0x58, + 0x57, 0x5b, 0x5e, 0x5a, 0x5b, 0x5e, 0x5e, 0x61, 0x5f, 0x5e, 0x5e, 0x5e, + 0x63, 0x63, 0x63, 0x65, 0x64, 0x63, 0x63, 0x66, 0x69, 0x68, 0x68, 0x69, + 0x68, 0x66, 0x68, 0x6a, 0x6a, 0x6b, 0x6a, 0x6b, 0x6d, 0x6c, 0x6c, 0x6d, + 0x6e, 0x6e, 0x6e, 0x6e, 0x6e, 0x6e, 0x6d, 0x6e, 0x6f, 0x6f, 0x6f, 
0x6f, + 0x6f, 0x6f, 0x6f, 0x6a, 0x59, 0x33, 0x06, 0xe7, 0xef, 0xe1, 0xda, 0xdc, + 0xe1, 0xdb, 0xdc, 0xea, 0xde, 0xd7, 0xeb, 0xef, 0xf2, 0xf9, 0xf4, 0xf3, + 0xf4, 0xe9, 0xe6, 0xea, 0xe8, 0xd6, 0xd6, 0xe4, 0xe2, 0xda, 0xe0, 0xec, + 0xeb, 0xe7, 0xe2, 0xe1, 0xdc, 0xdb, 0xe7, 0xe7, 0xe3, 0xe9, 0xf3, 0xf9, + 0xf3, 0xf2, 0xe6, 0xe5, 0x00, 0xf2, 0xe4, 0xe7, 0xe1, 0xd7, 0xdc, 0xd3, + 0xe9, 0xe6, 0xe3, 0xe1, 0xdf, 0xe2, 0xd9, 0xd1, 0xda, 0xf8, 0xe9, 0xe5, + 0xe4, 0xe2, 0xe2, 0xe4, 0xe1, 0xdb, 0xde, 0xdd, 0xdb, 0xe3, 0xd5, 0xd2, + 0xd8, 0xe5, 0xe9, 0xef, 0xf2, 0xfa, 0xfc, 0x01, 0x05, 0x0a, 0x0b, 0x0c, + 0x0f, 0x0e, 0x0d, 0x16, 0x1a, 0x1c, 0x1f, 0x1f, 0x21, 0x21, 0x21, 0x21, + 0x22, 0x25, 0x23, 0x25, 0x25, 0x25, 0x22, 0x26, 0x2a, 0x2a, 0x29, 0x2c, + 0x2e, 0x2b, 0x26, 0x26, 0x28, 0x2e, 0x31, 0x37, 0x3a, 0x3c, 0x37, 0x37, + 0x40, 0x3e, 0x3c, 0x39, 0x37, 0x33, 0x35, 0x37, 0x2c, 0x2b, 0x37, 0x37, + 0x37, 0x3a, 0x3f, 0x40, 0x3e, 0x40, 0x40, 0x43, 0x4a, 0x4e, 0x4b, 0x48, + 0x4c, 0x4d, 0x4d, 0x4f, 0x52, 0x52, 0x58, 0x55, 0x57, 0x53, 0x57, 0x57, + 0x56, 0x4e, 0x4d, 0x4b, 0x4c, 0x53, 0x57, 0x58, 0x56, 0x57, 0x5d, 0x5c, + 0x5a, 0x5f, 0x5d, 0x5f, 0x5d, 0x5d, 0x5d, 0x5d, 0x62, 0x63, 0x5e, 0x63, + 0x62, 0x62, 0x62, 0x66, 0x69, 0x68, 0x68, 0x67, 0x68, 0x66, 0x66, 0x69, + 0x6a, 0x6a, 0x69, 0x69, 0x6c, 0x6c, 0x6b, 0x6c, 0x6c, 0x6e, 0x6e, 0x6e, + 0x6e, 0x6f, 0x6c, 0x6d, 0x6e, 0x6e, 0x6f, 0x6f, 0x6f, 0x6f, 0x6f, 0x6d, + 0x63, 0x4e, 0x37, 0x12, 0xf1, 0xfb, 0xe4, 0xdc, 0xde, 0xe6, 0xf1, 0xe7, + 0xec, 0xe0, 0xe7, 0xe7, 0xe9, 0xf1, 0xfb, 0xff, 0xfa, 0xee, 0xdf, 0xe2, + 0xef, 0xde, 0xcb, 0xd9, 0xf6, 0xf2, 0xde, 0xe5, 0xec, 0xf4, 0xe6, 0xdf, + 0xe0, 0xda, 0xe5, 0xe5, 0xe1, 0xdc, 0xe8, 0xf3, 0xef, 0xe9, 0xe5, 0xe2, + 0xf6, 0xf7, 0xdd, 0xde, 0xe7, 0xdf, 0xde, 0xcc, 0xe8, 0xe1, 0xe2, 0xe1, + 0xd8, 0xe9, 0xdd, 0xd4, 0xd6, 0xed, 0xeb, 0xe8, 0xe1, 0xe1, 0xe1, 0xe1, + 0xe3, 0xe0, 0xdc, 0xd8, 0xdd, 0xde, 0xd4, 0xd0, 0xd7, 0xdc, 0xe3, 0xe3, + 0xed, 0xed, 0xf2, 0xf3, 0xf4, 0xfb, 0xff, 0x00, 0xfe, 0xff, 0x06, 
0x12, + 0x18, 0x1f, 0x1e, 0x18, 0x1d, 0x1e, 0x20, 0x22, 0x21, 0x23, 0x24, 0x25, + 0x21, 0x21, 0x23, 0x26, 0x25, 0x25, 0x29, 0x2b, 0x27, 0x27, 0x25, 0x26, + 0x29, 0x2f, 0x32, 0x37, 0x3b, 0x3a, 0x35, 0x38, 0x40, 0x3d, 0x3b, 0x38, + 0x35, 0x31, 0x38, 0x37, 0x2f, 0x25, 0x38, 0x3e, 0x38, 0x3a, 0x41, 0x3e, + 0x41, 0x44, 0x42, 0x41, 0x46, 0x4d, 0x4b, 0x47, 0x4a, 0x4e, 0x4e, 0x50, + 0x50, 0x52, 0x58, 0x54, 0x54, 0x53, 0x57, 0x55, 0x52, 0x4d, 0x4c, 0x4c, + 0x4c, 0x51, 0x56, 0x58, 0x57, 0x57, 0x5b, 0x5d, 0x5b, 0x60, 0x5d, 0x5f, + 0x5b, 0x5c, 0x5d, 0x5c, 0x5d, 0x61, 0x5c, 0x60, 0x62, 0x62, 0x62, 0x64, + 0x68, 0x67, 0x67, 0x66, 0x67, 0x66, 0x67, 0x69, 0x6a, 0x6a, 0x69, 0x69, + 0x69, 0x6b, 0x6b, 0x6b, 0x6b, 0x6d, 0x6d, 0x6e, 0x6f, 0x6f, 0x6e, 0x6d, + 0x6e, 0x6e, 0x6f, 0x6e, 0x6f, 0x6f, 0x6f, 0x6f, 0x6b, 0x61, 0x4b, 0x24, + 0x0c, 0x0e, 0xfa, 0xe4, 0xe0, 0xd9, 0xe8, 0xe1, 0xe9, 0xec, 0xe1, 0xe8, + 0xe9, 0xe9, 0xf3, 0x04, 0x0c, 0xfb, 0xdd, 0xd9, 0xec, 0xf0, 0xdc, 0xdf, + 0xf4, 0x02, 0xf6, 0xe7, 0xe0, 0xec, 0xe7, 0xdc, 0xe5, 0xdd, 0xda, 0xd9, + 0xdf, 0xe1, 0xe8, 0xf6, 0xf4, 0xe7, 0xed, 0xe6, 0xf8, 0x0c, 0xf3, 0xdc, + 0xde, 0xe9, 0xe9, 0xc6, 0xe8, 0xe8, 0xed, 0xdd, 0xd5, 0xe3, 0xe1, 0xd2, + 0xd4, 0xe9, 0xec, 0xe2, 0xe0, 0xda, 0xdc, 0xe0, 0xe5, 0xe6, 0xdd, 0xd6, + 0xd8, 0xe6, 0xdd, 0xd8, 0xd4, 0xd8, 0xe4, 0xde, 0xea, 0xe4, 0xdd, 0xe3, + 0xea, 0xf2, 0xf3, 0xed, 0xf2, 0xfb, 0x02, 0x15, 0x1e, 0x18, 0x17, 0x14, + 0x19, 0x1e, 0x20, 0x1f, 0x1e, 0x20, 0x24, 0x22, 0x1f, 0x21, 0x23, 0x24, + 0x23, 0x25, 0x2a, 0x26, 0x25, 0x27, 0x25, 0x25, 0x2b, 0x30, 0x33, 0x38, + 0x3b, 0x33, 0x34, 0x39, 0x3f, 0x3b, 0x35, 0x39, 0x33, 0x30, 0x39, 0x36, + 0x2e, 0x2b, 0x3d, 0x3f, 0x39, 0x3c, 0x3f, 0x3e, 0x40, 0x42, 0x44, 0x40, + 0x42, 0x4b, 0x4c, 0x48, 0x49, 0x4d, 0x4f, 0x4f, 0x4e, 0x53, 0x57, 0x52, + 0x51, 0x53, 0x58, 0x51, 0x4f, 0x4c, 0x4d, 0x4d, 0x4e, 0x53, 0x53, 0x58, + 0x5a, 0x57, 0x59, 0x5d, 0x5d, 0x60, 0x5d, 0x5d, 0x5b, 0x5b, 0x5d, 0x5d, + 0x5c, 0x5f, 0x5d, 0x5e, 0x60, 0x61, 0x63, 0x63, 0x66, 0x67, 0x67, 
0x67, + 0x67, 0x65, 0x66, 0x67, 0x68, 0x6a, 0x69, 0x69, 0x6a, 0x6a, 0x6a, 0x6c, + 0x6c, 0x6c, 0x6d, 0x6c, 0x6e, 0x6f, 0x6e, 0x6d, 0x6e, 0x6f, 0x6e, 0x6e, + 0x6f, 0x6f, 0x6f, 0x6f, 0x6e, 0x64, 0x52, 0x35, 0x08, 0x11, 0x1b, 0x0b, + 0xf1, 0xe3, 0xf2, 0xeb, 0xe2, 0xe1, 0xe1, 0xdc, 0xe7, 0xf3, 0xf1, 0xf6, + 0x12, 0x06, 0xf9, 0xed, 0xe1, 0xec, 0xed, 0xd6, 0xe7, 0xfd, 0xf4, 0xf3, + 0xe6, 0xe7, 0xe5, 0xe0, 0xdc, 0xda, 0xd9, 0xd8, 0xe7, 0xe8, 0xf4, 0xf9, + 0xfc, 0xf4, 0xeb, 0xde, 0xf0, 0xf6, 0xed, 0xe2, 0xe7, 0xf1, 0xf4, 0xda, + 0xed, 0xf0, 0xec, 0xed, 0xe5, 0xe3, 0xe6, 0xe3, 0xed, 0xe6, 0xe3, 0xda, + 0xe1, 0xdb, 0xd2, 0xdc, 0xdf, 0xe1, 0xdf, 0xe0, 0xe0, 0xdc, 0xda, 0xd7, + 0xd6, 0xda, 0xe1, 0xdf, 0xe6, 0xe5, 0xdc, 0xd9, 0xdf, 0xe5, 0xe7, 0xe9, + 0xed, 0xf6, 0x0b, 0x19, 0x14, 0x0c, 0x0d, 0x12, 0x18, 0x1e, 0x1b, 0x1a, + 0x1e, 0x1d, 0x22, 0x1c, 0x1f, 0x22, 0x1f, 0x20, 0x22, 0x26, 0x27, 0x24, + 0x28, 0x27, 0x25, 0x26, 0x2c, 0x31, 0x34, 0x3c, 0x35, 0x33, 0x36, 0x38, + 0x3d, 0x37, 0x31, 0x3a, 0x33, 0x31, 0x38, 0x35, 0x2f, 0x37, 0x3e, 0x39, + 0x3d, 0x40, 0x41, 0x3f, 0x40, 0x3f, 0x41, 0x3e, 0x42, 0x47, 0x4e, 0x4a, + 0x47, 0x4b, 0x4e, 0x4e, 0x4f, 0x51, 0x54, 0x50, 0x50, 0x55, 0x57, 0x51, + 0x4a, 0x49, 0x4f, 0x4e, 0x50, 0x56, 0x55, 0x58, 0x5a, 0x58, 0x57, 0x5c, + 0x5d, 0x60, 0x5d, 0x5d, 0x5c, 0x5b, 0x5c, 0x5e, 0x5d, 0x5d, 0x5d, 0x5c, + 0x5f, 0x5f, 0x63, 0x63, 0x63, 0x66, 0x65, 0x67, 0x69, 0x66, 0x65, 0x65, + 0x66, 0x69, 0x69, 0x69, 0x69, 0x69, 0x69, 0x6b, 0x6c, 0x6c, 0x6d, 0x6c, + 0x6d, 0x6f, 0x6f, 0x6f, 0x6e, 0x6f, 0x6e, 0x6e, 0x6f, 0x6f, 0x6f, 0x6f, + 0x6f, 0x6c, 0x61, 0x3e, 0xfb, 0xf4, 0x0b, 0x1d, 0x10, 0xfd, 0xf8, 0xf1, + 0xdc, 0xd4, 0xe4, 0xe7, 0xe7, 0xe7, 0xe4, 0xf0, 0x0f, 0x08, 0x0b, 0x0c, + 0xe5, 0xdd, 0xf1, 0xf3, 0xeb, 0xfd, 0xf9, 0xf5, 0xe2, 0xe1, 0xe7, 0xe3, + 0xda, 0xd9, 0xcb, 0xcf, 0xe1, 0xe6, 0xf4, 0x03, 0xfc, 0xfb, 0xed, 0xd6, + 0xda, 0xe3, 0xda, 0xd5, 0xe0, 0xe9, 0xee, 0xe5, 0xf2, 0xfb, 0xec, 0xe9, + 0xe3, 0xe9, 0xf0, 0xee, 0xe8, 0xe4, 0xdc, 0xd9, 0xde, 0xd9, 0xd4, 
0xeb, + 0xd9, 0xda, 0xe4, 0xf1, 0xdf, 0xc9, 0xd2, 0xda, 0xd4, 0xd0, 0xd5, 0xdb, + 0xdd, 0xdd, 0xda, 0xd0, 0xd4, 0xe0, 0xe1, 0xe5, 0xe7, 0xfd, 0x0c, 0x0a, + 0x0a, 0x0b, 0x11, 0x15, 0x1b, 0x18, 0x13, 0x1b, 0x19, 0x1d, 0x19, 0x19, + 0x1f, 0x1e, 0x1f, 0x1f, 0x23, 0x23, 0x22, 0x25, 0x27, 0x21, 0x23, 0x26, + 0x2b, 0x31, 0x33, 0x36, 0x2f, 0x32, 0x37, 0x39, 0x39, 0x34, 0x32, 0x39, + 0x37, 0x31, 0x39, 0x33, 0x35, 0x3e, 0x3a, 0x37, 0x37, 0x41, 0x43, 0x3e, + 0x40, 0x40, 0x3e, 0x3e, 0x3e, 0x44, 0x4a, 0x4c, 0x49, 0x49, 0x4e, 0x4b, + 0x4d, 0x51, 0x51, 0x50, 0x4f, 0x53, 0x52, 0x50, 0x4a, 0x4a, 0x4f, 0x4e, + 0x53, 0x57, 0x55, 0x58, 0x58, 0x58, 0x57, 0x5b, 0x5d, 0x60, 0x5e, 0x5d, + 0x5c, 0x5d, 0x5c, 0x5e, 0x5d, 0x5d, 0x5d, 0x5b, 0x5e, 0x5d, 0x5f, 0x63, + 0x63, 0x64, 0x63, 0x63, 0x66, 0x66, 0x65, 0x65, 0x64, 0x68, 0x69, 0x69, + 0x69, 0x69, 0x69, 0x6a, 0x6c, 0x6d, 0x6e, 0x6d, 0x6d, 0x6e, 0x6f, 0x6f, + 0x6f, 0x6f, 0x6f, 0x6f, 0x6f, 0x6f, 0x6f, 0x6f, 0x6f, 0x6e, 0x67, 0x4a, + 0x1b, 0x09, 0x06, 0x0c, 0x1a, 0x20, 0x13, 0xf7, 0xed, 0xda, 0xe6, 0xf8, + 0xe7, 0xe4, 0xe7, 0xea, 0x09, 0x13, 0x11, 0x14, 0xf3, 0xd9, 0xe5, 0xfa, + 0xf8, 0xfd, 0x08, 0xf8, 0xdd, 0xd5, 0xe6, 0xe4, 0xdd, 0xe2, 0xd8, 0xd1, + 0xe5, 0xeb, 0xf2, 0xfd, 0xff, 0x02, 0xfc, 0xe1, 0xde, 0xe8, 0xf3, 0xdb, + 0xde, 0xe0, 0xea, 0xeb, 0xee, 0xf4, 0xf0, 0xf2, 0xed, 0xef, 0xf5, 0xf0, + 0xdc, 0xdf, 0xf1, 0xea, 0xea, 0xea, 0xe0, 0xf1, 0xd7, 0xe0, 0xe7, 0xe2, + 0xdc, 0xe7, 0xce, 0xe1, 0xd2, 0xd1, 0xc8, 0xd4, 0xdd, 0xd7, 0xd2, 0xd2, + 0xda, 0xdb, 0xda, 0xe4, 0xf3, 0x02, 0x02, 0x06, 0x0c, 0x10, 0x12, 0x16, + 0x17, 0x13, 0x17, 0x1a, 0x18, 0x16, 0x18, 0x1d, 0x1e, 0x1c, 0x1e, 0x1f, + 0x20, 0x1f, 0x20, 0x27, 0x23, 0x20, 0x24, 0x2a, 0x2f, 0x30, 0x31, 0x32, + 0x2f, 0x31, 0x35, 0x37, 0x37, 0x33, 0x35, 0x37, 0x36, 0x32, 0x37, 0x35, + 0x39, 0x3e, 0x38, 0x35, 0x36, 0x3e, 0x43, 0x3f, 0x42, 0x41, 0x3d, 0x3d, + 0x3d, 0x42, 0x47, 0x4a, 0x48, 0x48, 0x4d, 0x4b, 0x49, 0x4f, 0x52, 0x4f, + 0x4d, 0x50, 0x50, 0x4e, 0x4b, 0x4a, 0x50, 0x50, 0x57, 0x59, 0x56, 
0x56, + 0x58, 0x58, 0x57, 0x59, 0x5d, 0x60, 0x5e, 0x5e, 0x5d, 0x5d, 0x5c, 0x5e, + 0x5d, 0x5d, 0x5d, 0x5d, 0x5e, 0x5f, 0x5d, 0x63, 0x63, 0x63, 0x63, 0x61, + 0x63, 0x67, 0x66, 0x66, 0x63, 0x65, 0x66, 0x68, 0x6a, 0x69, 0x69, 0x6a, + 0x6c, 0x6d, 0x6d, 0x6d, 0x6e, 0x6e, 0x6f, 0x6f, 0x6f, 0x6f, 0x6f, 0x6f, + 0x6f, 0x6f, 0x6f, 0x6f, 0x6f, 0x6e, 0x6d, 0x60, 0x34, 0x19, 0x11, 0x0a, + 0x05, 0x12, 0x2a, 0x15, 0x02, 0xdf, 0xed, 0xf0, 0xe8, 0xea, 0xec, 0xe8, + 0xf7, 0x11, 0x1d, 0x15, 0xff, 0xdb, 0xd4, 0xe5, 0xf1, 0xfe, 0x04, 0xf8, + 0xeb, 0xde, 0xda, 0xea, 0xee, 0xf4, 0xff, 0xeb, 0xf3, 0xf3, 0xf9, 0xfd, + 0x08, 0x05, 0xfc, 0xf5, 0xed, 0xf7, 0x0c, 0xf1, 0xdb, 0xdc, 0xf2, 0xf5, + 0xf2, 0xff, 0xfc, 0xf3, 0xef, 0xf8, 0xf2, 0xec, 0xe3, 0xe4, 0xfe, 0xfc, + 0xf2, 0xf7, 0xe1, 0xec, 0xe3, 0xee, 0xd7, 0xcd, 0xdb, 0xea, 0xcf, 0xc1, + 0xcd, 0xcd, 0xca, 0xd3, 0xd6, 0xd2, 0xce, 0xd3, 0xd0, 0xd1, 0xe1, 0xf1, + 0xf6, 0xfa, 0x03, 0x06, 0x0d, 0x13, 0x14, 0x15, 0x13, 0x14, 0x1a, 0x1a, + 0x12, 0x11, 0x19, 0x1a, 0x18, 0x1a, 0x1c, 0x1d, 0x1e, 0x1e, 0x22, 0x24, + 0x20, 0x20, 0x25, 0x29, 0x2c, 0x2c, 0x2e, 0x30, 0x30, 0x32, 0x33, 0x33, + 0x34, 0x31, 0x34, 0x35, 0x34, 0x34, 0x36, 0x36, 0x3c, 0x3b, 0x33, 0x33, + 0x35, 0x3e, 0x42, 0x3e, 0x44, 0x42, 0x3c, 0x3e, 0x3e, 0x42, 0x43, 0x49, + 0x45, 0x49, 0x4a, 0x4b, 0x4a, 0x4c, 0x50, 0x50, 0x4e, 0x4e, 0x4e, 0x4d, + 0x4d, 0x4a, 0x4e, 0x52, 0x57, 0x5a, 0x58, 0x56, 0x57, 0x57, 0x57, 0x57, + 0x5d, 0x60, 0x5f, 0x5e, 0x5d, 0x5d, 0x5b, 0x5d, 0x5e, 0x5e, 0x5e, 0x5d, + 0x5e, 0x61, 0x5e, 0x61, 0x63, 0x64, 0x63, 0x62, 0x63, 0x67, 0x67, 0x67, + 0x64, 0x64, 0x63, 0x65, 0x69, 0x69, 0x69, 0x69, 0x6b, 0x6d, 0x6c, 0x6c, + 0x6e, 0x6f, 0x6f, 0x6e, 0x6f, 0x6f, 0x6f, 0x6f, 0x6f, 0x6f, 0x6f, 0x6f, + 0x6f, 0x6f, 0x6e, 0x69, 0x55, 0x2d, 0x1e, 0x23, 0x0e, 0x04, 0x13, 0x2f, + 0x13, 0x0c, 0xff, 0xf3, 0xdd, 0xe1, 0xd7, 0xde, 0xf0, 0x13, 0x24, 0x1f, + 0x10, 0xe7, 0xc6, 0xca, 0xe4, 0xf2, 0xfa, 0xf8, 0xf5, 0xea, 0xe7, 0xea, + 0xed, 0xfc, 0x01, 0x03, 0x14, 0xf1, 0xec, 0xfa, 0x12, 0x06, 0xf0, 
0xed, + 0xf0, 0xfe, 0x16, 0x0c, 0xe7, 0xd7, 0x00, 0x06, 0xfc, 0x00, 0xee, 0xed, + 0xe6, 0xeb, 0xfb, 0xf0, 0xe2, 0xec, 0xf9, 0xf9, 0xf7, 0x03, 0xf3, 0xf9, + 0xea, 0xf0, 0xdc, 0xdc, 0xdd, 0xe1, 0xdf, 0xd5, 0xd0, 0xcc, 0xce, 0xce, + 0xce, 0xd4, 0xcb, 0xcd, 0xd1, 0xd8, 0xe0, 0xe8, 0xef, 0xfb, 0xfc, 0x03, + 0x0c, 0x12, 0x12, 0x12, 0x12, 0x11, 0x17, 0x16, 0x0f, 0x16, 0x18, 0x17, + 0x18, 0x19, 0x19, 0x1c, 0x1d, 0x1f, 0x21, 0x1c, 0x1b, 0x1f, 0x23, 0x25, + 0x27, 0x28, 0x2b, 0x2b, 0x2e, 0x30, 0x31, 0x31, 0x34, 0x31, 0x35, 0x34, + 0x33, 0x34, 0x34, 0x38, 0x39, 0x36, 0x34, 0x32, 0x34, 0x3c, 0x41, 0x3d, + 0x41, 0x42, 0x3c, 0x3e, 0x3f, 0x44, 0x42, 0x46, 0x45, 0x46, 0x47, 0x48, + 0x4a, 0x4c, 0x4d, 0x50, 0x4f, 0x4e, 0x4c, 0x4e, 0x4d, 0x4c, 0x4f, 0x53, + 0x57, 0x58, 0x58, 0x57, 0x55, 0x56, 0x58, 0x57, 0x5b, 0x5f, 0x61, 0x5f, + 0x5d, 0x5e, 0x5d, 0x5d, 0x5d, 0x5e, 0x5f, 0x5f, 0x60, 0x62, 0x61, 0x61, + 0x62, 0x63, 0x63, 0x63, 0x62, 0x63, 0x65, 0x67, 0x67, 0x65, 0x63, 0x64, + 0x68, 0x69, 0x69, 0x69, 0x6a, 0x6c, 0x6c, 0x6d, 0x6e, 0x6f, 0x6f, 0x6f, + 0x6f, 0x6f, 0x6f, 0x6f, 0x6f, 0x6f, 0x6f, 0x6f, 0x6f, 0x6f, 0x6e, 0x6d, + 0x63, 0x47, 0x3b, 0x3b, 0x2f, 0x27, 0x0f, 0x16, 0x2f, 0x23, 0x19, 0x08, + 0xed, 0xea, 0xdc, 0xca, 0xe5, 0x0e, 0x28, 0x2b, 0x1a, 0xef, 0xdc, 0xce, + 0xc9, 0xdd, 0xe5, 0xe7, 0xf1, 0xf0, 0xf3, 0xf1, 0xf0, 0xfb, 0xfc, 0x0c, + 0x1f, 0x08, 0xfd, 0x0e, 0x1f, 0x00, 0xf3, 0xec, 0xfa, 0x0b, 0x1b, 0x32, + 0x18, 0x05, 0x1a, 0x16, 0x00, 0xf4, 0xe1, 0xf0, 0xff, 0x04, 0x06, 0xf0, + 0xe2, 0xe3, 0xfd, 0xf9, 0xfd, 0xff, 0xf0, 0xec, 0xe0, 0xda, 0xe1, 0xe8, + 0xdf, 0xda, 0xe2, 0xe6, 0xd7, 0xcd, 0xcc, 0xd0, 0xcd, 0xd1, 0xc8, 0xc8, + 0xd4, 0xd9, 0xe1, 0xe7, 0xed, 0xf2, 0xf9, 0x08, 0x0e, 0x0c, 0x0c, 0x0e, + 0x0e, 0x12, 0x12, 0x0e, 0x11, 0x17, 0x14, 0x14, 0x16, 0x18, 0x19, 0x1b, + 0x1e, 0x20, 0x1e, 0x17, 0x1a, 0x1f, 0x22, 0x22, 0x21, 0x27, 0x29, 0x2b, + 0x2b, 0x29, 0x2b, 0x2e, 0x33, 0x30, 0x33, 0x34, 0x33, 0x32, 0x2c, 0x38, + 0x36, 0x31, 0x32, 0x33, 0x35, 0x3a, 0x3e, 0x3f, 0x40, 0x3e, 0x39, 
0x3c, + 0x42, 0x45, 0x41, 0x44, 0x48, 0x45, 0x47, 0x47, 0x48, 0x4b, 0x4c, 0x4f, + 0x50, 0x4c, 0x4b, 0x4d, 0x50, 0x4e, 0x51, 0x55, 0x58, 0x57, 0x56, 0x59, + 0x56, 0x57, 0x58, 0x58, 0x57, 0x5d, 0x60, 0x5f, 0x5d, 0x5f, 0x5d, 0x5d, + 0x5c, 0x5d, 0x5e, 0x60, 0x62, 0x62, 0x63, 0x62, 0x62, 0x61, 0x63, 0x64, + 0x62, 0x63, 0x63, 0x67, 0x68, 0x67, 0x64, 0x65, 0x68, 0x68, 0x69, 0x6a, + 0x6b, 0x6c, 0x6b, 0x6d, 0x6e, 0x6f, 0x6f, 0x6f, 0x6f, 0x6f, 0x6f, 0x6f, + 0x6f, 0x6f, 0x6f, 0x6f, 0x6e, 0x6f, 0x6f, 0x6e, 0x6b, 0x64, 0x5f, 0x58, + 0x56, 0x4b, 0x2c, 0x0c, 0x1f, 0x34, 0x31, 0x24, 0x08, 0xf7, 0xeb, 0xd5, + 0xde, 0x02, 0x2c, 0x32, 0x21, 0xf4, 0xe7, 0xe0, 0xd2, 0xe5, 0xf3, 0xe7, + 0xef, 0xf0, 0xed, 0xf3, 0xf7, 0xf6, 0xff, 0x09, 0x1a, 0x13, 0x0c, 0x1f, + 0x0e, 0xf9, 0x01, 0x01, 0x0e, 0x14, 0x27, 0x38, 0x2f, 0x20, 0x26, 0x14, + 0xfa, 0xf5, 0xf3, 0xfc, 0x05, 0xfe, 0xfb, 0xdf, 0xd0, 0xe2, 0xf3, 0x01, + 0x0a, 0xf8, 0xe0, 0xe1, 0xde, 0xd9, 0xe2, 0xea, 0xe0, 0xed, 0xed, 0xdf, + 0xe4, 0xd5, 0xc8, 0xd0, 0xcd, 0xc9, 0xc0, 0xc6, 0xcf, 0xd4, 0xe1, 0xe1, + 0xe4, 0xed, 0xfb, 0x03, 0xff, 0x01, 0x06, 0x09, 0x0b, 0x11, 0x0f, 0x0e, + 0x16, 0x11, 0x14, 0x14, 0x15, 0x19, 0x1b, 0x1a, 0x1f, 0x1f, 0x1b, 0x19, + 0x1c, 0x1c, 0x1d, 0x1d, 0x22, 0x22, 0x25, 0x2a, 0x2c, 0x25, 0x27, 0x2c, + 0x32, 0x2e, 0x30, 0x33, 0x35, 0x32, 0x2b, 0x34, 0x32, 0x2c, 0x31, 0x33, + 0x36, 0x39, 0x3e, 0x3f, 0x41, 0x3b, 0x38, 0x38, 0x41, 0x47, 0x43, 0x40, + 0x48, 0x48, 0x45, 0x47, 0x46, 0x4a, 0x4d, 0x4d, 0x50, 0x4c, 0x4d, 0x4a, + 0x4e, 0x4e, 0x53, 0x57, 0x58, 0x56, 0x55, 0x58, 0x57, 0x57, 0x57, 0x57, + 0x57, 0x5a, 0x5d, 0x5d, 0x5d, 0x5e, 0x5d, 0x5d, 0x5d, 0x5c, 0x5d, 0x5f, + 0x62, 0x62, 0x63, 0x63, 0x61, 0x63, 0x62, 0x63, 0x63, 0x63, 0x64, 0x69, + 0x68, 0x68, 0x67, 0x66, 0x69, 0x69, 0x69, 0x6b, 0x6d, 0x6c, 0x6c, 0x6c, + 0x6d, 0x6f, 0x6f, 0x6f, 0x6f, 0x6f, 0x6f, 0x6f, 0x6f, 0x6f, 0x6f, 0x6f, + 0x6d, 0x6f, 0x6f, 0x6f, 0x6d, 0x6b, 0x6b, 0x6a, 0x65, 0x57, 0x44, 0x30, + 0x1f, 0x20, 0x36, 0x38, 0x2b, 0x10, 0x01, 0xe6, 0xdf, 0xf9, 0x2d, 
0x3e, + 0x2b, 0x02, 0xda, 0xcc, 0xda, 0xe4, 0x04, 0xee, 0xea, 0xe9, 0xe1, 0xe1, + 0xf3, 0xf3, 0xf8, 0x10, 0x1e, 0x22, 0x1e, 0x20, 0x02, 0xfe, 0xfd, 0x0e, + 0x22, 0x30, 0x36, 0x3d, 0x3e, 0x33, 0x26, 0x15, 0x12, 0x11, 0xfd, 0xf8, + 0xf0, 0xef, 0xf5, 0xe8, 0xde, 0xfa, 0x02, 0x0e, 0x00, 0xed, 0xe1, 0xde, + 0xde, 0xd8, 0xdb, 0xe6, 0xe6, 0xf1, 0xee, 0xec, 0xe1, 0xd1, 0xcf, 0xd0, + 0xcc, 0xc9, 0xbf, 0xc3, 0xca, 0xd0, 0xd9, 0xdb, 0xeb, 0xee, 0xed, 0xee, + 0xed, 0xfd, 0x02, 0x02, 0x0b, 0x0c, 0x0d, 0x13, 0x10, 0x0e, 0x15, 0x14, + 0x14, 0x19, 0x18, 0x1b, 0x1e, 0x1d, 0x17, 0x1a, 0x1b, 0x17, 0x18, 0x1a, + 0x1f, 0x1e, 0x22, 0x27, 0x2b, 0x22, 0x22, 0x2c, 0x31, 0x2d, 0x2b, 0x32, + 0x37, 0x32, 0x2a, 0x31, 0x31, 0x2b, 0x30, 0x32, 0x37, 0x39, 0x3b, 0x3c, + 0x40, 0x3d, 0x3a, 0x37, 0x3f, 0x46, 0x45, 0x40, 0x45, 0x49, 0x48, 0x45, + 0x48, 0x48, 0x4a, 0x4b, 0x4e, 0x4a, 0x4c, 0x4a, 0x4c, 0x4e, 0x54, 0x58, + 0x58, 0x56, 0x55, 0x58, 0x57, 0x56, 0x54, 0x57, 0x59, 0x57, 0x5c, 0x5c, + 0x5b, 0x5d, 0x5d, 0x5d, 0x5d, 0x5c, 0x5c, 0x5d, 0x62, 0x62, 0x63, 0x64, + 0x62, 0x64, 0x63, 0x64, 0x63, 0x62, 0x65, 0x69, 0x68, 0x66, 0x69, 0x67, + 0x6a, 0x69, 0x69, 0x6c, 0x6d, 0x6c, 0x6d, 0x6c, 0x6c, 0x6e, 0x6f, 0x6f, + 0x6f, 0x6f, 0x6f, 0x6f, 0x6f, 0x6f, 0x6f, 0x6f, 0x6d, 0x6f, 0x6f, 0x6f, + 0x6d, 0x6c, 0x6e, 0x6e, 0x69, 0x5f, 0x57, 0x46, 0x28, 0x18, 0x25, 0x2f, + 0x37, 0x2f, 0x1e, 0xfe, 0xe7, 0xf7, 0x26, 0x40, 0x3c, 0x17, 0xe0, 0xc1, + 0xd4, 0xe7, 0xf9, 0xf2, 0xeb, 0xe1, 0xd8, 0xe6, 0xf0, 0x02, 0x12, 0x24, + 0x23, 0x2f, 0x2d, 0x1a, 0x00, 0x08, 0x17, 0x2b, 0x40, 0x44, 0x43, 0x40, + 0x38, 0x30, 0x28, 0x20, 0x10, 0x00, 0xf2, 0xf9, 0xf8, 0x07, 0xf9, 0x03, + 0x15, 0x1b, 0x19, 0x0c, 0xf9, 0xe6, 0xed, 0xe3, 0xe5, 0xdf, 0xdd, 0xe5, + 0xec, 0xed, 0xe8, 0xf4, 0xde, 0xe0, 0xca, 0xce, 0xd0, 0xd2, 0xc8, 0xc4, + 0xc7, 0xce, 0xd7, 0xdd, 0xe7, 0xe6, 0xe5, 0xe3, 0xea, 0xf9, 0xf9, 0x01, + 0x09, 0x0b, 0x0e, 0x11, 0x0e, 0x12, 0x18, 0x13, 0x14, 0x17, 0x14, 0x1a, + 0x1c, 0x19, 0x16, 0x18, 0x14, 0x12, 0x17, 0x1a, 0x1d, 0x1d, 0x20, 
0x25, + 0x25, 0x1f, 0x1f, 0x2b, 0x2e, 0x2b, 0x27, 0x2b, 0x34, 0x33, 0x2a, 0x2e, + 0x31, 0x2f, 0x31, 0x35, 0x35, 0x37, 0x39, 0x37, 0x3b, 0x3d, 0x3b, 0x37, + 0x3e, 0x44, 0x47, 0x41, 0x44, 0x48, 0x47, 0x43, 0x47, 0x49, 0x48, 0x4a, + 0x4a, 0x48, 0x4c, 0x49, 0x4d, 0x4e, 0x53, 0x58, 0x59, 0x56, 0x56, 0x56, + 0x56, 0x55, 0x54, 0x55, 0x59, 0x57, 0x59, 0x5b, 0x5c, 0x5d, 0x5e, 0x5d, + 0x5c, 0x5b, 0x5b, 0x5d, 0x61, 0x63, 0x60, 0x63, 0x63, 0x63, 0x63, 0x64, + 0x64, 0x62, 0x63, 0x69, 0x69, 0x68, 0x69, 0x68, 0x69, 0x6a, 0x69, 0x6c, + 0x6d, 0x6c, 0x6d, 0x6d, 0x6c, 0x6e, 0x6e, 0x6f, 0x6f, 0x6f, 0x6f, 0x6f, + 0x6f, 0x6f, 0x6f, 0x6f, 0x6d, 0x6e, 0x6f, 0x6f, 0x6d, 0x6c, 0x6e, 0x6e, + 0x6c, 0x69, 0x64, 0x5b, 0x4c, 0x35, 0x25, 0x28, 0x31, 0x3b, 0x35, 0x1c, + 0xfe, 0x05, 0x24, 0x3c, 0x44, 0x2e, 0x0b, 0xd9, 0xcc, 0xea, 0xf1, 0xee, + 0xea, 0xdc, 0xd7, 0xe1, 0xe5, 0x0b, 0x25, 0x27, 0x26, 0x37, 0x30, 0x15, + 0x0b, 0x25, 0x3a, 0x43, 0x48, 0x46, 0x44, 0x3b, 0x32, 0x32, 0x2a, 0x22, + 0x12, 0x04, 0xf2, 0xfa, 0x02, 0x21, 0x0c, 0x1d, 0x2b, 0x23, 0x09, 0x02, + 0xf7, 0xf4, 0x00, 0xed, 0xe8, 0xe6, 0xe7, 0xef, 0xfa, 0xef, 0xf0, 0x00, + 0xd0, 0xe2, 0xd5, 0xce, 0xcf, 0xdb, 0xce, 0xc8, 0xc6, 0xd4, 0xe1, 0xd8, + 0xd5, 0xe7, 0xe6, 0xe1, 0xea, 0xf5, 0xf9, 0x00, 0x06, 0x0a, 0x0d, 0x10, + 0x13, 0x18, 0x16, 0x15, 0x14, 0x13, 0x13, 0x17, 0x1a, 0x15, 0x15, 0x17, + 0x14, 0x13, 0x18, 0x1c, 0x19, 0x1a, 0x1f, 0x25, 0x23, 0x1f, 0x19, 0x29, + 0x2a, 0x27, 0x27, 0x29, 0x31, 0x35, 0x2e, 0x2b, 0x30, 0x31, 0x32, 0x34, + 0x36, 0x36, 0x37, 0x36, 0x36, 0x3b, 0x3b, 0x3a, 0x3d, 0x42, 0x46, 0x44, + 0x45, 0x45, 0x45, 0x44, 0x44, 0x48, 0x47, 0x49, 0x47, 0x44, 0x4d, 0x4c, + 0x4f, 0x50, 0x52, 0x57, 0x57, 0x55, 0x57, 0x55, 0x55, 0x55, 0x56, 0x56, + 0x57, 0x57, 0x58, 0x5a, 0x5c, 0x5d, 0x5e, 0x5d, 0x5b, 0x5b, 0x5c, 0x5e, + 0x61, 0x62, 0x5f, 0x62, 0x63, 0x61, 0x65, 0x65, 0x63, 0x63, 0x63, 0x67, + 0x68, 0x68, 0x69, 0x69, 0x68, 0x6a, 0x69, 0x6b, 0x6d, 0x6d, 0x6b, 0x6d, + 0x6d, 0x6d, 0x6e, 0x6f, 0x6f, 0x6f, 0x6f, 0x6f, 0x6f, 0x6f, 0x6f, 
0x6f, + 0x6d, 0x6e, 0x6f, 0x6f, 0x6e, 0x6d, 0x6e, 0x6f, 0x6f, 0x6e, 0x6c, 0x69, + 0x60, 0x53, 0x3c, 0x31, 0x33, 0x31, 0x3e, 0x35, 0x11, 0x04, 0x26, 0x3e, + 0x47, 0x30, 0x1d, 0xf1, 0xe1, 0xf3, 0xf9, 0xf3, 0xec, 0xe2, 0xda, 0xf0, + 0xf4, 0x1d, 0x2e, 0x2e, 0x32, 0x35, 0x25, 0x18, 0x26, 0x40, 0x4c, 0x4b, + 0x49, 0x44, 0x43, 0x3d, 0x39, 0x34, 0x2a, 0x27, 0x12, 0x03, 0xf5, 0x01, + 0x15, 0x2f, 0x37, 0x38, 0x28, 0x0c, 0x02, 0x0b, 0x09, 0xf5, 0xfa, 0xee, + 0xf2, 0xec, 0xf3, 0xf9, 0xf9, 0xfe, 0xf7, 0xf1, 0xde, 0xd3, 0xdb, 0xd5, + 0xcc, 0xd4, 0xca, 0xce, 0xcd, 0xd0, 0xd9, 0xd7, 0xd0, 0xe3, 0xe6, 0xe1, + 0xeb, 0xee, 0xf6, 0xfc, 0x06, 0x09, 0x0d, 0x12, 0x18, 0x19, 0x18, 0x15, + 0x14, 0x12, 0x12, 0x17, 0x15, 0x14, 0x15, 0x14, 0x12, 0x12, 0x18, 0x1a, + 0x16, 0x19, 0x1f, 0x1f, 0x1f, 0x1c, 0x18, 0x24, 0x26, 0x23, 0x25, 0x26, + 0x2b, 0x31, 0x31, 0x2d, 0x31, 0x31, 0x31, 0x33, 0x36, 0x36, 0x37, 0x37, + 0x35, 0x38, 0x38, 0x3a, 0x3e, 0x3c, 0x40, 0x44, 0x45, 0x44, 0x43, 0x42, + 0x41, 0x46, 0x45, 0x49, 0x47, 0x44, 0x4d, 0x4f, 0x50, 0x52, 0x51, 0x57, + 0x55, 0x54, 0x56, 0x57, 0x55, 0x56, 0x58, 0x56, 0x57, 0x57, 0x57, 0x58, + 0x5b, 0x5c, 0x5d, 0x5c, 0x5c, 0x5b, 0x5a, 0x60, 0x62, 0x63, 0x62, 0x62, + 0x63, 0x62, 0x65, 0x65, 0x64, 0x66, 0x63, 0x64, 0x66, 0x69, 0x68, 0x69, + 0x69, 0x69, 0x6b, 0x6b, 0x6d, 0x6d, 0x6b, 0x6d, 0x6e, 0x6d, 0x6f, 0x6f, + 0x6f, 0x6f, 0x6f, 0x6e, 0x6f, 0x6f, 0x6f, 0x6f, 0x6e, 0x6e, 0x6f, 0x6f, + 0x6f, 0x6e, 0x6d, 0x6f, 0x6f, 0x6f, 0x6e, 0x6c, 0x69, 0x63, 0x54, 0x43, + 0x3e, 0x32, 0x33, 0x3e, 0x3a, 0x1b, 0x2a, 0x42, 0x48, 0x3c, 0x29, 0x07, + 0xf6, 0xfb, 0xff, 0xf3, 0xe9, 0xed, 0xe8, 0xfb, 0x18, 0x31, 0x35, 0x33, + 0x39, 0x32, 0x25, 0x33, 0x47, 0x50, 0x51, 0x4f, 0x4b, 0x44, 0x3b, 0x35, + 0x31, 0x31, 0x2e, 0x29, 0x19, 0x13, 0x20, 0x31, 0x3e, 0x43, 0x38, 0x26, + 0x0c, 0xf8, 0x00, 0x00, 0x00, 0xf3, 0x02, 0xfa, 0x00, 0xf9, 0xfa, 0x00, + 0x09, 0x06, 0xfa, 0xf8, 0xcf, 0xc9, 0xda, 0xdb, 0xc9, 0xd2, 0xd1, 0xcc, + 0xca, 0xcd, 0xcf, 0xd6, 0xce, 0xd4, 0xda, 0xda, 0xe3, 0xeb, 0xf1, 
0xfa, + 0x02, 0x06, 0x0e, 0x17, 0x1a, 0x19, 0x1b, 0x17, 0x18, 0x13, 0x13, 0x16, + 0x16, 0x16, 0x12, 0x13, 0x11, 0x0e, 0x18, 0x18, 0x11, 0x14, 0x1a, 0x1a, + 0x1b, 0x19, 0x18, 0x1e, 0x21, 0x21, 0x24, 0x25, 0x22, 0x2d, 0x30, 0x30, + 0x31, 0x2d, 0x2f, 0x35, 0x37, 0x35, 0x37, 0x37, 0x34, 0x33, 0x38, 0x3a, + 0x3e, 0x3d, 0x3d, 0x40, 0x44, 0x40, 0x43, 0x40, 0x3e, 0x45, 0x45, 0x47, + 0x48, 0x45, 0x4b, 0x50, 0x50, 0x53, 0x53, 0x56, 0x52, 0x53, 0x53, 0x56, + 0x56, 0x56, 0x58, 0x57, 0x57, 0x58, 0x57, 0x58, 0x5a, 0x5d, 0x5d, 0x5b, + 0x5d, 0x5c, 0x5a, 0x5f, 0x63, 0x63, 0x63, 0x62, 0x63, 0x61, 0x65, 0x63, + 0x64, 0x65, 0x63, 0x64, 0x67, 0x69, 0x69, 0x66, 0x68, 0x69, 0x6b, 0x6a, + 0x6c, 0x6d, 0x6c, 0x6d, 0x6e, 0x6d, 0x6e, 0x6f, 0x6f, 0x6f, 0x6e, 0x6e, + 0x6e, 0x6f, 0x6f, 0x6e, 0x6f, 0x6f, 0x6f, 0x6f, 0x6f, 0x6f, 0x6e, 0x6e, + 0x6e, 0x6e, 0x6d, 0x6b, 0x6b, 0x69, 0x64, 0x57, 0x50, 0x46, 0x31, 0x31, + 0x3d, 0x40, 0x3a, 0x45, 0x4a, 0x44, 0x37, 0x1d, 0xf2, 0xeb, 0xf6, 0xef, + 0xe9, 0xec, 0xf1, 0x0a, 0x2b, 0x38, 0x36, 0x3e, 0x42, 0x3e, 0x3e, 0x4a, + 0x52, 0x57, 0x56, 0x50, 0x48, 0x3f, 0x37, 0x37, 0x39, 0x35, 0x2f, 0x2b, + 0x36, 0x3c, 0x43, 0x4a, 0x46, 0x38, 0x26, 0x19, 0x0d, 0x0b, 0x04, 0xfc, + 0xf9, 0xfc, 0x09, 0x12, 0x13, 0x09, 0x0e, 0x0f, 0x06, 0xf9, 0xfb, 0x00, + 0xc9, 0xc4, 0xdd, 0xe0, 0xd3, 0xd1, 0xce, 0xc5, 0xc9, 0xc9, 0xcf, 0xd3, + 0xd3, 0xce, 0xd1, 0xd5, 0xdc, 0xe6, 0xec, 0xf5, 0xfc, 0x06, 0x0f, 0x17, + 0x19, 0x1b, 0x1c, 0x1a, 0x16, 0x12, 0x16, 0x19, 0x16, 0x14, 0x11, 0x13, + 0x12, 0x12, 0x17, 0x15, 0x11, 0x11, 0x14, 0x15, 0x17, 0x18, 0x15, 0x18, + 0x1c, 0x1f, 0x20, 0x25, 0x24, 0x28, 0x2d, 0x2f, 0x31, 0x2b, 0x2f, 0x37, + 0x36, 0x36, 0x33, 0x36, 0x34, 0x31, 0x37, 0x3d, 0x3c, 0x3e, 0x3d, 0x3e, + 0x44, 0x3e, 0x43, 0x3f, 0x3f, 0x45, 0x43, 0x46, 0x49, 0x46, 0x4b, 0x50, + 0x50, 0x52, 0x52, 0x57, 0x53, 0x53, 0x53, 0x54, 0x57, 0x57, 0x58, 0x58, + 0x57, 0x57, 0x57, 0x57, 0x5a, 0x5c, 0x5c, 0x5c, 0x5d, 0x5e, 0x5d, 0x60, + 0x62, 0x62, 0x64, 0x63, 0x61, 0x60, 0x65, 0x63, 0x64, 0x66, 0x64, 
0x63, + 0x68, 0x67, 0x69, 0x65, 0x68, 0x6b, 0x6b, 0x6b, 0x6c, 0x6e, 0x6d, 0x6e, + 0x6f, 0x6d, 0x6d, 0x6f, 0x6f, 0x6f, 0x6f, 0x6e, 0x6e, 0x6f, 0x6f, 0x6f, + 0x6f, 0x6f, 0x6f, 0x6f, 0x6f, 0x6f, 0x6f, 0x6e, 0x6d, 0x6d, 0x6e, 0x6d, + 0x6c, 0x6b, 0x69, 0x66, 0x5e, 0x58, 0x45, 0x3a, 0x31, 0x3d, 0x47, 0x4b, + 0x4b, 0x4b, 0x41, 0x2f, 0xfe, 0xf0, 0xf7, 0xea, 0xe6, 0xe5, 0xf8, 0x19, + 0x37, 0x3e, 0x3e, 0x47, 0x4a, 0x4a, 0x50, 0x56, 0x58, 0x58, 0x55, 0x4f, + 0x47, 0x47, 0x45, 0x45, 0x43, 0x43, 0x44, 0x47, 0x4a, 0x48, 0x48, 0x4a, + 0x3e, 0x33, 0x29, 0x1f, 0x1d, 0x1a, 0x13, 0x18, 0x1d, 0x1f, 0x29, 0x26, + 0x25, 0x25, 0x25, 0x16, 0x0e, 0x11, 0x0c, 0x06, 0xc8, 0xc2, 0xc3, 0xc5, + 0xd7, 0xd4, 0xce, 0xc8, 0xcd, 0xc8, 0xd1, 0xd1, 0xd4, 0xce, 0xd3, 0xda, + 0xd8, 0xe0, 0xe3, 0xea, 0xf6, 0x07, 0x10, 0x16, 0x18, 0x1a, 0x1c, 0x1d, + 0x18, 0x16, 0x17, 0x16, 0x14, 0x12, 0x12, 0x12, 0x12, 0x10, 0x12, 0x12, + 0x13, 0x10, 0x0d, 0x0e, 0x0f, 0x17, 0x12, 0x0e, 0x16, 0x1e, 0x1e, 0x24, + 0x24, 0x24, 0x29, 0x2c, 0x30, 0x2f, 0x2f, 0x35, 0x34, 0x35, 0x31, 0x33, + 0x33, 0x32, 0x39, 0x3c, 0x3a, 0x3c, 0x3c, 0x41, 0x44, 0x3f, 0x42, 0x3e, + 0x41, 0x44, 0x43, 0x45, 0x49, 0x45, 0x4a, 0x50, 0x50, 0x50, 0x52, 0x53, + 0x51, 0x52, 0x53, 0x56, 0x57, 0x57, 0x59, 0x58, 0x57, 0x57, 0x57, 0x57, + 0x5a, 0x5b, 0x5b, 0x5c, 0x5d, 0x5f, 0x60, 0x60, 0x60, 0x61, 0x64, 0x62, + 0x60, 0x61, 0x64, 0x62, 0x62, 0x66, 0x64, 0x63, 0x68, 0x65, 0x68, 0x67, + 0x66, 0x6a, 0x6d, 0x6c, 0x6b, 0x6e, 0x6d, 0x6e, 0x6f, 0x6f, 0x6e, 0x6f, + 0x6f, 0x6f, 0x6e, 0x6f, 0x6e, 0x6f, 0x6f, 0x6f, 0x6f, 0x6f, 0x6f, 0x6f, + 0x6f, 0x6f, 0x6e, 0x6e, 0x6e, 0x6e, 0x6e, 0x6f, 0x6e, 0x6c, 0x6b, 0x6a, + 0x67, 0x5f, 0x54, 0x47, 0x3a, 0x31, 0x43, 0x4e, 0x4e, 0x50, 0x4a, 0x3c, + 0x15, 0xfe, 0xf3, 0xe3, 0xe3, 0xed, 0x0a, 0x27, 0x44, 0x44, 0x49, 0x4c, + 0x4f, 0x50, 0x56, 0x57, 0x59, 0x57, 0x52, 0x51, 0x52, 0x51, 0x4f, 0x4e, + 0x4e, 0x4f, 0x4f, 0x4d, 0x4a, 0x48, 0x4a, 0x44, 0x39, 0x31, 0x27, 0x24, + 0x29, 0x2c, 0x2c, 0x33, 0x31, 0x30, 0x33, 0x34, 0x31, 0x2b, 0x21, 
0x1b, + 0x17, 0x06, 0x02, 0xf6, 0xc5, 0xc5, 0xc5, 0xbd, 0xc5, 0xc8, 0xca, 0xd0, + 0xd0, 0xca, 0xce, 0xda, 0xd7, 0xd2, 0xda, 0xda, 0xd4, 0xd8, 0xdb, 0xe7, + 0xf9, 0x06, 0x0c, 0x13, 0x18, 0x1b, 0x1d, 0x1d, 0x1b, 0x18, 0x16, 0x12, + 0x12, 0x10, 0x0f, 0x11, 0x11, 0x11, 0x14, 0x12, 0x13, 0x0e, 0x07, 0x0b, + 0x0c, 0x10, 0x0f, 0x06, 0x0c, 0x19, 0x1d, 0x22, 0x24, 0x1f, 0x24, 0x2b, + 0x31, 0x31, 0x31, 0x32, 0x34, 0x36, 0x32, 0x32, 0x33, 0x32, 0x39, 0x3a, + 0x3d, 0x3c, 0x3d, 0x44, 0x40, 0x3e, 0x42, 0x3e, 0x42, 0x43, 0x44, 0x45, + 0x4a, 0x46, 0x49, 0x4d, 0x4d, 0x4f, 0x52, 0x4f, 0x4f, 0x53, 0x55, 0x56, + 0x57, 0x56, 0x59, 0x59, 0x58, 0x58, 0x58, 0x58, 0x5b, 0x5b, 0x5a, 0x5d, + 0x5d, 0x5d, 0x62, 0x62, 0x62, 0x63, 0x65, 0x61, 0x61, 0x63, 0x64, 0x63, + 0x63, 0x64, 0x67, 0x65, 0x67, 0x64, 0x68, 0x69, 0x65, 0x69, 0x6e, 0x6d, + 0x6a, 0x6d, 0x6d, 0x6e, 0x6e, 0x6f, 0x6e, 0x6e, 0x6f, 0x6f, 0x6e, 0x6f, + 0x6f, 0x6f, 0x6f, 0x6f, 0x6f, 0x6f, 0x6f, 0x6f, 0x6f, 0x6e, 0x6e, 0x6e, + 0x6e, 0x6d, 0x6e, 0x6f, 0x6e, 0x6c, 0x6b, 0x6b, 0x6a, 0x67, 0x62, 0x58, + 0x4d, 0x3f, 0x44, 0x50, 0x53, 0x53, 0x54, 0x45, 0x2a, 0x0b, 0xeb, 0xf0, + 0xf6, 0xf3, 0x1b, 0x37, 0x48, 0x4c, 0x51, 0x50, 0x50, 0x54, 0x57, 0x59, + 0x5a, 0x56, 0x55, 0x57, 0x57, 0x54, 0x55, 0x53, 0x52, 0x50, 0x4e, 0x4d, + 0x4b, 0x4c, 0x45, 0x3e, 0x31, 0x26, 0x23, 0x2f, 0x38, 0x3f, 0x41, 0x44, + 0x42, 0x42, 0x42, 0x3e, 0x37, 0x2b, 0x1f, 0x0e, 0xff, 0xf3, 0xe4, 0xdc, + 0xc4, 0xc6, 0xc0, 0xb8, 0xb4, 0xb9, 0xc1, 0xcd, 0xd2, 0xc9, 0xc8, 0xd2, + 0xce, 0xce, 0xc9, 0xcf, 0xd6, 0xdd, 0xe0, 0xe7, 0xf9, 0x00, 0x0b, 0x13, + 0x18, 0x1c, 0x1e, 0x1f, 0x1c, 0x1a, 0x16, 0x13, 0x11, 0x0d, 0x0e, 0x0e, + 0x0e, 0x0e, 0x12, 0x0e, 0x11, 0x0b, 0x06, 0x0a, 0x08, 0x07, 0x09, 0x05, + 0x05, 0x11, 0x17, 0x1d, 0x21, 0x1e, 0x21, 0x29, 0x2e, 0x30, 0x31, 0x30, + 0x33, 0x33, 0x33, 0x33, 0x31, 0x31, 0x37, 0x39, 0x3c, 0x3a, 0x3d, 0x42, + 0x3e, 0x3e, 0x42, 0x41, 0x43, 0x44, 0x46, 0x47, 0x49, 0x46, 0x4a, 0x4e, + 0x4c, 0x4f, 0x50, 0x4d, 0x50, 0x54, 0x53, 0x57, 0x57, 0x55, 0x57, 
0x59, + 0x59, 0x59, 0x5a, 0x58, 0x5b, 0x5b, 0x5c, 0x5d, 0x60, 0x5f, 0x61, 0x64, + 0x64, 0x64, 0x64, 0x63, 0x63, 0x63, 0x63, 0x64, 0x66, 0x67, 0x69, 0x69, + 0x68, 0x64, 0x68, 0x69, 0x67, 0x68, 0x6c, 0x6c, 0x6a, 0x6b, 0x6d, 0x6d, + 0x6e, 0x6f, 0x6e, 0x6e, 0x6e, 0x6e, 0x6d, 0x6e, 0x6f, 0x6f, 0x6f, 0x6f, + 0x6f, 0x6f, 0x6f, 0x6f, 0x6f, 0x6f, 0x6e, 0x6e, 0x6d, 0x6d, 0x6d, 0x6e, + 0x6d, 0x6d, 0x6a, 0x69, 0x6b, 0x6a, 0x68, 0x62, 0x5a, 0x4f, 0x3f, 0x49, + 0x56, 0x59, 0x59, 0x50, 0x39, 0x1b, 0xf8, 0x04, 0xfc, 0xf3, 0x24, 0x41, + 0x4c, 0x51, 0x55, 0x54, 0x54, 0x57, 0x59, 0x5b, 0x5a, 0x59, 0x5a, 0x58, + 0x57, 0x57, 0x57, 0x56, 0x56, 0x55, 0x54, 0x53, 0x4f, 0x4e, 0x46, 0x3f, + 0x3b, 0x40, 0x46, 0x4c, 0x4e, 0x4c, 0x4a, 0x47, 0x44, 0x3f, 0x3b, 0x31, + 0x27, 0x1e, 0x0d, 0xf8, 0xed, 0xe0, 0xd0, 0xc9, 0xc2, 0xc8, 0xbd, 0xb9, + 0xbc, 0xb6, 0xbc, 0xc5, 0xce, 0xce, 0xc8, 0xd1, 0xca, 0xcc, 0xcc, 0xca, + 0xd1, 0xdf, 0xea, 0xe9, 0xf3, 0xfd, 0x0d, 0x12, 0x18, 0x1d, 0x1e, 0x1d, + 0x1c, 0x1a, 0x15, 0x12, 0x0f, 0x0c, 0x0c, 0x0a, 0x06, 0x08, 0x0b, 0x05, + 0x09, 0x06, 0x03, 0x02, 0x04, 0x00, 0x00, 0x02, 0x00, 0x0c, 0x13, 0x17, + 0x1d, 0x1d, 0x1e, 0x26, 0x2c, 0x2e, 0x31, 0x31, 0x31, 0x31, 0x32, 0x31, + 0x31, 0x31, 0x36, 0x37, 0x39, 0x39, 0x3d, 0x3f, 0x3e, 0x3f, 0x44, 0x43, + 0x42, 0x44, 0x44, 0x47, 0x47, 0x47, 0x4a, 0x4f, 0x4d, 0x4f, 0x50, 0x4e, + 0x50, 0x54, 0x53, 0x57, 0x59, 0x57, 0x57, 0x59, 0x5c, 0x58, 0x5a, 0x5a, + 0x5c, 0x5b, 0x5d, 0x5e, 0x62, 0x62, 0x61, 0x63, 0x64, 0x65, 0x66, 0x64, + 0x63, 0x64, 0x63, 0x66, 0x68, 0x69, 0x69, 0x6a, 0x67, 0x65, 0x67, 0x6a, + 0x69, 0x66, 0x69, 0x6c, 0x69, 0x68, 0x6b, 0x6c, 0x6d, 0x6e, 0x6e, 0x6f, + 0x6f, 0x6f, 0x6e, 0x6c, 0x6f, 0x6f, 0x6f, 0x6f, 0x6f, 0x6f, 0x6f, 0x6f, + 0x6f, 0x6f, 0x6d, 0x6c, 0x6d, 0x6c, 0x6b, 0x6c, 0x6b, 0x6b, 0x6a, 0x69, + 0x69, 0x6a, 0x69, 0x67, 0x60, 0x5b, 0x51, 0x45, 0x50, 0x58, 0x5b, 0x57, + 0x44, 0x32, 0x12, 0x08, 0xf7, 0x06, 0x2c, 0x42, 0x50, 0x56, 0x58, 0x57, + 0x58, 0x5a, 0x5c, 0x5c, 0x5a, 0x5b, 0x5c, 0x5a, 0x59, 0x59, 0x58, 
0x5a, + 0x58, 0x59, 0x58, 0x57, 0x55, 0x52, 0x50, 0x50, 0x51, 0x55, 0x55, 0x50, + 0x4c, 0x4b, 0x4a, 0x45, 0x3e, 0x37, 0x31, 0x25, 0x12, 0x05, 0x02, 0x05, + 0xfe, 0xec, 0xdc, 0xda, 0xc3, 0xc7, 0xc8, 0xbb, 0xc7, 0xc7, 0xcb, 0xd4, + 0xc9, 0xce, 0xc8, 0xd3, 0xcb, 0xc1, 0xcc, 0xce, 0xce, 0xd7, 0xe1, 0xe1, + 0xe7, 0x00, 0x10, 0x10, 0x18, 0x1e, 0x1d, 0x1d, 0x1d, 0x1b, 0x18, 0x15, + 0x11, 0x0c, 0x0a, 0x06, 0x00, 0x02, 0xff, 0xfa, 0x00, 0xfe, 0xfb, 0xfa, + 0xfe, 0xf9, 0xf6, 0xfe, 0xf9, 0x03, 0x0f, 0x10, 0x17, 0x18, 0x1d, 0x23, + 0x2b, 0x2d, 0x31, 0x31, 0x30, 0x30, 0x31, 0x30, 0x31, 0x31, 0x36, 0x37, + 0x37, 0x39, 0x3c, 0x3e, 0x3e, 0x40, 0x42, 0x41, 0x43, 0x44, 0x43, 0x44, + 0x45, 0x4a, 0x4b, 0x4e, 0x4f, 0x50, 0x50, 0x4e, 0x51, 0x54, 0x56, 0x58, + 0x59, 0x57, 0x55, 0x5a, 0x5d, 0x5a, 0x5a, 0x5c, 0x5e, 0x5d, 0x5d, 0x5f, + 0x62, 0x63, 0x64, 0x62, 0x64, 0x65, 0x67, 0x66, 0x66, 0x66, 0x65, 0x67, + 0x68, 0x69, 0x69, 0x6a, 0x66, 0x67, 0x69, 0x69, 0x69, 0x68, 0x68, 0x6b, + 0x67, 0x66, 0x69, 0x6c, 0x6c, 0x6d, 0x6e, 0x6f, 0x6f, 0x6f, 0x6e, 0x6c, + 0x6e, 0x6f, 0x6f, 0x6f, 0x6f, 0x6f, 0x6f, 0x6f, 0x6f, 0x6f, 0x6e, 0x6c, + 0x6b, 0x6b, 0x69, 0x68, 0x68, 0x68, 0x69, 0x69, 0x69, 0x69, 0x69, 0x69, + 0x66, 0x60, 0x5c, 0x50, 0x50, 0x58, 0x5b, 0x5b, 0x52, 0x3e, 0x25, 0x0c, + 0x06, 0x11, 0x38, 0x4e, 0x55, 0x5a, 0x5c, 0x5a, 0x5a, 0x5c, 0x5d, 0x5d, + 0x5b, 0x5c, 0x5b, 0x5c, 0x5b, 0x5c, 0x5c, 0x5c, 0x5a, 0x5a, 0x59, 0x58, + 0x58, 0x57, 0x58, 0x58, 0x57, 0x57, 0x56, 0x53, 0x50, 0x4c, 0x4a, 0x44, + 0x3e, 0x3a, 0x34, 0x28, 0x13, 0x0a, 0x0e, 0x10, 0x10, 0x00, 0xf5, 0xf9, + 0xc0, 0xc2, 0xcc, 0xc6, 0xc8, 0xca, 0xcb, 0xd0, 0xce, 0xce, 0xcb, 0xd5, + 0xcf, 0xc1, 0xc8, 0xc9, 0xca, 0xca, 0xcf, 0xdb, 0xe4, 0x01, 0x10, 0x12, + 0x1b, 0x1e, 0x1b, 0x1c, 0x1c, 0x1c, 0x19, 0x16, 0x14, 0x0e, 0x0c, 0x05, + 0xfd, 0xf9, 0xf4, 0xf3, 0xf4, 0xf7, 0xf7, 0xf3, 0xf8, 0xf3, 0xf0, 0xf5, + 0xf3, 0xfd, 0x07, 0x0b, 0x11, 0x13, 0x18, 0x20, 0x27, 0x2b, 0x2f, 0x2d, + 0x2b, 0x30, 0x31, 0x2f, 0x2f, 0x31, 0x34, 0x39, 0x37, 0x38, 0x3b, 
0x3d, + 0x3c, 0x40, 0x3f, 0x3e, 0x42, 0x42, 0x41, 0x42, 0x44, 0x4a, 0x4b, 0x4a, + 0x4c, 0x50, 0x50, 0x4f, 0x50, 0x55, 0x57, 0x59, 0x58, 0x58, 0x55, 0x5a, + 0x5d, 0x5d, 0x5b, 0x5e, 0x5e, 0x61, 0x61, 0x61, 0x63, 0x63, 0x63, 0x63, + 0x63, 0x63, 0x66, 0x65, 0x67, 0x66, 0x65, 0x67, 0x66, 0x69, 0x69, 0x69, + 0x68, 0x69, 0x69, 0x69, 0x69, 0x69, 0x68, 0x69, 0x65, 0x64, 0x69, 0x6b, + 0x6c, 0x6c, 0x6c, 0x6d, 0x6e, 0x6f, 0x6e, 0x6e, 0x6e, 0x6e, 0x6f, 0x6f, + 0x6f, 0x6f, 0x6f, 0x6f, 0x6f, 0x6f, 0x6e, 0x6d, 0x6b, 0x6a, 0x69, 0x66, + 0x65, 0x65, 0x66, 0x68, 0x67, 0x68, 0x67, 0x66, 0x67, 0x66, 0x60, 0x5b, + 0x59, 0x5a, 0x5a, 0x5c, 0x5b, 0x50, 0x3d, 0x2b, 0x16, 0x16, 0x42, 0x58, + 0x5c, 0x5d, 0x5d, 0x5d, 0x5c, 0x5d, 0x5d, 0x5d, 0x5b, 0x5d, 0x5d, 0x5d, + 0x5e, 0x5e, 0x5d, 0x5d, 0x5c, 0x5c, 0x5c, 0x5c, 0x5b, 0x5c, 0x5c, 0x5b, + 0x58, 0x57, 0x54, 0x50, 0x4e, 0x4c, 0x4a, 0x49, 0x44, 0x3b, 0x34, 0x31, + 0x32, 0x2b, 0x27, 0x1f, 0x19, 0x1d, 0x13, 0x0b, 0xc6, 0xd0, 0xd1, 0xc8, + 0xc4, 0xc8, 0xc5, 0xc4, 0xc3, 0xc6, 0xc8, 0xcd, 0xd1, 0xcf, 0xc9, 0xd2, + 0xc8, 0xc7, 0xcb, 0xd9, 0xe2, 0xfd, 0x0c, 0x12, 0x1b, 0x1d, 0x1d, 0x1d, + 0x1d, 0x1a, 0x18, 0x16, 0x15, 0x10, 0x0c, 0x01, 0xf8, 0xf3, 0xf2, 0xf2, + 0xf4, 0xf4, 0xf6, 0xf2, 0xf3, 0xee, 0xf1, 0xef, 0xef, 0xfa, 0x00, 0x00, + 0x0c, 0x10, 0x13, 0x1f, 0x24, 0x29, 0x2b, 0x2a, 0x2a, 0x2f, 0x30, 0x2d, + 0x2d, 0x30, 0x33, 0x37, 0x35, 0x38, 0x3b, 0x3e, 0x3b, 0x3f, 0x3e, 0x3e, + 0x43, 0x42, 0x40, 0x41, 0x42, 0x4a, 0x49, 0x4a, 0x4b, 0x53, 0x50, 0x4f, + 0x50, 0x54, 0x58, 0x5a, 0x59, 0x59, 0x56, 0x5a, 0x5e, 0x5e, 0x5b, 0x60, + 0x61, 0x62, 0x65, 0x63, 0x63, 0x63, 0x5f, 0x62, 0x63, 0x63, 0x65, 0x66, + 0x65, 0x66, 0x64, 0x67, 0x68, 0x69, 0x66, 0x68, 0x69, 0x69, 0x69, 0x69, + 0x6a, 0x69, 0x68, 0x69, 0x65, 0x63, 0x67, 0x69, 0x6a, 0x6c, 0x6c, 0x6d, + 0x6c, 0x6c, 0x6c, 0x6b, 0x6d, 0x6e, 0x6e, 0x6f, 0x6f, 0x6f, 0x6f, 0x6f, + 0x6f, 0x6f, 0x6d, 0x6c, 0x6c, 0x6a, 0x69, 0x67, 0x66, 0x64, 0x63, 0x64, + 0x66, 0x66, 0x66, 0x65, 0x64, 0x64, 0x63, 0x60, 0x5d, 0x5e, 0x5c, 
0x5b, + 0x5c, 0x5a, 0x4c, 0x31, 0x25, 0x2a, 0x42, 0x55, 0x5b, 0x5d, 0x5e, 0x5e, + 0x5e, 0x5e, 0x5d, 0x5b, 0x5c, 0x5d, 0x5f, 0x5f, 0x5f, 0x5e, 0x5f, 0x5e, + 0x5d, 0x5e, 0x5e, 0x5f, 0x5d, 0x5d, 0x5b, 0x59, 0x56, 0x56, 0x57, 0x56, + 0x56, 0x50, 0x4c, 0x4a, 0x45, 0x46, 0x45, 0x43, 0x3e, 0x3c, 0x36, 0x31, + 0x2f, 0x2b, 0x2b, 0x28, 0xc6, 0xcf, 0xcb, 0xc4, 0xbc, 0xc3, 0xc9, 0xc8, + 0xc2, 0xc3, 0xc8, 0xcb, 0xc9, 0xc8, 0xd3, 0xe5, 0xc9, 0xc7, 0xca, 0xd7, + 0xe2, 0xf8, 0x0c, 0x12, 0x19, 0x1b, 0x1e, 0x1f, 0x1f, 0x19, 0x18, 0x18, + 0x17, 0x11, 0x0b, 0x00, 0xf5, 0xf3, 0xf6, 0xf5, 0xf8, 0xf6, 0xf5, 0xf2, + 0xf3, 0xee, 0xf3, 0xed, 0xed, 0xf9, 0xfe, 0xf6, 0x00, 0x0f, 0x0f, 0x1d, + 0x21, 0x26, 0x2b, 0x2a, 0x2a, 0x2d, 0x2e, 0x2b, 0x2b, 0x2e, 0x32, 0x35, + 0x35, 0x38, 0x3b, 0x39, 0x38, 0x3c, 0x3c, 0x3e, 0x43, 0x3f, 0x3e, 0x40, + 0x44, 0x48, 0x48, 0x4a, 0x4c, 0x54, 0x4f, 0x50, 0x53, 0x57, 0x5a, 0x5c, + 0x5a, 0x59, 0x59, 0x5d, 0x5d, 0x60, 0x5d, 0x61, 0x61, 0x61, 0x63, 0x63, + 0x63, 0x61, 0x5f, 0x63, 0x63, 0x63, 0x64, 0x65, 0x65, 0x67, 0x65, 0x69, + 0x69, 0x66, 0x64, 0x69, 0x6a, 0x69, 0x67, 0x69, 0x69, 0x69, 0x69, 0x69, + 0x67, 0x64, 0x65, 0x68, 0x6a, 0x6b, 0x6c, 0x6d, 0x6e, 0x6d, 0x6c, 0x6a, + 0x6c, 0x6f, 0x6e, 0x6e, 0x6f, 0x6f, 0x6f, 0x6f, 0x6f, 0x6f, 0x6d, 0x6c, + 0x6c, 0x6c, 0x69, 0x67, 0x63, 0x63, 0x63, 0x63, 0x65, 0x67, 0x67, 0x65, + 0x64, 0x64, 0x65, 0x63, 0x62, 0x61, 0x60, 0x5d, 0x5d, 0x58, 0x59, 0x43, + 0x34, 0x30, 0x3d, 0x55, 0x5d, 0x5e, 0x5f, 0x5f, 0x5f, 0x5f, 0x5d, 0x5d, + 0x5f, 0x60, 0x61, 0x61, 0x61, 0x60, 0x60, 0x60, 0x61, 0x61, 0x5f, 0x5f, + 0x5d, 0x5d, 0x5b, 0x5a, 0x59, 0x5a, 0x59, 0x58, 0x57, 0x55, 0x53, 0x51, + 0x51, 0x50, 0x4e, 0x4a, 0x45, 0x42, 0x43, 0x41, 0x3e, 0x38, 0x31, 0x2e, + 0xc0, 0xbf, 0xc6, 0xbf, 0xbf, 0xc1, 0xc8, 0xc7, 0xca, 0xc5, 0xc8, 0xcc, + 0xc8, 0xc8, 0xce, 0xd5, 0xc8, 0xc4, 0xcd, 0xda, 0xdf, 0xf3, 0x08, 0x11, + 0x17, 0x18, 0x1e, 0x1f, 0x1f, 0x1a, 0x18, 0x17, 0x14, 0x0f, 0x07, 0xfc, + 0xf6, 0xfb, 0xfa, 0xfa, 0xfd, 0xf9, 0xf6, 0xf5, 0xf3, 0xf0, 0xf2, 
0xef, + 0xed, 0xf3, 0xf9, 0xf4, 0xf4, 0x06, 0x0b, 0x18, 0x1f, 0x21, 0x28, 0x28, + 0x28, 0x2c, 0x2c, 0x29, 0x2a, 0x2f, 0x32, 0x34, 0x31, 0x32, 0x37, 0x37, + 0x37, 0x38, 0x3a, 0x3e, 0x41, 0x3f, 0x3d, 0x40, 0x43, 0x44, 0x49, 0x49, + 0x4d, 0x50, 0x50, 0x51, 0x56, 0x5a, 0x5b, 0x5d, 0x59, 0x5b, 0x5b, 0x5e, + 0x5d, 0x5f, 0x60, 0x62, 0x63, 0x62, 0x61, 0x63, 0x63, 0x60, 0x62, 0x64, + 0x63, 0x64, 0x64, 0x63, 0x66, 0x67, 0x65, 0x66, 0x65, 0x63, 0x64, 0x69, + 0x69, 0x66, 0x65, 0x69, 0x68, 0x66, 0x68, 0x69, 0x65, 0x66, 0x67, 0x67, + 0x67, 0x69, 0x6a, 0x6c, 0x6e, 0x6f, 0x6f, 0x6c, 0x6b, 0x6e, 0x6d, 0x6f, + 0x6f, 0x6f, 0x6f, 0x6f, 0x6f, 0x6e, 0x6c, 0x6d, 0x6d, 0x6c, 0x6c, 0x66, + 0x62, 0x62, 0x61, 0x63, 0x65, 0x65, 0x66, 0x68, 0x67, 0x68, 0x67, 0x65, + 0x65, 0x64, 0x63, 0x61, 0x5d, 0x54, 0x57, 0x57, 0x48, 0x3e, 0x41, 0x57, + 0x5d, 0x5f, 0x5f, 0x5f, 0x5f, 0x5e, 0x5e, 0x5f, 0x61, 0x61, 0x62, 0x62, + 0x62, 0x62, 0x62, 0x61, 0x62, 0x62, 0x60, 0x5e, 0x5d, 0x5d, 0x5d, 0x5e, + 0x5d, 0x5d, 0x5a, 0x5a, 0x5a, 0x5a, 0x59, 0x57, 0x56, 0x53, 0x50, 0x50, + 0x4b, 0x48, 0x46, 0x44, 0x40, 0x38, 0x33, 0x2e, 0xba, 0xbe, 0xbe, 0xbd, + 0xc5, 0xc1, 0xc2, 0xbf, 0xc8, 0xc6, 0xc7, 0xca, 0xc9, 0xc8, 0xc9, 0xd1, + 0xc7, 0xc7, 0xcd, 0xd4, 0xd9, 0xeb, 0x02, 0x11, 0x14, 0x18, 0x1e, 0x1f, + 0x1e, 0x1b, 0x18, 0x17, 0x12, 0x0d, 0x09, 0xfd, 0xfb, 0x00, 0xff, 0x01, + 0x02, 0x00, 0xfe, 0xf9, 0xf5, 0xf4, 0xf2, 0xf2, 0xee, 0xf0, 0xf2, 0xf3, + 0xed, 0xfa, 0x09, 0x17, 0x1c, 0x1f, 0x27, 0x26, 0x26, 0x29, 0x26, 0x25, + 0x28, 0x2d, 0x30, 0x31, 0x2f, 0x32, 0x38, 0x36, 0x38, 0x3a, 0x3b, 0x3b, + 0x3f, 0x3e, 0x3d, 0x42, 0x43, 0x45, 0x4a, 0x4a, 0x4e, 0x4f, 0x50, 0x51, + 0x56, 0x57, 0x57, 0x58, 0x5a, 0x5c, 0x5a, 0x5d, 0x5d, 0x5d, 0x5f, 0x61, + 0x63, 0x61, 0x61, 0x63, 0x61, 0x61, 0x63, 0x64, 0x66, 0x66, 0x63, 0x62, + 0x65, 0x65, 0x63, 0x62, 0x63, 0x63, 0x66, 0x6a, 0x64, 0x63, 0x65, 0x67, + 0x64, 0x62, 0x66, 0x68, 0x64, 0x65, 0x68, 0x68, 0x66, 0x67, 0x69, 0x6a, + 0x6b, 0x6c, 0x6e, 0x6d, 0x6b, 0x6c, 0x6d, 0x6f, 0x6f, 0x6f, 0x6f, 
0x6f, + 0x6f, 0x6e, 0x6c, 0x6d, 0x6d, 0x6e, 0x6e, 0x6c, 0x67, 0x63, 0x63, 0x64, + 0x64, 0x64, 0x66, 0x69, 0x69, 0x68, 0x67, 0x66, 0x65, 0x64, 0x64, 0x65, + 0x62, 0x5a, 0x55, 0x58, 0x56, 0x50, 0x4c, 0x59, 0x5d, 0x60, 0x61, 0x61, + 0x60, 0x60, 0x61, 0x62, 0x62, 0x63, 0x63, 0x63, 0x63, 0x63, 0x63, 0x63, + 0x62, 0x62, 0x60, 0x60, 0x5f, 0x5e, 0x5f, 0x5f, 0x60, 0x5f, 0x5d, 0x5d, + 0x5d, 0x5c, 0x5a, 0x5a, 0x57, 0x57, 0x54, 0x51, 0x4d, 0x46, 0x40, 0x3e, + 0x37, 0x32, 0x2e, 0x26, 0xb7, 0xc8, 0xd6, 0xbe, 0xc2, 0xc5, 0xc2, 0xbc, + 0xc1, 0xc5, 0xc8, 0xc8, 0xc5, 0xc5, 0xce, 0xd0, 0xc8, 0xc7, 0xc8, 0xc7, + 0xd4, 0xe5, 0x01, 0x10, 0x17, 0x18, 0x1c, 0x1e, 0x1e, 0x1c, 0x1a, 0x14, + 0x10, 0x0c, 0x09, 0x00, 0xff, 0x00, 0x03, 0x06, 0x06, 0x04, 0x00, 0xfb, + 0xf9, 0xf8, 0xf4, 0xf3, 0xf4, 0xf0, 0xed, 0xef, 0xeb, 0xf5, 0x04, 0x12, + 0x1a, 0x1e, 0x23, 0x24, 0x25, 0x25, 0x24, 0x25, 0x28, 0x2b, 0x2f, 0x2d, + 0x2c, 0x31, 0x34, 0x33, 0x36, 0x38, 0x39, 0x3c, 0x3f, 0x39, 0x3d, 0x41, + 0x44, 0x45, 0x49, 0x4c, 0x4e, 0x4f, 0x4f, 0x54, 0x54, 0x52, 0x52, 0x53, + 0x56, 0x55, 0x55, 0x56, 0x58, 0x59, 0x5a, 0x5c, 0x5d, 0x5d, 0x5d, 0x60, + 0x62, 0x63, 0x64, 0x64, 0x64, 0x63, 0x61, 0x63, 0x63, 0x60, 0x61, 0x63, + 0x64, 0x64, 0x69, 0x67, 0x62, 0x62, 0x65, 0x65, 0x63, 0x60, 0x63, 0x68, + 0x67, 0x66, 0x69, 0x69, 0x69, 0x69, 0x69, 0x69, 0x69, 0x6b, 0x6c, 0x6c, + 0x6b, 0x6b, 0x6d, 0x6f, 0x6f, 0x6f, 0x6f, 0x6f, 0x6f, 0x6e, 0x6c, 0x6b, + 0x6c, 0x6e, 0x6e, 0x6e, 0x6b, 0x66, 0x64, 0x63, 0x65, 0x66, 0x66, 0x67, + 0x68, 0x67, 0x67, 0x66, 0x65, 0x64, 0x63, 0x63, 0x63, 0x60, 0x5a, 0x57, + 0x5b, 0x59, 0x59, 0x5d, 0x60, 0x61, 0x62, 0x62, 0x62, 0x62, 0x62, 0x63, + 0x63, 0x65, 0x64, 0x63, 0x63, 0x64, 0x64, 0x63, 0x63, 0x63, 0x61, 0x61, + 0x61, 0x5f, 0x60, 0x60, 0x5f, 0x60, 0x5f, 0x5d, 0x5d, 0x5c, 0x5b, 0x5a, + 0x5a, 0x58, 0x56, 0x50, 0x4e, 0x49, 0x43, 0x41, 0x3a, 0x34, 0x2f, 0x2b, + 0xc1, 0xc4, 0xd1, 0xcc, 0xc8, 0xc7, 0xca, 0xc0, 0xc1, 0xc6, 0xcb, 0xc7, + 0xc4, 0xc4, 0xca, 0xcd, 0xc7, 0xca, 0xc8, 0xc6, 0xcb, 0xe1, 0x00, 
0x0e, + 0x18, 0x18, 0x1a, 0x1b, 0x1d, 0x1d, 0x1b, 0x15, 0x0f, 0x0f, 0x0c, 0x06, + 0x00, 0x00, 0x06, 0x08, 0x07, 0x04, 0x00, 0xff, 0xfe, 0xf9, 0xf7, 0xf4, + 0xf6, 0xf0, 0xed, 0xea, 0xeb, 0xf3, 0x00, 0x11, 0x18, 0x1c, 0x20, 0x22, + 0x23, 0x22, 0x23, 0x23, 0x26, 0x29, 0x2c, 0x2b, 0x2b, 0x31, 0x31, 0x31, + 0x36, 0x36, 0x39, 0x3e, 0x3b, 0x37, 0x3d, 0x40, 0x42, 0x43, 0x47, 0x4a, + 0x4c, 0x4b, 0x4e, 0x4e, 0x4c, 0x4a, 0x4a, 0x4a, 0x4c, 0x4e, 0x50, 0x52, + 0x51, 0x50, 0x52, 0x50, 0x4c, 0x4a, 0x53, 0x56, 0x57, 0x5a, 0x59, 0x5a, + 0x5f, 0x62, 0x63, 0x63, 0x60, 0x61, 0x63, 0x65, 0x64, 0x66, 0x69, 0x64, + 0x63, 0x63, 0x64, 0x64, 0x63, 0x60, 0x62, 0x63, 0x64, 0x64, 0x69, 0x6b, + 0x6a, 0x6a, 0x6b, 0x6b, 0x69, 0x69, 0x6b, 0x6b, 0x6a, 0x6a, 0x6b, 0x6d, + 0x6f, 0x6f, 0x6f, 0x6f, 0x6f, 0x6e, 0x6b, 0x6a, 0x6c, 0x6e, 0x6e, 0x6e, + 0x6d, 0x69, 0x67, 0x66, 0x66, 0x67, 0x68, 0x67, 0x66, 0x66, 0x66, 0x67, + 0x64, 0x64, 0x63, 0x62, 0x63, 0x63, 0x5f, 0x5a, 0x5a, 0x5d, 0x5e, 0x60, + 0x61, 0x61, 0x62, 0x62, 0x62, 0x63, 0x63, 0x63, 0x64, 0x65, 0x64, 0x64, + 0x64, 0x63, 0x63, 0x62, 0x62, 0x63, 0x61, 0x62, 0x61, 0x62, 0x62, 0x60, + 0x60, 0x60, 0x5e, 0x5d, 0x5d, 0x5d, 0x5b, 0x5a, 0x59, 0x57, 0x55, 0x53, + 0x52, 0x50, 0x4a, 0x46, 0x43, 0x3f, 0x3c, 0x35, 0xc5, 0xc6, 0xc6, 0xc1, + 0xcb, 0xc4, 0xc0, 0xba, 0xbe, 0xc4, 0xcc, 0xcd, 0xc7, 0xc1, 0xc0, 0xc2, + 0xc0, 0xc1, 0xc5, 0xc7, 0xc6, 0xd8, 0x00, 0x0b, 0x16, 0x17, 0x17, 0x18, + 0x1d, 0x1e, 0x1b, 0x15, 0x12, 0x13, 0x0f, 0x0a, 0xff, 0xff, 0x06, 0x0a, + 0x0b, 0x07, 0x05, 0x00, 0xff, 0xfc, 0xf9, 0xf9, 0xf8, 0xf3, 0xed, 0xed, + 0xec, 0xf3, 0xff, 0x0f, 0x16, 0x1b, 0x1c, 0x1f, 0x21, 0x21, 0x22, 0x22, + 0x24, 0x28, 0x29, 0x2b, 0x2b, 0x2f, 0x30, 0x31, 0x34, 0x37, 0x3d, 0x3b, + 0x37, 0x3b, 0x3c, 0x3e, 0x40, 0x43, 0x46, 0x4a, 0x48, 0x43, 0x43, 0x42, + 0x43, 0x44, 0x43, 0x42, 0x44, 0x49, 0x4a, 0x4b, 0x4c, 0x4d, 0x50, 0x4f, + 0x4c, 0x4e, 0x50, 0x50, 0x50, 0x53, 0x55, 0x55, 0x58, 0x5e, 0x5f, 0x5e, + 0x5d, 0x61, 0x63, 0x64, 0x66, 0x67, 0x65, 0x65, 0x64, 0x63, 0x62, 
0x64, + 0x64, 0x61, 0x62, 0x63, 0x63, 0x62, 0x64, 0x69, 0x6a, 0x69, 0x6a, 0x6a, + 0x69, 0x69, 0x69, 0x6a, 0x6a, 0x69, 0x6b, 0x6d, 0x6e, 0x6f, 0x6f, 0x6f, + 0x6f, 0x6e, 0x6a, 0x6a, 0x6b, 0x6d, 0x6e, 0x6d, 0x6d, 0x6a, 0x68, 0x65, + 0x63, 0x66, 0x66, 0x65, 0x65, 0x64, 0x64, 0x65, 0x66, 0x64, 0x63, 0x61, + 0x63, 0x62, 0x62, 0x5f, 0x5d, 0x5e, 0x60, 0x62, 0x62, 0x61, 0x62, 0x62, + 0x62, 0x63, 0x63, 0x64, 0x63, 0x63, 0x63, 0x63, 0x63, 0x64, 0x63, 0x63, + 0x63, 0x63, 0x63, 0x63, 0x62, 0x62, 0x61, 0x61, 0x61, 0x60, 0x60, 0x5f, + 0x5e, 0x5d, 0x5c, 0x5c, 0x5a, 0x58, 0x57, 0x54, 0x4f, 0x4a, 0x46, 0x42, + 0x3e, 0x3b, 0x35, 0x2d, 0xc3, 0xc5, 0xc8, 0xc8, 0xce, 0xcc, 0xbe, 0xbb, + 0xbb, 0xc1, 0xca, 0xc8, 0xc1, 0xc9, 0xc8, 0xc8, 0xc0, 0xbe, 0xc4, 0xc6, + 0xc3, 0xd1, 0x03, 0x0b, 0x12, 0x13, 0x16, 0x18, 0x1d, 0x1d, 0x1a, 0x15, + 0x15, 0x13, 0x11, 0x0d, 0x02, 0xff, 0x06, 0x0c, 0x0d, 0x0b, 0x07, 0x05, + 0x02, 0x00, 0xfb, 0xfa, 0xfa, 0xf8, 0xf3, 0xf0, 0xef, 0xf3, 0xff, 0x0d, + 0x17, 0x19, 0x1b, 0x1f, 0x1f, 0x1f, 0x21, 0x21, 0x20, 0x26, 0x29, 0x29, + 0x2b, 0x2d, 0x2e, 0x31, 0x33, 0x37, 0x38, 0x37, 0x39, 0x3b, 0x38, 0x3a, + 0x3f, 0x41, 0x44, 0x44, 0x3d, 0x38, 0x37, 0x39, 0x3d, 0x40, 0x40, 0x44, + 0x46, 0x47, 0x4a, 0x4e, 0x4f, 0x4f, 0x4e, 0x4e, 0x4f, 0x50, 0x4e, 0x4d, + 0x54, 0x57, 0x56, 0x56, 0x57, 0x5b, 0x5d, 0x5d, 0x5d, 0x5d, 0x5d, 0x62, + 0x64, 0x66, 0x64, 0x65, 0x65, 0x63, 0x63, 0x64, 0x65, 0x63, 0x63, 0x64, + 0x64, 0x63, 0x63, 0x65, 0x68, 0x68, 0x67, 0x68, 0x69, 0x69, 0x6a, 0x6a, + 0x6b, 0x69, 0x6a, 0x6b, 0x6c, 0x6e, 0x6f, 0x6f, 0x6f, 0x6d, 0x69, 0x6a, + 0x6a, 0x6b, 0x6e, 0x6d, 0x6c, 0x69, 0x67, 0x64, 0x64, 0x68, 0x65, 0x64, + 0x63, 0x62, 0x62, 0x63, 0x64, 0x64, 0x63, 0x62, 0x62, 0x62, 0x62, 0x60, + 0x5f, 0x5f, 0x60, 0x61, 0x61, 0x61, 0x62, 0x63, 0x62, 0x63, 0x63, 0x63, + 0x63, 0x63, 0x63, 0x63, 0x63, 0x64, 0x65, 0x63, 0x63, 0x63, 0x62, 0x62, + 0x62, 0x62, 0x61, 0x62, 0x61, 0x61, 0x60, 0x5f, 0x5e, 0x5e, 0x5d, 0x5d, + 0x5b, 0x5a, 0x5a, 0x57, 0x54, 0x4f, 0x4b, 0x49, 0x45, 0x3e, 0x37, 
0x31, + 0xc4, 0xc3, 0xc8, 0xcb, 0xd0, 0xd6, 0xcd, 0xc0, 0xb5, 0xbc, 0xca, 0xbd, + 0xbb, 0xd2, 0xe5, 0xe8, 0xd6, 0xca, 0xca, 0xc4, 0xc0, 0xd4, 0x03, 0x0c, + 0x0c, 0x11, 0x16, 0x18, 0x1c, 0x1d, 0x1a, 0x16, 0x17, 0x12, 0x14, 0x0f, + 0x08, 0x03, 0x0a, 0x0d, 0x0d, 0x0c, 0x09, 0x06, 0x05, 0x02, 0xff, 0xfc, + 0xfb, 0xf9, 0xf4, 0xf1, 0xf0, 0xf7, 0xff, 0x0c, 0x15, 0x19, 0x19, 0x1c, + 0x1d, 0x1d, 0x1f, 0x1e, 0x1f, 0x26, 0x26, 0x27, 0x2a, 0x2b, 0x2f, 0x34, + 0x31, 0x31, 0x32, 0x36, 0x37, 0x37, 0x39, 0x3b, 0x3c, 0x3c, 0x3a, 0x35, + 0x33, 0x34, 0x36, 0x38, 0x3c, 0x41, 0x43, 0x42, 0x40, 0x44, 0x4e, 0x50, + 0x50, 0x4e, 0x4a, 0x45, 0x49, 0x49, 0x4a, 0x52, 0x57, 0x56, 0x57, 0x57, + 0x59, 0x59, 0x5c, 0x5c, 0x5d, 0x5a, 0x59, 0x5f, 0x5e, 0x63, 0x64, 0x65, + 0x66, 0x66, 0x65, 0x63, 0x65, 0x65, 0x65, 0x63, 0x63, 0x63, 0x63, 0x64, + 0x65, 0x67, 0x65, 0x64, 0x66, 0x68, 0x6a, 0x6b, 0x6a, 0x69, 0x6a, 0x6b, + 0x6c, 0x6d, 0x6f, 0x6f, 0x6f, 0x6d, 0x69, 0x69, 0x6a, 0x6a, 0x6d, 0x6e, + 0x6c, 0x6a, 0x66, 0x64, 0x65, 0x67, 0x67, 0x63, 0x61, 0x61, 0x5f, 0x5e, + 0x60, 0x61, 0x63, 0x63, 0x63, 0x62, 0x62, 0x60, 0x60, 0x5f, 0x61, 0x60, + 0x61, 0x63, 0x63, 0x62, 0x62, 0x63, 0x63, 0x63, 0x63, 0x63, 0x63, 0x63, + 0x63, 0x63, 0x63, 0x63, 0x63, 0x63, 0x63, 0x62, 0x62, 0x62, 0x61, 0x62, + 0x63, 0x62, 0x60, 0x60, 0x5f, 0x5f, 0x5e, 0x5e, 0x5d, 0x5b, 0x59, 0x58, + 0x57, 0x53, 0x4f, 0x4a, 0x46, 0x42, 0x3c, 0x35, 0xc7, 0xc2, 0xc6, 0xc1, + 0xbb, 0xc0, 0xc8, 0xc7, 0xc0, 0xc5, 0xcb, 0xbf, 0xbd, 0xba, 0xc1, 0xc7, + 0xc5, 0xcb, 0xd6, 0xd1, 0xc1, 0xd1, 0xfb, 0x0c, 0x0c, 0x0e, 0x13, 0x18, + 0x1b, 0x1b, 0x19, 0x17, 0x18, 0x12, 0x12, 0x12, 0x0b, 0x06, 0x0c, 0x0e, + 0x0e, 0x0c, 0x0b, 0x08, 0x06, 0x06, 0x03, 0xfe, 0xfc, 0xfc, 0xf7, 0xf3, + 0xf0, 0xf9, 0x00, 0x0c, 0x12, 0x16, 0x15, 0x19, 0x19, 0x1c, 0x1c, 0x19, + 0x1f, 0x25, 0x25, 0x25, 0x26, 0x2b, 0x31, 0x31, 0x2e, 0x31, 0x31, 0x31, + 0x37, 0x39, 0x38, 0x37, 0x34, 0x35, 0x2f, 0x31, 0x34, 0x37, 0x38, 0x3d, + 0x3f, 0x40, 0x3d, 0x3a, 0x39, 0x42, 0x4a, 0x4c, 0x4b, 0x46, 0x44, 
0x44, + 0x4a, 0x4a, 0x4d, 0x50, 0x50, 0x50, 0x55, 0x50, 0x50, 0x51, 0x54, 0x57, + 0x57, 0x53, 0x52, 0x55, 0x59, 0x5d, 0x5f, 0x60, 0x62, 0x65, 0x64, 0x65, + 0x65, 0x67, 0x68, 0x69, 0x66, 0x66, 0x64, 0x63, 0x64, 0x64, 0x66, 0x66, + 0x66, 0x67, 0x69, 0x69, 0x69, 0x69, 0x69, 0x6b, 0x6c, 0x6d, 0x6e, 0x6f, + 0x6f, 0x6e, 0x69, 0x69, 0x6b, 0x6a, 0x6c, 0x6e, 0x6c, 0x6a, 0x69, 0x68, + 0x68, 0x68, 0x66, 0x63, 0x60, 0x5f, 0x5d, 0x5d, 0x5d, 0x5e, 0x5f, 0x61, + 0x62, 0x63, 0x62, 0x61, 0x60, 0x5f, 0x60, 0x61, 0x62, 0x63, 0x63, 0x62, + 0x61, 0x5f, 0x60, 0x61, 0x63, 0x63, 0x63, 0x63, 0x61, 0x62, 0x63, 0x63, + 0x63, 0x63, 0x63, 0x62, 0x62, 0x62, 0x62, 0x63, 0x63, 0x62, 0x62, 0x63, + 0x62, 0x61, 0x60, 0x5f, 0x5d, 0x5d, 0x5c, 0x5b, 0x5a, 0x58, 0x56, 0x51, + 0x4a, 0x43, 0x3f, 0x3f, 0xc5, 0xc4, 0xc8, 0xc3, 0xb9, 0xb6, 0xb8, 0xc1, + 0xbf, 0xc8, 0xca, 0xc8, 0xc8, 0xc6, 0xc0, 0xb9, 0xb5, 0xc3, 0xc5, 0xc9, + 0xc1, 0xcb, 0xf4, 0x0a, 0x0e, 0x12, 0x12, 0x15, 0x19, 0x19, 0x18, 0x1a, + 0x18, 0x11, 0x12, 0x12, 0x0d, 0x08, 0x0c, 0x0f, 0x11, 0x0f, 0x0e, 0x0d, + 0x0b, 0x09, 0x06, 0x00, 0xfd, 0x00, 0xfb, 0xf3, 0xf2, 0xfa, 0x03, 0x0d, + 0x12, 0x13, 0x12, 0x17, 0x18, 0x1b, 0x1a, 0x1b, 0x21, 0x25, 0x25, 0x27, + 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x30, 0x31, 0x35, 0x38, 0x37, 0x34, 0x30, + 0x30, 0x2b, 0x2e, 0x33, 0x35, 0x37, 0x3c, 0x39, 0x37, 0x37, 0x35, 0x35, + 0x39, 0x42, 0x48, 0x47, 0x47, 0x46, 0x45, 0x4b, 0x4b, 0x4b, 0x4e, 0x4f, + 0x52, 0x51, 0x54, 0x4f, 0x4d, 0x4f, 0x51, 0x50, 0x4c, 0x4a, 0x50, 0x56, + 0x58, 0x5e, 0x5e, 0x5d, 0x5f, 0x62, 0x62, 0x64, 0x64, 0x64, 0x66, 0x68, + 0x66, 0x63, 0x63, 0x63, 0x64, 0x63, 0x65, 0x67, 0x67, 0x67, 0x67, 0x66, + 0x68, 0x68, 0x69, 0x6a, 0x6b, 0x6c, 0x6d, 0x6e, 0x6f, 0x6f, 0x6d, 0x6b, + 0x6c, 0x6d, 0x6c, 0x6c, 0x6c, 0x6b, 0x69, 0x68, 0x66, 0x64, 0x63, 0x61, + 0x62, 0x5f, 0x5c, 0x5c, 0x5e, 0x5f, 0x5f, 0x60, 0x61, 0x62, 0x62, 0x62, + 0x61, 0x5f, 0x5f, 0x60, 0x61, 0x61, 0x62, 0x61, 0x60, 0x5e, 0x5f, 0x61, + 0x62, 0x62, 0x63, 0x63, 0x63, 0x63, 0x63, 0x64, 0x63, 0x62, 0x63, 
0x62, + 0x62, 0x62, 0x63, 0x63, 0x63, 0x63, 0x63, 0x63, 0x62, 0x61, 0x5f, 0x5d, + 0x5c, 0x5b, 0x58, 0x57, 0x57, 0x56, 0x56, 0x54, 0x50, 0x4c, 0x4a, 0x47, + 0xbb, 0xc1, 0xc8, 0xc2, 0xbd, 0xb9, 0xba, 0xbd, 0xbe, 0xc3, 0xc8, 0xcc, + 0xce, 0xc6, 0xbb, 0xbf, 0xc3, 0xca, 0xc5, 0xc0, 0xbe, 0xc7, 0xf1, 0x0b, + 0x10, 0x11, 0x13, 0x13, 0x14, 0x16, 0x17, 0x18, 0x15, 0x0f, 0x12, 0x12, + 0x11, 0x0c, 0x0e, 0x11, 0x13, 0x12, 0x12, 0x10, 0x0d, 0x0e, 0x0a, 0x04, + 0xfc, 0x00, 0xff, 0xf3, 0xf3, 0xfe, 0x05, 0x0c, 0x10, 0x10, 0x11, 0x15, + 0x16, 0x18, 0x18, 0x1e, 0x21, 0x23, 0x26, 0x27, 0x26, 0x25, 0x25, 0x26, + 0x2a, 0x2b, 0x30, 0x31, 0x2f, 0x2b, 0x28, 0x25, 0x26, 0x29, 0x31, 0x34, + 0x33, 0x37, 0x37, 0x34, 0x33, 0x35, 0x36, 0x36, 0x3d, 0x43, 0x41, 0x42, + 0x44, 0x47, 0x4b, 0x50, 0x4c, 0x4a, 0x4c, 0x50, 0x53, 0x53, 0x54, 0x51, + 0x52, 0x52, 0x52, 0x51, 0x4e, 0x48, 0x49, 0x53, 0x57, 0x5a, 0x5d, 0x5c, + 0x5e, 0x5f, 0x5e, 0x60, 0x60, 0x60, 0x5e, 0x61, 0x60, 0x60, 0x61, 0x62, + 0x63, 0x64, 0x66, 0x66, 0x66, 0x66, 0x66, 0x65, 0x66, 0x67, 0x68, 0x69, + 0x69, 0x6b, 0x6d, 0x6e, 0x6f, 0x6f, 0x6f, 0x6e, 0x6d, 0x6e, 0x6d, 0x6d, + 0x6d, 0x6b, 0x6a, 0x66, 0x63, 0x62, 0x61, 0x62, 0x62, 0x5e, 0x5c, 0x5c, + 0x5e, 0x60, 0x61, 0x62, 0x62, 0x61, 0x61, 0x62, 0x62, 0x61, 0x61, 0x61, + 0x62, 0x62, 0x62, 0x60, 0x5f, 0x5f, 0x60, 0x61, 0x61, 0x62, 0x63, 0x63, + 0x63, 0x63, 0x63, 0x63, 0x63, 0x63, 0x63, 0x63, 0x63, 0x63, 0x63, 0x63, + 0x63, 0x63, 0x63, 0x63, 0x62, 0x61, 0x5e, 0x5c, 0x59, 0x58, 0x54, 0x4e, + 0x4c, 0x4a, 0x47, 0x47, 0x46, 0x45, 0x44, 0x45, 0xc1, 0xc2, 0xc8, 0xc1, + 0xb8, 0xb1, 0xb5, 0xb9, 0xbc, 0xc0, 0xc8, 0xca, 0xc8, 0xcb, 0xc8, 0xbf, + 0xc4, 0xc9, 0xca, 0xc5, 0xbe, 0xbe, 0xf2, 0x0d, 0x12, 0x12, 0x11, 0x13, + 0x14, 0x14, 0x15, 0x15, 0x11, 0x0e, 0x10, 0x10, 0x10, 0x0c, 0x0e, 0x11, + 0x14, 0x15, 0x12, 0x12, 0x0e, 0x10, 0x0b, 0x06, 0xff, 0x00, 0xff, 0xf6, + 0xf6, 0x00, 0x04, 0x0c, 0x0c, 0x0c, 0x10, 0x14, 0x12, 0x15, 0x1a, 0x1c, + 0x21, 0x25, 0x25, 0x1f, 0x1b, 0x20, 0x22, 0x25, 0x27, 0x28, 0x26, 
0x27, + 0x26, 0x22, 0x20, 0x24, 0x27, 0x2e, 0x30, 0x2d, 0x2e, 0x2e, 0x31, 0x31, + 0x32, 0x37, 0x3a, 0x3b, 0x3f, 0x42, 0x3f, 0x40, 0x44, 0x47, 0x4c, 0x4e, + 0x4c, 0x49, 0x4c, 0x50, 0x51, 0x51, 0x54, 0x53, 0x53, 0x55, 0x57, 0x55, + 0x50, 0x4e, 0x4b, 0x50, 0x55, 0x57, 0x5a, 0x5b, 0x5b, 0x5b, 0x5c, 0x5d, + 0x5d, 0x5c, 0x58, 0x58, 0x5a, 0x5d, 0x5f, 0x62, 0x63, 0x63, 0x63, 0x63, + 0x63, 0x63, 0x65, 0x66, 0x65, 0x66, 0x66, 0x67, 0x68, 0x69, 0x6a, 0x6c, + 0x6d, 0x6f, 0x6f, 0x6f, 0x6f, 0x6f, 0x6d, 0x6c, 0x6c, 0x6c, 0x69, 0x66, + 0x63, 0x63, 0x65, 0x63, 0x5e, 0x5d, 0x5d, 0x5d, 0x5f, 0x63, 0x63, 0x62, + 0x60, 0x5f, 0x60, 0x62, 0x62, 0x61, 0x5f, 0x5f, 0x60, 0x61, 0x60, 0x5e, + 0x5e, 0x5f, 0x60, 0x60, 0x5f, 0x60, 0x61, 0x63, 0x63, 0x63, 0x63, 0x63, + 0x63, 0x63, 0x64, 0x64, 0x64, 0x63, 0x63, 0x63, 0x63, 0x63, 0x63, 0x62, + 0x61, 0x61, 0x5f, 0x5d, 0x5d, 0x5d, 0x59, 0x57, 0x55, 0x51, 0x4d, 0x4a, + 0x46, 0x44, 0x3f, 0x3d, 0xc8, 0xc6, 0xc9, 0xc1, 0xbc, 0xc1, 0xb5, 0xb1, + 0xb1, 0xb5, 0xbf, 0xc7, 0xc6, 0xc1, 0xcb, 0xc5, 0xbe, 0xbe, 0xb8, 0xb8, + 0xbb, 0xc7, 0xf9, 0x0c, 0x11, 0x14, 0x13, 0x16, 0x16, 0x16, 0x12, 0x12, + 0x0e, 0x0f, 0x0e, 0x0f, 0x0f, 0x0c, 0x0f, 0x12, 0x16, 0x18, 0x15, 0x12, + 0x0e, 0x11, 0x0e, 0x07, 0x00, 0xfe, 0xfe, 0xf8, 0xf7, 0xfe, 0x05, 0x0c, + 0x09, 0x0b, 0x0f, 0x12, 0x0f, 0x14, 0x17, 0x1a, 0x21, 0x21, 0x18, 0x13, + 0x19, 0x20, 0x21, 0x20, 0x22, 0x20, 0x21, 0x22, 0x1f, 0x1f, 0x23, 0x2a, + 0x2c, 0x2b, 0x2a, 0x25, 0x29, 0x2c, 0x31, 0x32, 0x34, 0x37, 0x3c, 0x41, + 0x43, 0x42, 0x42, 0x3e, 0x41, 0x47, 0x4b, 0x4f, 0x4c, 0x4a, 0x4e, 0x51, + 0x52, 0x52, 0x52, 0x55, 0x52, 0x54, 0x56, 0x57, 0x54, 0x52, 0x51, 0x4e, + 0x54, 0x55, 0x57, 0x59, 0x5a, 0x5b, 0x5b, 0x5d, 0x5f, 0x5e, 0x5d, 0x5a, + 0x5b, 0x5d, 0x5d, 0x5e, 0x61, 0x63, 0x5f, 0x5f, 0x5f, 0x61, 0x62, 0x64, + 0x64, 0x64, 0x63, 0x65, 0x68, 0x69, 0x69, 0x69, 0x6b, 0x6e, 0x6f, 0x6f, + 0x6f, 0x6f, 0x6e, 0x6d, 0x6c, 0x6c, 0x6a, 0x67, 0x65, 0x65, 0x65, 0x60, + 0x5d, 0x5d, 0x5c, 0x5d, 0x61, 0x63, 0x62, 0x60, 0x60, 0x60, 0x62, 
0x62, + 0x61, 0x5f, 0x5e, 0x5d, 0x5e, 0x5f, 0x5f, 0x5c, 0x5d, 0x5e, 0x5f, 0x5f, + 0x5f, 0x61, 0x63, 0x63, 0x63, 0x64, 0x64, 0x64, 0x64, 0x64, 0x64, 0x64, + 0x64, 0x64, 0x64, 0x63, 0x63, 0x62, 0x62, 0x62, 0x62, 0x62, 0x60, 0x5f, + 0x5f, 0x5f, 0x5d, 0x5c, 0x5b, 0x59, 0x57, 0x55, 0x50, 0x4f, 0x4a, 0x4a, + 0xbd, 0xc3, 0xcb, 0xc8, 0xc3, 0xd2, 0xc0, 0xae, 0xae, 0xb2, 0xb5, 0xc3, + 0xc7, 0xc1, 0xbb, 0xc3, 0xc2, 0xc8, 0xbf, 0xb1, 0xb5, 0xc9, 0xf9, 0x0e, + 0x12, 0x18, 0x14, 0x12, 0x16, 0x18, 0x12, 0x10, 0x0d, 0x0e, 0x0c, 0x0d, + 0x0c, 0x0c, 0x10, 0x12, 0x14, 0x1a, 0x19, 0x15, 0x12, 0x10, 0x0f, 0x0a, + 0x00, 0xfc, 0xfd, 0xf9, 0xf9, 0xfe, 0x05, 0x07, 0x04, 0x0a, 0x0e, 0x0e, + 0x0e, 0x11, 0x16, 0x1b, 0x1e, 0x18, 0x10, 0x14, 0x1b, 0x1c, 0x1c, 0x1d, + 0x1d, 0x1a, 0x1d, 0x1d, 0x1f, 0x22, 0x25, 0x29, 0x28, 0x25, 0x20, 0x25, + 0x2a, 0x2e, 0x32, 0x35, 0x36, 0x39, 0x3e, 0x44, 0x41, 0x41, 0x3c, 0x3c, + 0x44, 0x4a, 0x4c, 0x4e, 0x4d, 0x4e, 0x4e, 0x51, 0x55, 0x55, 0x56, 0x56, + 0x53, 0x53, 0x54, 0x55, 0x57, 0x56, 0x53, 0x50, 0x51, 0x55, 0x57, 0x58, + 0x5c, 0x5d, 0x5c, 0x5d, 0x5f, 0x61, 0x61, 0x5e, 0x5b, 0x5d, 0x5d, 0x5b, + 0x5a, 0x5e, 0x61, 0x5d, 0x5b, 0x5c, 0x5f, 0x60, 0x62, 0x63, 0x61, 0x63, + 0x65, 0x65, 0x65, 0x66, 0x67, 0x6a, 0x6d, 0x6e, 0x6d, 0x6d, 0x6c, 0x6b, + 0x6a, 0x6b, 0x6c, 0x6c, 0x69, 0x67, 0x63, 0x60, 0x5d, 0x59, 0x5b, 0x5f, + 0x63, 0x63, 0x60, 0x5f, 0x60, 0x61, 0x62, 0x61, 0x60, 0x5f, 0x5e, 0x5e, + 0x5f, 0x60, 0x5f, 0x5e, 0x5f, 0x5f, 0x5f, 0x61, 0x61, 0x62, 0x63, 0x63, + 0x63, 0x64, 0x64, 0x63, 0x64, 0x64, 0x64, 0x64, 0x65, 0x63, 0x63, 0x63, + 0x61, 0x62, 0x62, 0x63, 0x62, 0x61, 0x60, 0x60, 0x5e, 0x5d, 0x5d, 0x5d, + 0x5c, 0x5a, 0x59, 0x57, 0x57, 0x56, 0x51, 0x4d, 0xb7, 0xc1, 0xc8, 0xbb, + 0xbf, 0xc3, 0xc0, 0xb6, 0xaf, 0xb5, 0xb5, 0xc7, 0xca, 0xc3, 0xb3, 0xb4, + 0xbd, 0xbb, 0xb5, 0xb1, 0xb2, 0xc2, 0xf5, 0x0f, 0x12, 0x19, 0x17, 0x12, + 0x12, 0x14, 0x11, 0x0c, 0x0b, 0x0e, 0x0c, 0x0b, 0x0b, 0x0b, 0x0e, 0x12, + 0x13, 0x18, 0x1a, 0x16, 0x13, 0x0e, 0x10, 0x0c, 0xff, 0xfc, 0xfb, 
0xf9, + 0xf8, 0xfd, 0x01, 0x02, 0x00, 0x05, 0x0b, 0x0e, 0x10, 0x11, 0x15, 0x16, + 0x15, 0x10, 0x10, 0x12, 0x14, 0x18, 0x17, 0x17, 0x14, 0x19, 0x1e, 0x1f, + 0x21, 0x1e, 0x20, 0x25, 0x25, 0x20, 0x20, 0x27, 0x2b, 0x32, 0x36, 0x37, + 0x38, 0x3d, 0x3f, 0x3d, 0x3d, 0x3b, 0x39, 0x41, 0x48, 0x4a, 0x4b, 0x4b, + 0x4f, 0x52, 0x50, 0x51, 0x55, 0x56, 0x56, 0x57, 0x57, 0x55, 0x51, 0x50, + 0x54, 0x56, 0x51, 0x4d, 0x51, 0x54, 0x55, 0x57, 0x5b, 0x5d, 0x5b, 0x5d, + 0x5d, 0x5e, 0x60, 0x60, 0x5d, 0x5b, 0x5d, 0x5d, 0x59, 0x58, 0x5d, 0x5e, + 0x5d, 0x5b, 0x5a, 0x5d, 0x61, 0x62, 0x62, 0x61, 0x61, 0x63, 0x63, 0x63, + 0x64, 0x67, 0x6a, 0x69, 0x68, 0x67, 0x67, 0x69, 0x69, 0x6a, 0x6c, 0x6c, + 0x6b, 0x67, 0x63, 0x5f, 0x5b, 0x59, 0x5d, 0x61, 0x64, 0x63, 0x61, 0x60, + 0x60, 0x62, 0x62, 0x60, 0x60, 0x5d, 0x5d, 0x5e, 0x5f, 0x61, 0x61, 0x60, + 0x5f, 0x5e, 0x5f, 0x61, 0x62, 0x62, 0x62, 0x63, 0x63, 0x63, 0x63, 0x63, + 0x63, 0x63, 0x63, 0x63, 0x63, 0x63, 0x63, 0x63, 0x62, 0x62, 0x62, 0x62, + 0x63, 0x63, 0x62, 0x62, 0x60, 0x5e, 0x5e, 0x5d, 0x5d, 0x5b, 0x5a, 0x57, + 0x55, 0x53, 0x50, 0x4a, 0xbb, 0xc5, 0xcf, 0xb8, 0xb3, 0xb6, 0xb6, 0xc0, + 0xb5, 0xc0, 0xc0, 0xc7, 0xc8, 0xc8, 0xb8, 0xbd, 0xb5, 0xb7, 0xb5, 0xb4, + 0xb5, 0xc8, 0xfa, 0x0d, 0x12, 0x1a, 0x16, 0x11, 0x10, 0x0e, 0x0c, 0x0b, + 0x0a, 0x0c, 0x09, 0x09, 0x0a, 0x0c, 0x11, 0x12, 0x13, 0x17, 0x18, 0x17, + 0x14, 0x0f, 0x0e, 0x0b, 0x01, 0xfd, 0xfb, 0xf6, 0xf9, 0xfd, 0x00, 0x00, + 0x01, 0x06, 0x0c, 0x0c, 0x0e, 0x11, 0x0e, 0x0e, 0x0c, 0x09, 0x0c, 0x12, + 0x15, 0x13, 0x12, 0x16, 0x1a, 0x1d, 0x1d, 0x1c, 0x19, 0x1c, 0x21, 0x24, + 0x20, 0x1f, 0x26, 0x28, 0x2d, 0x32, 0x37, 0x37, 0x39, 0x3d, 0x3b, 0x3c, + 0x3b, 0x3d, 0x3b, 0x44, 0x47, 0x4a, 0x4a, 0x4b, 0x50, 0x53, 0x51, 0x54, + 0x56, 0x56, 0x57, 0x55, 0x57, 0x57, 0x52, 0x4c, 0x4e, 0x54, 0x55, 0x50, + 0x4f, 0x55, 0x56, 0x57, 0x58, 0x5c, 0x5d, 0x5d, 0x5d, 0x5f, 0x5f, 0x5f, + 0x5f, 0x5d, 0x5c, 0x5e, 0x5d, 0x5c, 0x5c, 0x5d, 0x5d, 0x5e, 0x5c, 0x5d, + 0x5f, 0x60, 0x62, 0x63, 0x63, 0x63, 0x61, 0x61, 0x64, 0x67, 0x6a, 
0x6a, + 0x65, 0x64, 0x63, 0x63, 0x64, 0x67, 0x69, 0x6a, 0x6b, 0x6b, 0x68, 0x61, + 0x5d, 0x5c, 0x5d, 0x62, 0x63, 0x63, 0x62, 0x60, 0x60, 0x60, 0x60, 0x5f, + 0x5f, 0x5d, 0x5e, 0x60, 0x60, 0x61, 0x62, 0x5f, 0x5d, 0x5d, 0x5d, 0x5f, + 0x60, 0x61, 0x62, 0x63, 0x63, 0x63, 0x63, 0x62, 0x61, 0x62, 0x62, 0x62, + 0x62, 0x62, 0x62, 0x63, 0x63, 0x63, 0x63, 0x63, 0x63, 0x63, 0x63, 0x63, + 0x62, 0x60, 0x60, 0x5f, 0x5e, 0x5d, 0x5c, 0x59, 0x58, 0x55, 0x51, 0x4d, + 0xcc, 0xca, 0xce, 0xc3, 0xaf, 0xae, 0xbb, 0xca, 0xbc, 0xc2, 0xc4, 0xc7, + 0xc8, 0xc8, 0xc6, 0xc3, 0xc1, 0xb0, 0xaf, 0xb9, 0xb5, 0xc8, 0xf9, 0x10, + 0x12, 0x18, 0x17, 0x12, 0x13, 0x0f, 0x0c, 0x09, 0x08, 0x08, 0x08, 0x0a, + 0x09, 0x0d, 0x12, 0x15, 0x16, 0x15, 0x18, 0x15, 0x13, 0x10, 0x0c, 0x0b, + 0x01, 0xf9, 0xf9, 0xf5, 0xf8, 0xfb, 0xfd, 0xfc, 0x02, 0x0b, 0x09, 0x08, + 0x0a, 0x0d, 0x08, 0x08, 0x05, 0x08, 0x0d, 0x10, 0x11, 0x11, 0x13, 0x19, + 0x1d, 0x1b, 0x18, 0x17, 0x19, 0x1d, 0x1e, 0x1e, 0x1f, 0x24, 0x2a, 0x2b, + 0x30, 0x34, 0x34, 0x33, 0x3a, 0x3a, 0x3a, 0x3b, 0x39, 0x3e, 0x3e, 0x47, + 0x4a, 0x4a, 0x4a, 0x4c, 0x4f, 0x52, 0x51, 0x52, 0x56, 0x56, 0x57, 0x56, + 0x56, 0x57, 0x56, 0x4f, 0x4c, 0x52, 0x55, 0x54, 0x4f, 0x53, 0x55, 0x56, + 0x58, 0x59, 0x5a, 0x5d, 0x5d, 0x5d, 0x5f, 0x5f, 0x5e, 0x5d, 0x5d, 0x5d, + 0x5f, 0x5f, 0x5e, 0x5d, 0x5d, 0x5e, 0x5d, 0x5d, 0x5d, 0x5d, 0x5f, 0x62, + 0x63, 0x64, 0x63, 0x63, 0x64, 0x68, 0x6a, 0x6b, 0x66, 0x63, 0x63, 0x62, + 0x61, 0x63, 0x65, 0x66, 0x69, 0x69, 0x69, 0x66, 0x62, 0x5d, 0x5d, 0x63, + 0x63, 0x63, 0x63, 0x5f, 0x5e, 0x5f, 0x5d, 0x5f, 0x5f, 0x5d, 0x5f, 0x61, + 0x61, 0x61, 0x61, 0x5e, 0x5b, 0x5a, 0x5c, 0x5d, 0x5f, 0x61, 0x62, 0x62, + 0x62, 0x62, 0x61, 0x61, 0x61, 0x62, 0x63, 0x63, 0x63, 0x62, 0x63, 0x63, + 0x63, 0x63, 0x62, 0x62, 0x63, 0x63, 0x63, 0x63, 0x61, 0x61, 0x60, 0x5e, + 0x5d, 0x5c, 0x5b, 0x59, 0x58, 0x57, 0x54, 0x51, 0xcc, 0xc2, 0xc1, 0xca, + 0xbd, 0xb3, 0xba, 0xd3, 0xd5, 0xc5, 0xc1, 0xc4, 0xc8, 0xc9, 0xcb, 0xcf, + 0xc7, 0xb6, 0xc1, 0xbd, 0xb5, 0xcc, 0x00, 0x13, 0x12, 0x17, 0x12, 
0x13, + 0x14, 0x0f, 0x0d, 0x0b, 0x09, 0x07, 0x09, 0x09, 0x09, 0x0d, 0x12, 0x13, + 0x14, 0x15, 0x16, 0x14, 0x12, 0x12, 0x0a, 0x0a, 0x02, 0xf5, 0xf6, 0xf5, + 0xf5, 0xf6, 0xf6, 0xf9, 0x01, 0x09, 0x06, 0x07, 0x05, 0x07, 0x05, 0x01, + 0x04, 0x07, 0x06, 0x0c, 0x12, 0x14, 0x12, 0x13, 0x13, 0x12, 0x12, 0x16, + 0x19, 0x1c, 0x1b, 0x1d, 0x1f, 0x27, 0x2a, 0x2b, 0x30, 0x31, 0x31, 0x34, + 0x39, 0x38, 0x37, 0x3b, 0x3c, 0x41, 0x42, 0x4a, 0x4c, 0x4b, 0x4b, 0x4b, + 0x4f, 0x50, 0x50, 0x51, 0x55, 0x56, 0x57, 0x54, 0x57, 0x54, 0x56, 0x53, + 0x4b, 0x50, 0x53, 0x53, 0x54, 0x54, 0x57, 0x55, 0x58, 0x58, 0x57, 0x5c, + 0x5d, 0x5d, 0x5d, 0x5d, 0x5d, 0x5d, 0x5d, 0x5d, 0x5e, 0x60, 0x61, 0x60, + 0x5f, 0x5e, 0x5d, 0x5d, 0x5d, 0x5e, 0x5e, 0x60, 0x63, 0x63, 0x65, 0x64, + 0x64, 0x68, 0x69, 0x6a, 0x64, 0x62, 0x62, 0x62, 0x62, 0x63, 0x63, 0x63, + 0x64, 0x66, 0x69, 0x68, 0x66, 0x63, 0x61, 0x63, 0x61, 0x61, 0x60, 0x60, + 0x5f, 0x5d, 0x5d, 0x5f, 0x5f, 0x5d, 0x5e, 0x61, 0x62, 0x62, 0x5f, 0x5c, + 0x5b, 0x5a, 0x5a, 0x5a, 0x5c, 0x5d, 0x60, 0x60, 0x60, 0x60, 0x60, 0x60, + 0x61, 0x62, 0x62, 0x63, 0x63, 0x62, 0x63, 0x63, 0x63, 0x63, 0x62, 0x63, + 0x63, 0x63, 0x63, 0x63, 0x62, 0x61, 0x60, 0x5d, 0x5c, 0x5c, 0x5b, 0x5b, + 0x5a, 0x58, 0x55, 0x53, 0xca, 0xc8, 0xbb, 0xc0, 0xc7, 0xc4, 0xc0, 0xce, + 0xd2, 0xce, 0xd5, 0xd0, 0xd0, 0xce, 0xcc, 0xcb, 0xc7, 0xc1, 0xc5, 0xbb, + 0xb4, 0xda, 0x0e, 0x18, 0x14, 0x12, 0x13, 0x13, 0x11, 0x0e, 0x0d, 0x0b, + 0x0a, 0x0b, 0x0a, 0x08, 0x09, 0x0c, 0x11, 0x12, 0x14, 0x14, 0x13, 0x13, + 0x11, 0x11, 0x0a, 0x06, 0x02, 0xf6, 0xf7, 0xf2, 0xf0, 0xf2, 0xf2, 0xf7, + 0x02, 0x08, 0x05, 0x03, 0x02, 0x05, 0xfe, 0xfe, 0x00, 0x00, 0x03, 0x10, + 0x12, 0x0c, 0x0a, 0x0d, 0x0d, 0x10, 0x0f, 0x14, 0x1b, 0x19, 0x1a, 0x1d, + 0x22, 0x2b, 0x2c, 0x2d, 0x2d, 0x2b, 0x2e, 0x36, 0x37, 0x35, 0x39, 0x3d, + 0x3e, 0x42, 0x44, 0x4a, 0x4a, 0x4b, 0x4b, 0x49, 0x4d, 0x50, 0x51, 0x52, + 0x52, 0x53, 0x51, 0x53, 0x57, 0x55, 0x52, 0x54, 0x4e, 0x4e, 0x51, 0x53, + 0x54, 0x57, 0x55, 0x56, 0x58, 0x58, 0x57, 0x59, 0x5c, 0x5c, 0x5d, 
0x5d, + 0x5c, 0x5d, 0x5d, 0x5d, 0x5f, 0x60, 0x61, 0x61, 0x61, 0x60, 0x5f, 0x5e, + 0x5c, 0x5e, 0x5f, 0x5f, 0x61, 0x62, 0x63, 0x65, 0x64, 0x68, 0x68, 0x68, + 0x61, 0x61, 0x63, 0x62, 0x61, 0x62, 0x63, 0x63, 0x63, 0x62, 0x66, 0x67, + 0x65, 0x64, 0x63, 0x63, 0x60, 0x60, 0x5f, 0x5f, 0x5e, 0x5b, 0x5a, 0x5d, + 0x5e, 0x5c, 0x5d, 0x5f, 0x60, 0x61, 0x5f, 0x5b, 0x5a, 0x5b, 0x59, 0x59, + 0x59, 0x5a, 0x5b, 0x5d, 0x5e, 0x5e, 0x5f, 0x60, 0x61, 0x61, 0x62, 0x62, + 0x62, 0x62, 0x62, 0x62, 0x61, 0x61, 0x62, 0x62, 0x62, 0x63, 0x62, 0x62, + 0x60, 0x60, 0x61, 0x5f, 0x5e, 0x5e, 0x5d, 0x5d, 0x5c, 0x5a, 0x59, 0x58, + 0xc1, 0xc8, 0xd4, 0xcd, 0xc7, 0xc6, 0xc4, 0xc6, 0xc6, 0xd7, 0xd8, 0xd5, + 0xd4, 0xcc, 0xc4, 0xc8, 0xc7, 0xc6, 0xc8, 0xc8, 0xb0, 0xe5, 0x13, 0x1a, + 0x1b, 0x15, 0x15, 0x0f, 0x0c, 0x0d, 0x0d, 0x0c, 0x0c, 0x0d, 0x0c, 0x07, + 0x0b, 0x0d, 0x10, 0x12, 0x12, 0x15, 0x12, 0x12, 0x11, 0x10, 0x0c, 0x03, + 0x00, 0xf7, 0xf3, 0xec, 0xe8, 0xed, 0xed, 0xf5, 0x00, 0x04, 0x00, 0x01, + 0xff, 0xfe, 0xf8, 0xfa, 0xfc, 0x00, 0x09, 0x0e, 0x0a, 0x04, 0x0b, 0x0c, + 0x0d, 0x0d, 0x0c, 0x18, 0x19, 0x18, 0x18, 0x1c, 0x25, 0x2c, 0x2b, 0x2b, + 0x2b, 0x29, 0x2f, 0x36, 0x35, 0x36, 0x3c, 0x3e, 0x3e, 0x44, 0x46, 0x4a, + 0x4a, 0x4d, 0x4c, 0x48, 0x4c, 0x4f, 0x50, 0x50, 0x50, 0x52, 0x51, 0x51, + 0x50, 0x55, 0x54, 0x4f, 0x4f, 0x4f, 0x4f, 0x51, 0x53, 0x55, 0x57, 0x56, + 0x57, 0x58, 0x58, 0x57, 0x5a, 0x5b, 0x5d, 0x5e, 0x5c, 0x5d, 0x5e, 0x5e, + 0x5f, 0x5f, 0x60, 0x62, 0x61, 0x61, 0x60, 0x5e, 0x5c, 0x5d, 0x5f, 0x5e, + 0x5f, 0x5f, 0x60, 0x63, 0x65, 0x67, 0x66, 0x65, 0x5d, 0x60, 0x63, 0x62, + 0x63, 0x62, 0x63, 0x63, 0x63, 0x61, 0x61, 0x65, 0x64, 0x62, 0x62, 0x63, + 0x61, 0x61, 0x60, 0x5e, 0x5c, 0x5b, 0x5b, 0x5b, 0x5d, 0x5c, 0x5b, 0x5e, + 0x5f, 0x60, 0x5f, 0x5b, 0x59, 0x5b, 0x5c, 0x5b, 0x5b, 0x59, 0x59, 0x5a, + 0x5b, 0x5d, 0x5e, 0x60, 0x60, 0x60, 0x61, 0x61, 0x61, 0x61, 0x61, 0x60, + 0x5f, 0x5f, 0x5f, 0x60, 0x61, 0x61, 0x61, 0x61, 0x60, 0x60, 0x61, 0x61, + 0x60, 0x5f, 0x5d, 0x5d, 0x5c, 0x5a, 0x58, 0x57, 0xbc, 0xbe, 0xc9, 
0xce, + 0xc7, 0xc0, 0xc1, 0xc1, 0xb7, 0xbf, 0xc8, 0xd1, 0xcc, 0xc6, 0xc1, 0xc4, + 0xc3, 0xc1, 0xc4, 0xc2, 0xba, 0xfc, 0x16, 0x1a, 0x1c, 0x18, 0x16, 0x0e, + 0x0b, 0x0d, 0x0d, 0x0c, 0x0d, 0x11, 0x0d, 0x0a, 0x0b, 0x0c, 0x0f, 0x11, + 0x12, 0x12, 0x11, 0x0f, 0x0d, 0x0c, 0x0b, 0xff, 0xff, 0xf4, 0xee, 0xe4, + 0xe1, 0xe6, 0xec, 0xf4, 0xfe, 0xfc, 0xfd, 0xfc, 0xf9, 0xf6, 0xf7, 0xf7, + 0xf8, 0x01, 0x06, 0x05, 0x02, 0x06, 0x09, 0x05, 0x08, 0x09, 0x10, 0x17, + 0x18, 0x18, 0x18, 0x1f, 0x28, 0x2b, 0x2a, 0x2b, 0x29, 0x27, 0x30, 0x37, + 0x36, 0x37, 0x3d, 0x40, 0x3d, 0x44, 0x47, 0x49, 0x4c, 0x4c, 0x4a, 0x47, + 0x4a, 0x4e, 0x4e, 0x4f, 0x4e, 0x50, 0x50, 0x4f, 0x4a, 0x50, 0x53, 0x4d, + 0x4b, 0x4e, 0x50, 0x50, 0x53, 0x54, 0x56, 0x55, 0x55, 0x58, 0x58, 0x57, + 0x59, 0x5a, 0x5c, 0x5e, 0x5d, 0x5d, 0x5d, 0x5e, 0x5f, 0x60, 0x60, 0x61, + 0x61, 0x61, 0x61, 0x60, 0x5d, 0x5d, 0x5f, 0x5d, 0x5d, 0x5d, 0x5f, 0x62, + 0x65, 0x66, 0x63, 0x62, 0x5b, 0x5d, 0x63, 0x62, 0x63, 0x63, 0x63, 0x63, + 0x62, 0x61, 0x5e, 0x61, 0x62, 0x60, 0x60, 0x61, 0x60, 0x5e, 0x5f, 0x5e, + 0x5b, 0x5a, 0x58, 0x5a, 0x5d, 0x5c, 0x59, 0x5c, 0x5e, 0x5f, 0x5e, 0x5c, + 0x58, 0x5b, 0x5d, 0x5d, 0x5e, 0x5d, 0x5b, 0x59, 0x57, 0x5a, 0x5d, 0x5e, + 0x5f, 0x5f, 0x60, 0x60, 0x60, 0x60, 0x61, 0x61, 0x5f, 0x5e, 0x5d, 0x5d, + 0x5e, 0x61, 0x60, 0x60, 0x60, 0x61, 0x61, 0x60, 0x5f, 0x5e, 0x5e, 0x5d, + 0x5c, 0x5b, 0x5a, 0x57, 0xc1, 0xc1, 0xc2, 0xc1, 0xc1, 0xbb, 0xc1, 0xcb, + 0xc4, 0xbb, 0xc2, 0xcd, 0xcd, 0xc5, 0xc2, 0xc2, 0xc2, 0xc5, 0xc7, 0xb2, + 0xc1, 0x06, 0x18, 0x15, 0x18, 0x18, 0x17, 0x12, 0x0c, 0x0c, 0x0d, 0x0c, + 0x0c, 0x10, 0x0d, 0x0b, 0x0c, 0x0c, 0x0f, 0x0f, 0x10, 0x11, 0x12, 0x0d, + 0x0c, 0x08, 0x0b, 0x01, 0xfc, 0xf8, 0xea, 0xe1, 0xdd, 0xe5, 0xec, 0xf2, + 0xf9, 0xf8, 0xf9, 0xf6, 0xf0, 0xee, 0xf3, 0xf5, 0xf9, 0x00, 0x01, 0x04, + 0x07, 0x06, 0x02, 0x00, 0x02, 0x08, 0x12, 0x16, 0x18, 0x16, 0x19, 0x22, + 0x28, 0x26, 0x27, 0x28, 0x27, 0x29, 0x32, 0x37, 0x35, 0x39, 0x3f, 0x40, + 0x3d, 0x43, 0x44, 0x45, 0x4a, 0x4a, 0x47, 0x46, 0x46, 0x4a, 0x4b, 
0x4c, + 0x4b, 0x4b, 0x4c, 0x4e, 0x4a, 0x4c, 0x50, 0x50, 0x4a, 0x48, 0x4d, 0x4e, + 0x50, 0x54, 0x54, 0x54, 0x55, 0x56, 0x57, 0x57, 0x58, 0x59, 0x58, 0x5c, + 0x5c, 0x5c, 0x5d, 0x5d, 0x5e, 0x60, 0x60, 0x60, 0x61, 0x62, 0x61, 0x60, + 0x5d, 0x5c, 0x5e, 0x5c, 0x5c, 0x5d, 0x5e, 0x61, 0x63, 0x65, 0x60, 0x5e, + 0x58, 0x5c, 0x63, 0x62, 0x63, 0x63, 0x63, 0x62, 0x62, 0x60, 0x5d, 0x5d, + 0x60, 0x5e, 0x5d, 0x5e, 0x5d, 0x5d, 0x5d, 0x5d, 0x5b, 0x59, 0x54, 0x57, + 0x5c, 0x5b, 0x57, 0x58, 0x5c, 0x5d, 0x5d, 0x5d, 0x5a, 0x59, 0x5b, 0x5c, + 0x5e, 0x5e, 0x5d, 0x5c, 0x5a, 0x58, 0x5a, 0x5c, 0x5d, 0x5e, 0x5f, 0x5f, + 0x5f, 0x5f, 0x5f, 0x60, 0x5d, 0x5e, 0x5e, 0x5e, 0x5e, 0x5f, 0x5f, 0x61, + 0x60, 0x60, 0x60, 0x5e, 0x5e, 0x5d, 0x5d, 0x5d, 0x5c, 0x5c, 0x5a, 0x58, + 0xc5, 0xbb, 0xc1, 0xc4, 0xbf, 0xb9, 0xc8, 0xdd, 0xeb, 0xde, 0xc8, 0xc6, + 0xc7, 0xc5, 0xc4, 0xc6, 0xc6, 0xc1, 0xc6, 0xbc, 0xd1, 0x0d, 0x18, 0x12, + 0x14, 0x15, 0x17, 0x11, 0x0e, 0x0e, 0x0c, 0x0c, 0x0d, 0x0e, 0x0e, 0x0c, + 0x0d, 0x10, 0x0d, 0x0e, 0x0f, 0x0f, 0x12, 0x0f, 0x09, 0x08, 0x09, 0x02, + 0xf9, 0xf4, 0xe8, 0xda, 0xda, 0xe1, 0xe5, 0xec, 0xf5, 0xf5, 0xf3, 0xed, + 0xea, 0xeb, 0xf0, 0xf2, 0xf9, 0xfd, 0xff, 0x06, 0x09, 0x00, 0xfd, 0x00, + 0x01, 0x0a, 0x0f, 0x15, 0x11, 0x13, 0x1b, 0x22, 0x22, 0x25, 0x28, 0x27, + 0x27, 0x2c, 0x33, 0x37, 0x37, 0x39, 0x3f, 0x40, 0x3d, 0x40, 0x40, 0x43, + 0x48, 0x48, 0x43, 0x44, 0x44, 0x47, 0x4a, 0x48, 0x46, 0x47, 0x48, 0x4b, + 0x4a, 0x4a, 0x4d, 0x4e, 0x4a, 0x47, 0x4b, 0x4c, 0x4d, 0x51, 0x54, 0x54, + 0x52, 0x54, 0x57, 0x58, 0x58, 0x59, 0x58, 0x59, 0x5b, 0x5c, 0x5c, 0x5c, + 0x5c, 0x5f, 0x5f, 0x60, 0x61, 0x61, 0x61, 0x60, 0x5c, 0x5c, 0x5d, 0x5c, + 0x5b, 0x5c, 0x5d, 0x5e, 0x62, 0x63, 0x5d, 0x5c, 0x57, 0x5b, 0x62, 0x62, + 0x62, 0x62, 0x62, 0x60, 0x60, 0x60, 0x5c, 0x5b, 0x5d, 0x5d, 0x5d, 0x5d, + 0x5c, 0x5c, 0x5c, 0x5c, 0x5a, 0x57, 0x50, 0x56, 0x5c, 0x5c, 0x57, 0x57, + 0x59, 0x5c, 0x5d, 0x5d, 0x59, 0x57, 0x5a, 0x5a, 0x5c, 0x5d, 0x5d, 0x5d, + 0x5c, 0x5a, 0x59, 0x5a, 0x5a, 0x5d, 0x5d, 0x5f, 0x5e, 0x5e, 0x5e, 
0x5e, + 0x5d, 0x5d, 0x5d, 0x5d, 0x5e, 0x5e, 0x5e, 0x5f, 0x5f, 0x5f, 0x5f, 0x5e, + 0x5e, 0x5d, 0x5c, 0x5d, 0x5b, 0x5a, 0x59, 0x59, 0xc8, 0xb8, 0xbf, 0xc3, + 0xc6, 0xc0, 0xd6, 0xdb, 0xdb, 0xce, 0xc2, 0xc3, 0xc3, 0xc5, 0xc8, 0xc9, + 0xc4, 0xbe, 0xc0, 0xc7, 0xda, 0x10, 0x1b, 0x16, 0x13, 0x13, 0x16, 0x0e, + 0x0d, 0x0e, 0x0e, 0x0f, 0x0c, 0x0d, 0x0d, 0x0b, 0x0c, 0x0e, 0x0c, 0x0d, + 0x0e, 0x0c, 0x0d, 0x0c, 0x03, 0x06, 0x06, 0x01, 0xf9, 0xf4, 0xe6, 0xd6, + 0xd7, 0xd9, 0xde, 0xe8, 0xf0, 0xee, 0xe8, 0xe5, 0xe1, 0xe8, 0xea, 0xee, + 0xf7, 0xfc, 0x00, 0x06, 0x04, 0xfa, 0xfc, 0xfd, 0x01, 0x06, 0x0d, 0x0f, + 0x0e, 0x12, 0x1c, 0x21, 0x1f, 0x24, 0x26, 0x26, 0x28, 0x2f, 0x33, 0x38, + 0x37, 0x37, 0x3c, 0x3e, 0x3a, 0x3d, 0x3c, 0x3e, 0x45, 0x46, 0x3f, 0x41, + 0x41, 0x42, 0x45, 0x44, 0x3f, 0x43, 0x44, 0x45, 0x45, 0x48, 0x4c, 0x4e, + 0x4b, 0x47, 0x47, 0x4c, 0x4e, 0x50, 0x50, 0x54, 0x52, 0x51, 0x54, 0x57, + 0x56, 0x57, 0x57, 0x57, 0x5a, 0x5c, 0x5b, 0x5b, 0x5a, 0x5e, 0x5e, 0x5f, + 0x60, 0x60, 0x60, 0x5f, 0x5c, 0x5b, 0x5e, 0x5c, 0x59, 0x5a, 0x5b, 0x5b, + 0x5f, 0x63, 0x5d, 0x5b, 0x59, 0x5b, 0x63, 0x63, 0x62, 0x63, 0x62, 0x5f, + 0x5f, 0x60, 0x5b, 0x59, 0x5b, 0x5b, 0x5c, 0x5b, 0x5a, 0x5a, 0x5a, 0x5a, + 0x58, 0x53, 0x4f, 0x56, 0x59, 0x5a, 0x57, 0x55, 0x57, 0x5a, 0x5b, 0x5d, + 0x58, 0x56, 0x57, 0x59, 0x5a, 0x5b, 0x5d, 0x5d, 0x5d, 0x5b, 0x59, 0x5b, + 0x5b, 0x5d, 0x5d, 0x5e, 0x5e, 0x5d, 0x5d, 0x5d, 0x5d, 0x5c, 0x5c, 0x5c, + 0x5d, 0x5d, 0x5e, 0x5f, 0x5f, 0x5f, 0x5f, 0x5f, 0x5d, 0x5d, 0x5d, 0x5b, + 0x5b, 0x5a, 0x5a, 0x59, 0xca, 0xc1, 0xbd, 0xbf, 0xba, 0xba, 0xc4, 0xc4, + 0xc5, 0xc1, 0xc3, 0xc5, 0xc5, 0xc8, 0xc5, 0xc2, 0xc3, 0xbe, 0xbf, 0xc7, + 0xcd, 0x14, 0x1f, 0x1a, 0x18, 0x14, 0x13, 0x10, 0x0e, 0x0e, 0x0e, 0x11, + 0x0e, 0x0e, 0x0d, 0x0c, 0x0c, 0x0c, 0x0e, 0x09, 0x0d, 0x0b, 0x0a, 0x07, + 0x04, 0xff, 0xff, 0xff, 0xf8, 0xef, 0xe2, 0xd2, 0xd2, 0xd3, 0xde, 0xe6, + 0xe9, 0xe2, 0xe0, 0xd9, 0xdc, 0xe5, 0xe8, 0xed, 0xf8, 0xfb, 0x00, 0x05, + 0xfc, 0xf9, 0xfa, 0xfb, 0xff, 0x04, 0x0a, 0x0c, 0x0c, 0x11, 0x18, 
0x1f, + 0x1e, 0x23, 0x25, 0x25, 0x2b, 0x31, 0x33, 0x35, 0x37, 0x37, 0x3a, 0x3c, + 0x39, 0x3b, 0x37, 0x39, 0x42, 0x42, 0x3c, 0x3b, 0x3e, 0x3e, 0x41, 0x42, + 0x3d, 0x3e, 0x44, 0x44, 0x3f, 0x43, 0x49, 0x4d, 0x4b, 0x46, 0x47, 0x4a, + 0x4b, 0x4d, 0x4d, 0x4e, 0x53, 0x51, 0x51, 0x55, 0x57, 0x57, 0x57, 0x57, + 0x58, 0x5a, 0x5a, 0x5b, 0x5a, 0x5c, 0x5d, 0x5d, 0x5f, 0x5e, 0x5e, 0x5f, + 0x5c, 0x5b, 0x5d, 0x5d, 0x59, 0x59, 0x59, 0x5a, 0x5d, 0x5e, 0x5d, 0x5b, + 0x57, 0x5a, 0x61, 0x62, 0x60, 0x61, 0x61, 0x5d, 0x5d, 0x5e, 0x5a, 0x57, + 0x59, 0x59, 0x5b, 0x59, 0x56, 0x57, 0x58, 0x57, 0x56, 0x4f, 0x4a, 0x52, + 0x57, 0x59, 0x56, 0x54, 0x56, 0x58, 0x59, 0x5b, 0x5a, 0x56, 0x56, 0x57, + 0x59, 0x5a, 0x5a, 0x5c, 0x5c, 0x5c, 0x5c, 0x5b, 0x5b, 0x5c, 0x5d, 0x5d, + 0x5e, 0x5d, 0x5b, 0x5b, 0x5b, 0x5b, 0x5b, 0x5c, 0x5d, 0x5d, 0x5f, 0x60, + 0x5f, 0x5f, 0x5e, 0x5e, 0x5d, 0x5d, 0x5d, 0x5b, 0x5b, 0x59, 0x58, 0x58, + 0xca, 0xca, 0xbf, 0xc6, 0xc9, 0xc6, 0xc2, 0xc5, 0xcd, 0xc3, 0xc5, 0xca, + 0xce, 0xc5, 0xc0, 0xc4, 0xbf, 0xbf, 0xd0, 0xcb, 0xbf, 0x15, 0x1d, 0x16, + 0x18, 0x18, 0x12, 0x11, 0x0e, 0x0c, 0x0e, 0x0f, 0x0e, 0x11, 0x0e, 0x0e, + 0x0e, 0x0c, 0x0c, 0x06, 0x07, 0x0b, 0x06, 0x02, 0x02, 0xfd, 0xfe, 0xfc, + 0xf7, 0xed, 0xe3, 0xd2, 0xcc, 0xd6, 0xe0, 0xd9, 0xda, 0xda, 0xd4, 0xd6, + 0xde, 0xe2, 0xe3, 0xf0, 0xf7, 0xf8, 0xff, 0x00, 0xf7, 0xf9, 0xf6, 0xfa, + 0xfc, 0x01, 0x07, 0x0b, 0x0c, 0x11, 0x18, 0x1b, 0x1d, 0x24, 0x25, 0x27, + 0x2d, 0x32, 0x35, 0x35, 0x36, 0x34, 0x38, 0x39, 0x36, 0x35, 0x31, 0x31, + 0x38, 0x3c, 0x39, 0x37, 0x3b, 0x3e, 0x3f, 0x3d, 0x3c, 0x3e, 0x43, 0x42, + 0x3f, 0x40, 0x43, 0x48, 0x4b, 0x44, 0x44, 0x48, 0x4b, 0x4b, 0x4d, 0x4b, + 0x50, 0x50, 0x4d, 0x51, 0x55, 0x57, 0x56, 0x57, 0x58, 0x59, 0x59, 0x5a, + 0x59, 0x5a, 0x5d, 0x5e, 0x5f, 0x5f, 0x60, 0x5e, 0x5b, 0x59, 0x5d, 0x5c, + 0x5a, 0x59, 0x58, 0x5a, 0x5b, 0x5a, 0x5b, 0x5c, 0x5a, 0x5a, 0x60, 0x61, + 0x5f, 0x5f, 0x5e, 0x5d, 0x5c, 0x5d, 0x59, 0x58, 0x58, 0x57, 0x59, 0x57, + 0x53, 0x56, 0x57, 0x55, 0x52, 0x4c, 0x48, 0x50, 0x57, 0x57, 0x50, 
0x52, + 0x53, 0x56, 0x57, 0x59, 0x58, 0x57, 0x55, 0x55, 0x57, 0x57, 0x57, 0x5a, + 0x5b, 0x5b, 0x5d, 0x5c, 0x5a, 0x5a, 0x5c, 0x5d, 0x5d, 0x5e, 0x5d, 0x5c, + 0x5b, 0x5a, 0x5a, 0x5c, 0x5e, 0x5d, 0x5d, 0x5f, 0x5f, 0x5d, 0x5c, 0x5d, + 0x5c, 0x5c, 0x5b, 0x5b, 0x5a, 0x57, 0x54, 0x55, 0xc9, 0xcb, 0xc1, 0xc3, + 0xc8, 0xc8, 0xc8, 0xce, 0xc9, 0xc5, 0xc9, 0xca, 0xca, 0xc4, 0xc3, 0xc0, + 0xbb, 0xcb, 0xcf, 0xbf, 0xb9, 0x0b, 0x1c, 0x12, 0x14, 0x17, 0x0f, 0x0e, + 0x0d, 0x0e, 0x0c, 0x0c, 0x0f, 0x11, 0x0f, 0x0e, 0x0d, 0x0c, 0x0c, 0x06, + 0xfd, 0x05, 0x04, 0x00, 0x00, 0xfe, 0xf9, 0xf6, 0xf5, 0xe9, 0xe1, 0xd1, + 0xd0, 0xd7, 0xd3, 0xce, 0xd9, 0xd3, 0xd0, 0xd6, 0xd8, 0xde, 0xe7, 0xf4, + 0xf5, 0xf6, 0xfe, 0xf9, 0xf2, 0xf5, 0xf7, 0xf8, 0xf9, 0xfd, 0x04, 0x07, + 0x06, 0x0e, 0x18, 0x15, 0x1c, 0x24, 0x25, 0x27, 0x2d, 0x30, 0x33, 0x30, + 0x31, 0x32, 0x34, 0x33, 0x2e, 0x30, 0x2e, 0x2d, 0x2f, 0x34, 0x35, 0x34, + 0x35, 0x38, 0x3b, 0x3b, 0x3b, 0x3e, 0x40, 0x3e, 0x3e, 0x3e, 0x3e, 0x43, + 0x46, 0x46, 0x42, 0x45, 0x4a, 0x4a, 0x4a, 0x4a, 0x4d, 0x51, 0x50, 0x4e, + 0x52, 0x57, 0x56, 0x56, 0x57, 0x59, 0x59, 0x59, 0x58, 0x58, 0x5a, 0x5d, + 0x5f, 0x5f, 0x60, 0x5e, 0x5c, 0x57, 0x5d, 0x5a, 0x5b, 0x5a, 0x57, 0x5a, + 0x5d, 0x59, 0x59, 0x5d, 0x5b, 0x59, 0x60, 0x60, 0x5d, 0x5d, 0x5b, 0x5b, + 0x5b, 0x5c, 0x57, 0x57, 0x58, 0x57, 0x57, 0x53, 0x52, 0x53, 0x54, 0x51, + 0x50, 0x4a, 0x47, 0x4e, 0x55, 0x56, 0x50, 0x51, 0x52, 0x53, 0x54, 0x57, + 0x57, 0x57, 0x56, 0x55, 0x55, 0x56, 0x54, 0x57, 0x5a, 0x5a, 0x5c, 0x5d, + 0x5c, 0x59, 0x5b, 0x5b, 0x5b, 0x5d, 0x5d, 0x5d, 0x5c, 0x5b, 0x5a, 0x5b, + 0x5d, 0x5d, 0x5d, 0x5d, 0x5d, 0x5d, 0x5d, 0x5d, 0x5c, 0x5b, 0x5a, 0x5a, + 0x59, 0x57, 0x54, 0x54, 0xcc, 0xc9, 0xc2, 0xc5, 0xc1, 0xbe, 0xc3, 0xc4, + 0xc2, 0xc6, 0xc6, 0xbd, 0xbb, 0xc2, 0xc4, 0xc8, 0xce, 0xd3, 0xb8, 0xb4, + 0xbc, 0xfb, 0x18, 0x0f, 0x10, 0x11, 0x0d, 0x0e, 0x0e, 0x0e, 0x0a, 0x0b, + 0x0f, 0x10, 0x10, 0x0f, 0x0c, 0x0b, 0x09, 0x06, 0xfd, 0xfa, 0x01, 0xfe, + 0xfd, 0xf9, 0xf3, 0xf2, 0xf3, 0xed, 0xe0, 0xd1, 0xd2, 0xd1, 0xc8, 
0xcc, + 0xd3, 0xd1, 0xcc, 0xd4, 0xd7, 0xdb, 0xe9, 0xf2, 0xef, 0xf6, 0xf9, 0xf3, + 0xef, 0xf0, 0xf4, 0xf1, 0xf6, 0xfb, 0x05, 0x03, 0x05, 0x10, 0x16, 0x11, + 0x1a, 0x23, 0x25, 0x27, 0x2d, 0x31, 0x34, 0x2e, 0x2c, 0x2d, 0x30, 0x2c, + 0x26, 0x25, 0x26, 0x25, 0x2a, 0x2c, 0x2f, 0x30, 0x31, 0x31, 0x37, 0x39, + 0x38, 0x3e, 0x3e, 0x3c, 0x3c, 0x3f, 0x3a, 0x3f, 0x43, 0x43, 0x41, 0x45, + 0x4a, 0x4b, 0x47, 0x47, 0x4a, 0x4c, 0x50, 0x50, 0x50, 0x54, 0x53, 0x54, + 0x56, 0x57, 0x58, 0x58, 0x58, 0x57, 0x59, 0x5b, 0x5d, 0x5e, 0x5e, 0x5d, + 0x5a, 0x56, 0x5b, 0x58, 0x5b, 0x5b, 0x58, 0x5b, 0x5b, 0x59, 0x59, 0x5d, + 0x5a, 0x58, 0x60, 0x5f, 0x5b, 0x5b, 0x57, 0x59, 0x58, 0x58, 0x52, 0x57, + 0x55, 0x54, 0x50, 0x4e, 0x51, 0x50, 0x50, 0x4e, 0x4c, 0x44, 0x41, 0x4d, + 0x54, 0x54, 0x50, 0x4f, 0x51, 0x51, 0x52, 0x57, 0x55, 0x55, 0x55, 0x55, + 0x52, 0x56, 0x53, 0x52, 0x57, 0x59, 0x5b, 0x5c, 0x5b, 0x5a, 0x59, 0x59, + 0x59, 0x5b, 0x5a, 0x5b, 0x5b, 0x59, 0x59, 0x58, 0x5a, 0x5c, 0x5c, 0x5a, + 0x5b, 0x5c, 0x5d, 0x5d, 0x5c, 0x5c, 0x5a, 0x59, 0x58, 0x57, 0x57, 0x53, + 0xca, 0xc3, 0xbc, 0xc2, 0xbb, 0xb5, 0xb7, 0xbc, 0xbd, 0xbe, 0xbe, 0xb4, + 0xaa, 0xb6, 0xc4, 0xd0, 0xd7, 0xc6, 0xbd, 0xb4, 0xb5, 0xea, 0x14, 0x0c, + 0x0b, 0x0e, 0x0e, 0x14, 0x12, 0x0f, 0x0c, 0x0a, 0x0d, 0x10, 0x0b, 0x0c, + 0x0a, 0x0a, 0x06, 0x01, 0x00, 0xf5, 0xf5, 0xfb, 0xf9, 0xf8, 0xf1, 0xed, + 0xee, 0xeb, 0xe0, 0xd5, 0xcb, 0xc8, 0xc1, 0xc4, 0xc9, 0xd3, 0xcd, 0xd1, + 0xd6, 0xde, 0xeb, 0xed, 0xed, 0xf3, 0xf2, 0xec, 0xee, 0xf1, 0xf0, 0xed, + 0xf1, 0xf8, 0x00, 0x02, 0x03, 0x0c, 0x12, 0x0e, 0x18, 0x21, 0x25, 0x26, + 0x2a, 0x2e, 0x31, 0x2b, 0x28, 0x26, 0x26, 0x22, 0x1c, 0x19, 0x1b, 0x18, + 0x1e, 0x2a, 0x29, 0x28, 0x2c, 0x2f, 0x32, 0x35, 0x34, 0x3c, 0x3e, 0x38, + 0x3a, 0x41, 0x3d, 0x3d, 0x41, 0x3d, 0x3e, 0x44, 0x48, 0x4b, 0x48, 0x46, + 0x48, 0x48, 0x4b, 0x50, 0x50, 0x51, 0x52, 0x51, 0x56, 0x55, 0x55, 0x57, + 0x57, 0x57, 0x57, 0x59, 0x5d, 0x5d, 0x5d, 0x5d, 0x5b, 0x57, 0x59, 0x59, + 0x59, 0x5a, 0x59, 0x5b, 0x5a, 0x59, 0x5b, 0x5d, 0x58, 0x57, 0x5e, 
0x5e, + 0x5a, 0x5a, 0x56, 0x57, 0x56, 0x57, 0x53, 0x50, 0x4e, 0x4a, 0x46, 0x48, + 0x4b, 0x4e, 0x50, 0x4c, 0x4b, 0x45, 0x40, 0x4b, 0x52, 0x51, 0x4a, 0x4c, + 0x50, 0x50, 0x52, 0x52, 0x52, 0x50, 0x52, 0x54, 0x52, 0x51, 0x50, 0x4f, + 0x53, 0x57, 0x58, 0x59, 0x5a, 0x59, 0x58, 0x58, 0x59, 0x59, 0x57, 0x57, + 0x58, 0x58, 0x57, 0x57, 0x5a, 0x5b, 0x5c, 0x5b, 0x5d, 0x5d, 0x5d, 0x5d, + 0x5d, 0x5d, 0x5b, 0x5a, 0x5b, 0x59, 0x58, 0x57, 0xd0, 0xcc, 0xbe, 0xb8, + 0xb6, 0xb4, 0xae, 0xb0, 0xb9, 0xba, 0xbf, 0xbc, 0xbb, 0xb6, 0xc3, 0xcb, + 0xc5, 0xbb, 0xbb, 0xbd, 0xb5, 0xe9, 0x1a, 0x12, 0x14, 0x14, 0x16, 0x15, + 0x12, 0x10, 0x0c, 0x09, 0x0d, 0x0d, 0x07, 0x09, 0x05, 0x08, 0x03, 0xfb, + 0xfb, 0xf6, 0xef, 0xee, 0xf3, 0xf4, 0xf4, 0xed, 0xe9, 0xe8, 0xe0, 0xce, + 0xc1, 0xc1, 0xb9, 0xc0, 0xc4, 0xce, 0xcc, 0xd2, 0xda, 0xe1, 0xe7, 0xe9, + 0xea, 0xef, 0xeb, 0xea, 0xee, 0xed, 0xed, 0xec, 0xed, 0xf4, 0xfc, 0xfe, + 0xfe, 0x08, 0x0e, 0x0c, 0x18, 0x20, 0x24, 0x25, 0x27, 0x2b, 0x2b, 0x29, + 0x21, 0x23, 0x1d, 0x17, 0x12, 0x0d, 0x10, 0x0c, 0x10, 0x21, 0x25, 0x25, + 0x28, 0x2d, 0x30, 0x31, 0x2f, 0x37, 0x3d, 0x36, 0x37, 0x3f, 0x3d, 0x3e, + 0x42, 0x3e, 0x3d, 0x42, 0x48, 0x4a, 0x45, 0x45, 0x47, 0x47, 0x46, 0x4c, + 0x4c, 0x4d, 0x50, 0x50, 0x54, 0x52, 0x50, 0x53, 0x57, 0x57, 0x56, 0x57, + 0x5c, 0x5c, 0x5d, 0x5d, 0x5a, 0x58, 0x58, 0x5b, 0x5b, 0x5b, 0x59, 0x5c, + 0x59, 0x5a, 0x5d, 0x5c, 0x57, 0x56, 0x5d, 0x5d, 0x59, 0x59, 0x57, 0x57, + 0x55, 0x55, 0x4d, 0x45, 0x46, 0x3b, 0x41, 0x43, 0x44, 0x4a, 0x4e, 0x4d, + 0x48, 0x42, 0x42, 0x46, 0x4f, 0x4f, 0x4a, 0x4a, 0x4d, 0x4e, 0x50, 0x50, + 0x50, 0x4d, 0x4c, 0x4f, 0x51, 0x51, 0x4f, 0x4e, 0x50, 0x53, 0x57, 0x57, + 0x57, 0x57, 0x56, 0x57, 0x58, 0x57, 0x57, 0x55, 0x56, 0x57, 0x57, 0x57, + 0x59, 0x59, 0x5b, 0x5a, 0x5c, 0x5d, 0x5d, 0x5c, 0x5b, 0x5b, 0x5b, 0x5a, + 0x59, 0x57, 0x57, 0x56, 0xc5, 0xce, 0xc9, 0xbe, 0xb4, 0xb4, 0xa9, 0xa7, + 0xb0, 0xb8, 0xc1, 0xc6, 0xc9, 0xc1, 0xc3, 0xc4, 0xbd, 0xb5, 0xb4, 0xbb, + 0xbb, 0xf0, 0x0f, 0x10, 0x1e, 0x1d, 0x1a, 0x15, 0x10, 0x10, 0x0b, 
0x0b, + 0x0c, 0x09, 0x03, 0x08, 0x05, 0x06, 0x01, 0xf7, 0xf3, 0xf1, 0xeb, 0xe7, + 0xe7, 0xed, 0xef, 0xed, 0xe7, 0xe4, 0xe1, 0xd4, 0xc8, 0xc5, 0xbb, 0xbb, + 0xc1, 0xc9, 0xcb, 0xd4, 0xd8, 0xdd, 0xe4, 0xe3, 0xe4, 0xea, 0xe6, 0xe6, + 0xea, 0xe7, 0xe6, 0xe7, 0xe9, 0xf3, 0xfa, 0xfa, 0xfa, 0x05, 0x0c, 0x0c, + 0x18, 0x20, 0x25, 0x25, 0x25, 0x26, 0x25, 0x25, 0x1a, 0x1f, 0x1b, 0x0f, + 0x08, 0x01, 0x07, 0x06, 0x07, 0x15, 0x21, 0x23, 0x25, 0x2a, 0x2b, 0x30, + 0x2c, 0x32, 0x37, 0x34, 0x32, 0x3b, 0x3e, 0x39, 0x3d, 0x3e, 0x3a, 0x3e, + 0x43, 0x49, 0x48, 0x43, 0x44, 0x47, 0x43, 0x48, 0x4a, 0x49, 0x4b, 0x50, + 0x50, 0x50, 0x50, 0x50, 0x54, 0x57, 0x56, 0x55, 0x5a, 0x5c, 0x5c, 0x5d, + 0x59, 0x59, 0x59, 0x5b, 0x5a, 0x5b, 0x5c, 0x5d, 0x59, 0x5a, 0x5c, 0x5b, + 0x56, 0x56, 0x5c, 0x5c, 0x57, 0x57, 0x57, 0x56, 0x52, 0x49, 0x3a, 0x38, + 0x3e, 0x35, 0x3c, 0x3e, 0x42, 0x48, 0x4b, 0x4c, 0x45, 0x3f, 0x3e, 0x44, + 0x4d, 0x4e, 0x4a, 0x46, 0x4b, 0x4c, 0x4b, 0x4e, 0x4f, 0x49, 0x46, 0x47, + 0x4c, 0x51, 0x50, 0x4f, 0x4f, 0x50, 0x53, 0x56, 0x55, 0x53, 0x52, 0x54, + 0x57, 0x56, 0x56, 0x56, 0x56, 0x56, 0x57, 0x57, 0x57, 0x58, 0x59, 0x59, + 0x5a, 0x5a, 0x5b, 0x5a, 0x59, 0x59, 0x59, 0x59, 0x5a, 0x58, 0x58, 0x57, + 0xbb, 0xbc, 0xc6, 0xd0, 0xc5, 0xb5, 0xbc, 0xa9, 0xae, 0xb8, 0xc1, 0xcf, + 0xce, 0xd1, 0xcc, 0xbd, 0xb7, 0xb8, 0xc5, 0xd5, 0xcc, 0xe1, 0x13, 0x22, + 0x21, 0x21, 0x1c, 0x16, 0x0f, 0x0d, 0x0c, 0x0b, 0x09, 0x04, 0xfe, 0x00, + 0x00, 0x00, 0xfe, 0xf3, 0xed, 0xe7, 0xe6, 0xe4, 0xe2, 0xe4, 0xea, 0xeb, + 0xe5, 0xdf, 0xda, 0xd4, 0xcf, 0xc7, 0xc1, 0xb8, 0xbe, 0xc4, 0xc9, 0xd1, + 0xd7, 0xda, 0xdc, 0xdb, 0xdf, 0xe3, 0xe2, 0xe1, 0xe5, 0xe3, 0xdf, 0xe4, + 0xe7, 0xef, 0xf9, 0xf8, 0xf6, 0x03, 0x07, 0x09, 0x16, 0x1f, 0x23, 0x20, + 0x21, 0x1f, 0x1e, 0x1c, 0x18, 0x18, 0x1b, 0x0c, 0x06, 0x00, 0xfe, 0x03, + 0x06, 0x0f, 0x1b, 0x22, 0x21, 0x23, 0x26, 0x2b, 0x28, 0x2b, 0x2b, 0x2c, + 0x2b, 0x35, 0x3a, 0x35, 0x34, 0x3a, 0x3b, 0x3a, 0x3f, 0x46, 0x48, 0x44, + 0x42, 0x45, 0x44, 0x44, 0x47, 0x47, 0x44, 0x4a, 0x4e, 0x50, 0x50, 
0x4f, + 0x51, 0x55, 0x55, 0x52, 0x57, 0x59, 0x5a, 0x5d, 0x57, 0x58, 0x5b, 0x5a, + 0x59, 0x5b, 0x5d, 0x5d, 0x5b, 0x5c, 0x5b, 0x5c, 0x56, 0x58, 0x5c, 0x59, + 0x57, 0x55, 0x56, 0x50, 0x4a, 0x40, 0x36, 0x39, 0x39, 0x32, 0x38, 0x3e, + 0x43, 0x48, 0x4a, 0x49, 0x41, 0x3d, 0x40, 0x44, 0x48, 0x4a, 0x49, 0x47, + 0x49, 0x4a, 0x48, 0x4d, 0x4d, 0x44, 0x43, 0x44, 0x4a, 0x4f, 0x4f, 0x4c, + 0x4d, 0x4d, 0x4e, 0x4f, 0x50, 0x50, 0x50, 0x51, 0x52, 0x52, 0x55, 0x57, + 0x57, 0x56, 0x56, 0x56, 0x57, 0x57, 0x57, 0x57, 0x58, 0x57, 0x58, 0x58, + 0x59, 0x58, 0x59, 0x59, 0x58, 0x57, 0x57, 0x58, 0xbd, 0xb4, 0xc0, 0xdd, + 0xe3, 0xc8, 0xbb, 0xaf, 0xb1, 0xbc, 0xc2, 0xd5, 0xce, 0xca, 0xc4, 0xb9, + 0xb7, 0xc1, 0xc4, 0xda, 0xda, 0x0c, 0x27, 0x24, 0x20, 0x1f, 0x1e, 0x18, + 0x11, 0x0d, 0x0b, 0x0b, 0x09, 0x03, 0xfb, 0xf3, 0xf7, 0xfb, 0xf9, 0xf2, + 0xe6, 0xe3, 0xde, 0xda, 0xdb, 0xdf, 0xe4, 0xe5, 0xe2, 0xdc, 0xd6, 0xd3, + 0xcf, 0xc1, 0xb5, 0xb1, 0xb9, 0xc4, 0xcb, 0xd0, 0xd2, 0xd5, 0xd0, 0xd4, + 0xdb, 0xdf, 0xdc, 0xdd, 0xe1, 0xdb, 0xda, 0xe2, 0xe6, 0xed, 0xf8, 0xf3, + 0xf4, 0x01, 0x05, 0x08, 0x15, 0x1d, 0x1f, 0x1d, 0x1d, 0x1c, 0x18, 0x18, + 0x12, 0x12, 0x18, 0x0e, 0x07, 0x01, 0xff, 0x03, 0x09, 0x11, 0x17, 0x1f, + 0x20, 0x1f, 0x22, 0x25, 0x25, 0x26, 0x27, 0x28, 0x28, 0x2d, 0x35, 0x37, + 0x31, 0x37, 0x3c, 0x3a, 0x3d, 0x42, 0x45, 0x44, 0x42, 0x45, 0x44, 0x43, + 0x45, 0x44, 0x40, 0x42, 0x48, 0x4b, 0x4d, 0x4c, 0x4e, 0x51, 0x53, 0x52, + 0x55, 0x59, 0x5a, 0x5b, 0x59, 0x57, 0x5b, 0x5a, 0x5a, 0x5c, 0x5c, 0x5a, + 0x5a, 0x5c, 0x5a, 0x59, 0x57, 0x58, 0x5a, 0x57, 0x55, 0x52, 0x51, 0x4a, + 0x41, 0x3e, 0x39, 0x35, 0x31, 0x31, 0x33, 0x3e, 0x44, 0x48, 0x47, 0x48, + 0x3e, 0x38, 0x3e, 0x40, 0x3f, 0x46, 0x47, 0x46, 0x47, 0x45, 0x44, 0x46, + 0x49, 0x3e, 0x3e, 0x43, 0x48, 0x4d, 0x4d, 0x4a, 0x48, 0x4a, 0x4a, 0x4c, + 0x4d, 0x4d, 0x4c, 0x4d, 0x4e, 0x50, 0x52, 0x54, 0x54, 0x53, 0x53, 0x52, + 0x55, 0x54, 0x55, 0x55, 0x56, 0x56, 0x57, 0x57, 0x58, 0x59, 0x59, 0x58, + 0x58, 0x58, 0x57, 0x57, 0xd1, 0xbe, 0xbb, 0xc7, 0xce, 0xd1, 0xbb, 
0xb2, + 0xb4, 0xb7, 0xc5, 0xd4, 0xcb, 0xc7, 0xbb, 0xb7, 0xbb, 0xbb, 0xbc, 0xcd, + 0xdf, 0x1d, 0x25, 0x21, 0x1d, 0x19, 0x18, 0x14, 0x10, 0x0c, 0x08, 0x0c, + 0x0a, 0x06, 0xfb, 0xe9, 0xe9, 0xf1, 0xf8, 0xee, 0xde, 0xdb, 0xdd, 0xd4, + 0xce, 0xda, 0xdb, 0xe1, 0xde, 0xd9, 0xd3, 0xd1, 0xce, 0xc2, 0xbc, 0xb5, + 0xb9, 0xc8, 0xcc, 0xcd, 0xcf, 0xcf, 0xcb, 0xce, 0xd7, 0xd8, 0xd0, 0xda, + 0xda, 0xd4, 0xda, 0xdb, 0xe5, 0xef, 0xf3, 0xef, 0xf0, 0xff, 0x01, 0x09, + 0x13, 0x19, 0x1e, 0x1a, 0x15, 0x17, 0x12, 0x12, 0x0c, 0x0d, 0x11, 0x0d, + 0x08, 0x02, 0x03, 0x06, 0x0c, 0x12, 0x16, 0x1d, 0x1d, 0x1f, 0x1d, 0x1f, + 0x23, 0x25, 0x25, 0x29, 0x29, 0x2a, 0x2d, 0x35, 0x32, 0x35, 0x3c, 0x3c, + 0x3b, 0x3d, 0x3e, 0x41, 0x40, 0x42, 0x45, 0x40, 0x43, 0x43, 0x3e, 0x3d, + 0x44, 0x49, 0x4a, 0x4a, 0x4a, 0x50, 0x50, 0x50, 0x51, 0x56, 0x57, 0x59, + 0x5b, 0x57, 0x5a, 0x5b, 0x5a, 0x5c, 0x5c, 0x5a, 0x59, 0x59, 0x59, 0x57, + 0x57, 0x57, 0x57, 0x57, 0x53, 0x4f, 0x4d, 0x44, 0x3c, 0x36, 0x39, 0x37, + 0x35, 0x34, 0x35, 0x3e, 0x44, 0x45, 0x44, 0x44, 0x38, 0x30, 0x39, 0x3d, + 0x39, 0x3f, 0x3f, 0x42, 0x42, 0x41, 0x41, 0x41, 0x44, 0x41, 0x3a, 0x3f, + 0x44, 0x47, 0x49, 0x45, 0x44, 0x45, 0x44, 0x49, 0x4a, 0x4a, 0x49, 0x48, + 0x4a, 0x4a, 0x4d, 0x50, 0x50, 0x50, 0x50, 0x50, 0x52, 0x51, 0x53, 0x54, + 0x54, 0x55, 0x56, 0x56, 0x57, 0x57, 0x57, 0x56, 0x57, 0x57, 0x57, 0x55, + 0xbd, 0xd0, 0xc0, 0xc1, 0xc7, 0xce, 0xc7, 0xb4, 0xaf, 0xb2, 0xc8, 0xd2, + 0xc9, 0xc6, 0xb9, 0xbd, 0xbb, 0xb5, 0xbb, 0xd2, 0xe1, 0x16, 0x21, 0x18, + 0x18, 0x0e, 0xff, 0x0c, 0x0b, 0x06, 0x02, 0x08, 0x0a, 0x05, 0xf5, 0xe3, + 0xda, 0xe1, 0xee, 0xed, 0xdc, 0xce, 0xd8, 0xda, 0xd7, 0xd4, 0xd1, 0xda, + 0xda, 0xd4, 0xcc, 0xce, 0xc7, 0xc0, 0xb8, 0xc0, 0xc1, 0xc7, 0xcb, 0xc8, + 0xd0, 0xcb, 0xc5, 0xce, 0xd4, 0xce, 0xce, 0xd8, 0xd3, 0xd3, 0xd8, 0xd8, + 0xe3, 0xec, 0xed, 0xee, 0xf2, 0xfa, 0x00, 0x0a, 0x14, 0x18, 0x18, 0x16, + 0x0f, 0x0e, 0x0e, 0x0c, 0x05, 0x08, 0x09, 0x06, 0x07, 0x02, 0x06, 0x08, + 0x0b, 0x11, 0x15, 0x1d, 0x1d, 0x19, 0x1a, 0x1e, 0x21, 0x26, 0x24, 
0x26, + 0x2a, 0x2a, 0x2b, 0x2d, 0x30, 0x31, 0x38, 0x3c, 0x3d, 0x39, 0x37, 0x3a, + 0x3e, 0x3d, 0x44, 0x43, 0x3d, 0x40, 0x3e, 0x3c, 0x3e, 0x44, 0x48, 0x49, + 0x49, 0x4d, 0x4e, 0x4e, 0x4d, 0x53, 0x56, 0x56, 0x58, 0x59, 0x59, 0x5a, + 0x5a, 0x5c, 0x5c, 0x59, 0x58, 0x58, 0x57, 0x57, 0x57, 0x57, 0x55, 0x52, + 0x50, 0x4e, 0x4a, 0x40, 0x3b, 0x37, 0x39, 0x3c, 0x3a, 0x3a, 0x3c, 0x3e, + 0x42, 0x42, 0x43, 0x3e, 0x2d, 0x28, 0x34, 0x38, 0x32, 0x37, 0x34, 0x3a, + 0x3b, 0x3b, 0x3e, 0x3e, 0x40, 0x41, 0x3e, 0x3a, 0x3f, 0x40, 0x44, 0x44, + 0x43, 0x43, 0x42, 0x43, 0x44, 0x45, 0x44, 0x44, 0x44, 0x46, 0x49, 0x4b, + 0x4e, 0x4e, 0x4e, 0x4f, 0x4f, 0x50, 0x52, 0x52, 0x53, 0x54, 0x55, 0x55, + 0x56, 0x57, 0x57, 0x56, 0x57, 0x57, 0x57, 0x56, 0xb6, 0xc4, 0xc4, 0xc3, + 0xc3, 0xc0, 0xd5, 0xc9, 0xc3, 0xc8, 0xd2, 0xc9, 0xc9, 0xc6, 0xc6, 0xc9, + 0xbd, 0xb7, 0xbf, 0xd5, 0xdb, 0x0f, 0x18, 0x14, 0x16, 0xf9, 0xed, 0x0c, + 0x06, 0x01, 0xfb, 0x01, 0x00, 0xee, 0xf0, 0xdf, 0xd0, 0xd0, 0xdb, 0xe1, + 0xdd, 0xd3, 0xcf, 0xcf, 0xd5, 0xca, 0xca, 0xd4, 0xd6, 0xd3, 0xcc, 0xcc, + 0xc8, 0xbb, 0xb7, 0xc8, 0xc8, 0xc8, 0xc8, 0xce, 0xd1, 0xc8, 0xbf, 0xd0, + 0xc8, 0xc6, 0xd0, 0xd3, 0xce, 0xd2, 0xd4, 0xd7, 0xe1, 0xe7, 0xe8, 0xee, + 0xee, 0xf4, 0xfc, 0x07, 0x12, 0x15, 0x15, 0x14, 0x0d, 0x0a, 0x0b, 0x09, + 0x01, 0x05, 0x03, 0xfe, 0x04, 0x04, 0x04, 0x07, 0x07, 0x0c, 0x12, 0x18, + 0x1e, 0x18, 0x1a, 0x1f, 0x21, 0x25, 0x24, 0x23, 0x27, 0x2a, 0x2b, 0x2d, + 0x2b, 0x2d, 0x34, 0x3b, 0x40, 0x36, 0x34, 0x37, 0x3b, 0x3c, 0x41, 0x44, + 0x3b, 0x3c, 0x3e, 0x3d, 0x3a, 0x3d, 0x46, 0x45, 0x49, 0x49, 0x4e, 0x4d, + 0x4a, 0x50, 0x55, 0x57, 0x57, 0x59, 0x59, 0x59, 0x5a, 0x5d, 0x5c, 0x57, + 0x57, 0x58, 0x57, 0x52, 0x54, 0x57, 0x50, 0x4d, 0x4b, 0x4a, 0x46, 0x42, + 0x43, 0x43, 0x43, 0x43, 0x41, 0x41, 0x40, 0x3b, 0x3d, 0x3f, 0x3e, 0x33, + 0x26, 0x25, 0x2d, 0x35, 0x2e, 0x2f, 0x2d, 0x31, 0x36, 0x37, 0x3c, 0x3e, + 0x3d, 0x3c, 0x3e, 0x39, 0x39, 0x3a, 0x3d, 0x3d, 0x3e, 0x40, 0x3f, 0x40, + 0x3f, 0x40, 0x42, 0x41, 0x41, 0x43, 0x45, 0x49, 0x4b, 0x4c, 0x4b, 
0x4c, + 0x4d, 0x4e, 0x4e, 0x50, 0x50, 0x51, 0x53, 0x54, 0x54, 0x54, 0x53, 0x56, + 0x56, 0x54, 0x53, 0x52, 0xb7, 0xbd, 0xc6, 0xc8, 0xbd, 0xcd, 0xda, 0xce, + 0xc8, 0xce, 0xd0, 0xca, 0xcd, 0xcb, 0xc8, 0xc9, 0xbf, 0xb7, 0xc0, 0xd4, + 0xda, 0x0e, 0xee, 0x0a, 0x12, 0xe7, 0xf2, 0x0c, 0x06, 0xfd, 0xf9, 0xfe, + 0xf3, 0xdf, 0xed, 0xda, 0xc8, 0xcc, 0xd1, 0xce, 0xc4, 0xd5, 0xce, 0xc4, + 0xcd, 0xc5, 0xc7, 0xce, 0xd3, 0xd2, 0xc9, 0xcc, 0xc9, 0xbc, 0xba, 0xcf, + 0xc7, 0xc5, 0xce, 0xd4, 0xcb, 0xce, 0xc8, 0xce, 0xba, 0xc3, 0xce, 0xcb, + 0xc8, 0xd0, 0xd0, 0xd9, 0xde, 0xe2, 0xe5, 0xeb, 0xe9, 0xed, 0xf5, 0x08, + 0x14, 0x11, 0x11, 0x11, 0x0b, 0x08, 0x0a, 0x07, 0x04, 0xfe, 0x00, 0xfb, + 0x00, 0xff, 0xf9, 0x01, 0x02, 0x08, 0x0c, 0x0d, 0x13, 0x17, 0x18, 0x1f, + 0x21, 0x21, 0x20, 0x23, 0x20, 0x25, 0x25, 0x2c, 0x2a, 0x2a, 0x30, 0x36, + 0x3e, 0x39, 0x31, 0x36, 0x39, 0x3b, 0x3f, 0x43, 0x3b, 0x3c, 0x3f, 0x41, + 0x3c, 0x39, 0x41, 0x44, 0x47, 0x46, 0x4d, 0x4b, 0x48, 0x4b, 0x51, 0x55, + 0x57, 0x58, 0x59, 0x58, 0x59, 0x5b, 0x5c, 0x58, 0x57, 0x57, 0x56, 0x52, + 0x54, 0x56, 0x4f, 0x48, 0x46, 0x46, 0x46, 0x48, 0x4a, 0x4c, 0x4c, 0x4c, + 0x49, 0x47, 0x45, 0x42, 0x40, 0x3e, 0x39, 0x31, 0x2a, 0x25, 0x26, 0x2e, + 0x2c, 0x29, 0x29, 0x26, 0x29, 0x36, 0x37, 0x39, 0x3a, 0x38, 0x3c, 0x3b, + 0x39, 0x35, 0x34, 0x37, 0x39, 0x3c, 0x3c, 0x3e, 0x3e, 0x3e, 0x3f, 0x3d, + 0x3c, 0x3f, 0x42, 0x46, 0x49, 0x49, 0x48, 0x49, 0x4a, 0x4a, 0x4a, 0x4a, + 0x4a, 0x4d, 0x4d, 0x4c, 0x4b, 0x50, 0x50, 0x52, 0x52, 0x53, 0x51, 0x50, + 0xb6, 0xbb, 0xc8, 0xc9, 0xcb, 0xce, 0xc6, 0xc8, 0xc7, 0xcf, 0xcd, 0xca, + 0xce, 0xcb, 0xc6, 0xc4, 0xca, 0xbd, 0xc4, 0xd6, 0xe0, 0x06, 0xde, 0x10, + 0x0a, 0xde, 0xf9, 0x0b, 0x03, 0xfa, 0xf4, 0xf1, 0xde, 0xce, 0xe1, 0xd2, + 0xc2, 0xc6, 0xcf, 0xc8, 0xb6, 0xb9, 0xc8, 0xce, 0xc8, 0xbb, 0xc5, 0xce, + 0xcd, 0xd0, 0xca, 0xce, 0xc9, 0xbe, 0xbc, 0xc8, 0xc9, 0xc4, 0xd8, 0xd9, + 0xc3, 0xc2, 0xcb, 0xc4, 0xb7, 0xbb, 0xc5, 0xcb, 0xc7, 0xcc, 0xcf, 0xd4, + 0xd8, 0xdf, 0xe1, 0xe4, 0xe6, 0xe8, 0xf5, 0x0e, 0x12, 0x0e, 0x0d, 
0x0c, + 0x0a, 0x07, 0x09, 0x04, 0x04, 0x00, 0x00, 0xfd, 0xfa, 0xfc, 0xf7, 0xfb, + 0x00, 0x05, 0x06, 0x09, 0x0c, 0x11, 0x16, 0x1c, 0x23, 0x1f, 0x1b, 0x21, + 0x1f, 0x20, 0x22, 0x26, 0x2a, 0x2b, 0x2f, 0x31, 0x38, 0x3e, 0x34, 0x36, + 0x37, 0x3c, 0x3d, 0x43, 0x3d, 0x3d, 0x3f, 0x44, 0x3f, 0x3c, 0x3b, 0x43, + 0x48, 0x47, 0x4a, 0x4a, 0x44, 0x48, 0x4d, 0x51, 0x56, 0x59, 0x59, 0x58, + 0x59, 0x59, 0x5b, 0x59, 0x57, 0x57, 0x55, 0x54, 0x55, 0x54, 0x4d, 0x47, + 0x4a, 0x4b, 0x4c, 0x4b, 0x4c, 0x4c, 0x4b, 0x4a, 0x49, 0x49, 0x46, 0x44, + 0x43, 0x40, 0x3b, 0x37, 0x31, 0x2a, 0x26, 0x26, 0x22, 0x1e, 0x25, 0x25, + 0x22, 0x2d, 0x34, 0x34, 0x33, 0x35, 0x33, 0x37, 0x37, 0x36, 0x32, 0x31, + 0x31, 0x31, 0x32, 0x36, 0x3d, 0x3c, 0x3c, 0x3c, 0x3a, 0x3d, 0x40, 0x44, + 0x45, 0x44, 0x44, 0x45, 0x47, 0x47, 0x46, 0x48, 0x4a, 0x49, 0x45, 0x3c, + 0x37, 0x46, 0x4b, 0x4e, 0x4f, 0x50, 0x50, 0x50, 0xb7, 0xc5, 0xc8, 0xc7, + 0xcf, 0xce, 0xcd, 0xd1, 0xc5, 0xd0, 0xce, 0xc9, 0xcd, 0xc0, 0xbd, 0xc4, + 0xc1, 0xc1, 0xcc, 0xd5, 0xe7, 0xf6, 0xdc, 0x13, 0xfb, 0xda, 0xfe, 0x00, + 0xf6, 0xed, 0xf2, 0xe8, 0xd0, 0xcf, 0xd1, 0xca, 0xbf, 0xbb, 0xc8, 0xc0, + 0xb8, 0xae, 0xb8, 0xce, 0xc6, 0xbb, 0xce, 0xce, 0xc7, 0xc3, 0xbc, 0xcd, + 0xc8, 0xbf, 0xbe, 0xc1, 0xc7, 0xc8, 0xdb, 0xd7, 0xc2, 0xc5, 0xc4, 0xbf, + 0xbb, 0xb9, 0xc0, 0xc8, 0xc8, 0xce, 0xd0, 0xce, 0xd3, 0xdf, 0xe3, 0xe1, + 0xe1, 0xe6, 0xfc, 0x12, 0x0f, 0x0c, 0x06, 0x07, 0x09, 0x07, 0x06, 0x05, + 0x01, 0x01, 0xff, 0xfe, 0xfe, 0xfb, 0xf6, 0xf9, 0xff, 0x03, 0x02, 0x05, + 0x09, 0x0d, 0x11, 0x15, 0x20, 0x1d, 0x1c, 0x1c, 0x1d, 0x21, 0x21, 0x25, + 0x2c, 0x2c, 0x31, 0x34, 0x37, 0x3b, 0x3a, 0x37, 0x39, 0x3d, 0x3e, 0x42, + 0x40, 0x3e, 0x42, 0x48, 0x42, 0x3f, 0x3d, 0x3e, 0x47, 0x49, 0x49, 0x47, + 0x3e, 0x44, 0x4b, 0x50, 0x52, 0x58, 0x5a, 0x57, 0x58, 0x58, 0x59, 0x58, + 0x56, 0x57, 0x57, 0x57, 0x57, 0x54, 0x4f, 0x4c, 0x4e, 0x4e, 0x4c, 0x4a, + 0x4a, 0x48, 0x44, 0x42, 0x42, 0x40, 0x3e, 0x3e, 0x3e, 0x3d, 0x3d, 0x37, + 0x31, 0x2c, 0x26, 0x23, 0x1e, 0x17, 0x1b, 0x1e, 0x1f, 0x23, 0x2c, 
0x31, + 0x31, 0x32, 0x31, 0x31, 0x33, 0x32, 0x2e, 0x28, 0x2c, 0x2b, 0x2c, 0x34, + 0x3d, 0x37, 0x34, 0x37, 0x34, 0x38, 0x3a, 0x3d, 0x3f, 0x40, 0x3e, 0x3f, + 0x43, 0x43, 0x43, 0x44, 0x45, 0x45, 0x44, 0x3a, 0x38, 0x46, 0x49, 0x4a, + 0x4a, 0x4c, 0x4d, 0x4e, 0xbc, 0xc8, 0xc0, 0xcc, 0xcc, 0xc8, 0xd6, 0xe8, + 0xd4, 0xd4, 0xc9, 0xcc, 0xcc, 0xb7, 0xce, 0xc4, 0xbd, 0xc5, 0xd3, 0xd7, + 0xec, 0xe0, 0xe8, 0x16, 0xec, 0xdc, 0xf8, 0xf1, 0xed, 0xe7, 0xe8, 0xe1, + 0xc9, 0xd7, 0xcd, 0xcc, 0xc3, 0xbc, 0xc1, 0xb8, 0xb1, 0xad, 0xbe, 0xc4, + 0xb8, 0xba, 0xbd, 0xc7, 0xc2, 0xbd, 0xbb, 0xcb, 0xc8, 0xc1, 0xbe, 0xbb, + 0xc1, 0xcc, 0xd3, 0xd1, 0xbe, 0xbc, 0xc3, 0xc2, 0xbb, 0xb6, 0xbb, 0xc7, + 0xc9, 0xc8, 0xc6, 0xd0, 0xd5, 0xda, 0xdd, 0xdc, 0xe2, 0xed, 0x06, 0x12, + 0x0d, 0x09, 0x01, 0x05, 0x06, 0x06, 0x05, 0x02, 0xfd, 0xfd, 0xfd, 0xfd, + 0xfd, 0xfa, 0xf9, 0xfe, 0x00, 0x08, 0x03, 0x03, 0x07, 0x08, 0x10, 0x12, + 0x1b, 0x12, 0x06, 0x18, 0x18, 0x1e, 0x25, 0x27, 0x2d, 0x2f, 0x34, 0x35, + 0x38, 0x3a, 0x3d, 0x3b, 0x3c, 0x3c, 0x3f, 0x3f, 0x44, 0x3f, 0x43, 0x4b, + 0x49, 0x44, 0x42, 0x3f, 0x45, 0x49, 0x47, 0x47, 0x3e, 0x41, 0x4a, 0x4e, + 0x50, 0x57, 0x59, 0x57, 0x57, 0x57, 0x57, 0x57, 0x55, 0x55, 0x58, 0x58, + 0x59, 0x57, 0x51, 0x4c, 0x4b, 0x4a, 0x4a, 0x48, 0x40, 0x3d, 0x3e, 0x3f, + 0x40, 0x3e, 0x3d, 0x3c, 0x3c, 0x3a, 0x38, 0x37, 0x37, 0x31, 0x2c, 0x26, + 0x20, 0x1a, 0x1a, 0x1e, 0x1a, 0x1b, 0x20, 0x2b, 0x2e, 0x31, 0x2f, 0x2e, + 0x2c, 0x2c, 0x2c, 0x26, 0x28, 0x2c, 0x2f, 0x3c, 0x3a, 0x30, 0x2c, 0x30, + 0x32, 0x31, 0x36, 0x38, 0x37, 0x37, 0x37, 0x3b, 0x3e, 0x3e, 0x40, 0x40, + 0x40, 0x3f, 0x3d, 0x36, 0x38, 0x43, 0x44, 0x46, 0x46, 0x47, 0x4a, 0x4b, + 0xbb, 0xc3, 0xcb, 0xd4, 0xd9, 0xd9, 0xe7, 0xf9, 0xe2, 0xce, 0xcf, 0xe1, + 0xeb, 0xe6, 0xfb, 0xf6, 0xf0, 0xee, 0xd9, 0xd6, 0xea, 0xd1, 0xf7, 0x0b, + 0xe2, 0xe7, 0xed, 0xeb, 0xe7, 0xe4, 0xdf, 0xdd, 0xc7, 0xd1, 0xcb, 0xc8, + 0xc1, 0xc1, 0xbb, 0xa9, 0xad, 0xba, 0xc4, 0xbf, 0xb6, 0xbd, 0xc2, 0xc6, + 0xc0, 0xba, 0xbe, 0xcc, 0xc6, 0xc0, 0xbf, 0xc1, 0xc4, 0xc8, 0xc8, 
0xca, + 0xd8, 0xcf, 0xc8, 0xbb, 0xb9, 0xb1, 0xbf, 0xc6, 0xc8, 0xc2, 0xc5, 0xd0, + 0xd5, 0xd1, 0xd4, 0xe1, 0xec, 0xfc, 0x0c, 0x0f, 0x08, 0x01, 0x00, 0x03, + 0x00, 0x00, 0x04, 0xfd, 0xf8, 0xfb, 0xfa, 0xfb, 0xfa, 0xfa, 0xfa, 0x00, + 0x05, 0x0a, 0x06, 0x04, 0x07, 0x05, 0x0a, 0x0d, 0x12, 0xf6, 0xf0, 0x15, + 0x19, 0x1d, 0x22, 0x27, 0x2e, 0x30, 0x32, 0x35, 0x38, 0x3a, 0x3b, 0x3b, + 0x3e, 0x3f, 0x41, 0x43, 0x46, 0x44, 0x44, 0x4a, 0x4b, 0x48, 0x45, 0x44, + 0x44, 0x47, 0x48, 0x47, 0x3f, 0x3f, 0x47, 0x4b, 0x4a, 0x52, 0x57, 0x56, + 0x55, 0x54, 0x57, 0x57, 0x55, 0x53, 0x57, 0x57, 0x57, 0x57, 0x51, 0x4a, + 0x47, 0x48, 0x48, 0x46, 0x44, 0x43, 0x44, 0x44, 0x42, 0x41, 0x3f, 0x3f, + 0x3f, 0x3c, 0x38, 0x37, 0x37, 0x32, 0x2d, 0x26, 0x24, 0x1e, 0x18, 0x1a, + 0x11, 0x08, 0x14, 0x23, 0x28, 0x29, 0x2a, 0x2b, 0x2a, 0x2a, 0x2a, 0x26, + 0x26, 0x2e, 0x37, 0x37, 0x31, 0x29, 0x25, 0x28, 0x28, 0x2a, 0x2f, 0x34, + 0x37, 0x37, 0x38, 0x37, 0x37, 0x37, 0x39, 0x39, 0x3b, 0x3b, 0x39, 0x2f, + 0x2f, 0x3d, 0x41, 0x42, 0x44, 0x45, 0x46, 0x45, 0xc3, 0xd3, 0xd6, 0xd2, + 0xd8, 0xd9, 0xe1, 0xe4, 0xfc, 0xf9, 0xe1, 0xff, 0x07, 0x0a, 0xf9, 0x20, + 0x06, 0xe7, 0xda, 0xe1, 0xe5, 0xce, 0xf7, 0xff, 0xd9, 0xe6, 0xdd, 0xe3, + 0xe1, 0xda, 0xdb, 0xe0, 0xc6, 0xc8, 0xcc, 0xcb, 0xc3, 0xbf, 0xbf, 0xb5, + 0xb8, 0xb8, 0xb6, 0xbe, 0xb9, 0xbf, 0xc2, 0xc9, 0xc4, 0xbb, 0xc0, 0xcb, + 0xc6, 0xc1, 0xc0, 0xc9, 0xc8, 0xc8, 0xbb, 0xd5, 0xfe, 0xee, 0xd2, 0xbf, + 0xba, 0xbd, 0xcf, 0xc2, 0xbb, 0xbd, 0xcd, 0xd6, 0xda, 0xce, 0xe1, 0xf1, + 0xf4, 0x01, 0x0b, 0x07, 0xf9, 0xfd, 0xfa, 0xfc, 0xfb, 0xf9, 0xf9, 0xf7, + 0xef, 0xf2, 0xf4, 0xf9, 0xf8, 0xf9, 0xfc, 0x03, 0x05, 0x08, 0x07, 0x04, + 0x06, 0x09, 0x09, 0x0c, 0x08, 0xe4, 0xf8, 0x12, 0x1b, 0x1e, 0x21, 0x24, + 0x2d, 0x31, 0x32, 0x35, 0x38, 0x3b, 0x39, 0x3a, 0x3e, 0x41, 0x41, 0x46, + 0x48, 0x48, 0x46, 0x4b, 0x4d, 0x4c, 0x48, 0x46, 0x46, 0x47, 0x49, 0x47, + 0x40, 0x41, 0x45, 0x48, 0x47, 0x4d, 0x54, 0x56, 0x52, 0x50, 0x54, 0x56, + 0x55, 0x53, 0x54, 0x55, 0x54, 0x55, 0x56, 0x52, 0x4e, 0x4c, 0x48, 
0x48, + 0x49, 0x49, 0x48, 0x48, 0x46, 0x44, 0x42, 0x41, 0x3e, 0x3b, 0x38, 0x35, + 0x34, 0x31, 0x2e, 0x29, 0x24, 0x20, 0x1c, 0x17, 0x07, 0xff, 0x05, 0x16, + 0x22, 0x23, 0x1f, 0x27, 0x27, 0x25, 0x21, 0x1e, 0x1d, 0x23, 0x2a, 0x2b, + 0x24, 0x1f, 0x1f, 0x1f, 0x1c, 0x21, 0x27, 0x2b, 0x2e, 0x31, 0x34, 0x37, + 0x37, 0x37, 0x36, 0x33, 0x2d, 0x2c, 0x2b, 0x20, 0x2b, 0x37, 0x3b, 0x3e, + 0x3e, 0x40, 0x42, 0x43, 0xd4, 0xd5, 0xd5, 0xcd, 0xc5, 0xda, 0xdc, 0xe2, + 0xf9, 0xf2, 0xe1, 0x14, 0xf5, 0xe6, 0xf7, 0x04, 0xde, 0xdf, 0xde, 0xe7, + 0xdf, 0xdb, 0x05, 0xf4, 0xda, 0xe1, 0xcc, 0xcc, 0xd5, 0xd4, 0xd5, 0xce, + 0xc9, 0xc1, 0xcc, 0xcb, 0xc1, 0xc4, 0xc1, 0xcf, 0xcf, 0xb9, 0xb3, 0xbe, + 0xc2, 0xc9, 0xc7, 0xc8, 0xc2, 0xbc, 0xc5, 0xc8, 0xb7, 0xbc, 0xbf, 0xc1, + 0xc1, 0xc7, 0xbe, 0xb7, 0xd0, 0xd7, 0xd4, 0xd0, 0xc7, 0xc5, 0xda, 0xc2, + 0xb6, 0xc1, 0xd3, 0xd5, 0xd3, 0xdb, 0xe8, 0xec, 0xf6, 0x01, 0x01, 0xfc, + 0xf7, 0xf5, 0xf4, 0xf9, 0xec, 0xe4, 0xeb, 0xef, 0xe5, 0xe5, 0xed, 0xf4, + 0xf4, 0xf6, 0xfb, 0xff, 0x00, 0x03, 0x03, 0x00, 0x00, 0x05, 0x0a, 0x0d, + 0xfe, 0xe1, 0x05, 0x11, 0x1a, 0x1f, 0x1f, 0x22, 0x28, 0x2f, 0x33, 0x32, + 0x37, 0x38, 0x38, 0x3b, 0x3e, 0x40, 0x3f, 0x46, 0x48, 0x49, 0x49, 0x4a, + 0x4c, 0x4c, 0x4d, 0x4d, 0x49, 0x49, 0x48, 0x49, 0x42, 0x44, 0x46, 0x43, + 0x42, 0x44, 0x4d, 0x54, 0x51, 0x4e, 0x4f, 0x50, 0x50, 0x4e, 0x4e, 0x50, + 0x50, 0x51, 0x53, 0x55, 0x53, 0x52, 0x50, 0x4d, 0x4b, 0x49, 0x45, 0x45, + 0x44, 0x3e, 0x3b, 0x3a, 0x39, 0x38, 0x36, 0x35, 0x33, 0x30, 0x2d, 0x2c, + 0x27, 0x21, 0x18, 0x19, 0x11, 0x06, 0x00, 0x05, 0x11, 0x1b, 0x1d, 0x1f, + 0x22, 0x20, 0x1e, 0x14, 0x10, 0x13, 0x19, 0x20, 0x1b, 0x15, 0x12, 0x10, + 0x18, 0x21, 0x23, 0x23, 0x25, 0x27, 0x2a, 0x2e, 0x30, 0x33, 0x33, 0x31, + 0x2b, 0x24, 0x24, 0x1f, 0x28, 0x32, 0x32, 0x33, 0x35, 0x39, 0x3d, 0x3e, + 0xe0, 0xcd, 0xd1, 0xc6, 0xb5, 0xd1, 0xe4, 0xe0, 0xd3, 0xce, 0xe7, 0xea, + 0xe7, 0xe7, 0xe7, 0xe1, 0xdf, 0xe1, 0xd4, 0xd4, 0xd3, 0xe7, 0xfd, 0xed, + 0xd3, 0xd0, 0xcd, 0xc6, 0xce, 0xda, 0xce, 0xc9, 0xce, 0xcd, 0xcb, 
0xc3, + 0xc4, 0xbb, 0xc0, 0xca, 0xc8, 0xc5, 0xba, 0xbe, 0xc1, 0xc3, 0xcb, 0xc8, + 0xbe, 0xb0, 0xc2, 0xc5, 0xb0, 0xc8, 0xcf, 0xbd, 0xb5, 0xc4, 0xc6, 0xbf, + 0xcc, 0xcf, 0xd3, 0xcb, 0xc8, 0xce, 0xd2, 0xc3, 0xb8, 0xca, 0xd4, 0xd4, + 0xd4, 0xdd, 0xe1, 0xea, 0xf8, 0xf9, 0xf6, 0xf4, 0xf3, 0xed, 0xe7, 0xdf, + 0xd4, 0xd1, 0xdc, 0xe4, 0xd8, 0xd9, 0xe7, 0xec, 0xed, 0xf0, 0xf6, 0xf9, + 0xfb, 0xfa, 0xfd, 0xfc, 0xfd, 0x02, 0x08, 0x08, 0xef, 0xe8, 0x0b, 0x13, + 0x19, 0x1f, 0x1d, 0x20, 0x24, 0x2a, 0x34, 0x33, 0x35, 0x33, 0x39, 0x3b, + 0x3b, 0x3b, 0x3b, 0x44, 0x48, 0x4a, 0x4a, 0x4a, 0x4f, 0x4b, 0x4c, 0x50, + 0x4c, 0x4a, 0x49, 0x4b, 0x47, 0x47, 0x4a, 0x3d, 0x37, 0x3d, 0x43, 0x4d, + 0x50, 0x4d, 0x4c, 0x4e, 0x4d, 0x4f, 0x50, 0x4f, 0x4f, 0x50, 0x4f, 0x4f, + 0x4f, 0x4c, 0x4a, 0x48, 0x46, 0x41, 0x30, 0x30, 0x38, 0x37, 0x35, 0x32, + 0x2b, 0x2b, 0x2b, 0x28, 0x26, 0x2b, 0x28, 0x27, 0x25, 0x22, 0x1f, 0x1b, + 0x13, 0x0a, 0x04, 0xfc, 0xfe, 0x0c, 0x14, 0x16, 0x19, 0x1d, 0x16, 0x06, + 0x00, 0x06, 0x0a, 0x12, 0x12, 0x10, 0x09, 0x0c, 0x13, 0x1d, 0x1f, 0x21, + 0x22, 0x23, 0x22, 0x26, 0x25, 0x29, 0x2d, 0x31, 0x30, 0x29, 0x2a, 0x26, + 0x2b, 0x30, 0x2f, 0x30, 0x31, 0x31, 0x32, 0x34, 0xe2, 0xd5, 0xce, 0xc2, + 0xb0, 0xca, 0xe3, 0xd8, 0xcb, 0xd4, 0xcb, 0xde, 0xed, 0xe1, 0xd2, 0xd5, + 0xd9, 0xd1, 0xcb, 0xd4, 0xce, 0xe4, 0xda, 0xd3, 0xce, 0xd1, 0xda, 0xc6, + 0xc8, 0xd0, 0xcb, 0xce, 0xcf, 0xc1, 0xa9, 0xb5, 0xc7, 0xc0, 0xc4, 0xb6, + 0xab, 0xb5, 0xc2, 0xc1, 0xbf, 0xcb, 0xce, 0xc3, 0xbc, 0xb4, 0xca, 0xc6, + 0xbc, 0xe1, 0xdd, 0xb1, 0xb3, 0xc4, 0xc8, 0xc8, 0xce, 0xce, 0xd3, 0xc9, + 0xc1, 0xc5, 0xc7, 0xc6, 0xc7, 0xd4, 0xd2, 0xce, 0xd5, 0xd8, 0xdd, 0xeb, + 0xf3, 0xed, 0xea, 0xed, 0xe6, 0xde, 0xd4, 0xcb, 0xc5, 0xc8, 0xcf, 0xd4, + 0xcc, 0xd8, 0xe1, 0xe3, 0xe2, 0xe7, 0xed, 0xf1, 0xf5, 0xf4, 0xf9, 0xfb, + 0xfe, 0x02, 0x0a, 0x02, 0xe3, 0xf3, 0x0c, 0x16, 0x17, 0x1b, 0x1b, 0x1d, + 0x22, 0x27, 0x33, 0x32, 0x33, 0x34, 0x39, 0x3d, 0x3b, 0x3b, 0x3c, 0x41, + 0x44, 0x4a, 0x4f, 0x4c, 0x4d, 0x4a, 0x4d, 0x50, 0x50, 0x4c, 0x4c, 
0x4e, + 0x4d, 0x4a, 0x4b, 0x40, 0x38, 0x3a, 0x3a, 0x3f, 0x4a, 0x4d, 0x4c, 0x4c, + 0x4c, 0x4c, 0x4e, 0x4e, 0x4d, 0x4d, 0x4d, 0x4c, 0x4a, 0x49, 0x45, 0x44, + 0x41, 0x3c, 0x2f, 0x26, 0x2c, 0x32, 0x31, 0x30, 0x2a, 0x27, 0x26, 0x21, + 0x1b, 0x1b, 0x1a, 0x14, 0x14, 0x0f, 0x0c, 0x0d, 0x09, 0xfe, 0xfa, 0xf7, + 0xf9, 0xfc, 0x0a, 0x10, 0x0f, 0x14, 0x12, 0x02, 0x00, 0xfd, 0xfa, 0xfa, + 0xfe, 0x00, 0x00, 0x0d, 0x10, 0x13, 0x1e, 0x20, 0x1c, 0x19, 0x1c, 0x20, + 0x1f, 0x22, 0x24, 0x24, 0x24, 0x21, 0x23, 0x27, 0x28, 0x2b, 0x26, 0x26, + 0x22, 0x25, 0x29, 0x26, 0xdd, 0xdc, 0xcd, 0xc1, 0xbf, 0xdc, 0xe9, 0xde, + 0xde, 0xf4, 0xce, 0xda, 0xe4, 0xd5, 0xc2, 0xcf, 0xdd, 0xd5, 0xd4, 0xd5, + 0xca, 0xca, 0xc9, 0xcf, 0xcf, 0xc2, 0xd8, 0xc9, 0xc5, 0xc9, 0xc8, 0xcf, + 0xbc, 0xa7, 0x9f, 0xc0, 0xc8, 0xd7, 0xe1, 0xc1, 0xa7, 0xab, 0xc2, 0xc1, + 0xbc, 0xc8, 0xc5, 0xc2, 0xb5, 0xb0, 0xd9, 0xd2, 0xcc, 0xda, 0xd8, 0xbb, + 0xbd, 0xc1, 0xc8, 0xc8, 0xc1, 0xcb, 0xcf, 0xcf, 0xc1, 0xbb, 0xc1, 0xc5, + 0xce, 0xd5, 0xd2, 0xd3, 0xd4, 0xcf, 0xd6, 0xe7, 0xe3, 0xd8, 0xd3, 0xd8, + 0xd4, 0xd4, 0xd0, 0xc2, 0xbb, 0xc5, 0xc5, 0xc8, 0xc6, 0xd4, 0xd8, 0xd4, + 0xd5, 0xe0, 0xe6, 0xe7, 0xec, 0xf1, 0xf5, 0xf9, 0xfd, 0x04, 0x0a, 0xfc, + 0xde, 0xff, 0x0b, 0x17, 0x18, 0x19, 0x17, 0x1b, 0x22, 0x27, 0x31, 0x34, + 0x31, 0x32, 0x3b, 0x3e, 0x3a, 0x3a, 0x3d, 0x40, 0x44, 0x45, 0x4e, 0x4c, + 0x4a, 0x48, 0x48, 0x4e, 0x4f, 0x4d, 0x4c, 0x4d, 0x4e, 0x4c, 0x4d, 0x45, + 0x40, 0x41, 0x43, 0x41, 0x44, 0x46, 0x45, 0x46, 0x47, 0x48, 0x47, 0x49, + 0x48, 0x4a, 0x49, 0x49, 0x45, 0x46, 0x44, 0x42, 0x3e, 0x3a, 0x31, 0x1f, + 0x15, 0x21, 0x25, 0x25, 0x25, 0x24, 0x23, 0x23, 0x1c, 0x15, 0x14, 0x0c, + 0x05, 0x00, 0xf5, 0xf5, 0xfe, 0xfb, 0xf4, 0xe9, 0xe8, 0xf0, 0xf8, 0x00, + 0x00, 0xff, 0x08, 0x05, 0xfe, 0xf9, 0xf9, 0xee, 0xea, 0xe3, 0xed, 0xf7, + 0x06, 0x09, 0x12, 0x10, 0x15, 0x1e, 0x1b, 0x18, 0x17, 0x12, 0x11, 0x16, + 0x15, 0x13, 0x13, 0x15, 0x15, 0x22, 0x26, 0x25, 0x25, 0x1f, 0x20, 0x21, + 0xd3, 0xe1, 0xce, 0xc7, 0xce, 0xdc, 0xe7, 0xdf, 0xe5, 0xec, 0xd2, 
0xde, + 0xe1, 0xcc, 0xc7, 0xc8, 0xd6, 0xdb, 0xe0, 0xd1, 0xc5, 0xcc, 0xd2, 0xcf, + 0xd4, 0xd1, 0xe0, 0xcc, 0xc4, 0xcd, 0xd4, 0xc2, 0xad, 0xa0, 0xa2, 0xb8, + 0xcd, 0xf6, 0xfd, 0xd3, 0xb1, 0xb7, 0xcd, 0xde, 0xc1, 0xbb, 0xcc, 0xc8, + 0xb3, 0xb6, 0xca, 0xd9, 0xc6, 0xcf, 0xd0, 0xb1, 0xb0, 0xb6, 0xc7, 0xc9, + 0xc1, 0xca, 0xcc, 0xcf, 0xbb, 0xc4, 0xc6, 0xca, 0xd0, 0xd2, 0xc9, 0xc1, + 0xbf, 0xc6, 0xd4, 0xd8, 0xcd, 0xc0, 0xbb, 0xc9, 0xce, 0xd2, 0xcf, 0xbe, + 0xbc, 0xc1, 0xbe, 0xc1, 0xc5, 0xcb, 0xc5, 0xc3, 0xc8, 0xd7, 0xd6, 0xd7, + 0xde, 0xe6, 0xf1, 0xf7, 0xf9, 0x00, 0x08, 0xed, 0xe3, 0x07, 0x08, 0x10, + 0x18, 0x17, 0x11, 0x1d, 0x26, 0x27, 0x2f, 0x31, 0x30, 0x31, 0x3b, 0x3b, + 0x36, 0x39, 0x3d, 0x42, 0x42, 0x42, 0x4a, 0x4b, 0x3f, 0x43, 0x41, 0x44, + 0x49, 0x4c, 0x49, 0x49, 0x47, 0x46, 0x4a, 0x49, 0x45, 0x43, 0x45, 0x4a, + 0x48, 0x49, 0x4a, 0x4a, 0x4a, 0x49, 0x49, 0x49, 0x44, 0x44, 0x44, 0x40, + 0x37, 0x3d, 0x3c, 0x39, 0x33, 0x2d, 0x25, 0x13, 0x06, 0x0a, 0x16, 0x14, + 0x13, 0x13, 0x0c, 0x0c, 0x10, 0x0b, 0x06, 0xff, 0xfe, 0xf6, 0xec, 0xe6, + 0xe7, 0xde, 0xda, 0xe6, 0xda, 0xe0, 0xed, 0xe4, 0xf3, 0xf1, 0xf0, 0xef, + 0xf6, 0xf0, 0xf7, 0xf1, 0xe4, 0xda, 0xda, 0xe3, 0xf6, 0xf9, 0x01, 0xf5, + 0x00, 0x11, 0x1b, 0x18, 0x16, 0x10, 0x04, 0xff, 0x05, 0x06, 0x06, 0x08, + 0x06, 0x06, 0x0a, 0x04, 0x06, 0x0a, 0x12, 0x0d, 0xcb, 0xe2, 0xd6, 0xd1, + 0xd7, 0xe0, 0xe0, 0xd6, 0xcb, 0xc3, 0xd1, 0xe1, 0xdd, 0xd1, 0xce, 0xcc, + 0xd7, 0xda, 0xd5, 0xc8, 0xc9, 0xcd, 0xd0, 0xcd, 0xcd, 0xd7, 0xe1, 0xce, + 0xc3, 0xcb, 0xd2, 0xc3, 0xb2, 0xa9, 0xa8, 0xb3, 0xc5, 0xe8, 0xef, 0xd3, + 0xc1, 0xc1, 0xc7, 0xd1, 0xb9, 0xbb, 0xc8, 0xcb, 0xcc, 0xc8, 0xc7, 0xd5, + 0xcc, 0xc8, 0xc3, 0xad, 0xac, 0xb3, 0xce, 0xcc, 0xcb, 0xd1, 0xcc, 0xd0, + 0xbe, 0xd6, 0xd6, 0xd6, 0xd3, 0xce, 0xc9, 0xb9, 0xbb, 0xd3, 0xcd, 0xce, + 0xc1, 0xc0, 0xbb, 0xc2, 0xcc, 0xcc, 0xcc, 0xc7, 0xc7, 0xc6, 0xc1, 0xc0, + 0xbe, 0xba, 0xb9, 0xc0, 0xc7, 0xcd, 0xcb, 0xce, 0xd6, 0xde, 0xe8, 0xf2, + 0xf3, 0xfb, 0x02, 0xe2, 0xf0, 0x08, 0x06, 0x08, 0x15, 0x12, 0x11, 
0x1d, + 0x26, 0x29, 0x2e, 0x30, 0x2f, 0x2d, 0x37, 0x35, 0x36, 0x37, 0x3d, 0x41, + 0x3d, 0x3b, 0x41, 0x44, 0x3b, 0x3b, 0x37, 0x39, 0x43, 0x49, 0x46, 0x40, + 0x40, 0x3e, 0x3f, 0x44, 0x45, 0x46, 0x44, 0x43, 0x46, 0x4a, 0x48, 0x47, + 0x47, 0x46, 0x44, 0x44, 0x3e, 0x3b, 0x38, 0x35, 0x2b, 0x31, 0x31, 0x29, + 0x25, 0x1f, 0x16, 0x08, 0xf1, 0xf3, 0x06, 0x0a, 0x04, 0x00, 0x00, 0xf9, + 0xf8, 0xf8, 0xf3, 0xef, 0xe0, 0xe7, 0xdf, 0xed, 0xeb, 0xe5, 0xd5, 0xd9, + 0xe7, 0xed, 0xe8, 0xe8, 0xf2, 0xdf, 0xe0, 0xdb, 0xe5, 0xe5, 0xeb, 0xf2, + 0xec, 0xdf, 0xda, 0xd8, 0xda, 0xeb, 0xf7, 0xed, 0x00, 0xff, 0x06, 0x06, + 0x12, 0x0c, 0x0d, 0x0a, 0xfd, 0xf4, 0x00, 0x0c, 0x05, 0x03, 0x0b, 0xfc, + 0xf3, 0x01, 0x00, 0x00, 0xcb, 0xdf, 0xdf, 0xdb, 0xdd, 0xdf, 0xd6, 0xbc, + 0xb9, 0xc2, 0xd4, 0xe2, 0xdf, 0xcc, 0xd2, 0xd4, 0xd6, 0xd8, 0xd2, 0xc8, + 0xca, 0xcd, 0xcf, 0xcc, 0xcf, 0xd9, 0xd5, 0xcb, 0xcd, 0xe2, 0xde, 0xcc, + 0xbe, 0xaf, 0xad, 0xbb, 0xbf, 0xbe, 0xf1, 0xee, 0xd5, 0xc1, 0xbd, 0xce, + 0xba, 0xb3, 0xc1, 0xbf, 0xc2, 0xe9, 0xf0, 0xcb, 0xd0, 0xce, 0xc7, 0xbd, + 0xb6, 0xbb, 0xca, 0xd4, 0xd1, 0xd9, 0xce, 0xd0, 0xca, 0xce, 0xdc, 0xde, + 0xcd, 0xc6, 0xc9, 0xbe, 0xc2, 0xd8, 0xef, 0xd8, 0xb5, 0xba, 0xc1, 0xbc, + 0xc9, 0xc9, 0xc9, 0xcb, 0xba, 0xc4, 0xc4, 0xc9, 0xda, 0xe3, 0xeb, 0xf1, + 0xf3, 0xf3, 0xf1, 0xeb, 0xe8, 0xe7, 0xea, 0xf0, 0xec, 0xf4, 0xfb, 0xda, + 0xf7, 0x04, 0x08, 0x06, 0x0d, 0x0c, 0x0e, 0x1e, 0x28, 0x29, 0x2b, 0x30, + 0x2b, 0x2b, 0x31, 0x2f, 0x35, 0x36, 0x39, 0x3d, 0x38, 0x37, 0x3a, 0x37, + 0x34, 0x38, 0x32, 0x33, 0x3c, 0x42, 0x44, 0x40, 0x3a, 0x3c, 0x3a, 0x3b, + 0x3e, 0x43, 0x45, 0x44, 0x43, 0x41, 0x42, 0x3f, 0x3d, 0x3b, 0x3d, 0x3c, + 0x38, 0x33, 0x31, 0x2b, 0x18, 0x1f, 0x1e, 0x19, 0x16, 0x15, 0x0b, 0x07, + 0xf5, 0xe8, 0xed, 0xfc, 0xf6, 0xf5, 0xf3, 0xf0, 0xf0, 0xef, 0xe0, 0xe1, + 0xe0, 0xea, 0xe1, 0xed, 0xe8, 0xe5, 0xdb, 0xcc, 0xd7, 0xf4, 0xe8, 0xe5, + 0xed, 0xe0, 0xd4, 0xca, 0xcd, 0xd5, 0xe2, 0xe1, 0xe5, 0xe2, 0xe3, 0xde, + 0xd1, 0xe9, 0xee, 0xe5, 0xe8, 0xed, 0xee, 0xec, 0x00, 0x0e, 0x00, 
0xfb, + 0xf8, 0xeb, 0xe1, 0xf9, 0xf9, 0xf8, 0xfb, 0xf2, 0xf3, 0xf3, 0xf4, 0xf2, + 0xd5, 0xe0, 0xdf, 0xd5, 0xd5, 0xe0, 0xcf, 0xc1, 0xbb, 0xc5, 0xdc, 0xe3, + 0xdf, 0xc7, 0xcf, 0xd9, 0xcf, 0xcd, 0xd4, 0xce, 0xcd, 0xd1, 0xcf, 0xc8, + 0xce, 0xd1, 0xcc, 0xd2, 0xe7, 0x00, 0xf2, 0xe7, 0xdb, 0xc6, 0xbb, 0xce, + 0xd3, 0xb7, 0xd4, 0xf2, 0xf4, 0xd5, 0xb8, 0xc3, 0xc9, 0xb8, 0xc5, 0xb8, + 0xd0, 0xe6, 0xea, 0xca, 0xc8, 0xcf, 0xc5, 0xc3, 0xc7, 0xcc, 0xcc, 0xdf, + 0xd2, 0xda, 0xd4, 0xcf, 0xcf, 0xd4, 0xe2, 0xd5, 0xd5, 0xc2, 0xb9, 0xcb, + 0xc7, 0xce, 0xdf, 0xdd, 0xba, 0xb1, 0xbc, 0xc4, 0xc6, 0xc8, 0xc2, 0xd1, + 0xd4, 0xf3, 0x06, 0x13, 0x16, 0x14, 0x13, 0x10, 0x0c, 0x06, 0x01, 0xfa, + 0xf6, 0xf1, 0xef, 0xee, 0xe7, 0xec, 0xee, 0xda, 0x00, 0x00, 0x05, 0x06, + 0x05, 0x0b, 0x0c, 0x1d, 0x29, 0x25, 0x26, 0x2c, 0x26, 0x25, 0x27, 0x2a, + 0x32, 0x31, 0x35, 0x37, 0x36, 0x37, 0x35, 0x31, 0x31, 0x37, 0x36, 0x34, + 0x38, 0x3d, 0x42, 0x41, 0x38, 0x35, 0x33, 0x3a, 0x3a, 0x39, 0x3b, 0x3e, + 0x40, 0x3e, 0x3e, 0x3e, 0x37, 0x31, 0x2c, 0x2d, 0x2e, 0x2a, 0x25, 0x18, + 0x07, 0x05, 0xf6, 0xef, 0xf3, 0xf8, 0xf3, 0xed, 0xda, 0xe4, 0xe1, 0xf0, + 0xfb, 0xf2, 0xe8, 0xe6, 0xe2, 0xd6, 0xe0, 0xe8, 0xe2, 0xe9, 0xe1, 0xe3, + 0xda, 0xd6, 0xc8, 0xc1, 0xce, 0xf6, 0xe5, 0xda, 0xe3, 0xd6, 0xcc, 0xc8, + 0xca, 0xd4, 0xed, 0xd8, 0xd6, 0xe8, 0xda, 0xd0, 0xce, 0xeb, 0xeb, 0xdb, + 0xe1, 0xea, 0xee, 0xe9, 0xed, 0x04, 0xed, 0xe7, 0xe3, 0xe4, 0xea, 0xf1, + 0xe8, 0xea, 0xef, 0xed, 0xea, 0xeb, 0xe8, 0xeb, 0xda, 0xe1, 0xdb, 0xce, + 0xd4, 0xe0, 0xc6, 0xc0, 0xbd, 0xcb, 0xe0, 0xe2, 0xd5, 0xc3, 0xd5, 0xd9, + 0xc2, 0xbb, 0xdf, 0xde, 0xcd, 0xc7, 0xcb, 0xd3, 0xc8, 0xc8, 0xc6, 0xd2, + 0xde, 0xe1, 0xe3, 0xe9, 0xed, 0xea, 0xe1, 0xdd, 0xdf, 0xbb, 0xba, 0xca, + 0xee, 0xfd, 0xd6, 0xb8, 0xbe, 0xc4, 0xc7, 0xbd, 0xc6, 0xdf, 0xe7, 0xd7, + 0xcd, 0xd3, 0xc7, 0xc1, 0xc0, 0xc2, 0xc4, 0xcf, 0xd5, 0xd3, 0xd1, 0xce, + 0xd1, 0xe2, 0xdf, 0xc3, 0xd4, 0xcf, 0xd6, 0xe8, 0xe1, 0xca, 0xca, 0xd4, + 0xce, 0xb6, 0xc0, 0xcb, 0xc9, 0xe0, 0xf1, 0xf3, 0x0c, 0x1b, 0x11, 
0x18, + 0x1e, 0x1d, 0x1a, 0x1b, 0x19, 0x12, 0x0c, 0x08, 0x01, 0xf4, 0xee, 0xed, + 0xe4, 0xe6, 0xe1, 0xe4, 0x01, 0xfc, 0xfe, 0x05, 0x03, 0x05, 0x09, 0x18, + 0x27, 0x22, 0x25, 0x26, 0x23, 0x1d, 0x20, 0x21, 0x2f, 0x2b, 0x2e, 0x31, + 0x33, 0x36, 0x31, 0x2d, 0x2e, 0x35, 0x3b, 0x38, 0x37, 0x3b, 0x41, 0x3e, + 0x39, 0x37, 0x31, 0x32, 0x30, 0x32, 0x32, 0x31, 0x32, 0x30, 0x32, 0x34, + 0x37, 0x31, 0x29, 0x24, 0x1a, 0x0e, 0x0f, 0x02, 0xf7, 0xeb, 0xd3, 0xcd, + 0xcc, 0xd0, 0xd5, 0xca, 0xc1, 0xd7, 0xe2, 0xdd, 0xe2, 0xe1, 0xe2, 0xe0, + 0xd4, 0xce, 0xda, 0xe4, 0xdc, 0xe0, 0xe3, 0xe2, 0xd0, 0xd7, 0xc7, 0xc8, + 0xeb, 0xf8, 0xdb, 0xda, 0xe1, 0xd8, 0xce, 0xb5, 0xc3, 0xd8, 0xda, 0xd1, + 0xc8, 0xe6, 0xe6, 0xd2, 0xda, 0xe8, 0xe3, 0xd2, 0xd5, 0xda, 0xe3, 0xf2, + 0xeb, 0xf9, 0xd3, 0xcf, 0xd8, 0xd3, 0xda, 0xe7, 0xe4, 0xe3, 0xe4, 0xe6, + 0xe1, 0xe4, 0xdb, 0xdf, 0xd2, 0xda, 0xdb, 0xd7, 0xd6, 0xda, 0xc6, 0xc2, + 0xbc, 0xd1, 0xe1, 0xdd, 0xc9, 0xce, 0xda, 0xec, 0xd5, 0xbb, 0xd9, 0xe1, + 0xe6, 0xd6, 0xd3, 0xc8, 0xbd, 0xc2, 0xbe, 0xd3, 0xd6, 0xc9, 0xc8, 0xe1, + 0xe1, 0xe8, 0xf0, 0xed, 0xed, 0xd8, 0xbd, 0xc1, 0xc9, 0xdf, 0xfd, 0xe8, + 0xb8, 0xbb, 0xc9, 0xc5, 0xde, 0xd9, 0xe2, 0xdf, 0xd3, 0xd6, 0xce, 0xc9, + 0xc2, 0xbf, 0xc0, 0xc7, 0xd4, 0xd0, 0xcd, 0xcb, 0xce, 0xeb, 0xd0, 0xc1, + 0xc8, 0xce, 0xf9, 0xde, 0xda, 0xd3, 0xca, 0xd2, 0xd2, 0xd8, 0xdc, 0xe0, + 0xed, 0x14, 0x1d, 0xfa, 0x08, 0x18, 0xf9, 0x16, 0x25, 0x27, 0x25, 0x22, + 0x20, 0x18, 0x0d, 0x01, 0xfb, 0xee, 0xec, 0xf0, 0xe7, 0xe2, 0xd7, 0xee, + 0xfe, 0xf8, 0xf9, 0xfe, 0x00, 0x02, 0x03, 0x10, 0x27, 0x1c, 0x20, 0x23, + 0x1d, 0x16, 0x1a, 0x1d, 0x27, 0x23, 0x23, 0x2b, 0x31, 0x31, 0x29, 0x2b, + 0x2c, 0x31, 0x38, 0x3a, 0x36, 0x37, 0x3c, 0x3e, 0x3b, 0x37, 0x33, 0x31, + 0x2a, 0x20, 0x24, 0x25, 0x22, 0x1e, 0x1b, 0x19, 0x22, 0x25, 0x21, 0x1b, + 0x05, 0xf5, 0xfc, 0xe9, 0xda, 0xda, 0xc1, 0xb1, 0xb7, 0xcf, 0xd4, 0xbe, + 0xb9, 0xc2, 0xe1, 0xdd, 0xde, 0xd2, 0xd5, 0xd4, 0xce, 0xd0, 0xce, 0xd1, + 0xd9, 0xda, 0xcd, 0xd5, 0xe0, 0xe9, 0xbd, 0xc7, 0xde, 0xe7, 0xd9, 
0xe1, + 0xdf, 0xda, 0xd7, 0xc6, 0xdf, 0xed, 0xda, 0xcd, 0xc1, 0xda, 0xee, 0xd6, + 0xe2, 0xea, 0xda, 0xcd, 0xd1, 0xd8, 0xdb, 0xf2, 0xf1, 0xf1, 0xce, 0xbb, + 0xc7, 0xcf, 0xd2, 0xda, 0xdb, 0xe2, 0xdf, 0xde, 0xdb, 0xdd, 0xd6, 0xda, + 0xc0, 0xd0, 0xdc, 0xe2, 0xe1, 0xcf, 0xd3, 0xd8, 0xbc, 0xd4, 0xde, 0xd6, + 0xd2, 0xd9, 0xd6, 0xe3, 0xe6, 0xce, 0xd7, 0xd7, 0xd3, 0xcc, 0xd5, 0xc2, + 0xbc, 0xcc, 0xd2, 0xd8, 0xd0, 0xce, 0xc0, 0xc6, 0xc6, 0xd9, 0xf0, 0xea, + 0xea, 0xe9, 0xe0, 0xd5, 0xcc, 0xce, 0xe7, 0x04, 0xeb, 0xc5, 0xcf, 0xd1, + 0xdf, 0xd2, 0xe1, 0xe0, 0xcf, 0xd8, 0xd4, 0xd6, 0xd2, 0xbc, 0xbc, 0xcd, + 0xc9, 0xc1, 0xc8, 0xc9, 0xcd, 0xde, 0xc5, 0xbf, 0xc2, 0xcd, 0xd7, 0xc6, + 0xc3, 0xd6, 0xcd, 0xd3, 0xca, 0xd6, 0xf9, 0xf5, 0x17, 0x21, 0x20, 0x00, + 0xff, 0x0d, 0x00, 0x1e, 0x27, 0x2c, 0x2a, 0x29, 0x26, 0x1a, 0xf5, 0xe6, + 0xec, 0xe7, 0xe9, 0xf3, 0xeb, 0xe1, 0xd9, 0xf3, 0xf0, 0xed, 0xf3, 0xf5, + 0xf9, 0x00, 0xfb, 0x0b, 0x1f, 0x15, 0x15, 0x1d, 0x13, 0x09, 0x17, 0x18, + 0x1d, 0x1c, 0x1a, 0x1b, 0x28, 0x2d, 0x25, 0x25, 0x21, 0x23, 0x30, 0x34, + 0x32, 0x31, 0x35, 0x36, 0x32, 0x34, 0x35, 0x31, 0x2e, 0x26, 0x1f, 0x1b, + 0x10, 0x0e, 0x0f, 0x11, 0x16, 0x15, 0x11, 0x05, 0xf0, 0xe9, 0xe1, 0xd9, + 0xc8, 0xd8, 0xc1, 0xad, 0xbe, 0xd4, 0xce, 0xb9, 0xb6, 0xba, 0xe2, 0xe7, + 0xe1, 0xcb, 0xcf, 0xcf, 0xdb, 0xd0, 0xbc, 0xbc, 0xd2, 0xd4, 0xba, 0xc7, + 0xe1, 0xe7, 0xd7, 0xb5, 0xd0, 0xdb, 0xce, 0xd3, 0xd9, 0xdf, 0xce, 0xd5, + 0xdd, 0xe1, 0xdd, 0xcf, 0xc4, 0xc8, 0xe7, 0xe8, 0xe4, 0xe2, 0xd4, 0xcb, + 0xce, 0xdd, 0xde, 0xeb, 0xf2, 0xed, 0xcb, 0xbd, 0xc8, 0xd2, 0xd0, 0xd5, + 0xd3, 0xda, 0xde, 0xde, 0xda, 0xd9, 0xdd, 0xda, 0xba, 0xc8, 0xd2, 0xdd, + 0xe4, 0xc9, 0xd6, 0xe0, 0xca, 0xdd, 0xda, 0xd2, 0xd5, 0xd6, 0xda, 0xe8, + 0xdb, 0xe4, 0xd6, 0xd0, 0xd7, 0xc9, 0xcf, 0xca, 0xb4, 0xbc, 0xd6, 0xde, + 0xcd, 0xcd, 0xc4, 0xc2, 0xca, 0xc8, 0xe3, 0xd5, 0xda, 0xe1, 0xe7, 0xe9, + 0xe6, 0xdf, 0xe0, 0xeb, 0xe3, 0xd7, 0xcf, 0xd0, 0xda, 0xd7, 0xda, 0xdd, + 0xcc, 0xd8, 0xd5, 0xe1, 0xdc, 0xc8, 0xbc, 0xcc, 0xc0, 0xc2, 0xc3, 
0xc6, + 0xcc, 0xdd, 0xc7, 0xc0, 0xc1, 0xfd, 0xe1, 0xbe, 0xbb, 0xd2, 0xd8, 0xe8, + 0xc4, 0xd2, 0x12, 0x03, 0x08, 0x23, 0x22, 0x04, 0xf6, 0x00, 0x08, 0x1f, + 0x20, 0x26, 0x27, 0x25, 0x20, 0x17, 0xf8, 0xdc, 0xe8, 0xe8, 0xe3, 0xf1, + 0xed, 0xda, 0xda, 0xed, 0xe0, 0xe1, 0xe8, 0xed, 0xef, 0xfe, 0xf2, 0x00, + 0x18, 0x13, 0x0c, 0x14, 0x0f, 0x00, 0x09, 0x15, 0x12, 0x13, 0x0e, 0x14, + 0x18, 0x1d, 0x1f, 0x12, 0x01, 0x0d, 0x25, 0x2c, 0x2a, 0x25, 0x28, 0x2d, + 0x23, 0x1d, 0x22, 0x1c, 0x19, 0x20, 0x21, 0x1d, 0x10, 0x0c, 0x0b, 0x09, + 0x03, 0x00, 0xfe, 0xed, 0xe1, 0xdf, 0xda, 0xd1, 0xba, 0xd5, 0xcd, 0xb6, + 0xc8, 0xce, 0xbb, 0xb5, 0xb5, 0xbd, 0xe1, 0xe6, 0xde, 0xd3, 0xcb, 0xc4, + 0xd6, 0xd3, 0xc3, 0xbe, 0xd1, 0xd1, 0xba, 0xc1, 0xcd, 0xed, 0xfb, 0xce, + 0xd7, 0xd4, 0xc3, 0xc0, 0xe3, 0xf3, 0xe0, 0xd4, 0xde, 0xe1, 0xd9, 0xd0, + 0xc1, 0xc8, 0xda, 0xed, 0xe7, 0xda, 0xc8, 0xcd, 0xc9, 0xd5, 0xda, 0xe4, + 0xf6, 0xf3, 0xd9, 0xc4, 0xc1, 0xbc, 0xcd, 0xd8, 0xd3, 0xdb, 0xe5, 0xe0, + 0xea, 0xe2, 0xdc, 0xdf, 0xcf, 0xc8, 0xd8, 0xd4, 0xca, 0xd0, 0xf1, 0xe8, + 0xda, 0xe4, 0xda, 0xd4, 0xda, 0xd2, 0xe0, 0xfc, 0xde, 0xec, 0xd4, 0xc6, + 0xe7, 0xd3, 0xd6, 0xcf, 0xbb, 0xc6, 0xde, 0xe0, 0xcb, 0xc0, 0xcc, 0xcb, + 0xca, 0xba, 0xe9, 0xd3, 0xc9, 0xc9, 0xd6, 0xdc, 0xe1, 0xe7, 0xe1, 0xda, + 0xdd, 0xe0, 0xd4, 0xc8, 0xe1, 0xed, 0xd9, 0xd9, 0xd1, 0xd5, 0xd0, 0xcd, + 0xd9, 0xf8, 0xd4, 0xd6, 0xe1, 0xc7, 0xbc, 0xc1, 0xcb, 0xdb, 0xc5, 0xc4, + 0xbb, 0xd9, 0xef, 0xd3, 0xe7, 0xf4, 0xdb, 0xd3, 0xce, 0xfd, 0x0b, 0xfc, + 0xf5, 0x14, 0x1a, 0x02, 0xee, 0xf0, 0xf9, 0x0e, 0x0b, 0x12, 0x1a, 0x1b, + 0x17, 0x0d, 0xff, 0xe1, 0xdf, 0xe5, 0xde, 0xe7, 0xe7, 0xd9, 0xde, 0xeb, + 0xe1, 0xdc, 0xdc, 0xe7, 0xe8, 0xf9, 0xed, 0xf8, 0x10, 0x0c, 0xff, 0x06, + 0x0d, 0xfc, 0xfb, 0x07, 0x09, 0x00, 0x04, 0x08, 0x0c, 0x0c, 0x06, 0x05, + 0xfd, 0x0b, 0x1c, 0x21, 0x25, 0x17, 0x16, 0x22, 0x18, 0x0e, 0x06, 0xf8, + 0xe5, 0xf4, 0xfa, 0x09, 0xfe, 0xf5, 0xf3, 0xec, 0xea, 0xed, 0xe8, 0xe1, + 0xd4, 0xcb, 0xd1, 0xcc, 0xb7, 0xc8, 0xd2, 0xc4, 0xca, 0xc3, 0xb9, 
0xce, + 0xc1, 0xbe, 0xe5, 0xdf, 0xe0, 0xd4, 0xcc, 0xc8, 0xd4, 0xd6, 0xd3, 0xc9, + 0xd4, 0xce, 0xd0, 0xc4, 0xd8, 0xf1, 0xe1, 0xcf, 0xdc, 0xc7, 0xc9, 0xbf, + 0xd8, 0xe2, 0xdf, 0xd6, 0xdf, 0xd7, 0xda, 0xce, 0xc1, 0xc2, 0xd5, 0xea, + 0xf2, 0xda, 0xc5, 0xda, 0xd5, 0xd2, 0xd8, 0xe7, 0xf0, 0x00, 0xf0, 0xe4, + 0xf1, 0xdc, 0xcb, 0xce, 0xd1, 0xe1, 0xe3, 0xdf, 0x0c, 0xec, 0xe9, 0xe5, + 0xd4, 0xc9, 0xce, 0xc9, 0xc2, 0xd9, 0xeb, 0xe9, 0xe6, 0xe8, 0xed, 0xde, + 0xdb, 0xd7, 0xf6, 0x12, 0xe8, 0xe1, 0xd4, 0xca, 0xec, 0xd9, 0xdd, 0xdf, + 0xd4, 0xe0, 0xe5, 0xd9, 0xca, 0xc1, 0xcd, 0xc9, 0xc2, 0xc5, 0xf0, 0xe3, + 0xdf, 0xc1, 0xcd, 0xd3, 0xd5, 0xda, 0xd9, 0xd5, 0xd0, 0xd4, 0xd3, 0xc6, + 0xdb, 0xf3, 0xe4, 0xd7, 0xd2, 0xd2, 0xc8, 0xbb, 0xbf, 0xe7, 0xe3, 0xe6, + 0xdc, 0xbd, 0xba, 0xc2, 0xce, 0xda, 0xc0, 0xe1, 0xdf, 0xc2, 0xde, 0xf6, + 0x10, 0x01, 0xdd, 0xcb, 0xd1, 0xdc, 0xed, 0xee, 0xef, 0x02, 0x0c, 0xfd, + 0xed, 0xea, 0xf5, 0xfa, 0x0d, 0x0f, 0x04, 0xfc, 0x07, 0x06, 0xf8, 0xe1, + 0xe1, 0xe6, 0xdd, 0xdd, 0xdb, 0xd7, 0xd4, 0xde, 0xe0, 0xda, 0xd4, 0xde, + 0xe4, 0xee, 0xe9, 0xf4, 0x03, 0x01, 0xf3, 0xf4, 0x04, 0xf9, 0xf3, 0xf5, + 0xf3, 0xf2, 0xf3, 0xf9, 0xff, 0x08, 0xf3, 0xf1, 0xf8, 0x00, 0x13, 0x16, + 0x1c, 0x13, 0x06, 0x0f, 0x07, 0xfa, 0xfe, 0xf9, 0xef, 0xed, 0xd6, 0xdc, + 0xe4, 0xd8, 0xd8, 0xe2, 0xe1, 0xe0, 0xdd, 0xe1, 0xe0, 0xce, 0xd2, 0xd0, + 0xc5, 0xc8, 0xd1, 0xc8, 0xc5, 0xd7, 0xd0, 0xd1, 0xc1, 0xb4, 0xdf, 0xc3, + 0xd9, 0xd7, 0xcf, 0xce, 0xcf, 0xc9, 0xb6, 0xbb, 0xdc, 0xd4, 0xed, 0xd8, + 0xe7, 0xea, 0xd4, 0xdc, 0xcc, 0xc1, 0xcb, 0xc3, 0xd2, 0xda, 0xe7, 0xe5, + 0xe2, 0xdc, 0xda, 0xca, 0xbd, 0xd8, 0xeb, 0xe1, 0xe4, 0xed, 0xde, 0xd1, + 0xe1, 0xd5, 0xd9, 0xe5, 0xea, 0xfc, 0xfc, 0xf6, 0xff, 0xe8, 0xcf, 0xcf, + 0xd2, 0xe1, 0xe0, 0xc9, 0xd6, 0xdb, 0xfc, 0xed, 0xc8, 0xc8, 0xc8, 0xbd, + 0xc9, 0xe3, 0xe3, 0xe5, 0xe5, 0xe1, 0xf3, 0xea, 0xdd, 0xdb, 0xf1, 0x18, + 0xeb, 0xde, 0xe1, 0xdd, 0xe0, 0xc4, 0xd4, 0xe5, 0xd4, 0xee, 0xef, 0xd7, + 0xca, 0xc8, 0xc5, 0xcd, 0xe2, 0x01, 0xf9, 0xdd, 0xd0, 0xc4, 0xcd, 
0xd4, + 0xd7, 0xda, 0xd3, 0xcc, 0xca, 0xca, 0xcf, 0xcf, 0xcc, 0xf3, 0xe8, 0xdc, + 0xd0, 0xd1, 0xc7, 0xc5, 0xc9, 0xd7, 0xd8, 0xdd, 0xe1, 0xca, 0xb7, 0xdd, + 0xe4, 0xe5, 0xda, 0xda, 0xd6, 0xca, 0xcb, 0xed, 0x08, 0xf3, 0xed, 0xf1, + 0xe4, 0xdf, 0xe7, 0xe9, 0xe8, 0xf4, 0x01, 0xfc, 0xf5, 0xec, 0xf5, 0xf5, + 0x03, 0x12, 0x12, 0xf9, 0xdf, 0xec, 0xea, 0xe4, 0xee, 0xe7, 0xdf, 0xd6, + 0xd3, 0xd4, 0xd0, 0xe3, 0xf0, 0xe2, 0xd5, 0xd4, 0xdb, 0xe1, 0xe1, 0xf0, + 0xfb, 0xf3, 0xe5, 0xe9, 0xfa, 0xf5, 0xed, 0xdd, 0xd5, 0xd8, 0xe2, 0xe7, + 0xe6, 0x00, 0xf3, 0xf1, 0xee, 0xf7, 0x03, 0x09, 0x16, 0x16, 0x06, 0x00, + 0xfc, 0xf2, 0xe7, 0x05, 0xf9, 0xdf, 0xc1, 0xbe, 0xc1, 0xc2, 0xb8, 0xb8, + 0xbb, 0xcf, 0xda, 0xda, 0xda, 0xce, 0xd3, 0xcf, 0xcd, 0xc8, 0xc9, 0xcb, + 0xc0, 0xdd, 0xf9, 0xcd, 0xbc, 0xc6, 0xe1, 0xc2, 0xce, 0xd2, 0xce, 0xc4, + 0xc4, 0xc1, 0xbb, 0xc7, 0xe0, 0xd4, 0xda, 0xec, 0xed, 0xe9, 0xe1, 0xd1, + 0xc6, 0xb8, 0xc3, 0xc8, 0xdb, 0xdd, 0xea, 0xe1, 0xe7, 0xe2, 0xda, 0xd1, + 0xd9, 0xe8, 0xe2, 0xda, 0xdf, 0xe6, 0xea, 0xdf, 0xfc, 0xe1, 0xdd, 0xe3, + 0xe7, 0xf0, 0x03, 0xfb, 0xf0, 0xdf, 0xd4, 0xd4, 0xd5, 0xe5, 0xdd, 0xc8, + 0xe7, 0xe1, 0xd9, 0xd7, 0xc8, 0xc8, 0xc7, 0xc2, 0xd2, 0xe3, 0xdb, 0xdd, + 0xdf, 0xd9, 0xe7, 0xf8, 0xe5, 0xdc, 0xea, 0x01, 0xe1, 0xe2, 0xf3, 0xf3, + 0xdb, 0xca, 0xf0, 0xf8, 0xe0, 0xe8, 0xfe, 0xfe, 0xd0, 0xca, 0xc4, 0xe2, + 0x0b, 0x06, 0xf6, 0xd4, 0xd3, 0xd3, 0xeb, 0xda, 0xdb, 0xd9, 0xd1, 0xc2, + 0xba, 0xc8, 0xc8, 0xce, 0xd0, 0xeb, 0xe5, 0xdc, 0xda, 0xd5, 0xe0, 0xe7, + 0xe1, 0xda, 0xdd, 0xe1, 0xee, 0xf0, 0xe5, 0xea, 0xd1, 0xe7, 0xf0, 0xf8, + 0xd8, 0xd4, 0xca, 0xe0, 0xf8, 0xec, 0xee, 0xfc, 0xcf, 0xeb, 0xe6, 0xe1, + 0xe5, 0xed, 0xfe, 0xf5, 0xf3, 0xed, 0xf4, 0xf9, 0xf1, 0x01, 0x04, 0x00, + 0xeb, 0xd5, 0xe0, 0xe3, 0xe9, 0xe3, 0xd1, 0xca, 0xce, 0xd5, 0xc7, 0xda, + 0xe2, 0xda, 0xd8, 0xd7, 0xd9, 0xd4, 0xdb, 0xeb, 0xee, 0xe1, 0xdf, 0xe5, + 0xef, 0xee, 0xe5, 0xce, 0xd3, 0xcb, 0xd4, 0xe0, 0xe0, 0xea, 0xf3, 0xf0, + 0xf1, 0xe2, 0xe3, 0xed, 0x02, 0x0e, 0x11, 0xf4, 0xe2, 0xf2, 0xef, 
0xf9, + 0xf5, 0xca, 0xb6, 0xb3, 0xbb, 0xbb, 0xa9, 0xa9, 0xbd, 0xcf, 0xc5, 0xbd, + 0xc7, 0xc3, 0xcb, 0xbb, 0xe9, 0xe4, 0xd0, 0xe1, 0xca, 0xd3, 0xfb, 0xc8, + 0xb9, 0xe4, 0xeb, 0xe1, 0xd7, 0xd0, 0xd4, 0xc9, 0xc9, 0xc8, 0xc9, 0xce, + 0xd5, 0xd2, 0xe3, 0xec, 0xf0, 0xe7, 0xda, 0xc3, 0xb8, 0xb0, 0xc9, 0xda, + 0xe8, 0xeb, 0xf8, 0xe0, 0xea, 0xe5, 0xdd, 0xe2, 0xe3, 0xe4, 0xdf, 0xe5, + 0xe7, 0xe6, 0xed, 0xdb, 0xe2, 0xe1, 0xde, 0xe1, 0xe6, 0xeb, 0xf9, 0x01, + 0xf9, 0xe7, 0xcf, 0xcc, 0xd8, 0xe3, 0xda, 0xc6, 0xe2, 0xdc, 0xc1, 0xcc, + 0xc9, 0xc8, 0xd0, 0xcb, 0xd6, 0xdd, 0xd4, 0xd0, 0xd8, 0xdf, 0xf6, 0xf3, + 0xf1, 0xe1, 0xe6, 0xfe, 0xe6, 0xee, 0xf9, 0xfe, 0xf7, 0x06, 0x17, 0xfc, + 0xe6, 0xee, 0x15, 0x17, 0xe0, 0xe5, 0x0c, 0xfc, 0x08, 0x01, 0xfb, 0xd4, + 0xf6, 0x1a, 0x05, 0xe1, 0xe1, 0xd9, 0xd1, 0xce, 0xca, 0xc2, 0xca, 0xcb, + 0xce, 0xe3, 0xe9, 0xd3, 0xd9, 0xd8, 0xdc, 0xdb, 0xe0, 0xd8, 0xea, 0xec, + 0x1d, 0xf4, 0xe7, 0xe8, 0xe2, 0xe7, 0x12, 0x12, 0xf3, 0xf2, 0xea, 0xf4, + 0xf2, 0xed, 0xed, 0xf1, 0xd2, 0xe7, 0xec, 0xe0, 0xe1, 0xe8, 0xee, 0xe5, + 0xef, 0xe7, 0xed, 0xe8, 0xde, 0xec, 0xf3, 0xf3, 0xe8, 0xe0, 0xd6, 0xcd, + 0xd8, 0xeb, 0xd1, 0xcb, 0xd0, 0xd7, 0xcd, 0xc9, 0xc9, 0xc9, 0xd5, 0xdc, + 0xd5, 0xd4, 0xdc, 0xde, 0xe0, 0xd7, 0xd0, 0xde, 0xe4, 0xf3, 0xe8, 0xcd, + 0xcb, 0xc5, 0xc7, 0xca, 0xd5, 0xd6, 0xdb, 0xeb, 0xe9, 0xd1, 0xd2, 0xe2, + 0xf1, 0xf7, 0x00, 0xfc, 0xe8, 0xed, 0xf0, 0xe7, 0xe1, 0xc6, 0xb5, 0xb3, + 0xbe, 0xbe, 0xaf, 0xb2, 0xc5, 0xcd, 0xc5, 0xce, 0xc6, 0xbb, 0xc7, 0xce, + 0xf8, 0xcd, 0xd9, 0xf8, 0xf1, 0xe0, 0xfe, 0xe7, 0xce, 0xe7, 0xe9, 0xf0, + 0xe1, 0xd9, 0xd2, 0xd4, 0xda, 0xd3, 0xdc, 0xce, 0xcb, 0xdb, 0xe8, 0xec, + 0xed, 0xde, 0xd5, 0xcd, 0xb7, 0xc1, 0xd7, 0xea, 0xea, 0xe9, 0xec, 0xe7, + 0xf4, 0xeb, 0xe6, 0xe4, 0xdd, 0xdf, 0xdd, 0xeb, 0xe1, 0xea, 0xee, 0xde, + 0xdb, 0xe2, 0xdc, 0xe1, 0xe5, 0xe9, 0xed, 0xfb, 0xf3, 0xf2, 0xdd, 0xcd, + 0xce, 0xeb, 0xdf, 0xce, 0xd6, 0xe5, 0xd0, 0xd4, 0xa9, 0xaa, 0xab, 0xab, + 0xa4, 0xa3, 0xa3, 0xa2, 0xa4, 0xa6, 0xa4, 0xa3, 0xa4, 0xa7, 0xa8, 
0xa6, + 0xa7, 0xa5, 0xa6, 0xa5, 0xa4, 0xa4, 0xa5, 0xa8, 0xba, 0xe8, 0x0f, 0x3e, + 0x4f, 0x46, 0x37, 0x32, 0x32, 0x34, 0x38, 0x46, 0x59, 0x68, 0x28, 0xc1, + 0xb0, 0xab, 0xa8, 0xa8, 0xa9, 0xab, 0xad, 0xad, 0xa7, 0xa4, 0xa4, 0xa4, + 0xa4, 0xa4, 0xa5, 0xab, 0xb1, 0xb6, 0xb8, 0xb6, 0xb0, 0xae, 0xac, 0xab, + 0xae, 0xb1, 0xb4, 0xb6, 0xb6, 0xb8, 0xb6, 0xb4, 0xb7, 0xb6, 0xb6, 0xb6, + 0xb7, 0xb6, 0xb1, 0xad, 0xa6, 0xa4, 0xa6, 0xb1, 0xbc, 0xb6, 0xb6, 0xb7, + 0xaa, 0xa4, 0xa4, 0xaa, 0xb7, 0xba, 0xb4, 0xb3, 0xb1, 0xb3, 0xb3, 0xb3, + 0xb3, 0xb5, 0xb6, 0xbb, 0xbf, 0xb6, 0xb4, 0xb6, 0xb4, 0xb2, 0xaf, 0xb7, + 0xbb, 0xbf, 0xbc, 0xb9, 0xb6, 0xb3, 0xb6, 0xbc, 0xbf, 0xb3, 0xa5, 0xa8, + 0xa9, 0xad, 0xb0, 0xad, 0xb1, 0xb9, 0xbe, 0xbc, 0xb2, 0xa7, 0xa8, 0xaa, + 0xa5, 0xa3, 0xa4, 0xa4, 0xa4, 0xa4, 0xa6, 0xa8, 0xa4, 0xa4, 0xa4, 0xa4, + 0xa4, 0xa4, 0xa7, 0xa8, 0xa6, 0xa7, 0xac, 0xb0, 0xaa, 0xa4, 0xa4, 0xa4, + 0xa3, 0xa5, 0xa6, 0xa4, 0xa3, 0xa2, 0xa2, 0xa4, 0xa3, 0xa3, 0xa3, 0xa3, + 0xa4, 0xa4, 0xa4, 0xa4, 0xa4, 0xa4, 0xa5, 0xa8, 0xaa, 0xae, 0xb4, 0xb7, + 0xb3, 0xa7, 0xa4, 0xa6, 0xaa, 0xb4, 0xbd, 0xbf, 0xb6, 0xa9, 0xa4, 0xa3, + 0xa4, 0xa4, 0xa3, 0xa2, 0xa6, 0xa6, 0xa2, 0xa2, 0xa2, 0xa4, 0xa4, 0xa4, + 0xa4, 0xa5, 0xa5, 0xa4, 0xa4, 0xa2, 0xa3, 0xa4, 0xa4, 0xa4, 0xa4, 0xa3, + 0xa2, 0xa4, 0xa4, 0xa4, 0xa6, 0xa8, 0xa8, 0xa9, 0xa4, 0xa3, 0xa4, 0xa4, + 0xa4, 0xa7, 0xa5, 0xa4, 0xa6, 0xa7, 0xa7, 0xa6, 0xa7, 0xa6, 0xa6, 0xa6, + 0xa5, 0xa4, 0xa5, 0xaa, 0xc4, 0x14, 0x40, 0x50, 0x4c, 0x3f, 0x31, 0x2a, + 0x2e, 0x33, 0x37, 0x40, 0x56, 0x67, 0x3a, 0xcb, 0xb9, 0xae, 0xa9, 0xa9, + 0xab, 0xae, 0xad, 0xab, 0xa8, 0xa4, 0xa4, 0xa4, 0xa4, 0xa4, 0xa4, 0xa5, + 0xab, 0xb2, 0xb4, 0xb2, 0xaa, 0xaa, 0xaa, 0xad, 0xb1, 0xb9, 0xbb, 0xb9, + 0xb7, 0xb8, 0xb5, 0xb3, 0xb6, 0xb6, 0xb3, 0xb5, 0xb6, 0xb6, 0xb5, 0xae, + 0xab, 0xb0, 0xb2, 0xba, 0xbc, 0xb3, 0xae, 0xab, 0xa6, 0xa4, 0xa6, 0xb2, + 0xbd, 0xbe, 0xbd, 0xba, 0xb4, 0xb2, 0xb2, 0xb7, 0xb4, 0xb5, 0xb6, 0xb9, + 0xbe, 0xba, 0xb5, 0xb6, 0xb2, 0xae, 0xa6, 0xae, 0xb9, 0xbe, 0xbc, 
0xb9, + 0xb6, 0xb2, 0xb5, 0xbd, 0xbc, 0xb1, 0xa5, 0xa4, 0xa6, 0xad, 0xaf, 0xae, + 0xad, 0xb6, 0xba, 0xbc, 0xb2, 0xa9, 0xaa, 0xab, 0xa7, 0xa5, 0xa7, 0xa5, + 0xa4, 0xa5, 0xa9, 0xaa, 0xa7, 0xa7, 0xa5, 0xa4, 0xa4, 0xa2, 0xa4, 0xa5, + 0xa8, 0xaa, 0xaf, 0xb4, 0xae, 0xa4, 0xa4, 0xa3, 0xa3, 0xa3, 0xa4, 0xa4, + 0xa2, 0xa2, 0xa3, 0xa4, 0xa3, 0xa3, 0xa3, 0xa2, 0xa4, 0xa4, 0xa4, 0xa4, + 0xa4, 0xa5, 0xaa, 0xae, 0xb0, 0xb2, 0xb6, 0xb7, 0xb5, 0xab, 0xa7, 0xa5, + 0xa6, 0xab, 0xbb, 0xbe, 0xad, 0xa6, 0xa6, 0xa5, 0xa5, 0xa5, 0xa4, 0xa4, + 0xa4, 0xa6, 0xa3, 0xa3, 0xa4, 0xa6, 0xa4, 0xa3, 0xa4, 0xa4, 0xa4, 0xa3, + 0xa3, 0xa3, 0xa4, 0xa4, 0xa3, 0xa3, 0xa4, 0xa5, 0xa4, 0xa4, 0xa4, 0xa7, + 0xa7, 0xa6, 0xa7, 0xa5, 0xa3, 0xa5, 0xa4, 0xa5, 0xa5, 0xa7, 0xa7, 0xa5, + 0xa7, 0xa7, 0xa5, 0xa5, 0xa4, 0xa5, 0xa6, 0xa6, 0xa5, 0xa5, 0xa6, 0xaa, + 0xc8, 0x27, 0x4a, 0x4c, 0x42, 0x35, 0x2b, 0x22, 0x2b, 0x31, 0x34, 0x3b, + 0x50, 0x64, 0x52, 0xe9, 0xc2, 0xb1, 0xab, 0xaa, 0xac, 0xae, 0xae, 0xab, + 0xa6, 0xa4, 0xa4, 0xa4, 0xa5, 0xa4, 0xa3, 0xa3, 0xa5, 0xa8, 0xaa, 0xae, + 0xa8, 0xa5, 0xaa, 0xb1, 0xb4, 0xb9, 0xb8, 0xb6, 0xb6, 0xb7, 0xb5, 0xb1, + 0xb4, 0xb5, 0xb4, 0xb6, 0xb8, 0xb6, 0xae, 0xaa, 0xaa, 0xbb, 0xc1, 0xbf, + 0xc0, 0xc1, 0xbc, 0xb9, 0xb6, 0xb1, 0xb0, 0xbb, 0xc2, 0xc3, 0xc2, 0xb8, + 0xb4, 0xb5, 0xb4, 0xb6, 0xb6, 0xb5, 0xb6, 0xb6, 0xb4, 0xb2, 0xb3, 0xb5, + 0xb3, 0xb0, 0xae, 0xb2, 0xb9, 0xbc, 0xbc, 0xb8, 0xb6, 0xb6, 0xb3, 0xb8, + 0xba, 0xae, 0xa4, 0xa6, 0xad, 0xb0, 0xb3, 0xb0, 0xae, 0xba, 0xba, 0xb9, + 0xb3, 0xad, 0xa8, 0xa7, 0xa8, 0xa5, 0xa4, 0xa4, 0xa3, 0xa8, 0xb0, 0xad, + 0xaa, 0xaa, 0xab, 0xa7, 0xa4, 0xa3, 0xa4, 0xa3, 0xa4, 0xa6, 0xad, 0xb6, + 0xae, 0xa4, 0xa4, 0xa4, 0xa4, 0xa3, 0xa4, 0xa4, 0xa3, 0xa3, 0xa3, 0xa3, + 0xa4, 0xa4, 0xa2, 0xa2, 0xa4, 0xa5, 0xa4, 0xa4, 0xa4, 0xa4, 0xac, 0xb1, + 0xb4, 0xb5, 0xb7, 0xb6, 0xb4, 0xaf, 0xa7, 0xa4, 0xa8, 0xa8, 0xaf, 0xb4, + 0xa7, 0xa2, 0xa4, 0xa6, 0xa8, 0xa9, 0xa8, 0xa5, 0xa5, 0xa9, 0xaa, 0xaa, + 0xa4, 0xa5, 0xa5, 0xa4, 0xa4, 0xa4, 0xa2, 0xa2, 0xa3, 0xa7, 0xa9, 
0xa4, + 0xa3, 0xa2, 0xa3, 0xa4, 0xa4, 0xa3, 0xa4, 0xa5, 0xa6, 0xa7, 0xa7, 0xa4, + 0xa5, 0xa7, 0xa6, 0xa9, 0xa8, 0xa4, 0xa7, 0xa6, 0xa7, 0xaa, 0xa7, 0xa4, + 0xa4, 0xa5, 0xa7, 0xa6, 0xa5, 0xa8, 0xa8, 0xab, 0xc8, 0x24, 0x47, 0x46, + 0x37, 0x2a, 0x26, 0x26, 0x28, 0x2e, 0x34, 0x3a, 0x4c, 0x60, 0x5f, 0x11, + 0xdd, 0xc0, 0xb1, 0xae, 0xad, 0xaf, 0xae, 0xac, 0xa6, 0xa5, 0xa6, 0xa8, + 0xaa, 0xaa, 0xa7, 0xa5, 0xa6, 0xa4, 0xa5, 0xa9, 0xa7, 0xa5, 0xa9, 0xb0, + 0xb0, 0xb1, 0xb1, 0xb0, 0xb0, 0xaf, 0xad, 0xb0, 0xb2, 0xb5, 0xb5, 0xb4, + 0xb2, 0xae, 0xa8, 0xa6, 0xaa, 0xad, 0xad, 0xab, 0xb0, 0xb7, 0xbb, 0xbd, + 0xbf, 0xbe, 0xbf, 0xc4, 0xc7, 0xc5, 0xc2, 0xbc, 0xb8, 0xb7, 0xb7, 0xb3, + 0xb7, 0xb8, 0xb7, 0xb6, 0xaf, 0xb0, 0xb5, 0xb1, 0xaf, 0xb1, 0xb0, 0xb6, + 0xbc, 0xb9, 0xba, 0xb6, 0xb6, 0xb4, 0xb0, 0xaf, 0xb0, 0xab, 0xa4, 0xa6, + 0xae, 0xad, 0xb1, 0xb1, 0xaf, 0xbc, 0xbc, 0xb9, 0xb3, 0xab, 0xa7, 0xa6, + 0xa9, 0xa5, 0xa4, 0xa4, 0xa4, 0xad, 0xb6, 0xb0, 0xac, 0xa9, 0xaa, 0xaa, + 0xa6, 0xa4, 0xa4, 0xa4, 0xa3, 0xa4, 0xa9, 0xb5, 0xb0, 0xa7, 0xa9, 0xa5, + 0xa4, 0xa4, 0xa5, 0xa5, 0xa3, 0xa3, 0xa4, 0xa4, 0xa4, 0xa4, 0xa2, 0xa3, + 0xa4, 0xa5, 0xa4, 0xa4, 0xa4, 0xa4, 0xad, 0xb3, 0xb4, 0xb9, 0xb9, 0xb6, + 0xb6, 0xb0, 0xa8, 0xa3, 0xa5, 0xa7, 0xa5, 0xa8, 0xa5, 0xa3, 0xa3, 0xa7, + 0xaf, 0xad, 0xaa, 0xaa, 0xaf, 0xb5, 0xb9, 0xb8, 0xae, 0xa6, 0xa7, 0xa7, + 0xa4, 0xa3, 0xa3, 0xa4, 0xa4, 0xa6, 0xa8, 0xa5, 0xa4, 0xa3, 0xa3, 0xa4, + 0xa4, 0xa4, 0xa4, 0xa4, 0xa3, 0xa4, 0xa6, 0xa4, 0xa6, 0xa6, 0xa5, 0xa7, + 0xa8, 0xa7, 0xa6, 0xa6, 0xa7, 0xa8, 0xa7, 0xa7, 0xa6, 0xa7, 0xa7, 0xa6, + 0xa6, 0xa8, 0xaa, 0xad, 0xcd, 0x29, 0x42, 0x41, 0x2e, 0x22, 0x21, 0x28, + 0x28, 0x2f, 0x36, 0x3b, 0x4c, 0x5f, 0x65, 0x3a, 0x07, 0xe5, 0xc2, 0xb8, + 0xb0, 0xaf, 0xae, 0xaa, 0xa6, 0xa7, 0xa7, 0xaa, 0xad, 0xab, 0xaa, 0xa8, + 0xaa, 0xa9, 0xa8, 0xa5, 0xa4, 0xa4, 0xa6, 0xaa, 0xaf, 0xaf, 0xaa, 0xac, + 0xad, 0xab, 0xab, 0xb0, 0xb3, 0xb3, 0xb2, 0xb0, 0xaa, 0xa7, 0xa5, 0xa6, + 0xaa, 0xab, 0xaa, 0xab, 0xac, 0xab, 0xaa, 0xaa, 0xa8, 0xa6, 0xa8, 
0xb6, + 0xbd, 0xbd, 0xbc, 0xbc, 0xbb, 0xb9, 0xba, 0xb5, 0xb6, 0xbc, 0xbc, 0xb9, + 0xb0, 0xb0, 0xb5, 0xb2, 0xb0, 0xb0, 0xb0, 0xb6, 0xba, 0xb8, 0xb7, 0xb4, + 0xae, 0xab, 0xaa, 0xaa, 0xad, 0xab, 0xa4, 0xa4, 0xa4, 0xa4, 0xaa, 0xb3, + 0xb0, 0xb9, 0xbf, 0xbc, 0xb5, 0xad, 0xa9, 0xa8, 0xa6, 0xa6, 0xa6, 0xa5, + 0xa4, 0xae, 0xb6, 0xb2, 0xae, 0xac, 0xaa, 0xac, 0xad, 0xa7, 0xa3, 0xa3, + 0xa4, 0xa4, 0xa3, 0xa8, 0xab, 0xae, 0xb4, 0xad, 0xa4, 0xa5, 0xa5, 0xa4, + 0xa3, 0xa4, 0xa4, 0xa4, 0xa4, 0xa4, 0xa4, 0xa4, 0xa4, 0xa5, 0xa5, 0xa4, + 0xa3, 0xa4, 0xac, 0xaf, 0xb1, 0xb6, 0xb8, 0xb9, 0xb6, 0xb2, 0xa8, 0xa4, + 0xa5, 0xa7, 0xa5, 0xa4, 0xa4, 0xa3, 0xa4, 0xa6, 0xac, 0xae, 0xad, 0xad, + 0xb0, 0xba, 0xbd, 0xc0, 0xbc, 0xad, 0xa8, 0xaa, 0xa7, 0xa3, 0xa4, 0xa4, + 0xa3, 0xa4, 0xa6, 0xa5, 0xa4, 0xa3, 0xa3, 0xa4, 0xa4, 0xa4, 0xa5, 0xa4, + 0xa4, 0xa4, 0xa4, 0xa4, 0xa5, 0xa6, 0xa6, 0xa9, 0xa7, 0xa7, 0xaa, 0xa7, + 0xa7, 0xa6, 0xa5, 0xa3, 0xa4, 0xa8, 0xa8, 0xa7, 0xa7, 0xaa, 0xab, 0xb1, + 0xdb, 0x34, 0x3f, 0x37, 0x2a, 0x22, 0x26, 0x25, 0x2a, 0x2f, 0x38, 0x3c, + 0x4f, 0x61, 0x68, 0x58, 0x3a, 0x11, 0xdf, 0xcf, 0xc3, 0xb1, 0xaa, 0xaa, + 0xaa, 0xa9, 0xaa, 0xae, 0xb0, 0xb0, 0xad, 0xab, 0xac, 0xac, 0xa7, 0xa6, + 0xaa, 0xae, 0xb3, 0xb3, 0xb2, 0xaf, 0xa8, 0xa7, 0xab, 0xab, 0xab, 0xb0, + 0xb3, 0xae, 0xab, 0xac, 0xab, 0xae, 0xad, 0xb0, 0xb0, 0xad, 0xad, 0xaf, + 0xb3, 0xb0, 0xa8, 0xa7, 0xa7, 0xa6, 0xa8, 0xac, 0xb1, 0xb4, 0xb6, 0xb7, + 0xb8, 0xb4, 0xb3, 0xb5, 0xb4, 0xbf, 0xc2, 0xba, 0xaf, 0xb0, 0xb4, 0xb3, + 0xb0, 0xb0, 0xad, 0xaf, 0xb9, 0xbb, 0xb7, 0xb6, 0xb4, 0xad, 0xa9, 0xae, + 0xb0, 0xa9, 0xa4, 0xa3, 0xa3, 0xa3, 0xa5, 0xaa, 0xae, 0xb4, 0xbc, 0xbc, + 0xb6, 0xb0, 0xac, 0xb3, 0xb4, 0xb0, 0xaa, 0xa5, 0xa4, 0xa9, 0xb2, 0xb0, + 0xb0, 0xb4, 0xb0, 0xad, 0xb0, 0xaa, 0xa4, 0xa4, 0xa4, 0xa3, 0xa3, 0xa6, + 0xa7, 0xab, 0xaf, 0xb0, 0xa5, 0xa4, 0xa3, 0xa3, 0xa2, 0xa3, 0xa4, 0xa4, + 0xa3, 0xa4, 0xa5, 0xa5, 0xa4, 0xa4, 0xa5, 0xa3, 0xa3, 0xa4, 0xa6, 0xaa, + 0xad, 0xaf, 0xb2, 0xb6, 0xb7, 0xb0, 0xa8, 0xa5, 0xa5, 0xa7, 0xa5, 
0xa3, + 0xa4, 0xa3, 0xa4, 0xa4, 0xa5, 0xab, 0xb0, 0xb0, 0xb3, 0xb8, 0xbc, 0xbe, + 0xbc, 0xb0, 0xaa, 0xab, 0xa7, 0xa2, 0xa3, 0xa4, 0xa4, 0xa4, 0xa6, 0xa8, + 0xa6, 0xa4, 0xa4, 0xa4, 0xa5, 0xa4, 0xa5, 0xa4, 0xa3, 0xa3, 0xa3, 0xa2, + 0xa4, 0xa8, 0xa8, 0xa9, 0xa8, 0xa8, 0xa9, 0xaa, 0xa8, 0xa5, 0xa6, 0xa3, + 0xa4, 0xa6, 0xa9, 0xab, 0xae, 0xb2, 0xba, 0xc6, 0x01, 0x40, 0x3b, 0x2d, + 0x28, 0x28, 0x2a, 0x2c, 0x2a, 0x36, 0x3b, 0x45, 0x55, 0x64, 0x6a, 0x67, + 0x5b, 0x39, 0x15, 0xfe, 0xea, 0xb9, 0xaa, 0xad, 0xaa, 0xa7, 0xac, 0xb0, + 0xb1, 0xb2, 0xb2, 0xb2, 0xb1, 0xb1, 0xab, 0xae, 0xb7, 0xbc, 0xbc, 0xb9, + 0xb2, 0xad, 0xa9, 0xa6, 0xaa, 0xab, 0xaa, 0xb1, 0xb5, 0xaa, 0xa5, 0xa8, + 0xab, 0xb3, 0xb8, 0xb9, 0xb8, 0xb6, 0xb3, 0xb4, 0xb6, 0xb1, 0xa9, 0xa7, + 0xa8, 0xae, 0xad, 0xa8, 0xa5, 0xa5, 0xaa, 0xb0, 0xbc, 0xbb, 0xb6, 0xb6, + 0xb2, 0xbe, 0xc4, 0xb9, 0xb0, 0xb0, 0xb3, 0xb2, 0xb0, 0xb0, 0xae, 0xac, + 0xb8, 0xbb, 0xb9, 0xb8, 0xbc, 0xb7, 0xb2, 0xb2, 0xb3, 0xab, 0xa4, 0xa3, + 0xa2, 0xa2, 0xa3, 0xa4, 0xaa, 0xb0, 0xb9, 0xb8, 0xb7, 0xb1, 0xb7, 0xc8, + 0xc8, 0xc3, 0xbc, 0xaf, 0xa7, 0xa5, 0xaa, 0xb0, 0xb7, 0xb8, 0xb4, 0xb0, + 0xad, 0xa9, 0xa6, 0xa6, 0xa3, 0xa2, 0xa1, 0xa5, 0xa4, 0xa5, 0xab, 0xab, + 0xa5, 0xa6, 0xa4, 0xa2, 0xa3, 0xa4, 0xa4, 0xa2, 0xa2, 0xa3, 0xa4, 0xa4, + 0xa5, 0xa6, 0xa4, 0xa3, 0xa2, 0xa3, 0xa4, 0xa4, 0xa7, 0xaa, 0xb1, 0xb6, + 0xb6, 0xad, 0xa8, 0xa5, 0xa7, 0xa9, 0xa5, 0xa7, 0xa8, 0xa6, 0xa8, 0xa6, + 0xa4, 0xaa, 0xb4, 0xb4, 0xb4, 0xb6, 0xb7, 0xbb, 0xb4, 0xad, 0xaa, 0xaa, + 0xa6, 0xa4, 0xa4, 0xa4, 0xa4, 0xa4, 0xa4, 0xa7, 0xa4, 0xa3, 0xa2, 0xa2, + 0xa3, 0xa3, 0xa3, 0xa5, 0xa2, 0xa3, 0xa4, 0xa4, 0xa3, 0xa6, 0xa9, 0xaa, + 0xaa, 0xaa, 0xab, 0xac, 0xa8, 0xa6, 0xa8, 0xa6, 0xa6, 0xa8, 0xaa, 0xb0, + 0xbb, 0xc0, 0xd6, 0xef, 0x2c, 0x46, 0x3b, 0x28, 0x1c, 0x2b, 0x2a, 0x39, + 0x33, 0x3c, 0x42, 0x50, 0x5d, 0x69, 0x6c, 0x6e, 0x69, 0x56, 0x42, 0x34, + 0x08, 0xc5, 0xb1, 0xb0, 0xa9, 0xa5, 0xa9, 0xac, 0xb0, 0xb3, 0xb0, 0xb2, + 0xb2, 0xb4, 0xb7, 0xba, 0xbd, 0xbd, 0xbc, 0xbd, 0xbb, 0xb3, 0xad, 
0xaa, + 0xac, 0xa9, 0xaa, 0xb5, 0xb0, 0xa6, 0xa4, 0xa5, 0xa7, 0xb0, 0xbe, 0xbf, + 0xbe, 0xbb, 0xb6, 0xb8, 0xb7, 0xaf, 0xaa, 0xab, 0xad, 0xb7, 0xb7, 0xab, + 0xa6, 0xa5, 0xa5, 0xa8, 0xb3, 0xb9, 0xba, 0xbf, 0xb7, 0xbc, 0xc0, 0xb7, + 0xb4, 0xb3, 0xb4, 0xb0, 0xaa, 0xad, 0xb0, 0xb0, 0xbc, 0xbc, 0xb9, 0xba, + 0xbe, 0xbd, 0xbc, 0xb8, 0xb5, 0xab, 0xa4, 0xa4, 0xa3, 0xa3, 0xa3, 0xa3, + 0xa7, 0xae, 0xb6, 0xb7, 0xb8, 0xb4, 0xc0, 0xc9, 0xc9, 0xca, 0xc8, 0xc3, + 0xb5, 0xa6, 0xa6, 0xaf, 0xb9, 0xbb, 0xb6, 0xad, 0xaa, 0xa8, 0xaa, 0xa9, + 0xa4, 0xa3, 0xa2, 0xa4, 0xa4, 0xa4, 0xa6, 0xab, 0xaa, 0xa8, 0xa6, 0xa4, + 0xa3, 0xa3, 0xa4, 0xa4, 0xa3, 0xa4, 0xa4, 0xa4, 0xa5, 0xa7, 0xa5, 0xa4, + 0xa3, 0xa3, 0xa3, 0xa3, 0xa4, 0xa7, 0xb0, 0xb5, 0xb3, 0xab, 0xa8, 0xa7, + 0xaa, 0xaa, 0xa8, 0xa9, 0xa9, 0xa4, 0xa6, 0xa8, 0xa4, 0xa5, 0xb0, 0xb7, + 0xb7, 0xb6, 0xb6, 0xb5, 0xac, 0xab, 0xac, 0xaa, 0xa8, 0xa5, 0xa4, 0xa4, + 0xa4, 0xa3, 0xa4, 0xa4, 0xa4, 0xa4, 0xa4, 0xa3, 0xa3, 0xa4, 0xa3, 0xa6, + 0xa2, 0xa4, 0xa5, 0xa6, 0xa4, 0xa3, 0xa5, 0xa9, 0xae, 0xaf, 0xaf, 0xad, + 0xa7, 0xa5, 0xaa, 0xaa, 0xa8, 0xa9, 0xab, 0xb2, 0xc0, 0xcb, 0xf7, 0x1c, + 0x49, 0x4a, 0x38, 0x25, 0x1b, 0x28, 0x32, 0x3e, 0x42, 0x46, 0x4f, 0x5a, + 0x64, 0x6d, 0x70, 0x70, 0x6e, 0x64, 0x5b, 0x52, 0x1d, 0xd0, 0xbb, 0xb1, + 0xab, 0xa8, 0xa6, 0xa6, 0xaa, 0xb1, 0xb0, 0xb0, 0xb5, 0xb9, 0xbe, 0xbc, + 0xbd, 0xbf, 0xbe, 0xc1, 0xc1, 0xbb, 0xb2, 0xaf, 0xad, 0xa9, 0xaa, 0xad, + 0xa7, 0xa6, 0xa6, 0xa7, 0xa7, 0xaa, 0xb5, 0xbf, 0xbd, 0xbb, 0xb8, 0xb6, + 0xb7, 0xae, 0xaa, 0xaf, 0xb7, 0xbc, 0xb9, 0xb2, 0xaa, 0xa7, 0xa5, 0xa6, + 0xa7, 0xa8, 0xaf, 0xb8, 0xb3, 0xb0, 0xb2, 0xb1, 0xb2, 0xb4, 0xb3, 0xb0, + 0xad, 0xb0, 0xb2, 0xb6, 0xb8, 0xbb, 0xb9, 0xb8, 0xbc, 0xba, 0xbb, 0xba, + 0xba, 0xbb, 0xab, 0xa4, 0xa2, 0xa3, 0xa2, 0xa3, 0xa5, 0xab, 0xb2, 0xb8, + 0xb9, 0xba, 0xc2, 0xc8, 0xc9, 0xcc, 0xcd, 0xca, 0xbf, 0xa8, 0xa6, 0xa8, + 0xb1, 0xbc, 0xb8, 0xab, 0xa6, 0xa7, 0xa8, 0xab, 0xa6, 0xa5, 0xa4, 0xa5, + 0xa7, 0xa4, 0xa6, 0xb3, 0xb2, 0xac, 0xa7, 0xa9, 0xa5, 0xa4, 0xa5, 
0xa5, + 0xa3, 0xa5, 0xa4, 0xa4, 0xa6, 0xa6, 0xa5, 0xa4, 0xa4, 0xa4, 0xa4, 0xa4, + 0xa4, 0xa3, 0xa8, 0xac, 0xac, 0xab, 0xa7, 0xa6, 0xa9, 0xac, 0xaa, 0xa6, + 0xa5, 0xa4, 0xa5, 0xaa, 0xa9, 0xa4, 0xa9, 0xb6, 0xbc, 0xbc, 0xb9, 0xb3, + 0xab, 0xb0, 0xb4, 0xb5, 0xb3, 0xab, 0xa7, 0xa6, 0xa4, 0xa3, 0xa4, 0xa4, + 0xa3, 0xa3, 0xa4, 0xa4, 0xa4, 0xa5, 0xa5, 0xa7, 0xa3, 0xa4, 0xa5, 0xa8, + 0xa9, 0xa6, 0xa5, 0xa8, 0xae, 0xb1, 0xb0, 0xa9, 0xa5, 0xa5, 0xa8, 0xaa, + 0xaa, 0xa9, 0xae, 0xb6, 0xc8, 0xdc, 0x0f, 0x3e, 0x54, 0x49, 0x34, 0x22, + 0x1a, 0x28, 0x3c, 0x45, 0x50, 0x53, 0x5a, 0x63, 0x6b, 0x70, 0x71, 0x72, + 0x71, 0x6c, 0x69, 0x5e, 0x2d, 0xee, 0xd2, 0xbc, 0xaf, 0xab, 0xa7, 0xa5, + 0xa6, 0xaa, 0xb1, 0xb2, 0xb5, 0xb9, 0xb8, 0xb7, 0xb8, 0xba, 0xbb, 0xbc, + 0xbe, 0xc2, 0xbc, 0xb4, 0xaf, 0xac, 0xaa, 0xa8, 0xa6, 0xa8, 0xaa, 0xa9, + 0xa8, 0xaa, 0xad, 0xb0, 0xb7, 0xbb, 0xbb, 0xb7, 0xb8, 0xb5, 0xb3, 0xb3, + 0xbc, 0xbb, 0xb8, 0xb6, 0xb0, 0xaf, 0xa9, 0xa6, 0xa6, 0xa8, 0xa9, 0xaa, + 0xac, 0xab, 0xac, 0xac, 0xad, 0xb0, 0xb0, 0xb0, 0xae, 0xb0, 0xb4, 0xb8, + 0xaf, 0xb6, 0xb6, 0xb6, 0xba, 0xbb, 0xb9, 0xb8, 0xbd, 0xc6, 0xbe, 0xaa, + 0xa4, 0xa5, 0xa8, 0xaa, 0xa6, 0xab, 0xb2, 0xb7, 0xb8, 0xb9, 0xbf, 0xc4, + 0xc8, 0xcd, 0xce, 0xca, 0xc1, 0xad, 0xaa, 0xb0, 0xb4, 0xbc, 0xba, 0xac, + 0xa6, 0xa6, 0xa7, 0xab, 0xaa, 0xa6, 0xa5, 0xa5, 0xa8, 0xa4, 0xa9, 0xae, + 0xae, 0xb1, 0xb0, 0xaa, 0xa5, 0xa4, 0xa5, 0xa5, 0xa5, 0xa6, 0xa4, 0xa3, + 0xa6, 0xa6, 0xa5, 0xa4, 0xa5, 0xa5, 0xa6, 0xa6, 0xa5, 0xa4, 0xa3, 0xa5, + 0xaa, 0xaa, 0xa8, 0xa6, 0xaa, 0xad, 0xaa, 0xa7, 0xa7, 0xa8, 0xa9, 0xab, + 0xab, 0xa4, 0xa3, 0xaa, 0xb8, 0xbf, 0xb9, 0xb2, 0xae, 0xb3, 0xb6, 0xb7, + 0xb8, 0xb1, 0xab, 0xad, 0xa8, 0xa4, 0xa5, 0xa5, 0xa4, 0xa4, 0xa5, 0xa6, + 0xa7, 0xa6, 0xa6, 0xa7, 0xa4, 0xa4, 0xa4, 0xa5, 0xaa, 0xa7, 0xa6, 0xa7, + 0xa8, 0xb0, 0xb2, 0xa8, 0xa5, 0xa6, 0xa5, 0xa7, 0xac, 0xad, 0xb0, 0xbc, + 0xd5, 0xf4, 0x1b, 0x4c, 0x54, 0x46, 0x32, 0x25, 0x20, 0x32, 0x44, 0x4d, + 0x5a, 0x60, 0x64, 0x6b, 0x70, 0x71, 0x73, 0x74, 0x73, 0x71, 0x70, 
0x66, + 0x43, 0x23, 0xf5, 0xc3, 0xb3, 0xac, 0xa9, 0xa7, 0xa6, 0xa7, 0xab, 0xae, + 0xb0, 0xb2, 0xb4, 0xb6, 0xb2, 0xb3, 0xb6, 0xb9, 0xbb, 0xbb, 0xbf, 0xbc, + 0xb5, 0xb0, 0xac, 0xaa, 0xaa, 0xaa, 0xab, 0xad, 0xad, 0xae, 0xac, 0xa8, + 0xaf, 0xb9, 0xbb, 0xb8, 0xb8, 0xb6, 0xb4, 0xb6, 0xb9, 0xb7, 0xb6, 0xbb, + 0xba, 0xb8, 0xb2, 0xa8, 0xa5, 0xa6, 0xaa, 0xa8, 0xaa, 0xaa, 0xa9, 0xaa, + 0xab, 0xb0, 0xb4, 0xb0, 0xb0, 0xae, 0xb2, 0xb5, 0xa9, 0xaf, 0xb5, 0xb6, + 0xb9, 0xba, 0xb8, 0xb8, 0xbf, 0xc5, 0xc7, 0xbb, 0xaa, 0xb0, 0xbd, 0xb3, + 0xa9, 0xaf, 0xbc, 0xba, 0xb5, 0xb4, 0xbe, 0xc1, 0xc6, 0xc8, 0xcb, 0xc8, + 0xc2, 0xb7, 0xb8, 0xbb, 0xba, 0xbb, 0xb9, 0xaf, 0xa6, 0xa6, 0xaa, 0xb1, + 0xb0, 0xac, 0xa8, 0xa4, 0xa8, 0xa7, 0xaa, 0xaf, 0xb0, 0xb2, 0xb5, 0xad, + 0xa8, 0xa4, 0xa3, 0xa3, 0xa4, 0xa4, 0xa4, 0xa4, 0xa4, 0xa5, 0xa5, 0xa6, + 0xa9, 0xa9, 0xa8, 0xa8, 0xaa, 0xa7, 0xa5, 0xa6, 0xa9, 0xac, 0xaa, 0xaa, + 0xac, 0xaf, 0xaa, 0xa9, 0xaa, 0xac, 0xad, 0xb0, 0xac, 0xa7, 0xa4, 0xa5, + 0xaa, 0xb3, 0xb5, 0xb2, 0xb2, 0xb4, 0xb5, 0xb3, 0xb5, 0xb3, 0xaa, 0xa9, + 0xac, 0xaa, 0xa8, 0xaa, 0xaa, 0xa6, 0xa8, 0xa9, 0xa8, 0xa6, 0xaa, 0xa9, + 0xa5, 0xa4, 0xa4, 0xa6, 0xaa, 0xa9, 0xa5, 0xa6, 0xa7, 0xa8, 0xad, 0xaf, + 0xaa, 0xaa, 0xaa, 0xaa, 0xb0, 0xba, 0xbe, 0xdc, 0xf4, 0x18, 0x2f, 0x51, + 0x52, 0x42, 0x2e, 0x2e, 0x33, 0x46, 0x52, 0x59, 0x63, 0x6b, 0x6c, 0x70, + 0x73, 0x75, 0x76, 0x76, 0x76, 0x75, 0x74, 0x6b, 0x61, 0x50, 0x12, 0xca, + 0xb6, 0xae, 0xad, 0xaa, 0xa8, 0xa7, 0xa9, 0xaa, 0xaf, 0xb1, 0xb4, 0xb6, + 0xb6, 0xb4, 0xb8, 0xbc, 0xbb, 0xb5, 0xb5, 0xb6, 0xb7, 0xb0, 0xaf, 0xaf, + 0xb2, 0xb0, 0xae, 0xae, 0xad, 0xab, 0xa8, 0xa5, 0xab, 0xbc, 0xbd, 0xb9, + 0xbb, 0xbb, 0xb6, 0xb2, 0xb2, 0xb2, 0xb2, 0xb6, 0xbc, 0xbd, 0xba, 0xac, + 0xaa, 0xad, 0xaa, 0xaa, 0xac, 0xaa, 0xa9, 0xa7, 0xac, 0xb1, 0xb5, 0xb1, + 0xaf, 0xad, 0xb2, 0xb6, 0xa9, 0xa9, 0xb2, 0xb5, 0xb7, 0xb9, 0xba, 0xb6, + 0xbc, 0xc2, 0xc5, 0xc2, 0xb3, 0xb6, 0xc3, 0xbb, 0xaf, 0xb7, 0xc3, 0xbc, + 0xb4, 0xb6, 0xc2, 0xc2, 0xc3, 0xc5, 0xc8, 0xc5, 0xc3, 0xc2, 0xbd, 
0xc0, + 0xbc, 0xbb, 0xb6, 0xaf, 0xac, 0xad, 0xb4, 0xba, 0xb7, 0xb6, 0xb2, 0xa6, + 0xa4, 0xa5, 0xa7, 0xae, 0xb0, 0xac, 0xae, 0xaa, 0xa9, 0xa4, 0xa3, 0xa5, + 0xa7, 0xa5, 0xa4, 0xa5, 0xa4, 0xa7, 0xa6, 0xa7, 0xa9, 0xa9, 0xad, 0xac, + 0xab, 0xa8, 0xa6, 0xa8, 0xab, 0xb0, 0xb0, 0xb1, 0xb3, 0xb2, 0xad, 0xab, + 0xb2, 0xb6, 0xb6, 0xba, 0xb2, 0xab, 0xa4, 0xa4, 0xa4, 0xa4, 0xac, 0xb6, + 0xb7, 0xb6, 0xb4, 0xb0, 0xb0, 0xb3, 0xb2, 0xa7, 0xa9, 0xab, 0xad, 0xac, + 0xaa, 0xaa, 0xab, 0xa8, 0xa5, 0xa6, 0xa9, 0xaa, 0xa5, 0xa2, 0xa4, 0xa4, + 0xa6, 0xa6, 0xa7, 0xa7, 0xa8, 0xac, 0xaf, 0xb4, 0xbb, 0xb2, 0xb4, 0xba, + 0xc6, 0xdb, 0xec, 0x08, 0x34, 0x3a, 0x4c, 0x55, 0x54, 0x46, 0x36, 0x38, + 0x46, 0x55, 0x64, 0x66, 0x6a, 0x72, 0x73, 0x76, 0x76, 0x77, 0x77, 0x76, + 0x76, 0x76, 0x76, 0x73, 0x70, 0x5f, 0x16, 0xd1, 0xb7, 0xb0, 0xaf, 0xad, + 0xaa, 0xaa, 0xaa, 0xaa, 0xb2, 0xb3, 0xb5, 0xb6, 0xbc, 0xbb, 0xbb, 0xbb, + 0xb8, 0xb8, 0xb5, 0xb3, 0xb5, 0xb3, 0xb3, 0xb6, 0xba, 0xb5, 0xb0, 0xb1, + 0xaa, 0xa6, 0xa5, 0xa5, 0xaa, 0xbe, 0xc2, 0xc1, 0xbe, 0xbc, 0xb9, 0xb5, + 0xb4, 0xb4, 0xb0, 0xb0, 0xb6, 0xbc, 0xbe, 0xb2, 0xae, 0xba, 0xb5, 0xae, + 0xaa, 0xac, 0xae, 0xac, 0xae, 0xaf, 0xb0, 0xb0, 0xaf, 0xac, 0xb3, 0xb9, + 0xaf, 0xac, 0xb2, 0xb6, 0xb9, 0xb8, 0xb7, 0xb8, 0xbc, 0xc1, 0xc2, 0xc0, + 0xb2, 0xb6, 0xbd, 0xb4, 0xb5, 0xbd, 0xc2, 0xbc, 0xb8, 0xbb, 0xc2, 0xc5, + 0xc5, 0xc5, 0xc7, 0xc7, 0xc8, 0xc4, 0xc3, 0xc8, 0xc6, 0xc2, 0xbe, 0xba, + 0xb8, 0xb7, 0xb8, 0xbf, 0xbf, 0xbf, 0xbd, 0xb1, 0xaa, 0xa9, 0xa5, 0xa5, + 0xa7, 0xa6, 0xaa, 0xa8, 0xa8, 0xa7, 0xa6, 0xa6, 0xa6, 0xa5, 0xa4, 0xa3, + 0xa4, 0xa7, 0xa6, 0xa4, 0xa5, 0xa7, 0xaa, 0xad, 0xad, 0xaa, 0xa8, 0xaa, + 0xad, 0xb0, 0xb8, 0xb9, 0xb6, 0xb0, 0xaa, 0xaa, 0xb4, 0xb8, 0xb8, 0xb8, + 0xb7, 0xb4, 0xac, 0xa5, 0xa5, 0xa4, 0xa4, 0xb0, 0xb5, 0xb0, 0xb0, 0xac, + 0xaa, 0xa8, 0xa9, 0xa7, 0xa8, 0xa7, 0xa8, 0xaa, 0xab, 0xaa, 0xa8, 0xa9, + 0xa9, 0xa7, 0xa6, 0xa9, 0xa5, 0xa4, 0xa4, 0xa4, 0xa6, 0xa7, 0xa8, 0xab, + 0xaf, 0xb3, 0xb9, 0xc2, 0xce, 0xd7, 0xd1, 0xd0, 0xdf, 0x04, 0x27, 
0x35, + 0x4c, 0x5a, 0x5c, 0x57, 0x55, 0x4f, 0x49, 0x49, 0x57, 0x64, 0x6c, 0x70, + 0x73, 0x76, 0x78, 0x78, 0x78, 0x79, 0x78, 0x77, 0x77, 0x77, 0x76, 0x76, + 0x74, 0x64, 0x1e, 0xd7, 0xbc, 0xb2, 0xb0, 0xb0, 0xae, 0xac, 0xac, 0xb1, + 0xbc, 0xbc, 0xbb, 0xbc, 0xbc, 0xb8, 0xb6, 0xb9, 0xb9, 0xb6, 0xb2, 0xb4, + 0xb4, 0xb8, 0xbc, 0xbe, 0xbd, 0xb9, 0xb6, 0xb1, 0xa7, 0xa5, 0xa5, 0xa5, + 0xa9, 0xbf, 0xc5, 0xc3, 0xc5, 0xc1, 0xbc, 0xbb, 0xba, 0xb4, 0xaa, 0xa8, + 0xac, 0xb2, 0xb7, 0xb1, 0xac, 0xbc, 0xbd, 0xba, 0xb2, 0xb0, 0xb4, 0xb8, + 0xb7, 0xb1, 0xac, 0xaa, 0xac, 0xac, 0xb0, 0xb8, 0xb1, 0xab, 0xae, 0xb5, + 0xb9, 0xba, 0xb4, 0xb7, 0xb9, 0xbe, 0xc2, 0xbe, 0xb3, 0xba, 0xb6, 0xae, + 0xb7, 0xc2, 0xc7, 0xbc, 0xb7, 0xbb, 0xbe, 0xc2, 0xc3, 0xc4, 0xc4, 0xc7, + 0xc7, 0xc0, 0xbc, 0xbf, 0xbc, 0xbf, 0xc2, 0xc2, 0xc2, 0xc1, 0xc1, 0xc5, + 0xc6, 0xc2, 0xbb, 0xb8, 0xb8, 0xb2, 0xaa, 0xa6, 0xa6, 0xa9, 0xaa, 0xaa, + 0xa9, 0xab, 0xaf, 0xad, 0xaa, 0xa7, 0xa4, 0xa2, 0xa7, 0xa9, 0xa5, 0xa5, + 0xa4, 0xa6, 0xac, 0xb0, 0xb2, 0xb1, 0xaf, 0xaf, 0xae, 0xb3, 0xb8, 0xb9, + 0xb6, 0xac, 0xa7, 0xa8, 0xad, 0xb2, 0xb6, 0xba, 0xb5, 0xb6, 0xb4, 0xad, + 0xaa, 0xa6, 0xa4, 0xaa, 0xaa, 0xa7, 0xa6, 0xa6, 0xa4, 0xa4, 0xa7, 0xab, + 0xaa, 0xa6, 0xa5, 0xa6, 0xa9, 0xa8, 0xa8, 0xaa, 0xad, 0xaa, 0xa8, 0xaa, + 0xa4, 0xa4, 0xa5, 0xa4, 0xa5, 0xa8, 0xab, 0xb0, 0xb9, 0xc5, 0xc9, 0xd6, + 0xf8, 0x15, 0x15, 0x10, 0x0e, 0x22, 0x40, 0x54, 0x59, 0x5e, 0x60, 0x58, + 0x54, 0x58, 0x59, 0x60, 0x64, 0x6c, 0x73, 0x75, 0x77, 0x79, 0x78, 0x77, + 0x77, 0x78, 0x79, 0x78, 0x77, 0x77, 0x76, 0x76, 0x76, 0x6d, 0x3d, 0xef, + 0xc7, 0xbc, 0xb7, 0xb5, 0xb2, 0xb0, 0xb5, 0xb2, 0xb8, 0xbb, 0xba, 0xb9, + 0xbb, 0xb9, 0xba, 0xb9, 0xb6, 0xb6, 0xb6, 0xbc, 0xc4, 0xd2, 0xda, 0xd4, + 0xc4, 0xbd, 0xb6, 0xab, 0xa7, 0xa8, 0xa7, 0xa6, 0xa8, 0xb6, 0xc5, 0xc7, + 0xc7, 0xc5, 0xc0, 0xc0, 0xbd, 0xae, 0xa6, 0xa6, 0xa5, 0xa6, 0xaa, 0xaa, + 0xab, 0xb7, 0xbc, 0xbd, 0xb8, 0xb7, 0xb3, 0xb0, 0xaf, 0xae, 0xab, 0xab, + 0xac, 0xaf, 0xb1, 0xb6, 0xaf, 0xa7, 0xac, 0xb7, 0xbb, 0xb9, 0xb3, 
0xb6, + 0xb9, 0xbc, 0xb7, 0xba, 0xbc, 0xc2, 0xc0, 0xb7, 0xbb, 0xc3, 0xc8, 0xbb, + 0xb0, 0xae, 0xb9, 0xc0, 0xc2, 0xc5, 0xc4, 0xc4, 0xc5, 0xbc, 0xab, 0xa7, + 0xa8, 0xb1, 0xb6, 0xc0, 0xc6, 0xc4, 0xc4, 0xc8, 0xcf, 0xce, 0xc5, 0xb9, + 0xb5, 0xb1, 0xb1, 0xaa, 0xa8, 0xaa, 0xac, 0xaa, 0xa7, 0xaa, 0xb0, 0xad, + 0xac, 0xa8, 0xa4, 0xa4, 0xac, 0xaf, 0xaa, 0xa6, 0xa5, 0xa6, 0xb4, 0xbc, + 0xb8, 0xb7, 0xb8, 0xb5, 0xb1, 0xb1, 0xb2, 0xb3, 0xb4, 0xae, 0xa9, 0xaa, + 0xad, 0xaf, 0xb3, 0xbb, 0xbc, 0xba, 0xbb, 0xbb, 0xad, 0xa8, 0xa9, 0xa8, + 0xa8, 0xa6, 0xa4, 0xa6, 0xa5, 0xa6, 0xa5, 0xa8, 0xa7, 0xa6, 0xa6, 0xa8, + 0xa7, 0xa6, 0xa7, 0xaa, 0xae, 0xaf, 0xab, 0xad, 0xa6, 0xa4, 0xa4, 0xa7, + 0xa8, 0xac, 0xaf, 0xb0, 0xbb, 0xd4, 0xf4, 0x05, 0x15, 0x38, 0x4c, 0x46, + 0x3e, 0x3d, 0x4c, 0x5a, 0x63, 0x64, 0x5f, 0x5e, 0x5a, 0x5e, 0x64, 0x6a, + 0x6f, 0x72, 0x76, 0x76, 0x77, 0x79, 0x78, 0x77, 0x78, 0x79, 0x7a, 0x78, + 0x78, 0x78, 0x79, 0x79, 0x78, 0x74, 0x66, 0x39, 0xf7, 0xd5, 0xc2, 0xbb, + 0xb6, 0xb6, 0xba, 0xb7, 0xb7, 0xb8, 0xba, 0xba, 0xbc, 0xbb, 0xba, 0xb6, + 0xb8, 0xc1, 0xd6, 0x04, 0x26, 0x40, 0x46, 0x32, 0x0d, 0xd7, 0xb8, 0xab, + 0xab, 0xb1, 0xaa, 0xa4, 0xa7, 0xab, 0xb8, 0xb8, 0xb6, 0xbb, 0xbc, 0xbb, + 0xb0, 0xa6, 0xa6, 0xa7, 0xa4, 0xa6, 0xa9, 0xaa, 0xad, 0xb4, 0xba, 0xbb, + 0xb6, 0xb3, 0xb3, 0xb6, 0xb3, 0xae, 0xad, 0xb0, 0xaf, 0xad, 0xac, 0xb4, + 0xaf, 0xa7, 0xac, 0xb6, 0xba, 0xb9, 0xb6, 0xb5, 0xb4, 0xb0, 0xac, 0xb0, + 0xbf, 0xc8, 0xc6, 0xbe, 0xbd, 0xc2, 0xc5, 0xbc, 0xb4, 0xad, 0xb2, 0xc0, + 0xc5, 0xc5, 0xc1, 0xbc, 0xc0, 0xbc, 0xad, 0xa5, 0xa7, 0xad, 0xb1, 0xbe, + 0xc5, 0xba, 0xbe, 0xc8, 0xc9, 0xcf, 0xd0, 0xc4, 0xb8, 0xb2, 0xb6, 0xab, + 0xa6, 0xaa, 0xaf, 0xab, 0xa7, 0xaf, 0xb0, 0xad, 0xac, 0xa9, 0xa6, 0xa8, + 0xad, 0xb1, 0xb5, 0xb3, 0xac, 0xaa, 0xb6, 0xbc, 0xb6, 0xb3, 0xb5, 0xb3, + 0xb1, 0xb0, 0xae, 0xac, 0xab, 0xab, 0xa9, 0xad, 0xb0, 0xb2, 0xb1, 0xb3, + 0xbb, 0xbc, 0xb6, 0xb8, 0xb2, 0xab, 0xae, 0xac, 0xac, 0xaa, 0xaa, 0xad, + 0xab, 0xa8, 0xa8, 0xa8, 0xaa, 0xab, 0xaa, 0xa6, 0xa9, 0xac, 0xa9, 
0xaa, + 0xb0, 0xb0, 0xb2, 0xb3, 0xa5, 0xa3, 0xa6, 0xaa, 0xa9, 0xab, 0xae, 0xb0, + 0xb6, 0xc9, 0xfb, 0x2e, 0x46, 0x4c, 0x52, 0x5c, 0x5c, 0x58, 0x58, 0x60, + 0x64, 0x66, 0x64, 0x63, 0x63, 0x63, 0x69, 0x70, 0x73, 0x76, 0x77, 0x78, + 0x78, 0x78, 0x78, 0x78, 0x77, 0x79, 0x7a, 0x78, 0x78, 0x79, 0x7a, 0x7a, + 0x7a, 0x79, 0x76, 0x6e, 0x4f, 0x1e, 0xea, 0xcf, 0xbf, 0xba, 0xbc, 0xb9, + 0xb6, 0xb9, 0xbc, 0xc5, 0xcd, 0xc5, 0xc1, 0xc9, 0xd4, 0xf5, 0x2c, 0x52, + 0x56, 0x55, 0x59, 0x61, 0x5b, 0x10, 0xc1, 0xb3, 0xb3, 0xb7, 0xb0, 0xa6, + 0xa8, 0xa8, 0xa8, 0xa7, 0xaa, 0xb5, 0xb5, 0xab, 0xa7, 0xa4, 0xa4, 0xaa, + 0xa9, 0xaa, 0xaa, 0xaa, 0xad, 0xb1, 0xb6, 0xb6, 0xb0, 0xb2, 0xb7, 0xbc, + 0xb9, 0xb4, 0xb1, 0xaf, 0xae, 0xae, 0xab, 0xae, 0xad, 0xa8, 0xa9, 0xb3, + 0xba, 0xb6, 0xb6, 0xb2, 0xae, 0xa9, 0xa7, 0xac, 0xb4, 0xc2, 0xc7, 0xc3, + 0xc1, 0xc5, 0xc3, 0xbc, 0xb9, 0xb6, 0xb9, 0xbf, 0xc5, 0xc3, 0xc2, 0xc1, + 0xc3, 0xc2, 0xb1, 0xa8, 0xaa, 0xac, 0xb0, 0xbc, 0xc2, 0xb8, 0xbd, 0xc8, + 0xca, 0xc9, 0xc9, 0xc7, 0xc0, 0xb5, 0xb0, 0xa6, 0xa4, 0xa5, 0xb0, 0xb3, + 0xad, 0xaf, 0xb0, 0xae, 0xaa, 0xa9, 0xa9, 0xaa, 0xb0, 0xb7, 0xbc, 0xc0, + 0xbb, 0xb0, 0xb0, 0xb4, 0xb5, 0xb0, 0xb0, 0xb0, 0xaa, 0xa9, 0xaa, 0xab, + 0xab, 0xaa, 0xa8, 0xac, 0xab, 0xac, 0xae, 0xb0, 0xb2, 0xb4, 0xb3, 0xb3, + 0xb2, 0xae, 0xb0, 0xb5, 0xb2, 0xb0, 0xb0, 0xb3, 0xb3, 0xaf, 0xaa, 0xa7, + 0xa9, 0xab, 0xab, 0xa7, 0xa9, 0xab, 0xa9, 0xa9, 0xaa, 0xaa, 0xb0, 0xb4, + 0xa7, 0xa4, 0xa7, 0xa9, 0xaa, 0xac, 0xaf, 0xb0, 0xbb, 0xc7, 0xdf, 0x05, + 0x34, 0x51, 0x5b, 0x5c, 0x63, 0x65, 0x64, 0x65, 0x66, 0x66, 0x67, 0x65, + 0x65, 0x6b, 0x70, 0x75, 0x77, 0x77, 0x78, 0x79, 0x79, 0x78, 0x78, 0x79, + 0x78, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x7a, 0x78, 0x77, + 0x75, 0x69, 0x4c, 0x1d, 0xef, 0xd2, 0xc5, 0xbf, 0xbd, 0xc1, 0xd0, 0xfe, + 0xf9, 0xde, 0xf0, 0xfe, 0x20, 0x48, 0x51, 0x4e, 0x51, 0x52, 0x58, 0x60, + 0x62, 0x2a, 0xcb, 0xb8, 0xb2, 0xb6, 0xb3, 0xa9, 0xaa, 0xab, 0xa8, 0xaa, + 0xb0, 0xb6, 0xb4, 0xaa, 0xa8, 0xa9, 0xa7, 0xad, 0xb2, 0xb2, 0xb0, 
0xac, + 0xad, 0xb0, 0xb2, 0xb0, 0xac, 0xb0, 0xbc, 0xbb, 0xb6, 0xb7, 0xb0, 0xab, + 0xab, 0xae, 0xac, 0xad, 0xaa, 0xa7, 0xa6, 0xac, 0xb6, 0xb7, 0xb4, 0xaf, + 0xa9, 0xaa, 0xaa, 0xb0, 0xb4, 0xb6, 0xbc, 0xbf, 0xc2, 0xc4, 0xc4, 0xbc, + 0xbb, 0xb7, 0xbc, 0xc2, 0xc4, 0xc3, 0xc2, 0xbf, 0xc5, 0xc2, 0xb4, 0xaa, + 0xaa, 0xac, 0xb0, 0xb8, 0xbc, 0xb9, 0xc0, 0xc3, 0xc8, 0xcb, 0xc8, 0xc6, + 0xc2, 0xb6, 0xb1, 0xaa, 0xa5, 0xa4, 0xa8, 0xb0, 0xae, 0xac, 0xaa, 0xaa, + 0xaf, 0xb0, 0xb0, 0xb6, 0xba, 0xbf, 0xc0, 0xbf, 0xb8, 0xb0, 0xae, 0xb0, + 0xb2, 0xb0, 0xae, 0xab, 0xa5, 0xa3, 0xa6, 0xaa, 0xab, 0xa9, 0xa8, 0xac, + 0xab, 0xa9, 0xaa, 0xaa, 0xac, 0xac, 0xaf, 0xad, 0xae, 0xae, 0xb4, 0xb7, + 0xb6, 0xb7, 0xb8, 0xb6, 0xb8, 0xb6, 0xad, 0xaa, 0xad, 0xaf, 0xab, 0xa7, + 0xaa, 0xa8, 0xa7, 0xa8, 0xa7, 0xa9, 0xa8, 0xab, 0xaa, 0xaa, 0xab, 0xaa, + 0xaa, 0xaf, 0xb8, 0xc7, 0xe0, 0x09, 0x28, 0x34, 0x39, 0x46, 0x57, 0x61, + 0x66, 0x68, 0x66, 0x65, 0x66, 0x68, 0x66, 0x65, 0x69, 0x70, 0x74, 0x76, + 0x76, 0x77, 0x79, 0x79, 0x78, 0x78, 0x78, 0x79, 0x79, 0x79, 0x78, 0x78, + 0x78, 0x78, 0x78, 0x77, 0x77, 0x78, 0x77, 0x77, 0x78, 0x76, 0x71, 0x66, + 0x4c, 0x1f, 0xf8, 0xdd, 0xd7, 0xdb, 0x22, 0x40, 0x22, 0x1f, 0x2f, 0x3b, + 0x4e, 0x4e, 0x4b, 0x46, 0x46, 0x4d, 0x55, 0x5c, 0x5e, 0x28, 0xce, 0xbe, + 0xb8, 0xb5, 0xb2, 0xab, 0xb0, 0xad, 0xad, 0xb1, 0xb6, 0xb6, 0xb6, 0xb0, + 0xaa, 0xac, 0xb0, 0xbd, 0xc0, 0xb9, 0xb5, 0xb3, 0xb1, 0xb0, 0xb0, 0xb1, + 0xb0, 0xb5, 0xba, 0xb4, 0xb1, 0xbc, 0xb7, 0xaf, 0xa9, 0xa9, 0xab, 0xac, + 0xab, 0xa9, 0xa7, 0xa8, 0xb1, 0xb6, 0xb2, 0xb0, 0xb0, 0xaa, 0xaa, 0xab, + 0xb1, 0xb5, 0xb8, 0xbc, 0xc2, 0xc3, 0xc9, 0xc2, 0xc8, 0xbd, 0xb9, 0xc5, + 0xc2, 0xc2, 0xc0, 0xc0, 0xc3, 0xbc, 0xb0, 0xac, 0xac, 0xad, 0xb2, 0xbf, + 0xc0, 0xb2, 0xaa, 0xb1, 0xbd, 0xc6, 0xc9, 0xc8, 0xc1, 0xb5, 0xb0, 0xaa, + 0xa7, 0xa4, 0xa4, 0xa8, 0xaf, 0xad, 0xa8, 0xa9, 0xae, 0xb7, 0xbc, 0xbe, + 0xbe, 0xbf, 0xbc, 0xb5, 0xb0, 0xad, 0xab, 0xa7, 0xad, 0xaf, 0xaa, 0xa6, + 0xa4, 0xa4, 0xa4, 0xa5, 0xa9, 0xa8, 0xa5, 0xa5, 0xa5, 0xa8, 0xa7, 
0xa7, + 0xa7, 0xad, 0xac, 0xa9, 0xa9, 0xae, 0xb5, 0xb1, 0xb3, 0xb7, 0xbc, 0xbc, + 0xbc, 0xb5, 0xb0, 0xaf, 0xb0, 0xb4, 0xb0, 0xb0, 0xad, 0xa8, 0xa8, 0xa9, + 0xa8, 0xa5, 0xa5, 0xa5, 0xad, 0xb2, 0xb5, 0xb2, 0xac, 0xb7, 0xcf, 0xf4, + 0x1b, 0x3b, 0x48, 0x4f, 0x54, 0x56, 0x5a, 0x62, 0x66, 0x6a, 0x6b, 0x68, + 0x63, 0x63, 0x64, 0x69, 0x6e, 0x73, 0x75, 0x76, 0x77, 0x77, 0x78, 0x78, + 0x76, 0x77, 0x78, 0x79, 0x79, 0x79, 0x78, 0x78, 0x77, 0x78, 0x78, 0x78, + 0x79, 0x78, 0x79, 0x78, 0x78, 0x78, 0x77, 0x77, 0x73, 0x6a, 0x57, 0x41, + 0x31, 0x40, 0x63, 0x5a, 0x4c, 0x4c, 0x4c, 0x4c, 0x44, 0x3c, 0x3a, 0x36, + 0x38, 0x46, 0x51, 0x58, 0x5d, 0x25, 0xce, 0xc4, 0xc4, 0xb9, 0xb0, 0xad, + 0xb1, 0xae, 0xad, 0xb0, 0xb1, 0xb2, 0xb0, 0xaf, 0xad, 0xab, 0xb7, 0xc6, + 0xc6, 0xc2, 0xc1, 0xbc, 0xb5, 0xaf, 0xb2, 0xb7, 0xb6, 0xb8, 0xb6, 0xad, + 0xb0, 0xbd, 0xbe, 0xb6, 0xb0, 0xaa, 0xaa, 0xaa, 0xaa, 0xa9, 0xa9, 0xa8, + 0xae, 0xb6, 0xb2, 0xb6, 0xb6, 0xb0, 0xa8, 0xa6, 0xa9, 0xac, 0xb0, 0xb5, + 0xba, 0xc1, 0xc7, 0xc2, 0xcf, 0xc0, 0xb2, 0xbe, 0xb6, 0xb5, 0xb6, 0xbb, + 0xbc, 0xb6, 0xaf, 0xaf, 0xac, 0xb1, 0xbb, 0xc2, 0xb9, 0xaa, 0xa4, 0xa4, + 0xab, 0xb8, 0xc2, 0xc3, 0xbd, 0xb0, 0xa7, 0xa4, 0xa4, 0xa3, 0xa4, 0xa9, + 0xb9, 0xb8, 0xb1, 0xb0, 0xab, 0xb6, 0xbf, 0xc0, 0xc2, 0xbe, 0xbc, 0xb6, + 0xb0, 0xab, 0xa9, 0xa4, 0xaa, 0xae, 0xac, 0xa6, 0xa4, 0xa5, 0xa4, 0xa4, + 0xa8, 0xaa, 0xa5, 0xa4, 0xa5, 0xa8, 0xa9, 0xaa, 0xaa, 0xad, 0xa8, 0xa4, + 0xa7, 0xac, 0xae, 0xac, 0xae, 0xb3, 0xbb, 0xbb, 0xb6, 0xad, 0xae, 0xb1, + 0xb4, 0xb7, 0xb6, 0xb0, 0xaa, 0xa8, 0xa9, 0xa6, 0xa7, 0xa5, 0xa7, 0xa8, + 0xb3, 0xb7, 0xb6, 0xb3, 0xad, 0xb6, 0xd8, 0x06, 0x2d, 0x42, 0x4f, 0x58, + 0x5b, 0x5e, 0x62, 0x66, 0x68, 0x6b, 0x6b, 0x67, 0x5f, 0x5f, 0x66, 0x6d, + 0x70, 0x74, 0x75, 0x76, 0x77, 0x77, 0x78, 0x76, 0x76, 0x76, 0x77, 0x77, + 0x78, 0x78, 0x78, 0x78, 0x76, 0x77, 0x79, 0x79, 0x7a, 0x79, 0x7a, 0x7a, + 0x79, 0x79, 0x7a, 0x79, 0x77, 0x76, 0x75, 0x72, 0x6f, 0x6e, 0x6a, 0x63, + 0x5b, 0x54, 0x4d, 0x41, 0x36, 0x2a, 0x26, 0x26, 0x2b, 0x3a, 0x4b, 
0x57, + 0x5e, 0x28, 0xcd, 0xc2, 0xbe, 0xb8, 0xb0, 0xab, 0xac, 0xac, 0xae, 0xaf, + 0xad, 0xac, 0xab, 0xae, 0xaa, 0xaa, 0xb9, 0xc8, 0xc7, 0xc5, 0xc3, 0xc1, + 0xb9, 0xb0, 0xb2, 0xbc, 0xb6, 0xb1, 0xb0, 0xab, 0xb0, 0xbc, 0xbd, 0xb8, + 0xb4, 0xaf, 0xaa, 0xaa, 0xaa, 0xa7, 0xa8, 0xa8, 0xb0, 0xb2, 0xb0, 0xb6, + 0xb3, 0xad, 0xa8, 0xa9, 0xaa, 0xae, 0xb0, 0xb0, 0xb5, 0xb3, 0xb8, 0xba, + 0xbc, 0xb2, 0xa8, 0xa8, 0xa4, 0xa4, 0xa7, 0xaa, 0xa8, 0xa8, 0xaa, 0xab, + 0xa8, 0xad, 0xbc, 0xc3, 0xb6, 0xa8, 0xa6, 0xa4, 0xa4, 0xaa, 0xb8, 0xc2, + 0xc2, 0xaf, 0xa4, 0xa5, 0xa6, 0xa4, 0xa4, 0xaa, 0xbf, 0xc2, 0xc0, 0xbc, + 0xb5, 0xb4, 0xc0, 0xc4, 0xc9, 0xc3, 0xbd, 0xba, 0xb8, 0xb7, 0xb1, 0xa6, + 0xa6, 0xad, 0xb0, 0xa9, 0xa5, 0xa5, 0xa4, 0xae, 0xb5, 0xb6, 0xae, 0xa7, + 0xa8, 0xa6, 0xa8, 0xa9, 0xac, 0xad, 0xa8, 0xa4, 0xa7, 0xaa, 0xaa, 0xaf, + 0xaa, 0xaa, 0xb1, 0xb5, 0xb0, 0xaa, 0xa8, 0xaa, 0xae, 0xb1, 0xb8, 0xbb, + 0xb3, 0xac, 0xa9, 0xa7, 0xa4, 0xa4, 0xaa, 0xaf, 0xb7, 0xbc, 0xb6, 0xaf, + 0xac, 0xb1, 0xc9, 0xf2, 0x25, 0x41, 0x4f, 0x58, 0x5e, 0x61, 0x64, 0x67, + 0x69, 0x6a, 0x68, 0x62, 0x5e, 0x63, 0x69, 0x6e, 0x70, 0x76, 0x77, 0x76, + 0x77, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x77, 0x77, 0x77, + 0x76, 0x77, 0x79, 0x79, 0x79, 0x78, 0x78, 0x7a, 0x7a, 0x79, 0x7a, 0x79, + 0x78, 0x77, 0x76, 0x76, 0x74, 0x70, 0x67, 0x5f, 0x55, 0x4c, 0x40, 0x33, + 0x28, 0x1b, 0x19, 0x1f, 0x25, 0x36, 0x46, 0x56, 0x5c, 0x29, 0xcc, 0xbe, + 0xba, 0xb5, 0xb1, 0xae, 0xaf, 0xb6, 0xb5, 0xb1, 0xad, 0xac, 0xaa, 0xaa, + 0xa8, 0xaa, 0xbb, 0xc8, 0xcc, 0xc6, 0xbf, 0xbf, 0xb6, 0xb0, 0xae, 0xb3, + 0xb6, 0xb4, 0xab, 0xac, 0xb5, 0xb9, 0xbc, 0xbb, 0xb3, 0xac, 0xaa, 0xad, + 0xaa, 0xa9, 0xaa, 0xaa, 0xb0, 0xb1, 0xad, 0xb0, 0xad, 0xa9, 0xa5, 0xa9, + 0xae, 0xaf, 0xb0, 0xb0, 0xb3, 0xb5, 0xb3, 0xb8, 0xb6, 0xb0, 0xa8, 0xa5, + 0xa7, 0xa6, 0xa4, 0xa5, 0xa4, 0xa4, 0xa6, 0xa8, 0xaa, 0xb0, 0xba, 0xc4, + 0xbd, 0xb0, 0xa8, 0xa5, 0xa4, 0xa7, 0xac, 0xb4, 0xb9, 0xb5, 0xaa, 0xaa, + 0xa7, 0xa3, 0xa4, 0xa4, 0xac, 0xb5, 0xbc, 0xc2, 0xc1, 0xb8, 0xc2, 
0xc8, + 0xcb, 0xc8, 0xbf, 0xbc, 0xbe, 0xc2, 0xbc, 0xaa, 0xa7, 0xae, 0xb5, 0xb6, + 0xb0, 0xaa, 0xa6, 0xb0, 0xba, 0xbd, 0xb3, 0xab, 0xa8, 0xa8, 0xa8, 0xa9, + 0xac, 0xab, 0xa9, 0xa8, 0xaa, 0xa7, 0xac, 0xb1, 0xb0, 0xa8, 0xaa, 0xaf, + 0xab, 0xaa, 0xaa, 0xa8, 0xa9, 0xb6, 0xc1, 0xc2, 0xc2, 0xbb, 0xb0, 0xaa, + 0xae, 0xaa, 0xab, 0xb0, 0xac, 0xb0, 0xae, 0xaa, 0xaa, 0xb6, 0xc5, 0xe8, + 0x18, 0x38, 0x49, 0x55, 0x5a, 0x60, 0x65, 0x67, 0x6a, 0x6a, 0x64, 0x5e, + 0x63, 0x6a, 0x6c, 0x6d, 0x6e, 0x75, 0x76, 0x76, 0x77, 0x76, 0x76, 0x76, + 0x74, 0x74, 0x75, 0x76, 0x76, 0x76, 0x77, 0x78, 0x76, 0x76, 0x77, 0x78, + 0x78, 0x76, 0x77, 0x7a, 0x7a, 0x78, 0x79, 0x78, 0x77, 0x77, 0x76, 0x73, + 0x70, 0x6b, 0x63, 0x57, 0x48, 0x3b, 0x31, 0x26, 0x1e, 0x10, 0x16, 0x1b, + 0x21, 0x30, 0x46, 0x57, 0x59, 0x22, 0xce, 0xb7, 0xb0, 0xb0, 0xb0, 0xb0, + 0xb5, 0xbe, 0xbd, 0xb6, 0xb1, 0xae, 0xaa, 0xaa, 0xab, 0xab, 0xbb, 0xc9, + 0xc9, 0xc1, 0xb9, 0xbc, 0xb6, 0xb2, 0xb1, 0xb1, 0xb4, 0xb0, 0xaa, 0xb0, + 0xb3, 0xb6, 0xbb, 0xb8, 0xb3, 0xaa, 0xa9, 0xaa, 0xab, 0xa9, 0xa8, 0xaa, + 0xae, 0xb0, 0xab, 0xae, 0xaa, 0xa9, 0xa8, 0xb6, 0xb4, 0xb1, 0xb0, 0xb0, + 0xb9, 0xb3, 0xb3, 0xb7, 0xb6, 0xb3, 0xaa, 0xa4, 0xa6, 0xa8, 0xa9, 0xaa, + 0xa8, 0xa7, 0xa7, 0xa8, 0xab, 0xb5, 0xba, 0xbf, 0xb9, 0xae, 0xaa, 0xab, + 0xa8, 0xa4, 0xa6, 0xab, 0xaf, 0xbb, 0xbf, 0xbe, 0xb7, 0xae, 0xa7, 0xa4, + 0xa4, 0xa6, 0xa9, 0xb2, 0xc0, 0xc0, 0xc5, 0xcb, 0xd2, 0xc8, 0xc1, 0xbf, + 0xbb, 0xc0, 0xbb, 0xad, 0xaa, 0xaf, 0xb6, 0xbb, 0xbc, 0xb1, 0xa7, 0xa9, + 0xb6, 0xbb, 0xb3, 0xac, 0xa9, 0xa7, 0xa5, 0xa9, 0xb4, 0xb3, 0xb6, 0xb9, + 0xb4, 0xac, 0xb0, 0xb0, 0xb0, 0xab, 0xaa, 0xac, 0xaa, 0xa9, 0xa9, 0xa9, + 0xac, 0xbc, 0xc1, 0xc3, 0xc3, 0xc1, 0xb3, 0xb3, 0xbb, 0xb6, 0xae, 0xaf, + 0xa7, 0xaa, 0xac, 0xaa, 0xaa, 0xba, 0xce, 0xee, 0x11, 0x2c, 0x3c, 0x49, + 0x52, 0x5e, 0x65, 0x68, 0x69, 0x6a, 0x61, 0x60, 0x67, 0x6f, 0x6f, 0x68, + 0x6b, 0x70, 0x72, 0x75, 0x75, 0x75, 0x76, 0x75, 0x73, 0x73, 0x73, 0x74, + 0x75, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x78, 0x78, 0x77, 0x77, 
0x79, + 0x79, 0x77, 0x78, 0x76, 0x76, 0x75, 0x75, 0x71, 0x6e, 0x67, 0x5a, 0x4b, + 0x40, 0x2e, 0x25, 0x1d, 0x16, 0x10, 0x16, 0x18, 0x22, 0x34, 0x4a, 0x56, + 0x54, 0x16, 0xca, 0xb6, 0xac, 0xb0, 0xb1, 0xb2, 0xb5, 0xbc, 0xbf, 0xba, + 0xb6, 0xb2, 0xad, 0xad, 0xac, 0xab, 0xbc, 0xc8, 0xc5, 0xbc, 0xb4, 0xb5, + 0xb0, 0xae, 0xad, 0xb0, 0xb1, 0xaf, 0xaf, 0xaf, 0xb0, 0xb6, 0xb6, 0xb5, + 0xb1, 0xac, 0xaa, 0xab, 0xad, 0xaa, 0xaa, 0xaa, 0xac, 0xb0, 0xab, 0xaa, + 0xab, 0xad, 0xaa, 0xae, 0xb1, 0xb6, 0xb6, 0xb2, 0xb5, 0xb4, 0xb6, 0xb7, + 0xb5, 0xb0, 0xaa, 0xa5, 0xa4, 0xa7, 0xae, 0xb6, 0xb4, 0xac, 0xaa, 0xa9, + 0xaa, 0xb3, 0xb6, 0xbe, 0xbe, 0xba, 0xad, 0xa8, 0xa6, 0xa6, 0xa8, 0xaa, + 0xaa, 0xb0, 0xc2, 0xca, 0xc9, 0xc5, 0xbc, 0xaa, 0xaa, 0xa9, 0xa8, 0xad, + 0xb8, 0xbd, 0xbf, 0xbb, 0xbe, 0xc2, 0xc4, 0xc1, 0xb2, 0xb3, 0xb1, 0xaa, + 0xb0, 0xbb, 0xbf, 0xbc, 0xbc, 0xaf, 0xa9, 0xa8, 0xad, 0xb1, 0xae, 0xa9, + 0xa7, 0xa8, 0xa7, 0xa6, 0xb1, 0xbf, 0xc0, 0xc1, 0xc0, 0xb6, 0xbb, 0xb5, + 0xab, 0xa7, 0xaa, 0xab, 0xa7, 0xa8, 0xab, 0xaa, 0xb5, 0xc3, 0xc2, 0xbf, + 0xbf, 0xba, 0xac, 0xb7, 0xc4, 0xc0, 0xb3, 0xae, 0xa8, 0xaa, 0xaa, 0xaa, + 0xaa, 0xb6, 0xc9, 0xe4, 0x00, 0x21, 0x34, 0x41, 0x4d, 0x57, 0x5f, 0x67, + 0x64, 0x60, 0x58, 0x4d, 0x41, 0x52, 0x63, 0x5e, 0x68, 0x6b, 0x70, 0x71, + 0x71, 0x73, 0x73, 0x74, 0x72, 0x70, 0x71, 0x73, 0x74, 0x75, 0x75, 0x75, + 0x76, 0x75, 0x76, 0x77, 0x77, 0x78, 0x78, 0x78, 0x77, 0x76, 0x76, 0x76, + 0x74, 0x73, 0x73, 0x70, 0x6c, 0x63, 0x53, 0x43, 0x35, 0x26, 0x21, 0x1b, + 0x13, 0x12, 0x14, 0x16, 0x22, 0x39, 0x4f, 0x58, 0x4b, 0x0a, 0xcb, 0xb5, + 0xaf, 0xb0, 0xb0, 0xb3, 0xb4, 0xb9, 0xbc, 0xbc, 0xbc, 0xb7, 0xb1, 0xad, + 0xac, 0xab, 0xb0, 0xbc, 0xc0, 0xbc, 0xb4, 0xb3, 0xad, 0xac, 0xac, 0xaf, + 0xb0, 0xae, 0xb0, 0xb3, 0xb4, 0xb6, 0xb3, 0xb0, 0xae, 0xac, 0xab, 0xac, + 0xab, 0xaa, 0xaa, 0xab, 0xad, 0xb0, 0xab, 0xac, 0xae, 0xb6, 0xb4, 0xae, + 0xb5, 0xb8, 0xb3, 0xb2, 0xb4, 0xb5, 0xb5, 0xb9, 0xb7, 0xaa, 0xac, 0xaa, + 0xa9, 0xb1, 0xc0, 0xc7, 0xc5, 0xbf, 0xb3, 0xad, 0xa9, 0xb1, 0xbc, 
0xc2, + 0xbf, 0xbd, 0xb6, 0xae, 0xa8, 0xaa, 0xa8, 0xa8, 0xa8, 0xaa, 0xb8, 0xc1, + 0xc4, 0xc2, 0xb8, 0xb2, 0xad, 0xab, 0xa9, 0xaf, 0xbd, 0xbc, 0xbd, 0xb6, + 0xab, 0xb8, 0xc3, 0xc0, 0xb6, 0xb4, 0xb0, 0xb5, 0xc2, 0xc8, 0xc2, 0xbc, + 0xb5, 0xab, 0xad, 0xa9, 0xa7, 0xa8, 0xa8, 0xa7, 0xaa, 0xa8, 0xa6, 0xa7, + 0xb3, 0xc3, 0xc5, 0xc1, 0xbe, 0xb5, 0xb8, 0xbb, 0xb4, 0xb0, 0xb4, 0xb2, + 0xb0, 0xaa, 0xab, 0xad, 0xb9, 0xc3, 0xc0, 0xba, 0xb9, 0xb7, 0xae, 0xb0, + 0xbc, 0xba, 0xb1, 0xab, 0xa8, 0xab, 0xaa, 0xac, 0xa9, 0xac, 0xbc, 0xd7, + 0xfd, 0x20, 0x33, 0x3e, 0x4c, 0x59, 0x61, 0x65, 0x61, 0x58, 0x46, 0xfc, + 0xe0, 0x02, 0x3d, 0x58, 0x65, 0x69, 0x6d, 0x70, 0x70, 0x71, 0x72, 0x72, + 0x6e, 0x6e, 0x6e, 0x70, 0x73, 0x74, 0x74, 0x73, 0x74, 0x73, 0x73, 0x76, + 0x76, 0x76, 0x77, 0x77, 0x77, 0x76, 0x76, 0x76, 0x74, 0x73, 0x71, 0x6f, + 0x6a, 0x5e, 0x4c, 0x38, 0x26, 0x19, 0x12, 0x10, 0x11, 0x16, 0x11, 0x10, + 0x23, 0x3b, 0x52, 0x58, 0x49, 0x0e, 0xd1, 0xbd, 0xb6, 0xb6, 0xb1, 0xb0, + 0xb7, 0xba, 0xba, 0xba, 0xbc, 0xba, 0xb6, 0xb0, 0xab, 0xaa, 0xa8, 0xab, + 0xb3, 0xb6, 0xb3, 0xb2, 0xae, 0xb6, 0xae, 0xae, 0xb0, 0xad, 0xb5, 0xb5, + 0xb2, 0xb2, 0xb1, 0xb1, 0xb1, 0xb2, 0xaf, 0xaa, 0xac, 0xaa, 0xaa, 0xac, + 0xaf, 0xb0, 0xad, 0xa9, 0xa9, 0xb6, 0xbc, 0xb5, 0xb5, 0xb6, 0xb3, 0xb8, + 0xbd, 0xbe, 0xb7, 0xb2, 0xb2, 0xa8, 0xaa, 0xaf, 0xb2, 0xbe, 0xc9, 0xc8, + 0xc8, 0xc9, 0xc2, 0xbc, 0xb8, 0xb8, 0xbf, 0xc0, 0xbf, 0xbf, 0xc1, 0xb8, + 0xb0, 0xb8, 0xb9, 0xbb, 0xbe, 0xbc, 0xb6, 0xb5, 0xb5, 0xb2, 0xb2, 0xb5, + 0xaf, 0xac, 0xaa, 0xb4, 0xc5, 0xc1, 0xba, 0xb5, 0xaa, 0xae, 0xbc, 0xc3, + 0xbf, 0xba, 0xb8, 0xbb, 0xc4, 0xc8, 0xc2, 0xb8, 0xaf, 0xb0, 0xb4, 0xaf, + 0xab, 0xa8, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xb0, 0xc1, 0xc7, 0xc7, 0xc2, + 0xc2, 0xba, 0xb3, 0xbc, 0xbd, 0xbc, 0xbd, 0xbf, 0xbe, 0xb7, 0xb0, 0xb0, + 0xb3, 0xbc, 0xb9, 0xb4, 0xab, 0xad, 0xac, 0xa8, 0xa9, 0xab, 0xaa, 0xa9, + 0xa9, 0xa8, 0xac, 0xad, 0xac, 0xaa, 0xba, 0xd7, 0xfa, 0x1c, 0x31, 0x41, + 0x52, 0x5c, 0x61, 0x65, 0x5b, 0x4c, 0x16, 0xd5, 0xe6, 0xfa, 0x2b, 
0x59, + 0x64, 0x6a, 0x6d, 0x70, 0x70, 0x72, 0x70, 0x6e, 0x6e, 0x6e, 0x6d, 0x70, + 0x71, 0x72, 0x72, 0x71, 0x72, 0x72, 0x72, 0x73, 0x73, 0x75, 0x76, 0x76, + 0x76, 0x75, 0x76, 0x74, 0x72, 0x70, 0x6c, 0x6a, 0x63, 0x57, 0x42, 0x2e, + 0x20, 0x17, 0x10, 0x11, 0x10, 0x0b, 0x0a, 0x10, 0x20, 0x3b, 0x52, 0x58, + 0x4e, 0x24, 0xe9, 0xc6, 0xbc, 0xbc, 0xb5, 0xb6, 0xbe, 0xbc, 0xba, 0xb9, + 0xbb, 0xbb, 0xb8, 0xb4, 0xae, 0xa9, 0xa6, 0xaa, 0xb0, 0xb0, 0xb0, 0xab, + 0xab, 0xc2, 0xb0, 0xad, 0xac, 0xaa, 0xb1, 0xb4, 0xb2, 0xb2, 0xaf, 0xb0, + 0xb0, 0xb6, 0xb7, 0xac, 0xaa, 0xa8, 0xa9, 0xaa, 0xb2, 0xb9, 0xb6, 0xae, + 0xa8, 0xb3, 0xbf, 0xbd, 0xb9, 0xb6, 0xb1, 0xba, 0xc2, 0xc1, 0xb2, 0xa6, + 0xa6, 0xa7, 0xaa, 0xb2, 0xbc, 0xc4, 0xc6, 0xc7, 0xc9, 0xc8, 0xc3, 0xc2, + 0xc7, 0xca, 0xc4, 0xba, 0xc1, 0xc3, 0xbc, 0xb4, 0xb3, 0xc4, 0xc8, 0xc7, + 0xc7, 0xc4, 0xbc, 0xb6, 0xb3, 0xac, 0xb3, 0xb6, 0xaf, 0xa8, 0xab, 0xb9, + 0xc6, 0xc2, 0xb8, 0xb6, 0xb1, 0xac, 0xb6, 0xc1, 0xc1, 0xc2, 0xc1, 0xc4, + 0xc5, 0xc7, 0xc3, 0xbc, 0xb5, 0xb0, 0xb0, 0xb5, 0xb6, 0xb0, 0xac, 0xab, + 0xab, 0xac, 0xb3, 0xba, 0xc3, 0xc6, 0xc4, 0xbd, 0xbc, 0xb6, 0xae, 0xbb, + 0xc2, 0xb9, 0xb7, 0xbf, 0xc2, 0xc2, 0xba, 0xb0, 0xad, 0xb6, 0xba, 0xb5, + 0xaf, 0xa8, 0xa6, 0xa4, 0xa8, 0xaa, 0xa7, 0xa7, 0xa7, 0xa6, 0xab, 0xac, + 0xb0, 0xb3, 0xc0, 0xd7, 0xfb, 0x1e, 0x38, 0x4b, 0x58, 0x60, 0x64, 0x67, + 0x5d, 0x3f, 0xe5, 0xcc, 0xd5, 0xf3, 0x39, 0x5a, 0x67, 0x6c, 0x72, 0x73, + 0x72, 0x73, 0x70, 0x6b, 0x6b, 0x6c, 0x6e, 0x70, 0x72, 0x73, 0x71, 0x70, + 0x70, 0x70, 0x71, 0x72, 0x72, 0x73, 0x73, 0x74, 0x74, 0x72, 0x72, 0x71, + 0x6f, 0x6c, 0x6a, 0x68, 0x61, 0x55, 0x42, 0x31, 0x22, 0x14, 0x0e, 0x0c, + 0x04, 0x00, 0x04, 0x0a, 0x1b, 0x3a, 0x53, 0x5a, 0x53, 0x34, 0xf8, 0xce, + 0xc3, 0xc1, 0xb9, 0xb8, 0xc2, 0xbc, 0xb9, 0xb9, 0xb8, 0xb6, 0xb4, 0xae, + 0xa9, 0xa6, 0xa5, 0xa9, 0xac, 0xad, 0xad, 0xaa, 0xaa, 0xa9, 0xaa, 0xae, + 0xab, 0xac, 0xb1, 0xb8, 0xb6, 0xaf, 0xa9, 0xab, 0xaa, 0xaa, 0xb2, 0xb6, + 0xb0, 0xac, 0xad, 0xac, 0xb5, 0xc1, 0xc0, 0xbd, 0xb6, 0xba, 0xb9, 
0xc1, + 0xbc, 0xb2, 0xb5, 0xc1, 0xbf, 0xae, 0xa6, 0xa4, 0xa4, 0xa6, 0xaf, 0xba, + 0xc5, 0xc6, 0xc5, 0xc7, 0xc6, 0xc2, 0xc0, 0xc4, 0xc8, 0xc8, 0xc1, 0xc0, + 0xc8, 0xc2, 0xbc, 0xb4, 0xae, 0xbc, 0xc7, 0xc8, 0xc8, 0xc1, 0xb6, 0xb6, + 0xb1, 0xa9, 0xaf, 0xb8, 0xb5, 0xac, 0xaa, 0xb2, 0xb6, 0xbc, 0xbb, 0xba, + 0xbb, 0xb5, 0xbb, 0xbc, 0xc1, 0xc3, 0xc2, 0xc8, 0xcb, 0xc9, 0xc5, 0xbf, + 0xbc, 0xba, 0xb8, 0xb8, 0xb9, 0xba, 0xaf, 0xa7, 0xa8, 0xb0, 0xb6, 0xbc, + 0xc2, 0xc0, 0xba, 0xb2, 0xad, 0xaf, 0xab, 0xb3, 0xc2, 0xb8, 0xb0, 0xb5, + 0xc0, 0xc3, 0xb7, 0xad, 0xab, 0xac, 0xb4, 0xb7, 0xbc, 0xb7, 0xb0, 0xaf, + 0xb0, 0xae, 0xad, 0xae, 0xa7, 0xa5, 0xa6, 0xa9, 0xb0, 0xb7, 0xc2, 0xde, + 0x0b, 0x34, 0x4a, 0x54, 0x5e, 0x65, 0x69, 0x67, 0x5c, 0x40, 0xf8, 0xdc, + 0xe9, 0x18, 0x52, 0x6a, 0x71, 0x74, 0x74, 0x73, 0x73, 0x72, 0x70, 0x6b, + 0x6a, 0x6d, 0x6f, 0x71, 0x73, 0x73, 0x71, 0x6e, 0x6f, 0x73, 0x71, 0x71, + 0x71, 0x72, 0x73, 0x73, 0x73, 0x71, 0x70, 0x71, 0x6e, 0x6a, 0x6a, 0x68, + 0x5e, 0x4f, 0x41, 0x35, 0x2e, 0x24, 0x1b, 0x10, 0x07, 0x02, 0x00, 0x06, + 0x23, 0x40, 0x55, 0x5a, 0x58, 0x42, 0x10, 0xe7, 0xd3, 0xc4, 0xba, 0xb6, + 0xb9, 0xb8, 0xb6, 0xb4, 0xaf, 0xac, 0xac, 0xa9, 0xaa, 0xa8, 0xa7, 0xa9, + 0xad, 0xaf, 0xaf, 0xae, 0xab, 0xac, 0xb0, 0xb0, 0xad, 0xae, 0xaf, 0xb5, + 0xb1, 0xab, 0xa9, 0xa9, 0xaa, 0xaa, 0xab, 0xaf, 0xb6, 0xb5, 0xb0, 0xae, + 0xb5, 0xbf, 0xc0, 0xc2, 0xbe, 0xbb, 0xba, 0xc2, 0xbc, 0xb1, 0xb5, 0xc0, + 0xb1, 0xa4, 0xa6, 0xa5, 0xa6, 0xab, 0xbd, 0xc7, 0xcd, 0xc5, 0xc5, 0xc5, + 0xc2, 0xc2, 0xc5, 0xc9, 0xc8, 0xc5, 0xc2, 0xc5, 0xcb, 0xc2, 0xbd, 0xbb, + 0xb4, 0xbc, 0xc8, 0xc7, 0xc6, 0xc0, 0xba, 0xbf, 0xb8, 0xac, 0xb0, 0xb9, + 0xb9, 0xb2, 0xaf, 0xb0, 0xa9, 0xb1, 0xbe, 0xbe, 0xc0, 0xbd, 0xbf, 0xbd, + 0xc0, 0xc3, 0xbf, 0xc3, 0xcb, 0xce, 0xce, 0xcc, 0xc3, 0xbe, 0xbe, 0xba, + 0xb8, 0xb9, 0xaf, 0xa6, 0xa6, 0xb0, 0xb6, 0xbc, 0xbc, 0xbc, 0xb8, 0xb3, + 0xb4, 0xb0, 0xa9, 0xaa, 0xb6, 0xb2, 0xac, 0xaa, 0xaf, 0xbc, 0xb1, 0xa9, + 0xb0, 0xad, 0xab, 0xb1, 0xb8, 0xb7, 0xb0, 0xae, 0xb3, 0xb2, 0xb1, 
0xb3, + 0xaa, 0xa8, 0xa9, 0xa9, 0xb0, 0xba, 0xc8, 0xf1, 0x28, 0x44, 0x53, 0x5d, + 0x64, 0x6a, 0x68, 0x65, 0x59, 0x54, 0x3f, 0x28, 0x42, 0x5e, 0x70, 0x75, + 0x77, 0x76, 0x75, 0x75, 0x75, 0x73, 0x72, 0x6e, 0x6c, 0x6d, 0x6f, 0x70, + 0x71, 0x70, 0x6f, 0x6e, 0x6f, 0x70, 0x70, 0x6f, 0x6e, 0x70, 0x71, 0x72, + 0x73, 0x72, 0x70, 0x70, 0x6d, 0x69, 0x68, 0x64, 0x5f, 0x53, 0x40, 0x31, + 0x27, 0x1e, 0x17, 0x0f, 0x07, 0x03, 0xfc, 0x08, 0x29, 0x43, 0x54, 0x5a, + 0x5c, 0x55, 0x38, 0x0d, 0xe8, 0xd3, 0xc0, 0xb4, 0xb1, 0xb0, 0xb0, 0xad, + 0xac, 0xaa, 0xa9, 0xa8, 0xa8, 0xa7, 0xa8, 0xad, 0xb1, 0xb6, 0xb4, 0xac, + 0xaa, 0xae, 0xb2, 0xb0, 0xaf, 0xae, 0xaa, 0xad, 0xaa, 0xa6, 0xa9, 0xaa, + 0xab, 0xac, 0xae, 0xaf, 0xae, 0xb1, 0xb0, 0xb2, 0xb7, 0xb8, 0xb6, 0xb8, + 0xbc, 0xc2, 0xc1, 0xbf, 0xbc, 0xb0, 0xb6, 0xb5, 0xa9, 0xa7, 0xa9, 0xa5, + 0xad, 0xbb, 0xc3, 0xc9, 0xca, 0xc3, 0xc2, 0xc5, 0xc3, 0xc6, 0xc8, 0xc8, + 0xc7, 0xc6, 0xc5, 0xc6, 0xc6, 0xc8, 0xc1, 0xbc, 0xb8, 0xbd, 0xc6, 0xc6, + 0xc4, 0xc0, 0xbc, 0xc2, 0xc1, 0xb6, 0xb3, 0xb2, 0xb1, 0xaf, 0xaf, 0xb5, + 0xb0, 0xb4, 0xc0, 0xbf, 0xc1, 0xc0, 0xbf, 0xc2, 0xb9, 0xbd, 0xbf, 0xbf, + 0xcc, 0xcf, 0xcc, 0xca, 0xc8, 0xc1, 0xbf, 0xbc, 0xbb, 0xbc, 0xb0, 0xb0, + 0xb0, 0xb6, 0xbb, 0xb7, 0xba, 0xbd, 0xbd, 0xb7, 0xb4, 0xaf, 0xa9, 0xaa, + 0xb4, 0xb4, 0xaf, 0xab, 0xa6, 0xa9, 0xaa, 0xaa, 0xa9, 0xa8, 0xaa, 0xaa, + 0xaf, 0xaa, 0xaa, 0xaa, 0xae, 0xb9, 0xb9, 0xb4, 0xab, 0xa9, 0xa9, 0xaa, + 0xb2, 0xc1, 0xe2, 0x12, 0x36, 0x4b, 0x5a, 0x64, 0x67, 0x6a, 0x67, 0x63, + 0x58, 0x5d, 0x5e, 0x64, 0x71, 0x76, 0x77, 0x77, 0x78, 0x77, 0x76, 0x76, + 0x74, 0x74, 0x74, 0x6f, 0x6d, 0x6d, 0x6b, 0x6a, 0x6c, 0x69, 0x66, 0x67, + 0x6a, 0x6e, 0x70, 0x6f, 0x6f, 0x70, 0x71, 0x71, 0x71, 0x70, 0x70, 0x6d, + 0x6c, 0x6a, 0x66, 0x64, 0x5e, 0x52, 0x3b, 0x28, 0x16, 0x0a, 0x04, 0xfe, + 0xf8, 0xf4, 0xf6, 0x10, 0x32, 0x43, 0x52, 0x59, 0x5d, 0x58, 0x49, 0x2b, + 0x05, 0xe5, 0xcd, 0xbc, 0xb6, 0xb8, 0xb0, 0xaa, 0xac, 0xae, 0xa8, 0xa7, + 0xa8, 0xa6, 0xaa, 0xaf, 0xb7, 0xb8, 0xb4, 0xac, 0xab, 0xb3, 0xb4, 
0xac, + 0xae, 0xaa, 0xaa, 0xaa, 0xa9, 0xa8, 0xab, 0xb4, 0xb3, 0xaf, 0xae, 0xad, + 0xae, 0xaf, 0xb3, 0xbc, 0xc4, 0xc3, 0xb7, 0xb0, 0xb6, 0xc0, 0xc4, 0xc2, + 0xbe, 0xb6, 0xb6, 0xb4, 0xb1, 0xb0, 0xb0, 0xb2, 0xb7, 0xc5, 0xc7, 0xc9, + 0xc6, 0xc2, 0xc2, 0xc5, 0xc4, 0xc4, 0xc8, 0xca, 0xc8, 0xc8, 0xc8, 0xc8, + 0xc5, 0xc5, 0xc2, 0xbc, 0xbd, 0xc1, 0xc3, 0xc0, 0xc1, 0xbf, 0xba, 0xbe, + 0xc1, 0xc1, 0xbb, 0xb3, 0xaf, 0xab, 0xac, 0xb3, 0xb7, 0xb5, 0xb3, 0xba, + 0xc2, 0xc2, 0xba, 0xc7, 0xb5, 0xb0, 0xbd, 0xc0, 0xc9, 0xcf, 0xcc, 0xcc, + 0xcb, 0xc1, 0xc2, 0xc2, 0xc1, 0xc0, 0xb9, 0xb9, 0xb7, 0xb3, 0xb6, 0xbd, + 0xcb, 0xc1, 0xbe, 0xb8, 0xb5, 0xb0, 0xaa, 0xae, 0xb0, 0xb0, 0xaa, 0xa9, + 0xa7, 0xa9, 0xab, 0xaa, 0xaa, 0xab, 0xb2, 0xb7, 0xb3, 0xaf, 0xb3, 0xb6, + 0xb4, 0xb4, 0xb9, 0xb9, 0xad, 0xaa, 0xab, 0xb3, 0xc5, 0xd7, 0xf1, 0x16, + 0x3b, 0x53, 0x5f, 0x65, 0x67, 0x6a, 0x69, 0x6a, 0x6c, 0x6f, 0x70, 0x74, + 0x76, 0x76, 0x78, 0x78, 0x77, 0x76, 0x76, 0x76, 0x76, 0x75, 0x74, 0x6f, + 0x6a, 0x67, 0x64, 0x64, 0x62, 0x5e, 0x60, 0x64, 0x6a, 0x6c, 0x6e, 0x6c, + 0x6b, 0x6e, 0x70, 0x71, 0x70, 0x6e, 0x6d, 0x6b, 0x6b, 0x6a, 0x67, 0x64, + 0x5d, 0x55, 0x47, 0x33, 0x1c, 0x0a, 0xff, 0xf1, 0xe5, 0xeb, 0xf8, 0x1c, + 0x34, 0x42, 0x4c, 0x52, 0x5b, 0x5c, 0x55, 0x43, 0x22, 0xf9, 0xd5, 0xbb, + 0xb5, 0xbc, 0xb2, 0xaa, 0xac, 0xad, 0xab, 0xaa, 0xab, 0xa9, 0xac, 0xad, + 0xb1, 0xb3, 0xb0, 0xaf, 0xb0, 0xb4, 0xb4, 0xac, 0xae, 0xaa, 0xa9, 0xab, + 0xab, 0xaa, 0xb3, 0xc3, 0xc2, 0xb8, 0xb1, 0xae, 0xb0, 0xaf, 0xb8, 0xc6, + 0xce, 0xc8, 0xbb, 0xb4, 0xb1, 0xb9, 0xc3, 0xc2, 0xc4, 0xbb, 0xb6, 0xb1, + 0xb4, 0xb4, 0xb4, 0xb4, 0xb7, 0xc2, 0xc0, 0xbe, 0xc2, 0xc4, 0xc5, 0xc6, + 0xc7, 0xc2, 0xc2, 0xc6, 0xc9, 0xcb, 0xca, 0xc7, 0xc7, 0xc6, 0xc5, 0xbd, + 0xbe, 0xc2, 0xc1, 0xb8, 0xbc, 0xbb, 0xb0, 0xb5, 0xbc, 0xc6, 0xc4, 0xb2, + 0xae, 0xb1, 0xb0, 0xaf, 0xb6, 0xb4, 0xaa, 0xb1, 0xbc, 0xc1, 0xc6, 0xc2, + 0xb0, 0xae, 0xb7, 0xbd, 0xc8, 0xcb, 0xcb, 0xca, 0xcc, 0xcd, 0xc6, 0xbf, + 0xbb, 0xc2, 0xc0, 0xb8, 0xb7, 0xb1, 0xb7, 0xc2, 0xc7, 0xb7, 0xb6, 
0xb6, + 0xb0, 0xac, 0xaa, 0xab, 0xa8, 0xa9, 0xa7, 0xab, 0xa9, 0xaa, 0xb0, 0xb0, + 0xaf, 0xab, 0xb0, 0xbb, 0xbe, 0xb7, 0xb7, 0xb6, 0xb3, 0xb3, 0xb9, 0xbc, + 0xb2, 0xb5, 0xb7, 0xc1, 0xcc, 0xd6, 0xf7, 0x25, 0x39, 0x40, 0x4b, 0x5c, + 0x66, 0x6a, 0x6d, 0x70, 0x71, 0x72, 0x73, 0x74, 0x76, 0x76, 0x77, 0x78, + 0x76, 0x76, 0x76, 0x76, 0x76, 0x75, 0x73, 0x6c, 0x64, 0x5d, 0x58, 0x52, + 0x4e, 0x54, 0x61, 0x67, 0x6b, 0x6b, 0x6c, 0x6d, 0x6c, 0x6c, 0x6c, 0x6e, + 0x6e, 0x6e, 0x6b, 0x6a, 0x6a, 0x68, 0x66, 0x64, 0x5f, 0x5a, 0x4c, 0x3a, + 0x25, 0x0e, 0xeb, 0xd4, 0xd4, 0xe3, 0xfc, 0x25, 0x34, 0x45, 0x46, 0x4f, + 0x5a, 0x5d, 0x58, 0x4a, 0x32, 0x0f, 0xe7, 0xd2, 0xc5, 0xc3, 0xb6, 0xad, + 0xad, 0xaf, 0xaa, 0xaa, 0xae, 0xaf, 0xae, 0xac, 0xad, 0xaf, 0xaf, 0xad, + 0xb1, 0xb9, 0xb5, 0xae, 0xae, 0xad, 0xae, 0xae, 0xac, 0xab, 0xb1, 0xc6, + 0xc7, 0xbe, 0xb8, 0xb3, 0xae, 0xae, 0xb8, 0xc6, 0xc6, 0xba, 0xb3, 0xb1, + 0xac, 0xac, 0xb8, 0xba, 0xb8, 0xb2, 0xb0, 0xaf, 0xb1, 0xb5, 0xb5, 0xb0, + 0xad, 0xb6, 0xb9, 0xb8, 0xc0, 0xc7, 0xc4, 0xc3, 0xc7, 0xc5, 0xc2, 0xc2, + 0xc7, 0xcb, 0xc8, 0xc6, 0xc6, 0xc6, 0xc6, 0xbf, 0xbf, 0xbf, 0xbf, 0xb8, + 0xbb, 0xb3, 0xb0, 0xb9, 0xbd, 0xc5, 0xc2, 0xb0, 0xaa, 0xac, 0xb1, 0xb1, + 0xbc, 0xba, 0xb2, 0xb0, 0xb4, 0xbf, 0xc7, 0xbc, 0xb4, 0xb6, 0xb6, 0xbc, + 0xcb, 0xcd, 0xc8, 0xc6, 0xc8, 0xce, 0xc4, 0xae, 0xa8, 0xb6, 0xb9, 0xaf, + 0xae, 0xb7, 0xc5, 0xb7, 0xb4, 0xb2, 0xb6, 0xb1, 0xae, 0xaf, 0xad, 0xa9, + 0xa8, 0xa9, 0xa5, 0xa9, 0xad, 0xaa, 0xb0, 0xaf, 0xb1, 0xb4, 0xb3, 0xb6, + 0xb7, 0xb6, 0xb5, 0xba, 0xbc, 0xbc, 0xb8, 0xbc, 0xb9, 0xbd, 0xbf, 0xc4, + 0xd4, 0xeb, 0x01, 0xfe, 0x05, 0x10, 0x11, 0x1a, 0x3a, 0x50, 0x58, 0x60, + 0x65, 0x6a, 0x6e, 0x70, 0x72, 0x73, 0x74, 0x76, 0x76, 0x76, 0x75, 0x75, + 0x74, 0x70, 0x6b, 0x62, 0x55, 0x4b, 0x42, 0x3c, 0x40, 0x55, 0x63, 0x68, + 0x6c, 0x6d, 0x6b, 0x6b, 0x6e, 0x6e, 0x6c, 0x6d, 0x6b, 0x6d, 0x6c, 0x6a, + 0x68, 0x66, 0x64, 0x63, 0x5e, 0x59, 0x4f, 0x3e, 0x27, 0x0b, 0xe6, 0xce, + 0xca, 0xde, 0x0a, 0x2e, 0x3b, 0x45, 0x41, 0x4d, 0x56, 0x5a, 0x58, 
0x52, + 0x41, 0x28, 0x0c, 0xf1, 0xe0, 0xd8, 0xc8, 0xb7, 0xb0, 0xaf, 0xab, 0xab, + 0xad, 0xac, 0xac, 0xac, 0xad, 0xaf, 0xae, 0xab, 0xb0, 0xb6, 0xb4, 0xb0, + 0xaf, 0xb0, 0xae, 0xad, 0xad, 0xad, 0xb4, 0xc8, 0xc9, 0xc2, 0xb9, 0xb3, + 0xad, 0xaa, 0xb3, 0xbd, 0xbe, 0xbc, 0xbc, 0xba, 0xb2, 0xb1, 0xbe, 0xbd, + 0xb6, 0xb6, 0xb4, 0xb5, 0xb5, 0xb3, 0xb6, 0xb9, 0xb0, 0xac, 0xaf, 0xaf, + 0xb8, 0xcd, 0xc3, 0xbe, 0xc2, 0xca, 0xcc, 0xc2, 0xc1, 0xc5, 0xc8, 0xc6, + 0xc8, 0xc6, 0xc7, 0xbe, 0xbc, 0xbe, 0xc3, 0xbd, 0xc1, 0xba, 0xb7, 0xbc, + 0xbf, 0xc5, 0xcb, 0xc7, 0xbc, 0xb0, 0xaa, 0xac, 0xb8, 0xc0, 0xc1, 0xbd, + 0xb2, 0xb6, 0xbb, 0xb8, 0xb4, 0xbe, 0xc5, 0xc5, 0xc8, 0xc4, 0xc3, 0xc2, + 0xbf, 0xcb, 0xca, 0xb6, 0xaf, 0xae, 0xb4, 0xb0, 0xa4, 0xb1, 0xc2, 0xbe, + 0xba, 0xb6, 0xb8, 0xb5, 0xb6, 0xb7, 0xba, 0xbb, 0xb7, 0xb4, 0xb0, 0xad, + 0xb0, 0xb0, 0xb4, 0xb6, 0xbb, 0xbe, 0xbc, 0xb8, 0xb4, 0xb0, 0xb6, 0xc5, + 0xc9, 0xc6, 0xbe, 0xb7, 0xbc, 0xbf, 0xc1, 0xc5, 0xd5, 0xf6, 0xdd, 0xcf, + 0xec, 0xf9, 0x02, 0x05, 0x07, 0x0e, 0x27, 0x40, 0x51, 0x5e, 0x6a, 0x6e, + 0x70, 0x72, 0x74, 0x76, 0x76, 0x76, 0x73, 0x73, 0x71, 0x6a, 0x5d, 0x4c, + 0x3a, 0x2b, 0x1f, 0x0f, 0x07, 0x22, 0x4f, 0x67, 0x6c, 0x6a, 0x6a, 0x6a, + 0x6a, 0x6b, 0x6b, 0x6b, 0x6a, 0x6b, 0x6b, 0x6a, 0x67, 0x65, 0x64, 0x62, + 0x5e, 0x5a, 0x51, 0x3c, 0x22, 0x03, 0xe4, 0xd1, 0xca, 0xe9, 0x1e, 0x38, + 0x42, 0x3f, 0x3e, 0x4d, 0x52, 0x56, 0x55, 0x49, 0x34, 0x16, 0xef, 0xd3, + 0xca, 0xcd, 0xc2, 0xb9, 0xb0, 0xac, 0xac, 0xaa, 0xab, 0xaa, 0xaa, 0xab, + 0xae, 0xb6, 0xb5, 0xaf, 0xad, 0xb0, 0xae, 0xb0, 0xb0, 0xad, 0xac, 0xae, + 0xaf, 0xae, 0xb7, 0xc8, 0xc7, 0xc2, 0xb9, 0xb0, 0xac, 0xaa, 0xb3, 0xbb, + 0xbd, 0xbd, 0xbb, 0xb6, 0xb1, 0xba, 0xc6, 0xc8, 0xc2, 0xbc, 0xbb, 0xb9, + 0xba, 0xb6, 0xb7, 0xb6, 0xaf, 0xa9, 0xab, 0xad, 0xac, 0xb6, 0xb9, 0xb0, + 0xaa, 0xb1, 0xb9, 0xb1, 0xac, 0xad, 0xb0, 0xb5, 0xbf, 0xc4, 0xca, 0xca, + 0xc2, 0xbe, 0xc6, 0xc1, 0xc2, 0xb9, 0xb6, 0xba, 0xc0, 0xc7, 0xca, 0xcc, + 0xc8, 0xaf, 0xa6, 0xa8, 0xb1, 0xbd, 0xc4, 0xc5, 0xbf, 0xbc, 0xb6, 
0xb0, + 0xb3, 0xc2, 0xc8, 0xcc, 0xce, 0xc4, 0xbf, 0xc2, 0xb6, 0xbe, 0xc8, 0xbe, + 0xbb, 0xbe, 0xc6, 0xc0, 0xb7, 0xc0, 0xc7, 0xc4, 0xba, 0xb8, 0xbc, 0xbe, + 0xc3, 0xc8, 0xcc, 0xc5, 0xbe, 0xbc, 0xbb, 0xb7, 0xba, 0xbb, 0xc5, 0xc6, + 0xbf, 0xc2, 0xc2, 0xbd, 0xb9, 0xb0, 0xbe, 0xcb, 0xcb, 0xc9, 0xc8, 0xc2, + 0xbe, 0xc4, 0xc8, 0xd3, 0xf7, 0x04, 0xc4, 0xc6, 0xd8, 0xdd, 0xe7, 0xf1, + 0xf4, 0xf7, 0xfe, 0x1b, 0x3e, 0x57, 0x65, 0x6d, 0x71, 0x74, 0x73, 0x76, + 0x76, 0x76, 0x74, 0x74, 0x70, 0x66, 0x46, 0x2a, 0x13, 0xf4, 0xe2, 0xd0, + 0xcc, 0xda, 0x13, 0x48, 0x64, 0x6a, 0x6b, 0x6a, 0x68, 0x69, 0x6b, 0x6a, + 0x6a, 0x6a, 0x6a, 0x6a, 0x68, 0x65, 0x65, 0x64, 0x63, 0x61, 0x5a, 0x4e, + 0x32, 0x0c, 0xec, 0xd7, 0xd5, 0xfd, 0x28, 0x3d, 0x42, 0x3b, 0x3c, 0x46, + 0x4f, 0x54, 0x56, 0x53, 0x4c, 0x40, 0x24, 0x00, 0xdf, 0xce, 0xba, 0xb0, + 0xb0, 0xb0, 0xaf, 0xac, 0xad, 0xaa, 0xaa, 0xac, 0xaf, 0xb4, 0xb9, 0xb2, + 0xae, 0xb1, 0xae, 0xb0, 0xb0, 0xac, 0xad, 0xaf, 0xae, 0xae, 0xbc, 0xc8, + 0xc7, 0xc2, 0xba, 0xae, 0xa7, 0xa8, 0xb3, 0xb9, 0xb6, 0xb6, 0xb4, 0xb5, + 0xbd, 0xc3, 0xc8, 0xcb, 0xcc, 0xc9, 0xc0, 0xbd, 0xbc, 0xba, 0xb5, 0xb0, + 0xaa, 0xaa, 0xb1, 0xb2, 0xb0, 0xb6, 0xbd, 0xb3, 0xae, 0xa9, 0xaa, 0xa8, + 0xab, 0xab, 0xaa, 0xab, 0xb6, 0xbd, 0xc4, 0xc8, 0xc3, 0xbc, 0xc4, 0xc2, + 0xc2, 0xbc, 0xbb, 0xbd, 0xbd, 0xbc, 0xbc, 0xc2, 0xbc, 0xaa, 0xa9, 0xaa, + 0xb3, 0xb6, 0xbc, 0xc2, 0xc7, 0xc5, 0xb5, 0xaf, 0xb5, 0xc0, 0xc6, 0xce, + 0xcd, 0xc6, 0xc1, 0xb9, 0xb0, 0xb0, 0xbe, 0xc0, 0xc5, 0xc9, 0xc5, 0xc2, + 0xc2, 0xbd, 0xbe, 0xc2, 0xc2, 0xc4, 0xc6, 0xc6, 0xc9, 0xcb, 0xc8, 0xc4, + 0xbd, 0xbc, 0xbe, 0xbf, 0xbf, 0xc0, 0xd1, 0xcf, 0xbe, 0xbd, 0xbd, 0xc0, + 0xc2, 0xb8, 0xbe, 0xca, 0xca, 0xc9, 0xca, 0xc3, 0xc3, 0xc9, 0xd0, 0xec, + 0x14, 0xf2, 0xbf, 0xc1, 0xca, 0xc9, 0xc0, 0xce, 0xe3, 0xeb, 0xed, 0x00, + 0x2e, 0x56, 0x66, 0x6e, 0x70, 0x73, 0x73, 0x75, 0x75, 0x73, 0x74, 0x73, + 0x70, 0x6b, 0x46, 0x1b, 0xf5, 0xd4, 0xc4, 0xc2, 0xe5, 0xf6, 0xea, 0x1a, + 0x4d, 0x64, 0x69, 0x6b, 0x69, 0x65, 0x66, 0x67, 0x68, 0x69, 0x6a, 
0x69, + 0x68, 0x67, 0x64, 0x63, 0x65, 0x64, 0x61, 0x58, 0x43, 0x24, 0xfc, 0xdc, + 0xe4, 0x15, 0x3a, 0x48, 0x3e, 0x39, 0x40, 0x46, 0x4f, 0x57, 0x58, 0x53, + 0x3e, 0x29, 0x22, 0x1c, 0x0a, 0xed, 0xcb, 0xbb, 0xb6, 0xb3, 0xac, 0xaa, + 0xa9, 0xa8, 0xa7, 0xab, 0xb0, 0xb3, 0xbc, 0xb7, 0xb5, 0xb7, 0xb0, 0xae, + 0xb0, 0xae, 0xab, 0xae, 0xb0, 0xb1, 0xbe, 0xc2, 0xc2, 0xbc, 0xb5, 0xae, + 0xa8, 0xa9, 0xb1, 0xb9, 0xb6, 0xb6, 0xb4, 0xbc, 0xc6, 0xc6, 0xc6, 0xce, + 0xd1, 0xcf, 0xcc, 0xc3, 0xbd, 0xba, 0xb1, 0xaa, 0xaa, 0xaa, 0xac, 0xb0, + 0xb6, 0xc0, 0xc8, 0xc7, 0xc4, 0xba, 0xac, 0xa9, 0xad, 0xb0, 0xb2, 0xb0, + 0xb3, 0xbc, 0xbf, 0xb7, 0xae, 0xad, 0xbc, 0xbf, 0xbd, 0xbe, 0xbc, 0xbb, + 0xb7, 0xb6, 0xbe, 0xb9, 0xaf, 0xaa, 0xa9, 0xab, 0xb0, 0xb6, 0xb6, 0xbe, + 0xcb, 0xcf, 0xc2, 0xb5, 0xb6, 0xbd, 0xc3, 0xcb, 0xc8, 0xc6, 0xc0, 0xb3, + 0xb0, 0xab, 0xb9, 0xce, 0xd0, 0xc7, 0xbf, 0xc0, 0xbc, 0xbe, 0xc1, 0xc6, + 0xc9, 0xc8, 0xc2, 0xbd, 0xbd, 0xc2, 0xc5, 0xc5, 0xc2, 0xc2, 0xc7, 0xc5, + 0xc2, 0xcb, 0xd0, 0xc9, 0xc3, 0xc2, 0xbc, 0xc3, 0xc4, 0xbc, 0xbb, 0xc8, + 0xc9, 0xca, 0xcb, 0xc5, 0xc6, 0xca, 0xd5, 0xfb, 0x17, 0xdd, 0xc2, 0xb9, + 0xbc, 0xb9, 0xb0, 0xba, 0xd1, 0xda, 0xe2, 0xf2, 0x22, 0x4c, 0x62, 0x6a, + 0x6f, 0x71, 0x72, 0x73, 0x73, 0x6f, 0x70, 0x70, 0x71, 0x70, 0x55, 0x27, + 0xf3, 0xdb, 0xc8, 0xca, 0xd4, 0xef, 0xe3, 0xec, 0x23, 0x58, 0x62, 0x66, + 0x68, 0x68, 0x65, 0x64, 0x66, 0x67, 0x67, 0x68, 0x65, 0x64, 0x65, 0x63, + 0x64, 0x63, 0x62, 0x60, 0x52, 0x39, 0x16, 0xef, 0x06, 0x34, 0x4c, 0x4c, + 0x3d, 0x3b, 0x46, 0x50, 0x57, 0x5c, 0x5c, 0x57, 0x40, 0x16, 0xf4, 0xf1, + 0xff, 0x0a, 0xf2, 0xd8, 0xca, 0xbf, 0xb1, 0xaa, 0xaa, 0xaa, 0xa8, 0xa9, + 0xb0, 0xba, 0xc5, 0xbc, 0xb8, 0xb6, 0xaf, 0xaa, 0xac, 0xb4, 0xb1, 0xaf, + 0xac, 0xb3, 0xc0, 0xbf, 0xbc, 0xb3, 0xad, 0xac, 0xa7, 0xa7, 0xae, 0xb5, + 0xb8, 0xbc, 0xbc, 0xc3, 0xc8, 0xc4, 0xc6, 0xce, 0xcf, 0xce, 0xcf, 0xcb, + 0xc4, 0xb8, 0xad, 0xaa, 0xaa, 0xaa, 0xa6, 0xab, 0xb6, 0xbe, 0xc6, 0xcb, + 0xca, 0xc7, 0xb7, 0xaa, 0xaa, 0xad, 0xb6, 0xb9, 0xba, 0xbc, 0xc2, 
0xbc, + 0xaa, 0xa6, 0xb0, 0xbc, 0xba, 0xbc, 0xbe, 0xbc, 0xba, 0xbe, 0xc2, 0xb3, + 0xaa, 0xa6, 0xa8, 0xad, 0xb0, 0xb0, 0xaf, 0xbd, 0xc0, 0xbc, 0xba, 0xb3, + 0xb5, 0xbc, 0xbe, 0xc0, 0xbd, 0xbc, 0xbb, 0xb6, 0xb3, 0xaa, 0xb1, 0xc2, + 0xc0, 0xc0, 0xc4, 0xc7, 0xc9, 0xc9, 0xc8, 0xc8, 0xc8, 0xc6, 0xc2, 0xc3, + 0xc5, 0xc4, 0xca, 0xc8, 0xcb, 0xce, 0xcc, 0xcb, 0xce, 0xd5, 0xd6, 0xce, + 0xc7, 0xc4, 0xc0, 0xc5, 0xc8, 0xc5, 0xbf, 0xc8, 0xcc, 0xca, 0xc9, 0xc8, + 0xc2, 0xc6, 0xd2, 0x05, 0x16, 0xce, 0xbc, 0xb4, 0xb9, 0xbf, 0xb8, 0xba, + 0xc7, 0xcb, 0xd6, 0xe3, 0x12, 0x4d, 0x66, 0x6a, 0x6a, 0x6e, 0x73, 0x72, + 0x72, 0x6d, 0x6b, 0x6d, 0x6e, 0x6d, 0x5e, 0x43, 0x2c, 0x09, 0xec, 0xdc, + 0xd1, 0xca, 0xca, 0xcc, 0xec, 0x3f, 0x5f, 0x65, 0x69, 0x6a, 0x6a, 0x68, + 0x65, 0x67, 0x67, 0x69, 0x67, 0x64, 0x64, 0x65, 0x64, 0x63, 0x62, 0x61, + 0x5a, 0x4d, 0x3b, 0x29, 0x32, 0x46, 0x4c, 0x41, 0x3f, 0x41, 0x4a, 0x52, + 0x58, 0x5e, 0x60, 0x5c, 0x48, 0x26, 0xfd, 0xdb, 0xcd, 0xdf, 0xf2, 0xe0, + 0xda, 0xd4, 0xc2, 0xb4, 0xb0, 0xae, 0xa9, 0xaa, 0xb3, 0xc0, 0xc8, 0xbc, + 0xb4, 0xb5, 0xb3, 0xac, 0xad, 0xba, 0xb5, 0xad, 0xad, 0xb6, 0xbf, 0xbc, + 0xb8, 0xb2, 0xac, 0xaa, 0xa8, 0xa7, 0xab, 0xb3, 0xb8, 0xb9, 0xc0, 0xc5, + 0xc7, 0xc2, 0xc2, 0xc9, 0xce, 0xcc, 0xca, 0xc7, 0xc1, 0xb6, 0xad, 0xa9, + 0xaa, 0xb1, 0xac, 0xaa, 0xb6, 0xbc, 0xc1, 0xc6, 0xc1, 0xbe, 0xb7, 0xac, + 0xa8, 0xab, 0xb2, 0xb4, 0xb3, 0xbf, 0xc2, 0xc3, 0xb9, 0xa8, 0xab, 0xba, + 0xba, 0xbc, 0xc0, 0xbe, 0xbc, 0xc2, 0xbe, 0xb0, 0xaa, 0xa6, 0xa5, 0xaa, + 0xae, 0xad, 0xab, 0xb4, 0xb0, 0xad, 0xac, 0xae, 0xb1, 0xb7, 0xbe, 0xc1, + 0xb9, 0xb8, 0xbd, 0xbc, 0xb6, 0xaf, 0xad, 0xb3, 0xb4, 0xbb, 0xc4, 0xc7, + 0xc2, 0xc0, 0xbf, 0xc1, 0xc2, 0xc2, 0xb9, 0xb9, 0xbf, 0xc2, 0xc5, 0xc6, + 0xc9, 0xcd, 0xcb, 0xd1, 0xd5, 0xd7, 0xe2, 0xdf, 0xc8, 0xc4, 0xc2, 0xc7, + 0xcd, 0xd3, 0xca, 0xc5, 0xcc, 0xd1, 0xd2, 0xcf, 0xbe, 0xc2, 0xd6, 0x12, + 0x10, 0xcd, 0xb6, 0xb0, 0xb6, 0xc8, 0xc6, 0xbc, 0xc0, 0xc6, 0xd3, 0xde, + 0x0a, 0x47, 0x65, 0x6d, 0x6c, 0x6c, 0x71, 0x71, 0x70, 0x6f, 0x6d, 
0x6b, + 0x6a, 0x6b, 0x66, 0x5a, 0x57, 0x4d, 0x2c, 0x08, 0xed, 0xe0, 0xd6, 0xd2, + 0xe0, 0x29, 0x5a, 0x64, 0x68, 0x69, 0x6a, 0x69, 0x69, 0x69, 0x69, 0x68, + 0x69, 0x69, 0x67, 0x65, 0x65, 0x64, 0x65, 0x61, 0x5a, 0x53, 0x48, 0x3a, + 0x39, 0x3e, 0x3a, 0x41, 0x4a, 0x4f, 0x53, 0x58, 0x5b, 0x60, 0x63, 0x61, + 0x56, 0x38, 0x10, 0xf0, 0xd3, 0xc7, 0xcb, 0xee, 0xe6, 0xd2, 0xca, 0xbb, + 0xb3, 0xb4, 0xb0, 0xb0, 0xb6, 0xbc, 0xc2, 0xbf, 0xb7, 0xb3, 0xb2, 0xb4, + 0xb9, 0xbe, 0xbc, 0xb4, 0xae, 0xb7, 0xbd, 0xba, 0xb5, 0xb1, 0xad, 0xaa, + 0xaa, 0xac, 0xad, 0xb5, 0xb5, 0xba, 0xbc, 0xbb, 0xc1, 0xbf, 0xbe, 0xc4, + 0xc4, 0xc6, 0xc5, 0xc2, 0xbd, 0xb6, 0xac, 0xa9, 0xaa, 0xb0, 0xb0, 0xb0, + 0xb7, 0xbc, 0xbe, 0xbc, 0xb9, 0xb9, 0xb2, 0xad, 0xb5, 0xc0, 0xc6, 0xbb, + 0xa9, 0xaf, 0xbb, 0xbe, 0xba, 0xab, 0xaa, 0xaf, 0xb2, 0xb4, 0xba, 0xc1, + 0xbf, 0xbf, 0xb8, 0xb3, 0xaf, 0xaa, 0xa7, 0xa5, 0xa8, 0xa9, 0xa9, 0xae, + 0xab, 0xad, 0xb0, 0xb1, 0xb5, 0xbc, 0xc2, 0xc2, 0xbe, 0xbf, 0xbf, 0xb9, + 0xb6, 0xc2, 0xc4, 0xc6, 0xbc, 0xc0, 0xc7, 0xc8, 0xc9, 0xd2, 0xcc, 0xc9, + 0xd3, 0xce, 0xc2, 0xbc, 0xc0, 0xc6, 0xd2, 0xc6, 0xc4, 0xbb, 0xba, 0xc1, + 0xc2, 0xc5, 0xc8, 0xc6, 0xc6, 0xd0, 0xc7, 0xbc, 0xc7, 0xcd, 0xc6, 0xc4, + 0xcb, 0xd3, 0xd7, 0xd0, 0xbd, 0xc8, 0xe0, 0x12, 0x07, 0xda, 0xbb, 0xb0, + 0xb2, 0xc1, 0xd0, 0xd3, 0xce, 0xce, 0xd8, 0xf4, 0x1c, 0x46, 0x5e, 0x69, + 0x6e, 0x71, 0x70, 0x6e, 0x6f, 0x71, 0x70, 0x70, 0x6c, 0x6b, 0x6a, 0x64, + 0x61, 0x61, 0x59, 0x49, 0x34, 0x1c, 0x0f, 0x08, 0xfe, 0x1f, 0x52, 0x60, + 0x64, 0x65, 0x67, 0x69, 0x6b, 0x6b, 0x6a, 0x68, 0x69, 0x6a, 0x6a, 0x69, + 0x64, 0x64, 0x64, 0x60, 0x5a, 0x52, 0x46, 0x34, 0x22, 0x18, 0x34, 0x49, + 0x50, 0x55, 0x58, 0x5c, 0x5f, 0x63, 0x64, 0x63, 0x5e, 0x4e, 0x29, 0xf8, + 0xd9, 0xce, 0xbe, 0xc6, 0xe8, 0xe0, 0xc8, 0xbd, 0xb6, 0xb9, 0xbb, 0xb8, + 0xb7, 0xb9, 0xbc, 0xbd, 0xb9, 0xb3, 0xb1, 0xb7, 0xbc, 0xbe, 0xc0, 0xbf, + 0xb3, 0xb5, 0xba, 0xb7, 0xb3, 0xb2, 0xac, 0xaa, 0xad, 0xaf, 0xaf, 0xb2, + 0xb8, 0xbe, 0xbd, 0xba, 0xbb, 0xbb, 0xbe, 0xc4, 0xbc, 0xb9, 0xbc, 
0xbf, + 0xbc, 0xb5, 0xac, 0xaa, 0xac, 0xb0, 0xb4, 0xb2, 0xb2, 0xb9, 0xbd, 0xbc, + 0xb9, 0xb9, 0xb0, 0xb6, 0xcd, 0xd1, 0xcf, 0xc9, 0xb2, 0xa8, 0xad, 0xb2, + 0xb1, 0xb2, 0xb0, 0xb2, 0xb7, 0xb6, 0xb7, 0xc2, 0xc1, 0xba, 0xb5, 0xb6, + 0xb0, 0xaa, 0xaa, 0xa5, 0xa6, 0xa8, 0xb0, 0xbb, 0xb6, 0xb3, 0xb6, 0xb8, + 0xbb, 0xbc, 0xc2, 0xc2, 0xc2, 0xbf, 0xc1, 0xbd, 0xbf, 0xc4, 0xcb, 0xc4, + 0xc5, 0xd0, 0xd5, 0xd5, 0xd0, 0xcb, 0xcd, 0xd0, 0xca, 0xc8, 0xc8, 0xc5, + 0xc7, 0xc5, 0xd4, 0xd3, 0xcb, 0xc6, 0xc2, 0xc6, 0xc5, 0xc6, 0xc4, 0xc0, + 0xc7, 0xca, 0xcb, 0xc8, 0xd2, 0xce, 0xcb, 0xc8, 0xc8, 0xce, 0xce, 0xce, + 0xba, 0xbb, 0xd6, 0x0b, 0xfe, 0xdd, 0xc0, 0xb5, 0xb4, 0xb7, 0xc2, 0xd1, + 0xda, 0xe0, 0xec, 0x0a, 0x2a, 0x48, 0x5b, 0x64, 0x6a, 0x6f, 0x70, 0x6e, + 0x6e, 0x71, 0x72, 0x71, 0x6f, 0x6b, 0x6c, 0x6a, 0x66, 0x62, 0x5f, 0x5d, + 0x58, 0x52, 0x52, 0x54, 0x4b, 0x40, 0x4b, 0x58, 0x5e, 0x64, 0x66, 0x69, + 0x6a, 0x6a, 0x69, 0x6a, 0x69, 0x69, 0x6a, 0x6a, 0x68, 0x65, 0x61, 0x5b, + 0x52, 0x4b, 0x40, 0x37, 0x25, 0x2b, 0x44, 0x4e, 0x53, 0x56, 0x59, 0x5d, + 0x61, 0x64, 0x65, 0x64, 0x5d, 0x4d, 0x34, 0x13, 0xe1, 0xce, 0xc0, 0xbc, + 0xc2, 0xe3, 0xdb, 0xc2, 0xbc, 0xbd, 0xbc, 0xbc, 0xb6, 0xb6, 0xb9, 0xba, + 0xbb, 0xb8, 0xb7, 0xbb, 0xbf, 0xbe, 0xbf, 0xc4, 0xbc, 0xb0, 0xb2, 0xb4, + 0xb4, 0xaf, 0xac, 0xad, 0xad, 0xad, 0xac, 0xb1, 0xb9, 0xc0, 0xc2, 0xc0, + 0xb9, 0xba, 0xc2, 0xc5, 0xc4, 0xbc, 0xb4, 0xb6, 0xb9, 0xb5, 0xab, 0xaa, + 0xab, 0xad, 0xb5, 0xb1, 0xad, 0xb7, 0xbc, 0xba, 0xb6, 0xb9, 0xb8, 0xc9, + 0xdf, 0xda, 0xcc, 0xc5, 0xbc, 0xb3, 0xb5, 0xbc, 0xbc, 0xc2, 0xc2, 0xbd, + 0xbb, 0xbc, 0xc2, 0xc8, 0xc5, 0xc0, 0xb4, 0xb0, 0xb0, 0xb0, 0xaa, 0xa6, + 0xa4, 0xa9, 0xb6, 0xbf, 0xbe, 0xb7, 0xb6, 0xb9, 0xbe, 0xc1, 0xc1, 0xc2, + 0xc2, 0xbe, 0xc7, 0xc9, 0xce, 0xd0, 0xc7, 0xc2, 0xc5, 0xcc, 0xcf, 0xce, + 0xce, 0xcb, 0xcc, 0xd3, 0xcf, 0xcb, 0xce, 0xd1, 0xcf, 0xcf, 0xce, 0xd4, + 0xd4, 0xd5, 0xd4, 0xcf, 0xcd, 0xc9, 0xc0, 0xc0, 0xc0, 0xbf, 0xc9, 0xcc, + 0xcf, 0xcd, 0xcd, 0xca, 0xcd, 0xce, 0xcd, 0xcf, 0xaf, 0xb5, 0xd6, 
0x02, + 0xfc, 0xe2, 0xc4, 0xbc, 0xbe, 0xc1, 0xcb, 0xda, 0xe3, 0xeb, 0xfe, 0x1c, + 0x35, 0x48, 0x59, 0x5f, 0x66, 0x6a, 0x6c, 0x6d, 0x6e, 0x70, 0x70, 0x70, + 0x6f, 0x6c, 0x6c, 0x6f, 0x6a, 0x63, 0x5d, 0x5b, 0x58, 0x54, 0x52, 0x53, + 0x53, 0x52, 0x4e, 0x52, 0x5a, 0x5f, 0x64, 0x66, 0x68, 0x6a, 0x69, 0x69, + 0x6a, 0x6a, 0x6b, 0x6b, 0x6a, 0x68, 0x63, 0x5d, 0x56, 0x4c, 0x44, 0x3e, + 0x34, 0x40, 0x52, 0x54, 0x55, 0x5b, 0x5f, 0x61, 0x62, 0x64, 0x66, 0x65, + 0x60, 0x4e, 0x2e, 0x14, 0xfa, 0xdc, 0xc7, 0xba, 0xbc, 0xc7, 0xdd, 0xd7, + 0xc1, 0xbc, 0xc2, 0xc0, 0xb7, 0xaf, 0xb7, 0xb9, 0xb8, 0xbc, 0xbc, 0xc2, + 0xc4, 0xc1, 0xc2, 0xc4, 0xc1, 0xb2, 0xac, 0xb0, 0xb6, 0xb2, 0xaf, 0xac, + 0xaa, 0xaa, 0xab, 0xaf, 0xb6, 0xbc, 0xc2, 0xc2, 0xbe, 0xc2, 0xc2, 0xc3, + 0xc3, 0xc2, 0xbc, 0xb5, 0xb5, 0xb6, 0xb2, 0xac, 0xad, 0xb0, 0xb0, 0xaa, + 0xaa, 0xb1, 0xba, 0xba, 0xb5, 0xb6, 0xbb, 0xcb, 0xd4, 0xca, 0xc2, 0xbf, + 0xc0, 0xbf, 0xc1, 0xc6, 0xc6, 0xc7, 0xc6, 0xc3, 0xbd, 0xb3, 0xba, 0xc2, + 0xc4, 0xc2, 0xb5, 0xb2, 0xb7, 0xbc, 0xb9, 0xb2, 0xac, 0xaf, 0xbf, 0xc5, + 0xbf, 0xbf, 0xbd, 0xbc, 0xbe, 0xc1, 0xc1, 0xbd, 0xb8, 0xb8, 0xc0, 0xc7, + 0xc8, 0xc5, 0xc2, 0xc5, 0xc7, 0xcf, 0xce, 0xc7, 0xc5, 0xc9, 0xcd, 0xc9, + 0xc9, 0xc6, 0xc7, 0xc7, 0xc7, 0xc5, 0xc4, 0xc5, 0xc5, 0xca, 0xce, 0xcf, + 0xcf, 0xc8, 0xc3, 0xc7, 0xc6, 0xc6, 0xc6, 0xcb, 0xcf, 0xd1, 0xcf, 0xd1, + 0xce, 0xd0, 0xcc, 0xce, 0xae, 0xb6, 0xcf, 0xf1, 0xf4, 0xe0, 0xc8, 0xc9, + 0xc8, 0xca, 0xd1, 0xe0, 0xec, 0xf4, 0x05, 0x1c, 0x35, 0x4c, 0x51, 0x5d, + 0x62, 0x68, 0x6c, 0x70, 0x6f, 0x6e, 0x6e, 0x6f, 0x6e, 0x6c, 0x6a, 0x6f, + 0x6f, 0x6a, 0x62, 0x5e, 0x58, 0x54, 0x4f, 0x4e, 0x51, 0x54, 0x54, 0x53, + 0x52, 0x58, 0x5e, 0x61, 0x64, 0x68, 0x6a, 0x6c, 0x6b, 0x6b, 0x6d, 0x6d, + 0x6c, 0x6a, 0x69, 0x61, 0x58, 0x55, 0x51, 0x4c, 0x47, 0x4d, 0x55, 0x58, + 0x5b, 0x60, 0x63, 0x63, 0x62, 0x65, 0x67, 0x66, 0x65, 0x59, 0x40, 0x1d, + 0xfb, 0xe2, 0xd2, 0xc6, 0xc4, 0xbe, 0xc4, 0xd9, 0xcb, 0xc2, 0xc5, 0xc2, + 0xb8, 0xb0, 0xb1, 0xb4, 0xb6, 0xbb, 0xbd, 0xc5, 0xc6, 0xc2, 0xc4, 
0xc6, + 0xc2, 0xb0, 0xb2, 0xc0, 0xbc, 0xb6, 0xb1, 0xad, 0xab, 0xa8, 0xa5, 0xa9, + 0xaf, 0xb6, 0xbd, 0xc0, 0xc0, 0xc0, 0xbe, 0xc2, 0xbe, 0xbd, 0xbb, 0xb8, + 0xb8, 0xb8, 0xb6, 0xb4, 0xb4, 0xb4, 0xac, 0xa7, 0xa9, 0xac, 0xb3, 0xba, + 0xba, 0xb9, 0xb4, 0xb8, 0xc0, 0xbc, 0xbc, 0xbe, 0xc2, 0xc3, 0xc3, 0xc4, + 0xc3, 0xc5, 0xc6, 0xc5, 0xc0, 0xb0, 0xaa, 0xb9, 0xc5, 0xc3, 0xbb, 0xbc, + 0xbd, 0xc0, 0xc3, 0xc2, 0xbe, 0xbb, 0xc2, 0xc8, 0xc0, 0xc2, 0xc0, 0xc2, + 0xc4, 0xc2, 0xc3, 0xbf, 0xbc, 0xbe, 0xbf, 0xbc, 0xbe, 0xc3, 0xc8, 0xc4, + 0xc3, 0xc6, 0xce, 0xc9, 0xc8, 0xc8, 0xc6, 0xc4, 0xc5, 0xc9, 0xcc, 0xca, + 0xc9, 0xca, 0xcc, 0xc9, 0xca, 0xce, 0xd3, 0xcf, 0xc8, 0xc3, 0xca, 0xca, + 0xc9, 0xca, 0xc9, 0xcb, 0xcd, 0xd0, 0xd3, 0xd2, 0xce, 0xd0, 0xcf, 0xce, + 0xb0, 0xbc, 0xcb, 0xdd, 0xee, 0xe5, 0xd5, 0xd6, 0xd0, 0xd1, 0xd6, 0xe4, + 0xf0, 0xfb, 0x0e, 0x25, 0x34, 0x47, 0x51, 0x56, 0x5c, 0x60, 0x6a, 0x6f, + 0x70, 0x6c, 0x6c, 0x6d, 0x6c, 0x6b, 0x6a, 0x6b, 0x6d, 0x6d, 0x69, 0x64, + 0x62, 0x5b, 0x59, 0x56, 0x56, 0x58, 0x58, 0x58, 0x5a, 0x5e, 0x5f, 0x5e, + 0x60, 0x64, 0x69, 0x6d, 0x6d, 0x6f, 0x6e, 0x6e, 0x6f, 0x6c, 0x6a, 0x65, + 0x5b, 0x55, 0x54, 0x54, 0x52, 0x51, 0x53, 0x57, 0x57, 0x59, 0x5e, 0x60, + 0x64, 0x68, 0x69, 0x69, 0x68, 0x65, 0x52, 0x2d, 0xfb, 0xda, 0xc8, 0xc3, + 0xc1, 0xc0, 0xc0, 0xc8, 0xce, 0xc2, 0xc0, 0xbe, 0xb4, 0xb0, 0xab, 0xa9, + 0xaa, 0xb0, 0xbc, 0xc3, 0xbc, 0xb8, 0xc2, 0xcb, 0xc0, 0xb1, 0xbe, 0xc7, + 0xc3, 0xb9, 0xaf, 0xaa, 0xa9, 0xab, 0xa8, 0xa7, 0xab, 0xb0, 0xb6, 0xbc, + 0xbe, 0xbc, 0xba, 0xbe, 0xb9, 0xb8, 0xb6, 0xb0, 0xb0, 0xb3, 0xb6, 0xb5, + 0xb5, 0xb0, 0xae, 0xaa, 0xa6, 0xa8, 0xad, 0xb7, 0xbc, 0xbb, 0xb0, 0xaa, + 0xb2, 0xb6, 0xb8, 0xbc, 0xbf, 0xbd, 0xbf, 0xc1, 0xc1, 0xc3, 0xc4, 0xc2, + 0xc2, 0xba, 0xa9, 0xaf, 0xba, 0xbc, 0xbf, 0xc3, 0xc2, 0xc1, 0xbf, 0xbe, + 0xc7, 0xc2, 0xc5, 0xc8, 0xc4, 0xc0, 0xc2, 0xc5, 0xc8, 0xc7, 0xc2, 0xb8, + 0xb6, 0xbc, 0xbe, 0xbd, 0xc1, 0xc0, 0xc2, 0xba, 0xc5, 0xd4, 0xda, 0xd4, + 0xce, 0xc8, 0xc5, 0xc7, 0xc8, 0xc6, 0xc7, 0xcf, 0xd4, 0xd6, 0xd6, 
0xd8, + 0xd7, 0xd4, 0xd8, 0xdd, 0xdb, 0xd5, 0xd5, 0xd4, 0xd1, 0xcf, 0xcc, 0xce, + 0xce, 0xce, 0xce, 0xd0, 0xce, 0xcd, 0xcc, 0xc9, 0xb3, 0xbe, 0xc4, 0xd7, + 0x02, 0x0f, 0x0a, 0xf7, 0xd7, 0xd3, 0xda, 0xe3, 0xf4, 0x00, 0x0f, 0x1f, + 0x2e, 0x3d, 0x44, 0x4e, 0x58, 0x60, 0x66, 0x69, 0x6c, 0x6a, 0x6b, 0x6c, + 0x6c, 0x6a, 0x69, 0x69, 0x6a, 0x6a, 0x6a, 0x68, 0x64, 0x5f, 0x5e, 0x5d, + 0x5c, 0x5a, 0x5c, 0x5f, 0x62, 0x64, 0x68, 0x69, 0x66, 0x64, 0x66, 0x68, + 0x6a, 0x6c, 0x6f, 0x6e, 0x6e, 0x6d, 0x69, 0x61, 0x59, 0x50, 0x4f, 0x52, + 0x52, 0x51, 0x51, 0x50, 0x4f, 0x55, 0x5b, 0x5f, 0x63, 0x69, 0x6b, 0x6a, + 0x6a, 0x6a, 0x62, 0x4d, 0x20, 0xea, 0xd0, 0xc5, 0xc1, 0xc1, 0xc0, 0xc2, + 0xca, 0xc1, 0xbf, 0xb8, 0xb0, 0xad, 0xab, 0xa9, 0xa8, 0xab, 0xb7, 0xc5, + 0xc7, 0xbe, 0xb3, 0xb6, 0xb0, 0xa7, 0xb1, 0xc4, 0xc7, 0xbf, 0xb0, 0xa9, + 0xa7, 0xaa, 0xae, 0xac, 0xab, 0xae, 0xb3, 0xba, 0xbc, 0xba, 0xba, 0xbd, + 0xb6, 0xb0, 0xaf, 0xaa, 0xaa, 0xac, 0xb0, 0xb1, 0xb1, 0xaa, 0xb4, 0xb3, + 0xab, 0xa9, 0xaa, 0xb0, 0xb8, 0xbc, 0xb7, 0xb5, 0xb0, 0xb0, 0xb4, 0xb6, + 0xbb, 0xba, 0xbc, 0xbd, 0xbd, 0xc2, 0xc2, 0xbe, 0xbc, 0xbd, 0xb6, 0xae, + 0xb5, 0xb8, 0xc1, 0xc6, 0xc6, 0xc2, 0xc2, 0xc2, 0xca, 0xc3, 0xc8, 0xc6, + 0xc2, 0xc2, 0xc2, 0xc8, 0xcd, 0xcb, 0xc1, 0xba, 0xbc, 0xbe, 0xc0, 0xbc, + 0xbd, 0xb9, 0xb8, 0xb8, 0xc0, 0xcd, 0xce, 0xcb, 0xc6, 0xbf, 0xc4, 0xcd, + 0xcf, 0xcf, 0xc8, 0xc8, 0xc7, 0xcb, 0xcf, 0xce, 0xcc, 0xc7, 0xc3, 0xc7, + 0xcc, 0xd0, 0xd0, 0xd3, 0xd9, 0xda, 0xd0, 0xce, 0xd1, 0xd4, 0xd1, 0xd1, + 0xd3, 0xd2, 0xcf, 0xcf, 0xb9, 0xc2, 0xcb, 0xe4, 0x12, 0x28, 0x2a, 0x1c, + 0xff, 0xe0, 0xd8, 0xe1, 0xf0, 0xfe, 0x0b, 0x22, 0x30, 0x3d, 0x46, 0x4f, + 0x55, 0x58, 0x61, 0x66, 0x69, 0x6b, 0x6b, 0x6b, 0x6c, 0x6a, 0x68, 0x65, + 0x67, 0x67, 0x66, 0x64, 0x63, 0x61, 0x63, 0x63, 0x63, 0x61, 0x61, 0x63, + 0x65, 0x66, 0x68, 0x6c, 0x6f, 0x6c, 0x69, 0x68, 0x67, 0x69, 0x6a, 0x6b, + 0x6b, 0x6e, 0x6c, 0x66, 0x5e, 0x53, 0x4c, 0x4d, 0x50, 0x4f, 0x4d, 0x4c, + 0x4d, 0x55, 0x59, 0x5e, 0x62, 0x67, 0x6a, 0x6d, 0x6d, 0x6d, 0x6a, 
0x60, + 0x47, 0x21, 0xf1, 0xce, 0xc4, 0xc2, 0xbe, 0xc0, 0xc5, 0xc2, 0xbc, 0xb3, + 0xad, 0xaf, 0xaf, 0xaa, 0xaa, 0xaf, 0xb5, 0xc2, 0xc8, 0xc5, 0xb2, 0xaa, + 0xa7, 0xa5, 0xa8, 0xb6, 0xc2, 0xc0, 0xb6, 0xab, 0xaa, 0xb3, 0xb5, 0xae, + 0xab, 0xad, 0xb3, 0xba, 0xba, 0xba, 0xbd, 0xbe, 0xba, 0xae, 0xa9, 0xa8, + 0xa8, 0xaa, 0xaf, 0xac, 0xaa, 0xad, 0xba, 0xbc, 0xbb, 0xbc, 0xbc, 0xb8, + 0xb6, 0xbc, 0xc7, 0xc5, 0xb7, 0xb6, 0xb6, 0xb6, 0xb9, 0xb9, 0xb8, 0xbb, + 0xbb, 0xbe, 0xbd, 0xb7, 0xb6, 0xb6, 0xb6, 0xb9, 0xba, 0xbb, 0xc4, 0xc9, + 0xca, 0xc5, 0xc2, 0xc2, 0xc3, 0xc7, 0xcd, 0xc0, 0xc0, 0xc4, 0xc3, 0xc8, + 0xcd, 0xcb, 0xc3, 0xb8, 0xc0, 0xc5, 0xcb, 0xc8, 0xc1, 0xb9, 0xb7, 0xb7, + 0xba, 0xbc, 0xbc, 0xbf, 0xc6, 0xc3, 0xc5, 0xc8, 0xcd, 0xcd, 0xcd, 0xd0, + 0xca, 0xc8, 0xc6, 0xc1, 0xc1, 0xc6, 0xc9, 0xc8, 0xc7, 0xc7, 0xc3, 0xc3, + 0xc8, 0xc7, 0xc6, 0xc8, 0xc9, 0xca, 0xce, 0xcf, 0xd1, 0xcf, 0xca, 0xc8, + 0xbc, 0xbc, 0xcc, 0xe8, 0x10, 0x26, 0x2b, 0x27, 0x1c, 0x08, 0xe6, 0xe0, + 0xe8, 0xf9, 0x09, 0x18, 0x22, 0x30, 0x3c, 0x49, 0x51, 0x58, 0x5d, 0x64, + 0x66, 0x67, 0x68, 0x6a, 0x6a, 0x69, 0x67, 0x64, 0x62, 0x61, 0x5f, 0x61, + 0x62, 0x62, 0x62, 0x65, 0x65, 0x64, 0x63, 0x63, 0x64, 0x65, 0x66, 0x67, + 0x6a, 0x6c, 0x6b, 0x69, 0x66, 0x67, 0x69, 0x69, 0x69, 0x6a, 0x6b, 0x69, + 0x63, 0x5b, 0x55, 0x53, 0x52, 0x4f, 0x4c, 0x4c, 0x50, 0x56, 0x5c, 0x61, + 0x65, 0x68, 0x6b, 0x6d, 0x6e, 0x70, 0x6e, 0x69, 0x5e, 0x48, 0x24, 0xf8, + 0xda, 0xc7, 0xbf, 0xc1, 0xc2, 0xc4, 0xbc, 0xb2, 0xb0, 0xae, 0xab, 0xab, + 0xac, 0xae, 0xb4, 0xc0, 0xc4, 0xb8, 0xab, 0xaa, 0xa9, 0xa8, 0xa9, 0xa9, + 0xb5, 0xba, 0xb6, 0xad, 0xab, 0xb4, 0xb4, 0xab, 0xab, 0xae, 0xb4, 0xb6, + 0xbb, 0xbf, 0xbe, 0xba, 0xb7, 0xb0, 0xa8, 0xa9, 0xa8, 0xb4, 0xb4, 0xab, + 0xaa, 0xb0, 0xb8, 0xbf, 0xc5, 0xc8, 0xc8, 0xc5, 0xbc, 0xc1, 0xcc, 0xca, + 0xb9, 0xb8, 0xb6, 0xb4, 0xb6, 0xb7, 0xb6, 0xbb, 0xbb, 0xbc, 0xbd, 0xbb, + 0xb6, 0xb5, 0xb6, 0xb5, 0xb4, 0xb9, 0xc0, 0xc6, 0xc9, 0xc8, 0xc5, 0xc1, + 0xc2, 0xc8, 0xbd, 0xb7, 0xbf, 0xc7, 0xc2, 0xc8, 0xc8, 0xc5, 0xc6, 
0xc3, + 0xc7, 0xc7, 0xc6, 0xc2, 0xc3, 0xc7, 0xbf, 0xb9, 0xbc, 0xbe, 0xba, 0xbc, + 0xc5, 0xc8, 0xc5, 0xc6, 0xc7, 0xc7, 0xc8, 0xcc, 0xce, 0xcc, 0xc6, 0xc1, + 0xbc, 0xc0, 0xc5, 0xc8, 0xc8, 0xc7, 0xc4, 0xc4, 0xc6, 0xc2, 0xbe, 0xc2, + 0xc2, 0xc2, 0xc6, 0xc8, 0xc4, 0xbf, 0xba, 0xb7, 0xc1, 0xba, 0xca, 0xfe, + 0x18, 0x23, 0x27, 0x1a, 0xfa, 0xdc, 0xd1, 0xd5, 0xdf, 0xef, 0xfa, 0x09, + 0x1c, 0x28, 0x34, 0x3e, 0x48, 0x4e, 0x55, 0x5a, 0x63, 0x64, 0x63, 0x64, + 0x68, 0x6a, 0x68, 0x63, 0x60, 0x5b, 0x5d, 0x5f, 0x5e, 0x61, 0x62, 0x64, + 0x63, 0x64, 0x64, 0x63, 0x61, 0x61, 0x62, 0x64, 0x67, 0x69, 0x68, 0x64, + 0x64, 0x63, 0x64, 0x65, 0x66, 0x67, 0x69, 0x69, 0x64, 0x5b, 0x56, 0x58, + 0x55, 0x51, 0x52, 0x52, 0x54, 0x59, 0x60, 0x64, 0x69, 0x69, 0x6b, 0x6f, + 0x6f, 0x6e, 0x6e, 0x6c, 0x67, 0x5e, 0x49, 0x28, 0x00, 0xdc, 0xcc, 0xc2, + 0xc0, 0xc1, 0xbc, 0xb3, 0xae, 0xae, 0xad, 0xac, 0xaf, 0xb3, 0xb6, 0xbe, + 0xc1, 0xb2, 0xa9, 0xa8, 0xa7, 0xa5, 0xa7, 0xa6, 0xa9, 0xb5, 0xb2, 0xab, + 0xa8, 0xaa, 0xad, 0xaa, 0xaf, 0xb0, 0xb6, 0xbb, 0xc2, 0xc3, 0xbd, 0xb8, + 0xb0, 0xaa, 0xa8, 0xa9, 0xb3, 0xbf, 0xc1, 0xbe, 0xbb, 0xb8, 0xb3, 0xba, + 0xc4, 0xc8, 0xca, 0xc8, 0xc2, 0xc2, 0xcb, 0xcb, 0xbc, 0xb6, 0xb9, 0xb6, + 0xb2, 0xb2, 0xb5, 0xb6, 0xbb, 0xbf, 0xc0, 0xbe, 0xb9, 0xb7, 0xbc, 0xbb, + 0xb9, 0xbc, 0xbb, 0xbc, 0xc0, 0xc4, 0xc8, 0xc3, 0xbf, 0xbf, 0xc2, 0xbd, + 0xba, 0xc3, 0xc2, 0xc8, 0xc1, 0xbb, 0xbd, 0xbd, 0xc0, 0xc0, 0xc1, 0xc2, + 0xc3, 0xc4, 0xc5, 0xc1, 0xbc, 0xc5, 0xc5, 0xc5, 0xc7, 0xc3, 0xc5, 0xc8, + 0xc2, 0xc2, 0xc8, 0xc9, 0xcb, 0xca, 0xcc, 0xca, 0xc6, 0xc2, 0xc2, 0xc4, + 0xc4, 0xc3, 0xc1, 0xc1, 0xbf, 0xbc, 0xb9, 0xc0, 0xc3, 0xc3, 0xc3, 0xbf, + 0xc1, 0xc2, 0xc2, 0xbc, 0xc2, 0xbe, 0xcb, 0x10, 0x17, 0x20, 0x09, 0xde, + 0xc4, 0xbb, 0xbb, 0xbe, 0xca, 0xde, 0xec, 0xf9, 0x10, 0x1f, 0x2d, 0x38, + 0x43, 0x4b, 0x51, 0x56, 0x5c, 0x5e, 0x5e, 0x60, 0x62, 0x64, 0x68, 0x66, + 0x63, 0x5c, 0x57, 0x5d, 0x5e, 0x5e, 0x60, 0x64, 0x63, 0x62, 0x61, 0x63, + 0x62, 0x61, 0x63, 0x65, 0x66, 0x66, 0x65, 0x64, 0x64, 0x61, 0x5e, 
0x5c, + 0x62, 0x64, 0x65, 0x65, 0x61, 0x5a, 0x55, 0x57, 0x58, 0x55, 0x56, 0x55, + 0x58, 0x5e, 0x64, 0x66, 0x69, 0x6a, 0x6b, 0x6f, 0x70, 0x70, 0x70, 0x6f, + 0x6d, 0x67, 0x59, 0x40, 0x28, 0xff, 0xdf, 0xcb, 0xc2, 0xbe, 0xbc, 0xbb, + 0xb6, 0xb3, 0xad, 0xad, 0xb3, 0xb5, 0xb7, 0xbb, 0xba, 0xad, 0xad, 0xad, + 0xaa, 0xa7, 0xa8, 0xa6, 0xa8, 0xb2, 0xb2, 0xa9, 0xa4, 0xa6, 0xaa, 0xaa, + 0xaf, 0xb2, 0xbc, 0xc1, 0xc1, 0xc5, 0xc0, 0xb2, 0xae, 0xaa, 0xac, 0xaf, + 0xbd, 0xc7, 0xcb, 0xc8, 0xc6, 0xc3, 0xbb, 0xb6, 0xbc, 0xc2, 0xc6, 0xc7, + 0xc6, 0xc2, 0xc6, 0xc9, 0xc0, 0xb9, 0xb8, 0xb7, 0xb1, 0xae, 0xb0, 0xaf, + 0xb7, 0xc1, 0xc1, 0xc0, 0xbc, 0xbc, 0xc0, 0xbd, 0xbc, 0xbc, 0xbe, 0xbc, + 0xb5, 0xb5, 0xbd, 0xc1, 0xbd, 0xbd, 0xc7, 0xc5, 0xbd, 0xb8, 0xc0, 0xca, + 0xc2, 0xbb, 0xb9, 0xba, 0xbc, 0xbe, 0xc1, 0xc1, 0xc5, 0xc8, 0xc7, 0xc4, + 0xbc, 0xc6, 0xcb, 0xc8, 0xc1, 0xc6, 0xc6, 0xc9, 0xca, 0xc6, 0xc5, 0xc6, + 0xc7, 0xc8, 0xca, 0xc5, 0xc6, 0xc7, 0xc5, 0xc9, 0xcb, 0xc9, 0xc4, 0xc5, + 0xc2, 0xbc, 0xbd, 0xc1, 0xc3, 0xc5, 0xc8, 0xc8, 0xc2, 0xc3, 0xc2, 0xbd, + 0xc2, 0xc8, 0xd6, 0x10, 0x0b, 0xfd, 0xe2, 0xd2, 0xc9, 0xc2, 0xba, 0xb5, + 0xb6, 0xc4, 0xce, 0xe1, 0xfb, 0x10, 0x1c, 0x24, 0x31, 0x3a, 0x41, 0x4b, + 0x53, 0x58, 0x5b, 0x5d, 0x5b, 0x5d, 0x64, 0x63, 0x61, 0x5e, 0x56, 0x53, + 0x5c, 0x5d, 0x5b, 0x5f, 0x62, 0x5e, 0x5e, 0x60, 0x61, 0x60, 0x62, 0x64, + 0x64, 0x65, 0x64, 0x62, 0x60, 0x5e, 0x5e, 0x5b, 0x5b, 0x60, 0x64, 0x64, + 0x61, 0x59, 0x53, 0x54, 0x5a, 0x58, 0x57, 0x5a, 0x61, 0x65, 0x64, 0x65, + 0x6a, 0x6b, 0x6d, 0x6f, 0x6f, 0x70, 0x71, 0x70, 0x70, 0x6c, 0x64, 0x4f, + 0x2b, 0x07, 0xe5, 0xcc, 0xc3, 0xbf, 0xbc, 0xbb, 0xb6, 0xb0, 0xaf, 0xb2, + 0xb4, 0xb0, 0xae, 0xb2, 0xb6, 0xb3, 0xb9, 0xbb, 0xaf, 0xa8, 0xa7, 0xa7, + 0xa7, 0xad, 0xb0, 0xab, 0xa6, 0xa4, 0xa5, 0xa8, 0xac, 0xaf, 0xb8, 0xc2, + 0xc2, 0xbf, 0xbc, 0xb0, 0xaa, 0xaf, 0xbc, 0xb6, 0xc1, 0xc9, 0xcd, 0xc9, + 0xc6, 0xc3, 0xc0, 0xb9, 0xbb, 0xc0, 0xc6, 0xc9, 0xca, 0xc4, 0xc2, 0xc6, + 0xc2, 0xbe, 0xb7, 0xb4, 0xb0, 0xad, 0xaf, 0xac, 0xb3, 0xbd, 0xc1, 
0xc1, + 0xbb, 0xbc, 0xbb, 0xb8, 0xb3, 0xb6, 0xb7, 0xb6, 0xad, 0xa7, 0xa8, 0xac, + 0xaf, 0xb9, 0xc0, 0xc2, 0xc2, 0xbd, 0xc1, 0xc9, 0xc5, 0xbd, 0xb9, 0xb6, + 0xbc, 0xbd, 0xc3, 0xc2, 0xc4, 0xc5, 0xc3, 0xc2, 0xbd, 0xbf, 0xc4, 0xc7, + 0xcb, 0xcf, 0xca, 0xc8, 0xc4, 0xc8, 0xc7, 0xc7, 0xc1, 0xc2, 0xc2, 0xc1, + 0xc4, 0xc3, 0xbf, 0xc2, 0xca, 0xc9, 0xc6, 0xc7, 0xc6, 0xc1, 0xbd, 0xbc, + 0xc5, 0xc8, 0xd5, 0xd4, 0xc7, 0xc3, 0xc0, 0xbd, 0xb8, 0xc2, 0xcd, 0xf7, + 0xf9, 0xee, 0xe3, 0xdb, 0xd1, 0xc8, 0xc1, 0xbb, 0xb8, 0xba, 0xba, 0xc7, + 0xd9, 0xee, 0x04, 0x16, 0x24, 0x2e, 0x34, 0x3e, 0x48, 0x50, 0x52, 0x58, + 0x57, 0x53, 0x56, 0x5a, 0x5b, 0x5c, 0x58, 0x51, 0x53, 0x58, 0x58, 0x58, + 0x5c, 0x5f, 0x5e, 0x5e, 0x60, 0x5f, 0x5f, 0x61, 0x63, 0x63, 0x62, 0x5e, + 0x5e, 0x5f, 0x5f, 0x5b, 0x59, 0x5b, 0x60, 0x64, 0x60, 0x5b, 0x56, 0x54, + 0x5a, 0x59, 0x56, 0x59, 0x61, 0x66, 0x64, 0x65, 0x69, 0x6b, 0x6e, 0x6e, + 0x70, 0x70, 0x71, 0x72, 0x70, 0x6e, 0x69, 0x62, 0x4f, 0x28, 0xfe, 0xd9, + 0xc7, 0xc2, 0xc0, 0xb9, 0xb1, 0xb0, 0xb4, 0xb8, 0xb9, 0xaf, 0xaa, 0xb3, + 0xbc, 0xbc, 0xbb, 0xbb, 0xb4, 0xaa, 0xa9, 0xa9, 0xa8, 0xaa, 0xaf, 0xad, + 0xa7, 0xa6, 0xa8, 0xac, 0xb0, 0xb0, 0xb0, 0xb6, 0xc2, 0xbf, 0xb2, 0xaa, + 0xa8, 0xae, 0xb7, 0xb8, 0xc7, 0xcc, 0xcc, 0xcb, 0xc8, 0xc6, 0xbf, 0xbc, + 0xbd, 0xc0, 0xc1, 0xc8, 0xc9, 0xc8, 0xc2, 0xbf, 0xbf, 0xc0, 0xbb, 0xb3, + 0xb0, 0xac, 0xad, 0xab, 0xb1, 0xbc, 0xbe, 0xbe, 0xb9, 0xb3, 0xb6, 0xb6, + 0xb4, 0xb1, 0xb2, 0xb4, 0xab, 0xa5, 0xa9, 0xb5, 0xae, 0xb3, 0xbb, 0xbd, + 0xc1, 0xc3, 0xc5, 0xc8, 0xc8, 0xc8, 0xc6, 0xc2, 0xc6, 0xc7, 0xc5, 0xc2, + 0xc4, 0xc3, 0xbe, 0xc4, 0xc1, 0xc0, 0xc2, 0xca, 0xcc, 0xc7, 0xc4, 0xca, + 0xc8, 0xca, 0xc6, 0xc8, 0xc9, 0xc8, 0xc3, 0xc1, 0xc1, 0xbf, 0xbd, 0xc1, + 0xc9, 0xc8, 0xc5, 0xc4, 0xc7, 0xc9, 0xc1, 0xc2, 0xc5, 0xc7, 0xd0, 0xcc, + 0xc8, 0xc5, 0xc8, 0xcb, 0xb0, 0xbc, 0xda, 0xf7, 0xfb, 0xf3, 0xec, 0xe9, + 0xde, 0xd8, 0xd1, 0xcc, 0xc8, 0xc2, 0xbd, 0xc0, 0xc4, 0xcd, 0xe4, 0xf3, + 0x09, 0x1b, 0x25, 0x34, 0x3b, 0x45, 0x46, 0x46, 0x4c, 0x49, 0x41, 
0x43, + 0x49, 0x50, 0x53, 0x4f, 0x51, 0x52, 0x54, 0x55, 0x58, 0x59, 0x5d, 0x5c, + 0x5a, 0x58, 0x5a, 0x5e, 0x5e, 0x61, 0x61, 0x5a, 0x59, 0x60, 0x63, 0x5e, + 0x59, 0x59, 0x5d, 0x61, 0x62, 0x5a, 0x58, 0x5c, 0x5c, 0x5d, 0x58, 0x58, + 0x60, 0x66, 0x66, 0x66, 0x6a, 0x6c, 0x6e, 0x6e, 0x70, 0x70, 0x71, 0x71, + 0x71, 0x70, 0x6b, 0x66, 0x5c, 0x49, 0x2e, 0x03, 0xde, 0xca, 0xbf, 0xb5, + 0xb1, 0xb1, 0xbb, 0xbc, 0xbc, 0xb7, 0xaf, 0xb6, 0xba, 0xbb, 0xb2, 0xae, + 0xb0, 0xae, 0xab, 0xaa, 0xaa, 0xaa, 0xb2, 0xb0, 0xad, 0xb8, 0xbc, 0xb6, + 0xb8, 0xb9, 0xb0, 0xae, 0xb9, 0xbb, 0xaf, 0xaa, 0xaa, 0xac, 0xb0, 0xb5, + 0xc4, 0xcb, 0xcd, 0xcb, 0xc9, 0xc7, 0xc1, 0xc0, 0xc2, 0xc2, 0xc2, 0xc5, + 0xc8, 0xca, 0xc8, 0xc0, 0xb9, 0xb6, 0xb5, 0xb2, 0xb4, 0xb1, 0xb0, 0xb0, + 0xb2, 0xb6, 0xb5, 0xb1, 0xb0, 0xb5, 0xbc, 0xbc, 0xb9, 0xb6, 0xb6, 0xb7, + 0xb2, 0xa9, 0xae, 0xc3, 0xbd, 0xb2, 0xba, 0xba, 0xc2, 0xc8, 0xc5, 0xc8, + 0xcd, 0xce, 0xcc, 0xc7, 0xcb, 0xce, 0xc3, 0xbd, 0xc2, 0xc0, 0xbd, 0xc5, + 0xc9, 0xca, 0xbc, 0xc1, 0xc4, 0xc2, 0xc1, 0xc5, 0xc8, 0xce, 0xd1, 0xce, + 0xcc, 0xce, 0xcd, 0xc7, 0xc5, 0xc3, 0xbc, 0xc6, 0xc9, 0xcb, 0xcc, 0xc6, + 0xc9, 0xc8, 0xca, 0xcb, 0xc4, 0xc0, 0xc1, 0xc1, 0xc1, 0xbe, 0xc2, 0xc8, + 0xb2, 0xc3, 0xeb, 0x06, 0x04, 0xff, 0xf7, 0xee, 0xeb, 0xe7, 0xdf, 0xdf, + 0xe3, 0xdb, 0xc8, 0xc4, 0xc1, 0xc2, 0xc7, 0xc8, 0xdb, 0xf2, 0xf8, 0x0c, + 0x1b, 0x18, 0x27, 0x2b, 0x2c, 0x34, 0x2f, 0x2b, 0x2e, 0x3a, 0x44, 0x48, + 0x4c, 0x4c, 0x4f, 0x52, 0x54, 0x54, 0x56, 0x57, 0x58, 0x59, 0x5b, 0x5d, + 0x5e, 0x61, 0x62, 0x5e, 0x5f, 0x61, 0x64, 0x63, 0x5e, 0x59, 0x5b, 0x5f, + 0x62, 0x5c, 0x58, 0x5d, 0x61, 0x62, 0x5b, 0x5b, 0x62, 0x67, 0x68, 0x68, + 0x6a, 0x6c, 0x6e, 0x6f, 0x6f, 0x70, 0x70, 0x70, 0x70, 0x72, 0x70, 0x6c, + 0x62, 0x52, 0x3c, 0x1c, 0xf8, 0xd7, 0xc1, 0xbc, 0xb7, 0xb0, 0xb9, 0xbf, + 0xbb, 0xb9, 0xb2, 0xb0, 0xab, 0xab, 0xa9, 0xa9, 0xaa, 0xab, 0xaa, 0xa8, + 0xac, 0xaf, 0xb6, 0xb8, 0xba, 0xc9, 0xcd, 0xc2, 0xbf, 0xbc, 0xb6, 0xb3, + 0xb3, 0xb0, 0xaa, 0xaa, 0xac, 0xaf, 0xb9, 0xb6, 0xc1, 0xc9, 0xc8, 
0xca, + 0xc9, 0xc6, 0xc7, 0xc8, 0xc6, 0xc7, 0xc2, 0xc4, 0xc3, 0xc8, 0xc8, 0xc4, + 0xbd, 0xb3, 0xb0, 0xb2, 0xb8, 0xbc, 0xb4, 0xb1, 0xb6, 0xb8, 0xb7, 0xbb, + 0xb9, 0xbf, 0xc2, 0xc2, 0xbd, 0xb8, 0xb8, 0xb7, 0xb3, 0xb0, 0xb0, 0xc0, + 0xc3, 0xc0, 0xc0, 0xbf, 0xc6, 0xcc, 0xc5, 0xcc, 0xc8, 0xc8, 0xc5, 0xcd, + 0xd4, 0xd1, 0xc4, 0xbc, 0xb8, 0xc0, 0xc2, 0xc8, 0xcc, 0xc9, 0xbb, 0xb6, + 0xc0, 0xc2, 0xc1, 0xc4, 0xc6, 0xc5, 0xc8, 0xcb, 0xcd, 0xcc, 0xce, 0xc8, + 0xc6, 0xc1, 0xc0, 0xcb, 0xd0, 0xda, 0xdc, 0xd4, 0xd4, 0xd2, 0xd1, 0xcc, + 0xc7, 0xc1, 0xbc, 0xc1, 0xc2, 0xc4, 0xc2, 0xc2, 0xc8, 0xeb, 0xf8, 0x0a, + 0x10, 0x07, 0xfc, 0xf8, 0xf6, 0xee, 0xed, 0xf2, 0xed, 0xde, 0xd2, 0xcc, + 0xc8, 0xc8, 0xc4, 0xc3, 0xc4, 0xcc, 0xc8, 0xcd, 0xde, 0xde, 0xe7, 0xf2, + 0xf2, 0xfe, 0x04, 0x04, 0x0a, 0x1c, 0x2e, 0x3a, 0x40, 0x45, 0x46, 0x49, + 0x4f, 0x53, 0x55, 0x53, 0x52, 0x52, 0x5a, 0x60, 0x64, 0x65, 0x65, 0x61, + 0x61, 0x66, 0x65, 0x64, 0x63, 0x5e, 0x5b, 0x5e, 0x61, 0x5e, 0x5c, 0x5a, + 0x5f, 0x67, 0x64, 0x60, 0x64, 0x69, 0x69, 0x69, 0x6a, 0x6b, 0x6c, 0x6f, + 0x70, 0x70, 0x72, 0x72, 0x72, 0x73, 0x73, 0x72, 0x6e, 0x5b, 0x41, 0x26, + 0x05, 0xec, 0xd5, 0xbe, 0xad, 0xa9, 0xaf, 0xbb, 0xbe, 0xbe, 0xb5, 0xac, + 0xa8, 0xa8, 0xa6, 0xa7, 0xab, 0xaa, 0xaa, 0xaa, 0xb6, 0xba, 0xba, 0xba, + 0xbc, 0xc6, 0xcc, 0xc8, 0xc7, 0xc6, 0xbd, 0xb8, 0xb6, 0xaa, 0xa8, 0xae, + 0xb0, 0xb3, 0xbc, 0xb0, 0xb8, 0xc8, 0xc8, 0xce, 0xce, 0xc8, 0xcc, 0xca, + 0xc6, 0xc8, 0xc8, 0xc8, 0xc6, 0xc6, 0xc8, 0xc7, 0xc4, 0xbc, 0xb6, 0xba, + 0xc2, 0xbf, 0xb7, 0xb6, 0xb5, 0xb9, 0xba, 0xbc, 0xbd, 0xc8, 0xc6, 0xc5, + 0xc1, 0xbc, 0xbc, 0xbc, 0xb7, 0xb6, 0xb5, 0xba, 0xc7, 0xd2, 0xc8, 0xc2, + 0xc6, 0xd3, 0xd1, 0xce, 0xcb, 0xcb, 0xc5, 0xc7, 0xcc, 0xd1, 0xc6, 0xb7, + 0xb9, 0xc6, 0xc2, 0xc8, 0xca, 0xca, 0xbc, 0xbb, 0xc7, 0xc3, 0xc3, 0xc8, + 0xc7, 0xc2, 0xc4, 0xc8, 0xc9, 0xc7, 0xc8, 0xc2, 0xbe, 0xc0, 0xc6, 0xcc, + 0xcc, 0xd2, 0xcf, 0xce, 0xcc, 0xc5, 0xc2, 0xc5, 0xc6, 0xc6, 0xc5, 0xc7, + 0xc9, 0xc8, 0xc8, 0xc8, 0xfb, 0x13, 0xe5, 0xf6, 0x0b, 0x08, 0x00, 
0x01, + 0x01, 0xfb, 0xf8, 0xf2, 0xee, 0xc8, 0xce, 0xce, 0xce, 0xc8, 0xc3, 0xce, + 0xc2, 0xc0, 0xce, 0xd2, 0xcc, 0xd1, 0xcd, 0xcb, 0xd0, 0xcb, 0xd5, 0xdb, + 0xde, 0xf2, 0x16, 0x2e, 0x39, 0x3c, 0x3d, 0x40, 0x46, 0x4a, 0x51, 0x52, + 0x54, 0x56, 0x57, 0x5b, 0x61, 0x64, 0x63, 0x5f, 0x62, 0x65, 0x67, 0x67, + 0x63, 0x60, 0x60, 0x5e, 0x61, 0x60, 0x5e, 0x5d, 0x5f, 0x67, 0x69, 0x67, + 0x66, 0x68, 0x6a, 0x68, 0x69, 0x6b, 0x6c, 0x6e, 0x71, 0x73, 0x72, 0x72, + 0x72, 0x72, 0x72, 0x72, 0x70, 0x6a, 0x58, 0x38, 0x0f, 0xf5, 0xd9, 0xcd, + 0xb8, 0xac, 0xa8, 0xb1, 0xbd, 0xc2, 0xb6, 0xab, 0xa9, 0xaa, 0xa8, 0xaa, + 0xb2, 0xb1, 0xab, 0xaa, 0xb3, 0xb3, 0xaf, 0xac, 0xae, 0xbc, 0xc4, 0xc6, + 0xc6, 0xc2, 0xbd, 0xb7, 0xb2, 0xab, 0xaa, 0xb4, 0xb6, 0xb9, 0xbb, 0xaf, + 0xad, 0xb6, 0xc3, 0xca, 0xcb, 0xcb, 0xcb, 0xc5, 0xc8, 0xcb, 0xcc, 0xca, + 0xcc, 0xcd, 0xcb, 0xc8, 0xc5, 0xbf, 0xba, 0xb9, 0xc1, 0xbf, 0xbc, 0xbc, + 0xb9, 0xb9, 0xbb, 0xbc, 0xbc, 0xc2, 0xc6, 0xc8, 0xc7, 0xc1, 0xc1, 0xbd, + 0xbe, 0xc0, 0xbc, 0xbc, 0xbd, 0xc3, 0xc4, 0xc1, 0xc0, 0xcc, 0xce, 0xca, + 0xc5, 0xc4, 0xc0, 0xbe, 0xbd, 0xc1, 0xbc, 0xb1, 0xbc, 0xc3, 0xc3, 0xc2, + 0xbf, 0xc5, 0xbe, 0xba, 0xc0, 0xc1, 0xc1, 0xc2, 0xc8, 0xc7, 0xc4, 0xc7, + 0xca, 0xc7, 0xc7, 0xc0, 0xbc, 0xc7, 0xcc, 0xcf, 0xc8, 0xcc, 0xc8, 0xcd, + 0xd0, 0xcc, 0xc8, 0xc8, 0xc7, 0xc7, 0xce, 0xcf, 0xcf, 0xcc, 0xcc, 0xcb, + 0x28, 0x22, 0xd4, 0xd3, 0xe4, 0xee, 0xf5, 0x03, 0x04, 0xf5, 0xe5, 0xe5, + 0xd9, 0xbc, 0xbd, 0xc2, 0xc6, 0xc4, 0xce, 0xde, 0xd4, 0xd1, 0xd7, 0xd4, + 0xd2, 0xcf, 0xc9, 0xc1, 0xc5, 0xca, 0xdb, 0xdf, 0xde, 0xd1, 0xf5, 0x24, + 0x37, 0x3b, 0x3b, 0x3e, 0x40, 0x42, 0x46, 0x4d, 0x53, 0x55, 0x52, 0x53, + 0x5a, 0x61, 0x61, 0x60, 0x63, 0x66, 0x65, 0x66, 0x64, 0x60, 0x60, 0x5f, + 0x61, 0x62, 0x5f, 0x5f, 0x63, 0x66, 0x6a, 0x6a, 0x6a, 0x6a, 0x6b, 0x6a, + 0x6b, 0x6d, 0x6f, 0x70, 0x71, 0x73, 0x72, 0x72, 0x72, 0x72, 0x72, 0x71, + 0x70, 0x6d, 0x67, 0x54, 0x2e, 0xff, 0xd9, 0xc7, 0xc1, 0xb1, 0xab, 0xab, + 0xb8, 0xbd, 0xb4, 0xb0, 0xac, 0xa8, 0xa8, 0xaa, 0xb0, 0xaf, 0xaf, 
0xb5, + 0xae, 0xae, 0xae, 0xae, 0xb0, 0xbe, 0xc2, 0xc5, 0xc2, 0xbc, 0xbb, 0xb7, + 0xb0, 0xac, 0xad, 0xba, 0xbc, 0xbc, 0xb7, 0xb1, 0xab, 0xa9, 0xbb, 0xc7, + 0xcb, 0xcd, 0xc6, 0xc3, 0xcb, 0xcd, 0xcb, 0xc8, 0xc8, 0xca, 0xcb, 0xca, + 0xc7, 0xc3, 0xbd, 0xb8, 0xb9, 0xc2, 0xc4, 0xc2, 0xbf, 0xbc, 0xba, 0xbb, + 0xb7, 0xb4, 0xb7, 0xc6, 0xcb, 0xc8, 0xc2, 0xc1, 0xc1, 0xc0, 0xb7, 0xb8, + 0xb4, 0xb3, 0xb6, 0xb8, 0xbd, 0xc9, 0xc1, 0xc5, 0xc3, 0xc1, 0xba, 0xb8, + 0xbc, 0xbe, 0xb8, 0xb0, 0xb2, 0xc3, 0xc6, 0xbf, 0xbb, 0xbd, 0xc1, 0xc4, + 0xc2, 0xbc, 0xbe, 0xc2, 0xd4, 0xd5, 0xce, 0xce, 0xcd, 0xcb, 0xcf, 0xcb, + 0xc8, 0xce, 0xce, 0xcf, 0xcd, 0xcf, 0xc5, 0xc5, 0xc9, 0xc7, 0xc0, 0xbd, + 0xbe, 0xc0, 0xc8, 0xd8, 0xcf, 0xc8, 0xce, 0xd1, 0x3b, 0x31, 0xe5, 0xc0, + 0xbc, 0xc0, 0xcb, 0xd7, 0xdb, 0xd6, 0xdb, 0xe2, 0xce, 0xc9, 0xc5, 0xc2, + 0xc5, 0xcb, 0xd6, 0xdb, 0xdc, 0xda, 0xde, 0xd8, 0xd4, 0xd3, 0xcd, 0xc0, + 0xc9, 0xde, 0xec, 0xe7, 0xde, 0xd6, 0xf3, 0x26, 0x37, 0x3f, 0x3f, 0x40, + 0x44, 0x48, 0x48, 0x4d, 0x52, 0x52, 0x53, 0x54, 0x57, 0x5f, 0x64, 0x62, + 0x62, 0x68, 0x69, 0x68, 0x64, 0x5e, 0x5b, 0x5d, 0x62, 0x64, 0x61, 0x5e, + 0x62, 0x66, 0x6a, 0x6c, 0x6c, 0x6d, 0x6e, 0x6d, 0x6e, 0x70, 0x70, 0x70, + 0x70, 0x71, 0x71, 0x72, 0x72, 0x71, 0x71, 0x70, 0x6f, 0x6e, 0x6b, 0x64, + 0x4c, 0x17, 0xe6, 0xcd, 0xc6, 0xba, 0xad, 0xaa, 0xb0, 0xb6, 0xb1, 0xae, + 0xaa, 0xa6, 0xa8, 0xab, 0xac, 0xac, 0xb6, 0xc2, 0xc2, 0xb8, 0xb6, 0xb6, + 0xb0, 0xb5, 0xc1, 0xc2, 0xbd, 0xb9, 0xb6, 0xb6, 0xb1, 0xad, 0xb0, 0xba, + 0xbf, 0xbc, 0xb4, 0xb0, 0xaa, 0xa5, 0xb2, 0xc2, 0xc9, 0xcb, 0xc7, 0xc8, + 0xc8, 0xca, 0xcb, 0xcb, 0xc8, 0xc8, 0xc4, 0xc3, 0xc4, 0xc4, 0xc2, 0xbd, + 0xb9, 0xbf, 0xce, 0xcb, 0xc6, 0xbd, 0xbc, 0xbb, 0xb6, 0xb0, 0xac, 0xb6, + 0xc2, 0xc8, 0xc9, 0xc4, 0xc0, 0xbc, 0xb9, 0xbb, 0xb7, 0xb1, 0xb5, 0xb6, + 0xc1, 0xc3, 0xc1, 0xc1, 0xbc, 0xc0, 0xbc, 0xb0, 0xb5, 0xb8, 0xb6, 0xbd, + 0xbb, 0xc2, 0xc7, 0xbd, 0xc0, 0xbe, 0xbe, 0xc6, 0xcb, 0xbf, 0xb7, 0xc8, + 0xd4, 0xcd, 0xce, 0xcd, 0xc3, 0xcd, 0xd9, 0xd2, 0xd2, 0xd1, 0xd3, 
0xce, + 0xd1, 0xd5, 0xce, 0xc4, 0xc6, 0xc2, 0xc2, 0xc2, 0xc3, 0xc3, 0xcf, 0xd4, + 0xce, 0xc8, 0xca, 0xce, 0x45, 0x3f, 0x15, 0xc9, 0xb8, 0xbb, 0xbc, 0xbc, + 0xc8, 0xcf, 0xd9, 0xdf, 0xd2, 0xcb, 0xc4, 0xc3, 0xc8, 0xc8, 0xca, 0xd0, + 0xd3, 0xd8, 0xdd, 0xdd, 0xd9, 0xdb, 0xdd, 0xdc, 0xe5, 0xf3, 0xef, 0xde, + 0xce, 0xde, 0x04, 0x29, 0x37, 0x3e, 0x3f, 0x40, 0x41, 0x48, 0x4c, 0x4f, + 0x51, 0x52, 0x55, 0x58, 0x58, 0x5f, 0x66, 0x65, 0x65, 0x67, 0x6a, 0x69, + 0x64, 0x5e, 0x5a, 0x5c, 0x61, 0x64, 0x62, 0x5d, 0x5f, 0x64, 0x66, 0x6b, + 0x6e, 0x6f, 0x6e, 0x6e, 0x70, 0x6f, 0x6f, 0x70, 0x70, 0x71, 0x72, 0x73, + 0x73, 0x71, 0x72, 0x70, 0x6e, 0x6c, 0x6a, 0x67, 0x60, 0x48, 0x0a, 0xd9, + 0xcc, 0xba, 0xaf, 0xaf, 0xae, 0xae, 0xad, 0xac, 0xa8, 0xaa, 0xaf, 0xb0, + 0xb0, 0xb7, 0xc3, 0xc8, 0xce, 0xc7, 0xba, 0xb6, 0xb3, 0xb0, 0xb1, 0xb1, + 0xb1, 0xb0, 0xb0, 0xb0, 0xb2, 0xb6, 0xbb, 0xbe, 0xbf, 0xbd, 0xb3, 0xad, + 0xa8, 0xa7, 0xaa, 0xbb, 0xc6, 0xc8, 0xc9, 0xcb, 0xc9, 0xc6, 0xca, 0xcd, + 0xca, 0xc8, 0xc6, 0xc6, 0xc6, 0xc3, 0xc2, 0xc3, 0xbc, 0xb4, 0xc0, 0xc8, + 0xc7, 0xc2, 0xbd, 0xbc, 0xb7, 0xb3, 0xad, 0xa9, 0xac, 0xbc, 0xc5, 0xbb, + 0xba, 0xbc, 0xbf, 0xc1, 0xb8, 0xad, 0xb2, 0xbd, 0xc2, 0xbb, 0xc2, 0xbd, + 0xb5, 0xb0, 0xb3, 0xac, 0xb3, 0xb5, 0xb7, 0xc1, 0xc5, 0xc2, 0xc4, 0xc2, + 0xbb, 0xbc, 0xc3, 0xc4, 0xbf, 0xbb, 0xbb, 0xc6, 0xc0, 0xba, 0xbe, 0xbe, + 0xbc, 0xc5, 0xca, 0xc6, 0xce, 0xcf, 0xd3, 0xd0, 0xc8, 0xce, 0xce, 0xd2, + 0xd1, 0xc4, 0xcb, 0xce, 0xcb, 0xca, 0xcc, 0xce, 0xd0, 0xc5, 0xc1, 0xc2, + 0x4d, 0x4c, 0x3c, 0x04, 0xcf, 0xbe, 0xb9, 0xb7, 0xbd, 0xc0, 0xc4, 0xc9, + 0xcb, 0xc7, 0xc3, 0xc4, 0xc8, 0xc7, 0xc5, 0xc6, 0xc8, 0xd1, 0xd5, 0xdf, + 0xdf, 0xe1, 0xe6, 0xe4, 0xe4, 0xdd, 0xd8, 0xda, 0xee, 0x06, 0x1c, 0x29, + 0x34, 0x3a, 0x3e, 0x42, 0x46, 0x4c, 0x4f, 0x4e, 0x4c, 0x4d, 0x52, 0x58, + 0x5b, 0x60, 0x66, 0x67, 0x68, 0x67, 0x67, 0x68, 0x66, 0x61, 0x5c, 0x5a, + 0x5f, 0x61, 0x62, 0x5e, 0x5e, 0x64, 0x65, 0x6a, 0x6c, 0x70, 0x6e, 0x6d, + 0x6b, 0x6d, 0x6e, 0x70, 0x73, 0x73, 0x72, 0x72, 0x71, 0x71, 0x71, 
0x70, + 0x70, 0x6e, 0x6b, 0x66, 0x5f, 0x58, 0x3a, 0x01, 0xd3, 0xbc, 0xb0, 0xb0, + 0xad, 0xab, 0xac, 0xaa, 0xac, 0xb0, 0xb6, 0xb6, 0xb7, 0xc3, 0xc9, 0xc8, + 0xcd, 0xca, 0xc2, 0xb6, 0xb1, 0xb0, 0xb6, 0xb3, 0xb1, 0xb5, 0xb4, 0xb1, + 0xb2, 0xb6, 0xbc, 0xc2, 0xc3, 0xbe, 0xb1, 0xab, 0xa7, 0xa7, 0xaa, 0xb8, + 0xc8, 0xc7, 0xc9, 0xcb, 0xc8, 0xc8, 0xcd, 0xcd, 0xca, 0xc5, 0xc4, 0xc6, + 0xc8, 0xc7, 0xc3, 0xc2, 0xbc, 0xb1, 0xb5, 0xbf, 0xc7, 0xc5, 0xbf, 0xbc, + 0xb5, 0xb0, 0xab, 0xa7, 0xa5, 0xa7, 0xb0, 0xab, 0xb6, 0xb0, 0xb7, 0xc6, + 0xbc, 0xb2, 0xb4, 0xba, 0xb8, 0xbb, 0xbd, 0xb8, 0xaf, 0xaa, 0xb6, 0xb6, + 0xb5, 0xb1, 0xb0, 0xbc, 0xc3, 0xc1, 0xbc, 0xbe, 0xb7, 0xbf, 0xc1, 0xbb, + 0xb8, 0xb7, 0xc2, 0xc5, 0xc0, 0xbc, 0xbc, 0xbc, 0xbc, 0xbe, 0xbc, 0xbd, + 0xc9, 0xcb, 0xce, 0xcd, 0xcb, 0xcf, 0xcd, 0xd1, 0xce, 0xc1, 0xc7, 0xce, + 0xc8, 0xc7, 0xc7, 0xc5, 0xc7, 0xb8, 0xb6, 0xbc, 0x52, 0x53, 0x52, 0x37, + 0x04, 0xe1, 0xd3, 0xcb, 0xc7, 0xc3, 0xc7, 0xc8, 0xc8, 0xc7, 0xc4, 0xcf, + 0xe3, 0xe6, 0xe1, 0xd5, 0xd0, 0xd0, 0xce, 0xcf, 0xd1, 0xd5, 0xd5, 0xda, + 0xda, 0xe1, 0xfb, 0x12, 0x21, 0x22, 0x27, 0x2c, 0x33, 0x3b, 0x41, 0x47, + 0x4a, 0x4d, 0x4e, 0x4c, 0x49, 0x4c, 0x52, 0x58, 0x5e, 0x61, 0x65, 0x64, + 0x66, 0x68, 0x66, 0x65, 0x63, 0x60, 0x5d, 0x59, 0x5d, 0x5f, 0x62, 0x60, + 0x5e, 0x63, 0x66, 0x68, 0x69, 0x6d, 0x6e, 0x6e, 0x6b, 0x6c, 0x6f, 0x71, + 0x73, 0x73, 0x72, 0x71, 0x71, 0x71, 0x71, 0x70, 0x6f, 0x6d, 0x6c, 0x66, + 0x61, 0x59, 0x48, 0x2c, 0xf8, 0xcb, 0xb5, 0xb0, 0xae, 0xac, 0xae, 0xad, + 0xb1, 0xb1, 0xb1, 0xb6, 0xc2, 0xc9, 0xc8, 0xc4, 0xc7, 0xc6, 0xc4, 0xbd, + 0xb3, 0xae, 0xb5, 0xb8, 0xb6, 0xb6, 0xb7, 0xb5, 0xb3, 0xb5, 0xbb, 0xc1, + 0xc7, 0xc1, 0xb1, 0xa8, 0xa8, 0xa8, 0xaa, 0xb7, 0xc7, 0xc6, 0xc7, 0xc9, + 0xc6, 0xca, 0xcd, 0xce, 0xca, 0xc4, 0xc3, 0xc4, 0xc5, 0xc8, 0xc6, 0xc0, + 0xb9, 0xb0, 0xaf, 0xb9, 0xc1, 0xbe, 0xbc, 0xb6, 0xae, 0xaa, 0xa7, 0xa6, + 0xa9, 0xaa, 0xb0, 0xad, 0xbc, 0xb1, 0xb0, 0xc5, 0xbf, 0xbc, 0xc0, 0xbb, + 0xb6, 0xbd, 0xbd, 0xb6, 0xaa, 0xac, 0xb6, 0xbe, 0xb8, 0xb6, 0xb4, 
0xba, + 0xc2, 0xbf, 0xb8, 0xbb, 0xb5, 0xb9, 0xc3, 0xbd, 0xbf, 0xbc, 0xbc, 0xc1, + 0xc4, 0xc1, 0xbe, 0xbd, 0xc7, 0xc5, 0xbb, 0xbb, 0xc8, 0xc5, 0xc1, 0xc2, + 0xc7, 0xc8, 0xc0, 0xc8, 0xce, 0xc5, 0xc1, 0xc5, 0xc2, 0xc2, 0xc1, 0xb6, + 0xad, 0xb6, 0xc4, 0xc1, 0x52, 0x52, 0x52, 0x4c, 0x2f, 0x0a, 0xf3, 0xeb, + 0xe8, 0xe8, 0xe6, 0xe3, 0xe0, 0xdc, 0xde, 0xf8, 0x0e, 0x19, 0x19, 0x19, + 0x16, 0x12, 0x0e, 0x0d, 0x10, 0x16, 0x1a, 0x1e, 0x22, 0x2b, 0x2f, 0x2e, + 0x32, 0x2e, 0x2a, 0x2e, 0x33, 0x3a, 0x43, 0x43, 0x3f, 0x40, 0x41, 0x42, + 0x47, 0x4a, 0x51, 0x57, 0x5e, 0x64, 0x65, 0x63, 0x62, 0x67, 0x68, 0x66, + 0x63, 0x5f, 0x5e, 0x5e, 0x5e, 0x60, 0x61, 0x61, 0x5f, 0x61, 0x65, 0x68, + 0x67, 0x6a, 0x6d, 0x70, 0x6d, 0x6c, 0x70, 0x72, 0x73, 0x73, 0x73, 0x73, + 0x72, 0x71, 0x71, 0x70, 0x6f, 0x6e, 0x6d, 0x67, 0x62, 0x5e, 0x52, 0x38, + 0x1f, 0xf8, 0xcb, 0xb7, 0xb3, 0xae, 0xb1, 0xb4, 0xb8, 0xbc, 0xb8, 0xbc, + 0xc5, 0xcd, 0xc9, 0xc2, 0xc3, 0xc2, 0xbd, 0xba, 0xba, 0xb7, 0xb7, 0xb8, + 0xba, 0xb8, 0xba, 0xb8, 0xb9, 0xb9, 0xba, 0xbf, 0xc5, 0xbd, 0xb0, 0xa8, + 0xa5, 0xa9, 0xa9, 0xaf, 0xc4, 0xc9, 0xc8, 0xca, 0xc8, 0xcc, 0xd0, 0xd2, + 0xcf, 0xc8, 0xc4, 0xc0, 0xc6, 0xc8, 0xc7, 0xbf, 0xb6, 0xb3, 0xaf, 0xb1, + 0xb9, 0xb1, 0xaf, 0xae, 0xaa, 0xaa, 0xaf, 0xb3, 0xb1, 0xaf, 0xb0, 0xb6, + 0xbb, 0xb4, 0xbc, 0xd2, 0xc3, 0xc0, 0xc2, 0xbb, 0xbb, 0xc1, 0xbb, 0xb1, + 0xb0, 0xb8, 0xb8, 0xc2, 0xbe, 0xba, 0xba, 0xbd, 0xc8, 0xc5, 0xc0, 0xbd, + 0xb6, 0xb2, 0xba, 0xbb, 0xba, 0xc0, 0xbf, 0xbf, 0xc1, 0xc3, 0xc6, 0xc4, + 0xc2, 0xc5, 0xc8, 0xc2, 0xc8, 0xc2, 0xc0, 0xc2, 0xc0, 0xc2, 0xc0, 0xc8, + 0xd0, 0xc7, 0xba, 0xbf, 0xbd, 0xc2, 0xbc, 0xb4, 0xae, 0xb6, 0xca, 0xc4, + 0x54, 0x54, 0x52, 0x4f, 0x48, 0x36, 0x1a, 0x04, 0xfe, 0x00, 0x05, 0x0a, + 0x0c, 0x11, 0x15, 0x22, 0x23, 0x21, 0x1e, 0x1d, 0x1f, 0x22, 0x22, 0x22, + 0x28, 0x2e, 0x2e, 0x2a, 0x2b, 0x2d, 0x2f, 0x33, 0x35, 0x34, 0x31, 0x34, + 0x39, 0x3b, 0x3b, 0x34, 0x2b, 0x29, 0x33, 0x3e, 0x48, 0x4e, 0x58, 0x59, + 0x5e, 0x64, 0x63, 0x60, 0x60, 0x64, 0x67, 0x64, 0x61, 0x61, 0x60, 
0x60, + 0x5f, 0x60, 0x62, 0x63, 0x63, 0x5f, 0x65, 0x68, 0x67, 0x68, 0x6a, 0x6e, + 0x6f, 0x6d, 0x6f, 0x70, 0x72, 0x73, 0x74, 0x74, 0x73, 0x72, 0x70, 0x71, + 0x70, 0x70, 0x6f, 0x6a, 0x5e, 0x56, 0x57, 0x44, 0x20, 0x10, 0xf7, 0xd6, + 0xbe, 0xb6, 0xb9, 0xb8, 0xbd, 0xbf, 0xc3, 0xc2, 0xc2, 0xc3, 0xc2, 0xbf, + 0xbf, 0xbc, 0xb9, 0xb6, 0xb6, 0xb9, 0xb7, 0xb6, 0xb8, 0xb9, 0xbb, 0xbb, + 0xb9, 0xb8, 0xb7, 0xbf, 0xc2, 0xb6, 0xad, 0xa8, 0xab, 0xac, 0xaa, 0xaa, + 0xb7, 0xc6, 0xca, 0xcd, 0xcd, 0xd0, 0xd1, 0xd0, 0xcf, 0xcb, 0xc4, 0xc1, + 0xc4, 0xcb, 0xc0, 0xb7, 0xb6, 0xb7, 0xb1, 0xb0, 0xb6, 0xb0, 0xac, 0xab, + 0xaa, 0xae, 0xb5, 0xb9, 0xbc, 0xb9, 0xb6, 0xbc, 0xbf, 0xc4, 0xce, 0xcc, + 0xc2, 0xc2, 0xbf, 0xbc, 0xc2, 0xc6, 0xc1, 0xb7, 0xb0, 0xba, 0xbb, 0xb9, + 0xbc, 0xbf, 0xb9, 0xb6, 0xc2, 0xbc, 0xb8, 0xb5, 0xb5, 0xb3, 0xb8, 0xb6, + 0xba, 0xc2, 0xc6, 0xc8, 0xc5, 0xc3, 0xc8, 0xc8, 0xc8, 0xc5, 0xcb, 0xc8, + 0xc6, 0xc2, 0xc3, 0xc3, 0xbc, 0xc1, 0xc2, 0xc2, 0xcb, 0xc3, 0xbc, 0xbb, + 0xbc, 0xba, 0xb2, 0xb6, 0xbc, 0xba, 0xc0, 0xb9, 0x55, 0x52, 0x51, 0x4e, + 0x4d, 0x4a, 0x42, 0x34, 0x24, 0x17, 0x14, 0x13, 0x16, 0x18, 0x1e, 0x21, + 0x20, 0x1e, 0x1b, 0x19, 0x18, 0x19, 0x1c, 0x1f, 0x22, 0x26, 0x25, 0x24, + 0x24, 0x26, 0x28, 0x2e, 0x34, 0x36, 0x38, 0x3a, 0x3d, 0x3a, 0x37, 0x30, + 0x2a, 0x29, 0x2e, 0x3c, 0x4a, 0x57, 0x5e, 0x5d, 0x60, 0x60, 0x5e, 0x5e, + 0x5d, 0x5f, 0x64, 0x5e, 0x5b, 0x5f, 0x64, 0x64, 0x62, 0x63, 0x65, 0x65, + 0x63, 0x61, 0x63, 0x68, 0x69, 0x66, 0x68, 0x6a, 0x6d, 0x6b, 0x6d, 0x6f, + 0x71, 0x73, 0x73, 0x74, 0x74, 0x73, 0x71, 0x71, 0x70, 0x70, 0x6f, 0x6d, + 0x66, 0x52, 0x4c, 0x45, 0x1c, 0x08, 0xfe, 0xeb, 0xd7, 0xc4, 0xc0, 0xba, + 0xba, 0xbe, 0xc0, 0xbd, 0xbd, 0xbc, 0xba, 0xbd, 0xc2, 0xbb, 0xb5, 0xb6, + 0xb5, 0xb5, 0xb8, 0xb5, 0xb2, 0xb7, 0xb8, 0xb6, 0xb8, 0xba, 0xbb, 0xc0, + 0xbe, 0xb7, 0xaf, 0xab, 0xb4, 0xac, 0xaa, 0xa8, 0xaa, 0xba, 0xc9, 0xce, + 0xd0, 0xd1, 0xcf, 0xcc, 0xca, 0xcb, 0xc6, 0xbe, 0xbc, 0xc9, 0xbf, 0xaf, + 0xb4, 0xbc, 0xb7, 0xb2, 0xb3, 0xb6, 0xb5, 0xaf, 0xb1, 0xba, 0xbc, 
0xb7, + 0xb6, 0xc0, 0xc5, 0xbe, 0xbd, 0xc2, 0xbc, 0xb8, 0xbe, 0xbc, 0xbd, 0xbd, + 0xc6, 0xce, 0xc4, 0xbc, 0xb2, 0xb1, 0xba, 0xb6, 0xb8, 0xbc, 0xc1, 0xb8, + 0xb5, 0xb3, 0xb0, 0xae, 0xb0, 0xb2, 0xbc, 0xbf, 0xc2, 0xc1, 0xc2, 0xc3, + 0xc8, 0xc4, 0xc8, 0xce, 0xd1, 0xcb, 0xca, 0xc5, 0xc4, 0xc3, 0xc6, 0xc1, + 0xbd, 0xc2, 0xbe, 0xbc, 0xc1, 0xbc, 0xbd, 0xbb, 0xb7, 0xb7, 0xb6, 0xbc, + 0xc2, 0xbd, 0xbf, 0xb6, 0x52, 0x51, 0x4f, 0x4c, 0x4b, 0x49, 0x46, 0x42, + 0x3d, 0x38, 0x34, 0x2d, 0x2a, 0x24, 0x1f, 0x1b, 0x18, 0x16, 0x16, 0x16, + 0x16, 0x15, 0x16, 0x17, 0x18, 0x1b, 0x20, 0x21, 0x24, 0x27, 0x2b, 0x2e, + 0x32, 0x37, 0x39, 0x3b, 0x3f, 0x41, 0x44, 0x46, 0x41, 0x41, 0x44, 0x49, + 0x51, 0x58, 0x5b, 0x5b, 0x60, 0x5e, 0x5e, 0x5c, 0x5a, 0x5d, 0x60, 0x5e, + 0x59, 0x5c, 0x64, 0x67, 0x65, 0x63, 0x65, 0x66, 0x63, 0x64, 0x64, 0x66, + 0x6a, 0x69, 0x68, 0x69, 0x6b, 0x6c, 0x6a, 0x6e, 0x71, 0x74, 0x72, 0x73, + 0x75, 0x74, 0x72, 0x71, 0x6f, 0x70, 0x70, 0x70, 0x6a, 0x63, 0x53, 0x40, + 0x27, 0x0a, 0xf7, 0xe5, 0xcc, 0xc6, 0xc2, 0xba, 0xb5, 0xb6, 0xb6, 0xb8, + 0xbb, 0xbb, 0xbc, 0xc2, 0xc7, 0xc1, 0xba, 0xb6, 0xb4, 0xb0, 0xb2, 0xb0, + 0xb0, 0xb6, 0xb6, 0xb8, 0xbb, 0xc1, 0xc0, 0xc0, 0xba, 0xb3, 0xaf, 0xb1, + 0xb2, 0xaa, 0xa8, 0xa9, 0xa7, 0xac, 0xbe, 0xcc, 0xca, 0xc9, 0xcc, 0xcb, + 0xc9, 0xcb, 0xc6, 0xbd, 0xba, 0xc2, 0xc2, 0xb2, 0xb6, 0xbe, 0xbc, 0xb8, + 0xb7, 0xb6, 0xb7, 0xb3, 0xb6, 0xb9, 0xbc, 0xbe, 0xb9, 0xbd, 0xc2, 0xbf, + 0xbe, 0xbb, 0xb6, 0xb6, 0xc1, 0xc1, 0xbd, 0xc6, 0xc4, 0xc7, 0xbc, 0xbd, + 0xb8, 0xb0, 0xb7, 0xb4, 0xb4, 0xb8, 0xbf, 0xbc, 0xb0, 0xaf, 0xaf, 0xab, + 0xae, 0xb3, 0xbc, 0xc2, 0xc7, 0xc5, 0xbd, 0xbd, 0xc5, 0xc6, 0xcb, 0xce, + 0xcf, 0xc8, 0xc4, 0xc5, 0xc1, 0xbc, 0xc2, 0xbc, 0xc6, 0xc4, 0xb6, 0xba, + 0xc5, 0xb9, 0xbb, 0xbe, 0xbd, 0xb9, 0xbb, 0xb6, 0xb6, 0xb6, 0xbe, 0xb9, + 0x50, 0x4e, 0x4d, 0x4b, 0x47, 0x45, 0x43, 0x40, 0x3d, 0x3b, 0x39, 0x37, + 0x36, 0x32, 0x2a, 0x22, 0x1a, 0x16, 0x16, 0x15, 0x16, 0x16, 0x18, 0x19, + 0x1b, 0x1d, 0x21, 0x25, 0x29, 0x2c, 0x2e, 0x30, 0x34, 0x34, 0x37, 
0x3a, + 0x3f, 0x45, 0x4c, 0x51, 0x51, 0x54, 0x54, 0x55, 0x58, 0x58, 0x58, 0x5b, + 0x60, 0x5f, 0x60, 0x5a, 0x5a, 0x5e, 0x60, 0x61, 0x5c, 0x5a, 0x64, 0x69, + 0x68, 0x64, 0x64, 0x67, 0x66, 0x66, 0x64, 0x66, 0x6a, 0x6a, 0x69, 0x67, + 0x69, 0x6c, 0x6a, 0x6e, 0x70, 0x73, 0x73, 0x72, 0x75, 0x74, 0x72, 0x70, + 0x70, 0x6f, 0x70, 0x70, 0x6a, 0x62, 0x5a, 0x4f, 0x2c, 0x11, 0xf8, 0xdb, + 0xce, 0xc7, 0xc2, 0xc0, 0xbb, 0xb4, 0xb2, 0xb5, 0xb8, 0xba, 0xbe, 0xc2, + 0xc2, 0xc0, 0xbc, 0xb6, 0xb0, 0xad, 0xad, 0xac, 0xaf, 0xb1, 0xb6, 0xbe, + 0xc1, 0xc7, 0xc1, 0xbd, 0xb7, 0xb3, 0xb0, 0xb2, 0xb0, 0xa9, 0xaa, 0xaa, + 0xa8, 0xa9, 0xae, 0xb9, 0xbf, 0xc1, 0xc8, 0xcb, 0xc7, 0xc8, 0xc6, 0xbf, + 0xba, 0xb9, 0xc3, 0xb9, 0xbc, 0xbc, 0xbc, 0xb9, 0xb7, 0xb7, 0xb6, 0xb9, + 0xb8, 0xbf, 0xbd, 0xb9, 0xb7, 0xbc, 0xc4, 0xc1, 0xbb, 0xbb, 0xba, 0xb7, + 0xc3, 0xc5, 0xc3, 0xc3, 0xc2, 0xc0, 0xb9, 0xbd, 0xbf, 0xba, 0xba, 0xb9, + 0xb5, 0xb4, 0xb3, 0xb0, 0xab, 0xaa, 0xa9, 0xa8, 0xb0, 0xb2, 0xb4, 0xbd, + 0xc3, 0xbe, 0xb7, 0xb5, 0xbb, 0xc4, 0xcd, 0xd0, 0xcb, 0xc4, 0xc2, 0xc4, + 0xbc, 0xb6, 0xb7, 0xb5, 0xc4, 0xbf, 0xb7, 0xbc, 0xc0, 0xb0, 0xb5, 0xbc, + 0xc2, 0xbe, 0xc2, 0xbc, 0xb0, 0xb3, 0xb6, 0xc0, 0x4c, 0x4c, 0x4b, 0x48, + 0x44, 0x43, 0x42, 0x40, 0x3c, 0x3b, 0x38, 0x3a, 0x38, 0x34, 0x2d, 0x28, + 0x1e, 0x15, 0x13, 0x12, 0x16, 0x1a, 0x1f, 0x23, 0x26, 0x29, 0x2d, 0x2e, + 0x33, 0x34, 0x37, 0x39, 0x39, 0x35, 0x32, 0x36, 0x3b, 0x43, 0x4c, 0x53, + 0x55, 0x59, 0x5b, 0x5b, 0x5b, 0x57, 0x52, 0x59, 0x60, 0x60, 0x60, 0x5d, + 0x5c, 0x60, 0x63, 0x64, 0x5e, 0x59, 0x63, 0x68, 0x67, 0x65, 0x64, 0x65, + 0x66, 0x66, 0x65, 0x67, 0x67, 0x69, 0x69, 0x69, 0x68, 0x6a, 0x6c, 0x6e, + 0x6f, 0x70, 0x70, 0x72, 0x75, 0x76, 0x73, 0x70, 0x70, 0x70, 0x6f, 0x6e, + 0x6d, 0x68, 0x5a, 0x52, 0x42, 0x18, 0x02, 0xe7, 0xcb, 0xc5, 0xc3, 0xc2, + 0xc2, 0xbc, 0xb0, 0xb3, 0xb6, 0xb6, 0xbb, 0xbd, 0xc1, 0xbc, 0xb8, 0xb6, + 0xad, 0xa8, 0xb0, 0xb9, 0xb6, 0xb4, 0xbc, 0xc0, 0xc5, 0xc8, 0xc1, 0xc7, + 0xc2, 0xb4, 0xb0, 0xb0, 0xad, 0xaa, 0xaa, 0xaa, 0xad, 0xae, 0xb1, 
0xb6, + 0xb6, 0xba, 0xbf, 0xc0, 0xbf, 0xbe, 0xbf, 0xbb, 0xbb, 0xb7, 0xbc, 0xc0, + 0xbf, 0xbb, 0xbe, 0xc2, 0xbb, 0xbb, 0xb8, 0xc2, 0xc4, 0xc3, 0xc2, 0xc1, + 0xc2, 0xc5, 0xc8, 0xc3, 0xbf, 0xbf, 0xc6, 0xc3, 0xc3, 0xca, 0xcb, 0xc5, + 0xc2, 0xc3, 0xc0, 0xbd, 0xc2, 0xc4, 0xc1, 0xbe, 0xc1, 0xb4, 0xb8, 0xb5, + 0xad, 0xac, 0xa8, 0xaa, 0xb2, 0xb5, 0xb7, 0xbc, 0xbc, 0xb5, 0xb3, 0xb8, + 0xbf, 0xc5, 0xca, 0xcd, 0xcc, 0xc2, 0xbc, 0xc2, 0xbc, 0xb7, 0xb2, 0xb6, + 0xc5, 0xcb, 0xc6, 0xc0, 0xba, 0xb6, 0xb3, 0xb6, 0xc5, 0xc0, 0xc2, 0xcb, + 0xb8, 0xb6, 0xaf, 0xc0, 0x4b, 0x4a, 0x48, 0x46, 0x43, 0x42, 0x40, 0x3d, + 0x39, 0x3a, 0x37, 0x35, 0x32, 0x2e, 0x2d, 0x2c, 0x23, 0x18, 0x16, 0x16, + 0x1a, 0x20, 0x25, 0x2c, 0x2e, 0x33, 0x31, 0x32, 0x35, 0x36, 0x3a, 0x3d, + 0x3e, 0x3b, 0x34, 0x31, 0x34, 0x3d, 0x48, 0x50, 0x56, 0x5b, 0x5e, 0x5e, + 0x5a, 0x57, 0x52, 0x58, 0x61, 0x61, 0x63, 0x5f, 0x61, 0x63, 0x66, 0x68, + 0x60, 0x5a, 0x63, 0x66, 0x67, 0x68, 0x64, 0x64, 0x65, 0x65, 0x65, 0x66, + 0x67, 0x69, 0x69, 0x69, 0x6a, 0x6a, 0x6d, 0x6f, 0x6e, 0x6d, 0x6f, 0x71, + 0x74, 0x75, 0x74, 0x71, 0x70, 0x71, 0x70, 0x70, 0x6f, 0x6d, 0x69, 0x5e, + 0x4a, 0x35, 0x09, 0xee, 0xd6, 0xc9, 0xc7, 0xc3, 0xbf, 0xb4, 0xab, 0xb0, + 0xb4, 0xb6, 0xb6, 0xbb, 0xbc, 0xba, 0xb7, 0xb4, 0xae, 0xa9, 0xad, 0xb9, + 0xbc, 0xb7, 0xbc, 0xc1, 0xbf, 0xc2, 0xc3, 0xcf, 0xc2, 0xb3, 0xb3, 0xaf, + 0xaf, 0xad, 0xac, 0xb0, 0xb6, 0xba, 0xb5, 0xaf, 0xb0, 0xb4, 0xb8, 0xb6, + 0xb5, 0xb3, 0xb7, 0xb9, 0xbd, 0xbc, 0xbc, 0xc2, 0xc2, 0xc1, 0xc0, 0xc2, + 0xbd, 0xbc, 0xb6, 0xc5, 0xc7, 0xc2, 0xc8, 0xce, 0xc7, 0xc7, 0xcc, 0xc7, + 0xca, 0xc7, 0xce, 0xda, 0xd5, 0xd7, 0xc8, 0xc6, 0xc4, 0xc5, 0xc3, 0xc0, + 0xbf, 0xc2, 0xc9, 0xc6, 0xbc, 0xb2, 0xb7, 0xb7, 0xb0, 0xad, 0xac, 0xab, + 0xb7, 0xc4, 0xbc, 0xb7, 0xb1, 0xb0, 0xb3, 0xc2, 0xcc, 0xc8, 0xc2, 0xcb, + 0xce, 0xc2, 0xb7, 0xb8, 0xb4, 0xb0, 0xb3, 0xc0, 0xc8, 0xce, 0xc3, 0xbe, + 0xc0, 0xc3, 0xba, 0xb2, 0xbc, 0xbb, 0xbe, 0xbe, 0xb6, 0xc2, 0xb3, 0xb3, + 0x4b, 0x48, 0x46, 0x45, 0x44, 0x42, 0x40, 0x3f, 0x3d, 0x3c, 0x3a, 
0x3a, + 0x3c, 0x3b, 0x3a, 0x34, 0x28, 0x1f, 0x1e, 0x20, 0x21, 0x25, 0x28, 0x2d, + 0x30, 0x32, 0x32, 0x33, 0x34, 0x33, 0x39, 0x3b, 0x3b, 0x3a, 0x38, 0x34, + 0x2e, 0x31, 0x3e, 0x4c, 0x55, 0x5e, 0x5f, 0x60, 0x5e, 0x59, 0x56, 0x5d, + 0x62, 0x64, 0x65, 0x61, 0x64, 0x64, 0x68, 0x67, 0x5f, 0x5c, 0x64, 0x69, + 0x67, 0x6a, 0x65, 0x66, 0x68, 0x67, 0x65, 0x65, 0x64, 0x69, 0x69, 0x6a, + 0x6c, 0x6b, 0x6d, 0x6f, 0x6c, 0x6d, 0x6d, 0x70, 0x72, 0x74, 0x74, 0x72, + 0x70, 0x70, 0x70, 0x70, 0x70, 0x6f, 0x6e, 0x67, 0x53, 0x3e, 0x26, 0x00, + 0xed, 0xd8, 0xc7, 0xbf, 0xb5, 0xad, 0xaa, 0xaf, 0xb0, 0xaf, 0xaf, 0xb2, + 0xb4, 0xb1, 0xaf, 0xb0, 0xae, 0xaa, 0xae, 0xbd, 0xc2, 0xc0, 0xbf, 0xbc, + 0xbf, 0xc2, 0xc5, 0xc5, 0xbc, 0xb7, 0xc2, 0xb6, 0xb6, 0xb6, 0xb5, 0xb5, + 0xba, 0xc1, 0xbb, 0xb4, 0xb6, 0xb0, 0xbc, 0xbc, 0xb5, 0xb3, 0xb3, 0xb6, + 0xb9, 0xbf, 0xc1, 0xcd, 0xc4, 0xc3, 0xc4, 0xbe, 0xbb, 0xbd, 0xc3, 0xc7, + 0xc2, 0xc3, 0xce, 0xd4, 0xc9, 0xce, 0xce, 0xd0, 0xd0, 0xcb, 0xc7, 0xc9, + 0xca, 0xca, 0xc4, 0xc1, 0xc2, 0xc7, 0xc9, 0xbd, 0xbd, 0xbf, 0xc6, 0xba, + 0xb6, 0xb6, 0xb8, 0xbc, 0xbc, 0xbb, 0xb6, 0xb2, 0xb1, 0xc7, 0xc6, 0xbe, + 0xb8, 0xb6, 0xba, 0xc3, 0xcc, 0xc8, 0xc5, 0xc8, 0xc9, 0xc1, 0xba, 0xbb, + 0xb0, 0xb0, 0xb6, 0xbc, 0xbe, 0xbe, 0xbe, 0xc2, 0xc2, 0xbf, 0xbc, 0xbc, + 0xba, 0xaf, 0xb1, 0xbc, 0xbc, 0xc7, 0xc3, 0xba, 0x4b, 0x47, 0x46, 0x43, + 0x42, 0x42, 0x40, 0x41, 0x40, 0x3e, 0x3c, 0x3d, 0x3f, 0x3f, 0x3d, 0x34, + 0x2b, 0x21, 0x1f, 0x23, 0x25, 0x28, 0x2b, 0x2c, 0x2e, 0x2f, 0x2f, 0x2f, + 0x30, 0x34, 0x3a, 0x40, 0x40, 0x3b, 0x3a, 0x3d, 0x3c, 0x3a, 0x40, 0x4d, + 0x56, 0x5e, 0x61, 0x63, 0x61, 0x5e, 0x59, 0x5e, 0x63, 0x66, 0x68, 0x64, + 0x65, 0x65, 0x6a, 0x66, 0x60, 0x61, 0x67, 0x6b, 0x69, 0x6b, 0x6a, 0x68, + 0x6a, 0x69, 0x67, 0x64, 0x63, 0x66, 0x69, 0x69, 0x6c, 0x6c, 0x6b, 0x6c, + 0x6b, 0x6d, 0x6f, 0x70, 0x72, 0x73, 0x73, 0x73, 0x71, 0x70, 0x70, 0x70, + 0x72, 0x72, 0x70, 0x70, 0x60, 0x44, 0x27, 0x14, 0xf8, 0xd8, 0xc6, 0xb8, + 0xaf, 0xb0, 0xb6, 0xb5, 0xb1, 0xaf, 0xad, 0xa9, 0xae, 0xae, 0xad, 
0xb1, + 0xb0, 0xb6, 0xbb, 0xc1, 0xc3, 0xc4, 0xbf, 0xc0, 0xc2, 0xc2, 0xc0, 0xbc, + 0xb7, 0xb2, 0xc3, 0xc8, 0xbe, 0xbf, 0xc2, 0xbf, 0xc0, 0xc7, 0xc3, 0xba, + 0xb8, 0xba, 0xc2, 0xc7, 0xc2, 0xc5, 0xc2, 0xbd, 0xb9, 0xbe, 0xc6, 0xc4, + 0xbd, 0xbc, 0xbb, 0xb6, 0xb6, 0xc1, 0xc8, 0xc2, 0xbc, 0xc3, 0xc9, 0xcf, + 0xcc, 0xcf, 0xcd, 0xcc, 0xce, 0xce, 0xc9, 0xc7, 0xc8, 0xc2, 0xc6, 0xc4, + 0xca, 0xc9, 0xc7, 0xca, 0xc8, 0xc2, 0xbc, 0xc2, 0xc3, 0xc0, 0xc4, 0xc5, + 0xc4, 0xc7, 0xc5, 0xc2, 0xbb, 0xbc, 0xc8, 0xc1, 0xbd, 0xbf, 0xbd, 0xbe, + 0xbf, 0xc4, 0xc8, 0xbe, 0xc7, 0xc2, 0xba, 0xc2, 0xc8, 0xbe, 0xb7, 0xb5, + 0xba, 0xc8, 0xc8, 0xcb, 0xcc, 0xc3, 0xb7, 0xbb, 0xb7, 0xb0, 0xad, 0xbd, + 0xbd, 0xc5, 0xce, 0xc3, 0x4c, 0x49, 0x46, 0x44, 0x40, 0x40, 0x42, 0x43, + 0x42, 0x40, 0x40, 0x41, 0x40, 0x40, 0x3e, 0x36, 0x2a, 0x20, 0x1d, 0x22, + 0x28, 0x28, 0x2b, 0x2c, 0x2e, 0x2e, 0x2e, 0x30, 0x35, 0x3b, 0x40, 0x45, + 0x46, 0x42, 0x40, 0x47, 0x47, 0x4a, 0x4e, 0x52, 0x58, 0x5e, 0x61, 0x63, + 0x62, 0x60, 0x5d, 0x5f, 0x64, 0x68, 0x69, 0x65, 0x65, 0x66, 0x69, 0x66, + 0x63, 0x65, 0x68, 0x6c, 0x6a, 0x6a, 0x6b, 0x69, 0x6a, 0x6a, 0x68, 0x66, + 0x62, 0x62, 0x67, 0x69, 0x6a, 0x6c, 0x6b, 0x6b, 0x6c, 0x6c, 0x6e, 0x70, + 0x71, 0x70, 0x71, 0x72, 0x72, 0x6f, 0x70, 0x72, 0x73, 0x73, 0x73, 0x72, + 0x6f, 0x58, 0x2e, 0x04, 0xf6, 0xe8, 0xcd, 0xb7, 0xb1, 0xb5, 0xbd, 0xbc, + 0xb8, 0xb3, 0xb4, 0xb0, 0xb5, 0xbc, 0xbf, 0xc2, 0xc2, 0xc3, 0xc3, 0xc1, + 0xbf, 0xc4, 0xbc, 0xbf, 0xbf, 0xbf, 0xbc, 0xc5, 0xc4, 0xc6, 0xc4, 0xc4, + 0xc3, 0xc0, 0xbe, 0xc2, 0xc9, 0xd5, 0xd5, 0xbf, 0xbb, 0xbc, 0xbd, 0xc2, + 0xc3, 0xc6, 0xc5, 0xc0, 0xc4, 0xc4, 0xc5, 0xbf, 0xbd, 0xbb, 0xb9, 0xb6, + 0xb8, 0xbf, 0xce, 0xbe, 0xbc, 0xc6, 0xca, 0xd1, 0xd0, 0xc4, 0xcc, 0xcc, + 0xca, 0xc9, 0xc8, 0xd0, 0xd2, 0xc8, 0xc2, 0xc2, 0xca, 0xcb, 0xce, 0xce, + 0xcd, 0xc4, 0xbb, 0xc0, 0xc1, 0xc1, 0xc7, 0xcc, 0xcb, 0xc4, 0xc0, 0xc2, + 0xca, 0xc5, 0xc4, 0xcb, 0xcc, 0xca, 0xc5, 0xc0, 0xc3, 0xc6, 0xc0, 0xbc, + 0xc6, 0xbd, 0xb2, 0xbb, 0xce, 0xc2, 0xb8, 0xb3, 0xbc, 0xcf, 0xcc, 
0xc6, + 0xc6, 0xd0, 0xc3, 0xbf, 0xc2, 0xb4, 0xaf, 0xb5, 0xb6, 0xc3, 0xc2, 0xc5, + 0x4b, 0x47, 0x47, 0x46, 0x42, 0x42, 0x43, 0x43, 0x43, 0x44, 0x43, 0x45, + 0x45, 0x41, 0x3f, 0x39, 0x2d, 0x24, 0x20, 0x22, 0x25, 0x28, 0x2a, 0x2b, + 0x2f, 0x30, 0x32, 0x36, 0x3a, 0x40, 0x47, 0x4c, 0x4c, 0x4d, 0x4f, 0x52, + 0x53, 0x58, 0x59, 0x58, 0x59, 0x5f, 0x62, 0x64, 0x62, 0x62, 0x61, 0x62, + 0x65, 0x6a, 0x68, 0x66, 0x66, 0x67, 0x67, 0x68, 0x65, 0x68, 0x6a, 0x6e, + 0x6c, 0x6a, 0x6b, 0x6a, 0x69, 0x6a, 0x66, 0x66, 0x63, 0x62, 0x64, 0x69, + 0x68, 0x6b, 0x6b, 0x6b, 0x6d, 0x6c, 0x6c, 0x6f, 0x70, 0x6f, 0x6f, 0x70, + 0x70, 0x6f, 0x70, 0x71, 0x72, 0x73, 0x74, 0x73, 0x71, 0x66, 0x46, 0x0b, + 0xdc, 0xd4, 0xd7, 0xc6, 0xbf, 0xc4, 0xcd, 0xd1, 0xcd, 0xc6, 0xce, 0xc7, + 0xc9, 0xd3, 0xce, 0xce, 0xc8, 0xc8, 0xc9, 0xc2, 0xbe, 0xce, 0xc1, 0xbc, + 0xba, 0xbc, 0xc0, 0xc6, 0xcc, 0xd7, 0xce, 0xbb, 0xbf, 0xc5, 0xc2, 0xc4, + 0xcc, 0xcb, 0xc0, 0xb8, 0xbc, 0xc2, 0xc8, 0xce, 0xc8, 0xc9, 0xc2, 0xc2, + 0xc8, 0xc5, 0xc2, 0xc1, 0xc6, 0xc8, 0xc2, 0xbf, 0xc2, 0xc8, 0xc7, 0xbe, + 0xbc, 0xc4, 0xc8, 0xc4, 0xc9, 0xc6, 0xcd, 0xc8, 0xcb, 0xc3, 0xc1, 0xc7, + 0xca, 0xca, 0xc5, 0xc9, 0xcc, 0xc4, 0xcb, 0xc8, 0xc5, 0xc8, 0xc0, 0xc1, + 0xc5, 0xc4, 0xc8, 0xc8, 0xca, 0xc8, 0xc2, 0xc3, 0xce, 0xc6, 0xc8, 0xd3, + 0xd5, 0xd5, 0xd0, 0xca, 0xc8, 0xc7, 0xc8, 0xcd, 0xce, 0xc2, 0xb5, 0xb8, + 0xc1, 0xc1, 0xc3, 0xbd, 0xc0, 0xc8, 0xc9, 0xbb, 0xc4, 0xd7, 0xcf, 0xc6, + 0xc1, 0xb3, 0xb0, 0xaf, 0xba, 0xca, 0xc1, 0xbc, 0x4a, 0x47, 0x46, 0x45, + 0x44, 0x45, 0x46, 0x45, 0x46, 0x46, 0x46, 0x46, 0x46, 0x43, 0x40, 0x3d, + 0x37, 0x2e, 0x28, 0x25, 0x24, 0x29, 0x2c, 0x2e, 0x35, 0x3b, 0x3c, 0x3c, + 0x3f, 0x45, 0x4d, 0x53, 0x54, 0x54, 0x56, 0x5a, 0x59, 0x5b, 0x5b, 0x5c, + 0x5e, 0x62, 0x63, 0x64, 0x63, 0x64, 0x64, 0x64, 0x68, 0x6a, 0x68, 0x66, + 0x69, 0x68, 0x69, 0x6a, 0x66, 0x6a, 0x6c, 0x6f, 0x6d, 0x6a, 0x6b, 0x6a, + 0x67, 0x68, 0x67, 0x64, 0x64, 0x63, 0x62, 0x66, 0x67, 0x68, 0x6a, 0x6b, + 0x6c, 0x6b, 0x6b, 0x6c, 0x6d, 0x6d, 0x6d, 0x70, 0x6f, 0x6e, 0x70, 
0x72, + 0x71, 0x72, 0x74, 0x73, 0x71, 0x6a, 0x58, 0x27, 0xf8, 0xdc, 0xd8, 0xe0, + 0xdc, 0xd8, 0xd4, 0xd6, 0xdb, 0xd9, 0xdd, 0xd4, 0xd1, 0xc8, 0xc4, 0xc5, + 0xc3, 0xc8, 0xd2, 0xc4, 0xc1, 0xcc, 0xcb, 0xc3, 0xc6, 0xc2, 0xcb, 0xc8, + 0xc7, 0xce, 0xca, 0xbf, 0xc2, 0xc4, 0xc0, 0xc8, 0xcd, 0xc3, 0xbc, 0xbb, + 0xbe, 0xca, 0xd4, 0xd9, 0xca, 0xc3, 0xb8, 0xc0, 0xc6, 0xc2, 0xc2, 0xc6, + 0xc3, 0xca, 0xd0, 0xdb, 0xcf, 0xc8, 0xca, 0xce, 0xc9, 0xc4, 0xbf, 0xb7, + 0xc0, 0xc4, 0xc0, 0xc0, 0xc4, 0xbc, 0xbe, 0xbd, 0xc7, 0xca, 0xc8, 0xce, + 0xd7, 0xce, 0xcc, 0xc2, 0xc2, 0xc2, 0xbd, 0xbc, 0xc2, 0xc4, 0xc8, 0xc0, + 0xbe, 0xc2, 0xbc, 0xc2, 0xca, 0xc8, 0xc9, 0xcf, 0xd6, 0xda, 0xd5, 0xcf, + 0xcb, 0xc9, 0xc8, 0xcb, 0xcd, 0xca, 0xc1, 0xbc, 0xbc, 0xc4, 0xc2, 0xc2, + 0xc2, 0xcd, 0xcd, 0xbc, 0xc0, 0xd4, 0xd4, 0xd3, 0xc3, 0xb6, 0xb6, 0xb7, + 0xc3, 0xc3, 0xbd, 0xbd, 0x48, 0x47, 0x45, 0x44, 0x45, 0x47, 0x49, 0x48, + 0x49, 0x49, 0x49, 0x49, 0x47, 0x45, 0x41, 0x3f, 0x3a, 0x34, 0x31, 0x2d, + 0x2d, 0x30, 0x31, 0x34, 0x3d, 0x43, 0x46, 0x48, 0x4a, 0x4f, 0x54, 0x55, + 0x56, 0x56, 0x59, 0x5d, 0x5d, 0x5d, 0x5f, 0x61, 0x61, 0x63, 0x65, 0x65, + 0x64, 0x66, 0x68, 0x68, 0x6a, 0x6a, 0x68, 0x69, 0x6a, 0x69, 0x6a, 0x6a, + 0x67, 0x6c, 0x6c, 0x6f, 0x70, 0x6c, 0x6a, 0x69, 0x65, 0x65, 0x67, 0x66, + 0x65, 0x64, 0x62, 0x64, 0x67, 0x68, 0x6a, 0x6b, 0x6c, 0x6b, 0x6b, 0x6b, + 0x6c, 0x6c, 0x6d, 0x6f, 0x70, 0x6f, 0x70, 0x72, 0x72, 0x73, 0x74, 0x74, + 0x72, 0x6d, 0x63, 0x4b, 0x22, 0xfb, 0xe0, 0xe0, 0xd7, 0xd0, 0xca, 0xce, + 0xd9, 0xd6, 0xd2, 0xca, 0xcb, 0xc8, 0xc2, 0xc2, 0xc2, 0xc2, 0xc7, 0xc2, + 0xbe, 0xc7, 0xc7, 0xca, 0xca, 0xc0, 0xc4, 0xca, 0xc8, 0xc8, 0xc9, 0xc8, + 0xca, 0xc2, 0xc6, 0xcf, 0xcf, 0xc3, 0xc2, 0xc7, 0xca, 0xce, 0xc8, 0xc4, + 0xc2, 0xb8, 0xb3, 0xb9, 0xc1, 0xbe, 0xbf, 0xc2, 0xc2, 0xcb, 0xce, 0xd5, + 0xce, 0xc8, 0xce, 0xd4, 0xd5, 0xd0, 0xc1, 0xb8, 0xbc, 0xc2, 0xb8, 0xbc, + 0xbd, 0xbd, 0xbe, 0xc1, 0xc9, 0xc8, 0xc3, 0xcd, 0xd5, 0xce, 0xc1, 0xbc, + 0xbe, 0xbd, 0xb8, 0xbb, 0xc8, 0xca, 0xc5, 0xbc, 0xbc, 0xbb, 0xba, 
0xc6, + 0xcc, 0xc1, 0xc6, 0xd2, 0xd3, 0xcf, 0xce, 0xcd, 0xcc, 0xd4, 0xcd, 0xc4, + 0xc2, 0xc0, 0xbc, 0xb7, 0xbc, 0xcc, 0xc2, 0xbc, 0xbb, 0xc9, 0xce, 0xc2, + 0xc0, 0xc1, 0xcc, 0xdc, 0xce, 0xc2, 0xc2, 0xc0, 0xc3, 0xc2, 0xc1, 0xbf, + 0x48, 0x49, 0x47, 0x45, 0x46, 0x49, 0x49, 0x4a, 0x4c, 0x4c, 0x4e, 0x4c, + 0x4b, 0x49, 0x45, 0x40, 0x3c, 0x37, 0x36, 0x35, 0x36, 0x35, 0x36, 0x3a, + 0x40, 0x45, 0x4a, 0x4e, 0x51, 0x53, 0x56, 0x55, 0x54, 0x58, 0x5b, 0x5a, + 0x5d, 0x5e, 0x63, 0x63, 0x63, 0x64, 0x65, 0x67, 0x64, 0x67, 0x6a, 0x6d, + 0x6a, 0x6a, 0x68, 0x6a, 0x6a, 0x6a, 0x6c, 0x69, 0x66, 0x6c, 0x6b, 0x6e, + 0x70, 0x6c, 0x69, 0x66, 0x64, 0x66, 0x65, 0x63, 0x66, 0x65, 0x62, 0x64, + 0x68, 0x66, 0x69, 0x6a, 0x6a, 0x6a, 0x6b, 0x6b, 0x6c, 0x6d, 0x6d, 0x6d, + 0x6f, 0x70, 0x71, 0x72, 0x73, 0x73, 0x74, 0x75, 0x75, 0x71, 0x68, 0x5d, + 0x4d, 0x22, 0xf2, 0xe2, 0xdb, 0xda, 0xd3, 0xd1, 0xd3, 0xd2, 0xcb, 0xc5, + 0xc8, 0xcc, 0xc8, 0xcd, 0xc8, 0xc5, 0xcc, 0xc8, 0xc5, 0xc4, 0xc9, 0xce, + 0xc9, 0xce, 0xc3, 0xc6, 0xcd, 0xcd, 0xca, 0xce, 0xcd, 0xc6, 0xcc, 0xd2, + 0xca, 0xc4, 0xc2, 0xc2, 0xcd, 0xd0, 0xc1, 0xbd, 0xbd, 0xbc, 0xba, 0xba, + 0xc2, 0xc0, 0xc0, 0xc3, 0xc2, 0xc6, 0xc4, 0xc5, 0xc1, 0xbc, 0xbc, 0xc6, + 0xcf, 0xcb, 0xc5, 0xc5, 0xc6, 0xc2, 0xbe, 0xbd, 0xc0, 0xc1, 0xc2, 0xc6, + 0xc7, 0xc8, 0xc7, 0xcc, 0xcb, 0xc7, 0xbd, 0xb7, 0xb9, 0xbb, 0xc0, 0xbf, + 0xcc, 0xd0, 0xc3, 0xc1, 0xb9, 0xbd, 0xbd, 0xba, 0xbc, 0xbc, 0xc2, 0xc8, + 0xc9, 0xc5, 0xc2, 0xc5, 0xc3, 0xd2, 0xd0, 0xc8, 0xc4, 0xbe, 0xbc, 0xb2, + 0xba, 0xce, 0xc7, 0xbb, 0xbb, 0xc5, 0xce, 0xca, 0xc8, 0xc3, 0xc5, 0xd7, + 0xd6, 0xc9, 0xc9, 0xc0, 0xc6, 0xc6, 0xc8, 0xca, 0x49, 0x4c, 0x4b, 0x48, + 0x4a, 0x4c, 0x4d, 0x4e, 0x4f, 0x4e, 0x4e, 0x4e, 0x4c, 0x49, 0x45, 0x41, + 0x3d, 0x3c, 0x3b, 0x3a, 0x3b, 0x3b, 0x3c, 0x3c, 0x41, 0x46, 0x4b, 0x52, + 0x54, 0x57, 0x57, 0x52, 0x51, 0x55, 0x57, 0x5b, 0x5e, 0x5e, 0x62, 0x64, + 0x63, 0x63, 0x64, 0x65, 0x66, 0x68, 0x6a, 0x6e, 0x6a, 0x68, 0x65, 0x69, + 0x6a, 0x6a, 0x6c, 0x67, 0x64, 0x6a, 0x6a, 0x6b, 0x6f, 0x6c, 0x6a, 
0x68, + 0x62, 0x64, 0x63, 0x61, 0x66, 0x68, 0x62, 0x64, 0x68, 0x65, 0x65, 0x68, + 0x69, 0x69, 0x6a, 0x6a, 0x6b, 0x6b, 0x6c, 0x6e, 0x6f, 0x70, 0x71, 0x72, + 0x73, 0x72, 0x74, 0x76, 0x76, 0x75, 0x71, 0x6a, 0x63, 0x52, 0x28, 0x02, + 0xe8, 0xd9, 0xd1, 0xcf, 0xd1, 0xd3, 0xc9, 0xc8, 0xce, 0xcd, 0xcd, 0xcc, + 0xcb, 0xc7, 0xcf, 0xd0, 0xcc, 0xc7, 0xc9, 0xcc, 0xc5, 0xc6, 0xc5, 0xc8, + 0xce, 0xc9, 0xc7, 0xcc, 0xc8, 0xcc, 0xd2, 0xd5, 0xca, 0xc8, 0xc4, 0xc7, + 0xcc, 0xc8, 0xc3, 0xc2, 0xc4, 0xbe, 0xb9, 0xbc, 0xc3, 0xc0, 0xc0, 0xbe, + 0xc0, 0xc4, 0xc5, 0xc2, 0xbf, 0xc2, 0xbb, 0xb9, 0xbe, 0xc0, 0xbe, 0xc5, + 0xc7, 0xc4, 0xc4, 0xbf, 0xbe, 0xbe, 0xc5, 0xc6, 0xbf, 0xc0, 0xc3, 0xc9, + 0xc2, 0xbe, 0xbe, 0xba, 0xbe, 0xb9, 0xc1, 0xc1, 0xc0, 0xcc, 0xc9, 0xca, + 0xbd, 0xb3, 0xbb, 0xb6, 0xbb, 0xc2, 0xb9, 0xc0, 0xc2, 0xc1, 0xbc, 0xb9, + 0xc7, 0xcf, 0xc7, 0xbd, 0xc7, 0xc2, 0xc2, 0xbe, 0xbe, 0xbd, 0xbf, 0xb6, + 0xbb, 0xc8, 0xd1, 0xcd, 0xc9, 0xc8, 0xc1, 0xc9, 0xce, 0xd1, 0xcb, 0xbe, + 0xc2, 0xce, 0xcb, 0xcf, 0x49, 0x49, 0x48, 0x48, 0x49, 0x4b, 0x4b, 0x4c, + 0x4c, 0x4a, 0x49, 0x4a, 0x48, 0x46, 0x42, 0x40, 0x3d, 0x3c, 0x3e, 0x40, + 0x40, 0x40, 0x40, 0x43, 0x46, 0x4c, 0x51, 0x57, 0x59, 0x58, 0x52, 0x50, + 0x54, 0x57, 0x58, 0x5c, 0x5b, 0x5e, 0x5f, 0x64, 0x63, 0x60, 0x62, 0x61, + 0x64, 0x68, 0x69, 0x6d, 0x68, 0x67, 0x67, 0x68, 0x67, 0x6a, 0x6c, 0x65, + 0x64, 0x68, 0x6a, 0x6a, 0x6c, 0x6b, 0x6a, 0x6a, 0x63, 0x63, 0x62, 0x5f, + 0x63, 0x65, 0x63, 0x64, 0x68, 0x65, 0x64, 0x66, 0x68, 0x69, 0x68, 0x6a, + 0x6b, 0x6b, 0x6c, 0x6f, 0x70, 0x70, 0x71, 0x72, 0x72, 0x72, 0x74, 0x76, + 0x76, 0x75, 0x74, 0x72, 0x6d, 0x67, 0x57, 0x34, 0x05, 0xe4, 0xe1, 0xd9, + 0xd3, 0xd1, 0xcf, 0xca, 0xce, 0xd3, 0xcb, 0xc7, 0xd0, 0xcd, 0xcb, 0xd0, + 0xcd, 0xcf, 0xd2, 0xc8, 0xc6, 0xca, 0xcd, 0xce, 0xc6, 0xc5, 0xc4, 0xc6, + 0xc4, 0xc6, 0xca, 0xc8, 0xc9, 0xcf, 0xc9, 0xcc, 0xcf, 0xc8, 0xca, 0xc6, + 0xc5, 0xbf, 0xbd, 0xc3, 0xc6, 0xc1, 0xbe, 0xbe, 0xc3, 0xc8, 0xc8, 0xc7, + 0xc3, 0xca, 0xc4, 0xc2, 0xc6, 0xc5, 0xc1, 0xc5, 0xc5, 0xbf, 0xc0, 
0xc1, + 0xc0, 0xbd, 0xc2, 0xc8, 0xbb, 0xbc, 0xcb, 0xd0, 0xcb, 0xc5, 0xc6, 0xc3, + 0xbe, 0xbd, 0xbf, 0xbe, 0xc2, 0xc5, 0xc2, 0xc3, 0xc2, 0xbb, 0xbd, 0xb5, + 0xb0, 0xb5, 0xb6, 0xc2, 0xc7, 0xc2, 0xb9, 0xba, 0xce, 0xce, 0xc2, 0xc1, + 0xc2, 0xc0, 0xca, 0xd2, 0xca, 0xc4, 0xbf, 0xbc, 0xb9, 0xcf, 0xcf, 0xc6, + 0xc6, 0xcc, 0xcd, 0xd4, 0xc8, 0xd1, 0xcf, 0xc1, 0xc0, 0xcf, 0xcd, 0xcb, + 0x47, 0x46, 0x46, 0x47, 0x47, 0x46, 0x46, 0x46, 0x48, 0x47, 0x47, 0x49, + 0x49, 0x47, 0x45, 0x42, 0x41, 0x40, 0x43, 0x45, 0x46, 0x45, 0x45, 0x48, + 0x4c, 0x4f, 0x53, 0x58, 0x57, 0x54, 0x52, 0x57, 0x58, 0x58, 0x58, 0x59, + 0x5c, 0x5e, 0x61, 0x64, 0x62, 0x5c, 0x5c, 0x5e, 0x63, 0x67, 0x68, 0x6b, + 0x64, 0x64, 0x67, 0x66, 0x68, 0x6a, 0x6a, 0x64, 0x64, 0x66, 0x69, 0x6a, + 0x6a, 0x6a, 0x69, 0x6a, 0x64, 0x63, 0x64, 0x60, 0x60, 0x62, 0x61, 0x64, + 0x65, 0x64, 0x63, 0x64, 0x65, 0x66, 0x66, 0x68, 0x6a, 0x6c, 0x6d, 0x6f, + 0x70, 0x70, 0x70, 0x70, 0x71, 0x72, 0x73, 0x74, 0x76, 0x76, 0x74, 0x73, + 0x72, 0x70, 0x6c, 0x5e, 0x3a, 0x11, 0xf8, 0xe6, 0xda, 0xd3, 0xcf, 0xce, + 0xd5, 0xd7, 0xcc, 0xce, 0xd3, 0xd1, 0xcb, 0xcb, 0xc3, 0xca, 0xcb, 0xc2, + 0xcb, 0xd6, 0xcf, 0xcf, 0xc4, 0xbd, 0xc2, 0xc8, 0xc2, 0xbb, 0xc4, 0xc6, + 0xc8, 0xce, 0xc8, 0xcb, 0xd0, 0xcb, 0xca, 0xca, 0xc5, 0xbb, 0xc2, 0xc9, + 0xbf, 0xbf, 0xc2, 0xc2, 0xc5, 0xc5, 0xc1, 0xc8, 0xbe, 0xbc, 0xba, 0xbf, + 0xc7, 0xc8, 0xcd, 0xc8, 0xbb, 0xb4, 0xc3, 0xc3, 0xbd, 0xbf, 0xc5, 0xc4, + 0xbc, 0xc5, 0xca, 0xc8, 0xcd, 0xc8, 0xc8, 0xce, 0xc7, 0xbe, 0xc3, 0xc7, + 0xcc, 0xbd, 0xbf, 0xc4, 0xc2, 0xc2, 0xc1, 0xb8, 0xb0, 0xb6, 0xb3, 0xba, + 0xb8, 0xbe, 0xbe, 0xc4, 0xcd, 0xcc, 0xbe, 0xc2, 0xc7, 0xc2, 0xd3, 0xd2, + 0xc8, 0xc7, 0xbe, 0xba, 0xbc, 0xd7, 0xcf, 0xc5, 0xc2, 0xc2, 0xc8, 0xd5, + 0xd6, 0xcd, 0xc9, 0xbc, 0xc5, 0xc7, 0xc3, 0xbe, 0x48, 0x47, 0x46, 0x47, + 0x47, 0x48, 0x49, 0x4c, 0x4d, 0x4e, 0x4e, 0x4e, 0x4c, 0x48, 0x46, 0x46, + 0x46, 0x46, 0x46, 0x48, 0x47, 0x48, 0x49, 0x4a, 0x4c, 0x4e, 0x54, 0x54, + 0x55, 0x57, 0x58, 0x5a, 0x5b, 0x56, 0x55, 0x58, 0x5e, 0x60, 0x63, 
0x63, + 0x60, 0x58, 0x58, 0x5d, 0x62, 0x64, 0x65, 0x66, 0x64, 0x62, 0x68, 0x66, + 0x6a, 0x6a, 0x69, 0x67, 0x66, 0x66, 0x69, 0x6a, 0x69, 0x67, 0x66, 0x6a, + 0x68, 0x62, 0x66, 0x63, 0x5e, 0x5e, 0x60, 0x63, 0x66, 0x64, 0x63, 0x64, + 0x64, 0x64, 0x64, 0x66, 0x6a, 0x6b, 0x6d, 0x6e, 0x70, 0x71, 0x70, 0x70, + 0x71, 0x72, 0x74, 0x75, 0x76, 0x76, 0x76, 0x75, 0x73, 0x70, 0x6c, 0x68, + 0x5c, 0x3d, 0x1c, 0xfc, 0xdd, 0xcd, 0xc8, 0xd2, 0xd4, 0xda, 0xce, 0xd0, + 0xcf, 0xcc, 0xca, 0xc9, 0xc2, 0xc8, 0xc2, 0xc1, 0xc7, 0xcc, 0xc9, 0xcd, + 0xc7, 0xc8, 0xc4, 0xc2, 0xc2, 0xbd, 0xc5, 0xc3, 0xc4, 0xc3, 0xc2, 0xc4, + 0xca, 0xcc, 0xcc, 0xca, 0xc1, 0xb8, 0xc2, 0xce, 0xc7, 0xcd, 0xd3, 0xd7, + 0xcc, 0xc7, 0xc2, 0xc2, 0xc1, 0xb3, 0xb2, 0xbd, 0xc5, 0xc8, 0xcc, 0xc9, + 0xbc, 0xb9, 0xc0, 0xc6, 0xbd, 0xb8, 0xbf, 0xc2, 0xc0, 0xc4, 0xd1, 0xcf, + 0xcd, 0xcd, 0xc4, 0xc6, 0xc8, 0xc2, 0xc1, 0xc2, 0xc2, 0xbc, 0xc1, 0xc8, + 0xcd, 0xc2, 0xc0, 0xc2, 0xbc, 0xbd, 0xbc, 0xb9, 0xb5, 0xbd, 0xcc, 0xcd, + 0xc8, 0xc8, 0xc2, 0xba, 0xc2, 0xbd, 0xce, 0xcc, 0xc3, 0xc5, 0xbb, 0xbb, + 0xc2, 0xd2, 0xc3, 0xb8, 0xb9, 0xb7, 0xb6, 0xc0, 0xcd, 0xda, 0xd2, 0xbb, + 0xbd, 0xbe, 0xba, 0xbc, 0x42, 0x41, 0x41, 0x41, 0x43, 0x46, 0x49, 0x4d, + 0x4c, 0x4c, 0x4c, 0x4b, 0x48, 0x4a, 0x4a, 0x4a, 0x49, 0x49, 0x48, 0x48, + 0x4a, 0x4c, 0x4b, 0x4b, 0x4d, 0x4f, 0x4f, 0x52, 0x54, 0x54, 0x58, 0x5b, + 0x57, 0x54, 0x58, 0x5a, 0x61, 0x63, 0x64, 0x64, 0x5e, 0x56, 0x58, 0x5b, + 0x61, 0x64, 0x63, 0x62, 0x64, 0x64, 0x68, 0x67, 0x6a, 0x6c, 0x67, 0x67, + 0x67, 0x65, 0x68, 0x6a, 0x66, 0x64, 0x64, 0x6a, 0x6a, 0x64, 0x65, 0x65, + 0x5f, 0x5f, 0x60, 0x61, 0x66, 0x65, 0x63, 0x64, 0x65, 0x64, 0x64, 0x64, + 0x67, 0x69, 0x6a, 0x6d, 0x70, 0x70, 0x71, 0x72, 0x71, 0x73, 0x74, 0x75, + 0x76, 0x77, 0x77, 0x76, 0x76, 0x72, 0x6b, 0x66, 0x5e, 0x52, 0x32, 0x14, + 0xf4, 0xdb, 0xd6, 0xe3, 0xe1, 0xe4, 0xe0, 0xd5, 0xd3, 0xc9, 0xc8, 0xc8, + 0xc4, 0xc8, 0xc2, 0xc2, 0xc7, 0xc7, 0xc7, 0xc8, 0xc2, 0xcf, 0xcf, 0xc1, + 0xbd, 0xc1, 0xc6, 0xc6, 0xc0, 0xbf, 0xc9, 0xc2, 0xc6, 0xc5, 0xcb, 
0xc9, + 0xc3, 0xc8, 0xc2, 0xd1, 0xcf, 0xc7, 0xd1, 0xd9, 0xd5, 0xca, 0xc2, 0xb8, + 0xc0, 0xba, 0xb5, 0xba, 0xc8, 0xc7, 0xc5, 0xc8, 0xc6, 0xb5, 0xbd, 0xc8, + 0xbe, 0xb6, 0xbe, 0xc1, 0xc0, 0xbb, 0xca, 0xcf, 0xcc, 0xd0, 0xc4, 0xbf, + 0xca, 0xc8, 0xc8, 0xcd, 0xca, 0xc6, 0xc7, 0xce, 0xd1, 0xc3, 0xc0, 0xc8, + 0xbd, 0xbb, 0xc2, 0xb8, 0xb4, 0xc2, 0xc5, 0xcf, 0xcb, 0xc7, 0xd0, 0xbb, + 0xb6, 0xb4, 0xc8, 0xc3, 0xb2, 0xb8, 0xba, 0xbc, 0xc4, 0xc8, 0xbc, 0xb6, + 0xb2, 0xaf, 0xb8, 0xce, 0xc8, 0xc9, 0xc8, 0xc4, 0xc3, 0xbb, 0xb8, 0xbb, + 0x41, 0x40, 0x42, 0x45, 0x47, 0x48, 0x4a, 0x48, 0x48, 0x49, 0x4a, 0x4b, + 0x48, 0x49, 0x4b, 0x4c, 0x48, 0x46, 0x46, 0x48, 0x4c, 0x4c, 0x4e, 0x4c, + 0x4c, 0x4c, 0x50, 0x52, 0x52, 0x57, 0x5a, 0x58, 0x55, 0x54, 0x59, 0x5e, + 0x64, 0x64, 0x63, 0x5f, 0x5b, 0x56, 0x57, 0x58, 0x5e, 0x63, 0x60, 0x5e, + 0x64, 0x66, 0x66, 0x68, 0x6a, 0x6a, 0x68, 0x6a, 0x67, 0x65, 0x6a, 0x6a, + 0x64, 0x63, 0x64, 0x68, 0x6a, 0x67, 0x65, 0x64, 0x61, 0x5f, 0x60, 0x60, + 0x66, 0x68, 0x65, 0x66, 0x67, 0x66, 0x64, 0x64, 0x64, 0x69, 0x6b, 0x6d, + 0x70, 0x70, 0x71, 0x72, 0x72, 0x74, 0x75, 0x76, 0x76, 0x77, 0x78, 0x77, + 0x76, 0x76, 0x75, 0x71, 0x69, 0x5e, 0x4c, 0x2f, 0x15, 0xfa, 0xea, 0xe4, + 0xef, 0xfa, 0xf0, 0xd9, 0xd5, 0xca, 0xc9, 0xc7, 0xc0, 0xc6, 0xc5, 0xc2, + 0xc5, 0xc8, 0xc7, 0xc8, 0xc2, 0xc5, 0xce, 0xbc, 0xbd, 0xc1, 0xc4, 0xcb, + 0xc3, 0xc2, 0xc8, 0xc5, 0xc8, 0xc8, 0xc9, 0xcc, 0xcd, 0xd6, 0xd5, 0xd1, + 0xcb, 0xc8, 0xce, 0xd0, 0xcc, 0xc7, 0xbf, 0xbc, 0xc3, 0xc0, 0xc2, 0xba, + 0xc2, 0xbc, 0xc3, 0xc3, 0xc9, 0xbc, 0xc2, 0xcf, 0xbf, 0xb6, 0xbc, 0xc3, + 0xc2, 0xba, 0xc0, 0xcd, 0xc8, 0xca, 0xc6, 0xc7, 0xce, 0xcb, 0xcc, 0xd0, + 0xda, 0xda, 0xd0, 0xc8, 0xc2, 0xc0, 0xbc, 0xc2, 0xbc, 0xc2, 0xc2, 0xb7, + 0xaf, 0xbc, 0xc0, 0xc6, 0xd2, 0xd0, 0xd3, 0xc3, 0xb6, 0xb7, 0xc1, 0xc2, + 0xb1, 0xb6, 0xbc, 0xbe, 0xc1, 0xc2, 0xbc, 0xb2, 0xac, 0xab, 0xbb, 0xd7, + 0xd7, 0xc3, 0xc6, 0xc7, 0xcb, 0xc1, 0xb6, 0xba, 0x42, 0x44, 0x44, 0x46, + 0x47, 0x47, 0x47, 0x49, 0x4c, 0x4a, 0x49, 0x49, 0x4c, 0x4a, 0x47, 
0x46, + 0x46, 0x48, 0x4c, 0x4c, 0x4c, 0x4c, 0x4c, 0x4d, 0x4c, 0x4f, 0x50, 0x53, + 0x59, 0x5c, 0x57, 0x52, 0x55, 0x58, 0x59, 0x60, 0x64, 0x63, 0x62, 0x5d, + 0x58, 0x55, 0x55, 0x58, 0x5f, 0x61, 0x5d, 0x58, 0x64, 0x67, 0x65, 0x68, + 0x6a, 0x6a, 0x69, 0x6a, 0x69, 0x67, 0x69, 0x69, 0x64, 0x64, 0x63, 0x65, + 0x6a, 0x67, 0x64, 0x64, 0x62, 0x5f, 0x60, 0x5f, 0x64, 0x68, 0x67, 0x68, + 0x69, 0x6a, 0x68, 0x65, 0x64, 0x67, 0x6b, 0x6f, 0x70, 0x71, 0x72, 0x72, + 0x73, 0x75, 0x76, 0x76, 0x76, 0x77, 0x78, 0x78, 0x76, 0x75, 0x76, 0x76, + 0x74, 0x6d, 0x64, 0x55, 0x3b, 0x1d, 0x0e, 0xf9, 0x09, 0x1b, 0x0a, 0xe5, + 0xdd, 0xd6, 0xd0, 0xc8, 0xbf, 0xc2, 0xc3, 0xc2, 0xc2, 0xc4, 0xc5, 0xc5, + 0xc8, 0xc2, 0xc9, 0xc2, 0xc2, 0xc2, 0xc7, 0xcb, 0xc7, 0xc6, 0xbf, 0xd0, + 0xce, 0xca, 0xc9, 0xca, 0xcc, 0xd6, 0xd2, 0xca, 0xc6, 0xb9, 0xc8, 0xd6, + 0xcf, 0xc5, 0xc0, 0xc9, 0xcc, 0xbf, 0xc4, 0xbd, 0xbd, 0xbc, 0xc2, 0xb9, + 0xbd, 0xc1, 0xc6, 0xcd, 0xc8, 0xcd, 0xc4, 0xc0, 0xc2, 0xbe, 0xbf, 0xd4, + 0xce, 0xc7, 0xc5, 0xcc, 0xc8, 0xcc, 0xc8, 0xbc, 0xc9, 0xd3, 0xcf, 0xce, + 0xc8, 0xc5, 0xc2, 0xc1, 0xc3, 0xca, 0xcd, 0xbb, 0xb0, 0xb6, 0xc3, 0xc4, + 0xc7, 0xc7, 0xcd, 0xce, 0xbc, 0xc2, 0xc2, 0xc0, 0xb3, 0xb6, 0xba, 0xbb, + 0xc2, 0xc2, 0xbe, 0xae, 0xaa, 0xb0, 0xbc, 0xd3, 0xda, 0xd0, 0xc8, 0xbf, + 0xc3, 0xc8, 0xc3, 0xb2, 0x46, 0x47, 0x4b, 0x4c, 0x4b, 0x4c, 0x4c, 0x4c, + 0x4d, 0x4b, 0x46, 0x47, 0x48, 0x46, 0x46, 0x48, 0x4c, 0x4c, 0x4c, 0x4a, + 0x4c, 0x4e, 0x4f, 0x4b, 0x4a, 0x47, 0x4d, 0x59, 0x5c, 0x54, 0x4d, 0x52, + 0x56, 0x57, 0x5d, 0x60, 0x62, 0x5e, 0x5e, 0x5a, 0x58, 0x59, 0x56, 0x57, + 0x61, 0x5e, 0x59, 0x56, 0x64, 0x66, 0x66, 0x69, 0x6a, 0x6a, 0x69, 0x69, + 0x69, 0x66, 0x66, 0x66, 0x63, 0x64, 0x62, 0x61, 0x66, 0x66, 0x65, 0x64, + 0x63, 0x5f, 0x60, 0x60, 0x64, 0x68, 0x67, 0x67, 0x69, 0x6c, 0x6b, 0x68, + 0x66, 0x67, 0x6a, 0x6f, 0x6f, 0x71, 0x73, 0x72, 0x73, 0x75, 0x75, 0x75, + 0x76, 0x77, 0x77, 0x78, 0x76, 0x76, 0x76, 0x76, 0x77, 0x75, 0x70, 0x6a, + 0x5c, 0x4f, 0x46, 0x3e, 0x40, 0x45, 0x2d, 0x0b, 0xf8, 0xec, 0xda, 
0xc8, + 0xbf, 0xbc, 0xc1, 0xc4, 0xc2, 0xc5, 0xca, 0xc6, 0xc7, 0xc5, 0xc0, 0xc2, + 0xbf, 0xc7, 0xc5, 0xcd, 0xc8, 0xd3, 0xc9, 0xce, 0xcc, 0xcb, 0xc7, 0xcc, + 0xc6, 0xce, 0xd2, 0xd0, 0xcd, 0xc0, 0xcc, 0xdc, 0xd4, 0xc8, 0xc8, 0xc9, + 0xcf, 0xc2, 0xbc, 0xbb, 0xb9, 0xbb, 0xc0, 0xbc, 0xbb, 0xc7, 0xca, 0xc8, + 0xc8, 0xd1, 0xce, 0xc3, 0xcc, 0xc8, 0xc5, 0xda, 0xd9, 0xcf, 0xc6, 0xca, + 0xce, 0xd7, 0xc9, 0xb7, 0xba, 0xc2, 0xcb, 0xde, 0xd6, 0xc9, 0xc4, 0xc2, + 0xc8, 0xd4, 0xcd, 0xb7, 0xb0, 0xb2, 0xc2, 0xc2, 0xc0, 0xba, 0xc2, 0xce, + 0xc4, 0xc5, 0xbc, 0xb9, 0xb0, 0xb0, 0xb4, 0xb6, 0xbf, 0xc2, 0xbf, 0xb2, + 0xaa, 0xb0, 0xb9, 0xce, 0xd2, 0xcf, 0xc4, 0xbd, 0xb8, 0xbc, 0xc4, 0xbb, + 0x4f, 0x4f, 0x4f, 0x4f, 0x4c, 0x4b, 0x4a, 0x47, 0x47, 0x48, 0x46, 0x48, + 0x46, 0x46, 0x4b, 0x4c, 0x4c, 0x4b, 0x49, 0x4c, 0x50, 0x51, 0x4c, 0x48, + 0x4a, 0x4c, 0x55, 0x58, 0x55, 0x4b, 0x4e, 0x58, 0x56, 0x59, 0x5f, 0x5f, + 0x5e, 0x5a, 0x5a, 0x59, 0x5b, 0x5d, 0x56, 0x59, 0x62, 0x61, 0x58, 0x55, + 0x66, 0x67, 0x67, 0x67, 0x67, 0x6a, 0x66, 0x6a, 0x6a, 0x65, 0x67, 0x65, + 0x63, 0x64, 0x5f, 0x5d, 0x62, 0x64, 0x64, 0x64, 0x62, 0x60, 0x61, 0x62, + 0x64, 0x67, 0x68, 0x69, 0x6a, 0x6c, 0x6e, 0x6b, 0x69, 0x68, 0x6a, 0x6d, + 0x6e, 0x71, 0x73, 0x73, 0x74, 0x75, 0x76, 0x76, 0x76, 0x76, 0x77, 0x77, + 0x77, 0x77, 0x78, 0x78, 0x77, 0x76, 0x74, 0x6f, 0x64, 0x57, 0x4c, 0x4e, + 0x58, 0x53, 0x2e, 0xfe, 0xe8, 0xdf, 0xdd, 0xce, 0xc1, 0xbf, 0xc4, 0xc9, + 0xc7, 0xcd, 0xc3, 0xc6, 0xc5, 0xbf, 0xbc, 0xc0, 0xc2, 0xbf, 0xc2, 0xc9, + 0xc9, 0xd3, 0xd3, 0xc5, 0xc6, 0xc7, 0xc8, 0xd2, 0xcf, 0xce, 0xda, 0xda, + 0xd3, 0xcd, 0xc4, 0xd0, 0xc8, 0xc4, 0xc7, 0xd5, 0xd0, 0xc5, 0xc1, 0xbf, + 0xbd, 0xbe, 0xbf, 0xc2, 0xca, 0xca, 0xca, 0xc2, 0xc7, 0xc8, 0xca, 0xc8, + 0xd1, 0xcd, 0xc5, 0xca, 0xd1, 0xd5, 0xc5, 0xce, 0xd4, 0xd4, 0xc6, 0xb8, + 0xbc, 0xc0, 0xc9, 0xda, 0xe2, 0xd2, 0xbc, 0xbc, 0xc8, 0xd4, 0xd9, 0xbd, + 0xb4, 0xb5, 0xc1, 0xbe, 0xbf, 0xb7, 0xb6, 0xc8, 0xc7, 0xc3, 0xc1, 0xbd, + 0xb6, 0xb4, 0xb4, 0xb6, 0xcb, 0xd4, 0xc6, 0xb7, 0xaf, 0xb0, 0xb3, 
0xc2, + 0xc1, 0xbe, 0xbe, 0xc6, 0xba, 0xba, 0xc1, 0xc2, 0x4e, 0x4d, 0x4e, 0x4d, + 0x4b, 0x4b, 0x4a, 0x4a, 0x48, 0x49, 0x46, 0x47, 0x46, 0x48, 0x4c, 0x4a, + 0x4b, 0x4c, 0x4d, 0x4d, 0x4c, 0x4e, 0x4a, 0x4b, 0x4e, 0x54, 0x56, 0x54, + 0x50, 0x4c, 0x55, 0x59, 0x5b, 0x5e, 0x5f, 0x5b, 0x58, 0x58, 0x59, 0x59, + 0x5e, 0x5a, 0x56, 0x5a, 0x62, 0x63, 0x56, 0x58, 0x67, 0x67, 0x67, 0x63, + 0x63, 0x68, 0x64, 0x69, 0x6b, 0x65, 0x64, 0x63, 0x62, 0x64, 0x5e, 0x5b, + 0x5d, 0x5e, 0x60, 0x63, 0x62, 0x63, 0x64, 0x63, 0x65, 0x67, 0x69, 0x6a, + 0x6c, 0x6d, 0x6d, 0x6f, 0x6c, 0x6a, 0x69, 0x6b, 0x6d, 0x70, 0x74, 0x74, + 0x74, 0x75, 0x76, 0x76, 0x77, 0x77, 0x78, 0x77, 0x77, 0x77, 0x77, 0x78, + 0x78, 0x77, 0x75, 0x72, 0x6e, 0x65, 0x5e, 0x63, 0x67, 0x59, 0x19, 0xea, + 0xd3, 0xc9, 0xce, 0xce, 0xcd, 0xd4, 0xcb, 0xca, 0xc8, 0xcb, 0xc1, 0xbb, + 0xc0, 0xba, 0xbc, 0xc1, 0xca, 0xc4, 0xc4, 0xcc, 0xcd, 0xd1, 0xcf, 0xc8, + 0xc8, 0xc5, 0xd1, 0xd4, 0xd1, 0xc8, 0xd5, 0xd4, 0xc7, 0xcb, 0xc8, 0xcb, + 0xc8, 0xc2, 0xca, 0xd5, 0xd0, 0xcb, 0xcd, 0xcf, 0xc5, 0xc0, 0xbc, 0xc0, + 0xc8, 0xc8, 0xca, 0xbf, 0xc0, 0xca, 0xcb, 0xc6, 0xc5, 0xbd, 0xc1, 0xc8, + 0xc0, 0xc8, 0xc9, 0xd5, 0xce, 0xcb, 0xc4, 0xb7, 0xb5, 0xbf, 0xc6, 0xc1, + 0xc6, 0xc1, 0xc1, 0xc4, 0xc7, 0xc4, 0xcb, 0xcb, 0xc4, 0xc4, 0xc8, 0xc1, + 0xc7, 0xc3, 0xb8, 0xc0, 0xce, 0xcb, 0xbd, 0xb7, 0xbd, 0xb8, 0xbc, 0xbd, + 0xc7, 0xd1, 0xcc, 0xc1, 0xbb, 0xb9, 0xb9, 0xbd, 0xbc, 0xbc, 0xc0, 0xc2, + 0xc8, 0xbf, 0xbf, 0xc1, 0x4c, 0x4d, 0x4c, 0x4c, 0x4c, 0x4c, 0x4b, 0x49, + 0x4a, 0x4b, 0x49, 0x49, 0x4a, 0x4d, 0x4c, 0x4a, 0x4c, 0x4c, 0x49, 0x47, + 0x46, 0x4a, 0x4c, 0x51, 0x52, 0x57, 0x55, 0x52, 0x50, 0x52, 0x5b, 0x5c, + 0x5e, 0x5e, 0x5e, 0x58, 0x58, 0x57, 0x55, 0x5a, 0x5a, 0x57, 0x57, 0x5b, + 0x63, 0x60, 0x56, 0x5c, 0x67, 0x66, 0x67, 0x5d, 0x5d, 0x63, 0x63, 0x67, + 0x6a, 0x65, 0x64, 0x5e, 0x60, 0x64, 0x63, 0x5b, 0x5b, 0x58, 0x5d, 0x61, + 0x63, 0x64, 0x64, 0x63, 0x64, 0x67, 0x69, 0x69, 0x6a, 0x6d, 0x6e, 0x6f, + 0x6e, 0x6c, 0x6b, 0x6a, 0x6d, 0x71, 0x74, 0x74, 0x74, 0x75, 0x75, 
0x75, + 0x76, 0x78, 0x78, 0x77, 0x78, 0x77, 0x78, 0x78, 0x77, 0x78, 0x76, 0x75, + 0x71, 0x6c, 0x6d, 0x6f, 0x6e, 0x58, 0x1c, 0xed, 0xd6, 0xd2, 0xd4, 0xd2, + 0xd2, 0xd7, 0xcf, 0xcb, 0xca, 0xd2, 0xce, 0xbc, 0xbf, 0xbf, 0xbe, 0xbc, + 0xc7, 0xc8, 0xcd, 0xd6, 0xce, 0xd1, 0xcb, 0xcd, 0xc9, 0xc8, 0xd7, 0xd7, + 0xce, 0xc7, 0xce, 0xda, 0xce, 0xc8, 0xd0, 0xc7, 0xc8, 0xc0, 0xc3, 0xcc, + 0xc8, 0xcd, 0xce, 0xce, 0xc4, 0xc8, 0xc3, 0xc2, 0xbd, 0xbd, 0xc6, 0xbf, + 0xc0, 0xca, 0xc7, 0xc2, 0xc5, 0xb9, 0xb3, 0xbf, 0xc5, 0xc3, 0xbf, 0xc6, + 0xcb, 0xc9, 0xc8, 0xbd, 0xbc, 0xc4, 0xbc, 0xba, 0xc2, 0xbc, 0xbe, 0xc7, + 0xc4, 0xbf, 0xbe, 0xca, 0xcc, 0xcd, 0xcf, 0xcf, 0xcb, 0xc8, 0xbc, 0xbb, + 0xc2, 0xc8, 0xb8, 0xb4, 0xbe, 0xb6, 0xbc, 0xbc, 0xc2, 0xcf, 0xce, 0xc6, + 0xbe, 0xbf, 0xc6, 0xbc, 0xbc, 0xbb, 0xbd, 0xbb, 0xbe, 0xbf, 0xbf, 0xc2, + 0x4c, 0x4c, 0x4c, 0x4b, 0x4b, 0x48, 0x4b, 0x4b, 0x49, 0x49, 0x49, 0x49, + 0x4b, 0x4e, 0x4b, 0x4b, 0x4a, 0x46, 0x45, 0x45, 0x4b, 0x50, 0x52, 0x50, + 0x53, 0x56, 0x56, 0x54, 0x55, 0x5c, 0x5d, 0x5d, 0x5e, 0x5f, 0x5a, 0x58, + 0x58, 0x51, 0x52, 0x59, 0x52, 0x53, 0x56, 0x5b, 0x64, 0x5e, 0x59, 0x5e, + 0x65, 0x66, 0x63, 0x59, 0x58, 0x5e, 0x64, 0x66, 0x69, 0x65, 0x62, 0x5d, + 0x5a, 0x64, 0x67, 0x5e, 0x59, 0x59, 0x5c, 0x61, 0x63, 0x65, 0x64, 0x63, + 0x64, 0x68, 0x6a, 0x69, 0x69, 0x6d, 0x70, 0x70, 0x6f, 0x6d, 0x6c, 0x6b, + 0x6e, 0x71, 0x74, 0x74, 0x74, 0x75, 0x73, 0x73, 0x76, 0x77, 0x78, 0x78, + 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x77, 0x76, 0x74, 0x72, 0x72, 0x72, + 0x70, 0x58, 0x32, 0x10, 0xfa, 0xeb, 0xda, 0xe4, 0xe6, 0xe1, 0xd7, 0xd0, + 0xcf, 0xd4, 0xcf, 0xc3, 0xc5, 0xcd, 0xc4, 0xbb, 0xc7, 0xcf, 0xce, 0xce, + 0xd3, 0xc8, 0xc5, 0xc1, 0xbf, 0xce, 0xda, 0xcf, 0xc4, 0xc8, 0xd5, 0xd0, + 0xc2, 0xbe, 0xcb, 0xc7, 0xca, 0xbc, 0xb9, 0xcc, 0xd5, 0xdb, 0xca, 0xc5, + 0xbc, 0xc8, 0xc2, 0xb7, 0xb2, 0xb8, 0xc0, 0xb9, 0xbc, 0xc2, 0xbd, 0xbf, + 0xc7, 0xc5, 0xba, 0xb9, 0xc2, 0xc1, 0xbf, 0xc6, 0xca, 0xcd, 0xc8, 0xc1, + 0xbd, 0xc4, 0xc1, 0xbc, 0xbc, 0xc6, 0xce, 0xc6, 0xbe, 0xca, 0xc2, 
0xc0, + 0xc8, 0xc8, 0xc4, 0xca, 0xd5, 0xc8, 0xc2, 0xbe, 0xbe, 0xb7, 0xbc, 0xbd, + 0xc1, 0xb4, 0xb2, 0xbb, 0xc2, 0xcf, 0xc0, 0xc2, 0xc3, 0xc3, 0xc8, 0xc2, + 0xbb, 0xbc, 0xbf, 0xc1, 0xba, 0xc1, 0xc1, 0xc6, 0x4a, 0x4b, 0x4b, 0x4a, + 0x47, 0x48, 0x4c, 0x4b, 0x47, 0x49, 0x4d, 0x4f, 0x4f, 0x4d, 0x4c, 0x4c, + 0x47, 0x46, 0x46, 0x4a, 0x51, 0x52, 0x4e, 0x4b, 0x51, 0x58, 0x57, 0x56, + 0x59, 0x5a, 0x5a, 0x5a, 0x59, 0x58, 0x55, 0x53, 0x50, 0x4a, 0x52, 0x57, + 0x4e, 0x51, 0x55, 0x5d, 0x62, 0x5a, 0x59, 0x5e, 0x64, 0x67, 0x60, 0x58, + 0x57, 0x5a, 0x63, 0x65, 0x68, 0x64, 0x60, 0x5a, 0x59, 0x64, 0x67, 0x64, + 0x5b, 0x58, 0x5f, 0x61, 0x63, 0x67, 0x65, 0x64, 0x64, 0x68, 0x6a, 0x6a, + 0x6a, 0x6c, 0x70, 0x70, 0x70, 0x6d, 0x6d, 0x6e, 0x70, 0x70, 0x71, 0x75, + 0x75, 0x75, 0x74, 0x74, 0x75, 0x76, 0x76, 0x76, 0x76, 0x77, 0x78, 0x78, + 0x77, 0x77, 0x76, 0x76, 0x76, 0x75, 0x75, 0x74, 0x6d, 0x59, 0x3a, 0x16, + 0xfa, 0xe6, 0xe0, 0xf8, 0xf6, 0xf0, 0xdc, 0xd3, 0xcf, 0xda, 0xd7, 0xca, + 0xce, 0xcf, 0xce, 0xc8, 0xce, 0xd1, 0xd3, 0xce, 0xd2, 0xc8, 0xc2, 0xbc, + 0xc8, 0xd6, 0xd0, 0xc5, 0xc4, 0xca, 0xc5, 0xc6, 0xbb, 0xbb, 0xbd, 0xc2, + 0xc4, 0xba, 0xb5, 0xca, 0xce, 0xce, 0xc7, 0xbc, 0xbb, 0xc4, 0xcc, 0xba, + 0xb0, 0xb3, 0xbc, 0xb3, 0xb7, 0xbc, 0xb6, 0xbb, 0xc2, 0xc3, 0xc1, 0xc7, + 0xd2, 0xc9, 0xc3, 0xc3, 0xbf, 0xd0, 0xc4, 0xb6, 0xc2, 0xc2, 0xbf, 0xb8, + 0xb6, 0xc4, 0xcc, 0xcc, 0xc2, 0xc3, 0xc2, 0xbc, 0xc0, 0xc1, 0xbf, 0xbe, + 0xd1, 0xc9, 0xca, 0xcc, 0xbe, 0xb2, 0xbb, 0xbd, 0xb8, 0xb4, 0xaa, 0xb0, + 0xbc, 0xbc, 0xae, 0xba, 0xcb, 0xd2, 0xbf, 0xc0, 0xc0, 0xba, 0xbd, 0xbf, + 0xbf, 0xc3, 0xc5, 0xc0, 0x48, 0x48, 0x49, 0x4a, 0x48, 0x48, 0x48, 0x46, + 0x46, 0x49, 0x4f, 0x51, 0x50, 0x4e, 0x4b, 0x4a, 0x47, 0x49, 0x4f, 0x51, + 0x52, 0x52, 0x4b, 0x4b, 0x52, 0x57, 0x56, 0x57, 0x57, 0x58, 0x58, 0x57, + 0x53, 0x52, 0x52, 0x4f, 0x4c, 0x4d, 0x56, 0x52, 0x48, 0x48, 0x56, 0x5e, + 0x60, 0x5d, 0x5c, 0x62, 0x64, 0x63, 0x5b, 0x58, 0x56, 0x5b, 0x62, 0x66, + 0x65, 0x64, 0x5e, 0x59, 0x5d, 0x65, 0x69, 0x65, 0x61, 0x5b, 0x60, 
0x61, + 0x65, 0x69, 0x68, 0x66, 0x65, 0x6a, 0x6a, 0x6a, 0x6b, 0x6c, 0x70, 0x70, + 0x70, 0x6f, 0x6e, 0x6e, 0x6f, 0x70, 0x70, 0x74, 0x74, 0x74, 0x74, 0x75, + 0x75, 0x75, 0x76, 0x76, 0x76, 0x77, 0x78, 0x78, 0x76, 0x76, 0x76, 0x76, + 0x76, 0x76, 0x76, 0x76, 0x71, 0x68, 0x57, 0x43, 0x24, 0x03, 0x00, 0x0f, + 0xfe, 0xf2, 0xdd, 0xd7, 0xd2, 0xd8, 0xda, 0xcc, 0xc6, 0xc9, 0xd5, 0xce, + 0xcf, 0xd4, 0xd9, 0xd6, 0xd0, 0xd3, 0xc8, 0xbe, 0xcd, 0xd3, 0xc3, 0xbb, + 0xbd, 0xc7, 0xc0, 0xbe, 0xbc, 0xbb, 0xbc, 0xbe, 0xbf, 0xb7, 0xb0, 0xbe, + 0xc0, 0xc3, 0xc7, 0xc0, 0xbb, 0xb8, 0xc2, 0xba, 0xb1, 0xc1, 0xbd, 0xbb, + 0xbc, 0xb7, 0xb6, 0xbf, 0xc1, 0xc1, 0xc5, 0xc4, 0xcf, 0xce, 0xc2, 0xc9, + 0xce, 0xc8, 0xc2, 0xbe, 0xcd, 0xc2, 0xbc, 0xc2, 0xc8, 0xc9, 0xca, 0xcb, + 0xc8, 0xba, 0xc2, 0xc3, 0xbc, 0xb9, 0xbc, 0xbb, 0xc1, 0xc2, 0xc4, 0xce, + 0xbb, 0xaf, 0xb9, 0xb9, 0xb4, 0xbb, 0xb6, 0xad, 0xb6, 0xb8, 0xb1, 0xbb, + 0xca, 0xd2, 0xc1, 0xb7, 0xc9, 0xc0, 0xb9, 0xbb, 0xc2, 0xc1, 0xbe, 0xc0, + 0x46, 0x46, 0x44, 0x45, 0x45, 0x45, 0x44, 0x46, 0x4b, 0x4e, 0x4e, 0x4f, + 0x4e, 0x4c, 0x46, 0x46, 0x47, 0x50, 0x52, 0x54, 0x52, 0x4d, 0x48, 0x4c, + 0x55, 0x56, 0x57, 0x58, 0x58, 0x58, 0x53, 0x4f, 0x4d, 0x4b, 0x4d, 0x4e, + 0x50, 0x53, 0x54, 0x4d, 0x45, 0x48, 0x59, 0x5e, 0x5e, 0x5f, 0x62, 0x64, + 0x63, 0x5e, 0x59, 0x5a, 0x57, 0x5c, 0x64, 0x65, 0x61, 0x63, 0x5f, 0x5c, + 0x60, 0x64, 0x6b, 0x69, 0x64, 0x61, 0x63, 0x64, 0x67, 0x6b, 0x6b, 0x6a, + 0x68, 0x6a, 0x67, 0x69, 0x6c, 0x6d, 0x6f, 0x71, 0x70, 0x71, 0x70, 0x6e, + 0x6e, 0x70, 0x70, 0x73, 0x73, 0x72, 0x73, 0x75, 0x76, 0x76, 0x76, 0x76, + 0x76, 0x77, 0x77, 0x77, 0x76, 0x75, 0x76, 0x76, 0x76, 0x77, 0x77, 0x76, + 0x73, 0x70, 0x69, 0x65, 0x5b, 0x45, 0x34, 0x2a, 0x08, 0xec, 0xdf, 0xdd, + 0xd5, 0xd0, 0xd2, 0xce, 0xc0, 0xc4, 0xc5, 0xc7, 0xcb, 0xd4, 0xd8, 0xdc, + 0xd5, 0xdd, 0xd4, 0xce, 0xd3, 0xd7, 0xc3, 0xc2, 0xc9, 0xca, 0xbf, 0xc6, + 0xc9, 0xc7, 0xc8, 0xca, 0xbe, 0xb1, 0xb0, 0xbc, 0xbb, 0xbc, 0xb6, 0xb6, + 0xb6, 0xb9, 0xbc, 0xbf, 0xbb, 0xb6, 0xba, 0xc4, 0xc4, 0xbd, 0xb8, 
0xc2, + 0xc0, 0xc0, 0xc0, 0xc2, 0xc8, 0xc8, 0xbd, 0xc4, 0xd1, 0xc8, 0xc5, 0xc5, + 0xcc, 0xc4, 0xbf, 0xbc, 0xc8, 0xca, 0xc2, 0xc2, 0xc9, 0xbf, 0xc0, 0xc2, + 0xb6, 0xb9, 0xc1, 0xb6, 0xbe, 0xbf, 0xbd, 0xc6, 0xb7, 0xb0, 0xb5, 0xb6, + 0xb8, 0xc6, 0xbf, 0xb0, 0xb0, 0xba, 0xb9, 0xba, 0xbe, 0xc1, 0xbd, 0xb6, + 0xc3, 0xcc, 0xcf, 0xce, 0xc0, 0xb8, 0xb6, 0xbc, 0x41, 0x44, 0x44, 0x44, + 0x46, 0x49, 0x4b, 0x4c, 0x4e, 0x4f, 0x4d, 0x4b, 0x4b, 0x49, 0x47, 0x4b, + 0x50, 0x54, 0x53, 0x53, 0x50, 0x4c, 0x4c, 0x52, 0x56, 0x56, 0x58, 0x58, + 0x54, 0x51, 0x4d, 0x4c, 0x4c, 0x4c, 0x4e, 0x51, 0x51, 0x53, 0x4f, 0x4a, + 0x45, 0x4c, 0x59, 0x5e, 0x60, 0x63, 0x64, 0x61, 0x5f, 0x5a, 0x5b, 0x5c, + 0x58, 0x5e, 0x65, 0x64, 0x60, 0x5f, 0x5e, 0x5e, 0x64, 0x65, 0x6a, 0x68, + 0x67, 0x65, 0x66, 0x69, 0x6a, 0x6d, 0x6d, 0x6b, 0x68, 0x6a, 0x66, 0x68, + 0x6d, 0x6c, 0x6e, 0x71, 0x71, 0x70, 0x6e, 0x6c, 0x6e, 0x71, 0x72, 0x72, + 0x71, 0x72, 0x73, 0x74, 0x76, 0x75, 0x75, 0x75, 0x75, 0x76, 0x76, 0x76, + 0x75, 0x75, 0x76, 0x76, 0x76, 0x77, 0x78, 0x77, 0x75, 0x71, 0x70, 0x6f, + 0x6c, 0x68, 0x62, 0x45, 0x1a, 0xf6, 0xda, 0xd4, 0xca, 0xc6, 0xca, 0xce, + 0xc9, 0xcb, 0xc0, 0xc4, 0xca, 0xc8, 0xcb, 0xd4, 0xd9, 0xdc, 0xd4, 0xd8, + 0xd4, 0xdc, 0xcc, 0xc9, 0xd1, 0xcd, 0xbc, 0xc3, 0xc8, 0xcf, 0xcb, 0xc3, + 0xc2, 0xbb, 0xb9, 0xbf, 0xb6, 0xb8, 0xba, 0xb3, 0xbc, 0xcc, 0xc3, 0xc2, + 0xba, 0xb1, 0xbe, 0xc6, 0xc6, 0xc1, 0xc2, 0xc1, 0xba, 0xba, 0xbc, 0xbc, + 0xbb, 0xb6, 0xb6, 0xb7, 0xc4, 0xc4, 0xc8, 0xc3, 0xc4, 0xc4, 0xc9, 0xc2, + 0xc2, 0xc2, 0xb9, 0xb6, 0xbc, 0xb8, 0xbf, 0xc2, 0xb7, 0xc0, 0xc2, 0xbc, + 0xc7, 0xc2, 0xc2, 0xbf, 0xb7, 0xb2, 0xae, 0xb4, 0xb8, 0xc7, 0xc0, 0xb8, + 0xb4, 0xbc, 0xc0, 0xbc, 0xbc, 0xbc, 0xb7, 0xba, 0xc8, 0xcf, 0xce, 0xc9, + 0xbe, 0xb7, 0xc3, 0xbd, 0x43, 0x43, 0x47, 0x4c, 0x4d, 0x4d, 0x4d, 0x4d, + 0x4c, 0x4a, 0x48, 0x46, 0x46, 0x4a, 0x4c, 0x52, 0x54, 0x53, 0x52, 0x51, + 0x4e, 0x4d, 0x4f, 0x52, 0x52, 0x53, 0x56, 0x53, 0x51, 0x4e, 0x4a, 0x46, + 0x46, 0x4c, 0x4e, 0x4e, 0x4e, 0x50, 0x49, 0x49, 0x4a, 0x50, 0x56, 
0x5e, + 0x64, 0x63, 0x60, 0x5e, 0x5e, 0x5b, 0x5e, 0x5e, 0x5d, 0x5f, 0x65, 0x65, + 0x63, 0x5e, 0x5f, 0x60, 0x64, 0x67, 0x68, 0x67, 0x6a, 0x68, 0x69, 0x6b, + 0x6b, 0x6d, 0x6e, 0x6a, 0x68, 0x66, 0x64, 0x67, 0x6d, 0x6c, 0x6e, 0x71, + 0x71, 0x6f, 0x6c, 0x6c, 0x6e, 0x70, 0x72, 0x73, 0x71, 0x73, 0x73, 0x74, + 0x75, 0x75, 0x75, 0x74, 0x74, 0x74, 0x75, 0x75, 0x75, 0x76, 0x75, 0x75, + 0x76, 0x78, 0x77, 0x77, 0x76, 0x76, 0x78, 0x76, 0x75, 0x73, 0x6e, 0x5e, + 0x3d, 0x0a, 0xde, 0xd2, 0xc8, 0xc5, 0xcb, 0xd1, 0xc9, 0xc5, 0xbf, 0xc4, + 0xcb, 0xc6, 0xc4, 0xca, 0xcf, 0xcd, 0xcc, 0xd7, 0xd4, 0xde, 0xd5, 0xca, + 0xc8, 0xc8, 0xc1, 0xca, 0xcd, 0xd4, 0xbe, 0xbb, 0xc4, 0xbf, 0xbf, 0xbf, + 0xb9, 0xba, 0xbf, 0xbf, 0xca, 0xcc, 0xc7, 0xbc, 0xb4, 0xbe, 0xce, 0xc8, + 0xc2, 0xc4, 0xc9, 0xc0, 0xc7, 0xc6, 0xbe, 0xbb, 0xb7, 0xbc, 0xcc, 0xb8, + 0xbc, 0xc7, 0xd2, 0xc7, 0xbf, 0xc8, 0xcc, 0xc7, 0xc1, 0xbc, 0xb8, 0xb4, + 0xb5, 0xb0, 0xbc, 0xc0, 0xc0, 0xc5, 0xbc, 0xb6, 0xc0, 0xc6, 0xc3, 0xba, + 0xba, 0xb6, 0xad, 0xb3, 0xb6, 0xc9, 0xcc, 0xd1, 0xc8, 0xcb, 0xbc, 0xba, + 0xbd, 0xc0, 0xc2, 0xc6, 0xc8, 0xc2, 0xca, 0xc9, 0xbc, 0xb6, 0xbf, 0xbc, + 0x45, 0x47, 0x48, 0x46, 0x49, 0x48, 0x43, 0x43, 0x41, 0x43, 0x44, 0x46, + 0x4a, 0x50, 0x52, 0x54, 0x54, 0x52, 0x4e, 0x4c, 0x4b, 0x4b, 0x4c, 0x4e, + 0x51, 0x51, 0x53, 0x53, 0x51, 0x4c, 0x47, 0x46, 0x4b, 0x4d, 0x4d, 0x4d, + 0x52, 0x4e, 0x4c, 0x49, 0x4d, 0x51, 0x52, 0x61, 0x64, 0x61, 0x5e, 0x5c, + 0x5e, 0x5e, 0x60, 0x5f, 0x62, 0x64, 0x64, 0x65, 0x64, 0x64, 0x63, 0x62, + 0x64, 0x65, 0x68, 0x68, 0x6b, 0x6a, 0x6b, 0x6d, 0x6d, 0x6c, 0x6d, 0x69, + 0x65, 0x62, 0x64, 0x67, 0x6c, 0x6c, 0x6f, 0x70, 0x71, 0x6e, 0x6d, 0x6c, + 0x6f, 0x6f, 0x6e, 0x71, 0x73, 0x71, 0x73, 0x73, 0x73, 0x73, 0x73, 0x73, + 0x72, 0x73, 0x73, 0x74, 0x75, 0x75, 0x75, 0x76, 0x78, 0x78, 0x77, 0x76, + 0x76, 0x77, 0x77, 0x76, 0x76, 0x75, 0x73, 0x6b, 0x5a, 0x32, 0x05, 0xe4, + 0xcf, 0xcc, 0xd1, 0xd1, 0xca, 0xc8, 0xcb, 0xcc, 0xcf, 0xcc, 0xc5, 0xc8, + 0xca, 0xc4, 0xc4, 0xce, 0xd4, 0xe1, 0xd4, 0xd1, 0xc7, 0xc8, 0xc7, 
0xc8, + 0xc4, 0xcd, 0xc2, 0xc6, 0xc7, 0xbf, 0xc8, 0xc8, 0xbe, 0xbc, 0xb6, 0xbc, + 0xca, 0xc6, 0xc5, 0xc9, 0xd3, 0xce, 0xcb, 0xc8, 0xc2, 0xc8, 0xca, 0xc6, + 0xc7, 0xbe, 0xc8, 0xc1, 0xc2, 0xc0, 0xc5, 0xba, 0xbd, 0xc5, 0xbe, 0xbe, + 0xc8, 0xc9, 0xba, 0xbc, 0xc2, 0xba, 0xbf, 0xbc, 0xb6, 0xb0, 0xbb, 0xbb, + 0xc2, 0xc3, 0xb9, 0xb2, 0xb5, 0xc0, 0xc0, 0xbb, 0xbc, 0xc4, 0xb5, 0xb3, + 0xbd, 0xc3, 0xc8, 0xc2, 0xc8, 0xd3, 0xc1, 0xc2, 0xbc, 0xc3, 0xce, 0xce, + 0xc9, 0xc2, 0xcc, 0xc8, 0xbc, 0xbf, 0xc0, 0xb8, 0x45, 0x46, 0x45, 0x41, + 0x3e, 0x3f, 0x3b, 0x3b, 0x40, 0x45, 0x44, 0x48, 0x4f, 0x50, 0x51, 0x51, + 0x51, 0x4d, 0x46, 0x47, 0x4a, 0x48, 0x4c, 0x52, 0x52, 0x51, 0x4e, 0x50, + 0x4e, 0x49, 0x47, 0x4e, 0x4e, 0x4a, 0x4b, 0x4f, 0x52, 0x50, 0x4b, 0x4b, + 0x52, 0x52, 0x58, 0x63, 0x60, 0x5f, 0x5d, 0x5c, 0x5d, 0x5f, 0x61, 0x61, + 0x64, 0x63, 0x63, 0x66, 0x67, 0x68, 0x66, 0x63, 0x65, 0x65, 0x67, 0x6a, + 0x6b, 0x6b, 0x6e, 0x6d, 0x6d, 0x6b, 0x6d, 0x66, 0x63, 0x62, 0x65, 0x6a, + 0x6d, 0x6d, 0x6f, 0x70, 0x70, 0x6f, 0x6e, 0x6f, 0x6f, 0x6f, 0x6e, 0x6f, + 0x70, 0x70, 0x70, 0x70, 0x70, 0x70, 0x70, 0x71, 0x72, 0x72, 0x72, 0x72, + 0x74, 0x76, 0x75, 0x76, 0x77, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, + 0x75, 0x76, 0x73, 0x6f, 0x68, 0x56, 0x28, 0xf3, 0xdc, 0xd2, 0xc9, 0xc9, + 0xc8, 0xcc, 0xdb, 0xc9, 0xce, 0xcb, 0xd1, 0xd2, 0xc7, 0xc1, 0xbc, 0xc4, + 0xd9, 0xec, 0xd2, 0xcd, 0xc2, 0xc6, 0xc2, 0xc0, 0xc5, 0xce, 0xc2, 0xcb, + 0xcd, 0xc4, 0xce, 0xd5, 0xc2, 0xb5, 0xb0, 0xb8, 0xc3, 0xc2, 0xcf, 0xda, + 0xdc, 0xd5, 0xcd, 0xc6, 0xd2, 0xcf, 0xc7, 0xc5, 0xc2, 0xb7, 0xcb, 0xca, + 0xbb, 0xb6, 0xbb, 0xc2, 0xc5, 0xbe, 0xb6, 0xb8, 0xbc, 0xc2, 0xc0, 0xbc, + 0xc4, 0xbb, 0xc2, 0xc3, 0xbc, 0xb4, 0xbf, 0xc0, 0xc3, 0xc5, 0xbc, 0xb6, + 0xb5, 0xbd, 0xbf, 0xc3, 0xc5, 0xc8, 0xbb, 0xbd, 0xc1, 0xb7, 0xb8, 0xbc, + 0xc7, 0xc7, 0xc1, 0xc2, 0xb6, 0xc6, 0xd5, 0xd1, 0xca, 0xc6, 0xd3, 0xd6, + 0xc6, 0xc5, 0xca, 0xc2, 0x42, 0x40, 0x3f, 0x3f, 0x3d, 0x3b, 0x3e, 0x40, + 0x43, 0x45, 0x44, 0x4c, 0x4c, 0x4c, 0x48, 0x49, 0x47, 0x46, 0x45, 
0x4a, + 0x49, 0x4c, 0x4e, 0x52, 0x4f, 0x4d, 0x4d, 0x4e, 0x4c, 0x4b, 0x4c, 0x4f, + 0x4c, 0x48, 0x52, 0x54, 0x55, 0x53, 0x4f, 0x4f, 0x54, 0x55, 0x5d, 0x61, + 0x5c, 0x5a, 0x59, 0x60, 0x5e, 0x61, 0x64, 0x64, 0x63, 0x61, 0x65, 0x66, + 0x68, 0x6a, 0x68, 0x65, 0x65, 0x65, 0x65, 0x6b, 0x6b, 0x6c, 0x6d, 0x6a, + 0x6c, 0x6c, 0x6c, 0x65, 0x63, 0x63, 0x66, 0x6b, 0x6c, 0x6d, 0x6e, 0x6f, + 0x71, 0x71, 0x70, 0x70, 0x6e, 0x6f, 0x70, 0x70, 0x6f, 0x6f, 0x6e, 0x6d, + 0x6d, 0x6d, 0x6e, 0x70, 0x71, 0x71, 0x71, 0x73, 0x75, 0x76, 0x76, 0x76, + 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x75, 0x74, 0x74, 0x71, + 0x6c, 0x64, 0x51, 0x1c, 0xef, 0xce, 0xbd, 0xc5, 0xce, 0xcd, 0xd4, 0xce, + 0xd1, 0xc8, 0xce, 0xd4, 0xd3, 0xce, 0xc7, 0xc2, 0xc9, 0xce, 0xcd, 0xd1, + 0xc5, 0xc8, 0xc6, 0xc3, 0xda, 0xd4, 0xc4, 0xc2, 0xcc, 0xcf, 0xc8, 0xcd, + 0xc8, 0xba, 0xbe, 0xc0, 0xc2, 0xbb, 0xcf, 0xe4, 0xda, 0xd4, 0xcb, 0xc1, + 0xc3, 0xc2, 0xc4, 0xce, 0xc2, 0xbc, 0xc8, 0xc8, 0xbc, 0xb9, 0xd0, 0xd3, + 0xc9, 0xbe, 0xb8, 0xba, 0xb8, 0xbc, 0xc0, 0xbc, 0xc8, 0xc2, 0xbe, 0xc5, + 0xc0, 0xbc, 0xc9, 0xc9, 0xcf, 0xcf, 0xc3, 0xb9, 0xb7, 0xbc, 0xc2, 0xc8, + 0xc3, 0xbd, 0xbc, 0xc2, 0xc2, 0xb0, 0xaa, 0xbc, 0xd0, 0xbe, 0xc4, 0xca, + 0xc0, 0xcf, 0xd0, 0xd2, 0xc9, 0xcb, 0xcc, 0xca, 0xc2, 0xbc, 0xc8, 0xcc, + 0x3a, 0x39, 0x3a, 0x3d, 0x3d, 0x3e, 0x3d, 0x40, 0x40, 0x41, 0x42, 0x47, + 0x46, 0x45, 0x40, 0x44, 0x44, 0x45, 0x4a, 0x4b, 0x4c, 0x50, 0x52, 0x52, + 0x4f, 0x4f, 0x51, 0x4f, 0x4d, 0x51, 0x4f, 0x4c, 0x4b, 0x4f, 0x55, 0x58, + 0x56, 0x56, 0x56, 0x53, 0x57, 0x58, 0x5c, 0x5b, 0x58, 0x58, 0x5a, 0x62, + 0x5e, 0x61, 0x63, 0x61, 0x61, 0x63, 0x67, 0x67, 0x69, 0x6a, 0x6a, 0x69, + 0x67, 0x64, 0x66, 0x6b, 0x6b, 0x6d, 0x6e, 0x6b, 0x6c, 0x6d, 0x6b, 0x63, + 0x63, 0x63, 0x67, 0x6c, 0x6c, 0x6b, 0x6c, 0x6e, 0x70, 0x72, 0x70, 0x70, + 0x6e, 0x6e, 0x6f, 0x70, 0x70, 0x6f, 0x6f, 0x6e, 0x6e, 0x6e, 0x6f, 0x70, + 0x70, 0x70, 0x71, 0x72, 0x74, 0x76, 0x75, 0x76, 0x76, 0x76, 0x75, 0x76, + 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x74, 0x73, 0x71, 0x6c, 0x5e, 
0x45, + 0x0a, 0xd8, 0xc0, 0xc5, 0xd6, 0xd0, 0xd3, 0xd6, 0xd0, 0xca, 0xc2, 0xc1, + 0xcb, 0xcc, 0xc1, 0xc1, 0xbd, 0xc8, 0xce, 0xd3, 0xc3, 0xc6, 0xcb, 0xcd, + 0xd0, 0xcc, 0xc8, 0xc3, 0xc0, 0xcf, 0xcd, 0xd7, 0xd9, 0xc3, 0xc7, 0xc7, + 0xc6, 0xb8, 0xc0, 0xd5, 0xd1, 0xc5, 0xc6, 0xc0, 0xbe, 0xbb, 0xc0, 0xd4, + 0xca, 0xbc, 0xc2, 0xc6, 0xc6, 0xc4, 0xd2, 0xd9, 0xc8, 0xc6, 0xc1, 0xc1, + 0xbc, 0xbb, 0xba, 0xbd, 0xc7, 0xc7, 0xc8, 0xca, 0xbe, 0xbc, 0xd0, 0xc9, + 0xca, 0xd1, 0xc0, 0xc1, 0xb9, 0xb8, 0xbd, 0xc3, 0xc4, 0xbc, 0xbb, 0xc9, + 0xbd, 0xb0, 0xb3, 0xc1, 0xc7, 0xba, 0xc2, 0xc9, 0xc8, 0xd5, 0xcf, 0xca, + 0xc6, 0xcb, 0xc5, 0xc7, 0xc3, 0xc2, 0xbe, 0xc9, 0x37, 0x39, 0x3d, 0x42, + 0x44, 0x45, 0x42, 0x41, 0x42, 0x46, 0x4b, 0x46, 0x40, 0x3e, 0x41, 0x46, + 0x48, 0x4c, 0x4d, 0x4c, 0x51, 0x52, 0x4f, 0x50, 0x50, 0x51, 0x50, 0x51, + 0x52, 0x52, 0x50, 0x4f, 0x51, 0x56, 0x59, 0x58, 0x57, 0x5b, 0x58, 0x52, + 0x58, 0x58, 0x59, 0x58, 0x58, 0x5c, 0x5e, 0x63, 0x5e, 0x62, 0x62, 0x5f, + 0x61, 0x65, 0x69, 0x6a, 0x6a, 0x6a, 0x6a, 0x6a, 0x66, 0x64, 0x68, 0x6b, + 0x6a, 0x6d, 0x6e, 0x6d, 0x6d, 0x6d, 0x68, 0x63, 0x63, 0x62, 0x67, 0x6b, + 0x69, 0x6a, 0x6c, 0x6d, 0x70, 0x72, 0x70, 0x6f, 0x6e, 0x6d, 0x6f, 0x70, + 0x70, 0x70, 0x70, 0x6f, 0x6f, 0x6f, 0x6f, 0x6f, 0x70, 0x70, 0x71, 0x72, + 0x74, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x76, 0x76, 0x76, 0x76, + 0x77, 0x76, 0x76, 0x75, 0x74, 0x72, 0x70, 0x65, 0x44, 0xfe, 0xd6, 0xd1, + 0xd4, 0xcf, 0xd0, 0xd4, 0xcb, 0xc5, 0xb9, 0xb9, 0xc9, 0xc7, 0xb7, 0xc0, + 0xc1, 0xc6, 0xc8, 0xc5, 0xbe, 0xb9, 0xc0, 0xcc, 0xc8, 0xc5, 0xc7, 0xca, + 0xc5, 0xc9, 0xcf, 0xd3, 0xcf, 0xcd, 0xc3, 0xc8, 0xc5, 0xb6, 0xbb, 0xce, + 0xcd, 0xc1, 0xc2, 0xbc, 0xb7, 0xb9, 0xbe, 0xd6, 0xce, 0xc2, 0xc2, 0xc2, + 0xc5, 0xce, 0xd4, 0xd4, 0xc7, 0xc9, 0xc2, 0xb6, 0xb7, 0xbc, 0xbe, 0xbf, + 0xbd, 0xc1, 0xc8, 0xc8, 0xbf, 0xbe, 0xc8, 0xce, 0xc9, 0xc8, 0xc6, 0xc8, + 0xbd, 0xb6, 0xc8, 0xcc, 0xc4, 0xb6, 0xbe, 0xcd, 0xb9, 0xae, 0xbb, 0xc6, + 0xc3, 0xc7, 0xc8, 0xcb, 0xc7, 0xc5, 0xc8, 0xd0, 0xd3, 0xc9, 0xc1, 
0xc8, + 0xc9, 0xce, 0xcb, 0xce, 0x3b, 0x40, 0x44, 0x46, 0x46, 0x46, 0x46, 0x46, + 0x46, 0x47, 0x46, 0x41, 0x3d, 0x3e, 0x41, 0x46, 0x4b, 0x4d, 0x4d, 0x51, + 0x55, 0x52, 0x4e, 0x4f, 0x50, 0x50, 0x52, 0x52, 0x54, 0x53, 0x52, 0x52, + 0x55, 0x58, 0x58, 0x59, 0x5e, 0x5d, 0x55, 0x54, 0x58, 0x5a, 0x5b, 0x58, + 0x58, 0x5c, 0x63, 0x62, 0x60, 0x64, 0x64, 0x61, 0x61, 0x68, 0x6a, 0x69, + 0x6a, 0x6a, 0x6b, 0x69, 0x66, 0x66, 0x6a, 0x6a, 0x6a, 0x6e, 0x6e, 0x6c, + 0x6d, 0x6a, 0x66, 0x65, 0x64, 0x64, 0x68, 0x69, 0x67, 0x6a, 0x6c, 0x6c, + 0x6e, 0x70, 0x6e, 0x6e, 0x6e, 0x6d, 0x70, 0x70, 0x70, 0x70, 0x6e, 0x6d, + 0x6f, 0x6f, 0x70, 0x71, 0x72, 0x73, 0x72, 0x74, 0x75, 0x76, 0x75, 0x75, + 0x75, 0x75, 0x75, 0x76, 0x77, 0x77, 0x77, 0x77, 0x76, 0x77, 0x77, 0x77, + 0x76, 0x75, 0x76, 0x71, 0x67, 0x3c, 0xfa, 0xdf, 0xd5, 0xcd, 0xcc, 0xcf, + 0xcc, 0xc4, 0xb7, 0xbd, 0xcc, 0xcd, 0xbc, 0xc3, 0xce, 0xce, 0xc8, 0xc0, + 0xba, 0xbc, 0xc6, 0xca, 0xc6, 0xc6, 0xc7, 0xcf, 0xcd, 0xbe, 0xc2, 0xc6, + 0xce, 0xcc, 0xcc, 0xce, 0xc8, 0xba, 0xb6, 0xc4, 0xce, 0xc4, 0xc1, 0xb7, + 0xb4, 0xb6, 0xca, 0xd7, 0xce, 0xc9, 0xc4, 0xba, 0xbd, 0xcc, 0xd3, 0xd1, + 0xcd, 0xc8, 0xcd, 0xc4, 0xbc, 0xbc, 0xbf, 0xbc, 0xb8, 0xc0, 0xbe, 0xb8, + 0xc1, 0xc0, 0xc2, 0xc4, 0xce, 0xce, 0xcf, 0xd4, 0xc9, 0xb7, 0xcc, 0xd0, + 0xce, 0xc7, 0xc5, 0xc3, 0xb9, 0xb4, 0xba, 0xc1, 0xc1, 0xc6, 0xd4, 0xc7, + 0xb8, 0xbc, 0xc5, 0xd2, 0xd3, 0xc8, 0xbc, 0xc6, 0xc9, 0xc8, 0xc8, 0xc9, + 0x43, 0x45, 0x4a, 0x4b, 0x4a, 0x4b, 0x4c, 0x49, 0x47, 0x46, 0x43, 0x42, + 0x3e, 0x41, 0x45, 0x48, 0x4e, 0x4e, 0x4f, 0x52, 0x53, 0x50, 0x4d, 0x4c, + 0x4d, 0x52, 0x52, 0x51, 0x52, 0x52, 0x52, 0x52, 0x58, 0x58, 0x59, 0x5e, + 0x62, 0x5c, 0x56, 0x58, 0x58, 0x5d, 0x5c, 0x5a, 0x5b, 0x61, 0x66, 0x63, + 0x63, 0x64, 0x66, 0x62, 0x64, 0x69, 0x6a, 0x68, 0x69, 0x6a, 0x6a, 0x6a, + 0x67, 0x66, 0x6a, 0x6a, 0x6a, 0x70, 0x6f, 0x6d, 0x6c, 0x69, 0x65, 0x64, + 0x64, 0x65, 0x68, 0x67, 0x66, 0x69, 0x6a, 0x6b, 0x6b, 0x6d, 0x6e, 0x6d, + 0x6e, 0x6e, 0x70, 0x70, 0x6f, 0x71, 0x70, 0x6e, 0x70, 0x71, 0x72, 
0x73, + 0x73, 0x72, 0x72, 0x73, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x76, 0x76, + 0x77, 0x77, 0x78, 0x78, 0x77, 0x77, 0x78, 0x77, 0x76, 0x76, 0x74, 0x75, + 0x75, 0x67, 0x39, 0x05, 0xe0, 0xd5, 0xc6, 0xc9, 0xcc, 0xc2, 0xba, 0xc3, + 0xce, 0xc4, 0xbc, 0xc1, 0xc8, 0xd4, 0xd4, 0xc8, 0xc0, 0xc1, 0xd6, 0xcf, + 0xca, 0xc4, 0xc3, 0xc8, 0xce, 0xc8, 0xc4, 0xc1, 0xca, 0xca, 0xd4, 0xd8, + 0xd0, 0xc6, 0xb6, 0xbd, 0xc5, 0xc8, 0xc0, 0xb8, 0xb3, 0xbf, 0xd3, 0xcf, + 0xc0, 0xc6, 0xc7, 0xb7, 0xb3, 0xb9, 0xce, 0xcd, 0xcb, 0xc9, 0xdb, 0xc9, + 0xc5, 0xbd, 0xbc, 0xbc, 0xb8, 0xbd, 0xbe, 0xc3, 0xce, 0xce, 0xc3, 0xbf, + 0xcd, 0xd5, 0xd8, 0xce, 0xc2, 0xba, 0xc8, 0xc6, 0xd3, 0xcc, 0xc2, 0xc2, + 0xbd, 0xbf, 0xbe, 0xb8, 0xb7, 0xc6, 0xd5, 0xc2, 0xb8, 0xbe, 0xc2, 0xc7, + 0xca, 0xc7, 0xbe, 0xc2, 0xce, 0xcd, 0xca, 0xc5, 0x4b, 0x4e, 0x4e, 0x4c, + 0x4d, 0x4e, 0x4d, 0x4c, 0x4a, 0x48, 0x46, 0x43, 0x40, 0x46, 0x48, 0x48, + 0x51, 0x52, 0x52, 0x53, 0x51, 0x4e, 0x4c, 0x4c, 0x4d, 0x52, 0x53, 0x51, + 0x52, 0x51, 0x50, 0x55, 0x58, 0x58, 0x5d, 0x61, 0x60, 0x58, 0x58, 0x5b, + 0x58, 0x5e, 0x5c, 0x5c, 0x5e, 0x65, 0x65, 0x62, 0x64, 0x64, 0x67, 0x64, + 0x65, 0x68, 0x6a, 0x69, 0x6a, 0x6a, 0x6b, 0x6b, 0x68, 0x68, 0x6b, 0x6a, + 0x6b, 0x70, 0x6f, 0x6d, 0x6a, 0x69, 0x64, 0x64, 0x64, 0x65, 0x66, 0x65, + 0x66, 0x66, 0x69, 0x6a, 0x69, 0x6c, 0x6c, 0x6c, 0x6d, 0x6d, 0x6f, 0x6e, + 0x6e, 0x71, 0x70, 0x6f, 0x71, 0x72, 0x72, 0x73, 0x73, 0x73, 0x73, 0x74, + 0x74, 0x75, 0x75, 0x74, 0x74, 0x75, 0x76, 0x77, 0x77, 0x78, 0x79, 0x79, + 0x78, 0x78, 0x78, 0x77, 0x77, 0x77, 0x76, 0x76, 0x76, 0x76, 0x68, 0x43, + 0x05, 0xdf, 0xce, 0xca, 0xc9, 0xbf, 0xbe, 0xc2, 0xc7, 0xbe, 0xba, 0xbc, + 0xc1, 0xd8, 0xdb, 0xd4, 0xc8, 0xc2, 0xce, 0xd4, 0xd4, 0xd2, 0xc2, 0xc2, + 0xcc, 0xcf, 0xc8, 0xc0, 0xc8, 0xcd, 0xd1, 0xdd, 0xd8, 0xc5, 0xbc, 0xc4, + 0xc1, 0xc8, 0xbc, 0xbc, 0xbc, 0xc5, 0xcb, 0xc2, 0xba, 0xc1, 0xc8, 0xbb, + 0xb2, 0xb4, 0xcf, 0xd2, 0xcd, 0xbb, 0xc3, 0xc6, 0xc5, 0xbc, 0xba, 0xc1, + 0xba, 0xbc, 0xc8, 0xc9, 0xd4, 0xd4, 0xd4, 0xcd, 0xc8, 0xd5, 0xde, 
0xd5, + 0xc8, 0xc2, 0xc5, 0xbb, 0xce, 0xc6, 0xc2, 0xc2, 0xc2, 0xd0, 0xc0, 0xb5, + 0xb4, 0xc1, 0xd8, 0xd4, 0xd5, 0xd0, 0xc8, 0xc1, 0xc2, 0xb9, 0xb9, 0xbd, + 0xce, 0xd4, 0xce, 0xc2, 0x50, 0x4f, 0x4c, 0x4d, 0x4d, 0x4c, 0x4c, 0x4a, + 0x4b, 0x4a, 0x47, 0x45, 0x46, 0x4c, 0x4c, 0x4d, 0x52, 0x52, 0x52, 0x52, + 0x51, 0x4f, 0x4e, 0x4c, 0x50, 0x54, 0x54, 0x50, 0x52, 0x4f, 0x52, 0x57, + 0x58, 0x59, 0x5e, 0x61, 0x5c, 0x5a, 0x5d, 0x5c, 0x5a, 0x5e, 0x5e, 0x5e, + 0x5f, 0x66, 0x64, 0x62, 0x64, 0x67, 0x67, 0x64, 0x69, 0x6b, 0x6b, 0x68, + 0x69, 0x69, 0x6a, 0x6a, 0x67, 0x69, 0x6a, 0x6a, 0x6c, 0x70, 0x6d, 0x6c, + 0x69, 0x68, 0x64, 0x64, 0x63, 0x64, 0x64, 0x63, 0x64, 0x65, 0x67, 0x68, + 0x68, 0x6b, 0x6a, 0x6a, 0x6c, 0x6d, 0x6d, 0x6b, 0x6d, 0x70, 0x72, 0x71, + 0x72, 0x71, 0x72, 0x72, 0x72, 0x73, 0x73, 0x74, 0x75, 0x75, 0x74, 0x73, + 0x73, 0x74, 0x76, 0x77, 0x77, 0x77, 0x79, 0x79, 0x78, 0x78, 0x77, 0x77, + 0x78, 0x78, 0x78, 0x77, 0x77, 0x77, 0x74, 0x61, 0x31, 0xe8, 0xce, 0xcd, + 0xca, 0xc2, 0xcb, 0xd0, 0xc9, 0xc4, 0xc0, 0xc2, 0xbf, 0xda, 0xdb, 0xd9, + 0xca, 0xc4, 0xc7, 0xcf, 0xdf, 0xe0, 0xc4, 0xbf, 0xc6, 0xc8, 0xc4, 0xbf, + 0xc3, 0xcb, 0xd1, 0xd8, 0xce, 0xc6, 0xc2, 0xbc, 0xc2, 0xc2, 0xc2, 0xc2, + 0xc5, 0xcf, 0xc7, 0xbb, 0xba, 0xbc, 0xc2, 0xc1, 0xb6, 0xb5, 0xd2, 0xd3, + 0xd2, 0xc0, 0xc8, 0xc8, 0xbd, 0xad, 0xb3, 0xbb, 0xb5, 0xb8, 0xd0, 0xd1, + 0xe8, 0xd9, 0xd4, 0xc8, 0xc1, 0xce, 0xce, 0xc8, 0xcc, 0xcd, 0xc8, 0xbe, + 0xc9, 0xc0, 0xc5, 0xc2, 0xbc, 0xd3, 0xcb, 0xb7, 0xb6, 0xc4, 0xd9, 0xd4, + 0xd4, 0xd4, 0xca, 0xc1, 0xc2, 0xb6, 0xb0, 0xb9, 0xbc, 0xcb, 0xcd, 0xc5, + 0x4f, 0x4f, 0x4c, 0x4c, 0x4d, 0x4c, 0x4e, 0x4d, 0x4c, 0x49, 0x48, 0x49, + 0x4d, 0x4f, 0x4f, 0x50, 0x52, 0x52, 0x52, 0x51, 0x4f, 0x4d, 0x4c, 0x4c, + 0x53, 0x54, 0x51, 0x51, 0x52, 0x50, 0x55, 0x58, 0x58, 0x58, 0x5f, 0x5c, + 0x5a, 0x5d, 0x5e, 0x5d, 0x5b, 0x5e, 0x5f, 0x60, 0x64, 0x66, 0x61, 0x62, + 0x64, 0x67, 0x67, 0x64, 0x6b, 0x6d, 0x6a, 0x65, 0x67, 0x67, 0x68, 0x69, + 0x68, 0x69, 0x69, 0x69, 0x6c, 0x6f, 0x6c, 0x6a, 0x65, 0x67, 0x64, 
0x62, + 0x62, 0x65, 0x64, 0x61, 0x60, 0x64, 0x67, 0x69, 0x69, 0x67, 0x69, 0x6a, + 0x6d, 0x6f, 0x6a, 0x6a, 0x6e, 0x72, 0x70, 0x6f, 0x70, 0x6f, 0x70, 0x70, + 0x71, 0x71, 0x72, 0x72, 0x75, 0x75, 0x74, 0x74, 0x74, 0x75, 0x75, 0x76, + 0x77, 0x77, 0x79, 0x79, 0x79, 0x7a, 0x79, 0x79, 0x79, 0x78, 0x79, 0x78, + 0x77, 0x77, 0x76, 0x70, 0x54, 0x08, 0xd6, 0xd3, 0xcf, 0xc5, 0xd4, 0xd1, + 0xc5, 0xc6, 0xc8, 0xc1, 0xc0, 0xd2, 0xd5, 0xd6, 0xcf, 0xc8, 0xc8, 0xc7, + 0xe0, 0xde, 0xd0, 0xc0, 0xc3, 0xc8, 0xc4, 0xc2, 0xc1, 0xc7, 0xd2, 0xd9, + 0xc5, 0xb4, 0xbc, 0xc0, 0xc7, 0xc8, 0xd1, 0xcf, 0xc8, 0xd1, 0xcb, 0xba, + 0xb4, 0xb6, 0xc7, 0xc5, 0xb1, 0xb1, 0xcd, 0xc6, 0xd2, 0xc4, 0xbd, 0xc2, + 0xb4, 0xa8, 0xb1, 0xbe, 0xbd, 0xb8, 0xd0, 0xc9, 0xda, 0xd8, 0xd1, 0xc1, + 0xbe, 0xc2, 0xca, 0xce, 0xcf, 0xd2, 0xc4, 0xc7, 0xce, 0xbe, 0xc8, 0xc4, + 0xbd, 0xca, 0xd4, 0xba, 0xb6, 0xc7, 0xce, 0xc9, 0xce, 0xce, 0xc6, 0xbe, + 0xbc, 0xc1, 0xb6, 0xbd, 0xbf, 0xc5, 0xbd, 0xce, 0x4b, 0x4a, 0x48, 0x4a, + 0x4e, 0x4e, 0x50, 0x50, 0x4c, 0x4b, 0x4c, 0x4f, 0x50, 0x50, 0x4e, 0x51, + 0x52, 0x52, 0x50, 0x51, 0x50, 0x4c, 0x4e, 0x51, 0x52, 0x52, 0x50, 0x52, + 0x4f, 0x4e, 0x55, 0x58, 0x56, 0x5a, 0x5c, 0x58, 0x5c, 0x5d, 0x5d, 0x5b, + 0x5b, 0x5f, 0x5e, 0x62, 0x65, 0x65, 0x61, 0x63, 0x64, 0x66, 0x65, 0x66, + 0x6d, 0x6d, 0x6a, 0x64, 0x66, 0x66, 0x67, 0x67, 0x69, 0x69, 0x6a, 0x6a, + 0x6b, 0x6d, 0x6a, 0x66, 0x63, 0x64, 0x61, 0x62, 0x63, 0x66, 0x64, 0x60, + 0x61, 0x65, 0x69, 0x6a, 0x68, 0x67, 0x6a, 0x6a, 0x6c, 0x6c, 0x6a, 0x6a, + 0x70, 0x72, 0x70, 0x6d, 0x6d, 0x6e, 0x6f, 0x6e, 0x70, 0x71, 0x72, 0x73, + 0x73, 0x74, 0x74, 0x75, 0x76, 0x76, 0x76, 0x76, 0x77, 0x78, 0x79, 0x79, + 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x79, 0x78, 0x78, 0x78, 0x79, 0x78, 0x76, + 0x64, 0x2e, 0xf1, 0xe0, 0xd4, 0xc5, 0xcf, 0xc8, 0xc2, 0xc6, 0xc9, 0xc3, + 0xcd, 0xd8, 0xd8, 0xd6, 0xd1, 0xc8, 0xc6, 0xc6, 0xd5, 0xd1, 0xd3, 0xd3, + 0xcf, 0xc8, 0xc4, 0xc2, 0xc5, 0xca, 0xd7, 0xd9, 0xca, 0xc4, 0xc8, 0xca, + 0xcc, 0xc0, 0xc5, 0xc9, 0xc2, 0xc7, 0xcc, 0xbd, 0xbc, 0xbb, 0xcc, 
0xc3, + 0xb4, 0xb6, 0xbc, 0xb6, 0xc6, 0xc5, 0xc2, 0xbc, 0xb2, 0xb0, 0xb6, 0xc2, + 0xc2, 0xbb, 0xcd, 0xc7, 0xc9, 0xd9, 0xd1, 0xc2, 0xbb, 0xc2, 0xc8, 0xc6, + 0xce, 0xcc, 0xc7, 0xc6, 0xcf, 0xc1, 0xcb, 0xc8, 0xc2, 0xcb, 0xcf, 0xbc, + 0xbe, 0xc7, 0xca, 0xc5, 0xcb, 0xcb, 0xc2, 0xbd, 0xbf, 0xc1, 0xb9, 0xc7, + 0xc8, 0xbe, 0xb8, 0xc8, 0x48, 0x48, 0x49, 0x4d, 0x4e, 0x51, 0x52, 0x50, + 0x4b, 0x4c, 0x4d, 0x4f, 0x50, 0x4e, 0x4f, 0x52, 0x52, 0x52, 0x52, 0x50, + 0x4c, 0x4f, 0x51, 0x51, 0x52, 0x52, 0x51, 0x52, 0x51, 0x51, 0x57, 0x56, + 0x57, 0x59, 0x57, 0x58, 0x5b, 0x5b, 0x5a, 0x58, 0x5c, 0x5e, 0x5d, 0x63, + 0x67, 0x64, 0x62, 0x64, 0x65, 0x66, 0x66, 0x67, 0x6c, 0x6c, 0x68, 0x64, + 0x65, 0x64, 0x64, 0x67, 0x6a, 0x68, 0x6a, 0x6a, 0x6b, 0x6c, 0x69, 0x60, + 0x61, 0x5f, 0x5d, 0x61, 0x64, 0x64, 0x62, 0x61, 0x66, 0x69, 0x69, 0x69, + 0x6a, 0x69, 0x6a, 0x6c, 0x6b, 0x6a, 0x6c, 0x6d, 0x72, 0x72, 0x71, 0x6e, + 0x6e, 0x6e, 0x70, 0x70, 0x70, 0x71, 0x72, 0x73, 0x73, 0x75, 0x75, 0x76, + 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x78, 0x78, 0x79, 0x7a, 0x7a, 0x7a, + 0x7a, 0x79, 0x79, 0x79, 0x78, 0x78, 0x79, 0x79, 0x73, 0x53, 0x16, 0xec, + 0xda, 0xd0, 0xcb, 0xc7, 0xc8, 0xcc, 0xcd, 0xc1, 0xcc, 0xd4, 0xd0, 0xc9, + 0xbc, 0xbe, 0xbf, 0xc7, 0xcb, 0xcc, 0xc5, 0xd5, 0xcd, 0xc6, 0xc4, 0xc3, + 0xc7, 0xc6, 0xc9, 0xcc, 0xca, 0xcd, 0xc8, 0xcb, 0xd6, 0xc3, 0xb7, 0xbb, + 0xc0, 0xc0, 0xc5, 0xbe, 0xbc, 0xc7, 0xcc, 0xc0, 0xb9, 0xb6, 0xb1, 0xb6, + 0xc8, 0xc6, 0xcf, 0xc2, 0xb3, 0xb6, 0xb5, 0xb5, 0xc0, 0xca, 0xd2, 0xc5, + 0xc3, 0xe0, 0xd6, 0xc5, 0xc2, 0xd5, 0xc8, 0xc2, 0xcd, 0xc4, 0xca, 0xd0, + 0xca, 0xc0, 0xc7, 0xc4, 0xbc, 0xc8, 0xc5, 0xba, 0xc4, 0xc9, 0xc5, 0xc2, + 0xc8, 0xc4, 0xbe, 0xb9, 0xbd, 0xc2, 0xbc, 0xc5, 0xc3, 0xbc, 0xc6, 0xc8, + 0x48, 0x4b, 0x4d, 0x4f, 0x51, 0x52, 0x52, 0x4e, 0x4c, 0x4c, 0x4e, 0x52, + 0x50, 0x4e, 0x51, 0x51, 0x50, 0x52, 0x51, 0x4d, 0x4f, 0x52, 0x52, 0x53, + 0x50, 0x51, 0x54, 0x52, 0x50, 0x52, 0x58, 0x58, 0x57, 0x55, 0x54, 0x5a, + 0x58, 0x58, 0x55, 0x5a, 0x5e, 0x5d, 0x5c, 0x62, 0x67, 0x64, 0x62, 
0x65, + 0x64, 0x64, 0x65, 0x67, 0x6b, 0x6a, 0x64, 0x63, 0x64, 0x63, 0x64, 0x67, + 0x6a, 0x67, 0x6a, 0x69, 0x6a, 0x6b, 0x66, 0x5e, 0x5f, 0x5a, 0x5b, 0x5e, + 0x63, 0x64, 0x64, 0x66, 0x69, 0x69, 0x69, 0x6b, 0x6b, 0x6a, 0x6b, 0x6c, + 0x6a, 0x6a, 0x6c, 0x70, 0x73, 0x72, 0x70, 0x6e, 0x70, 0x6e, 0x6f, 0x6f, + 0x6f, 0x71, 0x72, 0x73, 0x74, 0x75, 0x75, 0x76, 0x75, 0x76, 0x76, 0x75, + 0x76, 0x76, 0x76, 0x77, 0x78, 0x79, 0x79, 0x79, 0x79, 0x79, 0x78, 0x79, + 0x78, 0x78, 0x78, 0x78, 0x76, 0x6d, 0x40, 0x05, 0xe8, 0xe1, 0xc8, 0xbe, + 0xbb, 0xc1, 0xc8, 0xba, 0xc6, 0xd4, 0xcd, 0xc2, 0xba, 0xbc, 0xc0, 0xd4, + 0xca, 0xc5, 0xba, 0xc2, 0xc0, 0xc2, 0xc3, 0xc7, 0xc8, 0xc4, 0xba, 0xc2, + 0xc3, 0xcb, 0xc2, 0xc0, 0xd0, 0xcc, 0xbc, 0xb2, 0xbf, 0xc0, 0xc5, 0xc4, + 0xcd, 0xd9, 0xce, 0xc2, 0xc6, 0xb4, 0xb4, 0xc4, 0xca, 0xce, 0xcb, 0xc4, + 0xaf, 0xac, 0xb0, 0xb6, 0xc4, 0xcb, 0xcd, 0xc8, 0xc1, 0xd4, 0xda, 0xc9, + 0xc8, 0xdd, 0xe0, 0xd9, 0xd2, 0xc5, 0xce, 0xd9, 0xc6, 0xb6, 0xc3, 0xc0, + 0xb8, 0xc2, 0xc2, 0xc1, 0xca, 0xcc, 0xc2, 0xbc, 0xc4, 0xc2, 0xbd, 0xb7, + 0xbb, 0xc3, 0xc2, 0xbe, 0xba, 0xb6, 0xc7, 0xc1, 0x49, 0x4d, 0x50, 0x52, + 0x53, 0x52, 0x50, 0x4c, 0x4b, 0x4b, 0x50, 0x50, 0x4d, 0x4f, 0x51, 0x50, + 0x50, 0x52, 0x4e, 0x52, 0x56, 0x57, 0x54, 0x53, 0x50, 0x53, 0x55, 0x51, + 0x51, 0x56, 0x59, 0x58, 0x55, 0x53, 0x5a, 0x5a, 0x55, 0x53, 0x56, 0x5b, + 0x5c, 0x5b, 0x5c, 0x5f, 0x66, 0x64, 0x62, 0x66, 0x64, 0x63, 0x65, 0x65, + 0x6b, 0x6a, 0x62, 0x63, 0x64, 0x61, 0x61, 0x67, 0x6a, 0x68, 0x6a, 0x6a, + 0x6b, 0x69, 0x64, 0x61, 0x5e, 0x59, 0x5b, 0x5f, 0x64, 0x65, 0x67, 0x6a, + 0x6a, 0x69, 0x6b, 0x6c, 0x6a, 0x6a, 0x6a, 0x6c, 0x6a, 0x6a, 0x6e, 0x70, + 0x75, 0x74, 0x70, 0x6f, 0x70, 0x6f, 0x6d, 0x6e, 0x70, 0x72, 0x73, 0x73, + 0x74, 0x74, 0x75, 0x76, 0x75, 0x76, 0x76, 0x75, 0x75, 0x75, 0x76, 0x77, + 0x77, 0x78, 0x78, 0x79, 0x79, 0x79, 0x79, 0x78, 0x78, 0x78, 0x78, 0x78, + 0x77, 0x75, 0x5f, 0x28, 0xf1, 0xde, 0xcf, 0xbf, 0xbc, 0xc1, 0xc2, 0xc0, + 0xce, 0xd1, 0xc1, 0xbc, 0xc0, 0xbc, 0xbd, 0xcb, 0xc5, 0xbd, 0xb9, 
0xbd, + 0xc2, 0xc3, 0xc4, 0xc8, 0xc7, 0xc7, 0xb6, 0xbb, 0xbf, 0xc8, 0xc6, 0xdc, + 0xd9, 0xc3, 0xc2, 0xb4, 0xbe, 0xcc, 0xc8, 0xbd, 0xd4, 0xdd, 0xd8, 0xcb, + 0xd5, 0xb8, 0xc2, 0xce, 0xc1, 0xc5, 0xcb, 0xc7, 0xb6, 0xb1, 0xb0, 0xba, + 0xc5, 0xc3, 0xc8, 0xc3, 0xb8, 0xc6, 0xd0, 0xcf, 0xc7, 0xd4, 0xdb, 0xdd, + 0xd5, 0xd4, 0xd3, 0xd8, 0xc7, 0xb9, 0xc7, 0xc9, 0xc8, 0xbc, 0xc2, 0xc7, + 0xc6, 0xca, 0xc8, 0xcb, 0xcc, 0xba, 0xb6, 0xba, 0xc3, 0xc6, 0xc3, 0xc2, + 0xbd, 0xba, 0xc9, 0xbd, 0x4c, 0x4c, 0x4f, 0x52, 0x52, 0x4f, 0x4c, 0x49, + 0x49, 0x4c, 0x4f, 0x4e, 0x4f, 0x4f, 0x4f, 0x52, 0x53, 0x52, 0x52, 0x55, + 0x56, 0x54, 0x51, 0x4f, 0x51, 0x55, 0x53, 0x52, 0x57, 0x5e, 0x58, 0x53, + 0x54, 0x58, 0x59, 0x55, 0x52, 0x54, 0x57, 0x58, 0x5d, 0x5b, 0x5a, 0x60, + 0x64, 0x64, 0x60, 0x65, 0x62, 0x64, 0x65, 0x66, 0x6a, 0x6a, 0x63, 0x63, + 0x63, 0x5d, 0x62, 0x67, 0x69, 0x67, 0x6a, 0x6a, 0x6b, 0x68, 0x63, 0x60, + 0x5e, 0x5a, 0x5e, 0x5f, 0x64, 0x66, 0x6a, 0x6b, 0x6a, 0x6e, 0x6e, 0x6d, + 0x6a, 0x66, 0x66, 0x6a, 0x6a, 0x6d, 0x6f, 0x72, 0x75, 0x73, 0x70, 0x6f, + 0x6f, 0x6d, 0x6d, 0x6e, 0x70, 0x72, 0x72, 0x72, 0x74, 0x74, 0x74, 0x75, + 0x75, 0x76, 0x76, 0x75, 0x75, 0x75, 0x76, 0x77, 0x78, 0x78, 0x79, 0x79, + 0x78, 0x79, 0x79, 0x79, 0x79, 0x78, 0x77, 0x76, 0x76, 0x76, 0x6e, 0x41, + 0x06, 0xea, 0xe0, 0xd2, 0xc6, 0xc4, 0xc7, 0xca, 0xcf, 0xca, 0xbc, 0xb8, + 0xc7, 0xbd, 0xbe, 0xc4, 0xbf, 0xba, 0xb6, 0xb8, 0xbf, 0xc2, 0xc6, 0xc8, + 0xc7, 0xbf, 0xb4, 0xbc, 0xc1, 0xc6, 0xc6, 0xd7, 0xd3, 0xc0, 0xc1, 0xc4, + 0xbd, 0xc2, 0xcd, 0xc6, 0xd7, 0xd0, 0xd4, 0xcd, 0xcd, 0xbb, 0xc8, 0xd1, + 0xc0, 0xc1, 0xcb, 0xce, 0xc9, 0xbd, 0xb1, 0xbc, 0xca, 0xbd, 0xc4, 0xbe, + 0xbb, 0xbf, 0xc2, 0xce, 0xd5, 0xc8, 0xd0, 0xd5, 0xd0, 0xd1, 0xdd, 0xd7, + 0xc5, 0xbd, 0xbe, 0xcb, 0xdc, 0xc5, 0xc4, 0xc5, 0xbe, 0xbe, 0xc7, 0xce, + 0xc7, 0xbd, 0xb9, 0xc2, 0xca, 0xcb, 0xce, 0xc6, 0xca, 0xbc, 0xc2, 0xbc, + 0x4b, 0x4b, 0x4f, 0x50, 0x4d, 0x4c, 0x4b, 0x48, 0x4a, 0x4c, 0x4d, 0x4f, + 0x51, 0x50, 0x51, 0x54, 0x54, 0x53, 0x53, 0x56, 0x56, 0x51, 0x4f, 
0x52, + 0x55, 0x53, 0x51, 0x53, 0x5b, 0x5e, 0x55, 0x52, 0x56, 0x55, 0x57, 0x53, + 0x51, 0x54, 0x57, 0x58, 0x5e, 0x5d, 0x5a, 0x5e, 0x5f, 0x64, 0x61, 0x62, + 0x60, 0x65, 0x64, 0x69, 0x6b, 0x69, 0x63, 0x60, 0x60, 0x5b, 0x62, 0x67, + 0x69, 0x66, 0x68, 0x6a, 0x69, 0x63, 0x5f, 0x5e, 0x5b, 0x58, 0x5e, 0x5f, + 0x62, 0x67, 0x6c, 0x6d, 0x6c, 0x6d, 0x6e, 0x6d, 0x69, 0x66, 0x68, 0x6a, + 0x6b, 0x6f, 0x6f, 0x72, 0x75, 0x73, 0x6f, 0x70, 0x6e, 0x70, 0x70, 0x71, + 0x73, 0x74, 0x74, 0x75, 0x76, 0x76, 0x76, 0x75, 0x75, 0x75, 0x75, 0x75, + 0x76, 0x76, 0x77, 0x78, 0x77, 0x78, 0x79, 0x79, 0x79, 0x79, 0x7a, 0x79, + 0x78, 0x79, 0x77, 0x76, 0x75, 0x75, 0x72, 0x5f, 0x2e, 0x0b, 0xff, 0xdf, + 0xcc, 0xcf, 0xc6, 0xc7, 0xcd, 0xca, 0xc2, 0xba, 0xc1, 0xb8, 0xba, 0xbf, + 0xc2, 0xb9, 0xbb, 0xbc, 0xc0, 0xc1, 0xc2, 0xc1, 0xc6, 0xc0, 0xb8, 0xc0, + 0xc2, 0xc2, 0xc4, 0xcd, 0xd4, 0xdd, 0xdb, 0xcf, 0xc0, 0xbb, 0xcb, 0xcf, + 0xd6, 0xc2, 0xce, 0xcc, 0xc0, 0xbb, 0xcd, 0xd4, 0xc3, 0xc2, 0xc1, 0xc9, + 0xcf, 0xc2, 0xb9, 0xba, 0xc5, 0xbc, 0xc2, 0xc2, 0xb9, 0xbc, 0xc1, 0xc5, + 0xce, 0xc8, 0xcb, 0xd2, 0xc7, 0xbd, 0xd1, 0xd5, 0xd0, 0xd7, 0xbc, 0xb7, + 0xce, 0xca, 0xc5, 0xc3, 0xc7, 0xc8, 0xc6, 0xbb, 0xc0, 0xc3, 0xc5, 0xc4, + 0xc2, 0xc6, 0xda, 0xde, 0xce, 0xba, 0xb7, 0xbc, 0x4b, 0x4c, 0x4e, 0x4c, + 0x47, 0x46, 0x48, 0x49, 0x4b, 0x4c, 0x4d, 0x50, 0x4d, 0x4e, 0x52, 0x53, + 0x52, 0x52, 0x56, 0x58, 0x54, 0x4f, 0x52, 0x57, 0x57, 0x51, 0x4f, 0x54, + 0x5c, 0x59, 0x53, 0x55, 0x57, 0x56, 0x57, 0x54, 0x51, 0x55, 0x57, 0x5b, + 0x5e, 0x5e, 0x5a, 0x5e, 0x5e, 0x63, 0x63, 0x63, 0x62, 0x61, 0x63, 0x67, + 0x69, 0x66, 0x62, 0x5e, 0x5e, 0x5f, 0x63, 0x67, 0x68, 0x66, 0x66, 0x69, + 0x68, 0x62, 0x5e, 0x5c, 0x58, 0x5a, 0x5f, 0x63, 0x64, 0x68, 0x6d, 0x6e, + 0x6c, 0x6c, 0x6c, 0x6a, 0x68, 0x67, 0x68, 0x6a, 0x6c, 0x70, 0x71, 0x73, + 0x76, 0x72, 0x70, 0x6f, 0x6e, 0x6e, 0x70, 0x72, 0x73, 0x75, 0x75, 0x76, + 0x76, 0x76, 0x76, 0x75, 0x75, 0x75, 0x74, 0x75, 0x76, 0x76, 0x77, 0x78, + 0x77, 0x77, 0x78, 0x79, 0x7a, 0x7a, 0x7a, 0x79, 0x77, 0x79, 0x77, 
0x76, + 0x74, 0x73, 0x72, 0x6c, 0x51, 0x22, 0x05, 0xea, 0xd7, 0xce, 0xc3, 0xb6, + 0xbc, 0xc6, 0xcd, 0xce, 0xc9, 0xb4, 0xbe, 0xcd, 0xc2, 0xb8, 0xbe, 0xce, + 0xc9, 0xc0, 0xbf, 0xbf, 0xbd, 0xbe, 0xc0, 0xc6, 0xbd, 0xc0, 0xc2, 0xc7, + 0xd0, 0xda, 0xe2, 0xcf, 0xbf, 0xc2, 0xc8, 0xc4, 0xc8, 0xc2, 0xc8, 0xc5, + 0xbe, 0xbc, 0xc4, 0xc8, 0xc7, 0xc8, 0xc5, 0xc8, 0xce, 0xcd, 0xc5, 0xbf, + 0xcc, 0xc8, 0xbb, 0xc0, 0xba, 0xbf, 0xc4, 0xc2, 0xc4, 0xbe, 0xc7, 0xd2, + 0xd0, 0xbf, 0xce, 0xd7, 0xd5, 0xdf, 0xd7, 0xbc, 0xcd, 0xd1, 0xc8, 0xc2, + 0xcc, 0xcb, 0xd3, 0xc7, 0xca, 0xd7, 0xcc, 0xc8, 0xcc, 0xce, 0xd0, 0xc7, + 0xb9, 0xb5, 0xb1, 0xb3, 0x4d, 0x4c, 0x4c, 0x49, 0x46, 0x46, 0x4a, 0x4b, + 0x4a, 0x4c, 0x4e, 0x4e, 0x4b, 0x4d, 0x52, 0x54, 0x52, 0x52, 0x57, 0x56, + 0x50, 0x50, 0x57, 0x57, 0x52, 0x4d, 0x51, 0x59, 0x5e, 0x55, 0x56, 0x58, + 0x57, 0x57, 0x57, 0x52, 0x53, 0x57, 0x58, 0x5d, 0x5b, 0x5d, 0x5d, 0x5f, + 0x5d, 0x61, 0x62, 0x60, 0x5e, 0x5e, 0x62, 0x64, 0x65, 0x62, 0x5e, 0x5b, + 0x5e, 0x63, 0x64, 0x67, 0x68, 0x67, 0x67, 0x67, 0x62, 0x5f, 0x59, 0x59, + 0x58, 0x5e, 0x60, 0x64, 0x66, 0x6b, 0x6e, 0x6c, 0x6b, 0x6a, 0x68, 0x68, + 0x66, 0x65, 0x66, 0x69, 0x6d, 0x72, 0x74, 0x76, 0x76, 0x70, 0x6e, 0x6d, + 0x6c, 0x6e, 0x73, 0x73, 0x73, 0x76, 0x74, 0x76, 0x76, 0x77, 0x77, 0x76, + 0x74, 0x76, 0x74, 0x74, 0x75, 0x74, 0x76, 0x78, 0x76, 0x78, 0x79, 0x79, + 0x7a, 0x7a, 0x7a, 0x79, 0x79, 0x79, 0x79, 0x77, 0x75, 0x73, 0x71, 0x6f, + 0x67, 0x3e, 0x0a, 0xf2, 0xe9, 0xce, 0xc2, 0xc3, 0xb2, 0xb5, 0xbc, 0xc8, + 0xcc, 0xb5, 0xb6, 0xc4, 0xc0, 0xbb, 0xc5, 0xcb, 0xc8, 0xc2, 0xc2, 0xbf, + 0xbd, 0xc3, 0xc2, 0xcf, 0xc1, 0xbf, 0xbe, 0xc3, 0xce, 0xcf, 0xd6, 0xc5, + 0xba, 0xc2, 0xc6, 0xbf, 0xc8, 0xc8, 0xc0, 0xc3, 0xc8, 0xc3, 0xc6, 0xce, + 0xcc, 0xc5, 0xc2, 0xc2, 0xc8, 0xc2, 0xc2, 0xc6, 0xd5, 0xda, 0xbe, 0xbd, + 0xbb, 0xbc, 0xbc, 0xbe, 0xc6, 0xbc, 0xc5, 0xdc, 0xd8, 0xc1, 0xcb, 0xe5, + 0xdb, 0xd9, 0xd7, 0xbf, 0xc7, 0xce, 0xc9, 0xc7, 0xc2, 0xbc, 0xc8, 0xd4, + 0xc8, 0xd9, 0xcf, 0xc8, 0xdc, 0xda, 0xce, 0xc2, 0xb4, 0xbb, 0xc0, 
0xbc, + 0x4b, 0x47, 0x46, 0x46, 0x47, 0x47, 0x4b, 0x4a, 0x49, 0x4b, 0x48, 0x47, + 0x4b, 0x4c, 0x52, 0x53, 0x54, 0x56, 0x55, 0x50, 0x4e, 0x53, 0x57, 0x52, + 0x50, 0x4f, 0x53, 0x5c, 0x5d, 0x56, 0x57, 0x57, 0x57, 0x56, 0x54, 0x52, + 0x55, 0x58, 0x58, 0x5b, 0x5b, 0x5c, 0x5d, 0x5d, 0x5e, 0x61, 0x62, 0x62, + 0x60, 0x5f, 0x61, 0x61, 0x64, 0x5f, 0x5b, 0x58, 0x5b, 0x61, 0x64, 0x69, + 0x69, 0x64, 0x64, 0x61, 0x5e, 0x5d, 0x59, 0x57, 0x59, 0x5d, 0x61, 0x64, + 0x67, 0x6b, 0x6c, 0x6a, 0x67, 0x65, 0x65, 0x68, 0x66, 0x64, 0x62, 0x69, + 0x6e, 0x70, 0x75, 0x76, 0x76, 0x70, 0x6b, 0x6a, 0x6c, 0x6e, 0x73, 0x75, + 0x76, 0x76, 0x74, 0x75, 0x76, 0x77, 0x78, 0x77, 0x75, 0x76, 0x75, 0x72, + 0x72, 0x72, 0x74, 0x76, 0x77, 0x78, 0x79, 0x79, 0x79, 0x79, 0x79, 0x78, + 0x78, 0x79, 0x78, 0x78, 0x76, 0x73, 0x73, 0x73, 0x70, 0x5e, 0x2c, 0x02, + 0xe8, 0xce, 0xc2, 0xc4, 0xba, 0xc8, 0xc8, 0xb7, 0xc6, 0xb3, 0xb1, 0xb8, + 0xc3, 0xc5, 0xc8, 0xc8, 0xc8, 0xbd, 0xc1, 0xba, 0xbb, 0xbf, 0xc4, 0xcd, + 0xc3, 0xb7, 0xbc, 0xc4, 0xcc, 0xd4, 0xcd, 0xc3, 0xb6, 0xc2, 0xbe, 0xbc, + 0xbe, 0xc5, 0xc8, 0xc8, 0xc8, 0xc2, 0xc8, 0xc9, 0xc6, 0xbd, 0xbc, 0xc8, + 0xce, 0xcc, 0xca, 0xcf, 0xd3, 0xdf, 0xcc, 0xc3, 0xb6, 0xb2, 0xb2, 0xc1, + 0xca, 0xbe, 0xc6, 0xe0, 0xd7, 0xc6, 0xcc, 0xda, 0xd1, 0xd1, 0xce, 0xbc, + 0xbd, 0xcb, 0xc2, 0xc0, 0xc2, 0xbe, 0xbd, 0xd1, 0xc6, 0xcf, 0xc6, 0xc1, + 0xd0, 0xde, 0xcc, 0xc2, 0xc0, 0xc1, 0xc8, 0xc9, 0x45, 0x46, 0x46, 0x46, + 0x47, 0x49, 0x4b, 0x46, 0x47, 0x47, 0x44, 0x46, 0x47, 0x4e, 0x52, 0x50, + 0x52, 0x55, 0x52, 0x4f, 0x54, 0x57, 0x55, 0x52, 0x51, 0x53, 0x58, 0x5e, + 0x59, 0x55, 0x57, 0x58, 0x58, 0x55, 0x54, 0x54, 0x58, 0x58, 0x56, 0x58, + 0x5c, 0x5c, 0x5a, 0x5a, 0x5f, 0x61, 0x62, 0x5e, 0x5d, 0x5d, 0x5e, 0x62, + 0x62, 0x5d, 0x56, 0x56, 0x59, 0x62, 0x64, 0x66, 0x65, 0x60, 0x61, 0x5a, + 0x5c, 0x58, 0x56, 0x55, 0x56, 0x5b, 0x62, 0x65, 0x68, 0x6b, 0x6a, 0x65, + 0x64, 0x63, 0x65, 0x67, 0x67, 0x63, 0x61, 0x68, 0x6f, 0x71, 0x75, 0x76, + 0x75, 0x6f, 0x6a, 0x6b, 0x6b, 0x6d, 0x72, 0x72, 0x76, 0x75, 0x75, 
0x73, + 0x76, 0x78, 0x77, 0x76, 0x76, 0x76, 0x76, 0x72, 0x70, 0x71, 0x74, 0x76, + 0x77, 0x78, 0x78, 0x79, 0x79, 0x79, 0x79, 0x79, 0x7a, 0x7a, 0x79, 0x79, + 0x77, 0x74, 0x74, 0x76, 0x76, 0x73, 0x5a, 0x1a, 0xe9, 0xde, 0xe0, 0xd8, + 0xd2, 0xd5, 0xd4, 0xca, 0xcd, 0xb5, 0xb3, 0xbc, 0xc5, 0xc9, 0xcf, 0xda, + 0xc8, 0xba, 0xc1, 0xbd, 0xb4, 0xb7, 0xc0, 0xc2, 0xc5, 0xba, 0xba, 0xcf, + 0xd6, 0xd4, 0xc7, 0xc4, 0xb9, 0xc5, 0xb6, 0xbc, 0xb7, 0xb6, 0xd0, 0xd4, + 0xc0, 0xbc, 0xc8, 0xc5, 0xc1, 0xba, 0xc3, 0xd6, 0xce, 0xce, 0xca, 0xcb, + 0xd5, 0xd3, 0xd7, 0xcb, 0xbf, 0xbe, 0xc2, 0xc6, 0xc9, 0xc8, 0xde, 0xe6, + 0xd7, 0xc8, 0xd1, 0xdc, 0xd5, 0xd1, 0xc9, 0xb6, 0xbc, 0xeb, 0xde, 0xc0, + 0xbc, 0xbc, 0xba, 0xcd, 0xc8, 0xc5, 0xc2, 0xc5, 0xd7, 0xda, 0xc5, 0xb8, + 0xc5, 0xc4, 0xcc, 0xcf, 0x46, 0x48, 0x49, 0x46, 0x46, 0x48, 0x48, 0x46, + 0x44, 0x46, 0x46, 0x49, 0x4c, 0x4f, 0x52, 0x51, 0x53, 0x51, 0x4f, 0x4f, + 0x57, 0x58, 0x56, 0x51, 0x4f, 0x53, 0x5a, 0x5e, 0x58, 0x55, 0x58, 0x58, + 0x58, 0x56, 0x56, 0x54, 0x58, 0x58, 0x56, 0x58, 0x5c, 0x5d, 0x58, 0x59, + 0x5f, 0x62, 0x5d, 0x58, 0x59, 0x58, 0x5c, 0x61, 0x5f, 0x5b, 0x56, 0x57, + 0x59, 0x61, 0x64, 0x64, 0x5e, 0x5d, 0x5a, 0x57, 0x58, 0x51, 0x4f, 0x53, + 0x56, 0x60, 0x64, 0x64, 0x68, 0x6a, 0x67, 0x65, 0x63, 0x63, 0x67, 0x68, + 0x68, 0x66, 0x64, 0x68, 0x6f, 0x72, 0x74, 0x75, 0x73, 0x6f, 0x6a, 0x6b, + 0x6a, 0x6f, 0x72, 0x70, 0x74, 0x74, 0x75, 0x76, 0x77, 0x78, 0x77, 0x76, + 0x76, 0x76, 0x74, 0x73, 0x73, 0x75, 0x76, 0x77, 0x76, 0x77, 0x78, 0x78, + 0x78, 0x78, 0x78, 0x78, 0x79, 0x7a, 0x7a, 0x79, 0x77, 0x74, 0x74, 0x75, + 0x75, 0x76, 0x73, 0x4f, 0xfb, 0xdf, 0xda, 0xd5, 0xcd, 0xcc, 0xd1, 0xd6, + 0xd4, 0xbe, 0xb6, 0xba, 0xc1, 0xc5, 0xc9, 0xd0, 0xd1, 0xd5, 0xda, 0xd0, + 0xc2, 0xce, 0xc4, 0xc4, 0xc5, 0xc4, 0xc5, 0xcb, 0xd2, 0xd5, 0xc6, 0xca, + 0xca, 0xce, 0xbe, 0xba, 0xb0, 0xbc, 0xda, 0xcf, 0xc3, 0xca, 0xca, 0xc3, + 0xba, 0xb7, 0xca, 0xd0, 0xc6, 0xc2, 0xcc, 0xc2, 0xcb, 0xcb, 0xca, 0xc7, + 0xc4, 0xc1, 0xc4, 0xc2, 0xc8, 0xc7, 0xcc, 0xd4, 0xd8, 0xd3, 0xda, 
0xe0, + 0xe1, 0xd6, 0xd0, 0xbc, 0xbe, 0xdb, 0xda, 0xce, 0xc9, 0xbc, 0xb8, 0xbf, + 0xc2, 0xbc, 0xc0, 0xcd, 0xd4, 0xca, 0xbb, 0xb1, 0xcb, 0xdc, 0xcf, 0xcc, + 0x47, 0x47, 0x46, 0x46, 0x46, 0x47, 0x46, 0x44, 0x42, 0x46, 0x46, 0x4b, + 0x4f, 0x50, 0x50, 0x52, 0x51, 0x4f, 0x4f, 0x54, 0x56, 0x56, 0x53, 0x4f, + 0x52, 0x56, 0x59, 0x5b, 0x54, 0x55, 0x57, 0x57, 0x56, 0x55, 0x55, 0x54, + 0x58, 0x58, 0x53, 0x57, 0x5b, 0x5c, 0x58, 0x57, 0x5e, 0x5c, 0x58, 0x56, + 0x56, 0x57, 0x5b, 0x5d, 0x5e, 0x5c, 0x58, 0x58, 0x58, 0x5b, 0x5f, 0x62, + 0x5e, 0x5c, 0x55, 0x55, 0x53, 0x4c, 0x4e, 0x53, 0x5e, 0x63, 0x64, 0x65, + 0x67, 0x67, 0x65, 0x60, 0x5e, 0x64, 0x6a, 0x68, 0x69, 0x6a, 0x67, 0x6a, + 0x6f, 0x70, 0x73, 0x73, 0x71, 0x6d, 0x6a, 0x6a, 0x6a, 0x6f, 0x71, 0x6e, + 0x72, 0x74, 0x74, 0x76, 0x77, 0x77, 0x77, 0x76, 0x76, 0x76, 0x75, 0x76, + 0x77, 0x77, 0x78, 0x78, 0x77, 0x76, 0x77, 0x76, 0x77, 0x78, 0x77, 0x77, + 0x77, 0x79, 0x7a, 0x79, 0x76, 0x75, 0x73, 0x73, 0x73, 0x73, 0x74, 0x71, + 0x52, 0x08, 0xe0, 0xd5, 0xce, 0xce, 0xd6, 0xd5, 0xcc, 0xbc, 0xc0, 0xc0, + 0xc7, 0xc2, 0xc0, 0xca, 0xd3, 0xda, 0xf1, 0xda, 0xce, 0xe2, 0xd3, 0xd4, + 0xce, 0xce, 0xcb, 0xc0, 0xc7, 0xca, 0xc5, 0xc7, 0xcb, 0xd1, 0xcd, 0xb9, + 0xb5, 0xb9, 0xc6, 0xc2, 0xc5, 0xc7, 0xc3, 0xca, 0xc6, 0xc9, 0xc7, 0xc2, + 0xbd, 0xc1, 0xd4, 0xcd, 0xd1, 0xc7, 0xc8, 0xcc, 0xc9, 0xca, 0xc6, 0xbc, + 0xc5, 0xd2, 0xc4, 0xc3, 0xd4, 0xd6, 0xde, 0xdb, 0xda, 0xd5, 0xd6, 0xcc, + 0xc8, 0xd2, 0xd4, 0xd8, 0xd7, 0xc1, 0xb1, 0xb3, 0xc2, 0xcc, 0xce, 0xd4, + 0xcc, 0xbe, 0xb9, 0xb4, 0xc2, 0xd2, 0xce, 0xc5, 0x45, 0x46, 0x43, 0x44, + 0x45, 0x44, 0x44, 0x43, 0x42, 0x46, 0x4a, 0x4d, 0x4e, 0x4d, 0x4f, 0x50, + 0x50, 0x4e, 0x52, 0x54, 0x51, 0x55, 0x52, 0x52, 0x53, 0x53, 0x56, 0x58, + 0x53, 0x56, 0x58, 0x58, 0x56, 0x55, 0x53, 0x54, 0x58, 0x56, 0x52, 0x53, + 0x59, 0x59, 0x58, 0x58, 0x5b, 0x55, 0x54, 0x53, 0x54, 0x57, 0x5a, 0x58, + 0x59, 0x5e, 0x58, 0x55, 0x53, 0x57, 0x5e, 0x62, 0x5f, 0x58, 0x53, 0x53, + 0x4e, 0x4e, 0x50, 0x58, 0x61, 0x61, 0x63, 0x65, 0x63, 0x60, 0x5f, 
0x5a, + 0x59, 0x5e, 0x66, 0x68, 0x6b, 0x6f, 0x6a, 0x69, 0x6f, 0x70, 0x73, 0x72, + 0x70, 0x6c, 0x6a, 0x69, 0x6c, 0x6f, 0x6f, 0x70, 0x74, 0x74, 0x74, 0x76, + 0x76, 0x76, 0x77, 0x77, 0x76, 0x76, 0x76, 0x77, 0x76, 0x76, 0x76, 0x77, + 0x77, 0x77, 0x77, 0x76, 0x76, 0x76, 0x77, 0x76, 0x76, 0x77, 0x78, 0x78, + 0x79, 0x76, 0x74, 0x73, 0x73, 0x72, 0x70, 0x6d, 0x6c, 0x37, 0xf6, 0xe0, + 0xdf, 0xda, 0xd4, 0xd6, 0xda, 0xcb, 0xc2, 0xbc, 0xbc, 0xba, 0xc0, 0xc8, + 0xbf, 0xc7, 0xd2, 0xca, 0xc0, 0xd3, 0xda, 0xcf, 0xd1, 0xcc, 0xbf, 0xb6, + 0xbf, 0xbc, 0xc1, 0xc2, 0xbc, 0xc0, 0xca, 0xbf, 0xc2, 0xbf, 0xb3, 0xba, + 0xc6, 0xc2, 0xc6, 0xcf, 0xcb, 0xd6, 0xd5, 0xd0, 0xbd, 0xbb, 0xcc, 0xd6, + 0xd5, 0xc8, 0xc4, 0xcd, 0xda, 0xd4, 0xc2, 0xc1, 0xb6, 0xc5, 0xc8, 0xc1, + 0xd1, 0xe4, 0xe0, 0xe4, 0xdd, 0xd4, 0xce, 0xcc, 0xd2, 0xcf, 0xd0, 0xd2, + 0xd0, 0xc0, 0xae, 0xbc, 0xc4, 0xc6, 0xde, 0xe6, 0xce, 0xc2, 0xc4, 0xc3, + 0xbf, 0xc8, 0xcb, 0xbf, 0x46, 0x45, 0x43, 0x43, 0x42, 0x42, 0x41, 0x43, + 0x43, 0x48, 0x4c, 0x4c, 0x4c, 0x4b, 0x4c, 0x4c, 0x4f, 0x50, 0x52, 0x50, + 0x50, 0x54, 0x52, 0x52, 0x51, 0x4e, 0x54, 0x54, 0x52, 0x56, 0x58, 0x58, + 0x57, 0x53, 0x52, 0x54, 0x58, 0x52, 0x4f, 0x54, 0x58, 0x5b, 0x58, 0x57, + 0x55, 0x52, 0x52, 0x52, 0x54, 0x59, 0x5a, 0x57, 0x58, 0x5e, 0x58, 0x52, + 0x4f, 0x55, 0x60, 0x5f, 0x5c, 0x55, 0x55, 0x4f, 0x49, 0x4f, 0x56, 0x5d, + 0x5b, 0x5f, 0x5f, 0x5b, 0x58, 0x5d, 0x5c, 0x56, 0x54, 0x58, 0x62, 0x69, + 0x6e, 0x6f, 0x69, 0x68, 0x6e, 0x6f, 0x72, 0x71, 0x6f, 0x6b, 0x6a, 0x6a, + 0x6d, 0x6d, 0x6d, 0x70, 0x75, 0x75, 0x73, 0x75, 0x75, 0x76, 0x76, 0x77, + 0x76, 0x76, 0x75, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, + 0x76, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x78, 0x76, 0x75, 0x73, + 0x74, 0x76, 0x75, 0x73, 0x70, 0x5d, 0x22, 0xea, 0xe0, 0xd6, 0xcd, 0xce, + 0xd5, 0xcf, 0xbd, 0xbc, 0xbc, 0xc0, 0xc0, 0xc2, 0xc4, 0xce, 0xca, 0xca, + 0xd2, 0xd0, 0xde, 0xd8, 0xcd, 0xc8, 0xb8, 0xb2, 0xbc, 0xb9, 0xbf, 0xc6, + 0xb7, 0xbf, 0xd3, 0xd7, 0xd0, 0xb8, 0xad, 0xb7, 0xc7, 0xc8, 0xce, 
0xd2, + 0xce, 0xd4, 0xdc, 0xd7, 0xc4, 0xbe, 0xcb, 0xd3, 0xd3, 0xca, 0xce, 0xda, + 0xd9, 0xc5, 0xbf, 0xc8, 0xbb, 0xc2, 0xc7, 0xc9, 0xd3, 0xdf, 0xda, 0xe3, + 0xdb, 0xd4, 0xc6, 0xc3, 0xca, 0xc8, 0xca, 0xce, 0xc7, 0xb3, 0xaf, 0xbd, + 0xc2, 0xb8, 0xcc, 0xd8, 0xcd, 0xca, 0xc8, 0xbe, 0xb7, 0xc2, 0xc5, 0xbd, + 0x45, 0x43, 0x43, 0x44, 0x40, 0x3e, 0x3e, 0x44, 0x46, 0x47, 0x49, 0x4b, + 0x4b, 0x4b, 0x4c, 0x4c, 0x50, 0x51, 0x4f, 0x4e, 0x52, 0x52, 0x4f, 0x4d, + 0x4a, 0x4b, 0x55, 0x52, 0x53, 0x57, 0x57, 0x57, 0x57, 0x52, 0x51, 0x55, + 0x56, 0x4f, 0x4f, 0x56, 0x59, 0x58, 0x55, 0x55, 0x55, 0x50, 0x51, 0x52, + 0x54, 0x57, 0x55, 0x52, 0x57, 0x5c, 0x56, 0x4c, 0x4c, 0x58, 0x5e, 0x5a, + 0x57, 0x57, 0x53, 0x4d, 0x4c, 0x52, 0x56, 0x58, 0x5c, 0x59, 0x56, 0x56, + 0x5a, 0x59, 0x56, 0x53, 0x54, 0x56, 0x5f, 0x68, 0x6e, 0x6d, 0x6a, 0x69, + 0x6d, 0x70, 0x71, 0x70, 0x6a, 0x6a, 0x6a, 0x6c, 0x6d, 0x6c, 0x6c, 0x72, + 0x75, 0x75, 0x73, 0x74, 0x73, 0x76, 0x76, 0x77, 0x76, 0x76, 0x74, 0x75, + 0x73, 0x72, 0x74, 0x75, 0x75, 0x76, 0x76, 0x75, 0x76, 0x76, 0x77, 0x77, + 0x77, 0x77, 0x76, 0x77, 0x78, 0x76, 0x75, 0x73, 0x73, 0x75, 0x75, 0x76, + 0x76, 0x6e, 0x50, 0x10, 0xe2, 0xd6, 0xd1, 0xd7, 0xd5, 0xd0, 0xc3, 0xc0, + 0xc8, 0xca, 0xc7, 0xc6, 0xcd, 0xc4, 0xbc, 0xc0, 0xde, 0xe0, 0xdd, 0xd3, + 0xcc, 0xc1, 0xb3, 0xb3, 0xbf, 0xb6, 0xb7, 0xc5, 0xbf, 0xbf, 0xc8, 0xc9, + 0xc7, 0xb1, 0xaa, 0xb3, 0xc1, 0xc8, 0xcf, 0xc6, 0xd4, 0xe0, 0xda, 0xd3, + 0xc8, 0xbe, 0xc8, 0xce, 0xca, 0xc9, 0xcd, 0xd6, 0xd4, 0xc7, 0xbe, 0xc5, + 0xd0, 0xce, 0xc3, 0xcc, 0xd7, 0xe3, 0xda, 0xe0, 0xe1, 0xe2, 0xd0, 0xbd, + 0xc8, 0xc5, 0xcc, 0xce, 0xbc, 0xb4, 0xc0, 0xc4, 0xd0, 0xba, 0xbe, 0xc2, + 0xc4, 0xc8, 0xc7, 0xbb, 0xb1, 0xbc, 0xc7, 0xcb, 0x40, 0x40, 0x40, 0x3e, + 0x3b, 0x3b, 0x3f, 0x44, 0x45, 0x46, 0x46, 0x4a, 0x47, 0x48, 0x4a, 0x4c, + 0x4e, 0x4c, 0x4c, 0x4e, 0x51, 0x4c, 0x4d, 0x4b, 0x46, 0x4a, 0x52, 0x52, + 0x54, 0x57, 0x55, 0x57, 0x53, 0x52, 0x52, 0x55, 0x53, 0x4d, 0x4f, 0x58, + 0x58, 0x55, 0x52, 0x52, 0x4f, 0x4c, 0x51, 0x52, 0x53, 0x56, 0x53, 
0x53, + 0x58, 0x5b, 0x51, 0x49, 0x4f, 0x59, 0x59, 0x58, 0x57, 0x53, 0x4e, 0x4e, + 0x4d, 0x50, 0x51, 0x56, 0x58, 0x59, 0x57, 0x59, 0x58, 0x54, 0x53, 0x56, + 0x56, 0x58, 0x5f, 0x68, 0x6b, 0x6d, 0x6a, 0x69, 0x6d, 0x71, 0x70, 0x70, + 0x68, 0x6a, 0x6a, 0x6c, 0x6c, 0x6c, 0x6c, 0x72, 0x75, 0x74, 0x71, 0x72, + 0x72, 0x76, 0x76, 0x77, 0x76, 0x75, 0x70, 0x73, 0x71, 0x70, 0x70, 0x72, + 0x73, 0x74, 0x74, 0x74, 0x75, 0x76, 0x76, 0x76, 0x77, 0x77, 0x77, 0x76, + 0x76, 0x76, 0x75, 0x76, 0x76, 0x76, 0x75, 0x75, 0x76, 0x73, 0x69, 0x40, + 0xf6, 0xe0, 0xce, 0xce, 0xd1, 0xd1, 0xcb, 0xc2, 0xbf, 0xc6, 0xcc, 0xc9, + 0xc9, 0xb9, 0xb9, 0xb8, 0xc9, 0xcf, 0xca, 0xc7, 0xc8, 0xb3, 0xb2, 0xc1, + 0xc4, 0xb6, 0xb2, 0xbb, 0xbc, 0xbb, 0xc0, 0xc2, 0xc3, 0xb9, 0xb2, 0xad, + 0xb8, 0xc4, 0xc9, 0xc2, 0xd5, 0xda, 0xcf, 0xce, 0xc2, 0xc5, 0xd0, 0xce, + 0xc3, 0xc4, 0xc3, 0xcc, 0xcd, 0xc4, 0xb6, 0xb8, 0xc7, 0xc8, 0xbe, 0xc4, + 0xcb, 0xc3, 0xc8, 0xce, 0xd7, 0xd8, 0xcb, 0xc3, 0xc9, 0xce, 0xd5, 0xce, + 0xbe, 0xc0, 0xcf, 0xc2, 0xc2, 0xb8, 0xbb, 0xba, 0xc2, 0xc0, 0xc5, 0xc4, + 0xb6, 0xb5, 0xc9, 0xc9, 0x3e, 0x3f, 0x3d, 0x3a, 0x38, 0x3b, 0x3e, 0x42, + 0x44, 0x43, 0x43, 0x46, 0x44, 0x44, 0x49, 0x4c, 0x4b, 0x4c, 0x4c, 0x4d, + 0x4f, 0x4e, 0x4b, 0x46, 0x40, 0x4a, 0x4e, 0x52, 0x54, 0x58, 0x54, 0x55, + 0x52, 0x4f, 0x4f, 0x54, 0x51, 0x4c, 0x52, 0x58, 0x53, 0x52, 0x4f, 0x4c, + 0x4c, 0x4b, 0x52, 0x51, 0x4f, 0x51, 0x52, 0x55, 0x57, 0x56, 0x4f, 0x4d, + 0x57, 0x58, 0x55, 0x55, 0x54, 0x50, 0x4e, 0x50, 0x4e, 0x51, 0x52, 0x58, + 0x5a, 0x57, 0x51, 0x56, 0x54, 0x4e, 0x51, 0x5a, 0x59, 0x57, 0x5c, 0x6b, + 0x6c, 0x6c, 0x68, 0x6a, 0x6d, 0x71, 0x70, 0x6e, 0x69, 0x6a, 0x6b, 0x6a, + 0x6b, 0x6d, 0x6e, 0x72, 0x75, 0x71, 0x70, 0x72, 0x72, 0x76, 0x77, 0x77, + 0x76, 0x73, 0x70, 0x72, 0x71, 0x70, 0x70, 0x71, 0x73, 0x72, 0x72, 0x74, + 0x74, 0x76, 0x76, 0x76, 0x77, 0x77, 0x77, 0x76, 0x76, 0x76, 0x76, 0x77, + 0x77, 0x76, 0x75, 0x74, 0x72, 0x71, 0x70, 0x60, 0x28, 0xe6, 0xd2, 0xcb, + 0xd3, 0xce, 0xc5, 0xc1, 0xbd, 0xc8, 0xd4, 0xca, 0xc5, 0xb8, 0xb6, 
0xb6, + 0xb8, 0xc1, 0xc7, 0xc8, 0xc9, 0xb6, 0xb8, 0xc6, 0xc1, 0xba, 0xb2, 0xb6, + 0xba, 0xbd, 0xbe, 0xc0, 0xbe, 0xbf, 0xbf, 0xb0, 0xb1, 0xc5, 0xc1, 0xc7, + 0xd2, 0xc3, 0xb9, 0xc9, 0xc0, 0xcd, 0xd4, 0xd2, 0xca, 0xc7, 0xc2, 0xc8, + 0xc8, 0xc7, 0xb9, 0xc6, 0xcc, 0xcb, 0xcc, 0xc7, 0xb6, 0xb7, 0xc6, 0xc5, + 0xcc, 0xdd, 0xd5, 0xd3, 0xd7, 0xd9, 0xd5, 0xc8, 0xc3, 0xc8, 0xd5, 0xc0, + 0xc2, 0xbc, 0xbc, 0xb3, 0xb1, 0xb9, 0xbb, 0xc6, 0xbc, 0xaf, 0xc2, 0xc2, + 0x3d, 0x3b, 0x3b, 0x39, 0x39, 0x3f, 0x40, 0x45, 0x43, 0x42, 0x43, 0x43, + 0x43, 0x46, 0x4c, 0x4d, 0x4a, 0x47, 0x46, 0x4d, 0x50, 0x4c, 0x46, 0x40, + 0x3c, 0x4c, 0x4e, 0x51, 0x58, 0x57, 0x54, 0x55, 0x51, 0x4a, 0x4e, 0x52, + 0x4d, 0x4b, 0x54, 0x57, 0x52, 0x51, 0x4c, 0x49, 0x49, 0x4c, 0x50, 0x4b, + 0x4c, 0x4f, 0x4f, 0x55, 0x57, 0x53, 0x50, 0x52, 0x59, 0x58, 0x53, 0x53, + 0x53, 0x4f, 0x4f, 0x50, 0x51, 0x53, 0x56, 0x5b, 0x58, 0x52, 0x51, 0x53, + 0x52, 0x48, 0x56, 0x5e, 0x5b, 0x58, 0x61, 0x6c, 0x6d, 0x6c, 0x68, 0x6b, + 0x6d, 0x70, 0x6f, 0x6d, 0x6b, 0x6b, 0x6a, 0x69, 0x6b, 0x6d, 0x6f, 0x74, + 0x74, 0x72, 0x72, 0x70, 0x72, 0x75, 0x76, 0x76, 0x75, 0x70, 0x70, 0x72, + 0x71, 0x72, 0x73, 0x72, 0x72, 0x73, 0x74, 0x74, 0x73, 0x75, 0x76, 0x75, + 0x77, 0x77, 0x77, 0x77, 0x77, 0x76, 0x76, 0x77, 0x76, 0x74, 0x73, 0x71, + 0x70, 0x6f, 0x6a, 0x68, 0x46, 0x0d, 0xe4, 0xd1, 0xca, 0xc1, 0xbc, 0xc0, + 0xc6, 0xcb, 0xce, 0xc9, 0xc8, 0xbc, 0xb6, 0xb6, 0xb8, 0xc9, 0xd0, 0xc7, + 0xc9, 0xc7, 0xc2, 0xbe, 0xbe, 0xb9, 0xbc, 0xb7, 0xb5, 0xc8, 0xcb, 0xc1, + 0xbb, 0xcb, 0xc3, 0xb0, 0xb3, 0xbe, 0xc7, 0xd5, 0xd3, 0xbc, 0xbc, 0xc8, + 0xc8, 0xd1, 0xd1, 0xd3, 0xd2, 0xc5, 0xbd, 0xc2, 0xc9, 0xc8, 0xc0, 0xde, + 0xd4, 0xc6, 0xd3, 0xc1, 0xac, 0xbc, 0xc9, 0xc7, 0xcb, 0xda, 0xd4, 0xcc, + 0xd0, 0xd8, 0xcc, 0xbc, 0xc2, 0xce, 0xd3, 0xc1, 0xbf, 0xbf, 0xc8, 0xbc, + 0xbc, 0xb2, 0xb2, 0xb4, 0xbd, 0xb4, 0xc2, 0xc2, 0x37, 0x35, 0x38, 0x3a, + 0x3b, 0x3f, 0x40, 0x43, 0x40, 0x41, 0x44, 0x42, 0x41, 0x46, 0x4c, 0x4c, + 0x48, 0x46, 0x47, 0x4d, 0x4c, 0x47, 0x45, 0x3a, 0x3c, 0x49, 0x4d, 
0x53, + 0x57, 0x52, 0x53, 0x54, 0x4e, 0x4a, 0x4d, 0x52, 0x4b, 0x4b, 0x54, 0x52, + 0x4f, 0x4b, 0x46, 0x46, 0x48, 0x4c, 0x4a, 0x45, 0x48, 0x49, 0x4e, 0x55, + 0x54, 0x4e, 0x4c, 0x55, 0x59, 0x54, 0x53, 0x53, 0x51, 0x4d, 0x50, 0x52, + 0x52, 0x54, 0x59, 0x58, 0x52, 0x50, 0x4f, 0x54, 0x50, 0x50, 0x5b, 0x5e, + 0x5c, 0x5c, 0x66, 0x6a, 0x6d, 0x6d, 0x69, 0x6a, 0x6b, 0x70, 0x6e, 0x6d, + 0x6c, 0x6d, 0x6a, 0x69, 0x6b, 0x6c, 0x70, 0x74, 0x76, 0x73, 0x72, 0x71, + 0x73, 0x74, 0x76, 0x76, 0x75, 0x71, 0x70, 0x72, 0x71, 0x70, 0x72, 0x74, + 0x73, 0x72, 0x72, 0x74, 0x74, 0x75, 0x75, 0x76, 0x77, 0x77, 0x77, 0x77, + 0x77, 0x77, 0x76, 0x76, 0x74, 0x72, 0x72, 0x71, 0x6f, 0x6d, 0x6a, 0x67, + 0x5b, 0x31, 0xef, 0xce, 0xc8, 0xc4, 0xc2, 0xc3, 0xc5, 0xd4, 0xd1, 0xc7, + 0xc2, 0xb9, 0xb8, 0xb8, 0xb6, 0xc2, 0xd6, 0xd1, 0xc9, 0xd4, 0xcf, 0xc2, + 0xc2, 0xbc, 0xbc, 0xb9, 0xb0, 0xbe, 0xbf, 0xb4, 0xb0, 0xcc, 0xc5, 0xba, + 0xc0, 0xb6, 0xc2, 0xd0, 0xc8, 0xbb, 0xc5, 0xc8, 0xbb, 0xca, 0xd0, 0xce, + 0xca, 0xc6, 0xbb, 0xc0, 0xc3, 0xc6, 0xc2, 0xcf, 0xc8, 0xc6, 0xdb, 0xc0, + 0xb0, 0xc0, 0xcb, 0xc6, 0xc6, 0xca, 0xbf, 0xc6, 0xd5, 0xda, 0xcc, 0xc2, + 0xc8, 0xd0, 0xd6, 0xc0, 0xbd, 0xc0, 0xbf, 0xba, 0xbb, 0xb9, 0xb0, 0xad, + 0xbc, 0xb7, 0xbd, 0xbc, 0x32, 0x38, 0x3a, 0x3a, 0x3b, 0x3d, 0x40, 0x3f, + 0x3c, 0x3f, 0x41, 0x3e, 0x40, 0x47, 0x4c, 0x48, 0x45, 0x45, 0x48, 0x4c, + 0x4a, 0x46, 0x41, 0x3a, 0x40, 0x49, 0x4e, 0x54, 0x54, 0x4f, 0x52, 0x51, + 0x4b, 0x4b, 0x4f, 0x4f, 0x4c, 0x4e, 0x52, 0x4f, 0x4b, 0x46, 0x45, 0x46, + 0x46, 0x48, 0x46, 0x42, 0x44, 0x44, 0x4d, 0x52, 0x51, 0x4f, 0x50, 0x52, + 0x52, 0x52, 0x53, 0x4f, 0x4e, 0x51, 0x52, 0x53, 0x53, 0x57, 0x56, 0x52, + 0x52, 0x49, 0x50, 0x55, 0x56, 0x57, 0x5e, 0x61, 0x5e, 0x64, 0x69, 0x6a, + 0x6e, 0x69, 0x67, 0x6a, 0x6a, 0x6e, 0x6e, 0x6e, 0x6d, 0x70, 0x6b, 0x69, + 0x6a, 0x6e, 0x70, 0x75, 0x75, 0x73, 0x73, 0x73, 0x71, 0x72, 0x75, 0x75, + 0x74, 0x72, 0x70, 0x71, 0x73, 0x70, 0x70, 0x72, 0x74, 0x73, 0x72, 0x72, + 0x72, 0x73, 0x75, 0x76, 0x76, 0x77, 0x78, 0x78, 0x77, 0x76, 0x76, 
0x75, + 0x75, 0x76, 0x75, 0x74, 0x6a, 0x69, 0x64, 0x5c, 0x50, 0x3c, 0x04, 0xcf, + 0xd0, 0xc9, 0xc1, 0xc4, 0xcf, 0xcb, 0xc8, 0xc2, 0xba, 0xb5, 0xbd, 0xbf, + 0xc8, 0xc5, 0xda, 0xe0, 0xdf, 0xd7, 0xcc, 0xc4, 0xc9, 0xd1, 0xd4, 0xc0, + 0xb6, 0xbc, 0xb6, 0xc7, 0xbf, 0xb6, 0xbb, 0xbf, 0xc9, 0xbf, 0xbd, 0xc2, + 0xb9, 0xbf, 0xcf, 0xce, 0xbd, 0xca, 0xd3, 0xd1, 0xcc, 0xc6, 0xc2, 0xc2, + 0xbe, 0xc0, 0xbe, 0xc9, 0xc9, 0xcf, 0xd9, 0xc4, 0xb5, 0xb7, 0xc8, 0xcd, + 0xc8, 0xd1, 0xc7, 0xcc, 0xdf, 0xde, 0xd0, 0xc9, 0xd4, 0xd4, 0xce, 0xc3, + 0xbc, 0xbc, 0xb1, 0xaa, 0xb1, 0xb4, 0xac, 0xb6, 0xc3, 0xc4, 0xb6, 0xb5, + 0x34, 0x3a, 0x37, 0x37, 0x39, 0x3a, 0x3f, 0x3a, 0x39, 0x3d, 0x3c, 0x3a, + 0x3f, 0x48, 0x4a, 0x44, 0x43, 0x41, 0x48, 0x4a, 0x46, 0x42, 0x3b, 0x35, + 0x40, 0x4a, 0x4c, 0x52, 0x52, 0x4e, 0x52, 0x4e, 0x4b, 0x4c, 0x50, 0x4c, + 0x4b, 0x4c, 0x4c, 0x49, 0x47, 0x42, 0x43, 0x43, 0x42, 0x46, 0x47, 0x40, + 0x3b, 0x3f, 0x4c, 0x54, 0x52, 0x50, 0x51, 0x52, 0x51, 0x51, 0x4c, 0x4c, + 0x52, 0x54, 0x56, 0x53, 0x52, 0x54, 0x52, 0x52, 0x4d, 0x49, 0x53, 0x58, + 0x5a, 0x59, 0x5f, 0x61, 0x61, 0x67, 0x69, 0x6d, 0x6e, 0x65, 0x69, 0x6b, + 0x6a, 0x6d, 0x6e, 0x6d, 0x6e, 0x70, 0x6b, 0x6a, 0x6a, 0x6f, 0x70, 0x75, + 0x74, 0x72, 0x71, 0x72, 0x71, 0x71, 0x74, 0x73, 0x74, 0x73, 0x71, 0x71, + 0x72, 0x71, 0x70, 0x70, 0x72, 0x73, 0x74, 0x73, 0x73, 0x73, 0x73, 0x75, + 0x76, 0x77, 0x78, 0x78, 0x78, 0x78, 0x77, 0x77, 0x77, 0x77, 0x76, 0x75, + 0x70, 0x6c, 0x62, 0x58, 0x48, 0x36, 0x12, 0xdf, 0xd3, 0xcc, 0xbc, 0xbf, + 0xc4, 0xc5, 0xc5, 0xc1, 0xb4, 0xb4, 0xbe, 0xc2, 0xcb, 0xd2, 0xdc, 0xdc, + 0xe2, 0xd7, 0xc8, 0xc5, 0xc9, 0xd4, 0xdd, 0xc3, 0xbb, 0xb9, 0xb4, 0xc5, + 0xc8, 0xbc, 0xb4, 0xb8, 0xbd, 0xc2, 0xc5, 0xc7, 0xb7, 0xc1, 0xc5, 0xbc, + 0xc6, 0xcf, 0xc8, 0xce, 0xd1, 0xcc, 0xce, 0xd0, 0xbb, 0xb8, 0xc6, 0xd0, + 0xd0, 0xd8, 0xd0, 0xbc, 0xb7, 0xb7, 0xc9, 0xcc, 0xc6, 0xcb, 0xc8, 0xc8, + 0xd7, 0xe0, 0xd0, 0xd5, 0xe5, 0xd4, 0xc0, 0xc4, 0xbf, 0xc4, 0xb3, 0xac, + 0xaf, 0xac, 0xac, 0xcd, 0xd1, 0xc2, 0xb6, 0xb6, 0x34, 0x37, 0x38, 
0x38, + 0x3a, 0x3a, 0x3a, 0x34, 0x39, 0x3d, 0x3a, 0x3b, 0x42, 0x4a, 0x46, 0x44, + 0x40, 0x42, 0x49, 0x47, 0x45, 0x3d, 0x34, 0x33, 0x40, 0x49, 0x4c, 0x50, + 0x4f, 0x4e, 0x4f, 0x4b, 0x4c, 0x48, 0x4f, 0x4d, 0x4b, 0x43, 0x49, 0x45, + 0x46, 0x41, 0x40, 0x42, 0x42, 0x43, 0x44, 0x3b, 0x36, 0x3c, 0x4c, 0x54, + 0x51, 0x4e, 0x51, 0x50, 0x4d, 0x4c, 0x4b, 0x51, 0x52, 0x58, 0x58, 0x52, + 0x52, 0x52, 0x57, 0x52, 0x47, 0x4f, 0x58, 0x59, 0x5b, 0x5e, 0x61, 0x61, + 0x66, 0x64, 0x68, 0x70, 0x6a, 0x66, 0x6a, 0x6c, 0x6c, 0x6d, 0x70, 0x6c, + 0x6f, 0x70, 0x6d, 0x6c, 0x6b, 0x70, 0x70, 0x74, 0x75, 0x71, 0x71, 0x72, + 0x72, 0x71, 0x74, 0x73, 0x73, 0x73, 0x70, 0x70, 0x71, 0x72, 0x71, 0x70, + 0x71, 0x72, 0x73, 0x73, 0x73, 0x74, 0x75, 0x75, 0x76, 0x77, 0x77, 0x78, + 0x79, 0x79, 0x78, 0x78, 0x77, 0x76, 0x76, 0x76, 0x76, 0x74, 0x6e, 0x67, + 0x51, 0x34, 0x09, 0xf2, 0xde, 0xc6, 0xc8, 0xbe, 0xbb, 0xcb, 0xc8, 0xbc, + 0xb2, 0xb5, 0xc1, 0xc8, 0xc2, 0xc8, 0xce, 0xd4, 0xcc, 0xc8, 0xc8, 0xd8, + 0xc7, 0xbd, 0xcd, 0xc6, 0xba, 0xb8, 0xbc, 0xbd, 0xbd, 0xc0, 0xb4, 0xb5, + 0xbc, 0xc9, 0xcc, 0xc0, 0xb7, 0xbe, 0xb9, 0xb5, 0xc7, 0xcc, 0xce, 0xd4, + 0xd3, 0xd4, 0xce, 0xd5, 0xbf, 0xb5, 0xc0, 0xd1, 0xc8, 0xbf, 0xbe, 0xc9, + 0xcc, 0xc7, 0xdd, 0xd5, 0xce, 0xd5, 0xce, 0xc9, 0xd5, 0xda, 0xcc, 0xd4, + 0xda, 0xc8, 0xbf, 0xbc, 0xbf, 0xbf, 0xb0, 0xab, 0xb2, 0xac, 0xae, 0xc8, + 0xc9, 0xbe, 0xc5, 0xbf, 0x34, 0x37, 0x3a, 0x37, 0x38, 0x3a, 0x37, 0x32, + 0x3a, 0x3a, 0x3a, 0x3c, 0x42, 0x46, 0x43, 0x41, 0x44, 0x46, 0x48, 0x46, + 0x42, 0x39, 0x2f, 0x34, 0x42, 0x46, 0x4b, 0x4c, 0x4c, 0x4d, 0x4e, 0x49, + 0x4c, 0x46, 0x4c, 0x46, 0x42, 0x42, 0x47, 0x44, 0x46, 0x40, 0x41, 0x40, + 0x3f, 0x44, 0x42, 0x3a, 0x36, 0x3f, 0x4c, 0x56, 0x52, 0x4e, 0x51, 0x4c, + 0x47, 0x48, 0x4c, 0x52, 0x58, 0x5a, 0x58, 0x54, 0x53, 0x55, 0x57, 0x4d, + 0x49, 0x54, 0x59, 0x5a, 0x5b, 0x5f, 0x64, 0x64, 0x65, 0x61, 0x6a, 0x6f, + 0x67, 0x67, 0x6c, 0x6e, 0x6e, 0x6e, 0x70, 0x6d, 0x6e, 0x70, 0x6d, 0x6d, + 0x6c, 0x70, 0x70, 0x74, 0x74, 0x71, 0x70, 0x72, 0x72, 0x70, 0x73, 
0x73, + 0x72, 0x72, 0x71, 0x71, 0x72, 0x73, 0x71, 0x71, 0x71, 0x72, 0x73, 0x73, + 0x74, 0x74, 0x75, 0x76, 0x76, 0x76, 0x76, 0x76, 0x77, 0x78, 0x78, 0x78, + 0x77, 0x77, 0x77, 0x77, 0x76, 0x75, 0x74, 0x70, 0x62, 0x45, 0x0a, 0x04, + 0xf8, 0xc8, 0xcd, 0xc1, 0xc3, 0xd0, 0xc0, 0xba, 0xb6, 0xbc, 0xbd, 0xc1, + 0xc2, 0xc3, 0xc6, 0xcc, 0xcc, 0xc7, 0xd2, 0xd0, 0xc2, 0xb1, 0xc6, 0xc8, + 0xbc, 0xbd, 0xd0, 0xce, 0xc7, 0xc4, 0xb8, 0xbd, 0xbc, 0xbe, 0xbd, 0xb3, + 0xb6, 0xc7, 0xbc, 0xbb, 0xc2, 0xc2, 0xd4, 0xce, 0xcb, 0xce, 0xce, 0xd3, + 0xc9, 0xb6, 0xc2, 0xca, 0xc9, 0xc0, 0xce, 0xe1, 0xda, 0xda, 0xe1, 0xe0, + 0xd4, 0xcf, 0xd1, 0xd4, 0xdf, 0xd2, 0xc5, 0xcd, 0xc7, 0xcc, 0xcd, 0xb9, + 0xb9, 0xba, 0xbc, 0xc3, 0xc1, 0xb4, 0xaa, 0xbc, 0xc7, 0xbd, 0xc8, 0xc1, + 0x35, 0x37, 0x38, 0x35, 0x34, 0x36, 0x34, 0x33, 0x37, 0x38, 0x3a, 0x3b, + 0x40, 0x42, 0x3f, 0x41, 0x43, 0x43, 0x47, 0x45, 0x3f, 0x34, 0x2e, 0x36, + 0x45, 0x47, 0x48, 0x47, 0x4c, 0x4e, 0x4b, 0x45, 0x46, 0x42, 0x46, 0x40, + 0x3b, 0x3d, 0x44, 0x43, 0x45, 0x40, 0x40, 0x3f, 0x3e, 0x42, 0x42, 0x3e, + 0x40, 0x47, 0x4f, 0x53, 0x4f, 0x4f, 0x4c, 0x46, 0x45, 0x47, 0x52, 0x5a, + 0x5c, 0x59, 0x56, 0x52, 0x54, 0x58, 0x4c, 0x49, 0x4c, 0x56, 0x5b, 0x5d, + 0x5e, 0x63, 0x66, 0x65, 0x63, 0x64, 0x6d, 0x6b, 0x64, 0x68, 0x6d, 0x6e, + 0x6f, 0x6e, 0x6d, 0x6d, 0x6f, 0x70, 0x6f, 0x6e, 0x6d, 0x70, 0x70, 0x73, + 0x73, 0x70, 0x70, 0x72, 0x71, 0x70, 0x73, 0x73, 0x71, 0x71, 0x70, 0x72, + 0x72, 0x74, 0x72, 0x70, 0x72, 0x71, 0x72, 0x74, 0x75, 0x75, 0x75, 0x75, + 0x76, 0x76, 0x76, 0x76, 0x77, 0x78, 0x77, 0x78, 0x78, 0x78, 0x77, 0x78, + 0x77, 0x76, 0x75, 0x71, 0x6f, 0x60, 0x40, 0x0e, 0xea, 0xe0, 0xd9, 0xc9, + 0xc8, 0xc8, 0xb8, 0xb6, 0xb8, 0xb9, 0xba, 0xc0, 0xbf, 0xbf, 0xc6, 0xc2, + 0xcc, 0xc3, 0xc2, 0xc6, 0xcb, 0xb8, 0xc5, 0xc3, 0xbf, 0xc2, 0xcb, 0xcd, + 0xc7, 0xc3, 0xb8, 0xb2, 0xb1, 0xb6, 0xb8, 0xb4, 0xbb, 0xc8, 0xbc, 0xb9, + 0xb2, 0xbe, 0xe0, 0xd5, 0xc2, 0xc2, 0xc1, 0xc0, 0xce, 0xc8, 0xc9, 0xc8, + 0xcc, 0xc6, 0xd3, 0xe0, 0xe1, 0xd2, 0xda, 0xd3, 0xcb, 0xc7, 0xcb, 
0xc9, + 0xd5, 0xc3, 0xbc, 0xc4, 0xd5, 0xc6, 0xc8, 0xbb, 0xbc, 0xbe, 0xcd, 0xd0, + 0xba, 0xb0, 0xae, 0xc0, 0xc9, 0xbd, 0xca, 0xce, 0x34, 0x34, 0x32, 0x34, + 0x31, 0x30, 0x2f, 0x2f, 0x34, 0x38, 0x38, 0x3a, 0x41, 0x40, 0x3d, 0x3e, + 0x3f, 0x42, 0x44, 0x41, 0x3b, 0x32, 0x2c, 0x38, 0x44, 0x46, 0x46, 0x46, + 0x4a, 0x49, 0x43, 0x40, 0x40, 0x3f, 0x41, 0x3c, 0x38, 0x3b, 0x43, 0x44, + 0x42, 0x40, 0x3f, 0x3b, 0x3b, 0x42, 0x45, 0x43, 0x47, 0x4d, 0x4f, 0x4f, + 0x4e, 0x4d, 0x48, 0x48, 0x4a, 0x4e, 0x59, 0x5b, 0x5b, 0x5b, 0x56, 0x54, + 0x55, 0x4f, 0x47, 0x4a, 0x4f, 0x5a, 0x5e, 0x64, 0x62, 0x64, 0x65, 0x60, + 0x63, 0x67, 0x6c, 0x68, 0x63, 0x6a, 0x6b, 0x6b, 0x6d, 0x6b, 0x6b, 0x6d, + 0x70, 0x70, 0x6f, 0x6f, 0x70, 0x70, 0x70, 0x73, 0x72, 0x6f, 0x70, 0x70, + 0x71, 0x71, 0x73, 0x71, 0x71, 0x70, 0x71, 0x72, 0x73, 0x75, 0x73, 0x71, + 0x72, 0x72, 0x72, 0x73, 0x74, 0x75, 0x76, 0x77, 0x76, 0x76, 0x76, 0x77, + 0x78, 0x78, 0x78, 0x78, 0x77, 0x78, 0x76, 0x76, 0x76, 0x76, 0x76, 0x74, + 0x71, 0x6a, 0x53, 0x1a, 0xe9, 0xda, 0xd2, 0xcf, 0xcc, 0xbe, 0xbb, 0xb9, + 0xb7, 0xb7, 0xbc, 0xbf, 0xba, 0xbe, 0xc3, 0xbc, 0xc6, 0xc3, 0xc5, 0xc6, + 0xc3, 0xbb, 0xc9, 0xc0, 0xba, 0xbe, 0xc8, 0xc9, 0xc2, 0xc7, 0xb8, 0xac, + 0xa8, 0xb6, 0xc7, 0xc6, 0xbc, 0xb9, 0xb3, 0xb6, 0xbb, 0xbe, 0xcb, 0xbe, + 0xbd, 0xca, 0xbf, 0xb7, 0xc8, 0xd8, 0xd4, 0xd7, 0xca, 0xbd, 0xce, 0xe0, + 0xe2, 0xd4, 0xcc, 0xc3, 0xb6, 0xb9, 0xbf, 0xc4, 0xce, 0xca, 0xd7, 0xd3, + 0xce, 0xc8, 0xc0, 0xbc, 0xbb, 0xb5, 0xc1, 0xd3, 0xcc, 0xb5, 0xbc, 0xc4, + 0xc8, 0xc3, 0xcd, 0xce, 0x2e, 0x30, 0x30, 0x31, 0x2e, 0x2c, 0x2d, 0x2e, + 0x30, 0x34, 0x37, 0x3a, 0x40, 0x3a, 0x36, 0x3a, 0x3d, 0x3f, 0x41, 0x3d, + 0x37, 0x2f, 0x2d, 0x36, 0x3e, 0x41, 0x43, 0x44, 0x47, 0x43, 0x3d, 0x3a, + 0x39, 0x3c, 0x40, 0x37, 0x37, 0x3c, 0x41, 0x44, 0x44, 0x3f, 0x3c, 0x3c, + 0x3d, 0x44, 0x49, 0x4c, 0x4c, 0x4f, 0x50, 0x4c, 0x48, 0x4a, 0x46, 0x46, + 0x4d, 0x58, 0x58, 0x58, 0x5c, 0x5d, 0x56, 0x52, 0x50, 0x4c, 0x4b, 0x4e, + 0x58, 0x5d, 0x5f, 0x63, 0x62, 0x62, 0x63, 0x64, 0x64, 0x67, 0x6a, 
0x65, + 0x63, 0x6c, 0x6a, 0x6d, 0x6d, 0x6a, 0x6a, 0x6f, 0x70, 0x6f, 0x6e, 0x6f, + 0x72, 0x70, 0x6f, 0x71, 0x71, 0x6e, 0x6f, 0x6e, 0x70, 0x70, 0x72, 0x70, + 0x70, 0x6f, 0x72, 0x72, 0x73, 0x75, 0x75, 0x73, 0x73, 0x72, 0x72, 0x72, + 0x73, 0x74, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x77, 0x77, 0x77, 0x78, + 0x77, 0x76, 0x76, 0x76, 0x77, 0x77, 0x77, 0x76, 0x70, 0x6d, 0x64, 0x3d, + 0xf3, 0xda, 0xd2, 0xc5, 0xcb, 0xc6, 0xc4, 0xc5, 0xba, 0xbc, 0xc1, 0xbe, + 0xb7, 0xbc, 0xb2, 0xb3, 0xbb, 0xc6, 0xbf, 0xd7, 0xd1, 0xb6, 0xbe, 0xbc, + 0xba, 0xb9, 0xc2, 0xc2, 0xc2, 0xbe, 0xb5, 0xab, 0xaa, 0xbc, 0xcc, 0xc8, + 0xc6, 0xc3, 0xb7, 0xb9, 0xbb, 0xc2, 0xcc, 0xbb, 0xc4, 0xcb, 0xc0, 0xbe, + 0xbf, 0xd4, 0xe0, 0xe5, 0xcb, 0xc0, 0xd4, 0xe1, 0xd4, 0xc8, 0xc8, 0xcc, + 0xb0, 0xb2, 0xbd, 0xc1, 0xc2, 0xbd, 0xd3, 0xd4, 0xc1, 0xbb, 0xc0, 0xc9, + 0xbf, 0xba, 0xbc, 0xc5, 0xb7, 0xb2, 0xc6, 0xc5, 0xc9, 0xc7, 0xbd, 0xbb, + 0x2b, 0x2c, 0x2e, 0x2e, 0x2b, 0x29, 0x2b, 0x2f, 0x33, 0x36, 0x36, 0x39, + 0x3c, 0x33, 0x32, 0x38, 0x3c, 0x3f, 0x3b, 0x36, 0x33, 0x2c, 0x2e, 0x36, + 0x3a, 0x3d, 0x3f, 0x42, 0x43, 0x40, 0x39, 0x35, 0x34, 0x3a, 0x3d, 0x36, + 0x39, 0x3d, 0x40, 0x41, 0x44, 0x41, 0x3c, 0x3c, 0x42, 0x4a, 0x4e, 0x4e, + 0x4e, 0x50, 0x4f, 0x48, 0x46, 0x47, 0x46, 0x4c, 0x54, 0x57, 0x55, 0x5b, + 0x5e, 0x5e, 0x56, 0x50, 0x4b, 0x4c, 0x4e, 0x53, 0x57, 0x5c, 0x5d, 0x5e, + 0x5d, 0x60, 0x65, 0x67, 0x66, 0x6a, 0x6a, 0x60, 0x64, 0x6d, 0x6c, 0x6d, + 0x6d, 0x6a, 0x6a, 0x70, 0x70, 0x6d, 0x6d, 0x6f, 0x72, 0x71, 0x6e, 0x70, + 0x70, 0x6d, 0x6e, 0x6e, 0x6f, 0x70, 0x70, 0x6e, 0x70, 0x70, 0x72, 0x70, + 0x71, 0x75, 0x76, 0x74, 0x75, 0x74, 0x72, 0x71, 0x72, 0x73, 0x74, 0x74, + 0x75, 0x75, 0x75, 0x75, 0x76, 0x77, 0x78, 0x78, 0x76, 0x76, 0x78, 0x78, + 0x78, 0x78, 0x78, 0x77, 0x75, 0x6e, 0x58, 0x44, 0x15, 0xfe, 0xe7, 0xd5, + 0xcc, 0xda, 0xc9, 0xcb, 0xbd, 0xbe, 0xc6, 0xbc, 0xb0, 0xb0, 0xb1, 0xbc, + 0xb0, 0xc0, 0xbc, 0xc8, 0xd3, 0xb8, 0xb9, 0xba, 0xbd, 0xbc, 0xc1, 0xc2, + 0xc5, 0xb8, 0xb1, 0xaa, 0xaf, 0xbd, 0xc6, 0xc8, 0xcc, 0xd0, 0xbd, 
0xb9, + 0xb7, 0xca, 0xd1, 0xbc, 0xca, 0xc8, 0xbf, 0xbe, 0xc1, 0xcc, 0xd2, 0xdc, + 0xcd, 0xc8, 0xc2, 0xd0, 0xce, 0xce, 0xc5, 0xc0, 0xb0, 0xb5, 0xc0, 0xbf, + 0xc0, 0xb0, 0xb3, 0xb8, 0xba, 0xbd, 0xca, 0xce, 0xc1, 0xc0, 0xb9, 0xbf, + 0xb4, 0xb9, 0xc8, 0xc1, 0xbe, 0xc4, 0xb5, 0xbc, 0x27, 0x2a, 0x29, 0x28, + 0x27, 0x28, 0x29, 0x2d, 0x35, 0x35, 0x35, 0x36, 0x39, 0x2e, 0x31, 0x34, + 0x3b, 0x3f, 0x34, 0x34, 0x2f, 0x2c, 0x2e, 0x33, 0x35, 0x3d, 0x3f, 0x42, + 0x3e, 0x3c, 0x3a, 0x39, 0x36, 0x3a, 0x3a, 0x3a, 0x3b, 0x3c, 0x3e, 0x40, + 0x47, 0x46, 0x40, 0x3f, 0x45, 0x4c, 0x4d, 0x4d, 0x4b, 0x4c, 0x4a, 0x4b, + 0x46, 0x48, 0x4d, 0x52, 0x52, 0x53, 0x57, 0x5c, 0x5e, 0x5a, 0x52, 0x48, + 0x42, 0x49, 0x50, 0x52, 0x57, 0x5c, 0x5a, 0x5a, 0x5b, 0x63, 0x67, 0x68, + 0x6a, 0x6c, 0x69, 0x5f, 0x64, 0x6c, 0x6f, 0x6f, 0x6d, 0x6a, 0x69, 0x6f, + 0x6e, 0x6b, 0x6c, 0x6e, 0x70, 0x70, 0x6d, 0x6f, 0x6d, 0x6b, 0x6e, 0x6d, + 0x6e, 0x6e, 0x6e, 0x6e, 0x6f, 0x6f, 0x71, 0x71, 0x71, 0x74, 0x75, 0x74, + 0x75, 0x75, 0x74, 0x72, 0x72, 0x72, 0x71, 0x72, 0x73, 0x74, 0x74, 0x75, + 0x76, 0x76, 0x77, 0x77, 0x77, 0x77, 0x78, 0x79, 0x79, 0x79, 0x78, 0x77, + 0x76, 0x72, 0x62, 0x4c, 0x43, 0x28, 0x0c, 0xe4, 0xcc, 0xe0, 0xd7, 0xc3, + 0xc3, 0xc6, 0xc9, 0xbb, 0xb4, 0xb6, 0xb6, 0xbe, 0xb3, 0xc2, 0xc4, 0xc9, + 0xc3, 0xbc, 0xbb, 0xbd, 0xbe, 0xb9, 0xc1, 0xc3, 0xc6, 0xb4, 0xb2, 0xb6, + 0xb6, 0xbb, 0xbb, 0xc5, 0xc8, 0xcd, 0xc9, 0xc2, 0xbc, 0xce, 0xcf, 0xbe, + 0xc2, 0xc0, 0xc2, 0xbf, 0xc2, 0xc2, 0xc8, 0xd8, 0xdb, 0xc6, 0xbb, 0xc8, + 0xcc, 0xcc, 0xc8, 0xb6, 0xae, 0xb4, 0xc2, 0xbc, 0xbf, 0xb4, 0xab, 0xae, + 0xb9, 0xbf, 0xbc, 0xc6, 0xc0, 0xb8, 0xb4, 0xc2, 0xb7, 0xc2, 0xca, 0xbb, + 0xbc, 0xc1, 0xb5, 0xbc, 0x25, 0x29, 0x26, 0x23, 0x24, 0x29, 0x2d, 0x2e, + 0x35, 0x32, 0x32, 0x34, 0x34, 0x2e, 0x33, 0x34, 0x3a, 0x3a, 0x33, 0x30, + 0x29, 0x2a, 0x2e, 0x32, 0x34, 0x3a, 0x3f, 0x40, 0x3c, 0x3c, 0x3b, 0x3b, + 0x38, 0x35, 0x3b, 0x3d, 0x3a, 0x3d, 0x3e, 0x42, 0x4a, 0x46, 0x43, 0x44, + 0x4a, 0x49, 0x4c, 0x4c, 0x48, 0x45, 0x48, 0x49, 0x47, 0x4c, 0x4e, 
0x52, + 0x52, 0x56, 0x58, 0x5b, 0x5d, 0x55, 0x4a, 0x41, 0x3f, 0x48, 0x4f, 0x52, + 0x58, 0x5a, 0x58, 0x55, 0x5d, 0x63, 0x67, 0x68, 0x6a, 0x6a, 0x66, 0x61, + 0x6a, 0x6e, 0x6f, 0x6d, 0x6a, 0x68, 0x68, 0x6d, 0x6b, 0x6b, 0x6d, 0x6d, + 0x6f, 0x6f, 0x6d, 0x6e, 0x6c, 0x6a, 0x6e, 0x6e, 0x6d, 0x6d, 0x6e, 0x6e, + 0x6e, 0x6e, 0x70, 0x70, 0x72, 0x74, 0x75, 0x74, 0x75, 0x75, 0x74, 0x75, + 0x75, 0x76, 0x74, 0x74, 0x75, 0x75, 0x75, 0x76, 0x76, 0x76, 0x76, 0x77, + 0x77, 0x77, 0x78, 0x79, 0x79, 0x79, 0x79, 0x79, 0x78, 0x76, 0x71, 0x6b, + 0x5b, 0x3a, 0xfe, 0xcf, 0xc8, 0xcd, 0xda, 0xc5, 0xc3, 0xd3, 0xd4, 0xc0, + 0xb9, 0xbe, 0xbb, 0xb8, 0xbb, 0xcd, 0xc2, 0xc2, 0xc0, 0xb5, 0xb0, 0xbc, + 0xbf, 0xbb, 0xbd, 0xbf, 0xc8, 0xb5, 0xba, 0xc0, 0xbf, 0xbc, 0xba, 0xbf, + 0xc7, 0xcf, 0xcb, 0xbe, 0xc8, 0xd1, 0xc9, 0xb9, 0xb9, 0xc1, 0xc3, 0xbd, + 0xc0, 0xc6, 0xce, 0xcf, 0xcc, 0xbe, 0xbc, 0xd6, 0xd4, 0xc2, 0xc5, 0xbe, + 0xae, 0xb2, 0xbe, 0xb5, 0xbe, 0xb9, 0xb5, 0xb6, 0xb0, 0xa9, 0xb0, 0xca, + 0xc6, 0xb8, 0xb2, 0xb8, 0xbc, 0xc9, 0xca, 0xbd, 0xb5, 0xb8, 0xbf, 0xc0, + 0x22, 0x22, 0x22, 0x24, 0x24, 0x28, 0x2c, 0x2e, 0x30, 0x2e, 0x2f, 0x2f, + 0x2d, 0x2e, 0x2f, 0x2e, 0x2f, 0x34, 0x34, 0x2e, 0x27, 0x29, 0x2b, 0x2e, + 0x34, 0x3a, 0x3c, 0x3c, 0x3b, 0x36, 0x36, 0x3d, 0x33, 0x33, 0x3b, 0x3d, + 0x3c, 0x40, 0x3f, 0x43, 0x49, 0x46, 0x44, 0x46, 0x46, 0x48, 0x4c, 0x48, + 0x42, 0x45, 0x4c, 0x4d, 0x4a, 0x49, 0x52, 0x50, 0x54, 0x55, 0x58, 0x5b, + 0x5c, 0x4e, 0x41, 0x3e, 0x3c, 0x4a, 0x50, 0x53, 0x58, 0x58, 0x56, 0x58, + 0x61, 0x64, 0x68, 0x6a, 0x6b, 0x6a, 0x62, 0x63, 0x6b, 0x6c, 0x6a, 0x67, + 0x69, 0x67, 0x63, 0x68, 0x6a, 0x6a, 0x6b, 0x6d, 0x6e, 0x6d, 0x6c, 0x6d, + 0x6a, 0x69, 0x6c, 0x6e, 0x6d, 0x6d, 0x6e, 0x6f, 0x6f, 0x6e, 0x70, 0x6c, + 0x70, 0x74, 0x74, 0x75, 0x76, 0x76, 0x75, 0x76, 0x76, 0x77, 0x77, 0x77, + 0x76, 0x76, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x78, 0x77, 0x77, + 0x78, 0x78, 0x78, 0x79, 0x78, 0x77, 0x73, 0x6a, 0x55, 0x2b, 0xf8, 0xe3, + 0xda, 0xd1, 0xdc, 0xcd, 0xc8, 0xce, 0xd4, 0xc4, 0xbb, 0xc3, 0xbf, 
0xb8, + 0xbe, 0xcf, 0xcb, 0xcf, 0xbe, 0xb0, 0xb4, 0xbf, 0xce, 0xc8, 0xbc, 0xbc, + 0xcf, 0xbd, 0xb6, 0xb9, 0xc3, 0xc3, 0xbc, 0xba, 0xc4, 0xc7, 0xc1, 0xba, + 0xc2, 0xc8, 0xc1, 0xb6, 0xbc, 0xca, 0xc8, 0xc2, 0xc4, 0xdc, 0xcd, 0xc9, + 0xc8, 0xb4, 0xb5, 0xc1, 0xce, 0xc2, 0xc1, 0xc5, 0xb6, 0xb7, 0xba, 0xb0, + 0xb6, 0xc1, 0xc6, 0xc6, 0xbc, 0xac, 0xb2, 0xc2, 0xc1, 0xb7, 0xad, 0xb3, + 0xbf, 0xcb, 0xcc, 0xc8, 0xb6, 0xb2, 0xb9, 0xbf, 0x1d, 0x21, 0x24, 0x26, + 0x27, 0x28, 0x26, 0x27, 0x2a, 0x2b, 0x2b, 0x2b, 0x28, 0x2a, 0x2a, 0x28, + 0x28, 0x31, 0x33, 0x27, 0x27, 0x2a, 0x28, 0x2a, 0x34, 0x38, 0x35, 0x3a, + 0x38, 0x2f, 0x35, 0x3a, 0x34, 0x35, 0x3b, 0x3d, 0x3d, 0x3e, 0x40, 0x44, + 0x49, 0x46, 0x46, 0x46, 0x44, 0x48, 0x47, 0x45, 0x46, 0x48, 0x4c, 0x4c, + 0x48, 0x4f, 0x52, 0x4d, 0x54, 0x55, 0x58, 0x58, 0x59, 0x48, 0x3c, 0x3f, + 0x40, 0x4e, 0x53, 0x55, 0x5a, 0x59, 0x58, 0x5e, 0x64, 0x68, 0x6a, 0x6c, + 0x6c, 0x65, 0x5f, 0x67, 0x6c, 0x6b, 0x66, 0x64, 0x68, 0x64, 0x63, 0x65, + 0x68, 0x68, 0x6b, 0x6c, 0x6d, 0x6c, 0x6a, 0x6c, 0x6a, 0x67, 0x6a, 0x6e, + 0x6c, 0x6d, 0x6d, 0x6f, 0x6f, 0x6e, 0x6f, 0x6d, 0x6e, 0x73, 0x74, 0x74, + 0x74, 0x75, 0x76, 0x76, 0x77, 0x75, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, + 0x77, 0x77, 0x76, 0x77, 0x77, 0x78, 0x78, 0x77, 0x77, 0x77, 0x77, 0x78, + 0x78, 0x78, 0x76, 0x6e, 0x5e, 0x45, 0x2b, 0x17, 0xfd, 0xd8, 0xce, 0xc4, + 0xc8, 0xce, 0xd4, 0xc5, 0xbd, 0xc1, 0xbf, 0xb8, 0xb7, 0xc6, 0xca, 0xd8, + 0xc8, 0xae, 0xb4, 0xbe, 0xce, 0xc8, 0xba, 0xc0, 0xcd, 0xc6, 0xba, 0xbf, + 0xbe, 0xc0, 0xbe, 0xb8, 0xc9, 0xce, 0xc6, 0xba, 0xb6, 0xb6, 0xb8, 0xb9, + 0xc6, 0xcb, 0xc3, 0xbc, 0xc3, 0xd6, 0xcb, 0xc8, 0xce, 0xc2, 0xb6, 0xb4, + 0xc1, 0xc2, 0xc3, 0xc4, 0xbe, 0xbe, 0xb6, 0xb6, 0xb0, 0xb9, 0xc2, 0xcf, + 0xba, 0xb4, 0xb6, 0xb7, 0xc1, 0xb2, 0xb1, 0xc1, 0xc8, 0xcc, 0xc2, 0xc9, + 0xbc, 0xb8, 0xba, 0xcb, 0x1f, 0x23, 0x25, 0x24, 0x22, 0x22, 0x23, 0x22, + 0x24, 0x28, 0x2b, 0x24, 0x22, 0x27, 0x27, 0x27, 0x2b, 0x34, 0x2b, 0x26, + 0x28, 0x27, 0x26, 0x28, 0x34, 0x33, 0x32, 0x3a, 0x38, 0x31, 0x34, 
0x35, + 0x35, 0x36, 0x3a, 0x3c, 0x3f, 0x42, 0x40, 0x40, 0x46, 0x46, 0x47, 0x46, + 0x44, 0x45, 0x40, 0x46, 0x4c, 0x4c, 0x4e, 0x4f, 0x4f, 0x52, 0x4f, 0x4e, + 0x54, 0x58, 0x58, 0x53, 0x56, 0x47, 0x40, 0x42, 0x46, 0x53, 0x54, 0x57, + 0x5a, 0x59, 0x5c, 0x61, 0x65, 0x69, 0x69, 0x6a, 0x68, 0x5f, 0x60, 0x69, + 0x6a, 0x67, 0x64, 0x64, 0x68, 0x62, 0x64, 0x63, 0x64, 0x66, 0x6b, 0x6d, + 0x6d, 0x6b, 0x6a, 0x6a, 0x6a, 0x67, 0x68, 0x6e, 0x6b, 0x6c, 0x6c, 0x6e, + 0x70, 0x6e, 0x6e, 0x70, 0x6f, 0x72, 0x72, 0x73, 0x73, 0x74, 0x76, 0x76, + 0x77, 0x76, 0x76, 0x76, 0x77, 0x77, 0x77, 0x77, 0x76, 0x77, 0x77, 0x77, + 0x78, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x79, 0x79, 0x78, 0x77, 0x75, + 0x73, 0x6a, 0x5d, 0x43, 0x1c, 0xf0, 0xda, 0xcd, 0xc5, 0xd2, 0xd4, 0xc8, + 0xc0, 0xc2, 0xbb, 0xb9, 0xba, 0xbe, 0xbc, 0xc4, 0xc6, 0xb4, 0xb8, 0xbb, + 0xc3, 0xc3, 0xb6, 0xbc, 0xc8, 0xc8, 0xbb, 0xc4, 0xc4, 0xbf, 0xbc, 0xbc, + 0xce, 0xcc, 0xbe, 0xba, 0xb0, 0xb1, 0xb6, 0xba, 0xc7, 0xca, 0xc1, 0xbb, + 0xb9, 0xc2, 0xcf, 0xcc, 0xe7, 0xe2, 0xc5, 0xb7, 0xc1, 0xca, 0xca, 0xba, + 0xb9, 0xbe, 0xb5, 0xb1, 0xab, 0xaa, 0xb6, 0xbd, 0xb2, 0xb6, 0xbc, 0xb7, + 0xbf, 0xba, 0xbc, 0xc2, 0xc8, 0xcb, 0xca, 0xd8, 0xc6, 0xbf, 0xd5, 0xdf, + 0x1c, 0x22, 0x22, 0x1d, 0x1c, 0x1f, 0x1f, 0x1e, 0x1f, 0x25, 0x28, 0x24, + 0x25, 0x29, 0x25, 0x26, 0x2e, 0x31, 0x28, 0x27, 0x28, 0x24, 0x23, 0x2b, + 0x34, 0x30, 0x32, 0x3b, 0x39, 0x33, 0x36, 0x37, 0x37, 0x35, 0x39, 0x3d, + 0x3f, 0x41, 0x3d, 0x40, 0x45, 0x47, 0x46, 0x45, 0x43, 0x40, 0x43, 0x48, + 0x4a, 0x4e, 0x51, 0x53, 0x50, 0x4d, 0x4c, 0x4c, 0x52, 0x58, 0x56, 0x4c, + 0x50, 0x49, 0x42, 0x42, 0x49, 0x55, 0x57, 0x59, 0x5b, 0x59, 0x5e, 0x64, + 0x66, 0x69, 0x6a, 0x66, 0x60, 0x5f, 0x62, 0x6a, 0x67, 0x65, 0x64, 0x64, + 0x66, 0x61, 0x60, 0x61, 0x64, 0x65, 0x6a, 0x6c, 0x6c, 0x69, 0x68, 0x69, + 0x6a, 0x69, 0x69, 0x6d, 0x6b, 0x6c, 0x6c, 0x6d, 0x70, 0x6e, 0x6f, 0x6f, + 0x70, 0x70, 0x72, 0x72, 0x72, 0x73, 0x75, 0x76, 0x77, 0x76, 0x76, 0x76, + 0x76, 0x77, 0x77, 0x76, 0x76, 0x76, 0x76, 0x77, 0x78, 0x78, 0x78, 
0x76, + 0x78, 0x78, 0x79, 0x78, 0x77, 0x77, 0x77, 0x76, 0x70, 0x67, 0x5a, 0x46, + 0x1b, 0xec, 0xd1, 0xcf, 0xcb, 0xce, 0xd3, 0xce, 0xcb, 0xc9, 0xc3, 0xc5, + 0xc8, 0xc8, 0xc2, 0xb6, 0xb8, 0xbb, 0xbc, 0xb9, 0xc0, 0xd1, 0xc0, 0xb5, + 0xc3, 0xc8, 0xc1, 0xc8, 0xd5, 0xd2, 0xcf, 0xca, 0xd4, 0xc2, 0xb9, 0xbc, + 0xbc, 0xb3, 0xb0, 0xb3, 0xbc, 0xc2, 0xb4, 0xb1, 0xad, 0xb6, 0xd2, 0xd9, + 0xd2, 0xd2, 0xc3, 0xc6, 0xe1, 0xdb, 0xc2, 0xb5, 0xc3, 0xca, 0xb3, 0xaf, + 0xac, 0xb9, 0xc8, 0xb9, 0xb3, 0xbc, 0xc1, 0xbe, 0xbc, 0xbf, 0xbc, 0xc0, + 0xd4, 0xc9, 0xc2, 0xca, 0xc6, 0xc8, 0xdc, 0xe6, 0x1a, 0x1c, 0x1a, 0x18, + 0x19, 0x20, 0x1d, 0x1c, 0x1d, 0x23, 0x28, 0x28, 0x28, 0x29, 0x28, 0x2a, + 0x2e, 0x2e, 0x23, 0x22, 0x1e, 0x1b, 0x22, 0x2a, 0x33, 0x30, 0x30, 0x36, + 0x39, 0x34, 0x34, 0x35, 0x34, 0x35, 0x3a, 0x3f, 0x40, 0x40, 0x3a, 0x3e, + 0x44, 0x46, 0x45, 0x44, 0x40, 0x40, 0x44, 0x47, 0x4b, 0x52, 0x54, 0x53, + 0x4c, 0x4a, 0x49, 0x4c, 0x53, 0x56, 0x52, 0x4a, 0x49, 0x4a, 0x44, 0x43, + 0x4c, 0x57, 0x58, 0x58, 0x59, 0x5a, 0x62, 0x64, 0x67, 0x69, 0x68, 0x63, + 0x5d, 0x5e, 0x63, 0x68, 0x63, 0x64, 0x62, 0x64, 0x64, 0x60, 0x5e, 0x61, + 0x63, 0x66, 0x6a, 0x6a, 0x6c, 0x69, 0x65, 0x66, 0x6a, 0x69, 0x6a, 0x6c, + 0x6b, 0x6d, 0x6d, 0x6d, 0x70, 0x70, 0x70, 0x70, 0x71, 0x71, 0x72, 0x71, + 0x70, 0x72, 0x72, 0x75, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x77, 0x76, + 0x74, 0x75, 0x76, 0x76, 0x79, 0x79, 0x79, 0x78, 0x77, 0x78, 0x79, 0x77, + 0x77, 0x77, 0x78, 0x77, 0x75, 0x68, 0x58, 0x41, 0x15, 0xe9, 0xd2, 0xd0, + 0xda, 0xd6, 0xd3, 0xd0, 0xca, 0xc4, 0xca, 0xec, 0xe7, 0xdb, 0xcc, 0xb8, + 0xb3, 0xb6, 0xb7, 0xbc, 0xcd, 0xe0, 0xd0, 0xbb, 0xc0, 0xc7, 0xc7, 0xc8, + 0xcb, 0xce, 0xd5, 0xcd, 0xc8, 0xc1, 0xba, 0xbe, 0xbe, 0xb0, 0xb0, 0xb6, + 0xbc, 0xc0, 0xb8, 0xae, 0xae, 0xc1, 0xd1, 0xdb, 0xe6, 0xdb, 0xcf, 0xd4, + 0xe5, 0xd0, 0xbb, 0xc4, 0xd3, 0xcb, 0xb6, 0xb2, 0xb3, 0xd3, 0xd4, 0xbb, + 0xbe, 0xc2, 0xc5, 0xc2, 0xbc, 0xc2, 0xb6, 0xbf, 0xd7, 0xc6, 0xbc, 0xc1, + 0xc9, 0xd2, 0xd0, 0xe2, 0x12, 0x10, 0x11, 0x16, 0x1f, 0x21, 0x1e, 
0x1c, + 0x1f, 0x27, 0x28, 0x29, 0x2c, 0x2a, 0x28, 0x26, 0x28, 0x25, 0x1e, 0x16, + 0x15, 0x19, 0x20, 0x28, 0x2f, 0x2e, 0x30, 0x34, 0x38, 0x34, 0x35, 0x34, + 0x33, 0x36, 0x37, 0x3d, 0x40, 0x3e, 0x3a, 0x40, 0x45, 0x46, 0x43, 0x3c, + 0x3e, 0x42, 0x46, 0x4b, 0x51, 0x53, 0x50, 0x4a, 0x4c, 0x49, 0x4b, 0x4d, + 0x53, 0x54, 0x4a, 0x48, 0x45, 0x46, 0x48, 0x46, 0x4d, 0x53, 0x58, 0x59, + 0x58, 0x5b, 0x62, 0x64, 0x66, 0x65, 0x62, 0x5f, 0x5c, 0x60, 0x65, 0x66, + 0x62, 0x63, 0x62, 0x64, 0x64, 0x5e, 0x5f, 0x63, 0x64, 0x66, 0x68, 0x69, + 0x6b, 0x67, 0x66, 0x65, 0x68, 0x69, 0x6a, 0x6c, 0x6a, 0x6c, 0x6d, 0x6c, + 0x70, 0x71, 0x6f, 0x70, 0x71, 0x72, 0x70, 0x71, 0x71, 0x72, 0x71, 0x71, + 0x75, 0x76, 0x77, 0x77, 0x76, 0x76, 0x76, 0x76, 0x75, 0x73, 0x76, 0x77, + 0x79, 0x79, 0x79, 0x78, 0x77, 0x77, 0x78, 0x79, 0x79, 0x79, 0x78, 0x78, + 0x76, 0x72, 0x68, 0x59, 0x35, 0xff, 0xe2, 0xcb, 0xda, 0xd9, 0xd4, 0xcf, + 0xce, 0xc8, 0xc5, 0xe7, 0xee, 0xe7, 0xd7, 0xc8, 0xc3, 0xce, 0xc5, 0xc2, + 0xd8, 0xd5, 0xd3, 0xc1, 0xba, 0xc4, 0xc6, 0xc5, 0xbc, 0xbe, 0xce, 0xc2, + 0xc2, 0xc0, 0xba, 0xbc, 0xbb, 0xb5, 0xb4, 0xc4, 0xc5, 0xc2, 0xc7, 0xbb, + 0xbd, 0xbd, 0xc6, 0xe0, 0xeb, 0xda, 0xcd, 0xd5, 0xd5, 0xc4, 0xbc, 0xcc, + 0xd3, 0xc7, 0xbb, 0xba, 0xbb, 0xd4, 0xc9, 0xc2, 0xbd, 0xc0, 0xcb, 0xcd, + 0xc8, 0xbc, 0xb2, 0xbb, 0xd7, 0xcf, 0xc8, 0xc7, 0xc9, 0xd9, 0xd4, 0xce, + 0x07, 0x0a, 0x10, 0x17, 0x1d, 0x1d, 0x1d, 0x20, 0x24, 0x2a, 0x2c, 0x28, + 0x28, 0x23, 0x22, 0x22, 0x27, 0x23, 0x18, 0x16, 0x19, 0x1d, 0x24, 0x28, + 0x2d, 0x2e, 0x30, 0x37, 0x38, 0x32, 0x32, 0x33, 0x34, 0x34, 0x37, 0x3c, + 0x3f, 0x3a, 0x3a, 0x3f, 0x40, 0x42, 0x3d, 0x3a, 0x3e, 0x42, 0x4a, 0x51, + 0x51, 0x4d, 0x4b, 0x4a, 0x4b, 0x4a, 0x4c, 0x49, 0x52, 0x53, 0x41, 0x40, + 0x44, 0x45, 0x48, 0x4c, 0x4d, 0x52, 0x57, 0x59, 0x58, 0x5b, 0x62, 0x64, + 0x65, 0x63, 0x5e, 0x5f, 0x5f, 0x64, 0x65, 0x64, 0x62, 0x63, 0x62, 0x63, + 0x63, 0x5e, 0x5f, 0x60, 0x5e, 0x65, 0x66, 0x65, 0x6a, 0x66, 0x69, 0x65, + 0x68, 0x6a, 0x6a, 0x6c, 0x6c, 0x6b, 0x6c, 0x6c, 0x6f, 0x73, 0x6e, 
0x6e, + 0x70, 0x71, 0x71, 0x71, 0x73, 0x73, 0x70, 0x70, 0x72, 0x74, 0x76, 0x78, + 0x77, 0x76, 0x76, 0x76, 0x76, 0x76, 0x75, 0x77, 0x78, 0x78, 0x79, 0x79, + 0x78, 0x77, 0x77, 0x78, 0x78, 0x78, 0x78, 0x78, 0x77, 0x76, 0x6b, 0x5d, + 0x38, 0x01, 0xe2, 0xd6, 0xd4, 0xcd, 0xd4, 0xd8, 0xe0, 0xe9, 0xdc, 0xe6, + 0xec, 0xf2, 0xeb, 0xd7, 0xcf, 0xe6, 0xda, 0xc5, 0xcd, 0xd5, 0xcf, 0xd2, + 0xd0, 0xcf, 0xc1, 0xc2, 0xbc, 0xbd, 0xc2, 0xbc, 0xc3, 0xc0, 0xb6, 0xb8, + 0xb3, 0xb0, 0xb6, 0xce, 0xdb, 0xbc, 0xbb, 0xc2, 0xc2, 0xb7, 0xd2, 0xda, + 0xcb, 0xcf, 0xd4, 0xcd, 0xce, 0xd1, 0xc6, 0xce, 0xd0, 0xca, 0xbf, 0xbc, + 0xc2, 0xc8, 0xc8, 0xc5, 0xb7, 0xbe, 0xc3, 0xca, 0xc5, 0xb9, 0xb6, 0xc6, + 0xd8, 0xd1, 0xd6, 0xd4, 0xc8, 0xd6, 0xda, 0xce, 0xff, 0x08, 0x09, 0x10, + 0x19, 0x1a, 0x1f, 0x22, 0x26, 0x29, 0x28, 0x25, 0x24, 0x20, 0x22, 0x27, + 0x25, 0x22, 0x1a, 0x1f, 0x1e, 0x1f, 0x26, 0x28, 0x2e, 0x2f, 0x2e, 0x37, + 0x36, 0x30, 0x30, 0x32, 0x34, 0x34, 0x39, 0x3b, 0x3c, 0x38, 0x3b, 0x3c, + 0x3c, 0x3e, 0x3a, 0x3a, 0x3f, 0x42, 0x4c, 0x50, 0x49, 0x4d, 0x4e, 0x4c, + 0x4a, 0x4c, 0x4a, 0x46, 0x4c, 0x50, 0x36, 0x3a, 0x45, 0x40, 0x4b, 0x50, + 0x48, 0x52, 0x57, 0x59, 0x59, 0x5d, 0x5f, 0x63, 0x64, 0x5f, 0x5c, 0x60, + 0x62, 0x64, 0x63, 0x63, 0x64, 0x63, 0x63, 0x64, 0x64, 0x5d, 0x5e, 0x5c, + 0x5c, 0x65, 0x65, 0x64, 0x6a, 0x66, 0x6a, 0x68, 0x6a, 0x6a, 0x6c, 0x6c, + 0x6c, 0x6b, 0x6b, 0x6c, 0x6c, 0x71, 0x6f, 0x6d, 0x70, 0x70, 0x71, 0x73, + 0x73, 0x73, 0x71, 0x6f, 0x70, 0x73, 0x75, 0x77, 0x77, 0x78, 0x76, 0x76, + 0x76, 0x77, 0x77, 0x76, 0x77, 0x76, 0x78, 0x79, 0x79, 0x78, 0x77, 0x77, + 0x78, 0x79, 0x79, 0x79, 0x78, 0x76, 0x72, 0x67, 0x4a, 0x26, 0xf5, 0xd4, + 0xd2, 0xd6, 0xda, 0xd7, 0xd4, 0xe4, 0xda, 0xed, 0xf2, 0xf4, 0xf4, 0xe7, + 0xd3, 0xc8, 0xca, 0xc8, 0xcd, 0xd4, 0xcd, 0xce, 0xdd, 0xcf, 0xc6, 0xc2, + 0xc7, 0xcc, 0xc3, 0xc9, 0xd1, 0xc6, 0xbc, 0xb1, 0xaa, 0xb4, 0xba, 0xc1, + 0xe3, 0xd4, 0xbd, 0xbf, 0xc9, 0xc6, 0xcb, 0xce, 0xc4, 0xd4, 0xd4, 0xd3, + 0xce, 0xce, 0xc9, 0xcd, 0xd0, 0xcb, 0xc6, 0xce, 0xca, 0xbf, 0xc0, 
0xc4, + 0xb9, 0xc5, 0xc9, 0xc3, 0xb8, 0xb2, 0xb4, 0xcd, 0xd0, 0xd2, 0xcc, 0xdc, + 0xcb, 0xc8, 0xcc, 0xce, 0xf2, 0xfa, 0xfe, 0x0b, 0x14, 0x15, 0x1b, 0x1f, + 0x27, 0x27, 0x24, 0x22, 0x23, 0x22, 0x25, 0x27, 0x1f, 0x20, 0x23, 0x28, + 0x20, 0x21, 0x28, 0x2a, 0x2e, 0x2f, 0x2e, 0x34, 0x34, 0x32, 0x31, 0x33, + 0x33, 0x34, 0x39, 0x3a, 0x39, 0x3a, 0x3d, 0x3d, 0x3c, 0x3a, 0x33, 0x31, + 0x3a, 0x46, 0x49, 0x47, 0x48, 0x4e, 0x4e, 0x49, 0x47, 0x4b, 0x45, 0x44, + 0x46, 0x4c, 0x34, 0x39, 0x41, 0x42, 0x4e, 0x51, 0x4a, 0x53, 0x55, 0x58, + 0x5d, 0x5f, 0x61, 0x61, 0x63, 0x5e, 0x59, 0x5e, 0x63, 0x63, 0x64, 0x64, + 0x63, 0x64, 0x62, 0x64, 0x62, 0x5b, 0x5c, 0x58, 0x5a, 0x63, 0x64, 0x64, + 0x68, 0x66, 0x69, 0x68, 0x6a, 0x69, 0x6d, 0x6c, 0x6e, 0x6b, 0x6a, 0x6c, + 0x6c, 0x70, 0x70, 0x6c, 0x70, 0x6f, 0x70, 0x74, 0x73, 0x71, 0x72, 0x71, + 0x70, 0x73, 0x74, 0x75, 0x76, 0x77, 0x77, 0x77, 0x76, 0x76, 0x77, 0x78, + 0x76, 0x77, 0x77, 0x78, 0x78, 0x78, 0x76, 0x77, 0x78, 0x79, 0x79, 0x79, + 0x77, 0x77, 0x76, 0x6e, 0x5e, 0x44, 0x0c, 0xe0, 0xcc, 0xcd, 0xd3, 0xcf, + 0xcc, 0xd0, 0xbe, 0xcd, 0xde, 0xea, 0xf3, 0xe9, 0xec, 0xd4, 0xca, 0xd5, + 0xce, 0xd1, 0xd7, 0xc9, 0xca, 0xbe, 0xb9, 0xb3, 0xbf, 0xcd, 0xd2, 0xd8, + 0xd8, 0xc2, 0xbe, 0xba, 0xb1, 0xc0, 0xc5, 0xbc, 0xd0, 0xe1, 0xcf, 0xbf, + 0xd2, 0xda, 0xe2, 0xd1, 0xce, 0xda, 0xd4, 0xd3, 0xcc, 0xbd, 0xc4, 0xc3, + 0xc7, 0xcf, 0xc6, 0xc4, 0xcc, 0xc2, 0xb5, 0xb9, 0xbc, 0xcf, 0xcd, 0xc9, + 0xce, 0xcc, 0xb7, 0xce, 0xc6, 0xcf, 0xc9, 0xc5, 0xcf, 0xbd, 0xb7, 0xc7, + 0xe3, 0xf1, 0xf9, 0x04, 0x09, 0x0e, 0x10, 0x1b, 0x22, 0x21, 0x1f, 0x1d, + 0x21, 0x22, 0x22, 0x20, 0x1c, 0x22, 0x28, 0x2a, 0x25, 0x26, 0x2a, 0x2b, + 0x2d, 0x31, 0x31, 0x34, 0x31, 0x30, 0x32, 0x32, 0x34, 0x36, 0x38, 0x36, + 0x37, 0x3a, 0x3b, 0x3a, 0x34, 0x2f, 0x2d, 0x35, 0x42, 0x49, 0x46, 0x41, + 0x4b, 0x50, 0x4d, 0x49, 0x48, 0x45, 0x3d, 0x42, 0x43, 0x44, 0x3c, 0x3c, + 0x43, 0x4a, 0x4d, 0x4f, 0x4c, 0x52, 0x54, 0x55, 0x5e, 0x60, 0x61, 0x5e, + 0x60, 0x5d, 0x5a, 0x5d, 0x63, 0x60, 0x63, 0x63, 0x62, 0x64, 0x5f, 
0x64, + 0x5f, 0x5a, 0x59, 0x56, 0x58, 0x62, 0x62, 0x64, 0x66, 0x69, 0x69, 0x64, + 0x68, 0x6a, 0x6c, 0x6d, 0x6e, 0x6b, 0x6a, 0x6d, 0x6f, 0x6f, 0x70, 0x6e, + 0x70, 0x6f, 0x6f, 0x73, 0x74, 0x73, 0x73, 0x73, 0x71, 0x72, 0x74, 0x75, + 0x75, 0x76, 0x76, 0x77, 0x76, 0x76, 0x76, 0x78, 0x78, 0x77, 0x77, 0x78, + 0x78, 0x78, 0x78, 0x78, 0x79, 0x78, 0x79, 0x79, 0x76, 0x78, 0x77, 0x72, + 0x6b, 0x50, 0x19, 0xe4, 0xd2, 0xc7, 0xc8, 0xc7, 0xc6, 0xc0, 0xb4, 0xc3, + 0xd7, 0xd1, 0xe7, 0xf2, 0xfe, 0xee, 0xda, 0xe4, 0xda, 0xda, 0xe1, 0xca, + 0xbf, 0xb8, 0xba, 0xb7, 0xb6, 0xc2, 0xd4, 0xda, 0xd4, 0xc8, 0xbf, 0xc1, + 0xbd, 0xcd, 0xc1, 0xcd, 0xd2, 0xd8, 0xdd, 0xda, 0xe0, 0xdd, 0xdc, 0xce, + 0xda, 0xce, 0xd9, 0xd9, 0xcc, 0xbd, 0xc1, 0xcd, 0xcc, 0xd1, 0xcb, 0xbc, + 0xb7, 0xc7, 0xc0, 0xb9, 0xc0, 0xe9, 0xdf, 0xcd, 0xc8, 0xc8, 0xb6, 0xc6, + 0xc5, 0xc9, 0xc9, 0xbd, 0xbe, 0xcb, 0xbb, 0xbb, 0xd5, 0xe8, 0xef, 0xfb, + 0x05, 0x0a, 0x0a, 0x10, 0x15, 0x17, 0x17, 0x1a, 0x1c, 0x1d, 0x1c, 0x1c, + 0x22, 0x25, 0x25, 0x28, 0x28, 0x28, 0x2a, 0x2b, 0x2b, 0x30, 0x31, 0x32, + 0x2e, 0x2e, 0x2d, 0x2e, 0x32, 0x35, 0x35, 0x36, 0x38, 0x3a, 0x37, 0x30, + 0x2e, 0x31, 0x37, 0x3e, 0x44, 0x48, 0x48, 0x46, 0x4e, 0x4c, 0x4c, 0x49, + 0x49, 0x40, 0x3d, 0x41, 0x3a, 0x40, 0x40, 0x3e, 0x42, 0x4b, 0x4d, 0x4f, + 0x4c, 0x4e, 0x52, 0x53, 0x5b, 0x5e, 0x5d, 0x5a, 0x5e, 0x5c, 0x59, 0x5d, + 0x64, 0x61, 0x63, 0x62, 0x63, 0x62, 0x60, 0x64, 0x5e, 0x5a, 0x58, 0x55, + 0x59, 0x61, 0x64, 0x64, 0x64, 0x6a, 0x6b, 0x64, 0x67, 0x6a, 0x6a, 0x6c, + 0x6a, 0x6b, 0x6d, 0x6b, 0x6f, 0x6d, 0x6d, 0x6f, 0x6f, 0x6d, 0x6e, 0x71, + 0x74, 0x72, 0x73, 0x73, 0x72, 0x71, 0x73, 0x75, 0x75, 0x76, 0x75, 0x76, + 0x77, 0x76, 0x76, 0x76, 0x78, 0x77, 0x77, 0x78, 0x77, 0x78, 0x77, 0x78, + 0x79, 0x79, 0x78, 0x77, 0x78, 0x78, 0x79, 0x74, 0x69, 0x48, 0x16, 0xed, + 0xe6, 0xd8, 0xd1, 0xce, 0xd0, 0xd5, 0xcf, 0xd5, 0xcc, 0xc2, 0xcf, 0xd8, + 0xe1, 0xf2, 0xe6, 0xdc, 0xe0, 0xd1, 0xc8, 0xc5, 0xc2, 0xbf, 0xc2, 0xc4, + 0xc4, 0xc2, 0xc2, 0xd1, 0xd8, 0xd4, 0xc4, 0xc2, 0xc0, 0xca, 0xd3, 
0xc8, + 0xc4, 0xcd, 0xdb, 0xe4, 0xe4, 0xdf, 0xce, 0xc9, 0xe0, 0xd7, 0xcc, 0xd8, + 0xd1, 0xbd, 0xbd, 0xc0, 0xd4, 0xcf, 0xcf, 0xc5, 0xc0, 0xca, 0xc2, 0xb6, + 0xc2, 0xea, 0xdc, 0xcb, 0xc1, 0xbf, 0xbb, 0xc0, 0xc8, 0xcb, 0xca, 0xc8, + 0xc2, 0xc8, 0xb9, 0xb1, 0xcd, 0xdd, 0xe7, 0xf1, 0xf7, 0xfe, 0xff, 0x05, + 0x08, 0x0f, 0x10, 0x11, 0x13, 0x11, 0x10, 0x1a, 0x20, 0x23, 0x27, 0x28, + 0x29, 0x29, 0x2b, 0x2b, 0x2a, 0x2d, 0x2d, 0x2e, 0x2c, 0x2d, 0x2c, 0x2f, + 0x34, 0x31, 0x32, 0x36, 0x37, 0x34, 0x30, 0x2f, 0x33, 0x39, 0x3e, 0x43, + 0x47, 0x4b, 0x48, 0x48, 0x4f, 0x4b, 0x4c, 0x46, 0x44, 0x40, 0x45, 0x46, + 0x3a, 0x3a, 0x45, 0x46, 0x46, 0x49, 0x4f, 0x50, 0x4e, 0x50, 0x51, 0x52, + 0x59, 0x5c, 0x58, 0x58, 0x5a, 0x5b, 0x5b, 0x5c, 0x60, 0x60, 0x64, 0x60, + 0x61, 0x5e, 0x62, 0x63, 0x61, 0x5a, 0x59, 0x58, 0x58, 0x5d, 0x64, 0x64, + 0x64, 0x65, 0x6a, 0x68, 0x66, 0x6a, 0x69, 0x6b, 0x68, 0x6a, 0x6b, 0x69, + 0x6d, 0x6d, 0x6a, 0x6e, 0x6d, 0x6d, 0x6d, 0x71, 0x74, 0x72, 0x72, 0x71, + 0x72, 0x71, 0x71, 0x74, 0x75, 0x75, 0x74, 0x74, 0x76, 0x77, 0x76, 0x76, + 0x76, 0x76, 0x77, 0x78, 0x78, 0x78, 0x76, 0x77, 0x78, 0x78, 0x78, 0x77, + 0x78, 0x78, 0x79, 0x77, 0x70, 0x5f, 0x47, 0x22, 0xf2, 0xef, 0xdc, 0xd4, + 0xd0, 0xe0, 0xeb, 0xdc, 0xd9, 0xce, 0xce, 0xcf, 0xcf, 0xe6, 0xf3, 0xec, + 0xe6, 0xd6, 0xc9, 0xc8, 0xcf, 0xc4, 0xbd, 0xc1, 0xd3, 0xdc, 0xcb, 0xc4, + 0xce, 0xd9, 0xbd, 0xbc, 0xc3, 0xc9, 0xd3, 0xcb, 0xc1, 0xc2, 0xd2, 0xdb, + 0xdc, 0xd4, 0xca, 0xc8, 0xd6, 0xdc, 0xc3, 0xc7, 0xda, 0xcb, 0xc8, 0xbe, + 0xcf, 0xc9, 0xd0, 0xc6, 0xbe, 0xcf, 0xc5, 0xb4, 0xba, 0xdd, 0xd9, 0xc0, + 0xb7, 0xbf, 0xc2, 0xc4, 0xc8, 0xcb, 0xc8, 0xc4, 0xc3, 0xc3, 0xb7, 0xb4, + 0xc8, 0xd2, 0xdc, 0xe2, 0xec, 0xed, 0xf0, 0xf2, 0xf6, 0xfe, 0xff, 0xfe, + 0xfe, 0x04, 0x0b, 0x19, 0x20, 0x27, 0x24, 0x22, 0x27, 0x28, 0x2b, 0x2a, + 0x29, 0x2d, 0x2e, 0x2b, 0x2a, 0x29, 0x2c, 0x30, 0x30, 0x2e, 0x33, 0x36, + 0x2e, 0x2d, 0x2d, 0x31, 0x36, 0x3a, 0x41, 0x47, 0x4a, 0x49, 0x44, 0x46, + 0x4f, 0x4c, 0x4a, 0x46, 0x43, 0x3f, 0x48, 0x46, 0x3c, 0x35, 0x49, 
0x4f, + 0x48, 0x4a, 0x51, 0x4d, 0x50, 0x52, 0x52, 0x52, 0x56, 0x5d, 0x5a, 0x57, + 0x59, 0x5c, 0x5b, 0x5d, 0x5f, 0x61, 0x64, 0x5f, 0x5e, 0x5e, 0x63, 0x61, + 0x5f, 0x5a, 0x58, 0x58, 0x57, 0x5b, 0x61, 0x67, 0x65, 0x63, 0x68, 0x6a, + 0x69, 0x6b, 0x68, 0x6b, 0x68, 0x68, 0x69, 0x69, 0x6a, 0x6e, 0x68, 0x6a, + 0x6c, 0x6d, 0x6c, 0x6f, 0x73, 0x72, 0x72, 0x71, 0x72, 0x71, 0x71, 0x73, + 0x75, 0x75, 0x74, 0x74, 0x74, 0x76, 0x76, 0x76, 0x76, 0x77, 0x77, 0x78, + 0x79, 0x79, 0x78, 0x77, 0x78, 0x78, 0x79, 0x77, 0x79, 0x79, 0x78, 0x79, + 0x77, 0x70, 0x5e, 0x34, 0x14, 0x0a, 0xf5, 0xe0, 0xd6, 0xd4, 0xe3, 0xce, + 0xd2, 0xda, 0xd2, 0xd1, 0xce, 0xcf, 0xe9, 0xf6, 0xfe, 0xec, 0xd3, 0xca, + 0xd2, 0xd6, 0xc8, 0xc8, 0xcc, 0xe2, 0xe2, 0xd4, 0xc6, 0xd0, 0xbd, 0xb9, + 0xc3, 0xc8, 0xcd, 0xc2, 0xc0, 0xc3, 0xd0, 0xe5, 0xe3, 0xd1, 0xd2, 0xc9, + 0xd5, 0xf3, 0xe0, 0xc2, 0xce, 0xda, 0xd6, 0xbb, 0xce, 0xd4, 0xdb, 0xc2, + 0xbb, 0xca, 0xca, 0xbc, 0xbb, 0xd2, 0xd1, 0xb1, 0xb6, 0xc1, 0xc9, 0xd2, + 0xd2, 0xce, 0xc9, 0xc4, 0xc2, 0xd0, 0xc8, 0xc0, 0xc2, 0xca, 0xd8, 0xd6, + 0xe3, 0xdc, 0xd8, 0xde, 0xe6, 0xef, 0xf0, 0xeb, 0xf0, 0xfe, 0x0a, 0x1c, + 0x22, 0x20, 0x1c, 0x1a, 0x21, 0x25, 0x28, 0x28, 0x26, 0x28, 0x2c, 0x27, + 0x27, 0x28, 0x2b, 0x2c, 0x2e, 0x2f, 0x33, 0x30, 0x2e, 0x2f, 0x30, 0x30, + 0x38, 0x3f, 0x42, 0x48, 0x49, 0x41, 0x42, 0x49, 0x4d, 0x49, 0x44, 0x48, + 0x44, 0x40, 0x4b, 0x46, 0x3e, 0x3d, 0x4c, 0x4e, 0x4a, 0x4c, 0x4e, 0x4d, + 0x4f, 0x51, 0x51, 0x4e, 0x52, 0x5b, 0x5a, 0x57, 0x57, 0x5b, 0x5c, 0x5c, + 0x5d, 0x61, 0x63, 0x5e, 0x5c, 0x5e, 0x64, 0x5d, 0x59, 0x58, 0x58, 0x58, + 0x57, 0x5d, 0x60, 0x66, 0x66, 0x64, 0x65, 0x6a, 0x6a, 0x6d, 0x67, 0x69, + 0x67, 0x66, 0x68, 0x69, 0x68, 0x6b, 0x69, 0x69, 0x6b, 0x6b, 0x6d, 0x6d, + 0x71, 0x72, 0x72, 0x72, 0x72, 0x70, 0x71, 0x72, 0x73, 0x75, 0x74, 0x74, + 0x73, 0x74, 0x75, 0x76, 0x76, 0x76, 0x77, 0x76, 0x78, 0x79, 0x78, 0x77, + 0x78, 0x79, 0x78, 0x77, 0x78, 0x78, 0x78, 0x78, 0x77, 0x71, 0x63, 0x46, + 0x19, 0x18, 0x1f, 0x10, 0xf2, 0xdd, 0xe7, 0xce, 0xc7, 0xd0, 0xd6, 
0xc8, + 0xc9, 0xda, 0xe6, 0xe7, 0x06, 0x01, 0xf2, 0xdf, 0xca, 0xd4, 0xd8, 0xc3, + 0xc1, 0xd7, 0xe0, 0xe2, 0xce, 0xc6, 0xbd, 0xc0, 0xc0, 0xc2, 0xc6, 0xbd, + 0xc4, 0xcf, 0xdf, 0xe9, 0xef, 0xe0, 0xcc, 0xbe, 0xcf, 0xe1, 0xde, 0xc8, + 0xcd, 0xda, 0xdc, 0xc6, 0xd1, 0xdc, 0xdd, 0xd7, 0xce, 0xcd, 0xcf, 0xd3, + 0xd4, 0xcc, 0xc4, 0xb0, 0xbf, 0xc8, 0xc6, 0xd2, 0xce, 0xd0, 0xce, 0xcf, + 0xcf, 0xcf, 0xcb, 0xc2, 0xc0, 0xc6, 0xd3, 0xd2, 0xdd, 0xda, 0xd2, 0xd4, + 0xdb, 0xe1, 0xe3, 0xe6, 0xea, 0xf8, 0x11, 0x1d, 0x17, 0x11, 0x13, 0x19, + 0x21, 0x23, 0x22, 0x22, 0x23, 0x26, 0x28, 0x21, 0x26, 0x29, 0x28, 0x28, + 0x2b, 0x30, 0x2f, 0x2e, 0x32, 0x2e, 0x2e, 0x32, 0x3a, 0x3f, 0x44, 0x49, + 0x41, 0x40, 0x43, 0x49, 0x4c, 0x46, 0x41, 0x48, 0x42, 0x41, 0x4a, 0x46, + 0x40, 0x48, 0x4e, 0x4b, 0x4d, 0x50, 0x4f, 0x4c, 0x4e, 0x4e, 0x4f, 0x4c, + 0x52, 0x58, 0x5d, 0x59, 0x56, 0x5a, 0x5d, 0x5d, 0x5e, 0x5f, 0x5f, 0x5c, + 0x5e, 0x62, 0x64, 0x5e, 0x56, 0x56, 0x5a, 0x58, 0x5a, 0x5f, 0x61, 0x65, + 0x67, 0x65, 0x65, 0x68, 0x6b, 0x6e, 0x69, 0x69, 0x67, 0x66, 0x67, 0x6a, + 0x68, 0x69, 0x68, 0x67, 0x6a, 0x6a, 0x6d, 0x6e, 0x6f, 0x71, 0x70, 0x71, + 0x73, 0x71, 0x70, 0x70, 0x71, 0x74, 0x75, 0x74, 0x72, 0x72, 0x74, 0x76, + 0x76, 0x76, 0x77, 0x76, 0x77, 0x7a, 0x79, 0x78, 0x78, 0x79, 0x78, 0x78, + 0x78, 0x78, 0x78, 0x78, 0x78, 0x76, 0x6f, 0x4e, 0x0a, 0xfc, 0x15, 0x26, + 0x1c, 0x03, 0xf4, 0xdc, 0xc8, 0xc6, 0xd6, 0xd4, 0xca, 0xd4, 0xd9, 0xe6, + 0x07, 0x01, 0x04, 0xf9, 0xd0, 0xc9, 0xdb, 0xe5, 0xce, 0xd2, 0xe2, 0xe5, + 0xce, 0xc0, 0xbf, 0xc1, 0xc2, 0xbe, 0xb8, 0xb8, 0xc7, 0xd7, 0xe7, 0xf1, + 0xe9, 0xe7, 0xd1, 0xbf, 0xc3, 0xd3, 0xd1, 0xc7, 0xc7, 0xce, 0xd9, 0xcb, + 0xd7, 0xe6, 0xda, 0xd4, 0xca, 0xd2, 0xda, 0xd9, 0xcc, 0xc6, 0xbe, 0xb6, + 0xc9, 0xcd, 0xc9, 0xe0, 0xcf, 0xce, 0xd7, 0xe0, 0xd3, 0xc1, 0xc8, 0xcb, + 0xbc, 0xb8, 0xc3, 0xcc, 0xd2, 0xd0, 0xce, 0xcc, 0xd0, 0xda, 0xdc, 0xe0, + 0xe5, 0x00, 0x10, 0x10, 0x11, 0x10, 0x15, 0x1b, 0x22, 0x20, 0x1d, 0x22, + 0x22, 0x25, 0x21, 0x23, 0x28, 0x27, 0x28, 0x28, 0x29, 0x2c, 0x2b, 
0x2e, + 0x30, 0x2b, 0x2e, 0x33, 0x39, 0x3f, 0x43, 0x44, 0x3f, 0x43, 0x46, 0x48, + 0x49, 0x44, 0x44, 0x47, 0x44, 0x41, 0x4a, 0x43, 0x45, 0x4e, 0x49, 0x46, + 0x46, 0x50, 0x51, 0x4c, 0x4d, 0x4d, 0x4d, 0x4c, 0x4d, 0x54, 0x5b, 0x5c, + 0x58, 0x59, 0x5e, 0x5c, 0x5c, 0x5e, 0x5d, 0x5b, 0x5b, 0x5e, 0x5f, 0x5e, + 0x58, 0x58, 0x5e, 0x5d, 0x60, 0x64, 0x61, 0x65, 0x66, 0x64, 0x64, 0x67, + 0x6a, 0x6c, 0x6a, 0x69, 0x68, 0x66, 0x67, 0x6a, 0x68, 0x68, 0x68, 0x66, + 0x6a, 0x69, 0x6a, 0x6f, 0x6f, 0x70, 0x6e, 0x6e, 0x71, 0x71, 0x70, 0x70, + 0x70, 0x72, 0x74, 0x75, 0x73, 0x72, 0x73, 0x75, 0x76, 0x77, 0x78, 0x77, + 0x77, 0x78, 0x79, 0x78, 0x78, 0x79, 0x79, 0x79, 0x76, 0x78, 0x77, 0x78, + 0x77, 0x77, 0x73, 0x5a, 0x2b, 0x13, 0x13, 0x17, 0x29, 0x2c, 0x17, 0xef, + 0xe2, 0xd2, 0xd4, 0xe2, 0xd3, 0xd3, 0xda, 0xe0, 0x03, 0x0f, 0x0f, 0x0a, + 0xe4, 0xca, 0xd2, 0xe5, 0xdd, 0xd5, 0xef, 0xe3, 0xc2, 0xb7, 0xbe, 0xbb, + 0xc1, 0xc3, 0xc1, 0xbe, 0xd2, 0xe4, 0xec, 0xf1, 0xef, 0xef, 0xe5, 0xcd, + 0xc9, 0xd7, 0xec, 0xd0, 0xce, 0xcd, 0xd4, 0xd5, 0xdd, 0xe0, 0xd8, 0xd8, + 0xd4, 0xd2, 0xd6, 0xcc, 0xba, 0xbc, 0xd3, 0xcb, 0xd8, 0xe3, 0xd9, 0xe1, + 0xce, 0xd4, 0xdc, 0xd3, 0xca, 0xce, 0xc3, 0xd4, 0xbd, 0xb7, 0xb4, 0xc5, + 0xd3, 0xc5, 0xc4, 0xcb, 0xd5, 0xd4, 0xd4, 0xe0, 0xf4, 0x05, 0x06, 0x0b, + 0x11, 0x15, 0x18, 0x1d, 0x1e, 0x1b, 0x1f, 0x20, 0x1f, 0x1c, 0x20, 0x25, + 0x23, 0x23, 0x27, 0x27, 0x28, 0x27, 0x28, 0x32, 0x2c, 0x28, 0x30, 0x35, + 0x3a, 0x3c, 0x41, 0x40, 0x40, 0x43, 0x46, 0x45, 0x47, 0x44, 0x44, 0x46, + 0x44, 0x42, 0x48, 0x45, 0x4b, 0x4e, 0x46, 0x44, 0x44, 0x4f, 0x52, 0x4e, + 0x50, 0x50, 0x4d, 0x4b, 0x4a, 0x50, 0x57, 0x58, 0x58, 0x58, 0x5c, 0x5b, + 0x59, 0x5d, 0x5e, 0x5c, 0x58, 0x5d, 0x5b, 0x5c, 0x58, 0x57, 0x5c, 0x5e, + 0x64, 0x67, 0x63, 0x63, 0x64, 0x64, 0x64, 0x65, 0x6a, 0x6c, 0x6a, 0x6a, + 0x6a, 0x69, 0x68, 0x6a, 0x6a, 0x69, 0x68, 0x68, 0x6a, 0x6a, 0x69, 0x6e, + 0x6f, 0x6f, 0x6e, 0x6c, 0x6f, 0x71, 0x70, 0x71, 0x6f, 0x70, 0x70, 0x73, + 0x74, 0x73, 0x73, 0x75, 0x76, 0x77, 0x77, 0x77, 0x78, 0x78, 0x79, 
0x78, + 0x79, 0x79, 0x79, 0x79, 0x76, 0x78, 0x77, 0x77, 0x77, 0x78, 0x77, 0x6e, + 0x48, 0x2e, 0x1f, 0x16, 0x13, 0x21, 0x36, 0x18, 0xfc, 0xdf, 0xe2, 0xdc, + 0xcf, 0xd4, 0xda, 0xde, 0xf1, 0x12, 0x1d, 0x14, 0xf0, 0xce, 0xc9, 0xd1, + 0xd7, 0xe0, 0xee, 0xe0, 0xc8, 0xb7, 0xb6, 0xbd, 0xc9, 0xcd, 0xd7, 0xd3, + 0xe1, 0xf0, 0xf6, 0xfa, 0x00, 0xf7, 0xea, 0xe1, 0xd5, 0xe5, 0x06, 0xec, + 0xd4, 0xd3, 0xde, 0xe6, 0xe6, 0xec, 0xe6, 0xde, 0xe0, 0xe3, 0xd8, 0xcd, + 0xc1, 0xc2, 0xe4, 0xe9, 0xe0, 0xea, 0xda, 0xdc, 0xd1, 0xda, 0xca, 0xbb, + 0xbe, 0xc8, 0xc8, 0xbc, 0xba, 0xb3, 0xad, 0xbc, 0xc3, 0xbe, 0xbf, 0xc4, + 0xc6, 0xcb, 0xde, 0xef, 0xf8, 0xfe, 0x07, 0x0c, 0x13, 0x19, 0x1a, 0x19, + 0x19, 0x1b, 0x21, 0x1e, 0x17, 0x17, 0x1f, 0x21, 0x1f, 0x22, 0x22, 0x25, + 0x27, 0x27, 0x2a, 0x2d, 0x2b, 0x2a, 0x32, 0x34, 0x37, 0x39, 0x3a, 0x3b, + 0x3e, 0x41, 0x43, 0x41, 0x45, 0x41, 0x44, 0x44, 0x43, 0x44, 0x46, 0x46, + 0x4c, 0x48, 0x42, 0x42, 0x44, 0x50, 0x52, 0x4d, 0x51, 0x51, 0x4c, 0x4c, + 0x4a, 0x50, 0x52, 0x55, 0x54, 0x57, 0x58, 0x59, 0x5a, 0x5c, 0x5d, 0x5e, + 0x5b, 0x5a, 0x5a, 0x5a, 0x5a, 0x58, 0x5a, 0x5f, 0x65, 0x68, 0x64, 0x63, + 0x63, 0x63, 0x64, 0x64, 0x6a, 0x6d, 0x6d, 0x6a, 0x6c, 0x6b, 0x6a, 0x68, + 0x6a, 0x6a, 0x6a, 0x6a, 0x6c, 0x6c, 0x6a, 0x6c, 0x6f, 0x6f, 0x6f, 0x6d, + 0x6d, 0x71, 0x71, 0x72, 0x70, 0x6f, 0x6f, 0x70, 0x73, 0x74, 0x74, 0x74, + 0x76, 0x77, 0x76, 0x76, 0x78, 0x79, 0x79, 0x78, 0x79, 0x79, 0x7a, 0x79, + 0x77, 0x79, 0x77, 0x77, 0x77, 0x79, 0x78, 0x75, 0x65, 0x44, 0x2f, 0x34, + 0x1f, 0x12, 0x20, 0x38, 0x16, 0x0d, 0xfb, 0xe6, 0xcd, 0xd2, 0xcd, 0xcf, + 0xe6, 0x10, 0x28, 0x22, 0x08, 0xda, 0xbf, 0xc1, 0xd1, 0xdd, 0xe9, 0xe0, + 0xda, 0xc8, 0xc1, 0xbb, 0xc0, 0xda, 0xdd, 0xe6, 0x03, 0xeb, 0xe9, 0xfe, + 0x16, 0x02, 0xdf, 0xd6, 0xd8, 0xf1, 0x13, 0x0f, 0xe8, 0xd3, 0xf0, 0xf9, + 0xf3, 0xf1, 0xe2, 0xe0, 0xdd, 0xdf, 0xe7, 0xdb, 0xc5, 0xc8, 0xdd, 0xf0, + 0xf4, 0xfd, 0xeb, 0xee, 0xd4, 0xd4, 0xc8, 0xc7, 0xc1, 0xc2, 0xcf, 0xc8, + 0xb9, 0xb0, 0xaa, 0xb3, 0xb8, 0xc0, 0xbe, 0xbc, 0xc1, 0xd0, 0xdb, 
0xe6, + 0xf0, 0xfe, 0xff, 0x07, 0x14, 0x1b, 0x18, 0x16, 0x16, 0x18, 0x1d, 0x18, + 0x11, 0x1a, 0x1d, 0x1d, 0x1e, 0x21, 0x20, 0x22, 0x23, 0x26, 0x28, 0x25, + 0x28, 0x2b, 0x2f, 0x31, 0x35, 0x34, 0x36, 0x3a, 0x3c, 0x3f, 0x3f, 0x40, + 0x44, 0x41, 0x42, 0x43, 0x42, 0x42, 0x44, 0x46, 0x46, 0x44, 0x43, 0x41, + 0x45, 0x4d, 0x50, 0x4f, 0x52, 0x52, 0x4c, 0x4e, 0x4e, 0x52, 0x4f, 0x55, + 0x54, 0x54, 0x57, 0x57, 0x59, 0x5b, 0x5a, 0x5e, 0x5d, 0x5a, 0x59, 0x5b, + 0x59, 0x59, 0x5b, 0x62, 0x66, 0x66, 0x64, 0x64, 0x62, 0x64, 0x65, 0x62, + 0x67, 0x6d, 0x6e, 0x6b, 0x6d, 0x6d, 0x6b, 0x69, 0x68, 0x6a, 0x6a, 0x6d, + 0x6c, 0x6d, 0x6b, 0x6b, 0x6c, 0x6e, 0x70, 0x6d, 0x6d, 0x6f, 0x70, 0x72, + 0x71, 0x70, 0x6f, 0x70, 0x73, 0x73, 0x74, 0x74, 0x75, 0x76, 0x76, 0x77, + 0x77, 0x79, 0x7a, 0x79, 0x79, 0x79, 0x7a, 0x79, 0x77, 0x78, 0x79, 0x77, + 0x77, 0x78, 0x78, 0x77, 0x71, 0x5e, 0x52, 0x51, 0x45, 0x37, 0x1f, 0x28, + 0x3a, 0x2e, 0x20, 0x08, 0xe0, 0xdd, 0xd7, 0xc5, 0xdb, 0x0d, 0x31, 0x37, + 0x1f, 0xe4, 0xd0, 0xca, 0xc2, 0xce, 0xd8, 0xd3, 0xd4, 0xd2, 0xdb, 0xca, + 0xc6, 0xdb, 0xdc, 0xec, 0x0a, 0x02, 0xfb, 0x15, 0x28, 0xfe, 0xe3, 0xd6, + 0xec, 0x09, 0x1f, 0x39, 0x22, 0x07, 0x18, 0x0f, 0xf6, 0xe8, 0xdb, 0xe4, + 0xf8, 0xfc, 0xfc, 0xe6, 0xd2, 0xc8, 0xe6, 0xf2, 0xfd, 0xfe, 0xe4, 0xd5, + 0xc2, 0xb6, 0xc3, 0xd4, 0xc5, 0xbe, 0xd0, 0xd6, 0xc1, 0xb0, 0xa9, 0xb0, + 0xb6, 0xbe, 0xb8, 0xbc, 0xc7, 0xce, 0xd5, 0xe2, 0xec, 0xf4, 0xfe, 0x0d, + 0x14, 0x13, 0x13, 0x15, 0x13, 0x18, 0x18, 0x13, 0x16, 0x1c, 0x1b, 0x1b, + 0x1c, 0x1e, 0x21, 0x22, 0x25, 0x28, 0x27, 0x20, 0x24, 0x28, 0x2b, 0x2d, + 0x30, 0x34, 0x34, 0x39, 0x3a, 0x38, 0x3b, 0x40, 0x43, 0x40, 0x41, 0x42, + 0x42, 0x3f, 0x3d, 0x46, 0x40, 0x3f, 0x42, 0x42, 0x45, 0x4a, 0x4c, 0x4d, + 0x52, 0x4e, 0x4a, 0x4c, 0x52, 0x54, 0x50, 0x53, 0x56, 0x53, 0x56, 0x55, + 0x58, 0x59, 0x5a, 0x5e, 0x5f, 0x5b, 0x5a, 0x5a, 0x5a, 0x5b, 0x5e, 0x63, + 0x67, 0x64, 0x62, 0x65, 0x62, 0x62, 0x64, 0x64, 0x64, 0x6a, 0x6c, 0x6c, + 0x6c, 0x6e, 0x6d, 0x6a, 0x68, 0x69, 0x6a, 0x6e, 0x6f, 0x6d, 0x6d, 
0x6c, + 0x6c, 0x6c, 0x6f, 0x6f, 0x6e, 0x6e, 0x6f, 0x72, 0x73, 0x72, 0x70, 0x70, + 0x73, 0x73, 0x74, 0x76, 0x76, 0x76, 0x76, 0x77, 0x78, 0x79, 0x7a, 0x79, + 0x79, 0x79, 0x7a, 0x7a, 0x78, 0x78, 0x7a, 0x78, 0x76, 0x78, 0x78, 0x77, + 0x77, 0x76, 0x71, 0x6b, 0x67, 0x60, 0x40, 0x1f, 0x2d, 0x41, 0x40, 0x2e, + 0x08, 0xec, 0xe3, 0xd1, 0xd3, 0x00, 0x37, 0x40, 0x28, 0xe9, 0xd7, 0xd1, + 0xc8, 0xd3, 0xe5, 0xda, 0xd0, 0xcf, 0xd6, 0xde, 0xda, 0xda, 0xe6, 0xf2, + 0x0c, 0x0f, 0x0f, 0x28, 0x13, 0xf8, 0xf8, 0xf3, 0x09, 0x1c, 0x33, 0x45, + 0x3b, 0x28, 0x2b, 0x10, 0xf8, 0xf2, 0xf2, 0xf2, 0xfb, 0xf5, 0xf0, 0xd4, + 0xc0, 0xce, 0xe9, 0x01, 0x0b, 0xf2, 0xcf, 0xc1, 0xc0, 0xbc, 0xbd, 0xcf, + 0xc8, 0xcf, 0xdb, 0xd4, 0xca, 0xba, 0xaf, 0xaa, 0xb6, 0xbc, 0xb0, 0xb6, + 0xc0, 0xc4, 0xce, 0xd9, 0xe2, 0xee, 0x00, 0x09, 0x04, 0x06, 0x0c, 0x0e, + 0x10, 0x15, 0x14, 0x14, 0x1a, 0x15, 0x1c, 0x1b, 0x1c, 0x1f, 0x20, 0x22, + 0x26, 0x27, 0x23, 0x20, 0x23, 0x24, 0x28, 0x27, 0x2d, 0x2e, 0x30, 0x36, + 0x39, 0x33, 0x36, 0x3f, 0x43, 0x40, 0x40, 0x43, 0x44, 0x41, 0x38, 0x41, + 0x40, 0x3a, 0x41, 0x43, 0x45, 0x4a, 0x4d, 0x4e, 0x51, 0x4c, 0x49, 0x49, + 0x51, 0x55, 0x52, 0x51, 0x57, 0x55, 0x53, 0x54, 0x55, 0x58, 0x5b, 0x5c, + 0x5e, 0x59, 0x5a, 0x59, 0x5d, 0x5c, 0x5f, 0x64, 0x67, 0x65, 0x64, 0x65, + 0x64, 0x62, 0x62, 0x66, 0x64, 0x65, 0x6a, 0x6b, 0x6b, 0x6d, 0x6c, 0x6c, + 0x6a, 0x6a, 0x6b, 0x6e, 0x70, 0x6e, 0x6d, 0x6e, 0x6c, 0x6d, 0x6d, 0x6f, + 0x6e, 0x6e, 0x70, 0x73, 0x73, 0x73, 0x70, 0x71, 0x75, 0x74, 0x75, 0x76, + 0x77, 0x76, 0x76, 0x76, 0x77, 0x79, 0x7a, 0x79, 0x79, 0x78, 0x78, 0x78, + 0x77, 0x78, 0x78, 0x77, 0x76, 0x79, 0x79, 0x78, 0x77, 0x78, 0x77, 0x77, + 0x74, 0x6a, 0x58, 0x45, 0x2e, 0x2a, 0x46, 0x48, 0x34, 0x10, 0xfc, 0xe1, + 0xd8, 0xf5, 0x36, 0x4f, 0x36, 0xf9, 0xce, 0xbe, 0xc9, 0xd3, 0xf4, 0xe0, + 0xce, 0xc7, 0xc6, 0xcf, 0xe2, 0xe7, 0xed, 0x03, 0x17, 0x23, 0x28, 0x2c, + 0x06, 0xfe, 0xfc, 0x0b, 0x26, 0x3e, 0x46, 0x4d, 0x4f, 0x43, 0x2e, 0x0f, + 0x12, 0x12, 0xfc, 0xec, 0xe3, 0xe1, 0xe0, 0xd1, 0xcf, 0xf5, 0x07, 
0x16, + 0x01, 0xda, 0xcc, 0xc5, 0xc6, 0xbd, 0xb9, 0xcb, 0xce, 0xd2, 0xd8, 0xe0, + 0xc8, 0xb5, 0xb4, 0xaa, 0xb0, 0xbc, 0xb2, 0xb3, 0xbc, 0xc0, 0xc5, 0xd2, + 0xe6, 0xea, 0xed, 0xed, 0xee, 0x04, 0x06, 0x09, 0x0f, 0x10, 0x10, 0x15, + 0x12, 0x16, 0x1d, 0x1a, 0x1a, 0x1d, 0x1d, 0x22, 0x26, 0x25, 0x1f, 0x21, + 0x21, 0x1e, 0x21, 0x24, 0x28, 0x27, 0x2e, 0x35, 0x37, 0x2e, 0x2f, 0x3e, + 0x41, 0x3d, 0x3a, 0x42, 0x46, 0x40, 0x37, 0x40, 0x40, 0x3c, 0x41, 0x45, + 0x46, 0x4a, 0x4c, 0x49, 0x4f, 0x4d, 0x4b, 0x47, 0x4f, 0x55, 0x54, 0x4f, + 0x54, 0x58, 0x57, 0x53, 0x54, 0x57, 0x58, 0x59, 0x5a, 0x58, 0x5a, 0x58, + 0x5b, 0x5e, 0x61, 0x64, 0x67, 0x65, 0x64, 0x66, 0x63, 0x62, 0x61, 0x64, + 0x64, 0x63, 0x68, 0x6a, 0x6a, 0x6c, 0x6c, 0x6c, 0x6c, 0x6b, 0x6a, 0x6c, + 0x70, 0x6f, 0x6c, 0x70, 0x6d, 0x6e, 0x6c, 0x70, 0x6f, 0x6d, 0x70, 0x74, + 0x72, 0x71, 0x71, 0x72, 0x75, 0x75, 0x75, 0x76, 0x77, 0x76, 0x77, 0x76, + 0x76, 0x78, 0x79, 0x7a, 0x7a, 0x78, 0x77, 0x77, 0x77, 0x78, 0x78, 0x77, + 0x76, 0x78, 0x79, 0x78, 0x76, 0x77, 0x78, 0x78, 0x76, 0x70, 0x6a, 0x5d, + 0x3d, 0x28, 0x35, 0x41, 0x48, 0x3b, 0x23, 0xfc, 0xe2, 0xf5, 0x2e, 0x4f, + 0x4b, 0x18, 0xdd, 0xc0, 0xc5, 0xd8, 0xec, 0xde, 0xca, 0xbf, 0xbc, 0xd2, + 0xdb, 0xf6, 0x15, 0x29, 0x27, 0x38, 0x3a, 0x26, 0x04, 0x0e, 0x20, 0x34, + 0x4e, 0x54, 0x55, 0x54, 0x4c, 0x41, 0x35, 0x24, 0x12, 0x00, 0xe5, 0xe9, + 0xed, 0xfb, 0xeb, 0xf2, 0x10, 0x22, 0x22, 0x10, 0xf8, 0xd3, 0xd4, 0xcf, + 0xcd, 0xbf, 0xbf, 0xcf, 0xd8, 0xd8, 0xd0, 0xe6, 0xc8, 0xc4, 0xb1, 0xaf, + 0xb0, 0xc0, 0xb4, 0xb3, 0xb9, 0xbc, 0xc7, 0xd0, 0xda, 0xdc, 0xe0, 0xe0, + 0xeb, 0xfc, 0xfe, 0x07, 0x0e, 0x0e, 0x12, 0x16, 0x15, 0x1b, 0x1d, 0x18, + 0x19, 0x1b, 0x1c, 0x22, 0x24, 0x24, 0x1f, 0x22, 0x1e, 0x1c, 0x22, 0x25, + 0x27, 0x27, 0x2d, 0x34, 0x33, 0x28, 0x2a, 0x3a, 0x3d, 0x3a, 0x36, 0x3d, + 0x46, 0x42, 0x37, 0x3e, 0x40, 0x3d, 0x40, 0x44, 0x44, 0x47, 0x48, 0x44, + 0x49, 0x4d, 0x4c, 0x48, 0x4e, 0x54, 0x57, 0x50, 0x52, 0x57, 0x55, 0x52, + 0x55, 0x56, 0x55, 0x57, 0x58, 0x57, 0x5a, 0x56, 0x5b, 0x5c, 0x5f, 
0x64, + 0x67, 0x64, 0x65, 0x64, 0x63, 0x62, 0x62, 0x62, 0x66, 0x64, 0x66, 0x69, + 0x6a, 0x6c, 0x6d, 0x6c, 0x6a, 0x6a, 0x6a, 0x6c, 0x6e, 0x70, 0x6a, 0x6e, + 0x6e, 0x6d, 0x6e, 0x70, 0x70, 0x6c, 0x6e, 0x74, 0x73, 0x70, 0x73, 0x73, + 0x74, 0x75, 0x74, 0x76, 0x77, 0x76, 0x77, 0x77, 0x76, 0x78, 0x78, 0x79, + 0x7a, 0x79, 0x78, 0x77, 0x78, 0x7a, 0x79, 0x77, 0x76, 0x76, 0x77, 0x77, + 0x75, 0x76, 0x78, 0x77, 0x77, 0x77, 0x74, 0x6d, 0x5f, 0x48, 0x37, 0x3a, + 0x40, 0x4c, 0x44, 0x20, 0xfc, 0x07, 0x2c, 0x4b, 0x57, 0x38, 0x08, 0xd3, + 0xc1, 0xda, 0xe0, 0xd3, 0xc9, 0xc3, 0xc2, 0xd1, 0xd4, 0x04, 0x31, 0x38, + 0x34, 0x45, 0x3d, 0x22, 0x14, 0x32, 0x49, 0x52, 0x59, 0x58, 0x57, 0x51, + 0x46, 0x43, 0x36, 0x29, 0x16, 0x00, 0xe6, 0xf1, 0xfe, 0x1f, 0x0b, 0x1d, + 0x37, 0x30, 0x14, 0x00, 0xf2, 0xe0, 0xea, 0xd1, 0xd0, 0xcd, 0xd1, 0xe0, + 0xec, 0xdf, 0xdb, 0xf2, 0xc2, 0xc8, 0xbb, 0xb6, 0xb1, 0xc4, 0xb7, 0xb6, + 0xb7, 0xbe, 0xd1, 0xcd, 0xcd, 0xde, 0xda, 0xda, 0xe6, 0xf2, 0xf9, 0x04, + 0x0b, 0x0f, 0x15, 0x18, 0x1c, 0x20, 0x1c, 0x1c, 0x1b, 0x1b, 0x1a, 0x1f, + 0x22, 0x1f, 0x20, 0x1f, 0x1c, 0x1c, 0x22, 0x24, 0x24, 0x25, 0x2b, 0x31, + 0x2e, 0x2a, 0x28, 0x38, 0x38, 0x35, 0x34, 0x38, 0x41, 0x43, 0x3a, 0x3a, + 0x42, 0x40, 0x40, 0x43, 0x46, 0x46, 0x47, 0x44, 0x44, 0x4c, 0x4b, 0x4b, + 0x4c, 0x51, 0x54, 0x52, 0x53, 0x53, 0x56, 0x52, 0x55, 0x56, 0x54, 0x56, + 0x54, 0x52, 0x59, 0x58, 0x5e, 0x5e, 0x60, 0x64, 0x65, 0x64, 0x64, 0x63, + 0x62, 0x60, 0x62, 0x60, 0x64, 0x64, 0x65, 0x68, 0x6a, 0x6b, 0x6d, 0x6b, + 0x6a, 0x6a, 0x6a, 0x6b, 0x6c, 0x6e, 0x6a, 0x6d, 0x6e, 0x6b, 0x70, 0x70, + 0x6f, 0x6e, 0x6e, 0x71, 0x73, 0x70, 0x73, 0x74, 0x73, 0x75, 0x75, 0x76, + 0x77, 0x77, 0x76, 0x78, 0x77, 0x77, 0x78, 0x79, 0x7a, 0x79, 0x79, 0x78, + 0x78, 0x7a, 0x79, 0x77, 0x76, 0x77, 0x79, 0x78, 0x77, 0x76, 0x77, 0x79, + 0x79, 0x78, 0x76, 0x76, 0x70, 0x66, 0x52, 0x47, 0x46, 0x46, 0x52, 0x46, + 0x1d, 0x0d, 0x35, 0x50, 0x5c, 0x43, 0x26, 0xea, 0xd3, 0xe0, 0xe8, 0xd6, + 0xcb, 0xce, 0xc5, 0xdc, 0xec, 0x20, 0x3d, 0x40, 0x46, 0x49, 0x35, 
0x24, + 0x34, 0x50, 0x5d, 0x5e, 0x5e, 0x5a, 0x56, 0x50, 0x4b, 0x45, 0x39, 0x32, + 0x1c, 0x02, 0xef, 0xfe, 0x1a, 0x39, 0x43, 0x46, 0x36, 0x17, 0x04, 0x06, + 0x05, 0xea, 0xe9, 0xd8, 0xe0, 0xd9, 0xdd, 0xea, 0xeb, 0xf2, 0xec, 0xdf, + 0xcc, 0xbc, 0xc2, 0xbd, 0xb6, 0xb3, 0xb3, 0xc1, 0xbd, 0xbc, 0xc8, 0xc8, + 0xc4, 0xd6, 0xd7, 0xd2, 0xe0, 0xe8, 0xf4, 0x01, 0x0a, 0x0e, 0x14, 0x19, + 0x21, 0x22, 0x1d, 0x1c, 0x1a, 0x18, 0x1a, 0x1f, 0x1d, 0x1d, 0x1d, 0x1c, + 0x1c, 0x1a, 0x22, 0x23, 0x1f, 0x22, 0x28, 0x2b, 0x2c, 0x27, 0x22, 0x32, + 0x34, 0x30, 0x33, 0x34, 0x3a, 0x40, 0x3d, 0x3b, 0x3f, 0x3f, 0x40, 0x41, + 0x46, 0x46, 0x46, 0x45, 0x41, 0x46, 0x46, 0x4c, 0x4d, 0x4e, 0x52, 0x53, + 0x54, 0x52, 0x52, 0x52, 0x52, 0x55, 0x55, 0x55, 0x55, 0x52, 0x5a, 0x5b, + 0x5e, 0x60, 0x60, 0x65, 0x64, 0x62, 0x62, 0x63, 0x61, 0x61, 0x63, 0x62, + 0x64, 0x64, 0x63, 0x65, 0x6a, 0x6a, 0x6b, 0x6a, 0x6a, 0x68, 0x68, 0x6b, + 0x6c, 0x6d, 0x6c, 0x6c, 0x6e, 0x6c, 0x70, 0x70, 0x6f, 0x70, 0x6d, 0x6f, + 0x71, 0x70, 0x72, 0x73, 0x73, 0x74, 0x76, 0x76, 0x77, 0x77, 0x76, 0x78, + 0x78, 0x77, 0x79, 0x7a, 0x7a, 0x7a, 0x79, 0x78, 0x79, 0x7a, 0x79, 0x77, + 0x77, 0x78, 0x79, 0x7a, 0x79, 0x78, 0x77, 0x79, 0x7a, 0x79, 0x78, 0x78, + 0x76, 0x72, 0x67, 0x58, 0x52, 0x45, 0x48, 0x51, 0x49, 0x27, 0x3a, 0x56, + 0x5c, 0x50, 0x37, 0x07, 0xe9, 0xef, 0xef, 0xd5, 0xcb, 0xda, 0xd4, 0xeb, + 0x16, 0x3d, 0x46, 0x47, 0x51, 0x49, 0x37, 0x42, 0x57, 0x62, 0x63, 0x62, + 0x61, 0x5b, 0x51, 0x46, 0x3f, 0x40, 0x3e, 0x36, 0x23, 0x17, 0x20, 0x39, + 0x4d, 0x56, 0x4c, 0x34, 0x10, 0xf8, 0xfe, 0xfe, 0xfb, 0xef, 0xec, 0xe6, + 0xf6, 0xf2, 0xf4, 0xfb, 0x06, 0xff, 0xee, 0xe5, 0xbc, 0xb0, 0xc3, 0xc5, + 0xb7, 0xaf, 0xb0, 0xba, 0xb6, 0xb6, 0xb9, 0xc4, 0xbf, 0xc3, 0xc9, 0xcd, + 0xda, 0xe5, 0xf1, 0xff, 0x07, 0x0e, 0x14, 0x1c, 0x21, 0x24, 0x22, 0x1e, + 0x1c, 0x16, 0x1c, 0x20, 0x1c, 0x1a, 0x18, 0x1a, 0x1a, 0x19, 0x1e, 0x1f, + 0x1a, 0x1b, 0x1e, 0x22, 0x26, 0x22, 0x20, 0x28, 0x2e, 0x2e, 0x30, 0x34, + 0x33, 0x3c, 0x3e, 0x3e, 0x40, 0x3e, 0x3f, 0x43, 0x45, 0x41, 0x44, 
0x44, + 0x42, 0x42, 0x46, 0x4c, 0x4d, 0x4e, 0x4d, 0x50, 0x54, 0x51, 0x52, 0x51, + 0x4f, 0x54, 0x54, 0x55, 0x56, 0x53, 0x59, 0x5d, 0x5e, 0x61, 0x62, 0x64, + 0x62, 0x63, 0x62, 0x63, 0x63, 0x63, 0x65, 0x64, 0x65, 0x64, 0x64, 0x65, + 0x69, 0x6b, 0x6b, 0x69, 0x6a, 0x69, 0x68, 0x6b, 0x6d, 0x6d, 0x6e, 0x6d, + 0x6d, 0x6c, 0x70, 0x6e, 0x70, 0x70, 0x6e, 0x6f, 0x71, 0x72, 0x71, 0x71, + 0x73, 0x75, 0x76, 0x75, 0x76, 0x77, 0x76, 0x78, 0x78, 0x77, 0x78, 0x7a, + 0x7a, 0x79, 0x78, 0x78, 0x78, 0x79, 0x77, 0x76, 0x78, 0x79, 0x79, 0x7a, + 0x7a, 0x79, 0x78, 0x79, 0x78, 0x78, 0x78, 0x77, 0x76, 0x76, 0x74, 0x6a, + 0x64, 0x5b, 0x46, 0x46, 0x4f, 0x4e, 0x4b, 0x59, 0x5e, 0x5a, 0x4d, 0x29, + 0xec, 0xdb, 0xe5, 0xd6, 0xce, 0xd5, 0xde, 0x03, 0x34, 0x49, 0x48, 0x52, + 0x58, 0x53, 0x52, 0x5b, 0x64, 0x66, 0x65, 0x63, 0x5c, 0x54, 0x4c, 0x48, + 0x48, 0x47, 0x41, 0x3f, 0x47, 0x4d, 0x53, 0x5c, 0x5b, 0x4c, 0x37, 0x21, + 0x0c, 0x0a, 0x09, 0xfc, 0xfa, 0xfe, 0x03, 0x0d, 0x18, 0x12, 0x16, 0x19, + 0x0e, 0xfb, 0xf2, 0xf3, 0xb1, 0xaa, 0xc8, 0xc8, 0xbd, 0xb6, 0xab, 0xaf, + 0xb2, 0xb3, 0xb5, 0xbc, 0xbd, 0xb7, 0xc0, 0xc7, 0xcf, 0xdc, 0xeb, 0xf3, + 0x01, 0x10, 0x17, 0x1f, 0x23, 0x27, 0x25, 0x23, 0x1c, 0x1a, 0x1e, 0x1d, + 0x1c, 0x1a, 0x16, 0x18, 0x18, 0x16, 0x1c, 0x1c, 0x1a, 0x1a, 0x18, 0x1b, + 0x1d, 0x20, 0x1d, 0x22, 0x2a, 0x29, 0x2b, 0x32, 0x31, 0x35, 0x3a, 0x3b, + 0x40, 0x3a, 0x3f, 0x46, 0x43, 0x44, 0x41, 0x45, 0x43, 0x42, 0x48, 0x4b, + 0x49, 0x4d, 0x4b, 0x4e, 0x52, 0x4d, 0x52, 0x4f, 0x4f, 0x53, 0x53, 0x57, + 0x56, 0x52, 0x58, 0x5c, 0x5d, 0x60, 0x62, 0x64, 0x60, 0x61, 0x62, 0x63, + 0x64, 0x65, 0x67, 0x65, 0x65, 0x65, 0x64, 0x65, 0x69, 0x6a, 0x6b, 0x6a, + 0x6a, 0x6b, 0x6a, 0x6c, 0x6d, 0x6d, 0x70, 0x6e, 0x6c, 0x6b, 0x70, 0x6e, + 0x6f, 0x70, 0x6f, 0x6e, 0x72, 0x71, 0x71, 0x70, 0x74, 0x76, 0x76, 0x76, + 0x76, 0x78, 0x77, 0x78, 0x79, 0x77, 0x77, 0x7a, 0x79, 0x79, 0x79, 0x78, + 0x78, 0x78, 0x78, 0x77, 0x79, 0x7a, 0x79, 0x7a, 0x7a, 0x7a, 0x79, 0x79, + 0x77, 0x77, 0x78, 0x77, 0x76, 0x75, 0x76, 0x75, 0x6f, 0x69, 0x58, 
0x4d, + 0x42, 0x4c, 0x59, 0x5e, 0x5e, 0x62, 0x5a, 0x43, 0x02, 0xe1, 0xe6, 0xd4, + 0xcc, 0xcc, 0xec, 0x1f, 0x49, 0x51, 0x52, 0x5b, 0x5e, 0x5e, 0x63, 0x65, + 0x65, 0x67, 0x65, 0x63, 0x5c, 0x5a, 0x59, 0x58, 0x56, 0x57, 0x58, 0x5e, + 0x5f, 0x5c, 0x5b, 0x5d, 0x53, 0x46, 0x3a, 0x28, 0x21, 0x20, 0x1d, 0x21, + 0x27, 0x2b, 0x34, 0x34, 0x33, 0x36, 0x37, 0x28, 0x1b, 0x19, 0x0d, 0x09, + 0xaa, 0xa8, 0xae, 0xb1, 0xc0, 0xba, 0xad, 0xa9, 0xaf, 0xb2, 0xb6, 0xba, + 0xc0, 0xbb, 0xc6, 0xc9, 0xca, 0xd6, 0xde, 0xe7, 0xfe, 0x10, 0x18, 0x1e, + 0x21, 0x26, 0x27, 0x28, 0x1f, 0x1c, 0x1c, 0x1c, 0x1b, 0x16, 0x16, 0x18, + 0x17, 0x16, 0x1a, 0x1a, 0x17, 0x16, 0x14, 0x15, 0x16, 0x1c, 0x17, 0x16, + 0x21, 0x28, 0x2b, 0x32, 0x2f, 0x2f, 0x36, 0x3b, 0x3d, 0x3a, 0x3e, 0x46, + 0x45, 0x45, 0x40, 0x42, 0x43, 0x40, 0x48, 0x4a, 0x49, 0x4c, 0x4a, 0x50, + 0x50, 0x4d, 0x52, 0x4c, 0x51, 0x54, 0x53, 0x56, 0x58, 0x53, 0x58, 0x5c, + 0x5c, 0x5e, 0x62, 0x62, 0x60, 0x5f, 0x61, 0x64, 0x64, 0x64, 0x66, 0x64, + 0x64, 0x65, 0x64, 0x65, 0x6a, 0x6a, 0x6a, 0x6a, 0x6a, 0x6e, 0x6e, 0x6f, + 0x6e, 0x6f, 0x71, 0x6e, 0x6b, 0x6c, 0x6c, 0x6b, 0x6d, 0x70, 0x70, 0x6e, + 0x73, 0x70, 0x70, 0x71, 0x72, 0x76, 0x77, 0x76, 0x76, 0x78, 0x77, 0x78, + 0x79, 0x79, 0x78, 0x79, 0x79, 0x79, 0x78, 0x79, 0x78, 0x79, 0x7a, 0x7a, + 0x79, 0x7a, 0x7a, 0x78, 0x78, 0x79, 0x78, 0x77, 0x78, 0x78, 0x78, 0x79, + 0x78, 0x77, 0x77, 0x78, 0x76, 0x70, 0x67, 0x5a, 0x4d, 0x46, 0x58, 0x63, + 0x61, 0x64, 0x5e, 0x51, 0x1f, 0xf2, 0xe1, 0xd1, 0xce, 0xda, 0x07, 0x33, + 0x59, 0x5b, 0x5e, 0x60, 0x62, 0x64, 0x68, 0x69, 0x69, 0x67, 0x64, 0x64, + 0x64, 0x64, 0x62, 0x60, 0x60, 0x62, 0x63, 0x63, 0x60, 0x5e, 0x5f, 0x59, + 0x52, 0x45, 0x39, 0x31, 0x34, 0x3a, 0x3e, 0x46, 0x46, 0x46, 0x46, 0x46, + 0x46, 0x3f, 0x31, 0x28, 0x22, 0x10, 0x07, 0xfa, 0xaa, 0xad, 0xae, 0xae, + 0xb6, 0xb0, 0xaf, 0xac, 0xad, 0xaf, 0xb2, 0xbf, 0xc2, 0xbe, 0xcd, 0xc4, + 0xc2, 0xc8, 0xd3, 0xe1, 0xfc, 0x0d, 0x14, 0x1c, 0x22, 0x27, 0x28, 0x28, + 0x25, 0x22, 0x1d, 0x1c, 0x1a, 0x16, 0x17, 0x19, 0x17, 0x17, 0x19, 
0x18, + 0x17, 0x13, 0x10, 0x11, 0x10, 0x15, 0x14, 0x0c, 0x15, 0x22, 0x23, 0x2c, + 0x2d, 0x2c, 0x34, 0x3a, 0x3d, 0x40, 0x40, 0x43, 0x43, 0x44, 0x40, 0x40, + 0x42, 0x3f, 0x46, 0x47, 0x4c, 0x4c, 0x4c, 0x53, 0x50, 0x4e, 0x52, 0x4c, + 0x51, 0x52, 0x55, 0x57, 0x59, 0x54, 0x58, 0x5b, 0x5c, 0x5e, 0x63, 0x5f, + 0x5e, 0x60, 0x62, 0x64, 0x64, 0x63, 0x66, 0x65, 0x65, 0x66, 0x65, 0x66, + 0x6a, 0x6a, 0x69, 0x6c, 0x6d, 0x6c, 0x70, 0x6f, 0x6e, 0x6f, 0x70, 0x6d, + 0x6c, 0x6d, 0x6d, 0x6c, 0x6d, 0x70, 0x72, 0x70, 0x72, 0x70, 0x70, 0x73, + 0x71, 0x75, 0x77, 0x77, 0x75, 0x77, 0x77, 0x78, 0x78, 0x7a, 0x78, 0x78, + 0x79, 0x79, 0x78, 0x79, 0x79, 0x79, 0x7a, 0x7a, 0x79, 0x7a, 0x79, 0x79, + 0x79, 0x78, 0x78, 0x78, 0x78, 0x77, 0x78, 0x78, 0x77, 0x76, 0x76, 0x77, + 0x77, 0x76, 0x74, 0x6e, 0x64, 0x57, 0x58, 0x61, 0x67, 0x68, 0x67, 0x59, + 0x37, 0x08, 0xdd, 0xe2, 0xe6, 0xe2, 0x1c, 0x46, 0x60, 0x63, 0x64, 0x64, + 0x65, 0x68, 0x6a, 0x6a, 0x6a, 0x66, 0x65, 0x69, 0x69, 0x66, 0x67, 0x66, + 0x67, 0x65, 0x62, 0x62, 0x60, 0x5f, 0x5a, 0x53, 0x49, 0x38, 0x34, 0x41, + 0x4b, 0x52, 0x56, 0x5a, 0x58, 0x58, 0x57, 0x53, 0x4d, 0x3e, 0x28, 0x10, + 0xff, 0xf0, 0xe0, 0xd3, 0xac, 0xac, 0xaf, 0xb0, 0xad, 0xa8, 0xab, 0xb0, + 0xaa, 0xb0, 0xaf, 0xb3, 0xb7, 0xba, 0xbc, 0xbc, 0xc1, 0xc9, 0xd1, 0xe1, + 0xfc, 0x07, 0x12, 0x1c, 0x22, 0x27, 0x2a, 0x2b, 0x27, 0x22, 0x1c, 0x19, + 0x16, 0x12, 0x15, 0x15, 0x14, 0x16, 0x16, 0x11, 0x16, 0x10, 0x0d, 0x10, + 0x0e, 0x0c, 0x0d, 0x0a, 0x0c, 0x1c, 0x21, 0x28, 0x29, 0x29, 0x30, 0x38, + 0x3d, 0x40, 0x40, 0x3e, 0x43, 0x42, 0x40, 0x40, 0x40, 0x42, 0x47, 0x48, + 0x4b, 0x4a, 0x4e, 0x52, 0x50, 0x51, 0x52, 0x50, 0x51, 0x54, 0x56, 0x55, + 0x57, 0x56, 0x58, 0x5c, 0x5c, 0x5f, 0x60, 0x5e, 0x5e, 0x60, 0x61, 0x64, + 0x64, 0x62, 0x65, 0x66, 0x65, 0x66, 0x67, 0x66, 0x6a, 0x6a, 0x6a, 0x6b, + 0x6f, 0x6e, 0x6e, 0x6f, 0x6f, 0x70, 0x70, 0x6e, 0x6e, 0x6f, 0x6e, 0x6f, + 0x70, 0x72, 0x73, 0x73, 0x73, 0x70, 0x70, 0x74, 0x71, 0x73, 0x76, 0x76, + 0x75, 0x76, 0x77, 0x77, 0x78, 0x79, 0x78, 0x78, 0x78, 0x78, 0x77, 
0x78, + 0x79, 0x79, 0x7a, 0x7a, 0x7a, 0x79, 0x78, 0x7a, 0x7a, 0x79, 0x78, 0x78, + 0x77, 0x77, 0x77, 0x78, 0x77, 0x76, 0x75, 0x76, 0x77, 0x78, 0x76, 0x73, + 0x6c, 0x64, 0x53, 0x5b, 0x67, 0x6a, 0x6a, 0x64, 0x4a, 0x23, 0xf4, 0xfe, + 0xf0, 0xe6, 0x29, 0x53, 0x63, 0x66, 0x68, 0x66, 0x66, 0x68, 0x68, 0x6a, + 0x6a, 0x6a, 0x6a, 0x6a, 0x68, 0x68, 0x68, 0x68, 0x69, 0x67, 0x66, 0x66, + 0x64, 0x63, 0x5b, 0x54, 0x51, 0x52, 0x58, 0x5e, 0x62, 0x5e, 0x5e, 0x5e, + 0x5c, 0x57, 0x51, 0x48, 0x3e, 0x2d, 0x0f, 0xeb, 0xe6, 0xda, 0xc8, 0xbb, + 0xa8, 0xab, 0xaa, 0xad, 0xa9, 0xa6, 0xaa, 0xb0, 0xb0, 0xb4, 0xad, 0xb0, + 0xb3, 0xb8, 0xbb, 0xba, 0xbc, 0xce, 0xdd, 0xe1, 0xf1, 0x05, 0x17, 0x1b, + 0x23, 0x28, 0x2a, 0x2b, 0x28, 0x23, 0x1c, 0x17, 0x13, 0x0f, 0x0f, 0x10, + 0x0c, 0x0f, 0x0e, 0x09, 0x0f, 0x0b, 0x0a, 0x08, 0x07, 0x04, 0x00, 0x03, + 0x06, 0x14, 0x1c, 0x21, 0x28, 0x29, 0x2e, 0x37, 0x3c, 0x3f, 0x40, 0x3d, + 0x40, 0x40, 0x40, 0x40, 0x40, 0x40, 0x46, 0x49, 0x4a, 0x49, 0x4e, 0x52, + 0x50, 0x52, 0x55, 0x52, 0x51, 0x54, 0x54, 0x56, 0x58, 0x58, 0x58, 0x5e, + 0x5e, 0x5e, 0x5d, 0x5d, 0x5e, 0x61, 0x62, 0x64, 0x66, 0x62, 0x64, 0x67, + 0x68, 0x65, 0x69, 0x67, 0x68, 0x69, 0x6b, 0x6d, 0x70, 0x70, 0x6e, 0x6e, + 0x70, 0x71, 0x71, 0x6f, 0x6f, 0x70, 0x70, 0x71, 0x72, 0x74, 0x74, 0x75, + 0x71, 0x70, 0x70, 0x75, 0x73, 0x71, 0x75, 0x76, 0x73, 0x73, 0x76, 0x76, + 0x77, 0x78, 0x78, 0x79, 0x7a, 0x79, 0x78, 0x76, 0x79, 0x79, 0x79, 0x79, + 0x79, 0x7a, 0x79, 0x7a, 0x7a, 0x79, 0x77, 0x76, 0x77, 0x77, 0x76, 0x76, + 0x76, 0x76, 0x75, 0x74, 0x75, 0x76, 0x76, 0x75, 0x70, 0x6c, 0x65, 0x5a, + 0x63, 0x6b, 0x6d, 0x6a, 0x58, 0x41, 0x19, 0x05, 0xef, 0x03, 0x3a, 0x56, + 0x65, 0x69, 0x6a, 0x69, 0x68, 0x69, 0x6a, 0x6b, 0x6a, 0x6a, 0x6a, 0x6a, + 0x6a, 0x6a, 0x6a, 0x6b, 0x6a, 0x6a, 0x69, 0x68, 0x66, 0x65, 0x64, 0x64, + 0x64, 0x67, 0x67, 0x64, 0x61, 0x60, 0x5e, 0x5a, 0x54, 0x4c, 0x43, 0x37, + 0x23, 0x0f, 0x08, 0xf9, 0xf6, 0xea, 0xd7, 0xd0, 0xa8, 0xae, 0xb2, 0xaa, + 0xa4, 0xa4, 0xb6, 0xbf, 0xaf, 0xb6, 0xb2, 0xb1, 0xb5, 0xb0, 0xb5, 
0xb6, + 0xb7, 0xc7, 0xd6, 0xda, 0xe6, 0x05, 0x19, 0x1c, 0x23, 0x28, 0x29, 0x2a, + 0x27, 0x22, 0x1d, 0x19, 0x14, 0x0c, 0x0a, 0x09, 0x04, 0x04, 0x03, 0xff, + 0x01, 0x00, 0xfe, 0xfe, 0x00, 0xfa, 0xf6, 0xfe, 0xff, 0x0a, 0x16, 0x18, + 0x21, 0x24, 0x2a, 0x31, 0x3b, 0x3c, 0x40, 0x3d, 0x3e, 0x40, 0x41, 0x3f, + 0x40, 0x40, 0x46, 0x49, 0x49, 0x4a, 0x4e, 0x4f, 0x50, 0x53, 0x53, 0x52, + 0x52, 0x55, 0x55, 0x55, 0x55, 0x58, 0x5b, 0x5e, 0x5e, 0x5e, 0x5b, 0x5a, + 0x5d, 0x61, 0x64, 0x67, 0x67, 0x62, 0x62, 0x69, 0x6a, 0x66, 0x69, 0x68, + 0x6a, 0x6b, 0x6b, 0x6e, 0x70, 0x70, 0x71, 0x6e, 0x70, 0x70, 0x73, 0x71, + 0x71, 0x72, 0x71, 0x71, 0x73, 0x74, 0x74, 0x75, 0x71, 0x72, 0x73, 0x74, + 0x74, 0x73, 0x73, 0x76, 0x72, 0x71, 0x74, 0x76, 0x76, 0x77, 0x78, 0x79, + 0x7a, 0x79, 0x78, 0x76, 0x78, 0x79, 0x79, 0x79, 0x79, 0x7a, 0x7a, 0x7a, + 0x7a, 0x7a, 0x78, 0x76, 0x76, 0x76, 0x74, 0x72, 0x72, 0x73, 0x74, 0x74, + 0x74, 0x76, 0x76, 0x76, 0x75, 0x70, 0x6e, 0x64, 0x63, 0x6a, 0x6e, 0x6d, + 0x67, 0x52, 0x31, 0x0b, 0x02, 0x13, 0x47, 0x61, 0x69, 0x6d, 0x6c, 0x6a, + 0x6a, 0x6b, 0x6d, 0x6d, 0x6c, 0x6b, 0x6a, 0x6b, 0x6c, 0x6d, 0x6c, 0x6c, + 0x6c, 0x6b, 0x6a, 0x69, 0x68, 0x69, 0x6a, 0x69, 0x68, 0x6a, 0x6a, 0x68, + 0x67, 0x64, 0x5f, 0x59, 0x53, 0x4e, 0x49, 0x3a, 0x22, 0x17, 0x18, 0x0c, + 0x0f, 0x09, 0xfc, 0xf7, 0xa6, 0xaa, 0xb0, 0xa9, 0xa3, 0xa4, 0xb0, 0xb9, + 0xb6, 0xb3, 0xae, 0xb5, 0xb4, 0xb0, 0xb6, 0xb3, 0xb0, 0xbb, 0xc6, 0xd1, + 0xe0, 0x06, 0x18, 0x20, 0x28, 0x28, 0x27, 0x29, 0x27, 0x26, 0x21, 0x1c, + 0x16, 0x10, 0x0f, 0x06, 0x00, 0xf9, 0xf6, 0xf6, 0xf8, 0xf9, 0xf7, 0xf5, + 0xf8, 0xf3, 0xf2, 0xf7, 0xf7, 0x02, 0x0d, 0x10, 0x19, 0x1e, 0x27, 0x2e, + 0x34, 0x3a, 0x3f, 0x3c, 0x3c, 0x3f, 0x3f, 0x3e, 0x3e, 0x40, 0x43, 0x4a, + 0x46, 0x49, 0x4e, 0x50, 0x4e, 0x52, 0x52, 0x51, 0x54, 0x54, 0x54, 0x54, + 0x54, 0x59, 0x5a, 0x5a, 0x5d, 0x60, 0x5e, 0x5c, 0x5e, 0x61, 0x65, 0x68, + 0x67, 0x65, 0x63, 0x69, 0x6a, 0x68, 0x69, 0x6a, 0x6b, 0x70, 0x6e, 0x6f, + 0x71, 0x70, 0x6e, 0x6e, 0x6f, 0x70, 0x71, 0x70, 0x72, 0x72, 0x71, 
0x73, + 0x72, 0x74, 0x73, 0x73, 0x73, 0x73, 0x73, 0x74, 0x74, 0x74, 0x72, 0x74, + 0x70, 0x6f, 0x73, 0x76, 0x76, 0x76, 0x76, 0x77, 0x78, 0x79, 0x78, 0x78, + 0x78, 0x78, 0x79, 0x79, 0x7a, 0x79, 0x78, 0x7a, 0x7a, 0x79, 0x78, 0x77, + 0x76, 0x75, 0x74, 0x70, 0x70, 0x71, 0x72, 0x72, 0x73, 0x74, 0x75, 0x75, + 0x76, 0x74, 0x71, 0x6d, 0x6a, 0x6a, 0x6c, 0x6c, 0x6d, 0x64, 0x4d, 0x38, + 0x19, 0x20, 0x52, 0x69, 0x6c, 0x6d, 0x6e, 0x6d, 0x6d, 0x6e, 0x6e, 0x6e, + 0x6c, 0x6d, 0x6d, 0x6d, 0x6f, 0x6f, 0x6e, 0x6d, 0x6c, 0x6b, 0x6d, 0x6e, + 0x6e, 0x6e, 0x6e, 0x6c, 0x6b, 0x6a, 0x6a, 0x65, 0x64, 0x63, 0x61, 0x5e, + 0x59, 0x52, 0x4c, 0x46, 0x45, 0x3e, 0x36, 0x24, 0x1f, 0x25, 0x1c, 0x10, + 0xac, 0xb9, 0xb8, 0xad, 0xa4, 0xa3, 0xa8, 0xb4, 0xb2, 0xab, 0xa6, 0xaa, + 0xb4, 0xb6, 0xb5, 0xbf, 0xb0, 0xb6, 0xbd, 0xcd, 0xde, 0x04, 0x17, 0x20, + 0x27, 0x28, 0x28, 0x28, 0x28, 0x25, 0x22, 0x1e, 0x1a, 0x11, 0x0f, 0x04, + 0xf8, 0xef, 0xf0, 0xf1, 0xf2, 0xf4, 0xf5, 0xf2, 0xf3, 0xef, 0xf1, 0xf0, + 0xf3, 0xff, 0x08, 0x06, 0x15, 0x18, 0x1f, 0x2c, 0x31, 0x37, 0x3a, 0x39, + 0x38, 0x3b, 0x3d, 0x3c, 0x3d, 0x40, 0x44, 0x49, 0x45, 0x48, 0x4c, 0x4e, + 0x4b, 0x51, 0x51, 0x52, 0x55, 0x53, 0x53, 0x53, 0x53, 0x5b, 0x5a, 0x5c, + 0x5d, 0x63, 0x5e, 0x5e, 0x60, 0x63, 0x67, 0x69, 0x69, 0x66, 0x64, 0x69, + 0x6b, 0x6b, 0x68, 0x6c, 0x6e, 0x70, 0x73, 0x70, 0x70, 0x6e, 0x6b, 0x6e, + 0x6d, 0x6e, 0x70, 0x70, 0x71, 0x71, 0x6f, 0x73, 0x74, 0x74, 0x70, 0x72, + 0x74, 0x74, 0x73, 0x74, 0x75, 0x75, 0x73, 0x74, 0x70, 0x6f, 0x71, 0x75, + 0x76, 0x76, 0x76, 0x77, 0x77, 0x76, 0x76, 0x76, 0x77, 0x78, 0x78, 0x79, + 0x7a, 0x79, 0x78, 0x79, 0x79, 0x79, 0x77, 0x76, 0x76, 0x76, 0x73, 0x72, + 0x73, 0x72, 0x71, 0x70, 0x72, 0x72, 0x74, 0x74, 0x73, 0x72, 0x72, 0x70, + 0x6f, 0x6f, 0x6d, 0x6d, 0x6d, 0x6b, 0x5f, 0x45, 0x30, 0x37, 0x54, 0x66, + 0x6a, 0x6e, 0x6f, 0x70, 0x70, 0x6f, 0x6d, 0x6c, 0x6d, 0x6e, 0x6f, 0x6f, + 0x6e, 0x6f, 0x6f, 0x6d, 0x6e, 0x6f, 0x70, 0x70, 0x6f, 0x70, 0x6e, 0x6c, + 0x69, 0x69, 0x6a, 0x6a, 0x69, 0x64, 0x60, 0x5f, 0x5e, 0x5d, 0x5a, 
0x58, + 0x56, 0x52, 0x49, 0x3f, 0x3d, 0x3d, 0x3d, 0x36, 0xaf, 0xba, 0xb6, 0xb0, + 0xa4, 0xa4, 0xa6, 0xab, 0xac, 0xa7, 0xa4, 0xa5, 0xb0, 0xb8, 0xc6, 0xd2, + 0xaf, 0xb5, 0xbc, 0xca, 0xdc, 0xfd, 0x15, 0x1d, 0x23, 0x28, 0x2c, 0x2c, + 0x29, 0x24, 0x22, 0x20, 0x1d, 0x16, 0x0b, 0xfe, 0xf4, 0xf5, 0xf6, 0xf2, + 0xf4, 0xf5, 0xf4, 0xf2, 0xef, 0xed, 0xef, 0xec, 0xf0, 0xfb, 0x00, 0xfd, + 0x0a, 0x16, 0x19, 0x29, 0x2e, 0x34, 0x36, 0x35, 0x36, 0x3c, 0x3b, 0x3a, + 0x3b, 0x40, 0x45, 0x47, 0x44, 0x46, 0x4c, 0x4d, 0x4b, 0x4e, 0x4f, 0x51, + 0x54, 0x52, 0x50, 0x52, 0x56, 0x59, 0x5a, 0x5b, 0x5e, 0x62, 0x5e, 0x5f, + 0x62, 0x66, 0x68, 0x6a, 0x69, 0x66, 0x66, 0x6a, 0x6c, 0x6f, 0x6a, 0x6d, + 0x70, 0x6f, 0x72, 0x71, 0x6f, 0x6c, 0x6a, 0x6f, 0x6d, 0x6e, 0x6f, 0x70, + 0x71, 0x71, 0x6f, 0x73, 0x74, 0x71, 0x70, 0x73, 0x75, 0x73, 0x71, 0x74, + 0x74, 0x73, 0x73, 0x74, 0x71, 0x70, 0x70, 0x72, 0x75, 0x76, 0x76, 0x77, + 0x78, 0x77, 0x76, 0x75, 0x76, 0x79, 0x78, 0x78, 0x7a, 0x7a, 0x79, 0x78, + 0x78, 0x79, 0x77, 0x76, 0x76, 0x76, 0x74, 0x73, 0x73, 0x72, 0x72, 0x70, + 0x72, 0x73, 0x74, 0x73, 0x73, 0x74, 0x73, 0x72, 0x72, 0x71, 0x70, 0x6e, + 0x6e, 0x6b, 0x6b, 0x55, 0x48, 0x44, 0x52, 0x66, 0x6d, 0x6e, 0x6e, 0x70, + 0x70, 0x6e, 0x6b, 0x6d, 0x70, 0x71, 0x71, 0x70, 0x6f, 0x6f, 0x6f, 0x70, + 0x70, 0x71, 0x70, 0x70, 0x6e, 0x6c, 0x6c, 0x6c, 0x6b, 0x6b, 0x6b, 0x6a, + 0x68, 0x67, 0x64, 0x64, 0x64, 0x64, 0x62, 0x5f, 0x5e, 0x5a, 0x59, 0x55, + 0x51, 0x4d, 0x47, 0x42, 0xaa, 0xab, 0xb0, 0xaa, 0xa4, 0xa3, 0xa6, 0xaa, + 0xb5, 0xaa, 0xa4, 0xa4, 0xaa, 0xb5, 0xc3, 0xc2, 0xad, 0xb3, 0xc0, 0xce, + 0xd5, 0xf4, 0x10, 0x1c, 0x22, 0x27, 0x2e, 0x2e, 0x29, 0x24, 0x23, 0x21, + 0x1c, 0x11, 0x08, 0xfa, 0xf4, 0xfb, 0xfa, 0xfb, 0xfd, 0xf9, 0xf4, 0xf4, + 0xf2, 0xf2, 0xf2, 0xee, 0xef, 0xf4, 0xfd, 0xf7, 0xfa, 0x0b, 0x15, 0x25, + 0x2b, 0x2f, 0x34, 0x35, 0x36, 0x3a, 0x3a, 0x37, 0x39, 0x3e, 0x41, 0x45, + 0x43, 0x46, 0x4c, 0x4a, 0x4b, 0x4c, 0x4d, 0x4f, 0x52, 0x52, 0x4e, 0x51, + 0x54, 0x56, 0x5b, 0x5b, 0x5f, 0x60, 0x60, 0x61, 0x64, 0x68, 0x69, 
0x6a, + 0x67, 0x69, 0x69, 0x6b, 0x6a, 0x6d, 0x6d, 0x6f, 0x71, 0x70, 0x70, 0x6f, + 0x70, 0x6c, 0x6d, 0x6f, 0x6e, 0x70, 0x70, 0x6f, 0x71, 0x72, 0x70, 0x71, + 0x70, 0x6f, 0x70, 0x75, 0x74, 0x70, 0x70, 0x74, 0x72, 0x70, 0x72, 0x73, + 0x70, 0x71, 0x72, 0x72, 0x71, 0x73, 0x75, 0x76, 0x78, 0x79, 0x78, 0x76, + 0x76, 0x78, 0x77, 0x79, 0x7a, 0x7a, 0x7a, 0x78, 0x78, 0x78, 0x77, 0x76, + 0x77, 0x77, 0x76, 0x72, 0x70, 0x70, 0x70, 0x70, 0x71, 0x73, 0x73, 0x74, + 0x76, 0x76, 0x76, 0x74, 0x74, 0x74, 0x73, 0x70, 0x70, 0x69, 0x69, 0x6a, + 0x5f, 0x55, 0x54, 0x68, 0x6f, 0x6f, 0x6f, 0x6f, 0x6f, 0x6d, 0x6d, 0x6f, + 0x70, 0x71, 0x71, 0x70, 0x70, 0x70, 0x70, 0x70, 0x71, 0x71, 0x70, 0x70, + 0x6d, 0x6d, 0x6f, 0x6e, 0x6c, 0x6d, 0x6b, 0x6b, 0x6a, 0x6a, 0x6a, 0x6a, + 0x69, 0x66, 0x64, 0x63, 0x61, 0x5e, 0x5b, 0x5a, 0x54, 0x4c, 0x46, 0x40, + 0xa8, 0xa8, 0xa7, 0xaa, 0xa8, 0xa3, 0xa6, 0xa7, 0xb3, 0xa8, 0xa4, 0xa7, + 0xb0, 0xb4, 0xbf, 0xbe, 0xac, 0xb6, 0xc0, 0xc5, 0xcd, 0xeb, 0x0f, 0x1c, + 0x20, 0x26, 0x29, 0x2a, 0x28, 0x26, 0x25, 0x1f, 0x16, 0x10, 0x08, 0xfa, + 0xf8, 0xff, 0x01, 0x04, 0x03, 0x00, 0xf8, 0xf8, 0xf5, 0xf5, 0xf1, 0xf2, + 0xf2, 0xf3, 0xf6, 0xf5, 0xf3, 0x01, 0x11, 0x1d, 0x28, 0x2b, 0x31, 0x32, + 0x33, 0x35, 0x34, 0x33, 0x36, 0x3e, 0x40, 0x41, 0x3f, 0x45, 0x49, 0x46, + 0x4a, 0x4c, 0x4c, 0x4e, 0x52, 0x51, 0x4d, 0x51, 0x53, 0x58, 0x5b, 0x5b, + 0x5f, 0x60, 0x60, 0x61, 0x64, 0x66, 0x66, 0x66, 0x68, 0x6a, 0x69, 0x6a, + 0x6b, 0x6b, 0x6c, 0x6e, 0x70, 0x6f, 0x6f, 0x70, 0x6e, 0x6d, 0x6e, 0x6f, + 0x70, 0x70, 0x6e, 0x6d, 0x70, 0x71, 0x70, 0x6e, 0x6f, 0x6f, 0x71, 0x75, + 0x70, 0x6e, 0x70, 0x72, 0x70, 0x6e, 0x70, 0x72, 0x6f, 0x70, 0x73, 0x73, + 0x70, 0x72, 0x74, 0x75, 0x76, 0x76, 0x78, 0x77, 0x76, 0x76, 0x77, 0x79, + 0x7a, 0x7a, 0x7a, 0x78, 0x78, 0x78, 0x76, 0x76, 0x77, 0x78, 0x78, 0x77, + 0x74, 0x72, 0x72, 0x71, 0x71, 0x72, 0x74, 0x76, 0x76, 0x76, 0x76, 0x75, + 0x74, 0x73, 0x73, 0x74, 0x72, 0x6d, 0x68, 0x6a, 0x6a, 0x64, 0x5f, 0x6a, + 0x6f, 0x70, 0x70, 0x70, 0x6f, 0x6f, 0x6e, 0x70, 0x70, 0x71, 0x71, 
0x71, + 0x71, 0x72, 0x71, 0x70, 0x70, 0x70, 0x71, 0x71, 0x6f, 0x70, 0x70, 0x70, + 0x6f, 0x6f, 0x6e, 0x6f, 0x6d, 0x6d, 0x6d, 0x6e, 0x6b, 0x6a, 0x67, 0x65, + 0x63, 0x5e, 0x58, 0x55, 0x4d, 0x46, 0x41, 0x38, 0xaa, 0xb6, 0xbf, 0xae, + 0xaa, 0xa6, 0xa5, 0xa5, 0xa9, 0xa5, 0xa4, 0xa9, 0xb4, 0xb3, 0xc0, 0xc1, + 0xb0, 0xb6, 0xb5, 0xb4, 0xc6, 0xe5, 0x0c, 0x1c, 0x22, 0x24, 0x26, 0x28, + 0x29, 0x29, 0x26, 0x1d, 0x16, 0x11, 0x0a, 0x01, 0xff, 0x03, 0x07, 0x09, + 0x07, 0x04, 0x01, 0xfe, 0xfb, 0xf9, 0xf4, 0xf5, 0xf6, 0xf4, 0xf2, 0xf1, + 0xed, 0xfb, 0x0c, 0x1a, 0x22, 0x28, 0x2d, 0x2e, 0x30, 0x30, 0x30, 0x32, + 0x35, 0x3c, 0x3f, 0x3e, 0x3e, 0x43, 0x47, 0x45, 0x49, 0x4b, 0x4d, 0x4f, + 0x51, 0x4b, 0x4d, 0x52, 0x54, 0x56, 0x58, 0x5c, 0x5e, 0x5f, 0x5e, 0x63, + 0x64, 0x62, 0x61, 0x61, 0x63, 0x62, 0x64, 0x65, 0x67, 0x69, 0x6a, 0x6a, + 0x6a, 0x6a, 0x6a, 0x6d, 0x6e, 0x70, 0x6f, 0x6e, 0x70, 0x6d, 0x6c, 0x6e, + 0x6e, 0x6b, 0x6c, 0x70, 0x70, 0x6f, 0x73, 0x71, 0x6d, 0x6d, 0x70, 0x70, + 0x6f, 0x6c, 0x6f, 0x72, 0x71, 0x70, 0x72, 0x74, 0x72, 0x74, 0x74, 0x74, + 0x74, 0x76, 0x76, 0x76, 0x76, 0x76, 0x77, 0x79, 0x7a, 0x79, 0x78, 0x78, + 0x79, 0x78, 0x76, 0x76, 0x76, 0x78, 0x78, 0x78, 0x77, 0x76, 0x74, 0x72, + 0x73, 0x74, 0x75, 0x76, 0x76, 0x76, 0x76, 0x74, 0x74, 0x73, 0x72, 0x74, + 0x74, 0x71, 0x6b, 0x69, 0x6e, 0x6c, 0x6a, 0x6c, 0x70, 0x6e, 0x6f, 0x70, + 0x70, 0x70, 0x70, 0x70, 0x71, 0x72, 0x71, 0x71, 0x71, 0x72, 0x71, 0x71, + 0x70, 0x71, 0x72, 0x72, 0x71, 0x70, 0x71, 0x70, 0x70, 0x71, 0x6f, 0x6e, + 0x6d, 0x6d, 0x6e, 0x6e, 0x6d, 0x6a, 0x69, 0x65, 0x64, 0x60, 0x59, 0x56, + 0x51, 0x48, 0x42, 0x3c, 0xb5, 0xae, 0xba, 0xbe, 0xb4, 0xb0, 0xb0, 0xa7, + 0xa5, 0xa4, 0xa3, 0xac, 0xb9, 0xb4, 0xbc, 0xbe, 0xb1, 0xb9, 0xb2, 0xb0, + 0xbe, 0xe0, 0x07, 0x17, 0x22, 0x22, 0x23, 0x24, 0x27, 0x27, 0x22, 0x1c, + 0x16, 0x15, 0x10, 0x08, 0x02, 0x05, 0x0a, 0x09, 0x0b, 0x09, 0x04, 0x03, + 0x01, 0xfd, 0xf9, 0xfa, 0xfe, 0xf9, 0xf2, 0xeb, 0xeb, 0xf6, 0x08, 0x16, + 0x21, 0x28, 0x2b, 0x2d, 0x2f, 0x2e, 0x2e, 0x31, 0x34, 0x39, 0x3d, 
0x3a, + 0x39, 0x41, 0x43, 0x42, 0x47, 0x47, 0x4d, 0x50, 0x4d, 0x48, 0x4f, 0x51, + 0x52, 0x54, 0x58, 0x5b, 0x5c, 0x5c, 0x5d, 0x5d, 0x5b, 0x59, 0x58, 0x59, + 0x5b, 0x5e, 0x5f, 0x61, 0x5f, 0x5e, 0x61, 0x60, 0x5c, 0x5b, 0x62, 0x64, + 0x64, 0x68, 0x67, 0x66, 0x6b, 0x6d, 0x6f, 0x6e, 0x6b, 0x6c, 0x6e, 0x71, + 0x70, 0x70, 0x73, 0x6f, 0x6e, 0x6e, 0x70, 0x70, 0x6f, 0x6c, 0x6d, 0x6e, + 0x70, 0x70, 0x73, 0x76, 0x75, 0x75, 0x76, 0x76, 0x73, 0x74, 0x75, 0x76, + 0x75, 0x75, 0x76, 0x77, 0x78, 0x78, 0x78, 0x78, 0x79, 0x78, 0x76, 0x75, + 0x76, 0x78, 0x78, 0x78, 0x78, 0x78, 0x76, 0x75, 0x75, 0x76, 0x76, 0x76, + 0x75, 0x75, 0x75, 0x75, 0x73, 0x73, 0x73, 0x72, 0x74, 0x74, 0x71, 0x6b, + 0x6c, 0x70, 0x6f, 0x6f, 0x70, 0x6e, 0x6f, 0x6f, 0x70, 0x71, 0x70, 0x71, + 0x72, 0x72, 0x71, 0x72, 0x72, 0x71, 0x70, 0x71, 0x71, 0x71, 0x71, 0x72, + 0x71, 0x71, 0x71, 0x70, 0x70, 0x71, 0x70, 0x6e, 0x6d, 0x6d, 0x6c, 0x6d, + 0x6c, 0x6a, 0x69, 0x67, 0x67, 0x64, 0x60, 0x5c, 0x58, 0x55, 0x52, 0x4c, + 0xb9, 0xb1, 0xae, 0xb0, 0xb8, 0xb0, 0xae, 0xa5, 0xa5, 0xa4, 0xa4, 0xae, + 0xb6, 0xb1, 0xb0, 0xb7, 0xb2, 0xb2, 0xae, 0xaa, 0xb6, 0xd7, 0x08, 0x17, + 0x22, 0x21, 0x22, 0x22, 0x26, 0x27, 0x24, 0x1c, 0x18, 0x18, 0x15, 0x0d, + 0x04, 0x04, 0x0d, 0x10, 0x0f, 0x0c, 0x09, 0x07, 0x03, 0xfd, 0xfc, 0x00, + 0xfe, 0xfa, 0xef, 0xec, 0xec, 0xf8, 0x06, 0x16, 0x1c, 0x23, 0x28, 0x2b, + 0x2a, 0x2d, 0x2f, 0x32, 0x34, 0x39, 0x3c, 0x3b, 0x3a, 0x40, 0x41, 0x44, + 0x46, 0x48, 0x4f, 0x4d, 0x4b, 0x4c, 0x4e, 0x50, 0x52, 0x55, 0x57, 0x5a, + 0x58, 0x52, 0x51, 0x50, 0x52, 0x52, 0x50, 0x50, 0x53, 0x58, 0x59, 0x5b, + 0x5b, 0x5a, 0x5d, 0x5a, 0x58, 0x5a, 0x5e, 0x5d, 0x5b, 0x5e, 0x61, 0x64, + 0x67, 0x6c, 0x6b, 0x69, 0x69, 0x6c, 0x6e, 0x70, 0x70, 0x72, 0x70, 0x70, + 0x70, 0x6e, 0x6d, 0x70, 0x70, 0x6d, 0x6d, 0x6e, 0x6e, 0x6c, 0x70, 0x73, + 0x75, 0x74, 0x75, 0x75, 0x74, 0x73, 0x74, 0x75, 0x75, 0x73, 0x76, 0x77, + 0x76, 0x77, 0x79, 0x78, 0x78, 0x78, 0x75, 0x75, 0x76, 0x77, 0x78, 0x77, + 0x78, 0x79, 0x76, 0x74, 0x72, 0x75, 0x75, 0x74, 0x73, 0x73, 0x72, 
0x74, + 0x75, 0x73, 0x72, 0x72, 0x74, 0x73, 0x73, 0x71, 0x6e, 0x6f, 0x70, 0x70, + 0x6f, 0x6e, 0x6e, 0x6e, 0x70, 0x70, 0x71, 0x72, 0x70, 0x70, 0x71, 0x72, + 0x71, 0x71, 0x72, 0x72, 0x72, 0x71, 0x71, 0x71, 0x70, 0x70, 0x6f, 0x6f, + 0x70, 0x71, 0x70, 0x70, 0x6d, 0x6b, 0x6b, 0x6d, 0x6c, 0x6b, 0x6a, 0x68, + 0x65, 0x62, 0x5e, 0x57, 0x52, 0x52, 0x4c, 0x44, 0xb0, 0xae, 0xae, 0xb2, + 0xbc, 0xb2, 0xaa, 0xa9, 0xa5, 0xa4, 0xa3, 0xa4, 0xaf, 0xc0, 0xbb, 0xbc, + 0xb5, 0xaf, 0xa9, 0xa6, 0xaf, 0xd0, 0x0a, 0x16, 0x1d, 0x1d, 0x21, 0x22, + 0x28, 0x2a, 0x26, 0x1c, 0x1b, 0x18, 0x16, 0x12, 0x09, 0x06, 0x0e, 0x11, + 0x13, 0x11, 0x0b, 0x0a, 0x05, 0x04, 0x03, 0x03, 0xfe, 0xfa, 0xf2, 0xf0, + 0xf1, 0xfc, 0x09, 0x16, 0x1b, 0x22, 0x24, 0x28, 0x28, 0x2d, 0x2f, 0x30, + 0x30, 0x37, 0x3a, 0x3a, 0x3b, 0x3d, 0x3f, 0x44, 0x46, 0x49, 0x4a, 0x4b, + 0x4c, 0x4d, 0x4c, 0x4c, 0x52, 0x55, 0x54, 0x52, 0x4b, 0x45, 0x44, 0x46, + 0x4c, 0x4d, 0x4d, 0x51, 0x54, 0x55, 0x59, 0x5d, 0x5f, 0x5e, 0x5c, 0x59, + 0x5b, 0x5e, 0x5c, 0x59, 0x5e, 0x60, 0x61, 0x64, 0x65, 0x68, 0x69, 0x68, + 0x68, 0x68, 0x6b, 0x6d, 0x6f, 0x71, 0x6f, 0x70, 0x70, 0x6e, 0x6e, 0x6f, + 0x70, 0x6e, 0x6e, 0x6f, 0x70, 0x6e, 0x6d, 0x70, 0x72, 0x72, 0x70, 0x71, + 0x73, 0x74, 0x75, 0x75, 0x76, 0x74, 0x75, 0x76, 0x76, 0x77, 0x79, 0x79, + 0x79, 0x77, 0x74, 0x75, 0x75, 0x76, 0x78, 0x77, 0x77, 0x77, 0x76, 0x73, + 0x72, 0x76, 0x74, 0x73, 0x71, 0x71, 0x70, 0x71, 0x74, 0x74, 0x73, 0x72, + 0x72, 0x72, 0x73, 0x72, 0x70, 0x70, 0x70, 0x70, 0x6f, 0x6f, 0x6f, 0x70, + 0x6f, 0x70, 0x70, 0x6f, 0x6e, 0x6e, 0x6f, 0x70, 0x71, 0x72, 0x72, 0x71, + 0x70, 0x70, 0x70, 0x70, 0x70, 0x70, 0x6f, 0x70, 0x70, 0x71, 0x71, 0x6f, + 0x6f, 0x6e, 0x6e, 0x6c, 0x6d, 0x6d, 0x6c, 0x6b, 0x68, 0x64, 0x61, 0x5c, + 0x58, 0x54, 0x4c, 0x46, 0xb4, 0xae, 0xaf, 0xb8, 0xc2, 0xc0, 0xb8, 0xaf, + 0xa6, 0xa4, 0xa4, 0xa4, 0xad, 0xce, 0xe6, 0xe8, 0xce, 0xbc, 0xb9, 0xad, + 0xac, 0xd3, 0x0f, 0x17, 0x19, 0x1c, 0x1f, 0x26, 0x28, 0x28, 0x23, 0x1f, + 0x1f, 0x1b, 0x1c, 0x16, 0x0f, 0x0a, 0x10, 0x12, 0x14, 0x12, 0x0b, 
0x0a, + 0x0b, 0x0a, 0x0a, 0x06, 0x01, 0xfe, 0xf7, 0xf3, 0xf4, 0xfd, 0x07, 0x15, + 0x1d, 0x20, 0x21, 0x26, 0x28, 0x2a, 0x2d, 0x2b, 0x2f, 0x37, 0x39, 0x3a, + 0x3b, 0x3e, 0x40, 0x46, 0x43, 0x44, 0x44, 0x48, 0x48, 0x4b, 0x4d, 0x4c, + 0x4c, 0x4c, 0x48, 0x42, 0x40, 0x40, 0x42, 0x48, 0x4b, 0x51, 0x52, 0x52, + 0x51, 0x54, 0x5d, 0x5e, 0x61, 0x5c, 0x58, 0x54, 0x58, 0x59, 0x5a, 0x5f, + 0x62, 0x62, 0x62, 0x64, 0x65, 0x66, 0x67, 0x68, 0x67, 0x63, 0x66, 0x6a, + 0x6a, 0x6f, 0x6f, 0x70, 0x70, 0x71, 0x70, 0x6e, 0x70, 0x70, 0x70, 0x6f, + 0x6f, 0x6f, 0x6e, 0x6f, 0x70, 0x71, 0x70, 0x6f, 0x70, 0x73, 0x75, 0x76, + 0x75, 0x74, 0x76, 0x76, 0x76, 0x77, 0x79, 0x7a, 0x7a, 0x78, 0x74, 0x74, + 0x75, 0x75, 0x77, 0x78, 0x77, 0x77, 0x75, 0x73, 0x74, 0x75, 0x75, 0x72, + 0x70, 0x70, 0x6f, 0x6d, 0x6f, 0x70, 0x72, 0x73, 0x71, 0x71, 0x72, 0x72, + 0x70, 0x70, 0x70, 0x6f, 0x6d, 0x6e, 0x6e, 0x6d, 0x6d, 0x6e, 0x6e, 0x6e, + 0x6d, 0x6d, 0x6e, 0x6f, 0x70, 0x71, 0x70, 0x70, 0x71, 0x71, 0x71, 0x70, + 0x70, 0x70, 0x70, 0x70, 0x71, 0x73, 0x71, 0x6f, 0x70, 0x70, 0x6f, 0x6c, + 0x6c, 0x6c, 0x6a, 0x69, 0x69, 0x65, 0x63, 0x60, 0x5d, 0x58, 0x52, 0x48, + 0xb7, 0xb0, 0xb1, 0xb2, 0xb0, 0xb3, 0xb8, 0xb2, 0xb1, 0xb0, 0xa7, 0xa8, + 0xa8, 0xa9, 0xb7, 0xc1, 0xbc, 0xbb, 0xce, 0xc3, 0xaa, 0xd2, 0x09, 0x16, + 0x17, 0x19, 0x1f, 0x26, 0x29, 0x27, 0x23, 0x22, 0x22, 0x1c, 0x1c, 0x19, + 0x13, 0x0f, 0x14, 0x16, 0x16, 0x15, 0x14, 0x10, 0x10, 0x10, 0x0c, 0x08, + 0x04, 0x04, 0xfc, 0xf6, 0xf3, 0xfe, 0x08, 0x14, 0x1e, 0x20, 0x1d, 0x23, + 0x27, 0x29, 0x28, 0x28, 0x33, 0x36, 0x37, 0x3a, 0x3b, 0x3f, 0x44, 0x43, + 0x40, 0x41, 0x42, 0x41, 0x46, 0x4b, 0x49, 0x46, 0x42, 0x44, 0x3e, 0x3d, + 0x40, 0x43, 0x47, 0x4c, 0x4f, 0x4e, 0x4a, 0x47, 0x47, 0x50, 0x5a, 0x5b, + 0x5c, 0x57, 0x54, 0x52, 0x58, 0x58, 0x5c, 0x5e, 0x5c, 0x5d, 0x61, 0x5d, + 0x5d, 0x5e, 0x5f, 0x63, 0x63, 0x5d, 0x5e, 0x61, 0x67, 0x6a, 0x6a, 0x6b, + 0x6d, 0x70, 0x6f, 0x70, 0x70, 0x71, 0x72, 0x73, 0x70, 0x70, 0x6f, 0x6e, + 0x6f, 0x70, 0x71, 0x71, 0x71, 0x74, 0x74, 0x73, 0x74, 0x74, 0x75, 
0x76, + 0x76, 0x77, 0x77, 0x79, 0x7a, 0x78, 0x75, 0x75, 0x76, 0x75, 0x76, 0x78, + 0x77, 0x77, 0x76, 0x76, 0x76, 0x76, 0x75, 0x71, 0x6f, 0x70, 0x6d, 0x6b, + 0x6c, 0x6e, 0x6f, 0x70, 0x71, 0x71, 0x71, 0x71, 0x70, 0x6e, 0x6f, 0x6e, + 0x6d, 0x6f, 0x70, 0x6e, 0x6e, 0x6e, 0x6f, 0x6e, 0x6e, 0x6d, 0x6d, 0x6e, + 0x70, 0x70, 0x71, 0x70, 0x70, 0x70, 0x70, 0x71, 0x70, 0x70, 0x70, 0x71, + 0x71, 0x71, 0x70, 0x71, 0x72, 0x72, 0x70, 0x6e, 0x6e, 0x6d, 0x6c, 0x6a, + 0x6b, 0x6a, 0x67, 0x64, 0x5f, 0x58, 0x57, 0x53, 0xb9, 0xb1, 0xb0, 0xb3, + 0xac, 0xa7, 0xa9, 0xa6, 0xa9, 0xb6, 0xb6, 0xad, 0xaa, 0xac, 0xac, 0xaa, + 0xae, 0xb2, 0xb2, 0xb7, 0xae, 0xca, 0xff, 0x14, 0x16, 0x1a, 0x1c, 0x20, + 0x24, 0x25, 0x24, 0x24, 0x22, 0x1c, 0x1e, 0x1b, 0x16, 0x13, 0x19, 0x19, + 0x1c, 0x1a, 0x17, 0x16, 0x15, 0x15, 0x0f, 0x09, 0x04, 0x07, 0xff, 0xf7, + 0xf7, 0x03, 0x0a, 0x13, 0x1a, 0x1c, 0x1a, 0x22, 0x24, 0x29, 0x25, 0x28, + 0x35, 0x37, 0x37, 0x3a, 0x3c, 0x3c, 0x3b, 0x3a, 0x3a, 0x40, 0x40, 0x45, + 0x4a, 0x4a, 0x42, 0x3c, 0x3a, 0x3a, 0x3b, 0x40, 0x45, 0x47, 0x4d, 0x4a, + 0x47, 0x45, 0x42, 0x42, 0x48, 0x52, 0x58, 0x56, 0x57, 0x55, 0x53, 0x57, + 0x59, 0x5a, 0x5b, 0x5a, 0x5e, 0x5d, 0x5e, 0x59, 0x57, 0x59, 0x5c, 0x5a, + 0x57, 0x53, 0x5a, 0x61, 0x65, 0x6a, 0x6a, 0x69, 0x6a, 0x6d, 0x6d, 0x70, + 0x6f, 0x70, 0x71, 0x73, 0x71, 0x6f, 0x6e, 0x6e, 0x70, 0x6f, 0x70, 0x71, + 0x71, 0x73, 0x73, 0x71, 0x72, 0x73, 0x74, 0x75, 0x76, 0x76, 0x77, 0x78, + 0x7a, 0x7a, 0x77, 0x76, 0x76, 0x77, 0x76, 0x76, 0x77, 0x78, 0x76, 0x76, + 0x74, 0x73, 0x72, 0x6f, 0x70, 0x6f, 0x6b, 0x6b, 0x6e, 0x6f, 0x6f, 0x70, + 0x70, 0x71, 0x70, 0x71, 0x70, 0x6d, 0x6d, 0x6d, 0x6d, 0x70, 0x70, 0x6f, + 0x6e, 0x6d, 0x6d, 0x6e, 0x6f, 0x6e, 0x6e, 0x70, 0x70, 0x71, 0x70, 0x71, + 0x70, 0x70, 0x71, 0x71, 0x70, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x72, + 0x71, 0x70, 0x70, 0x6e, 0x6d, 0x6c, 0x6b, 0x6a, 0x6a, 0x69, 0x68, 0x66, + 0x64, 0x62, 0x5f, 0x5a, 0xb0, 0xb2, 0xb0, 0xb0, 0xaa, 0xa4, 0xa5, 0xa4, + 0xa3, 0xad, 0xba, 0xb4, 0xb3, 0xb6, 0xb0, 0xb0, 0xb7, 0xbb, 0xb0, 
0xaa, + 0xaa, 0xc3, 0xf9, 0x15, 0x1b, 0x1d, 0x1e, 0x1e, 0x20, 0x22, 0x23, 0x23, + 0x20, 0x1b, 0x1c, 0x1a, 0x18, 0x16, 0x1b, 0x1c, 0x1e, 0x1d, 0x1c, 0x1b, + 0x1a, 0x17, 0x11, 0x0c, 0x05, 0x0a, 0x04, 0xf9, 0xf8, 0x05, 0x0e, 0x16, + 0x16, 0x16, 0x1a, 0x1f, 0x22, 0x26, 0x27, 0x2b, 0x31, 0x34, 0x39, 0x39, + 0x34, 0x36, 0x36, 0x38, 0x3c, 0x3a, 0x40, 0x43, 0x40, 0x3a, 0x36, 0x34, + 0x34, 0x37, 0x3e, 0x41, 0x42, 0x48, 0x48, 0x44, 0x42, 0x45, 0x46, 0x46, + 0x4e, 0x55, 0x52, 0x52, 0x53, 0x57, 0x59, 0x5d, 0x59, 0x59, 0x5a, 0x5b, + 0x5f, 0x60, 0x5f, 0x5d, 0x5e, 0x5d, 0x5b, 0x5a, 0x58, 0x51, 0x53, 0x5e, + 0x63, 0x65, 0x68, 0x67, 0x69, 0x6c, 0x6d, 0x6e, 0x6b, 0x6b, 0x6a, 0x6d, + 0x6c, 0x6c, 0x6b, 0x6c, 0x6e, 0x70, 0x71, 0x71, 0x72, 0x72, 0x72, 0x72, + 0x73, 0x75, 0x75, 0x75, 0x76, 0x76, 0x77, 0x79, 0x7a, 0x7a, 0x79, 0x78, + 0x77, 0x78, 0x77, 0x77, 0x77, 0x79, 0x77, 0x74, 0x71, 0x71, 0x70, 0x70, + 0x70, 0x6e, 0x6b, 0x6b, 0x6e, 0x70, 0x70, 0x70, 0x70, 0x70, 0x70, 0x71, + 0x71, 0x70, 0x6d, 0x6d, 0x6e, 0x6f, 0x70, 0x6f, 0x6d, 0x6d, 0x6d, 0x6d, + 0x6f, 0x70, 0x6f, 0x70, 0x70, 0x70, 0x70, 0x70, 0x70, 0x70, 0x70, 0x70, + 0x71, 0x72, 0x72, 0x72, 0x71, 0x72, 0x71, 0x71, 0x70, 0x6f, 0x6f, 0x6d, + 0x6b, 0x69, 0x66, 0x64, 0x62, 0x5e, 0x5d, 0x5d, 0x5b, 0x5d, 0x5a, 0x58, + 0xb3, 0xb2, 0xb1, 0xab, 0xaa, 0xa6, 0xa4, 0xa4, 0xa4, 0xa7, 0xb8, 0xba, + 0xba, 0xbe, 0xbd, 0xb8, 0xbc, 0xc2, 0xbc, 0xb3, 0xad, 0xbd, 0xfd, 0x19, + 0x1e, 0x20, 0x22, 0x24, 0x22, 0x22, 0x22, 0x24, 0x1f, 0x19, 0x18, 0x1c, + 0x1c, 0x18, 0x1c, 0x1d, 0x20, 0x20, 0x1f, 0x1d, 0x1c, 0x1c, 0x18, 0x10, + 0x07, 0x0a, 0x06, 0xfb, 0xfb, 0x05, 0x10, 0x14, 0x14, 0x16, 0x1b, 0x1d, + 0x1c, 0x22, 0x27, 0x2a, 0x2e, 0x36, 0x38, 0x2e, 0x2a, 0x30, 0x33, 0x35, + 0x38, 0x37, 0x35, 0x36, 0x34, 0x2f, 0x2d, 0x31, 0x38, 0x3c, 0x3f, 0x3c, + 0x3a, 0x3f, 0x41, 0x40, 0x42, 0x47, 0x49, 0x4e, 0x52, 0x53, 0x50, 0x51, + 0x53, 0x57, 0x5a, 0x5c, 0x58, 0x56, 0x5b, 0x5e, 0x5d, 0x5e, 0x62, 0x5f, + 0x5f, 0x61, 0x61, 0x5f, 0x59, 0x58, 0x54, 0x59, 0x60, 0x62, 0x65, 
0x66, + 0x66, 0x66, 0x68, 0x6a, 0x69, 0x67, 0x63, 0x64, 0x65, 0x68, 0x6a, 0x6c, + 0x6e, 0x6e, 0x70, 0x6f, 0x6f, 0x6f, 0x70, 0x71, 0x70, 0x73, 0x75, 0x75, + 0x75, 0x74, 0x76, 0x77, 0x79, 0x79, 0x79, 0x78, 0x78, 0x78, 0x77, 0x76, + 0x76, 0x78, 0x76, 0x74, 0x72, 0x71, 0x73, 0x72, 0x6e, 0x6c, 0x6b, 0x6c, + 0x6f, 0x71, 0x72, 0x71, 0x70, 0x6f, 0x6f, 0x71, 0x71, 0x70, 0x6c, 0x6c, + 0x6d, 0x6c, 0x6c, 0x6c, 0x6a, 0x6a, 0x6a, 0x6a, 0x6b, 0x6c, 0x6c, 0x6e, + 0x6e, 0x6e, 0x70, 0x71, 0x71, 0x70, 0x70, 0x70, 0x70, 0x71, 0x72, 0x70, + 0x71, 0x71, 0x70, 0x71, 0x70, 0x6f, 0x70, 0x6f, 0x6e, 0x6d, 0x6a, 0x69, + 0x67, 0x65, 0x63, 0x60, 0x5b, 0x59, 0x56, 0x52, 0xbb, 0xb4, 0xb3, 0xb0, + 0xb4, 0xb8, 0xaa, 0xa6, 0xa6, 0xaa, 0xb6, 0xbc, 0xba, 0xb9, 0xbe, 0xb6, + 0xb4, 0xb3, 0xaa, 0xa8, 0xa9, 0xc3, 0x06, 0x1b, 0x1e, 0x21, 0x20, 0x23, + 0x24, 0x22, 0x1f, 0x21, 0x1c, 0x19, 0x16, 0x18, 0x1b, 0x18, 0x1c, 0x20, + 0x22, 0x24, 0x22, 0x22, 0x1c, 0x1d, 0x18, 0x10, 0x07, 0x07, 0x04, 0xfe, + 0xfe, 0x06, 0x0e, 0x13, 0x10, 0x14, 0x1c, 0x1c, 0x1a, 0x22, 0x23, 0x28, + 0x30, 0x30, 0x28, 0x23, 0x28, 0x2d, 0x30, 0x30, 0x32, 0x2e, 0x30, 0x2f, + 0x29, 0x2a, 0x32, 0x39, 0x3b, 0x3b, 0x39, 0x33, 0x38, 0x3e, 0x3e, 0x41, + 0x45, 0x48, 0x4c, 0x52, 0x54, 0x52, 0x52, 0x4f, 0x51, 0x58, 0x5b, 0x5e, + 0x5b, 0x5a, 0x5d, 0x5e, 0x5e, 0x5f, 0x61, 0x63, 0x61, 0x63, 0x63, 0x64, + 0x60, 0x5e, 0x5d, 0x5a, 0x60, 0x62, 0x64, 0x66, 0x67, 0x67, 0x67, 0x6a, + 0x6b, 0x6a, 0x69, 0x65, 0x66, 0x69, 0x68, 0x69, 0x6c, 0x6f, 0x6d, 0x6c, + 0x6b, 0x6b, 0x6c, 0x6f, 0x6f, 0x70, 0x73, 0x74, 0x74, 0x73, 0x74, 0x76, + 0x77, 0x79, 0x79, 0x78, 0x78, 0x78, 0x79, 0x77, 0x76, 0x78, 0x77, 0x75, + 0x74, 0x74, 0x72, 0x70, 0x6e, 0x6d, 0x6a, 0x6c, 0x70, 0x72, 0x71, 0x70, + 0x6f, 0x70, 0x71, 0x70, 0x70, 0x6e, 0x6c, 0x6b, 0x6c, 0x6a, 0x6a, 0x68, + 0x69, 0x69, 0x6a, 0x6a, 0x6a, 0x6c, 0x6d, 0x6d, 0x6e, 0x6f, 0x6f, 0x70, + 0x72, 0x72, 0x71, 0x71, 0x71, 0x70, 0x70, 0x6f, 0x70, 0x70, 0x70, 0x70, + 0x70, 0x70, 0x70, 0x70, 0x70, 0x70, 0x6f, 0x6c, 0x6b, 0x6a, 0x6a, 
0x67, + 0x64, 0x62, 0x5f, 0x5e, 0xb4, 0xb1, 0xb3, 0xb6, 0xbc, 0xc7, 0xb6, 0xaa, + 0xa9, 0xab, 0xb0, 0xbb, 0xba, 0xb0, 0xaf, 0xba, 0xbb, 0xc0, 0xb1, 0xa5, + 0xad, 0xc8, 0x01, 0x1a, 0x1d, 0x22, 0x22, 0x23, 0x23, 0x22, 0x1e, 0x1c, + 0x1a, 0x1c, 0x18, 0x19, 0x16, 0x16, 0x1b, 0x21, 0x24, 0x28, 0x25, 0x23, + 0x1f, 0x1d, 0x1c, 0x12, 0x09, 0x07, 0x04, 0x00, 0x01, 0x09, 0x10, 0x11, + 0x0e, 0x12, 0x16, 0x17, 0x1b, 0x21, 0x24, 0x28, 0x2a, 0x23, 0x1e, 0x26, + 0x28, 0x26, 0x2a, 0x2d, 0x2b, 0x26, 0x29, 0x29, 0x2b, 0x2f, 0x34, 0x38, + 0x37, 0x34, 0x30, 0x35, 0x3a, 0x40, 0x41, 0x45, 0x46, 0x4a, 0x51, 0x54, + 0x53, 0x51, 0x4d, 0x4d, 0x54, 0x59, 0x5c, 0x5d, 0x5e, 0x5f, 0x5e, 0x60, + 0x62, 0x62, 0x64, 0x65, 0x62, 0x60, 0x62, 0x63, 0x66, 0x62, 0x5f, 0x5c, + 0x5e, 0x63, 0x64, 0x67, 0x6a, 0x6a, 0x6a, 0x6a, 0x6d, 0x6e, 0x6d, 0x6a, + 0x66, 0x68, 0x6a, 0x68, 0x68, 0x6c, 0x6e, 0x6b, 0x68, 0x66, 0x6a, 0x6a, + 0x6c, 0x6e, 0x70, 0x71, 0x73, 0x73, 0x74, 0x75, 0x76, 0x76, 0x77, 0x77, + 0x77, 0x77, 0x78, 0x79, 0x78, 0x79, 0x79, 0x79, 0x77, 0x76, 0x72, 0x6f, + 0x6e, 0x6a, 0x6a, 0x6e, 0x72, 0x72, 0x70, 0x70, 0x70, 0x70, 0x70, 0x6f, + 0x6f, 0x6e, 0x6d, 0x6d, 0x6c, 0x6b, 0x6a, 0x6a, 0x6b, 0x6a, 0x6a, 0x6b, + 0x6c, 0x6d, 0x6e, 0x6f, 0x70, 0x71, 0x71, 0x71, 0x72, 0x72, 0x70, 0x70, + 0x70, 0x71, 0x72, 0x71, 0x70, 0x70, 0x70, 0x70, 0x70, 0x70, 0x70, 0x70, + 0x6e, 0x6f, 0x6e, 0x6d, 0x6d, 0x6c, 0x6a, 0x68, 0x69, 0x68, 0x65, 0x63, + 0xac, 0xac, 0xb3, 0xb0, 0xb5, 0xb9, 0xb6, 0xb3, 0xad, 0xb0, 0xb0, 0xb8, + 0xb8, 0xb1, 0xa9, 0xaa, 0xb3, 0xb3, 0xab, 0xa8, 0xaf, 0xc7, 0xff, 0x1c, + 0x21, 0x26, 0x25, 0x23, 0x21, 0x22, 0x1d, 0x19, 0x17, 0x1b, 0x18, 0x18, + 0x15, 0x17, 0x1d, 0x21, 0x22, 0x25, 0x27, 0x24, 0x22, 0x1d, 0x1b, 0x15, + 0x06, 0x06, 0x03, 0x00, 0xff, 0x06, 0x0c, 0x0b, 0x0e, 0x14, 0x16, 0x18, + 0x1c, 0x20, 0x22, 0x21, 0x21, 0x1c, 0x1c, 0x22, 0x23, 0x27, 0x28, 0x27, + 0x21, 0x27, 0x2b, 0x2e, 0x30, 0x2e, 0x32, 0x36, 0x31, 0x2d, 0x2f, 0x3a, + 0x3c, 0x41, 0x46, 0x49, 0x4a, 0x4e, 0x52, 0x51, 0x50, 0x4c, 0x4c, 
0x52, + 0x58, 0x5a, 0x5b, 0x5b, 0x5e, 0x61, 0x5f, 0x5f, 0x63, 0x64, 0x64, 0x66, + 0x66, 0x62, 0x5f, 0x5f, 0x64, 0x63, 0x5e, 0x5b, 0x5e, 0x62, 0x63, 0x65, + 0x6a, 0x6b, 0x6a, 0x6a, 0x6c, 0x6d, 0x6f, 0x6f, 0x6a, 0x67, 0x6b, 0x6b, + 0x68, 0x66, 0x69, 0x6c, 0x6a, 0x66, 0x66, 0x69, 0x6b, 0x6c, 0x70, 0x6f, + 0x70, 0x71, 0x71, 0x71, 0x73, 0x74, 0x75, 0x75, 0x73, 0x74, 0x76, 0x77, + 0x78, 0x79, 0x79, 0x79, 0x79, 0x76, 0x73, 0x6f, 0x6c, 0x6a, 0x6c, 0x70, + 0x73, 0x72, 0x70, 0x70, 0x70, 0x71, 0x70, 0x6f, 0x6f, 0x6b, 0x6b, 0x6c, + 0x6d, 0x6d, 0x6d, 0x6d, 0x6c, 0x6c, 0x6d, 0x6d, 0x6c, 0x6d, 0x6d, 0x6e, + 0x70, 0x70, 0x70, 0x70, 0x70, 0x70, 0x70, 0x70, 0x6f, 0x70, 0x72, 0x71, + 0x71, 0x70, 0x70, 0x70, 0x71, 0x71, 0x70, 0x70, 0x6f, 0x6f, 0x6f, 0x6e, + 0x6e, 0x6e, 0x6c, 0x6a, 0x6a, 0x69, 0x64, 0x62, 0xaa, 0xb0, 0xbc, 0xae, + 0xa6, 0xa8, 0xae, 0xb9, 0xb2, 0xb9, 0xb5, 0xb4, 0xbb, 0xb8, 0xad, 0xb6, + 0xad, 0xab, 0xab, 0xb0, 0xb3, 0xcb, 0x03, 0x18, 0x1d, 0x27, 0x26, 0x22, + 0x1f, 0x1f, 0x1b, 0x16, 0x18, 0x1b, 0x19, 0x18, 0x16, 0x19, 0x20, 0x20, + 0x22, 0x28, 0x26, 0x24, 0x22, 0x1d, 0x19, 0x16, 0x0a, 0x06, 0x04, 0xff, + 0xff, 0x07, 0x09, 0x08, 0x0a, 0x14, 0x1b, 0x19, 0x1c, 0x1d, 0x1c, 0x1c, + 0x19, 0x16, 0x17, 0x1d, 0x23, 0x22, 0x20, 0x22, 0x25, 0x28, 0x29, 0x29, + 0x28, 0x2e, 0x34, 0x33, 0x2e, 0x2e, 0x36, 0x3a, 0x40, 0x45, 0x4b, 0x4a, + 0x4c, 0x50, 0x4e, 0x4e, 0x4e, 0x4d, 0x4d, 0x53, 0x57, 0x5c, 0x5b, 0x5d, + 0x5f, 0x63, 0x62, 0x63, 0x64, 0x64, 0x66, 0x65, 0x66, 0x66, 0x61, 0x5b, + 0x5c, 0x61, 0x60, 0x5d, 0x5b, 0x63, 0x64, 0x64, 0x67, 0x6a, 0x6a, 0x6a, + 0x6b, 0x6e, 0x6f, 0x70, 0x6f, 0x6a, 0x6a, 0x6c, 0x6a, 0x69, 0x67, 0x6b, + 0x6b, 0x69, 0x67, 0x68, 0x6b, 0x6c, 0x6f, 0x70, 0x6f, 0x70, 0x70, 0x70, + 0x73, 0x74, 0x75, 0x75, 0x71, 0x72, 0x72, 0x71, 0x73, 0x75, 0x76, 0x77, + 0x78, 0x78, 0x76, 0x71, 0x6d, 0x6c, 0x6e, 0x71, 0x73, 0x71, 0x70, 0x6e, + 0x6e, 0x6f, 0x6f, 0x6e, 0x6e, 0x6c, 0x6c, 0x6c, 0x6d, 0x70, 0x6f, 0x6b, + 0x6a, 0x6a, 0x6a, 0x6b, 0x6d, 0x6f, 0x6e, 0x6f, 0x70, 0x70, 0x6f, 
0x6f, + 0x6f, 0x70, 0x70, 0x70, 0x70, 0x70, 0x70, 0x71, 0x70, 0x70, 0x70, 0x70, + 0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x6f, 0x6d, 0x6e, 0x6f, 0x6e, 0x6b, + 0x6b, 0x69, 0x65, 0x63, 0xb6, 0xb4, 0xb9, 0xb2, 0xa6, 0xa6, 0xb4, 0xc1, + 0xb9, 0xba, 0xb4, 0xb4, 0xbb, 0xba, 0xb7, 0xc1, 0xbc, 0xab, 0xa8, 0xaf, + 0xb4, 0xce, 0x06, 0x1c, 0x1f, 0x27, 0x27, 0x20, 0x22, 0x1e, 0x19, 0x17, + 0x16, 0x18, 0x16, 0x16, 0x16, 0x1c, 0x20, 0x22, 0x21, 0x24, 0x25, 0x22, + 0x20, 0x19, 0x15, 0x13, 0x0a, 0x03, 0x03, 0xff, 0x00, 0x08, 0x06, 0x05, + 0x0a, 0x16, 0x19, 0x16, 0x18, 0x19, 0x16, 0x16, 0x11, 0x16, 0x1c, 0x20, + 0x1d, 0x1d, 0x22, 0x27, 0x28, 0x27, 0x25, 0x22, 0x26, 0x2e, 0x2e, 0x2c, + 0x2f, 0x34, 0x3a, 0x3b, 0x40, 0x45, 0x47, 0x46, 0x4e, 0x4d, 0x4b, 0x4c, + 0x4d, 0x51, 0x51, 0x58, 0x59, 0x5c, 0x5b, 0x5e, 0x60, 0x62, 0x63, 0x64, + 0x66, 0x65, 0x67, 0x66, 0x66, 0x66, 0x65, 0x5c, 0x5a, 0x5f, 0x5f, 0x5f, + 0x5b, 0x61, 0x64, 0x65, 0x67, 0x67, 0x69, 0x6a, 0x6b, 0x6d, 0x6e, 0x6f, + 0x6f, 0x6d, 0x6b, 0x6b, 0x6e, 0x6d, 0x6a, 0x6c, 0x6a, 0x6a, 0x6a, 0x6a, + 0x6c, 0x6c, 0x6d, 0x70, 0x70, 0x72, 0x72, 0x70, 0x72, 0x74, 0x76, 0x76, + 0x71, 0x70, 0x70, 0x70, 0x6f, 0x71, 0x73, 0x74, 0x76, 0x77, 0x78, 0x76, + 0x71, 0x6c, 0x6d, 0x72, 0x71, 0x70, 0x71, 0x70, 0x6f, 0x6e, 0x6c, 0x6e, + 0x6e, 0x6c, 0x6e, 0x70, 0x70, 0x70, 0x6f, 0x6c, 0x6a, 0x66, 0x65, 0x69, + 0x6b, 0x6f, 0x6f, 0x70, 0x70, 0x70, 0x6f, 0x70, 0x70, 0x70, 0x70, 0x70, + 0x70, 0x70, 0x71, 0x71, 0x70, 0x70, 0x70, 0x71, 0x71, 0x71, 0x72, 0x71, + 0x70, 0x70, 0x6f, 0x6e, 0x6e, 0x6d, 0x6d, 0x6a, 0x6a, 0x68, 0x67, 0x66, + 0xb7, 0xb2, 0xb2, 0xb0, 0xaf, 0xab, 0xb1, 0xcb, 0xce, 0xbd, 0xb7, 0xb5, + 0xba, 0xbc, 0xbc, 0xc4, 0xc1, 0xac, 0xad, 0xab, 0xb4, 0xd2, 0x0d, 0x21, + 0x21, 0x24, 0x20, 0x21, 0x22, 0x1d, 0x1a, 0x18, 0x16, 0x17, 0x17, 0x13, + 0x14, 0x19, 0x20, 0x20, 0x22, 0x23, 0x23, 0x22, 0x22, 0x1c, 0x15, 0x13, + 0x09, 0xfd, 0xfe, 0xfe, 0xfe, 0x02, 0xff, 0x04, 0x0e, 0x16, 0x14, 0x14, + 0x14, 0x11, 0x0e, 0x0d, 0x0f, 0x11, 0x10, 0x16, 0x1f, 0x21, 0x22, 
0x21, + 0x20, 0x1e, 0x1e, 0x22, 0x28, 0x2d, 0x2a, 0x2c, 0x30, 0x3a, 0x3e, 0x3f, + 0x41, 0x44, 0x42, 0x47, 0x4e, 0x4a, 0x49, 0x4c, 0x4d, 0x52, 0x54, 0x59, + 0x5c, 0x5b, 0x5c, 0x5e, 0x60, 0x64, 0x63, 0x63, 0x66, 0x65, 0x67, 0x64, + 0x65, 0x64, 0x64, 0x60, 0x59, 0x5e, 0x60, 0x63, 0x62, 0x61, 0x65, 0x65, + 0x67, 0x67, 0x67, 0x6a, 0x6b, 0x6c, 0x6c, 0x6d, 0x6e, 0x6c, 0x6b, 0x6b, + 0x6d, 0x6d, 0x6d, 0x6c, 0x6c, 0x6c, 0x6d, 0x6b, 0x6b, 0x6d, 0x6d, 0x6f, + 0x71, 0x72, 0x73, 0x72, 0x70, 0x75, 0x77, 0x76, 0x72, 0x70, 0x70, 0x70, + 0x70, 0x71, 0x72, 0x72, 0x73, 0x75, 0x77, 0x76, 0x76, 0x71, 0x70, 0x72, + 0x70, 0x70, 0x70, 0x70, 0x70, 0x6c, 0x6b, 0x6c, 0x6e, 0x6b, 0x6d, 0x6f, + 0x70, 0x70, 0x6e, 0x6a, 0x69, 0x66, 0x65, 0x65, 0x67, 0x6a, 0x6e, 0x6f, + 0x6f, 0x6e, 0x6e, 0x6e, 0x6f, 0x70, 0x70, 0x70, 0x70, 0x71, 0x71, 0x72, + 0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x72, 0x71, 0x70, 0x70, 0x6f, 0x6e, + 0x6d, 0x6d, 0x6e, 0x6d, 0x6b, 0x6a, 0x69, 0x67, 0xbd, 0xb6, 0xb4, 0xb2, + 0xb6, 0xb6, 0xb0, 0xbb, 0xc2, 0xc4, 0xce, 0xc1, 0xb7, 0xb6, 0xb8, 0xbb, + 0xb7, 0xad, 0xaf, 0xad, 0xb3, 0xe0, 0x18, 0x27, 0x24, 0x21, 0x21, 0x22, + 0x1e, 0x1d, 0x1b, 0x18, 0x19, 0x1a, 0x16, 0x16, 0x14, 0x18, 0x1e, 0x1e, + 0x23, 0x22, 0x22, 0x22, 0x1e, 0x1c, 0x15, 0x10, 0x0a, 0xfc, 0xfd, 0xf8, + 0xf7, 0xfa, 0xfc, 0x04, 0x10, 0x14, 0x11, 0x10, 0x10, 0x0f, 0x05, 0x07, + 0x0a, 0x0d, 0x10, 0x1c, 0x1f, 0x1a, 0x1a, 0x1c, 0x1a, 0x1f, 0x1f, 0x22, + 0x2b, 0x2b, 0x29, 0x2a, 0x31, 0x3d, 0x3f, 0x40, 0x41, 0x40, 0x41, 0x49, + 0x4c, 0x49, 0x4c, 0x4f, 0x4e, 0x56, 0x58, 0x5c, 0x5c, 0x5c, 0x5e, 0x5a, + 0x5e, 0x61, 0x63, 0x64, 0x65, 0x64, 0x64, 0x64, 0x65, 0x64, 0x63, 0x63, + 0x5d, 0x5b, 0x5f, 0x63, 0x64, 0x64, 0x64, 0x65, 0x67, 0x67, 0x66, 0x67, + 0x6a, 0x6a, 0x6d, 0x6d, 0x6b, 0x6c, 0x6b, 0x6b, 0x6d, 0x6f, 0x70, 0x70, + 0x70, 0x70, 0x6f, 0x6d, 0x6a, 0x6c, 0x6d, 0x6e, 0x70, 0x70, 0x70, 0x72, + 0x70, 0x74, 0x76, 0x76, 0x6f, 0x70, 0x71, 0x71, 0x71, 0x71, 0x73, 0x71, + 0x70, 0x70, 0x74, 0x76, 0x75, 0x73, 0x72, 0x72, 0x70, 0x70, 0x6f, 
0x6f, + 0x6f, 0x6b, 0x6a, 0x6b, 0x6d, 0x6a, 0x6b, 0x6d, 0x70, 0x70, 0x6e, 0x69, + 0x67, 0x68, 0x64, 0x63, 0x64, 0x65, 0x68, 0x6a, 0x6b, 0x6b, 0x6b, 0x6b, + 0x6e, 0x6f, 0x6f, 0x70, 0x70, 0x71, 0x70, 0x70, 0x70, 0x70, 0x70, 0x70, + 0x71, 0x71, 0x71, 0x70, 0x6f, 0x6f, 0x70, 0x6e, 0x6e, 0x6e, 0x6e, 0x6f, + 0x6c, 0x6a, 0x69, 0x69, 0xad, 0xb4, 0xc7, 0xbf, 0xb7, 0xb0, 0xaf, 0xaf, + 0xb7, 0xcc, 0xce, 0xbb, 0xae, 0xb5, 0xb4, 0xb6, 0xb3, 0xb1, 0xba, 0xbb, + 0xb1, 0xf0, 0x22, 0x29, 0x29, 0x23, 0x21, 0x20, 0x1b, 0x1d, 0x1f, 0x1d, + 0x1b, 0x1c, 0x17, 0x15, 0x18, 0x1b, 0x1d, 0x1f, 0x22, 0x24, 0x20, 0x1e, + 0x1c, 0x1c, 0x13, 0x0e, 0x08, 0xfd, 0xfc, 0xf2, 0xef, 0xf4, 0xf9, 0x04, + 0x10, 0x10, 0x0e, 0x0f, 0x0a, 0x05, 0x01, 0x07, 0x08, 0x0d, 0x16, 0x1b, + 0x16, 0x10, 0x15, 0x16, 0x18, 0x1c, 0x1c, 0x26, 0x29, 0x28, 0x28, 0x2d, + 0x38, 0x3e, 0x3d, 0x3c, 0x3e, 0x3b, 0x41, 0x48, 0x47, 0x49, 0x4f, 0x52, + 0x50, 0x58, 0x59, 0x5d, 0x5d, 0x5e, 0x5d, 0x58, 0x5e, 0x61, 0x62, 0x64, + 0x64, 0x64, 0x64, 0x62, 0x60, 0x64, 0x63, 0x5e, 0x5f, 0x5e, 0x5d, 0x5f, + 0x62, 0x64, 0x65, 0x65, 0x66, 0x67, 0x67, 0x66, 0x69, 0x6a, 0x6c, 0x6e, + 0x6a, 0x6b, 0x6d, 0x6d, 0x6d, 0x6e, 0x6f, 0x71, 0x70, 0x70, 0x70, 0x6e, + 0x6a, 0x6b, 0x6e, 0x6d, 0x6e, 0x6e, 0x6f, 0x71, 0x72, 0x74, 0x74, 0x75, + 0x6d, 0x6f, 0x72, 0x71, 0x72, 0x71, 0x74, 0x72, 0x71, 0x6f, 0x70, 0x74, + 0x74, 0x72, 0x72, 0x71, 0x70, 0x70, 0x70, 0x6e, 0x6c, 0x6b, 0x6a, 0x6a, + 0x6c, 0x6a, 0x6a, 0x6c, 0x6e, 0x6f, 0x6e, 0x68, 0x65, 0x68, 0x69, 0x64, + 0x64, 0x64, 0x64, 0x66, 0x67, 0x68, 0x6a, 0x6a, 0x6c, 0x6e, 0x6f, 0x6f, + 0x6f, 0x70, 0x70, 0x6e, 0x6e, 0x6e, 0x6d, 0x6f, 0x70, 0x70, 0x70, 0x70, + 0x6f, 0x6f, 0x6f, 0x70, 0x6e, 0x6e, 0x6e, 0x6d, 0x6c, 0x6b, 0x6a, 0x69, + 0xa8, 0xb1, 0xb8, 0xbb, 0xb5, 0xb0, 0xb3, 0xb3, 0xb0, 0xbd, 0xc2, 0xb1, + 0xb1, 0xb6, 0xb4, 0xb4, 0xb2, 0xb6, 0xbc, 0xba, 0xba, 0x04, 0x22, 0x28, + 0x2d, 0x27, 0x22, 0x1e, 0x19, 0x1d, 0x1d, 0x1f, 0x1d, 0x1f, 0x1b, 0x16, + 0x18, 0x1c, 0x1d, 0x1f, 0x1f, 0x22, 0x20, 0x1d, 0x1c, 0x1a, 0x16, 
0x0b, + 0x04, 0xfb, 0xf9, 0xec, 0xe8, 0xf1, 0xfa, 0x04, 0x0e, 0x0b, 0x0c, 0x09, + 0x04, 0x00, 0x01, 0x03, 0x04, 0x0d, 0x12, 0x16, 0x13, 0x10, 0x13, 0x14, + 0x16, 0x14, 0x1d, 0x27, 0x28, 0x27, 0x29, 0x31, 0x3b, 0x3f, 0x3b, 0x3b, + 0x3a, 0x3c, 0x45, 0x4c, 0x47, 0x4c, 0x52, 0x55, 0x52, 0x58, 0x59, 0x5c, + 0x5f, 0x5f, 0x5b, 0x58, 0x5d, 0x5f, 0x5f, 0x62, 0x60, 0x61, 0x60, 0x5f, + 0x5b, 0x61, 0x64, 0x5d, 0x5a, 0x5d, 0x60, 0x5e, 0x61, 0x63, 0x65, 0x64, + 0x64, 0x65, 0x65, 0x65, 0x68, 0x69, 0x6b, 0x6d, 0x6b, 0x6d, 0x6c, 0x6d, + 0x6e, 0x6f, 0x6f, 0x70, 0x70, 0x70, 0x70, 0x70, 0x6b, 0x6b, 0x6e, 0x6c, + 0x6c, 0x6d, 0x6e, 0x70, 0x73, 0x73, 0x71, 0x71, 0x6a, 0x6d, 0x72, 0x71, + 0x73, 0x72, 0x75, 0x74, 0x71, 0x71, 0x70, 0x74, 0x76, 0x73, 0x70, 0x70, + 0x6f, 0x6e, 0x6e, 0x6e, 0x6a, 0x69, 0x67, 0x69, 0x6b, 0x6a, 0x69, 0x6a, + 0x6c, 0x6e, 0x6e, 0x6a, 0x66, 0x69, 0x6a, 0x69, 0x69, 0x67, 0x65, 0x64, + 0x63, 0x65, 0x68, 0x6a, 0x6c, 0x6d, 0x6d, 0x6e, 0x6e, 0x6f, 0x6f, 0x6f, + 0x6f, 0x6f, 0x6e, 0x6c, 0x6e, 0x70, 0x6f, 0x6f, 0x6f, 0x70, 0x70, 0x6f, + 0x6f, 0x6e, 0x6d, 0x6d, 0x6d, 0x6d, 0x6a, 0x6a, 0xb6, 0xb6, 0xaf, 0xaf, + 0xae, 0xab, 0xb9, 0xc3, 0xbd, 0xba, 0xb5, 0xb6, 0xb9, 0xb8, 0xb9, 0xb9, + 0xba, 0xbd, 0xbc, 0xb0, 0xc5, 0x0c, 0x23, 0x26, 0x29, 0x28, 0x22, 0x20, + 0x1c, 0x1d, 0x1d, 0x1c, 0x1f, 0x1f, 0x1b, 0x18, 0x1b, 0x1c, 0x1d, 0x1f, + 0x1d, 0x20, 0x23, 0x1a, 0x17, 0x14, 0x15, 0x0c, 0x03, 0xfd, 0xf2, 0xe7, + 0xe6, 0xf0, 0xf8, 0xff, 0x06, 0x07, 0x05, 0x03, 0xfe, 0xf9, 0xfd, 0xfe, + 0x05, 0x10, 0x10, 0x14, 0x17, 0x14, 0x0d, 0x10, 0x10, 0x16, 0x22, 0x25, + 0x25, 0x23, 0x28, 0x34, 0x3a, 0x3a, 0x3b, 0x3c, 0x3a, 0x3e, 0x47, 0x4c, + 0x48, 0x4d, 0x53, 0x54, 0x52, 0x57, 0x58, 0x5b, 0x5f, 0x5e, 0x5a, 0x5b, + 0x59, 0x5b, 0x5e, 0x5e, 0x5d, 0x5c, 0x5c, 0x5e, 0x5c, 0x5c, 0x60, 0x60, + 0x59, 0x58, 0x5e, 0x5e, 0x61, 0x63, 0x64, 0x64, 0x64, 0x64, 0x64, 0x65, + 0x67, 0x69, 0x68, 0x6b, 0x6b, 0x6c, 0x6c, 0x6b, 0x6d, 0x6f, 0x70, 0x70, + 0x70, 0x70, 0x70, 0x70, 0x6c, 0x6b, 0x6d, 0x6a, 0x6a, 0x6c, 0x6d, 
0x6f, + 0x71, 0x74, 0x70, 0x6e, 0x68, 0x6b, 0x72, 0x72, 0x72, 0x73, 0x75, 0x74, + 0x72, 0x70, 0x6e, 0x70, 0x72, 0x70, 0x6f, 0x6f, 0x6c, 0x6d, 0x6c, 0x6c, + 0x6a, 0x69, 0x64, 0x66, 0x6a, 0x6a, 0x66, 0x67, 0x6a, 0x6c, 0x6d, 0x6a, + 0x67, 0x66, 0x67, 0x69, 0x6a, 0x6a, 0x69, 0x66, 0x63, 0x61, 0x63, 0x65, + 0x6a, 0x6b, 0x6c, 0x6d, 0x6d, 0x6d, 0x6d, 0x6d, 0x6e, 0x6e, 0x70, 0x6e, + 0x6d, 0x6f, 0x6e, 0x70, 0x6f, 0x6f, 0x6f, 0x6d, 0x6e, 0x6e, 0x6d, 0x6d, + 0x6d, 0x6e, 0x6b, 0x6a, 0xb0, 0xab, 0xb1, 0xb3, 0xb4, 0xb1, 0xc0, 0xd1, + 0xdc, 0xcf, 0xbd, 0xb6, 0xb7, 0xb8, 0xb7, 0xba, 0xb6, 0xb1, 0xbb, 0xb2, + 0xcd, 0x11, 0x22, 0x22, 0x24, 0x25, 0x23, 0x1f, 0x1d, 0x1f, 0x1f, 0x1e, + 0x1f, 0x20, 0x1c, 0x1b, 0x1b, 0x1c, 0x1c, 0x1e, 0x1e, 0x1e, 0x21, 0x1a, + 0x13, 0x13, 0x11, 0x0b, 0x01, 0xfc, 0xef, 0xe2, 0xe5, 0xea, 0xf1, 0xfa, + 0x04, 0x04, 0xfe, 0xf4, 0xf4, 0xf4, 0xf8, 0xfe, 0x07, 0x0c, 0x0f, 0x13, + 0x16, 0x0d, 0x07, 0x0c, 0x10, 0x1a, 0x21, 0x25, 0x21, 0x26, 0x2d, 0x34, + 0x36, 0x36, 0x3a, 0x3a, 0x3a, 0x40, 0x48, 0x4d, 0x4b, 0x4e, 0x53, 0x54, + 0x53, 0x54, 0x55, 0x58, 0x5b, 0x58, 0x54, 0x58, 0x58, 0x58, 0x5c, 0x59, + 0x57, 0x56, 0x57, 0x5b, 0x59, 0x5a, 0x5e, 0x5f, 0x5a, 0x55, 0x58, 0x5b, + 0x5e, 0x62, 0x64, 0x63, 0x60, 0x64, 0x65, 0x64, 0x66, 0x69, 0x68, 0x6a, + 0x6a, 0x6c, 0x6a, 0x6a, 0x6a, 0x6e, 0x6f, 0x70, 0x70, 0x70, 0x70, 0x70, + 0x6b, 0x6b, 0x6d, 0x6b, 0x6a, 0x6b, 0x6c, 0x6c, 0x70, 0x72, 0x6d, 0x6b, + 0x68, 0x6a, 0x71, 0x71, 0x71, 0x72, 0x73, 0x74, 0x72, 0x70, 0x6c, 0x6a, + 0x6e, 0x6e, 0x6d, 0x6c, 0x6a, 0x6a, 0x6a, 0x6a, 0x69, 0x67, 0x60, 0x64, + 0x6a, 0x6a, 0x67, 0x65, 0x68, 0x6a, 0x6c, 0x6b, 0x67, 0x64, 0x67, 0x68, + 0x6a, 0x6b, 0x6b, 0x6a, 0x68, 0x65, 0x64, 0x65, 0x65, 0x68, 0x6a, 0x6d, + 0x6c, 0x6d, 0x6e, 0x6c, 0x6d, 0x6e, 0x6e, 0x6c, 0x6c, 0x6e, 0x6e, 0x6e, + 0x6e, 0x6e, 0x6e, 0x6d, 0x6e, 0x6d, 0x6d, 0x6d, 0x6c, 0x6c, 0x6b, 0x6b, + 0xb2, 0xa7, 0xac, 0xb0, 0xb9, 0xb1, 0xc8, 0xcd, 0xc9, 0xbb, 0xb7, 0xb6, + 0xb5, 0xb7, 0xbb, 0xbc, 0xaf, 0xa7, 0xb3, 0xba, 0xd3, 0x12, 0x23, 
0x24, + 0x24, 0x25, 0x24, 0x1e, 0x1e, 0x20, 0x1f, 0x20, 0x20, 0x1f, 0x1c, 0x1b, + 0x1a, 0x1c, 0x1c, 0x1d, 0x1c, 0x1b, 0x1c, 0x19, 0x0d, 0x0e, 0x10, 0x0e, + 0x04, 0xfb, 0xea, 0xdb, 0xdf, 0xe3, 0xe8, 0xf3, 0xfd, 0xfa, 0xf2, 0xed, + 0xea, 0xef, 0xf3, 0xfa, 0x03, 0x0b, 0x0e, 0x16, 0x11, 0x05, 0x07, 0x0a, + 0x10, 0x18, 0x1e, 0x1f, 0x1d, 0x27, 0x2e, 0x33, 0x32, 0x39, 0x3a, 0x38, + 0x3b, 0x44, 0x4a, 0x4d, 0x4c, 0x4d, 0x52, 0x53, 0x50, 0x52, 0x51, 0x52, + 0x58, 0x58, 0x50, 0x53, 0x55, 0x53, 0x58, 0x55, 0x50, 0x53, 0x55, 0x56, + 0x56, 0x58, 0x5b, 0x5c, 0x5a, 0x55, 0x57, 0x5b, 0x5e, 0x5e, 0x5e, 0x62, + 0x61, 0x61, 0x64, 0x67, 0x64, 0x68, 0x67, 0x68, 0x6a, 0x6b, 0x6b, 0x6b, + 0x6a, 0x6e, 0x6f, 0x70, 0x70, 0x70, 0x71, 0x71, 0x6a, 0x6a, 0x6c, 0x6a, + 0x69, 0x6a, 0x6a, 0x6a, 0x6e, 0x72, 0x6e, 0x6a, 0x69, 0x6b, 0x73, 0x74, + 0x72, 0x73, 0x73, 0x71, 0x70, 0x71, 0x6b, 0x68, 0x6b, 0x6b, 0x6b, 0x6a, + 0x6a, 0x69, 0x69, 0x6a, 0x68, 0x64, 0x5e, 0x64, 0x68, 0x6a, 0x66, 0x64, + 0x67, 0x69, 0x6a, 0x6d, 0x67, 0x63, 0x64, 0x68, 0x68, 0x6a, 0x6b, 0x6a, + 0x6a, 0x69, 0x66, 0x65, 0x66, 0x67, 0x68, 0x6a, 0x6c, 0x6c, 0x6b, 0x6a, + 0x6b, 0x6c, 0x6c, 0x6c, 0x6b, 0x6c, 0x6e, 0x6e, 0x6e, 0x6d, 0x6e, 0x6f, + 0x6f, 0x6e, 0x6d, 0x6c, 0x6c, 0x6b, 0x6c, 0x6a, 0xba, 0xb1, 0xa8, 0xaa, + 0xac, 0xad, 0xb9, 0xb7, 0xb5, 0xb3, 0xb8, 0xb6, 0xb8, 0xbb, 0xb7, 0xb5, + 0xb0, 0xa6, 0xac, 0xb7, 0xcd, 0x16, 0x26, 0x28, 0x27, 0x23, 0x24, 0x22, + 0x21, 0x20, 0x1f, 0x21, 0x20, 0x1e, 0x1c, 0x1b, 0x1c, 0x1e, 0x1c, 0x1a, + 0x1c, 0x18, 0x19, 0x15, 0x0e, 0x08, 0x0a, 0x09, 0x02, 0xf8, 0xe5, 0xd8, + 0xdb, 0xdc, 0xe6, 0xef, 0xf0, 0xe9, 0xe9, 0xe3, 0xe6, 0xed, 0xee, 0xf8, + 0x07, 0x09, 0x0b, 0x11, 0x09, 0x05, 0x06, 0x08, 0x0c, 0x14, 0x1b, 0x1d, + 0x1d, 0x23, 0x2e, 0x31, 0x30, 0x3a, 0x3a, 0x39, 0x3f, 0x45, 0x4a, 0x4b, + 0x4c, 0x4c, 0x51, 0x52, 0x4c, 0x4e, 0x4c, 0x4c, 0x53, 0x54, 0x4d, 0x4d, + 0x51, 0x51, 0x54, 0x52, 0x4c, 0x4e, 0x53, 0x53, 0x50, 0x52, 0x58, 0x5c, + 0x5a, 0x56, 0x55, 0x58, 0x5a, 0x5c, 0x5d, 0x5e, 0x62, 0x62, 0x61, 
0x65, + 0x65, 0x66, 0x66, 0x67, 0x68, 0x6a, 0x6a, 0x6b, 0x6a, 0x6d, 0x6f, 0x6f, + 0x70, 0x70, 0x70, 0x70, 0x6b, 0x6a, 0x6d, 0x6a, 0x68, 0x6a, 0x69, 0x69, + 0x6b, 0x6f, 0x6e, 0x6b, 0x68, 0x6a, 0x71, 0x73, 0x70, 0x72, 0x72, 0x6f, + 0x6e, 0x6f, 0x6a, 0x68, 0x6a, 0x69, 0x6a, 0x68, 0x67, 0x66, 0x68, 0x69, + 0x67, 0x61, 0x5b, 0x62, 0x66, 0x67, 0x64, 0x64, 0x65, 0x68, 0x6a, 0x6b, + 0x6a, 0x64, 0x61, 0x64, 0x65, 0x67, 0x69, 0x6a, 0x6a, 0x6a, 0x6a, 0x66, + 0x66, 0x67, 0x68, 0x6a, 0x6c, 0x6b, 0x6a, 0x6a, 0x6a, 0x6a, 0x6b, 0x6c, + 0x6c, 0x6c, 0x6e, 0x6f, 0x6e, 0x6e, 0x6e, 0x6e, 0x6f, 0x6e, 0x6d, 0x6c, + 0x6a, 0x6a, 0x6a, 0x6a, 0xbb, 0xb6, 0xa9, 0xb6, 0xbf, 0xb9, 0xb3, 0xb6, + 0xc1, 0xb7, 0xb6, 0xb7, 0xbe, 0xb6, 0xae, 0xb6, 0xb0, 0xaa, 0xaf, 0xb5, + 0xc1, 0x12, 0x22, 0x24, 0x29, 0x26, 0x21, 0x21, 0x20, 0x1d, 0x1e, 0x1f, + 0x21, 0x20, 0x1e, 0x1b, 0x1d, 0x1c, 0x1c, 0x16, 0x18, 0x18, 0x15, 0x10, + 0x0f, 0x07, 0x03, 0x03, 0xfe, 0xf0, 0xe4, 0xd8, 0xd4, 0xdc, 0xe6, 0xe2, + 0xe4, 0xe1, 0xde, 0xdf, 0xe4, 0xea, 0xee, 0xfd, 0x04, 0x04, 0x0c, 0x10, + 0x05, 0x04, 0x04, 0x08, 0x0a, 0x0f, 0x18, 0x1c, 0x1c, 0x21, 0x2c, 0x2e, + 0x2e, 0x39, 0x38, 0x3a, 0x41, 0x45, 0x4a, 0x4a, 0x4a, 0x4a, 0x4c, 0x4c, + 0x48, 0x48, 0x46, 0x44, 0x4b, 0x4e, 0x4a, 0x48, 0x4d, 0x4e, 0x51, 0x4e, + 0x4d, 0x4d, 0x4f, 0x51, 0x4f, 0x4e, 0x52, 0x57, 0x59, 0x54, 0x51, 0x56, + 0x5b, 0x5b, 0x5c, 0x5a, 0x5f, 0x62, 0x5e, 0x62, 0x64, 0x67, 0x65, 0x67, + 0x68, 0x69, 0x69, 0x6a, 0x68, 0x6a, 0x6d, 0x6f, 0x70, 0x70, 0x71, 0x70, + 0x6b, 0x69, 0x6e, 0x6b, 0x69, 0x6a, 0x68, 0x68, 0x6a, 0x6a, 0x6b, 0x6a, + 0x69, 0x69, 0x70, 0x72, 0x70, 0x70, 0x70, 0x6e, 0x6c, 0x6e, 0x69, 0x68, + 0x69, 0x67, 0x68, 0x65, 0x63, 0x65, 0x66, 0x65, 0x64, 0x5c, 0x57, 0x5e, + 0x64, 0x65, 0x61, 0x64, 0x64, 0x65, 0x67, 0x69, 0x69, 0x66, 0x61, 0x60, + 0x63, 0x65, 0x66, 0x69, 0x6a, 0x68, 0x6a, 0x69, 0x68, 0x67, 0x68, 0x6a, + 0x6b, 0x6c, 0x6b, 0x6a, 0x69, 0x6a, 0x6a, 0x6b, 0x6d, 0x6c, 0x6c, 0x6f, + 0x70, 0x6f, 0x6d, 0x6d, 0x6c, 0x6a, 0x6a, 0x6a, 0x6a, 0x69, 0x68, 
0x68, + 0xbb, 0xb8, 0xa9, 0xb5, 0xc0, 0xc0, 0xbe, 0xc7, 0xc1, 0xb6, 0xb8, 0xbf, + 0xc2, 0xb8, 0xb1, 0xb3, 0xaf, 0xaf, 0xb1, 0xb5, 0xbc, 0x0a, 0x1f, 0x1e, + 0x24, 0x28, 0x20, 0x20, 0x21, 0x1f, 0x1c, 0x1c, 0x20, 0x22, 0x20, 0x1d, + 0x1d, 0x1b, 0x19, 0x14, 0x0f, 0x16, 0x13, 0x0c, 0x08, 0x06, 0xff, 0xfe, + 0xf9, 0xec, 0xe5, 0xd5, 0xd3, 0xdb, 0xd6, 0xd2, 0xdc, 0xd7, 0xd6, 0xdc, + 0xe0, 0xe7, 0xed, 0xfe, 0x03, 0x04, 0x0c, 0x06, 0xff, 0xff, 0x01, 0x04, + 0x05, 0x0f, 0x19, 0x17, 0x17, 0x20, 0x29, 0x27, 0x2e, 0x37, 0x3a, 0x3b, + 0x42, 0x46, 0x4a, 0x46, 0x46, 0x47, 0x4a, 0x46, 0x40, 0x3f, 0x3d, 0x3c, + 0x40, 0x46, 0x44, 0x43, 0x43, 0x46, 0x49, 0x4b, 0x4c, 0x4d, 0x4c, 0x4c, + 0x4c, 0x49, 0x4c, 0x52, 0x55, 0x54, 0x4e, 0x54, 0x5a, 0x59, 0x58, 0x58, + 0x5c, 0x5f, 0x5e, 0x5e, 0x63, 0x66, 0x65, 0x65, 0x66, 0x69, 0x69, 0x69, + 0x68, 0x68, 0x6a, 0x6d, 0x70, 0x70, 0x70, 0x6f, 0x6b, 0x68, 0x6d, 0x6a, + 0x6a, 0x6a, 0x67, 0x69, 0x6c, 0x69, 0x69, 0x6c, 0x6a, 0x69, 0x71, 0x71, + 0x6e, 0x6f, 0x6c, 0x6b, 0x6b, 0x6c, 0x68, 0x68, 0x68, 0x66, 0x66, 0x63, + 0x61, 0x63, 0x64, 0x63, 0x61, 0x58, 0x56, 0x5c, 0x64, 0x64, 0x60, 0x63, + 0x64, 0x63, 0x64, 0x67, 0x66, 0x64, 0x63, 0x5f, 0x5f, 0x63, 0x61, 0x64, + 0x68, 0x68, 0x6a, 0x6a, 0x6a, 0x68, 0x68, 0x69, 0x69, 0x69, 0x6a, 0x6a, + 0x6a, 0x6a, 0x69, 0x6a, 0x6b, 0x6b, 0x6b, 0x6d, 0x6f, 0x6e, 0x6d, 0x6d, + 0x6c, 0x6a, 0x69, 0x6a, 0x69, 0x68, 0x66, 0x67, 0xc2, 0xb8, 0xaa, 0xad, + 0xb1, 0xbb, 0xbd, 0xbc, 0xb5, 0xb6, 0xb8, 0xb7, 0xb6, 0xb9, 0xb7, 0xb8, + 0xb3, 0xb2, 0xaa, 0xae, 0xbf, 0xfb, 0x1b, 0x1c, 0x22, 0x22, 0x1f, 0x21, + 0x22, 0x22, 0x20, 0x1e, 0x22, 0x22, 0x1f, 0x1f, 0x1d, 0x1c, 0x18, 0x14, + 0x09, 0x09, 0x10, 0x07, 0x03, 0xff, 0xfa, 0xf8, 0xf8, 0xef, 0xe2, 0xd4, + 0xd4, 0xd0, 0xc7, 0xcd, 0xd4, 0xd1, 0xce, 0xd8, 0xdd, 0xe5, 0xf5, 0x00, + 0xff, 0x06, 0x08, 0x01, 0xfe, 0xfe, 0xff, 0xff, 0x04, 0x0a, 0x14, 0x16, + 0x18, 0x22, 0x27, 0x21, 0x2b, 0x36, 0x3a, 0x3a, 0x40, 0x44, 0x46, 0x41, + 0x40, 0x42, 0x43, 0x3e, 0x35, 0x33, 0x34, 0x34, 0x39, 0x3c, 0x3e, 
0x3d, + 0x3e, 0x3f, 0x43, 0x46, 0x47, 0x4c, 0x4b, 0x47, 0x47, 0x4c, 0x46, 0x4d, + 0x52, 0x52, 0x4e, 0x53, 0x58, 0x5a, 0x57, 0x57, 0x58, 0x5b, 0x5f, 0x5f, + 0x5f, 0x63, 0x63, 0x63, 0x64, 0x66, 0x68, 0x68, 0x68, 0x66, 0x69, 0x6a, + 0x6d, 0x6d, 0x6e, 0x6d, 0x6a, 0x66, 0x6a, 0x68, 0x6a, 0x6a, 0x67, 0x6b, + 0x6c, 0x69, 0x6a, 0x6c, 0x6a, 0x67, 0x70, 0x70, 0x6c, 0x6b, 0x67, 0x69, + 0x68, 0x69, 0x66, 0x67, 0x65, 0x64, 0x60, 0x5d, 0x5e, 0x5e, 0x60, 0x5f, + 0x5e, 0x56, 0x51, 0x5b, 0x62, 0x63, 0x5f, 0x5f, 0x63, 0x62, 0x63, 0x65, + 0x64, 0x62, 0x63, 0x62, 0x5f, 0x61, 0x5e, 0x5e, 0x63, 0x66, 0x68, 0x68, + 0x6a, 0x69, 0x67, 0x67, 0x68, 0x67, 0x68, 0x69, 0x6a, 0x6a, 0x69, 0x68, + 0x69, 0x6a, 0x6a, 0x6a, 0x6c, 0x6d, 0x6d, 0x6d, 0x6c, 0x6c, 0x6a, 0x6a, + 0x6a, 0x69, 0x68, 0x66, 0xc2, 0xb5, 0xaa, 0xb5, 0xae, 0xaa, 0xad, 0xb0, + 0xb1, 0xb5, 0xb5, 0xb1, 0xab, 0xb0, 0xb8, 0xbc, 0xb8, 0xb7, 0xb5, 0xb0, + 0xb5, 0xec, 0x15, 0x16, 0x1d, 0x1f, 0x21, 0x22, 0x22, 0x22, 0x1d, 0x1c, + 0x21, 0x20, 0x1c, 0x1c, 0x1e, 0x1b, 0x13, 0x0f, 0x0a, 0xfe, 0x00, 0x04, + 0xff, 0xfe, 0xf8, 0xf4, 0xf3, 0xed, 0xdf, 0xd5, 0xcc, 0xc3, 0xbe, 0xc2, + 0xc8, 0xce, 0xcb, 0xd7, 0xdc, 0xe6, 0xf6, 0xfb, 0xfc, 0x02, 0xff, 0xfb, + 0xfc, 0xf9, 0xfa, 0xfb, 0xfe, 0x07, 0x11, 0x15, 0x16, 0x21, 0x23, 0x1e, + 0x2a, 0x34, 0x3a, 0x3b, 0x3f, 0x42, 0x45, 0x40, 0x3a, 0x39, 0x38, 0x34, + 0x29, 0x25, 0x27, 0x26, 0x2d, 0x39, 0x37, 0x34, 0x3a, 0x3b, 0x40, 0x42, + 0x40, 0x49, 0x4c, 0x45, 0x46, 0x51, 0x4a, 0x4a, 0x50, 0x4e, 0x4e, 0x53, + 0x57, 0x59, 0x57, 0x56, 0x57, 0x58, 0x5b, 0x5e, 0x5e, 0x60, 0x61, 0x61, + 0x64, 0x64, 0x66, 0x67, 0x67, 0x66, 0x67, 0x69, 0x6c, 0x6d, 0x6d, 0x6c, + 0x6a, 0x65, 0x69, 0x6a, 0x6a, 0x6a, 0x69, 0x6c, 0x6b, 0x69, 0x6a, 0x6c, + 0x68, 0x66, 0x70, 0x6e, 0x6a, 0x6a, 0x66, 0x68, 0x66, 0x67, 0x64, 0x62, + 0x60, 0x5a, 0x54, 0x56, 0x5a, 0x5e, 0x5f, 0x5d, 0x5d, 0x57, 0x51, 0x5a, + 0x62, 0x61, 0x59, 0x5c, 0x5f, 0x62, 0x64, 0x64, 0x62, 0x5e, 0x60, 0x62, + 0x61, 0x5f, 0x5c, 0x58, 0x5e, 0x63, 0x65, 0x66, 0x68, 0x67, 0x65, 
0x66, + 0x68, 0x67, 0x67, 0x65, 0x68, 0x68, 0x67, 0x67, 0x6a, 0x6a, 0x6b, 0x6b, + 0x6d, 0x6c, 0x6c, 0x6d, 0x6d, 0x6d, 0x6c, 0x6b, 0x6c, 0x6b, 0x6a, 0x6a, + 0xc6, 0xc0, 0xb4, 0xb3, 0xb3, 0xaf, 0xa8, 0xaa, 0xb2, 0xb4, 0xb6, 0xb0, + 0xb0, 0xad, 0xb2, 0xb6, 0xba, 0xba, 0xb8, 0xb6, 0xb1, 0xe9, 0x18, 0x1a, + 0x24, 0x26, 0x26, 0x24, 0x21, 0x22, 0x1c, 0x1c, 0x1f, 0x1e, 0x1a, 0x1b, + 0x18, 0x17, 0x0f, 0x0a, 0x0a, 0xff, 0xf7, 0xf8, 0xfa, 0xfc, 0xfb, 0xf3, + 0xee, 0xeb, 0xdc, 0xce, 0xc1, 0xbc, 0xb8, 0xbf, 0xc5, 0xcc, 0xcc, 0xd4, + 0xe0, 0xeb, 0xf7, 0xfa, 0xf9, 0xfe, 0xfa, 0xfb, 0xfc, 0xf8, 0xf7, 0xf8, + 0xfd, 0x04, 0x0e, 0x10, 0x0f, 0x1c, 0x20, 0x1d, 0x28, 0x31, 0x3a, 0x38, + 0x3a, 0x3f, 0x40, 0x3a, 0x33, 0x34, 0x2c, 0x23, 0x1c, 0x1a, 0x1c, 0x19, + 0x1d, 0x2f, 0x33, 0x2e, 0x35, 0x3a, 0x3c, 0x3d, 0x3a, 0x44, 0x49, 0x41, + 0x42, 0x4f, 0x4e, 0x4b, 0x4e, 0x4c, 0x49, 0x4f, 0x56, 0x59, 0x55, 0x54, + 0x56, 0x57, 0x55, 0x5b, 0x5b, 0x5c, 0x5e, 0x60, 0x63, 0x63, 0x61, 0x64, + 0x66, 0x66, 0x65, 0x66, 0x6a, 0x6b, 0x6d, 0x6d, 0x6a, 0x68, 0x69, 0x6c, + 0x6b, 0x6a, 0x69, 0x6d, 0x6a, 0x6a, 0x6c, 0x6a, 0x66, 0x65, 0x6d, 0x6d, + 0x69, 0x69, 0x66, 0x66, 0x64, 0x65, 0x5f, 0x58, 0x57, 0x4c, 0x50, 0x52, + 0x54, 0x58, 0x5d, 0x5e, 0x59, 0x53, 0x52, 0x57, 0x60, 0x60, 0x58, 0x58, + 0x5e, 0x61, 0x63, 0x62, 0x60, 0x5c, 0x5b, 0x5f, 0x61, 0x60, 0x5b, 0x58, + 0x5d, 0x62, 0x66, 0x66, 0x66, 0x64, 0x63, 0x64, 0x66, 0x66, 0x66, 0x65, + 0x65, 0x66, 0x67, 0x66, 0x68, 0x69, 0x6a, 0x6a, 0x6a, 0x6a, 0x6b, 0x6b, + 0x6c, 0x6b, 0x6c, 0x6a, 0x6b, 0x6a, 0x68, 0x67, 0xbe, 0xc6, 0xc2, 0xb7, + 0xb1, 0xb0, 0xa9, 0xaa, 0xb1, 0xb5, 0xba, 0xb5, 0xb4, 0xb0, 0xaf, 0xb0, + 0xb4, 0xb0, 0xb0, 0xb4, 0xb6, 0xec, 0x0c, 0x1a, 0x2e, 0x2e, 0x2c, 0x27, + 0x22, 0x21, 0x1d, 0x1b, 0x1c, 0x19, 0x16, 0x1a, 0x15, 0x16, 0x0d, 0x03, + 0x04, 0xfd, 0xf4, 0xf1, 0xf1, 0xf4, 0xf6, 0xf2, 0xe9, 0xe2, 0xda, 0xce, + 0xc2, 0xbc, 0xb9, 0xc0, 0xc5, 0xca, 0xcb, 0xd8, 0xe0, 0xe8, 0xec, 0xf1, + 0xf3, 0xf8, 0xf4, 0xf6, 0xf8, 0xf4, 0xf2, 0xf6, 0xf9, 0x03, 0x0b, 
0x0a, + 0x09, 0x1a, 0x1e, 0x1c, 0x28, 0x31, 0x35, 0x36, 0x38, 0x39, 0x38, 0x34, + 0x28, 0x2e, 0x27, 0x17, 0x0f, 0x09, 0x0e, 0x10, 0x12, 0x20, 0x2d, 0x2d, + 0x30, 0x37, 0x38, 0x3a, 0x3a, 0x40, 0x46, 0x41, 0x40, 0x4b, 0x4d, 0x45, + 0x48, 0x4c, 0x4c, 0x4f, 0x52, 0x58, 0x58, 0x53, 0x54, 0x56, 0x52, 0x57, + 0x58, 0x57, 0x5a, 0x5f, 0x61, 0x61, 0x60, 0x62, 0x65, 0x67, 0x66, 0x64, + 0x6a, 0x6a, 0x6c, 0x6e, 0x69, 0x69, 0x69, 0x6a, 0x6a, 0x6c, 0x6c, 0x6e, + 0x6a, 0x6a, 0x6c, 0x6b, 0x65, 0x67, 0x6d, 0x6d, 0x68, 0x66, 0x66, 0x66, + 0x63, 0x5b, 0x4c, 0x47, 0x4b, 0x41, 0x4a, 0x4e, 0x50, 0x56, 0x59, 0x5d, + 0x57, 0x4f, 0x4d, 0x54, 0x5e, 0x5e, 0x59, 0x56, 0x5c, 0x5e, 0x5d, 0x5f, + 0x5f, 0x59, 0x56, 0x58, 0x5b, 0x5e, 0x5c, 0x59, 0x5a, 0x5d, 0x61, 0x63, + 0x64, 0x63, 0x61, 0x64, 0x64, 0x64, 0x64, 0x64, 0x64, 0x64, 0x64, 0x64, + 0x65, 0x67, 0x68, 0x68, 0x68, 0x68, 0x6a, 0x6a, 0x69, 0x6a, 0x6a, 0x6a, + 0x6a, 0x6a, 0x6a, 0x68, 0xb3, 0xb9, 0xbc, 0xc6, 0xc0, 0xb6, 0xba, 0xaa, + 0xab, 0xb1, 0xb8, 0xbc, 0xb9, 0xc0, 0xb6, 0xaa, 0xab, 0xad, 0xbc, 0xc8, + 0xbf, 0xdb, 0x15, 0x2e, 0x34, 0x32, 0x2e, 0x28, 0x22, 0x1e, 0x1d, 0x1c, + 0x19, 0x16, 0x10, 0x14, 0x10, 0x0e, 0x0c, 0x00, 0xfb, 0xf2, 0xee, 0xe8, + 0xe8, 0xea, 0xee, 0xec, 0xe5, 0xde, 0xd4, 0xca, 0xc6, 0xba, 0xbc, 0xbe, + 0xc1, 0xc5, 0xcc, 0xda, 0xdd, 0xe2, 0xe5, 0xe6, 0xed, 0xf2, 0xef, 0xee, + 0xf2, 0xed, 0xea, 0xf3, 0xf5, 0xff, 0x09, 0x07, 0x07, 0x16, 0x19, 0x19, + 0x26, 0x30, 0x33, 0x33, 0x34, 0x30, 0x2e, 0x2b, 0x22, 0x21, 0x24, 0x14, + 0x0a, 0x03, 0x03, 0x0a, 0x0f, 0x1a, 0x25, 0x2c, 0x2d, 0x32, 0x34, 0x38, + 0x37, 0x3a, 0x38, 0x38, 0x38, 0x41, 0x49, 0x43, 0x40, 0x47, 0x4c, 0x4c, + 0x51, 0x56, 0x58, 0x54, 0x52, 0x55, 0x52, 0x53, 0x55, 0x56, 0x53, 0x59, + 0x5d, 0x5e, 0x5f, 0x60, 0x63, 0x65, 0x65, 0x64, 0x66, 0x68, 0x6a, 0x6e, + 0x67, 0x68, 0x6a, 0x6a, 0x6a, 0x6c, 0x6d, 0x6d, 0x6a, 0x6a, 0x6b, 0x6b, + 0x65, 0x69, 0x6d, 0x6b, 0x68, 0x65, 0x67, 0x61, 0x5b, 0x50, 0x44, 0x45, + 0x45, 0x40, 0x46, 0x4c, 0x51, 0x57, 0x58, 0x58, 0x52, 0x4c, 0x4d, 
0x52, + 0x59, 0x5d, 0x58, 0x56, 0x58, 0x5b, 0x59, 0x5e, 0x5e, 0x54, 0x51, 0x52, + 0x58, 0x5e, 0x5a, 0x58, 0x59, 0x5a, 0x5c, 0x5e, 0x61, 0x61, 0x5f, 0x5f, + 0x60, 0x61, 0x63, 0x65, 0x65, 0x65, 0x64, 0x64, 0x65, 0x66, 0x67, 0x67, + 0x67, 0x68, 0x69, 0x69, 0x68, 0x68, 0x69, 0x69, 0x68, 0x67, 0x68, 0x69, + 0xb6, 0xaf, 0xb4, 0xd1, 0xdb, 0xc3, 0xb6, 0xab, 0xae, 0xb6, 0xb9, 0xc2, + 0xbf, 0xb8, 0xaf, 0xaf, 0xb0, 0xb9, 0xbc, 0xcc, 0xcd, 0x08, 0x2d, 0x34, + 0x34, 0x30, 0x2f, 0x29, 0x23, 0x20, 0x1d, 0x1c, 0x17, 0x11, 0x0b, 0x06, + 0x0a, 0x0c, 0x0a, 0xfe, 0xf3, 0xed, 0xe2, 0xdd, 0xe0, 0xe3, 0xe7, 0xe7, + 0xe3, 0xdc, 0xcf, 0xcc, 0xc6, 0xb8, 0xb8, 0xb6, 0xbc, 0xc5, 0xcd, 0xd7, + 0xd5, 0xda, 0xdb, 0xe0, 0xe7, 0xea, 0xe9, 0xe9, 0xeb, 0xe7, 0xe7, 0xed, + 0xf1, 0xfd, 0x08, 0x02, 0x04, 0x12, 0x15, 0x16, 0x26, 0x2e, 0x30, 0x2e, + 0x2d, 0x2a, 0x25, 0x22, 0x1c, 0x1a, 0x1d, 0x14, 0x0f, 0x0a, 0x06, 0x0a, + 0x10, 0x18, 0x1f, 0x28, 0x29, 0x2a, 0x2c, 0x30, 0x32, 0x31, 0x30, 0x31, + 0x34, 0x39, 0x43, 0x46, 0x3e, 0x45, 0x4b, 0x4a, 0x4c, 0x52, 0x56, 0x54, + 0x52, 0x55, 0x54, 0x52, 0x54, 0x54, 0x50, 0x52, 0x59, 0x5c, 0x5c, 0x5d, + 0x5f, 0x63, 0x64, 0x64, 0x65, 0x68, 0x6a, 0x6c, 0x69, 0x66, 0x6a, 0x6a, + 0x6b, 0x6d, 0x6e, 0x6b, 0x6b, 0x6b, 0x69, 0x69, 0x67, 0x69, 0x6b, 0x69, + 0x66, 0x64, 0x63, 0x5c, 0x52, 0x4d, 0x49, 0x43, 0x3e, 0x40, 0x41, 0x4b, + 0x52, 0x58, 0x57, 0x57, 0x50, 0x46, 0x4c, 0x4f, 0x51, 0x58, 0x57, 0x55, + 0x56, 0x55, 0x56, 0x58, 0x59, 0x4d, 0x4b, 0x52, 0x55, 0x5b, 0x5a, 0x57, + 0x55, 0x56, 0x58, 0x5a, 0x5c, 0x5e, 0x5a, 0x5a, 0x5d, 0x5e, 0x60, 0x62, + 0x64, 0x64, 0x64, 0x62, 0x64, 0x64, 0x64, 0x64, 0x66, 0x66, 0x68, 0x68, + 0x69, 0x69, 0x69, 0x68, 0x69, 0x68, 0x67, 0x67, 0xc4, 0xb2, 0xaa, 0xb2, + 0xc1, 0xc2, 0xb8, 0xb2, 0xb1, 0xb6, 0xbd, 0xc2, 0xba, 0xb3, 0xad, 0xb0, + 0xb4, 0xb5, 0xb8, 0xc2, 0xd5, 0x1c, 0x2d, 0x2f, 0x2d, 0x2b, 0x29, 0x28, + 0x22, 0x1e, 0x1a, 0x1d, 0x1a, 0x14, 0x05, 0xf5, 0xf9, 0x03, 0x04, 0xf8, + 0xe9, 0xe4, 0xe0, 0xd6, 0xd2, 0xda, 0xe0, 0xe5, 0xe0, 0xd8, 0xcd, 
0xca, + 0xc4, 0xb8, 0xb4, 0xb0, 0xbc, 0xc6, 0xcb, 0xd0, 0xce, 0xcf, 0xd3, 0xda, + 0xe2, 0xe3, 0xdf, 0xe6, 0xe6, 0xe1, 0xe8, 0xe8, 0xef, 0xfc, 0x01, 0x00, + 0x04, 0x0f, 0x10, 0x16, 0x24, 0x2a, 0x2d, 0x29, 0x23, 0x22, 0x1f, 0x1c, + 0x15, 0x15, 0x18, 0x16, 0x14, 0x0d, 0x0e, 0x10, 0x13, 0x1b, 0x1f, 0x28, + 0x28, 0x25, 0x22, 0x28, 0x2d, 0x2c, 0x2c, 0x2f, 0x34, 0x35, 0x3b, 0x44, + 0x3f, 0x41, 0x4b, 0x4a, 0x49, 0x4b, 0x4f, 0x51, 0x50, 0x51, 0x55, 0x51, + 0x52, 0x52, 0x4c, 0x48, 0x52, 0x58, 0x59, 0x59, 0x5b, 0x61, 0x61, 0x62, + 0x62, 0x64, 0x67, 0x69, 0x6a, 0x67, 0x6a, 0x6a, 0x6a, 0x6c, 0x6d, 0x6a, + 0x6a, 0x6a, 0x69, 0x67, 0x68, 0x69, 0x69, 0x67, 0x64, 0x60, 0x61, 0x57, + 0x4c, 0x46, 0x49, 0x46, 0x42, 0x45, 0x45, 0x4c, 0x53, 0x56, 0x55, 0x55, + 0x49, 0x40, 0x48, 0x4c, 0x4b, 0x51, 0x52, 0x51, 0x51, 0x51, 0x52, 0x51, + 0x54, 0x50, 0x46, 0x4b, 0x50, 0x55, 0x57, 0x54, 0x52, 0x53, 0x52, 0x55, + 0x58, 0x58, 0x55, 0x56, 0x58, 0x59, 0x5c, 0x5e, 0x60, 0x5f, 0x5f, 0x60, + 0x62, 0x62, 0x64, 0x64, 0x64, 0x65, 0x67, 0x67, 0x67, 0x66, 0x67, 0x66, + 0x68, 0x68, 0x67, 0x66, 0xb0, 0xc1, 0xaf, 0xa9, 0xb6, 0xc1, 0xbe, 0xb0, + 0xae, 0xb0, 0xbc, 0xbe, 0xb4, 0xb4, 0xb1, 0xb0, 0xb0, 0xac, 0xb0, 0xc2, + 0xd6, 0x18, 0x2a, 0x2a, 0x28, 0x1c, 0x0c, 0x1f, 0x1f, 0x18, 0x15, 0x19, + 0x17, 0x12, 0xff, 0xea, 0xe3, 0xea, 0xfa, 0xf8, 0xe6, 0xd6, 0xda, 0xd7, + 0xce, 0xce, 0xd5, 0xde, 0xda, 0xd2, 0xc7, 0xc8, 0xbd, 0xb3, 0xad, 0xb0, + 0xbd, 0xc0, 0xc8, 0xc7, 0xc8, 0xc7, 0xc5, 0xd5, 0xdf, 0xd7, 0xdb, 0xe4, + 0xdf, 0xdf, 0xe2, 0xe4, 0xf0, 0xf8, 0xfd, 0xfe, 0x01, 0x09, 0x10, 0x19, + 0x25, 0x27, 0x26, 0x22, 0x1d, 0x1d, 0x1b, 0x13, 0x0a, 0x10, 0x10, 0x0d, + 0x12, 0x0e, 0x13, 0x14, 0x15, 0x1b, 0x1f, 0x28, 0x26, 0x21, 0x21, 0x25, + 0x2a, 0x2d, 0x2b, 0x2f, 0x38, 0x37, 0x39, 0x3d, 0x3e, 0x3f, 0x46, 0x4c, + 0x49, 0x44, 0x45, 0x47, 0x4c, 0x4c, 0x52, 0x51, 0x4b, 0x4f, 0x4b, 0x46, + 0x4c, 0x52, 0x56, 0x58, 0x59, 0x5e, 0x5f, 0x5e, 0x5e, 0x62, 0x65, 0x65, + 0x68, 0x68, 0x68, 0x6a, 0x6a, 0x6c, 0x6d, 0x6a, 0x68, 0x69, 0x68, 
0x66, + 0x68, 0x69, 0x67, 0x65, 0x5f, 0x5e, 0x5b, 0x51, 0x49, 0x46, 0x48, 0x4c, + 0x4b, 0x4a, 0x4c, 0x4e, 0x52, 0x53, 0x54, 0x4f, 0x3e, 0x3a, 0x44, 0x49, + 0x44, 0x48, 0x46, 0x48, 0x4b, 0x4b, 0x4e, 0x4e, 0x4f, 0x51, 0x4b, 0x46, + 0x4b, 0x4e, 0x52, 0x52, 0x51, 0x51, 0x4f, 0x4e, 0x52, 0x52, 0x51, 0x51, + 0x54, 0x56, 0x58, 0x5a, 0x5d, 0x5d, 0x5e, 0x60, 0x60, 0x62, 0x64, 0x63, + 0x64, 0x65, 0x66, 0x65, 0x66, 0x67, 0x68, 0x67, 0x67, 0x67, 0x68, 0x69, + 0xaa, 0xbc, 0xb5, 0xae, 0xac, 0xb4, 0xc2, 0xb7, 0xb9, 0xc0, 0xc0, 0xb6, + 0xb7, 0xbb, 0xb8, 0xba, 0xb6, 0xb1, 0xb7, 0xc3, 0xd1, 0x11, 0x20, 0x22, + 0x24, 0xfe, 0xf8, 0x1e, 0x1b, 0x13, 0x10, 0x16, 0x0c, 0xfa, 0xfb, 0xe2, + 0xd1, 0xd4, 0xe4, 0xeb, 0xe5, 0xd4, 0xce, 0xcd, 0xcc, 0xc4, 0xca, 0xd5, + 0xd6, 0xd1, 0xc6, 0xc3, 0xb8, 0xab, 0xab, 0xb6, 0xbd, 0xbe, 0xc2, 0xc5, + 0xc7, 0xc0, 0xbf, 0xd4, 0xd0, 0xce, 0xda, 0xdf, 0xd8, 0xdc, 0xdf, 0xe4, + 0xef, 0xf6, 0xf8, 0xfa, 0xfd, 0x04, 0x0c, 0x18, 0x23, 0x21, 0x22, 0x20, + 0x17, 0x15, 0x16, 0x14, 0x08, 0x0a, 0x08, 0x04, 0x0b, 0x0a, 0x0e, 0x11, + 0x12, 0x17, 0x1c, 0x22, 0x26, 0x20, 0x20, 0x26, 0x2a, 0x2e, 0x2b, 0x2d, + 0x34, 0x37, 0x37, 0x36, 0x35, 0x3a, 0x42, 0x4c, 0x4d, 0x43, 0x40, 0x41, + 0x49, 0x4c, 0x50, 0x52, 0x49, 0x49, 0x4c, 0x48, 0x47, 0x4a, 0x53, 0x56, + 0x58, 0x5a, 0x5e, 0x5c, 0x5a, 0x60, 0x64, 0x65, 0x67, 0x68, 0x68, 0x68, + 0x69, 0x6d, 0x6e, 0x68, 0x66, 0x68, 0x68, 0x64, 0x65, 0x67, 0x64, 0x5f, + 0x5b, 0x58, 0x56, 0x52, 0x51, 0x51, 0x52, 0x54, 0x54, 0x52, 0x50, 0x4d, + 0x4f, 0x52, 0x50, 0x45, 0x37, 0x34, 0x3d, 0x45, 0x3e, 0x3f, 0x3d, 0x40, + 0x46, 0x47, 0x4b, 0x4d, 0x4d, 0x4d, 0x4d, 0x46, 0x44, 0x47, 0x4a, 0x49, + 0x4b, 0x4c, 0x4c, 0x4b, 0x4d, 0x4e, 0x4f, 0x4f, 0x51, 0x53, 0x55, 0x57, + 0x59, 0x5b, 0x5a, 0x5c, 0x5f, 0x60, 0x60, 0x62, 0x64, 0x64, 0x65, 0x67, + 0x65, 0x66, 0x66, 0x67, 0x67, 0x66, 0x67, 0x66, 0xa7, 0xb5, 0xb4, 0xb4, + 0xaf, 0xc2, 0xc8, 0xb7, 0xba, 0xc2, 0xbe, 0xb5, 0xc0, 0xc1, 0xbc, 0xc1, + 0xbc, 0xb2, 0xb8, 0xc3, 0xd2, 0x0c, 0xee, 0x17, 0x1b, 0xe6, 0xfe, 
0x22, + 0x1c, 0x12, 0x0a, 0x0d, 0xf4, 0xdc, 0xf0, 0xd9, 0xc6, 0xcb, 0xd2, 0xd3, + 0xcd, 0xd8, 0xcd, 0xc2, 0xc2, 0xc2, 0xc5, 0xcf, 0xd4, 0xd0, 0xc3, 0xc2, + 0xb9, 0xa9, 0xb0, 0xc2, 0xbd, 0xba, 0xbe, 0xc4, 0xbf, 0xc1, 0xc5, 0xd3, + 0xc6, 0xce, 0xd0, 0xd3, 0xd5, 0xd8, 0xdb, 0xe3, 0xeb, 0xf3, 0xf5, 0xf5, + 0xf8, 0x00, 0x08, 0x17, 0x21, 0x21, 0x1f, 0x1e, 0x16, 0x14, 0x12, 0x10, + 0x0d, 0x08, 0x07, 0xff, 0x05, 0x03, 0x03, 0x0a, 0x0a, 0x12, 0x16, 0x17, + 0x1f, 0x1e, 0x1e, 0x25, 0x29, 0x2b, 0x29, 0x2d, 0x2a, 0x30, 0x32, 0x38, + 0x34, 0x35, 0x3d, 0x44, 0x4d, 0x48, 0x40, 0x43, 0x47, 0x4a, 0x4c, 0x51, + 0x48, 0x46, 0x4a, 0x4b, 0x47, 0x46, 0x4f, 0x55, 0x57, 0x57, 0x5e, 0x5a, + 0x57, 0x5c, 0x62, 0x64, 0x66, 0x68, 0x69, 0x68, 0x68, 0x6b, 0x6c, 0x69, + 0x66, 0x66, 0x66, 0x64, 0x65, 0x64, 0x5f, 0x58, 0x58, 0x58, 0x57, 0x58, + 0x5b, 0x5c, 0x5d, 0x5e, 0x5e, 0x58, 0x55, 0x52, 0x52, 0x51, 0x4c, 0x42, + 0x3a, 0x34, 0x37, 0x40, 0x3c, 0x38, 0x38, 0x37, 0x3a, 0x46, 0x46, 0x48, + 0x4a, 0x49, 0x4b, 0x48, 0x43, 0x40, 0x40, 0x42, 0x44, 0x46, 0x46, 0x4b, + 0x4c, 0x4c, 0x4b, 0x4a, 0x49, 0x4e, 0x52, 0x55, 0x58, 0x58, 0x57, 0x59, + 0x5c, 0x5d, 0x5d, 0x5e, 0x5e, 0x5f, 0x5f, 0x60, 0x5e, 0x64, 0x64, 0x65, + 0x65, 0x66, 0x66, 0x64, 0xaa, 0xad, 0xb3, 0xb6, 0xbb, 0xbf, 0xbb, 0xb7, + 0xbc, 0xc1, 0xb8, 0xbe, 0xc5, 0xbe, 0xbb, 0xbe, 0xc5, 0xb7, 0xb9, 0xc2, + 0xda, 0x03, 0xdb, 0x1a, 0x0a, 0xd7, 0x05, 0x20, 0x15, 0x0a, 0x04, 0xfe, + 0xd9, 0xc4, 0xdd, 0xd2, 0xc2, 0xc5, 0xc8, 0xc5, 0xbc, 0xbc, 0xc6, 0xc8, + 0xc0, 0xb7, 0xba, 0xc6, 0xce, 0xcb, 0xbd, 0xc3, 0xba, 0xac, 0xb0, 0xb9, + 0xb5, 0xb2, 0xc3, 0xc5, 0xb9, 0xc2, 0xce, 0xca, 0xc2, 0xc7, 0xc6, 0xcb, + 0xd1, 0xd5, 0xda, 0xe1, 0xe6, 0xed, 0xf2, 0xf2, 0xf4, 0xf9, 0x06, 0x1d, + 0x20, 0x1c, 0x1c, 0x19, 0x16, 0x13, 0x11, 0x0d, 0x0d, 0x08, 0x07, 0x00, + 0x02, 0xff, 0xf9, 0x00, 0x04, 0x0d, 0x11, 0x12, 0x17, 0x1c, 0x1f, 0x25, + 0x2a, 0x28, 0x24, 0x2a, 0x2a, 0x2a, 0x2e, 0x32, 0x36, 0x36, 0x3c, 0x41, + 0x47, 0x4b, 0x41, 0x43, 0x46, 0x4b, 0x4b, 0x51, 0x4c, 0x48, 0x4d, 
0x52, + 0x4e, 0x48, 0x47, 0x52, 0x58, 0x56, 0x5a, 0x59, 0x54, 0x58, 0x5f, 0x64, + 0x67, 0x69, 0x68, 0x67, 0x6a, 0x6a, 0x6a, 0x68, 0x66, 0x66, 0x65, 0x65, + 0x66, 0x64, 0x5e, 0x58, 0x5a, 0x5c, 0x5c, 0x5e, 0x60, 0x5f, 0x5e, 0x5e, + 0x5e, 0x5c, 0x58, 0x57, 0x57, 0x56, 0x50, 0x48, 0x42, 0x3c, 0x37, 0x37, + 0x34, 0x2b, 0x34, 0x34, 0x2f, 0x3a, 0x42, 0x45, 0x42, 0x44, 0x45, 0x46, + 0x43, 0x40, 0x3c, 0x3b, 0x3b, 0x3e, 0x3c, 0x40, 0x48, 0x48, 0x47, 0x49, + 0x47, 0x4c, 0x50, 0x54, 0x56, 0x56, 0x57, 0x58, 0x59, 0x5a, 0x5b, 0x5a, + 0x59, 0x5b, 0x5a, 0x4f, 0x4a, 0x5a, 0x5f, 0x61, 0x63, 0x64, 0x65, 0x64, + 0xaf, 0xba, 0xb0, 0xb1, 0xbe, 0xc1, 0xc6, 0xc5, 0xbb, 0xc1, 0xc0, 0xc0, + 0xbf, 0xb8, 0xbc, 0xc0, 0xbd, 0xbc, 0xc0, 0xc2, 0xe1, 0xed, 0xdf, 0x21, + 0xf8, 0xd4, 0x04, 0x0d, 0xfb, 0xef, 0xff, 0xf2, 0xcb, 0xc2, 0xc3, 0xc9, + 0xbe, 0xb9, 0xbe, 0xbe, 0xb7, 0xaa, 0xb3, 0xbe, 0xbf, 0xb8, 0xb8, 0xc3, + 0xc8, 0xb9, 0xb3, 0xc1, 0xb8, 0xab, 0xaf, 0xb0, 0xac, 0xb1, 0xc5, 0xc5, + 0xbc, 0xc3, 0xc7, 0xc4, 0xc2, 0xc2, 0xc4, 0xc9, 0xd0, 0xd8, 0xd7, 0xd7, + 0xe0, 0xec, 0xf3, 0xee, 0xed, 0xf6, 0x10, 0x22, 0x1d, 0x18, 0x15, 0x16, + 0x16, 0x14, 0x0f, 0x0d, 0x0b, 0x0a, 0x05, 0x05, 0x01, 0xff, 0xf9, 0xfb, + 0x04, 0x0c, 0x0d, 0x0d, 0x13, 0x17, 0x1b, 0x1e, 0x2a, 0x26, 0x23, 0x25, + 0x27, 0x2a, 0x2e, 0x31, 0x37, 0x38, 0x3c, 0x42, 0x45, 0x4b, 0x4a, 0x47, + 0x49, 0x4c, 0x4c, 0x4f, 0x52, 0x4e, 0x52, 0x56, 0x52, 0x4f, 0x4b, 0x50, + 0x58, 0x58, 0x58, 0x58, 0x51, 0x55, 0x5c, 0x61, 0x64, 0x68, 0x69, 0x66, + 0x69, 0x69, 0x69, 0x68, 0x66, 0x66, 0x68, 0x68, 0x68, 0x65, 0x60, 0x5e, + 0x60, 0x61, 0x61, 0x60, 0x60, 0x5d, 0x59, 0x58, 0x57, 0x57, 0x55, 0x53, + 0x52, 0x52, 0x52, 0x4b, 0x44, 0x3f, 0x37, 0x33, 0x2c, 0x25, 0x29, 0x2c, + 0x2a, 0x2e, 0x3a, 0x40, 0x40, 0x42, 0x3f, 0x41, 0x41, 0x3f, 0x3a, 0x31, + 0x33, 0x32, 0x34, 0x3e, 0x47, 0x45, 0x43, 0x45, 0x43, 0x46, 0x4b, 0x4e, + 0x52, 0x54, 0x52, 0x53, 0x54, 0x56, 0x58, 0x58, 0x58, 0x59, 0x59, 0x4d, + 0x49, 0x58, 0x5c, 0x5d, 0x5f, 0x60, 0x63, 0x64, 0xb4, 0xba, 0xaf, 
0xb9, + 0xbe, 0xc2, 0xd1, 0xda, 0xc5, 0xc2, 0xbe, 0xc0, 0xc0, 0xb7, 0xcb, 0xc3, + 0xbf, 0xc2, 0xc4, 0xc7, 0xe5, 0xd3, 0xee, 0x22, 0xe5, 0xd6, 0xf9, 0xf2, + 0xe3, 0xe5, 0xf5, 0xe8, 0xc7, 0xd1, 0xbb, 0xc0, 0xbc, 0xb4, 0xb6, 0xb2, + 0xac, 0xab, 0xb8, 0xba, 0xb7, 0xb8, 0xba, 0xc2, 0xc3, 0xb2, 0xb2, 0xbf, + 0xb5, 0xac, 0xae, 0xac, 0xab, 0xb5, 0xc6, 0xca, 0xc0, 0xbd, 0xc1, 0xc2, + 0xbc, 0xbe, 0xc0, 0xc9, 0xd1, 0xd2, 0xcd, 0xd4, 0xe2, 0xe8, 0xea, 0xe6, + 0xed, 0xfd, 0x16, 0x21, 0x1c, 0x16, 0x10, 0x16, 0x15, 0x15, 0x11, 0x0d, + 0x09, 0x0a, 0x05, 0x04, 0x02, 0xfe, 0x02, 0x00, 0x06, 0x0f, 0x0e, 0x0c, + 0x0e, 0x10, 0x17, 0x1b, 0x24, 0x17, 0x08, 0x1e, 0x21, 0x2a, 0x2f, 0x33, + 0x3a, 0x3a, 0x40, 0x41, 0x46, 0x49, 0x4d, 0x4b, 0x4c, 0x4c, 0x50, 0x50, + 0x55, 0x4f, 0x52, 0x58, 0x56, 0x53, 0x52, 0x51, 0x54, 0x57, 0x57, 0x57, + 0x4f, 0x52, 0x5a, 0x5e, 0x63, 0x67, 0x69, 0x68, 0x69, 0x69, 0x69, 0x69, + 0x67, 0x68, 0x6a, 0x68, 0x69, 0x67, 0x64, 0x60, 0x5e, 0x5e, 0x5e, 0x5d, + 0x57, 0x55, 0x56, 0x56, 0x56, 0x57, 0x53, 0x51, 0x52, 0x50, 0x4e, 0x4e, + 0x4c, 0x45, 0x40, 0x3a, 0x32, 0x28, 0x27, 0x2a, 0x25, 0x25, 0x2e, 0x38, + 0x3a, 0x3d, 0x3b, 0x3c, 0x3d, 0x3a, 0x3a, 0x30, 0x30, 0x33, 0x37, 0x46, + 0x48, 0x3d, 0x39, 0x3c, 0x3d, 0x3e, 0x43, 0x47, 0x49, 0x4b, 0x4c, 0x4d, + 0x52, 0x53, 0x54, 0x55, 0x55, 0x56, 0x53, 0x49, 0x4b, 0x58, 0x5a, 0x5a, + 0x5d, 0x5e, 0x62, 0x62, 0xad, 0xb3, 0xbd, 0xc6, 0xc7, 0xca, 0xdf, 0xea, + 0xd3, 0xc7, 0xca, 0xda, 0xed, 0xea, 0xf6, 0xf7, 0xf4, 0xe9, 0xc9, 0xc9, + 0xdf, 0xc6, 0x01, 0x16, 0xd8, 0xe3, 0xe8, 0xe2, 0xd5, 0xde, 0xe6, 0xde, + 0xc2, 0xce, 0xba, 0xb8, 0xbc, 0xb6, 0xb0, 0xa8, 0xab, 0xb3, 0xb2, 0xae, + 0xb2, 0xbc, 0xc0, 0xc3, 0xc2, 0xb0, 0xb5, 0xbe, 0xb3, 0xa9, 0xac, 0xb1, + 0xb0, 0xb2, 0xc0, 0xc4, 0xc7, 0xc5, 0xc2, 0xba, 0xbb, 0xbc, 0xc6, 0xc8, + 0xcb, 0xc7, 0xc6, 0xcf, 0xde, 0xdc, 0xe0, 0xee, 0xfb, 0x0e, 0x1d, 0x1d, + 0x15, 0x0e, 0x0e, 0x12, 0x10, 0x0f, 0x11, 0x0a, 0x04, 0x07, 0x03, 0x04, + 0x02, 0xff, 0x04, 0x08, 0x0f, 0x13, 0x0f, 0x0c, 0x0f, 0x0d, 0x12, 
0x16, + 0x1c, 0xf1, 0xea, 0x17, 0x21, 0x27, 0x2f, 0x34, 0x3d, 0x3f, 0x42, 0x46, + 0x47, 0x4b, 0x4c, 0x4c, 0x4f, 0x4f, 0x52, 0x55, 0x57, 0x54, 0x55, 0x58, + 0x5a, 0x57, 0x54, 0x52, 0x53, 0x56, 0x58, 0x58, 0x52, 0x50, 0x58, 0x5c, + 0x5e, 0x65, 0x68, 0x68, 0x68, 0x67, 0x6a, 0x68, 0x68, 0x66, 0x6a, 0x69, + 0x69, 0x69, 0x65, 0x5f, 0x5b, 0x5c, 0x5d, 0x5a, 0x58, 0x58, 0x58, 0x58, + 0x57, 0x57, 0x55, 0x56, 0x56, 0x52, 0x51, 0x51, 0x4c, 0x44, 0x3f, 0x3a, + 0x34, 0x2c, 0x28, 0x28, 0x1c, 0x14, 0x1e, 0x2f, 0x36, 0x37, 0x36, 0x38, + 0x38, 0x37, 0x35, 0x31, 0x32, 0x39, 0x44, 0x46, 0x3d, 0x34, 0x30, 0x34, + 0x34, 0x37, 0x40, 0x45, 0x48, 0x4b, 0x4c, 0x4a, 0x4b, 0x4c, 0x4c, 0x4f, + 0x50, 0x50, 0x4c, 0x3f, 0x40, 0x52, 0x57, 0x58, 0x5a, 0x5a, 0x5c, 0x5d, + 0xb3, 0xc5, 0xc8, 0xc2, 0xc8, 0xc7, 0xd7, 0xda, 0xfb, 0xfb, 0xdf, 0x07, + 0x11, 0x0c, 0xfe, 0x2b, 0x0b, 0xe0, 0xc9, 0xd3, 0xd6, 0xc3, 0x02, 0x04, + 0xce, 0xdf, 0xd2, 0xcf, 0xcc, 0xd0, 0xd8, 0xd6, 0xbc, 0xc1, 0xba, 0xb3, + 0xb6, 0xb7, 0xb7, 0xb2, 0xb6, 0xb3, 0xa7, 0xaa, 0xb2, 0xbc, 0xc2, 0xc7, + 0xbf, 0xb0, 0xb9, 0xc0, 0xb3, 0xa7, 0xb3, 0xc2, 0xbd, 0xb3, 0xb4, 0xc8, + 0xdf, 0xd7, 0xc6, 0xbc, 0xbe, 0xc6, 0xd0, 0xbf, 0xbc, 0xc3, 0xca, 0xd4, + 0xe2, 0xdb, 0xee, 0xfe, 0x06, 0x14, 0x19, 0x16, 0x0a, 0x0d, 0x0c, 0x0f, + 0x0e, 0x08, 0x09, 0x04, 0xfe, 0xfe, 0xfe, 0x02, 0x01, 0x01, 0x04, 0x0c, + 0x10, 0x16, 0x15, 0x0f, 0x0e, 0x12, 0x11, 0x14, 0x08, 0xd4, 0xf1, 0x1a, + 0x26, 0x28, 0x2e, 0x34, 0x3e, 0x41, 0x43, 0x46, 0x4a, 0x4d, 0x4c, 0x4c, + 0x50, 0x52, 0x52, 0x57, 0x58, 0x58, 0x57, 0x5b, 0x5d, 0x5c, 0x58, 0x55, + 0x57, 0x56, 0x58, 0x58, 0x52, 0x52, 0x57, 0x59, 0x58, 0x5e, 0x64, 0x68, + 0x65, 0x64, 0x67, 0x67, 0x69, 0x67, 0x67, 0x68, 0x67, 0x68, 0x69, 0x66, + 0x63, 0x61, 0x5d, 0x5c, 0x5d, 0x5c, 0x59, 0x59, 0x5b, 0x59, 0x58, 0x58, + 0x54, 0x52, 0x50, 0x4c, 0x4a, 0x46, 0x40, 0x3b, 0x36, 0x31, 0x2b, 0x24, + 0x16, 0x0a, 0x10, 0x20, 0x2e, 0x31, 0x2e, 0x34, 0x35, 0x33, 0x2d, 0x28, + 0x28, 0x32, 0x38, 0x38, 0x31, 0x2b, 0x2b, 0x28, 0x29, 0x30, 0x38, 
0x3e, + 0x42, 0x46, 0x4a, 0x4e, 0x4c, 0x4b, 0x4b, 0x49, 0x42, 0x40, 0x3d, 0x30, + 0x3a, 0x4c, 0x52, 0x55, 0x58, 0x58, 0x58, 0x58, 0xc3, 0xcb, 0xc8, 0xbd, + 0xb8, 0xc9, 0xce, 0xda, 0xf8, 0xed, 0xe4, 0x1e, 0xf4, 0xe0, 0xf9, 0x04, + 0xda, 0xd0, 0xce, 0xda, 0xcd, 0xd9, 0x16, 0xf3, 0xce, 0xce, 0xb6, 0xb6, + 0xbe, 0xc8, 0xd4, 0xc3, 0xba, 0xbc, 0xbe, 0xb7, 0xb0, 0xbc, 0xbc, 0xca, + 0xc6, 0xb3, 0xa8, 0xa6, 0xb4, 0xc5, 0xc8, 0xc8, 0xba, 0xb0, 0xbb, 0xbe, + 0xb0, 0xab, 0xb3, 0xbd, 0xbb, 0xb6, 0xad, 0xb0, 0xb3, 0xbc, 0xc4, 0xcb, + 0xc5, 0xc9, 0xd5, 0xbc, 0xb7, 0xc2, 0xcd, 0xd7, 0xdc, 0xeb, 0xf9, 0xfa, + 0x05, 0x13, 0x0f, 0x08, 0x05, 0x04, 0x06, 0x09, 0xfb, 0xf3, 0xfb, 0xfd, + 0xf1, 0xf1, 0xf8, 0xfe, 0xfd, 0xfe, 0x04, 0x0b, 0x0a, 0x0e, 0x0f, 0x0b, + 0x0a, 0x0f, 0x14, 0x16, 0xf6, 0xd2, 0x03, 0x1c, 0x27, 0x2d, 0x2f, 0x32, + 0x3a, 0x40, 0x44, 0x45, 0x49, 0x4b, 0x4c, 0x4d, 0x52, 0x52, 0x51, 0x57, + 0x5a, 0x5b, 0x5a, 0x5c, 0x5e, 0x5e, 0x5c, 0x5b, 0x59, 0x58, 0x58, 0x59, + 0x53, 0x54, 0x57, 0x55, 0x53, 0x57, 0x5e, 0x65, 0x63, 0x5f, 0x60, 0x63, + 0x64, 0x63, 0x63, 0x64, 0x65, 0x65, 0x68, 0x69, 0x69, 0x67, 0x64, 0x61, + 0x5f, 0x5e, 0x59, 0x58, 0x58, 0x57, 0x53, 0x52, 0x4f, 0x50, 0x4c, 0x4a, + 0x47, 0x45, 0x40, 0x3f, 0x3a, 0x34, 0x28, 0x27, 0x1e, 0x14, 0x0f, 0x10, + 0x1c, 0x27, 0x2a, 0x2e, 0x32, 0x2e, 0x2b, 0x21, 0x1d, 0x1f, 0x24, 0x2f, + 0x29, 0x1e, 0x1d, 0x1a, 0x24, 0x2f, 0x34, 0x35, 0x38, 0x3a, 0x3e, 0x45, + 0x47, 0x4b, 0x4c, 0x49, 0x40, 0x37, 0x35, 0x2e, 0x36, 0x46, 0x4b, 0x4c, + 0x4d, 0x52, 0x54, 0x55, 0xd2, 0xc5, 0xc5, 0xb6, 0xac, 0xc5, 0xd4, 0xce, + 0xbd, 0xbe, 0xe6, 0xee, 0xe1, 0xd9, 0xdd, 0xd7, 0xd5, 0xd3, 0xc4, 0xc7, + 0xc1, 0xea, 0x0c, 0xea, 0xc8, 0xc0, 0xb8, 0xaf, 0xbc, 0xd5, 0xcd, 0xbe, + 0xbe, 0xc3, 0xc1, 0xb8, 0xb4, 0xb0, 0xb2, 0xc4, 0xc2, 0xbe, 0xb6, 0xaa, + 0xb0, 0xbf, 0xcd, 0xc7, 0xb5, 0xab, 0xba, 0xba, 0xaf, 0xbf, 0xc2, 0xb8, + 0xb0, 0xb8, 0xb0, 0xb4, 0xb2, 0xb0, 0xbf, 0xc2, 0xc3, 0xcc, 0xcf, 0xbe, + 0xb7, 0xc4, 0xce, 0xd8, 0xdf, 0xe9, 0xed, 0xf8, 0x0a, 0x0a, 0x04, 
0xfe, + 0xfe, 0xfa, 0xf5, 0xee, 0xe3, 0xe0, 0xeb, 0xed, 0xe3, 0xe6, 0xf5, 0xf8, + 0xf8, 0xfa, 0xfe, 0x03, 0x04, 0x05, 0x08, 0x06, 0x09, 0x0d, 0x14, 0x10, + 0xe7, 0xdd, 0x0e, 0x1f, 0x25, 0x2e, 0x2d, 0x30, 0x35, 0x3c, 0x46, 0x46, + 0x48, 0x49, 0x4f, 0x52, 0x52, 0x51, 0x50, 0x57, 0x59, 0x5d, 0x5c, 0x5c, + 0x5f, 0x5d, 0x5d, 0x5e, 0x5c, 0x5a, 0x59, 0x5b, 0x57, 0x57, 0x58, 0x4d, + 0x4a, 0x4e, 0x53, 0x5e, 0x62, 0x5f, 0x5f, 0x5f, 0x5f, 0x63, 0x64, 0x63, + 0x64, 0x64, 0x64, 0x64, 0x64, 0x63, 0x60, 0x5e, 0x5c, 0x57, 0x45, 0x44, + 0x4d, 0x4f, 0x4c, 0x49, 0x43, 0x44, 0x43, 0x3d, 0x3b, 0x40, 0x3e, 0x3d, + 0x38, 0x35, 0x30, 0x29, 0x20, 0x18, 0x11, 0x0a, 0x0d, 0x1a, 0x22, 0x27, + 0x28, 0x2a, 0x22, 0x15, 0x0c, 0x10, 0x14, 0x1f, 0x20, 0x19, 0x0f, 0x11, + 0x1e, 0x2a, 0x33, 0x34, 0x31, 0x34, 0x35, 0x3a, 0x3d, 0x40, 0x43, 0x46, + 0x46, 0x3e, 0x3a, 0x37, 0x3b, 0x44, 0x47, 0x46, 0x46, 0x45, 0x48, 0x4c, + 0xd0, 0xc7, 0xc0, 0xb2, 0xae, 0xc2, 0xd2, 0xc8, 0xb6, 0xc1, 0xc3, 0xe0, + 0xe7, 0xd3, 0xc4, 0xc7, 0xc8, 0xbe, 0xb9, 0xbf, 0xbf, 0xe2, 0xd9, 0xcc, + 0xc2, 0xcb, 0xc8, 0xaa, 0xb4, 0xc3, 0xc2, 0xc2, 0xc2, 0xba, 0xaa, 0xb2, + 0xb7, 0xb4, 0xb6, 0xad, 0xad, 0xb6, 0xbc, 0xb0, 0xad, 0xc4, 0xce, 0xbe, + 0xad, 0xab, 0xc1, 0xbc, 0xb7, 0xd6, 0xce, 0xac, 0xb0, 0xbc, 0xb4, 0xb6, + 0xb9, 0xaa, 0xb5, 0xbc, 0xbe, 0xc2, 0xc2, 0xc0, 0xbf, 0xc7, 0xcd, 0xd4, + 0xdf, 0xe6, 0xec, 0xfa, 0x04, 0xfe, 0xfa, 0xf6, 0xeb, 0xe1, 0xda, 0xd8, + 0xd1, 0xd4, 0xdb, 0xdf, 0xd6, 0xe0, 0xec, 0xec, 0xed, 0xf3, 0xf9, 0xfe, + 0xfe, 0xfe, 0x04, 0x04, 0x0a, 0x10, 0x16, 0x06, 0xd9, 0xed, 0x12, 0x22, + 0x25, 0x2b, 0x2b, 0x2e, 0x33, 0x39, 0x46, 0x46, 0x46, 0x48, 0x4f, 0x52, + 0x51, 0x51, 0x52, 0x56, 0x58, 0x5d, 0x60, 0x5e, 0x5f, 0x5c, 0x5e, 0x61, + 0x5e, 0x5e, 0x5d, 0x5f, 0x5e, 0x5b, 0x5e, 0x50, 0x49, 0x4c, 0x4d, 0x53, + 0x5e, 0x60, 0x60, 0x60, 0x60, 0x60, 0x62, 0x63, 0x62, 0x63, 0x64, 0x62, + 0x5f, 0x5e, 0x5e, 0x5b, 0x58, 0x52, 0x44, 0x3a, 0x41, 0x48, 0x46, 0x44, + 0x40, 0x3c, 0x3a, 0x35, 0x2e, 0x2f, 0x2a, 0x28, 0x2b, 0x23, 0x1d, 
0x1d, + 0x1b, 0x10, 0x0a, 0x07, 0x02, 0x07, 0x18, 0x1c, 0x1b, 0x22, 0x20, 0x0f, + 0x0a, 0x08, 0x02, 0x04, 0x07, 0x05, 0xff, 0x11, 0x1b, 0x21, 0x2a, 0x2e, + 0x2c, 0x2c, 0x2e, 0x31, 0x33, 0x34, 0x37, 0x3a, 0x3b, 0x38, 0x36, 0x36, + 0x39, 0x3f, 0x3e, 0x3a, 0x39, 0x3d, 0x3f, 0x3e, 0xcd, 0xcd, 0xbd, 0xb3, + 0xbd, 0xcf, 0xd6, 0xd2, 0xd6, 0xe5, 0xc6, 0xd4, 0xd6, 0xc8, 0xb9, 0xc1, + 0xcc, 0xc2, 0xc2, 0xc0, 0xb6, 0xc1, 0xc2, 0xc5, 0xbc, 0xbc, 0xc6, 0xab, + 0xad, 0xbc, 0xbc, 0xc1, 0xb3, 0xa6, 0xa6, 0xbe, 0xb9, 0xc6, 0xcf, 0xb3, + 0xaa, 0xb2, 0xc0, 0xb6, 0xaa, 0xc0, 0xc4, 0xb9, 0xa9, 0xad, 0xce, 0xc7, + 0xc5, 0xd1, 0xca, 0xb1, 0xb5, 0xba, 0xb7, 0xb2, 0xb5, 0xab, 0xad, 0xba, + 0xbc, 0xbf, 0xbc, 0xbf, 0xc3, 0xcb, 0xd1, 0xd6, 0xd9, 0xdb, 0xe3, 0xef, + 0xe8, 0xe3, 0xdc, 0xd8, 0xcf, 0xcd, 0xce, 0xc9, 0xc7, 0xcf, 0xcf, 0xd2, + 0xd2, 0xdf, 0xe5, 0xe0, 0xe0, 0xeb, 0xf2, 0xf3, 0xf6, 0xf8, 0x00, 0x05, + 0x0c, 0x12, 0x16, 0xfb, 0xd5, 0xff, 0x14, 0x23, 0x27, 0x28, 0x28, 0x2e, + 0x35, 0x3a, 0x46, 0x49, 0x46, 0x49, 0x50, 0x53, 0x4f, 0x4f, 0x52, 0x57, + 0x59, 0x59, 0x60, 0x60, 0x5e, 0x59, 0x5a, 0x5e, 0x5f, 0x5e, 0x5f, 0x5f, + 0x60, 0x60, 0x5f, 0x57, 0x53, 0x53, 0x56, 0x55, 0x59, 0x5c, 0x59, 0x5b, + 0x5c, 0x5d, 0x5e, 0x5e, 0x5f, 0x60, 0x5f, 0x5e, 0x5d, 0x5c, 0x5a, 0x58, + 0x55, 0x52, 0x47, 0x30, 0x25, 0x34, 0x3a, 0x3a, 0x3d, 0x37, 0x37, 0x36, + 0x2d, 0x28, 0x26, 0x1f, 0x18, 0x14, 0x0a, 0x07, 0x10, 0x0a, 0x04, 0xf7, + 0xf3, 0xfb, 0x09, 0x0e, 0x0d, 0x0e, 0x16, 0x13, 0x0a, 0x06, 0x03, 0xf6, + 0xf0, 0xe2, 0xe6, 0xf7, 0x12, 0x13, 0x12, 0x17, 0x1e, 0x2e, 0x2c, 0x2b, + 0x2c, 0x28, 0x25, 0x2d, 0x29, 0x26, 0x20, 0x21, 0x22, 0x32, 0x3a, 0x39, + 0x39, 0x34, 0x36, 0x38, 0xc8, 0xce, 0xba, 0xbb, 0xc7, 0xcf, 0xd7, 0xd5, + 0xdc, 0xe0, 0xca, 0xd2, 0xd4, 0xc1, 0xbc, 0xbb, 0xc7, 0xcd, 0xd1, 0xbf, + 0xb6, 0xbf, 0xc7, 0xc2, 0xc1, 0xc2, 0xc7, 0xb1, 0xab, 0xbd, 0xc2, 0xb0, + 0xa6, 0xa7, 0xa8, 0xb6, 0xbc, 0xd5, 0xe3, 0xbd, 0xac, 0xb9, 0xc7, 0xd4, + 0xb3, 0xb4, 0xc8, 0xbb, 0xa8, 0xb4, 0xc4, 0xd0, 0xbc, 0xc0, 0xbf, 
0xac, + 0xab, 0xb1, 0xb8, 0xb3, 0xb6, 0xb0, 0xa9, 0xb6, 0xb4, 0xc1, 0xc0, 0xbe, + 0xc2, 0xcc, 0xcb, 0xc8, 0xc3, 0xcb, 0xd9, 0xd5, 0xc4, 0xbf, 0xbe, 0xc5, + 0xc3, 0xc8, 0xc9, 0xc2, 0xc3, 0xc3, 0xc8, 0xc9, 0xd0, 0xd8, 0xd1, 0xd1, + 0xd6, 0xe4, 0xe2, 0xe3, 0xea, 0xf4, 0xfe, 0x03, 0x07, 0x11, 0x14, 0xe7, + 0xda, 0x0c, 0x14, 0x20, 0x28, 0x27, 0x21, 0x2f, 0x39, 0x3d, 0x44, 0x47, + 0x45, 0x47, 0x4e, 0x51, 0x4e, 0x50, 0x52, 0x56, 0x57, 0x56, 0x5e, 0x5f, + 0x55, 0x56, 0x53, 0x57, 0x5a, 0x5f, 0x5e, 0x5e, 0x5d, 0x5c, 0x5e, 0x5c, + 0x59, 0x58, 0x5a, 0x5e, 0x5d, 0x5e, 0x5f, 0x5e, 0x5d, 0x5e, 0x5e, 0x60, + 0x5e, 0x5c, 0x5b, 0x58, 0x4e, 0x54, 0x54, 0x52, 0x4c, 0x45, 0x3c, 0x23, + 0x10, 0x17, 0x2a, 0x29, 0x2c, 0x28, 0x20, 0x21, 0x22, 0x1e, 0x17, 0x12, + 0x0f, 0x07, 0xfe, 0xf6, 0xf8, 0xed, 0xec, 0xf5, 0xe9, 0xec, 0xfa, 0xf1, + 0xff, 0xfa, 0xfd, 0xfe, 0x02, 0xfc, 0x04, 0xf6, 0xe4, 0xcf, 0xce, 0xe2, + 0xfe, 0xfc, 0xf6, 0xf0, 0x02, 0x22, 0x2b, 0x28, 0x28, 0x23, 0x19, 0x15, + 0x16, 0x16, 0x0c, 0x0a, 0x0f, 0x16, 0x1e, 0x1b, 0x16, 0x16, 0x26, 0x25, + 0xc4, 0xd0, 0xc3, 0xc8, 0xcd, 0xd3, 0xd4, 0xcb, 0xc2, 0xbc, 0xc7, 0xd3, + 0xcf, 0xc3, 0xbd, 0xbb, 0xc8, 0xce, 0xc7, 0xb9, 0xbd, 0xc3, 0xc7, 0xbf, + 0xbc, 0xc2, 0xca, 0xb8, 0xaa, 0xb6, 0xc0, 0xb6, 0xad, 0xac, 0xaa, 0xb0, + 0xbc, 0xcc, 0xc8, 0xb9, 0xb1, 0xb9, 0xc0, 0xc6, 0xae, 0xb0, 0xc0, 0xbf, + 0xbb, 0xbd, 0xc2, 0xce, 0xc0, 0xb6, 0xb6, 0xa8, 0xaa, 0xae, 0xc0, 0xb4, + 0xbd, 0xbc, 0xaa, 0xb6, 0xb4, 0xcf, 0xcd, 0xc8, 0xc3, 0xc7, 0xc8, 0xbc, + 0xbd, 0xcf, 0xc9, 0xc6, 0xb8, 0xb4, 0xb5, 0xbe, 0xc1, 0xc2, 0xc3, 0xc3, + 0xc5, 0xc6, 0xc8, 0xc8, 0xc7, 0xc7, 0xc4, 0xce, 0xd4, 0xda, 0xd9, 0xdc, + 0xe3, 0xec, 0xf7, 0xfc, 0xfe, 0x0c, 0x0a, 0xd6, 0xe9, 0x14, 0x17, 0x1b, + 0x24, 0x22, 0x20, 0x30, 0x3c, 0x3e, 0x44, 0x45, 0x45, 0x43, 0x4c, 0x4d, + 0x4e, 0x4f, 0x52, 0x56, 0x52, 0x52, 0x56, 0x58, 0x4f, 0x51, 0x4c, 0x4e, + 0x57, 0x5c, 0x5c, 0x58, 0x56, 0x53, 0x55, 0x58, 0x5a, 0x5c, 0x58, 0x58, + 0x5d, 0x60, 0x5e, 0x5c, 0x5e, 0x5d, 0x58, 0x59, 0x58, 0x55, 0x52, 
0x4a, + 0x3e, 0x49, 0x48, 0x44, 0x3c, 0x35, 0x2a, 0x13, 0xf2, 0xf2, 0x11, 0x1d, + 0x1b, 0x13, 0x14, 0x0c, 0x0b, 0x06, 0xf8, 0xf8, 0xf1, 0xf3, 0xec, 0xf8, + 0xf8, 0xf2, 0xe3, 0xe6, 0xf1, 0xf7, 0xf0, 0xee, 0xf7, 0xe5, 0xe7, 0xe5, + 0xef, 0xf1, 0xf6, 0xf7, 0xec, 0xd5, 0xcf, 0xdc, 0xe3, 0xe1, 0xe4, 0xdd, + 0xff, 0x10, 0x12, 0x04, 0x1d, 0x21, 0x1f, 0x1a, 0x0d, 0x05, 0x0a, 0x0f, + 0x10, 0x11, 0x16, 0x05, 0xef, 0xfc, 0x03, 0x0c, 0xc2, 0xd0, 0xd1, 0xd0, + 0xd0, 0xd3, 0xc3, 0xaa, 0xac, 0xb9, 0xc8, 0xd3, 0xce, 0xbe, 0xc0, 0xc3, + 0xcc, 0xcf, 0xc7, 0xbc, 0xbd, 0xc2, 0xc3, 0xc2, 0xc0, 0xc1, 0xc2, 0xb8, + 0xb6, 0xcb, 0xc8, 0xba, 0xb4, 0xaf, 0xad, 0xba, 0xbf, 0xbc, 0xd0, 0xc6, + 0xb6, 0xb2, 0xb6, 0xbe, 0xad, 0xa9, 0xb6, 0xb6, 0xb8, 0xdc, 0xe0, 0xc4, + 0xc5, 0xbd, 0xb9, 0xaf, 0xaf, 0xb5, 0xbf, 0xc1, 0xc2, 0xc6, 0xaa, 0xb1, + 0xb9, 0xc5, 0xc8, 0xc8, 0xc1, 0xbc, 0xc6, 0xbd, 0xc2, 0xd2, 0xe1, 0xc9, + 0xb2, 0xb1, 0xb2, 0xb5, 0xbc, 0xbc, 0xbe, 0xc5, 0xbe, 0xc6, 0xca, 0xd3, + 0xe2, 0xec, 0xf3, 0xfd, 0xff, 0x01, 0xfe, 0xf8, 0xf4, 0xf2, 0xf7, 0xf9, + 0xf8, 0x04, 0xfe, 0xcb, 0xf2, 0x11, 0x16, 0x17, 0x1d, 0x1e, 0x22, 0x34, + 0x3e, 0x40, 0x41, 0x46, 0x43, 0x40, 0x46, 0x46, 0x4c, 0x4e, 0x52, 0x53, + 0x4f, 0x4c, 0x4e, 0x4d, 0x4a, 0x4d, 0x46, 0x48, 0x52, 0x58, 0x5a, 0x58, + 0x52, 0x51, 0x50, 0x52, 0x55, 0x57, 0x5a, 0x5b, 0x58, 0x59, 0x59, 0x57, + 0x56, 0x55, 0x54, 0x53, 0x51, 0x4b, 0x48, 0x3b, 0x29, 0x34, 0x31, 0x2f, + 0x28, 0x24, 0x1c, 0x13, 0xf1, 0xda, 0xea, 0x0a, 0x09, 0x09, 0x06, 0x02, + 0xff, 0xec, 0xd3, 0xe3, 0xeb, 0xee, 0xe6, 0xf2, 0xf3, 0xf1, 0xeb, 0xe0, + 0xe6, 0xfb, 0xec, 0xe6, 0xe6, 0xd4, 0xd6, 0xcc, 0xd0, 0xda, 0xe3, 0xe6, + 0xe4, 0xdc, 0xe3, 0xe5, 0xd4, 0xda, 0xd7, 0xd4, 0xea, 0xf6, 0xef, 0xce, + 0xfa, 0x17, 0x0f, 0x0a, 0x06, 0xf7, 0xe6, 0xf3, 0xfa, 0xfb, 0xfa, 0xea, + 0xef, 0xec, 0xe9, 0xf0, 0xc8, 0xcf, 0xd0, 0xcd, 0xca, 0xd0, 0xbc, 0xb0, + 0xae, 0xbc, 0xcd, 0xd4, 0xc7, 0xb3, 0xc2, 0xca, 0xc5, 0xc8, 0xce, 0xc7, + 0xc6, 0xc9, 0xc8, 0xc2, 0xc1, 0xbd, 0xbe, 0xc0, 0xd6, 0xe5, 0xd4, 
0xcd, + 0xc3, 0xb7, 0xaf, 0xc1, 0xc7, 0xb6, 0xc8, 0xcf, 0xc8, 0xb6, 0xad, 0xb9, + 0xbb, 0xab, 0xb6, 0xae, 0xc7, 0xdb, 0xd8, 0xbf, 0xc0, 0xc2, 0xba, 0xb8, + 0xb9, 0xbd, 0xc0, 0xcf, 0xc5, 0xce, 0xb6, 0xaf, 0xbd, 0xc4, 0xc6, 0xc1, + 0xc5, 0xbc, 0xba, 0xca, 0xc6, 0xc8, 0xce, 0xc0, 0xaf, 0xb1, 0xb0, 0xb2, + 0xba, 0xc0, 0xc0, 0xc8, 0xd7, 0xf8, 0x0c, 0x1c, 0x22, 0x20, 0x1d, 0x1b, + 0x19, 0x17, 0x10, 0x08, 0xff, 0xf8, 0xfa, 0xf8, 0xf3, 0xf8, 0xeb, 0xcd, + 0x03, 0x0f, 0x12, 0x15, 0x16, 0x1c, 0x22, 0x31, 0x3d, 0x3f, 0x40, 0x46, + 0x40, 0x3d, 0x3e, 0x41, 0x48, 0x49, 0x4c, 0x4c, 0x4b, 0x4c, 0x4b, 0x45, + 0x45, 0x4d, 0x4c, 0x47, 0x4e, 0x54, 0x58, 0x59, 0x52, 0x4e, 0x4c, 0x52, + 0x51, 0x53, 0x55, 0x58, 0x58, 0x57, 0x56, 0x55, 0x50, 0x4a, 0x44, 0x45, + 0x47, 0x40, 0x39, 0x27, 0x10, 0x11, 0x0a, 0x01, 0x04, 0x06, 0x03, 0xf9, + 0xd4, 0xd3, 0xd1, 0xf1, 0x06, 0xff, 0xf7, 0xf1, 0xdd, 0xc3, 0xd8, 0xf2, + 0xe7, 0xe5, 0xe7, 0xe3, 0xdd, 0xda, 0xd3, 0xd1, 0xd6, 0xf9, 0xe8, 0xdd, + 0xe0, 0xc5, 0xc4, 0xc8, 0xcb, 0xcf, 0xdc, 0xd3, 0xd0, 0xd4, 0xd4, 0xd8, + 0xce, 0xd8, 0xd3, 0xcc, 0xd9, 0xe5, 0xe4, 0xc4, 0xda, 0xfe, 0xf4, 0xf4, + 0xec, 0xe9, 0xe9, 0xe4, 0xd3, 0xd5, 0xe2, 0xdf, 0xe8, 0xe4, 0xdd, 0xdf, + 0xce, 0xcd, 0xc8, 0xc5, 0xca, 0xcf, 0xb9, 0xb5, 0xb6, 0xc2, 0xcf, 0xce, + 0xbb, 0xb3, 0xc8, 0xcc, 0xba, 0xbc, 0xda, 0xd6, 0xca, 0xc2, 0xc2, 0xcb, + 0xbf, 0xbb, 0xbc, 0xc3, 0xca, 0xcc, 0xcb, 0xcc, 0xc9, 0xc5, 0xbd, 0xc0, + 0xca, 0xb5, 0xbb, 0xbf, 0xcd, 0xd0, 0xba, 0xaf, 0xb5, 0xb4, 0xbb, 0xbc, + 0xc3, 0xd4, 0xda, 0xc5, 0xc2, 0xc5, 0xba, 0xba, 0xba, 0xb5, 0xb3, 0xbe, + 0xc8, 0xc6, 0xb1, 0xaa, 0xbc, 0xc9, 0xc8, 0xbc, 0xc7, 0xc7, 0xce, 0xdd, + 0xd5, 0xc7, 0xc3, 0xbd, 0xb6, 0xb2, 0xb6, 0xb6, 0xc2, 0xe0, 0xf0, 0xe7, + 0x0d, 0x24, 0x1a, 0x25, 0x2e, 0x2d, 0x2a, 0x2a, 0x27, 0x22, 0x1a, 0x15, + 0x09, 0xf5, 0xf1, 0xf5, 0xee, 0xec, 0xd5, 0xdb, 0x0b, 0x0d, 0x10, 0x15, + 0x12, 0x16, 0x1e, 0x2e, 0x3f, 0x3b, 0x3a, 0x3f, 0x3d, 0x35, 0x34, 0x38, + 0x46, 0x41, 0x43, 0x46, 0x46, 0x4c, 0x46, 0x40, 0x43, 0x4d, 0x51, 
0x4f, + 0x4e, 0x53, 0x58, 0x56, 0x51, 0x4e, 0x48, 0x49, 0x49, 0x4a, 0x4a, 0x4e, + 0x4c, 0x4c, 0x4c, 0x4e, 0x52, 0x49, 0x40, 0x3d, 0x30, 0x1e, 0x22, 0x11, + 0xfa, 0xef, 0xde, 0xda, 0xd7, 0xda, 0xdb, 0xcd, 0xb4, 0xc6, 0xce, 0xd3, + 0xea, 0xec, 0xe6, 0xd3, 0xba, 0xbd, 0xda, 0xea, 0xdb, 0xd8, 0xe5, 0xdc, + 0xce, 0xd8, 0xd1, 0xcf, 0xe6, 0xec, 0xda, 0xdd, 0xe0, 0xc7, 0xbc, 0xbb, + 0xc6, 0xc5, 0xbe, 0xc4, 0xbc, 0xd2, 0xd3, 0xd0, 0xd4, 0xd2, 0xd0, 0xc7, + 0xcb, 0xcc, 0xc9, 0xc5, 0xca, 0xe2, 0xd3, 0xda, 0xe1, 0xd4, 0xd1, 0xcd, + 0xc7, 0xc9, 0xd3, 0xd0, 0xdc, 0xde, 0xd4, 0xd1, 0xcb, 0xcd, 0xca, 0xca, + 0xcc, 0xc9, 0xb7, 0xb7, 0xb8, 0xc6, 0xcd, 0xc2, 0xb2, 0xc0, 0xcd, 0xda, + 0xc2, 0xb8, 0xd6, 0xda, 0xde, 0xce, 0xc4, 0xbc, 0xb3, 0xb6, 0xb5, 0xbc, + 0xb6, 0xb5, 0xbe, 0xcc, 0xc0, 0xbd, 0xc2, 0xc4, 0xc9, 0xc2, 0xb7, 0xbb, + 0xbc, 0xc8, 0xd5, 0xc6, 0xae, 0xb3, 0xbc, 0xc5, 0xd5, 0xcd, 0xd7, 0xca, + 0xc3, 0xc8, 0xbe, 0xbf, 0xba, 0xb6, 0xb5, 0xb8, 0xc6, 0xc1, 0xad, 0xa6, + 0xb4, 0xd0, 0xc4, 0xbb, 0xbe, 0xca, 0xed, 0xd3, 0xcf, 0xc9, 0xc0, 0xc7, + 0xc0, 0xc8, 0xca, 0xcd, 0xed, 0x1d, 0x22, 0xf1, 0x0a, 0x1f, 0xfc, 0x20, + 0x36, 0x39, 0x37, 0x34, 0x2f, 0x25, 0x16, 0x0b, 0x01, 0xe4, 0xea, 0xfa, + 0xf1, 0xe5, 0xc9, 0xe9, 0x0a, 0x07, 0x0a, 0x10, 0x11, 0x16, 0x17, 0x27, + 0x3d, 0x34, 0x38, 0x3b, 0x34, 0x2a, 0x30, 0x34, 0x3e, 0x39, 0x3a, 0x41, + 0x48, 0x49, 0x40, 0x41, 0x41, 0x49, 0x50, 0x52, 0x50, 0x52, 0x53, 0x55, + 0x52, 0x4f, 0x4d, 0x4a, 0x40, 0x3a, 0x3f, 0x40, 0x3e, 0x38, 0x36, 0x34, + 0x3f, 0x40, 0x3a, 0x30, 0x11, 0xfe, 0x0a, 0xf0, 0xda, 0xd6, 0xc4, 0xbc, + 0xbc, 0xbc, 0xc1, 0xb6, 0xa7, 0xb3, 0xcf, 0xc9, 0xd1, 0xd4, 0xc8, 0xba, + 0xba, 0xc8, 0xcd, 0xd3, 0xd2, 0xce, 0xcb, 0xcc, 0xd7, 0xe4, 0xca, 0xcf, + 0xd7, 0xd7, 0xd1, 0xd7, 0xd6, 0xce, 0xc0, 0xb6, 0xc8, 0xcc, 0xc1, 0xc1, + 0xb6, 0xc9, 0xd8, 0xc8, 0xd1, 0xd3, 0xca, 0xc1, 0xc5, 0xc1, 0xb9, 0xbf, + 0xc1, 0xcd, 0xc8, 0xc2, 0xcd, 0xc4, 0xbb, 0xb9, 0xc0, 0xc5, 0xc8, 0xbf, + 0xcd, 0xcf, 0xc5, 0xc2, 0xbb, 0xc3, 0xca, 0xd4, 0xd4, 0xc2, 0xb8, 
0xb8, + 0xb7, 0xc7, 0xcb, 0xc2, 0xc2, 0xca, 0xcb, 0xd7, 0xce, 0xc2, 0xcd, 0xc8, + 0xcb, 0xc8, 0xc8, 0xb5, 0xb0, 0xc3, 0xc8, 0xbf, 0xb1, 0xb6, 0xb4, 0xbb, + 0xb4, 0xb6, 0xb9, 0xba, 0xbc, 0xc2, 0xc3, 0xc1, 0xb9, 0xbb, 0xcd, 0xdb, + 0xce, 0xbc, 0xc2, 0xca, 0xd4, 0xc8, 0xd8, 0xcb, 0xc0, 0xcc, 0xc6, 0xc8, + 0xc6, 0xb4, 0xae, 0xb6, 0xb5, 0xb2, 0xac, 0xa5, 0xb0, 0xc6, 0xbc, 0xb9, + 0xb6, 0xc2, 0xc4, 0xb7, 0xb6, 0xc2, 0xbe, 0xc8, 0xc6, 0xd3, 0xf4, 0xec, + 0x1b, 0x2c, 0x28, 0xf2, 0xfb, 0x0e, 0xff, 0x28, 0x3a, 0x3d, 0x3c, 0x3a, + 0x35, 0x25, 0xf8, 0xe6, 0xeb, 0xd6, 0xe5, 0xfe, 0xf7, 0xe0, 0xcc, 0xf8, + 0xfb, 0xfa, 0xff, 0x04, 0x0c, 0x13, 0x0e, 0x21, 0x36, 0x2f, 0x30, 0x35, + 0x2e, 0x1e, 0x2b, 0x2f, 0x35, 0x33, 0x31, 0x31, 0x40, 0x45, 0x3b, 0x38, + 0x2f, 0x35, 0x47, 0x4e, 0x4d, 0x4b, 0x4e, 0x50, 0x4d, 0x4f, 0x50, 0x4a, + 0x46, 0x40, 0x39, 0x34, 0x2a, 0x27, 0x29, 0x2a, 0x2e, 0x30, 0x29, 0x13, + 0xf4, 0xf1, 0xea, 0xd5, 0xc5, 0xcc, 0xb8, 0xb0, 0xb6, 0xb7, 0xb8, 0xad, + 0xa9, 0xae, 0xd3, 0xcd, 0xc8, 0xbf, 0xb6, 0xbc, 0xd4, 0xcd, 0xc0, 0xbd, + 0xc8, 0xc9, 0xb9, 0xb6, 0xd6, 0xdf, 0xd8, 0xbd, 0xc8, 0xcb, 0xc3, 0xc5, + 0xce, 0xce, 0xbc, 0xb7, 0xb7, 0xbf, 0xcd, 0xc3, 0xbd, 0xc0, 0xd4, 0xce, + 0xc9, 0xcb, 0xc5, 0xc1, 0xc4, 0xc1, 0xb8, 0xb8, 0xbd, 0xc5, 0xc2, 0xbb, + 0xbc, 0xc5, 0xc5, 0xbc, 0xb6, 0xbc, 0xc1, 0xb6, 0xc1, 0xc8, 0xc9, 0xc2, + 0xb6, 0xbd, 0xc2, 0xd3, 0xd9, 0xbd, 0xb9, 0xbb, 0xbd, 0xca, 0xc9, 0xc2, + 0xc8, 0xcd, 0xc7, 0xcf, 0xcb, 0xcf, 0xc3, 0xbb, 0xc6, 0xc2, 0xc2, 0xb9, + 0xaa, 0xb9, 0xc6, 0xc1, 0xb0, 0xb9, 0xb1, 0xae, 0xac, 0xaf, 0xc2, 0xbe, + 0xb6, 0xb4, 0xb9, 0xbd, 0xc1, 0xbf, 0xbd, 0xc5, 0xcb, 0xc8, 0xc3, 0xc6, + 0xcf, 0xcc, 0xcf, 0xcb, 0xbf, 0xcc, 0xca, 0xd5, 0xce, 0xbf, 0xb2, 0xba, + 0xb2, 0xb6, 0xb3, 0xa7, 0xb0, 0xc7, 0xbc, 0xb6, 0xba, 0xee, 0xc7, 0xac, + 0xad, 0xb6, 0xc3, 0xda, 0xc4, 0xdb, 0x19, 0xfa, 0x07, 0x2e, 0x29, 0xf4, + 0xee, 0xf8, 0xff, 0x24, 0x30, 0x35, 0x39, 0x38, 0x31, 0x25, 0xf8, 0xd4, + 0xe0, 0xd4, 0xd6, 0xfa, 0xf4, 0xd1, 0xd5, 0xf7, 0xe4, 0xec, 0xf3, 
0xf6, + 0x02, 0x0e, 0x03, 0x16, 0x2f, 0x27, 0x23, 0x2e, 0x26, 0x16, 0x1f, 0x28, + 0x29, 0x29, 0x24, 0x2a, 0x2f, 0x34, 0x35, 0x22, 0x08, 0x18, 0x3b, 0x48, + 0x46, 0x40, 0x43, 0x47, 0x3f, 0x38, 0x3d, 0x37, 0x34, 0x3b, 0x3a, 0x2e, + 0x23, 0x20, 0x20, 0x20, 0x1c, 0x16, 0x0e, 0xee, 0xe1, 0xe8, 0xde, 0xcc, + 0xbe, 0xcb, 0xc1, 0xaf, 0xb6, 0xb7, 0xb2, 0xad, 0xaa, 0xb6, 0xce, 0xc7, + 0xc5, 0xbe, 0xbc, 0xbd, 0xd0, 0xce, 0xc3, 0xbd, 0xc1, 0xc3, 0xb7, 0xaf, + 0xc2, 0xdb, 0xee, 0xc7, 0xcb, 0xc7, 0xba, 0xba, 0xd7, 0xd9, 0xbf, 0xb1, + 0xb4, 0xc4, 0xd0, 0xc7, 0xb7, 0xba, 0xc9, 0xd4, 0xca, 0xc2, 0xbb, 0xc3, + 0xbe, 0xb9, 0xb3, 0xb5, 0xbf, 0xc6, 0xc4, 0xb5, 0xb3, 0xb8, 0xbc, 0xbb, + 0xbc, 0xc2, 0xc2, 0xb8, 0xcb, 0xcf, 0xc3, 0xc6, 0xc6, 0xb6, 0xbb, 0xc3, + 0xc1, 0xb8, 0xbf, 0xc2, 0xc3, 0xca, 0xc2, 0xc6, 0xcd, 0xc8, 0xcc, 0xda, + 0xc7, 0xce, 0xbd, 0xb2, 0xc1, 0xc0, 0xc7, 0xbf, 0xb2, 0xba, 0xca, 0xc2, + 0xad, 0xb2, 0xb7, 0xac, 0xad, 0xad, 0xca, 0xc7, 0xbc, 0xb2, 0xb1, 0xb1, + 0xb6, 0xbc, 0xb7, 0xba, 0xbf, 0xc0, 0xc0, 0xc1, 0xd4, 0xd9, 0xcb, 0xc6, + 0xc0, 0xc8, 0xc7, 0xc5, 0xcd, 0xe0, 0xc8, 0xcc, 0xd1, 0xb9, 0xb0, 0xa8, + 0xb2, 0xc1, 0xb7, 0xbc, 0xb8, 0xd4, 0xda, 0xc1, 0xd5, 0xd6, 0xc3, 0xc7, + 0xcc, 0x04, 0x14, 0xf1, 0xe6, 0x18, 0x23, 0xf4, 0xdb, 0xdf, 0xec, 0x0c, + 0x15, 0x22, 0x2f, 0x30, 0x28, 0x1f, 0x07, 0xd7, 0xce, 0xc9, 0xce, 0xf0, + 0xee, 0xcd, 0xdd, 0xf6, 0xe2, 0xe5, 0xe9, 0xf1, 0xf8, 0x08, 0xfe, 0x0f, + 0x25, 0x20, 0x16, 0x1f, 0x21, 0x10, 0x10, 0x1b, 0x1f, 0x18, 0x18, 0x1c, + 0x26, 0x26, 0x1d, 0x10, 0xfe, 0x10, 0x30, 0x3a, 0x40, 0x32, 0x32, 0x3b, + 0x33, 0x28, 0x22, 0x14, 0x03, 0x0f, 0x15, 0x1b, 0x0d, 0x09, 0x0a, 0x02, + 0xff, 0xfc, 0xe8, 0xdb, 0xda, 0xd3, 0xd3, 0xca, 0xbc, 0xbf, 0xc3, 0xb6, + 0xb8, 0xb9, 0xb3, 0xc4, 0xba, 0xbc, 0xcc, 0xc2, 0xca, 0xc3, 0xbe, 0xbc, + 0xc3, 0xc8, 0xcd, 0xc8, 0xc5, 0xbf, 0xc0, 0xb3, 0xc5, 0xde, 0xd5, 0xc2, + 0xc8, 0xb9, 0xbc, 0xb6, 0xc8, 0xc3, 0xbc, 0xb9, 0xb7, 0xc0, 0xcd, 0xbf, + 0xae, 0xb2, 0xbb, 0xcc, 0xd4, 0xc8, 0xb4, 0xc9, 0xc3, 0xb5, 0xb0, 
0xb4, + 0xbc, 0xce, 0xce, 0xcd, 0xe6, 0xd4, 0xb0, 0xb0, 0xbc, 0xbe, 0xc0, 0xc1, + 0xed, 0xd0, 0xcf, 0xce, 0xc2, 0xb2, 0xb1, 0xb9, 0xb8, 0xb9, 0xc2, 0xc6, + 0xc8, 0xce, 0xca, 0xca, 0xcf, 0xd0, 0xdc, 0xe9, 0xd1, 0xc7, 0xbb, 0xaf, + 0xba, 0xca, 0xd2, 0xc8, 0xbe, 0xc9, 0xd1, 0xbe, 0xb0, 0xb7, 0xbb, 0xaf, + 0xaf, 0xb5, 0xce, 0xd3, 0xca, 0xb5, 0xba, 0xb9, 0xb2, 0xb0, 0xb7, 0xb5, + 0xad, 0xb2, 0xb8, 0xbc, 0xd1, 0xdc, 0xc9, 0xc1, 0xbf, 0xc4, 0xc0, 0xb2, + 0xb0, 0xca, 0xd1, 0xd4, 0xca, 0xb0, 0xae, 0xaf, 0xbb, 0xbe, 0xb3, 0xd1, + 0xcc, 0xbb, 0xce, 0xe1, 0xfc, 0xeb, 0xc4, 0xc2, 0xd1, 0xe2, 0xe8, 0xda, + 0xd6, 0xfa, 0x16, 0xf2, 0xd0, 0xd1, 0xdf, 0xee, 0x18, 0x1c, 0x13, 0x0d, + 0x16, 0x16, 0x04, 0xdf, 0xce, 0xcb, 0xc5, 0xdb, 0xda, 0xc5, 0xd2, 0xe4, + 0xdd, 0xd5, 0xdb, 0xe8, 0xf2, 0xfd, 0xfa, 0x09, 0x18, 0x15, 0x07, 0x0a, + 0x19, 0x0e, 0x07, 0x0a, 0x08, 0x06, 0x0b, 0x0f, 0x16, 0x21, 0x0a, 0xf4, + 0xf2, 0x03, 0x22, 0x2a, 0x34, 0x2c, 0x20, 0x28, 0x1e, 0x14, 0x16, 0x0e, + 0xf7, 0xed, 0xe4, 0xec, 0xf1, 0xe8, 0xeb, 0xf3, 0xf2, 0xe6, 0xd1, 0xd8, + 0xe0, 0xd2, 0xc9, 0xc2, 0xbc, 0xbb, 0xc2, 0xb9, 0xb8, 0xce, 0xca, 0xc5, + 0xbb, 0xb1, 0xc1, 0xb3, 0xc8, 0xc5, 0xb8, 0xb8, 0xbc, 0xbb, 0xb6, 0xba, + 0xc8, 0xbc, 0xcc, 0xc1, 0xd1, 0xd8, 0xc3, 0xc2, 0xba, 0xb6, 0xbf, 0xb6, + 0xb7, 0xb7, 0xc3, 0xc5, 0xbc, 0xca, 0xcc, 0xbf, 0xae, 0xb6, 0xbc, 0xbe, + 0xc7, 0xd4, 0xc9, 0xc2, 0xc9, 0xb5, 0xaf, 0xb0, 0xb1, 0xc4, 0xd9, 0xd8, + 0xeb, 0xd7, 0xb0, 0xae, 0xb2, 0xbc, 0xc0, 0xb0, 0xbe, 0xc4, 0xdd, 0xd5, + 0xb7, 0xae, 0xaa, 0xab, 0xbc, 0xbe, 0xc7, 0xca, 0xc9, 0xcf, 0xd7, 0xd2, + 0xce, 0xca, 0xd0, 0xed, 0xd6, 0xc5, 0xc3, 0xb6, 0xba, 0xbe, 0xcc, 0xcf, + 0xbd, 0xd8, 0xd9, 0xbc, 0xb4, 0xbc, 0xb6, 0xb3, 0xc2, 0xd6, 0xd0, 0xc2, + 0xbd, 0xbc, 0xbe, 0xc0, 0xbc, 0xb8, 0xb7, 0xab, 0xa5, 0xa7, 0xaf, 0xb8, + 0xc0, 0xdd, 0xc9, 0xbe, 0xba, 0xc1, 0xbd, 0xbb, 0xb5, 0xbb, 0xc3, 0xc6, + 0xca, 0xbc, 0xae, 0xcf, 0xca, 0xc3, 0xbe, 0xc2, 0xbe, 0xc1, 0xbf, 0xda, + 0xf1, 0xe0, 0xd3, 0xd9, 0xd0, 0xcc, 0xd7, 0xd5, 0xd0, 0xe5, 0x07, 
0xf0, + 0xd2, 0xce, 0xda, 0xdd, 0xfe, 0x1e, 0x20, 0x02, 0xe0, 0xef, 0xf1, 0xe5, + 0xde, 0xce, 0xc7, 0xc7, 0xc3, 0xbf, 0xca, 0xe2, 0xec, 0xd2, 0xd3, 0xd7, + 0xe6, 0xed, 0xf2, 0x02, 0x0e, 0x05, 0xf7, 0xfe, 0x0e, 0x08, 0xfb, 0xeb, + 0xe8, 0xea, 0xf4, 0xfa, 0xf9, 0x15, 0x04, 0xf1, 0xe6, 0xf7, 0x12, 0x22, + 0x2e, 0x2c, 0x1d, 0x16, 0x0a, 0xfe, 0xf4, 0x04, 0xe2, 0xd0, 0xc9, 0xc7, + 0xc8, 0xc8, 0xc4, 0xc4, 0xc8, 0xcb, 0xcb, 0xd1, 0xd4, 0xc8, 0xbd, 0xbe, + 0xbf, 0xbc, 0xc0, 0xbf, 0xb8, 0xd6, 0xee, 0xc5, 0xbc, 0xb5, 0xc2, 0xb2, + 0xbd, 0xb9, 0xb2, 0xb4, 0xbc, 0xb7, 0xb1, 0xbd, 0xc9, 0xba, 0xbc, 0xcd, + 0xd3, 0xd1, 0xc6, 0xbf, 0xbe, 0xb0, 0xb8, 0xb5, 0xb9, 0xbf, 0xca, 0xbd, + 0xc7, 0xd5, 0xce, 0xc2, 0xb7, 0xbb, 0xc2, 0xbe, 0xc2, 0xd1, 0xd6, 0xcc, + 0xe7, 0xc2, 0xb7, 0xac, 0xad, 0xb5, 0xd4, 0xdc, 0xd5, 0xc1, 0xae, 0xa9, + 0xaf, 0xbe, 0xbb, 0xb2, 0xce, 0xcc, 0xbf, 0xc2, 0xb8, 0xad, 0xa9, 0xb3, + 0xc3, 0xbc, 0xc6, 0xc9, 0xc3, 0xcb, 0xd4, 0xd8, 0xca, 0xc8, 0xcd, 0xd9, + 0xcd, 0xc5, 0xc9, 0xbc, 0xbb, 0xbe, 0xda, 0xda, 0xc5, 0xd2, 0xe5, 0xd9, + 0xb7, 0xbb, 0xb6, 0xba, 0xce, 0xd9, 0xce, 0xb4, 0xb7, 0xc5, 0xd4, 0xbc, + 0xbc, 0xc0, 0xc2, 0xb6, 0xaa, 0xa8, 0xa6, 0xaf, 0xbb, 0xd7, 0xd1, 0xc4, + 0xbc, 0xbf, 0xce, 0xd4, 0xc2, 0xbe, 0xbf, 0xc7, 0xd4, 0xd5, 0xcb, 0xd2, + 0xbc, 0xc2, 0xce, 0xda, 0xc2, 0xc6, 0xbd, 0xd2, 0xe2, 0xd8, 0xd5, 0xe0, + 0xbb, 0xc5, 0xc8, 0xce, 0xcf, 0xd8, 0xfb, 0xe3, 0xcc, 0xce, 0xda, 0xe0, + 0xd8, 0x00, 0x11, 0x12, 0xee, 0xca, 0xdb, 0xe1, 0xd8, 0xcc, 0xc2, 0xb9, + 0xb6, 0xc2, 0xc7, 0xd9, 0xe0, 0xcc, 0xcd, 0xcd, 0xd4, 0xd6, 0xe8, 0xf9, + 0xfe, 0xf2, 0xec, 0xf2, 0xf9, 0xfb, 0xf0, 0xd9, 0xdd, 0xd2, 0xde, 0xed, + 0xea, 0xf3, 0xff, 0xef, 0xe6, 0xe0, 0xf4, 0x04, 0x1b, 0x26, 0x24, 0x0a, + 0xf3, 0xf6, 0xea, 0xec, 0xe4, 0xc8, 0xbd, 0xb6, 0xbb, 0xb6, 0xae, 0xb0, + 0xbb, 0xbf, 0xbd, 0xbc, 0xbe, 0xb8, 0xb6, 0xb6, 0xda, 0xd4, 0xc5, 0xd2, + 0xc5, 0xce, 0xec, 0xc1, 0xb1, 0xca, 0xd2, 0xc9, 0xc2, 0xb9, 0xbb, 0xb7, + 0xba, 0xb0, 0xac, 0xb6, 0xba, 0xb7, 0xc7, 0xcf, 0xd1, 0xcd, 0xc2, 
0xb6, + 0xb6, 0xac, 0xb5, 0xb8, 0xc8, 0xd3, 0xd5, 0xb9, 0xd2, 0xd5, 0xcc, 0xc2, + 0xb6, 0xc3, 0xc6, 0xc4, 0xc9, 0xd1, 0xdf, 0xc5, 0xc7, 0xbe, 0xb5, 0xab, + 0xad, 0xb0, 0xc2, 0xdc, 0xe0, 0xce, 0xb9, 0xae, 0xb4, 0xbc, 0xb6, 0xae, + 0xcb, 0xc9, 0xaf, 0xb4, 0xb9, 0xb2, 0xbc, 0xbf, 0xc1, 0xb9, 0xc3, 0xc2, + 0xc2, 0xd0, 0xe3, 0xd4, 0xd0, 0xc8, 0xce, 0xce, 0xc5, 0xc6, 0xc3, 0xce, + 0xda, 0xea, 0xf8, 0xe1, 0xcd, 0xd1, 0xee, 0xe6, 0xc8, 0xd3, 0xec, 0xc5, + 0xc0, 0xd1, 0xce, 0xb7, 0xda, 0xf9, 0xde, 0xbe, 0xc0, 0xc3, 0xc7, 0xc3, + 0xba, 0xac, 0xa8, 0xaa, 0xb7, 0xd0, 0xd8, 0xc8, 0xc3, 0xc1, 0xd0, 0xd2, + 0xc1, 0xb1, 0xb5, 0xd0, 0xff, 0xd9, 0xd0, 0xc9, 0xc2, 0xcd, 0xf6, 0xf7, + 0xda, 0xdc, 0xce, 0xe0, 0xdc, 0xd8, 0xd5, 0xd4, 0xbf, 0xc2, 0xc2, 0xc4, + 0xcc, 0xce, 0xda, 0xc3, 0xc8, 0xcb, 0xd4, 0xd6, 0xc1, 0xcd, 0xe6, 0xf8, + 0xe4, 0xd7, 0xd0, 0xc3, 0xc6, 0xd4, 0xc0, 0xb7, 0xb9, 0xc5, 0xc7, 0xc6, + 0xc5, 0xc2, 0xc4, 0xbc, 0xbf, 0xc9, 0xdf, 0xeb, 0xee, 0xe0, 0xd8, 0xe6, + 0xe2, 0xeb, 0xe9, 0xd3, 0xd4, 0xcd, 0xd0, 0xd7, 0xdd, 0xda, 0xdf, 0xe4, + 0xde, 0xd0, 0xdc, 0xef, 0x02, 0x0c, 0x16, 0x0d, 0xf4, 0xec, 0xdd, 0xcf, + 0xd4, 0xc8, 0xbb, 0xb1, 0xba, 0xb6, 0xac, 0xad, 0xb9, 0xba, 0xbe, 0xc9, + 0xbe, 0xb1, 0xb6, 0xc4, 0xe0, 0xbe, 0xc9, 0xe6, 0xe6, 0xd4, 0xeb, 0xda, + 0xbe, 0xc9, 0xd0, 0xd2, 0xc8, 0xc1, 0xb7, 0xb9, 0xc2, 0xb9, 0xc7, 0xc0, + 0xbc, 0xc2, 0xce, 0xce, 0xce, 0xc2, 0xbd, 0xbb, 0xb1, 0xae, 0xbb, 0xd1, + 0xce, 0xd4, 0xca, 0xbf, 0xe1, 0xdc, 0xc8, 0xb5, 0xb5, 0xc8, 0xc2, 0xc9, + 0xc3, 0xcf, 0xdf, 0xca, 0xbc, 0xbd, 0xb9, 0xaa, 0xad, 0xb8, 0xc2, 0xd2, + 0xd9, 0xd8, 0xc6, 0xb8, 0xb8, 0xc8, 0xc2, 0xb8, 0xc4, 0xd4, 0xc1, 0xb9 +}; +unsigned int g_pytorch_images_dog_jpg_len = 150528; +#endif //#if !defined(HIFI4) diff --git a/third_party/xtensa/examples/pytorch_to_tflite/pytorch_images_dog_jpg.h b/third_party/xtensa/examples/pytorch_to_tflite/pytorch_images_dog_jpg.h new file mode 100755 index 0000000..4464f65 --- /dev/null +++ 
b/third_party/xtensa/examples/pytorch_to_tflite/pytorch_images_dog_jpg.h @@ -0,0 +1,44 @@ +/* Copyright 2022 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ +/* + * * Copyright (c) 2022 Cadence Design Systems Inc. + * * + * * Permission is hereby granted, free of charge, to any person obtaining + * * a copy of this software and associated documentation files (the + * * "Software"), to deal in the Software without restriction, including + * * without limitation the rights to use, copy, modify, merge, publish, + * * distribute, sublicense, and/or sell copies of the Software, and to + * * permit persons to whom the Software is furnished to do so, subject to + * * the following conditions: + * * + * * The above copyright notice and this permission notice shall be included + * * in all copies or substantial portions of the Software. + * * + * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, + * * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF + * * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. + * * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY + * * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, + * * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE + * * SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
+ * */ + +#ifndef THIRD_PARTY_XTENSA_EXAMPLES_PYTORCH_TO_TFLITE_PYTORCH_IMAGES_DOG_JPG_H_ +#define THIRD_PARTY_XTENSA_EXAMPLES_PYTORCH_TO_TFLITE_PYTORCH_IMAGES_DOG_JPG_H_ + +extern unsigned int g_pytorch_images_dog_jpg_len; +extern unsigned char g_pytorch_images_dog_jpg_data[]; + +#endif // THIRD_PARTY_XTENSA_EXAMPLES_PYTORCH_TO_TFLITE_PYTORCH_IMAGES_DOG_JPG_H_ diff --git a/third_party/xtensa/examples/pytorch_to_tflite/pytorch_op_resolver.cc b/third_party/xtensa/examples/pytorch_to_tflite/pytorch_op_resolver.cc new file mode 100644 index 0000000..8f352e9 --- /dev/null +++ b/third_party/xtensa/examples/pytorch_to_tflite/pytorch_op_resolver.cc @@ -0,0 +1,120 @@ +/* Copyright 2023 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/ + +#include "third_party/xtensa/examples/pytorch_to_tflite/pytorch_op_resolver.h" + +namespace tflite { + +TfLiteStatus InitPytorchOpsResolver(PytorchOpsResolver& resolver) { + TF_LITE_ENSURE_STATUS(resolver.AddAbs()); + TF_LITE_ENSURE_STATUS(resolver.AddAdd()); + TF_LITE_ENSURE_STATUS(resolver.AddAddN()); + TF_LITE_ENSURE_STATUS(resolver.AddArgMax()); + TF_LITE_ENSURE_STATUS(resolver.AddArgMin()); + TF_LITE_ENSURE_STATUS(resolver.AddAssignVariable()); + TF_LITE_ENSURE_STATUS(resolver.AddAveragePool2D()); + TF_LITE_ENSURE_STATUS(resolver.AddBatchToSpaceNd()); + TF_LITE_ENSURE_STATUS(resolver.AddBroadcastArgs()); + TF_LITE_ENSURE_STATUS(resolver.AddBroadcastTo()); + TF_LITE_ENSURE_STATUS(resolver.AddCallOnce()); + TF_LITE_ENSURE_STATUS(resolver.AddCast()); + TF_LITE_ENSURE_STATUS(resolver.AddCeil()); + TF_LITE_ENSURE_STATUS(resolver.AddCircularBuffer()); + TF_LITE_ENSURE_STATUS(resolver.AddConcatenation()); + TF_LITE_ENSURE_STATUS(resolver.AddConv2D()); + TF_LITE_ENSURE_STATUS(resolver.AddCos()); + TF_LITE_ENSURE_STATUS(resolver.AddCumSum()); + TF_LITE_ENSURE_STATUS(resolver.AddDepthToSpace()); + TF_LITE_ENSURE_STATUS(resolver.AddDepthwiseConv2D()); + TF_LITE_ENSURE_STATUS(resolver.AddDequantize()); + TF_LITE_ENSURE_STATUS(resolver.AddDetectionPostprocess()); + TF_LITE_ENSURE_STATUS(resolver.AddDiv()); + TF_LITE_ENSURE_STATUS(resolver.AddElu()); + TF_LITE_ENSURE_STATUS(resolver.AddEqual()); + TF_LITE_ENSURE_STATUS(resolver.AddEthosU()); + TF_LITE_ENSURE_STATUS(resolver.AddExp()); + TF_LITE_ENSURE_STATUS(resolver.AddExpandDims()); + TF_LITE_ENSURE_STATUS(resolver.AddFill()); + TF_LITE_ENSURE_STATUS(resolver.AddFloor()); + TF_LITE_ENSURE_STATUS(resolver.AddFloorDiv()); + TF_LITE_ENSURE_STATUS(resolver.AddFloorMod()); + TF_LITE_ENSURE_STATUS(resolver.AddFullyConnected()); + TF_LITE_ENSURE_STATUS(resolver.AddGather()); + TF_LITE_ENSURE_STATUS(resolver.AddGatherNd()); + 
TF_LITE_ENSURE_STATUS(resolver.AddGreater()); + TF_LITE_ENSURE_STATUS(resolver.AddGreaterEqual()); + TF_LITE_ENSURE_STATUS(resolver.AddHardSwish()); + TF_LITE_ENSURE_STATUS(resolver.AddIf()); + TF_LITE_ENSURE_STATUS(resolver.AddL2Normalization()); + TF_LITE_ENSURE_STATUS(resolver.AddL2Pool2D()); + TF_LITE_ENSURE_STATUS(resolver.AddLeakyRelu()); + TF_LITE_ENSURE_STATUS(resolver.AddLess()); + TF_LITE_ENSURE_STATUS(resolver.AddLessEqual()); + TF_LITE_ENSURE_STATUS(resolver.AddLog()); + TF_LITE_ENSURE_STATUS(resolver.AddLogicalAnd()); + TF_LITE_ENSURE_STATUS(resolver.AddLogicalNot()); + TF_LITE_ENSURE_STATUS(resolver.AddLogicalOr()); + TF_LITE_ENSURE_STATUS(resolver.AddLogistic()); + TF_LITE_ENSURE_STATUS(resolver.AddLogSoftmax()); + TF_LITE_ENSURE_STATUS(resolver.AddMaxPool2D()); + TF_LITE_ENSURE_STATUS(resolver.AddMaximum()); + TF_LITE_ENSURE_STATUS(resolver.AddMean()); + TF_LITE_ENSURE_STATUS(resolver.AddMinimum()); + TF_LITE_ENSURE_STATUS(resolver.AddMirrorPad()); + TF_LITE_ENSURE_STATUS(resolver.AddMul()); + TF_LITE_ENSURE_STATUS(resolver.AddNeg()); + TF_LITE_ENSURE_STATUS(resolver.AddNotEqual()); + TF_LITE_ENSURE_STATUS(resolver.AddPack()); + TF_LITE_ENSURE_STATUS(resolver.AddPad()); + TF_LITE_ENSURE_STATUS(resolver.AddPadV2()); + TF_LITE_ENSURE_STATUS(resolver.AddPrelu()); + TF_LITE_ENSURE_STATUS(resolver.AddQuantize()); + TF_LITE_ENSURE_STATUS(resolver.AddReadVariable()); + TF_LITE_ENSURE_STATUS(resolver.AddReduceMax()); + TF_LITE_ENSURE_STATUS(resolver.AddRelu()); + TF_LITE_ENSURE_STATUS(resolver.AddRelu6()); + TF_LITE_ENSURE_STATUS(resolver.AddReshape()); + TF_LITE_ENSURE_STATUS(resolver.AddResizeBilinear()); + TF_LITE_ENSURE_STATUS(resolver.AddResizeNearestNeighbor()); + TF_LITE_ENSURE_STATUS(resolver.AddRound()); + TF_LITE_ENSURE_STATUS(resolver.AddRsqrt()); + TF_LITE_ENSURE_STATUS(resolver.AddSelectV2()); + TF_LITE_ENSURE_STATUS(resolver.AddShape()); + TF_LITE_ENSURE_STATUS(resolver.AddSin()); + TF_LITE_ENSURE_STATUS(resolver.AddSlice()); + 
TF_LITE_ENSURE_STATUS(resolver.AddSoftmax()); + TF_LITE_ENSURE_STATUS(resolver.AddSpaceToBatchNd()); + TF_LITE_ENSURE_STATUS(resolver.AddSpaceToDepth()); + TF_LITE_ENSURE_STATUS(resolver.AddSplit()); + TF_LITE_ENSURE_STATUS(resolver.AddSplitV()); + TF_LITE_ENSURE_STATUS(resolver.AddSqrt()); + TF_LITE_ENSURE_STATUS(resolver.AddSquare()); + TF_LITE_ENSURE_STATUS(resolver.AddSquaredDifference()); + TF_LITE_ENSURE_STATUS(resolver.AddSqueeze()); + TF_LITE_ENSURE_STATUS(resolver.AddStridedSlice()); + TF_LITE_ENSURE_STATUS(resolver.AddSub()); + TF_LITE_ENSURE_STATUS(resolver.AddSum()); + TF_LITE_ENSURE_STATUS(resolver.AddSvdf()); + TF_LITE_ENSURE_STATUS(resolver.AddTanh()); + TF_LITE_ENSURE_STATUS(resolver.AddTranspose()); + TF_LITE_ENSURE_STATUS(resolver.AddTransposeConv()); + TF_LITE_ENSURE_STATUS(resolver.AddUnidirectionalSequenceLSTM()); + TF_LITE_ENSURE_STATUS(resolver.AddUnpack()); + TF_LITE_ENSURE_STATUS(resolver.AddVarHandle()); + TF_LITE_ENSURE_STATUS(resolver.AddWhile()); + TF_LITE_ENSURE_STATUS(resolver.AddZerosLike()); + return kTfLiteOk; +} +} // namespace tflite \ No newline at end of file diff --git a/third_party/xtensa/examples/pytorch_to_tflite/pytorch_op_resolver.h b/third_party/xtensa/examples/pytorch_to_tflite/pytorch_op_resolver.h new file mode 100644 index 0000000..b576bc8 --- /dev/null +++ b/third_party/xtensa/examples/pytorch_to_tflite/pytorch_op_resolver.h @@ -0,0 +1,20 @@ +/* Copyright 2023 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#include "tensorflow/lite/micro/micro_mutable_op_resolver.h" + +using PytorchOpsResolver = tflite::MicroMutableOpResolver<128>; + +TfLiteStatus InitPytorchOpsResolver(PytorchOpsResolver& resolver); diff --git a/third_party/xtensa/examples/pytorch_to_tflite/pytorch_to_tflite_converter/README.md b/third_party/xtensa/examples/pytorch_to_tflite/pytorch_to_tflite_converter/README.md new file mode 100755 index 0000000..811a72c --- /dev/null +++ b/third_party/xtensa/examples/pytorch_to_tflite/pytorch_to_tflite_converter/README.md @@ -0,0 +1,30 @@ + +## Convert model from pytorch(mobilenet_v2) to tflite(int8 quantized) with TinyNN tool using Google Colaboratory. + + + +
    + Google Colaboratory +
    +*Estimated Conversion Time: ~3 Mins.* + +## + + +## Convert the model from pytorch to onnx to tflite(int8 quantized) using Google Colaboratory. + + + +
    + Google Colaboratory +
    +*Estimated Conversion Time: ~5 Mins.* + +## + + +## Mobilenet_v2(int8 quantized) Model Architecture + +This is a mobilenet v2 model. + +![mobilenet_v2_quantized_model](../images/qat_model.png) diff --git a/third_party/xtensa/examples/pytorch_to_tflite/pytorch_to_tflite_converter/pytorch_to_onnx_to_tflite_int8.ipynb b/third_party/xtensa/examples/pytorch_to_tflite/pytorch_to_tflite_converter/pytorch_to_onnx_to_tflite_int8.ipynb new file mode 100644 index 0000000..3937e86 --- /dev/null +++ b/third_party/xtensa/examples/pytorch_to_tflite/pytorch_to_tflite_converter/pytorch_to_onnx_to_tflite_int8.ipynb @@ -0,0 +1,1826 @@ +{ + "nbformat": 4, + "nbformat_minor": 0, + "metadata": { + "colab": { + "name": "pytorch_to_onnx_to_tflite(quantized)_with_imagedata.ipynb", + "provenance": [], + "toc_visible": true, + "include_colab_link": true + }, + "kernelspec": { + "name": "python3", + "display_name": "Python 3" + }, + "language_info": { + "name": "python" + }, + "widgets": { + "application/vnd.jupyter.widget-state+json": { + "8ba62ba1dceb4f6a9a42a332b1f17eb8": { + "model_module": "@jupyter-widgets/controls", + "model_name": "HBoxModel", + "model_module_version": "1.5.0", + "state": { + "_dom_classes": [], + "_model_module": "@jupyter-widgets/controls", + "_model_module_version": "1.5.0", + "_model_name": "HBoxModel", + "_view_count": null, + "_view_module": "@jupyter-widgets/controls", + "_view_module_version": "1.5.0", + "_view_name": "HBoxView", + "box_style": "", + "children": [ + "IPY_MODEL_b8539300f03f4f7c8c23e8d775feb479", + "IPY_MODEL_a4004258a74a4dfcb6cd58c14de97284", + "IPY_MODEL_859caddec0484c26a48976dc9cbbc033" + ], + "layout": "IPY_MODEL_b7583f540a9e4f4cbd5c1f0cd11d14d9" + } + }, + "b8539300f03f4f7c8c23e8d775feb479": { + "model_module": "@jupyter-widgets/controls", + "model_name": "HTMLModel", + "model_module_version": "1.5.0", + "state": { + "_dom_classes": [], + "_model_module": "@jupyter-widgets/controls", + "_model_module_version": "1.5.0", + "_model_name": 
"HTMLModel", + "_view_count": null, + "_view_module": "@jupyter-widgets/controls", + "_view_module_version": "1.5.0", + "_view_name": "HTMLView", + "description": "", + "description_tooltip": null, + "layout": "IPY_MODEL_bc7a59507d7b4f7fb3efa1bdd65e569b", + "placeholder": "​", + "style": "IPY_MODEL_179f4d69da4e49ab8cecbffc2b04b2b2", + "value": "100%" + } + }, + "a4004258a74a4dfcb6cd58c14de97284": { + "model_module": "@jupyter-widgets/controls", + "model_name": "FloatProgressModel", + "model_module_version": "1.5.0", + "state": { + "_dom_classes": [], + "_model_module": "@jupyter-widgets/controls", + "_model_module_version": "1.5.0", + "_model_name": "FloatProgressModel", + "_view_count": null, + "_view_module": "@jupyter-widgets/controls", + "_view_module_version": "1.5.0", + "_view_name": "ProgressView", + "bar_style": "success", + "description": "", + "description_tooltip": null, + "layout": "IPY_MODEL_fd3aa94dbf214a2e820bca955ab5e7d0", + "max": 14212972, + "min": 0, + "orientation": "horizontal", + "style": "IPY_MODEL_66f135b59b214de787b92385bfb14719", + "value": 14212972 + } + }, + "859caddec0484c26a48976dc9cbbc033": { + "model_module": "@jupyter-widgets/controls", + "model_name": "HTMLModel", + "model_module_version": "1.5.0", + "state": { + "_dom_classes": [], + "_model_module": "@jupyter-widgets/controls", + "_model_module_version": "1.5.0", + "_model_name": "HTMLModel", + "_view_count": null, + "_view_module": "@jupyter-widgets/controls", + "_view_module_version": "1.5.0", + "_view_name": "HTMLView", + "description": "", + "description_tooltip": null, + "layout": "IPY_MODEL_ec460593091a49329258bd5d67d38df6", + "placeholder": "​", + "style": "IPY_MODEL_1f0274dae713464fab320cf6a2de304a", + "value": " 13.6M/13.6M [00:00<00:00, 18.7MB/s]" + } + }, + "b7583f540a9e4f4cbd5c1f0cd11d14d9": { + "model_module": "@jupyter-widgets/base", + "model_name": "LayoutModel", + "model_module_version": "1.2.0", + "state": { + "_model_module": "@jupyter-widgets/base", + 
"_model_module_version": "1.2.0", + "_model_name": "LayoutModel", + "_view_count": null, + "_view_module": "@jupyter-widgets/base", + "_view_module_version": "1.2.0", + "_view_name": "LayoutView", + "align_content": null, + "align_items": null, + "align_self": null, + "border": null, + "bottom": null, + "display": null, + "flex": null, + "flex_flow": null, + "grid_area": null, + "grid_auto_columns": null, + "grid_auto_flow": null, + "grid_auto_rows": null, + "grid_column": null, + "grid_gap": null, + "grid_row": null, + "grid_template_areas": null, + "grid_template_columns": null, + "grid_template_rows": null, + "height": null, + "justify_content": null, + "justify_items": null, + "left": null, + "margin": null, + "max_height": null, + "max_width": null, + "min_height": null, + "min_width": null, + "object_fit": null, + "object_position": null, + "order": null, + "overflow": null, + "overflow_x": null, + "overflow_y": null, + "padding": null, + "right": null, + "top": null, + "visibility": null, + "width": null + } + }, + "bc7a59507d7b4f7fb3efa1bdd65e569b": { + "model_module": "@jupyter-widgets/base", + "model_name": "LayoutModel", + "model_module_version": "1.2.0", + "state": { + "_model_module": "@jupyter-widgets/base", + "_model_module_version": "1.2.0", + "_model_name": "LayoutModel", + "_view_count": null, + "_view_module": "@jupyter-widgets/base", + "_view_module_version": "1.2.0", + "_view_name": "LayoutView", + "align_content": null, + "align_items": null, + "align_self": null, + "border": null, + "bottom": null, + "display": null, + "flex": null, + "flex_flow": null, + "grid_area": null, + "grid_auto_columns": null, + "grid_auto_flow": null, + "grid_auto_rows": null, + "grid_column": null, + "grid_gap": null, + "grid_row": null, + "grid_template_areas": null, + "grid_template_columns": null, + "grid_template_rows": null, + "height": null, + "justify_content": null, + "justify_items": null, + "left": null, + "margin": null, + "max_height": null, + 
"max_width": null, + "min_height": null, + "min_width": null, + "object_fit": null, + "object_position": null, + "order": null, + "overflow": null, + "overflow_x": null, + "overflow_y": null, + "padding": null, + "right": null, + "top": null, + "visibility": null, + "width": null + } + }, + "179f4d69da4e49ab8cecbffc2b04b2b2": { + "model_module": "@jupyter-widgets/controls", + "model_name": "DescriptionStyleModel", + "model_module_version": "1.5.0", + "state": { + "_model_module": "@jupyter-widgets/controls", + "_model_module_version": "1.5.0", + "_model_name": "DescriptionStyleModel", + "_view_count": null, + "_view_module": "@jupyter-widgets/base", + "_view_module_version": "1.2.0", + "_view_name": "StyleView", + "description_width": "" + } + }, + "fd3aa94dbf214a2e820bca955ab5e7d0": { + "model_module": "@jupyter-widgets/base", + "model_name": "LayoutModel", + "model_module_version": "1.2.0", + "state": { + "_model_module": "@jupyter-widgets/base", + "_model_module_version": "1.2.0", + "_model_name": "LayoutModel", + "_view_count": null, + "_view_module": "@jupyter-widgets/base", + "_view_module_version": "1.2.0", + "_view_name": "LayoutView", + "align_content": null, + "align_items": null, + "align_self": null, + "border": null, + "bottom": null, + "display": null, + "flex": null, + "flex_flow": null, + "grid_area": null, + "grid_auto_columns": null, + "grid_auto_flow": null, + "grid_auto_rows": null, + "grid_column": null, + "grid_gap": null, + "grid_row": null, + "grid_template_areas": null, + "grid_template_columns": null, + "grid_template_rows": null, + "height": null, + "justify_content": null, + "justify_items": null, + "left": null, + "margin": null, + "max_height": null, + "max_width": null, + "min_height": null, + "min_width": null, + "object_fit": null, + "object_position": null, + "order": null, + "overflow": null, + "overflow_x": null, + "overflow_y": null, + "padding": null, + "right": null, + "top": null, + "visibility": null, + "width": null + } + 
}, + "66f135b59b214de787b92385bfb14719": { + "model_module": "@jupyter-widgets/controls", + "model_name": "ProgressStyleModel", + "model_module_version": "1.5.0", + "state": { + "_model_module": "@jupyter-widgets/controls", + "_model_module_version": "1.5.0", + "_model_name": "ProgressStyleModel", + "_view_count": null, + "_view_module": "@jupyter-widgets/base", + "_view_module_version": "1.2.0", + "_view_name": "StyleView", + "bar_color": null, + "description_width": "" + } + }, + "ec460593091a49329258bd5d67d38df6": { + "model_module": "@jupyter-widgets/base", + "model_name": "LayoutModel", + "model_module_version": "1.2.0", + "state": { + "_model_module": "@jupyter-widgets/base", + "_model_module_version": "1.2.0", + "_model_name": "LayoutModel", + "_view_count": null, + "_view_module": "@jupyter-widgets/base", + "_view_module_version": "1.2.0", + "_view_name": "LayoutView", + "align_content": null, + "align_items": null, + "align_self": null, + "border": null, + "bottom": null, + "display": null, + "flex": null, + "flex_flow": null, + "grid_area": null, + "grid_auto_columns": null, + "grid_auto_flow": null, + "grid_auto_rows": null, + "grid_column": null, + "grid_gap": null, + "grid_row": null, + "grid_template_areas": null, + "grid_template_columns": null, + "grid_template_rows": null, + "height": null, + "justify_content": null, + "justify_items": null, + "left": null, + "margin": null, + "max_height": null, + "max_width": null, + "min_height": null, + "min_width": null, + "object_fit": null, + "object_position": null, + "order": null, + "overflow": null, + "overflow_x": null, + "overflow_y": null, + "padding": null, + "right": null, + "top": null, + "visibility": null, + "width": null + } + }, + "1f0274dae713464fab320cf6a2de304a": { + "model_module": "@jupyter-widgets/controls", + "model_name": "DescriptionStyleModel", + "model_module_version": "1.5.0", + "state": { + "_model_module": "@jupyter-widgets/controls", + "_model_module_version": "1.5.0", + 
"_model_name": "DescriptionStyleModel", + "_view_count": null, + "_view_module": "@jupyter-widgets/base", + "_view_module_version": "1.2.0", + "_view_name": "StyleView", + "description_width": "" + } + } + } + }, + "accelerator": "GPU" + }, + "cells": [ + { + "cell_type": "markdown", + "metadata": { + "id": "view-in-github", + "colab_type": "text" + }, + "source": [ + "\"Open" + ] + }, + { + "cell_type": "markdown", + "source": [ + "## Install ONNX and ONNX runtime" + ], + "metadata": { + "id": "OF42Y3YaWrbV" + } + }, + { + "cell_type": "code", + "source": [ + "!pip install onnx\n", + "!pip install onnxruntime\n", + "# Some standard imports\n", + "import numpy as np\n", + "import torch\n", + "import torch.onnx\n", + "import torchvision.models as models\n", + "import onnx\n", + "import onnxruntime" + ], + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/" + }, + "id": "ma98ov9Fzftx", + "outputId": "942a4744-66ed-472f-b244-cea2f94acc5f" + }, + "execution_count": 1, + "outputs": [ + { + "output_type": "stream", + "name": "stdout", + "text": [ + "Collecting onnx\n", + " Downloading onnx-1.11.0-cp37-cp37m-manylinux_2_12_x86_64.manylinux2010_x86_64.whl (12.8 MB)\n", + "\u001b[K |████████████████████████████████| 12.8 MB 5.3 MB/s \n", + "\u001b[?25hRequirement already satisfied: protobuf>=3.12.2 in /usr/local/lib/python3.7/dist-packages (from onnx) (3.17.3)\n", + "Requirement already satisfied: typing-extensions>=3.6.2.1 in /usr/local/lib/python3.7/dist-packages (from onnx) (3.10.0.2)\n", + "Requirement already satisfied: numpy>=1.16.6 in /usr/local/lib/python3.7/dist-packages (from onnx) (1.21.5)\n", + "Requirement already satisfied: six>=1.9 in /usr/local/lib/python3.7/dist-packages (from protobuf>=3.12.2->onnx) (1.15.0)\n", + "Installing collected packages: onnx\n", + "Successfully installed onnx-1.11.0\n", + "Collecting onnxruntime\n", + " Downloading onnxruntime-1.10.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (4.9 MB)\n", + "\u001b[K 
|████████████████████████████████| 4.9 MB 5.4 MB/s \n", + "\u001b[?25hRequirement already satisfied: protobuf in /usr/local/lib/python3.7/dist-packages (from onnxruntime) (3.17.3)\n", + "Requirement already satisfied: numpy>=1.16.6 in /usr/local/lib/python3.7/dist-packages (from onnxruntime) (1.21.5)\n", + "Requirement already satisfied: flatbuffers in /usr/local/lib/python3.7/dist-packages (from onnxruntime) (2.0)\n", + "Requirement already satisfied: six>=1.9 in /usr/local/lib/python3.7/dist-packages (from protobuf->onnxruntime) (1.15.0)\n", + "Installing collected packages: onnxruntime\n", + "Successfully installed onnxruntime-1.10.0\n" + ] + } + ] + }, + { + "cell_type": "markdown", + "source": [ + "## Load mobilenetV2 from torch models" + ], + "metadata": { + "id": "gL9nGbtlW9Wt" + } + }, + { + "cell_type": "code", + "source": [ + "model = models.mobilenet_v2(pretrained=True)\n", + "model.eval()" + ], + "metadata": { + "id": "iVFwSNmHztHY", + "colab": { + "base_uri": "https://localhost:8080/", + "height": 1000, + "referenced_widgets": [ + "8ba62ba1dceb4f6a9a42a332b1f17eb8", + "b8539300f03f4f7c8c23e8d775feb479", + "a4004258a74a4dfcb6cd58c14de97284", + "859caddec0484c26a48976dc9cbbc033", + "b7583f540a9e4f4cbd5c1f0cd11d14d9", + "bc7a59507d7b4f7fb3efa1bdd65e569b", + "179f4d69da4e49ab8cecbffc2b04b2b2", + "fd3aa94dbf214a2e820bca955ab5e7d0", + "66f135b59b214de787b92385bfb14719", + "ec460593091a49329258bd5d67d38df6", + "1f0274dae713464fab320cf6a2de304a" + ] + }, + "outputId": "ccd50128-8123-4e8c-ed77-ce35576d88e7" + }, + "execution_count": 2, + "outputs": [ + { + "output_type": "stream", + "name": "stderr", + "text": [ + "Downloading: \"https://download.pytorch.org/models/mobilenet_v2-b0353104.pth\" to /root/.cache/torch/hub/checkpoints/mobilenet_v2-b0353104.pth\n" + ] + }, + { + "output_type": "display_data", + "data": { + "text/plain": [ + " 0%| | 0.00/13.6M [00:00=1.9.0 in /usr/local/lib/python3.7/dist-packages (from onnx-tf) (1.11.0)\n", + "Collecting 
tensorflow-addons\n", + " Downloading tensorflow_addons-0.16.1-cp37-cp37m-manylinux_2_12_x86_64.manylinux2010_x86_64.whl (1.1 MB)\n", + "\u001b[K |████████████████████████████████| 1.1 MB 10.3 MB/s \n", + "\u001b[?25hRequirement already satisfied: PyYAML in /usr/local/lib/python3.7/dist-packages (from onnx-tf) (3.13)\n", + "Requirement already satisfied: numpy>=1.16.6 in /usr/local/lib/python3.7/dist-packages (from onnx>=1.9.0->onnx-tf) (1.21.5)\n", + "Requirement already satisfied: typing-extensions>=3.6.2.1 in /usr/local/lib/python3.7/dist-packages (from onnx>=1.9.0->onnx-tf) (3.10.0.2)\n", + "Requirement already satisfied: protobuf>=3.12.2 in /usr/local/lib/python3.7/dist-packages (from onnx>=1.9.0->onnx-tf) (3.17.3)\n", + "Requirement already satisfied: six>=1.9 in /usr/local/lib/python3.7/dist-packages (from protobuf>=3.12.2->onnx>=1.9.0->onnx-tf) (1.15.0)\n", + "Requirement already satisfied: typeguard>=2.7 in /usr/local/lib/python3.7/dist-packages (from tensorflow-addons->onnx-tf) (2.7.1)\n", + "Installing collected packages: tensorflow-addons, onnx-tf\n", + "Successfully installed onnx-tf-1.9.0 tensorflow-addons-0.16.1\n" + ] + }, + { + "output_type": "stream", + "name": "stderr", + "text": [ + "WARNING:absl:Found untraced functions such as gen_tensor_dict while saving (showing 1 of 1). 
These functions will not be directly callable after loading.\n" + ] + }, + { + "output_type": "stream", + "name": "stdout", + "text": [ + "INFO:tensorflow:Assets written to: model_tf/assets\n" + ] + }, + { + "output_type": "stream", + "name": "stderr", + "text": [ + "INFO:tensorflow:Assets written to: model_tf/assets\n" + ] + } + ] + }, + { + "cell_type": "markdown", + "source": [ + "##Convert from TF saved model to TFLite(float32) model" + ], + "metadata": { + "id": "Waje2OHsX_Oz" + } + }, + { + "cell_type": "code", + "source": [ + "import tensorflow as tf\n", + "\n", + "saved_model_dir = 'model_tf'\n", + "tflite_model_path = 'mobilenet_v2_float32.tflite'\n", + "\n", + "# Convert the model\n", + "converter = tf.lite.TFLiteConverter.from_saved_model(saved_model_dir)\n", + "tflite_model = converter.convert()\n", + "\n", + "# Save the model\n", + "with open(tflite_model_path, 'wb') as f:\n", + " f.write(tflite_model)" + ], + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/" + }, + "id": "uKq3zZfk1qwk", + "outputId": "2f94f200-7992-488d-8b49-d881806db349" + }, + "execution_count": 7, + "outputs": [ + { + "output_type": "stream", + "name": "stderr", + "text": [ + "WARNING:absl:Buffer deduplication procedure will be skipped when flatbuffer library is not properly loaded\n" + ] + } + ] + }, + { + "cell_type": "markdown", + "source": [ + "##Run inference on TFLite(float32) with random data" + ], + "metadata": { + "id": "CsJDnIA1Yo8G" + } + }, + { + "cell_type": "code", + "source": [ + "import numpy as np\n", + "import tensorflow as tf\n", + "\n", + "tflite_model_path = '/content/mobilenet_v2_float32.tflite'\n", + "# Load the TFLite model and allocate tensors\n", + "interpreter = tf.lite.Interpreter(model_path=tflite_model_path)\n", + "interpreter.allocate_tensors()\n", + "\n", + "# Get input and output tensors\n", + "input_details = interpreter.get_input_details()\n", + "output_details = interpreter.get_output_details()\n", + "\n", + "# Test the model on 
random input data\n", + "input_shape = input_details[0]['shape']\n", + "print(input_shape)\n", + "input_data = np.array(np.random.random_sample(input_shape), dtype=np.float32)\n", + "interpreter.set_tensor(input_details[0]['index'], input_data)\n", + "\n", + "interpreter.invoke()\n", + "\n", + "# get_tensor() returns a copy of the tensor data\n", + "# use tensor() in order to get a pointer to the tensor\n", + "output_data = interpreter.get_tensor(output_details[0]['index'])\n", + "\n", + "print(\"Predicted value for [0, 1] normalization. Label index: {}, confidence: {:2.0f}%\"\n", + " .format(np.argmax(output_data), \n", + " 100 * output_data[0][np.argmax(output_data)]))" + ], + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/" + }, + "id": "xD9dfODY2XH8", + "outputId": "87a1e030-b3e1-45ff-cc66-da3bef4fa3c8" + }, + "execution_count": 8, + "outputs": [ + { + "output_type": "stream", + "name": "stdout", + "text": [ + "[ 1 3 224 224]\n", + "Predicted value for [0, 1] normalization. Label index: 892, confidence: 599%\n" + ] + } + ] + }, + { + "cell_type": "markdown", + "source": [ + "##Run inference on TFLite(float32) with image data" + ], + "metadata": { + "id": "jX0VA1kJbsVy" + } + }, + { + "cell_type": "code", + "source": [ + "!wget --no-check-certificate \\\n", + " https://storage.googleapis.com/mledu-datasets/cats_and_dogs_filtered.zip \\\n", + " -O /content/cats_and_dogs_filtered.zip\n", + "\n", + " " + ], + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/" + }, + "id": "seyq_VnYhk7g", + "outputId": "2e45e2bf-1ce5-4af9-8eba-d88b95929993" + }, + "execution_count": 9, + "outputs": [ + { + "output_type": "stream", + "name": "stdout", + "text": [ + "--2022-03-15 18:23:32-- https://storage.googleapis.com/mledu-datasets/cats_and_dogs_filtered.zip\n", + "Resolving storage.googleapis.com (storage.googleapis.com)... 
108.177.112.128, 74.125.124.128, 108.177.111.128, ...\n", + "Connecting to storage.googleapis.com (storage.googleapis.com)|108.177.112.128|:443... connected.\n", + "HTTP request sent, awaiting response... 200 OK\n", + "Length: 68606236 (65M) [application/zip]\n", + "Saving to: ‘/content/cats_and_dogs_filtered.zip’\n", + "\n", + "/content/cats_and_d 100%[===================>] 65.43M 134MB/s in 0.5s \n", + "\n", + "2022-03-15 18:23:33 (134 MB/s) - ‘/content/cats_and_dogs_filtered.zip’ saved [68606236/68606236]\n", + "\n" + ] + } + ] + }, + { + "cell_type": "code", + "source": [ + "import os\n", + "import zipfile\n", + "\n", + "local_zip = '/content/cats_and_dogs_filtered.zip'\n", + "zip_ref = zipfile.ZipFile(local_zip, 'r')\n", + "zip_ref.extractall('/content')\n", + "zip_ref.close()" + ], + "metadata": { + "id": "NxDavT_Bh08y" + }, + "execution_count": 10, + "outputs": [] + }, + { + "cell_type": "code", + "source": [ + "import tensorflow as tf\n", + "import numpy as np\n", + "tflite_model_path = '/content/mobilenet_v2_float32.tflite'\n", + "\n", + "#tflite_model_path = '/content/model_float32.tflite'\n", + "# Load the TFLite model and allocate tensors\n", + "interpreter = tf.lite.Interpreter(model_path=tflite_model_path)\n", + "interpreter.allocate_tensors()\n", + "\n", + "print(\"== Input details ==\")\n", + "print(\"name:\", interpreter.get_input_details()[0]['name'])\n", + "print(\"shape:\", interpreter.get_input_details()[0]['shape'])\n", + "print(\"type:\", interpreter.get_input_details()[0]['dtype'])\n", + "\n", + "print(\"\\nDUMP INPUT\")\n", + "print(interpreter.get_input_details()[0])\n", + "\n", + "print(\"\\n== Output details ==\")\n", + "print(\"name:\", interpreter.get_output_details()[0]['name'])\n", + "print(\"shape:\", interpreter.get_output_details()[0]['shape'])\n", + "print(\"type:\", interpreter.get_output_details()[0]['dtype'])\n", + "\n", + "print(\"\\nDUMP OUTPUT\")\n", + "print(interpreter.get_output_details()[0])\n", + "\n", + "# Get input 
and output tensors\n", + "input_details = interpreter.get_input_details()\n", + "output_details = interpreter.get_output_details()\n", + "\n", + "# Test the model on image data\n", + "input_shape = input_details[0]['shape']\n", + "#print(input_shape)\n", + "image = tf.io.read_file('/content/cats_and_dogs_filtered/validation/cats/cat.2000.jpg')\n", + "\n", + "image = tf.io.decode_jpeg(image, channels=3)\n", + "image = tf.image.resize(image, [IMAGE_SIZE, IMAGE_SIZE])\n", + "image = tf.reshape(image,[3,IMAGE_SIZE,IMAGE_SIZE])\n", + "image = tf.expand_dims(image, 0)\n", + "print(\"Real image shape\")\n", + "print(image.shape)\n", + "#print(image)\n", + "interpreter.set_tensor(input_details[0]['index'], image)\n", + "\n", + "interpreter.invoke()\n", + "\n", + "# get_tensor() returns a copy of the tensor data\n", + "# use tensor() in order to get a pointer to the tensor\n", + "output_data = interpreter.get_tensor(output_details[0]['index'])\n", + "\n", + "print(\"Predicted value . Label index: {}, confidence: {:2.0f}%\"\n", + " .format(np.argmax(output_data), \n", + " 100 * output_data[0][np.argmax(output_data)]))" + ], + "metadata": { + "id": "lhlCPBDO205F", + "colab": { + "base_uri": "https://localhost:8080/" + }, + "outputId": "046d2cb8-ee05-440a-c71c-d587eb779070" + }, + "execution_count": 11, + "outputs": [ + { + "output_type": "stream", + "name": "stdout", + "text": [ + "== Input details ==\n", + "name: serving_default_input:0\n", + "shape: [ 1 3 224 224]\n", + "type: \n", + "\n", + "DUMP INPUT\n", + "{'name': 'serving_default_input:0', 'index': 0, 'shape': array([ 1, 3, 224, 224], dtype=int32), 'shape_signature': array([ -1, 3, 224, 224], dtype=int32), 'dtype': , 'quantization': (0.0, 0), 'quantization_parameters': {'scales': array([], dtype=float32), 'zero_points': array([], dtype=int32), 'quantized_dimension': 0}, 'sparsity_parameters': {}}\n", + "\n", + "== Output details ==\n", + "name: PartitionedCall:0\n", + "shape: [ 1 1000]\n", + "type: \n", + "\n", + 
"DUMP OUTPUT\n", + "{'name': 'PartitionedCall:0', 'index': 346, 'shape': array([ 1, 1000], dtype=int32), 'shape_signature': array([ -1, 1000], dtype=int32), 'dtype': , 'quantization': (0.0, 0), 'quantization_parameters': {'scales': array([], dtype=float32), 'zero_points': array([], dtype=int32), 'quantized_dimension': 0}, 'sparsity_parameters': {}}\n", + "Real image shape\n", + "(1, 3, 224, 224)\n", + "Predicted value . Label index: 530, confidence: 1936%\n" + ] + } + ] + }, + { + "cell_type": "markdown", + "source": [ + "####Convert from TF saved model to TFLite(quantized) model" + ], + "metadata": { + "id": "gys1dUne4SK0" + } + }, + { + "cell_type": "code", + "execution_count": 12, + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/" + }, + "id": "w9ydAmHGHUZl", + "outputId": "6742ea8d-f80d-4d6b-e8c7-f117768f385e" + }, + "outputs": [ + { + "output_type": "stream", + "name": "stdout", + "text": [ + "torch.Size([3, 224, 224])\n", + "torch input_tensor size\n", + "(1, 3, 224, 224)\n", + "torch.Size([3, 224, 224])\n", + "torch input_tensor size\n", + "(1, 3, 224, 224)\n", + "torch.Size([3, 224, 224])\n", + "torch input_tensor size\n", + "(1, 3, 224, 224)\n", + "torch.Size([3, 224, 224])\n", + "torch input_tensor size\n", + "(1, 3, 224, 224)\n", + "torch.Size([3, 224, 224])\n", + "torch input_tensor size\n", + "(1, 3, 224, 224)\n", + "torch.Size([3, 224, 224])\n", + "torch input_tensor size\n", + "(1, 3, 224, 224)\n", + "torch.Size([3, 224, 224])\n", + "torch input_tensor size\n", + "(1, 3, 224, 224)\n", + "torch.Size([3, 224, 224])\n", + "torch input_tensor size\n", + "(1, 3, 224, 224)\n", + "torch.Size([3, 224, 224])\n", + "torch input_tensor size\n", + "(1, 3, 224, 224)\n", + "torch.Size([3, 224, 224])\n", + "torch input_tensor size\n", + "(1, 3, 224, 224)\n", + "torch.Size([3, 224, 224])\n", + "torch input_tensor size\n", + "(1, 3, 224, 224)\n", + "torch.Size([3, 224, 224])\n", + "torch input_tensor size\n", + "(1, 3, 224, 224)\n", + 
"torch.Size([3, 224, 224])\n", + "torch input_tensor size\n", + "(1, 3, 224, 224)\n", + "torch.Size([3, 224, 224])\n", + "torch input_tensor size\n", + "(1, 3, 224, 224)\n", + "torch.Size([3, 224, 224])\n", + "torch input_tensor size\n", + "(1, 3, 224, 224)\n", + "torch.Size([3, 224, 224])\n", + "torch input_tensor size\n", + "(1, 3, 224, 224)\n", + "torch.Size([3, 224, 224])\n", + "torch input_tensor size\n", + "(1, 3, 224, 224)\n", + "torch.Size([3, 224, 224])\n", + "torch input_tensor size\n", + "(1, 3, 224, 224)\n", + "torch.Size([3, 224, 224])\n", + "torch input_tensor size\n", + "(1, 3, 224, 224)\n", + "torch.Size([3, 224, 224])\n", + "torch input_tensor size\n", + "(1, 3, 224, 224)\n", + "torch.Size([3, 224, 224])\n", + "torch input_tensor size\n", + "(1, 3, 224, 224)\n", + "torch.Size([3, 224, 224])\n", + "torch input_tensor size\n", + "(1, 3, 224, 224)\n", + "torch.Size([3, 224, 224])\n", + "torch input_tensor size\n", + "(1, 3, 224, 224)\n", + "torch.Size([3, 224, 224])\n", + "torch input_tensor size\n", + "(1, 3, 224, 224)\n", + "torch.Size([3, 224, 224])\n", + "torch input_tensor size\n", + "(1, 3, 224, 224)\n", + "torch.Size([3, 224, 224])\n", + "torch input_tensor size\n", + "(1, 3, 224, 224)\n", + "torch.Size([3, 224, 224])\n", + "torch input_tensor size\n", + "(1, 3, 224, 224)\n", + "torch.Size([3, 224, 224])\n", + "torch input_tensor size\n", + "(1, 3, 224, 224)\n", + "torch.Size([3, 224, 224])\n", + "torch input_tensor size\n", + "(1, 3, 224, 224)\n", + "torch.Size([3, 224, 224])\n", + "torch input_tensor size\n", + "(1, 3, 224, 224)\n", + "torch.Size([3, 224, 224])\n", + "torch input_tensor size\n", + "(1, 3, 224, 224)\n", + "torch.Size([3, 224, 224])\n", + "torch input_tensor size\n", + "(1, 3, 224, 224)\n", + "torch.Size([3, 224, 224])\n", + "torch input_tensor size\n", + "(1, 3, 224, 224)\n", + "torch.Size([3, 224, 224])\n", + "torch input_tensor size\n", + "(1, 3, 224, 224)\n", + "torch.Size([3, 224, 224])\n", + "torch input_tensor size\n", + 
"(1, 3, 224, 224)\n", + "torch.Size([3, 224, 224])\n", + "torch input_tensor size\n", + "(1, 3, 224, 224)\n", + "torch.Size([3, 224, 224])\n", + "torch input_tensor size\n", + "(1, 3, 224, 224)\n", + "torch.Size([3, 224, 224])\n", + "torch input_tensor size\n", + "(1, 3, 224, 224)\n", + "torch.Size([3, 224, 224])\n", + "torch input_tensor size\n", + "(1, 3, 224, 224)\n", + "torch.Size([3, 224, 224])\n", + "torch input_tensor size\n", + "(1, 3, 224, 224)\n", + "torch.Size([3, 224, 224])\n", + "torch input_tensor size\n", + "(1, 3, 224, 224)\n", + "torch.Size([3, 224, 224])\n", + "torch input_tensor size\n", + "(1, 3, 224, 224)\n", + "torch.Size([3, 224, 224])\n", + "torch input_tensor size\n", + "(1, 3, 224, 224)\n", + "torch.Size([3, 224, 224])\n", + "torch input_tensor size\n", + "(1, 3, 224, 224)\n", + "torch.Size([3, 224, 224])\n", + "torch input_tensor size\n", + "(1, 3, 224, 224)\n", + "torch.Size([3, 224, 224])\n", + "torch input_tensor size\n", + "(1, 3, 224, 224)\n", + "torch.Size([3, 224, 224])\n", + "torch input_tensor size\n", + "(1, 3, 224, 224)\n", + "torch.Size([3, 224, 224])\n", + "torch input_tensor size\n", + "(1, 3, 224, 224)\n", + "torch.Size([3, 224, 224])\n", + "torch input_tensor size\n", + "(1, 3, 224, 224)\n", + "torch.Size([3, 224, 224])\n", + "torch input_tensor size\n", + "(1, 3, 224, 224)\n", + "torch.Size([3, 224, 224])\n", + "torch input_tensor size\n", + "(1, 3, 224, 224)\n", + "torch.Size([3, 224, 224])\n", + "torch input_tensor size\n", + "(1, 3, 224, 224)\n", + "torch.Size([3, 224, 224])\n", + "torch input_tensor size\n", + "(1, 3, 224, 224)\n", + "torch.Size([3, 224, 224])\n", + "torch input_tensor size\n", + "(1, 3, 224, 224)\n", + "torch.Size([3, 224, 224])\n", + "torch input_tensor size\n", + "(1, 3, 224, 224)\n", + "torch.Size([3, 224, 224])\n", + "torch input_tensor size\n", + "(1, 3, 224, 224)\n", + "torch.Size([3, 224, 224])\n", + "torch input_tensor size\n", + "(1, 3, 224, 224)\n", + "torch.Size([3, 224, 224])\n", + "torch 
input_tensor size\n", + "(1, 3, 224, 224)\n", + "torch.Size([3, 224, 224])\n", + "torch input_tensor size\n", + "(1, 3, 224, 224)\n", + "torch.Size([3, 224, 224])\n", + "torch input_tensor size\n", + "(1, 3, 224, 224)\n", + "torch.Size([3, 224, 224])\n", + "torch input_tensor size\n", + "(1, 3, 224, 224)\n", + "torch.Size([3, 224, 224])\n", + "torch input_tensor size\n", + "(1, 3, 224, 224)\n", + "torch.Size([3, 224, 224])\n", + "torch input_tensor size\n", + "(1, 3, 224, 224)\n", + "torch.Size([3, 224, 224])\n", + "torch input_tensor size\n", + "(1, 3, 224, 224)\n", + "torch.Size([3, 224, 224])\n", + "torch input_tensor size\n", + "(1, 3, 224, 224)\n", + "torch.Size([3, 224, 224])\n", + "torch input_tensor size\n", + "(1, 3, 224, 224)\n", + "torch.Size([3, 224, 224])\n", + "torch input_tensor size\n", + "(1, 3, 224, 224)\n", + "torch.Size([3, 224, 224])\n", + "torch input_tensor size\n", + "(1, 3, 224, 224)\n", + "torch.Size([3, 224, 224])\n", + "torch input_tensor size\n", + "(1, 3, 224, 224)\n", + "torch.Size([3, 224, 224])\n", + "torch input_tensor size\n", + "(1, 3, 224, 224)\n", + "torch.Size([3, 224, 224])\n", + "torch input_tensor size\n", + "(1, 3, 224, 224)\n", + "torch.Size([3, 224, 224])\n", + "torch input_tensor size\n", + "(1, 3, 224, 224)\n", + "torch.Size([3, 224, 224])\n", + "torch input_tensor size\n", + "(1, 3, 224, 224)\n", + "torch.Size([3, 224, 224])\n", + "torch input_tensor size\n", + "(1, 3, 224, 224)\n", + "torch.Size([3, 224, 224])\n", + "torch input_tensor size\n", + "(1, 3, 224, 224)\n", + "torch.Size([3, 224, 224])\n", + "torch input_tensor size\n", + "(1, 3, 224, 224)\n", + "torch.Size([3, 224, 224])\n", + "torch input_tensor size\n", + "(1, 3, 224, 224)\n", + "torch.Size([3, 224, 224])\n", + "torch input_tensor size\n", + "(1, 3, 224, 224)\n", + "torch.Size([3, 224, 224])\n", + "torch input_tensor size\n", + "(1, 3, 224, 224)\n", + "torch.Size([3, 224, 224])\n", + "torch input_tensor size\n", + "(1, 3, 224, 224)\n", + "torch.Size([3, 
224, 224])\n", + "torch input_tensor size\n", + "(1, 3, 224, 224)\n", + "torch.Size([3, 224, 224])\n", + "torch input_tensor size\n", + "(1, 3, 224, 224)\n", + "torch.Size([3, 224, 224])\n", + "torch input_tensor size\n", + "(1, 3, 224, 224)\n", + "torch.Size([3, 224, 224])\n", + "torch input_tensor size\n", + "(1, 3, 224, 224)\n", + "torch.Size([3, 224, 224])\n", + "torch input_tensor size\n", + "(1, 3, 224, 224)\n", + "torch.Size([3, 224, 224])\n", + "torch input_tensor size\n", + "(1, 3, 224, 224)\n", + "torch.Size([3, 224, 224])\n", + "torch input_tensor size\n", + "(1, 3, 224, 224)\n", + "torch.Size([3, 224, 224])\n", + "torch input_tensor size\n", + "(1, 3, 224, 224)\n", + "torch.Size([3, 224, 224])\n", + "torch input_tensor size\n", + "(1, 3, 224, 224)\n", + "torch.Size([3, 224, 224])\n", + "torch input_tensor size\n", + "(1, 3, 224, 224)\n", + "torch.Size([3, 224, 224])\n", + "torch input_tensor size\n", + "(1, 3, 224, 224)\n", + "torch.Size([3, 224, 224])\n", + "torch input_tensor size\n", + "(1, 3, 224, 224)\n", + "torch.Size([3, 224, 224])\n", + "torch input_tensor size\n", + "(1, 3, 224, 224)\n", + "torch.Size([3, 224, 224])\n", + "torch input_tensor size\n", + "(1, 3, 224, 224)\n", + "torch.Size([3, 224, 224])\n", + "torch input_tensor size\n", + "(1, 3, 224, 224)\n", + "torch.Size([3, 224, 224])\n", + "torch input_tensor size\n", + "(1, 3, 224, 224)\n", + "torch.Size([3, 224, 224])\n", + "torch input_tensor size\n", + "(1, 3, 224, 224)\n", + "torch.Size([3, 224, 224])\n", + "torch input_tensor size\n", + "(1, 3, 224, 224)\n", + "torch.Size([3, 224, 224])\n", + "torch input_tensor size\n", + "(1, 3, 224, 224)\n", + "torch.Size([3, 224, 224])\n", + "torch input_tensor size\n", + "(1, 3, 224, 224)\n" + ] + }, + { + "output_type": "stream", + "name": "stderr", + "text": [ + "WARNING:absl:Buffer deduplication procedure will be skipped when flatbuffer library is not properly loaded\n" + ] + } + ], + "source": [ + "# A generator that provides a 
representative dataset\n", + "import tensorflow as tf\n", + "from PIL import Image\n", + "from torchvision import transforms\n", + "saved_model_dir = 'model_tf'\n", + "#flowers_dir = '/content/images'\n", + "def representative_data_gen():\n", + " dataset_list = tf.data.Dataset.list_files('/content/cats_and_dogs_filtered/train' + '/*/*')\n", + " for i in range(1):\n", + " image = next(iter(dataset_list))\n", + " image = tf.io.read_file(image)\n", + " image = tf.io.decode_jpeg(image, channels=3)\n", + " image = tf.image.resize(image, [IMAGE_SIZE, IMAGE_SIZE])\n", + " image = tf.reshape(image,[3,IMAGE_SIZE,IMAGE_SIZE])\n", + " image = tf.cast(image / 127., tf.float32)\n", + " image = tf.expand_dims(image, 0)\n", + " print(image.shape) \n", + " yield [image]\n", + "\n", + "from PIL import Image\n", + "from torchvision import transforms\n", + "# Download an example image from the pytorch website\n", + "import urllib\n", + "url, filename = (\"https://github.com/pytorch/hub/raw/master/images/dog.jpg\", \"dog.jpg\")\n", + "try: urllib.URLopener().retrieve(url, filename)\n", + "except: urllib.request.urlretrieve(url, filename)\n", + "\n", + "def representative_data_gen_1():\n", + " dataset_list = tf.data.Dataset.list_files('/content/cats_and_dogs_filtered/train' + '/*/*')\n", + " for i in range(100):\n", + " input_image = next(iter(dataset_list)) \n", + " input_image = Image.open(filename)\n", + " preprocess = transforms.Compose([\n", + " transforms.RandomCrop(224, padding=4),\n", + " transforms.Resize(224),\n", + " transforms.RandomHorizontalFlip(),\n", + " #transforms.CenterCrop(224),\n", + " transforms.ToTensor(),\n", + " transforms.Normalize((0.4914, 0.4822, 0.4465), (0.2023, 0.1994, 0.2010)),\n", + " ])\n", + " input_tensor = preprocess(input_image)\n", + " print(input_tensor.shape)\n", + " input_tensor = tf.expand_dims(input_tensor, 0)\n", + " print(\"torch input_tensor size\")\n", + " print(input_tensor.shape) \n", + " yield [input_tensor] \n", + " \n", + "#converter 
= tf.lite.TFLiteConverter.from_keras_model(model)\n", + "converter = tf.lite.TFLiteConverter.from_saved_model(saved_model_dir) \n", + "# This enables quantization\n", + "converter.optimizations = [tf.lite.Optimize.DEFAULT]\n", + "# This sets the representative dataset for quantization\n", + "converter.representative_dataset = representative_data_gen_1\n", + "# This ensures that if any ops can't be quantized, the converter throws an error\n", + "converter.target_spec.supported_ops = [tf.lite.OpsSet.TFLITE_BUILTINS_INT8]\n", + "# For full integer quantization, though supported types defaults to int8 only, we explicitly declare it for clarity.\n", + "converter.target_spec.supported_types = [tf.int8]\n", + "# These set the input and output tensors to uint8 (added in r2.3)\n", + "converter.inference_input_type = tf.int8\n", + "converter.inference_output_type = tf.int8\n", + "tflite_model = converter.convert()\n", + "\n", + "with open('mobilenet_v2_1.0_224_quant.tflite', 'wb') as f:\n", + " f.write(tflite_model)" + ] + }, + { + "cell_type": "markdown", + "source": [ + "##Run inference on TFLite(float32) model with dog.jpg\n", + "\"https://github.com/pytorch/hub/raw/master/images/dog.jpg\"" + ], + "metadata": { + "id": "FWGl9CYkO72I" + } + }, + { + "cell_type": "code", + "source": [ + "# Download an example image from the pytorch website\n", + "import urllib\n", + "url, filename = (\"https://github.com/pytorch/hub/raw/master/images/dog.jpg\", \"dog.jpg\")\n", + "try: urllib.URLopener().retrieve(url, filename)\n", + "except: urllib.request.urlretrieve(url, filename)" + ], + "metadata": { + "id": "C_vjTV0_Nlmv" + }, + "execution_count": 14, + "outputs": [] + }, + { + "cell_type": "code", + "source": [ + "# sample execution (requires torchvision)\n", + "from PIL import Image\n", + "from torchvision import transforms\n", + "input_image = Image.open(filename)\n", + "preprocess = transforms.Compose([\n", + " transforms.Resize(224),\n", + " transforms.CenterCrop(224),\n", + " 
transforms.RandomHorizontalFlip(),\n", + " transforms.ToTensor(),\n", + " transforms.Normalize(mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225]),\n", + "])\n", + "input_tensor = preprocess(input_image)\n", + "print(input_tensor.shape)\n", + "#input_tensor = tf.reshape(input_tensor,[3,IMAGE_SIZE,IMAGE_SIZE])\n", + "#input_tensor = tf.cast(input_tensor , tf.float32)\n", + "input_tensor = tf.expand_dims(input_tensor, 0)\n", + "print(\"torch input_tensor size\")\n", + "print(input_tensor.shape)\n", + "\n", + "import numpy as np\n", + "import tensorflow as tf\n", + "\n", + "tflite_model_path = '/content/mobilenet_v2_float32.tflite'\n", + "# Load the TFLite model and allocate tensors\n", + "interpreter = tf.lite.Interpreter(model_path=tflite_model_path)\n", + "interpreter.allocate_tensors()\n", + "\n", + "# Get input and output tensors\n", + "input_details = interpreter.get_input_details()\n", + "output_details = interpreter.get_output_details()\n", + "\n", + "# Test the model on random input data\n", + "input_shape = input_details[0]['shape']\n", + "print(input_shape)\n", + "input_data = np.array(np.random.random_sample(input_shape), dtype=np.float32)\n", + "interpreter.set_tensor(input_details[0]['index'], input_tensor)\n", + "\n", + "interpreter.invoke()\n", + "\n", + "# get_tensor() returns a copy of the tensor data\n", + "# use tensor() in order to get a pointer to the tensor\n", + "output_data = interpreter.get_tensor(output_details[0]['index'])\n", + "\n", + "print(\"Predicted value for [0, 1] normalization. 
Label index: {}, confidence: {:2.0f}%\"\n", + " .format(np.argmax(output_data), \n", + " 100 * output_data[0][np.argmax(output_data)]))" + ], + "metadata": { + "id": "fBwumAcjNqIH", + "outputId": "2d5eb3dd-1662-4f6e-afb1-61a9f50c2982", + "colab": { + "base_uri": "https://localhost:8080/" + } + }, + "execution_count": 15, + "outputs": [ + { + "output_type": "stream", + "name": "stdout", + "text": [ + "torch.Size([3, 224, 224])\n", + "torch input_tensor size\n", + "(1, 3, 224, 224)\n", + "[ 1 3 224 224]\n", + "Predicted value for [0, 1] normalization. Label index: 258, confidence: 1511%\n" + ] + } + ] + }, + { + "cell_type": "markdown", + "source": [ + "##Run inference on TFLite(quantized) model with dog.jpg\n", + "\"https://github.com/pytorch/hub/raw/master/images/dog.jpg\"" + ], + "metadata": { + "id": "avOdAS7-aO2X" + } + }, + { + "cell_type": "code", + "source": [ + "# Download an example image from the pytorch website\n", + "import urllib\n", + "url, filename = (\"https://github.com/pytorch/hub/raw/master/images/dog.jpg\", \"dog.jpg\")\n", + "try: urllib.URLopener().retrieve(url, filename)\n", + "except: urllib.request.urlretrieve(url, filename)\n", + "\n", + "import tensorflow as tf\n", + "import numpy as np\n", + "tflite_model_path = '/content/mobilenet_v2_1.0_224_quant.tflite'\n", + "interpreter = tf.lite.Interpreter(model_path=tflite_model_path)\n", + "interpreter.allocate_tensors()\n", + "\n", + "# Get input and output tensors\n", + "input_details = interpreter.get_input_details()\n", + "output_details = interpreter.get_output_details()\n", + "test_details = interpreter.get_input_details()[0]\n", + "\n", + "scale, zero_point = test_details['quantization']\n", + "print(scale)\n", + "print(zero_point)\n", + "\n", + "# Test the model on image data\n", + "# sample execution (requires torchvision)\n", + "from PIL import Image\n", + "from torchvision import transforms\n", + "input_image = Image.open(filename)\n", + "preprocess = transforms.Compose([\n", + " 
transforms.Resize(256),\n", + " transforms.CenterCrop(224),\n", + " transforms.ToTensor(),\n", + " transforms.Normalize(mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225]),\n", + "])\n", + "input_tensor = preprocess(input_image)\n", + "print(input_tensor.shape)\n", + "input_tensor = torch.unsqueeze(input_tensor, 0)\n", + "input_tensor = torch.quantize_per_tensor(input_tensor, torch.tensor(scale), torch.tensor(zero_point), torch.qint8)\n", + "input_tensor = torch.int_repr(input_tensor).numpy()\n", + "\n", + "print(\"torch input_tensor size:\")\n", + "print(input_tensor.shape)\n", + "print(input_tensor)\n", + "interpreter.set_tensor(input_details[0]['index'], input_tensor)\n", + "\n", + "interpreter.invoke()\n", + "\n", + "# get_tensor() returns a copy of the tensor data\n", + "# use tensor() in order to get a pointer to the tensor\n", + "output_data = interpreter.get_tensor(output_details[0]['index'])\n", + "\n", + "print(\"Predicted value . Label index: {}, confidence: {:2.0f}%\"\n", + " .format(np.argmax(output_data), \n", + " 100 * output_data[0][np.argmax(output_data)]))" + ], + "metadata": { + "id": "GoLMpH1QF4jc", + "outputId": "bee3376d-918e-4fbb-de9c-a34f2bbba844", + "colab": { + "base_uri": "https://localhost:8080/" + } + }, + "execution_count": 16, + "outputs": [ + { + "output_type": "stream", + "name": "stdout", + "text": [ + "0.020324693992733955\n", + "-8\n", + "torch.Size([3, 224, 224])\n", + "torch input_tensor size:\n", + "(1, 3, 224, 224)\n", + "[[[[-103 -103 -102 ... -108 -104 -103]\n", + " [-106 -101 -102 ... -105 -103 -95]\n", + " [-106 -104 -102 ... -109 -105 -101]\n", + " ...\n", + " [ -81 -87 -88 ... -51 -62 -61]\n", + " [ -84 -87 -88 ... -59 -88 -79]\n", + " [ -84 -81 -69 ... -53 -71 -70]]\n", + "\n", + " [[ -98 -98 -97 ... -106 -102 -102]\n", + " [ -98 -99 -99 ... -105 -103 -100]\n", + " [ -98 -99 -100 ... -105 -104 -102]\n", + " ...\n", + " [ -56 -56 -56 ... -31 -39 -41]\n", + " [ -56 -56 -57 ... -36 -63 -52]\n", + " [ -55 -56 -48 ... 
-27 -48 -44]]\n", + "\n", + " [[ -87 -86 -85 ... -92 -92 -92]\n", + " [ -90 -88 -88 ... -92 -92 -89]\n", + " [ -89 -90 -89 ... -93 -92 -91]\n", + " ...\n", + " [ -73 -82 -86 ... -52 -65 -62]\n", + " [ -72 -83 -87 ... -55 -81 -76]\n", + " [ -71 -78 -68 ... -44 -63 -71]]]]\n", + "Predicted value . Label index: 258, confidence: 11500%\n" + ] + } + ] + } + ] +} \ No newline at end of file diff --git a/third_party/xtensa/examples/pytorch_to_tflite/pytorch_to_tflite_converter/tinynn_pytorch_to_tflite_int8.ipynb b/third_party/xtensa/examples/pytorch_to_tflite/pytorch_to_tflite_converter/tinynn_pytorch_to_tflite_int8.ipynb new file mode 100644 index 0000000..8c3ded5 --- /dev/null +++ b/third_party/xtensa/examples/pytorch_to_tflite/pytorch_to_tflite_converter/tinynn_pytorch_to_tflite_int8.ipynb @@ -0,0 +1,1579 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": { + "id": "view-in-github", + "colab_type": "text" + }, + "source": [ + "\"Open" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/" + }, + "id": "cQQJJhk0ofMu", + "outputId": "8940d22b-b77b-4d8e-cc91-22032a29489e" + }, + "outputs": [ + { + "output_type": "stream", + "name": "stdout", + "text": [ + "Collecting git+https://github.com/alibaba/TinyNeuralNetwork.git\n", + " Cloning https://github.com/alibaba/TinyNeuralNetwork.git to /tmp/pip-req-build-sap89wfs\n", + " Running command git clone -q https://github.com/alibaba/TinyNeuralNetwork.git /tmp/pip-req-build-sap89wfs\n", + " Installing build dependencies ... \u001b[?25l\u001b[?25hdone\n", + " Getting requirements to build wheel ... \u001b[?25l\u001b[?25hdone\n", + " Preparing wheel metadata ... 
\u001b[?25l\u001b[?25hdone\n", + "Collecting ruamel.yaml>=0.16.12\n", + " Downloading ruamel.yaml-0.17.21-py3-none-any.whl (109 kB)\n", + "\u001b[K |████████████████████████████████| 109 kB 5.4 MB/s \n", + "\u001b[?25hRequirement already satisfied: numpy>=1.18.5 in /usr/local/lib/python3.7/dist-packages (from TinyNeuralNetwork==0.1.0.20220311152445+2f5195c823dffd3d8bfd118c92fc436cc34c258a) (1.21.5)\n", + "Collecting python-igraph>=0.9.6\n", + " Downloading python_igraph-0.9.9-py3-none-any.whl (9.1 kB)\n", + "Collecting tflite==2.3.0\n", + " Downloading tflite-2.3.0-py2.py3-none-any.whl (79 kB)\n", + "\u001b[K |████████████████████████████████| 79 kB 7.3 MB/s \n", + "\u001b[?25hCollecting PyYAML>=5.3.1\n", + " Downloading PyYAML-6.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl (596 kB)\n", + "\u001b[K |████████████████████████████████| 596 kB 46.9 MB/s \n", + "\u001b[?25hRequirement already satisfied: flatbuffers in /usr/local/lib/python3.7/dist-packages (from tflite==2.3.0->TinyNeuralNetwork==0.1.0.20220311152445+2f5195c823dffd3d8bfd118c92fc436cc34c258a) (2.0)\n", + "Collecting igraph==0.9.9\n", + " Downloading igraph-0.9.9-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (3.1 MB)\n", + "\u001b[K |████████████████████████████████| 3.1 MB 33.3 MB/s \n", + "\u001b[?25hCollecting texttable>=1.6.2\n", + " Downloading texttable-1.6.4-py2.py3-none-any.whl (10 kB)\n", + "Collecting ruamel.yaml.clib>=0.2.6\n", + " Downloading ruamel.yaml.clib-0.2.6-cp37-cp37m-manylinux1_x86_64.whl (546 kB)\n", + "\u001b[K |████████████████████████████████| 546 kB 47.0 MB/s \n", + "\u001b[?25hBuilding wheels for collected packages: TinyNeuralNetwork\n", + " Building wheel for TinyNeuralNetwork (PEP 517) ... 
\u001b[?25l\u001b[?25hdone\n", + " Created wheel for TinyNeuralNetwork: filename=TinyNeuralNetwork-0.1.0.20220311152448+2f5195c823dffd3d8bfd118c92fc436cc34c258a-py3-none-any.whl size=197269 sha256=ae9857ee8f587ecd2b2dca1bd5ad5b001482270e357b2d2b44d0657857f34430\n", + " Stored in directory: /tmp/pip-ephem-wheel-cache-91xkcij9/wheels/64/6b/f5/9cf69de054ba0de53e572bc2f13988a664f3dc9623e6f0825d\n", + "Successfully built TinyNeuralNetwork\n", + "Installing collected packages: texttable, ruamel.yaml.clib, igraph, tflite, ruamel.yaml, PyYAML, python-igraph, TinyNeuralNetwork\n", + " Attempting uninstall: PyYAML\n", + " Found existing installation: PyYAML 3.13\n", + " Uninstalling PyYAML-3.13:\n", + " Successfully uninstalled PyYAML-3.13\n", + "Successfully installed PyYAML-6.0 TinyNeuralNetwork-0.1.0.20220311152448+2f5195c823dffd3d8bfd118c92fc436cc34c258a igraph-0.9.9 python-igraph-0.9.9 ruamel.yaml-0.17.21 ruamel.yaml.clib-0.2.6 texttable-1.6.4 tflite-2.3.0\n" + ] + } + ], + "source": [ + "!pip install git+https://github.com/alibaba/TinyNeuralNetwork.git" + ] + }, + { + "cell_type": "code", + "source": [ + "!wget --no-check-certificate \\\n", + " https://storage.googleapis.com/mledu-datasets/cats_and_dogs_filtered.zip \\\n", + " -O /content/cats_and_dogs_filtered.zip" + ], + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/" + }, + "id": "OtDyr2EjUIGF", + "outputId": "0c81cb92-a996-45a3-fd41-5f2230ffb8ee" + }, + "execution_count": null, + "outputs": [ + { + "output_type": "stream", + "name": "stdout", + "text": [ + "--2022-03-11 15:24:52-- https://storage.googleapis.com/mledu-datasets/cats_and_dogs_filtered.zip\n", + "Resolving storage.googleapis.com (storage.googleapis.com)... 142.250.152.128, 173.194.198.128, 173.194.74.128, ...\n", + "Connecting to storage.googleapis.com (storage.googleapis.com)|142.250.152.128|:443... connected.\n", + "HTTP request sent, awaiting response... 
200 OK\n", + "Length: 68606236 (65M) [application/zip]\n", + "Saving to: ‘/content/cats_and_dogs_filtered.zip’\n", + "\n", + "/content/cats_and_d 100%[===================>] 65.43M 222MB/s in 0.3s \n", + "\n", + "2022-03-11 15:24:53 (222 MB/s) - ‘/content/cats_and_dogs_filtered.zip’ saved [68606236/68606236]\n", + "\n" + ] + } + ] + }, + { + "cell_type": "code", + "source": [ + "import os\n", + "import zipfile\n", + "\n", + "local_zip = '/content/cats_and_dogs_filtered.zip'\n", + "zip_ref = zipfile.ZipFile(local_zip, 'r')\n", + "zip_ref.extractall('/content')\n", + "zip_ref.close()" + ], + "metadata": { + "id": "xxcNcxhhUJwG" + }, + "execution_count": null, + "outputs": [] + }, + { + "cell_type": "code", + "source": [ + "import random\n", + "\n", + "from glob import glob\n", + "\n", + "from PIL import Image\n", + "import torch\n", + "from torchvision import transforms\n", + "import torchvision.models as models\n", + "\n", + "\n", + "from tinynn.converter import TFLiteConverter\n", + "from tinynn.graph.quantization.quantizer import PostQuantizer\n", + "from tinynn.graph.tracer import model_tracer\n", + "from tinynn.util.cifar10 import get_dataloader, train_one_epoch, validate\n", + "from tinynn.util.train_util import DLContext, get_device, train\n", + "\n", + "\n", + "random.seed(0)\n", + "\n", + "\n", + "with model_tracer():\n", + " model = models.mobilenet_v2(pretrained=True)\n", + " model.eval()\n", + "\n", + " # Provide a viable input for the model\n", + " dummy_input = torch.rand((1, 3, 224, 224))\n", + "\n", + " quantizer = PostQuantizer(model, dummy_input, work_dir='out', config={'asymmetric': True, 'per_tensor': False})\n", + " qat_model = quantizer.quantize()\n", + "\n", + "print(qat_model)\n", + "\n", + "# Use DataParallel to speed up training when possible\n", + "if torch.cuda.device_count() > 1:\n", + " qat_model = nn.DataParallel(qat_model)\n", + "\n", + "# Move model to the appropriate device\n", + "device = get_device()\n", + 
"qat_model.to(device=device)\n", + "\n", + "\n", + "dataset_list = glob('/content/cats_and_dogs_filtered/train/**/*', recursive=True)\n", + "random.shuffle(dataset_list)\n", + "for i in range(100):\n", + " filename = dataset_list[i] \n", + " print(filename)\n", + " input_image = Image.open(filename)\n", + " preprocess = transforms.Compose([\n", + " transforms.Resize(256),\n", + " transforms.CenterCrop(224),\n", + " transforms.ToTensor(),\n", + " transforms.Normalize(mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225]),\n", + " ])\n", + " input_tensor = preprocess(input_image)\n", + " print(input_tensor.shape)\n", + " input_tensor = torch.unsqueeze(input_tensor, 0)\n", + " print(\"torch input_tensor size\")\n", + " print(input_tensor.shape) \n", + " qat_model(input_tensor.to(device=device))\n", + " \n", + "\n", + "with torch.no_grad():\n", + " qat_model.eval()\n", + " qat_model.cpu()\n", + "\n", + " # The step below converts the model to an actual quantized model, which uses the quantized kernels.\n", + " qat_model = torch.quantization.convert(qat_model)\n", + "\n", + " # When converting quantized models, please ensure the quantization backend is set.\n", + " torch.backends.quantized.engine = quantizer.backend\n", + "\n", + " # The code section below is used to convert the model to the TFLite format\n", + " # If you need a quantized model with a specific data type (e.g. 
int8)\n", + " # you may specify `quantize_target_type='int8'` in the following line.\n", + " # If you need a quantized model with strict symmetric quantization check (with pre-defined zero points),\n", + " # you may specify `strict_symmetric_check=True` in the following line.\n", + " converter = TFLiteConverter(qat_model, dummy_input, tflite_path='out/qat_model.tflite', quantize_target_type='int8', input_transpose=False, fuse_quant_dequant=True)\n", + " converter.convert()\n", + "\n" + ], + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/", + "height": 1000, + "referenced_widgets": [ + "b3a889b4d1354d3db0ca86eb8c5b9ff5", + "5f448037a9264cd69400ef2afaba8147", + "b2ba636e58254ec0b503db255278d4d9", + "c63c2bf572bb4b33b5006fe509c7c980", + "044261591aa64adebf068a257966fb4c", + "215501001349404da1b206a54f504a7a", + "3da35f01774d424b8c0a19263af80fc3", + "90989017e48e43a4a785765b23f30810", + "9e8caabc3a75493f8164c7bce52187db", + "43382fdb57104a75890ff3aa3c4a4a20", + "e0633eaedb5c41b393389bc4dd14f128" + ] + }, + "id": "Gb3n4xr7SZsa", + "outputId": "1143817b-86f0-4691-c70f-6a9ec700dc3e" + }, + "execution_count": null, + "outputs": [ + { + "output_type": "stream", + "name": "stderr", + "text": [ + "Downloading: \"https://download.pytorch.org/models/mobilenet_v2-b0353104.pth\" to /root/.cache/torch/hub/checkpoints/mobilenet_v2-b0353104.pth\n" + ] + }, + { + "output_type": "display_data", + "data": { + "application/vnd.jupyter.widget-view+json": { + "model_id": "b3a889b4d1354d3db0ca86eb8c5b9ff5", + "version_minor": 0, + "version_major": 2 + }, + "text/plain": [ + " 0%| | 0.00/13.6M [00:00version() != TFLITE_SCHEMA_VERSION) { + MicroPrintf( + "Model provided is schema version %d not equal " + "to supported version %d.\n", + model->version(), TFLITE_SCHEMA_VERSION); + } + + // Pull in only the eperation implementations we need. + // This relies on a complete list of all the ops needed by this graph. 
+ tflite::PytorchOpsResolver resolver; + + // Create an area of memory to use for input, output, and intermediate arrays. + constexpr int tensor_arena_size = 3 * 1024 * 1024; + + uint8_t tensor_arena[tensor_arena_size]; + + // Build an interpreter to run the model with. + tflite::MicroInterpreter interpreter(model, resolver, tensor_arena, + tensor_arena_size); + interpreter.AllocateTensors(); + + // Get information about the memory area to use for the model's input. + TfLiteTensor* input = interpreter.input(0); + + // Make sure the input has the properties we expect. + TF_LITE_MICRO_EXPECT(input != nullptr); + TF_LITE_MICRO_EXPECT_EQ(4, input->dims->size); + TF_LITE_MICRO_EXPECT_EQ(1, input->dims->data[0]); + TF_LITE_MICRO_EXPECT_EQ(3, input->dims->data[1]); + TF_LITE_MICRO_EXPECT_EQ(224, input->dims->data[2]); + TF_LITE_MICRO_EXPECT_EQ(224, input->dims->data[3]); + TF_LITE_MICRO_EXPECT_EQ(kTfLiteInt8, input->type); + // Make sure the input has the properties we expect. + MicroPrintf("input->dims->size:%d", input->dims->size); + MicroPrintf("input->dims->data[0]:%d", input->dims->data[0]); + MicroPrintf("input->dims->data[1]:%d", input->dims->data[1]); + MicroPrintf("input->dims->data[2]:%d", input->dims->data[2]); + MicroPrintf("input->dims->data[3]:%d", input->dims->data[3]); + MicroPrintf("input->type:%d", input->type); + MicroPrintf("input->bytes:%d", input->bytes); + + // Copy an image with a person into the memory area used for the input. + TFLITE_DCHECK_EQ(input->bytes, + static_cast(g_pytorch_images_dog_jpg_len)); + memcpy(input->data.int8, g_pytorch_images_dog_jpg_data, input->bytes); + + // Run the model on this input and make sure it succeeds. + TfLiteStatus invoke_status = interpreter.Invoke(); + if (invoke_status != kTfLiteOk) { + MicroPrintf("Invoke failed\n"); + } + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, invoke_status); + + // Get the output from the model, and make sure it's the expected size and + // type. 
+ TfLiteTensor* output = interpreter.output(0); + TF_LITE_MICRO_EXPECT_EQ(2, output->dims->size); + TF_LITE_MICRO_EXPECT_EQ(1, output->dims->data[0]); + TF_LITE_MICRO_EXPECT_EQ(1000, output->dims->data[1]); + TF_LITE_MICRO_EXPECT_EQ(kTfLiteInt8, output->type); + MicroPrintf("output->dims->size:%d", output->dims->size); + MicroPrintf("output->dims->data[0]:%d", output->dims->data[0]); + MicroPrintf("output->dims->data[1]:%d", output->dims->data[1]); + MicroPrintf("output->type:%d", output->type); + + int label_index = 0; + int label_confidence = -257; + for (int i = 0; i < (output->dims->data[0] * output->dims->data[1]); i++) { + if (label_confidence < output->data.int8[i]) { + label_index = i; + label_confidence = output->data.int8[i]; + } + } + MicroPrintf("Label index:%d\t", label_index); + MicroPrintf("Confidence:%d", label_confidence); + + MicroPrintf("Ran successfully\n"); +} +#endif // defined(HIFI5) + +TF_LITE_MICRO_TESTS_END